From a6b64cd58c7ba95167af77b1e7132c7fc67429af Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 12 Mar 2025 20:48:26 +0000 Subject: [PATCH 001/295] feat: support streaming uploads (#1) --- lib/openai/pooled_net_requester.rb | 30 ++-- lib/openai/util.rb | 181 ++++++++++++++++++------ rbi/lib/openai/pooled_net_requester.rbi | 11 +- rbi/lib/openai/util.rbi | 36 ++++- sig/openai/pooled_net_requester.rbs | 8 +- sig/openai/util.rbs | 22 ++- test/openai/util_test.rb | 33 ++++- 7 files changed, 254 insertions(+), 67 deletions(-) diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index 57bd2e81..d15f367e 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -48,9 +48,11 @@ def calibrate_socket_timeout(conn, deadline) # # @option request [Hash{String=>String}] :headers # + # @param blk [Proc] + # # @return [Net::HTTPGenericRequest] # - def build_request(request) + def build_request(request, &) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) req = Net::HTTPGenericRequest.new( method.to_s.upcase, @@ -64,12 +66,14 @@ def build_request(request) case body in nil in String - req.body = body + req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) in StringIO - req.body = body.string - in IO - body.rewind - req.body_stream = body + req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + in IO | Enumerator + req["transfer-encoding"] ||= "chunked" unless req["content-length"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) end req @@ -97,7 +101,7 @@ def build_request(request) pool = @mutex.synchronize do - @pools[origin] ||= ConnectionPool.new(size: Etc.nprocessors) do + @pools[origin] ||= ConnectionPool.new(size: @size) do self.class.connect(url) end end @@ -128,7 +132,6 @@ def build_request(request) # def execute(request) url, deadline = request.fetch_values(:url, :deadline) - req = self.class.build_request(request) eof = false finished = false @@ -136,6 +139,10 @@ def execute(request) with_pool(url) do |conn| next if finished + req = self.class.build_request(request) do + self.class.calibrate_socket_timeout(conn, deadline) + end + self.class.calibrate_socket_timeout(conn, deadline) conn.start unless conn.started? @@ -168,8 +175,13 @@ def execute(request) [response, (response.body = body)] end - def initialize + # @private + # + # @param size [Integer] + # + def initialize(size: Etc.nprocessors) @mutex = Mutex.new + @size = size @pools = {} end end diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 29f6258a..10368046 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -399,41 +399,152 @@ def normalized_headers(*headers) end end + # @private + # + # An adapter that satisfies the IO interface required by `::IO.copy_stream` + class ReadIOAdapter + # @private + # + # @param max_len [Integer, nil] + # + # @return [String] + # + private def read_enum(max_len) + case max_len + in nil + @stream.to_a.join + in Integer + @buf << @stream.next while @buf.length < max_len + @buf.slice!(..max_len) + end + rescue StopIteration + @stream = nil + @buf.slice!(0..) 
+ end + + # @private + # + # @param max_len [Integer, nil] + # @param out_string [String, nil] + # + # @return [String, nil] + # + def read(max_len = nil, out_string = nil) + case @stream + in nil + nil + in IO | StringIO + @stream.read(max_len, out_string) + in Enumerator + read = read_enum(max_len) + case out_string + in String + out_string.replace(read) + in nil + read + end + end + .tap(&@blk) + end + + # @private + # + # @param stream [String, IO, StringIO, Enumerable] + # @param blk [Proc] + # + def initialize(stream, &blk) + @stream = stream.is_a?(String) ? StringIO.new(stream) : stream + @buf = String.new.b + @blk = blk + end + end + + class << self + # @param blk [Proc] + # + # @return [Enumerable] + # + def string_io(&blk) + Enumerator.new do |y| + y.define_singleton_method(:write) do + self << _1.clone + _1.bytesize + end + + blk.call(y) + end + end + end + class << self # @private # - # @param io [StringIO] + # @param y [Enumerator::Yielder] # @param boundary [String] # @param key [Symbol, String] # @param val [Object] # - private def encode_multipart_formdata(io, boundary:, key:, val:) - io << "--#{boundary}\r\n" - io << "Content-Disposition: form-data" + private def encode_multipart_formdata(y, boundary:, key:, val:) + y << "--#{boundary}\r\n" + y << "Content-Disposition: form-data" unless key.nil? name = ERB::Util.url_encode(key.to_s) - io << "; name=\"#{name}\"" + y << "; name=\"#{name}\"" end if val.is_a?(IO) filename = ERB::Util.url_encode(File.basename(val.to_path)) - io << "; filename=\"#{filename}\"" + y << "; filename=\"#{filename}\"" end - io << "\r\n" + y << "\r\n" case val - in IO | StringIO - io << "Content-Type: application/octet-stream\r\n\r\n" - IO.copy_stream(val, io) + in IO + y << "Content-Type: application/octet-stream\r\n\r\n" + IO.copy_stream(val, y) + in StringIO + y << "Content-Type: application/octet-stream\r\n\r\n" + y << val.string in String - io << "Content-Type: application/octet-stream\r\n\r\n" - io << val.to_s + y << "Content-Type: application/octet-stream\r\n\r\n" + y << val.to_s in true | false | Integer | Float | Symbol - io << "Content-Type: text/plain\r\n\r\n" - io << val.to_s + y << "Content-Type: text/plain\r\n\r\n" + y << val.to_s else - io << "Content-Type: application/json\r\n\r\n" - io << JSON.fast_generate(val) + y << "Content-Type: application/json\r\n\r\n" + y << JSON.fast_generate(val) end - io << "\r\n" + y << "\r\n" + end + + # @private + # + # @param body [Object] + # + # @return [Array(String, Enumerable)] + # + private def encode_multipart_streaming(body) + boundary = SecureRandom.urlsafe_base64(60) + + strio = string_io do |y| + case body + in Hash + body.each do |key, val| + case val + in Array if val.all? { primitive?(_1) } + val.each do |v| + encode_multipart_formdata(y, boundary: boundary, key: key, val: v) + end + else + encode_multipart_formdata(y, boundary: boundary, key: key, val: val) + end + end + else + encode_multipart_formdata(y, boundary: boundary, key: nil, val: body) + end + y << "--#{boundary}--\r\n" + end + + [boundary, strio] end # @private @@ -449,37 +560,11 @@ def encode_content(headers, body) in ["application/json", Hash | Array] [headers, JSON.fast_generate(body)] in [%r{^multipart/form-data}, Hash | IO | StringIO] - boundary = SecureRandom.urlsafe_base64(60) - strio = StringIO.new.tap do |io| - case body - in Hash - body.each do |key, val| - case val - in Array if val.all? 
{ primitive?(_1) } - val.each do |v| - encode_multipart_formdata(io, boundary: boundary, key: key, val: v) - end - else - encode_multipart_formdata(io, boundary: boundary, key: key, val: val) - end - end - else - encode_multipart_formdata(io, boundary: boundary, key: nil, val: body) - end - io << "--#{boundary}--\r\n" - io.rewind - end - headers = { - **headers, - "content-type" => "#{content_type}; boundary=#{boundary}", - "transfer-encoding" => "chunked" - } + boundary, strio = encode_multipart_streaming(body) + headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} [headers, strio] in [_, StringIO] [headers, body.string] - in [_, IO] - headers = {**headers, "transfer-encoding" => "chunked"} - [headers, body] else [headers, body] end @@ -589,8 +674,9 @@ def decode_lines(enum) chain_fused(enum) do |y| enum.each do |row| + offset = buffer.bytesize buffer << row - while (match = re.match(buffer, cr_seen.to_i)) + while (match = re.match(buffer, cr_seen&.to_i || offset)) case [match.captures.first, cr_seen] in ["\r", nil] cr_seen = match.end(1) @@ -600,6 +686,7 @@ def decode_lines(enum) else y << buffer.slice!(..(match.end(1).pred)) end + offset = 0 cr_seen = nil end end @@ -637,7 +724,7 @@ def decode_sse(lines) in "event" current.merge!(event: value) in "data" - (current[:data] ||= String.new.b) << value << "\n" + (current[:data] ||= String.new.b) << (value << "\n") in "id" unless value.include?("\0") current.merge!(id: value) in "retry" if /^\d+$/ =~ value diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index e94f1912..9d80cd5f 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -15,8 +15,11 @@ module OpenAI def calibrate_socket_timeout(conn, deadline) end - sig { params(request: OpenAI::PooledNetRequester::RequestShape).returns(Net::HTTPGenericRequest) } - def build_request(request) + sig do + params(request: OpenAI::PooledNetRequester::RequestShape, blk: T.proc.params(arg0: String).void) + .returns(Net::HTTPGenericRequest) + end + def build_request(request, &blk) end end @@ -31,8 +34,8 @@ module OpenAI def execute(request) end - sig { returns(T.attached_class) } - def self.new + sig { params(size: Integer).returns(T.attached_class) } + def self.new(size: Etc.nprocessors) end end end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 8b354b0d..246a7672 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -130,9 +130,41 @@ module OpenAI end end + class ReadIOAdapter + sig { params(max_len: T.nilable(Integer)).returns(String) } + private def read_enum(max_len) + end + + sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) } + def read(max_len = nil, out_string = nil) + end + + sig do + params( + stream: T.any(String, IO, StringIO, T::Enumerable[String]), + blk: T.proc.params(arg0: String).void + ) + .returns(T.attached_class) + end + def self.new(stream, &blk) + end + end + + class << self + sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) } + def string_io(&blk) + end + end + class << self - sig { params(io: StringIO, boundary: String, key: T.any(Symbol, String), val: T.anything).void } - private def encode_multipart_formdata(io, boundary:, key:, val:) + sig do + params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void + end + private def encode_multipart_formdata(y, boundary:, key:, val:) + end + + sig { 
params(body: T.anything).returns([String, T::Enumerable[String]]) } + private def encode_multipart_streaming(body) end sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) } diff --git a/sig/openai/pooled_net_requester.rbs b/sig/openai/pooled_net_requester.rbs index 4f89cf8d..9e7daafb 100644 --- a/sig/openai/pooled_net_requester.rbs +++ b/sig/openai/pooled_net_requester.rbs @@ -13,7 +13,11 @@ module OpenAI def self.calibrate_socket_timeout: (top conn, Float deadline) -> void - def self.build_request: (OpenAI::PooledNetRequester::request request) -> top + def self.build_request: ( + OpenAI::PooledNetRequester::request request + ) { + (String arg0) -> void + } -> top private def with_pool: (URI::Generic url) { (top arg0) -> void } -> void @@ -21,6 +25,6 @@ module OpenAI OpenAI::PooledNetRequester::request request ) -> [top, Enumerable[String]] - def initialize: -> void + def initialize: (size: Integer) -> void end end diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs index 2781e634..2ee3b4df 100644 --- a/sig/openai/util.rbs +++ b/sig/openai/util.rbs @@ -70,13 +70,33 @@ module OpenAI | ::Array[(String | Integer)?])?] headers ) -> ::Hash[String, String] + class ReadIOAdapter + private def read_enum: (Integer? max_len) -> String + + def read: (?Integer? max_len, ?String? out_string) -> String? + + def initialize: ( + String | IO | StringIO | Enumerable[String] stream + ) { + (String arg0) -> void + } -> void + end + + def self?.string_io: { + (Enumerator::Yielder y) -> void + } -> Enumerable[String] + def self?.encode_multipart_formdata: ( - StringIO io, + Enumerator::Yielder y, boundary: String, key: Symbol | String, val: top ) -> void + def self?.encode_multipart_streaming: ( + top body + ) -> [String, Enumerable[String]] + def self?.encode_content: (::Hash[String, String] headers, top body) -> top def self?.decode_content: ( diff --git a/test/openai/util_test.rb b/test/openai/util_test.rb index 91c7bd24..d319e2f9 100644 --- a/test/openai/util_test.rb +++ b/test/openai/util_test.rb @@ -161,7 +161,8 @@ class OpenAI::Test::UtilFormDataEncodingTest < Minitest::Test class FakeCGI < CGI def initialize(headers, io) @ctype = headers["content-type"] - @io = io + @io = OpenAI::Util::ReadIOAdapter.new(io) {} + @c_len = io.to_a.join.bytesize.to_s super() end @@ -171,7 +172,7 @@ def env_table { "REQUEST_METHOD" => "POST", "CONTENT_TYPE" => @ctype, - "CONTENT_LENGTH" => stdinput.string.length + "CONTENT_LENGTH" => @c_len } end end @@ -208,6 +209,34 @@ def test_hash_encode end end +class OpenAI::Test::UtilIOAdapterTest < Minitest::Test + def test_copy_read + cases = { + StringIO.new("abc") => "abc", + Enumerator.new { _1 << "abc" } => "abc" + } + cases.each do |input, expected| + io = StringIO.new + adapter = OpenAI::Util::ReadIOAdapter.new(input) {} + IO.copy_stream(adapter, io) + assert_equal(expected, io.string) + end + end + + def test_copy_write + cases = { + StringIO.new => "", + StringIO.new("abc") => "abc" + } + cases.each do |input, expected| + enum = OpenAI::Util.string_io do |y| + IO.copy_stream(input, y) + end + assert_equal(expected, enum.to_a.join) + end + end +end + class OpenAI::Test::UtilFusedEnumTest < Minitest::Test def test_closing arr = [1, 2, 3] From 765f97eed688167e1ed79c4284cae7e670a65264 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 12 Mar 2025 21:06:12 +0000 Subject: [PATCH 002/295] fix: enums should only coerce matching symbols into strings (#3) --- 
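A minimal sketch of the coercion rule this patch introduces, assuming the
`OpenAI::Enum` DSL from `lib/openai/base_model.rb` (the `Color` class below is a
hypothetical example, not part of the diff): a String is interned to a Symbol
only when the resulting symbol is one of the enum's declared values; any other
input passes through unchanged so that later validation can reject it.

    class Color < OpenAI::Enum
      RED = :red
      BLUE = :blue
    end

    Color.coerce("red")   # => :red    ("red".to_sym is a declared value)
    Color.coerce(:blue)   # => :blue   (already a matching symbol)
    Color.coerce("green") # => "green" (no match: left as a String)
    Color.coerce(1.0)     # => 1.0     (non-coercible input passes through)
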
lib/openai/base_model.rb | 29 +++++++++++++++++++++++++---- test/openai/base_model_test.rb | 16 +++++++++++++++- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 8f58a6ab..798e49ca 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -55,11 +55,11 @@ def type_info(spec) type_info(spec.slice(:const, :enum, :union).first&.last) in Proc spec - in OpenAI::Converter | Class + in OpenAI::Converter | Class | Symbol -> { spec } in true | false -> { OpenAI::BooleanModel } - in NilClass | true | false | Symbol | Integer | Float + in NilClass | Integer | Float -> { spec.class } end end @@ -82,6 +82,13 @@ def coerce(target, value) case target in OpenAI::Converter target.coerce(value) + in Symbol + case value + in Symbol | String if (val = value.to_sym) == target + val + else + value + end in Class case target in -> { _1 <= NilClass } @@ -140,6 +147,13 @@ def try_strict_coerce(target, value) case target in OpenAI::Converter target.try_strict_coerce(value) + in Symbol + case value + in Symbol | String if (val = value.to_sym) == target + [true, val, 1] + else + [false, false, 0] + end in Class case [target, value] in [-> { _1 <= NilClass }, _] @@ -367,7 +381,14 @@ class << self # # @return [Symbol, Object] # - def coerce(value) = (value.is_a?(String) ? value.to_sym : value) + def coerce(value) + case value + in Symbol | String if values.include?(val = value.to_sym) + val + else + value + end + end # @!parse # # @private @@ -388,7 +409,7 @@ def try_strict_coerce(value) return [true, value, 1] if values.include?(value) case value - in String if values.include?(val = value.to_sym) + in Symbol | String if values.include?(val = value.to_sym) [true, val, 1] else case [value, values.first] diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index 0444fb0e..f6e598d0 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -22,7 +22,7 @@ def test_basic_coerce end assert_pattern do - OpenAI::Converter.coerce(A2, %w[a b c]) => [:a, :b, :c] + OpenAI::Converter.coerce(A2, %w[a b c]) => [:a, :b, "c"] end end @@ -338,4 +338,18 @@ def test_basic_eql refute_equal(U1, U2) assert_equal(U1, U3) end + + class U4 < OpenAI::Union + variant :a, const: :a + variant :b, const: :b + end + + def test_basic_const_union + assert_pattern do + U4.coerce(nil) => nil + U4.coerce("") => "" + U4.coerce(:a) => :a + U4.coerce("a") => :a + end + end end From bef3f79b52f7d9cb1312ce3b3528abd7d0ecea3a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 12 Mar 2025 23:02:56 +0000 Subject: [PATCH 003/295] chore: remove stale thread local checks (#4) --- lib/openai/pooled_net_requester.rb | 21 ++------------------- 1 file changed, 2 insertions(+), 19 deletions(-) diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index d15f367e..d1a15ffe 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -85,20 +85,8 @@ def build_request(request, &) # @param url [URI::Generic] # @param blk [Proc] # - private def with_pool(url, &blk) + private def with_pool(url, &) origin = OpenAI::Util.uri_origin(url) - th = Thread.current - key = :"#{object_id}-#{self.class.name}-connection_in_use_for_#{origin}" - - if th[key] - tap do - conn = self.class.connect(url) - return blk.call(conn) - ensure - conn.finish if conn&.started? 
- end - end - pool = @mutex.synchronize do @pools[origin] ||= ConnectionPool.new(size: @size) do @@ -106,12 +94,7 @@ def build_request(request, &) end end - pool.with do |conn| - th[key] = true - blk.call(conn) - ensure - th[key] = nil - end + pool.with(&) end # @private From fb00050dcc1fb6143cd54e6e3de627da3f2390b8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 15:23:39 +0000 Subject: [PATCH 004/295] chore: refactor BasePage to have initializer (#5) --- lib/openai.rb | 1 + lib/openai/base_page.rb | 21 +++++++++++-------- lib/openai/base_stream.rb | 38 +++++++++++++++++----------------- lib/openai/cursor_page.rb | 6 +----- lib/openai/page.rb | 6 +----- manifest.yaml | 1 + rbi/lib/openai/base_stream.rbi | 30 +++++++++++++-------------- sig/openai/base_stream.rbs | 18 ++++++++-------- 8 files changed, 59 insertions(+), 62 deletions(-) diff --git a/lib/openai.rb b/lib/openai.rb index 2c73d487..77ad926a 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -7,6 +7,7 @@ require "etc" require "json" require "net/http" +require "pathname" require "rbconfig" require "securerandom" require "set" diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index 762b38d7..9f315c7b 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -47,14 +47,17 @@ def to_enum = super(:auto_paging_each) alias_method :enum_for, :to_enum - # @!parse - # # @private - # # - # # @param client [OpenAI::BaseClient] - # # @param req [Hash{Symbol=>Object}] - # # @param headers [Hash{String=>String}, Net::HTTPHeader] - # # @param page_data [Object] - # # - # def initialize(client:, req:, headers:, page_data:); end + # @private + # + # @param client [OpenAI::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Object] + # + def initialize(client:, req:, headers:, page_data:) + @client = client + @req = req + super() + end end end diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 51e8dee6..082d045d 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -17,22 +17,9 @@ module OpenAI # messages => Array # ``` class BaseStream - # @private - # - # @param model [Class, OpenAI::Converter] - # @param url [URI::Generic] - # @param status [Integer] - # @param response [Net::HTTPResponse] - # @param messages [Enumerable] + # @return [void] # - def initialize(model:, url:, status:, response:, messages:) - @model = model - @url = url - @status = status - @response = response - @messages = messages - @iterator = iterator - end + def close = OpenAI::Util.close_fused!(@iterator) # @private # @@ -40,10 +27,6 @@ def initialize(model:, url:, status:, response:, messages:) # private def iterator = (raise NotImplementedError) - # @return [void] - # - def close = OpenAI::Util.close_fused!(@iterator) - # @param blk [Proc] # # @return [void] @@ -60,5 +43,22 @@ def for_each(&) def to_enum = @iterator alias_method :enum_for, :to_enum + + # @private + # + # @param model [Class, OpenAI::Converter] + # @param url [URI::Generic] + # @param status [Integer] + # @param response [Net::HTTPResponse] + # @param messages [Enumerable] + # + def initialize(model:, url:, status:, response:, messages:) + @model = model + @url = url + @status = status + @response = response + @messages = messages + @iterator = iterator + end end end diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index f9f84dcd..0e74ea9e 100644 --- 
a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -30,7 +30,6 @@ class CursorPage # @return [Boolean] attr_accessor :has_more - # rubocop:disable Lint/UnusedMethodArgument # @private # # @param client [OpenAI::BaseClient] @@ -39,8 +38,7 @@ class CursorPage # @param page_data [Hash{Symbol=>Object}] # def initialize(client:, req:, headers:, page_data:) - @client = client - @req = req + super model = req.fetch(:model) case page_data @@ -55,10 +53,8 @@ def initialize(client:, req:, headers:, page_data:) else end end - # rubocop:enable Lint/UnusedMethodArgument # @return [Boolean] - # def next_page? has_more end diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 54fc4d39..fa3bd198 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -30,7 +30,6 @@ class Page # @return [String] attr_accessor :object - # rubocop:disable Lint/UnusedMethodArgument # @private # # @param client [OpenAI::BaseClient] @@ -39,8 +38,7 @@ class Page # @param page_data [Array] # def initialize(client:, req:, headers:, page_data:) - @client = client - @req = req + super model = req.fetch(:model) case page_data @@ -55,10 +53,8 @@ def initialize(client:, req:, headers:, page_data:) else end end - # rubocop:enable Lint/UnusedMethodArgument # @return [Boolean] - # def next_page? false end diff --git a/manifest.yaml b/manifest.yaml index e2306edd..fa9c3e5e 100644 --- a/manifest.yaml +++ b/manifest.yaml @@ -5,6 +5,7 @@ dependencies: - etc - json - net/http + - pathname - rbconfig - securerandom - set diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index edd7627b..f527b849 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -4,27 +4,14 @@ module OpenAI class BaseStream Elem = type_member(:out) - sig do - params( - model: T.any(T::Class[T.anything], OpenAI::Converter), - url: URI::Generic, - status: Integer, - response: Net::HTTPResponse, - messages: T::Enumerable[OpenAI::Util::SSEMessage] - ) - .returns(T.attached_class) - end - def self.new(model:, url:, status:, response:, messages:) + sig { void } + def close end sig { overridable.returns(T::Enumerable[Elem]) } private def iterator end - sig { void } - def close - end - sig { params(blk: T.proc.params(arg0: Elem).void).void } def for_each(&blk) end @@ -34,5 +21,18 @@ module OpenAI end alias_method :enum_for, :to_enum + + sig do + params( + model: T.any(T::Class[T.anything], OpenAI::Converter), + url: URI::Generic, + status: Integer, + response: Net::HTTPResponse, + messages: T::Enumerable[OpenAI::Util::SSEMessage] + ) + .returns(T.attached_class) + end + def self.new(model:, url:, status:, response:, messages:) + end end end diff --git a/sig/openai/base_stream.rbs b/sig/openai/base_stream.rbs index e80fcba0..397d46e5 100644 --- a/sig/openai/base_stream.rbs +++ b/sig/openai/base_stream.rbs @@ -1,21 +1,21 @@ module OpenAI class BaseStream[Elem] - def initialize: ( - model: Class | OpenAI::Converter, - url: URI::Generic, - status: Integer, - response: top, - messages: Enumerable[OpenAI::Util::sse_message] - ) -> void + def close: -> void private def iterator: -> Enumerable[Elem] - def close: -> void - def for_each: { (Elem arg0) -> void } -> void def to_enum: -> Enumerable[Elem] alias enum_for to_enum + + def initialize: ( + model: Class | OpenAI::Converter, + url: URI::Generic, + status: Integer, + response: top, + messages: Enumerable[OpenAI::Util::sse_message] + ) -> void end end From 9489ab08a09b369130c6fc0778ef6a4c712f4d9c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 16:32:08 +0000 Subject: [PATCH 005/295] chore: improve rbi typedef for page classes (#6) --- rbi/lib/openai/cursor_page.rbi | 4 ++-- rbi/lib/openai/page.rbi | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index d7c63bcf..ffbe0d60 100644 --- a/rbi/lib/openai/cursor_page.rbi +++ b/rbi/lib/openai/cursor_page.rbi @@ -29,9 +29,9 @@ module OpenAI headers: T.any(T::Hash[String, String], Net::HTTPHeader), page_data: T::Hash[Symbol, T.anything] ) - .void + .returns(T.attached_class) end - def initialize(client:, req:, headers:, page_data:) + def self.new(client:, req:, headers:, page_data:) end end end diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi index 876486a4..9f33956d 100644 --- a/rbi/lib/openai/page.rbi +++ b/rbi/lib/openai/page.rbi @@ -29,9 +29,9 @@ module OpenAI headers: T.any(T::Hash[String, String], Net::HTTPHeader), page_data: T::Array[T.anything] ) - .void + .returns(T.attached_class) end - def initialize(client:, req:, headers:, page_data:) + def self.new(client:, req:, headers:, page_data:) end end end From df541aadec8dffc1e23734d68fffdf5524ce1a4d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 16:48:55 +0000 Subject: [PATCH 006/295] chore(internal): remove extra empty newlines (#7) --- .github/workflows/ci.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c4d79b2d..de0c9185 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,4 +43,3 @@ jobs: - name: Run tests run: ./scripts/test - From f390a2a876962acbb72273391629261517cfc4ae Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 17:55:23 +0000 Subject: [PATCH 007/295] chore: more accurate generic params for stream classes (#8) --- lib/openai/base_stream.rb | 2 +- lib/openai/stream.rb | 4 +++- rbi/lib/openai/base_client.rbi | 4 ++-- rbi/lib/openai/base_stream.rbi | 9 +++++---- rbi/lib/openai/stream.rbi | 18 +++++++++++++++++- sig/openai/base_stream.rbs | 4 ++-- sig/openai/stream.rbs | 12 +++++++++++- 7 files changed, 41 insertions(+), 12 deletions(-) diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 082d045d..c2beb4b9 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -16,7 +16,7 @@ module OpenAI # # messages => Array # ``` - class BaseStream + module BaseStream # @return [void] # def close = OpenAI::Util.close_fused!(@iterator) diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index 801b6247..f9319992 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -16,7 +16,9 @@ module OpenAI # # messages => Array # ``` - class Stream < OpenAI::BaseStream + class Stream + include OpenAI::BaseStream + # @private # # @return [Enumerable] diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index ae1f372e..fd80c3c5 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -22,7 +22,7 @@ module OpenAI body: T.nilable(T.anything), unwrap: T.nilable(Symbol), page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::BaseStream[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), model: T.nilable(OpenAI::Converter::Input), 
options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) } @@ -148,7 +148,7 @@ module OpenAI body: T.nilable(T.anything), unwrap: T.nilable(Symbol), page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::BaseStream[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), model: T.nilable(OpenAI::Converter::Input), options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index f527b849..8b829bd1 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -1,7 +1,8 @@ # typed: strong module OpenAI - class BaseStream + module BaseStream + Message = type_member(:in) Elem = type_member(:out) sig { void } @@ -28,11 +29,11 @@ module OpenAI url: URI::Generic, status: Integer, response: Net::HTTPResponse, - messages: T::Enumerable[OpenAI::Util::SSEMessage] + messages: T::Enumerable[Message] ) - .returns(T.attached_class) + .void end - def self.new(model:, url:, status:, response:, messages:) + def initialize(model:, url:, status:, response:, messages:) end end end diff --git a/rbi/lib/openai/stream.rbi b/rbi/lib/openai/stream.rbi index f4bf6fa9..3dc46e28 100644 --- a/rbi/lib/openai/stream.rbi +++ b/rbi/lib/openai/stream.rbi @@ -1,11 +1,27 @@ # typed: strong module OpenAI - class Stream < OpenAI::BaseStream + class Stream + include OpenAI::BaseStream + + Message = type_member(:in) { {fixed: OpenAI::Util::SSEMessage} } Elem = type_member(:out) sig { override.returns(T::Enumerable[Elem]) } private def iterator end + + sig do + params( + model: T.any(T::Class[T.anything], OpenAI::Converter), + url: URI::Generic, + status: Integer, + response: Net::HTTPResponse, + messages: T::Enumerable[OpenAI::Util::SSEMessage] + ) + .returns(T.attached_class) + end + def self.new(model:, url:, status:, response:, messages:) + end end end diff --git a/sig/openai/base_stream.rbs b/sig/openai/base_stream.rbs index 397d46e5..e5d9ec89 100644 --- a/sig/openai/base_stream.rbs +++ b/sig/openai/base_stream.rbs @@ -1,5 +1,5 @@ module OpenAI - class BaseStream[Elem] + module BaseStream[Message, Elem] def close: -> void private def iterator: -> Enumerable[Elem] @@ -15,7 +15,7 @@ module OpenAI url: URI::Generic, status: Integer, response: top, - messages: Enumerable[OpenAI::Util::sse_message] + messages: Enumerable[Message] ) -> void end end diff --git a/sig/openai/stream.rbs b/sig/openai/stream.rbs index 78d58b92..675ecb74 100644 --- a/sig/openai/stream.rbs +++ b/sig/openai/stream.rbs @@ -1,5 +1,15 @@ module OpenAI - class Stream[Elem] < OpenAI::BaseStream[Elem] + class Stream[Elem] + include OpenAI::BaseStream[OpenAI::Util::sse_message, Elem] + private def iterator: -> Enumerable[Elem] + + def initialize: ( + model: Class | OpenAI::Converter, + url: URI::Generic, + status: Integer, + response: top, + messages: Enumerable[OpenAI::Util::sse_message] + ) -> void end end From 5fdfcd1f92f5bd3ee7d472d524391b6cd71a0ef7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 18:14:20 +0000 Subject: [PATCH 008/295] chore: sdk internal updates (#9) --- lib/openai/util.rb | 7 +++++-- rbi/lib/openai/cursor_page.rbi | 12 ------------ rbi/lib/openai/page.rbi | 12 ------------ rbi/lib/openai/stream.rbi | 15 +-------------- rbi/lib/openai/util.rbi | 4 ++-- sig/openai/cursor_page.rbs | 7 ------- sig/openai/page.rbs | 7 ------- 
sig/openai/stream.rbs | 10 +--------- sig/openai/util.rbs | 4 ++-- 9 files changed, 11 insertions(+), 67 deletions(-) diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 10368046..b59d893f 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -557,7 +557,7 @@ class << self def encode_content(headers, body) content_type = headers["content-type"] case [content_type, body] - in ["application/json", Hash | Array] + in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array] [headers, JSON.fast_generate(body)] in [%r{^multipart/form-data}, Hash | IO | StringIO] boundary, strio = encode_multipart_streaming(body) @@ -593,7 +593,10 @@ def decode_content(headers, stream:, suppress_error: false) lines = decode_lines(stream) decode_sse(lines) in %r{^application/(?:x-)?jsonl} - decode_lines(stream) + lines = decode_lines(stream) + chain_fused(lines) do |y| + lines.each { y << JSON.parse(_1, symbolize_names: true) } + end in %r{^text/} stream.to_a.join else diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index ffbe0d60..a489a9ff 100644 --- a/rbi/lib/openai/cursor_page.rbi +++ b/rbi/lib/openai/cursor_page.rbi @@ -21,17 +21,5 @@ module OpenAI sig { params(_: T::Boolean).returns(T::Boolean) } def has_more=(_) end - - sig do - params( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::RequestComponentsShape, - headers: T.any(T::Hash[String, String], Net::HTTPHeader), - page_data: T::Hash[Symbol, T.anything] - ) - .returns(T.attached_class) - end - def self.new(client:, req:, headers:, page_data:) - end end end diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi index 9f33956d..3b8ff55d 100644 --- a/rbi/lib/openai/page.rbi +++ b/rbi/lib/openai/page.rbi @@ -21,17 +21,5 @@ module OpenAI sig { params(_: String).returns(String) } def object=(_) end - - sig do - params( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::RequestComponentsShape, - headers: T.any(T::Hash[String, String], Net::HTTPHeader), - page_data: T::Array[T.anything] - ) - .returns(T.attached_class) - end - def self.new(client:, req:, headers:, page_data:) - end end end diff --git a/rbi/lib/openai/stream.rbi b/rbi/lib/openai/stream.rbi index 3dc46e28..01f98197 100644 --- a/rbi/lib/openai/stream.rbi +++ b/rbi/lib/openai/stream.rbi @@ -4,24 +4,11 @@ module OpenAI class Stream include OpenAI::BaseStream - Message = type_member(:in) { {fixed: OpenAI::Util::SSEMessage} } + Message = type_member(:in) { {fixed: OpenAI::Util::ServerSentEvent} } Elem = type_member(:out) sig { override.returns(T::Enumerable[Elem]) } private def iterator end - - sig do - params( - model: T.any(T::Class[T.anything], OpenAI::Converter), - url: URI::Generic, - status: Integer, - response: Net::HTTPResponse, - messages: T::Enumerable[OpenAI::Util::SSEMessage] - ) - .returns(T.attached_class) - end - def self.new(model:, url:, status:, response:, messages:) - end end end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 246a7672..0888f1da 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -205,7 +205,7 @@ module OpenAI end end - SSEMessage = T.type_alias do + ServerSentEvent = T.type_alias do {event: T.nilable(String), data: T.nilable(String), id: T.nilable(String), retry: T.nilable(Integer)} end @@ -214,7 +214,7 @@ module OpenAI def decode_lines(enum) end - sig { params(lines: T::Enumerable[String]).returns(OpenAI::Util::SSEMessage) } + sig { params(lines: T::Enumerable[String]).returns(OpenAI::Util::ServerSentEvent) } def decode_sse(lines) end end diff --git 
a/sig/openai/cursor_page.rbs b/sig/openai/cursor_page.rbs index d7f7e58b..3d313782 100644 --- a/sig/openai/cursor_page.rbs +++ b/sig/openai/cursor_page.rbs @@ -5,12 +5,5 @@ module OpenAI attr_accessor data: ::Array[Elem] attr_accessor has_more: bool - - def initialize: ( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::request_components, - headers: ::Hash[String, String], - page_data: ::Hash[Symbol, top] - ) -> void end end diff --git a/sig/openai/page.rbs b/sig/openai/page.rbs index 94bcf79a..37846820 100644 --- a/sig/openai/page.rbs +++ b/sig/openai/page.rbs @@ -5,12 +5,5 @@ module OpenAI attr_accessor data: ::Array[Elem] attr_accessor object: String - - def initialize: ( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::request_components, - headers: ::Hash[String, String], - page_data: ::Array[top] - ) -> void end end diff --git a/sig/openai/stream.rbs b/sig/openai/stream.rbs index 675ecb74..7474463b 100644 --- a/sig/openai/stream.rbs +++ b/sig/openai/stream.rbs @@ -1,15 +1,7 @@ module OpenAI class Stream[Elem] - include OpenAI::BaseStream[OpenAI::Util::sse_message, Elem] + include OpenAI::BaseStream[OpenAI::Util::server_sent_event, Elem] private def iterator: -> Enumerable[Elem] - - def initialize: ( - model: Class | OpenAI::Converter, - url: URI::Generic, - status: Integer, - response: top, - messages: Enumerable[OpenAI::Util::sse_message] - ) -> void end end diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs index 2ee3b4df..065ab7d1 100644 --- a/sig/openai/util.rbs +++ b/sig/openai/util.rbs @@ -120,13 +120,13 @@ module OpenAI (Enumerator::Yielder arg0) -> void } -> void - type sse_message = + type server_sent_event = { event: String?, data: String?, id: String?, retry: Integer? } def self?.decode_lines: (Enumerable[String] enum) -> Enumerable[String] def self?.decode_sse: ( Enumerable[String] lines - ) -> OpenAI::Util::sse_message + ) -> OpenAI::Util::server_sent_event end end From 023492cf78fabe7e8eed2bb00503f1d3ace38163 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 13 Mar 2025 19:32:43 +0000 Subject: [PATCH 009/295] feat: support jsonl uploads (#10) --- lib/openai/util.rb | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/openai/util.rb b/lib/openai/util.rb index b59d893f..2546262b 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -559,6 +559,8 @@ def encode_content(headers, body) case [content_type, body] in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array] [headers, JSON.fast_generate(body)] + in [%r{^application/(?:x-)?jsonl}, Enumerable] + [headers, body.lazy.map { JSON.fast_generate(_1) }] in [%r{^multipart/form-data}, Hash | IO | StringIO] boundary, strio = encode_multipart_streaming(body) headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} @@ -589,14 +591,14 @@ def decode_content(headers, stream:, suppress_error: false) raise e unless suppress_error json end - in %r{^text/event-stream} - lines = decode_lines(stream) - decode_sse(lines) in %r{^application/(?:x-)?jsonl} lines = decode_lines(stream) chain_fused(lines) do |y| lines.each { y << JSON.parse(_1, symbolize_names: true) } end + in %r{^text/event-stream} + lines = decode_lines(stream) + decode_sse(lines) in %r{^text/} stream.to_a.join else From 6532997808870f7355bc3eb5b3232286671b6c8a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 03:19:02 +0000 Subject: [PATCH 010/295] chore: 
add most doc strings to rbi type definitions (#12) --- .yardopts | 2 + lib/openai/base_client.rb | 35 +-- lib/openai/base_model.rb | 173 ++++--------- lib/openai/base_page.rb | 9 +- lib/openai/base_stream.rb | 11 +- lib/openai/client.rb | 4 +- lib/openai/cursor_page.rb | 6 +- lib/openai/errors.rb | 15 +- lib/openai/extern.rb | 3 +- lib/openai/models/audio/speech_model.rb | 1 - .../audio/transcription_create_params.rb | 1 - .../audio/translation_create_response.rb | 1 - lib/openai/models/audio_model.rb | 1 - lib/openai/models/beta/assistant_tool.rb | 1 - .../beta/thread_create_and_run_params.rb | 2 - .../models/beta/thread_create_params.rb | 1 - lib/openai/models/beta/threads/message.rb | 1 - .../beta/threads/message_create_params.rb | 1 - .../models/beta/threads/run_create_params.rb | 1 - .../beta/threads/runs/run_step_include.rb | 1 - ...chat_completion_assistant_message_param.rb | 1 - .../models/chat/chat_completion_chunk.rb | 1 - .../chat_completion_function_message_param.rb | 1 - .../models/chat/chat_completion_message.rb | 1 - .../models/chat/chat_completion_modality.rb | 1 - .../models/chat/completion_create_params.rb | 2 - lib/openai/models/chat_model.rb | 1 - lib/openai/models/embedding_model.rb | 1 - .../models/fine_tuning/job_create_params.rb | 1 - lib/openai/models/image_model.rb | 1 - lib/openai/models/moderation.rb | 13 - lib/openai/models/moderation_model.rb | 1 - .../response_file_search_tool_call.rb | 1 - .../models/vector_store_search_params.rb | 1 - .../models/vector_store_search_response.rb | 1 - .../vector_stores/file_batch_create_params.rb | 1 - .../vector_stores/file_create_params.rb | 1 - .../vector_stores/file_update_params.rb | 1 - .../models/vector_stores/vector_store_file.rb | 1 - lib/openai/page.rb | 6 +- lib/openai/pooled_net_requester.rb | 21 +- lib/openai/request_options.rb | 13 +- lib/openai/resources/audio.rb | 1 - lib/openai/resources/audio/speech.rb | 2 - lib/openai/resources/audio/transcriptions.rb | 2 - lib/openai/resources/audio/translations.rb | 2 - lib/openai/resources/batches.rb | 5 - lib/openai/resources/beta.rb | 1 - lib/openai/resources/beta/assistants.rb | 6 - lib/openai/resources/beta/threads.rb | 7 - lib/openai/resources/beta/threads/messages.rb | 6 - lib/openai/resources/beta/threads/runs.rb | 9 - .../resources/beta/threads/runs/steps.rb | 3 - lib/openai/resources/chat.rb | 1 - lib/openai/resources/chat/completions.rb | 7 - .../resources/chat/completions/messages.rb | 2 - lib/openai/resources/completions.rb | 3 - lib/openai/resources/embeddings.rb | 2 - lib/openai/resources/files.rb | 6 - lib/openai/resources/fine_tuning.rb | 1 - lib/openai/resources/fine_tuning/jobs.rb | 6 - .../resources/fine_tuning/jobs/checkpoints.rb | 2 - lib/openai/resources/images.rb | 4 - lib/openai/resources/models.rb | 4 - lib/openai/resources/moderations.rb | 2 - lib/openai/resources/responses.rb | 5 - lib/openai/resources/responses/input_items.rb | 2 - lib/openai/resources/uploads.rb | 4 - lib/openai/resources/uploads/parts.rb | 2 - lib/openai/resources/vector_stores.rb | 7 - .../resources/vector_stores/file_batches.rb | 5 - lib/openai/resources/vector_stores/files.rb | 7 - lib/openai/stream.rb | 5 +- lib/openai/util.rb | 102 +++----- rbi/lib/openai/base_client.rbi | 11 + rbi/lib/openai/base_model.rbi | 106 ++++++++ rbi/lib/openai/base_page.rbi | 2 + rbi/lib/openai/base_stream.rbi | 3 + rbi/lib/openai/client.rbi | 2 + rbi/lib/openai/errors.rbi | 5 + rbi/lib/openai/extern.rbi | 1 + .../models/audio/speech_create_params.rbi | 20 ++ 
rbi/lib/openai/models/audio/transcription.rbi | 3 + .../audio/transcription_create_params.rbi | 26 ++ .../audio/transcription_create_response.rbi | 3 + .../models/audio/transcription_segment.rbi | 13 + .../models/audio/transcription_verbose.rbi | 7 + .../models/audio/transcription_word.rbi | 3 + .../audio/translation_create_params.rbi | 18 ++ .../audio/translation_create_response.rbi | 1 + .../models/audio/translation_verbose.rbi | 4 + .../openai/models/audio_response_format.rbi | 2 + .../auto_file_chunking_strategy_param.rbi | 3 + rbi/lib/openai/models/batch.rbi | 25 ++ rbi/lib/openai/models/batch_create_params.rbi | 27 ++ rbi/lib/openai/models/batch_error.rbi | 4 + rbi/lib/openai/models/batch_list_params.rbi | 6 + .../openai/models/batch_request_counts.rbi | 4 + rbi/lib/openai/models/beta/assistant.rbi | 65 +++++ .../models/beta/assistant_create_params.rbi | 100 ++++++++ .../models/beta/assistant_list_params.rbi | 14 ++ .../beta/assistant_response_format_option.rbi | 21 ++ .../models/beta/assistant_stream_event.rbi | 124 ++++++++++ rbi/lib/openai/models/beta/assistant_tool.rbi | 1 + .../models/beta/assistant_tool_choice.rbi | 4 + .../beta/assistant_tool_choice_function.rbi | 1 + .../beta/assistant_tool_choice_option.rbi | 12 + .../models/beta/assistant_update_params.rbi | 79 ++++++ .../models/beta/code_interpreter_tool.rbi | 1 + .../openai/models/beta/file_search_tool.rbi | 29 +++ rbi/lib/openai/models/beta/function_tool.rbi | 1 + .../models/beta/message_stream_event.rbi | 29 +++ .../models/beta/run_step_stream_event.rbi | 33 +++ .../openai/models/beta/run_stream_event.rbi | 43 ++++ rbi/lib/openai/models/beta/thread.rbi | 26 ++ .../beta/thread_create_and_run_params.rbi | 180 ++++++++++++++ .../models/beta/thread_create_params.rbi | 76 ++++++ .../models/beta/thread_stream_event.rbi | 6 + .../models/beta/thread_update_params.rbi | 21 ++ .../openai/models/beta/threads/annotation.rbi | 4 + .../models/beta/threads/annotation_delta.rbi | 4 + .../beta/threads/file_citation_annotation.rbi | 6 + .../file_citation_delta_annotation.rbi | 8 + .../beta/threads/file_path_annotation.rbi | 5 + .../threads/file_path_delta_annotation.rbi | 6 + .../openai/models/beta/threads/image_file.rbi | 7 + .../beta/threads/image_file_content_block.rbi | 3 + .../models/beta/threads/image_file_delta.rbi | 7 + .../beta/threads/image_file_delta_block.rbi | 4 + .../openai/models/beta/threads/image_url.rbi | 6 + .../beta/threads/image_url_content_block.rbi | 2 + .../models/beta/threads/image_url_delta.rbi | 6 + .../beta/threads/image_url_delta_block.rbi | 3 + .../openai/models/beta/threads/message.rbi | 37 +++ .../models/beta/threads/message_content.rbi | 3 + .../beta/threads/message_content_delta.rbi | 3 + .../threads/message_content_part_param.rbi | 3 + .../beta/threads/message_create_params.rbi | 26 ++ .../models/beta/threads/message_delta.rbi | 4 + .../beta/threads/message_delta_event.rbi | 5 + .../beta/threads/message_list_params.rbi | 15 ++ .../beta/threads/message_update_params.rbi | 6 + .../beta/threads/refusal_content_block.rbi | 2 + .../beta/threads/refusal_delta_block.rbi | 3 + .../required_action_function_tool_call.rbi | 11 + rbi/lib/openai/models/beta/threads/run.rbi | 111 +++++++++ .../models/beta/threads/run_create_params.rbi | 128 ++++++++++ .../models/beta/threads/run_list_params.rbi | 14 ++ .../openai/models/beta/threads/run_status.rbi | 3 + .../run_submit_tool_outputs_params.rbi | 4 + .../models/beta/threads/run_update_params.rbi | 6 + .../threads/runs/code_interpreter_logs.rbi | 4 + 
.../runs/code_interpreter_output_image.rbi | 4 + .../runs/code_interpreter_tool_call.rbi | 18 ++ .../runs/code_interpreter_tool_call_delta.rbi | 13 + .../threads/runs/file_search_tool_call.rbi | 24 ++ .../runs/file_search_tool_call_delta.rbi | 5 + .../beta/threads/runs/function_tool_call.rbi | 10 + .../threads/runs/function_tool_call_delta.rbi | 11 + .../runs/message_creation_step_details.rbi | 3 + .../models/beta/threads/runs/run_step.rbi | 45 ++++ .../beta/threads/runs/run_step_delta.rbi | 4 + .../threads/runs/run_step_delta_event.rbi | 5 + .../runs/run_step_delta_message_delta.rbi | 3 + .../beta/threads/runs/step_list_params.rbi | 21 ++ .../threads/runs/step_retrieve_params.rbi | 7 + .../models/beta/threads/runs/tool_call.rbi | 2 + .../beta/threads/runs/tool_call_delta.rbi | 2 + .../threads/runs/tool_call_delta_object.rbi | 5 + .../threads/runs/tool_calls_step_details.rbi | 5 + rbi/lib/openai/models/beta/threads/text.rbi | 1 + .../beta/threads/text_content_block.rbi | 2 + .../beta/threads/text_content_block_param.rbi | 3 + .../openai/models/beta/threads/text_delta.rbi | 1 + .../models/beta/threads/text_delta_block.rbi | 3 + .../openai/models/chat/chat_completion.rbi | 33 +++ ...hat_completion_assistant_message_param.rbi | 28 +++ .../models/chat/chat_completion_audio.rbi | 9 + .../chat/chat_completion_audio_param.rbi | 11 + .../models/chat/chat_completion_chunk.rbi | 60 +++++ .../chat/chat_completion_content_part.rbi | 10 + .../chat_completion_content_part_image.rbi | 7 + ...at_completion_content_part_input_audio.rbi | 5 + .../chat_completion_content_part_refusal.rbi | 2 + .../chat_completion_content_part_text.rbi | 4 + .../models/chat/chat_completion_deleted.rbi | 3 + ...hat_completion_developer_message_param.rbi | 9 + .../chat_completion_function_call_option.rbi | 3 + ...chat_completion_function_message_param.rbi | 3 + .../models/chat/chat_completion_message.rbi | 27 ++ .../chat/chat_completion_message_param.rbi | 4 + .../chat_completion_message_tool_call.rbi | 9 + .../chat_completion_named_tool_choice.rbi | 4 + .../chat_completion_prediction_content.rbi | 11 + .../models/chat/chat_completion_role.rbi | 1 + .../chat/chat_completion_store_message.rbi | 2 + .../chat/chat_completion_stream_options.rbi | 5 + .../chat_completion_system_message_param.rbi | 9 + .../chat/chat_completion_token_logprob.rbi | 19 ++ .../models/chat/chat_completion_tool.rbi | 1 + .../chat_completion_tool_choice_option.rbi | 13 + .../chat_completion_tool_message_param.rbi | 5 + .../chat_completion_user_message_param.rbi | 8 + .../models/chat/completion_create_params.rbi | 234 ++++++++++++++++++ .../models/chat/completion_list_params.rbi | 10 + .../models/chat/completion_update_params.rbi | 6 + .../chat/completions/message_list_params.rbi | 6 + rbi/lib/openai/models/comparison_filter.rbi | 24 ++ rbi/lib/openai/models/completion.rbi | 12 + rbi/lib/openai/models/completion_choice.rbi | 8 + .../models/completion_create_params.rbi | 107 ++++++++ rbi/lib/openai/models/completion_usage.rbi | 18 ++ rbi/lib/openai/models/compound_filter.rbi | 8 + .../models/create_embedding_response.rbi | 7 + rbi/lib/openai/models/embedding.rbi | 6 + .../openai/models/embedding_create_params.rbi | 37 +++ .../openai/models/file_chunking_strategy.rbi | 2 + .../models/file_chunking_strategy_param.rbi | 3 + rbi/lib/openai/models/file_create_params.rbi | 5 + rbi/lib/openai/models/file_list_params.rbi | 11 + rbi/lib/openai/models/file_object.rbi | 19 ++ rbi/lib/openai/models/file_purpose.rbi | 4 + .../models/fine_tuning/fine_tuning_job.rbi | 109 
++++++++ .../fine_tuning/fine_tuning_job_event.rbi | 10 + .../fine_tuning_job_wandb_integration.rbi | 13 + ...ne_tuning_job_wandb_integration_object.rbi | 5 + .../models/fine_tuning/job_create_params.rbi | 136 ++++++++++ .../fine_tuning/job_list_events_params.rbi | 2 + .../models/fine_tuning/job_list_params.rbi | 4 + .../jobs/checkpoint_list_params.rbi | 2 + .../jobs/fine_tuning_job_checkpoint.rbi | 10 + rbi/lib/openai/models/function_definition.rbi | 16 ++ rbi/lib/openai/models/image.rbi | 6 + .../models/image_create_variation_params.rbi | 22 ++ rbi/lib/openai/models/image_edit_params.rbi | 26 ++ .../openai/models/image_generate_params.rbi | 36 +++ rbi/lib/openai/models/model.rbi | 5 + rbi/lib/openai/models/moderation.rbi | 63 +++++ .../models/moderation_create_params.rbi | 14 ++ .../models/moderation_create_response.rbi | 4 + .../models/moderation_image_url_input.rbi | 5 + .../models/moderation_multi_modal_input.rbi | 2 + .../openai/models/moderation_text_input.rbi | 3 + .../other_file_chunking_strategy_object.rbi | 4 + rbi/lib/openai/models/reasoning.rbi | 20 ++ rbi/lib/openai/models/reasoning_effort.rbi | 6 + .../models/response_format_json_object.rbi | 4 + .../models/response_format_json_schema.rbi | 17 ++ .../openai/models/response_format_text.rbi | 2 + .../openai/models/responses/computer_tool.rbi | 7 + .../models/responses/easy_input_message.rbi | 16 ++ .../models/responses/file_search_tool.rbi | 17 ++ .../openai/models/responses/function_tool.rbi | 9 + .../responses/input_item_list_params.rbi | 12 + rbi/lib/openai/models/responses/response.rbi | 105 ++++++++ .../responses/response_audio_delta_event.rbi | 3 + .../responses/response_audio_done_event.rbi | 2 + .../response_audio_transcript_delta_event.rbi | 3 + .../response_audio_transcript_done_event.rbi | 2 + ...code_interpreter_call_code_delta_event.rbi | 4 + ..._code_interpreter_call_code_done_event.rbi | 4 + ..._code_interpreter_call_completed_event.rbi | 4 + ...ode_interpreter_call_in_progress_event.rbi | 4 + ...de_interpreter_call_interpreting_event.rbi | 4 + .../response_code_interpreter_tool_call.rbi | 16 ++ .../responses/response_completed_event.rbi | 3 + .../responses/response_computer_tool_call.rbi | 75 ++++++ .../models/responses/response_content.rbi | 2 + .../response_content_part_added_event.rbi | 8 + .../response_content_part_done_event.rbi | 8 + .../responses/response_create_params.rbi | 114 +++++++++ .../responses/response_created_event.rbi | 3 + .../models/responses/response_error.rbi | 4 + .../models/responses/response_error_event.rbi | 5 + .../responses/response_failed_event.rbi | 3 + ...ponse_file_search_call_completed_event.rbi | 4 + ...nse_file_search_call_in_progress_event.rbi | 4 + ...ponse_file_search_call_searching_event.rbi | 4 + .../response_file_search_tool_call.rbi | 21 ++ .../responses/response_format_text_config.rbi | 14 ++ ...esponse_format_text_json_schema_config.rbi | 15 ++ ...se_function_call_arguments_delta_event.rbi | 5 + ...nse_function_call_arguments_done_event.rbi | 4 + .../responses/response_function_tool_call.rbi | 12 + .../response_function_web_search.rbi | 7 + .../responses/response_in_progress_event.rbi | 3 + .../models/responses/response_includable.rbi | 8 + .../responses/response_incomplete_event.rbi | 3 + .../models/responses/response_input_audio.rbi | 5 + .../responses/response_input_content.rbi | 2 + .../models/responses/response_input_file.rbi | 5 + .../models/responses/response_input_image.rbi | 10 + .../models/responses/response_input_item.rbi | 52 ++++ 
.../models/responses/response_input_text.rbi | 3 + .../models/responses/response_item_list.rbi | 46 ++++ .../responses/response_output_audio.rbi | 4 + .../models/responses/response_output_item.rbi | 2 + .../response_output_item_added_event.rbi | 4 + .../response_output_item_done_event.rbi | 4 + .../responses/response_output_message.rbi | 11 + .../responses/response_output_refusal.rbi | 3 + .../models/responses/response_output_text.rbi | 20 ++ .../responses/response_reasoning_item.rbi | 11 + .../response_refusal_delta_event.rbi | 6 + .../responses/response_refusal_done_event.rbi | 6 + .../responses/response_retrieve_params.rbi | 2 + .../models/responses/response_status.rbi | 2 + .../responses/response_stream_event.rbi | 2 + .../response_text_annotation_delta_event.rbi | 23 ++ .../models/responses/response_text_config.rbi | 18 ++ .../responses/response_text_delta_event.rbi | 6 + .../responses/response_text_done_event.rbi | 6 + .../models/responses/response_usage.rbi | 8 + ...sponse_web_search_call_completed_event.rbi | 4 + ...onse_web_search_call_in_progress_event.rbi | 4 + ...sponse_web_search_call_searching_event.rbi | 4 + rbi/lib/openai/models/responses/tool.rbi | 4 + .../models/responses/tool_choice_function.rbi | 3 + .../models/responses/tool_choice_options.rbi | 8 + .../models/responses/tool_choice_types.rbi | 18 ++ .../models/responses/web_search_tool.rbi | 22 ++ .../models/static_file_chunking_strategy.rbi | 5 + .../static_file_chunking_strategy_object.rbi | 1 + ...ic_file_chunking_strategy_object_param.rbi | 2 + rbi/lib/openai/models/upload.rbi | 13 + .../openai/models/upload_complete_params.rbi | 3 + .../openai/models/upload_create_params.rbi | 10 + .../models/uploads/part_create_params.rbi | 1 + rbi/lib/openai/models/uploads/upload_part.rbi | 5 + rbi/lib/openai/models/vector_store.rbi | 31 +++ .../models/vector_store_create_params.rbi | 17 ++ .../models/vector_store_list_params.rbi | 14 ++ .../models/vector_store_search_params.rbi | 11 + .../models/vector_store_search_response.rbi | 13 + .../models/vector_store_update_params.rbi | 12 + .../file_batch_create_params.rbi | 11 + .../file_batch_list_files_params.rbi | 16 ++ .../vector_stores/file_content_response.rbi | 2 + .../vector_stores/file_create_params.rbi | 11 + .../models/vector_stores/file_list_params.rbi | 16 ++ .../vector_stores/file_update_params.rbi | 6 + .../vector_stores/vector_store_file.rbi | 30 +++ .../vector_stores/vector_store_file_batch.rbi | 18 ++ rbi/lib/openai/pooled_net_requester.rbi | 6 + rbi/lib/openai/request_options.rbi | 21 ++ rbi/lib/openai/resources/audio/speech.rbi | 1 + .../openai/resources/audio/transcriptions.rbi | 1 + .../openai/resources/audio/translations.rbi | 1 + rbi/lib/openai/resources/batches.rbi | 6 + rbi/lib/openai/resources/beta/assistants.rbi | 5 + rbi/lib/openai/resources/beta/threads.rbi | 6 + .../resources/beta/threads/messages.rbi | 5 + .../openai/resources/beta/threads/runs.rbi | 14 ++ .../resources/beta/threads/runs/steps.rbi | 2 + rbi/lib/openai/resources/chat/completions.rbi | 43 ++++ .../resources/chat/completions/messages.rbi | 2 + rbi/lib/openai/resources/completions.rbi | 2 + rbi/lib/openai/resources/embeddings.rbi | 1 + rbi/lib/openai/resources/files.rbi | 25 ++ rbi/lib/openai/resources/fine_tuning/jobs.rbi | 13 + .../fine_tuning/jobs/checkpoints.rbi | 1 + rbi/lib/openai/resources/images.rbi | 3 + rbi/lib/openai/resources/models.rbi | 6 + rbi/lib/openai/resources/moderations.rbi | 2 + rbi/lib/openai/resources/responses.rbi | 24 ++ .../resources/responses/input_items.rbi | 1 
+ rbi/lib/openai/resources/uploads.rbi | 33 +++ rbi/lib/openai/resources/uploads/parts.rbi | 11 + rbi/lib/openai/resources/vector_stores.rbi | 7 + .../resources/vector_stores/file_batches.rbi | 5 + .../openai/resources/vector_stores/files.rbi | 11 + rbi/lib/openai/stream.rbi | 1 + rbi/lib/openai/util.rbi | 41 +++ sig/openai/base_client.rbs | 2 +- test/openai/client_test.rb | 5 - 375 files changed, 4744 insertions(+), 444 deletions(-) diff --git a/.yardopts b/.yardopts index 29c933bc..c7c3301d 100644 --- a/.yardopts +++ b/.yardopts @@ -1 +1,3 @@ --markup markdown +--exclude /rbi +--exclude /sig diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb index 9707f135..5c609ac7 100644 --- a/lib/openai/base_client.rb +++ b/lib/openai/base_client.rb @@ -1,10 +1,9 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @abstract - # class BaseClient # from whatwg fetch spec MAX_REDIRECTS = 20 @@ -21,12 +20,11 @@ class BaseClient # rubocop:enable Style/MutableConstant class << self - # @private + # @api private # # @param req [Hash{Symbol=>Object}] # # @raise [ArgumentError] - # def validate!(req) keys = [:method, :path, :query, :headers, :body, :unwrap, :page, :stream, :model, :options] case req @@ -41,13 +39,12 @@ def validate!(req) end end - # @private + # @api private # # @param status [Integer] # @param headers [Hash{String=>String}, Net::HTTPHeader] # # @return [Boolean] - # def should_retry?(status, headers:) coerced = OpenAI::Util.coerce_boolean(headers["x-should-retry"]) case [coerced, status] @@ -65,7 +62,7 @@ def should_retry?(status, headers:) end end - # @private + # @api private # # @param request [Hash{Symbol=>Object}] . # @@ -86,7 +83,6 @@ def should_retry?(status, headers:) # @param response_headers [Hash{String=>String}, Net::HTTPHeader] # # @return [Hash{Symbol=>Object}] - # def follow_redirect(request, status:, response_headers:) method, url, headers = request.fetch_values(:method, :url, :headers) location = @@ -130,12 +126,11 @@ def follow_redirect(request, status:, response_headers:) end end - # @private - # + # @api private # @return [OpenAI::PooledNetRequester] attr_accessor :requester - # @private + # @api private # # @param base_url [String] # @param timeout [Float] @@ -144,7 +139,6 @@ def follow_redirect(request, status:, response_headers:) # @param max_retry_delay [Float] # @param headers [Hash{String=>String, Integer, Array, nil}] # @param idempotency_header [String, nil] - # def initialize( base_url:, timeout: 0.0, @@ -171,19 +165,17 @@ def initialize( @max_retry_delay = max_retry_delay end - # @private + # @api private # # @return [Hash{String=>String}] - # private def auth_headers = {} - # @private + # @api private # # @return [String] - # private def generate_idempotency_key = "stainless-ruby-retry-#{SecureRandom.uuid}" - # @private + # @api private # # @param req [Hash{Symbol=>Object}] . 
# @@ -220,7 +212,6 @@ def initialize( # @option opts [Float, nil] :timeout # # @return [Hash{Symbol=>Object}] - # private def build_request(req, opts) method, uninterpolated_path = req.fetch_values(:method, :path) @@ -271,13 +262,12 @@ def initialize( } end - # @private + # @api private # # @param headers [Hash{String=>String}] # @param retry_count [Integer] # # @return [Float] - # private def retry_delay(headers, retry_count:) # Non-standard extension span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 } @@ -298,7 +288,7 @@ def initialize( (@initial_retry_delay * scale * jitter).clamp(0, @max_retry_delay) end - # @private + # @api private # # @param request [Hash{Symbol=>Object}] . # @@ -322,7 +312,6 @@ def initialize( # # @raise [OpenAI::APIError] # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - # private def send_request(request, redirect_count:, retry_count:, send_retry_header:) url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) input = {**request.except(:timeout), deadline: OpenAI::Util.monotonic_secs + timeout} @@ -424,7 +413,6 @@ def initialize( # # @raise [OpenAI::APIError] # @return [Object] - # def request(req) self.class.validate!(req) model = req.fetch(:model) { OpenAI::Unknown } @@ -455,7 +443,6 @@ def request(req) end # @return [String] - # def inspect # rubocop:disable Layout/LineLength base_url = OpenAI::Util.unparse_uri(@base_url) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 798e49ca..23f83864 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -1,41 +1,37 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @abstract - # module Converter # rubocop:disable Lint/UnusedMethodArgument - # @private + # @api private # # @param value [Object] # # @return [Object] - # def coerce(value) = value - # @private + # @api private # # @param value [Object] # # @return [Object] - # def dump(value) = value - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) = (raise NotImplementedError) # rubocop:enable Lint/UnusedMethodArgument class << self - # @private + # @api private # # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . # @@ -48,7 +44,6 @@ class << self # @option spec [Boolean] :"nil?" # # @return [Proc] - # def type_info(spec) case spec in Hash @@ -64,7 +59,7 @@ def type_info(spec) end end - # @private + # @api private # # Based on `target`, transform `value` into `target`, to the extent possible: # @@ -77,7 +72,6 @@ def type_info(spec) # @param value [Object] # # @return [Object] - # def coerce(target, value) case target in OpenAI::Converter @@ -111,13 +105,12 @@ def coerce(target, value) end end - # @private + # @api private # # @param target [OpenAI::Converter, Class] # @param value [Object] # # @return [Object] - # def dump(target, value) case target in OpenAI::Converter @@ -127,7 +120,7 @@ def dump(target, value) end end - # @private + # @api private # # The underlying algorithm for computing maximal compatibility is subject to # future improvements. 
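[editor's note] The `retry_delay` context above only shows the edges of the computation: a server-supplied `retry-after-ms` header wins outright, otherwise the delay grows exponentially and is clamped to `@max_retry_delay`. As a reading aid, here is a minimal standalone sketch of that shape; the delay constants and the exact jitter formula are illustrative assumptions, not taken from this patch.

```ruby
# Standalone sketch of the backoff shape in #retry_delay above.
INITIAL_RETRY_DELAY = 0.5 # seconds; assumed value
MAX_RETRY_DELAY = 8.0     # seconds; assumed value

def retry_delay(headers, retry_count:)
  # Non-standard extension: a server-supplied delay, in milliseconds, wins.
  span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 }
  return span if span

  scale = 2**retry_count   # exponential growth per attempt
  jitter = 1 - 0.25 * rand # up to 25% downward jitter (assumed formula)
  (INITIAL_RETRY_DELAY * scale * jitter).clamp(0, MAX_RETRY_DELAY)
end

retry_delay({}, retry_count: 2) # => ~2.0 seconds, minus jitter
```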
@@ -142,7 +135,6 @@ def dump(target, value) # @param value [Object] # # @return [Object] - # def try_strict_coerce(target, value) case target in OpenAI::Converter @@ -182,7 +174,7 @@ def try_strict_coerce(target, value) end end - # @private + # @api private # # @abstract # @@ -197,40 +189,35 @@ class Unknown # @param other [Object] # # @return [Boolean] - # def self.===(other) = true # @param other [Object] # # @return [Boolean] - # def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown class << self # @!parse - # # @private + # # @api private # # # # @param value [Object] # # # # @return [Object] - # # # def coerce(value) = super # @!parse - # # @private + # # @api private # # # # @param value [Object] # # # # @return [Object] - # # # def dump(value) = super - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) # prevent unknown variant from being chosen during the first coercion pass [false, true, 0] @@ -240,7 +227,7 @@ def try_strict_coerce(value) # rubocop:enable Lint/UnusedMethodArgument end - # @private + # @api private # # @abstract # @@ -253,40 +240,35 @@ class BooleanModel # @param other [Object] # # @return [Boolean] - # def self.===(other) = other == true || other == false # @param other [Object] # # @return [Boolean] - # def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel class << self # @!parse - # # @private + # # @api private # # # # @param value [Boolean, Object] # # # # @return [Boolean, Object] - # # # def coerce(value) = super # @!parse - # # @private + # # @api private # # # # @param value [Boolean, Object] # # # # @return [Boolean, Object] - # # # def dump(value) = super - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) case value in true | false @@ -298,7 +280,7 @@ def try_strict_coerce(value) end end - # @private + # @api private # # @abstract # @@ -348,13 +330,11 @@ class << self # All of the valid Symbol values for this enum. # # @return [Array] - # def values = (@values ||= constants.map { const_get(_1) }) - # @private + # @api private # # Guard against thread safety issues by instantiating `@values`. - # private def finalize! = values end @@ -363,24 +343,21 @@ def values = (@values ||= constants.map { const_get(_1) }) # @param other [Object] # # @return [Boolean] - # def self.===(other) = values.include?(other) # @param other [Object] # # @return [Boolean] - # def self.==(other) other.is_a?(Class) && other <= OpenAI::Enum && other.values.to_set == values.to_set end class << self - # @private + # @api private # # @param value [String, Symbol, Object] # # @return [Symbol, Object] - # def coerce(value) case value in Symbol | String if values.include?(val = value.to_sym) @@ -391,20 +368,18 @@ def coerce(value) end # @!parse - # # @private + # # @api private # # # # @param value [Symbol, Object] # # # # @return [Symbol, Object] - # # # def dump(value) = super - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) return [true, value, 1] if values.include?(value) @@ -423,7 +398,7 @@ def try_strict_coerce(value) end end - # @private + # @api private # # @abstract # @@ -461,28 +436,25 @@ class Union extend OpenAI::Converter class << self - # @private + # @api private # # All of the specified variant info for this union. 
# # @return [Array] - # private def known_variants = (@known_variants ||= []) - # @private + # @api private # # All of the specified variants for this union. # # @return [Array] - # protected def variants @known_variants.map { |key, variant_fn| [key, variant_fn.call] } end - # @private + # @api private # # @param property [Symbol] - # private def discriminator(property) case property in Symbol @@ -490,7 +462,7 @@ class << self end end - # @private + # @api private # # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] # @@ -503,7 +475,6 @@ class << self # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # private def variant(key, spec = nil) variant_info = case key @@ -516,12 +487,11 @@ class << self known_variants << variant_info end - # @private + # @api private # # @param value [Object] # # @return [OpenAI::Converter, Class, nil] - # private def resolve_variant(value) case [@discriminator, value] in [_, OpenAI::BaseModel] @@ -551,7 +521,6 @@ class << self # @param other [Object] # # @return [Boolean] - # def self.===(other) known_variants.any? do |_, variant_fn| variant_fn.call === other @@ -561,18 +530,16 @@ def self.===(other) # @param other [Object] # # @return [Boolean] - # def self.==(other) other.is_a?(Class) && other <= OpenAI::Union && other.variants == variants end class << self - # @private + # @api private # # @param value [Object] # # @return [Object] - # def coerce(value) if (variant = resolve_variant(value)) return OpenAI::Converter.coerce(variant, value) @@ -597,12 +564,11 @@ def coerce(value) variant.nil? ? value : OpenAI::Converter.coerce(variant, value) end - # @private + # @api private # # @param value [Object] # # @return [Object] - # def dump(value) if (variant = resolve_variant(value)) return OpenAI::Converter.dump(variant, value) @@ -617,12 +583,11 @@ def dump(value) value end - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) # TODO(ruby) this will result in super linear decoding behaviour for nested unions # follow up with a decoding context that captures current strictness levels @@ -655,7 +620,7 @@ def try_strict_coerce(value) # rubocop:enable Style/HashEachMethods end - # @private + # @api private # # @abstract # @@ -670,7 +635,6 @@ def self.[](...) = new(...) 
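[editor's note] To make the `OpenAI::Union` DSL from the hunks above concrete: `discriminator` registers the property used for routing, and `variant` registers a lazily resolved member behind a proc. A hypothetical union is sketched below; `PetCat` and `PetDog` are invented stand-ins, while the calls mirror real unions later in this patch (e.g. `AssistantTool`'s `discriminator :type`).

```ruby
# Invented models, for illustration only.
class PetCat < OpenAI::BaseModel
  required :type, Symbol
  required :name, String
end

class PetDog < OpenAI::BaseModel
  required :type, Symbol
  required :name, String
end

class PetUnion < OpenAI::Union
  discriminator :type

  variant :cat, -> { PetCat } # Symbol key, lazily resolved variant class
  variant :dog, -> { PetDog }
end

# With a discriminator declared, coercion can route on the :type key:
# PetUnion.coerce({type: :cat, name: "Tama"}) # => a PetCat
```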
# @param other [Object] # # @return [Boolean] - # def ===(other) type = item_type case other @@ -686,15 +650,13 @@ def ===(other) # @param other [Object] # # @return [Boolean] - # def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.item_type == item_type - # @private + # @api private # # @param value [Enumerable, Object] # # @return [Array, Object] - # def coerce(value) type = item_type case value @@ -705,12 +667,11 @@ def coerce(value) end end - # @private + # @api private # # @param value [Enumerable, Object] # # @return [Array, Object] - # def dump(value) type = item_type case value @@ -721,12 +682,11 @@ def dump(value) end end - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) case value in Array @@ -760,13 +720,12 @@ def try_strict_coerce(value) end end - # @private + # @api private # # @return [OpenAI::Converter, Class] - # protected def item_type = @item_type_fn.call - # @private + # @api private # # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] # @@ -779,13 +738,12 @@ def try_strict_coerce(value) # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Converter.type_info(type_info || spec) end end - # @private + # @api private # # @abstract # @@ -800,7 +758,6 @@ def self.[](...) = new(...) # @param other [Object] # # @return [Boolean] - # def ===(other) type = item_type case other @@ -821,15 +778,13 @@ def ===(other) # @param other [Object] # # @return [Boolean] - # def ==(other) = other.is_a?(OpenAI::HashOf) && other.item_type == item_type - # @private + # @api private # # @param value [Hash{Object=>Object}, Object] # # @return [Hash{Symbol=>Object}, Object] - # def coerce(value) type = item_type case value @@ -843,12 +798,11 @@ def coerce(value) end end - # @private + # @api private # # @param value [Hash{Object=>Object}, Object] # # @return [Hash{Symbol=>Object}, Object] - # def dump(value) type = item_type case value @@ -861,12 +815,11 @@ def dump(value) end end - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) case value in Hash @@ -900,13 +853,12 @@ def try_strict_coerce(value) end end - # @private + # @api private # # @return [OpenAI::Converter, Class] - # protected def item_type = @item_type_fn.call - # @private + # @api private # # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] # @@ -919,13 +871,12 @@ def try_strict_coerce(value) # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Converter.type_info(type_info || spec) end end - # @private + # @api private # # @abstract # @@ -942,32 +893,29 @@ class BaseModel extend OpenAI::Converter class << self - # @private + # @api private # # Assumes superclass fields are totally defined before fields are accessed / # defined on subclasses. # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - # def known_fields @known_fields ||= (self < OpenAI::BaseModel ? 
superclass.known_fields.dup : {}) end # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - # def fields known_fields.transform_values do |field| {**field.except(:type_fn), type: field.fetch(:type_fn).call} end end - # @private + # @api private # # @return [Hash{Symbol=>Proc}] - # def defaults = (@defaults ||= {}) - # @private + # @api private # # @param name_sym [Symbol] # @@ -984,7 +932,6 @@ def defaults = (@defaults ||= {}) # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # private def add_field(name_sym, required:, type_info:, spec:) type_fn, info = case type_info @@ -1023,7 +970,7 @@ def defaults = (@defaults ||= {}) end end - # @private + # @api private # # @param name_sym [Symbol] # @@ -1038,12 +985,11 @@ def defaults = (@defaults ||= {}) # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # def required(name_sym, type_info, spec = {}) add_field(name_sym, required: true, type_info: type_info, spec: spec) end - # @private + # @api private # # @param name_sym [Symbol] # @@ -1058,18 +1004,16 @@ def required(name_sym, type_info, spec = {}) # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - # def optional(name_sym, type_info, spec = {}) add_field(name_sym, required: false, type_info: type_info, spec: spec) end - # @private + # @api private # # `request_only` attributes not excluded from `.#coerce` when receiving responses # even if well behaved servers should not send them # # @param blk [Proc] - # private def request_only(&blk) @mode = :dump blk.call @@ -1077,12 +1021,11 @@ def optional(name_sym, type_info, spec = {}) @mode = nil end - # @private + # @api private # # `response_only` attributes are omitted from `.#dump` when making requests # # @param blk [Proc] - # private def response_only(&blk) @mode = :coerce blk.call @@ -1094,7 +1037,6 @@ def optional(name_sym, type_info, spec = {}) # @param other [Object] # # @return [Boolean] - # def ==(other) case other in OpenAI::BaseModel @@ -1105,12 +1047,11 @@ def ==(other) end class << self - # @private + # @api private # # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] # # @return [OpenAI::BaseModel, Object] - # def coerce(value) case OpenAI::Util.coerce_hash(value) in Hash => coerced @@ -1120,12 +1061,11 @@ def coerce(value) end end - # @private + # @api private # # @param value [OpenAI::BaseModel, Object] # # @return [Hash{Object=>Object}, Object] - # def dump(value) unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) return value @@ -1157,12 +1097,11 @@ def dump(value) values end - # @private + # @api private # # @param value [Object] # # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - # def try_strict_coerce(value) case value in Hash | OpenAI::BaseModel @@ -1220,7 +1159,6 @@ def try_strict_coerce(value) # @param key [Symbol] # # @return [Object, nil] - # def [](key) unless key.instance_of?(Symbol) raise ArgumentError.new("Expected symbol key for lookup, got #{key.inspect}") @@ -1239,7 +1177,6 @@ def [](key) # should not be mutated. # # @return [Hash{Symbol=>Object}] - # def to_h = @data alias_method :to_hash, :to_h @@ -1247,7 +1184,6 @@ def to_h = @data # @param keys [Array, nil] # # @return [Hash{Symbol=>Object}] - # def deconstruct_keys(keys) (keys || self.class.known_fields.keys).filter_map do |k| unless self.class.known_fields.key?(k) @@ -1262,7 +1198,6 @@ def deconstruct_keys(keys) # Create a new instance of a model. 
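[editor's note] The field DSL above (`required`/`optional`, the `:"nil?"` spec flag, and the `request_only`/`response_only` mode blocks) is easiest to read from the call site. A hypothetical model using only calls defined in this hunk; the `Widget` class and its fields are invented for illustration.

```ruby
class Widget < OpenAI::BaseModel
  required :name, String
  optional :note, String, nil?: true

  request_only do
    # Sent on requests; per the comment above, still accepted by
    # .coerce if a (misbehaving) server echoes it back.
    required :client_tag, String
  end

  response_only do
    # Populated from responses, omitted from .dump on requests.
    required :id, String
  end
end

widget = Widget.new(name: "sprocket", id: "w_123")
widget.to_h # => {name: "sprocket", id: "w_123"}
```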
# # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] - # def initialize(data = {}) case OpenAI::Util.coerce_hash(data) in Hash => coerced @@ -1273,11 +1208,9 @@ def initialize(data = {}) end # @return [String] - # def to_s = @data.to_s # @return [String] - # def inspect "#<#{self.class.name}:0x#{object_id.to_s(16)} #{deconstruct_keys(nil).map do |k, v| "#{k}=#{v.inspect}" diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index 9f315c7b..b8185c65 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @abstract # @@ -27,33 +27,28 @@ module OpenAI # ``` module BasePage # @return [Boolean] - # def next_page? = (raise NotImplementedError) # @raise [OpenAI::APIError] # @return [OpenAI::BasePage] - # def next_page = (raise NotImplementedError) # @param blk [Proc] # # @return [void] - # def auto_paging_each(&) = (raise NotImplementedError) # @return [Enumerable] - # def to_enum = super(:auto_paging_each) alias_method :enum_for, :to_enum - # @private + # @api private # # @param client [OpenAI::BaseClient] # @param req [Hash{Symbol=>Object}] # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param page_data [Object] - # def initialize(client:, req:, headers:, page_data:) @client = client @req = req diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index c2beb4b9..7151b3f7 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @example # ```ruby @@ -18,19 +18,16 @@ module OpenAI # ``` module BaseStream # @return [void] - # def close = OpenAI::Util.close_fused!(@iterator) - # @private + # @api private # # @return [Enumerable] - # private def iterator = (raise NotImplementedError) # @param blk [Proc] # # @return [void] - # def for_each(&) unless block_given? raise ArgumentError.new("A block must be given to ##{__method__}") @@ -39,19 +36,17 @@ def for_each(&) end # @return [Enumerable] - # def to_enum = @iterator alias_method :enum_for, :to_enum - # @private + # @api private # # @param model [Class, OpenAI::Converter] # @param url [URI::Generic] # @param status [Integer] # @param response [Net::HTTPResponse] # @param messages [Enumerable] - # def initialize(model:, url:, status:, response:, messages:) @model = model @url = url diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 7160cc26..126da608 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -66,10 +66,9 @@ class Client < OpenAI::BaseClient # @return [OpenAI::Resources::Responses] attr_reader :responses - # @private + # @api private # # @return [Hash{String=>String}] - # private def auth_headers return {} if @api_key.nil? @@ -93,7 +92,6 @@ class Client < OpenAI::BaseClient # @param initial_retry_delay [Float] # # @param max_retry_delay [Float] - # def initialize( base_url: nil, api_key: ENV["OPENAI_API_KEY"], diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 0e74ea9e..7773eb35 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -30,13 +30,12 @@ class CursorPage # @return [Boolean] attr_accessor :has_more - # @private + # @api private # # @param client [OpenAI::BaseClient] # @param req [Hash{Symbol=>Object}] # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param page_data [Hash{Symbol=>Object}] - # def initialize(client:, req:, headers:, page_data:) super model = req.fetch(:model) @@ -61,7 +60,6 @@ def next_page? 
# @raise [OpenAI::HTTP::Error] # @return [OpenAI::CursorPage] - # def next_page unless next_page? raise RuntimeError.new("No more pages available. Please check #next_page? before calling ##{__method__}") @@ -72,7 +70,6 @@ def next_page end # @param blk [Proc] - # def auto_paging_each(&blk) unless block_given? raise ArgumentError.new("A block must be given to ##{__method__}") @@ -86,7 +83,6 @@ def auto_paging_each(&blk) end # @return [String] - # def inspect "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} has_more=#{has_more.inspect}>" end diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index 40faaaae..90cebc97 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -29,7 +29,7 @@ class APIError < OpenAI::Error # @return [String, nil] attr_reader :type - # @private + # @api private # # @param url [URI::Generic] # @param status [Integer, nil] @@ -37,7 +37,6 @@ class APIError < OpenAI::Error # @param request [nil] # @param response [nil] # @param message [String, nil] - # def initialize(url:, status: nil, body: nil, request: nil, response: nil, message: nil) @url = url @status = status @@ -69,7 +68,7 @@ class APIConnectionError < OpenAI::APIError # # @return [nil] # attr_reader :type - # @private + # @api private # # @param url [URI::Generic] # @param status [nil] @@ -77,7 +76,6 @@ class APIConnectionError < OpenAI::APIError # @param request [nil] # @param response [nil] # @param message [String, nil] - # def initialize( url:, status: nil, @@ -91,7 +89,7 @@ def initialize( end class APITimeoutError < OpenAI::APIConnectionError - # @private + # @api private # # @param url [URI::Generic] # @param status [nil] @@ -99,7 +97,6 @@ class APITimeoutError < OpenAI::APIConnectionError # @param request [nil] # @param response [nil] # @param message [String, nil] - # def initialize( url:, status: nil, @@ -113,7 +110,7 @@ def initialize( end class APIStatusError < OpenAI::APIError - # @private + # @api private # # @param url [URI::Generic] # @param status [Integer] @@ -123,7 +120,6 @@ class APIStatusError < OpenAI::APIError # @param message [String, nil] # # @return [OpenAI::APIStatusError] - # def self.for(url:, status:, body:, request:, response:, message: nil) kwargs = {url: url, status: status, body: body, request: request, response: response, message: message} @@ -165,7 +161,7 @@ def self.for(url:, status:, body:, request:, response:, message: nil) # # @return [String, nil] # attr_reader :type - # @private + # @api private # # @param url [URI::Generic] # @param status [Integer] @@ -173,7 +169,6 @@ def self.for(url:, status:, body:, request:, response:, message: nil) # @param request [nil] # @param response [nil] # @param message [String, nil] - # def initialize(url:, status:, body:, request:, response:, message: nil) message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } @code = OpenAI::Converter.coerce(String, OpenAI::Util.dig(body, :code)) diff --git a/lib/openai/extern.rb b/lib/openai/extern.rb index 3faad4c1..c8e115d3 100644 --- a/lib/openai/extern.rb +++ b/lib/openai/extern.rb @@ -1,10 +1,9 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @abstract - # module Extern end end diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 84765c9f..96744e0c 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -4,7 +4,6 @@ module OpenAI module Models module Audio # @abstract - # class SpeechModel < OpenAI::Enum 
TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index d0c79556..9be2124c 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -125,7 +125,6 @@ class Model < OpenAI::Union end # @abstract - # class TimestampGranularity < OpenAI::Enum WORD = :word SEGMENT = :segment diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 278a37e5..25a9c3e1 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -4,7 +4,6 @@ module OpenAI module Models module Audio # @abstract - # class TranslationCreateResponse < OpenAI::Union variant -> { OpenAI::Models::Audio::Translation } diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 1043030f..81db712e 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -3,7 +3,6 @@ module OpenAI module Models # @abstract - # class AudioModel < OpenAI::Enum WHISPER_1 = :"whisper-1" diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 7cd4e9b8..2b17c0d2 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -4,7 +4,6 @@ module OpenAI module Models module Beta # @abstract - # class AssistantTool < OpenAI::Union discriminator :type diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 3cf817fa..bb147096 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -376,7 +376,6 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Tool < OpenAI::Union discriminator :type @@ -701,7 +700,6 @@ class FileSearch < OpenAI::BaseModel end # @abstract - # class Tool < OpenAI::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 46dfaee7..fc3e6299 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -155,7 +155,6 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Tool < OpenAI::Union discriminator :type diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index b434fe46..71c118cd 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -173,7 +173,6 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Tool < OpenAI::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 544cf794..e89ac3e5 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -115,7 +115,6 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Tool < OpenAI::Union discriminator :type diff --git 
a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index c655fda0..57a11b7c 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -344,7 +344,6 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Tool < OpenAI::Union discriminator :type diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 1d4c531e..ae9413a8 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -6,7 +6,6 @@ module Beta module Threads module Runs # @abstract - # class RunStepInclude < OpenAI::Enum STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 7dcbccab..e3682e55 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -134,7 +134,6 @@ class ArrayOfContentPart < OpenAI::Union end # @deprecated - # class FunctionCall < OpenAI::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 8d0ec4c1..0c8a0cea 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -194,7 +194,6 @@ class Delta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @deprecated - # class FunctionCall < OpenAI::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_function_message_param.rb b/lib/openai/models/chat/chat_completion_function_message_param.rb index 4ce20d75..1da70875 100644 --- a/lib/openai/models/chat/chat_completion_function_message_param.rb +++ b/lib/openai/models/chat/chat_completion_function_message_param.rb @@ -4,7 +4,6 @@ module OpenAI module Models module Chat # @deprecated - # class ChatCompletionFunctionMessageParam < OpenAI::BaseModel # @!attribute content # The contents of the function message. 
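[editor's note] Many of the hunks above and below touch one-line `OpenAI::Enum` subclasses. For reference, the enum plumbing defined earlier in base_model.rb behaves like this for one of them (assuming `whisper-1` is `AudioModel`'s only member, as in the context shown):

```ruby
OpenAI::Models::AudioModel.values
# => [:"whisper-1"]

OpenAI::Models::AudioModel.coerce("whisper-1")
# => :"whisper-1"   (strings matching a member are symbolized)

OpenAI::Models::AudioModel.coerce("not-a-model")
# => "not-a-model"  (non-members pass through unchanged)
```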
diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 96223d57..228616ad 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -151,7 +151,6 @@ class URLCitation < OpenAI::BaseModel end # @deprecated - # class FunctionCall < OpenAI::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index e7558545..725b907d 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -4,7 +4,6 @@ module OpenAI module Models module Chat # @abstract - # class ChatCompletionModality < OpenAI::Enum TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index b77143a8..4426c2a5 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -473,7 +473,6 @@ class FunctionCallMode < OpenAI::Enum end # @deprecated - # class Function < OpenAI::BaseModel # @!attribute name # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain @@ -520,7 +519,6 @@ class Function < OpenAI::BaseModel end # @abstract - # class Modality < OpenAI::Enum TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 7b512490..29b0a851 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -3,7 +3,6 @@ module OpenAI module Models # @abstract - # class ChatModel < OpenAI::Enum O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index ae14fe32..65247fdf 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -3,7 +3,6 @@ module OpenAI module Models # @abstract - # class EmbeddingModel < OpenAI::Enum TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 926e7f82..988c7703 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -168,7 +168,6 @@ class Preset < OpenAI::Enum end # @deprecated - # class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. 
A larger batch size means that model diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index c9c62780..e49e6699 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -3,7 +3,6 @@ module OpenAI module Models # @abstract - # class ImageModel < OpenAI::Enum DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 98a25176..0f3c5a90 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -310,7 +310,6 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Harassment < OpenAI::Enum TEXT = :text @@ -318,7 +317,6 @@ class Harassment < OpenAI::Enum end # @abstract - # class HarassmentThreatening < OpenAI::Enum TEXT = :text @@ -326,7 +324,6 @@ class HarassmentThreatening < OpenAI::Enum end # @abstract - # class Hate < OpenAI::Enum TEXT = :text @@ -334,7 +331,6 @@ class Hate < OpenAI::Enum end # @abstract - # class HateThreatening < OpenAI::Enum TEXT = :text @@ -342,7 +338,6 @@ class HateThreatening < OpenAI::Enum end # @abstract - # class Illicit < OpenAI::Enum TEXT = :text @@ -350,7 +345,6 @@ class Illicit < OpenAI::Enum end # @abstract - # class IllicitViolent < OpenAI::Enum TEXT = :text @@ -358,7 +352,6 @@ class IllicitViolent < OpenAI::Enum end # @abstract - # class SelfHarm < OpenAI::Enum TEXT = :text IMAGE = :image @@ -367,7 +360,6 @@ class SelfHarm < OpenAI::Enum end # @abstract - # class SelfHarmInstruction < OpenAI::Enum TEXT = :text IMAGE = :image @@ -376,7 +368,6 @@ class SelfHarmInstruction < OpenAI::Enum end # @abstract - # class SelfHarmIntent < OpenAI::Enum TEXT = :text IMAGE = :image @@ -385,7 +376,6 @@ class SelfHarmIntent < OpenAI::Enum end # @abstract - # class Sexual < OpenAI::Enum TEXT = :text IMAGE = :image @@ -394,7 +384,6 @@ class Sexual < OpenAI::Enum end # @abstract - # class SexualMinor < OpenAI::Enum TEXT = :text @@ -402,7 +391,6 @@ class SexualMinor < OpenAI::Enum end # @abstract - # class Violence < OpenAI::Enum TEXT = :text IMAGE = :image @@ -411,7 +399,6 @@ class Violence < OpenAI::Enum end # @abstract - # class ViolenceGraphic < OpenAI::Enum TEXT = :text IMAGE = :image diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index 2abe4a13..4089ad86 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -3,7 +3,6 @@ module OpenAI module Models # @abstract - # class ModerationModel < OpenAI::Enum OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index df500ca6..eb1bd637 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -131,7 +131,6 @@ class Result < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index ff451753..268a7e7d 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -128,7 +128,6 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class 
Ranker < OpenAI::Enum AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index aa09a30a..bc92a2f8 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -51,7 +51,6 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index f5859e61..fbe67069 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -50,7 +50,6 @@ class FileBatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index c91648aa..a303415b 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -50,7 +50,6 @@ class FileCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 047e821f..5b0b4e3d 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -35,7 +35,6 @@ class FileUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 27d1234a..15841d27 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -156,7 +156,6 @@ class Status < OpenAI::Enum end # @abstract - # class Attribute < OpenAI::Union variant String diff --git a/lib/openai/page.rb b/lib/openai/page.rb index fa3bd198..0d0866e7 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -30,13 +30,12 @@ class Page # @return [String] attr_accessor :object - # @private + # @api private # # @param client [OpenAI::BaseClient] # @param req [Hash{Symbol=>Object}] # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param page_data [Array] - # def initialize(client:, req:, headers:, page_data:) super model = req.fetch(:model) @@ -61,13 +60,11 @@ def next_page? # @raise [OpenAI::HTTP::Error] # @return [OpenAI::Page] - # def next_page RuntimeError.new("No more pages available.") end # @param blk [Proc] - # def auto_paging_each(&blk) unless block_given? 
raise ArgumentError.new("A block must be given to ##{__method__}") @@ -81,7 +78,6 @@ def auto_paging_each(&blk) end # @return [String] - # def inspect "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} object=#{object.inspect}>" end diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index d1a15ffe..0e16cc88 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -1,16 +1,14 @@ # frozen_string_literal: true module OpenAI - # @private - # + # @api private class PooledNetRequester class << self - # @private + # @api private # # @param url [URI::Generic] # # @return [Net::HTTP] - # def connect(url) port = case [url.port, url.scheme] @@ -28,17 +26,16 @@ def connect(url) end end - # @private + # @api private # # @param conn [Net::HTTP] # @param deadline [Float] - # def calibrate_socket_timeout(conn, deadline) timeout = deadline - OpenAI::Util.monotonic_secs conn.open_timeout = conn.read_timeout = conn.write_timeout = conn.continue_timeout = timeout end - # @private + # @api private # # @param request [Hash{Symbol=>Object}] . # @@ -51,7 +48,6 @@ def calibrate_socket_timeout(conn, deadline) # @param blk [Proc] # # @return [Net::HTTPGenericRequest] - # def build_request(request, &) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) req = Net::HTTPGenericRequest.new( @@ -80,11 +76,10 @@ def build_request(request, &) end end - # @private + # @api private # # @param url [URI::Generic] # @param blk [Proc] - # private def with_pool(url, &) origin = OpenAI::Util.uri_origin(url) pool = @@ -97,7 +92,7 @@ def build_request(request, &) pool.with(&) end - # @private + # @api private # # @param request [Hash{Symbol=>Object}] . # @@ -112,7 +107,6 @@ def build_request(request, &) # @option request [Float] :deadline # # @return [Array(Net::HTTPResponse, Enumerable)] - # def execute(request) url, deadline = request.fetch_values(:url, :deadline) @@ -158,10 +152,9 @@ def execute(request) [response, (response.body = body)] end - # @private + # @api private # # @param size [Integer] - # def initialize(size: Etc.nprocessors) @mutex = Mutex.new @size = size diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index befdf96f..405cf3c3 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -1,10 +1,9 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @abstract - # module RequestParameters # @!parse # # Options to specify HTTP behaviour for this request. @@ -12,7 +11,6 @@ module RequestParameters # attr_accessor :request_options # @param mod [Module] - # def self.included(mod) return unless mod <= OpenAI::BaseModel @@ -20,15 +18,13 @@ def self.included(mod) mod.optional(:request_options, OpenAI::RequestOptions) end - # @private - # + # @api private module Converter - # @private + # @api private # # @param params [Object] # # @return [Array(Object, Hash{Symbol=>Object})] - # def dump_request(params) case (dumped = dump(params)) in Hash @@ -46,12 +42,11 @@ def dump_request(params) # When making a request, you can pass an actual {RequestOptions} instance, or # simply pass a Hash with symbol keys matching the attributes on this class. 
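[editor's note] As the comment above notes, a plain Hash with symbol keys is interchangeable with a `RequestOptions` instance anywhere a method accepts `:request_options`. A sketch of both spellings; the option chosen (`timeout`, documented on `build_request`'s opts) and the resource method (`batches.list`) are illustrative.

```ruby
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

client.batches.list(request_options: {timeout: 30})
client.batches.list(request_options: OpenAI::RequestOptions.new(timeout: 30))
```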
class RequestOptions < OpenAI::BaseModel - # @private + # @api private # # @param opts [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # @raise [ArgumentError] - # def self.validate!(opts) case opts in OpenAI::RequestOptions | Hash diff --git a/lib/openai/resources/audio.rb b/lib/openai/resources/audio.rb index db698a4b..e82c41f4 100644 --- a/lib/openai/resources/audio.rb +++ b/lib/openai/resources/audio.rb @@ -13,7 +13,6 @@ class Audio attr_reader :speech # @param client [OpenAI::Client] - # def initialize(client:) @client = client @transcriptions = OpenAI::Resources::Audio::Transcriptions.new(client: client) diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index dce52041..9c5d8284 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -27,7 +27,6 @@ class Speech # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [Object] - # def create(params) parsed, options = OpenAI::Models::Audio::SpeechCreateParams.dump_request(params) @client.request( @@ -41,7 +40,6 @@ def create(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 4c158734..9e291700 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -41,7 +41,6 @@ class Transcriptions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] - # def create(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) @client.request( @@ -55,7 +54,6 @@ def create(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index 4e1431c1..c1de4f8e 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -31,7 +31,6 @@ class Translations # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] - # def create(params) parsed, options = OpenAI::Models::Audio::TranslationCreateParams.dump_request(params) @client.request( @@ -45,7 +44,6 @@ def create(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 751d6548..8f0799eb 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -35,7 +35,6 @@ class Batches # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] - # def create(params) parsed, options = OpenAI::Models::BatchCreateParams.dump_request(params) @client.request( @@ -56,7 +55,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] - # def retrieve(batch_id, params = {}) @client.request( method: :get, @@ -81,7 +79,6 @@ def retrieve(batch_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = 
OpenAI::Models::BatchListParams.dump_request(params) @client.request( @@ -105,7 +102,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] - # def cancel(batch_id, params = {}) @client.request( method: :post, @@ -116,7 +112,6 @@ def cancel(batch_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/beta.rb b/lib/openai/resources/beta.rb index 4eee364b..c1b7273a 100644 --- a/lib/openai/resources/beta.rb +++ b/lib/openai/resources/beta.rb @@ -10,7 +10,6 @@ class Beta attr_reader :threads # @param client [OpenAI::Client] - # def initialize(client:) @client = client @assistants = OpenAI::Resources::Beta::Assistants.new(client: client) diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 45d0010d..5911fe3f 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -78,7 +78,6 @@ class Assistants # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] - # def create(params) parsed, options = OpenAI::Models::Beta::AssistantCreateParams.dump_request(params) @client.request( @@ -99,7 +98,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] - # def retrieve(assistant_id, params = {}) @client.request( method: :get, @@ -185,7 +183,6 @@ def retrieve(assistant_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] - # def update(assistant_id, params = {}) parsed, options = OpenAI::Models::Beta::AssistantUpdateParams.dump_request(params) @client.request( @@ -220,7 +217,6 @@ def update(assistant_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = OpenAI::Models::Beta::AssistantListParams.dump_request(params) @client.request( @@ -242,7 +238,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::AssistantDeleted] - # def delete(assistant_id, params = {}) @client.request( method: :delete, @@ -253,7 +248,6 @@ def delete(assistant_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index b7e58a7c..d8f2e660 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -32,7 +32,6 @@ class Threads # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] - # def create(params = {}) parsed, options = OpenAI::Models::Beta::ThreadCreateParams.dump_request(params) @client.request( @@ -53,7 +52,6 @@ def create(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] - # def retrieve(thread_id, params = {}) @client.request( method: :get, @@ -84,7 +82,6 @@ def retrieve(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] - # def update(thread_id, 
params = {}) parsed, options = OpenAI::Models::Beta::ThreadUpdateParams.dump_request(params) @client.request( @@ -105,7 +102,6 @@ def update(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::ThreadDeleted] - # def delete(thread_id, params = {}) @client.request( method: :delete, @@ -210,7 +206,6 @@ def delete(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) parsed.delete(:stream) @@ -318,7 +313,6 @@ def create_and_run(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def create_and_run_streaming(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) parsed.store(:stream, true) @@ -334,7 +328,6 @@ def create_and_run_streaming(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @runs = OpenAI::Resources::Beta::Threads::Runs.new(client: client) diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 3ea2f318..4e5141de 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -33,7 +33,6 @@ class Messages # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] - # def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageCreateParams.dump_request(params) @client.request( @@ -57,7 +56,6 @@ def create(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] - # def retrieve(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageRetrieveParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -89,7 +87,6 @@ def retrieve(message_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] - # def update(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageUpdateParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -132,7 +129,6 @@ def update(message_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::MessageListParams.dump_request(params) @client.request( @@ -156,7 +152,6 @@ def list(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::MessageDeleted] - # def delete(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageDeleteParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -171,7 +166,6 @@ def delete(message_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 34f408d7..2f3b3b12 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -123,7 +123,6 
@@ class Runs # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) parsed.delete(:stream) @@ -253,7 +252,6 @@ def create(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def create_streaming(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) parsed.store(:stream, true) @@ -282,7 +280,6 @@ def create_streaming(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def retrieve(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunRetrieveParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -315,7 +312,6 @@ def retrieve(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def update(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunUpdateParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -355,7 +351,6 @@ def update(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::RunListParams.dump_request(params) @client.request( @@ -379,7 +374,6 @@ def list(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def cancel(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCancelParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -411,7 +405,6 @@ def cancel(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] - # def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) parsed.delete(:stream) @@ -445,7 +438,6 @@ def submit_tool_outputs(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def submit_tool_outputs_streaming(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) parsed.store(:stream, true) @@ -464,7 +456,6 @@ def submit_tool_outputs_streaming(run_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @steps = OpenAI::Resources::Beta::Threads::Runs::Steps.new(client: client) diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index e44fb9a5..d5c4ddb3 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -28,7 +28,6 @@ class Steps # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - # def retrieve(step_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -82,7 +81,6 @@ def retrieve(step_id, params) # @option params 
[OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepListParams.dump_request(params) thread_id = parsed.delete(:thread_id) do @@ -99,7 +97,6 @@ def list(run_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/chat.rb b/lib/openai/resources/chat.rb index 8945e202..d5bf1e2e 100644 --- a/lib/openai/resources/chat.rb +++ b/lib/openai/resources/chat.rb @@ -7,7 +7,6 @@ class Chat attr_reader :completions # @param client [OpenAI::Client] - # def initialize(client:) @client = client @completions = OpenAI::Resources::Chat::Completions.new(client: client) diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 7be5f0ae..94e76ede 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -213,7 +213,6 @@ class Completions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] - # def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) parsed.delete(:stream) @@ -432,7 +431,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def create_streaming(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) parsed.store(:stream, true) @@ -457,7 +455,6 @@ def create_streaming(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] - # def retrieve(completion_id, params = {}) @client.request( method: :get, @@ -485,7 +482,6 @@ def retrieve(completion_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] - # def update(completion_id, params) parsed, options = OpenAI::Models::Chat::CompletionUpdateParams.dump_request(params) @client.request( @@ -518,7 +514,6 @@ def update(completion_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = OpenAI::Models::Chat::CompletionListParams.dump_request(params) @client.request( @@ -541,7 +536,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletionDeleted] - # def delete(completion_id, params = {}) @client.request( method: :delete, @@ -552,7 +546,6 @@ def delete(completion_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @messages = OpenAI::Resources::Chat::Completions::Messages.new(client: client) diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index 3bc5880d..decc122d 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -22,7 +22,6 @@ class Messages # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(completion_id, params = {}) parsed, options = OpenAI::Models::Chat::Completions::MessageListParams.dump_request(params) @client.request( @@ -36,7 +35,6 @@ 
def list(completion_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 1a6dcb98..a16f4bcf 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -113,7 +113,6 @@ class Completions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Completion] - # def create(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) parsed.delete(:stream) @@ -236,7 +235,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def create_streaming(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) parsed.store(:stream, true) @@ -252,7 +250,6 @@ def create_streaming(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index b5e6eda1..b70c1ef5 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -35,7 +35,6 @@ class Embeddings # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::CreateEmbeddingResponse] - # def create(params) parsed, options = OpenAI::Models::EmbeddingCreateParams.dump_request(params) @client.request( @@ -48,7 +47,6 @@ def create(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 2164c02b..191387e3 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -37,7 +37,6 @@ class Files # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileObject] - # def create(params) parsed, options = OpenAI::Models::FileCreateParams.dump_request(params) @client.request( @@ -59,7 +58,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileObject] - # def retrieve(file_id, params = {}) @client.request( method: :get, @@ -89,7 +87,6 @@ def retrieve(file_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = OpenAI::Models::FileListParams.dump_request(params) @client.request( @@ -111,7 +108,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileDeleted] - # def delete(file_id, params = {}) @client.request( method: :delete, @@ -130,7 +126,6 @@ def delete(file_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [Object] - # def content(file_id, params = {}) @client.request( method: :get, @@ -142,7 +137,6 @@ def content(file_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/fine_tuning.rb b/lib/openai/resources/fine_tuning.rb index e7a161e4..61663e79 100644 --- a/lib/openai/resources/fine_tuning.rb +++ b/lib/openai/resources/fine_tuning.rb @@ -7,7 +7,6 @@ class FineTuning attr_reader :jobs # @param client [OpenAI::Client] - # def initialize(client:) 
@client = client @jobs = OpenAI::Resources::FineTuning::Jobs.new(client: client) diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index bcfa238e..177d978b 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -78,7 +78,6 @@ class Jobs # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] - # def create(params) parsed, options = OpenAI::Models::FineTuning::JobCreateParams.dump_request(params) @client.request( @@ -101,7 +100,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] - # def retrieve(fine_tuning_job_id, params = {}) @client.request( method: :get, @@ -125,7 +123,6 @@ def retrieve(fine_tuning_job_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = OpenAI::Models::FineTuning::JobListParams.dump_request(params) @client.request( @@ -147,7 +144,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] - # def cancel(fine_tuning_job_id, params = {}) @client.request( method: :post, @@ -170,7 +166,6 @@ def cancel(fine_tuning_job_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list_events(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::JobListEventsParams.dump_request(params) @client.request( @@ -184,7 +179,6 @@ def list_events(fine_tuning_job_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @checkpoints = OpenAI::Resources::FineTuning::Jobs::Checkpoints.new(client: client) diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index e9f2d303..cb4b3c18 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -18,7 +18,6 @@ class Checkpoints # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::Jobs::CheckpointListParams.dump_request(params) @client.request( @@ -32,7 +31,6 @@ def list(fine_tuning_job_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 8cc42136..48b2ac87 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -30,7 +30,6 @@ class Images # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] - # def create_variation(params) parsed, options = OpenAI::Models::ImageCreateVariationParams.dump_request(params) @client.request( @@ -76,7 +75,6 @@ def create_variation(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] - # def edit(params) parsed, options = OpenAI::Models::ImageEditParams.dump_request(params) @client.request( @@ -125,7 +123,6 @@ def edit(params) # @option params 
[OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] - # def generate(params) parsed, options = OpenAI::Models::ImageGenerateParams.dump_request(params) @client.request( @@ -138,7 +135,6 @@ def generate(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index 9c086955..cff4bf06 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -13,7 +13,6 @@ class Models # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Model] - # def retrieve(model, params = {}) @client.request( method: :get, @@ -31,7 +30,6 @@ def retrieve(model, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] - # def list(params = {}) @client.request( method: :get, @@ -52,7 +50,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ModelDeleted] - # def delete(model, params = {}) @client.request( method: :delete, @@ -63,7 +60,6 @@ def delete(model, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index f1375d48..85d8cd8f 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -19,7 +19,6 @@ class Moderations # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ModerationCreateResponse] - # def create(params) parsed, options = OpenAI::Models::ModerationCreateParams.dump_request(params) @client.request( @@ -32,7 +31,6 @@ def create(params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 3b4c2c2d..fdbe46bc 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -127,7 +127,6 @@ class Responses # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Responses::Response] - # def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) parsed.delete(:stream) @@ -261,7 +260,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] - # def create_streaming(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) parsed.store(:stream, true) @@ -288,7 +286,6 @@ def create_streaming(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Responses::Response] - # def retrieve(response_id, params = {}) parsed, options = OpenAI::Models::Responses::ResponseRetrieveParams.dump_request(params) @client.request( @@ -309,7 +306,6 @@ def retrieve(response_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [nil] - # def delete(response_id, params = {}) @client.request( method: :delete, @@ -320,7 +316,6 @@ def delete(response_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @input_items = 
OpenAI::Resources::Responses::InputItems.new(client: client) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 87ea88ab..1fe57b71 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -25,7 +25,6 @@ class InputItems # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(response_id, params = {}) parsed, options = OpenAI::Models::Responses::InputItemListParams.dump_request(params) @client.request( @@ -39,7 +38,6 @@ def list(response_id, params = {}) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index e7a40175..f72e4255 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -45,7 +45,6 @@ class Uploads # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] - # def create(params) parsed, options = OpenAI::Models::UploadCreateParams.dump_request(params) @client.request( @@ -66,7 +65,6 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] - # def cancel(upload_id, params = {}) @client.request( method: :post, @@ -102,7 +100,6 @@ def cancel(upload_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] - # def complete(upload_id, params) parsed, options = OpenAI::Models::UploadCompleteParams.dump_request(params) @client.request( @@ -115,7 +112,6 @@ def complete(upload_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @parts = OpenAI::Resources::Uploads::Parts.new(client: client) diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index c6f90707..b90eef15 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -25,7 +25,6 @@ class Parts # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Uploads::UploadPart] - # def create(upload_id, params) parsed, options = OpenAI::Models::Uploads::PartCreateParams.dump_request(params) @client.request( @@ -39,7 +38,6 @@ def create(upload_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index 18803daa..e60a1592 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -34,7 +34,6 @@ class VectorStores # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] - # def create(params = {}) parsed, options = OpenAI::Models::VectorStoreCreateParams.dump_request(params) @client.request( @@ -55,7 +54,6 @@ def create(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] - # def retrieve(vector_store_id, params = {}) @client.request( method: :get, @@ -85,7 +83,6 @@ def retrieve(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] - # def update(vector_store_id, params = {}) 
parsed, options = OpenAI::Models::VectorStoreUpdateParams.dump_request(params) @client.request( @@ -120,7 +117,6 @@ def update(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(params = {}) parsed, options = OpenAI::Models::VectorStoreListParams.dump_request(params) @client.request( @@ -142,7 +138,6 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStoreDeleted] - # def delete(vector_store_id, params = {}) @client.request( method: :delete, @@ -173,7 +168,6 @@ def delete(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] - # def search(vector_store_id, params) parsed, options = OpenAI::Models::VectorStoreSearchParams.dump_request(params) @client.request( @@ -187,7 +181,6 @@ def search(vector_store_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client @files = OpenAI::Resources::VectorStores::Files.new(client: client) diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 3c484618..7893fd17 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -26,7 +26,6 @@ class FileBatches # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] - # def create(vector_store_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCreateParams.dump_request(params) @client.request( @@ -49,7 +48,6 @@ def create(vector_store_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] - # def retrieve(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchRetrieveParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -75,7 +73,6 @@ def retrieve(batch_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] - # def cancel(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCancelParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -119,7 +116,6 @@ def cancel(batch_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list_files(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchListFilesParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -136,7 +132,6 @@ def list_files(batch_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 23eafaa3..0c9ae2a5 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -28,7 +28,6 @@ class Files # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] - # def create(vector_store_id, params) parsed, options = 
OpenAI::Models::VectorStores::FileCreateParams.dump_request(params) @client.request( @@ -51,7 +50,6 @@ def create(vector_store_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] - # def retrieve(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileRetrieveParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -82,7 +80,6 @@ def retrieve(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] - # def update(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileUpdateParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -124,7 +121,6 @@ def update(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] - # def list(vector_store_id, params = {}) parsed, options = OpenAI::Models::VectorStores::FileListParams.dump_request(params) @client.request( @@ -151,7 +147,6 @@ def list(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] - # def delete(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileDeleteParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -176,7 +171,6 @@ def delete(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] - # def content(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileContentParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do @@ -192,7 +186,6 @@ def content(file_id, params) end # @param client [OpenAI::Client] - # def initialize(client:) @client = client end diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index f9319992..02dea5de 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module OpenAI - # @private + # @api private # # @example # ```ruby @@ -19,10 +19,9 @@ module OpenAI class Stream include OpenAI::BaseStream - # @private + # @api private # # @return [Enumerable] - # private def iterator # rubocop:disable Metrics/BlockLength @iterator ||= OpenAI::Util.chain_fused(@messages) do |y| diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 2546262b..2f4fecde 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -3,20 +3,17 @@ module OpenAI # rubocop:disable Metrics/ModuleLength - # @private - # + # @api private module Util - # @private + # @api private # # @return [Float] - # def self.monotonic_secs = Process.clock_gettime(Process::CLOCK_MONOTONIC) class << self - # @private + # @api private # # @return [String] - # def arch case (arch = RbConfig::CONFIG["arch"])&.downcase in nil @@ -32,10 +29,9 @@ def arch end end - # @private + # @api private # # @return [String] - # def os case (host = RbConfig::CONFIG["host_os"])&.downcase in nil @@ -57,12 +53,11 @@ def os end class << self - # @private + # @api private # # @param input [Object] # # @return [Boolean, Object] - # def primitive?(input) case input in true | false | Integer | Float | Symbol | String @@ -72,12 +67,11 @@ def primitive?(input) end end - # @private + # @api private # # @param input [Object] # # @return [Boolean, Object] - # def coerce_boolean(input) case 
input.is_a?(String) ? input.downcase : input in Numeric @@ -91,13 +85,12 @@ def coerce_boolean(input) end end - # @private + # @api private # # @param input [Object] # # @raise [ArgumentError] # @return [Boolean, nil] - # def coerce_boolean!(input) case coerce_boolean(input) in true | false | nil => coerced @@ -107,12 +100,11 @@ def coerce_boolean!(input) end end - # @private + # @api private # # @param input [Object] # # @return [Integer, Object] - # def coerce_integer(input) case input in true @@ -124,12 +116,11 @@ def coerce_integer(input) end end - # @private + # @api private # # @param input [Object] # # @return [Float, Object] - # def coerce_float(input) case input in true @@ -141,12 +132,11 @@ def coerce_float(input) end end - # @private + # @api private # # @param input [Object] # # @return [Hash{Object=>Object}, Object] - # def coerce_hash(input) case input in NilClass | Array | Set | Enumerator @@ -165,14 +155,13 @@ def coerce_hash(input) OMIT = Object.new.freeze class << self - # @private + # @api private # # @param lhs [Object] # @param rhs [Object] # @param concat [Boolean] # # @return [Object] - # private def deep_merge_lr(lhs, rhs, concat: false) case [lhs, rhs, concat] in [Hash, Hash, _] @@ -191,7 +180,7 @@ class << self end end - # @private + # @api private # # Recursively merge one hash with another. If the values at a given key are not # both hashes, just take the new value. @@ -203,7 +192,6 @@ class << self # @param concat [Boolean] whether to merge sequences by concatenation. # # @return [Object] - # def deep_merge(*values, sentinel: nil, concat: false) case values in [value, *values] @@ -215,7 +203,7 @@ def deep_merge(*values, sentinel: nil, concat: false) end end - # @private + # @api private # # @param data [Hash{Symbol=>Object}, Array, Object] # @param pick [Symbol, Integer, Array, nil] @@ -223,7 +211,6 @@ def deep_merge(*values, sentinel: nil, concat: false) # @param blk [Proc, nil] # # @return [Object, nil] - # def dig(data, pick, sentinel = nil, &blk) case [data, pick, blk] in [_, nil, nil] @@ -248,22 +235,20 @@ def dig(data, pick, sentinel = nil, &blk) end class << self - # @private + # @api private # # @param uri [URI::Generic] # # @return [String] - # def uri_origin(uri) "#{uri.scheme}://#{uri.host}#{uri.port == uri.default_port ? '' : ":#{uri.port}"}" end - # @private + # @api private # # @param path [String, Array] # # @return [String] - # def interpolate_path(path) case path in String @@ -278,40 +263,37 @@ def interpolate_path(path) end class << self - # @private + # @api private # # @param query [String, nil] # # @return [Hash{String=>Array}] - # def decode_query(query) CGI.parse(query.to_s) end - # @private + # @api private # # @param query [Hash{String=>Array, String, nil}, nil] # # @return [String, nil] - # def encode_query(query) query.to_h.empty? ? nil : URI.encode_www_form(query) end end class << self - # @private + # @api private # # @param url [URI::Generic, String] # # @return [Hash{Symbol=>String, Integer, nil}] - # def parse_uri(url) parsed = URI::Generic.component.zip(URI.split(url)).to_h {**parsed, query: decode_query(parsed.fetch(:query))} end - # @private + # @api private # # @param parsed [Hash{Symbol=>String, Integer, nil}] . # @@ -326,12 +308,11 @@ def parse_uri(url) # @option parsed [Hash{String=>Array}] :query # # @return [URI::Generic] - # def unparse_uri(parsed) URI::Generic.build(**parsed, query: encode_query(parsed.fetch(:query))) end - # @private + # @api private # # @param lhs [Hash{Symbol=>String, Integer, nil}] . 
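The `deep_merge` helper documented above drives header and request-option merging throughout the SDK. Below is a minimal standalone sketch of the documented semantics, reduced to a two-argument form (the private `OpenAI::Util.deep_merge` additionally accepts a sentinel and a variable number of values); it is illustrative, not the SDK's implementation.

```ruby
# Recursive merge per the doc comment: hashes merge key-wise, arrays
# concatenate only when `concat:` is set, and anything else takes the
# right-hand value.
def deep_merge(lhs, rhs, concat: false)
  case [lhs, rhs]
  in [Hash, Hash]
    lhs.merge(rhs) { |_key, old, new| deep_merge(old, new, concat: concat) }
  in [Array, Array] if concat
    lhs + rhs
  else
    rhs
  end
end

deep_merge({a: {b: 1}, tags: ["x"]}, {a: {c: 2}, tags: ["y"]}, concat: true)
# => {a: {b: 1, c: 2}, tags: ["x", "y"]}
```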
# @@ -358,7 +339,6 @@ def unparse_uri(parsed) # @option rhs [Hash{String=>Array}] :query # # @return [URI::Generic] - # def join_parsed_uri(lhs, rhs) base_path, base_query = lhs.fetch_values(:path, :query) slashed = base_path.end_with?("/") ? base_path : "#{base_path}/" @@ -380,12 +360,11 @@ def join_parsed_uri(lhs, rhs) end class << self - # @private + # @api private # # @param headers [Hash{String=>String, Integer, Array, nil}] # # @return [Hash{String=>String}] - # def normalized_headers(*headers) {}.merge(*headers.compact).to_h do |key, val| case val @@ -399,16 +378,15 @@ def normalized_headers(*headers) end end - # @private + # @api private # # An adapter that satisfies the IO interface required by `::IO.copy_stream` class ReadIOAdapter - # @private + # @api private # # @param max_len [Integer, nil] # # @return [String] - # private def read_enum(max_len) case max_len in nil @@ -422,13 +400,12 @@ class ReadIOAdapter @buf.slice!(0..) end - # @private + # @api private # # @param max_len [Integer, nil] # @param out_string [String, nil] # # @return [String, nil] - # def read(max_len = nil, out_string = nil) case @stream in nil @@ -447,11 +424,10 @@ def read(max_len = nil, out_string = nil) .tap(&@blk) end - # @private + # @api private # # @param stream [String, IO, StringIO, Enumerable] # @param blk [Proc] - # def initialize(stream, &blk) @stream = stream.is_a?(String) ? StringIO.new(stream) : stream @buf = String.new.b @@ -463,7 +439,6 @@ class << self # @param blk [Proc] # # @return [Enumerable] - # def string_io(&blk) Enumerator.new do |y| y.define_singleton_method(:write) do @@ -477,13 +452,12 @@ def string_io(&blk) end class << self - # @private + # @api private # # @param y [Enumerator::Yielder] # @param boundary [String] # @param key [Symbol, String] # @param val [Object] - # private def encode_multipart_formdata(y, boundary:, key:, val:) y << "--#{boundary}\r\n" y << "Content-Disposition: form-data" @@ -516,12 +490,11 @@ class << self y << "\r\n" end - # @private + # @api private # # @param body [Object] # # @return [Array(String, Enumerable)] - # private def encode_multipart_streaming(body) boundary = SecureRandom.urlsafe_base64(60) @@ -547,13 +520,12 @@ class << self [boundary, strio] end - # @private + # @api private # # @param headers [Hash{String=>String}] # @param body [Object] # # @return [Object] - # def encode_content(headers, body) content_type = headers["content-type"] case [content_type, body] @@ -572,7 +544,7 @@ def encode_content(headers, body) end end - # @private + # @api private # # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param stream [Enumerable] @@ -580,7 +552,6 @@ def encode_content(headers, body) # # @raise [JSON::ParserError] # @return [Object] - # def decode_content(headers, stream:, suppress_error: false) case headers["content-type"] in %r{^application/(?:vnd\.api\+)?json} @@ -609,7 +580,7 @@ def decode_content(headers, stream:, suppress_error: false) end class << self - # @private + # @api private # # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html # @@ -618,7 +589,6 @@ class << self # @param close [Proc] # # @return [Enumerable] - # def fused_enum(enum, external: false, &close) fused = false iter = Enumerator.new do |y| @@ -642,10 +612,9 @@ def fused_enum(enum, external: false, &close) iter end - # @private + # @api private # # @param enum [Enumerable, nil] - # def close_fused!(enum) return unless enum.is_a?(Enumerator) @@ -654,11 +623,10 @@ def close_fused!(enum) # rubocop:enable Lint/UnreachableLoop end - # @private + # @api 
private # # @param enum [Enumerable, nil] # @param blk [Proc] - # def chain_fused(enum, &blk) iter = Enumerator.new { blk.call(_1) } fused_enum(iter) { close_fused!(enum) } @@ -666,12 +634,11 @@ def chain_fused(enum, &blk) end class << self - # @private + # @api private # # @param enum [Enumerable] # # @return [Enumerable] - # def decode_lines(enum) re = /(\r\n|\r|\n)/ buffer = String.new.b @@ -701,14 +668,13 @@ def decode_lines(enum) end end - # @private + # @api private # # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream # # @param lines [Enumerable] # # @return [Hash{Symbol=>Object}] - # def decode_sse(lines) # rubocop:disable Metrics/BlockLength chain_fused(lines) do |y| diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index fd80c3c5..85abd5cb 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -44,16 +44,19 @@ module OpenAI PLATFORM_HEADERS = T::Hash[String, String] class << self + # @api private sig { params(req: OpenAI::BaseClient::RequestComponentsShape).void } def validate!(req) end + # @api private sig do params(status: Integer, headers: T.any(T::Hash[String, String], Net::HTTPHeader)).returns(T::Boolean) end def should_retry?(status, headers:) end + # @api private sig do params( request: OpenAI::BaseClient::RequestInputShape, @@ -74,6 +77,7 @@ module OpenAI def requester=(_) end + # @api private sig do params( base_url: String, @@ -98,14 +102,17 @@ module OpenAI ) end + # @api private sig { overridable.returns(T::Hash[String, String]) } private def auth_headers end + # @api private sig { returns(String) } private def generate_idempotency_key end + # @api private sig do overridable .params(req: OpenAI::BaseClient::RequestComponentsShape, opts: T::Hash[Symbol, T.anything]) @@ -114,10 +121,12 @@ module OpenAI private def build_request(req, opts) end + # @api private sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } private def retry_delay(headers, retry_count:) end + # @api private sig do params( request: OpenAI::BaseClient::RequestInputShape, @@ -130,6 +139,8 @@ module OpenAI private def send_request(request, redirect_count:, retry_count:, send_retry_header:) end + # Execute the request specified by `req`. This is the method that all resource + # methods call into. sig do params( method: Symbol, diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index f64d87e2..b0ed4c49 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -1,19 +1,23 @@ # typed: strong module OpenAI + # @api private module Converter abstract! Input = T.type_alias { T.any(OpenAI::Converter, T::Class[T.anything]) } + # @api private sig { overridable.params(value: T.anything).returns(T.anything) } def coerce(value) end + # @api private sig { overridable.params(value: T.anything).returns(T.anything) } def dump(value) end + # @api private sig do overridable .params(value: T.anything) @@ -23,6 +27,7 @@ module OpenAI end class << self + # @api private sig do params( spec: T.any( @@ -40,20 +45,40 @@ module OpenAI def self.type_info(spec) end + # @api private + # + # Based on `target`, transform `value` into `target`, to the extent possible: + # + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. 
otherwise, the given `value` unaltered sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } def self.coerce(target, value) end + # @api private sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } def self.dump(target, value) end + # @api private + # + # The underlying algorithm for computing maximal compatibility is subject to + # future improvements. + # + # Similar to `#.coerce`, used to determine the best union variant to decode into. + # + # 1. determine if strict-ish coercion is possible + # 2. return either result of successful coercion or if loose coercion is possible + # 3. return a score for recursively tallied count for fields that can be coerced sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } def self.try_strict_coerce(target, value) end end end + # When we don't know what to expect for the value. class Unknown abstract! @@ -68,14 +93,17 @@ module OpenAI end class << self + # @api private sig { override.params(value: T.anything).returns(T.anything) } def coerce(value) end + # @api private sig { override.params(value: T.anything).returns(T.anything) } def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -86,6 +114,7 @@ module OpenAI end end + # Ruby has no Boolean class; this is something for models to refer to. class BooleanModel abstract! @@ -100,14 +129,17 @@ module OpenAI end class << self + # @api private sig { override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) } def coerce(value) end + # @api private sig { override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) } def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -118,16 +150,30 @@ module OpenAI end end + # A value from among a specified list of options. OpenAPI enum values map to Ruby + # values in the SDK as follows: + # + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol + # + # We can therefore convert string values to Symbols, but can't convert other + # values safely. class Enum abstract! extend OpenAI::Converter class << self + # All of the valid Symbol values for this enum. sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } def values end + # @api private + # + # Guard against thread safety issues by instantiating `@values`. sig { void } private def finalize! end @@ -142,14 +188,17 @@ module OpenAI end class << self + # @api private sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } def coerce(value) end + # @api private sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -166,18 +215,26 @@ module OpenAI extend OpenAI::Converter class << self + # @api private + # + # All of the specified variant info for this union. sig { returns(T::Array[[T.nilable(Symbol), Proc]]) } private def known_variants end + # @api private + # + # All of the specified variants for this union. 
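The enum mapping rules documented above boil down to one safe conversion: strings become Symbols. Here is a hedged sketch of that rule in isolation, using the audio response formats documented later in this patch as the value set; it is one plausible reading of the rule, not the SDK's actual `Enum` implementation.

```ruby
# Only String => Symbol is a safe conversion, so every other value passes
# through unchanged and is left for validation elsewhere.
module MiniAudioResponseFormat
  def self.values = %i[json text srt verbose_json vtt]

  def self.coerce(value) = value.is_a?(String) ? value.to_sym : value
end

MiniAudioResponseFormat.coerce("json") # => :json
MiniAudioResponseFormat.coerce(:vtt)   # => :vtt
MiniAudioResponseFormat.coerce(42)     # => 42
```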
sig { overridable.returns(T::Array[[T.nilable(Symbol), T.anything]]) } protected def variants end + # @api private sig { params(property: Symbol).void } private def discriminator(property) end + # @api private sig do params( key: T.any( @@ -197,6 +254,7 @@ module OpenAI private def variant(key, spec = nil) end + # @api private sig { params(value: T.anything).returns(T.nilable(OpenAI::Converter::Input)) } private def resolve_variant(value) end @@ -211,14 +269,17 @@ module OpenAI end class << self + # @api private sig { override.params(value: T.anything).returns(T.anything) } def coerce(value) end + # @api private sig { override.params(value: T.anything).returns(T.anything) } def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -229,6 +290,7 @@ module OpenAI end end + # Array of items of a given type. class ArrayOf abstract! @@ -242,6 +304,7 @@ module OpenAI def ==(other) end + # @api private sig do override .params(value: T.any(T::Enumerable[T.anything], T.anything)) @@ -250,6 +313,7 @@ module OpenAI def coerce(value) end + # @api private sig do override .params(value: T.any(T::Enumerable[T.anything], T.anything)) @@ -258,6 +322,7 @@ module OpenAI def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -266,10 +331,12 @@ module OpenAI def try_strict_coerce(value) end + # @api private sig { returns(OpenAI::Converter::Input) } protected def item_type end + # @api private sig do params( type_info: T.any( @@ -285,6 +352,7 @@ module OpenAI end end + # Hash of items of a given type. class HashOf abstract! @@ -298,6 +366,7 @@ module OpenAI def ==(other) end + # @api private sig do override .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) @@ -306,6 +375,7 @@ module OpenAI def coerce(value) end + # @api private sig do override .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) @@ -314,6 +384,7 @@ module OpenAI def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -322,10 +393,12 @@ module OpenAI def try_strict_coerce(value) end + # @api private sig { returns(OpenAI::Converter::Input) } protected def item_type end + # @api private sig do params( type_info: T.any( @@ -349,6 +422,10 @@ module OpenAI KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean} } class << self + # @api private + # + # Assumes superclass fields are totally defined before fields are accessed / + # defined on subclasses. 
sig do returns( T::Hash[Symbol, @@ -367,10 +444,12 @@ module OpenAI def fields end + # @api private sig { returns(T::Hash[Symbol, T.proc.returns(T::Class[T.anything])]) } def defaults end + # @api private sig do params( name_sym: Symbol, @@ -393,6 +472,7 @@ module OpenAI private def add_field(name_sym, required:, type_info:, spec:) end + # @api private sig do params( name_sym: Symbol, @@ -408,6 +488,7 @@ module OpenAI def required(name_sym, type_info, spec = {}) end + # @api private sig do params( name_sym: Symbol, @@ -423,10 +504,17 @@ module OpenAI def optional(name_sym, type_info, spec = {}) end + # @api private + # + # `request_only` attributes not excluded from `.#coerce` when receiving responses + # even if well behaved servers should not send them sig { params(blk: T.proc.void).void } private def request_only(&blk) end + # @api private + # + # `response_only` attributes are omitted from `.#dump` when making requests sig { params(blk: T.proc.void).void } private def response_only(&blk) end @@ -437,6 +525,7 @@ module OpenAI end class << self + # @api private sig do override .params(value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything)) @@ -445,6 +534,7 @@ module OpenAI def coerce(value) end + # @api private sig do override .params(value: T.any(T.attached_class, T.anything)) @@ -453,6 +543,7 @@ module OpenAI def dump(value) end + # @api private sig do override .params(value: T.anything) @@ -462,10 +553,24 @@ module OpenAI end end + # Returns the raw value associated with the given key, if found. Otherwise, nil is + # returned. + # + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. sig { params(key: Symbol).returns(T.nilable(T.anything)) } def [](key) end + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. sig { overridable.returns(T::Hash[Symbol, T.anything]) } def to_h end @@ -476,6 +581,7 @@ module OpenAI def deconstruct_keys(keys) end + # Create a new instance of a model. sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } def self.new(data = {}) end diff --git a/rbi/lib/openai/base_page.rbi b/rbi/lib/openai/base_page.rbi index b4a17615..bf6ab11f 100644 --- a/rbi/lib/openai/base_page.rbi +++ b/rbi/lib/openai/base_page.rbi @@ -1,6 +1,7 @@ # typed: strong module OpenAI + # @api private module BasePage abstract! 
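The raw-access docs above pair with the typed readers generated for each model. A short usage sketch follows, built on the `Audio::Transcription` model that appears later in this patch; the `:extra` key is an invented example of probing an undocumented field.

```ruby
model = OpenAI::Models::Audio::Transcription.new(text: "hello")

model.text    # => "hello"          typed accessor
model[:text]  # => "hello"          raw Symbol lookup, no parsing applied
model[:extra] # => nil              undocumented keys may be probed safely
model.to_h    # => {text: "hello"}  raw data; shared with the model, so
              #                     do not mutate the returned hash
```

Note that `model["text"]` would raise `ArgumentError`, since lookup is Symbol-only per the doc comment above.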
@@ -24,6 +25,7 @@ module OpenAI alias_method :enum_for, :to_enum + # @api private sig do params( client: OpenAI::BaseClient, diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index 8b829bd1..79324aa7 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -1,6 +1,7 @@ # typed: strong module OpenAI + # @api private module BaseStream Message = type_member(:in) Elem = type_member(:out) @@ -9,6 +10,7 @@ module OpenAI def close end + # @api private sig { overridable.returns(T::Enumerable[Elem]) } private def iterator end @@ -23,6 +25,7 @@ module OpenAI alias_method :enum_for, :to_enum + # @api private sig do params( model: T.any(T::Class[T.anything], OpenAI::Converter), diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index ba6253be..4b8256ce 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -78,10 +78,12 @@ module OpenAI def responses end + # @api private sig { override.returns(T::Hash[String, String]) } private def auth_headers end + # Creates and returns a new client for interacting with the API. sig do params( base_url: T.nilable(String), diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi index 69c98916..1408d718 100644 --- a/rbi/lib/openai/errors.rbi +++ b/rbi/lib/openai/errors.rbi @@ -35,6 +35,7 @@ module OpenAI def type end + # @api private sig do params( url: URI::Generic, @@ -71,6 +72,7 @@ module OpenAI def type end + # @api private sig do params( url: URI::Generic, @@ -87,6 +89,7 @@ module OpenAI end class APITimeoutError < OpenAI::APIConnectionError + # @api private sig do params( url: URI::Generic, @@ -103,6 +106,7 @@ module OpenAI end class APIStatusError < OpenAI::APIError + # @api private sig do params( url: URI::Generic, @@ -133,6 +137,7 @@ module OpenAI def type end + # @api private sig do params( url: URI::Generic, diff --git a/rbi/lib/openai/extern.rbi b/rbi/lib/openai/extern.rbi index ca7768e3..b47bd767 100644 --- a/rbi/lib/openai/extern.rbi +++ b/rbi/lib/openai/extern.rbi @@ -1,6 +1,7 @@ # typed: strong module OpenAI + # @api private module Extern abstract! end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index db0763fb..feb3ae70 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The text to generate audio for. The maximum length is 4096 characters. sig { returns(String) } def input end @@ -15,6 +16,8 @@ module OpenAI def input=(_) end + # One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # `tts-1` or `tts-1-hd` sig { returns(T.any(String, Symbol)) } def model end @@ -23,6 +26,10 @@ module OpenAI def model=(_) end + # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the + # voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). sig { returns(Symbol) } def voice end @@ -31,6 +38,8 @@ module OpenAI def voice=(_) end + # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, + # `wav`, and `pcm`. sig { returns(T.nilable(Symbol)) } def response_format end @@ -39,6 +48,8 @@ module OpenAI def response_format=(_) end + # The speed of the generated audio. 
Select a value from `0.25` to `4.0`. `1.0` is + # the default. sig { returns(T.nilable(Float)) } def speed end @@ -77,16 +88,23 @@ module OpenAI def to_hash end + # One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # `tts-1` or `tts-1-hd` class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the + # voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). class Voice < OpenAI::Enum abstract! @@ -107,6 +125,8 @@ module OpenAI end end + # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, + # `wav`, and `pcm`. class ResponseFormat < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index c314036c..bc8940ae 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Audio class Transcription < OpenAI::BaseModel + # The transcribed text. sig { returns(String) } def text end @@ -12,6 +13,8 @@ module OpenAI def text=(_) end + # Represents a transcription response returned by model, based on the provided + # input. sig { params(text: String).returns(T.attached_class) } def self.new(text:) end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 324c2060..cc5e0e40 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The audio file object (not file name) to transcribe, in one of these formats: + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(IO, StringIO)) } def file end @@ -15,6 +17,8 @@ module OpenAI def file=(_) end + # ID of the model to use. Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. sig { returns(T.any(String, Symbol)) } def model end @@ -23,6 +27,9 @@ module OpenAI def model=(_) end + # The language of the input audio. Supplying the input language in + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. sig { returns(T.nilable(String)) } def language end @@ -31,6 +38,10 @@ module OpenAI def language=(_) end + # An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. sig { returns(T.nilable(String)) } def prompt end @@ -39,6 +50,8 @@ module OpenAI def prompt=(_) end + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. sig { returns(T.nilable(Symbol)) } def response_format end @@ -47,6 +60,11 @@ module OpenAI def response_format=(_) end + # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. 
If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } def temperature end @@ -55,6 +73,11 @@ module OpenAI def temperature=(_) end + # The timestamp granularities to populate for this transcription. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. sig { returns(T.nilable(T::Array[Symbol])) } def timestamp_granularities end @@ -106,10 +129,13 @@ module OpenAI def to_hash end + # ID of the model to use. Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index ba16b2e7..49982089 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -3,10 +3,13 @@ module OpenAI module Models module Audio + # Represents a transcription response returned by model, based on the provided + # input. class TranscriptionCreateResponse < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi index 2e1f1c65..92a534a2 100644 --- a/rbi/lib/openai/models/audio/transcription_segment.rbi +++ b/rbi/lib/openai/models/audio/transcription_segment.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Audio class TranscriptionSegment < OpenAI::BaseModel + # Unique identifier of the segment. sig { returns(Integer) } def id end @@ -12,6 +13,8 @@ module OpenAI def id=(_) end + # Average logprob of the segment. If the value is lower than -1, consider the + # logprobs failed. sig { returns(Float) } def avg_logprob end @@ -20,6 +23,8 @@ module OpenAI def avg_logprob=(_) end + # Compression ratio of the segment. If the value is greater than 2.4, consider the + # compression failed. sig { returns(Float) } def compression_ratio end @@ -28,6 +33,7 @@ module OpenAI def compression_ratio=(_) end + # End time of the segment in seconds. sig { returns(Float) } def end_ end @@ -36,6 +42,8 @@ module OpenAI def end_=(_) end + # Probability of no speech in the segment. If the value is higher than 1.0 and the + # `avg_logprob` is below -1, consider this segment silent. sig { returns(Float) } def no_speech_prob end @@ -44,6 +52,7 @@ module OpenAI def no_speech_prob=(_) end + # Seek offset of the segment. sig { returns(Integer) } def seek end @@ -52,6 +61,7 @@ module OpenAI def seek=(_) end + # Start time of the segment in seconds. sig { returns(Float) } def start end @@ -60,6 +70,7 @@ module OpenAI def start=(_) end + # Temperature parameter used for generating the segment. sig { returns(Float) } def temperature end @@ -68,6 +79,7 @@ module OpenAI def temperature=(_) end + # Text content of the segment. sig { returns(String) } def text end @@ -76,6 +88,7 @@ module OpenAI def text=(_) end + # Array of token IDs for the text content. 
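The segment-level quality fields above are designed for filtering. A small illustrative helper applying the documented thresholds (the thresholds come straight from the doc comments; the helper itself is not part of the SDK):

```ruby
# Keep only segments whose heuristics look sound: an avg_logprob below -1
# suggests the logprobs failed, and a compression_ratio above 2.4 suggests
# compression failed.
def usable_segments(segments)
  segments.select do |seg|
    seg.avg_logprob >= -1.0 && seg.compression_ratio <= 2.4
  end
end
```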
sig { returns(T::Array[Integer]) } def tokens end diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 7dbf05e8..55eb1b23 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Audio class TranscriptionVerbose < OpenAI::BaseModel + # The duration of the input audio. sig { returns(Float) } def duration end @@ -12,6 +13,7 @@ module OpenAI def duration=(_) end + # The language of the input audio. sig { returns(String) } def language end @@ -20,6 +22,7 @@ module OpenAI def language=(_) end + # The transcribed text. sig { returns(String) } def text end @@ -28,6 +31,7 @@ module OpenAI def text=(_) end + # Segments of the transcribed text and their corresponding details. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } def segments end @@ -39,6 +43,7 @@ module OpenAI def segments=(_) end + # Extracted words and their corresponding timestamps. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionWord])) } def words end @@ -50,6 +55,8 @@ module OpenAI def words=(_) end + # Represents a verbose json transcription response returned by model, based on the + # provided input. sig do params( duration: Float, diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/lib/openai/models/audio/transcription_word.rbi index 8a81058f..a3be4b46 100644 --- a/rbi/lib/openai/models/audio/transcription_word.rbi +++ b/rbi/lib/openai/models/audio/transcription_word.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Audio class TranscriptionWord < OpenAI::BaseModel + # End time of the word in seconds. sig { returns(Float) } def end_ end @@ -12,6 +13,7 @@ module OpenAI def end_=(_) end + # Start time of the word in seconds. sig { returns(Float) } def start end @@ -20,6 +22,7 @@ module OpenAI def start=(_) end + # The text content of the word. sig { returns(String) } def word end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index c8175a8d..e1a51573 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The audio file object (not file name) translate, in one of these formats: flac, + # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(IO, StringIO)) } def file end @@ -15,6 +17,8 @@ module OpenAI def file=(_) end + # ID of the model to use. Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. sig { returns(T.any(String, Symbol)) } def model end @@ -23,6 +27,10 @@ module OpenAI def model=(_) end + # An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should be in English. sig { returns(T.nilable(String)) } def prompt end @@ -31,6 +39,8 @@ module OpenAI def prompt=(_) end + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. sig { returns(T.nilable(Symbol)) } def response_format end @@ -39,6 +49,11 @@ module OpenAI def response_format=(_) end + # The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } def temperature end @@ -77,10 +92,13 @@ module OpenAI def to_hash end + # ID of the model to use. Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 9a25186f..79f531fc 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -7,6 +7,7 @@ module OpenAI abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index 566f2db2..bceb7944 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Audio class TranslationVerbose < OpenAI::BaseModel + # The duration of the input audio. sig { returns(Float) } def duration end @@ -12,6 +13,7 @@ module OpenAI def duration=(_) end + # The language of the output translation (always `english`). sig { returns(String) } def language end @@ -20,6 +22,7 @@ module OpenAI def language=(_) end + # The translated text. sig { returns(String) } def text end @@ -28,6 +31,7 @@ module OpenAI def text=(_) end + # Segments of the translated text and their corresponding details. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } def segments end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 087af985..2acd496e 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. class AudioResponseFormat < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi index 5d106690..b1d97ad3 100644 --- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class AutoFileChunkingStrategyParam < OpenAI::BaseModel + # Always `auto`. sig { returns(Symbol) } def type end @@ -11,6 +12,8 @@ module OpenAI def type=(_) end + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto) end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index cec4b9fb..95ad26fe 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -11,6 +11,7 @@ module OpenAI def id=(_) end + # The time frame within which the batch should be processed. 
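Tying the translation parameters above together, here is a hedged request sketch. The `client.audio.translations.create` accessor is assumed from the SDK's resource conventions and is not shown in this excerpt.

```ruby
translation = client.audio.translations.create(
  file: File.open("speech.m4a", "rb"), # any IO; streamed per this series
  model: "whisper-1",                  # the only supported model, per the docs
  response_format: :json,
  temperature: 0                       # 0 lets the model raise it automatically
)
```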
sig { returns(String) } def completion_window end @@ -19,6 +20,7 @@ module OpenAI def completion_window=(_) end + # The Unix timestamp (in seconds) for when the batch was created. sig { returns(Integer) } def created_at end @@ -27,6 +29,7 @@ module OpenAI def created_at=(_) end + # The OpenAI API endpoint used by the batch. sig { returns(String) } def endpoint end @@ -35,6 +38,7 @@ module OpenAI def endpoint=(_) end + # The ID of the input file for the batch. sig { returns(String) } def input_file_id end @@ -43,6 +47,7 @@ module OpenAI def input_file_id=(_) end + # The object type, which is always `batch`. sig { returns(Symbol) } def object end @@ -51,6 +56,7 @@ module OpenAI def object=(_) end + # The current status of the batch. sig { returns(Symbol) } def status end @@ -59,6 +65,7 @@ module OpenAI def status=(_) end + # The Unix timestamp (in seconds) for when the batch was cancelled. sig { returns(T.nilable(Integer)) } def cancelled_at end @@ -67,6 +74,7 @@ module OpenAI def cancelled_at=(_) end + # The Unix timestamp (in seconds) for when the batch started cancelling. sig { returns(T.nilable(Integer)) } def cancelling_at end @@ -75,6 +83,7 @@ module OpenAI def cancelling_at=(_) end + # The Unix timestamp (in seconds) for when the batch was completed. sig { returns(T.nilable(Integer)) } def completed_at end @@ -83,6 +92,7 @@ module OpenAI def completed_at=(_) end + # The ID of the file containing the outputs of requests with errors. sig { returns(T.nilable(String)) } def error_file_id end @@ -99,6 +109,7 @@ module OpenAI def errors=(_) end + # The Unix timestamp (in seconds) for when the batch expired. sig { returns(T.nilable(Integer)) } def expired_at end @@ -107,6 +118,7 @@ module OpenAI def expired_at=(_) end + # The Unix timestamp (in seconds) for when the batch will expire. sig { returns(T.nilable(Integer)) } def expires_at end @@ -115,6 +127,7 @@ module OpenAI def expires_at=(_) end + # The Unix timestamp (in seconds) for when the batch failed. sig { returns(T.nilable(Integer)) } def failed_at end @@ -123,6 +136,7 @@ module OpenAI def failed_at=(_) end + # The Unix timestamp (in seconds) for when the batch started finalizing. sig { returns(T.nilable(Integer)) } def finalizing_at end @@ -131,6 +145,7 @@ module OpenAI def finalizing_at=(_) end + # The Unix timestamp (in seconds) for when the batch started processing. sig { returns(T.nilable(Integer)) } def in_progress_at end @@ -139,6 +154,12 @@ module OpenAI def in_progress_at=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -147,6 +168,7 @@ module OpenAI def metadata=(_) end + # The ID of the file containing the outputs of successfully executed requests. sig { returns(T.nilable(String)) } def output_file_id end @@ -155,6 +177,7 @@ module OpenAI def output_file_id=(_) end + # The request counts for different statuses within the batch. sig { returns(T.nilable(OpenAI::Models::BatchRequestCounts)) } def request_counts end @@ -242,6 +265,7 @@ module OpenAI def to_hash end + # The current status of the batch. class Status < OpenAI::Enum abstract! @@ -270,6 +294,7 @@ module OpenAI def data=(_) end + # The object type, which is always `list`. 
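
A sketch of inspecting the `Batch` fields documented above. The
`client.batches.retrieve` call and the `batch_abc123` ID are assumptions, as are
the concrete `Status` enum members (their values are elided in this hunk); the
readers (`status`, `output_file_id`, `error_file_id`, `created_at`) come from
the signatures in this file.

    batch = client.batches.retrieve("batch_abc123") # hypothetical call and ID

    case batch.status
    when :completed # enum member assumed for illustration
      puts "results in file #{batch.output_file_id}"
    when :failed
      puts "errors in file #{batch.error_file_id}"
    else
      puts "#{batch.status} since #{Time.at(batch.created_at)}" # Unix seconds
    end
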
sig { returns(T.nilable(String)) } def object end diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index ff5efeab..6588dc92 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The time frame within which the batch should be processed. Currently only `24h` + # is supported. sig { returns(Symbol) } def completion_window end @@ -14,6 +16,10 @@ module OpenAI def completion_window=(_) end + # The endpoint to be used for all requests in the batch. Currently + # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. + # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 + # embedding inputs across all requests in the batch. sig { returns(Symbol) } def endpoint end @@ -22,6 +28,15 @@ module OpenAI def endpoint=(_) end + # The ID of an uploaded file that contains requests for the new batch. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. sig { returns(String) } def input_file_id end @@ -30,6 +45,12 @@ module OpenAI def input_file_id=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -66,6 +87,8 @@ module OpenAI def to_hash end + # The time frame within which the batch should be processed. Currently only `24h` + # is supported. class CompletionWindow < OpenAI::Enum abstract! @@ -78,6 +101,10 @@ module OpenAI end end + # The endpoint to be used for all requests in the batch. Currently + # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. + # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 + # embedding inputs across all requests in the batch. class Endpoint < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/lib/openai/models/batch_error.rbi index b0742afb..ded1e3bb 100644 --- a/rbi/lib/openai/models/batch_error.rbi +++ b/rbi/lib/openai/models/batch_error.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class BatchError < OpenAI::BaseModel + # An error code identifying the error type. sig { returns(T.nilable(String)) } def code end @@ -11,6 +12,7 @@ module OpenAI def code=(_) end + # The line number of the input file where the error occurred, if applicable. sig { returns(T.nilable(Integer)) } def line end @@ -19,6 +21,7 @@ module OpenAI def line=(_) end + # A human-readable message providing more details about the error. sig { returns(T.nilable(String)) } def message end @@ -27,6 +30,7 @@ module OpenAI def message=(_) end + # The name of the parameter that caused the error, if applicable. 
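
The create-side counterpart, sketched from the `BatchCreateParams` fields above.
The `client.batches.create` call shape and the file ID are assumptions; the
documented values (`24h`, `/v1/chat/completions`, the JSONL input file uploaded
with purpose `batch`) come straight from the comments.

    client.batches.create(
      completion_window: :"24h",               # only 24h is currently supported
      endpoint: :"/v1/chat/completions",       # or /v1/embeddings, /v1/completions
      input_file_id: "file-abc123",            # hypothetical JSONL upload ID
      metadata: {"project" => "nightly-evals"} # up to 16 string key-value pairs
    )
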
sig { returns(T.nilable(String)) } def param end diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 5d04cdfd..8a8368b8 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -6,6 +6,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -14,6 +18,8 @@ module OpenAI def after=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/lib/openai/models/batch_request_counts.rbi index 59b86c64..df746c81 100644 --- a/rbi/lib/openai/models/batch_request_counts.rbi +++ b/rbi/lib/openai/models/batch_request_counts.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class BatchRequestCounts < OpenAI::BaseModel + # Number of requests that have been completed successfully. sig { returns(Integer) } def completed end @@ -11,6 +12,7 @@ module OpenAI def completed=(_) end + # Number of requests that have failed. sig { returns(Integer) } def failed end @@ -19,6 +21,7 @@ module OpenAI def failed=(_) end + # Total number of requests in the batch. sig { returns(Integer) } def total end @@ -27,6 +30,7 @@ module OpenAI def total=(_) end + # The request counts for different statuses within the batch. sig { params(completed: Integer, failed: Integer, total: Integer).returns(T.attached_class) } def self.new(completed:, failed:, total:) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index c573af22..c6f6f83f 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class Assistant < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the assistant was created. sig { returns(Integer) } def created_at end @@ -20,6 +22,7 @@ module OpenAI def created_at=(_) end + # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } def description end @@ -28,6 +31,8 @@ module OpenAI def description=(_) end + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. sig { returns(T.nilable(String)) } def instructions end @@ -36,6 +41,12 @@ module OpenAI def instructions=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -44,6 +55,11 @@ module OpenAI def metadata=(_) end + # ID of the model to use. 
You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(String) } def model end @@ -52,6 +68,7 @@ module OpenAI def model=(_) end + # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } def name end @@ -60,6 +77,7 @@ module OpenAI def name=(_) end + # The object type, which is always `assistant`. sig { returns(Symbol) } def object end @@ -68,6 +86,9 @@ module OpenAI def object=(_) end + # A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T::Array[ @@ -105,6 +126,26 @@ module OpenAI def tools=(_) end + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -145,6 +186,9 @@ module OpenAI def response_format=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } def temperature end @@ -153,6 +197,10 @@ module OpenAI def temperature=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) } def tool_resources end @@ -164,6 +212,11 @@ module OpenAI def tool_resources=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -172,6 +225,7 @@ module OpenAI def top_p=(_) end + # Represents an `assistant` that can call the model and use tools. 
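
The `response_format` union above accepts either a Symbol or one of the
`ResponseFormat*` models. Below is a plain-hash rendering of the two JSON shapes
quoted in the comment; whether a bare hash is accepted here, rather than a model
instance, is an assumption, and the schema contents are illustrative only.

    # JSON mode: remember to also instruct the model to produce JSON.
    json_mode = {type: :json_object}

    # Structured Outputs: the model must match the supplied schema.
    structured = {
      type: :json_schema,
      json_schema: {name: "answer", schema: {type: "object"}} # illustrative
    }
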
sig do params( id: String, @@ -279,6 +333,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, @@ -302,6 +360,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter`` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -320,6 +381,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 9287af82..37fd1121 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -7,6 +7,11 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, Symbol)) } def model end @@ -15,6 +20,7 @@ module OpenAI def model=(_) end + # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } def description end @@ -23,6 +29,8 @@ module OpenAI def description=(_) end + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. sig { returns(T.nilable(String)) } def instructions end @@ -31,6 +39,12 @@ module OpenAI def instructions=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -39,6 +53,7 @@ module OpenAI def metadata=(_) end + # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } def name end @@ -47,6 +62,12 @@ module OpenAI def name=(_) end + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(Symbol)) } def reasoning_effort end @@ -55,6 +76,26 @@ module OpenAI def reasoning_effort=(_) end + # Specifies the format that the model must output. 
Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -95,6 +136,9 @@ module OpenAI def response_format=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } def temperature end @@ -103,6 +147,10 @@ module OpenAI def temperature=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) } def tool_resources end @@ -114,6 +162,9 @@ module OpenAI def tool_resources=(_) end + # A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T.nilable( @@ -153,6 +204,11 @@ module OpenAI def tools=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -242,10 +298,16 @@ module OpenAI def to_hash end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end @@ -275,6 +337,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
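
Pulling the documented create params together into a sketch. The
`client.beta.assistants.create` call shape and the hash form of the tool entry
are assumptions; the field names and limits (name up to 256 characters,
instructions up to 256,000 characters, at most 128 tools) are from the comments
above.

    client.beta.assistants.create(
      model: "gpt-4o",                    # String or Symbol per the Model union
      name: "Data Analyst",               # max 256 characters
      instructions: "Analyze the CSV files the user uploads.",
      tools: [{type: :code_interpreter}], # up to 128 tools; hash shape assumed
      temperature: 0.2                    # alter this or top_p, not both
    )
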
sig do params( code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, @@ -298,6 +364,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -316,6 +385,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end @@ -324,6 +397,10 @@ module OpenAI def vector_store_ids=(_) end + # A helper to create a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this assistant. There can be a maximum of 1 + # vector store attached to the assistant. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) @@ -362,6 +439,8 @@ module OpenAI end class VectorStore < OpenAI::BaseModel + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. sig do returns( T.nilable( @@ -392,6 +471,9 @@ module OpenAI def chunking_strategy=(_) end + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -400,6 +482,12 @@ module OpenAI def file_ids=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -438,10 +526,13 @@ module OpenAI def to_hash end + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. class ChunkingStrategy < OpenAI::Union abstract! class Auto < OpenAI::BaseModel + # Always `auto`. sig { returns(Symbol) } def type end @@ -450,6 +541,8 @@ module OpenAI def type=(_) end + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto) end @@ -479,6 +572,7 @@ module OpenAI def static=(_) end + # Always `static`. sig { returns(Symbol) } def type end @@ -510,6 +604,9 @@ module OpenAI end class Static < OpenAI::BaseModel + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } def chunk_overlap_tokens end @@ -518,6 +615,8 @@ module OpenAI def chunk_overlap_tokens=(_) end + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. 
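
The vector-store chunking options above, rendered as a hash sketch. The nesting
(`type` plus a `static` payload) mirrors the `Auto` and `Static` classes in this
hunk; the defaults and the overlap constraint are quoted from the comments,
while the bare-hash form itself is an assumption.

    chunking_strategy = {
      type: :static,
      static: {
        max_chunk_size_tokens: 800, # default 800; min 100, max 4096
        chunk_overlap_tokens: 400   # default 400; at most half the chunk size
      }
    }
    # or simply {type: :auto} for the default strategy
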
sig { returns(Integer) } def max_chunk_size_tokens end @@ -542,6 +641,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 8653e3c0..1e331b4a 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -7,6 +7,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -15,6 +19,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -23,6 +31,8 @@ module OpenAI def before=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -31,6 +41,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -67,6 +79,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index 1e789450..c79726bd 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -3,10 +3,31 @@ module OpenAI module Models module Beta + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. 
class AssistantResponseFormatOption < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 7ac9987b..4684ef7c 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -3,10 +3,32 @@ module OpenAI module Models module Beta + # Represents an event emitted when streaming a Run. + # + # Each event in a server-sent events stream has an `event` and `data` property: + # + # ``` + # event: thread.created + # data: {"id": "thread_123", "object": "thread", ...} + # ``` + # + # We emit events whenever a new object is created, transitions to a new state, or + # is being streamed in parts (deltas). For example, we emit `thread.run.created` + # when a new run is created, `thread.run.completed` when a run completes, and so + # on. When an Assistant chooses to create a message during a run, we emit a + # `thread.message.created event`, a `thread.message.in_progress` event, many + # `thread.message.delta` events, and finally a `thread.message.completed` event. + # + # We may add additional events over time, so we recommend handling unknown events + # gracefully in your code. See the + # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) + # to learn how to integrate the Assistants API with streaming. class AssistantStreamEvent < OpenAI::Union abstract! class ThreadCreated < OpenAI::BaseModel + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } def data end @@ -23,6 +45,7 @@ module OpenAI def event=(_) end + # Whether to enable input audio transcription. sig { returns(T.nilable(T::Boolean)) } def enabled end @@ -31,6 +54,9 @@ module OpenAI def enabled=(_) end + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. sig { params(data: OpenAI::Models::Beta::Thread, enabled: T::Boolean, event: Symbol).returns(T.attached_class) } def self.new(data:, enabled: nil, event: :"thread.created") end @@ -41,6 +67,8 @@ module OpenAI end class ThreadRunCreated < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -57,6 +85,8 @@ module OpenAI def event=(_) end + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.created") end @@ -67,6 +97,8 @@ module OpenAI end class ThreadRunQueued < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -83,6 +115,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.queued") end @@ -93,6 +127,8 @@ module OpenAI end class ThreadRunInProgress < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
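
A consumption sketch for the stream-event union described above. How the stream
is obtained is an assumption; the event classes and their `data` readers come
from the definitions in this file, the `thread_id` reader on the run is assumed,
and the `else` branch follows the comment's advice to handle unknown events
gracefully.

    stream.each do |event| # assumed to yield AssistantStreamEvent members
      case event
      when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated
        run = event.data # an OpenAI::Models::Beta::Threads::Run
        puts "run created on thread #{run.thread_id}" # reader assumed
      when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued
        puts "run queued"
      else
        # unknown or unhandled event types: ignore gracefully
      end
    end
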
sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -109,6 +145,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.in_progress") end @@ -119,6 +157,8 @@ module OpenAI end class ThreadRunRequiresAction < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -135,6 +175,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.requires_action") end @@ -145,6 +187,8 @@ module OpenAI end class ThreadRunCompleted < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -161,6 +205,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.completed") end @@ -171,6 +217,8 @@ module OpenAI end class ThreadRunIncomplete < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -187,6 +235,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.incomplete") end @@ -197,6 +247,8 @@ module OpenAI end class ThreadRunFailed < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -213,6 +265,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.failed") end @@ -223,6 +277,8 @@ module OpenAI end class ThreadRunCancelling < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -239,6 +295,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.cancelling") end @@ -249,6 +307,8 @@ module OpenAI end class ThreadRunCancelled < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -265,6 +325,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.cancelled") end @@ -275,6 +337,8 @@ module OpenAI end class ThreadRunExpired < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -291,6 +355,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.expired") end @@ -301,6 +367,7 @@ module OpenAI end class ThreadRunStepCreated < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -320,6 +387,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.created") end @@ -330,6 +400,7 @@ module OpenAI end class ThreadRunStepInProgress < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -349,6 +420,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.in_progress") end @@ -359,6 +433,8 @@ module OpenAI end class ThreadRunStepDelta < OpenAI::BaseModel + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } def data end @@ -378,6 +454,9 @@ module OpenAI def event=(_) end + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. sig do params(data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol) .returns(T.attached_class) @@ -391,6 +470,7 @@ module OpenAI end class ThreadRunStepCompleted < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -410,6 +490,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.completed") end @@ -420,6 +503,7 @@ module OpenAI end class ThreadRunStepFailed < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -439,6 +523,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. 
sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.failed") end @@ -449,6 +536,7 @@ module OpenAI end class ThreadRunStepCancelled < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -468,6 +556,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.cancelled") end @@ -478,6 +569,7 @@ module OpenAI end class ThreadRunStepExpired < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -497,6 +589,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.expired") end @@ -507,6 +602,8 @@ module OpenAI end class ThreadMessageCreated < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -523,6 +620,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.created") end @@ -533,6 +633,8 @@ module OpenAI end class ThreadMessageInProgress < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -549,6 +651,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.in_progress") end @@ -559,6 +664,8 @@ module OpenAI end class ThreadMessageDelta < OpenAI::BaseModel + # Represents a message delta i.e. any changed fields on a message during + # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } def data end @@ -578,6 +685,9 @@ module OpenAI def event=(_) end + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. sig { params(data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.delta") end @@ -588,6 +698,8 @@ module OpenAI end class ThreadMessageCompleted < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -604,6 +716,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. 
sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.completed") end @@ -614,6 +729,8 @@ module OpenAI end class ThreadMessageIncomplete < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -630,6 +747,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.incomplete") end @@ -656,6 +776,9 @@ module OpenAI def event=(_) end + # Occurs when an + # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + # This can happen due to an internal server error or a timeout. sig { params(data: OpenAI::Models::ErrorObject, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :error) end @@ -666,6 +789,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index 18612436..520704fc 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -7,6 +7,7 @@ module OpenAI abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 7d82ebcf..1cec31b5 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class AssistantToolChoice < OpenAI::BaseModel + # The type of the tool. If type is `function`, the function name must be set sig { returns(Symbol) } def type end @@ -23,6 +24,8 @@ module OpenAI def function=(_) end + # Specifies a tool the model should use. Use to force the model to call a specific + # tool. sig do params(type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction).returns(T.attached_class) end @@ -33,6 +36,7 @@ module OpenAI def to_hash end + # The type of the tool. If type is `function`, the function name must be set class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi index be6c8d7f..6e594ce1 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class AssistantToolChoiceFunction < OpenAI::BaseModel + # The name of the function to call. sig { returns(String) } def name end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 6ae486e1..38c43dba 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -3,9 +3,20 @@ module OpenAI module Models module Beta + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. 
`auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. class AssistantToolChoiceOption < OpenAI::Union abstract! + # `none` means the model will not call any tools and instead generates a message. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools before + # responding to the user. class Auto < OpenAI::Enum abstract! @@ -21,6 +32,7 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Beta::AssistantToolChoice]]) } private def variants end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 0c75a7fb..446f263d 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } def description end @@ -15,6 +16,8 @@ module OpenAI def description=(_) end + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. sig { returns(T.nilable(String)) } def instructions end @@ -23,6 +26,12 @@ module OpenAI def instructions=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -31,6 +40,11 @@ module OpenAI def metadata=(_) end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -39,6 +53,7 @@ module OpenAI def model=(_) end + # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } def name end @@ -47,6 +62,12 @@ module OpenAI def name=(_) end + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(Symbol)) } def reasoning_effort end @@ -55,6 +76,26 @@ module OpenAI def reasoning_effort=(_) end + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -95,6 +136,9 @@ module OpenAI def response_format=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } def temperature end @@ -103,6 +147,10 @@ module OpenAI def temperature=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) } def tool_resources end @@ -114,6 +162,9 @@ module OpenAI def tool_resources=(_) end + # A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T.nilable( @@ -153,6 +204,11 @@ module OpenAI def tools=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -242,9 +298,19 @@ module OpenAI def to_hash end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class Model < OpenAI::Union abstract! + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class AssistantSupportedModels < OpenAI::Enum abstract! @@ -287,6 +353,7 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end @@ -316,6 +383,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
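
An update sketch built from the fields above. The
`client.beta.assistants.update` call shape and the assistant ID are assumptions;
the `reasoning_effort` values come from the comment, and since the concrete
`AssistantSupportedModels` members are elided in this hunk, the String side of
the Model union is used.

    client.beta.assistants.update(
      "asst_abc123",           # hypothetical assistant ID
      model: "gpt-4o",         # String side of the Model union
      reasoning_effort: :low,  # o-series models only: low | medium | high
      metadata: {"tier" => "beta"}
    )
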
sig do params( code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, @@ -339,6 +410,10 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # Overrides the list of + # [file](https://platform.openai.com/docs/api-reference/files) IDs made available + # to the `code_interpreter` tool. There can be a maximum of 20 files associated + # with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -357,6 +432,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # Overrides the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi index a560387e..ba0e9924 100644 --- a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class CodeInterpreterTool < OpenAI::BaseModel + # The type of tool being defined: `code_interpreter` sig { returns(Symbol) } def type end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index a7d4bbaf..bf120b56 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class FileSearchTool < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -12,6 +13,7 @@ module OpenAI def type=(_) end + # Overrides for the file search tool. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch)) } def file_search end @@ -35,6 +37,14 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The maximum number of results the file search tool should output. The default is + # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + # 1 and 50 inclusive. + # + # Note that the file search tool may output fewer than `max_num_results` results. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(Integer)) } def max_num_results end @@ -43,6 +53,12 @@ module OpenAI def max_num_results=(_) end + # The ranking options for the file search. If not specified, the file search tool + # will use the `auto` ranker and a score_threshold of 0. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions)) } def ranking_options end @@ -54,6 +70,7 @@ module OpenAI def ranking_options=(_) end + # Overrides for the file search tool. sig do params( max_num_results: Integer, @@ -74,6 +91,8 @@ module OpenAI end class RankingOptions < OpenAI::BaseModel + # The score threshold for the file search. All values must be a floating point + # number between 0 and 1. sig { returns(Float) } def score_threshold end @@ -82,6 +101,8 @@ module OpenAI def score_threshold=(_) end + # The ranker to use for the file search. If not specified will use the `auto` + # ranker. 
sig { returns(T.nilable(Symbol)) } def ranker end @@ -90,6 +111,12 @@ module OpenAI def ranker=(_) end + # The ranking options for the file search. If not specified, the file search tool + # will use the `auto` ranker and a score_threshold of 0. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { params(score_threshold: Float, ranker: Symbol).returns(T.attached_class) } def self.new(score_threshold:, ranker: nil) end @@ -98,6 +125,8 @@ module OpenAI def to_hash end + # The ranker to use for the file search. If not specified will use the `auto` + # ranker. class Ranker < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index 0c7a758f..645a4c1c 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -12,6 +12,7 @@ module OpenAI def function=(_) end + # The type of tool being defined: `function` sig { returns(Symbol) } def type end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 135308ce..c00a5caa 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -3,10 +3,15 @@ module OpenAI module Models module Beta + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. class MessageStreamEvent < OpenAI::Union abstract! class ThreadMessageCreated < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -23,6 +28,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.created") end @@ -33,6 +41,8 @@ module OpenAI end class ThreadMessageInProgress < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -49,6 +59,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.in_progress") end @@ -59,6 +72,8 @@ module OpenAI end class ThreadMessageDelta < OpenAI::BaseModel + # Represents a message delta i.e. any changed fields on a message during + # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } def data end @@ -78,6 +93,9 @@ module OpenAI def event=(_) end + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. sig { params(data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.delta") end @@ -88,6 +106,8 @@ module OpenAI end class ThreadMessageCompleted < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
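
Both constructors for the file-search overrides are fully visible above, so this
sketch is grounded in their signatures; only the numeric choices are
illustrative.

    ranking = OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions.new(
      score_threshold: 0.5, # between 0 and 1
      ranker: :auto         # may be omitted; `auto` is then used
    )
    file_search = OpenAI::Models::Beta::FileSearchTool::FileSearch.new(
      max_num_results: 10,  # 1..50; defaults are 20 (gpt-4*) and 5 (gpt-3.5-turbo)
      ranking_options: ranking
    )
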
sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -104,6 +124,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.completed") end @@ -114,6 +137,8 @@ module OpenAI end class ThreadMessageIncomplete < OpenAI::BaseModel + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } def data end @@ -130,6 +155,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.message.incomplete") end @@ -140,6 +168,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 1adb063a..b2ebe6e2 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -3,10 +3,14 @@ module OpenAI module Models module Beta + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. class RunStepStreamEvent < OpenAI::Union abstract! class ThreadRunStepCreated < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -26,6 +30,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.created") end @@ -36,6 +43,7 @@ module OpenAI end class ThreadRunStepInProgress < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -55,6 +63,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.in_progress") end @@ -65,6 +76,8 @@ module OpenAI end class ThreadRunStepDelta < OpenAI::BaseModel + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } def data end @@ -84,6 +97,9 @@ module OpenAI def event=(_) end + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. sig do params(data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol) .returns(T.attached_class) @@ -97,6 +113,7 @@ module OpenAI end class ThreadRunStepCompleted < OpenAI::BaseModel + # Represents a step in execution of a run. 
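Taken together, the message lifecycle events above lend themselves to a single dispatch loop. A sketch only: the `stream` enumerable and the exact deserialized event classes are assumptions, not part of this patch:

```ruby
# Sketch: react to message lifecycle events from an assumed event stream.
stream.each do |event|
  case event.event
  in :"thread.message.delta"
    p event.data # a MessageDeltaEvent carrying only the changed fields
  in :"thread.message.completed" | :"thread.message.incomplete"
    break # terminal states for this message
  else
    nil # :"thread.message.created" and :"thread.message.in_progress"
  end
end
```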
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -116,6 +133,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.completed") end @@ -126,6 +146,7 @@ module OpenAI end class ThreadRunStepFailed < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -145,6 +166,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.failed") end @@ -155,6 +179,7 @@ module OpenAI end class ThreadRunStepCancelled < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -174,6 +199,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.cancelled") end @@ -184,6 +212,7 @@ module OpenAI end class ThreadRunStepExpired < OpenAI::BaseModel + # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } def data end @@ -203,6 +232,9 @@ module OpenAI def event=(_) end + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.step.expired") end @@ -213,6 +245,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 47d3fd71..aff962cb 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -3,10 +3,14 @@ module OpenAI module Models module Beta + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. class RunStreamEvent < OpenAI::Union abstract! class ThreadRunCreated < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -23,6 +27,8 @@ module OpenAI def event=(_) end + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.created") end @@ -33,6 +39,8 @@ module OpenAI end class ThreadRunQueued < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -49,6 +57,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. 
sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.queued") end @@ -59,6 +69,8 @@ module OpenAI end class ThreadRunInProgress < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -75,6 +87,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.in_progress") end @@ -85,6 +99,8 @@ module OpenAI end class ThreadRunRequiresAction < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -101,6 +117,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.requires_action") end @@ -111,6 +129,8 @@ module OpenAI end class ThreadRunCompleted < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -127,6 +147,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.completed") end @@ -137,6 +159,8 @@ module OpenAI end class ThreadRunIncomplete < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -153,6 +177,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.incomplete") end @@ -163,6 +189,8 @@ module OpenAI end class ThreadRunFailed < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -179,6 +207,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.failed") end @@ -189,6 +219,8 @@ module OpenAI end class ThreadRunCancelling < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -205,6 +237,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. 
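The run events follow the same pattern, with `requires_action` as the interesting branch. Again a hedged sketch, with `stream` and the `submit_tool_outputs` helper assumed rather than taken from this patch:

```ruby
# Sketch: drive a run to a terminal state using the event names above.
stream.each do |event|
  case event.event
  in :"thread.run.requires_action"
    submit_tool_outputs(event.data) # hypothetical helper
  in :"thread.run.completed"
    break
  in :"thread.run.failed" | :"thread.run.cancelled" | :"thread.run.expired"
    raise "run ended early: #{event.event}"
  else
    nil # queued / in_progress / cancelling / incomplete transitions
  end
end
```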
sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.cancelling") end @@ -215,6 +249,8 @@ module OpenAI end class ThreadRunCancelled < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -231,6 +267,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.cancelled") end @@ -241,6 +279,8 @@ module OpenAI end class ThreadRunExpired < OpenAI::BaseModel + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } def data end @@ -257,6 +297,8 @@ module OpenAI def event=(_) end + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } def self.new(data:, event: :"thread.run.expired") end @@ -267,6 +309,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index f7263e57..799f589a 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Beta class Thread < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the thread was created. sig { returns(Integer) } def created_at end @@ -20,6 +22,12 @@ module OpenAI def created_at=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -28,6 +36,7 @@ module OpenAI def metadata=(_) end + # The object type, which is always `thread`. sig { returns(Symbol) } def object end @@ -36,6 +45,10 @@ module OpenAI def object=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) } def tool_resources end @@ -47,6 +60,8 @@ module OpenAI def tool_resources=(_) end + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). sig do params( id: String, @@ -98,6 +113,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
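Concretely, the per-thread tool resources described here reduce to a small nested shape. In hash form, with placeholder IDs:

```ruby
# Sketch of a thread's tool_resources; limits per the docs above.
tool_resources = {
  code_interpreter: {file_ids: ["file_abc123"]},  # at most 20 files
  file_search: {vector_store_ids: ["vs_abc123"]}  # at most 1 vector store
}
```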
sig do params( code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, @@ -121,6 +140,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -139,6 +161,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index d0dd117f..c6f68453 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -7,6 +7,9 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. sig { returns(String) } def assistant_id end @@ -15,6 +18,8 @@ module OpenAI def assistant_id=(_) end + # Override the default system message of the assistant. This is useful for + # modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } def instructions end @@ -23,6 +28,11 @@ module OpenAI def instructions=(_) end + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } def max_completion_tokens end @@ -31,6 +41,11 @@ module OpenAI def max_completion_tokens=(_) end + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } def max_prompt_tokens end @@ -39,6 +54,12 @@ module OpenAI def max_prompt_tokens=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -47,6 +68,10 @@ module OpenAI def metadata=(_) end + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. 
sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -55,6 +80,9 @@ module OpenAI def model=(_) end + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T.nilable(T::Boolean)) } def parallel_tool_calls end @@ -63,6 +91,26 @@ module OpenAI def parallel_tool_calls=(_) end + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -103,6 +151,9 @@ module OpenAI def response_format=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } def temperature end @@ -111,6 +162,8 @@ module OpenAI def temperature=(_) end + # Options to create a new thread. If no thread is provided when running a request, + # an empty thread will be created. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread)) } def thread end @@ -122,6 +175,13 @@ module OpenAI def thread=(_) end + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end @@ -133,6 +193,10 @@ module OpenAI def tool_choice=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) } def tool_resources end @@ -144,6 +208,8 @@ module OpenAI def tool_resources=(_) end + # Override the tools the assistant can use for this run. This is useful for + # modifying the behavior on a per-run basis. 
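These per-run knobs combine into one request. The sketch below assumes a configured `client` and a `create_and_run`-style method (neither appears in this hunk); the parameter names are the ones documented above:

```ruby
# Hypothetical request. Note the JSON-mode caveat: the instructions must
# ask for JSON, or the model may stream whitespace until the token limit.
client.beta.threads.create_and_run(
  assistant_id: "asst_abc123",
  instructions: "Answer with a single JSON object.",
  response_format: {type: "json_object"},
  tool_choice: {type: "file_search"}, # force one tool instead of auto
  max_prompt_tokens: 2_048,
  max_completion_tokens: 512
)
```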
sig do
returns(
T.nilable(
T::Array[
T.any(
OpenAI::Models::Beta::CodeInterpreterTool,
OpenAI::Models::Beta::FileSearchTool,
OpenAI::Models::Beta::FunctionTool
)
]
)
)
end
def tools
end
@@ -187,6 +253,11 @@ module OpenAI
def tools=(_)
end

+ # An alternative to sampling with temperature, called nucleus sampling, where the
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
+ #
+ # We generally recommend altering this or temperature but not both.
sig { returns(T.nilable(Float)) }
def top_p
end
@@ -195,6 +266,8 @@ module OpenAI
def top_p=(_)
end

+ # Controls for how a thread will be truncated prior to the run. Use this to
+ # control the initial context window of the run.
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) }
def truncation_strategy
end
@@ -303,10 +376,15 @@ module OpenAI
def to_hash
end

+ # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
+ # be used to execute this run. If a value is provided here, it will override the
+ # model associated with the assistant. If not, the model associated with the
+ # assistant will be used.
class Model < OpenAI::Union
abstract!

class << self
+ # @api private
sig { override.returns([[NilClass, String], [NilClass, Symbol]]) }
private def variants
end
end
end

class Thread < OpenAI::BaseModel
+ # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
+ # start the thread with.
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message])) }
def messages
end
@@ -325,6 +405,12 @@ module OpenAI
def messages=(_)
end

+ # Set of 16 key-value pairs that can be attached to an object. This can be useful
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
+ #
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
sig { returns(T.nilable(OpenAI::Models::Metadata)) }
def metadata
end
@@ -333,6 +419,10 @@ module OpenAI
def metadata=(_)
end

+ # A set of resources that are made available to the assistant's tools in this
+ # thread. The resources are specific to the type of tool. For example, the
+ # `code_interpreter` tool requires a list of file IDs, while the `file_search`
+ # tool requires a list of vector store IDs.
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) }
def tool_resources
end
@@ -344,6 +434,8 @@ module OpenAI
def tool_resources=(_)
end

+ # Options to create a new thread. If no thread is provided when running a request,
+ # an empty thread will be created.
sig do
params(
messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message],
@@ -369,6 +461,7 @@ module OpenAI
end

class Message < OpenAI::BaseModel
+ # The text contents of the message.
sig do
returns(
T.any(
@@ -415,6 +508,12 @@ module OpenAI
def content=(_)
end

+ # The role of the entity that is creating the message. Allowed values include:
+ #
+ # - `user`: Indicates the message is sent by an actual user and should be used in
+ #   most cases to represent user-generated messages.
+ # - `assistant`: Indicates the message is generated by the assistant. Use this
+ #   value to insert messages from the assistant into the conversation.
sig { returns(Symbol) }
def role
end
@@ -423,6 +522,7 @@ module OpenAI
def role=(_)
end

+ # A list of files attached to the message, and the tools they should be added to.
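A starter message exercising the role, attachment, and metadata fields documented above might look like this (hash form, placeholder IDs):

```ruby
# Sketch: one user message seeding a new thread.
thread = {
  messages: [
    {
      role: :user,
      content: "Summarize the attached report.",
      attachments: [{file_id: "file_abc123", tools: [{type: "file_search"}]}],
      metadata: {"source" => "upload"} # <= 16 pairs; 64/512-char key/value caps
    }
  ]
}
```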
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment])) } def attachments end @@ -436,6 +536,12 @@ module OpenAI def attachments=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -488,6 +594,7 @@ module OpenAI def to_hash end + # The text contents of the message. class Content < OpenAI::Union abstract! @@ -502,6 +609,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -525,6 +633,12 @@ module OpenAI end end + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. class Role < OpenAI::Enum abstract! @@ -539,6 +653,7 @@ module OpenAI end class Attachment < OpenAI::BaseModel + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } def file_id end @@ -547,6 +662,7 @@ module OpenAI def file_id=(_) end + # The tools to add this file to. sig do returns( T.nilable( @@ -619,6 +735,7 @@ module OpenAI abstract! class FileSearch < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -637,6 +754,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -673,6 +791,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, @@ -696,6 +818,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -714,6 +839,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end @@ -722,6 +851,10 @@ module OpenAI def vector_store_ids=(_) end + # A helper to create a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. sig do returns( T.nilable( @@ -766,6 +899,8 @@ module OpenAI end class VectorStore < OpenAI::BaseModel + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. 
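The helper described above means a caller never has to create the vector store directly. A minimal sketch with placeholder file IDs:

```ruby
# Sketch: let the API build this thread's (single) vector store.
file_search = {
  vector_stores: [
    {file_ids: ["file_1", "file_2"]} # up to 10,000 files per vector store
  ]
}
```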
sig do returns( T.nilable( @@ -796,6 +931,9 @@ module OpenAI def chunking_strategy=(_) end + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -804,6 +942,12 @@ module OpenAI def file_ids=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -842,10 +986,13 @@ module OpenAI def to_hash end + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. class ChunkingStrategy < OpenAI::Union abstract! class Auto < OpenAI::BaseModel + # Always `auto`. sig { returns(Symbol) } def type end @@ -854,6 +1001,8 @@ module OpenAI def type=(_) end + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto) end @@ -883,6 +1032,7 @@ module OpenAI def static=(_) end + # Always `static`. sig { returns(Symbol) } def type end @@ -914,6 +1064,9 @@ module OpenAI end class Static < OpenAI::BaseModel + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } def chunk_overlap_tokens end @@ -922,6 +1075,8 @@ module OpenAI def chunk_overlap_tokens=(_) end + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } def max_chunk_size_tokens end @@ -948,6 +1103,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -986,6 +1142,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, @@ -1009,6 +1169,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -1027,6 +1190,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end @@ -1049,6 +1216,7 @@ module OpenAI abstract! class << self + # @api private sig do override .returns( @@ -1061,6 +1229,10 @@ module OpenAI end class TruncationStrategy < OpenAI::BaseModel + # The truncation strategy to use for the thread. The default is `auto`. 
If set to
+ # `last_messages`, the thread will be truncated to the n most recent messages in
+ # the thread. When set to `auto`, messages in the middle of the thread will be
+ # dropped to fit the context length of the model, `max_prompt_tokens`.
sig { returns(Symbol) }
def type
end
@@ -1069,6 +1241,8 @@ module OpenAI
def type=(_)
end

+ # The number of most recent messages from the thread when constructing the context
+ # for the run.
sig { returns(T.nilable(Integer)) }
def last_messages
end
@@ -1077,6 +1251,8 @@ module OpenAI
def last_messages=(_)
end

+ # Controls for how a thread will be truncated prior to the run. Use this to
+ # control the initial context window of the run.
sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) }
def self.new(type:, last_messages: nil)
end
@@ -1085,6 +1261,10 @@ module OpenAI
def to_hash
end

+ # The truncation strategy to use for the thread. The default is `auto`. If set to
+ # `last_messages`, the thread will be truncated to the n most recent messages in
+ # the thread. When set to `auto`, messages in the middle of the thread will be
+ # dropped to fit the context length of the model, `max_prompt_tokens`.
class Type < OpenAI::Enum
abstract!
diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi
index 627808ef..670f30b9 100644
--- a/rbi/lib/openai/models/beta/thread_create_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_params.rbi
@@ -7,6 +7,8 @@ module OpenAI
extend OpenAI::RequestParameters::Converter
include OpenAI::RequestParameters

+ # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
+ # start the thread with.
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message])) }
def messages
end
@@ -18,6 +20,12 @@ module OpenAI
def messages=(_)
end

+ # Set of 16 key-value pairs that can be attached to an object. This can be useful
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
+ #
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
sig { returns(T.nilable(OpenAI::Models::Metadata)) }
def metadata
end
@@ -26,6 +34,10 @@ module OpenAI
def metadata=(_)
end

+ # A set of resources that are made available to the assistant's tools in this
+ # thread. The resources are specific to the type of tool. For example, the
+ # `code_interpreter` tool requires a list of file IDs, while the `file_search`
+ # tool requires a list of vector store IDs.
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) }
def tool_resources
end
@@ -64,6 +76,7 @@ module OpenAI
end

class Message < OpenAI::BaseModel
+ # The text contents of the message.
sig do
returns(
T.any(
@@ -110,6 +123,12 @@ module OpenAI
def content=(_)
end

+ # The role of the entity that is creating the message. Allowed values include:
+ #
+ # - `user`: Indicates the message is sent by an actual user and should be used in
+ #   most cases to represent user-generated messages.
+ # - `assistant`: Indicates the message is generated by the assistant. Use this
+ #   value to insert messages from the assistant into the conversation.
sig { returns(Symbol) }
def role
end
@@ -118,6 +137,7 @@ module OpenAI
def role=(_)
end

+ # A list of files attached to the message, and the tools they should be added to.
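For example, pinning the run's context to the most recent messages uses the `last_messages` variant, matching the constructor `sig` above:

```ruby
# Sketch: keep only the 8 newest messages when building the run's context.
truncation_strategy = OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy.new(
  type: :last_messages,
  last_messages: 8
)
# With type: :auto (the default), middle messages are dropped instead to
# fit the model's context length.
```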
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment])) } def attachments end @@ -129,6 +149,12 @@ module OpenAI def attachments=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -181,6 +207,7 @@ module OpenAI def to_hash end + # The text contents of the message. class Content < OpenAI::Union abstract! @@ -195,6 +222,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -218,6 +246,12 @@ module OpenAI end end + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. class Role < OpenAI::Enum abstract! @@ -232,6 +266,7 @@ module OpenAI end class Attachment < OpenAI::BaseModel + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } def file_id end @@ -240,6 +275,7 @@ module OpenAI def file_id=(_) end + # The tools to add this file to. sig do returns( T.nilable( @@ -312,6 +348,7 @@ module OpenAI abstract! class FileSearch < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -330,6 +367,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -366,6 +404,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, @@ -389,6 +431,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -407,6 +452,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end @@ -415,6 +464,10 @@ module OpenAI def vector_store_ids=(_) end + # A helper to create a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) @@ -453,6 +506,8 @@ module OpenAI end class VectorStore < OpenAI::BaseModel + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. 
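Where the `auto` strategy is not appropriate, a static strategy can be spelled out. The sketch below respects the documented constraints (overlap at most half the chunk size):

```ruby
# Sketch: explicit static chunking (the defaults are 800/400 tokens).
chunking_strategy = {
  type: "static",
  static: {
    max_chunk_size_tokens: 1_200, # allowed range: 100..4096
    chunk_overlap_tokens: 600     # must not exceed half the chunk size
  }
}
```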
sig do returns( T.nilable( @@ -483,6 +538,9 @@ module OpenAI def chunking_strategy=(_) end + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -491,6 +549,12 @@ module OpenAI def file_ids=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -529,10 +593,13 @@ module OpenAI def to_hash end + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. class ChunkingStrategy < OpenAI::Union abstract! class Auto < OpenAI::BaseModel + # Always `auto`. sig { returns(Symbol) } def type end @@ -541,6 +608,8 @@ module OpenAI def type=(_) end + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto) end @@ -570,6 +639,7 @@ module OpenAI def static=(_) end + # Always `static`. sig { returns(Symbol) } def type end @@ -601,6 +671,9 @@ module OpenAI end class Static < OpenAI::BaseModel + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } def chunk_overlap_tokens end @@ -609,6 +682,8 @@ module OpenAI def chunk_overlap_tokens=(_) end + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } def max_chunk_size_tokens end @@ -633,6 +708,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index df43a6d2..b7db3495 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class ThreadStreamEvent < OpenAI::BaseModel + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } def data end @@ -20,6 +22,7 @@ module OpenAI def event=(_) end + # Whether to enable input audio transcription. sig { returns(T.nilable(T::Boolean)) } def enabled end @@ -28,6 +31,9 @@ module OpenAI def enabled=(_) end + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. 
sig { params(data: OpenAI::Models::Beta::Thread, enabled: T::Boolean, event: Symbol).returns(T.attached_class) } def self.new(data:, enabled: nil, event: :"thread.created") end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index f95411f9..2748a8cc 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -7,6 +7,12 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -15,6 +21,10 @@ module OpenAI def metadata=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) } def tool_resources end @@ -73,6 +83,10 @@ module OpenAI def file_search=(_) end + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, @@ -96,6 +110,9 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -114,6 +131,10 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } def vector_store_ids end diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 6510f8df..9311bee4 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -4,10 +4,14 @@ module OpenAI module Models module Beta module Threads + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. class Annotation < OpenAI::Union abstract! 
class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index 81699c4b..fad66836 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -4,10 +4,14 @@ module OpenAI module Models module Beta module Threads + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. class AnnotationDelta < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index 631db0a5..75c0a985 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -32,6 +32,7 @@ module OpenAI def start_index=(_) end + # The text in the message content that needs to be replaced. sig { returns(String) } def text end @@ -40,6 +41,7 @@ module OpenAI def text=(_) end + # Always `file_citation`. sig { returns(Symbol) } def type end @@ -48,6 +50,9 @@ module OpenAI def type=(_) end + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. sig do params( end_index: Integer, @@ -77,6 +82,7 @@ module OpenAI end class FileCitation < OpenAI::BaseModel + # The ID of the specific File the citation is from. sig { returns(String) } def file_id end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index f2e75732..7031c979 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class FileCitationDeltaAnnotation < OpenAI::BaseModel + # The index of the annotation in the text content part. sig { returns(Integer) } def index end @@ -13,6 +14,7 @@ module OpenAI def index=(_) end + # Always `file_citation`. sig { returns(Symbol) } def type end @@ -48,6 +50,7 @@ module OpenAI def start_index=(_) end + # The text in the message content that needs to be replaced. sig { returns(T.nilable(String)) } def text end @@ -56,6 +59,9 @@ module OpenAI def text=(_) end + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. sig do params( index: Integer, @@ -87,6 +93,7 @@ module OpenAI end class FileCitation < OpenAI::BaseModel + # The ID of the specific File the citation is from. sig { returns(T.nilable(String)) } def file_id end @@ -95,6 +102,7 @@ module OpenAI def file_id=(_) end + # The specific quote in the file. 
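Reading these annotations back out of message text is mostly pattern matching. A sketch, assuming a text content part that exposes `annotations` and accessors named after the nested models:

```ruby
# Sketch: resolve both annotation kinds defined in these models.
text.annotations.each do |annotation|
  case annotation
  in OpenAI::Models::Beta::Threads::FileCitationAnnotation
    puts "#{annotation.text} cites file #{annotation.file_citation.file_id}"
  in OpenAI::Models::Beta::Threads::FilePathAnnotation
    puts "#{annotation.text} points at generated file #{annotation.file_path.file_id}"
  end
end
```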
sig { returns(T.nilable(String)) } def quote end diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 0feca1b6..c5e902d9 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -32,6 +32,7 @@ module OpenAI def start_index=(_) end + # The text in the message content that needs to be replaced. sig { returns(String) } def text end @@ -40,6 +41,7 @@ module OpenAI def text=(_) end + # Always `file_path`. sig { returns(Symbol) } def type end @@ -48,6 +50,8 @@ module OpenAI def type=(_) end + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. sig do params( end_index: Integer, @@ -77,6 +81,7 @@ module OpenAI end class FilePath < OpenAI::BaseModel + # The ID of the file that was generated. sig { returns(String) } def file_id end diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index f12cbc1f..3da258f2 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class FilePathDeltaAnnotation < OpenAI::BaseModel + # The index of the annotation in the text content part. sig { returns(Integer) } def index end @@ -13,6 +14,7 @@ module OpenAI def index=(_) end + # Always `file_path`. sig { returns(Symbol) } def type end @@ -48,6 +50,7 @@ module OpenAI def start_index=(_) end + # The text in the message content that needs to be replaced. sig { returns(T.nilable(String)) } def text end @@ -56,6 +59,8 @@ module OpenAI def text=(_) end + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. sig do params( index: Integer, @@ -87,6 +92,7 @@ module OpenAI end class FilePath < OpenAI::BaseModel + # The ID of the file that was generated. sig { returns(T.nilable(String)) } def file_id end diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 5f75f9fc..dccaa783 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class ImageFile < OpenAI::BaseModel + # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. sig { returns(String) } def file_id end @@ -13,6 +16,8 @@ module OpenAI def file_id=(_) end + # Specifies the detail level of the image if specified by the user. `low` uses + # fewer tokens, you can opt in to high resolution using `high`. sig { returns(T.nilable(Symbol)) } def detail end @@ -29,6 +34,8 @@ module OpenAI def to_hash end + # Specifies the detail level of the image if specified by the user. `low` uses + # fewer tokens, you can opt in to high resolution using `high`. class Detail < OpenAI::Enum abstract! 
diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi
index ebd1e9c5..281b7cb7 100644
--- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi
@@ -13,6 +13,7 @@ module OpenAI
def image_file=(_)
end

+ # Always `image_file`.
sig { returns(Symbol) }
def type
end
@@ -21,6 +22,8 @@ module OpenAI
def type=(_)
end

+ # References an image [File](https://platform.openai.com/docs/api-reference/files)
+ # in the content of a message.
sig { params(image_file: OpenAI::Models::Beta::Threads::ImageFile, type: Symbol).returns(T.attached_class) }
def self.new(image_file:, type: :image_file)
end
diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
index 2f6a3435..faa92642 100644
--- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
@@ -5,6 +5,8 @@ module OpenAI
module Beta
module Threads
class ImageFileDelta < OpenAI::BaseModel
+ # Specifies the detail level of the image if specified by the user. `low` uses
+ # fewer tokens, you can opt in to high resolution using `high`.
sig { returns(T.nilable(Symbol)) }
def detail
end
@@ -13,6 +15,9 @@ module OpenAI
def detail=(_)
end

+ # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image
+ # in the message content. Set `purpose="vision"` when uploading the File if you
+ # need to later display the file content.
sig { returns(T.nilable(String)) }
def file_id
end
@@ -29,6 +34,8 @@ module OpenAI
def to_hash
end

+ # Specifies the detail level of the image if specified by the user. `low` uses
+ # fewer tokens, you can opt in to high resolution using `high`.
class Detail < OpenAI::Enum
abstract!
diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi
index dd93fbb6..9dba68e1 100644
--- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi
@@ -5,6 +5,7 @@ module OpenAI
module Beta
module Threads
class ImageFileDeltaBlock < OpenAI::BaseModel
+ # The index of the content part in the message.
sig { returns(Integer) }
def index
end
@@ -13,6 +14,7 @@ module OpenAI
def index=(_)
end

+ # Always `image_file`.
sig { returns(Symbol) }
def type
end
@@ -32,6 +34,8 @@ module OpenAI
def image_file=(_)
end

+ # References an image [File](https://platform.openai.com/docs/api-reference/files)
+ # in the content of a message.
sig do
params(index: Integer, image_file: OpenAI::Models::Beta::Threads::ImageFileDelta, type: Symbol)
.returns(T.attached_class)
diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi
index 512367f1..7baa0ea2 100644
--- a/rbi/lib/openai/models/beta/threads/image_url.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url.rbi
@@ -5,6 +5,8 @@ module OpenAI
module Beta
module Threads
class ImageURL < OpenAI::BaseModel
+ # The external URL of the image, must be one of the supported image types: jpeg,
+ # jpg, png, gif, webp.
sig { returns(String) }
def url
end
@@ -13,6 +15,8 @@ module OpenAI
def url=(_)
end

+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`. Default value is `auto`.
sig { returns(T.nilable(Symbol)) }
def detail
end
@@ -29,6 +33,8 @@ module OpenAI
def to_hash
end

+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`. Default value is `auto`.
class Detail < OpenAI::Enum
abstract!
diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
index 13a1daf7..a6c65020 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
@@ -13,6 +13,7 @@ module OpenAI
def image_url=(_)
end

+ # The type of the content part.
sig { returns(Symbol) }
def type
end
@@ -21,6 +22,7 @@ module OpenAI
def type=(_)
end

+ # References an image URL in the content of a message.
sig { params(image_url: OpenAI::Models::Beta::Threads::ImageURL, type: Symbol).returns(T.attached_class) }
def self.new(image_url:, type: :image_url)
end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
index b5ffce82..59edca5a 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
@@ -5,6 +5,8 @@ module OpenAI
module Beta
module Threads
class ImageURLDelta < OpenAI::BaseModel
+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`.
sig { returns(T.nilable(Symbol)) }
def detail
end
@@ -13,6 +15,8 @@ module OpenAI
def detail=(_)
end

+ # The URL of the image, must be one of the supported image types: jpeg, jpg, png,
+ # gif, webp.
sig { returns(T.nilable(String)) }
def url
end
@@ -29,6 +33,8 @@ module OpenAI
def to_hash
end

+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`.
class Detail < OpenAI::Enum
abstract!
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
index 38635411..9f8e8803 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
@@ -5,6 +5,7 @@ module OpenAI
module Beta
module Threads
class ImageURLDeltaBlock < OpenAI::BaseModel
+ # The index of the content part in the message.
sig { returns(Integer) }
def index
end
@@ -13,6 +14,7 @@ module OpenAI
def index=(_)
end

+ # Always `image_url`.
sig { returns(Symbol) }
def type
end
@@ -32,6 +34,7 @@ module OpenAI
def image_url=(_)
end

+ # References an image URL in the content of a message.
sig do
params(index: Integer, image_url: OpenAI::Models::Beta::Threads::ImageURLDelta, type: Symbol)
.returns(T.attached_class)
diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi
index 3f40f84e..f27ae4ab 100644
--- a/rbi/lib/openai/models/beta/threads/message.rbi
+++ b/rbi/lib/openai/models/beta/threads/message.rbi
@@ -5,6 +5,7 @@ module OpenAI
module Beta
module Threads
class Message < OpenAI::BaseModel
+ # The identifier, which can be referenced in API endpoints.
sig { returns(String) }
def id
end
@@ -13,6 +14,9 @@ module OpenAI
def id=(_)
end

+ # If applicable, the ID of the
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) that
+ # authored this message.
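The two image inputs differ only in where the pixels come from. A sketch of each, using the detail levels documented above (keyword initializers assumed from the field `sig`s):

```ruby
# Sketch: file-backed vs. URL-backed image content.
by_file = OpenAI::Models::Beta::Threads::ImageFile.new(
  file_id: "file_abc123", # upload with purpose="vision" to display it later
  detail: :low            # fewer tokens
)
by_url = OpenAI::Models::Beta::Threads::ImageURL.new(
  url: "https://example.com/chart.png", # jpeg/jpg/png/gif/webp only
  detail: :high                         # opt in to high resolution
)
```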
sig { returns(T.nilable(String)) } def assistant_id end @@ -21,6 +25,7 @@ module OpenAI def assistant_id=(_) end + # A list of files attached to the message, and the tools they were added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment])) } def attachments end @@ -32,6 +37,7 @@ module OpenAI def attachments=(_) end + # The Unix timestamp (in seconds) for when the message was completed. sig { returns(T.nilable(Integer)) } def completed_at end @@ -40,6 +46,7 @@ module OpenAI def completed_at=(_) end + # The content of the message in array of text and/or images. sig do returns( T::Array[ @@ -80,6 +87,7 @@ module OpenAI def content=(_) end + # The Unix timestamp (in seconds) for when the message was created. sig { returns(Integer) } def created_at end @@ -88,6 +96,7 @@ module OpenAI def created_at=(_) end + # The Unix timestamp (in seconds) for when the message was marked as incomplete. sig { returns(T.nilable(Integer)) } def incomplete_at end @@ -96,6 +105,7 @@ module OpenAI def incomplete_at=(_) end + # On an incomplete message, details about why the message is incomplete. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails)) } def incomplete_details end @@ -107,6 +117,12 @@ module OpenAI def incomplete_details=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -115,6 +131,7 @@ module OpenAI def metadata=(_) end + # The object type, which is always `thread.message`. sig { returns(Symbol) } def object end @@ -123,6 +140,7 @@ module OpenAI def object=(_) end + # The entity that produced the message. One of `user` or `assistant`. sig { returns(Symbol) } def role end @@ -131,6 +149,9 @@ module OpenAI def role=(_) end + # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + # associated with the creation of this message. Value is `null` when messages are + # created manually using the create message or create thread endpoints. sig { returns(T.nilable(String)) } def run_id end @@ -139,6 +160,8 @@ module OpenAI def run_id=(_) end + # The status of the message, which can be either `in_progress`, `incomplete`, or + # `completed`. sig { returns(Symbol) } def status end @@ -147,6 +170,8 @@ module OpenAI def status=(_) end + # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that + # this message belongs to. sig { returns(String) } def thread_id end @@ -155,6 +180,8 @@ module OpenAI def thread_id=(_) end + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig do params( id: String, @@ -231,6 +258,7 @@ module OpenAI end class Attachment < OpenAI::BaseModel + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } def file_id end @@ -239,6 +267,7 @@ module OpenAI def file_id=(_) end + # The tools to add this file to. sig do returns( T.nilable( @@ -311,6 +340,7 @@ module OpenAI abstract! 
class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -329,6 +359,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -342,6 +373,7 @@ module OpenAI end class IncompleteDetails < OpenAI::BaseModel + # The reason the message is incomplete. sig { returns(Symbol) } def reason end @@ -350,6 +382,7 @@ module OpenAI def reason=(_) end + # On an incomplete message, details about why the message is incomplete. sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason:) end @@ -358,6 +391,7 @@ module OpenAI def to_hash end + # The reason the message is incomplete. class Reason < OpenAI::Enum abstract! @@ -375,6 +409,7 @@ module OpenAI end end + # The entity that produced the message. One of `user` or `assistant`. class Role < OpenAI::Enum abstract! @@ -388,6 +423,8 @@ module OpenAI end end + # The status of the message, which can be either `in_progress`, `incomplete`, or + # `completed`. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index b2972189..591b21e4 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -4,10 +4,13 @@ module OpenAI module Models module Beta module Threads + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. class MessageContent < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index fff1ee0b..a91d5b77 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -4,10 +4,13 @@ module OpenAI module Models module Beta module Threads + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. class MessageContentDelta < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index 82903174..65c8d43f 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -4,10 +4,13 @@ module OpenAI module Models module Beta module Threads + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. class MessageContentPartParam < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 4a1db9ed..8169f3b8 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -8,6 +8,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The text contents of the message. sig do returns( T.any( @@ -54,6 +55,12 @@ module OpenAI def content=(_) end + # The role of the entity that is creating the message. 
Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. sig { returns(Symbol) } def role end @@ -62,6 +69,7 @@ module OpenAI def role=(_) end + # A list of files attached to the message, and the tools they should be added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment])) } def attachments end @@ -73,6 +81,12 @@ module OpenAI def attachments=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -127,6 +141,7 @@ module OpenAI def to_hash end + # The text contents of the message. class Content < OpenAI::Union abstract! @@ -141,6 +156,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -164,6 +180,12 @@ module OpenAI end end + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. class Role < OpenAI::Enum abstract! @@ -178,6 +200,7 @@ module OpenAI end class Attachment < OpenAI::BaseModel + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } def file_id end @@ -186,6 +209,7 @@ module OpenAI def file_id=(_) end + # The tools to add this file to. sig do returns( T.nilable( @@ -258,6 +282,7 @@ module OpenAI abstract! class FileSearch < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -276,6 +301,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 4830ec92..f85da37a 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class MessageDelta < OpenAI::BaseModel + # The content of the message as an array of text and/or images. sig do returns( T.nilable( @@ -47,6 +48,7 @@ module OpenAI def content=(_) end + # The entity that produced the message. One of `user` or `assistant`. sig { returns(T.nilable(Symbol)) } def role end @@ -55,6 +57,7 @@ module OpenAI def role=(_) end + # The delta containing the fields that have changed on the Message. sig do params( content: T::Array[ @@ -91,6 +94,7 @@ module OpenAI def to_hash end + # The entity that produced the message. One of `user` or `assistant`. class Role < OpenAI::Enum abstract!
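The `Message`, `MessageCreateParams`, and `MessageDelta` signatures above map directly onto the create-message call. A minimal usage sketch, assuming a configured client whose `beta.threads.messages` resource follows these signatures (the constructor, resource path, thread ID, and metadata values are illustrative assumptions, not taken from this patch):

    # Hypothetical client setup; constructor and resource names are assumptions.
    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # `role` must be :user or :assistant per the Role enum; `metadata` allows up
    # to 16 key-value pairs (keys up to 64 chars, values up to 512 chars).
    message = client.beta.threads.messages.create(
      "thread_abc123",
      role: :user,
      content: "What does the attached file say?",
      metadata: {"source" => "docs-example"}
    )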
diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 7908606f..5df8b179 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class MessageDeltaEvent < OpenAI::BaseModel + # The identifier of the message, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -13,6 +14,7 @@ module OpenAI def id=(_) end + # The delta containing the fields that have changed on the Message. sig { returns(OpenAI::Models::Beta::Threads::MessageDelta) } def delta end @@ -23,6 +25,7 @@ module OpenAI def delta=(_) end + # The object type, which is always `thread.message.delta`. sig { returns(Symbol) } def object end @@ -31,6 +34,8 @@ module OpenAI def object=(_) end + # Represents a message delta, i.e. any changed fields on a message during + # streaming. sig do params(id: String, delta: OpenAI::Models::Beta::Threads::MessageDelta, object: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index d2cb9550..2745c825 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -8,6 +8,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -16,6 +20,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -24,6 +32,8 @@ module OpenAI def before=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -32,6 +42,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -40,6 +52,7 @@ module OpenAI def order=(_) end + # Filter messages by the run ID that generated them. sig { returns(T.nilable(String)) } def run_id end @@ -78,6 +91,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 4fe12d9d..f2d3e73a 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -16,6 +16,12 @@ module OpenAI def thread_id=(_) end + # Set of 16 key-value pairs that can be attached to an object.
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi index 2451c015..c2179549 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi @@ -13,6 +13,7 @@ module OpenAI def refusal=(_) end + # Always `refusal`. sig { returns(Symbol) } def type end @@ -21,6 +22,7 @@ module OpenAI def type=(_) end + # The refusal content generated by the assistant. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } def self.new(refusal:, type: :refusal) end diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi index c6f9732d..87dd9957 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class RefusalDeltaBlock < OpenAI::BaseModel + # The index of the refusal part in the message. sig { returns(Integer) } def index end @@ -13,6 +14,7 @@ module OpenAI def index=(_) end + # Always `refusal`. sig { returns(Symbol) } def type end @@ -29,6 +31,7 @@ module OpenAI def refusal=(_) end + # The refusal content that is part of a message. sig { params(index: Integer, refusal: String, type: Symbol).returns(T.attached_class) } def self.new(index:, refusal: nil, type: :refusal) end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 2627ee3f..ef418354 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -5,6 +5,10 @@ module OpenAI module Beta module Threads class RequiredActionFunctionToolCall < OpenAI::BaseModel + # The ID of the tool call. This ID must be referenced when you submit the tool + # outputs using the + # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # endpoint. sig { returns(String) } def id end @@ -13,6 +17,7 @@ module OpenAI def id=(_) end + # The function definition. sig { returns(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function) } def function end @@ -24,6 +29,8 @@ module OpenAI def function=(_) end + # The type of tool call the output is required for. For now, this is always + # `function`. sig { returns(Symbol) } def type end @@ -32,6 +39,7 @@ module OpenAI def type=(_) end + # Tool call objects sig do params( id: String, @@ -53,6 +61,7 @@ module OpenAI end class Function < OpenAI::BaseModel + # The arguments that the model expects you to pass to the function. sig { returns(String) } def arguments end @@ -61,6 +70,7 @@ module OpenAI def arguments=(_) end + # The name of the function. sig { returns(String) } def name end @@ -69,6 +79,7 @@ module OpenAI def name=(_) end + # The function definition.
sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:) end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 61ddea55..6e74d579 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class Run < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -13,6 +14,9 @@ module OpenAI def id=(_) end + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # execution of this run. sig { returns(String) } def assistant_id end @@ -21,6 +25,7 @@ module OpenAI def assistant_id=(_) end + # The Unix timestamp (in seconds) for when the run was cancelled. sig { returns(T.nilable(Integer)) } def cancelled_at end @@ -29,6 +34,7 @@ module OpenAI def cancelled_at=(_) end + # The Unix timestamp (in seconds) for when the run was completed. sig { returns(T.nilable(Integer)) } def completed_at end @@ -37,6 +43,7 @@ module OpenAI def completed_at=(_) end + # The Unix timestamp (in seconds) for when the run was created. sig { returns(Integer) } def created_at end @@ -45,6 +52,7 @@ module OpenAI def created_at=(_) end + # The Unix timestamp (in seconds) for when the run will expire. sig { returns(T.nilable(Integer)) } def expires_at end @@ -53,6 +61,7 @@ module OpenAI def expires_at=(_) end + # The Unix timestamp (in seconds) for when the run failed. sig { returns(T.nilable(Integer)) } def failed_at end @@ -61,6 +70,8 @@ module OpenAI def failed_at=(_) end + # Details on why the run is incomplete. Will be `null` if the run is not + # incomplete. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) } def incomplete_details end @@ -72,6 +83,9 @@ module OpenAI def incomplete_details=(_) end + # The instructions that the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. sig { returns(String) } def instructions end @@ -80,6 +94,7 @@ module OpenAI def instructions=(_) end + # The last error associated with this run. Will be `null` if there are no errors. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::LastError)) } def last_error end @@ -91,6 +106,8 @@ module OpenAI def last_error=(_) end + # The maximum number of completion tokens specified to have been used over the + # course of the run. sig { returns(T.nilable(Integer)) } def max_completion_tokens end @@ -99,6 +116,8 @@ module OpenAI def max_completion_tokens=(_) end + # The maximum number of prompt tokens specified to have been used over the course + # of the run. sig { returns(T.nilable(Integer)) } def max_prompt_tokens end @@ -107,6 +126,12 @@ module OpenAI def max_prompt_tokens=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -115,6 +140,9 @@ module OpenAI def metadata=(_) end + # The model that the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. 
sig { returns(String) } def model end @@ -123,6 +151,7 @@ module OpenAI def model=(_) end + # The object type, which is always `thread.run`. sig { returns(Symbol) } def object end @@ -131,6 +160,9 @@ module OpenAI def object=(_) end + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T::Boolean) } def parallel_tool_calls end @@ -139,6 +171,8 @@ module OpenAI def parallel_tool_calls=(_) end + # Details on the action required to continue the run. Will be `null` if no action + # is required. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) } def required_action end @@ -150,6 +184,26 @@ module OpenAI def required_action=(_) end + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -190,6 +244,7 @@ module OpenAI def response_format=(_) end + # The Unix timestamp (in seconds) for when the run was started. sig { returns(T.nilable(Integer)) } def started_at end @@ -198,6 +253,9 @@ module OpenAI def started_at=(_) end + # The status of the run, which can be either `queued`, `in_progress`, + # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + # `incomplete`, or `expired`. sig { returns(Symbol) } def status end @@ -206,6 +264,8 @@ module OpenAI def status=(_) end + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # that was executed on as a part of this run. sig { returns(String) } def thread_id end @@ -214,6 +274,13 @@ module OpenAI def thread_id=(_) end + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. 
sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end @@ -225,6 +292,9 @@ module OpenAI def tool_choice=(_) end + # The list of tools that the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. sig do returns( T::Array[ @@ -262,6 +332,8 @@ module OpenAI def tools=(_) end + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) } def truncation_strategy end @@ -273,6 +345,8 @@ module OpenAI def truncation_strategy=(_) end + # Usage statistics related to the run. This value will be `null` if the run is not + # in a terminal state (i.e. `in_progress`, `queued`, etc.). sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } def usage end @@ -284,6 +358,7 @@ module OpenAI def usage=(_) end + # The sampling temperature used for this run. If not set, defaults to 1. sig { returns(T.nilable(Float)) } def temperature end @@ -292,6 +367,7 @@ module OpenAI def temperature=(_) end + # The nucleus sampling value used for this run. If not set, defaults to 1. sig { returns(T.nilable(Float)) } def top_p end @@ -300,6 +376,8 @@ module OpenAI def top_p=(_) end + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). sig do params( id: String, @@ -427,6 +505,8 @@ module OpenAI end class IncompleteDetails < OpenAI::BaseModel + # The reason why the run is incomplete. This will point to which specific token + # limit was reached over the course of the run. sig { returns(T.nilable(Symbol)) } def reason end @@ -435,6 +515,8 @@ module OpenAI def reason=(_) end + # Details on why the run is incomplete. Will be `null` if the run is not + # incomplete. sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason: nil) end @@ -443,6 +525,8 @@ module OpenAI def to_hash end + # The reason why the run is incomplete. This will point to which specific token + # limit was reached over the course of the run. class Reason < OpenAI::Enum abstract! @@ -458,6 +542,7 @@ module OpenAI end class LastError < OpenAI::BaseModel + # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. sig { returns(Symbol) } def code end @@ -466,6 +551,7 @@ module OpenAI def code=(_) end + # A human-readable description of the error. sig { returns(String) } def message end @@ -474,6 +560,7 @@ module OpenAI def message=(_) end + # The last error associated with this run. Will be `null` if there are no errors. sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end @@ -482,6 +569,7 @@ module OpenAI def to_hash end + # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. class Code < OpenAI::Enum abstract! @@ -498,6 +586,7 @@ module OpenAI end class RequiredAction < OpenAI::BaseModel + # Details on the tool outputs needed for this run to continue. sig { returns(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) } def submit_tool_outputs end @@ -509,6 +598,7 @@ module OpenAI def submit_tool_outputs=(_) end + # For now, this is always `submit_tool_outputs`. sig { returns(Symbol) } def type end @@ -517,6 +607,8 @@ module OpenAI def type=(_) end + # Details on the action required to continue the run. Will be `null` if no action + # is required.
sig do params( submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, @@ -537,6 +629,7 @@ module OpenAI end class SubmitToolOutputs < OpenAI::BaseModel + # A list of the relevant tool calls. sig { returns(T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) } def tool_calls end @@ -548,6 +641,7 @@ module OpenAI def tool_calls=(_) end + # Details on the tool outputs needed for this run to continue. sig do params(tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) .returns(T.attached_class) @@ -562,6 +656,10 @@ module OpenAI end class TruncationStrategy < OpenAI::BaseModel + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(Symbol) } def type end @@ -570,6 +668,8 @@ module OpenAI def type=(_) end + # The number of most recent messages from the thread when constructing the context + # for the run. sig { returns(T.nilable(Integer)) } def last_messages end @@ -578,6 +678,8 @@ module OpenAI def last_messages=(_) end + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } def self.new(type:, last_messages: nil) end @@ -586,6 +688,10 @@ module OpenAI def to_hash end + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. class Type < OpenAI::Enum abstract! @@ -601,6 +707,7 @@ module OpenAI end class Usage < OpenAI::BaseModel + # Number of completion tokens used over the course of the run. sig { returns(Integer) } def completion_tokens end @@ -609,6 +716,7 @@ module OpenAI def completion_tokens=(_) end + # Number of prompt tokens used over the course of the run. sig { returns(Integer) } def prompt_tokens end @@ -617,6 +725,7 @@ module OpenAI def prompt_tokens=(_) end + # Total number of tokens used (prompt + completion). sig { returns(Integer) } def total_tokens end @@ -625,6 +734,8 @@ module OpenAI def total_tokens=(_) end + # Usage statistics related to the run. This value will be `null` if the run is not + # in a terminal state (i.e. `in_progress`, `queued`, etc.). sig do params( completion_tokens: Integer, diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 3c923be3..dbf8d42e 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. sig { returns(String) } def assistant_id end @@ -16,6 +19,13 @@ module OpenAI def assistant_id=(_) end + # A list of additional fields to include in the response.
Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[Symbol])) } def include end @@ -24,6 +34,9 @@ module OpenAI def include=(_) end + # Appends additional instructions at the end of the instructions for the run. This + # is useful for modifying the behavior on a per-run basis without overriding other + # instructions. sig { returns(T.nilable(String)) } def additional_instructions end @@ -32,6 +45,7 @@ module OpenAI def additional_instructions=(_) end + # Adds additional messages to the thread before creating the run. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage])) } def additional_messages end @@ -43,6 +57,9 @@ module OpenAI def additional_messages=(_) end + # Overrides the + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } def instructions end @@ -51,6 +68,11 @@ module OpenAI def instructions=(_) end + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } def max_completion_tokens end @@ -59,6 +81,11 @@ module OpenAI def max_completion_tokens=(_) end + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } def max_prompt_tokens end @@ -67,6 +94,12 @@ module OpenAI def max_prompt_tokens=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -75,6 +108,10 @@ module OpenAI def metadata=(_) end + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -83,6 +120,9 @@ module OpenAI def model=(_) end + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. 
sig { returns(T.nilable(T::Boolean)) } def parallel_tool_calls end @@ -91,6 +131,12 @@ module OpenAI def parallel_tool_calls=(_) end + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(Symbol)) } def reasoning_effort end @@ -99,6 +145,26 @@ module OpenAI def reasoning_effort=(_) end + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -139,6 +205,9 @@ module OpenAI def response_format=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } def temperature end @@ -147,6 +216,13 @@ module OpenAI def temperature=(_) end + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end @@ -158,6 +234,8 @@ module OpenAI def tool_choice=(_) end + # Override the tools the assistant can use for this run. This is useful for + # modifying the behavior on a per-run basis. sig do returns( T.nilable( @@ -201,6 +279,11 @@ module OpenAI def tools=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. 
sig { returns(T.nilable(Float)) } def top_p end @@ -209,6 +292,8 @@ module OpenAI def top_p=(_) end + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) } def truncation_strategy end @@ -324,6 +409,7 @@ module OpenAI end class AdditionalMessage < OpenAI::BaseModel + # The text contents of the message. sig do returns( T.any( @@ -370,6 +456,12 @@ module OpenAI def content=(_) end + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. sig { returns(Symbol) } def role end @@ -378,6 +470,7 @@ module OpenAI def role=(_) end + # A list of files attached to the message, and the tools they should be added to. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]) ) end @@ -397,6 +490,12 @@ module OpenAI def attachments=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -449,6 +548,7 @@ module OpenAI def to_hash end + # The text contents of the message. class Content < OpenAI::Union abstract! @@ -463,6 +563,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -486,6 +587,12 @@ module OpenAI end end + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. class Role < OpenAI::Enum abstract! @@ -500,6 +607,7 @@ module OpenAI end class Attachment < OpenAI::BaseModel + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } def file_id end @@ -508,6 +616,7 @@ module OpenAI def file_id=(_) end + # The tools to add this file to. sig do returns( T.nilable( @@ -580,6 +689,7 @@ module OpenAI abstract! class FileSearch < OpenAI::BaseModel + # The type of tool being defined: `file_search` sig { returns(Symbol) } def type end @@ -598,6 +708,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -611,10 +722,15 @@ module OpenAI end end + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end class TruncationStrategy < OpenAI::BaseModel + # The truncation strategy to use for the thread. The default is `auto`.
If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(Symbol) } def type end @@ -630,6 +750,8 @@ module OpenAI def type=(_) end + # The number of most recent messages from the thread when constructing the context + # for the run. sig { returns(T.nilable(Integer)) } def last_messages end @@ -638,6 +760,8 @@ module OpenAI def last_messages=(_) end + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } def self.new(type:, last_messages: nil) end @@ -646,6 +770,10 @@ module OpenAI def to_hash end + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index cffb671d..ce7fa02c 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -8,6 +8,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -16,6 +20,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -24,6 +32,8 @@ module OpenAI def before=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -32,6 +42,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -68,6 +80,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index e890d711..ea69e05c 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -4,6 +4,9 @@ module OpenAI module Models module Beta module Threads + # The status of the run, which can be either `queued`, `in_progress`, + # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + # `incomplete`, or `expired`. class RunStatus < OpenAI::Enum abstract!
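The `RunCreateParams` and `TruncationStrategy` signatures above combine into a single create-run request. A minimal sketch under the same assumptions as the earlier message example (hypothetical client shape and resource names; all IDs and values are placeholders):

    # `truncation_strategy` mirrors TruncationStrategy above: `auto` drops
    # messages from the middle of the thread to fit the model's context length,
    # while `last_messages` keeps only the N most recent messages.
    run = client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      max_completion_tokens: 1024,  # run ends with status `incomplete` if exceeded
      truncation_strategy: {type: :last_messages, last_messages: 10}
    )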
diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 42c12d0a..0a39a132 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -16,6 +16,7 @@ module OpenAI def thread_id=(_) end + # A list of tools for which the outputs are being submitted. sig { returns(T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput]) } def tool_outputs end @@ -52,6 +53,7 @@ module OpenAI end class ToolOutput < OpenAI::BaseModel + # The output of the tool call to be submitted to continue the run. sig { returns(T.nilable(String)) } def output end @@ -60,6 +62,8 @@ module OpenAI def output=(_) end + # The ID of the tool call in the `required_action` object within the run object + # the output is being submitted for. sig { returns(T.nilable(String)) } def tool_call_id end diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 0ede0dab..d01aa52d 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -16,6 +16,12 @@ module OpenAI def thread_id=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi index 53f5040c..a6c26d19 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class CodeInterpreterLogs < OpenAI::BaseModel + # The index of the output in the outputs array. sig { returns(Integer) } def index end @@ -14,6 +15,7 @@ module OpenAI def index=(_) end + # Always `logs`. sig { returns(Symbol) } def type end @@ -22,6 +24,7 @@ module OpenAI def type=(_) end + # The text output from the Code Interpreter tool call. sig { returns(T.nilable(String)) } def logs end @@ -30,6 +33,7 @@ module OpenAI def logs=(_) end + # Text output from the Code Interpreter tool call as part of a run step. sig { params(index: Integer, logs: String, type: Symbol).returns(T.attached_class) } def self.new(index:, logs: nil, type: :logs) end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index e3a5d1f7..d598eb87 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class CodeInterpreterOutputImage < OpenAI::BaseModel + # The index of the output in the outputs array. sig { returns(Integer) } def index end @@ -14,6 +15,7 @@ module OpenAI def index=(_) end + # Always `image`. 
sig { returns(Symbol) } def type end @@ -54,6 +56,8 @@ module OpenAI end class Image < OpenAI::BaseModel + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the + # image. sig { returns(T.nilable(String)) } def file_id end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index fb36dea2..373b9a5a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class CodeInterpreterToolCall < OpenAI::BaseModel + # The ID of the tool call. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The Code Interpreter tool call definition. sig { returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter) } def code_interpreter end @@ -25,6 +27,8 @@ module OpenAI def code_interpreter=(_) end + # The type of tool call. This is always going to be `code_interpreter` for this + # type of tool call. sig { returns(Symbol) } def type end @@ -33,6 +37,7 @@ module OpenAI def type=(_) end + # Details of the Code Interpreter tool call the run step was involved in. sig do params( id: String, @@ -58,6 +63,7 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # The input to the Code Interpreter tool call. sig { returns(String) } def input end @@ -66,6 +72,9 @@ module OpenAI def input=(_) end + # The outputs from the Code Interpreter tool call. Code Interpreter can output one + # or more items, including text (`logs`) or images (`image`). Each of these is + # represented by a different object type. sig do returns( T::Array[ @@ -100,6 +109,7 @@ module OpenAI def outputs=(_) end + # The Code Interpreter tool call definition. sig do params( input: String, @@ -132,10 +142,12 @@ module OpenAI def to_hash end + # Text output from the Code Interpreter tool call as part of a run step. class Output < OpenAI::Union abstract! class Logs < OpenAI::BaseModel + # The text output from the Code Interpreter tool call. sig { returns(String) } def logs end @@ -144,6 +156,7 @@ module OpenAI def logs=(_) end + # Always `logs`. sig { returns(Symbol) } def type end @@ -152,6 +165,7 @@ module OpenAI def type=(_) end + # Text output from the Code Interpreter tool call as part of a run step. sig { params(logs: String, type: Symbol).returns(T.attached_class) } def self.new(logs:, type: :logs) end @@ -181,6 +195,7 @@ module OpenAI def image=(_) end + # Always `image`. sig { returns(Symbol) } def type end @@ -212,6 +227,8 @@ module OpenAI end class Image < OpenAI::BaseModel + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the + # image. sig { returns(String) } def file_id end @@ -231,6 +248,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 763d9012..e355df3e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class CodeInterpreterToolCallDelta < OpenAI::BaseModel + # The index of the tool call in the tool calls array.
sig { returns(Integer) } def index end @@ -14,6 +15,8 @@ module OpenAI def index=(_) end + # The type of tool call. This is always going to be `code_interpreter` for this + # type of tool call. sig { returns(Symbol) } def type end @@ -22,6 +25,7 @@ module OpenAI def type=(_) end + # The ID of the tool call. sig { returns(T.nilable(String)) } def id end @@ -30,6 +34,7 @@ module OpenAI def id=(_) end + # The Code Interpreter tool call definition. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter)) } def code_interpreter end @@ -41,6 +46,7 @@ module OpenAI def code_interpreter=(_) end + # Details of the Code Interpreter tool call the run step was involved in. sig do params( index: Integer, @@ -68,6 +74,7 @@ module OpenAI end class CodeInterpreter < OpenAI::BaseModel + # The input to the Code Interpreter tool call. sig { returns(T.nilable(String)) } def input end @@ -76,6 +83,9 @@ module OpenAI def input=(_) end + # The outputs from the Code Interpreter tool call. Code Interpreter can output one + # or more items, including text (`logs`) or images (`image`). Each of these is + # represented by a different object type. sig do returns( T.nilable( @@ -112,6 +122,7 @@ module OpenAI def outputs=(_) end + # The Code Interpreter tool call definition. sig do params( input: String, @@ -144,10 +155,12 @@ module OpenAI def to_hash end + # Text output from the Code Interpreter tool call as part of a run step. class Output < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 9aaccc68..af309515 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class FileSearchToolCall < OpenAI::BaseModel + # The ID of the tool call object. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # For now, this is always going to be an empty object. sig { returns(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch) } def file_search end @@ -25,6 +27,8 @@ module OpenAI def file_search=(_) end + # The type of tool call. This is always going to be `file_search` for this type of + # tool call. sig { returns(Symbol) } def type end @@ -54,6 +58,7 @@ module OpenAI end class FileSearch < OpenAI::BaseModel + # The ranking options for the file search. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions)) } def ranking_options end @@ -65,6 +70,7 @@ module OpenAI def ranking_options=(_) end + # The results of the file search. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result])) } def results end @@ -76,6 +82,7 @@ module OpenAI def results=(_) end + # For now, this is always going to be an empty object. sig do params( ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, @@ -99,6 +106,8 @@ module OpenAI end class RankingOptions < OpenAI::BaseModel + # The ranker to use for the file search. If not specified, the `auto` ranker will + # be used. sig { returns(Symbol) } def ranker end @@ -107,6 +116,8 @@ module OpenAI def ranker=(_) end + # The score threshold for the file search. All values must be a floating point + # number between 0 and 1.
sig { returns(Float) } def score_threshold end @@ -115,6 +126,7 @@ module OpenAI def score_threshold=(_) end + # The ranking options for the file search. sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker:, score_threshold:) end @@ -123,6 +135,8 @@ module OpenAI def to_hash end + # The ranker to use for the file search. If not specified, the `auto` ranker will + # be used. class Ranker < OpenAI::Enum abstract! @@ -138,6 +152,7 @@ module OpenAI end class Result < OpenAI::BaseModel + # The ID of the file that the result was found in. sig { returns(String) } def file_id end @@ -146,6 +161,7 @@ module OpenAI def file_id=(_) end + # The name of the file that the result was found in. sig { returns(String) } def file_name end @@ -154,6 +170,8 @@ module OpenAI def file_name=(_) end + # The score of the result. All values must be a floating point number between 0 + # and 1. sig { returns(Float) } def score end @@ -162,6 +180,8 @@ module OpenAI def score=(_) end + # The content of the result that was found. The content is only included if + # requested via the include query parameter. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) ) end @@ -177,6 +197,7 @@ module OpenAI def content=(_) end + # A result instance of the file search. sig do params( file_id: String, @@ -204,6 +225,7 @@ module OpenAI end class Content < OpenAI::BaseModel + # The text content of the file. sig { returns(T.nilable(String)) } def text end @@ -212,6 +234,7 @@ module OpenAI def text=(_) end + # The type of the content. sig { returns(T.nilable(Symbol)) } def type end @@ -228,6 +251,7 @@ module OpenAI def to_hash end + # The type of the content. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 82d07489..4986a86f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class FileSearchToolCallDelta < OpenAI::BaseModel + # For now, this is always going to be an empty object. sig { returns(T.anything) } def file_search end @@ -14,6 +15,7 @@ module OpenAI def file_search=(_) end + # The index of the tool call in the tool calls array. sig { returns(Integer) } def index end @@ -22,6 +24,8 @@ module OpenAI def index=(_) end + # The type of tool call. This is always going to be `file_search` for this type of + # tool call. sig { returns(Symbol) } def type end @@ -30,6 +34,7 @@ module OpenAI def type=(_) end + # The ID of the tool call object. sig { returns(T.nilable(String)) } def id end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 4f13f258..2fbc10a5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class FunctionToolCall < OpenAI::BaseModel + # The ID of the tool call object. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The definition of the function that was called.
sig { returns(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function) } def function end @@ -25,6 +27,8 @@ module OpenAI def function=(_) end + # The type of tool call. This is always going to be `function` for this type of + # tool call. sig { returns(Symbol) } def type end @@ -54,6 +58,7 @@ module OpenAI end class Function < OpenAI::BaseModel + # The arguments passed to the function. sig { returns(String) } def arguments end @@ -62,6 +67,7 @@ module OpenAI def arguments=(_) end + # The name of the function. sig { returns(String) } def name end @@ -70,6 +76,9 @@ module OpenAI def name=(_) end + # The output of the function. This will be `null` if the outputs have not been + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. sig { returns(T.nilable(String)) } def output end @@ -78,6 +87,7 @@ module OpenAI def output=(_) end + # The definition of the function that was called. sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index d5acf0b1..c026f468 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class FunctionToolCallDelta < OpenAI::BaseModel + # The index of the tool call in the tool calls array. sig { returns(Integer) } def index end @@ -14,6 +15,8 @@ module OpenAI def index=(_) end + # The type of tool call. This is always going to be `function` for this type of + # tool call. sig { returns(Symbol) } def type end @@ -22,6 +25,7 @@ module OpenAI def type=(_) end + # The ID of the tool call object. sig { returns(T.nilable(String)) } def id end @@ -30,6 +34,7 @@ module OpenAI def id=(_) end + # The definition of the function that was called. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function)) } def function end @@ -68,6 +73,7 @@ module OpenAI end class Function < OpenAI::BaseModel + # The arguments passed to the function. sig { returns(T.nilable(String)) } def arguments end @@ -76,6 +82,7 @@ module OpenAI def arguments=(_) end + # The name of the function. sig { returns(T.nilable(String)) } def name end @@ -84,6 +91,9 @@ module OpenAI def name=(_) end + # The output of the function. This will be `null` if the outputs have not been + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. sig { returns(T.nilable(String)) } def output end @@ -92,6 +102,7 @@ module OpenAI def output=(_) end + # The definition of the function that was called. sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 73222dd0..ba7932c9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -17,6 +17,7 @@ module OpenAI def message_creation=(_) end + # Always `message_creation`. sig { returns(Symbol) } def type end @@ -25,6 +26,7 @@ module OpenAI def type=(_) end + # Details of the message creation by the run step. 
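Since `output` stays `null` until tool outputs are submitted, the natural counterpart to these models is the submit step on the run itself. A hedged sketch, assuming a `submit_tool_outputs` endpoint shaped like the platform API; `tool_call` would come from a run in `requires_action`, and the IDs and `compute` helper are illustrative:

require "json"

args = JSON.parse(tool_call.function.arguments) # model output; may be invalid JSON
client.beta.threads.runs.submit_tool_outputs(
  "run_abc",                                    # placeholder run ID
  thread_id: "thread_abc",                      # placeholder thread ID
  tool_outputs: [{tool_call_id: tool_call.id, output: compute(args).to_json}]
)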
sig do params( message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, @@ -48,6 +50,7 @@ module OpenAI end class MessageCreation < OpenAI::BaseModel + # The ID of the message that was created by this run step. sig { returns(String) } def message_id end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 2f08a34f..557b42f9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -8,6 +8,7 @@ module OpenAI module Runs class RunStep < OpenAI::BaseModel + # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -16,6 +17,9 @@ module OpenAI def id=(_) end + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) + # associated with the run step. sig { returns(String) } def assistant_id end @@ -24,6 +28,7 @@ module OpenAI def assistant_id=(_) end + # The Unix timestamp (in seconds) for when the run step was cancelled. sig { returns(T.nilable(Integer)) } def cancelled_at end @@ -32,6 +37,7 @@ module OpenAI def cancelled_at=(_) end + # The Unix timestamp (in seconds) for when the run step completed. sig { returns(T.nilable(Integer)) } def completed_at end @@ -40,6 +46,7 @@ module OpenAI def completed_at=(_) end + # The Unix timestamp (in seconds) for when the run step was created. sig { returns(Integer) } def created_at end @@ -48,6 +55,8 @@ module OpenAI def created_at=(_) end + # The Unix timestamp (in seconds) for when the run step expired. A step is + # considered expired if the parent run is expired. sig { returns(T.nilable(Integer)) } def expired_at end @@ -56,6 +65,7 @@ module OpenAI def expired_at=(_) end + # The Unix timestamp (in seconds) for when the run step failed. sig { returns(T.nilable(Integer)) } def failed_at end @@ -64,6 +74,8 @@ module OpenAI def failed_at=(_) end + # The last error associated with this run step. Will be `null` if there are no + # errors. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) } def last_error end @@ -75,6 +87,12 @@ module OpenAI def last_error=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -83,6 +101,7 @@ module OpenAI def metadata=(_) end + # The object type, which is always `thread.run.step`. sig { returns(Symbol) } def object end @@ -91,6 +110,8 @@ module OpenAI def object=(_) end + # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that + # this run step is a part of. sig { returns(String) } def run_id end @@ -99,6 +120,8 @@ module OpenAI def run_id=(_) end + # The status of the run step, which can be either `in_progress`, `cancelled`, + # `failed`, `completed`, or `expired`. sig { returns(Symbol) } def status end @@ -107,6 +130,7 @@ module OpenAI def status=(_) end + # The details of the run step. sig do returns( T.any( @@ -135,6 +159,8 @@ module OpenAI def step_details=(_) end + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # that was run. 
sig { returns(String) } def thread_id end @@ -143,6 +169,7 @@ module OpenAI def thread_id=(_) end + # The type of run step, which can be either `message_creation` or `tool_calls`. sig { returns(Symbol) } def type end @@ -151,6 +178,8 @@ module OpenAI def type=(_) end + # Usage statistics related to the run step. This value will be `null` while the + # run step's status is `in_progress`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) } def usage end @@ -162,6 +191,7 @@ module OpenAI def usage=(_) end + # Represents a step in execution of a run. sig do params( id: String, @@ -236,6 +266,7 @@ module OpenAI end class LastError < OpenAI::BaseModel + # One of `server_error` or `rate_limit_exceeded`. sig { returns(Symbol) } def code end @@ -244,6 +275,7 @@ module OpenAI def code=(_) end + # A human-readable description of the error. sig { returns(String) } def message end @@ -252,6 +284,8 @@ module OpenAI def message=(_) end + # The last error associated with this run step. Will be `null` if there are no + # errors. sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end @@ -260,6 +294,7 @@ module OpenAI def to_hash end + # One of `server_error` or `rate_limit_exceeded`. class Code < OpenAI::Enum abstract! @@ -274,6 +309,8 @@ module OpenAI end end + # The status of the run step, which can be either `in_progress`, `cancelled`, + # `failed`, `completed`, or `expired`. class Status < OpenAI::Enum abstract! @@ -290,10 +327,12 @@ module OpenAI end end + # The details of the run step. class StepDetails < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -305,6 +344,7 @@ module OpenAI end end + # The type of run step, which can be either `message_creation` or `tool_calls`. class Type < OpenAI::Enum abstract! @@ -319,6 +359,7 @@ module OpenAI end class Usage < OpenAI::BaseModel + # Number of completion tokens used over the course of the run step. sig { returns(Integer) } def completion_tokens end @@ -327,6 +368,7 @@ module OpenAI def completion_tokens=(_) end + # Number of prompt tokens used over the course of the run step. sig { returns(Integer) } def prompt_tokens end @@ -335,6 +377,7 @@ module OpenAI def prompt_tokens=(_) end + # Total number of tokens used (prompt + completion). sig { returns(Integer) } def total_tokens end @@ -343,6 +386,8 @@ module OpenAI def total_tokens=(_) end + # Usage statistics related to the run step. This value will be `null` while the + # run step's status is `in_progress`. sig do params( completion_tokens: Integer, diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 60805244..a5c04563 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -8,6 +8,7 @@ module OpenAI module Runs class RunStepDelta < OpenAI::BaseModel + # The details of the run step. sig do returns( T.nilable( @@ -38,6 +39,7 @@ module OpenAI def step_details=(_) end + # The delta containing the fields that have changed on the run step. sig do params( step_details: T.any( @@ -64,10 +66,12 @@ module OpenAI def to_hash end + # The details of the run step. class StepDetails < OpenAI::Union abstract! 
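Pulling the `RunStep` fields above together: a short consumption sketch, assuming the generated accessors match these signatures (`step` is a previously retrieved run step):

case step.status
when :in_progress
  # usage is nil while the step is in progress, per the docs above
when :completed
  usage = step.usage
  puts "prompt=#{usage.prompt_tokens} completion=#{usage.completion_tokens} total=#{usage.total_tokens}"
when :failed
  warn "#{step.last_error.code}: #{step.last_error.message}"
end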
class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 73076f3a..7cc4a3fb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -8,6 +8,7 @@ module OpenAI module Runs class RunStepDeltaEvent < OpenAI::BaseModel + # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -16,6 +17,7 @@ module OpenAI def id=(_) end + # The delta containing the fields that have changed on the run step. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) } def delta end @@ -27,6 +29,7 @@ module OpenAI def delta=(_) end + # The object type, which is always `thread.run.step.delta`. sig { returns(Symbol) } def object end @@ -35,6 +38,8 @@ module OpenAI def object=(_) end + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. sig do params(id: String, delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, object: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index e933c731..02216a5c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -8,6 +8,7 @@ module OpenAI module Runs class RunStepDeltaMessageDelta < OpenAI::BaseModel + # Always `message_creation`. sig { returns(Symbol) } def type end @@ -27,6 +28,7 @@ module OpenAI def message_creation=(_) end + # Details of the message creation by the run step. sig do params( message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, @@ -50,6 +52,7 @@ module OpenAI end class MessageCreation < OpenAI::BaseModel + # The ID of the message that was created by this run step. sig { returns(T.nilable(String)) } def message_id end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 56b21c91..81075759 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -17,6 +17,10 @@ module OpenAI def thread_id=(_) end + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -25,6 +29,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -33,6 +41,13 @@ module OpenAI def before=(_) end + # A list of additional fields to include in the response. Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. 
+ # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[Symbol])) } def include end @@ -41,6 +56,8 @@ module OpenAI def include=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -49,6 +66,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -89,6 +108,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 9778a156..71dc2e52 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -25,6 +25,13 @@ module OpenAI def run_id=(_) end + # A list of additional fields to include in the response. Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[Symbol])) } def include end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 5d68bb14..9e16226b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -5,10 +5,12 @@ module OpenAI module Beta module Threads module Runs + # Details of the Code Interpreter tool call the run step was involved in. class ToolCall < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index fc5420d6..9fae60be 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -5,10 +5,12 @@ module OpenAI module Beta module Threads module Runs + # Details of the Code Interpreter tool call the run step was involved in. class ToolCallDelta < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index f9cb13ff..3258b9d6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -6,6 +6,7 @@ module OpenAI module Threads module Runs class ToolCallDeltaObject < OpenAI::BaseModel + # Always `tool_calls`. sig { returns(Symbol) } def type end @@ -14,6 +15,9 @@ module OpenAI def type=(_) end + # An array of tool calls the run step was involved in. These can be associated + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. 
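The `after`/`before` cursors, `limit`, `order`, and `include` parameters above combine into the usual cursor-pagination loop. A sketch under the assumption that the list endpoint returns a page exposing `data` and `has_more`; the method path and page shape are illustrative:

after = nil
loop do
  page = client.beta.threads.runs.steps.list(
    "run_abc",                # placeholder run ID
    thread_id: "thread_abc",  # placeholder thread ID
    after: after,
    limit: 100,
    order: :asc,
    include: [:"step_details.tool_calls[*].file_search.results[*].content"]
  )
  page.data.each { |step| puts step.id }
  break unless page.has_more
  after = page.data.last.id
end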
sig do returns( T.nilable( @@ -53,6 +57,7 @@ module OpenAI def tool_calls=(_) end + # Details of the tool call. sig do params( tool_calls: T::Array[ diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 4d2d6cf2..3ad8cc7f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class ToolCallsStepDetails < OpenAI::BaseModel + # An array of tool calls the run step was involved in. These can be associated + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. sig do returns( T::Array[ @@ -43,6 +46,7 @@ module OpenAI def tool_calls=(_) end + # Always `tool_calls`. sig { returns(Symbol) } def type end @@ -51,6 +55,7 @@ module OpenAI def type=(_) end + # Details of the tool call. sig do params( tool_calls: T::Array[ diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 97fe10b1..0ba8015c 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -39,6 +39,7 @@ module OpenAI def annotations=(_) end + # The data that makes up the text. sig { returns(String) } def value end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index d714859a..5a3f345f 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -13,6 +13,7 @@ module OpenAI def text=(_) end + # Always `text`. sig { returns(Symbol) } def type end @@ -21,6 +22,7 @@ module OpenAI def type=(_) end + # The text content that is part of a message. sig { params(text: OpenAI::Models::Beta::Threads::Text, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :text) end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi index dcfd074b..58764a6f 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class TextContentBlockParam < OpenAI::BaseModel + # Text content to be sent to the model sig { returns(String) } def text end @@ -13,6 +14,7 @@ module OpenAI def text=(_) end + # Always `text`. sig { returns(Symbol) } def type end @@ -21,6 +23,7 @@ module OpenAI def type=(_) end + # The text content that is part of a message. sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :text) end diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 3ffea679..12996683 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -41,6 +41,7 @@ module OpenAI def annotations=(_) end + # The data that makes up the text. 
sig { returns(T.nilable(String)) } def value end diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index a3e06fc4..80145b24 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -5,6 +5,7 @@ module OpenAI module Beta module Threads class TextDeltaBlock < OpenAI::BaseModel + # The index of the content part in the message. sig { returns(Integer) } def index end @@ -13,6 +14,7 @@ module OpenAI def index=(_) end + # Always `text`. sig { returns(Symbol) } def type end @@ -29,6 +31,7 @@ module OpenAI def text=(_) end + # The text content that is part of a message. sig do params(index: Integer, text: OpenAI::Models::Beta::Threads::TextDelta, type: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 85749208..361ee7ba 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletion < OpenAI::BaseModel + # A unique identifier for the chat completion. sig { returns(String) } def id end @@ -14,6 +15,8 @@ module OpenAI def id=(_) end + # A list of chat completion choices. Can be more than one if `n` is greater + # than 1. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) } def choices end @@ -25,6 +28,7 @@ module OpenAI def choices=(_) end + # The Unix timestamp (in seconds) of when the chat completion was created. sig { returns(Integer) } def created end @@ -33,6 +37,7 @@ module OpenAI def created=(_) end + # The model used for the chat completion. sig { returns(String) } def model end @@ -41,6 +46,7 @@ module OpenAI def model=(_) end + # The object type, which is always `chat.completion`. sig { returns(Symbol) } def object end @@ -49,6 +55,7 @@ module OpenAI def object=(_) end + # The service tier used for processing the request. sig { returns(T.nilable(Symbol)) } def service_tier end @@ -57,6 +64,10 @@ module OpenAI def service_tier=(_) end + # This fingerprint represents the backend configuration that the model runs with. + # + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. sig { returns(T.nilable(String)) } def system_fingerprint end @@ -65,6 +76,7 @@ module OpenAI def system_fingerprint=(_) end + # Usage statistics for the completion request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } def usage end @@ -73,6 +85,8 @@ module OpenAI def usage=(_) end + # Represents a chat completion response returned by model, based on the provided + # input. sig do params( id: String, @@ -117,6 +131,12 @@ module OpenAI end class Choice < OpenAI::BaseModel + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. sig { returns(Symbol) } def finish_reason end @@ -125,6 +145,7 @@ module OpenAI def finish_reason=(_) end + # The index of the choice in the list of choices. 
sig { returns(Integer) } def index end @@ -133,6 +154,7 @@ module OpenAI def index=(_) end + # Log probability information for the choice. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs)) } def logprobs end @@ -144,6 +166,7 @@ module OpenAI def logprobs=(_) end + # A chat completion message generated by the model. sig { returns(OpenAI::Models::Chat::ChatCompletionMessage) } def message end @@ -180,6 +203,12 @@ module OpenAI def to_hash end + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. class FinishReason < OpenAI::Enum abstract! @@ -197,6 +226,7 @@ module OpenAI end class Logprobs < OpenAI::BaseModel + # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } def content end @@ -208,6 +238,7 @@ module OpenAI def content=(_) end + # A list of message refusal tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } def refusal end @@ -219,6 +250,7 @@ module OpenAI def refusal=(_) end + # Log probability information for the choice. sig do params( content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), @@ -243,6 +275,7 @@ module OpenAI end end + # The service tier used for processing the request. class ServiceTier < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 10203986..a2c3cf1c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionAssistantMessageParam < OpenAI::BaseModel + # The role of the messages author, in this case `assistant`. sig { returns(Symbol) } def role end @@ -14,6 +15,8 @@ module OpenAI def role=(_) end + # Data about a previous audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) } def audio end @@ -25,6 +28,8 @@ module OpenAI def audio=(_) end + # The contents of the assistant message. Required unless `tool_calls` or + # `function_call` is specified. sig do returns( T.nilable( @@ -74,6 +79,8 @@ module OpenAI def content=(_) end + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) } def function_call end @@ -85,6 +92,8 @@ module OpenAI def function_call=(_) end + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } def name end @@ -93,6 +102,7 @@ module OpenAI def name=(_) end + # The refusal message by the assistant. 
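The `finish_reason` enum above is the main branch point when consuming a completion; a minimal sketch, assuming `completion` came from a prior create call and `handle_tools` is a hypothetical dispatcher:

choice = completion.choices.fetch(0)
case choice.finish_reason
when :stop           then puts choice.message.content
when :length         then warn "truncated; consider raising the token limit"
when :content_filter then warn "content was filtered"
when :tool_calls     then handle_tools(choice.message.tool_calls)
end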
sig { returns(T.nilable(String)) } def refusal end @@ -101,6 +111,7 @@ module OpenAI def refusal=(_) end + # The tool calls generated by the model, such as function calls. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } def tool_calls end @@ -112,6 +123,7 @@ module OpenAI def tool_calls=(_) end + # Messages sent by the model in response to user messages. sig do params( audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio), @@ -165,6 +177,7 @@ module OpenAI end class Audio < OpenAI::BaseModel + # Unique identifier for a previous audio response from the model. sig { returns(String) } def id end @@ -173,6 +186,8 @@ module OpenAI def id=(_) end + # Data about a previous audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { params(id: String).returns(T.attached_class) } def self.new(id:) end @@ -182,6 +197,8 @@ module OpenAI end end + # The contents of the assistant message. Required unless `tool_calls` or + # `function_call` is specified. class Content < OpenAI::Union abstract! @@ -194,10 +211,13 @@ module OpenAI ] end + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). class ArrayOfContentPart < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -210,6 +230,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -233,6 +254,10 @@ module OpenAI end class FunctionCall < OpenAI::BaseModel + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(String) } def arguments end @@ -241,6 +266,7 @@ module OpenAI def arguments=(_) end + # The name of the function to call. sig { returns(String) } def name end @@ -249,6 +275,8 @@ module OpenAI def name=(_) end + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:) end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index f8431d64..489f4145 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionAudio < OpenAI::BaseModel + # Unique identifier for this audio response. sig { returns(String) } def id end @@ -14,6 +15,8 @@ module OpenAI def id=(_) end + # Base64 encoded audio bytes generated by the model, in the format specified in + # the request. sig { returns(String) } def data end @@ -22,6 +25,8 @@ module OpenAI def data=(_) end + # The Unix timestamp (in seconds) for when this audio response will no longer be + # accessible on the server for use in multi-turn conversations. sig { returns(Integer) } def expires_at end @@ -30,6 +35,7 @@ module OpenAI def expires_at=(_) end + # Transcript of the audio generated by the model. sig { returns(String) } def transcript end @@ -38,6 +44,9 @@ module OpenAI def transcript=(_) end + # If the audio output modality is requested, this object contains data about the + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). 
sig do params(id: String, data: String, expires_at: Integer, transcript: String).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 0cbbdba5..a8c18d75 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionAudioParam < OpenAI::BaseModel + # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, + # or `pcm16`. sig { returns(Symbol) } def format_ end @@ -14,6 +16,8 @@ module OpenAI def format_=(_) end + # The voice the model uses to respond. Supported voices are `alloy`, `ash`, + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. sig { returns(Symbol) } def voice end @@ -22,6 +26,9 @@ module OpenAI def voice=(_) end + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { params(format_: Symbol, voice: Symbol).returns(T.attached_class) } def self.new(format_:, voice:) end @@ -30,6 +37,8 @@ module OpenAI def to_hash end + # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, + # or `pcm16`. class Format < OpenAI::Enum abstract! @@ -46,6 +55,8 @@ module OpenAI end end + # The voice the model uses to respond. Supported voices are `alloy`, `ash`, + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. class Voice < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 12f6d8b5..7dbd7a66 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionChunk < OpenAI::BaseModel + # A unique identifier for the chat completion. Each chunk has the same ID. sig { returns(String) } def id end @@ -14,6 +15,9 @@ module OpenAI def id=(_) end + # A list of chat completion choices. Can contain more than one elements if `n` is + # greater than 1. Can also be empty for the last chunk if you set + # `stream_options: {"include_usage": true}`. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) } def choices end @@ -25,6 +29,8 @@ module OpenAI def choices=(_) end + # The Unix timestamp (in seconds) of when the chat completion was created. Each + # chunk has the same timestamp. sig { returns(Integer) } def created end @@ -33,6 +39,7 @@ module OpenAI def created=(_) end + # The model to generate the completion. sig { returns(String) } def model end @@ -41,6 +48,7 @@ module OpenAI def model=(_) end + # The object type, which is always `chat.completion.chunk`. sig { returns(Symbol) } def object end @@ -49,6 +57,7 @@ module OpenAI def object=(_) end + # The service tier used for processing the request. sig { returns(T.nilable(Symbol)) } def service_tier end @@ -57,6 +66,9 @@ module OpenAI def service_tier=(_) end + # This fingerprint represents the backend configuration that the model runs with. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. 
sig { returns(T.nilable(String)) } def system_fingerprint end @@ -65,6 +77,10 @@ module OpenAI def system_fingerprint=(_) end + # An optional field that will only be present when you set + # `stream_options: {"include_usage": true}` in your request. When present, it + # contains a null value except for the last chunk which contains the token usage + # statistics for the entire request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } def usage end @@ -73,6 +89,9 @@ module OpenAI def usage=(_) end + # Represents a streamed chunk of a chat completion response returned by the model, + # based on the provided input. + # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). sig do params( id: String, @@ -117,6 +136,7 @@ module OpenAI end class Choice < OpenAI::BaseModel + # A chat completion delta generated by streamed model responses. sig { returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) } def delta end @@ -128,6 +148,12 @@ module OpenAI def delta=(_) end + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. sig { returns(T.nilable(Symbol)) } def finish_reason end @@ -136,6 +162,7 @@ module OpenAI def finish_reason=(_) end + # The index of the choice in the list of choices. sig { returns(Integer) } def index end @@ -144,6 +171,7 @@ module OpenAI def index=(_) end + # Log probability information for the choice. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs)) } def logprobs end @@ -182,6 +210,7 @@ module OpenAI end class Delta < OpenAI::BaseModel + # The contents of the chunk message. sig { returns(T.nilable(String)) } def content end @@ -190,6 +219,8 @@ module OpenAI def content=(_) end + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall)) } def function_call end @@ -201,6 +232,7 @@ module OpenAI def function_call=(_) end + # The refusal message generated by the model. sig { returns(T.nilable(String)) } def refusal end @@ -209,6 +241,7 @@ module OpenAI def refusal=(_) end + # The role of the author of this message. sig { returns(T.nilable(Symbol)) } def role end @@ -228,6 +261,7 @@ module OpenAI def tool_calls=(_) end + # A chat completion delta generated by streamed model responses. sig do params( content: T.nilable(String), @@ -257,6 +291,10 @@ module OpenAI end class FunctionCall < OpenAI::BaseModel + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(T.nilable(String)) } def arguments end @@ -265,6 +303,7 @@ module OpenAI def arguments=(_) end + # The name of the function to call. sig { returns(T.nilable(String)) } def name end @@ -273,6 +312,8 @@ module OpenAI def name=(_) end + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. 
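Because only the final chunk carries `usage` (and that chunk's `choices` may be empty when `stream_options: {include_usage: true}` is set), accumulating a streamed response looks roughly like this; the `stream` object and its `each` are assumptions:

content = +""
usage = nil
stream.each do |chunk|                  # chunk is a ChatCompletionChunk
  usage = chunk.usage if chunk.usage    # non-nil only on the final chunk
  delta = chunk.choices.first&.delta    # choices can be empty on that chunk
  content << delta.content if delta&.content
end
puts content
puts "total tokens: #{usage.total_tokens}" if usage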
sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments: nil, name: nil) end @@ -282,6 +323,7 @@ module OpenAI end end + # The role of the author of this message. class Role < OpenAI::Enum abstract! @@ -307,6 +349,7 @@ module OpenAI def index=(_) end + # The ID of the tool call. sig { returns(T.nilable(String)) } def id end @@ -326,6 +369,7 @@ module OpenAI def function=(_) end + # The type of the tool. Currently, only `function` is supported. sig { returns(T.nilable(Symbol)) } def type end @@ -361,6 +405,10 @@ module OpenAI end class Function < OpenAI::BaseModel + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(T.nilable(String)) } def arguments end @@ -369,6 +417,7 @@ module OpenAI def arguments=(_) end + # The name of the function to call. sig { returns(T.nilable(String)) } def name end @@ -386,6 +435,7 @@ module OpenAI end end + # The type of the tool. Currently, only `function` is supported. class Type < OpenAI::Enum abstract! @@ -400,6 +450,12 @@ module OpenAI end end + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. class FinishReason < OpenAI::Enum abstract! @@ -417,6 +473,7 @@ module OpenAI end class Logprobs < OpenAI::BaseModel + # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } def content end @@ -428,6 +485,7 @@ module OpenAI def content=(_) end + # A list of message refusal tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } def refusal end @@ -439,6 +497,7 @@ module OpenAI def refusal=(_) end + # Log probability information for the choice. sig do params( content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), @@ -463,6 +522,7 @@ module OpenAI end end + # The service tier used for processing the request. class ServiceTier < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 02a241f0..8ee6a1f4 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -5,6 +5,8 @@ module OpenAI ChatCompletionContentPart = T.type_alias { Chat::ChatCompletionContentPart } module Chat + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). class ChatCompletionContentPart < OpenAI::Union abstract! @@ -20,6 +22,7 @@ module OpenAI def file=(_) end + # The type of the content part. Always `file`. sig { returns(Symbol) } def type end @@ -28,6 +31,8 @@ module OpenAI def type=(_) end + # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text + # generation. 
sig do params(file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, type: Symbol) .returns(T.attached_class) @@ -40,6 +45,8 @@ module OpenAI end class File < OpenAI::BaseModel + # The base64 encoded file data, used when passing the file to the model as a + # string. sig { returns(T.nilable(String)) } def file_data end @@ -48,6 +55,7 @@ module OpenAI def file_data=(_) end + # The ID of an uploaded file to use as input. sig { returns(T.nilable(String)) } def file_id end @@ -56,6 +64,7 @@ module OpenAI def file_id=(_) end + # The name of the file, used when passing the file to the model as a string. sig { returns(T.nilable(String)) } def file_name end @@ -75,6 +84,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index e74c6c80..4f78201d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -17,6 +17,7 @@ module OpenAI def image_url=(_) end + # The type of the content part. sig { returns(Symbol) } def type end @@ -25,6 +26,7 @@ module OpenAI def type=(_) end + # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). sig do params(image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, type: Symbol) .returns(T.attached_class) @@ -39,6 +41,7 @@ module OpenAI end class ImageURL < OpenAI::BaseModel + # Either a URL of the image or the base64 encoded image data. sig { returns(String) } def url end @@ -47,6 +50,8 @@ module OpenAI def url=(_) end + # Specifies the detail level of the image. Learn more in the + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). sig { returns(T.nilable(Symbol)) } def detail end @@ -63,6 +68,8 @@ module OpenAI def to_hash end + # Specifies the detail level of the image. Learn more in the + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). class Detail < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index bff9cc3b..52dfed23 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -17,6 +17,7 @@ module OpenAI def input_audio=(_) end + # The type of the content part. Always `input_audio`. sig { returns(Symbol) } def type end @@ -25,6 +26,7 @@ module OpenAI def type=(_) end + # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). sig do params(input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, type: Symbol) .returns(T.attached_class) @@ -42,6 +44,7 @@ module OpenAI end class InputAudio < OpenAI::BaseModel + # Base64 encoded audio data. sig { returns(String) } def data end @@ -50,6 +53,7 @@ module OpenAI def data=(_) end + # The format of the encoded audio data. Currently supports "wav" and "mp3". sig { returns(Symbol) } def format_ end @@ -66,6 +70,7 @@ module OpenAI def to_hash end + # The format of the encoded audio data. Currently supports "wav" and "mp3". class Format < OpenAI::Enum abstract! 
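Taken together, the content-part models above compose into a single user message. An illustrative hash-form sketch; the `format_` key mirrors the reserved-word-safe naming in these signatures, and `base64_wav` is an assumed Base64 String:

content = [
  {type: :text, text: "What is in this image, and what does the clip say?"},
  {type: :image_url, image_url: {url: "https://example.com/cat.png", detail: :low}},
  {type: :input_audio, input_audio: {data: base64_wav, format_: :wav}}
]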
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 263c9c0b..392ce584 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionContentPartRefusal < OpenAI::BaseModel + # The refusal message generated by the model. sig { returns(String) } def refusal end @@ -14,6 +15,7 @@ module OpenAI def refusal=(_) end + # The type of the content part. sig { returns(Symbol) } def type end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index 84a24c5d..1154a72d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionContentPartText < OpenAI::BaseModel + # The text content. sig { returns(String) } def text end @@ -14,6 +15,7 @@ module OpenAI def text=(_) end + # The type of the content part. sig { returns(Symbol) } def type end @@ -22,6 +24,8 @@ module OpenAI def type=(_) end + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :text) end diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 763165dc..72764c05 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionDeleted < OpenAI::BaseModel + # The ID of the chat completion that was deleted. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # Whether the chat completion was deleted. sig { returns(T::Boolean) } def deleted end @@ -22,6 +24,7 @@ module OpenAI def deleted=(_) end + # The type of object being deleted. sig { returns(Symbol) } def object end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index b7f2d9e7..5f435cb4 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel + # The contents of the developer message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } def content end @@ -17,6 +18,7 @@ module OpenAI def content=(_) end + # The role of the messages author, in this case `developer`. sig { returns(Symbol) } def role end @@ -25,6 +27,8 @@ module OpenAI def role=(_) end + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } def name end @@ -33,6 +37,9 @@ module OpenAI def name=(_) end + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. 
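In hash form, a developer message per the model above sits at the front of the conversation; a minimal, illustrative sketch:

messages = [
  {role: :developer, content: "Answer in rhyming couplets."},
  {role: :user, content: "Why is the sky blue?"}
]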
sig do params( content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), @@ -57,12 +64,14 @@ module OpenAI def to_hash end + # The contents of the developer message. class Content < OpenAI::Union abstract! ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self + # @api private sig do override .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index abf316d2..a17d3350 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionFunctionCallOption < OpenAI::BaseModel + # The name of the function to call. sig { returns(String) } def name end @@ -14,6 +15,8 @@ module OpenAI def name=(_) end + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. sig { params(name: String).returns(T.attached_class) } def self.new(name:) end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 1cf6ef40..1c035a2e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionFunctionMessageParam < OpenAI::BaseModel + # The contents of the function message. sig { returns(T.nilable(String)) } def content end @@ -14,6 +15,7 @@ module OpenAI def content=(_) end + # The name of the function to call. sig { returns(String) } def name end @@ -22,6 +24,7 @@ module OpenAI def name=(_) end + # The role of the messages author, in this case `function`. sig { returns(Symbol) } def role end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 91a6344d..a54250d7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionMessage < OpenAI::BaseModel + # The contents of the message. sig { returns(T.nilable(String)) } def content end @@ -14,6 +15,7 @@ module OpenAI def content=(_) end + # The refusal message generated by the model. sig { returns(T.nilable(String)) } def refusal end @@ -22,6 +24,7 @@ module OpenAI def refusal=(_) end + # The role of the author of this message. sig { returns(Symbol) } def role end @@ -30,6 +33,8 @@ module OpenAI def role=(_) end + # Annotations for the message, when applicable, as when using the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation])) } def annotations end @@ -41,6 +46,9 @@ module OpenAI def annotations=(_) end + # If the audio output modality is requested, this object contains data about the + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) } def audio end @@ -52,6 +60,8 @@ module OpenAI def audio=(_) end + # Deprecated and replaced by `tool_calls`. 
The name and arguments of a function + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall)) } def function_call end @@ -63,6 +73,7 @@ module OpenAI def function_call=(_) end + # The tool calls generated by the model, such as function calls. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } def tool_calls end @@ -74,6 +85,7 @@ module OpenAI def tool_calls=(_) end + # A chat completion message generated by the model. sig do params( content: T.nilable(String), @@ -107,6 +119,7 @@ module OpenAI end class Annotation < OpenAI::BaseModel + # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } def type end @@ -115,6 +128,7 @@ module OpenAI def type=(_) end + # A URL citation when using web search. sig { returns(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation) } def url_citation end @@ -126,6 +140,7 @@ module OpenAI def url_citation=(_) end + # A URL citation when using web search. sig do params(url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, type: Symbol) .returns(T.attached_class) @@ -143,6 +158,7 @@ module OpenAI end class URLCitation < OpenAI::BaseModel + # The index of the last character of the URL citation in the message. sig { returns(Integer) } def end_index end @@ -151,6 +167,7 @@ module OpenAI def end_index=(_) end + # The index of the first character of the URL citation in the message. sig { returns(Integer) } def start_index end @@ -159,6 +176,7 @@ module OpenAI def start_index=(_) end + # The title of the web resource. sig { returns(String) } def title end @@ -167,6 +185,7 @@ module OpenAI def title=(_) end + # The URL of the web resource. sig { returns(String) } def url end @@ -175,6 +194,7 @@ module OpenAI def url=(_) end + # A URL citation when using web search. sig do params( end_index: Integer, @@ -193,6 +213,10 @@ module OpenAI end class FunctionCall < OpenAI::BaseModel + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(String) } def arguments end @@ -201,6 +225,7 @@ module OpenAI def arguments=(_) end + # The name of the function to call. sig { returns(String) } def name end @@ -209,6 +234,8 @@ module OpenAI def name=(_) end + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:) end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 471f5f89..1a47472e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -5,10 +5,14 @@ module OpenAI ChatCompletionMessageParam = T.type_alias { Chat::ChatCompletionMessageParam } module Chat + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. class ChatCompletionMessageParam < OpenAI::Union abstract! 
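When web search is involved, the `annotations` documented above carry the citations; a short sketch of walking them, assuming `message` is a ChatCompletionMessage from a prior response:

(message.annotations || []).each do |annotation|
  next unless annotation.type == :url_citation
  citation = annotation.url_citation
  puts "#{citation.title}: #{citation.url} (chars #{citation.start_index}..#{citation.end_index})"
end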
class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index ca4c6ac0..fadd4257 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionMessageToolCall < OpenAI::BaseModel + # The ID of the tool call. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The function that the model called. sig { returns(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function) } def function end @@ -25,6 +27,7 @@ module OpenAI def function=(_) end + # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } def type end @@ -50,6 +53,10 @@ module OpenAI end class Function < OpenAI::BaseModel + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(String) } def arguments end @@ -58,6 +65,7 @@ module OpenAI def arguments=(_) end + # The name of the function to call. sig { returns(String) } def name end @@ -66,6 +74,7 @@ module OpenAI def name=(_) end + # The function that the model called. sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:) end diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 89db6837..d1d23deb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -17,6 +17,7 @@ module OpenAI def function=(_) end + # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } def type end @@ -25,6 +26,8 @@ module OpenAI def type=(_) end + # Specifies a tool the model should use. Use to force the model to call a specific + # function. sig do params(function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, type: Symbol) .returns(T.attached_class) @@ -37,6 +40,7 @@ module OpenAI end class Function < OpenAI::BaseModel + # The name of the function to call. sig { returns(String) } def name end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 279b9fbe..b16430d8 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -6,6 +6,9 @@ module OpenAI module Chat class ChatCompletionPredictionContent < OpenAI::BaseModel + # The content that should be matched when generating a model response. If + # generated tokens would match this content, the entire model response can be + # returned much more quickly. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } def content end @@ -17,6 +20,8 @@ module OpenAI def content=(_) end + # The type of the predicted content you want to provide. This type is currently + # always `content`. 
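Both request-side models above are plain hashes in practice; an illustrative sketch of forcing a specific function and supplying predicted output, where `previous_source` is an assumed String holding the prior version of the file:

tool_choice = {type: :function, function: {name: "my_function"}}
prediction  = {type: :content, content: previous_source}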
sig { returns(Symbol) } def type end @@ -25,6 +30,8 @@ module OpenAI def type=(_) end + # Static predicted output content, such as the content of a text file that is + # being regenerated. sig do params( content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), @@ -44,12 +51,16 @@ module OpenAI def to_hash end + # The content that should be matched when generating a model response. If + # generated tokens would match this content, the entire model response can be + # returned much more quickly. class Content < OpenAI::Union abstract! ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self + # @api private sig do override .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index 807d6735..24a8acf6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -5,6 +5,7 @@ module OpenAI ChatCompletionRole = T.type_alias { Chat::ChatCompletionRole } module Chat + # The role of the author of a message class ChatCompletionRole < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi index 0c487bdc..a63c1c01 100644 --- a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage + # The identifier of the chat message. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # A chat completion message generated by the model. sig { params(id: String).returns(T.attached_class) } def self.new(id:) end diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 88104415..9fbf5879 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -6,6 +6,10 @@ module OpenAI module Chat class ChatCompletionStreamOptions < OpenAI::BaseModel + # If set, an additional chunk will be streamed before the `data: [DONE]` message. + # The `usage` field on this chunk shows the token usage statistics for the entire + # request, and the `choices` field will always be an empty array. All other chunks + # will also include a `usage` field, but with a null value. sig { returns(T.nilable(T::Boolean)) } def include_usage end @@ -14,6 +18,7 @@ module OpenAI def include_usage=(_) end + # Options for streaming response. Only set this when you set `stream: true`. sig { params(include_usage: T::Boolean).returns(T.attached_class) } def self.new(include_usage: nil) end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 4c4e8def..b2a9408e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionSystemMessageParam < OpenAI::BaseModel + # The contents of the system message. 
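The `include_usage` flag documented above adds one extra chunk before `data: [DONE]` whose `usage` field is non-null. A sketch of how a consumer might pick it out; the streaming call is commented out because the exact method name is an assumption here:

```ruby
params = {
  model: "gpt-4o",
  messages: [{role: :user, content: "Hello"}],
  stream_options: {include_usage: true}
}

# stream = client.chat.completions.stream_raw(**params) # assumed streaming API
# stream.each do |chunk|
#   # Every chunk carries a `usage` key; it is null except on the final chunk.
#   puts chunk.usage.total_tokens unless chunk.usage.nil?
# end
```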
sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } def content end @@ -17,6 +18,7 @@ module OpenAI def content=(_) end + # The role of the messages author, in this case `system`. sig { returns(Symbol) } def role end @@ -25,6 +27,8 @@ module OpenAI def role=(_) end + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } def name end @@ -33,6 +37,9 @@ module OpenAI def name=(_) end + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. With o1 models and newer, use `developer` messages + # for this purpose instead. sig do params( content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), @@ -57,12 +64,14 @@ module OpenAI def to_hash end + # The contents of the system message. class Content < OpenAI::Union abstract! ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self + # @api private sig do override .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index c280c13e..2341c139 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionTokenLogprob < OpenAI::BaseModel + # The token. sig { returns(String) } def token end @@ -14,6 +15,10 @@ module OpenAI def token=(_) end + # A list of integers representing the UTF-8 bytes representation of the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } def bytes end @@ -22,6 +27,9 @@ module OpenAI def bytes=(_) end + # The log probability of this token, if it is within the top 20 most likely + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. sig { returns(Float) } def logprob end @@ -30,6 +38,9 @@ module OpenAI def logprob=(_) end + # List of the most likely tokens and their log probability, at this token + # position. In rare cases, there may be fewer than the number of requested + # `top_logprobs` returned. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) } def top_logprobs end @@ -68,6 +79,7 @@ module OpenAI end class TopLogprob < OpenAI::BaseModel + # The token. sig { returns(String) } def token end @@ -76,6 +88,10 @@ module OpenAI def token=(_) end + # A list of integers representing the UTF-8 bytes representation of the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } def bytes end @@ -84,6 +100,9 @@ module OpenAI def bytes=(_) end + # The log probability of this token, if it is within the top 20 most likely + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. 
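The `bytes` docs above note that a single character can span several tokens, so the byte arrays must be concatenated before decoding. A self-contained sketch of that reassembly (the accessor path in the trailing comment is an assumption):

```ruby
# Rebuild UTF-8 text from per-token byte arrays; tokens with no byte
# representation (`bytes` is nil) are skipped.
def text_from_logprobs(token_logprobs)
  bytes = token_logprobs.flat_map { |lp| lp.bytes || [] }
  bytes.pack("C*").force_encoding(Encoding::UTF_8)
end

# text_from_logprobs(choice.logprobs.content) # hypothetical accessor path
```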
sig { returns(Float) } def logprob end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index 363528f2..5a41a09d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -14,6 +14,7 @@ module OpenAI def function=(_) end + # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } def type end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 3d7a6aea..218b467d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -5,9 +5,21 @@ module OpenAI ChatCompletionToolChoiceOption = T.type_alias { Chat::ChatCompletionToolChoiceOption } module Chat + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. + # + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. class ChatCompletionToolChoiceOption < OpenAI::Union abstract! + # `none` means the model will not call any tool and instead generates a message. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools. class Auto < OpenAI::Enum abstract! @@ -23,6 +35,7 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]]) } private def variants end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index e320aa6c..d0aa1120 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionToolMessageParam < OpenAI::BaseModel + # The contents of the tool message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } def content end @@ -17,6 +18,7 @@ module OpenAI def content=(_) end + # The role of the messages author, in this case `tool`. sig { returns(Symbol) } def role end @@ -25,6 +27,7 @@ module OpenAI def role=(_) end + # Tool call that this message is responding to. sig { returns(String) } def tool_call_id end @@ -57,12 +60,14 @@ module OpenAI def to_hash end + # The contents of the tool message. class Content < OpenAI::Union abstract! 
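A tool message, per the docs above, answers one specific call by echoing its `tool_call_id`. A hedged sketch of the reply turn; `response` and `messages` are assumed to exist from an earlier request:

```ruby
require "json"

# Answer the model's first tool call with a `tool` role message.
tool_call = response.choices.first.message.tool_calls.first # assumed path
messages << {
  role: :tool,
  tool_call_id: tool_call.id,               # ties the reply to the call
  content: JSON.generate(temperature_c: 7)  # your function's real result
}
```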
ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self + # @api private sig do override .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index a41ad724..2ecc1036 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -6,6 +6,7 @@ module OpenAI module Chat class ChatCompletionUserMessageParam < OpenAI::BaseModel + # The contents of the user message. sig do returns( T.any( @@ -55,6 +56,7 @@ module OpenAI def content=(_) end + # The role of the messages author, in this case `user`. sig { returns(Symbol) } def role end @@ -63,6 +65,8 @@ module OpenAI def role=(_) end + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } def name end @@ -71,6 +75,8 @@ module OpenAI def name=(_) end + # Messages sent by an end user, containing prompts or additional context + # information. sig do params( content: T.any( @@ -115,6 +121,7 @@ module OpenAI def to_hash end + # The contents of the user message. class Content < OpenAI::Union abstract! @@ -130,6 +137,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 77aab350..6aa2796e 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -7,6 +7,12 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A list of messages comprising the conversation so far. Depending on the + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). sig do returns( T::Array[ @@ -53,6 +59,11 @@ module OpenAI def messages=(_) end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig { returns(T.any(String, Symbol)) } def model end @@ -61,6 +72,9 @@ module OpenAI def model=(_) end + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) } def audio end @@ -72,6 +86,9 @@ module OpenAI def audio=(_) end + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. sig { returns(T.nilable(Float)) } def frequency_penalty end @@ -80,6 +97,20 @@ module OpenAI def frequency_penalty=(_) end + # Deprecated in favor of `tool_choice`. + # + # Controls which (if any) function is called by the model. 
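A minimal request sketch, for orientation, using only the required `messages` and `model` params documented above; `client` (an `OpenAI::Client`) and the `chat.completions.create` naming are assumptions here:

```ruby
# Minimal chat completion: `messages` and `model` are the only required params.
response = client.chat.completions.create(
  model: "gpt-4o",
  messages: [
    {role: :developer, content: "You are a terse assistant."},
    {role: :user, content: "Name three prime numbers."}
  ]
)
puts response.choices.first.message.content
```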
+ # + # `none` means the model will not call a function and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling a + # function. + # + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption))) } def function_call end @@ -91,6 +122,9 @@ module OpenAI def function_call=(_) end + # Deprecated in favor of `tools`. + # + # A list of functions the model may generate JSON inputs for. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function])) } def functions end @@ -102,6 +136,14 @@ module OpenAI def functions=(_) end + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. sig { returns(T.nilable(T::Hash[Symbol, Integer])) } def logit_bias end @@ -110,6 +152,9 @@ module OpenAI def logit_bias=(_) end + # Whether to return log probabilities of the output tokens or not. If true, + # returns the log probabilities of each output token returned in the `content` of + # `message`. sig { returns(T.nilable(T::Boolean)) } def logprobs end @@ -118,6 +163,9 @@ module OpenAI def logprobs=(_) end + # An upper bound for the number of tokens that can be generated for a completion, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } def max_completion_tokens end @@ -126,6 +174,13 @@ module OpenAI def max_completion_tokens=(_) end + # The maximum number of [tokens](/tokenizer) that can be generated in the chat + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. + # + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } def max_tokens end @@ -134,6 +189,12 @@ module OpenAI def max_tokens=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -142,6 +203,16 @@ module OpenAI def metadata=(_) end + # Output types that you would like the model to generate. Most models are capable + # of generating text, which is the default: + # + # `["text"]` + # + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). 
To request that
+        #   this model generate both text and audio responses, you can use:
+        #
+        #   `["text", "audio"]`
         sig { returns(T.nilable(T::Array[Symbol])) }
         def modalities
         end
@@ -150,6 +221,9 @@ module OpenAI
         def modalities=(_)
         end

+        # How many chat completion choices to generate for each input message. Note that
+        #   you will be charged based on the number of generated tokens across all of the
+        #   choices. Keep `n` as `1` to minimize costs.
         sig { returns(T.nilable(Integer)) }
         def n
         end
@@ -158,6 +232,9 @@ module OpenAI
         def n=(_)
         end

+        # Whether to enable
+        #   [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling)
+        #   during tool use.
         sig { returns(T.nilable(T::Boolean)) }
         def parallel_tool_calls
         end
@@ -166,6 +243,8 @@ module OpenAI
         def parallel_tool_calls=(_)
         end

+        # Static predicted output content, such as the content of a text file that is
+        #   being regenerated.
         sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) }
         def prediction
         end
@@ -177,6 +256,9 @@ module OpenAI
         def prediction=(_)
         end

+        # Number between -2.0 and 2.0. Positive values penalize new tokens based on
+        #   whether they appear in the text so far, increasing the model's likelihood to
+        #   talk about new topics.
         sig { returns(T.nilable(Float)) }
         def presence_penalty
         end
@@ -185,6 +267,12 @@ module OpenAI
         def presence_penalty=(_)
         end

+        # **o-series models only**
+        #
+        #   Constrains effort on reasoning for
+        #   [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
+        #   supported values are `low`, `medium`, and `high`. Reducing reasoning effort can
+        #   result in faster responses and fewer tokens used on reasoning in a response.
         sig { returns(T.nilable(Symbol)) }
         def reasoning_effort
         end
@@ -193,6 +281,16 @@ module OpenAI
         def reasoning_effort=(_)
         end

+        # An object specifying the format that the model must output.
+        #
+        #   Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+        #   Outputs which ensures the model will match your supplied JSON schema. Learn more
+        #   in the
+        #   [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+        #
+        #   Setting to `{ "type": "json_object" }` enables the older JSON mode, which
+        #   ensures the message the model generates is valid JSON. Using `json_schema` is
+        #   preferred for models that support it.
         sig do
           returns(
             T.nilable(
@@ -226,6 +324,11 @@ module OpenAI
         def response_format=(_)
         end

+        # This feature is in Beta. If specified, our system will make a best effort to
+        #   sample deterministically, such that repeated requests with the same `seed` and
+        #   parameters should return the same result. Determinism is not guaranteed, and you
+        #   should refer to the `system_fingerprint` response parameter to monitor changes
+        #   in the backend.
         sig { returns(T.nilable(Integer)) }
         def seed
         end
@@ -234,6 +337,20 @@ module OpenAI
         def seed=(_)
         end

+        # Specifies the latency tier to use for processing the request. This parameter is
+        #   relevant for customers subscribed to the scale tier service:
+        #
+        #   - If set to 'auto', and the Project is Scale tier enabled, the system will
+        #     utilize scale tier credits until they are exhausted.
+        #   - If set to 'auto', and the Project is not Scale tier enabled, the request will
+        #     be processed using the default service tier with a lower uptime SLA and no
+        #     latency guarantee.
+        #   - If set to 'default', the request will be processed using the default service
+        #     tier with a lower uptime SLA and no latency guarantee.
+        #   - When not set, the default behavior is 'auto'.
+        #
+        #   When this parameter is set, the response body will include the `service_tier`
+        #   utilized.
         sig { returns(T.nilable(Symbol)) }
         def service_tier
         end
@@ -242,6 +359,8 @@ module OpenAI
         def service_tier=(_)
         end

+        # Up to 4 sequences where the API will stop generating further tokens. The
+        #   returned text will not contain the stop sequence.
         sig { returns(T.nilable(T.any(String, T::Array[String]))) }
         def stop
         end
@@ -259,6 +378,9 @@ module OpenAI
         def stop=(_)
         end

+        # Whether or not to store the output of this chat completion request for use in
+        #   our [model distillation](https://platform.openai.com/docs/guides/distillation)
+        #   or [evals](https://platform.openai.com/docs/guides/evals) products.
         sig { returns(T.nilable(T::Boolean)) }
         def store
         end
@@ -267,6 +389,7 @@ module OpenAI
         def store=(_)
         end

+        # Options for streaming response. Only set this when you set `stream: true`.
         sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) }
         def stream_options
         end
@@ -278,6 +401,10 @@ module OpenAI
         def stream_options=(_)
         end

+        # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+        #   make the output more random, while lower values like 0.2 will make it more
+        #   focused and deterministic. We generally recommend altering this or `top_p` but
+        #   not both.
         sig { returns(T.nilable(Float)) }
         def temperature
         end
@@ -286,6 +413,15 @@ module OpenAI
         def temperature=(_)
         end

+        # Controls which (if any) tool is called by the model. `none` means the model will
+        #   not call any tool and instead generates a message. `auto` means the model can
+        #   pick between generating a message or calling one or more tools. `required` means
+        #   the model must call one or more tools. Specifying a particular tool via
+        #   `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+        #   call that tool.
+        #
+        #   `none` is the default when no tools are present. `auto` is the default if tools
+        #   are present.
         sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice))) }
         def tool_choice
         end
@@ -297,6 +433,9 @@ module OpenAI
         def tool_choice=(_)
         end

+        # A list of tools the model may call. Currently, only functions are supported as a
+        #   tool. Use this to provide a list of functions the model may generate JSON inputs
+        #   for. A max of 128 functions is supported.
         sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) }
         def tools
         end
@@ -308,6 +447,9 @@ module OpenAI
         def tools=(_)
         end

+        # An integer between 0 and 20 specifying the number of most likely tokens to
+        #   return at each token position, each with an associated log probability.
+        #   `logprobs` must be set to `true` if this parameter is used.
         sig { returns(T.nilable(Integer)) }
         def top_logprobs
         end
@@ -316,6 +458,11 @@ module OpenAI
         def top_logprobs=(_)
         end

+        # An alternative to sampling with temperature, called nucleus sampling, where the
+        #   model considers the results of the tokens with top_p probability mass. So 0.1
+        #   means only the tokens comprising the top 10% probability mass are considered.
+        #
+        #   We generally recommend altering this or `temperature` but not both.
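Pulling the `tools` and `tool_choice` docs above into one request sketch; the weather function is illustrative, and the JSON Schema under `parameters` follows the convention the `Function` docs reference:

```ruby
# Declare one function tool and let the model decide whether to call it.
params = {
  model: "gpt-4o",
  messages: [{role: :user, content: "Do I need an umbrella in Paris today?"}],
  tools: [
    {
      type: :function,
      function: {
        name: "get_weather",
        description: "Look up current weather for a city",
        parameters: {
          type: "object",
          properties: {city: {type: "string"}},
          required: ["city"]
        }
      }
    }
  ],
  tool_choice: :auto # the default whenever tools are present
}
```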
sig { returns(T.nilable(Float)) } def top_p end @@ -324,6 +471,9 @@ module OpenAI def top_p=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -332,6 +482,9 @@ module OpenAI def user=(_) end + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions)) } def web_search_options end @@ -481,19 +634,42 @@ module OpenAI def to_hash end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # Deprecated in favor of `tool_choice`. + # + # Controls which (if any) function is called by the model. + # + # `none` means the model will not call a function and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling a + # function. + # + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. class FunctionCall < OpenAI::Union abstract! + # `none` means the model will not call a function and instead generates a message. + # `auto` means the model can pick between generating a message or calling a + # function. class FunctionCallMode < OpenAI::Enum abstract! @@ -508,6 +684,7 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]]) } private def variants end @@ -515,6 +692,8 @@ module OpenAI end class Function < OpenAI::BaseModel + # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + # underscores and dashes, with a maximum length of 64. sig { returns(String) } def name end @@ -523,6 +702,8 @@ module OpenAI def name=(_) end + # A description of what the function does, used by the model to choose when and + # how to call the function. sig { returns(T.nilable(String)) } def description end @@ -531,6 +712,13 @@ module OpenAI def description=(_) end + # The parameters the functions accepts, described as a JSON Schema object. See the + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. + # + # Omitting `parameters` defines a function with an empty parameter list. sig { returns(T.nilable(OpenAI::Models::FunctionParameters)) } def parameters end @@ -564,10 +752,21 @@ module OpenAI end end + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. 
Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. class ResponseFormat < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -579,6 +778,20 @@ module OpenAI end end + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. class ServiceTier < OpenAI::Enum abstract! @@ -592,12 +805,15 @@ module OpenAI end end + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. class Stop < OpenAI::Union abstract! StringArray = T.type_alias { T::Array[String] } class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } private def variants end @@ -605,6 +821,8 @@ module OpenAI end class WebSearchOptions < OpenAI::BaseModel + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. sig { returns(T.nilable(Symbol)) } def search_context_size end @@ -613,6 +831,7 @@ module OpenAI def search_context_size=(_) end + # Approximate location parameters for the search. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)) } def user_location end @@ -624,6 +843,9 @@ module OpenAI def user_location=(_) end + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( search_context_size: Symbol, @@ -646,6 +868,8 @@ module OpenAI def to_hash end + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. class SearchContextSize < OpenAI::Enum abstract! @@ -661,6 +885,7 @@ module OpenAI end class UserLocation < OpenAI::BaseModel + # Approximate location parameters for the search. sig { returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) } def approximate end @@ -672,6 +897,7 @@ module OpenAI def approximate=(_) end + # The type of location approximation. Always `approximate`. sig { returns(Symbol) } def type end @@ -680,6 +906,7 @@ module OpenAI def type=(_) end + # Approximate location parameters for the search. sig do params( approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, @@ -703,6 +930,7 @@ module OpenAI end class Approximate < OpenAI::BaseModel + # Free text input for the city of the user, e.g. `San Francisco`. 
sig { returns(T.nilable(String)) } def city end @@ -711,6 +939,8 @@ module OpenAI def city=(_) end + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. sig { returns(T.nilable(String)) } def country end @@ -719,6 +949,7 @@ module OpenAI def country=(_) end + # Free text input for the region of the user, e.g. `California`. sig { returns(T.nilable(String)) } def region end @@ -727,6 +958,8 @@ module OpenAI def region=(_) end + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } def timezone end @@ -735,6 +968,7 @@ module OpenAI def timezone=(_) end + # Approximate location parameters for the search. sig do params( city: String, diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 224d64b7..66305617 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Identifier for the last chat completion from the previous pagination request. sig { returns(T.nilable(String)) } def after end @@ -15,6 +16,7 @@ module OpenAI def after=(_) end + # Number of Chat Completions to retrieve. sig { returns(T.nilable(Integer)) } def limit end @@ -23,6 +25,9 @@ module OpenAI def limit=(_) end + # A list of metadata keys to filter the Chat Completions by. Example: + # + # `metadata[key1]=value1&metadata[key2]=value2` sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -31,6 +36,7 @@ module OpenAI def metadata=(_) end + # The model used to generate the Chat Completions. sig { returns(T.nilable(String)) } def model end @@ -39,6 +45,8 @@ module OpenAI def model=(_) end + # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. sig { returns(T.nilable(Symbol)) } def order end @@ -77,6 +85,8 @@ module OpenAI def to_hash end + # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index 7c557df2..0b3aa56f 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -7,6 +7,12 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
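The `WebSearchOptions` and `UserLocation` fields documented above compose as below. The model name and all location values are illustrative assumptions:

```ruby
# Configure the web search tool with an approximate user location.
params = {
  model: "gpt-4o-search-preview", # assumed search-capable model
  messages: [{role: :user, content: "What changed in Ruby 3.4?"}],
  web_search_options: {
    search_context_size: :medium, # `low`, `medium` (default), or `high`
    user_location: {
      type: :approximate, # always `approximate`
      approximate: {city: "Oslo", country: "NO", timezone: "Europe/Oslo"}
    }
  }
}
```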
sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index b5474b6c..b639be67 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -8,6 +8,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Identifier for the last message from the previous pagination request. sig { returns(T.nilable(String)) } def after end @@ -16,6 +17,7 @@ module OpenAI def after=(_) end + # Number of messages to retrieve. sig { returns(T.nilable(Integer)) } def limit end @@ -24,6 +26,8 @@ module OpenAI def limit=(_) end + # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` + # for descending order. Defaults to `asc`. sig { returns(T.nilable(Symbol)) } def order end @@ -57,6 +61,8 @@ module OpenAI def to_hash end + # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` + # for descending order. Defaults to `asc`. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index a44961a9..48f85bd6 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ComparisonFilter < OpenAI::BaseModel + # The key to compare against the value. sig { returns(String) } def key end @@ -11,6 +12,14 @@ module OpenAI def key=(_) end + # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. + # + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal sig { returns(Symbol) } def type end @@ -19,6 +28,8 @@ module OpenAI def type=(_) end + # The value to compare against the attribute key; supports string, number, or + # boolean types. sig { returns(T.any(String, Float, T::Boolean)) } def value end @@ -27,6 +38,8 @@ module OpenAI def value=(_) end + # A filter used to compare a specified attribute key to a given value using a + # defined comparison operation. sig do params(key: String, type: Symbol, value: T.any(String, Float, T::Boolean)).returns(T.attached_class) end @@ -37,6 +50,14 @@ module OpenAI def to_hash end + # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. + # + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal class Type < OpenAI::Enum abstract! @@ -54,10 +75,13 @@ module OpenAI end end + # The value to compare against the attribute key; supports string, number, or + # boolean types. class Value < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index 53205098..5c9f6e01 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class Completion < OpenAI::BaseModel + # A unique identifier for the completion. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The list of completion choices the model generated for the input prompt. 
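The `after`/`limit`/`order` params documented above drive cursor pagination. A hedged sketch; the list call is commented out because the exact resource naming and the completion ID are assumptions:

```ruby
# First page, oldest messages first; then resume from the last ID seen.
params = {limit: 50, order: :asc}

# page = client.chat.completions.messages.list("chatcmpl_123", **params) # assumed
# params = params.merge(after: page.data.last.id) # cursor for the next page
```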
sig { returns(T::Array[OpenAI::Models::CompletionChoice]) } def choices end @@ -19,6 +21,7 @@ module OpenAI def choices=(_) end + # The Unix timestamp (in seconds) of when the completion was created. sig { returns(Integer) } def created end @@ -27,6 +30,7 @@ module OpenAI def created=(_) end + # The model used for completion. sig { returns(String) } def model end @@ -35,6 +39,7 @@ module OpenAI def model=(_) end + # The object type, which is always "text_completion" sig { returns(Symbol) } def object end @@ -43,6 +48,10 @@ module OpenAI def object=(_) end + # This fingerprint represents the backend configuration that the model runs with. + # + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. sig { returns(T.nilable(String)) } def system_fingerprint end @@ -51,6 +60,7 @@ module OpenAI def system_fingerprint=(_) end + # Usage statistics for the completion request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } def usage end @@ -59,6 +69,8 @@ module OpenAI def usage=(_) end + # Represents a completion response from the API. Note: both the streamed and + # non-streamed response objects share the same shape (unlike the chat endpoint). sig do params( id: String, diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 0e80d12b..860f0c29 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -3,6 +3,10 @@ module OpenAI module Models class CompletionChoice < OpenAI::BaseModel + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. sig { returns(Symbol) } def finish_reason end @@ -64,6 +68,10 @@ module OpenAI def to_hash end + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. class FinishReason < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index dc1a4be1..ea1a4342 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -6,6 +6,11 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, Symbol)) } def model end @@ -14,6 +19,12 @@ module OpenAI def model=(_) end + # The prompt(s) to generate completions for, encoded as a string, array of + # strings, array of tokens, or array of token arrays. + # + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. 
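A sketch against the legacy endpoint these models describe; `client` and the `completions.create` resource naming are assumptions, and the model name is illustrative:

```ruby
# Legacy text completion: `prompt` may be a string, an array of strings,
# or token arrays, per the docs above.
response = client.completions.create(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a haiku about patch files.",
  max_tokens: 64
)
puts response.choices.first.text
puts response.system_fingerprint # pair with `seed` to monitor backend changes
```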
sig do
        returns(T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])))
      end
      def prompt
      end
@@ -27,6 +38,15 @@ module OpenAI
      def prompt=(_)
      end

+      # Generates `best_of` completions server-side and returns the "best" (the one with
+      #   the highest log probability per token). Results cannot be streamed.
+      #
+      #   When used with `n`, `best_of` controls the number of candidate completions and
+      #   `n` specifies how many to return – `best_of` must be greater than `n`.
+      #
+      #   **Note:** Because this parameter generates many completions, it can quickly
+      #   consume your token quota. Use carefully and ensure that you have reasonable
+      #   settings for `max_tokens` and `stop`.
       sig { returns(T.nilable(Integer)) }
       def best_of
       end
@@ -35,6 +55,7 @@ module OpenAI
       def best_of=(_)
       end

+      # Echo back the prompt in addition to the completion
       sig { returns(T.nilable(T::Boolean)) }
       def echo
       end
@@ -43,6 +64,11 @@ module OpenAI
       def echo=(_)
       end

+      # Number between -2.0 and 2.0. Positive values penalize new tokens based on their
+      #   existing frequency in the text so far, decreasing the model's likelihood to
+      #   repeat the same line verbatim.
+      #
+      #   [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
       sig { returns(T.nilable(Float)) }
       def frequency_penalty
       end
@@ -51,6 +77,18 @@ module OpenAI
       def frequency_penalty=(_)
       end

+      # Modify the likelihood of specified tokens appearing in the completion.
+      #
+      #   Accepts a JSON object that maps tokens (specified by their token ID in the GPT
+      #   tokenizer) to an associated bias value from -100 to 100. You can use this
+      #   [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs.
+      #   Mathematically, the bias is added to the logits generated by the model prior to
+      #   sampling. The exact effect will vary per model, but values between -1 and 1
+      #   should decrease or increase likelihood of selection; values like -100 or 100
+      #   should result in a ban or exclusive selection of the relevant token.
+      #
+      #   As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
+      #   from being generated.
       sig { returns(T.nilable(T::Hash[Symbol, Integer])) }
       def logit_bias
       end
@@ -59,6 +97,12 @@ module OpenAI
       def logit_bias=(_)
       end

+      # Include the log probabilities on the `logprobs` most likely output tokens, as
+      #   well as the chosen tokens. For example, if `logprobs` is 5, the API will return a
+      #   list of the 5 most likely tokens. The API will always return the `logprob` of
+      #   the sampled token, so there may be up to `logprobs+1` elements in the response.
+      #
+      #   The maximum value for `logprobs` is 5.
       sig { returns(T.nilable(Integer)) }
       def logprobs
       end
@@ -67,6 +111,13 @@ module OpenAI
       def logprobs=(_)
       end

+      # The maximum number of [tokens](/tokenizer) that can be generated in the
+      #   completion.
+      #
+      #   The token count of your prompt plus `max_tokens` cannot exceed the model's
+      #   context length.
+      #   [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
+      #   for counting tokens.
       sig { returns(T.nilable(Integer)) }
       def max_tokens
       end
@@ -75,6 +126,11 @@ module OpenAI
       def max_tokens=(_)
       end

+      # How many completions to generate for each prompt.
+      #
+      #   **Note:** Because this parameter generates many completions, it can quickly
+      #   consume your token quota. Use carefully and ensure that you have reasonable
+      #   settings for `max_tokens` and `stop`.
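The `logit_bias` doc's own `{"50256": -100}` example in request form, combined with `logprobs` at its documented maximum. Values here are illustrative:

```ruby
# Ban <|endoftext|> (token ID 50256 in the GPT tokenizer) and request
# per-token log probabilities at the same time.
params = {
  model: "gpt-3.5-turbo-instruct",
  prompt: "Continue this sentence:",
  logit_bias: {"50256" => -100}, # -100 effectively bans the token
  logprobs: 5                    # 5 is the documented maximum
}
```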
sig { returns(T.nilable(Integer)) } def n end @@ -83,6 +139,11 @@ module OpenAI def n=(_) end + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) sig { returns(T.nilable(Float)) } def presence_penalty end @@ -91,6 +152,12 @@ module OpenAI def presence_penalty=(_) end + # If specified, our system will make a best effort to sample deterministically, + # such that repeated requests with the same `seed` and parameters should return + # the same result. + # + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. sig { returns(T.nilable(Integer)) } def seed end @@ -99,6 +166,8 @@ module OpenAI def seed=(_) end + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } def stop end @@ -116,6 +185,7 @@ module OpenAI def stop=(_) end + # Options for streaming response. Only set this when you set `stream: true`. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) } def stream_options end @@ -127,6 +197,9 @@ module OpenAI def stream_options=(_) end + # The suffix that comes after a completion of inserted text. + # + # This parameter is only supported for `gpt-3.5-turbo-instruct`. sig { returns(T.nilable(String)) } def suffix end @@ -135,6 +208,11 @@ module OpenAI def suffix=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. + # + # We generally recommend altering this or `top_p` but not both. sig { returns(T.nilable(Float)) } def temperature end @@ -143,6 +221,11 @@ module OpenAI def temperature=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -151,6 +234,9 @@ module OpenAI def top_p=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -246,9 +332,19 @@ module OpenAI def to_hash end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class Model < OpenAI::Union abstract! + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class Preset < OpenAI::Enum abstract! 
@@ -264,12 +360,19 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The prompt(s) to generate completions for, encoded as a string, array of + # strings, array of tokens, or array of token arrays. + # + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. class Prompt < OpenAI::Union abstract! @@ -280,6 +383,7 @@ module OpenAI ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } class << self + # @api private sig do override .returns( @@ -296,12 +400,15 @@ module OpenAI end end + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. class Stop < OpenAI::Union abstract! StringArray = T.type_alias { T::Array[String] } class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } private def variants end diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index 36ebba0c..de91da0f 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class CompletionUsage < OpenAI::BaseModel + # Number of tokens in the generated completion. sig { returns(Integer) } def completion_tokens end @@ -11,6 +12,7 @@ module OpenAI def completion_tokens=(_) end + # Number of tokens in the prompt. sig { returns(Integer) } def prompt_tokens end @@ -19,6 +21,7 @@ module OpenAI def prompt_tokens=(_) end + # Total number of tokens used in the request (prompt + completion). sig { returns(Integer) } def total_tokens end @@ -27,6 +30,7 @@ module OpenAI def total_tokens=(_) end + # Breakdown of tokens used in a completion. sig { returns(T.nilable(OpenAI::Models::CompletionUsage::CompletionTokensDetails)) } def completion_tokens_details end @@ -38,6 +42,7 @@ module OpenAI def completion_tokens_details=(_) end + # Breakdown of tokens used in the prompt. sig { returns(T.nilable(OpenAI::Models::CompletionUsage::PromptTokensDetails)) } def prompt_tokens_details end @@ -49,6 +54,7 @@ module OpenAI def prompt_tokens_details=(_) end + # Usage statistics for the completion request. sig do params( completion_tokens: Integer, @@ -84,6 +90,8 @@ module OpenAI end class CompletionTokensDetails < OpenAI::BaseModel + # When using Predicted Outputs, the number of tokens in the prediction that + # appeared in the completion. sig { returns(T.nilable(Integer)) } def accepted_prediction_tokens end @@ -92,6 +100,7 @@ module OpenAI def accepted_prediction_tokens=(_) end + # Audio input tokens generated by the model. sig { returns(T.nilable(Integer)) } def audio_tokens end @@ -100,6 +109,7 @@ module OpenAI def audio_tokens=(_) end + # Tokens generated by the model for reasoning. sig { returns(T.nilable(Integer)) } def reasoning_tokens end @@ -108,6 +118,10 @@ module OpenAI def reasoning_tokens=(_) end + # When using Predicted Outputs, the number of tokens in the prediction that did + # not appear in the completion. However, like reasoning tokens, these tokens are + # still counted in the total completion tokens for purposes of billing, output, + # and context window limits. 
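The `CompletionUsage` fields above imply a simple invariant (total = prompt + completion) that client code can sanity-check when reconciling billing. A sketch; `response` is assumed to be a completion response obtained elsewhere:

```ruby
# total_tokens is documented as prompt + completion.
usage = response.usage
if usage && usage.total_tokens != usage.prompt_tokens + usage.completion_tokens
  warn "usage fields do not reconcile; inspect the raw response"
end
```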
sig { returns(T.nilable(Integer)) } def rejected_prediction_tokens end @@ -116,6 +130,7 @@ module OpenAI def rejected_prediction_tokens=(_) end + # Breakdown of tokens used in a completion. sig do params( accepted_prediction_tokens: Integer, @@ -144,6 +159,7 @@ module OpenAI end class PromptTokensDetails < OpenAI::BaseModel + # Audio input tokens present in the prompt. sig { returns(T.nilable(Integer)) } def audio_tokens end @@ -152,6 +168,7 @@ module OpenAI def audio_tokens=(_) end + # Cached tokens present in the prompt. sig { returns(T.nilable(Integer)) } def cached_tokens end @@ -160,6 +177,7 @@ module OpenAI def cached_tokens=(_) end + # Breakdown of tokens used in the prompt. sig { params(audio_tokens: Integer, cached_tokens: Integer).returns(T.attached_class) } def self.new(audio_tokens: nil, cached_tokens: nil) end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index e8e64bd4..90cc38f4 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class CompoundFilter < OpenAI::BaseModel + # Array of filters to combine. Items can be `ComparisonFilter` or + # `CompoundFilter`. sig { returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) } def filters end @@ -14,6 +16,7 @@ module OpenAI def filters=(_) end + # Type of operation: `and` or `or`. sig { returns(Symbol) } def type end @@ -22,6 +25,7 @@ module OpenAI def type=(_) end + # Combine multiple filters using `and` or `or`. sig do params(filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol) .returns(T.attached_class) @@ -33,16 +37,20 @@ module OpenAI def to_hash end + # A filter used to compare a specified attribute key to a given value using a + # defined comparison operation. class Filter < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, T.anything]]) } private def variants end end end + # Type of operation: `and` or `or`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index 6f823131..c095b791 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class CreateEmbeddingResponse < OpenAI::BaseModel + # The list of embeddings generated by the model. sig { returns(T::Array[OpenAI::Models::Embedding]) } def data end @@ -11,6 +12,7 @@ module OpenAI def data=(_) end + # The name of the model used to generate the embedding. sig { returns(String) } def model end @@ -19,6 +21,7 @@ module OpenAI def model=(_) end + # The object type, which is always "list". sig { returns(Symbol) } def object end @@ -27,6 +30,7 @@ module OpenAI def object=(_) end + # The usage information for the request. sig { returns(OpenAI::Models::CreateEmbeddingResponse::Usage) } def usage end @@ -65,6 +69,7 @@ module OpenAI end class Usage < OpenAI::BaseModel + # The number of tokens used by the prompt. sig { returns(Integer) } def prompt_tokens end @@ -73,6 +78,7 @@ module OpenAI def prompt_tokens=(_) end + # The total number of tokens used by the request. sig { returns(Integer) } def total_tokens end @@ -81,6 +87,7 @@ module OpenAI def total_tokens=(_) end + # The usage information for the request. 
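Composing the filter models documented above: `ComparisonFilter`-shaped hashes nest inside a `CompoundFilter`. Attribute names and values are illustrative:

```ruby
# Match documents where author == "openai" AND year >= 2023.
compound_filter = {
  type: :and,
  filters: [
    {key: "author", type: :eq,  value: "openai"},
    {key: "year",   type: :gte, value: 2023}
  ]
}
```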
sig { params(prompt_tokens: Integer, total_tokens: Integer).returns(T.attached_class) } def self.new(prompt_tokens:, total_tokens:) end diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index 11cc9072..4218f3b9 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -3,6 +3,9 @@ module OpenAI module Models class Embedding < OpenAI::BaseModel + # The embedding vector, which is a list of floats. The length of vector depends on + # the model as listed in the + # [embedding guide](https://platform.openai.com/docs/guides/embeddings). sig { returns(T::Array[Float]) } def embedding end @@ -11,6 +14,7 @@ module OpenAI def embedding=(_) end + # The index of the embedding in the list of embeddings. sig { returns(Integer) } def index end @@ -19,6 +23,7 @@ module OpenAI def index=(_) end + # The object type, which is always "embedding". sig { returns(Symbol) } def object end @@ -27,6 +32,7 @@ module OpenAI def object=(_) end + # Represents an embedding vector returned by embedding endpoint. sig { params(embedding: T::Array[Float], index: Integer, object: Symbol).returns(T.attached_class) } def self.new(embedding:, index:, object: :embedding) end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 2a08c856..bc012b2b 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -6,6 +6,14 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Input text to embed, encoded as a string or array of tokens. To embed multiple + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. sig { returns(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) } def input end @@ -17,6 +25,11 @@ module OpenAI def input=(_) end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, Symbol)) } def model end @@ -25,6 +38,8 @@ module OpenAI def model=(_) end + # The number of dimensions the resulting output embeddings should have. Only + # supported in `text-embedding-3` and later models. sig { returns(T.nilable(Integer)) } def dimensions end @@ -33,6 +48,8 @@ module OpenAI def dimensions=(_) end + # The format to return the embeddings in. Can be either `float` or + # [`base64`](https://pypi.org/project/pybase64/). sig { returns(T.nilable(Symbol)) } def encoding_format end @@ -41,6 +58,9 @@ module OpenAI def encoding_format=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -79,6 +99,14 @@ module OpenAI def to_hash end + # Input text to embed, encoded as a string or array of tokens. 
To embed multiple + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. class Input < OpenAI::Union abstract! @@ -89,6 +117,7 @@ module OpenAI ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } class << self + # @api private sig do override .returns( @@ -105,16 +134,24 @@ module OpenAI end end + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The format to return the embeddings in. Can be either `float` or + # [`base64`](https://pypi.org/project/pybase64/). class EncodingFormat < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index b0b4a1b0..a7159b15 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -2,10 +2,12 @@ module OpenAI module Models + # The strategy used to chunk the file. class FileChunkingStrategy < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 9a360f39..ccabae20 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -2,10 +2,13 @@ module OpenAI module Models + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. class FileChunkingStrategyParam < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index aa9afe8a..98619f88 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -6,6 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The File object (not file name) to be uploaded. sig { returns(T.any(IO, StringIO)) } def file end @@ -14,6 +15,10 @@ module OpenAI def file=(_) end + # The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets sig { returns(Symbol) } def purpose end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 2da43a92..e5f795dc 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -6,6 +6,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -14,6 +18,8 @@ module OpenAI def after=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 10,000, and the default is 10,000. sig { returns(T.nilable(Integer)) } def limit end @@ -22,6 +28,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -30,6 +38,7 @@ module OpenAI def order=(_) end + # Only return files with the given purpose. sig { returns(T.nilable(String)) } def purpose end @@ -66,6 +75,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 91a4d778..6659dc79 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class FileObject < OpenAI::BaseModel + # The file identifier, which can be referenced in the API endpoints. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The size of the file, in bytes. sig { returns(Integer) } def bytes end @@ -19,6 +21,7 @@ module OpenAI def bytes=(_) end + # The Unix timestamp (in seconds) for when the file was created. sig { returns(Integer) } def created_at end @@ -27,6 +30,7 @@ module OpenAI def created_at=(_) end + # The name of the file. sig { returns(String) } def filename end @@ -35,6 +39,7 @@ module OpenAI def filename=(_) end + # The object type, which is always `file`. sig { returns(Symbol) } def object end @@ -43,6 +48,9 @@ module OpenAI def object=(_) end + # The intended purpose of the file. Supported values are `assistants`, + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. sig { returns(Symbol) } def purpose end @@ -51,6 +59,8 @@ module OpenAI def purpose=(_) end + # Deprecated. The current status of the file, which can be either `uploaded`, + # `processed`, or `error`. sig { returns(Symbol) } def status end @@ -59,6 +69,7 @@ module OpenAI def status=(_) end + # The Unix timestamp (in seconds) for when the file will expire. sig { returns(T.nilable(Integer)) } def expires_at end @@ -67,6 +78,8 @@ module OpenAI def expires_at=(_) end + # Deprecated. For details on why a fine-tuning training file failed validation, + # see the `error` field on `fine_tuning.job`. 
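[Editor's note] `FileListParams` above is a standard cursor-paginated list shape: `after` carries the last-seen object ID. A hedged paging sketch; the `client.files.list` accessor and the `.data` traversal on its return value are assumptions.

# Hypothetical pagination sketch; accessor names are assumptions.
page = client.files.list(limit: 100, order: :desc, purpose: "fine-tune")
page.data.each { |f| puts "#{f.id} #{f.filename} (#{f.bytes} bytes)" }

# Pass the last object ID as `after` to fetch the next page.
client.files.list(after: page.data.last.id, limit: 100)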
sig { returns(T.nilable(String)) } def status_details end @@ -75,6 +88,7 @@ module OpenAI def status_details=(_) end + # The `File` object represents a document that has been uploaded to OpenAI. sig do params( id: String, @@ -121,6 +135,9 @@ module OpenAI def to_hash end + # The intended purpose of the file. Supported values are `assistants`, + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. class Purpose < OpenAI::Enum abstract! @@ -139,6 +156,8 @@ module OpenAI end end + # Deprecated. The current status of the file, which can be either `uploaded`, + # `processed`, or `error`. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index a30abf94..edc943c5 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -2,6 +2,10 @@ module OpenAI module Models + # The intended purpose of the uploaded file. One of: - `assistants`: Used in the + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets class FilePurpose < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index ece16391..b6221aef 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -6,6 +6,7 @@ module OpenAI module FineTuning class FineTuningJob < OpenAI::BaseModel + # The object identifier, which can be referenced in the API endpoints. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the fine-tuning job was created. sig { returns(Integer) } def created_at end @@ -22,6 +24,8 @@ module OpenAI def created_at=(_) end + # For fine-tuning jobs that have `failed`, this will contain more information on + # the cause of the failure. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) } def error end @@ -33,6 +37,8 @@ module OpenAI def error=(_) end + # The name of the fine-tuned model that is being created. The value will be null + # if the fine-tuning job is still running. sig { returns(T.nilable(String)) } def fine_tuned_model end @@ -41,6 +47,8 @@ module OpenAI def fine_tuned_model=(_) end + # The Unix timestamp (in seconds) for when the fine-tuning job was finished. The + # value will be null if the fine-tuning job is still running. sig { returns(T.nilable(Integer)) } def finished_at end @@ -49,6 +57,8 @@ module OpenAI def finished_at=(_) end + # The hyperparameters used for the fine-tuning job. This value will only be + # returned when running `supervised` jobs. sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) } def hyperparameters end @@ -60,6 +70,7 @@ module OpenAI def hyperparameters=(_) end + # The base model that is being fine-tuned. sig { returns(String) } def model end @@ -68,6 +79,7 @@ module OpenAI def model=(_) end + # The object type, which is always "fine_tuning.job". sig { returns(Symbol) } def object end @@ -76,6 +88,7 @@ module OpenAI def object=(_) end + # The organization that owns the fine-tuning job. sig { returns(String) } def organization_id end @@ -84,6 +97,9 @@ module OpenAI def organization_id=(_) end + # The compiled results file ID(s) for the fine-tuning job. 
You can retrieve the + # results with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T::Array[String]) } def result_files end @@ -92,6 +108,7 @@ module OpenAI def result_files=(_) end + # The seed used for the fine-tuning job. sig { returns(Integer) } def seed end @@ -100,6 +117,8 @@ module OpenAI def seed=(_) end + # The current status of the fine-tuning job, which can be either + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. sig { returns(Symbol) } def status end @@ -108,6 +127,8 @@ module OpenAI def status=(_) end + # The total number of billable tokens processed by this fine-tuning job. The value + # will be null if the fine-tuning job is still running. sig { returns(T.nilable(Integer)) } def trained_tokens end @@ -116,6 +137,8 @@ module OpenAI def trained_tokens=(_) end + # The file ID used for training. You can retrieve the training data with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(String) } def training_file end @@ -124,6 +147,9 @@ module OpenAI def training_file=(_) end + # The file ID used for validation. You can retrieve the validation results with + # the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T.nilable(String)) } def validation_file end @@ -132,6 +158,8 @@ module OpenAI def validation_file=(_) end + # The Unix timestamp (in seconds) for when the fine-tuning job is estimated to + # finish. The value will be null if the fine-tuning job is not running. sig { returns(T.nilable(Integer)) } def estimated_finish end @@ -140,6 +168,7 @@ module OpenAI def estimated_finish=(_) end + # A list of integrations to enable for this fine-tuning job. sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject])) } def integrations end @@ -151,6 +180,12 @@ module OpenAI def integrations=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -159,6 +194,7 @@ module OpenAI def metadata=(_) end + # The method used for fine-tuning. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method)) } def method_ end @@ -170,6 +206,8 @@ module OpenAI def method_=(_) end + # The `fine_tuning.job` object represents a fine-tuning job that has been created + # through the API. sig do params( id: String, @@ -247,6 +285,7 @@ module OpenAI end class Error < OpenAI::BaseModel + # A machine-readable error code. sig { returns(String) } def code end @@ -255,6 +294,7 @@ module OpenAI def code=(_) end + # A human-readable error message. sig { returns(String) } def message end @@ -263,6 +303,8 @@ module OpenAI def message=(_) end + # The parameter that was invalid, usually `training_file` or `validation_file`. + # This field will be null if the failure was not parameter-specific. sig { returns(T.nilable(String)) } def param end @@ -271,6 +313,8 @@ module OpenAI def param=(_) end + # For fine-tuning jobs that have `failed`, this will contain more information on + # the cause of the failure. 
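[Editor's note] Given the `FineTuningJob` fields documented above, a caller would typically branch on `status` and surface the `error` detail on failure. A sketch; the `client.fine_tuning.jobs.retrieve` chain is an assumption.

# Hypothetical polling sketch; the retrieve call is assumed.
job = client.fine_tuning.jobs.retrieve("ftjob-abc123")

case job.status
when :succeeded
  puts "model: #{job.fine_tuned_model}, result files: #{job.result_files.join(", ")}"
when :failed
  warn "#{job.error.code}: #{job.error.message} (param: #{job.error.param})"
else
  puts "still #{job.status}, estimated finish: #{job.estimated_finish.inspect}"
end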
sig { params(code: String, message: String, param: T.nilable(String)).returns(T.attached_class) } def self.new(code:, message:, param:) end @@ -281,6 +325,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -289,6 +335,8 @@ module OpenAI def batch_size=(_) end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -297,6 +345,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -305,6 +355,8 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. This value will only be + # returned when running `supervised` jobs. sig do params( batch_size: T.any(Symbol, Integer), @@ -329,30 +381,39 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -360,6 +421,8 @@ module OpenAI end end + # The current status of the fine-tuning job, which can be either + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. class Status < OpenAI::Enum abstract! @@ -378,6 +441,7 @@ module OpenAI end class Method < OpenAI::BaseModel + # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo)) } def dpo end @@ -389,6 +453,7 @@ module OpenAI def dpo=(_) end + # Configuration for the supervised fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised)) } def supervised end @@ -400,6 +465,7 @@ module OpenAI def supervised=(_) end + # The type of method. Is either `supervised` or `dpo`. sig { returns(T.nilable(Symbol)) } def type end @@ -408,6 +474,7 @@ module OpenAI def type=(_) end + # The method used for fine-tuning. sig do params( dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, @@ -433,6 +500,7 @@ module OpenAI end class Dpo < OpenAI::BaseModel + # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters)) } def hyperparameters end @@ -444,6 +512,7 @@ module OpenAI def hyperparameters=(_) end + # Configuration for the DPO fine-tuning method. 
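[Editor's note] The `T.any(Symbol, Integer)` / `T.any(Symbol, Float)` unions on these hyperparameters exist because each field accepts either the literal `:auto` or an explicit number. A construction sketch, assuming keyword-style construction works as the `sig` suggests:

# `:auto` defers the choice to the service; a number pins it explicitly.
hp = OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters.new(
  batch_size: :auto,
  learning_rate_multiplier: 0.1,
  n_epochs: 3
)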
sig do params(hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters) .returns(T.attached_class) @@ -459,6 +528,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -467,6 +538,8 @@ module OpenAI def batch_size=(_) end + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } def beta end @@ -475,6 +548,8 @@ module OpenAI def beta=(_) end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -483,6 +558,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -491,6 +568,7 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. sig do params( batch_size: T.any(Symbol, Integer), @@ -517,40 +595,52 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. class Beta < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -560,6 +650,7 @@ module OpenAI end class Supervised < OpenAI::BaseModel + # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters)) } def hyperparameters end @@ -571,6 +662,7 @@ module OpenAI def hyperparameters=(_) end + # Configuration for the supervised fine-tuning method. sig do params(hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters) .returns(T.attached_class) @@ -586,6 +678,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -594,6 +688,8 @@ module OpenAI def batch_size=(_) end + # Scaling factor for the learning rate. 
A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -602,6 +698,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -610,6 +708,7 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. sig do params( batch_size: T.any(Symbol, Integer), @@ -634,30 +733,39 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -666,6 +774,7 @@ module OpenAI end end + # The type of method. Is either `supervised` or `dpo`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 15ce95c4..3f5027cb 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -6,6 +6,7 @@ module OpenAI module FineTuning class FineTuningJobEvent < OpenAI::BaseModel + # The object identifier. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the fine-tuning job was created. sig { returns(Integer) } def created_at end @@ -22,6 +24,7 @@ module OpenAI def created_at=(_) end + # The log level of the event. sig { returns(Symbol) } def level end @@ -30,6 +33,7 @@ module OpenAI def level=(_) end + # The message of the event. sig { returns(String) } def message end @@ -38,6 +42,7 @@ module OpenAI def message=(_) end + # The object type, which is always "fine_tuning.job.event". sig { returns(Symbol) } def object end @@ -46,6 +51,7 @@ module OpenAI def object=(_) end + # The data associated with the event. sig { returns(T.nilable(T.anything)) } def data end @@ -54,6 +60,7 @@ module OpenAI def data=(_) end + # The type of event. sig { returns(T.nilable(Symbol)) } def type end @@ -62,6 +69,7 @@ module OpenAI def type=(_) end + # Fine-tuning job event object sig do params( id: String, @@ -94,6 +102,7 @@ module OpenAI def to_hash end + # The log level of the event. class Level < OpenAI::Enum abstract! @@ -108,6 +117,7 @@ module OpenAI end end + # The type of event. class Type < OpenAI::Enum abstract! 
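[Editor's note] `FineTuningJobEvent`, documented just above, is what event listings yield. A brief consumption sketch; the `list_events` call is an assumption about the resource API.

# Hypothetical event-listing sketch; accessor names are assumed.
events = client.fine_tuning.jobs.list_events("ftjob-abc123", limit: 20)
events.data.each do |ev|
  puts "[#{Time.at(ev.created_at)}] #{ev.level}: #{ev.message}"
end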
diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index 37b15696..f3899e11 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -6,6 +6,7 @@ module OpenAI module FineTuning class FineTuningJobWandbIntegration < OpenAI::BaseModel + # The name of the project that the new run will be created under. sig { returns(String) } def project end @@ -14,6 +15,9 @@ module OpenAI def project=(_) end + # The entity to use for the run. This allows you to set the team or username of + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } def entity end @@ -22,6 +26,8 @@ module OpenAI def entity=(_) end + # A display name to set for the run. If not set, we will use the Job ID as the + # name. sig { returns(T.nilable(String)) } def name end @@ -30,6 +36,9 @@ module OpenAI def name=(_) end + # A list of tags to be attached to the newly created run. These tags are passed + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } def tags end @@ -38,6 +47,10 @@ module OpenAI def tags=(_) end + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig do params(project: String, entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String]) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index 148ab9c5..f6af3f86 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -6,6 +6,7 @@ module OpenAI module FineTuning class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel + # The type of the integration being enabled for the fine-tuning job sig { returns(Symbol) } def type end @@ -14,6 +15,10 @@ module OpenAI def type=(_) end + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig { returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) } def wandb end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 269a125d..9747dd2a 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The name of the model to fine-tune. You can select one of the + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). 
sig { returns(T.any(String, Symbol)) } def model end @@ -15,6 +17,23 @@ module OpenAI def model=(_) end + # The ID of an uploaded file that contains training data. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. + # + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. sig { returns(String) } def training_file end @@ -23,6 +42,8 @@ module OpenAI def training_file=(_) end + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters)) } def hyperparameters end @@ -34,6 +55,7 @@ module OpenAI def hyperparameters=(_) end + # A list of integrations to enable for your fine-tuning job. sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration])) } def integrations end @@ -45,6 +67,12 @@ module OpenAI def integrations=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -53,6 +81,7 @@ module OpenAI def metadata=(_) end + # The method used for fine-tuning. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) } def method_ end @@ -64,6 +93,9 @@ module OpenAI def method_=(_) end + # The seed controls the reproducibility of the job. Passing in the same seed and + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. sig { returns(T.nilable(Integer)) } def seed end @@ -72,6 +104,11 @@ module OpenAI def seed=(_) end + # A string of up to 64 characters that will be added to your fine-tuned model + # name. + # + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. sig { returns(T.nilable(String)) } def suffix end @@ -80,6 +117,18 @@ module OpenAI def suffix=(_) end + # The ID of an uploaded file that contains validation data. + # + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. + # + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. 
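[Editor's note] `JobCreateParams` above maps directly onto a create call. Note the `method_` spelling (trailing underscore) used throughout these signatures to avoid clashing with Ruby's built-in `method`. The call shape below is a hedged sketch, not a confirmed API.

# Hypothetical create sketch; `client.fine_tuning.jobs.create` is assumed.
job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini",
  training_file: "file-abc123",
  validation_file: "file-def456",
  seed: 42,
  suffix: "custom-model-name",
  method_: {type: :supervised, supervised: {hyperparameters: {n_epochs: :auto}}}
)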
sig { returns(T.nilable(String)) } def validation_file end @@ -137,9 +186,13 @@ module OpenAI def to_hash end + # The name of the model to fine-tune. You can select one of the + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). class Model < OpenAI::Union abstract! + # The name of the model to fine-tune. You can select one of the + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). class Preset < OpenAI::Enum abstract! @@ -156,6 +209,7 @@ module OpenAI end class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end @@ -163,6 +217,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -171,6 +227,8 @@ module OpenAI def batch_size=(_) end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -179,6 +237,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -187,6 +247,8 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. sig do params( batch_size: T.any(Symbol, Integer), @@ -211,30 +273,39 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -243,6 +314,8 @@ module OpenAI end class Integration < OpenAI::BaseModel + # The type of integration to enable. Currently, only "wandb" (Weights and Biases) + # is supported. sig { returns(Symbol) } def type end @@ -251,6 +324,10 @@ module OpenAI def type=(_) end + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig { returns(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) } def wandb end @@ -274,6 +351,7 @@ module OpenAI end class Wandb < OpenAI::BaseModel + # The name of the project that the new run will be created under. 
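[Editor's note] The `Integration` / `Wandb` pair documented here nests as a typed payload on job creation. A sketch of the shape; field names come from the signatures above, while passing plain hashes for nested models is an assumption.

# Hypothetical `integrations` payload for a jobs.create call.
wandb_integration = {
  type: :wandb,
  wandb: {
    project: "my-project",
    entity: "my-team",        # nil falls back to the registered WandB API key's default
    name: "nightly-run",      # nil falls back to the job ID
    tags: ["openai/finetune"]
  }
}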
sig { returns(String) } def project end @@ -282,6 +360,9 @@ module OpenAI def project=(_) end + # The entity to use for the run. This allows you to set the team or username of + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } def entity end @@ -290,6 +371,8 @@ module OpenAI def entity=(_) end + # A display name to set for the run. If not set, we will use the Job ID as the + # name. sig { returns(T.nilable(String)) } def name end @@ -298,6 +381,9 @@ module OpenAI def name=(_) end + # A list of tags to be attached to the newly created run. These tags are passed + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } def tags end @@ -306,6 +392,10 @@ module OpenAI def tags=(_) end + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig do params( project: String, @@ -333,6 +423,7 @@ module OpenAI end class Method < OpenAI::BaseModel + # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo)) } def dpo end @@ -344,6 +435,7 @@ module OpenAI def dpo=(_) end + # Configuration for the supervised fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised)) } def supervised end @@ -355,6 +447,7 @@ module OpenAI def supervised=(_) end + # The type of method. Is either `supervised` or `dpo`. sig { returns(T.nilable(Symbol)) } def type end @@ -363,6 +456,7 @@ module OpenAI def type=(_) end + # The method used for fine-tuning. sig do params( dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, @@ -388,6 +482,7 @@ module OpenAI end class Dpo < OpenAI::BaseModel + # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters)) } def hyperparameters end @@ -399,6 +494,7 @@ module OpenAI def hyperparameters=(_) end + # Configuration for the DPO fine-tuning method. sig do params(hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters) .returns(T.attached_class) @@ -414,6 +510,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -422,6 +520,8 @@ module OpenAI def batch_size=(_) end + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } def beta end @@ -430,6 +530,8 @@ module OpenAI def beta=(_) end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -438,6 +540,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. 
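[Editor's note] The DPO hyperparameters documented above add `beta` on top of the supervised set, with the same `:auto`-or-number convention. A sketch of selecting the DPO method; the hash-based shape is an assumption.

# Hypothetical DPO `method_` payload for a jobs.create call.
dpo_method = {
  type: :dpo,
  dpo: {
    hyperparameters: {beta: :auto, batch_size: 8, learning_rate_multiplier: :auto, n_epochs: 3}
  }
}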
sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -446,6 +550,7 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. sig do params( batch_size: T.any(Symbol, Integer), @@ -472,40 +577,52 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. class Beta < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -515,6 +632,7 @@ module OpenAI end class Supervised < OpenAI::BaseModel + # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters)) } def hyperparameters end @@ -526,6 +644,7 @@ module OpenAI def hyperparameters=(_) end + # Configuration for the supervised fine-tuning method. sig do params(hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters) .returns(T.attached_class) @@ -543,6 +662,8 @@ module OpenAI end class Hyperparameters < OpenAI::BaseModel + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } def batch_size end @@ -551,6 +672,8 @@ module OpenAI def batch_size=(_) end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } def learning_rate_multiplier end @@ -559,6 +682,8 @@ module OpenAI def learning_rate_multiplier=(_) end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } def n_epochs end @@ -567,6 +692,7 @@ module OpenAI def n_epochs=(_) end + # The hyperparameters used for the fine-tuning job. sig do params( batch_size: T.any(Symbol, Integer), @@ -591,30 +717,39 @@ module OpenAI def to_hash end + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. class BatchSize < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end end end + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. class LearningRateMultiplier < OpenAI::Union abstract! 
class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } private def variants end end end + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. class NEpochs < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } private def variants end @@ -623,6 +758,7 @@ module OpenAI end end + # The type of method. Is either `supervised` or `dpo`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index ba90b85e..fc4ede17 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Identifier for the last event from the previous pagination request. sig { returns(T.nilable(String)) } def after end @@ -15,6 +16,7 @@ module OpenAI def after=(_) end + # Number of events to retrieve. sig { returns(T.nilable(Integer)) } def limit end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index e217f2fe..6e667d46 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Identifier for the last job from the previous pagination request. sig { returns(T.nilable(String)) } def after end @@ -15,6 +16,7 @@ module OpenAI def after=(_) end + # Number of fine-tuning jobs to retrieve. sig { returns(T.nilable(Integer)) } def limit end @@ -23,6 +25,8 @@ module OpenAI def limit=(_) end + # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. + # Alternatively, set `metadata=null` to indicate no metadata. sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index a032fba4..27b1407f 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -8,6 +8,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Identifier for the last checkpoint ID from the previous pagination request. sig { returns(T.nilable(String)) } def after end @@ -16,6 +17,7 @@ module OpenAI def after=(_) end + # Number of checkpoints to retrieve. sig { returns(T.nilable(Integer)) } def limit end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index 077add44..54fe9d93 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -5,6 +5,7 @@ module OpenAI module FineTuning module Jobs class FineTuningJobCheckpoint < OpenAI::BaseModel + # The checkpoint identifier, which can be referenced in the API endpoints. sig { returns(String) } def id end @@ -13,6 +14,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the checkpoint was created. 
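[Editor's note] `CheckpointListParams` and the `FineTuningJobCheckpoint` model being documented here pair up as another cursor-paginated list. A sketch; the nested `checkpoints.list` accessor is an assumption.

# Hypothetical checkpoint-listing sketch; accessor names are assumed.
checkpoints = client.fine_tuning.jobs.checkpoints.list("ftjob-abc123", limit: 10)
checkpoints.data.each { |cp| puts "#{cp.id} created at #{Time.at(cp.created_at)}" }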
sig { returns(Integer) } def created_at end @@ -21,6 +23,7 @@ module OpenAI def created_at=(_) end + # The name of the fine-tuned checkpoint model that is created. sig { returns(String) } def fine_tuned_model_checkpoint end @@ -29,6 +32,7 @@ module OpenAI def fine_tuned_model_checkpoint=(_) end + # The name of the fine-tuning job that this checkpoint was created from. sig { returns(String) } def fine_tuning_job_id end @@ -37,6 +41,7 @@ module OpenAI def fine_tuning_job_id=(_) end + # Metrics at the step number during the fine-tuning job. sig { returns(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) } def metrics end @@ -48,6 +53,7 @@ module OpenAI def metrics=(_) end + # The object type, which is always "fine_tuning.job.checkpoint". sig { returns(Symbol) } def object end @@ -56,6 +62,7 @@ module OpenAI def object=(_) end + # The step number that the checkpoint was created at. sig { returns(Integer) } def step_number end @@ -64,6 +71,8 @@ module OpenAI def step_number=(_) end + # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a + # fine-tuning job that is ready to use. sig do params( id: String, @@ -161,6 +170,7 @@ module OpenAI def valid_mean_token_accuracy=(_) end + # Metrics at the step number during the fine-tuning job. sig do params( full_valid_loss: Float, diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index f4fe38c3..75a4000f 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class FunctionDefinition < OpenAI::BaseModel + # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + # underscores and dashes, with a maximum length of 64. sig { returns(String) } def name end @@ -11,6 +13,8 @@ module OpenAI def name=(_) end + # A description of what the function does, used by the model to choose when and + # how to call the function. sig { returns(T.nilable(String)) } def description end @@ -19,6 +23,13 @@ module OpenAI def description=(_) end + # The parameters the functions accepts, described as a JSON Schema object. See the + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. + # + # Omitting `parameters` defines a function with an empty parameter list. sig { returns(T.nilable(OpenAI::Models::FunctionParameters)) } def parameters end @@ -27,6 +38,11 @@ module OpenAI def parameters=(_) end + # Whether to enable strict schema adherence when generating the function call. If + # set to true, the model will follow the exact schema defined in the `parameters` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + # more about Structured Outputs in the + # [function calling guide](docs/guides/function-calling). sig { returns(T.nilable(T::Boolean)) } def strict end diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index 81607a7a..3c0fc4fa 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Image < OpenAI::BaseModel + # The base64-encoded JSON of the generated image, if `response_format` is + # `b64_json`. 
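[Editor's note] `FunctionDefinition` above is the standard tool-function schema; with `strict: true` the model is held to the exact JSON Schema in `parameters`. A construction sketch, assuming keyword-style construction as the `sig` suggests:

# Hypothetical construction sketch for a strict function tool.
fn = OpenAI::Models::FunctionDefinition.new(
  name: "get_weather",
  description: "Look up the current weather for a city.",
  parameters: {
    type: "object",
    properties: {city: {type: "string"}},
    required: ["city"],
    additionalProperties: false
  },
  strict: true
)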
sig { returns(T.nilable(String)) } def b64_json end @@ -11,6 +13,8 @@ module OpenAI def b64_json=(_) end + # The prompt that was used to generate the image, if there was any revision to the + # prompt. sig { returns(T.nilable(String)) } def revised_prompt end @@ -19,6 +23,7 @@ module OpenAI def revised_prompt=(_) end + # The URL of the generated image, if `response_format` is `url` (default). sig { returns(T.nilable(String)) } def url end @@ -27,6 +32,7 @@ module OpenAI def url=(_) end + # Represents the url or the content of an image generated by the OpenAI API. sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } def self.new(b64_json: nil, revised_prompt: nil, url: nil) end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 1d40fb57..85b77ba9 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The image to use as the basis for the variation(s). Must be a valid PNG file, + # less than 4MB, and square. sig { returns(T.any(IO, StringIO)) } def image end @@ -14,6 +16,8 @@ module OpenAI def image=(_) end + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -22,6 +26,8 @@ module OpenAI def model=(_) end + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. sig { returns(T.nilable(Integer)) } def n end @@ -30,6 +36,9 @@ module OpenAI def n=(_) end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(Symbol)) } def response_format end @@ -38,6 +47,8 @@ module OpenAI def response_format=(_) end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. sig { returns(T.nilable(Symbol)) } def size end @@ -46,6 +57,9 @@ module OpenAI def size=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -86,16 +100,22 @@ module OpenAI def to_hash end + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. class ResponseFormat < OpenAI::Enum abstract! @@ -109,6 +129,8 @@ module OpenAI end end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. class Size < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index fb3b78a8..e3241afe 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The image to edit. 
Must be a valid PNG file, less than 4MB, and square. If mask + # is not provided, image must have transparency, which will be used as the mask. sig { returns(T.any(IO, StringIO)) } def image end @@ -14,6 +16,8 @@ module OpenAI def image=(_) end + # A text description of the desired image(s). The maximum length is 1000 + # characters. sig { returns(String) } def prompt end @@ -22,6 +26,9 @@ module OpenAI def prompt=(_) end + # An additional image whose fully transparent areas (e.g. where alpha is zero) + # indicate where `image` should be edited. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. sig { returns(T.nilable(T.any(IO, StringIO))) } def mask end @@ -30,6 +37,8 @@ module OpenAI def mask=(_) end + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -38,6 +47,7 @@ module OpenAI def model=(_) end + # The number of images to generate. Must be between 1 and 10. sig { returns(T.nilable(Integer)) } def n end @@ -46,6 +56,9 @@ module OpenAI def n=(_) end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(Symbol)) } def response_format end @@ -54,6 +67,8 @@ module OpenAI def response_format=(_) end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. sig { returns(T.nilable(Symbol)) } def size end @@ -62,6 +77,9 @@ module OpenAI def size=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -116,16 +134,22 @@ module OpenAI def to_hash end + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. class ResponseFormat < OpenAI::Enum abstract! @@ -139,6 +163,8 @@ module OpenAI end end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. class Size < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index c5e39887..32f55ca9 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A text description of the desired image(s). The maximum length is 1000 + # characters for `dall-e-2` and 4000 characters for `dall-e-3`. sig { returns(String) } def prompt end @@ -14,6 +16,7 @@ module OpenAI def prompt=(_) end + # The model to use for image generation. sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -22,6 +25,8 @@ module OpenAI def model=(_) end + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. sig { returns(T.nilable(Integer)) } def n end @@ -30,6 +35,9 @@ module OpenAI def n=(_) end + # The quality of the image that will be generated. 
`hd` creates images with finer + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. sig { returns(T.nilable(Symbol)) } def quality end @@ -38,6 +46,9 @@ module OpenAI def quality=(_) end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(Symbol)) } def response_format end @@ -46,6 +57,9 @@ module OpenAI def response_format=(_) end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. sig { returns(T.nilable(Symbol)) } def size end @@ -54,6 +68,10 @@ module OpenAI def size=(_) end + # The style of the generated images. Must be one of `vivid` or `natural`. Vivid + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. sig { returns(T.nilable(Symbol)) } def style end @@ -62,6 +80,9 @@ module OpenAI def style=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -116,16 +137,21 @@ module OpenAI def to_hash end + # The model to use for image generation. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # The quality of the image that will be generated. `hd` creates images with finer + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. class Quality < OpenAI::Enum abstract! @@ -139,6 +165,9 @@ module OpenAI end end + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. class ResponseFormat < OpenAI::Enum abstract! @@ -152,6 +181,9 @@ module OpenAI end end + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. class Size < OpenAI::Enum abstract! @@ -168,6 +200,10 @@ module OpenAI end end + # The style of the generated images. Must be one of `vivid` or `natural`. Vivid + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. class Style < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/model.rbi b/rbi/lib/openai/models/model.rbi index ad1c5f72..07b59908 100644 --- a/rbi/lib/openai/models/model.rbi +++ b/rbi/lib/openai/models/model.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class Model < OpenAI::BaseModel + # The model identifier, which can be referenced in the API endpoints. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) when the model was created. sig { returns(Integer) } def created end @@ -19,6 +21,7 @@ module OpenAI def created=(_) end + # The object type, which is always "model". 
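[Editor's note] `ImageGenerateParams` above gates several knobs by model: `quality` and `style` apply only to `dall-e-3`, as do the larger sizes. A hedged generation sketch; `client.images.generate` and the response traversal are assumptions.

# Hypothetical image-generation sketch; accessor names are assumed.
images = client.images.generate(
  prompt: "a watercolor fox in a misty forest",
  model: "dall-e-3",
  quality: :hd,
  size: :"1024x1024",
  style: :natural,
  response_format: :url
)
puts images.data.first.url  # URLs expire 60 minutes after generation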
sig { returns(Symbol) } def object end @@ -27,6 +30,7 @@ module OpenAI def object=(_) end + # The organization that owns the model. sig { returns(String) } def owned_by end @@ -35,6 +39,7 @@ module OpenAI def owned_by=(_) end + # Describes an OpenAI model offering that can be used with the API. sig { params(id: String, created: Integer, owned_by: String, object: Symbol).returns(T.attached_class) } def self.new(id:, created:, owned_by:, object: :model) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index aba76998..672182dd 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class Moderation < OpenAI::BaseModel + # A list of the categories, and whether they are flagged or not. sig { returns(OpenAI::Models::Moderation::Categories) } def categories end @@ -11,6 +12,7 @@ module OpenAI def categories=(_) end + # A list of the categories along with the input type(s) that the score applies to. sig { returns(OpenAI::Models::Moderation::CategoryAppliedInputTypes) } def category_applied_input_types end @@ -22,6 +24,7 @@ module OpenAI def category_applied_input_types=(_) end + # A list of the categories along with their scores as predicted by the model. sig { returns(OpenAI::Models::Moderation::CategoryScores) } def category_scores end @@ -30,6 +33,7 @@ module OpenAI def category_scores=(_) end + # Whether any of the below categories are flagged. sig { returns(T::Boolean) } def flagged end @@ -65,6 +69,8 @@ module OpenAI end class Categories < OpenAI::BaseModel + # Content that expresses, incites, or promotes harassing language towards any + # target. sig { returns(T::Boolean) } def harassment end @@ -73,6 +79,8 @@ module OpenAI def harassment=(_) end + # Harassment content that also includes violence or serious harm towards any + # target. sig { returns(T::Boolean) } def harassment_threatening end @@ -81,6 +89,10 @@ module OpenAI def harassment_threatening=(_) end + # Content that expresses, incites, or promotes hate based on race, gender, + # ethnicity, religion, nationality, sexual orientation, disability status, or + # caste. Hateful content aimed at non-protected groups (e.g., chess players) is + # harassment. sig { returns(T::Boolean) } def hate end @@ -89,6 +101,9 @@ module OpenAI def hate=(_) end + # Hateful content that also includes violence or serious harm towards the targeted + # group based on race, gender, ethnicity, religion, nationality, sexual + # orientation, disability status, or caste. sig { returns(T::Boolean) } def hate_threatening end @@ -97,6 +112,9 @@ module OpenAI def hate_threatening=(_) end + # Content that includes instructions or advice that facilitate the planning or + # execution of wrongdoing, or that gives advice or instruction on how to commit + # illicit acts. For example, "how to shoplift" would fit this category. sig { returns(T.nilable(T::Boolean)) } def illicit end @@ -105,6 +123,9 @@ module OpenAI def illicit=(_) end + # Content that includes instructions or advice that facilitate the planning or + # execution of wrongdoing that also includes violence, or that gives advice or + # instruction on the procurement of any weapon. sig { returns(T.nilable(T::Boolean)) } def illicit_violent end @@ -113,6 +134,8 @@ module OpenAI def illicit_violent=(_) end + # Content that promotes, encourages, or depicts acts of self-harm, such as + # suicide, cutting, and eating disorders. 
sig { returns(T::Boolean) } def self_harm end @@ -121,6 +144,9 @@ module OpenAI def self_harm=(_) end + # Content that encourages performing acts of self-harm, such as suicide, cutting, + # and eating disorders, or that gives instructions or advice on how to commit such + # acts. sig { returns(T::Boolean) } def self_harm_instructions end @@ -129,6 +155,8 @@ module OpenAI def self_harm_instructions=(_) end + # Content where the speaker expresses that they are engaging or intend to engage + # in acts of self-harm, such as suicide, cutting, and eating disorders. sig { returns(T::Boolean) } def self_harm_intent end @@ -137,6 +165,9 @@ module OpenAI def self_harm_intent=(_) end + # Content meant to arouse sexual excitement, such as the description of sexual + # activity, or that promotes sexual services (excluding sex education and + # wellness). sig { returns(T::Boolean) } def sexual end @@ -145,6 +176,7 @@ module OpenAI def sexual=(_) end + # Sexual content that includes an individual who is under 18 years old. sig { returns(T::Boolean) } def sexual_minors end @@ -153,6 +185,7 @@ module OpenAI def sexual_minors=(_) end + # Content that depicts death, violence, or physical injury. sig { returns(T::Boolean) } def violence end @@ -161,6 +194,7 @@ module OpenAI def violence=(_) end + # Content that depicts death, violence, or physical injury in graphic detail. sig { returns(T::Boolean) } def violence_graphic end @@ -169,6 +203,7 @@ module OpenAI def violence_graphic=(_) end + # A list of the categories, and whether they are flagged or not. sig do params( harassment: T::Boolean, @@ -229,6 +264,7 @@ module OpenAI end class CategoryAppliedInputTypes < OpenAI::BaseModel + # The applied input type(s) for the category 'harassment'. sig { returns(T::Array[Symbol]) } def harassment end @@ -237,6 +273,7 @@ module OpenAI def harassment=(_) end + # The applied input type(s) for the category 'harassment/threatening'. sig { returns(T::Array[Symbol]) } def harassment_threatening end @@ -245,6 +282,7 @@ module OpenAI def harassment_threatening=(_) end + # The applied input type(s) for the category 'hate'. sig { returns(T::Array[Symbol]) } def hate end @@ -253,6 +291,7 @@ module OpenAI def hate=(_) end + # The applied input type(s) for the category 'hate/threatening'. sig { returns(T::Array[Symbol]) } def hate_threatening end @@ -261,6 +300,7 @@ module OpenAI def hate_threatening=(_) end + # The applied input type(s) for the category 'illicit'. sig { returns(T::Array[Symbol]) } def illicit end @@ -269,6 +309,7 @@ module OpenAI def illicit=(_) end + # The applied input type(s) for the category 'illicit/violent'. sig { returns(T::Array[Symbol]) } def illicit_violent end @@ -277,6 +318,7 @@ module OpenAI def illicit_violent=(_) end + # The applied input type(s) for the category 'self-harm'. sig { returns(T::Array[Symbol]) } def self_harm end @@ -285,6 +327,7 @@ module OpenAI def self_harm=(_) end + # The applied input type(s) for the category 'self-harm/instructions'. sig { returns(T::Array[Symbol]) } def self_harm_instructions end @@ -293,6 +336,7 @@ module OpenAI def self_harm_instructions=(_) end + # The applied input type(s) for the category 'self-harm/intent'. sig { returns(T::Array[Symbol]) } def self_harm_intent end @@ -301,6 +345,7 @@ module OpenAI def self_harm_intent=(_) end + # The applied input type(s) for the category 'sexual'. sig { returns(T::Array[Symbol]) } def sexual end @@ -309,6 +354,7 @@ module OpenAI def sexual=(_) end + # The applied input type(s) for the category 'sexual/minors'. 
sig { returns(T::Array[Symbol]) } def sexual_minors end @@ -317,6 +363,7 @@ module OpenAI def sexual_minors=(_) end + # The applied input type(s) for the category 'violence'. sig { returns(T::Array[Symbol]) } def violence end @@ -325,6 +372,7 @@ module OpenAI def violence=(_) end + # The applied input type(s) for the category 'violence/graphic'. sig { returns(T::Array[Symbol]) } def violence_graphic end @@ -333,6 +381,7 @@ module OpenAI def violence_graphic=(_) end + # A list of the categories along with the input type(s) that the score applies to. sig do params( harassment: T::Array[Symbol], @@ -555,6 +604,7 @@ module OpenAI end class CategoryScores < OpenAI::BaseModel + # The score for the category 'harassment'. sig { returns(Float) } def harassment end @@ -563,6 +613,7 @@ module OpenAI def harassment=(_) end + # The score for the category 'harassment/threatening'. sig { returns(Float) } def harassment_threatening end @@ -571,6 +622,7 @@ module OpenAI def harassment_threatening=(_) end + # The score for the category 'hate'. sig { returns(Float) } def hate end @@ -579,6 +631,7 @@ module OpenAI def hate=(_) end + # The score for the category 'hate/threatening'. sig { returns(Float) } def hate_threatening end @@ -587,6 +640,7 @@ module OpenAI def hate_threatening=(_) end + # The score for the category 'illicit'. sig { returns(Float) } def illicit end @@ -595,6 +649,7 @@ module OpenAI def illicit=(_) end + # The score for the category 'illicit/violent'. sig { returns(Float) } def illicit_violent end @@ -603,6 +658,7 @@ module OpenAI def illicit_violent=(_) end + # The score for the category 'self-harm'. sig { returns(Float) } def self_harm end @@ -611,6 +667,7 @@ module OpenAI def self_harm=(_) end + # The score for the category 'self-harm/instructions'. sig { returns(Float) } def self_harm_instructions end @@ -619,6 +676,7 @@ module OpenAI def self_harm_instructions=(_) end + # The score for the category 'self-harm/intent'. sig { returns(Float) } def self_harm_intent end @@ -627,6 +685,7 @@ module OpenAI def self_harm_intent=(_) end + # The score for the category 'sexual'. sig { returns(Float) } def sexual end @@ -635,6 +694,7 @@ module OpenAI def sexual=(_) end + # The score for the category 'sexual/minors'. sig { returns(Float) } def sexual_minors end @@ -643,6 +703,7 @@ module OpenAI def sexual_minors=(_) end + # The score for the category 'violence'. sig { returns(Float) } def violence end @@ -651,6 +712,7 @@ module OpenAI def violence=(_) end + # The score for the category 'violence/graphic'. sig { returns(Float) } def violence_graphic end @@ -659,6 +721,7 @@ module OpenAI def violence_graphic=(_) end + # A list of the categories along with their scores as predicted by the model. sig do params( harassment: Float, diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 998863d3..bac4a3a8 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Input (or inputs) to classify. Can be a single string, an array of strings, or + # an array of multi-modal input objects similar to other models. sig do returns( T.any( @@ -37,6 +39,10 @@ module OpenAI def input=(_) end + # The content moderation model you would like to use. 
Learn more in + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). sig { returns(T.nilable(T.any(String, Symbol))) } def model end @@ -77,6 +83,8 @@ module OpenAI def to_hash end + # Input (or inputs) to classify. Can be a single string, an array of strings, or + # an array of multi-modal input objects similar to other models. class Input < OpenAI::Union abstract! ModerationMultiModalInputArray = T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } class << self + # @api private sig do override .returns( @@ -96,10 +105,15 @@ module OpenAI end end + # The content moderation model you would like to use. Learn more in + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index 46b29878..bf831d82 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ModerationCreateResponse < OpenAI::BaseModel + # The unique identifier for the moderation request. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The model used to generate the moderation results. sig { returns(String) } def model end @@ -19,6 +21,7 @@ module OpenAI def model=(_) end + # A list of moderation objects. sig { returns(T::Array[OpenAI::Models::Moderation]) } def results end @@ -27,6 +30,7 @@ module OpenAI def results=(_) end + # Represents whether a given text input is potentially harmful. sig { params(id: String, model: String, results: T::Array[OpenAI::Models::Moderation]).returns(T.attached_class) } def self.new(id:, model:, results:) end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index a7ecaefe..222a1447 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ModerationImageURLInput < OpenAI::BaseModel + # Contains either an image URL or a data URL for a base64 encoded image. sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } def image_url end @@ -14,6 +15,7 @@ module OpenAI def image_url=(_) end + # Always `image_url`. sig { returns(Symbol) } def type end @@ -22,6 +24,7 @@ module OpenAI def type=(_) end + # An object describing an image to classify. sig do params(image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, type: Symbol) .returns(T.attached_class) end def self.new(image_url:, type: :image_url) end @@ -34,6 +37,7 @@ module OpenAI end class ImageURL < OpenAI::BaseModel + # Either a URL of the image or the base64 encoded image data. sig { returns(String) } def url end @@ -42,6 +46,7 @@ module OpenAI def url=(_) end + # Contains either an image URL or a data URL for a base64 encoded image. 
sig { params(url: String).returns(T.attached_class) } def self.new(url:) end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 2d658e57..1c24bbd3 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -2,10 +2,12 @@ module OpenAI module Models + # An object describing an image to classify. class ModerationMultiModalInput < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/lib/openai/models/moderation_text_input.rbi index 85c34f7f..41888533 100644 --- a/rbi/lib/openai/models/moderation_text_input.rbi +++ b/rbi/lib/openai/models/moderation_text_input.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ModerationTextInput < OpenAI::BaseModel + # A string of text to classify. sig { returns(String) } def text end @@ -11,6 +12,7 @@ module OpenAI def text=(_) end + # Always `text`. sig { returns(Symbol) } def type end @@ -19,6 +21,7 @@ module OpenAI def type=(_) end + # An object describing text to classify. sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :text) end diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index c4d89d20..db3ddb71 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class OtherFileChunkingStrategyObject < OpenAI::BaseModel + # Always `other`. sig { returns(Symbol) } def type end @@ -11,6 +12,9 @@ module OpenAI def type=(_) end + # This is returned when the chunking strategy is unknown. Typically, this is + # because the file was indexed before the `chunking_strategy` concept was + # introduced in the API. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :other) end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index de57d2db..3c5fb130 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -3,6 +3,12 @@ module OpenAI module Models class Reasoning < OpenAI::BaseModel + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(Symbol)) } def effort end @@ -11,6 +17,11 @@ module OpenAI def effort=(_) end + # **o-series models only** + # + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. sig { returns(T.nilable(Symbol)) } def generate_summary end @@ -19,6 +30,10 @@ module OpenAI def generate_summary=(_) end + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { params(effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)).returns(T.attached_class) } def self.new(effort:, generate_summary: nil) end @@ -27,6 +42,11 @@ module OpenAI def to_hash end + # **o-series models only** + # + # A summary of the reasoning performed by the model. 
This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. class GenerateSummary < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index b4182a8d..8f9c3bc8 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -2,6 +2,12 @@ module OpenAI module Models + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. class ReasoningEffort < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index ffd5658c..044c6ff6 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ResponseFormatJSONObject < OpenAI::BaseModel + # The type of response format being defined. Always `json_object`. sig { returns(Symbol) } def type end @@ -11,6 +12,9 @@ module OpenAI def type=(_) end + # JSON object response format. An older method of generating JSON responses. Using + # `json_schema` is recommended for models that support it. Note that the model + # will not generate JSON without a system or user message instructing it to do so. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :json_object) end diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index b622a63b..de32d2a7 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ResponseFormatJSONSchema < OpenAI::BaseModel + # Structured Outputs configuration options, including a JSON Schema. sig { returns(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) } def json_schema end @@ -14,6 +15,7 @@ module OpenAI def json_schema=(_) end + # The type of response format being defined. Always `json_schema`. sig { returns(Symbol) } def type end @@ -22,6 +24,9 @@ module OpenAI def type=(_) end + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params(json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, type: Symbol) .returns(T.attached_class) @@ -34,6 +39,8 @@ module OpenAI end class JSONSchema < OpenAI::BaseModel + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. sig { returns(String) } def name end @@ -42,6 +49,8 @@ module OpenAI def name=(_) end + # A description of what the response format is for, used by the model to determine + # how to respond in the format. sig { returns(T.nilable(String)) } def description end @@ -50,6 +59,8 @@ module OpenAI def description=(_) end + # The schema for the response format, described as a JSON Schema object. Learn how + # to build JSON schemas [here](https://json-schema.org/). 
sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } def schema end @@ -58,6 +69,11 @@ module OpenAI def schema=(_) end + # Whether to enable strict schema adherence when generating the output. If set to + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } def strict end @@ -66,6 +82,7 @@ module OpenAI def strict=(_) end + # Structured Outputs configuration options, including a JSON Schema. sig do params( name: String, diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/lib/openai/models/response_format_text.rbi index 6f3c8970..2894efdf 100644 --- a/rbi/lib/openai/models/response_format_text.rbi +++ b/rbi/lib/openai/models/response_format_text.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class ResponseFormatText < OpenAI::BaseModel + # The type of response format being defined. Always `text`. sig { returns(Symbol) } def type end @@ -11,6 +12,7 @@ module OpenAI def type=(_) end + # Default response format. Used to generate text responses. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :text) end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index b6ba2c12..d37038e7 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ComputerTool < OpenAI::BaseModel + # The height of the computer display. sig { returns(Float) } def display_height end @@ -12,6 +13,7 @@ module OpenAI def display_height=(_) end + # The width of the computer display. sig { returns(Float) } def display_width end @@ -20,6 +22,7 @@ module OpenAI def display_width=(_) end + # The type of computer environment to control. sig { returns(Symbol) } def environment end @@ -28,6 +31,7 @@ module OpenAI def environment=(_) end + # The type of the computer use tool. Always `computer_use_preview`. sig { returns(Symbol) } def type end @@ -36,6 +40,8 @@ module OpenAI def type=(_) end + # A tool that controls a virtual computer. Learn more about the + # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do params(display_height: Float, display_width: Float, environment: Symbol, type: Symbol) .returns(T.attached_class) @@ -49,6 +55,7 @@ module OpenAI def to_hash end + # The type of computer environment to control. class Environment < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 428515ec..5d8b1737 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class EasyInputMessage < OpenAI::BaseModel + # Text, image, or audio input to the model, used to generate a response. Can also + # contain previous assistant responses. sig { returns(T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)) } def content end @@ -15,6 +17,8 @@ module OpenAI def content=(_) end + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
sig { returns(Symbol) } def role end @@ -23,6 +27,7 @@ module OpenAI def role=(_) end + # The type of the message input. Always `message`. sig { returns(T.nilable(Symbol)) } def type end @@ -31,6 +36,11 @@ module OpenAI def type=(_) end + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. sig do params( content: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList), @@ -51,10 +61,13 @@ module OpenAI def to_hash end + # Text, image, or audio input to the model, used to generate a response. Can also + # contain previous assistant responses. class Content < OpenAI::Union abstract! class << self + # @api private sig do override .returns([[NilClass, String], [NilClass, OpenAI::Models::Responses::ResponseInputMessageContentList]]) @@ -64,6 +77,8 @@ module OpenAI end end + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. class Role < OpenAI::Enum abstract! @@ -79,6 +94,7 @@ module OpenAI end end + # The type of the message input. Always `message`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 204c6f9d..6854bd65 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class FileSearchTool < OpenAI::BaseModel + # The type of the file search tool. Always `file_search`. sig { returns(Symbol) } def type end @@ -12,6 +13,7 @@ module OpenAI def type=(_) end + # The IDs of the vector stores to search. sig { returns(T::Array[String]) } def vector_store_ids end @@ -20,6 +22,7 @@ module OpenAI def vector_store_ids=(_) end + # A filter to apply based on file attributes. sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } def filters end @@ -31,6 +34,8 @@ module OpenAI def filters=(_) end + # The maximum number of results to return. This number should be between 1 and 50 + # inclusive. sig { returns(T.nilable(Integer)) } def max_num_results end @@ -39,6 +44,7 @@ module OpenAI def max_num_results=(_) end + # Ranking options for search. sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions)) } def ranking_options end @@ -50,6 +56,9 @@ module OpenAI def ranking_options=(_) end + # A tool that searches for relevant content from uploaded files. Learn more about + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). sig do params( vector_store_ids: T::Array[String], @@ -78,10 +87,12 @@ module OpenAI def to_hash end + # A filter to apply based on file attributes. class Filters < OpenAI::Union abstract! class << self + # @api private sig do override .returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, OpenAI::Models::CompoundFilter]]) @@ -92,6 +103,7 @@ module OpenAI end class RankingOptions < OpenAI::BaseModel + # The ranker to use for the file search. sig { returns(T.nilable(Symbol)) } def ranker end @@ -100,6 +112,9 @@ module OpenAI def ranker=(_) end + # The score threshold for the file search, a number between 0 and 1. 
Numbers + # closer to 1 will attempt to return only the most relevant results, but may + # return fewer results. sig { returns(T.nilable(Float)) } def score_threshold end @@ -108,6 +123,7 @@ module OpenAI def score_threshold=(_) end + # Ranking options for search. sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker: nil, score_threshold: nil) end @@ -116,6 +132,7 @@ module OpenAI def to_hash end + # The ranker to use for the file search. class Ranker < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index b1e8d293..8513be94 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class FunctionTool < OpenAI::BaseModel + # The name of the function to call. sig { returns(String) } def name end @@ -12,6 +13,7 @@ module OpenAI def name=(_) end + # A JSON schema object describing the parameters of the function. sig { returns(T::Hash[Symbol, T.anything]) } def parameters end @@ -20,6 +22,7 @@ module OpenAI def parameters=(_) end + # Whether to enforce strict parameter validation. Default `true`. sig { returns(T::Boolean) } def strict end @@ -28,6 +31,7 @@ module OpenAI def strict=(_) end + # The type of the function tool. Always `function`. sig { returns(Symbol) } def type end @@ -36,6 +40,8 @@ module OpenAI def type=(_) end + # A description of the function. Used by the model to determine whether or not to + # call the function. sig { returns(T.nilable(String)) } def description end @@ -44,6 +50,9 @@ module OpenAI def description=(_) end + # Defines a function in your own code the model can choose to call. Learn more + # about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do params( name: String, diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 8f16ac93..77dd539f 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # An item ID to list items after, used in pagination. sig { returns(T.nilable(String)) } def after end @@ -15,6 +16,7 @@ module OpenAI def after=(_) end + # An item ID to list items before, used in pagination. sig { returns(T.nilable(String)) } def before end @@ -23,6 +25,8 @@ module OpenAI def before=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -31,6 +35,10 @@ module OpenAI def limit=(_) end + # The order to return the input items in. Default is `asc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -67,6 +75,10 @@ module OpenAI def to_hash end + # The order to return the input items in. Default is `asc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. class Order < OpenAI::Enum abstract! 
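The pagination parameters above combine in the usual cursor-paging way: fetch a page, then pass the ID of the last item seen as `after` on the next call. A minimal paging sketch, assuming the generated client exposes this endpoint as `client.responses.input_items.list` and returns a page object with a `data` array; those names are illustrative assumptions, not shown in this patch:

```ruby
require "openai"

# Hypothetical usage sketch; the resource method and page shape are assumptions.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

cursor = nil
loop do
  # `limit` can range between 1 and 100 (default 20); `order` is :asc or :desc.
  params = {limit: 20, order: :asc}
  params[:after] = cursor unless cursor.nil?

  page = client.responses.input_items.list("resp_123", **params) # "resp_123" is a placeholder ID
  items = page.data
  items.each { |item| puts item.id }

  break if items.empty?
  cursor = items.last.id
end
```

Supplying only `after` (rather than `after` and `before` together) keeps the traversal direction unambiguous; `before` would instead page backwards from a known cursor.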
diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index c4764143..c5e9ed8f 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class Response < OpenAI::BaseModel + # Unique identifier for this Response. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # Unix timestamp (in seconds) of when this Response was created. sig { returns(Float) } def created_at end @@ -20,6 +22,7 @@ module OpenAI def created_at=(_) end + # An error object returned when the model fails to generate a Response. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseError)) } def error end @@ -31,6 +34,7 @@ module OpenAI def error=(_) end + # Details about why the response is incomplete. sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails)) } def incomplete_details end @@ -42,6 +46,12 @@ module OpenAI def incomplete_details=(_) end + # Inserts a system (or developer) message as the first item in the model's + # context. + # + # When used along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple + # to swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } def instructions end @@ -50,6 +60,12 @@ module OpenAI def instructions=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -58,6 +74,11 @@ module OpenAI def metadata=(_) end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig { returns(T.any(String, Symbol)) } def model end @@ -66,6 +87,7 @@ module OpenAI def model=(_) end + # The object type of this resource - always set to `response`. sig { returns(Symbol) } def object end @@ -74,6 +96,13 @@ module OpenAI def object=(_) end + # An array of content items generated by the model. + # + # - The length and order of items in the `output` array is dependent on the + # model's response. + # - Rather than accessing the first item in the `output` array and assuming it's + # an `assistant` message with the content generated by the model, you might + # consider using the `output_text` property where supported in SDKs. sig do returns( T::Array[ @@ -120,6 +149,7 @@ module OpenAI def output=(_) end + # Whether to allow the model to run tool calls in parallel. sig { returns(T::Boolean) } def parallel_tool_calls end @@ -128,6 +158,10 @@ module OpenAI def parallel_tool_calls=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. 
sig { returns(T.nilable(Float)) } def temperature end @@ -136,6 +170,9 @@ module OpenAI def temperature=(_) end + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. sig do returns( T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) @@ -155,6 +192,20 @@ module OpenAI def tool_choice=(_) end + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do returns( T::Array[ @@ -195,6 +246,11 @@ module OpenAI def tools=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -203,6 +259,9 @@ module OpenAI def top_p=(_) end + # An upper bound for the number of tokens that can be generated for a response, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } def max_output_tokens end @@ -211,6 +270,9 @@ module OpenAI def max_output_tokens=(_) end + # The unique ID of the previous response to the model. Use this to create + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). sig { returns(T.nilable(String)) } def previous_response_id end @@ -219,6 +281,10 @@ module OpenAI def previous_response_id=(_) end + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } def reasoning end @@ -227,6 +293,8 @@ module OpenAI def reasoning=(_) end + # The status of the response generation. One of `completed`, `failed`, + # `in_progress`, or `incomplete`. sig { returns(T.nilable(Symbol)) } def status end @@ -235,6 +303,11 @@ module OpenAI def status=(_) end + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } def text end @@ -246,6 +319,13 @@ module OpenAI def text=(_) end + # The truncation strategy to use for the model response. 
+ # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. sig { returns(T.nilable(Symbol)) } def truncation end @@ -254,6 +334,8 @@ module OpenAI def truncation=(_) end + # Represents token usage details including input tokens, output tokens, a + # breakdown of output tokens, and the total tokens used. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } def usage end @@ -262,6 +344,9 @@ module OpenAI def usage=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -388,6 +473,7 @@ module OpenAI end class IncompleteDetails < OpenAI::BaseModel + # The reason why the response is incomplete. sig { returns(T.nilable(Symbol)) } def reason end @@ -396,6 +482,7 @@ module OpenAI def reason=(_) end + # Details about why the response is incomplete. sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason: nil) end @@ -404,6 +491,7 @@ module OpenAI def to_hash end + # The reason why the response is incomplete. class Reason < OpenAI::Enum abstract! @@ -418,20 +506,30 @@ module OpenAI end end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. class ToolChoice < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -443,6 +541,13 @@ module OpenAI end end + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. class Truncation < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi index cecdd81e..54ec9c86 100644 --- a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseAudioDeltaEvent < OpenAI::BaseModel + # A chunk of Base64 encoded response audio bytes. sig { returns(String) } def delta end @@ -12,6 +13,7 @@ module OpenAI def delta=(_) end + # The type of the event. Always `response.audio.delta`. 
sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # Emitted when there is a partial audio response. sig { params(delta: String, type: Symbol).returns(T.attached_class) } def self.new(delta:, type: :"response.audio.delta") end diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_done_event.rbi index c67012a1..d60d8ffe 100644 --- a/rbi/lib/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseAudioDoneEvent < OpenAI::BaseModel + # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } def type end @@ -12,6 +13,7 @@ module OpenAI def type=(_) end + # Emitted when the audio response is complete. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :"response.audio.done") end diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi index fe5f4c18..072b6541 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel + # The partial transcript of the audio response. sig { returns(String) } def delta end @@ -12,6 +13,7 @@ module OpenAI def delta=(_) end + # The type of the event. Always `response.audio.transcript.delta`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # Emitted when there is a partial transcript of audio. sig { params(delta: String, type: Symbol).returns(T.attached_class) } def self.new(delta:, type: :"response.audio.transcript.delta") end diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi index 97204636..940f3497 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel + # The type of the event. Always `response.audio.transcript.done`. sig { returns(Symbol) } def type end @@ -12,6 +13,7 @@ module OpenAI def type=(_) end + # Emitted when the full audio transcript is completed. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :"response.audio.transcript.done") end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 15a3e9c4..21d55044 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel + # The partial code snippet added by the code interpreter. sig { returns(String) } def delta end @@ -12,6 +13,7 @@ module OpenAI def delta=(_) end + # The index of the output item for which the code interpreter call is in progress. 
sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.code_interpreter_call.code.delta`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a partial code snippet is added by the code interpreter. sig { params(delta: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 25f31749..294664ab 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel + # The final code snippet output by the code interpreter. sig { returns(String) } def code end @@ -12,6 +13,7 @@ module OpenAI def code=(_) end + # The index of the output item for which the code interpreter call is in progress. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.code_interpreter_call.code.done`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when code snippet output is finalized by the code interpreter. sig { params(code: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(code:, output_index:, type: :"response.code_interpreter_call.code.done") end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 93ae27fe..389d9f49 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel + # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } def code_interpreter_call end @@ -15,6 +16,7 @@ module OpenAI def code_interpreter_call=(_) end + # The index of the output item for which the code interpreter call is in progress. sig { returns(Integer) } def output_index end @@ -23,6 +25,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.code_interpreter_call.completed`. sig { returns(Symbol) } def type end @@ -31,6 +34,7 @@ module OpenAI def type=(_) end + # Emitted when the code interpreter call is completed. 
sig do params( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 815750a0..9d0d0524 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel + # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } def code_interpreter_call end @@ -15,6 +16,7 @@ module OpenAI def code_interpreter_call=(_) end + # The index of the output item for which the code interpreter call is in progress. sig { returns(Integer) } def output_index end @@ -23,6 +25,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.code_interpreter_call.in_progress`. sig { returns(Symbol) } def type end @@ -31,6 +34,7 @@ module OpenAI def type=(_) end + # Emitted when a code interpreter call is in progress. sig do params( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index fa22f0e6..4757018f 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel + # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } def code_interpreter_call end @@ -15,6 +16,7 @@ module OpenAI def code_interpreter_call=(_) end + # The index of the output item for which the code interpreter call is in progress. sig { returns(Integer) } def output_index end @@ -23,6 +25,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.code_interpreter_call.interpreting`. sig { returns(Symbol) } def type end @@ -31,6 +34,7 @@ module OpenAI def type=(_) end + # Emitted when the code interpreter is actively interpreting the code snippet. sig do params( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index d1a61cfa..c4e3d1ae 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterToolCall < OpenAI::BaseModel + # The unique ID of the code interpreter tool call. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The code to run. sig { returns(String) } def code end @@ -20,6 +22,7 @@ module OpenAI def code=(_) end + # The results of the code interpreter tool call. sig do returns( T::Array[ @@ -54,6 +57,7 @@ module OpenAI def results=(_) end + # The status of the code interpreter tool call. 
sig { returns(Symbol) } def status end @@ -62,6 +66,7 @@ module OpenAI def status=(_) end + # The type of the code interpreter tool call. Always `code_interpreter_call`. sig { returns(Symbol) } def type end @@ -70,6 +75,7 @@ module OpenAI def type=(_) end + # A tool call to run code. sig do params( id: String, @@ -108,10 +114,12 @@ module OpenAI def to_hash end + # The output of a code interpreter tool call that is text. class Result < OpenAI::Union abstract! class Logs < OpenAI::BaseModel + # The logs of the code interpreter tool call. sig { returns(String) } def logs end @@ -120,6 +128,7 @@ module OpenAI def logs=(_) end + # The type of the code interpreter text output. Always `logs`. sig { returns(Symbol) } def type end @@ -128,6 +137,7 @@ module OpenAI def type=(_) end + # The output of a code interpreter tool call that is text. sig { params(logs: String, type: Symbol).returns(T.attached_class) } def self.new(logs:, type: :logs) end @@ -149,6 +159,7 @@ module OpenAI def files=(_) end + # The type of the code interpreter file output. Always `files`. sig { returns(Symbol) } def type end @@ -157,6 +168,7 @@ module OpenAI def type=(_) end + # The output of a code interpreter tool call that is a file. sig do params( files: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], @@ -180,6 +192,7 @@ module OpenAI end class File < OpenAI::BaseModel + # The ID of the file. sig { returns(String) } def file_id end @@ -188,6 +201,7 @@ module OpenAI def file_id=(_) end + # The MIME type of the file. sig { returns(String) } def mime_type end @@ -207,6 +221,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -218,6 +233,7 @@ module OpenAI end end + # The status of the code interpreter tool call. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 6ae602db..7db04649 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCompletedEvent < OpenAI::BaseModel + # Properties of the completed response. sig { returns(OpenAI::Models::Responses::Response) } def response end @@ -12,6 +13,7 @@ module OpenAI def response=(_) end + # The type of the event. Always `response.completed`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # Emitted when the model response is complete. sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) } def self.new(response:, type: :"response.completed") end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 9360dc6c..cc58669a 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseComputerToolCall < OpenAI::BaseModel + # The unique ID of the computer call. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # A click action. sig do returns( T.any( @@ -61,6 +63,7 @@ module OpenAI def action=(_) end + # An identifier used when responding to the tool call with output. 
sig { returns(String) } def call_id end @@ -69,6 +72,7 @@ module OpenAI def call_id=(_) end + # The pending safety checks for the computer call. sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]) } def pending_safety_checks end @@ -80,6 +84,8 @@ module OpenAI def pending_safety_checks=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(Symbol) } def status end @@ -88,6 +94,7 @@ module OpenAI def status=(_) end + # The type of the computer call. Always `computer_call`. sig { returns(Symbol) } def type end @@ -96,6 +103,9 @@ module OpenAI def type=(_) end + # A tool call to a computer use tool. See the + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) + # for more information. sig do params( id: String, @@ -146,10 +156,13 @@ module OpenAI def to_hash end + # A click action. class Action < OpenAI::Union abstract! class Click < OpenAI::BaseModel + # Indicates which mouse button was pressed during the click. One of `left`, + # `right`, `wheel`, `back`, or `forward`. sig { returns(Symbol) } def button end @@ -158,6 +171,8 @@ module OpenAI def button=(_) end + # Specifies the event type. For a click action, this property is always set to + # `click`. sig { returns(Symbol) } def type end @@ -166,6 +181,7 @@ module OpenAI def type=(_) end + # The x-coordinate where the click occurred. sig { returns(Integer) } def x end @@ -174,6 +190,7 @@ module OpenAI def x=(_) end + # The y-coordinate where the click occurred. sig { returns(Integer) } def y_ end @@ -182,6 +199,7 @@ module OpenAI def y_=(_) end + # A click action. sig { params(button: Symbol, x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } def self.new(button:, x:, y_:, type: :click) end @@ -190,6 +208,8 @@ module OpenAI def to_hash end + # Indicates which mouse button was pressed during the click. One of `left`, + # `right`, `wheel`, `back`, or `forward`. class Button < OpenAI::Enum abstract! @@ -208,6 +228,8 @@ module OpenAI end class DoubleClick < OpenAI::BaseModel + # Specifies the event type. For a double click action, this property is always set + # to `double_click`. sig { returns(Symbol) } def type end @@ -216,6 +238,7 @@ module OpenAI def type=(_) end + # The x-coordinate where the double click occurred. sig { returns(Integer) } def x end @@ -224,6 +247,7 @@ module OpenAI def x=(_) end + # The y-coordinate where the double click occurred. sig { returns(Integer) } def y_ end @@ -232,6 +256,7 @@ module OpenAI def y_=(_) end + # A double click action. sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } def self.new(x:, y_:, type: :double_click) end @@ -242,6 +267,15 @@ module OpenAI end class Drag < OpenAI::BaseModel + # An array of coordinates representing the path of the drag action. Coordinates + # will appear as an array of objects, eg + # + # ``` + # [ + # { x: 100, y: 200 }, + # { x: 200, y: 300 } + # ] + # ``` sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) } def path end @@ -253,6 +287,8 @@ module OpenAI def path=(_) end + # Specifies the event type. For a drag action, this property is always set to + # `drag`. sig { returns(Symbol) } def type end @@ -261,6 +297,7 @@ module OpenAI def type=(_) end + # A drag action. 
sig do params( path: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], @@ -281,6 +318,7 @@ module OpenAI end class Path < OpenAI::BaseModel + # The x-coordinate. sig { returns(Integer) } def x end @@ -289,6 +327,7 @@ module OpenAI def x=(_) end + # The y-coordinate. sig { returns(Integer) } def y_ end @@ -297,6 +336,7 @@ module OpenAI def y_=(_) end + # A series of x/y coordinate pairs in the drag path. sig { params(x: Integer, y_: Integer).returns(T.attached_class) } def self.new(x:, y_:) end @@ -308,6 +348,8 @@ module OpenAI end class Keypress < OpenAI::BaseModel + # The combination of keys the model is requesting to be pressed. This is an array + # of strings, each representing a key. sig { returns(T::Array[String]) } def keys end @@ -316,6 +358,8 @@ module OpenAI def keys=(_) end + # Specifies the event type. For a keypress action, this property is always set to + # `keypress`. sig { returns(Symbol) } def type end @@ -324,6 +368,7 @@ module OpenAI def type=(_) end + # A collection of keypresses the model would like to perform. sig { params(keys: T::Array[String], type: Symbol).returns(T.attached_class) } def self.new(keys:, type: :keypress) end @@ -334,6 +379,8 @@ module OpenAI end class Move < OpenAI::BaseModel + # Specifies the event type. For a move action, this property is always set to + # `move`. sig { returns(Symbol) } def type end @@ -342,6 +389,7 @@ module OpenAI def type=(_) end + # The x-coordinate to move to. sig { returns(Integer) } def x end @@ -350,6 +398,7 @@ module OpenAI def x=(_) end + # The y-coordinate to move to. sig { returns(Integer) } def y_ end @@ -358,6 +407,7 @@ module OpenAI def y_=(_) end + # A mouse move action. sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } def self.new(x:, y_:, type: :move) end @@ -368,6 +418,8 @@ module OpenAI end class Screenshot < OpenAI::BaseModel + # Specifies the event type. For a screenshot action, this property is always set + # to `screenshot`. sig { returns(Symbol) } def type end @@ -376,6 +428,7 @@ module OpenAI def type=(_) end + # A screenshot action. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :screenshot) end @@ -386,6 +439,7 @@ module OpenAI end class Scroll < OpenAI::BaseModel + # The horizontal scroll distance. sig { returns(Integer) } def scroll_x end @@ -394,6 +448,7 @@ module OpenAI def scroll_x=(_) end + # The vertical scroll distance. sig { returns(Integer) } def scroll_y end @@ -402,6 +457,8 @@ module OpenAI def scroll_y=(_) end + # Specifies the event type. For a scroll action, this property is always set to + # `scroll`. sig { returns(Symbol) } def type end @@ -410,6 +467,7 @@ module OpenAI def type=(_) end + # The x-coordinate where the scroll occurred. sig { returns(Integer) } def x end @@ -418,6 +476,7 @@ module OpenAI def x=(_) end + # The y-coordinate where the scroll occurred. sig { returns(Integer) } def y_ end @@ -426,6 +485,7 @@ module OpenAI def y_=(_) end + # A scroll action. sig do params(scroll_x: Integer, scroll_y: Integer, x: Integer, y_: Integer, type: Symbol) .returns(T.attached_class) @@ -441,6 +501,7 @@ module OpenAI end class Type < OpenAI::BaseModel + # The text to type. sig { returns(String) } def text end @@ -449,6 +510,8 @@ module OpenAI def text=(_) end + # Specifies the event type. For a type action, this property is always set to + # `type`. sig { returns(Symbol) } def type end @@ -457,6 +520,7 @@ module OpenAI def type=(_) end + # An action to type in text. 
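Each action variant carries a `type` discriminator, which makes Ruby's hash pattern matching a natural fit for dispatch. A sketch over symbol-keyed hashes mirroring the documented fields; the `puts` bodies stand in for a real executor:

```ruby
# Dispatch over computer-use action payloads, keyed on the `type`
# discriminator documented above. Hashes mirror the wire format.
def perform(action)
  case action
  in {type: "click", button:, x:, y:}
    puts "click #{button} at (#{x}, #{y})"
  in {type: "double_click", x:, y:}
    puts "double click at (#{x}, #{y})"
  in {type: "drag", path:}
    puts "drag through #{path.map { |p| [p[:x], p[:y]] }.inspect}"
  in {type: "keypress", keys:}
    puts "press #{keys.join('+')}"
  in {type: "move", x:, y:}
    puts "move to (#{x}, #{y})"
  in {type: "screenshot"}
    puts "take a screenshot"
  in {type: "scroll", scroll_x:, scroll_y:, x:, y:}
    puts "scroll (#{scroll_x}, #{scroll_y}) from (#{x}, #{y})"
  in {type: "type", text:}
    puts "type #{text.inspect}"
  in {type: "wait"}
    puts "wait"
  end
end

perform({type: "click", button: "left", x: 100, y: 200})
```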
sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :type) end @@ -467,6 +531,8 @@ module OpenAI end class Wait < OpenAI::BaseModel + # Specifies the event type. For a wait action, this property is always set to + # `wait`. sig { returns(Symbol) } def type end @@ -475,6 +541,7 @@ module OpenAI def type=(_) end + # A wait action. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :wait) end @@ -485,6 +552,7 @@ module OpenAI end class << self + # @api private sig do override .returns( @@ -497,6 +565,7 @@ module OpenAI end class PendingSafetyCheck < OpenAI::BaseModel + # The ID of the pending safety check. sig { returns(String) } def id end @@ -505,6 +574,7 @@ module OpenAI def id=(_) end + # The type of the pending safety check. sig { returns(String) } def code end @@ -513,6 +583,7 @@ module OpenAI def code=(_) end + # Details about the pending safety check. sig { returns(String) } def message end @@ -521,6 +592,7 @@ module OpenAI def message=(_) end + # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } def self.new(id:, code:, message:) end @@ -530,6 +602,8 @@ module OpenAI end end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! @@ -544,6 +618,7 @@ module OpenAI end end + # The type of the computer call. Always `computer_call`. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index c2e6aed1..3580164a 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -3,10 +3,12 @@ module OpenAI module Models module Responses + # Multi-modal input and output contents. class ResponseContent < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 6280062a..9a037fc4 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseContentPartAddedEvent < OpenAI::BaseModel + # The index of the content part that was added. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The ID of the output item that the content part was added to. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the content part was added to. sig { returns(Integer) } def output_index end @@ -28,6 +31,7 @@ module OpenAI def output_index=(_) end + # The content part that was added. sig do returns( T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) @@ -47,6 +51,7 @@ module OpenAI def part=(_) end + # The type of the event. Always `response.content_part.added`. sig { returns(Symbol) } def type end @@ -55,6 +60,7 @@ module OpenAI def type=(_) end + # Emitted when a new content part is added. sig do params( content_index: Integer, @@ -83,10 +89,12 @@ module OpenAI def to_hash end + # The content part that was added. class Part < OpenAI::Union abstract! 
class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 01ea5776..c7102f5b 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseContentPartDoneEvent < OpenAI::BaseModel + # The index of the content part that is done. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The ID of the output item that the content part was added to. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the content part was added to. sig { returns(Integer) } def output_index end @@ -28,6 +31,7 @@ module OpenAI def output_index=(_) end + # The content part that is done. sig do returns( T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) @@ -47,6 +51,7 @@ module OpenAI def part=(_) end + # The type of the event. Always `response.content_part.done`. sig { returns(Symbol) } def type end @@ -55,6 +60,7 @@ module OpenAI def type=(_) end + # Emitted when a content part is done. sig do params( content_index: Integer, @@ -83,10 +89,12 @@ module OpenAI def to_hash end + # The content part that is done. class Part < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index fe1d6f07..e7a3ffca 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -7,6 +7,15 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) sig { returns(T.any(String, OpenAI::Models::Responses::ResponseInput)) } def input end @@ -18,6 +27,11 @@ module OpenAI def input=(_) end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig { returns(T.any(String, Symbol)) } def model end @@ -26,6 +40,14 @@ module OpenAI def model=(_) end + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. 
sig { returns(T.nilable(T::Array[Symbol])) } def include end @@ -34,6 +56,12 @@ module OpenAI def include=(_) end + # Inserts a system (or developer) message as the first item in the model's + # context. + # + # When used along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple + # to swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } def instructions end @@ -42,6 +70,9 @@ module OpenAI def instructions=(_) end + # An upper bound for the number of tokens that can be generated for a response, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } def max_output_tokens end @@ -50,6 +81,12 @@ module OpenAI def max_output_tokens=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -58,6 +95,7 @@ module OpenAI def metadata=(_) end + # Whether to allow the model to run tool calls in parallel. sig { returns(T.nilable(T::Boolean)) } def parallel_tool_calls end @@ -66,6 +104,9 @@ module OpenAI def parallel_tool_calls=(_) end + # The unique ID of the previous response to the model. Use this to create + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). sig { returns(T.nilable(String)) } def previous_response_id end @@ -74,6 +115,10 @@ module OpenAI def previous_response_id=(_) end + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } def reasoning end @@ -82,6 +127,7 @@ module OpenAI def reasoning=(_) end + # Whether to store the generated model response for later retrieval via API. sig { returns(T.nilable(T::Boolean)) } def store end @@ -90,6 +136,10 @@ module OpenAI def store=(_) end + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. sig { returns(T.nilable(Float)) } def temperature end @@ -98,6 +148,11 @@ module OpenAI def temperature=(_) end + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } def text end @@ -109,6 +164,9 @@ module OpenAI def text=(_) end + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. sig do returns( T.nilable( @@ -130,6 +188,20 @@ module OpenAI def tool_choice=(_) end + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter.
+ # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do returns( T.nilable( @@ -172,6 +244,11 @@ module OpenAI def tools=(_) end + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } def top_p end @@ -180,6 +257,13 @@ module OpenAI def top_p=(_) end + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. sig { returns(T.nilable(Symbol)) } def truncation end @@ -188,6 +272,9 @@ module OpenAI def truncation=(_) end + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } def user end @@ -283,30 +370,50 @@ module OpenAI def to_hash end + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) class Input < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, OpenAI::Models::Responses::ResponseInput]]) } private def variants end end end + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. class Model < OpenAI::Union abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } private def variants end end end + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. class ToolChoice < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -318,6 +425,13 @@ module OpenAI end end + # The truncation strategy to use for the model response. 
+ # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. class Truncation < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index 2a500348..c68b3697 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseCreatedEvent < OpenAI::BaseModel + # The response that was created. sig { returns(OpenAI::Models::Responses::Response) } def response end @@ -12,6 +13,7 @@ module OpenAI def response=(_) end + # The type of the event. Always `response.created`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # An event that is emitted when a response is created. sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) } def self.new(response:, type: :"response.created") end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 367eea43..f6a6c36d 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseError < OpenAI::BaseModel + # The error code for the response. sig { returns(Symbol) } def code end @@ -12,6 +13,7 @@ module OpenAI def code=(_) end + # A human-readable description of the error. sig { returns(String) } def message end @@ -20,6 +22,7 @@ module OpenAI def message=(_) end + # An error object returned when the model fails to generate a Response. sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end @@ -28,6 +31,7 @@ module OpenAI def to_hash end + # The error code for the response. class Code < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/lib/openai/models/responses/response_error_event.rbi index 03c5b3b3..f4c0e9f0 100644 --- a/rbi/lib/openai/models/responses/response_error_event.rbi +++ b/rbi/lib/openai/models/responses/response_error_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseErrorEvent < OpenAI::BaseModel + # The error code. sig { returns(T.nilable(String)) } def code end @@ -12,6 +13,7 @@ module OpenAI def code=(_) end + # The error message. sig { returns(String) } def message end @@ -20,6 +22,7 @@ module OpenAI def message=(_) end + # The error parameter. sig { returns(T.nilable(String)) } def param end @@ -28,6 +31,7 @@ module OpenAI def param=(_) end + # The type of the event. Always `error`. sig { returns(Symbol) } def type end @@ -36,6 +40,7 @@ module OpenAI def type=(_) end + # Emitted when an error occurs. 
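The create parameters documented above all funnel into one request. A hedged sketch of how they might be passed through this SDK; the `OpenAI::Client` constructor and the `responses.create` method name are assumptions, not confirmed by this patch:

```ruby
require "openai"

# Hypothetical client setup; the constructor and the `responses.create`
# method are assumptions based on the models defined in this patch.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

response = client.responses.create(
  model: "gpt-4o",
  input: "Summarize the plot of Hamlet in two sentences.",
  instructions: "You are a terse literary assistant.", # system/developer message
  temperature: 0.2,       # lower temperature: more deterministic output
  max_output_tokens: 200, # bounds visible output plus reasoning tokens
  truncation: :auto,      # drop middle items if the context overflows
  store: true             # keep the response retrievable via API
)

puts response.id
```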
sig do params(code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index 5c3f69cb..c6d9fd32 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFailedEvent < OpenAI::BaseModel + # The response that failed. sig { returns(OpenAI::Models::Responses::Response) } def response end @@ -12,6 +13,7 @@ module OpenAI def response=(_) end + # The type of the event. Always `response.failed`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # An event that is emitted when a response fails. sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) } def self.new(response:, type: :"response.failed") end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi index 77ecf89c..ffb5cae7 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel + # The ID of the output item that the file search call is initiated. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the file search call is initiated. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.file_search_call.completed`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a file search call is completed (results found). sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.file_search_call.completed") end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi index f4d4c09f..2feeebed 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel + # The ID of the output item that the file search call is initiated. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the file search call is initiated. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.file_search_call.in_progress`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a file search call is initiated. 
sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.file_search_call.in_progress") end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi index 3ea7ffd2..b340e2ff 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel + # The ID of the output item that the file search call is initiated. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the file search call is searching. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.file_search_call.searching`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a file search is currently searching. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.file_search_call.searching") end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index c0c8564d..e3a52573 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFileSearchToolCall < OpenAI::BaseModel + # The unique ID of the file search tool call. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The queries used to search for files. sig { returns(T::Array[String]) } def queries end @@ -20,6 +22,8 @@ module OpenAI def queries=(_) end + # The status of the file search tool call. One of `in_progress`, `searching`, + # `incomplete` or `failed`. sig { returns(Symbol) } def status end @@ -28,6 +32,7 @@ module OpenAI def status=(_) end + # The type of the file search tool call. Always `file_search_call`. sig { returns(Symbol) } def type end @@ -36,6 +41,7 @@ module OpenAI def type=(_) end + # The results of the file search tool call. sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result])) } def results end @@ -47,6 +53,9 @@ module OpenAI def results=(_) end + # The results of a file search tool call. See the + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) + # for more information. sig do params( id: String, @@ -75,6 +84,8 @@ module OpenAI def to_hash end + # The status of the file search tool call. One of `in_progress`, `searching`, + # `incomplete` or `failed`. class Status < OpenAI::Enum abstract! @@ -92,6 +103,11 @@ module OpenAI end class Result < OpenAI::BaseModel + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers.
sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -103,6 +119,7 @@ module OpenAI def attributes=(_) end + # The unique ID of the file. sig { returns(T.nilable(String)) } def file_id end @@ -111,6 +128,7 @@ module OpenAI def file_id=(_) end + # The name of the file. sig { returns(T.nilable(String)) } def filename end @@ -119,6 +137,7 @@ module OpenAI def filename=(_) end + # The relevance score of the file - a value between 0 and 1. sig { returns(T.nilable(Float)) } def score end @@ -127,6 +146,7 @@ module OpenAI def score=(_) end + # The text that was retrieved from the file. sig { returns(T.nilable(String)) } def text end @@ -167,6 +187,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 170c0610..68fa12af 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -3,10 +3,24 @@ module OpenAI module Models module Responses + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. class ResponseFormatTextConfig < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 4899b49f..aab69ea5 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + # The schema for the response format, described as a JSON Schema object. Learn how + # to build JSON schemas [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } def schema end @@ -12,6 +14,7 @@ module OpenAI def schema=(_) end + # The type of response format being defined. Always `json_schema`. sig { returns(Symbol) } def type end @@ -20,6 +23,8 @@ module OpenAI def type=(_) end + # A description of what the response format is for, used by the model to determine + # how to respond in the format. sig { returns(T.nilable(String)) } def description end @@ -28,6 +33,8 @@ module OpenAI def description=(_) end + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. sig { returns(T.nilable(String)) } def name end @@ -36,6 +43,11 @@ module OpenAI def name=(_) end + # Whether to enable strict schema adherence when generating the output. If set to + # true, the model will always follow the exact schema defined in the `schema` + # field. 
Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } def strict end @@ -44,6 +56,9 @@ module OpenAI def strict=(_) end + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( schema: T::Hash[Symbol, T.anything], diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi index c543936a..bd790e94 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel + # The function-call arguments delta that is added. sig { returns(String) } def delta end @@ -12,6 +13,7 @@ module OpenAI def delta=(_) end + # The ID of the output item that the function-call arguments delta is added to. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the function-call arguments delta is added to. sig { returns(Integer) } def output_index end @@ -28,6 +31,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.function_call_arguments.delta`. sig { returns(Symbol) } def type end @@ -36,6 +40,7 @@ module OpenAI def type=(_) end + # Emitted when there is a partial function-call arguments delta. sig do params( delta: String, diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi index 17234bf9..48684e3e 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel + # The function-call arguments. sig { returns(String) } def arguments end @@ -12,6 +13,7 @@ module OpenAI def arguments=(_) end + # The ID of the item. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item. sig { returns(Integer) } def output_index end @@ -36,6 +39,7 @@ module OpenAI def type=(_) end + # Emitted when function-call arguments are finalized. sig do params( arguments: String, diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 97c8db84..49e2b6d9 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFunctionToolCall < OpenAI::BaseModel + # The unique ID of the function tool call. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # A JSON string of the arguments to pass to the function. sig { returns(String) } def arguments end @@ -20,6 +22,7 @@ module OpenAI def arguments=(_) end + # The unique ID of the function tool call generated by the model. 
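The `json_schema` text format described above is easiest to see as a literal payload. A minimal sketch of the `text` parameter value, assuming the nested `format` key used by the Responses API; the schema itself is illustrative:

```ruby
# The `text` parameter value enabling Structured Outputs via `json_schema`.
# With `strict: true`, generation must match this schema exactly.
text = {
  format: {
    type: "json_schema",
    name: "book_summary", # a-z, A-Z, 0-9, underscores, dashes; max length 64
    description: "A short structured summary of a book.",
    strict: true,
    schema: {
      type: "object",
      properties: {
        title: {type: "string"},
        author: {type: "string"},
        themes: {type: "array", items: {type: "string"}}
      },
      required: %w[title author themes],
      additionalProperties: false # strict mode requires closed objects
    }
  }
}
```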
sig { returns(String) } def call_id end @@ -28,6 +31,7 @@ module OpenAI def call_id=(_) end + # The name of the function to run. sig { returns(String) } def name end @@ -36,6 +40,7 @@ module OpenAI def name=(_) end + # The type of the function tool call. Always `function_call`. sig { returns(Symbol) } def type end @@ -44,6 +49,8 @@ module OpenAI def type=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -52,6 +59,9 @@ module OpenAI def status=(_) end + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. sig do params(id: String, arguments: String, call_id: String, name: String, status: Symbol, type: Symbol) .returns(T.attached_class) @@ -73,6 +83,8 @@ module OpenAI def to_hash end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index fc8ec7a6..daf897ed 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseFunctionWebSearch < OpenAI::BaseModel + # The unique ID of the web search tool call. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The status of the web search tool call. sig { returns(Symbol) } def status end @@ -20,6 +22,7 @@ module OpenAI def status=(_) end + # The type of the web search tool call. Always `web_search_call`. sig { returns(Symbol) } def type end @@ -28,6 +31,9 @@ module OpenAI def type=(_) end + # The results of a web search tool call. See the + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for + # more information. sig { params(id: String, status: Symbol, type: Symbol).returns(T.attached_class) } def self.new(id:, status:, type: :web_search_call) end @@ -36,6 +42,7 @@ module OpenAI def to_hash end + # The status of the web search tool call. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index ad20d756..57a31950 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseInProgressEvent < OpenAI::BaseModel + # The response that is in progress. sig { returns(OpenAI::Models::Responses::Response) } def response end @@ -12,6 +13,7 @@ module OpenAI def response=(_) end + # The type of the event. Always `response.in_progress`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # Emitted when the response is in progress. 
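`ResponseFunctionToolCall` pairs with the `function_call_output` input item (defined later in this patch) to form a round trip: the model emits a call, the integrator runs the function and returns the result under the same `call_id`. A sketch over plain hashes; the weather helper and the commented-out client call are assumptions:

```ruby
require "json"

# Hypothetical local implementation of a function the model may call.
def get_weather(city:)
  {city: city, forecast: "sunny", high_c: 21}
end

# Suppose this is one item from a response's output, shaped like the
# ResponseFunctionToolCall model above.
item = {
  type: "function_call",
  call_id: "call_123",
  name: "get_weather",
  arguments: '{"city":"Lisbon"}'
}

if item[:type] == "function_call" && item[:name] == "get_weather"
  args = JSON.parse(item[:arguments], symbolize_names: true)
  result = get_weather(**args)

  # Reply with a function_call_output item citing the same call_id.
  follow_up = [{type: "function_call_output", call_id: item[:call_id], output: JSON.generate(result)}]
  # client.responses.create(model: "gpt-4o", input: follow_up,
  #                         previous_response_id: "resp_...") # assumed client API
  puts follow_up.inspect
end
```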
sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) } def self.new(response:, type: :"response.in_progress") end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 6eab634e..f36ae216 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -3,6 +3,14 @@ module OpenAI module Models module Responses + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. class ResponseIncludable < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index ec47033d..9ca0c85e 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseIncompleteEvent < OpenAI::BaseModel + # The response that was incomplete. sig { returns(OpenAI::Models::Responses::Response) } def response end @@ -12,6 +13,7 @@ module OpenAI def response=(_) end + # The type of the event. Always `response.incomplete`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # An event that is emitted when a response finishes as incomplete. sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) } def self.new(response:, type: :"response.incomplete") end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index d7f1eee2..4a8628f6 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseInputAudio < OpenAI::BaseModel + # Base64-encoded audio data. sig { returns(String) } def data end @@ -12,6 +13,7 @@ module OpenAI def data=(_) end + # The format of the audio data. Currently supported formats are `mp3` and `wav`. sig { returns(Symbol) } def format_ end @@ -20,6 +22,7 @@ module OpenAI def format_=(_) end + # The type of the input item. Always `input_audio`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # An audio input to the model. sig { params(data: String, format_: Symbol, type: Symbol).returns(T.attached_class) } def self.new(data:, format_:, type: :input_audio) end @@ -36,6 +40,7 @@ module OpenAI def to_hash end + # The format of the audio data. Currently supported formats are `mp3` and `wav`. class Format < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index 5857ddd3..04d1918f 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -3,10 +3,12 @@ module OpenAI module Models module Responses + # A text input to the model. class ResponseInputContent < OpenAI::Union abstract! 
class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/lib/openai/models/responses/response_input_file.rbi index e6245523..cedf90ec 100644 --- a/rbi/lib/openai/models/responses/response_input_file.rbi +++ b/rbi/lib/openai/models/responses/response_input_file.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseInputFile < OpenAI::BaseModel + # The type of the input item. Always `input_file`. sig { returns(Symbol) } def type end @@ -12,6 +13,7 @@ module OpenAI def type=(_) end + # The content of the file to be sent to the model. sig { returns(T.nilable(String)) } def file_data end @@ -20,6 +22,7 @@ module OpenAI def file_data=(_) end + # The ID of the file to be sent to the model. sig { returns(T.nilable(String)) } def file_id end @@ -28,6 +31,7 @@ module OpenAI def file_id=(_) end + # The name of the file to be sent to the model. sig { returns(T.nilable(String)) } def filename end @@ -36,6 +40,7 @@ module OpenAI def filename=(_) end + # A file input to the model. sig do params(file_data: String, file_id: String, filename: String, type: Symbol).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 62bd604a..2e1819a4 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseInputImage < OpenAI::BaseModel + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. sig { returns(Symbol) } def detail end @@ -12,6 +14,7 @@ module OpenAI def detail=(_) end + # The type of the input item. Always `input_image`. sig { returns(Symbol) } def type end @@ -20,6 +23,7 @@ module OpenAI def type=(_) end + # The ID of the file to be sent to the model. sig { returns(T.nilable(String)) } def file_id end @@ -28,6 +32,8 @@ module OpenAI def file_id=(_) end + # The URL of the image to be sent to the model. A fully qualified URL or base64 + # encoded image in a data URL. sig { returns(T.nilable(String)) } def image_url end @@ -36,6 +42,8 @@ module OpenAI def image_url=(_) end + # An image input to the model. Learn about + # [image inputs](https://platform.openai.com/docs/guides/vision). sig do params(detail: Symbol, file_id: T.nilable(String), image_url: T.nilable(String), type: Symbol) .returns(T.attached_class) @@ -56,6 +64,8 @@ module OpenAI def to_hash end + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. class Detail < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index a2ac733a..faf692fb 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -3,10 +3,17 @@ module OpenAI module Models module Responses + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. class ResponseInputItem < OpenAI::Union abstract! 
class Message < OpenAI::BaseModel + # A list of one or many input items to the model, containing different content + # types. sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } def content end @@ -18,6 +25,7 @@ module OpenAI def content=(_) end + # The role of the message input. One of `user`, `system`, or `developer`. sig { returns(Symbol) } def role end @@ -26,6 +34,8 @@ module OpenAI def role=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -34,6 +44,7 @@ module OpenAI def status=(_) end + # The type of the message input. Always set to `message`. sig { returns(T.nilable(Symbol)) } def type end @@ -42,6 +53,9 @@ module OpenAI def type=(_) end + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. sig do params( content: OpenAI::Models::Responses::ResponseInputMessageContentList, @@ -63,6 +77,7 @@ module OpenAI def to_hash end + # The role of the message input. One of `user`, `system`, or `developer`. class Role < OpenAI::Enum abstract! @@ -77,6 +92,8 @@ module OpenAI end end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! @@ -91,6 +108,7 @@ module OpenAI end end + # The type of the message input. Always set to `message`. class Type < OpenAI::Enum abstract! @@ -105,6 +123,7 @@ module OpenAI end end class ComputerCallOutput < OpenAI::BaseModel + # The ID of the computer tool call that produced the output. sig { returns(String) } def call_id end @@ -113,6 +132,7 @@ module OpenAI def call_id=(_) end + # A computer screenshot image used with the computer use tool. sig { returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output) } def output end @@ -124,6 +144,7 @@ module OpenAI def output=(_) end + # The type of the computer tool call output. Always `computer_call_output`. sig { returns(Symbol) } def type end @@ -132,6 +153,7 @@ module OpenAI def type=(_) end + # The ID of the computer tool call output. sig { returns(T.nilable(String)) } def id end @@ -140,6 +162,8 @@ module OpenAI def id=(_) end + # The safety checks reported by the API that have been acknowledged by the + # developer. sig do returns( T.nilable( @@ -161,6 +185,8 @@ module OpenAI def acknowledged_safety_checks=(_) end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -169,6 +195,7 @@ module OpenAI def status=(_) end + # The output of a computer tool call. sig do params( call_id: String, @@ -200,6 +227,8 @@ module OpenAI end class Output < OpenAI::BaseModel + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. sig { returns(Symbol) } def type end @@ -208,6 +237,7 @@ module OpenAI def type=(_) end + # The identifier of an uploaded file that contains the screenshot. sig { returns(T.nilable(String)) } def file_id end @@ -216,6 +246,7 @@ module OpenAI def file_id=(_) end + # The URL of the screenshot image. sig { returns(T.nilable(String)) } def image_url end @@ -224,6 +255,7 @@ module OpenAI def image_url=(_) end + # A computer screenshot image used with the computer use tool.
sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } def self.new(file_id: nil, image_url: nil, type: :computer_screenshot) end @@ -234,6 +266,7 @@ module OpenAI end class AcknowledgedSafetyCheck < OpenAI::BaseModel + # The ID of the pending safety check. sig { returns(String) } def id end @@ -242,6 +275,7 @@ module OpenAI def id=(_) end + # The type of the pending safety check. sig { returns(String) } def code end @@ -250,6 +284,7 @@ module OpenAI def code=(_) end + # Details about the pending safety check. sig { returns(String) } def message end @@ -258,6 +293,7 @@ module OpenAI def message=(_) end + # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } def self.new(id:, code:, message:) end @@ -267,6 +303,8 @@ module OpenAI end end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. class Status < OpenAI::Enum abstract! @@ -283,6 +321,7 @@ module OpenAI end class FunctionCallOutput < OpenAI::BaseModel + # The unique ID of the function tool call generated by the model. sig { returns(String) } def call_id end @@ -291,6 +330,7 @@ module OpenAI def call_id=(_) end + # A JSON string of the output of the function tool call. sig { returns(String) } def output end @@ -299,6 +339,7 @@ module OpenAI def output=(_) end + # The type of the function tool call output. Always `function_call_output`. sig { returns(Symbol) } def type end @@ -307,6 +348,8 @@ module OpenAI def type=(_) end + # The unique ID of the function tool call output. Populated when this item is + # returned via API. sig { returns(T.nilable(String)) } def id end @@ -315,6 +358,8 @@ module OpenAI def id=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -323,6 +368,7 @@ module OpenAI def status=(_) end + # The output of a function tool call. sig do params( call_id: String, @@ -341,6 +387,8 @@ module OpenAI def to_hash end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! @@ -357,6 +405,7 @@ module OpenAI end class ItemReference < OpenAI::BaseModel + # The ID of the item to reference. sig { returns(String) } def id end @@ -365,6 +414,7 @@ module OpenAI def id=(_) end + # The type of item to reference. Always `item_reference`. sig { returns(Symbol) } def type end @@ -373,6 +423,7 @@ module OpenAI def type=(_) end + # An internal identifier for an item to reference. sig { params(id: String, type: Symbol).returns(T.attached_class) } def self.new(id:, type: :item_reference) end @@ -383,6 +434,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/lib/openai/models/responses/response_input_text.rbi index df900197..16fc4040 100644 --- a/rbi/lib/openai/models/responses/response_input_text.rbi +++ b/rbi/lib/openai/models/responses/response_input_text.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseInputText < OpenAI::BaseModel + # The text input to the model. sig { returns(String) } def text end @@ -12,6 +13,7 @@ module OpenAI def text=(_) end + # The type of the input item. Always `input_text`. 
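The input item and content part shapes above compose into the `input` array of a create request. A minimal sketch of a multi-modal user message, using only the wire-format keys documented in the comments:

```ruby
# One user message combining input_text and input_image content parts,
# matching the shapes documented above. Symbol keys mirror the wire format.
input = [
  {
    type: "message",
    role: "user",
    content: [
      {type: "input_text", text: "What is in this image?"},
      {
        type: "input_image",
        detail: "auto", # one of high, low, or auto
        image_url: "https://example.com/cat.png" # or a base64 data URL
      }
    ]
  }
]

puts input.first[:content].map { |part| part[:type] }.join(", ")
```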
sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # A text input to the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :input_text) end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index c24e41f8..6b49186e 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -6,6 +6,7 @@ module OpenAI module Responses class ResponseItemList < OpenAI::BaseModel + # A list of items used to generate this response. sig do returns( T::Array[ @@ -58,6 +59,7 @@ module OpenAI def data=(_) end + # The ID of the first item in the list. sig { returns(String) } def first_id end @@ -66,6 +68,7 @@ module OpenAI def first_id=(_) end + # Whether there are more items available. sig { returns(T::Boolean) } def has_more end @@ -74,6 +77,7 @@ module OpenAI def has_more=(_) end + # The ID of the last item in the list. sig { returns(String) } def last_id end @@ -82,6 +86,7 @@ module OpenAI def last_id=(_) end + # The type of object returned, must be `list`. sig { returns(Symbol) } def object end @@ -90,6 +95,7 @@ module OpenAI def object=(_) end + # A list of Response items. sig do params( data: T::Array[ @@ -140,10 +146,12 @@ module OpenAI def to_hash end + # Content item used to generate a response. class Data < OpenAI::Union abstract! class Message < OpenAI::BaseModel + # The unique ID of the message input. sig { returns(String) } def id end @@ -152,6 +160,8 @@ module OpenAI def id=(_) end + # A list of one or many input items to the model, containing different content + # types. sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } def content end @@ -163,6 +173,7 @@ module OpenAI def content=(_) end + # The role of the message input. One of `user`, `system`, or `developer`. sig { returns(Symbol) } def role end @@ -171,6 +182,8 @@ module OpenAI def role=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -179,6 +192,7 @@ module OpenAI def status=(_) end + # The type of the message input. Always set to `message`. sig { returns(T.nilable(Symbol)) } def type end @@ -215,6 +229,7 @@ module OpenAI def to_hash end + # The role of the message input. One of `user`, `system`, or `developer`. class Role < OpenAI::Enum abstract! @@ -229,6 +244,8 @@ module OpenAI end end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! @@ -243,6 +260,7 @@ module OpenAI end end + # The type of the message input. Always set to `message`. class Type < OpenAI::Enum abstract! @@ -257,6 +275,7 @@ module OpenAI end class ComputerCallOutput < OpenAI::BaseModel + # The unique ID of the computer call tool output. sig { returns(String) } def id end @@ -265,6 +284,7 @@ module OpenAI def id=(_) end + # The ID of the computer tool call that produced the output. sig { returns(String) } def call_id end @@ -273,6 +293,7 @@ module OpenAI def call_id=(_) end + # A computer screenshot image used with the computer use tool. sig { returns(OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output) } def output end @@ -284,6 +305,7 @@ module OpenAI def output=(_) end + # The type of the computer tool call output. Always `computer_call_output`.
sig { returns(Symbol) } def type end @@ -292,6 +314,8 @@ module OpenAI def type=(_) end + # The safety checks reported by the API that have been acknowledged by the + # developer. sig do returns( T.nilable( @@ -313,6 +337,8 @@ module OpenAI def acknowledged_safety_checks=(_) end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -352,6 +378,8 @@ module OpenAI end class Output < OpenAI::BaseModel + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. sig { returns(Symbol) } def type end @@ -360,6 +388,7 @@ module OpenAI def type=(_) end + # The identifier of an uploaded file that contains the screenshot. sig { returns(T.nilable(String)) } def file_id end @@ -368,6 +397,7 @@ module OpenAI def file_id=(_) end + # The URL of the screenshot image. sig { returns(T.nilable(String)) } def image_url end @@ -376,6 +406,7 @@ module OpenAI def image_url=(_) end + # A computer screenshot image used with the computer use tool. sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } def self.new(file_id: nil, image_url: nil, type: :computer_screenshot) end @@ -386,6 +417,7 @@ module OpenAI end class AcknowledgedSafetyCheck < OpenAI::BaseModel + # The ID of the pending safety check. sig { returns(String) } def id end @@ -394,6 +426,7 @@ module OpenAI def id=(_) end + # The type of the pending safety check. sig { returns(String) } def code end @@ -402,6 +435,7 @@ module OpenAI def code=(_) end + # Details about the pending safety check. sig { returns(String) } def message end @@ -410,6 +444,7 @@ module OpenAI def message=(_) end + # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } def self.new(id:, code:, message:) end @@ -419,6 +454,8 @@ module OpenAI end end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. class Status < OpenAI::Enum abstract! @@ -435,6 +472,7 @@ module OpenAI end class FunctionCallOutput < OpenAI::BaseModel + # The unique ID of the function call tool output. sig { returns(String) } def id end @@ -443,6 +481,7 @@ module OpenAI def id=(_) end + # The unique ID of the function tool call generated by the model. sig { returns(String) } def call_id end @@ -451,6 +490,7 @@ module OpenAI def call_id=(_) end + # A JSON string of the output of the function tool call. sig { returns(String) } def output end @@ -459,6 +499,7 @@ module OpenAI def output=(_) end + # The type of the function tool call output. Always `function_call_output`. sig { returns(Symbol) } def type end @@ -467,6 +508,8 @@ module OpenAI def type=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -493,6 +536,8 @@ module OpenAI def to_hash end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! 
@@ -509,6 +554,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/lib/openai/models/responses/response_output_audio.rbi index 1aed1ddd..162e5138 100644 --- a/rbi/lib/openai/models/responses/response_output_audio.rbi +++ b/rbi/lib/openai/models/responses/response_output_audio.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputAudio < OpenAI::BaseModel + # Base64-encoded audio data from the model. sig { returns(String) } def data end @@ -12,6 +13,7 @@ module OpenAI def data=(_) end + # The transcript of the audio data from the model. sig { returns(String) } def transcript end @@ -20,6 +22,7 @@ module OpenAI def transcript=(_) end + # The type of the output audio. Always `output_audio`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # An audio output from the model. sig { params(data: String, transcript: String, type: Symbol).returns(T.attached_class) } def self.new(data:, transcript:, type: :output_audio) end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index 8a2bf039..f508403c 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -3,10 +3,12 @@ module OpenAI module Models module Responses + # An output message from the model. class ResponseOutputItem < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 837047df..0e49a206 100644 --- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputItemAddedEvent < OpenAI::BaseModel + # The output item that was added. sig do returns( T.any( @@ -44,6 +45,7 @@ module OpenAI def item=(_) end + # The index of the output item that was added. sig { returns(Integer) } def output_index end @@ -52,6 +54,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.output_item.added`. sig { returns(Symbol) } def type end @@ -60,6 +63,7 @@ module OpenAI def type=(_) end + # Emitted when a new output item is added. sig do params( item: T.any( diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index 75961890..e53adef5 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputItemDoneEvent < OpenAI::BaseModel + # The output item that was marked done. sig do returns( T.any( @@ -44,6 +45,7 @@ module OpenAI def item=(_) end + # The index of the output item that was marked done. sig { returns(Integer) } def output_index end @@ -52,6 +54,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.output_item.done`. sig { returns(Symbol) } def type end @@ -60,6 +63,7 @@ module OpenAI def type=(_) end + # Emitted when an output item is marked done. 
sig do params( item: T.any( diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index d2cbf773..80dfb0e2 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputMessage < OpenAI::BaseModel + # The unique ID of the output message. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # The content of the output message. sig do returns( T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] @@ -31,6 +33,7 @@ module OpenAI def content=(_) end + # The role of the output message. Always `assistant`. sig { returns(Symbol) } def role end @@ -39,6 +42,8 @@ module OpenAI def role=(_) end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. sig { returns(Symbol) } def status end @@ -47,6 +52,7 @@ module OpenAI def status=(_) end + # The type of the output message. Always `message`. sig { returns(Symbol) } def type end @@ -55,6 +61,7 @@ module OpenAI def type=(_) end + # An output message from the model. sig do params( id: String, @@ -83,10 +90,12 @@ module OpenAI def to_hash end + # A text output from the model. class Content < OpenAI::Union abstract! class << self + # @api private sig do override .returns( @@ -98,6 +107,8 @@ module OpenAI end end + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/lib/openai/models/responses/response_output_refusal.rbi index f573eb83..1db3c101 100644 --- a/rbi/lib/openai/models/responses/response_output_refusal.rbi +++ b/rbi/lib/openai/models/responses/response_output_refusal.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputRefusal < OpenAI::BaseModel + # The refusal explanation from the model. sig { returns(String) } def refusal end @@ -12,6 +13,7 @@ module OpenAI def refusal=(_) end + # The type of the refusal. Always `refusal`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # A refusal from the model. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } def self.new(refusal:, type: :refusal) end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 7f9e510e..e9ce0233 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseOutputText < OpenAI::BaseModel + # The annotations of the text output. sig do returns( T::Array[ @@ -41,6 +42,7 @@ module OpenAI def annotations=(_) end + # The text output from the model. sig { returns(String) } def text end @@ -49,6 +51,7 @@ module OpenAI def text=(_) end + # The type of the output text. Always `output_text`. sig { returns(Symbol) } def type end @@ -57,6 +60,7 @@ module OpenAI def type=(_) end + # A text output from the model. sig do params( annotations: T::Array[ @@ -93,10 +97,12 @@ module OpenAI def to_hash end + # A citation to a file. class Annotation < OpenAI::Union abstract!
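The `ResponseOutputMessage` content union above pairs text parts with refusal parts; the following is a minimal sketch of rendering both, assuming `message` is a `ResponseOutputMessage` obtained from a prior API call (an assumption for illustration).

```ruby
# Minimal sketch, assuming `message` is an
# OpenAI::Models::Responses::ResponseOutputMessage from an earlier response.
message.content.each do |part|
  case part
  when OpenAI::Models::Responses::ResponseOutputText
    print part.text # citations, if any, are exposed via part.annotations
  when OpenAI::Models::Responses::ResponseOutputRefusal
    warn "model refused: #{part.refusal}"
  end
end
```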
class FileCitation < OpenAI::BaseModel + # The ID of the file. sig { returns(String) } def file_id end @@ -105,6 +111,7 @@ module OpenAI def file_id=(_) end + # The index of the file in the list of files. sig { returns(Integer) } def index end @@ -113,6 +120,7 @@ module OpenAI def index=(_) end + # The type of the file citation. Always `file_citation`. sig { returns(Symbol) } def type end @@ -121,6 +129,7 @@ module OpenAI def type=(_) end + # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } def self.new(file_id:, index:, type: :file_citation) end @@ -131,6 +140,7 @@ module OpenAI end class URLCitation < OpenAI::BaseModel + # The index of the last character of the URL citation in the message. sig { returns(Integer) } def end_index end @@ -139,6 +149,7 @@ module OpenAI def end_index=(_) end + # The index of the first character of the URL citation in the message. sig { returns(Integer) } def start_index end @@ -147,6 +158,7 @@ module OpenAI def start_index=(_) end + # The title of the web resource. sig { returns(String) } def title end @@ -155,6 +167,7 @@ module OpenAI def title=(_) end + # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } def type end @@ -163,6 +176,7 @@ module OpenAI def type=(_) end + # The URL of the web resource. sig { returns(String) } def url end @@ -171,6 +185,7 @@ module OpenAI def url=(_) end + # A citation for a web resource used to generate a model response. sig do params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) @@ -194,6 +209,7 @@ module OpenAI end class FilePath < OpenAI::BaseModel + # The ID of the file. sig { returns(String) } def file_id end @@ -202,6 +218,7 @@ module OpenAI def file_id=(_) end + # The index of the file in the list of files. sig { returns(Integer) } def index end @@ -210,6 +227,7 @@ module OpenAI def index=(_) end + # The type of the file path. Always `file_path`. sig { returns(Symbol) } def type end @@ -218,6 +236,7 @@ module OpenAI def type=(_) end + # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } def self.new(file_id:, index:, type: :file_path) end @@ -228,6 +247,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 4733fee4..9207ae25 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseReasoningItem < OpenAI::BaseModel + # The unique identifier of the reasoning content. sig { returns(String) } def id end @@ -12,6 +13,7 @@ module OpenAI def id=(_) end + # Reasoning text contents. sig { returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]) } def summary end @@ -23,6 +25,7 @@ module OpenAI def summary=(_) end + # The type of the object. Always `reasoning`. sig { returns(Symbol) } def type end @@ -31,6 +34,8 @@ module OpenAI def type=(_) end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } def status end @@ -39,6 +44,8 @@ module OpenAI def status=(_) end + # A description of the chain of thought used by a reasoning model while generating + # a response. 
sig do params( id: String, @@ -66,6 +73,7 @@ module OpenAI end class Summary < OpenAI::BaseModel + # A short summary of the reasoning used by the model when generating the response. sig { returns(String) } def text end @@ -74,6 +82,7 @@ module OpenAI def text=(_) end + # The type of the object. Always `summary_text`. sig { returns(Symbol) } def type end @@ -91,6 +100,8 @@ module OpenAI end end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi index 7517aba1..f4a53f02 100644 --- a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseRefusalDeltaEvent < OpenAI::BaseModel + # The index of the content part that the refusal text is added to. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The refusal text that is added. sig { returns(String) } def delta end @@ -20,6 +22,7 @@ module OpenAI def delta=(_) end + # The ID of the output item that the refusal text is added to. sig { returns(String) } def item_id end @@ -28,6 +31,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the refusal text is added to. sig { returns(Integer) } def output_index end @@ -36,6 +40,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.refusal.delta`. sig { returns(Symbol) } def type end @@ -44,6 +49,7 @@ module OpenAI def type=(_) end + # Emitted when there is a partial refusal text. sig do params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi index 560a27ef..1e4bf80b 100644 --- a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseRefusalDoneEvent < OpenAI::BaseModel + # The index of the content part that the refusal text is finalized. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The ID of the output item that the refusal text is finalized. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the refusal text is finalized. sig { returns(Integer) } def output_index end @@ -28,6 +31,7 @@ module OpenAI def output_index=(_) end + # The refusal text that is finalized. sig { returns(String) } def refusal end @@ -36,6 +40,7 @@ module OpenAI def refusal=(_) end + # The type of the event. Always `response.refusal.done`. sig { returns(Symbol) } def type end @@ -44,6 +49,7 @@ module OpenAI def type=(_) end + # Emitted when refusal text is finalized. 
sig do params( content_index: Integer, diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 229f8fbb..8e817f0d 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. sig { returns(T.nilable(T::Array[Symbol])) } def include end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 11ae0a2a..9fcb3ab3 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -3,6 +3,8 @@ module OpenAI module Models module Responses + # The status of the response generation. One of `completed`, `failed`, + # `in_progress`, or `incomplete`. class ResponseStatus < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index a0f3b82c..6e6608c2 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -3,10 +3,12 @@ module OpenAI module Models module Responses + # Emitted when there is a partial audio response. class ResponseStreamEvent < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 2d60b64e..bafadc88 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel + # A citation to a file. sig do returns( T.any( @@ -35,6 +36,7 @@ module OpenAI def annotation=(_) end + # The index of the annotation that was added. sig { returns(Integer) } def annotation_index end @@ -43,6 +45,7 @@ module OpenAI def annotation_index=(_) end + # The index of the content part that the text annotation was added to. sig { returns(Integer) } def content_index end @@ -51,6 +54,7 @@ module OpenAI def content_index=(_) end + # The ID of the output item that the text annotation was added to. sig { returns(String) } def item_id end @@ -59,6 +63,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the text annotation was added to. sig { returns(Integer) } def output_index end @@ -67,6 +72,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.output_text.annotation.added`. sig { returns(Symbol) } def type end @@ -75,6 +81,7 @@ module OpenAI def type=(_) end + # Emitted when a text annotation is added. sig do params( annotation: T.any( @@ -120,10 +127,12 @@ module OpenAI def to_hash end + # A citation to a file. class Annotation < OpenAI::Union abstract! class FileCitation < OpenAI::BaseModel + # The ID of the file. sig { returns(String) } def file_id end @@ -132,6 +141,7 @@ module OpenAI def file_id=(_) end + # The index of the file in the list of files. 
sig { returns(Integer) } def index end @@ -140,6 +150,7 @@ module OpenAI def index=(_) end + # The type of the file citation. Always `file_citation`. sig { returns(Symbol) } def type end @@ -148,6 +159,7 @@ module OpenAI def type=(_) end + # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } def self.new(file_id:, index:, type: :file_citation) end @@ -158,6 +170,7 @@ module OpenAI end class URLCitation < OpenAI::BaseModel + # The index of the last character of the URL citation in the message. sig { returns(Integer) } def end_index end @@ -166,6 +179,7 @@ module OpenAI def end_index=(_) end + # The index of the first character of the URL citation in the message. sig { returns(Integer) } def start_index end @@ -174,6 +188,7 @@ module OpenAI def start_index=(_) end + # The title of the web resource. sig { returns(String) } def title end @@ -182,6 +197,7 @@ module OpenAI def title=(_) end + # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } def type end @@ -190,6 +206,7 @@ module OpenAI def type=(_) end + # The URL of the web resource. sig { returns(String) } def url end @@ -198,6 +215,7 @@ module OpenAI def url=(_) end + # A citation for a web resource used to generate a model response. sig do params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) @@ -221,6 +239,7 @@ module OpenAI end class FilePath < OpenAI::BaseModel + # The ID of the file. sig { returns(String) } def file_id end @@ -229,6 +248,7 @@ module OpenAI def file_id=(_) end + # The index of the file in the list of files. sig { returns(Integer) } def index end @@ -237,6 +257,7 @@ module OpenAI def index=(_) end + # The type of the file path. Always `file_path`. sig { returns(Symbol) } def type end @@ -245,6 +266,7 @@ module OpenAI def type=(_) end + # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } def self.new(file_id:, index:, type: :file_path) end @@ -255,6 +277,7 @@ module OpenAI end class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index a04a35f9..2287e496 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -4,6 +4,19 @@ module OpenAI module Models module Responses class ResponseTextConfig < OpenAI::BaseModel + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. sig do returns( T.nilable( @@ -37,6 +50,11 @@ module OpenAI def format_=(_) end + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig do params( format_: T.any( diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_delta_event.rbi index acc2fa04..50307018 100644 --- a/rbi/lib/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_delta_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseTextDeltaEvent < OpenAI::BaseModel + # The index of the content part that the text delta was added to. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The text delta that was added. sig { returns(String) } def delta end @@ -20,6 +22,7 @@ module OpenAI def delta=(_) end + # The ID of the output item that the text delta was added to. sig { returns(String) } def item_id end @@ -28,6 +31,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the text delta was added to. sig { returns(Integer) } def output_index end @@ -36,6 +40,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.output_text.delta`. sig { returns(Symbol) } def type end @@ -44,6 +49,7 @@ module OpenAI def type=(_) end + # Emitted when there is an additional text delta. sig do params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/lib/openai/models/responses/response_text_done_event.rbi index 8b2ece95..56e6ddd7 100644 --- a/rbi/lib/openai/models/responses/response_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_done_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseTextDoneEvent < OpenAI::BaseModel + # The index of the content part that the text content is finalized. sig { returns(Integer) } def content_index end @@ -12,6 +13,7 @@ module OpenAI def content_index=(_) end + # The ID of the output item that the text content is finalized. sig { returns(String) } def item_id end @@ -20,6 +22,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the text content is finalized. sig { returns(Integer) } def output_index end @@ -28,6 +31,7 @@ module OpenAI def output_index=(_) end + # The text content that is finalized. sig { returns(String) } def text end @@ -36,6 +40,7 @@ module OpenAI def text=(_) end + # The type of the event. Always `response.output_text.done`. sig { returns(Symbol) } def type end @@ -44,6 +49,7 @@ module OpenAI def type=(_) end + # Emitted when text content is finalized. sig do params(content_index: Integer, item_id: String, output_index: Integer, text: String, type: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 87f7c238..b46f45aa 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseUsage < OpenAI::BaseModel + # The number of input tokens. sig { returns(Integer) } def input_tokens end @@ -12,6 +13,7 @@ module OpenAI def input_tokens=(_) end + # The number of output tokens. 
sig { returns(Integer) } def output_tokens end @@ -20,6 +22,7 @@ module OpenAI def output_tokens=(_) end + # A detailed breakdown of the output tokens. sig { returns(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails) } def output_tokens_details end @@ -31,6 +34,7 @@ module OpenAI def output_tokens_details=(_) end + # The total number of tokens used. sig { returns(Integer) } def total_tokens end @@ -39,6 +43,8 @@ module OpenAI def total_tokens=(_) end + # Represents token usage details including input tokens, output tokens, a + # breakdown of output tokens, and the total tokens used. sig do params( input_tokens: Integer, @@ -66,6 +72,7 @@ module OpenAI end class OutputTokensDetails < OpenAI::BaseModel + # The number of reasoning tokens. sig { returns(Integer) } def reasoning_tokens end @@ -74,6 +81,7 @@ module OpenAI def reasoning_tokens=(_) end + # A detailed breakdown of the output tokens. sig { params(reasoning_tokens: Integer).returns(T.attached_class) } def self.new(reasoning_tokens:) end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi index 16bde4c4..1348fded 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel + # Unique ID for the output item associated with the web search call. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the web search call is associated with. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.web_search_call.completed`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a web search call is completed. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.web_search_call.completed") end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi index 654b7293..891725d6 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel + # Unique ID for the output item associated with the web search call. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the web search call is associated with. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.web_search_call.in_progress`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a web search call is initiated. 
sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.web_search_call.in_progress") end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi index c0de9efa..a2f0a421 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel + # Unique ID for the output item associated with the web search call. sig { returns(String) } def item_id end @@ -12,6 +13,7 @@ module OpenAI def item_id=(_) end + # The index of the output item that the web search call is associated with. sig { returns(Integer) } def output_index end @@ -20,6 +22,7 @@ module OpenAI def output_index=(_) end + # The type of the event. Always `response.web_search_call.searching`. sig { returns(Symbol) } def type end @@ -28,6 +31,7 @@ module OpenAI def type=(_) end + # Emitted when a web search call is executing. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } def self.new(item_id:, output_index:, type: :"response.web_search_call.searching") end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 00e4ecf8..104f7bf0 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -3,10 +3,14 @@ module OpenAI module Models module Responses + # A tool that searches for relevant content from uploaded files. Learn more about + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). class Tool < OpenAI::Union abstract! class << self + # @api private sig do override .returns( diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/lib/openai/models/responses/tool_choice_function.rbi index c11c91a4..a8afd2d4 100644 --- a/rbi/lib/openai/models/responses/tool_choice_function.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_function.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module Responses class ToolChoiceFunction < OpenAI::BaseModel + # The name of the function to call. sig { returns(String) } def name end @@ -12,6 +13,7 @@ module OpenAI def name=(_) end + # For function calling, the type is always `function`. sig { returns(Symbol) } def type end @@ -20,6 +22,7 @@ module OpenAI def type=(_) end + # Use this option to force the model to call a specific function. sig { params(name: String, type: Symbol).returns(T.attached_class) } def self.new(name:, type: :function) end diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index 7e1f9984..f2fdff28 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -3,6 +3,14 @@ module OpenAI module Models module Responses + # Controls which (if any) tool is called by the model. + # + # `none` means the model will not call any tool and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling one or + # more tools. + # + # `required` means the model must call one or more tools. class ToolChoiceOptions < OpenAI::Enum abstract! 
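The `tool_choice` shapes above (`none`/`auto`/`required` as options, or a `ToolChoiceFunction` to force one function) can be illustrated with a hedged sketch. The `client.responses.create` call shape and the weather tool are assumptions for illustration; `ToolChoiceFunction.new(name:)` follows the signature in this patch.

```ruby
# Hedged sketch: force the model to call a specific function. The request
# method and `get_weather_tool` are assumptions; only ToolChoiceFunction
# comes from the signatures above.
client.responses.create(
  model: "gpt-4o",
  input: "What's the weather in Oslo?",
  tools: [get_weather_tool], # hypothetical function tool definition
  tool_choice: OpenAI::Models::Responses::ToolChoiceFunction.new(name: "get_weather")
)
```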
diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 8176455f..7f039150 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -4,6 +4,14 @@ module OpenAI module Models module Responses class ToolChoiceTypes < OpenAI::BaseModel + # The type of hosted tool the model should use. Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # + # Allowed values are: + # + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` sig { returns(Symbol) } def type end @@ -12,6 +20,8 @@ module OpenAI def type=(_) end + # Indicates that the model should use a built-in tool to generate a response. + # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). sig { params(type: Symbol).returns(T.attached_class) } def self.new(type:) end @@ -20,6 +30,14 @@ module OpenAI def to_hash end + # The type of hosted tool the model should use. Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # + # Allowed values are: + # + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index be37fbf0..71b37e66 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -4,6 +4,10 @@ module OpenAI module Models module Responses class WebSearchTool < OpenAI::BaseModel + # The type of the web search tool. One of: + # + # - `web_search_preview` + # - `web_search_preview_2025_03_11` sig { returns(Symbol) } def type end @@ -12,6 +16,8 @@ module OpenAI def type=(_) end + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. sig { returns(T.nilable(Symbol)) } def search_context_size end @@ -31,6 +37,9 @@ module OpenAI def user_location=(_) end + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). sig do params( type: Symbol, @@ -55,6 +64,10 @@ module OpenAI def to_hash end + # The type of the web search tool. One of: + # + # - `web_search_preview` + # - `web_search_preview_2025_03_11` class Type < OpenAI::Enum abstract! @@ -68,6 +81,8 @@ end end + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. class SearchContextSize < OpenAI::Enum abstract! @@ -83,6 +98,7 @@ end end class UserLocation < OpenAI::BaseModel + # The type of location approximation. Always `approximate`. sig { returns(Symbol) } def type end @@ -91,6 +107,7 @@ module OpenAI def type=(_) end + # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } def city end @@ -99,6 +116,8 @@ module OpenAI def city=(_) end + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. sig { returns(T.nilable(String)) } def country end @@ -107,6 +126,7 @@ module OpenAI def country=(_) end + # Free text input for the region of the user, e.g. `California`.
sig { returns(T.nilable(String)) } def region end @@ -115,6 +135,8 @@ module OpenAI def region=(_) end + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } def timezone end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index b076e093..1d8d219c 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -3,6 +3,9 @@ module OpenAI module Models class StaticFileChunkingStrategy < OpenAI::BaseModel + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } def chunk_overlap_tokens end @@ -11,6 +14,8 @@ module OpenAI def chunk_overlap_tokens=(_) end + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } def max_chunk_size_tokens end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index 94e5e78c..99645473 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -11,6 +11,7 @@ module OpenAI def static=(_) end + # Always `static`. sig { returns(Symbol) } def type end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index 3eed65cb..a7ed94e1 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -11,6 +11,7 @@ module OpenAI def static=(_) end + # Always `static`. sig { returns(Symbol) } def type end @@ -19,6 +20,7 @@ module OpenAI def type=(_) end + # Customize your own chunking strategy by setting chunk size and chunk overlap. sig { params(static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol).returns(T.attached_class) } def self.new(static:, type: :static) end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 422b80e9..6d1b30b0 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class Upload < OpenAI::BaseModel + # The Upload unique identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The intended number of bytes to be uploaded. sig { returns(Integer) } def bytes end @@ -19,6 +21,7 @@ module OpenAI def bytes=(_) end + # The Unix timestamp (in seconds) for when the Upload was created. sig { returns(Integer) } def created_at end @@ -27,6 +30,7 @@ module OpenAI def created_at=(_) end + # The Unix timestamp (in seconds) for when the Upload will expire. sig { returns(Integer) } def expires_at end @@ -35,6 +39,7 @@ module OpenAI def expires_at=(_) end + # The name of the file to be uploaded. sig { returns(String) } def filename end @@ -43,6 +48,7 @@ module OpenAI def filename=(_) end + # The object type, which is always "upload". sig { returns(Symbol) } def object end @@ -51,6 +57,9 @@ module OpenAI def object=(_) end + # The intended purpose of the file. 
+ # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + # for acceptable values. sig { returns(String) } def purpose end @@ -59,6 +68,7 @@ module OpenAI def purpose=(_) end + # The status of the Upload. sig { returns(Symbol) } def status end @@ -67,6 +77,7 @@ module OpenAI def status=(_) end + # The `File` object represents a document that has been uploaded to OpenAI. sig { returns(T.nilable(OpenAI::Models::FileObject)) } def file end @@ -75,6 +86,7 @@ module OpenAI def file=(_) end + # The Upload object can accept byte chunks in the form of Parts. sig do params( id: String, @@ -111,6 +123,7 @@ module OpenAI def to_hash end + # The status of the Upload. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index 840707e6..f8550617 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -6,6 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The ordered list of Part IDs. sig { returns(T::Array[String]) } def part_ids end @@ -14,6 +15,8 @@ module OpenAI def part_ids=(_) end + # The optional md5 checksum for the file contents to verify if the bytes uploaded + # match what you expect. sig { returns(T.nilable(String)) } def md5 end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 6d144a54..22555f0e 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -6,6 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The number of bytes in the file you are uploading. sig { returns(Integer) } def bytes end @@ -14,6 +15,7 @@ module OpenAI def bytes=(_) end + # The name of the file to upload. sig { returns(String) } def filename end @@ -22,6 +24,7 @@ module OpenAI def filename=(_) end + # The MIME type of the file. + # + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. sig { returns(String) } def mime_type end @@ -30,6 +36,10 @@ module OpenAI def mime_type=(_) end + # The intended purpose of the uploaded file. + # + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). sig { returns(Symbol) } def purpose end diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index 4e2fe8c7..7ef6052a 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -7,6 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The chunk of bytes for this Part. sig { returns(T.any(IO, StringIO)) } def data end diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index 6801e859..ae805178 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -6,6 +6,7 @@ module OpenAI module Uploads class UploadPart < OpenAI::BaseModel + # The upload Part unique identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the Part was created.
sig { returns(Integer) } def created_at end @@ -22,6 +24,7 @@ module OpenAI def created_at=(_) end + # The object type, which is always `upload.part`. sig { returns(Symbol) } def object end @@ -30,6 +33,7 @@ module OpenAI def object=(_) end + # The ID of the Upload object that this Part was added to. sig { returns(String) } def upload_id end @@ -38,6 +42,7 @@ module OpenAI def upload_id=(_) end + # The upload Part represents a chunk of bytes we can add to an Upload object. sig do params(id: String, created_at: Integer, upload_id: String, object: Symbol).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index e60530c4..bf4bb14f 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -3,6 +3,7 @@ module OpenAI module Models class VectorStore < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -11,6 +12,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the vector store was created. sig { returns(Integer) } def created_at end @@ -27,6 +29,7 @@ module OpenAI def file_counts=(_) end + # The Unix timestamp (in seconds) for when the vector store was last active. sig { returns(T.nilable(Integer)) } def last_active_at end @@ -35,6 +38,12 @@ module OpenAI def last_active_at=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -43,6 +52,7 @@ module OpenAI def metadata=(_) end + # The name of the vector store. sig { returns(String) } def name end @@ -51,6 +61,7 @@ module OpenAI def name=(_) end + # The object type, which is always `vector_store`. sig { returns(Symbol) } def object end @@ -59,6 +70,9 @@ module OpenAI def object=(_) end + # The status of the vector store, which can be either `expired`, `in_progress`, or + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. sig { returns(Symbol) } def status end @@ -67,6 +81,7 @@ module OpenAI def status=(_) end + # The total number of bytes used by the files in the vector store. sig { returns(Integer) } def usage_bytes end @@ -75,6 +90,7 @@ module OpenAI def usage_bytes=(_) end + # The expiration policy for a vector store. sig { returns(T.nilable(OpenAI::Models::VectorStore::ExpiresAfter)) } def expires_after end @@ -83,6 +99,7 @@ module OpenAI def expires_after=(_) end + # The Unix timestamp (in seconds) for when the vector store will expire. sig { returns(T.nilable(Integer)) } def expires_at end @@ -91,6 +108,8 @@ module OpenAI def expires_at=(_) end + # A vector store is a collection of processed files that can be used by the + # `file_search` tool. sig do params( id: String, @@ -144,6 +163,7 @@ module OpenAI end class FileCounts < OpenAI::BaseModel + # The number of files that were cancelled. sig { returns(Integer) } def cancelled end @@ -152,6 +172,7 @@ module OpenAI def cancelled=(_) end + # The number of files that have been successfully processed. sig { returns(Integer) } def completed end @@ -160,6 +181,7 @@ module OpenAI def completed=(_) end + # The number of files that have failed to process.
sig { returns(Integer) } def failed end @@ -168,6 +190,7 @@ module OpenAI def failed=(_) end + # The number of files that are currently being processed. sig { returns(Integer) } def in_progress end @@ -176,6 +199,7 @@ module OpenAI def in_progress=(_) end + # The total number of files. sig { returns(Integer) } def total end @@ -211,6 +235,9 @@ module OpenAI end end + # The status of the vector store, which can be either `expired`, `in_progress`, or + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. class Status < OpenAI::Enum abstract! @@ -226,6 +253,8 @@ module OpenAI end class ExpiresAfter < OpenAI::BaseModel + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. sig { returns(Symbol) } def anchor end @@ -234,6 +263,7 @@ module OpenAI def anchor=(_) end + # The number of days after the anchor time that the vector store will expire. sig { returns(Integer) } def days end @@ -242,6 +272,7 @@ module OpenAI def days=(_) end + # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } def self.new(days:, anchor: :last_active_at) end diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 4444751d..02ffb8d9 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( @@ -36,6 +38,7 @@ module OpenAI def chunking_strategy=(_) end + # The expiration policy for a vector store. sig { returns(T.nilable(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter)) } def expires_after end @@ -47,6 +50,9 @@ module OpenAI def expires_after=(_) end + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(T.nilable(T::Array[String])) } def file_ids end @@ -55,6 +61,12 @@ module OpenAI def file_ids=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -63,6 +75,7 @@ module OpenAI def metadata=(_) end + # The name of the vector store. sig { returns(T.nilable(String)) } def name end @@ -108,6 +121,8 @@ module OpenAI end class ExpiresAfter < OpenAI::BaseModel + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. sig { returns(Symbol) } def anchor end @@ -116,6 +131,7 @@ module OpenAI def anchor=(_) end + # The number of days after the anchor time that the vector store will expire. sig { returns(Integer) } def days end @@ -124,6 +140,7 @@ module OpenAI def days=(_) end + # The expiration policy for a vector store. 
sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } def self.new(days:, anchor: :last_active_at) end diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index a0c37415..91af6210 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -6,6 +6,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -14,6 +18,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -22,6 +30,8 @@ module OpenAI def before=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -30,6 +40,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -66,6 +78,8 @@ module OpenAI def to_hash end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 52ea39e4..593b1417 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -6,6 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A query string for a search sig { returns(T.any(String, T::Array[String])) } def query end @@ -14,6 +15,7 @@ module OpenAI def query=(_) end + # A filter to apply based on file attributes. sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } def filters end @@ -25,6 +27,8 @@ module OpenAI def filters=(_) end + # The maximum number of results to return. This number should be between 1 and 50 + # inclusive. sig { returns(T.nilable(Integer)) } def max_num_results end @@ -33,6 +37,7 @@ module OpenAI def max_num_results=(_) end + # Ranking options for search. sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions)) } def ranking_options end @@ -44,6 +49,7 @@ module OpenAI def ranking_options=(_) end + # Whether to rewrite the natural language query for vector search. sig { returns(T.nilable(T::Boolean)) } def rewrite_query end @@ -82,22 +88,26 @@ module OpenAI def to_hash end + # A query string for a search class Query < OpenAI::Union abstract! StringArray = T.type_alias { T::Array[String] } class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } private def variants end end end + # A filter to apply based on file attributes. 
class Filters < OpenAI::Union abstract! class << self + # @api private sig do override .returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, OpenAI::Models::CompoundFilter]]) @@ -124,6 +134,7 @@ module OpenAI def score_threshold=(_) end + # Ranking options for search. sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker: nil, score_threshold: nil) end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index d8ffa5fa..ab3f35c5 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -3,6 +3,11 @@ module OpenAI module Models class VectorStoreSearchResponse < OpenAI::BaseModel + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -14,6 +19,7 @@ module OpenAI def attributes=(_) end + # Content chunks from the file. sig { returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) } def content end @@ -25,6 +31,7 @@ module OpenAI def content=(_) end + # The ID of the vector store file. sig { returns(String) } def file_id end @@ -33,6 +40,7 @@ module OpenAI def file_id=(_) end + # The name of the vector store file. sig { returns(String) } def filename end @@ -41,6 +49,7 @@ module OpenAI def filename=(_) end + # The similarity score for the result. sig { returns(Float) } def score end @@ -81,6 +90,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end @@ -88,6 +98,7 @@ module OpenAI end class Content < OpenAI::BaseModel + # The text content returned from search. sig { returns(String) } def text end @@ -96,6 +107,7 @@ module OpenAI def text=(_) end + # The type of content. sig { returns(Symbol) } def type end @@ -112,6 +124,7 @@ module OpenAI def to_hash end + # The type of content. class Type < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 609a1ea7..88614cee 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -6,6 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # The expiration policy for a vector store. sig { returns(T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) } def expires_after end @@ -17,6 +18,12 @@ module OpenAI def expires_after=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(OpenAI::Models::Metadata)) } def metadata end @@ -25,6 +32,7 @@ module OpenAI def metadata=(_) end + # The name of the vector store. 
sig { returns(T.nilable(String)) } def name end @@ -60,6 +68,8 @@ module OpenAI end class ExpiresAfter < OpenAI::BaseModel + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. sig { returns(Symbol) } def anchor end @@ -68,6 +78,7 @@ module OpenAI def anchor=(_) end + # The number of days after the anchor time that the vector store will expire. sig { returns(Integer) } def days end @@ -76,6 +87,7 @@ module OpenAI def days=(_) end + # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } def self.new(days:, anchor: :last_active_at) end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 50d4260c..fd90edaf 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -7,6 +7,9 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(T::Array[String]) } def file_ids end @@ -15,6 +18,11 @@ module OpenAI def file_ids=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -26,6 +34,8 @@ module OpenAI def attributes=(_) end + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( @@ -92,6 +102,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 6847460c..c1cde336 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -15,6 +15,10 @@ module OpenAI def vector_store_id=(_) end + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -23,6 +27,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -31,6 +39,7 @@ module OpenAI def before=(_) end + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
sig { returns(T.nilable(Symbol)) } def filter end @@ -39,6 +48,8 @@ module OpenAI def filter=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -47,6 +58,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -87,6 +100,7 @@ module OpenAI def to_hash end + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. class Filter < OpenAI::Enum abstract! @@ -102,6 +116,8 @@ module OpenAI end end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/lib/openai/models/vector_stores/file_content_response.rbi index da110839..7986b030 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_response.rbi @@ -4,6 +4,7 @@ module OpenAI module Models module VectorStores class FileContentResponse < OpenAI::BaseModel + # The text content sig { returns(T.nilable(String)) } def text end @@ -12,6 +13,7 @@ module OpenAI def text=(_) end + # The content type (currently only `"text"`) sig { returns(T.nilable(String)) } def type end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 9c1d277d..e29d8b47 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -7,6 +7,9 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A [File](https://platform.openai.com/docs/api-reference/files) ID that the + # vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(String) } def file_id end @@ -15,6 +18,11 @@ module OpenAI def file_id=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -26,6 +34,8 @@ module OpenAI def attributes=(_) end + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( @@ -92,6 +102,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 42032f29..6613871b 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -7,6 +7,10 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } def after end @@ -15,6 +19,10 @@ module OpenAI def after=(_) end + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } def before end @@ -23,6 +31,7 @@ module OpenAI def before=(_) end + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. sig { returns(T.nilable(Symbol)) } def filter end @@ -31,6 +40,8 @@ module OpenAI def filter=(_) end + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } def limit end @@ -39,6 +50,8 @@ module OpenAI def limit=(_) end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. sig { returns(T.nilable(Symbol)) } def order end @@ -77,6 +90,7 @@ module OpenAI def to_hash end + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. class Filter < OpenAI::Enum abstract! @@ -92,6 +106,8 @@ module OpenAI end end + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. class Order < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 865c29c6..ea9cd54d 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -15,6 +15,11 @@ module OpenAI def vector_store_id=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -54,6 +59,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 102bd1c5..ca194b2c 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -6,6 +6,7 @@ module OpenAI module VectorStores class VectorStoreFile < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. sig { returns(String) } def id end @@ -14,6 +15,7 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the vector store file was created. sig { returns(Integer) } def created_at end @@ -22,6 +24,8 @@ module OpenAI def created_at=(_) end + # The last error associated with this vector store file. Will be `null` if there + # are no errors. 
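The `after`/`before` cursor comments repeated across these list params all describe the same pagination contract. A sketch of walking every page of vector store files by hand; the page object's `#data` accessor is an assumption here, since the page interface is not part of this diff:

```ruby
after = nil
loop do
  page = client.vector_stores.files.list(
    "vs_abc123",
    after: after,
    filter: :completed, # one of :in_progress, :completed, :failed, :cancelled
    limit: 100,
    order: :asc
  )
  files = page.data # assumed accessor for the current page's objects
  break if files.empty?
  files.each { |file| puts file.id }
  after = files.last.id # resume from the last object seen
end
```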
sig { returns(T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) } def last_error end @@ -33,6 +37,7 @@ module OpenAI def last_error=(_) end + # The object type, which is always `vector_store.file`. sig { returns(Symbol) } def object end @@ -41,6 +46,9 @@ module OpenAI def object=(_) end + # The status of the vector store file, which can be either `in_progress`, + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. sig { returns(Symbol) } def status end @@ -49,6 +57,8 @@ module OpenAI def status=(_) end + # The total vector store usage in bytes. Note that this may be different from the + # original file size. sig { returns(Integer) } def usage_bytes end @@ -57,6 +67,10 @@ module OpenAI def usage_bytes=(_) end + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. sig { returns(String) } def vector_store_id end @@ -65,6 +79,11 @@ module OpenAI def vector_store_id=(_) end + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } def attributes end @@ -76,6 +95,7 @@ module OpenAI def attributes=(_) end + # The strategy used to chunk the file. sig do returns( T.nilable( @@ -97,6 +117,7 @@ module OpenAI def chunking_strategy=(_) end + # A list of files attached to a vector store. sig do params( id: String, @@ -144,6 +165,7 @@ module OpenAI end class LastError < OpenAI::BaseModel + # One of `server_error` or `rate_limit_exceeded`. sig { returns(Symbol) } def code end @@ -152,6 +174,7 @@ module OpenAI def code=(_) end + # A human-readable description of the error. sig { returns(String) } def message end @@ -160,6 +183,8 @@ module OpenAI def message=(_) end + # The last error associated with this vector store file. Will be `null` if there + # are no errors. sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end @@ -168,6 +193,7 @@ module OpenAI def to_hash end + # One of `server_error` or `rate_limit_exceeded`. class Code < OpenAI::Enum abstract! @@ -183,6 +209,9 @@ module OpenAI end end + # The status of the vector store file, which can be either `in_progress`, + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. class Status < OpenAI::Enum abstract! @@ -202,6 +231,7 @@ module OpenAI abstract! class << self + # @api private sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } private def variants end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 1fa22e5f..497044cd 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -6,6 +6,7 @@ module OpenAI module VectorStores class VectorStoreFileBatch < OpenAI::BaseModel + # The identifier, which can be referenced in API endpoints. 
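Since `status` only reaches `completed` asynchronously, a caller will typically poll `Files#retrieve` and inspect `last_error` on failure. A rough sketch; the one-second interval and IDs are illustrative only:

```ruby
file = client.vector_stores.files.retrieve("file-aaa", vector_store_id: "vs_abc123")
until %i[completed failed cancelled].include?(file.status)
  sleep(1) # arbitrary polling interval
  file = client.vector_stores.files.retrieve("file-aaa", vector_store_id: "vs_abc123")
end
warn "ingestion failed: #{file.last_error.message}" if file.last_error
```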
sig { returns(String) } def id end @@ -14,6 +15,8 @@ module OpenAI def id=(_) end + # The Unix timestamp (in seconds) for when the vector store files batch was + # created. sig { returns(Integer) } def created_at end @@ -33,6 +36,7 @@ module OpenAI def file_counts=(_) end + # The object type, which is always `vector_store.file_batch`. sig { returns(Symbol) } def object end @@ -41,6 +45,8 @@ module OpenAI def object=(_) end + # The status of the vector store files batch, which can be either `in_progress`, + # `completed`, `cancelled` or `failed`. sig { returns(Symbol) } def status end @@ -49,6 +55,10 @@ module OpenAI def status=(_) end + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. sig { returns(String) } def vector_store_id end @@ -57,6 +67,7 @@ module OpenAI def vector_store_id=(_) end + # A batch of files attached to a vector store. sig do params( id: String, @@ -88,6 +99,7 @@ module OpenAI end class FileCounts < OpenAI::BaseModel + # The number of files that were cancelled. sig { returns(Integer) } def cancelled end @@ -96,6 +108,7 @@ module OpenAI def cancelled=(_) end + # The number of files that have been processed. sig { returns(Integer) } def completed end @@ -104,6 +117,7 @@ module OpenAI def completed=(_) end + # The number of files that have failed to process. sig { returns(Integer) } def failed end @@ -112,6 +126,7 @@ module OpenAI def failed=(_) end + # The number of files that are currently being processed. sig { returns(Integer) } def in_progress end @@ -120,6 +135,7 @@ module OpenAI def in_progress=(_) end + # The total number of files. sig { returns(Integer) } def total end @@ -155,6 +171,8 @@ module OpenAI end end + # The status of the vector store files batch, which can be either `in_progress`, + # `completed`, `cancelled` or `failed`. class Status < OpenAI::Enum abstract! diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index 9d80cd5f..e940c4f4 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -7,14 +7,17 @@ module OpenAI end class << self + # @api private sig { params(url: URI::Generic).returns(Net::HTTP) } def connect(url) end + # @api private sig { params(conn: Net::HTTP, deadline: Float).void } def calibrate_socket_timeout(conn, deadline) end + # @api private sig do params(request: OpenAI::PooledNetRequester::RequestShape, blk: T.proc.params(arg0: String).void) .returns(Net::HTTPGenericRequest) @@ -23,10 +26,12 @@ module OpenAI end end + # @api private sig { params(url: URI::Generic, blk: T.proc.params(arg0: Net::HTTP).void).void } private def with_pool(url, &blk) end + # @api private sig do params(request: OpenAI::PooledNetRequester::RequestShape) .returns([Net::HTTPResponse, T::Enumerable[String]]) end def execute(request) end + # @api private sig { params(size: Integer).returns(T.attached_class) } def self.new(size: Etc.nprocessors) end diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index f55d6e56..efd2de36 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -1,9 +1,11 @@ # typed: strong module OpenAI + # @api private module RequestParameters abstract! + # Options to specify HTTP behaviour for this request.
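The `FileCounts` fields above combine naturally into a progress readout. A small sketch, with placeholder batch and store IDs:

```ruby
batch = client.vector_stores.file_batches.retrieve("vsfb_123", vector_store_id: "vs_abc123")
counts = batch.file_counts
done = counts.completed + counts.failed + counts.cancelled
puts format("processed %d/%d files (%d failed)", done, counts.total, counts.failed)
```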
sig { returns(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) } def request_options end @@ -15,18 +17,28 @@ module OpenAI def request_options=(_) end + # @api private module Converter + # @api private sig { params(params: T.anything).returns([T.anything, T::Hash[Symbol, T.anything]]) } def dump_request(params) end end end + # Specify HTTP behaviour to use for a specific request. These options supplement + # or override those provided at the client level. + # + # When making a request, you can pass an actual {RequestOptions} instance, or + # simply pass a Hash with symbol keys matching the attributes on this class. class RequestOptions < OpenAI::BaseModel + # @api private sig { params(opts: T.any(T.self_type, T::Hash[Symbol, T.anything])).void } def self.validate!(opts) end + # Idempotency key to send with request and all associated retries. Will only be + # sent for write requests. sig { returns(T.nilable(String)) } def idempotency_key end @@ -35,6 +47,8 @@ module OpenAI def idempotency_key=(_) end + # Extra query params to send with the request. These are `.merge`’d into any + # `query` given at the client level. sig { returns(T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) } def extra_query end @@ -46,6 +60,8 @@ module OpenAI def extra_query=(_) end + # Extra headers to send with the request. These are `.merge`’d into any + # `extra_headers` given at the client level. sig { returns(T.nilable(T::Hash[String, T.nilable(String)])) } def extra_headers end @@ -57,6 +73,8 @@ module OpenAI def extra_headers=(_) end + # Extra data to send with the request. These are deep merged into any data + # generated as part of the normal request. sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } def extra_body end @@ -65,6 +83,7 @@ module OpenAI def extra_body=(_) end + # Maximum number of retries to attempt after a failed initial request. sig { returns(T.nilable(Integer)) } def max_retries end @@ -73,6 +92,7 @@ module OpenAI def max_retries=(_) end + # Request timeout in seconds. sig { returns(T.nilable(Float)) } def timeout end @@ -81,6 +101,7 @@ module OpenAI def timeout=(_) end + # Returns a new instance of RequestOptions. sig { params(values: T::Hash[Symbol, T.anything]).returns(T.attached_class) } def self.new(values = {}) end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index 5ad154db..ac1162aa 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class Audio class Speech + # Generates audio from the input text. sig do params( input: String, diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index a88bec77..b0455588 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class Audio class Transcriptions + # Transcribes audio into the input language. sig do params( file: T.any(IO, StringIO), diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 2b323900..02565a2d 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class Audio class Translations + # Translates audio into English.
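As the `RequestOptions` comments note, a plain `Hash` with symbol keys can stand in for the model instance on any resource call. A sketch of tightening retry and timeout behaviour for a single request:

```ruby
client.models.list(
  request_options: {
    max_retries: 0, # fail fast rather than retrying
    timeout: 30.0,  # seconds
    extra_headers: {"x-request-tag" => "audit"} # merged into client-level headers
  }
)
```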
sig do params( file: T.any(IO, StringIO), diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index ec305bd4..fa633d8c 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -3,6 +3,7 @@ module OpenAI module Resources class Batches + # Creates and executes a batch from an uploaded file of requests sig do params( completion_window: Symbol, @@ -16,6 +17,7 @@ module OpenAI def create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) end + # Retrieves a batch. sig do params( batch_id: String, @@ -26,6 +28,7 @@ module OpenAI def retrieve(batch_id, request_options: {}) end + # List your organization's batches. sig do params( after: String, @@ -37,6 +40,9 @@ module OpenAI def list(after: nil, limit: nil, request_options: {}) end + # Cancels an in-progress batch. The batch will be in status `cancelling` for up to + # 10 minutes, before changing to `cancelled`, where it will have partial results + # (if any) available in the output file. sig do params( batch_id: String, diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 72828f08..5a235b78 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class Beta class Assistants + # Create an assistant with a model and instructions. sig do params( model: T.any(String, Symbol), @@ -50,6 +51,7 @@ module OpenAI ) end + # Retrieves an assistant. sig do params( assistant_id: String, @@ -60,6 +62,7 @@ module OpenAI def retrieve(assistant_id, request_options: {}) end + # Modifies an assistant. sig do params( assistant_id: String, @@ -108,6 +111,7 @@ module OpenAI ) end + # Returns a list of assistants. sig do params( after: String, @@ -121,6 +125,7 @@ module OpenAI def list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) end + # Delete an assistant. sig do params( assistant_id: String, diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 02bc8060..b2f631a3 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -12,6 +12,7 @@ module OpenAI def messages end + # Create a thread. sig do params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], @@ -24,6 +25,7 @@ module OpenAI def create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) end + # Retrieves a thread. sig do params( thread_id: String, @@ -34,6 +36,7 @@ module OpenAI def retrieve(thread_id, request_options: {}) end + # Modifies a thread. sig do params( thread_id: String, @@ -46,6 +49,7 @@ module OpenAI def update(thread_id, metadata: nil, tool_resources: nil, request_options: {}) end + # Delete a thread. sig do params( thread_id: String, @@ -56,6 +60,7 @@ module OpenAI def delete(thread_id, request_options: {}) end + # Create a thread and run it in one request. sig do params( assistant_id: String, @@ -112,6 +117,7 @@ module OpenAI ) end + # Create a thread and run it in one request. sig do params( assistant_id: String, diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 28c889d1..2e0009e8 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -5,6 +5,7 @@ module OpenAI class Beta class Threads class Messages + # Create a message. 
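The `Batches#create` signature above takes the window, endpoint, and input file directly. A sketch, assuming `file-batchinput123` is a placeholder for a `.jsonl` file already uploaded with purpose `batch`:

```ruby
batch = client.batches.create(
  completion_window: :"24h",          # currently the only supported window
  endpoint: :"/v1/chat/completions",
  input_file_id: "file-batchinput123" # placeholder ID
)
batch = client.batches.retrieve(batch.id) # poll later for completion
```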
sig do params( thread_id: String, @@ -28,6 +29,7 @@ module OpenAI def create(thread_id, content:, role:, attachments: nil, metadata: nil, request_options: {}) end + # Retrieve a message. sig do params( message_id: String, @@ -39,6 +41,7 @@ module OpenAI def retrieve(message_id, thread_id:, request_options: {}) end + # Modifies a message. sig do params( message_id: String, @@ -51,6 +54,7 @@ module OpenAI def update(message_id, thread_id:, metadata: nil, request_options: {}) end + # Returns a list of messages for a given thread. sig do params( thread_id: String, @@ -74,6 +78,7 @@ module OpenAI ) end + # Deletes a message. sig do params( message_id: String, diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 7f5df3f4..ecf30e5f 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -9,6 +9,7 @@ module OpenAI def steps end + # Create a run. sig do params( thread_id: String, @@ -71,6 +72,7 @@ module OpenAI ) end + # Create a run. sig do params( thread_id: String, @@ -162,6 +164,7 @@ module OpenAI ) end + # Retrieves a run. sig do params( run_id: String, @@ -173,6 +176,7 @@ module OpenAI def retrieve(run_id, thread_id:, request_options: {}) end + # Modifies a run. sig do params( run_id: String, @@ -185,6 +189,7 @@ module OpenAI def update(run_id, thread_id:, metadata: nil, request_options: {}) end + # Returns a list of runs belonging to a thread. sig do params( thread_id: String, @@ -199,6 +204,7 @@ module OpenAI def list(thread_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) end + # Cancels a run that is `in_progress`. sig do params( run_id: String, @@ -210,6 +216,10 @@ module OpenAI def cancel(run_id, thread_id:, request_options: {}) end + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. sig do params( run_id: String, @@ -222,6 +232,10 @@ module OpenAI def submit_tool_outputs(run_id, thread_id:, tool_outputs:, request_options: {}) end + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. sig do params( run_id: String, diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 1714ab4d..0987c273 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -6,6 +6,7 @@ module OpenAI class Threads class Runs class Steps + # Retrieves a run step. sig do params( step_id: String, @@ -19,6 +20,7 @@ module OpenAI def retrieve(step_id, thread_id:, run_id:, include: nil, request_options: {}) end + # Returns a list of run steps belonging to a run. 
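The `submit_tool_outputs` comments above describe the `requires_action` handshake. A hedged sketch; the shape of each output element (`tool_call_id`/`output`) and the `required_action` accessors follow the public API reference rather than anything shown in this diff:

```ruby
if run.status == :requires_action
  calls = run.required_action.submit_tool_outputs.tool_calls # assumed accessors
  outputs = calls.map do |call|
    {tool_call_id: call.id, output: "42"} # substitute a real result per call
  end
  run = client.beta.threads.runs.submit_tool_outputs(
    run.id,
    thread_id: thread.id,
    tool_outputs: outputs # all outputs must go in a single request
  )
end
```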
sig do params( run_id: String, diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 2ea5223f..b4a45a99 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -8,6 +8,23 @@ module OpenAI def messages end + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # + # --- + # + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. + # + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). sig do params( messages: T::Array[ @@ -92,6 +109,23 @@ module OpenAI ) end + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # + # --- + # + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. + # + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). sig do params( messages: T::Array[ @@ -176,6 +210,8 @@ module OpenAI ) end + # Get a stored chat completion. Only Chat Completions that have been created with + # the `store` parameter set to `true` will be returned. sig do params( completion_id: String, @@ -186,6 +222,9 @@ module OpenAI def retrieve(completion_id, request_options: {}) end + # Modify a stored chat completion. Only Chat Completions that have been created + # with the `store` parameter set to `true` can be modified. Currently, the only + # supported modification is to update the `metadata` field. sig do params( completion_id: String, @@ -197,6 +236,8 @@ module OpenAI def update(completion_id, metadata:, request_options: {}) end + # List stored Chat Completions. Only Chat Completions that have been stored with + # the `store` parameter set to `true` will be returned. sig do params( after: String, @@ -211,6 +252,8 @@ module OpenAI def list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) end + # Delete a stored chat completion. 
Only Chat Completions that have been created + # with the `store` parameter set to `true` can be deleted. sig do params( completion_id: String, diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 4f39f196..26b43645 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -5,6 +5,8 @@ module OpenAI class Chat class Completions class Messages + # Get the messages in a stored chat completion. Only Chat Completions that have + # been created with the `store` parameter set to `true` will be returned. sig do params( completion_id: String, diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 5f018bc1..2291fad9 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -3,6 +3,7 @@ module OpenAI module Resources class Completions + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, Symbol), @@ -55,6 +56,7 @@ module OpenAI ) end + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, Symbol), diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 86d61392..a251c27d 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -3,6 +3,7 @@ module OpenAI module Resources class Embeddings + # Creates an embedding vector representing the input text. sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 508e9d63..39b009e9 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -3,6 +3,27 @@ module OpenAI module Resources class Files + # Upload a file that can be used across various endpoints. Individual files can be + # up to 512 MB, and the size of all files uploaded by one organization can be up + # to 100 GB. + # + # The Assistants API supports files up to 2 million tokens and of specific file + # types. See the + # [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + # details. + # + # The Fine-tuning API only supports `.jsonl` files. The input also has certain + # required formats for fine-tuning + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # models. + # + # The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + # has a specific required + # [format](https://platform.openai.com/docs/api-reference/batch/request-input). + # + # Please [contact us](https://help.openai.com/) if you need to increase these + # storage limits. sig do params( file: T.any(IO, StringIO), @@ -14,6 +35,7 @@ module OpenAI def create(file:, purpose:, request_options: {}) end + # Returns information about a specific file. sig do params( file_id: String, @@ -24,6 +46,7 @@ module OpenAI def retrieve(file_id, request_options: {}) end + # Returns a list of files. sig do params( after: String, @@ -37,6 +60,7 @@ module OpenAI def list(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) end + # Delete a file. 
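Retrieve, update, list, and delete above all require that the completion was created with `store` set to `true`. A sketch of the round trip; the `store:` keyword on `create` is assumed from these comments, since the `create` signature here is truncated:

```ruby
completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: :user, content: "Say hello"}],
  store: true # without this, retrieve/update/delete will not find the completion
)
client.chat.completions.update(completion.id, metadata: {"reviewed" => "false"})
stored = client.chat.completions.retrieve(completion.id)
```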
sig do params( file_id: String, @@ -47,6 +71,7 @@ module OpenAI def delete(file_id, request_options: {}) end + # Returns the contents of the specified file. sig do params( file_id: String, diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 8ace3da3..5c720561 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -8,6 +8,13 @@ module OpenAI def checkpoints end + # Creates a fine-tuning job which begins the process of creating a new model from + # a given dataset. + # + # Response includes details of the enqueued job including job status and the name + # of the fine-tuned models once complete. + # + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( model: T.any(String, Symbol), @@ -37,6 +44,9 @@ module OpenAI ) end + # Get info about a fine-tuning job. + # + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( fine_tuning_job_id: String, @@ -47,6 +57,7 @@ module OpenAI def retrieve(fine_tuning_job_id, request_options: {}) end + # List your organization's fine-tuning jobs sig do params( after: String, @@ -59,6 +70,7 @@ module OpenAI def list(after: nil, limit: nil, metadata: nil, request_options: {}) end + # Immediately cancel a fine-tune job. sig do params( fine_tuning_job_id: String, @@ -69,6 +81,7 @@ module OpenAI def cancel(fine_tuning_job_id, request_options: {}) end + # Get status updates for a fine-tuning job. sig do params( fine_tuning_job_id: String, diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index 297b57f1..d0a7bb83 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -5,6 +5,7 @@ module OpenAI class FineTuning class Jobs class Checkpoints + # List checkpoints for a fine-tuning job. sig do params( fine_tuning_job_id: String, diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 357eccc3..3655f172 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -3,6 +3,7 @@ module OpenAI module Resources class Images + # Creates a variation of a given image. sig do params( image: T.any(IO, StringIO), @@ -26,6 +27,7 @@ module OpenAI ) end + # Creates an edited or extended image given an original image and a prompt. sig do params( image: T.any(IO, StringIO), @@ -53,6 +55,7 @@ module OpenAI ) end + # Creates an image given a prompt. sig do params( prompt: String, diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 2e4b916b..04a4cf0d 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -3,6 +3,8 @@ module OpenAI module Resources class Models + # Retrieves a model instance, providing basic information about the model such as + # the owner and permissioning. sig do params( model: String, @@ -13,6 +15,8 @@ module OpenAI def retrieve(model, request_options: {}) end + # Lists the currently available models, and provides basic information about each + # one such as the owner and availability. sig do params(request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))) .returns(OpenAI::Page[OpenAI::Models::Model]) @@ -20,6 +24,8 @@ module OpenAI def list(request_options: {}) end + # Delete a fine-tuned model. 
You must have the Owner role in your organization to + # delete a model. sig do params( model: String, diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 8b836716..3b856753 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -3,6 +3,8 @@ module OpenAI module Resources class Moderations + # Classifies if text and/or image inputs are potentially harmful. Learn more in + # the [moderation guide](https://platform.openai.com/docs/guides/moderation). sig do params( input: T.any( diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index d225d52c..ded8cf36 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -7,6 +7,17 @@ module OpenAI def input_items end + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. sig do params( input: T.any(String, OpenAI::Models::Responses::ResponseInput), @@ -59,6 +70,17 @@ module OpenAI ) end + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. sig do params( input: T.any(String, OpenAI::Models::Responses::ResponseInput), @@ -148,6 +170,7 @@ module OpenAI ) end + # Retrieves a model response with the given ID. sig do params( response_id: String, @@ -159,6 +182,7 @@ module OpenAI def retrieve(response_id, include: nil, request_options: {}) end + # Deletes a model response with the given ID. sig do params( response_id: String, diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 5c8d359e..afd82a10 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class Responses class InputItems + # Returns a list of input items for a given response. 
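A minimal `Responses#create` sketch following the description above; only `input` is visible in the truncated signature, so the `model` keyword and the positional `response_id` on `InputItems#list` are assumed from the public API:

```ruby
response = client.responses.create(
  model: "gpt-4o", # assumed keyword
  input: "Summarize the plot of Hamlet in one sentence."
)
items = client.responses.input_items.list(response.id)
```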
sig do params( response_id: String, diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 094b93ea..561a624f 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -7,6 +7,25 @@ module OpenAI def parts end + # Creates an intermediate + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + # that you can add + # [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + # Currently, an Upload can accept at most 8 GB in total and expires after an hour + # after you create it. + # + # Once you complete the Upload, we will create a + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # contains all the parts you uploaded. This File is usable in the rest of our + # platform as a regular File object. + # + # For certain `purpose` values, the correct `mime_type` must be specified. Please + # refer to documentation for the + # [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files). + # + # For guidance on the proper filename extensions for each purpose, please follow + # the documentation on + # [creating a File](https://platform.openai.com/docs/api-reference/files/create). sig do params( bytes: Integer, @@ -20,6 +39,7 @@ module OpenAI def create(bytes:, filename:, mime_type:, purpose:, request_options: {}) end + # Cancels the Upload. No Parts may be added after an Upload is cancelled. sig do params( upload_id: String, @@ -30,6 +50,19 @@ module OpenAI def cancel(upload_id, request_options: {}) end + # Completes the + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + # + # Within the returned Upload object, there is a nested + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # is ready to use in the rest of the platform. + # + # You can specify the order of the Parts by passing in an ordered list of the Part + # IDs. + # + # The number of bytes uploaded upon completion must match the number of bytes + # initially specified when creating the Upload object. No Parts may be added after + # an Upload is completed. sig do params( upload_id: String, diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index e32900d9..347903f3 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -4,6 +4,17 @@ module OpenAI module Resources class Uploads class Parts + # Adds a + # [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + # A Part represents a chunk of bytes from the file you are trying to upload. + # + # Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + # maximum of 8 GB. + # + # It is possible to add multiple Parts in parallel. You can decide the intended + # order of the Parts when you + # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). sig do params( upload_id: String, diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index fdbb2632..4420a17f 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -11,6 +11,7 @@ module OpenAI def file_batches end + # Create a vector store. 
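The Upload and Part comments above amount to a chunked-upload protocol: create an Upload sized up front, add parts of at most 64 MB each, then complete with the ordered part IDs. A rough sketch; `data:` on `Parts#create` and `part_ids:` on `complete` are assumed from the API reference because both signatures are truncated here, and the MIME type is a placeholder:

```ruby
path = "training.jsonl"
upload = client.uploads.create(
  bytes: File.size(path),
  filename: File.basename(path),
  mime_type: "text/jsonl",
  purpose: "fine-tune"
)

part_ids = []
File.open(path, "rb") do |io|
  while (chunk = io.read(64 * 1024 * 1024)) # stay within the 64 MB part limit
    part_ids << client.uploads.parts.create(upload.id, data: chunk).id
  end
end

client.uploads.complete(upload.id, part_ids: part_ids) # order determines final byte order
```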
sig do params( chunking_strategy: T.any( @@ -35,6 +36,7 @@ module OpenAI ) end + # Retrieves a vector store. sig do params( vector_store_id: String, @@ -45,6 +47,7 @@ module OpenAI def retrieve(vector_store_id, request_options: {}) end + # Modifies a vector store. sig do params( vector_store_id: String, @@ -58,6 +61,7 @@ module OpenAI def update(vector_store_id, expires_after: nil, metadata: nil, name: nil, request_options: {}) end + # Returns a list of vector stores. sig do params( after: String, @@ -71,6 +75,7 @@ module OpenAI def list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) end + # Delete a vector store. sig do params( vector_store_id: String, @@ -81,6 +86,8 @@ module OpenAI def delete(vector_store_id, request_options: {}) end + # Search a vector store for relevant chunks based on a query and file attributes + # filter. sig do params( vector_store_id: String, diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index bcec19cb..dbed991d 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -4,6 +4,7 @@ module OpenAI module Resources class VectorStores class FileBatches + # Create a vector store file batch. sig do params( vector_store_id: String, @@ -20,6 +21,7 @@ module OpenAI def create(vector_store_id, file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) end + # Retrieves a vector store file batch. sig do params( batch_id: String, @@ -31,6 +33,8 @@ module OpenAI def retrieve(batch_id, vector_store_id:, request_options: {}) end + # Cancel a vector store file batch. This attempts to cancel the processing of + # files in this batch as soon as possible. sig do params( batch_id: String, @@ -42,6 +46,7 @@ module OpenAI def cancel(batch_id, vector_store_id:, request_options: {}) end + # Returns a list of vector store files in a batch. sig do params( batch_id: String, diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 85c53733..daf2b28a 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -4,6 +4,9 @@ module OpenAI module Resources class VectorStores class Files + # Create a vector store file by attaching a + # [File](https://platform.openai.com/docs/api-reference/files) to a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). sig do params( vector_store_id: String, @@ -20,6 +23,7 @@ module OpenAI def create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) end + # Retrieves a vector store file. sig do params( file_id: String, @@ -31,6 +35,7 @@ module OpenAI def retrieve(file_id, vector_store_id:, request_options: {}) end + # Update attributes on a vector store file. sig do params( file_id: String, @@ -43,6 +48,7 @@ module OpenAI def update(file_id, vector_store_id:, attributes:, request_options: {}) end + # Returns a list of vector store files. sig do params( vector_store_id: String, @@ -66,6 +72,10 @@ module OpenAI ) end + # Delete a vector store file. This will remove the file from the vector store but + # the file itself will not be deleted. To delete the file, use the + # [delete file](https://platform.openai.com/docs/api-reference/files/delete) + # endpoint. 
sig do params( file_id: String, @@ -77,6 +87,7 @@ module OpenAI def delete(file_id, vector_store_id:, request_options: {}) end + # Retrieve the parsed contents of a vector store file. sig do params( file_id: String, diff --git a/rbi/lib/openai/stream.rbi b/rbi/lib/openai/stream.rbi index 01f98197..75e469ce 100644 --- a/rbi/lib/openai/stream.rbi +++ b/rbi/lib/openai/stream.rbi @@ -7,6 +7,7 @@ module OpenAI Message = type_member(:in) { {fixed: OpenAI::Util::ServerSentEvent} } Elem = type_member(:out) + # @api private sig { override.returns(T::Enumerable[Elem]) } private def iterator end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 0888f1da..62ce155d 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -1,42 +1,52 @@ # typed: strong module OpenAI + # @api private module Util + # @api private sig { returns(Float) } def self.monotonic_secs end class << self + # @api private sig { returns(String) } def arch end + # @api private sig { returns(String) } def os end end class << self + # @api private sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } def primitive?(input) end + # @api private sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } def coerce_boolean(input) end + # @api private sig { params(input: T.anything).returns(T.nilable(T::Boolean)) } def coerce_boolean!(input) end + # @api private sig { params(input: T.anything).returns(T.any(Integer, T.anything)) } def coerce_integer(input) end + # @api private sig { params(input: T.anything).returns(T.any(Float, T.anything)) } def coerce_float(input) end + # @api private sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) } def coerce_hash(input) end @@ -45,10 +55,15 @@ module OpenAI OMIT = T.let(T.anything, T.anything) class << self + # @api private sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) } private def deep_merge_lr(lhs, rhs, concat: false) end + # @api private + # + # Recursively merge one hash with another. If the values at a given key are not + # both hashes, just take the new value. 
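The `deep_merge` doc above is easiest to see with a tiny example. Behaviour inferred from the comment and the `concat:` parameter: nested hashes merge recursively, anything else is replaced by the right-hand value, and `concat: true` concatenates arrays instead:

```ruby
lhs = {retries: 2, headers: {"a" => "1"}, tags: [:x]}
rhs = {headers: {"b" => "2"}, tags: [:y]}

OpenAI::Util.deep_merge(lhs, rhs)
# => {retries: 2, headers: {"a" => "1", "b" => "2"}, tags: [:y]}

OpenAI::Util.deep_merge(lhs, rhs, concat: true)
# tags are concatenated instead: [:x, :y]
```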
sig do params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean) .returns(T.anything) @@ -56,6 +71,7 @@ module OpenAI def deep_merge(*values, sentinel: nil, concat: false) end + # @api private sig do params( data: T.any(T::Hash[Symbol, T.anything], T::Array[T.anything], T.anything), @@ -70,20 +86,24 @@ module OpenAI end class << self + # @api private sig { params(uri: URI::Generic).returns(String) } def uri_origin(uri) end + # @api private sig { params(path: T.any(String, T::Array[String])).returns(String) } def interpolate_path(path) end end class << self + # @api private sig { params(query: T.nilable(String)).returns(T::Hash[String, T::Array[String]]) } def decode_query(query) end + # @api private sig do params(query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) .returns(T.nilable(String)) @@ -103,14 +123,17 @@ module OpenAI end class << self + # @api private sig { params(url: T.any(URI::Generic, String)).returns(OpenAI::Util::ParsedUriShape) } def parse_uri(url) end + # @api private sig { params(parsed: OpenAI::Util::ParsedUriShape).returns(URI::Generic) } def unparse_uri(parsed) end + # @api private sig do params(lhs: OpenAI::Util::ParsedUriShape, rhs: OpenAI::Util::ParsedUriShape).returns(URI::Generic) end @@ -119,6 +142,7 @@ module OpenAI end class << self + # @api private sig do params( headers: T::Hash[String, @@ -130,15 +154,19 @@ module OpenAI end end + # An adapter that satisfies the IO interface required by `::IO.copy_stream` class ReadIOAdapter + # @api private sig { params(max_len: T.nilable(Integer)).returns(String) } private def read_enum(max_len) end + # @api private sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) } def read(max_len = nil, out_string = nil) end + # @api private sig do params( stream: T.any(String, IO, StringIO, T::Enumerable[String]), @@ -157,20 +185,24 @@ module OpenAI end class << self + # @api private sig do params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void end private def encode_multipart_formdata(y, boundary:, key:, val:) end + # @api private sig { params(body: T.anything).returns([String, T::Enumerable[String]]) } private def encode_multipart_streaming(body) end + # @api private sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) } def encode_content(headers, body) end + # @api private sig do params( headers: T.any(T::Hash[String, String], Net::HTTPHeader), @@ -184,6 +216,9 @@ module OpenAI end class << self + # @api private + # + # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html sig do params(enum: T::Enumerable[T.anything], external: T::Boolean, close: T.proc.void) .returns(T::Enumerable[T.anything]) @@ -191,10 +226,12 @@ module OpenAI def fused_enum(enum, external: false, &close) end + # @api private sig { params(enum: T.nilable(T::Enumerable[T.anything])).void } def close_fused!(enum) end + # @api private sig do params( enum: T.nilable(T::Enumerable[T.anything]), @@ -210,10 +247,14 @@ module OpenAI end class << self + # @api private sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) } def decode_lines(enum) end + # @api private + # + # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream sig { params(lines: T::Enumerable[String]).returns(OpenAI::Util::ServerSentEvent) } def decode_sse(lines) end diff --git a/sig/openai/base_client.rbs b/sig/openai/base_client.rbs index 4ca67417..d685733f 
100644 --- a/sig/openai/base_client.rbs +++ b/sig/openai/base_client.rbs @@ -43,7 +43,7 @@ module OpenAI response_headers: ::Hash[String, String] ) -> OpenAI::BaseClient::request_input - # @private + # @api private attr_accessor requester: top def initialize: ( diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index ced591bb..4147e2ac 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -24,7 +24,6 @@ class MockResponse # @param code [Integer] # @param headers [Hash{String=>String}] - # def initialize(code, headers) @code = code @headers = {"content-type" => "application/json", **headers} @@ -33,7 +32,6 @@ def initialize(code, headers) # @param header [String] # # @return [String, nil] - # def [](header) @headers[header] end @@ -41,7 +39,6 @@ def [](header) # @param header [String] # # @return [Boolean] - # def key?(header) @headers.key?(header) end @@ -63,7 +60,6 @@ class MockRequester # @param response_code [Integer] # @param response_headers [Hash{String=>String}] # @param response_data [Object] - # def initialize(response_code, response_headers, response_data) @response_code = response_code @response_headers = response_headers @@ -72,7 +68,6 @@ def initialize(response_code, response_headers, response_data) end # @param req [Hash{Symbol=>Object}] - # def execute(req) # Deep copy the request because it is mutated on each retry. attempts.push(Marshal.load(Marshal.dump(req))) From 6c6c6eb0e293e6496fa514af0c022ebacb67e3b7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 04:13:40 +0000 Subject: [PATCH 011/295] chore: ensure doc strings for rbi method arguments (#13) --- lib/openai/base_model.rb | 4 +- lib/openai/base_page.rb | 2 - lib/openai/base_stream.rb | 2 - lib/openai/stream.rb | 2 - rbi/lib/openai/base_model.rbi | 1 + rbi/lib/openai/base_page.rbi | 1 - rbi/lib/openai/base_stream.rbi | 1 - rbi/lib/openai/client.rbi | 5 + rbi/lib/openai/resources/audio/speech.rbi | 20 +- .../openai/resources/audio/transcriptions.rbi | 23 ++ .../openai/resources/audio/translations.rbi | 24 +- rbi/lib/openai/resources/batches.rbi | 53 ++- rbi/lib/openai/resources/beta/assistants.rbi | 145 +++++++- rbi/lib/openai/resources/beta/threads.rbi | 195 +++++++++- .../resources/beta/threads/messages.rbi | 72 +++- .../openai/resources/beta/threads/runs.rbi | 259 ++++++++++++- .../resources/beta/threads/runs/steps.rbi | 41 ++- rbi/lib/openai/resources/chat/completions.rbi | 348 +++++++++++++++++- .../resources/chat/completions/messages.rbi | 13 +- rbi/lib/openai/resources/completions.rbi | 172 +++++++++ rbi/lib/openai/resources/embeddings.rbi | 29 +- rbi/lib/openai/resources/files.rbi | 46 ++- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 82 ++++- .../fine_tuning/jobs/checkpoints.rbi | 10 +- rbi/lib/openai/resources/images.rbi | 53 +++ rbi/lib/openai/resources/models.rbi | 12 +- rbi/lib/openai/resources/moderations.rbi | 12 +- rbi/lib/openai/resources/responses.rbi | 189 +++++++++- .../resources/responses/input_items.rbi | 18 +- rbi/lib/openai/resources/uploads.rbi | 35 +- rbi/lib/openai/resources/uploads/parts.rbi | 8 +- rbi/lib/openai/resources/vector_stores.rbi | 69 +++- .../resources/vector_stores/file_batches.rbi | 51 ++- .../openai/resources/vector_stores/files.rbi | 71 +++- rbi/lib/openai/util.rbi | 8 +- 35 files changed, 2003 insertions(+), 73 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 23f83864..3abd2ca3 100644 --- a/lib/openai/base_model.rb +++ 
b/lib/openai/base_model.rb @@ -876,8 +876,6 @@ def initialize(type_info, spec = {}) end end - # @api private - # # @abstract # # @example @@ -903,6 +901,8 @@ def known_fields @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) end + # @api private + # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] def fields known_fields.transform_values do |field| diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index b8185c65..481df2ea 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true module OpenAI - # @api private - # # @abstract # # @example diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 7151b3f7..2aef50fe 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true module OpenAI - # @api private - # # @example # ```ruby # stream.for_each do |message| diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index 02dea5de..f6f0b113 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true module OpenAI - # @api private - # # @example # ```ruby # stream.for_each do |message| diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index b0ed4c49..b6422096 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -438,6 +438,7 @@ module OpenAI def known_fields end + # @api private sig do returns(T::Hash[Symbol, T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Converter::Input})]) end diff --git a/rbi/lib/openai/base_page.rbi b/rbi/lib/openai/base_page.rbi index bf6ab11f..ad3a2e19 100644 --- a/rbi/lib/openai/base_page.rbi +++ b/rbi/lib/openai/base_page.rbi @@ -1,7 +1,6 @@ # typed: strong module OpenAI - # @api private module BasePage abstract! diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index 79324aa7..c5f6c58e 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -1,7 +1,6 @@ # typed: strong module OpenAI - # @api private module BaseStream Message = type_member(:in) Elem = type_member(:out) diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 4b8256ce..2d8c31bf 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -98,10 +98,15 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` base_url: nil, + # Defaults to `ENV["OPENAI_API_KEY"]` api_key: ENV["OPENAI_API_KEY"], + # Defaults to `ENV["OPENAI_ORG_ID"]` organization: ENV["OPENAI_ORG_ID"], + # Defaults to `ENV["OPENAI_PROJECT_ID"]` project: ENV["OPENAI_PROJECT_ID"], + # Max number of retries to attempt after a failed retryable request. max_retries: DEFAULT_MAX_RETRIES, timeout: DEFAULT_TIMEOUT_IN_SECONDS, initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY, diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index ac1162aa..ae6f4be5 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -16,7 +16,25 @@ module OpenAI ) .returns(T.anything) end - def create(input:, model:, voice:, response_format: nil, speed: nil, request_options: {}) + def create( + # The text to generate audio for. The maximum length is 4096 characters. 
+ input:, + # One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # `tts-1` or `tts-1-hd` + model:, + # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the + # voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + voice:, + # The format to return audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, + # `wav`, and `pcm`. + response_format: nil, + # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is + # the default. + speed: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index b0455588..50e5c416 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -19,12 +19,35 @@ module OpenAI .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)) end def create( + # The audio file object (not file name) to transcribe, in one of these formats: + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, + # ID of the model to use. Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. model:, + # The language of the input audio. Supplying the input language in + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. language: nil, + # An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. prompt: nil, + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. response_format: nil, + # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. temperature: nil, + # The timestamp granularities to populate for this transcription. + # `response_format` must be set to `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. timestamp_granularities: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 02565a2d..278e3855 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -16,7 +16,29 @@ module OpenAI ) .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)) end - def create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) + def create( + # The audio file object (not file name) to translate, in one of these formats: flac, + # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + file:, + # ID of the model to use. 
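Read as a call site, the transcription signature above comes out as the sketch below. The file name is a placeholder, passing an open `File` for `file:` follows the "file object (not file name)" wording, and the symbol enum values are an assumption about how the generated enums are accepted:

```ruby
transcription = client.audio.transcriptions.create(
  file: File.open("meeting.mp3", "rb"),       # hypothetical local recording
  model: "whisper-1",
  response_format: :verbose_json,             # required for timestamp granularities
  timestamp_granularities: [:word, :segment]
)
puts transcription.text
```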
Only `whisper-1` (which is powered by our open source + # Whisper V2 model) is currently available. + model:, + # An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should be in English. + prompt: nil, + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. + response_format: nil, + # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. + temperature: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index fa633d8c..a3f56e28 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -14,7 +14,34 @@ module OpenAI ) .returns(OpenAI::Models::Batch) end - def create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) + def create( + # The time frame within which the batch should be processed. Currently only `24h` + # is supported. + completion_window:, + # The endpoint to be used for all requests in the batch. Currently + # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. + # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 + # embedding inputs across all requests in the batch. + endpoint:, + # The ID of an uploaded file that contains requests for the new batch. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. + input_file_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ) end # Retrieves a batch. @@ -25,7 +52,11 @@ module OpenAI ) .returns(OpenAI::Models::Batch) end - def retrieve(batch_id, request_options: {}) + def retrieve( + # The ID of the batch to retrieve. + batch_id, + request_options: {} + ) end # List your organization's batches. @@ -37,7 +68,17 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::Batch]) end - def list(after: nil, limit: nil, request_options: {}) + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + request_options: {} + ) end # Cancels an in-progress batch. 
The batch will be in status `cancelling` for up to @@ -50,7 +91,11 @@ module OpenAI ) .returns(OpenAI::Models::Batch) end - def cancel(batch_id, request_options: {}) + def cancel( + # The ID of the batch to cancel. + batch_id, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 5a235b78..c9d62665 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -36,16 +36,72 @@ module OpenAI .returns(OpenAI::Models::Beta::Assistant) end def create( + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, + # The description of the assistant. The maximum length is 512 characters. description: nil, + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. instructions: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The name of the assistant. The maximum length is 256 characters. name: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. 
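The batch endpoints annotated above compose into a simple lifecycle. A sketch with a hypothetical input file ID; everything else uses only values the doc strings name:

```ruby
batch = client.batches.create(
  completion_window: "24h",            # the only supported window per the docs
  endpoint: "/v1/chat/completions",
  input_file_id: "file-abc123"         # hypothetical JSONL file uploaded with purpose `batch`
)

batch = client.batches.retrieve(batch.id)
client.batches.cancel(batch.id)        # moves the batch into status `cancelling`
```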
For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # A list of tools enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} ) @@ -59,7 +115,11 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Assistant) end - def retrieve(assistant_id, request_options: {}) + def retrieve( + # The ID of the assistant to retrieve. + assistant_id, + request_options: {} + ) end # Modifies an assistant. @@ -95,17 +155,74 @@ module OpenAI .returns(OpenAI::Models::Beta::Assistant) end def update( + # The ID of the assistant to modify. assistant_id, + # The description of the assistant. The maximum length is 512 characters. description: nil, + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. instructions: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model: nil, + # The name of the assistant. The maximum length is 256 characters. name: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. 
Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # A list of tools enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} ) @@ -122,7 +239,25 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]) end - def list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) end # Delete an assistant. @@ -133,7 +268,11 @@ module OpenAI ) .returns(OpenAI::Models::Beta::AssistantDeleted) end - def delete(assistant_id, request_options: {}) + def delete( + # The ID of the assistant to delete. + assistant_id, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index b2f631a3..2170549d 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -22,7 +22,24 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Thread) end - def create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) + def create( + # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # start the thread with. + messages: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. 
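Put together, the assistant signatures above describe a full lifecycle: create, update, page through, delete. A sketch; the model name and tool list are illustrative, and `auto_paging_each` is an assumption about the `OpenAI::CursorPage` helper rather than something these signatures show:

```ruby
assistant = client.beta.assistants.create(
  model: "gpt-4o",
  name: "Data Helper",
  instructions: "Answer questions about uploaded CSV files.",
  tools: [{type: :code_interpreter}]
)

client.beta.assistants.update(assistant.id, metadata: {"team" => "analytics"})

# Cursor pagination: `after`/`before` take object IDs, per the doc strings above.
client.beta.assistants.list(limit: 20, order: :desc).auto_paging_each do |a|
  puts a.id
end

client.beta.assistants.delete(assistant.id)
```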
Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources: nil, + request_options: {} + ) end # Retrieves a thread. @@ -33,7 +50,11 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Thread) end - def retrieve(thread_id, request_options: {}) + def retrieve( + # The ID of the thread to retrieve. + thread_id, + request_options: {} + ) end # Modifies a thread. @@ -46,7 +67,23 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Thread) end - def update(thread_id, metadata: nil, tool_resources: nil, request_options: {}) + def update( + # The ID of the thread to modify. Only the `metadata` can be modified. + thread_id, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources: nil, + request_options: {} + ) end # Delete a thread. @@ -57,7 +94,11 @@ module OpenAI ) .returns(OpenAI::Models::Beta::ThreadDeleted) end - def delete(thread_id, request_options: {}) + def delete( + # The ID of the thread to delete. + thread_id, + request_options: {} + ) end # Create a thread and run it in one request. @@ -98,20 +139,93 @@ module OpenAI .returns(OpenAI::Models::Beta::Threads::Run) end def create_and_run( + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # Override the default system message of the assistant. This is useful for + # modifying the behavior on a per-run basis. instructions: nil, + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_completion_tokens: nil, + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_prompt_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
metadata: nil, + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # Options to create a new thread. If no thread is provided when running a request, + # an empty thread will be created. thread: nil, + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. tool_choice: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # Override the tools the assistant can use for this run. This is useful for + # modifying the behavior on a per-run basis. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. 
truncation_strategy: nil, request_options: {} ) @@ -184,20 +298,93 @@ module OpenAI ) end def create_and_run_streaming( + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # Override the default system message of the assistant. This is useful for + # modifying the behavior on a per-run basis. instructions: nil, + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_completion_tokens: nil, + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_prompt_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # Options to create a new thread. 
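The non-streaming `create_and_run` documented above folds thread creation and run execution into one request. A sketch with a hypothetical assistant ID:

```ruby
run = client.beta.threads.create_and_run(
  assistant_id: "asst_abc123", # hypothetical
  thread: {
    messages: [{role: :user, content: "Summarize our Q3 numbers."}]
  },
  max_completion_tokens: 1_000
)
puts run.status
```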
If no thread is provided when running a request, + # an empty thread will be created. thread: nil, + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. tool_choice: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # Override the tools the assistant can use for this run. This is useful for + # modifying the behavior on a per-run basis. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. truncation_strategy: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 2e0009e8..d417b9d0 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -26,7 +26,30 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Message) end - def create(thread_id, content:, role:, attachments: nil, metadata: nil, request_options: {}) + def create( + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # to create a message for. + thread_id, + # The text contents of the message. + content:, + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. + role:, + # A list of files attached to the message, and the tools they should be added to. + attachments: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ) end # Retrieve a message. @@ -38,7 +61,14 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Message) end - def retrieve(message_id, thread_id:, request_options: {}) + def retrieve( + # The ID of the message to retrieve. + message_id, + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # to which this message belongs. + thread_id:, + request_options: {} + ) end # Modifies a message. 
@@ -51,7 +81,20 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Message) end - def update(message_id, thread_id:, metadata: nil, request_options: {}) + def update( + # Path param: The ID of the message to modify. + message_id, + # Path param: The ID of the thread to which this message belongs. + thread_id:, + # Body param: Set of 16 key-value pairs that can be attached to an object. This + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ) end # Returns a list of messages for a given thread. @@ -68,11 +111,26 @@ module OpenAI .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message]) end def list( + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # the messages belong to. thread_id, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. order: nil, + # Filter messages by the run ID that generated them. run_id: nil, request_options: {} ) @@ -87,7 +145,13 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::MessageDeleted) end - def delete(message_id, thread_id:, request_options: {}) + def delete( + # The ID of the message to delete. + message_id, + # The ID of the thread to which this message belongs. + thread_id:, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index ecf30e5f..c2a318dd 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -50,23 +50,113 @@ module OpenAI .returns(OpenAI::Models::Beta::Threads::Run) end def create( + # Path param: The ID of the thread to run. thread_id, + # Body param: The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # Query param: A list of additional fields to include in the response. Currently + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, + # Body param: Appends additional instructions at the end of the instructions for + # the run. This is useful for modifying the behavior on a per-run basis without + # overriding other instructions. 
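The message endpoints above follow the same positional-thread-ID pattern. A sketch with a hypothetical thread ID:

```ruby
thread_id = "thread_abc123" # hypothetical

client.beta.threads.messages.create(
  thread_id,
  role: :user,
  content: "One follow-up: which quarter had the highest revenue?"
)

# `after`/`before` cursors and the `run_id` filter behave as documented above.
page = client.beta.threads.messages.list(thread_id, order: :asc, limit: 50)
```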
additional_instructions: nil, + # Body param: Adds additional messages to the thread before creating the run. additional_messages: nil, + # Body param: Overrides the + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. instructions: nil, + # Body param: The maximum number of completion tokens that may be used over the + # course of the run. The run will make a best effort to use only the number of + # completion tokens specified, across multiple turns of the run. If the run + # exceeds the number of completion tokens specified, the run will end with status + # `incomplete`. See `incomplete_details` for more info. max_completion_tokens: nil, + # Body param: The maximum number of prompt tokens that may be used over the course + # of the run. The run will make a best effort to use only the number of prompt + # tokens specified, across multiple turns of the run. If the run exceeds the + # number of prompt tokens specified, the run will end with status `incomplete`. + # See `incomplete_details` for more info. max_prompt_tokens: nil, + # Body param: Set of 16 key-value pairs that can be attached to an object. This + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Body param: The ID of the + # [Model](https://platform.openai.com/docs/api-reference/models) to be used to + # execute this run. If a value is provided here, it will override the model + # associated with the assistant. If not, the model associated with the assistant + # will be used. model: nil, + # Body param: Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Body param: **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # Body param: Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. 
Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # Body param: What sampling temperature to use, between 0 and 2. Higher values + # like 0.8 will make the output more random, while lower values like 0.2 will make + # it more focused and deterministic. temperature: nil, + # Body param: Controls which (if any) tool is called by the model. `none` means + # the model will not call any tools and instead generates a message. `auto` is the + # default value and means the model can pick between generating a message or + # calling one or more tools. `required` means the model must call one or more + # tools before responding to the user. Specifying a particular tool like + # `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. tool_choice: nil, + # Body param: Override the tools the assistant can use for this run. This is + # useful for modifying the behavior on a per-run basis. tools: nil, + # Body param: An alternative to sampling with temperature, called nucleus + # sampling, where the model considers the results of the tokens with top_p + # probability mass. So 0.1 means only the tokens comprising the top 10% + # probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, + # Body param: Controls for how a thread will be truncated prior to the run. Use + # this to control the initial context window of the run. truncation_strategy: nil, request_options: {} ) @@ -142,23 +232,113 @@ module OpenAI ) end def create_streaming( + # Path param: The ID of the thread to run. thread_id, + # Body param: The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # Query param: A list of additional fields to include in the response. Currently + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, + # Body param: Appends additional instructions at the end of the instructions for + # the run. This is useful for modifying the behavior on a per-run basis without + # overriding other instructions. additional_instructions: nil, + # Body param: Adds additional messages to the thread before creating the run. additional_messages: nil, + # Body param: Overrides the + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. instructions: nil, + # Body param: The maximum number of completion tokens that may be used over the + # course of the run. The run will make a best effort to use only the number of + # completion tokens specified, across multiple turns of the run. If the run + # exceeds the number of completion tokens specified, the run will end with status + # `incomplete`. See `incomplete_details` for more info. max_completion_tokens: nil, + # Body param: The maximum number of prompt tokens that may be used over the course + # of the run. 
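Since `create` here is the non-streaming variant, a caller typically polls `retrieve` until the run settles. A sketch with hypothetical IDs; the terminal status names are assumptions drawn from the run lifecycle, not from the signatures above:

```ruby
thread_id = "thread_abc123" # hypothetical IDs throughout
run = client.beta.threads.runs.create(thread_id, assistant_id: "asst_abc123")

until %w[completed failed cancelled expired requires_action].include?(run.status.to_s)
  sleep(1) # crude poll interval; real code would back off
  run = client.beta.threads.runs.retrieve(run.id, thread_id: thread_id)
end
```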
The run will make a best effort to use only the number of prompt + # tokens specified, across multiple turns of the run. If the run exceeds the + # number of prompt tokens specified, the run will end with status `incomplete`. + # See `incomplete_details` for more info. max_prompt_tokens: nil, + # Body param: Set of 16 key-value pairs that can be attached to an object. This + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Body param: The ID of the + # [Model](https://platform.openai.com/docs/api-reference/models) to be used to + # execute this run. If a value is provided here, it will override the model + # associated with the assistant. If not, the model associated with the assistant + # will be used. model: nil, + # Body param: Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Body param: **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # Body param: Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # Body param: What sampling temperature to use, between 0 and 2. Higher values + # like 0.8 will make the output more random, while lower values like 0.2 will make + # it more focused and deterministic. temperature: nil, + # Body param: Controls which (if any) tool is called by the model. `none` means + # the model will not call any tools and instead generates a message. `auto` is the + # default value and means the model can pick between generating a message or + # calling one or more tools. `required` means the model must call one or more + # tools before responding to the user. Specifying a particular tool like + # `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. 
tool_choice: nil, + # Body param: Override the tools the assistant can use for this run. This is + # useful for modifying the behavior on a per-run basis. tools: nil, + # Body param: An alternative to sampling with temperature, called nucleus + # sampling, where the model considers the results of the tokens with top_p + # probability mass. So 0.1 means only the tokens comprising the top 10% + # probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, + # Body param: Controls for how a thread will be truncated prior to the run. Use + # this to control the initial context window of the run. truncation_strategy: nil, request_options: {} ) @@ -173,7 +353,14 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Run) end - def retrieve(run_id, thread_id:, request_options: {}) + def retrieve( + # The ID of the run to retrieve. + run_id, + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # that was run. + thread_id:, + request_options: {} + ) end # Modifies a run. @@ -186,7 +373,21 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Run) end - def update(run_id, thread_id:, metadata: nil, request_options: {}) + def update( + # Path param: The ID of the run to modify. + run_id, + # Path param: The ID of the + # [thread](https://platform.openai.com/docs/api-reference/threads) that was run. + thread_id:, + # Body param: Set of 16 key-value pairs that can be attached to an object. This + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ) end # Returns a list of runs belonging to a thread. @@ -201,7 +402,27 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]) end - def list(thread_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) + def list( + # The ID of the thread the run belongs to. + thread_id, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) end # Cancels a run that is `in_progress`. @@ -213,7 +434,13 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Run) end - def cancel(run_id, thread_id:, request_options: {}) + def cancel( + # The ID of the run to cancel. + run_id, + # The ID of the thread to which this run belongs. 
+ thread_id:, + request_options: {} + ) end # When a run has the `status: "requires_action"` and `required_action.type` is @@ -229,7 +456,17 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Run) end - def submit_tool_outputs(run_id, thread_id:, tool_outputs:, request_options: {}) + def submit_tool_outputs( + # Path param: The ID of the run that requires the tool output submission. + run_id, + # Path param: The ID of the + # [thread](https://platform.openai.com/docs/api-reference/threads) to which this + # run belongs. + thread_id:, + # Body param: A list of tools for which the outputs are being submitted. + tool_outputs:, + request_options: {} + ) end # When a run has the `status: "requires_action"` and `required_action.type` is @@ -274,7 +511,17 @@ module OpenAI ] ) end - def submit_tool_outputs_streaming(run_id, thread_id:, tool_outputs:, request_options: {}) + def submit_tool_outputs_streaming( + # Path param: The ID of the run that requires the tool output submission. + run_id, + # Path param: The ID of the + # [thread](https://platform.openai.com/docs/api-reference/threads) to which this + # run belongs. + thread_id:, + # Body param: A list of tools for which the outputs are being submitted. + tool_outputs:, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 0987c273..4833b5fb 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -17,7 +17,24 @@ module OpenAI ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) end - def retrieve(step_id, thread_id:, run_id:, include: nil, request_options: {}) + def retrieve( + # Path param: The ID of the run step to retrieve. + step_id, + # Path param: The ID of the thread to which the run and run step belong. + thread_id:, + # Path param: The ID of the run to which the run step belongs. + run_id:, + # Query param: A list of additional fields to include in the response. Currently + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + include: nil, + request_options: {} + ) end # Returns a list of run steps belonging to a run. @@ -35,12 +52,34 @@ module OpenAI .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) end def list( + # Path param: The ID of the run the run steps belong to. run_id, + # Path param: The ID of the thread the run and run steps belong to. thread_id:, + # Query param: A cursor for use in pagination. `after` is an object ID that + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, ending with obj_foo, your subsequent call can include + # after=obj_foo in order to fetch the next page of the list. after: nil, + # Query param: A cursor for use in pagination. `before` is an object ID that + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, starting with obj_foo, your subsequent call can include + # before=obj_foo in order to fetch the previous page of the list. before: nil, + # Query param: A list of additional fields to include in the response. 
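When a run stops with `status: "requires_action"`, the flow documented above is to answer every pending tool call in one `submit_tool_outputs` call. A sketch continuing the polling example earlier; the `required_action` traversal and the `tool_call_id`/`output` keys are assumptions about the run object's shape:

```ruby
outputs = run.required_action.submit_tool_outputs.tool_calls.map do |call|
  {tool_call_id: call.id, output: "42"} # the computed result for this call
end

run = client.beta.threads.runs.submit_tool_outputs(
  run.id,
  thread_id: "thread_abc123", # hypothetical
  tool_outputs: outputs
)
```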
Currently + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, + # Query param: A limit on the number of objects to be returned. Limit can range + # between 1 and 100, and the default is 20. limit: nil, + # Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # ascending order and `desc` for descending order. order: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index b4a45a99..7f6a851c 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -75,35 +75,188 @@ module OpenAI .returns(OpenAI::Models::Chat::ChatCompletion) end def create( + # A list of messages comprising the conversation so far. Depending on the + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). messages:, + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. frequency_penalty: nil, + # Deprecated in favor of `tool_choice`. + # + # Controls which (if any) function is called by the model. + # + # `none` means the model will not call a function and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling a + # function. + # + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. function_call: nil, + # Deprecated in favor of `tools`. + # + # A list of functions the model may generate JSON inputs for. functions: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. logit_bias: nil, + # Whether to return log probabilities of the output tokens or not. If true, + # returns the log probabilities of each output token returned in the `content` of + # `message`. 
logprobs: nil, + # An upper bound for the number of tokens that can be generated for a completion, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_completion_tokens: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the chat + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. + # + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). max_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Output types that you would like the model to generate. Most models are capable + # of generating text, which is the default: + # + # `["text"]` + # + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: + # + # `["text", "audio"]` modalities: nil, + # How many chat completion choices to generate for each input message. Note that + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. n: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Static predicted output content, such as the content of a text file that is + # being regenerated. prediction: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. presence_penalty: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. response_format: nil, + # This feature is in Beta. If specified, our system will make a best effort to + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. seed: nil, + # Specifies the latency tier to use for processing the request. 
This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarantee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarantee. + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. service_tier: nil, + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. stop: nil, + # Whether or not to store the output of this chat completion request for use in + # our [model distillation](https://platform.openai.com/docs/guides/distillation) + # or [evals](https://platform.openai.com/docs/guides/evals) products. store: nil, + # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. + # + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. tool_choice: nil, + # A list of tools the model may call. Currently, only functions are supported as a + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. tools: nil, + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + # `logprobs` must be set to `true` if this parameter is used. top_logprobs: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, request_options: {} ) @@ -176,35 +329,188 @@ module OpenAI .returns(OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end def create_streaming( + # A list of messages comprising the conversation so far.
Depending on the + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). messages:, + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. frequency_penalty: nil, + # Deprecated in favor of `tool_choice`. + # + # Controls which (if any) function is called by the model. + # + # `none` means the model will not call a function and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling a + # function. + # + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. function_call: nil, + # Deprecated in favor of `tools`. + # + # A list of functions the model may generate JSON inputs for. functions: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. logit_bias: nil, + # Whether to return log probabilities of the output tokens or not. If true, + # returns the log probabilities of each output token returned in the `content` of + # `message`. logprobs: nil, + # An upper bound for the number of tokens that can be generated for a completion, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_completion_tokens: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the chat + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. + # + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). max_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Output types that you would like the model to generate. 
Most models are capable + # of generating text, which is the default: + # + # `["text"]` + # + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: + # + # `["text", "audio"]` modalities: nil, + # How many chat completion choices to generate for each input message. Note that + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. n: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Static predicted output content, such as the content of a text file that is + # being regenerated. prediction: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. presence_penalty: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. response_format: nil, + # This feature is in Beta. If specified, our system will make a best effort to + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. seed: nil, + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarantee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarantee. + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. service_tier: nil, + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. stop: nil, + # Whether or not to store the output of this chat completion request for use in + # our [model distillation](https://platform.openai.com/docs/guides/distillation) + # or [evals](https://platform.openai.com/docs/guides/evals) products.
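Because `seed` is only best-effort, the doc above points at `system_fingerprint` for detecting backend changes. A sketch of that pattern, reusing the assumed `client` from the earlier example (the `system_fingerprint` reader is assumed to follow the generated `ChatCompletion` model):

    completion = client.chat.completions.create(
      messages: [{role: "user", content: "Pick a number between 1 and 10."}],
      model: "gpt-4o",
      seed: 1234  # best-effort determinism, per the doc comment above
    )
    # If this value changes between calls, the backend changed and the same
    # seed may no longer reproduce the same completion.
    puts completion.system_fingerprint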
store: nil, + # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. + # + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. tool_choice: nil, + # A list of tools the model may call. Currently, only functions are supported as a + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. tools: nil, + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + # `logprobs` must be set to `true` if this parameter is used. top_logprobs: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, request_options: {} ) @@ -219,7 +525,11 @@ module OpenAI ) .returns(OpenAI::Models::Chat::ChatCompletion) end - def retrieve(completion_id, request_options: {}) + def retrieve( + # The ID of the chat completion to retrieve. + completion_id, + request_options: {} + ) end # Modify a stored chat completion. Only Chat Completions that have been created @@ -233,7 +543,18 @@ module OpenAI ) .returns(OpenAI::Models::Chat::ChatCompletion) end - def update(completion_id, metadata:, request_options: {}) + def update( + # The ID of the chat completion to update. + completion_id, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + request_options: {} + ) end # List stored Chat Completions. Only Chat Completions that have been stored with @@ -249,7 +570,22 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) end - def list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) + def list( + # Identifier for the last chat completion from the previous pagination request. 
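The `retrieve`/`update`/`list` signatures above only operate on completions created with `store: true`. A sketch with a placeholder ID, again reusing the assumed `client` (the metadata-filter shape follows the `list` docs that continue below):

    # "chatcmpl-abc123" is a placeholder, not a real completion ID.
    client.chat.completions.update("chatcmpl-abc123", metadata: {"topic" => "greeting"})

    page = client.chat.completions.list(metadata: {"topic" => "greeting"}, limit: 20)
    # `page` is an OpenAI::CursorPage per the return type above; its iteration
    # helpers are defined elsewhere in the SDK.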
+ after: nil, + # Number of Chat Completions to retrieve. + limit: nil, + # A list of metadata keys to filter the Chat Completions by. Example: + # + # `metadata[key1]=value1&metadata[key2]=value2` + metadata: nil, + # The model used to generate the Chat Completions. + model: nil, + # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. + order: nil, + request_options: {} + ) end # Delete a stored chat completion. Only Chat Completions that have been created @@ -261,7 +597,11 @@ module OpenAI ) .returns(OpenAI::Models::Chat::ChatCompletionDeleted) end - def delete(completion_id, request_options: {}) + def delete( + # The ID of the chat completion to delete. + completion_id, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 26b43645..3e7c16e2 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -17,7 +17,18 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end - def list(completion_id, after: nil, limit: nil, order: nil, request_options: {}) + def list( + # The ID of the chat completion to retrieve messages from. + completion_id, + # Identifier for the last message from the previous pagination request. + after: nil, + # Number of messages to retrieve. + limit: nil, + # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` + # for descending order. Defaults to `asc`. + order: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 2291fad9..949f94ed 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -35,22 +35,108 @@ module OpenAI .returns(OpenAI::Models::Completion) end def create( + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, + # The prompt(s) to generate completions for, encoded as a string, array of + # strings, array of tokens, or array of token arrays. + # + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. prompt:, + # Generates `best_of` completions server-side and returns the "best" (the one with + # the highest log probability per token). Results cannot be streamed. + # + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. best_of: nil, + # Echo back the prompt in addition to the completion echo: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. 
+ # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) frequency_penalty: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. + # + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. logit_bias: nil, + # Include the log probabilities on the `logprobs` most likely output tokens, as + # well as the chosen tokens. For example, if `logprobs` is 5, the API will return a + # list of the 5 most likely tokens. The API will always return the `logprob` of + # the sampled token, so there may be up to `logprobs+1` elements in the response. + # + # The maximum value for `logprobs` is 5. logprobs: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the + # completion. + # + # The token count of your prompt plus `max_tokens` cannot exceed the model's + # context length. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. max_tokens: nil, + # How many completions to generate for each prompt. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. n: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) presence_penalty: nil, + # If specified, our system will make a best effort to sample deterministically, + # such that repeated requests with the same `seed` and parameters should return + # the same result. + # + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. seed: nil, + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. stop: nil, + # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, + # The suffix that comes after a completion of inserted text. + # + # This parameter is only supported for `gpt-3.5-turbo-instruct`. suffix: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. + # + # We generally recommend altering this or `top_p` but not both. temperature: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered.
+ # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) @@ -88,22 +174,108 @@ module OpenAI .returns(OpenAI::Stream[OpenAI::Models::Completion]) end def create_streaming( + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, + # The prompt(s) to generate completions for, encoded as a string, array of + # strings, array of tokens, or array of token arrays. + # + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. prompt:, + # Generates `best_of` completions server-side and returns the "best" (the one with + # the highest log probability per token). Results cannot be streamed. + # + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. best_of: nil, + # Echo back the prompt in addition to the completion echo: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) frequency_penalty: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. + # + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. logit_bias: nil, + # Include the log probabilities on the `logprobs` most likely output tokens, as + # well as the chosen tokens. For example, if `logprobs` is 5, the API will return a + # list of the 5 most likely tokens. The API will always return the `logprob` of + # the sampled token, so there may be up to `logprobs+1` elements in the response. + # + # The maximum value for `logprobs` is 5. logprobs: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the + # completion. + # + # The token count of your prompt plus `max_tokens` cannot exceed the model's + # context length. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens.
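The `logit_bias` doc above carries the classic example of banning `<|endoftext|>`; in Ruby the token-ID keys become hash keys. A sketch against the streaming variant, reusing the assumed `client` (the `OpenAI::Stream` return type is assumed to be enumerable here):

    stream = client.completions.create_streaming(
      model: "gpt-3.5-turbo-instruct",
      prompt: "Once upon a time",
      logit_bias: {"50256" => -100},  # ban <|endoftext|>, per the doc comment above
      max_tokens: 64
    )
    stream.each { |completion| print completion.choices.first.text }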
max_tokens: nil, + # How many completions to generate for each prompt. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. n: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) presence_penalty: nil, + # If specified, our system will make a best effort to sample deterministically, + # such that repeated requests with the same `seed` and parameters should return + # the same result. + # + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. seed: nil, + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. stop: nil, + # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, + # The suffix that comes after a completion of inserted text. + # + # This parameter is only supported for `gpt-3.5-turbo-instruct`. suffix: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. + # + # We generally recommend altering this or `top_p` but not both. temperature: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index a251c27d..025c37a7 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -15,7 +15,34 @@ module OpenAI ) .returns(OpenAI::Models::CreateEmbeddingResponse) end - def create(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) + def create( + # Input text to embed, encoded as a string or array of tokens. To embed multiple + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. + input:, + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. 
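The embeddings `create` above is the simplest of these endpoints. A sketch using only the two required parameters documented here (model name is illustrative; the `data`/`embedding` readers are assumed to follow the generated `CreateEmbeddingResponse` model):

    response = client.embeddings.create(
      input: "The food was delicious and the staff were friendly.",
      model: "text-embedding-3-small"
    )
    vector = response.data.first.embedding  # Array of Floats
    puts vector.length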
+ model:, + # The number of dimensions the resulting output embeddings should have. Only + # supported in `text-embedding-3` and later models. + dimensions: nil, + # The format to return the embeddings in. Can be either `float` or + # [`base64`](https://pypi.org/project/pybase64/). + encoding_format: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + user: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 39b009e9..441f93e0 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -32,7 +32,16 @@ module OpenAI ) .returns(OpenAI::Models::FileObject) end - def create(file:, purpose:, request_options: {}) + def create( + # The File object (not file name) to be uploaded. + file:, + # The intended purpose of the uploaded file. One of: - `assistants`: Used in the + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets + purpose:, + request_options: {} + ) end # Returns information about a specific file. @@ -43,7 +52,11 @@ module OpenAI ) .returns(OpenAI::Models::FileObject) end - def retrieve(file_id, request_options: {}) + def retrieve( + # The ID of the file to use for this request. + file_id, + request_options: {} + ) end # Returns a list of files. @@ -57,7 +70,22 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::FileObject]) end - def list(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 10,000, and the default is 10,000. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + # Only return files with the given purpose. + purpose: nil, + request_options: {} + ) end # Delete a file. @@ -68,7 +96,11 @@ module OpenAI ) .returns(OpenAI::Models::FileDeleted) end - def delete(file_id, request_options: {}) + def delete( + # The ID of the file to use for this request. + file_id, + request_options: {} + ) end # Returns the contents of the specified file. @@ -79,7 +111,11 @@ module OpenAI ) .returns(T.anything) end - def content(file_id, request_options: {}) + def content( + # The ID of the file to use for this request. + file_id, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 5c720561..e1709e2c 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -31,14 +31,63 @@ module OpenAI .returns(OpenAI::Models::FineTuning::FineTuningJob) end def create( + # The name of the model to fine-tune. 
You can select one of the + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). model:, + # The ID of an uploaded file that contains training data. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. + # + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. training_file:, + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. hyperparameters: nil, + # A list of integrations to enable for your fine-tuning job. integrations: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The method used for fine-tuning. method_: nil, + # The seed controls the reproducibility of the job. Passing in the same seed and + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. seed: nil, + # A string of up to 64 characters that will be added to your fine-tuned model + # name. + # + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. suffix: nil, + # The ID of an uploaded file that contains validation data. + # + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. + # + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. validation_file: nil, request_options: {} ) @@ -54,7 +103,11 @@ module OpenAI ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end - def retrieve(fine_tuning_job_id, request_options: {}) + def retrieve( + # The ID of the fine-tuning job. + fine_tuning_job_id, + request_options: {} + ) end # List your organization's fine-tuning jobs @@ -67,7 +120,16 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end - def list(after: nil, limit: nil, metadata: nil, request_options: {}) + def list( + # Identifier for the last job from the previous pagination request. + after: nil, + # Number of fine-tuning jobs to retrieve. + limit: nil, + # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. + # Alternatively, set `metadata=null` to indicate no metadata. 
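Assembling the fine-tuning `create` parameters above into a call (file ID and suffix are placeholders; per the docs, `training_file` must reference a JSONL file uploaded with purpose `fine-tune`):

    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini",
      training_file: "file-abc123",  # placeholder file ID
      suffix: "custom-model-name",   # yields e.g. ft:gpt-4o-mini:openai:custom-model-name:...
      seed: 42                       # reproducibility, per the doc comment above
    )
    puts job.id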
+ metadata: nil, + request_options: {} + ) end # Immediately cancel a fine-tune job. @@ -78,7 +140,11 @@ module OpenAI ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end - def cancel(fine_tuning_job_id, request_options: {}) + def cancel( + # The ID of the fine-tuning job to cancel. + fine_tuning_job_id, + request_options: {} + ) end # Get status updates for a fine-tuning job. @@ -91,7 +157,15 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end - def list_events(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) + def list_events( + # The ID of the fine-tuning job to get events for. + fine_tuning_job_id, + # Identifier for the last event from the previous pagination request. + after: nil, + # Number of events to retrieve. + limit: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index d0a7bb83..24509186 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -15,7 +15,15 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end - def list(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) + def list( + # The ID of the fine-tuning job to get checkpoints for. + fine_tuning_job_id, + # Identifier for the last checkpoint ID from the previous pagination request. + after: nil, + # Number of checkpoints to retrieve. + limit: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 3655f172..396f2edd 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -17,11 +17,25 @@ module OpenAI .returns(OpenAI::Models::ImagesResponse) end def create_variation( + # The image to use as the basis for the variation(s). Must be a valid PNG file, + # less than 4MB, and square. image:, + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. model: nil, + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. n: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. size: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) @@ -43,13 +57,31 @@ module OpenAI .returns(OpenAI::Models::ImagesResponse) end def edit( + # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask + # is not provided, image must have transparency, which will be used as the mask. image:, + # A text description of the desired image(s). The maximum length is 1000 + # characters. prompt:, + # An additional image whose fully transparent areas (e.g. where alpha is zero) + # indicate where `image` should be edited. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. mask: nil, + # The model to use for image generation. 
Only `dall-e-2` is supported at this + # time. model: nil, + # The number of images to generate. Must be between 1 and 10. n: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. size: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) @@ -71,13 +103,34 @@ module OpenAI .returns(OpenAI::Models::ImagesResponse) end def generate( + # A text description of the desired image(s). The maximum length is 1000 + # characters for `dall-e-2` and 4000 characters for `dall-e-3`. prompt:, + # The model to use for image generation. model: nil, + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. n: nil, + # The quality of the image that will be generated. `hd` creates images with finer + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. quality: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. size: nil, + # The style of the generated images. Must be one of `vivid` or `natural`. Vivid + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. style: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 04a4cf0d..52e65f29 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -12,7 +12,11 @@ module OpenAI ) .returns(OpenAI::Models::Model) end - def retrieve(model, request_options: {}) + def retrieve( + # The ID of the model to use for this request + model, + request_options: {} + ) end # Lists the currently available models, and provides basic information about each @@ -33,7 +37,11 @@ module OpenAI ) .returns(OpenAI::Models::ModelDeleted) end - def delete(model, request_options: {}) + def delete( + # The model to delete + model, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 3b856753..3b9b2bd0 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -17,7 +17,17 @@ module OpenAI ) .returns(OpenAI::Models::ModerationCreateResponse) end - def create(input:, model: nil, request_options: {}) + def create( + # Input (or inputs) to classify. Can be a single string, an array of strings, or + # an array of multi-modal input objects similar to other models. 
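The moderations `create` beginning here needs only `input`. A one-line sketch, reusing the assumed `client` (the `results`/`flagged` readers are assumed to follow the generated `ModerationCreateResponse` model):

    result = client.moderations.create(input: "I want to hug them.")
    puts result.results.first.flagged  # => true / false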
+ input:, + # The content moderation model you would like to use. Learn more in + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). + model: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index ded8cf36..823cc8a1 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -49,22 +49,109 @@ module OpenAI .returns(OpenAI::Models::Responses::Response) end def create( + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) input:, + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image URLs from the input message. + # - `computer_call_output.output.image_url`: Include image URLs from the computer + # call output. include: nil, + # Inserts a system (or developer) message as the first item in the model's + # context. + # + # When used along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple + # to swap out system (or developer) messages in new responses. instructions: nil, + # An upper bound for the number of tokens that can be generated for a response, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, + # The unique ID of the previous response to the model. Use this to create + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # Whether to store the generated model response for later retrieval via API. store: nil, + # What sampling temperature to use, between 0 and 2.
Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. tool_choice: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. truncation: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) @@ -149,22 +236,109 @@ module OpenAI ) end def create_streaming( + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) input:, + # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Specify additional output data to include in the model response. 
Currently + # supported values are: + # + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image URLs from the input message. + # - `computer_call_output.output.image_url`: Include image URLs from the computer + # call output. include: nil, + # Inserts a system (or developer) message as the first item in the model's + # context. + # + # When used along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple + # to swap out system (or developer) messages in new responses. instructions: nil, + # An upper bound for the number of tokens that can be generated for a response, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, + # The unique ID of the previous response to the model. Use this to create + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # Whether to store the generated model response for later retrieval via API. store: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. tool_choice: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling).
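The `previous_response_id` doc above is the conversation-state mechanism for the Responses API. A two-turn sketch, reusing the assumed `client` (the `id` reader is assumed to follow the generated `Response` model):

    first = client.responses.create(input: "Tell me a joke about Ruby.", model: "gpt-4o")

    followup = client.responses.create(
      input: "Explain why that joke is funny.",
      model: "gpt-4o",
      previous_response_id: first.id  # chains turns, per the doc comment above
    )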
tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. truncation: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ) @@ -179,7 +353,14 @@ module OpenAI ) .returns(OpenAI::Models::Responses::Response) end - def retrieve(response_id, include: nil, request_options: {}) + def retrieve( + # The ID of the response to retrieve. + response_id, + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + include: nil, + request_options: {} + ) end # Deletes a model response with the given ID. @@ -190,7 +371,11 @@ module OpenAI ) .void end - def delete(response_id, request_options: {}) + def delete( + # The ID of the response to delete. + response_id, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index afd82a10..f6d2e651 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -29,7 +29,23 @@ module OpenAI ] ) end - def list(response_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) + def list( + # The ID of the response to retrieve input items for. + response_id, + # An item ID to list items after, used in pagination. + after: nil, + # An item ID to list items before, used in pagination. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # The order to return the input items in. Default is `asc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + order: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 561a624f..98a58dc5 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -36,7 +36,23 @@ module OpenAI ) .returns(OpenAI::Models::Upload) end - def create(bytes:, filename:, mime_type:, purpose:, request_options: {}) + def create( + # The number of bytes in the file you are uploading. + bytes:, + # The name of the file to upload. + filename:, + # The MIME type of the file. + # + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. + mime_type:, + # The intended purpose of the uploaded file. 
+ # + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + purpose:, + request_options: {} + ) end # Cancels the Upload. No Parts may be added after an Upload is cancelled. @@ -47,7 +63,11 @@ module OpenAI ) .returns(OpenAI::Models::Upload) end - def cancel(upload_id, request_options: {}) + def cancel( + # The ID of the Upload. + upload_id, + request_options: {} + ) end # Completes the @@ -72,7 +92,16 @@ module OpenAI ) .returns(OpenAI::Models::Upload) end - def complete(upload_id, part_ids:, md5: nil, request_options: {}) + def complete( + # The ID of the Upload. + upload_id, + # The ordered list of Part IDs. + part_ids:, + # The optional md5 checksum for the file contents to verify if the bytes uploaded + # matches what you expect. + md5: nil, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 347903f3..6e52432f 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -23,7 +23,13 @@ module OpenAI ) .returns(OpenAI::Models::Uploads::UploadPart) end - def create(upload_id, data:, request_options: {}) + def create( + # The ID of the Upload. + upload_id, + # The chunk of bytes for this Part. + data:, + request_options: {} + ) end sig { params(client: OpenAI::Client).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 4420a17f..32aecdbf 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -27,10 +27,23 @@ module OpenAI .returns(OpenAI::Models::VectorStore) end def create( + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, + # The expiration policy for a vector store. expires_after: nil, + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. file_ids: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The name of the vector store. name: nil, request_options: {} ) @@ -44,7 +57,11 @@ module OpenAI ) .returns(OpenAI::Models::VectorStore) end - def retrieve(vector_store_id, request_options: {}) + def retrieve( + # The ID of the vector store to retrieve. + vector_store_id, + request_options: {} + ) end # Modifies a vector store. @@ -58,7 +75,22 @@ module OpenAI ) .returns(OpenAI::Models::VectorStore) end - def update(vector_store_id, expires_after: nil, metadata: nil, name: nil, request_options: {}) + def update( + # The ID of the vector store to modify. + vector_store_id, + # The expiration policy for a vector store. + expires_after: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. 
Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the vector store. + name: nil, + request_options: {} + ) end # Returns a list of vector stores. @@ -72,7 +104,25 @@ module OpenAI ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) end - def list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) end # Delete a vector store. @@ -83,7 +133,11 @@ module OpenAI ) .returns(OpenAI::Models::VectorStoreDeleted) end - def delete(vector_store_id, request_options: {}) + def delete( + # The ID of the vector store to delete. + vector_store_id, + request_options: {} + ) end # Search a vector store for relevant chunks based on a query and file attributes @@ -101,11 +155,18 @@ module OpenAI .returns(OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse]) end def search( + # The ID of the vector store to search. vector_store_id, + # A query string for a search query:, + # A filter to apply based on file attributes. filters: nil, + # The maximum number of results to return. This number should be between 1 and 50 + # inclusive. max_num_results: nil, + # Ranking options for search. ranking_options: nil, + # Whether to rewrite the natural language query for vector search. rewrite_query: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index dbed991d..ec4e8e7a 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -18,7 +18,24 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end - def create(vector_store_id, file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) + def create( + # The ID of the vector store for which to create a File Batch. + vector_store_id, + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. + file_ids:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes: nil, + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. 
+ chunking_strategy: nil, + request_options: {} + ) end # Retrieves a vector store file batch. @@ -30,7 +47,13 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end - def retrieve(batch_id, vector_store_id:, request_options: {}) + def retrieve( + # The ID of the file batch being retrieved. + batch_id, + # The ID of the vector store that the file batch belongs to. + vector_store_id:, + request_options: {} + ) end # Cancel a vector store file batch. This attempts to cancel the processing of @@ -43,7 +66,13 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end - def cancel(batch_id, vector_store_id:, request_options: {}) + def cancel( + # The ID of the file batch to cancel. + batch_id, + # The ID of the vector store that the file batch belongs to. + vector_store_id:, + request_options: {} + ) end # Returns a list of vector store files in a batch. @@ -61,12 +90,28 @@ module OpenAI .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list_files( + # Path param: The ID of the file batch that the files belong to. batch_id, + # Path param: The ID of the vector store that the files belong to. vector_store_id:, + # Query param: A cursor for use in pagination. `after` is an object ID that + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, ending with obj_foo, your subsequent call can include + # after=obj_foo in order to fetch the next page of the list. after: nil, + # Query param: A cursor for use in pagination. `before` is an object ID that + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, starting with obj_foo, your subsequent call can include + # before=obj_foo in order to fetch the previous page of the list. before: nil, + # Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, + # `cancelled`. filter: nil, + # Query param: A limit on the number of objects to be returned. Limit can range + # between 1 and 100, and the default is 20. limit: nil, + # Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # ascending order and `desc` for descending order. order: nil, request_options: {} ) diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index daf2b28a..2fc3ae57 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -20,7 +20,24 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end - def create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) + def create( + # The ID of the vector store for which to create a File. + vector_store_id, + # A [File](https://platform.openai.com/docs/api-reference/files) ID that the + # vector store should use. Useful for tools like `file_search` that can access + # files. + file_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes: nil, + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. 
+ chunking_strategy: nil, + request_options: {} + ) end # Retrieves a vector store file. @@ -32,7 +49,13 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end - def retrieve(file_id, vector_store_id:, request_options: {}) + def retrieve( + # The ID of the file being retrieved. + file_id, + # The ID of the vector store that the file belongs to. + vector_store_id:, + request_options: {} + ) end # Update attributes on a vector store file. @@ -45,7 +68,19 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end - def update(file_id, vector_store_id:, attributes:, request_options: {}) + def update( + # Path param: The ID of the file to update attributes. + file_id, + # Path param: The ID of the vector store the file belongs to. + vector_store_id:, + # Body param: Set of 16 key-value pairs that can be attached to an object. This + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. Keys are + # strings with a maximum length of 64 characters. Values are strings with a + # maximum length of 512 characters, booleans, or numbers. + attributes:, + request_options: {} + ) end # Returns a list of vector store files. @@ -62,11 +97,25 @@ module OpenAI .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list( + # The ID of the vector store that the files belong to. vector_store_id, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. filter: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. order: nil, request_options: {} ) @@ -84,7 +133,13 @@ module OpenAI ) .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted) end - def delete(file_id, vector_store_id:, request_options: {}) + def delete( + # The ID of the file to delete. + file_id, + # The ID of the vector store that the file belongs to. + vector_store_id:, + request_options: {} + ) end # Retrieve the parsed contents of a vector store file. @@ -96,7 +151,13 @@ module OpenAI ) .returns(OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse]) end - def content(file_id, vector_store_id:, request_options: {}) + def content( + # The ID of the file within the vector store. + file_id, + # The ID of the vector store. 
+        vector_store_id:,
+        request_options: {}
+      )
       end

       sig { params(client: OpenAI::Client).returns(T.attached_class) }
diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi
index 62ce155d..8faebf37 100644
--- a/rbi/lib/openai/util.rbi
+++ b/rbi/lib/openai/util.rbi
@@ -68,7 +68,13 @@ module OpenAI
       params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean)
         .returns(T.anything)
     end
-    def deep_merge(*values, sentinel: nil, concat: false)
+    def deep_merge(
+      *values,
+      # The value to return if no values are provided.
+      sentinel: nil,
+      # Whether to merge sequences by concatenation.
+      concat: false
+    )
     end

     # @api private

From a4a251ad0b26f1b8859eabbc87bdc3569568a5bb Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 14 Mar 2025 05:19:45 +0000
Subject: [PATCH 012/295] chore: touch up sdk usage examples (#14)

---
 README.md                 |  2 +-
 lib/openai/base_model.rb  | 22 ++++++++++------------
 lib/openai/base_page.rb   |  8 +++++++-
 lib/openai/base_stream.rb | 14 ++++++++++----
 lib/openai/cursor_page.rb |  8 +++++++-
 lib/openai/page.rb        |  8 +++++++-
 lib/openai/stream.rb      | 14 ++++++++++----
 7 files changed, 52 insertions(+), 24 deletions(-)

diff --git a/README.md b/README.md
index c5e359bc..8b458397 100644
--- a/README.md
+++ b/README.md
@@ -73,7 +73,7 @@ end
 We provide support for streaming responses using Server-Sent Events (SSE).

 ```ruby
-stream = openai.chat_completions_create_streaming(
+stream = openai.chat.completions.create_streaming(
   messages: [{
     role: "user",
     content: "Say this is a test"
diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb
index 3abd2ca3..09b3d852 100644
--- a/lib/openai/base_model.rb
+++ b/lib/openai/base_model.rb
@@ -306,7 +306,7 @@ def try_strict_coerce(value)
   #   when OpenAI::Models::ChatModel::O1
   #     # ...
   #   else
-  #     # ...
+  #     puts(chat_model)
   #   end
   # ```
   #
@@ -320,7 +320,7 @@ def try_strict_coerce(value)
   #   in :o1
   #     # ...
   #   else
-  #     # ...
+  #     puts(chat_model)
   #   end
   # ```
   class Enum
@@ -407,13 +407,13 @@ def try_strict_coerce(value)
   #   # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart`
   #   case chat_completion_content_part
   #   when OpenAI::Models::Chat::ChatCompletionContentPartText
-  #     # ...
+  #     puts(chat_completion_content_part.text)
   #   when OpenAI::Models::Chat::ChatCompletionContentPartImage
-  #     # ...
+  #     puts(chat_completion_content_part.image_url)
   #   when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio
-  #     # ...
+  #     puts(chat_completion_content_part.input_audio)
   #   else
-  #     # ...
+  #     puts(chat_completion_content_part)
   #   end
   # ```
   #
@@ -421,15 +421,13 @@ def try_strict_coerce(value)
   # ```ruby
   #   case chat_completion_content_part
   #   in {type: :text, text: text}
-  #     # ...
+  #     puts(text)
   #   in {type: :image_url, image_url: image_url}
-  #     # ...
+  #     puts(image_url)
   #   in {type: :input_audio, input_audio: input_audio}
-  #     # ...
+  #     puts(input_audio)
-  #   in {type: :file, file: file}
-  #     # ...
   #   else
-  #     # ...
+  #     puts(chat_completion_content_part)
   #   end
   # ```
   class Union
diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb
index 481df2ea..c7489dfe 100644
--- a/lib/openai/base_page.rb
+++ b/lib/openai/base_page.rb
@@ -19,7 +19,13 @@ module OpenAI
   #
   # @example
   # ```ruby
-  #   completions = page.to_enum.take(2)
+  #   completions = page
+  #     .to_enum
+  #     .lazy
+  #     .select { _1.object_id.even?
} + # .map(&:itself) + # .take(2) + # .to_a # # completions => Array # ``` diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 2aef50fe..59f8f874 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -3,16 +3,22 @@ module OpenAI # @example # ```ruby - # stream.for_each do |message| - # puts(message) + # stream.for_each do |chunk| + # puts(chunk) # end # ``` # # @example # ```ruby - # messages = stream.to_enum.take(2) + # chunks = stream + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # - # messages => Array + # chunks => Array # ``` module BaseStream # @return [void] diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 7773eb35..9ee3cd79 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -17,7 +17,13 @@ module OpenAI # # @example # ```ruby - # completions = cursor_page.to_enum.take(2) + # completions = cursor_page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # # completions => Array # ``` diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 0d0866e7..55b56fe4 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -17,7 +17,13 @@ module OpenAI # # @example # ```ruby - # models = page.to_enum.take(2) + # models = page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # # models => Array # ``` diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index f6f0b113..2d448ef0 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -3,16 +3,22 @@ module OpenAI # @example # ```ruby - # stream.for_each do |message| - # puts(message) + # stream.for_each do |event| + # puts(event) # end # ``` # # @example # ```ruby - # messages = stream.to_enum.take(2) + # events = stream + # .to_enum + # .lazy + # .select { _1.object_id.even? 
} + # .map(&:itself) + # .take(2) + # .to_a # - # messages => Array + # events => Array # ``` class Stream include OpenAI::BaseStream From 5d34fa26ed577ce7c924e2bb495230bd97b3207b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 05:38:31 +0000 Subject: [PATCH 013/295] chore: error fields are now mutable in keeping with rest of SDK (#15) --- lib/openai/cursor_page.rb | 2 +- lib/openai/errors.rb | 32 ++++++++--------- lib/openai/page.rb | 4 +-- rbi/lib/openai/cursor_page.rbi | 4 +-- rbi/lib/openai/errors.rbi | 64 ++++++++++++++++++++++++++++++++++ rbi/lib/openai/page.rbi | 4 +-- sig/openai/cursor_page.rbs | 2 +- sig/openai/errors.rbs | 14 ++++---- sig/openai/page.rbs | 2 +- 9 files changed, 96 insertions(+), 32 deletions(-) diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 9ee3cd79..92a6b024 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -30,7 +30,7 @@ module OpenAI class CursorPage include OpenAI::BasePage - # @return [Array] + # @return [Array, nil] attr_accessor :data # @return [Boolean] diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index 90cebc97..7a4228e8 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -4,7 +4,7 @@ module OpenAI class Error < StandardError # @!parse # # @return [StandardError, nil] - # attr_reader :cause + # attr_accessor :cause end class ConversionError < OpenAI::Error @@ -12,22 +12,22 @@ class ConversionError < OpenAI::Error class APIError < OpenAI::Error # @return [URI::Generic] - attr_reader :url + attr_accessor :url # @return [Integer, nil] - attr_reader :status + attr_accessor :status # @return [Object, nil] - attr_reader :body + attr_accessor :body # @return [String, nil] - attr_reader :code + attr_accessor :code # @return [String, nil] - attr_reader :param + attr_accessor :param # @return [String, nil] - attr_reader :type + attr_accessor :type # @api private # @@ -50,23 +50,23 @@ def initialize(url:, status: nil, body: nil, request: nil, response: nil, messag class APIConnectionError < OpenAI::APIError # @!parse # # @return [nil] - # attr_reader :status + # attr_accessor :status # @!parse # # @return [nil] - # attr_reader :body + # attr_accessor :body # @!parse # # @return [nil] - # attr_reader :code + # attr_accessor :code # @!parse # # @return [nil] - # attr_reader :param + # attr_accessor :param # @!parse # # @return [nil] - # attr_reader :type + # attr_accessor :type # @api private # @@ -147,19 +147,19 @@ def self.for(url:, status:, body:, request:, response:, message: nil) # @!parse # # @return [Integer] - # attr_reader :status + # attr_accessor :status # @!parse # # @return [String, nil] - # attr_reader :code + # attr_accessor :code # @!parse # # @return [String, nil] - # attr_reader :param + # attr_accessor :param # @!parse # # @return [String, nil] - # attr_reader :type + # attr_accessor :type # @api private # diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 55b56fe4..40ccc49a 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -30,7 +30,7 @@ module OpenAI class Page include OpenAI::BasePage - # @return [Array] + # @return [Array, nil] attr_accessor :data # @return [String] @@ -53,7 +53,7 @@ def initialize(client:, req:, headers:, page_data:) end case page_data - in {object: String | nil => object} + in {object: String => object} @object = object else end diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index a489a9ff..c40bab43 100644 --- 
a/rbi/lib/openai/cursor_page.rbi +++ b/rbi/lib/openai/cursor_page.rbi @@ -6,11 +6,11 @@ module OpenAI Elem = type_member - sig { returns(T::Array[Elem]) } + sig { returns(T.nilable(T::Array[Elem])) } def data end - sig { params(_: T::Array[Elem]).returns(T::Array[Elem]) } + sig { params(_: T.nilable(T::Array[Elem])).returns(T.nilable(T::Array[Elem])) } def data=(_) end diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi index 1408d718..56d79f0b 100644 --- a/rbi/lib/openai/errors.rbi +++ b/rbi/lib/openai/errors.rbi @@ -5,6 +5,10 @@ module OpenAI sig { returns(T.nilable(StandardError)) } def cause end + + sig { params(_: T.nilable(StandardError)).returns(T.nilable(StandardError)) } + def cause=(_) + end end class ConversionError < OpenAI::Error @@ -15,26 +19,50 @@ module OpenAI def url end + sig { params(_: URI::Generic).returns(URI::Generic) } + def url=(_) + end + sig { returns(T.nilable(Integer)) } def status end + sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } + def status=(_) + end + sig { returns(T.nilable(T.anything)) } def body end + sig { params(_: T.nilable(T.anything)).returns(T.nilable(T.anything)) } + def body=(_) + end + sig { returns(T.nilable(String)) } def code end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def code=(_) + end + sig { returns(T.nilable(String)) } def param end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def param=(_) + end + sig { returns(T.nilable(String)) } def type end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def type=(_) + end + # @api private sig do params( @@ -56,22 +84,42 @@ module OpenAI def status end + sig { params(_: NilClass).void } + def status=(_) + end + sig { void } def body end + sig { params(_: NilClass).void } + def body=(_) + end + sig { void } def code end + sig { params(_: NilClass).void } + def code=(_) + end + sig { void } def param end + sig { params(_: NilClass).void } + def param=(_) + end + sig { void } def type end + sig { params(_: NilClass).void } + def type=(_) + end + # @api private sig do params( @@ -125,18 +173,34 @@ module OpenAI def status end + sig { params(_: Integer).returns(Integer) } + def status=(_) + end + sig { returns(T.nilable(String)) } def code end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def code=(_) + end + sig { returns(T.nilable(String)) } def param end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def param=(_) + end + sig { returns(T.nilable(String)) } def type end + sig { params(_: T.nilable(String)).returns(T.nilable(String)) } + def type=(_) + end + # @api private sig do params( diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi index 3b8ff55d..a738bab8 100644 --- a/rbi/lib/openai/page.rbi +++ b/rbi/lib/openai/page.rbi @@ -6,11 +6,11 @@ module OpenAI Elem = type_member - sig { returns(T::Array[Elem]) } + sig { returns(T.nilable(T::Array[Elem])) } def data end - sig { params(_: T::Array[Elem]).returns(T::Array[Elem]) } + sig { params(_: T.nilable(T::Array[Elem])).returns(T.nilable(T::Array[Elem])) } def data=(_) end diff --git a/sig/openai/cursor_page.rbs b/sig/openai/cursor_page.rbs index 3d313782..5aa7c659 100644 --- a/sig/openai/cursor_page.rbs +++ b/sig/openai/cursor_page.rbs @@ -2,7 +2,7 @@ module OpenAI class CursorPage[Elem] include OpenAI::BasePage[Elem] - attr_accessor data: ::Array[Elem] + attr_accessor data: ::Array[Elem]? 
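PATCH 013 has two practical consequences for calling code: the API error fields become writable, and `data` on page objects becomes nilable. A minimal sketch of what that allows, assuming a configured `openai` client (the variable name and the listed endpoint are illustrative, not part of the patch):

```ruby
require "openai"

openai = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

begin
  page = openai.models.list
  # `data` is now typed as nilable, so guard before iterating.
  (page.data || []).each { |model| puts(model.id) }
rescue OpenAI::APIError => e
  # Error fields are plain attr_accessors after this patch, so a caller
  # can annotate an error before re-raising it.
  e.code ||= "unknown"
  raise
end
```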
attr_accessor has_more: bool end diff --git a/sig/openai/errors.rbs b/sig/openai/errors.rbs index 08f05bd0..b5c00826 100644 --- a/sig/openai/errors.rbs +++ b/sig/openai/errors.rbs @@ -1,23 +1,23 @@ module OpenAI class Error < StandardError - attr_reader cause: StandardError? + attr_accessor cause: StandardError? end class ConversionError < OpenAI::Error end class APIError < OpenAI::Error - attr_reader url: URI::Generic + attr_accessor url: URI::Generic - attr_reader status: Integer? + attr_accessor status: Integer? - attr_reader body: top? + attr_accessor body: top? - attr_reader code: String? + attr_accessor code: String? - attr_reader param: String? + attr_accessor param: String? - attr_reader type: String? + attr_accessor type: String? def initialize: ( url: URI::Generic, diff --git a/sig/openai/page.rbs b/sig/openai/page.rbs index 37846820..420e2716 100644 --- a/sig/openai/page.rbs +++ b/sig/openai/page.rbs @@ -2,7 +2,7 @@ module OpenAI class Page[Elem] include OpenAI::BasePage[Elem] - attr_accessor data: ::Array[Elem] + attr_accessor data: ::Array[Elem]? attr_accessor object: String end From 9b8102e8631e81bc288cc0c680b3e38c05567100 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 14 Mar 2025 06:21:58 +0000 Subject: [PATCH 014/295] chore: document union variants in yard doc (#16) --- lib/openai/base_model.rb | 13 ++-- .../models/audio/speech_create_params.rb | 6 ++ .../audio/transcription_create_params.rb | 6 ++ .../audio/transcription_create_response.rb | 6 ++ .../models/audio/translation_create_params.rb | 6 ++ .../audio/translation_create_response.rb | 6 ++ .../models/beta/assistant_create_params.rb | 12 ++++ .../beta/assistant_response_format_option.rb | 6 ++ .../models/beta/assistant_stream_event.rb | 6 ++ lib/openai/models/beta/assistant_tool.rb | 6 ++ .../beta/assistant_tool_choice_option.rb | 6 ++ .../models/beta/assistant_update_params.rb | 6 ++ .../models/beta/message_stream_event.rb | 6 ++ .../models/beta/run_step_stream_event.rb | 6 ++ lib/openai/models/beta/run_stream_event.rb | 6 ++ .../beta/thread_create_and_run_params.rb | 30 +++++++++ .../models/beta/thread_create_params.rb | 18 +++++ lib/openai/models/beta/threads/annotation.rb | 6 ++ .../models/beta/threads/annotation_delta.rb | 6 ++ lib/openai/models/beta/threads/message.rb | 6 ++ .../models/beta/threads/message_content.rb | 6 ++ .../beta/threads/message_content_delta.rb | 6 ++ .../threads/message_content_part_param.rb | 6 ++ .../beta/threads/message_create_params.rb | 12 ++++ .../models/beta/threads/run_create_params.rb | 18 +++++ .../runs/code_interpreter_tool_call.rb | 6 ++ .../runs/code_interpreter_tool_call_delta.rb | 6 ++ .../models/beta/threads/runs/run_step.rb | 6 ++ .../beta/threads/runs/run_step_delta.rb | 6 ++ .../models/beta/threads/runs/tool_call.rb | 6 ++ .../beta/threads/runs/tool_call_delta.rb | 6 ++ ...chat_completion_assistant_message_param.rb | 12 ++++ .../chat/chat_completion_content_part.rb | 6 ++ ...chat_completion_developer_message_param.rb | 6 ++ .../chat/chat_completion_message_param.rb | 6 ++ .../chat_completion_prediction_content.rb | 6 ++ .../chat_completion_system_message_param.rb | 6 ++ .../chat_completion_tool_choice_option.rb | 6 ++ .../chat_completion_tool_message_param.rb | 6 ++ .../chat_completion_user_message_param.rb | 6 ++ .../models/chat/completion_create_params.rb | 24 +++++++ lib/openai/models/comparison_filter.rb | 6 ++ lib/openai/models/completion_create_params.rb | 18 +++++ 
lib/openai/models/compound_filter.rb | 6 ++ lib/openai/models/embedding_create_params.rb | 12 ++++ lib/openai/models/file_chunking_strategy.rb | 6 ++ .../models/file_chunking_strategy_param.rb | 6 ++ .../models/fine_tuning/fine_tuning_job.rb | 60 +++++++++++++++++ .../models/fine_tuning/job_create_params.rb | 66 +++++++++++++++++++ .../models/image_create_variation_params.rb | 6 ++ lib/openai/models/image_edit_params.rb | 6 ++ lib/openai/models/image_generate_params.rb | 6 ++ lib/openai/models/moderation_create_params.rb | 12 ++++ .../models/moderation_multi_modal_input.rb | 6 ++ .../models/responses/easy_input_message.rb | 6 ++ .../models/responses/file_search_tool.rb | 6 ++ lib/openai/models/responses/response.rb | 12 ++++ .../response_code_interpreter_tool_call.rb | 6 ++ .../responses/response_computer_tool_call.rb | 6 ++ .../models/responses/response_content.rb | 6 ++ .../response_content_part_added_event.rb | 6 ++ .../response_content_part_done_event.rb | 6 ++ .../responses/response_create_params.rb | 18 +++++ .../response_file_search_tool_call.rb | 6 ++ .../responses/response_format_text_config.rb | 6 ++ .../responses/response_input_content.rb | 6 ++ .../models/responses/response_input_item.rb | 6 ++ .../models/responses/response_item_list.rb | 6 ++ .../models/responses/response_output_item.rb | 6 ++ .../responses/response_output_message.rb | 6 ++ .../models/responses/response_output_text.rb | 6 ++ .../models/responses/response_stream_event.rb | 6 ++ .../response_text_annotation_delta_event.rb | 6 ++ lib/openai/models/responses/tool.rb | 6 ++ .../models/vector_store_search_params.rb | 12 ++++ .../models/vector_store_search_response.rb | 6 ++ .../vector_stores/file_batch_create_params.rb | 6 ++ .../vector_stores/file_create_params.rb | 6 ++ .../vector_stores/file_update_params.rb | 6 ++ .../models/vector_stores/vector_store_file.rb | 6 ++ rbi/lib/openai/base_model.rbi | 9 ++- .../models/audio/speech_create_params.rbi | 5 +- .../audio/transcription_create_params.rbi | 5 +- .../audio/transcription_create_response.rbi | 10 +-- .../audio/translation_create_params.rbi | 5 +- .../audio/translation_create_response.rbi | 10 +-- .../models/beta/assistant_create_params.rbi | 10 ++- .../beta/assistant_response_format_option.rbi | 5 +- .../models/beta/assistant_stream_event.rbi | 5 +- rbi/lib/openai/models/beta/assistant_tool.rbi | 5 +- .../beta/assistant_tool_choice_option.rbi | 5 +- .../models/beta/assistant_update_params.rbi | 5 +- .../models/beta/message_stream_event.rbi | 5 +- .../models/beta/run_step_stream_event.rbi | 5 +- .../openai/models/beta/run_stream_event.rbi | 5 +- .../beta/thread_create_and_run_params.rbi | 42 +++++------- .../models/beta/thread_create_params.rbi | 32 ++++----- .../openai/models/beta/threads/annotation.rbi | 5 +- .../models/beta/threads/annotation_delta.rbi | 5 +- .../openai/models/beta/threads/message.rbi | 5 +- .../models/beta/threads/message_content.rbi | 5 +- .../beta/threads/message_content_delta.rbi | 5 +- .../threads/message_content_part_param.rbi | 5 +- .../beta/threads/message_create_params.rbi | 27 ++++---- .../models/beta/threads/run_create_params.rbi | 32 ++++----- .../runs/code_interpreter_tool_call.rbi | 5 +- .../runs/code_interpreter_tool_call_delta.rbi | 5 +- .../models/beta/threads/runs/run_step.rbi | 5 +- .../beta/threads/runs/run_step_delta.rbi | 5 +- .../models/beta/threads/runs/tool_call.rbi | 5 +- .../beta/threads/runs/tool_call_delta.rbi | 5 +- ...hat_completion_assistant_message_param.rbi | 25 +++---- 
.../chat/chat_completion_content_part.rbi | 5 +- ...hat_completion_developer_message_param.rbi | 8 +-- .../chat/chat_completion_message_param.rbi | 5 +- .../chat_completion_prediction_content.rbi | 8 +-- .../chat_completion_system_message_param.rbi | 8 +-- .../chat_completion_tool_choice_option.rbi | 5 +- .../chat_completion_tool_message_param.rbi | 8 +-- .../chat_completion_user_message_param.rbi | 24 +++---- .../models/chat/completion_create_params.rbi | 20 +++--- rbi/lib/openai/models/comparison_filter.rbi | 5 +- .../models/completion_create_params.rbi | 25 ++----- rbi/lib/openai/models/compound_filter.rbi | 5 +- .../openai/models/embedding_create_params.rbi | 20 ++---- .../openai/models/file_chunking_strategy.rbi | 5 +- .../models/file_chunking_strategy_param.rbi | 5 +- .../models/fine_tuning/fine_tuning_job.rbi | 50 ++++++-------- .../models/fine_tuning/job_create_params.rbi | 55 +++++++--------- .../models/image_create_variation_params.rbi | 5 +- rbi/lib/openai/models/image_edit_params.rbi | 5 +- .../openai/models/image_generate_params.rbi | 5 +- .../models/moderation_create_params.rbi | 10 ++- .../models/moderation_multi_modal_input.rbi | 10 +-- .../models/responses/easy_input_message.rbi | 8 +-- .../models/responses/file_search_tool.rbi | 8 +-- rbi/lib/openai/models/responses/response.rbi | 10 ++- .../response_code_interpreter_tool_call.rbi | 5 +- .../responses/response_computer_tool_call.rbi | 5 +- .../models/responses/response_content.rbi | 5 +- .../response_content_part_added_event.rbi | 7 +- .../response_content_part_done_event.rbi | 7 +- .../responses/response_create_params.rbi | 15 ++--- .../response_file_search_tool_call.rbi | 5 +- .../responses/response_format_text_config.rbi | 5 +- .../responses/response_input_content.rbi | 5 +- .../models/responses/response_input_item.rbi | 5 +- .../models/responses/response_item_list.rbi | 5 +- .../models/responses/response_output_item.rbi | 5 +- .../responses/response_output_message.rbi | 7 +- .../models/responses/response_output_text.rbi | 5 +- .../responses/response_stream_event.rbi | 5 +- .../response_text_annotation_delta_event.rbi | 5 +- rbi/lib/openai/models/responses/tool.rbi | 5 +- .../models/vector_store_search_params.rbi | 13 ++-- .../models/vector_store_search_response.rbi | 5 +- .../file_batch_create_params.rbi | 5 +- .../vector_stores/file_create_params.rbi | 5 +- .../vector_stores/file_update_params.rbi | 5 +- .../vector_stores/vector_store_file.rbi | 5 +- sig/openai/base_model.rbs | 4 +- .../models/audio/speech_create_params.rbs | 2 +- .../audio/transcription_create_params.rbs | 2 +- .../audio/transcription_create_response.rbs | 2 +- .../audio/translation_create_params.rbs | 2 +- .../audio/translation_create_response.rbs | 2 +- .../models/beta/assistant_create_params.rbs | 4 +- .../beta/assistant_response_format_option.rbs | 2 +- .../models/beta/assistant_stream_event.rbs | 2 +- sig/openai/models/beta/assistant_tool.rbs | 2 +- .../beta/assistant_tool_choice_option.rbs | 2 +- .../models/beta/assistant_update_params.rbs | 2 +- .../models/beta/message_stream_event.rbs | 2 +- .../models/beta/run_step_stream_event.rbs | 2 +- sig/openai/models/beta/run_stream_event.rbs | 2 +- .../beta/thread_create_and_run_params.rbs | 10 +-- .../models/beta/thread_create_params.rbs | 6 +- sig/openai/models/beta/threads/annotation.rbs | 2 +- .../models/beta/threads/annotation_delta.rbs | 2 +- sig/openai/models/beta/threads/message.rbs | 2 +- .../models/beta/threads/message_content.rbs | 2 +- .../beta/threads/message_content_delta.rbs | 2 +- 
.../threads/message_content_part_param.rbs | 2 +- .../beta/threads/message_create_params.rbs | 4 +- .../models/beta/threads/run_create_params.rbs | 6 +- .../runs/code_interpreter_tool_call.rbs | 2 +- .../runs/code_interpreter_tool_call_delta.rbs | 2 +- .../models/beta/threads/runs/run_step.rbs | 2 +- .../beta/threads/runs/run_step_delta.rbs | 2 +- .../models/beta/threads/runs/tool_call.rbs | 2 +- .../beta/threads/runs/tool_call_delta.rbs | 2 +- ...hat_completion_assistant_message_param.rbs | 4 +- .../chat/chat_completion_content_part.rbs | 2 +- ...hat_completion_developer_message_param.rbs | 2 +- .../chat/chat_completion_message_param.rbs | 2 +- .../chat_completion_prediction_content.rbs | 2 +- .../chat_completion_system_message_param.rbs | 2 +- .../chat_completion_tool_choice_option.rbs | 2 +- .../chat_completion_tool_message_param.rbs | 2 +- .../chat_completion_user_message_param.rbs | 2 +- .../models/chat/completion_create_params.rbs | 8 +-- sig/openai/models/comparison_filter.rbs | 2 +- .../models/completion_create_params.rbs | 6 +- sig/openai/models/compound_filter.rbs | 2 +- sig/openai/models/embedding_create_params.rbs | 4 +- sig/openai/models/file_chunking_strategy.rbs | 2 +- .../models/file_chunking_strategy_param.rbs | 2 +- .../models/fine_tuning/fine_tuning_job.rbs | 20 +++--- .../models/fine_tuning/job_create_params.rbs | 22 +++---- .../models/image_create_variation_params.rbs | 2 +- sig/openai/models/image_edit_params.rbs | 2 +- sig/openai/models/image_generate_params.rbs | 2 +- .../models/moderation_create_params.rbs | 4 +- .../models/moderation_multi_modal_input.rbs | 2 +- .../models/responses/easy_input_message.rbs | 2 +- .../models/responses/file_search_tool.rbs | 2 +- sig/openai/models/responses/response.rbs | 4 +- .../response_code_interpreter_tool_call.rbs | 2 +- .../responses/response_computer_tool_call.rbs | 2 +- .../models/responses/response_content.rbs | 2 +- .../response_content_part_added_event.rbs | 2 +- .../response_content_part_done_event.rbs | 2 +- .../responses/response_create_params.rbs | 6 +- .../response_file_search_tool_call.rbs | 2 +- .../responses/response_format_text_config.rbs | 2 +- .../responses/response_input_content.rbs | 2 +- .../models/responses/response_input_item.rbs | 2 +- .../models/responses/response_item_list.rbs | 2 +- .../models/responses/response_output_item.rbs | 2 +- .../responses/response_output_message.rbs | 2 +- .../models/responses/response_output_text.rbs | 2 +- .../responses/response_stream_event.rbs | 2 +- .../response_text_annotation_delta_event.rbs | 2 +- sig/openai/models/responses/tool.rbs | 2 +- .../models/vector_store_search_params.rbs | 4 +- .../models/vector_store_search_response.rbs | 2 +- .../file_batch_create_params.rbs | 2 +- .../vector_stores/file_create_params.rbs | 2 +- .../vector_stores/file_update_params.rbs | 2 +- .../vector_stores/vector_store_file.rbs | 2 +- 240 files changed, 1140 insertions(+), 610 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 09b3d852..19722030 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -443,13 +443,18 @@ class << self # @api private # - # All of the specified variants for this union. - # # @return [Array] - protected def variants + protected def derefed_variants @known_variants.map { |key, variant_fn| [key, variant_fn.call] } end + # All of the specified variants for this union. 
+ # + # @return [Array] + def variants + derefed_variants.map(&:last) + end + # @api private # # @param property [Symbol] @@ -529,7 +534,7 @@ def self.===(other) # # @return [Boolean] def self.==(other) - other.is_a?(Class) && other <= OpenAI::Union && other.variants == variants + other.is_a?(Class) && other <= OpenAI::Union && other.derefed_variants == derefed_variants end class << self diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index fa2f4155..b7e77b57 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -73,6 +73,12 @@ class Model < OpenAI::Union # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1` or `tts-1-hd` variant enum: -> { OpenAI::Models::Audio::SpeechModel } + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] + # def variants; end + # end end # @abstract diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 9be2124c..d6d9f071 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -122,6 +122,12 @@ class Model < OpenAI::Union # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. variant enum: -> { OpenAI::Models::AudioModel } + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # def variants; end + # end end # @abstract diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 1ff3100f..2ad0d933 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -13,6 +13,12 @@ class TranscriptionCreateResponse < OpenAI::Union # Represents a verbose json transcription response returned by model, based on the provided input. variant -> { OpenAI::Models::Audio::TranscriptionVerbose } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] + # def variants; end + # end end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 556b8419..4fd4a4dc 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -81,6 +81,12 @@ class Model < OpenAI::Union # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. 
variant enum: -> { OpenAI::Models::AudioModel } + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # def variants; end + # end end end end diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 25a9c3e1..94020236 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -8,6 +8,12 @@ class TranslationCreateResponse < OpenAI::Union variant -> { OpenAI::Models::Audio::Translation } variant -> { OpenAI::Models::Audio::TranslationVerbose } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index de543e84..f66edb15 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -169,6 +169,12 @@ class Model < OpenAI::Union # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. variant enum: -> { OpenAI::Models::ChatModel } + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # def variants; end + # end end class ToolResources < OpenAI::BaseModel @@ -384,6 +390,12 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 18671049..1e0036f2 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -41,6 +41,12 @@ class AssistantResponseFormatOption < OpenAI::Union # JSON Schema response format. Used to generate structured JSON responses. # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
variant -> { OpenAI::Models::ResponseFormatJSONSchema } + + # @!parse + # class << self + # # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 80632016..fdc598d7 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -725,6 +725,12 @@ class ErrorEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 2b17c0d2..0ea9bc07 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -12,6 +12,12 @@ class AssistantTool < OpenAI::Union variant :file_search, -> { OpenAI::Models::Beta::FileSearchTool } variant :function, -> { OpenAI::Models::Beta::FunctionTool } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 84736979..62a2c795 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -32,6 +32,12 @@ class Auto < OpenAI::Enum finalize! 
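With PATCH 014 applied, each union advertises its members through a documented `variants` class method, so the member types can be inspected at runtime instead of being read out of YARD comments. A minimal sketch, assuming the `variants` reader added to `OpenAI::Union` earlier in this patch:

```ruby
require "openai"

# Prints each type the union will try to coerce values into, e.g.
# OpenAI::Models::Beta::CodeInterpreterTool and the other assistant tools.
OpenAI::Models::Beta::AssistantTool.variants.each { |variant| puts(variant) }
```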
end + + # @!parse + # class << self + # # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 5cedcb79..5a89f162 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -215,6 +215,12 @@ class AssistantSupportedModels < OpenAI::Enum finalize! end + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels)] + # def variants; end + # end end class ToolResources < OpenAI::BaseModel diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 32e6ee21..b110bfed 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -158,6 +158,12 @@ class ThreadMessageIncomplete < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 645de8bd..82c7266e 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -210,6 +210,12 @@ class ThreadRunStepExpired < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 1e792b0e..f8548343 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -290,6 +290,12 @@ class ThreadRunExpired < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] + # def variants; end + # end end end end diff --git 
a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index bb147096..72d1ae24 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -221,6 +221,12 @@ class Model < OpenAI::Union # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. variant enum: -> { OpenAI::Models::ChatModel } + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # def variants; end + # end end class Thread < OpenAI::BaseModel @@ -328,6 +334,12 @@ class Content < OpenAI::Union # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray + + # @!parse + # class << self + # # @return [Array(String, Array)] + # def variants; end + # end end # @abstract @@ -398,6 +410,12 @@ class FileSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] + # def variants; end + # end end end end @@ -616,6 +634,12 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def variants; end + # end end end end @@ -706,6 +730,12 @@ class Tool < OpenAI::Union variant -> { OpenAI::Models::Beta::FileSearchTool } variant -> { OpenAI::Models::Beta::FunctionTool } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] + # def variants; end + # end end class TruncationStrategy < OpenAI::BaseModel diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index fc3e6299..6d67b56a 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -107,6 +107,12 @@ class Content < OpenAI::Union # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). 
variant OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray + + # @!parse + # class << self + # # @return [Array(String, Array)] + # def variants; end + # end end # @abstract @@ -177,6 +183,12 @@ class FileSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] + # def variants; end + # end end end end @@ -394,6 +406,12 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 3e21c302..3f0a547d 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -17,6 +17,12 @@ class Annotation < OpenAI::Union # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index dfaf1d9e..6b22d5be 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -17,6 +17,12 @@ class AnnotationDelta < OpenAI::Union # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] + # def variants; end + # end end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 71c118cd..0a6b9d1b 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -192,6 +192,12 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] + # def variants; end + # end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index df27bb65..7e6e4698 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -22,6 +22,12 @@ class MessageContent < OpenAI::Union # The refusal content generated by the assistant. 
        variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalContentBlock }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb
index f2e05dfb..bcf970c1 100644
--- a/lib/openai/models/beta/threads/message_content_delta.rb
+++ b/lib/openai/models/beta/threads/message_content_delta.rb
@@ -22,6 +22,12 @@ class MessageContentDelta < OpenAI::Union
        # References an image URL in the content of a message.
        variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDeltaBlock }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb
index 86766a56..74766529 100644
--- a/lib/openai/models/beta/threads/message_content_part_param.rb
+++ b/lib/openai/models/beta/threads/message_content_part_param.rb
@@ -19,6 +19,12 @@ class MessageContentPartParam < OpenAI::Union
        # The text content that is part of a message.
        variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlockParam }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb
index e89ac3e5..b0a54ec9 100644
--- a/lib/openai/models/beta/threads/message_create_params.rb
+++ b/lib/openai/models/beta/threads/message_create_params.rb
@@ -67,6 +67,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models).
        variant OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -137,6 +143,12 @@ class FileSearch < OpenAI::BaseModel
            #   def initialize: (Hash | OpenAI::BaseModel) -> void
          end
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)]
+          #     def variants; end
+          #   end
        end
      end
    end
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb
index 57a11b7c..c5c473e5 100644
--- a/lib/openai/models/beta/threads/run_create_params.rb
+++ b/lib/openai/models/beta/threads/run_create_params.rb
@@ -296,6 +296,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models).
        variant OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -366,6 +372,12 @@ class FileSearch < OpenAI::BaseModel
            #   def initialize: (Hash | OpenAI::BaseModel) -> void
          end
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)]
+          #     def variants; end
+          #   end
        end
      end
    end
@@ -381,6 +393,12 @@ class Model < OpenAI::Union
        # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used.
        variant enum: -> { OpenAI::Models::ChatModel }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
+        #     def variants; end
+        #   end
      end

      class TruncationStrategy < OpenAI::BaseModel
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
index fcde0967..0313a163 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
@@ -137,6 +137,12 @@ class Image < OpenAI::BaseModel
            #   def initialize: (Hash | OpenAI::BaseModel) -> void
          end
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
index ce8315ac..9e9097b3 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
@@ -96,6 +96,12 @@ class Output < OpenAI::Union
        variant :logs, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs }

        variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb
index ae1e41c3..4a7aa3e2 100644
--- a/lib/openai/models/beta/threads/runs/run_step.rb
+++ b/lib/openai/models/beta/threads/runs/run_step.rb
@@ -219,6 +219,12 @@ class StepDetails < OpenAI::Union
        # Details of the tool call.
        variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)]
+        #     def variants; end
+        #   end
      end

      # @abstract
diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb
index 3f19839e..c8ba4aa1 100644
--- a/lib/openai/models/beta/threads/runs/run_step_delta.rb
+++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb
@@ -36,6 +36,12 @@ class StepDetails < OpenAI::Union
        # Details of the tool call.
        variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb
index d5d780ad..20cc29a6 100644
--- a/lib/openai/models/beta/threads/runs/tool_call.rb
+++ b/lib/openai/models/beta/threads/runs/tool_call.rb
@@ -17,6 +17,12 @@ class ToolCall < OpenAI::Union
        variant :file_search, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall }

        variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb
index a1b92351..3dae6b9b 100644
--- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb
+++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb
@@ -17,6 +17,12 @@ class ToolCallDelta < OpenAI::Union
        variant :file_search, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta }

        variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb
index e3682e55..0ab1ed53 100644
--- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb
@@ -130,7 +130,19 @@ class ArrayOfContentPart < OpenAI::Union
          variant :text, -> { OpenAI::Models::Chat::ChatCompletionContentPartText }

          variant :refusal, -> { OpenAI::Models::Chat::ChatCompletionContentPartRefusal }
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)]
+          #     def variants; end
+          #   end
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end

      # @deprecated
diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb
index 836e1222..f9fede31 100644
--- a/lib/openai/models/chat/chat_completion_content_part.rb
+++ b/lib/openai/models/chat/chat_completion_content_part.rb
@@ -87,6 +87,12 @@ class File < OpenAI::BaseModel
            #   def initialize: (Hash | OpenAI::BaseModel) -> void
          end
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)]
+        #     def variants; end
+        #   end
      end
    end
diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb
index 7735915f..d7d8b7c4 100644
--- a/lib/openai/models/chat/chat_completion_developer_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb
@@ -51,6 +51,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type. For developer messages, only type `text` is supported.
        variant OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb
index dac398d3..811b7f8c 100644
--- a/lib/openai/models/chat/chat_completion_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_message_param.rb
@@ -31,6 +31,12 @@ class ChatCompletionMessageParam < OpenAI::Union
      variant :tool, -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam }

      variant :function, -> { OpenAI::Models::Chat::ChatCompletionFunctionMessageParam }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)]
+      #     def variants; end
+      #   end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb
index b7cb311e..d5aca19d 100644
--- a/lib/openai/models/chat/chat_completion_prediction_content.rb
+++ b/lib/openai/models/chat/chat_completion_prediction_content.rb
@@ -44,6 +44,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs.
        variant OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb
index 188aa0be..658bb497 100644
--- a/lib/openai/models/chat/chat_completion_system_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_system_message_param.rb
@@ -51,6 +51,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type. For system messages, only type `text` is supported.
        variant OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb
index c704bffb..28f7750e 100644
--- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb
+++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb
@@ -33,6 +33,12 @@ class Auto < OpenAI::Enum
        finalize!
      end
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)]
+      #     def variants; end
+      #   end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb
index 9ec24f99..6ba3a959 100644
--- a/lib/openai/models/chat/chat_completion_tool_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb
@@ -42,6 +42,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type. For tool messages, only type `text` is supported.
        variant OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb
index 9f51546a..18451c7b 100644
--- a/lib/openai/models/chat/chat_completion_user_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_user_message_param.rb
@@ -50,6 +50,12 @@ class Content < OpenAI::Union
        # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs.
        variant OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb
index 4426c2a5..49e175c1 100644
--- a/lib/openai/models/chat/completion_create_params.rb
+++ b/lib/openai/models/chat/completion_create_params.rb
@@ -432,6 +432,12 @@ class Model < OpenAI::Union
        # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models)
        # to browse and compare available models.
        variant enum: -> { OpenAI::Models::ChatModel }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -470,6 +476,12 @@ class FunctionCallMode < OpenAI::Enum
        finalize!
      end
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)]
+      #     def variants; end
+      #   end
    end

    # @deprecated
@@ -551,6 +563,12 @@ class ResponseFormat < OpenAI::Union
        # model will not generate JSON without a system or user message instructing it
        # to do so.
        variant -> { OpenAI::Models::ResponseFormatJSONObject }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -586,6 +604,12 @@ class Stop < OpenAI::Union
        variant String

        variant OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end

      class WebSearchOptions < OpenAI::BaseModel
diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb
index 5e74b3b4..17be219d 100644
--- a/lib/openai/models/comparison_filter.rb
+++ b/lib/openai/models/comparison_filter.rb
@@ -72,6 +72,12 @@ class Value < OpenAI::Union
        variant Float

        variant OpenAI::BooleanModel
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Float, Boolean)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb
index b2835980..a65b7051 100644
--- a/lib/openai/models/completion_create_params.rb
+++ b/lib/openai/models/completion_create_params.rb
@@ -255,6 +255,12 @@ class Preset < OpenAI::Enum
        finalize!
      end
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -279,6 +285,12 @@ class Prompt < OpenAI::Union
      variant OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray

      variant OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -291,6 +303,12 @@ class Stop < OpenAI::Union
      variant String

      variant OpenAI::Models::CompletionCreateParams::Stop::StringArray
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb
index 68d4e0a2..11452bfe 100644
--- a/lib/openai/models/compound_filter.rb
+++ b/lib/openai/models/compound_filter.rb
@@ -35,6 +35,12 @@ class Filter < OpenAI::Union
      variant -> { OpenAI::Models::ComparisonFilter }

      variant OpenAI::Unknown
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::ComparisonFilter, Object)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb
index 7bfe8e35..4d126d18 100644
--- a/lib/openai/models/embedding_create_params.rb
+++ b/lib/openai/models/embedding_create_params.rb
@@ -104,6 +104,12 @@ class Input < OpenAI::Union
      # The array of arrays containing integers that will be turned into an embedding.
      variant OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -118,6 +124,12 @@ class Model < OpenAI::Union
      # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them.
      variant enum: -> { OpenAI::Models::EmbeddingModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb
index 948ebefc..24c4dd7e 100644
--- a/lib/openai/models/file_chunking_strategy.rb
+++ b/lib/openai/models/file_chunking_strategy.rb
@@ -12,6 +12,12 @@ class FileChunkingStrategy < OpenAI::Union
      # This is returned when the chunking strategy is unknown. Typically, this is because the file was indexed before the `chunking_strategy` concept was introduced in the API.
      variant :other, -> { OpenAI::Models::OtherFileChunkingStrategyObject }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb
index 77ca2e6a..d9e6a634 100644
--- a/lib/openai/models/file_chunking_strategy_param.rb
+++ b/lib/openai/models/file_chunking_strategy_param.rb
@@ -14,6 +14,12 @@ class FileChunkingStrategyParam < OpenAI::Union
      # Customize your own chunking strategy by setting chunk size and chunk overlap.
      variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObjectParam }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb
index 86996d90..68876089 100644
--- a/lib/openai/models/fine_tuning/fine_tuning_job.rb
+++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb
@@ -280,6 +280,12 @@ class BatchSize < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -290,6 +296,12 @@ class LearningRateMultiplier < OpenAI::Union
          variant const: :auto

          variant Float
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Float)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -300,6 +312,12 @@ class NEpochs < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end
      end
@@ -449,6 +467,12 @@ class BatchSize < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -459,6 +483,12 @@ class Beta < OpenAI::Union
          variant const: :auto

          variant Float
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Float)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -469,6 +499,12 @@ class LearningRateMultiplier < OpenAI::Union
          variant const: :auto

          variant Float
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Float)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -479,6 +515,12 @@ class NEpochs < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end
      end
    end
@@ -560,6 +602,12 @@ class BatchSize < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -570,6 +618,12 @@ class LearningRateMultiplier < OpenAI::Union
          variant const: :auto

          variant Float
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Float)]
+          #     def variants; end
+          #   end
        end

        # @abstract
@@ -580,6 +634,12 @@ class NEpochs < OpenAI::Union
          variant const: :auto

          variant Integer
+
+          # @!parse
+          #   class << self
+          #     # @return [Array(Symbol, :auto, Integer)]
+          #     def variants; end
+          #   end
        end
      end
    end
diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb
index 988c7703..db4866c1 100644
--- a/lib/openai/models/fine_tuning/job_create_params.rb
+++ b/lib/openai/models/fine_tuning/job_create_params.rb
@@ -165,6 +165,12 @@ class Preset < OpenAI::Enum
        finalize!
      end
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::Preset)]
+      #     def variants; end
+      #   end
    end

    # @deprecated
@@ -224,6 +230,12 @@ class BatchSize < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -234,6 +246,12 @@ class LearningRateMultiplier < OpenAI::Union
        variant const: :auto

        variant Float
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Float)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -244,6 +262,12 @@ class NEpochs < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end
    end
@@ -455,6 +479,12 @@ class BatchSize < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -465,6 +495,12 @@ class Beta < OpenAI::Union
        variant const: :auto

        variant Float
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Float)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -475,6 +511,12 @@ class LearningRateMultiplier < OpenAI::Union
        variant const: :auto

        variant Float
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Float)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -485,6 +527,12 @@ class NEpochs < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end
    end
  end
@@ -566,6 +614,12 @@ class BatchSize < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -576,6 +630,12 @@ class LearningRateMultiplier < OpenAI::Union
        variant const: :auto

        variant Float
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Float)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -586,6 +646,12 @@ class NEpochs < OpenAI::Union
        variant const: :auto

        variant Integer
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, :auto, Integer)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb
index df16e7e4..d4546f63 100644
--- a/lib/openai/models/image_create_variation_params.rb
+++ b/lib/openai/models/image_create_variation_params.rb
@@ -79,6 +79,12 @@ class Model < OpenAI::Union
      # The model to use for image generation. Only `dall-e-2` is supported at this time.
      variant enum: -> { OpenAI::Models::ImageModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::ImageModel)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb
index 87317727..14c8b8cf 100644
--- a/lib/openai/models/image_edit_params.rb
+++ b/lib/openai/models/image_edit_params.rb
@@ -110,6 +110,12 @@ class Model < OpenAI::Union
      # The model to use for image generation. Only `dall-e-2` is supported at this time.
      variant enum: -> { OpenAI::Models::ImageModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::ImageModel)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb
index 9f1f00e2..577665bd 100644
--- a/lib/openai/models/image_generate_params.rb
+++ b/lib/openai/models/image_generate_params.rb
@@ -112,6 +112,12 @@ class Model < OpenAI::Union
      # The model to use for image generation.
      variant enum: -> { OpenAI::Models::ImageModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::ImageModel)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb
index 17a49eb2..e0789618 100644
--- a/lib/openai/models/moderation_create_params.rb
+++ b/lib/openai/models/moderation_create_params.rb
@@ -53,6 +53,12 @@ class Input < OpenAI::Union
      # An array of multi-modal inputs to the moderation model.
      variant OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array, Array)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -68,6 +74,12 @@ class Model < OpenAI::Union
      # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and learn about
      # available models [here](https://platform.openai.com/docs/models#moderation).
      variant enum: -> { OpenAI::Models::ModerationModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb
index d5f91171..47271a66 100644
--- a/lib/openai/models/moderation_multi_modal_input.rb
+++ b/lib/openai/models/moderation_multi_modal_input.rb
@@ -13,6 +13,12 @@ class ModerationMultiModalInput < OpenAI::Union
      # An object describing text to classify.
      variant :text, -> { OpenAI::Models::ModerationTextInput }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb
index 280bc258..e4effc89 100644
--- a/lib/openai/models/responses/easy_input_message.rb
+++ b/lib/openai/models/responses/easy_input_message.rb
@@ -54,6 +54,12 @@ class Content < OpenAI::Union
        # A list of one or many input items to the model, containing different content
        # types.
        variant -> { OpenAI::Models::Responses::ResponseInputMessageContentList }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Array)]
+        #     def variants; end
+        #   end
      end

      # @abstract
diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb
index 5c138fa3..c2ba4177 100644
--- a/lib/openai/models/responses/file_search_tool.rb
+++ b/lib/openai/models/responses/file_search_tool.rb
@@ -71,6 +71,12 @@ class Filters < OpenAI::Union
        # Combine multiple filters using `and` or `or`.
        variant -> { OpenAI::Models::CompoundFilter }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)]
+        #     def variants; end
+        #   end
      end

      class RankingOptions < OpenAI::BaseModel
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb
index 4ff08aa8..98b2c145 100644
--- a/lib/openai/models/responses/response.rb
+++ b/lib/openai/models/responses/response.rb
@@ -315,6 +315,12 @@ class Model < OpenAI::Union
        # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models)
        # to browse and compare available models.
        variant enum: -> { OpenAI::Models::ChatModel }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
+        #     def variants; end
+        #   end
      end

      # @abstract
@@ -339,6 +345,12 @@ class ToolChoice < OpenAI::Union
        # Use this option to force the model to call a specific function.
        variant -> { OpenAI::Models::Responses::ToolChoiceFunction }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)]
+        #     def variants; end
+        #   end
      end

      # @abstract
diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb
index 25181e47..e93e62cf 100644
--- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb
+++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb
@@ -129,6 +129,12 @@ class File < OpenAI::BaseModel
            #   def initialize: (Hash | OpenAI::BaseModel) -> void
          end
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)]
+        #     def variants; end
+        #   end
      end

      # @abstract
diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb
index 268cd184..20bd85ae 100644
--- a/lib/openai/models/responses/response_computer_tool_call.rb
+++ b/lib/openai/models/responses/response_computer_tool_call.rb
@@ -399,6 +399,12 @@ class Wait < OpenAI::BaseModel
          #   def initialize: (Hash | OpenAI::BaseModel) -> void
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)]
+        #     def variants; end
+        #   end
      end

      class PendingSafetyCheck < OpenAI::BaseModel
diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb
index 379aaed8..72456bac 100644
--- a/lib/openai/models/responses/response_content.rb
+++ b/lib/openai/models/responses/response_content.rb
@@ -21,6 +21,12 @@ class ResponseContent < OpenAI::Union
      # A refusal from the model.
      variant -> { OpenAI::Models::Responses::ResponseOutputRefusal }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb
index 094f7f60..1fa8be8b 100644
--- a/lib/openai/models/responses/response_content_part_added_event.rb
+++ b/lib/openai/models/responses/response_content_part_added_event.rb
@@ -58,6 +58,12 @@ class Part < OpenAI::Union
        # A refusal from the model.
        variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb
index 33a8cedb..0911d697 100644
--- a/lib/openai/models/responses/response_content_part_done_event.rb
+++ b/lib/openai/models/responses/response_content_part_done_event.rb
@@ -58,6 +58,12 @@ class Part < OpenAI::Union
        # A refusal from the model.
        variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb
index 94db6d99..4c6c485b 100644
--- a/lib/openai/models/responses/response_create_params.rb
+++ b/lib/openai/models/responses/response_create_params.rb
@@ -261,6 +261,12 @@ class Input < OpenAI::Union
      # A list of one or many input items to the model, containing
      # different content types.
      variant -> { OpenAI::Models::Responses::ResponseInput }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -278,6 +284,12 @@ class Model < OpenAI::Union
      # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models)
      # to browse and compare available models.
      variant enum: -> { OpenAI::Models::ChatModel }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -302,6 +314,12 @@ class ToolChoice < OpenAI::Union
      # Use this option to force the model to call a specific function.
      variant -> { OpenAI::Models::Responses::ToolChoiceFunction }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)]
+      #     def variants; end
+      #   end
    end

    # @abstract
diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb
index eb1bd637..8054ec60 100644
--- a/lib/openai/models/responses/response_file_search_tool_call.rb
+++ b/lib/openai/models/responses/response_file_search_tool_call.rb
@@ -137,6 +137,12 @@ class Attribute < OpenAI::Union
        variant Float

        variant OpenAI::BooleanModel
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(String, Float, Boolean)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb
index beac92f7..6749d6bc 100644
--- a/lib/openai/models/responses/response_format_text_config.rb
+++ b/lib/openai/models/responses/response_format_text_config.rb
@@ -33,6 +33,12 @@ class ResponseFormatTextConfig < OpenAI::Union
      # model will not generate JSON without a system or user message instructing it
      # to do so.
      variant :json_object, -> { OpenAI::Models::ResponseFormatJSONObject }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb
index 1ce3a1ca..8d6226d4 100644
--- a/lib/openai/models/responses/response_input_content.rb
+++ b/lib/openai/models/responses/response_input_content.rb
@@ -17,6 +17,12 @@ class ResponseInputContent < OpenAI::Union
      # A file input to the model.
      variant :input_file, -> { OpenAI::Models::Responses::ResponseInputFile }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb
index c94a979c..cd5d395b 100644
--- a/lib/openai/models/responses/response_input_item.rb
+++ b/lib/openai/models/responses/response_input_item.rb
@@ -379,6 +379,12 @@ class ItemReference < OpenAI::BaseModel
          #   def initialize: (Hash | OpenAI::BaseModel) -> void
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb
index 595ec411..7890dc3a 100644
--- a/lib/openai/models/responses/response_item_list.rb
+++ b/lib/openai/models/responses/response_item_list.rb
@@ -370,6 +370,12 @@ class Status < OpenAI::Enum
            finalize!
          end
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb
index 2a71d21e..2ed933fd 100644
--- a/lib/openai/models/responses/response_output_item.rb
+++ b/lib/openai/models/responses/response_output_item.rb
@@ -31,6 +31,12 @@ class ResponseOutputItem < OpenAI::Union
      # A description of the chain of thought used by a reasoning model while generating
      # a response.
      variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb
index 7fcb4c6a..1dee0300 100644
--- a/lib/openai/models/responses/response_output_message.rb
+++ b/lib/openai/models/responses/response_output_message.rb
@@ -60,6 +60,12 @@ class Content < OpenAI::Union
        # A refusal from the model.
        variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal }
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)]
+        #     def variants; end
+        #   end
      end

      # @abstract
diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb
index 8e83fef4..175d5eda 100644
--- a/lib/openai/models/responses/response_output_text.rb
+++ b/lib/openai/models/responses/response_output_text.rb
@@ -155,6 +155,12 @@ class FilePath < OpenAI::BaseModel
          #   def initialize: (Hash | OpenAI::BaseModel) -> void
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb
index 571c3d7a..85fe7a25 100644
--- a/lib/openai/models/responses/response_stream_event.rb
+++ b/lib/openai/models/responses/response_stream_event.rb
@@ -120,6 +120,12 @@ class ResponseStreamEvent < OpenAI::Union
      # Emitted when a web search call is executing.
      variant :"response.web_search_call.searching", -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb
index 27041443..4d980266 100644
--- a/lib/openai/models/responses/response_text_annotation_delta_event.rb
+++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb
@@ -189,6 +189,12 @@ class FilePath < OpenAI::BaseModel
          #   def initialize: (Hash | OpenAI::BaseModel) -> void
        end
+
+        # @!parse
+        #   class << self
+        #     # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)]
+        #     def variants; end
+        #   end
      end
    end
  end
diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb
index a97afcb0..9093989e 100644
--- a/lib/openai/models/responses/tool.rb
+++ b/lib/openai/models/responses/tool.rb
@@ -26,6 +26,12 @@ class Tool < OpenAI::Union
      # This tool searches the web for relevant results to use in a response.
      # Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search).
      variant -> { OpenAI::Models::Responses::WebSearchTool }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb
index 268a7e7d..30d37581 100644
--- a/lib/openai/models/vector_store_search_params.rb
+++ b/lib/openai/models/vector_store_search_params.rb
@@ -85,6 +85,12 @@ class Query < OpenAI::Union
      variant String

      variant OpenAI::Models::VectorStoreSearchParams::Query::StringArray
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Array)]
+      #     def variants; end
+      #   end
    end

    # @abstract
@@ -96,6 +102,12 @@ class Filters < OpenAI::Union
      # Combine multiple filters using `and` or `or`.
      variant -> { OpenAI::Models::CompoundFilter }
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)]
+      #     def variants; end
+      #   end
    end

    class RankingOptions < OpenAI::BaseModel
diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb
index bc92a2f8..233cdad0 100644
--- a/lib/openai/models/vector_store_search_response.rb
+++ b/lib/openai/models/vector_store_search_response.rb
@@ -57,6 +57,12 @@ class Attribute < OpenAI::Union
      variant Float

      variant OpenAI::BooleanModel
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Float, Boolean)]
+      #     def variants; end
+      #   end
    end

    class Content < OpenAI::BaseModel
diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb
index fbe67069..e35057be 100644
--- a/lib/openai/models/vector_stores/file_batch_create_params.rb
+++ b/lib/openai/models/vector_stores/file_batch_create_params.rb
@@ -56,6 +56,12 @@ class Attribute < OpenAI::Union
      variant Float

      variant OpenAI::BooleanModel
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Float, Boolean)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb
index a303415b..3e7c7817 100644
--- a/lib/openai/models/vector_stores/file_create_params.rb
+++ b/lib/openai/models/vector_stores/file_create_params.rb
@@ -56,6 +56,12 @@ class Attribute < OpenAI::Union
      variant Float

      variant OpenAI::BooleanModel
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Float, Boolean)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb
index 5b0b4e3d..6b9100d4 100644
--- a/lib/openai/models/vector_stores/file_update_params.rb
+++ b/lib/openai/models/vector_stores/file_update_params.rb
@@ -41,6 +41,12 @@ class Attribute < OpenAI::Union
      variant Float

      variant OpenAI::BooleanModel
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Float, Boolean)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb
index 15841d27..5aa034c1 100644
--- a/lib/openai/models/vector_stores/vector_store_file.rb
+++ b/lib/openai/models/vector_stores/vector_store_file.rb
@@ -162,6 +162,12 @@ class Attribute < OpenAI::Union
      variant Float

      variant OpenAI::BooleanModel
+
+      # @!parse
+      #   class << self
+      #     # @return [Array(String, Float, Boolean)]
+      #     def variants; end
+      #   end
    end
  end
end
diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi
index b6422096..6fa7ec26 100644
--- a/rbi/lib/openai/base_model.rbi
+++ b/rbi/lib/openai/base_model.rbi
@@ -223,10 +223,13 @@ module OpenAI
    end

    # @api private
-
+    sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) }
+    protected def derefed_variants
+    end
+    # All of the specified variants for this union.
-    sig { overridable.returns(T::Array[[T.nilable(Symbol), T.anything]]) }
-    protected def variants
+    sig { overridable.returns(T::Array[T.anything]) }
+    def variants
    end

    # @api private
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi
index feb3ae70..44c40dc2 100644
--- a/rbi/lib/openai/models/audio/speech_create_params.rbi
+++ b/rbi/lib/openai/models/audio/speech_create_params.rbi
@@ -94,9 +94,8 @@ module OpenAI
        abstract!

        class << self
-          # @api private
-          sig { override.returns([[NilClass, String], [NilClass, Symbol]]) }
-          private def variants
+          sig { override.returns([String, Symbol]) }
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi
index cc5e0e40..4423fd4b 100644
--- a/rbi/lib/openai/models/audio/transcription_create_params.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi
@@ -135,9 +135,8 @@ module OpenAI
        abstract!

        class << self
-          # @api private
-          sig { override.returns([[NilClass, String], [NilClass, Symbol]]) }
-          private def variants
+          sig { override.returns([String, Symbol]) }
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi
index 49982089..a54d08ec 100644
--- a/rbi/lib/openai/models/audio/transcription_create_response.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi
@@ -9,14 +9,8 @@ module OpenAI
        abstract!

        class << self
-          # @api private
-          sig do
-            override
-              .returns(
-                [[NilClass, OpenAI::Models::Audio::Transcription], [NilClass, OpenAI::Models::Audio::TranscriptionVerbose]]
-              )
-          end
-          private def variants
+          sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) }
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi
index e1a51573..6b3f5fca 100644
--- a/rbi/lib/openai/models/audio/translation_create_params.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_params.rbi
@@ -98,9 +98,8 @@ module OpenAI
        abstract!

        class << self
-          # @api private
-          sig { override.returns([[NilClass, String], [NilClass, Symbol]]) }
-          private def variants
+          sig { override.returns([String, Symbol]) }
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi
index 79f531fc..44b2c2f8 100644
--- a/rbi/lib/openai/models/audio/translation_create_response.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_response.rbi
@@ -7,14 +7,8 @@ module OpenAI
        abstract!

        class << self
-          # @api private
-          sig do
-            override
-              .returns(
-                [[NilClass, OpenAI::Models::Audio::Translation], [NilClass, OpenAI::Models::Audio::TranslationVerbose]]
-              )
-          end
-          private def variants
+          sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) }
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi
index 37fd1121..c37ed2d4 100644
--- a/rbi/lib/openai/models/beta/assistant_create_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi
@@ -307,9 +307,8 @@ module OpenAI
        abstract!
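The `base_model.rbi` change above is the heart of this patch's typing story: `variants` becomes part of the public union API and returns the variant types themselves, while the internal `[tag, type]` pairs move to the new protected `derefed_variants`. A minimal usage sketch, assuming only the return shapes documented in these signatures (illustrative commentary, not part of the applied diff):

    # Introspect which concrete types a union response can decode into.
    variants = OpenAI::Models::Audio::TranscriptionCreateResponse.variants
    variants.each { |v| puts v }
    # => OpenAI::Models::Audio::Transcription
    #    OpenAI::Models::Audio::TranscriptionVerbose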
        class << self
-          # @api private
-          sig { override.returns([[NilClass, String], [NilClass, Symbol]]) }
-          private def variants
+          sig { override.returns([String, Symbol]) }
+          def variants
          end
        end
      end
@@ -641,14 +640,13 @@ module OpenAI
            end

            class << self
-              # @api private
              sig do
                override
                  .returns(
-                    [[Symbol, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [Symbol, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]]
+                    [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
                  )
              end
-              private def variants
+              def variants
              end
            end
          end
diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
index c79726bd..464f3cda 100644
--- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
+++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
@@ -27,14 +27,13 @@ module OpenAI
        abstract!

        class << self
-          # @api private
          sig do
            override
              .returns(
-                [[NilClass, Symbol], [NilClass, OpenAI::Models::ResponseFormatText], [NilClass, OpenAI::Models::ResponseFormatJSONObject], [NilClass, OpenAI::Models::ResponseFormatJSONSchema]]
+                [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
              )
          end
-          private def variants
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi
index 4684ef7c..6da114a3 100644
--- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi
@@ -789,14 +789,13 @@ module OpenAI
        end

        class << self
-          # @api private
          sig do
            override
              .returns(
-                [[Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete], [Symbol, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]]
+                [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
              )
          end
-          private def variants
+          def variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi
index 520704fc..29c76b9b 100644
--- a/rbi/lib/openai/models/beta/assistant_tool.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool.rbi
@@ -7,14 +7,13 @@ module OpenAI
        abstract!
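Because each stream-event union now publishes its variant classes directly, consumers can dispatch over parsed events with ordinary pattern matching; a hedged sketch (the `event` value and the handling flow are assumed for illustration, only the class names come from this patch):

    # Given a parsed assistant stream event, branch on the union's variants.
    case event
    in OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated
      puts "run created"
    in OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
      warn "stream error"
    else
      # The remaining cases are discoverable via AssistantStreamEvent.variants.
    end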
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::CodeInterpreterTool], [Symbol, OpenAI::Models::Beta::FileSearchTool], [Symbol, OpenAI::Models::Beta::FunctionTool]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 38c43dba..3eadbf23 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -32,9 +32,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Beta::AssistantToolChoice]]) } - private def variants + sig { override.returns([Symbol, OpenAI::Models::Beta::AssistantToolChoice]) } + def variants end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 446f263d..59895471 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -353,9 +353,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index c00a5caa..b51b6036 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -168,14 +168,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated], [Symbol, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress], [Symbol, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta], [Symbol, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted], [Symbol, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]] + [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index b2ebe6e2..eea34354 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -245,14 +245,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled], [Symbol, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]] + 
[OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index aff962cb..1a00a6d5 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -309,14 +309,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled], [Symbol, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]] + [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index c6f68453..688101e1 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -384,9 +384,8 @@ module OpenAI abstract! 
class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -609,26 +608,22 @@ module OpenAI end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ] ) end - private def variants + def variants end end end @@ -754,14 +749,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::CodeInterpreterTool], [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch] ) end - private def variants + def variants end end end @@ -1103,14 +1097,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]] + [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] ) end - private def variants + def variants end end end @@ -1216,14 +1209,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[NilClass, OpenAI::Models::Beta::CodeInterpreterTool], [NilClass, OpenAI::Models::Beta::FileSearchTool], [NilClass, OpenAI::Models::Beta::FunctionTool]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 670f30b9..a22128f8 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -222,26 +222,22 @@ module OpenAI end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ] ) end - private def variants + def variants end end end @@ -367,14 +363,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::CodeInterpreterTool], [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] ) end - private def variants + def variants end end end @@ -708,14 +703,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [Symbol, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]] + [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 9311bee4..8681c1a1 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -11,14 +11,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::FileCitationAnnotation], [Symbol, OpenAI::Models::Beta::Threads::FilePathAnnotation]] + [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index fad66836..ccc794f6 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -11,14 +11,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation], [Symbol, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]] + [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index f27ae4ab..6990e99b 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -359,14 +359,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[NilClass, OpenAI::Models::Beta::CodeInterpreterTool], [NilClass, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 591b21e4..86c7e454 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::ImageFileContentBlock], [Symbol, OpenAI::Models::Beta::Threads::ImageURLContentBlock], [Symbol, OpenAI::Models::Beta::Threads::TextContentBlock], [Symbol, OpenAI::Models::Beta::Threads::RefusalContentBlock]] + [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index a91d5b77..b7a75330 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::ImageFileDeltaBlock], [Symbol, OpenAI::Models::Beta::Threads::TextDeltaBlock], [Symbol, OpenAI::Models::Beta::Threads::RefusalDeltaBlock], [Symbol, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]] + [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index 65c8d43f..5d3fc9f8 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::ImageFileContentBlock], [Symbol, OpenAI::Models::Beta::Threads::ImageURLContentBlock], [Symbol, OpenAI::Models::Beta::Threads::TextContentBlockParam]] + [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 8169f3b8..dd85dbd5 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -156,26 +156,22 @@ module OpenAI end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ] ) end - private def variants + def variants end end end @@ -301,14 +297,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::CodeInterpreterTool], [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index dbf8d42e..1a0afe06 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -563,26 +563,22 @@ module OpenAI end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ] ) end - private def variants + def variants end end end @@ -708,14 +704,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::CodeInterpreterTool], [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]] + [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] ) end - private def variants + def variants end end end @@ -730,9 +725,8 @@ module OpenAI abstract! 
class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 373b9a5a..2408a1ed 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -248,14 +248,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs], [Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]] + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index e355df3e..80276568 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -160,14 +160,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs], [Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]] + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 557b42f9..c46342b2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -332,14 +332,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails], [Symbol, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]] + [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index a5c04563..02171343 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -71,14 +71,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta], [Symbol, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]] + [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 9e16226b..4bd9eacd 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall], [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall], [Symbol, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]] + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 9fae60be..d19b0e4d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta], [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta], [Symbol, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]] + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index a2c3cf1c..3bf3c8bb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -217,38 +217,33 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Chat::ChatCompletionContentPartText], [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartRefusal]] + [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] ) end - private def variants + def variants end end end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + ] ] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 8ee6a1f4..3da63995 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -84,14 +84,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Chat::ChatCompletionContentPartText], [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage], [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio], [Symbol, OpenAI::Models::Chat::ChatCompletionContentPart::File]] + [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 5f435cb4..13661fc1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -71,12 +71,8 @@ module OpenAI ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self - # @api private - sig do - override - .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) - end - private def variants + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 1a47472e..4ff15449 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -12,14 +12,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam], [Symbol, OpenAI::Models::Chat::ChatCompletionSystemMessageParam], [Symbol, OpenAI::Models::Chat::ChatCompletionUserMessageParam], [Symbol, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam], [Symbol, OpenAI::Models::Chat::ChatCompletionToolMessageParam], [Symbol, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam]] + [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index b16430d8..421f1392 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -60,12 +60,8 @@ module OpenAI ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self - # @api private - sig do - override - .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) - end - private def variants + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index b2a9408e..cd265771 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -71,12 +71,8 @@ module OpenAI ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } class << self - # @api private - sig do - override - .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) - end - private def variants + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 218b467d..9eeeca42 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -35,9 +35,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]]) } - private def variants + sig { override.returns([Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]) } + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index d0aa1120..98e3c60f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -67,12 +67,8 @@ module OpenAI ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] 
} class << self - # @api private - sig do - override - .returns([[NilClass, String], [NilClass, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]]) - end - private def variants + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 2ecc1036..1d73f550 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -137,27 +137,23 @@ module OpenAI end class << self - # @api private sig do override .returns( [ - [NilClass, String], - [ - NilClass, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ] + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + ] ] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 6aa2796e..ff953111 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -643,9 +643,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -684,9 +683,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]]) } - private def variants + sig { override.returns([Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]) } + def variants end end end @@ -766,14 +764,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[NilClass, OpenAI::Models::ResponseFormatText], [NilClass, OpenAI::Models::ResponseFormatJSONSchema], [NilClass, OpenAI::Models::ResponseFormatJSONObject]] + [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] ) end - private def variants + def variants end end end @@ -813,9 +810,8 @@ module OpenAI StringArray = T.type_alias { T::Array[String] } class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } - private def variants + sig { override.returns([String, T::Array[String]]) } + def variants end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 48f85bd6..3b3364ff 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -81,9 +81,8 @@ module OpenAI abstract! 
class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index ea1a4342..4879178d 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -360,9 +360,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -383,19 +382,8 @@ module OpenAI ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } class << self - # @api private - sig do - override - .returns( - [ - [NilClass, String], - [NilClass, T::Array[String]], - [NilClass, T::Array[Integer]], - [NilClass, T::Array[T::Array[Integer]]] - ] - ) - end - private def variants + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def variants end end end @@ -408,9 +396,8 @@ module OpenAI StringArray = T.type_alias { T::Array[String] } class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } - private def variants + sig { override.returns([String, T::Array[String]]) } + def variants end end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 90cc38f4..c7164169 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -43,9 +43,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, T.anything]]) } - private def variants + sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } + def variants end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index bc012b2b..a8aa19ad 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -117,19 +117,8 @@ module OpenAI ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } class << self - # @api private - sig do - override - .returns( - [ - [NilClass, String], - [NilClass, T::Array[String]], - [NilClass, T::Array[Integer]], - [NilClass, T::Array[T::Array[Integer]]] - ] - ) - end - private def variants + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def variants end end end @@ -143,9 +132,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index a7159b15..3aacf6c7 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -7,14 +7,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::StaticFileChunkingStrategyObject], [Symbol, OpenAI::Models::OtherFileChunkingStrategyObject]] + [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index ccabae20..4a8433cd 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -8,14 +8,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::AutoFileChunkingStrategyParam], [Symbol, OpenAI::Models::StaticFileChunkingStrategyObjectParam]] + [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b6221aef..8d5f40e6 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -387,9 +387,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -400,9 +399,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -413,9 +411,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -601,9 +598,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -614,9 +610,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -627,9 +622,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -640,9 +634,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -739,9 +732,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -752,9 +744,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -765,9 +756,8 @@ module OpenAI abstract! 
class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 9747dd2a..1878af29 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -209,9 +209,8 @@ module OpenAI end class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -279,9 +278,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -292,9 +290,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -305,9 +302,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -583,9 +579,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -596,9 +591,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -609,9 +603,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -622,9 +615,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -723,9 +715,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end @@ -736,9 +727,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Float]]) } - private def variants + sig { override.returns([Symbol, Float]) } + def variants end end end @@ -749,9 +739,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, Symbol], [NilClass, Integer]]) } - private def variants + sig { override.returns([Symbol, Integer]) } + def variants end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 85b77ba9..74514824 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -106,9 +106,8 @@ module OpenAI abstract! 
class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index e3241afe..c63db2d5 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -140,9 +140,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 32f55ca9..e2d37e95 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -142,9 +142,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index bac4a3a8..7f38dcfc 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -93,14 +93,13 @@ module OpenAI ModerationMultiModalInputArray = T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } class << self - # @api private sig do override .returns( - [[NilClass, String], [NilClass, T::Array[String]], [NilClass, T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]]] + [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]] ) end - private def variants + def variants end end end @@ -113,9 +112,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 1c24bbd3..8228a40a 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -7,14 +7,8 @@ module OpenAI abstract! class << self - # @api private - sig do - override - .returns( - [[Symbol, OpenAI::Models::ModerationImageURLInput], [Symbol, OpenAI::Models::ModerationTextInput]] - ) - end - private def variants + sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } + def variants end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 5d8b1737..4425bbe5 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -67,12 +67,8 @@ module OpenAI abstract! 
class << self - # @api private - sig do - override - .returns([[NilClass, String], [NilClass, OpenAI::Models::Responses::ResponseInputMessageContentList]]) - end - private def variants + sig { override.returns([String, OpenAI::Models::Responses::ResponseInputMessageContentList]) } + def variants end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 6854bd65..d84172d8 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -92,12 +92,8 @@ module OpenAI abstract! class << self - # @api private - sig do - override - .returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, OpenAI::Models::CompoundFilter]]) - end - private def variants + sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } + def variants end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index c5e9ed8f..3f36d9be 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -515,9 +515,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -529,14 +528,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[NilClass, Symbol], [NilClass, OpenAI::Models::Responses::ToolChoiceTypes], [NilClass, OpenAI::Models::Responses::ToolChoiceFunction]] + [Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index c4e3d1ae..885d55f7 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -221,14 +221,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]] + [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index cc58669a..39a9b9ae 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -552,14 +552,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot], [Symbol, 
OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]] + [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 3580164a..0c788ee4 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -8,14 +8,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[NilClass, OpenAI::Models::Responses::ResponseInputText], [NilClass, OpenAI::Models::Responses::ResponseInputImage], [NilClass, OpenAI::Models::Responses::ResponseInputFile], [NilClass, OpenAI::Models::Responses::ResponseOutputText], [NilClass, OpenAI::Models::Responses::ResponseOutputRefusal]] + [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 9a037fc4..1ecd338c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -94,14 +94,11 @@ module OpenAI abstract! class << self - # @api private sig do override - .returns( - [[Symbol, OpenAI::Models::Responses::ResponseOutputText], [Symbol, OpenAI::Models::Responses::ResponseOutputRefusal]] - ) + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index c7102f5b..415388dc 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -94,14 +94,11 @@ module OpenAI abstract! 
class << self - # @api private sig do override - .returns( - [[Symbol, OpenAI::Models::Responses::ResponseOutputText], [Symbol, OpenAI::Models::Responses::ResponseOutputRefusal]] - ) + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index e7a3ffca..26531f0e 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -383,9 +383,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, OpenAI::Models::Responses::ResponseInput]]) } - private def variants + sig { override.returns([String, OpenAI::Models::Responses::ResponseInput]) } + def variants end end end @@ -399,9 +398,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Symbol]]) } - private def variants + sig { override.returns([String, Symbol]) } + def variants end end end @@ -413,14 +411,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[NilClass, Symbol], [NilClass, OpenAI::Models::Responses::ToolChoiceTypes], [NilClass, OpenAI::Models::Responses::ToolChoiceFunction]] + [Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index e3a52573..11cdc45f 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -187,9 +187,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 68fa12af..2d40704e 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -20,14 +20,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::ResponseFormatText], [Symbol, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig], [Symbol, OpenAI::Models::ResponseFormatJSONObject]] + [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index 04d1918f..c64e9858 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -8,14 +8,13 @@ module OpenAI abstract! 
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseInputText], [Symbol, OpenAI::Models::Responses::ResponseInputImage], [Symbol, OpenAI::Models::Responses::ResponseInputFile]] + [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index faf692fb..d0a5fdcc 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -434,14 +434,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::EasyInputMessage], [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message], [Symbol, OpenAI::Models::Responses::ResponseOutputMessage], [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall], [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput], [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch], [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall], [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput], [Symbol, OpenAI::Models::Responses::ResponseReasoningItem], [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference]] + [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 6b49186e..58529788 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -554,14 +554,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message], [Symbol, OpenAI::Models::Responses::ResponseOutputMessage], [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall], [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput], [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch], [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall], [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput]] + [OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, 
OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index f508403c..85e300c7 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -8,14 +8,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseOutputMessage], [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall], [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall], [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch], [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall], [Symbol, OpenAI::Models::Responses::ResponseReasoningItem]] + [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 80dfb0e2..54501157 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -95,14 +95,11 @@ module OpenAI abstract! class << self - # @api private sig do override - .returns( - [[Symbol, OpenAI::Models::Responses::ResponseOutputText], [Symbol, OpenAI::Models::Responses::ResponseOutputRefusal]] - ) + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index e9ce0233..a7745ea7 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -247,14 +247,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation], [Symbol, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation], [Symbol, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]] + [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 6e6608c2..26cf28f5 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -8,14 +8,13 @@ module OpenAI abstract! 
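# A minimal consumption sketch, assuming `event` is a parsed stream event whose
# class is one of the variants in the signature below; the `delta` accessor on
# text delta events is an assumption here, not something this hunk defines:
#
#   case event
#   when OpenAI::Models::Responses::ResponseTextDeltaEvent
#     print(event.delta)
#   when OpenAI::Models::Responses::ResponseCompletedEvent
#     puts
#   end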
class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseAudioDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseAudioDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent], [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent], [Symbol, OpenAI::Models::Responses::ResponseCompletedEvent], [Symbol, OpenAI::Models::Responses::ResponseContentPartAddedEvent], [Symbol, OpenAI::Models::Responses::ResponseContentPartDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseCreatedEvent], [Symbol, OpenAI::Models::Responses::ResponseErrorEvent], [Symbol, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent], [Symbol, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent], [Symbol, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent], [Symbol, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseInProgressEvent], [Symbol, OpenAI::Models::Responses::ResponseFailedEvent], [Symbol, OpenAI::Models::Responses::ResponseIncompleteEvent], [Symbol, OpenAI::Models::Responses::ResponseOutputItemAddedEvent], [Symbol, OpenAI::Models::Responses::ResponseOutputItemDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseRefusalDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseRefusalDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseTextDeltaEvent], [Symbol, OpenAI::Models::Responses::ResponseTextDoneEvent], [Symbol, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent], [Symbol, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent], [Symbol, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]] + [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, 
OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index bafadc88..8208d88f 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -277,14 +277,13 @@ module OpenAI end class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation], [Symbol, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation], [Symbol, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath]] + [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 104f7bf0..92e9d728 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -10,14 +10,13 @@ module OpenAI abstract! class << self - # @api private sig do override .returns( - [[Symbol, OpenAI::Models::Responses::FileSearchTool], [Symbol, OpenAI::Models::Responses::FunctionTool], [Symbol, OpenAI::Models::Responses::ComputerTool], [NilClass, OpenAI::Models::Responses::WebSearchTool]] + [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] ) end - private def variants + def variants end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 593b1417..987867c8 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -95,9 +95,8 @@ module OpenAI StringArray = T.type_alias { T::Array[String] } class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, T::Array[String]]]) } - private def variants + sig { override.returns([String, T::Array[String]]) } + def variants end end end @@ -107,12 +106,8 @@ module OpenAI abstract! 
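# A minimal sketch for this filter union, assuming the runtime `variants`
# mirrors the signature below (the enclosing `Filter` constant name is an
# assumption; it is not shown in this hunk):
#
#   OpenAI::Models::VectorStoreSearchParams::Filter.variants
#   # => [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]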
class << self - # @api private - sig do - override - .returns([[NilClass, OpenAI::Models::ComparisonFilter], [NilClass, OpenAI::Models::CompoundFilter]]) - end - private def variants + sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } + def variants end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index ab3f35c5..f430c4b3 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -90,9 +90,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index fd90edaf..0553e577 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -102,9 +102,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index e29d8b47..139b8f61 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -102,9 +102,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index ea9cd54d..ef0fccc3 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -59,9 +59,8 @@ module OpenAI abstract! class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index ca194b2c..2e8a9c1a 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -231,9 +231,8 @@ module OpenAI abstract! 
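# A minimal sketch for this scalar union (the `Attribute` constant name is an
# assumption; the member types come from the signature below, and what the
# runtime actually returns for the boolean member is likewise assumed):
#
#   OpenAI::Models::VectorStores::VectorStoreFile::Attribute.variants
#   # => [String, Float, T::Boolean]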
class << self - # @api private - sig { override.returns([[NilClass, String], [NilClass, Float], [NilClass, T::Boolean]]) } - private def variants + sig { override.returns([String, Float, T::Boolean]) } + def variants end end end diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 5b709bfc..3da7b96f 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -87,7 +87,9 @@ module OpenAI private def self.known_variants: -> ::Array[[Symbol?, Proc]] - def self.variants: -> ::Array[[Symbol?, top]] + def self.derefed_variants: -> ::Array[[Symbol?, top]] + + def self.variants: -> ::Array[top] private def self.discriminator: (Symbol property) -> void diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 4a89f863..c1e0ec60 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -50,7 +50,7 @@ module OpenAI type model = String | OpenAI::Models::Audio::speech_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::Audio::speech_model]] + def self.variants: -> [String, OpenAI::Models::Audio::speech_model] end type voice = diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 3f154e80..22080806 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -66,7 +66,7 @@ module OpenAI type model = String | OpenAI::Models::audio_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::audio_model]] + def self.variants: -> [String, OpenAI::Models::audio_model] end type timestamp_granularity = :word | :segment diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs index bae11946..f0179c81 100644 --- a/sig/openai/models/audio/transcription_create_response.rbs +++ b/sig/openai/models/audio/transcription_create_response.rbs @@ -6,7 +6,7 @@ module OpenAI | OpenAI::Models::Audio::TranscriptionVerbose class TranscriptionCreateResponse < OpenAI::Union - private def self.variants: -> [[nil, OpenAI::Models::Audio::Transcription], [nil, OpenAI::Models::Audio::TranscriptionVerbose]] + def self.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] end end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 3dd3ee9c..916641d5 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -52,7 +52,7 @@ module OpenAI type model = String | OpenAI::Models::audio_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::audio_model]] + def self.variants: -> [String, OpenAI::Models::audio_model] end end end diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs index a8516dab..d80690b9 100644 --- a/sig/openai/models/audio/translation_create_response.rbs +++ b/sig/openai/models/audio/translation_create_response.rbs @@ -6,7 +6,7 @@ module OpenAI | OpenAI::Models::Audio::TranslationVerbose class TranslationCreateResponse < OpenAI::Union - private def self.variants: -> [[nil, OpenAI::Models::Audio::Translation], [nil, 
OpenAI::Models::Audio::TranslationVerbose]] + def self.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] end end end diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index 63ed4e29..21b76a8c 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -72,7 +72,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]] + def self.variants: -> [String, OpenAI::Models::chat_model] end type tool_resources = @@ -253,7 +253,7 @@ module OpenAI end end - private def self.variants: -> [[:auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [:static, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]] + def self.variants: -> [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] end end end diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs index c9efaa33..158a3a03 100644 --- a/sig/openai/models/beta/assistant_response_format_option.rbs +++ b/sig/openai/models/beta/assistant_response_format_option.rbs @@ -8,7 +8,7 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONSchema class AssistantResponseFormatOption < OpenAI::Union - private def self.variants: -> [[nil, :auto], [nil, OpenAI::Models::ResponseFormatText], [nil, OpenAI::Models::ResponseFormatJSONObject], [nil, OpenAI::Models::ResponseFormatJSONSchema]] + def self.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] end end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index a22d0f87..37d03210 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -603,7 +603,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::error_event end - private def self.variants: -> [[:"thread.created", OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated], [:"thread.run.created", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated], [:"thread.run.queued", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued], [:"thread.run.in_progress", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress], [:"thread.run.requires_action", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction], [:"thread.run.completed", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted], [:"thread.run.incomplete", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete], [:"thread.run.failed", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed], [:"thread.run.cancelling", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling], [:"thread.run.cancelled", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled], [:"thread.run.expired", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired], [:"thread.run.step.created", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated], 
[:"thread.run.step.in_progress", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress], [:"thread.run.step.delta", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta], [:"thread.run.step.completed", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted], [:"thread.run.step.failed", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed], [:"thread.run.step.cancelled", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled], [:"thread.run.step.expired", OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired], [:"thread.message.created", OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated], [:"thread.message.in_progress", OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress], [:"thread.message.delta", OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta], [:"thread.message.completed", OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted], [:"thread.message.incomplete", OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete], [:error, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]] + def self.variants: -> [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent] end end end diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs index 054c260d..5421e7bc 100644 --- a/sig/openai/models/beta/assistant_tool.rbs +++ b/sig/openai/models/beta/assistant_tool.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Beta::FunctionTool class AssistantTool < OpenAI::Union - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::CodeInterpreterTool], [:file_search, OpenAI::Models::Beta::FileSearchTool], [:function, OpenAI::Models::Beta::FunctionTool]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end end end diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs index 
e8243ffd..f7886116 100644 --- a/sig/openai/models/beta/assistant_tool_choice_option.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs @@ -16,7 +16,7 @@ module OpenAI def self.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto] end - private def self.variants: -> [[nil, OpenAI::Models::Beta::AssistantToolChoiceOption::auto], [nil, OpenAI::Models::Beta::AssistantToolChoice]] + def self.variants: -> [OpenAI::Models::Beta::AssistantToolChoiceOption::auto, OpenAI::Models::Beta::AssistantToolChoice] end end end diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index 8e5079c9..f2f849d4 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -145,7 +145,7 @@ module OpenAI def self.values: -> ::Array[OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models] end - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models]] + def self.variants: -> [String, OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models] end type tool_resources = diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index 376e54bc..d46abf27 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -129,7 +129,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete end - private def self.variants: -> [[:"thread.message.created", OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated], [:"thread.message.in_progress", OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress], [:"thread.message.delta", OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta], [:"thread.message.completed", OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted], [:"thread.message.incomplete", OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]] + def self.variants: -> [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete] end end end diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index d8fa10c9..b0c1a0a3 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -179,7 +179,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired end - private def self.variants: -> [[:"thread.run.step.created", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated], [:"thread.run.step.in_progress", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress], [:"thread.run.step.delta", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta], [:"thread.run.step.completed", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted], [:"thread.run.step.failed", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed], [:"thread.run.step.cancelled", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled], [:"thread.run.step.expired", OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]] 
+ def self.variants: -> [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired] end end end diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index ae28ae5e..650253ce 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -254,7 +254,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_expired end - private def self.variants: -> [[:"thread.run.created", OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated], [:"thread.run.queued", OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued], [:"thread.run.in_progress", OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress], [:"thread.run.requires_action", OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction], [:"thread.run.completed", OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted], [:"thread.run.incomplete", OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete], [:"thread.run.failed", OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed], [:"thread.run.cancelling", OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling], [:"thread.run.cancelled", OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled], [:"thread.run.expired", OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]] + def self.variants: -> [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired] end end end diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 0f7e5730..76bb1909 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -90,7 +90,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]] + def self.variants: -> [String, OpenAI::Models::chat_model] end type thread = @@ -165,7 +165,7 @@ module OpenAI MessageContentPartParamArray: message_content_part_param_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant @@ -226,7 +226,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search end - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::CodeInterpreterTool], [:file_search, 
OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch] end end end @@ -409,7 +409,7 @@ module OpenAI end end - private def self.variants: -> [[:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [:static, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]] + def self.variants: -> [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] end end end @@ -488,7 +488,7 @@ module OpenAI | OpenAI::Models::Beta::FunctionTool class Tool < OpenAI::Union - private def self.variants: -> [[nil, OpenAI::Models::Beta::CodeInterpreterTool], [nil, OpenAI::Models::Beta::FileSearchTool], [nil, OpenAI::Models::Beta::FunctionTool]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end type truncation_strategy = diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index cb189c09..8de821f1 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -77,7 +77,7 @@ module OpenAI MessageContentPartParamArray: message_content_part_param_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant @@ -138,7 +138,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search end - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::CodeInterpreterTool], [:file_search, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] end end end @@ -321,7 +321,7 @@ module OpenAI end end - private def self.variants: -> [[:auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto], [:static, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]] + def self.variants: -> [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] end end end diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs index 547b90c2..527a6e58 100644 --- a/sig/openai/models/beta/threads/annotation.rbs +++ b/sig/openai/models/beta/threads/annotation.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::FilePathAnnotation class Annotation < OpenAI::Union - private def self.variants: -> [[:file_citation, OpenAI::Models::Beta::Threads::FileCitationAnnotation], [:file_path, OpenAI::Models::Beta::Threads::FilePathAnnotation]] + def self.variants: -> 
[OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] end end end diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs index 80ab4f91..21dee9df 100644 --- a/sig/openai/models/beta/threads/annotation_delta.rbs +++ b/sig/openai/models/beta/threads/annotation_delta.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation class AnnotationDelta < OpenAI::Union - private def self.variants: -> [[:file_citation, OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation], [:file_path, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]] + def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] end end end diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index f501c1d1..40d2be8d 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -122,7 +122,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only end - private def self.variants: -> [[nil, OpenAI::Models::Beta::CodeInterpreterTool], [nil, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] end end diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index 970c2528..252bb7ff 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::RefusalContentBlock class MessageContent < OpenAI::Union - private def self.variants: -> [[:image_file, OpenAI::Models::Beta::Threads::ImageFileContentBlock], [:image_url, OpenAI::Models::Beta::Threads::ImageURLContentBlock], [:text, OpenAI::Models::Beta::Threads::TextContentBlock], [:refusal, OpenAI::Models::Beta::Threads::RefusalContentBlock]] + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] end end end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index 88d80be8..aab10ba6 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock class MessageContentDelta < OpenAI::Union - private def self.variants: -> [[:image_file, OpenAI::Models::Beta::Threads::ImageFileDeltaBlock], [:text, OpenAI::Models::Beta::Threads::TextDeltaBlock], [:refusal, OpenAI::Models::Beta::Threads::RefusalDeltaBlock], [:image_url, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]] + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] end end end diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs 
b/sig/openai/models/beta/threads/message_content_part_param.rbs index 0fa6c1af..39228e33 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -8,7 +8,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::TextContentBlockParam class MessageContentPartParam < OpenAI::Union - private def self.variants: -> [[:image_file, OpenAI::Models::Beta::Threads::ImageFileContentBlock], [:image_url, OpenAI::Models::Beta::Threads::ImageURLContentBlock], [:text, OpenAI::Models::Beta::Threads::TextContentBlockParam]] + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] end end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index d9e425f5..9e3451c3 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -48,7 +48,7 @@ module OpenAI MessageContentPartParamArray: message_content_part_param_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant @@ -109,7 +109,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search end - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::CodeInterpreterTool], [:file_search, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] end end end diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index eac12be2..09105c8f 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -137,7 +137,7 @@ module OpenAI MessageContentPartParamArray: message_content_part_param_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant @@ -198,7 +198,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search end - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::CodeInterpreterTool], [:file_search, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]] + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] end end end @@ -206,7 +206,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]] + def self.variants: -> [String, OpenAI::Models::chat_model] end type truncation_strategy = diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index 3c997331..41731b5a 
100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -114,7 +114,7 @@ module OpenAI end end - private def self.variants: -> [[:logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs], [:image, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index aeb01140..7a808c31 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -74,7 +74,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage class Output < OpenAI::Union - private def self.variants: -> [[:logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs], [:image, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] end end end diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 69a157aa..af3c7db7 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -136,7 +136,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails class StepDetails < OpenAI::Union - private def self.variants: -> [[:message_creation, OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails], [:tool_calls, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] end type type_ = :message_creation | :tool_calls diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index 2d78e758..9c5a49d3 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -34,7 +34,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject class StepDetails < OpenAI::Union - private def self.variants: -> [[:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta], [:tool_calls, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs index e4de6ce0..42300b3c 100644 --- a/sig/openai/models/beta/threads/runs/tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall class ToolCall < OpenAI::Union - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall], [:file_search, 
OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall], [:function, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs index 49679360..6c3c0ec7 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta class ToolCallDelta < OpenAI::Union - private def self.variants: -> [[:code_interpreter, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta], [:file_search, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta], [:function, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]] + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] end end end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index db1fee4b..f481adde 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -83,10 +83,10 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionContentPartRefusal class ArrayOfContentPart < OpenAI::Union - private def self.variants: -> [[:text, OpenAI::Models::Chat::ChatCompletionContentPartText], [:refusal, OpenAI::Models::Chat::ChatCompletionContentPartRefusal]] + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] end - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] end type function_call = { arguments: String, name: String } diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index f0459960..31333ab5 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -60,7 +60,7 @@ module OpenAI end end - private def self.variants: -> [[:text, OpenAI::Models::Chat::ChatCompletionContentPartText], [:image_url, OpenAI::Models::Chat::ChatCompletionContentPartImage], [:input_audio, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio], [:file, OpenAI::Models::Chat::ChatCompletionContentPart::File]] + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index e528c37a..bcc12871 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ 
b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -42,7 +42,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs index 771ad5ab..a42d88ca 100644 --- a/sig/openai/models/chat/chat_completion_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_message_param.rbs @@ -13,7 +13,7 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam class ChatCompletionMessageParam < OpenAI::Union - private def self.variants: -> [[:developer, OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam], [:system, OpenAI::Models::Chat::ChatCompletionSystemMessageParam], [:user, OpenAI::Models::Chat::ChatCompletionUserMessageParam], [:assistant, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam], [:tool, OpenAI::Models::Chat::ChatCompletionToolMessageParam], [:function, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam]] + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index 4bd18cc2..c04d1cc4 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -36,7 +36,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index e5dc4d84..7501e229 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -42,7 +42,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index d5d57aef..e6c246ee 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -19,7 +19,7 @@ module OpenAI def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] end - private def self.variants: -> [[nil, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto], [nil, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]] + def self.variants: -> 
[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 5082b0dd..90f917db 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -40,7 +40,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index f9b825b4..75491108 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -42,7 +42,7 @@ module OpenAI ChatCompletionContentPartArray: chat_completion_content_part_array - private def self.variants: -> [[nil, String], [nil, ::Array[OpenAI::Models::Chat::chat_completion_content_part]]] + def self.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index c32fa792..17fd54d4 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -172,7 +172,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]] + def self.variants: -> [String, OpenAI::Models::chat_model] end type function_call = @@ -189,7 +189,7 @@ module OpenAI def self.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode] end - private def self.variants: -> [[nil, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode], [nil, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]] + def self.variants: -> [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] end type function = @@ -241,7 +241,7 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONObject class ResponseFormat < OpenAI::Union - private def self.variants: -> [[nil, OpenAI::Models::ResponseFormatText], [nil, OpenAI::Models::ResponseFormatJSONSchema], [nil, OpenAI::Models::ResponseFormatJSONObject]] + def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] end type service_tier = :auto | :default @@ -260,7 +260,7 @@ module OpenAI StringArray: string_array - private def self.variants: -> [[nil, String], [nil, ::Array[String]]] + def self.variants: -> [String, ::Array[String]] end type web_search_options = diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index 435d3cc2..02a4b26d 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -40,7 +40,7 @@ module OpenAI type value = String | Float | bool class Value < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]] + def self.variants: -> 
[String, Float, bool] end end end diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 362d945c..6f496b04 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -104,7 +104,7 @@ module OpenAI def self.values: -> ::Array[OpenAI::Models::CompletionCreateParams::Model::preset] end - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::CompletionCreateParams::Model::preset]] + def self.variants: -> [String, OpenAI::Models::CompletionCreateParams::Model::preset] end type prompt = @@ -123,7 +123,7 @@ module OpenAI ArrayOfToken2DArray: array_of_token2_d_array - private def self.variants: -> [[nil, String], [nil, ::Array[String]], [nil, ::Array[Integer]], [nil, ::Array[::Array[Integer]]]] + def self.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type stop = (String | ::Array[String])? @@ -133,7 +133,7 @@ module OpenAI StringArray: string_array - private def self.variants: -> [[nil, String], [nil, ::Array[String]]] + def self.variants: -> [String, ::Array[String]] end end end diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs index 620ab14b..ddbe75e7 100644 --- a/sig/openai/models/compound_filter.rbs +++ b/sig/openai/models/compound_filter.rbs @@ -23,7 +23,7 @@ module OpenAI type filter = OpenAI::Models::ComparisonFilter | top class Filter < OpenAI::Union - private def self.variants: -> [[nil, OpenAI::Models::ComparisonFilter], [nil, top]] + def self.variants: -> [OpenAI::Models::ComparisonFilter, top] end type type_ = :and | :or diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index fdddf90e..4e32f5ba 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -63,13 +63,13 @@ module OpenAI ArrayOfToken2DArray: array_of_token2_d_array - private def self.variants: -> [[nil, String], [nil, ::Array[String]], [nil, ::Array[Integer]], [nil, ::Array[::Array[Integer]]]] + def self.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type model = String | OpenAI::Models::embedding_model class Model < OpenAI::Union - private def self.variants: -> [[nil, String], [nil, OpenAI::Models::embedding_model]] + def self.variants: -> [String, OpenAI::Models::embedding_model] end type encoding_format = :float | :base64 diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs index 48e1a062..d287b675 100644 --- a/sig/openai/models/file_chunking_strategy.rbs +++ b/sig/openai/models/file_chunking_strategy.rbs @@ -5,7 +5,7 @@ module OpenAI | OpenAI::Models::OtherFileChunkingStrategyObject class FileChunkingStrategy < OpenAI::Union - private def self.variants: -> [[:static, OpenAI::Models::StaticFileChunkingStrategyObject], [:other, OpenAI::Models::OtherFileChunkingStrategyObject]] + def self.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] end end end diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs index 85961a3f..f5f9f28a 100644 --- a/sig/openai/models/file_chunking_strategy_param.rbs +++ b/sig/openai/models/file_chunking_strategy_param.rbs @@ -5,7 +5,7 @@ module OpenAI | OpenAI::Models::StaticFileChunkingStrategyObjectParam class FileChunkingStrategyParam < OpenAI::Union - private 
+      def self.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
     end
   end
 end
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
index 44ab2010..12dfc6a9 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
@@ -160,19 +160,19 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
@@ -307,25 +307,25 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type beta = :auto | Float
 
           class Beta < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
       end
@@ -395,19 +395,19 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
       end
diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs
index b8be5ee1..e5712ef5 100644
--- a/sig/openai/models/fine_tuning/job_create_params.rbs
+++ b/sig/openai/models/fine_tuning/job_create_params.rbs
@@ -81,7 +81,7 @@ module OpenAI
           def self.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Model::preset]
         end
 
-        private def self.variants: -> [[nil, String], [nil, OpenAI::Models::FineTuning::JobCreateParams::Model::preset]]
+        def self.variants: -> [String, OpenAI::Models::FineTuning::JobCreateParams::Model::preset]
       end
 
       type hyperparameters =
@@ -126,19 +126,19 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
@@ -312,25 +312,25 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type beta = :auto | Float
 
           class Beta < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
       end
@@ -400,19 +400,19 @@ module OpenAI
           type batch_size = :auto | Integer
 
           class BatchSize < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
 
           type learning_rate_multiplier = :auto | Float
 
           class LearningRateMultiplier < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Float]]
+            def self.variants: -> [:auto, Float]
           end
 
           type n_epochs = :auto | Integer
 
           class NEpochs < OpenAI::Union
-            private def self.variants: -> [[nil, :auto], [nil, Integer]]
+            def self.variants: -> [:auto, Integer]
           end
         end
       end
diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs
index 983f3cea..2cb6174c 100644
--- a/sig/openai/models/image_create_variation_params.rbs
+++ b/sig/openai/models/image_create_variation_params.rbs
@@ -49,7 +49,7 @@ module OpenAI
     type model = String | OpenAI::Models::image_model
 
     class Model < OpenAI::Union
-      private def self.variants: -> [[nil, String], [nil, OpenAI::Models::image_model]]
+      def self.variants: -> [String, OpenAI::Models::image_model]
     end
 
     type response_format = :url | :b64_json
diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs
index e791162c..17f6d2a5 100644
--- a/sig/openai/models/image_edit_params.rbs
+++ b/sig/openai/models/image_edit_params.rbs
@@ -56,7 +56,7 @@ module OpenAI
     type model = String | OpenAI::Models::image_model
 
     class Model < OpenAI::Union
-      private def self.variants: -> [[nil, String], [nil, OpenAI::Models::image_model]]
+      def self.variants: -> [String, OpenAI::Models::image_model]
     end
 
     type response_format = :url | :b64_json
diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs
index a91c05d0..ebeabef9 100644
--- a/sig/openai/models/image_generate_params.rbs
+++ b/sig/openai/models/image_generate_params.rbs
@@ -60,7 +60,7 @@ module OpenAI
     type model = String | OpenAI::Models::image_model
 
     class Model < OpenAI::Union
-      private def self.variants: -> [[nil, String], [nil, OpenAI::Models::image_model]]
+      def self.variants: -> [String, OpenAI::Models::image_model]
     end
 
     type quality = :standard | :hd
diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs
index 0ea15edd..e5c23167 100644
--- a/sig/openai/models/moderation_create_params.rbs
+++ b/sig/openai/models/moderation_create_params.rbs
@@ -46,13 +46,13 @@ module OpenAI
 
       ModerationMultiModalInputArray: moderation_multi_modal_input_array
 
-      private def self.variants: -> [[nil, String], [nil, ::Array[String]], [nil, ::Array[OpenAI::Models::moderation_multi_modal_input]]]
+      def self.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]]
     end
 
     type model = String | OpenAI::Models::moderation_model
 
     class Model < OpenAI::Union
-      private def self.variants: -> [[nil, String], [nil, OpenAI::Models::moderation_model]]
+      def self.variants: -> [String, OpenAI::Models::moderation_model]
     end
   end
 end
diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs
index af2b3a37..c98cd3a4 100644
--- a/sig/openai/models/moderation_multi_modal_input.rbs
+++ b/sig/openai/models/moderation_multi_modal_input.rbs
@@ -5,7 +5,7 @@ module OpenAI
       | OpenAI::Models::ModerationTextInput
 
     class ModerationMultiModalInput < OpenAI::Union
-      private def self.variants: -> [[:image_url, OpenAI::Models::ModerationImageURLInput], [:text, OpenAI::Models::ModerationTextInput]]
+      def self.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]
     end
   end
 end
diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs
index 4f7d7d6b..7347e123 100644
--- a/sig/openai/models/responses/easy_input_message.rbs
+++ b/sig/openai/models/responses/easy_input_message.rbs
@@ -37,7 +37,7 @@ module OpenAI
           | OpenAI::Models::Responses::response_input_message_content_list
 
        class Content < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, OpenAI::Models::Responses::response_input_message_content_list]]
+          def self.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list]
        end
 
        type role = :user | :assistant | :system | :developer
diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs
index 2c1ab9da..32c08269 100644
--- a/sig/openai/models/responses/file_search_tool.rbs
+++ b/sig/openai/models/responses/file_search_tool.rbs
@@ -50,7 +50,7 @@ module OpenAI
           OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter
 
        class Filters < OpenAI::Union
-          private def self.variants: -> [[nil, OpenAI::Models::ComparisonFilter], [nil, OpenAI::Models::CompoundFilter]]
+          def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
        end
 
        type ranking_options =
diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs
index 7bee86f9..2fa4632c 100644
--- a/sig/openai/models/responses/response.rbs
+++ b/sig/openai/models/responses/response.rbs
@@ -153,7 +153,7 @@ module OpenAI
        type model = String | OpenAI::Models::chat_model
 
        class Model < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]]
+          def self.variants: -> [String, OpenAI::Models::chat_model]
        end
 
        type tool_choice =
@@ -162,7 +162,7 @@ module OpenAI
           | OpenAI::Models::Responses::ToolChoiceFunction
 
        class ToolChoice < OpenAI::Union
-          private def self.variants: -> [[nil, OpenAI::Models::Responses::tool_choice_options], [nil, OpenAI::Models::Responses::ToolChoiceTypes], [nil, OpenAI::Models::Responses::ToolChoiceFunction]]
+          def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
        end
 
        type truncation = :auto | :disabled
diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
index 4f0b66ba..f57c376a 100644
--- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
+++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
@@ -99,7 +99,7 @@ module OpenAI
          end
        end
 
-        private def self.variants: -> [[:logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs], [:files, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]
      end
 
      type status = :in_progress | :interpreting | :completed
diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs
index 49069bb3..7cc62b09 100644
--- a/sig/openai/models/responses/response_computer_tool_call.rbs
+++ b/sig/openai/models/responses/response_computer_tool_call.rbs
@@ -275,7 +275,7 @@ module OpenAI
          def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait
        end
 
-        private def self.variants: -> [[:click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click], [:double_click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick], [:drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag], [:keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress], [:move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move], [:screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot], [:scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll], [:type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type], [:wait, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]
      end
 
      type pending_safety_check =
diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs
index 246a0708..c7bb377e 100644
--- a/sig/openai/models/responses/response_content.rbs
+++ b/sig/openai/models/responses/response_content.rbs
@@ -9,7 +9,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseOutputRefusal
 
      class ResponseContent < OpenAI::Union
-        private def self.variants: -> [[nil, OpenAI::Models::Responses::ResponseInputText], [nil, OpenAI::Models::Responses::ResponseInputImage], [nil, OpenAI::Models::Responses::ResponseInputFile], [nil, OpenAI::Models::Responses::ResponseOutputText], [nil, OpenAI::Models::Responses::ResponseOutputRefusal]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs
index 37deb50b..5bbbdac3 100644
--- a/sig/openai/models/responses/response_content_part_added_event.rbs
+++ b/sig/openai/models/responses/response_content_part_added_event.rbs
@@ -41,7 +41,7 @@ module OpenAI
          | OpenAI::Models::Responses::ResponseOutputRefusal
 
        class Part < OpenAI::Union
-          private def self.variants: -> [[:output_text, OpenAI::Models::Responses::ResponseOutputText], [:refusal, OpenAI::Models::Responses::ResponseOutputRefusal]]
+          def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
        end
      end
    end
diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs
index d8830b30..b915c001 100644
--- a/sig/openai/models/responses/response_content_part_done_event.rbs
+++ b/sig/openai/models/responses/response_content_part_done_event.rbs
@@ -41,7 +41,7 @@ module OpenAI
          | OpenAI::Models::Responses::ResponseOutputRefusal
 
        class Part < OpenAI::Union
-          private def self.variants: -> [[:output_text, OpenAI::Models::Responses::ResponseOutputText], [:refusal, OpenAI::Models::Responses::ResponseOutputRefusal]]
+          def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
        end
      end
    end
diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs
index 785ee05f..d9049e41 100644
--- a/sig/openai/models/responses/response_create_params.rbs
+++ b/sig/openai/models/responses/response_create_params.rbs
@@ -106,13 +106,13 @@ module OpenAI
        type input = String | OpenAI::Models::Responses::response_input
 
        class Input < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, OpenAI::Models::Responses::response_input]]
+          def self.variants: -> [String, OpenAI::Models::Responses::response_input]
        end
 
        type model = String | OpenAI::Models::chat_model
 
        class Model < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, OpenAI::Models::chat_model]]
+          def self.variants: -> [String, OpenAI::Models::chat_model]
        end
 
        type tool_choice =
@@ -121,7 +121,7 @@ module OpenAI
          | OpenAI::Models::Responses::ToolChoiceFunction
 
        class ToolChoice < OpenAI::Union
-          private def self.variants: -> [[nil, OpenAI::Models::Responses::tool_choice_options], [nil, OpenAI::Models::Responses::ToolChoiceTypes], [nil, OpenAI::Models::Responses::ToolChoiceFunction]]
+          def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
        end
 
        type truncation = :auto | :disabled
diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs
index bff8946c..42d71f9c 100644
--- a/sig/openai/models/responses/response_file_search_tool_call.rbs
+++ b/sig/openai/models/responses/response_file_search_tool_call.rbs
@@ -95,7 +95,7 @@ module OpenAI
        type attribute = String | Float | bool
 
        class Attribute < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+          def self.variants: -> [String, Float, bool]
        end
      end
    end
diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs
index bdd7473c..7a38cb8e 100644
--- a/sig/openai/models/responses/response_format_text_config.rbs
+++ b/sig/openai/models/responses/response_format_text_config.rbs
@@ -7,7 +7,7 @@ module OpenAI
        | OpenAI::Models::ResponseFormatJSONObject
 
      class ResponseFormatTextConfig < OpenAI::Union
-        private def self.variants: -> [[:text, OpenAI::Models::ResponseFormatText], [:json_schema, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig], [:json_object, OpenAI::Models::ResponseFormatJSONObject]]
+        def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs
index d12edcd5..004cfa5c 100644
--- a/sig/openai/models/responses/response_input_content.rbs
+++ b/sig/openai/models/responses/response_input_content.rbs
@@ -7,7 +7,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseInputFile
 
      class ResponseInputContent < OpenAI::Union
-        private def self.variants: -> [[:input_text, OpenAI::Models::Responses::ResponseInputText], [:input_image, OpenAI::Models::Responses::ResponseInputImage], [:input_file, OpenAI::Models::Responses::ResponseInputFile]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs
index 6f23ae1b..b60c6d6a 100644
--- a/sig/openai/models/responses/response_input_item.rbs
+++ b/sig/openai/models/responses/response_input_item.rbs
@@ -260,7 +260,7 @@ module OpenAI
          def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::item_reference
        end
 
-        private def self.variants: -> [[:message, OpenAI::Models::Responses::EasyInputMessage], [:message, OpenAI::Models::Responses::ResponseInputItem::Message], [:message, OpenAI::Models::Responses::ResponseOutputMessage], [:file_search_call, OpenAI::Models::Responses::ResponseFileSearchToolCall], [:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall], [:computer_call_output, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput], [:web_search_call, OpenAI::Models::Responses::ResponseFunctionWebSearch], [:function_call, OpenAI::Models::Responses::ResponseFunctionToolCall], [:function_call_output, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput], [:reasoning, OpenAI::Models::Responses::ResponseReasoningItem], [:item_reference, OpenAI::Models::Responses::ResponseInputItem::ItemReference]]
+        def self.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs
index faf8e1a5..a8d0d3fc 100644
--- a/sig/openai/models/responses/response_item_list.rbs
+++ b/sig/openai/models/responses/response_item_list.rbs
@@ -278,7 +278,7 @@ module OpenAI
          end
        end
 
-        private def self.variants: -> [[:message, OpenAI::Models::Responses::ResponseItemList::Data::Message], [:message, OpenAI::Models::Responses::ResponseOutputMessage], [:file_search_call, OpenAI::Models::Responses::ResponseFileSearchToolCall], [:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall], [:computer_call_output, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput], [:web_search_call, OpenAI::Models::Responses::ResponseFunctionWebSearch], [:function_call, OpenAI::Models::Responses::ResponseFunctionToolCall], [:function_call_output, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs
index cb441e32..e01fe1b2 100644
--- a/sig/openai/models/responses/response_output_item.rbs
+++ b/sig/openai/models/responses/response_output_item.rbs
@@ -10,7 +10,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseReasoningItem
 
      class ResponseOutputItem < OpenAI::Union
-        private def self.variants: -> [[:message, OpenAI::Models::Responses::ResponseOutputMessage], [:file_search_call, OpenAI::Models::Responses::ResponseFileSearchToolCall], [:function_call, OpenAI::Models::Responses::ResponseFunctionToolCall], [:web_search_call, OpenAI::Models::Responses::ResponseFunctionWebSearch], [:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall], [:reasoning, OpenAI::Models::Responses::ResponseReasoningItem]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs
index dc20c3b5..c1bbed4a 100644
--- a/sig/openai/models/responses/response_output_message.rbs
+++ b/sig/openai/models/responses/response_output_message.rbs
@@ -41,7 +41,7 @@ module OpenAI
          | OpenAI::Models::Responses::ResponseOutputRefusal
 
        class Content < OpenAI::Union
-          private def self.variants: -> [[:output_text, OpenAI::Models::Responses::ResponseOutputText], [:refusal, OpenAI::Models::Responses::ResponseOutputRefusal]]
+          def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
        end
 
        type status = :in_progress | :completed | :incomplete
diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs
index 6b128276..f8da61b0 100644
--- a/sig/openai/models/responses/response_output_text.rbs
+++ b/sig/openai/models/responses/response_output_text.rbs
@@ -109,7 +109,7 @@ module OpenAI
          def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path
        end
 
-        private def self.variants: -> [[:file_citation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation], [:url_citation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation], [:file_path, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs
index 10d54108..0d48dfd6 100644
--- a/sig/openai/models/responses/response_stream_event.rbs
+++ b/sig/openai/models/responses/response_stream_event.rbs
@@ -36,7 +36,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent
 
      class ResponseStreamEvent < OpenAI::Union
-        private def self.variants: -> [[:"response.audio.delta", OpenAI::Models::Responses::ResponseAudioDeltaEvent], [:"response.audio.done", OpenAI::Models::Responses::ResponseAudioDoneEvent], [:"response.audio.transcript.delta", OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent], [:"response.audio.transcript.done", OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent], [:"response.code_interpreter_call.code.delta", OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent], [:"response.code_interpreter_call.code.done", OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent], [:"response.code_interpreter_call.completed", OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent], [:"response.code_interpreter_call.in_progress", OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent], [:"response.code_interpreter_call.interpreting", OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent], [:"response.completed", OpenAI::Models::Responses::ResponseCompletedEvent], [:"response.content_part.added", OpenAI::Models::Responses::ResponseContentPartAddedEvent], [:"response.content_part.done", OpenAI::Models::Responses::ResponseContentPartDoneEvent], [:"response.created", OpenAI::Models::Responses::ResponseCreatedEvent], [:error, OpenAI::Models::Responses::ResponseErrorEvent], [:"response.file_search_call.completed", OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent], [:"response.file_search_call.in_progress", OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent], [:"response.file_search_call.searching", OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent], [:"response.function_call_arguments.delta", OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent], [:"response.function_call_arguments.done", OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent], [:"response.in_progress", OpenAI::Models::Responses::ResponseInProgressEvent], [:"response.failed", OpenAI::Models::Responses::ResponseFailedEvent], [:"response.incomplete", OpenAI::Models::Responses::ResponseIncompleteEvent], [:"response.output_item.added", OpenAI::Models::Responses::ResponseOutputItemAddedEvent], [:"response.output_item.done", OpenAI::Models::Responses::ResponseOutputItemDoneEvent], [:"response.refusal.delta", OpenAI::Models::Responses::ResponseRefusalDeltaEvent], [:"response.refusal.done", OpenAI::Models::Responses::ResponseRefusalDoneEvent], [:"response.output_text.annotation.added", OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent], [:"response.output_text.delta", OpenAI::Models::Responses::ResponseTextDeltaEvent], [:"response.output_text.done", OpenAI::Models::Responses::ResponseTextDoneEvent], [:"response.web_search_call.completed", OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent], [:"response.web_search_call.in_progress", OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent], [:"response.web_search_call.searching", OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]
      end
    end
  end
diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
index f3f165b9..9030b65e 100644
--- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs
+++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
@@ -121,7 +121,7 @@ module OpenAI
          def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path
        end
 
-        private def self.variants: -> [[:file_citation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation], [:url_citation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation], [:file_path, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath]]
+        def self.variants: -> [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath]
      end
    end
  end
diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs
index 62a0dd21..642f7196 100644
--- a/sig/openai/models/responses/tool.rbs
+++ b/sig/openai/models/responses/tool.rbs
@@ -8,7 +8,7 @@ module OpenAI
        | OpenAI::Models::Responses::WebSearchTool
 
      class Tool < OpenAI::Union
-        private def self.variants: -> [[:file_search, OpenAI::Models::Responses::FileSearchTool], [:function, OpenAI::Models::Responses::FunctionTool], [:computer_use_preview, OpenAI::Models::Responses::ComputerTool], [nil, OpenAI::Models::Responses::WebSearchTool]]
+        def self.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool]
      end
    end
  end
diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs
index 3f179fa9..2ac6032b 100644
--- a/sig/openai/models/vector_store_search_params.rbs
+++ b/sig/openai/models/vector_store_search_params.rbs
@@ -58,14 +58,14 @@ module OpenAI
 
      StringArray: string_array
 
-      private def self.variants: -> [[nil, String], [nil, ::Array[String]]]
+      def self.variants: -> [String, ::Array[String]]
    end
 
    type filters =
      OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter
 
    class Filters < OpenAI::Union
-      private def self.variants: -> [[nil, OpenAI::Models::ComparisonFilter], [nil, OpenAI::Models::CompoundFilter]]
+      def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
    end
 
    type ranking_options =
diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs
index f9014f12..9207fde5 100644
--- a/sig/openai/models/vector_store_search_response.rbs
+++ b/sig/openai/models/vector_store_search_response.rbs
@@ -37,7 +37,7 @@ module OpenAI
      type attribute = String | Float | bool
 
      class Attribute < OpenAI::Union
-        private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+        def self.variants: -> [String, Float, bool]
      end
 
      type content =
diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs
index 394219fe..963a641f 100644
--- a/sig/openai/models/vector_stores/file_batch_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs
@@ -40,7 +40,7 @@ module OpenAI
        type attribute = String | Float | bool
 
        class Attribute < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+          def self.variants: -> [String, Float, bool]
        end
      end
    end
diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs
index 52eb4cfe..471cc8a7 100644
--- a/sig/openai/models/vector_stores/file_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_create_params.rbs
@@ -40,7 +40,7 @@ module OpenAI
        type attribute = String | Float | bool
 
        class Attribute < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+          def self.variants: -> [String, Float, bool]
        end
      end
    end
diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs
index 7320f7b8..946352ec 100644
--- a/sig/openai/models/vector_stores/file_update_params.rbs
+++ b/sig/openai/models/vector_stores/file_update_params.rbs
@@ -32,7 +32,7 @@ module OpenAI
        type attribute = String | Float | bool
 
        class Attribute < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+          def self.variants: -> [String, Float, bool]
        end
      end
    end
diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs
index 3bc27550..c25e3ea0 100644
--- a/sig/openai/models/vector_stores/vector_store_file.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file.rbs
@@ -107,7 +107,7 @@ module OpenAI
        type attribute = String | Float | bool
 
        class Attribute < OpenAI::Union
-          private def self.variants: -> [[nil, String], [nil, Float], [nil, bool]]
+          def self.variants: -> [String, Float, bool]
        end
      end
    end

From 85c27f676c41a73066afa5f483e03c78de29f556 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 14 Mar 2025 18:04:12 +0000
Subject: [PATCH 015/295] chore: slightly more consistent type definition
 layout (#17)

---
 rbi/lib/openai/base_model.rbi | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi
index 6fa7ec26..8e6ccbdc 100644
--- a/rbi/lib/openai/base_model.rbi
+++ b/rbi/lib/openai/base_model.rbi
@@ -80,10 +80,10 @@ module OpenAI
    # When we don't know what to expect for the value.
    class Unknown
-      abstract!
-      extend OpenAI::Converter
+      extend OpenAI::Converter
+      abstract!
+
      sig { params(other: T.anything).returns(T::Boolean) }
      def self.===(other)
      end
@@ -116,10 +116,10 @@ module OpenAI
    # Ruby has no Boolean class; this is something for models to refer to.
    class BooleanModel
-      abstract!
-      extend OpenAI::Converter
+      extend OpenAI::Converter
+      abstract!
+
      sig { params(other: T.anything).returns(T::Boolean) }
      def self.===(other)
      end
@@ -161,10 +161,10 @@ module OpenAI
    # We can therefore convert string values to Symbols, but can't convert other
    # values safely.
    class Enum
-      abstract!
-      extend OpenAI::Converter
+      extend OpenAI::Converter
+      abstract!
+
      class << self
        # All of the valid Symbol values for this enum.
        sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) }
@@ -210,10 +210,10 @@ module OpenAI
    end
 
    class Union
-      abstract!
-      extend OpenAI::Converter
+      extend OpenAI::Converter
+      abstract!
+
      class << self
        # @api private
@@ -295,10 +295,10 @@ module OpenAI
    # Array of items of a given type.
    class ArrayOf
-      abstract!
-      include OpenAI::Converter
+      include OpenAI::Converter
+      abstract!
+
      sig { params(other: T.anything).returns(T::Boolean) }
      def ===(other)
      end
@@ -357,10 +357,10 @@ module OpenAI
    # Hash of items of a given type.
    class HashOf
-      abstract!
-      include OpenAI::Converter
+      include OpenAI::Converter
+      abstract!
+
      sig { params(other: T.anything).returns(T::Boolean) }
      def ===(other)
      end
@@ -418,10 +418,10 @@ module OpenAI
    class BaseModel
-      abstract!
-      extend OpenAI::Converter
+      extend OpenAI::Converter
+      abstract!
+
    KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean} }
 
    class << self

From 459264b9f0ad4b8be38da7ba17cb10041d17ec90 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 14 Mar 2025 20:15:17 +0000
Subject: [PATCH 016/295] chore(internal): remove CI condition (#18)

---
 .github/workflows/ci.yml              |  3 +--
 .github/workflows/create-releases.yml | 28 ---------------------------
 .github/workflows/publish-gem.yml     |  8 ++++++--
 .github/workflows/release-doctor.yml  |  1 -
 .release-please-manifest.json         |  2 +-
 .stats.yml                            |  2 +-
 Gemfile.lock                          |  2 +-
 bin/check-release-environment         |  4 ----
 lib/openai/version.rb                 |  2 +-
 rbi/lib/openai/version.rbi            |  2 +-
 sig/openai/version.rbs                |  2 +-
 11 files changed, 13 insertions(+), 43 deletions(-)
 delete mode 100644 .github/workflows/create-releases.yml

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index de0c9185..6992080f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -12,7 +12,7 @@ jobs:
   lint:
     name: lint
     runs-on: ubuntu-latest
-    if: github.repository == 'openai/openai-ruby'
+
     steps:
       - uses: actions/checkout@v4
 
@@ -29,7 +29,6 @@ jobs:
   test:
     name: test
     runs-on: ubuntu-latest
-    if: github.repository == 'openai/openai-ruby'
 
     steps:
       - uses: actions/checkout@v4
diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml
deleted file mode 100644
index ba6cb5ea..00000000
--- a/.github/workflows/create-releases.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-name: Create releases
-on:
-  schedule:
-    - cron: '0 5 * * *' # every day at 5am UTC
-  push:
-    branches:
-      - main
-
-jobs:
-  release:
-    name: release
-    if: github.ref == 'refs/heads/main' && github.repository == 'openai/openai-ruby'
-    runs-on: ubuntu-latest
-    environment: publish
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: stainless-api/trigger-release-please@v1
-        id: release
-        with:
-          repo: ${{ github.event.repository.full_name }}
-          stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
-
-      - name: Update RubyDocs
-        if: ${{ steps.release.outputs.releases_created }}
-        run: |
-          curl -i -H "Content-Type: application/json" -X POST -d '{"repository":{"url":"https://github.com/openai/openai-ruby"}}' https://www.rubydoc.info/checkout
diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml
index 48dbf6e6..d6ba1c4a 100644
--- a/.github/workflows/publish-gem.yml
+++ b/.github/workflows/publish-gem.yml
@@ -1,9 +1,13 @@
-# workflow for re-running publishing to rubygems.org in case it fails for some reason
-# you can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-gem.yml
+# This workflow is triggered when a GitHub release is created.
+# It can also be run manually to re-publish to rubygems.org in case it failed for some reason.
+# You can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-gem.yml
 name: Publish Gem
 
 on:
   workflow_dispatch:
+  release:
+    types: [published]
+
 jobs:
   publish:
     name: publish
diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml
index fc3ec131..1659237f 100644
--- a/.github/workflows/release-doctor.yml
+++ b/.github/workflows/release-doctor.yml
@@ -19,6 +19,5 @@ jobs:
         run: |
           bash ./bin/check-release-environment
         env:
-          STAINLESS_API_KEY: ${{ secrets.STAINLESS_API_KEY }}
           RUBYGEMS_HOST: ${{ secrets.OPENAI_RUBYGEMS_HOST || secrets.RUBYGEMS_HOST }}
           GEM_HOST_API_KEY: ${{ secrets.OPENAI_GEM_HOST_API_KEY || secrets.GEM_HOST_API_KEY }}
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index c4762802..ba6c3483 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.0.1-alpha.0"
+  ".": "0.1.0-alpha.1"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index f18d6148..26b57a65 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 80
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-c8579861bc21d4d2155a5b9e8e7d54faee8083730673c4d32cbbe573d7fb4116.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c8579861bc21d4d2155a5b9e8e7d54faee8083730673c4d32cbbe573d7fb4116.yml
diff --git a/Gemfile.lock b/Gemfile.lock
index 37e29d5a..341cc850 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -11,7 +11,7 @@ GIT
 PATH
   remote: .
   specs:
-    openai (0.0.1.pre.alpha.0)
+    openai (0.1.0.pre.alpha.1)
       connection_pool
 
 GEM
diff --git a/bin/check-release-environment b/bin/check-release-environment
index 6aa95c4f..6303e291 100644
--- a/bin/check-release-environment
+++ b/bin/check-release-environment
@@ -2,10 +2,6 @@
 
 errors=()
 
-if [ -z "${STAINLESS_API_KEY}" ]; then
-  errors+=("The STAINLESS_API_KEY secret has not been set. Please contact Stainless for an API key & set it in your organization secrets on GitHub.")
-fi
-
 if [ -z "${GEM_HOST_API_KEY}" ]; then
   errors+=("The OPENAI_GEM_HOST_API_KEY secret has not been set. Please set it in either this repository's secrets or your organization secrets")
 fi
diff --git a/lib/openai/version.rb b/lib/openai/version.rb
index 804492b4..bcab79ff 100644
--- a/lib/openai/version.rb
+++ b/lib/openai/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module OpenAI
-  VERSION = "0.0.1-alpha.0"
+  VERSION = "0.1.0-alpha.1"
 end
diff --git a/rbi/lib/openai/version.rbi b/rbi/lib/openai/version.rbi
index 4cab3ef4..5366ece5 100644
--- a/rbi/lib/openai/version.rbi
+++ b/rbi/lib/openai/version.rbi
@@ -1,5 +1,5 @@
 # typed: strong
 
 module OpenAI
-  VERSION = "0.0.1-alpha.0"
+  VERSION = "0.1.0-alpha.1"
 end
diff --git a/sig/openai/version.rbs b/sig/openai/version.rbs
index adde5d9f..e4f5239e 100644
--- a/sig/openai/version.rbs
+++ b/sig/openai/version.rbs
@@ -1,3 +1,3 @@
 module OpenAI
-  VERSION: "0.0.1-alpha.0"
+  VERSION: "0.1.0-alpha.1"
 end

From 3504a4267bc7e02c7bb36cc2aefedfceadb97899 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 15 Mar 2025 01:26:59 +0000
Subject: [PATCH 017/295] chore: mark non-inheritable SDK internal classes as
 final (#19)

---
 rbi/lib/openai/base_model.rbi | 56 ++++++++++++++++++++---------------
 1 file changed, 32 insertions(+), 24 deletions(-)

diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi
index 8e6ccbdc..fabb75ef 100644
--- a/rbi/lib/openai/base_model.rbi
+++ b/rbi/lib/openai/base_model.rbi
@@ -83,28 +83,29 @@ module OpenAI
      extend OpenAI::Converter
 
      abstract!
+      final!
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def self.===(other)
      end
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def self.==(other)
      end
 
      class << self
        # @api private
-        sig { override.params(value: T.anything).returns(T.anything) }
+        sig(:final) { override.params(value: T.anything).returns(T.anything) }
        def coerce(value)
        end
 
        # @api private
-        sig { override.params(value: T.anything).returns(T.anything) }
+        sig(:final) { override.params(value: T.anything).returns(T.anything) }
        def dump(value)
        end
 
        # @api private
-        sig do
+        sig(:final) do
          override
            .params(value: T.anything)
            .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer]))
@@ -119,28 +120,33 @@ module OpenAI
      extend OpenAI::Converter
 
      abstract!
+      final!
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def self.===(other)
      end
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def self.==(other)
      end
 
      class << self
        # @api private
-        sig { override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) }
+        sig(:final) do
+          override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything))
+        end
        def coerce(value)
        end
 
        # @api private
-        sig { override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) }
+        sig(:final) do
+          override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything))
+        end
        def dump(value)
        end
 
        # @api private
-        sig do
+        sig(:final) do
          override
            .params(value: T.anything)
            .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer]))
@@ -298,17 +304,18 @@ module OpenAI
      include OpenAI::Converter
 
      abstract!
+      final!
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def ===(other)
      end
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def ==(other)
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.any(T::Enumerable[T.anything], T.anything))
          .returns(T.any(T::Array[T.anything], T.anything))
@@ -317,7 +324,7 @@ module OpenAI
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.any(T::Enumerable[T.anything], T.anything))
          .returns(T.any(T::Array[T.anything], T.anything))
@@ -326,7 +333,7 @@ module OpenAI
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.anything)
          .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer]))
@@ -335,12 +342,12 @@ module OpenAI
      end
 
      # @api private
-      sig { returns(OpenAI::Converter::Input) }
+      sig(:final) { returns(OpenAI::Converter::Input) }
      protected def item_type
      end
 
      # @api private
-      sig do
+      sig(:final) do
        params(
          type_info: T.any(
            T::Hash[Symbol, T.anything],
@@ -360,17 +367,18 @@ module OpenAI
      include OpenAI::Converter
 
      abstract!
+      final!
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def ===(other)
      end
 
-      sig { params(other: T.anything).returns(T::Boolean) }
+      sig(:final) { params(other: T.anything).returns(T::Boolean) }
      def ==(other)
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.any(T::Hash[T.anything, T.anything], T.anything))
          .returns(T.any(T::Hash[Symbol, T.anything], T.anything))
@@ -379,7 +387,7 @@ module OpenAI
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.any(T::Hash[T.anything, T.anything], T.anything))
          .returns(T.any(T::Hash[Symbol, T.anything], T.anything))
@@ -388,7 +396,7 @@ module OpenAI
      end
 
      # @api private
-      sig do
+      sig(:final) do
        override
          .params(value: T.anything)
          .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer]))
@@ -397,12 +405,12 @@ module OpenAI
      end
 
      # @api private
-      sig { returns(OpenAI::Converter::Input) }
+      sig(:final) { returns(OpenAI::Converter::Input) }
      protected def item_type
      end
 
      # @api private
-      sig do
+      sig(:final) do
        params(
          type_info: T.any(
            T::Hash[Symbol, T.anything],

From 936e2196b716dccafc36c3a1babf18b26968ce29 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sun, 16 Mar 2025 00:02:50 +0000
Subject: [PATCH 018/295] chore: use generics instead of overloading for
 sorbet type definitions (#20)

---
 rbi/lib/openai/base_model.rbi                 | 18 ++-
 .../models/audio/speech_create_params.rbi     | 22 +---
 rbi/lib/openai/models/audio/speech_model.rbi  |  8 +-
 .../audio/transcription_create_params.rbi     | 14 +--
 .../audio/transcription_create_response.rbi   |  6 +-
 .../audio/translation_create_params.rbi       |  6 +-
 .../audio/translation_create_response.rbi     |  6 +-
 rbi/lib/openai/models/audio_model.rbi         |  8 +-
 .../openai/models/audio_response_format.rbi   |  8 +-
 rbi/lib/openai/models/batch.rbi               |  8 +-
 rbi/lib/openai/models/batch_create_params.rbi | 16 +--
 .../models/beta/assistant_create_params.rbi   | 26 ++--
 .../models/beta/assistant_list_params.rbi     |  8 +-
 .../beta/assistant_response_format_option.rbi | 18 +--
 .../models/beta/assistant_stream_event.rbi    | 42 +++++--
 rbi/lib/openai/models/beta/assistant_tool.rbi | 17 ++-
 .../models/beta/assistant_tool_choice.rbi     |  8 +-
 .../beta/assistant_tool_choice_option.rbi     | 16 +--
 .../models/beta/assistant_update_params.rbi   | 16 +--
 .../openai/models/beta/file_search_tool.rbi   |  8 +-
 .../models/beta/message_stream_event.rbi      | 23 ++--
 .../models/beta/run_step_stream_event.rbi     | 25 ++--
 .../openai/models/beta/run_stream_event.rbi   | 28 +++--
 .../beta/thread_create_and_run_params.rbi     | 114 +++++++-----------
 .../models/beta/thread_create_params.rbi      | 83 ++++++-------
 .../openai/models/beta/threads/annotation.rbi | 16 ++-
 .../models/beta/threads/annotation_delta.rbi  | 16 ++-
 .../openai/models/beta/threads/image_file.rbi |  8 +-
 .../models/beta/threads/image_file_delta.rbi  |  8 +-
 .../openai/models/beta/threads/image_url.rbi  |  8 +-
 .../models/beta/threads/image_url_delta.rbi   |  8 +-
 .../openai/models/beta/threads/message.rbi    | 44 +++----
 .../models/beta/threads/message_content.rbi   | 18 +--
 .../beta/threads/message_content_delta.rbi    | 18 +--
 .../threads/message_content_part_param.rbi    | 17 ++-
 .../beta/threads/message_create_params.rbi    | 63 ++++------
 .../models/beta/threads/message_delta.rbi     |  8 +-
 .../beta/threads/message_list_params.rbi      |  8 +-
 rbi/lib/openai/models/beta/threads/run.rbi    | 24 +---
 .../models/beta/threads/run_create_params.rbi | 77 +++++------
 .../models/beta/threads/run_list_params.rbi   |  8 +-
 .../openai/models/beta/threads/run_status.rbi |  8 +-
 .../runs/code_interpreter_tool_call.rbi       | 20 ++-
 .../runs/code_interpreter_tool_call_delta.rbi | 16 ++-
 .../threads/runs/file_search_tool_call.rbi    | 16 +--
 .../models/beta/threads/runs/run_step.rbi     | 40 ++----
 .../beta/threads/runs/run_step_delta.rbi      | 16 ++-
 .../beta/threads/runs/run_step_include.rbi    |  8 +-
 .../beta/threads/runs/step_list_params.rbi    |  8 +-
 .../models/beta/threads/runs/tool_call.rbi    | 17 ++-
 .../beta/threads/runs/tool_call_delta.rbi     | 17 ++-
 .../openai/models/chat/chat_completion.rbi    | 18 +--
 ...hat_completion_assistant_message_param.rbi | 47 +++-----
 .../chat/chat_completion_audio_param.rbi      | 16 +--
 .../models/chat/chat_completion_chunk.rbi     | 42 ++-----
 .../chat/chat_completion_content_part.rbi     | 22 ++--
 .../chat_completion_content_part_image.rbi    |  8 +-
 ...at_completion_content_part_input_audio.rbi |  8 +-
 ...hat_completion_developer_message_param.rbi |  8 +-
 .../chat/chat_completion_message_param.rbi    | 20 +--
 .../models/chat/chat_completion_modality.rbi  |  8 +-
 .../chat_completion_prediction_content.rbi    |  8 +-
 .../models/chat/chat_completion_role.rbi      |  8 +-
 .../chat_completion_system_message_param.rbi  |  8 +-
 .../chat_completion_tool_choice_option.rbi    | 16 +--
 .../chat_completion_tool_message_param.rbi    |  8 +-
 .../chat_completion_user_message_param.rbi    | 37 +++---
 .../models/chat/completion_create_params.rbi  | 73 ++++------
 .../models/chat/completion_list_params.rbi    |  8 +-
 .../chat/completions/message_list_params.rbi  |  8 +-
 rbi/lib/openai/models/chat_model.rbi          |  8 +-
 rbi/lib/openai/models/comparison_filter.rbi   | 14 +--
 rbi/lib/openai/models/completion_choice.rbi   |  8 +-
 .../models/completion_create_params.rbi       | 34 ++----
 rbi/lib/openai/models/compound_filter.rbi     | 14 +--
 .../openai/models/embedding_create_params.rbi | 24 ++--
 rbi/lib/openai/models/embedding_model.rbi     |  8 +-
 .../openai/models/file_chunking_strategy.rbi  | 13 +-
 .../models/file_chunking_strategy_param.rbi   | 16 ++-
 rbi/lib/openai/models/file_list_params.rbi    |  8 +-
 rbi/lib/openai/models/file_object.rbi         | 16 +--
 rbi/lib/openai/models/file_purpose.rbi        |  8 +-
 .../models/fine_tuning/fine_tuning_job.rbi    | 76 +++--------
 .../fine_tuning/fine_tuning_job_event.rbi     | 16 +--
 .../models/fine_tuning/job_create_params.rbi  | 84 +++----------
 .../models/image_create_variation_params.rbi  | 28 ++---
 rbi/lib/openai/models/image_edit_params.rbi   | 28 ++---
 .../openai/models/image_generate_params.rbi   | 50 +++-----
 rbi/lib/openai/models/image_model.rbi         |  8 +-
 rbi/lib/openai/models/moderation.rbi          | 104 ++++------------
 .../models/moderation_create_params.rbi       | 27 ++---
 rbi/lib/openai/models/moderation_model.rbi    |  8 +-
 .../models/moderation_multi_modal_input.rbi   |  6 +-
 rbi/lib/openai/models/reasoning.rbi           | 10 +-
 rbi/lib/openai/models/reasoning_effort.rbi    | 12 +-
 .../openai/models/responses/computer_tool.rbi |  8 +-
 .../models/responses/easy_input_message.rbi   | 22 +---
 .../models/responses/file_search_tool.rbi     | 14 +--
 .../responses/input_item_list_params.rbi      |  8 +-
 rbi/lib/openai/models/responses/response.rbi  | 37 ++----
 .../response_code_interpreter_tool_call.rbi   | 28 ++---
 .../responses/response_computer_tool_call.rbi | 51 ++++----
 .../models/responses/response_content.rbi     | 19 +--
 .../response_content_part_added_event.rbi     | 11 +-
 .../response_content_part_done_event.rbi      | 11 +-
 .../responses/response_create_params.rbi      | 35 ++----
 .../models/responses/response_error.rbi       |  8 +-
 .../response_file_search_tool_call.rbi        | 14 +--
 .../responses/response_format_text_config.rbi | 17 ++-
 .../responses/response_function_tool_call.rbi |  8 +-
 .../response_function_web_search.rbi          |  8 +-
 .../models/responses/response_includable.rbi  |  8 +-
 .../models/responses/response_input_audio.rbi |  8 +-
 .../responses/response_input_content.rbi      | 17 ++-
 .../models/responses/response_input_image.rbi |  8 +-
 .../models/responses/response_input_item.rbi  | 69 +++++------
 .../models/responses/response_item_list.rbi   | 66 ++++------
 .../models/responses/response_output_item.rbi | 20 +--
 .../responses/response_output_message.rbi     | 19 +--
 .../models/responses/response_output_text.rbi | 21 ++--
+ Variants = type_template(:out) + class << self # @api private # @@ -229,12 +233,12 @@ module OpenAI end # @api private - sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + sig { returns(T::Array[[T.nilable(Symbol), Variants]]) } protected def derefed_variants end # All of the specified variants for this union. - sig { overridable.returns(T::Array[T.anything]) } + sig { overridable.returns(T::Array[Variants]) } def variants end @@ -306,6 +310,8 @@ module OpenAI abstract! final! + Elem = type_member(:out) + sig(:final) { params(other: T.anything).returns(T::Boolean) } def ===(other) end @@ -342,7 +348,7 @@ module OpenAI end # @api private - sig(:final) { returns(OpenAI::Converter::Input) } + sig(:final) { returns(Elem) } protected def item_type end @@ -369,6 +375,8 @@ module OpenAI abstract! final! + Elem = type_member(:out) + sig(:final) { params(other: T.anything).returns(T::Boolean) } def ===(other) end @@ -405,7 +413,7 @@ module OpenAI end # @api private - sig(:final) { returns(OpenAI::Converter::Input) } + sig(:final) { returns(Elem) } protected def item_type end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 44c40dc2..043a7179 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -93,11 +93,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, @@ -107,6 +103,8 @@ module OpenAI class Voice < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ALLOY = :alloy ASH = :ash CORAL = :coral @@ -116,12 +114,6 @@ module OpenAI NOVA = :nova SAGE = :sage SHIMMER = :shimmer - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, @@ -129,18 +121,14 @@ module OpenAI class ResponseFormat < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MP3 = :mp3 OPUS = :opus AAC = :aac FLAC = :flac WAV = :wav PCM = :pcm - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 5caef0af..f465baf8 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -6,14 +6,10 @@ module OpenAI class SpeechModel < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 4423fd4b..29ecd8fb 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -134,24 +134,16 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class TimestampGranularity < OpenAI::Enum abstract! 
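# Rough sketch of why `ArrayOf`/`HashOf` declare `Elem = type_member(:out)`
# while `Enum`/`Union` declare type_templates: `item_type` is an instance
# method, so its return type must be a type_member; `values`/`variants` live
# on the singleton class, so they need a type_template. Hypothetical `Box`
# class, assuming sorbet-runtime:
require "sorbet-runtime"

class Box
  extend T::Sig
  extend T::Generic

  Elem = type_member # visible to instance-method sigs such as `contents`

  sig { returns(Elem) }
  attr_reader :contents

  sig { params(contents: Elem).void }
  def initialize(contents:)
    @contents = contents
  end
end

# Box[Integer].new(contents: 1).contents # => 1, typed as Integer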
+ Value = type_template(:out) { {fixed: Symbol} } + WORD = :word SEGMENT = :segment - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index a54d08ec..c414dc3d 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -8,11 +8,7 @@ module OpenAI class TranscriptionCreateResponse < OpenAI::Union abstract! - class << self - sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } end end end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 6b3f5fca..fb5d4a71 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -97,11 +97,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 44b2c2f8..79ac8c66 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -6,11 +6,7 @@ module OpenAI class TranslationCreateResponse < OpenAI::Union abstract! - class << self - sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } end end end diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index 7f4186f7..85348552 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -5,13 +5,9 @@ module OpenAI class AudioModel < OpenAI::Enum abstract! - WHISPER_1 = :"whisper-1" + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + WHISPER_1 = :"whisper-1" end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 2acd496e..fb54aad0 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -7,17 +7,13 @@ module OpenAI class AudioResponseFormat < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + JSON = :json TEXT = :text SRT = :srt VERBOSE_JSON = :verbose_json VTT = :vtt - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 95ad26fe..699d0782 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -269,6 +269,8 @@ module OpenAI class Status < OpenAI::Enum abstract! 
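# Consumption side of the response unions above, sketched: with `Variants`
# fixed to T.any(Transcription, TranscriptionVerbose), callers branch on the
# concrete class at runtime. The `client.audio.transcriptions.create` call
# shape is assumed here for illustration, not taken from this patch.
response = client.audio.transcriptions.create(
  file: File.open("speech.mp3", "rb"),
  model: :"whisper-1"
)

case response
in OpenAI::Models::Audio::TranscriptionVerbose
  puts response.duration # verbose responses carry timing metadata
in OpenAI::Models::Audio::Transcription
  puts response.text
end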
+ Value = type_template(:out) { {fixed: Symbol} } + VALIDATING = :validating FAILED = :failed IN_PROGRESS = :in_progress @@ -277,12 +279,6 @@ module OpenAI EXPIRED = :expired CANCELLING = :cancelling CANCELLED = :cancelled - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Errors < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 6588dc92..17c682c5 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -92,13 +92,9 @@ module OpenAI class CompletionWindow < OpenAI::Enum abstract! - NUMBER_24H = :"24h" + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + NUMBER_24H = :"24h" end # The endpoint to be used for all requests in the batch. Currently @@ -108,15 +104,11 @@ module OpenAI class Endpoint < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" V1_COMPLETIONS = :"/v1/completions" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index c37ed2d4..44da54e2 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -306,11 +306,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class ToolResources < OpenAI::BaseModel @@ -530,6 +526,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end + class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } @@ -638,17 +643,6 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 1e331b4a..bb11fb1b 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -84,14 +84,10 @@ module OpenAI class Order < OpenAI::Enum abstract! 
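# Style note in sketch form: the brace and do/end spellings of type_template
# are equivalent; the hunks above reach for do/end once the fixed T.any(...)
# list of fully-qualified variant classes no longer fits on one line.
# Hypothetical unions for comparison:
class ShortUnion < OpenAI::Union
  abstract!

  # One-liner is fine when the fixed type is short...
  Variants = type_template(:out) { {fixed: T.any(String, Symbol)} }
end

class LongUnion < OpenAI::Union
  abstract!

  # ...block form keeps long variant lists readable.
  Variants = type_template(:out) do
    {
      fixed: T.any(String, Symbol)
    }
  end
end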
+ Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index 464f3cda..ae4f724e 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -26,15 +26,15 @@ module OpenAI class AssistantResponseFormatOption < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + Symbol, + OpenAI::Models::ResponseFormatText, + OpenAI::Models::ResponseFormatJSONObject, + OpenAI::Models::ResponseFormatJSONSchema + ) + } end end end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 6da114a3..e84d0007 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -26,6 +26,37 @@ module OpenAI class AssistantStreamEvent < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + ) + } + end + class ThreadCreated < OpenAI::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). 
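# Handler-side sketch for the 24-variant stream union declared above: one
# `case` over the fixed T.any replaces branching on the old `variants` tuple.
# The shape of `stream` (an Enumerable yielding AssistantStreamEvent
# variants) and the `handle_delta` helper are assumptions for illustration.
stream.each do |event|
  case event
  in OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta
    handle_delta(event.data) # incremental message content
  in OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted
    break
  in OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
    raise event.data.message.to_s
  else
    nil # remaining variants are ignored by this handler
  end
end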
@@ -787,17 +818,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index 29c76b9b..af9e9f94 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -6,15 +6,14 @@ module OpenAI class AssistantTool < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) + } end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 1cec31b5..84562850 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -40,15 +40,11 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + FUNCTION = :function CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 3eadbf23..552ca737 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -13,6 +13,8 @@ module OpenAI class AssistantToolChoiceOption < OpenAI::Union abstract! 
+ Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)} } + # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before @@ -20,21 +22,11 @@ module OpenAI class Auto < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + NONE = :none AUTO = :auto REQUIRED = :required - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([Symbol, OpenAI::Models::Beta::AssistantToolChoice]) } - def variants - end end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 59895471..44b8293f 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -306,6 +306,8 @@ module OpenAI class Model < OpenAI::Union abstract! + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -314,6 +316,8 @@ module OpenAI class AssistantSupportedModels < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 @@ -344,18 +348,6 @@ module OpenAI GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([String, Symbol]) } - def variants - end end end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index bf120b56..ce4b782a 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -130,14 +130,10 @@ module OpenAI class Ranker < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index b51b6036..ef592ec4 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -9,6 +9,18 @@ module OpenAI class MessageStreamEvent < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete + ) + } + end + class ThreadMessageCreated < OpenAI::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
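# Caller-side sketch of the Symbol-or-model union above: either value
# satisfies T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice), so the
# enum-like `:auto` and a structured tool choice share one parameter. The
# `runs.create` method shape and `.new` keyword attributes are assumed.
client.beta.threads.runs.create(thread_id, assistant_id: assistant_id, tool_choice: :auto)

client.beta.threads.runs.create(
  thread_id,
  assistant_id: assistant_id,
  tool_choice: OpenAI::Models::Beta::AssistantToolChoice.new(type: :file_search)
)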
@@ -166,17 +178,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index eea34354..40de2bea 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -9,6 +9,20 @@ module OpenAI class RunStepStreamEvent < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired + ) + } + end + class ThreadRunStepCreated < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } @@ -243,17 +257,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 1a00a6d5..fb5ba148 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -8,6 +8,23 @@ module OpenAI class RunStreamEvent < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired + ) + } + end + class ThreadRunCreated < OpenAI::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -307,17 +324,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 688101e1..60679618 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -383,11 +383,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class Thread < OpenAI::BaseModel @@ -597,6 +593,21 @@ module OpenAI class Content < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + ) + } + end + MessageContentPartParamArray = T.type_alias do T::Array[ T.any( @@ -606,26 +617,6 @@ module OpenAI ) ] end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] - ) - end - def variants - end - end end # The role of the entity that is creating the message. Allowed values include: @@ -637,14 +628,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Attachment < OpenAI::BaseModel @@ -729,6 +716,15 @@ module OpenAI class Tool < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) + } + end + class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -747,17 +743,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch] - ) - end - def variants - end - end end end end @@ -985,6 +970,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! 
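# The two shapes accepted by the `Content` union above, sketched: a bare
# String, or an array of the block classes listed in the fixed T.any (the
# `MessageContentPartParamArray` alias names the array half for reuse).
# Construction via `.new` with keyword attributes is assumed.
plain = "Summarize the attached file."

blocks = [
  OpenAI::Models::Beta::Threads::TextContentBlockParam.new(
    text: "What is in this image?",
    type: :text
  ),
  OpenAI::Models::Beta::Threads::ImageURLContentBlock.new(
    image_url: {url: "https://example.com/cat.png"},
    type: :image_url
  )
]
# Both `plain` and `blocks` satisfy the union.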
+ Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end + class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } @@ -1095,17 +1089,6 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) - end - def variants - end - end end end end @@ -1208,15 +1191,14 @@ module OpenAI class Tool < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) + } end end @@ -1260,14 +1242,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LAST_MESSAGES = :last_messages - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index a22128f8..94c632cf 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -211,6 +211,21 @@ module OpenAI class Content < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + ) + } + end + MessageContentPartParamArray = T.type_alias do T::Array[ T.any( @@ -220,26 +235,6 @@ module OpenAI ) ] end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] - ) - end - def variants - end - end end # The role of the entity that is creating the message. Allowed values include: @@ -251,14 +246,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Attachment < OpenAI::BaseModel @@ -343,6 +334,15 @@ module OpenAI class Tool < OpenAI::Union abstract! 
+ Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) + } + end + class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -361,17 +361,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] - ) - end - def variants - end - end end end end @@ -593,6 +582,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end + class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } @@ -701,17 +699,6 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 8681c1a1..dc7434e2 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -10,15 +10,13 @@ module OpenAI class Annotation < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Models::Beta::Threads::FilePathAnnotation + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index ccc794f6..32598149 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -10,15 +10,13 @@ module OpenAI class AnnotationDelta < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index dccaa783..879b0320 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -39,15 +39,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LOW = :low HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index faa92642..89c10a01 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -39,15 +39,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LOW = :low HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 7baa0ea2..e4aa98eb 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -38,15 +38,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LOW = :low HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 59edca5a..a300a59e 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -38,15 +38,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LOW = :low HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 6990e99b..b79d9945 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -339,6 +339,15 @@ module OpenAI class Tool < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) + } + end + class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -357,17 +366,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] - ) - end - def variants - end - end end end @@ -394,17 +392,13 @@ module OpenAI class Reason < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + CONTENT_FILTER = :content_filter MAX_TOKENS = :max_tokens RUN_CANCELLED = :run_cancelled RUN_EXPIRED = :run_expired RUN_FAILED = :run_failed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -412,14 +406,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -427,15 +417,11 @@ module OpenAI class Status < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress INCOMPLETE = :incomplete COMPLETED = :completed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 86c7e454..7bc0759c 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -9,15 +9,15 @@ module OpenAI class MessageContent < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlock, + OpenAI::Models::Beta::Threads::RefusalContentBlock + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index b7a75330..09915e69 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -9,15 +9,15 @@ module OpenAI class MessageContentDelta < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index 5d3fc9f8..fe9ae396 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -9,15 +9,14 @@ module OpenAI class MessageContentPartParam < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index dd85dbd5..0783f811 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -145,6 +145,21 @@ module OpenAI class Content < OpenAI::Union abstract! 
+ Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + ) + } + end + MessageContentPartParamArray = T.type_alias do T::Array[ T.any( @@ -154,26 +169,6 @@ module OpenAI ) ] end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] - ) - end - def variants - end - end end # The role of the entity that is creating the message. Allowed values include: @@ -185,14 +180,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Attachment < OpenAI::BaseModel @@ -277,6 +268,15 @@ module OpenAI class Tool < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) + } + end + class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -295,17 +295,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index f85da37a..7883727c 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -98,14 +98,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 2745c825..4b93b0c8 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -96,14 +96,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 6e74d579..94002e57 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -530,14 +530,10 @@ module OpenAI class Reason < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -573,15 +569,11 @@ module OpenAI class Code < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -695,14 +687,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LAST_MESSAGES = :last_messages - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 1a0afe06..dc48611b 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -552,6 +552,21 @@ module OpenAI class Content < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + ) + } + end + MessageContentPartParamArray = T.type_alias do T::Array[ T.any( @@ -561,26 +576,6 @@ module OpenAI ) ] end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ] - ) - end - def variants - end - end end # The role of the entity that is creating the message. Allowed values include: @@ -592,14 +587,10 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Attachment < OpenAI::BaseModel @@ -684,6 +675,15 @@ module OpenAI class Tool < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) + } + end + class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -702,17 +702,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] - ) - end - def variants - end - end end end end @@ -724,11 +713,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class TruncationStrategy < OpenAI::BaseModel @@ -771,14 +756,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LAST_MESSAGES = :last_messages - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index ce7fa02c..44ca10d2 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -85,14 +85,10 @@ module OpenAI class Order < OpenAI::Enum abstract! 
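# Sketch of the recurring T.any(String, Symbol) `Model` union in these params
# files: a Symbol for model ids the generated enums already name, a String
# for anything newer than the enum. The `runs.create` call shape is assumed.
client.beta.threads.runs.create(thread_id, assistant_id: assistant_id, model: :"gpt-4o")
client.beta.threads.runs.create(thread_id, assistant_id: assistant_id, model: "gpt-4o-2024-11-20")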
+ Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index ea69e05c..76ada3f1 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -10,6 +10,8 @@ module OpenAI class RunStatus < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + QUEUED = :queued IN_PROGRESS = :in_progress REQUIRES_ACTION = :requires_action @@ -19,12 +21,6 @@ module OpenAI COMPLETED = :completed INCOMPLETE = :incomplete EXPIRED = :expired - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 2408a1ed..4ddb7b18 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -146,6 +146,15 @@ module OpenAI class Output < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) + } + end + class Logs < OpenAI::BaseModel # The text output from the Code Interpreter tool call. sig { returns(String) } @@ -246,17 +255,6 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 80276568..76d2e5d9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -159,15 +159,13 @@ module OpenAI class Output < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index af309515..b4f818ee 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -140,14 +140,10 @@ module OpenAI class Ranker < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -255,13 +251,9 @@ module OpenAI class Type < OpenAI::Enum abstract! 
- TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index c46342b2..c4e9125c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -298,14 +298,10 @@ module OpenAI class Code < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -314,32 +310,26 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress CANCELLED = :cancelled FAILED = :failed COMPLETED = :completed EXPIRED = :expired - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The details of the run step. class StepDetails < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails + ) + } end end @@ -347,14 +337,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Usage < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 02171343..cb3b9c62 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -70,15 +70,13 @@ module OpenAI class StepDetails < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index b2726421..9fd4c51e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -10,13 +10,9 @@ module OpenAI class RunStepInclude < OpenAI::Enum abstract! 
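# Exhaustive branch over the `StepDetails` union above, sketched; `step` is
# assumed to be a fetched Runs::RunStep, and the attribute names follow the
# two step-detail models named in the fixed T.any.
case step.step_details
in OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails => details
  puts details.message_creation.message_id
in OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails => details
  puts details.tool_calls.length
end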
- STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 81075759..73059bac 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -113,14 +113,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 4bd9eacd..e3f26a25 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -9,15 +9,14 @@ module OpenAI class ToolCall < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + ) + } end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index d19b0e4d..8169ef81 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -9,15 +9,14 @@ module OpenAI class ToolCallDelta < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) + } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 361ee7ba..c68e3594 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -212,17 +212,13 @@ module OpenAI class FinishReason < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls CONTENT_FILTER = :content_filter FUNCTION_CALL = :function_call - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Logprobs < OpenAI::BaseModel @@ -279,14 +275,10 @@ module OpenAI class ServiceTier < OpenAI::Enum abstract! 
- SCALE = T.let(:scale, T.nilable(Symbol)) - DEFAULT = T.let(:default, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + SCALE = :scale + DEFAULT = :default end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 3bf3c8bb..624890ee 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -202,6 +202,20 @@ module OpenAI class Content < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + ] + ) + } + end + ArrayOfContentPartArray = T.type_alias do T::Array[ T.any( @@ -216,34 +230,13 @@ module OpenAI class ArrayOfContentPart < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] - ) - end - def variants - end - end - end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ] + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal ) - end - def variants + } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index a8c18d75..0af4b8ee 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -42,17 +42,13 @@ module OpenAI class Format < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + WAV = :wav MP3 = :mp3 FLAC = :flac OPUS = :opus PCM16 = :pcm16 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, @@ -60,6 +56,8 @@ module OpenAI class Voice < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ALLOY = :alloy ASH = :ash BALLAD = :ballad @@ -68,12 +66,6 @@ module OpenAI SAGE = :sage SHIMMER = :shimmer VERSE = :verse - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 7dbd7a66..9595e84a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -327,17 +327,13 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + DEVELOPER = :developer SYSTEM = :system USER = :user ASSISTANT = :assistant TOOL = :tool - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class ToolCall < OpenAI::BaseModel @@ -439,13 +435,9 @@ module OpenAI class Type < OpenAI::Enum abstract! 
- FUNCTION = :function + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + FUNCTION = :function end end end @@ -459,17 +451,13 @@ module OpenAI class FinishReason < OpenAI::Enum abstract! - STOP = T.let(:stop, T.nilable(Symbol)) - LENGTH = T.let(:length, T.nilable(Symbol)) - TOOL_CALLS = T.let(:tool_calls, T.nilable(Symbol)) - CONTENT_FILTER = T.let(:content_filter, T.nilable(Symbol)) - FUNCTION_CALL = T.let(:function_call, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + STOP = :stop + LENGTH = :length + TOOL_CALLS = :tool_calls + CONTENT_FILTER = :content_filter + FUNCTION_CALL = :function_call end class Logprobs < OpenAI::BaseModel @@ -526,14 +514,10 @@ module OpenAI class ServiceTier < OpenAI::Enum abstract! - SCALE = T.let(:scale, T.nilable(Symbol)) - DEFAULT = T.let(:default, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + SCALE = :scale + DEFAULT = :default end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 3da63995..a36f38f5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -10,6 +10,17 @@ module OpenAI class ChatCompletionContentPart < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + } + end + class File < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } def file @@ -82,17 +93,6 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 4f78201d..56a6931a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -73,15 +73,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto LOW = :low HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 52dfed23..83e891b0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -74,14 +74,10 @@ module OpenAI class Format < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + WAV = :wav MP3 = :mp3 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 13661fc1..d5dca054 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -68,13 +68,9 @@ module OpenAI class Content < OpenAI::Union abstract! - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end - end + ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 4ff15449..97d8192e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -11,15 +11,17 @@ module OpenAI class ChatCompletionMessageParam < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) + } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index bb95e5c8..df9e0044 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -8,14 +8,10 @@ module OpenAI class ChatCompletionModality < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text AUDIO = :audio - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 421f1392..f595e430 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -57,13 +57,9 @@ module OpenAI class Content < OpenAI::Union abstract! 
- ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end - end + ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index 24a8acf6..ae42bac3 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -9,18 +9,14 @@ module OpenAI class ChatCompletionRole < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + DEVELOPER = :developer SYSTEM = :system USER = :user ASSISTANT = :assistant TOOL = :tool FUNCTION = :function - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index cd265771..1863c0f8 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -68,13 +68,9 @@ module OpenAI class Content < OpenAI::Union abstract! - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end - end + ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 9eeeca42..0c237bac 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -17,27 +17,19 @@ module OpenAI class ChatCompletionToolChoiceOption < OpenAI::Union abstract! + Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } + # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. class Auto < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + NONE = :none AUTO = :auto REQUIRED = :required - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]) } - def variants - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 98e3c60f..20936406 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -64,13 +64,9 @@ module OpenAI class Content < OpenAI::Union abstract! - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end - end + ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 1d73f550..7ebaa424 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -125,6 +125,22 @@ module OpenAI class Content < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + ] + ) + } + end + ChatCompletionContentPartArray = T.type_alias do T::Array[ T.any( @@ -135,27 +151,6 @@ module OpenAI ) ] end - - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index ff953111..7b69e065 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -642,11 +642,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # Deprecated in favor of `tool_choice`. @@ -666,26 +662,18 @@ module OpenAI class FunctionCall < OpenAI::Union abstract! + Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } + # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. class FunctionCallMode < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + NONE = :none AUTO = :auto - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]) } - def variants - end end end @@ -740,14 +728,10 @@ module OpenAI class Modality < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text AUDIO = :audio - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # An object specifying the format that the model must output. @@ -763,15 +747,14 @@ module OpenAI class ResponseFormat < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::ResponseFormatText, + OpenAI::Models::ResponseFormatJSONSchema, + OpenAI::Models::ResponseFormatJSONObject + ) + } end end @@ -792,14 +775,10 @@ module OpenAI class ServiceTier < OpenAI::Enum abstract! - AUTO = T.let(:auto, T.nilable(Symbol)) - DEFAULT = T.let(:default, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + AUTO = :auto + DEFAULT = :default end # Up to 4 sequences where the API will stop generating further tokens. The @@ -807,13 +786,9 @@ module OpenAI class Stop < OpenAI::Union abstract! - StringArray = T.type_alias { T::Array[String] } + Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - end - end + StringArray = T.type_alias { T::Array[String] } end class WebSearchOptions < OpenAI::BaseModel @@ -869,15 +844,11 @@ module OpenAI class SearchContextSize < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + LOW = :low MEDIUM = :medium HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 66305617..45a53c74 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -90,14 +90,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index b639be67..014bdbc9 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -66,14 +66,10 @@ module OpenAI class Order < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 5120ba73..acb06a2d 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -5,6 +5,8 @@ module OpenAI class ChatModel < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 @@ -49,12 +51,6 @@ module OpenAI GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 3b3364ff..bbb84c5d 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -61,18 +61,14 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + EQ = :eq NE = :ne GT = :gt GTE = :gte LT = :lt LTE = :lte - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The value to compare against the attribute key; supports string, number, or @@ -80,11 +76,7 @@ module OpenAI class Value < OpenAI::Union abstract! - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 860f0c29..93b41ebf 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -75,15 +75,11 @@ module OpenAI class FinishReason < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + STOP = :stop LENGTH = :length CONTENT_FILTER = :content_filter - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Logprobs < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 4879178d..f2ca35dd 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -340,6 +340,8 @@ module OpenAI class Model < OpenAI::Union abstract! + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -348,21 +350,11 @@ module OpenAI class Preset < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" DAVINCI_002 = :"davinci-002" BABBAGE_002 = :"babbage-002" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([String, Symbol]) } - def variants - end end end @@ -375,17 +367,15 @@ module OpenAI class Prompt < OpenAI::Union abstract! 
+ Variants = type_template(:out) do + {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} + end + StringArray = T.type_alias { T::Array[String] } IntegerArray = T.type_alias { T::Array[Integer] } ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } - - class << self - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def variants - end - end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -393,13 +383,9 @@ module OpenAI class Stop < OpenAI::Union abstract! - StringArray = T.type_alias { T::Array[String] } + Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - end - end + StringArray = T.type_alias { T::Array[String] } end end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index c7164169..6722f10d 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -42,25 +42,17 @@ module OpenAI class Filter < OpenAI::Union abstract! - class << self - sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } end # Type of operation: `and` or `or`. class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AND = :and OR = :or - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index a8aa19ad..a9a37c56 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -110,17 +110,15 @@ module OpenAI class Input < OpenAI::Union abstract! + Variants = type_template(:out) do + {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} + end + StringArray = T.type_alias { T::Array[String] } IntegerArray = T.type_alias { T::Array[Integer] } ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } - - class << self - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def variants - end - end end # ID of the model to use. You can use the @@ -131,11 +129,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format to return the embeddings in. Can be either `float` or @@ -143,14 +137,10 @@ module OpenAI class EncodingFormat < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + FLOAT = :float BASE64 = :base64 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 161fb296..1bd2eac2 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -5,15 +5,11 @@ module OpenAI class EmbeddingModel < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index 3aacf6c7..93972f9b 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -6,15 +6,10 @@ module OpenAI class FileChunkingStrategy < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + } end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 4a8433cd..aeffdf61 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -7,15 +7,13 @@ module OpenAI class FileChunkingStrategyParam < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Models::StaticFileChunkingStrategyObjectParam + ) + } end end end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index e5f795dc..48b8106e 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -80,14 +80,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 6659dc79..b31df148 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -141,6 +141,8 @@ module OpenAI class Purpose < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASSISTANTS = :assistants ASSISTANTS_OUTPUT = :assistants_output BATCH = :batch @@ -148,12 +150,6 @@ module OpenAI FINE_TUNE = :"fine-tune" FINE_TUNE_RESULTS = :"fine-tune-results" VISION = :vision - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # Deprecated. The current status of the file, which can be either `uploaded`, @@ -161,15 +157,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + UPLOADED = :uploaded PROCESSED = :processed ERROR = :error - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index edc943c5..07bd11c5 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -9,18 +9,14 @@ module OpenAI class FilePurpose < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + ASSISTANTS = :assistants BATCH = :batch FINE_TUNE = :"fine-tune" VISION = :vision USER_DATA = :user_data EVALS = :evals - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 8d5f40e6..2de2b251 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -386,11 +386,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -398,11 +394,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -410,11 +402,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end @@ -423,18 +411,14 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + VALIDATING_FILES = :validating_files QUEUED = :queued RUNNING = :running SUCCEEDED = :succeeded FAILED = :failed CANCELLED = :cancelled - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Method < OpenAI::BaseModel @@ -597,11 +581,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight @@ -609,11 +589,7 @@ module OpenAI class Beta < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -621,11 +597,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -633,11 +605,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end end @@ -731,11 +699,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -743,11 +707,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! 
- class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -755,11 +715,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end end @@ -768,14 +724,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + SUPERVISED = :supervised DPO = :dpo - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 3f5027cb..b1e399d7 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -106,29 +106,21 @@ module OpenAI class Level < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + INFO = :info WARN = :warn ERROR = :error - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The type of event. class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MESSAGE = :message METRICS = :metrics - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 1878af29..1af7bd79 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -191,27 +191,19 @@ module OpenAI class Model < OpenAI::Union abstract! + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). class Preset < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + BABBAGE_002 = :"babbage-002" DAVINCI_002 = :"davinci-002" GPT_3_5_TURBO = :"gpt-3.5-turbo" GPT_4O_MINI = :"gpt-4o-mini" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - - class << self - sig { override.returns([String, Symbol]) } - def variants - end end end @@ -277,11 +269,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -289,11 +277,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -301,11 +285,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end @@ -578,11 +558,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! 
- class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight @@ -590,11 +566,7 @@ module OpenAI class Beta < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -602,11 +574,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -614,11 +582,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end end @@ -714,11 +678,7 @@ module OpenAI class BatchSize < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -726,11 +686,7 @@ module OpenAI class LearningRateMultiplier < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Float]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -738,11 +694,7 @@ module OpenAI class NEpochs < OpenAI::Union abstract! - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end end end @@ -751,14 +703,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + SUPERVISED = :supervised DPO = :dpo - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 74514824..285e3c70 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -105,11 +105,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format in which the generated images are returned. Must be one of `url` or @@ -118,14 +114,10 @@ module OpenAI class ResponseFormat < OpenAI::Enum abstract! - URL = T.let(:url, T.nilable(Symbol)) - B64_JSON = T.let(:b64_json, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -133,15 +125,11 @@ module OpenAI class Size < OpenAI::Enum abstract! 
- NUMBER_256X256 = T.let(:"256x256", T.nilable(Symbol)) - NUMBER_512X512 = T.let(:"512x512", T.nilable(Symbol)) - NUMBER_1024X1024 = T.let(:"1024x1024", T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index c63db2d5..b672e912 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -139,11 +139,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format in which the generated images are returned. Must be one of `url` or @@ -152,14 +148,10 @@ module OpenAI class ResponseFormat < OpenAI::Enum abstract! - URL = T.let(:url, T.nilable(Symbol)) - B64_JSON = T.let(:b64_json, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -167,15 +159,11 @@ module OpenAI class Size < OpenAI::Enum abstract! - NUMBER_256X256 = T.let(:"256x256", T.nilable(Symbol)) - NUMBER_512X512 = T.let(:"512x512", T.nilable(Symbol)) - NUMBER_1024X1024 = T.let(:"1024x1024", T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index e2d37e95..7fccc54c 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -141,11 +141,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The quality of the image that will be generated. `hd` creates images with finer @@ -154,14 +150,10 @@ module OpenAI class Quality < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + STANDARD = :standard HD = :hd - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The format in which the generated images are returned. Must be one of `url` or @@ -170,14 +162,10 @@ module OpenAI class ResponseFormat < OpenAI::Enum abstract! - URL = T.let(:url, T.nilable(Symbol)) - B64_JSON = T.let(:b64_json, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -186,17 +174,13 @@ module OpenAI class Size < OpenAI::Enum abstract! 
- NUMBER_256X256 = T.let(:"256x256", T.nilable(Symbol)) - NUMBER_512X512 = T.let(:"512x512", T.nilable(Symbol)) - NUMBER_1024X1024 = T.let(:"1024x1024", T.nilable(Symbol)) - NUMBER_1792X1024 = T.let(:"1792x1024", T.nilable(Symbol)) - NUMBER_1024X1792 = T.let(:"1024x1792", T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" + NUMBER_1792X1024 = :"1792x1024" + NUMBER_1024X1792 = :"1024x1792" end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -206,14 +190,10 @@ module OpenAI class Style < OpenAI::Enum abstract! - VIVID = T.let(:vivid, T.nilable(Symbol)) - NATURAL = T.let(:natural, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + VIVID = :vivid + NATURAL = :natural end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 08c7dccd..6716a390 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -5,14 +5,10 @@ module OpenAI class ImageModel < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 672182dd..c430dd8a 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -443,163 +443,111 @@ module OpenAI class Harassment < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class HarassmentThreatening < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class Hate < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class HateThreatening < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class Illicit < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class IllicitViolent < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class SelfHarm < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class SelfHarmInstruction < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class SelfHarmIntent < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Sexual < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class SexualMinor < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end class Violence < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class ViolenceGraphic < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + TEXT = :text IMAGE = :image - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 7f38dcfc..e00a5df9 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -88,20 +88,19 @@ module OpenAI class Input < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + String, + T::Array[String], + T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + ) + } + end + StringArray = T.type_alias { T::Array[String] } ModerationMultiModalInputArray = T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } - - class << self - sig do - override - .returns( - [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]] - ) - end - def variants - end - end end # The content moderation model you would like to use. Learn more in @@ -111,11 +110,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index e3a00bac..a34a1f36 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -5,16 +5,12 @@ module OpenAI class ModerationModel < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" TEXT_MODERATION_LATEST = :"text-moderation-latest" TEXT_MODERATION_STABLE = :"text-moderation-stable" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 8228a40a..a24328fc 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -6,11 +6,7 @@ module OpenAI class ModerationMultiModalInput < OpenAI::Union abstract! 
- class << self - sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } end end end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 3c5fb130..e6f69e82 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -50,14 +50,10 @@ module OpenAI class GenerateSummary < OpenAI::Enum abstract! - CONCISE = T.let(:concise, T.nilable(Symbol)) - DETAILED = T.let(:detailed, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + CONCISE = :concise + DETAILED = :detailed end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 8f9c3bc8..dcca18c9 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -11,15 +11,11 @@ module OpenAI class ReasoningEffort < OpenAI::Enum abstract! - LOW = T.let(:low, T.nilable(Symbol)) - MEDIUM = T.let(:medium, T.nilable(Symbol)) - HIGH = T.let(:high, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + LOW = :low + MEDIUM = :medium + HIGH = :high end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index d37038e7..40b9918f 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -59,16 +59,12 @@ module OpenAI class Environment < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MAC = :mac WINDOWS = :windows UBUNTU = :ubuntu BROWSER = :browser - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 4425bbe5..990b3527 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -66,11 +66,7 @@ module OpenAI class Content < OpenAI::Union abstract! - class << self - sig { override.returns([String, OpenAI::Models::Responses::ResponseInputMessageContentList]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)} } end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -78,29 +74,21 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user ASSISTANT = :assistant SYSTEM = :system DEVELOPER = :developer - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The type of the message input. Always `message`. class Type < OpenAI::Enum abstract! 
- MESSAGE = :message + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + MESSAGE = :message end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index d84172d8..3a6bd9db 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -91,11 +91,7 @@ module OpenAI class Filters < OpenAI::Union abstract! - class << self - sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel @@ -132,14 +128,10 @@ module OpenAI class Ranker < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 77dd539f..1162dc6a 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -82,14 +82,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 3f36d9be..b7062e57 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -495,14 +495,10 @@ module OpenAI class Reason < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -514,11 +510,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # How the model should select which tool (or tools) to use when generating a @@ -527,15 +519,10 @@ module OpenAI class ToolChoice < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + } end end @@ -549,14 +536,10 @@ module OpenAI class Truncation < OpenAI::Enum abstract! 
- AUTO = T.let(:auto, T.nilable(Symbol)) - DISABLED = T.let(:disabled, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + AUTO = :auto + DISABLED = :disabled end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 885d55f7..b68befef 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -118,6 +118,15 @@ module OpenAI class Result < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) + } + end + class Logs < OpenAI::BaseModel # The logs of the code interpreter tool call. sig { returns(String) } @@ -219,32 +228,17 @@ module OpenAI end end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] - ) - end - def variants - end - end end # The status of the code interpreter tool call. class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress INTERPRETING = :interpreting COMPLETED = :completed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 39a9b9ae..bc8782dc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -160,6 +160,22 @@ module OpenAI class Action < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait + ) + } + end + class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -213,17 +229,13 @@ module OpenAI class Button < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + LEFT = :left RIGHT = :right WHEEL = :wheel BACK = :back FORWARD = :forward - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -550,17 +562,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - ) - end - def variants - end - end end class PendingSafetyCheck < OpenAI::BaseModel @@ -606,28 +607,20 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The type of the computer call. Always `computer_call`. class Type < OpenAI::Enum abstract! - COMPUTER_CALL = :computer_call + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + COMPUTER_CALL = :computer_call end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 0c788ee4..58f3f635 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -7,15 +7,16 @@ module OpenAI class ResponseContent < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile, + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Models::Responses::ResponseOutputRefusal + ) + } end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 1ecd338c..ca3acd5c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -93,13 +93,10 @@ module OpenAI class Part < OpenAI::Union abstract! 
- class << self - sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 415388dc..65977cbe 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -93,13 +93,10 @@ module OpenAI class Part < OpenAI::Union abstract! - class << self - sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } end end end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 26531f0e..f7396668 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -382,11 +382,7 @@ module OpenAI class Input < OpenAI::Union abstract! - class << self - sig { override.returns([String, OpenAI::Models::Responses::ResponseInput]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInput)} } end # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a @@ -397,11 +393,7 @@ module OpenAI class Model < OpenAI::Union abstract! - class << self - sig { override.returns([String, Symbol]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # How the model should select which tool (or tools) to use when generating a @@ -410,15 +402,10 @@ module OpenAI class ToolChoice < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + } end end @@ -432,14 +419,10 @@ module OpenAI class Truncation < OpenAI::Enum abstract! - AUTO = T.let(:auto, T.nilable(Symbol)) - DISABLED = T.let(:disabled, T.nilable(Symbol)) + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + AUTO = :auto + DISABLED = :disabled end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index f6a6c36d..7f9b2db2 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -35,6 +35,8 @@ module OpenAI class Code < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt @@ -53,12 +55,6 @@ module OpenAI EMPTY_IMAGE_FILE = :empty_image_file FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image IMAGE_FILE_NOT_FOUND = :image_file_not_found - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 11cdc45f..17a4bdb2 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -89,17 +89,13 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed INCOMPLETE = :incomplete FAILED = :failed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Result < OpenAI::BaseModel @@ -186,11 +182,7 @@ module OpenAI class Attribute < OpenAI::Union abstract! - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 2d40704e..f3595c51 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -19,15 +19,14 @@ module OpenAI class ResponseFormatTextConfig < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::ResponseFormatText, + OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::Models::ResponseFormatJSONObject + ) + } end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 49e2b6d9..a92d38ed 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -88,15 +88,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index daf897ed..fa36c718 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -46,16 +46,12 @@ module OpenAI class Status < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed FAILED = :failed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index f36ae216..363cdad4 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -14,15 +14,11 @@ module OpenAI class ResponseIncludable < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index 4a8628f6..f49d5163 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -44,14 +44,10 @@ module OpenAI class Format < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + MP3 = :mp3 WAV = :wav - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index c64e9858..957c8020 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -7,15 +7,14 @@ module OpenAI class ResponseInputContent < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + } end end end diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 2e1819a4..f4e450ae 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -69,15 +69,11 @@ module OpenAI class Detail < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + HIGH = :high LOW = :low AUTO = :auto - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index d0a5fdcc..fc0ede1f 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -11,6 +11,24 @@ module OpenAI class ResponseInputItem < OpenAI::Union abstract! 
+ Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + } + end + class Message < OpenAI::BaseModel # A list of one or many input items to the model, containing different content # types. @@ -81,15 +99,11 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user SYSTEM = :system DEVELOPER = :developer - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -97,28 +111,20 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The type of the message input. Always set to `message`. class Type < OpenAI::Enum abstract! - MESSAGE = :message + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + MESSAGE = :message end end @@ -308,15 +314,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -392,15 +394,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -432,17 +430,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 58529788..f6e9982c 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -150,6 +150,21 @@ module OpenAI class Data < OpenAI::Union abstract! 
+ Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + ) + } + end + class Message < OpenAI::BaseModel # The unique ID of the message input. sig { returns(String) } @@ -233,15 +248,11 @@ module OpenAI class Role < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + USER = :user SYSTEM = :system DEVELOPER = :developer - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -249,28 +260,20 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # The type of the message input. Always set to `message`. class Type < OpenAI::Enum abstract! - MESSAGE = :message + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + MESSAGE = :message end end @@ -459,15 +462,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -541,26 +540,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end - end - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput] - ) - end - def variants end end end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index 85e300c7..75949da7 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -7,15 +7,17 @@ module OpenAI class ResponseOutputItem < OpenAI::Union abstract! 
- class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseReasoningItem + ) + } end end end diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 54501157..7fb7026c 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -94,13 +94,10 @@ module OpenAI class Content < OpenAI::Union abstract! - class << self - sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } end end @@ -109,15 +106,11 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index a7745ea7..e04d3988 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -101,6 +101,16 @@ module OpenAI class Annotation < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + ) + } + end + class FileCitation < OpenAI::BaseModel # The ID of the file. sig { returns(String) } @@ -245,17 +255,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 9207ae25..66b69b58 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -105,15 +105,11 @@ module OpenAI class Status < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 9fcb3ab3..f6a3f6ce 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -8,16 +8,12 @@ module OpenAI class ResponseStatus < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress INCOMPLETE = :incomplete - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 26cf28f5..0b1bb12f 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -7,15 +7,43 @@ module OpenAI class ResponseStreamEvent < OpenAI::Union abstract! - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseAudioDeltaEvent, + OpenAI::Models::Responses::ResponseAudioDoneEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + 
OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Models::Responses::ResponseCompletedEvent, + OpenAI::Models::Responses::ResponseContentPartAddedEvent, + OpenAI::Models::Responses::ResponseContentPartDoneEvent, + OpenAI::Models::Responses::ResponseCreatedEvent, + OpenAI::Models::Responses::ResponseErrorEvent, + OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Models::Responses::ResponseInProgressEvent, + OpenAI::Models::Responses::ResponseFailedEvent, + OpenAI::Models::Responses::ResponseIncompleteEvent, + OpenAI::Models::Responses::ResponseOutputItemAddedEvent, + OpenAI::Models::Responses::ResponseOutputItemDoneEvent, + OpenAI::Models::Responses::ResponseRefusalDeltaEvent, + OpenAI::Models::Responses::ResponseRefusalDoneEvent, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Models::Responses::ResponseTextDeltaEvent, + OpenAI::Models::Responses::ResponseTextDoneEvent, + OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent + ) + } end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 8208d88f..00dfd4c2 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -131,6 +131,16 @@ module OpenAI class Annotation < OpenAI::Union abstract! + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + ) + } + end + class FileCitation < OpenAI::BaseModel # The ID of the file. sig { returns(String) } @@ -275,17 +285,6 @@ module OpenAI def to_hash end end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - ) - end - def variants - end - end end end end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 92e9d728..e1477b05 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -9,15 +9,15 @@ module OpenAI class Tool < OpenAI::Union abstract! 
- class << self - sig do - override - .returns( - [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] - ) - end - def variants - end + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) + } end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index f2fdff28..c047abb7 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -14,15 +14,11 @@ module OpenAI class ToolChoiceOptions < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + NONE = :none AUTO = :auto REQUIRED = :required - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 7f039150..8459293b 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -41,16 +41,12 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + FILE_SEARCH = :file_search WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 71b37e66..cf2fb2f6 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -71,14 +71,10 @@ module OpenAI class Type < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + WEB_SEARCH_PREVIEW = :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # High level guidance for the amount of context window space to use for the @@ -86,15 +82,11 @@ module OpenAI class SearchContextSize < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + LOW = :low MEDIUM = :medium HIGH = :high - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 6d1b30b0..1da23f04 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -127,16 +127,12 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + PENDING = :pending COMPLETED = :completed CANCELLED = :cancelled EXPIRED = :expired - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index bf4bb14f..642c27bf 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -241,15 +241,11 @@ module OpenAI class Status < OpenAI::Enum abstract! 
+ Value = type_template(:out) { {fixed: Symbol} } + EXPIRED = :expired IN_PROGRESS = :in_progress COMPLETED = :completed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class ExpiresAfter < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 91af6210..5749ce8a 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -83,14 +83,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 987867c8..91a2ec22 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -92,24 +92,16 @@ module OpenAI class Query < OpenAI::Union abstract! - StringArray = T.type_alias { T::Array[String] } + Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - end - end + StringArray = T.type_alias { T::Array[String] } end # A filter to apply based on file attributes. class Filters < OpenAI::Union abstract! - class << self - sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel @@ -141,14 +133,10 @@ module OpenAI class Ranker < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index f430c4b3..a2443437 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -89,11 +89,7 @@ module OpenAI class Attribute < OpenAI::Union abstract! - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end class Content < OpenAI::BaseModel @@ -127,13 +123,9 @@ module OpenAI class Type < OpenAI::Enum abstract! - TEXT = :text + Value = type_template(:out) { {fixed: Symbol} } - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end + TEXT = :text end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 0553e577..4be0dcb6 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -101,11 +101,7 @@ module OpenAI class Attribute < OpenAI::Union abstract! 
- class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index c1cde336..c7da2f41 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -104,16 +104,12 @@ module OpenAI class Filter < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed CANCELLED = :cancelled - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -121,14 +117,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 139b8f61..57e35c52 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -101,11 +101,7 @@ module OpenAI class Attribute < OpenAI::Union abstract! - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 6613871b..0f62a65d 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -94,16 +94,12 @@ module OpenAI class Filter < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed CANCELLED = :cancelled - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -111,14 +107,10 @@ module OpenAI class Order < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + ASC = :asc DESC = :desc - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index ef0fccc3..e3693815 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -58,11 +58,7 @@ module OpenAI class Attribute < OpenAI::Union abstract! 
- class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 2e8a9c1a..0d906238 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -197,15 +197,11 @@ module OpenAI class Code < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + SERVER_ERROR = :server_error UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end @@ -215,26 +211,18 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled FAILED = :failed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end class Attribute < OpenAI::Union abstract! - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end - end + Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 497044cd..3ee9a72d 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -176,16 +176,12 @@ module OpenAI class Status < OpenAI::Enum abstract! + Value = type_template(:out) { {fixed: Symbol} } + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled FAILED = :failed - - class << self - sig { override.returns(T::Array[Symbol]) } - def values - end - end end end end diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 3da7b96f..857d4573 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -118,7 +118,7 @@ module OpenAI ) -> ([true, top, nil] | [false, bool, Integer]) end - class ArrayOf + class ArrayOf[Elem] include OpenAI::Converter def ===: (top other) -> bool @@ -133,7 +133,7 @@ module OpenAI top value ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> OpenAI::Converter::input + def item_type: -> Elem def initialize: ( ::Hash[Symbol, top] @@ -143,7 +143,7 @@ module OpenAI ) -> void end - class HashOf + class HashOf[Elem] include OpenAI::Converter def ===: (top other) -> bool @@ -158,7 +158,7 @@ module OpenAI top value ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> OpenAI::Converter::input + def item_type: -> Elem def initialize: ( ::Hash[Symbol, top] From 3b68b30c99bca011baf463085218c1e105d269dd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 03:24:18 +0000 Subject: [PATCH 019/295] chore: sdk client internal refactoring (#21) --- lib/openai/base_client.rb | 47 +++++++++++++------------ lib/openai/cursor_page.rb | 3 +- lib/openai/pooled_net_requester.rb | 19 ++++++---- lib/openai/util.rb | 4 ++- rbi/lib/openai/base_client.rbi | 10 ++++++ rbi/lib/openai/base_model.rbi | 17 +++------ rbi/lib/openai/pooled_net_requester.rbi | 6 ++-- rbi/lib/openai/util.rbi | 8 ++--- sig/openai/base_client.rbs | 5 +++ sig/openai/base_model.rbs | 2 +- 
sig/openai/pooled_net_requester.rbs | 9 +++-- sig/openai/util.rbs | 4 +-- test/openai/client_test.rb | 29 ++------------- test/openai/util_test.rb | 4 +++ 14 files changed, 83 insertions(+), 84 deletions(-) diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb index 5c609ac7..b5cc5490 100644 --- a/lib/openai/base_client.rb +++ b/lib/openai/base_client.rb @@ -124,6 +124,20 @@ def follow_redirect(request, status:, response_headers:) request end + + # @api private + # + # @param status [Integer, OpenAI::APIConnectionError] + # @param stream [Enumerable, nil] + def reap_connection!(status, stream:) + case status + in (..199) | (300..499) + stream&.each { next } + in OpenAI::APIConnectionError | (500..) + OpenAI::Util.close_fused!(stream) + else + end + end end # @api private @@ -321,28 +335,23 @@ def initialize( end begin - response, stream = @requester.execute(input) - status = Integer(response.code) + status, response, stream = @requester.execute(input) rescue OpenAI::APIConnectionError => e status = e end - # normally we want to drain the response body and reuse the HTTP session by clearing the socket buffers - # unless we hit a server error - srv_fault = (500...).include?(status) - case status in ..299 [status, response, stream] in 300..399 if redirect_count >= self.class::MAX_REDIRECTS - message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." + self.class.reap_connection!(status, stream: stream) - stream.each { next } + message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." raise OpenAI::APIConnectionError.new(url: url, message: message) in 300..399 - request = self.class.follow_redirect(request, status: status, response_headers: response) + self.class.reap_connection!(status, stream: stream) - stream.each { next } + request = self.class.follow_redirect(request, status: status, response_headers: response) send_request( request, redirect_count: redirect_count + 1, @@ -352,12 +361,10 @@ def initialize( in OpenAI::APIConnectionError if retry_count >= max_retries raise status in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response) - decoded = OpenAI::Util.decode_content(response, stream: stream, suppress_error: true) - - if srv_fault - OpenAI::Util.close_fused!(stream) - else - stream.each { next } + decoded = Kernel.then do + OpenAI::Util.decode_content(response, stream: stream, suppress_error: true) + ensure + self.class.reap_connection!(status, stream: stream) end raise OpenAI::APIStatusError.for( @@ -368,13 +375,9 @@ def initialize( response: response ) in (400..) | OpenAI::APIConnectionError - delay = retry_delay(response, retry_count: retry_count) + self.class.reap_connection!(status, stream: stream) - if srv_fault - OpenAI::Util.close_fused!(stream) - else - stream&.each { next } - end + delay = retry_delay(response, retry_count: retry_count) sleep(delay) send_request( diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 92a6b024..35b585e9 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -68,7 +68,8 @@ def next_page? # @return [OpenAI::CursorPage] def next_page unless next_page? - raise RuntimeError.new("No more pages available. Please check #next_page? before calling ##{__method__}") + message = "No more pages available. Please check #next_page? 
before calling ##{__method__}" + raise RuntimeError.new(message) end req = OpenAI::Util.deep_merge(@req, {query: {after: data&.last&.id}}) diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index 0e16cc88..4b0ae742 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -61,6 +61,7 @@ def build_request(request, &) case body in nil + nil in String req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) @@ -79,9 +80,11 @@ def build_request(request, &) # @api private # # @param url [URI::Generic] + # @param deadline [Float] # @param blk [Proc] - private def with_pool(url, &) + private def with_pool(url, deadline:, &blk) origin = OpenAI::Util.uri_origin(url) + timeout = deadline - OpenAI::Util.monotonic_secs pool = @mutex.synchronize do @pools[origin] ||= ConnectionPool.new(size: @size) do @@ -89,7 +92,7 @@ def build_request(request, &) end end - pool.with(&) + pool.with(timeout: timeout, &blk) end # @api private @@ -106,14 +109,14 @@ def build_request(request, &) # # @option request [Float] :deadline # - # @return [Array(Net::HTTPResponse, Enumerable)] + # @return [Array(Integer, Net::HTTPResponse, Enumerable)] def execute(request) url, deadline = request.fetch_values(:url, :deadline) eof = false finished = false enum = Enumerator.new do |y| - with_pool(url) do |conn| + with_pool(url, deadline: deadline) do |conn| next if finished req = self.class.build_request(request) do @@ -125,7 +128,7 @@ def execute(request) self.class.calibrate_socket_timeout(conn, deadline) conn.request(req) do |rsp| - y << [conn, rsp] + y << [conn, req, rsp] break if finished rsp.read_body do |bytes| @@ -137,9 +140,11 @@ def execute(request) eof = true end end + rescue Timeout::Error + raise OpenAI::APITimeoutError end - conn, response = enum.next + conn, _, response = enum.next body = OpenAI::Util.fused_enum(enum, external: true) do finished = true tap do @@ -149,7 +154,7 @@ def execute(request) end conn.finish if !eof && conn&.started? 
end - [response, (response.body = body)] + [Integer(response.code), response, (response.body = body)] end # @api private diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 2f4fecde..ef208d81 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -57,7 +57,7 @@ class << self # # @param input [Object] # - # @return [Boolean, Object] + # @return [Boolean] def primitive?(input) case input in true | false | Integer | Float | Symbol | String @@ -627,6 +627,8 @@ def close_fused!(enum) # # @param enum [Enumerable, nil] # @param blk [Proc] + # + # @return [Enumerable] def chain_fused(enum, &blk) iter = Enumerator.new { blk.call(_1) } fused_enum(iter) { close_fused!(enum) } diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index 85abd5cb..6b76b254 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -67,6 +67,16 @@ module OpenAI end def follow_redirect(request, status:, response_headers:) end + + # @api private + sig do + params( + status: T.any(Integer, OpenAI::APIConnectionError), + stream: T.nilable(T::Enumerable[String]) + ).void + end + def reap_connection!(status, stream:) + end end sig { returns(T.anything) } diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 3a7913f6..06bae256 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -228,7 +228,7 @@ module OpenAI # @api private # # All of the specified variant info for this union. - sig { returns(T::Array[[T.nilable(Symbol), Proc]]) } + sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(Variants)]]) } private def known_variants end @@ -250,17 +250,8 @@ module OpenAI # @api private sig do params( - key: T.any( - Symbol, - T::Hash[Symbol, T.anything], - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: T.any( - T::Hash[Symbol, T.anything], - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ) + key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants), + spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants) ) .void end @@ -268,7 +259,7 @@ module OpenAI end # @api private - sig { params(value: T.anything).returns(T.nilable(OpenAI::Converter::Input)) } + sig { params(value: T.anything).returns(T.nilable(Variants)) } private def resolve_variant(value) end end diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index e940c4f4..9297bdea 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -27,14 +27,14 @@ module OpenAI end # @api private - sig { params(url: URI::Generic, blk: T.proc.params(arg0: Net::HTTP).void).void } - private def with_pool(url, &blk) + sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } + private def with_pool(url, deadline:, &blk) end # @api private sig do params(request: OpenAI::PooledNetRequester::RequestShape) - .returns([Net::HTTPResponse, T::Enumerable[String]]) + .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) end def execute(request) end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 8faebf37..22824de5 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -22,7 +22,7 @@ module OpenAI class << self # @api private - sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } + sig { params(input: T.anything).returns(T::Boolean) } def primitive?(input) end @@ -239,10 +239,8 @@ module OpenAI # 
@api private sig do - params( - enum: T.nilable(T::Enumerable[T.anything]), - blk: T.proc.params(arg0: Enumerator::Yielder).void - ).void + params(enum: T.nilable(T::Enumerable[T.anything]), blk: T.proc.params(arg0: Enumerator::Yielder).void) + .returns(T::Enumerable[T.anything]) end def chain_fused(enum, &blk) end diff --git a/sig/openai/base_client.rbs b/sig/openai/base_client.rbs index d685733f..0c19b54e 100644 --- a/sig/openai/base_client.rbs +++ b/sig/openai/base_client.rbs @@ -43,6 +43,11 @@ module OpenAI response_headers: ::Hash[String, String] ) -> OpenAI::BaseClient::request_input + def self.reap_connection!: ( + Integer | OpenAI::APIConnectionError status, + stream: Enumerable[String]? + ) -> void + # @api private attr_accessor requester: top diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 857d4573..574847b4 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -85,7 +85,7 @@ module OpenAI class Union extend OpenAI::Converter - private def self.known_variants: -> ::Array[[Symbol?, Proc]] + private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Converter::input)]] def self.derefed_variants: -> ::Array[[Symbol?, top]] diff --git a/sig/openai/pooled_net_requester.rbs b/sig/openai/pooled_net_requester.rbs index 9e7daafb..c9f6520d 100644 --- a/sig/openai/pooled_net_requester.rbs +++ b/sig/openai/pooled_net_requester.rbs @@ -19,11 +19,16 @@ module OpenAI (String arg0) -> void } -> top - private def with_pool: (URI::Generic url) { (top arg0) -> void } -> void + private def with_pool: ( + URI::Generic url, + deadline: Float + ) { + (top arg0) -> void + } -> void def execute: ( OpenAI::PooledNetRequester::request request - ) -> [top, Enumerable[String]] + ) -> [Integer, top, Enumerable[String]] def initialize: (size: Integer) -> void end diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs index 065ab7d1..375f8324 100644 --- a/sig/openai/util.rbs +++ b/sig/openai/util.rbs @@ -6,7 +6,7 @@ module OpenAI def self?.os: -> String - def self?.primitive?: (top input) -> (bool | top) + def self?.primitive?: (top input) -> bool def self?.coerce_boolean: (top input) -> (bool | top) @@ -118,7 +118,7 @@ module OpenAI Enumerable[top]? enum ) { (Enumerator::Yielder arg0) -> void - } -> void + } -> Enumerable[top] type server_sent_event = { event: String?, data: String?, id: String?, retry: Integer? } diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 4147e2ac..7d0758fa 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -18,32 +18,6 @@ def test_raises_on_missing_non_nullable_opts assert_match(/is required/, e.message) end - class MockResponse - # @return [Integer] - attr_reader :code - - # @param code [Integer] - # @param headers [Hash{String=>String}] - def initialize(code, headers) - @code = code - @headers = {"content-type" => "application/json", **headers} - end - - # @param header [String] - # - # @return [String, nil] - def [](header) - @headers[header] - end - - # @param header [String] - # - # @return [Boolean] - def key?(header) - @headers.key?(header) - end - end - class MockRequester # @return [Integer] attr_reader :response_code @@ -71,7 +45,8 @@ def initialize(response_code, response_headers, response_data) def execute(req) # Deep copy the request because it is mutated on each retry. 
attempts.push(Marshal.load(Marshal.dump(req))) - [MockResponse.new(response_code, response_headers), response_data.grapheme_clusters] + headers = {"content-type" => "application/json", **response_headers} + [response_code, headers, response_data.grapheme_clusters] end end diff --git a/test/openai/util_test.rb b/test/openai/util_test.rb index d319e2f9..476e16af 100644 --- a/test/openai/util_test.rb +++ b/test/openai/util_test.rb @@ -161,7 +161,9 @@ class OpenAI::Test::UtilFormDataEncodingTest < Minitest::Test class FakeCGI < CGI def initialize(headers, io) @ctype = headers["content-type"] + # rubocop:disable Lint/EmptyBlock @io = OpenAI::Util::ReadIOAdapter.new(io) {} + # rubocop:enable Lint/EmptyBlock @c_len = io.to_a.join.bytesize.to_s super() end @@ -217,7 +219,9 @@ def test_copy_read } cases.each do |input, expected| io = StringIO.new + # rubocop:disable Lint/EmptyBlock adapter = OpenAI::Util::ReadIOAdapter.new(input) {} + # rubocop:enable Lint/EmptyBlock IO.copy_stream(adapter, io) assert_equal(expected, io.string) end From a1ac1a19276cde2ed80ec752a50fb3e03b90f2a6 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 16 Mar 2025 14:29:26 +0000 Subject: [PATCH 020/295] chore: do not label modules as abstract (#22) --- .rubocop.yml | 5 +++++ lib/openai/base_model.rb | 2 -- lib/openai/base_page.rb | 2 -- lib/openai/extern.rb | 2 -- lib/openai/request_options.rb | 2 -- rbi/lib/openai/base_client.rbi | 1 + rbi/lib/openai/base_model.rbi | 13 +++++++++++-- rbi/lib/openai/base_page.rbi | 2 -- rbi/lib/openai/extern.rbi | 1 - rbi/lib/openai/pooled_net_requester.rbi | 1 + rbi/lib/openai/request_options.rbi | 2 -- rbi/lib/openai/util.rbi | 2 ++ 12 files changed, 20 insertions(+), 15 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 17e6abbd..e0a360b6 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -58,6 +58,11 @@ Layout/MultilineMethodParameterLineBreaks: Layout/SpaceInsideHashLiteralBraces: EnforcedStyle: no_space +# This option occasionally mangles identifier names +Lint/DeprecatedConstants: + Exclude: + - "**/*.rbi" + # Fairly useful in tests for pattern assertions. Lint/EmptyInPattern: Exclude: diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 19722030..4ef0106e 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -2,8 +2,6 @@ module OpenAI # @api private - # - # @abstract module Converter # rubocop:disable Lint/UnusedMethodArgument diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index c7489dfe..c8a9058a 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -1,8 +1,6 @@ # frozen_string_literal: true module OpenAI - # @abstract - # # @example # ```ruby # if page.has_next? diff --git a/lib/openai/extern.rb b/lib/openai/extern.rb index c8e115d3..1ab41492 100644 --- a/lib/openai/extern.rb +++ b/lib/openai/extern.rb @@ -2,8 +2,6 @@ module OpenAI # @api private - # - # @abstract module Extern end end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index 405cf3c3..a3245591 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -2,8 +2,6 @@ module OpenAI # @api private - # - # @abstract module RequestParameters # @!parse # # Options to specify HTTP behaviour for this request. 
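The hunks above (from "do not label modules as abstract", #22) drop the `@abstract` tag from mixin modules such as `OpenAI::Extern` and `OpenAI::RequestParameters`; the rbi hunks that follow do the same for `abstract!`, while classes such as `BaseClient` keep the label. One plausible reading of the distinction, sketched below with illustrative names and assuming the `sorbet-runtime` gem is available (this is not the SDK's own code):

    require "sorbet-runtime"

    class ExampleBaseClient
      extend T::Helpers
      abstract! # sorbet-runtime makes ExampleBaseClient.new raise at runtime
    end

    module ExampleRequestParameters
      # A module is never instantiated, so `abstract!` would add no comparable
      # runtime guarantee here; the label is redundant on a mixin.
    end

    begin
      ExampleBaseClient.new
    rescue StandardError => e
      puts e.message # reports an attempt to instantiate an abstract class
    end

Sorbet still type-checks the modules' signatures either way; only the redundant label goes away.
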
diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index 6b76b254..e4208ab7 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -1,6 +1,7 @@ # typed: strong module OpenAI + # @api private class BaseClient abstract! diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 06bae256..8840d3a8 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -3,8 +3,6 @@ module OpenAI # @api private module Converter - abstract! - Input = T.type_alias { T.any(OpenAI::Converter, T::Class[T.anything]) } # @api private @@ -78,6 +76,8 @@ module OpenAI end end + # @api private + # # When we don't know what to expect for the value. class Unknown extend OpenAI::Converter @@ -115,6 +115,8 @@ module OpenAI end end + # @api private + # # Ruby has no Boolean class; this is something for models to refer to. class BooleanModel extend OpenAI::Converter @@ -156,6 +158,8 @@ module OpenAI end end + # @api private + # # A value from among a specified list of options. OpenAPI enum values map to Ruby # values in the SDK as follows: # @@ -217,6 +221,7 @@ module OpenAI end end + # @api private class Union extend OpenAI::Converter @@ -294,6 +299,8 @@ module OpenAI end end + # @api private + # # Array of items of a given type. class ArrayOf include OpenAI::Converter @@ -359,6 +366,8 @@ module OpenAI end end + # @api private + # # Hash of items of a given type. class HashOf include OpenAI::Converter diff --git a/rbi/lib/openai/base_page.rbi b/rbi/lib/openai/base_page.rbi index ad3a2e19..c5dc2a2d 100644 --- a/rbi/lib/openai/base_page.rbi +++ b/rbi/lib/openai/base_page.rbi @@ -2,8 +2,6 @@ module OpenAI module BasePage - abstract! - Elem = type_member(:out) sig { overridable.returns(T::Boolean) } diff --git a/rbi/lib/openai/extern.rbi b/rbi/lib/openai/extern.rbi index b47bd767..e5e18a8d 100644 --- a/rbi/lib/openai/extern.rbi +++ b/rbi/lib/openai/extern.rbi @@ -3,6 +3,5 @@ module OpenAI # @api private module Extern - abstract! end end diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index 9297bdea..2cdf7b6b 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -1,6 +1,7 @@ # typed: strong module OpenAI + # @api private class PooledNetRequester RequestShape = T.type_alias do {method: Symbol, url: URI::Generic, headers: T::Hash[String, String], body: T.anything, deadline: Float} diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index efd2de36..ded742c2 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -3,8 +3,6 @@ module OpenAI # @api private module RequestParameters - abstract! - # Options to specify HTTP behaviour for this request. 
sig { returns(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) } def request_options diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 22824de5..fe12b3ff 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -160,6 +160,8 @@ module OpenAI end end + # @api private + # # An adapter that satisfies the IO interface required by `::IO.copy_stream` class ReadIOAdapter # @api private From 932e05d9d53fbd11f8e491cff0983152d4b2bf31 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 14:28:58 +0000 Subject: [PATCH 021/295] fix: enums should not unnecessarily convert non-members to symbol type (#23) --- .../models/beta/assistant_update_params.rb | 144 ++++++++++++------ lib/openai/models/completion_create_params.rb | 32 ++-- .../models/fine_tuning/job_create_params.rb | 34 ++--- lib/openai/resources/beta/assistants.rb | 2 +- lib/openai/resources/completions.rb | 4 +- lib/openai/resources/fine_tuning/jobs.rb | 2 +- .../models/beta/assistant_update_params.rbi | 42 ----- .../models/completion_create_params.rbi | 15 -- .../models/fine_tuning/job_create_params.rbi | 13 -- .../models/beta/assistant_update_params.rbs | 129 ++++++++-------- .../models/completion_create_params.rbs | 17 +-- .../models/fine_tuning/job_create_params.rbs | 23 ++- test/openai/client_test.rb | 30 ++-- test/openai/resources/audio/speech_test.rb | 2 +- test/openai/resources/beta/assistants_test.rb | 2 +- .../openai/resources/chat/completions_test.rb | 2 +- test/openai/resources/completions_test.rb | 6 +- test/openai/resources/embeddings_test.rb | 2 +- .../openai/resources/fine_tuning/jobs_test.rb | 2 +- test/openai/resources/responses_test.rb | 2 +- 20 files changed, 231 insertions(+), 274 deletions(-) diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 5a89f162..494df652 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -39,11 +39,11 @@ class AssistantUpdateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels, nil] + # @return [String, Symbol, nil] optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } # @!parse - # # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels] + # # @return [String, Symbol] # attr_writer :model # @!attribute name @@ -131,7 +131,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # # @param description [String, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels] + # # @param model [String, Symbol] # # @param name [String, nil] # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] @@ -169,56 +169,106 @@ class AssistantUpdateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
class Model < OpenAI::Union + # @!group + + O3_MINI = :"o3-mini" + O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" + O1 = :o1 + O1_2024_12_17 = :"o1-2024-12-17" + GPT_4O = :"gpt-4o" + GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" + GPT_4O_MINI = :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW = :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO = :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" + GPT_4 = :"gpt-4" + GPT_4_0314 = :"gpt-4-0314" + GPT_4_0613 = :"gpt-4-0613" + GPT_4_32K = :"gpt-4-32k" + GPT_4_32K_0314 = :"gpt-4-32k-0314" + GPT_4_32K_0613 = :"gpt-4-32k-0613" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + + # @!endgroup + variant String - # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. - variant enum: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels } + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI - # @abstract - # - # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - class AssistantSupportedModels < OpenAI::Enum - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - - finalize! 
- end + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 + + variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 # @!parse # class << self - # # @return [Array(String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels)] + # # @return [Array(String, Symbol)] # def variants; end # end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index a65b7051..c1c44876 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -14,7 +14,7 @@ class CompletionCreateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
#
-      # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset]
+      # @return [String, Symbol]
       required :model, union: -> { OpenAI::Models::CompletionCreateParams::Model }

       # @!attribute prompt
@@ -183,7 +183,7 @@ class CompletionCreateParams < OpenAI::BaseModel
       #   attr_writer :user

       # @!parse
-      #   # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset]
+      #   # @param model [String, Symbol]
       #   # @param prompt [String, Array<String>, Array<Integer>, Array<Array<Integer>>, nil]
       #   # @param best_of [Integer, nil]
       #   # @param echo [Boolean, nil]
@@ -236,29 +236,25 @@ class CompletionCreateParams < OpenAI::BaseModel
       #   [Model overview](https://platform.openai.com/docs/models) for descriptions of
       #   them.
       class Model < OpenAI::Union
+        # @!group
+
+        GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct"
+        DAVINCI_002 = :"davinci-002"
+        BABBAGE_002 = :"babbage-002"
+
+        # @!endgroup
+
         variant String

-        # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them.
-        variant enum: -> { OpenAI::Models::CompletionCreateParams::Model::Preset }
+        variant const: OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT

-        # @abstract
-        #
-        # ID of the model to use. You can use the
-        # [List models](https://platform.openai.com/docs/api-reference/models/list) API to
-        # see all of your available models, or see our
-        # [Model overview](https://platform.openai.com/docs/models) for descriptions of
-        # them.
-        class Preset < OpenAI::Enum
-          GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct"
-          DAVINCI_002 = :"davinci-002"
-          BABBAGE_002 = :"babbage-002"
+        variant const: OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002

-          finalize!
-        end
+        variant const: OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002

         # @!parse
         #   class << self
-        #     # @return [Array(String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset)]
+        #     # @return [Array(String, Symbol)]
         #     def variants; end
         #   end
       end
diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb
index db4866c1..6900ac01 100644
--- a/lib/openai/models/fine_tuning/job_create_params.rb
+++ b/lib/openai/models/fine_tuning/job_create_params.rb
@@ -12,7 +12,7 @@ class JobCreateParams < OpenAI::BaseModel
       #   The name of the model to fine-tune. You can select one of the
       #   [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
       #
-      # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::Preset]
+      # @return [String, Symbol]
       required :model, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Model }

       # @!attribute training_file
@@ -113,7 +113,7 @@ class JobCreateParams < OpenAI::BaseModel
       optional :validation_file, String, nil?: true

       # @!parse
-      #   # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::Preset]
+      #   # @param model [String, Symbol]
       #   # @param training_file [String]
       #   # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters]
       #   # @param integrations [Array<OpenAI::Models::FineTuning::JobCreateParams::Integration>, nil]
@@ -147,28 +147,28 @@ class JobCreateParams < OpenAI::BaseModel
       # The name of the model to fine-tune. You can select one of the
       # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
class Model < OpenAI::Union + # @!group + + BABBAGE_002 = :"babbage-002" + DAVINCI_002 = :"davinci-002" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_4O_MINI = :"gpt-4o-mini" + + # @!endgroup + variant String - # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - variant enum: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::Preset } + variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 - # @abstract - # - # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Preset < OpenAI::Enum - BABBAGE_002 = :"babbage-002" - DAVINCI_002 = :"davinci-002" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_4O_MINI = :"gpt-4o-mini" + variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 - finalize! - end + variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO + + variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI # @!parse # class << self - # # @return [Array(String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::Preset)] + # # @return [Array(String, Symbol)] # def variants; end # end end diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 5911fe3f..8fbb01b9 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -125,7 +125,7 @@ def retrieve(assistant_id, params = {}) # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. # - # @option params [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::AssistantSupportedModels] :model ID of the model to use. You can use the + # @option params [String, Symbol] :model ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index a16f4bcf..79cc6805 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -7,7 +7,7 @@ class Completions # # @param params [OpenAI::Models::CompletionCreateParams, Hash{Symbol=>Object}] . # - # @option params [String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset] :model ID of the model to use. You can use the + # @option params [String, Symbol] :model ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of @@ -129,7 +129,7 @@ def create(params) # # @param params [OpenAI::Models::CompletionCreateParams, Hash{Symbol=>Object}] . # - # @option params [String, Symbol, OpenAI::Models::CompletionCreateParams::Model::Preset] :model ID of the model to use. You can use the + # @option params [String, Symbol] :model ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 177d978b..9d00777f 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -17,7 +17,7 @@ class Jobs # # @param params [OpenAI::Models::FineTuning::JobCreateParams, Hash{Symbol=>Object}] . # - # @option params [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::Preset] :model The name of the model to fine-tune. You can select one of the + # @option params [String, Symbol] :model The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). # # @option params [String] :training_file The ID of an uploaded file that contains training data. diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 44b8293f..e05cff62 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -307,48 +307,6 @@ module OpenAI abstract! Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - - # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - class AssistantSupportedModels < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - end end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index f2ca35dd..2e1185c5 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -341,21 +341,6 @@ module OpenAI abstract! Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - - # ID of the model to use. 
You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - class Preset < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" - DAVINCI_002 = :"davinci-002" - BABBAGE_002 = :"babbage-002" - end end # The prompt(s) to generate completions for, encoded as a string, array of diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 1af7bd79..224616d6 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -192,19 +192,6 @@ module OpenAI abstract! Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - - # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Preset < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - BABBAGE_002 = :"babbage-002" - DAVINCI_002 = :"davinci-002" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_4O_MINI = :"gpt-4o-mini" - end end class Hyperparameters < OpenAI::BaseModel diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index f2f849d4..39a0d8e2 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -75,77 +75,68 @@ module OpenAI type model = String - | OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models + | :"o3-mini" + | :"o3-mini-2025-01-31" + | :o1 + | :"o1-2024-12-17" + | :"gpt-4o" + | :"gpt-4o-2024-11-20" + | :"gpt-4o-2024-08-06" + | :"gpt-4o-2024-05-13" + | :"gpt-4o-mini" + | :"gpt-4o-mini-2024-07-18" + | :"gpt-4.5-preview" + | :"gpt-4.5-preview-2025-02-27" + | :"gpt-4-turbo" + | :"gpt-4-turbo-2024-04-09" + | :"gpt-4-0125-preview" + | :"gpt-4-turbo-preview" + | :"gpt-4-1106-preview" + | :"gpt-4-vision-preview" + | :"gpt-4" + | :"gpt-4-0314" + | :"gpt-4-0613" + | :"gpt-4-32k" + | :"gpt-4-32k-0314" + | :"gpt-4-32k-0613" + | :"gpt-3.5-turbo" + | :"gpt-3.5-turbo-16k" + | :"gpt-3.5-turbo-0613" + | :"gpt-3.5-turbo-1106" + | :"gpt-3.5-turbo-0125" + | :"gpt-3.5-turbo-16k-0613" class Model < OpenAI::Union - type assistant_supported_models = - :"o3-mini" - | :"o3-mini-2025-01-31" - | :o1 - | :"o1-2024-12-17" - | :"gpt-4o" - | :"gpt-4o-2024-11-20" - | :"gpt-4o-2024-08-06" - | :"gpt-4o-2024-05-13" - | :"gpt-4o-mini" - | :"gpt-4o-mini-2024-07-18" - | :"gpt-4.5-preview" - | :"gpt-4.5-preview-2025-02-27" - | :"gpt-4-turbo" - | :"gpt-4-turbo-2024-04-09" - | :"gpt-4-0125-preview" - | :"gpt-4-turbo-preview" - | :"gpt-4-1106-preview" - | :"gpt-4-vision-preview" - | :"gpt-4" - | :"gpt-4-0314" - | :"gpt-4-0613" - | :"gpt-4-32k" - | :"gpt-4-32k-0314" - | :"gpt-4-32k-0613" - | :"gpt-3.5-turbo" - | :"gpt-3.5-turbo-16k" - | :"gpt-3.5-turbo-0613" - | :"gpt-3.5-turbo-1106" - | :"gpt-3.5-turbo-0125" - | :"gpt-3.5-turbo-16k-0613" - - class AssistantSupportedModels < OpenAI::Enum - O3_MINI: :"o3-mini" - O3_MINI_2025_01_31: :"o3-mini-2025-01-31" - O1: :o1 - O1_2024_12_17: :"o1-2024-12-17" - GPT_4O: :"gpt-4o" - GPT_4O_2024_11_20: :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06: :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13: :"gpt-4o-2024-05-13" - GPT_4O_MINI: :"gpt-4o-mini" - 
GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW: :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27: :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO: :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09: :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW: :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW: :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW: :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW: :"gpt-4-vision-preview" - GPT_4: :"gpt-4" - GPT_4_0314: :"gpt-4-0314" - GPT_4_0613: :"gpt-4-0613" - GPT_4_32K: :"gpt-4-32k" - GPT_4_32K_0314: :"gpt-4-32k-0314" - GPT_4_32K_0613: :"gpt-4-32k-0613" - GPT_3_5_TURBO: :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K: :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613: :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106: :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613" - - def self.values: -> ::Array[OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models] - end - - def self.variants: -> [String, OpenAI::Models::Beta::AssistantUpdateParams::Model::assistant_supported_models] + def self.variants: -> [String, (:"o3-mini" + | :"o3-mini-2025-01-31" + | :o1 + | :"o1-2024-12-17" + | :"gpt-4o" + | :"gpt-4o-2024-11-20" + | :"gpt-4o-2024-08-06" + | :"gpt-4o-2024-05-13" + | :"gpt-4o-mini" + | :"gpt-4o-mini-2024-07-18" + | :"gpt-4.5-preview" + | :"gpt-4.5-preview-2025-02-27" + | :"gpt-4-turbo" + | :"gpt-4-turbo-2024-04-09" + | :"gpt-4-0125-preview" + | :"gpt-4-turbo-preview" + | :"gpt-4-1106-preview" + | :"gpt-4-vision-preview" + | :"gpt-4" + | :"gpt-4-0314" + | :"gpt-4-0613" + | :"gpt-4-32k" + | :"gpt-4-32k-0314" + | :"gpt-4-32k-0613" + | :"gpt-3.5-turbo" + | :"gpt-3.5-turbo-16k" + | :"gpt-3.5-turbo-0613" + | :"gpt-3.5-turbo-1106" + | :"gpt-3.5-turbo-0125" + | :"gpt-3.5-turbo-16k-0613")] end type tool_resources = diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 6f496b04..131c7bd7 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -90,21 +90,12 @@ module OpenAI def to_hash: -> OpenAI::Models::completion_create_params type model = - String | OpenAI::Models::CompletionCreateParams::Model::preset + String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" class Model < OpenAI::Union - type preset = - :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" - - class Preset < OpenAI::Enum - GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" - DAVINCI_002: :"davinci-002" - BABBAGE_002: :"babbage-002" - - def self.values: -> ::Array[OpenAI::Models::CompletionCreateParams::Model::preset] - end - - def self.variants: -> [String, OpenAI::Models::CompletionCreateParams::Model::preset] + def self.variants: -> [String, (:"gpt-3.5-turbo-instruct" + | :"davinci-002" + | :"babbage-002")] end type prompt = diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index e5712ef5..468dee15 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -66,22 +66,17 @@ module OpenAI def to_hash: -> OpenAI::Models::FineTuning::job_create_params type model = - String | OpenAI::Models::FineTuning::JobCreateParams::Model::preset + String + | :"babbage-002" + | :"davinci-002" + | :"gpt-3.5-turbo" + | :"gpt-4o-mini" class Model < OpenAI::Union - type preset = - :"babbage-002" | :"davinci-002" | :"gpt-3.5-turbo" | :"gpt-4o-mini" - - class Preset < OpenAI::Enum - BABBAGE_002: 
:"babbage-002" - DAVINCI_002: :"davinci-002" - GPT_3_5_TURBO: :"gpt-3.5-turbo" - GPT_4O_MINI: :"gpt-4o-mini" - - def self.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Model::preset] - end - - def self.variants: -> [String, OpenAI::Models::FineTuning::JobCreateParams::Model::preset] + def self.variants: -> [String, (:"babbage-002" + | :"davinci-002" + | :"gpt-3.5-turbo" + | :"gpt-4o-mini")] end type hyperparameters = diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 7d0758fa..9a3f400f 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -58,7 +58,7 @@ def test_client_default_request_default_retry_attempts assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) end @@ -74,7 +74,7 @@ def test_client_given_request_default_retry_attempts assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) end @@ -90,7 +90,7 @@ def test_client_default_request_given_retry_attempts assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {max_retries: 3} ) @@ -107,7 +107,7 @@ def test_client_given_request_given_retry_attempts assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {max_retries: 4} ) @@ -124,7 +124,7 @@ def test_client_retry_after_seconds assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) end @@ -142,7 +142,7 @@ def test_client_retry_after_date Thread.current.thread_variable_set(:time_now, Time.now) openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) Thread.current.thread_variable_set(:time_now, nil) @@ -160,7 +160,7 @@ def test_client_retry_after_ms assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) end @@ -177,7 +177,7 @@ def test_retry_count_header assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) end @@ -194,7 +194,7 @@ def test_omit_retry_count_header assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {extra_headers: {"x-stainless-retry-count" => nil}} ) @@ -212,7 +212,7 @@ def test_overwrite_retry_count_header assert_raises(OpenAI::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {extra_headers: {"x-stainless-retry-count" => "42"}} ) @@ -230,7 +230,7 @@ def test_client_redirect_307 assert_raises(OpenAI::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + 
model: :"o3-mini", stream: true, request_options: {extra_headers: {}} ) @@ -253,7 +253,7 @@ def test_client_redirect_303 assert_raises(OpenAI::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {extra_headers: {}} ) @@ -273,7 +273,7 @@ def test_client_redirect_auth_keep_same_origin assert_raises(OpenAI::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {extra_headers: {"Authorization" => "Bearer xyz"}} ) @@ -293,7 +293,7 @@ def test_client_redirect_auth_strip_cross_origin assert_raises(OpenAI::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true, request_options: {extra_headers: {"Authorization" => "Bearer xyz"}} ) @@ -308,7 +308,7 @@ def test_default_headers openai.requester = requester openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) headers = requester.attempts.first[:headers] diff --git a/test/openai/resources/audio/speech_test.rb b/test/openai/resources/audio/speech_test.rb index e2d6de20..9718aa1a 100644 --- a/test/openai/resources/audio/speech_test.rb +++ b/test/openai/resources/audio/speech_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Audio::SpeechTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.audio.speech.create(input: "input", model: "string", voice: :alloy) + response = @openai.audio.speech.create(input: "input", model: :"tts-1", voice: :alloy) assert_pattern do response => OpenAI::Unknown diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index 175fb0d5..e0ffb63a 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Beta::AssistantsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.beta.assistants.create(model: :"gpt-4o") + response = @openai.beta.assistants.create(model: :"o3-mini") assert_pattern do response => OpenAI::Models::Beta::Assistant diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index c45ef70f..4b353ac2 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -6,7 +6,7 @@ class OpenAI::Test::Resources::Chat::CompletionsTest < OpenAI::Test::ResourceTes def test_create_required_params response = @openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4o", + model: :"o3-mini", stream: true ) diff --git a/test/openai/resources/completions_test.rb b/test/openai/resources/completions_test.rb index 0ff1c63c..6fb99107 100644 --- a/test/openai/resources/completions_test.rb +++ b/test/openai/resources/completions_test.rb @@ -4,7 +4,11 @@ class OpenAI::Test::Resources::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.completions.create(model: "string", prompt: "This is a test.", stream: true) + response = @openai.completions.create( + model: :"gpt-3.5-turbo-instruct", + prompt: "This is a test.", + stream: true + ) assert_pattern do response => OpenAI::Models::Completion diff --git 
a/test/openai/resources/embeddings_test.rb b/test/openai/resources/embeddings_test.rb index 59218d06..244d5817 100644 --- a/test/openai/resources/embeddings_test.rb +++ b/test/openai/resources/embeddings_test.rb @@ -6,7 +6,7 @@ class OpenAI::Test::Resources::EmbeddingsTest < OpenAI::Test::ResourceTest def test_create_required_params response = @openai.embeddings.create( input: "The quick brown fox jumped over the lazy dog", - model: :"text-embedding-3-small" + model: :"text-embedding-ada-002" ) assert_pattern do diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb index c085d073..2b35089a 100644 --- a/test/openai/resources/fine_tuning/jobs_test.rb +++ b/test/openai/resources/fine_tuning/jobs_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::FineTuning::JobsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.fine_tuning.jobs.create(model: :"gpt-4o-mini", training_file: "file-abc123") + response = @openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123") assert_pattern do response => OpenAI::Models::FineTuning::FineTuningJob diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index bd1bcaf3..6ecd70a3 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::ResponsesTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.responses.create(input: "string", model: :"gpt-4o", stream: true) + response = @openai.responses.create(input: "string", model: :"o3-mini", stream: true) assert_pattern do response => OpenAI::Models::Responses::Response From cad4b8c6fba0f61f7c812ae1ee56b5ac90fd4b34 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 02:21:38 +0000 Subject: [PATCH 022/295] chore(internal): version bump (#26) --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- lib/openai/version.rb | 2 +- rbi/lib/openai/version.rbi | 2 +- sig/openai/version.rbs | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ba6c3483..f14b480a 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.1" + ".": "0.1.0-alpha.2" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 341cc850..333b95fb 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
  specs:
-    openai (0.1.0.pre.alpha.1)
+    openai (0.1.0.pre.alpha.2)
       connection_pool

 GEM
diff --git a/lib/openai/version.rb b/lib/openai/version.rb
index bcab79ff..8175d17f 100644
--- a/lib/openai/version.rb
+++ b/lib/openai/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module OpenAI
-  VERSION = "0.1.0-alpha.1"
+  VERSION = "0.1.0-alpha.2"
 end
diff --git a/rbi/lib/openai/version.rbi b/rbi/lib/openai/version.rbi
index 5366ece5..1f42bc59 100644
--- a/rbi/lib/openai/version.rbi
+++ b/rbi/lib/openai/version.rbi
@@ -1,5 +1,5 @@
 # typed: strong

 module OpenAI
-  VERSION = "0.1.0-alpha.1"
+  VERSION = "0.1.0-alpha.2"
 end
diff --git a/sig/openai/version.rbs b/sig/openai/version.rbs
index e4f5239e..b4a8f46d 100644
--- a/sig/openai/version.rbs
+++ b/sig/openai/version.rbs
@@ -1,3 +1,3 @@
 module OpenAI
-  VERSION: "0.1.0-alpha.1"
+  VERSION: "0.1.0-alpha.2"
 end

From 66e5f17fceda14d2b42d93156ef2b2bf38bb63ff Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 18 Mar 2025 05:01:56 +0000
Subject: [PATCH 023/295] chore(internal): codegen related update (#27)

---
 README.md      | 4 +++-
 openai.gemspec | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 8b458397..c6570f9b 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@ The OpenAI Ruby library provides convenient access to the OpenAI REST API from a

 ## Documentation

-Documentation for the most recent release of this gem can be found [on RubyDoc](https://gemdocs.org/gems/openai/latest).
+Documentation for releases of this gem can be found [on RubyDoc](https://gemdocs.org/gems/openai).

 The underlying REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs).

@@ -172,6 +172,8 @@ What this means is that while you can use Sorbet to type check your code statica

 Due to limitations with the Sorbet type system, where a method otherwise can take an instance of `OpenAI::BaseModel` class, you will need to use the `**` splat operator to pass the arguments:

+Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for the best experience.
+ ```ruby model = CompletionCreateParams.new( messages: [{ diff --git a/openai.gemspec b/openai.gemspec index e2cf1559..e9cdbc7b 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -12,7 +12,7 @@ Gem::Specification.new do |s| s.extra_rdoc_files = ["README.md"] s.required_ruby_version = ">= 3.0.0" s.add_dependency "connection_pool" - s.homepage = "https://gemdocs.org/gems/openai/latest" + s.homepage = "https://gemdocs.org/gems/openai" s.metadata["homepage_uri"] = s.homepage s.metadata["source_code_uri"] = "https://github.com/openai/openai-ruby" s.metadata["rubygems_mfa_required"] = "false" From 1d51530f88aedb1525c1acf553e0466bde9fbcbd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 15:26:14 +0000 Subject: [PATCH 024/295] fix: missing union constants in rbs and rbi type definitions (#28) --- .../models/beta/assistant_update_params.rbi | 31 +++++++++++++++++++ .../models/completion_create_params.rbi | 4 +++ .../models/fine_tuning/job_create_params.rbi | 5 +++ .../models/beta/assistant_update_params.rbs | 31 +++++++++++++++++++ .../models/completion_create_params.rbs | 4 +++ .../models/fine_tuning/job_create_params.rbs | 5 +++ 6 files changed, 80 insertions(+) diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index e05cff62..69e28e75 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -307,6 +307,37 @@ module OpenAI abstract! Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + O3_MINI = :"o3-mini" + O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" + O1 = :o1 + O1_2024_12_17 = :"o1-2024-12-17" + GPT_4O = :"gpt-4o" + GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" + GPT_4O_MINI = :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW = :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO = :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" + GPT_4 = :"gpt-4" + GPT_4_0314 = :"gpt-4-0314" + GPT_4_0613 = :"gpt-4-0613" + GPT_4_32K = :"gpt-4-32k" + GPT_4_32K_0314 = :"gpt-4-32k-0314" + GPT_4_32K_0613 = :"gpt-4-32k-0613" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 2e1185c5..cab146c9 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -341,6 +341,10 @@ module OpenAI abstract! 
Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" + DAVINCI_002 = :"davinci-002" + BABBAGE_002 = :"babbage-002" end # The prompt(s) to generate completions for, encoded as a string, array of diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 224616d6..3081b9f0 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -192,6 +192,11 @@ module OpenAI abstract! Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + BABBAGE_002 = :"babbage-002" + DAVINCI_002 = :"davinci-002" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_4O_MINI = :"gpt-4o-mini" end class Hyperparameters < OpenAI::BaseModel diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index 39a0d8e2..984e1952 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -107,6 +107,37 @@ module OpenAI | :"gpt-3.5-turbo-16k-0613" class Model < OpenAI::Union + O3_MINI: :"o3-mini" + O3_MINI_2025_01_31: :"o3-mini-2025-01-31" + O1: :o1 + O1_2024_12_17: :"o1-2024-12-17" + GPT_4O: :"gpt-4o" + GPT_4O_2024_11_20: :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06: :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13: :"gpt-4o-2024-05-13" + GPT_4O_MINI: :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW: :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27: :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO: :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09: :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW: :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW: :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW: :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW: :"gpt-4-vision-preview" + GPT_4: :"gpt-4" + GPT_4_0314: :"gpt-4-0314" + GPT_4_0613: :"gpt-4-0613" + GPT_4_32K: :"gpt-4-32k" + GPT_4_32K_0314: :"gpt-4-32k-0314" + GPT_4_32K_0613: :"gpt-4-32k-0613" + GPT_3_5_TURBO: :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K: :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613: :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106: :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613" + def self.variants: -> [String, (:"o3-mini" | :"o3-mini-2025-01-31" | :o1 diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 131c7bd7..1b3c7810 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -93,6 +93,10 @@ module OpenAI String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" class Model < OpenAI::Union + GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" + DAVINCI_002: :"davinci-002" + BABBAGE_002: :"babbage-002" + def self.variants: -> [String, (:"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002")] diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 468dee15..eee5a597 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -73,6 +73,11 @@ module OpenAI | :"gpt-4o-mini" class Model < OpenAI::Union + BABBAGE_002: :"babbage-002" + DAVINCI_002: :"davinci-002" + GPT_3_5_TURBO: :"gpt-3.5-turbo" + GPT_4O_MINI: :"gpt-4o-mini" + def self.variants: -> [String, (:"babbage-002" | :"davinci-002" | :"gpt-3.5-turbo" From 
457baef817f09c77501c98711d79976ed4120bbd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 16:33:10 +0000 Subject: [PATCH 025/295] fix(model)!: base model should recursively store coerced base models (#29) --- lib/openai/base_model.rb | 25 +++++++++++++++++++++++-- rbi/lib/openai/base_model.rbi | 5 +++++ sig/openai/base_model.rbs | 2 ++ test/openai/base_model_test.rb | 14 ++++++++++++++ 4 files changed, 44 insertions(+), 2 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 4ef0106e..5c1a2499 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -902,6 +902,13 @@ def known_fields @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) end + # @api private + # + # @return [Hash{Symbol=>Symbol}] + def reverse_map + @reverse_map ||= (self < OpenAI::BaseModel ? superclass.reverse_map.dup : {}) + end + # @api private # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] @@ -945,7 +952,7 @@ def defaults = (@defaults ||= {}) fallback = info[:const] defaults[name_sym] = fallback if required && !info[:nil?] && info.key?(:const) - key = info.fetch(:api_name, name_sym) + key = info[:api_name]&.tap { reverse_map[_1] = name_sym } || name_sym setter = "#{name_sym}=" if known_fields.key?(name_sym) @@ -1202,7 +1209,21 @@ def deconstruct_keys(keys) def initialize(data = {}) case OpenAI::Util.coerce_hash(data) in Hash => coerced - @data = coerced.transform_keys(&:to_sym) + @data = coerced.to_h do |key, value| + name = key.to_sym + mapped = self.class.reverse_map.fetch(name, name) + type = self.class.fields[mapped]&.fetch(:type) + stored = + case [type, value] + in [Class, Hash] if type <= OpenAI::BaseModel + type.new(value) + in [OpenAI::ArrayOf, Array] | [OpenAI::HashOf, Hash] + type.coerce(value) + else + value + end + [name, stored] + end else raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") end diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 8840d3a8..11854971 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -457,6 +457,11 @@ module OpenAI def known_fields end + # @api private + sig { returns(T::Hash[Symbol, Symbol]) } + def reverse_map + end + # @api private sig do returns(T::Hash[Symbol, T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Converter::Input})]) diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 574847b4..d9267814 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -176,6 +176,8 @@ module OpenAI def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field & { type_fn: (^-> OpenAI::Converter::input) })] + def self.reverse_map: -> ::Hash[Symbol, Symbol] + def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field & { type: OpenAI::Converter::input })] diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index f6e598d0..1a3c623e 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -222,6 +222,20 @@ def test_nested_model_dump end end + class M4 < M2 + required :c, M1 + required :d, OpenAI::ArrayOf[M4] + required :e, M2, api_name: :f + end + + def test_model_to_h + model = M4.new(a: "wow", c: {}, d: [{}, 2, {c: {}}], f: {}) + assert_pattern do + model.to_h => {a: "wow", c: M1, d: [M4, 2, M4 => child], f: M2} + assert_equal({c: M1.new}, child.to_h) + end + end + A3 = OpenAI::ArrayOf[A1] class M3 < M1 From 
4a5a9c9a064596cd7cc1f0b135e5247eb5c373b5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 17:55:08 +0000 Subject: [PATCH 026/295] fix(api): correct some Responses types (#30) --- .stats.yml | 2 +- lib/openai/models/batch_create_params.rb | 13 ++++--- .../models/chat/chat_completion_chunk.rb | 17 ++++++--- .../chat/chat_completion_content_part.rb | 10 ++--- .../chat/chat_completion_stream_options.rb | 7 +++- lib/openai/models/reasoning.rb | 8 ++-- .../responses/response_function_tool_call.rb | 20 ++++++---- lib/openai/models/responses/response_usage.rb | 27 +++++++++++++- lib/openai/resources/batches.rb | 6 +-- rbi/lib/openai/models/batch_create_params.rbi | 13 ++++--- .../models/chat/chat_completion_chunk.rbi | 13 ++++--- .../chat/chat_completion_content_part.rbi | 10 ++--- .../chat/chat_completion_stream_options.rbi | 7 +++- rbi/lib/openai/models/reasoning.rbi | 6 +-- .../responses/response_function_tool_call.rbi | 24 ++++++------ .../models/responses/response_usage.rbi | 37 ++++++++++++++++++- rbi/lib/openai/resources/batches.rbi | 6 +-- sig/openai/models/batch_create_params.rbs | 6 ++- .../models/chat/chat_completion_chunk.rbs | 10 +++-- .../chat/chat_completion_content_part.rbs | 8 ++-- .../responses/response_function_tool_call.rbs | 10 +++-- .../models/responses/response_usage.rbs | 19 ++++++++++ test/openai/resources/batches_test.rb | 2 +- .../resources/responses/input_items_test.rb | 2 +- 24 files changed, 197 insertions(+), 86 deletions(-) diff --git a/.stats.yml b/.stats.yml index 26b57a65..00d7f331 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c8579861bc21d4d2155a5b9e8e7d54faee8083730673c4d32cbbe573d7fb4116.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f763c1a35c8b9b02f1e31b9b2e09e21f98bfe8413e5079c86cbb07da2dd7779b.yml diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 8fa58e3f..593eb7d2 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -16,9 +16,9 @@ class BatchCreateParams < OpenAI::BaseModel # @!attribute endpoint # The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. # # @return [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] required :endpoint, enum: -> { OpenAI::Models::BatchCreateParams::Endpoint } @@ -72,10 +72,11 @@ class CompletionWindow < OpenAI::Enum # @abstract # # The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. 
Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. class Endpoint < OpenAI::Enum + V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" V1_COMPLETIONS = :"/v1/completions" diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 0c8a0cea..5c0d47df 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -55,14 +55,21 @@ class ChatCompletionChunk < OpenAI::BaseModel # # @return [String] # attr_writer :system_fingerprint - # @!attribute usage + # @!attribute [r] usage # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it - # contains a null value except for the last chunk which contains the token usage - # statistics for the entire request. + # contains a null value **except for the last chunk** which contains the token + # usage statistics for the entire request. + # + # **NOTE:** If the stream is interrupted or cancelled, you may not receive the + # final usage chunk which contains the total token usage for the request. # # @return [OpenAI::Models::CompletionUsage, nil] - optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true + optional :usage, -> { OpenAI::Models::CompletionUsage } + + # @!parse + # # @return [OpenAI::Models::CompletionUsage] + # attr_writer :usage # @!parse # # Represents a streamed chunk of a chat completion response returned by the model, @@ -75,7 +82,7 @@ class ChatCompletionChunk < OpenAI::BaseModel # # @param model [String] # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage, nil] + # # @param usage [OpenAI::Models::CompletionUsage] # # @param object [Symbol, :"chat.completion.chunk"] # # # def initialize( diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index f9fede31..fd7d3c2c 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -67,22 +67,22 @@ class File < OpenAI::BaseModel # # @return [String] # attr_writer :file_id - # @!attribute [r] file_name + # @!attribute [r] filename # The name of the file, used when passing the file to the model as a string. # # @return [String, nil] - optional :file_name, String + optional :filename, String # @!parse # # @return [String] - # attr_writer :file_name + # attr_writer :filename # @!parse # # @param file_data [String] # # @param file_id [String] - # # @param file_name [String] + # # @param filename [String] # # - # def initialize(file_data: nil, file_id: nil, file_name: nil, **) = super + # def initialize(file_data: nil, file_id: nil, filename: nil, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index be16b6d6..0d66702d 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -7,8 +7,11 @@ class ChatCompletionStreamOptions < OpenAI::BaseModel # @!attribute [r] include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. 
# The `usage` field on this chunk shows the token usage statistics for the entire - # request, and the `choices` field will always be an empty array. All other chunks - # will also include a `usage` field, but with a null value. + # request, and the `choices` field will always be an empty array. + # + # All other chunks will also include a `usage` field, but with a null value. + # **NOTE:** If the stream is interrupted, you may not receive the final usage + # chunk which contains the total token usage for the request. # # @return [Boolean, nil] optional :include_usage, OpenAI::BooleanModel diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 28b7129f..a4ec26de 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -12,10 +12,10 @@ class Reasoning < OpenAI::BaseModel # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - required :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + optional :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute generate_summary - # **o-series models only** + # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or @@ -33,13 +33,13 @@ class Reasoning < OpenAI::BaseModel # # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] # # - # def initialize(effort:, generate_summary: nil, **) = super + # def initialize(effort: nil, generate_summary: nil, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract # - # **o-series models only** + # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 065ee3ea..b2b500c1 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -4,12 +4,6 @@ module OpenAI module Models module Responses class ResponseFunctionToolCall < OpenAI::BaseModel - # @!attribute id - # The unique ID of the function tool call. - # - # @return [String] - required :id, String - # @!attribute arguments # A JSON string of the arguments to pass to the function. # @@ -34,6 +28,16 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # @return [Symbol, :function_call] required :type, const: :function_call + # @!attribute [r] id + # The unique ID of the function tool call. + # + # @return [String, nil] + optional :id, String + + # @!parse + # # @return [String] + # attr_writer :id + # @!attribute [r] status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -50,14 +54,14 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # # for more information. 
# # - # # @param id [String] # # @param arguments [String] # # @param call_id [String] # # @param name [String] + # # @param id [String] # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] # # @param type [Symbol, :function_call] # # - # def initialize(id:, arguments:, call_id:, name:, status: nil, type: :function_call, **) = super + # def initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 198bb4bd..8e4ccddd 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -10,6 +10,12 @@ class ResponseUsage < OpenAI::BaseModel # @return [Integer] required :input_tokens, Integer + # @!attribute input_tokens_details + # A detailed breakdown of the input tokens. + # + # @return [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::Models::Responses::ResponseUsage::InputTokensDetails } + # @!attribute output_tokens # The number of output tokens. # @@ -33,14 +39,33 @@ class ResponseUsage < OpenAI::BaseModel # # breakdown of output tokens, and the total tokens used. # # # # @param input_tokens [Integer] + # # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] # # @param output_tokens [Integer] # # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] # # @param total_tokens [Integer] # # - # def initialize(input_tokens:, output_tokens:, output_tokens_details:, total_tokens:, **) = super + # def initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void + class InputTokensDetails < OpenAI::BaseModel + # @!attribute cached_tokens + # The number of tokens that were retrieved from the cache. + # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). + # + # @return [Integer] + required :cached_tokens, Integer + + # @!parse + # # A detailed breakdown of the input tokens. + # # + # # @param cached_tokens [Integer] + # # + # def initialize(cached_tokens:, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + class OutputTokensDetails < OpenAI::BaseModel # @!attribute reasoning_tokens # The number of reasoning tokens. diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 8f0799eb..62883c2e 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -11,9 +11,9 @@ class Batches # is supported. # # @option params [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] :endpoint The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. # # @option params [String] :input_file_id The ID of an uploaded file that contains requests for the new batch. 
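+      #
+      # A minimal create call against the new endpoint (a sketch mirroring the
+      # updated test in test/openai/resources/batches_test.rb; its
+      # "input_file_id" placeholder stands in for a real uploaded file ID):
+      #
+      #   openai.batches.create(
+      #     completion_window: :"24h",
+      #     endpoint: :"/v1/responses",
+      #     input_file_id: "input_file_id"
+      #   )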
# diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 17c682c5..c97a8484 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -17,9 +17,9 @@ module OpenAI end # The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. sig { returns(Symbol) } def endpoint end @@ -98,14 +98,15 @@ module OpenAI end # The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. class Endpoint < OpenAI::Enum abstract! Value = type_template(:out) { {fixed: Symbol} } + V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" V1_COMPLETIONS = :"/v1/completions" diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 9595e84a..a3cc4a45 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -79,13 +79,16 @@ module OpenAI # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it - # contains a null value except for the last chunk which contains the token usage - # statistics for the entire request. + # contains a null value **except for the last chunk** which contains the token + # usage statistics for the entire request. + # + # **NOTE:** If the stream is interrupted or cancelled, you may not receive the + # final usage chunk which contains the total token usage for the request. 
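+      #
+      # A consumption sketch (hedged: the streaming iteration interface shown
+      # here is assumed, not defined by this patch):
+      #
+      #   usage = nil
+      #   chunks.each { |chunk| usage = chunk.usage if chunk.usage }
+      #   total = usage&.total_tokens # nil if the stream was cut short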
sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } def usage end - sig { params(_: T.nilable(OpenAI::Models::CompletionUsage)).returns(T.nilable(OpenAI::Models::CompletionUsage)) } + sig { params(_: OpenAI::Models::CompletionUsage).returns(OpenAI::Models::CompletionUsage) } def usage=(_) end @@ -100,7 +103,7 @@ module OpenAI model: String, service_tier: T.nilable(Symbol), system_fingerprint: String, - usage: T.nilable(OpenAI::Models::CompletionUsage), + usage: OpenAI::Models::CompletionUsage, object: Symbol ) .returns(T.attached_class) @@ -128,7 +131,7 @@ module OpenAI object: Symbol, service_tier: T.nilable(Symbol), system_fingerprint: String, - usage: T.nilable(OpenAI::Models::CompletionUsage) + usage: OpenAI::Models::CompletionUsage } ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index a36f38f5..5f1deb83 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -77,18 +77,18 @@ module OpenAI # The name of the file, used when passing the file to the model as a string. sig { returns(T.nilable(String)) } - def file_name + def filename end sig { params(_: String).returns(String) } - def file_name=(_) + def filename=(_) end - sig { params(file_data: String, file_id: String, file_name: String).returns(T.attached_class) } - def self.new(file_data: nil, file_id: nil, file_name: nil) + sig { params(file_data: String, file_id: String, filename: String).returns(T.attached_class) } + def self.new(file_data: nil, file_id: nil, filename: nil) end - sig { override.returns({file_data: String, file_id: String, file_name: String}) } + sig { override.returns({file_data: String, file_id: String, filename: String}) } def to_hash end end diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 9fbf5879..7d7809fc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -8,8 +8,11 @@ module OpenAI class ChatCompletionStreamOptions < OpenAI::BaseModel # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire - # request, and the `choices` field will always be an empty array. All other chunks - # will also include a `usage` field, but with a null value. + # request, and the `choices` field will always be an empty array. + # + # All other chunks will also include a `usage` field, but with a null value. + # **NOTE:** If the stream is interrupted, you may not receive the final usage + # chunk which contains the total token usage for the request. sig { returns(T.nilable(T::Boolean)) } def include_usage end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index e6f69e82..38290949 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -17,7 +17,7 @@ module OpenAI def effort=(_) end - # **o-series models only** + # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or @@ -35,14 +35,14 @@ module OpenAI # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). 
sig { params(effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)).returns(T.attached_class) } - def self.new(effort:, generate_summary: nil) + def self.new(effort: nil, generate_summary: nil) end sig { override.returns({effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)}) } def to_hash end - # **o-series models only** + # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index a92d38ed..7f65d691 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -4,15 +4,6 @@ module OpenAI module Models module Responses class ResponseFunctionToolCall < OpenAI::BaseModel - # The unique ID of the function tool call. - sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end - # A JSON string of the arguments to pass to the function. sig { returns(String) } def arguments @@ -49,6 +40,15 @@ module OpenAI def type=(_) end + # The unique ID of the function tool call. + sig { returns(T.nilable(String)) } + def id + end + + sig { params(_: String).returns(String) } + def id=(_) + end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(Symbol)) } @@ -63,20 +63,20 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. sig do - params(id: String, arguments: String, call_id: String, name: String, status: Symbol, type: Symbol) + params(arguments: String, call_id: String, name: String, id: String, status: Symbol, type: Symbol) .returns(T.attached_class) end - def self.new(id:, arguments:, call_id:, name:, status: nil, type: :function_call) + def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) end sig do override .returns({ - id: String, arguments: String, call_id: String, name: String, type: Symbol, + id: String, status: Symbol }) end diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index b46f45aa..90401965 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -13,6 +13,18 @@ module OpenAI def input_tokens=(_) end + # A detailed breakdown of the input tokens. + sig { returns(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) } + def input_tokens_details + end + + sig do + params(_: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) + .returns(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) + end + def input_tokens_details=(_) + end + # The number of output tokens. 
sig { returns(Integer) } def output_tokens @@ -48,13 +60,14 @@ module OpenAI sig do params( input_tokens: Integer, + input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, output_tokens: Integer, output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, total_tokens: Integer ) .returns(T.attached_class) end - def self.new(input_tokens:, output_tokens:, output_tokens_details:, total_tokens:) + def self.new(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:) end sig do @@ -62,6 +75,7 @@ module OpenAI .returns( { input_tokens: Integer, + input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, output_tokens: Integer, output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, total_tokens: Integer @@ -71,6 +85,27 @@ module OpenAI def to_hash end + class InputTokensDetails < OpenAI::BaseModel + # The number of tokens that were retrieved from the cache. + # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). + sig { returns(Integer) } + def cached_tokens + end + + sig { params(_: Integer).returns(Integer) } + def cached_tokens=(_) + end + + # A detailed breakdown of the input tokens. + sig { params(cached_tokens: Integer).returns(T.attached_class) } + def self.new(cached_tokens:) + end + + sig { override.returns({cached_tokens: Integer}) } + def to_hash + end + end + class OutputTokensDetails < OpenAI::BaseModel # The number of reasoning tokens. sig { returns(Integer) } diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index a3f56e28..7754b091 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -19,9 +19,9 @@ module OpenAI # is supported. completion_window:, # The endpoint to be used for all requests in the batch. Currently - # `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` are supported. - # Note that `/v1/embeddings` batches are also restricted to a maximum of 50,000 - # embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. endpoint:, # The ID of an uploaded file that contains requests for the new batch. # diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 2f317b07..961c7870 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -44,9 +44,13 @@ module OpenAI end type endpoint = - :"/v1/chat/completions" | :"/v1/embeddings" | :"/v1/completions" + :"/v1/responses" + | :"/v1/chat/completions" + | :"/v1/embeddings" + | :"/v1/completions" class Endpoint < OpenAI::Enum + V1_RESPONSES: :"/v1/responses" V1_CHAT_COMPLETIONS: :"/v1/chat/completions" V1_EMBEDDINGS: :"/v1/embeddings" V1_COMPLETIONS: :"/v1/completions" diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 980b2889..0e9780de 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -13,7 +13,7 @@ module OpenAI object: :"chat.completion.chunk", service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage? 
+ usage: OpenAI::Models::CompletionUsage } class ChatCompletionChunk < OpenAI::BaseModel @@ -33,7 +33,11 @@ module OpenAI def system_fingerprint=: (String) -> String - attr_accessor usage: OpenAI::Models::CompletionUsage? + attr_reader usage: OpenAI::Models::CompletionUsage? + + def usage=: ( + OpenAI::Models::CompletionUsage + ) -> OpenAI::Models::CompletionUsage def initialize: ( @@ -43,7 +47,7 @@ module OpenAI model: String, service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage?, + usage: OpenAI::Models::CompletionUsage, object: :"chat.completion.chunk" ) -> void | ( diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 31333ab5..13220e58 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -34,7 +34,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPart::file - type file = { file_data: String, file_id: String, file_name: String } + type file = { file_data: String, file_id: String, filename: String } class File < OpenAI::BaseModel attr_reader file_data: String? @@ -45,12 +45,12 @@ module OpenAI def file_id=: (String) -> String - attr_reader file_name: String? + attr_reader filename: String? - def file_name=: (String) -> String + def filename=: (String) -> String def initialize: - (file_data: String, file_id: String, file_name: String) -> void + (file_data: String, file_id: String, filename: String) -> void | ( ?OpenAI::Models::Chat::ChatCompletionContentPart::File::file | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index 1314a1f4..dc7f9bf6 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -3,17 +3,15 @@ module OpenAI module Responses type response_function_tool_call = { - id: String, arguments: String, call_id: String, name: String, type: :function_call, + id: String, status: OpenAI::Models::Responses::ResponseFunctionToolCall::status } class ResponseFunctionToolCall < OpenAI::BaseModel - attr_accessor id: String - attr_accessor arguments: String attr_accessor call_id: String @@ -22,6 +20,10 @@ module OpenAI attr_accessor type: :function_call + attr_reader id: String? + + def id=: (String) -> String + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCall::status? 
def status=: ( @@ -30,10 +32,10 @@ module OpenAI def initialize: ( - id: String, arguments: String, call_id: String, name: String, + id: String, status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, type: :function_call ) -> void diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs index cca39c67..c00715ed 100644 --- a/sig/openai/models/responses/response_usage.rbs +++ b/sig/openai/models/responses/response_usage.rbs @@ -4,6 +4,7 @@ module OpenAI type response_usage = { input_tokens: Integer, + input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, output_tokens: Integer, output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, total_tokens: Integer @@ -12,6 +13,8 @@ module OpenAI class ResponseUsage < OpenAI::BaseModel attr_accessor input_tokens: Integer + attr_accessor input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails + attr_accessor output_tokens: Integer attr_accessor output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails @@ -21,6 +24,7 @@ module OpenAI def initialize: ( input_tokens: Integer, + input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, output_tokens: Integer, output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, total_tokens: Integer @@ -31,6 +35,21 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::response_usage + type input_tokens_details = { cached_tokens: Integer } + + class InputTokensDetails < OpenAI::BaseModel + attr_accessor cached_tokens: Integer + + def initialize: + (cached_tokens: Integer) -> void + | ( + ?OpenAI::Models::Responses::ResponseUsage::input_tokens_details + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::ResponseUsage::input_tokens_details + end + type output_tokens_details = { reasoning_tokens: Integer } class OutputTokensDetails < OpenAI::BaseModel diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb index 50ae1244..c7885939 100644 --- a/test/openai/resources/batches_test.rb +++ b/test/openai/resources/batches_test.rb @@ -6,7 +6,7 @@ class OpenAI::Test::Resources::BatchesTest < OpenAI::Test::ResourceTest def test_create_required_params response = @openai.batches.create( completion_window: :"24h", - endpoint: :"/v1/chat/completions", + endpoint: :"/v1/responses", input_file_id: "input_file_id" ) diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index c00355a7..6886eb62 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -70,10 +70,10 @@ def test_list in {type: :web_search_call, id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status} in { type: :function_call, - id: String, arguments: String, call_id: String, name: String, + id: String | nil, status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status | nil } in { From 49e3c0d1f3597dea09893ae8612d50f184c1e8dd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 19:22:31 +0000 Subject: [PATCH 027/295] chore: ignore some spurious linter warnings and formatting changes (#31) --- .rubocop.yml | 4 ++++ lib/openai/base_page.rb | 4 ++++ lib/openai/util.rb | 2 ++ openai.gemspec | 2 +- rbi/lib/openai/base_model.rbi | 36 
++++++++++++++++++++++++++++++---- rbi/lib/openai/cursor_page.rbi | 4 ++++ rbi/lib/openai/page.rbi | 4 ++++ sig/openai/base_model.rbs | 14 +++++++++++++ sig/openai/cursor_page.rbs | 2 ++ sig/openai/page.rbs | 2 ++ 10 files changed, 69 insertions(+), 5 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index e0a360b6..e612c952 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -10,6 +10,10 @@ AllCops: SuggestExtensions: false TargetRubyVersion: 3.1.0 +# Whether MFA is required or not should be left to the token configuration +Gemspec/RequireMFA: + Enabled: false + # Don't require this extra line break, it can be excessive. Layout/EmptyLineAfterGuardClause: Enabled: false diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index c8a9058a..3892c2bc 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -28,6 +28,8 @@ module OpenAI # completions => Array # ``` module BasePage + # rubocop:disable Lint/UnusedMethodArgument + # @return [Boolean] def next_page? = (raise NotImplementedError) @@ -56,5 +58,7 @@ def initialize(client:, req:, headers:, page_data:) @req = req super() end + + # rubocop:enable Lint/UnusedMethodArgument end end diff --git a/lib/openai/util.rb b/lib/openai/util.rb index ef208d81..552fc640 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -452,6 +452,7 @@ def string_io(&blk) end class << self + # rubocop:disable Naming/MethodParameterName # @api private # # @param y [Enumerator::Yielder] @@ -489,6 +490,7 @@ class << self end y << "\r\n" end + # rubocop:enable Naming/MethodParameterName # @api private # diff --git a/openai.gemspec b/openai.gemspec index e9cdbc7b..60a53a67 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -15,5 +15,5 @@ Gem::Specification.new do |s| s.homepage = "https://gemdocs.org/gems/openai" s.metadata["homepage_uri"] = s.homepage s.metadata["source_code_uri"] = "https://github.com/openai/openai-ruby" - s.metadata["rubygems_mfa_required"] = "false" + s.metadata["rubygems_mfa_required"] = false.to_s end diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 11854971..4b5b673d 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -310,6 +310,20 @@ module OpenAI Elem = type_member(:out) + sig(:final) do + params( + type_info: T.any( + T::Hash[Symbol, T.anything], + T.proc.returns(OpenAI::Converter::Input), + OpenAI::Converter::Input + ), + spec: T::Hash[Symbol, T.anything] + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + sig(:final) { params(other: T.anything).returns(T::Boolean) } def ===(other) end @@ -360,9 +374,9 @@ module OpenAI ), spec: T::Hash[Symbol, T.anything] ) - .returns(T.attached_class) + .void end - def self.new(type_info, spec = {}) + def initialize(type_info, spec = {}) end end @@ -377,6 +391,20 @@ module OpenAI Elem = type_member(:out) + sig(:final) do + params( + type_info: T.any( + T::Hash[Symbol, T.anything], + T.proc.returns(OpenAI::Converter::Input), + OpenAI::Converter::Input + ), + spec: T::Hash[Symbol, T.anything] + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + sig(:final) { params(other: T.anything).returns(T::Boolean) } def ===(other) end @@ -427,9 +455,9 @@ module OpenAI ), spec: T::Hash[Symbol, T.anything] ) - .returns(T.attached_class) + .void end - def self.new(type_info, spec = {}) + def initialize(type_info, spec = {}) end end diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index c40bab43..261ed7d6 100644 --- 
a/rbi/lib/openai/cursor_page.rbi
+++ b/rbi/lib/openai/cursor_page.rbi
@@ -21,5 +21,9 @@ module OpenAI
     sig { params(_: T::Boolean).returns(T::Boolean) }
     def has_more=(_)
     end
+
+    sig { returns(String) }
+    def inspect
+    end
   end
 end
diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi
index a738bab8..41e78793 100644
--- a/rbi/lib/openai/page.rbi
+++ b/rbi/lib/openai/page.rbi
@@ -21,5 +21,9 @@ module OpenAI
     sig { params(_: String).returns(String) }
     def object=(_)
     end
+
+    sig { returns(String) }
+    def inspect
+    end
   end
 end
diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs
index d9267814..581a545f 100644
--- a/sig/openai/base_model.rbs
+++ b/sig/openai/base_model.rbs
@@ -121,6 +121,13 @@ module OpenAI
   class ArrayOf[Elem]
     include OpenAI::Converter

+    def self.[]: (
+      ::Hash[Symbol, top]
+      | ^-> OpenAI::Converter::input
+      | OpenAI::Converter::input type_info,
+      ?::Hash[Symbol, top] spec
+    ) -> instance
+
     def ===: (top other) -> bool

     def ==: (top other) -> bool
@@ -146,6 +153,13 @@ module OpenAI
   class HashOf[Elem]
     include OpenAI::Converter

+    def self.[]: (
+      ::Hash[Symbol, top]
+      | ^-> OpenAI::Converter::input
+      | OpenAI::Converter::input type_info,
+      ?::Hash[Symbol, top] spec
+    ) -> instance
+
     def ===: (top other) -> bool

     def ==: (top other) -> bool
diff --git a/sig/openai/cursor_page.rbs b/sig/openai/cursor_page.rbs
index 5aa7c659..4bf87e3a 100644
--- a/sig/openai/cursor_page.rbs
+++ b/sig/openai/cursor_page.rbs
@@ -5,5 +5,7 @@ module OpenAI
     attr_accessor data: ::Array[Elem]?

     attr_accessor has_more: bool
+
+    def inspect: -> String
   end
 end
diff --git a/sig/openai/page.rbs b/sig/openai/page.rbs
index 420e2716..0159e75d 100644
--- a/sig/openai/page.rbs
+++ b/sig/openai/page.rbs
@@ -5,5 +5,7 @@ module OpenAI
     attr_accessor data: ::Array[Elem]?

     attr_accessor object: String
+
+    def inspect: -> String
   end
 end

From b3218483fe95a33636a1ee956522fa27f22a9ed5 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 18 Mar 2025 20:25:12 +0000
Subject: [PATCH 028/295] chore: document Client's concurrency capability (#33)

---
 README.md                               | 12 ++++++++++++
 lib/openai/pooled_net_requester.rb      |  9 ++++++++-
 lib/openai/util.rb                      | 17 ++++++++++-------
 rbi/lib/openai/base_client.rbi          |  1 +
 rbi/lib/openai/pooled_net_requester.rbi |  4 ++++
 rbi/lib/openai/util.rbi                 |  5 +++++
 sig/openai/pooled_net_requester.rbs     |  2 ++
 7 files changed, 42 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index c6570f9b..1b269d07 100644
--- a/README.md
+++ b/README.md
@@ -186,6 +186,18 @@ model = CompletionCreateParams.new(
 openai.chat.completions.create(**model)
 ```

+## Advanced
+
+### Concurrency & Connection Pooling
+
+`OpenAI::Client` instances are thread-safe and should be reused across multiple threads. By default, each `Client` has its own HTTP connection pool, with a maximum number of connections equal to the number of available processors.
+
+When the maximum number of connections has been checked out from the connection pool, the `Client` will wait for an in-use connection to become available. Time spent waiting in this queue counts against the per-request timeout.
+
+Unless otherwise specified, other classes in the SDK do not have locks protecting their underlying data structures.
+
+Currently, `OpenAI::Client` instances are only fork-safe if there are no in-flight HTTP requests.
+
 ## Versioning

 This package follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions.
As the library is in initial development and has a major version of `0`, APIs may change at any time. diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index 4b0ae742..c6b17713 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -3,6 +3,10 @@ module OpenAI # @api private class PooledNetRequester + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + class << self # @api private # @@ -124,7 +128,10 @@ def execute(request) end self.class.calibrate_socket_timeout(conn, deadline) - conn.start unless conn.started? + unless conn.started? + conn.keep_alive_timeout = self.class::KEEP_ALIVE_TIMEOUT + conn.start + end self.class.calibrate_socket_timeout(conn, deadline) conn.request(req) do |rsp| diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 552fc640..a372dfa3 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -367,13 +367,14 @@ class << self # @return [Hash{String=>String}] def normalized_headers(*headers) {}.merge(*headers.compact).to_h do |key, val| - case val - in Array - val.map { _1.to_s.strip }.join(", ") - else - val&.to_s&.strip - end - [key.downcase, val] + value = + case val + in Array + val.map { _1.to_s.strip }.join(", ") + else + val&.to_s&.strip + end + [key.downcase, value] end end end @@ -453,6 +454,7 @@ def string_io(&blk) class << self # rubocop:disable Naming/MethodParameterName + # @api private # # @param y [Enumerator::Yielder] @@ -490,6 +492,7 @@ class << self end y << "\r\n" end + # rubocop:enable Naming/MethodParameterName # @api private diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index e4208ab7..e0351a43 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -40,6 +40,7 @@ module OpenAI } end + # from whatwg fetch spec MAX_REDIRECTS = 20 PLATFORM_HEADERS = T::Hash[String, String] diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index 2cdf7b6b..b01c005a 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -7,6 +7,10 @@ module OpenAI {method: Symbol, url: URI::Generic, headers: T::Hash[String, String], body: T.anything, deadline: Float} end + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + class << self # @api private sig { params(url: URI::Generic).returns(Net::HTTP) } diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index fe12b3ff..6215e687 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -52,6 +52,11 @@ module OpenAI end end + # Use this to indicate that a value should be explicitly removed from a data + # structure when using `OpenAI::Util.deep_merge`. + # + # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging + # `{a: 1}` and `{}` would produce `{a: 1}`. 
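+    #
+    # A sketch of those semantics (the positional-argument call shape is an
+    # assumption, and the nested case extrapolates the documented example):
+    #
+    #   base     = {a: 1, b: {c: 2, d: 3}}
+    #   override = {b: {c: OpenAI::Util::OMIT}}
+    #   OpenAI::Util.deep_merge(base, override)
+    #   # => {a: 1, b: {d: 3}}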
OMIT = T.let(T.anything, T.anything) class << self diff --git a/sig/openai/pooled_net_requester.rbs b/sig/openai/pooled_net_requester.rbs index c9f6520d..46a89cad 100644 --- a/sig/openai/pooled_net_requester.rbs +++ b/sig/openai/pooled_net_requester.rbs @@ -9,6 +9,8 @@ module OpenAI deadline: Float } + KEEP_ALIVE_TIMEOUT: 30 + def self.connect: (URI::Generic url) -> top def self.calibrate_socket_timeout: (top conn, Float deadline) -> void From bca7b40a69de4bea570ee22fef1ad3bb86da213d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 20:46:30 +0000 Subject: [PATCH 029/295] fix(types): improve responses type names (#34) --- .stats.yml | 2 +- lib/openai.rb | 8 +- ...response_computer_tool_call_output_item.rb | 111 +++++ ...se_computer_tool_call_output_screenshot.rb | 47 ++ .../response_function_tool_call_item.rb | 26 + ...response_function_tool_call_output_item.rb | 67 +++ .../models/responses/response_input_item.rb | 46 +- .../responses/response_input_message_item.rb | 92 ++++ lib/openai/models/responses/response_item.rb | 45 ++ .../models/responses/response_item_list.rb | 337 +------------ lib/openai/resources/responses/input_items.rb | 4 +- ...esponse_computer_tool_call_output_item.rbi | 159 +++++++ ...e_computer_tool_call_output_screenshot.rbi | 46 ++ .../response_function_tool_call_item.rbi | 29 ++ ...esponse_function_tool_call_output_item.rbi | 83 ++++ .../models/responses/response_input_item.rbi | 49 +- .../responses/response_input_message_item.rbi | 119 +++++ .../openai/models/responses/response_item.rbi | 27 ++ .../models/responses/response_item_list.rbi | 443 +----------------- .../resources/responses/input_items.rbi | 8 +- ...esponse_computer_tool_call_output_item.rbs | 83 ++++ ...e_computer_tool_call_output_screenshot.rbs | 33 ++ .../response_function_tool_call_item.rbs | 20 + ...esponse_function_tool_call_output_item.rbs | 55 +++ .../models/responses/response_input_item.rbs | 34 +- .../responses/response_input_message_item.rbs | 77 +++ sig/openai/models/responses/response_item.rbs | 19 + .../models/responses/response_item_list.rbs | 248 +--------- .../resources/responses/input_items.rbs | 4 +- .../resources/responses/input_items_test.rb | 30 +- 30 files changed, 1202 insertions(+), 1149 deletions(-) create mode 100644 lib/openai/models/responses/response_computer_tool_call_output_item.rb create mode 100644 lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb create mode 100644 lib/openai/models/responses/response_function_tool_call_item.rb create mode 100644 lib/openai/models/responses/response_function_tool_call_output_item.rb create mode 100644 lib/openai/models/responses/response_input_message_item.rb create mode 100644 lib/openai/models/responses/response_item.rb create mode 100644 rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi create mode 100644 rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi create mode 100644 rbi/lib/openai/models/responses/response_function_tool_call_item.rbi create mode 100644 rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi create mode 100644 rbi/lib/openai/models/responses/response_input_message_item.rbi create mode 100644 rbi/lib/openai/models/responses/response_item.rbi create mode 100644 sig/openai/models/responses/response_computer_tool_call_output_item.rbs create mode 100644 sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs 
create mode 100644 sig/openai/models/responses/response_function_tool_call_item.rbs create mode 100644 sig/openai/models/responses/response_function_tool_call_output_item.rbs create mode 100644 sig/openai/models/responses/response_input_message_item.rbs create mode 100644 sig/openai/models/responses/response_item.rbs diff --git a/.stats.yml b/.stats.yml index 00d7f331..2ffca777 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f763c1a35c8b9b02f1e31b9b2e09e21f98bfe8413e5079c86cbb07da2dd7779b.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f3bce04386c4fcfd5037e0477fbaa39010003fd1558eb5185fe4a71dd6a05fdd.yml diff --git a/lib/openai.rb b/lib/openai.rb index 77ad926a..a722c1a4 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -36,6 +36,7 @@ require_relative "openai/models/reasoning_effort" require_relative "openai/models/chat/chat_completion_message" require_relative "openai/models/fine_tuning/fine_tuning_job_wandb_integration_object" +require_relative "openai/models/responses/response_function_tool_call" require_relative "openai/models/audio/speech_create_params" require_relative "openai/models/audio/speech_model" require_relative "openai/models/audio/transcription" @@ -259,6 +260,8 @@ require_relative "openai/models/responses/response_code_interpreter_tool_call" require_relative "openai/models/responses/response_completed_event" require_relative "openai/models/responses/response_computer_tool_call" +require_relative "openai/models/responses/response_computer_tool_call_output_item" +require_relative "openai/models/responses/response_computer_tool_call_output_screenshot" require_relative "openai/models/responses/response_content" require_relative "openai/models/responses/response_content_part_added_event" require_relative "openai/models/responses/response_content_part_done_event" @@ -276,7 +279,8 @@ require_relative "openai/models/responses/response_format_text_json_schema_config" require_relative "openai/models/responses/response_function_call_arguments_delta_event" require_relative "openai/models/responses/response_function_call_arguments_done_event" -require_relative "openai/models/responses/response_function_tool_call" +require_relative "openai/models/responses/response_function_tool_call_item" +require_relative "openai/models/responses/response_function_tool_call_output_item" require_relative "openai/models/responses/response_function_web_search" require_relative "openai/models/responses/response_includable" require_relative "openai/models/responses/response_incomplete_event" @@ -288,7 +292,9 @@ require_relative "openai/models/responses/response_input_image" require_relative "openai/models/responses/response_input_item" require_relative "openai/models/responses/response_input_message_content_list" +require_relative "openai/models/responses/response_input_message_item" require_relative "openai/models/responses/response_input_text" +require_relative "openai/models/responses/response_item" require_relative "openai/models/responses/response_item_list" require_relative "openai/models/responses/response_output_audio" require_relative "openai/models/responses/response_output_item" diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb new file mode 100644 index 00000000..252be325 --- /dev/null +++ 
b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -0,0 +1,111 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseComputerToolCallOutputItem < OpenAI::BaseModel + # @!attribute id + # The unique ID of the computer call tool output. + # + # @return [String] + required :id, String + + # @!attribute call_id + # The ID of the computer tool call that produced the output. + # + # @return [String] + required :call_id, String + + # @!attribute output + # A computer screenshot image used with the computer use tool. + # + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + required :output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot } + + # @!attribute type + # The type of the computer tool call output. Always `computer_call_output`. + # + # @return [Symbol, :computer_call_output] + required :type, const: :computer_call_output + + # @!attribute [r] acknowledged_safety_checks + # The safety checks reported by the API that have been acknowledged by the + # developer. + # + # @return [Array, nil] + optional :acknowledged_safety_checks, + -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } + + # @!parse + # # @return [Array] + # attr_writer :acknowledged_safety_checks + + # @!attribute [r] status + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] + optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status } + + # @!parse + # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] + # attr_writer :status + + # @!parse + # # @param id [String] + # # @param call_id [String] + # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + # # @param acknowledged_safety_checks [Array] + # # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] + # # @param type [Symbol, :computer_call_output] + # # + # def initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + + class AcknowledgedSafetyCheck < OpenAI::BaseModel + # @!attribute id + # The ID of the pending safety check. + # + # @return [String] + required :id, String + + # @!attribute code + # The type of the pending safety check. + # + # @return [String] + required :code, String + + # @!attribute message + # Details about the pending safety check. + # + # @return [String] + required :message, String + + # @!parse + # # A pending safety check for the computer call. + # # + # # @param id [String] + # # @param code [String] + # # @param message [String] + # # + # def initialize(id:, code:, message:, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + + # @abstract + # + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. + class Status < OpenAI::Enum + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + finalize! 
+ end + end + end + end +end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb new file mode 100644 index 00000000..4dc426ff --- /dev/null +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + # @!attribute type + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. + # + # @return [Symbol, :computer_screenshot] + required :type, const: :computer_screenshot + + # @!attribute [r] file_id + # The identifier of an uploaded file that contains the screenshot. + # + # @return [String, nil] + optional :file_id, String + + # @!parse + # # @return [String] + # attr_writer :file_id + + # @!attribute [r] image_url + # The URL of the screenshot image. + # + # @return [String, nil] + optional :image_url, String + + # @!parse + # # @return [String] + # attr_writer :image_url + + # @!parse + # # A computer screenshot image used with the computer use tool. + # # + # # @param file_id [String] + # # @param image_url [String] + # # @param type [Symbol, :computer_screenshot] + # # + # def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb new file mode 100644 index 00000000..fa7413ea --- /dev/null +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall + # @!attribute id + # The unique ID of the function call tool output. + # + # @return [String] + required :id, String + + # @!parse + # # A tool call to run a function. See the + # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # # for more information. + # # + # # @param id [String] + # # + # def initialize(id:, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb new file mode 100644 index 00000000..503dee7b --- /dev/null +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + # @!attribute id + # The unique ID of the function call tool output. + # + # @return [String] + required :id, String + + # @!attribute call_id + # The unique ID of the function tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute output + # A JSON string of the output of the function tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the function tool call output. Always `function_call_output`. + # + # @return [Symbol, :function_call_output] + required :type, const: :function_call_output + + # @!attribute [r] status + # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] + optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } + + # @!parse + # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] + # attr_writer :status + + # @!parse + # # @param id [String] + # # @param call_id [String] + # # @param output [String] + # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] + # # @param type [Symbol, :function_call_output] + # # + # def initialize(id:, call_id:, output:, status: nil, type: :function_call_output, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + + # @abstract + # + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + class Status < OpenAI::Enum + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + finalize! + end + end + end + end +end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index cd5d395b..de2477cf 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -149,8 +149,8 @@ class ComputerCallOutput < OpenAI::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output] - required :output, -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output } + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + required :output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type # The type of the computer tool call output. Always `computer_call_output`. @@ -195,7 +195,7 @@ class ComputerCallOutput < OpenAI::BaseModel # # The output of a computer tool call. # # # # @param call_id [String] - # # @param output [OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output] + # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] # # @param id [String] # # @param acknowledged_safety_checks [Array] # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] @@ -205,46 +205,6 @@ class ComputerCallOutput < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - class Output < OpenAI::BaseModel - # @!attribute type - # Specifies the event type. For a computer screenshot, this property is always set - # to `computer_screenshot`. - # - # @return [Symbol, :computer_screenshot] - required :type, const: :computer_screenshot - - # @!attribute [r] file_id - # The identifier of an uploaded file that contains the screenshot. - # - # @return [String, nil] - optional :file_id, String - - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] image_url - # The URL of the screenshot image. - # - # @return [String, nil] - optional :image_url, String - - # @!parse - # # @return [String] - # attr_writer :image_url - - # @!parse - # # A computer screenshot image used with the computer use tool. 
- # # - # # @param file_id [String] - # # @param image_url [String] - # # @param type [Symbol, :computer_screenshot] - # # - # def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super - - # def initialize: (Hash | OpenAI::BaseModel) -> void - end - class AcknowledgedSafetyCheck < OpenAI::BaseModel # @!attribute id # The ID of the pending safety check. diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb new file mode 100644 index 00000000..2a9dd999 --- /dev/null +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -0,0 +1,92 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseInputMessageItem < OpenAI::BaseModel + # @!attribute id + # The unique ID of the message input. + # + # @return [String] + required :id, String + + # @!attribute content + # A list of one or many input items to the model, containing different content + # types. + # + # @return [Array] + required :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } + + # @!attribute role + # The role of the message input. One of `user`, `system`, or `developer`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] + required :role, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Role } + + # @!attribute [r] status + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] + optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Status } + + # @!parse + # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] + # attr_writer :status + + # @!attribute [r] type + # The type of the message input. Always set to `message`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] + optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Type } + + # @!parse + # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] + # attr_writer :type + + # @!parse + # # @param id [String] + # # @param content [Array] + # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] + # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] + # # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] + # # + # def initialize(id:, content:, role:, status: nil, type: nil, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + + # @abstract + # + # The role of the message input. One of `user`, `system`, or `developer`. + class Role < OpenAI::Enum + USER = :user + SYSTEM = :system + DEVELOPER = :developer + + finalize! + end + + # @abstract + # + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + class Status < OpenAI::Enum + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + finalize! + end + + # @abstract + # + # The type of the message input. Always set to `message`. + class Type < OpenAI::Enum + MESSAGE = :message + + finalize! 
+ end + end + end + end +end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb new file mode 100644 index 00000000..b3f4f86c --- /dev/null +++ b/lib/openai/models/responses/response_item.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + # @abstract + # + # Content item used to generate a response. + class ResponseItem < OpenAI::Union + discriminator :type + + variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem } + + # An output message from the model. + variant :message, -> { OpenAI::Models::Responses::ResponseOutputMessage } + + # The results of a file search tool call. See the + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. + variant :file_search_call, -> { OpenAI::Models::Responses::ResponseFileSearchToolCall } + + # A tool call to a computer use tool. See the + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. + variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall } + + variant :computer_call_output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem } + + # The results of a web search tool call. See the + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. + variant :web_search_call, -> { OpenAI::Models::Responses::ResponseFunctionWebSearch } + + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. + variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCallItem } + + variant :function_call_output, -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] + # def variants; end + # end + end + end + end +end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 7890dc3a..34ca03ad 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -7,8 +7,8 @@ class ResponseItemList < OpenAI::BaseModel # @!attribute data # A list of items used to generate this response. # - # @return [Array] - required :data, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseItemList::Data] } + # @return [Array] + required :data, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseItem] } # @!attribute first_id # The ID of the first item in the list. @@ -37,7 +37,7 @@ class ResponseItemList < OpenAI::BaseModel # @!parse # # A list of Response items. 
# # - # # @param data [Array] + # # @param data [Array] # # @param first_id [String] # # @param has_more [Boolean] # # @param last_id [String] @@ -46,337 +46,6 @@ class ResponseItemList < OpenAI::BaseModel # def initialize(data:, first_id:, has_more:, last_id:, object: :list, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void - - # @abstract - # - # Content item used to generate a response. - class Data < OpenAI::Union - discriminator :type - - variant :message, -> { OpenAI::Models::Responses::ResponseItemList::Data::Message } - - # An output message from the model. - variant :message, -> { OpenAI::Models::Responses::ResponseOutputMessage } - - # The results of a file search tool call. See the - # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. - variant :file_search_call, -> { OpenAI::Models::Responses::ResponseFileSearchToolCall } - - # A tool call to a computer use tool. See the - # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. - variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall } - - variant :computer_call_output, - -> { OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput } - - # The results of a web search tool call. See the - # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. - variant :web_search_call, -> { OpenAI::Models::Responses::ResponseFunctionWebSearch } - - # A tool call to run a function. See the - # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. - variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCall } - - variant :function_call_output, - -> { OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput } - - class Message < OpenAI::BaseModel - # @!attribute id - # The unique ID of the message input. - # - # @return [String] - required :id, String - - # @!attribute content - # A list of one or many input items to the model, containing different content - # types. - # - # @return [Array] - required :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } - - # @!attribute role - # The role of the message input. One of `user`, `system`, or `developer`. - # - # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Role] - required :role, enum: -> { OpenAI::Models::Responses::ResponseItemList::Data::Message::Role } - - # @!attribute [r] status - # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. - # - # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseItemList::Data::Message::Status } - - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Status] - # attr_writer :status - - # @!attribute [r] type - # The type of the message input. Always set to `message`. 
-            #
-            #   @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Type, nil]
-            optional :type, enum: -> { OpenAI::Models::Responses::ResponseItemList::Data::Message::Type }
-
-            # @!parse
-            #   # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Type]
-            #   attr_writer :type
-
-            # @!parse
-            #   # @param id [String]
-            #   # @param content [Array<OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile>]
-            #   # @param role [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Role]
-            #   # @param status [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Status]
-            #   # @param type [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::Message::Type]
-            #   #
-            #   def initialize(id:, content:, role:, status: nil, type: nil, **) = super
-
-            # def initialize: (Hash | OpenAI::BaseModel) -> void
-
-            # @abstract
-            #
-            # The role of the message input. One of `user`, `system`, or `developer`.
-            class Role < OpenAI::Enum
-              USER = :user
-              SYSTEM = :system
-              DEVELOPER = :developer
-
-              finalize!
-            end
-
-            # @abstract
-            #
-            # The status of item. One of `in_progress`, `completed`, or `incomplete`.
-            #   Populated when items are returned via API.
-            class Status < OpenAI::Enum
-              IN_PROGRESS = :in_progress
-              COMPLETED = :completed
-              INCOMPLETE = :incomplete
-
-              finalize!
-            end
-
-            # @abstract
-            #
-            # The type of the message input. Always set to `message`.
-            class Type < OpenAI::Enum
-              MESSAGE = :message
-
-              finalize!
-            end
-          end
-
-          class ComputerCallOutput < OpenAI::BaseModel
-            # @!attribute id
-            #   The unique ID of the computer call tool output.
-            #
-            #   @return [String]
-            required :id, String
-
-            # @!attribute call_id
-            #   The ID of the computer tool call that produced the output.
-            #
-            #   @return [String]
-            required :call_id, String
-
-            # @!attribute output
-            #   A computer screenshot image used with the computer use tool.
-            #
-            #   @return [OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output]
-            required :output, -> { OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output }
-
-            # @!attribute type
-            #   The type of the computer tool call output. Always `computer_call_output`.
-            #
-            #   @return [Symbol, :computer_call_output]
-            required :type, const: :computer_call_output
-
-            # @!attribute [r] acknowledged_safety_checks
-            #   The safety checks reported by the API that have been acknowledged by the
-            #     developer.
-            #
-            #   @return [Array<OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck>, nil]
-            optional :acknowledged_safety_checks,
-                     -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] }
-
-            # @!parse
-            #   # @return [Array<OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck>]
-            #   attr_writer :acknowledged_safety_checks
-
-            # @!attribute [r] status
-            #   The status of the message input. One of `in_progress`, `completed`, or
-            #     `incomplete`. Populated when input items are returned via API.
-            #
-            #   @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Status, nil]
-            optional :status,
-                     enum: -> { OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Status }
-
-            # @!parse
-            #   # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Status]
-            #   attr_writer :status
-
-            # @!parse
-            #   # @param id [String]
-            #   # @param call_id [String]
-            #   # @param output [OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output]
-            #   # @param acknowledged_safety_checks [Array<OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck>]
-            #   # @param status [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Status]
-            #   # @param type [Symbol, :computer_call_output]
-            #   #
-            #   def initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super
-
-            # def initialize: (Hash | OpenAI::BaseModel) -> void
-
-            class Output < OpenAI::BaseModel
-              # @!attribute type
-              #   Specifies the event type. For a computer screenshot, this property is always set
-              #     to `computer_screenshot`.
-              #
-              #   @return [Symbol, :computer_screenshot]
-              required :type, const: :computer_screenshot
-
-              # @!attribute [r] file_id
-              #   The identifier of an uploaded file that contains the screenshot.
-              #
-              #   @return [String, nil]
-              optional :file_id, String
-
-              # @!parse
-              #   # @return [String]
-              #   attr_writer :file_id
-
-              # @!attribute [r] image_url
-              #   The URL of the screenshot image.
-              #
-              #   @return [String, nil]
-              optional :image_url, String
-
-              # @!parse
-              #   # @return [String]
-              #   attr_writer :image_url
-
-              # @!parse
-              #   # A computer screenshot image used with the computer use tool.
-              #   #
-              #   # @param file_id [String]
-              #   # @param image_url [String]
-              #   # @param type [Symbol, :computer_screenshot]
-              #   #
-              #   def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super
-
-              # def initialize: (Hash | OpenAI::BaseModel) -> void
-            end
-
-            class AcknowledgedSafetyCheck < OpenAI::BaseModel
-              # @!attribute id
-              #   The ID of the pending safety check.
-              #
-              #   @return [String]
-              required :id, String
-
-              # @!attribute code
-              #   The type of the pending safety check.
-              #
-              #   @return [String]
-              required :code, String
-
-              # @!attribute message
-              #   Details about the pending safety check.
-              #
-              #   @return [String]
-              required :message, String
-
-              # @!parse
-              #   # A pending safety check for the computer call.
-              #   #
-              #   # @param id [String]
-              #   # @param code [String]
-              #   # @param message [String]
-              #   #
-              #   def initialize(id:, code:, message:, **) = super
-
-              # def initialize: (Hash | OpenAI::BaseModel) -> void
-            end
-
-            # @abstract
-            #
-            # The status of the message input. One of `in_progress`, `completed`, or
-            #   `incomplete`. Populated when input items are returned via API.
-            class Status < OpenAI::Enum
-              IN_PROGRESS = :in_progress
-              COMPLETED = :completed
-              INCOMPLETE = :incomplete
-
-              finalize!
-            end
-          end
-
-          class FunctionCallOutput < OpenAI::BaseModel
-            # @!attribute id
-            #   The unique ID of the function call tool output.
-            #
-            #   @return [String]
-            required :id, String
-
-            # @!attribute call_id
-            #   The unique ID of the function tool call generated by the model.
-            #
-            #   @return [String]
-            required :call_id, String
-
-            # @!attribute output
-            #   A JSON string of the output of the function tool call.
-            #
-            #   @return [String]
-            required :output, String
-
-            # @!attribute type
-            #   The type of the function tool call output. Always `function_call_output`.
-            #
-            #   @return [Symbol, :function_call_output]
-            required :type, const: :function_call_output
-
-            # @!attribute [r] status
-            #   The status of the item. One of `in_progress`, `completed`, or `incomplete`.
-            #     Populated when items are returned via API.
-            #
-            #   @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::Status, nil]
-            optional :status,
-                     enum: -> { OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::Status }
-
-            # @!parse
-            #   # @return [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::Status]
-            #   attr_writer :status
-
-            # @!parse
-            #   # @param id [String]
-            #   # @param call_id [String]
-            #   # @param output [String]
-            #   # @param status [Symbol, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::Status]
-            #   # @param type [Symbol, :function_call_output]
-            #   #
-            #   def initialize(id:, call_id:, output:, status: nil, type: :function_call_output, **) = super
-
-            # def initialize: (Hash | OpenAI::BaseModel) -> void
-
-            # @abstract
-            #
-            # The status of the item. One of `in_progress`, `completed`, or `incomplete`.
-            #   Populated when items are returned via API.
-            class Status < OpenAI::Enum
-              IN_PROGRESS = :in_progress
-              COMPLETED = :completed
-              INCOMPLETE = :incomplete
-
-              finalize!
-            end
-          end
-
-          # @!parse
-          #   class << self
-          #     # @return [Array(OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput)]
-          #     def variants; end
-          #   end
-        end
       end
     end
 end
diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb
index 1fe57b71..ff8ff1d0 100644
--- a/lib/openai/resources/responses/input_items.rb
+++ b/lib/openai/resources/responses/input_items.rb
@@ -24,7 +24,7 @@ class InputItems
       #
       #   @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
       #
-      # @return [OpenAI::CursorPage<OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput>]
+      # @return [OpenAI::CursorPage<OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem>]
       def list(response_id, params = {})
         parsed, options = OpenAI::Models::Responses::InputItemListParams.dump_request(params)
         @client.request(
@@ -32,7 +32,7 @@ def list(response_id, params = {})
           method: :get,
           path: ["responses/%0s/input_items", response_id],
           query: parsed,
           page: OpenAI::CursorPage,
-          model: OpenAI::Models::Responses::ResponseItemList::Data,
+          model: OpenAI::Models::Responses::ResponseItem,
           options: options
         )
       end
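Everything below this point propagates the same rename through the Sorbet (`rbi/`) and RBS (`sig/`) signatures and the test suite. For reference, a hypothetical call shape for the list endpoint, inferred from `InputItemListParams` as it appears in those signatures; the parameter values are illustrative only and not taken from this patch.

    # Sketch only: cursor pagination over input items; `client` as in the earlier sketch.
    page = client.responses.input_items.list(
      "resp_123",
      after: "msg_abc", # cursor: return items after this ID
      limit: 20,        # page size
      order: :asc       # assumed value of InputItemListParams::order
    )
    first_item = page.to_enum.first # typed as the ResponseItem union in the sigs below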
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
new file mode 100644
index 00000000..4e5be0f8
--- /dev/null
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
@@ -0,0 +1,159 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Responses
+      class ResponseComputerToolCallOutputItem < OpenAI::BaseModel
+        # The unique ID of the computer call tool output.
+        sig { returns(String) }
+        def id
+        end
+
+        sig { params(_: String).returns(String) }
+        def id=(_)
+        end
+
+        # The ID of the computer tool call that produced the output.
+        sig { returns(String) }
+        def call_id
+        end
+
+        sig { params(_: String).returns(String) }
+        def call_id=(_)
+        end
+
+        # A computer screenshot image used with the computer use tool.
+        sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) }
+        def output
+        end
+
+        sig do
+          params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot)
+            .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot)
+        end
+        def output=(_)
+        end
+
+        # The type of the computer tool call output. Always `computer_call_output`.
+        sig { returns(Symbol) }
+        def type
+        end
+
+        sig { params(_: Symbol).returns(Symbol) }
+        def type=(_)
+        end
+
+        # The safety checks reported by the API that have been acknowledged by the
+        #   developer.
+        sig do
+          returns(
+            T.nilable(
+              T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]
+            )
+          )
+        end
+        def acknowledged_safety_checks
+        end
+
+        sig do
+          params(
+            _: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]
+          )
+            .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck])
+        end
+        def acknowledged_safety_checks=(_)
+        end
+
+        # The status of the message input. One of `in_progress`, `completed`, or
+        #   `incomplete`. Populated when input items are returned via API.
+        sig { returns(T.nilable(Symbol)) }
+        def status
+        end
+
+        sig { params(_: Symbol).returns(Symbol) }
+        def status=(_)
+        end
+
+        sig do
+          params(
+            id: String,
+            call_id: String,
+            output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+            acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck],
+            status: Symbol,
+            type: Symbol
+          )
+            .returns(T.attached_class)
+        end
+        def self.new(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output)
+        end
+
+        sig do
+          override
+            .returns(
+              {
+                id: String,
+                call_id: String,
+                output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+                type: Symbol,
+                acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck],
+                status: Symbol
+              }
+            )
+        end
+        def to_hash
+        end
+
+        class AcknowledgedSafetyCheck < OpenAI::BaseModel
+          # The ID of the pending safety check.
+          sig { returns(String) }
+          def id
+          end
+
+          sig { params(_: String).returns(String) }
+          def id=(_)
+          end
+
+          # The type of the pending safety check.
+          sig { returns(String) }
+          def code
+          end
+
+          sig { params(_: String).returns(String) }
+          def code=(_)
+          end
+
+          # Details about the pending safety check.
+          sig { returns(String) }
+          def message
+          end
+
+          sig { params(_: String).returns(String) }
+          def message=(_)
+          end
+
+          # A pending safety check for the computer call.
+          sig { params(id: String, code: String, message: String).returns(T.attached_class) }
+          def self.new(id:, code:, message:)
+          end
+
+          sig { override.returns({id: String, code: String, message: String}) }
+          def to_hash
+          end
+        end
+
+        # The status of the message input. One of `in_progress`, `completed`, or
+        #   `incomplete`. Populated when input items are returned via API.
+        class Status < OpenAI::Enum
+          abstract!
+ + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi new file mode 100644 index 00000000..7b4d757f --- /dev/null +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -0,0 +1,46 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. + sig { returns(Symbol) } + def type + end + + sig { params(_: Symbol).returns(Symbol) } + def type=(_) + end + + # The identifier of an uploaded file that contains the screenshot. + sig { returns(T.nilable(String)) } + def file_id + end + + sig { params(_: String).returns(String) } + def file_id=(_) + end + + # The URL of the screenshot image. + sig { returns(T.nilable(String)) } + def image_url + end + + sig { params(_: String).returns(String) } + def image_url=(_) + end + + # A computer screenshot image used with the computer use tool. + sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } + def self.new(file_id: nil, image_url: nil, type: :computer_screenshot) + end + + sig { override.returns({type: Symbol, file_id: String, image_url: String}) } + def to_hash + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi new file mode 100644 index 00000000..6e1a4cce --- /dev/null +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -0,0 +1,29 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall + # The unique ID of the function call tool output. + sig { returns(String) } + def id + end + + sig { params(_: String).returns(String) } + def id=(_) + end + + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. + sig { params(id: String).returns(T.attached_class) } + def self.new(id:) + end + + sig { override.returns({id: String}) } + def to_hash + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi new file mode 100644 index 00000000..89456a9b --- /dev/null +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + # The unique ID of the function call tool output. + sig { returns(String) } + def id + end + + sig { params(_: String).returns(String) } + def id=(_) + end + + # The unique ID of the function tool call generated by the model. + sig { returns(String) } + def call_id + end + + sig { params(_: String).returns(String) } + def call_id=(_) + end + + # A JSON string of the output of the function tool call. 
+ sig { returns(String) } + def output + end + + sig { params(_: String).returns(String) } + def output=(_) + end + + # The type of the function tool call output. Always `function_call_output`. + sig { returns(Symbol) } + def type + end + + sig { params(_: Symbol).returns(Symbol) } + def type=(_) + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + sig { returns(T.nilable(Symbol)) } + def status + end + + sig { params(_: Symbol).returns(Symbol) } + def status=(_) + end + + sig do + params( + id: String, + call_id: String, + output: String, + status: Symbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) + end + + sig { override.returns({id: String, call_id: String, output: String, type: Symbol, status: Symbol}) } + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index fc0ede1f..f3b960b7 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -139,13 +139,13 @@ module OpenAI end # A computer screenshot image used with the computer use tool. - sig { returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) } def output end sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output) - .returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output) + params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) + .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) end def output=(_) end @@ -205,7 +205,7 @@ module OpenAI sig do params( call_id: String, - output: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], status: Symbol, @@ -221,7 +221,7 @@ module OpenAI .returns( { call_id: String, - output: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: Symbol, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], @@ -232,45 +232,6 @@ module OpenAI def to_hash end - class Output < OpenAI::BaseModel - # Specifies the event type. For a computer screenshot, this property is always set - # to `computer_screenshot`. - sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end - - # The identifier of an uploaded file that contains the screenshot. - sig { returns(T.nilable(String)) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end - - # The URL of the screenshot image. 
- sig { returns(T.nilable(String)) } - def image_url - end - - sig { params(_: String).returns(String) } - def image_url=(_) - end - - # A computer screenshot image used with the computer use tool. - sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } - def self.new(file_id: nil, image_url: nil, type: :computer_screenshot) - end - - sig { override.returns({type: Symbol, file_id: String, image_url: String}) } - def to_hash - end - end - class AcknowledgedSafetyCheck < OpenAI::BaseModel # The ID of the pending safety check. sig { returns(String) } diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi new file mode 100644 index 00000000..2070f09c --- /dev/null +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -0,0 +1,119 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseInputMessageItem < OpenAI::BaseModel + # The unique ID of the message input. + sig { returns(String) } + def id + end + + sig { params(_: String).returns(String) } + def id=(_) + end + + # A list of one or many input items to the model, containing different content + # types. + sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } + def content + end + + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageContentList) + .returns(OpenAI::Models::Responses::ResponseInputMessageContentList) + end + def content=(_) + end + + # The role of the message input. One of `user`, `system`, or `developer`. + sig { returns(Symbol) } + def role + end + + sig { params(_: Symbol).returns(Symbol) } + def role=(_) + end + + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + sig { returns(T.nilable(Symbol)) } + def status + end + + sig { params(_: Symbol).returns(Symbol) } + def status=(_) + end + + # The type of the message input. Always set to `message`. + sig { returns(T.nilable(Symbol)) } + def type + end + + sig { params(_: Symbol).returns(Symbol) } + def type=(_) + end + + sig do + params( + id: String, + content: OpenAI::Models::Responses::ResponseInputMessageContentList, + role: Symbol, + status: Symbol, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(id:, content:, role:, status: nil, type: nil) + end + + sig do + override + .returns( + { + id: String, + content: OpenAI::Models::Responses::ResponseInputMessageContentList, + role: Symbol, + status: Symbol, + type: Symbol + } + ) + end + def to_hash + end + + # The role of the message input. One of `user`, `system`, or `developer`. + class Role < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + USER = :user + SYSTEM = :system + DEVELOPER = :developer + end + + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + end + + # The type of the message input. Always set to `message`. + class Type < OpenAI::Enum + abstract! 
+ + Value = type_template(:out) { {fixed: Symbol} } + + MESSAGE = :message + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi new file mode 100644 index 00000000..fac7e7a2 --- /dev/null +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + module Responses + # Content item used to generate a response. + class ResponseItem < OpenAI::Union + abstract! + + Variants = type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) + } + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index f6e9982c..983bd5b6 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -11,14 +11,14 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ] ) @@ -30,28 +30,28 @@ module OpenAI params( _: T::Array[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ] ) .returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + 
OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ] ) @@ -100,14 +100,14 @@ module OpenAI params( data: T::Array[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ], first_id: String, @@ -126,14 +126,14 @@ module OpenAI { data: T::Array[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ], first_id: String, @@ -145,409 +145,6 @@ module OpenAI end def to_hash end - - # Content item used to generate a response. - class Data < OpenAI::Union - abstract! - - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput - ) - } - end - - class Message < OpenAI::BaseModel - # The unique ID of the message input. - sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end - - # A list of one or many input items to the model, containing different content - # types. - sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } - def content - end - - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageContentList) - .returns(OpenAI::Models::Responses::ResponseInputMessageContentList) - end - def content=(_) - end - - # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end - - # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } - def status - end - - sig { params(_: Symbol).returns(Symbol) } - def status=(_) - end - - # The type of the message input. Always set to `message`. 
- sig { returns(T.nilable(Symbol)) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end - - sig do - params( - id: String, - content: OpenAI::Models::Responses::ResponseInputMessageContentList, - role: Symbol, - status: Symbol, - type: Symbol - ) - .returns(T.attached_class) - end - def self.new(id:, content:, role:, status: nil, type: nil) - end - - sig do - override - .returns( - { - id: String, - content: OpenAI::Models::Responses::ResponseInputMessageContentList, - role: Symbol, - status: Symbol, - type: Symbol - } - ) - end - def to_hash - end - - # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - USER = :user - SYSTEM = :system - DEVELOPER = :developer - end - - # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete - end - - # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - MESSAGE = :message - end - end - - class ComputerCallOutput < OpenAI::BaseModel - # The unique ID of the computer call tool output. - sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end - - # The ID of the computer tool call that produced the output. - sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end - - # A computer screenshot image used with the computer use tool. - sig { returns(OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output) } - def output - end - - sig do - params(_: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output) - .returns(OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output) - end - def output=(_) - end - - # The type of the computer tool call output. Always `computer_call_output`. - sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end - - # The safety checks reported by the API that have been acknowledged by the - # developer. - sig do - returns( - T.nilable( - T::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] - ) - ) - end - def acknowledged_safety_checks - end - - sig do - params( - _: T::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] - ) - .returns( - T::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] - ) - end - def acknowledged_safety_checks=(_) - end - - # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. 
- sig { returns(T.nilable(Symbol)) } - def status - end - - sig { params(_: Symbol).returns(Symbol) } - def status=(_) - end - - sig do - params( - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol, - type: Symbol - ) - .returns(T.attached_class) - end - def self.new(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) - end - - sig do - override - .returns( - { - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output, - type: Symbol, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol - } - ) - end - def to_hash - end - - class Output < OpenAI::BaseModel - # Specifies the event type. For a computer screenshot, this property is always set - # to `computer_screenshot`. - sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end - - # The identifier of an uploaded file that contains the screenshot. - sig { returns(T.nilable(String)) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end - - # The URL of the screenshot image. - sig { returns(T.nilable(String)) } - def image_url - end - - sig { params(_: String).returns(String) } - def image_url=(_) - end - - # A computer screenshot image used with the computer use tool. - sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } - def self.new(file_id: nil, image_url: nil, type: :computer_screenshot) - end - - sig { override.returns({type: Symbol, file_id: String, image_url: String}) } - def to_hash - end - end - - class AcknowledgedSafetyCheck < OpenAI::BaseModel - # The ID of the pending safety check. - sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end - - # The type of the pending safety check. - sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end - - # Details about the pending safety check. - sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end - - # A pending safety check for the computer call. - sig { params(id: String, code: String, message: String).returns(T.attached_class) } - def self.new(id:, code:, message:) - end - - sig { override.returns({id: String, code: String, message: String}) } - def to_hash - end - end - - # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete - end - end - - class FunctionCallOutput < OpenAI::BaseModel - # The unique ID of the function call tool output. - sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end - - # The unique ID of the function tool call generated by the model. - sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end - - # A JSON string of the output of the function tool call. 
- sig { returns(String) } - def output - end - - sig { params(_: String).returns(String) } - def output=(_) - end - - # The type of the function tool call output. Always `function_call_output`. - sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end - - # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } - def status - end - - sig { params(_: Symbol).returns(Symbol) } - def status=(_) - end - - sig do - params( - id: String, - call_id: String, - output: String, - status: Symbol, - type: Symbol - ).returns(T.attached_class) - end - def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) - end - - sig do - override.returns({id: String, call_id: String, output: String, type: Symbol, status: Symbol}) - end - def to_hash - end - - # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete - end - end - end end end end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index f6d2e651..44e41eb2 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -17,14 +17,14 @@ module OpenAI .returns( OpenAI::CursorPage[ T.any( - OpenAI::Models::Responses::ResponseItemList::Data::Message, + OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem ) ] ) diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs new file mode 100644 index 00000000..90b93e5a --- /dev/null +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -0,0 +1,83 @@ +module OpenAI + module Models + module Responses + type response_computer_tool_call_output_item = + { + id: String, + call_id: String, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + type: :computer_call_output, + acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + } + + class ResponseComputerToolCallOutputItem < OpenAI::BaseModel + attr_accessor id: String + + attr_accessor call_id: String + + attr_accessor output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot + + attr_accessor type: :computer_call_output + + attr_reader acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]? 
+ + def acknowledged_safety_checks=: ( + ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + ) -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + + attr_reader status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status? + + def status=: ( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + + def initialize: + ( + id: String, + call_id: String, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, + type: :computer_call_output + ) -> void + | ( + ?OpenAI::Models::Responses::response_computer_tool_call_output_item + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_item + + type acknowledged_safety_check = + { id: String, code: String, message: String } + + class AcknowledgedSafetyCheck < OpenAI::BaseModel + attr_accessor id: String + + attr_accessor code: String + + attr_accessor message: String + + def initialize: + (id: String, code: String, message: String) -> void + | ( + ?OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::acknowledged_safety_check + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::acknowledged_safety_check + end + + type status = :in_progress | :completed | :incomplete + + class Status < OpenAI::Enum + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] + end + end + end + end +end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs new file mode 100644 index 00000000..931b0f8d --- /dev/null +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -0,0 +1,33 @@ +module OpenAI + module Models + module Responses + type response_computer_tool_call_output_screenshot = + { type: :computer_screenshot, file_id: String, image_url: String } + + class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + attr_accessor type: :computer_screenshot + + attr_reader file_id: String? + + def file_id=: (String) -> String + + attr_reader image_url: String? 
+ + def image_url=: (String) -> String + + def initialize: + ( + file_id: String, + image_url: String, + type: :computer_screenshot + ) -> void + | ( + ?OpenAI::Models::Responses::response_computer_tool_call_output_screenshot + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_screenshot + end + end + end +end diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs b/sig/openai/models/responses/response_function_tool_call_item.rbs new file mode 100644 index 00000000..48d08581 --- /dev/null +++ b/sig/openai/models/responses/response_function_tool_call_item.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Models + module Responses + type response_function_tool_call_item = { id: String } + + class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall + attr_accessor id: String + + def initialize: + (id: String) -> void + | ( + ?OpenAI::Models::Responses::response_function_tool_call_item + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_item + end + end + end +end diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs new file mode 100644 index 00000000..2208c1db --- /dev/null +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -0,0 +1,55 @@ +module OpenAI + module Models + module Responses + type response_function_tool_call_output_item = + { + id: String, + call_id: String, + output: String, + type: :function_call_output, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + } + + class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + attr_accessor id: String + + attr_accessor call_id: String + + attr_accessor output: String + + attr_accessor type: :function_call_output + + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status? 
+ + def status=: ( + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + + def initialize: + ( + id: String, + call_id: String, + output: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, + type: :function_call_output + ) -> void + | ( + ?OpenAI::Models::Responses::response_function_tool_call_output_item + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_output_item + + type status = :in_progress | :completed | :incomplete + + class Status < OpenAI::Enum + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status] + end + end + end + end +end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index b60c6d6a..e6a03f3b 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -86,7 +86,7 @@ module OpenAI type computer_call_output = { call_id: String, - output: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: :computer_call_output, id: String, acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], @@ -96,7 +96,7 @@ module OpenAI class ComputerCallOutput < OpenAI::BaseModel attr_accessor call_id: String - attr_accessor output: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output + attr_accessor output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot attr_accessor type: :computer_call_output @@ -119,7 +119,7 @@ module OpenAI def initialize: ( call_id: String, - output: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Output, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, id: String, acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, @@ -132,34 +132,6 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::computer_call_output - type output = - { type: :computer_screenshot, file_id: String, image_url: String } - - class Output < OpenAI::BaseModel - attr_accessor type: :computer_screenshot - - attr_reader file_id: String? - - def file_id=: (String) -> String - - attr_reader image_url: String? 
- - def image_url=: (String) -> String - - def initialize: - ( - file_id: String, - image_url: String, - type: :computer_screenshot - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::output - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::output - end - type acknowledged_safety_check = { id: String, code: String, message: String } diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs new file mode 100644 index 00000000..0b1246b9 --- /dev/null +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -0,0 +1,77 @@ +module OpenAI + module Models + module Responses + type response_input_message_item = + { + id: String, + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + } + + class ResponseInputMessageItem < OpenAI::BaseModel + attr_accessor id: String + + attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list + + attr_accessor role: OpenAI::Models::Responses::ResponseInputMessageItem::role + + attr_reader status: OpenAI::Models::Responses::ResponseInputMessageItem::status? + + def status=: ( + OpenAI::Models::Responses::ResponseInputMessageItem::status + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::status + + attr_reader type: OpenAI::Models::Responses::ResponseInputMessageItem::type_? + + def type=: ( + OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::type_ + + def initialize: + ( + id: String, + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ) -> void + | ( + ?OpenAI::Models::Responses::response_input_message_item + | OpenAI::BaseModel data + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_input_message_item + + type role = :user | :system | :developer + + class Role < OpenAI::Enum + USER: :user + SYSTEM: :system + DEVELOPER: :developer + + def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role] + end + + type status = :in_progress | :completed | :incomplete + + class Status < OpenAI::Enum + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status] + end + + type type_ = :message + + class Type < OpenAI::Enum + MESSAGE: :message + + def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] + end + end + end + end +end diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs new file mode 100644 index 00000000..8fc80907 --- /dev/null +++ b/sig/openai/models/responses/response_item.rbs @@ -0,0 +1,19 @@ +module OpenAI + module Models + module Responses + type response_item = + OpenAI::Models::Responses::ResponseInputMessageItem + | OpenAI::Models::Responses::ResponseOutputMessage + | OpenAI::Models::Responses::ResponseFileSearchToolCall + | OpenAI::Models::Responses::ResponseComputerToolCall 
+ | OpenAI::Models::Responses::ResponseComputerToolCallOutputItem + | OpenAI::Models::Responses::ResponseFunctionWebSearch + | OpenAI::Models::Responses::ResponseFunctionToolCallItem + | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + + class ResponseItem < OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] + end + end + end +end diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index a8d0d3fc..05202a31 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -6,7 +6,7 @@ module OpenAI module Responses type response_item_list = { - data: ::Array[OpenAI::Models::Responses::ResponseItemList::data], + data: ::Array[OpenAI::Models::Responses::response_item], first_id: String, has_more: bool, last_id: String, @@ -14,7 +14,7 @@ module OpenAI } class ResponseItemList < OpenAI::BaseModel - attr_accessor data: ::Array[OpenAI::Models::Responses::ResponseItemList::data] + attr_accessor data: ::Array[OpenAI::Models::Responses::response_item] attr_accessor first_id: String @@ -26,7 +26,7 @@ module OpenAI def initialize: ( - data: ::Array[OpenAI::Models::Responses::ResponseItemList::data], + data: ::Array[OpenAI::Models::Responses::response_item], first_id: String, has_more: bool, last_id: String, @@ -38,248 +38,6 @@ module OpenAI ) -> void def to_hash: -> OpenAI::Models::Responses::response_item_list - - type data = - OpenAI::Models::Responses::ResponseItemList::Data::Message - | OpenAI::Models::Responses::ResponseOutputMessage - | OpenAI::Models::Responses::ResponseFileSearchToolCall - | OpenAI::Models::Responses::ResponseComputerToolCall - | OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput - | OpenAI::Models::Responses::ResponseFunctionWebSearch - | OpenAI::Models::Responses::ResponseFunctionToolCall - | OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput - - class Data < OpenAI::Union - type message = - { - id: String, - content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Models::Responses::ResponseItemList::Data::Message::role, - status: OpenAI::Models::Responses::ResponseItemList::Data::Message::status, - type: OpenAI::Models::Responses::ResponseItemList::Data::Message::type_ - } - - class Message < OpenAI::BaseModel - attr_accessor id: String - - attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list - - attr_accessor role: OpenAI::Models::Responses::ResponseItemList::Data::Message::role - - attr_reader status: OpenAI::Models::Responses::ResponseItemList::Data::Message::status? - - def status=: ( - OpenAI::Models::Responses::ResponseItemList::Data::Message::status - ) -> OpenAI::Models::Responses::ResponseItemList::Data::Message::status - - attr_reader type: OpenAI::Models::Responses::ResponseItemList::Data::Message::type_? 
- - def type=: ( - OpenAI::Models::Responses::ResponseItemList::Data::Message::type_ - ) -> OpenAI::Models::Responses::ResponseItemList::Data::Message::type_ - - def initialize: - ( - id: String, - content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Models::Responses::ResponseItemList::Data::Message::role, - status: OpenAI::Models::Responses::ResponseItemList::Data::Message::status, - type: OpenAI::Models::Responses::ResponseItemList::Data::Message::type_ - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseItemList::Data::message - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseItemList::Data::message - - type role = :user | :system | :developer - - class Role < OpenAI::Enum - USER: :user - SYSTEM: :system - DEVELOPER: :developer - - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::Message::role] - end - - type status = :in_progress | :completed | :incomplete - - class Status < OpenAI::Enum - IN_PROGRESS: :in_progress - COMPLETED: :completed - INCOMPLETE: :incomplete - - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::Message::status] - end - - type type_ = :message - - class Type < OpenAI::Enum - MESSAGE: :message - - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::Message::type_] - end - end - - type computer_call_output = - { - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output, - type: :computer_call_output, - acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status - } - - class ComputerCallOutput < OpenAI::BaseModel - attr_accessor id: String - - attr_accessor call_id: String - - attr_accessor output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output - - attr_accessor type: :computer_call_output - - attr_reader acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck]? - - def acknowledged_safety_checks=: ( - ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] - ) -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck] - - attr_reader status: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status? 
- - def status=: ( - OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status - ) -> OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status - - def initialize: - ( - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output, - acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status, - type: :computer_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseItemList::Data::computer_call_output - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseItemList::Data::computer_call_output - - type output = - { type: :computer_screenshot, file_id: String, image_url: String } - - class Output < OpenAI::BaseModel - attr_accessor type: :computer_screenshot - - attr_reader file_id: String? - - def file_id=: (String) -> String - - attr_reader image_url: String? - - def image_url=: (String) -> String - - def initialize: - ( - file_id: String, - image_url: String, - type: :computer_screenshot - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::output - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::output - end - - type acknowledged_safety_check = - { id: String, code: String, message: String } - - class AcknowledgedSafetyCheck < OpenAI::BaseModel - attr_accessor id: String - - attr_accessor code: String - - attr_accessor message: String - - def initialize: - (id: String, code: String, message: String) -> void - | ( - ?OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::acknowledged_safety_check - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::acknowledged_safety_check - end - - type status = :in_progress | :completed | :incomplete - - class Status < OpenAI::Enum - IN_PROGRESS: :in_progress - COMPLETED: :completed - INCOMPLETE: :incomplete - - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::status] - end - end - - type function_call_output = - { - id: String, - call_id: String, - output: String, - type: :function_call_output, - status: OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status - } - - class FunctionCallOutput < OpenAI::BaseModel - attr_accessor id: String - - attr_accessor call_id: String - - attr_accessor output: String - - attr_accessor type: :function_call_output - - attr_reader status: OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status? 
- - def status=: ( - OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status - ) -> OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status - - def initialize: - ( - id: String, - call_id: String, - output: String, - status: OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status, - type: :function_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseItemList::Data::function_call_output - | OpenAI::BaseModel data - ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseItemList::Data::function_call_output - - type status = :in_progress | :completed | :incomplete - - class Status < OpenAI::Enum - IN_PROGRESS: :in_progress - COMPLETED: :completed - INCOMPLETE: :incomplete - - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::status] - end - end - - def self.variants: -> [OpenAI::Models::Responses::ResponseItemList::Data::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput] - end end end end diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index 34cc6a93..5cc014f8 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -7,7 +7,7 @@ module OpenAI String response_id, ?OpenAI::Models::Responses::InputItemListParams | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::Responses::ResponseItemList::data] + ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] | ( String response_id, after: String, @@ -15,7 +15,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::order, request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Responses::ResponseItemList::data] + ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] def initialize: (client: OpenAI::Client) -> void end diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 6886eb62..9ae8f8e4 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -12,19 +12,19 @@ def test_list row = response.to_enum.first assert_pattern do - row => OpenAI::Models::Responses::ResponseItemList::Data + row => OpenAI::Models::Responses::ResponseItem end assert_pattern do case row - in OpenAI::Models::Responses::ResponseItemList::Data::Message + in OpenAI::Models::Responses::ResponseInputMessageItem in OpenAI::Models::Responses::ResponseOutputMessage in OpenAI::Models::Responses::ResponseFileSearchToolCall in OpenAI::Models::Responses::ResponseComputerToolCall - in OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput + in OpenAI::Models::Responses::ResponseComputerToolCallOutputItem in OpenAI::Models::Responses::ResponseFunctionWebSearch - in OpenAI::Models::Responses::ResponseFunctionToolCall - in OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput + in OpenAI::Models::Responses::ResponseFunctionToolCallItem + in OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem end end @@ 
-34,8 +34,8 @@ def test_list type: :message, id: String, content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent]), - role: OpenAI::Models::Responses::ResponseItemList::Data::Message::Role, - status: OpenAI::Models::Responses::ResponseItemList::Data::Message::Status | nil + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status | nil } in { type: :message, @@ -63,25 +63,17 @@ def test_list type: :computer_call_output, id: String, call_id: String, - output: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Output, - acknowledged_safety_checks: ^(OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::AcknowledgedSafetyCheck]) | nil, - status: OpenAI::Models::Responses::ResponseItemList::Data::ComputerCallOutput::Status | nil + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + acknowledged_safety_checks: ^(OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status | nil } in {type: :web_search_call, id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status} - in { - type: :function_call, - arguments: String, - call_id: String, - name: String, - id: String | nil, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status | nil - } in { type: :function_call_output, id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseItemList::Data::FunctionCallOutput::Status | nil + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status | nil } end end From 3665bde5c0929e36af96bc1951038eedef841156 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 23:00:30 +0000 Subject: [PATCH 030/295] chore: disable unnecessary linter rules for sorbet manifests (#35) --- .rubocop.yml | 18 ++++++++++++++++++ .../chat/chat_completion_store_message.rbs | 4 +++- .../response_function_tool_call_item.rbs | 4 +++- 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index e612c952..3cb19f0a 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -83,6 +83,12 @@ Lint/UnusedMethodArgument: Metrics/AbcSize: Enabled: false +Metrics/BlockLength: + AllowedPatterns: + - assert_pattern + Exclude: + - "**/*.rbi" + Metrics/ClassLength: Enabled: false @@ -92,6 +98,10 @@ Metrics/CyclomaticComplexity: Metrics/MethodLength: Enabled: false +Metrics/ModuleLength: + Exclude: + - "**/*.rbi" + Metrics/ParameterLists: Enabled: false @@ -102,10 +112,18 @@ Naming/BlockForwarding: Exclude: - "**/*.rbi" +Naming/ClassAndModuleCamelCase: + Exclude: + - "**/*.rbi" + Naming/MethodParameterName: Exclude: - "**/*.rbi" +Naming/PredicateName: + Exclude: + - "**/*.rbi" + Naming/VariableNumber: Enabled: false diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index e96afacc..dabd6ab3 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -7,7 +7,9 @@ module OpenAI type chat_completion_store_message = { id: String } class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage - attr_accessor id: String + def id: -> String + + def id=: (String _) -> String def initialize: (id: 
String) -> void diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs b/sig/openai/models/responses/response_function_tool_call_item.rbs index 48d08581..fcb5863d 100644 --- a/sig/openai/models/responses/response_function_tool_call_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_item.rbs @@ -4,7 +4,9 @@ module OpenAI type response_function_tool_call_item = { id: String } class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall - attr_accessor id: String + def id: -> String + + def id=: (String _) -> String def initialize: (id: String) -> void From 34f9c1c57dadad076228c825333914c0e96cf29e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 18 Mar 2025 23:20:13 +0000 Subject: [PATCH 031/295] chore: add `@yieldparam` to yard doc (#36) --- lib/openai/pooled_net_requester.rb | 4 ++++ lib/openai/util.rb | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb index c6b17713..a7e5242a 100644 --- a/lib/openai/pooled_net_requester.rb +++ b/lib/openai/pooled_net_requester.rb @@ -51,6 +51,7 @@ def calibrate_socket_timeout(conn, deadline) # # @param blk [Proc] # + # @yieldparam [String] # @return [Net::HTTPGenericRequest] def build_request(request, &) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) @@ -86,6 +87,9 @@ def build_request(request, &) # @param url [URI::Generic] # @param deadline [Float] # @param blk [Proc] + # + # @raise [Timeout::Error] + # @yieldparam [Net::HTTP] private def with_pool(url, deadline:, &blk) origin = OpenAI::Util.uri_origin(url) timeout = deadline - OpenAI::Util.monotonic_secs diff --git a/lib/openai/util.rb b/lib/openai/util.rb index a372dfa3..a06aa76e 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -429,6 +429,8 @@ def read(max_len = nil, out_string = nil) # # @param stream [String, IO, StringIO, Enumerable] # @param blk [Proc] + # + # @yieldparam [String] def initialize(stream, &blk) @stream = stream.is_a?(String) ? 
StringIO.new(stream) : stream @buf = String.new.b @@ -439,6 +441,7 @@ def initialize(stream, &blk) class << self # @param blk [Proc] # + # @yieldparam [Enumerator::Yielder] # @return [Enumerable] def string_io(&blk) Enumerator.new do |y| @@ -633,6 +636,7 @@ def close_fused!(enum) # @param enum [Enumerable, nil] # @param blk [Proc] # + # @yieldparam [Enumerator::Yielder] # @return [Enumerable] def chain_fused(enum, &blk) iter = Enumerator.new { blk.call(_1) } From 7f4afb4261686e080cf3580fa390e4cfe32c6a30 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 00:07:36 +0000 Subject: [PATCH 032/295] chore: use multi-line formatting style for really long lines (#37) --- lib/openai/base_client.rb | 17 ++-- lib/openai/base_page.rb | 3 +- lib/openai/base_stream.rb | 3 +- lib/openai/cursor_page.rb | 3 +- .../beta/thread_create_and_run_params.rb | 3 +- .../models/beta/thread_create_params.rb | 3 +- .../beta/threads/message_create_params.rb | 3 +- .../models/beta/threads/run_create_params.rb | 3 +- .../beta/threads/runs/run_step_include.rb | 3 +- ...chat_completion_assistant_message_param.rb | 3 +- ...chat_completion_developer_message_param.rb | 3 +- .../chat_completion_prediction_content.rb | 3 +- .../chat_completion_system_message_param.rb | 3 +- .../chat_completion_tool_message_param.rb | 3 +- .../chat_completion_user_message_param.rb | 3 +- .../response_input_message_content_list.rb | 3 +- lib/openai/page.rb | 3 +- lib/openai/resources/beta/threads/messages.rb | 21 ++-- lib/openai/resources/beta/threads/runs.rb | 35 ++++--- .../resources/beta/threads/runs/steps.rb | 21 ++-- .../resources/vector_stores/file_batches.rb | 21 ++-- lib/openai/resources/vector_stores/files.rb | 28 +++--- lib/openai/stream.rb | 3 +- rbi/lib/openai/base_client.rbi | 68 ++++++------- .../audio/transcription_create_response.rbi | 3 +- .../audio/translation_create_response.rbi | 3 +- .../models/beta/assistant_create_params.rbi | 17 ++-- .../beta/assistant_response_format_option.rbi | 21 ++-- .../models/beta/assistant_stream_event.rbi | 61 ++++++------ rbi/lib/openai/models/beta/assistant_tool.rbi | 19 ++-- .../models/beta/message_stream_event.rbi | 23 ++--- .../models/beta/run_step_stream_event.rbi | 27 ++--- .../openai/models/beta/run_stream_event.rbi | 33 ++++--- .../beta/thread_create_and_run_params.rbi | 99 ++++++++++--------- .../models/beta/thread_create_params.rbi | 80 ++++++++------- .../openai/models/beta/threads/annotation.rbi | 17 ++-- .../models/beta/threads/annotation_delta.rbi | 17 ++-- .../openai/models/beta/threads/message.rbi | 17 ++-- .../models/beta/threads/message_content.rbi | 21 ++-- .../beta/threads/message_content_delta.rbi | 21 ++-- .../threads/message_content_part_param.rbi | 19 ++-- .../beta/threads/message_create_params.rbi | 63 ++++++------ .../models/beta/threads/run_create_params.rbi | 63 ++++++------ .../runs/code_interpreter_tool_call.rbi | 17 ++-- .../runs/code_interpreter_tool_call_delta.rbi | 17 ++-- .../models/beta/threads/runs/run_step.rbi | 17 ++-- .../beta/threads/runs/run_step_delta.rbi | 17 ++-- .../beta/threads/runs/run_step_include.rbi | 3 +- .../models/beta/threads/runs/tool_call.rbi | 19 ++-- .../beta/threads/runs/tool_call_delta.rbi | 19 ++-- ...hat_completion_assistant_message_param.rbi | 59 +++++------ .../chat/chat_completion_content_part.rbi | 21 ++-- ...hat_completion_developer_message_param.rbi | 6 +- .../chat/chat_completion_message_param.rbi | 25 ++--- .../chat_completion_prediction_content.rbi 
| 6 +- .../chat_completion_system_message_param.rbi | 6 +- .../chat_completion_tool_choice_option.rbi | 3 +- .../chat_completion_tool_message_param.rbi | 6 +- .../chat_completion_user_message_param.rbi | 50 +++++----- .../models/chat/completion_create_params.rbi | 22 +++-- .../models/completion_create_params.rbi | 7 +- .../openai/models/embedding_create_params.rbi | 7 +- .../openai/models/file_chunking_strategy.rbi | 11 ++- .../models/file_chunking_strategy_param.rbi | 17 ++-- .../fine_tuning_job_integration.rbi | 3 +- .../models/moderation_create_params.rbi | 22 +++-- .../models/moderation_multi_modal_input.rbi | 3 +- .../models/responses/easy_input_message.rbi | 3 +- .../models/responses/file_search_tool.rbi | 3 +- rbi/lib/openai/models/responses/response.rbi | 11 ++- .../response_code_interpreter_tool_call.rbi | 17 ++-- .../responses/response_computer_tool_call.rbi | 31 +++--- .../models/responses/response_content.rbi | 23 ++--- .../response_content_part_added_event.rbi | 11 ++- .../response_content_part_done_event.rbi | 11 ++- .../responses/response_create_params.rbi | 11 ++- .../responses/response_format_text_config.rbi | 19 ++-- .../models/responses/response_input.rbi | 35 +++---- .../responses/response_input_content.rbi | 19 ++-- .../models/responses/response_input_item.rbi | 35 +++---- .../response_input_message_content_list.rbi | 19 ++-- .../openai/models/responses/response_item.rbi | 29 +++--- .../models/responses/response_output_item.rbi | 25 ++--- .../responses/response_output_message.rbi | 11 ++- .../models/responses/response_output_text.rbi | 19 ++-- .../responses/response_stream_event.rbi | 77 ++++++++------- .../response_text_annotation_delta_event.rbi | 19 ++-- rbi/lib/openai/models/responses/tool.rbi | 21 ++-- .../models/vector_store_search_params.rbi | 3 +- rbi/lib/openai/pooled_net_requester.rbi | 13 ++- rbi/lib/openai/util.rbi | 26 ++--- .../resources/audio/transcriptions_test.rb | 6 +- .../resources/audio/translations_test.rb | 6 +- test/openai/resources/batches_test.rb | 11 ++- .../resources/beta/threads/runs_test.rb | 13 +-- .../openai/resources/chat/completions_test.rb | 11 ++- test/openai/resources/completions_test.rb | 7 +- test/openai/resources/embeddings_test.rb | 9 +- test/openai/resources/images_test.rb | 9 +- test/openai/resources/uploads_test.rb | 8 +- .../resources/vector_stores/files_test.rb | 11 ++- 101 files changed, 989 insertions(+), 862 deletions(-) diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb index b5cc5490..fbbda8f4 100644 --- a/lib/openai/base_client.rb +++ b/lib/openai/base_client.rb @@ -9,14 +9,15 @@ class BaseClient MAX_REDIRECTS = 20 # rubocop:disable Style/MutableConstant - PLATFORM_HEADERS = { - "x-stainless-arch" => OpenAI::Util.arch, - "x-stainless-lang" => "ruby", - "x-stainless-os" => OpenAI::Util.os, - "x-stainless-package-version" => OpenAI::VERSION, - "x-stainless-runtime" => ::RUBY_ENGINE, - "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION - } + PLATFORM_HEADERS = + { + "x-stainless-arch" => OpenAI::Util.arch, + "x-stainless-lang" => "ruby", + "x-stainless-os" => OpenAI::Util.os, + "x-stainless-package-version" => OpenAI::VERSION, + "x-stainless-runtime" => ::RUBY_ENGINE, + "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION + } # rubocop:enable Style/MutableConstant class << self diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index 3892c2bc..359bb6c7 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -17,7 +17,8 @@ module OpenAI # # @example # ```ruby - # completions 
= page + # completions = + # page # .to_enum # .lazy # .select { _1.object_id.even? } diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 59f8f874..77e0c71a 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -10,7 +10,8 @@ module OpenAI # # @example # ```ruby - # chunks = stream + # chunks = + # stream # .to_enum # .lazy # .select { _1.object_id.even? } diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 35b585e9..341d2479 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -17,7 +17,8 @@ module OpenAI # # @example # ```ruby - # completions = cursor_page + # completions = + # cursor_page # .to_enum # .lazy # .select { _1.object_id.even? } diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 72d1ae24..7650d56b 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -327,7 +327,8 @@ class Message < OpenAI::BaseModel # # The text contents of the message. class Content < OpenAI::Union - MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] # The text contents of the message. variant String diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 6d67b56a..734a53ad 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -100,7 +100,8 @@ class Message < OpenAI::BaseModel # # The text contents of the message. class Content < OpenAI::Union - MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] # The text contents of the message. variant String diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index b0a54ec9..480f9a24 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -60,7 +60,8 @@ class MessageCreateParams < OpenAI::BaseModel # # The text contents of the message. class Content < OpenAI::Union - MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] # The text contents of the message. variant String diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index c5c473e5..b6b3564f 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -289,7 +289,8 @@ class AdditionalMessage < OpenAI::BaseModel # # The text contents of the message. class Content < OpenAI::Union - MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] # The text contents of the message. 
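#
# A sketch of what the `MessageContentPartParamArray` unions above accept:
# either a bare string or an array of content-part hashes. The thread id
# and payloads are illustrative assumptions:
#
#   client.beta.threads.messages.create("thread_abc", role: :user, content: "plain text")
#
#   client.beta.threads.messages.create(
#     "thread_abc",
#     role: :user,
#     content: [
#       {type: :text, text: "Describe this image"},
#       {type: :image_url, image_url: {url: "https://example.com/cat.png"}}
#     ]
#   )
#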
variant String diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index ae9413a8..dd1c5381 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -7,7 +7,8 @@ module Threads module Runs # @abstract class RunStepInclude < OpenAI::Enum - STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = + :"step_details.tool_calls[*].file_search.results[*].content" finalize! end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 0ab1ed53..5b2e877b 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -111,7 +111,8 @@ class Audio < OpenAI::BaseModel # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. class Content < OpenAI::Union - ArrayOfContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] + ArrayOfContentPartArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] # The contents of the assistant message. variant String diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index d7d8b7c4..9e91e405 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -44,7 +44,8 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # # The contents of the developer message. class Content < OpenAI::Union - ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] # The contents of the developer message. variant String diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index d5aca19d..f9e93347 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -36,7 +36,8 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. class Content < OpenAI::Union - ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] # The content used for a Predicted Output. This is often the # text of a file you are regenerating with minor changes. 
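#
# `ChatCompletionPredictionContent#content`, reworked above, takes the same
# string-or-text-parts union. A hedged sketch; the model name and file path
# are placeholders:
#
#   client.chat.completions.create(
#     model: "gpt-4o",
#     messages: [{role: :user, content: "Apply the requested edit"}],
#     prediction: {
#       type: :content,
#       content: File.read("app/models/user.rb")  # or [{type: :text, text: ...}, ...]
#     }
#   )
#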
diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 658bb497..a0cc371d 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -44,7 +44,8 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # # The contents of the system message. class Content < OpenAI::Union - ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] # The contents of the system message. variant String diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 6ba3a959..ebbca6aa 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -35,7 +35,8 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # # The contents of the tool message. class Content < OpenAI::Union - ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] # The contents of the tool message. variant String diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 18451c7b..4a1eee01 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -43,7 +43,8 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # # The contents of the user message. class Content < OpenAI::Union - ChatCompletionContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] + ChatCompletionContentPartArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] # The text contents of the message. variant String diff --git a/lib/openai/models/responses/response_input_message_content_list.rb b/lib/openai/models/responses/response_input_message_content_list.rb index 9ac48e2d..98901e68 100644 --- a/lib/openai/models/responses/response_input_message_content_list.rb +++ b/lib/openai/models/responses/response_input_message_content_list.rb @@ -3,7 +3,8 @@ module OpenAI module Models module Responses - ResponseInputMessageContentList = OpenAI::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] + ResponseInputMessageContentList = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] end end end diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 40ccc49a..d8834323 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -17,7 +17,8 @@ module OpenAI # # @example # ```ruby - # models = page + # models = + # page # .to_enum # .lazy # .select { _1.object_id.even? 
} diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 4e5141de..5b87f5f5 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -58,9 +58,10 @@ def create(thread_id, params) # @return [OpenAI::Models::Beta::Threads::Message] def retrieve(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageRetrieveParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["threads/%0s/messages/%1s", thread_id, message_id], @@ -89,9 +90,10 @@ def retrieve(message_id, params) # @return [OpenAI::Models::Beta::Threads::Message] def update(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageUpdateParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["threads/%0s/messages/%1s", thread_id, message_id], @@ -154,9 +156,10 @@ def list(thread_id, params = {}) # @return [OpenAI::Models::Beta::Threads::MessageDeleted] def delete(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageDeleteParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :delete, path: ["threads/%0s/messages/%1s", thread_id, message_id], diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 2f3b3b12..9f5a4401 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -282,9 +282,10 @@ def create_streaming(thread_id, params) # @return [OpenAI::Models::Beta::Threads::Run] def retrieve(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunRetrieveParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["threads/%0s/runs/%1s", thread_id, run_id], @@ -314,9 +315,10 @@ def retrieve(run_id, params) # @return [OpenAI::Models::Beta::Threads::Run] def update(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunUpdateParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["threads/%0s/runs/%1s", thread_id, run_id], @@ -376,9 +378,10 @@ def list(thread_id, params = {}) # @return [OpenAI::Models::Beta::Threads::Run] def cancel(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCancelParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + 
parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["threads/%0s/runs/%1s/cancel", thread_id, run_id], @@ -408,9 +411,10 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) parsed.delete(:stream) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["threads/%0s/runs/%1s/submit_tool_outputs", thread_id, run_id], @@ -441,9 +445,10 @@ def submit_tool_outputs(run_id, params) def submit_tool_outputs_streaming(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) parsed.store(:stream, true) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["threads/%0s/runs/%1s/submit_tool_outputs", thread_id, run_id], diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index d5c4ddb3..0dc70121 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -30,12 +30,14 @@ class Steps # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] def retrieve(step_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end - run_id = parsed.delete(:run_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + run_id = + parsed.delete(:run_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["threads/%0s/runs/%1s/steps/%2s", thread_id, run_id, step_id], @@ -83,9 +85,10 @@ def retrieve(step_id, params) # @return [OpenAI::CursorPage] def list(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepListParams.dump_request(params) - thread_id = parsed.delete(:thread_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + thread_id = + parsed.delete(:thread_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["threads/%0s/runs/%1s/steps", thread_id, run_id], diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 7893fd17..b3523d38 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -50,9 +50,10 @@ def create(vector_store_id, params) # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] def retrieve(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchRetrieveParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise 
ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["vector_stores/%0s/file_batches/%1s", vector_store_id, batch_id], @@ -75,9 +76,10 @@ def retrieve(batch_id, params) # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] def cancel(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCancelParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["vector_stores/%0s/file_batches/%1s/cancel", vector_store_id, batch_id], @@ -118,9 +120,10 @@ def cancel(batch_id, params) # @return [OpenAI::CursorPage] def list_files(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchListFilesParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["vector_stores/%0s/file_batches/%1s/files", vector_store_id, batch_id], diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 0c9ae2a5..54b806c0 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -52,9 +52,10 @@ def create(vector_store_id, params) # @return [OpenAI::Models::VectorStores::VectorStoreFile] def retrieve(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileRetrieveParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], @@ -82,9 +83,10 @@ def retrieve(file_id, params) # @return [OpenAI::Models::VectorStores::VectorStoreFile] def update(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileUpdateParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :post, path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], @@ -149,9 +151,10 @@ def list(vector_store_id, params = {}) # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] def delete(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileDeleteParams.dump_request(params) - vector_store_id = parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :delete, path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], @@ -173,9 +176,10 @@ def delete(file_id, params) # @return [OpenAI::Page] def content(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileContentParams.dump_request(params) - vector_store_id = 
parsed.delete(:vector_store_id) do - raise ArgumentError.new("missing required path argument #{_1}") - end + vector_store_id = + parsed.delete(:vector_store_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :get, path: ["vector_stores/%0s/files/%1s/content", vector_store_id, file_id], diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index 2d448ef0..8e218404 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -10,7 +10,8 @@ module OpenAI # # @example # ```ruby - # events = stream + # events = + # stream # .to_enum # .lazy # .select { _1.object_id.even? } diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index e0351a43..bb9872fa 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -5,40 +5,42 @@ module OpenAI class BaseClient abstract! - RequestComponentsShape = T.type_alias do - { - method: Symbol, - path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), - body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), - model: T.nilable(OpenAI::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - } - end + RequestComponentsShape = + T.type_alias do + { + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable(Symbol), + page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), + model: T.nilable(OpenAI::Converter::Input), + options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + } + end - RequestInputShape = T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - max_retries: Integer, - timeout: Float - } - end + RequestInputShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + max_retries: Integer, + timeout: Float + } + end # from whatwg fetch spec MAX_REDIRECTS = 20 diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index c414dc3d..814a204b 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -8,7 +8,8 @@ module OpenAI class TranscriptionCreateResponse < OpenAI::Union abstract! 
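#
# The `parsed.delete(:thread_id) do ... end` assignments reformatted in the
# resource files above make the parent id a required keyword on nested
# calls. A sketch with placeholder ids:
#
#   client.beta.threads.messages.retrieve("msg_123", thread_id: "thread_abc")
#
#   # Omitting the keyword fails before any HTTP request is built:
#   client.beta.threads.messages.retrieve("msg_123", {})
#   # => ArgumentError: missing required path argument thread_id
#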
- Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } + Variants = + type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 79ac8c66..61e32cae 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -6,7 +6,8 @@ module OpenAI class TranslationCreateResponse < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } + Variants = + type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 44da54e2..9a62347b 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -526,14 +526,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end class Auto < OpenAI::BaseModel # Always `auto`. diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index ae4f724e..db97be12 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -26,16 +26,17 @@ module OpenAI class AssistantResponseFormatOption < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + Symbol, + OpenAI::Models::ResponseFormatText, + OpenAI::Models::ResponseFormatJSONObject, + OpenAI::Models::ResponseFormatJSONSchema + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index e84d0007..7a366c59 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -26,36 +26,37 @@ module OpenAI class AssistantStreamEvent < OpenAI::Union abstract! 
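#
# The `Variants` constants reformatted throughout this patch document what
# a union-typed return value resolves to at runtime, so `case/in` dispatch
# works. A sketch for the transcription union; the create parameters are
# assumptions:
#
#   resp = client.audio.transcriptions.create(model: "whisper-1", file: audio_io)
#   case resp
#   in OpenAI::Models::Audio::TranscriptionVerbose
#     puts "#{resp.duration}s transcribed"
#   in OpenAI::Models::Audio::Transcription
#     puts resp.text
#   end
#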
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + ) + } + end class ThreadCreated < OpenAI::BaseModel # Represents a thread that contains diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index af9e9f94..313f24bc 
100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -6,15 +6,16 @@ module OpenAI class AssistantTool < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index ef592ec4..97821d4a 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -9,17 +9,18 @@ module OpenAI class MessageStreamEvent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete + ) + } + end class ThreadMessageCreated < OpenAI::BaseModel # Represents a message within a diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 40de2bea..6b1ae66d 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -9,19 +9,20 @@ module OpenAI class RunStepStreamEvent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired + ) + } + end class ThreadRunStepCreated < OpenAI::BaseModel # Represents a step in execution of a run. 
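#
# The stream-event unions above are what a streaming run yields, one
# variant per event. A sketch with placeholder ids; only a subset of the
# variants is handled:
#
#   stream = client.beta.threads.runs.create_streaming("thread_abc", assistant_id: "asst_123")
#   stream.each do |event|
#     case event
#     in OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta
#       print "."  # incremental message content arrives here
#     in OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted
#       puts "run finished"
#     else
#       nil  # other lifecycle events
#     end
#   end
#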
diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index fb5ba148..5642aefb 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -8,22 +8,23 @@ module OpenAI class RunStreamEvent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired + ) + } + end class ThreadRunCreated < OpenAI::BaseModel # Represents an execution run on a diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 60679618..0117301e 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -593,30 +593,32 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ) - ] - ) - } - end + } + end - MessageContentPartParamArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + MessageContentPartParamArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + end end # The role of the entity that is creating the message. Allowed values include: @@ -716,14 +718,15 @@ module OpenAI class Tool < OpenAI::Union abstract! 
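#
# `ThreadCreateAndRunParams` embeds the same message-content union inside
# an inline thread definition. A sketch; the resource method name and ids
# are assumptions:
#
#   client.beta.threads.create_and_run(
#     assistant_id: "asst_123",
#     thread: {messages: [{role: :user, content: "Hello!"}]}
#   )
#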
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) + } + end class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` @@ -970,14 +973,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end class Auto < OpenAI::BaseModel # Always `auto`. @@ -1191,15 +1195,16 @@ module OpenAI class Tool < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) + } + end end class TruncationStrategy < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 94c632cf..6fdb5196 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -211,30 +211,32 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ) - ] - ) - } - end + } + end - MessageContentPartParamArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + MessageContentPartParamArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + end end # The role of the entity that is creating the message. Allowed values include: @@ -334,14 +336,15 @@ module OpenAI class Tool < OpenAI::Union abstract! 
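#
# The attachment `Tool` unions above allow either tool type per message
# attachment. A sketch with illustrative ids:
#
#   client.beta.threads.create(
#     messages: [{
#       role: :user,
#       content: "Summarize the attached report",
#       attachments: [{
#         file_id: "file_123",
#         tools: [{type: :file_search}]  # or {type: :code_interpreter}
#       }]
#     }]
#   )
#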
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) + } + end class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` @@ -582,14 +585,15 @@ module OpenAI class ChunkingStrategy < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + } + end class Auto < OpenAI::BaseModel # Always `auto`. diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index dc7434e2..1351f46f 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -10,14 +10,15 @@ module OpenAI class Annotation < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Models::Beta::Threads::FilePathAnnotation + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index 32598149..a388880d 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -10,14 +10,15 @@ module OpenAI class AnnotationDelta < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index b79d9945..4316b56f 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -339,14 +339,15 @@ module OpenAI class Tool < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) + } + end class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel # The type of tool being defined: `file_search` diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 7bc0759c..9e3c7f22 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -9,16 +9,17 @@ module OpenAI class MessageContent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlock, + OpenAI::Models::Beta::Threads::RefusalContentBlock + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index 09915e69..dce4489b 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -9,16 +9,17 @@ module OpenAI class MessageContentDelta < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index fe9ae396..e4fb311c 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -9,15 +9,16 @@ module OpenAI class MessageContentPartParam < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 0783f811..0c691f82 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -145,30 +145,32 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ) - ] - ) - } - end + } + end - MessageContentPartParamArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + MessageContentPartParamArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + end end # The role of the entity that is creating the message. Allowed values include: @@ -268,14 +270,15 @@ module OpenAI class Tool < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) + } + end class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index dc48611b..0732a2a0 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -552,30 +552,32 @@ module OpenAI class Content < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] ) - ] - ) - } - end + } + end - MessageContentPartParamArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + MessageContentPartParamArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) + ] + end end # The role of the entity that is creating the message. Allowed values include: @@ -675,14 +677,15 @@ module OpenAI class Tool < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) + } + end class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 4ddb7b18..2b204c7c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -146,14 +146,15 @@ module OpenAI class Output < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) + } + end class Logs < OpenAI::BaseModel # The text output from the Code Interpreter tool call. diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 76d2e5d9..6e716d13 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -159,14 +159,15 @@ module OpenAI class Output < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index c4e9125c..941b170d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -323,14 +323,15 @@ module OpenAI class StepDetails < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails + ) + } + end end # The type of run step, which can be either `message_creation` or `tool_calls`. diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index cb3b9c62..b2600ee1 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -70,14 +70,15 @@ module OpenAI class StepDetails < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 9fd4c51e..5026a3f5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -12,7 +12,8 @@ module OpenAI Value = type_template(:out) { {fixed: Symbol} } - STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = + :"step_details.tool_calls[*].file_search.results[*].content" end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index e3f26a25..2550325a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -9,15 +9,16 @@ module OpenAI class ToolCall < OpenAI::Union abstract! 
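run_step_include.rbi above shows the enum flavour of the same pattern: the type member is fixed to `Symbol`, and the accepted members are exposed as named constants; the only change there is wrapping a constant whose symbol literal overflowed the line limit. A sketch of the shape, with a hypothetical `ExampleInclude` standing in for the generated class:

    # typed: true
    require "sorbet-runtime"

    class ExampleInclude
      extend T::Generic
      extend T::Helpers

      abstract!

      # Enum-style union: the runtime type is Symbol...
      Value = type_template(:out) { {fixed: Symbol} }

      # ...and the permitted members are spelled out as constants, e.g. the
      # dotted include path seen in the hunk above.
      STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT =
        :"step_details.tool_calls[*].file_search.results[*].content"
    end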
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + ) + } + end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 8169ef81..37550da6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -9,15 +9,16 @@ module OpenAI class ToolCallDelta < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) + } + end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 624890ee..e97fdb66 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -202,42 +202,45 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + ] ) - ] - ) - } - end + } + end - ArrayOfContentPartArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - end + ArrayOfContentPartArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + ] + end # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). class ArrayOfContentPart < OpenAI::Union abstract! 
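The `*Array` constants alongside these unions (`ArrayOfContentPartArray` above, `MessageContentPartParamArray` earlier) are plain `T.type_alias` definitions: they give a reusable name to an array-of-content-parts type so signatures elsewhere can reference it, and the rewrap again just moves the alias body below the `=`. A minimal sketch with hypothetical part classes:

    # typed: true
    require "sorbet-runtime"

    class TextPart; end
    class RefusalPart; end

    # A named alias for a heterogeneous array type. Aliases are purely
    # compile-time names; at runtime this constant is just a type-alias object.
    ContentPartArray =
      T.type_alias { T::Array[T.any(TextPart, RefusalPart)] }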
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + } + end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 5f1deb83..70f9cc23 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -10,16 +10,17 @@ module OpenAI class ChatCompletionContentPart < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + } + end class File < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index d5dca054..c3905878 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -68,9 +68,11 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } + Variants = + type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + ChatCompletionContentPartTextArray = + T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 97d8192e..4172f718 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -11,18 +11,19 @@ module OpenAI class ChatCompletionMessageParam < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) + } + end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index f595e430..2baabb6b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -57,9 +57,11 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } + Variants = + type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + ChatCompletionContentPartTextArray = + T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 1863c0f8..fd7477b7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -68,9 +68,11 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } + Variants = + type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + ChatCompletionContentPartTextArray = + T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 0c237bac..5d0ba77e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -17,7 +17,8 @@ module OpenAI class ChatCompletionToolChoiceOption < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } + Variants = + type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } # `none` means the model will not call any tool and instead generates a message. 
# `auto` means the model can pick between generating a message or calling one or diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 20936406..f4ea0f2e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -64,9 +64,11 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } + Variants = + type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + ChatCompletionContentPartTextArray = + T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 7ebaa424..ffaa8bc7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -125,32 +125,34 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + ] ) - ] - ) - } - end + } + end - ChatCompletionContentPartArray = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - end + ChatCompletionContentPartArray = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + ] + end end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 7b69e065..1a0003e1 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -662,7 +662,8 @@ module OpenAI class FunctionCall < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } + Variants = + type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a @@ -747,15 +748,16 @@ module OpenAI class ResponseFormat < OpenAI::Union abstract! 
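`FunctionCall` and `ChatCompletionToolChoiceOption` just above are the "mode or object" unions: the wire value is either a bare mode (`none`/`auto`, surfaced in Ruby as a Symbol) or a structured choice object that forces a specific tool. A consumer-side sketch of dispatching on such a value; the `tool_choice` local is a stand-in, and the `.function.name` accessor is assumed to mirror the wire shape rather than confirmed by this diff:

    tool_choice = :auto # or an OpenAI::Models::Chat::ChatCompletionNamedToolChoice

    case tool_choice
    in Symbol => mode
      puts "model decides for itself: #{mode}"
    in OpenAI::Models::Chat::ChatCompletionNamedToolChoice => choice
      puts "forced tool: #{choice.function.name}" # accessor assumed
    end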
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::ResponseFormatText, + OpenAI::Models::ResponseFormatJSONSchema, + OpenAI::Models::ResponseFormatJSONObject + ) + } + end end # Specifies the latency tier to use for processing the request. This parameter is diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index cab146c9..8350f105 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -356,9 +356,10 @@ module OpenAI class Prompt < OpenAI::Union abstract! - Variants = type_template(:out) do - {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} - end + Variants = + type_template(:out) do + {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} + end StringArray = T.type_alias { T::Array[String] } diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index a9a37c56..c7966b7d 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -110,9 +110,10 @@ module OpenAI class Input < OpenAI::Union abstract! - Variants = type_template(:out) do - {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} - end + Variants = + type_template(:out) do + {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} + end StringArray = T.type_alias { T::Array[String] } diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index 93972f9b..aad68d3d 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -6,11 +6,12 @@ module OpenAI class FileChunkingStrategy < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + } + end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index aeffdf61..07560304 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -7,14 +7,15 @@ module OpenAI class FileChunkingStrategyParam < OpenAI::Union abstract! 
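The `Prompt` and `Input` unions rewrapped above (completions and embeddings) enumerate four accepted request shapes: one string, a batch of strings, one pre-tokenized sequence, or a batch of pre-tokenized sequences, where the integer forms carry token IDs. All four, with illustrative values:

    prompt_a = "This is a test."               # String
    prompt_b = ["first", "second"]             # T::Array[String]
    prompt_c = [1212, 318, 257, 1332, 13]      # T::Array[Integer] (token IDs; values illustrative)
    prompt_d = [[1212, 318], [257, 1332, 13]]  # T::Array[T::Array[Integer]]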
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Models::StaticFileChunkingStrategyObjectParam + ) + } + end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi index d1f932e5..96daf9a4 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi @@ -5,7 +5,8 @@ module OpenAI FineTuningJobIntegration = T.type_alias { FineTuning::FineTuningJobIntegration } module FineTuning - FineTuningJobIntegration = T.type_alias { OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject } + FineTuningJobIntegration = + T.type_alias { OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject } end end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index e00a5df9..6b9a1b5a 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -88,19 +88,21 @@ module OpenAI class Input < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - String, - T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[String], + T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + ) + } + end StringArray = T.type_alias { T::Array[String] } - ModerationMultiModalInputArray = T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } + ModerationMultiModalInputArray = + T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } end # The content moderation model you would like to use. Learn more in diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index a24328fc..861ae45d 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -6,7 +6,8 @@ module OpenAI class ModerationMultiModalInput < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } + Variants = + type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 990b3527..23c15e32 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -66,7 +66,8 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)} } + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)} } end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 3a6bd9db..4085e59c 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -91,7 +91,8 @@ module OpenAI class Filters < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } + Variants = + type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index b7062e57..1ece0d1b 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -519,11 +519,12 @@ module OpenAI class ToolChoice < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + } + end end # The truncation strategy to use for the model response. diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index b68befef..8b5451ff 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -118,14 +118,15 @@ module OpenAI class Result < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) + } + end class Logs < OpenAI::BaseModel # The logs of the code interpreter tool call. diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index bc8782dc..c0e726fe 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -160,21 +160,22 @@ module OpenAI class Action < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait + ) + } + end class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 58f3f635..92cf9df2 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -7,17 +7,18 @@ module OpenAI class ResponseContent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile, - OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Models::Responses::ResponseOutputRefusal - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile, + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Models::Responses::ResponseOutputRefusal + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index ca3acd5c..9e79484e 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -93,11 +93,12 @@ module OpenAI class Part < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 65977cbe..5661d776 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -93,11 +93,12 @@ module OpenAI class Part < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index f7396668..cc441334 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -402,11 +402,12 @@ module OpenAI class ToolChoice < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + } + end end # The truncation strategy to use for the model response. diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index f3595c51..fea986b1 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -19,15 +19,16 @@ module OpenAI class ResponseFormatTextConfig < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::ResponseFormatText, + OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::Models::ResponseFormatJSONObject + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/lib/openai/models/responses/response_input.rbi index 622e3348..2bd4a97a 100644 --- a/rbi/lib/openai/models/responses/response_input.rbi +++ b/rbi/lib/openai/models/responses/response_input.rbi @@ -3,23 +3,24 @@ module OpenAI module Models module Responses - ResponseInput = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - end + ResponseInput = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index 957c8020..cba404fb 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -7,15 +7,16 @@ module OpenAI class ResponseInputContent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index f3b960b7..66f24422 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -11,23 +11,24 @@ module OpenAI class ResponseInputItem < OpenAI::Union abstract! 
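`ResponseInput`, aliased a few hunks above, is simply "array of any input item", which is what lets a single Responses request interleave user messages with prior tool calls and their outputs. A hedged usage sketch; hash literals stand in for the model classes on the assumption that request params coerce them, and the field names follow the public API shape rather than anything shown in this diff:

    input = [
      # EasyInputMessage shape (field names assumed)
      {role: :user, content: "What changed in this file?"},
      # ResponseInputItem::FunctionCallOutput shape (field names assumed)
      {type: :function_call_output, call_id: "call_123", output: "{\"ok\":true}"}
    ]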
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + } + end class Message < OpenAI::BaseModel # A list of one or many input items to the model, containing different content diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi index d4c36f8d..80207c6a 100644 --- a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi @@ -3,15 +3,16 @@ module OpenAI module Models module Responses - ResponseInputMessageContentList = T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - end + ResponseInputMessageContentList = + T.type_alias do + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + end end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index fac7e7a2..77bef808 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -7,20 +7,21 @@ module OpenAI class ResponseItem < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index 75949da7..bec1b93e 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -7,18 +7,19 @@ module OpenAI class ResponseOutputItem < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseReasoningItem + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 7fb7026c..17bf1ad0 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -94,11 +94,12 @@ module OpenAI class Content < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - } - end + Variants = + type_template(:out) do + { + fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + } + end end # The status of the message input. One of `in_progress`, `completed`, or diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index e04d3988..13eb0a87 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -101,15 +101,16 @@ module OpenAI class Annotation < OpenAI::Union abstract! 
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + ) + } + end class FileCitation < OpenAI::BaseModel # The ID of the file. diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 0b1bb12f..ed1980bf 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -7,44 +7,45 @@ module OpenAI class ResponseStreamEvent < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseAudioDeltaEvent, - OpenAI::Models::Responses::ResponseAudioDoneEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Models::Responses::ResponseCompletedEvent, - OpenAI::Models::Responses::ResponseContentPartAddedEvent, - OpenAI::Models::Responses::ResponseContentPartDoneEvent, - OpenAI::Models::Responses::ResponseCreatedEvent, - OpenAI::Models::Responses::ResponseErrorEvent, - OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Models::Responses::ResponseInProgressEvent, - OpenAI::Models::Responses::ResponseFailedEvent, - OpenAI::Models::Responses::ResponseIncompleteEvent, - OpenAI::Models::Responses::ResponseOutputItemAddedEvent, - OpenAI::Models::Responses::ResponseOutputItemDoneEvent, - OpenAI::Models::Responses::ResponseRefusalDeltaEvent, - OpenAI::Models::Responses::ResponseRefusalDoneEvent, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Models::Responses::ResponseTextDeltaEvent, - OpenAI::Models::Responses::ResponseTextDoneEvent, - OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseAudioDeltaEvent, + OpenAI::Models::Responses::ResponseAudioDoneEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, 
+ OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Models::Responses::ResponseCompletedEvent, + OpenAI::Models::Responses::ResponseContentPartAddedEvent, + OpenAI::Models::Responses::ResponseContentPartDoneEvent, + OpenAI::Models::Responses::ResponseCreatedEvent, + OpenAI::Models::Responses::ResponseErrorEvent, + OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Models::Responses::ResponseInProgressEvent, + OpenAI::Models::Responses::ResponseFailedEvent, + OpenAI::Models::Responses::ResponseIncompleteEvent, + OpenAI::Models::Responses::ResponseOutputItemAddedEvent, + OpenAI::Models::Responses::ResponseOutputItemDoneEvent, + OpenAI::Models::Responses::ResponseRefusalDeltaEvent, + OpenAI::Models::Responses::ResponseRefusalDoneEvent, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Models::Responses::ResponseTextDeltaEvent, + OpenAI::Models::Responses::ResponseTextDoneEvent, + OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent + ) + } + end end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 00dfd4c2..ed354df8 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -131,15 +131,16 @@ module OpenAI class Annotation < OpenAI::Union abstract! - Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + ) + } + end class FileCitation < OpenAI::BaseModel # The ID of the file. diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index e1477b05..0085ad60 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -9,16 +9,17 @@ module OpenAI class Tool < OpenAI::Union abstract! 
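`ResponseStreamEvent`, rewrapped just above, is the widest union in the patch: one variant per server-sent event a streaming response can emit. Consumers typically pattern-match and ignore what they do not need. A sketch; `stream` stands in for whatever enumerable of decoded events the client yields, and the `.delta` accessor is assumed from the wire event rather than confirmed here:

    stream = [] # stand-in for the enumerable of decoded stream events

    stream.each do |event|
      case event
      in OpenAI::Models::Responses::ResponseTextDeltaEvent
        print event.delta # incremental text (accessor assumed)
      in OpenAI::Models::Responses::ResponseCompletedEvent
        puts "\ndone"
      else
        nil # ignore other event types
      end
    end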
- Variants = type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - } - end + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) + } + end end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 91a2ec22..22b4b1e4 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -101,7 +101,8 @@ module OpenAI class Filters < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } + Variants = + type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi index b01c005a..43651130 100644 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ b/rbi/lib/openai/pooled_net_requester.rbi @@ -3,9 +3,16 @@ module OpenAI # @api private class PooledNetRequester - RequestShape = T.type_alias do - {method: Symbol, url: URI::Generic, headers: T::Hash[String, String], body: T.anything, deadline: Float} - end + RequestShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + deadline: Float + } + end # from the golang stdlib # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 6215e687..c6676a16 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -123,15 +123,16 @@ module OpenAI end end - ParsedUriShape = T.type_alias do - { - scheme: T.nilable(String), - host: T.nilable(String), - port: T.nilable(Integer), - path: T.nilable(String), - query: T::Hash[String, T::Array[String]] - } - end + ParsedUriShape = + T.type_alias do + { + scheme: T.nilable(String), + host: T.nilable(String), + port: T.nilable(Integer), + path: T.nilable(String), + query: T::Hash[String, T::Array[String]] + } + end class << self # @api private @@ -253,9 +254,10 @@ module OpenAI end end - ServerSentEvent = T.type_alias do - {event: T.nilable(String), data: T.nilable(String), id: T.nilable(String), retry: T.nilable(Integer)} - end + ServerSentEvent = + T.type_alias do + {event: T.nilable(String), data: T.nilable(String), id: T.nilable(String), retry: T.nilable(Integer)} + end class << self # @api private diff --git a/test/openai/resources/audio/transcriptions_test.rb b/test/openai/resources/audio/transcriptions_test.rb index ca797160..a8a6d471 100644 --- a/test/openai/resources/audio/transcriptions_test.rb +++ b/test/openai/resources/audio/transcriptions_test.rb @@ -4,10 +4,8 @@ class OpenAI::Test::Resources::Audio::TranscriptionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.audio.transcriptions.create( - file: StringIO.new("some file contents"), - model: :"whisper-1" - ) + response = + @openai.audio.transcriptions.create(file: StringIO.new("some file contents"), model: :"whisper-1") assert_pattern do response => 
OpenAI::Models::Audio::TranscriptionCreateResponse diff --git a/test/openai/resources/audio/translations_test.rb b/test/openai/resources/audio/translations_test.rb index f0b69d4f..d42b8267 100644 --- a/test/openai/resources/audio/translations_test.rb +++ b/test/openai/resources/audio/translations_test.rb @@ -4,10 +4,8 @@ class OpenAI::Test::Resources::Audio::TranslationsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.audio.translations.create( - file: StringIO.new("some file contents"), - model: :"whisper-1" - ) + response = + @openai.audio.translations.create(file: StringIO.new("some file contents"), model: :"whisper-1") assert_pattern do response => OpenAI::Models::Audio::TranslationCreateResponse diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb index c7885939..08893c6c 100644 --- a/test/openai/resources/batches_test.rb +++ b/test/openai/resources/batches_test.rb @@ -4,11 +4,12 @@ class OpenAI::Test::Resources::BatchesTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.batches.create( - completion_window: :"24h", - endpoint: :"/v1/responses", - input_file_id: "input_file_id" - ) + response = + @openai.batches.create( + completion_window: :"24h", + endpoint: :"/v1/responses", + input_file_id: "input_file_id" + ) assert_pattern do response => OpenAI::Models::Batch diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 98123435..7c074c33 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -209,12 +209,13 @@ def test_cancel_required_params end def test_submit_tool_outputs_required_params - response = @openai.beta.threads.runs.submit_tool_outputs( - "run_id", - thread_id: "thread_id", - stream: true, - tool_outputs: [{}] - ) + response = + @openai.beta.threads.runs.submit_tool_outputs( + "run_id", + thread_id: "thread_id", + stream: true, + tool_outputs: [{}] + ) assert_pattern do response => OpenAI::Models::Beta::Threads::Run diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index 4b353ac2..0d861145 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -4,11 +4,12 @@ class OpenAI::Test::Resources::Chat::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + response = + @openai.chat.completions.create( + messages: [{content: "string", role: :developer}], + model: :"o3-mini", + stream: true + ) assert_pattern do response => OpenAI::Models::Chat::ChatCompletion diff --git a/test/openai/resources/completions_test.rb b/test/openai/resources/completions_test.rb index 6fb99107..b402a581 100644 --- a/test/openai/resources/completions_test.rb +++ b/test/openai/resources/completions_test.rb @@ -4,11 +4,8 @@ class OpenAI::Test::Resources::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.completions.create( - model: :"gpt-3.5-turbo-instruct", - prompt: "This is a test.", - stream: true - ) + response = + @openai.completions.create(model: :"gpt-3.5-turbo-instruct", prompt: "This is a test.", stream: true) assert_pattern do response => OpenAI::Models::Completion diff --git a/test/openai/resources/embeddings_test.rb 
b/test/openai/resources/embeddings_test.rb index 244d5817..d9eb182b 100644 --- a/test/openai/resources/embeddings_test.rb +++ b/test/openai/resources/embeddings_test.rb @@ -4,10 +4,11 @@ class OpenAI::Test::Resources::EmbeddingsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.embeddings.create( - input: "The quick brown fox jumped over the lazy dog", - model: :"text-embedding-ada-002" - ) + response = + @openai.embeddings.create( + input: "The quick brown fox jumped over the lazy dog", + model: :"text-embedding-ada-002" + ) assert_pattern do response => OpenAI::Models::CreateEmbeddingResponse diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb index 82078e4d..c935d0c6 100644 --- a/test/openai/resources/images_test.rb +++ b/test/openai/resources/images_test.rb @@ -19,10 +19,11 @@ def test_create_variation_required_params end def test_edit_required_params - response = @openai.images.edit( - image: StringIO.new("some file contents"), - prompt: "A cute baby sea otter wearing a beret" - ) + response = + @openai.images.edit( + image: StringIO.new("some file contents"), + prompt: "A cute baby sea otter wearing a beret" + ) assert_pattern do response => OpenAI::Models::ImagesResponse diff --git a/test/openai/resources/uploads_test.rb b/test/openai/resources/uploads_test.rb index 88af217d..3b3e356c 100644 --- a/test/openai/resources/uploads_test.rb +++ b/test/openai/resources/uploads_test.rb @@ -4,12 +4,8 @@ class OpenAI::Test::Resources::UploadsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.uploads.create( - bytes: 0, - filename: "filename", - mime_type: "mime_type", - purpose: :assistants - ) + response = + @openai.uploads.create(bytes: 0, filename: "filename", mime_type: "mime_type", purpose: :assistants) assert_pattern do response => OpenAI::Models::Upload diff --git a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb index a8c36003..6e38e8a7 100644 --- a/test/openai/resources/vector_stores/files_test.rb +++ b/test/openai/resources/vector_stores/files_test.rb @@ -48,11 +48,12 @@ def test_retrieve_required_params end def test_update_required_params - response = @openai.vector_stores.files.update( - "file-abc123", - vector_store_id: "vs_abc123", - attributes: {foo: "string"} - ) + response = + @openai.vector_stores.files.update( + "file-abc123", + vector_store_id: "vs_abc123", + attributes: {foo: "string"} + ) assert_pattern do response => OpenAI::Models::VectorStores::VectorStoreFile From f743b2a0d006b761c08ba3a47a3e4f79d3eba055 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 01:53:44 +0000 Subject: [PATCH 033/295] chore(internal): add sorbet config for SDK local development (#38) --- .gitignore | 9 +++++---- .solargraph.yml | 9 +++++++++ Rakefile | 2 +- sorbet/config | 2 ++ 4 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 .solargraph.yml create mode 100644 sorbet/config diff --git a/.gitignore b/.gitignore index 1ef280e1..8b1228a8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,10 @@ -.prism.log +*.gem .idea/ +.prism.log .ruby-lsp/ .yardoc/ -doc/ -sorbet/ Brewfile.lock.json bin/tapioca -*.gem +doc/ +sorbet/* +!/sorbet/config diff --git a/.solargraph.yml b/.solargraph.yml new file mode 100644 index 00000000..98397e75 --- /dev/null +++ b/.solargraph.yml @@ -0,0 +1,9 @@ +--- +max_files: 0 +include: + - '*.gemspec' + - 
'/test/openai/test_helper.rb' + - 'Rakefile' + - 'lib/**/*.rb' +exclude: + - 'rbi/**/*' diff --git a/Rakefile b/Rakefile index 7ea8a2fc..cde192ee 100644 --- a/Rakefile +++ b/Rakefile @@ -70,7 +70,7 @@ multitask(:steep) do end multitask(:sorbet) do - sh(*%w[srb typecheck -- .], chdir: "./rbi") + sh(*%w[srb typecheck]) end file("sorbet/tapioca") do diff --git a/sorbet/config b/sorbet/config new file mode 100644 index 00000000..538c1528 --- /dev/null +++ b/sorbet/config @@ -0,0 +1,2 @@ +--dir=rbi +--ignore=test/ From bba66df4f72358d422d71758da1be85dd892e026 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 17:20:03 +0000 Subject: [PATCH 034/295] chore: add example directory (#39) --- .rubocop.yml | 11 ++++++++++- .solargraph.yml | 3 ++- examples/.keep | 4 ++++ 3 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 examples/.keep diff --git a/.rubocop.yml b/.rubocop.yml index 3cb19f0a..6dbfa8bf 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -10,7 +10,7 @@ AllCops: SuggestExtensions: false TargetRubyVersion: 3.1.0 -# Whether MFA is required or not should be left to the token configuration +# Whether MFA is required or not should be left to the token configuration. Gemspec/RequireMFA: Enabled: false @@ -72,6 +72,10 @@ Lint/EmptyInPattern: Exclude: - "test/**/*" +Lint/MissingCopEnableDirective: + Exclude: + - "examples/**/*.rb" + Lint/MissingSuper: Exclude: - "**/*.rbi" @@ -237,3 +241,8 @@ Style/StringLiterals: # Prefer explicit symbols for clarity; you can search for `:the_symbol`. Style/SymbolArray: EnforcedStyle: brackets + +# This option makes examples harder to read for ruby novices. +Style/SymbolProc: + Exclude: + - "examples/**/*.rb" diff --git a/.solargraph.yml b/.solargraph.yml index 98397e75..4f571833 100644 --- a/.solargraph.yml +++ b/.solargraph.yml @@ -2,8 +2,9 @@ max_files: 0 include: - '*.gemspec' - - '/test/openai/test_helper.rb' - 'Rakefile' + - 'examples/**/*.rb' - 'lib/**/*.rb' + - 'test/openai/test_helper.rb' exclude: - 'rbi/**/*' diff --git a/examples/.keep b/examples/.keep new file mode 100644 index 00000000..d8c73e93 --- /dev/null +++ b/examples/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store example files demonstrating usage of this SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. 
\ No newline at end of file From 1cc01bc6d278f5c098021f32568ed24d46637afd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 17:27:40 +0000 Subject: [PATCH 035/295] fix: sorbet class aliases are not type aliases (#40) --- rbi/lib/openai/models/beta/threads/runs/run_step.rbi | 4 ++-- rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi | 4 ++-- .../models/beta/threads/runs/run_step_delta_event.rbi | 4 ++-- .../beta/threads/runs/run_step_delta_message_delta.rbi | 4 ++-- .../openai/models/beta/threads/runs/run_step_include.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion.rbi | 4 ++-- .../chat/chat_completion_assistant_message_param.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_audio.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_audio_param.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_chunk.rbi | 4 ++-- .../openai/models/chat/chat_completion_content_part.rbi | 4 ++-- .../models/chat/chat_completion_content_part_image.rbi | 4 ++-- .../chat/chat_completion_content_part_input_audio.rbi | 4 ++-- .../models/chat/chat_completion_content_part_refusal.rbi | 4 ++-- .../models/chat/chat_completion_content_part_text.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_deleted.rbi | 4 ++-- .../chat/chat_completion_developer_message_param.rbi | 4 ++-- .../models/chat/chat_completion_function_call_option.rbi | 4 ++-- .../models/chat/chat_completion_function_message_param.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_message.rbi | 4 ++-- .../openai/models/chat/chat_completion_message_param.rbi | 4 ++-- .../models/chat/chat_completion_message_tool_call.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_modality.rbi | 4 ++-- .../models/chat/chat_completion_named_tool_choice.rbi | 4 ++-- .../models/chat/chat_completion_prediction_content.rbi | 4 ++-- .../models/chat/chat_completion_reasoning_effort.rbi | 6 +++--- rbi/lib/openai/models/chat/chat_completion_role.rbi | 4 ++-- .../openai/models/chat/chat_completion_store_message.rbi | 4 ++-- .../openai/models/chat/chat_completion_stream_options.rbi | 4 ++-- .../models/chat/chat_completion_system_message_param.rbi | 4 ++-- .../openai/models/chat/chat_completion_token_logprob.rbi | 4 ++-- rbi/lib/openai/models/chat/chat_completion_tool.rbi | 4 ++-- .../models/chat/chat_completion_tool_choice_option.rbi | 4 ++-- .../models/chat/chat_completion_tool_message_param.rbi | 4 ++-- .../models/chat/chat_completion_user_message_param.rbi | 4 ++-- rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi | 4 ++-- .../openai/models/fine_tuning/fine_tuning_job_event.rbi | 4 ++-- .../models/fine_tuning/fine_tuning_job_integration.rbi | 7 +++---- .../fine_tuning/fine_tuning_job_wandb_integration.rbi | 4 ++-- .../fine_tuning_job_wandb_integration_object.rbi | 4 ++-- rbi/lib/openai/models/responses/response_item_list.rbi | 4 ++-- rbi/lib/openai/models/uploads/upload_part.rbi | 4 ++-- rbi/lib/openai/models/vector_stores/vector_store_file.rbi | 4 ++-- .../models/vector_stores/vector_store_file_batch.rbi | 4 ++-- .../models/vector_stores/vector_store_file_deleted.rbi | 4 ++-- 45 files changed, 92 insertions(+), 93 deletions(-) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 941b170d..2b1d5ced 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -4,8 +4,6 @@ module OpenAI module Models module Beta 
module Threads - RunStep = T.type_alias { Runs::RunStep } - module Runs class RunStep < OpenAI::BaseModel # The identifier of the run step, which can be referenced in API endpoints. @@ -392,6 +390,8 @@ module OpenAI end end end + + RunStep = Runs::RunStep end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index b2600ee1..6dca9ad3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -4,8 +4,6 @@ module OpenAI module Models module Beta module Threads - RunStepDelta = T.type_alias { Runs::RunStepDelta } - module Runs class RunStepDelta < OpenAI::BaseModel # The details of the run step. @@ -82,6 +80,8 @@ module OpenAI end end end + + RunStepDelta = Runs::RunStepDelta end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 7cc4a3fb..fe5c77a3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -4,8 +4,6 @@ module OpenAI module Models module Beta module Threads - RunStepDeltaEvent = T.type_alias { Runs::RunStepDeltaEvent } - module Runs class RunStepDeltaEvent < OpenAI::BaseModel # The identifier of the run step, which can be referenced in API endpoints. @@ -52,6 +50,8 @@ module OpenAI end end end + + RunStepDeltaEvent = Runs::RunStepDeltaEvent end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 02216a5c..6d4b72e8 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -4,8 +4,6 @@ module OpenAI module Models module Beta module Threads - RunStepDeltaMessageDelta = T.type_alias { Runs::RunStepDeltaMessageDelta } - module Runs class RunStepDeltaMessageDelta < OpenAI::BaseModel # Always `message_creation`. @@ -71,6 +69,8 @@ module OpenAI end end end + + RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 5026a3f5..566dd76e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -4,8 +4,6 @@ module OpenAI module Models module Beta module Threads - RunStepInclude = T.type_alias { Runs::RunStepInclude } - module Runs class RunStepInclude < OpenAI::Enum abstract! @@ -16,6 +14,8 @@ module OpenAI :"step_details.tool_calls[*].file_search.results[*].content" end end + + RunStepInclude = Runs::RunStepInclude end end end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index c68e3594..508ca188 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletion = T.type_alias { Chat::ChatCompletion } - module Chat class ChatCompletion < OpenAI::BaseModel # A unique identifier for the chat completion. 
@@ -282,5 +280,7 @@ module OpenAI end end end + + ChatCompletion = Chat::ChatCompletion end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index e97fdb66..60807405 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionAssistantMessageParam = T.type_alias { Chat::ChatCompletionAssistantMessageParam } - module Chat class ChatCompletionAssistantMessageParam < OpenAI::BaseModel # The role of the messages author, in this case `assistant`. @@ -278,5 +276,7 @@ module OpenAI end end end + + ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index 489f4145..5b7b1eae 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionAudio = T.type_alias { Chat::ChatCompletionAudio } - module Chat class ChatCompletionAudio < OpenAI::BaseModel # Unique identifier for this audio response. @@ -58,5 +56,7 @@ module OpenAI end end end + + ChatCompletionAudio = Chat::ChatCompletionAudio end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 0af4b8ee..215adc55 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionAudioParam = T.type_alias { Chat::ChatCompletionAudioParam } - module Chat class ChatCompletionAudioParam < OpenAI::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, @@ -69,5 +67,7 @@ module OpenAI end end end + + ChatCompletionAudioParam = Chat::ChatCompletionAudioParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index a3cc4a45..c2451af5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionChunk = T.type_alias { Chat::ChatCompletionChunk } - module Chat class ChatCompletionChunk < OpenAI::BaseModel # A unique identifier for the chat completion. Each chunk has the same ID. @@ -524,5 +522,7 @@ module OpenAI end end end + + ChatCompletionChunk = Chat::ChatCompletionChunk end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 70f9cc23..bff95f2d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionContentPart = T.type_alias { Chat::ChatCompletionContentPart } - module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
@@ -96,5 +94,7 @@ module OpenAI end end end + + ChatCompletionContentPart = Chat::ChatCompletionContentPart end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 56a6931a..78ef980f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionContentPartImage = T.type_alias { Chat::ChatCompletionContentPartImage } - module Chat class ChatCompletionContentPartImage < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) } @@ -82,5 +80,7 @@ module OpenAI end end end + + ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 83e891b0..87144715 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionContentPartInputAudio = T.type_alias { Chat::ChatCompletionContentPartInputAudio } - module Chat class ChatCompletionContentPartInputAudio < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) } @@ -82,5 +80,7 @@ module OpenAI end end end + + ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 392ce584..1cae47c2 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionContentPartRefusal = T.type_alias { Chat::ChatCompletionContentPartRefusal } - module Chat class ChatCompletionContentPartRefusal < OpenAI::BaseModel # The refusal message generated by the model. @@ -33,5 +31,7 @@ module OpenAI end end end + + ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index 1154a72d..2fcd20fa 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionContentPartText = T.type_alias { Chat::ChatCompletionContentPartText } - module Chat class ChatCompletionContentPartText < OpenAI::BaseModel # The text content. @@ -35,5 +33,7 @@ module OpenAI end end end + + ChatCompletionContentPartText = Chat::ChatCompletionContentPartText end end diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 72764c05..8ef5a9d5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionDeleted = T.type_alias { Chat::ChatCompletionDeleted } - module Chat class ChatCompletionDeleted < OpenAI::BaseModel # The ID of the chat completion that was deleted. 
@@ -42,5 +40,7 @@ module OpenAI end end end + + ChatCompletionDeleted = Chat::ChatCompletionDeleted end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index c3905878..33c1f24d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionDeveloperMessageParam = T.type_alias { Chat::ChatCompletionDeveloperMessageParam } - module Chat class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # The contents of the developer message. @@ -76,5 +74,7 @@ module OpenAI end end end + + ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index a17d3350..e1d6c904 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionFunctionCallOption = T.type_alias { Chat::ChatCompletionFunctionCallOption } - module Chat class ChatCompletionFunctionCallOption < OpenAI::BaseModel # The name of the function to call. @@ -26,5 +24,7 @@ module OpenAI end end end + + ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption end end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 1c035a2e..665339f5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionFunctionMessageParam = T.type_alias { Chat::ChatCompletionFunctionMessageParam } - module Chat class ChatCompletionFunctionMessageParam < OpenAI::BaseModel # The contents of the function message. @@ -42,5 +40,7 @@ module OpenAI end end end + + ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index a54250d7..041b818c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionMessage = T.type_alias { Chat::ChatCompletionMessage } - module Chat class ChatCompletionMessage < OpenAI::BaseModel # The contents of the message. @@ -246,5 +244,7 @@ module OpenAI end end end + + ChatCompletionMessage = Chat::ChatCompletionMessage end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 4172f718..6d17e25c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionMessageParam = T.type_alias { Chat::ChatCompletionMessageParam } - module Chat # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. 
With o1 models and newer, `developer` messages @@ -26,5 +24,7 @@ module OpenAI end end end + + ChatCompletionMessageParam = Chat::ChatCompletionMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index fadd4257..7ed40089 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionMessageToolCall = T.type_alias { Chat::ChatCompletionMessageToolCall } - module Chat class ChatCompletionMessageToolCall < OpenAI::BaseModel # The ID of the tool call. @@ -85,5 +83,7 @@ module OpenAI end end end + + ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall end end diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index df9e0044..25501740 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionModality = T.type_alias { Chat::ChatCompletionModality } - module Chat class ChatCompletionModality < OpenAI::Enum abstract! @@ -14,5 +12,7 @@ module OpenAI AUDIO = :audio end end + + ChatCompletionModality = Chat::ChatCompletionModality end end diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index d1d23deb..421a020f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionNamedToolChoice = T.type_alias { Chat::ChatCompletionNamedToolChoice } - module Chat class ChatCompletionNamedToolChoice < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) } @@ -59,5 +57,7 @@ module OpenAI end end end + + ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 2baabb6b..50f8e2fd 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionPredictionContent = T.type_alias { Chat::ChatCompletionPredictionContent } - module Chat class ChatCompletionPredictionContent < OpenAI::BaseModel # The content that should be matched when generating a model response. 
If @@ -65,5 +63,7 @@ module OpenAI end end end + + ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent end end diff --git a/rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi b/rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi index 204ceb39..479be67b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi @@ -2,10 +2,10 @@ module OpenAI module Models - ChatCompletionReasoningEffort = T.type_alias { Chat::ChatCompletionReasoningEffort } - module Chat - ChatCompletionReasoningEffort = T.type_alias { OpenAI::Models::ReasoningEffort } + ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort end + + ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort end end diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index ae42bac3..e2a2c70e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionRole = T.type_alias { Chat::ChatCompletionRole } - module Chat # The role of the author of a message class ChatCompletionRole < OpenAI::Enum @@ -19,5 +17,7 @@ module OpenAI FUNCTION = :function end end + + ChatCompletionRole = Chat::ChatCompletionRole end end diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi index a63c1c01..50654ac0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionStoreMessage = T.type_alias { Chat::ChatCompletionStoreMessage } - module Chat class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # The identifier of the chat message. @@ -25,5 +23,7 @@ module OpenAI end end end + + ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage end end diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 7d7809fc..fe3e980e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionStreamOptions = T.type_alias { Chat::ChatCompletionStreamOptions } - module Chat class ChatCompletionStreamOptions < OpenAI::BaseModel # If set, an additional chunk will be streamed before the `data: [DONE]` message. @@ -31,5 +29,7 @@ module OpenAI end end end + + ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index fd7477b7..49524af6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionSystemMessageParam = T.type_alias { Chat::ChatCompletionSystemMessageParam } - module Chat class ChatCompletionSystemMessageParam < OpenAI::BaseModel # The contents of the system message. 
@@ -76,5 +74,7 @@ module OpenAI end end end + + ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 2341c139..1eae4294 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionTokenLogprob = T.type_alias { Chat::ChatCompletionTokenLogprob } - module Chat class ChatCompletionTokenLogprob < OpenAI::BaseModel # The token. @@ -127,5 +125,7 @@ module OpenAI end end end + + ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index 5a41a09d..c25c2648 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionTool = T.type_alias { Chat::ChatCompletionTool } - module Chat class ChatCompletionTool < OpenAI::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } @@ -32,5 +30,7 @@ module OpenAI end end end + + ChatCompletionTool = Chat::ChatCompletionTool end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 5d0ba77e..c9ad2647 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionToolChoiceOption = T.type_alias { Chat::ChatCompletionToolChoiceOption } - module Chat # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can @@ -34,5 +32,7 @@ module OpenAI end end end + + ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index f4ea0f2e..485b4b8c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionToolMessageParam = T.type_alias { Chat::ChatCompletionToolMessageParam } - module Chat class ChatCompletionToolMessageParam < OpenAI::BaseModel # The contents of the tool message. @@ -72,5 +70,7 @@ module OpenAI end end end + + ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index ffaa8bc7..f97c4e46 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ChatCompletionUserMessageParam = T.type_alias { Chat::ChatCompletionUserMessageParam } - module Chat class ChatCompletionUserMessageParam < OpenAI::BaseModel # The contents of the user message. 
@@ -156,5 +154,7 @@ module OpenAI end end end + + ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 2de2b251..20dd1a07 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - FineTuningJob = T.type_alias { FineTuning::FineTuningJob } - module FineTuning class FineTuningJob < OpenAI::BaseModel # The object identifier, which can be referenced in the API endpoints. @@ -732,5 +730,7 @@ module OpenAI end end end + + FineTuningJob = FineTuning::FineTuningJob end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index b1e399d7..fb1daf42 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - FineTuningJobEvent = T.type_alias { FineTuning::FineTuningJobEvent } - module FineTuning class FineTuningJobEvent < OpenAI::BaseModel # The object identifier. @@ -124,5 +122,7 @@ module OpenAI end end end + + FineTuningJobEvent = FineTuning::FineTuningJobEvent end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi index 96daf9a4..8a6c1da1 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi @@ -2,11 +2,10 @@ module OpenAI module Models - FineTuningJobIntegration = T.type_alias { FineTuning::FineTuningJobIntegration } - module FineTuning - FineTuningJobIntegration = - T.type_alias { OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject } + FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject end + + FineTuningJobIntegration = FineTuning::FineTuningJobIntegration end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index f3899e11..22879caa 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - FineTuningJobWandbIntegration = T.type_alias { FineTuning::FineTuningJobWandbIntegration } - module FineTuning class FineTuningJobWandbIntegration < OpenAI::BaseModel # The name of the project that the new run will be created under. 
@@ -71,5 +69,7 @@ module OpenAI end end end + + FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index f6af3f86..fd3bcf9a 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - FineTuningJobWandbIntegrationObject = T.type_alias { FineTuning::FineTuningJobWandbIntegrationObject } - module FineTuning class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel # The type of the integration being enabled for the fine-tuning job @@ -42,5 +40,7 @@ module OpenAI end end end + + FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject end end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 983bd5b6..301dcc40 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - ResponseItemList = T.type_alias { Responses::ResponseItemList } - module Responses class ResponseItemList < OpenAI::BaseModel # A list of items used to generate this response. @@ -147,5 +145,7 @@ module OpenAI end end end + + ResponseItemList = Responses::ResponseItemList end end diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index ae805178..59ae7dda 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - UploadPart = T.type_alias { Uploads::UploadPart } - module Uploads class UploadPart < OpenAI::BaseModel # The upload Part unique identifier, which can be referenced in API endpoints. @@ -54,5 +52,7 @@ module OpenAI end end end + + UploadPart = Uploads::UploadPart end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 0d906238..4e1a4a36 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - VectorStoreFile = T.type_alias { VectorStores::VectorStoreFile } - module VectorStores class VectorStoreFile < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. @@ -226,5 +224,7 @@ module OpenAI end end end + + VectorStoreFile = VectorStores::VectorStoreFile end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 3ee9a72d..2e5ce798 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - VectorStoreFileBatch = T.type_alias { VectorStores::VectorStoreFileBatch } - module VectorStores class VectorStoreFileBatch < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. 
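An aside on the fix this patch applies throughout: Sorbet treats a `T.type_alias` value as a type-level construct only, so assigning one to a constant never yields a usable class alias at runtime. A minimal sketch of the difference, assuming `sorbet-runtime` is installed; `Widget` is a hypothetical class, not part of this SDK:

    require "sorbet-runtime"

    class Widget; end

    # A type alias: meaningful to the type checker, but the constant holds a
    # T::Private::Types::TypeAlias instance, not the class itself.
    WidgetType = T.type_alias { Widget }

    # A class alias: an ordinary constant assignment, so runtime and type
    # checker agree that both names refer to the same class.
    WidgetAlias = Widget

    WidgetAlias.new  # => #<Widget>
    WidgetType.new   # raises NoMethodError; a type alias is not a class

This is why the patch replaces `X = T.type_alias { Y }` with plain `X = Y` for class aliases, and moves the assignments below the definitions they refer to.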
@@ -185,5 +183,7 @@ module OpenAI end end end + + VectorStoreFileBatch = VectorStores::VectorStoreFileBatch end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi index 932ec0f8..d7a5a707 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -2,8 +2,6 @@ module OpenAI module Models - VectorStoreFileDeleted = T.type_alias { VectorStores::VectorStoreFileDeleted } - module VectorStores class VectorStoreFileDeleted < OpenAI::BaseModel sig { returns(String) } @@ -39,5 +37,7 @@ module OpenAI end end end + + VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted end end From 9e0acbeea87b23f6e57f7369fabf2470775da8e9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 17:33:05 +0000 Subject: [PATCH 036/295] fix: label optional keyword arguments in *.rbs type definitions (#41) --- sig/openai/base_client.rbs | 28 ++-- sig/openai/client.rbs | 16 +-- sig/openai/errors.rbs | 34 ++--- .../models/audio/speech_create_params.rbs | 6 +- .../audio/transcription_create_params.rbs | 12 +- .../models/audio/transcription_verbose.rbs | 4 +- .../audio/translation_create_params.rbs | 8 +- .../models/audio/translation_verbose.rbs | 2 +- .../auto_file_chunking_strategy_param.rbs | 2 +- sig/openai/models/batch.rbs | 30 ++-- sig/openai/models/batch_cancel_params.rbs | 2 +- sig/openai/models/batch_create_params.rbs | 4 +- sig/openai/models/batch_error.rbs | 7 +- sig/openai/models/batch_list_params.rbs | 6 +- sig/openai/models/batch_retrieve_params.rbs | 2 +- sig/openai/models/beta/assistant.rbs | 18 +-- .../models/beta/assistant_create_params.rbs | 42 +++--- .../models/beta/assistant_delete_params.rbs | 2 +- sig/openai/models/beta/assistant_deleted.rbs | 2 +- .../models/beta/assistant_list_params.rbs | 10 +- .../models/beta/assistant_retrieve_params.rbs | 2 +- .../models/beta/assistant_stream_event.rbs | 50 +++---- .../models/beta/assistant_tool_choice.rbs | 2 +- .../models/beta/assistant_update_params.rbs | 32 ++--- .../models/beta/code_interpreter_tool.rbs | 2 +- sig/openai/models/beta/file_search_tool.rbs | 10 +- sig/openai/models/beta/function_tool.rbs | 2 +- .../models/beta/message_stream_event.rbs | 10 +- .../models/beta/run_step_stream_event.rbs | 14 +- sig/openai/models/beta/run_stream_event.rbs | 20 +-- sig/openai/models/beta/thread.rbs | 10 +- .../beta/thread_create_and_run_params.rbs | 76 +++++----- .../models/beta/thread_create_params.rbs | 38 ++--- .../models/beta/thread_delete_params.rbs | 2 +- sig/openai/models/beta/thread_deleted.rbs | 2 +- .../models/beta/thread_retrieve_params.rbs | 2 +- .../models/beta/thread_stream_event.rbs | 4 +- .../models/beta/thread_update_params.rbs | 14 +- .../beta/threads/file_citation_annotation.rbs | 2 +- .../file_citation_delta_annotation.rbs | 12 +- .../beta/threads/file_path_annotation.rbs | 2 +- .../threads/file_path_delta_annotation.rbs | 12 +- sig/openai/models/beta/threads/image_file.rbs | 2 +- .../beta/threads/image_file_content_block.rbs | 2 +- .../models/beta/threads/image_file_delta.rbs | 4 +- .../beta/threads/image_file_delta_block.rbs | 4 +- sig/openai/models/beta/threads/image_url.rbs | 2 +- .../beta/threads/image_url_content_block.rbs | 2 +- .../models/beta/threads/image_url_delta.rbs | 4 +- .../beta/threads/image_url_delta_block.rbs | 4 +- 
sig/openai/models/beta/threads/message.rbs | 8 +- .../beta/threads/message_create_params.rbs | 12 +- .../beta/threads/message_delete_params.rbs | 2 +- .../models/beta/threads/message_deleted.rbs | 2 +- .../models/beta/threads/message_delta.rbs | 4 +- .../beta/threads/message_delta_event.rbs | 2 +- .../beta/threads/message_list_params.rbs | 12 +- .../beta/threads/message_retrieve_params.rbs | 2 +- .../beta/threads/message_update_params.rbs | 4 +- .../beta/threads/refusal_content_block.rbs | 2 +- .../beta/threads/refusal_delta_block.rbs | 2 +- .../required_action_function_tool_call.rbs | 2 +- sig/openai/models/beta/threads/run.rbs | 12 +- .../models/beta/threads/run_cancel_params.rbs | 2 +- .../models/beta/threads/run_create_params.rbs | 46 +++--- .../models/beta/threads/run_list_params.rbs | 10 +- .../beta/threads/run_retrieve_params.rbs | 2 +- .../run_submit_tool_outputs_params.rbs | 4 +- .../models/beta/threads/run_update_params.rbs | 4 +- .../threads/runs/code_interpreter_logs.rbs | 2 +- .../runs/code_interpreter_output_image.rbs | 6 +- .../runs/code_interpreter_tool_call.rbs | 6 +- .../runs/code_interpreter_tool_call_delta.rbs | 10 +- .../threads/runs/file_search_tool_call.rbs | 12 +- .../runs/file_search_tool_call_delta.rbs | 4 +- .../beta/threads/runs/function_tool_call.rbs | 2 +- .../threads/runs/function_tool_call_delta.rbs | 8 +- .../runs/message_creation_step_details.rbs | 2 +- .../models/beta/threads/runs/run_step.rbs | 2 +- .../beta/threads/runs/run_step_delta.rbs | 2 +- .../threads/runs/run_step_delta_event.rbs | 2 +- .../runs/run_step_delta_message_delta.rbs | 6 +- .../beta/threads/runs/step_list_params.rbs | 12 +- .../threads/runs/step_retrieve_params.rbs | 4 +- .../threads/runs/tool_call_delta_object.rbs | 4 +- .../threads/runs/tool_calls_step_details.rbs | 2 +- .../beta/threads/text_content_block.rbs | 2 +- .../beta/threads/text_content_block_param.rbs | 2 +- sig/openai/models/beta/threads/text_delta.rbs | 4 +- .../models/beta/threads/text_delta_block.rbs | 4 +- sig/openai/models/chat/chat_completion.rbs | 8 +- ...hat_completion_assistant_message_param.rbs | 14 +- .../models/chat/chat_completion_chunk.rbs | 30 ++-- .../chat/chat_completion_content_part.rbs | 4 +- .../chat_completion_content_part_image.rbs | 4 +- ...at_completion_content_part_input_audio.rbs | 2 +- .../chat_completion_content_part_refusal.rbs | 2 +- .../chat_completion_content_part_text.rbs | 2 +- .../models/chat/chat_completion_deleted.rbs | 2 +- ...hat_completion_developer_message_param.rbs | 4 +- ...chat_completion_function_message_param.rbs | 2 +- .../models/chat/chat_completion_message.rbs | 12 +- .../chat_completion_message_tool_call.rbs | 2 +- .../chat_completion_named_tool_choice.rbs | 2 +- .../chat_completion_prediction_content.rbs | 2 +- .../chat/chat_completion_stream_options.rbs | 2 +- .../chat_completion_system_message_param.rbs | 4 +- .../models/chat/chat_completion_tool.rbs | 2 +- .../chat_completion_tool_message_param.rbs | 2 +- .../chat_completion_user_message_param.rbs | 4 +- .../models/chat/completion_create_params.rbs | 76 +++++----- .../models/chat/completion_delete_params.rbs | 2 +- .../models/chat/completion_list_params.rbs | 12 +- .../chat/completion_retrieve_params.rbs | 2 +- .../models/chat/completion_update_params.rbs | 2 +- .../chat/completions/message_list_params.rbs | 8 +- sig/openai/models/completion.rbs | 6 +- sig/openai/models/completion_choice.rbs | 8 +- .../models/completion_create_params.rbs | 32 ++--- sig/openai/models/completion_usage.rbs | 14 +- 
.../models/create_embedding_response.rbs | 2 +- sig/openai/models/embedding.rbs | 2 +- sig/openai/models/embedding_create_params.rbs | 8 +- sig/openai/models/file_content_params.rbs | 2 +- sig/openai/models/file_create_params.rbs | 2 +- sig/openai/models/file_delete_params.rbs | 2 +- sig/openai/models/file_deleted.rbs | 2 +- sig/openai/models/file_list_params.rbs | 10 +- sig/openai/models/file_object.rbs | 6 +- sig/openai/models/file_retrieve_params.rbs | 2 +- .../models/fine_tuning/fine_tuning_job.rbs | 40 +++--- .../fine_tuning/fine_tuning_job_event.rbs | 6 +- .../fine_tuning_job_wandb_integration.rbs | 6 +- ...ne_tuning_job_wandb_integration_object.rbs | 2 +- .../models/fine_tuning/job_cancel_params.rbs | 2 +- .../models/fine_tuning/job_create_params.rbs | 54 +++---- .../fine_tuning/job_list_events_params.rbs | 6 +- .../models/fine_tuning/job_list_params.rbs | 8 +- .../fine_tuning/job_retrieve_params.rbs | 2 +- .../jobs/checkpoint_list_params.rbs | 6 +- .../jobs/fine_tuning_job_checkpoint.rbs | 16 +-- sig/openai/models/function_definition.rbs | 6 +- sig/openai/models/image.rbs | 2 +- .../models/image_create_variation_params.rbs | 12 +- sig/openai/models/image_edit_params.rbs | 14 +- sig/openai/models/image_generate_params.rbs | 16 +-- sig/openai/models/model.rbs | 7 +- sig/openai/models/model_delete_params.rbs | 2 +- sig/openai/models/model_list_params.rbs | 2 +- sig/openai/models/model_retrieve_params.rbs | 2 +- .../models/moderation_create_params.rbs | 4 +- .../models/moderation_image_url_input.rbs | 2 +- sig/openai/models/moderation_text_input.rbs | 2 +- .../other_file_chunking_strategy_object.rbs | 2 +- sig/openai/models/reasoning.rbs | 4 +- .../models/response_format_json_object.rbs | 2 +- .../models/response_format_json_schema.rbs | 8 +- sig/openai/models/response_format_text.rbs | 2 +- sig/openai/models/responses/computer_tool.rbs | 2 +- .../models/responses/easy_input_message.rbs | 2 +- .../models/responses/file_search_tool.rbs | 12 +- sig/openai/models/responses/function_tool.rbs | 4 +- .../responses/input_item_list_params.rbs | 10 +- sig/openai/models/responses/response.rbs | 20 +-- .../responses/response_audio_delta_event.rbs | 2 +- .../responses/response_audio_done_event.rbs | 2 +- .../response_audio_transcript_delta_event.rbs | 2 +- .../response_audio_transcript_done_event.rbs | 2 +- ...code_interpreter_call_code_delta_event.rbs | 2 +- ..._code_interpreter_call_code_done_event.rbs | 2 +- ..._code_interpreter_call_completed_event.rbs | 2 +- ...ode_interpreter_call_in_progress_event.rbs | 2 +- ...de_interpreter_call_interpreting_event.rbs | 2 +- .../response_code_interpreter_tool_call.rbs | 6 +- .../responses/response_completed_event.rbs | 2 +- .../responses/response_computer_tool_call.rbs | 18 +-- ...esponse_computer_tool_call_output_item.rbs | 6 +- ...e_computer_tool_call_output_screenshot.rbs | 6 +- .../response_content_part_added_event.rbs | 2 +- .../response_content_part_done_event.rbs | 2 +- .../responses/response_create_params.rbs | 32 ++--- .../responses/response_created_event.rbs | 2 +- .../responses/response_delete_params.rbs | 2 +- .../models/responses/response_error_event.rbs | 7 +- .../responses/response_failed_event.rbs | 2 +- ...ponse_file_search_call_completed_event.rbs | 2 +- ...nse_file_search_call_in_progress_event.rbs | 2 +- ...ponse_file_search_call_searching_event.rbs | 2 +- .../response_file_search_tool_call.rbs | 14 +- ...esponse_format_text_json_schema_config.rbs | 8 +- ...se_function_call_arguments_delta_event.rbs | 2 +- 
...nse_function_call_arguments_done_event.rbs | 2 +- .../responses/response_function_tool_call.rbs | 6 +- ...esponse_function_tool_call_output_item.rbs | 4 +- .../response_function_web_search.rbs | 2 +- .../responses/response_in_progress_event.rbs | 2 +- .../responses/response_incomplete_event.rbs | 2 +- .../models/responses/response_input_audio.rbs | 2 +- .../models/responses/response_input_file.rbs | 8 +- .../models/responses/response_input_image.rbs | 6 +- .../models/responses/response_input_item.rbs | 20 +-- .../responses/response_input_message_item.rbs | 4 +- .../models/responses/response_input_text.rbs | 2 +- .../models/responses/response_item_list.rbs | 2 +- .../responses/response_output_audio.rbs | 2 +- .../response_output_item_added_event.rbs | 2 +- .../response_output_item_done_event.rbs | 2 +- .../responses/response_output_message.rbs | 4 +- .../responses/response_output_refusal.rbs | 2 +- .../models/responses/response_output_text.rbs | 8 +- .../responses/response_reasoning_item.rbs | 6 +- .../response_refusal_delta_event.rbs | 2 +- .../responses/response_refusal_done_event.rbs | 2 +- .../responses/response_retrieve_params.rbs | 4 +- .../response_text_annotation_delta_event.rbs | 8 +- .../models/responses/response_text_config.rbs | 2 +- .../responses/response_text_delta_event.rbs | 2 +- .../responses/response_text_done_event.rbs | 2 +- ...sponse_web_search_call_completed_event.rbs | 2 +- ...onse_web_search_call_in_progress_event.rbs | 2 +- ...sponse_web_search_call_searching_event.rbs | 2 +- .../models/responses/tool_choice_function.rbs | 2 +- .../models/responses/web_search_tool.rbs | 14 +- .../static_file_chunking_strategy_object.rbs | 2 +- ...ic_file_chunking_strategy_object_param.rbs | 2 +- sig/openai/models/upload.rbs | 4 +- sig/openai/models/upload_cancel_params.rbs | 2 +- sig/openai/models/upload_complete_params.rbs | 4 +- sig/openai/models/upload_create_params.rbs | 2 +- .../models/uploads/part_create_params.rbs | 2 +- sig/openai/models/uploads/upload_part.rbs | 2 +- sig/openai/models/vector_store.rbs | 8 +- .../models/vector_store_create_params.rbs | 14 +- .../models/vector_store_delete_params.rbs | 2 +- sig/openai/models/vector_store_deleted.rbs | 2 +- .../models/vector_store_list_params.rbs | 10 +- .../models/vector_store_retrieve_params.rbs | 2 +- .../models/vector_store_search_params.rbs | 14 +- .../models/vector_store_update_params.rbs | 10 +- .../file_batch_cancel_params.rbs | 2 +- .../file_batch_create_params.rbs | 6 +- .../file_batch_list_files_params.rbs | 12 +- .../file_batch_retrieve_params.rbs | 2 +- .../vector_stores/file_content_params.rbs | 2 +- .../vector_stores/file_content_response.rbs | 2 +- .../vector_stores/file_create_params.rbs | 6 +- .../vector_stores/file_delete_params.rbs | 2 +- .../models/vector_stores/file_list_params.rbs | 12 +- .../vector_stores/file_retrieve_params.rbs | 2 +- .../vector_stores/file_update_params.rbs | 2 +- .../vector_stores/vector_store_file.rbs | 6 +- .../vector_stores/vector_store_file_batch.rbs | 2 +- .../vector_store_file_deleted.rbs | 2 +- sig/openai/pooled_net_requester.rbs | 2 +- sig/openai/resources/audio/speech.rbs | 6 +- sig/openai/resources/audio/transcriptions.rbs | 12 +- sig/openai/resources/audio/translations.rbs | 8 +- sig/openai/resources/batches.rbs | 14 +- sig/openai/resources/beta/assistants.rbs | 60 ++++---- sig/openai/resources/beta/threads.rbs | 78 +++++----- .../resources/beta/threads/messages.rbs | 26 ++-- sig/openai/resources/beta/threads/runs.rbs | 90 ++++++------ 
 .../resources/beta/threads/runs/steps.rbs | 16 +--
 sig/openai/resources/chat/completions.rbs | 134 +++++++++---------
 .../resources/chat/completions/messages.rbs | 8 +-
 sig/openai/resources/completions.rbs | 64 ++++-----
 sig/openai/resources/embeddings.rbs | 8 +-
 sig/openai/resources/files.rbs | 18 +--
 sig/openai/resources/fine_tuning/jobs.rbs | 34 ++---
 .../fine_tuning/jobs/checkpoints.rbs | 6 +-
 sig/openai/resources/images.rbs | 42 +++---
 sig/openai/resources/models.rbs | 6 +-
 sig/openai/resources/moderations.rbs | 4 +-
 sig/openai/resources/responses.rbs | 70 ++++-----
 .../resources/responses/input_items.rbs | 10 +-
 sig/openai/resources/uploads.rbs | 8 +-
 sig/openai/resources/uploads/parts.rbs | 2 +-
 sig/openai/resources/vector_stores.rbs | 44 +++---
 .../resources/vector_stores/file_batches.rbs | 22 +--
 sig/openai/resources/vector_stores/files.rbs | 26 ++--
 sig/openai/util.rbs | 10 +-
 281 files changed, 1329 insertions(+), 1314 deletions(-)

diff --git a/sig/openai/base_client.rbs b/sig/openai/base_client.rbs
index 0c19b54e..38596607 100644
--- a/sig/openai/base_client.rbs
+++ b/sig/openai/base_client.rbs
@@ -53,14 +53,14 @@ module OpenAI
     def initialize:
       (
         base_url: String,
-        timeout: Float,
-        max_retries: Integer,
-        initial_retry_delay: Float,
-        max_retry_delay: Float,
-        headers: ::Hash[String, (String
+        ?timeout: Float,
+        ?max_retries: Integer,
+        ?initial_retry_delay: Float,
+        ?max_retry_delay: Float,
+        ?headers: ::Hash[String, (String
         | Integer
         | ::Array[(String | Integer)?])?],
-        idempotency_header: String?
+        ?idempotency_header: String?
       ) -> void

       private def auth_headers: -> ::Hash[String, String]
@@ -88,16 +88,16 @@ module OpenAI
         (
           Symbol method,
           String | ::Array[String] path,
-          query: ::Hash[String, (::Array[String] | String)?]?,
-          headers: ::Hash[String, (String
+          ?query: ::Hash[String, (::Array[String] | String)?]?,
+          ?headers: ::Hash[String, (String
           | Integer
           | ::Array[(String | Integer)?])?]?,
-          body: top?,
-          unwrap: Symbol?,
-          page: Class?,
-          stream: Class?,
-          model: OpenAI::Converter::input?,
-          options: OpenAI::request_opts?
+          ?body: top?,
+          ?unwrap: Symbol?,
+          ?page: Class?,
+          ?stream: Class?,
+          ?model: OpenAI::Converter::input?,
+          ?options: OpenAI::request_opts?
         ) -> top
         | (OpenAI::BaseClient::request_components req) -> top
diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs
index 2d7d9a11..f97f86d3 100644
--- a/sig/openai/client.rbs
+++ b/sig/openai/client.rbs
@@ -45,14 +45,14 @@ module OpenAI
     private def auth_headers: -> ::Hash[String, String]

     def initialize: (
-      base_url: String?,
-      api_key: String?,
-      organization: String?,
-      project: String?,
-      max_retries: Integer,
-      timeout: Float,
-      initial_retry_delay: Float,
-      max_retry_delay: Float
+      ?base_url: String?,
+      ?api_key: String?,
+      ?organization: String?,
+      ?project: String?,
+      ?max_retries: Integer,
+      ?timeout: Float,
+      ?initial_retry_delay: Float,
+      ?max_retry_delay: Float
     ) -> void
   end
 end
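Every hunk in this sweep follows the same pattern: keyword arguments that have defaults in the Ruby implementation gain a leading ? in their RBS signatures, which marks them as omittable at the call site. The leading ? on a keyword is unrelated to the trailing ? on a type, which only says the value may be nil. A minimal sketch of the distinction, using a hypothetical Widget class that is not part of this SDK:

  # widget.rb -- Ruby implementation: timeout has a default, name does not
  class Widget
    def initialize(name:, timeout: 5.0)
      @name = name
      @timeout = timeout
    end
  end

  # widget.rbs -- the matching signature, shown here as a comment:
  #
  #   class Widget
  #     def initialize: (name: String?, ?timeout: Float) -> void
  #   end
  #
  # name: String?    required keyword whose value may be nil
  # ?timeout: Float  optional keyword; omitting it entirely is type-safe

  Widget.new(name: nil)                    # ok: nil value, timeout omitted
  Widget.new(name: "gear", timeout: 1.0)   # ok: both keywords supplied

Under the old signatures, a checker such as Steep would reject the first call for omitting timeout:, even though the Ruby default makes it valid at runtime; that mismatch is what the ? prefixes throughout this patch repair.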
diff --git a/sig/openai/errors.rbs b/sig/openai/errors.rbs
index b5c00826..6d5804f3 100644
--- a/sig/openai/errors.rbs
+++ b/sig/openai/errors.rbs
@@ -21,33 +21,33 @@ module OpenAI
     def initialize:
       (
         url: URI::Generic,
-        status: Integer?,
-        body: Object?,
-        request: nil,
-        response: nil,
-        message: String?
+        ?status: Integer?,
+        ?body: Object?,
+        ?request: nil,
+        ?response: nil,
+        ?message: String?
       ) -> void
   end

   class APIConnectionError < OpenAI::APIError
     def initialize:
       (
         url: URI::Generic,
-        status: nil,
-        body: nil,
-        request: nil,
-        response: nil,
-        message: String?
+        ?status: nil,
+        ?body: nil,
+        ?request: nil,
+        ?response: nil,
+        ?message: String?
       ) -> void
   end

   class APITimeoutError < OpenAI::APIConnectionError
     def initialize:
       (
         url: URI::Generic,
-        status: nil,
-        body: nil,
-        request: nil,
-        response: nil,
-        message: String?
+        ?status: nil,
+        ?body: nil,
+        ?request: nil,
+        ?response: nil,
+        ?message: String?
       ) -> void
   end

@@ -58,7 +58,7 @@ module OpenAI
         body: Object?,
         request: nil,
         response: nil,
-        message: String?
+        ?message: String?
       ) -> instance
     def initialize:
       (
@@ -67,7 +67,7 @@ module OpenAI
         body: Object?,
         request: nil,
         response: nil,
-        message: String?
+        ?message: String?
       ) -> void
   end
diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs
index c1e0ec60..2b81b56f 100644
--- a/sig/openai/models/audio/speech_create_params.rbs
+++ b/sig/openai/models/audio/speech_create_params.rbs
@@ -36,9 +36,9 @@ module OpenAI
           input: String,
           model: OpenAI::Models::Audio::SpeechCreateParams::model,
           voice: OpenAI::Models::Audio::SpeechCreateParams::voice,
-          response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
-          speed: Float,
-          request_options: OpenAI::request_opts
+          ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
+          ?speed: Float,
+          ?request_options: OpenAI::request_opts
         ) -> void
         | (
           ?OpenAI::Models::Audio::speech_create_params
diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs
index 22080806..06a30723 100644
--- a/sig/openai/models/audio/transcription_create_params.rbs
+++ b/sig/openai/models/audio/transcription_create_params.rbs
@@ -49,12 +49,12 @@ module OpenAI
         (
           file: IO | StringIO,
           model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
-          language: String,
-          prompt: String,
-          response_format: OpenAI::Models::audio_response_format,
-          temperature: Float,
-          timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
-          request_options: OpenAI::request_opts
+          ?language: String,
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
+          ?request_options: OpenAI::request_opts
         ) -> void
         | (
           ?OpenAI::Models::Audio::transcription_create_params
diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs
index 268831d6..95ef5685 100644
--- a/sig/openai/models/audio/transcription_verbose.rbs
+++ b/sig/openai/models/audio/transcription_verbose.rbs
@@ -34,8 +34,8 @@ module OpenAI
           duration: Float,
           language: String,
           text: String,
-          segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment],
-          words: ::Array[OpenAI::Models::Audio::TranscriptionWord]
+          ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment],
+          ?words: ::Array[OpenAI::Models::Audio::TranscriptionWord]
         ) -> void
         | (
           ?OpenAI::Models::Audio::transcription_verbose
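The caller-facing payoff of the transcription and translation signatures here is that type-checked code only has to pass the required keywords. A rough usage sketch, assuming OPENAI_API_KEY is set in the environment and that the resource method and model name below ("whisper-1") match your setup:

  # transcribe.rb -- only file: and model: are required by the new signature;
  # ?language, ?prompt, ?response_format, ?temperature,
  # ?timestamp_granularities and ?request_options are all omitted.
  require "openai"

  client = OpenAI::Client.new # every constructor keyword is now ?-optional

  File.open("meeting.wav", "rb") do |file|
    transcription = client.audio.transcriptions.create(
      file: file,        # IO | StringIO per the signature above
      model: "whisper-1"
    )
    puts transcription.text
  end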
diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs
index 916641d5..3cabef67 100644
--- a/sig/openai/models/audio/translation_create_params.rbs
+++ b/sig/openai/models/audio/translation_create_params.rbs
@@ -37,10 +37,10 @@ module OpenAI
         (
           file: IO | StringIO,
           model: OpenAI::Models::Audio::TranslationCreateParams::model,
-          prompt: String,
-          response_format: OpenAI::Models::audio_response_format,
-          temperature: Float,
-          request_options: OpenAI::request_opts
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?request_options: OpenAI::request_opts
         ) -> void
         | (
           ?OpenAI::Models::Audio::translation_create_params
diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs
index 0b2aeae7..11c8b1ba 100644
--- a/sig/openai/models/audio/translation_verbose.rbs
+++ b/sig/openai/models/audio/translation_verbose.rbs
@@ -27,7 +27,7 @@ module OpenAI
           duration: Float,
           language: String,
           text: String,
-          segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]
+          ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]
         ) -> void
         | (
           ?OpenAI::Models::Audio::translation_verbose | OpenAI::BaseModel data
diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs
index 85817088..c97d8d48 100644
--- a/sig/openai/models/auto_file_chunking_strategy_param.rbs
+++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs
@@ -6,7 +6,7 @@ module OpenAI
       attr_accessor type: :auto

       def initialize:
-        (type: :auto) -> void
+        (?type: :auto) -> void
         | (
           ?OpenAI::Models::auto_file_chunking_strategy_param
           | OpenAI::BaseModel data
diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs
index 97e29c54..fc897fa2 100644
--- a/sig/openai/models/batch.rbs
+++ b/sig/openai/models/batch.rbs
@@ -101,20 +101,20 @@ module OpenAI
           endpoint: String,
           input_file_id: String,
           status: OpenAI::Models::Batch::status,
-          cancelled_at: Integer,
-          cancelling_at: Integer,
-          completed_at: Integer,
-          error_file_id: String,
-          errors: OpenAI::Models::Batch::Errors,
-          expired_at: Integer,
-          expires_at: Integer,
-          failed_at: Integer,
-          finalizing_at: Integer,
-          in_progress_at: Integer,
-          metadata: OpenAI::Models::metadata?,
-          output_file_id: String,
-          request_counts: OpenAI::Models::BatchRequestCounts,
-          object: :batch
+          ?cancelled_at: Integer,
+          ?cancelling_at: Integer,
+          ?completed_at: Integer,
+          ?error_file_id: String,
+          ?errors: OpenAI::Models::Batch::Errors,
+          ?expired_at: Integer,
+          ?expires_at: Integer,
+          ?failed_at: Integer,
+          ?finalizing_at: Integer,
+          ?in_progress_at: Integer,
+          ?metadata: OpenAI::Models::metadata?,
+          ?output_file_id: String,
+          ?request_counts: OpenAI::Models::BatchRequestCounts,
+          ?object: :batch
         ) -> void
         | (?OpenAI::Models::batch | OpenAI::BaseModel data) -> void

@@ -158,7 +158,7 @@ module OpenAI
           def object=: (String) -> String

           def initialize:
-            (data: ::Array[OpenAI::Models::BatchError], object: String) -> void
+            (?data: ::Array[OpenAI::Models::BatchError], ?object: String) -> void
             | (?OpenAI::Models::Batch::errors | OpenAI::BaseModel data) -> void

           def to_hash: -> OpenAI::Models::Batch::errors
diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs
index 793ea0a9..cff10508 100644
--- a/sig/openai/models/batch_cancel_params.rbs
+++ b/sig/openai/models/batch_cancel_params.rbs
@@ -7,7 +7,7 @@ module OpenAI
       include OpenAI::RequestParameters

       def initialize:
-        (request_options: OpenAI::request_opts) -> void
+        (?request_options: OpenAI::request_opts) -> void
         | (
           ?OpenAI::Models::batch_cancel_params | OpenAI::BaseModel data
         ) -> void
diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs
index 961c7870..6706d5f8 100644
--- a/sig/openai/models/batch_create_params.rbs
+++ b/sig/openai/models/batch_create_params.rbs
@@ -26,8 +26,8 @@ module OpenAI
           completion_window:
OpenAI::Models::BatchCreateParams::completion_window, endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, - metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::batch_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs index 4803b3cd..6818a5f8 100644 --- a/sig/openai/models/batch_error.rbs +++ b/sig/openai/models/batch_error.rbs @@ -17,7 +17,12 @@ module OpenAI attr_accessor param: String? def initialize: - (code: String, line: Integer?, message: String, param: String?) -> void + ( + ?code: String, + ?line: Integer?, + ?message: String, + ?param: String? + ) -> void | (?OpenAI::Models::batch_error | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::batch_error diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index e1265113..f82c752c 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -17,9 +17,9 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> void | (?OpenAI::Models::batch_list_params | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index e3af551a..684b6d6e 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::batch_retrieve_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs index 5e7d12d3..498d0353 100644 --- a/sig/openai/models/beta/assistant.rbs +++ b/sig/openai/models/beta/assistant.rbs @@ -55,11 +55,11 @@ module OpenAI model: String, name: String?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?, - top_p: Float?, - object: :assistant + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?, + ?top_p: Float?, + ?object: :assistant ) -> void | (?OpenAI::Models::Beta::assistant | OpenAI::BaseModel data) -> void @@ -86,8 +86,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::Assistant::tool_resources @@ -104,7 +104,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::Assistant::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -121,7 +121,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: - 
(vector_store_ids: ::Array[String]) -> void + (?vector_store_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::Assistant::ToolResources::file_search | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index 21b76a8c..ffa5e021 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -50,17 +50,17 @@ module OpenAI def initialize: ( model: OpenAI::Models::Beta::AssistantCreateParams::model, - description: String?, - instructions: String?, - metadata: OpenAI::Models::metadata?, - name: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool], - top_p: Float?, - request_options: OpenAI::request_opts + ?description: String?, + ?instructions: String?, + ?metadata: OpenAI::Models::metadata?, + ?name: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], + ?top_p: Float?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Beta::assistant_create_params @@ -96,8 +96,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::tool_resources @@ -114,7 +114,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -142,8 +142,8 @@ module OpenAI def initialize: ( - vector_store_ids: ::Array[String], - vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + ?vector_store_ids: ::Array[String], + ?vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] ) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::file_search @@ -174,9 +174,9 @@ module OpenAI def initialize: ( - chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, - file_ids: ::Array[String], - metadata: OpenAI::Models::metadata? + ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata? 
) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::vector_store @@ -196,7 +196,7 @@ module OpenAI attr_accessor type: :auto def initialize: - (type: :auto) -> void + (?type: :auto) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto | OpenAI::BaseModel data @@ -219,7 +219,7 @@ module OpenAI def initialize: ( static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - type: :static + ?type: :static ) -> void | ( ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index fae651c6..cddab789 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Beta::assistant_delete_params | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs index 655b5bdc..6b58f13d 100644 --- a/sig/openai/models/beta/assistant_deleted.rbs +++ b/sig/openai/models/beta/assistant_deleted.rbs @@ -12,7 +12,7 @@ module OpenAI attr_accessor object: :"assistant.deleted" def initialize: - (id: String, deleted: bool, object: :"assistant.deleted") -> void + (id: String, deleted: bool, ?object: :"assistant.deleted") -> void | ( ?OpenAI::Models::Beta::assistant_deleted | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index c2254036..8d300b0b 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -34,11 +34,11 @@ module OpenAI def initialize: ( - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Beta::AssistantListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Beta::assistant_list_params diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index a1148ac9..16e5a79b 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Beta::assistant_retrieve_params | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index 37d03210..09fd2e28 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -47,8 +47,8 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Thread, - enabled: bool, - event: :"thread.created" + ?enabled: bool, + ?event: :"thread.created" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_created @@ -72,7 +72,7 @@ module OpenAI def initialize: ( data: 
OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.created" + ?event: :"thread.run.created" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_created @@ -96,7 +96,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.queued" + ?event: :"thread.run.queued" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_queued @@ -120,7 +120,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.in_progress" + ?event: :"thread.run.in_progress" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_in_progress @@ -144,7 +144,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.requires_action" + ?event: :"thread.run.requires_action" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_requires_action @@ -168,7 +168,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.completed" + ?event: :"thread.run.completed" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_completed @@ -192,7 +192,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.incomplete" + ?event: :"thread.run.incomplete" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_incomplete @@ -216,7 +216,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.failed" + ?event: :"thread.run.failed" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_failed @@ -240,7 +240,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelling" + ?event: :"thread.run.cancelling" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelling @@ -264,7 +264,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelled" + ?event: :"thread.run.cancelled" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelled @@ -288,7 +288,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.expired" + ?event: :"thread.run.expired" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_expired @@ -312,7 +312,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.created" + ?event: :"thread.run.step.created" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_created @@ -336,7 +336,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.in_progress" + ?event: :"thread.run.step.in_progress" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_in_progress @@ -360,7 +360,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, - event: :"thread.run.step.delta" + ?event: :"thread.run.step.delta" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_delta @@ -384,7 +384,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.completed" + ?event: :"thread.run.step.completed" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_completed @@ -408,7 +408,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.failed" + 
?event: :"thread.run.step.failed" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_failed @@ -432,7 +432,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.cancelled" + ?event: :"thread.run.step.cancelled" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_cancelled @@ -456,7 +456,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.expired" + ?event: :"thread.run.step.expired" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_expired @@ -480,7 +480,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.created" + ?event: :"thread.message.created" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_created @@ -504,7 +504,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.in_progress" + ?event: :"thread.message.in_progress" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_in_progress @@ -528,7 +528,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, - event: :"thread.message.delta" + ?event: :"thread.message.delta" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_delta @@ -552,7 +552,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.completed" + ?event: :"thread.message.completed" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_completed @@ -576,7 +576,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.incomplete" + ?event: :"thread.message.incomplete" ) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_incomplete @@ -594,7 +594,7 @@ module OpenAI attr_accessor event: :error def initialize: - (data: OpenAI::Models::ErrorObject, event: :error) -> void + (data: OpenAI::Models::ErrorObject, ?event: :error) -> void | ( ?OpenAI::Models::Beta::AssistantStreamEvent::error_event | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs index e3234bbe..caa0e8dc 100644 --- a/sig/openai/models/beta/assistant_tool_choice.rbs +++ b/sig/openai/models/beta/assistant_tool_choice.rbs @@ -19,7 +19,7 @@ module OpenAI def initialize: ( type: OpenAI::Models::Beta::AssistantToolChoice::type_, - function: OpenAI::Models::Beta::AssistantToolChoiceFunction + ?function: OpenAI::Models::Beta::AssistantToolChoiceFunction ) -> void | ( ?OpenAI::Models::Beta::assistant_tool_choice diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index 984e1952..c0704e99 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -53,18 +53,18 @@ module OpenAI def initialize: ( - description: String?, - instructions: String?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::AssistantUpdateParams::model, - name: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool], - top_p: Float?, - 
request_options: OpenAI::request_opts + ?description: String?, + ?instructions: String?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::AssistantUpdateParams::model, + ?name: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], + ?top_p: Float?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Beta::assistant_update_params @@ -191,8 +191,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::AssistantUpdateParams::tool_resources @@ -209,7 +209,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -226,7 +226,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: - (vector_store_ids: ::Array[String]) -> void + (?vector_store_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::file_search | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs index 31372e23..af14840c 100644 --- a/sig/openai/models/beta/code_interpreter_tool.rbs +++ b/sig/openai/models/beta/code_interpreter_tool.rbs @@ -7,7 +7,7 @@ module OpenAI attr_accessor type: :code_interpreter def initialize: - (type: :code_interpreter) -> void + (?type: :code_interpreter) -> void | ( ?OpenAI::Models::Beta::code_interpreter_tool | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs index 731166b6..05089b00 100644 --- a/sig/openai/models/beta/file_search_tool.rbs +++ b/sig/openai/models/beta/file_search_tool.rbs @@ -18,8 +18,8 @@ module OpenAI def initialize: ( - file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch, - type: :file_search + ?file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch, + ?type: :file_search ) -> void | ( ?OpenAI::Models::Beta::file_search_tool | OpenAI::BaseModel data @@ -46,8 +46,8 @@ module OpenAI def initialize: ( - max_num_results: Integer, - ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions ) -> void | ( ?OpenAI::Models::Beta::FileSearchTool::file_search @@ -74,7 +74,7 @@ module OpenAI def initialize: ( score_threshold: Float, - ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker ) -> void | ( ?OpenAI::Models::Beta::FileSearchTool::FileSearch::ranking_options diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs index 43a63e37..e8fb7686 100644 --- 
a/sig/openai/models/beta/function_tool.rbs +++ b/sig/openai/models/beta/function_tool.rbs @@ -12,7 +12,7 @@ module OpenAI def initialize: ( function: OpenAI::Models::FunctionDefinition, - type: :function + ?type: :function ) -> void | ( ?OpenAI::Models::Beta::function_tool | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index d46abf27..7b782bca 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -23,7 +23,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.created" + ?event: :"thread.message.created" ) -> void | ( ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_created @@ -47,7 +47,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.in_progress" + ?event: :"thread.message.in_progress" ) -> void | ( ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_in_progress @@ -71,7 +71,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, - event: :"thread.message.delta" + ?event: :"thread.message.delta" ) -> void | ( ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_delta @@ -95,7 +95,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.completed" + ?event: :"thread.message.completed" ) -> void | ( ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_completed @@ -119,7 +119,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Message, - event: :"thread.message.incomplete" + ?event: :"thread.message.incomplete" ) -> void | ( ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index b0c1a0a3..59c2999e 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -25,7 +25,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.created" + ?event: :"thread.run.step.created" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_created @@ -49,7 +49,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.in_progress" + ?event: :"thread.run.step.in_progress" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_in_progress @@ -73,7 +73,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, - event: :"thread.run.step.delta" + ?event: :"thread.run.step.delta" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_delta @@ -97,7 +97,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.completed" + ?event: :"thread.run.step.completed" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_completed @@ -121,7 +121,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.failed" + ?event: :"thread.run.step.failed" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_failed @@ -145,7 +145,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.cancelled" + ?event: :"thread.run.step.cancelled" 
) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_cancelled @@ -169,7 +169,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Runs::RunStep, - event: :"thread.run.step.expired" + ?event: :"thread.run.step.expired" ) -> void | ( ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index 650253ce..38c2a746 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -28,7 +28,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.created" + ?event: :"thread.run.created" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_created @@ -52,7 +52,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.queued" + ?event: :"thread.run.queued" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_queued @@ -76,7 +76,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.in_progress" + ?event: :"thread.run.in_progress" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_in_progress @@ -100,7 +100,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.requires_action" + ?event: :"thread.run.requires_action" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_requires_action @@ -124,7 +124,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.completed" + ?event: :"thread.run.completed" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_completed @@ -148,7 +148,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.incomplete" + ?event: :"thread.run.incomplete" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_incomplete @@ -172,7 +172,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.failed" + ?event: :"thread.run.failed" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_failed @@ -196,7 +196,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelling" + ?event: :"thread.run.cancelling" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelling @@ -220,7 +220,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelled" + ?event: :"thread.run.cancelled" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelled @@ -244,7 +244,7 @@ module OpenAI def initialize: ( data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.expired" + ?event: :"thread.run.expired" ) -> void | ( ?OpenAI::Models::Beta::RunStreamEvent::thread_run_expired diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs index 14d481b1..0188f0dc 100644 --- a/sig/openai/models/beta/thread.rbs +++ b/sig/openai/models/beta/thread.rbs @@ -27,7 +27,7 @@ module OpenAI created_at: Integer, metadata: OpenAI::Models::metadata?, tool_resources: OpenAI::Models::Beta::Thread::ToolResources?, - object: :thread + ?object: :thread ) -> void | (?OpenAI::Models::Beta::thread | OpenAI::BaseModel data) -> void @@ -54,8 +54,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, - file_search: 
OpenAI::Models::Beta::Thread::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::Thread::tool_resources @@ -72,7 +72,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::Thread::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -89,7 +89,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: - (vector_store_ids: ::Array[String]) -> void + (?vector_store_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::Thread::ToolResources::file_search | OpenAI::BaseModel data diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 76bb1909..d430d1fa 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -64,21 +64,21 @@ module OpenAI def initialize: ( assistant_id: String, - instructions: String?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, - parallel_tool_calls: bool, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, - top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, - request_options: OpenAI::request_opts + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + ?parallel_tool_calls: bool, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Beta::thread_create_and_run_params @@ -113,9 +113,9 @@ module OpenAI def initialize: ( - messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], - metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources? + ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources? 
) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::thread @@ -145,8 +145,8 @@ module OpenAI ( content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, - attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, - metadata: OpenAI::Models::metadata? + ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, + ?metadata: OpenAI::Models::metadata? ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::message @@ -196,8 +196,8 @@ module OpenAI def initialize: ( - file_id: String, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::attachment @@ -217,7 +217,7 @@ module OpenAI attr_accessor type: :file_search def initialize: - (type: :file_search) -> void + (?type: :file_search) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search | OpenAI::BaseModel data @@ -252,8 +252,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::tool_resources @@ -270,7 +270,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -298,8 +298,8 @@ module OpenAI def initialize: ( - vector_store_ids: ::Array[String], - vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + ?vector_store_ids: ::Array[String], + ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::file_search @@ -330,9 +330,9 @@ module OpenAI def initialize: ( - chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, - file_ids: ::Array[String], - metadata: OpenAI::Models::metadata? + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata? 
) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::vector_store @@ -352,7 +352,7 @@ module OpenAI attr_accessor type: :auto def initialize: - (type: :auto) -> void + (?type: :auto) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto | OpenAI::BaseModel data @@ -375,7 +375,7 @@ module OpenAI def initialize: ( static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - type: :static + ?type: :static ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static @@ -437,8 +437,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::tool_resources @@ -455,7 +455,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -472,7 +472,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: - (vector_store_ids: ::Array[String]) -> void + (?vector_store_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::file_search | OpenAI::BaseModel data @@ -505,7 +505,7 @@ module OpenAI def initialize: ( type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, - last_messages: Integer? + ?last_messages: Integer? ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateAndRunParams::truncation_strategy diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 8de821f1..c4cf1e6a 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -25,10 +25,10 @@ module OpenAI def initialize: ( - messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, - request_options: OpenAI::request_opts + ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Beta::thread_create_params | OpenAI::BaseModel data @@ -57,8 +57,8 @@ module OpenAI ( content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, - attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, - metadata: OpenAI::Models::metadata? + ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, + ?metadata: OpenAI::Models::metadata? 
) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::message @@ -108,8 +108,8 @@ module OpenAI def initialize: ( - file_id: String, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::Message::attachment @@ -129,7 +129,7 @@ module OpenAI attr_accessor type: :file_search def initialize: - (type: :file_search) -> void + (?type: :file_search) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search | OpenAI::BaseModel data @@ -164,8 +164,8 @@ module OpenAI def initialize: ( - code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::tool_resources @@ -182,7 +182,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: - (file_ids: ::Array[String]) -> void + (?file_ids: ::Array[String]) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::code_interpreter | OpenAI::BaseModel data @@ -210,8 +210,8 @@ module OpenAI def initialize: ( - vector_store_ids: ::Array[String], - vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + ?vector_store_ids: ::Array[String], + ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] ) -> void | ( ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::file_search @@ -242,9 +242,9 @@ module OpenAI def initialize: ( - chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, - file_ids: ::Array[String], - metadata: OpenAI::Models::metadata? + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata? 
           ) -> void
           | (
             ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::vector_store
@@ -264,7 +264,7 @@ module OpenAI
         attr_accessor type: :auto

         def initialize:
-          (type: :auto) -> void
+          (?type: :auto) -> void
           | (
             ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto
             | OpenAI::BaseModel data
@@ -287,7 +287,7 @@ module OpenAI
         def initialize:
           (
             static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
-            type: :static
+            ?type: :static
           ) -> void
           | (
             ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static
diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs
index 42d8665e..0ae7f5b6 100644
--- a/sig/openai/models/beta/thread_delete_params.rbs
+++ b/sig/openai/models/beta/thread_delete_params.rbs
@@ -8,7 +8,7 @@ module OpenAI
         include OpenAI::RequestParameters

         def initialize:
-          (request_options: OpenAI::request_opts) -> void
+          (?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::thread_delete_params | OpenAI::BaseModel data
           ) -> void
diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs
index 137eb97d..6e32556e 100644
--- a/sig/openai/models/beta/thread_deleted.rbs
+++ b/sig/openai/models/beta/thread_deleted.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor object: :"thread.deleted"

         def initialize:
-          (id: String, deleted: bool, object: :"thread.deleted") -> void
+          (id: String, deleted: bool, ?object: :"thread.deleted") -> void
           | (
             ?OpenAI::Models::Beta::thread_deleted | OpenAI::BaseModel data
           ) -> void
diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs
index fd9a2212..3c42faa5 100644
--- a/sig/openai/models/beta/thread_retrieve_params.rbs
+++ b/sig/openai/models/beta/thread_retrieve_params.rbs
@@ -8,7 +8,7 @@ module OpenAI
         include OpenAI::RequestParameters

         def initialize:
-          (request_options: OpenAI::request_opts) -> void
+          (?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::thread_retrieve_params
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs
index 51deb8c1..6c3a4dc3 100644
--- a/sig/openai/models/beta/thread_stream_event.rbs
+++ b/sig/openai/models/beta/thread_stream_event.rbs
@@ -20,8 +20,8 @@ module OpenAI
         def initialize:
           (
             data: OpenAI::Models::Beta::Thread,
-            enabled: bool,
-            event: :"thread.created"
+            ?enabled: bool,
+            ?event: :"thread.created"
           ) -> void
           | (
             ?OpenAI::Models::Beta::thread_stream_event | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs
index 4462d12c..adfa8b00 100644
--- a/sig/openai/models/beta/thread_update_params.rbs
+++ b/sig/openai/models/beta/thread_update_params.rbs
@@ -18,9 +18,9 @@ module OpenAI

         def initialize:
           (
-            metadata: OpenAI::Models::metadata?,
-            tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?,
-            request_options: OpenAI::request_opts
+            ?metadata: OpenAI::Models::metadata?,
+            ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::thread_update_params | OpenAI::BaseModel data
@@ -49,8 +49,8 @@ module OpenAI

         def initialize:
           (
-            code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter,
-            file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch
+            ?code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter,
+            ?file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch
           ) -> void
           | (
             ?OpenAI::Models::Beta::ThreadUpdateParams::tool_resources
@@ -67,7 +67,7 @@ module OpenAI
         def file_ids=: (::Array[String]) -> ::Array[String]

         def initialize:
-          (file_ids: ::Array[String]) -> void
+          (?file_ids: ::Array[String]) -> void
           | (
             ?OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::code_interpreter
            | OpenAI::BaseModel data
@@ -84,7 +84,7 @@ module OpenAI
         def vector_store_ids=: (::Array[String]) -> ::Array[String]

         def initialize:
-          (vector_store_ids: ::Array[String]) -> void
+          (?vector_store_ids: ::Array[String]) -> void
           | (
             ?OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::file_search
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs
index 8c9eadaa..3f972d87 100644
--- a/sig/openai/models/beta/threads/file_citation_annotation.rbs
+++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs
@@ -28,7 +28,7 @@ module OpenAI
             file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation,
             start_index: Integer,
             text: String,
-            type: :file_citation
+            ?type: :file_citation
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::file_citation_annotation
diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs
index 933da760..72eff280 100644
--- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs
+++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs
@@ -38,11 +38,11 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            end_index: Integer,
-            file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation,
-            start_index: Integer,
-            text: String,
-            type: :file_citation
+            ?end_index: Integer,
+            ?file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation,
+            ?start_index: Integer,
+            ?text: String,
+            ?type: :file_citation
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::file_citation_delta_annotation
@@ -63,7 +63,7 @@ module OpenAI
         def quote=: (String) -> String

         def initialize:
-          (file_id: String, quote: String) -> void
+          (?file_id: String, ?quote: String) -> void
           | (
             ?OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::file_citation
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs
index 7c590adb..c4031231 100644
--- a/sig/openai/models/beta/threads/file_path_annotation.rbs
+++ b/sig/openai/models/beta/threads/file_path_annotation.rbs
@@ -28,7 +28,7 @@ module OpenAI
             file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath,
             start_index: Integer,
             text: String,
-            type: :file_path
+            ?type: :file_path
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::file_path_annotation
diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs
index 536a311d..59a7e537 100644
--- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs
+++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs
@@ -38,11 +38,11 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            end_index: Integer,
-            file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath,
-            start_index: Integer,
-            text: String,
-            type: :file_path
+            ?end_index: Integer,
+            ?file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath,
+            ?start_index: Integer,
+            ?text: String,
+            ?type: :file_path
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::file_path_delta_annotation
@@ -59,7 +59,7 @@ module OpenAI
         def file_id=: (String) -> String

         def initialize:
-          (file_id: String) -> void
+          (?file_id: String) -> void
           | (
             ?OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::file_path
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs
index 2d2fb9e4..d2f16077 100644
--- a/sig/openai/models/beta/threads/image_file.rbs
+++ b/sig/openai/models/beta/threads/image_file.rbs
@@ -20,7 +20,7 @@ module OpenAI
         def initialize:
           (
             file_id: String,
-            detail: OpenAI::Models::Beta::Threads::ImageFile::detail
+            ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_file
diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs
index 5267e2c7..1783edd8 100644
--- a/sig/openai/models/beta/threads/image_file_content_block.rbs
+++ b/sig/openai/models/beta/threads/image_file_content_block.rbs
@@ -16,7 +16,7 @@ module OpenAI
         def initialize:
           (
             image_file: OpenAI::Models::Beta::Threads::ImageFile,
-            type: :image_file
+            ?type: :image_file
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_file_content_block
diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs
index bf715b22..2a16093f 100644
--- a/sig/openai/models/beta/threads/image_file_delta.rbs
+++ b/sig/openai/models/beta/threads/image_file_delta.rbs
@@ -21,8 +21,8 @@ module OpenAI

         def initialize:
           (
-            detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail,
-            file_id: String
+            ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail,
+            ?file_id: String
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_file_delta
diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs
index 1dc26a44..e48b002e 100644
--- a/sig/openai/models/beta/threads/image_file_delta_block.rbs
+++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            image_file: OpenAI::Models::Beta::Threads::ImageFileDelta,
-            type: :image_file
+            ?image_file: OpenAI::Models::Beta::Threads::ImageFileDelta,
+            ?type: :image_file
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_file_delta_block
diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs
index f786387e..4b135a8a 100644
--- a/sig/openai/models/beta/threads/image_url.rbs
+++ b/sig/openai/models/beta/threads/image_url.rbs
@@ -20,7 +20,7 @@ module OpenAI
         def initialize:
           (
             url: String,
-            detail: OpenAI::Models::Beta::Threads::ImageURL::detail
+            ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_url | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs
index 13d90d01..fda56960 100644
--- a/sig/openai/models/beta/threads/image_url_content_block.rbs
+++ b/sig/openai/models/beta/threads/image_url_content_block.rbs
@@ -16,7 +16,7 @@ module OpenAI
         def initialize:
           (
             image_url: OpenAI::Models::Beta::Threads::ImageURL,
-            type: :image_url
+            ?type: :image_url
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_url_content_block
diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs
index 6f5c92ca..816b5421 100644
--- a/sig/openai/models/beta/threads/image_url_delta.rbs
+++ b/sig/openai/models/beta/threads/image_url_delta.rbs
@@ -21,8 +21,8 @@ module OpenAI

         def initialize:
           (
-            detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail,
-            url: String
+            ?detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail,
+            ?url: String
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_url_delta
diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs
index 37b1861a..4e83b0df 100644
--- a/sig/openai/models/beta/threads/image_url_delta_block.rbs
+++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            image_url: OpenAI::Models::Beta::Threads::ImageURLDelta,
-            type: :image_url
+            ?image_url: OpenAI::Models::Beta::Threads::ImageURLDelta,
+            ?type: :image_url
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::image_url_delta_block
diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs
index 40d2be8d..18c52a5c 100644
--- a/sig/openai/models/beta/threads/message.rbs
+++ b/sig/openai/models/beta/threads/message.rbs
@@ -64,7 +64,7 @@ module OpenAI
             run_id: String?,
             status: OpenAI::Models::Beta::Threads::Message::status,
             thread_id: String,
-            object: :"thread.message"
+            ?object: :"thread.message"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message | OpenAI::BaseModel data
@@ -91,8 +91,8 @@ module OpenAI

         def initialize:
           (
-            file_id: String,
-            tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool]
+            ?file_id: String,
+            ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Message::attachment
@@ -113,7 +113,7 @@ module OpenAI
         attr_accessor type: :file_search

         def initialize:
-          (type: :file_search) -> void
+          (?type: :file_search) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs
index 9e3451c3..fe1490c3 100644
--- a/sig/openai/models/beta/threads/message_create_params.rbs
+++ b/sig/openai/models/beta/threads/message_create_params.rbs
@@ -27,9 +27,9 @@ module OpenAI
           (
             content: OpenAI::Models::Beta::Threads::MessageCreateParams::content,
             role: OpenAI::Models::Beta::Threads::MessageCreateParams::role,
-            attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?,
-            metadata: OpenAI::Models::metadata?,
-            request_options: OpenAI::request_opts
+            ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_create_params
@@ -79,8 +79,8 @@ module OpenAI

         def initialize:
           (
-            file_id: String,
-            tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool]
+            ?file_id: String,
+            ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::MessageCreateParams::attachment
@@ -100,7 +100,7 @@ module OpenAI
         attr_accessor type: :file_search

         def initialize:
-          (type: :file_search) -> void
+          (?type: :file_search) -> void
           | (
             ?OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs
index 0b6c4e94..8723d195 100644
--- a/sig/openai/models/beta/threads/message_delete_params.rbs
+++ b/sig/openai/models/beta/threads/message_delete_params.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor thread_id: String

         def initialize:
-          (thread_id: String, request_options: OpenAI::request_opts) -> void
+          (thread_id: String, ?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_delete_params
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/message_deleted.rbs b/sig/openai/models/beta/threads/message_deleted.rbs
index 58941655..f8d6ebeb 100644
--- a/sig/openai/models/beta/threads/message_deleted.rbs
+++ b/sig/openai/models/beta/threads/message_deleted.rbs
@@ -16,7 +16,7 @@ module OpenAI
           (
             id: String,
             deleted: bool,
-            object: :"thread.message.deleted"
+            ?object: :"thread.message.deleted"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_deleted
diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs
index 2c4dd70b..fe36e15a 100644
--- a/sig/openai/models/beta/threads/message_delta.rbs
+++ b/sig/openai/models/beta/threads/message_delta.rbs
@@ -23,8 +23,8 @@ module OpenAI

         def initialize:
           (
-            content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta],
-            role: OpenAI::Models::Beta::Threads::MessageDelta::role
+            ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta],
+            ?role: OpenAI::Models::Beta::Threads::MessageDelta::role
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_delta
diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs
index f682114b..44b036a8 100644
--- a/sig/openai/models/beta/threads/message_delta_event.rbs
+++ b/sig/openai/models/beta/threads/message_delta_event.rbs
@@ -20,7 +20,7 @@ module OpenAI
           (
             id: String,
             delta: OpenAI::Models::Beta::Threads::MessageDelta,
-            object: :"thread.message.delta"
+            ?object: :"thread.message.delta"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_delta_event
diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs
index ec8567b0..623c5e48 100644
--- a/sig/openai/models/beta/threads/message_list_params.rbs
+++ b/sig/openai/models/beta/threads/message_list_params.rbs
@@ -40,12 +40,12 @@ module OpenAI

         def initialize:
           (
-            after: String,
-            before: String,
-            limit: Integer,
-            order: OpenAI::Models::Beta::Threads::MessageListParams::order,
-            run_id: String,
-            request_options: OpenAI::request_opts
+            ?after: String,
+            ?before: String,
+            ?limit: Integer,
+            ?order: OpenAI::Models::Beta::Threads::MessageListParams::order,
+            ?run_id: String,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_list_params
diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs
index f5475f00..e0e6d123 100644
--- a/sig/openai/models/beta/threads/message_retrieve_params.rbs
+++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor thread_id: String

         def initialize:
-          (thread_id: String, request_options: OpenAI::request_opts) -> void
+          (thread_id: String, ?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_retrieve_params
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs
index 5fb9f563..a5908b4b 100644
--- a/sig/openai/models/beta/threads/message_update_params.rbs
+++ b/sig/openai/models/beta/threads/message_update_params.rbs
@@ -17,8 +17,8 @@ module OpenAI
         def initialize:
           (
             thread_id: String,
-            metadata: OpenAI::Models::metadata?,
-            request_options: OpenAI::request_opts
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::message_update_params
diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs
index b44bf064..f9a5be4f 100644
--- a/sig/openai/models/beta/threads/refusal_content_block.rbs
+++ b/sig/openai/models/beta/threads/refusal_content_block.rbs
@@ -10,7 +10,7 @@ module OpenAI
         attr_accessor type: :refusal

         def initialize:
-          (refusal: String, type: :refusal) -> void
+          (refusal: String, ?type: :refusal) -> void
           | (
             ?OpenAI::Models::Beta::Threads::refusal_content_block
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs
index eae553e9..50a977cf 100644
--- a/sig/openai/models/beta/threads/refusal_delta_block.rbs
+++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs
@@ -15,7 +15,7 @@ module OpenAI
         def refusal=: (String) -> String

         def initialize:
-          (index: Integer, refusal: String, type: :refusal) -> void
+          (index: Integer, ?refusal: String, ?type: :refusal) -> void
           | (
             ?OpenAI::Models::Beta::Threads::refusal_delta_block
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs
index 3b4334bf..2d95c46d 100644
--- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs
+++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs
@@ -20,7 +20,7 @@ module OpenAI
           (
             id: String,
             function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function,
-            type: :function
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::required_action_function_tool_call
diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs
index fc4b4d57..71146021 100644
--- a/sig/openai/models/beta/threads/run.rbs
+++ b/sig/openai/models/beta/threads/run.rbs
@@ -114,9 +114,9 @@ module OpenAI
             tools: ::Array[OpenAI::Models::Beta::assistant_tool],
             truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy?,
             usage: OpenAI::Models::Beta::Threads::Run::Usage?,
-            temperature: Float?,
-            top_p: Float?,
-            object: :"thread.run"
+            ?temperature: Float?,
+            ?top_p: Float?,
+            ?object: :"thread.run"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run | OpenAI::BaseModel data
@@ -138,7 +138,7 @@ module OpenAI

         def initialize:
           (
-            reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason
+            ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Run::incomplete_details
@@ -205,7 +205,7 @@ module OpenAI
         def initialize:
           (
             submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs,
-            type: :submit_tool_outputs
+            ?type: :submit_tool_outputs
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Run::required_action
@@ -249,7 +249,7 @@ module OpenAI
         def initialize:
           (
             type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_,
-            last_messages: Integer?
+            ?last_messages: Integer?
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Run::truncation_strategy
diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs
index 34ea5c7f..b8538449 100644
--- a/sig/openai/models/beta/threads/run_cancel_params.rbs
+++ b/sig/openai/models/beta/threads/run_cancel_params.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor thread_id: String

         def initialize:
-          (thread_id: String, request_options: OpenAI::request_opts) -> void
+          (thread_id: String, ?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_cancel_params
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs
index 09105c8f..6948072a 100644
--- a/sig/openai/models/beta/threads/run_create_params.rbs
+++ b/sig/openai/models/beta/threads/run_create_params.rbs
@@ -71,23 +71,23 @@ module OpenAI
         def initialize:
           (
             assistant_id: String,
-            include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-            additional_instructions: String?,
-            additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
-            instructions: String?,
-            max_completion_tokens: Integer?,
-            max_prompt_tokens: Integer?,
-            metadata: OpenAI::Models::metadata?,
-            model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
-            parallel_tool_calls: bool,
-            reasoning_effort: OpenAI::Models::reasoning_effort?,
-            response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            temperature: Float?,
-            tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-            tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
-            top_p: Float?,
-            truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
-            request_options: OpenAI::request_opts
+            ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+            ?additional_instructions: String?,
+            ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
+            ?instructions: String?,
+            ?max_completion_tokens: Integer?,
+            ?max_prompt_tokens: Integer?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
+            ?parallel_tool_calls: bool,
+            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+            ?temperature: Float?,
+            ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
+            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
+            ?top_p: Float?,
+            ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_create_params
@@ -117,8 +117,8 @@ module OpenAI
           (
             content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content,
             role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role,
-            attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?,
-            metadata: OpenAI::Models::metadata?
+            ?attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?,
+            ?metadata: OpenAI::Models::metadata?
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::RunCreateParams::additional_message
@@ -168,8 +168,8 @@ module OpenAI

         def initialize:
           (
-            file_id: String,
-            tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]
+            ?file_id: String,
+            ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::attachment
@@ -189,7 +189,7 @@ module OpenAI
         attr_accessor type: :file_search

         def initialize:
-          (type: :file_search) -> void
+          (?type: :file_search) -> void
           | (
             ?OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search
            | OpenAI::BaseModel data
@@ -223,7 +223,7 @@ module OpenAI
         def initialize:
           (
             type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_,
-            last_messages: Integer?
+            ?last_messages: Integer?
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::RunCreateParams::truncation_strategy
diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs
index ba37749f..48095b4f 100644
--- a/sig/openai/models/beta/threads/run_list_params.rbs
+++ b/sig/openai/models/beta/threads/run_list_params.rbs
@@ -35,11 +35,11 @@ module OpenAI

         def initialize:
           (
-            after: String,
-            before: String,
-            limit: Integer,
-            order: OpenAI::Models::Beta::Threads::RunListParams::order,
-            request_options: OpenAI::request_opts
+            ?after: String,
+            ?before: String,
+            ?limit: Integer,
+            ?order: OpenAI::Models::Beta::Threads::RunListParams::order,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_list_params
diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs
index 44423e27..a3512fd8 100644
--- a/sig/openai/models/beta/threads/run_retrieve_params.rbs
+++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor thread_id: String

         def initialize:
-          (thread_id: String, request_options: OpenAI::request_opts) -> void
+          (thread_id: String, ?request_options: OpenAI::request_opts) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_retrieve_params
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs
index d044c675..a1c6838d 100644
--- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs
+++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs
@@ -21,7 +21,7 @@ module OpenAI
           (
             thread_id: String,
             tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
-            request_options: OpenAI::request_opts
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_submit_tool_outputs_params
@@ -42,7 +42,7 @@ module OpenAI
         def tool_call_id=: (String) -> String

         def initialize:
-          (output: String, tool_call_id: String) -> void
+          (?output: String, ?tool_call_id: String) -> void
           | (
             ?OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::tool_output
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs
index ad8a6e84..80b6bebd 100644
--- a/sig/openai/models/beta/threads/run_update_params.rbs
+++ b/sig/openai/models/beta/threads/run_update_params.rbs
@@ -17,8 +17,8 @@ module OpenAI
         def initialize:
           (
             thread_id: String,
-            metadata: OpenAI::Models::metadata?,
-            request_options: OpenAI::request_opts
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::run_update_params
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs
index d62fbb78..70c44092 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs
@@ -16,7 +16,7 @@ module OpenAI
         def logs=: (String) -> String

         def initialize:
-          (index: Integer, logs: String, type: :logs) -> void
+          (index: Integer, ?logs: String, ?type: :logs) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_logs
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs
index 2560a0c4..bbb9b5c9 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs
@@ -24,8 +24,8 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image,
-            type: :image
+            ?image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image,
+            ?type: :image
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_output_image
@@ -42,7 +42,7 @@ module OpenAI
         def file_id=: (String) -> String

         def initialize:
-          (file_id: String) -> void
+          (?file_id: String) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::image
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
index 41731b5a..4d13d7ff 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
@@ -21,7 +21,7 @@ module OpenAI
           (
             id: String,
             code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter,
-            type: :code_interpreter
+            ?type: :code_interpreter
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call
@@ -66,7 +66,7 @@ module OpenAI
         attr_accessor type: :logs

         def initialize:
-          (logs: String, type: :logs) -> void
+          (logs: String, ?type: :logs) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::logs
            | OpenAI::BaseModel data
@@ -89,7 +89,7 @@ module OpenAI
         def initialize:
           (
             image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image,
-            type: :image
+            ?type: :image
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::image
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
index 7a808c31..72b4b8fe 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
@@ -29,9 +29,9 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            id: String,
-            code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter,
-            type: :code_interpreter
+            ?id: String,
+            ?code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter,
+            ?type: :code_interpreter
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call_delta
@@ -59,8 +59,8 @@ module OpenAI

         def initialize:
           (
-            input: String,
-            outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]
+            ?input: String,
+            ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::code_interpreter
diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
index e8476225..55cf4877 100644
--- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
@@ -21,7 +21,7 @@ module OpenAI
           (
             id: String,
             file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch,
-            type: :file_search
+            ?type: :file_search
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::file_search_tool_call
@@ -51,8 +51,8 @@ module OpenAI

         def initialize:
           (
-            ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions,
-            results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]
+            ?ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions,
+            ?results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::file_search
@@ -120,7 +120,7 @@ module OpenAI
             file_id: String,
             file_name: String,
             score: Float,
-            content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]
+            ?content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::result
@@ -148,8 +148,8 @@ module OpenAI

         def initialize:
           (
-            text: String,
-            type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_
+            ?text: String,
+            ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::content
diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs
index 91172bba..ecc227de 100644
--- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs
@@ -21,8 +21,8 @@ module OpenAI
           (
             file_search: top,
             index: Integer,
-            id: String,
-            type: :file_search
+            ?id: String,
+            ?type: :file_search
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::file_search_tool_call_delta
diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs
index d3c415c2..5c442503 100644
--- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs
@@ -21,7 +21,7 @@ module OpenAI
           (
             id: String,
             function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function,
-            type: :function
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::function_tool_call
diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs
index 8a6261a4..bb2f4482 100644
--- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs
@@ -29,9 +29,9 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            id: String,
-            function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function,
-            type: :function
+            ?id: String,
+            ?function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function,
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::function_tool_call_delta
@@ -54,7 +54,7 @@ module OpenAI
         attr_accessor output: String?

         def initialize:
-          (arguments: String, name: String, output: String?) -> void
+          (?arguments: String, ?name: String, ?output: String?) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::function
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs
index 5eb94efe..f84bf9e6 100644
--- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs
+++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs
@@ -17,7 +17,7 @@ module OpenAI
         def initialize:
           (
             message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation,
-            type: :message_creation
+            ?type: :message_creation
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::message_creation_step_details
diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs
index af3c7db7..fc8b08c2 100644
--- a/sig/openai/models/beta/threads/runs/run_step.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step.rbs
@@ -76,7 +76,7 @@ module OpenAI
             thread_id: String,
             type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_,
             usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage?,
-            object: :"thread.run.step"
+            ?object: :"thread.run.step"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::run_step
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
index 9c5a49d3..ae21101e 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
@@ -20,7 +20,7 @@ module OpenAI
         def initialize:
           (
-            step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details
+            ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::run_step_delta
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
index 717849fb..1154b691 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
@@ -24,7 +24,7 @@ module OpenAI
           (
             id: String,
             delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta,
-            object: :"thread.run.step.delta"
+            ?object: :"thread.run.step.delta"
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::run_step_delta_event
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
index cd740a0f..6ef0818b 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
@@ -23,8 +23,8 @@ module OpenAI

         def initialize:
           (
-            message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation,
-            type: :message_creation
+            ?message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation,
+            ?type: :message_creation
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::run_step_delta_message_delta
@@ -41,7 +41,7 @@ module OpenAI
         def message_id=: (String) -> String

         def initialize:
-          (message_id: String) -> void
+          (?message_id: String) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::message_creation
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs
index 1ff2c6fc..b2bdafc6 100644
--- a/sig/openai/models/beta/threads/runs/step_list_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs
@@ -47,12 +47,12 @@ module OpenAI
         def initialize:
           (
             thread_id: String,
-            after: String,
-            before: String,
-            include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-            limit: Integer,
-            order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order,
-            request_options: OpenAI::request_opts
+            ?after: String,
+            ?before: String,
+            ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+            ?limit: Integer,
+            ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::step_list_params
diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
index cfeb8e95..937742d8 100644
--- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
@@ -29,8 +29,8 @@ module OpenAI
           (
             thread_id: String,
             run_id: String,
-            include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-            request_options: OpenAI::request_opts
+            ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::step_retrieve_params
diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
index 68e23f3a..275ec6b1 100644
--- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
@@ -20,8 +20,8 @@ module OpenAI

         def initialize:
           (
-            tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta],
-            type: :tool_calls
+            ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta],
+            ?type: :tool_calls
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::tool_call_delta_object
diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
index 156c02c6..20aed347 100644
--- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
@@ -17,7 +17,7 @@ module OpenAI
         def initialize:
           (
             tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call],
-            type: :tool_calls
+            ?type: :tool_calls
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::Runs::tool_calls_step_details
diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs
index 57b71af4..5800b03c 100644
--- a/sig/openai/models/beta/threads/text_content_block.rbs
+++ b/sig/openai/models/beta/threads/text_content_block.rbs
@@ -11,7 +11,7 @@ module OpenAI
         attr_accessor type: :text

         def initialize:
-          (text: OpenAI::Models::Beta::Threads::Text, type: :text) -> void
+          (text: OpenAI::Models::Beta::Threads::Text, ?type: :text) -> void
           | (
             ?OpenAI::Models::Beta::Threads::text_content_block
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs
index 669f9eed..8f3b6fc8 100644
--- a/sig/openai/models/beta/threads/text_content_block_param.rbs
+++ b/sig/openai/models/beta/threads/text_content_block_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
         attr_accessor type: :text

         def initialize:
-          (text: String, type: :text) -> void
+          (text: String, ?type: :text) -> void
           | (
             ?OpenAI::Models::Beta::Threads::text_content_block_param
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs
index 9156cf24..64131287 100644
--- a/sig/openai/models/beta/threads/text_delta.rbs
+++ b/sig/openai/models/beta/threads/text_delta.rbs
@@ -21,8 +21,8 @@ module OpenAI

         def initialize:
           (
-            annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta],
-            value: String
+            ?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta],
+            ?value: String
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::text_delta
diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs
index 263bd619..e026e18f 100644
--- a/sig/openai/models/beta/threads/text_delta_block.rbs
+++ b/sig/openai/models/beta/threads/text_delta_block.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            text: OpenAI::Models::Beta::Threads::TextDelta,
-            type: :text
+            ?text: OpenAI::Models::Beta::Threads::TextDelta,
+            ?type: :text
           ) -> void
           | (
             ?OpenAI::Models::Beta::Threads::text_delta_block
diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs
index b0f0b6b3..5a60c3e4 100644
--- a/sig/openai/models/chat/chat_completion.rbs
+++ b/sig/openai/models/chat/chat_completion.rbs
@@ -45,10 +45,10 @@ module OpenAI
             choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice],
             created: Integer,
             model: String,
-            service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?,
-            system_fingerprint: String,
-            usage: OpenAI::Models::CompletionUsage,
-            object: :"chat.completion"
+            ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?,
+            ?system_fingerprint: String,
+            ?usage: OpenAI::Models::CompletionUsage,
+            ?object: :"chat.completion"
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
index f481adde..e1cfebf4 100644
--- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
@@ -38,13 +38,13 @@ module OpenAI

         def initialize:
           (
-            audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?,
-            content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?,
-            function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?,
-            name: String,
-            refusal: String?,
-            tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
-            role: :assistant
+            ?audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?,
+            ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?,
+            ?function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?,
+            ?name: String,
+            ?refusal: String?,
+            ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
+            ?role: :assistant
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_assistant_message_param
diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs
index 0e9780de..5874643a 100644
--- a/sig/openai/models/chat/chat_completion_chunk.rbs
+++ b/sig/openai/models/chat/chat_completion_chunk.rbs
@@ -45,10 +45,10 @@ module OpenAI
             choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice],
             created: Integer,
             model: String,
-            service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?,
-            system_fingerprint: String,
-            usage: OpenAI::Models::CompletionUsage,
-            object: :"chat.completion.chunk"
+            ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?,
+            ?system_fingerprint: String,
+            ?usage: OpenAI::Models::CompletionUsage,
+            ?object: :"chat.completion.chunk"
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_chunk
@@ -79,7 +79,7 @@ module OpenAI
             delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta,
             finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?,
             index: Integer,
-            logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs?
+            ?logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs?
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionChunk::choice
@@ -122,11 +122,11 @@ module OpenAI

         def initialize:
           (
-            content: String?,
-            function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall,
-            refusal: String?,
-            role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role,
-            tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]
+            ?content: String?,
+            ?function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall,
+            ?refusal: String?,
+            ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role,
+            ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::delta
@@ -147,7 +147,7 @@ module OpenAI
         def name=: (String) -> String

         def initialize:
-          (arguments: String, name: String) -> void
+          (?arguments: String, ?name: String) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::function_call
            | OpenAI::BaseModel data
@@ -198,9 +198,9 @@ module OpenAI
         def initialize:
           (
             index: Integer,
-            id: String,
-            function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function,
-            type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_
+            ?id: String,
+            ?function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function,
+            ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::tool_call
@@ -221,7 +221,7 @@ module OpenAI
         def name=: (String) -> String

         def initialize:
-          (arguments: String, name: String) -> void
+          (?arguments: String, ?name: String) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::function
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs
index 13220e58..aff3d3a0 100644
--- a/sig/openai/models/chat/chat_completion_content_part.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part.rbs
@@ -25,7 +25,7 @@ module OpenAI
         def initialize:
           (
             file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File,
-            type: :file
+            ?type: :file
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionContentPart::file
@@ -50,7 +50,7 @@ module OpenAI
         def filename=: (String) -> String

         def initialize:
-          (file_data: String, file_id: String, filename: String) -> void
+          (?file_data: String, ?file_id: String, ?filename: String) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionContentPart::File::file
            | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs
index df324202..8576b8d1 100644
--- a/sig/openai/models/chat/chat_completion_content_part_image.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs
@@ -18,7 +18,7 @@ module OpenAI
         def initialize:
           (
             image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL,
-            type: :image_url
+            ?type: :image_url
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_content_part_image
@@ -45,7 +45,7 @@ module OpenAI
         def initialize:
           (
             url: String,
-            detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
+            ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionContentPartImage::image_url
diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
index 5bec0fc3..d750e4ff 100644
--- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
@@ -18,7 +18,7 @@ module OpenAI
         def initialize:
           (
             input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio,
-            type: :input_audio
+            ?type: :input_audio
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_content_part_input_audio
diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
index 98e94526..3d783701 100644
--- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
@@ -13,7 +13,7 @@ module OpenAI
         attr_accessor type: :refusal

         def initialize:
-          (refusal: String, type: :refusal) -> void
+          (refusal: String, ?type: :refusal) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_content_part_refusal
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs
index 36f4b5a1..9f35ab25 100644
--- a/sig/openai/models/chat/chat_completion_content_part_text.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs
@@ -12,7 +12,7 @@ module OpenAI
         attr_accessor type: :text

         def initialize:
-          (text: String, type: :text) -> void
+          (text: String, ?type: :text) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_content_part_text
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs
index 824a0a21..bcb4421a 100644
--- a/sig/openai/models/chat/chat_completion_deleted.rbs
+++ b/sig/openai/models/chat/chat_completion_deleted.rbs
@@ -18,7 +18,7 @@ module OpenAI
           (
             id: String,
             deleted: bool,
-            object: :"chat.completion.deleted"
+            ?object: :"chat.completion.deleted"
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_deleted
diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
index bcc12871..9a009158 100644
--- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content,
-            name: String,
-            role: :developer
+            ?name: String,
+            ?role: :developer
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_developer_message_param
diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs
index dca0f275..718c3dca 100644
--- a/sig/openai/models/chat/chat_completion_function_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs
@@ -15,7 +15,7 @@ module OpenAI
         attr_accessor role: :function

         def initialize:
-          (content: String?, name: String, role: :function) -> void
+          (content: String?, name: String, ?role: :function) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_function_message_param
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs
index 16b8a72a..4323b546 100644
--- a/sig/openai/models/chat/chat_completion_message.rbs
+++ b/sig/openai/models/chat/chat_completion_message.rbs
@@ -46,11 +46,11 @@ module OpenAI
           (
             content: String?,
             refusal: String?,
-            annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation],
-            audio: OpenAI::Models::Chat::ChatCompletionAudio?,
-            function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall,
-            tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
-            role: :assistant
+            ?annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation],
+            ?audio: OpenAI::Models::Chat::ChatCompletionAudio?,
+            ?function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall,
+            ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
+            ?role: :assistant
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_message
@@ -73,7 +73,7 @@ module OpenAI
         def initialize:
           (
             url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation,
-            type: :url_citation
+            ?type: :url_citation
           ) -> void
           | (
             ?OpenAI::Models::Chat::ChatCompletionMessage::annotation
diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
index 797c8c1c..e19eb318 100644
--- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs
+++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
@@ -22,7 +22,7 @@ module OpenAI
           (
             id: String,
             function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function,
-            type: :function
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_message_tool_call
diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
index e71c5191..80d234ef 100644
--- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
+++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
@@ -18,7 +18,7 @@ module OpenAI
         def initialize:
           (
             function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function,
-            type: :function
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_named_tool_choice
diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs
index c04d1cc4..d1006f45 100644
--- a/sig/openai/models/chat/chat_completion_prediction_content.rbs
+++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs
@@ -18,7 +18,7 @@ module OpenAI
         def initialize:
           (
             content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content,
-            type: :content
+            ?type: :content
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_prediction_content
diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs
index 8c4502bd..3bb7af41 100644
--- a/sig/openai/models/chat/chat_completion_stream_options.rbs
+++ b/sig/openai/models/chat/chat_completion_stream_options.rbs
@@ -12,7 +12,7 @@ module OpenAI
         def include_usage=: (bool) -> bool

         def initialize:
-          (include_usage: bool) -> void
+          (?include_usage: bool) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_stream_options
             | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs
index 7501e229..24948420 100644
--- a/sig/openai/models/chat/chat_completion_system_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content,
-            name: String,
-            role: :system
+            ?name: String,
+            ?role: :system
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_system_message_param
diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs
index 6dc1b119..3090dfae 100644
--- a/sig/openai/models/chat/chat_completion_tool.rbs
+++ b/sig/openai/models/chat/chat_completion_tool.rbs
@@ -15,7 +15,7 @@ module OpenAI
         def initialize:
           (
             function: OpenAI::Models::FunctionDefinition,
-            type: :function
+            ?type: :function
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_tool | OpenAI::BaseModel data
diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
index 90f917db..91d693d7 100644
--- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
@@ -22,7 +22,7 @@ module OpenAI
           (
             content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content,
             tool_call_id: String,
-            role: :tool
+            ?role: :tool
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_tool_message_param
diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs
index 75491108..3f5d1a3f 100644
--- a/sig/openai/models/chat/chat_completion_user_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs
@@ -23,8 +23,8 @@ module OpenAI
         def initialize:
           (
             content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content,
-            name: String,
-            role: :user
+            ?name: String,
+            ?role: :user
           ) -> void
           | (
             ?OpenAI::Models::Chat::chat_completion_user_message_param
diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs
index 17fd54d4..ce23df92 100644
--- a/sig/openai/models/chat/completion_create_params.rbs
+++ b/sig/openai/models/chat/completion_create_params.rbs
@@ -132,35 +132,35 @@ module OpenAI
           (
             messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
             model: OpenAI::Models::Chat::CompletionCreateParams::model,
-            audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
-            frequency_penalty: Float?,
-            function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-            functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
-            logit_bias: ::Hash[Symbol, Integer]?,
-            logprobs: bool?,
-            max_completion_tokens: Integer?,
-            max_tokens: Integer?,
-            metadata: OpenAI::Models::metadata?,
-            modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
-            n: Integer?,
-            parallel_tool_calls: bool,
-            prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
-            presence_penalty: Float?,
-            reasoning_effort: OpenAI::Models::reasoning_effort?,
-            response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
-            seed: Integer?,
-            service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
-            stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
-            store: bool?,
-            stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
-            temperature: Float?,
-            tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-            tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
-            top_logprobs: Integer?,
-            top_p: Float?,
-            user: String,
-            web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
-            request_options: OpenAI::request_opts
+            ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+            ?frequency_penalty: Float?,
+            ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
+            ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+            ?logit_bias: ::Hash[Symbol, Integer]?,
+            ?logprobs: bool?,
+            ?max_completion_tokens: Integer?,
+            ?max_tokens: Integer?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+            ?n: Integer?,
+            ?parallel_tool_calls: bool,
+            ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+            ?presence_penalty: Float?,
+            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+            ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+            ?seed: Integer?,
+            ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
+            ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
+            ?store: bool?,
+            ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+            ?temperature: Float?,
+            ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
+            ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+            ?top_logprobs: Integer?,
+            ?top_p: Float?,
+            ?user: String,
+            ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+            ?request_options: OpenAI::request_opts
           ) -> void
           | (
             ?OpenAI::Models::Chat::completion_create_params
@@ -215,8 +215,8 @@ module OpenAI
         def initialize:
           (
             name: String,
-            description: String,
-            parameters: OpenAI::Models::function_parameters
+            ?description: String,
+            ?parameters: OpenAI::Models::function_parameters
           ) -> void
           | (
             ?OpenAI::Models::Chat::CompletionCreateParams::function
@@ -280,8 +280,8 @@ module OpenAI

         def initialize:
           (
-            search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
-            user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
+            ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
+            ?user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
) -> void | ( ?OpenAI::Models::Chat::CompletionCreateParams::web_search_options @@ -314,7 +314,7 @@ module OpenAI def initialize: ( approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - type: :approximate + ?type: :approximate ) -> void | ( ?OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::user_location @@ -350,10 +350,10 @@ module OpenAI def initialize: ( - city: String, - country: String, - region: String, - timezone: String + ?city: String, + ?country: String, + ?region: String, + ?timezone: String ) -> void | ( ?OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::approximate diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs index ea7b5315..c4a285ad 100644 --- a/sig/openai/models/chat/completion_delete_params.rbs +++ b/sig/openai/models/chat/completion_delete_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Chat::completion_delete_params | OpenAI::BaseModel data diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index b135cfcf..38d48801 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -37,12 +37,12 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - metadata: OpenAI::Models::metadata?, - model: String, - order: OpenAI::Models::Chat::CompletionListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?metadata: OpenAI::Models::metadata?, + ?model: String, + ?order: OpenAI::Models::Chat::CompletionListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Chat::completion_list_params diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs index e29df6fe..96a62d84 100644 --- a/sig/openai/models/chat/completion_retrieve_params.rbs +++ b/sig/openai/models/chat/completion_retrieve_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Chat::completion_retrieve_params | OpenAI::BaseModel data diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs index 7551639e..a5e3906e 100644 --- a/sig/openai/models/chat/completion_update_params.rbs +++ b/sig/openai/models/chat/completion_update_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Chat::completion_update_params diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 232ef376..ff666bc2 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -30,10 +30,10 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?order: 
OpenAI::Models::Chat::Completions::MessageListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Chat::Completions::message_list_params diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs index 4394b753..13acceee 100644 --- a/sig/openai/models/completion.rbs +++ b/sig/openai/models/completion.rbs @@ -38,9 +38,9 @@ module OpenAI choices: ::Array[OpenAI::Models::CompletionChoice], created: Integer, model: String, - system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage, - object: :text_completion + ?system_fingerprint: String, + ?usage: OpenAI::Models::CompletionUsage, + ?object: :text_completion ) -> void | (?OpenAI::Models::completion | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs index 847712cf..e08de29b 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -67,10 +67,10 @@ module OpenAI def initialize: ( - text_offset: ::Array[Integer], - token_logprobs: ::Array[Float], - tokens: ::Array[String], - top_logprobs: ::Array[::Hash[Symbol, Float]] + ?text_offset: ::Array[Integer], + ?token_logprobs: ::Array[Float], + ?tokens: ::Array[String], + ?top_logprobs: ::Array[::Hash[Symbol, Float]] ) -> void | ( ?OpenAI::Models::CompletionChoice::logprobs | OpenAI::BaseModel data diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 1b3c7810..a860e0b1 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -66,22 +66,22 @@ module OpenAI ( model: OpenAI::Models::CompletionCreateParams::model, prompt: OpenAI::Models::CompletionCreateParams::prompt?, - best_of: Integer?, - echo: bool?, - frequency_penalty: Float?, - logit_bias: ::Hash[Symbol, Integer]?, - logprobs: Integer?, - max_tokens: Integer?, - n: Integer?, - presence_penalty: Float?, - seed: Integer?, - stop: OpenAI::Models::CompletionCreateParams::stop?, - stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - suffix: String?, - temperature: Float?, - top_p: Float?, - user: String, - request_options: OpenAI::request_opts + ?best_of: Integer?, + ?echo: bool?, + ?frequency_penalty: Float?, + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: Integer?, + ?max_tokens: Integer?, + ?n: Integer?, + ?presence_penalty: Float?, + ?seed: Integer?, + ?stop: OpenAI::Models::CompletionCreateParams::stop?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?suffix: String?, + ?temperature: Float?, + ?top_p: Float?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::completion_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs index 3c334ef6..882a8f98 100644 --- a/sig/openai/models/completion_usage.rbs +++ b/sig/openai/models/completion_usage.rbs @@ -33,8 +33,8 @@ module OpenAI completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer, - completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails, - prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails + ?completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails, + ?prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails ) -> void | (?OpenAI::Models::completion_usage | OpenAI::BaseModel data) -> void @@ -67,10 +67,10 @@ module 
OpenAI def initialize: ( - accepted_prediction_tokens: Integer, - audio_tokens: Integer, - reasoning_tokens: Integer, - rejected_prediction_tokens: Integer + ?accepted_prediction_tokens: Integer, + ?audio_tokens: Integer, + ?reasoning_tokens: Integer, + ?rejected_prediction_tokens: Integer ) -> void | ( ?OpenAI::Models::CompletionUsage::completion_tokens_details @@ -93,7 +93,7 @@ module OpenAI def cached_tokens=: (Integer) -> Integer def initialize: - (audio_tokens: Integer, cached_tokens: Integer) -> void + (?audio_tokens: Integer, ?cached_tokens: Integer) -> void | ( ?OpenAI::Models::CompletionUsage::prompt_tokens_details | OpenAI::BaseModel data diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs index d6fa5f3d..8fa6b6d9 100644 --- a/sig/openai/models/create_embedding_response.rbs +++ b/sig/openai/models/create_embedding_response.rbs @@ -22,7 +22,7 @@ module OpenAI data: ::Array[OpenAI::Models::Embedding], model: String, usage: OpenAI::Models::CreateEmbeddingResponse::Usage, - object: :list + ?object: :list ) -> void | ( ?OpenAI::Models::create_embedding_response | OpenAI::BaseModel data diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs index 997cabfc..892d3226 100644 --- a/sig/openai/models/embedding.rbs +++ b/sig/openai/models/embedding.rbs @@ -11,7 +11,7 @@ module OpenAI attr_accessor object: :embedding def initialize: - (embedding: ::Array[Float], index: Integer, object: :embedding) -> void + (embedding: ::Array[Float], index: Integer, ?object: :embedding) -> void | (?OpenAI::Models::embedding | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::embedding diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 4e32f5ba..459ee35c 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -36,10 +36,10 @@ module OpenAI ( input: OpenAI::Models::EmbeddingCreateParams::input, model: OpenAI::Models::EmbeddingCreateParams::model, - dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, - user: String, - request_options: OpenAI::request_opts + ?dimensions: Integer, + ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::embedding_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index 0ecb693f..638daf85 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::file_content_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 70531f53..76f6a57d 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -16,7 +16,7 @@ module OpenAI ( file: IO | StringIO, purpose: OpenAI::Models::file_purpose, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | (?OpenAI::Models::file_create_params | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index fa6918f3..d611f520 
100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | (?OpenAI::Models::file_delete_params | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::file_delete_params diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs index a691b96b..5fae57f0 100644 --- a/sig/openai/models/file_deleted.rbs +++ b/sig/openai/models/file_deleted.rbs @@ -10,7 +10,7 @@ module OpenAI attr_accessor object: :file def initialize: - (id: String, deleted: bool, object: :file) -> void + (id: String, deleted: bool, ?object: :file) -> void | (?OpenAI::Models::file_deleted | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::file_deleted diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 4b92a0e8..b67cdad6 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -33,11 +33,11 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - order: OpenAI::Models::FileListParams::order, - purpose: String, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::FileListParams::order, + ?purpose: String, + ?request_options: OpenAI::request_opts ) -> void | (?OpenAI::Models::file_list_params | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index 1b0a4939..44034e82 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -44,9 +44,9 @@ module OpenAI filename: String, purpose: OpenAI::Models::FileObject::purpose, status: OpenAI::Models::FileObject::status, - expires_at: Integer, - status_details: String, - object: :file + ?expires_at: Integer, + ?status_details: String, + ?object: :file ) -> void | (?OpenAI::Models::file_object | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index dc986a6b..c1163712 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::file_retrieve_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index 12dfc6a9..81e5f0bc 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -86,11 +86,11 @@ module OpenAI trained_tokens: Integer?, training_file: String, validation_file: String?, - estimated_finish: Integer?, - integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?, - metadata: OpenAI::Models::metadata?, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method, - object: :"fine_tuning.job" + ?estimated_finish: Integer?, + ?integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?, + ?metadata: OpenAI::Models::metadata?, + ?method_: OpenAI::Models::FineTuning::FineTuningJob::Method, + ?object: :"fine_tuning.job" ) -> void | ( ?OpenAI::Models::FineTuning::fine_tuning_job @@ -146,9 +146,9 @@ module OpenAI def initialize: 
( - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs ) -> void | ( ?OpenAI::Models::FineTuning::FineTuningJob::hyperparameters @@ -223,9 +223,9 @@ module OpenAI def initialize: ( - dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, - supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ + ?dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, + ?supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, + ?type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ ) -> void | ( ?OpenAI::Models::FineTuning::FineTuningJob::method_ @@ -248,7 +248,7 @@ module OpenAI def initialize: ( - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters ) -> void | ( ?OpenAI::Models::FineTuning::FineTuningJob::Method::dpo @@ -292,10 +292,10 @@ module OpenAI def initialize: ( - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, + ?beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs ) -> void | ( ?OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::hyperparameters @@ -344,7 +344,7 @@ module OpenAI def initialize: ( - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters ) -> void | ( ?OpenAI::Models::FineTuning::FineTuningJob::Method::supervised @@ -381,9 +381,9 @@ module OpenAI def initialize: ( - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs ) -> void | ( 
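
The fine-tuning method types follow the same pattern: with `hyperparameters` optional, a method can be selected without spelling out any tuning values, presumably deferring to server-side defaults. Hedged call sites, assuming the generated classes accept these keywords as written:

    # Both constructions satisfy the revised signatures:
    OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised.new
    OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters.new(
      n_epochs: 3  # hypothetical value
    )
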
?OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::hyperparameters diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index 53287f13..950c38f0 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -42,9 +42,9 @@ module OpenAI created_at: Integer, level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, message: String, - data: top, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_, - object: :"fine_tuning.job.event" + ?data: top, + ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_, + ?object: :"fine_tuning.job.event" ) -> void | ( ?OpenAI::Models::FineTuning::fine_tuning_job_event diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs index 47f895a9..c8d6439d 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs @@ -26,9 +26,9 @@ module OpenAI def initialize: ( project: String, - entity: String?, - name: String?, - tags: ::Array[String] + ?entity: String?, + ?name: String?, + ?tags: ::Array[String] ) -> void | ( ?OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index f9039369..13b6893a 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -18,7 +18,7 @@ module OpenAI def initialize: ( wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, - type: :wandb + ?type: :wandb ) -> void | ( ?OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration_object diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index 922d99dc..e9218d86 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::FineTuning::job_cancel_params | OpenAI::BaseModel data diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index eee5a597..dbeaa601 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -49,14 +49,14 @@ module OpenAI ( model: OpenAI::Models::FineTuning::JobCreateParams::model, training_file: String, - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, - metadata: OpenAI::Models::metadata?, - method_: OpenAI::Models::FineTuning::JobCreateParams::Method, - seed: Integer?, - suffix: String?, - validation_file: String?, - request_options: OpenAI::request_opts + ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, + ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + ?metadata: OpenAI::Models::metadata?, + ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + ?seed: Integer?, + 
?suffix: String?, + ?validation_file: String?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::FineTuning::job_create_params @@ -112,9 +112,9 @@ module OpenAI def initialize: ( - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::hyperparameters @@ -156,7 +156,7 @@ module OpenAI def initialize: ( wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, - type: :wandb + ?type: :wandb ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::integration @@ -187,9 +187,9 @@ module OpenAI def initialize: ( project: String, - entity: String?, - name: String?, - tags: ::Array[String] + ?entity: String?, + ?name: String?, + ?tags: ::Array[String] ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::Integration::wandb @@ -228,9 +228,9 @@ module OpenAI def initialize: ( - dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, - supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ + ?dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, + ?supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, + ?type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::method_ @@ -253,7 +253,7 @@ module OpenAI def initialize: ( - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::Method::dpo @@ -297,10 +297,10 @@ module OpenAI def initialize: ( - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, + ?beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::hyperparameters @@ -349,7 +349,7 @@ module OpenAI def initialize: ( - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::Method::supervised @@ -386,9 +386,9 @@ module OpenAI def 
initialize: ( - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs ) -> void | ( ?OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::hyperparameters diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index b14a4ba2..a6d3e8ad 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -18,9 +18,9 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::FineTuning::job_list_events_params diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index f4fc50fc..e1202bb6 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -21,10 +21,10 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - metadata: ::Hash[Symbol, String]?, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?metadata: ::Hash[Symbol, String]?, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::FineTuning::job_list_params diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 53329e19..9e78eb66 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::FineTuning::job_retrieve_params | OpenAI::BaseModel data diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 5c8dec0b..3f634afe 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -19,9 +19,9 @@ module OpenAI def initialize: ( - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::FineTuning::Jobs::checkpoint_list_params diff --git a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index e3b4c10a..6c945032 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -36,7 +36,7 @@ module OpenAI fine_tuning_job_id: String, metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, step_number: 
Integer, - object: :"fine_tuning.job.checkpoint" + ?object: :"fine_tuning.job.checkpoint" ) -> void | ( ?OpenAI::Models::FineTuning::Jobs::fine_tuning_job_checkpoint @@ -87,13 +87,13 @@ module OpenAI def initialize: ( - full_valid_loss: Float, - full_valid_mean_token_accuracy: Float, - step: Float, - train_loss: Float, - train_mean_token_accuracy: Float, - valid_loss: Float, - valid_mean_token_accuracy: Float + ?full_valid_loss: Float, + ?full_valid_mean_token_accuracy: Float, + ?step: Float, + ?train_loss: Float, + ?train_mean_token_accuracy: Float, + ?valid_loss: Float, + ?valid_mean_token_accuracy: Float ) -> void | ( ?OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::metrics diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs index edff252f..4361e6e1 100644 --- a/sig/openai/models/function_definition.rbs +++ b/sig/openai/models/function_definition.rbs @@ -26,9 +26,9 @@ module OpenAI def initialize: ( name: String, - description: String, - parameters: OpenAI::Models::function_parameters, - strict: bool? + ?description: String, + ?parameters: OpenAI::Models::function_parameters, + ?strict: bool? ) -> void | ( ?OpenAI::Models::function_definition | OpenAI::BaseModel data diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs index 3c094e5d..bd80e80e 100644 --- a/sig/openai/models/image.rbs +++ b/sig/openai/models/image.rbs @@ -16,7 +16,7 @@ module OpenAI def url=: (String) -> String def initialize: - (b64_json: String, revised_prompt: String, url: String) -> void + (?b64_json: String, ?revised_prompt: String, ?url: String) -> void | (?OpenAI::Models::image | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::image diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 2cb6174c..fa5b8d08 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -32,12 +32,12 @@ module OpenAI def initialize: ( image: IO | StringIO, - model: OpenAI::Models::ImageCreateVariationParams::model?, - n: Integer?, - response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, - size: OpenAI::Models::ImageCreateVariationParams::size?, - user: String, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ImageCreateVariationParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, + ?size: OpenAI::Models::ImageCreateVariationParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::image_create_variation_params diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 17f6d2a5..4910dbfa 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -41,13 +41,13 @@ module OpenAI ( image: IO | StringIO, prompt: String, - mask: IO | StringIO, - model: OpenAI::Models::ImageEditParams::model?, - n: Integer?, - response_format: OpenAI::Models::ImageEditParams::response_format?, - size: OpenAI::Models::ImageEditParams::size?, - user: String, - request_options: OpenAI::request_opts + ?mask: IO | StringIO, + ?model: OpenAI::Models::ImageEditParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageEditParams::response_format?, + ?size: OpenAI::Models::ImageEditParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | (?OpenAI::Models::image_edit_params | 
OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index ebeabef9..224faa06 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -42,14 +42,14 @@ module OpenAI def initialize: ( prompt: String, - model: OpenAI::Models::ImageGenerateParams::model?, - n: Integer?, - quality: OpenAI::Models::ImageGenerateParams::quality, - response_format: OpenAI::Models::ImageGenerateParams::response_format?, - size: OpenAI::Models::ImageGenerateParams::size?, - style: OpenAI::Models::ImageGenerateParams::style?, - user: String, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ImageGenerateParams::model?, + ?n: Integer?, + ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, + ?size: OpenAI::Models::ImageGenerateParams::size?, + ?style: OpenAI::Models::ImageGenerateParams::style?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::image_generate_params | OpenAI::BaseModel data diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs index 4d5cd661..dee8fd85 100644 --- a/sig/openai/models/model.rbs +++ b/sig/openai/models/model.rbs @@ -13,7 +13,12 @@ module OpenAI attr_accessor owned_by: String def initialize: - (id: String, created: Integer, owned_by: String, object: :model) -> void + ( + id: String, + created: Integer, + owned_by: String, + ?object: :model + ) -> void | (?OpenAI::Models::model | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::model diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index 4ea85eb1..ca717581 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::model_delete_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 258c64c8..7b6caaeb 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | (?OpenAI::Models::model_list_params | OpenAI::BaseModel data) -> void def to_hash: -> OpenAI::Models::model_list_params diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index 78cbe28c..61008928 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::model_retrieve_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index e5c23167..5f8b6773 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -22,8 +22,8 @@ module OpenAI def initialize: ( input: OpenAI::Models::ModerationCreateParams::input, - model: 
OpenAI::Models::ModerationCreateParams::model, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ModerationCreateParams::model, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::moderation_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs index b780a3e0..ac58a3d7 100644 --- a/sig/openai/models/moderation_image_url_input.rbs +++ b/sig/openai/models/moderation_image_url_input.rbs @@ -14,7 +14,7 @@ module OpenAI def initialize: ( image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, - type: :image_url + ?type: :image_url ) -> void | ( ?OpenAI::Models::moderation_image_url_input | OpenAI::BaseModel data diff --git a/sig/openai/models/moderation_text_input.rbs b/sig/openai/models/moderation_text_input.rbs index 58c7920b..3a5a4a09 100644 --- a/sig/openai/models/moderation_text_input.rbs +++ b/sig/openai/models/moderation_text_input.rbs @@ -8,7 +8,7 @@ module OpenAI attr_accessor type: :text def initialize: - (text: String, type: :text) -> void + (text: String, ?type: :text) -> void | ( ?OpenAI::Models::moderation_text_input | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs index 253ded71..0d5fa3b5 100644 --- a/sig/openai/models/other_file_chunking_strategy_object.rbs +++ b/sig/openai/models/other_file_chunking_strategy_object.rbs @@ -6,7 +6,7 @@ module OpenAI attr_accessor type: :other def initialize: - (type: :other) -> void + (?type: :other) -> void | ( ?OpenAI::Models::other_file_chunking_strategy_object | OpenAI::BaseModel data diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index 3a38b298..a7604c1f 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -13,8 +13,8 @@ module OpenAI def initialize: ( - effort: OpenAI::Models::reasoning_effort?, - generate_summary: OpenAI::Models::Reasoning::generate_summary? + ?effort: OpenAI::Models::reasoning_effort?, + ?generate_summary: OpenAI::Models::Reasoning::generate_summary? ) -> void | (?OpenAI::Models::reasoning | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs index 3e129a52..f0021d58 100644 --- a/sig/openai/models/response_format_json_object.rbs +++ b/sig/openai/models/response_format_json_object.rbs @@ -6,7 +6,7 @@ module OpenAI attr_accessor type: :json_object def initialize: - (type: :json_object) -> void + (?type: :json_object) -> void | ( ?OpenAI::Models::response_format_json_object | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs index 7b3816a5..eb9e4e25 100644 --- a/sig/openai/models/response_format_json_schema.rbs +++ b/sig/openai/models/response_format_json_schema.rbs @@ -14,7 +14,7 @@ module OpenAI def initialize: ( json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, - type: :json_schema + ?type: :json_schema ) -> void | ( ?OpenAI::Models::response_format_json_schema | OpenAI::BaseModel data @@ -46,9 +46,9 @@ module OpenAI def initialize: ( name: String, - description: String, - schema: ::Hash[Symbol, top], - strict: bool? + ?description: String, + ?schema: ::Hash[Symbol, top], + ?strict: bool? 
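
Here only `name` stays required. Illustrative call sites, assuming the generated Ruby class mirrors this signature; the schema contents are hypothetical:

    # Valid with the bare minimum:
    OpenAI::Models::ResponseFormatJSONSchema::JSONSchema.new(name: "weather")
    # Or with any subset of the now-optional keywords:
    OpenAI::Models::ResponseFormatJSONSchema::JSONSchema.new(
      name: "weather",
      schema: {type: "object"},
      strict: true
    )
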
) -> void | ( ?OpenAI::Models::ResponseFormatJSONSchema::json_schema diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs index c6c2619d..d8c203e5 100644 --- a/sig/openai/models/response_format_text.rbs +++ b/sig/openai/models/response_format_text.rbs @@ -6,7 +6,7 @@ module OpenAI attr_accessor type: :text def initialize: - (type: :text) -> void + (?type: :text) -> void | ( ?OpenAI::Models::response_format_text | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index 39cc531c..0b7870e8 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -23,7 +23,7 @@ module OpenAI display_height: Float, display_width: Float, environment: OpenAI::Models::Responses::ComputerTool::environment, - type: :computer_use_preview + ?type: :computer_use_preview ) -> void | ( ?OpenAI::Models::Responses::computer_tool | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 7347e123..7638ced1 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -23,7 +23,7 @@ module OpenAI ( content: OpenAI::Models::Responses::EasyInputMessage::content, role: OpenAI::Models::Responses::EasyInputMessage::role, - type: OpenAI::Models::Responses::EasyInputMessage::type_ + ?type: OpenAI::Models::Responses::EasyInputMessage::type_ ) -> void | ( ?OpenAI::Models::Responses::easy_input_message diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 32c08269..34894b2e 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -34,10 +34,10 @@ module OpenAI def initialize: ( vector_store_ids: ::Array[String], - filters: OpenAI::Models::Responses::FileSearchTool::filters, - max_num_results: Integer, - ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, - type: :file_search + ?filters: OpenAI::Models::Responses::FileSearchTool::filters, + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, + ?type: :file_search ) -> void | ( ?OpenAI::Models::Responses::file_search_tool @@ -72,8 +72,8 @@ module OpenAI def initialize: ( - ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, - score_threshold: Float + ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, + ?score_threshold: Float ) -> void | ( ?OpenAI::Models::Responses::FileSearchTool::ranking_options diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index 3a052a98..78d3312b 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -26,8 +26,8 @@ module OpenAI name: String, parameters: ::Hash[Symbol, top], strict: bool, - description: String?, - type: :function + ?description: String?, + ?type: :function ) -> void | ( ?OpenAI::Models::Responses::function_tool | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index e9993f02..29314e57 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -34,11 +34,11 @@ module OpenAI 
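
The list-params classes get the same treatment: once every pagination keyword gains a `?`, an empty construction becomes valid, as the hunk below shows. A short sketch, with hypothetical values:

    OpenAI::Models::Responses::InputItemListParams.new
    OpenAI::Models::Responses::InputItemListParams.new(limit: 20, order: :asc)
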
def initialize: ( - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Responses::InputItemListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Responses::input_item_list_params diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 2fa4632c..42ac11e7 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -101,15 +101,15 @@ module OpenAI tool_choice: OpenAI::Models::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, - max_output_tokens: Integer?, - previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, - status: OpenAI::Models::Responses::response_status, - text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: OpenAI::Models::Responses::Response::truncation?, - usage: OpenAI::Models::Responses::ResponseUsage, - user: String, - object: :response + ?max_output_tokens: Integer?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?status: OpenAI::Models::Responses::response_status, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?truncation: OpenAI::Models::Responses::Response::truncation?, + ?usage: OpenAI::Models::Responses::ResponseUsage, + ?user: String, + ?object: :response ) -> void | ( ?OpenAI::Models::Responses::response | OpenAI::BaseModel data @@ -131,7 +131,7 @@ module OpenAI def initialize: ( - reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason ) -> void | ( ?OpenAI::Models::Responses::Response::incomplete_details diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index 3f1416c2..72465557 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -10,7 +10,7 @@ module OpenAI attr_accessor type: :"response.audio.delta" def initialize: - (delta: String, type: :"response.audio.delta") -> void + (delta: String, ?type: :"response.audio.delta") -> void | ( ?OpenAI::Models::Responses::response_audio_delta_event | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 2e521bbe..4617b59f 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -7,7 +7,7 @@ module OpenAI attr_accessor type: :"response.audio.done" def initialize: - (type: :"response.audio.done") -> void + (?type: :"response.audio.done") -> void | ( ?OpenAI::Models::Responses::response_audio_done_event | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index 047e0072..c6efbd43 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -10,7 +10,7 @@ module OpenAI attr_accessor type: :"response.audio.transcript.delta" def initialize: - (delta: String, type: :"response.audio.transcript.delta") -> void + (delta: String, ?type: 
:"response.audio.transcript.delta") -> void | ( ?OpenAI::Models::Responses::response_audio_transcript_delta_event | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index 225a0012..9446ffeb 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -8,7 +8,7 @@ module OpenAI attr_accessor type: :"response.audio.transcript.done" def initialize: - (type: :"response.audio.transcript.done") -> void + (?type: :"response.audio.transcript.done") -> void | ( ?OpenAI::Models::Responses::response_audio_transcript_done_event | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index edd9f658..a1822231 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( delta: String, output_index: Integer, - type: :"response.code_interpreter_call.code.delta" + ?type: :"response.code_interpreter_call.code.delta" ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_call_code_delta_event diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 91d301fe..d735f06b 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( code: String, output_index: Integer, - type: :"response.code_interpreter_call.code.done" + ?type: :"response.code_interpreter_call.code.done" ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_call_code_done_event diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index 8faa1b2d..feccd45a 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, - type: :"response.code_interpreter_call.completed" + ?type: :"response.code_interpreter_call.completed" ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_call_completed_event diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index ee6b0ff0..e46fc46f 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, - type: :"response.code_interpreter_call.in_progress" + ?type: :"response.code_interpreter_call.in_progress" ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_call_in_progress_event diff --git 
a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index a8ed5522..66622c55 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, - type: :"response.code_interpreter_call.interpreting" + ?type: :"response.code_interpreter_call.interpreting" ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_call_interpreting_event diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index f57c376a..8dbeca6d 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -27,7 +27,7 @@ module OpenAI code: String, results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, - type: :code_interpreter_call + ?type: :code_interpreter_call ) -> void | ( ?OpenAI::Models::Responses::response_code_interpreter_tool_call @@ -49,7 +49,7 @@ module OpenAI attr_accessor type: :logs def initialize: - (logs: String, type: :logs) -> void + (logs: String, ?type: :logs) -> void | ( ?OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::logs | OpenAI::BaseModel data @@ -72,7 +72,7 @@ module OpenAI def initialize: ( files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - type: :files + ?type: :files ) -> void | ( ?OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::files diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 05a0a5ab..15c402f6 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -15,7 +15,7 @@ module OpenAI def initialize: ( response: OpenAI::Models::Responses::Response, - type: :"response.completed" + ?type: :"response.completed" ) -> void | ( ?OpenAI::Models::Responses::response_completed_event diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 7cc62b09..6a5b9b8a 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -74,7 +74,7 @@ module OpenAI button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, x: Integer, y_: Integer, - type: :click + ?type: :click ) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::click @@ -106,7 +106,7 @@ module OpenAI attr_accessor y_: Integer def initialize: - (x: Integer, y_: Integer, type: :double_click) -> void + (x: Integer, y_: Integer, ?type: :double_click) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::double_click | OpenAI::BaseModel data @@ -129,7 +129,7 @@ module OpenAI def initialize: ( path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], - type: :drag + ?type: :drag ) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::drag @@ -164,7 +164,7 @@ 
module OpenAI attr_accessor type: :keypress def initialize: - (keys: ::Array[String], type: :keypress) -> void + (keys: ::Array[String], ?type: :keypress) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::keypress | OpenAI::BaseModel data @@ -183,7 +183,7 @@ module OpenAI attr_accessor y_: Integer def initialize: - (x: Integer, y_: Integer, type: :move) -> void + (x: Integer, y_: Integer, ?type: :move) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::move | OpenAI::BaseModel data @@ -198,7 +198,7 @@ module OpenAI attr_accessor type: :screenshot def initialize: - (type: :screenshot) -> void + (?type: :screenshot) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::screenshot | OpenAI::BaseModel data @@ -233,7 +233,7 @@ module OpenAI scroll_y: Integer, x: Integer, y_: Integer, - type: :scroll + ?type: :scroll ) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::scroll @@ -251,7 +251,7 @@ module OpenAI attr_accessor type: :type def initialize: - (text: String, type: :type) -> void + (text: String, ?type: :type) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::type_ | OpenAI::BaseModel data @@ -266,7 +266,7 @@ module OpenAI attr_accessor type: :wait def initialize: - (type: :wait) -> void + (?type: :wait) -> void | ( ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 90b93e5a..5fbdb5f4 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -37,9 +37,9 @@ module OpenAI id: String, call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, - type: :computer_call_output + ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, + ?type: :computer_call_output ) -> void | ( ?OpenAI::Models::Responses::response_computer_tool_call_output_item diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index 931b0f8d..f71fb9a1 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -17,9 +17,9 @@ module OpenAI def initialize: ( - file_id: String, - image_url: String, - type: :computer_screenshot + ?file_id: String, + ?image_url: String, + ?type: :computer_screenshot ) -> void | ( ?OpenAI::Models::Responses::response_computer_tool_call_output_screenshot diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 5bbbdac3..00707adc 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -27,7 +27,7 @@ module OpenAI item_id: String, output_index: Integer, part: 
OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, - type: :"response.content_part.added" + ?type: :"response.content_part.added" ) -> void | ( ?OpenAI::Models::Responses::response_content_part_added_event diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index b915c001..06c3b822 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -27,7 +27,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, - type: :"response.content_part.done" + ?type: :"response.content_part.done" ) -> void | ( ?OpenAI::Models::Responses::response_content_part_done_event diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index d9049e41..f69f74bc 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -79,22 +79,22 @@ module OpenAI ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::Responses::ResponseCreateParams::model, - include: ::Array[OpenAI::Models::Responses::response_includable]?, - instructions: String?, - max_output_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - parallel_tool_calls: bool?, - previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, - store: bool?, - temperature: Float?, - text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - tools: ::Array[OpenAI::Models::Responses::tool], - top_p: Float?, - truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - user: String, - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Responses::response_create_params diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index 6a67bba2..386c4aa8 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -15,7 +15,7 @@ module OpenAI def initialize: ( response: OpenAI::Models::Responses::Response, - type: :"response.created" + ?type: :"response.created" ) -> void | ( ?OpenAI::Models::Responses::response_created_event diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index 509b3360..4458ecb9 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -8,7 +8,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + 
(?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Responses::response_delete_params | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index ddfbd3d6..8f3fcbdf 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -14,7 +14,12 @@ module OpenAI attr_accessor type: :error def initialize: - (code: String?, message: String, param: String?, type: :error) -> void + ( + code: String?, + message: String, + param: String?, + ?type: :error + ) -> void | ( ?OpenAI::Models::Responses::response_error_event | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index 1edd4ecc..a00dfcef 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -15,7 +15,7 @@ module OpenAI def initialize: ( response: OpenAI::Models::Responses::Response, - type: :"response.failed" + ?type: :"response.failed" ) -> void | ( ?OpenAI::Models::Responses::response_failed_event diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index 7f8857c3..876bfbc4 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.file_search_call.completed" + ?type: :"response.file_search_call.completed" ) -> void | ( ?OpenAI::Models::Responses::response_file_search_call_completed_event diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index d5e5064e..389c41de 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.file_search_call.in_progress" + ?type: :"response.file_search_call.in_progress" ) -> void | ( ?OpenAI::Models::Responses::response_file_search_call_in_progress_event diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index 14f56371..37da4da7 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.file_search_call.searching" + ?type: :"response.file_search_call.searching" ) -> void | ( ?OpenAI::Models::Responses::response_file_search_call_searching_event diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index 42d71f9c..689645fc 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -26,8 +26,8 @@ module OpenAI id: String, queries: ::Array[String], status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, - results: 
::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]?, - type: :file_search_call + ?results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]?, + ?type: :file_search_call ) -> void | ( ?OpenAI::Models::Responses::response_file_search_tool_call @@ -79,11 +79,11 @@ module OpenAI def initialize: ( - attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, - file_id: String, - filename: String, - score: Float, - text: String + ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, + ?file_id: String, + ?filename: String, + ?score: Float, + ?text: String ) -> void | ( ?OpenAI::Models::Responses::ResponseFileSearchToolCall::result diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index a539a703..9f0fd321 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -28,10 +28,10 @@ module OpenAI def initialize: ( schema: ::Hash[Symbol, top], - description: String, - name: String, - strict: bool?, - type: :json_schema + ?description: String, + ?name: String, + ?strict: bool?, + ?type: :json_schema ) -> void | ( ?OpenAI::Models::Responses::response_format_text_json_schema_config diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index 0a9a828b..3b98be65 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -23,7 +23,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, - type: :"response.function_call_arguments.delta" + ?type: :"response.function_call_arguments.delta" ) -> void | ( ?OpenAI::Models::Responses::response_function_call_arguments_delta_event diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index fc573721..89a5d2ad 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -23,7 +23,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, - type: :"response.function_call_arguments.done" + ?type: :"response.function_call_arguments.done" ) -> void | ( ?OpenAI::Models::Responses::response_function_call_arguments_done_event diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index dc7f9bf6..1a7b694b 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -35,9 +35,9 @@ module OpenAI arguments: String, call_id: String, name: String, - id: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, - type: :function_call + ?id: String, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, + ?type: :function_call ) -> void | ( ?OpenAI::Models::Responses::response_function_tool_call diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 
2208c1db..481354aa 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -30,8 +30,8 @@ module OpenAI id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, - type: :function_call_output + ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, + ?type: :function_call_output ) -> void | ( ?OpenAI::Models::Responses::response_function_tool_call_output_item diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 198d14e2..0333dad4 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -19,7 +19,7 @@ module OpenAI ( id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, - type: :web_search_call + ?type: :web_search_call ) -> void | ( ?OpenAI::Models::Responses::response_function_web_search diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 2649e5cb..7b76bb87 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -15,7 +15,7 @@ module OpenAI def initialize: ( response: OpenAI::Models::Responses::Response, - type: :"response.in_progress" + ?type: :"response.in_progress" ) -> void | ( ?OpenAI::Models::Responses::response_in_progress_event diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index 270ea93d..b68b4f2d 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ b/sig/openai/models/responses/response_incomplete_event.rbs @@ -15,7 +15,7 @@ module OpenAI def initialize: ( response: OpenAI::Models::Responses::Response, - type: :"response.incomplete" + ?type: :"response.incomplete" ) -> void | ( ?OpenAI::Models::Responses::response_incomplete_event diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index a076443c..79ba418e 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -19,7 +19,7 @@ module OpenAI ( data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::format_, - type: :input_audio + ?type: :input_audio ) -> void | ( ?OpenAI::Models::Responses::response_input_audio diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index 90aaf6d9..4b09c314 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -26,10 +26,10 @@ module OpenAI def initialize: ( - file_data: String, - file_id: String, - filename: String, - type: :input_file + ?file_data: String, + ?file_id: String, + ?filename: String, + ?type: :input_file ) -> void | ( ?OpenAI::Models::Responses::response_input_file diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index 18ac35c5..bd044576 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -21,9 +21,9 @@ module OpenAI def initialize: ( detail: 
OpenAI::Models::Responses::ResponseInputImage::detail, - file_id: String?, - image_url: String?, - type: :input_image + ?file_id: String?, + ?image_url: String?, + ?type: :input_image ) -> void | ( ?OpenAI::Models::Responses::response_input_image diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index e6a03f3b..646f1565 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -44,8 +44,8 @@ module OpenAI ( content: OpenAI::Models::Responses::response_input_message_content_list, role: OpenAI::Models::Responses::ResponseInputItem::Message::role, - status: OpenAI::Models::Responses::ResponseInputItem::Message::status, - type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ + ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ ) -> void | ( ?OpenAI::Models::Responses::ResponseInputItem::message @@ -120,10 +120,10 @@ module OpenAI ( call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - id: String, - acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, - type: :computer_call_output + ?id: String, + ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], + ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, + ?type: :computer_call_output ) -> void | ( ?OpenAI::Models::Responses::ResponseInputItem::computer_call_output @@ -193,9 +193,9 @@ module OpenAI ( call_id: String, output: String, - id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status, - type: :function_call_output + ?id: String, + ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status, + ?type: :function_call_output ) -> void | ( ?OpenAI::Models::Responses::ResponseInputItem::function_call_output @@ -223,7 +223,7 @@ module OpenAI attr_accessor type: :item_reference def initialize: - (id: String, type: :item_reference) -> void + (id: String, ?type: :item_reference) -> void | ( ?OpenAI::Models::Responses::ResponseInputItem::item_reference | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index 0b1246b9..d0c4f6ac 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -34,8 +34,8 @@ module OpenAI id: String, content: OpenAI::Models::Responses::response_input_message_content_list, role: OpenAI::Models::Responses::ResponseInputMessageItem::role, - status: OpenAI::Models::Responses::ResponseInputMessageItem::status, - type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ ) -> void | ( ?OpenAI::Models::Responses::response_input_message_item diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs index df1540c3..8c1de672 100644 --- a/sig/openai/models/responses/response_input_text.rbs +++ b/sig/openai/models/responses/response_input_text.rbs 
@@ -9,7 +9,7 @@ module OpenAI attr_accessor type: :input_text def initialize: - (text: String, type: :input_text) -> void + (text: String, ?type: :input_text) -> void | ( ?OpenAI::Models::Responses::response_input_text | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index 05202a31..3ce80286 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -30,7 +30,7 @@ module OpenAI first_id: String, has_more: bool, last_id: String, - object: :list + ?object: :list ) -> void | ( ?OpenAI::Models::Responses::response_item_list diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs index 79e1d98c..7c36f267 100644 --- a/sig/openai/models/responses/response_output_audio.rbs +++ b/sig/openai/models/responses/response_output_audio.rbs @@ -12,7 +12,7 @@ module OpenAI attr_accessor type: :output_audio def initialize: - (data: String, transcript: String, type: :output_audio) -> void + (data: String, transcript: String, ?type: :output_audio) -> void | ( ?OpenAI::Models::Responses::response_output_audio | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index a8576913..2e3f2b52 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, - type: :"response.output_item.added" + ?type: :"response.output_item.added" ) -> void | ( ?OpenAI::Models::Responses::response_output_item_added_event diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index 9f91df23..fbe7e12a 100644 --- a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, - type: :"response.output_item.done" + ?type: :"response.output_item.done" ) -> void | ( ?OpenAI::Models::Responses::response_output_item_done_event diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index c1bbed4a..429aac88 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -26,8 +26,8 @@ module OpenAI id: String, content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], status: OpenAI::Models::Responses::ResponseOutputMessage::status, - role: :assistant, - type: :message + ?role: :assistant, + ?type: :message ) -> void | ( ?OpenAI::Models::Responses::response_output_message diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs index adbcb084..8ea6676a 100644 --- a/sig/openai/models/responses/response_output_refusal.rbs +++ b/sig/openai/models/responses/response_output_refusal.rbs @@ -9,7 +9,7 @@ module OpenAI attr_accessor type: :refusal def initialize: - (refusal: String, type: :refusal) -> void + (refusal: String, ?type: :refusal) -> void | ( ?OpenAI::Models::Responses::response_output_refusal | OpenAI::BaseModel data 
diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index f8da61b0..c900d1f0 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -19,7 +19,7 @@ module OpenAI ( annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, - type: :output_text + ?type: :output_text ) -> void | ( ?OpenAI::Models::Responses::response_output_text @@ -45,7 +45,7 @@ module OpenAI attr_accessor type: :file_citation def initialize: - (file_id: String, index: Integer, type: :file_citation) -> void + (file_id: String, index: Integer, ?type: :file_citation) -> void | ( ?OpenAI::Models::Responses::ResponseOutputText::Annotation::file_citation | OpenAI::BaseModel data @@ -80,7 +80,7 @@ module OpenAI start_index: Integer, title: String, url: String, - type: :url_citation + ?type: :url_citation ) -> void | ( ?OpenAI::Models::Responses::ResponseOutputText::Annotation::url_citation @@ -100,7 +100,7 @@ module OpenAI attr_accessor type: :file_path def initialize: - (file_id: String, index: Integer, type: :file_path) -> void + (file_id: String, index: Integer, ?type: :file_path) -> void | ( ?OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index b9b01efa..6e9cb7a8 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -26,8 +26,8 @@ module OpenAI ( id: String, summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - status: OpenAI::Models::Responses::ResponseReasoningItem::status, - type: :reasoning + ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, + ?type: :reasoning ) -> void | ( ?OpenAI::Models::Responses::response_reasoning_item @@ -44,7 +44,7 @@ module OpenAI attr_accessor type: :summary_text def initialize: - (text: String, type: :summary_text) -> void + (text: String, ?type: :summary_text) -> void | ( ?OpenAI::Models::Responses::ResponseReasoningItem::summary | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index eb50a0ed..53a1cf99 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -27,7 +27,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, - type: :"response.refusal.delta" + ?type: :"response.refusal.delta" ) -> void | ( ?OpenAI::Models::Responses::response_refusal_delta_event diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 0fbfff6e..2b52e381 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -27,7 +27,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, - type: :"response.refusal.done" + ?type: :"response.refusal.done" ) -> void | ( ?OpenAI::Models::Responses::response_refusal_done_event diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 23530635..846f8753 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ 
b/sig/openai/models/responses/response_retrieve_params.rbs @@ -17,8 +17,8 @@ module OpenAI def initialize: ( - include: ::Array[OpenAI::Models::Responses::response_includable], - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::Responses::response_retrieve_params diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 9030b65e..71d0cbd2 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -31,7 +31,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - type: :"response.output_text.annotation.added" + ?type: :"response.output_text.annotation.added" ) -> void | ( ?OpenAI::Models::Responses::response_text_annotation_delta_event @@ -57,7 +57,7 @@ module OpenAI attr_accessor type: :file_citation def initialize: - (file_id: String, index: Integer, type: :file_citation) -> void + (file_id: String, index: Integer, ?type: :file_citation) -> void | ( ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_citation | OpenAI::BaseModel data @@ -92,7 +92,7 @@ module OpenAI start_index: Integer, title: String, url: String, - type: :url_citation + ?type: :url_citation ) -> void | ( ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::url_citation @@ -112,7 +112,7 @@ module OpenAI attr_accessor type: :file_path def initialize: - (file_id: String, index: Integer, type: :file_path) -> void + (file_id: String, index: Integer, ?type: :file_path) -> void | ( ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index 0957217a..84d7da60 100644 --- a/sig/openai/models/responses/response_text_config.rbs +++ b/sig/openai/models/responses/response_text_config.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( - format_: OpenAI::Models::Responses::response_format_text_config + ?format_: OpenAI::Models::Responses::response_format_text_config ) -> void | ( ?OpenAI::Models::Responses::response_text_config diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index 5a692585..89c2f2fa 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -27,7 +27,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, - type: :"response.output_text.delta" + ?type: :"response.output_text.delta" ) -> void | ( ?OpenAI::Models::Responses::response_text_delta_event diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 8cda3224..16f089dd 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -27,7 +27,7 @@ module OpenAI item_id: String, output_index: Integer, text: String, - type: :"response.output_text.done" + ?type: :"response.output_text.done" ) -> void | ( ?OpenAI::Models::Responses::response_text_done_event diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs 
b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index 3be8bb79..0b7a9a7d 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.web_search_call.completed" + ?type: :"response.web_search_call.completed" ) -> void | ( ?OpenAI::Models::Responses::response_web_search_call_completed_event diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index f8d56aa4..236857f8 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.web_search_call.in_progress" + ?type: :"response.web_search_call.in_progress" ) -> void | ( ?OpenAI::Models::Responses::response_web_search_call_in_progress_event diff --git a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index 0e2a2c23..26145d41 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -19,7 +19,7 @@ module OpenAI ( item_id: String, output_index: Integer, - type: :"response.web_search_call.searching" + ?type: :"response.web_search_call.searching" ) -> void | ( ?OpenAI::Models::Responses::response_web_search_call_searching_event diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs index 7dbf4708..6f91e879 100644 --- a/sig/openai/models/responses/tool_choice_function.rbs +++ b/sig/openai/models/responses/tool_choice_function.rbs @@ -9,7 +9,7 @@ module OpenAI attr_accessor type: :function def initialize: - (name: String, type: :function) -> void + (name: String, ?type: :function) -> void | ( ?OpenAI::Models::Responses::tool_choice_function | OpenAI::BaseModel data diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index e815a5e3..31394f16 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -22,8 +22,8 @@ module OpenAI def initialize: ( type: OpenAI::Models::Responses::WebSearchTool::type_, - search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, - user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? + ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, + ?user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? 
) -> void | ( ?OpenAI::Models::Responses::web_search_tool | OpenAI::BaseModel data @@ -80,11 +80,11 @@ module OpenAI def initialize: ( - city: String, - country: String, - region: String, - timezone: String, - type: :approximate + ?city: String, + ?country: String, + ?region: String, + ?timezone: String, + ?type: :approximate ) -> void | ( ?OpenAI::Models::Responses::WebSearchTool::user_location diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index f3b1f264..8cf1c351 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -11,7 +11,7 @@ module OpenAI def initialize: ( static: OpenAI::Models::StaticFileChunkingStrategy, - type: :static + ?type: :static ) -> void | ( ?OpenAI::Models::static_file_chunking_strategy_object diff --git a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index 9d6c4140..48965341 100644 --- a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -11,7 +11,7 @@ module OpenAI def initialize: ( static: OpenAI::Models::StaticFileChunkingStrategy, - type: :static + ?type: :static ) -> void | ( ?OpenAI::Models::static_file_chunking_strategy_object_param diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index e58a0c87..c4c5f528 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -41,8 +41,8 @@ module OpenAI filename: String, purpose: String, status: OpenAI::Models::Upload::status, - file: OpenAI::Models::FileObject?, - object: :upload + ?file: OpenAI::Models::FileObject?, + ?object: :upload ) -> void | (?OpenAI::Models::upload | OpenAI::BaseModel data) -> void diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index 4ddc9f7b..de559455 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::upload_cancel_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index a22d1531..fdea0062 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -16,8 +16,8 @@ module OpenAI def initialize: ( part_ids: ::Array[String], - md5: String, - request_options: OpenAI::request_opts + ?md5: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::upload_complete_params | OpenAI::BaseModel data diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index de006ed9..20230ef3 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -27,7 +27,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::file_purpose, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::upload_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 0276fa19..b01dc193 100644 --- 
a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI attr_accessor data: IO | StringIO def initialize: - (data: IO | StringIO, request_options: OpenAI::request_opts) -> void + (data: IO | StringIO, ?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::Uploads::part_create_params | OpenAI::BaseModel data diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 18ff6f6e..6953994b 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -26,7 +26,7 @@ module OpenAI id: String, created_at: Integer, upload_id: String, - object: :"upload.part" + ?object: :"upload.part" ) -> void | ( ?OpenAI::Models::Uploads::upload_part | OpenAI::BaseModel data diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index de85bfdb..dbd3c546 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -52,9 +52,9 @@ module OpenAI name: String, status: OpenAI::Models::VectorStore::status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter, - expires_at: Integer?, - object: :vector_store + ?expires_after: OpenAI::Models::VectorStore::ExpiresAfter, + ?expires_at: Integer?, + ?object: :vector_store ) -> void | (?OpenAI::Models::vector_store | OpenAI::BaseModel data) -> void @@ -113,7 +113,7 @@ module OpenAI attr_accessor days: Integer def initialize: - (days: Integer, anchor: :last_active_at) -> void + (days: Integer, ?anchor: :last_active_at) -> void | ( ?OpenAI::Models::VectorStore::expires_after | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 93b49fe7..8e565574 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -38,12 +38,12 @@ module OpenAI def initialize: ( - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, - file_ids: ::Array[String], - metadata: OpenAI::Models::metadata?, - name: String, - request_options: OpenAI::request_opts + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::vector_store_create_params | OpenAI::BaseModel data @@ -59,7 +59,7 @@ module OpenAI attr_accessor days: Integer def initialize: - (days: Integer, anchor: :last_active_at) -> void + (days: Integer, ?anchor: :last_active_at) -> void | ( ?OpenAI::Models::VectorStoreCreateParams::expires_after | OpenAI::BaseModel data diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index 42f031e5..a53f5cac 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::vector_store_delete_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs index 53fe34e4..761c47e9 100644 
--- a/sig/openai/models/vector_store_deleted.rbs +++ b/sig/openai/models/vector_store_deleted.rbs @@ -11,7 +11,7 @@ module OpenAI attr_accessor object: :"vector_store.deleted" def initialize: - (id: String, deleted: bool, object: :"vector_store.deleted") -> void + (id: String, deleted: bool, ?object: :"vector_store.deleted") -> void | ( ?OpenAI::Models::vector_store_deleted | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index 7ec8d50d..aefc7b2b 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -33,11 +33,11 @@ module OpenAI def initialize: ( - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::VectorStoreListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::VectorStoreListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::vector_store_list_params | OpenAI::BaseModel data diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index a2603a79..2aac5641 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -7,7 +7,7 @@ module OpenAI include OpenAI::RequestParameters def initialize: - (request_options: OpenAI::request_opts) -> void + (?request_options: OpenAI::request_opts) -> void | ( ?OpenAI::Models::vector_store_retrieve_params | OpenAI::BaseModel data ) -> void diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 2ac6032b..e1467da5 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -39,11 +39,11 @@ module OpenAI def initialize: ( query: OpenAI::Models::VectorStoreSearchParams::query, - filters: OpenAI::Models::VectorStoreSearchParams::filters, - max_num_results: Integer, - ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, - rewrite_query: bool, - request_options: OpenAI::request_opts + ?filters: OpenAI::Models::VectorStoreSearchParams::filters, + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ?rewrite_query: bool, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::vector_store_search_params | OpenAI::BaseModel data @@ -87,8 +87,8 @@ module OpenAI def initialize: ( - ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, - score_threshold: Float + ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, + ?score_threshold: Float ) -> void | ( ?OpenAI::Models::VectorStoreSearchParams::ranking_options diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index 33acab97..440f7453 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -20,10 +20,10 @@ module OpenAI def initialize: ( - expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, - metadata: OpenAI::Models::metadata?, - name: String?, - request_options: OpenAI::request_opts + ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + ?metadata: OpenAI::Models::metadata?, + ?name: String?, + ?request_options: OpenAI::request_opts ) -> void | ( 
?OpenAI::Models::vector_store_update_params | OpenAI::BaseModel data @@ -39,7 +39,7 @@ module OpenAI attr_accessor days: Integer def initialize: - (days: Integer, anchor: :last_active_at) -> void + (days: Integer, ?anchor: :last_active_at) -> void | ( ?OpenAI::Models::VectorStoreUpdateParams::expires_after | OpenAI::BaseModel data diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index 42b68be5..5d2e3b32 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_batch_cancel_params diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 963a641f..80bc34fc 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -26,9 +26,9 @@ module OpenAI def initialize: ( file_ids: ::Array[String], - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - request_options: OpenAI::request_opts + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_batch_create_params diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index 21e09293..e542038d 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -45,12 +45,12 @@ module OpenAI def initialize: ( vector_store_id: String, - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, - limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_batch_list_files_params diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index 090286fa..f10bdb48 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_batch_retrieve_params diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index 751c410c..58b90952 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( 
vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_content_params diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs index 3153dccf..6d24a610 100644 --- a/sig/openai/models/vector_stores/file_content_response.rbs +++ b/sig/openai/models/vector_stores/file_content_response.rbs @@ -13,7 +13,7 @@ module OpenAI def type=: (String) -> String def initialize: - (text: String, type: String) -> void + (?text: String, ?type: String) -> void | ( ?OpenAI::Models::VectorStores::file_content_response | OpenAI::BaseModel data diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 471cc8a7..70f7ed4b 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -26,9 +26,9 @@ module OpenAI def initialize: ( file_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - request_options: OpenAI::request_opts + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_create_params diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index 212e1e30..14edfd60 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_delete_params diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 75b43fd1..d5d920b8 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -41,12 +41,12 @@ module OpenAI def initialize: ( - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileListParams::filter, - limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileListParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileListParams::order, + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_list_params diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index b490be00..09096f81 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -13,7 +13,7 @@ module OpenAI def initialize: ( vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_retrieve_params diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index 946352ec..3574bfa7 100644 --- 
a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -20,7 +20,7 @@ module OpenAI ( vector_store_id: String, attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> void | ( ?OpenAI::Models::VectorStores::file_update_params diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index c25e3ea0..b857316f 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -48,9 +48,9 @@ module OpenAI status: OpenAI::Models::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, - chunking_strategy: OpenAI::Models::file_chunking_strategy, - object: :"vector_store.file" + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy, + ?object: :"vector_store.file" ) -> void | ( ?OpenAI::Models::VectorStores::vector_store_file diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index 452a536f..848fdedc 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -34,7 +34,7 @@ module OpenAI file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, vector_store_id: String, - object: :"vector_store.files_batch" + ?object: :"vector_store.files_batch" ) -> void | ( ?OpenAI::Models::VectorStores::vector_store_file_batch diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs index fe8437d3..a1ccb5ea 100644 --- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs @@ -18,7 +18,7 @@ module OpenAI ( id: String, deleted: bool, - object: :"vector_store.file.deleted" + ?object: :"vector_store.file.deleted" ) -> void | ( ?OpenAI::Models::VectorStores::vector_store_file_deleted diff --git a/sig/openai/pooled_net_requester.rbs b/sig/openai/pooled_net_requester.rbs index 46a89cad..58f0fcce 100644 --- a/sig/openai/pooled_net_requester.rbs +++ b/sig/openai/pooled_net_requester.rbs @@ -32,6 +32,6 @@ module OpenAI OpenAI::PooledNetRequester::request request ) -> [Integer, top, Enumerable[String]] - def initialize: (size: Integer) -> void + def initialize: (?size: Integer) -> void end end diff --git a/sig/openai/resources/audio/speech.rbs b/sig/openai/resources/audio/speech.rbs index e537a18a..fc30ff40 100644 --- a/sig/openai/resources/audio/speech.rbs +++ b/sig/openai/resources/audio/speech.rbs @@ -11,9 +11,9 @@ module OpenAI input: String, model: OpenAI::Models::Audio::SpeechCreateParams::model, voice: OpenAI::Models::Audio::SpeechCreateParams::voice, - response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, - speed: Float, - request_options: OpenAI::request_opts + ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, + ?speed: Float, + ?request_options: OpenAI::request_opts ) -> top def initialize: (client: OpenAI::Client) -> void diff --git 
a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index d16f632f..67d0eb49 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -10,12 +10,12 @@ module OpenAI | ( file: IO | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, - language: String, - prompt: String, - response_format: OpenAI::Models::audio_response_format, - temperature: Float, - timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], - request_options: OpenAI::request_opts + ?language: String, + ?prompt: String, + ?response_format: OpenAI::Models::audio_response_format, + ?temperature: Float, + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index 540b8eaf..7683d6e2 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -10,10 +10,10 @@ module OpenAI | ( file: IO | StringIO, model: OpenAI::Models::Audio::TranslationCreateParams::model, - prompt: String, - response_format: OpenAI::Models::audio_response_format, - temperature: Float, - request_options: OpenAI::request_opts + ?prompt: String, + ?response_format: OpenAI::Models::audio_response_format, + ?temperature: Float, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::translation_create_response def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/batches.rbs b/sig/openai/resources/batches.rbs index 8d004f2c..506824d7 100644 --- a/sig/openai/resources/batches.rbs +++ b/sig/openai/resources/batches.rbs @@ -9,8 +9,8 @@ module OpenAI completion_window: OpenAI::Models::BatchCreateParams::completion_window, endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, - metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Batch def retrieve: @@ -20,7 +20,7 @@ module OpenAI ) -> OpenAI::Models::Batch | ( String batch_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Batch def list: @@ -28,9 +28,9 @@ module OpenAI ?OpenAI::Models::BatchListParams | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::Batch] | ( - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Batch] def cancel: @@ -40,7 +40,7 @@ module OpenAI ) -> OpenAI::Models::Batch | ( String batch_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Batch def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/beta/assistants.rbs b/sig/openai/resources/beta/assistants.rbs index 50acef87..508c1f80 100644 --- a/sig/openai/resources/beta/assistants.rbs +++ b/sig/openai/resources/beta/assistants.rbs @@ -9,17 +9,17 @@ module OpenAI ) -> OpenAI::Models::Beta::Assistant | ( model: OpenAI::Models::Beta::AssistantCreateParams::model, - description: String?, - instructions: String?, - metadata: OpenAI::Models::metadata?, - 
name: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool], - top_p: Float?, - request_options: OpenAI::request_opts + ?description: String?, + ?instructions: String?, + ?metadata: OpenAI::Models::metadata?, + ?name: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], + ?top_p: Float?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Assistant def retrieve: @@ -30,7 +30,7 @@ module OpenAI ) -> OpenAI::Models::Beta::Assistant | ( String assistant_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Assistant def update: @@ -41,18 +41,18 @@ module OpenAI ) -> OpenAI::Models::Beta::Assistant | ( String assistant_id, - description: String?, - instructions: String?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::AssistantUpdateParams::model, - name: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool], - top_p: Float?, - request_options: OpenAI::request_opts + ?description: String?, + ?instructions: String?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::AssistantUpdateParams::model, + ?name: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], + ?top_p: Float?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Assistant def list: @@ -61,11 +61,11 @@ module OpenAI | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant] | ( - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Beta::AssistantListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant] def delete: @@ -76,7 +76,7 @@ module OpenAI ) -> OpenAI::Models::Beta::AssistantDeleted | ( String assistant_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::AssistantDeleted def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index b825e4a8..3c50dbb1 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -12,10 +12,10 @@ module OpenAI | ::Hash[Symbol, top] params ) -> OpenAI::Models::Beta::Thread | ( - messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, - 
request_options: OpenAI::request_opts + ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Thread def retrieve: @@ -26,7 +26,7 @@ module OpenAI ) -> OpenAI::Models::Beta::Thread | ( String thread_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Thread def update: @@ -37,9 +37,9 @@ module OpenAI ) -> OpenAI::Models::Beta::Thread | ( String thread_id, - metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, - request_options: OpenAI::request_opts + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Thread def delete: @@ -50,7 +50,7 @@ module OpenAI ) -> OpenAI::Models::Beta::ThreadDeleted | ( String thread_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::ThreadDeleted def create_and_run: @@ -60,21 +60,21 @@ module OpenAI ) -> OpenAI::Models::Beta::Threads::Run | ( assistant_id: String, - instructions: String?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, - parallel_tool_calls: bool, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, - top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, - request_options: OpenAI::request_opts + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + ?parallel_tool_calls: bool, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def create_and_run_streaming: @@ -84,21 +84,21 @@ module OpenAI ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] | ( assistant_id: String, - instructions: String?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, - parallel_tool_calls: bool, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tool_resources: 
OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, - top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, - request_options: OpenAI::request_opts + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + ?parallel_tool_calls: bool, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/beta/threads/messages.rbs b/sig/openai/resources/beta/threads/messages.rbs index 3c37a9d8..ca8ccb42 100644 --- a/sig/openai/resources/beta/threads/messages.rbs +++ b/sig/openai/resources/beta/threads/messages.rbs @@ -13,9 +13,9 @@ module OpenAI String thread_id, content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, - attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, - metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Message def retrieve: @@ -27,7 +27,7 @@ module OpenAI | ( String message_id, thread_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Message def update: @@ -39,8 +39,8 @@ module OpenAI | ( String message_id, thread_id: String, - metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Message def list: @@ -51,12 +51,12 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message] | ( String thread_id, - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::order, - run_id: String, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Beta::Threads::MessageListParams::order, + ?run_id: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message] def delete: @@ -68,7 +68,7 @@ module OpenAI | ( String message_id, thread_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::MessageDeleted def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index a0c45000..2ad2c954 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ 
-14,23 +14,23 @@ module OpenAI | ( String thread_id, assistant_id: String, - include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - additional_instructions: String?, - additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, - instructions: String?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, - parallel_tool_calls: bool, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, - top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?additional_instructions: String?, + ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, + ?parallel_tool_calls: bool, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def create_streaming: @@ -42,23 +42,23 @@ module OpenAI | ( String thread_id, assistant_id: String, - include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - additional_instructions: String?, - additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, - instructions: String?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, - parallel_tool_calls: bool, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - temperature: Float?, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, - top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?additional_instructions: String?, + ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, + ?parallel_tool_calls: bool, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + 
?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] def retrieve: @@ -70,7 +70,7 @@ module OpenAI | ( String run_id, thread_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def update: @@ -82,8 +82,8 @@ module OpenAI | ( String run_id, thread_id: String, - metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def list: @@ -94,11 +94,11 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run] | ( String thread_id, - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Beta::Threads::RunListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run] def cancel: @@ -110,7 +110,7 @@ module OpenAI | ( String run_id, thread_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def submit_tool_outputs: @@ -123,7 +123,7 @@ module OpenAI String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run def submit_tool_outputs_streaming: @@ -136,7 +136,7 @@ module OpenAI String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/beta/threads/runs/steps.rbs b/sig/openai/resources/beta/threads/runs/steps.rbs index d9c96cfa..765d7135 100644 --- a/sig/openai/resources/beta/threads/runs/steps.rbs +++ b/sig/openai/resources/beta/threads/runs/steps.rbs @@ -14,8 +14,8 @@ module OpenAI String step_id, thread_id: String, run_id: String, - include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Runs::RunStep def list: @@ -27,12 +27,12 @@ module OpenAI | ( String run_id, thread_id: String, - after: String, - before: String, - include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?limit: Integer, + ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index c843c57c..84a03b90 100644 --- 
a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -12,35 +12,35 @@ module OpenAI | ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, - audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, - frequency_penalty: Float?, - function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, - functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], - logit_bias: ::Hash[Symbol, Integer]?, - logprobs: bool?, - max_completion_tokens: Integer?, - max_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, - n: Integer?, - parallel_tool_calls: bool, - prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, - presence_penalty: Float?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, - seed: Integer?, - service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, - stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, - store: bool?, - stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - temperature: Float?, - tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], - top_logprobs: Integer?, - top_p: Float?, - user: String, - web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, - request_options: OpenAI::request_opts + ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, + ?frequency_penalty: Float?, + ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, + ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: bool?, + ?max_completion_tokens: Integer?, + ?max_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + ?n: Integer?, + ?parallel_tool_calls: bool, + ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, + ?presence_penalty: Float?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + ?seed: Integer?, + ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, + ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, + ?store: bool?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?temperature: Float?, + ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, + ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], + ?top_logprobs: Integer?, + ?top_p: Float?, + ?user: String, + ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion def create_streaming: @@ -51,35 +51,35 @@ module OpenAI | ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, - audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, - frequency_penalty: Float?, - function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, - functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], - logit_bias: ::Hash[Symbol, Integer]?, - logprobs: bool?, - max_completion_tokens: Integer?, - 
max_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, - n: Integer?, - parallel_tool_calls: bool, - prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, - presence_penalty: Float?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, - seed: Integer?, - service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, - stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, - store: bool?, - stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - temperature: Float?, - tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], - top_logprobs: Integer?, - top_p: Float?, - user: String, - web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, - request_options: OpenAI::request_opts + ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, + ?frequency_penalty: Float?, + ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, + ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: bool?, + ?max_completion_tokens: Integer?, + ?max_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + ?n: Integer?, + ?parallel_tool_calls: bool, + ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, + ?presence_penalty: Float?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + ?seed: Integer?, + ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, + ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, + ?store: bool?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?temperature: Float?, + ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, + ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], + ?top_logprobs: Integer?, + ?top_p: Float?, + ?user: String, + ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk] def retrieve: @@ -90,7 +90,7 @@ module OpenAI ) -> OpenAI::Models::Chat::ChatCompletion | ( String completion_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion def update: @@ -102,7 +102,7 @@ module OpenAI | ( String completion_id, metadata: OpenAI::Models::metadata?, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion def list: @@ -111,12 +111,12 @@ module OpenAI | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] | ( - after: String, - limit: Integer, - metadata: OpenAI::Models::metadata?, - model: String, - order: OpenAI::Models::Chat::CompletionListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?metadata: OpenAI::Models::metadata?, + ?model: String, + ?order: OpenAI::Models::Chat::CompletionListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] def delete: @@ -127,7 +127,7 @@ module OpenAI ) -> 
OpenAI::Models::Chat::ChatCompletionDeleted | ( String completion_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletionDeleted def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/chat/completions/messages.rbs b/sig/openai/resources/chat/completions/messages.rbs index cd798db3..cfda0b34 100644 --- a/sig/openai/resources/chat/completions/messages.rbs +++ b/sig/openai/resources/chat/completions/messages.rbs @@ -11,10 +11,10 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] | ( String completion_id, - after: String, - limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Chat::Completions::MessageListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index c04b9ca0..1c0da611 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -8,22 +8,22 @@ module OpenAI | ( model: OpenAI::Models::CompletionCreateParams::model, prompt: OpenAI::Models::CompletionCreateParams::prompt?, - best_of: Integer?, - echo: bool?, - frequency_penalty: Float?, - logit_bias: ::Hash[Symbol, Integer]?, - logprobs: Integer?, - max_tokens: Integer?, - n: Integer?, - presence_penalty: Float?, - seed: Integer?, - stop: OpenAI::Models::CompletionCreateParams::stop?, - stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - suffix: String?, - temperature: Float?, - top_p: Float?, - user: String, - request_options: OpenAI::request_opts + ?best_of: Integer?, + ?echo: bool?, + ?frequency_penalty: Float?, + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: Integer?, + ?max_tokens: Integer?, + ?n: Integer?, + ?presence_penalty: Float?, + ?seed: Integer?, + ?stop: OpenAI::Models::CompletionCreateParams::stop?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?suffix: String?, + ?temperature: Float?, + ?top_p: Float?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Completion def create_streaming: @@ -33,22 +33,22 @@ module OpenAI | ( model: OpenAI::Models::CompletionCreateParams::model, prompt: OpenAI::Models::CompletionCreateParams::prompt?, - best_of: Integer?, - echo: bool?, - frequency_penalty: Float?, - logit_bias: ::Hash[Symbol, Integer]?, - logprobs: Integer?, - max_tokens: Integer?, - n: Integer?, - presence_penalty: Float?, - seed: Integer?, - stop: OpenAI::Models::CompletionCreateParams::stop?, - stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - suffix: String?, - temperature: Float?, - top_p: Float?, - user: String, - request_options: OpenAI::request_opts + ?best_of: Integer?, + ?echo: bool?, + ?frequency_penalty: Float?, + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: Integer?, + ?max_tokens: Integer?, + ?n: Integer?, + ?presence_penalty: Float?, + ?seed: Integer?, + ?stop: OpenAI::Models::CompletionCreateParams::stop?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?suffix: String?, + ?temperature: Float?, + ?top_p: Float?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Completion] def initialize: (client: OpenAI::Client) -> 
void diff --git a/sig/openai/resources/embeddings.rbs b/sig/openai/resources/embeddings.rbs index 05dd3a2d..1f2c2bfe 100644 --- a/sig/openai/resources/embeddings.rbs +++ b/sig/openai/resources/embeddings.rbs @@ -8,10 +8,10 @@ module OpenAI | ( input: OpenAI::Models::EmbeddingCreateParams::input, model: OpenAI::Models::EmbeddingCreateParams::model, - dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, - user: String, - request_options: OpenAI::request_opts + ?dimensions: Integer, + ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::CreateEmbeddingResponse def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index f8a6cae0..d958d65e 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -8,7 +8,7 @@ module OpenAI | ( file: IO | StringIO, purpose: OpenAI::Models::file_purpose, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FileObject def retrieve: @@ -18,7 +18,7 @@ module OpenAI ) -> OpenAI::Models::FileObject | ( String file_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FileObject def list: @@ -26,11 +26,11 @@ module OpenAI ?OpenAI::Models::FileListParams | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::FileObject] | ( - after: String, - limit: Integer, - order: OpenAI::Models::FileListParams::order, - purpose: String, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::FileListParams::order, + ?purpose: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::FileObject] def delete: @@ -40,7 +40,7 @@ module OpenAI ) -> OpenAI::Models::FileDeleted | ( String file_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FileDeleted def content: @@ -48,7 +48,7 @@ module OpenAI String file_id, ?OpenAI::Models::FileContentParams | ::Hash[Symbol, top] params ) -> top - | (String file_id, request_options: OpenAI::request_opts) -> top + | (String file_id, ?request_options: OpenAI::request_opts) -> top def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/fine_tuning/jobs.rbs b/sig/openai/resources/fine_tuning/jobs.rbs index 8bd2b3c9..1d411f33 100644 --- a/sig/openai/resources/fine_tuning/jobs.rbs +++ b/sig/openai/resources/fine_tuning/jobs.rbs @@ -12,14 +12,14 @@ module OpenAI | ( model: OpenAI::Models::FineTuning::JobCreateParams::model, training_file: String, - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, - metadata: OpenAI::Models::metadata?, - method_: OpenAI::Models::FineTuning::JobCreateParams::Method, - seed: Integer?, - suffix: String?, - validation_file: String?, - request_options: OpenAI::request_opts + ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, + ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + ?metadata: OpenAI::Models::metadata?, + ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + ?seed: Integer?, + ?suffix: String?, + ?validation_file: String?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FineTuning::FineTuningJob def retrieve: 
@@ -30,7 +30,7 @@ module OpenAI ) -> OpenAI::Models::FineTuning::FineTuningJob | ( String fine_tuning_job_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FineTuning::FineTuningJob def list: @@ -39,10 +39,10 @@ module OpenAI | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] | ( - after: String, - limit: Integer, - metadata: ::Hash[Symbol, String]?, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?metadata: ::Hash[Symbol, String]?, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] def cancel: @@ -53,7 +53,7 @@ module OpenAI ) -> OpenAI::Models::FineTuning::FineTuningJob | ( String fine_tuning_job_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FineTuning::FineTuningJob def list_events: @@ -64,9 +64,9 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] | ( String fine_tuning_job_id, - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs index 824d9aa0..b4b532a4 100644 --- a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs +++ b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs @@ -11,9 +11,9 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] | ( String fine_tuning_job_id, - after: String, - limit: Integer, - request_options: OpenAI::request_opts + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index ce35a4e6..058dc7d8 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -8,12 +8,12 @@ module OpenAI ) -> OpenAI::Models::ImagesResponse | ( image: IO | StringIO, - model: OpenAI::Models::ImageCreateVariationParams::model?, - n: Integer?, - response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, - size: OpenAI::Models::ImageCreateVariationParams::size?, - user: String, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ImageCreateVariationParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, + ?size: OpenAI::Models::ImageCreateVariationParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::ImagesResponse def edit: @@ -23,13 +23,13 @@ module OpenAI | ( image: IO | StringIO, prompt: String, - mask: IO | StringIO, - model: OpenAI::Models::ImageEditParams::model?, - n: Integer?, - response_format: OpenAI::Models::ImageEditParams::response_format?, - size: OpenAI::Models::ImageEditParams::size?, - user: String, - request_options: OpenAI::request_opts + ?mask: IO | StringIO, + ?model: OpenAI::Models::ImageEditParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageEditParams::response_format?, + ?size: OpenAI::Models::ImageEditParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> 
OpenAI::Models::ImagesResponse def generate: @@ -38,14 +38,14 @@ module OpenAI ) -> OpenAI::Models::ImagesResponse | ( prompt: String, - model: OpenAI::Models::ImageGenerateParams::model?, - n: Integer?, - quality: OpenAI::Models::ImageGenerateParams::quality, - response_format: OpenAI::Models::ImageGenerateParams::response_format?, - size: OpenAI::Models::ImageGenerateParams::size?, - style: OpenAI::Models::ImageGenerateParams::style?, - user: String, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ImageGenerateParams::model?, + ?n: Integer?, + ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, + ?size: OpenAI::Models::ImageGenerateParams::size?, + ?style: OpenAI::Models::ImageGenerateParams::style?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::ImagesResponse def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/models.rbs b/sig/openai/resources/models.rbs index c1ed007f..2b01ceac 100644 --- a/sig/openai/resources/models.rbs +++ b/sig/openai/resources/models.rbs @@ -8,7 +8,7 @@ module OpenAI ) -> OpenAI::Models::Model | ( String model, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Model def list: @@ -16,7 +16,7 @@ module OpenAI ?OpenAI::Models::ModelListParams | ::Hash[Symbol, top] params ) -> OpenAI::Page[OpenAI::Models::Model] | ( - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Page[OpenAI::Models::Model] def delete: @@ -26,7 +26,7 @@ module OpenAI ) -> OpenAI::Models::ModelDeleted | ( String model, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::ModelDeleted def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/moderations.rbs b/sig/openai/resources/moderations.rbs index a91a0487..18063d79 100644 --- a/sig/openai/resources/moderations.rbs +++ b/sig/openai/resources/moderations.rbs @@ -7,8 +7,8 @@ module OpenAI ) -> OpenAI::Models::ModerationCreateResponse | ( input: OpenAI::Models::ModerationCreateParams::input, - model: OpenAI::Models::ModerationCreateParams::model, - request_options: OpenAI::request_opts + ?model: OpenAI::Models::ModerationCreateParams::model, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::ModerationCreateResponse def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index bf3ce99c..2e4c1ae1 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -11,22 +11,22 @@ module OpenAI | ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::Responses::ResponseCreateParams::model, - include: ::Array[OpenAI::Models::Responses::response_includable]?, - instructions: String?, - max_output_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - parallel_tool_calls: bool?, - previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, - store: bool?, - temperature: Float?, - text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - tools: ::Array[OpenAI::Models::Responses::tool], - top_p: Float?, - truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - user: String, - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, 
+ ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Responses::Response def create_streaming: @@ -37,22 +37,22 @@ module OpenAI | ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::Responses::ResponseCreateParams::model, - include: ::Array[OpenAI::Models::Responses::response_includable]?, - instructions: String?, - max_output_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - parallel_tool_calls: bool?, - previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, - store: bool?, - temperature: Float?, - text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - tools: ::Array[OpenAI::Models::Responses::tool], - top_p: Float?, - truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - user: String, - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Stream[OpenAI::Models::Responses::response_stream_event] def retrieve: @@ -63,8 +63,8 @@ module OpenAI ) -> OpenAI::Models::Responses::Response | ( String response_id, - include: ::Array[OpenAI::Models::Responses::response_includable], - request_options: OpenAI::request_opts + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Responses::Response def delete: @@ -73,7 +73,7 @@ module OpenAI ?OpenAI::Models::Responses::ResponseDeleteParams | ::Hash[Symbol, top] params ) -> nil - | (String response_id, request_options: OpenAI::request_opts) -> nil + | (String response_id, ?request_options: OpenAI::request_opts) -> nil def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index 5cc014f8..fa47896a 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -10,11 +10,11 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] | ( String response_id, - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Responses::InputItemListParams::order, + ?request_options: 
OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/uploads.rbs b/sig/openai/resources/uploads.rbs index 116d8e02..631f9449 100644 --- a/sig/openai/resources/uploads.rbs +++ b/sig/openai/resources/uploads.rbs @@ -12,7 +12,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::file_purpose, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Upload def cancel: @@ -22,7 +22,7 @@ module OpenAI ) -> OpenAI::Models::Upload | ( String upload_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Upload def complete: @@ -33,8 +33,8 @@ module OpenAI | ( String upload_id, part_ids: ::Array[String], - md5: String, - request_options: OpenAI::request_opts + ?md5: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Upload def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index 40aa9015..3dc1af0d 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -11,7 +11,7 @@ module OpenAI | ( String upload_id, data: IO | StringIO, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Uploads::UploadPart def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index 34a6d26a..d12bd025 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -10,12 +10,12 @@ module OpenAI ?OpenAI::Models::VectorStoreCreateParams | ::Hash[Symbol, top] params ) -> OpenAI::Models::VectorStore | ( - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, - file_ids: ::Array[String], - metadata: OpenAI::Models::metadata?, - name: String, - request_options: OpenAI::request_opts + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStore def retrieve: @@ -26,7 +26,7 @@ module OpenAI ) -> OpenAI::Models::VectorStore | ( String vector_store_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStore def update: @@ -36,10 +36,10 @@ module OpenAI ) -> OpenAI::Models::VectorStore | ( String vector_store_id, - expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, - metadata: OpenAI::Models::metadata?, - name: String?, - request_options: OpenAI::request_opts + ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + ?metadata: OpenAI::Models::metadata?, + ?name: String?, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStore def list: @@ -47,11 +47,11 @@ module OpenAI ?OpenAI::Models::VectorStoreListParams | ::Hash[Symbol, top] params ) -> OpenAI::CursorPage[OpenAI::Models::VectorStore] | ( - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::VectorStoreListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::VectorStoreListParams::order, + 
?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::VectorStore] def delete: @@ -61,7 +61,7 @@ module OpenAI ) -> OpenAI::Models::VectorStoreDeleted | ( String vector_store_id, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStoreDeleted def search: @@ -72,11 +72,11 @@ module OpenAI | ( String vector_store_id, query: OpenAI::Models::VectorStoreSearchParams::query, - filters: OpenAI::Models::VectorStoreSearchParams::filters, - max_num_results: Integer, - ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, - rewrite_query: bool, - request_options: OpenAI::request_opts + ?filters: OpenAI::Models::VectorStoreSearchParams::filters, + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ?rewrite_query: bool, + ?request_options: OpenAI::request_opts ) -> OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/vector_stores/file_batches.rbs b/sig/openai/resources/vector_stores/file_batches.rbs index 39a8abf0..470b66e3 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -11,9 +11,9 @@ module OpenAI | ( String vector_store_id, file_ids: ::Array[String], - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - request_options: OpenAI::request_opts + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch def retrieve: @@ -25,7 +25,7 @@ module OpenAI | ( String batch_id, vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch def cancel: @@ -37,7 +37,7 @@ module OpenAI | ( String batch_id, vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch def list_files: @@ -49,12 +49,12 @@ module OpenAI | ( String batch_id, vector_store_id: String, - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, - limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index 5abfd68c..8187380b 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -11,9 +11,9 @@ module OpenAI | ( String vector_store_id, file_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, - chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - request_options: OpenAI::request_opts + ?attributes: ::Hash[Symbol, 
OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFile def retrieve: @@ -25,7 +25,7 @@ module OpenAI | ( String file_id, vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFile def update: @@ -38,7 +38,7 @@ module OpenAI String file_id, vector_store_id: String, attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFile def list: @@ -49,12 +49,12 @@ module OpenAI ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] | ( String vector_store_id, - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileListParams::filter, - limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::order, - request_options: OpenAI::request_opts + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileListParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileListParams::order, + ?request_options: OpenAI::request_opts ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] def delete: @@ -66,7 +66,7 @@ module OpenAI | ( String file_id, vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Models::VectorStores::VectorStoreFileDeleted def content: @@ -78,7 +78,7 @@ module OpenAI | ( String file_id, vector_store_id: String, - request_options: OpenAI::request_opts + ?request_options: OpenAI::request_opts ) -> OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse] def initialize: (client: OpenAI::Client) -> void diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs index 375f8324..97336cd4 100644 --- a/sig/openai/util.rbs +++ b/sig/openai/util.rbs @@ -20,12 +20,12 @@ module OpenAI OMIT: top - def self?.deep_merge_lr: (top lhs, top rhs, concat: bool) -> top + def self?.deep_merge_lr: (top lhs, top rhs, ?concat: bool) -> top def self?.deep_merge: ( *::Array[top] values, - sentinel: top?, - concat: bool + ?sentinel: top?, + ?concat: bool ) -> top def self?.dig: ( @@ -102,12 +102,12 @@ module OpenAI def self?.decode_content: ( ::Hash[String, String] headers, stream: Enumerable[String], - suppress_error: bool + ?suppress_error: bool ) -> top def self?.fused_enum: ( Enumerable[top] enum, - external: bool + ?external: bool ) { -> void } -> Enumerable[top] From b5ec213a664d449917cba5c75fbadd12f9e0c25b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 17:36:44 +0000 Subject: [PATCH 037/295] chore: disable overloads in `*.rbs` definitions for readable LSP errors (#42) --- .../models/audio/speech_create_params.rbs | 21 +- sig/openai/models/audio/transcription.rbs | 6 +- .../audio/transcription_create_params.rbs | 25 +- .../models/audio/transcription_segment.rbs | 29 +- .../models/audio/transcription_verbose.rbs | 19 +- .../models/audio/transcription_word.rbs | 6 +- sig/openai/models/audio/translation.rbs | 6 +- .../audio/translation_create_params.rbs | 21 +- .../models/audio/translation_verbose.rbs | 16 +- .../auto_file_chunking_strategy_param.rbs | 7 +- sig/openai/models/batch.rbs | 53 ++- sig/openai/models/batch_cancel_params.rbs | 6 +- 
sig/openai/models/batch_create_params.rbs | 18 +- sig/openai/models/batch_error.rbs | 14 +- sig/openai/models/batch_list_params.rbs | 12 +- sig/openai/models/batch_request_counts.rbs | 10 +- sig/openai/models/batch_retrieve_params.rbs | 6 +- sig/openai/models/beta/assistant.rbs | 59 ++-- .../models/beta/assistant_create_params.rbs | 114 +++---- .../models/beta/assistant_delete_params.rbs | 7 +- sig/openai/models/beta/assistant_deleted.rbs | 10 +- .../models/beta/assistant_list_params.rbs | 19 +- .../models/beta/assistant_retrieve_params.rbs | 7 +- .../models/beta/assistant_stream_event.rbs | 311 ++++++------------ .../models/beta/assistant_tool_choice.rbs | 13 +- .../beta/assistant_tool_choice_function.rbs | 7 +- .../models/beta/assistant_update_params.rbs | 60 ++-- .../models/beta/code_interpreter_tool.rbs | 7 +- sig/openai/models/beta/file_search_tool.rbs | 38 +-- sig/openai/models/beta/function_tool.rbs | 12 +- .../models/beta/message_stream_event.rbs | 65 ++-- .../models/beta/run_step_stream_event.rbs | 91 ++--- sig/openai/models/beta/run_stream_event.rbs | 130 +++----- sig/openai/models/beta/thread.rbs | 43 +-- .../beta/thread_create_and_run_params.rbs | 214 ++++-------- .../models/beta/thread_create_params.rbs | 134 +++----- .../models/beta/thread_delete_params.rbs | 6 +- sig/openai/models/beta/thread_deleted.rbs | 10 +- .../models/beta/thread_retrieve_params.rbs | 7 +- .../models/beta/thread_stream_event.rbs | 14 +- .../models/beta/thread_update_params.rbs | 41 +-- .../beta/threads/file_citation_annotation.rbs | 26 +- .../file_citation_delta_annotation.rbs | 28 +- .../beta/threads/file_path_annotation.rbs | 26 +- .../threads/file_path_delta_annotation.rbs | 28 +- sig/openai/models/beta/threads/image_file.rbs | 13 +- .../beta/threads/image_file_content_block.rbs | 13 +- .../models/beta/threads/image_file_delta.rbs | 13 +- .../beta/threads/image_file_delta_block.rbs | 15 +- sig/openai/models/beta/threads/image_url.rbs | 12 +- .../beta/threads/image_url_content_block.rbs | 13 +- .../models/beta/threads/image_url_delta.rbs | 13 +- .../beta/threads/image_url_delta_block.rbs | 15 +- sig/openai/models/beta/threads/message.rbs | 67 ++-- .../beta/threads/message_create_params.rbs | 39 +-- .../beta/threads/message_delete_params.rbs | 10 +- .../models/beta/threads/message_deleted.rbs | 15 +- .../models/beta/threads/message_delta.rbs | 13 +- .../beta/threads/message_delta_event.rbs | 15 +- .../beta/threads/message_list_params.rbs | 21 +- .../beta/threads/message_retrieve_params.rbs | 10 +- .../beta/threads/message_update_params.rbs | 15 +- .../beta/threads/refusal_content_block.rbs | 7 +- .../beta/threads/refusal_delta_block.rbs | 11 +- .../required_action_function_tool_call.rbs | 22 +- sig/openai/models/beta/threads/run.rbs | 138 +++----- .../models/beta/threads/run_cancel_params.rbs | 10 +- .../models/beta/threads/run_create_params.rbs | 95 ++---- .../models/beta/threads/run_list_params.rbs | 19 +- .../beta/threads/run_retrieve_params.rbs | 10 +- .../run_submit_tool_outputs_params.rbs | 22 +- .../models/beta/threads/run_update_params.rbs | 15 +- .../threads/runs/code_interpreter_logs.rbs | 11 +- .../runs/code_interpreter_output_image.rbs | 22 +- .../runs/code_interpreter_tool_call.rbs | 55 +--- .../runs/code_interpreter_tool_call_delta.rbs | 30 +- .../threads/runs/file_search_tool_call.rbs | 71 ++-- .../runs/file_search_tool_call_delta.rbs | 17 +- .../beta/threads/runs/function_tool_call.rbs | 26 +- .../threads/runs/function_tool_call_delta.rbs | 28 +- 
.../runs/message_creation_step_details.rbs | 20 +- .../models/beta/threads/runs/run_step.rbs | 69 ++-- .../beta/threads/runs/run_step_delta.rbs | 11 +- .../threads/runs/run_step_delta_event.rbs | 15 +- .../runs/run_step_delta_message_delta.rbs | 20 +- .../beta/threads/runs/step_list_params.rbs | 23 +- .../threads/runs/step_retrieve_params.rbs | 17 +- .../threads/runs/tool_call_delta_object.rbs | 13 +- .../threads/runs/tool_calls_step_details.rbs | 13 +- sig/openai/models/beta/threads/text.rbs | 12 +- .../beta/threads/text_content_block.rbs | 10 +- .../beta/threads/text_content_block_param.rbs | 7 +- sig/openai/models/beta/threads/text_delta.rbs | 13 +- .../models/beta/threads/text_delta_block.rbs | 15 +- sig/openai/models/chat/chat_completion.rbs | 54 ++- ...hat_completion_assistant_message_param.rbs | 37 +-- .../models/chat/chat_completion_audio.rbs | 17 +- .../chat/chat_completion_audio_param.rbs | 13 +- .../models/chat/chat_completion_chunk.rbs | 105 ++---- .../chat/chat_completion_content_part.rbs | 24 +- .../chat_completion_content_part_image.rbs | 26 +- ...at_completion_content_part_input_audio.rbs | 26 +- .../chat_completion_content_part_refusal.rbs | 7 +- .../chat_completion_content_part_text.rbs | 7 +- .../models/chat/chat_completion_deleted.rbs | 15 +- ...hat_completion_developer_message_param.rbs | 15 +- .../chat_completion_function_call_option.rbs | 7 +- ...chat_completion_function_message_param.rbs | 11 +- .../models/chat/chat_completion_message.rbs | 60 ++-- .../chat_completion_message_tool_call.rbs | 22 +- .../chat_completion_named_tool_choice.rbs | 20 +- .../chat_completion_prediction_content.rbs | 13 +- .../chat/chat_completion_store_message.rbs | 7 +- .../chat/chat_completion_stream_options.rbs | 7 +- .../chat_completion_system_message_param.rbs | 15 +- .../chat/chat_completion_token_logprob.rbs | 28 +- .../models/chat/chat_completion_tool.rbs | 12 +- .../chat_completion_tool_message_param.rbs | 15 +- .../chat_completion_user_message_param.rbs | 15 +- .../models/chat/completion_create_params.rbs | 129 +++----- .../models/chat/completion_delete_params.rbs | 7 +- .../models/chat/completion_list_params.rbs | 21 +- .../chat/completion_retrieve_params.rbs | 7 +- .../models/chat/completion_update_params.rbs | 13 +- .../chat/completions/message_list_params.rbs | 17 +- sig/openai/models/comparison_filter.rbs | 12 +- sig/openai/models/completion.rbs | 20 +- sig/openai/models/completion_choice.rbs | 30 +- .../models/completion_create_params.rbs | 44 ++- sig/openai/models/completion_usage.rbs | 43 +-- sig/openai/models/compound_filter.rbs | 10 +- .../models/create_embedding_response.rbs | 23 +- sig/openai/models/embedding.rbs | 8 +- sig/openai/models/embedding_create_params.rbs | 20 +- sig/openai/models/error_object.rbs | 9 +- sig/openai/models/file_content_params.rbs | 6 +- sig/openai/models/file_create_params.rbs | 12 +- sig/openai/models/file_delete_params.rbs | 4 +- sig/openai/models/file_deleted.rbs | 4 +- sig/openai/models/file_list_params.rbs | 16 +- sig/openai/models/file_object.rbs | 24 +- sig/openai/models/file_retrieve_params.rbs | 6 +- .../models/fine_tuning/fine_tuning_job.rbs | 142 +++----- .../fine_tuning/fine_tuning_job_event.rbs | 23 +- .../fine_tuning_job_wandb_integration.rbs | 17 +- ...ne_tuning_job_wandb_integration_object.rbs | 13 +- .../models/fine_tuning/job_cancel_params.rbs | 7 +- .../models/fine_tuning/job_create_params.rbs | 143 +++----- .../fine_tuning/job_list_events_params.rbs | 15 +- .../models/fine_tuning/job_list_params.rbs | 17 +- 
.../fine_tuning/job_retrieve_params.rbs | 7 +- .../jobs/checkpoint_list_params.rbs | 15 +- .../jobs/fine_tuning_job_checkpoint.rbs | 46 +-- sig/openai/models/function_definition.rbs | 16 +- sig/openai/models/image.rbs | 8 +- .../models/image_create_variation_params.rbs | 23 +- sig/openai/models/image_edit_params.rbs | 24 +- sig/openai/models/image_generate_params.rbs | 26 +- sig/openai/models/images_response.rbs | 7 +- sig/openai/models/model.rbs | 14 +- sig/openai/models/model_delete_params.rbs | 6 +- sig/openai/models/model_deleted.rbs | 4 +- sig/openai/models/model_list_params.rbs | 4 +- sig/openai/models/model_retrieve_params.rbs | 6 +- sig/openai/models/moderation.rbs | 118 +++---- .../models/moderation_create_params.rbs | 14 +- .../models/moderation_create_response.rbs | 14 +- .../models/moderation_image_url_input.rbs | 19 +- sig/openai/models/moderation_text_input.rbs | 6 +- .../other_file_chunking_strategy_object.rbs | 7 +- sig/openai/models/reasoning.rbs | 10 +- .../models/response_format_json_object.rbs | 6 +- .../models/response_format_json_schema.rbs | 29 +- sig/openai/models/response_format_text.rbs | 6 +- sig/openai/models/responses/computer_tool.rbs | 16 +- .../models/responses/easy_input_message.rbs | 15 +- .../models/responses/file_search_tool.rbs | 32 +- sig/openai/models/responses/function_tool.rbs | 18 +- .../responses/input_item_list_params.rbs | 19 +- sig/openai/models/responses/response.rbs | 63 ++-- .../responses/response_audio_delta_event.rbs | 7 +- .../responses/response_audio_done_event.rbs | 7 +- .../response_audio_transcript_delta_event.rbs | 10 +- .../response_audio_transcript_done_event.rbs | 7 +- ...code_interpreter_call_code_delta_event.rbs | 15 +- ..._code_interpreter_call_code_done_event.rbs | 15 +- ..._code_interpreter_call_completed_event.rbs | 15 +- ...ode_interpreter_call_in_progress_event.rbs | 15 +- ...de_interpreter_call_interpreting_event.rbs | 15 +- .../response_code_interpreter_tool_call.rbs | 46 +-- .../responses/response_completed_event.rbs | 13 +- .../responses/response_computer_tool_call.rbs | 130 +++----- ...esponse_computer_tool_call_output_item.rbs | 28 +- ...e_computer_tool_call_output_screenshot.rbs | 15 +- .../response_content_part_added_event.rbs | 19 +- .../response_content_part_done_event.rbs | 19 +- .../responses/response_create_params.rbs | 45 ++- .../responses/response_created_event.rbs | 13 +- .../responses/response_delete_params.rbs | 7 +- .../models/responses/response_error.rbs | 12 +- .../models/responses/response_error_event.rbs | 17 +- .../responses/response_failed_event.rbs | 13 +- ...ponse_file_search_call_completed_event.rbs | 15 +- ...nse_file_search_call_in_progress_event.rbs | 15 +- ...ponse_file_search_call_searching_event.rbs | 15 +- .../response_file_search_tool_call.rbs | 38 +-- ...esponse_format_text_json_schema_config.rbs | 19 +- ...se_function_call_arguments_delta_event.rbs | 17 +- ...nse_function_call_arguments_done_event.rbs | 17 +- .../responses/response_function_tool_call.rbs | 21 +- .../response_function_tool_call_item.rbs | 7 +- ...esponse_function_tool_call_output_item.rbs | 19 +- .../response_function_web_search.rbs | 15 +- .../responses/response_in_progress_event.rbs | 13 +- .../responses/response_incomplete_event.rbs | 13 +- .../models/responses/response_input_audio.rbs | 15 +- .../models/responses/response_input_file.rbs | 17 +- .../models/responses/response_input_image.rbs | 17 +- .../models/responses/response_input_item.rbs | 71 ++-- .../responses/response_input_message_item.rbs | 19 +- 
 .../models/responses/response_input_text.rbs  |   7 +-
 .../models/responses/response_item_list.rbs   |  19 +-
 .../responses/response_output_audio.rbs       |  11 +-
 .../response_output_item_added_event.rbs      |  15 +-
 .../response_output_item_done_event.rbs       |  15 +-
 .../responses/response_output_message.rbs     |  19 +-
 .../responses/response_output_refusal.rbs     |   7 +-
 .../models/responses/response_output_text.rbs |  56 ++--
 .../responses/response_reasoning_item.rbs     |  24 +-
 .../response_refusal_delta_event.rbs          |  19 +-
 .../responses/response_refusal_done_event.rbs |  19 +-
 .../responses/response_retrieve_params.rbs    |  13 +-
 .../response_text_annotation_delta_event.rbs  |  62 ++--
 .../models/responses/response_text_config.rbs |  11 +-
 .../responses/response_text_delta_event.rbs   |  19 +-
 .../responses/response_text_done_event.rbs    |  19 +-
 .../models/responses/response_usage.rbs       |  32 +-
 ...sponse_web_search_call_completed_event.rbs |  15 +-
 ...onse_web_search_call_in_progress_event.rbs |  15 +-
 ...sponse_web_search_call_searching_event.rbs |  15 +-
 .../models/responses/tool_choice_function.rbs |   7 +-
 .../models/responses/tool_choice_types.rbs    |   9 +-
 .../models/responses/web_search_tool.rbs      |  33 +-
 .../models/static_file_chunking_strategy.rbs  |  10 +-
 .../static_file_chunking_strategy_object.rbs  |  13 +-
 ...ic_file_chunking_strategy_object_param.rbs |  13 +-
 sig/openai/models/upload.rbs                  |  24 +-
 sig/openai/models/upload_cancel_params.rbs    |   6 +-
 sig/openai/models/upload_complete_params.rbs  |  14 +-
 sig/openai/models/upload_create_params.rbs    |  18 +-
 .../models/uploads/part_create_params.rbs     |  10 +-
 sig/openai/models/uploads/upload_part.rbs     |  16 +-
 sig/openai/models/vector_store.rbs            |  52 ++-
 .../models/vector_store_create_params.rbs     |  27 +-
 .../models/vector_store_delete_params.rbs     |   6 +-
 sig/openai/models/vector_store_deleted.rbs    |  10 +-
 .../models/vector_store_list_params.rbs       |  18 +-
 .../models/vector_store_retrieve_params.rbs   |   6 +-
 .../models/vector_store_search_params.rbs     |  33 +-
 .../models/vector_store_search_response.rbs   |  31 +-
 .../models/vector_store_update_params.rbs     |  23 +-
 .../file_batch_cancel_params.rbs              |  13 +-
 .../file_batch_create_params.rbs              |  17 +-
 .../file_batch_list_files_params.rbs          |  23 +-
 .../file_batch_retrieve_params.rbs            |  13 +-
 .../vector_stores/file_content_params.rbs     |  13 +-
 .../vector_stores/file_content_response.rbs   |   7 +-
 .../vector_stores/file_create_params.rbs      |  17 +-
 .../vector_stores/file_delete_params.rbs      |  13 +-
 .../models/vector_stores/file_list_params.rbs |  21 +-
 .../vector_stores/file_retrieve_params.rbs    |  13 +-
 .../vector_stores/file_update_params.rbs      |  15 +-
 .../vector_stores/vector_store_file.rbs       |  40 +--
 .../vector_stores/vector_store_file_batch.rbs |  40 +--
 .../vector_store_file_deleted.rbs             |  15 +-
 sig/openai/resources/audio/speech.rbs         |  21 +-
 sig/openai/resources/audio/transcriptions.rbs |  25 +-
 sig/openai/resources/audio/translations.rbs   |  21 +-
 sig/openai/resources/batches.rbs              |  58 ++--
 sig/openai/resources/beta/assistants.rbs      | 116 +++---
 sig/openai/resources/beta/threads.rbs         | 145 ++++----
 .../resources/beta/threads/messages.rbs       |  96 ++----
 sig/openai/resources/beta/threads/runs.rbs    | 204 +++++------
 .../resources/beta/threads/runs/steps.rbs     |  46 +--
 sig/openai/resources/chat/completions.rbs     | 207 +++++------
 .../resources/chat/completions/messages.rbs   |  20 +-
 sig/openai/resources/completions.rbs          |  88 +++--
 sig/openai/resources/embeddings.rbs           |  20 +-
 sig/openai/resources/files.rbs                |  68 ++--
 sig/openai/resources/fine_tuning/jobs.rbs     |  92 ++----
 .../fine_tuning/jobs/checkpoints.rbs          |  18 +-
 sig/openai/resources/images.rbs               |  75 ++---
 sig/openai/resources/models.rbs               |  36 +-
 sig/openai/resources/moderations.rbs          |  14 +-
 sig/openai/resources/responses.rbs            | 117 +++---
 .../resources/responses/input_items.rbs       |  22 +-
 sig/openai/resources/uploads.rbs              |  48 +--
 sig/openai/resources/uploads/parts.rbs        |  16 +-
 sig/openai/resources/vector_stores.rbs        | 117 +++---
 .../resources/vector_stores/file_batches.rbs  |  78 ++---
 sig/openai/resources/vector_stores/files.rbs  | 110 +++---
 301 files changed, 3081 insertions(+), 5713 deletions(-)

diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs
index 2b81b56f..ae9debbf 100644
--- a/sig/openai/models/audio/speech_create_params.rbs
+++ b/sig/openai/models/audio/speech_create_params.rbs
@@ -31,19 +31,14 @@ module OpenAI
 
         def speed=: (Float) -> Float
 
-        def initialize:
-          (
-            input: String,
-            model: OpenAI::Models::Audio::SpeechCreateParams::model,
-            voice: OpenAI::Models::Audio::SpeechCreateParams::voice,
-            ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
-            ?speed: Float,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::speech_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          input: String,
+          model: OpenAI::Models::Audio::SpeechCreateParams::model,
+          voice: OpenAI::Models::Audio::SpeechCreateParams::voice,
+          ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
+          ?speed: Float,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::speech_create_params
 
diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs
index 6f7b7c9f..0ea5f955 100644
--- a/sig/openai/models/audio/transcription.rbs
+++ b/sig/openai/models/audio/transcription.rbs
@@ -6,11 +6,7 @@ module OpenAI
       class Transcription < OpenAI::BaseModel
         attr_accessor text: String
 
-        def initialize:
-          (text: String) -> void
-          | (
-            ?OpenAI::Models::Audio::transcription | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (text: String) -> void
 
         def to_hash: -> OpenAI::Models::Audio::transcription
       end
diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs
index 06a30723..16280f3e 100644
--- a/sig/openai/models/audio/transcription_create_params.rbs
+++ b/sig/openai/models/audio/transcription_create_params.rbs
@@ -45,21 +45,16 @@ module OpenAI
           ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]
         ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]
 
-        def initialize:
-          (
-            file: IO | StringIO,
-            model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
-            ?language: String,
-            ?prompt: String,
-            ?response_format: OpenAI::Models::audio_response_format,
-            ?temperature: Float,
-            ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::transcription_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          file: IO | StringIO,
+          model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
+          ?language: String,
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::transcription_create_params
 
diff --git a/sig/openai/models/audio/transcription_segment.rbs b/sig/openai/models/audio/transcription_segment.rbs
index a721f545..85af1669 100644
--- a/sig/openai/models/audio/transcription_segment.rbs
+++ b/sig/openai/models/audio/transcription_segment.rbs
@@ -36,23 +36,18 @@ module OpenAI
 
         attr_accessor tokens: ::Array[Integer]
 
-        def initialize:
-          (
-            id: Integer,
-            avg_logprob: Float,
-            compression_ratio: Float,
-            end_: Float,
-            no_speech_prob: Float,
-            seek: Integer,
-            start: Float,
-            temperature: Float,
-            text: String,
-            tokens: ::Array[Integer]
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::transcription_segment
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: Integer,
+          avg_logprob: Float,
+          compression_ratio: Float,
+          end_: Float,
+          no_speech_prob: Float,
+          seek: Integer,
+          start: Float,
+          temperature: Float,
+          text: String,
+          tokens: ::Array[Integer]
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::transcription_segment
       end
diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs
index 95ef5685..9546e704 100644
--- a/sig/openai/models/audio/transcription_verbose.rbs
+++ b/sig/openai/models/audio/transcription_verbose.rbs
@@ -29,18 +29,13 @@ module OpenAI
           ::Array[OpenAI::Models::Audio::TranscriptionWord]
         ) -> ::Array[OpenAI::Models::Audio::TranscriptionWord]
 
-        def initialize:
-          (
-            duration: Float,
-            language: String,
-            text: String,
-            ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment],
-            ?words: ::Array[OpenAI::Models::Audio::TranscriptionWord]
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::transcription_verbose
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          duration: Float,
+          language: String,
+          text: String,
+          ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment],
+          ?words: ::Array[OpenAI::Models::Audio::TranscriptionWord]
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::transcription_verbose
       end
diff --git a/sig/openai/models/audio/transcription_word.rbs b/sig/openai/models/audio/transcription_word.rbs
index 17a2ca32..5b9eee66 100644
--- a/sig/openai/models/audio/transcription_word.rbs
+++ b/sig/openai/models/audio/transcription_word.rbs
@@ -10,11 +10,7 @@ module OpenAI
 
         attr_accessor word: String
 
-        def initialize:
-          (end_: Float, start: Float, word: String) -> void
-          | (
-            ?OpenAI::Models::Audio::transcription_word | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (end_: Float, start: Float, word: String) -> void
 
         def to_hash: -> OpenAI::Models::Audio::transcription_word
       end
diff --git a/sig/openai/models/audio/translation.rbs b/sig/openai/models/audio/translation.rbs
index e16987a3..4a88fe3f 100644
--- a/sig/openai/models/audio/translation.rbs
+++ b/sig/openai/models/audio/translation.rbs
@@ -6,11 +6,7 @@ module OpenAI
       class Translation < OpenAI::BaseModel
         attr_accessor text: String
 
-        def initialize:
-          (text: String) -> void
-          | (
-            ?OpenAI::Models::Audio::translation | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (text: String) -> void
 
         def to_hash: -> OpenAI::Models::Audio::translation
       end
diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs
index 3cabef67..252ed2dc 100644
--- a/sig/openai/models/audio/translation_create_params.rbs
+++ b/sig/openai/models/audio/translation_create_params.rbs
@@ -33,19 +33,14 @@ module OpenAI
 
         def temperature=: (Float) -> Float
 
-        def initialize:
-          (
-            file: IO | StringIO,
-            model: OpenAI::Models::Audio::TranslationCreateParams::model,
-            ?prompt: String,
-            ?response_format: OpenAI::Models::audio_response_format,
-            ?temperature: Float,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::translation_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          file: IO | StringIO,
+          model: OpenAI::Models::Audio::TranslationCreateParams::model,
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::translation_create_params
 
diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs
index 11c8b1ba..0a805f53 100644
--- a/sig/openai/models/audio/translation_verbose.rbs
+++ b/sig/openai/models/audio/translation_verbose.rbs
@@ -22,16 +22,12 @@ module OpenAI
           ::Array[OpenAI::Models::Audio::TranscriptionSegment]
         ) -> ::Array[OpenAI::Models::Audio::TranscriptionSegment]
 
-        def initialize:
-          (
-            duration: Float,
-            language: String,
-            text: String,
-            ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]
-          ) -> void
-          | (
-            ?OpenAI::Models::Audio::translation_verbose | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          duration: Float,
+          language: String,
+          text: String,
+          ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Audio::translation_verbose
       end
diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs
index c97d8d48..49630c41 100644
--- a/sig/openai/models/auto_file_chunking_strategy_param.rbs
+++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs
@@ -5,12 +5,7 @@ module OpenAI
     class AutoFileChunkingStrategyParam < OpenAI::BaseModel
      attr_accessor type: :auto
 
-      def initialize:
-        (?type: :auto) -> void
-        | (
-          ?OpenAI::Models::auto_file_chunking_strategy_param
-          | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?type: :auto) -> void
 
       def to_hash: -> OpenAI::Models::auto_file_chunking_strategy_param
     end
diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs
index fc897fa2..278b30f0 100644
--- a/sig/openai/models/batch.rbs
+++ b/sig/openai/models/batch.rbs
@@ -93,30 +93,28 @@ module OpenAI
         OpenAI::Models::BatchRequestCounts
       ) -> OpenAI::Models::BatchRequestCounts
 
-      def initialize:
-        (
-          id: String,
-          completion_window: String,
-          created_at: Integer,
-          endpoint: String,
-          input_file_id: String,
-          status: OpenAI::Models::Batch::status,
-          ?cancelled_at: Integer,
-          ?cancelling_at: Integer,
-          ?completed_at: Integer,
-          ?error_file_id: String,
-          ?errors: OpenAI::Models::Batch::Errors,
-          ?expired_at: Integer,
-          ?expires_at: Integer,
-          ?failed_at: Integer,
-          ?finalizing_at: Integer,
-          ?in_progress_at: Integer,
-          ?metadata: OpenAI::Models::metadata?,
-          ?output_file_id: String,
-          ?request_counts: OpenAI::Models::BatchRequestCounts,
-          ?object: :batch
-        ) -> void
-        | (?OpenAI::Models::batch | OpenAI::BaseModel data) -> void
+      def initialize: (
+        id: String,
+        completion_window: String,
+        created_at: Integer,
+        endpoint: String,
+        input_file_id: String,
+        status: OpenAI::Models::Batch::status,
+        ?cancelled_at: Integer,
+        ?cancelling_at: Integer,
+        ?completed_at: Integer,
+        ?error_file_id: String,
+        ?errors: OpenAI::Models::Batch::Errors,
+        ?expired_at: Integer,
+        ?expires_at: Integer,
+        ?failed_at: Integer,
+        ?finalizing_at: Integer,
+        ?in_progress_at: Integer,
+        ?metadata: OpenAI::Models::metadata?,
+        ?output_file_id: String,
+        ?request_counts: OpenAI::Models::BatchRequestCounts,
+        ?object: :batch
+      ) -> void
 
       def to_hash: -> OpenAI::Models::batch
 
@@ -157,9 +155,10 @@ module OpenAI
 
         def object=: (String) -> String
 
-        def initialize:
-          (?data: ::Array[OpenAI::Models::BatchError], ?object: String) -> void
-          | (?OpenAI::Models::Batch::errors | OpenAI::BaseModel data) -> void
+        def initialize: (
+          ?data: ::Array[OpenAI::Models::BatchError],
+          ?object: String
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Batch::errors
       end
diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs
index cff10508..89722862 100644
--- a/sig/openai/models/batch_cancel_params.rbs
+++ b/sig/openai/models/batch_cancel_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
       include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-        | (
-          ?OpenAI::Models::batch_cancel_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
       def to_hash: -> OpenAI::Models::batch_cancel_params
     end
diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs
index 6706d5f8..5a459418 100644
--- a/sig/openai/models/batch_create_params.rbs
+++ b/sig/openai/models/batch_create_params.rbs
@@ -21,17 +21,13 @@ module OpenAI
 
       attr_accessor metadata: OpenAI::Models::metadata?
 
-      def initialize:
-        (
-          completion_window: OpenAI::Models::BatchCreateParams::completion_window,
-          endpoint: OpenAI::Models::BatchCreateParams::endpoint,
-          input_file_id: String,
-          ?metadata: OpenAI::Models::metadata?,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (
-          ?OpenAI::Models::batch_create_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        completion_window: OpenAI::Models::BatchCreateParams::completion_window,
+        endpoint: OpenAI::Models::BatchCreateParams::endpoint,
+        input_file_id: String,
+        ?metadata: OpenAI::Models::metadata?,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::batch_create_params
 
diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs
index 6818a5f8..b2aadd8e 100644
--- a/sig/openai/models/batch_error.rbs
+++ b/sig/openai/models/batch_error.rbs
@@ -16,14 +16,12 @@ module OpenAI
 
       attr_accessor param: String?
 
-      def initialize:
-        (
-          ?code: String,
-          ?line: Integer?,
-          ?message: String,
-          ?param: String?
-        ) -> void
-        | (?OpenAI::Models::batch_error | OpenAI::BaseModel data) -> void
+      def initialize: (
+        ?code: String,
+        ?line: Integer?,
+        ?message: String,
+        ?param: String?
+      ) -> void
 
       def to_hash: -> OpenAI::Models::batch_error
     end
diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs
index f82c752c..16b65628 100644
--- a/sig/openai/models/batch_list_params.rbs
+++ b/sig/openai/models/batch_list_params.rbs
@@ -15,13 +15,11 @@ module OpenAI
 
       def limit=: (Integer) -> Integer
 
-      def initialize:
-        (
-          ?after: String,
-          ?limit: Integer,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (?OpenAI::Models::batch_list_params | OpenAI::BaseModel data) -> void
+      def initialize: (
+        ?after: String,
+        ?limit: Integer,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::batch_list_params
     end
diff --git a/sig/openai/models/batch_request_counts.rbs b/sig/openai/models/batch_request_counts.rbs
index d2a56b3b..0020eb38 100644
--- a/sig/openai/models/batch_request_counts.rbs
+++ b/sig/openai/models/batch_request_counts.rbs
@@ -10,11 +10,11 @@ module OpenAI
 
       attr_accessor total: Integer
 
-      def initialize:
-        (completed: Integer, failed: Integer, total: Integer) -> void
-        | (
-          ?OpenAI::Models::batch_request_counts | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        completed: Integer,
+        failed: Integer,
+        total: Integer
+      ) -> void
 
       def to_hash: -> OpenAI::Models::batch_request_counts
     end
diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs
index 684b6d6e..a7d75dad 100644
--- a/sig/openai/models/batch_retrieve_params.rbs
+++ b/sig/openai/models/batch_retrieve_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
      include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-        | (
-          ?OpenAI::Models::batch_retrieve_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
       def to_hash: -> OpenAI::Models::batch_retrieve_params
     end
diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs
index 498d0353..91f1b5ee 100644
--- a/sig/openai/models/beta/assistant.rbs
+++ b/sig/openai/models/beta/assistant.rbs
@@ -45,23 +45,21 @@ module OpenAI
 
         attr_accessor top_p: Float?
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            description: String?,
-            instructions: String?,
-            metadata: OpenAI::Models::metadata?,
-            model: String,
-            name: String?,
-            tools: ::Array[OpenAI::Models::Beta::assistant_tool],
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?,
-            ?top_p: Float?,
-            ?object: :assistant
-          ) -> void
-          | (?OpenAI::Models::Beta::assistant | OpenAI::BaseModel data) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          description: String?,
+          instructions: String?,
+          metadata: OpenAI::Models::metadata?,
+          model: String,
+          name: String?,
+          tools: ::Array[OpenAI::Models::Beta::assistant_tool],
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?,
+          ?top_p: Float?,
+          ?object: :assistant
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant
 
@@ -84,15 +82,10 @@ module OpenAI
             OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
           ) -> OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
 
-          def initialize:
-            (
-              ?code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter,
-              ?file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::Assistant::tool_resources
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter,
+            ?file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::Assistant::tool_resources
 
@@ -103,12 +96,7 @@ module OpenAI
 
             def file_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?file_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::Assistant::ToolResources::code_interpreter
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?file_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::Assistant::ToolResources::code_interpreter
           end
@@ -120,12 +108,7 @@ module OpenAI
 
             def vector_store_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?vector_store_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::Assistant::ToolResources::file_search
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?vector_store_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::Assistant::ToolResources::file_search
           end
diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs
index ffa5e021..d4a0708e 100644
--- a/sig/openai/models/beta/assistant_create_params.rbs
+++ b/sig/openai/models/beta/assistant_create_params.rbs
@@ -47,25 +47,20 @@ module OpenAI
 
         attr_accessor top_p: Float?
 
-        def initialize:
-          (
-            model: OpenAI::Models::Beta::AssistantCreateParams::model,
-            ?description: String?,
-            ?instructions: String?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?name: String?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
-            ?top_p: Float?,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          model: OpenAI::Models::Beta::AssistantCreateParams::model,
+          ?description: String?,
+          ?instructions: String?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?name: String?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
+          ?top_p: Float?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_create_params
 
@@ -94,15 +89,10 @@ module OpenAI
             OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch
           ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch
 
-          def initialize:
-            (
-              ?code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter,
-              ?file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantCreateParams::tool_resources
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter,
+            ?file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::tool_resources
 
@@ -113,12 +103,7 @@ module OpenAI
 
             def file_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?file_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::code_interpreter
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?file_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::code_interpreter
           end
@@ -140,15 +125,10 @@ module OpenAI
               ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
             ) -> ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
 
-            def initialize:
-              (
-                ?vector_store_ids: ::Array[String],
-                ?vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
-              ) -> void
-              | (
-                ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::file_search
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?vector_store_ids: ::Array[String],
+              ?vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::file_search
 
@@ -172,16 +152,11 @@ module OpenAI
 
              attr_accessor metadata: OpenAI::Models::metadata?
 
-              def initialize:
-                (
-                  ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy,
-                  ?file_ids: ::Array[String],
-                  ?metadata: OpenAI::Models::metadata?
-                ) -> void
-                | (
-                  ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::vector_store
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy,
+                ?file_ids: ::Array[String],
+                ?metadata: OpenAI::Models::metadata?
+              ) -> void
 
               def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::vector_store
 
@@ -195,12 +170,7 @@ module OpenAI
                 class Auto < OpenAI::BaseModel
                   attr_accessor type: :auto
 
-                  def initialize:
-                    (?type: :auto) -> void
-                    | (
-                      ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto
-                      | OpenAI::BaseModel data
-                    ) -> void
+                  def initialize: (?type: :auto) -> void
 
                   def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto
                 end
@@ -216,15 +186,10 @@ module OpenAI
 
                   attr_accessor type: :static
 
-                  def initialize:
-                    (
-                      static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
-                      ?type: :static
-                    ) -> void
-                    | (
-                      ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static
-                      | OpenAI::BaseModel data
-                    ) -> void
+                  def initialize: (
+                    static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
+                    ?type: :static
+                  ) -> void
 
                   def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static
 
@@ -239,15 +204,10 @@ module OpenAI
 
                     attr_accessor max_chunk_size_tokens: Integer
 
-                    def initialize:
-                      (
-                        chunk_overlap_tokens: Integer,
-                        max_chunk_size_tokens: Integer
-                      ) -> void
-                      | (
-                        ?OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static
-                        | OpenAI::BaseModel data
-                      ) -> void
+                    def initialize: (
+                      chunk_overlap_tokens: Integer,
+                      max_chunk_size_tokens: Integer
+                    ) -> void
 
                     def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static
                   end
diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs
index cddab789..391db3a6 100644
--- a/sig/openai/models/beta/assistant_delete_params.rbs
+++ b/sig/openai/models/beta/assistant_delete_params.rbs
@@ -7,12 +7,7 @@ module OpenAI
         extend OpenAI::RequestParameters::Converter
         include OpenAI::RequestParameters
 
-        def initialize:
-          (?request_options: OpenAI::request_opts) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_delete_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?request_options: OpenAI::request_opts) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_delete_params
       end
diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs
index 6b58f13d..39b34057 100644
--- a/sig/openai/models/beta/assistant_deleted.rbs
+++ b/sig/openai/models/beta/assistant_deleted.rbs
@@ -11,11 +11,11 @@ module OpenAI
 
         attr_accessor object: :"assistant.deleted"
 
-        def initialize:
-          (id: String, deleted: bool, ?object: :"assistant.deleted") -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_deleted | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          deleted: bool,
+          ?object: :"assistant.deleted"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_deleted
       end
diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs
index 8d300b0b..89fbdc93 100644
--- a/sig/openai/models/beta/assistant_list_params.rbs
+++ b/sig/openai/models/beta/assistant_list_params.rbs
@@ -32,18 +32,13 @@ module OpenAI
           OpenAI::Models::Beta::AssistantListParams::order
         ) -> OpenAI::Models::Beta::AssistantListParams::order
 
-        def initialize:
-          (
-            ?after: String,
-            ?before: String,
-            ?limit: Integer,
-            ?order: OpenAI::Models::Beta::AssistantListParams::order,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_list_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?after: String,
+          ?before: String,
+          ?limit: Integer,
+          ?order: OpenAI::Models::Beta::AssistantListParams::order,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_list_params
 
diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs
index 16e5a79b..bb0b45bb 100644
--- a/sig/openai/models/beta/assistant_retrieve_params.rbs
+++ b/sig/openai/models/beta/assistant_retrieve_params.rbs
@@ -7,12 +7,7 @@ module OpenAI
         extend OpenAI::RequestParameters::Converter
         include OpenAI::RequestParameters
 
-        def initialize:
-          (?request_options: OpenAI::request_opts) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_retrieve_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?request_options: OpenAI::request_opts) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_retrieve_params
       end
diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs
index 09fd2e28..3cff7d58 100644
--- a/sig/openai/models/beta/assistant_stream_event.rbs
+++ b/sig/openai/models/beta/assistant_stream_event.rbs
@@ -44,16 +44,11 @@ module OpenAI
 
           def enabled=: (bool) -> bool
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Thread,
-              ?enabled: bool,
-              ?event: :"thread.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Thread,
+            ?enabled: bool,
+            ?event: :"thread.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_created
         end
@@ -69,15 +64,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_created
         end
@@ -93,15 +83,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.queued"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.queued"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_queued
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.queued"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_queued
         end
@@ -117,15 +102,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_in_progress
         end
@@ -141,15 +121,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.requires_action"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.requires_action"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_requires_action
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.requires_action"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_requires_action
         end
@@ -165,15 +140,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_completed
         end
@@ -189,15 +159,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.incomplete"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.incomplete"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_incomplete
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.incomplete"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_incomplete
         end
@@ -213,15 +178,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.failed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.failed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_failed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.failed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_failed
         end
@@ -237,15 +197,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.cancelling"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.cancelling"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelling
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.cancelling"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelling
         end
@@ -261,15 +216,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.cancelled"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.cancelled"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelled
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.cancelled"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelled
         end
@@ -285,15 +235,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.expired"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.expired"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_expired
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.expired"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_expired
         end
@@ -309,15 +254,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_created
         end
@@ -333,15 +273,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_in_progress
         end
@@ -357,15 +292,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.delta"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
-              ?event: :"thread.run.step.delta"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_delta
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+            ?event: :"thread.run.step.delta"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_delta
         end
@@ -381,15 +311,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_completed
         end
@@ -405,15 +330,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.failed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.failed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_failed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.failed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_failed
         end
@@ -429,15 +349,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.cancelled"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.cancelled"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_cancelled
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.cancelled"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_cancelled
         end
@@ -453,15 +368,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.expired"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.expired"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_expired
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.expired"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_expired
         end
@@ -477,15 +387,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_created
         end
@@ -501,15 +406,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_in_progress
         end
@@ -525,15 +425,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.delta"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
-              ?event: :"thread.message.delta"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_delta
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+            ?event: :"thread.message.delta"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_delta
         end
@@ -549,15 +444,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_completed
         end
@@ -573,15 +463,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.incomplete"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.incomplete"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::thread_message_incomplete
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.incomplete"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_incomplete
         end
@@ -593,12 +478,10 @@ module OpenAI
 
           attr_accessor event: :error
 
-          def initialize:
-            (data: OpenAI::Models::ErrorObject, ?event: :error) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantStreamEvent::error_event
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::ErrorObject,
+            ?event: :error
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::error_event
         end
diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs
index caa0e8dc..eabceb53 100644
--- a/sig/openai/models/beta/assistant_tool_choice.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice.rbs
@@ -16,15 +16,10 @@ module OpenAI
           OpenAI::Models::Beta::AssistantToolChoiceFunction
         ) -> OpenAI::Models::Beta::AssistantToolChoiceFunction
 
-        def initialize:
-          (
-            type: OpenAI::Models::Beta::AssistantToolChoice::type_,
-            ?function: OpenAI::Models::Beta::AssistantToolChoiceFunction
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_tool_choice
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          type: OpenAI::Models::Beta::AssistantToolChoice::type_,
+          ?function: OpenAI::Models::Beta::AssistantToolChoiceFunction
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_tool_choice
 
diff --git a/sig/openai/models/beta/assistant_tool_choice_function.rbs b/sig/openai/models/beta/assistant_tool_choice_function.rbs
index f10e8dd0..ab112b3d 100644
--- a/sig/openai/models/beta/assistant_tool_choice_function.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_function.rbs
@@ -6,12 +6,7 @@ module OpenAI
       class AssistantToolChoiceFunction < OpenAI::BaseModel
         attr_accessor name: String
 
-        def initialize:
-          (name: String) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_tool_choice_function
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (name: String) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_tool_choice_function
       end
diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs
index c0704e99..e67f47e5 100644
--- a/sig/openai/models/beta/assistant_update_params.rbs
+++ b/sig/openai/models/beta/assistant_update_params.rbs
@@ -51,25 +51,20 @@ module OpenAI
 
         attr_accessor top_p: Float?
 
-        def initialize:
-          (
-            ?description: String?,
-            ?instructions: String?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?model: OpenAI::Models::Beta::AssistantUpdateParams::model,
-            ?name: String?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
-            ?top_p: Float?,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::assistant_update_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?description: String?,
+          ?instructions: String?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?model: OpenAI::Models::Beta::AssistantUpdateParams::model,
+          ?name: String?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
+          ?top_p: Float?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::assistant_update_params
 
@@ -189,15 +184,10 @@ module OpenAI
             OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
           ) -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
 
-          def initialize:
-            (
-              ?code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
-              ?file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::AssistantUpdateParams::tool_resources
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
+            ?file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::tool_resources
 
@@ -208,12 +198,7 @@ module OpenAI
 
             def file_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?file_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::code_interpreter
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?file_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::code_interpreter
           end
@@ -225,12 +210,7 @@ module OpenAI
 
             def vector_store_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?vector_store_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::file_search
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?vector_store_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::file_search
           end
diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs
index af14840c..3855b043 100644
--- a/sig/openai/models/beta/code_interpreter_tool.rbs
+++ b/sig/openai/models/beta/code_interpreter_tool.rbs
@@ -6,12 +6,7 @@ module OpenAI
       class CodeInterpreterTool < OpenAI::BaseModel
         attr_accessor type: :code_interpreter
 
-        def initialize:
-          (?type: :code_interpreter) -> void
-          | (
-            ?OpenAI::Models::Beta::code_interpreter_tool
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?type: :code_interpreter) -> void
 
         def to_hash: -> OpenAI::Models::Beta::code_interpreter_tool
       end
diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs
index 05089b00..b042a2f7 100644
--- a/sig/openai/models/beta/file_search_tool.rbs
+++ b/sig/openai/models/beta/file_search_tool.rbs
@@ -16,14 +16,10 @@ module OpenAI
           OpenAI::Models::Beta::FileSearchTool::FileSearch
         ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch
 
-        def initialize:
-          (
-            ?file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch,
-            ?type: :file_search
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::file_search_tool | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch,
+          ?type: :file_search
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::file_search_tool
 
@@ -44,15 +40,10 @@ module OpenAI
             OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
           ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
 
-          def initialize:
-            (
-              ?max_num_results: Integer,
-              ?ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::FileSearchTool::file_search
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?max_num_results: Integer,
+            ?ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::FileSearchTool::file_search
 
@@ -71,15 +62,10 @@ module OpenAI
              OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
            ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
 
-            def initialize:
-              (
-                score_threshold: Float,
-                ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
-              ) -> void
-              | (
-                ?OpenAI::Models::Beta::FileSearchTool::FileSearch::ranking_options
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              score_threshold: Float,
+              ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Beta::FileSearchTool::FileSearch::ranking_options
 
diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs
index e8fb7686..72724066 100644
--- a/sig/openai/models/beta/function_tool.rbs
+++ b/sig/openai/models/beta/function_tool.rbs
@@ -9,14 +9,10 @@ module OpenAI
 
         attr_accessor type: :function
 
-        def initialize:
-          (
-            function: OpenAI::Models::FunctionDefinition,
-            ?type: :function
-          ) -> void
-          | (
-            ?OpenAI::Models::Beta::function_tool | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          function: OpenAI::Models::FunctionDefinition,
+          ?type: :function
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::function_tool
       end
diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs
index 7b782bca..ad12cf61 100644
--- a/sig/openai/models/beta/message_stream_event.rbs
+++ b/sig/openai/models/beta/message_stream_event.rbs
@@ -20,15 +20,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_created
         end
@@ -44,15 +39,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_in_progress
         end
@@ -68,15 +58,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.delta"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
-              ?event: :"thread.message.delta"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_delta
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+            ?event: :"thread.message.delta"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_delta
         end
@@ -92,15 +77,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_completed
         end
@@ -116,15 +96,10 @@ module OpenAI
 
           attr_accessor event: :"thread.message.incomplete"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Message,
-              ?event: :"thread.message.incomplete"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Message,
+            ?event: :"thread.message.incomplete"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete
         end
diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs
index 59c2999e..51f78cb2 100644
--- a/sig/openai/models/beta/run_step_stream_event.rbs
+++ b/sig/openai/models/beta/run_step_stream_event.rbs
@@ -22,15 +22,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_created
         end
@@ -46,15 +41,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_in_progress
         end
@@ -70,15 +60,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.delta"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
-              ?event: :"thread.run.step.delta"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_delta
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+            ?event: :"thread.run.step.delta"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_delta
         end
@@ -94,15 +79,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_completed
         end
@@ -118,15 +98,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.failed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.failed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_failed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.failed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_failed
         end
@@ -142,15 +117,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.cancelled"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.cancelled"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_cancelled
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.cancelled"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_cancelled
         end
@@ -166,15 +136,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.step.expired"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Runs::RunStep,
-              ?event: :"thread.run.step.expired"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            ?event: :"thread.run.step.expired"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired
         end
diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs
index 38c2a746..46793589 100644
--- a/sig/openai/models/beta/run_stream_event.rbs
+++ b/sig/openai/models/beta/run_stream_event.rbs
@@ -25,15 +25,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.created"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.created"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_created
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.created"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_created
         end
@@ -49,15 +44,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.queued"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.queued"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_queued
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.queued"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_queued
         end
@@ -73,15 +63,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.in_progress"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.in_progress"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_in_progress
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.in_progress"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_in_progress
         end
@@ -97,15 +82,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.requires_action"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.requires_action"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_requires_action
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.requires_action"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_requires_action
         end
@@ -121,15 +101,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.completed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.completed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_completed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.completed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_completed
         end
@@ -145,15 +120,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.incomplete"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.incomplete"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_incomplete
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.incomplete"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_incomplete
         end
@@ -169,15 +139,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.failed"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.failed"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_failed
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.failed"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_failed
         end
@@ -193,15 +158,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.cancelling"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.cancelling"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelling
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.cancelling"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelling
         end
@@ -217,15 +177,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.cancelled"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.cancelled"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelled
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.cancelled"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelled
         end
@@ -241,15 +196,10 @@ module OpenAI
 
           attr_accessor event: :"thread.run.expired"
 
-          def initialize:
-            (
-              data: OpenAI::Models::Beta::Threads::Run,
-              ?event: :"thread.run.expired"
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::RunStreamEvent::thread_run_expired
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: OpenAI::Models::Beta::Threads::Run,
+            ?event: :"thread.run.expired"
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_expired
         end
diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs
index 0188f0dc..1fdf76c8 100644
--- a/sig/openai/models/beta/thread.rbs
+++ b/sig/openai/models/beta/thread.rbs
@@ -21,15 +21,13 @@ module OpenAI
 
         attr_accessor tool_resources: OpenAI::Models::Beta::Thread::ToolResources?
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            metadata: OpenAI::Models::metadata?,
-            tool_resources: OpenAI::Models::Beta::Thread::ToolResources?,
-            ?object: :thread
-          ) -> void
-          | (?OpenAI::Models::Beta::thread | OpenAI::BaseModel data) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          metadata: OpenAI::Models::metadata?,
+          tool_resources: OpenAI::Models::Beta::Thread::ToolResources?,
+          ?object: :thread
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Beta::thread
 
@@ -52,15 +50,10 @@ module OpenAI
             OpenAI::Models::Beta::Thread::ToolResources::FileSearch
           ) -> OpenAI::Models::Beta::Thread::ToolResources::FileSearch
 
-          def initialize:
-            (
-              ?code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter,
-              ?file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch
-            ) -> void
-            | (
-              ?OpenAI::Models::Beta::Thread::tool_resources
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter,
+            ?file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Beta::Thread::tool_resources
 
@@ -71,12 +64,7 @@ module OpenAI
 
             def file_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?file_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::Thread::ToolResources::code_interpreter
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?file_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::Thread::ToolResources::code_interpreter
           end
@@ -88,12 +76,7 @@ module OpenAI
 
             def vector_store_ids=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (?vector_store_ids: ::Array[String]) -> void
-              | (
-                ?OpenAI::Models::Beta::Thread::ToolResources::file_search
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (?vector_store_ids: ::Array[String]) -> void
 
             def to_hash: -> OpenAI::Models::Beta::Thread::ToolResources::file_search
           end
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index d430d1fa..a7b4922f 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -61,29 +61,24 @@ module OpenAI
 
         attr_accessor truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?
 
- def initialize: - ( - assistant_id: String, - ?instructions: String?, - ?max_completion_tokens: Integer?, - ?max_prompt_tokens: Integer?, - ?metadata: OpenAI::Models::metadata?, - ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, - ?parallel_tool_calls: bool, - ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, - ?temperature: Float?, - ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, - ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::thread_create_and_run_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + assistant_id: String, + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + ?parallel_tool_calls: bool, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::thread_create_and_run_params @@ -111,16 +106,11 @@ module OpenAI attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources? - def initialize: - ( - ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], - ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources? - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::thread - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources? + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::thread @@ -141,17 +131,12 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, - role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, - ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, - ?metadata: OpenAI::Models::metadata? - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, + ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, + ?metadata: OpenAI::Models::metadata? 
+ ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::message @@ -194,15 +179,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] - def initialize: - ( - ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::attachment - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::attachment @@ -216,12 +196,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel attr_accessor type: :file_search - def initialize: - (?type: :file_search) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :file_search) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search end @@ -250,15 +225,10 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch - def initialize: - ( - ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::tool_resources - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::tool_resources @@ -269,12 +239,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?file_ids: ::Array[String]) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::code_interpreter end @@ -296,15 +261,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] - def initialize: - ( - ?vector_store_ids: ::Array[String], - ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?vector_store_ids: ::Array[String], + ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::file_search @@ -328,16 +288,11 @@ module OpenAI attr_accessor 
metadata: OpenAI::Models::metadata? - def initialize: - ( - ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, - ?file_ids: ::Array[String], - ?metadata: OpenAI::Models::metadata? - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::vector_store - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata? + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::vector_store @@ -351,12 +306,7 @@ module OpenAI class Auto < OpenAI::BaseModel attr_accessor type: :auto - def initialize: - (?type: :auto) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :auto) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto end @@ -372,15 +322,10 @@ module OpenAI attr_accessor type: :static - def initialize: - ( - static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - ?type: :static - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static - | OpenAI::BaseModel data - ) -> void + def initialize: ( + static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + ?type: :static + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static @@ -395,15 +340,10 @@ module OpenAI attr_accessor max_chunk_size_tokens: Integer - def initialize: - ( - chunk_overlap_tokens: Integer, - max_chunk_size_tokens: Integer - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static - | OpenAI::BaseModel data - ) -> void + def initialize: ( + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static end @@ -435,15 +375,10 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch - def initialize: - ( - ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::tool_resources - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::tool_resources @@ -454,12 +389,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?file_ids: 
::Array[String]) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::code_interpreter end @@ -471,12 +401,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?vector_store_ids: ::Array[String]) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?vector_store_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::file_search end @@ -502,15 +427,10 @@ module OpenAI attr_accessor last_messages: Integer? - def initialize: - ( - type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, - ?last_messages: Integer? - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateAndRunParams::truncation_strategy - | OpenAI::BaseModel data - ) -> void + def initialize: ( + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + ?last_messages: Integer? + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::truncation_strategy diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index c4cf1e6a..5acf3300 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -23,16 +23,12 @@ module OpenAI attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources? - def initialize: - ( - ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::thread_create_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::thread_create_params @@ -53,17 +49,12 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, - ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, - ?metadata: OpenAI::Models::metadata? - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, + ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, + ?metadata: OpenAI::Models::metadata? 
+ ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::message @@ -106,15 +97,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] - def initialize: - ( - ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::Message::attachment - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::attachment @@ -128,12 +114,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel attr_accessor type: :file_search - def initialize: - (?type: :file_search) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :file_search) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search end @@ -162,15 +143,10 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch - def initialize: - ( - ?code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::tool_resources - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::tool_resources @@ -181,12 +157,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?file_ids: ::Array[String]) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::code_interpreter end @@ -208,15 +179,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] - def initialize: - ( - ?vector_store_ids: ::Array[String], - ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?vector_store_ids: ::Array[String], + ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::file_search @@ -240,16 +206,11 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, - ?file_ids: ::Array[String], - ?metadata: OpenAI::Models::metadata? 
- ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::vector_store - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata? + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::vector_store @@ -263,12 +224,7 @@ module OpenAI class Auto < OpenAI::BaseModel attr_accessor type: :auto - def initialize: - (?type: :auto) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :auto) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto end @@ -284,15 +240,10 @@ module OpenAI attr_accessor type: :static - def initialize: - ( - static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - ?type: :static - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static - | OpenAI::BaseModel data - ) -> void + def initialize: ( + static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + ?type: :static + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static @@ -307,15 +258,10 @@ module OpenAI attr_accessor max_chunk_size_tokens: Integer - def initialize: - ( - chunk_overlap_tokens: Integer, - max_chunk_size_tokens: Integer - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static - | OpenAI::BaseModel data - ) -> void + def initialize: ( + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static end diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index 0ae7f5b6..c6749d4e 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -7,11 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::thread_delete_params | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::Beta::thread_delete_params end diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs index 6e32556e..c3b1ff2b 100644 --- a/sig/openai/models/beta/thread_deleted.rbs +++ b/sig/openai/models/beta/thread_deleted.rbs @@ -11,11 +11,11 @@ module OpenAI attr_accessor object: :"thread.deleted" - def initialize: - (id: String, deleted: bool, ?object: :"thread.deleted") -> void - | ( - ?OpenAI::Models::Beta::thread_deleted | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + deleted: bool, + ?object: :"thread.deleted" + ) -> void def to_hash: -> OpenAI::Models::Beta::thread_deleted end diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs 
b/sig/openai/models/beta/thread_retrieve_params.rbs index 3c42faa5..ef480309 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -7,12 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::thread_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::Beta::thread_retrieve_params end diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs index 6c3a4dc3..672ef1d6 100644 --- a/sig/openai/models/beta/thread_stream_event.rbs +++ b/sig/openai/models/beta/thread_stream_event.rbs @@ -17,15 +17,11 @@ module OpenAI def enabled=: (bool) -> bool - def initialize: - ( - data: OpenAI::Models::Beta::Thread, - ?enabled: bool, - ?event: :"thread.created" - ) -> void - | ( - ?OpenAI::Models::Beta::thread_stream_event | OpenAI::BaseModel data - ) -> void + def initialize: ( + data: OpenAI::Models::Beta::Thread, + ?enabled: bool, + ?event: :"thread.created" + ) -> void def to_hash: -> OpenAI::Models::Beta::thread_stream_event end diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index adfa8b00..d5e29904 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -16,15 +16,11 @@ module OpenAI attr_accessor tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources? - def initialize: - ( - ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::thread_update_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?metadata: OpenAI::Models::metadata?, + ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::thread_update_params @@ -47,15 +43,10 @@ module OpenAI OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch ) -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch - def initialize: - ( - ?code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch - ) -> void - | ( - ?OpenAI::Models::Beta::ThreadUpdateParams::tool_resources - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch + ) -> void def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::tool_resources @@ -66,12 +57,7 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?file_ids: ::Array[String]) -> void - | ( - ?OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::code_interpreter end @@ -83,12 +69,7 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] - def initialize: - (?vector_store_ids: ::Array[String]) -> void - | ( - 
?OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?vector_store_ids: ::Array[String]) -> void def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::file_search end diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs index 3f972d87..da859605 100644 --- a/sig/openai/models/beta/threads/file_citation_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs @@ -22,18 +22,13 @@ module OpenAI attr_accessor type: :file_citation - def initialize: - ( - end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, - start_index: Integer, - text: String, - ?type: :file_citation - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::file_citation_annotation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + end_index: Integer, + file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + start_index: Integer, + text: String, + ?type: :file_citation + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::file_citation_annotation @@ -42,12 +37,7 @@ module OpenAI class FileCitation < OpenAI::BaseModel attr_accessor file_id: String - def initialize: - (file_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::FileCitationAnnotation::file_citation - | OpenAI::BaseModel data - ) -> void + def initialize: (file_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::FileCitationAnnotation::file_citation end diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs index 72eff280..09994a1b 100644 --- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs @@ -35,19 +35,14 @@ module OpenAI def text=: (String) -> String - def initialize: - ( - index: Integer, - ?end_index: Integer, - ?file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, - ?start_index: Integer, - ?text: String, - ?type: :file_citation - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::file_citation_delta_annotation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?end_index: Integer, + ?file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + ?start_index: Integer, + ?text: String, + ?type: :file_citation + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::file_citation_delta_annotation @@ -62,12 +57,7 @@ module OpenAI def quote=: (String) -> String - def initialize: - (?file_id: String, ?quote: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::file_citation - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_id: String, ?quote: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::file_citation end diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs index c4031231..d3e7acc5 100644 --- a/sig/openai/models/beta/threads/file_path_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_annotation.rbs @@ -22,18 +22,13 @@ module OpenAI attr_accessor type: :file_path - def initialize: - ( - end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, - start_index: Integer, - text: 
String, - ?type: :file_path - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::file_path_annotation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + end_index: Integer, + file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, + start_index: Integer, + text: String, + ?type: :file_path + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::file_path_annotation @@ -42,12 +37,7 @@ module OpenAI class FilePath < OpenAI::BaseModel attr_accessor file_id: String - def initialize: - (file_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::FilePathAnnotation::file_path - | OpenAI::BaseModel data - ) -> void + def initialize: (file_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::FilePathAnnotation::file_path end diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs index 59a7e537..6c8e2380 100644 --- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs @@ -35,19 +35,14 @@ module OpenAI def text=: (String) -> String - def initialize: - ( - index: Integer, - ?end_index: Integer, - ?file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, - ?start_index: Integer, - ?text: String, - ?type: :file_path - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::file_path_delta_annotation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?end_index: Integer, + ?file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, + ?start_index: Integer, + ?text: String, + ?type: :file_path + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::file_path_delta_annotation @@ -58,12 +53,7 @@ module OpenAI def file_id=: (String) -> String - def initialize: - (?file_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::file_path - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::file_path end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index d2f16077..ce6ca5d5 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -17,15 +17,10 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageFile::detail ) -> OpenAI::Models::Beta::Threads::ImageFile::detail - def initialize: - ( - file_id: String, - ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_file - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_file diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs index 1783edd8..7f0e99ee 100644 --- a/sig/openai/models/beta/threads/image_file_content_block.rbs +++ b/sig/openai/models/beta/threads/image_file_content_block.rbs @@ -13,15 +13,10 @@ module OpenAI attr_accessor type: :image_file - def initialize: - ( - image_file: OpenAI::Models::Beta::Threads::ImageFile, - ?type: :image_file - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_file_content_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + image_file: OpenAI::Models::Beta::Threads::ImageFile, + ?type: :image_file + ) -> 
void def to_hash: -> OpenAI::Models::Beta::Threads::image_file_content_block end diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 2a16093f..3b996b60 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -19,15 +19,10 @@ module OpenAI def file_id=: (String) -> String - def initialize: - ( - ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, - ?file_id: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_file_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, + ?file_id: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_file_delta diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs index e48b002e..e7e85e2b 100644 --- a/sig/openai/models/beta/threads/image_file_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs @@ -20,16 +20,11 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageFileDelta ) -> OpenAI::Models::Beta::Threads::ImageFileDelta - def initialize: - ( - index: Integer, - ?image_file: OpenAI::Models::Beta::Threads::ImageFileDelta, - ?type: :image_file - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_file_delta_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?image_file: OpenAI::Models::Beta::Threads::ImageFileDelta, + ?type: :image_file + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_file_delta_block end diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 4b135a8a..9aec42d8 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -17,14 +17,10 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURL::detail ) -> OpenAI::Models::Beta::Threads::ImageURL::detail - def initialize: - ( - url: String, - ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_url | OpenAI::BaseModel data - ) -> void + def initialize: ( + url: String, + ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_url diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs index fda56960..bd4eb8a7 100644 --- a/sig/openai/models/beta/threads/image_url_content_block.rbs +++ b/sig/openai/models/beta/threads/image_url_content_block.rbs @@ -13,15 +13,10 @@ module OpenAI attr_accessor type: :image_url - def initialize: - ( - image_url: OpenAI::Models::Beta::Threads::ImageURL, - ?type: :image_url - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_url_content_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + image_url: OpenAI::Models::Beta::Threads::ImageURL, + ?type: :image_url + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_url_content_block end diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index 816b5421..c7d07d96 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -19,15 +19,10 @@ module OpenAI def url=: (String) -> String - def initialize: - ( - ?detail: 
OpenAI::Models::Beta::Threads::ImageURLDelta::detail, - ?url: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_url_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + ?url: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_url_delta diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs index 4e83b0df..7e548334 100644 --- a/sig/openai/models/beta/threads/image_url_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs @@ -20,16 +20,11 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDelta ) -> OpenAI::Models::Beta::Threads::ImageURLDelta - def initialize: - ( - index: Integer, - ?image_url: OpenAI::Models::Beta::Threads::ImageURLDelta, - ?type: :image_url - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::image_url_delta_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?image_url: OpenAI::Models::Beta::Threads::ImageURLDelta, + ?type: :image_url + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::image_url_delta_block end diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index 18c52a5c..e6c155a8 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -49,26 +49,22 @@ module OpenAI attr_accessor thread_id: String - def initialize: - ( - id: String, - assistant_id: String?, - attachments: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment]?, - completed_at: Integer?, - content: ::Array[OpenAI::Models::Beta::Threads::message_content], - created_at: Integer, - incomplete_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails?, - metadata: OpenAI::Models::metadata?, - role: OpenAI::Models::Beta::Threads::Message::role, - run_id: String?, - status: OpenAI::Models::Beta::Threads::Message::status, - thread_id: String, - ?object: :"thread.message" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + assistant_id: String?, + attachments: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment]?, + completed_at: Integer?, + content: ::Array[OpenAI::Models::Beta::Threads::message_content], + created_at: Integer, + incomplete_at: Integer?, + incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails?, + metadata: OpenAI::Models::metadata?, + role: OpenAI::Models::Beta::Threads::Message::role, + run_id: String?, + status: OpenAI::Models::Beta::Threads::Message::status, + thread_id: String, + ?object: :"thread.message" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message @@ -89,15 +85,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] ) -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] - def initialize: - ( - ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Message::attachment - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Message::attachment @@ -112,12 +103,7 @@ module OpenAI class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel attr_accessor type: :file_search - 
def initialize: - (?type: :file_search) -> void - | ( - ?OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :file_search) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only end @@ -134,14 +120,9 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel attr_accessor reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason - def initialize: - ( - reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Message::incomplete_details - | OpenAI::BaseModel data - ) -> void + def initialize: ( + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Message::incomplete_details diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index fe1490c3..fb2276f9 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -23,18 +23,13 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, - role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, - ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, - ?metadata: OpenAI::Models::metadata?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_create_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, + ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_create_params @@ -77,15 +72,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] ) -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] - def initialize: - ( - ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::MessageCreateParams::attachment - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::attachment @@ -99,12 +89,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel attr_accessor type: :file_search - def initialize: - (?type: :file_search) -> void - | ( - ?OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :file_search) -> void def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search end diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index 8723d195..d7f60e8a 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -11,12 +11,10 @@ 
module OpenAI attr_accessor thread_id: String - def initialize: - (thread_id: String, ?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_delete_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_delete_params end diff --git a/sig/openai/models/beta/threads/message_deleted.rbs b/sig/openai/models/beta/threads/message_deleted.rbs index f8d6ebeb..f6d3a96e 100644 --- a/sig/openai/models/beta/threads/message_deleted.rbs +++ b/sig/openai/models/beta/threads/message_deleted.rbs @@ -12,16 +12,11 @@ module OpenAI attr_accessor object: :"thread.message.deleted" - def initialize: - ( - id: String, - deleted: bool, - ?object: :"thread.message.deleted" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_deleted - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + deleted: bool, + ?object: :"thread.message.deleted" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_deleted end diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index fe36e15a..e053ee9b 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -21,15 +21,10 @@ module OpenAI OpenAI::Models::Beta::Threads::MessageDelta::role ) -> OpenAI::Models::Beta::Threads::MessageDelta::role - def initialize: - ( - ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - ?role: OpenAI::Models::Beta::Threads::MessageDelta::role - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], + ?role: OpenAI::Models::Beta::Threads::MessageDelta::role + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_delta diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs index 44b036a8..d34c13f6 100644 --- a/sig/openai/models/beta/threads/message_delta_event.rbs +++ b/sig/openai/models/beta/threads/message_delta_event.rbs @@ -16,16 +16,11 @@ module OpenAI attr_accessor object: :"thread.message.delta" - def initialize: - ( - id: String, - delta: OpenAI::Models::Beta::Threads::MessageDelta, - ?object: :"thread.message.delta" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + delta: OpenAI::Models::Beta::Threads::MessageDelta, + ?object: :"thread.message.delta" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_delta_event end diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index 623c5e48..dca2c571 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -38,19 +38,14 @@ module OpenAI def run_id=: (String) -> String - def initialize: - ( - ?after: String, - ?before: String, - ?limit: Integer, - ?order: OpenAI::Models::Beta::Threads::MessageListParams::order, - ?run_id: String, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?after: String, + ?before: String, + ?limit: Integer, + ?order: 
OpenAI::Models::Beta::Threads::MessageListParams::order, + ?run_id: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_list_params diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index e0e6d123..5e2772e6 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -11,12 +11,10 @@ module OpenAI attr_accessor thread_id: String - def initialize: - (thread_id: String, ?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_retrieve_params end diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index a5908b4b..a6b12a9a 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -14,16 +14,11 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - thread_id: String, - ?metadata: OpenAI::Models::metadata?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::message_update_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::message_update_params end diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs index f9a5be4f..041435a5 100644 --- a/sig/openai/models/beta/threads/refusal_content_block.rbs +++ b/sig/openai/models/beta/threads/refusal_content_block.rbs @@ -9,12 +9,7 @@ module OpenAI attr_accessor type: :refusal - def initialize: - (refusal: String, ?type: :refusal) -> void - | ( - ?OpenAI::Models::Beta::Threads::refusal_content_block - | OpenAI::BaseModel data - ) -> void + def initialize: (refusal: String, ?type: :refusal) -> void def to_hash: -> OpenAI::Models::Beta::Threads::refusal_content_block end diff --git a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs index 50a977cf..635407d5 100644 --- a/sig/openai/models/beta/threads/refusal_delta_block.rbs +++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs @@ -14,12 +14,11 @@ module OpenAI def refusal=: (String) -> String - def initialize: - (index: Integer, ?refusal: String, ?type: :refusal) -> void - | ( - ?OpenAI::Models::Beta::Threads::refusal_delta_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?refusal: String, + ?type: :refusal + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::refusal_delta_block end diff --git a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs index 2d95c46d..9429819f 100644 --- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs +++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs @@ -16,16 +16,11 @@ module OpenAI attr_accessor type: :function - def initialize: - ( - id: String, - function: 
OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, - ?type: :function - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::required_action_function_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + ?type: :function + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::required_action_function_tool_call @@ -36,12 +31,7 @@ module OpenAI attr_accessor name: String - def initialize: - (arguments: String, name: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::function - | OpenAI::BaseModel data - ) -> void + def initialize: (arguments: String, name: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::function end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 71146021..deae5170 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -88,39 +88,35 @@ module OpenAI attr_accessor top_p: Float? - def initialize: - ( - id: String, - assistant_id: String, - cancelled_at: Integer?, - completed_at: Integer?, - created_at: Integer, - expires_at: Integer?, - failed_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails?, - instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError?, - max_completion_tokens: Integer?, - max_prompt_tokens: Integer?, - metadata: OpenAI::Models::metadata?, - model: String, - parallel_tool_calls: bool, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction?, - response_format: OpenAI::Models::Beta::assistant_response_format_option?, - started_at: Integer?, - status: OpenAI::Models::Beta::Threads::run_status, - thread_id: String, - tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy?, - usage: OpenAI::Models::Beta::Threads::Run::Usage?, - ?temperature: Float?, - ?top_p: Float?, - ?object: :"thread.run" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::run | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expires_at: Integer?, + failed_at: Integer?, + incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails?, + instructions: String, + last_error: OpenAI::Models::Beta::Threads::Run::LastError?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: String, + parallel_tool_calls: bool, + required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + started_at: Integer?, + status: OpenAI::Models::Beta::Threads::run_status, + thread_id: String, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy?, + usage: OpenAI::Models::Beta::Threads::Run::Usage?, + ?temperature: Float?, + ?top_p: Float?, + ?object: :"thread.run" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run @@ -136,14 +132,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason ) -> 
OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason - def initialize: - ( - ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::incomplete_details - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::incomplete_details @@ -168,15 +159,10 @@ module OpenAI attr_accessor message: String - def initialize: - ( - code: OpenAI::Models::Beta::Threads::Run::LastError::code, - message: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::last_error - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code: OpenAI::Models::Beta::Threads::Run::LastError::code, + message: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::last_error @@ -202,15 +188,10 @@ module OpenAI attr_accessor type: :submit_tool_outputs - def initialize: - ( - submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, - ?type: :submit_tool_outputs - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::required_action - | OpenAI::BaseModel data - ) -> void + def initialize: ( + submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + ?type: :submit_tool_outputs + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::required_action @@ -222,14 +203,9 @@ module OpenAI class SubmitToolOutputs < OpenAI::BaseModel attr_accessor tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] - def initialize: - ( - tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::RequiredAction::submit_tool_outputs - | OpenAI::BaseModel data - ) -> void + def initialize: ( + tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::RequiredAction::submit_tool_outputs end @@ -246,15 +222,10 @@ module OpenAI attr_accessor last_messages: Integer? - def initialize: - ( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, - ?last_messages: Integer? - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::truncation_strategy - | OpenAI::BaseModel data - ) -> void + def initialize: ( + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + ?last_messages: Integer? 
+ ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::truncation_strategy @@ -282,16 +253,11 @@ module OpenAI attr_accessor total_tokens: Integer - def initialize: - ( - completion_tokens: Integer, - prompt_tokens: Integer, - total_tokens: Integer - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Run::usage - | OpenAI::BaseModel data - ) -> void + def initialize: ( + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Run::usage end diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index b8538449..7c5a325c 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -11,12 +11,10 @@ module OpenAI attr_accessor thread_id: String - def initialize: - (thread_id: String, ?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_cancel_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_cancel_params end diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index 6948072a..c9f5247b 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -68,31 +68,26 @@ module OpenAI attr_accessor truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy? - def initialize: - ( - assistant_id: String, - ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - ?additional_instructions: String?, - ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, - ?instructions: String?, - ?max_completion_tokens: Integer?, - ?max_prompt_tokens: Integer?, - ?metadata: OpenAI::Models::metadata?, - ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, - ?parallel_tool_calls: bool, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, - ?temperature: Float?, - ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, - ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_create_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + assistant_id: String, + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?additional_instructions: String?, + ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?instructions: String?, + ?max_completion_tokens: Integer?, + ?max_prompt_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, + ?parallel_tool_calls: bool, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, + ?temperature: Float?, + ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + ?top_p: Float?, + ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + 
?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_create_params @@ -113,17 +108,12 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - def initialize: - ( - content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, - ?attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, - ?metadata: OpenAI::Models::metadata? - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::RunCreateParams::additional_message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, + ?attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, + ?metadata: OpenAI::Models::metadata? + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::additional_message @@ -166,15 +156,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] ) -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] - def initialize: - ( - ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::attachment - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::attachment @@ -188,12 +173,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel attr_accessor type: :file_search - def initialize: - (?type: :file_search) -> void - | ( - ?OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :file_search) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search end @@ -220,15 +200,10 @@ module OpenAI attr_accessor last_messages: Integer? - def initialize: - ( - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, - ?last_messages: Integer? - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::RunCreateParams::truncation_strategy - | OpenAI::BaseModel data - ) -> void + def initialize: ( + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + ?last_messages: Integer? 
+ ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::truncation_strategy diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index 48095b4f..fc43edb9 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -33,18 +33,13 @@ module OpenAI OpenAI::Models::Beta::Threads::RunListParams::order ) -> OpenAI::Models::Beta::Threads::RunListParams::order - def initialize: - ( - ?after: String, - ?before: String, - ?limit: Integer, - ?order: OpenAI::Models::Beta::Threads::RunListParams::order, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Beta::Threads::RunListParams::order, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_list_params diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index a3512fd8..1409bd1a 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -11,12 +11,10 @@ module OpenAI attr_accessor thread_id: String - def initialize: - (thread_id: String, ?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_retrieve_params end diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index a1c6838d..67893bc6 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -17,16 +17,11 @@ module OpenAI attr_accessor tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] - def initialize: - ( - thread_id: String, - tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_submit_tool_outputs_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_submit_tool_outputs_params @@ -41,12 +36,7 @@ module OpenAI def tool_call_id=: (String) -> String - def initialize: - (?output: String, ?tool_call_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::tool_output - | OpenAI::BaseModel data - ) -> void + def initialize: (?output: String, ?tool_call_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::tool_output end diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index 80b6bebd..d466458d 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -14,16 +14,11 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? 
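# (Editorial note, not generated content.) Two different `?`s meet in the signature
# rewritten below: the leading `?` on `?metadata:` makes the keyword optional at the
# call site, while the trailing `?` in `OpenAI::Models::metadata?` makes the value
# itself nilable. Assuming the runtime constructor mirrors the RBS, both of these
# hypothetical calls would type-check:
#
#   OpenAI::Models::Beta::Threads::RunUpdateParams.new(thread_id: "thread_abc123")
#   OpenAI::Models::Beta::Threads::RunUpdateParams.new(thread_id: "thread_abc123", metadata: nil)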
- def initialize: - ( - thread_id: String, - ?metadata: OpenAI::Models::metadata?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::run_update_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::run_update_params end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs index 70c44092..85c320b0 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs @@ -15,12 +15,11 @@ module OpenAI def logs=: (String) -> String - def initialize: - (index: Integer, ?logs: String, ?type: :logs) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_logs - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?logs: String, + ?type: :logs + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_logs end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs index bbb9b5c9..f640b74e 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs @@ -21,16 +21,11 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image ) -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image - def initialize: - ( - index: Integer, - ?image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, - ?type: :image - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_output_image - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + ?type: :image + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_output_image @@ -41,12 +36,7 @@ module OpenAI def file_id=: (String) -> String - def initialize: - (?file_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::image - | OpenAI::BaseModel data - ) -> void + def initialize: (?file_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::image end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index 4d13d7ff..67d18341 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -17,16 +17,11 @@ module OpenAI attr_accessor type: :code_interpreter - def initialize: - ( - id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - ?type: :code_interpreter - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + ?type: :code_interpreter + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call @@ -41,15 +36,10 @@ module OpenAI attr_accessor outputs: 
::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] - def initialize: - ( - input: String, - outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: ( + input: String, + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::code_interpreter @@ -65,12 +55,7 @@ module OpenAI attr_accessor type: :logs - def initialize: - (logs: String, ?type: :logs) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::logs - | OpenAI::BaseModel data - ) -> void + def initialize: (logs: String, ?type: :logs) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::logs end @@ -86,15 +71,10 @@ module OpenAI attr_accessor type: :image - def initialize: - ( - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - ?type: :image - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::image - | OpenAI::BaseModel data - ) -> void + def initialize: ( + image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + ?type: :image + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::image @@ -103,12 +83,7 @@ module OpenAI class Image < OpenAI::BaseModel attr_accessor file_id: String - def initialize: - (file_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::image - | OpenAI::BaseModel data - ) -> void + def initialize: (file_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::image end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index 72b4b8fe..d8884223 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -26,17 +26,12 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter ) -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter - def initialize: - ( - index: Integer, - ?id: String, - ?code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - ?type: :code_interpreter - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?id: String, + ?code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + ?type: :code_interpreter + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call_delta @@ -57,15 +52,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] - def initialize: - ( - 
?input: String, - ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::code_interpreter - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?input: String, + ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::code_interpreter diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index 55cf4877..5b6c9102 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -17,16 +17,11 @@ module OpenAI attr_accessor type: :file_search - def initialize: - ( - id: String, - file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, - ?type: :file_search - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::file_search_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + ?type: :file_search + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::file_search_tool_call @@ -49,15 +44,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] - def initialize: - ( - ?ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - ?results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::file_search - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + ?results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::file_search @@ -72,15 +62,10 @@ module OpenAI attr_accessor score_threshold: Float - def initialize: - ( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, - score_threshold: Float - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::ranking_options - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + score_threshold: Float + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::ranking_options @@ -115,17 +100,12 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] - def initialize: - ( - file_id: String, - file_name: String, - score: Float, - ?content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::result - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + file_name: String, + score: Float, + ?content: 
::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::result @@ -146,15 +126,10 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ ) -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ - def initialize: - ( - ?text: String, - ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::content - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?text: String, + ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::content diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs index ecc227de..11ae3dc7 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs @@ -17,17 +17,12 @@ module OpenAI def id=: (String) -> String - def initialize: - ( - file_search: top, - index: Integer, - ?id: String, - ?type: :file_search - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::file_search_tool_call_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_search: top, + index: Integer, + ?id: String, + ?type: :file_search + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::file_search_tool_call_delta end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs index 5c442503..c3e9168a 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs @@ -17,16 +17,11 @@ module OpenAI attr_accessor type: :function - def initialize: - ( - id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, - ?type: :function - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::function_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, + ?type: :function + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::function_tool_call @@ -39,12 +34,11 @@ module OpenAI attr_accessor output: String? - def initialize: - (arguments: String, name: String, output: String?) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::function - | OpenAI::BaseModel data - ) -> void + def initialize: ( + arguments: String, + name: String, + output: String? 
+ ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::function end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs index bb2f4482..f6a01247 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs @@ -26,17 +26,12 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function ) -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function - def initialize: - ( - index: Integer, - ?id: String, - ?function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, - ?type: :function - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::function_tool_call_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?id: String, + ?function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, + ?type: :function + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::function_tool_call_delta @@ -53,12 +48,11 @@ module OpenAI attr_accessor output: String? - def initialize: - (?arguments: String, ?name: String, ?output: String?) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::function - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?arguments: String, + ?name: String, + ?output: String? + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::function end diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs index f84bf9e6..3ddacf7b 100644 --- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs @@ -14,15 +14,10 @@ module OpenAI attr_accessor type: :message_creation - def initialize: - ( - message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - ?type: :message_creation - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::message_creation_step_details - | OpenAI::BaseModel data - ) -> void + def initialize: ( + message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + ?type: :message_creation + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::message_creation_step_details @@ -31,12 +26,7 @@ module OpenAI class MessageCreation < OpenAI::BaseModel attr_accessor message_id: String - def initialize: - (message_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::message_creation - | OpenAI::BaseModel data - ) -> void + def initialize: (message_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::message_creation end diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index fc8b08c2..4216d33b 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -59,29 +59,24 @@ module OpenAI attr_accessor usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage? 
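# (Editorial note, not generated content.) In the RunStep signature rewritten below,
# `?object: :"thread.run.step"` pins an optional keyword to a single symbol literal:
# callers may omit it, but the only value that type-checks is :"thread.run.step",
# which keeps the wire-format discriminator out of callers' hands. The nested Usage
# model later in this file shows the plain required-keyword case; assuming the
# runtime constructor mirrors its RBS, a hypothetical call would be:
#
#   OpenAI::Models::Beta::Threads::Runs::RunStep::Usage.new(
#     completion_tokens: 123, # illustrative token counts, not from this patch
#     prompt_tokens: 456,
#     total_tokens: 579
#   )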
- def initialize: - ( - id: String, - assistant_id: String, - cancelled_at: Integer?, - completed_at: Integer?, - created_at: Integer, - expired_at: Integer?, - failed_at: Integer?, - last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError?, - metadata: OpenAI::Models::metadata?, - run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, - thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, - usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage?, - ?object: :"thread.run.step" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::run_step - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expired_at: Integer?, + failed_at: Integer?, + last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError?, + metadata: OpenAI::Models::metadata?, + run_id: String, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, + thread_id: String, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, + usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage?, + ?object: :"thread.run.step" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step @@ -96,15 +91,10 @@ module OpenAI attr_accessor message: String - def initialize: - ( - code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, - message: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::RunStep::last_error - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, + message: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStep::last_error @@ -162,16 +152,11 @@ module OpenAI attr_accessor total_tokens: Integer - def initialize: - ( - completion_tokens: Integer, - prompt_tokens: Integer, - total_tokens: Integer - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::RunStep::usage - | OpenAI::BaseModel data - ) -> void + def initialize: ( + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStep::usage end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index ae21101e..7f45537b 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -18,14 +18,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details - def initialize: - ( - ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::run_step_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index 1154b691..f3f75aab 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -20,16 +20,11 @@ 
module OpenAI attr_accessor object: :"thread.run.step.delta" - def initialize: - ( - id: String, - delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, - ?object: :"thread.run.step.delta" - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::run_step_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, + ?object: :"thread.run.step.delta" + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta_event end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index 6ef0818b..5dd59c8c 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -21,15 +21,10 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation - def initialize: - ( - ?message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - ?type: :message_creation - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::run_step_delta_message_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + ?type: :message_creation + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta_message_delta @@ -40,12 +35,7 @@ module OpenAI def message_id=: (String) -> String - def initialize: - (?message_id: String) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::message_creation - | OpenAI::BaseModel data - ) -> void + def initialize: (?message_id: String) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::message_creation end diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index b2bdafc6..bd65efbf 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -44,20 +44,15 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::StepListParams::order ) -> OpenAI::Models::Beta::Threads::Runs::StepListParams::order - def initialize: - ( - thread_id: String, - ?after: String, - ?before: String, - ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - ?limit: Integer, - ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::step_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + ?after: String, + ?before: String, + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?limit: Integer, + ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::step_list_params diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index 937742d8..3ae1e68e 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -25,17 +25,12 @@ module OpenAI 
::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include] ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include] - def initialize: - ( - thread_id: String, - run_id: String, - ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::step_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + thread_id: String, + run_id: String, + ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::step_retrieve_params end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs index 275ec6b1..50619aae 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs @@ -18,15 +18,10 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta] ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta] - def initialize: - ( - ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta], - ?type: :tool_calls - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::tool_call_delta_object - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta], + ?type: :tool_calls + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::tool_call_delta_object end diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs index 20aed347..7f5c8851 100644 --- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs @@ -14,15 +14,10 @@ module OpenAI attr_accessor type: :tool_calls - def initialize: - ( - tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], - ?type: :tool_calls - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::Runs::tool_calls_step_details - | OpenAI::BaseModel data - ) -> void + def initialize: ( + tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], + ?type: :tool_calls + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::Runs::tool_calls_step_details end diff --git a/sig/openai/models/beta/threads/text.rbs b/sig/openai/models/beta/threads/text.rbs index 4311ade7..1c4ebeb3 100644 --- a/sig/openai/models/beta/threads/text.rbs +++ b/sig/openai/models/beta/threads/text.rbs @@ -13,14 +13,10 @@ module OpenAI attr_accessor value: String - def initialize: - ( - annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], - value: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::text | OpenAI::BaseModel data - ) -> void + def initialize: ( + annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], + value: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::text end diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs index 5800b03c..b83f4585 100644 --- a/sig/openai/models/beta/threads/text_content_block.rbs +++ b/sig/openai/models/beta/threads/text_content_block.rbs @@ -10,12 +10,10 @@ module OpenAI attr_accessor type: :text - def initialize: - (text: OpenAI::Models::Beta::Threads::Text, ?type: :text) -> void - | ( - 
?OpenAI::Models::Beta::Threads::text_content_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + text: OpenAI::Models::Beta::Threads::Text, + ?type: :text + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::text_content_block end diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs index 8f3b6fc8..bf0b5b7e 100644 --- a/sig/openai/models/beta/threads/text_content_block_param.rbs +++ b/sig/openai/models/beta/threads/text_content_block_param.rbs @@ -9,12 +9,7 @@ module OpenAI attr_accessor type: :text - def initialize: - (text: String, ?type: :text) -> void - | ( - ?OpenAI::Models::Beta::Threads::text_content_block_param - | OpenAI::BaseModel data - ) -> void + def initialize: (text: String, ?type: :text) -> void def to_hash: -> OpenAI::Models::Beta::Threads::text_content_block_param end diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs index 64131287..721b012e 100644 --- a/sig/openai/models/beta/threads/text_delta.rbs +++ b/sig/openai/models/beta/threads/text_delta.rbs @@ -19,15 +19,10 @@ module OpenAI def value=: (String) -> String - def initialize: - ( - ?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], - ?value: String - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::text_delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], + ?value: String + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::text_delta end diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs index e026e18f..2a03d985 100644 --- a/sig/openai/models/beta/threads/text_delta_block.rbs +++ b/sig/openai/models/beta/threads/text_delta_block.rbs @@ -20,16 +20,11 @@ module OpenAI OpenAI::Models::Beta::Threads::TextDelta ) -> OpenAI::Models::Beta::Threads::TextDelta - def initialize: - ( - index: Integer, - ?text: OpenAI::Models::Beta::Threads::TextDelta, - ?type: :text - ) -> void - | ( - ?OpenAI::Models::Beta::Threads::text_delta_block - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?text: OpenAI::Models::Beta::Threads::TextDelta, + ?type: :text + ) -> void def to_hash: -> OpenAI::Models::Beta::Threads::text_delta_block end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 5a60c3e4..6c256523 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -39,20 +39,16 @@ module OpenAI OpenAI::Models::CompletionUsage ) -> OpenAI::Models::CompletionUsage - def initialize: - ( - id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice], - created: Integer, - model: String, - ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, - ?system_fingerprint: String, - ?usage: OpenAI::Models::CompletionUsage, - ?object: :"chat.completion" - ) -> void - | ( - ?OpenAI::Models::Chat::chat_completion | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice], + created: Integer, + model: String, + ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, + ?system_fingerprint: String, + ?usage: OpenAI::Models::CompletionUsage, + ?object: :"chat.completion" + ) -> void def to_hash: -> OpenAI::Models::Chat::chat_completion @@ -73,17 +69,12 @@ module OpenAI 
attr_accessor message: OpenAI::Models::Chat::ChatCompletionMessage - def initialize: - ( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, - index: Integer, - logprobs: OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs?, - message: OpenAI::Models::Chat::ChatCompletionMessage - ) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletion::choice - | OpenAI::BaseModel data - ) -> void + def initialize: ( + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, + index: Integer, + logprobs: OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs?, + message: OpenAI::Models::Chat::ChatCompletionMessage + ) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletion::choice @@ -111,15 +102,10 @@ module OpenAI attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? - def initialize: - ( - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? - ) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletion::Choice::logprobs - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + ) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletion::Choice::logprobs end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index e1cfebf4..fbe3c3f8 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -36,20 +36,15 @@ module OpenAI ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] ) -> ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] - def initialize: - ( - ?audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?, - ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, - ?function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, - ?name: String, - ?refusal: String?, - ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], - ?role: :assistant - ) -> void - | ( - ?OpenAI::Models::Chat::chat_completion_assistant_message_param - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?, + ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, + ?function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, + ?name: String, + ?refusal: String?, + ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + ?role: :assistant + ) -> void def to_hash: -> OpenAI::Models::Chat::chat_completion_assistant_message_param @@ -58,12 +53,7 @@ module OpenAI class Audio < OpenAI::BaseModel attr_accessor id: String - def initialize: - (id: String) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::audio - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::audio end @@ -96,12 +86,7 @@ module OpenAI attr_accessor name: String - def initialize: - (arguments: String, name: String) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::function_call - | OpenAI::BaseModel data - ) -> void + def initialize: (arguments: String, name: String) -> void 
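# (Editorial sketch, not generated content.) The branch removed above accepted a
# single positional `data` argument (a hash-shaped type alias or an OpenAI::BaseModel),
# presumably for coercion-style construction; with it gone, the declared way to build
# these models is keyword arguments only. Assuming the runtime constructor mirrors
# the signature:
#
#   OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall.new(
#     arguments: '{"city":"San Francisco"}', # illustrative JSON-encoded arguments string
#     name: "get_weather"                    # illustrative function name
#   )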
def to_hash: -> OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::function_call end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index f7d13734..856fe6c8 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -16,17 +16,12 @@ module OpenAI attr_accessor transcript: String - def initialize: - ( - id: String, - data: String, - expires_at: Integer, - transcript: String - ) -> void - | ( - ?OpenAI::Models::Chat::chat_completion_audio - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + data: String, + expires_at: Integer, + transcript: String + ) -> void def to_hash: -> OpenAI::Models::Chat::chat_completion_audio end diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index 6a35e189..d90d22b0 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -15,15 +15,10 @@ module OpenAI attr_accessor voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice - def initialize: - ( - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice - ) -> void - | ( - ?OpenAI::Models::Chat::chat_completion_audio_param - | OpenAI::BaseModel data - ) -> void + def initialize: ( + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + ) -> void def to_hash: -> OpenAI::Models::Chat::chat_completion_audio_param diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 5874643a..3eef3ef4 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -39,21 +39,16 @@ module OpenAI OpenAI::Models::CompletionUsage ) -> OpenAI::Models::CompletionUsage - def initialize: - ( - id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], - created: Integer, - model: String, - ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, - ?system_fingerprint: String, - ?usage: OpenAI::Models::CompletionUsage, - ?object: :"chat.completion.chunk" - ) -> void - | ( - ?OpenAI::Models::Chat::chat_completion_chunk - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], + created: Integer, + model: String, + ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, + ?system_fingerprint: String, + ?usage: OpenAI::Models::CompletionUsage, + ?object: :"chat.completion.chunk" + ) -> void def to_hash: -> OpenAI::Models::Chat::chat_completion_chunk @@ -74,17 +69,12 @@ module OpenAI attr_accessor logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? - def initialize: - ( - delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, - index: Integer, - ?logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? 
- ) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionChunk::choice - | OpenAI::BaseModel data - ) -> void + def initialize: ( + delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, + index: Integer, + ?logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? + ) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::choice @@ -120,18 +110,13 @@ module OpenAI ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] - def initialize: - ( - ?content: String?, - ?function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, - ?refusal: String?, - ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, - ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] - ) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::delta - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?content: String?, + ?function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + ?refusal: String?, + ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, + ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + ) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::delta @@ -146,12 +131,7 @@ module OpenAI def name=: (String) -> String - def initialize: - (?arguments: String, ?name: String) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::function_call - | OpenAI::BaseModel data - ) -> void + def initialize: (?arguments: String, ?name: String) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::function_call end @@ -195,17 +175,12 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ - def initialize: - ( - index: Integer, - ?id: String, - ?function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ - ) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + index: Integer, + ?id: String, + ?function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::tool_call @@ -220,12 +195,7 @@ module OpenAI def name=: (String) -> String - def initialize: - (?arguments: String, ?name: String) -> void - | ( - ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::function - | OpenAI::BaseModel data - ) -> void + def initialize: (?arguments: String, ?name: String) -> void def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::function end @@ -264,15 +234,10 @@ module OpenAI attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? - def initialize: - ( - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? 
-              ) -> void
-            | (
-                ?OpenAI::Models::Chat::ChatCompletionChunk::Choice::logprobs
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?,
+              refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::logprobs
           end
diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs
index aff3d3a0..01231844 100644
--- a/sig/openai/models/chat/chat_completion_content_part.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part.rbs
@@ -22,15 +22,10 @@ module OpenAI
 
           attr_accessor type: :file
 
-          def initialize:
-            (
-              file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File,
-              ?type: :file
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionContentPart::file
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File,
+            ?type: :file
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPart::file
 
@@ -49,12 +44,11 @@ module OpenAI
 
             def filename=: (String) -> String
 
-            def initialize:
-              (?file_data: String, ?file_id: String, ?filename: String) -> void
-            | (
-                ?OpenAI::Models::Chat::ChatCompletionContentPart::File::file
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?file_data: String,
+              ?file_id: String,
+              ?filename: String
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPart::File::file
           end
diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs
index 8576b8d1..b80ee986 100644
--- a/sig/openai/models/chat/chat_completion_content_part_image.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs
@@ -15,15 +15,10 @@ module OpenAI
 
         attr_accessor type: :image_url
 
-        def initialize:
-          (
-            image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL,
-            ?type: :image_url
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_content_part_image
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL,
+          ?type: :image_url
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_image
 
@@ -42,15 +37,10 @@ module OpenAI
             OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
           ) -> OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
 
-          def initialize:
-            (
-              url: String,
-              ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionContentPartImage::image_url
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            url: String,
+            ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPartImage::image_url
 
diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
index d750e4ff..bb409774 100644
--- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
@@ -15,15 +15,10 @@ module OpenAI
 
         attr_accessor type: :input_audio
 
-        def initialize:
-          (
-            input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio,
-            ?type: :input_audio
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_content_part_input_audio
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio,
+          ?type: :input_audio
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_input_audio
 
@@ -38,15 +33,10 @@ module OpenAI
 
           attr_accessor format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_
 
-          def initialize:
-            (
-              data: String,
-              format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::input_audio
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            data: String,
+            format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::input_audio
 
diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
index 3d783701..e69c1ee3 100644
--- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
@@ -12,12 +12,7 @@ module OpenAI
 
         attr_accessor type: :refusal
 
-        def initialize:
-          (refusal: String, ?type: :refusal) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_content_part_refusal
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (refusal: String, ?type: :refusal) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_refusal
       end
diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs
index 9f35ab25..9c723c66 100644
--- a/sig/openai/models/chat/chat_completion_content_part_text.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs
@@ -11,12 +11,7 @@ module OpenAI
 
         attr_accessor type: :text
 
-        def initialize:
-          (text: String, ?type: :text) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_content_part_text
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (text: String, ?type: :text) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_text
       end
diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs
index bcb4421a..2d8a2cf3 100644
--- a/sig/openai/models/chat/chat_completion_deleted.rbs
+++ b/sig/openai/models/chat/chat_completion_deleted.rbs
@@ -14,16 +14,11 @@ module OpenAI
 
         attr_accessor object: :"chat.completion.deleted"
 
-        def initialize:
-          (
-            id: String,
-            deleted: bool,
-            ?object: :"chat.completion.deleted"
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_deleted
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          deleted: bool,
+          ?object: :"chat.completion.deleted"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_deleted
       end
diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
index 9a009158..e9cbba0a 100644
--- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
@@ -20,16 +20,11 @@ module OpenAI
 
         def name=: (String) -> String
 
-        def initialize:
-          (
-            content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content,
-            ?name: String,
-            ?role: :developer
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_developer_message_param
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content,
+          ?name: String,
+          ?role: :developer
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_developer_message_param
 
diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs
index fdc44315..b5e722fb 100644
--- a/sig/openai/models/chat/chat_completion_function_call_option.rbs
+++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs
@@ -9,12 +9,7 @@ module OpenAI
       class ChatCompletionFunctionCallOption < OpenAI::BaseModel
         attr_accessor name: String
 
-        def initialize:
-          (name: String) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_function_call_option
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (name: String) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_function_call_option
       end
diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs
index 718c3dca..b9ac7154 100644
--- a/sig/openai/models/chat/chat_completion_function_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs
@@ -14,12 +14,11 @@ module OpenAI
 
         attr_accessor role: :function
 
-        def initialize:
-          (content: String?, name: String, ?role: :function) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_function_message_param
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: String?,
+          name: String,
+          ?role: :function
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_function_message_param
       end
diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs
index 4323b546..b13a6e43 100644
--- a/sig/openai/models/chat/chat_completion_message.rbs
+++ b/sig/openai/models/chat/chat_completion_message.rbs
@@ -42,20 +42,15 @@ module OpenAI
           ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]
         ) -> ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]
 
-        def initialize:
-          (
-            content: String?,
-            refusal: String?,
-            ?annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation],
-            ?audio: OpenAI::Models::Chat::ChatCompletionAudio?,
-            ?function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall,
-            ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
-            ?role: :assistant
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_message
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: String?,
+          refusal: String?,
+          ?annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation],
+          ?audio: OpenAI::Models::Chat::ChatCompletionAudio?,
+          ?function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall,
+          ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall],
+          ?role: :assistant
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_message
 
@@ -70,15 +65,10 @@ module OpenAI
 
           attr_accessor url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation
 
-          def initialize:
-            (
-              url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation,
-              ?type: :url_citation
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionMessage::annotation
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation,
+            ?type: :url_citation
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::annotation
 
@@ -99,17 +89,12 @@ module OpenAI
 
             attr_accessor url: String
 
-            def initialize:
-              (
-                end_index: Integer,
-                start_index: Integer,
-                title: String,
-                url: String
-              ) -> void
-            | (
-                ?OpenAI::Models::Chat::ChatCompletionMessage::Annotation::url_citation
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              end_index: Integer,
+              start_index: Integer,
+              title: String,
+              url: String
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::Annotation::url_citation
           end
@@ -122,12 +107,7 @@ module OpenAI
 
           attr_accessor name: String
 
-          def initialize:
-            (arguments: String, name: String) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionMessage::function_call
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (arguments: String, name: String) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::function_call
         end
diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
index e19eb318..4ed4b2d2 100644
--- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs
+++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
@@ -18,16 +18,11 @@ module OpenAI
 
         attr_accessor type: :function
 
-        def initialize:
-          (
-            id: String,
-            function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function,
-            ?type: :function
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_message_tool_call
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function,
+          ?type: :function
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_message_tool_call
 
@@ -38,12 +33,7 @@ module OpenAI
 
           attr_accessor name: String
 
-          def initialize:
-            (arguments: String, name: String) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionMessageToolCall::function
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (arguments: String, name: String) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessageToolCall::function
         end
diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
index 80d234ef..8da9abd2 100644
--- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
+++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
@@ -15,15 +15,10 @@ module OpenAI
 
         attr_accessor type: :function
 
-        def initialize:
-          (
-            function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function,
-            ?type: :function
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_named_tool_choice
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function,
+          ?type: :function
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_named_tool_choice
 
@@ -32,12 +27,7 @@ module OpenAI
         class Function < OpenAI::BaseModel
           attr_accessor name: String
 
-          def initialize:
-            (name: String) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionNamedToolChoice::function
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (name: String) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionNamedToolChoice::function
         end
diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs
index d1006f45..e38d1c31 100644
--- a/sig/openai/models/chat/chat_completion_prediction_content.rbs
+++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs
@@ -15,15 +15,10 @@ module OpenAI
 
         attr_accessor type: :content
 
-        def initialize:
-          (
-            content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content,
-            ?type: :content
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_prediction_content
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content,
+          ?type: :content
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_prediction_content
 
diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs
index dabd6ab3..2719aa2e 100644
--- a/sig/openai/models/chat/chat_completion_store_message.rbs
+++ b/sig/openai/models/chat/chat_completion_store_message.rbs
@@ -11,12 +11,7 @@ module OpenAI
 
         def id=: (String _) -> String
 
-        def initialize:
-          (id: String) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_store_message
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (id: String) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_store_message
       end
diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs
index 3bb7af41..ed0721bc 100644
--- a/sig/openai/models/chat/chat_completion_stream_options.rbs
+++ b/sig/openai/models/chat/chat_completion_stream_options.rbs
@@ -11,12 +11,7 @@ module OpenAI
 
         def include_usage=: (bool) -> bool
 
-        def initialize:
-          (?include_usage: bool) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_stream_options
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?include_usage: bool) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_stream_options
       end
diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs
index 24948420..b3ae49f9 100644
--- a/sig/openai/models/chat/chat_completion_system_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs
@@ -20,16 +20,11 @@ module OpenAI
 
         def name=: (String) -> String
 
-        def initialize:
-          (
-            content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content,
-            ?name: String,
-            ?role: :system
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_system_message_param
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content,
+          ?name: String,
+          ?role: :system
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_system_message_param
 
diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs
index cd55959d..4695ba6a 100644
--- a/sig/openai/models/chat/chat_completion_token_logprob.rbs
+++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs
@@ -21,17 +21,12 @@ module OpenAI
 
         attr_accessor top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]
 
-        def initialize:
-          (
-            token: String,
-            bytes: ::Array[Integer]?,
-            logprob: Float,
-            top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_token_logprob
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          token: String,
+          bytes: ::Array[Integer]?,
+          logprob: Float,
+          top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_token_logprob
 
@@ -45,12 +40,11 @@ module OpenAI
 
           attr_accessor logprob: Float
 
-          def initialize:
-            (token: String, bytes: ::Array[Integer]?, logprob: Float) -> void
-          | (
-              ?OpenAI::Models::Chat::ChatCompletionTokenLogprob::top_logprob
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            token: String,
+            bytes: ::Array[Integer]?,
+            logprob: Float
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::ChatCompletionTokenLogprob::top_logprob
         end
diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs
index 3090dfae..f690d128 100644
--- a/sig/openai/models/chat/chat_completion_tool.rbs
+++ b/sig/openai/models/chat/chat_completion_tool.rbs
@@ -12,14 +12,10 @@ module OpenAI
 
         attr_accessor type: :function
 
-        def initialize:
-          (
-            function: OpenAI::Models::FunctionDefinition,
-            ?type: :function
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_tool | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          function: OpenAI::Models::FunctionDefinition,
+          ?type: :function
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_tool
       end
diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
index 91d693d7..86a425de 100644
--- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
@@ -18,16 +18,11 @@ module OpenAI
 
         attr_accessor tool_call_id: String
 
-        def initialize:
-          (
-            content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content,
-            tool_call_id: String,
-            ?role: :tool
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_tool_message_param
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content,
+          tool_call_id: String,
+          ?role: :tool
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_tool_message_param
 
diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs
index 3f5d1a3f..53192647 100644
--- a/sig/openai/models/chat/chat_completion_user_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs
@@ -20,16 +20,11 @@ module OpenAI
 
         def name=: (String) -> String
 
-        def initialize:
-          (
-            content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content,
-            ?name: String,
-            ?role: :user
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::chat_completion_user_message_param
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content,
+          ?name: String,
+          ?role: :user
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::chat_completion_user_message_param
 
diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs
index ce23df92..56982b45 100644
--- a/sig/openai/models/chat/completion_create_params.rbs
+++ b/sig/openai/models/chat/completion_create_params.rbs
@@ -128,44 +128,39 @@ module OpenAI
           OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions
         ) -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions
 
-        def initialize:
-          (
-            messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
-            model: OpenAI::Models::Chat::CompletionCreateParams::model,
-            ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
-            ?frequency_penalty: Float?,
-            ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-            ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
-            ?logit_bias: ::Hash[Symbol, Integer]?,
-            ?logprobs: bool?,
-            ?max_completion_tokens: Integer?,
-            ?max_tokens: Integer?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
-            ?n: Integer?,
-            ?parallel_tool_calls: bool,
-            ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
-            ?presence_penalty: Float?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
-            ?seed: Integer?,
-            ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
-            ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
-            ?store: bool?,
-            ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
-            ?temperature: Float?,
-            ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-            ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
-            ?top_logprobs: Integer?,
-            ?top_p: Float?,
-            ?user: String,
-            ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::completion_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
+          model: OpenAI::Models::Chat::CompletionCreateParams::model,
+          ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+          ?frequency_penalty: Float?,
+          ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
+          ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+          ?logit_bias: ::Hash[Symbol, Integer]?,
+          ?logprobs: bool?,
+          ?max_completion_tokens: Integer?,
+          ?max_tokens: Integer?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+          ?n: Integer?,
+          ?parallel_tool_calls: bool,
+          ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+          ?presence_penalty: Float?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+          ?seed: Integer?,
+          ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
+          ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
+          ?store: bool?,
+          ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+          ?temperature: Float?,
+          ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
+          ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+          ?top_logprobs: Integer?,
+          ?top_p: Float?,
+          ?user: String,
+          ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::completion_create_params
 
@@ -212,16 +207,11 @@ module OpenAI
             OpenAI::Models::function_parameters
           ) -> OpenAI::Models::function_parameters
 
-          def initialize:
-            (
-              name: String,
-              ?description: String,
-              ?parameters: OpenAI::Models::function_parameters
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::CompletionCreateParams::function
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            name: String,
+            ?description: String,
+            ?parameters: OpenAI::Models::function_parameters
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::function
         end
@@ -278,15 +268,10 @@ module OpenAI
 
           attr_accessor user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
 
-          def initialize:
-            (
-              ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
-              ?user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::CompletionCreateParams::web_search_options
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
+            ?user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::web_search_options
 
@@ -311,15 +296,10 @@ module OpenAI
 
             attr_accessor type: :approximate
 
-            def initialize:
-              (
-                approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
-                ?type: :approximate
-              ) -> void
-            | (
-                ?OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::user_location
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
+              ?type: :approximate
+            ) -> void
 
             def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::user_location
 
@@ -348,17 +328,12 @@ module OpenAI
 
               def timezone=: (String) -> String
 
-              def initialize:
-                (
-                  ?city: String,
-                  ?country: String,
-                  ?region: String,
-                  ?timezone: String
-                ) -> void
-              | (
-                  ?OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::approximate
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?city: String,
+                ?country: String,
+                ?region: String,
+                ?timezone: String
+              ) -> void
 
               def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::approximate
             end
diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs
index c4a285ad..e32e018f 100644
--- a/sig/openai/models/chat/completion_delete_params.rbs
+++ b/sig/openai/models/chat/completion_delete_params.rbs
@@ -7,12 +7,7 @@ module OpenAI
         extend OpenAI::RequestParameters::Converter
         include OpenAI::RequestParameters
 
-        def initialize:
-          (?request_options: OpenAI::request_opts) -> void
-        | (
-            ?OpenAI::Models::Chat::completion_delete_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?request_options: OpenAI::request_opts) -> void
 
         def to_hash: -> OpenAI::Models::Chat::completion_delete_params
       end
diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs
index 38d48801..9540d790 100644
--- a/sig/openai/models/chat/completion_list_params.rbs
+++ b/sig/openai/models/chat/completion_list_params.rbs
@@ -35,19 +35,14 @@ module OpenAI
           OpenAI::Models::Chat::CompletionListParams::order
         ) -> OpenAI::Models::Chat::CompletionListParams::order
 
-        def initialize:
-          (
-            ?after: String,
-            ?limit: Integer,
-            ?metadata: OpenAI::Models::metadata?,
-            ?model: String,
-            ?order: OpenAI::Models::Chat::CompletionListParams::order,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::completion_list_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?after: String,
+          ?limit: Integer,
+          ?metadata: OpenAI::Models::metadata?,
+          ?model: String,
+          ?order: OpenAI::Models::Chat::CompletionListParams::order,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::completion_list_params
 
diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs
index 96a62d84..89d6e6d5 100644
--- a/sig/openai/models/chat/completion_retrieve_params.rbs
+++ b/sig/openai/models/chat/completion_retrieve_params.rbs
@@ -7,12 +7,7 @@ module OpenAI
         extend OpenAI::RequestParameters::Converter
         include OpenAI::RequestParameters
 
-        def initialize:
-          (?request_options: OpenAI::request_opts) -> void
-        | (
-            ?OpenAI::Models::Chat::completion_retrieve_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?request_options: OpenAI::request_opts) -> void
 
         def to_hash: -> OpenAI::Models::Chat::completion_retrieve_params
       end
diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs
index a5e3906e..ea20f161 100644
--- a/sig/openai/models/chat/completion_update_params.rbs
+++ b/sig/openai/models/chat/completion_update_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor metadata: OpenAI::Models::metadata?
 
-        def initialize:
-          (
-            metadata: OpenAI::Models::metadata?,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-        | (
-            ?OpenAI::Models::Chat::completion_update_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          metadata: OpenAI::Models::metadata?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::Chat::completion_update_params
       end
diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs
index ff666bc2..88456945 100644
--- a/sig/openai/models/chat/completions/message_list_params.rbs
+++ b/sig/openai/models/chat/completions/message_list_params.rbs
@@ -28,17 +28,12 @@ module OpenAI
             OpenAI::Models::Chat::Completions::MessageListParams::order
           ) -> OpenAI::Models::Chat::Completions::MessageListParams::order
 
-          def initialize:
-            (
-              ?after: String,
-              ?limit: Integer,
-              ?order: OpenAI::Models::Chat::Completions::MessageListParams::order,
-              ?request_options: OpenAI::request_opts
-            ) -> void
-          | (
-              ?OpenAI::Models::Chat::Completions::message_list_params
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?after: String,
+            ?limit: Integer,
+            ?order: OpenAI::Models::Chat::Completions::MessageListParams::order,
+            ?request_options: OpenAI::request_opts
+          ) -> void
 
           def to_hash: -> OpenAI::Models::Chat::Completions::message_list_params
 
diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs
index 02a4b26d..a3873e2d 100644
--- a/sig/openai/models/comparison_filter.rbs
+++ b/sig/openai/models/comparison_filter.rbs
@@ -14,13 +14,11 @@ module OpenAI
 
       attr_accessor value: OpenAI::Models::ComparisonFilter::value
 
-      def initialize:
-        (
-          key: String,
-          type: OpenAI::Models::ComparisonFilter::type_,
-          value: OpenAI::Models::ComparisonFilter::value
-        ) -> void
-      | (?OpenAI::Models::comparison_filter | OpenAI::BaseModel data) -> void
+      def initialize: (
+        key: String,
+        type: OpenAI::Models::ComparisonFilter::type_,
+        value: OpenAI::Models::ComparisonFilter::value
+      ) -> void
 
       def to_hash: -> OpenAI::Models::comparison_filter
 
diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs
index 13acceee..0e189869 100644
--- a/sig/openai/models/completion.rbs
+++ b/sig/openai/models/completion.rbs
@@ -32,17 +32,15 @@ module OpenAI
         OpenAI::Models::CompletionUsage
       ) -> OpenAI::Models::CompletionUsage
 
-      def initialize:
-        (
-          id: String,
-          choices: ::Array[OpenAI::Models::CompletionChoice],
-          created: Integer,
-          model: String,
-          ?system_fingerprint: String,
-          ?usage: OpenAI::Models::CompletionUsage,
-          ?object: :text_completion
-        ) -> void
-      | (?OpenAI::Models::completion | OpenAI::BaseModel data) -> void
+      def initialize: (
+        id: String,
+        choices: ::Array[OpenAI::Models::CompletionChoice],
+        created: Integer,
+        model: String,
+        ?system_fingerprint: String,
+        ?usage: OpenAI::Models::CompletionUsage,
+        ?object: :text_completion
+      ) -> void
 
       def to_hash: -> OpenAI::Models::completion
     end
diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs
index e08de29b..05e3ac68 100644
--- a/sig/openai/models/completion_choice.rbs
+++ b/sig/openai/models/completion_choice.rbs
@@ -17,14 +17,12 @@ module OpenAI
 
       attr_accessor text: String
 
-      def initialize:
-        (
-          finish_reason: OpenAI::Models::CompletionChoice::finish_reason,
-          index: Integer,
-          logprobs: OpenAI::Models::CompletionChoice::Logprobs?,
-          text: String
-        ) -> void
-      | (?OpenAI::Models::completion_choice | OpenAI::BaseModel data) -> void
+      def initialize: (
+        finish_reason: OpenAI::Models::CompletionChoice::finish_reason,
+        index: Integer,
+        logprobs: OpenAI::Models::CompletionChoice::Logprobs?,
+        text: String
+      ) -> void
 
       def to_hash: -> OpenAI::Models::completion_choice
 
@@ -65,16 +63,12 @@ module OpenAI
           ::Array[::Hash[Symbol, Float]]
         ) -> ::Array[::Hash[Symbol, Float]]
 
-        def initialize:
-          (
-            ?text_offset: ::Array[Integer],
-            ?token_logprobs: ::Array[Float],
-            ?tokens: ::Array[String],
-            ?top_logprobs: ::Array[::Hash[Symbol, Float]]
-          ) -> void
-        | (
-            ?OpenAI::Models::CompletionChoice::logprobs | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?text_offset: ::Array[Integer],
+          ?token_logprobs: ::Array[Float],
+          ?tokens: ::Array[String],
+          ?top_logprobs: ::Array[::Hash[Symbol, Float]]
+        ) -> void
 
         def to_hash: -> OpenAI::Models::CompletionChoice::logprobs
       end
diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs
index a860e0b1..fd2677d3 100644
--- a/sig/openai/models/completion_create_params.rbs
+++ b/sig/openai/models/completion_create_params.rbs
@@ -62,30 +62,26 @@ module OpenAI
 
       def user=: (String) -> String
 
-      def initialize:
-        (
-          model: OpenAI::Models::CompletionCreateParams::model,
-          prompt: OpenAI::Models::CompletionCreateParams::prompt?,
-          ?best_of: Integer?,
-          ?echo: bool?,
-          ?frequency_penalty: Float?,
-          ?logit_bias: ::Hash[Symbol, Integer]?,
-          ?logprobs: Integer?,
-          ?max_tokens: Integer?,
-          ?n: Integer?,
-          ?presence_penalty: Float?,
-          ?seed: Integer?,
-          ?stop: OpenAI::Models::CompletionCreateParams::stop?,
-          ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
-          ?suffix: String?,
-          ?temperature: Float?,
-          ?top_p: Float?,
-          ?user: String,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-      | (
-          ?OpenAI::Models::completion_create_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        model: OpenAI::Models::CompletionCreateParams::model,
+        prompt: OpenAI::Models::CompletionCreateParams::prompt?,
+        ?best_of: Integer?,
+        ?echo: bool?,
+        ?frequency_penalty: Float?,
+        ?logit_bias: ::Hash[Symbol, Integer]?,
+        ?logprobs: Integer?,
+        ?max_tokens: Integer?,
+        ?n: Integer?,
+        ?presence_penalty: Float?,
+        ?seed: Integer?,
+        ?stop: OpenAI::Models::CompletionCreateParams::stop?,
+        ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+        ?suffix: String?,
+        ?temperature: Float?,
+        ?top_p: Float?,
+        ?user: String,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::completion_create_params
 
diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs
index 882a8f98..fe20a24b 100644
--- a/sig/openai/models/completion_usage.rbs
+++ b/sig/openai/models/completion_usage.rbs
@@ -28,15 +28,13 @@ module OpenAI
         OpenAI::Models::CompletionUsage::PromptTokensDetails
       ) -> OpenAI::Models::CompletionUsage::PromptTokensDetails
 
-      def initialize:
-        (
-          completion_tokens: Integer,
-          prompt_tokens: Integer,
-          total_tokens: Integer,
-          ?completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails,
-          ?prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
-        ) -> void
-      | (?OpenAI::Models::completion_usage | OpenAI::BaseModel data) -> void
+      def initialize: (
+        completion_tokens: Integer,
+        prompt_tokens: Integer,
+        total_tokens: Integer,
+        ?completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails,
+        ?prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
+      ) -> void
 
       def to_hash: -> OpenAI::Models::completion_usage
 
@@ -65,17 +63,12 @@ module OpenAI
 
         def rejected_prediction_tokens=: (Integer) -> Integer
 
-        def initialize:
-          (
-            ?accepted_prediction_tokens: Integer,
-            ?audio_tokens: Integer,
-            ?reasoning_tokens: Integer,
-            ?rejected_prediction_tokens: Integer
-          ) -> void
-        | (
-            ?OpenAI::Models::CompletionUsage::completion_tokens_details
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?accepted_prediction_tokens: Integer,
+          ?audio_tokens: Integer,
+          ?reasoning_tokens: Integer,
+          ?rejected_prediction_tokens: Integer
+        ) -> void
 
         def to_hash: -> OpenAI::Models::CompletionUsage::completion_tokens_details
       end
@@ -92,12 +85,10 @@ module OpenAI
 
         def cached_tokens=: (Integer) -> Integer
 
-        def initialize:
-          (?audio_tokens: Integer, ?cached_tokens: Integer) -> void
-        | (
-            ?OpenAI::Models::CompletionUsage::prompt_tokens_details
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?audio_tokens: Integer,
+          ?cached_tokens: Integer
+        ) -> void
 
         def to_hash: -> OpenAI::Models::CompletionUsage::prompt_tokens_details
       end
diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs
index ddbe75e7..35aba042 100644
--- a/sig/openai/models/compound_filter.rbs
+++ b/sig/openai/models/compound_filter.rbs
@@ -11,12 +11,10 @@ module OpenAI
 
       attr_accessor type: OpenAI::Models::CompoundFilter::type_
 
-      def initialize:
-        (
-          filters: ::Array[OpenAI::Models::CompoundFilter::filter],
-          type: OpenAI::Models::CompoundFilter::type_
-        ) -> void
-      | (?OpenAI::Models::compound_filter | OpenAI::BaseModel data) -> void
+      def initialize: (
+        filters: ::Array[OpenAI::Models::CompoundFilter::filter],
+        type: OpenAI::Models::CompoundFilter::type_
+      ) -> void
 
       def to_hash: -> OpenAI::Models::compound_filter
 
diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs
index 8fa6b6d9..e9ae13dc 100644
--- a/sig/openai/models/create_embedding_response.rbs
+++ b/sig/openai/models/create_embedding_response.rbs
@@ -17,16 +17,12 @@ module OpenAI
 
       attr_accessor usage: OpenAI::Models::CreateEmbeddingResponse::Usage
 
-      def initialize:
-        (
-          data: ::Array[OpenAI::Models::Embedding],
-          model: String,
-          usage: OpenAI::Models::CreateEmbeddingResponse::Usage,
-          ?object: :list
-        ) -> void
-      | (
-          ?OpenAI::Models::create_embedding_response | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        data: ::Array[OpenAI::Models::Embedding],
+        model: String,
+        usage: OpenAI::Models::CreateEmbeddingResponse::Usage,
+        ?object: :list
+      ) -> void
 
       def to_hash: -> OpenAI::Models::create_embedding_response
 
@@ -37,12 +33,7 @@ module OpenAI
 
         attr_accessor total_tokens: Integer
 
-        def initialize:
-          (prompt_tokens: Integer, total_tokens: Integer) -> void
-        | (
-            ?OpenAI::Models::CreateEmbeddingResponse::usage
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (prompt_tokens: Integer, total_tokens: Integer) -> void
 
         def to_hash: -> OpenAI::Models::CreateEmbeddingResponse::usage
       end
diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs
index 892d3226..08eb26a7 100644
--- a/sig/openai/models/embedding.rbs
+++ b/sig/openai/models/embedding.rbs
@@ -10,9 +10,11 @@ module OpenAI
 
       attr_accessor object: :embedding
 
-      def initialize:
-        (embedding: ::Array[Float], index: Integer, ?object: :embedding) -> void
-      | (?OpenAI::Models::embedding | OpenAI::BaseModel data) -> void
+      def initialize: (
+        embedding: ::Array[Float],
+        index: Integer,
+        ?object: :embedding
+      ) -> void
 
       def to_hash: -> OpenAI::Models::embedding
     end
diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs
index 459ee35c..59ae7a9f 100644
--- a/sig/openai/models/embedding_create_params.rbs
+++ b/sig/openai/models/embedding_create_params.rbs
@@ -32,18 +32,14 @@ module OpenAI
 
       def user=: (String) -> String
 
-      def initialize:
-        (
-          input: OpenAI::Models::EmbeddingCreateParams::input,
-          model: OpenAI::Models::EmbeddingCreateParams::model,
-          ?dimensions: Integer,
-          ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format,
-          ?user: String,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-      | (
-          ?OpenAI::Models::embedding_create_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        input: OpenAI::Models::EmbeddingCreateParams::input,
+        model: OpenAI::Models::EmbeddingCreateParams::model,
+        ?dimensions: Integer,
+        ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format,
+        ?user: String,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::embedding_create_params
 
diff --git a/sig/openai/models/error_object.rbs b/sig/openai/models/error_object.rbs
index 1e44e215..bd0ae3a7 100644
--- a/sig/openai/models/error_object.rbs
+++ b/sig/openai/models/error_object.rbs
@@ -12,9 +12,12 @@ module OpenAI
 
       attr_accessor type: String
 
-      def initialize:
-        (code: String?, message: String, param: String?, type: String) -> void
-      | (?OpenAI::Models::error_object | OpenAI::BaseModel data) -> void
+      def initialize: (
+        code: String?,
+        message: String,
+        param: String?,
+        type: String
+      ) -> void
 
       def to_hash: -> OpenAI::Models::error_object
     end
diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs
index 638daf85..133e1f62 100644
--- a/sig/openai/models/file_content_params.rbs
+++ b/sig/openai/models/file_content_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
      include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-      | (
-          ?OpenAI::Models::file_content_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
       def to_hash: -> OpenAI::Models::file_content_params
     end
diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs
index 76f6a57d..9426d743 100644
--- a/sig/openai/models/file_create_params.rbs
+++ b/sig/openai/models/file_create_params.rbs
@@ -12,13 +12,11 @@ module OpenAI
 
       attr_accessor purpose: OpenAI::Models::file_purpose
 
-      def initialize:
-        (
-          file: IO | StringIO,
-          purpose: OpenAI::Models::file_purpose,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-      | (?OpenAI::Models::file_create_params | OpenAI::BaseModel data) -> void
+      def initialize: (
+        file: IO | StringIO,
+        purpose: OpenAI::Models::file_purpose,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::file_create_params
     end
diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs
index d611f520..36ce2488 100644
--- a/sig/openai/models/file_delete_params.rbs
+++ b/sig/openai/models/file_delete_params.rbs
@@ -6,9 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
      include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-      | (?OpenAI::Models::file_delete_params | OpenAI::BaseModel data) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
       def to_hash: -> OpenAI::Models::file_delete_params
     end
diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs
index 5fae57f0..04eef616 100644
--- a/sig/openai/models/file_deleted.rbs
+++ b/sig/openai/models/file_deleted.rbs
@@ -9,9 +9,7 @@ module OpenAI
 
       attr_accessor object: :file
 
-      def initialize:
-        (id: String, deleted: bool, ?object: :file) -> void
-      | (?OpenAI::Models::file_deleted | OpenAI::BaseModel data) -> void
+      def initialize: (id: String, deleted: bool, ?object: :file) -> void
 
       def to_hash: -> OpenAI::Models::file_deleted
     end
diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs
index b67cdad6..2f36b51c 100644
--- a/sig/openai/models/file_list_params.rbs
+++ b/sig/openai/models/file_list_params.rbs
@@ -31,15 +31,13 @@ module OpenAI
 
       def purpose=: (String) -> String
 
-      def initialize:
-        (
-          ?after: String,
-          ?limit: Integer,
-          ?order: OpenAI::Models::FileListParams::order,
-          ?purpose: String,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-      | (?OpenAI::Models::file_list_params | OpenAI::BaseModel data) -> void
+      def initialize: (
+        ?after: String,
+        ?limit: Integer,
+        ?order: OpenAI::Models::FileListParams::order,
+        ?purpose: String,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::file_list_params
 
diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs
index 44034e82..f229d68d 100644
--- a/sig/openai/models/file_object.rbs
+++ b/sig/openai/models/file_object.rbs
@@ -36,19 +36,17 @@ module OpenAI
 
       def status_details=: (String) -> String
 
-      def initialize:
-        (
-          id: String,
-          bytes: Integer,
-          created_at: Integer,
-          filename: String,
-          purpose: OpenAI::Models::FileObject::purpose,
-          status: OpenAI::Models::FileObject::status,
-          ?expires_at: Integer,
-          ?status_details: String,
-          ?object: :file
-        ) -> void
-      | (?OpenAI::Models::file_object | OpenAI::BaseModel data) -> void
+      def initialize: (
+        id: String,
+        bytes: Integer,
+        created_at: Integer,
+        filename: String,
+        purpose: OpenAI::Models::FileObject::purpose,
+        status: OpenAI::Models::FileObject::status,
+        ?expires_at: Integer,
+        ?status_details: String,
+        ?object: :file
+      ) -> void
 
       def to_hash: -> OpenAI::Models::file_object
 
diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs
index c1163712..79f137bc 100644
--- a/sig/openai/models/file_retrieve_params.rbs
+++ b/sig/openai/models/file_retrieve_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
      extend OpenAI::RequestParameters::Converter
      include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-      | (
-          ?OpenAI::Models::file_retrieve_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
      def to_hash: -> OpenAI::Models::file_retrieve_params
     end
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
index 81e5f0bc..3b4f5915 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
@@ -70,32 +70,27 @@ module OpenAI
           OpenAI::Models::FineTuning::FineTuningJob::Method
         ) -> OpenAI::Models::FineTuning::FineTuningJob::Method
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            error: OpenAI::Models::FineTuning::FineTuningJob::Error?,
-            fine_tuned_model: String?,
-            finished_at: Integer?,
-            hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters,
-            model: String,
-            organization_id: String,
-            result_files: ::Array[String],
-            seed: Integer,
-            status: OpenAI::Models::FineTuning::FineTuningJob::status,
-            trained_tokens: Integer?,
-            training_file: String,
-            validation_file: String?,
-            ?estimated_finish: Integer?,
-            ?integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?method_: OpenAI::Models::FineTuning::FineTuningJob::Method,
-            ?object: :"fine_tuning.job"
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::fine_tuning_job
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          error: OpenAI::Models::FineTuning::FineTuningJob::Error?,
+          fine_tuned_model: String?,
+          finished_at: Integer?,
+          hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters,
+          model: String,
+          organization_id: String,
+          result_files: ::Array[String],
+          seed: Integer,
+          status: OpenAI::Models::FineTuning::FineTuningJob::status,
+          trained_tokens: Integer?,
+          training_file: String,
+          validation_file: String?,
+          ?estimated_finish: Integer?,
+          ?integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?method_: OpenAI::Models::FineTuning::FineTuningJob::Method,
+          ?object: :"fine_tuning.job"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job
 
@@ -108,12 +103,11 @@ module OpenAI
 
           attr_accessor param: String?
 
-          def initialize:
-            (code: String, message: String, param: String?) -> void
-          | (
-              ?OpenAI::Models::FineTuning::FineTuningJob::error
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            code: String,
+            message: String,
+            param: String?
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::error
         end
@@ -144,16 +138,11 @@ module OpenAI
              OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs
            ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs
 
-          def initialize:
-            (
-              ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size,
-              ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier,
-              ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs
-            ) -> void
-          | (
-              ?OpenAI::Models::FineTuning::FineTuningJob::hyperparameters
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size,
+            ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier,
+            ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::hyperparameters
 
@@ -221,16 +210,11 @@ module OpenAI
              OpenAI::Models::FineTuning::FineTuningJob::Method::type_
            ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::type_
 
-          def initialize:
-            (
-              ?dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo,
-              ?supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised,
-              ?type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_
-            ) -> void
-          | (
-              ?OpenAI::Models::FineTuning::FineTuningJob::method_
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo,
+            ?supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised,
+            ?type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::method_
 
@@ -246,14 +230,9 @@ module OpenAI
                OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters
              ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters
 
-            def initialize:
-              (
-                ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters
-              ) -> void
-            | (
-                ?OpenAI::Models::FineTuning::FineTuningJob::Method::dpo
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters
+            ) -> void
 
             def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::dpo
 
@@ -290,17 +269,12 @@ module OpenAI
                OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs
              ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs
 
-              def initialize:
-                (
-                  ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size,
-                  ?beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta,
-                  ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier,
-                  ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs
-                ) -> void
-              | (
-                  ?OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::hyperparameters
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size,
+                ?beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta,
+                ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier,
+                ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs
+              ) -> void
 
               def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::hyperparameters
 
@@ -342,14 +316,9 @@ module OpenAI
                OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters
              ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters
 
-            def initialize:
-              (
-                ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters
-              ) -> void
-            | (
-                ?OpenAI::Models::FineTuning::FineTuningJob::Method::supervised
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters
+            ) -> void
 
             def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::supervised
 
@@ -379,16 +348,11 @@ module OpenAI
                OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs
              ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs
 
-              def initialize:
-                (
-                  ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size,
-                  ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier,
-                  ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs
-                ) -> void
-              | (
-                  ?OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::hyperparameters
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size,
+                ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier,
+                ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs
+              ) -> void
 
               def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::hyperparameters
 
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
index 950c38f0..239f0331 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
@@ -36,20 +36,15 @@ module OpenAI
            OpenAI::Models::FineTuning::FineTuningJobEvent::type_
          ) -> OpenAI::Models::FineTuning::FineTuningJobEvent::type_
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            level: OpenAI::Models::FineTuning::FineTuningJobEvent::level,
-            message: String,
-            ?data: top,
-            ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_,
-            ?object: :"fine_tuning.job.event"
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::fine_tuning_job_event
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          level: OpenAI::Models::FineTuning::FineTuningJobEvent::level,
+          message: String,
+          ?data: top,
+          ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_,
+          ?object: :"fine_tuning.job.event"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_event
 
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
index c8d6439d..bc0a81a5 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
@@ -23,17 +23,12 @@ module OpenAI
 
        def tags=: (::Array[String]) -> ::Array[String]
 
-        def initialize:
-          (
-            project: String,
-            ?entity: String?,
-            ?name: String?,
-            ?tags: ::Array[String]
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          project: String,
+          ?entity: String?,
+          ?name: String?,
+          ?tags: ::Array[String]
+        ) -> void
 
        def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration
       end
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
index 13b6893a..18b23c37 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
@@ -15,15 +15,10 @@ module OpenAI
 
        attr_accessor wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration
 
-        def initialize:
-          (
-            wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration,
-            ?type: :wandb
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration_object
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration,
+          ?type: :wandb
+        ) -> void
 
        def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration_object
       end
diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs
index e9218d86..d3a98211 100644
--- a/sig/openai/models/fine_tuning/job_cancel_params.rbs
+++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs
@@ -7,12 +7,7 @@ module OpenAI
        extend OpenAI::RequestParameters::Converter
        include OpenAI::RequestParameters
 
-        def initialize:
-          (?request_options: OpenAI::request_opts) -> void
-        | (
-            ?OpenAI::Models::FineTuning::job_cancel_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?request_options: OpenAI::request_opts) -> void
 
        def to_hash: -> OpenAI::Models::FineTuning::job_cancel_params
       end
diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs
index dbeaa601..8942f62f 100644
--- a/sig/openai/models/fine_tuning/job_create_params.rbs
+++ b/sig/openai/models/fine_tuning/job_create_params.rbs
@@ -45,23 +45,18 @@ module OpenAI
 
        attr_accessor validation_file: String?
 
-        def initialize:
-          (
-            model: OpenAI::Models::FineTuning::JobCreateParams::model,
-            training_file: String,
-            ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters,
-            ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method,
-            ?seed: Integer?,
-            ?suffix: String?,
-            ?validation_file: String?,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::job_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          model: OpenAI::Models::FineTuning::JobCreateParams::model,
+          training_file: String,
+          ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters,
+          ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method,
+          ?seed: Integer?,
+          ?suffix: String?,
+          ?validation_file: String?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::FineTuning::job_create_params
 
@@ -110,16 +105,11 @@ module OpenAI
              OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs
            ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs
 
-          def initialize:
-            (
-              ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size,
-              ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier,
-              ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs
-            ) -> void
-          | (
-              ?OpenAI::Models::FineTuning::JobCreateParams::hyperparameters
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size,
+            ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier,
+            ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::hyperparameters
 
@@ -153,15 +143,10 @@ module OpenAI
 
          attr_accessor wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb
 
-          def initialize:
-            (
-              wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb,
-              ?type: :wandb
-            ) -> void
-          | (
-              ?OpenAI::Models::FineTuning::JobCreateParams::integration
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb,
+            ?type: :wandb
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::integration
 
@@ -184,17 +169,12 @@ module OpenAI
 
            def tags=: (::Array[String]) -> ::Array[String]
 
-            def initialize:
-              (
-                project: String,
-                ?entity: String?,
-                ?name: String?,
-                ?tags: ::Array[String]
-              ) -> void
-            | (
-                ?OpenAI::Models::FineTuning::JobCreateParams::Integration::wandb
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              project: String,
+              ?entity: String?,
+              ?name: String?,
+              ?tags: ::Array[String]
+            ) -> void
 
             def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Integration::wandb
           end
@@ -226,16 +206,11 @@ module OpenAI
              OpenAI::Models::FineTuning::JobCreateParams::Method::type_
            ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::type_
 
-          def initialize:
-            (
-              ?dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo,
-              ?supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised,
-              ?type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_
-            ) -> void
-          | (
-              ?OpenAI::Models::FineTuning::JobCreateParams::method_
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            ?dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo,
+            ?supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised,
+            ?type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_
+          ) -> void
 
           def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::method_
 
@@ -251,14 +226,9 @@ module OpenAI
                OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters
              ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters
 
-            def initialize:
-              (
-                ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters
-              ) -> void
-            | (
-                ?OpenAI::Models::FineTuning::JobCreateParams::Method::dpo
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters
+            ) -> void
 
             def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::dpo
 
@@ -295,17 +265,12 @@ module OpenAI
                OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs
              ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs
 
-              def initialize:
-                (
-                  ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size,
-                  ?beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta,
-                  ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier,
-                  ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs
-                ) -> void
-              | (
-                  ?OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::hyperparameters
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size,
+                ?beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta,
+                ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier,
+                ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs
+              ) -> void
 
               def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::hyperparameters
 
@@ -347,14 +312,9 @@ module OpenAI
                OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters
              ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters
 
-            def initialize:
-              (
-                ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters
-              ) -> void
-            | (
-                ?OpenAI::Models::FineTuning::JobCreateParams::Method::supervised
-                | OpenAI::BaseModel data
-              ) -> void
+            def initialize: (
+              ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters
+            ) -> void
 
             def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::supervised
 
@@ -384,16 +344,11 @@ module OpenAI
                OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs
              ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs
 
-              def initialize:
-                (
-                  ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size,
-                  ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier,
-                  ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs
-                ) -> void
-              | (
-                  ?OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::hyperparameters
-                  | OpenAI::BaseModel data
-                ) -> void
+              def initialize: (
+                ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size,
+                ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier,
+                ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs
+              ) -> void
 
               def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::hyperparameters
 
diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs
index a6d3e8ad..0700828c 100644
--- a/sig/openai/models/fine_tuning/job_list_events_params.rbs
+++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs
@@ -16,16 +16,11 @@ module OpenAI
 
        def limit=: (Integer) -> Integer
 
-        def initialize:
-          (
-            ?after: String,
-            ?limit: Integer,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-        | (
-            ?OpenAI::Models::FineTuning::job_list_events_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?after: String,
+          ?limit: Integer,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
        def to_hash: -> OpenAI::Models::FineTuning::job_list_events_params
       end
diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs
index e1202bb6..26f60cf4 100644
--- a/sig/openai/models/fine_tuning/job_list_params.rbs
+++ b/sig/openai/models/fine_tuning/job_list_params.rbs
@@ -19,17 +19,12 @@ module OpenAI
 
        attr_accessor metadata: ::Hash[Symbol, String]?
 
- def initialize: - ( - ?after: String, - ?limit: Integer, - ?metadata: ::Hash[Symbol, String]?, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::FineTuning::job_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?after: String, + ?limit: Integer, + ?metadata: ::Hash[Symbol, String]?, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::FineTuning::job_list_params end diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 9e78eb66..5a155e22 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -7,12 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::FineTuning::job_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::FineTuning::job_retrieve_params end diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 3f634afe..23b73542 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -17,16 +17,11 @@ module OpenAI def limit=: (Integer) -> Integer - def initialize: - ( - ?after: String, - ?limit: Integer, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::FineTuning::Jobs::checkpoint_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::FineTuning::Jobs::checkpoint_list_params end diff --git a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index 6c945032..4f7cf1b7 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -28,20 +28,15 @@ module OpenAI attr_accessor step_number: Integer - def initialize: - ( - id: String, - created_at: Integer, - fine_tuned_model_checkpoint: String, - fine_tuning_job_id: String, - metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, - step_number: Integer, - ?object: :"fine_tuning.job.checkpoint" - ) -> void - | ( - ?OpenAI::Models::FineTuning::Jobs::fine_tuning_job_checkpoint - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + created_at: Integer, + fine_tuned_model_checkpoint: String, + fine_tuning_job_id: String, + metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + step_number: Integer, + ?object: :"fine_tuning.job.checkpoint" + ) -> void def to_hash: -> OpenAI::Models::FineTuning::Jobs::fine_tuning_job_checkpoint @@ -85,20 +80,15 @@ module OpenAI def valid_mean_token_accuracy=: (Float) -> Float - def initialize: - ( - ?full_valid_loss: Float, - ?full_valid_mean_token_accuracy: Float, - ?step: Float, - ?train_loss: Float, - ?train_mean_token_accuracy: Float, - ?valid_loss: Float, - ?valid_mean_token_accuracy: Float - ) -> void - | ( - ?OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::metrics - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?full_valid_loss: Float, + 
?full_valid_mean_token_accuracy: Float, + ?step: Float, + ?train_loss: Float, + ?train_mean_token_accuracy: Float, + ?valid_loss: Float, + ?valid_mean_token_accuracy: Float + ) -> void def to_hash: -> OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::metrics end diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs index 4361e6e1..194ec56f 100644 --- a/sig/openai/models/function_definition.rbs +++ b/sig/openai/models/function_definition.rbs @@ -23,16 +23,12 @@ module OpenAI attr_accessor strict: bool? - def initialize: - ( - name: String, - ?description: String, - ?parameters: OpenAI::Models::function_parameters, - ?strict: bool? - ) -> void - | ( - ?OpenAI::Models::function_definition | OpenAI::BaseModel data - ) -> void + def initialize: ( + name: String, + ?description: String, + ?parameters: OpenAI::Models::function_parameters, + ?strict: bool? + ) -> void def to_hash: -> OpenAI::Models::function_definition end diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs index bd80e80e..ed120c3b 100644 --- a/sig/openai/models/image.rbs +++ b/sig/openai/models/image.rbs @@ -15,9 +15,11 @@ module OpenAI def url=: (String) -> String - def initialize: - (?b64_json: String, ?revised_prompt: String, ?url: String) -> void - | (?OpenAI::Models::image | OpenAI::BaseModel data) -> void + def initialize: ( + ?b64_json: String, + ?revised_prompt: String, + ?url: String + ) -> void def to_hash: -> OpenAI::Models::image end diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index fa5b8d08..7770fc34 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -29,20 +29,15 @@ module OpenAI def user=: (String) -> String - def initialize: - ( - image: IO | StringIO, - ?model: OpenAI::Models::ImageCreateVariationParams::model?, - ?n: Integer?, - ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, - ?size: OpenAI::Models::ImageCreateVariationParams::size?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::image_create_variation_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + image: IO | StringIO, + ?model: OpenAI::Models::ImageCreateVariationParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, + ?size: OpenAI::Models::ImageCreateVariationParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::image_create_variation_params diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 4910dbfa..63e0fbf0 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -37,19 +37,17 @@ module OpenAI def user=: (String) -> String - def initialize: - ( - image: IO | StringIO, - prompt: String, - ?mask: IO | StringIO, - ?model: OpenAI::Models::ImageEditParams::model?, - ?n: Integer?, - ?response_format: OpenAI::Models::ImageEditParams::response_format?, - ?size: OpenAI::Models::ImageEditParams::size?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> void - | (?OpenAI::Models::image_edit_params | OpenAI::BaseModel data) -> void + def initialize: ( + image: IO | StringIO, + prompt: String, + ?mask: IO | StringIO, + ?model: OpenAI::Models::ImageEditParams::model?, + ?n: Integer?, + ?response_format: 
OpenAI::Models::ImageEditParams::response_format?, + ?size: OpenAI::Models::ImageEditParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::image_edit_params diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index 224faa06..1a3f9162 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -39,21 +39,17 @@ module OpenAI def user=: (String) -> String - def initialize: - ( - prompt: String, - ?model: OpenAI::Models::ImageGenerateParams::model?, - ?n: Integer?, - ?quality: OpenAI::Models::ImageGenerateParams::quality, - ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, - ?size: OpenAI::Models::ImageGenerateParams::size?, - ?style: OpenAI::Models::ImageGenerateParams::style?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::image_generate_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + prompt: String, + ?model: OpenAI::Models::ImageGenerateParams::model?, + ?n: Integer?, + ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, + ?size: OpenAI::Models::ImageGenerateParams::size?, + ?style: OpenAI::Models::ImageGenerateParams::style?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::image_generate_params diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index 97764d62..cafb98c6 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -8,9 +8,10 @@ module OpenAI attr_accessor data: ::Array[OpenAI::Models::Image] - def initialize: - (created: Integer, data: ::Array[OpenAI::Models::Image]) -> void - | (?OpenAI::Models::images_response | OpenAI::BaseModel data) -> void + def initialize: ( + created: Integer, + data: ::Array[OpenAI::Models::Image] + ) -> void def to_hash: -> OpenAI::Models::images_response end diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs index dee8fd85..3540d0d1 100644 --- a/sig/openai/models/model.rbs +++ b/sig/openai/models/model.rbs @@ -12,14 +12,12 @@ module OpenAI attr_accessor owned_by: String - def initialize: - ( - id: String, - created: Integer, - owned_by: String, - ?object: :model - ) -> void - | (?OpenAI::Models::model | OpenAI::BaseModel data) -> void + def initialize: ( + id: String, + created: Integer, + owned_by: String, + ?object: :model + ) -> void def to_hash: -> OpenAI::Models::model end diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index ca717581..72705871 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -6,11 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::model_delete_params | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::model_delete_params end diff --git a/sig/openai/models/model_deleted.rbs b/sig/openai/models/model_deleted.rbs index 47e9b6bb..6297c483 100644 --- a/sig/openai/models/model_deleted.rbs +++ b/sig/openai/models/model_deleted.rbs @@ -9,9 +9,7 @@ module OpenAI attr_accessor object: String - def initialize: - (id: String, 
deleted: bool, object: String) -> void - | (?OpenAI::Models::model_deleted | OpenAI::BaseModel data) -> void + def initialize: (id: String, deleted: bool, object: String) -> void def to_hash: -> OpenAI::Models::model_deleted end diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 7b6caaeb..1c3127a8 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -6,9 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | (?OpenAI::Models::model_list_params | OpenAI::BaseModel data) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::model_list_params end diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index 61008928..7a36fdcb 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -6,11 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::model_retrieve_params | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::model_retrieve_params end diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index a3717393..d35f2749 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -17,14 +17,12 @@ module OpenAI attr_accessor flagged: bool - def initialize: - ( - categories: OpenAI::Models::Moderation::Categories, - category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes, - category_scores: OpenAI::Models::Moderation::CategoryScores, - flagged: bool - ) -> void - | (?OpenAI::Models::moderation | OpenAI::BaseModel data) -> void + def initialize: ( + categories: OpenAI::Models::Moderation::Categories, + category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes, + category_scores: OpenAI::Models::Moderation::CategoryScores, + flagged: bool + ) -> void def to_hash: -> OpenAI::Models::moderation @@ -72,25 +70,21 @@ module OpenAI attr_accessor violence_graphic: bool - def initialize: - ( - harassment: bool, - harassment_threatening: bool, - hate: bool, - hate_threatening: bool, - illicit: bool?, - illicit_violent: bool?, - self_harm: bool, - self_harm_instructions: bool, - self_harm_intent: bool, - sexual: bool, - sexual_minors: bool, - violence: bool, - violence_graphic: bool - ) -> void - | ( - ?OpenAI::Models::Moderation::categories | OpenAI::BaseModel data - ) -> void + def initialize: ( + harassment: bool, + harassment_threatening: bool, + hate: bool, + hate_threatening: bool, + illicit: bool?, + illicit_violent: bool?, + self_harm: bool, + self_harm_instructions: bool, + self_harm_intent: bool, + sexual: bool, + sexual_minors: bool, + violence: bool, + violence_graphic: bool + ) -> void def to_hash: -> OpenAI::Models::Moderation::categories end @@ -139,26 +133,21 @@ module OpenAI attr_accessor violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] - def initialize: - ( - harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], - harassment_threatening: 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], - hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], - hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], - illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], - illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], - self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], - self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], - self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], - sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], - sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], - violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], - violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] - ) -> void - | ( - ?OpenAI::Models::Moderation::category_applied_input_types - | OpenAI::BaseModel data - ) -> void + def initialize: ( + harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + ) -> void def to_hash: -> OpenAI::Models::Moderation::category_applied_input_types @@ -317,26 +306,21 @@ module OpenAI attr_accessor violence_graphic: Float - def initialize: - ( - harassment: Float, - harassment_threatening: Float, - hate: Float, - hate_threatening: Float, - illicit: Float, - illicit_violent: Float, - self_harm: Float, - self_harm_instructions: Float, - self_harm_intent: Float, - sexual: Float, - sexual_minors: Float, - violence: Float, - violence_graphic: Float - ) -> void - | ( - ?OpenAI::Models::Moderation::category_scores - | OpenAI::BaseModel data - ) -> void + def initialize: ( + harassment: Float, + harassment_threatening: Float, + hate: Float, + hate_threatening: Float, + illicit: Float, + illicit_violent: Float, + self_harm: Float, + self_harm_instructions: Float, + self_harm_intent: Float, + sexual: Float, + sexual_minors: Float, + violence: Float, + violence_graphic: Float + ) -> void def to_hash: -> OpenAI::Models::Moderation::category_scores end diff --git a/sig/openai/models/moderation_create_params.rbs 
b/sig/openai/models/moderation_create_params.rbs index 5f8b6773..c95e6d3e 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -19,15 +19,11 @@ module OpenAI OpenAI::Models::ModerationCreateParams::model ) -> OpenAI::Models::ModerationCreateParams::model - def initialize: - ( - input: OpenAI::Models::ModerationCreateParams::input, - ?model: OpenAI::Models::ModerationCreateParams::model, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::moderation_create_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + input: OpenAI::Models::ModerationCreateParams::input, + ?model: OpenAI::Models::ModerationCreateParams::model, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::moderation_create_params diff --git a/sig/openai/models/moderation_create_response.rbs b/sig/openai/models/moderation_create_response.rbs index faa955af..5934337f 100644 --- a/sig/openai/models/moderation_create_response.rbs +++ b/sig/openai/models/moderation_create_response.rbs @@ -14,15 +14,11 @@ module OpenAI attr_accessor results: ::Array[OpenAI::Models::Moderation] - def initialize: - ( - id: String, - model: String, - results: ::Array[OpenAI::Models::Moderation] - ) -> void - | ( - ?OpenAI::Models::moderation_create_response | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + model: String, + results: ::Array[OpenAI::Models::Moderation] + ) -> void def to_hash: -> OpenAI::Models::moderation_create_response end diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs index ac58a3d7..e23f649d 100644 --- a/sig/openai/models/moderation_image_url_input.rbs +++ b/sig/openai/models/moderation_image_url_input.rbs @@ -11,14 +11,10 @@ module OpenAI attr_accessor type: :image_url - def initialize: - ( - image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, - ?type: :image_url - ) -> void - | ( - ?OpenAI::Models::moderation_image_url_input | OpenAI::BaseModel data - ) -> void + def initialize: ( + image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, + ?type: :image_url + ) -> void def to_hash: -> OpenAI::Models::moderation_image_url_input @@ -27,12 +23,7 @@ module OpenAI class ImageURL < OpenAI::BaseModel attr_accessor url: String - def initialize: - (url: String) -> void - | ( - ?OpenAI::Models::ModerationImageURLInput::image_url - | OpenAI::BaseModel data - ) -> void + def initialize: (url: String) -> void def to_hash: -> OpenAI::Models::ModerationImageURLInput::image_url end diff --git a/sig/openai/models/moderation_text_input.rbs b/sig/openai/models/moderation_text_input.rbs index 3a5a4a09..45143b65 100644 --- a/sig/openai/models/moderation_text_input.rbs +++ b/sig/openai/models/moderation_text_input.rbs @@ -7,11 +7,7 @@ module OpenAI attr_accessor type: :text - def initialize: - (text: String, ?type: :text) -> void - | ( - ?OpenAI::Models::moderation_text_input | OpenAI::BaseModel data - ) -> void + def initialize: (text: String, ?type: :text) -> void def to_hash: -> OpenAI::Models::moderation_text_input end diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs index 0d5fa3b5..60c24b65 100644 --- a/sig/openai/models/other_file_chunking_strategy_object.rbs +++ b/sig/openai/models/other_file_chunking_strategy_object.rbs @@ -5,12 +5,7 @@ module OpenAI class OtherFileChunkingStrategyObject < OpenAI::BaseModel attr_accessor type: 
:other - def initialize: - (?type: :other) -> void - | ( - ?OpenAI::Models::other_file_chunking_strategy_object - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :other) -> void def to_hash: -> OpenAI::Models::other_file_chunking_strategy_object end diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index a7604c1f..a4184fb6 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -11,12 +11,10 @@ module OpenAI attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary? - def initialize: - ( - ?effort: OpenAI::Models::reasoning_effort?, - ?generate_summary: OpenAI::Models::Reasoning::generate_summary? - ) -> void - | (?OpenAI::Models::reasoning | OpenAI::BaseModel data) -> void + def initialize: ( + ?effort: OpenAI::Models::reasoning_effort?, + ?generate_summary: OpenAI::Models::Reasoning::generate_summary? + ) -> void def to_hash: -> OpenAI::Models::reasoning diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs index f0021d58..f228d234 100644 --- a/sig/openai/models/response_format_json_object.rbs +++ b/sig/openai/models/response_format_json_object.rbs @@ -5,11 +5,7 @@ module OpenAI class ResponseFormatJSONObject < OpenAI::BaseModel attr_accessor type: :json_object - def initialize: - (?type: :json_object) -> void - | ( - ?OpenAI::Models::response_format_json_object | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :json_object) -> void def to_hash: -> OpenAI::Models::response_format_json_object end diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs index eb9e4e25..05e4a025 100644 --- a/sig/openai/models/response_format_json_schema.rbs +++ b/sig/openai/models/response_format_json_schema.rbs @@ -11,14 +11,10 @@ module OpenAI attr_accessor type: :json_schema - def initialize: - ( - json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, - ?type: :json_schema - ) -> void - | ( - ?OpenAI::Models::response_format_json_schema | OpenAI::BaseModel data - ) -> void + def initialize: ( + json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, + ?type: :json_schema + ) -> void def to_hash: -> OpenAI::Models::response_format_json_schema @@ -43,17 +39,12 @@ module OpenAI attr_accessor strict: bool? - def initialize: - ( - name: String, - ?description: String, - ?schema: ::Hash[Symbol, top], - ?strict: bool? - ) -> void - | ( - ?OpenAI::Models::ResponseFormatJSONSchema::json_schema - | OpenAI::BaseModel data - ) -> void + def initialize: ( + name: String, + ?description: String, + ?schema: ::Hash[Symbol, top], + ?strict: bool? 
+ ) -> void def to_hash: -> OpenAI::Models::ResponseFormatJSONSchema::json_schema end diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs index d8c203e5..691bace2 100644 --- a/sig/openai/models/response_format_text.rbs +++ b/sig/openai/models/response_format_text.rbs @@ -5,11 +5,7 @@ module OpenAI class ResponseFormatText < OpenAI::BaseModel attr_accessor type: :text - def initialize: - (?type: :text) -> void - | ( - ?OpenAI::Models::response_format_text | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :text) -> void def to_hash: -> OpenAI::Models::response_format_text end diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index 0b7870e8..dbfd8278 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -18,16 +18,12 @@ module OpenAI attr_accessor type: :computer_use_preview - def initialize: - ( - display_height: Float, - display_width: Float, - environment: OpenAI::Models::Responses::ComputerTool::environment, - ?type: :computer_use_preview - ) -> void - | ( - ?OpenAI::Models::Responses::computer_tool | OpenAI::BaseModel data - ) -> void + def initialize: ( + display_height: Float, + display_width: Float, + environment: OpenAI::Models::Responses::ComputerTool::environment, + ?type: :computer_use_preview + ) -> void def to_hash: -> OpenAI::Models::Responses::computer_tool diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 7638ced1..8e5bc808 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -19,16 +19,11 @@ module OpenAI OpenAI::Models::Responses::EasyInputMessage::type_ ) -> OpenAI::Models::Responses::EasyInputMessage::type_ - def initialize: - ( - content: OpenAI::Models::Responses::EasyInputMessage::content, - role: OpenAI::Models::Responses::EasyInputMessage::role, - ?type: OpenAI::Models::Responses::EasyInputMessage::type_ - ) -> void - | ( - ?OpenAI::Models::Responses::easy_input_message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Responses::EasyInputMessage::content, + role: OpenAI::Models::Responses::EasyInputMessage::role, + ?type: OpenAI::Models::Responses::EasyInputMessage::type_ + ) -> void def to_hash: -> OpenAI::Models::Responses::easy_input_message diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 34894b2e..8c714006 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -31,18 +31,13 @@ module OpenAI OpenAI::Models::Responses::FileSearchTool::RankingOptions ) -> OpenAI::Models::Responses::FileSearchTool::RankingOptions - def initialize: - ( - vector_store_ids: ::Array[String], - ?filters: OpenAI::Models::Responses::FileSearchTool::filters, - ?max_num_results: Integer, - ?ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, - ?type: :file_search - ) -> void - | ( - ?OpenAI::Models::Responses::file_search_tool - | OpenAI::BaseModel data - ) -> void + def initialize: ( + vector_store_ids: ::Array[String], + ?filters: OpenAI::Models::Responses::FileSearchTool::filters, + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, + ?type: :file_search + ) -> void def to_hash: -> OpenAI::Models::Responses::file_search_tool 
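# A minimal usage sketch of the single keyword signature that this patch gives
# `OpenAI::Models::ResponseFormatJSONSchema::JSONSchema` above, replacing the
# old two-overload form. It assumes the generated class exposes `.new` with
# exactly the keywords typed in the RBS; the name, description, and schema
# hash below are illustrative values, not part of this patch.
require "openai"

json_schema = OpenAI::Models::ResponseFormatJSONSchema::JSONSchema.new(
  name: "weather_report",                    # required, String
  description: "Structured weather output",  # optional keyword (`?` in RBS)
  schema: {type: "object", properties: {temperature: {type: "number"}}},
  strict: true                               # typed `bool?`, so nil is also permitted
)
response_format = OpenAI::Models::ResponseFormatJSONSchema.new(json_schema: json_schema)
response_format.to_hash # symbol-keyed hash, per the `to_hash` signature above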
@@ -70,15 +65,10 @@ module OpenAI def score_threshold=: (Float) -> Float - def initialize: - ( - ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, - ?score_threshold: Float - ) -> void - | ( - ?OpenAI::Models::Responses::FileSearchTool::ranking_options - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, + ?score_threshold: Float + ) -> void def to_hash: -> OpenAI::Models::Responses::FileSearchTool::ranking_options diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index 78d3312b..b0e5aa03 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -21,17 +21,13 @@ module OpenAI attr_accessor description: String? - def initialize: - ( - name: String, - parameters: ::Hash[Symbol, top], - strict: bool, - ?description: String?, - ?type: :function - ) -> void - | ( - ?OpenAI::Models::Responses::function_tool | OpenAI::BaseModel data - ) -> void + def initialize: ( + name: String, + parameters: ::Hash[Symbol, top], + strict: bool, + ?description: String?, + ?type: :function + ) -> void def to_hash: -> OpenAI::Models::Responses::function_tool end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 29314e57..2971462e 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -32,18 +32,13 @@ module OpenAI OpenAI::Models::Responses::InputItemListParams::order ) -> OpenAI::Models::Responses::InputItemListParams::order - def initialize: - ( - ?after: String, - ?before: String, - ?limit: Integer, - ?order: OpenAI::Models::Responses::InputItemListParams::order, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Responses::input_item_list_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::Responses::InputItemListParams::order, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Responses::input_item_list_params diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 42ac11e7..f8f5c22b 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -86,34 +86,30 @@ module OpenAI def user=: (String) -> String - def initialize: - ( - id: String, - created_at: Float, - error: OpenAI::Models::Responses::ResponseError?, - incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, - instructions: String?, - metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Responses::Response::model, - output: ::Array[OpenAI::Models::Responses::response_output_item], - parallel_tool_calls: bool, - temperature: Float?, - tool_choice: OpenAI::Models::Responses::Response::tool_choice, - tools: ::Array[OpenAI::Models::Responses::tool], - top_p: Float?, - ?max_output_tokens: Integer?, - ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, - ?status: OpenAI::Models::Responses::response_status, - ?text: OpenAI::Models::Responses::ResponseTextConfig, - ?truncation: OpenAI::Models::Responses::Response::truncation?, - ?usage: OpenAI::Models::Responses::ResponseUsage, - ?user: String, - ?object: :response - ) -> void - | ( - ?OpenAI::Models::Responses::response | OpenAI::BaseModel 
data - ) -> void + def initialize: ( + id: String, + created_at: Float, + error: OpenAI::Models::Responses::ResponseError?, + incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Responses::Response::model, + output: ::Array[OpenAI::Models::Responses::response_output_item], + parallel_tool_calls: bool, + temperature: Float?, + tool_choice: OpenAI::Models::Responses::Response::tool_choice, + tools: ::Array[OpenAI::Models::Responses::tool], + top_p: Float?, + ?max_output_tokens: Integer?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?status: OpenAI::Models::Responses::response_status, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?truncation: OpenAI::Models::Responses::Response::truncation?, + ?usage: OpenAI::Models::Responses::ResponseUsage, + ?user: String, + ?object: :response + ) -> void def to_hash: -> OpenAI::Models::Responses::response @@ -129,14 +125,9 @@ module OpenAI OpenAI::Models::Responses::Response::IncompleteDetails::reason ) -> OpenAI::Models::Responses::Response::IncompleteDetails::reason - def initialize: - ( - ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason - ) -> void - | ( - ?OpenAI::Models::Responses::Response::incomplete_details - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + ) -> void def to_hash: -> OpenAI::Models::Responses::Response::incomplete_details diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index 72465557..f6a9c6fd 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -9,12 +9,7 @@ module OpenAI attr_accessor type: :"response.audio.delta" - def initialize: - (delta: String, ?type: :"response.audio.delta") -> void - | ( - ?OpenAI::Models::Responses::response_audio_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: (delta: String, ?type: :"response.audio.delta") -> void def to_hash: -> OpenAI::Models::Responses::response_audio_delta_event end diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 4617b59f..9449f329 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -6,12 +6,7 @@ module OpenAI class ResponseAudioDoneEvent < OpenAI::BaseModel attr_accessor type: :"response.audio.done" - def initialize: - (?type: :"response.audio.done") -> void - | ( - ?OpenAI::Models::Responses::response_audio_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :"response.audio.done") -> void def to_hash: -> OpenAI::Models::Responses::response_audio_done_event end diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index c6efbd43..b1e7a534 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -9,12 +9,10 @@ module OpenAI attr_accessor type: :"response.audio.transcript.delta" - def initialize: - (delta: String, ?type: :"response.audio.transcript.delta") -> void - | ( - 
?OpenAI::Models::Responses::response_audio_transcript_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + delta: String, + ?type: :"response.audio.transcript.delta" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_audio_transcript_delta_event end diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index 9446ffeb..ca5e5241 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -7,12 +7,7 @@ module OpenAI class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel attr_accessor type: :"response.audio.transcript.done" - def initialize: - (?type: :"response.audio.transcript.done") -> void - | ( - ?OpenAI::Models::Responses::response_audio_transcript_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :"response.audio.transcript.done") -> void def to_hash: -> OpenAI::Models::Responses::response_audio_transcript_done_event end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index a1822231..a330e86c 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.code.delta" - def initialize: - ( - delta: String, - output_index: Integer, - ?type: :"response.code_interpreter_call.code.delta" - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_call_code_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + delta: String, + output_index: Integer, + ?type: :"response.code_interpreter_call.code.delta" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_code_delta_event end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index d735f06b..45e0a1fd 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.code.done" - def initialize: - ( - code: String, - output_index: Integer, - ?type: :"response.code_interpreter_call.code.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_call_code_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code: String, + output_index: Integer, + ?type: :"response.code_interpreter_call.code.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_code_done_event end diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index feccd45a..46aae17c 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.completed" - def initialize: - ( - code_interpreter_call: 
OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - ?type: :"response.code_interpreter_call.completed" - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_call_completed_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + ?type: :"response.code_interpreter_call.completed" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_completed_event end diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index e46fc46f..a5384a8c 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.in_progress" - def initialize: - ( - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - ?type: :"response.code_interpreter_call.in_progress" - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_call_in_progress_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + ?type: :"response.code_interpreter_call.in_progress" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_in_progress_event end diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index 66622c55..3fcff8bf 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.interpreting" - def initialize: - ( - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - ?type: :"response.code_interpreter_call.interpreting" - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_call_interpreting_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + ?type: :"response.code_interpreter_call.interpreting" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_interpreting_event end diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index 8dbeca6d..1201cdfe 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :code_interpreter_call - def initialize: - ( - id: String, - code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, - ?type: :code_interpreter_call - ) -> void - | ( - ?OpenAI::Models::Responses::response_code_interpreter_tool_call - | OpenAI::BaseModel data - ) -> void + def 
initialize: ( + id: String, + code: String, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + ?type: :code_interpreter_call + ) -> void def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_tool_call @@ -48,12 +43,7 @@ module OpenAI attr_accessor type: :logs - def initialize: - (logs: String, ?type: :logs) -> void - | ( - ?OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::logs - | OpenAI::BaseModel data - ) -> void + def initialize: (logs: String, ?type: :logs) -> void def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::logs end @@ -69,15 +59,10 @@ module OpenAI attr_accessor type: :files - def initialize: - ( - files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - ?type: :files - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::files - | OpenAI::BaseModel data - ) -> void + def initialize: ( + files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + ?type: :files + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::files @@ -88,12 +73,7 @@ module OpenAI attr_accessor mime_type: String - def initialize: - (file_id: String, mime_type: String) -> void - | ( - ?OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::file - | OpenAI::BaseModel data - ) -> void + def initialize: (file_id: String, mime_type: String) -> void def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::file end diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 15c402f6..372c8eb9 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -12,15 +12,10 @@ module OpenAI attr_accessor type: :"response.completed" - def initialize: - ( - response: OpenAI::Models::Responses::Response, - ?type: :"response.completed" - ) -> void - | ( - ?OpenAI::Models::Responses::response_completed_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + response: OpenAI::Models::Responses::Response, + ?type: :"response.completed" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_completed_event end diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 6a5b9b8a..69f3d3e4 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -24,19 +24,14 @@ module OpenAI attr_accessor type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ - def initialize: - ( - id: String, - action: OpenAI::Models::Responses::ResponseComputerToolCall::action, - call_id: String, - pending_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::status, - type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ - ) -> void - | ( - ?OpenAI::Models::Responses::response_computer_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, + call_id: String, + pending_safety_checks: 
::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + ) -> void def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call @@ -69,17 +64,12 @@ module OpenAI attr_accessor y_: Integer - def initialize: - ( - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, - x: Integer, - y_: Integer, - ?type: :click - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::click - | OpenAI::BaseModel data - ) -> void + def initialize: ( + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, + x: Integer, + y_: Integer, + ?type: :click + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::click @@ -105,12 +95,11 @@ module OpenAI attr_accessor y_: Integer - def initialize: - (x: Integer, y_: Integer, ?type: :double_click) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::double_click - | OpenAI::BaseModel data - ) -> void + def initialize: ( + x: Integer, + y_: Integer, + ?type: :double_click + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::double_click end @@ -126,15 +115,10 @@ module OpenAI attr_accessor type: :drag - def initialize: - ( - path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], - ?type: :drag - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::drag - | OpenAI::BaseModel data - ) -> void + def initialize: ( + path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], + ?type: :drag + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::drag @@ -145,12 +129,7 @@ module OpenAI attr_accessor y_: Integer - def initialize: - (x: Integer, y_: Integer) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::path - | OpenAI::BaseModel data - ) -> void + def initialize: (x: Integer, y_: Integer) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::path end @@ -163,12 +142,7 @@ module OpenAI attr_accessor type: :keypress - def initialize: - (keys: ::Array[String], ?type: :keypress) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::keypress - | OpenAI::BaseModel data - ) -> void + def initialize: (keys: ::Array[String], ?type: :keypress) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::keypress end @@ -182,12 +156,7 @@ module OpenAI attr_accessor y_: Integer - def initialize: - (x: Integer, y_: Integer, ?type: :move) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::move - | OpenAI::BaseModel data - ) -> void + def initialize: (x: Integer, y_: Integer, ?type: :move) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::move end @@ -197,12 +166,7 @@ module OpenAI class Screenshot < OpenAI::BaseModel attr_accessor type: :screenshot - def initialize: - (?type: :screenshot) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::screenshot - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :screenshot) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::screenshot end @@ -227,18 +191,13 @@ module OpenAI attr_accessor y_: Integer - def initialize: - ( - scroll_x: 
Integer, - scroll_y: Integer, - x: Integer, - y_: Integer, - ?type: :scroll - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::scroll - | OpenAI::BaseModel data - ) -> void + def initialize: ( + scroll_x: Integer, + scroll_y: Integer, + x: Integer, + y_: Integer, + ?type: :scroll + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::scroll end @@ -250,12 +209,7 @@ module OpenAI attr_accessor type: :type - def initialize: - (text: String, ?type: :type) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::type_ - | OpenAI::BaseModel data - ) -> void + def initialize: (text: String, ?type: :type) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::type_ end @@ -265,12 +219,7 @@ module OpenAI class Wait < OpenAI::BaseModel attr_accessor type: :wait - def initialize: - (?type: :wait) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait - | OpenAI::BaseModel data - ) -> void + def initialize: (?type: :wait) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait end @@ -288,12 +237,7 @@ module OpenAI attr_accessor message: String - def initialize: - (id: String, code: String, message: String) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCall::pending_safety_check - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String, code: String, message: String) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::pending_safety_check end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 5fbdb5f4..0e737186 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -32,19 +32,14 @@ module OpenAI OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status ) -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status - def initialize: - ( - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, - ?type: :computer_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::response_computer_tool_call_output_item - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + call_id: String, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, + ?type: :computer_call_output + ) -> void def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_item @@ -58,12 +53,7 @@ module OpenAI attr_accessor message: String - def initialize: - (id: String, code: String, message: String) -> void - | ( - ?OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::acknowledged_safety_check - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String, code: String, message: String) -> void def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::acknowledged_safety_check end diff --git 
a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index f71fb9a1..3d8cc77b 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -15,16 +15,11 @@ module OpenAI def image_url=: (String) -> String - def initialize: - ( - ?file_id: String, - ?image_url: String, - ?type: :computer_screenshot - ) -> void - | ( - ?OpenAI::Models::Responses::response_computer_tool_call_output_screenshot - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_id: String, + ?image_url: String, + ?type: :computer_screenshot + ) -> void def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_screenshot end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 00707adc..b221cdc0 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.content_part.added" - def initialize: - ( - content_index: Integer, - item_id: String, - output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, - ?type: :"response.content_part.added" - ) -> void - | ( - ?OpenAI::Models::Responses::response_content_part_added_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + ?type: :"response.content_part.added" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_content_part_added_event diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 06c3b822..f7c71025 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.content_part.done" - def initialize: - ( - content_index: Integer, - item_id: String, - output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, - ?type: :"response.content_part.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_content_part_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + ?type: :"response.content_part.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_content_part_done_event diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index f69f74bc..2f4ca294 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -75,31 +75,26 @@ module OpenAI def user=: (String) -> String - def initialize: - ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, - ?include: ::Array[OpenAI::Models::Responses::response_includable]?, - ?instructions: String?, - ?max_output_tokens: Integer?, - ?metadata: OpenAI::Models::metadata?, - 
?parallel_tool_calls: bool?, - ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, - ?store: bool?, - ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, - ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - ?tools: ::Array[OpenAI::Models::Responses::tool], - ?top_p: Float?, - ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Responses::response_create_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + input: OpenAI::Models::Responses::ResponseCreateParams::input, + model: OpenAI::Models::Responses::ResponseCreateParams::model, + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Responses::response_create_params diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index 386c4aa8..bd5a386a 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -12,15 +12,10 @@ module OpenAI attr_accessor type: :"response.created" - def initialize: - ( - response: OpenAI::Models::Responses::Response, - ?type: :"response.created" - ) -> void - | ( - ?OpenAI::Models::Responses::response_created_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + response: OpenAI::Models::Responses::Response, + ?type: :"response.created" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_created_event end diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index 4458ecb9..269f94f6 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -7,12 +7,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Responses::response_delete_params - | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::Responses::response_delete_params end diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index 0adca85b..5d705a42 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -12,14 +12,10 @@ module OpenAI attr_accessor message: String - def initialize: - ( - code: OpenAI::Models::Responses::ResponseError::code, - message: String - ) -> void - | ( - ?OpenAI::Models::Responses::response_error | OpenAI::BaseModel data - ) -> void + def initialize: ( + code: OpenAI::Models::Responses::ResponseError::code, + message: String + ) -> void def to_hash: -> 
OpenAI::Models::Responses::response_error diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index 8f3fcbdf..2c037b8a 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -13,17 +13,12 @@ module OpenAI attr_accessor type: :error - def initialize: - ( - code: String?, - message: String, - param: String?, - ?type: :error - ) -> void - | ( - ?OpenAI::Models::Responses::response_error_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + code: String?, + message: String, + param: String?, + ?type: :error + ) -> void def to_hash: -> OpenAI::Models::Responses::response_error_event end diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index a00dfcef..5747ae10 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -12,15 +12,10 @@ module OpenAI attr_accessor type: :"response.failed" - def initialize: - ( - response: OpenAI::Models::Responses::Response, - ?type: :"response.failed" - ) -> void - | ( - ?OpenAI::Models::Responses::response_failed_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + response: OpenAI::Models::Responses::Response, + ?type: :"response.failed" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_failed_event end diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index 876bfbc4..069463fe 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.file_search_call.completed" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.file_search_call.completed" - ) -> void - | ( - ?OpenAI::Models::Responses::response_file_search_call_completed_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.file_search_call.completed" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_file_search_call_completed_event end diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index 389c41de..e8163f14 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.file_search_call.in_progress" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.file_search_call.in_progress" - ) -> void - | ( - ?OpenAI::Models::Responses::response_file_search_call_in_progress_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.file_search_call.in_progress" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_file_search_call_in_progress_event end diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index 37da4da7..1745ca78 100644 --- 
a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.file_search_call.searching" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.file_search_call.searching" - ) -> void - | ( - ?OpenAI::Models::Responses::response_file_search_call_searching_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.file_search_call.searching" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_file_search_call_searching_event end diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index 689645fc..8bcaf1dc 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]? - def initialize: - ( - id: String, - queries: ::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, - ?results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]?, - ?type: :file_search_call - ) -> void - | ( - ?OpenAI::Models::Responses::response_file_search_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + queries: ::Array[String], + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, + ?results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]?, + ?type: :file_search_call + ) -> void def to_hash: -> OpenAI::Models::Responses::response_file_search_tool_call @@ -77,18 +72,13 @@ module OpenAI def text=: (String) -> String - def initialize: - ( - ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, - ?file_id: String, - ?filename: String, - ?score: Float, - ?text: String - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseFileSearchToolCall::result - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, + ?file_id: String, + ?filename: String, + ?score: Float, + ?text: String + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseFileSearchToolCall::result diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index 9f0fd321..a4435499 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -25,18 +25,13 @@ module OpenAI attr_accessor strict: bool? 
- def initialize: - ( - schema: ::Hash[Symbol, top], - ?description: String, - ?name: String, - ?strict: bool?, - ?type: :json_schema - ) -> void - | ( - ?OpenAI::Models::Responses::response_format_text_json_schema_config - | OpenAI::BaseModel data - ) -> void + def initialize: ( + schema: ::Hash[Symbol, top], + ?description: String, + ?name: String, + ?strict: bool?, + ?type: :json_schema + ) -> void def to_hash: -> OpenAI::Models::Responses::response_format_text_json_schema_config end diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index 3b98be65..afcf88ef 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -18,17 +18,12 @@ module OpenAI attr_accessor type: :"response.function_call_arguments.delta" - def initialize: - ( - delta: String, - item_id: String, - output_index: Integer, - ?type: :"response.function_call_arguments.delta" - ) -> void - | ( - ?OpenAI::Models::Responses::response_function_call_arguments_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + delta: String, + item_id: String, + output_index: Integer, + ?type: :"response.function_call_arguments.delta" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_function_call_arguments_delta_event end diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index 89a5d2ad..5a0dc613 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -18,17 +18,12 @@ module OpenAI attr_accessor type: :"response.function_call_arguments.done" - def initialize: - ( - arguments: String, - item_id: String, - output_index: Integer, - ?type: :"response.function_call_arguments.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_function_call_arguments_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + arguments: String, + item_id: String, + output_index: Integer, + ?type: :"response.function_call_arguments.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_function_call_arguments_done_event end diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index 1a7b694b..acd89dee 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -30,19 +30,14 @@ module OpenAI OpenAI::Models::Responses::ResponseFunctionToolCall::status ) -> OpenAI::Models::Responses::ResponseFunctionToolCall::status - def initialize: - ( - arguments: String, - call_id: String, - name: String, - ?id: String, - ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, - ?type: :function_call - ) -> void - | ( - ?OpenAI::Models::Responses::response_function_tool_call - | OpenAI::BaseModel data - ) -> void + def initialize: ( + arguments: String, + call_id: String, + name: String, + ?id: String, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, + ?type: :function_call + ) -> void def to_hash: -> OpenAI::Models::Responses::response_function_tool_call diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs 
b/sig/openai/models/responses/response_function_tool_call_item.rbs index fcb5863d..86727e9b 100644 --- a/sig/openai/models/responses/response_function_tool_call_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_item.rbs @@ -8,12 +8,7 @@ module OpenAI def id=: (String _) -> String - def initialize: - (id: String) -> void - | ( - ?OpenAI::Models::Responses::response_function_tool_call_item - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String) -> void def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_item end diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 481354aa..3265f83f 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -25,18 +25,13 @@ module OpenAI OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status ) -> OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status - def initialize: - ( - id: String, - call_id: String, - output: String, - ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, - ?type: :function_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::response_function_tool_call_output_item - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + call_id: String, + output: String, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, + ?type: :function_call_output + ) -> void def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_output_item diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 0333dad4..7da9b40d 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :web_search_call - def initialize: - ( - id: String, - status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, - ?type: :web_search_call - ) -> void - | ( - ?OpenAI::Models::Responses::response_function_web_search - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, + ?type: :web_search_call + ) -> void def to_hash: -> OpenAI::Models::Responses::response_function_web_search diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 7b76bb87..35845e80 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -12,15 +12,10 @@ module OpenAI attr_accessor type: :"response.in_progress" - def initialize: - ( - response: OpenAI::Models::Responses::Response, - ?type: :"response.in_progress" - ) -> void - | ( - ?OpenAI::Models::Responses::response_in_progress_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + response: OpenAI::Models::Responses::Response, + ?type: :"response.in_progress" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_in_progress_event end diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index b68b4f2d..24d172fe 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ 
b/sig/openai/models/responses/response_incomplete_event.rbs @@ -12,15 +12,10 @@ module OpenAI attr_accessor type: :"response.incomplete" - def initialize: - ( - response: OpenAI::Models::Responses::Response, - ?type: :"response.incomplete" - ) -> void - | ( - ?OpenAI::Models::Responses::response_incomplete_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + response: OpenAI::Models::Responses::Response, + ?type: :"response.incomplete" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_incomplete_event end diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index 79ba418e..b41c744b 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :input_audio - def initialize: - ( - data: String, - format_: OpenAI::Models::Responses::ResponseInputAudio::format_, - ?type: :input_audio - ) -> void - | ( - ?OpenAI::Models::Responses::response_input_audio - | OpenAI::BaseModel data - ) -> void + def initialize: ( + data: String, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, + ?type: :input_audio + ) -> void def to_hash: -> OpenAI::Models::Responses::response_input_audio diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index 4b09c314..4c336cd6 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -24,17 +24,12 @@ module OpenAI def filename=: (String) -> String - def initialize: - ( - ?file_data: String, - ?file_id: String, - ?filename: String, - ?type: :input_file - ) -> void - | ( - ?OpenAI::Models::Responses::response_input_file - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?file_data: String, + ?file_id: String, + ?filename: String, + ?type: :input_file + ) -> void def to_hash: -> OpenAI::Models::Responses::response_input_file end diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index bd044576..24abed14 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -18,17 +18,12 @@ module OpenAI attr_accessor image_url: String? 
- def initialize: - ( - detail: OpenAI::Models::Responses::ResponseInputImage::detail, - ?file_id: String?, - ?image_url: String?, - ?type: :input_image - ) -> void - | ( - ?OpenAI::Models::Responses::response_input_image - | OpenAI::BaseModel data - ) -> void + def initialize: ( + detail: OpenAI::Models::Responses::ResponseInputImage::detail, + ?file_id: String?, + ?image_url: String?, + ?type: :input_image + ) -> void def to_hash: -> OpenAI::Models::Responses::response_input_image diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index 646f1565..869b143f 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -40,17 +40,12 @@ module OpenAI OpenAI::Models::Responses::ResponseInputItem::Message::type_ ) -> OpenAI::Models::Responses::ResponseInputItem::Message::type_ - def initialize: - ( - content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Models::Responses::ResponseInputItem::Message::role, - ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status, - ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::message @@ -116,19 +111,14 @@ module OpenAI OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status ) -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status - def initialize: - ( - call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - ?id: String, - ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, - ?type: :computer_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::computer_call_output - | OpenAI::BaseModel data - ) -> void + def initialize: ( + call_id: String, + output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + ?id: String, + ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], + ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, + ?type: :computer_call_output + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::computer_call_output @@ -142,12 +132,7 @@ module OpenAI attr_accessor message: String - def initialize: - (id: String, code: String, message: String) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::acknowledged_safety_check - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String, code: String, message: String) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::acknowledged_safety_check end @@ -189,18 +174,13 @@ module OpenAI OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status ) -> OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status - def 
initialize: - ( - call_id: String, - output: String, - ?id: String, - ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status, - ?type: :function_call_output - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::function_call_output - | OpenAI::BaseModel data - ) -> void + def initialize: ( + call_id: String, + output: String, + ?id: String, + ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status, + ?type: :function_call_output + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::function_call_output @@ -222,12 +202,7 @@ module OpenAI attr_accessor type: :item_reference - def initialize: - (id: String, ?type: :item_reference) -> void - | ( - ?OpenAI::Models::Responses::ResponseInputItem::item_reference - | OpenAI::BaseModel data - ) -> void + def initialize: (id: String, ?type: :item_reference) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::item_reference end diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index d0c4f6ac..45e1f023 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -29,18 +29,13 @@ module OpenAI OpenAI::Models::Responses::ResponseInputMessageItem::type_ ) -> OpenAI::Models::Responses::ResponseInputMessageItem::type_ - def initialize: - ( - id: String, - content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Models::Responses::ResponseInputMessageItem::role, - ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status, - ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ - ) -> void - | ( - ?OpenAI::Models::Responses::response_input_message_item - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ) -> void def to_hash: -> OpenAI::Models::Responses::response_input_message_item diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs index 8c1de672..c41c7537 100644 --- a/sig/openai/models/responses/response_input_text.rbs +++ b/sig/openai/models/responses/response_input_text.rbs @@ -8,12 +8,7 @@ module OpenAI attr_accessor type: :input_text - def initialize: - (text: String, ?type: :input_text) -> void - | ( - ?OpenAI::Models::Responses::response_input_text - | OpenAI::BaseModel data - ) -> void + def initialize: (text: String, ?type: :input_text) -> void def to_hash: -> OpenAI::Models::Responses::response_input_text end diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index 3ce80286..37f78a23 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -24,18 +24,13 @@ module OpenAI attr_accessor object: :list - def initialize: - ( - data: ::Array[OpenAI::Models::Responses::response_item], - first_id: String, - has_more: bool, - last_id: String, - ?object: :list - ) -> void - | ( - ?OpenAI::Models::Responses::response_item_list - | OpenAI::BaseModel data - ) -> void + def initialize: ( + data: ::Array[OpenAI::Models::Responses::response_item], + 
first_id: String, + has_more: bool, + last_id: String, + ?object: :list + ) -> void def to_hash: -> OpenAI::Models::Responses::response_item_list end diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs index 7c36f267..1f90e75d 100644 --- a/sig/openai/models/responses/response_output_audio.rbs +++ b/sig/openai/models/responses/response_output_audio.rbs @@ -11,12 +11,11 @@ module OpenAI attr_accessor type: :output_audio - def initialize: - (data: String, transcript: String, ?type: :output_audio) -> void - | ( - ?OpenAI::Models::Responses::response_output_audio - | OpenAI::BaseModel data - ) -> void + def initialize: ( + data: String, + transcript: String, + ?type: :output_audio + ) -> void def to_hash: -> OpenAI::Models::Responses::response_output_audio end diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index 2e3f2b52..e59d12f3 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.output_item.added" - def initialize: - ( - item: OpenAI::Models::Responses::response_output_item, - output_index: Integer, - ?type: :"response.output_item.added" - ) -> void - | ( - ?OpenAI::Models::Responses::response_output_item_added_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + ?type: :"response.output_item.added" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_output_item_added_event end diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index fbe7e12a..c4df8604 100644 --- a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.output_item.done" - def initialize: - ( - item: OpenAI::Models::Responses::response_output_item, - output_index: Integer, - ?type: :"response.output_item.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_output_item_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + ?type: :"response.output_item.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_output_item_done_event end diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index 429aac88..b003219c 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :message - def initialize: - ( - id: String, - content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], - status: OpenAI::Models::Responses::ResponseOutputMessage::status, - ?role: :assistant, - ?type: :message - ) -> void - | ( - ?OpenAI::Models::Responses::response_output_message - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], + status: OpenAI::Models::Responses::ResponseOutputMessage::status, + ?role: :assistant, + ?type: :message + ) -> void def to_hash: -> 
OpenAI::Models::Responses::response_output_message diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs index 8ea6676a..f1743e42 100644 --- a/sig/openai/models/responses/response_output_refusal.rbs +++ b/sig/openai/models/responses/response_output_refusal.rbs @@ -8,12 +8,7 @@ module OpenAI attr_accessor type: :refusal - def initialize: - (refusal: String, ?type: :refusal) -> void - | ( - ?OpenAI::Models::Responses::response_output_refusal - | OpenAI::BaseModel data - ) -> void + def initialize: (refusal: String, ?type: :refusal) -> void def to_hash: -> OpenAI::Models::Responses::response_output_refusal end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index c900d1f0..a40aee39 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :output_text - def initialize: - ( - annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], - text: String, - ?type: :output_text - ) -> void - | ( - ?OpenAI::Models::Responses::response_output_text - | OpenAI::BaseModel data - ) -> void + def initialize: ( + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], + text: String, + ?type: :output_text + ) -> void def to_hash: -> OpenAI::Models::Responses::response_output_text @@ -44,12 +39,11 @@ module OpenAI attr_accessor type: :file_citation - def initialize: - (file_id: String, index: Integer, ?type: :file_citation) -> void - | ( - ?OpenAI::Models::Responses::ResponseOutputText::Annotation::file_citation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + index: Integer, + ?type: :file_citation + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_citation end @@ -74,18 +68,13 @@ module OpenAI attr_accessor url: String - def initialize: - ( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - ?type: :url_citation - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseOutputText::Annotation::url_citation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + ?type: :url_citation + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::url_citation end @@ -99,12 +88,11 @@ module OpenAI attr_accessor type: :file_path - def initialize: - (file_id: String, index: Integer, ?type: :file_path) -> void - | ( - ?OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + index: Integer, + ?type: :file_path + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path end diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 6e9cb7a8..23d2dd02 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -22,17 +22,12 @@ module OpenAI OpenAI::Models::Responses::ResponseReasoningItem::status ) -> OpenAI::Models::Responses::ResponseReasoningItem::status - def initialize: - ( - id: String, - summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - ?status: 
OpenAI::Models::Responses::ResponseReasoningItem::status, - ?type: :reasoning - ) -> void - | ( - ?OpenAI::Models::Responses::response_reasoning_item - | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], + ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, + ?type: :reasoning + ) -> void def to_hash: -> OpenAI::Models::Responses::response_reasoning_item @@ -43,12 +38,7 @@ module OpenAI attr_accessor type: :summary_text - def initialize: - (text: String, ?type: :summary_text) -> void - | ( - ?OpenAI::Models::Responses::ResponseReasoningItem::summary - | OpenAI::BaseModel data - ) -> void + def initialize: (text: String, ?type: :summary_text) -> void def to_hash: -> OpenAI::Models::Responses::ResponseReasoningItem::summary end diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index 53a1cf99..6d26e2d3 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.refusal.delta" - def initialize: - ( - content_index: Integer, - delta: String, - item_id: String, - output_index: Integer, - ?type: :"response.refusal.delta" - ) -> void - | ( - ?OpenAI::Models::Responses::response_refusal_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + ?type: :"response.refusal.delta" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_refusal_delta_event end diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 2b52e381..693e0216 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.refusal.done" - def initialize: - ( - content_index: Integer, - item_id: String, - output_index: Integer, - refusal: String, - ?type: :"response.refusal.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_refusal_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + refusal: String, + ?type: :"response.refusal.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_refusal_done_event end diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 846f8753..2b845fc5 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -15,15 +15,10 @@ module OpenAI ::Array[OpenAI::Models::Responses::response_includable] ) -> ::Array[OpenAI::Models::Responses::response_includable] - def initialize: - ( - ?include: ::Array[OpenAI::Models::Responses::response_includable], - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::Responses::response_retrieve_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Responses::response_retrieve_params end diff --git 
a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 71d0cbd2..d01375b4 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -24,19 +24,14 @@ module OpenAI attr_accessor type: :"response.output_text.annotation.added" - def initialize: - ( - annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - ?type: :"response.output_text.annotation.added" - ) -> void - | ( - ?OpenAI::Models::Responses::response_text_annotation_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + ?type: :"response.output_text.annotation.added" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_text_annotation_delta_event @@ -56,12 +51,11 @@ module OpenAI attr_accessor type: :file_citation - def initialize: - (file_id: String, index: Integer, ?type: :file_citation) -> void - | ( - ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_citation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + index: Integer, + ?type: :file_citation + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_citation end @@ -86,18 +80,13 @@ module OpenAI attr_accessor url: String - def initialize: - ( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - ?type: :url_citation - ) -> void - | ( - ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::url_citation - | OpenAI::BaseModel data - ) -> void + def initialize: ( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + ?type: :url_citation + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::url_citation end @@ -111,12 +100,11 @@ module OpenAI attr_accessor type: :file_path - def initialize: - (file_id: String, index: Integer, ?type: :file_path) -> void - | ( - ?OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path - | OpenAI::BaseModel data - ) -> void + def initialize: ( + file_id: String, + index: Integer, + ?type: :file_path + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path end diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index 84d7da60..2429e169 100644 --- a/sig/openai/models/responses/response_text_config.rbs +++ b/sig/openai/models/responses/response_text_config.rbs @@ -11,14 +11,9 @@ module OpenAI OpenAI::Models::Responses::response_format_text_config ) -> OpenAI::Models::Responses::response_format_text_config - def initialize: - ( - ?format_: OpenAI::Models::Responses::response_format_text_config - ) -> void - | ( - ?OpenAI::Models::Responses::response_text_config - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void def to_hash: -> OpenAI::Models::Responses::response_text_config end diff --git a/sig/openai/models/responses/response_text_delta_event.rbs 
b/sig/openai/models/responses/response_text_delta_event.rbs index 89c2f2fa..45cfac57 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.output_text.delta" - def initialize: - ( - content_index: Integer, - delta: String, - item_id: String, - output_index: Integer, - ?type: :"response.output_text.delta" - ) -> void - | ( - ?OpenAI::Models::Responses::response_text_delta_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + ?type: :"response.output_text.delta" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_text_delta_event end diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 16f089dd..bc4c94d4 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -21,18 +21,13 @@ module OpenAI attr_accessor type: :"response.output_text.done" - def initialize: - ( - content_index: Integer, - item_id: String, - output_index: Integer, - text: String, - ?type: :"response.output_text.done" - ) -> void - | ( - ?OpenAI::Models::Responses::response_text_done_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + ?type: :"response.output_text.done" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_text_done_event end diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs index c00715ed..8f4d0dac 100644 --- a/sig/openai/models/responses/response_usage.rbs +++ b/sig/openai/models/responses/response_usage.rbs @@ -21,17 +21,13 @@ module OpenAI attr_accessor total_tokens: Integer - def initialize: - ( - input_tokens: Integer, - input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, - output_tokens: Integer, - output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, - total_tokens: Integer - ) -> void - | ( - ?OpenAI::Models::Responses::response_usage | OpenAI::BaseModel data - ) -> void + def initialize: ( + input_tokens: Integer, + input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, + output_tokens: Integer, + output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, + total_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::Responses::response_usage @@ -40,12 +36,7 @@ module OpenAI class InputTokensDetails < OpenAI::BaseModel attr_accessor cached_tokens: Integer - def initialize: - (cached_tokens: Integer) -> void - | ( - ?OpenAI::Models::Responses::ResponseUsage::input_tokens_details - | OpenAI::BaseModel data - ) -> void + def initialize: (cached_tokens: Integer) -> void def to_hash: -> OpenAI::Models::Responses::ResponseUsage::input_tokens_details end @@ -55,12 +46,7 @@ module OpenAI class OutputTokensDetails < OpenAI::BaseModel attr_accessor reasoning_tokens: Integer - def initialize: - (reasoning_tokens: Integer) -> void - | ( - ?OpenAI::Models::Responses::ResponseUsage::output_tokens_details - | OpenAI::BaseModel data - ) -> void + def initialize: (reasoning_tokens: Integer) -> void def to_hash: -> OpenAI::Models::Responses::ResponseUsage::output_tokens_details end diff --git 
a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index 0b7a9a7d..bc8d0e77 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.web_search_call.completed" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.web_search_call.completed" - ) -> void - | ( - ?OpenAI::Models::Responses::response_web_search_call_completed_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.web_search_call.completed" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_web_search_call_completed_event end diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index 236857f8..34cdc0c4 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.web_search_call.in_progress" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.web_search_call.in_progress" - ) -> void - | ( - ?OpenAI::Models::Responses::response_web_search_call_in_progress_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.web_search_call.in_progress" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_web_search_call_in_progress_event end diff --git a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index 26145d41..f03cf1a9 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -15,16 +15,11 @@ module OpenAI attr_accessor type: :"response.web_search_call.searching" - def initialize: - ( - item_id: String, - output_index: Integer, - ?type: :"response.web_search_call.searching" - ) -> void - | ( - ?OpenAI::Models::Responses::response_web_search_call_searching_event - | OpenAI::BaseModel data - ) -> void + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.web_search_call.searching" + ) -> void def to_hash: -> OpenAI::Models::Responses::response_web_search_call_searching_event end diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs index 6f91e879..bf83f1a7 100644 --- a/sig/openai/models/responses/tool_choice_function.rbs +++ b/sig/openai/models/responses/tool_choice_function.rbs @@ -8,12 +8,7 @@ module OpenAI attr_accessor type: :function - def initialize: - (name: String, ?type: :function) -> void - | ( - ?OpenAI::Models::Responses::tool_choice_function - | OpenAI::BaseModel data - ) -> void + def initialize: (name: String, ?type: :function) -> void def to_hash: -> OpenAI::Models::Responses::tool_choice_function end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 66e763a9..ea1e958d 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ 
b/sig/openai/models/responses/tool_choice_types.rbs @@ -7,12 +7,9 @@ module OpenAI class ToolChoiceTypes < OpenAI::BaseModel attr_accessor type: OpenAI::Models::Responses::ToolChoiceTypes::type_ - def initialize: - (type: OpenAI::Models::Responses::ToolChoiceTypes::type_) -> void - | ( - ?OpenAI::Models::Responses::tool_choice_types - | OpenAI::BaseModel data - ) -> void + def initialize: ( + type: OpenAI::Models::Responses::ToolChoiceTypes::type_ + ) -> void def to_hash: -> OpenAI::Models::Responses::tool_choice_types diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index 31394f16..abb3f9a1 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -19,15 +19,11 @@ module OpenAI attr_accessor user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? - def initialize: - ( - type: OpenAI::Models::Responses::WebSearchTool::type_, - ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, - ?user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? - ) -> void - | ( - ?OpenAI::Models::Responses::web_search_tool | OpenAI::BaseModel data - ) -> void + def initialize: ( + type: OpenAI::Models::Responses::WebSearchTool::type_, + ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, + ?user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? + ) -> void def to_hash: -> OpenAI::Models::Responses::web_search_tool @@ -78,18 +74,13 @@ module OpenAI def timezone=: (String) -> String - def initialize: - ( - ?city: String, - ?country: String, - ?region: String, - ?timezone: String, - ?type: :approximate - ) -> void - | ( - ?OpenAI::Models::Responses::WebSearchTool::user_location - | OpenAI::BaseModel data - ) -> void + def initialize: ( + ?city: String, + ?country: String, + ?region: String, + ?timezone: String, + ?type: :approximate + ) -> void def to_hash: -> OpenAI::Models::Responses::WebSearchTool::user_location end diff --git a/sig/openai/models/static_file_chunking_strategy.rbs b/sig/openai/models/static_file_chunking_strategy.rbs index a10dab32..e77bd4ac 100644 --- a/sig/openai/models/static_file_chunking_strategy.rbs +++ b/sig/openai/models/static_file_chunking_strategy.rbs @@ -8,12 +8,10 @@ module OpenAI attr_accessor max_chunk_size_tokens: Integer - def initialize: - (chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer) -> void - | ( - ?OpenAI::Models::static_file_chunking_strategy - | OpenAI::BaseModel data - ) -> void + def initialize: ( + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + ) -> void def to_hash: -> OpenAI::Models::static_file_chunking_strategy end diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index 8cf1c351..62e506a3 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -8,15 +8,10 @@ module OpenAI attr_accessor type: :static - def initialize: - ( - static: OpenAI::Models::StaticFileChunkingStrategy, - ?type: :static - ) -> void - | ( - ?OpenAI::Models::static_file_chunking_strategy_object - | OpenAI::BaseModel data - ) -> void + def initialize: ( + static: OpenAI::Models::StaticFileChunkingStrategy, + ?type: :static + ) -> void def to_hash: -> OpenAI::Models::static_file_chunking_strategy_object end diff --git 
a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index 48965341..a86f96ab 100644 --- a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -8,15 +8,10 @@ module OpenAI attr_accessor type: :static - def initialize: - ( - static: OpenAI::Models::StaticFileChunkingStrategy, - ?type: :static - ) -> void - | ( - ?OpenAI::Models::static_file_chunking_strategy_object_param - | OpenAI::BaseModel data - ) -> void + def initialize: ( + static: OpenAI::Models::StaticFileChunkingStrategy, + ?type: :static + ) -> void def to_hash: -> OpenAI::Models::static_file_chunking_strategy_object_param end diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index c4c5f528..eead4b34 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -32,19 +32,17 @@ module OpenAI attr_accessor file: OpenAI::Models::FileObject? - def initialize: - ( - id: String, - bytes: Integer, - created_at: Integer, - expires_at: Integer, - filename: String, - purpose: String, - status: OpenAI::Models::Upload::status, - ?file: OpenAI::Models::FileObject?, - ?object: :upload - ) -> void - | (?OpenAI::Models::upload | OpenAI::BaseModel data) -> void + def initialize: ( + id: String, + bytes: Integer, + created_at: Integer, + expires_at: Integer, + filename: String, + purpose: String, + status: OpenAI::Models::Upload::status, + ?file: OpenAI::Models::FileObject?, + ?object: :upload + ) -> void def to_hash: -> OpenAI::Models::upload diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index de559455..67e76b0d 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -6,11 +6,7 @@ module OpenAI extend OpenAI::RequestParameters::Converter include OpenAI::RequestParameters - def initialize: - (?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::upload_cancel_params | OpenAI::BaseModel data - ) -> void + def initialize: (?request_options: OpenAI::request_opts) -> void def to_hash: -> OpenAI::Models::upload_cancel_params end diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index fdea0062..c7df9dc0 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -13,15 +13,11 @@ module OpenAI def md5=: (String) -> String - def initialize: - ( - part_ids: ::Array[String], - ?md5: String, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::upload_complete_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + part_ids: ::Array[String], + ?md5: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::upload_complete_params end diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 20230ef3..0ea5b497 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -21,17 +21,13 @@ module OpenAI attr_accessor purpose: OpenAI::Models::file_purpose - def initialize: - ( - bytes: Integer, - filename: String, - mime_type: String, - purpose: OpenAI::Models::file_purpose, - ?request_options: OpenAI::request_opts - ) -> void - | ( - ?OpenAI::Models::upload_create_params | OpenAI::BaseModel data - ) -> void + def initialize: ( + bytes: Integer, + filename: String, + 
mime_type: String, + purpose: OpenAI::Models::file_purpose, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::upload_create_params end diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index b01dc193..dfd0732f 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -10,12 +10,10 @@ module OpenAI attr_accessor data: IO | StringIO - def initialize: - (data: IO | StringIO, ?request_options: OpenAI::request_opts) -> void - | ( - ?OpenAI::Models::Uploads::part_create_params - | OpenAI::BaseModel data - ) -> void + def initialize: ( + data: IO | StringIO, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::Uploads::part_create_params end diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 6953994b..94f8bb7b 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -21,16 +21,12 @@ module OpenAI attr_accessor upload_id: String - def initialize: - ( - id: String, - created_at: Integer, - upload_id: String, - ?object: :"upload.part" - ) -> void - | ( - ?OpenAI::Models::Uploads::upload_part | OpenAI::BaseModel data - ) -> void + def initialize: ( + id: String, + created_at: Integer, + upload_id: String, + ?object: :"upload.part" + ) -> void def to_hash: -> OpenAI::Models::Uploads::upload_part end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index dbd3c546..2f4e439b 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -42,21 +42,19 @@ module OpenAI attr_accessor expires_at: Integer? - def initialize: - ( - id: String, - created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, - last_active_at: Integer?, - metadata: OpenAI::Models::metadata?, - name: String, - status: OpenAI::Models::VectorStore::status, - usage_bytes: Integer, - ?expires_after: OpenAI::Models::VectorStore::ExpiresAfter, - ?expires_at: Integer?, - ?object: :vector_store - ) -> void - | (?OpenAI::Models::vector_store | OpenAI::BaseModel data) -> void + def initialize: ( + id: String, + created_at: Integer, + file_counts: OpenAI::Models::VectorStore::FileCounts, + last_active_at: Integer?, + metadata: OpenAI::Models::metadata?, + name: String, + status: OpenAI::Models::VectorStore::status, + usage_bytes: Integer, + ?expires_after: OpenAI::Models::VectorStore::ExpiresAfter, + ?expires_at: Integer?, + ?object: :vector_store + ) -> void def to_hash: -> OpenAI::Models::vector_store @@ -80,17 +78,13 @@ module OpenAI attr_accessor total: Integer - def initialize: - ( - cancelled: Integer, - completed: Integer, - failed: Integer, - in_progress: Integer, - total: Integer - ) -> void - | ( - ?OpenAI::Models::VectorStore::file_counts | OpenAI::BaseModel data - ) -> void + def initialize: ( + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + ) -> void def to_hash: -> OpenAI::Models::VectorStore::file_counts end @@ -112,11 +106,7 @@ module OpenAI attr_accessor days: Integer - def initialize: - (days: Integer, ?anchor: :last_active_at) -> void - | ( - ?OpenAI::Models::VectorStore::expires_after | OpenAI::BaseModel data - ) -> void + def initialize: (days: Integer, ?anchor: :last_active_at) -> void def to_hash: -> OpenAI::Models::VectorStore::expires_after end diff --git 
index 8e565574..2072c316 100644
--- a/sig/openai/models/vector_store_create_params.rbs
+++ b/sig/openai/models/vector_store_create_params.rbs
@@ -36,18 +36,14 @@ module OpenAI
 
       def name=: (String) -> String
 
-      def initialize:
-        (
-          ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
-          ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter,
-          ?file_ids: ::Array[String],
-          ?metadata: OpenAI::Models::metadata?,
-          ?name: String,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (
-          ?OpenAI::Models::vector_store_create_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
+        ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter,
+        ?file_ids: ::Array[String],
+        ?metadata: OpenAI::Models::metadata?,
+        ?name: String,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_create_params
 
@@ -58,12 +54,7 @@ module OpenAI
 
         attr_accessor days: Integer
 
-        def initialize:
-          (days: Integer, ?anchor: :last_active_at) -> void
-          | (
-            ?OpenAI::Models::VectorStoreCreateParams::expires_after
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (days: Integer, ?anchor: :last_active_at) -> void
 
         def to_hash: -> OpenAI::Models::VectorStoreCreateParams::expires_after
       end
diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs
index a53f5cac..a65d42cf 100644
--- a/sig/openai/models/vector_store_delete_params.rbs
+++ b/sig/openai/models/vector_store_delete_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
       include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-        | (
-          ?OpenAI::Models::vector_store_delete_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_delete_params
     end
diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs
index 761c47e9..3403e058 100644
--- a/sig/openai/models/vector_store_deleted.rbs
+++ b/sig/openai/models/vector_store_deleted.rbs
@@ -10,11 +10,11 @@ module OpenAI
 
       attr_accessor object: :"vector_store.deleted"
 
-      def initialize:
-        (id: String, deleted: bool, ?object: :"vector_store.deleted") -> void
-        | (
-          ?OpenAI::Models::vector_store_deleted | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        id: String,
+        deleted: bool,
+        ?object: :"vector_store.deleted"
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_deleted
     end
diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs
index aefc7b2b..b2ea6f76 100644
--- a/sig/openai/models/vector_store_list_params.rbs
+++ b/sig/openai/models/vector_store_list_params.rbs
@@ -31,17 +31,13 @@ module OpenAI
         OpenAI::Models::VectorStoreListParams::order
       ) -> OpenAI::Models::VectorStoreListParams::order
 
-      def initialize:
-        (
-          ?after: String,
-          ?before: String,
-          ?limit: Integer,
-          ?order: OpenAI::Models::VectorStoreListParams::order,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (
-          ?OpenAI::Models::vector_store_list_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        ?after: String,
+        ?before: String,
+        ?limit: Integer,
+        ?order: OpenAI::Models::VectorStoreListParams::order,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_list_params
 
diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs
index 2aac5641..072120dd 100644
--- a/sig/openai/models/vector_store_retrieve_params.rbs
+++ b/sig/openai/models/vector_store_retrieve_params.rbs
@@ -6,11 +6,7 @@ module OpenAI
       extend OpenAI::RequestParameters::Converter
       include OpenAI::RequestParameters
 
-      def initialize:
-        (?request_options: OpenAI::request_opts) -> void
-        | (
-          ?OpenAI::Models::vector_store_retrieve_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (?request_options: OpenAI::request_opts) -> void
 
      def to_hash: -> OpenAI::Models::vector_store_retrieve_params
    end
diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs
index e1467da5..0918e4b6 100644
--- a/sig/openai/models/vector_store_search_params.rbs
+++ b/sig/openai/models/vector_store_search_params.rbs
@@ -36,18 +36,14 @@ module OpenAI
 
       def rewrite_query=: (bool) -> bool
 
-      def initialize:
-        (
-          query: OpenAI::Models::VectorStoreSearchParams::query,
-          ?filters: OpenAI::Models::VectorStoreSearchParams::filters,
-          ?max_num_results: Integer,
-          ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions,
-          ?rewrite_query: bool,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (
-          ?OpenAI::Models::vector_store_search_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        query: OpenAI::Models::VectorStoreSearchParams::query,
+        ?filters: OpenAI::Models::VectorStoreSearchParams::filters,
+        ?max_num_results: Integer,
+        ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions,
+        ?rewrite_query: bool,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_search_params
 
@@ -85,15 +81,10 @@ module OpenAI
 
        def score_threshold=: (Float) -> Float
 
-        def initialize:
-          (
-            ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker,
-            ?score_threshold: Float
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStoreSearchParams::ranking_options
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker,
+          ?score_threshold: Float
+        ) -> void
 
        def to_hash: -> OpenAI::Models::VectorStoreSearchParams::ranking_options
 
diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs
index 9207fde5..ae59192d 100644
--- a/sig/openai/models/vector_store_search_response.rbs
+++ b/sig/openai/models/vector_store_search_response.rbs
@@ -20,17 +20,13 @@ module OpenAI
 
       attr_accessor score: Float
 
-      def initialize:
-        (
-          attributes: ::Hash[Symbol, OpenAI::Models::VectorStoreSearchResponse::attribute]?,
-          content: ::Array[OpenAI::Models::VectorStoreSearchResponse::Content],
-          file_id: String,
-          filename: String,
-          score: Float
-        ) -> void
-        | (
-          ?OpenAI::Models::vector_store_search_response | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        attributes: ::Hash[Symbol, OpenAI::Models::VectorStoreSearchResponse::attribute]?,
+        content: ::Array[OpenAI::Models::VectorStoreSearchResponse::Content],
+        file_id: String,
+        filename: String,
+        score: Float
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_search_response
 
@@ -51,15 +47,10 @@ module OpenAI
 
        attr_accessor type: OpenAI::Models::VectorStoreSearchResponse::Content::type_
 
-        def initialize:
-          (
-            text: String,
-            type: OpenAI::Models::VectorStoreSearchResponse::Content::type_
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStoreSearchResponse::content
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          text: String,
+          type: OpenAI::Models::VectorStoreSearchResponse::Content::type_
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStoreSearchResponse::content
 
diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs
index 440f7453..f0b56795 100644
--- a/sig/openai/models/vector_store_update_params.rbs
+++ b/sig/openai/models/vector_store_update_params.rbs
@@ -18,16 +18,12 @@ module OpenAI
 
       attr_accessor name: String?
 
-      def initialize:
-        (
-          ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?,
-          ?metadata: OpenAI::Models::metadata?,
-          ?name: String?,
-          ?request_options: OpenAI::request_opts
-        ) -> void
-        | (
-          ?OpenAI::Models::vector_store_update_params | OpenAI::BaseModel data
-        ) -> void
+      def initialize: (
+        ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?,
+        ?metadata: OpenAI::Models::metadata?,
+        ?name: String?,
+        ?request_options: OpenAI::request_opts
+      ) -> void
 
       def to_hash: -> OpenAI::Models::vector_store_update_params
 
@@ -38,12 +34,7 @@ module OpenAI
 
         attr_accessor days: Integer
 
-        def initialize:
-          (days: Integer, ?anchor: :last_active_at) -> void
-          | (
-            ?OpenAI::Models::VectorStoreUpdateParams::expires_after
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (days: Integer, ?anchor: :last_active_at) -> void
 
         def to_hash: -> OpenAI::Models::VectorStoreUpdateParams::expires_after
       end
diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs
index 5d2e3b32..b83566f3 100644
--- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_batch_cancel_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_batch_cancel_params
       end
diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs
index 80bc34fc..80029298 100644
--- a/sig/openai/models/vector_stores/file_batch_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs
@@ -23,17 +23,12 @@ module OpenAI
           OpenAI::Models::file_chunking_strategy_param
         ) -> OpenAI::Models::file_chunking_strategy_param
 
-        def initialize:
-          (
-            file_ids: ::Array[String],
-            ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?,
-            ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_batch_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          file_ids: ::Array[String],
+          ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?,
+          ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_batch_create_params
 
diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
index e542038d..e642c6f9 100644
--- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
@@ -42,20 +42,15 @@ module OpenAI
           OpenAI::Models::VectorStores::FileBatchListFilesParams::order
         ) -> OpenAI::Models::VectorStores::FileBatchListFilesParams::order
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?after: String,
-            ?before: String,
-            ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter,
-            ?limit: Integer,
-            ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_batch_list_files_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?after: String,
+          ?before: String,
+          ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter,
+          ?limit: Integer,
+          ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_batch_list_files_params
 
diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
index f10bdb48..ac22b295 100644
--- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_batch_retrieve_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_batch_retrieve_params
       end
diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs
index 58b90952..95606785 100644
--- a/sig/openai/models/vector_stores/file_content_params.rbs
+++ b/sig/openai/models/vector_stores/file_content_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_content_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_content_params
       end
diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs
index 6d24a610..11f49799 100644
--- a/sig/openai/models/vector_stores/file_content_response.rbs
+++ b/sig/openai/models/vector_stores/file_content_response.rbs
@@ -12,12 +12,7 @@ module OpenAI
 
         def type=: (String) -> String
 
-        def initialize:
-          (?text: String, ?type: String) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_content_response
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (?text: String, ?type: String) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_content_response
       end
diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs
index 70f7ed4b..ac3e0e2c 100644
--- a/sig/openai/models/vector_stores/file_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_create_params.rbs
@@ -23,17 +23,12 @@ module OpenAI
           OpenAI::Models::file_chunking_strategy_param
         ) -> OpenAI::Models::file_chunking_strategy_param
 
-        def initialize:
-          (
-            file_id: String,
-            ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?,
-            ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_create_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          file_id: String,
+          ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?,
+          ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_create_params
 
diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs
index 14edfd60..634054fa 100644
--- a/sig/openai/models/vector_stores/file_delete_params.rbs
+++ b/sig/openai/models/vector_stores/file_delete_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_delete_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_delete_params
       end
diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs
index d5d920b8..0cc01f91 100644
--- a/sig/openai/models/vector_stores/file_list_params.rbs
+++ b/sig/openai/models/vector_stores/file_list_params.rbs
@@ -39,19 +39,14 @@ module OpenAI
           OpenAI::Models::VectorStores::FileListParams::order
         ) -> OpenAI::Models::VectorStores::FileListParams::order
 
-        def initialize:
-          (
-            ?after: String,
-            ?before: String,
-            ?filter: OpenAI::Models::VectorStores::FileListParams::filter,
-            ?limit: Integer,
-            ?order: OpenAI::Models::VectorStores::FileListParams::order,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_list_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          ?after: String,
+          ?before: String,
+          ?filter: OpenAI::Models::VectorStores::FileListParams::filter,
+          ?limit: Integer,
+          ?order: OpenAI::Models::VectorStores::FileListParams::order,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_list_params
 
diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs
index 09096f81..0e6f1b85 100644
--- a/sig/openai/models/vector_stores/file_retrieve_params.rbs
+++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs
@@ -10,15 +10,10 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            vector_store_id: String,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_retrieve_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_retrieve_params
       end
diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs
index 3574bfa7..6b2b7029 100644
--- a/sig/openai/models/vector_stores/file_update_params.rbs
+++ b/sig/openai/models/vector_stores/file_update_params.rbs
@@ -16,16 +16,11 @@ module OpenAI
 
         attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?
 
-        def initialize:
-          (
-            vector_store_id: String,
-            attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?,
-            ?request_options: OpenAI::request_opts
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::file_update_params
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          vector_store_id: String,
+          attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?,
+          ?request_options: OpenAI::request_opts
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::file_update_params
 
diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs
index b857316f..79940901 100644
--- a/sig/openai/models/vector_stores/vector_store_file.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file.rbs
@@ -40,22 +40,17 @@ module OpenAI
           OpenAI::Models::file_chunking_strategy
         ) -> OpenAI::Models::file_chunking_strategy
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError?,
-            status: OpenAI::Models::VectorStores::VectorStoreFile::status,
-            usage_bytes: Integer,
-            vector_store_id: String,
-            ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?,
-            ?chunking_strategy: OpenAI::Models::file_chunking_strategy,
-            ?object: :"vector_store.file"
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::vector_store_file
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError?,
+          status: OpenAI::Models::VectorStores::VectorStoreFile::status,
+          usage_bytes: Integer,
+          vector_store_id: String,
+          ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?,
+          ?chunking_strategy: OpenAI::Models::file_chunking_strategy,
+          ?object: :"vector_store.file"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::vector_store_file
 
@@ -70,15 +65,10 @@ module OpenAI
 
          attr_accessor message: String
 
-          def initialize:
-            (
-              code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code,
-              message: String
-            ) -> void
-            | (
-              ?OpenAI::Models::VectorStores::VectorStoreFile::last_error
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code,
+            message: String
+          ) -> void
 
          def to_hash: -> OpenAI::Models::VectorStores::VectorStoreFile::last_error
 
diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
index 848fdedc..497f5dc3 100644
--- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
@@ -27,19 +27,14 @@ module OpenAI
 
         attr_accessor vector_store_id: String
 
-        def initialize:
-          (
-            id: String,
-            created_at: Integer,
-            file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
-            status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status,
-            vector_store_id: String,
-            ?object: :"vector_store.files_batch"
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::vector_store_file_batch
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          created_at: Integer,
+          file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
+          status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status,
+          vector_store_id: String,
+          ?object: :"vector_store.files_batch"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::vector_store_file_batch
 
@@ -63,18 +58,13 @@ module OpenAI
 
          attr_accessor total: Integer
 
-          def initialize:
-            (
-              cancelled: Integer,
-              completed: Integer,
-              failed: Integer,
-              in_progress: Integer,
-              total: Integer
-            ) -> void
-            | (
-              ?OpenAI::Models::VectorStores::VectorStoreFileBatch::file_counts
-              | OpenAI::BaseModel data
-            ) -> void
+          def initialize: (
+            cancelled: Integer,
+            completed: Integer,
+            failed: Integer,
+            in_progress: Integer,
+            total: Integer
+          ) -> void
 
          def to_hash: -> OpenAI::Models::VectorStores::VectorStoreFileBatch::file_counts
         end
diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
index a1ccb5ea..dde63be2 100644
--- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
@@ -14,16 +14,11 @@ module OpenAI
 
         attr_accessor object: :"vector_store.file.deleted"
 
-        def initialize:
-          (
-            id: String,
-            deleted: bool,
-            ?object: :"vector_store.file.deleted"
-          ) -> void
-          | (
-            ?OpenAI::Models::VectorStores::vector_store_file_deleted
-            | OpenAI::BaseModel data
-          ) -> void
+        def initialize: (
+          id: String,
+          deleted: bool,
+          ?object: :"vector_store.file.deleted"
+        ) -> void
 
         def to_hash: -> OpenAI::Models::VectorStores::vector_store_file_deleted
       end
diff --git a/sig/openai/resources/audio/speech.rbs b/sig/openai/resources/audio/speech.rbs
index fc30ff40..65002d04 100644
--- a/sig/openai/resources/audio/speech.rbs
+++ b/sig/openai/resources/audio/speech.rbs
@@ -2,19 +2,14 @@ module OpenAI
   module Resources
     class Audio
       class Speech
-        def create:
-          (
-            OpenAI::Models::Audio::SpeechCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> top
-          | (
-            input: String,
-            model: OpenAI::Models::Audio::SpeechCreateParams::model,
-            voice: OpenAI::Models::Audio::SpeechCreateParams::voice,
-            ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
-            ?speed: Float,
-            ?request_options: OpenAI::request_opts
-          ) -> top
+        def create: (
+          input: String,
+          model: OpenAI::Models::Audio::SpeechCreateParams::model,
+          voice: OpenAI::Models::Audio::SpeechCreateParams::voice,
+          ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format,
+          ?speed: Float,
+          ?request_options: OpenAI::request_opts
+        ) -> top
 
         def initialize: (client: OpenAI::Client) -> void
       end
diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs
index 67d0eb49..9ee728bb 100644
--- a/sig/openai/resources/audio/transcriptions.rbs
+++ b/sig/openai/resources/audio/transcriptions.rbs
@@ -2,21 +2,16 @@ module OpenAI
   module Resources
     class Audio
       class Transcriptions
-        def create:
-          (
-            OpenAI::Models::Audio::TranscriptionCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Audio::transcription_create_response
-          | (
-            file: IO | StringIO,
-            model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
-            ?language: String,
-            ?prompt: String,
-            ?response_format: OpenAI::Models::audio_response_format,
-            ?temperature: Float,
-            ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Audio::transcription_create_response
+        def create: (
+          file: IO | StringIO,
+          model: OpenAI::Models::Audio::TranscriptionCreateParams::model,
+          ?language: String,
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity],
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Audio::transcription_create_response
 
         def initialize: (client: OpenAI::Client) -> void
       end
diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs
index 7683d6e2..f4c61adf 100644
--- a/sig/openai/resources/audio/translations.rbs
+++ b/sig/openai/resources/audio/translations.rbs
@@ -2,19 +2,14 @@ module OpenAI
   module Resources
     class Audio
      class Translations
-        def create:
-          (
-            OpenAI::Models::Audio::TranslationCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Audio::translation_create_response
-          | (
-            file: IO | StringIO,
-            model: OpenAI::Models::Audio::TranslationCreateParams::model,
-            ?prompt: String,
-            ?response_format: OpenAI::Models::audio_response_format,
-            ?temperature: Float,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Audio::translation_create_response
+        def create: (
+          file: IO | StringIO,
+          model: OpenAI::Models::Audio::TranslationCreateParams::model,
+          ?prompt: String,
+          ?response_format: OpenAI::Models::audio_response_format,
+          ?temperature: Float,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Audio::translation_create_response
 
         def initialize: (client: OpenAI::Client) -> void
      end
diff --git a/sig/openai/resources/batches.rbs b/sig/openai/resources/batches.rbs
index 506824d7..37dbb707 100644
--- a/sig/openai/resources/batches.rbs
+++ b/sig/openai/resources/batches.rbs
@@ -1,47 +1,29 @@
 module OpenAI
   module Resources
     class Batches
-      def create:
-        (
-          OpenAI::Models::BatchCreateParams | ::Hash[Symbol, top] params
-        ) -> OpenAI::Models::Batch
-        | (
-          completion_window: OpenAI::Models::BatchCreateParams::completion_window,
-          endpoint: OpenAI::Models::BatchCreateParams::endpoint,
-          input_file_id: String,
-          ?metadata: OpenAI::Models::metadata?,
-          ?request_options: OpenAI::request_opts
-        ) -> OpenAI::Models::Batch
+      def create: (
+        completion_window: OpenAI::Models::BatchCreateParams::completion_window,
+        endpoint: OpenAI::Models::BatchCreateParams::endpoint,
+        input_file_id: String,
+        ?metadata: OpenAI::Models::metadata?,
+        ?request_options: OpenAI::request_opts
+      ) -> OpenAI::Models::Batch
 
-      def retrieve:
-        (
-          String batch_id,
-          ?OpenAI::Models::BatchRetrieveParams | ::Hash[Symbol, top] params
-        ) -> OpenAI::Models::Batch
-        | (
-          String batch_id,
-          ?request_options: OpenAI::request_opts
-        ) -> OpenAI::Models::Batch
+      def retrieve: (
+        String batch_id,
+        ?request_options: OpenAI::request_opts
+      ) -> OpenAI::Models::Batch
 
-      def list:
-        (
-          ?OpenAI::Models::BatchListParams | ::Hash[Symbol, top] params
-        ) -> OpenAI::CursorPage[OpenAI::Models::Batch]
-        | (
-          ?after: String,
-          ?limit: Integer,
-          ?request_options: OpenAI::request_opts
-        ) -> OpenAI::CursorPage[OpenAI::Models::Batch]
+      def list: (
+        ?after: String,
+        ?limit: Integer,
+        ?request_options: OpenAI::request_opts
+      ) -> OpenAI::CursorPage[OpenAI::Models::Batch]
 
-      def cancel:
-        (
-          String batch_id,
-          ?OpenAI::Models::BatchCancelParams | ::Hash[Symbol, top] params
-        ) -> OpenAI::Models::Batch
-        | (
-          String batch_id,
-          ?request_options: OpenAI::request_opts
-        ) -> OpenAI::Models::Batch
+      def cancel: (
+        String batch_id,
+        ?request_options: OpenAI::request_opts
+      ) -> OpenAI::Models::Batch
 
       def initialize: (client: OpenAI::Client) -> void
     end
diff --git a/sig/openai/resources/beta/assistants.rbs b/sig/openai/resources/beta/assistants.rbs
index 508c1f80..84ea0af9 100644
--- a/sig/openai/resources/beta/assistants.rbs
+++ b/sig/openai/resources/beta/assistants.rbs
@@ -2,82 +2,54 @@ module OpenAI
   module Resources
     class Beta
       class Assistants
-        def create:
-          (
-            OpenAI::Models::Beta::AssistantCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Assistant
-          | (
-            model: OpenAI::Models::Beta::AssistantCreateParams::model,
-            ?description: String?,
-            ?instructions: String?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?name: String?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
-            ?top_p: Float?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Assistant
+        def create: (
+          model: OpenAI::Models::Beta::AssistantCreateParams::model,
+          ?description: String?,
+          ?instructions: String?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?name: String?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
+          ?top_p: Float?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Assistant
 
-        def retrieve:
-          (
-            String assistant_id,
-            ?OpenAI::Models::Beta::AssistantRetrieveParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Assistant
-          | (
-            String assistant_id,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Assistant
+        def retrieve: (
+          String assistant_id,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Assistant
 
-        def update:
-          (
-            String assistant_id,
-            ?OpenAI::Models::Beta::AssistantUpdateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Assistant
-          | (
-            String assistant_id,
-            ?description: String?,
-            ?instructions: String?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?model: OpenAI::Models::Beta::AssistantUpdateParams::model,
-            ?name: String?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
-            ?top_p: Float?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Assistant
+        def update: (
+          String assistant_id,
+          ?description: String?,
+          ?instructions: String?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?model: OpenAI::Models::Beta::AssistantUpdateParams::model,
+          ?name: String?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
+          ?top_p: Float?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Assistant
 
-        def list:
-          (
-            ?OpenAI::Models::Beta::AssistantListParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]
-          | (
-            ?after: String,
-            ?before: String,
-            ?limit: Integer,
-            ?order: OpenAI::Models::Beta::AssistantListParams::order,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]
+        def list: (
+          ?after: String,
+          ?before: String,
+          ?limit: Integer,
+          ?order: OpenAI::Models::Beta::AssistantListParams::order,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]
 
-        def delete:
-          (
-            String assistant_id,
-            ?OpenAI::Models::Beta::AssistantDeleteParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::AssistantDeleted
-          | (
-            String assistant_id,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::AssistantDeleted
+        def delete: (
+          String assistant_id,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::AssistantDeleted
 
         def initialize: (client: OpenAI::Client) -> void
       end
diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs
index 3c50dbb1..10e58839 100644
--- a/sig/openai/resources/beta/threads.rbs
+++ b/sig/openai/resources/beta/threads.rbs
@@ -6,100 +6,67 @@ module OpenAI
 
        attr_reader messages: OpenAI::Resources::Beta::Threads::Messages
 
-        def create:
-          (
-            ?OpenAI::Models::Beta::ThreadCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Thread
-          | (
-            ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message],
-            ?metadata: OpenAI::Models::metadata?,
-            ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Thread
+        def create: (
+          ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message],
+          ?metadata: OpenAI::Models::metadata?,
+          ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Thread
 
-        def retrieve:
-          (
-            String thread_id,
-            ?OpenAI::Models::Beta::ThreadRetrieveParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Thread
-          | (
-            String thread_id,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Thread
+        def retrieve: (
+          String thread_id,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Thread
 
-        def update:
-          (
-            String thread_id,
-            ?OpenAI::Models::Beta::ThreadUpdateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Thread
-          | (
-            String thread_id,
-            ?metadata: OpenAI::Models::metadata?,
-            ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Thread
+        def update: (
+          String thread_id,
+          ?metadata: OpenAI::Models::metadata?,
+          ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Thread
 
-        def delete:
-          (
-            String thread_id,
-            ?OpenAI::Models::Beta::ThreadDeleteParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::ThreadDeleted
-          | (
-            String thread_id,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::ThreadDeleted
+        def delete: (
+          String thread_id,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::ThreadDeleted
 
-        def create_and_run:
-          (
-            OpenAI::Models::Beta::ThreadCreateAndRunParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Beta::Threads::Run
-          | (
-            assistant_id: String,
-            ?instructions: String?,
-            ?max_completion_tokens: Integer?,
-            ?max_prompt_tokens: Integer?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?,
-            ?parallel_tool_calls: bool,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
-            ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-            ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?,
-            ?top_p: Float?,
-            ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Beta::Threads::Run
+        def create_and_run: (
+          assistant_id: String,
+          ?instructions: String?,
+          ?max_completion_tokens: Integer?,
+          ?max_prompt_tokens: Integer?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?,
+          ?parallel_tool_calls: bool,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
+          ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
+          ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?,
+          ?top_p: Float?,
+          ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Beta::Threads::Run
 
-        def create_and_run_streaming:
-          (
-            OpenAI::Models::Beta::ThreadCreateAndRunParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
-          | (
-            assistant_id: String,
-            ?instructions: String?,
-            ?max_completion_tokens: Integer?,
-            ?max_prompt_tokens: Integer?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?,
-            ?parallel_tool_calls: bool,
-            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-            ?temperature: Float?,
-            ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
-            ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-            ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
-            ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?,
-            ?top_p: Float?,
-            ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
+        def create_and_run_streaming: (
+          assistant_id: String,
+          ?instructions: String?,
+          ?max_completion_tokens: Integer?,
+          ?max_prompt_tokens: Integer?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?,
+          ?parallel_tool_calls: bool,
+          ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+          ?temperature: Float?,
+          ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
+          ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
+          ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
+          ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?,
+          ?top_p: Float?,
+          ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
 
         def initialize: (client: OpenAI::Client) -> void
      end
diff --git a/sig/openai/resources/beta/threads/messages.rbs b/sig/openai/resources/beta/threads/messages.rbs
index ca8ccb42..f5421f58 100644
--- a/sig/openai/resources/beta/threads/messages.rbs
+++ b/sig/openai/resources/beta/threads/messages.rbs
@@ -3,73 +3,43 @@ module OpenAI
     class Beta
       class Threads
         class Messages
-          def create:
-            (
-              String thread_id,
-              OpenAI::Models::Beta::Threads::MessageCreateParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Message
-            | (
-              String thread_id,
-              content: OpenAI::Models::Beta::Threads::MessageCreateParams::content,
-              role: OpenAI::Models::Beta::Threads::MessageCreateParams::role,
-              ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?,
-              ?metadata: OpenAI::Models::metadata?,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Message
+          def create: (
+            String thread_id,
+            content: OpenAI::Models::Beta::Threads::MessageCreateParams::content,
+            role: OpenAI::Models::Beta::Threads::MessageCreateParams::role,
+            ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Message
 
-          def retrieve:
-            (
-              String message_id,
-              OpenAI::Models::Beta::Threads::MessageRetrieveParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Message
-            | (
-              String message_id,
-              thread_id: String,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Message
+          def retrieve: (
+            String message_id,
+            thread_id: String,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Message
 
-          def update:
-            (
-              String message_id,
-              OpenAI::Models::Beta::Threads::MessageUpdateParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Message
-            | (
-              String message_id,
-              thread_id: String,
-              ?metadata: OpenAI::Models::metadata?,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Message
+          def update: (
+            String message_id,
+            thread_id: String,
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Message
 
-          def list:
-            (
-              String thread_id,
-              ?OpenAI::Models::Beta::Threads::MessageListParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message]
-            | (
-              String thread_id,
-              ?after: String,
-              ?before: String,
-              ?limit: Integer,
-              ?order: OpenAI::Models::Beta::Threads::MessageListParams::order,
-              ?run_id: String,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message]
+          def list: (
+            String thread_id,
+            ?after: String,
+            ?before: String,
+            ?limit: Integer,
+            ?order: OpenAI::Models::Beta::Threads::MessageListParams::order,
+            ?run_id: String,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message]
 
-          def delete:
-            (
-              String message_id,
-              OpenAI::Models::Beta::Threads::MessageDeleteParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::MessageDeleted
-            | (
-              String message_id,
-              thread_id: String,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::MessageDeleted
+          def delete: (
+            String message_id,
+            thread_id: String,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::MessageDeleted
 
           def initialize: (client: OpenAI::Client) -> void
         end
diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs
index 2ad2c954..24c1f8c5 100644
--- a/sig/openai/resources/beta/threads/runs.rbs
+++ b/sig/openai/resources/beta/threads/runs.rbs
@@ -5,139 +5,91 @@ module OpenAI
         class Runs
           attr_reader steps: OpenAI::Resources::Beta::Threads::Runs::Steps
 
-          def create:
-            (
-              String thread_id,
-              OpenAI::Models::Beta::Threads::RunCreateParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Run
-            | (
-              String thread_id,
-              assistant_id: String,
-              ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-              ?additional_instructions: String?,
-              ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
-              ?instructions: String?,
-              ?max_completion_tokens: Integer?,
-              ?max_prompt_tokens: Integer?,
-              ?metadata: OpenAI::Models::metadata?,
-              ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
-              ?parallel_tool_calls: bool,
-              ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-              ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-              ?temperature: Float?,
-              ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-              ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
-              ?top_p: Float?,
-              ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Run
+          def create: (
+            String thread_id,
+            assistant_id: String,
+            ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+            ?additional_instructions: String?,
+            ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
+            ?instructions: String?,
+            ?max_completion_tokens: Integer?,
+            ?max_prompt_tokens: Integer?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
+            ?parallel_tool_calls: bool,
+            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+            ?temperature: Float?,
+            ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
+            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
+            ?top_p: Float?,
+            ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Run
 
-          def create_streaming:
-            (
-              String thread_id,
-              OpenAI::Models::Beta::Threads::RunCreateParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
-            | (
-              String thread_id,
-              assistant_id: String,
-              ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-              ?additional_instructions: String?,
-              ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
-              ?instructions: String?,
-              ?max_completion_tokens: Integer?,
-              ?max_prompt_tokens: Integer?,
-              ?metadata: OpenAI::Models::metadata?,
-              ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
-              ?parallel_tool_calls: bool,
-              ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-              ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
-              ?temperature: Float?,
-              ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-              ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
-              ?top_p: Float?,
-              ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
+          def create_streaming: (
+            String thread_id,
+            assistant_id: String,
+            ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+            ?additional_instructions: String?,
+            ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?,
+            ?instructions: String?,
+            ?max_completion_tokens: Integer?,
+            ?max_prompt_tokens: Integer?,
+            ?metadata: OpenAI::Models::metadata?,
+            ?model: OpenAI::Models::Beta::Threads::RunCreateParams::model?,
+            ?parallel_tool_calls: bool,
+            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+            ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
+            ?temperature: Float?,
+            ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
+            ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
+            ?top_p: Float?,
+            ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
 
-          def retrieve:
-            (
-              String run_id,
-              OpenAI::Models::Beta::Threads::RunRetrieveParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Run
-            | (
-              String run_id,
-              thread_id: String,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Run
+          def retrieve: (
+            String run_id,
+            thread_id: String,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Run
 
-          def update:
-            (
-              String run_id,
-              OpenAI::Models::Beta::Threads::RunUpdateParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Run
-            | (
-              String run_id,
-              thread_id: String,
-              ?metadata: OpenAI::Models::metadata?,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Run
+          def update: (
+            String run_id,
+            thread_id: String,
+            ?metadata: OpenAI::Models::metadata?,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Run
 
-          def list:
-            (
-              String thread_id,
-              ?OpenAI::Models::Beta::Threads::RunListParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]
-            | (
-              String thread_id,
-              ?after: String,
-              ?before: String,
-              ?limit: Integer,
-              ?order: OpenAI::Models::Beta::Threads::RunListParams::order,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]
+          def list: (
+            String thread_id,
+            ?after: String,
+            ?before: String,
+            ?limit: Integer,
+            ?order: OpenAI::Models::Beta::Threads::RunListParams::order,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]
 
-          def cancel:
-            (
-              String run_id,
-              OpenAI::Models::Beta::Threads::RunCancelParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Run
-            | (
-              String run_id,
-              thread_id: String,
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Run
+          def cancel: (
+            String run_id,
+            thread_id: String,
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Run
 
-          def submit_tool_outputs:
-            (
-              String run_id,
-              OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Models::Beta::Threads::Run
-            | (
-              String run_id,
-              thread_id: String,
-              tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Models::Beta::Threads::Run
+          def submit_tool_outputs: (
+            String run_id,
+            thread_id: String,
+            tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Models::Beta::Threads::Run
 
-          def submit_tool_outputs_streaming:
-            (
-              String run_id,
-              OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams
-              | ::Hash[Symbol, top] params
-            ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
-            | (
-              String run_id,
-              thread_id: String,
-              tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
-              ?request_options: OpenAI::request_opts
-            ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
+          def submit_tool_outputs_streaming: (
+            String run_id,
+            thread_id: String,
+            tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
+            ?request_options: OpenAI::request_opts
+          ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event]
 
           def initialize: (client: OpenAI::Client) -> void
         end
diff --git a/sig/openai/resources/beta/threads/runs/steps.rbs b/sig/openai/resources/beta/threads/runs/steps.rbs
index 765d7135..262179eb 100644
--- a/sig/openai/resources/beta/threads/runs/steps.rbs
+++ b/sig/openai/resources/beta/threads/runs/steps.rbs
@@ -4,36 +4,24 @@ module OpenAI
       class Threads
         class Runs
          class Steps
-            def retrieve:
-              (
-                String step_id,
-                OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams
-                | ::Hash[Symbol, top] params
-              ) -> OpenAI::Models::Beta::Threads::Runs::RunStep
-              | (
-                String step_id,
-                thread_id: String,
-                run_id: String,
-                ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-                ?request_options: OpenAI::request_opts
-              ) -> OpenAI::Models::Beta::Threads::Runs::RunStep
+            def retrieve: (
+              String step_id,
+              thread_id: String,
+              run_id: String,
+              ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+              ?request_options: OpenAI::request_opts
+            ) -> OpenAI::Models::Beta::Threads::Runs::RunStep
 
-            def list:
-              (
-                String run_id,
-                OpenAI::Models::Beta::Threads::Runs::StepListParams
-                | ::Hash[Symbol, top] params
-              ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]
-              | (
-                String run_id,
-                thread_id: String,
-                ?after: String,
-                ?before: String,
-                ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
-                ?limit: Integer,
-                ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order,
-                ?request_options: OpenAI::request_opts
-              ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]
+            def list: (
+              String run_id,
+              thread_id: String,
+              ?after: String,
+              ?before: String,
+              ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include],
+              ?limit: Integer,
+              ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order,
+              ?request_options: OpenAI::request_opts
+            ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]
 
             def initialize: (client: OpenAI::Client) -> void
          end
diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs
index 84a03b90..5bd7a8db 100644
--- a/sig/openai/resources/chat/completions.rbs
+++ b/sig/openai/resources/chat/completions.rbs
@@ -4,131 +4,98 @@ module OpenAI
     class Completions
       attr_reader messages: OpenAI::Resources::Chat::Completions::Messages
 
-        def create:
-          (
-            OpenAI::Models::Chat::CompletionCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Chat::ChatCompletion
-          | (
-            messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
-            model: OpenAI::Models::Chat::CompletionCreateParams::model,
-            ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
-            ?frequency_penalty: Float?,
-            ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-            ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
-            ?logit_bias: ::Hash[Symbol, Integer]?,
-            ?logprobs: bool?,
-            ?max_completion_tokens: Integer?,
-            ?max_tokens: Integer?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
-            ?n: Integer?,
-            ?parallel_tool_calls: bool,
-            ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
-            ?presence_penalty: Float?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
-            ?seed: Integer?,
-            ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
-            ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
-            ?store: bool?,
-            ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
-            ?temperature: Float?,
-            ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-            ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
-            ?top_logprobs: Integer?,
-            ?top_p: Float?,
-            ?user: String,
-            ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Chat::ChatCompletion
+        def create: (
+          messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
+          model: OpenAI::Models::Chat::CompletionCreateParams::model,
+          ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+          ?frequency_penalty: Float?,
+          ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
+          ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+          ?logit_bias: ::Hash[Symbol, Integer]?,
+          ?logprobs: bool?,
+          ?max_completion_tokens: Integer?,
+          ?max_tokens: Integer?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+          ?n: Integer?,
+          ?parallel_tool_calls: bool,
+          ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+          ?presence_penalty: Float?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+          ?seed: Integer?,
+          ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
+          ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
+          ?store: bool?,
+          ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+          ?temperature: Float?,
+          ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
+          ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+          ?top_logprobs: Integer?,
+          ?top_p: Float?,
+          ?user: String,
+          ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Chat::ChatCompletion
 
-        def create_streaming:
-          (
-            OpenAI::Models::Chat::CompletionCreateParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]
-          | (
-            messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
-            model: OpenAI::Models::Chat::CompletionCreateParams::model,
-            ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
-            ?frequency_penalty: Float?,
-            ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-            ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
-            ?logit_bias: ::Hash[Symbol, Integer]?,
-            ?logprobs: bool?,
-            ?max_completion_tokens: Integer?,
-            ?max_tokens: Integer?,
-            ?metadata: OpenAI::Models::metadata?,
-            ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
-            ?n: Integer?,
-            ?parallel_tool_calls: bool,
-            ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
-            ?presence_penalty: Float?,
-            ?reasoning_effort: OpenAI::Models::reasoning_effort?,
-            ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
-            ?seed: Integer?,
-            ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
-            ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
-            ?store: bool?,
-            ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
-            ?temperature: Float?,
-            ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-            ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
-            ?top_logprobs: Integer?,
-            ?top_p: Float?,
-            ?user: String,
-            ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]
+        def create_streaming: (
+          messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
+          model: OpenAI::Models::Chat::CompletionCreateParams::model,
+          ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+          ?frequency_penalty: Float?,
+          ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
+          ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+          ?logit_bias: ::Hash[Symbol, Integer]?,
+          ?logprobs: bool?,
+          ?max_completion_tokens: Integer?,
+          ?max_tokens: Integer?,
+          ?metadata: OpenAI::Models::metadata?,
+          ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+          ?n: Integer?,
+          ?parallel_tool_calls: bool,
+          ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+          ?presence_penalty: Float?,
+          ?reasoning_effort: OpenAI::Models::reasoning_effort?,
+          ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
+          ?seed: Integer?,
+          ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
+          ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
+          ?store: bool?,
+          ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+          ?temperature: Float?,
+          ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
+          ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+          ?top_logprobs: Integer?,
+          ?top_p: Float?,
+          ?user: String,
+          ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]
 
-        def retrieve:
-          (
-            String completion_id,
-            ?OpenAI::Models::Chat::CompletionRetrieveParams
-            | ::Hash[Symbol, top] params
-          ) -> OpenAI::Models::Chat::ChatCompletion
-          | (
-            String completion_id,
-            ?request_options: OpenAI::request_opts
-          ) -> OpenAI::Models::Chat::ChatCompletion
+        def retrieve: (
+          String completion_id,
+          ?request_options: OpenAI::request_opts
+        ) -> OpenAI::Models::Chat::ChatCompletion
 
-        def update:
-          (
-            String completion_id,
OpenAI::Models::Chat::CompletionUpdateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Chat::ChatCompletion - | ( - String completion_id, - metadata: OpenAI::Models::metadata?, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletion + def update: ( + String completion_id, + metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Chat::ChatCompletion - def list: - ( - ?OpenAI::Models::Chat::CompletionListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] - | ( - ?after: String, - ?limit: Integer, - ?metadata: OpenAI::Models::metadata?, - ?model: String, - ?order: OpenAI::Models::Chat::CompletionListParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] + def list: ( + ?after: String, + ?limit: Integer, + ?metadata: OpenAI::Models::metadata?, + ?model: String, + ?order: OpenAI::Models::Chat::CompletionListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] - def delete: - ( - String completion_id, - ?OpenAI::Models::Chat::CompletionDeleteParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Chat::ChatCompletionDeleted - | ( - String completion_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletionDeleted + def delete: ( + String completion_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Chat::ChatCompletionDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/chat/completions/messages.rbs b/sig/openai/resources/chat/completions/messages.rbs index cfda0b34..f2fc47a1 100644 --- a/sig/openai/resources/chat/completions/messages.rbs +++ b/sig/openai/resources/chat/completions/messages.rbs @@ -3,19 +3,13 @@ module OpenAI class Chat class Completions class Messages - def list: - ( - String completion_id, - ?OpenAI::Models::Chat::Completions::MessageListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] - | ( - String completion_id, - ?after: String, - ?limit: Integer, - ?order: OpenAI::Models::Chat::Completions::MessageListParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] + def list: ( + String completion_id, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Chat::Completions::MessageListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index 1c0da611..42f91241 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -1,55 +1,47 @@ module OpenAI module Resources class Completions - def create: - ( - OpenAI::Models::CompletionCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Completion - | ( - model: OpenAI::Models::CompletionCreateParams::model, - prompt: OpenAI::Models::CompletionCreateParams::prompt?, - ?best_of: Integer?, - ?echo: bool?, - ?frequency_penalty: Float?, - ?logit_bias: ::Hash[Symbol, Integer]?, - ?logprobs: Integer?, - ?max_tokens: Integer?, - ?n: Integer?, - ?presence_penalty: Float?, - ?seed: Integer?, - ?stop: OpenAI::Models::CompletionCreateParams::stop?, - ?stream_options: 
OpenAI::Models::Chat::ChatCompletionStreamOptions?, - ?suffix: String?, - ?temperature: Float?, - ?top_p: Float?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Completion + def create: ( + model: OpenAI::Models::CompletionCreateParams::model, + prompt: OpenAI::Models::CompletionCreateParams::prompt?, + ?best_of: Integer?, + ?echo: bool?, + ?frequency_penalty: Float?, + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: Integer?, + ?max_tokens: Integer?, + ?n: Integer?, + ?presence_penalty: Float?, + ?seed: Integer?, + ?stop: OpenAI::Models::CompletionCreateParams::stop?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?suffix: String?, + ?temperature: Float?, + ?top_p: Float?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Completion - def create_streaming: - ( - OpenAI::Models::CompletionCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Stream[OpenAI::Models::Completion] - | ( - model: OpenAI::Models::CompletionCreateParams::model, - prompt: OpenAI::Models::CompletionCreateParams::prompt?, - ?best_of: Integer?, - ?echo: bool?, - ?frequency_penalty: Float?, - ?logit_bias: ::Hash[Symbol, Integer]?, - ?logprobs: Integer?, - ?max_tokens: Integer?, - ?n: Integer?, - ?presence_penalty: Float?, - ?seed: Integer?, - ?stop: OpenAI::Models::CompletionCreateParams::stop?, - ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, - ?suffix: String?, - ?temperature: Float?, - ?top_p: Float?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Completion] + def create_streaming: ( + model: OpenAI::Models::CompletionCreateParams::model, + prompt: OpenAI::Models::CompletionCreateParams::prompt?, + ?best_of: Integer?, + ?echo: bool?, + ?frequency_penalty: Float?, + ?logit_bias: ::Hash[Symbol, Integer]?, + ?logprobs: Integer?, + ?max_tokens: Integer?, + ?n: Integer?, + ?presence_penalty: Float?, + ?seed: Integer?, + ?stop: OpenAI::Models::CompletionCreateParams::stop?, + ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?suffix: String?, + ?temperature: Float?, + ?top_p: Float?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Stream[OpenAI::Models::Completion] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/embeddings.rbs b/sig/openai/resources/embeddings.rbs index 1f2c2bfe..3babf508 100644 --- a/sig/openai/resources/embeddings.rbs +++ b/sig/openai/resources/embeddings.rbs @@ -1,18 +1,14 @@ module OpenAI module Resources class Embeddings - def create: - ( - OpenAI::Models::EmbeddingCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::CreateEmbeddingResponse - | ( - input: OpenAI::Models::EmbeddingCreateParams::input, - model: OpenAI::Models::EmbeddingCreateParams::model, - ?dimensions: Integer, - ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::CreateEmbeddingResponse + def create: ( + input: OpenAI::Models::EmbeddingCreateParams::input, + model: OpenAI::Models::EmbeddingCreateParams::model, + ?dimensions: Integer, + ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::CreateEmbeddingResponse def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index d958d65e..f3913739 100644 
--- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -1,54 +1,34 @@ module OpenAI module Resources class Files - def create: - ( - OpenAI::Models::FileCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FileObject - | ( - file: IO | StringIO, - purpose: OpenAI::Models::file_purpose, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileObject + def create: ( + file: IO | StringIO, + purpose: OpenAI::Models::file_purpose, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FileObject - def retrieve: - ( - String file_id, - ?OpenAI::Models::FileRetrieveParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FileObject - | ( - String file_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileObject + def retrieve: ( + String file_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FileObject - def list: - ( - ?OpenAI::Models::FileListParams | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::FileObject] - | ( - ?after: String, - ?limit: Integer, - ?order: OpenAI::Models::FileListParams::order, - ?purpose: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FileObject] + def list: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::FileListParams::order, + ?purpose: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::FileObject] - def delete: - ( - String file_id, - ?OpenAI::Models::FileDeleteParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FileDeleted - | ( - String file_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileDeleted + def delete: ( + String file_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FileDeleted - def content: - ( - String file_id, - ?OpenAI::Models::FileContentParams | ::Hash[Symbol, top] params - ) -> top - | (String file_id, ?request_options: OpenAI::request_opts) -> top + def content: ( + String file_id, + ?request_options: OpenAI::request_opts + ) -> top def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/fine_tuning/jobs.rbs b/sig/openai/resources/fine_tuning/jobs.rbs index 1d411f33..339d5f85 100644 --- a/sig/openai/resources/fine_tuning/jobs.rbs +++ b/sig/openai/resources/fine_tuning/jobs.rbs @@ -4,70 +4,42 @@ module OpenAI class Jobs attr_reader checkpoints: OpenAI::Resources::FineTuning::Jobs::Checkpoints - def create: - ( - OpenAI::Models::FineTuning::JobCreateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FineTuning::FineTuningJob - | ( - model: OpenAI::Models::FineTuning::JobCreateParams::model, - training_file: String, - ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, - ?metadata: OpenAI::Models::metadata?, - ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, - ?seed: Integer?, - ?suffix: String?, - ?validation_file: String?, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + def create: ( + model: OpenAI::Models::FineTuning::JobCreateParams::model, + training_file: String, + ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, + ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + ?metadata: OpenAI::Models::metadata?, + ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + ?seed: Integer?, + ?suffix: String?, + 
?validation_file: String?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::FineTuningJob - def retrieve: - ( - String fine_tuning_job_id, - ?OpenAI::Models::FineTuning::JobRetrieveParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FineTuning::FineTuningJob - | ( - String fine_tuning_job_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + def retrieve: ( + String fine_tuning_job_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::FineTuningJob - def list: - ( - ?OpenAI::Models::FineTuning::JobListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] - | ( - ?after: String, - ?limit: Integer, - ?metadata: ::Hash[Symbol, String]?, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] + def list: ( + ?after: String, + ?limit: Integer, + ?metadata: ::Hash[Symbol, String]?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] - def cancel: - ( - String fine_tuning_job_id, - ?OpenAI::Models::FineTuning::JobCancelParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::FineTuning::FineTuningJob - | ( - String fine_tuning_job_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + def cancel: ( + String fine_tuning_job_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::FineTuningJob - def list_events: - ( - String fine_tuning_job_id, - ?OpenAI::Models::FineTuning::JobListEventsParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] - | ( - String fine_tuning_job_id, - ?after: String, - ?limit: Integer, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] + def list_events: ( + String fine_tuning_job_id, + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs index b4b532a4..9912513d 100644 --- a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs +++ b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs @@ -3,18 +3,12 @@ module OpenAI class FineTuning class Jobs class Checkpoints - def list: - ( - String fine_tuning_job_id, - ?OpenAI::Models::FineTuning::Jobs::CheckpointListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] - | ( - String fine_tuning_job_id, - ?after: String, - ?limit: Integer, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] + def list: ( + String fine_tuning_job_id, + ?after: String, + ?limit: Integer, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 058dc7d8..b1d09b4d 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -1,52 +1,39 @@ module OpenAI module Resources class Images - def create_variation: - ( - OpenAI::Models::ImageCreateVariationParams - | 
::Hash[Symbol, top] params - ) -> OpenAI::Models::ImagesResponse - | ( - image: IO | StringIO, - ?model: OpenAI::Models::ImageCreateVariationParams::model?, - ?n: Integer?, - ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, - ?size: OpenAI::Models::ImageCreateVariationParams::size?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + def create_variation: ( + image: IO | StringIO, + ?model: OpenAI::Models::ImageCreateVariationParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, + ?size: OpenAI::Models::ImageCreateVariationParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ImagesResponse - def edit: - ( - OpenAI::Models::ImageEditParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::ImagesResponse - | ( - image: IO | StringIO, - prompt: String, - ?mask: IO | StringIO, - ?model: OpenAI::Models::ImageEditParams::model?, - ?n: Integer?, - ?response_format: OpenAI::Models::ImageEditParams::response_format?, - ?size: OpenAI::Models::ImageEditParams::size?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + def edit: ( + image: IO | StringIO, + prompt: String, + ?mask: IO | StringIO, + ?model: OpenAI::Models::ImageEditParams::model?, + ?n: Integer?, + ?response_format: OpenAI::Models::ImageEditParams::response_format?, + ?size: OpenAI::Models::ImageEditParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ImagesResponse - def generate: - ( - OpenAI::Models::ImageGenerateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::ImagesResponse - | ( - prompt: String, - ?model: OpenAI::Models::ImageGenerateParams::model?, - ?n: Integer?, - ?quality: OpenAI::Models::ImageGenerateParams::quality, - ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, - ?size: OpenAI::Models::ImageGenerateParams::size?, - ?style: OpenAI::Models::ImageGenerateParams::style?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + def generate: ( + prompt: String, + ?model: OpenAI::Models::ImageGenerateParams::model?, + ?n: Integer?, + ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, + ?size: OpenAI::Models::ImageGenerateParams::size?, + ?style: OpenAI::Models::ImageGenerateParams::style?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ImagesResponse def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/models.rbs b/sig/openai/resources/models.rbs index 2b01ceac..cb91fa06 100644 --- a/sig/openai/resources/models.rbs +++ b/sig/openai/resources/models.rbs @@ -1,33 +1,19 @@ module OpenAI module Resources class Models - def retrieve: - ( - String model, - ?OpenAI::Models::ModelRetrieveParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Model - | ( - String model, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Model + def retrieve: ( + String model, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Model - def list: - ( - ?OpenAI::Models::ModelListParams | ::Hash[Symbol, top] params - ) -> OpenAI::Page[OpenAI::Models::Model] - | ( - ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::Model] + def list: ( + ?request_options: OpenAI::request_opts + ) -> OpenAI::Page[OpenAI::Models::Model] 
- def delete: - ( - String model, - ?OpenAI::Models::ModelDeleteParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::ModelDeleted - | ( - String model, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ModelDeleted + def delete: ( + String model, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ModelDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/moderations.rbs b/sig/openai/resources/moderations.rbs index 18063d79..1a10cff6 100644 --- a/sig/openai/resources/moderations.rbs +++ b/sig/openai/resources/moderations.rbs @@ -1,15 +1,11 @@ module OpenAI module Resources class Moderations - def create: - ( - OpenAI::Models::ModerationCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::ModerationCreateResponse - | ( - input: OpenAI::Models::ModerationCreateParams::input, - ?model: OpenAI::Models::ModerationCreateParams::model, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ModerationCreateResponse + def create: ( + input: OpenAI::Models::ModerationCreateParams::input, + ?model: OpenAI::Models::ModerationCreateParams::model, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ModerationCreateResponse def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 2e4c1ae1..3e91571f 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -3,77 +3,58 @@ module OpenAI class Responses attr_reader input_items: OpenAI::Resources::Responses::InputItems - def create: - ( - OpenAI::Models::Responses::ResponseCreateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Responses::Response - | ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, - ?include: ::Array[OpenAI::Models::Responses::response_includable]?, - ?instructions: String?, - ?max_output_tokens: Integer?, - ?metadata: OpenAI::Models::metadata?, - ?parallel_tool_calls: bool?, - ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, - ?store: bool?, - ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, - ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - ?tools: ::Array[OpenAI::Models::Responses::tool], - ?top_p: Float?, - ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Responses::Response + def create: ( + input: OpenAI::Models::Responses::ResponseCreateParams::input, + model: OpenAI::Models::Responses::ResponseCreateParams::model, + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Responses::Response - def create_streaming: - ( - OpenAI::Models::Responses::ResponseCreateParams - | ::Hash[Symbol, top] params - ) -> 
OpenAI::Stream[OpenAI::Models::Responses::response_stream_event] - | ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, - ?include: ::Array[OpenAI::Models::Responses::response_includable]?, - ?instructions: String?, - ?max_output_tokens: Integer?, - ?metadata: OpenAI::Models::metadata?, - ?parallel_tool_calls: bool?, - ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, - ?store: bool?, - ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, - ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, - ?tools: ::Array[OpenAI::Models::Responses::tool], - ?top_p: Float?, - ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - ?user: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Responses::response_stream_event] + def create_streaming: ( + input: OpenAI::Models::Responses::ResponseCreateParams::input, + model: OpenAI::Models::Responses::ResponseCreateParams::model, + ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?instructions: String?, + ?max_output_tokens: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?parallel_tool_calls: bool?, + ?previous_response_id: String?, + ?reasoning: OpenAI::Models::Reasoning?, + ?store: bool?, + ?temperature: Float?, + ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_p: Float?, + ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Stream[OpenAI::Models::Responses::response_stream_event] - def retrieve: - ( - String response_id, - ?OpenAI::Models::Responses::ResponseRetrieveParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Responses::Response - | ( - String response_id, - ?include: ::Array[OpenAI::Models::Responses::response_includable], - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Responses::Response + def retrieve: ( + String response_id, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Responses::Response - def delete: - ( - String response_id, - ?OpenAI::Models::Responses::ResponseDeleteParams - | ::Hash[Symbol, top] params - ) -> nil - | (String response_id, ?request_options: OpenAI::request_opts) -> nil + def delete: ( + String response_id, + ?request_options: OpenAI::request_opts + ) -> nil def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index fa47896a..63b4ed3c 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -2,20 +2,14 @@ module OpenAI module Resources class Responses class InputItems - def list: - ( - String response_id, - ?OpenAI::Models::Responses::InputItemListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] - | ( - String response_id, - ?after: String, - ?before: String, - ?limit: Integer, - ?order: OpenAI::Models::Responses::InputItemListParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] + def list: ( + String response_id, + ?after: String, + ?before: String, + 
?limit: Integer, + ?order: OpenAI::Models::Responses::InputItemListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/uploads.rbs b/sig/openai/resources/uploads.rbs index 631f9449..c60f0054 100644 --- a/sig/openai/resources/uploads.rbs +++ b/sig/openai/resources/uploads.rbs @@ -3,39 +3,25 @@ module OpenAI class Uploads attr_reader parts: OpenAI::Resources::Uploads::Parts - def create: - ( - OpenAI::Models::UploadCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Upload - | ( - bytes: Integer, - filename: String, - mime_type: String, - purpose: OpenAI::Models::file_purpose, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + def create: ( + bytes: Integer, + filename: String, + mime_type: String, + purpose: OpenAI::Models::file_purpose, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Upload - def cancel: - ( - String upload_id, - ?OpenAI::Models::UploadCancelParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Upload - | ( - String upload_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + def cancel: ( + String upload_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Upload - def complete: - ( - String upload_id, - OpenAI::Models::UploadCompleteParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Upload - | ( - String upload_id, - part_ids: ::Array[String], - ?md5: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + def complete: ( + String upload_id, + part_ids: ::Array[String], + ?md5: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Upload def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index 3dc1af0d..e00684f2 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -2,17 +2,11 @@ module OpenAI module Resources class Uploads class Parts - def create: - ( - String upload_id, - OpenAI::Models::Uploads::PartCreateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::Uploads::UploadPart - | ( - String upload_id, - data: IO | StringIO, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Uploads::UploadPart + def create: ( + String upload_id, + data: IO | StringIO, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Uploads::UploadPart def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index d12bd025..707af947 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -5,79 +5,50 @@ module OpenAI attr_reader file_batches: OpenAI::Resources::VectorStores::FileBatches - def create: - ( - ?OpenAI::Models::VectorStoreCreateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStore - | ( - ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, - ?file_ids: ::Array[String], - ?metadata: OpenAI::Models::metadata?, - ?name: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore - - def retrieve: - ( - String vector_store_id, - ?OpenAI::Models::VectorStoreRetrieveParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStore - | ( - String 
vector_store_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore - - def update: - ( - String vector_store_id, - ?OpenAI::Models::VectorStoreUpdateParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStore - | ( - String vector_store_id, - ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, - ?metadata: OpenAI::Models::metadata?, - ?name: String?, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore - - def list: - ( - ?OpenAI::Models::VectorStoreListParams | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStore] - | ( - ?after: String, - ?before: String, - ?limit: Integer, - ?order: OpenAI::Models::VectorStoreListParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStore] - - def delete: - ( - String vector_store_id, - ?OpenAI::Models::VectorStoreDeleteParams | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStoreDeleted - | ( - String vector_store_id, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStoreDeleted - - def search: - ( - String vector_store_id, - OpenAI::Models::VectorStoreSearchParams | ::Hash[Symbol, top] params - ) -> OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse] - | ( - String vector_store_id, - query: OpenAI::Models::VectorStoreSearchParams::query, - ?filters: OpenAI::Models::VectorStoreSearchParams::filters, - ?max_num_results: Integer, - ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, - ?rewrite_query: bool, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse] + def create: ( + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStore + + def retrieve: ( + String vector_store_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStore + + def update: ( + String vector_store_id, + ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + ?metadata: OpenAI::Models::metadata?, + ?name: String?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStore + + def list: ( + ?after: String, + ?before: String, + ?limit: Integer, + ?order: OpenAI::Models::VectorStoreListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::VectorStore] + + def delete: ( + String vector_store_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStoreDeleted + + def search: ( + String vector_store_id, + query: OpenAI::Models::VectorStoreSearchParams::query, + ?filters: OpenAI::Models::VectorStoreSearchParams::filters, + ?max_num_results: Integer, + ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ?rewrite_query: bool, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores/file_batches.rbs b/sig/openai/resources/vector_stores/file_batches.rbs index 470b66e3..5985e792 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -2,60 +2,36 @@ module OpenAI module Resources class VectorStores class FileBatches - def create: - ( - 
String vector_store_id, - OpenAI::Models::VectorStores::FileBatchCreateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - | ( - String vector_store_id, - file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, - ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + def create: ( + String vector_store_id, + file_ids: ::Array[String], + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - def retrieve: - ( - String batch_id, - OpenAI::Models::VectorStores::FileBatchRetrieveParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - | ( - String batch_id, - vector_store_id: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + def retrieve: ( + String batch_id, + vector_store_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - def cancel: - ( - String batch_id, - OpenAI::Models::VectorStores::FileBatchCancelParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - | ( - String batch_id, - vector_store_id: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + def cancel: ( + String batch_id, + vector_store_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch - def list_files: - ( - String batch_id, - OpenAI::Models::VectorStores::FileBatchListFilesParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] - | ( - String batch_id, - vector_store_id: String, - ?after: String, - ?before: String, - ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, - ?limit: Integer, - ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + def list_files: ( + String batch_id, + vector_store_id: String, + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index 8187380b..b0e11ad0 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -2,84 +2,48 @@ module OpenAI module Resources class VectorStores class Files - def create: - ( - String vector_store_id, - OpenAI::Models::VectorStores::FileCreateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFile - | ( - String vector_store_id, - file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, - ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?request_options: 
OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + def create: ( + String vector_store_id, + file_id: String, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFile - def retrieve: - ( - String file_id, - OpenAI::Models::VectorStores::FileRetrieveParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFile - | ( - String file_id, - vector_store_id: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + def retrieve: ( + String file_id, + vector_store_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFile - def update: - ( - String file_id, - OpenAI::Models::VectorStores::FileUpdateParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFile - | ( - String file_id, - vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + def update: ( + String file_id, + vector_store_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFile - def list: - ( - String vector_store_id, - ?OpenAI::Models::VectorStores::FileListParams - | ::Hash[Symbol, top] params - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] - | ( - String vector_store_id, - ?after: String, - ?before: String, - ?filter: OpenAI::Models::VectorStores::FileListParams::filter, - ?limit: Integer, - ?order: OpenAI::Models::VectorStores::FileListParams::order, - ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + def list: ( + String vector_store_id, + ?after: String, + ?before: String, + ?filter: OpenAI::Models::VectorStores::FileListParams::filter, + ?limit: Integer, + ?order: OpenAI::Models::VectorStores::FileListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] - def delete: - ( - String file_id, - OpenAI::Models::VectorStores::FileDeleteParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Models::VectorStores::VectorStoreFileDeleted - | ( - String file_id, - vector_store_id: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileDeleted + def delete: ( + String file_id, + vector_store_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::VectorStores::VectorStoreFileDeleted - def content: - ( - String file_id, - OpenAI::Models::VectorStores::FileContentParams - | ::Hash[Symbol, top] params - ) -> OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse] - | ( - String file_id, - vector_store_id: String, - ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse] + def content: ( + String file_id, + vector_store_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse] def initialize: (client: OpenAI::Client) -> void end From d680b41954060a6ceac5644016d1fd8acc6bb11f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 20:35:46 +0000 Subject: [PATCH 038/295] feat(api): o1-pro now available through the API (#43) --- .stats.yml | 2 +- lib/openai.rb | 2 ++ lib/openai/models/all_models.rb | 32 +++++++++++++++++++ lib/openai/models/chat_model.rb | 9 +++--- lib/openai/models/responses/response.rb | 29 ++--------------- .../responses/response_create_params.rb | 29 ++--------------- .../response_function_tool_call_item.rb | 2 +- lib/openai/models/responses_model.rb | 30 +++++++++++++++++ lib/openai/resources/responses.rb | 4 +-- rbi/lib/openai/models/all_models.rbi | 22 +++++++++++++ rbi/lib/openai/models/chat_model.rbi | 9 +++--- rbi/lib/openai/models/responses/response.rbi | 11 ------- .../responses/response_create_params.rbi | 11 ------- .../response_function_tool_call_item.rbi | 2 +- rbi/lib/openai/models/responses_model.rbi | 22 +++++++++++++ sig/openai/models/all_models.rbs | 27 ++++++++++++++++ sig/openai/models/chat_model.rbs | 18 +++++------ sig/openai/models/responses/response.rbs | 12 ++----- .../responses/response_create_params.rbs | 12 ++----- sig/openai/models/responses_model.rbs | 27 ++++++++++++++++ sig/openai/resources/responses.rbs | 4 +-- test/openai/resources/responses_test.rb | 6 ++-- 22 files changed, 200 insertions(+), 122 deletions(-) create mode 100644 lib/openai/models/all_models.rb create mode 100644 lib/openai/models/responses_model.rb create mode 100644 rbi/lib/openai/models/all_models.rbi create mode 100644 rbi/lib/openai/models/responses_model.rbi create mode 100644 sig/openai/models/all_models.rbs create mode 100644 sig/openai/models/responses_model.rbs diff --git a/.stats.yml b/.stats.yml index 2ffca777..16c6386b 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f3bce04386c4fcfd5037e0477fbaa39010003fd1558eb5185fe4a71dd6a05fdd.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-b26121d5df6eb5d3032a45a267473798b15fcfec76dd44a3256cf1238be05fa4.yml diff --git a/lib/openai.rb b/lib/openai.rb index a722c1a4..84c5d333 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -37,6 +37,7 @@ require_relative "openai/models/chat/chat_completion_message" require_relative "openai/models/fine_tuning/fine_tuning_job_wandb_integration_object" require_relative "openai/models/responses/response_function_tool_call" +require_relative "openai/models/all_models" require_relative "openai/models/audio/speech_create_params" require_relative "openai/models/audio/speech_model" require_relative "openai/models/audio/transcription" @@ -322,6 +323,7 @@ require_relative "openai/models/responses/tool_choice_options" require_relative "openai/models/responses/tool_choice_types" require_relative "openai/models/responses/web_search_tool" +require_relative "openai/models/responses_model" require_relative "openai/models/static_file_chunking_strategy" require_relative "openai/models/static_file_chunking_strategy_object" require_relative "openai/models/static_file_chunking_strategy_object_param" diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb new file mode 100644 index 00000000..b306f178 --- /dev/null +++ b/lib/openai/models/all_models.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @abstract + class AllModels < OpenAI::Union + variant String + + variant enum: -> { OpenAI::Models::ChatModel } + + variant enum: 
-> { OpenAI::Models::ChatModel } + + variant enum: -> { OpenAI::Models::AllModels::UnionMember4 } + + # @abstract + class UnionMember4 < OpenAI::Enum + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + finalize! + end + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::UnionMember4)] + # def variants; end + # end + end + end +end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 29b0a851..8edd5e1a 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -12,11 +12,6 @@ class ChatModel < OpenAI::Enum O1_PREVIEW_2024_09_12 = :"o1-preview-2024-09-12" O1_MINI = :"o1-mini" O1_MINI_2024_09_12 = :"o1-mini-2024-09-12" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_02_04 = :"computer-use-preview-2025-02-04" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" GPT_4O = :"gpt-4o" GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" @@ -26,6 +21,10 @@ class ChatModel < OpenAI::Enum GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" + GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" + GPT_4O_MINI_SEARCH_PREVIEW = :"gpt-4o-mini-search-preview" + GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST = :"chatgpt-4o-latest" GPT_4O_MINI = :"gpt-4o-mini" GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 98b2c145..8b979829 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -57,8 +57,8 @@ class Response < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel] - required :model, union: -> { OpenAI::Models::Responses::Response::Model } + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute object # The object type of this resource - always set to `response`. @@ -222,7 +222,7 @@ class Response < OpenAI::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] # # @param output [Array] # # @param parallel_tool_calls [Boolean] # # @param temperature [Float, nil] @@ -300,29 +300,6 @@ class Reason < OpenAI::Enum end end - # @abstract - # - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. 
Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - class Model < OpenAI::Union - variant String - - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI - # offers a wide range of models with different capabilities, performance - # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models) - # to browse and compare available models. - variant enum: -> { OpenAI::Models::ChatModel } - - # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def variants; end - # end - end - # @abstract # # How the model should select which tool (or tools) to use when generating a diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 4c6c485b..05ef2120 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -29,8 +29,8 @@ class ResponseCreateParams < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel] - required :model, union: -> { OpenAI::Models::Responses::ResponseCreateParams::Model } + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute include # Specify additional output data to include in the model response. Currently @@ -198,7 +198,7 @@ class ResponseCreateParams < OpenAI::BaseModel # @!parse # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] # # @param include [Array, nil] # # @param instructions [String, nil] # # @param max_output_tokens [Integer, nil] @@ -269,29 +269,6 @@ class Input < OpenAI::Union # end end - # @abstract - # - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - class Model < OpenAI::Union - variant String - - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI - # offers a wide range of models with different capabilities, performance - # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models) - # to browse and compare available models. - variant enum: -> { OpenAI::Models::ChatModel } - - # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def variants; end - # end - end - # @abstract # # How the model should select which tool (or tools) to use when generating a diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index fa7413ea..9317cee6 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -5,7 +5,7 @@ module Models module Responses class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall # @!attribute id - # The unique ID of the function call tool output. 
+ # The unique ID of the function tool call. # # @return [String] required :id, String diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb new file mode 100644 index 00000000..d96fdbc5 --- /dev/null +++ b/lib/openai/models/responses_model.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @abstract + class ResponsesModel < OpenAI::Union + variant String + + variant enum: -> { OpenAI::Models::ChatModel } + + variant enum: -> { OpenAI::Models::ResponsesModel::UnionMember2 } + + # @abstract + class UnionMember2 < OpenAI::Enum + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + finalize! + end + + # @!parse + # class << self + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::UnionMember2)] + # def variants; end + # end + end + end +end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index fdbe46bc..b09ce757 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -30,7 +30,7 @@ class Responses # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # @option params [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare @@ -163,7 +163,7 @@ def create(params) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # @option params [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi new file mode 100644 index 00000000..d124a099 --- /dev/null +++ b/rbi/lib/openai/models/all_models.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Models + class AllModels < OpenAI::Union + abstract! + + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + class UnionMember4 < OpenAI::Enum + abstract! 
+ + Value = type_template(:out) { {fixed: Symbol} } + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + end + end + end +end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index acb06a2d..9ec815a5 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -15,11 +15,6 @@ module OpenAI O1_PREVIEW_2024_09_12 = :"o1-preview-2024-09-12" O1_MINI = :"o1-mini" O1_MINI_2024_09_12 = :"o1-mini-2024-09-12" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_02_04 = :"computer-use-preview-2025-02-04" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" GPT_4O = :"gpt-4o" GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" @@ -29,6 +24,10 @@ module OpenAI GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" + GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" + GPT_4O_MINI_SEARCH_PREVIEW = :"gpt-4o-mini-search-preview" + GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST = :"chatgpt-4o-latest" GPT_4O_MINI = :"gpt-4o-mini" GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 1ece0d1b..d0e82f4e 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -502,17 +502,6 @@ module OpenAI end end - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - class Model < OpenAI::Union - abstract! - - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - end - # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index cc441334..21d6b940 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -385,17 +385,6 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInput)} } end - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - class Model < OpenAI::Union - abstract! - - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - end - # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. 
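
A minimal usage sketch of the widened union introduced by the hunks above, which replace the per-class `Model` unions with the shared `OpenAI::Models::ResponsesModel` union so that `model:` accepts a plain String, a `ChatModel` value, or one of the responses-only models added here. The client setup is an assumption for illustration; only `responses.create`, its `input:`/`model:` parameters, and the `:"o1-pro"` value come from this patch:

    require "openai"

    # Assumed client configuration; not part of this patch.
    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # `model:` may be a raw String, a ChatModel constant, or a
    # responses-only model such as :"o1-pro".
    response = client.responses.create(input: "Say hello.", model: :"o1-pro")
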
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi index 6e1a4cce..d0e917fc 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall - # The unique ID of the function call tool output. + # The unique ID of the function tool call. sig { returns(String) } def id end diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi new file mode 100644 index 00000000..ecad7412 --- /dev/null +++ b/rbi/lib/openai/models/responses_model.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Models + class ResponsesModel < OpenAI::Union + abstract! + + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + class UnionMember2 < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + end + end + end +end diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs new file mode 100644 index 00000000..7abb928a --- /dev/null +++ b/sig/openai/models/all_models.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Models + type all_models = + String + | OpenAI::Models::chat_model + | OpenAI::Models::AllModels::union_member4 + + class AllModels < OpenAI::Union + type union_member4 = + :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" + + class UnionMember4 < OpenAI::Enum + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + + def self.values: -> ::Array[OpenAI::Models::AllModels::union_member4] + end + + def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::chat_model, OpenAI::Models::AllModels::union_member4] + end + end +end diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 5940f36f..75caff71 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -9,11 +9,6 @@ module OpenAI | :"o1-preview-2024-09-12" | :"o1-mini" | :"o1-mini-2024-09-12" - | :"computer-use-preview" - | :"computer-use-preview-2025-02-04" - | :"computer-use-preview-2025-03-11" - | :"gpt-4.5-preview" - | :"gpt-4.5-preview-2025-02-27" | :"gpt-4o" | :"gpt-4o-2024-11-20" | :"gpt-4o-2024-08-06" @@ -23,6 +18,10 @@ module OpenAI | :"gpt-4o-audio-preview-2024-12-17" | :"gpt-4o-mini-audio-preview" | :"gpt-4o-mini-audio-preview-2024-12-17" + | :"gpt-4o-search-preview" + | :"gpt-4o-mini-search-preview" + | :"gpt-4o-search-preview-2025-03-11" + | :"gpt-4o-mini-search-preview-2025-03-11" | :"chatgpt-4o-latest" | :"gpt-4o-mini" | :"gpt-4o-mini-2024-07-18" @@ -55,11 +54,6 @@ module OpenAI O1_PREVIEW_2024_09_12: :"o1-preview-2024-09-12" O1_MINI: :"o1-mini" O1_MINI_2024_09_12: :"o1-mini-2024-09-12" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_02_04: :"computer-use-preview-2025-02-04" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - GPT_4_5_PREVIEW: :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27: 
:"gpt-4.5-preview-2025-02-27" GPT_4O: :"gpt-4o" GPT_4O_2024_11_20: :"gpt-4o-2024-11-20" GPT_4O_2024_08_06: :"gpt-4o-2024-08-06" @@ -69,6 +63,10 @@ module OpenAI GPT_4O_AUDIO_PREVIEW_2024_12_17: :"gpt-4o-audio-preview-2024-12-17" GPT_4O_MINI_AUDIO_PREVIEW: :"gpt-4o-mini-audio-preview" GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17: :"gpt-4o-mini-audio-preview-2024-12-17" + GPT_4O_SEARCH_PREVIEW: :"gpt-4o-search-preview" + GPT_4O_MINI_SEARCH_PREVIEW: :"gpt-4o-mini-search-preview" + GPT_4O_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-search-preview-2025-03-11" + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST: :"chatgpt-4o-latest" GPT_4O_MINI: :"gpt-4o-mini" GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index f8f5c22b..39953b70 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -9,7 +9,7 @@ module OpenAI incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, instructions: String?, metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Responses::Response::model, + model: OpenAI::Models::responses_model, object: :response, output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, @@ -40,7 +40,7 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - attr_accessor model: OpenAI::Models::Responses::Response::model + attr_accessor model: OpenAI::Models::responses_model attr_accessor object: :response @@ -93,7 +93,7 @@ module OpenAI incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, instructions: String?, metadata: OpenAI::Models::metadata?, - model: OpenAI::Models::Responses::Response::model, + model: OpenAI::Models::responses_model, output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, temperature: Float?, @@ -141,12 +141,6 @@ module OpenAI end end - type model = String | OpenAI::Models::chat_model - - class Model < OpenAI::Union - def self.variants: -> [String, OpenAI::Models::chat_model] - end - type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Models::Responses::ToolChoiceTypes diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 2f4ca294..697640de 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type response_create_params = { input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, + model: OpenAI::Models::responses_model, include: ::Array[OpenAI::Models::Responses::response_includable]?, instructions: String?, max_output_tokens: Integer?, @@ -29,7 +29,7 @@ module OpenAI attr_accessor input: OpenAI::Models::Responses::ResponseCreateParams::input - attr_accessor model: OpenAI::Models::Responses::ResponseCreateParams::model + attr_accessor model: OpenAI::Models::responses_model attr_accessor include: ::Array[OpenAI::Models::Responses::response_includable]? 
@@ -77,7 +77,7 @@ module OpenAI def initialize: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, + model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -104,12 +104,6 @@ module OpenAI def self.variants: -> [String, OpenAI::Models::Responses::response_input] end - type model = String | OpenAI::Models::chat_model - - class Model < OpenAI::Union - def self.variants: -> [String, OpenAI::Models::chat_model] - end - type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Models::Responses::ToolChoiceTypes diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs new file mode 100644 index 00000000..582ad254 --- /dev/null +++ b/sig/openai/models/responses_model.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Models + type responses_model = + String + | OpenAI::Models::chat_model + | OpenAI::Models::ResponsesModel::union_member2 + + class ResponsesModel < OpenAI::Union + type union_member2 = + :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" + + class UnionMember2 < OpenAI::Enum + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + + def self.values: -> ::Array[OpenAI::Models::ResponsesModel::union_member2] + end + + def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::ResponsesModel::union_member2] + end + end +end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 3e91571f..ba80da78 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, + model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -26,7 +26,7 @@ module OpenAI def create_streaming: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::Responses::ResponseCreateParams::model, + model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 6ecd70a3..1f0c3d9f 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::ResponsesTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.responses.create(input: "string", model: :"o3-mini", stream: true) + response = @openai.responses.create(input: "string", model: :"gpt-4o", stream: true) assert_pattern do response => OpenAI::Models::Responses::Response @@ -18,7 +18,7 @@ def test_create_required_params incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil, instructions: String | nil, metadata: ^(OpenAI::HashOf[String]) | nil, - model: OpenAI::Models::Responses::Response::Model, + model: OpenAI::Models::ResponsesModel, object: Symbol, output: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), parallel_tool_calls: 
OpenAI::BooleanModel, @@ -53,7 +53,7 @@ def test_retrieve incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil, instructions: String | nil, metadata: ^(OpenAI::HashOf[String]) | nil, - model: OpenAI::Models::Responses::Response::Model, + model: OpenAI::Models::ResponsesModel, object: Symbol, output: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), parallel_tool_calls: OpenAI::BooleanModel, From 3ba7280be6c70d913285adbe537bcda664880671 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 23:16:20 +0000 Subject: [PATCH 039/295] feat!: support `for item in stream` style iteration on `Stream`s (#44) --- README.md | 2 +- lib/openai/base_client.rb | 2 +- lib/openai/base_stream.rb | 15 +++--- lib/openai/resources/beta/threads.rb | 9 +++- lib/openai/resources/beta/threads/runs.rb | 18 ++++++- lib/openai/resources/chat/completions.rb | 9 +++- lib/openai/resources/completions.rb | 9 +++- lib/openai/resources/responses.rb | 9 +++- lib/openai/stream.rb | 7 ++- rbi/lib/openai/base_stream.rbi | 10 ++-- rbi/lib/openai/resources/beta/threads.rbi | 8 +++ .../openai/resources/beta/threads/runs.rbi | 18 +++++++ rbi/lib/openai/resources/chat/completions.rbi | 8 +++ rbi/lib/openai/resources/completions.rbi | 8 +++ rbi/lib/openai/resources/responses.rbi | 8 +++ sig/openai/base_stream.rbs | 8 +-- test/openai/client_test.rb | 50 +++---------------- .../resources/beta/threads/runs_test.rb | 9 +--- test/openai/resources/beta/threads_test.rb | 2 +- .../openai/resources/chat/completions_test.rb | 6 +-- test/openai/resources/completions_test.rb | 3 +- test/openai/resources/responses_test.rb | 2 +- 22 files changed, 135 insertions(+), 85 deletions(-) diff --git a/README.md b/README.md index 1b269d07..ff557c2d 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,7 @@ stream = openai.chat.completions.create_streaming( model: "gpt-4o" ) -stream.for_each do |completion| +stream.each do |completion| puts(completion) end ``` diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb index fbbda8f4..4178f663 100644 --- a/lib/openai/base_client.rb +++ b/lib/openai/base_client.rb @@ -437,7 +437,7 @@ def request(req) decoded = OpenAI::Util.decode_content(response, stream: stream) case req in { stream: Class => st } - st.new(model: model, url: url, status: status, response: response, messages: decoded) + st.new(model: model, url: url, status: status, response: response, stream: decoded) in { page: Class => page } page.new(client: self, req: req, headers: response, page_data: decoded) else diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index 77e0c71a..a4dbe0bc 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -3,7 +3,7 @@ module OpenAI # @example # ```ruby - # stream.for_each do |chunk| + # stream.each do |chunk| # puts(chunk) # end # ``` @@ -12,7 +12,6 @@ module OpenAI # ```ruby # chunks = # stream - # .to_enum # .lazy # .select { _1.object_id.even? } # .map(&:itself) @@ -22,6 +21,8 @@ module OpenAI # chunks => Array # ``` module BaseStream + include Enumerable + # @return [void] def close = OpenAI::Util.close_fused!(@iterator) @@ -33,14 +34,14 @@ def close = OpenAI::Util.close_fused!(@iterator) # @param blk [Proc] # # @return [void] - def for_each(&) + def each(&) unless block_given? 
raise ArgumentError.new("A block must be given to ##{__method__}") end @iterator.each(&) end - # @return [Enumerable] + # @return [Enumerator] def to_enum = @iterator alias_method :enum_for, :to_enum @@ -51,13 +52,13 @@ def to_enum = @iterator # @param url [URI::Generic] # @param status [Integer] # @param response [Net::HTTPResponse] - # @param messages [Enumerable] - def initialize(model:, url:, status:, response:, messages:) + # @param stream [Enumerable] + def initialize(model:, url:, status:, response:, stream:) @model = model @url = url @status = status @response = response - @messages = messages + @stream = stream @iterator = iterator end end diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index d8f2e660..27b67c15 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -208,7 +208,10 @@ def delete(thread_id, params = {}) # @return [OpenAI::Models::Beta::Threads::Run] def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#create_and_run_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "threads/runs", @@ -315,6 +318,10 @@ def create_and_run(params) # @return [OpenAI::Stream] def create_and_run_streaming(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create_and_run` for the non-streaming use case." + raise ArgumentError.new(message) + end parsed.store(:stream, true) @client.request( method: :post, diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 9f5a4401..fa61373d 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -125,7 +125,10 @@ class Runs # @return [OpenAI::Models::Beta::Threads::Run] def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#create_streaming` for the streaming use case." + raise ArgumentError.new(message) + end query_params = [:include] @client.request( method: :post, @@ -254,6 +257,10 @@ def create(thread_id, params) # @return [OpenAI::Stream] def create_streaming(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create` for the non-streaming use case." + raise ArgumentError.new(message) + end parsed.store(:stream, true) query_params = [:include] @client.request( @@ -410,7 +417,10 @@ def cancel(run_id, params) # @return [OpenAI::Models::Beta::Threads::Run] def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#submit_tool_outputs_streaming` for the streaming use case." 
+ raise ArgumentError.new(message) + end thread_id = parsed.delete(:thread_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -444,6 +454,10 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Stream] def submit_tool_outputs_streaming(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#submit_tool_outputs` for the non-streaming use case." + raise ArgumentError.new(message) + end parsed.store(:stream, true) thread_id = parsed.delete(:thread_id) do diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 94e76ede..52e05866 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -215,7 +215,10 @@ class Completions # @return [OpenAI::Models::Chat::ChatCompletion] def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#create_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "chat/completions", @@ -433,6 +436,10 @@ def create(params) # @return [OpenAI::Stream] def create_streaming(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create` for the non-streaming use case." + raise ArgumentError.new(message) + end parsed.store(:stream, true) @client.request( method: :post, diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 79cc6805..2cf80ec1 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -115,7 +115,10 @@ class Completions # @return [OpenAI::Models::Completion] def create(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#create_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "completions", @@ -237,6 +240,10 @@ def create(params) # @return [OpenAI::Stream] def create_streaming(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create` for the non-streaming use case." + raise ArgumentError.new(message) + end parsed.store(:stream, true) @client.request( method: :post, diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index b09ce757..738b3a39 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -129,7 +129,10 @@ class Responses # @return [OpenAI::Models::Responses::Response] def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) - parsed.delete(:stream) + if parsed[:stream] + message = "Please use `#create_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "responses", @@ -262,6 +265,10 @@ def create(params) # @return [OpenAI::Stream] def create_streaming(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create` for the non-streaming use case." 
+ raise ArgumentError.new(message) + end parsed.store(:stream, true) @client.request( method: :post, diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index 8e218404..e510a11d 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -3,7 +3,7 @@ module OpenAI # @example # ```ruby - # stream.for_each do |event| + # stream.each do |event| # puts(event) # end # ``` @@ -12,7 +12,6 @@ module OpenAI # ```ruby # events = # stream - # .to_enum # .lazy # .select { _1.object_id.even? } # .map(&:itself) @@ -29,10 +28,10 @@ class Stream # @return [Enumerable] private def iterator # rubocop:disable Metrics/BlockLength - @iterator ||= OpenAI::Util.chain_fused(@messages) do |y| + @iterator ||= OpenAI::Util.chain_fused(@stream) do |y| consume = false - @messages.each do |msg| + @stream.each do |msg| next if consume case msg diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index c5f6c58e..9b1654a6 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -2,6 +2,8 @@ module OpenAI module BaseStream + include Enumerable + Message = type_member(:in) Elem = type_member(:out) @@ -15,10 +17,10 @@ module OpenAI end sig { params(blk: T.proc.params(arg0: Elem).void).void } - def for_each(&blk) + def each(&blk) end - sig { returns(T::Enumerable[Elem]) } + sig { returns(T::Enumerator[Elem]) } def to_enum end @@ -31,11 +33,11 @@ module OpenAI url: URI::Generic, status: Integer, response: Net::HTTPResponse, - messages: T::Enumerable[Message] + stream: T::Enumerable[Message] ) .void end - def initialize(model:, url:, status:, response:, messages:) + def initialize(model:, url:, status:, response:, stream:) end end end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 2170549d..aa843bbe 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -134,6 +134,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Run) @@ -227,6 +228,9 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the initial context window of the run. truncation_strategy: nil, + # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` + # or `#create_and_run` for streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -264,6 +268,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( @@ -386,6 +391,9 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the initial context window of the run. truncation_strategy: nil, + # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` + # or `#create_and_run` for streaming and non-streaming use cases, respectively.
+ stream: true, request_options: {} ) end diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index c2a318dd..d4b1a449 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -45,6 +45,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Run) @@ -158,6 +159,9 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the initial context window of the run. truncation_strategy: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -198,6 +202,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( @@ -340,6 +345,9 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the initial context window of the run. truncation_strategy: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end @@ -452,6 +460,7 @@ module OpenAI run_id: String, thread_id: String, tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Run) @@ -465,6 +474,10 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, + # There is no need to provide `stream:`. Instead, use + # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and + # non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -478,6 +491,7 @@ module OpenAI run_id: String, thread_id: String, tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( @@ -520,6 +534,10 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, + # There is no need to provide `stream:`. Instead, use + # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and + # non-streaming use cases, respectively.
+ stream: true, request_options: {} ) end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 7f6a851c..7358d205 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -70,6 +70,7 @@ module OpenAI top_p: T.nilable(Float), user: String, web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Chat::ChatCompletion) @@ -258,6 +259,9 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -324,6 +328,7 @@ module OpenAI top_p: T.nilable(Float), user: String, web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) @@ -512,6 +517,9 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 949f94ed..8e9e52d4 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -30,6 +30,7 @@ module OpenAI temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Completion) @@ -138,6 +139,9 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -169,6 +173,7 @@ module OpenAI temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Stream[OpenAI::Models::Completion]) @@ -277,6 +282,9 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. 
+ stream: true, request_options: {} ) end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 823cc8a1..c285a853 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -44,6 +44,7 @@ module OpenAI top_p: T.nilable(Float), truncation: T.nilable(Symbol), user: String, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Responses::Response) @@ -153,6 +154,9 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end @@ -194,6 +198,7 @@ module OpenAI top_p: T.nilable(Float), truncation: T.nilable(Symbol), user: String, + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( @@ -340,6 +345,9 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/sig/openai/base_stream.rbs b/sig/openai/base_stream.rbs index e5d9ec89..caa21732 100644 --- a/sig/openai/base_stream.rbs +++ b/sig/openai/base_stream.rbs @@ -1,12 +1,14 @@ module OpenAI module BaseStream[Message, Elem] + include Enumerable[Elem] + def close: -> void private def iterator: -> Enumerable[Elem] - def for_each: { (Elem arg0) -> void } -> void + def each: { (Elem arg0) -> void } -> void - def to_enum: -> Enumerable[Elem] + def to_enum: -> Enumerator[Elem] alias enum_for to_enum @@ -15,7 +17,7 @@ module OpenAI url: URI::Generic, status: Integer, response: top, - messages: Enumerable[Message] + stream: Enumerable[Message] ) -> void end end diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 9a3f400f..cbb0c21d 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -56,11 +56,7 @@ def test_client_default_request_default_retry_attempts openai.requester = requester assert_raises(OpenAI::InternalServerError) do - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end assert_equal(3, requester.attempts.length) @@ -72,11 +68,7 @@ def test_client_given_request_default_retry_attempts openai.requester = requester assert_raises(OpenAI::InternalServerError) do - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end assert_equal(4, requester.attempts.length) @@ -91,7 +83,6 @@ def test_client_default_request_given_retry_attempts openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {max_retries: 3} ) end @@ -108,7 +99,6 @@ def test_client_given_request_given_retry_attempts openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, 
request_options: {max_retries: 4} ) end @@ -122,11 +112,7 @@ def test_client_retry_after_seconds openai.requester = requester assert_raises(OpenAI::InternalServerError) do - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end assert_equal(2, requester.attempts.length) @@ -140,11 +126,7 @@ def test_client_retry_after_date assert_raises(OpenAI::InternalServerError) do Thread.current.thread_variable_set(:time_now, Time.now) - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") Thread.current.thread_variable_set(:time_now, nil) end @@ -158,11 +140,7 @@ def test_client_retry_after_ms openai.requester = requester assert_raises(OpenAI::InternalServerError) do - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end assert_equal(2, requester.attempts.length) @@ -175,11 +153,7 @@ def test_retry_count_header openai.requester = requester assert_raises(OpenAI::InternalServerError) do - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end retry_count_headers = requester.attempts.map { _1[:headers]["x-stainless-retry-count"] } @@ -195,7 +169,6 @@ def test_omit_retry_count_header openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {"x-stainless-retry-count" => nil}} ) end @@ -213,7 +186,6 @@ def test_overwrite_retry_count_header openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {"x-stainless-retry-count" => "42"}} ) end @@ -231,7 +203,6 @@ def test_client_redirect_307 openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {}} ) end @@ -254,7 +225,6 @@ def test_client_redirect_303 openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {}} ) end @@ -274,7 +244,6 @@ def test_client_redirect_auth_keep_same_origin openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {"Authorization" => "Bearer xyz"}} ) end @@ -294,7 +263,6 @@ def test_client_redirect_auth_strip_cross_origin openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", - stream: true, request_options: {extra_headers: {"Authorization" => "Bearer xyz"}} ) end @@ -306,11 +274,7 @@ def test_default_headers openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") requester = MockRequester.new(200, {}, {}) openai.requester = requester - openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + openai.chat.completions.create(messages: 
[{content: "string", role: :developer}], model: :"o3-mini") headers = requester.attempts.first[:headers] refute_empty(headers["accept"]) diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 7c074c33..9341955e 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Beta::Threads::RunsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.beta.threads.runs.create("thread_id", assistant_id: "assistant_id", stream: true) + response = @openai.beta.threads.runs.create("thread_id", assistant_id: "assistant_id") assert_pattern do response => OpenAI::Models::Beta::Threads::Run @@ -210,12 +210,7 @@ def test_cancel_required_params def test_submit_tool_outputs_required_params response = - @openai.beta.threads.runs.submit_tool_outputs( - "run_id", - thread_id: "thread_id", - stream: true, - tool_outputs: [{}] - ) + @openai.beta.threads.runs.submit_tool_outputs("run_id", thread_id: "thread_id", tool_outputs: [{}]) assert_pattern do response => OpenAI::Models::Beta::Threads::Run diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index 4203a6f3..f7f1021f 100644 --- a/test/openai/resources/beta/threads_test.rb +++ b/test/openai/resources/beta/threads_test.rb @@ -74,7 +74,7 @@ def test_delete end def test_create_and_run_required_params - response = @openai.beta.threads.create_and_run(assistant_id: "assistant_id", stream: true) + response = @openai.beta.threads.create_and_run(assistant_id: "assistant_id") assert_pattern do response => OpenAI::Models::Beta::Threads::Run diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index 0d861145..ffbd392e 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -5,11 +5,7 @@ class OpenAI::Test::Resources::Chat::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params response = - @openai.chat.completions.create( - messages: [{content: "string", role: :developer}], - model: :"o3-mini", - stream: true - ) + @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") assert_pattern do response => OpenAI::Models::Chat::ChatCompletion diff --git a/test/openai/resources/completions_test.rb b/test/openai/resources/completions_test.rb index b402a581..f7d94ab8 100644 --- a/test/openai/resources/completions_test.rb +++ b/test/openai/resources/completions_test.rb @@ -4,8 +4,7 @@ class OpenAI::Test::Resources::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = - @openai.completions.create(model: :"gpt-3.5-turbo-instruct", prompt: "This is a test.", stream: true) + response = @openai.completions.create(model: :"gpt-3.5-turbo-instruct", prompt: "This is a test.") assert_pattern do response => OpenAI::Models::Completion diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 1f0c3d9f..d0e0157e 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::ResponsesTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.responses.create(input: "string", model: :"gpt-4o", stream: true) + response = @openai.responses.create(input: "string", model: :"gpt-4o") 
assert_pattern do response => OpenAI::Models::Responses::Response From 6f5f74ea286f0e113d40a8137d3da8cc09a625b2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 14:19:42 +0000 Subject: [PATCH 040/295] fix: resolve tapioca derived sorbet errors (#45) --- lib/openai/base_model.rb | 38 +++-- rbi/lib/openai/base_model.rbi | 20 ++- rbi/lib/openai/base_page.rbi | 2 - rbi/lib/openai/base_stream.rbi | 2 - rbi/lib/openai/models/batch.rbi | 8 +- rbi/lib/openai/models/batch_create_params.rbi | 8 +- rbi/lib/openai/models/beta/assistant.rbi | 8 +- .../models/beta/assistant_create_params.rbi | 18 +-- .../models/beta/assistant_update_params.rbi | 8 +- rbi/lib/openai/models/beta/thread.rbi | 8 +- .../beta/thread_create_and_run_params.rbi | 44 +++--- .../models/beta/thread_create_params.rbi | 36 ++--- .../models/beta/thread_update_params.rbi | 8 +- .../openai/models/beta/threads/message.rbi | 8 +- .../beta/threads/message_create_params.rbi | 18 +-- .../beta/threads/message_update_params.rbi | 12 +- rbi/lib/openai/models/beta/threads/run.rbi | 8 +- .../models/beta/threads/run_create_params.rbi | 26 ++-- .../models/beta/threads/run_update_params.rbi | 12 +- .../models/beta/threads/runs/run_step.rbi | 8 +- ...hat_completion_assistant_message_param.rbi | 12 +- ...hat_completion_developer_message_param.rbi | 2 +- .../chat_completion_prediction_content.rbi | 2 +- .../chat_completion_system_message_param.rbi | 2 +- .../chat_completion_tool_message_param.rbi | 2 +- .../chat_completion_user_message_param.rbi | 11 +- .../models/chat/completion_create_params.rbi | 20 +-- .../models/chat/completion_list_params.rbi | 8 +- .../models/chat/completion_update_params.rbi | 15 +- .../models/completion_create_params.rbi | 8 +- .../openai/models/embedding_create_params.rbi | 6 +- .../models/fine_tuning/fine_tuning_job.rbi | 8 +- .../models/fine_tuning/job_create_params.rbi | 8 +- rbi/lib/openai/models/function_definition.rbi | 13 +- rbi/lib/openai/models/function_parameters.rbi | 2 +- rbi/lib/openai/models/metadata.rbi | 2 +- .../models/moderation_create_params.rbi | 4 +- .../models/responses/easy_input_message.rbi | 82 ++++++++++- rbi/lib/openai/models/responses/response.rbi | 8 +- .../responses/response_create_params.rbi | 135 ++++++++++++++++-- .../models/responses/response_input.rbi | 18 +-- .../models/responses/response_input_item.rbi | 53 ++++++- .../response_input_message_content_list.rbi | 10 +- .../responses/response_input_message_item.rbi | 48 ++++++- rbi/lib/openai/models/vector_store.rbi | 8 +- .../models/vector_store_create_params.rbi | 8 +- .../models/vector_store_search_params.rbi | 2 +- .../models/vector_store_update_params.rbi | 8 +- rbi/lib/openai/resources/batches.rbi | 2 +- rbi/lib/openai/resources/beta/assistants.rbi | 4 +- rbi/lib/openai/resources/beta/threads.rbi | 8 +- .../resources/beta/threads/messages.rbi | 4 +- .../openai/resources/beta/threads/runs.rbi | 6 +- rbi/lib/openai/resources/chat/completions.rbi | 8 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 42 +++++- rbi/lib/openai/resources/vector_stores.rbi | 4 +- sig/openai/base_model.rbs | 8 +- 58 files changed, 582 insertions(+), 311 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 5c1a2499..5a63f4fa 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -182,8 +182,6 @@ class Unknown # rubocop:disable Lint/UnusedMethodArgument - private_class_method :new - # @param 
other [Object] # # @return [Boolean] @@ -233,8 +231,6 @@ def try_strict_coerce(value) class BooleanModel extend OpenAI::Converter - private_class_method :new - # @param other [Object] # # @return [Boolean] @@ -336,8 +332,6 @@ def values = (@values ||= constants.map { const_get(_1) }) private def finalize! = values end - private_class_method :new - # @param other [Object] # # @return [Boolean] @@ -517,8 +511,6 @@ def variants # rubocop:disable Style/HashEachMethods # rubocop:disable Style/CaseEquality - private_class_method :new - # @param other [Object] # # @return [Boolean] @@ -629,9 +621,18 @@ def try_strict_coerce(value) class ArrayOf include OpenAI::Converter - private_class_method :new - - def self.[](...) = new(...) + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) # @param other [Object] # @@ -752,9 +753,18 @@ def initialize(type_info, spec = {}) class HashOf include OpenAI::Converter - private_class_method :new - - def self.[](...) = new(...) + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) # @param other [Object] # diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 4b5b673d..3312ef84 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -308,8 +308,6 @@ module OpenAI abstract! final! - Elem = type_member(:out) - sig(:final) do params( type_info: T.any( @@ -360,7 +358,7 @@ module OpenAI end # @api private - sig(:final) { returns(Elem) } + sig(:final) { returns(T.anything) } protected def item_type end @@ -389,8 +387,6 @@ module OpenAI abstract! final! - Elem = type_member(:out) - sig(:final) do params( type_info: T.any( @@ -441,7 +437,7 @@ module OpenAI end # @api private - sig(:final) { returns(Elem) } + sig(:final) { returns(T.anything) } protected def item_type end @@ -628,7 +624,17 @@ module OpenAI def to_h end - alias_method :to_hash, :to_h + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. 
+ sig { overridable.returns(T::Hash[Symbol, T.anything]) } + def to_hash + end sig { params(keys: T.nilable(T::Array[Symbol])).returns(T::Hash[Symbol, T.anything]) } def deconstruct_keys(keys) diff --git a/rbi/lib/openai/base_page.rbi b/rbi/lib/openai/base_page.rbi index c5dc2a2d..6c91ffcc 100644 --- a/rbi/lib/openai/base_page.rbi +++ b/rbi/lib/openai/base_page.rbi @@ -20,8 +20,6 @@ module OpenAI def to_enum end - alias_method :enum_for, :to_enum - # @api private sig do params( diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi index 9b1654a6..dee8cc83 100644 --- a/rbi/lib/openai/base_stream.rbi +++ b/rbi/lib/openai/base_stream.rbi @@ -24,8 +24,6 @@ module OpenAI def to_enum end - alias_method :enum_for, :to_enum - # @api private sig do params( diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 699d0782..7a7e54b3 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -160,11 +160,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -204,7 +204,7 @@ module OpenAI failed_at: Integer, finalizing_at: Integer, in_progress_at: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, request_counts: OpenAI::Models::BatchRequestCounts, object: Symbol @@ -256,7 +256,7 @@ module OpenAI failed_at: Integer, finalizing_at: Integer, in_progress_at: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, request_counts: OpenAI::Models::BatchRequestCounts } diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index c97a8484..50216257 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -51,11 +51,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -64,7 +64,7 @@ module OpenAI completion_window: Symbol, endpoint: Symbol, input_file_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -79,7 +79,7 @@ module OpenAI completion_window: Symbol, endpoint: Symbol, input_file_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index c6f6f83f..3a393e39 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -47,11 +47,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -232,7 +232,7 @@ module OpenAI created_at: Integer, description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: T.nilable(String), tools: T::Array[ @@ -282,7 +282,7 @@ module OpenAI created_at: Integer, description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: T.nilable(String), object: Symbol, diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 9a62347b..32cf935a 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -45,11 +45,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -222,7 +222,7 @@ module OpenAI model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), response_format: T.nilable( @@ -270,7 +270,7 @@ module OpenAI model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), response_format: T.nilable( @@ -483,11 +483,13 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig do + params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) + end def metadata=(_) end @@ -498,7 +500,7 @@ module OpenAI OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -514,7 +516,7 @@ module OpenAI OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 69e28e75..b4ca656b 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -32,11 +32,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -221,7 +221,7 @@ module OpenAI params( description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, Symbol), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), @@ -269,7 +269,7 @@ module OpenAI { description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, Symbol), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 799f589a..3a2ea58e 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -28,11 +28,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -66,7 +66,7 @@ module OpenAI params( id: String, created_at: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::Thread::ToolResources), object: Symbol ) @@ -81,7 +81,7 @@ module OpenAI { id: String, created_at: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, tool_resources: T.nilable(OpenAI::Models::Beta::Thread::ToolResources) } diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 0117301e..3a5a42f8 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -60,11 +60,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -285,7 +285,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -343,7 +343,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -406,11 +406,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -434,7 +434,7 @@ module OpenAI sig do params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources) ) .returns(T.attached_class) @@ -447,7 +447,7 @@ module OpenAI .returns( { messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources) } ) @@ -537,11 +537,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -559,7 +559,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -582,7 +582,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end @@ -610,15 +610,7 @@ module OpenAI end MessageContentPartParamArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: @@ -930,11 +922,13 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig do + params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) + end def metadata=(_) end @@ -945,7 +939,7 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -961,7 +955,7 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 6fdb5196..77085982 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -26,11 +26,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -52,7 +52,7 @@ module OpenAI sig do params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -66,7 +66,7 @@ module OpenAI .returns( { messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), request_options: OpenAI::RequestOptions } @@ -155,11 +155,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -177,7 +177,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -200,7 +200,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end @@ -228,15 +228,7 @@ module OpenAI end MessageContentPartParamArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: @@ -542,11 +534,13 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig do + params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) + end def metadata=(_) end @@ -557,7 +551,7 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -573,7 +567,7 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 2748a8cc..2a3c0bc9 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -13,11 +13,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -38,7 +38,7 @@ module OpenAI sig do params( - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -51,7 +51,7 @@ module OpenAI override .returns( { - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), request_options: OpenAI::RequestOptions } diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 4316b56f..01311c44 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -123,11 +123,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -199,7 +199,7 @@ module OpenAI created_at: Integer, incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), role: Symbol, run_id: T.nilable(String), status: Symbol, @@ -245,7 +245,7 @@ module OpenAI created_at: Integer, incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, role: Symbol, run_id: T.nilable(String), diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 0c691f82..6d8913ff 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -87,11 +87,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -109,7 +109,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -133,7 +133,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions } ) @@ -162,15 +162,7 @@ module OpenAI end MessageContentPartParamArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index f2d3e73a..26ec576d 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -22,18 +22,18 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end sig do params( thread_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -44,7 +44,11 @@ module OpenAI sig do override .returns( - {thread_id: String, metadata: T.nilable(OpenAI::Models::Metadata), request_options: OpenAI::RequestOptions} + { + thread_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } ) end def to_hash diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 94002e57..0ea43a0c 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -132,11 +132,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -392,7 +392,7 @@ module OpenAI last_error: T.nilable(OpenAI::Models::Beta::Threads::Run::LastError), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, parallel_tool_calls: T::Boolean, required_action: T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction), @@ -470,7 +470,7 @@ module OpenAI last_error: T.nilable(OpenAI::Models::Beta::Threads::Run::LastError), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, object: Symbol, parallel_tool_calls: T::Boolean, diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 0732a2a0..a54ec011 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -100,11 +100,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -314,7 +314,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(Symbol), @@ -376,7 +376,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(Symbol), @@ -496,11 +496,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -518,7 +518,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end @@ -541,7 +541,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata) + metadata: T.nilable(T::Hash[Symbol, String]) } ) end @@ -569,15 +569,7 @@ module OpenAI end MessageContentPartParamArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index d01aa52d..732bd936 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -22,18 +22,18 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end sig do params( thread_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -44,7 +44,11 @@ module OpenAI sig do override .returns( - {thread_id: String, metadata: T.nilable(OpenAI::Models::Metadata), request_options: OpenAI::RequestOptions} + { + thread_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } ) end def to_hash diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 2b1d5ced..d6fc02be 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -91,11 +91,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -200,7 +200,7 @@ module OpenAI expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, status: Symbol, step_details: T.any( @@ -246,7 +246,7 @@ module OpenAI expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, run_id: String, status: Symbol, diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 60807405..bdba736c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -216,14 +216,10 @@ module OpenAI end ArrayOfContentPartArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - end + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], + OpenAI::Converter + ) # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
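The change above is representative of this whole patch: `T.type_alias` declarations, which Sorbet erases at runtime, become `T.let(OpenAI::ArrayOf[...], OpenAI::Converter)` values that survive as real runtime constants, and the `OpenAI::Models::Metadata` alias is inlined to `T.nilable(T::Hash[Symbol, String])` in every signature that mentions it. A minimal sketch of what the inlined metadata type means for a caller, assuming an `OpenAI::Client` instance named `client` (the client class is not part of this patch) and a purely illustrative metadata key:

  # `metadata` is now a plain nilable Hash of Symbol keys to String values,
  # so a bare hash literal satisfies the signature directly.
  client.beta.threads.create(
    metadata: {user_id: "u_123"} # hypothetical key, for illustration only
  )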
diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 33c1f24d..7c1f2e46 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -70,7 +70,7 @@ module OpenAI type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } ChatCompletionContentPartTextArray = - T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 50f8e2fd..206b2990 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -59,7 +59,7 @@ module OpenAI type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } ChatCompletionContentPartTextArray = - T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 49524af6..9d79c62a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -70,7 +70,7 @@ module OpenAI type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } ChatCompletionContentPartTextArray = - T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 485b4b8c..7c447076 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -66,7 +66,7 @@ module OpenAI type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } ChatCompletionContentPartTextArray = - T.type_alias { T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] } + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index f97c4e46..53c7c3a7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -141,16 +141,7 @@ module OpenAI end ChatCompletionContentPartArray = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], 
OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 1a0003e1..f0e77033 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -195,11 +195,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -517,7 +517,7 @@ module OpenAI logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, @@ -603,7 +603,7 @@ module OpenAI logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, @@ -706,22 +706,24 @@ module OpenAI # documentation about the format. # # Omitting `parameters` defines a function with an empty parameter list. - sig { returns(T.nilable(OpenAI::Models::FunctionParameters)) } + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } def parameters end - sig { params(_: OpenAI::Models::FunctionParameters).returns(OpenAI::Models::FunctionParameters) } + sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } def parameters=(_) end sig do - params(name: String, description: String, parameters: OpenAI::Models::FunctionParameters) + params(name: String, description: String, parameters: T::Hash[Symbol, T.anything]) .returns(T.attached_class) end def self.new(name:, description: nil, parameters: nil) end - sig { override.returns({name: String, description: String, parameters: OpenAI::Models::FunctionParameters}) } + sig do + override.returns({name: String, description: String, parameters: T::Hash[Symbol, T.anything]}) + end def to_hash end end @@ -790,7 +792,7 @@ module OpenAI Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end class WebSearchOptions < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 45a53c74..d4ce52ba 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -28,11 +28,11 @@ module OpenAI # A list of metadata keys to filter the Chat Completions by. 
Example: # # `metadata[key1]=value1&metadata[key2]=value2` - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -59,7 +59,7 @@ module OpenAI params( after: String, limit: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -75,7 +75,7 @@ module OpenAI { after: String, limit: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: Symbol, request_options: OpenAI::RequestOptions diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index 0b3aa56f..64b6d477 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -13,17 +13,17 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end sig do params( - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -31,7 +31,14 @@ module OpenAI def self.new(metadata:, request_options: {}) end - sig { override.returns({metadata: T.nilable(OpenAI::Models::Metadata), request_options: OpenAI::RequestOptions}) } + sig do + override.returns( + { + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end def to_hash end end diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 8350f105..eb7a6ceb 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -361,11 +361,11 @@ module OpenAI {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} end - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - IntegerArray = T.type_alias { T::Array[Integer] } + IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) - ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } + ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) end # Up to 4 sequences where the API will stop generating further tokens. 
The @@ -375,7 +375,7 @@ module OpenAI Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index c7966b7d..3f30f067 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -115,11 +115,11 @@ module OpenAI {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} end - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - IntegerArray = T.type_alias { T::Array[Integer] } + IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) - ArrayOfToken2DArray = T.type_alias { T::Array[T::Array[Integer]] } + ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) end # ID of the model to use. You can use the diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 20dd1a07..b4326f0b 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -184,11 +184,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -224,7 +224,7 @@ module OpenAI validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), method_: OpenAI::Models::FineTuning::FineTuningJob::Method, object: Symbol ) @@ -274,7 +274,7 @@ module OpenAI validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), method_: OpenAI::Models::FineTuning::FineTuningJob::Method } ) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 3081b9f0..29a4f7bf 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -73,11 +73,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -143,7 +143,7 @@ module OpenAI training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), method_: OpenAI::Models::FineTuning::JobCreateParams::Method, seed: T.nilable(Integer), suffix: T.nilable(String), @@ -174,7 +174,7 @@ module OpenAI training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), method_: OpenAI::Models::FineTuning::JobCreateParams::Method, seed: T.nilable(Integer), suffix: T.nilable(String), diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index 75a4000f..542b14aa 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -30,11 +30,11 @@ module OpenAI # documentation about the format. # # Omitting `parameters` defines a function with an empty parameter list. - sig { returns(T.nilable(OpenAI::Models::FunctionParameters)) } + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } def parameters end - sig { params(_: OpenAI::Models::FunctionParameters).returns(OpenAI::Models::FunctionParameters) } + sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } def parameters=(_) end @@ -55,7 +55,7 @@ module OpenAI params( name: String, description: String, - parameters: OpenAI::Models::FunctionParameters, + parameters: T::Hash[Symbol, T.anything], strict: T.nilable(T::Boolean) ) .returns(T.attached_class) @@ -66,7 +66,12 @@ module OpenAI sig do override .returns( - {name: String, description: String, parameters: OpenAI::Models::FunctionParameters, strict: T.nilable(T::Boolean)} + { + name: String, + description: String, + parameters: T::Hash[Symbol, T.anything], + strict: T.nilable(T::Boolean) + } ) end def to_hash diff --git a/rbi/lib/openai/models/function_parameters.rbi b/rbi/lib/openai/models/function_parameters.rbi index 4a46a5a2..7f79a305 100644 --- a/rbi/lib/openai/models/function_parameters.rbi +++ b/rbi/lib/openai/models/function_parameters.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - FunctionParameters = T.type_alias { T::Hash[Symbol, T.anything] } + FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Converter) end end diff --git a/rbi/lib/openai/models/metadata.rbi b/rbi/lib/openai/models/metadata.rbi index 634c341b..e09a2c38 100644 --- a/rbi/lib/openai/models/metadata.rbi +++ b/rbi/lib/openai/models/metadata.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - Metadata = T.type_alias { T.nilable(T::Hash[Symbol, String]) } + Metadata = T.let(OpenAI::HashOf[String], OpenAI::Converter) end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 6b9a1b5a..3a7a01a9 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ 
b/rbi/lib/openai/models/moderation_create_params.rbi @@ -99,10 +99,10 @@ module OpenAI } end - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) ModerationMultiModalInputArray = - T.type_alias { T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] } + T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter) end # The content moderation model you would like to use. Learn more in diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 23c15e32..5b3386f8 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -6,13 +6,48 @@ module OpenAI class EasyInputMessage < OpenAI::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - sig { returns(T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)) } + sig do + returns( + T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + ) + end def content end sig do - params(_: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)) - .returns(T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)) + params( + _: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + ) + .returns( + T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + ) end def content=(_) end @@ -43,7 +78,16 @@ module OpenAI # interactions. sig do params( - content: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList), + content: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ), role: Symbol, type: Symbol ) @@ -55,7 +99,20 @@ module OpenAI sig do override .returns( - {content: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList), role: Symbol, type: Symbol} + { + content: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ), + role: Symbol, + type: Symbol + } ) end def to_hash @@ -67,7 +124,20 @@ module OpenAI abstract! Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInputMessageContentList)} } + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + } + end end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index d0e82f4e..29aba874 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -66,11 +66,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -362,7 +362,7 @@ module OpenAI error: T.nilable(OpenAI::Models::Responses::ResponseError), incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, Symbol), output: T::Array[ T.any( @@ -433,7 +433,7 @@ module OpenAI error: T.nilable(OpenAI::Models::Responses::ResponseError), incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, Symbol), object: Symbol, output: T::Array[ diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 21d6b940..dbd9ed7b 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -16,13 +16,72 @@ module OpenAI # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - sig { returns(T.any(String, OpenAI::Models::Responses::ResponseInput)) } + sig do + returns( + T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ) + ) + end def input end sig do - params(_: T.any(String, OpenAI::Models::Responses::ResponseInput)) - .returns(T.any(String, OpenAI::Models::Responses::ResponseInput)) + params( + _: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + 
OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ) + ) + .returns( + T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ) + ) end def input=(_) end @@ -87,11 +146,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -285,12 +344,29 @@ module OpenAI sig do params( - input: T.any(String, OpenAI::Models::Responses::ResponseInput), + input: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ), model: T.any(String, Symbol), include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), @@ -339,12 +415,29 @@ module OpenAI override .returns( { - input: T.any(String, OpenAI::Models::Responses::ResponseInput), + input: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ), model: T.any(String, Symbol), include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), - 
metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), @@ -382,7 +475,29 @@ module OpenAI class Input < OpenAI::Union abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Responses::ResponseInput)} } + Variants = + type_template(:out) do + { + fixed: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ) + } + end end # How the model should select which tool (or tools) to use when generating a diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/lib/openai/models/responses/response_input.rbi index 2bd4a97a..5b6d68df 100644 --- a/rbi/lib/openai/models/responses/response_input.rbi +++ b/rbi/lib/openai/models/responses/response_input.rbi @@ -4,23 +4,7 @@ module OpenAI module Models module Responses ResponseInput = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 66f24422..6ccd31a3 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -33,13 +33,39 @@ module OpenAI class Message < OpenAI::BaseModel # A list of one or many input items to the model, containing different content # types. 
- sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + end def content end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageContentList) - .returns(OpenAI::Models::Responses::ResponseInputMessageContentList) + params( + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) end def content=(_) end @@ -77,7 +103,13 @@ module OpenAI # precedence over instructions given with the `user` role. sig do params( - content: OpenAI::Models::Responses::ResponseInputMessageContentList, + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ], role: Symbol, status: Symbol, type: Symbol @@ -90,7 +122,18 @@ module OpenAI sig do override .returns( - {content: OpenAI::Models::Responses::ResponseInputMessageContentList, role: Symbol, status: Symbol, type: Symbol} + { + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ], + role: Symbol, + status: Symbol, + type: Symbol + } ) end def to_hash diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi index 80207c6a..14a913ae 100644 --- a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi @@ -4,15 +4,7 @@ module OpenAI module Models module Responses ResponseInputMessageContentList = - T.type_alias do - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - end + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 2070f09c..cabf1399 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -15,13 +15,39 @@ module OpenAI # A list of one or many input items to the model, containing different content # types. 
- sig { returns(OpenAI::Models::Responses::ResponseInputMessageContentList) } + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + end def content end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageContentList) - .returns(OpenAI::Models::Responses::ResponseInputMessageContentList) + params( + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ) end def content=(_) end @@ -57,7 +83,13 @@ module OpenAI sig do params( id: String, - content: OpenAI::Models::Responses::ResponseInputMessageContentList, + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ], role: Symbol, status: Symbol, type: Symbol @@ -72,7 +104,13 @@ module OpenAI .returns( { id: String, - content: OpenAI::Models::Responses::ResponseInputMessageContentList, + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ], role: Symbol, status: Symbol, type: Symbol diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 642c27bf..e80d2488 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -44,11 +44,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. - sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -116,7 +116,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::Models::VectorStore::FileCounts, last_active_at: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: String, status: Symbol, usage_bytes: Integer, @@ -149,7 +149,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::Models::VectorStore::FileCounts, last_active_at: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, status: Symbol, diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 02ffb8d9..89042cab 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -67,11 +67,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -92,7 +92,7 @@ module OpenAI ), expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -111,7 +111,7 @@ module OpenAI ), expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: String, request_options: OpenAI::RequestOptions } diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 22b4b1e4..029b5b83 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -94,7 +94,7 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } - StringArray = T.type_alias { T::Array[String] } + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end # A filter to apply based on file attributes. diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 88614cee..0dbd8974 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -24,11 +24,11 @@ module OpenAI # # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
- sig { returns(T.nilable(OpenAI::Models::Metadata)) } + sig { returns(T.nilable(T::Hash[Symbol, String])) } def metadata end - sig { params(_: T.nilable(OpenAI::Models::Metadata)).returns(T.nilable(OpenAI::Models::Metadata)) } + sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } def metadata=(_) end @@ -44,7 +44,7 @@ module OpenAI sig do params( expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -58,7 +58,7 @@ module OpenAI .returns( { expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions } diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index 7754b091..10691913 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -9,7 +9,7 @@ module OpenAI completion_window: Symbol, endpoint: Symbol, input_file_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Batch) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index c9d62665..00d18547 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -10,7 +10,7 @@ module OpenAI model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), response_format: T.nilable( @@ -128,7 +128,7 @@ module OpenAI assistant_id: String, description: T.nilable(String), instructions: T.nilable(String), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, Symbol), name: T.nilable(String), reasoning_effort: T.nilable(Symbol), diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index aa843bbe..1479fd01 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -16,7 +16,7 @@ module OpenAI sig do params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -61,7 +61,7 @@ module OpenAI sig do params( thread_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -108,7 +108,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), 
model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -242,7 +242,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index d417b9d0..3ec7bab4 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -21,7 +21,7 @@ module OpenAI ), role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Message) @@ -76,7 +76,7 @@ module OpenAI params( message_id: String, thread_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Message) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index d4b1a449..cd747220 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -20,7 +20,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(Symbol), @@ -177,7 +177,7 @@ module OpenAI instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(Symbol), @@ -376,7 +376,7 @@ module OpenAI params( run_id: String, thread_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Run) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 7358d205..fff5ad69 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -46,7 +46,7 @@ module OpenAI logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, @@ -304,7 +304,7 @@ module OpenAI logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), modalities: T.nilable(T::Array[Symbol]), n: 
T.nilable(Integer), parallel_tool_calls: T::Boolean, @@ -546,7 +546,7 @@ module OpenAI sig do params( completion_id: String, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Chat::ChatCompletion) @@ -571,7 +571,7 @@ module OpenAI params( after: String, limit: Integer, - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index e1709e2c..b3f7cc15 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -21,7 +21,7 @@ module OpenAI training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), method_: OpenAI::Models::FineTuning::JobCreateParams::Method, seed: T.nilable(Integer), suffix: T.nilable(String), diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index c285a853..64ab5c27 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -20,12 +20,29 @@ module OpenAI # your own data as input for the model's response. sig do params( - input: T.any(String, OpenAI::Models::Responses::ResponseInput), + input: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ), model: T.any(String, Symbol), include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), @@ -174,12 +191,29 @@ module OpenAI # your own data as input for the model's response. 
sig do params( - input: T.any(String, OpenAI::Models::Responses::ResponseInput), + input: T.any( + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ), model: T.any(String, Symbol), include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 32aecdbf..86418315 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -20,7 +20,7 @@ module OpenAI ), expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, file_ids: T::Array[String], - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -69,7 +69,7 @@ module OpenAI params( vector_store_id: String, expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), - metadata: T.nilable(OpenAI::Models::Metadata), + metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 581a545f..ba62241d 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -118,7 +118,7 @@ module OpenAI ) -> ([true, top, nil] | [false, bool, Integer]) end - class ArrayOf[Elem] + class ArrayOf include OpenAI::Converter def self.[]: ( @@ -140,7 +140,7 @@ module OpenAI top value ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> Elem + def item_type: -> top def initialize: ( ::Hash[Symbol, top] @@ -150,7 +150,7 @@ module OpenAI ) -> void end - class HashOf[Elem] + class HashOf include OpenAI::Converter def self.[]: ( @@ -172,7 +172,7 @@ module OpenAI top value ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> Elem + def item_type: -> top def initialize: ( ::Hash[Symbol, top] From 1048d1950052a755cdb148e93dc62a44b709c706 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 16:09:05 +0000 Subject: [PATCH 041/295] feat(api): new models for TTS, STT, + new audio features for Realtime (#46) --- .stats.yml | 2 +- lib/openai.rb | 4 + .../models/audio/speech_create_params.rb | 20 +++- lib/openai/models/audio/speech_model.rb | 1 + lib/openai/models/audio/transcription.rb | 56 +++++++++- .../audio/transcription_create_params.rb | 31 +++++- .../models/audio/transcription_include.rb | 14 +++ .../audio/transcription_stream_event.rb | 29 +++++ 
.../audio/transcription_text_delta_event.rb | 88 +++++++++++++++ .../audio/transcription_text_done_event.rb | 89 +++++++++++++++ .../models/audio/translation_create_params.rb | 22 +++- lib/openai/models/audio_model.rb | 2 + lib/openai/models/audio_response_format.rb | 3 +- .../models/chat/chat_completion_chunk.rb | 10 +- lib/openai/resources/audio/speech.rb | 5 +- lib/openai/resources/audio/transcriptions.rb | 81 +++++++++++++- lib/openai/resources/audio/translations.rb | 2 +- .../models/audio/speech_create_params.rbi | 18 ++- rbi/lib/openai/models/audio/speech_model.rbi | 1 + rbi/lib/openai/models/audio/transcription.rbi | 60 +++++++++- .../audio/transcription_create_params.rbi | 29 ++++- .../models/audio/transcription_include.rbi | 15 +++ .../audio/transcription_stream_event.rbi | 25 +++++ .../audio/transcription_text_delta_event.rbi | 102 +++++++++++++++++ .../audio/transcription_text_done_event.rbi | 103 ++++++++++++++++++ .../audio/translation_create_params.rbi | 14 +++ rbi/lib/openai/models/audio_model.rbi | 2 + .../openai/models/audio_response_format.rbi | 3 +- .../models/chat/chat_completion_chunk.rbi | 6 +- rbi/lib/openai/resources/audio/speech.rbi | 6 +- .../openai/resources/audio/transcriptions.rbi | 88 ++++++++++++++- .../models/audio/speech_create_params.rbs | 6 + sig/openai/models/audio/speech_model.rbs | 3 +- sig/openai/models/audio/transcription.rbs | 41 ++++++- .../audio/transcription_create_params.rbs | 8 ++ .../models/audio/transcription_include.rbs | 13 +++ .../audio/transcription_stream_event.rbs | 13 +++ .../audio/transcription_text_delta_event.rbs | 56 ++++++++++ .../audio/transcription_text_done_event.rbs | 56 ++++++++++ .../audio/translation_create_params.rbs | 22 +++- sig/openai/models/audio_model.rbs | 5 +- .../models/chat/chat_completion_chunk.rbs | 10 +- sig/openai/resources/audio/speech.rbs | 1 + sig/openai/resources/audio/transcriptions.rbs | 13 +++ sig/openai/resources/audio/translations.rbs | 2 +- 45 files changed, 1115 insertions(+), 65 deletions(-) create mode 100644 lib/openai/models/audio/transcription_include.rb create mode 100644 lib/openai/models/audio/transcription_stream_event.rb create mode 100644 lib/openai/models/audio/transcription_text_delta_event.rb create mode 100644 lib/openai/models/audio/transcription_text_done_event.rb create mode 100644 rbi/lib/openai/models/audio/transcription_include.rbi create mode 100644 rbi/lib/openai/models/audio/transcription_stream_event.rbi create mode 100644 rbi/lib/openai/models/audio/transcription_text_delta_event.rbi create mode 100644 rbi/lib/openai/models/audio/transcription_text_done_event.rbi create mode 100644 sig/openai/models/audio/transcription_include.rbs create mode 100644 sig/openai/models/audio/transcription_stream_event.rbs create mode 100644 sig/openai/models/audio/transcription_text_delta_event.rbs create mode 100644 sig/openai/models/audio/transcription_text_done_event.rbs diff --git a/.stats.yml b/.stats.yml index 16c6386b..199d46be 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-b26121d5df6eb5d3032a45a267473798b15fcfec76dd44a3256cf1238be05fa4.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c22f59c66aec7914b6ee653d3098d1c1c8c16c180d2a158e819c8ddbf476f74b.yml diff --git a/lib/openai.rb b/lib/openai.rb index 84c5d333..3a9e6cd2 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -43,7 +43,11 @@ require_relative 
"openai/models/audio/transcription" require_relative "openai/models/audio/transcription_create_params" require_relative "openai/models/audio/transcription_create_response" +require_relative "openai/models/audio/transcription_include" require_relative "openai/models/audio/transcription_segment" +require_relative "openai/models/audio/transcription_stream_event" +require_relative "openai/models/audio/transcription_text_delta_event" +require_relative "openai/models/audio/transcription_text_done_event" require_relative "openai/models/audio/transcription_verbose" require_relative "openai/models/audio/transcription_word" require_relative "openai/models/audio/translation" diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index b7e77b57..2477a4ca 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -16,7 +16,7 @@ class SpeechCreateParams < OpenAI::BaseModel # @!attribute model # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # # @return [String, Symbol, OpenAI::Models::Audio::SpeechModel] required :model, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Model } @@ -30,6 +30,17 @@ class SpeechCreateParams < OpenAI::BaseModel # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } + # @!attribute [r] instructions + # Control the voice of your generated audio with additional instructions. Does not + # work with `tts-1` or `tts-1-hd`. + # + # @return [String, nil] + optional :instructions, String + + # @!parse + # # @return [String] + # attr_writer :instructions + # @!attribute [r] response_format # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. @@ -56,22 +67,23 @@ class SpeechCreateParams < OpenAI::BaseModel # # @param input [String] # # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] # # @param voice [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] + # # @param instructions [String] # # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] # # @param speed [Float] # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # - # def initialize(input:, model:, voice:, response_format: nil, speed: nil, request_options: {}, **) = super + # def initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void # @abstract # # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. class Model < OpenAI::Union variant String - # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1` or `tts-1-hd` + # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. 
variant enum: -> { OpenAI::Models::Audio::SpeechModel } # @!parse diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 96744e0c..26aae9d6 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -7,6 +7,7 @@ module Audio class SpeechModel < OpenAI::Enum TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" + GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" finalize! end diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 6bd2d97b..8185ea6c 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -10,15 +10,69 @@ class Transcription < OpenAI::BaseModel # @return [String] required :text, String + # @!attribute [r] logprobs + # The log probabilities of the tokens in the transcription. Only returned with the + # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added + # to the `include` array. + # + # @return [Array, nil] + optional :logprobs, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] } + + # @!parse + # # @return [Array] + # attr_writer :logprobs + # @!parse # # Represents a transcription response returned by model, based on the provided # # input. # # # # @param text [String] + # # @param logprobs [Array] # # - # def initialize(text:, **) = super + # def initialize(text:, logprobs: nil, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void + + class Logprob < OpenAI::BaseModel + # @!attribute [r] token + # The token in the transcription. + # + # @return [String, nil] + optional :token, String + + # @!parse + # # @return [String] + # attr_writer :token + + # @!attribute [r] bytes + # The bytes of the token. + # + # @return [Array, nil] + optional :bytes, OpenAI::ArrayOf[Float] + + # @!parse + # # @return [Array] + # attr_writer :bytes + + # @!attribute [r] logprob + # The log probability of the token. + # + # @return [Float, nil] + optional :logprob, Float + + # @!parse + # # @return [Float] + # attr_writer :logprob + + # @!parse + # # @param token [String] + # # @param bytes [Array] + # # @param logprob [Float] + # # + # def initialize(token: nil, bytes: nil, logprob: nil, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index d6d9f071..3ff8c770 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -16,12 +16,27 @@ class TranscriptionCreateParams < OpenAI::BaseModel required :file, IO # @!attribute model - # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). # # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranscriptionCreateParams::Model } + # @!attribute [r] include + # Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. 
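[editor's note] Alongside the new `gpt-4o-mini-tts` enum entry, the `instructions` parameter gives prompt-level control over delivery. A hedged sketch, not part of the patch: it assumes an initialized `client` and that the call returns the raw audio bytes as a String (the exact return type depends on the SDK version); the input text and output path are illustrative:

```ruby
# `instructions` only affects `gpt-4o-mini-tts`; `tts-1` and `tts-1-hd`
# ignore it, per the parameter docs above.
audio = client.audio.speech.create(
  model: "gpt-4o-mini-tts",
  voice: :coral,
  input: "Thank you for calling. How can I help today?",
  instructions: "Speak warmly, at a relaxed pace."
)
# Assumes `audio` is a binary String; mp3 is the default response format.
File.binwrite("greeting.mp3", audio)
```
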
+ # + # @return [Array, nil] + optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] } + + # @!parse + # # @return [Array] + # attr_writer :include + # @!attribute [r] language # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) @@ -49,7 +64,8 @@ class TranscriptionCreateParams < OpenAI::BaseModel # @!attribute [r] response_format # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. # # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } @@ -90,6 +106,7 @@ class TranscriptionCreateParams < OpenAI::BaseModel # @!parse # # @param file [IO, StringIO] # # @param model [String, Symbol, OpenAI::Models::AudioModel] + # # @param include [Array] # # @param language [String] # # @param prompt [String] # # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] @@ -100,6 +117,7 @@ class TranscriptionCreateParams < OpenAI::BaseModel # def initialize( # file:, # model:, + # include: nil, # language: nil, # prompt: nil, # response_format: nil, @@ -115,12 +133,13 @@ class TranscriptionCreateParams < OpenAI::BaseModel # @abstract # - # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). class Model < OpenAI::Union variant String - # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). variant enum: -> { OpenAI::Models::AudioModel } # @!parse diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb new file mode 100644 index 00000000..97303675 --- /dev/null +++ b/lib/openai/models/audio/transcription_include.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Audio + # @abstract + class TranscriptionInclude < OpenAI::Enum + LOGPROBS = :logprobs + + finalize! + end + end + end +end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb new file mode 100644 index 00000000..4bddaa1b --- /dev/null +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Audio + # @abstract + # + # Emitted when there is an additional text delta. This is also the first event + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + class TranscriptionStreamEvent < OpenAI::Union + discriminator :type + + # Emitted when there is an additional text delta. This is also the first event emitted when the transcription starts. 
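[editor's note] A usage sketch for the new `include` option, not part of the patch; the file path is illustrative. As documented above, `logprobs` only works with `response_format: :json` and the `gpt-4o-transcribe` / `gpt-4o-mini-transcribe` models:

```ruby
transcription = client.audio.transcriptions.create(
  file: File.open("meeting.wav", "rb"),
  model: "gpt-4o-transcribe",
  include: [:logprobs],
  response_format: :json
)

puts transcription.text
# Each logprob entry carries the token, its bytes, and its log probability;
# the field is nil unless `logprobs` was requested via `include`.
transcription.logprobs&.each do |lp|
  printf("%-15s %.4f\n", lp.token, lp.logprob)
end
```
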
Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. + variant :"transcript.text.delta", -> { OpenAI::Models::Audio::TranscriptionTextDeltaEvent } + + # Emitted when the transcription is complete. Contains the complete transcription text. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. + variant :"transcript.text.done", -> { OpenAI::Models::Audio::TranscriptionTextDoneEvent } + + # @!parse + # class << self + # # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] + # def variants; end + # end + end + end + end +end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb new file mode 100644 index 00000000..ec8ca4f4 --- /dev/null +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Audio + class TranscriptionTextDeltaEvent < OpenAI::BaseModel + # @!attribute delta + # The text delta that was additionally transcribed. + # + # @return [String] + required :delta, String + + # @!attribute type + # The type of the event. Always `transcript.text.delta`. + # + # @return [Symbol, :"transcript.text.delta"] + required :type, const: :"transcript.text.delta" + + # @!attribute [r] logprobs + # The log probabilities of the delta. Only included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + # + # @return [Array, nil] + optional :logprobs, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] } + + # @!parse + # # @return [Array] + # attr_writer :logprobs + + # @!parse + # # Emitted when there is an additional text delta. This is also the first event + # # emitted when the transcription starts. Only emitted when you + # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # # with the `Stream` parameter set to `true`. + # # + # # @param delta [String] + # # @param logprobs [Array] + # # @param type [Symbol, :"transcript.text.delta"] + # # + # def initialize(delta:, logprobs: nil, type: :"transcript.text.delta", **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + + class Logprob < OpenAI::BaseModel + # @!attribute [r] token + # The token that was used to generate the log probability. + # + # @return [String, nil] + optional :token, String + + # @!parse + # # @return [String] + # attr_writer :token + + # @!attribute [r] bytes + # The bytes that were used to generate the log probability. + # + # @return [Array, nil] + optional :bytes, OpenAI::ArrayOf[OpenAI::Unknown] + + # @!parse + # # @return [Array] + # attr_writer :bytes + + # @!attribute [r] logprob + # The log probability of the token. 
+ # + # @return [Float, nil] + optional :logprob, Float + + # @!parse + # # @return [Float] + # attr_writer :logprob + + # @!parse + # # @param token [String] + # # @param bytes [Array] + # # @param logprob [Float] + # # + # def initialize(token: nil, bytes: nil, logprob: nil, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb new file mode 100644 index 00000000..b2a78b25 --- /dev/null +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Audio + class TranscriptionTextDoneEvent < OpenAI::BaseModel + # @!attribute text + # The text that was transcribed. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always `transcript.text.done`. + # + # @return [Symbol, :"transcript.text.done"] + required :type, const: :"transcript.text.done" + + # @!attribute [r] logprobs + # The log probabilities of the individual tokens in the transcription. Only + # included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + # + # @return [Array, nil] + optional :logprobs, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] } + + # @!parse + # # @return [Array] + # attr_writer :logprobs + + # @!parse + # # Emitted when the transcription is complete. Contains the complete transcription + # # text. Only emitted when you + # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # # with the `Stream` parameter set to `true`. + # # + # # @param text [String] + # # @param logprobs [Array] + # # @param type [Symbol, :"transcript.text.done"] + # # + # def initialize(text:, logprobs: nil, type: :"transcript.text.done", **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + + class Logprob < OpenAI::BaseModel + # @!attribute [r] token + # The token that was used to generate the log probability. + # + # @return [String, nil] + optional :token, String + + # @!parse + # # @return [String] + # attr_writer :token + + # @!attribute [r] bytes + # The bytes that were used to generate the log probability. + # + # @return [Array, nil] + optional :bytes, OpenAI::ArrayOf[OpenAI::Unknown] + + # @!parse + # # @return [Array] + # attr_writer :bytes + + # @!attribute [r] logprob + # The log probability of the token. + # + # @return [Float, nil] + optional :logprob, Float + + # @!parse + # # @return [Float] + # attr_writer :logprob + + # @!parse + # # @param token [String] + # # @param bytes [Array] + # # @param logprob [Float] + # # + # def initialize(token: nil, bytes: nil, logprob: nil, **) = super + + # def initialize: (Hash | OpenAI::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 4fd4a4dc..0b31b58c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -39,11 +39,11 @@ class TranslationCreateParams < OpenAI::BaseModel # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. 
# - # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } + # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat } # @!parse - # # @return [Symbol, OpenAI::Models::AudioResponseFormat] + # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] # attr_writer :response_format # @!attribute [r] temperature @@ -64,7 +64,7 @@ class TranslationCreateParams < OpenAI::BaseModel # # @param file [IO, StringIO] # # @param model [String, Symbol, OpenAI::Models::AudioModel] # # @param prompt [String] - # # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] + # # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] # # @param temperature [Float] # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # @@ -88,6 +88,20 @@ class Model < OpenAI::Union # def variants; end # end end + + # @abstract + # + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. + class ResponseFormat < OpenAI::Enum + JSON = :json + TEXT = :text + SRT = :srt + VERBOSE_JSON = :verbose_json + VTT = :vtt + + finalize! + end end end end diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 81db712e..88507173 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -5,6 +5,8 @@ module Models # @abstract class AudioModel < OpenAI::Enum WHISPER_1 = :"whisper-1" + GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" + GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" finalize! end diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 8b92a3b9..9593d816 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -5,7 +5,8 @@ module Models # @abstract # # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. class AudioResponseFormat < OpenAI::Enum JSON = :json TEXT = :text diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 5c0d47df..5f0a0fef 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -55,7 +55,7 @@ class ChatCompletionChunk < OpenAI::BaseModel # # @return [String] # attr_writer :system_fingerprint - # @!attribute [r] usage + # @!attribute usage # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it # contains a null value **except for the last chunk** which contains the token @@ -65,11 +65,7 @@ class ChatCompletionChunk < OpenAI::BaseModel # final usage chunk which contains the total token usage for the request. 
# # @return [OpenAI::Models::CompletionUsage, nil] - optional :usage, -> { OpenAI::Models::CompletionUsage } - - # @!parse - # # @return [OpenAI::Models::CompletionUsage] - # attr_writer :usage + optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true # @!parse # # Represents a streamed chunk of a chat completion response returned by the model, @@ -82,7 +78,7 @@ class ChatCompletionChunk < OpenAI::BaseModel # # @param model [String] # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage] + # # @param usage [OpenAI::Models::CompletionUsage, nil] # # @param object [Symbol, :"chat.completion.chunk"] # # # def initialize( diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 9c5d8284..b5b584fe 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -11,13 +11,16 @@ class Speech # @option params [String] :input The text to generate audio for. The maximum length is 4096 characters. # # @option params [String, Symbol, OpenAI::Models::Audio::SpeechModel] :model One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # # @option params [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] :voice The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # + # @option params [String] :instructions Control the voice of your generated audio with additional instructions. Does not + # work with `tts-1` or `tts-1-hd`. + # # @option params [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] :response_format The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 9e291700..3c7238bf 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -11,8 +11,15 @@ class Transcriptions # @option params [IO, StringIO] :file The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). + # + # @option params [Array] :include Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. # # @option params [String] :language The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. 
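[editor's note] With `usage` now explicitly nilable on `ChatCompletionChunk`, streaming consumers should nil-check it: every chunk carries `usage: nil` except the final one when `stream_options: {include_usage: true}` is requested. A hedged sketch, not part of the patch — it assumes the chat completions resource exposes a `#create_streaming` method in this SDK generation:

```ruby
stream = client.chat.completions.create_streaming(
  model: "gpt-4o-mini",
  messages: [{role: :user, content: "Write a haiku about types."}],
  stream_options: {include_usage: true}
)

stream.each do |chunk|
  print chunk.choices.first&.delta&.content
  # `usage` is nil on every chunk except the last one.
  if (usage = chunk.usage)
    puts "\n#{usage.total_tokens} tokens total"
  end
end
```
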
`en`) @@ -24,7 +31,8 @@ class Transcriptions # should match the audio language. # # @option params [Symbol, OpenAI::Models::AudioResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. # # @option params [Float] :temperature The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and @@ -43,6 +51,10 @@ class Transcriptions # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] def create(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) + if parsed[:stream] + message = "Please use `#create_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "audio/transcriptions", @@ -53,6 +65,69 @@ def create(params) ) end + # Transcribes audio into the input language. + # + # @param params [OpenAI::Models::Audio::TranscriptionCreateParams, Hash{Symbol=>Object}] . + # + # @option params [IO, StringIO] :file The audio file object (not file name) to transcribe, in one of these formats: + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # + # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). + # + # @option params [Array] :include Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. + # + # @option params [String] :language The language of the input audio. Supplying the input language in + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. + # + # @option params [String] :prompt An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. + # + # @option params [Symbol, OpenAI::Models::AudioResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. + # + # @option params [Float] :temperature The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. + # + # @option params [Array] :timestamp_granularities The timestamp granularities to populate for this transcription. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. 
Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. + # + # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # + # @return [OpenAI::Stream] + def create_streaming(params) + parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#create` for the non-streaming use case." + raise ArgumentError.new(message) + end + parsed.store(:stream, true) + @client.request( + method: :post, + path: "audio/transcriptions", + headers: {"content-type" => "multipart/form-data", "accept" => "text/event-stream"}, + body: parsed, + stream: OpenAI::Stream, + model: OpenAI::Models::Audio::TranscriptionStreamEvent, + options: options + ) + end + # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index c1de4f8e..ea8e0e4a 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -19,7 +19,7 @@ class Translations # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) # should be in English. # - # @option params [Symbol, OpenAI::Models::AudioResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, + # @option params [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. # # @option params [Float] :temperature The sampling temperature, between 0 and 1. Higher values like 0.8 will make the diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 043a7179..e74cec3d 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -17,7 +17,7 @@ module OpenAI end # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. sig { returns(T.any(String, Symbol)) } def model end @@ -38,6 +38,16 @@ module OpenAI def voice=(_) end + # Control the voice of your generated audio with additional instructions. Does not + # work with `tts-1` or `tts-1-hd`. + sig { returns(T.nilable(String)) } + def instructions + end + + sig { params(_: String).returns(String) } + def instructions=(_) + end + # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
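[editor's note] The `#create` / `#create_streaming` split guards against mixing modes: passing `stream:` to the wrong method raises `ArgumentError`, and the streaming variant forces `stream: true` with an SSE accept header, yielding the typed transcript events added in this patch. A consumption sketch, not part of the patch; the file path is illustrative:

```ruby
stream = client.audio.transcriptions.create_streaming(
  file: File.open("meeting.wav", "rb"),
  model: "gpt-4o-mini-transcribe"
)

stream.each do |event|
  case event
  when OpenAI::Models::Audio::TranscriptionTextDeltaEvent
    print event.delta # incremental text as it is recognized
  when OpenAI::Models::Audio::TranscriptionTextDoneEvent
    puts "\n[done] #{event.text.length} characters transcribed"
  end
end
```
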
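[editor's note] Translations, by contrast, keep the Whisper-era output formats (`json`, `text`, `srt`, `verbose_json`, `vtt`), which is why this patch gives them their own `ResponseFormat` enum instead of the shared `AudioResponseFormat`. A minimal sketch, not part of the patch; the input file is illustrative:

```ruby
# Default json output parses into a Translation model with a `text` field.
translation = client.audio.translations.create(
  file: File.open("interview_fr.mp3", "rb"),
  model: "whisper-1"
)
puts translation.text # English translation of the source audio
```
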
sig { returns(T.nilable(Symbol)) } @@ -63,13 +73,14 @@ module OpenAI input: String, model: T.any(String, Symbol), voice: Symbol, + instructions: String, response_format: Symbol, speed: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) end - def self.new(input:, model:, voice:, response_format: nil, speed: nil, request_options: {}) + def self.new(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) end sig do @@ -79,6 +90,7 @@ module OpenAI input: String, model: T.any(String, Symbol), voice: Symbol, + instructions: String, response_format: Symbol, speed: Float, request_options: OpenAI::RequestOptions @@ -89,7 +101,7 @@ module OpenAI end # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. class Model < OpenAI::Union abstract! diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index f465baf8..5228e000 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -10,6 +10,7 @@ module OpenAI TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" + GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" end end end diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index bc8940ae..ac4346e0 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -13,15 +13,69 @@ module OpenAI def text=(_) end + # The log probabilities of the tokens in the transcription. Only returned with the + # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added + # to the `include` array. + sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::Transcription::Logprob])) } + def logprobs + end + + sig do + params(_: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + .returns(T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + end + def logprobs=(_) + end + # Represents a transcription response returned by model, based on the provided # input. - sig { params(text: String).returns(T.attached_class) } - def self.new(text:) + sig do + params(text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + .returns(T.attached_class) + end + def self.new(text:, logprobs: nil) end - sig { override.returns({text: String}) } + sig { override.returns({text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]}) } def to_hash end + + class Logprob < OpenAI::BaseModel + # The token in the transcription. + sig { returns(T.nilable(String)) } + def token + end + + sig { params(_: String).returns(String) } + def token=(_) + end + + # The bytes of the token. + sig { returns(T.nilable(T::Array[Float])) } + def bytes + end + + sig { params(_: T::Array[Float]).returns(T::Array[Float]) } + def bytes=(_) + end + + # The log probability of the token. 
+ sig { returns(T.nilable(Float)) } + def logprob + end + + sig { params(_: Float).returns(Float) } + def logprob=(_) + end + + sig { params(token: String, bytes: T::Array[Float], logprob: Float).returns(T.attached_class) } + def self.new(token: nil, bytes: nil, logprob: nil) + end + + sig { override.returns({token: String, bytes: T::Array[Float], logprob: Float}) } + def to_hash + end + end end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 29ecd8fb..026f8b77 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -17,8 +17,9 @@ module OpenAI def file=(_) end - # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). sig { returns(T.any(String, Symbol)) } def model end @@ -27,6 +28,19 @@ module OpenAI def model=(_) end + # Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. + sig { returns(T.nilable(T::Array[Symbol])) } + def include + end + + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + def include=(_) + end + # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) # format will improve accuracy and latency. @@ -51,7 +65,8 @@ module OpenAI end # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. sig { returns(T.nilable(Symbol)) } def response_format end @@ -90,6 +105,7 @@ module OpenAI params( file: T.any(IO, StringIO), model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, response_format: Symbol, @@ -102,6 +118,7 @@ module OpenAI def self.new( file:, model:, + include: nil, language: nil, prompt: nil, response_format: nil, @@ -117,6 +134,7 @@ module OpenAI { file: T.any(IO, StringIO), model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, response_format: Symbol, @@ -129,8 +147,9 @@ module OpenAI def to_hash end - # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). class Model < OpenAI::Union abstract! diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi new file mode 100644 index 00000000..7a60b02a --- /dev/null +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -0,0 +1,15 @@ +# typed: strong + +module OpenAI + module Models + module Audio + class TranscriptionInclude < OpenAI::Enum + abstract! 
+ + Value = type_template(:out) { {fixed: Symbol} } + + LOGPROBS = :logprobs + end + end + end +end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi new file mode 100644 index 00000000..4c8cc6bc --- /dev/null +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -0,0 +1,25 @@ +# typed: strong + +module OpenAI + module Models + module Audio + # Emitted when there is an additional text delta. This is also the first event + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + class TranscriptionStreamEvent < OpenAI::Union + abstract! + + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Audio::TranscriptionTextDeltaEvent, + OpenAI::Models::Audio::TranscriptionTextDoneEvent + ) + } + end + end + end + end +end diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi new file mode 100644 index 00000000..6c73838b --- /dev/null +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -0,0 +1,102 @@ +# typed: strong + +module OpenAI + module Models + module Audio + class TranscriptionTextDeltaEvent < OpenAI::BaseModel + # The text delta that was additionally transcribed. + sig { returns(String) } + def delta + end + + sig { params(_: String).returns(String) } + def delta=(_) + end + + # The type of the event. Always `transcript.text.delta`. + sig { returns(Symbol) } + def type + end + + sig { params(_: Symbol).returns(Symbol) } + def type=(_) + end + + # The log probabilities of the delta. Only included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob])) } + def logprobs + end + + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) + end + def logprobs=(_) + end + + # Emitted when there is an additional text delta. This is also the first event + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + sig do + params( + delta: String, + logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(delta:, logprobs: nil, type: :"transcript.text.delta") + end + + sig do + override + .returns( + {delta: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]} + ) + end + def to_hash + end + + class Logprob < OpenAI::BaseModel + # The token that was used to generate the log probability. + sig { returns(T.nilable(String)) } + def token + end + + sig { params(_: String).returns(String) } + def token=(_) + end + + # The bytes that were used to generate the log probability. + sig { returns(T.nilable(T::Array[T.anything])) } + def bytes + end + + sig { params(_: T::Array[T.anything]).returns(T::Array[T.anything]) } + def bytes=(_) + end + + # The log probability of the token. 
+ sig { returns(T.nilable(Float)) } + def logprob + end + + sig { params(_: Float).returns(Float) } + def logprob=(_) + end + + sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } + def self.new(token: nil, bytes: nil, logprob: nil) + end + + sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } + def to_hash + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi new file mode 100644 index 00000000..fb616718 --- /dev/null +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -0,0 +1,103 @@ +# typed: strong + +module OpenAI + module Models + module Audio + class TranscriptionTextDoneEvent < OpenAI::BaseModel + # The text that was transcribed. + sig { returns(String) } + def text + end + + sig { params(_: String).returns(String) } + def text=(_) + end + + # The type of the event. Always `transcript.text.done`. + sig { returns(Symbol) } + def type + end + + sig { params(_: Symbol).returns(Symbol) } + def type=(_) + end + + # The log probabilities of the individual tokens in the transcription. Only + # included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob])) } + def logprobs + end + + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) + end + def logprobs=(_) + end + + # Emitted when the transcription is complete. Contains the complete transcription + # text. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + sig do + params( + text: String, + logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(text:, logprobs: nil, type: :"transcript.text.done") + end + + sig do + override + .returns( + {text: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]} + ) + end + def to_hash + end + + class Logprob < OpenAI::BaseModel + # The token that was used to generate the log probability. + sig { returns(T.nilable(String)) } + def token + end + + sig { params(_: String).returns(String) } + def token=(_) + end + + # The bytes that were used to generate the log probability. + sig { returns(T.nilable(T::Array[T.anything])) } + def bytes + end + + sig { params(_: T::Array[T.anything]).returns(T::Array[T.anything]) } + def bytes=(_) + end + + # The log probability of the token. 
+ sig { returns(T.nilable(Float)) } + def logprob + end + + sig { params(_: Float).returns(Float) } + def logprob=(_) + end + + sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } + def self.new(token: nil, bytes: nil, logprob: nil) + end + + sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } + def to_hash + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index fb5d4a71..ce2e6e77 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -99,6 +99,20 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end + + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. + class ResponseFormat < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + JSON = :json + TEXT = :text + SRT = :srt + VERBOSE_JSON = :verbose_json + VTT = :vtt + end end end end diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index 85348552..917ce7d8 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -8,6 +8,8 @@ module OpenAI Value = type_template(:out) { {fixed: Symbol} } WHISPER_1 = :"whisper-1" + GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" + GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index fb54aad0..405da3e2 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -3,7 +3,8 @@ module OpenAI module Models # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. class AudioResponseFormat < OpenAI::Enum abstract! 
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index c2451af5..647695a9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -86,7 +86,7 @@ module OpenAI def usage end - sig { params(_: OpenAI::Models::CompletionUsage).returns(OpenAI::Models::CompletionUsage) } + sig { params(_: T.nilable(OpenAI::Models::CompletionUsage)).returns(T.nilable(OpenAI::Models::CompletionUsage)) } def usage=(_) end @@ -101,7 +101,7 @@ module OpenAI model: String, service_tier: T.nilable(Symbol), system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage, + usage: T.nilable(OpenAI::Models::CompletionUsage), object: Symbol ) .returns(T.attached_class) @@ -129,7 +129,7 @@ module OpenAI object: Symbol, service_tier: T.nilable(Symbol), system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage + usage: T.nilable(OpenAI::Models::CompletionUsage) } ) end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index ae6f4be5..e2e85216 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -10,6 +10,7 @@ module OpenAI input: String, model: T.any(String, Symbol), voice: Symbol, + instructions: String, response_format: Symbol, speed: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -20,13 +21,16 @@ module OpenAI # The text to generate audio for. The maximum length is 4096 characters. input:, # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1` or `tts-1-hd` + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. model:, # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). voice:, + # Control the voice of your generated audio with additional instructions. Does not + # work with `tts-1` or `tts-1-hd`. + instructions: nil, # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. response_format: nil, diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 50e5c416..a1340034 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -9,11 +9,13 @@ module OpenAI params( file: T.any(IO, StringIO), model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, response_format: Symbol, temperature: Float, timestamp_granularities: T::Array[Symbol], + stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)) @@ -22,9 +24,16 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, - # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). 
model:, + # Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. + include: nil, # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) # format will improve accuracy and latency. @@ -35,7 +44,8 @@ module OpenAI # should match the audio language. prompt: nil, # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. response_format: nil, # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and @@ -49,6 +59,78 @@ module OpenAI # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. timestamp_granularities: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: false, + request_options: {} + ) + end + + # Transcribes audio into the input language. + sig do + params( + file: T.any(IO, StringIO), + model: T.any(String, Symbol), + include: T::Array[Symbol], + language: String, + prompt: String, + response_format: Symbol, + temperature: Float, + timestamp_granularities: T::Array[Symbol], + stream: T.noreturn, + request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + ) + .returns( + OpenAI::Stream[ + T.any( + OpenAI::Models::Audio::TranscriptionTextDeltaEvent, + OpenAI::Models::Audio::TranscriptionTextDoneEvent + ) + ] + ) + end + def create_streaming( + # The audio file object (not file name) to transcribe, in one of these formats: + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + file:, + # ID of the model to use. The options are `gpt-4o-transcribe`, + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). + model:, + # Additional information to include in the transcription response. `logprobs` will + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. + include: nil, + # The language of the input audio. Supplying the input language in + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. + language: nil, + # An optional text to guide the model's style or continue a previous audio + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. + prompt: nil, + # The format of the output, in one of these options: `json`, `text`, `srt`, + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. + response_format: nil, + # The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. + temperature: nil, + # The timestamp granularities to populate for this transcription. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. + timestamp_granularities: nil, + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index ae9debbf..eb21a97f 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -6,6 +6,7 @@ module OpenAI input: String, model: OpenAI::Models::Audio::SpeechCreateParams::model, voice: OpenAI::Models::Audio::SpeechCreateParams::voice, + instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, speed: Float } @@ -21,6 +22,10 @@ module OpenAI attr_accessor voice: OpenAI::Models::Audio::SpeechCreateParams::voice + attr_reader instructions: String? + + def instructions=: (String) -> String + attr_reader response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format? def response_format=: ( @@ -35,6 +40,7 @@ module OpenAI input: String, model: OpenAI::Models::Audio::SpeechCreateParams::model, voice: OpenAI::Models::Audio::SpeechCreateParams::voice, + ?instructions: String, ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, ?speed: Float, ?request_options: OpenAI::request_opts diff --git a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs index 7ab47f1f..357eaa4c 100644 --- a/sig/openai/models/audio/speech_model.rbs +++ b/sig/openai/models/audio/speech_model.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Audio - type speech_model = :"tts-1" | :"tts-1-hd" + type speech_model = :"tts-1" | :"tts-1-hd" | :"gpt-4o-mini-tts" class SpeechModel < OpenAI::Enum TTS_1: :"tts-1" TTS_1_HD: :"tts-1-hd" + GPT_4O_MINI_TTS: :"gpt-4o-mini-tts" def self.values: -> ::Array[OpenAI::Models::Audio::speech_model] end diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index 0ea5f955..3f9bf1d4 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -1,14 +1,51 @@ module OpenAI module Models module Audio - type transcription = { text: String } + type transcription = + { + text: String, + logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob] + } class Transcription < OpenAI::BaseModel attr_accessor text: String - def initialize: (text: String) -> void + attr_reader logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob]? 
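The nilable `logprobs` reader above is only populated on request. A sketch of a non-streaming call that asks for it, reusing the `client` from the earlier sketch (the file path and model choice are illustrative):

```ruby
transcription = client.audio.transcriptions.create(
  file: File.open("speech.mp3", "rb"),
  model: :"gpt-4o-transcribe",
  # Per the parameter docs above, `logprobs` requires the `json` format
  # and one of the gpt-4o transcription models.
  response_format: :json,
  include: [:logprobs]
)

transcription.logprobs&.each do |lp|
  printf("%-16s %8.4f\n", lp.token, lp.logprob) if lp.token && lp.logprob
end
```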
+ + def logprobs=: ( + ::Array[OpenAI::Models::Audio::Transcription::Logprob] + ) -> ::Array[OpenAI::Models::Audio::Transcription::Logprob] + + def initialize: ( + text: String, + ?logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob] + ) -> void def to_hash: -> OpenAI::Models::Audio::transcription + + type logprob = { token: String, bytes: ::Array[Float], logprob: Float } + + class Logprob < OpenAI::BaseModel + attr_reader token: String? + + def token=: (String) -> String + + attr_reader bytes: ::Array[Float]? + + def bytes=: (::Array[Float]) -> ::Array[Float] + + attr_reader logprob: Float? + + def logprob=: (Float) -> Float + + def initialize: ( + ?token: String, + ?bytes: ::Array[Float], + ?logprob: Float + ) -> void + + def to_hash: -> OpenAI::Models::Audio::Transcription::logprob + end end end end diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 16280f3e..28f79a92 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -5,6 +5,7 @@ module OpenAI { file: (IO | StringIO), model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + include: ::Array[OpenAI::Models::Audio::transcription_include], language: String, prompt: String, response_format: OpenAI::Models::audio_response_format, @@ -21,6 +22,12 @@ module OpenAI attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model + attr_reader include: ::Array[OpenAI::Models::Audio::transcription_include]? + + def include=: ( + ::Array[OpenAI::Models::Audio::transcription_include] + ) -> ::Array[OpenAI::Models::Audio::transcription_include] + attr_reader language: String? def language=: (String) -> String @@ -48,6 +55,7 @@ module OpenAI def initialize: ( file: IO | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs new file mode 100644 index 00000000..cf06a929 --- /dev/null +++ b/sig/openai/models/audio/transcription_include.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Models + module Audio + type transcription_include = :logprobs + + class TranscriptionInclude < OpenAI::Enum + LOGPROBS: :logprobs + + def self.values: -> ::Array[OpenAI::Models::Audio::transcription_include] + end + end + end +end diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs new file mode 100644 index 00000000..b9233feb --- /dev/null +++ b/sig/openai/models/audio/transcription_stream_event.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Models + module Audio + type transcription_stream_event = + OpenAI::Models::Audio::TranscriptionTextDeltaEvent + | OpenAI::Models::Audio::TranscriptionTextDoneEvent + + class TranscriptionStreamEvent < OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] + end + end + end +end diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs new file mode 100644 index 00000000..373c6aed --- /dev/null +++ b/sig/openai/models/audio/transcription_text_delta_event.rbs @@ -0,0 +1,56 @@ +module OpenAI + module Models + module Audio + 
type transcription_text_delta_event = + { + delta: String, + type: :"transcript.text.delta", + logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] + } + + class TranscriptionTextDeltaEvent < OpenAI::BaseModel + attr_accessor delta: String + + attr_accessor type: :"transcript.text.delta" + + attr_reader logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]? + + def logprobs=: ( + ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] + ) -> ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] + + def initialize: ( + delta: String, + ?logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob], + ?type: :"transcript.text.delta" + ) -> void + + def to_hash: -> OpenAI::Models::Audio::transcription_text_delta_event + + type logprob = { token: String, bytes: ::Array[top], logprob: Float } + + class Logprob < OpenAI::BaseModel + attr_reader token: String? + + def token=: (String) -> String + + attr_reader bytes: ::Array[top]? + + def bytes=: (::Array[top]) -> ::Array[top] + + attr_reader logprob: Float? + + def logprob=: (Float) -> Float + + def initialize: ( + ?token: String, + ?bytes: ::Array[top], + ?logprob: Float + ) -> void + + def to_hash: -> OpenAI::Models::Audio::TranscriptionTextDeltaEvent::logprob + end + end + end + end +end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs new file mode 100644 index 00000000..f1f1dd1d --- /dev/null +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -0,0 +1,56 @@ +module OpenAI + module Models + module Audio + type transcription_text_done_event = + { + text: String, + type: :"transcript.text.done", + logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] + } + + class TranscriptionTextDoneEvent < OpenAI::BaseModel + attr_accessor text: String + + attr_accessor type: :"transcript.text.done" + + attr_reader logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]? + + def logprobs=: ( + ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] + ) -> ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] + + def initialize: ( + text: String, + ?logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob], + ?type: :"transcript.text.done" + ) -> void + + def to_hash: -> OpenAI::Models::Audio::transcription_text_done_event + + type logprob = { token: String, bytes: ::Array[top], logprob: Float } + + class Logprob < OpenAI::BaseModel + attr_reader token: String? + + def token=: (String) -> String + + attr_reader bytes: ::Array[top]? + + def bytes=: (::Array[top]) -> ::Array[top] + + attr_reader logprob: Float? 
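On the text-to-speech side, the `instructions` parameter threaded through `speech_create_params` above can steer delivery on the new `gpt-4o-mini-tts` model. A hedged sketch, assuming the response exposes the binary audio as an IO-like object (the return type is not shown in this patch):

```ruby
audio = client.audio.speech.create(
  model: :"gpt-4o-mini-tts",
  voice: :coral,
  input: "Thank you for calling. How can I help?",
  # Per the new parameter's docs, not supported by `tts-1` or `tts-1-hd`.
  instructions: "Speak warmly and at a measured pace."
)

File.binwrite("greeting.mp3", audio.read) # assuming an IO-like response body
```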
+ + def logprob=: (Float) -> Float + + def initialize: ( + ?token: String, + ?bytes: ::Array[top], + ?logprob: Float + ) -> void + + def to_hash: -> OpenAI::Models::Audio::TranscriptionTextDoneEvent::logprob + end + end + end + end +end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 252ed2dc..83dc3322 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -6,7 +6,7 @@ module OpenAI file: (IO | StringIO), model: OpenAI::Models::Audio::TranslationCreateParams::model, prompt: String, - response_format: OpenAI::Models::audio_response_format, + response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, temperature: Float } & OpenAI::request_parameters @@ -23,11 +23,11 @@ module OpenAI def prompt=: (String) -> String - attr_reader response_format: OpenAI::Models::audio_response_format? + attr_reader response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format? def response_format=: ( - OpenAI::Models::audio_response_format - ) -> OpenAI::Models::audio_response_format + OpenAI::Models::Audio::TranslationCreateParams::response_format + ) -> OpenAI::Models::Audio::TranslationCreateParams::response_format attr_reader temperature: Float? @@ -37,7 +37,7 @@ module OpenAI file: IO | StringIO, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, - ?response_format: OpenAI::Models::audio_response_format, + ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, ?temperature: Float, ?request_options: OpenAI::request_opts ) -> void @@ -49,6 +49,18 @@ module OpenAI class Model < OpenAI::Union def self.variants: -> [String, OpenAI::Models::audio_model] end + + type response_format = :json | :text | :srt | :verbose_json | :vtt + + class ResponseFormat < OpenAI::Enum + JSON: :json + TEXT: :text + SRT: :srt + VERBOSE_JSON: :verbose_json + VTT: :vtt + + def self.values: -> ::Array[OpenAI::Models::Audio::TranslationCreateParams::response_format] + end end end end diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs index f9841d31..72b67344 100644 --- a/sig/openai/models/audio_model.rbs +++ b/sig/openai/models/audio_model.rbs @@ -1,9 +1,12 @@ module OpenAI module Models - type audio_model = :"whisper-1" + type audio_model = + :"whisper-1" | :"gpt-4o-transcribe" | :"gpt-4o-mini-transcribe" class AudioModel < OpenAI::Enum WHISPER_1: :"whisper-1" + GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe" + GPT_4O_MINI_TRANSCRIBE: :"gpt-4o-mini-transcribe" def self.values: -> ::Array[OpenAI::Models::audio_model] end diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 3eef3ef4..fa2494d9 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -13,7 +13,7 @@ module OpenAI object: :"chat.completion.chunk", service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage + usage: OpenAI::Models::CompletionUsage? } class ChatCompletionChunk < OpenAI::BaseModel @@ -33,11 +33,7 @@ module OpenAI def system_fingerprint=: (String) -> String - attr_reader usage: OpenAI::Models::CompletionUsage? - - def usage=: ( - OpenAI::Models::CompletionUsage - ) -> OpenAI::Models::CompletionUsage + attr_accessor usage: OpenAI::Models::CompletionUsage? 
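The switch to a nilable `usage` accessor above reflects streaming behavior: when `stream_options: {include_usage: true}` is requested, every chunk carries `usage: nil` except the final one. A sketch, assuming a `#create_streaming` helper on chat completions analogous to the transcription one in this patch:

```ruby
stream = client.chat.completions.create_streaming(
  model: "gpt-4o",
  messages: [{role: :user, content: "Say hi"}],
  stream_options: {include_usage: true}
)

stream.each do |chunk|
  # `usage` is nil on all but the last chunk, hence the nilable type.
  puts("total tokens: #{chunk.usage.total_tokens}") unless chunk.usage.nil?
end
```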
def initialize: ( id: String, @@ -46,7 +42,7 @@ module OpenAI model: String, ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, ?system_fingerprint: String, - ?usage: OpenAI::Models::CompletionUsage, + ?usage: OpenAI::Models::CompletionUsage?, ?object: :"chat.completion.chunk" ) -> void diff --git a/sig/openai/resources/audio/speech.rbs b/sig/openai/resources/audio/speech.rbs index 65002d04..7e7d117c 100644 --- a/sig/openai/resources/audio/speech.rbs +++ b/sig/openai/resources/audio/speech.rbs @@ -6,6 +6,7 @@ module OpenAI input: String, model: OpenAI::Models::Audio::SpeechCreateParams::model, voice: OpenAI::Models::Audio::SpeechCreateParams::voice, + ?instructions: String, ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, ?speed: Float, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 9ee728bb..b52531d0 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -5,6 +5,7 @@ module OpenAI def create: ( file: IO | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, @@ -13,6 +14,18 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response + def create_streaming: ( + file: IO | StringIO, + model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?include: ::Array[OpenAI::Models::Audio::transcription_include], + ?language: String, + ?prompt: String, + ?response_format: OpenAI::Models::audio_response_format, + ?temperature: Float, + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Stream[OpenAI::Models::Audio::transcription_stream_event] + def initialize: (client: OpenAI::Client) -> void end end diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index f4c61adf..25ea82e3 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -6,7 +6,7 @@ module OpenAI file: IO | StringIO, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, - ?response_format: OpenAI::Models::audio_response_format, + ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, ?temperature: Float, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::translation_create_response From cf9f7a00a5d8a9df5dd4c0e26f8449a9fd2e49ef Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 17:59:25 +0000 Subject: [PATCH 042/295] fix(client): remove duplicate types (#47) --- lib/openai/models/all_models.rb | 8 +++----- rbi/lib/openai/models/all_models.rbi | 2 +- sig/openai/models/all_models.rbs | 10 +++++----- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index b306f178..edf0c8df 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -8,12 +8,10 @@ class AllModels < OpenAI::Union variant enum: -> { OpenAI::Models::ChatModel } - variant enum: -> { OpenAI::Models::ChatModel } - - variant enum: -> { 
OpenAI::Models::AllModels::UnionMember4 } + variant enum: -> { OpenAI::Models::AllModels::UnionMember2 } # @abstract - class UnionMember4 < OpenAI::Enum + class UnionMember2 < OpenAI::Enum O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" @@ -24,7 +22,7 @@ class UnionMember4 < OpenAI::Enum # @!parse # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::UnionMember4)] + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::UnionMember2)] # def variants; end # end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index d124a099..75d67db0 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -7,7 +7,7 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - class UnionMember4 < OpenAI::Enum + class UnionMember2 < OpenAI::Enum abstract! Value = type_template(:out) { {fixed: Symbol} } diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 7abb928a..817c97a9 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,25 +3,25 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | OpenAI::Models::AllModels::union_member4 + | OpenAI::Models::AllModels::union_member2 class AllModels < OpenAI::Union - type union_member4 = + type union_member2 = :"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" - class UnionMember4 < OpenAI::Enum + class UnionMember2 < OpenAI::Enum O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self.values: -> ::Array[OpenAI::Models::AllModels::union_member4] + def self.values: -> ::Array[OpenAI::Models::AllModels::union_member2] end - def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::chat_model, OpenAI::Models::AllModels::union_member4] + def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::AllModels::union_member2] end end end From 3613ccd014e18ebe1a8a9305404e40428b316f7e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 21:29:04 +0000 Subject: [PATCH 043/295] feat!: use tagged enums in sorbet type definitions (#49) --- Rakefile | 21 +- lib/openai/base_model.rb | 388 +++++++++--------- lib/openai/models/all_models.rb | 10 +- .../models/audio/speech_create_params.rb | 18 +- lib/openai/models/audio/speech_model.rb | 5 +- .../audio/transcription_create_params.rb | 11 +- .../audio/transcription_create_response.rb | 6 +- .../models/audio/transcription_include.rb | 5 +- .../audio/transcription_stream_event.rb | 6 +- .../models/audio/translation_create_params.rb | 12 +- .../audio/translation_create_response.rb | 5 +- lib/openai/models/audio_model.rb | 5 +- lib/openai/models/audio_response_format.rb | 6 +- lib/openai/models/batch.rb | 6 +- lib/openai/models/batch_create_params.rb | 12 +- .../models/beta/assistant_create_params.rb | 6 +- .../models/beta/assistant_list_params.rb | 6 +- .../beta/assistant_response_format_option.rb | 6 +- .../models/beta/assistant_stream_event.rb | 6 +- lib/openai/models/beta/assistant_tool.rb | 5 +- .../models/beta/assistant_tool_choice.rb | 6 +- 
.../beta/assistant_tool_choice_option.rb | 12 +- .../models/beta/assistant_update_params.rb | 6 +- lib/openai/models/beta/file_search_tool.rb | 6 +- .../models/beta/message_stream_event.rb | 6 +- .../models/beta/run_step_stream_event.rb | 6 +- lib/openai/models/beta/run_stream_event.rb | 6 +- .../beta/thread_create_and_run_params.rb | 34 +- .../models/beta/thread_create_params.rb | 17 +- lib/openai/models/beta/threads/annotation.rb | 6 +- .../models/beta/threads/annotation_delta.rb | 6 +- lib/openai/models/beta/threads/image_file.rb | 6 +- .../models/beta/threads/image_file_delta.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 6 +- lib/openai/models/beta/threads/message.rb | 23 +- .../models/beta/threads/message_content.rb | 6 +- .../beta/threads/message_content_delta.rb | 6 +- .../threads/message_content_part_param.rb | 6 +- .../beta/threads/message_create_params.rb | 17 +- .../models/beta/threads/message_delta.rb | 6 +- .../beta/threads/message_list_params.rb | 6 +- lib/openai/models/beta/threads/run.rb | 18 +- .../models/beta/threads/run_create_params.rb | 29 +- .../models/beta/threads/run_list_params.rb | 6 +- lib/openai/models/beta/threads/run_status.rb | 6 +- .../runs/code_interpreter_tool_call.rb | 6 +- .../runs/code_interpreter_tool_call_delta.rb | 6 +- .../threads/runs/file_search_tool_call.rb | 12 +- .../models/beta/threads/runs/run_step.rb | 24 +- .../beta/threads/runs/run_step_delta.rb | 6 +- .../beta/threads/runs/run_step_include.rb | 5 +- .../beta/threads/runs/step_list_params.rb | 6 +- .../models/beta/threads/runs/tool_call.rb | 6 +- .../beta/threads/runs/tool_call_delta.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 12 +- ...chat_completion_assistant_message_param.rb | 12 +- .../chat/chat_completion_audio_param.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 24 +- .../chat/chat_completion_content_part.rb | 6 +- .../chat_completion_content_part_image.rb | 6 +- ...hat_completion_content_part_input_audio.rb | 6 +- ...chat_completion_developer_message_param.rb | 6 +- .../chat/chat_completion_message_param.rb | 6 +- .../models/chat/chat_completion_modality.rb | 5 +- .../chat_completion_prediction_content.rb | 6 +- .../models/chat/chat_completion_role.rb | 6 +- .../chat_completion_system_message_param.rb | 6 +- .../chat_completion_tool_choice_option.rb | 12 +- .../chat_completion_tool_message_param.rb | 6 +- .../chat_completion_user_message_param.rb | 6 +- .../models/chat/completion_create_params.rb | 47 +-- .../models/chat/completion_list_params.rb | 6 +- .../chat/completions/message_list_params.rb | 6 +- lib/openai/models/chat_model.rb | 5 +- lib/openai/models/comparison_filter.rb | 12 +- lib/openai/models/completion_choice.rb | 6 +- lib/openai/models/completion_create_params.rb | 18 +- lib/openai/models/compound_filter.rb | 12 +- lib/openai/models/embedding_create_params.rb | 18 +- lib/openai/models/embedding_model.rb | 5 +- lib/openai/models/file_chunking_strategy.rb | 6 +- .../models/file_chunking_strategy_param.rb | 6 +- lib/openai/models/file_list_params.rb | 6 +- lib/openai/models/file_object.rb | 12 +- lib/openai/models/file_purpose.rb | 6 +- .../models/fine_tuning/fine_tuning_job.rb | 72 ++-- .../fine_tuning/fine_tuning_job_event.rb | 12 +- .../models/fine_tuning/job_create_params.rb | 72 ++-- .../models/image_create_variation_params.rb | 18 +- lib/openai/models/image_edit_params.rb | 18 +- lib/openai/models/image_generate_params.rb | 30 +- lib/openai/models/image_model.rb | 5 +- 
lib/openai/models/moderation.rb | 65 +-- lib/openai/models/moderation_create_params.rb | 12 +- lib/openai/models/moderation_model.rb | 5 +- .../models/moderation_multi_modal_input.rb | 6 +- lib/openai/models/reasoning.rb | 6 +- lib/openai/models/reasoning_effort.rb | 6 +- lib/openai/models/responses/computer_tool.rb | 6 +- .../models/responses/easy_input_message.rb | 18 +- .../models/responses/file_search_tool.rb | 12 +- .../responses/input_item_list_params.rb | 6 +- lib/openai/models/responses/response.rb | 18 +- .../response_code_interpreter_tool_call.rb | 12 +- .../responses/response_computer_tool_call.rb | 24 +- ...response_computer_tool_call_output_item.rb | 6 +- .../models/responses/response_content.rb | 6 +- .../response_content_part_added_event.rb | 6 +- .../response_content_part_done_event.rb | 6 +- .../responses/response_create_params.rb | 18 +- lib/openai/models/responses/response_error.rb | 6 +- .../response_file_search_tool_call.rb | 11 +- .../responses/response_format_text_config.rb | 6 +- .../responses/response_function_tool_call.rb | 6 +- ...response_function_tool_call_output_item.rb | 6 +- .../responses/response_function_web_search.rb | 6 +- .../models/responses/response_includable.rb | 6 +- .../models/responses/response_input_audio.rb | 6 +- .../responses/response_input_content.rb | 6 +- .../models/responses/response_input_image.rb | 6 +- .../models/responses/response_input_item.rb | 36 +- .../responses/response_input_message_item.rb | 18 +- lib/openai/models/responses/response_item.rb | 6 +- .../models/responses/response_output_item.rb | 6 +- .../responses/response_output_message.rb | 12 +- .../models/responses/response_output_text.rb | 6 +- .../responses/response_reasoning_item.rb | 6 +- .../models/responses/response_status.rb | 6 +- .../models/responses/response_stream_event.rb | 6 +- .../response_text_annotation_delta_event.rb | 6 +- lib/openai/models/responses/tool.rb | 6 +- .../models/responses/tool_choice_options.rb | 6 +- .../models/responses/tool_choice_types.rb | 6 +- .../models/responses/web_search_tool.rb | 12 +- lib/openai/models/responses_model.rb | 10 +- lib/openai/models/upload.rb | 6 +- lib/openai/models/vector_store.rb | 6 +- lib/openai/models/vector_store_list_params.rb | 6 +- .../models/vector_store_search_params.rb | 17 +- .../models/vector_store_search_response.rb | 11 +- .../vector_stores/file_batch_create_params.rb | 5 +- .../file_batch_list_files_params.rb | 12 +- .../vector_stores/file_create_params.rb | 5 +- .../models/vector_stores/file_list_params.rb | 12 +- .../vector_stores/file_update_params.rb | 5 +- .../models/vector_stores/vector_store_file.rb | 17 +- .../vector_stores/vector_store_file_batch.rb | 6 +- rbi/lib/openai/base_model.rbi | 172 ++++---- rbi/lib/openai/models/all_models.rbi | 27 +- .../models/audio/speech_create_params.rbi | 88 ++-- rbi/lib/openai/models/audio/speech_model.rbi | 13 +- .../audio/transcription_create_params.rbi | 68 +-- .../audio/transcription_create_response.rbi | 4 +- .../models/audio/transcription_include.rbi | 9 +- .../audio/transcription_stream_event.rbi | 4 +- .../audio/translation_create_params.rbi | 52 ++- .../audio/translation_create_response.rbi | 4 +- rbi/lib/openai/models/audio_model.rbi | 13 +- .../openai/models/audio_response_format.rbi | 17 +- rbi/lib/openai/models/batch.rbi | 37 +- rbi/lib/openai/models/batch_create_params.rbi | 48 ++- .../models/beta/assistant_create_params.rbi | 28 +- .../models/beta/assistant_list_params.rbi | 22 +- .../beta/assistant_response_format_option.rbi | 4 +- 
.../models/beta/assistant_stream_event.rbi | 4 +- rbi/lib/openai/models/beta/assistant_tool.rbi | 4 +- .../models/beta/assistant_tool_choice.rbi | 36 +- .../beta/assistant_tool_choice_option.rbi | 28 +- .../models/beta/assistant_update_params.rbi | 113 +++-- .../openai/models/beta/file_search_tool.rbi | 46 ++- .../models/beta/message_stream_event.rbi | 4 +- .../models/beta/run_step_stream_event.rbi | 4 +- .../openai/models/beta/run_stream_event.rbi | 4 +- .../beta/thread_create_and_run_params.rbi | 136 ++++-- .../models/beta/thread_create_params.rbi | 31 +- .../openai/models/beta/threads/annotation.rbi | 4 +- .../models/beta/threads/annotation_delta.rbi | 4 +- .../openai/models/beta/threads/image_file.rbi | 27 +- .../models/beta/threads/image_file_delta.rbi | 31 +- .../openai/models/beta/threads/image_url.rbi | 27 +- .../models/beta/threads/image_url_delta.rbi | 30 +- .../openai/models/beta/threads/message.rbi | 94 +++-- .../models/beta/threads/message_content.rbi | 4 +- .../beta/threads/message_content_delta.rbi | 4 +- .../threads/message_content_part_param.rbi | 4 +- .../beta/threads/message_create_params.rbi | 31 +- .../models/beta/threads/message_delta.rbi | 23 +- .../beta/threads/message_list_params.rbi | 23 +- rbi/lib/openai/models/beta/threads/run.rbi | 155 +++++-- .../models/beta/threads/run_create_params.rbi | 154 +++++-- .../models/beta/threads/run_list_params.rbi | 23 +- .../openai/models/beta/threads/run_status.rbi | 25 +- .../runs/code_interpreter_tool_call.rbi | 4 +- .../runs/code_interpreter_tool_call_delta.rbi | 4 +- .../threads/runs/file_search_tool_call.rbi | 122 +++++- .../models/beta/threads/runs/run_step.rbi | 95 +++-- .../beta/threads/runs/run_step_delta.rbi | 4 +- .../beta/threads/runs/run_step_include.rbi | 13 +- .../beta/threads/runs/step_list_params.rbi | 34 +- .../threads/runs/step_retrieve_params.rbi | 11 +- .../models/beta/threads/runs/tool_call.rbi | 4 +- .../beta/threads/runs/tool_call_delta.rbi | 4 +- .../openai/models/chat/chat_completion.rbi | 58 +-- ...hat_completion_assistant_message_param.rbi | 8 +- .../chat/chat_completion_audio_param.rbi | 74 ++-- .../models/chat/chat_completion_chunk.rbi | 119 ++++-- .../chat/chat_completion_content_part.rbi | 4 +- .../chat_completion_content_part_image.rbi | 37 +- ...at_completion_content_part_input_audio.rbi | 44 +- ...hat_completion_developer_message_param.rbi | 4 +- .../chat/chat_completion_message_param.rbi | 4 +- .../models/chat/chat_completion_modality.rbi | 11 +- .../chat_completion_prediction_content.rbi | 4 +- .../models/chat/chat_completion_role.rbi | 19 +- .../chat_completion_system_message_param.rbi | 4 +- .../chat_completion_tool_choice_option.rbi | 27 +- .../chat_completion_tool_message_param.rbi | 4 +- .../chat_completion_user_message_param.rbi | 4 +- .../models/chat/completion_create_params.rbi | 225 +++++++--- .../models/chat/completion_list_params.rbi | 23 +- .../chat/completions/message_list_params.rbi | 39 +- rbi/lib/openai/models/chat_model.rbi | 98 ++--- rbi/lib/openai/models/comparison_filter.rbi | 44 +- rbi/lib/openai/models/completion_choice.rbi | 24 +- .../models/completion_create_params.rbi | 36 +- rbi/lib/openai/models/compound_filter.rbi | 37 +- .../openai/models/embedding_create_params.rbi | 44 +- rbi/lib/openai/models/embedding_model.rbi | 13 +- .../openai/models/file_chunking_strategy.rbi | 4 +- .../models/file_chunking_strategy_param.rbi | 4 +- rbi/lib/openai/models/file_create_params.rbi | 21 +- rbi/lib/openai/models/file_list_params.rbi | 22 +- rbi/lib/openai/models/file_object.rbi 
| 56 +-- rbi/lib/openai/models/file_purpose.rbi | 19 +- .../models/fine_tuning/fine_tuning_job.rbi | 95 +++-- .../fine_tuning/fine_tuning_job_event.rbi | 48 ++- .../models/fine_tuning/job_create_params.rbi | 93 +++-- .../models/image_create_variation_params.rbi | 64 +-- rbi/lib/openai/models/image_edit_params.rbi | 63 +-- .../openai/models/image_generate_params.rbi | 112 ++--- rbi/lib/openai/models/image_model.rbi | 11 +- rbi/lib/openai/models/moderation.rbi | 319 +++++++++----- .../models/moderation_create_params.rbi | 21 +- rbi/lib/openai/models/moderation_model.rbi | 16 +- .../models/moderation_multi_modal_input.rbi | 4 +- rbi/lib/openai/models/reasoning.rbi | 43 +- rbi/lib/openai/models/reasoning_effort.rbi | 13 +- .../openai/models/responses/computer_tool.rbi | 40 +- .../models/responses/easy_input_message.rbi | 52 ++- .../models/responses/file_search_tool.rbi | 40 +- .../responses/input_item_list_params.rbi | 23 +- rbi/lib/openai/models/responses/response.rbi | 145 +++++-- .../response_code_interpreter_tool_call.rbi | 37 +- .../responses/response_computer_tool_call.rbi | 107 +++-- ...esponse_computer_tool_call_output_item.rbi | 33 +- .../models/responses/response_content.rbi | 4 +- .../response_content_part_added_event.rbi | 4 +- .../response_content_part_done_event.rbi | 4 +- .../responses/response_create_params.rbi | 98 +++-- .../models/responses/response_error.rbi | 73 ++-- .../response_file_search_tool_call.rbi | 35 +- .../responses/response_format_text_config.rbi | 4 +- .../responses/response_function_tool_call.rbi | 49 ++- ...esponse_function_tool_call_output_item.rbi | 43 +- .../response_function_web_search.rbi | 40 +- .../models/responses/response_includable.rbi | 16 +- .../models/responses/response_input_audio.rbi | 35 +- .../responses/response_input_content.rbi | 4 +- .../models/responses/response_input_image.rbi | 45 +- .../models/responses/response_input_item.rbi | 153 ++++--- .../responses/response_input_message_item.rbi | 74 ++-- .../openai/models/responses/response_item.rbi | 4 +- .../models/responses/response_output_item.rbi | 4 +- .../responses/response_output_message.rbi | 29 +- .../models/responses/response_output_text.rbi | 4 +- .../responses/response_reasoning_item.rbi | 25 +- .../responses/response_retrieve_params.rbi | 19 +- .../models/responses/response_status.rbi | 15 +- .../responses/response_stream_event.rbi | 4 +- .../response_text_annotation_delta_event.rbi | 4 +- rbi/lib/openai/models/responses/tool.rbi | 4 +- .../models/responses/tool_choice_options.rbi | 13 +- .../models/responses/tool_choice_types.rbi | 29 +- .../models/responses/web_search_tool.rbi | 49 ++- rbi/lib/openai/models/responses_model.rbi | 28 +- rbi/lib/openai/models/upload.rbi | 26 +- .../openai/models/upload_create_params.rbi | 8 +- rbi/lib/openai/models/vector_store.rbi | 24 +- .../models/vector_store_list_params.rbi | 22 +- .../models/vector_store_search_params.rbi | 44 +- .../models/vector_store_search_response.rbi | 31 +- .../file_batch_create_params.rbi | 4 +- .../file_batch_list_files_params.rbi | 53 ++- .../vector_stores/file_create_params.rbi | 4 +- .../models/vector_stores/file_list_params.rbi | 50 ++- .../vector_stores/file_update_params.rbi | 4 +- .../vector_stores/vector_store_file.rbi | 75 ++-- .../vector_stores/vector_store_file_batch.rbi | 32 +- rbi/lib/openai/resources/audio/speech.rbi | 6 +- .../openai/resources/audio/transcriptions.rbi | 16 +- .../openai/resources/audio/translations.rbi | 4 +- rbi/lib/openai/resources/batches.rbi | 4 +- 
rbi/lib/openai/resources/beta/assistants.rbi | 10 +- rbi/lib/openai/resources/beta/threads.rbi | 18 +- .../resources/beta/threads/messages.rbi | 4 +- .../openai/resources/beta/threads/runs.rbi | 28 +- .../resources/beta/threads/runs/steps.rbi | 6 +- rbi/lib/openai/resources/chat/completions.rbi | 38 +- .../resources/chat/completions/messages.rbi | 2 +- rbi/lib/openai/resources/completions.rbi | 4 +- rbi/lib/openai/resources/embeddings.rbi | 4 +- rbi/lib/openai/resources/files.rbi | 4 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 2 +- rbi/lib/openai/resources/images.rbi | 22 +- rbi/lib/openai/resources/moderations.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 26 +- .../resources/responses/input_items.rbi | 2 +- rbi/lib/openai/resources/uploads.rbi | 2 +- rbi/lib/openai/resources/vector_stores.rbi | 2 +- .../resources/vector_stores/file_batches.rbi | 4 +- .../openai/resources/vector_stores/files.rbi | 4 +- sig/openai/base_model.rbs | 28 +- sig/openai/models/all_models.rbs | 8 +- .../models/audio/speech_create_params.rbs | 12 +- sig/openai/models/audio/speech_model.rbs | 4 +- .../audio/transcription_create_params.rbs | 8 +- .../audio/transcription_create_response.rbs | 4 +- .../models/audio/transcription_include.rbs | 4 +- .../audio/transcription_stream_event.rbs | 4 +- .../audio/translation_create_params.rbs | 8 +- .../audio/translation_create_response.rbs | 4 +- sig/openai/models/audio_model.rbs | 4 +- sig/openai/models/audio_response_format.rbs | 4 +- sig/openai/models/batch.rbs | 4 +- sig/openai/models/batch_create_params.rbs | 8 +- .../models/beta/assistant_create_params.rbs | 4 +- .../models/beta/assistant_list_params.rbs | 4 +- .../beta/assistant_response_format_option.rbs | 4 +- .../models/beta/assistant_stream_event.rbs | 4 +- sig/openai/models/beta/assistant_tool.rbs | 4 +- .../models/beta/assistant_tool_choice.rbs | 4 +- .../beta/assistant_tool_choice_option.rbs | 8 +- .../models/beta/assistant_update_params.rbs | 4 +- sig/openai/models/beta/file_search_tool.rbs | 4 +- .../models/beta/message_stream_event.rbs | 4 +- .../models/beta/run_step_stream_event.rbs | 4 +- sig/openai/models/beta/run_stream_event.rbs | 4 +- .../beta/thread_create_and_run_params.rbs | 24 +- .../models/beta/thread_create_params.rbs | 12 +- sig/openai/models/beta/threads/annotation.rbs | 4 +- .../models/beta/threads/annotation_delta.rbs | 4 +- sig/openai/models/beta/threads/image_file.rbs | 4 +- .../models/beta/threads/image_file_delta.rbs | 4 +- sig/openai/models/beta/threads/image_url.rbs | 4 +- .../models/beta/threads/image_url_delta.rbs | 4 +- sig/openai/models/beta/threads/message.rbs | 16 +- .../models/beta/threads/message_content.rbs | 4 +- .../beta/threads/message_content_delta.rbs | 4 +- .../threads/message_content_part_param.rbs | 4 +- .../beta/threads/message_create_params.rbs | 12 +- .../models/beta/threads/message_delta.rbs | 4 +- .../beta/threads/message_list_params.rbs | 4 +- sig/openai/models/beta/threads/run.rbs | 12 +- .../models/beta/threads/run_create_params.rbs | 20 +- .../models/beta/threads/run_list_params.rbs | 4 +- sig/openai/models/beta/threads/run_status.rbs | 4 +- .../runs/code_interpreter_tool_call.rbs | 4 +- .../runs/code_interpreter_tool_call_delta.rbs | 4 +- .../threads/runs/file_search_tool_call.rbs | 8 +- .../models/beta/threads/runs/run_step.rbs | 17 +- .../beta/threads/runs/run_step_delta.rbs | 5 +- .../threads/runs/run_step_delta_event.rbs | 1 - .../runs/run_step_delta_message_delta.rbs | 1 - .../beta/threads/runs/run_step_include.rbs | 7 +- 
.../beta/threads/runs/step_list_params.rbs | 4 +- .../models/beta/threads/runs/tool_call.rbs | 4 +- .../beta/threads/runs/tool_call_delta.rbs | 4 +- sig/openai/models/chat/chat_completion.rbs | 9 +- ...hat_completion_assistant_message_param.rbs | 9 +- .../models/chat/chat_completion_audio.rbs | 1 - .../chat/chat_completion_audio_param.rbs | 9 +- .../models/chat/chat_completion_chunk.rbs | 17 +- .../chat/chat_completion_content_part.rbs | 7 +- .../chat_completion_content_part_image.rbs | 5 +- ...at_completion_content_part_input_audio.rbs | 5 +- .../chat_completion_content_part_refusal.rbs | 1 - .../chat_completion_content_part_text.rbs | 1 - .../models/chat/chat_completion_deleted.rbs | 1 - ...hat_completion_developer_message_param.rbs | 5 +- .../chat_completion_function_call_option.rbs | 1 - ...chat_completion_function_message_param.rbs | 1 - .../models/chat/chat_completion_message.rbs | 1 - .../chat/chat_completion_message_param.rbs | 7 +- .../chat_completion_message_tool_call.rbs | 1 - .../models/chat/chat_completion_modality.rbs | 7 +- .../chat_completion_named_tool_choice.rbs | 1 - .../chat_completion_prediction_content.rbs | 5 +- .../chat/chat_completion_reasoning_effort.rbs | 6 +- .../models/chat/chat_completion_role.rbs | 7 +- .../chat/chat_completion_store_message.rbs | 1 - .../chat/chat_completion_stream_options.rbs | 1 - .../chat_completion_system_message_param.rbs | 5 +- .../chat/chat_completion_token_logprob.rbs | 1 - .../models/chat/chat_completion_tool.rbs | 1 - .../chat_completion_tool_choice_option.rbs | 11 +- .../chat_completion_tool_message_param.rbs | 5 +- .../chat_completion_user_message_param.rbs | 5 +- .../models/chat/completion_create_params.rbs | 32 +- .../models/chat/completion_list_params.rbs | 4 +- .../chat/completions/message_list_params.rbs | 4 +- sig/openai/models/chat_model.rbs | 4 +- sig/openai/models/comparison_filter.rbs | 8 +- sig/openai/models/completion_choice.rbs | 4 +- .../models/completion_create_params.rbs | 12 +- sig/openai/models/compound_filter.rbs | 8 +- sig/openai/models/embedding_create_params.rbs | 12 +- sig/openai/models/embedding_model.rbs | 4 +- sig/openai/models/file_chunking_strategy.rbs | 4 +- .../models/file_chunking_strategy_param.rbs | 4 +- sig/openai/models/file_list_params.rbs | 4 +- sig/openai/models/file_object.rbs | 8 +- sig/openai/models/file_purpose.rbs | 4 +- .../models/fine_tuning/fine_tuning_job.rbs | 49 ++- .../fine_tuning/fine_tuning_job_event.rbs | 9 +- .../fine_tuning_job_integration.rbs | 2 - .../fine_tuning_job_wandb_integration.rbs | 1 - ...ne_tuning_job_wandb_integration_object.rbs | 1 - .../models/fine_tuning/job_create_params.rbs | 48 ++- .../models/image_create_variation_params.rbs | 12 +- sig/openai/models/image_edit_params.rbs | 12 +- sig/openai/models/image_generate_params.rbs | 20 +- sig/openai/models/image_model.rbs | 4 +- sig/openai/models/moderation.rbs | 52 ++- .../models/moderation_create_params.rbs | 8 +- sig/openai/models/moderation_model.rbs | 4 +- .../models/moderation_multi_modal_input.rbs | 4 +- sig/openai/models/reasoning.rbs | 4 +- sig/openai/models/reasoning_effort.rbs | 4 +- sig/openai/models/responses/computer_tool.rbs | 4 +- .../models/responses/easy_input_message.rbs | 12 +- .../models/responses/file_search_tool.rbs | 8 +- .../responses/input_item_list_params.rbs | 4 +- sig/openai/models/responses/response.rbs | 12 +- .../response_code_interpreter_tool_call.rbs | 8 +- .../responses/response_computer_tool_call.rbs | 16 +- ...esponse_computer_tool_call_output_item.rbs | 4 +- 
 .../models/responses/response_content.rbs     |   4 +-
 .../response_content_part_added_event.rbs     |   4 +-
 .../response_content_part_done_event.rbs      |   4 +-
 .../responses/response_create_params.rbs      |  12 +-
 .../models/responses/response_error.rbs       |   4 +-
 .../response_file_search_tool_call.rbs        |   8 +-
 .../responses/response_format_text_config.rbs |   4 +-
 .../responses/response_function_tool_call.rbs |   4 +-
 ...esponse_function_tool_call_output_item.rbs |   4 +-
 .../response_function_web_search.rbs          |   4 +-
 .../models/responses/response_includable.rbs  |   4 +-
 .../models/responses/response_input_audio.rbs |   4 +-
 .../responses/response_input_content.rbs      |   4 +-
 .../models/responses/response_input_image.rbs |   4 +-
 .../models/responses/response_input_item.rbs  |  24 +-
 .../responses/response_input_message_item.rbs |  12 +-
 sig/openai/models/responses/response_item.rbs |   4 +-
 .../models/responses/response_item_list.rbs   |   1 -
 .../models/responses/response_output_item.rbs |   4 +-
 .../responses/response_output_message.rbs     |   8 +-
 .../models/responses/response_output_text.rbs |   4 +-
 .../responses/response_reasoning_item.rbs     |   4 +-
 .../models/responses/response_status.rbs      |   4 +-
 .../responses/response_stream_event.rbs       |   4 +-
 .../response_text_annotation_delta_event.rbs  |   4 +-
 sig/openai/models/responses/tool.rbs          |   4 +-
 .../models/responses/tool_choice_options.rbs  |   4 +-
 .../models/responses/tool_choice_types.rbs    |   4 +-
 .../models/responses/web_search_tool.rbs      |   8 +-
 sig/openai/models/responses_model.rbs         |   8 +-
 sig/openai/models/upload.rbs                  |   4 +-
 sig/openai/models/uploads/upload_part.rbs     |   1 -
 sig/openai/models/vector_store.rbs            |   4 +-
 .../models/vector_store_list_params.rbs       |   4 +-
 .../models/vector_store_search_params.rbs     |  12 +-
 .../models/vector_store_search_response.rbs   |   8 +-
 .../file_batch_create_params.rbs              |   4 +-
 .../file_batch_list_files_params.rbs          |   8 +-
 .../vector_stores/file_create_params.rbs      |   4 +-
 .../models/vector_stores/file_list_params.rbs |   8 +-
 .../vector_stores/file_update_params.rbs      |   4 +-
 .../vector_stores/vector_store_file.rbs       |  13 +-
 .../vector_stores/vector_store_file_batch.rbs |   5 +-
 .../vector_store_file_deleted.rbs             |   1 -
 test/openai/base_model_test.rb                |  24 +-
 492 files changed, 5673 insertions(+), 3492 deletions(-)

diff --git a/Rakefile b/Rakefile
index cde192ee..e97d158b 100644
--- a/Rakefile
+++ b/Rakefile
@@ -35,24 +35,25 @@ multitask(:syntax_tree) do
   inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? %w[-i''] : %w[-i]
   uuid = SecureRandom.uuid
 
-  # `syntax_tree` has trouble with `rbs`'s class aliases
+  # `syntax_tree` has trouble with `rbs`'s class & module aliases
   sed = xargs + %w[sed -E] + inplace + %w[-e]
-  # annotate class aliases with a unique comment
-  pre = sed + ["s/class ([^ ]+) = (.+$)/# #{uuid}\\n\\1: \\2/", "--"]
+  # annotate unprocessable aliases with a unique comment
+  pre = sed + ["s/(class|module) ([^ ]+) = (.+$)/# \\1 #{uuid}\\n\\2: \\3/", "--"]
   fmt = xargs + %w[stree write --plugin=rbs --]
 
-  # remove the unique comment and transform class aliases to type aliases
+  # remove the unique comment and transform unprocessable aliases to type aliases
   subst = <<~SED
-    s/# #{uuid}//
+    s/# (class|module) #{uuid}/\\1/
    t l1
    b
+
    : l1
-    n
-    s/([^ :]+): (.+$)/class \\1 = \\2/
+    N
+    s/\\n *([^:]+): (.+)$/ \\1 = \\2/
  SED
-  # 1. delete the unique comment
-  # 2. if deletion happened, branch to label `l1`, else continue
-  # 3. transform the class alias to a type alias at label `l1`
+  # for each line:
+  #   1. try to transform the unique comment into `class | module`; if successful, branch to label `l1`.
+  #   2. 
at label `l1`, join previously annotated line with `class | module` information. pst = sed + [subst, "--"] # transform class aliases to type aliases, which syntax tree has no trouble with diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 5a63f4fa..a1f9e29c 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -48,7 +48,7 @@ def type_info(spec) type_info(spec.slice(:const, :enum, :union).first&.last) in Proc spec - in OpenAI::Converter | Class | Symbol + in OpenAI::Converter | Module | Symbol -> { spec } in true | false -> { OpenAI::BooleanModel } @@ -81,7 +81,7 @@ def coerce(target, value) else value end - in Class + in Module case target in -> { _1 <= NilClass } nil @@ -144,7 +144,7 @@ def try_strict_coerce(target, value) else [false, false, 0] end - in Class + in Module case [target, value] in [-> { _1 <= NilClass }, _] [true, nil, value.nil? ? 1 : 0] @@ -276,8 +276,6 @@ def try_strict_coerce(value) # @api private # - # @abstract - # # A value from among a specified list of options. OpenAPI enum values map to Ruby # values in the SDK as follows: # @@ -317,74 +315,70 @@ def try_strict_coerce(value) # puts(chat_model) # end # ``` - class Enum - extend OpenAI::Converter + module Enum + include OpenAI::Converter - class << self - # All of the valid Symbol values for this enum. - # - # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) + # All of the valid Symbol values for this enum. + # + # @return [Array] + def values = (@values ||= constants.map { const_get(_1) }) - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values - end + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + private def finalize! 
= values # @param other [Object] # # @return [Boolean] - def self.===(other) = values.include?(other) + def ===(other) = values.include?(other) # @param other [Object] # # @return [Boolean] - def self.==(other) - other.is_a?(Class) && other <= OpenAI::Enum && other.values.to_set == values.to_set + def ==(other) + other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Enum) && other.values.to_set == values.to_set end - class << self - # @api private - # - # @param value [String, Symbol, Object] - # - # @return [Symbol, Object] - def coerce(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - val - else - value - end + # @api private + # + # @param value [String, Symbol, Object] + # + # @return [Symbol, Object] + def coerce(value) + case value + in Symbol | String if values.include?(val = value.to_sym) + val + else + value end + end - # @!parse - # # @api private - # # - # # @param value [Symbol, Object] - # # - # # @return [Symbol, Object] - # def dump(value) = super + # @!parse + # # @api private + # # + # # @param value [Symbol, Object] + # # + # # @return [Symbol, Object] + # def dump(value) = super - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - return [true, value, 1] if values.include?(value) + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + return [true, value, 1] if values.include?(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - [true, val, 1] + case value + in Symbol | String if values.include?(val = value.to_sym) + [true, val, 1] + else + case [value, values.first] + in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] + [false, true, 0] else - case [value, values.first] - in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] - [false, true, 0] - else - [false, false, 0] - end + [false, false, 0] end end end @@ -392,8 +386,6 @@ def try_strict_coerce(value) # @api private # - # @abstract - # # @example # ```ruby # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` @@ -422,89 +414,87 @@ def try_strict_coerce(value) # puts(chat_completion_content_part) # end # ``` - class Union - extend OpenAI::Converter + module Union + include OpenAI::Converter - class << self - # @api private - # - # All of the specified variant info for this union. - # - # @return [Array] - private def known_variants = (@known_variants ||= []) + # @api private + # + # All of the specified variant info for this union. + # + # @return [Array] + private def known_variants = (@known_variants ||= []) - # @api private - # - # @return [Array] - protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } - end + # @api private + # + # @return [Array] + protected def derefed_variants + @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + end - # All of the specified variants for this union. - # - # @return [Array] - def variants - derefed_variants.map(&:last) + # All of the specified variants for this union. 
+ # + # @return [Array] + def variants + derefed_variants.map(&:last) + end + + # @api private + # + # @param property [Symbol] + private def discriminator(property) + case property + in Symbol + @discriminator = property end + end - # @api private - # - # @param property [Symbol] - private def discriminator(property) - case property + # @api private + # + # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def variant(key, spec = nil) + variant_info = + case key in Symbol - @discriminator = property + [key, OpenAI::Converter.type_info(spec)] + in Proc | OpenAI::Converter | Module | Hash + [nil, OpenAI::Converter.type_info(key)] end - end - # @api private - # - # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def variant(key, spec = nil) - variant_info = - case key - in Symbol - [key, OpenAI::Converter.type_info(spec)] - in Proc | OpenAI::Converter | Class | Hash - [nil, OpenAI::Converter.type_info(key)] - end - - known_variants << variant_info - end + known_variants << variant_info + end - # @api private - # - # @param value [Object] - # - # @return [OpenAI::Converter, Class, nil] - private def resolve_variant(value) - case [@discriminator, value] - in [_, OpenAI::BaseModel] - value.class - in [Symbol, Hash] - key = - if value.key?(@discriminator) - value.fetch(@discriminator) - elsif value.key?((discriminator = @discriminator.to_s)) - value.fetch(discriminator) - end + # @api private + # + # @param value [Object] + # + # @return [OpenAI::Converter, Class, nil] + private def resolve_variant(value) + case [@discriminator, value] + in [_, OpenAI::BaseModel] + value.class + in [Symbol, Hash] + key = + if value.key?(@discriminator) + value.fetch(@discriminator) + elsif value.key?((discriminator = @discriminator.to_s)) + value.fetch(discriminator) + end - key = key.to_sym if key.is_a?(String) - _, resolved = known_variants.find { |k,| k == key } - resolved.nil? ? OpenAI::Unknown : resolved.call - else - nil - end + key = key.to_sym if key.is_a?(String) + _, resolved = known_variants.find { |k,| k == key } + resolved.nil? ? OpenAI::Unknown : resolved.call + else + nil end end @@ -514,7 +504,7 @@ def variants # @param other [Object] # # @return [Boolean] - def self.===(other) + def ===(other) known_variants.any? 
do |_, variant_fn| variant_fn.call === other end @@ -523,90 +513,88 @@ def self.===(other) # @param other [Object] # # @return [Boolean] - def self.==(other) - other.is_a?(Class) && other <= OpenAI::Union && other.derefed_variants == derefed_variants + def ==(other) + other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Union) && other.derefed_variants == derefed_variants end - class << self - # @api private - # - # @param value [Object] - # - # @return [Object] - def coerce(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.coerce(variant, value) - end + # @api private + # + # @param value [Object] + # + # @return [Object] + def coerce(value) + if (variant = resolve_variant(value)) + return OpenAI::Converter.coerce(variant, value) + end - matches = [] + matches = [] - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, _] - return coerced - in [false, true, score] - matches << [score, variant] - in [false, false, _] - nil - end + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, _] + return coerced + in [false, true, score] + matches << [score, variant] + in [false, false, _] + nil end + end + + _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } + variant.nil? ? value : OpenAI::Converter.coerce(variant, value) + end - _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } - variant.nil? ? value : OpenAI::Converter.coerce(variant, value) + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + if (variant = resolve_variant(value)) + return OpenAI::Converter.dump(variant, value) end - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (variant = resolve_variant(value)) + known_variants.each do |_, variant_fn| + variant = variant_fn.call + if variant === value return OpenAI::Converter.dump(variant, value) end - - known_variants.each do |_, variant_fn| - variant = variant_fn.call - if variant === value - return OpenAI::Converter.dump(variant, value) - end - end - value end + value + end - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - # TODO(ruby) this will result in super linear decoding behaviour for nested unions - # follow up with a decoding context that captures current strictness levels - if (variant = resolve_variant(value)) - return Converter.try_strict_coerce(variant, value) - end + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + # TODO(ruby) this will result in super linear decoding behaviour for nested unions + # follow up with a decoding context that captures current strictness levels + if (variant = resolve_variant(value)) + return Converter.try_strict_coerce(variant, value) + end - coercible = false - max_score = 0 + coercible = false + max_score = 0 - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, score] - return [true, coerced, score] - in [false, true, score] - coercible = true - max_score = [max_score, score].max - in [false, false, _] 
- nil - end + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, score] + return [true, coerced, score] + in [false, true, score] + coercible = true + max_score = [max_score, score].max + in [false, false, _] + nil end - - [false, coercible, max_score] end + + [false, coercible, max_score] end # rubocop:enable Style/CaseEquality @@ -953,7 +941,7 @@ def defaults = (@defaults ||= {}) private def add_field(name_sym, required:, type_info:, spec:) type_fn, info = case type_info - in Proc | Class | OpenAI::Converter + in Proc | Module | OpenAI::Converter [OpenAI::Converter.type_info({**spec, union: type_info}), spec] in Hash [OpenAI::Converter.type_info(type_info), type_info] @@ -1225,7 +1213,7 @@ def initialize(data = {}) type = self.class.fields[mapped]&.fetch(:type) stored = case [type, value] - in [Class, Hash] if type <= OpenAI::BaseModel + in [Module, Hash] if type <= OpenAI::BaseModel type.new(value) in [OpenAI::ArrayOf, Array] | [OpenAI::HashOf, Hash] type.coerce(value) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index edf0c8df..f11ccb01 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -2,16 +2,18 @@ module OpenAI module Models - # @abstract - class AllModels < OpenAI::Union + module AllModels + extend OpenAI::Union + variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::AllModels::UnionMember2 } - # @abstract - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 2477a4ca..b7efc98d 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -76,11 +76,11 @@ class SpeechCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. @@ -93,13 +93,13 @@ class Model < OpenAI::Union # end end - # @abstract - # # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY = :alloy ASH = :ash CORAL = :coral @@ -113,11 +113,11 @@ class Voice < OpenAI::Enum finalize! end - # @abstract - # # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
- class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + MP3 = :mp3 OPUS = :opus AAC = :aac diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 26aae9d6..b85a4f1e 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class SpeechModel < OpenAI::Enum + module SpeechModel + extend OpenAI::Enum + TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 3ff8c770..1ec3b0ac 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -131,12 +131,12 @@ class TranscriptionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). @@ -149,8 +149,9 @@ class Model < OpenAI::Union # end end - # @abstract - class TimestampGranularity < OpenAI::Enum + module TimestampGranularity + extend OpenAI::Enum + WORD = :word SEGMENT = :segment diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 2ad0d933..07d30d90 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Audio - # @abstract - # # Represents a transcription response returned by model, based on the provided # input. - class TranscriptionCreateResponse < OpenAI::Union + module TranscriptionCreateResponse + extend OpenAI::Union + # Represents a transcription response returned by model, based on the provided input. variant -> { OpenAI::Models::Audio::Transcription } diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index 97303675..fc9e716d 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class TranscriptionInclude < OpenAI::Enum + module TranscriptionInclude + extend OpenAI::Enum + LOGPROBS = :logprobs finalize! diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 4bddaa1b..be40014b 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -3,13 +3,13 @@ module OpenAI module Models module Audio - # @abstract - # # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. 
- class TranscriptionStreamEvent < OpenAI::Union + module TranscriptionStreamEvent + extend OpenAI::Union + discriminator :type # Emitted when there is an additional text delta. This is also the first event emitted when the transcription starts. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 0b31b58c..087b2d38 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -72,11 +72,11 @@ class TranslationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. @@ -89,11 +89,11 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 94020236..4d7cf38c 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class TranslationCreateResponse < OpenAI::Union + module TranslationCreateResponse + extend OpenAI::Union + variant -> { OpenAI::Models::Audio::Translation } variant -> { OpenAI::Models::Audio::TranslationVerbose } diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 88507173..cb25d87b 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class AudioModel < OpenAI::Enum + module AudioModel + extend OpenAI::Enum + WHISPER_1 = :"whisper-1" GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 9593d816..105ac628 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -2,12 +2,12 @@ module OpenAI module Models - # @abstract - # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - class AudioResponseFormat < OpenAI::Enum + module AudioResponseFormat + extend OpenAI::Enum + JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 91778516..dcf830ae 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -224,10 +224,10 @@ class Batch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The current status of the batch. 
- class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING = :validating FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 593eb7d2..35dc43cb 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -59,23 +59,23 @@ class BatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The time frame within which the batch should be processed. Currently only `24h` # is supported. - class CompletionWindow < OpenAI::Enum + module CompletionWindow + extend OpenAI::Enum + NUMBER_24H = :"24h" finalize! end - # @abstract - # # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - class Endpoint < OpenAI::Enum + module Endpoint + extend OpenAI::Enum + V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index f66edb15..9e1ef48e 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -157,14 +157,14 @@ class AssistantCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 2deef264..5586ebe7 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -67,11 +67,11 @@ class AssistantListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 1e0036f2..da77528c 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Specifies the format that the model must output. 
Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), @@ -25,7 +23,9 @@ module Beta # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - class AssistantResponseFormatOption < OpenAI::Union + module AssistantResponseFormatOption + extend OpenAI::Union + # `auto` is the default value variant const: :auto diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index fdc598d7..02fad932 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Represents an event emitted when streaming a Run. # # Each event in a server-sent events stream has an `event` and `data` property: @@ -25,7 +23,9 @@ module Beta # gracefully in your code. See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. - class AssistantStreamEvent < OpenAI::Union + module AssistantStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a new [thread](https://platform.openai.com/docs/api-reference/threads/object) is created. diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 0ea9bc07..ecd6b3db 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Beta - # @abstract - class AssistantTool < OpenAI::Union + module AssistantTool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 9aba0349..78dd540e 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -30,10 +30,10 @@ class AssistantToolChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the tool. If type is `function`, the function name must be set - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION = :function CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 62a2c795..560a4a80 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value # and means the model can pick between generating a message or calling one or more @@ -12,20 +10,22 @@ module Beta # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - class AssistantToolChoiceOption < OpenAI::Union + module AssistantToolChoiceOption + extend OpenAI::Union + # `none` means the model will not call any tools and instead generates a message. 
`auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools before responding to the user. variant enum: -> { OpenAI::Models::Beta::AssistantToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific tool. variant -> { OpenAI::Models::Beta::AssistantToolChoice } - # @abstract - # # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 494df652..4a6636f6 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -161,14 +161,14 @@ class AssistantUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group O3_MINI = :"o3-mini" diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 333f1770..102489f1 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -105,11 +105,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index b110bfed..5ea73994 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - class MessageStreamEvent < OpenAI::Union + module MessageStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created. diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 82c7266e..45e973df 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - class RunStepStreamEvent < OpenAI::Union + module RunStepStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created. 
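The hunks above convert `OpenAI::Enum` subclasses into modules that `extend OpenAI::Enum`, and pattern matching keeps working because `===` is now defined on the extended module itself. A minimal usage sketch, assuming the gem loads with `require "openai"` and using the `Order` enum from the `assistant_list_params.rb` hunk:

```ruby
require "openai" # assumed entry point for this SDK

order = :asc
case order
in OpenAI::Models::Beta::AssistantListParams::Order
  # matched: Order.===(:asc) delegates to `values.include?(:asc)`
  puts "valid sort order"
else
  puts "unknown sort order"
end
```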
diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index f8548343..f506c9e8 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - class RunStreamEvent < OpenAI::Union + module RunStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created. diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 7650d56b..60e8fa5b 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -210,13 +210,13 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -323,10 +323,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -343,15 +343,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -388,8 +388,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -724,8 +725,9 @@ class FileSearch < OpenAI::BaseModel end end - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::FileSearchTool } @@ -767,13 +769,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
- class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 734a53ad..4978a330 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -96,10 +96,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -116,15 +116,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -161,8 +161,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 3f0a547d..99110d38 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 6b22d5be..66fbd501 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class AnnotationDelta < OpenAI::Union + module AnnotationDelta + extend OpenAI::Union + discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. 
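For reference, the discriminated unions in these hunks all follow one shape. A sketch with hypothetical variant names (`FileCitationAnnotation` and `FilePathAnnotation` stand in for real models; only the union plumbing is from this patch), showing how `discriminator :type` lets `resolve_variant` route a hash payload by its `type` key:

```ruby
module ExampleAnnotation
  extend OpenAI::Union

  discriminator :type

  # The procs defer constant lookup until a variant is actually resolved.
  variant :file_citation, -> { FileCitationAnnotation }
  variant :file_path, -> { FilePathAnnotation }
end

# A Hash with `type: :file_path` resolves to `FilePathAnnotation`;
# an unrecognized `type` falls back to `OpenAI::Unknown` per `resolve_variant`.
```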
diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 2156325f..fab8e1fc 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -32,11 +32,11 @@ class ImageFile < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 81caa23f..e40aa711 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -36,11 +36,11 @@ class ImageFileDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 8c08ac9d..59dcae47 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -31,11 +31,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index ad6177d4..787dd26c 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -35,11 +35,11 @@ class ImageURLDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 0a6b9d1b..da2a8d75 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -172,8 +172,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } @@ -217,10 +218,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the message is incomplete. - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + CONTENT_FILTER = :content_filter MAX_TOKENS = :max_tokens RUN_CANCELLED = :run_cancelled @@ -231,21 +232,21 @@ class Reason < OpenAI::Enum end end - # @abstract - # # The entity that produced the message. One of `user` or `assistant`. 
- class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant finalize! end - # @abstract - # # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress INCOMPLETE = :incomplete COMPLETED = :completed diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 7e6e4698..40ea6954 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContent < OpenAI::Union + module MessageContent + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index bcf970c1..634b517c 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContentDelta < OpenAI::Union + module MessageContentDelta + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 74766529..4ae935f2 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContentPartParam < OpenAI::Union + module MessageContentPartParam + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 480f9a24..1fcbaa76 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -56,10 +56,10 @@ class MessageCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -76,15 +76,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. 
Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -121,8 +121,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index f3bc9993..46d5d46f 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -35,10 +35,10 @@ class MessageDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1a302d8c..816ed048 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -79,11 +79,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 02c20e73..2d7b18a8 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -305,11 +305,11 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens @@ -340,10 +340,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt @@ -424,13 +424,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
- class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index b6b3564f..f1e8d793 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -285,10 +285,10 @@ class AdditionalMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -305,15 +305,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -350,8 +350,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -383,13 +384,13 @@ class FileSearch < OpenAI::BaseModel end end - # @abstract - # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -430,13 +431,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index ce011119..fa0aee48 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -68,11 +68,11 @@ class RunListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index b85ac1e3..727fa009 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - class RunStatus < OpenAI::Enum + module RunStatus + extend OpenAI::Enum + QUEUED = :queued IN_PROGRESS = :in_progress REQUIRES_ACTION = :requires_action diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 0313a163..e21e1235 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -63,10 +63,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union + module Output + extend OpenAI::Union + discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 9e9097b3..12321ae7 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -86,10 +86,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union + module Output + extend OpenAI::Union + discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 9011d196..47fd4a37 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -93,11 +93,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 @@ -179,10 +179,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the content. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT = :text finalize! diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 4a7aa3e2..d54a639a 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -183,10 +183,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error` or `rate_limit_exceeded`. 
- class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded @@ -194,11 +194,11 @@ class Code < OpenAI::Enum end end - # @abstract - # # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress CANCELLED = :cancelled FAILED = :failed @@ -208,10 +208,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The details of the run step. - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + discriminator :type # Details of the message creation by the run step. @@ -227,10 +227,10 @@ class StepDetails < OpenAI::Union # end end - # @abstract - # # The type of run step, which can be either `message_creation` or `tool_calls`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index c8ba4aa1..d5fc45bb 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -25,10 +25,10 @@ class RunStepDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The details of the run step. - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + discriminator :type # Details of the message creation by the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index dd1c5381..32e98757 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -5,8 +5,9 @@ module Models module Beta module Threads module Runs - # @abstract - class RunStepInclude < OpenAI::Enum + module RunStepInclude + extend OpenAI::Enum + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 685e0e75..3a12f3fe 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -92,11 +92,11 @@ class StepListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 20cc29a6..c6e9367f 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs - # @abstract - # # Details of the Code Interpreter tool call the run step was involved in. - class ToolCall < OpenAI::Union + module ToolCall + extend OpenAI::Union + discriminator :type # Details of the Code Interpreter tool call the run step was involved in. 
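One behavioral consequence of the module rewrite is that enum equality is structural: per the `==` defined in the `base_model.rb` hunk above, two enum modules compare equal whenever their value sets match. A small sketch with hypothetical enums:

```ruby
# Hypothetical enums, defined only to illustrate the `==` semantics.
module ColorA
  extend OpenAI::Enum

  RED = :red
  GREEN = :green

  finalize!
end

module ColorB
  extend OpenAI::Enum

  GREEN = :green
  RED = :red

  finalize!
end

ColorA == ColorB      # => true: both resolve to the value set {:red, :green}
ColorA.equal?(ColorB) # => false: they remain distinct module objects
```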
diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index 3dae6b9b..f58ae752 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs - # @abstract - # # Details of the Code Interpreter tool call the run step was involved in. - class ToolCallDelta < OpenAI::Union + module ToolCallDelta + extend OpenAI::Union + discriminator :type # Details of the Code Interpreter tool call the run step was involved in. diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index b80607a6..fcb5196d 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -133,15 +133,15 @@ class Choice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -176,10 +176,10 @@ class Logprobs < OpenAI::BaseModel end end - # @abstract - # # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 5b2e877b..e9ee4909 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -106,11 +106,11 @@ class Audio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ArrayOfContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] @@ -120,11 +120,11 @@ class Content < OpenAI::Union # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. variant OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray - # @abstract - # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ArrayOfContentPart < OpenAI::Union + module ArrayOfContentPart + extend OpenAI::Union + discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). 
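The `Content` union in the assistant-message hunk above accepts either a plain `String` or an array of content parts. A hedged sketch of what coercion should accept after this change; the payload keys below are assumptions based on the variant comments, not taken from this patch:

```ruby
content = OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content

# String form coerces directly through the `String` variant.
content.coerce("plain text answer")

# Array form: elements route through the `ArrayOfContentPart` sub-union,
# which discriminates on `:type`. Keys here are assumed, not from the patch.
content.coerce([{type: :text, text: "part one"}, {type: :refusal, refusal: "no"}])
```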
diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 5d72b2de..5f1ca422 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -30,11 +30,11 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV = :wav MP3 = :mp3 FLAC = :flac @@ -44,11 +44,11 @@ class Format < OpenAI::Enum finalize! end - # @abstract - # # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY = :alloy ASH = :ash BALLAD = :ballad diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 5f0a0fef..fd4648fc 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -233,10 +233,10 @@ class FunctionCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The role of the author of this message. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + DEVELOPER = :developer SYSTEM = :system USER = :user @@ -324,10 +324,10 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The type of the tool. Currently, only `function` is supported. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION = :function finalize! @@ -335,15 +335,15 @@ class Type < OpenAI::Enum end end - # @abstract - # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -378,10 +378,10 @@ class Logprobs < OpenAI::BaseModel end end - # @abstract - # # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index fd7d3c2c..9b1d2144 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Chat - # @abstract - # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ChatCompletionContentPart < OpenAI::Union + module ChatCompletionContentPart + extend OpenAI::Union + discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). 
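Since `values` and `===` are now singleton methods on each enum module, membership checks read the same as before the refactor. A quick sketch against the `Format` enum from the `chat_completion_audio_param.rb` hunk above:

```ruby
fmt = OpenAI::Models::Chat::ChatCompletionAudioParam::Format

fmt === :mp3  # => true: `===` is `values.include?(other)`
fmt === "mp3" # => false: `values` holds Symbols, so a String never matches
fmt.values    # => the Symbol constants, e.g. [:wav, :mp3, :flac, ...]
```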
diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 9990262c..7b165ecb 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -51,11 +51,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 0ed3dff8..7e5f53ee 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -48,10 +48,10 @@ class InputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The format of the encoded audio data. Currently supports "wav" and "mp3". - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV = :wav MP3 = :mp3 diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 9e91e405..44c3742f 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the developer message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 811b7f8c..c8a3a17d 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Chat - # @abstract - # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. 
- class ChatCompletionMessageParam < OpenAI::Union + module ChatCompletionMessageParam + extend OpenAI::Union + discriminator :role # Developer-provided instructions that the model should follow, regardless of diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index 725b907d..eae25ae3 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Chat - # @abstract - class ChatCompletionModality < OpenAI::Enum + module ChatCompletionModality + extend OpenAI::Enum + TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index f9e93347..ed62e69d 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -30,12 +30,12 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 234d78a6..43666654 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Chat - # @abstract - # # The role of the author of a message - class ChatCompletionRole < OpenAI::Enum + module ChatCompletionRole + extend OpenAI::Enum + DEVELOPER = :developer SYSTEM = :system USER = :user diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index a0cc371d..ac99aca3 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the system message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 28f7750e..273236d8 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Chat - # @abstract - # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -14,19 +12,21 @@ module Chat # # `none` is the default when no tools are present. `auto` is the default if tools # are present. 
- class ChatCompletionToolChoiceOption < OpenAI::Union + module ChatCompletionToolChoiceOption + extend OpenAI::Union + # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. variant enum: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific function. variant -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice } - # @abstract - # # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index ebbca6aa..fc5f1903 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -31,10 +31,10 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the tool message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 4a1eee01..89d87d20 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -39,10 +39,10 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the user message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 49e175c1..c326a2d8 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -417,14 +417,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI @@ -440,8 +440,6 @@ class Model < OpenAI::Union # end end - # @abstract - # # @deprecated # # Deprecated in favor of `tool_choice`. @@ -458,19 +456,21 @@ class Model < OpenAI::Union # # `none` is the default when no functions are present. `auto` is the default if # functions are present. 
- class FunctionCall < OpenAI::Union + module FunctionCall + extend OpenAI::Union + # `none` means the model will not call a function and instead generates a message. `auto` means the model can pick between generating a message or calling a function. variant enum: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } # Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. variant -> { OpenAI::Models::Chat::ChatCompletionFunctionCallOption } - # @abstract - # # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. - class FunctionCallMode < OpenAI::Enum + module FunctionCallMode + extend OpenAI::Enum + NONE = :none AUTO = :auto @@ -530,16 +530,15 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - class Modality < OpenAI::Enum + module Modality + extend OpenAI::Enum + TEXT = :text AUDIO = :audio finalize! end - # @abstract - # # An object specifying the format that the model must output. # # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured @@ -550,7 +549,9 @@ class Modality < OpenAI::Enum # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormat < OpenAI::Union + module ResponseFormat + extend OpenAI::Union + # Default response format. Used to generate text responses. variant -> { OpenAI::Models::ResponseFormatText } @@ -571,8 +572,6 @@ class ResponseFormat < OpenAI::Union # end end - # @abstract - # # Specifies the latency tier to use for processing the request. This parameter is # relevant for customers subscribed to the scale tier service: # @@ -587,18 +586,20 @@ class ResponseFormat < OpenAI::Union # # When this parameter is set, the response body will include the `service_tier` # utilized. - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + AUTO = :auto DEFAULT = :default finalize! end - # @abstract - # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String @@ -645,11 +646,11 @@ class WebSearchOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 81936268..ce2187d8 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -69,11 +69,11 @@ class CompletionListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 66af6ada..7d45da51 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -50,11 +50,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 8edd5e1a..695dcb50 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ChatModel < OpenAI::Enum + module ChatModel + extend OpenAI::Enum + O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 17be219d..2b00e018 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -41,8 +41,6 @@ class ComparisonFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # - `eq`: equals @@ -51,7 +49,9 @@ class ComparisonFilter < OpenAI::BaseModel # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + EQ = :eq NE = :ne GT = :gt @@ -62,11 +62,11 @@ class Type < OpenAI::Enum finalize! end - # @abstract - # # The value to compare against the attribute key; supports string, number, or # boolean types. - class Value < OpenAI::Union + module Value + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 6237b56e..df42c59b 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -37,13 +37,13 @@ class CompletionChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length CONTENT_FILTER = :content_filter diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index c1c44876..cbdc341b 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -228,14 +228,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
- class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" @@ -259,15 +259,15 @@ class Model < OpenAI::Union # end end - # @abstract - # # The prompt(s) to generate completions for, encoded as a string, array of # strings, array of tokens, or array of token arrays. # # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. - class Prompt < OpenAI::Union + module Prompt + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -289,11 +289,11 @@ class Prompt < OpenAI::Union # end end - # @abstract - # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 11452bfe..4223ab0d 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -26,11 +26,11 @@ class CompoundFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - class Filter < OpenAI::Union + module Filter + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -43,10 +43,10 @@ class Filter < OpenAI::Union # end end - # @abstract - # # Type of operation: `and` or `or`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AND = :and OR = :or diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 4d126d18..e215c574 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -76,8 +76,6 @@ class EmbeddingCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for @@ -86,7 +84,9 @@ class EmbeddingCreateParams < OpenAI::BaseModel # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - class Input < OpenAI::Union + module Input + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -112,14 +112,14 @@ class Input < OpenAI::Union # end end - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. 
You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. @@ -132,11 +132,11 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). - class EncodingFormat < OpenAI::Enum + module EncodingFormat + extend OpenAI::Enum + FLOAT = :float BASE64 = :base64 diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 65247fdf..0eab075d 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class EmbeddingModel < OpenAI::Enum + module EmbeddingModel + extend OpenAI::Enum + TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 24c4dd7e..9e917e35 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -2,10 +2,10 @@ module OpenAI module Models - # @abstract - # # The strategy used to chunk the file. - class FileChunkingStrategy < OpenAI::Union + module FileChunkingStrategy + extend OpenAI::Union + discriminator :type variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObject } diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index d9e6a634..d1943074 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -2,11 +2,11 @@ module OpenAI module Models - # @abstract - # # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - class FileChunkingStrategyParam < OpenAI::Union + module FileChunkingStrategyParam + extend OpenAI::Union + discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 3a3c6b9b..ae6cf6ed 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -63,11 +63,11 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index f0476f5d..a89db46f 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -99,12 +99,12 @@ class FileObject < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - class Purpose < OpenAI::Enum + module Purpose + extend OpenAI::Enum + ASSISTANTS = :assistants ASSISTANTS_OUTPUT = :assistants_output BATCH = :batch @@ -116,13 +116,13 @@ class Purpose < OpenAI::Enum finalize! 
end - # @abstract - # # @deprecated # # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + UPLOADED = :uploaded PROCESSED = :processed ERROR = :error diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 8b4f9af2..49c2717d 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -2,13 +2,13 @@ module OpenAI module Models - # @abstract - # # The intended purpose of the uploaded file. One of: - `assistants`: Used in the # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - class FilePurpose < OpenAI::Enum + module FilePurpose + extend OpenAI::Enum + ASSISTANTS = :assistants BATCH = :batch FINE_TUNE = :"fine-tune" diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 68876089..cdfbcc5f 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -272,11 +272,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -288,11 +288,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -304,11 +304,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -321,11 +321,11 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING_FILES = :validating_files QUEUED = :queued RUNNING = :running @@ -459,11 +459,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -475,11 +475,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + variant const: :auto variant Float @@ -491,11 +491,11 @@ class Beta < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -507,11 +507,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -594,11 +594,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -610,11 +610,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -626,11 +626,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -644,10 +644,10 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 0688b7a8..207444b5 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -69,10 +69,10 @@ class FineTuningJobEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The log level of the event. - class Level < OpenAI::Enum + module Level + extend OpenAI::Enum + INFO = :info WARN = :warn ERROR = :error @@ -80,10 +80,10 @@ class Level < OpenAI::Enum finalize! end - # @abstract - # # The type of event. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message METRICS = :metrics diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 6900ac01..c1cbd747 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -142,11 +142,11 @@ class JobCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group BABBAGE_002 = :"babbage-002" @@ -222,11 +222,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
- class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -238,11 +238,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -254,11 +254,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -471,11 +471,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -487,11 +487,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + variant const: :auto variant Float @@ -503,11 +503,11 @@ class Beta < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -519,11 +519,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -606,11 +606,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -622,11 +622,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -638,11 +638,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -656,10 +656,10 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The type of method. Is either `supervised` or `dpo`. 
- class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index d4546f63..ff2e6112 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -70,11 +70,11 @@ class ImageCreateVariationParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -87,23 +87,23 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 14c8b8cf..523b95ca 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -101,11 +101,11 @@ class ImageEditParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -118,23 +118,23 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 577665bd..a4993e78 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -104,10 +104,10 @@ class ImageGenerateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. @@ -120,36 +120,36 @@ class Model < OpenAI::Union # end end - # @abstract - # # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. 
- class Quality < OpenAI::Enum + module Quality + extend OpenAI::Enum + STANDARD = :standard HD = :hd finalize! end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" @@ -159,13 +159,13 @@ class Size < OpenAI::Enum finalize! end - # @abstract - # # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. - class Style < OpenAI::Enum + module Style + extend OpenAI::Enum + VIVID = :vivid NATURAL = :natural diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index e49e6699..ce36cc6d 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ImageModel < OpenAI::Enum + module ImageModel + extend OpenAI::Enum + DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 0f3c5a90..2b0818e8 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -309,97 +309,110 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Harassment < OpenAI::Enum + module Harassment + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class HarassmentThreatening < OpenAI::Enum + module HarassmentThreatening + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Hate < OpenAI::Enum + module Hate + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class HateThreatening < OpenAI::Enum + module HateThreatening + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Illicit < OpenAI::Enum + module Illicit + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class IllicitViolent < OpenAI::Enum + module IllicitViolent + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class SelfHarm < OpenAI::Enum + module SelfHarm + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SelfHarmInstruction < OpenAI::Enum + module SelfHarmInstruction + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SelfHarmIntent < OpenAI::Enum + module SelfHarmIntent + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class Sexual < OpenAI::Enum + module Sexual + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SexualMinor < OpenAI::Enum + module SexualMinor + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Violence < OpenAI::Enum + module Violence + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! 
end - # @abstract - class ViolenceGraphic < OpenAI::Enum + module ViolenceGraphic + extend OpenAI::Enum + TEXT = :text IMAGE = :image diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index e0789618..89acded2 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -36,11 +36,11 @@ class ModerationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - class Input < OpenAI::Union + module Input + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] @@ -61,13 +61,13 @@ class Input < OpenAI::Union # end end - # @abstract - # # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The content moderation model you would like to use. Learn more in diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index 4089ad86..aad66a00 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ModerationModel < OpenAI::Enum + module ModerationModel + extend OpenAI::Enum + OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" TEXT_MODERATION_LATEST = :"text-moderation-latest" diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 47271a66..c6441173 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -2,10 +2,10 @@ module OpenAI module Models - # @abstract - # # An object describing an image to classify. - class ModerationMultiModalInput < OpenAI::Union + module ModerationMultiModalInput + extend OpenAI::Union + discriminator :type # An object describing an image to classify. diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index a4ec26de..cfe04696 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -37,14 +37,14 @@ class Reasoning < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - class GenerateSummary < OpenAI::Enum + module GenerateSummary + extend OpenAI::Enum + CONCISE = :concise DETAILED = :detailed diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index f1fee21d..e27540e1 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -2,15 +2,15 @@ module OpenAI module Models - # @abstract - # # **o-series models only** # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. 
Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - class ReasoningEffort < OpenAI::Enum + module ReasoningEffort + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 65a0097a..1359937f 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -41,10 +41,10 @@ class ComputerTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of computer environment to control. - class Environment < OpenAI::Enum + module Environment + extend OpenAI::Enum + MAC = :mac WINDOWS = :windows UBUNTU = :ubuntu diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index e4effc89..bd57a3e2 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -43,11 +43,11 @@ class EasyInputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + # A text input to the model. variant String @@ -62,11 +62,11 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant SYSTEM = :system @@ -75,10 +75,10 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index c2ba4177..28ded43b 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -62,10 +62,10 @@ class FileSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A filter to apply based on file attributes. - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -112,10 +112,10 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index e0cb2854..adee6e5b 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -63,13 +63,13 @@ class InputItemListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. 
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 8b979829..71264a0f 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -289,10 +289,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason why the response is incomplete. - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter @@ -300,12 +300,12 @@ class Reason < OpenAI::Enum end end - # @abstract - # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -330,8 +330,6 @@ class ToolChoice < OpenAI::Union # end end - # @abstract - # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -339,7 +337,9 @@ class ToolChoice < OpenAI::Union # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index e93e62cf..bf04793b 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -48,10 +48,10 @@ class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The output of a code interpreter tool call that is text. - class Result < OpenAI::Union + module Result + extend OpenAI::Union + discriminator :type # The output of a code interpreter tool call that is text. @@ -137,10 +137,10 @@ class File < OpenAI::BaseModel # end end - # @abstract - # # The status of the code interpreter tool call. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress INTERPRETING = :interpreting COMPLETED = :completed diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 20bd85ae..07da24be 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -58,10 +58,10 @@ class ResponseComputerToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A click action. - class Action < OpenAI::Union + module Action + extend OpenAI::Union + discriminator :type # A click action. @@ -130,11 +130,11 @@ class Click < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. 
- class Button < OpenAI::Enum + module Button + extend OpenAI::Enum + LEFT = :left RIGHT = :right WHEEL = :wheel @@ -438,11 +438,11 @@ class PendingSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -450,10 +450,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The type of the computer call. Always `computer_call`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + COMPUTER_CALL = :computer_call finalize! diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 252be325..33e5105d 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -94,11 +94,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 72456bac..ad933f5d 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Multi-modal input and output contents. - class ResponseContent < OpenAI::Union + module ResponseContent + extend OpenAI::Union + # A text input to the model. variant -> { OpenAI::Models::Responses::ResponseInputText } diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 1fa8be8b..a1fb23a0 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartAddedEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content part that was added. - class Part < OpenAI::Union + module Part + extend OpenAI::Union + discriminator :type # A text output from the model. diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 0911d697..968c86e1 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartDoneEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content part that is done. - class Part < OpenAI::Union + module Part + extend OpenAI::Union + discriminator :type # A text output from the model. 
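One practical effect of these conversions, worth noting before the next file: an `@abstract` class can still be instantiated by mistake, whereas a module has no `.new`, so the non-instantiable intent becomes structural. The plain-Ruby illustration below is independent of the SDK, and this reading of the motivation is an inference, not something stated in the patch.

class AbstractStatus      # old shape: class Status < OpenAI::Enum
end
AbstractStatus.new        # allowed, despite the @abstract doc tag

module Status             # new shape: module Status; extend OpenAI::Enum
  IN_PROGRESS = :in_progress
  COMPLETED   = :completed
end

begin
  Status.new
rescue NoMethodError
  # modules do not respond to .new, so misuse fails fast
end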
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 05ef2120..ebca56e3 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -242,8 +242,6 @@ class ResponseCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -253,7 +251,9 @@ class ResponseCreateParams < OpenAI::BaseModel # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - class Input < OpenAI::Union + module Input + extend OpenAI::Union + # A text input to the model, equivalent to a text input with the # `user` role. variant String @@ -269,12 +269,12 @@ class Input < OpenAI::Union # end end - # @abstract - # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -299,8 +299,6 @@ class ToolChoice < OpenAI::Union # end end - # @abstract - # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -308,7 +306,9 @@ class ToolChoice < OpenAI::Union # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 47dd8b4b..21b8b11b 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -26,10 +26,10 @@ class ResponseError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The error code for the response. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 8054ec60..11c8e566 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -52,11 +52,11 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the file search tool call. 
One of `in_progress`, `searching`, # `incomplete` or `failed`, - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed @@ -130,8 +130,9 @@ class Result < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 6749d6bc..81d6ac10 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # An object specifying the format that the model must output. # # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -18,7 +16,9 @@ module Responses # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormatTextConfig < OpenAI::Union + module ResponseFormatTextConfig + extend OpenAI::Union + discriminator :type # Default response format. Used to generate text responses. diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index b2b500c1..2cdffc5f 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -65,11 +65,11 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 503dee7b..283f9afa 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -50,11 +50,11 @@ class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 8afd6c8b..fbcf112a 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -35,10 +35,10 @@ class ResponseFunctionWebSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the web search tool call. 
- class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 2d318473..7eb0bd14 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # Specify additional output data to include in the model response. Currently # supported values are: # @@ -13,7 +11,9 @@ module Responses # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - class ResponseIncludable < OpenAI::Enum + module ResponseIncludable + extend OpenAI::Enum + FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 24201588..23539df8 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -33,10 +33,10 @@ class ResponseInputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The format of the audio data. Currently supported formats are `mp3` and `wav`. - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + MP3 = :mp3 WAV = :wav diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 8d6226d4..fd4e014d 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # A text input to the model. - class ResponseInputContent < OpenAI::Union + module ResponseInputContent + extend OpenAI::Union + discriminator :type # A text input to the model. diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index e9334fd0..e6d2c0ce 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -43,11 +43,11 @@ class ResponseInputImage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + HIGH = :high LOW = :low AUTO = :auto diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index de2477cf..adb90a0a 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -3,14 +3,14 @@ module OpenAI module Models module Responses - # @abstract - # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
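# (Illustrative aside, not part of the patch.) The enums converted above --
# ResponseIncludable, the input audio Format, the image Detail -- all keep
# plain-Symbol members, so request params can be built from the module
# constants or raw symbols interchangeably. Constant names come from the
# hunks above; the hash shape is an assumption for illustration.
include = [
  OpenAI::Models::Responses::ResponseIncludable::FILE_SEARCH_CALL_RESULTS
]
image_part = {detail: OpenAI::Models::Responses::ResponseInputImage::Detail::AUTO}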
- class ResponseInputItem < OpenAI::Union + module ResponseInputItem + extend OpenAI::Union + discriminator :type # A message input to the model with a role indicating instruction following @@ -106,10 +106,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user SYSTEM = :system DEVELOPER = :developer @@ -117,11 +117,11 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -129,10 +129,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! @@ -236,11 +236,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -303,11 +303,11 @@ class FunctionCallOutput < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 2a9dd999..4e29d6dd 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -55,10 +55,10 @@ class ResponseInputMessageItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user SYSTEM = :system DEVELOPER = :developer @@ -66,11 +66,11 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -78,10 +78,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index b3f4f86c..baeff0e5 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Content item used to generate a response. 
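# (Illustrative aside, not part of the patch.) `discriminator :type` above
# registers a key-to-variant table that coercion consults before picking a
# model class. A toy model of the dispatch this implies -- the table below
# is a stand-in for illustration, not the SDK's internal registry:
variants = {message: :Message, function_call_output: :FunctionCallOutput}
payload = {type: :message, role: :user, content: "hi"}
puts "payload routes to variant #{variants.fetch(payload[:type])}"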
- class ResponseItem < OpenAI::Union + module ResponseItem + extend OpenAI::Union + discriminator :type variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem } diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 2ed933fd..9bdb2a25 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # An output message from the model. - class ResponseOutputItem < OpenAI::Union + module ResponseOutputItem + extend OpenAI::Union + discriminator :type # An output message from the model. diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 1dee0300..d9ab6443 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -49,10 +49,10 @@ class ResponseOutputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A text output from the model. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + discriminator :type # A text output from the model. @@ -68,11 +68,11 @@ class Content < OpenAI::Union # end end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 175d5eda..a5027854 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -34,10 +34,10 @@ class ResponseOutputText < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A citation to a file. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index c4d60d3f..4c410d54 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -68,11 +68,11 @@ class Summary < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index e15a7a6e..3ec9d99f 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Responses - # @abstract - # # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. 
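# (Illustrative aside, not part of the patch.) `ResponseStatus` members
# from the hunk above stay raw Symbols at runtime, so they slot directly
# into ordinary flow control; the branching below is an assumed usage.
status = :completed
case status
in :completed | :failed | :incomplete then puts "terminal: #{status}"
in :in_progress then puts "still running"
end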
- class ResponseStatus < OpenAI::Enum + module ResponseStatus + extend OpenAI::Enum + COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 85fe7a25..73a45396 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Emitted when there is a partial audio response. - class ResponseStreamEvent < OpenAI::Union + module ResponseStreamEvent + extend OpenAI::Union + discriminator :type # Emitted when there is a partial audio response. diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 4d980266..a8dbcfba 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -65,10 +65,10 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A citation to a file. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 9093989e..c3d097ce 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - # @abstract - # # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type # A tool that searches for relevant content from uploaded files. diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index e72582b9..9ec1a734 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -13,7 +11,9 @@ module Responses # more tools. # # `required` means the model must call one or more tools. - class ToolChoiceOptions < OpenAI::Enum + module ToolChoiceOptions + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 3398854c..9d8e05e2 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -27,8 +27,6 @@ class ToolChoiceTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of hosted tool the model should to use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). 
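# (Illustrative aside, not part of the patch.) The converted
# `ToolChoiceOptions` module exposes :none / :auto / :required as plain
# Symbols, so a request can pass either the constant or the raw symbol.
# The assignment below is an assumed usage, not SDK code.
tool_choice = OpenAI::Models::Responses::ToolChoiceOptions::AUTO
raise "unexpected tool_choice" unless %i[none auto required].include?(tool_choice)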
# @@ -37,7 +35,9 @@ class ToolChoiceTypes < OpenAI::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FILE_SEARCH = :file_search WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 6a2acdc0..9341e708 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -42,24 +42,24 @@ class WebSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the web search tool. One of: # # - `web_search_preview` # - `web_search_preview_2025_03_11` - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + WEB_SEARCH_PREVIEW = :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 finalize! end - # @abstract - # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index d96fdbc5..c80cbfbd 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -2,16 +2,18 @@ module OpenAI module Models - # @abstract - class ResponsesModel < OpenAI::Union + module ResponsesModel + extend OpenAI::Union + variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::ResponsesModel::UnionMember2 } - # @abstract - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 0e6b2be5..507858a6 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -76,10 +76,10 @@ class Upload < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the Upload. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + PENDING = :pending COMPLETED = :completed CANCELLED = :cancelled diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 7593bb95..3c74e1d8 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -157,12 +157,12 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + EXPIRED = :expired IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index ee2a79cf..5effb4ea 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -66,11 +66,11 @@ class VectorStoreListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 30d37581..ea877db0 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -76,10 +76,10 @@ class VectorStoreSearchParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A query string for a search - class Query < OpenAI::Union + module Query + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String @@ -93,10 +93,10 @@ class Query < OpenAI::Union # end end - # @abstract - # # A filter to apply based on file attributes. - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -139,8 +139,9 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 233cdad0..ebb55560 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -50,8 +50,9 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float @@ -86,10 +87,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of content. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT = :text finalize! diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index e35057be..ad9ef46e 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -49,8 +49,9 @@ class FileBatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 40629c3c..20469fe9 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -84,10 +84,10 @@ class FileBatchListFilesParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -96,11 +96,11 @@ class Filter < OpenAI::Enum finalize! end - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
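# (Illustrative aside, not part of the patch.) The `Query` union converted
# above registers both a String variant and an OpenAI::ArrayOf[String]
# variant, so either shape below should coerce; this is assumed from the
# variant registrations and shown only for illustration.
single_query = "quarterly report"
multi_query  = ["quarterly report", "annual report"]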
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 3e7c7817..58fbaa08 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -49,8 +49,9 @@ class FileCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index c227d297..2f7ffbbc 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -78,10 +78,10 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -90,11 +90,11 @@ class Filter < OpenAI::Enum finalize! end - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 6b9100d4..c88b9a73 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -34,8 +34,9 @@ class FileUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 5aa034c1..c3d22aef 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -129,10 +129,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file @@ -141,12 +141,12 @@ class Code < OpenAI::Enum end end - # @abstract - # # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled @@ -155,8 +155,9 @@ class Status < OpenAI::Enum finalize! 
end - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index da82a1f6..a8628b40 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -101,11 +101,11 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 3312ef84..2fead0cf 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -170,132 +170,116 @@ module OpenAI # # We can therefore convert string values to Symbols, but can't convert other # values safely. - class Enum - extend OpenAI::Converter - - abstract! - - Value = type_template(:out) + module Enum + include OpenAI::Converter - class << self - # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[Value]) } - def values - end + # All of the valid Symbol values for this enum. + sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } + def values + end - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - sig { void } - private def finalize! - end + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + sig { void } + private def finalize! end sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other) + def ===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other) + def ==(other) end - class << self - # @api private - sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def coerce(value) - end + # @api private + sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def coerce(value) + end - # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value) - end + # @api private + sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) end end # @api private - class Union - extend OpenAI::Converter - - abstract! - - Variants = type_template(:out) + module Union + include OpenAI::Converter - class << self - # @api private - # - # All of the specified variant info for this union. - sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(Variants)]]) } - private def known_variants - end + # @api private + # + # All of the specified variant info for this union. 
+ sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Converter::Input)]]) } + private def known_variants + end - # @api private - sig { returns(T::Array[[T.nilable(Symbol), Variants]]) } - protected def derefed_variants - end + # @api private + sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + protected def derefed_variants + end - # All of the specified variants for this union. - sig { overridable.returns(T::Array[Variants]) } - def variants - end + # All of the specified variants for this union. + sig { overridable.returns(T::Array[T.anything]) } + def variants + end - # @api private - sig { params(property: Symbol).void } - private def discriminator(property) - end + # @api private + sig { params(property: Symbol).void } + private def discriminator(property) + end - # @api private - sig do - params( - key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants), - spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants) - ) - .void - end - private def variant(key, spec = nil) - end + # @api private + sig do + params( + key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything), + spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything) + ) + .void + end + private def variant(key, spec = nil) + end - # @api private - sig { params(value: T.anything).returns(T.nilable(Variants)) } - private def resolve_variant(value) - end + # @api private + sig { params(value: T.anything).returns(T.nilable(T.anything)) } + private def resolve_variant(value) end sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other) + def ===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other) + def ==(other) end - class << self - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def coerce(value) - end + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def coerce(value) + end - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 75d67db0..5fb326c8 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -2,20 +2,27 @@ module OpenAI module Models - class AllModels < OpenAI::Union - abstract! + module AllModels + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = + type_template(:out) do + { + fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::UnionMember2::OrSymbol) + } + end - class UnionMember2 < OpenAI::Enum - abstract! 
+ module UnionMember2 + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::UnionMember2) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::UnionMember2::TaggedSymbol) } - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::UnionMember2::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index e74cec3d..c7babe62 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -18,11 +18,14 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) + end def model=(_) end @@ -30,11 +33,14 @@ module OpenAI # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) } def voice end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + .returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + end def voice=(_) end @@ -50,11 +56,14 @@ module OpenAI # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
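# (Illustrative aside, not part of the patch.) The rbi hunks above replace
# the `Value = type_template` enums with two aliases per enum:
# `TaggedSymbol`, a Symbol branded with the enum module and used for
# outputs, and `OrSymbol`, any compatible Symbol, used for inputs. A
# minimal self-contained sketch of the pattern, assuming the
# sorbet-runtime gem; `ExampleEnum` and `:red` are made up for this sketch.
require "sorbet-runtime"

module ExampleEnum
  TaggedSymbol = T.type_alias { T.all(Symbol, ExampleEnum) }
  OrSymbol = T.type_alias { T.any(Symbol, TaggedSymbol) }

  RED = T.let(:red, ExampleEnum::OrSymbol) # runtime value is still just :red
end

puts ExampleEnum::RED # => red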
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + .returns(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + end def response_format=(_) end @@ -71,10 +80,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,10 +97,10 @@ module OpenAI .returns( { input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: OpenAI::RequestOptions } @@ -102,45 +111,48 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)} } end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - class Voice < OpenAI::Enum - abstract! + module Voice + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } - ALLOY = :alloy - ASH = :ash - CORAL = :coral - ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova - SAGE = :sage - SHIMMER = :shimmer + ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. - class ResponseFormat < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - MP3 = :mp3 - OPUS = :opus - AAC = :aac - FLAC = :flac - WAV = :wav - PCM = :pcm + module ResponseFormat + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } + + MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 5228e000..d375fcda 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -3,14 +3,15 @@ module OpenAI module Models module Audio - class SpeechModel < OpenAI::Enum - abstract! + module SpeechModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } - TTS_1 = :"tts-1" - TTS_1_HD = :"tts-1-hd" - GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" + TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::OrSymbol) + TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::OrSymbol) + GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 026f8b77..272b6fc5 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -20,11 +20,14 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + end def model=(_) end @@ -33,11 +36,14 @@ module OpenAI # model's confidence in the transcription. `logprobs` only works with # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) + end def include=(_) end @@ -67,11 +73,14 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. 
For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::AudioResponseFormat::OrSymbol) + .returns(OpenAI::Models::AudioResponseFormat::OrSymbol) + end def response_format=(_) end @@ -93,24 +102,31 @@ module OpenAI # Either or both of these options are supported: `word`, or `segment`. Note: There # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. - sig { returns(T.nilable(T::Array[Symbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + ) + end def timestamp_granularities end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + end def timestamp_granularities=(_) end sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -133,13 +149,13 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], request_options: OpenAI::RequestOptions } ) @@ -150,19 +166,23 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } end - class TimestampGranularity < OpenAI::Enum - abstract! 
+ module TimestampGranularity + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) } - WORD = :word - SEGMENT = :segment + WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) + SEGMENT = + T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index 814a204b..e6190f8a 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -5,8 +5,8 @@ module OpenAI module Audio # Represents a transcription response returned by model, based on the provided # input. - class TranscriptionCreateResponse < OpenAI::Union - abstract! + module TranscriptionCreateResponse + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 7a60b02a..95c1bbab 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -3,12 +3,13 @@ module OpenAI module Models module Audio - class TranscriptionInclude < OpenAI::Enum - abstract! + module TranscriptionInclude + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } - LOGPROBS = :logprobs + LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index 4c8cc6bc..4ca09f44 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -7,8 +7,8 @@ module OpenAI # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. - class TranscriptionStreamEvent < OpenAI::Union - abstract! + module TranscriptionStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index ce2e6e77..3e9eeb48 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -19,11 +19,14 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
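# (Illustrative aside, not part of the patch.) With the typed enums above,
# `timestamp_granularities` carries branded symbols that are still
# :word / :segment at runtime. Constant names come from the
# TimestampGranularity hunk; the array shape is an assumed usage.
granularities = [
  OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::WORD,
  OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::SEGMENT
]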
- sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + end def model=(_) end @@ -41,11 +44,14 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + .returns(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + end def response_format=(_) end @@ -65,9 +71,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -81,9 +87,9 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: OpenAI::RequestOptions } @@ -94,24 +100,28 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } end # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - class ResponseFormat < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - JSON = :json - TEXT = :text - SRT = :srt - VERBOSE_JSON = :verbose_json - VTT = :vtt + module ResponseFormat + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } + + JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + VERBOSE_JSON = + T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 61e32cae..2d27d701 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Audio - class TranslationCreateResponse < OpenAI::Union - abstract! + module TranslationCreateResponse + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index 917ce7d8..c17c0811 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -2,14 +2,15 @@ module OpenAI module Models - class AudioModel < OpenAI::Enum - abstract! + module AudioModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioModel::TaggedSymbol) } - WHISPER_1 = :"whisper-1" - GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" - GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" + WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::OrSymbol) + GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::OrSymbol) + GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 405da3e2..fc0693da 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -5,16 +5,17 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - class AudioResponseFormat < OpenAI::Enum - abstract! 
+ module AudioResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } - JSON = :json - TEXT = :text - SRT = :srt - VERBOSE_JSON = :verbose_json - VTT = :vtt + JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::OrSymbol) + TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::OrSymbol) + SRT = T.let(:srt, OpenAI::Models::AudioResponseFormat::OrSymbol) + VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::OrSymbol) + VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 7a7e54b3..75e90393 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -57,11 +57,13 @@ module OpenAI end # The current status of the batch. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Batch::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Batch::Status::TaggedSymbol).returns(OpenAI::Models::Batch::Status::TaggedSymbol) + end def status=(_) end @@ -193,7 +195,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: Symbol, + status: OpenAI::Models::Batch::Status::TaggedSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -245,7 +247,7 @@ module OpenAI endpoint: String, input_file_id: String, object: Symbol, - status: Symbol, + status: OpenAI::Models::Batch::Status::TaggedSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -266,19 +268,20 @@ module OpenAI end # The current status of the batch. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - VALIDATING = :validating - FAILED = :failed - IN_PROGRESS = :in_progress - FINALIZING = :finalizing - COMPLETED = :completed - EXPIRED = :expired - CANCELLING = :cancelling - CANCELLED = :cancelled + module Status + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Batch::Status::TaggedSymbol) } + + VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Batch::Status::TaggedSymbol) + FINALIZING = T.let(:finalizing, OpenAI::Models::Batch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Batch::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Batch::Status::TaggedSymbol) + CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol) end class Errors < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 50216257..62413988 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -8,11 +8,14 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. 
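# (Illustrative aside, not part of the patch.) `Batch#status` now returns
# a `Status::TaggedSymbol`; at runtime it is an ordinary Symbol, so
# set-membership checks keep working. Member names come from the
# Batch::Status hunk above; the choice of "terminal" states is illustrative.
status = :finalizing
terminal = %i[completed expired cancelled failed].include?(status)
puts(terminal ? "batch finished" : "batch in flight")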
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } def completion_window end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + .returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + end def completion_window=(_) end @@ -20,11 +23,14 @@ module OpenAI # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } def endpoint end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + .returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + end def endpoint=(_) end @@ -61,8 +67,8 @@ module OpenAI sig do params( - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -76,8 +82,8 @@ module OpenAI override .returns( { - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -89,27 +95,31 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. - class CompletionWindow < OpenAI::Enum - abstract! + module CompletionWindow + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } - NUMBER_24H = :"24h" + NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) end # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - class Endpoint < OpenAI::Enum - abstract! 
+ module Endpoint + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } - V1_RESPONSES = :"/v1/responses" - V1_CHAT_COMPLETIONS = :"/v1/chat/completions" - V1_EMBEDDINGS = :"/v1/embeddings" - V1_COMPLETIONS = :"/v1/completions" + V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_CHAT_COMPLETIONS = + T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 32cf935a..f7f3eec4 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -12,11 +12,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + end def model=(_) end @@ -68,11 +71,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -219,12 +225,12 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -267,12 +273,12 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -303,10 +309,10 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! 
+ module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index bb11fb1b..7d2b0fef 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -43,11 +43,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + end def order=(_) end @@ -56,7 +59,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -71,7 +74,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -81,13 +84,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index db97be12..d0c034e2 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -23,8 +23,8 @@ module OpenAI # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - class AssistantResponseFormatOption < OpenAI::Union - abstract! + module AssistantResponseFormatOption + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 7a366c59..31e96276 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -23,8 +23,8 @@ module OpenAI # gracefully in your code. See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. - class AssistantStreamEvent < OpenAI::Union - abstract! 
+ module AssistantStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index 313f24bc..6efa6193 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Beta - class AssistantTool < OpenAI::Union - abstract! + module AssistantTool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 84562850..9a1ac99e 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -5,11 +5,14 @@ module OpenAI module Beta class AssistantToolChoice < OpenAI::BaseModel # The type of the tool. If type is `function`, the function name must be set - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + .returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + end def type=(_) end @@ -27,24 +30,37 @@ module OpenAI # Specifies a tool the model should use. Use to force the model to call a specific # tool. sig do - params(type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction).returns(T.attached_class) + params( + type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Models::Beta::AssistantToolChoiceFunction + ) + .returns(T.attached_class) end def self.new(type:, function: nil) end - sig { override.returns({type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Models::Beta::AssistantToolChoiceFunction + } + ) + end def to_hash end # The type of the tool. If type is `function`, the function name must be set - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } - FUNCTION = :function - CODE_INTERPRETER = :code_interpreter - FILE_SEARCH = :file_search + FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + CODE_INTERPRETER = T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 552ca737..14fac828 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -10,23 +10,33 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - class AssistantToolChoiceOption < OpenAI::Union - abstract! 
+ module AssistantToolChoiceOption + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)} } + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + } + end # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. - class Auto < OpenAI::Enum - abstract! + module Auto + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index b4ca656b..595f430c 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -45,11 +45,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) + end def model=(_) end @@ -68,11 +71,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. 
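With the widened `Variants` in `AssistantToolChoiceOption` above, a `tool_choice` value may be either a bare mode symbol or a structured `AssistantToolChoice`. A sketch of both forms; the `AssistantToolChoiceFunction.new(name:)` constructor is assumed here, since its definition does not appear in this patch:

# Mode form: `none`, `auto`, or `required`, via the Auto enum (or a plain symbol).
tool_choice = OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::REQUIRED

# Structured form: force one specific tool, per the doc comment above.
tool_choice = OpenAI::Models::Beta::AssistantToolChoice.new(
  type: OpenAI::Models::Beta::AssistantToolChoice::Type::FUNCTION,
  function: OpenAI::Models::Beta::AssistantToolChoiceFunction.new(name: "my_function") # assumed constructor
)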
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -222,9 +228,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -270,9 +276,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -303,41 +309,62 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! - - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)} } + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + + O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O3_MINI_2025_01_31 = + T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O = T.let(:"gpt-4o", 
OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_11_20 = + T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_08_06 = + T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_05_13 = + T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_MINI_2024_07_18 = + T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_5_PREVIEW = T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_5_PREVIEW_2025_02_27 = + T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO_2024_04_09 = + T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0125_PREVIEW = + T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO_PREVIEW = + T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_1106_PREVIEW = + T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_VISION_PREVIEW = + T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_16K = + T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_0613 = + T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_1106 = + T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_0125 = + T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_16K_0613 = + T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index ce4b782a..310aab2b 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -103,11 +103,14 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
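Because `Model::Variants` above is `T.any(String, ...::Model::OrSymbol)`, an assistant update can name its model with a typed constant, a bare symbol, or a raw string. A sketch, assuming the keyword arguments of `AssistantUpdateParams.new` are optional as the sig suggests:

params = OpenAI::Models::Beta::AssistantUpdateParams.new(
  model: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O
)
# A raw string satisfies the union as well:
params = OpenAI::Models::Beta::AssistantUpdateParams.new(model: "gpt-4o-2024-08-06")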
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -117,23 +120,44 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { params(score_threshold: Float, ranker: Symbol).returns(T.attached_class) } + sig do + params( + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ) + .returns(T.attached_class) + end def self.new(score_threshold:, ranker: nil) end - sig { override.returns({score_threshold: Float, ranker: Symbol}) } + sig do + override + .returns( + { + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + } + ) + end def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - AUTO = :auto - DEFAULT_2024_08_21 = :default_2024_08_21 + module Ranker + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) } + + AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_08_21 = + T.let( + :default_2024_08_21, + OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ) end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 97821d4a..495664c0 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - class MessageStreamEvent < OpenAI::Union - abstract! + module MessageStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 6b1ae66d..51fa7db0 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - class RunStepStreamEvent < OpenAI::Union - abstract! + module RunStepStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 5642aefb..e18dd1f2 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - class RunStreamEvent < OpenAI::Union - abstract! 
+ module RunStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 3a5a42f8..b20ddd1b 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -72,11 +72,14 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + end def model=(_) end @@ -182,13 +185,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -286,7 +312,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -298,7 +324,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -344,7 +375,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -356,7 +387,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + 
OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -380,10 +416,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class Thread < OpenAI::BaseModel @@ -509,11 +545,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + end def role=(_) end @@ -557,7 +596,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -580,7 +619,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -590,8 +629,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -619,13 +658,17 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -707,8 +750,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -1186,8 +1229,8 @@ module OpenAI end end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -1206,11 +1249,14 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. 
When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) + .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) + end def type=(_) end @@ -1226,11 +1272,25 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } + sig do + params( + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ) + .returns(T.attached_class) + end def self.new(type:, last_messages: nil) end - sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end def to_hash end @@ -1238,13 +1298,17 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) } - AUTO = :auto - LAST_MESSAGES = :last_messages + AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) + LAST_MESSAGES = + T.let(:last_messages, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 77085982..e2c429ba 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -129,11 +129,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
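The `TruncationStrategy` sigs above pair the `Type` enum with an optional `last_messages` count. A construction sketch under those sigs:

strategy = OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy.new(
  type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::LAST_MESSAGES,
  last_messages: 10
)
# Per the `to_hash` sig above, at runtime this should round-trip to:
# { type: :last_messages, last_messages: 10 }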
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + .returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + end def role=(_) end @@ -175,7 +178,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -198,7 +201,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -208,8 +211,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -237,13 +240,15 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -325,8 +330,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 1351f46f..74967cee 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index a388880d..90db2a3c 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class AnnotationDelta < OpenAI::Union - abstract! 
+ module AnnotationDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 879b0320..61ad33c5 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -18,32 +18,39 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + end def detail=(_) end - sig { params(file_id: String, detail: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + .returns(T.attached_class) + end def self.new(file_id:, detail: nil) end - sig { override.returns({file_id: String, detail: Symbol}) } + sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 89c10a01..efac1cbe 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -7,11 +7,14 @@ module OpenAI class ImageFileDelta < OpenAI::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
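A usage sketch of the `ImageFile` constructor typed above; the file ID is a placeholder, not a real upload:

image = OpenAI::Models::Beta::Threads::ImageFile.new(
  file_id: "file-abc123", # hypothetical File upload ID
  detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::LOW # fewer tokens than `high`
)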
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + end def detail=(_) end @@ -26,24 +29,32 @@ module OpenAI def file_id=(_) end - sig { params(detail: Symbol, file_id: String).returns(T.attached_class) } + sig do + params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String) + .returns(T.attached_class) + end def self.new(detail: nil, file_id: nil) end - sig { override.returns({detail: Symbol, file_id: String}) } + sig do + override + .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) + end def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index e4aa98eb..032d894d 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -17,32 +17,39 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + end def detail=(_) end - sig { params(url: String, detail: Symbol).returns(T.attached_class) } + sig do + params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + .returns(T.attached_class) + end def self.new(url:, detail: nil) end - sig { override.returns({url: String, detail: Symbol}) } + sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) } def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - class Detail < OpenAI::Enum - abstract! 
+ module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index a300a59e..6e8e3245 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -7,11 +7,14 @@ module OpenAI class ImageURLDelta < OpenAI::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + end def detail=(_) end @@ -25,24 +28,31 @@ module OpenAI def url=(_) end - sig { params(detail: Symbol, url: String).returns(T.attached_class) } + sig do + params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String) + .returns(T.attached_class) + end def self.new(detail: nil, url: nil) end - sig { override.returns({detail: Symbol, url: String}) } + sig do + override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String}) + end def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 01311c44..96603146 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -141,11 +141,14 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. 
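Since `detail:` is typed as `Detail::OrSymbol`, both of the following typecheck; the URL is a placeholder:

# Branded constant...
image_url = OpenAI::Models::Beta::Threads::ImageURL.new(
  url: "https://example.com/cat.png", # placeholder URL
  detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::HIGH
)
# ...or a plain symbol, because OrSymbol is T.any(Symbol, TaggedSymbol).
image_url = OpenAI::Models::Beta::Threads::ImageURL.new(url: "https://example.com/cat.png", detail: :high)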
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + end def role=(_) end @@ -162,11 +165,14 @@ module OpenAI # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + end def status=(_) end @@ -200,9 +206,9 @@ module OpenAI incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), - role: Symbol, + role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, run_id: T.nilable(String), - status: Symbol, + status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, thread_id: String, object: Symbol ) @@ -247,9 +253,9 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, - role: Symbol, + role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, run_id: T.nilable(String), - status: Symbol, + status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, thread_id: String } ) @@ -336,8 +342,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -372,57 +378,75 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason the message is incomplete. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # On an incomplete message, details about why the message is incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason:) end - sig { override.returns({reason: Symbol}) } + sig do + override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) + end def to_hash end # The reason the message is incomplete. - class Reason < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - CONTENT_FILTER = :content_filter - MAX_TOKENS = :max_tokens - RUN_CANCELLED = :run_cancelled - RUN_EXPIRED = :run_expired - RUN_FAILED = :run_failed + module Reason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + MAX_TOKENS = + T.let(:max_tokens, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_CANCELLED = + T.let(:run_cancelled, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_EXPIRED = + T.let(:run_expired, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_FAILED = + T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) end end # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) end # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - INCOMPLETE = :incomplete - COMPLETED = :completed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 9e3c7f22..97ed19fe 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContent < OpenAI::Union - abstract! + module MessageContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index dce4489b..ec1c6ccc 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
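Fields such as `Message#status` above now return `Status::TaggedSymbol` rather than a bare `Symbol`; at runtime the values are still ordinary symbols, so existing comparisons keep working. A sketch against a hypothetical `message` object:

case message.status
when OpenAI::Models::Beta::Threads::Message::Status::IN_PROGRESS
  :poll_again # still running; polling strategy is up to the caller
when OpenAI::Models::Beta::Threads::Message::Status::INCOMPLETE
  warn message.incomplete_details&.reason # e.g. :max_tokens, per the Reason enum above
when OpenAI::Models::Beta::Threads::Message::Status::COMPLETED
  :done
end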
- class MessageContentDelta < OpenAI::Union - abstract! + module MessageContentDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index e4fb311c..effc6e6c 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContentPartParam < OpenAI::Union - abstract! + module MessageContentPartParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 6d8913ff..9c0c17d5 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -61,11 +61,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + end def role=(_) end @@ -107,7 +110,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -131,7 +134,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -142,8 +145,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -171,13 +174,15 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -259,8 +264,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! 
+ module Tool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 7883727c..b7bfc61c 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -49,11 +49,14 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + end def role=(_) end @@ -68,7 +71,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: Symbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol ) .returns(T.attached_class) end @@ -87,7 +90,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: Symbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol } ) end @@ -95,13 +98,15 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 4b93b0c8..d80e8d49 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -44,11 +44,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + end def order=(_) end @@ -66,7 +69,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -82,7 +85,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: OpenAI::RequestOptions } @@ -93,13 +96,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
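# Input-side sketch, assuming the MessageCreateParams::Role module above:
# request parameters are typed with `OrSymbol`, so the tagged constant and
# a bare Symbol are interchangeable at the call site.
role = OpenAI::Models::Beta::Threads::MessageCreateParams::Role::USER
role = :user # equally valid — OrSymbol unions in plain Symbol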
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 0ea43a0c..d1587a17 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -256,11 +256,14 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + end def status=(_) end @@ -281,13 +284,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -405,9 +431,14 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: Symbol, + status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -484,9 +515,14 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: Symbol, + status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -507,43 +543,57 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason: nil) end - sig { override.returns({reason: Symbol}) } + sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - class Reason < OpenAI::Enum - abstract! + module Reason + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } - MAX_COMPLETION_TOKENS = :max_completion_tokens - MAX_PROMPT_TOKENS = :max_prompt_tokens + MAX_COMPLETION_TOKENS = + T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + MAX_PROMPT_TOKENS = + T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) end end class LastError < OpenAI::BaseModel # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -557,23 +607,33 @@ module OpenAI end # The last error associated with this run. Will be `null` if there are no errors. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) + end def to_hash end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - class Code < OpenAI::Enum - abstract! 
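# Response-side counterpart: fields such as `Run#status` and
# `LastError#code` return `TaggedSymbol`s. A sketch against the Code
# members defined just below (`run` stands in for a retrieved run):
if (err = run.last_error)
  rate_limited = err.code == OpenAI::Models::Beta::Threads::Run::LastError::Code::RATE_LIMIT_EXCEEDED
  warn "run failed (#{err.code}): #{err.message}" unless rate_limited
end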
+ module Code + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - INVALID_PROMPT = :invalid_prompt + SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + INVALID_PROMPT = + T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) end end @@ -652,11 +712,14 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + end def type=(_) end @@ -672,11 +735,25 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } + sig do + params( + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + ) + .returns(T.attached_class) + end def self.new(type:, last_messages: nil) end - sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + } + ) + end def to_hash end @@ -684,13 +761,17 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } - AUTO = :auto - LAST_MESSAGES = :last_messages + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + LAST_MESSAGES = + T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index a54ec011..46e8ddc5 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -26,11 +26,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
- sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -112,11 +115,14 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + end def model=(_) end @@ -137,11 +143,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -223,13 +232,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. 
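# Hedged sketch of the widened parameters typed above: `model` now accepts
# any String or a ChatModel symbol, and `reasoning_effort` takes the
# ReasoningEffort symbols. The GPT_4O constant name is illustrative — the
# exact members live in the generated OpenAI::Models::ChatModel module.
model = OpenAI::Models::ChatModel::GPT_4O # or simply a plain String
reasoning_effort = :medium                # ReasoningEffort::OrSymbol admits bare symbols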
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -308,16 +340,16 @@ module OpenAI sig do params( assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -327,7 +359,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -370,16 +407,16 @@ module OpenAI .returns( { assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -389,7 +426,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -462,11 +504,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
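# The two shapes admitted by the widened `tool_choice` union above; the
# AssistantToolChoice constructor argument here is an assumption based on
# the model's fields elsewhere in the gem.
tool_choice = :auto # via AssistantToolChoiceOption::Auto::OrSymbol
tool_choice = OpenAI::Models::Beta::AssistantToolChoice.new(type: :file_search)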
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + end def role=(_) end @@ -516,7 +561,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -539,7 +584,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -549,8 +594,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -578,13 +623,17 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -666,8 +715,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -705,10 +754,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class TruncationStrategy < OpenAI::BaseModel @@ -716,11 +765,14 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + end def type=(_) end @@ -736,11 +788,25 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } + sig do + params( + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ) + .returns(T.attached_class) + end def self.new(type:, last_messages: nil) end - sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end def to_hash end @@ -748,13 +814,17 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) } - AUTO = :auto - LAST_MESSAGES = :last_messages + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + LAST_MESSAGES = + T.let(:last_messages, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 44ca10d2..39cc0ab8 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -44,11 +44,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + end def order=(_) end @@ -57,7 +60,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -72,7 +75,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -82,13 +85,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index 76ada3f1..a6246d8c 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -7,20 +7,21 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - class RunStatus < OpenAI::Enum - abstract! + module RunStatus + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } - QUEUED = :queued - IN_PROGRESS = :in_progress - REQUIRES_ACTION = :requires_action - CANCELLING = :cancelling - CANCELLED = :cancelled - FAILED = :failed - COMPLETED = :completed - INCOMPLETE = :incomplete - EXPIRED = :expired + QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + REQUIRES_ACTION = T.let(:requires_action, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + CANCELLING = T.let(:cancelling, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 2b204c7c..749ba465 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -143,8 +143,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union - abstract! + module Output + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 6e716d13..e369160c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -156,8 +156,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union - abstract! 
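# Illustrative polling sketch over the RunStatus members defined above. The
# `runs.retrieve` call and its signature are stand-ins for the generated
# resource method, not something this patch defines.
TERMINAL_STATUSES = [
  OpenAI::Models::Beta::Threads::RunStatus::COMPLETED,
  OpenAI::Models::Beta::Threads::RunStatus::FAILED,
  OpenAI::Models::Beta::Threads::RunStatus::CANCELLED,
  OpenAI::Models::Beta::Threads::RunStatus::EXPIRED,
  OpenAI::Models::Beta::Threads::RunStatus::INCOMPLETE
].freeze

until TERMINAL_STATUSES.include?(run.status)
  sleep(1)
  run = runs.retrieve(run.id, thread_id: run.thread_id)
end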
+ module Output + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index b4f818ee..13b22ad3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -108,11 +108,22 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. - sig { returns(Symbol) } + sig do + returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params( + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + .returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end def ranker=(_) end @@ -127,23 +138,53 @@ module OpenAI end # The ranking options for the file search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker:, score_threshold:) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + { + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + score_threshold: Float + } + ) + end def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - AUTO = :auto - DEFAULT_2024_08_21 = :default_2024_08_21 + module Ranker + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end + + AUTO = + T.let( + :auto, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + DEFAULT_2024_08_21 = + T.let( + :default_2024_08_21, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) end end @@ -231,29 +272,68 @@ module OpenAI end # The type of the content. 
- sig { returns(T.nilable(Symbol)) } + sig do + returns( + T.nilable( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + ) + end def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params( + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + .returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + end def type=(_) end - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params( + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + .returns(T.attached_class) + end def self.new(text: nil, type: nil) end - sig { override.returns({text: String, type: Symbol}) } + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + } + ) + end def to_hash end # The type of the content. - class Type < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - TEXT = :text + module Type + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + end + + TEXT = + T.let( + :text, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index d6fc02be..06e7495e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -120,11 +120,14 @@ module OpenAI # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + end def status=(_) end @@ -168,11 +171,14 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. 
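# Dispatch sketch for a retrieved run step, using the Status members and
# step-detail variants typed later in this file (`step` is a stand-in):
if step.status == OpenAI::Models::Beta::Threads::Runs::RunStep::Status::COMPLETED
  case step.step_details
  when OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails
    puts "step created a message"
  when OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails
    puts "step issued tool calls"
  end
end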
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + end def type=(_) end @@ -202,13 +208,13 @@ module OpenAI last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: Symbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: Symbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage), object: Symbol ) @@ -249,13 +255,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, run_id: String, - status: Symbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: Symbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage) } ) @@ -265,11 +271,14 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -284,42 +293,57 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns( + {code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String} + ) + end def to_hash end # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded + module Code + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + + SERVER_ERROR = + T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) end end # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - CANCELLED = :cancelled - FAILED = :failed - COMPLETED = :completed - EXPIRED = :expired + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) end # The details of the run step. - class StepDetails < OpenAI::Union - abstract! + module StepDetails + extend OpenAI::Union Variants = type_template(:out) do @@ -333,13 +357,16 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } - MESSAGE_CREATION = :message_creation - TOOL_CALLS = :tool_calls + MESSAGE_CREATION = + T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) end class Usage < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 6dca9ad3..cbc714ec 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -65,8 +65,8 @@ module OpenAI end # The details of the run step. - class StepDetails < OpenAI::Union - abstract! + module StepDetails + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 566dd76e..9e59ca4b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -5,13 +5,18 @@ module OpenAI module Beta module Threads module Runs - class RunStepInclude < OpenAI::Enum - abstract! 
+ module RunStepInclude + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = - :"step_details.tool_calls[*].file_search.results[*].content" + T.let( + :"step_details.tool_calls[*].file_search.results[*].content", + OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol + ) end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 73059bac..ef62a39d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -48,11 +48,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -68,11 +71,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + end def order=(_) end @@ -81,9 +87,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -98,9 +104,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -110,13 +116,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
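# Sketch of step-list parameters using the types above; the long dotted
# symbol is the single RunStepInclude member defined earlier, and the hash
# mirrors the StepListParams fields rather than a specific client call.
step_list_params = {
  include: [OpenAI::Models::Beta::Threads::Runs::RunStepInclude::STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT],
  order: :desc, # StepListParams::Order::OrSymbol admits bare symbols
  limit: 20
}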
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 71dc2e52..54eb0d50 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -32,11 +32,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -44,7 +47,7 @@ module OpenAI params( thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -58,7 +61,7 @@ module OpenAI { thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 2550325a..85c08734 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - class ToolCall < OpenAI::Union - abstract! + module ToolCall + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 37550da6..0043de5a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - class ToolCallDelta < OpenAI::Union - abstract! + module ToolCallDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 508ca188..d47b79d1 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -54,11 +54,14 @@ module OpenAI end # The service tier used for processing the request. 
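# The Union conversions above keep the same variant classes; a dispatch
# sketch over one entry of ToolCallsStepDetails#tool_calls. The
# CodeInterpreterToolCall name is inferred from the file path in this
# patch, and the handle_* methods are placeholders.
case tool_call
when OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall
  handle_code_interpreter(tool_call)
when OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall
  handle_file_search(tool_call)
end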
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) + end def service_tier=(_) end @@ -91,7 +94,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], created: Integer, model: String, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage, object: Symbol @@ -119,7 +122,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage } @@ -135,11 +138,14 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } def finish_reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + end def finish_reason=(_) end @@ -177,7 +183,7 @@ module OpenAI sig do params( - finish_reason: Symbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -191,7 +197,7 @@ module OpenAI override .returns( { - finish_reason: Symbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -207,16 +213,20 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - STOP = :stop - LENGTH = :length - TOOL_CALLS = :tool_calls - CONTENT_FILTER = :content_filter - FUNCTION_CALL = :function_call + module FinishReason + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + + STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + FUNCTION_CALL = + T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel @@ -270,13 +280,15 @@ module OpenAI end # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } - SCALE = :scale - DEFAULT = :default + SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index bdba736c..737d825e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -197,8 +197,8 @@ module OpenAI # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -223,8 +223,8 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ArrayOfContentPart < OpenAI::Union - abstract! + module ArrayOfContentPart + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 215adc55..2cd8fe2e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -6,64 +6,88 @@ module OpenAI class ChatCompletionAudioParam < OpenAI::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + end def format_=(_) end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. 
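# Sketch of consuming the FinishReason members defined above (`completion`
# stands in for an already-received OpenAI::Models::Chat::ChatCompletion):
choice = completion.choices.first
if choice.finish_reason == OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::LENGTH
  warn "completion truncated before a natural stop"
end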
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } def voice end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + end def voice=(_) end # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). - sig { params(format_: Symbol, voice: Symbol).returns(T.attached_class) } + sig do + params( + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + .returns(T.attached_class) + end def self.new(format_:, voice:) end - sig { override.returns({format_: Symbol, voice: Symbol}) } + sig do + override + .returns( + { + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + } + ) + end def to_hash end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - class Format < OpenAI::Enum - abstract! + module Format + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } - WAV = :wav - MP3 = :mp3 - FLAC = :flac - OPUS = :opus - PCM16 = :pcm16 + WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - class Voice < OpenAI::Enum - abstract! 
+ module Voice + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } - ALLOY = :alloy - ASH = :ash - BALLAD = :ballad - CORAL = :coral - ECHO = :echo - SAGE = :sage - SHIMMER = :shimmer - VERSE = :verse + ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 647695a9..f5530b98 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -56,11 +56,14 @@ module OpenAI end # The service tier used for processing the request. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) + end def service_tier=(_) end @@ -99,7 +102,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage), object: Symbol @@ -127,7 +130,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage) } @@ -155,11 +158,14 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. 
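# Construction sketch for the audio output parameters typed above. Note the
# trailing underscore on `format_` — presumably chosen to avoid colliding
# with Kernel#format — and that `OrSymbol` lets bare symbols through:
audio = OpenAI::Models::Chat::ChatCompletionAudioParam.new(
  format_: :mp3, # or OpenAI::Models::Chat::ChatCompletionAudioParam::Format::MP3
  voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY
)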
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } def finish_reason end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) + end def finish_reason=(_) end @@ -187,7 +193,7 @@ module OpenAI sig do params( delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(Symbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) ) @@ -201,7 +207,7 @@ module OpenAI .returns( { delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(Symbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) } @@ -243,11 +249,14 @@ module OpenAI end # The role of the author of this message. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + end def role=(_) end @@ -268,7 +277,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: Symbol, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) .returns(T.attached_class) @@ -283,7 +292,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: Symbol, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } ) @@ -325,16 +334,21 @@ module OpenAI end # The role of the author of this message. - class Role < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - DEVELOPER = :developer - SYSTEM = :system - USER = :user - ASSISTANT = :assistant - TOOL = :tool + module Role + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } + + DEVELOPER = + T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) end class ToolCall < OpenAI::BaseModel @@ -367,11 +381,14 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + end def type=(_) end @@ -380,7 +397,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: Symbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -394,7 +411,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: Symbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol } ) end @@ -433,12 +450,16 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) } - FUNCTION = :function + FUNCTION = + T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) end end end @@ -449,16 +470,22 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - STOP = :stop - LENGTH = :length - TOOL_CALLS = :tool_calls - CONTENT_FILTER = :content_filter - FUNCTION_CALL = :function_call + module FinishReason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } + + STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + TOOL_CALLS = + T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + FUNCTION_CALL = + T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel @@ -512,13 +539,15 @@ module OpenAI end # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } - SCALE = :scale - DEFAULT = :default + SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index bff95f2d..340e2a97 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -5,8 +5,8 @@ module OpenAI module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ChatCompletionContentPart < OpenAI::Union - abstract! + module ChatCompletionContentPart + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 78ef980f..01893dab 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -50,32 +50,49 @@ module OpenAI # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
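Because writer positions take the `OrSymbol` alias, both bare symbols and the generated constants type-check. A short sketch against the `ImageURL.new` signature above (the URL is a placeholder):

    require "openai"

    image_url = OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL.new(
      url: "https://example.com/receipt.png", # placeholder URL
      detail: :low                            # plain Symbol, accepted via Detail::OrSymbol
    )

    # The same call, spelled with the generated constant:
    image_url = OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL.new(
      url: "https://example.com/receipt.png",
      detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::LOW
    )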
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + end def detail=(_) end - sig { params(url: String, detail: Symbol).returns(T.attached_class) } + sig do + params( + url: String, + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + ) + .returns(T.attached_class) + end def self.new(url:, detail: nil) end - sig { override.returns({url: String, detail: Symbol}) } + sig do + override + .returns( + {url: String, detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol} + ) + end def to_hash end # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 87144715..0a14a898 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -52,30 +52,52 @@ module OpenAI end # The format of the encoded audio data. Currently supports "wav" and "mp3". - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + end def format_=(_) end - sig { params(data: String, format_: Symbol).returns(T.attached_class) } + sig do + params( + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + ) + .returns(T.attached_class) + end def self.new(data:, format_:) end - sig { override.returns({data: String, format_: Symbol}) } + sig do + override + .returns( + { + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + } + ) + end def to_hash end # The format of the encoded audio data. Currently supports "wav" and "mp3". - class Format < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - WAV = :wav - MP3 = :mp3 + module Format + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) } + + WAV = + T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + MP3 = + T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 7c1f2e46..935cbc88 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the developer message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 6d17e25c..7dbb72ea 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -6,8 +6,8 @@ module OpenAI # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - class ChatCompletionMessageParam < OpenAI::Union - abstract! + module ChatCompletionMessageParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 25501740..5e747ff9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -3,13 +3,14 @@ module OpenAI module Models module Chat - class ChatCompletionModality < OpenAI::Enum - abstract! + module ChatCompletionModality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } - TEXT = :text - AUDIO = :audio + TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) + AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 206b2990..9773677d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -52,8 +52,8 @@ module OpenAI # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - class Content < OpenAI::Union - abstract! 
+ module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index e2a2c70e..dd316075 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -4,17 +4,18 @@ module OpenAI module Models module Chat # The role of the author of a message - class ChatCompletionRole < OpenAI::Enum - abstract! + module ChatCompletionRole + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } - DEVELOPER = :developer - SYSTEM = :system - USER = :user - ASSISTANT = :assistant - TOOL = :tool - FUNCTION = :function + DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 9d79c62a..ca2dc2e7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the system message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index c9ad2647..9a79b1ff 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -12,23 +12,32 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. - class ChatCompletionToolChoiceOption < OpenAI::Union - abstract! + module ChatCompletionToolChoiceOption + extend OpenAI::Union Variants = - type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + } + end # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. - class Auto < OpenAI::Enum - abstract! 
+ module Auto + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 7c447076..6f5f249b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -59,8 +59,8 @@ module OpenAI end # The contents of the tool message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 53c7c3a7..dde9e769 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -120,8 +120,8 @@ module OpenAI end # The contents of the user message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index f0e77033..851c6948 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -64,11 +64,14 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + end def model=(_) end @@ -111,13 +114,32 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. 
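Either member of the widened union satisfies the new `function_call` signature. A hedged sketch, assuming a configured `client` (`OpenAI::Client.new`) and a hypothetical `get_weather` function:

    client.chat.completions.create(
      model: :"gpt-4o",
      messages: [{role: :user, content: "What's the weather in Oslo?"}],
      function_call: :auto # bare Symbol via FunctionCallMode::OrSymbol
    )
    # Forcing the (hypothetical) named function instead:
    #   function_call: OpenAI::Models::Chat::ChatCompletionFunctionCallOption.new(name: "get_weather")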
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) + ) + end def function_call end sig do - params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) - .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) + params( + _: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) + .returns( + T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) end def function_call=(_) end @@ -213,11 +235,14 @@ module OpenAI # this model generate both text and audio responses, you can use: # # `["text", "audio"]` - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } def modalities end - sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } + sig do + params(_: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) + .returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) + end def modalities=(_) end @@ -273,11 +298,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -351,11 +379,14 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) + end def service_tier=(_) end @@ -422,13 +453,32 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. 
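The same pattern applies to `tool_choice`: both values below satisfy the `ChatCompletionToolChoiceOption` union's `Variants`. A sketch with an assumed tool name (`lookup_order` is hypothetical, and the hash form of `function:` is assumed to coerce like other nested params in this SDK):

    required = OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::REQUIRED # plain :required at runtime
    named    = OpenAI::Models::Chat::ChatCompletionNamedToolChoice.new(
      function: {name: "lookup_order"} # hypothetical tool
    )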
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) - .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) + params( + _: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) + .returns( + T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) end def tool_choice=(_) end @@ -508,34 +558,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -594,34 +650,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], 
logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -639,10 +701,10 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end # Deprecated in favor of `tool_choice`. @@ -659,22 +721,34 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. - class FunctionCall < OpenAI::Union - abstract! + module FunctionCall + extend OpenAI::Union Variants = - type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + } + end # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. - class FunctionCallMode < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - NONE = :none - AUTO = :auto + module FunctionCallMode + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) } + + NONE = + T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) + AUTO = + T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) end end @@ -728,13 +802,15 @@ module OpenAI end end - class Modality < OpenAI::Enum - abstract! 
+ module Modality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } - TEXT = :text - AUDIO = :audio + TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) + AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) end # An object specifying the format that the model must output. @@ -747,8 +823,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormat < OpenAI::Union - abstract! + module ResponseFormat + extend OpenAI::Union Variants = type_template(:out) do @@ -776,19 +852,21 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } - AUTO = :auto - DEFAULT = :default + AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) end # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union - abstract! + module Stop + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } @@ -798,11 +876,18 @@ module OpenAI class WebSearchOptions < OpenAI::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(Symbol)) } + sig do + returns( + T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + ) + end def search_context_size end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + end def search_context_size=(_) end @@ -823,7 +908,7 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( - search_context_size: Symbol, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) ) .returns(T.attached_class) @@ -835,7 +920,7 @@ module OpenAI override .returns( { - search_context_size: Symbol, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) } ) @@ -845,14 +930,28 @@ module OpenAI # High level guidance for the amount of context window space to use for the # search. 
One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum - abstract! + module SearchContextSize + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + ) + end - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = + T.let(:low, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + MEDIUM = + T.let( + :medium, + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol + ) + HIGH = + T.let(:high, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index d4ce52ba..096a12f8 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -47,11 +47,14 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + .returns(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + end def order=(_) end @@ -61,7 +64,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -77,7 +80,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -87,13 +90,15 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 014bdbc9..d0cfdba1 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -28,11 +28,14 @@ module OpenAI # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
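As the updated signatures below show, `order:` now takes the `Order::OrSymbol` alias, so both spellings are valid per `MessageListParams.new`. A minimal sketch:

    require "openai"

    params = OpenAI::Models::Chat::Completions::MessageListParams.new(
      limit: 20,
      order: :desc # or OpenAI::Models::Chat::Completions::MessageListParams::Order::DESC
    )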
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + .returns(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + end def order=(_) end @@ -40,7 +43,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -49,27 +52,31 @@ module OpenAI end sig do - override.returns( - { - after: String, - limit: Integer, - order: Symbol, - request_options: OpenAI::RequestOptions - } - ) + override + .returns( + { + after: String, + limit: Integer, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 9ec815a5..20705fb9 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -2,54 +2,60 @@ module OpenAI module Models - class ChatModel < OpenAI::Enum - abstract! 
+ module ChatModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ChatModel::TaggedSymbol) } - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - O1_PREVIEW = :"o1-preview" - O1_PREVIEW_2024_09_12 = :"o1-preview-2024-09-12" - O1_MINI = :"o1-mini" - O1_MINI_2024_09_12 = :"o1-mini-2024-09-12" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_AUDIO_PREVIEW = :"gpt-4o-audio-preview" - GPT_4O_AUDIO_PREVIEW_2024_10_01 = :"gpt-4o-audio-preview-2024-10-01" - GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" - GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" - GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" - GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" - GPT_4O_MINI_SEARCH_PREVIEW = :"gpt-4o-mini-search-preview" - GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" - GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" - CHATGPT_4O_LATEST = :"chatgpt-4o-latest" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0301 = :"gpt-3.5-turbo-0301" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::OrSymbol) + O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::OrSymbol) + O1 = T.let(:o1, OpenAI::Models::ChatModel::OrSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::OrSymbol) + O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) + O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::OrSymbol) + O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW_2024_10_01 = + T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW_2024_12_17 = + T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + 
GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = + T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_SEARCH_PREVIEW = T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_SEARCH_PREVIEW_2025_03_11 = + T.let(:"gpt-4o-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = + T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) + CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_16K = T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index bbb84c5d..8ceb7ea4 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -20,11 +20,14 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - sig { returns(Symbol) } + sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::ComparisonFilter::Type::OrSymbol) + .returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) + end def type=(_) end @@ -41,12 +44,22 @@ module OpenAI # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. 
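A filter built against the typed enum stays source-compatible with bare symbols. A minimal sketch using the `ComparisonFilter.new` and `CompoundFilter.new` signatures shown below:

    require "openai"

    filter = OpenAI::Models::ComparisonFilter.new(
      key: "price",
      type: :lte, # or OpenAI::Models::ComparisonFilter::Type::LTE
      value: 42.0
    )
    combined = OpenAI::Models::CompoundFilter.new(filters: [filter], type: :and)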
sig do - params(key: String, type: Symbol, value: T.any(String, Float, T::Boolean)).returns(T.attached_class) + params( + key: String, + type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, + value: T.any(String, Float, T::Boolean) + ) + .returns(T.attached_class) end def self.new(key:, type:, value:) end - sig { override.returns({key: String, type: Symbol, value: T.any(String, Float, T::Boolean)}) } + sig do + override + .returns( + {key: String, type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean)} + ) + end def to_hash end @@ -58,23 +71,24 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } - EQ = :eq - NE = :ne - GT = :gt - GTE = :gte - LT = :lt - LTE = :lte + EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) end # The value to compare against the attribute key; supports string, number, or # boolean types. - class Value < OpenAI::Union - abstract! + module Value + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 93b41ebf..a03a5dd9 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -7,11 +7,14 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } def finish_reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + .returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + end def finish_reason=(_) end @@ -44,7 +47,7 @@ module OpenAI sig do params( - finish_reason: Symbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -58,7 +61,7 @@ module OpenAI override .returns( { - finish_reason: Symbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -72,14 +75,15 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - class FinishReason < OpenAI::Enum - abstract! 
+ module FinishReason + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } - STOP = :stop - LENGTH = :length - CONTENT_FILTER = :content_filter + STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index eb7a6ceb..4b26334b 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -11,11 +11,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) + end def model=(_) end @@ -247,7 +250,7 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -301,7 +304,7 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -337,14 +340,19 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)} } - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } - GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" - DAVINCI_002 = :"davinci-002" - BABBAGE_002 = :"babbage-002" + GPT_3_5_TURBO_INSTRUCT = + T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) end # The prompt(s) to generate completions for, encoded as a string, array of @@ -353,8 +361,8 @@ module OpenAI # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. - class Prompt < OpenAI::Union - abstract! + module Prompt + extend OpenAI::Union Variants = type_template(:out) do @@ -370,8 +378,8 @@ module OpenAI # Up to 4 sequences where the API will stop generating further tokens. 
The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union - abstract! + module Stop + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 6722f10d..6a79164a 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -17,42 +17,57 @@ module OpenAI end # Type of operation: `and` or `or`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::CompoundFilter::Type::OrSymbol) + .returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) + end def type=(_) end # Combine multiple filters using `and` or `or`. sig do - params(filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol) + params( + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + type: OpenAI::Models::CompoundFilter::Type::OrSymbol + ) .returns(T.attached_class) end def self.new(filters:, type:) end - sig { override.returns({filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol}) } + sig do + override + .returns( + { + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + type: OpenAI::Models::CompoundFilter::Type::OrSymbol + } + ) + end def to_hash end # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - class Filter < OpenAI::Union - abstract! + module Filter + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } end # Type of operation: `and` or `or`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } - AND = :and - OR = :or + AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::OrSymbol) + OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 3f30f067..bc8012b1 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -30,11 +30,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) + end def model=(_) end @@ -50,11 +53,14 @@ module OpenAI # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
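For embeddings, `encoding_format:` likewise accepts either form, and `model:` can be a raw string or one of the `EmbeddingModel` constants defined further below. A sketch against the `EmbeddingCreateParams.new` signature that follows:

    require "openai"

    params = OpenAI::Models::EmbeddingCreateParams.new(
      input: "The quick brown fox jumped over the lazy dog",
      model: OpenAI::Models::EmbeddingModel::TEXT_EMBEDDING_3_SMALL,
      encoding_format: :base64 # plain Symbol via EncodingFormat::OrSymbol
    )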
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } def encoding_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + .returns(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + end def encoding_format=(_) end @@ -72,9 +78,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,9 +94,9 @@ module OpenAI .returns( { input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: OpenAI::RequestOptions } @@ -107,8 +113,8 @@ module OpenAI # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -127,21 +133,23 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)} } end # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). - class EncodingFormat < OpenAI::Enum - abstract! + module EncodingFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } - FLOAT = :float - BASE64 = :base64 + FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 1bd2eac2..2b064f56 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -2,14 +2,15 @@ module OpenAI module Models - class EmbeddingModel < OpenAI::Enum - abstract! 
+ module EmbeddingModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingModel::TaggedSymbol) } - TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" - TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" - TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" + TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index aad68d3d..eb1f53b5 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # The strategy used to chunk the file. - class FileChunkingStrategy < OpenAI::Union - abstract! + module FileChunkingStrategy + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 07560304..211e8f69 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -4,8 +4,8 @@ module OpenAI module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - class FileChunkingStrategyParam < OpenAI::Union - abstract! + module FileChunkingStrategyParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index 98619f88..fbd7b9d0 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -19,18 +19,18 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose=(_) end sig do params( file: T.any(IO, StringIO), - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -39,13 +39,14 @@ module OpenAI end sig do - override.returns( - { - file: T.any(IO, StringIO), - purpose: Symbol, - request_options: OpenAI::RequestOptions - } - ) + override + .returns( + { + file: T.any(IO, StringIO), + purpose: OpenAI::Models::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 48b8106e..4af7b2fc 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -30,11 +30,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
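      # The same pattern applies to query-style params such as `order` below; a
      # minimal sketch, assuming the generated `client.files.list` method:
      #
      #   client.files.list(
      #     order: OpenAI::Models::FileListParams::Order::DESC, # or simply :desc
      #     limit: 20
      #   )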
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileListParams::Order::OrSymbol) + .returns(OpenAI::Models::FileListParams::Order::OrSymbol) + end def order=(_) end @@ -51,7 +54,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -66,7 +69,7 @@ module OpenAI { after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: OpenAI::RequestOptions } @@ -77,13 +80,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index b31df148..2918d2f0 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -51,21 +51,27 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileObject::Purpose::TaggedSymbol) + .returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) + end def purpose=(_) end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileObject::Status::TaggedSymbol) + .returns(OpenAI::Models::FileObject::Status::TaggedSymbol) + end def status=(_) end @@ -95,8 +101,8 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: Symbol, - status: Symbol, + purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, + status: OpenAI::Models::FileObject::Status::TaggedSymbol, expires_at: Integer, status_details: String, object: Symbol @@ -125,8 +131,8 @@ module OpenAI created_at: Integer, filename: String, object: Symbol, - purpose: Symbol, - status: Symbol, + purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, + status: OpenAI::Models::FileObject::Status::TaggedSymbol, expires_at: Integer, status_details: String } @@ -138,30 +144,32 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - class Purpose < OpenAI::Enum - abstract! 
+ module Purpose + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } - ASSISTANTS = :assistants - ASSISTANTS_OUTPUT = :assistants_output - BATCH = :batch - BATCH_OUTPUT = :batch_output - FINE_TUNE = :"fine-tune" - FINE_TUNE_RESULTS = :"fine-tune-results" - VISION = :vision + ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + BATCH = T.let(:batch, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + BATCH_OUTPUT = T.let(:batch_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FileObject::Purpose::TaggedSymbol) + FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) + VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Status::TaggedSymbol) } - UPLOADED = :uploaded - PROCESSED = :processed - ERROR = :error + UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) + PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) + ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 07bd11c5..77bba00f 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -6,17 +6,18 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - class FilePurpose < OpenAI::Enum - abstract! 
+ module FilePurpose + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FilePurpose::TaggedSymbol) } - ASSISTANTS = :assistants - BATCH = :batch - FINE_TUNE = :"fine-tune" - VISION = :vision - USER_DATA = :user_data - EVALS = :evals + ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::OrSymbol) + BATCH = T.let(:batch, OpenAI::Models::FilePurpose::OrSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::OrSymbol) + VISION = T.let(:vision, OpenAI::Models::FilePurpose::OrSymbol) + USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::OrSymbol) + EVALS = T.let(:evals, OpenAI::Models::FilePurpose::OrSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b4326f0b..f688b4b2 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -117,11 +117,14 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + end def status=(_) end @@ -218,7 +221,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: Symbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -268,7 +271,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: Symbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -381,24 +384,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -406,17 +409,20 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } - VALIDATING_FILES = :validating_files - QUEUED = :queued - RUNNING = :running - SUCCEEDED = :succeeded - FAILED = :failed - CANCELLED = :cancelled + VALIDATING_FILES = + T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + QUEUED = T.let(:queued, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + RUNNING = T.let(:running, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + SUCCEEDED = T.let(:succeeded, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) end class Method < OpenAI::BaseModel @@ -445,11 +451,14 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + end def type=(_) end @@ -458,7 +467,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -471,7 +480,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol } ) end @@ -576,32 +585,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union - abstract! + module Beta + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -694,24 +703,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! 
+ module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -719,13 +728,15 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } - SUPERVISED = :supervised - DPO = :dpo + SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index fb1daf42..7bedb937 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -23,11 +23,14 @@ module OpenAI end # The log level of the event. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } def level end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + end def level=(_) end @@ -59,11 +62,14 @@ module OpenAI end # The type of event. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + end def type=(_) end @@ -72,10 +78,10 @@ module OpenAI params( id: String, created_at: Integer, - level: Symbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, message: String, data: T.anything, - type: Symbol, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol, object: Symbol ) .returns(T.attached_class) @@ -89,11 +95,11 @@ module OpenAI { id: String, created_at: Integer, - level: Symbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, message: String, object: Symbol, data: T.anything, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol } ) end @@ -101,24 +107,28 @@ module OpenAI end # The log level of the event. - class Level < OpenAI::Enum - abstract! 
+ module Level + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } - INFO = :info - WARN = :warn - ERROR = :error + INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) end # The type of event. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } - MESSAGE = :message - METRICS = :metrics + MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 29a4f7bf..9142e294 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -9,11 +9,14 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) + end def model=(_) end @@ -139,7 +142,7 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -170,7 +173,7 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -188,15 +191,20 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Model < OpenAI::Union - abstract! 
+ module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)} } - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } - BABBAGE_002 = :"babbage-002" - DAVINCI_002 = :"davinci-002" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_4O_MINI = :"gpt-4o-mini" + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) end class Hyperparameters < OpenAI::BaseModel @@ -258,24 +266,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -416,11 +424,14 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + end def type=(_) end @@ -429,7 +440,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol ) .returns(T.attached_class) end @@ -442,7 +453,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol } ) end @@ -547,32 +558,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union - abstract! 
+ module Beta + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -667,24 +678,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -692,13 +703,15 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } - SUPERVISED = :supervised - DPO = :dpo + SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 285e3c70..09479fbe 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -18,11 +18,14 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -39,21 +42,27 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. 
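      # Nilable enum positions such as `response_format` and `size` below accept
      # the typed constant, a bare symbol, or `nil`. A sketch; the
      # `client.images.create_variation` call shape is an assumption:
      #
      #   client.images.create_variation(
      #     image: File.open("otter.png", "rb"),
      #     response_format: OpenAI::Models::ImageCreateVariationParams::ResponseFormat::B64_JSON,
      #     size: :"1024x1024" # Size::NUMBER_1024X1024 would also type-check
      #   )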
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) + end def size=(_) end @@ -71,10 +80,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,10 +97,10 @@ module OpenAI .returns( { image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -102,34 +111,37 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! + module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum - abstract! 
+ module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index b672e912..02a9dc31 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -39,11 +39,14 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -59,21 +62,27 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
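      # Because these enum members are plain symbols behind the new aliases, they
      # can be branched on with an ordinary `case`. A self-contained sketch; it
      # assumes `sorbet-runtime` and that this gem loads via `require "openai"`:
      #
      #   require "sorbet-runtime"
      #   require "openai"
      #
      #   class ImageMath
      #     extend T::Sig
      #
      #     # Maps an ImageEditParams size symbol to its pixel width.
      #     sig { params(size: OpenAI::Models::ImageEditParams::Size::OrSymbol).returns(Integer) }
      #     def self.pixel_width(size)
      #       case size
      #       when OpenAI::Models::ImageEditParams::Size::NUMBER_256X256 then 256
      #       when OpenAI::Models::ImageEditParams::Size::NUMBER_512X512 then 512
      #       else 1024
      #       end
      #     end
      #   end
      #
      #   ImageMath.pixel_width(:"512x512") # => 512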
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) + end def size=(_) end @@ -93,10 +102,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -122,10 +131,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -136,34 +145,36 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! + module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum - abstract! 
+ module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 7fccc54c..4754ce0e 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -17,11 +17,14 @@ module OpenAI end # The model to use for image generation. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -38,33 +41,42 @@ module OpenAI # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } def quality end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + .returns(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + end def quality=(_) end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) + end def size=(_) end @@ -72,11 +84,14 @@ module OpenAI # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
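      # Putting several of the migrated enums together; a hedged sketch of image
      # generation (the `client.images.generate` call shape is assumed):
      #
      #   client.images.generate(
      #     prompt: "a watercolor otter",
      #     model: OpenAI::Models::ImageModel::DALL_E_3,
      #     quality: OpenAI::Models::ImageGenerateParams::Quality::HD,
      #     style: :natural # equivalent to Style::NATURAL
      #   )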
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } def style end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) + end def style=(_) end @@ -94,12 +109,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -123,12 +138,12 @@ module OpenAI .returns( { prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -138,62 +153,67 @@ module OpenAI end # The model to use for image generation. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - class Quality < OpenAI::Enum - abstract! + module Quality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } - STANDARD = :standard - HD = :hd + STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! 
+ module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - class Size < OpenAI::Enum - abstract! + module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" - NUMBER_1792X1024 = :"1792x1024" - NUMBER_1024X1792 = :"1024x1792" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. - class Style < OpenAI::Enum - abstract! + module Style + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } - VIVID = :vivid - NATURAL = :natural + VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) + NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 6716a390..3cc4d5f4 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -2,13 +2,14 @@ module OpenAI module Models - class ImageModel < OpenAI::Enum - abstract! 
+ module ImageModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageModel::TaggedSymbol) } - DALL_E_2 = :"dall-e-2" - DALL_E_3 = :"dall-e-3" + DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::OrSymbol) + DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index c430dd8a..0ebd7ee8 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -265,138 +265,193 @@ module OpenAI class CategoryAppliedInputTypes < OpenAI::BaseModel # The applied input type(s) for the category 'harassment'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } def harassment end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + end def harassment=(_) end # The applied input type(s) for the category 'harassment/threatening'. - sig { returns(T::Array[Symbol]) } + sig do + returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + end def harassment_threatening end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params( + _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + end def harassment_threatening=(_) end # The applied input type(s) for the category 'hate'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } def hate end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) + end def hate=(_) end # The applied input type(s) for the category 'hate/threatening'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) } def hate_threatening end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + end def hate_threatening=(_) end # The applied input type(s) for the category 'illicit'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } def illicit end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) + end def illicit=(_) end # The applied input type(s) for the category 'illicit/violent'. 
- sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) } def illicit_violent end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + end def illicit_violent=(_) end # The applied input type(s) for the category 'self-harm'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } def self_harm end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) + end def self_harm=(_) end # The applied input type(s) for the category 'self-harm/instructions'. - sig { returns(T::Array[Symbol]) } + sig do + returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + end def self_harm_instructions end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params( + _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + end def self_harm_instructions=(_) end # The applied input type(s) for the category 'self-harm/intent'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) } def self_harm_intent end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + end def self_harm_intent=(_) end # The applied input type(s) for the category 'sexual'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } def sexual end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) + end def sexual=(_) end # The applied input type(s) for the category 'sexual/minors'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) } def sexual_minors end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + end def sexual_minors=(_) end # The applied input type(s) for the category 'violence'. 
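        # On the response side these arrays are narrowed to `TaggedSymbol`s, which
        # are still ordinary symbols such as :text and :image at runtime. A sketch,
        # assuming the `client.moderations.create` call shape:
        #
        #   result = client.moderations.create(input: "...").results.first
        #   if result.category_applied_input_types.harassment.include?(:text)
        #     # the harassment flag was driven by text input
        #   end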
- sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } def violence end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) + end def violence=(_) end # The applied input type(s) for the category 'violence/graphic'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) } def violence_graphic end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + end def violence_graphic=(_) end # A list of the categories along with the input type(s) that the score applies to. sig do params( - harassment: T::Array[Symbol], - harassment_threatening: T::Array[Symbol], - hate: T::Array[Symbol], - hate_threatening: T::Array[Symbol], - illicit: T::Array[Symbol], - illicit_violent: T::Array[Symbol], - self_harm: T::Array[Symbol], - self_harm_instructions: T::Array[Symbol], - self_harm_intent: T::Array[Symbol], - sexual: T::Array[Symbol], - sexual_minors: T::Array[Symbol], - violence: T::Array[Symbol], - violence_graphic: T::Array[Symbol] + harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] ) .returns(T.attached_class) end @@ -421,133 +476,177 @@ module OpenAI override .returns( { - harassment: T::Array[Symbol], - harassment_threatening: T::Array[Symbol], - hate: T::Array[Symbol], - hate_threatening: T::Array[Symbol], - illicit: T::Array[Symbol], - illicit_violent: T::Array[Symbol], - self_harm: T::Array[Symbol], - self_harm_instructions: T::Array[Symbol], - self_harm_intent: T::Array[Symbol], - sexual: T::Array[Symbol], - sexual_minors: T::Array[Symbol], - violence: T::Array[Symbol], - violence_graphic: T::Array[Symbol] + harassment: 
T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] } ) end def to_hash end - class Harassment < OpenAI::Enum - abstract! + module Harassment + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) end - class HarassmentThreatening < OpenAI::Enum - abstract! + module HarassmentThreatening + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) } - TEXT = :text + TEXT = + T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) end - class Hate < OpenAI::Enum - abstract! + module Hate + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) end - class HateThreatening < OpenAI::Enum - abstract! + module HateThreatening + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) end - class Illicit < OpenAI::Enum - abstract! 
+ module Illicit + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) end - class IllicitViolent < OpenAI::Enum - abstract! + module IllicitViolent + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) end - class SelfHarm < OpenAI::Enum - abstract! + module SelfHarm + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) end - class SelfHarmInstruction < OpenAI::Enum - abstract! + module SelfHarmInstruction + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = + T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) end - class SelfHarmIntent < OpenAI::Enum - abstract! + module SelfHarmIntent + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) end - class Sexual < OpenAI::Enum - abstract! 
+ module Sexual + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) end - class SexualMinor < OpenAI::Enum - abstract! + module SexualMinor + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) end - class Violence < OpenAI::Enum - abstract! + module Violence + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) end - class ViolenceGraphic < OpenAI::Enum - abstract! + module ViolenceGraphic + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 3a7a01a9..a23b68a0 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -43,11 +43,14 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). 
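Usage note for the migration above: the `T.let` constants are still plain Ruby symbols at runtime, so ordinary collection operations keep working. A minimal sketch; the `category_applied_input_types` accessor on a moderation result is assumed from the surrounding model rather than shown in this hunk:

    # Hypothetical moderation result; accessor name assumed, constants as defined above.
    applied = moderation.category_applied_input_types.violence
    if applied.include?(OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::IMAGE)
      # the violence score was computed against image input
    end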
- sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) + end def model=(_) end @@ -58,7 +61,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -75,7 +78,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions } ) @@ -85,8 +88,8 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -109,10 +112,10 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)} } end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index a34a1f36..a08f4a80 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -2,15 +2,17 @@ module OpenAI module Models - class ModerationModel < OpenAI::Enum - abstract! + module ModerationModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ModerationModel::TaggedSymbol) } - OMNI_MODERATION_LATEST = :"omni-moderation-latest" - OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" - TEXT_MODERATION_LATEST = :"text-moderation-latest" - TEXT_MODERATION_STABLE = :"text-moderation-stable" + OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) + OMNI_MODERATION_2024_09_26 = + T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::OrSymbol) + TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) + TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 861ae45d..705b6af9 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # An object describing an image to classify. - class ModerationMultiModalInput < OpenAI::Union - abstract! 
+ module ModerationMultiModalInput + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 38290949..909f27d4 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -9,11 +9,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def effort=(_) end @@ -22,11 +25,14 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } def generate_summary end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) + end def generate_summary=(_) end @@ -34,11 +40,25 @@ module OpenAI # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - sig { params(effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)).returns(T.attached_class) } + sig do + params( + effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + ) + .returns(T.attached_class) + end def self.new(effort: nil, generate_summary: nil) end - sig { override.returns({effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)}) } + sig do + override + .returns( + { + effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + } + ) + end def to_hash end @@ -47,13 +67,14 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - class GenerateSummary < OpenAI::Enum - abstract! + module GenerateSummary + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } - CONCISE = :concise - DETAILED = :detailed + CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index dcca18c9..2cf29ee5 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -8,14 +8,15 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). 
Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - class ReasoningEffort < OpenAI::Enum - abstract! + module ReasoningEffort + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ReasoningEffort::TaggedSymbol) } - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = T.let(:low, OpenAI::Models::ReasoningEffort::OrSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 40b9918f..638039d3 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -23,11 +23,14 @@ module OpenAI end # The type of computer environment to control. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) } def environment end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + .returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + end def environment=(_) end @@ -43,28 +46,43 @@ module OpenAI # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do - params(display_height: Float, display_width: Float, environment: Symbol, type: Symbol) + params( + display_height: Float, + display_width: Float, + environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, + type: Symbol + ) .returns(T.attached_class) end def self.new(display_height:, display_width:, environment:, type: :computer_use_preview) end sig do - override.returns({display_height: Float, display_width: Float, environment: Symbol, type: Symbol}) + override + .returns( + { + display_height: Float, + display_width: Float, + environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, + type: Symbol + } + ) end def to_hash end # The type of computer environment to control. - class Environment < OpenAI::Enum - abstract! + module Environment + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } - MAC = :mac - WINDOWS = :windows - UBUNTU = :ubuntu - BROWSER = :browser + MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 5b3386f8..42bb10de 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -54,20 +54,26 @@ module OpenAI # The role of the message input. 
One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + .returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + end def role=(_) end # The type of the message input. Always `message`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + .returns(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + end def type=(_) end @@ -88,8 +94,8 @@ module OpenAI ) ] ), - role: Symbol, - type: Symbol + role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol ) .returns(T.attached_class) end @@ -110,8 +116,8 @@ module OpenAI ) ] ), - role: Symbol, - type: Symbol + role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol } ) end @@ -120,8 +126,8 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -142,24 +148,28 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) end # The type of the message input. Always `message`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 4085e59c..71be889d 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -88,8 +88,8 @@ module OpenAI end # A filter to apply based on file attributes. - class Filters < OpenAI::Union - abstract! 
+ module Filters + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } @@ -97,11 +97,14 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -117,22 +120,37 @@ module OpenAI end # Ranking options for search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker: nil, score_threshold: nil) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + {ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float} + ) + end def to_hash end # The ranker to use for the file search. - class Ranker < OpenAI::Enum - abstract! + module Ranker + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } - AUTO = :auto - DEFAULT_2024_11_15 = :"default-2024-11-15" + AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_11_15 = + T.let(:"default-2024-11-15", OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 1162dc6a..dfee9da7 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -39,11 +39,14 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + .returns(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + end def order=(_) end @@ -52,7 +55,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -67,7 +70,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -79,13 +82,15 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 29aba874..e785499b 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -79,11 +79,34 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig do + returns( + T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + end def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params( + _: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + .returns( + T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + end def model=(_) end @@ -175,7 +198,11 @@ module OpenAI # can call. sig do returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice @@ -183,10 +210,18 @@ module OpenAI sig do params( - _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + _: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) .returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice=(_) @@ -295,11 +330,14 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + end def status=(_) end @@ -326,11 +364,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
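A note on the `OrSymbol` aliases introduced here: because `OrSymbol` is `T.any(Symbol, ...::TaggedSymbol)`, callers that already pass bare symbols keep type-checking, while stricter code can opt into the tagged constants. A rough sketch, assuming the usual optional keyword defaults on `.new`:

    # Both satisfy InputItemListParams::Order::OrSymbol.
    OpenAI::Models::Responses::InputItemListParams.new(order: :asc)
    OpenAI::Models::Responses::InputItemListParams.new(
      order: OpenAI::Models::Responses::InputItemListParams::Order::DESC
    )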
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } def truncation end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) + end def truncation=(_) end @@ -363,7 +404,11 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, @@ -376,7 +421,11 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -389,9 +438,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: Symbol, + status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String, object: Symbol @@ -434,7 +483,11 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ), object: Symbol, output: T::Array[ T.any( @@ -448,7 +501,11 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -461,9 +518,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: Symbol, + status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String } @@ -474,44 +531,59 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the response is incomplete. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # Details about why the response is incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason: nil) end - sig { override.returns({reason: Symbol}) } + sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash end # The reason why the response is incomplete. - class Reason < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - MAX_OUTPUT_TOKENS = :max_output_tokens - CONTENT_FILTER = :content_filter + module Reason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } + + MAX_OUTPUT_TOKENS = + T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) end end # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union - abstract! + module ToolChoice + extend OpenAI::Union Variants = type_template(:out) do { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + fixed: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) } end end @@ -523,13 +595,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum - abstract! + module Truncation + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } - AUTO = :auto - DISABLED = :disabled + AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) + DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 8b5451ff..42fee896 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -58,11 +58,14 @@ module OpenAI end # The status of the code interpreter tool call. 
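Reading side of the migration: return types move to `TaggedSymbol`, but the constants remain ordinary symbols under `T.let`, so `case`/`when` comparison is unchanged. A sketch against the `IncompleteDetails` model above; the `response` value is assumed:

    case response.incomplete_details&.reason
    when OpenAI::Models::Responses::Response::IncompleteDetails::Reason::MAX_OUTPUT_TOKENS
      # generation stopped at the max_output_tokens limit
    when OpenAI::Models::Responses::Response::IncompleteDetails::Reason::CONTENT_FILTER
      # generation stopped by the content filter
    end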
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + end def status=(_) end @@ -86,7 +89,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: Symbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, type: Symbol ) .returns(T.attached_class) @@ -106,7 +109,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: Symbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, type: Symbol } ) @@ -115,8 +118,8 @@ module OpenAI end # The output of a code interpreter tool call that is text. - class Result < OpenAI::Union - abstract! + module Result + extend OpenAI::Union Variants = type_template(:out) do @@ -232,14 +235,20 @@ module OpenAI end # The status of the code interpreter tool call. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - INTERPRETING = :interpreting - COMPLETED = :completed + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + INTERPRETING = + T.let(:interpreting, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index c0e726fe..63676938 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -86,20 +86,26 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + end def status=(_) end # The type of the computer call. Always `computer_call`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + end def type=(_) end @@ -122,8 +128,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: Symbol, - type: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) .returns(T.attached_class) end @@ -148,8 +154,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: Symbol, - type: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol } ) end @@ -157,8 +163,8 @@ module OpenAI end # A click action. - class Action < OpenAI::Union - abstract! + module Action + extend OpenAI::Union Variants = type_template(:out) do @@ -180,11 +186,14 @@ module OpenAI class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } def button end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + end def button=(_) end @@ -217,26 +226,50 @@ module OpenAI end # A click action. - sig { params(button: Symbol, x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } + sig do + params( + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + x: Integer, + y_: Integer, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(button:, x:, y_:, type: :click) end - sig { override.returns({button: Symbol, type: Symbol, x: Integer, y_: Integer}) } + sig do + override + .returns( + { + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + type: Symbol, + x: Integer, + y_: Integer + } + ) + end def to_hash end # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - class Button < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - LEFT = :left - RIGHT = :right - WHEEL = :wheel - BACK = :back - FORWARD = :forward + module Button + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) } + + LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + RIGHT = + T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + WHEEL = + T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + BACK = T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + FORWARD = + T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) end end @@ -605,23 +638,29 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) end # The type of the computer call. Always `computer_call`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } - COMPUTER_CALL = :computer_call + COMPUTER_CALL = + T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 4e5be0f8..9cdcacd2 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -66,11 +66,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
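The widened `Click.new` signature above accepts the new button constants directly, and `type:` defaults to `:click` per that signature. Coordinates here are illustrative:

    OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click.new(
      button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::LEFT,
      x: 100,
      y_: 200
    )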
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + end def status=(_) end @@ -80,7 +83,7 @@ module OpenAI call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: Symbol, + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol, type: Symbol ) .returns(T.attached_class) @@ -97,7 +100,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: Symbol, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol } ) end @@ -144,14 +147,20 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 92cf9df2..0cc16286 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Multi-modal input and output contents. - class ResponseContent < OpenAI::Union - abstract! + module ResponseContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 9e79484e..b040d85c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that was added. - class Part < OpenAI::Union - abstract! 
+ module Part + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 5661d776..6f2a4562 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that is done. - class Part < OpenAI::Union - abstract! + module Part + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index dbd9ed7b..9c2938bb 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -91,11 +91,22 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig do + returns( + T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + end def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params( + _: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + .returns( + T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + end def model=(_) end @@ -107,11 +118,14 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } def include end - sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } + sig do + params(_: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) + .returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) + end def include=(_) end @@ -229,7 +243,11 @@ module OpenAI sig do returns( T.nilable( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) ) end @@ -238,10 +256,18 @@ module OpenAI sig do params( - _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + _: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) .returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice=(_) @@ -323,11 +349,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
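Pulling the widened request-param types together, a hedged sketch: the model name is illustrative (any Symbol satisfies `ChatModel::OrSymbol`), optional keyword defaults on `.new` are assumed, and `Truncation::AUTO` is defined at the end of this file's hunk:

    OpenAI::Models::Responses::ResponseCreateParams.new(
      input: "Summarize the attached file.",
      model: :"gpt-4o", # illustrative model name
      truncation: OpenAI::Models::Responses::ResponseCreateParams::Truncation::AUTO
    )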
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } def truncation end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) + end def truncation=(_) end @@ -362,8 +391,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -373,7 +402,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -383,7 +416,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -433,8 +466,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -444,7 +477,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -454,7 +491,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -472,8 +509,8 @@ module OpenAI # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -503,13 +540,17 @@ module OpenAI # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union - abstract! 
+ module ToolChoice + extend OpenAI::Union Variants = type_template(:out) do { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + fixed: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) } end end @@ -521,13 +562,16 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum - abstract! + module Truncation + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } - AUTO = :auto - DISABLED = :disabled + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) + DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 7f9b2db2..90d6cf33 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -5,11 +5,14 @@ module OpenAI module Responses class ResponseError < OpenAI::BaseModel # The error code for the response. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + end def code=(_) end @@ -23,38 +26,54 @@ module OpenAI end # An error object returned when the model fails to generate a Response. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } def to_hash end # The error code for the response. - class Code < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - INVALID_PROMPT = :invalid_prompt - VECTOR_STORE_TIMEOUT = :vector_store_timeout - INVALID_IMAGE = :invalid_image - INVALID_IMAGE_FORMAT = :invalid_image_format - INVALID_BASE64_IMAGE = :invalid_base64_image - INVALID_IMAGE_URL = :invalid_image_url - IMAGE_TOO_LARGE = :image_too_large - IMAGE_TOO_SMALL = :image_too_small - IMAGE_PARSE_ERROR = :image_parse_error - IMAGE_CONTENT_POLICY_VIOLATION = :image_content_policy_violation - INVALID_IMAGE_MODE = :invalid_image_mode - IMAGE_FILE_TOO_LARGE = :image_file_too_large - UNSUPPORTED_IMAGE_MEDIA_TYPE = :unsupported_image_media_type - EMPTY_IMAGE_FILE = :empty_image_file - FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image - IMAGE_FILE_NOT_FOUND = :image_file_not_found + module Code + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + + SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_PROMPT = T.let(:invalid_prompt, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + VECTOR_STORE_TIMEOUT = + T.let(:vector_store_timeout, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE = T.let(:invalid_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_FORMAT = + T.let(:invalid_image_format, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_BASE64_IMAGE = + T.let(:invalid_base64_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_URL = + T.let(:invalid_image_url, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_TOO_LARGE = T.let(:image_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_TOO_SMALL = T.let(:image_too_small, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_PARSE_ERROR = + T.let(:image_parse_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_CONTENT_POLICY_VIOLATION = + T.let(:image_content_policy_violation, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_MODE = + T.let(:invalid_image_mode, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_FILE_TOO_LARGE = + T.let(:image_file_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + UNSUPPORTED_IMAGE_MEDIA_TYPE = + T.let(:unsupported_image_media_type, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + EMPTY_IMAGE_FILE = T.let(:empty_image_file, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + FAILED_TO_DOWNLOAD_IMAGE = + T.let(:failed_to_download_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_FILE_NOT_FOUND = + T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 17a4bdb2..439cbcd7 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -24,11 +24,14 @@ module OpenAI # The status of 
the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + end def status=(_) end @@ -60,7 +63,7 @@ module OpenAI params( id: String, queries: T::Array[String], - status: Symbol, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]), type: Symbol ) @@ -75,7 +78,7 @@ module OpenAI { id: String, queries: T::Array[String], - status: Symbol, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, type: Symbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) } @@ -86,16 +89,20 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - SEARCHING = :searching - COMPLETED = :completed - INCOMPLETE = :incomplete - FAILED = :failed + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) end class Result < OpenAI::BaseModel @@ -179,8 +186,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index fea986b1..8c4e9b9a 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -16,8 +16,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormatTextConfig < OpenAI::Union - abstract! + module ResponseFormatTextConfig + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 7f65d691..e2fb2951 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -51,11 +51,14 @@ module OpenAI # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + end def status=(_) end @@ -63,7 +66,14 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. sig do - params(arguments: String, call_id: String, name: String, id: String, status: Symbol, type: Symbol) + params( + arguments: String, + call_id: String, + name: String, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol, + type: Symbol + ) .returns(T.attached_class) end def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) @@ -71,28 +81,33 @@ module OpenAI sig do override - .returns({ - arguments: String, - call_id: String, - name: String, - type: Symbol, - id: String, - status: Symbol - }) + .returns( + { + arguments: String, + call_id: String, + name: String, + type: Symbol, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol + } + ) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 89456a9b..4a38931a 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -42,11 +42,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
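For a sense of what these enum rewrites mean at runtime, a minimal sketch using only constants defined in this patch: `T.let(:completed, ...)` still evaluates to the bare Symbol, so existing comparisons and case statements keep working unchanged.

status = OpenAI::Models::Responses::ResponseFunctionToolCall::Status::COMPLETED
status == :completed # => true; the tagged constant is a plain Symbol at runtime

case status
when :in_progress then puts("tool call still running")
when :completed, :incomplete then puts("tool call finished")
end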
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + end def status=(_) end @@ -55,27 +58,45 @@ module OpenAI id: String, call_id: String, output: String, - status: Symbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol, type: Symbol - ).returns(T.attached_class) + ) + .returns(T.attached_class) end def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) end - sig { override.returns({id: String, call_id: String, output: String, type: Symbol, status: Symbol}) } + sig do + override + .returns( + { + id: String, + call_id: String, + output: String, + type: Symbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + } + ) + end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index fa36c718..1a4c8366 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -14,11 +14,14 @@ module OpenAI end # The status of the web search tool call. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + end def status=(_) end @@ -34,24 +37,39 @@ module OpenAI # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for # more information. 
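A hedged construction sketch for the hunk below (the `id` value is a placeholder; the keyword names come from this file's `self.new` signature): `status:` accepts a bare Symbol or the tagged constant interchangeably.

ws = OpenAI::Models::Responses::ResponseFunctionWebSearch.new(
  id: "ws_123", # placeholder identifier
  status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::COMPLETED
)
ws.to_hash # => roughly {id: "ws_123", status: :completed, type: :web_search_call}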
- sig { params(id: String, status: Symbol, type: Symbol).returns(T.attached_class) } + sig do + params( + id: String, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(id:, status:, type: :web_search_call) end - sig { override.returns({id: String, status: Symbol, type: Symbol}) } + sig do + override + .returns( + {id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol} + ) + end def to_hash end # The status of the web search tool call. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - SEARCHING = :searching - COMPLETED = :completed - FAILED = :failed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 363cdad4..3b6a4039 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -11,14 +11,18 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - class ResponseIncludable < OpenAI::Enum - abstract! + module ResponseIncludable + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } - FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" - MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" + FILE_SEARCH_CALL_RESULTS = + T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + MESSAGE_INPUT_IMAGE_IMAGE_URL = + T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = + T.let(:"computer_call_output.output.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index f49d5163..e6db921a 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -14,11 +14,14 @@ module OpenAI end # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
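The audio input type follows the same pattern; a short sketch (the WAV file path is a placeholder, and `format_` is presumably underscored to avoid clashing with `Kernel#format`):

require "base64"

audio = OpenAI::Models::Responses::ResponseInputAudio.new(
  data: Base64.strict_encode64(File.binread("clip.wav")), # placeholder file
  format_: OpenAI::Models::Responses::ResponseInputAudio::Format::WAV
)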
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + end def format_=(_) end @@ -32,22 +35,36 @@ module OpenAI end # An audio input to the model. - sig { params(data: String, format_: Symbol, type: Symbol).returns(T.attached_class) } + sig do + params( + data: String, + format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, format_:, type: :input_audio) end - sig { override.returns({data: String, format_: Symbol, type: Symbol}) } + sig do + override + .returns( + {data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol} + ) + end def to_hash end # The format of the audio data. Currently supported formats are `mp3` and `wav`. - class Format < OpenAI::Enum - abstract! + module Format + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } - MP3 = :mp3 - WAV = :wav + MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index cba404fb..37ed1a5a 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # A text input to the model. - class ResponseInputContent < OpenAI::Union - abstract! + module ResponseInputContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index f4e450ae..ade87200 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -6,11 +6,14 @@ module OpenAI class ResponseInputImage < OpenAI::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + end def detail=(_) end @@ -45,35 +48,43 @@ module OpenAI # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). 
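And the image counterpart, sketched under the same assumptions (the file ID is hypothetical; `detail:` is the only required argument per the signature below):

image = OpenAI::Models::Responses::ResponseInputImage.new(
  detail: :auto, # or Detail::AUTO; both satisfy Detail::OrSymbol
  file_id: "file-abc123" # hypothetical previously uploaded file
)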
sig do - params(detail: Symbol, file_id: T.nilable(String), image_url: T.nilable(String), type: Symbol) + params( + detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + ) .returns(T.attached_class) end def self.new(detail:, file_id: nil, image_url: nil, type: :input_image) end sig do - override.returns( - { - detail: Symbol, - type: Symbol, - file_id: T.nilable(String), - image_url: T.nilable(String) - } - ) + override + .returns( + { + detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, + type: Symbol, + file_id: T.nilable(String), + image_url: T.nilable(String) + } + ) end def to_hash end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } - HIGH = :high - LOW = :low - AUTO = :auto + HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 6ccd31a3..496ac76e 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -8,8 +8,8 @@ module OpenAI # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - class ResponseInputItem < OpenAI::Union - abstract! + module ResponseInputItem + extend OpenAI::Union Variants = type_template(:out) do @@ -71,30 +71,39 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + end def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + end def status=(_) end # The type of the message input. Always set to `message`. 
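A sketch of assembling such a message item (this assumes `status` and `type` remain optional, as their nilable getters suggest, and assumes a `text:` keyword on `ResponseInputText`, which appears only as a content variant in this diff):

msg = OpenAI::Models::Responses::ResponseInputItem::Message.new(
  content: [OpenAI::Models::Responses::ResponseInputText.new(text: "Hello!")],
  role: :user # Role::USER is equivalent
)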
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + end def type=(_) end @@ -110,9 +119,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol ) .returns(T.attached_class) end @@ -130,9 +139,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol } ) end @@ -140,35 +149,45 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } - USER = :user - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) end # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) end end @@ -237,11 +256,14 @@ module OpenAI # The status of the message input. 
One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + end def status=(_) end @@ -252,7 +274,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol, + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -269,7 +291,7 @@ module OpenAI type: Symbol, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol } ) end @@ -316,14 +338,20 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) end end @@ -367,11 +395,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
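Sketch of the matching constructor call, using only keywords visible in the signature below (the call ID is a placeholder; serializing the tool result is left to the caller):

require "json"

item = OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput.new(
  call_id: "call_abc123", # placeholder tool-call id
  output: JSON.generate(temperature: 21.5) # tool result as a string
)
item.status # presumably nil until the API echoes the item back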
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + end def status=(_) end @@ -381,29 +412,45 @@ module OpenAI call_id: String, output: String, id: String, - status: Symbol, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol, type: Symbol - ).returns(T.attached_class) + ) + .returns(T.attached_class) end def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output) end sig do - override.returns({call_id: String, output: String, type: Symbol, id: String, status: Symbol}) + override + .returns( + { + call_id: String, + output: String, + type: Symbol, + id: String, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + } + ) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index cabf1399..0aaaacd6 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -53,30 +53,39 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + end def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + end def status=(_) end # The type of the message input. Always set to `message`. 
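The apparent convention throughout this patch, inferred from the aliases rather than stated in the diff: request-side fields take `OrSymbol` (so any bare Symbol typechecks), while response-only items such as this one expose `TaggedSymbol`, the narrowed member type. A sketch of how that plays out under `sorbet-runtime`:

require "sorbet-runtime"

Status = OpenAI::Models::Responses::ResponseInputMessageItem::Status

class StatusPrinter
  extend T::Sig

  # Widening the parameter to OrSymbol lets bare literals typecheck:
  sig { params(status: Status::OrSymbol).void }
  def print_status(status)
    puts(status == :completed ? "done" : "not done")
  end
end

StatusPrinter.new.print_status(:in_progress)      # bare Symbol
StatusPrinter.new.print_status(Status::COMPLETED) # tagged member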
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + end def type=(_) end @@ -90,9 +99,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -111,9 +120,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol } ) end @@ -121,35 +130,44 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } - USER = :user - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) end # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index 77bef808..0d59846c 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Content item used to generate a response. - class ResponseItem < OpenAI::Union - abstract! + module ResponseItem + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index bec1b93e..f4a81f11 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # An output message from the model. - class ResponseOutputItem < OpenAI::Union - abstract! + module ResponseOutputItem + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 17bf1ad0..63f0758f 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -44,11 +44,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + end def status=(_) end @@ -66,7 +69,7 @@ module OpenAI params( id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], - status: Symbol, + status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, type: Symbol ) @@ -82,7 +85,7 @@ module OpenAI id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], role: Symbol, - status: Symbol, + status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, type: Symbol } ) @@ -91,8 +94,8 @@ module OpenAI end # A text output from the model. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -104,14 +107,16 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 13eb0a87..78b35143 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -98,8 +98,8 @@ module OpenAI end # A citation to a file. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 66b69b58..aabd22e0 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -36,11 +36,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + end def status=(_) end @@ -50,7 +53,7 @@ module OpenAI params( id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - status: Symbol, + status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -65,7 +68,7 @@ module OpenAI id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], type: Symbol, - status: Symbol + status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol } ) end @@ -102,14 +105,16 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 8e817f0d..d2129c7d 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -9,17 +9,20 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) + .returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) + end def include=(_) end sig do params( - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -27,7 +30,15 @@ module OpenAI def self.new(include: nil, request_options: {}) end - sig { override.returns({include: T::Array[Symbol], request_options: OpenAI::RequestOptions}) } + sig do + override + .returns( + { + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions + } + ) + end def to_hash end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index f6a3f6ce..95b80ac1 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -5,15 +5,16 @@ module OpenAI module Responses # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - class ResponseStatus < OpenAI::Enum - abstract! 
+ module ResponseStatus + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } - COMPLETED = :completed - FAILED = :failed - IN_PROGRESS = :in_progress - INCOMPLETE = :incomplete + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index ed1980bf..4c87665c 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Emitted when there is a partial audio response. - class ResponseStreamEvent < OpenAI::Union - abstract! + module ResponseStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index ed354df8..f7d64bc8 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -128,8 +128,8 @@ module OpenAI end # A citation to a file. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 0085ad60..0d0c2a77 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -6,8 +6,8 @@ module OpenAI # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index c047abb7..a4acb23d 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -11,14 +11,15 @@ module OpenAI # more tools. # # `required` means the model must call one or more tools. - class ToolChoiceOptions < OpenAI::Enum - abstract! 
+ module ToolChoiceOptions + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 8459293b..45b4ecfc 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -12,21 +12,24 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + end def type=(_) end # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). - sig { params(type: Symbol).returns(T.attached_class) } + sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) } def self.new(type:) end - sig { override.returns({type: Symbol}) } + sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) } def to_hash end @@ -38,15 +41,19 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } - FILE_SEARCH = :file_search - WEB_SEARCH_PREVIEW = :web_search_preview - COMPUTER_USE_PREVIEW = :computer_use_preview - WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + WEB_SEARCH_PREVIEW = + T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + COMPUTER_USE_PREVIEW = + T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + WEB_SEARCH_PREVIEW_2025_03_11 = + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index cf2fb2f6..a0ae2d7c 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -8,21 +8,27 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + .returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + end def type=(_) end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } def search_context_size end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + .returns(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + end def search_context_size=(_) end @@ -42,8 +48,8 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). sig do params( - type: Symbol, - search_context_size: Symbol, + type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) ) .returns(T.attached_class) @@ -55,8 +61,8 @@ module OpenAI override .returns( { - type: Symbol, - search_context_size: Symbol, + type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) } ) @@ -68,25 +74,30 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } - WEB_SEARCH_PREVIEW = :web_search_preview - WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + WEB_SEARCH_PREVIEW_2025_03_11 = + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum - abstract! + module SearchContextSize + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index ecad7412..ddb9e74a 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -2,20 +2,28 @@ module OpenAI module Models - class ResponsesModel < OpenAI::Union - abstract! + module ResponsesModel + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = + type_template(:out) do + { + fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + } + end - class UnionMember2 < OpenAI::Enum - abstract! + module UnionMember2 + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::UnionMember2) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol) } - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW = + T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 1da23f04..05b6e1e2 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -69,11 +69,14 @@ module OpenAI end # The status of the Upload. 
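A consumption sketch for the enum below (the `upload` value is assumed to be an `OpenAI::Models::Upload` retrieved elsewhere; `filename` comes from this file's accessors):

def describe(upload)
  case upload.status
  when OpenAI::Models::Upload::Status::PENDING
    "still uploading"
  when OpenAI::Models::Upload::Status::COMPLETED
    "ready: #{upload.filename}"
  when OpenAI::Models::Upload::Status::CANCELLED, OpenAI::Models::Upload::Status::EXPIRED
    "no longer usable"
  end
end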
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Upload::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Upload::Status::TaggedSymbol) + .returns(OpenAI::Models::Upload::Status::TaggedSymbol) + end def status=(_) end @@ -95,7 +98,7 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: Symbol, + status: OpenAI::Models::Upload::Status::TaggedSymbol, file: T.nilable(OpenAI::Models::FileObject), object: Symbol ) @@ -115,7 +118,7 @@ module OpenAI filename: String, object: Symbol, purpose: String, - status: Symbol, + status: OpenAI::Models::Upload::Status::TaggedSymbol, file: T.nilable(OpenAI::Models::FileObject) } ) @@ -124,15 +127,16 @@ module OpenAI end # The status of the Upload. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Upload::Status::TaggedSymbol) } - PENDING = :pending - COMPLETED = :completed - CANCELLED = :cancelled - EXPIRED = :expired + PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 22555f0e..93d701d3 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -40,11 +40,11 @@ module OpenAI # # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose=(_) end @@ -53,7 +53,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -68,7 +68,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index e80d2488..188bfd85 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -73,11 +73,14 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
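A polling sketch built on that comment; note that the `client.vector_stores.retrieve` call and the `vs_123` ID are assumptions about the surrounding SDK, not something this diff shows:

store = client.vector_stores.retrieve("vs_123") # hypothetical client and id
until store.status == OpenAI::Models::VectorStore::Status::COMPLETED
  raise "vector store expired" if store.status == :expired
  sleep(1)
  store = client.vector_stores.retrieve("vs_123")
end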
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStore::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) + end def status=(_) end @@ -118,7 +121,7 @@ module OpenAI last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: Symbol, + status: OpenAI::Models::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer), @@ -152,7 +155,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer) @@ -238,14 +241,15 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStore::Status::TaggedSymbol) } - EXPIRED = :expired - IN_PROGRESS = :in_progress - COMPLETED = :completed + EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) end class ExpiresAfter < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 5749ce8a..c2400193 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -42,11 +42,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + end def order=(_) end @@ -55,7 +58,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -70,7 +73,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -80,13 +83,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 029b5b83..1e785923 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -89,8 +89,8 @@ module OpenAI end # A query string for a search - class Query < OpenAI::Union - abstract! + module Query + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } @@ -98,19 +98,22 @@ module OpenAI end # A filter to apply based on file attributes. - class Filters < OpenAI::Union - abstract! + module Filters + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -123,21 +126,36 @@ module OpenAI end # Ranking options for search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker: nil, score_threshold: nil) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + {ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float} + ) + end def to_hash end - class Ranker < OpenAI::Enum - abstract! + module Ranker + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } - AUTO = :auto - DEFAULT_2024_11_15 = :"default-2024-11-15" + AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_11_15 = + T.let(:"default-2024-11-15", OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index a2443437..e4039a7c 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -86,8 +86,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end @@ -103,29 +103,40 @@ module OpenAI end # The type of content. 
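For orientation, a hedged sketch of exercising the `Ranker` enum above; the `vector_stores.search` call shape is inferred from this params list and not verified here:

    client = OpenAI::Client.new # assumes OPENAI_API_KEY is set in the environment
    client.vector_stores.search(
      "vs_abc123", # placeholder vector store id
      query: "quarterly revenue",
      ranking_options: {
        ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::AUTO,
        score_threshold: 0.5
      }
    )
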
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + .returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + end def type=(_) end - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + .returns(T.attached_class) + end def self.new(text:, type:) end - sig { override.returns({text: String, type: Symbol}) } + sig do + override + .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) + end def to_hash end # The type of content. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 4be0dcb6..f2dd4d99 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index c7da2f41..22045ec2 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -40,11 +40,14 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)) } def filter end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + end def filter=(_) end @@ -60,11 +63,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
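Because `Filter::OrSymbol` admits bare symbols, both spellings below should typecheck; a sketch, with the `list_files` call shape assumed from the FileBatches resource signature later in this patch (the positional batch id is an assumption):

    client.vector_stores.file_batches.list_files(
      "vsfb_123",
      vector_store_id: "vs_abc123",
      filter: :completed, # bare symbol via OrSymbol
      order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::DESC
    )
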
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + end def order=(_) end @@ -73,9 +79,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -90,9 +96,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -101,26 +107,33 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum - abstract! + module Filter + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - FAILED = :failed - CANCELLED = :cancelled + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 57e35c52..d8ec4fba 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! 
+ module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 0f62a65d..20a4bce5 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -32,11 +32,14 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)) } def filter end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + end def filter=(_) end @@ -52,11 +55,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + end def order=(_) end @@ -64,9 +70,9 @@ module OpenAI params( after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -80,9 +86,9 @@ module OpenAI { after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -91,26 +97,30 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum - abstract! + module Filter + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - FAILED = :failed - CANCELLED = :cancelled + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index e3693815..fe35965e 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -55,8 +55,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 4e1a4a36..fc8ccf5a 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -47,11 +47,14 @@ module OpenAI # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + end def status=(_) end @@ -121,7 +124,7 @@ module OpenAI id: String, created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -151,7 +154,7 @@ module OpenAI created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -164,11 +167,14 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -183,42 +189,61 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
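A small sketch of consuming the retyped `last_error`; `retry_ingest` is a hypothetical helper, everything else is named in this hunk:

    if (err = file.last_error)
      case err.code
      when OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::SERVER_ERROR
        retry_ingest(file) # transient server error: hypothetical retry path
      else
        raise "file ingestion failed (#{err.code}): #{err.message}"
      end
    end
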
- sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params( + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, + message: String + ) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns( + {code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, message: String} + ) + end def to_hash end # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - UNSUPPORTED_FILE = :unsupported_file - INVALID_FILE = :invalid_file + module Code + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + + SERVER_ERROR = + T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + UNSUPPORTED_FILE = + T.let(:unsupported_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + INVALID_FILE = + T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) end end # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - CANCELLED = :cancelled - FAILED = :failed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 2e5ce798..4627b63f 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -45,11 +45,14 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. 
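Status enums like this one are typically consumed by polling until a terminal value; a sketch, with `fetch_batch` standing in for a hypothetical retrieve call:

    TERMINAL = [
      OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::COMPLETED,
      OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::CANCELLED,
      OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::FAILED
    ].freeze

    loop do
      batch = fetch_batch.call # hypothetical: wraps a file_batches retrieve
      break if TERMINAL.include?(batch.status)
      sleep(1)
    end
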
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + end def status=(_) end @@ -71,7 +74,7 @@ module OpenAI id: String, created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, vector_store_id: String, object: Symbol ) @@ -88,7 +91,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, vector_store_id: String } ) @@ -171,15 +174,18 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - CANCELLED = :cancelled - FAILED = :failed + module Status + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index e2e85216..c434cedb 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -8,10 +8,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index a1340034..38f106b4 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -8,13 +8,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: 
T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -70,13 +70,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 278e3855..baf563ad 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -8,9 +8,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index 10691913..1a12c440 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -6,8 +6,8 @@ module OpenAI # Creates and executes a batch from an uploaded file of requests sig do params( - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 00d18547..a31361cb 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -7,12 +7,12 @@ module OpenAI # Create an assistant with a model and instructions. 
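Putting the transcription signature above to work, a hedged example; `VERBOSE_JSON` is assumed to exist among the generated `AudioResponseFormat` constants:

    client.audio.transcriptions.create(
      file: File.open("meeting.wav", "rb"),
      model: :"whisper-1", # any Symbol satisfies AudioModel::OrSymbol
      response_format: OpenAI::Models::AudioResponseFormat::VERBOSE_JSON,
      timestamp_granularities: [:word, :segment] # granularities require verbose_json
    )
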
sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -129,9 +129,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -234,7 +234,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 1479fd01..6db707e7 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -109,7 +109,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -121,7 +121,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -243,7 +248,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -255,7 +260,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 3ec7bab4..68fd1790 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -19,7 +19,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), 
metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -104,7 +104,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index cd747220..f5d012a0 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -14,16 +14,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -33,7 +33,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -171,16 +176,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -190,7 +195,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -405,7 +415,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]) diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 4833b5fb..5dfa9a8a 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ 
b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -12,7 +12,7 @@ module OpenAI step_id: String, thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) @@ -44,9 +44,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index fff5ad69..8fc9878c 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -37,34 +37,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -295,34 +301,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, 
OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -573,7 +585,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 3e7c16e2..0667ddbc 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -12,7 +12,7 @@ module OpenAI completion_id: String, after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 8e9e52d4..b894675b 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -149,7 +149,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. 
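The widened `tool_choice` union in the chat completion signature above accepts either an option symbol or a named-tool object; a sketch, with `GPT_4O` assumed among the generated `ChatModel` constants and hash literals assumed to coerce into the message and tool param models:

    client.chat.completions.create(
      model: OpenAI::Models::ChatModel::GPT_4O,
      messages: [{role: :user, content: "What's the weather in Oslo?"}],
      tools: [{
        type: :function,
        function: {name: "get_weather", parameters: {type: "object", properties: {city: {type: "string"}}}}
      }],
      tool_choice: :auto # ChatCompletionToolChoiceOption::Auto::OrSymbol admits the bare symbol
    )
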
sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 025c37a7..65a19f1d 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -7,9 +7,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 441f93e0..da0efe9d 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -27,7 +27,7 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::FileObject) @@ -64,7 +64,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index b3f7cc15..1c55189e 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -17,7 +17,7 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 396f2edd..1944e5f2 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -7,10 +7,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -47,10 +47,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: 
T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -91,12 +91,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 3b9b2bd0..b57441a5 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -12,7 +12,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::ModerationCreateResponse) diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 64ab5c27..701c8406 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -38,8 +38,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -49,7 +49,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -59,7 +63,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -209,8 +213,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -220,7 +224,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, 
OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -230,7 +238,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -390,7 +398,7 @@ module OpenAI sig do params( response_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Responses::Response) diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 44e41eb2..7f92008d 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -11,7 +11,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 98a58dc5..6ee12473 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -31,7 +31,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Upload) diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 86418315..7e24de66 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -99,7 +99,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index ec4e8e7a..c8a0af8b 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -82,9 +82,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 2fc3ae57..88902be4 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -89,9 +89,9 @@ module OpenAI vector_store_id: String, after: String, 
before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index ba62241d..ff01cbf3 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -62,28 +62,28 @@ module OpenAI ) -> ([true, top, nil] | [false, bool, Integer]) end - class Enum - extend OpenAI::Converter + module Enum + include OpenAI::Converter def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] private def self.finalize!: -> void - def self.===: (top other) -> bool + def ===: (top other) -> bool - def self.==: (top other) -> bool + def ==: (top other) -> bool - def self.coerce: (String | Symbol | top value) -> (Symbol | top) + def coerce: (String | Symbol | top value) -> (Symbol | top) - def self.dump: (Symbol | top value) -> (Symbol | top) + def dump: (Symbol | top value) -> (Symbol | top) - def self.try_strict_coerce: ( + def try_strict_coerce: ( top value ) -> ([true, top, nil] | [false, bool, Integer]) end - class Union - extend OpenAI::Converter + module Union + include OpenAI::Converter private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Converter::input)]] @@ -105,15 +105,15 @@ module OpenAI private def self.resolve_variant: (top value) -> OpenAI::Converter::input? - def self.===: (top other) -> bool + def ===: (top other) -> bool - def self.==: (top other) -> bool + def ==: (top other) -> bool - def self.coerce: (top value) -> top + def coerce: (top value) -> top - def self.dump: (top value) -> top + def dump: (top value) -> top - def self.try_strict_coerce: ( + def try_strict_coerce: ( top value ) -> ([true, top, nil] | [false, bool, Integer]) end diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 817c97a9..7427ba06 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -5,14 +5,18 @@ module OpenAI | OpenAI::Models::chat_model | OpenAI::Models::AllModels::union_member2 - class AllModels < OpenAI::Union + module AllModels + extend OpenAI::Union + type union_member2 = :"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW: :"computer-use-preview" diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index eb21a97f..30772347 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -50,7 +50,9 @@ module OpenAI type model = String | OpenAI::Models::Audio::speech_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::Audio::speech_model] end @@ -65,7 +67,9 @@ module OpenAI | :sage | :shimmer - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY: :alloy ASH: :ash CORAL: :coral @@ -81,7 +85,9 @@ module OpenAI type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + MP3: :mp3 OPUS: :opus AAC: :aac diff --git 
a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs index 357eaa4c..51c913a4 100644 --- a/sig/openai/models/audio/speech_model.rbs +++ b/sig/openai/models/audio/speech_model.rbs @@ -3,7 +3,9 @@ module OpenAI module Audio type speech_model = :"tts-1" | :"tts-1-hd" | :"gpt-4o-mini-tts" - class SpeechModel < OpenAI::Enum + module SpeechModel + extend OpenAI::Enum + TTS_1: :"tts-1" TTS_1_HD: :"tts-1-hd" GPT_4O_MINI_TTS: :"gpt-4o-mini-tts" diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 28f79a92..3834d3cc 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -68,13 +68,17 @@ module OpenAI type model = String | OpenAI::Models::audio_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::audio_model] end type timestamp_granularity = :word | :segment - class TimestampGranularity < OpenAI::Enum + module TimestampGranularity + extend OpenAI::Enum + WORD: :word SEGMENT: :segment diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs index f0179c81..32f15ab6 100644 --- a/sig/openai/models/audio/transcription_create_response.rbs +++ b/sig/openai/models/audio/transcription_create_response.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::Transcription | OpenAI::Models::Audio::TranscriptionVerbose - class TranscriptionCreateResponse < OpenAI::Union + module TranscriptionCreateResponse + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] end end diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs index cf06a929..1fc83e72 100644 --- a/sig/openai/models/audio/transcription_include.rbs +++ b/sig/openai/models/audio/transcription_include.rbs @@ -3,7 +3,9 @@ module OpenAI module Audio type transcription_include = :logprobs - class TranscriptionInclude < OpenAI::Enum + module TranscriptionInclude + extend OpenAI::Enum + LOGPROBS: :logprobs def self.values: -> ::Array[OpenAI::Models::Audio::transcription_include] diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs index b9233feb..caffcd1f 100644 --- a/sig/openai/models/audio/transcription_stream_event.rbs +++ b/sig/openai/models/audio/transcription_stream_event.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::TranscriptionTextDeltaEvent | OpenAI::Models::Audio::TranscriptionTextDoneEvent - class TranscriptionStreamEvent < OpenAI::Union + module TranscriptionStreamEvent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 83dc3322..d5dcc175 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -46,13 +46,17 @@ module OpenAI type model = String | OpenAI::Models::audio_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::audio_model] end type response_format = :json | :text | :srt | :verbose_json | :vtt - class ResponseFormat < 
OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + JSON: :json TEXT: :text SRT: :srt diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs index d80690b9..6c26f34d 100644 --- a/sig/openai/models/audio/translation_create_response.rbs +++ b/sig/openai/models/audio/translation_create_response.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::Translation | OpenAI::Models::Audio::TranslationVerbose - class TranslationCreateResponse < OpenAI::Union + module TranslationCreateResponse + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] end end diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs index 72b67344..7c47323a 100644 --- a/sig/openai/models/audio_model.rbs +++ b/sig/openai/models/audio_model.rbs @@ -3,7 +3,9 @@ module OpenAI type audio_model = :"whisper-1" | :"gpt-4o-transcribe" | :"gpt-4o-mini-transcribe" - class AudioModel < OpenAI::Enum + module AudioModel + extend OpenAI::Enum + WHISPER_1: :"whisper-1" GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE: :"gpt-4o-mini-transcribe" diff --git a/sig/openai/models/audio_response_format.rbs b/sig/openai/models/audio_response_format.rbs index e91a52b8..ee7b583f 100644 --- a/sig/openai/models/audio_response_format.rbs +++ b/sig/openai/models/audio_response_format.rbs @@ -2,7 +2,9 @@ module OpenAI module Models type audio_response_format = :json | :text | :srt | :verbose_json | :vtt - class AudioResponseFormat < OpenAI::Enum + module AudioResponseFormat + extend OpenAI::Enum + JSON: :json TEXT: :text SRT: :srt diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs index 278b30f0..2a3d4888 100644 --- a/sig/openai/models/batch.rbs +++ b/sig/openai/models/batch.rbs @@ -128,7 +128,9 @@ module OpenAI | :cancelling | :cancelled - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING: :validating FAILED: :failed IN_PROGRESS: :in_progress diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 5a459418..c73264e6 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -33,7 +33,9 @@ module OpenAI type completion_window = :"24h" - class CompletionWindow < OpenAI::Enum + module CompletionWindow + extend OpenAI::Enum + NUMBER_24H: :"24h" def self.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window] @@ -45,7 +47,9 @@ module OpenAI | :"/v1/embeddings" | :"/v1/completions" - class Endpoint < OpenAI::Enum + module Endpoint + extend OpenAI::Enum + V1_RESPONSES: :"/v1/responses" V1_CHAT_COMPLETIONS: :"/v1/chat/completions" V1_EMBEDDINGS: :"/v1/embeddings" diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index d4a0708e..9a108229 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -66,7 +66,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 89fbdc93..c8869670 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -44,7 +44,9 @@ module 
OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs index 158a3a03..d5b4a073 100644 --- a/sig/openai/models/beta/assistant_response_format_option.rbs +++ b/sig/openai/models/beta/assistant_response_format_option.rbs @@ -7,7 +7,9 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONObject | OpenAI::Models::ResponseFormatJSONSchema - class AssistantResponseFormatOption < OpenAI::Union + module AssistantResponseFormatOption + extend OpenAI::Union + def self.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index 3cff7d58..4ed62507 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -27,7 +27,9 @@ module OpenAI | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete | OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - class AssistantStreamEvent < OpenAI::Union + module AssistantStreamEvent + extend OpenAI::Union + type thread_created = { data: OpenAI::Models::Beta::Thread, diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs index 5421e7bc..48827d7f 100644 --- a/sig/openai/models/beta/assistant_tool.rbs +++ b/sig/openai/models/beta/assistant_tool.rbs @@ -6,7 +6,9 @@ module OpenAI | OpenAI::Models::Beta::FileSearchTool | OpenAI::Models::Beta::FunctionTool - class AssistantTool < OpenAI::Union + module AssistantTool + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end end diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs index eabceb53..ab690c71 100644 --- a/sig/openai/models/beta/assistant_tool_choice.rbs +++ b/sig/openai/models/beta/assistant_tool_choice.rbs @@ -25,7 +25,9 @@ module OpenAI type type_ = :function | :code_interpreter | :file_search - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION: :function CODE_INTERPRETER: :code_interpreter FILE_SEARCH: :file_search diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs index f7886116..ee421612 100644 --- a/sig/openai/models/beta/assistant_tool_choice_option.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs @@ -5,10 +5,14 @@ module OpenAI OpenAI::Models::Beta::AssistantToolChoiceOption::auto | OpenAI::Models::Beta::AssistantToolChoice - class AssistantToolChoiceOption < OpenAI::Union + module AssistantToolChoiceOption + extend OpenAI::Union + type auto = :none | :auto | :required - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE: :none AUTO: :auto REQUIRED: :required diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index e67f47e5..d3efa3c2 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -101,7 +101,9 @@ module OpenAI | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613" - class Model < OpenAI::Union + module Model + extend OpenAI::Union + O3_MINI: 
:"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs index b042a2f7..bd1238f4 100644 --- a/sig/openai/models/beta/file_search_tool.rbs +++ b/sig/openai/models/beta/file_search_tool.rbs @@ -71,7 +71,9 @@ module OpenAI type ranker = :auto | :default_2024_08_21 - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index ad12cf61..edd61dcf 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete - class MessageStreamEvent < OpenAI::Union + module MessageStreamEvent + extend OpenAI::Union + type thread_message_created = { data: OpenAI::Models::Beta::Threads::Message, diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index 51f78cb2..9207cecc 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -10,7 +10,9 @@ module OpenAI | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired - class RunStepStreamEvent < OpenAI::Union + module RunStepStreamEvent + extend OpenAI::Union + type thread_run_step_created = { data: OpenAI::Models::Beta::Threads::Runs::RunStep, diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index 46793589..4081dbf8 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -13,7 +13,9 @@ module OpenAI | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled | OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired - class RunStreamEvent < OpenAI::Union + module RunStreamEvent + extend OpenAI::Union + type thread_run_created = { data: OpenAI::Models::Beta::Threads::Run, diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index a7b4922f..5c9d3cd3 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -84,7 +84,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -144,7 +146,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -155,7 +159,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -190,7 +196,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -412,7 +420,9 @@ module OpenAI | 
OpenAI::Models::Beta::FileSearchTool | OpenAI::Models::Beta::FunctionTool - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end @@ -436,7 +446,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 5acf3300..fb2c4dc7 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -62,7 +62,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -73,7 +75,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -108,7 +112,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs index 527a6e58..cde96d26 100644 --- a/sig/openai/models/beta/threads/annotation.rbs +++ b/sig/openai/models/beta/threads/annotation.rbs @@ -6,7 +6,9 @@ module OpenAI OpenAI::Models::Beta::Threads::FileCitationAnnotation | OpenAI::Models::Beta::Threads::FilePathAnnotation - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] end end diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs index 21dee9df..22600103 100644 --- a/sig/openai/models/beta/threads/annotation_delta.rbs +++ b/sig/openai/models/beta/threads/annotation_delta.rbs @@ -6,7 +6,9 @@ module OpenAI OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - class AnnotationDelta < OpenAI::Union + module AnnotationDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] end end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index ce6ca5d5..0b0987a7 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -26,7 +26,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 3b996b60..d0870c96 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -28,7 +28,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: 
:low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 9aec42d8..36929cf8 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -26,7 +26,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index c7d07d96..3f1fcbdc 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -28,7 +28,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index e6c155a8..29fa0135 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -96,7 +96,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type assistant_tools_file_search_type_only = { type: :file_search } @@ -133,7 +135,9 @@ module OpenAI | :run_expired | :run_failed - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + CONTENT_FILTER: :content_filter MAX_TOKENS: :max_tokens RUN_CANCELLED: :run_cancelled @@ -146,7 +150,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -155,7 +161,9 @@ module OpenAI type status = :in_progress | :incomplete | :completed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress INCOMPLETE: :incomplete COMPLETED: :completed diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index 252bb7ff..dc2a9215 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::TextContentBlock | OpenAI::Models::Beta::Threads::RefusalContentBlock - class MessageContent < OpenAI::Union + module MessageContent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] end end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index aab10ba6..1357cfba 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::RefusalDeltaBlock | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - class MessageContentDelta < OpenAI::Union + module MessageContentDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] end end diff --git 
a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs index 39228e33..76a88822 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -7,7 +7,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::ImageURLContentBlock | OpenAI::Models::Beta::Threads::TextContentBlockParam - class MessageContentPartParam < OpenAI::Union + module MessageContentPartParam + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index fb2276f9..3e6288ac 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -37,7 +37,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -48,7 +50,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -83,7 +87,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index e053ee9b..0487cbfd 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -30,7 +30,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index dca2c571..c8146f4e 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -51,7 +51,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index deae5170..e84c343f 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -140,7 +140,9 @@ module OpenAI type reason = :max_completion_tokens | :max_prompt_tokens - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens @@ -168,7 +170,9 @@ module OpenAI type code = :server_error | :rate_limit_exceeded | :invalid_prompt - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt @@ -231,7 +235,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git 
a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index c9f5247b..5dfe9de6 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -121,7 +121,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -132,7 +134,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -167,7 +171,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -185,7 +191,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -209,7 +217,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index fc43edb9..54dd93bc 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -45,7 +45,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run_status.rbs b/sig/openai/models/beta/threads/run_status.rbs index cfc75c7b..d7433b66 100644 --- a/sig/openai/models/beta/threads/run_status.rbs +++ b/sig/openai/models/beta/threads/run_status.rbs @@ -13,7 +13,9 @@ module OpenAI | :incomplete | :expired - class RunStatus < OpenAI::Enum + module RunStatus + extend OpenAI::Enum + QUEUED: :queued IN_PROGRESS: :in_progress REQUIRES_ACTION: :requires_action diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index 67d18341..ddc03a7d 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -47,7 +47,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - class Output < OpenAI::Union + module Output + extend OpenAI::Union + type logs = { logs: String, type: :logs } class Logs < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index d8884223..81324a83 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -63,7 +63,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - class Output < OpenAI::Union + module Output + extend 
OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index 5b6c9102..8952fadb 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -71,7 +71,9 @@ module OpenAI type ranker = :auto | :default_2024_08_21 - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 @@ -135,7 +137,9 @@ module OpenAI type type_ = :text - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 4216d33b..36dd9a60 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStep = Runs::RunStep module Runs @@ -100,7 +99,9 @@ module OpenAI type code = :server_error | :rate_limit_exceeded - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded @@ -111,7 +112,9 @@ module OpenAI type status = :in_progress | :cancelled | :failed | :completed | :expired - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress CANCELLED: :cancelled FAILED: :failed @@ -125,13 +128,17 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] end type type_ = :message_creation | :tool_calls - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE_CREATION: :message_creation TOOL_CALLS: :tool_calls diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index 7f45537b..171b4303 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDelta = Runs::RunStepDelta module Runs @@ -28,7 +27,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index f3f75aab..295594a8 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDeltaEvent = Runs::RunStepDeltaEvent module Runs diff 
--git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index 5dd59c8c..d1daf15e 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta module Runs diff --git a/sig/openai/models/beta/threads/runs/run_step_include.rbs b/sig/openai/models/beta/threads/runs/run_step_include.rbs index be00b41f..ed1e3934 100644 --- a/sig/openai/models/beta/threads/runs/run_step_include.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_include.rbs @@ -2,14 +2,15 @@ module OpenAI module Models module Beta module Threads - - class RunStepInclude = Runs::RunStepInclude + module RunStepInclude = Runs::RunStepInclude module Runs type run_step_include = :"step_details.tool_calls[*].file_search.results[*].content" - class RunStepInclude < OpenAI::Enum + module RunStepInclude + extend OpenAI::Enum + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT: :"step_details.tool_calls[*].file_search.results[*].content" def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include] diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index bd65efbf..641b3530 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -58,7 +58,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs index 42300b3c..081dbbdf 100644 --- a/sig/openai/models/beta/threads/runs/tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - class ToolCall < OpenAI::Union + module ToolCall + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs index 6c3c0ec7..472aee5f 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - class ToolCallDelta < OpenAI::Union + module ToolCallDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] end end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 6c256523..ed15fbfe 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletion = Chat::ChatCompletion module Chat @@ -81,7 +80,9 @@ module OpenAI type 
finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length TOOL_CALLS: :tool_calls @@ -113,7 +114,9 @@ module OpenAI type service_tier = :scale | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE: :scale DEFAULT: :default diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index fbe3c3f8..e05d98b5 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam module Chat @@ -62,7 +61,9 @@ module OpenAI String | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type array_of_content_part_array = ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] @@ -72,7 +73,9 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionContentPartText | OpenAI::Models::Chat::ChatCompletionContentPartRefusal - class ArrayOfContentPart < OpenAI::Union + module ArrayOfContentPart + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index 856fe6c8..a3b3cda4 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionAudio = Chat::ChatCompletionAudio module Chat diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index d90d22b0..5f2424d0 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionAudioParam = Chat::ChatCompletionAudioParam module Chat @@ -24,7 +23,9 @@ module OpenAI type format_ = :wav | :mp3 | :flac | :opus | :pcm16 - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV: :wav MP3: :mp3 FLAC: :flac @@ -37,7 +38,9 @@ module OpenAI type voice = :alloy | :ash | :ballad | :coral | :echo | :sage | :shimmer | :verse - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY: :alloy ASH: :ash BALLAD: :ballad diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index fa2494d9..c85a596e 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionChunk = Chat::ChatCompletionChunk module Chat @@ -134,7 +133,9 @@ module OpenAI type role = :developer | :system | :user | :assistant | :tool - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + DEVELOPER: :developer SYSTEM: :system USER: :user @@ -198,7 +199,9 @@ module OpenAI type type_ = :function - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION: :function def self.values: -> 
::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] @@ -209,7 +212,9 @@ module OpenAI type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length TOOL_CALLS: :tool_calls @@ -241,7 +246,9 @@ module OpenAI type service_tier = :scale | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE: :scale DEFAULT: :default diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 01231844..348780aa 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -1,7 +1,6 @@ module OpenAI module Models - - class ChatCompletionContentPart = Chat::ChatCompletionContentPart + module ChatCompletionContentPart = Chat::ChatCompletionContentPart module Chat type chat_completion_content_part = @@ -10,7 +9,9 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionContentPartInputAudio | OpenAI::Models::Chat::ChatCompletionContentPart::File - class ChatCompletionContentPart < OpenAI::Union + module ChatCompletionContentPart + extend OpenAI::Union + type file = { file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index b80ee986..1bde5081 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage module Chat @@ -46,7 +45,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index bb409774..85902db7 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio module Chat @@ -42,7 +41,9 @@ module OpenAI type format_ = :wav | :mp3 - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV: :wav MP3: :mp3 diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs index e69c1ee3..e715e480 100644 --- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal module Chat diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs index 9c723c66..638f5e4e 100644 --- a/sig/openai/models/chat/chat_completion_content_part_text.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartText = Chat::ChatCompletionContentPartText module Chat diff --git 
a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs index 2d8a2cf3..e776fc71 100644 --- a/sig/openai/models/chat/chat_completion_deleted.rbs +++ b/sig/openai/models/chat/chat_completion_deleted.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionDeleted = Chat::ChatCompletionDeleted module Chat diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index e9cbba0a..aa1379e0 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs index b5e722fb..e7067a87 100644 --- a/sig/openai/models/chat/chat_completion_function_call_option.rbs +++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption module Chat diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs index b9ac7154..fad91c63 100644 --- a/sig/openai/models/chat/chat_completion_function_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam module Chat diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index b13a6e43..d3668749 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ b/sig/openai/models/chat/chat_completion_message.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionMessage = Chat::ChatCompletionMessage module Chat diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs index a42d88ca..c8d7612a 100644 --- a/sig/openai/models/chat/chat_completion_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_message_param.rbs @@ -1,7 +1,6 @@ module OpenAI module Models - - class ChatCompletionMessageParam = Chat::ChatCompletionMessageParam + module ChatCompletionMessageParam = Chat::ChatCompletionMessageParam module Chat type chat_completion_message_param = @@ -12,7 +11,9 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionToolMessageParam | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - class ChatCompletionMessageParam < OpenAI::Union + module ChatCompletionMessageParam + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, 
OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] end end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index 4ed4b2d2..cd147e2d 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall module Chat diff --git a/sig/openai/models/chat/chat_completion_modality.rbs b/sig/openai/models/chat/chat_completion_modality.rbs index e8df5962..7075d723 100644 --- a/sig/openai/models/chat/chat_completion_modality.rbs +++ b/sig/openai/models/chat/chat_completion_modality.rbs @@ -1,12 +1,13 @@ module OpenAI module Models - - class ChatCompletionModality = Chat::ChatCompletionModality + module ChatCompletionModality = Chat::ChatCompletionModality module Chat type chat_completion_modality = :text | :audio - class ChatCompletionModality < OpenAI::Enum + module ChatCompletionModality + extend OpenAI::Enum + TEXT: :text AUDIO: :audio diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs index 8da9abd2..62feb1bf 100644 --- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs +++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice module Chat diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index e38d1c31..f49bc614 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent module Chat @@ -25,7 +24,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs index 0d91a009..28d9e504 100644 --- a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs +++ b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs @@ -1,11 +1,9 @@ module OpenAI module Models - - class ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort + module ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort module Chat - - class ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort + module ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort end end end diff --git a/sig/openai/models/chat/chat_completion_role.rbs b/sig/openai/models/chat/chat_completion_role.rbs index be395c69..d805ec9e 100644 --- a/sig/openai/models/chat/chat_completion_role.rbs +++ b/sig/openai/models/chat/chat_completion_role.rbs @@ -1,13 +1,14 @@ module OpenAI module Models - - class ChatCompletionRole = Chat::ChatCompletionRole + module ChatCompletionRole = Chat::ChatCompletionRole module Chat type chat_completion_role = :developer | :system | :user | :assistant | :tool | :function - class ChatCompletionRole < OpenAI::Enum + module ChatCompletionRole 
+ extend OpenAI::Enum + DEVELOPER: :developer SYSTEM: :system USER: :user diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 2719aa2e..75e77b6c 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage module Chat diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index ed0721bc..7fed3536 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions module Chat diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index b3ae49f9..f889a520 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs index 4695ba6a..a076afef 100644 --- a/sig/openai/models/chat/chat_completion_token_logprob.rbs +++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob module Chat diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index f690d128..d465043d 100644 --- a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionTool = Chat::ChatCompletionTool module Chat diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index e6c246ee..001520b8 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -1,17 +1,20 @@ module OpenAI module Models - - class ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption + module ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption module Chat type chat_completion_tool_choice_option = OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto | OpenAI::Models::Chat::ChatCompletionNamedToolChoice - class ChatCompletionToolChoiceOption < OpenAI::Union + module ChatCompletionToolChoiceOption + extend OpenAI::Union + type auto = :none | :auto | :required - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE: :none AUTO: :auto REQUIRED: :required diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 86a425de..8dc39541 100644 --- 
a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam module Chat @@ -29,7 +28,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index 53192647..b359b18e 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::chat_completion_content_part] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_array = ::Array[OpenAI::Models::Chat::chat_completion_content_part] diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 56982b45..9d777b19 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -166,7 +166,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -174,10 +176,14 @@ module OpenAI OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode | OpenAI::Models::Chat::ChatCompletionFunctionCallOption - class FunctionCall < OpenAI::Union + module FunctionCall + extend OpenAI::Union + type function_call_mode = :none | :auto - class FunctionCallMode < OpenAI::Enum + module FunctionCallMode + extend OpenAI::Enum + NONE: :none AUTO: :auto @@ -218,7 +224,9 @@ module OpenAI type modality = :text | :audio - class Modality < OpenAI::Enum + module Modality + extend OpenAI::Enum + TEXT: :text AUDIO: :audio @@ -230,13 +238,17 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONSchema | OpenAI::Models::ResponseFormatJSONObject - class ResponseFormat < OpenAI::Union + module ResponseFormat + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] end type service_tier = :auto | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + AUTO: :auto DEFAULT: :default @@ -245,7 +257,9 @@ module OpenAI type stop = (String | ::Array[String])? 
- class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -277,7 +291,9 @@ module OpenAI type search_context_size = :low | :medium | :high - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW: :low MEDIUM: :medium HIGH: :high diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 9540d790..bb536c64 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -48,7 +48,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 88456945..c066a38c 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -39,7 +39,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 75caff71..1578b25d 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -45,7 +45,9 @@ module OpenAI | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613" - class ChatModel < OpenAI::Enum + module ChatModel + extend OpenAI::Enum + O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index a3873e2d..20f33540 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -24,7 +24,9 @@ module OpenAI type type_ = :eq | :ne | :gt | :gte | :lt | :lte - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + EQ: :eq NE: :ne GT: :gt @@ -37,7 +39,9 @@ module OpenAI type value = String | Float | bool - class Value < OpenAI::Union + module Value + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs index 05e3ac68..700baf43 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -28,7 +28,9 @@ module OpenAI type finish_reason = :stop | :length | :content_filter - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length CONTENT_FILTER: :content_filter diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index fd2677d3..358e6503 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -88,7 +88,9 @@ module OpenAI type model = String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" - class Model < OpenAI::Union + module Model + extend OpenAI::Union + GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" DAVINCI_002: :"davinci-002" BABBAGE_002: :"babbage-002" @@ -101,7 +103,9 @@ module OpenAI type prompt = String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]] - class Prompt < OpenAI::Union + module Prompt + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -119,7 +123,9 @@ module OpenAI type stop = (String | ::Array[String])? 
-      class Stop < OpenAI::Union
+      module Stop
+        extend OpenAI::Union
+
         type string_array = ::Array[String]

         StringArray: string_array
diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs
index 35aba042..80a9c41e 100644
--- a/sig/openai/models/compound_filter.rbs
+++ b/sig/openai/models/compound_filter.rbs
@@ -20,13 +20,17 @@ module OpenAI
       type filter = OpenAI::Models::ComparisonFilter | top

-      class Filter < OpenAI::Union
+      module Filter
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::ComparisonFilter, top]
       end

       type type_ = :and | :or

-      class Type < OpenAI::Enum
+      module Type
+        extend OpenAI::Enum
+
         AND: :and
         OR: :or
diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs
index 59ae7a9f..d08604d9 100644
--- a/sig/openai/models/embedding_create_params.rbs
+++ b/sig/openai/models/embedding_create_params.rbs
@@ -46,7 +46,9 @@ module OpenAI
       type input =
         String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]]

-      class Input < OpenAI::Union
+      module Input
+        extend OpenAI::Union
+
         type string_array = ::Array[String]

         StringArray: string_array
@@ -64,13 +66,17 @@ module OpenAI
       type model = String | OpenAI::Models::embedding_model

-      class Model < OpenAI::Union
+      module Model
+        extend OpenAI::Union
+
         def self.variants: -> [String, OpenAI::Models::embedding_model]
       end

       type encoding_format = :float | :base64

-      class EncodingFormat < OpenAI::Enum
+      module EncodingFormat
+        extend OpenAI::Enum
+
         FLOAT: :float
         BASE64: :base64
diff --git a/sig/openai/models/embedding_model.rbs b/sig/openai/models/embedding_model.rbs
index c334a8a5..ed029cea 100644
--- a/sig/openai/models/embedding_model.rbs
+++ b/sig/openai/models/embedding_model.rbs
@@ -5,7 +5,9 @@ module OpenAI
       | :"text-embedding-3-small"
       | :"text-embedding-3-large"

-    class EmbeddingModel < OpenAI::Enum
+    module EmbeddingModel
+      extend OpenAI::Enum
+
       TEXT_EMBEDDING_ADA_002: :"text-embedding-ada-002"
       TEXT_EMBEDDING_3_SMALL: :"text-embedding-3-small"
       TEXT_EMBEDDING_3_LARGE: :"text-embedding-3-large"
diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs
index d287b675..5efd7f51 100644
--- a/sig/openai/models/file_chunking_strategy.rbs
+++ b/sig/openai/models/file_chunking_strategy.rbs
@@ -4,7 +4,9 @@ module OpenAI
       OpenAI::Models::StaticFileChunkingStrategyObject
       | OpenAI::Models::OtherFileChunkingStrategyObject

-    class FileChunkingStrategy < OpenAI::Union
+    module FileChunkingStrategy
+      extend OpenAI::Union
+
       def self.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject]
     end
   end
diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs
index f5f9f28a..818b7c90 100644
--- a/sig/openai/models/file_chunking_strategy_param.rbs
+++ b/sig/openai/models/file_chunking_strategy_param.rbs
@@ -4,7 +4,9 @@ module OpenAI
       OpenAI::Models::AutoFileChunkingStrategyParam
       | OpenAI::Models::StaticFileChunkingStrategyObjectParam

-    class FileChunkingStrategyParam < OpenAI::Union
+    module FileChunkingStrategyParam
+      extend OpenAI::Union
+
       def self.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
     end
   end
diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs
index 2f36b51c..9d793064 100644
--- a/sig/openai/models/file_list_params.rbs
+++ b/sig/openai/models/file_list_params.rbs
@@ -43,7 +43,9 @@ module OpenAI
       type order = :asc | :desc

-      class Order < OpenAI::Enum
+      module Order
+        extend OpenAI::Enum
+
         ASC: :asc
         DESC: :desc
diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs
index f229d68d..3bb22b13 100644
--- a/sig/openai/models/file_object.rbs
+++ b/sig/openai/models/file_object.rbs
@@ -59,7 +59,9 @@ module OpenAI
       | :"fine-tune-results"
       | :vision

-      class Purpose < OpenAI::Enum
+      module Purpose
+        extend OpenAI::Enum
+
         ASSISTANTS: :assistants
         ASSISTANTS_OUTPUT: :assistants_output
         BATCH: :batch
@@ -73,7 +75,9 @@ module OpenAI
       type status = :uploaded | :processed | :error

-      class Status < OpenAI::Enum
+      module Status
+        extend OpenAI::Enum
+
         UPLOADED: :uploaded
         PROCESSED: :processed
         ERROR: :error
diff --git a/sig/openai/models/file_purpose.rbs b/sig/openai/models/file_purpose.rbs
index cf532f5b..527e5d11 100644
--- a/sig/openai/models/file_purpose.rbs
+++ b/sig/openai/models/file_purpose.rbs
@@ -3,7 +3,9 @@ module OpenAI
     type file_purpose =
       :assistants | :batch | :"fine-tune" | :vision | :user_data | :evals

-    class FilePurpose < OpenAI::Enum
+    module FilePurpose
+      extend OpenAI::Enum
+
       ASSISTANTS: :assistants
       BATCH: :batch
      FINE_TUNE: :"fine-tune"
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
index 3b4f5915..b696b25a 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class FineTuningJob = FineTuning::FineTuningJob
 
     module FineTuning
@@ -148,19 +147,25 @@ module OpenAI
           type batch_size = :auto | Integer

-          class BatchSize < OpenAI::Union
+          module BatchSize
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Integer]
           end

           type learning_rate_multiplier = :auto | Float

-          class LearningRateMultiplier < OpenAI::Union
+          module LearningRateMultiplier
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Float]
           end

           type n_epochs = :auto | Integer

-          class NEpochs < OpenAI::Union
+          module NEpochs
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Integer]
           end
         end
@@ -173,7 +178,9 @@ module OpenAI
         | :failed
         | :cancelled

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           VALIDATING_FILES: :validating_files
           QUEUED: :queued
           RUNNING: :running
@@ -280,25 +287,33 @@ module OpenAI
             type batch_size = :auto | Integer

-            class BatchSize < OpenAI::Union
+            module BatchSize
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end

             type beta = :auto | Float

-            class Beta < OpenAI::Union
+            module Beta
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type learning_rate_multiplier = :auto | Float

-            class LearningRateMultiplier < OpenAI::Union
+            module LearningRateMultiplier
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type n_epochs = :auto | Integer

-            class NEpochs < OpenAI::Union
+            module NEpochs
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end
           end
@@ -358,19 +373,25 @@ module OpenAI
             type batch_size = :auto | Integer

-            class BatchSize < OpenAI::Union
+            module BatchSize
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end

             type learning_rate_multiplier = :auto | Float

-            class LearningRateMultiplier < OpenAI::Union
+            module LearningRateMultiplier
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type n_epochs = :auto | Integer

-            class NEpochs < OpenAI::Union
+            module NEpochs
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end
           end
@@ -378,7 +399,9 @@ module OpenAI
         type type_ = :supervised | :dpo

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           SUPERVISED: :supervised
           DPO: :dpo
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
index 239f0331..e70febc9 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class FineTuningJobEvent = FineTuning::FineTuningJobEvent
 
     module FineTuning
@@ -50,7 +49,9 @@ module OpenAI
         type level = :info | :warn | :error

-        class Level < OpenAI::Enum
+        module Level
+          extend OpenAI::Enum
+
           INFO: :info
           WARN: :warn
           ERROR: :error
@@ -60,7 +61,9 @@ module OpenAI
         type type_ = :message | :metrics

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           MESSAGE: :message
           METRICS: :metrics
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
index 55ed40a2..ada2b1f0 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
@@ -1,10 +1,8 @@
 module OpenAI
   module Models
-    class FineTuningJobIntegration = FineTuning::FineTuningJobIntegration
 
     module FineTuning
-
       class FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject
     end
   end
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
index bc0a81a5..f5fc5cec 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration
 
     module FineTuning
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
index 18b23c37..e156cf05 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject
 
     module FineTuning
diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs
index 8942f62f..913d7cd3 100644
--- a/sig/openai/models/fine_tuning/job_create_params.rbs
+++ b/sig/openai/models/fine_tuning/job_create_params.rbs
@@ -67,7 +67,9 @@ module OpenAI
         | :"gpt-3.5-turbo"
         | :"gpt-4o-mini"

-        class Model < OpenAI::Union
+        module Model
+          extend OpenAI::Union
+
           BABBAGE_002: :"babbage-002"
           DAVINCI_002: :"davinci-002"
           GPT_3_5_TURBO: :"gpt-3.5-turbo"
@@ -115,19 +117,25 @@ module OpenAI
           type batch_size = :auto | Integer

-          class BatchSize < OpenAI::Union
+          module BatchSize
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Integer]
           end

           type learning_rate_multiplier = :auto | Float

-          class LearningRateMultiplier < OpenAI::Union
+          module LearningRateMultiplier
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Float]
           end

           type n_epochs = :auto | Integer

-          class NEpochs < OpenAI::Union
+          module NEpochs
+            extend OpenAI::Union
+
             def self.variants: -> [:auto, Integer]
           end
         end
@@ -276,25 +284,33 @@ module OpenAI
             type batch_size = :auto | Integer

-            class BatchSize < OpenAI::Union
+            module BatchSize
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end

             type beta = :auto | Float

-            class Beta < OpenAI::Union
+            module Beta
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type learning_rate_multiplier = :auto | Float

-            class LearningRateMultiplier < OpenAI::Union
+            module LearningRateMultiplier
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type n_epochs = :auto | Integer

-            class NEpochs < OpenAI::Union
+            module NEpochs
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end
           end
@@ -354,19 +370,25 @@ module OpenAI
             type batch_size = :auto | Integer

-            class BatchSize < OpenAI::Union
+            module BatchSize
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end

             type learning_rate_multiplier = :auto | Float

-            class LearningRateMultiplier < OpenAI::Union
+            module LearningRateMultiplier
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Float]
             end

             type n_epochs = :auto | Integer

-            class NEpochs < OpenAI::Union
+            module NEpochs
+              extend OpenAI::Union
+
               def self.variants: -> [:auto, Integer]
             end
           end
@@ -374,7 +396,9 @@ module OpenAI
         type type_ = :supervised | :dpo

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           SUPERVISED: :supervised
           DPO: :dpo
diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs
index 7770fc34..1460783f 100644
--- a/sig/openai/models/image_create_variation_params.rbs
+++ b/sig/openai/models/image_create_variation_params.rbs
@@ -43,13 +43,17 @@ module OpenAI
       type model = String | OpenAI::Models::image_model

-      class Model < OpenAI::Union
+      module Model
+        extend OpenAI::Union
+
         def self.variants: -> [String, OpenAI::Models::image_model]
       end

       type response_format = :url | :b64_json

-      class ResponseFormat < OpenAI::Enum
+      module ResponseFormat
+        extend OpenAI::Enum
+
         URL: :url
         B64_JSON: :b64_json
@@ -58,7 +62,9 @@ module OpenAI
       type size = :"256x256" | :"512x512" | :"1024x1024"

-      class Size < OpenAI::Enum
+      module Size
+        extend OpenAI::Enum
+
         NUMBER_256X256: :"256x256"
         NUMBER_512X512: :"512x512"
         NUMBER_1024X1024: :"1024x1024"
diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs
index 63e0fbf0..cebbb406 100644
--- a/sig/openai/models/image_edit_params.rbs
+++ b/sig/openai/models/image_edit_params.rbs
@@ -53,13 +53,17 @@ module OpenAI
       type model = String | OpenAI::Models::image_model

-      class Model < OpenAI::Union
+      module Model
+        extend OpenAI::Union
+
         def self.variants: -> [String, OpenAI::Models::image_model]
       end

       type response_format = :url | :b64_json

-      class ResponseFormat < OpenAI::Enum
+      module ResponseFormat
+        extend OpenAI::Enum
+
         URL: :url
         B64_JSON: :b64_json
@@ -68,7 +72,9 @@ module OpenAI
       type size = :"256x256" | :"512x512" | :"1024x1024"

-      class Size < OpenAI::Enum
+      module Size
+        extend OpenAI::Enum
+
         NUMBER_256X256: :"256x256"
         NUMBER_512X512: :"512x512"
         NUMBER_1024X1024: :"1024x1024"
diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs
index 1a3f9162..ea843f0f 100644
--- a/sig/openai/models/image_generate_params.rbs
+++ b/sig/openai/models/image_generate_params.rbs
@@ -55,13 +55,17 @@ module OpenAI
       type model = String | OpenAI::Models::image_model

-      class Model < OpenAI::Union
+      module Model
+        extend OpenAI::Union
+
         def self.variants: -> [String, OpenAI::Models::image_model]
       end

       type quality = :standard | :hd

-      class Quality < OpenAI::Enum
+      module Quality
+        extend OpenAI::Enum
+
         STANDARD: :standard
         HD: :hd
@@ -70,7 +74,9 @@ module OpenAI
       type response_format = :url | :b64_json

-      class ResponseFormat < OpenAI::Enum
+      module ResponseFormat
+        extend OpenAI::Enum
+
         URL: :url
         B64_JSON: :b64_json
@@ -80,7 +86,9 @@ module OpenAI
       type size =
         :"256x256" | :"512x512" | :"1024x1024" | :"1792x1024" | :"1024x1792"

-      class Size < OpenAI::Enum
+      module Size
+        extend OpenAI::Enum
+
         NUMBER_256X256: :"256x256"
         NUMBER_512X512: :"512x512"
         NUMBER_1024X1024: :"1024x1024"
@@ -92,7 +100,9 @@ module OpenAI
       type style = :vivid | :natural

-      class Style < OpenAI::Enum
+      module Style
+        extend OpenAI::Enum
+
         VIVID: :vivid
         NATURAL: :natural
diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs
index 1151fd9b..e68710d9 100644
--- a/sig/openai/models/image_model.rbs
+++ b/sig/openai/models/image_model.rbs
@@ -2,7 +2,9 @@ module OpenAI
   module Models
     type image_model = :"dall-e-2" | :"dall-e-3"

-    class ImageModel < OpenAI::Enum
+    module ImageModel
+      extend OpenAI::Enum
+
       DALL_E_2: :"dall-e-2"
       DALL_E_3: :"dall-e-3"
diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs
index d35f2749..9e7a8041 100644
--- a/sig/openai/models/moderation.rbs
+++ b/sig/openai/models/moderation.rbs
@@ -153,7 +153,9 @@ module OpenAI
         type harassment = :text

-        class Harassment < OpenAI::Enum
+        module Harassment
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment]
@@ -161,7 +163,9 @@ module OpenAI
         type harassment_threatening = :text

-        class HarassmentThreatening < OpenAI::Enum
+        module HarassmentThreatening
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening]
@@ -169,7 +173,9 @@ module OpenAI
         type hate = :text

-        class Hate < OpenAI::Enum
+        module Hate
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate]
@@ -177,7 +183,9 @@ module OpenAI
         type hate_threatening = :text

-        class HateThreatening < OpenAI::Enum
+        module HateThreatening
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening]
@@ -185,7 +193,9 @@ module OpenAI
         type illicit = :text

-        class Illicit < OpenAI::Enum
+        module Illicit
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit]
@@ -193,7 +203,9 @@ module OpenAI
         type illicit_violent = :text

-        class IllicitViolent < OpenAI::Enum
+        module IllicitViolent
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent]
@@ -201,7 +213,9 @@ module OpenAI
         type self_harm = :text | :image

-        class SelfHarm < OpenAI::Enum
+        module SelfHarm
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
@@ -210,7 +224,9 @@ module OpenAI
         type self_harm_instruction = :text | :image

-        class SelfHarmInstruction < OpenAI::Enum
+        module SelfHarmInstruction
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
@@ -219,7 +235,9 @@ module OpenAI
         type self_harm_intent = :text | :image

-        class SelfHarmIntent < OpenAI::Enum
+        module SelfHarmIntent
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
@@ -228,7 +246,9 @@ module OpenAI
         type sexual = :text | :image

-        class Sexual < OpenAI::Enum
+        module Sexual
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
@@ -237,7 +257,9 @@ module OpenAI
         type sexual_minor = :text

-        class SexualMinor < OpenAI::Enum
+        module SexualMinor
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor]
@@ -245,7 +267,9 @@ module OpenAI
         type violence = :text | :image

-        class Violence < OpenAI::Enum
+        module Violence
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
@@ -254,7 +278,9 @@ module OpenAI
         type violence_graphic = :text | :image

-        class ViolenceGraphic < OpenAI::Enum
+        module ViolenceGraphic
+          extend OpenAI::Enum
+
           TEXT: :text
           IMAGE: :image
diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs
index c95e6d3e..41fec7ce 100644
--- a/sig/openai/models/moderation_create_params.rbs
+++ b/sig/openai/models/moderation_create_params.rbs
@@ -32,7 +32,9 @@ module OpenAI
       | ::Array[String]
       | ::Array[OpenAI::Models::moderation_multi_modal_input]

-      class Input < OpenAI::Union
+      module Input
+        extend OpenAI::Union
+
         type string_array = ::Array[String]

         StringArray: string_array
@@ -47,7 +49,9 @@ module OpenAI
       type model = String | OpenAI::Models::moderation_model

-      class Model < OpenAI::Union
+      module Model
+        extend OpenAI::Union
+
         def self.variants: -> [String, OpenAI::Models::moderation_model]
       end
     end
diff --git a/sig/openai/models/moderation_model.rbs b/sig/openai/models/moderation_model.rbs
index 6fb884f3..fa7264f2 100644
--- a/sig/openai/models/moderation_model.rbs
+++ b/sig/openai/models/moderation_model.rbs
@@ -6,7 +6,9 @@ module OpenAI
       | :"text-moderation-latest"
       | :"text-moderation-stable"

-    class ModerationModel < OpenAI::Enum
+    module ModerationModel
+      extend OpenAI::Enum
+
       OMNI_MODERATION_LATEST: :"omni-moderation-latest"
       OMNI_MODERATION_2024_09_26: :"omni-moderation-2024-09-26"
       TEXT_MODERATION_LATEST: :"text-moderation-latest"
diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs
index c98cd3a4..9388bae0 100644
--- a/sig/openai/models/moderation_multi_modal_input.rbs
+++ b/sig/openai/models/moderation_multi_modal_input.rbs
@@ -4,7 +4,9 @@ module OpenAI
       OpenAI::Models::ModerationImageURLInput
       | OpenAI::Models::ModerationTextInput

-    class ModerationMultiModalInput < OpenAI::Union
+    module ModerationMultiModalInput
+      extend OpenAI::Union
+
       def self.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]
     end
   end
diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs
index a4184fb6..8452d648 100644
--- a/sig/openai/models/reasoning.rbs
+++ b/sig/openai/models/reasoning.rbs
@@ -20,7 +20,9 @@ module OpenAI
       type generate_summary = :concise | :detailed

-      class GenerateSummary < OpenAI::Enum
+      module GenerateSummary
+        extend OpenAI::Enum
+
         CONCISE: :concise
         DETAILED: :detailed
diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs
index 57327554..27d712d0 100644
--- a/sig/openai/models/reasoning_effort.rbs
+++ b/sig/openai/models/reasoning_effort.rbs
@@ -2,7 +2,9 @@ module OpenAI
   module Models
     type reasoning_effort = :low | :medium | :high

-    class ReasoningEffort < OpenAI::Enum
+    module ReasoningEffort
+      extend OpenAI::Enum
+
       LOW: :low
       MEDIUM: :medium
       HIGH: :high
diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs
index dbfd8278..944fbd51 100644
--- a/sig/openai/models/responses/computer_tool.rbs
+++ b/sig/openai/models/responses/computer_tool.rbs
@@ -29,7 +29,9 @@ module OpenAI
         type environment = :mac | :windows | :ubuntu | :browser

-        class Environment < OpenAI::Enum
+        module Environment
+          extend OpenAI::Enum
+
           MAC: :mac
           WINDOWS: :windows
           UBUNTU: :ubuntu
diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs
index 8e5bc808..10931508 100644
--- a/sig/openai/models/responses/easy_input_message.rbs
+++ b/sig/openai/models/responses/easy_input_message.rbs
@@ -31,13 +31,17 @@ module OpenAI
           String
           | OpenAI::Models::Responses::response_input_message_content_list

-        class Content < OpenAI::Union
+        module Content
+          extend OpenAI::Union
+
           def self.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list]
         end

         type role = :user | :assistant | :system | :developer

-        class Role < OpenAI::Enum
+        module Role
+          extend OpenAI::Enum
+
           USER: :user
           ASSISTANT: :assistant
           SYSTEM: :system
@@ -48,7 +52,9 @@ module OpenAI
         type type_ = :message

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           MESSAGE: :message

           def self.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_]
diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs
index 8c714006..fbea9c27 100644
--- a/sig/openai/models/responses/file_search_tool.rbs
+++ b/sig/openai/models/responses/file_search_tool.rbs
@@ -44,7 +44,9 @@ module OpenAI
         type filters =
           OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter

-        class Filters < OpenAI::Union
+        module Filters
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
         end
@@ -74,7 +76,9 @@ module OpenAI
           type ranker = :auto | :"default-2024-11-15"

-          class Ranker < OpenAI::Enum
+          module Ranker
+            extend OpenAI::Enum
+
             AUTO: :auto
             DEFAULT_2024_11_15: :"default-2024-11-15"
diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs
index 2971462e..633f4ef4 100644
--- a/sig/openai/models/responses/input_item_list_params.rbs
+++ b/sig/openai/models/responses/input_item_list_params.rbs
@@ -44,7 +44,9 @@ module OpenAI
         type order = :asc | :desc

-        class Order < OpenAI::Enum
+        module Order
+          extend OpenAI::Enum
+
           ASC: :asc
           DESC: :desc
diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs
index 39953b70..21f27944 100644
--- a/sig/openai/models/responses/response.rbs
+++ b/sig/openai/models/responses/response.rbs
@@ -133,7 +133,9 @@ module OpenAI
           type reason = :max_output_tokens | :content_filter

-          class Reason < OpenAI::Enum
+          module Reason
+            extend OpenAI::Enum
+
             MAX_OUTPUT_TOKENS: :max_output_tokens
             CONTENT_FILTER: :content_filter
@@ -146,13 +148,17 @@ module OpenAI
           | OpenAI::Models::Responses::ToolChoiceTypes
           | OpenAI::Models::Responses::ToolChoiceFunction

-        class ToolChoice < OpenAI::Union
+        module ToolChoice
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
         end

         type truncation = :auto | :disabled

-        class Truncation < OpenAI::Enum
+        module Truncation
+          extend OpenAI::Enum
+
           AUTO: :auto
           DISABLED: :disabled
diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
index 1201cdfe..3d2f285b 100644
--- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
+++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
@@ -35,7 +35,9 @@ module OpenAI
           OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs
           | OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files

-        class Result < OpenAI::Union
+        module Result
+          extend OpenAI::Union
+
           type logs = { logs: String, type: :logs }

           class Logs < OpenAI::BaseModel
@@ -84,7 +86,9 @@ module OpenAI
         type status = :in_progress | :interpreting | :completed

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           INTERPRETING: :interpreting
           COMPLETED: :completed
diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs
index 69f3d3e4..e6b8fd9e 100644
--- a/sig/openai/models/responses/response_computer_tool_call.rbs
+++ b/sig/openai/models/responses/response_computer_tool_call.rbs
@@ -46,7 +46,9 @@ module OpenAI
           | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type
           | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait

-        class Action < OpenAI::Union
+        module Action
+          extend OpenAI::Union
+
           type click =
             {
               button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button,
@@ -75,7 +77,9 @@ module OpenAI
             type button = :left | :right | :wheel | :back | :forward

-            class Button < OpenAI::Enum
+            module Button
+              extend OpenAI::Enum
+
               LEFT: :left
               RIGHT: :right
               WHEEL: :wheel
@@ -244,7 +248,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
@@ -254,7 +260,9 @@ module OpenAI
         type type_ = :computer_call

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           COMPUTER_CALL: :computer_call

           def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_]
diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
index 0e737186..c4a48c1d 100644
--- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
+++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
@@ -60,7 +60,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs
index c7bb377e..ebfaa85b 100644
--- a/sig/openai/models/responses/response_content.rbs
+++ b/sig/openai/models/responses/response_content.rbs
@@ -8,7 +8,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseOutputText
         | OpenAI::Models::Responses::ResponseOutputRefusal

-      class ResponseContent < OpenAI::Union
+      module ResponseContent
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
       end
     end
diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs
index b221cdc0..2f4c1e05 100644
--- a/sig/openai/models/responses/response_content_part_added_event.rbs
+++ b/sig/openai/models/responses/response_content_part_added_event.rbs
@@ -35,7 +35,9 @@ module OpenAI
           OpenAI::Models::Responses::ResponseOutputText
           | OpenAI::Models::Responses::ResponseOutputRefusal

-        class Part < OpenAI::Union
+        module Part
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
         end
       end
diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs
index f7c71025..792dd89a 100644
--- a/sig/openai/models/responses/response_content_part_done_event.rbs
+++ b/sig/openai/models/responses/response_content_part_done_event.rbs
@@ -35,7 +35,9 @@ module OpenAI
           OpenAI::Models::Responses::ResponseOutputText
           | OpenAI::Models::Responses::ResponseOutputRefusal

-        class Part < OpenAI::Union
+        module Part
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
         end
       end
diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs
index 697640de..7336a20e 100644
--- a/sig/openai/models/responses/response_create_params.rbs
+++ b/sig/openai/models/responses/response_create_params.rbs
@@ -100,7 +100,9 @@ module OpenAI
         type input = String | OpenAI::Models::Responses::response_input

-        class Input < OpenAI::Union
+        module Input
+          extend OpenAI::Union
+
           def self.variants: -> [String, OpenAI::Models::Responses::response_input]
         end
@@ -109,13 +111,17 @@ module OpenAI
           | OpenAI::Models::Responses::ToolChoiceTypes
           | OpenAI::Models::Responses::ToolChoiceFunction

-        class ToolChoice < OpenAI::Union
+        module ToolChoice
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
         end

         type truncation = :auto | :disabled

-        class Truncation < OpenAI::Enum
+        module Truncation
+          extend OpenAI::Enum
+
           AUTO: :auto
           DISABLED: :disabled
diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs
index 5d705a42..c9461327 100644
--- a/sig/openai/models/responses/response_error.rbs
+++ b/sig/openai/models/responses/response_error.rbs
@@ -39,7 +39,9 @@ module OpenAI
         | :failed_to_download_image
         | :image_file_not_found

-        class Code < OpenAI::Enum
+        module Code
+          extend OpenAI::Enum
+
           SERVER_ERROR: :server_error
           RATE_LIMIT_EXCEEDED: :rate_limit_exceeded
           INVALID_PROMPT: :invalid_prompt
diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs
index 8bcaf1dc..102ce635 100644
--- a/sig/openai/models/responses/response_file_search_tool_call.rbs
+++ b/sig/openai/models/responses/response_file_search_tool_call.rbs
@@ -34,7 +34,9 @@ module OpenAI
         type status =
           :in_progress | :searching | :completed | :incomplete | :failed

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           SEARCHING: :searching
           COMPLETED: :completed
@@ -84,7 +86,9 @@ module OpenAI
           type attribute = String | Float | bool

-          class Attribute < OpenAI::Union
+          module Attribute
+            extend OpenAI::Union
+
             def self.variants: -> [String, Float, bool]
           end
         end
diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs
index 7a38cb8e..ffd41786 100644
--- a/sig/openai/models/responses/response_format_text_config.rbs
+++ b/sig/openai/models/responses/response_format_text_config.rbs
@@ -6,7 +6,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig
         | OpenAI::Models::ResponseFormatJSONObject

-      class ResponseFormatTextConfig < OpenAI::Union
+      module ResponseFormatTextConfig
+        extend OpenAI::Union
+
        def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
       end
     end
diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs
index acd89dee..a726e572 100644
--- a/sig/openai/models/responses/response_function_tool_call.rbs
+++ b/sig/openai/models/responses/response_function_tool_call.rbs
@@ -43,7 +43,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
index 3265f83f..eea6788a 100644
--- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs
+++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
@@ -37,7 +37,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs
index 7da9b40d..301b596c 100644
--- a/sig/openai/models/responses/response_function_web_search.rbs
+++ b/sig/openai/models/responses/response_function_web_search.rbs
@@ -25,7 +25,9 @@ module OpenAI
         type status = :in_progress | :searching | :completed | :failed

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           SEARCHING: :searching
           COMPLETED: :completed
diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs
index b2a08af5..df76bd92 100644
--- a/sig/openai/models/responses/response_includable.rbs
+++ b/sig/openai/models/responses/response_includable.rbs
@@ -6,7 +6,9 @@ module OpenAI
         | :"message.input_image.image_url"
         | :"computer_call_output.output.image_url"

-      class ResponseIncludable < OpenAI::Enum
+      module ResponseIncludable
+        extend OpenAI::Enum
+
         FILE_SEARCH_CALL_RESULTS: :"file_search_call.results"
         MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url"
         COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url"
diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs
index b41c744b..e015e2d8 100644
--- a/sig/openai/models/responses/response_input_audio.rbs
+++ b/sig/openai/models/responses/response_input_audio.rbs
@@ -25,7 +25,9 @@ module OpenAI
         type format_ = :mp3 | :wav

-        class Format < OpenAI::Enum
+        module Format
+          extend OpenAI::Enum
+
           MP3: :mp3
           WAV: :wav
diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs
index 004cfa5c..6f2fe81c 100644
--- a/sig/openai/models/responses/response_input_content.rbs
+++ b/sig/openai/models/responses/response_input_content.rbs
@@ -6,7 +6,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseInputImage
         | OpenAI::Models::Responses::ResponseInputFile

-      class ResponseInputContent < OpenAI::Union
+      module ResponseInputContent
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
       end
     end
diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs
index 24abed14..fd361d9a 100644
--- a/sig/openai/models/responses/response_input_image.rbs
+++ b/sig/openai/models/responses/response_input_image.rbs
@@ -29,7 +29,9 @@ module OpenAI
         type detail = :high | :low | :auto

-        class Detail < OpenAI::Enum
+        module Detail
+          extend OpenAI::Enum
+
           HIGH: :high
           LOW: :low
           AUTO: :auto
diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs
index 869b143f..20caf979 100644
--- a/sig/openai/models/responses/response_input_item.rbs
+++ b/sig/openai/models/responses/response_input_item.rbs
@@ -14,7 +14,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseReasoningItem
         | OpenAI::Models::Responses::ResponseInputItem::ItemReference

-      class ResponseInputItem < OpenAI::Union
+      module ResponseInputItem
+        extend OpenAI::Union
+
         type message =
           {
             content: OpenAI::Models::Responses::response_input_message_content_list,
@@ -51,7 +53,9 @@ module OpenAI
           type role = :user | :system | :developer

-          class Role < OpenAI::Enum
+          module Role
+            extend OpenAI::Enum
+
             USER: :user
             SYSTEM: :system
             DEVELOPER: :developer
@@ -61,7 +65,9 @@ module OpenAI
           type status = :in_progress | :completed | :incomplete

-          class Status < OpenAI::Enum
+          module Status
+            extend OpenAI::Enum
+
             IN_PROGRESS: :in_progress
             COMPLETED: :completed
             INCOMPLETE: :incomplete
@@ -71,7 +77,9 @@ module OpenAI
           type type_ = :message

-          class Type < OpenAI::Enum
+          module Type
+            extend OpenAI::Enum
+
             MESSAGE: :message

             def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_]
@@ -139,7 +147,9 @@ module OpenAI
           type status = :in_progress | :completed | :incomplete

-          class Status < OpenAI::Enum
+          module Status
+            extend OpenAI::Enum
+
             IN_PROGRESS: :in_progress
             COMPLETED: :completed
             INCOMPLETE: :incomplete
@@ -186,7 +196,9 @@ module OpenAI
           type status = :in_progress | :completed | :incomplete

-          class Status < OpenAI::Enum
+          module Status
+            extend OpenAI::Enum
+
             IN_PROGRESS: :in_progress
             COMPLETED: :completed
             INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs
index 45e1f023..2671fd84 100644
--- a/sig/openai/models/responses/response_input_message_item.rbs
+++ b/sig/openai/models/responses/response_input_message_item.rbs
@@ -41,7 +41,9 @@ module OpenAI
         type role = :user | :system | :developer

-        class Role < OpenAI::Enum
+        module Role
+          extend OpenAI::Enum
+
           USER: :user
           SYSTEM: :system
           DEVELOPER: :developer
@@ -51,7 +53,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
@@ -61,7 +65,9 @@ module OpenAI
         type type_ = :message

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           MESSAGE: :message

           def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_]
diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs
index 8fc80907..8d2e807d 100644
--- a/sig/openai/models/responses/response_item.rbs
+++ b/sig/openai/models/responses/response_item.rbs
@@ -11,7 +11,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseFunctionToolCallItem
         | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem

-      class ResponseItem < OpenAI::Union
+      module ResponseItem
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem]
       end
     end
diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs
index 37f78a23..dbed0410 100644
--- a/sig/openai/models/responses/response_item_list.rbs
+++ b/sig/openai/models/responses/response_item_list.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class ResponseItemList = Responses::ResponseItemList
 
     module Responses
diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs
index e01fe1b2..9868493b 100644
--- a/sig/openai/models/responses/response_output_item.rbs
+++ b/sig/openai/models/responses/response_output_item.rbs
@@ -9,7 +9,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseComputerToolCall
         | OpenAI::Models::Responses::ResponseReasoningItem

-      class ResponseOutputItem < OpenAI::Union
+      module ResponseOutputItem
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
       end
     end
diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs
index b003219c..81eebadd 100644
--- a/sig/openai/models/responses/response_output_message.rbs
+++ b/sig/openai/models/responses/response_output_message.rbs
@@ -35,13 +35,17 @@ module OpenAI
           OpenAI::Models::Responses::ResponseOutputText
           | OpenAI::Models::Responses::ResponseOutputRefusal

-        class Content < OpenAI::Union
+        module Content
+          extend OpenAI::Union
+
           def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
         end

         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs
index a40aee39..3d499c72 100644
--- a/sig/openai/models/responses/response_output_text.rbs
+++ b/sig/openai/models/responses/response_output_text.rbs
@@ -28,7 +28,9 @@ module OpenAI
           | OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation
           | OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath

-        class Annotation < OpenAI::Union
+        module Annotation
+          extend OpenAI::Union
+
           type file_citation =
             { file_id: String, index: Integer, type: :file_citation }
diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs
index 23d2dd02..17d8480f 100644
--- a/sig/openai/models/responses/response_reasoning_item.rbs
+++ b/sig/openai/models/responses/response_reasoning_item.rbs
@@ -45,7 +45,9 @@ module OpenAI
         type status = :in_progress | :completed | :incomplete

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs
index 17349a0c..3ecc2fc1 100644
--- a/sig/openai/models/responses/response_status.rbs
+++ b/sig/openai/models/responses/response_status.rbs
@@ -3,7 +3,9 @@ module OpenAI
     module Responses
       type response_status = :completed | :failed | :in_progress | :incomplete

-      class ResponseStatus < OpenAI::Enum
+      module ResponseStatus
+        extend OpenAI::Enum
+
         COMPLETED: :completed
         FAILED: :failed
         IN_PROGRESS: :in_progress
diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs
index 0d48dfd6..8642b35b 100644
--- a/sig/openai/models/responses/response_stream_event.rbs
+++ b/sig/openai/models/responses/response_stream_event.rbs
@@ -35,7 +35,9 @@ module OpenAI
         | OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent
         | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent

-      class ResponseStreamEvent < OpenAI::Union
+      module ResponseStreamEvent
+        extend OpenAI::Union
+
        def self.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]
       end
     end
diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
index d01375b4..17d56582 100644
--- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs
+++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
@@ -40,7 +40,9 @@ module OpenAI
           | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation
           | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath

-        class Annotation < OpenAI::Union
+        module Annotation
+          extend OpenAI::Union
+
           type file_citation =
             { file_id: String, index: Integer, type: :file_citation }
diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs
index 642f7196..7cf475b8 100644
--- a/sig/openai/models/responses/tool.rbs
+++ b/sig/openai/models/responses/tool.rbs
@@ -7,7 +7,9 @@ module OpenAI
         | OpenAI::Models::Responses::ComputerTool
         | OpenAI::Models::Responses::WebSearchTool

-      class Tool < OpenAI::Union
+      module Tool
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool]
       end
     end
diff --git a/sig/openai/models/responses/tool_choice_options.rbs b/sig/openai/models/responses/tool_choice_options.rbs
index e902ea2c..412031bf 100644
--- a/sig/openai/models/responses/tool_choice_options.rbs
+++ b/sig/openai/models/responses/tool_choice_options.rbs
@@ -3,7 +3,9 @@ module OpenAI
     module Responses
       type tool_choice_options = :none | :auto | :required

-      class ToolChoiceOptions < OpenAI::Enum
+      module ToolChoiceOptions
+        extend OpenAI::Enum
+
         NONE: :none
         AUTO: :auto
         REQUIRED: :required
diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs
index ea1e958d..73f15517 100644
--- a/sig/openai/models/responses/tool_choice_types.rbs
+++ b/sig/openai/models/responses/tool_choice_types.rbs
@@ -19,7 +19,9 @@ module OpenAI
         | :computer_use_preview
         | :web_search_preview_2025_03_11

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           FILE_SEARCH: :file_search
           WEB_SEARCH_PREVIEW: :web_search_preview
           COMPUTER_USE_PREVIEW: :computer_use_preview
diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs
index abb3f9a1..20a3b337 100644
--- a/sig/openai/models/responses/web_search_tool.rbs
+++ b/sig/openai/models/responses/web_search_tool.rbs
@@ -29,7 +29,9 @@ module OpenAI
         type type_ = :web_search_preview | :web_search_preview_2025_03_11

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           WEB_SEARCH_PREVIEW: :web_search_preview
           WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11
@@ -38,7 +40,9 @@ module OpenAI
         type search_context_size = :low | :medium | :high

-        class SearchContextSize < OpenAI::Enum
+        module SearchContextSize
+          extend OpenAI::Enum
+
           LOW: :low
           MEDIUM: :medium
           HIGH: :high
diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs
index 582ad254..3bfeacd2 100644
--- a/sig/openai/models/responses_model.rbs
+++ b/sig/openai/models/responses_model.rbs
@@ -5,14 +5,18 @@ module OpenAI
       | OpenAI::Models::chat_model
       | OpenAI::Models::ResponsesModel::union_member2

-    class ResponsesModel < OpenAI::Union
+    module ResponsesModel
+      extend OpenAI::Union
+
       type union_member2 =
         :"o1-pro"
         | :"o1-pro-2025-03-19"
        | :"computer-use-preview"
        | :"computer-use-preview-2025-03-11"

-      class UnionMember2 < OpenAI::Enum
+      module UnionMember2
+        extend OpenAI::Enum
+
         O1_PRO: :"o1-pro"
         O1_PRO_2025_03_19: :"o1-pro-2025-03-19"
         COMPUTER_USE_PREVIEW: :"computer-use-preview"
diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs
index eead4b34..a5baefff 100644
--- a/sig/openai/models/upload.rbs
+++ b/sig/openai/models/upload.rbs
@@ -48,7 +48,9 @@ module OpenAI
       type status = :pending | :completed | :cancelled | :expired

-      class Status < OpenAI::Enum
+      module Status
+        extend OpenAI::Enum
+
         PENDING: :pending
         COMPLETED: :completed
         CANCELLED: :cancelled
diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs
index 94f8bb7b..2a1f9e38 100644
--- a/sig/openai/models/uploads/upload_part.rbs
+++ b/sig/openai/models/uploads/upload_part.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class UploadPart = Uploads::UploadPart
 
     module Uploads
diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs
index 2f4e439b..ba9a3850 100644
--- a/sig/openai/models/vector_store.rbs
+++ b/sig/openai/models/vector_store.rbs
@@ -91,7 +91,9 @@ module OpenAI
       type status = :expired | :in_progress | :completed

-      class Status < OpenAI::Enum
+      module Status
+        extend OpenAI::Enum
+
         EXPIRED: :expired
         IN_PROGRESS: :in_progress
         COMPLETED: :completed
diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs
index b2ea6f76..b9d2ab90 100644
--- a/sig/openai/models/vector_store_list_params.rbs
+++ b/sig/openai/models/vector_store_list_params.rbs
@@ -43,7 +43,9 @@ module OpenAI
       type order = :asc | :desc

-      class Order < OpenAI::Enum
+      module Order
+        extend OpenAI::Enum
+
         ASC: :asc
         DESC: :desc
diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs
index 0918e4b6..17e51e48 100644
--- a/sig/openai/models/vector_store_search_params.rbs
+++ b/sig/openai/models/vector_store_search_params.rbs
@@ -49,7 +49,9 @@ module OpenAI
       type query = String | ::Array[String]

-      class Query < OpenAI::Union
+      module Query
+        extend OpenAI::Union
+
         type string_array = ::Array[String]

         StringArray: string_array
@@ -60,7 +62,9 @@ module OpenAI
       type filters =
         OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter

-      class Filters < OpenAI::Union
+      module Filters
+        extend OpenAI::Union
+
         def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
       end
@@ -90,7 +94,9 @@ module OpenAI
         type ranker = :auto | :"default-2024-11-15"

-        class Ranker < OpenAI::Enum
+        module Ranker
+          extend OpenAI::Enum
+
           AUTO: :auto
           DEFAULT_2024_11_15: :"default-2024-11-15"
diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs
index ae59192d..497e9d9d 100644
--- a/sig/openai/models/vector_store_search_response.rbs
+++ b/sig/openai/models/vector_store_search_response.rbs
@@ -32,7 +32,9 @@ module OpenAI
       type attribute = String | Float | bool

-      class Attribute < OpenAI::Union
+      module Attribute
+        extend OpenAI::Union
+
         def self.variants: -> [String, Float, bool]
       end
@@ -56,7 +58,9 @@ module OpenAI
         type type_ = :text

-        class Type < OpenAI::Enum
+        module Type
+          extend OpenAI::Enum
+
           TEXT: :text

           def self.values: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::Content::type_]
diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs
index 80029298..78f70166 100644
--- a/sig/openai/models/vector_stores/file_batch_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs
@@ -34,7 +34,9 @@ module OpenAI
         type attribute = String | Float | bool

-        class Attribute < OpenAI::Union
+        module Attribute
+          extend OpenAI::Union
+
           def self.variants: -> [String, Float, bool]
         end
       end
diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
index e642c6f9..d5bdb26b 100644
--- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs
@@ -56,7 +56,9 @@ module OpenAI
         type filter = :in_progress | :completed | :failed | :cancelled

-        class Filter < OpenAI::Enum
+        module Filter
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           FAILED: :failed
@@ -67,7 +69,9 @@ module OpenAI
         type order = :asc | :desc

-        class Order < OpenAI::Enum
+        module Order
+          extend OpenAI::Enum
+
           ASC: :asc
           DESC: :desc
diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs
index ac3e0e2c..8ecec280 100644
--- a/sig/openai/models/vector_stores/file_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_create_params.rbs
@@ -34,7 +34,9 @@ module OpenAI
         type attribute = String | Float | bool

-        class Attribute < OpenAI::Union
+        module Attribute
+          extend OpenAI::Union
+
           def self.variants: -> [String, Float, bool]
         end
       end
diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs
index 0cc01f91..362141a4 100644
--- a/sig/openai/models/vector_stores/file_list_params.rbs
+++ b/sig/openai/models/vector_stores/file_list_params.rbs
@@ -52,7 +52,9 @@ module OpenAI
         type filter = :in_progress | :completed | :failed | :cancelled

-        class Filter < OpenAI::Enum
+        module Filter
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           FAILED: :failed
@@ -63,7 +65,9 @@ module OpenAI
         type order = :asc | :desc

-        class Order < OpenAI::Enum
+        module Order
+          extend OpenAI::Enum
+
           ASC: :asc
           DESC: :desc
diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs
index 6b2b7029..b85d9f01 100644
--- a/sig/openai/models/vector_stores/file_update_params.rbs
+++ b/sig/openai/models/vector_stores/file_update_params.rbs
@@ -26,7 +26,9 @@ module OpenAI
         type attribute = String | Float | bool

-        class Attribute < OpenAI::Union
+        module Attribute
+          extend OpenAI::Union
+
           def self.variants: -> [String, Float, bool]
         end
       end
diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs
index 79940901..79b4b73c 100644
--- a/sig/openai/models/vector_stores/vector_store_file.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class VectorStoreFile = VectorStores::VectorStoreFile
 
     module VectorStores
@@ -74,7 +73,9 @@ module OpenAI
           type code = :server_error | :unsupported_file | :invalid_file

-          class Code < OpenAI::Enum
+          module Code
+            extend OpenAI::Enum
+
             SERVER_ERROR: :server_error
             UNSUPPORTED_FILE: :unsupported_file
             INVALID_FILE: :invalid_file
@@ -85,7 +86,9 @@ module OpenAI
         type status = :in_progress | :completed | :cancelled | :failed

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           CANCELLED: :cancelled
@@ -96,7 +99,9 @@ module OpenAI
         type attribute = String | Float | bool

-        class Attribute < OpenAI::Union
+        module Attribute
+          extend OpenAI::Union
+
           def self.variants: -> [String, Float, bool]
         end
       end
diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
index 497f5dc3..cc689671 100644
--- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class VectorStoreFileBatch = VectorStores::VectorStoreFileBatch
 
     module VectorStores
@@ -71,7 +70,9 @@ module OpenAI
         type status = :in_progress | :completed | :cancelled | :failed

-        class Status < OpenAI::Enum
+        module Status
+          extend OpenAI::Enum
+
           IN_PROGRESS: :in_progress
           COMPLETED: :completed
           CANCELLED: :cancelled
diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
index dde63be2..19257a53 100644
--- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
@@ -1,6 +1,5 @@
 module OpenAI
   module Models
-    class VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted
 
     module VectorStores
diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb
index 1a3c623e..bb5fb2a6 100644
--- a/test/openai/base_model_test.rb
+++ b/test/openai/base_model_test.rb
@@ -3,7 +3,9 @@
 require_relative "test_helper"

 class OpenAI::Test::BaseModelTest < Minitest::Test
-  class E1 < OpenAI::Enum
+  module E1
+    extend OpenAI::Enum
+
     A = :a
     B = :b
   end
@@ -242,13 +244,17 @@ class M3 < M1
     optional :b, E1, api_name: :renamed_again
   end

-  class U1 < OpenAI::Union
+  module U1
+    extend OpenAI::Union
+
     discriminator :type
     variant :a, M1
     variant :b, M3
   end

-  class U2 < OpenAI::Union
+  module U2
+    extend OpenAI::Union
+
     variant A1
     variant A3
   end
@@ -330,12 +336,16 @@ def test_basic_const
     end
   end

-  class E2 < OpenAI::Enum
+  module E2
+    extend OpenAI::Enum
+
     A = :a
     B = :b
   end

-  class U3 < OpenAI::Union
+  module U3
+    extend OpenAI::Union
+
     discriminator :type
     variant :a, M1
     variant :b, M3
@@ -353,7 +363,9 @@ def test_basic_eql
     assert_equal(U1, U3)
   end

-  class U4 < OpenAI::Union
+  module U4
+    extend OpenAI::Union
+
     variant :a, const: :a
     variant :b, const: :b
   end

From 1f9f6d53b681d703f4357187aaa7fbcce7a0a159 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 20 Mar 2025 18:15:06 -0400
Subject: [PATCH 044/295] fix: type names

---
 Rakefile                                      |  21 +-
 lib/openai/base_model.rb                      | 388 +++++++++---------
 lib/openai/models/all_models.rb               |  10 +-
 .../models/audio/speech_create_params.rb      |  18 +-
 lib/openai/models/audio/speech_model.rb       |   5 +-
 .../audio/transcription_create_params.rb      |  11 +-
 .../audio/transcription_create_response.rb    |   6 +-
 .../models/audio/transcription_include.rb     |   5 +-
 .../audio/transcription_stream_event.rb       |   6 +-
 .../models/audio/translation_create_params.rb |  12 +-
 .../audio/translation_create_response.rb      |   5 +-
 lib/openai/models/audio_model.rb              |   5 +-
 lib/openai/models/audio_response_format.rb    |   6 +-
 lib/openai/models/batch.rb                    |   6 +-
 lib/openai/models/batch_create_params.rb      |  12 +-
 .../models/beta/assistant_create_params.rb    |   6 +-
 .../models/beta/assistant_list_params.rb      |   6 +-
 .../beta/assistant_response_format_option.rb  |   6 +-
 .../models/beta/assistant_stream_event.rb     |   6 +-
 lib/openai/models/beta/assistant_tool.rb      |   5 +-
 .../models/beta/assistant_tool_choice.rb      |   6 +-
 .../beta/assistant_tool_choice_option.rb      |  12 +-
 .../models/beta/assistant_update_params.rb    |   6 +-
 lib/openai/models/beta/file_search_tool.rb    |   6 +-
 .../models/beta/message_stream_event.rb       |   6 +-
 .../models/beta/run_step_stream_event.rb      |   6 +-
 lib/openai/models/beta/run_stream_event.rb    |   6 +-
 .../beta/thread_create_and_run_params.rb      |  34 +-
 .../models/beta/thread_create_params.rb       |  17 +-
 lib/openai/models/beta/threads/annotation.rb  |   6 +-
 .../models/beta/threads/annotation_delta.rb   |   6 +-
 lib/openai/models/beta/threads/image_file.rb  |   6 +-
 .../models/beta/threads/image_file_delta.rb   |   6 +-
 lib/openai/models/beta/threads/image_url.rb   |   6 +-
 .../models/beta/threads/image_url_delta.rb    |   6 +-
 lib/openai/models/beta/threads/message.rb     |  23 +-
 .../models/beta/threads/message_content.rb    |   6 +-
 .../beta/threads/message_content_delta.rb     |   6 +-
 .../threads/message_content_part_param.rb     |   6 +-
 .../beta/threads/message_create_params.rb     |  17 +-
 .../models/beta/threads/message_delta.rb      |   6 +-
 .../beta/threads/message_list_params.rb       |   6 +-
 lib/openai/models/beta/threads/run.rb         |  18 +-
 .../models/beta/threads/run_create_params.rb  |  29 +-
 .../models/beta/threads/run_list_params.rb    |   6 +-
 lib/openai/models/beta/threads/run_status.rb  |   6 +-
 .../runs/code_interpreter_tool_call.rb        |   6 +-
 .../runs/code_interpreter_tool_call_delta.rb  |   6 +-
 .../threads/runs/file_search_tool_call.rb     |  12 +-
 .../models/beta/threads/runs/run_step.rb      |  24 +-
 .../beta/threads/runs/run_step_delta.rb       |   6 +-
 .../beta/threads/runs/run_step_include.rb     |   5 +-
 .../beta/threads/runs/step_list_params.rb     |   6 +-
 .../models/beta/threads/runs/tool_call.rb     |   6 +-
 .../beta/threads/runs/tool_call_delta.rb      |   6 +-
 lib/openai/models/chat/chat_completion.rb     |  12 +-
 ...chat_completion_assistant_message_param.rb |  12 +-
 .../chat/chat_completion_audio_param.rb       |  12 +-
 .../models/chat/chat_completion_chunk.rb      |  24 +-
 .../chat/chat_completion_content_part.rb      |   6 +-
 .../chat_completion_content_part_image.rb     |   6 +-
 ...hat_completion_content_part_input_audio.rb |   6 +-
 ...chat_completion_developer_message_param.rb |   6 +-
 .../chat/chat_completion_message_param.rb     |   6 +-
 .../models/chat/chat_completion_modality.rb   |   5 +-
 .../chat_completion_prediction_content.rb     |   6 +-
 .../models/chat/chat_completion_role.rb       |   6 +-
 .../chat_completion_system_message_param.rb   |   6 +-
 .../chat_completion_tool_choice_option.rb     |  12 +-
 .../chat_completion_tool_message_param.rb     |   6 +-
 .../chat_completion_user_message_param.rb     |   6 +-
 .../models/chat/completion_create_params.rb   |  47 ++-
 .../models/chat/completion_list_params.rb     |   6 +-
 .../chat/completions/message_list_params.rb   |   6 +-
 lib/openai/models/chat_model.rb               |   5 +-
 lib/openai/models/comparison_filter.rb        |  12 +-
 lib/openai/models/completion_choice.rb        |   6 +-
 lib/openai/models/completion_create_params.rb |  18 +-
 lib/openai/models/compound_filter.rb          |  12 +-
 lib/openai/models/embedding_create_params.rb  |  18 +-
 lib/openai/models/embedding_model.rb          |   5 +-
 lib/openai/models/file_chunking_strategy.rb   |   6 +-
 .../models/file_chunking_strategy_param.rb    |   6 +-
 lib/openai/models/file_list_params.rb         |   6 +-
 lib/openai/models/file_object.rb              |  12 +-
 lib/openai/models/file_purpose.rb             |   6 +-
 .../models/fine_tuning/fine_tuning_job.rb     |  72 ++--
 .../fine_tuning/fine_tuning_job_event.rb      |  12 +-
 .../models/fine_tuning/job_create_params.rb   |  72 ++--
 .../models/image_create_variation_params.rb   |  18 +-
 lib/openai/models/image_edit_params.rb        |  18 +-
 lib/openai/models/image_generate_params.rb    |  30 +-
 lib/openai/models/image_model.rb              |   5 +-
 lib/openai/models/moderation.rb               |  65 ++-
 lib/openai/models/moderation_create_params.rb |  12 +-
 lib/openai/models/moderation_model.rb         |   5 +-
.../models/moderation_multi_modal_input.rb | 6 +- lib/openai/models/reasoning.rb | 6 +- lib/openai/models/reasoning_effort.rb | 6 +- lib/openai/models/responses/computer_tool.rb | 6 +- .../models/responses/easy_input_message.rb | 18 +- .../models/responses/file_search_tool.rb | 12 +- .../responses/input_item_list_params.rb | 6 +- lib/openai/models/responses/response.rb | 18 +- .../response_code_interpreter_tool_call.rb | 12 +- .../responses/response_computer_tool_call.rb | 24 +- ...response_computer_tool_call_output_item.rb | 6 +- .../models/responses/response_content.rb | 6 +- .../response_content_part_added_event.rb | 6 +- .../response_content_part_done_event.rb | 6 +- .../responses/response_create_params.rb | 18 +- lib/openai/models/responses/response_error.rb | 6 +- .../response_file_search_tool_call.rb | 11 +- .../responses/response_format_text_config.rb | 6 +- .../responses/response_function_tool_call.rb | 6 +- ...response_function_tool_call_output_item.rb | 6 +- .../responses/response_function_web_search.rb | 6 +- .../models/responses/response_includable.rb | 6 +- .../models/responses/response_input_audio.rb | 6 +- .../responses/response_input_content.rb | 6 +- .../models/responses/response_input_image.rb | 6 +- .../models/responses/response_input_item.rb | 36 +- .../responses/response_input_message_item.rb | 18 +- lib/openai/models/responses/response_item.rb | 6 +- .../models/responses/response_output_item.rb | 6 +- .../responses/response_output_message.rb | 12 +- .../models/responses/response_output_text.rb | 6 +- .../responses/response_reasoning_item.rb | 6 +- .../models/responses/response_status.rb | 6 +- .../models/responses/response_stream_event.rb | 6 +- .../response_text_annotation_delta_event.rb | 6 +- lib/openai/models/responses/tool.rb | 6 +- .../models/responses/tool_choice_options.rb | 6 +- .../models/responses/tool_choice_types.rb | 6 +- .../models/responses/web_search_tool.rb | 12 +- lib/openai/models/responses_model.rb | 10 +- lib/openai/models/upload.rb | 6 +- lib/openai/models/vector_store.rb | 6 +- lib/openai/models/vector_store_list_params.rb | 6 +- .../models/vector_store_search_params.rb | 17 +- .../models/vector_store_search_response.rb | 11 +- .../vector_stores/file_batch_create_params.rb | 5 +- .../file_batch_list_files_params.rb | 12 +- .../vector_stores/file_create_params.rb | 5 +- .../models/vector_stores/file_list_params.rb | 12 +- .../vector_stores/file_update_params.rb | 5 +- .../models/vector_stores/vector_store_file.rb | 17 +- .../vector_stores/vector_store_file_batch.rb | 6 +- rbi/lib/openai/base_model.rbi | 172 ++++---- rbi/lib/openai/models/all_models.rbi | 27 +- .../models/audio/speech_create_params.rbi | 88 ++-- rbi/lib/openai/models/audio/speech_model.rbi | 13 +- .../audio/transcription_create_params.rbi | 68 ++- .../audio/transcription_create_response.rbi | 4 +- .../models/audio/transcription_include.rbi | 9 +- .../audio/transcription_stream_event.rbi | 4 +- .../audio/translation_create_params.rbi | 52 +-- .../audio/translation_create_response.rbi | 4 +- rbi/lib/openai/models/audio_model.rbi | 13 +- .../openai/models/audio_response_format.rbi | 17 +- rbi/lib/openai/models/batch.rbi | 37 +- rbi/lib/openai/models/batch_create_params.rbi | 48 +-- .../models/beta/assistant_create_params.rbi | 28 +- .../models/beta/assistant_list_params.rbi | 22 +- .../beta/assistant_response_format_option.rbi | 4 +- .../models/beta/assistant_stream_event.rbi | 4 +- rbi/lib/openai/models/beta/assistant_tool.rbi | 4 +- .../models/beta/assistant_tool_choice.rbi | 36 
+- .../beta/assistant_tool_choice_option.rbi | 28 +- .../models/beta/assistant_update_params.rbi | 113 ++--- .../openai/models/beta/file_search_tool.rbi | 46 +-- .../models/beta/message_stream_event.rbi | 4 +- .../models/beta/run_step_stream_event.rbi | 4 +- .../openai/models/beta/run_stream_event.rbi | 4 +- .../beta/thread_create_and_run_params.rbi | 136 ++---- .../models/beta/thread_create_params.rbi | 31 +- .../openai/models/beta/threads/annotation.rbi | 4 +- .../models/beta/threads/annotation_delta.rbi | 4 +- .../openai/models/beta/threads/image_file.rbi | 27 +- .../models/beta/threads/image_file_delta.rbi | 31 +- .../openai/models/beta/threads/image_url.rbi | 27 +- .../models/beta/threads/image_url_delta.rbi | 30 +- .../openai/models/beta/threads/message.rbi | 94 ++--- .../models/beta/threads/message_content.rbi | 4 +- .../beta/threads/message_content_delta.rbi | 4 +- .../threads/message_content_part_param.rbi | 4 +- .../beta/threads/message_create_params.rbi | 31 +- .../models/beta/threads/message_delta.rbi | 23 +- .../beta/threads/message_list_params.rbi | 23 +- rbi/lib/openai/models/beta/threads/run.rbi | 155 ++----- .../models/beta/threads/run_create_params.rbi | 154 ++----- .../models/beta/threads/run_list_params.rbi | 23 +- .../openai/models/beta/threads/run_status.rbi | 25 +- .../runs/code_interpreter_tool_call.rbi | 4 +- .../runs/code_interpreter_tool_call_delta.rbi | 4 +- .../threads/runs/file_search_tool_call.rbi | 122 +----- .../models/beta/threads/runs/run_step.rbi | 95 ++--- .../beta/threads/runs/run_step_delta.rbi | 4 +- .../beta/threads/runs/run_step_include.rbi | 13 +- .../beta/threads/runs/step_list_params.rbi | 34 +- .../threads/runs/step_retrieve_params.rbi | 11 +- .../models/beta/threads/runs/tool_call.rbi | 4 +- .../beta/threads/runs/tool_call_delta.rbi | 4 +- .../openai/models/chat/chat_completion.rbi | 58 ++- ...hat_completion_assistant_message_param.rbi | 8 +- .../chat/chat_completion_audio_param.rbi | 74 ++-- .../models/chat/chat_completion_chunk.rbi | 119 ++---- .../chat/chat_completion_content_part.rbi | 4 +- .../chat_completion_content_part_image.rbi | 37 +- ...at_completion_content_part_input_audio.rbi | 44 +- ...hat_completion_developer_message_param.rbi | 4 +- .../chat/chat_completion_message_param.rbi | 4 +- .../models/chat/chat_completion_modality.rbi | 11 +- .../chat_completion_prediction_content.rbi | 4 +- .../models/chat/chat_completion_role.rbi | 19 +- .../chat_completion_system_message_param.rbi | 4 +- .../chat_completion_tool_choice_option.rbi | 27 +- .../chat_completion_tool_message_param.rbi | 4 +- .../chat_completion_user_message_param.rbi | 4 +- .../models/chat/completion_create_params.rbi | 225 +++------- .../models/chat/completion_list_params.rbi | 23 +- .../chat/completions/message_list_params.rbi | 39 +- rbi/lib/openai/models/chat_model.rbi | 98 +++-- rbi/lib/openai/models/comparison_filter.rbi | 44 +- rbi/lib/openai/models/completion_choice.rbi | 24 +- .../models/completion_create_params.rbi | 36 +- rbi/lib/openai/models/compound_filter.rbi | 37 +- .../openai/models/embedding_create_params.rbi | 44 +- rbi/lib/openai/models/embedding_model.rbi | 13 +- .../openai/models/file_chunking_strategy.rbi | 4 +- .../models/file_chunking_strategy_param.rbi | 4 +- rbi/lib/openai/models/file_create_params.rbi | 21 +- rbi/lib/openai/models/file_list_params.rbi | 22 +- rbi/lib/openai/models/file_object.rbi | 56 ++- rbi/lib/openai/models/file_purpose.rbi | 19 +- .../models/fine_tuning/fine_tuning_job.rbi | 95 ++--- 
.../fine_tuning/fine_tuning_job_event.rbi | 48 +-- .../models/fine_tuning/job_create_params.rbi | 93 ++--- .../models/image_create_variation_params.rbi | 64 ++- rbi/lib/openai/models/image_edit_params.rbi | 63 ++- .../openai/models/image_generate_params.rbi | 112 +++-- rbi/lib/openai/models/image_model.rbi | 11 +- rbi/lib/openai/models/moderation.rbi | 319 +++++--------- .../models/moderation_create_params.rbi | 21 +- rbi/lib/openai/models/moderation_model.rbi | 16 +- .../models/moderation_multi_modal_input.rbi | 4 +- rbi/lib/openai/models/reasoning.rbi | 43 +- rbi/lib/openai/models/reasoning_effort.rbi | 13 +- .../openai/models/responses/computer_tool.rbi | 40 +- .../models/responses/easy_input_message.rbi | 52 +-- .../models/responses/file_search_tool.rbi | 40 +- .../responses/input_item_list_params.rbi | 23 +- rbi/lib/openai/models/responses/response.rbi | 145 ++----- .../response_code_interpreter_tool_call.rbi | 37 +- .../responses/response_computer_tool_call.rbi | 107 ++--- ...esponse_computer_tool_call_output_item.rbi | 33 +- .../models/responses/response_content.rbi | 4 +- .../response_content_part_added_event.rbi | 4 +- .../response_content_part_done_event.rbi | 4 +- .../responses/response_create_params.rbi | 98 ++--- .../models/responses/response_error.rbi | 73 ++-- .../response_file_search_tool_call.rbi | 35 +- .../responses/response_format_text_config.rbi | 4 +- .../responses/response_function_tool_call.rbi | 49 +-- ...esponse_function_tool_call_output_item.rbi | 43 +- .../response_function_web_search.rbi | 40 +- .../models/responses/response_includable.rbi | 16 +- .../models/responses/response_input_audio.rbi | 35 +- .../responses/response_input_content.rbi | 4 +- .../models/responses/response_input_image.rbi | 45 +- .../models/responses/response_input_item.rbi | 153 +++---- .../responses/response_input_message_item.rbi | 74 ++-- .../openai/models/responses/response_item.rbi | 4 +- .../models/responses/response_output_item.rbi | 4 +- .../responses/response_output_message.rbi | 29 +- .../models/responses/response_output_text.rbi | 4 +- .../responses/response_reasoning_item.rbi | 25 +- .../responses/response_retrieve_params.rbi | 19 +- .../models/responses/response_status.rbi | 15 +- .../responses/response_stream_event.rbi | 4 +- .../response_text_annotation_delta_event.rbi | 4 +- rbi/lib/openai/models/responses/tool.rbi | 4 +- .../models/responses/tool_choice_options.rbi | 13 +- .../models/responses/tool_choice_types.rbi | 29 +- .../models/responses/web_search_tool.rbi | 49 +-- rbi/lib/openai/models/responses_model.rbi | 28 +- rbi/lib/openai/models/upload.rbi | 26 +- .../openai/models/upload_create_params.rbi | 8 +- rbi/lib/openai/models/vector_store.rbi | 24 +- .../models/vector_store_list_params.rbi | 22 +- .../models/vector_store_search_params.rbi | 44 +- .../models/vector_store_search_response.rbi | 31 +- .../file_batch_create_params.rbi | 4 +- .../file_batch_list_files_params.rbi | 53 +-- .../vector_stores/file_create_params.rbi | 4 +- .../models/vector_stores/file_list_params.rbi | 50 +-- .../vector_stores/file_update_params.rbi | 4 +- .../vector_stores/vector_store_file.rbi | 75 ++-- .../vector_stores/vector_store_file_batch.rbi | 32 +- rbi/lib/openai/resources/audio/speech.rbi | 6 +- .../openai/resources/audio/transcriptions.rbi | 16 +- .../openai/resources/audio/translations.rbi | 4 +- rbi/lib/openai/resources/batches.rbi | 4 +- rbi/lib/openai/resources/beta/assistants.rbi | 10 +- rbi/lib/openai/resources/beta/threads.rbi | 18 +- .../resources/beta/threads/messages.rbi 
| 4 +- .../openai/resources/beta/threads/runs.rbi | 28 +- .../resources/beta/threads/runs/steps.rbi | 6 +- rbi/lib/openai/resources/chat/completions.rbi | 38 +- .../resources/chat/completions/messages.rbi | 2 +- rbi/lib/openai/resources/completions.rbi | 4 +- rbi/lib/openai/resources/embeddings.rbi | 4 +- rbi/lib/openai/resources/files.rbi | 4 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 2 +- rbi/lib/openai/resources/images.rbi | 22 +- rbi/lib/openai/resources/moderations.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 26 +- .../resources/responses/input_items.rbi | 2 +- rbi/lib/openai/resources/uploads.rbi | 2 +- rbi/lib/openai/resources/vector_stores.rbi | 2 +- .../resources/vector_stores/file_batches.rbi | 4 +- .../openai/resources/vector_stores/files.rbi | 4 +- sig/openai/base_model.rbs | 28 +- sig/openai/models/all_models.rbs | 8 +- .../models/audio/speech_create_params.rbs | 12 +- sig/openai/models/audio/speech_model.rbs | 4 +- .../audio/transcription_create_params.rbs | 8 +- .../audio/transcription_create_response.rbs | 4 +- .../models/audio/transcription_include.rbs | 4 +- .../audio/transcription_stream_event.rbs | 4 +- .../audio/translation_create_params.rbs | 8 +- .../audio/translation_create_response.rbs | 4 +- sig/openai/models/audio_model.rbs | 4 +- sig/openai/models/audio_response_format.rbs | 4 +- sig/openai/models/batch.rbs | 4 +- sig/openai/models/batch_create_params.rbs | 8 +- .../models/beta/assistant_create_params.rbs | 4 +- .../models/beta/assistant_list_params.rbs | 4 +- .../beta/assistant_response_format_option.rbs | 4 +- .../models/beta/assistant_stream_event.rbs | 4 +- sig/openai/models/beta/assistant_tool.rbs | 4 +- .../models/beta/assistant_tool_choice.rbs | 4 +- .../beta/assistant_tool_choice_option.rbs | 8 +- .../models/beta/assistant_update_params.rbs | 4 +- sig/openai/models/beta/file_search_tool.rbs | 4 +- .../models/beta/message_stream_event.rbs | 4 +- .../models/beta/run_step_stream_event.rbs | 4 +- sig/openai/models/beta/run_stream_event.rbs | 4 +- .../beta/thread_create_and_run_params.rbs | 24 +- .../models/beta/thread_create_params.rbs | 12 +- sig/openai/models/beta/threads/annotation.rbs | 4 +- .../models/beta/threads/annotation_delta.rbs | 4 +- sig/openai/models/beta/threads/image_file.rbs | 4 +- .../models/beta/threads/image_file_delta.rbs | 4 +- sig/openai/models/beta/threads/image_url.rbs | 4 +- .../models/beta/threads/image_url_delta.rbs | 4 +- sig/openai/models/beta/threads/message.rbs | 16 +- .../models/beta/threads/message_content.rbs | 4 +- .../beta/threads/message_content_delta.rbs | 4 +- .../threads/message_content_part_param.rbs | 4 +- .../beta/threads/message_create_params.rbs | 12 +- .../models/beta/threads/message_delta.rbs | 4 +- .../beta/threads/message_list_params.rbs | 4 +- sig/openai/models/beta/threads/run.rbs | 12 +- .../models/beta/threads/run_create_params.rbs | 20 +- .../models/beta/threads/run_list_params.rbs | 4 +- sig/openai/models/beta/threads/run_status.rbs | 4 +- .../runs/code_interpreter_tool_call.rbs | 4 +- .../runs/code_interpreter_tool_call_delta.rbs | 4 +- .../threads/runs/file_search_tool_call.rbs | 8 +- .../models/beta/threads/runs/run_step.rbs | 17 +- .../beta/threads/runs/run_step_delta.rbs | 5 +- .../threads/runs/run_step_delta_event.rbs | 1 + .../runs/run_step_delta_message_delta.rbs | 1 + .../beta/threads/runs/run_step_include.rbs | 7 +- .../beta/threads/runs/step_list_params.rbs | 4 +- .../models/beta/threads/runs/tool_call.rbs | 4 +- .../beta/threads/runs/tool_call_delta.rbs | 4 +- 
sig/openai/models/chat/chat_completion.rbs | 9 +- ...hat_completion_assistant_message_param.rbs | 9 +- .../models/chat/chat_completion_audio.rbs | 1 + .../chat/chat_completion_audio_param.rbs | 9 +- .../models/chat/chat_completion_chunk.rbs | 17 +- .../chat/chat_completion_content_part.rbs | 7 +- .../chat_completion_content_part_image.rbs | 5 +- ...at_completion_content_part_input_audio.rbs | 5 +- .../chat_completion_content_part_refusal.rbs | 1 + .../chat_completion_content_part_text.rbs | 1 + .../models/chat/chat_completion_deleted.rbs | 1 + ...hat_completion_developer_message_param.rbs | 5 +- .../chat_completion_function_call_option.rbs | 1 + ...chat_completion_function_message_param.rbs | 1 + .../models/chat/chat_completion_message.rbs | 1 + .../chat/chat_completion_message_param.rbs | 7 +- .../chat_completion_message_tool_call.rbs | 1 + .../models/chat/chat_completion_modality.rbs | 7 +- .../chat_completion_named_tool_choice.rbs | 1 + .../chat_completion_prediction_content.rbs | 5 +- .../chat/chat_completion_reasoning_effort.rbs | 6 +- .../models/chat/chat_completion_role.rbs | 7 +- .../chat/chat_completion_store_message.rbs | 1 + .../chat/chat_completion_stream_options.rbs | 1 + .../chat_completion_system_message_param.rbs | 5 +- .../chat/chat_completion_token_logprob.rbs | 1 + .../models/chat/chat_completion_tool.rbs | 1 + .../chat_completion_tool_choice_option.rbs | 11 +- .../chat_completion_tool_message_param.rbs | 5 +- .../chat_completion_user_message_param.rbs | 5 +- .../models/chat/completion_create_params.rbs | 32 +- .../models/chat/completion_list_params.rbs | 4 +- .../chat/completions/message_list_params.rbs | 4 +- sig/openai/models/chat_model.rbs | 4 +- sig/openai/models/comparison_filter.rbs | 8 +- sig/openai/models/completion_choice.rbs | 4 +- .../models/completion_create_params.rbs | 12 +- sig/openai/models/compound_filter.rbs | 8 +- sig/openai/models/embedding_create_params.rbs | 12 +- sig/openai/models/embedding_model.rbs | 4 +- sig/openai/models/file_chunking_strategy.rbs | 4 +- .../models/file_chunking_strategy_param.rbs | 4 +- sig/openai/models/file_list_params.rbs | 4 +- sig/openai/models/file_object.rbs | 8 +- sig/openai/models/file_purpose.rbs | 4 +- .../models/fine_tuning/fine_tuning_job.rbs | 49 +-- .../fine_tuning/fine_tuning_job_event.rbs | 9 +- .../fine_tuning_job_integration.rbs | 2 + .../fine_tuning_job_wandb_integration.rbs | 1 + ...ne_tuning_job_wandb_integration_object.rbs | 1 + .../models/fine_tuning/job_create_params.rbs | 48 +-- .../models/image_create_variation_params.rbs | 12 +- sig/openai/models/image_edit_params.rbs | 12 +- sig/openai/models/image_generate_params.rbs | 20 +- sig/openai/models/image_model.rbs | 4 +- sig/openai/models/moderation.rbs | 52 +-- .../models/moderation_create_params.rbs | 8 +- sig/openai/models/moderation_model.rbs | 4 +- .../models/moderation_multi_modal_input.rbs | 4 +- sig/openai/models/reasoning.rbs | 4 +- sig/openai/models/reasoning_effort.rbs | 4 +- sig/openai/models/responses/computer_tool.rbs | 4 +- .../models/responses/easy_input_message.rbs | 12 +- .../models/responses/file_search_tool.rbs | 8 +- .../responses/input_item_list_params.rbs | 4 +- sig/openai/models/responses/response.rbs | 12 +- .../response_code_interpreter_tool_call.rbs | 8 +- .../responses/response_computer_tool_call.rbs | 16 +- ...esponse_computer_tool_call_output_item.rbs | 4 +- .../models/responses/response_content.rbs | 4 +- .../response_content_part_added_event.rbs | 4 +- .../response_content_part_done_event.rbs | 4 +- 
.../responses/response_create_params.rbs | 12 +- .../models/responses/response_error.rbs | 4 +- .../response_file_search_tool_call.rbs | 8 +- .../responses/response_format_text_config.rbs | 4 +- .../responses/response_function_tool_call.rbs | 4 +- ...esponse_function_tool_call_output_item.rbs | 4 +- .../response_function_web_search.rbs | 4 +- .../models/responses/response_includable.rbs | 4 +- .../models/responses/response_input_audio.rbs | 4 +- .../responses/response_input_content.rbs | 4 +- .../models/responses/response_input_image.rbs | 4 +- .../models/responses/response_input_item.rbs | 24 +- .../responses/response_input_message_item.rbs | 12 +- sig/openai/models/responses/response_item.rbs | 4 +- .../models/responses/response_item_list.rbs | 1 + .../models/responses/response_output_item.rbs | 4 +- .../responses/response_output_message.rbs | 8 +- .../models/responses/response_output_text.rbs | 4 +- .../responses/response_reasoning_item.rbs | 4 +- .../models/responses/response_status.rbs | 4 +- .../responses/response_stream_event.rbs | 4 +- .../response_text_annotation_delta_event.rbs | 4 +- sig/openai/models/responses/tool.rbs | 4 +- .../models/responses/tool_choice_options.rbs | 4 +- .../models/responses/tool_choice_types.rbs | 4 +- .../models/responses/web_search_tool.rbs | 8 +- sig/openai/models/responses_model.rbs | 8 +- sig/openai/models/upload.rbs | 4 +- sig/openai/models/uploads/upload_part.rbs | 1 + sig/openai/models/vector_store.rbs | 4 +- .../models/vector_store_list_params.rbs | 4 +- .../models/vector_store_search_params.rbs | 12 +- .../models/vector_store_search_response.rbs | 8 +- .../file_batch_create_params.rbs | 4 +- .../file_batch_list_files_params.rbs | 8 +- .../vector_stores/file_create_params.rbs | 4 +- .../models/vector_stores/file_list_params.rbs | 8 +- .../vector_stores/file_update_params.rbs | 4 +- .../vector_stores/vector_store_file.rbs | 13 +- .../vector_stores/vector_store_file_batch.rbs | 5 +- .../vector_store_file_deleted.rbs | 1 + test/openai/base_model_test.rb | 24 +- 492 files changed, 3492 insertions(+), 5673 deletions(-) diff --git a/Rakefile b/Rakefile index e97d158b..cde192ee 100644 --- a/Rakefile +++ b/Rakefile @@ -35,25 +35,24 @@ multitask(:syntax_tree) do inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? %w[-i''] : %w[-i] uuid = SecureRandom.uuid - # `syntax_tree` has trouble with `rbs`'s class & module aliases + # `syntax_tree` has trouble with `rbs`'s class aliases sed = xargs + %w[sed -E] + inplace + %w[-e] - # annotate unprocessable aliases with a unique comment - pre = sed + ["s/(class|module) ([^ ]+) = (.+$)/# \\1 #{uuid}\\n\\2: \\3/", "--"] + # annotate class aliases with a unique comment + pre = sed + ["s/class ([^ ]+) = (.+$)/# #{uuid}\\n\\1: \\2/", "--"] fmt = xargs + %w[stree write --plugin=rbs --] - # remove the unique comment and unprocessable aliases to type aliases + # remove the unique comment and transform class aliases to type aliases subst = <<~SED - s/# (class|module) #{uuid}/\\1/ + s/# #{uuid}// t l1 b - : l1 - N - s/\\n *([^:]+): (.+)$/ \\1 = \\2/ + n + s/([^ :]+): (.+$)/class \\1 = \\2/ SED - # for each line: - # 1. try transform the unique comment into `class | module`, if successful, branch to label `l1`. - # 2. at label `l1`, join previously annotated line with `class | module` information. + # 1. delete the unique comment + # 2. if deletion happened, branch to label `l1`, else continue + # 3. 
transform the class alias to a type alias at label `l1` pst = sed + [subst, "--"] # transform class aliases to type aliases, which syntax tree has no trouble with diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index a1f9e29c..5a63f4fa 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -48,7 +48,7 @@ def type_info(spec) type_info(spec.slice(:const, :enum, :union).first&.last) in Proc spec - in OpenAI::Converter | Module | Symbol + in OpenAI::Converter | Class | Symbol -> { spec } in true | false -> { OpenAI::BooleanModel } @@ -81,7 +81,7 @@ def coerce(target, value) else value end - in Module + in Class case target in -> { _1 <= NilClass } nil @@ -144,7 +144,7 @@ def try_strict_coerce(target, value) else [false, false, 0] end - in Module + in Class case [target, value] in [-> { _1 <= NilClass }, _] [true, nil, value.nil? ? 1 : 0] @@ -276,6 +276,8 @@ def try_strict_coerce(value) # @api private # + # @abstract + # # A value from among a specified list of options. OpenAPI enum values map to Ruby # values in the SDK as follows: # @@ -315,70 +317,74 @@ def try_strict_coerce(value) # puts(chat_model) # end # ``` - module Enum - include OpenAI::Converter + class Enum + extend OpenAI::Converter - # All of the valid Symbol values for this enum. - # - # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) + class << self + # All of the valid Symbol values for this enum. + # + # @return [Array] + def values = (@values ||= constants.map { const_get(_1) }) - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + private def finalize! = values + end # @param other [Object] # # @return [Boolean] - def ===(other) = values.include?(other) + def self.===(other) = values.include?(other) # @param other [Object] # # @return [Boolean] - def ==(other) - other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Enum) && other.values.to_set == values.to_set + def self.==(other) + other.is_a?(Class) && other <= OpenAI::Enum && other.values.to_set == values.to_set end - # @api private - # - # @param value [String, Symbol, Object] - # - # @return [Symbol, Object] - def coerce(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - val - else - value + class << self + # @api private + # + # @param value [String, Symbol, Object] + # + # @return [Symbol, Object] + def coerce(value) + case value + in Symbol | String if values.include?(val = value.to_sym) + val + else + value + end end - end - # @!parse - # # @api private - # # - # # @param value [Symbol, Object] - # # - # # @return [Symbol, Object] - # def dump(value) = super + # @!parse + # # @api private + # # + # # @param value [Symbol, Object] + # # + # # @return [Symbol, Object] + # def dump(value) = super - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - return [true, value, 1] if values.include?(value) + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + return [true, value, 1] if values.include?(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - [true, val, 1] - else - case [value, values.first] - in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] - 
[false, true, 0] + case value + in Symbol | String if values.include?(val = value.to_sym) + [true, val, 1] else - [false, false, 0] + case [value, values.first] + in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] + [false, true, 0] + else + [false, false, 0] + end end end end @@ -386,6 +392,8 @@ def try_strict_coerce(value) # @api private # + # @abstract + # # @example # ```ruby # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` @@ -414,87 +422,89 @@ def try_strict_coerce(value) # puts(chat_completion_content_part) # end # ``` - module Union - include OpenAI::Converter - - # @api private - # - # All of the specified variant info for this union. - # - # @return [Array] - private def known_variants = (@known_variants ||= []) + class Union + extend OpenAI::Converter - # @api private - # - # @return [Array] - protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } - end + class << self + # @api private + # + # All of the specified variant info for this union. + # + # @return [Array] + private def known_variants = (@known_variants ||= []) - # All of the specified variants for this union. - # - # @return [Array] - def variants - derefed_variants.map(&:last) - end + # @api private + # + # @return [Array] + protected def derefed_variants + @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + end - # @api private - # - # @param property [Symbol] - private def discriminator(property) - case property - in Symbol - @discriminator = property + # All of the specified variants for this union. + # + # @return [Array] + def variants + derefed_variants.map(&:last) end - end - # @api private - # - # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def variant(key, spec = nil) - variant_info = - case key + # @api private + # + # @param property [Symbol] + private def discriminator(property) + case property in Symbol - [key, OpenAI::Converter.type_info(spec)] - in Proc | OpenAI::Converter | Module | Hash - [nil, OpenAI::Converter.type_info(key)] + @discriminator = property end + end - known_variants << variant_info - end - - # @api private - # - # @param value [Object] - # - # @return [OpenAI::Converter, Class, nil] - private def resolve_variant(value) - case [@discriminator, value] - in [_, OpenAI::BaseModel] - value.class - in [Symbol, Hash] - key = - if value.key?(@discriminator) - value.fetch(@discriminator) - elsif value.key?((discriminator = @discriminator.to_s)) - value.fetch(discriminator) + # @api private + # + # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def variant(key, spec = nil) + variant_info = + case key + in Symbol + [key, OpenAI::Converter.type_info(spec)] + in Proc | OpenAI::Converter | Class | Hash + [nil, OpenAI::Converter.type_info(key)] end - key = key.to_sym if key.is_a?(String) - _, resolved = known_variants.find { |k,| k == key } - resolved.nil? ? 
OpenAI::Unknown : resolved.call - else - nil + known_variants << variant_info + end + + # @api private + # + # @param value [Object] + # + # @return [OpenAI::Converter, Class, nil] + private def resolve_variant(value) + case [@discriminator, value] + in [_, OpenAI::BaseModel] + value.class + in [Symbol, Hash] + key = + if value.key?(@discriminator) + value.fetch(@discriminator) + elsif value.key?((discriminator = @discriminator.to_s)) + value.fetch(discriminator) + end + + key = key.to_sym if key.is_a?(String) + _, resolved = known_variants.find { |k,| k == key } + resolved.nil? ? OpenAI::Unknown : resolved.call + else + nil + end end end @@ -504,7 +514,7 @@ def variants # @param other [Object] # # @return [Boolean] - def ===(other) + def self.===(other) known_variants.any? do |_, variant_fn| variant_fn.call === other end @@ -513,88 +523,90 @@ def ===(other) # @param other [Object] # # @return [Boolean] - def ==(other) - other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Union) && other.derefed_variants == derefed_variants + def self.==(other) + other.is_a?(Class) && other <= OpenAI::Union && other.derefed_variants == derefed_variants end - # @api private - # - # @param value [Object] - # - # @return [Object] - def coerce(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.coerce(variant, value) - end + class << self + # @api private + # + # @param value [Object] + # + # @return [Object] + def coerce(value) + if (variant = resolve_variant(value)) + return OpenAI::Converter.coerce(variant, value) + end - matches = [] + matches = [] - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, _] - return coerced - in [false, true, score] - matches << [score, variant] - in [false, false, _] - nil + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, _] + return coerced + in [false, true, score] + matches << [score, variant] + in [false, false, _] + nil + end end - end - - _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } - variant.nil? ? value : OpenAI::Converter.coerce(variant, value) - end - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.dump(variant, value) + _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } + variant.nil? ? 
value : OpenAI::Converter.coerce(variant, value) end - known_variants.each do |_, variant_fn| - variant = variant_fn.call - if variant === value + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + if (variant = resolve_variant(value)) return OpenAI::Converter.dump(variant, value) end - end - value - end - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - # TODO(ruby) this will result in super linear decoding behaviour for nested unions - # follow up with a decoding context that captures current strictness levels - if (variant = resolve_variant(value)) - return Converter.try_strict_coerce(variant, value) + known_variants.each do |_, variant_fn| + variant = variant_fn.call + if variant === value + return OpenAI::Converter.dump(variant, value) + end + end + value end - coercible = false - max_score = 0 + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + # TODO(ruby) this will result in super linear decoding behaviour for nested unions + # follow up with a decoding context that captures current strictness levels + if (variant = resolve_variant(value)) + return Converter.try_strict_coerce(variant, value) + end + + coercible = false + max_score = 0 - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, score] - return [true, coerced, score] - in [false, true, score] - coercible = true - max_score = [max_score, score].max - in [false, false, _] - nil + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, score] + return [true, coerced, score] + in [false, true, score] + coercible = true + max_score = [max_score, score].max + in [false, false, _] + nil + end end - end - [false, coercible, max_score] + [false, coercible, max_score] + end end # rubocop:enable Style/CaseEquality @@ -941,7 +953,7 @@ def defaults = (@defaults ||= {}) private def add_field(name_sym, required:, type_info:, spec:) type_fn, info = case type_info - in Proc | Module | OpenAI::Converter + in Proc | Class | OpenAI::Converter [OpenAI::Converter.type_info({**spec, union: type_info}), spec] in Hash [OpenAI::Converter.type_info(type_info), type_info] @@ -1213,7 +1225,7 @@ def initialize(data = {}) type = self.class.fields[mapped]&.fetch(:type) stored = case [type, value] - in [Module, Hash] if type <= OpenAI::BaseModel + in [Class, Hash] if type <= OpenAI::BaseModel type.new(value) in [OpenAI::ArrayOf, Array] | [OpenAI::HashOf, Hash] type.coerce(value) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index f11ccb01..edf0c8df 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -2,18 +2,16 @@ module OpenAI module Models - module AllModels - extend OpenAI::Union - + # @abstract + class AllModels < OpenAI::Union variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::AllModels::UnionMember2 } - module UnionMember2 - extend OpenAI::Enum - + # @abstract + class UnionMember2 < OpenAI::Enum O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 
b7efc98d..2477a4ca 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -76,11 +76,11 @@ class SpeechCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. @@ -93,13 +93,13 @@ module Model # end end + # @abstract + # # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - module Voice - extend OpenAI::Enum - + class Voice < OpenAI::Enum ALLOY = :alloy ASH = :ash CORAL = :coral @@ -113,11 +113,11 @@ module Voice finalize! end + # @abstract + # # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. - module ResponseFormat - extend OpenAI::Enum - + class ResponseFormat < OpenAI::Enum MP3 = :mp3 OPUS = :opus AAC = :aac diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index b85a4f1e..26aae9d6 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -3,9 +3,8 @@ module OpenAI module Models module Audio - module SpeechModel - extend OpenAI::Enum - + # @abstract + class SpeechModel < OpenAI::Enum TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 1ec3b0ac..3ff8c770 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -131,12 +131,12 @@ class TranscriptionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). @@ -149,9 +149,8 @@ module Model # end end - module TimestampGranularity - extend OpenAI::Enum - + # @abstract + class TimestampGranularity < OpenAI::Enum WORD = :word SEGMENT = :segment diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 07d30d90..2ad0d933 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Audio + # @abstract + # # Represents a transcription response returned by model, based on the provided # input. - module TranscriptionCreateResponse - extend OpenAI::Union - + class TranscriptionCreateResponse < OpenAI::Union # Represents a transcription response returned by model, based on the provided input. 
variant -> { OpenAI::Models::Audio::Transcription } diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index fc9e716d..97303675 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -3,9 +3,8 @@ module OpenAI module Models module Audio - module TranscriptionInclude - extend OpenAI::Enum - + # @abstract + class TranscriptionInclude < OpenAI::Enum LOGPROBS = :logprobs finalize! diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index be40014b..4bddaa1b 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -3,13 +3,13 @@ module OpenAI module Models module Audio + # @abstract + # # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. - module TranscriptionStreamEvent - extend OpenAI::Union - + class TranscriptionStreamEvent < OpenAI::Union discriminator :type # Emitted when there is an additional text delta. This is also the first event emitted when the transcription starts. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 087b2d38..0b31b58c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -72,11 +72,11 @@ class TranslationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. @@ -89,11 +89,11 @@ module Model # end end + # @abstract + # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. 
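The `discriminator`/`variant` pairing in the stream-event union above is what routes a tagged payload to the right model during coercion: `coerce` reads the discriminator key off the raw hash and dispatches to the variant registered under that tag. A minimal sketch of the pattern, assuming the gem is on the load path; `TextDelta`, `TextDone`, and `StreamEvent` are illustrative stand-ins, not SDK classes:

```ruby
require "openai"

class TextDelta < OpenAI::BaseModel
  optional :delta, String
end

class TextDone < OpenAI::BaseModel
  optional :text, String
end

class StreamEvent < OpenAI::Union
  discriminator :type

  # tags are matched against the `:type` key of the incoming hash
  variant :"transcript.text.delta", -> { TextDelta }
  variant :"transcript.text.done", -> { TextDone }
end

event = StreamEvent.coerce({type: "transcript.text.delta", delta: "hel"})
event.class # => TextDelta
```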
- module ResponseFormat - extend OpenAI::Enum - + class ResponseFormat < OpenAI::Enum JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 4d7cf38c..94020236 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -3,9 +3,8 @@ module OpenAI module Models module Audio - module TranslationCreateResponse - extend OpenAI::Union - + # @abstract + class TranslationCreateResponse < OpenAI::Union variant -> { OpenAI::Models::Audio::Translation } variant -> { OpenAI::Models::Audio::TranslationVerbose } diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index cb25d87b..88507173 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -2,9 +2,8 @@ module OpenAI module Models - module AudioModel - extend OpenAI::Enum - + # @abstract + class AudioModel < OpenAI::Enum WHISPER_1 = :"whisper-1" GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 105ac628..9593d816 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -2,12 +2,12 @@ module OpenAI module Models + # @abstract + # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - module AudioResponseFormat - extend OpenAI::Enum - + class AudioResponseFormat < OpenAI::Enum JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index dcf830ae..91778516 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -224,10 +224,10 @@ class Batch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The current status of the batch. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum VALIDATING = :validating FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 35dc43cb..593eb7d2 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -59,23 +59,23 @@ class BatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The time frame within which the batch should be processed. Currently only `24h` # is supported. - module CompletionWindow - extend OpenAI::Enum - + class CompletionWindow < OpenAI::Enum NUMBER_24H = :"24h" finalize! end + # @abstract + # # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. 
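To make the enum conversion above concrete: the constants of a `class ... < OpenAI::Enum` subclass become its value set, `finalize!` eagerly memoizes that set, and `===`/`coerce` consult it. A small sketch under the same assumption that the gem is loaded; `Window` is a made-up enum, not the SDK's:

```ruby
require "openai"

class Window < OpenAI::Enum
  NUMBER_24H = :"24h"

  finalize!
end

Window.values        # => [:"24h"]
Window === :"24h"    # => true
Window.coerce("24h") # => :"24h" -- known strings fold to their symbol form
```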
- module Endpoint - extend OpenAI::Enum - + class Endpoint < OpenAI::Enum V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 9e1ef48e..f66edb15 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -157,14 +157,14 @@ class AssistantCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 5586ebe7..2deef264 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -67,11 +67,11 @@ class AssistantListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index da77528c..1e0036f2 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Beta + # @abstract + # # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), @@ -23,9 +25,7 @@ module Beta # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - module AssistantResponseFormatOption - extend OpenAI::Union - + class AssistantResponseFormatOption < OpenAI::Union # `auto` is the default value variant const: :auto diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 02fad932..fdc598d7 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Beta + # @abstract + # # Represents an event emitted when streaming a Run. # # Each event in a server-sent events stream has an `event` and `data` property: @@ -23,9 +25,7 @@ module Beta # gracefully in your code. See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. 
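The `variant const: :auto` registration seen above adds a literal value to a union alongside full model variants, so a bare symbol passes the union's case-equality check. A hedged sketch; `ReplyFormat` and `SchemaFormat` are invented names, not SDK classes:

```ruby
require "openai"

class SchemaFormat < OpenAI::BaseModel
  optional :name, String
end

class ReplyFormat < OpenAI::Union
  # the literal `:auto` is a member of the union in its own right
  variant const: :auto

  variant -> { SchemaFormat }
end

ReplyFormat === :auto             # => true
ReplyFormat === SchemaFormat.new  # => true
```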
- module AssistantStreamEvent - extend OpenAI::Union - + class AssistantStreamEvent < OpenAI::Union discriminator :event # Occurs when a new [thread](https://platform.openai.com/docs/api-reference/threads/object) is created. diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index ecd6b3db..0ea9bc07 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -3,9 +3,8 @@ module OpenAI module Models module Beta - module AssistantTool - extend OpenAI::Union - + # @abstract + class AssistantTool < OpenAI::Union discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 78dd540e..9aba0349 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -30,10 +30,10 @@ class AssistantToolChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of the tool. If type is `function`, the function name must be set - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum FUNCTION = :function CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 560a4a80..62a2c795 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Beta + # @abstract + # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value # and means the model can pick between generating a message or calling one or more @@ -10,22 +12,20 @@ module Beta # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - module AssistantToolChoiceOption - extend OpenAI::Union - + class AssistantToolChoiceOption < OpenAI::Union # `none` means the model will not call any tools and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools before responding to the user. variant enum: -> { OpenAI::Models::Beta::AssistantToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific tool. variant -> { OpenAI::Models::Beta::AssistantToolChoice } + # @abstract + # # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. - module Auto - extend OpenAI::Enum - + class Auto < OpenAI::Enum NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 4a6636f6..494df652 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -161,14 +161,14 @@ class AssistantUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union # @!group O3_MINI = :"o3-mini" diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 102489f1..333f1770 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -105,11 +105,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - module Ranker - extend OpenAI::Enum - + class Ranker < OpenAI::Enum AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 5ea73994..b110bfed 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta + # @abstract + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - module MessageStreamEvent - extend OpenAI::Union - + class MessageStreamEvent < OpenAI::Union discriminator :event # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created. diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 45e973df..82c7266e 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta + # @abstract + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - module RunStepStreamEvent - extend OpenAI::Union - + class RunStepStreamEvent < OpenAI::Union discriminator :event # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created. diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index f506c9e8..f8548343 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Beta + # @abstract + # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - module RunStreamEvent - extend OpenAI::Union - + class RunStreamEvent < OpenAI::Union discriminator :event # Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created. diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 60e8fa5b..7650d56b 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -210,13 +210,13 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. 
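One property of these enum classes worth spelling out, since it is what the reworked `==` earlier in this patch encodes: equality is structural, compared by value set rather than by class identity. A sketch with throwaway names, assuming the gem is loaded:

```ruby
require "openai"

class SortOrder < OpenAI::Enum
  ASC = :asc
  DESC = :desc

  finalize!
end

class ListOrder < OpenAI::Enum
  ASC = :asc
  DESC = :desc

  finalize!
end

# distinct classes, same value set, so they compare equal
SortOrder == ListOrder # => true
```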
- module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -323,10 +323,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The text contents of the message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -343,15 +343,15 @@ module Content # end end + # @abstract + # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant @@ -388,9 +388,8 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -725,9 +724,8 @@ class FileSearch < OpenAI::BaseModel end end - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::FileSearchTool } @@ -769,13 +767,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 4978a330..734a53ad 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -96,10 +96,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The text contents of the message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -116,15 +116,15 @@ module Content # end end + # @abstract + # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
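The `Content` unions above combine a plain-string variant with an `OpenAI::ArrayOf` variant so a message body may be either a bare string or a homogeneous array of typed parts. Roughly, with `TextPart` and `Body` as stand-ins and the element-by-element coercion assumed from the converter interface:

```ruby
require "openai"

class TextPart < OpenAI::BaseModel
  optional :text, String
end

class Body < OpenAI::Union
  TextPartArray = OpenAI::ArrayOf[-> { TextPart }]

  variant String

  variant -> { TextPartArray }
end

Body.coerce("hello")           # => "hello" (the string variant wins)
Body.coerce([{text: "hello"}]) # => array of TextPart, per this sketch
```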
- module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant @@ -161,9 +161,8 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 99110d38..3f0a547d 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - module Annotation - extend OpenAI::Union - + class Annotation < OpenAI::Union discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 66fbd501..6b22d5be 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - module AnnotationDelta - extend OpenAI::Union - + class AnnotationDelta < OpenAI::Union discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index fab8e1fc..2156325f 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -32,11 +32,11 @@ class ImageFile < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index e40aa711..81caa23f 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -36,11 +36,11 @@ class ImageFileDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
- module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 59dcae47..8c08ac9d 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -31,11 +31,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 787dd26c..ad6177d4 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -35,11 +35,11 @@ class ImageURLDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index da2a8d75..0a6b9d1b 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -172,9 +172,8 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } @@ -218,10 +217,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The reason the message is incomplete. - module Reason - extend OpenAI::Enum - + class Reason < OpenAI::Enum CONTENT_FILTER = :content_filter MAX_TOKENS = :max_tokens RUN_CANCELLED = :run_cancelled @@ -232,21 +231,21 @@ module Reason end end + # @abstract + # # The entity that produced the message. One of `user` or `assistant`. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant finalize! end + # @abstract + # # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress INCOMPLETE = :incomplete COMPLETED = :completed diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 40ea6954..7e6e4698 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContent - extend OpenAI::Union - + class MessageContent < OpenAI::Union discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. 
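Every hunk above applies the same mechanical migration: `module Foo` with `extend OpenAI::Union` becomes `class Foo < OpenAI::Union` carrying an `# @abstract` tag, while the existing `discriminator` and `variant` declarations are left untouched. For reviewers unfamiliar with the DSL, here is a minimal, self-contained sketch of how a discriminated union in this style can dispatch on its tag. The `Union` base class, its `resolve` helper, and the `*Part` structs below are illustrative stand-ins written for this note, not the SDK's actual implementation.

```ruby
# Sketch only: a toy Union base mimicking the declaration style above.
class Union
  class << self
    # Record (or read back) the payload key that selects a variant.
    def discriminator(key = nil)
      @discriminator = key unless key.nil?
      @discriminator
    end

    # Register a variant class under its discriminator tag.
    def variant(tag, klass)
      variants[tag] = klass
    end

    def variants
      @variants ||= {}
    end

    # Resolve a raw payload to the variant class matching its tag.
    def resolve(payload)
      tag = payload.fetch(discriminator).to_sym
      variants.fetch(tag) { raise ArgumentError, "unknown variant: #{tag}" }
    end
  end
end

# Hypothetical payload shapes, standing in for the generated models.
ImageFilePart = Struct.new(:type, :image_file, keyword_init: true)
TextPart      = Struct.new(:type, :text, keyword_init: true)

# Mirrors the declaration shape of MessageContent after this patch.
class MessageContentSketch < Union
  discriminator :type

  variant :image_file, ImageFilePart
  variant :text, TextPart
end

klass = MessageContentSketch.resolve({type: "text", text: "hi"})
puts klass.new(type: "text", text: "hi").text # => "hi"
```

Under this sketch, resolution is a single hash lookup keyed on the `:type` field, which is the dispatch the generated `discriminator :type` lines exist to support; moving from `module` + `extend` to subclassing changes where that behavior is inherited from, not what it does.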
diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 634b517c..bcf970c1 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContentDelta - extend OpenAI::Union - + class MessageContentDelta < OpenAI::Union discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 4ae935f2..74766529 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContentPartParam - extend OpenAI::Union - + class MessageContentPartParam < OpenAI::Union discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 1fcbaa76..480f9a24 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -56,10 +56,10 @@ class MessageCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The text contents of the message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -76,15 +76,15 @@ module Content # end end + # @abstract + # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant @@ -121,9 +121,8 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 46d5d46f..f3bc9993 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -35,10 +35,10 @@ class MessageDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The entity that produced the message. One of `user` or `assistant`. 
- module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 816ed048..1a302d8c 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -79,11 +79,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 2d7b18a8..02c20e73 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -305,11 +305,11 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - module Reason - extend OpenAI::Enum - + class Reason < OpenAI::Enum MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens @@ -340,10 +340,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt @@ -424,13 +424,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index f1e8d793..b6b3564f 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -285,10 +285,10 @@ class AdditionalMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The text contents of the message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -305,15 +305,15 @@ module Content # end end + # @abstract + # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
- module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant @@ -350,9 +350,8 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Tool - extend OpenAI::Union - + # @abstract + class Tool < OpenAI::Union discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -384,13 +383,13 @@ class FileSearch < OpenAI::BaseModel end end + # @abstract + # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -431,13 +430,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index fa0aee48..ce011119 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -68,11 +68,11 @@ class RunListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index 727fa009..b85ac1e3 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads + # @abstract + # # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - module RunStatus - extend OpenAI::Enum - + class RunStatus < OpenAI::Enum QUEUED = :queued IN_PROGRESS = :in_progress REQUIRES_ACTION = :requires_action diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index e21e1235..0313a163 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -63,10 +63,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Text output from the Code Interpreter tool call as part of a run step. 
- module Output - extend OpenAI::Union - + class Output < OpenAI::Union discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 12321ae7..9e9097b3 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -86,10 +86,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Text output from the Code Interpreter tool call as part of a run step. - module Output - extend OpenAI::Union - + class Output < OpenAI::Union discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 47fd4a37..9011d196 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -93,11 +93,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - module Ranker - extend OpenAI::Enum - + class Ranker < OpenAI::Enum AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 @@ -179,10 +179,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of the content. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum TEXT = :text finalize! diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index d54a639a..4a7aa3e2 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -183,10 +183,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # One of `server_error` or `rate_limit_exceeded`. - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded @@ -194,11 +194,11 @@ module Code end end + # @abstract + # # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress CANCELLED = :cancelled FAILED = :failed @@ -208,10 +208,10 @@ module Status finalize! end + # @abstract + # # The details of the run step. - module StepDetails - extend OpenAI::Union - + class StepDetails < OpenAI::Union discriminator :type # Details of the message creation by the run step. @@ -227,10 +227,10 @@ module StepDetails # end end + # @abstract + # # The type of run step, which can be either `message_creation` or `tool_calls`. 
- module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index d5fc45bb..c8ba4aa1 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -25,10 +25,10 @@ class RunStepDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The details of the run step. - module StepDetails - extend OpenAI::Union - + class StepDetails < OpenAI::Union discriminator :type # Details of the message creation by the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 32e98757..dd1c5381 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -5,9 +5,8 @@ module Models module Beta module Threads module Runs - module RunStepInclude - extend OpenAI::Enum - + # @abstract + class RunStepInclude < OpenAI::Enum STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 3a12f3fe..685e0e75 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -92,11 +92,11 @@ class StepListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index c6e9367f..20cc29a6 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs + # @abstract + # # Details of the Code Interpreter tool call the run step was involved in. - module ToolCall - extend OpenAI::Union - + class ToolCall < OpenAI::Union discriminator :type # Details of the Code Interpreter tool call the run step was involved in. diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index f58ae752..3dae6b9b 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs + # @abstract + # # Details of the Code Interpreter tool call the run step was involved in. - module ToolCallDelta - extend OpenAI::Union - + class ToolCallDelta < OpenAI::Union discriminator :type # Details of the Code Interpreter tool call the run step was involved in. diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index fcb5196d..b80607a6 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -133,15 +133,15 @@ class Choice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The reason the model stopped generating tokens. 
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - module FinishReason - extend OpenAI::Enum - + class FinishReason < OpenAI::Enum STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -176,10 +176,10 @@ class Logprobs < OpenAI::BaseModel end end + # @abstract + # # The service tier used for processing the request. - module ServiceTier - extend OpenAI::Enum - + class ServiceTier < OpenAI::Enum SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index e9ee4909..5b2e877b 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -106,11 +106,11 @@ class Audio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union ArrayOfContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] @@ -120,11 +120,11 @@ module Content # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. variant OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray + # @abstract + # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - module ArrayOfContentPart - extend OpenAI::Union - + class ArrayOfContentPart < OpenAI::Union discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 5f1ca422..5d72b2de 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -30,11 +30,11 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - module Format - extend OpenAI::Enum - + class Format < OpenAI::Enum WAV = :wav MP3 = :mp3 FLAC = :flac @@ -44,11 +44,11 @@ module Format finalize! end + # @abstract + # # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - module Voice - extend OpenAI::Enum - + class Voice < OpenAI::Enum ALLOY = :alloy ASH = :ash BALLAD = :ballad diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index fd4648fc..5f0a0fef 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -233,10 +233,10 @@ class FunctionCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The role of the author of this message. 
- module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum DEVELOPER = :developer SYSTEM = :system USER = :user @@ -324,10 +324,10 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The type of the tool. Currently, only `function` is supported. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum FUNCTION = :function finalize! @@ -335,15 +335,15 @@ module Type end end + # @abstract + # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - module FinishReason - extend OpenAI::Enum - + class FinishReason < OpenAI::Enum STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -378,10 +378,10 @@ class Logprobs < OpenAI::BaseModel end end + # @abstract + # # The service tier used for processing the request. - module ServiceTier - extend OpenAI::Enum - + class ServiceTier < OpenAI::Enum SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 9b1d2144..fd7d3c2c 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Chat + # @abstract + # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - module ChatCompletionContentPart - extend OpenAI::Union - + class ChatCompletionContentPart < OpenAI::Union discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 7b165ecb..9990262c 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -51,11 +51,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 7e5f53ee..0ed3dff8 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -48,10 +48,10 @@ class InputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The format of the encoded audio data. Currently supports "wav" and "mp3". 
- module Format - extend OpenAI::Enum - + class Format < OpenAI::Enum WAV = :wav MP3 = :mp3 diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 44c3742f..9e91e405 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The contents of the developer message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index c8a3a17d..811b7f8c 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Chat + # @abstract + # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - module ChatCompletionMessageParam - extend OpenAI::Union - + class ChatCompletionMessageParam < OpenAI::Union discriminator :role # Developer-provided instructions that the model should follow, regardless of diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index eae25ae3..725b907d 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -3,9 +3,8 @@ module OpenAI module Models module Chat - module ChatCompletionModality - extend OpenAI::Enum - + # @abstract + class ChatCompletionModality < OpenAI::Enum TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index ed62e69d..f9e93347 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -30,12 +30,12 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. 
- module Content - extend OpenAI::Union - + class Content < OpenAI::Union ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 43666654..234d78a6 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Chat + # @abstract + # # The role of the author of a message - module ChatCompletionRole - extend OpenAI::Enum - + class ChatCompletionRole < OpenAI::Enum DEVELOPER = :developer SYSTEM = :system USER = :user diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index ac99aca3..a0cc371d 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The contents of the system message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 273236d8..28f7750e 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Chat + # @abstract + # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -12,21 +14,19 @@ module Chat # # `none` is the default when no tools are present. `auto` is the default if tools # are present. - module ChatCompletionToolChoiceOption - extend OpenAI::Union - + class ChatCompletionToolChoiceOption < OpenAI::Union # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. variant enum: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific function. variant -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice } + # @abstract + # # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. 
- module Auto - extend OpenAI::Enum - + class Auto < OpenAI::Enum NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index fc5f1903..ebbca6aa 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -31,10 +31,10 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The contents of the tool message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 89d87d20..4a1eee01 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -39,10 +39,10 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The contents of the user message. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union ChatCompletionContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index c326a2d8..49e175c1 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -417,14 +417,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI @@ -440,6 +440,8 @@ module Model # end end + # @abstract + # # @deprecated # # Deprecated in favor of `tool_choice`. @@ -456,21 +458,19 @@ module Model # # `none` is the default when no functions are present. `auto` is the default if # functions are present. - module FunctionCall - extend OpenAI::Union - + class FunctionCall < OpenAI::Union # `none` means the model will not call a function and instead generates a message. `auto` means the model can pick between generating a message or calling a function. variant enum: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } # Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. variant -> { OpenAI::Models::Chat::ChatCompletionFunctionCallOption } + # @abstract + # # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. 
- module FunctionCallMode - extend OpenAI::Enum - + class FunctionCallMode < OpenAI::Enum NONE = :none AUTO = :auto @@ -530,15 +530,16 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - module Modality - extend OpenAI::Enum - + # @abstract + class Modality < OpenAI::Enum TEXT = :text AUDIO = :audio finalize! end + # @abstract + # # An object specifying the format that the model must output. # # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured @@ -549,9 +550,7 @@ module Modality # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - module ResponseFormat - extend OpenAI::Union - + class ResponseFormat < OpenAI::Union # Default response format. Used to generate text responses. variant -> { OpenAI::Models::ResponseFormatText } @@ -572,6 +571,8 @@ module ResponseFormat # end end + # @abstract + # # Specifies the latency tier to use for processing the request. This parameter is # relevant for customers subscribed to the scale tier service: # @@ -586,20 +587,18 @@ module ResponseFormat # # When this parameter is set, the response body will include the `service_tier` # utilized. - module ServiceTier - extend OpenAI::Enum - + class ServiceTier < OpenAI::Enum AUTO = :auto DEFAULT = :default finalize! end + # @abstract + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - module Stop - extend OpenAI::Union - + class Stop < OpenAI::Union StringArray = OpenAI::ArrayOf[String] variant String @@ -646,11 +645,11 @@ class WebSearchOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - module SearchContextSize - extend OpenAI::Enum - + class SearchContextSize < OpenAI::Enum LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index ce2187d8..81936268 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -69,11 +69,11 @@ class CompletionListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 7d45da51..66af6ada 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -50,11 +50,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
- module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 695dcb50..8edd5e1a 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -2,9 +2,8 @@ module OpenAI module Models - module ChatModel - extend OpenAI::Enum - + # @abstract + class ChatModel < OpenAI::Enum O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2b00e018..17be219d 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -41,6 +41,8 @@ class ComparisonFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # - `eq`: equals @@ -49,9 +51,7 @@ class ComparisonFilter < OpenAI::BaseModel # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum EQ = :eq NE = :ne GT = :gt @@ -62,11 +62,11 @@ module Type finalize! end + # @abstract + # # The value to compare against the attribute key; supports string, number, or # boolean types. - module Value - extend OpenAI::Union - + class Value < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index df42c59b..6237b56e 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -37,13 +37,13 @@ class CompletionChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - module FinishReason - extend OpenAI::Enum - + class FinishReason < OpenAI::Enum STOP = :stop LENGTH = :length CONTENT_FILTER = :content_filter diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index cbdc341b..c1c44876 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -228,14 +228,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union # @!group GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" @@ -259,15 +259,15 @@ module Model # end end + # @abstract + # # The prompt(s) to generate completions for, encoded as a string, array of # strings, array of tokens, or array of token arrays. # # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. 
- module Prompt - extend OpenAI::Union - + class Prompt < OpenAI::Union StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -289,11 +289,11 @@ module Prompt # end end + # @abstract + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - module Stop - extend OpenAI::Union - + class Stop < OpenAI::Union StringArray = OpenAI::ArrayOf[String] variant String diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 4223ab0d..11452bfe 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -26,11 +26,11 @@ class CompoundFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - module Filter - extend OpenAI::Union - + class Filter < OpenAI::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -43,10 +43,10 @@ module Filter # end end + # @abstract + # # Type of operation: `and` or `or`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AND = :and OR = :or diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index e215c574..4d126d18 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -76,6 +76,8 @@ class EmbeddingCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for @@ -84,9 +86,7 @@ class EmbeddingCreateParams < OpenAI::BaseModel # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - module Input - extend OpenAI::Union - + class Input < OpenAI::Union StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -112,14 +112,14 @@ module Input # end end + # @abstract + # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. @@ -132,11 +132,11 @@ module Model # end end + # @abstract + # # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
- module EncodingFormat - extend OpenAI::Enum - + class EncodingFormat < OpenAI::Enum FLOAT = :float BASE64 = :base64 diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 0eab075d..65247fdf 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -2,9 +2,8 @@ module OpenAI module Models - module EmbeddingModel - extend OpenAI::Enum - + # @abstract + class EmbeddingModel < OpenAI::Enum TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 9e917e35..24c4dd7e 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -2,10 +2,10 @@ module OpenAI module Models + # @abstract + # # The strategy used to chunk the file. - module FileChunkingStrategy - extend OpenAI::Union - + class FileChunkingStrategy < OpenAI::Union discriminator :type variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObject } diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index d1943074..d9e6a634 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -2,11 +2,11 @@ module OpenAI module Models + # @abstract + # # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - module FileChunkingStrategyParam - extend OpenAI::Union - + class FileChunkingStrategyParam < OpenAI::Union discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index ae6cf6ed..3a3c6b9b 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -63,11 +63,11 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index a89db46f..f0476f5d 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -99,12 +99,12 @@ class FileObject < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - module Purpose - extend OpenAI::Enum - + class Purpose < OpenAI::Enum ASSISTANTS = :assistants ASSISTANTS_OUTPUT = :assistants_output BATCH = :batch @@ -116,13 +116,13 @@ module Purpose finalize! end + # @abstract + # # @deprecated # # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. 
- module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum UPLOADED = :uploaded PROCESSED = :processed ERROR = :error diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 49c2717d..8b4f9af2 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -2,13 +2,13 @@ module OpenAI module Models + # @abstract + # # The intended purpose of the uploaded file. One of: - `assistants`: Used in the # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - module FilePurpose - extend OpenAI::Enum - + class FilePurpose < OpenAI::Enum ASSISTANTS = :assistants BATCH = :batch FINE_TUNE = :"fine-tune" diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index cdfbcc5f..68876089 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -272,11 +272,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -288,11 +288,11 @@ module BatchSize # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -304,11 +304,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -321,11 +321,11 @@ module NEpochs end end + # @abstract + # # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum VALIDATING_FILES = :validating_files QUEUED = :queued RUNNING = :running @@ -459,11 +459,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -475,11 +475,11 @@ module BatchSize # end end + # @abstract + # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - module Beta - extend OpenAI::Union - + class Beta < OpenAI::Union variant const: :auto variant Float @@ -491,11 +491,11 @@ module Beta # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -507,11 +507,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -594,11 +594,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -610,11 +610,11 @@ module BatchSize # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -626,11 +626,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -644,10 +644,10 @@ module NEpochs end end + # @abstract + # # The type of method. Is either `supervised` or `dpo`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 207444b5..0688b7a8 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -69,10 +69,10 @@ class FineTuningJobEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The log level of the event. - module Level - extend OpenAI::Enum - + class Level < OpenAI::Enum INFO = :info WARN = :warn ERROR = :error @@ -80,10 +80,10 @@ module Level finalize! end + # @abstract + # # The type of event. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum MESSAGE = :message METRICS = :metrics diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index c1cbd747..6900ac01 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -142,11 +142,11 @@ class JobCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - module Model - extend OpenAI::Union - + class Model < OpenAI::Union # @!group BABBAGE_002 = :"babbage-002" @@ -222,11 +222,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
- module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -238,11 +238,11 @@ module BatchSize # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -254,11 +254,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -471,11 +471,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -487,11 +487,11 @@ module BatchSize # end end + # @abstract + # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - module Beta - extend OpenAI::Union - + class Beta < OpenAI::Union variant const: :auto variant Float @@ -503,11 +503,11 @@ module Beta # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -519,11 +519,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -606,11 +606,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union - + class BatchSize < OpenAI::Union variant const: :auto variant Integer @@ -622,11 +622,11 @@ module BatchSize # end end + # @abstract + # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union - + class LearningRateMultiplier < OpenAI::Union variant const: :auto variant Float @@ -638,11 +638,11 @@ module LearningRateMultiplier # end end + # @abstract + # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union - + class NEpochs < OpenAI::Union variant const: :auto variant Integer @@ -656,10 +656,10 @@ module NEpochs end end + # @abstract + # # The type of method. Is either `supervised` or `dpo`. 
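Each hyperparameter union above pairs a literal `const: :auto` variant with a numeric variant, so request params may carry either form. A hedged sketch of the two call shapes the `BatchSize` declaration admits (hash keys follow the surrounding hunks):

# Either value satisfies the BatchSize union declared above.
hyperparameters = {batch_size: :auto} # matches `variant const: :auto`
hyperparameters = {batch_size: 16}    # matches `variant Integer`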
- module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index ff2e6112..d4546f63 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -70,11 +70,11 @@ class ImageCreateVariationParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The model to use for image generation. Only `dall-e-2` is supported at this # time. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -87,23 +87,23 @@ module Model # end end + # @abstract + # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum - + class ResponseFormat < OpenAI::Enum URL = :url B64_JSON = :b64_json finalize! end + # @abstract + # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - module Size - extend OpenAI::Enum - + class Size < OpenAI::Enum NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 523b95ca..14c8b8cf 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -101,11 +101,11 @@ class ImageEditParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The model to use for image generation. Only `dall-e-2` is supported at this # time. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -118,23 +118,23 @@ module Model # end end + # @abstract + # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum - + class ResponseFormat < OpenAI::Enum URL = :url B64_JSON = :b64_json finalize! end + # @abstract + # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - module Size - extend OpenAI::Enum - + class Size < OpenAI::Enum NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index a4993e78..577665bd 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -104,10 +104,10 @@ class ImageGenerateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The model to use for image generation. - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The model to use for image generation. @@ -120,36 +120,36 @@ module Model # end end + # @abstract + # # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - module Quality - extend OpenAI::Enum - + class Quality < OpenAI::Enum STANDARD = :standard HD = :hd finalize! 
end + # @abstract + # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum - + class ResponseFormat < OpenAI::Enum URL = :url B64_JSON = :b64_json finalize! end + # @abstract + # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - module Size - extend OpenAI::Enum - + class Size < OpenAI::Enum NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" @@ -159,13 +159,13 @@ module Size finalize! end + # @abstract + # # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. - module Style - extend OpenAI::Enum - + class Style < OpenAI::Enum VIVID = :vivid NATURAL = :natural diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index ce36cc6d..e49e6699 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -2,9 +2,8 @@ module OpenAI module Models - module ImageModel - extend OpenAI::Enum - + # @abstract + class ImageModel < OpenAI::Enum DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 2b0818e8..0f3c5a90 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -309,110 +309,97 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Harassment - extend OpenAI::Enum - + # @abstract + class Harassment < OpenAI::Enum TEXT = :text finalize! end - module HarassmentThreatening - extend OpenAI::Enum - + # @abstract + class HarassmentThreatening < OpenAI::Enum TEXT = :text finalize! end - module Hate - extend OpenAI::Enum - + # @abstract + class Hate < OpenAI::Enum TEXT = :text finalize! end - module HateThreatening - extend OpenAI::Enum - + # @abstract + class HateThreatening < OpenAI::Enum TEXT = :text finalize! end - module Illicit - extend OpenAI::Enum - + # @abstract + class Illicit < OpenAI::Enum TEXT = :text finalize! end - module IllicitViolent - extend OpenAI::Enum - + # @abstract + class IllicitViolent < OpenAI::Enum TEXT = :text finalize! end - module SelfHarm - extend OpenAI::Enum - + # @abstract + class SelfHarm < OpenAI::Enum TEXT = :text IMAGE = :image finalize! end - module SelfHarmInstruction - extend OpenAI::Enum - + # @abstract + class SelfHarmInstruction < OpenAI::Enum TEXT = :text IMAGE = :image finalize! end - module SelfHarmIntent - extend OpenAI::Enum - + # @abstract + class SelfHarmIntent < OpenAI::Enum TEXT = :text IMAGE = :image finalize! end - module Sexual - extend OpenAI::Enum - + # @abstract + class Sexual < OpenAI::Enum TEXT = :text IMAGE = :image finalize! end - module SexualMinor - extend OpenAI::Enum - + # @abstract + class SexualMinor < OpenAI::Enum TEXT = :text finalize! end - module Violence - extend OpenAI::Enum - + # @abstract + class Violence < OpenAI::Enum TEXT = :text IMAGE = :image finalize! 
end - module ViolenceGraphic - extend OpenAI::Enum - + # @abstract + class ViolenceGraphic < OpenAI::Enum TEXT = :text IMAGE = :image diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 89acded2..e0789618 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -36,11 +36,11 @@ class ModerationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - module Input - extend OpenAI::Union - + class Input < OpenAI::Union StringArray = OpenAI::ArrayOf[String] ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] @@ -61,13 +61,13 @@ module Input # end end + # @abstract + # # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - module Model - extend OpenAI::Union - + class Model < OpenAI::Union variant String # The content moderation model you would like to use. Learn more in diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index aad66a00..4089ad86 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -2,9 +2,8 @@ module OpenAI module Models - module ModerationModel - extend OpenAI::Enum - + # @abstract + class ModerationModel < OpenAI::Enum OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" TEXT_MODERATION_LATEST = :"text-moderation-latest" diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index c6441173..47271a66 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -2,10 +2,10 @@ module OpenAI module Models + # @abstract + # # An object describing an image to classify. - module ModerationMultiModalInput - extend OpenAI::Union - + class ModerationMultiModalInput < OpenAI::Union discriminator :type # An object describing an image to classify. diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index cfe04696..a4ec26de 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -37,14 +37,14 @@ class Reasoning < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - module GenerateSummary - extend OpenAI::Enum - + class GenerateSummary < OpenAI::Enum CONCISE = :concise DETAILED = :detailed diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index e27540e1..f1fee21d 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -2,15 +2,15 @@ module OpenAI module Models + # @abstract + # # **o-series models only** # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. 
Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - module ReasoningEffort - extend OpenAI::Enum - + class ReasoningEffort < OpenAI::Enum LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 1359937f..65a0097a 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -41,10 +41,10 @@ class ComputerTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of computer environment to control. - module Environment - extend OpenAI::Enum - + class Environment < OpenAI::Enum MAC = :mac WINDOWS = :windows UBUNTU = :ubuntu diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index bd57a3e2..e4effc89 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -43,11 +43,11 @@ class EasyInputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union # A text input to the model. variant String @@ -62,11 +62,11 @@ module Content # end end + # @abstract + # # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user ASSISTANT = :assistant SYSTEM = :system @@ -75,10 +75,10 @@ module Role finalize! end + # @abstract + # # The type of the message input. Always `message`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 28ded43b..c2ba4177 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -62,10 +62,10 @@ class FileSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A filter to apply based on file attributes. - module Filters - extend OpenAI::Union - + class Filters < OpenAI::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -112,10 +112,10 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The ranker to use for the file search. - module Ranker - extend OpenAI::Enum - + class Ranker < OpenAI::Enum AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index adee6e5b..e0cb2854 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -63,13 +63,13 @@ class InputItemListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. 
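Unions like `Content` above mix a bare `String` variant with a structured list variant, so user input can be sent as plain text or as typed parts. A hedged sketch of both shapes (the hash layout is illustrative, not quoted verbatim from this patch):

# Either content value satisfies the EasyInputMessage::Content union above.
message = {role: :user, content: "Describe this image."}
message = {role: :user, content: [{type: :input_text, text: "Describe this image."}]}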
- module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 71264a0f..8b979829 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -289,10 +289,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The reason why the response is incomplete. - module Reason - extend OpenAI::Enum - + class Reason < OpenAI::Enum MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter @@ -300,12 +300,12 @@ module Reason end end + # @abstract + # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - module ToolChoice - extend OpenAI::Union - + class ToolChoice < OpenAI::Union # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -330,6 +330,8 @@ module ToolChoice # end end + # @abstract + # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -337,9 +339,7 @@ module ToolChoice # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - module Truncation - extend OpenAI::Enum - + class Truncation < OpenAI::Enum AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index bf04793b..e93e62cf 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -48,10 +48,10 @@ class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The output of a code interpreter tool call that is text. - module Result - extend OpenAI::Union - + class Result < OpenAI::Union discriminator :type # The output of a code interpreter tool call that is text. @@ -137,10 +137,10 @@ class File < OpenAI::BaseModel # end end + # @abstract + # # The status of the code interpreter tool call. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress INTERPRETING = :interpreting COMPLETED = :completed diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 07da24be..20bd85ae 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -58,10 +58,10 @@ class ResponseComputerToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A click action. - module Action - extend OpenAI::Union - + class Action < OpenAI::Union discriminator :type # A click action. @@ -130,11 +130,11 @@ class Click < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. 
- module Button - extend OpenAI::Enum - + class Button < OpenAI::Enum LEFT = :left RIGHT = :right WHEEL = :wheel @@ -438,11 +438,11 @@ class PendingSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -450,10 +450,10 @@ module Status finalize! end + # @abstract + # # The type of the computer call. Always `computer_call`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum COMPUTER_CALL = :computer_call finalize! diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 33e5105d..252be325 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -94,11 +94,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index ad933f5d..72456bac 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses + # @abstract + # # Multi-modal input and output contents. - module ResponseContent - extend OpenAI::Union - + class ResponseContent < OpenAI::Union # A text input to the model. variant -> { OpenAI::Models::Responses::ResponseInputText } diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index a1fb23a0..1fa8be8b 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartAddedEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The content part that was added. - module Part - extend OpenAI::Union - + class Part < OpenAI::Union discriminator :type # A text output from the model. diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 968c86e1..0911d697 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartDoneEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The content part that is done. - module Part - extend OpenAI::Union - + class Part < OpenAI::Union discriminator :type # A text output from the model. 
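The `Part`-style unions above resolve an incoming payload by its `discriminator` property rather than trying each variant in turn. A minimal hypothetical sketch of the pattern (the variant classes are invented for illustration):

# Hypothetical discriminated union mirroring the pattern in these hunks:
# the payload's `type` field picks the variant class used for coercion.
class Part < OpenAI::Union
  discriminator :type

  variant :output_text, -> { OutputText } # {type: :output_text, ...}
  variant :refusal, -> { Refusal }        # {type: :refusal, ...}
end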
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index ebca56e3..05ef2120 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -242,6 +242,8 @@ class ResponseCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -251,9 +253,7 @@ class ResponseCreateParams < OpenAI::BaseModel # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - module Input - extend OpenAI::Union - + class Input < OpenAI::Union # A text input to the model, equivalent to a text input with the # `user` role. variant String @@ -269,12 +269,12 @@ module Input # end end + # @abstract + # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - module ToolChoice - extend OpenAI::Union - + class ToolChoice < OpenAI::Union # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -299,6 +299,8 @@ module ToolChoice # end end + # @abstract + # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -306,9 +308,7 @@ module ToolChoice # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - module Truncation - extend OpenAI::Enum - + class Truncation < OpenAI::Enum AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 21b8b11b..47dd8b4b 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -26,10 +26,10 @@ class ResponseError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The error code for the response. - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 11c8e566..8054ec60 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -52,11 +52,11 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the file search tool call. 
One of `in_progress`, `searching`, # `completed`, `incomplete`, or `failed`. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed @@ -130,9 +130,8 @@ class Result < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 81d6ac10..6749d6bc 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Responses + # @abstract + # # An object specifying the format that the model must output. # # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -16,9 +18,7 @@ module Responses # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - module ResponseFormatTextConfig - extend OpenAI::Union - + class ResponseFormatTextConfig < OpenAI::Union discriminator :type # Default response format. Used to generate text responses. diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 2cdffc5f..b2b500c1 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -65,11 +65,11 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 283f9afa..503dee7b 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -50,11 +50,11 @@ class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index fbcf112a..8afd6c8b 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -35,10 +35,10 @@ class ResponseFunctionWebSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the web search tool call.
- module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 7eb0bd14..2d318473 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Responses + # @abstract + # # Specify additional output data to include in the model response. Currently # supported values are: # @@ -11,9 +13,7 @@ module Responses # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - module ResponseIncludable - extend OpenAI::Enum - + class ResponseIncludable < OpenAI::Enum FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 23539df8..24201588 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -33,10 +33,10 @@ class ResponseInputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The format of the audio data. Currently supported formats are `mp3` and `wav`. - module Format - extend OpenAI::Enum - + class Format < OpenAI::Enum MP3 = :mp3 WAV = :wav diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index fd4e014d..8d6226d4 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses + # @abstract + # # A text input to the model. - module ResponseInputContent - extend OpenAI::Union - + class ResponseInputContent < OpenAI::Union discriminator :type # A text input to the model. diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index e6d2c0ce..e9334fd0 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -43,11 +43,11 @@ class ResponseInputImage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - module Detail - extend OpenAI::Enum - + class Detail < OpenAI::Enum HIGH = :high LOW = :low AUTO = :auto diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index adb90a0a..de2477cf 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -3,14 +3,14 @@ module OpenAI module Models module Responses + # @abstract + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
- module ResponseInputItem - extend OpenAI::Union - + class ResponseInputItem < OpenAI::Union discriminator :type # A message input to the model with a role indicating instruction following @@ -106,10 +106,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The role of the message input. One of `user`, `system`, or `developer`. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user SYSTEM = :system DEVELOPER = :developer @@ -117,11 +117,11 @@ module Role finalize! end + # @abstract + # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -129,10 +129,10 @@ module Status finalize! end + # @abstract + # # The type of the message input. Always set to `message`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum MESSAGE = :message finalize! @@ -236,11 +236,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -303,11 +303,11 @@ class FunctionCallOutput < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 4e29d6dd..2a9dd999 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -55,10 +55,10 @@ class ResponseInputMessageItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The role of the message input. One of `user`, `system`, or `developer`. - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER = :user SYSTEM = :system DEVELOPER = :developer @@ -66,11 +66,11 @@ module Role finalize! end + # @abstract + # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -78,10 +78,10 @@ module Status finalize! end + # @abstract + # # The type of the message input. Always set to `message`. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index baeff0e5..b3f4f86c 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses + # @abstract + # # Content item used to generate a response. 
- module ResponseItem - extend OpenAI::Union - + class ResponseItem < OpenAI::Union discriminator :type variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem } diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 9bdb2a25..2ed933fd 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses + # @abstract + # # An output message from the model. - module ResponseOutputItem - extend OpenAI::Union - + class ResponseOutputItem < OpenAI::Union discriminator :type # An output message from the model. diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index d9ab6443..1dee0300 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -49,10 +49,10 @@ class ResponseOutputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A text output from the model. - module Content - extend OpenAI::Union - + class Content < OpenAI::Union discriminator :type # A text output from the model. @@ -68,11 +68,11 @@ module Content # end end + # @abstract + # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index a5027854..175d5eda 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -34,10 +34,10 @@ class ResponseOutputText < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A citation to a file. - module Annotation - extend OpenAI::Union - + class Annotation < OpenAI::Union discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 4c410d54..c4d60d3f 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -68,11 +68,11 @@ class Summary < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index 3ec9d99f..e15a7a6e 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Responses + # @abstract + # # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. 
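Because the RBI changes later in this patch move `==` and `===` onto the class itself (`def self.===`), an enum like `ResponseStatus` can appear directly in a `case` expression. A hedged sketch against the members listed above:

# Assumes the class-level ===(other) shown in the base_model.rbi hunk below.
case response.status
when OpenAI::Models::Responses::ResponseStatus
  # one of :completed, :failed, :in_progress, :incomplete
else
  # an unrecognized status value
end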
- module ResponseStatus - extend OpenAI::Enum - + class ResponseStatus < OpenAI::Enum COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 73a45396..85fe7a25 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses + # @abstract + # # Emitted when there is a partial audio response. - module ResponseStreamEvent - extend OpenAI::Union - + class ResponseStreamEvent < OpenAI::Union discriminator :type # Emitted when there is a partial audio response. diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index a8dbcfba..4d980266 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -65,10 +65,10 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A citation to a file. - module Annotation - extend OpenAI::Union - + class Annotation < OpenAI::Union discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index c3d097ce..9093989e 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Responses + # @abstract + # # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - module Tool - extend OpenAI::Union - + class Tool < OpenAI::Union discriminator :type # A tool that searches for relevant content from uploaded files. diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 9ec1a734..e72582b9 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -3,6 +3,8 @@ module OpenAI module Models module Responses + # @abstract + # # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. # # `auto` means the model can pick between generating a message or calling one or # more tools. # # `required` means the model must call one or more tools. - module ToolChoiceOptions - extend OpenAI::Enum - + class ToolChoiceOptions < OpenAI::Enum NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 9d8e05e2..3398854c 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -27,6 +27,8 @@ class ToolChoiceTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of hosted tool the model should use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools).
# @@ -35,9 +37,7 @@ class ToolChoiceTypes < OpenAI::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum FILE_SEARCH = :file_search WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 9341e708..6a2acdc0 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -42,24 +42,24 @@ class WebSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of the web search tool. One of: # # - `web_search_preview` # - `web_search_preview_2025_03_11` - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum WEB_SEARCH_PREVIEW = :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 finalize! end + # @abstract + # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - module SearchContextSize - extend OpenAI::Enum - + class SearchContextSize < OpenAI::Enum LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index c80cbfbd..d96fdbc5 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -2,18 +2,16 @@ module OpenAI module Models - module ResponsesModel - extend OpenAI::Union - + # @abstract + class ResponsesModel < OpenAI::Union variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::ResponsesModel::UnionMember2 } - module UnionMember2 - extend OpenAI::Enum - + # @abstract + class UnionMember2 < OpenAI::Enum O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 507858a6..0e6b2be5 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -76,10 +76,10 @@ class Upload < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The status of the Upload. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum PENDING = :pending COMPLETED = :completed CANCELLED = :cancelled diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 3c74e1d8..7593bb95 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -157,12 +157,12 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum EXPIRED = :expired IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 5effb4ea..ee2a79cf 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -66,11 +66,11 @@ class VectorStoreListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index ea877db0..30d37581 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -76,10 +76,10 @@ class VectorStoreSearchParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # A query string for a search - module Query - extend OpenAI::Union - + class Query < OpenAI::Union StringArray = OpenAI::ArrayOf[String] variant String @@ -93,10 +93,10 @@ module Query # end end + # @abstract + # # A filter to apply based on file attributes. - module Filters - extend OpenAI::Union - + class Filters < OpenAI::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -139,9 +139,8 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Ranker - extend OpenAI::Enum - + # @abstract + class Ranker < OpenAI::Enum AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index ebb55560..233cdad0 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -50,9 +50,8 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float @@ -87,10 +86,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # The type of content. - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum TEXT = :text finalize! diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index ad9ef46e..e35057be 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -49,9 +49,8 @@ class FileBatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 20469fe9..40629c3c 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -84,10 +84,10 @@ class FileBatchListFilesParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - module Filter - extend OpenAI::Enum - + class Filter < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -96,11 +96,11 @@ module Filter finalize! end + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
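As the `ResponsesModel` union in the hunks above shows, `model` fields accept a free-form `String` as well as the symbols of the referenced model enums. A hedged sketch of the accepted values:

# Any of these satisfy the ResponsesModel union declared above.
model = "gpt-4o"                 # plain String variant
model = :"o1-pro"                # ResponsesModel::UnionMember2 enum variant
model = :"computer-use-preview"  # likewise via the enum variant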
- module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 58fbaa08..3e7c7817 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -49,9 +49,8 @@ class FileCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 2f7ffbbc..c227d297 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -78,10 +78,10 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - module Filter - extend OpenAI::Enum - + class Filter < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -90,11 +90,11 @@ module Filter finalize! end + # @abstract + # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index c88b9a73..6b9100d4 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -34,9 +34,8 @@ class FileUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index c3d22aef..5aa034c1 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -129,10 +129,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @abstract + # # One of `server_error`, `unsupported_file`, or `invalid_file`. - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR = :server_error UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file @@ -141,12 +141,12 @@ module Code end end + # @abstract + # # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled @@ -155,9 +155,8 @@ module Status finalize!
end - module Attribute - extend OpenAI::Union - + # @abstract + class Attribute < OpenAI::Union variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index a8628b40..da82a1f6 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -101,11 +101,11 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @abstract + # # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 2fead0cf..3312ef84 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -170,116 +170,132 @@ module OpenAI # # We can therefore convert string values to Symbols, but can't convert other # values safely. - module Enum - include OpenAI::Converter + class Enum + extend OpenAI::Converter - # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } - def values - end + abstract! - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - sig { void } - private def finalize! + Value = type_template(:out) + + class << self + # All of the valid Symbol values for this enum. + sig { overridable.returns(T::Array[Value]) } + def values + end + + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + sig { void } + private def finalize! + end end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) + def self.===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) + def self.==(other) end - # @api private - sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def coerce(value) - end + class << self + # @api private + sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def coerce(value) + end - # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value) - end + # @api private + sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) + end end end # @api private - module Union - include OpenAI::Converter + class Union + extend OpenAI::Converter - # @api private - # - # All of the specified variant info for this union. - sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Converter::Input)]]) } - private def known_variants - end + abstract! - # @api private - sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } - protected def derefed_variants - end + Variants = type_template(:out) - # All of the specified variants for this union. 
- sig { overridable.returns(T::Array[T.anything]) } - def variants - end + class << self + # @api private + # + # All of the specified variant info for this union. + sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(Variants)]]) } + private def known_variants + end - # @api private - sig { params(property: Symbol).void } - private def discriminator(property) - end + # @api private + sig { returns(T::Array[[T.nilable(Symbol), Variants]]) } + protected def derefed_variants + end - # @api private - sig do - params( - key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything), - spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything) - ) - .void - end - private def variant(key, spec = nil) - end + # All of the specified variants for this union. + sig { overridable.returns(T::Array[Variants]) } + def variants + end - # @api private - sig { params(value: T.anything).returns(T.nilable(T.anything)) } - private def resolve_variant(value) + # @api private + sig { params(property: Symbol).void } + private def discriminator(property) + end + + # @api private + sig do + params( + key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants), + spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants) + ) + .void + end + private def variant(key, spec = nil) + end + + # @api private + sig { params(value: T.anything).returns(T.nilable(Variants)) } + private def resolve_variant(value) + end end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) + def self.===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) + def self.==(other) end - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def coerce(value) - end + class << self + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def coerce(value) + end - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) + end end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 5fb326c8..75d67db0 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -2,27 +2,20 @@ module OpenAI module Models - module AllModels - extend OpenAI::Union + class AllModels < OpenAI::Union + abstract! - Variants = - type_template(:out) do - { - fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::UnionMember2::OrSymbol) - } - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - module UnionMember2 - extend OpenAI::Enum + class UnionMember2 < OpenAI::Enum + abstract! 
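# --- Editorial sketch, not part of the patch --------------------------------
# A minimal concrete enum under the class-based scheme above: subclass
# `OpenAI::Enum`, declare the class `abstract!`, fix the `Value` type
# template, and list members as bare Symbol constants. `SampleOrder` is a
# hypothetical name used only for illustration.
class SampleOrder < OpenAI::Enum
  abstract!

  Value = type_template(:out) { {fixed: Symbol} }

  ASC = :asc
  DESC = :desc
end

# Per the `coerce` signature above (String | Symbol in, Symbol out where
# possible), a call like SampleOrder.coerce("asc") would be expected to
# yield :asc, while values that cannot be converted safely pass through.
# -----------------------------------------------------------------------------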
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::UnionMember2) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::UnionMember2::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index c7babe62..e74cec3d 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -18,14 +18,11 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -33,14 +30,11 @@ module OpenAI # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig { returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) } + sig { returns(Symbol) } def voice end - sig do - params(_: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - .returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def voice=(_) end @@ -56,14 +50,11 @@ module OpenAI # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
- sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - .returns(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def response_format=(_) end @@ -80,10 +71,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + model: T.any(String, Symbol), + voice: Symbol, instructions: String, - response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, speed: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -97,10 +88,10 @@ module OpenAI .returns( { input: String, - model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + model: T.any(String, Symbol), + voice: Symbol, instructions: String, - response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, speed: Float, request_options: OpenAI::RequestOptions } @@ -111,48 +102,45 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - module Voice - extend OpenAI::Enum + class Voice < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ALLOY = :alloy + ASH = :ash + CORAL = :coral + ECHO = :echo + FABLE = :fable + ONYX = :onyx + NOVA = :nova + SAGE = :sage + SHIMMER = :shimmer end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
- module ResponseFormat - extend OpenAI::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } - - MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + class ResponseFormat < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + MP3 = :mp3 + OPUS = :opus + AAC = :aac + FLAC = :flac + WAV = :wav + PCM = :pcm end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index d375fcda..5228e000 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -3,15 +3,14 @@ module OpenAI module Models module Audio - module SpeechModel - extend OpenAI::Enum + class SpeechModel < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::OrSymbol) - TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::OrSymbol) - GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::OrSymbol) + TTS_1 = :"tts-1" + TTS_1_HD = :"tts-1-hd" + GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 272b6fc5..026f8b77 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -20,14 +20,11 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -36,14 +33,11 @@ module OpenAI # model's confidence in the transcription. `logprobs` only works with # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. 
- sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def include=(_) end @@ -73,14 +67,11 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: OpenAI::Models::AudioResponseFormat::OrSymbol) - .returns(OpenAI::Models::AudioResponseFormat::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def response_format=(_) end @@ -102,31 +93,24 @@ module OpenAI # Either or both of these options are supported: `word`, or `segment`. Note: There # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. - sig do - returns( - T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) - ) - end + sig { returns(T.nilable(T::Array[Symbol])) } def timestamp_granularities end - sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def timestamp_granularities=(_) end sig do params( file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: T::Array[Symbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -149,13 +133,13 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: T::Array[Symbol], request_options: OpenAI::RequestOptions } ) @@ -166,23 +150,19 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! 
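# --- Editorial sketch, not part of the patch --------------------------------
# With this union's `Variants` template fixed to `T.any(String, Symbol)` (the
# replacement line just below), the `model:` argument accepts either an
# arbitrary model string or one of the `AudioModel` symbols; both calls below
# should type-check the same way. File and variable names are illustrative.
params = OpenAI::Models::Audio::TranscriptionCreateParams.new(
  file: File.open("speech.mp3", "rb"), # IO, per the `file:` sig above
  model: "whisper-1"                   # plain String variant
)
params = OpenAI::Models::Audio::TranscriptionCreateParams.new(
  file: File.open("speech.mp3", "rb"),
  model: :"gpt-4o-transcribe"          # Symbol variant (an AudioModel member)
)
# -----------------------------------------------------------------------------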
- Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end - module TimestampGranularity - extend OpenAI::Enum + class TimestampGranularity < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) - SEGMENT = - T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) + WORD = :word + SEGMENT = :segment end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index e6190f8a..814a204b 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -5,8 +5,8 @@ module OpenAI module Audio # Represents a transcription response returned by model, based on the provided # input. - module TranscriptionCreateResponse - extend OpenAI::Union + class TranscriptionCreateResponse < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 95c1bbab..7a60b02a 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -3,13 +3,12 @@ module OpenAI module Models module Audio - module TranscriptionInclude - extend OpenAI::Enum + class TranscriptionInclude < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::OrSymbol) + LOGPROBS = :logprobs end end end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index 4ca09f44..4c8cc6bc 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -7,8 +7,8 @@ module OpenAI # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. - module TranscriptionStreamEvent - extend OpenAI::Union + class TranscriptionStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 3e9eeb48..ce2e6e77 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -19,14 +19,11 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
- sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -44,14 +41,11 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - .returns(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def response_format=(_) end @@ -71,9 +65,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + model: T.any(String, Symbol), prompt: String, - response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -87,9 +81,9 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + model: T.any(String, Symbol), prompt: String, - response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, request_options: OpenAI::RequestOptions } @@ -100,28 +94,24 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - module ResponseFormat - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } - - JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - VERBOSE_JSON = - T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + class ResponseFormat < OpenAI::Enum + abstract! 
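# --- Editorial sketch, not part of the patch --------------------------------
# In the `lib/` counterparts of these RBI enums (see the `.rb` hunks earlier
# in this patch), members are listed and `finalize!` is then invoked so that
# `@values` is materialized eagerly -- the thread-safety guard documented in
# `base_model.rbi` above. A hypothetical lib-side shape:
class SampleResponseFormat < OpenAI::Enum
  JSON = :json
  TEXT = :text

  finalize!
end
# -----------------------------------------------------------------------------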
+ + Value = type_template(:out) { {fixed: Symbol} } + + JSON = :json + TEXT = :text + SRT = :srt + VERBOSE_JSON = :verbose_json + VTT = :vtt end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 2d27d701..61e32cae 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Audio - module TranslationCreateResponse - extend OpenAI::Union + class TranslationCreateResponse < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index c17c0811..917ce7d8 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -2,15 +2,14 @@ module OpenAI module Models - module AudioModel - extend OpenAI::Enum + class AudioModel < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::OrSymbol) - GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::OrSymbol) - GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::OrSymbol) + WHISPER_1 = :"whisper-1" + GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" + GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index fc0693da..405da3e2 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -5,17 +5,16 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - module AudioResponseFormat - extend OpenAI::Enum + class AudioResponseFormat < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::OrSymbol) - TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::OrSymbol) - SRT = T.let(:srt, OpenAI::Models::AudioResponseFormat::OrSymbol) - VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::OrSymbol) - VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::OrSymbol) + JSON = :json + TEXT = :text + SRT = :srt + VERBOSE_JSON = :verbose_json + VTT = :vtt end end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 75e90393..7a7e54b3 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -57,13 +57,11 @@ module OpenAI end # The current status of the batch. 
- sig { returns(OpenAI::Models::Batch::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Batch::Status::TaggedSymbol).returns(OpenAI::Models::Batch::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -195,7 +193,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Models::Batch::Status::TaggedSymbol, + status: Symbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -247,7 +245,7 @@ module OpenAI endpoint: String, input_file_id: String, object: Symbol, - status: OpenAI::Models::Batch::Status::TaggedSymbol, + status: Symbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -268,20 +266,19 @@ module OpenAI end # The current status of the batch. - module Status - extend OpenAI::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Batch::Status::TaggedSymbol) } - - VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Batch::Status::TaggedSymbol) - FINALIZING = T.let(:finalizing, OpenAI::Models::Batch::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Batch::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Batch::Status::TaggedSymbol) - CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol) + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + VALIDATING = :validating + FAILED = :failed + IN_PROGRESS = :in_progress + FINALIZING = :finalizing + COMPLETED = :completed + EXPIRED = :expired + CANCELLING = :cancelling + CANCELLED = :cancelled end class Errors < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 62413988..50216257 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -8,14 +8,11 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. - sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } + sig { returns(Symbol) } def completion_window end - sig do - params(_: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) - .returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def completion_window=(_) end @@ -23,14 +20,11 @@ module OpenAI # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. 
- sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } + sig { returns(Symbol) } def endpoint end - sig do - params(_: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - .returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def endpoint=(_) end @@ -67,8 +61,8 @@ module OpenAI sig do params( - completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, - endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, + completion_window: Symbol, + endpoint: Symbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -82,8 +76,8 @@ module OpenAI override .returns( { - completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, - endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, + completion_window: Symbol, + endpoint: Symbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -95,31 +89,27 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. - module CompletionWindow - extend OpenAI::Enum + class CompletionWindow < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + NUMBER_24H = :"24h" end # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - module Endpoint - extend OpenAI::Enum + class Endpoint < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - V1_CHAT_COMPLETIONS = - T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_RESPONSES = :"/v1/responses" + V1_CHAT_COMPLETIONS = :"/v1/chat/completions" + V1_EMBEDDINGS = :"/v1/embeddings" + V1_COMPLETIONS = :"/v1/completions" end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index f7f3eec4..32cf935a 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -12,14 +12,11 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
- sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -71,14 +68,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def reasoning_effort end - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def reasoning_effort=(_) end @@ -225,12 +219,12 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -273,12 +267,12 @@ module OpenAI override .returns( { - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -309,10 +303,10 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 7d2b0fef..bb11fb1b 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -43,14 +43,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -59,7 +56,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -74,7 +71,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -84,14 +81,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index d0c034e2..db97be12 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -23,8 +23,8 @@ module OpenAI # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - module AssistantResponseFormatOption - extend OpenAI::Union + class AssistantResponseFormatOption < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 31e96276..7a366c59 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -23,8 +23,8 @@ module OpenAI # gracefully in your code. See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. - module AssistantStreamEvent - extend OpenAI::Union + class AssistantStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index 6efa6193..313f24bc 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Beta - module AssistantTool - extend OpenAI::Union + class AssistantTool < OpenAI::Union + abstract! 
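# --- Editorial sketch, not part of the patch --------------------------------
# Unions expose `===` as a class method too, so the union class itself can
# serve as a `case` matcher over its registered variants. `tool` is a
# hypothetical value returned by the API; `FileSearchTool` is one variant of
# this union, and `handle_tool` is an illustrative helper.
case tool
when OpenAI::Models::Beta::AssistantTool
  handle_tool(tool) # e.g. an OpenAI::Models::Beta::FileSearchTool instance
else
  raise ArgumentError, "not an assistant tool: #{tool.inspect}"
end
# -----------------------------------------------------------------------------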
Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 9a1ac99e..84562850 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -5,14 +5,11 @@ module OpenAI module Beta class AssistantToolChoice < OpenAI::BaseModel # The type of the tool. If type is `function`, the function name must be set - sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - .returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -30,37 +27,24 @@ module OpenAI # Specifies a tool the model should use. Use to force the model to call a specific # tool. sig do - params( - type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: OpenAI::Models::Beta::AssistantToolChoiceFunction - ) - .returns(T.attached_class) + params(type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction).returns(T.attached_class) end def self.new(type:, function: nil) end - sig do - override - .returns( - { - type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: OpenAI::Models::Beta::AssistantToolChoiceFunction - } - ) - end + sig { override.returns({type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction}) } def to_hash end # The type of the tool. If type is `function`, the function name must be set - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - CODE_INTERPRETER = T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + FUNCTION = :function + CODE_INTERPRETER = :code_interpreter + FILE_SEARCH = :file_search end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 14fac828..552ca737 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -10,33 +10,23 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - module AssistantToolChoiceOption - extend OpenAI::Union + class AssistantToolChoiceOption < OpenAI::Union + abstract! - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - } - end + Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)} } # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. 
- module Auto - extend OpenAI::Enum + class Auto < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) + NONE = :none + AUTO = :auto + REQUIRED = :required end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 595f430c..b4ca656b 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -45,14 +45,11 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -71,14 +68,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def reasoning_effort end - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def reasoning_effort=(_) end @@ -228,9 +222,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), + model: T.any(String, Symbol), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -276,9 +270,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), + model: T.any(String, Symbol), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -309,62 +303,41 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
- module Model - extend OpenAI::Union - - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)} } - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } - - O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - O3_MINI_2025_01_31 = - T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O = T.let(:"gpt-4o", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O_2024_11_20 = - T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O_2024_08_06 = - T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O_2024_05_13 = - T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4O_MINI_2024_07_18 = - T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_5_PREVIEW = T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_5_PREVIEW_2025_02_27 = - T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_TURBO_2024_04_09 = - T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_0125_PREVIEW = - T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_TURBO_PREVIEW = - T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_1106_PREVIEW = - T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_VISION_PREVIEW = - T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO_16K = - T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO_0613 = - T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO_1106 = - T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO_0125 = - T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) - GPT_3_5_TURBO_16K_0613 = - 
T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + class Model < OpenAI::Union + abstract! + + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + + O3_MINI = :"o3-mini" + O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" + O1 = :o1 + O1_2024_12_17 = :"o1-2024-12-17" + GPT_4O = :"gpt-4o" + GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" + GPT_4O_MINI = :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW = :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO = :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" + GPT_4 = :"gpt-4" + GPT_4_0314 = :"gpt-4-0314" + GPT_4_0613 = :"gpt-4-0613" + GPT_4_32K = :"gpt-4-32k" + GPT_4_32K_0314 = :"gpt-4-32k-0314" + GPT_4_32K_0613 = :"gpt-4-32k-0613" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 310aab2b..ce4b782a 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -103,14 +103,11 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. - sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def ranker end - sig do - params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def ranker=(_) end @@ -120,44 +117,23 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig do - params( - score_threshold: Float, - ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol - ) - .returns(T.attached_class) - end + sig { params(score_threshold: Float, ranker: Symbol).returns(T.attached_class) } def self.new(score_threshold:, ranker: nil) end - sig do - override - .returns( - { - score_threshold: Float, - ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol - } - ) - end + sig { override.returns({score_threshold: Float, ranker: Symbol}) } def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
- module Ranker - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) } - - AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) - DEFAULT_2024_08_21 = - T.let( - :default_2024_08_21, - OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol - ) + class Ranker < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + AUTO = :auto + DEFAULT_2024_08_21 = :default_2024_08_21 end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 495664c0..97821d4a 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - module MessageStreamEvent - extend OpenAI::Union + class MessageStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 51fa7db0..6b1ae66d 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - module RunStepStreamEvent - extend OpenAI::Union + class RunStepStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index e18dd1f2..5642aefb 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - module RunStreamEvent - extend OpenAI::Union + class RunStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index b20ddd1b..3a5a42f8 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -72,14 +72,11 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - end + sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } def model=(_) end @@ -185,36 +182,13 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. 
- sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) + params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) end def tool_choice=(_) end @@ -312,7 +286,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -324,12 +298,7 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -375,7 +344,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -387,12 +356,7 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -416,10 +380,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class Thread < OpenAI::BaseModel @@ -545,14 +509,11 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
- sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -596,7 +557,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -619,7 +580,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -629,8 +590,8 @@ module OpenAI end # The text contents of the message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -658,17 +619,13 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) - ASSISTANT = - T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + USER = :user + ASSISTANT = :assistant end class Attachment < OpenAI::BaseModel @@ -750,8 +707,8 @@ module OpenAI def to_hash end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -1229,8 +1186,8 @@ module OpenAI end end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -1249,14 +1206,11 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -1272,25 +1226,11 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
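# The enum conversions likewise follow one pattern; the removed and added
# declaration styles differ only in how values are typed. A side-by-side
# sketch with a hypothetical enum (names are illustrative; both snippets
# mirror forms that appear verbatim in these hunks, and they correspond to
# different revisions of OpenAI::Enum, so they would not coexist in one file).
#
# Removed style: constants are tagged symbols, and sigs reference OrSymbol,
# which still admits any plain Symbol via T.any(Symbol, TaggedSymbol):
module ExampleEnumTagged
  extend OpenAI::Enum

  TaggedSymbol = T.type_alias { T.all(Symbol, ExampleEnumTagged) }
  OrSymbol = T.type_alias { T.any(Symbol, ExampleEnumTagged::TaggedSymbol) }

  RED = T.let(:red, ExampleEnumTagged::OrSymbol)
end

# Added style: constants are bare symbols and the permitted value type is
# fixed to Symbol through a type template:
class ExampleEnumPlain < OpenAI::Enum
  abstract!

  Value = type_template(:out) { {fixed: Symbol} }

  RED = :red
end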
- sig do - params( - type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - ) - .returns(T.attached_class) - end + sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } def self.new(type:, last_messages: nil) end - sig do - override - .returns( - { - type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) - end + sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } def to_hash end @@ -1298,17 +1238,13 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) - LAST_MESSAGES = - T.let(:last_messages, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) + AUTO = :auto + LAST_MESSAGES = :last_messages end end end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index e2c429ba..77085982 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -129,14 +129,11 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -178,7 +175,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -201,7 +198,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -211,8 +208,8 @@ module OpenAI end # The text contents of the message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -240,15 +237,13 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + USER = :user + ASSISTANT = :assistant end class Attachment < OpenAI::BaseModel @@ -330,8 +325,8 @@ module OpenAI def to_hash end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 74967cee..1351f46f 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - module Annotation - extend OpenAI::Union + class Annotation < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index 90db2a3c..a388880d 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - module AnnotationDelta - extend OpenAI::Union + class AnnotationDelta < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 61ad33c5..879b0320 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -18,39 +18,32 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def detail end - sig do - params(_: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end - sig do - params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - .returns(T.attached_class) - end + sig { params(file_id: String, detail: Symbol).returns(T.attached_class) } def self.new(file_id:, detail: nil) end - sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } + sig { override.returns({file_id: String, detail: Symbol}) } def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + AUTO = :auto + LOW = :low + HIGH = :high end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index efac1cbe..89c10a01 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -7,14 +7,11 @@ module OpenAI class ImageFileDelta < OpenAI::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def detail end - sig do - params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end @@ -29,32 +26,24 @@ module OpenAI def file_id=(_) end - sig do - params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String) - .returns(T.attached_class) - end + sig { params(detail: Symbol, file_id: String).returns(T.attached_class) } def self.new(detail: nil, file_id: nil) end - sig do - override - .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) - end + sig { override.returns({detail: Symbol, file_id: String}) } def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + AUTO = :auto + LOW = :low + HIGH = :high end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 032d894d..e4aa98eb 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -17,39 +17,32 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def detail end - sig do - params(_: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end - sig do - params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - .returns(T.attached_class) - end + sig { params(url: String, detail: Symbol).returns(T.attached_class) } def self.new(url:, detail: nil) end - sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) } + sig { override.returns({url: String, detail: Symbol}) } def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + AUTO = :auto + LOW = :low + HIGH = :high end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 6e8e3245..a300a59e 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -7,14 +7,11 @@ module OpenAI class ImageURLDelta < OpenAI::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def detail end - sig do - params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end @@ -28,31 +25,24 @@ module OpenAI def url=(_) end - sig do - params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String) - .returns(T.attached_class) - end + sig { params(detail: Symbol, url: String).returns(T.attached_class) } def self.new(detail: nil, url: nil) end - sig do - override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String}) - end + sig { override.returns({detail: Symbol, url: String}) } def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + AUTO = :auto + LOW = :low + HIGH = :high end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 96603146..01311c44 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -141,14 +141,11 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - sig { returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -165,14 +162,11 @@ module OpenAI # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -206,9 +200,9 @@ module OpenAI incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), - role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, + role: Symbol, run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, + status: Symbol, thread_id: String, object: Symbol ) @@ -253,9 +247,9 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, + role: Symbol, run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, + status: Symbol, thread_id: String } ) @@ -342,8 +336,8 @@ module OpenAI def to_hash end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -378,75 +372,57 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason the message is incomplete. - sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + sig { returns(Symbol) } def reason end - sig do - params(_: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def reason=(_) end # On an incomplete message, details about why the message is incomplete. 
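# With the relaxed signature directly below, `reason` is a bare Symbol whose
# intended values are the Reason constants (:content_filter, :max_tokens,
# :run_cancelled, :run_expired, :run_failed). A minimal usage sketch:
details = OpenAI::Models::Beta::Threads::Message::IncompleteDetails.new(reason: :max_tokens)
details.to_hash # => {reason: :max_tokens}, matching the to_hash sig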
- sig do - params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - .returns(T.attached_class) - end + sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason:) end - sig do - override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) - end + sig { override.returns({reason: Symbol}) } def to_hash end # The reason the message is incomplete. - module Reason - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } - - CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - MAX_TOKENS = - T.let(:max_tokens, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - RUN_CANCELLED = - T.let(:run_cancelled, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - RUN_EXPIRED = - T.let(:run_expired, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - RUN_FAILED = - T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + class Reason < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + CONTENT_FILTER = :content_filter + MAX_TOKENS = :max_tokens + RUN_CANCELLED = :run_cancelled + RUN_EXPIRED = :run_expired + RUN_FAILED = :run_failed end end # The entity that produced the message. One of `user` or `assistant`. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + USER = :user + ASSISTANT = :assistant end # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + IN_PROGRESS = :in_progress + INCOMPLETE = :incomplete + COMPLETED = :completed end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 97ed19fe..9e3c7f22 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContent - extend OpenAI::Union + class MessageContent < OpenAI::Union + abstract! 
Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index ec1c6ccc..dce4489b 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContentDelta - extend OpenAI::Union + class MessageContentDelta < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index effc6e6c..e4fb311c 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - module MessageContentPartParam - extend OpenAI::Union + class MessageContentPartParam < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 9c0c17d5..6d8913ff 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -61,14 +61,11 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -110,7 +107,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -134,7 +131,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -145,8 +142,8 @@ module OpenAI end # The text contents of the message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -174,15 +171,13 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + USER = :user + ASSISTANT = :assistant end class Attachment < OpenAI::BaseModel @@ -264,8 +259,8 @@ module OpenAI def to_hash end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index b7bfc61c..7883727c 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -49,14 +49,11 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def role end - sig do - params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -71,7 +68,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol + role: Symbol ) .returns(T.attached_class) end @@ -90,7 +87,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol + role: Symbol } ) end @@ -98,15 +95,13 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + USER = :user + ASSISTANT = :assistant end end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index d80e8d49..4b93b0c8 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -44,14 +44,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
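# Under the relaxed signature below, `order` is any Symbol; :asc and :desc
# are the values named by the Order constants. A hedged sketch, assuming the
# keywords shown in the sig are optional, as in the other *Params classes:
params = OpenAI::Models::Beta::Threads::MessageListParams.new(limit: 20, order: :desc)
params.order # => :desc, equal to MessageListParams::Order::DESC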
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -69,7 +66,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, + order: Symbol, run_id: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -85,7 +82,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, + order: Symbol, run_id: String, request_options: OpenAI::RequestOptions } @@ -96,15 +93,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index d1587a17..0ea43a0c 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -256,14 +256,11 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -284,36 +281,13 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. 
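# Run#tool_choice is typed as T.nilable(T.any(Symbol, AssistantToolChoice)),
# so a plain symbol and a model object both typecheck. A hedged sketch, given
# some run instance `run`; the AssistantToolChoice constructor keyword is an
# assumption, since that model is defined outside these hunks:
run.tool_choice = :auto
run.tool_choice = OpenAI::Models::Beta::AssistantToolChoice.new(type: :file_search) # assumed ctor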
- sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) + params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) end def tool_choice=(_) end @@ -431,14 +405,9 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, + status: Symbol, thread_id: String, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -515,14 +484,9 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, + status: Symbol, thread_id: String, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -543,57 +507,43 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def reason end - sig do - params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def reason=(_) end # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. - sig do - params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - .returns(T.attached_class) - end + sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason: nil) end - sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } + sig { override.returns({reason: Symbol}) } def to_hash end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - module Reason - extend OpenAI::Enum + class Reason < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MAX_COMPLETION_TOKENS = - T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - MAX_PROMPT_TOKENS = - T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + MAX_COMPLETION_TOKENS = :max_completion_tokens + MAX_PROMPT_TOKENS = :max_prompt_tokens end end class LastError < OpenAI::BaseModel # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + sig { returns(Symbol) } def code end - sig do - params(_: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def code=(_) end @@ -607,33 +557,23 @@ module OpenAI end # The last error associated with this run. Will be `null` if there are no errors. - sig do - params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String) - .returns(T.attached_class) - end + sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end - sig do - override - .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) - end + sig { override.returns({code: Symbol, message: String}) } def to_hash end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - module Code - extend OpenAI::Enum + class Code < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - INVALID_PROMPT = - T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + SERVER_ERROR = :server_error + RATE_LIMIT_EXCEEDED = :rate_limit_exceeded + INVALID_PROMPT = :invalid_prompt end end @@ -712,14 +652,11 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -735,25 +672,11 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig do - params( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, - last_messages: T.nilable(Integer) - ) - .returns(T.attached_class) - end + sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } def self.new(type:, last_messages: nil) end - sig do - override - .returns( - { - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, - last_messages: T.nilable(Integer) - } - ) - end + sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } def to_hash end @@ -761,17 +684,13 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) - LAST_MESSAGES = - T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + AUTO = :auto + LAST_MESSAGES = :last_messages end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 46e8ddc5..a54ec011 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -26,14 +26,11 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def include=(_) end @@ -115,14 +112,11 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - end + sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } def model=(_) end @@ -143,14 +137,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. 
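# reasoning_effort becomes a nilable bare Symbol; per the doc comment just
# above, the supported values are :low, :medium, and :high. A hedged sketch
# (the assistant ID is illustrative, other keywords elided):
params = OpenAI::Models::Beta::Threads::RunCreateParams.new(
  assistant_id: "asst_abc123",
  reasoning_effort: :medium
)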
- sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def reasoning_effort end - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def reasoning_effort=(_) end @@ -232,36 +223,13 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } def tool_choice end sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) + params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) end def tool_choice=(_) end @@ -340,16 +308,16 @@ module OpenAI sig do params( assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -359,12 +327,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tools: T.nilable( T::Array[ T.any( @@ -407,16 +370,16 @@ module OpenAI .returns( { assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -426,12 +389,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), 
+ tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tools: T.nilable( T::Array[ T.any( @@ -504,14 +462,11 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -561,7 +516,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -584,7 +539,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -594,8 +549,8 @@ module OpenAI end # The text contents of the message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -623,17 +578,13 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) - ASSISTANT = - T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + USER = :user + ASSISTANT = :assistant end class Attachment < OpenAI::BaseModel @@ -715,8 +666,8 @@ module OpenAI def to_hash end - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -754,10 +705,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end class TruncationStrategy < OpenAI::BaseModel @@ -765,14 +716,11 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
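# The truncation strategy keeps the same two fields under the relaxed typing;
# `type` is a bare Symbol whose intended values are :auto and :last_messages.
# A minimal sketch:
strategy = OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy.new(
  type: :last_messages,
  last_messages: 10 # keep only the ten most recent messages
)
strategy.to_hash # => {type: :last_messages, last_messages: 10}, per the to_hash sig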
- sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -788,25 +736,11 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig do - params( - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - ) - .returns(T.attached_class) - end + sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } def self.new(type:, last_messages: nil) end - sig do - override - .returns( - { - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) - end + sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } def to_hash end @@ -814,17 +748,13 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) - LAST_MESSAGES = - T.let(:last_messages, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + AUTO = :auto + LAST_MESSAGES = :last_messages end end end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 39cc0ab8..44ca10d2 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -44,14 +44,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -60,7 +57,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -75,7 +72,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -85,15 +82,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index a6246d8c..76ada3f1 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -7,21 +7,20 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - module RunStatus - extend OpenAI::Enum + class RunStatus < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - REQUIRES_ACTION = T.let(:requires_action, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - CANCELLING = T.let(:cancelling, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + QUEUED = :queued + IN_PROGRESS = :in_progress + REQUIRES_ACTION = :requires_action + CANCELLING = :cancelling + CANCELLED = :cancelled + FAILED = :failed + COMPLETED = :completed + INCOMPLETE = :incomplete + EXPIRED = :expired end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 749ba465..2b204c7c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -143,8 +143,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - module Output - extend OpenAI::Union + class Output < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index e369160c..6e716d13 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -156,8 +156,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - module Output - extend OpenAI::Union + class Output < OpenAI::Union + abstract! 
Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 13b22ad3..b4f818ee 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -108,22 +108,11 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. - sig do - returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + sig { returns(Symbol) } def ranker end - sig do - params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + sig { params(_: Symbol).returns(Symbol) } def ranker=(_) end @@ -138,53 +127,23 @@ module OpenAI end # The ranking options for the file search. - sig do - params( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, - score_threshold: Float - ) - .returns(T.attached_class) - end + sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker:, score_threshold:) end - sig do - override - .returns( - { - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, - score_threshold: Float - } - ) - end + sig { override.returns({ranker: Symbol, score_threshold: Float}) } def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. - module Ranker - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end - - AUTO = - T.let( - :auto, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - DEFAULT_2024_08_21 = - T.let( - :default_2024_08_21, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) + class Ranker < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + AUTO = :auto + DEFAULT_2024_08_21 = :default_2024_08_21 end end @@ -272,68 +231,29 @@ module OpenAI end # The type of the content. 
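# RankingOptions under the relaxed signature takes a bare Symbol ranker and a
# Float threshold, both required per the sig. A hedged sketch (the threshold
# value is illustrative):
opts = OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions.new(
  ranker: :auto, # or :default_2024_08_21, per the Ranker constants
  score_threshold: 0.7
)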
- sig do - returns( - T.nilable( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - ) - end + sig { returns(T.nilable(Symbol)) } def type end - sig do - params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end - sig do - params( - text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - .returns(T.attached_class) - end + sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text: nil, type: nil) end - sig do - override - .returns( - { - text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - } - ) - end + sig { override.returns({text: String, type: Symbol}) } def to_hash end # The type of the content. - module Type - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - end - - TEXT = - T.let( - :text, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) + class Type < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + TEXT = :text end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 06e7495e..d6fc02be 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -120,14 +120,11 @@ module OpenAI # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -171,14 +168,11 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. 
- sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -208,13 +202,13 @@ module OpenAI last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, + status: Symbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, + type: Symbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage), object: Symbol ) @@ -255,13 +249,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, + status: Symbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, + type: Symbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage) } ) @@ -271,14 +265,11 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + sig { returns(Symbol) } def code end - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def code=(_) end @@ -293,57 +284,42 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. - sig do - params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String) - .returns(T.attached_class) - end + sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end - sig do - override - .returns( - {code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String} - ) - end + sig { override.returns({code: Symbol, message: String}) } def to_hash end # One of `server_error` or `rate_limit_exceeded`. - module Code - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } - - SERVER_ERROR = - T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) - RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + class Code < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + SERVER_ERROR = :server_error + RATE_LIMIT_EXCEEDED = :rate_limit_exceeded end end # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. 
- module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + IN_PROGRESS = :in_progress + CANCELLED = :cancelled + FAILED = :failed + COMPLETED = :completed + EXPIRED = :expired end # The details of the run step. - module StepDetails - extend OpenAI::Union + class StepDetails < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -357,16 +333,13 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MESSAGE_CREATION = - T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + MESSAGE_CREATION = :message_creation + TOOL_CALLS = :tool_calls end class Usage < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index cbc714ec..6dca9ad3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -65,8 +65,8 @@ module OpenAI end # The details of the run step. - module StepDetails - extend OpenAI::Union + class StepDetails < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 9e59ca4b..566dd76e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -5,18 +5,13 @@ module OpenAI module Beta module Threads module Runs - module RunStepInclude - extend OpenAI::Enum + class RunStepInclude < OpenAI::Enum + abstract! 
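# --- Illustration (not part of the patch): with `RunStep#status` typed as a
# bare Symbol, callers can branch with ordinary pattern matching. `step` is a
# hypothetical RunStep instance; the status values are the enum members above.
case step.status
in :in_progress
  puts "step still running"
in :cancelled | :failed | :expired
  warn "step did not finish"
in :completed
  puts "step done"
end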
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = - T.let( - :"step_details.tool_calls[*].file_search.results[*].content", - OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol - ) + :"step_details.tool_calls[*].file_search.results[*].content" end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index ef62a39d..73059bac 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -48,14 +48,11 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def include=(_) end @@ -71,14 +68,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -87,9 +81,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -104,9 +98,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -116,15 +110,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! 
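# --- Illustration (not part of the patch): list parameters now take symbol
# literals directly. A sketch using the keyword constructor declared in the
# sigs above; the thread ID is a placeholder.
params = OpenAI::Models::Beta::Threads::Runs::StepListParams.new(
  thread_id: "thread_abc123",
  include: [:"step_details.tool_calls[*].file_search.results[*].content"],
  limit: 20,
  order: :desc
)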
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 54eb0d50..71dc2e52 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -32,14 +32,11 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def include=(_) end @@ -47,7 +44,7 @@ module OpenAI params( thread_id: String, run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -61,7 +58,7 @@ module OpenAI { thread_id: String, run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 85c08734..2550325a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - module ToolCall - extend OpenAI::Union + class ToolCall < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 0043de5a..37550da6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - module ToolCallDelta - extend OpenAI::Union + class ToolCallDelta < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index d47b79d1..508ca188 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -54,14 +54,11 @@ module OpenAI end # The service tier used for processing the request. 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def service_tier end - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def service_tier=(_) end @@ -94,7 +91,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), + service_tier: T.nilable(Symbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage, object: Symbol @@ -122,7 +119,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), + service_tier: T.nilable(Symbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage } @@ -138,14 +135,11 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + sig { returns(Symbol) } def finish_reason end - sig do - params(_: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def finish_reason=(_) end @@ -183,7 +177,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, + finish_reason: Symbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -197,7 +191,7 @@ module OpenAI override .returns( { - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, + finish_reason: Symbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -213,20 +207,16 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - module FinishReason - extend OpenAI::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } - - STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - FUNCTION_CALL = - T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + class FinishReason < OpenAI::Enum + abstract! 
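# --- Illustration (not part of the patch): `finish_reason` on a completion
# choice is likewise a plain Symbol. `completion` stands in for a parsed
# OpenAI::Models::Chat::ChatCompletion and is assumed rather than built here.
reason = completion.choices.fetch(0).finish_reason
truncated = reason == :length # model stopped because it hit the token limit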
+ + Value = type_template(:out) { {fixed: Symbol} } + + STOP = :stop + LENGTH = :length + TOOL_CALLS = :tool_calls + CONTENT_FILTER = :content_filter + FUNCTION_CALL = :function_call end class Logprobs < OpenAI::BaseModel @@ -280,15 +270,13 @@ module OpenAI end # The service tier used for processing the request. - module ServiceTier - extend OpenAI::Enum + class ServiceTier < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) + SCALE = :scale + DEFAULT = :default end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 737d825e..bdba736c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -197,8 +197,8 @@ module OpenAI # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -223,8 +223,8 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - module ArrayOfContentPart - extend OpenAI::Union + class ArrayOfContentPart < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 2cd8fe2e..215adc55 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -6,88 +6,64 @@ module OpenAI class ChatCompletionAudioParam < OpenAI::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } + sig { returns(Symbol) } def format_ end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def format_=(_) end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } + sig { returns(Symbol) } def voice end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def voice=(_) end # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). 
- sig do - params( - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol - ) - .returns(T.attached_class) - end + sig { params(format_: Symbol, voice: Symbol).returns(T.attached_class) } def self.new(format_:, voice:) end - sig do - override - .returns( - { - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol - } - ) - end + sig { override.returns({format_: Symbol, voice: Symbol}) } def to_hash end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - module Format - extend OpenAI::Enum + class Format < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + WAV = :wav + MP3 = :mp3 + FLAC = :flac + OPUS = :opus + PCM16 = :pcm16 end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - module Voice - extend OpenAI::Enum + class Voice < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + ALLOY = :alloy + ASH = :ash + BALLAD = :ballad + CORAL = :coral + ECHO = :echo + SAGE = :sage + SHIMMER = :shimmer + VERSE = :verse end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index f5530b98..647695a9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -56,14 +56,11 @@ module OpenAI end # The service tier used for processing the request. 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def service_tier end - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def service_tier=(_) end @@ -102,7 +99,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), + service_tier: T.nilable(Symbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage), object: Symbol @@ -130,7 +127,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), + service_tier: T.nilable(Symbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage) } @@ -158,14 +155,11 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def finish_reason end - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def finish_reason=(_) end @@ -193,7 +187,7 @@ module OpenAI sig do params( delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), + finish_reason: T.nilable(Symbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) ) @@ -207,7 +201,7 @@ module OpenAI .returns( { delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), + finish_reason: T.nilable(Symbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) } @@ -249,14 +243,11 @@ module OpenAI end # The role of the author of this message. 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def role end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end @@ -277,7 +268,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, + role: Symbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) .returns(T.attached_class) @@ -292,7 +283,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, + role: Symbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } ) @@ -334,21 +325,16 @@ module OpenAI end # The role of the author of this message. - module Role - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } - - DEVELOPER = - T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - ASSISTANT = - T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + class Role < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + DEVELOPER = :developer + SYSTEM = :system + USER = :user + ASSISTANT = :assistant + TOOL = :tool end class ToolCall < OpenAI::BaseModel @@ -381,14 +367,11 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -397,7 +380,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + type: Symbol ) .returns(T.attached_class) end @@ -411,7 +394,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + type: Symbol } ) end @@ -450,16 +433,12 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. 
- module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - FUNCTION = - T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + FUNCTION = :function end end end @@ -470,22 +449,16 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - module FinishReason - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } - - STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - TOOL_CALLS = - T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - FUNCTION_CALL = - T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + class FinishReason < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + STOP = :stop + LENGTH = :length + TOOL_CALLS = :tool_calls + CONTENT_FILTER = :content_filter + FUNCTION_CALL = :function_call end class Logprobs < OpenAI::BaseModel @@ -539,15 +512,13 @@ module OpenAI end # The service tier used for processing the request. - module ServiceTier - extend OpenAI::Enum + class ServiceTier < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) + SCALE = :scale + DEFAULT = :default end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 340e2a97..bff95f2d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -5,8 +5,8 @@ module OpenAI module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - module ChatCompletionContentPart - extend OpenAI::Union + class ChatCompletionContentPart < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 01893dab..78ef980f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -50,49 +50,32 @@ module OpenAI # Specifies the detail level of the image. 
Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def detail end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end - sig do - params( - url: String, - detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol - ) - .returns(T.attached_class) - end + sig { params(url: String, detail: Symbol).returns(T.attached_class) } def self.new(url:, detail: nil) end - sig do - override - .returns( - {url: String, detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol} - ) - end + sig { override.returns({url: String, detail: Symbol}) } def to_hash end # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + AUTO = :auto + LOW = :low + HIGH = :high end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 0a14a898..87144715 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -52,52 +52,30 @@ module OpenAI end # The format of the encoded audio data. Currently supports "wav" and "mp3". - sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) } + sig { returns(Symbol) } def format_ end - sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def format_=(_) end - sig do - params( - data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol - ) - .returns(T.attached_class) - end + sig { params(data: String, format_: Symbol).returns(T.attached_class) } def self.new(data:, format_:) end - sig do - override - .returns( - { - data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol - } - ) - end + sig { override.returns({data: String, format_: Symbol}) } def to_hash end # The format of the encoded audio data. Currently supports "wav" and "mp3". 
- module Format - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) } - - WAV = - T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) - MP3 = - T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + class Format < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + WAV = :wav + MP3 = :mp3 end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 935cbc88..7c1f2e46 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the developer message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 7dbb72ea..6d17e25c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -6,8 +6,8 @@ module OpenAI # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - module ChatCompletionMessageParam - extend OpenAI::Union + class ChatCompletionMessageParam < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 5e747ff9..25501740 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -3,14 +3,13 @@ module OpenAI module Models module Chat - module ChatCompletionModality - extend OpenAI::Enum + class ChatCompletionModality < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) - AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) + TEXT = :text + AUDIO = :audio end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 9773677d..206b2990 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -52,8 +52,8 @@ module OpenAI # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! 
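# --- Illustration (not part of the patch): the Content unions in these hunks
# all fix their variants to T.any(String, T::Array[ChatCompletionContentPartText]),
# i.e. a bare string or an array of text parts. Both shapes, sketched with
# placeholder values; the `text:` keyword on the part model is assumed from
# the generated constructors, not shown in this hunk.
content_as_string = "Respond in French."
content_as_parts = [
  OpenAI::Models::Chat::ChatCompletionContentPartText.new(text: "Respond in French.")
]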
Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index dd316075..e2a2c70e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -4,18 +4,17 @@ module OpenAI module Models module Chat # The role of the author of a message - module ChatCompletionRole - extend OpenAI::Enum + class ChatCompletionRole < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) - USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) - TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) - FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + DEVELOPER = :developer + SYSTEM = :system + USER = :user + ASSISTANT = :assistant + TOOL = :tool + FUNCTION = :function end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index ca2dc2e7..9d79c62a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the system message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 9a79b1ff..c9ad2647 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -12,32 +12,23 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. - module ChatCompletionToolChoiceOption - extend OpenAI::Union + class ChatCompletionToolChoiceOption < OpenAI::Union + abstract! Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ) - } - end + type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. - module Auto - extend OpenAI::Enum + class Auto < OpenAI::Enum + abstract! 
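# --- Illustration (not part of the patch): `tool_choice` now admits either a
# bare Symbol (:none, :auto, :required) or a ChatCompletionNamedToolChoice.
# The symbol form needs no wrapper:
tool_choice = :required
# The object form pins a specific function; its exact keyword shape is assumed
# from the named-tool-choice model rather than shown in this hunk:
# tool_choice = OpenAI::Models::Chat::ChatCompletionNamedToolChoice.new(
#   function: {name: "get_weather"}
# )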
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) + NONE = :none + AUTO = :auto + REQUIRED = :required end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 6f5f249b..7c447076 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -59,8 +59,8 @@ module OpenAI end # The contents of the tool message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index dde9e769..53c7c3a7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -120,8 +120,8 @@ module OpenAI end # The contents of the user message. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 851c6948..f0e77033 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -64,14 +64,11 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -114,32 +111,13 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. 
- sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ) - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption))) } def function_call end sig do - params( - _: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ) - ) - .returns( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ) - ) + params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) + .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) end def function_call=(_) end @@ -235,14 +213,11 @@ module OpenAI # this model generate both text and audio responses, you can use: # # `["text", "audio"]` - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def modalities end - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) - end + sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } def modalities=(_) end @@ -298,14 +273,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def reasoning_effort end - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def reasoning_effort=(_) end @@ -379,14 +351,11 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def service_tier end - sig do - params(_: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def service_tier=(_) end @@ -453,32 +422,13 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. 
- sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ) - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice))) } def tool_choice end sig do - params( - _: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ) - ) - .returns( - T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ) - ) + params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) + .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) end def tool_choice=(_) end @@ -558,40 +508,34 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ), + function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: T.nilable(Symbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ), + tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -650,40 +594,34 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ), + function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], 
logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: T.nilable(Symbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ), + tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -701,10 +639,10 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # Deprecated in favor of `tool_choice`. @@ -721,34 +659,22 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. - module FunctionCall - extend OpenAI::Union + class FunctionCall < OpenAI::Union + abstract! Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ) - } - end + type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. - module FunctionCallMode - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) } - - NONE = - T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) - AUTO = - T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) + class FunctionCallMode < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + NONE = :none + AUTO = :auto end end @@ -802,15 +728,13 @@ module OpenAI end end - module Modality - extend OpenAI::Enum + class Modality < OpenAI::Enum + abstract! 
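# --- Illustration (not part of the patch): with the enums relaxed to Symbols,
# a create-params hash reads as literals end to end. A sketch under the sigs
# above; the model name and message shape are placeholders, and whether plain
# hashes are coerced into message param models is an assumption here.
params = {
  model: "gpt-4o",
  messages: [{role: :user, content: "Say hello"}],
  modalities: [:text, :audio],
  function_call: :auto,
  service_tier: :default
}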
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) - AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) + TEXT = :text + AUDIO = :audio end # An object specifying the format that the model must output. @@ -823,8 +747,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - module ResponseFormat - extend OpenAI::Union + class ResponseFormat < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -852,21 +776,19 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - module ServiceTier - extend OpenAI::Enum + class ServiceTier < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) + AUTO = :auto + DEFAULT = :default end # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - module Stop - extend OpenAI::Union + class Stop < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } @@ -876,18 +798,11 @@ module OpenAI class WebSearchOptions < OpenAI::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig do - returns( - T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - ) - end + sig { returns(T.nilable(Symbol)) } def search_context_size end - sig do - params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def search_context_size=(_) end @@ -908,7 +823,7 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( - search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, + search_context_size: Symbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) ) .returns(T.attached_class) @@ -920,7 +835,7 @@ module OpenAI override .returns( { - search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, + search_context_size: Symbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) } ) @@ -930,28 +845,14 @@ module OpenAI # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. 
`medium` is the default. - module SearchContextSize - extend OpenAI::Enum + class SearchContextSize < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol - ) - end + Value = type_template(:out) { {fixed: Symbol} } - LOW = - T.let(:low, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - MEDIUM = - T.let( - :medium, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol - ) - HIGH = - T.let(:high, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + LOW = :low + MEDIUM = :medium + HIGH = :high end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 096a12f8..d4ce52ba 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -47,14 +47,11 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) - .returns(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -64,7 +61,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -80,7 +77,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -90,15 +87,13 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index d0cfdba1..014bdbc9 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -28,14 +28,11 @@ module OpenAI # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
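With `order` loosened to a bare Symbol, list-params call sites can pass the enum value directly. A minimal sketch using the `self.new` signature from the completion_list_params hunk above (the limit value is illustrative, and the surrounding client call is assumed):

  params = OpenAI::Models::Chat::CompletionListParams.new(
    limit: 20,
    order: :desc # was OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol
  )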
- sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) - .returns(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -43,7 +40,7 @@ module OpenAI params( after: String, limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -52,31 +49,27 @@ module OpenAI end sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: Symbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 20705fb9..9ec815a5 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -2,60 +2,54 @@ module OpenAI module Models - module ChatModel - extend OpenAI::Enum + class ChatModel < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ChatModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::OrSymbol) - O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::OrSymbol) - O1 = T.let(:o1, OpenAI::Models::ChatModel::OrSymbol) - O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::OrSymbol) - O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) - O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::OrSymbol) - O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_AUDIO_PREVIEW_2024_10_01 = - T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_SEARCH_PREVIEW = T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) - CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_16K = T.let(:"gpt-3.5-turbo-16k", 
OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::OrSymbol) + O3_MINI = :"o3-mini" + O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" + O1 = :o1 + O1_2024_12_17 = :"o1-2024-12-17" + O1_PREVIEW = :"o1-preview" + O1_PREVIEW_2024_09_12 = :"o1-preview-2024-09-12" + O1_MINI = :"o1-mini" + O1_MINI_2024_09_12 = :"o1-mini-2024-09-12" + GPT_4O = :"gpt-4o" + GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" + GPT_4O_AUDIO_PREVIEW = :"gpt-4o-audio-preview" + GPT_4O_AUDIO_PREVIEW_2024_10_01 = :"gpt-4o-audio-preview-2024-10-01" + GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" + GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" + GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" + GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" + GPT_4O_MINI_SEARCH_PREVIEW = :"gpt-4o-mini-search-preview" + GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" + CHATGPT_4O_LATEST = :"chatgpt-4o-latest" + GPT_4O_MINI = :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" + GPT_4_TURBO = :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" + GPT_4 = :"gpt-4" + GPT_4_0314 = :"gpt-4-0314" + GPT_4_0613 = :"gpt-4-0613" + GPT_4_32K = :"gpt-4-32k" + GPT_4_32K_0314 = :"gpt-4-32k-0314" + GPT_4_32K_0613 = :"gpt-4-32k-0613" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0301 = :"gpt-3.5-turbo-0301" + GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 8ceb7ea4..bbb84c5d 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -20,14 +20,11 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::ComparisonFilter::Type::OrSymbol) - .returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -44,22 +41,12 @@ module OpenAI # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. 
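The simplified filter signature reads naturally at call sites. A minimal sketch using the constructor from this hunk (the attribute key and value are made up for illustration):

  filter = OpenAI::Models::ComparisonFilter.new(
    key: "price",  # attribute key to test (hypothetical)
    type: :lte,    # one of :eq, :ne, :gt, :gte, :lt, :lte
    value: 100.0   # String, Float, or Boolean
  )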
sig do - params( - key: String, - type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, - value: T.any(String, Float, T::Boolean) - ) - .returns(T.attached_class) + params(key: String, type: Symbol, value: T.any(String, Float, T::Boolean)).returns(T.attached_class) end def self.new(key:, type:, value:) end - sig do - override - .returns( - {key: String, type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean)} - ) - end + sig { override.returns({key: String, type: Symbol, value: T.any(String, Float, T::Boolean)}) } def to_hash end @@ -71,24 +58,23 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + EQ = :eq + NE = :ne + GT = :gt + GTE = :gte + LT = :lt + LTE = :lte end # The value to compare against the attribute key; supports string, number, or # boolean types. - module Value - extend OpenAI::Union + class Value < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index a03a5dd9..93b41ebf 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -7,14 +7,11 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + sig { returns(Symbol) } def finish_reason end - sig do - params(_: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - .returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def finish_reason=(_) end @@ -47,7 +44,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, + finish_reason: Symbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -61,7 +58,7 @@ module OpenAI override .returns( { - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, + finish_reason: Symbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -75,15 +72,14 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - module FinishReason - extend OpenAI::Enum + class FinishReason < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + STOP = :stop + LENGTH = :length + CONTENT_FILTER = :content_filter end class Logprobs < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 4b26334b..eb7a6ceb 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -11,14 +11,11 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -250,7 +247,7 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), prompt: T.nilable( T.any( String, @@ -304,7 +301,7 @@ module OpenAI override .returns( { - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), prompt: T.nilable( T.any( String, @@ -340,19 +337,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union - - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)} } + class Model < OpenAI::Union + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - GPT_3_5_TURBO_INSTRUCT = - T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" + DAVINCI_002 = :"davinci-002" + BABBAGE_002 = :"babbage-002" end # The prompt(s) to generate completions for, encoded as a string, array of @@ -361,8 +353,8 @@ module OpenAI # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. - module Prompt - extend OpenAI::Union + class Prompt < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -378,8 +370,8 @@ module OpenAI # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. 
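Since `model` is now `T.any(String, Symbol)` and `stop` remains a nilable String-or-Array union, both spellings type-check. A sketch, assuming the usual keyword constructor for this params class (prompt text is illustrative):

  OpenAI::Models::CompletionCreateParams.new(
    model: :"gpt-3.5-turbo-instruct", # or any model name as a String
    prompt: "Say this is a test",
    stop: ["\n"]                      # a single String also satisfies the union
  )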
- module Stop - extend OpenAI::Union + class Stop < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 6a79164a..6722f10d 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -17,57 +17,42 @@ module OpenAI end # Type of operation: `and` or `or`. - sig { returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::CompoundFilter::Type::OrSymbol) - .returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end # Combine multiple filters using `and` or `or`. sig do - params( - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], - type: OpenAI::Models::CompoundFilter::Type::OrSymbol - ) + params(filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol) .returns(T.attached_class) end def self.new(filters:, type:) end - sig do - override - .returns( - { - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], - type: OpenAI::Models::CompoundFilter::Type::OrSymbol - } - ) - end + sig { override.returns({filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol}) } def to_hash end # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - module Filter - extend OpenAI::Union + class Filter < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } end # Type of operation: `and` or `or`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::OrSymbol) - OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::OrSymbol) + AND = :and + OR = :or end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index bc8012b1..3f30f067 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -30,14 +30,11 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -53,14 +50,11 @@ module OpenAI # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
- sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def encoding_format end - sig do - params(_: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - .returns(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def encoding_format=(_) end @@ -78,9 +72,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), + model: T.any(String, Symbol), dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, + encoding_format: Symbol, user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -94,9 +88,9 @@ module OpenAI .returns( { input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), + model: T.any(String, Symbol), dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, + encoding_format: Symbol, user: String, request_options: OpenAI::RequestOptions } @@ -113,8 +107,8 @@ module OpenAI # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - module Input - extend OpenAI::Union + class Input < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -133,23 +127,21 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). - module EncodingFormat - extend OpenAI::Enum + class EncodingFormat < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + FLOAT = :float + BASE64 = :base64 end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 2b064f56..1bd2eac2 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -2,15 +2,14 @@ module OpenAI module Models - module EmbeddingModel - extend OpenAI::Enum + class EmbeddingModel < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::OrSymbol) - TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::OrSymbol) - TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" + TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" + TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index eb1f53b5..aad68d3d 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # The strategy used to chunk the file. - module FileChunkingStrategy - extend OpenAI::Union + class FileChunkingStrategy < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 211e8f69..07560304 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -4,8 +4,8 @@ module OpenAI module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - module FileChunkingStrategyParam - extend OpenAI::Union + class FileChunkingStrategyParam < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index fbd7b9d0..98619f88 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -19,18 +19,18 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { returns(Symbol) } def purpose end - sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { params(_: Symbol).returns(Symbol) } def purpose=(_) end sig do params( file: T.any(IO, StringIO), - purpose: OpenAI::Models::FilePurpose::OrSymbol, + purpose: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -39,14 +39,13 @@ module OpenAI end sig do - override - .returns( - { - file: T.any(IO, StringIO), - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + file: T.any(IO, StringIO), + purpose: Symbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 4af7b2fc..48b8106e 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -30,14 +30,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
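A sketch of the simplified call site for listing files, using the `self.new` signature in this hunk (the `purpose` filter value is illustrative):

  params = OpenAI::Models::FileListParams.new(
    limit: 100,
    order: :desc,        # :asc or :desc
    purpose: "fine-tune"
  )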
- sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::FileListParams::Order::OrSymbol) - .returns(OpenAI::Models::FileListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -54,7 +51,7 @@ module OpenAI params( after: String, limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, + order: Symbol, purpose: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -69,7 +66,7 @@ module OpenAI { after: String, limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, + order: Symbol, purpose: String, request_options: OpenAI::RequestOptions } @@ -80,14 +77,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 2918d2f0..b31df148 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -51,27 +51,21 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + sig { returns(Symbol) } def purpose end - sig do - params(_: OpenAI::Models::FileObject::Purpose::TaggedSymbol) - .returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def purpose=(_) end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::FileObject::Status::TaggedSymbol) - .returns(OpenAI::Models::FileObject::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -101,8 +95,8 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, - status: OpenAI::Models::FileObject::Status::TaggedSymbol, + purpose: Symbol, + status: Symbol, expires_at: Integer, status_details: String, object: Symbol @@ -131,8 +125,8 @@ module OpenAI created_at: Integer, filename: String, object: Symbol, - purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, - status: OpenAI::Models::FileObject::Status::TaggedSymbol, + purpose: Symbol, + status: Symbol, expires_at: Integer, status_details: String } @@ -144,32 +138,30 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - module Purpose - extend OpenAI::Enum + class Purpose < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - BATCH = T.let(:batch, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - BATCH_OUTPUT = T.let(:batch_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FileObject::Purpose::TaggedSymbol) - FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) - VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + ASSISTANTS = :assistants + ASSISTANTS_OUTPUT = :assistants_output + BATCH = :batch + BATCH_OUTPUT = :batch_output + FINE_TUNE = :"fine-tune" + FINE_TUNE_RESULTS = :"fine-tune-results" + VISION = :vision end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) - PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) - ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) + UPLOADED = :uploaded + PROCESSED = :processed + ERROR = :error end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 77bba00f..07bd11c5 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -6,18 +6,17 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - module FilePurpose - extend OpenAI::Enum + class FilePurpose < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FilePurpose::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::OrSymbol) - BATCH = T.let(:batch, OpenAI::Models::FilePurpose::OrSymbol) - FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::OrSymbol) - VISION = T.let(:vision, OpenAI::Models::FilePurpose::OrSymbol) - USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::OrSymbol) - EVALS = T.let(:evals, OpenAI::Models::FilePurpose::OrSymbol) + ASSISTANTS = :assistants + BATCH = :batch + FINE_TUNE = :"fine-tune" + VISION = :vision + USER_DATA = :user_data + EVALS = :evals end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index f688b4b2..b4326f0b 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -117,14 +117,11 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -221,7 +218,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, + status: Symbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -271,7 +268,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, + status: Symbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -384,24 +381,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -409,20 +406,17 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - VALIDATING_FILES = - T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - QUEUED = T.let(:queued, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - RUNNING = T.let(:running, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - SUCCEEDED = T.let(:succeeded, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + VALIDATING_FILES = :validating_files + QUEUED = :queued + RUNNING = :running + SUCCEEDED = :succeeded + FAILED = :failed + CANCELLED = :cancelled end class Method < OpenAI::BaseModel @@ -451,14 +445,11 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -467,7 +458,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + type: Symbol ) .returns(T.attached_class) end @@ -480,7 +471,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + type: Symbol } ) end @@ -585,32 +576,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - module Beta - extend OpenAI::Union + class Beta < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -703,24 +694,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! 
Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -728,15 +719,13 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + SUPERVISED = :supervised + DPO = :dpo end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 7bedb937..fb1daf42 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -23,14 +23,11 @@ module OpenAI end # The log level of the event. - sig { returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + sig { returns(Symbol) } def level end - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def level=(_) end @@ -62,14 +59,11 @@ module OpenAI end # The type of event. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -78,10 +72,10 @@ module OpenAI params( id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, + level: Symbol, message: String, data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol, + type: Symbol, object: Symbol ) .returns(T.attached_class) @@ -95,11 +89,11 @@ module OpenAI { id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, + level: Symbol, message: String, object: Symbol, data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + type: Symbol } ) end @@ -107,28 +101,24 @@ module OpenAI end # The log level of the event. - module Level - extend OpenAI::Enum + class Level < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + INFO = :info + WARN = :warn + ERROR = :error end # The type of event. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + MESSAGE = :message + METRICS = :metrics end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 9142e294..29a4f7bf 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -9,14 +9,11 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } + sig { returns(T.any(String, Symbol)) } def model end - sig do - params(_: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -142,7 +139,7 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -173,7 +170,7 @@ module OpenAI override .returns( { - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -191,20 +188,15 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - module Model - extend OpenAI::Union - - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)} } + class Model < OpenAI::Union + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + BABBAGE_002 = :"babbage-002" + DAVINCI_002 = :"davinci-002" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_4O_MINI = :"gpt-4o-mini" end class Hyperparameters < OpenAI::BaseModel @@ -266,24 +258,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -424,14 +416,11 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -440,7 +429,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol + type: Symbol ) .returns(T.attached_class) end @@ -453,7 +442,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol + type: Symbol } ) end @@ -558,32 +547,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - module Beta - extend OpenAI::Union + class Beta < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
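These hyperparameter unions accept either `:auto` or a concrete number, which is easiest to see at a call site. A sketch, assuming the generated `Hyperparameters` constructor takes the three keywords declared in this file:

  hyperparameters = OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters.new(
    batch_size: :auto,              # Symbol or Integer
    learning_rate_multiplier: 0.05, # Symbol or Float
    n_epochs: 3                     # Symbol or Integer
  )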
- module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -678,24 +667,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Union + class BatchSize < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Union + class LearningRateMultiplier < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - module NEpochs - extend OpenAI::Union + class NEpochs < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -703,15 +692,13 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + SUPERVISED = :supervised + DPO = :dpo end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 09479fbe..285e3c70 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -18,14 +18,11 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end + sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } def model=(_) end @@ -42,27 +39,21 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def response_format=(_) end # The size of the generated images. 
Must be one of `256x256`, `512x512`, or # `1024x1024`. - sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def size end - sig do - params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def size=(_) end @@ -80,10 +71,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -97,10 +88,10 @@ module OpenAI .returns( { image: T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: OpenAI::RequestOptions } @@ -111,37 +102,34 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum + class ResponseFormat < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - module Size - extend OpenAI::Enum + class Size < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 02a9dc31..b672e912 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -39,14 +39,11 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end + sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } def model=(_) end @@ -62,27 +59,21 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
- sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def size end - sig do - params(_: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def size=(_) end @@ -102,10 +93,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -131,10 +122,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: OpenAI::RequestOptions } @@ -145,36 +136,34 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum + class ResponseFormat < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - module Size - extend OpenAI::Enum + class Size < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 4754ce0e..7fccc54c 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -17,14 +17,11 @@ module OpenAI end # The model to use for image generation. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end + sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } def model=(_) end @@ -41,42 +38,33 @@ module OpenAI # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def quality end - sig do - params(_: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - .returns(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def quality=(_) end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def response_format end - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def size end - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def size=(_) end @@ -84,14 +72,11 @@ module OpenAI # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
- sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def style end - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def style=(_) end @@ -109,12 +94,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), + quality: Symbol, + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), + style: T.nilable(Symbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -138,12 +123,12 @@ module OpenAI .returns( { prompt: String, - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), + quality: Symbol, + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), + style: T.nilable(Symbol), user: String, request_options: OpenAI::RequestOptions } @@ -153,67 +138,62 @@ module OpenAI end # The model to use for image generation. - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - module Quality - extend OpenAI::Enum + class Quality < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + STANDARD = :standard + HD = :hd end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - module ResponseFormat - extend OpenAI::Enum + class ResponseFormat < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) + URL = :url + B64_JSON = :b64_json end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - module Size - extend OpenAI::Enum + class Size < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_256X256 = :"256x256" + NUMBER_512X512 = :"512x512" + NUMBER_1024X1024 = :"1024x1024" + NUMBER_1792X1024 = :"1792x1024" + NUMBER_1024X1792 = :"1024x1792" end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. - module Style - extend OpenAI::Enum + class Style < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) - NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) + VIVID = :vivid + NATURAL = :natural end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 3cc4d5f4..6716a390 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -2,14 +2,13 @@ module OpenAI module Models - module ImageModel - extend OpenAI::Enum + class ImageModel < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::OrSymbol) - DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::OrSymbol) + DALL_E_2 = :"dall-e-2" + DALL_E_3 = :"dall-e-3" end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 0ebd7ee8..c430dd8a 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -265,193 +265,138 @@ module OpenAI class CategoryAppliedInputTypes < OpenAI::BaseModel # The applied input type(s) for the category 'harassment'. 
- sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def harassment end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def harassment=(_) end # The applied input type(s) for the category 'harassment/threatening'. - sig do - returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) - end + sig { returns(T::Array[Symbol]) } def harassment_threatening end - sig do - params( - _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def harassment_threatening=(_) end # The applied input type(s) for the category 'hate'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def hate end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def hate=(_) end # The applied input type(s) for the category 'hate/threatening'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def hate_threatening end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def hate_threatening=(_) end # The applied input type(s) for the category 'illicit'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def illicit end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def illicit=(_) end # The applied input type(s) for the category 'illicit/violent'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def illicit_violent end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def illicit_violent=(_) end # The applied input type(s) for the category 'self-harm'. 
- sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def self_harm end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def self_harm=(_) end # The applied input type(s) for the category 'self-harm/instructions'. - sig do - returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) - end + sig { returns(T::Array[Symbol]) } def self_harm_instructions end - sig do - params( - _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def self_harm_instructions=(_) end # The applied input type(s) for the category 'self-harm/intent'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def self_harm_intent end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def self_harm_intent=(_) end # The applied input type(s) for the category 'sexual'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def sexual end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def sexual=(_) end # The applied input type(s) for the category 'sexual/minors'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def sexual_minors end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def sexual_minors=(_) end # The applied input type(s) for the category 'violence'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def violence end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def violence=(_) end # The applied input type(s) for the category 'violence/graphic'. 
- sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) } + sig { returns(T::Array[Symbol]) } def violence_graphic end - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def violence_graphic=(_) end # A list of the categories along with the input type(s) that the score applies to. sig do params( - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], - sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] + harassment: T::Array[Symbol], + harassment_threatening: T::Array[Symbol], + hate: T::Array[Symbol], + hate_threatening: T::Array[Symbol], + illicit: T::Array[Symbol], + illicit_violent: T::Array[Symbol], + self_harm: T::Array[Symbol], + self_harm_instructions: T::Array[Symbol], + self_harm_intent: T::Array[Symbol], + sexual: T::Array[Symbol], + sexual_minors: T::Array[Symbol], + violence: T::Array[Symbol], + violence_graphic: T::Array[Symbol] ) .returns(T.attached_class) end @@ -476,177 +421,133 @@ module OpenAI override .returns( { - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], 
- sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] + harassment: T::Array[Symbol], + harassment_threatening: T::Array[Symbol], + hate: T::Array[Symbol], + hate_threatening: T::Array[Symbol], + illicit: T::Array[Symbol], + illicit_violent: T::Array[Symbol], + self_harm: T::Array[Symbol], + self_harm_instructions: T::Array[Symbol], + self_harm_intent: T::Array[Symbol], + sexual: T::Array[Symbol], + sexual_minors: T::Array[Symbol], + violence: T::Array[Symbol], + violence_graphic: T::Array[Symbol] } ) end def to_hash end - module Harassment - extend OpenAI::Enum + class Harassment < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) + TEXT = :text end - module HarassmentThreatening - extend OpenAI::Enum + class HarassmentThreatening < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = - T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) + TEXT = :text end - module Hate - extend OpenAI::Enum + class Hate < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) + TEXT = :text end - module HateThreatening - extend OpenAI::Enum + class HateThreatening < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) + TEXT = :text end - module Illicit - extend OpenAI::Enum + class Illicit < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) + TEXT = :text end - module IllicitViolent - extend OpenAI::Enum + class IllicitViolent < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) + TEXT = :text end - module SelfHarm - extend OpenAI::Enum + class SelfHarm < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) + TEXT = :text + IMAGE = :image end - module SelfHarmInstruction - extend OpenAI::Enum + class SelfHarmInstruction < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = - T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) - IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + TEXT = :text + IMAGE = :image end - module SelfHarmIntent - extend OpenAI::Enum + class SelfHarmIntent < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) - IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + TEXT = :text + IMAGE = :image end - module Sexual - extend OpenAI::Enum + class Sexual < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) + TEXT = :text + IMAGE = :image end - module SexualMinor - extend OpenAI::Enum + class SexualMinor < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) + TEXT = :text end - module Violence - extend OpenAI::Enum + class Violence < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) + TEXT = :text + IMAGE = :image end - module ViolenceGraphic - extend OpenAI::Enum + class ViolenceGraphic < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) - IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + TEXT = :text + IMAGE = :image end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index a23b68a0..3a7a01a9 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -43,14 +43,11 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, Symbol))) } def model end - sig do - params(_: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -61,7 +58,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), + model: T.any(String, Symbol), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -78,7 +75,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), + model: T.any(String, Symbol), request_options: OpenAI::RequestOptions } ) @@ -88,8 +85,8 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - module Input - extend OpenAI::Union + class Input < OpenAI::Union + abstract! 
Variants = type_template(:out) do @@ -112,10 +109,10 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - module Model - extend OpenAI::Union + class Model < OpenAI::Union + abstract! - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)} } + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index a08f4a80..a34a1f36 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -2,17 +2,15 @@ module OpenAI module Models - module ModerationModel - extend OpenAI::Enum + class ModerationModel < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ModerationModel::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) - OMNI_MODERATION_2024_09_26 = - T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::OrSymbol) - TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) - TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::OrSymbol) + OMNI_MODERATION_LATEST = :"omni-moderation-latest" + OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" + TEXT_MODERATION_LATEST = :"text-moderation-latest" + TEXT_MODERATION_STABLE = :"text-moderation-stable" end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 705b6af9..861ae45d 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # An object describing an image to classify. - module ModerationMultiModalInput - extend OpenAI::Union + class ModerationMultiModalInput < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 909f27d4..38290949 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -9,14 +9,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def effort end - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def effort=(_) end @@ -25,14 +22,11 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. 
- sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def generate_summary end - sig do - params(_: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def generate_summary=(_) end @@ -40,25 +34,11 @@ module OpenAI # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - sig do - params( - effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) - ) - .returns(T.attached_class) - end + sig { params(effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)).returns(T.attached_class) } def self.new(effort: nil, generate_summary: nil) end - sig do - override - .returns( - { - effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) - } - ) - end + sig { override.returns({effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)}) } def to_hash end @@ -67,14 +47,13 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - module GenerateSummary - extend OpenAI::Enum + class GenerateSummary < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) - DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + CONCISE = :concise + DETAILED = :detailed end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 2cf29ee5..dcca18c9 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -8,15 +8,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - module ReasoningEffort - extend OpenAI::Enum + class ReasoningEffort < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ReasoningEffort::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - LOW = T.let(:low, OpenAI::Models::ReasoningEffort::OrSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::OrSymbol) + LOW = :low + MEDIUM = :medium + HIGH = :high end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 638039d3..40b9918f 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -23,14 +23,11 @@ module OpenAI end # The type of computer environment to control. 
- sig { returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) } + sig { returns(Symbol) } def environment end - sig do - params(_: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - .returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def environment=(_) end @@ -46,43 +43,28 @@ module OpenAI # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do - params( - display_height: Float, - display_width: Float, - environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, - type: Symbol - ) + params(display_height: Float, display_width: Float, environment: Symbol, type: Symbol) .returns(T.attached_class) end def self.new(display_height:, display_width:, environment:, type: :computer_use_preview) end sig do - override - .returns( - { - display_height: Float, - display_width: Float, - environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, - type: Symbol - } - ) + override.returns({display_height: Float, display_width: Float, environment: Symbol, type: Symbol}) end def to_hash end # The type of computer environment to control. - module Environment - extend OpenAI::Enum + class Environment < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + MAC = :mac + WINDOWS = :windows + UBUNTU = :ubuntu + BROWSER = :browser end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 42bb10de..5b3386f8 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -54,26 +54,20 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - .returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end # The type of the message input. Always `message`. 
- sig { returns(T.nilable(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) - .returns(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -94,8 +88,8 @@ module OpenAI ) ] ), - role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, - type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol + role: Symbol, + type: Symbol ) .returns(T.attached_class) end @@ -116,8 +110,8 @@ module OpenAI ) ] ), - role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, - type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol + role: Symbol, + type: Symbol } ) end @@ -126,8 +120,8 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -148,28 +142,24 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer end # The type of the message input. Always `message`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + MESSAGE = :message end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 71be889d..4085e59c 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -88,8 +88,8 @@ module OpenAI end # A filter to apply based on file attributes. - module Filters - extend OpenAI::Union + class Filters < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } @@ -97,14 +97,11 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. 
- sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def ranker end - sig do - params(_: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def ranker=(_) end @@ -120,37 +117,22 @@ module OpenAI end # Ranking options for search. - sig do - params( - ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, - score_threshold: Float - ) - .returns(T.attached_class) - end + sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker: nil, score_threshold: nil) end - sig do - override - .returns( - {ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float} - ) - end + sig { override.returns({ranker: Symbol, score_threshold: Float}) } def to_hash end # The ranker to use for the file search. - module Ranker - extend OpenAI::Enum + class Ranker < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) - DEFAULT_2024_11_15 = - T.let(:"default-2024-11-15", OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + AUTO = :auto + DEFAULT_2024_11_15 = :"default-2024-11-15" end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index dfee9da7..1162dc6a 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -39,14 +39,11 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) - .returns(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -55,7 +52,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -70,7 +67,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -82,15 +79,13 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index e785499b..29aba874 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -79,34 +79,11 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig do - returns( - T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) - ) - end + sig { returns(T.any(String, Symbol)) } def model end - sig do - params( - _: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) - ) - .returns( - T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) - ) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -198,11 +175,7 @@ module OpenAI # can call. sig do returns( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) end def tool_choice @@ -210,18 +183,10 @@ module OpenAI sig do params( - _: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) .returns( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) end def tool_choice=(_) @@ -330,14 +295,11 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -364,14 +326,11 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
- sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def truncation end - sig do - params(_: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def truncation=(_) end @@ -404,11 +363,7 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ), + model: T.any(String, Symbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, @@ -421,11 +376,7 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -438,9 +389,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, + status: Symbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), + truncation: T.nilable(Symbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String, object: Symbol @@ -483,11 +434,7 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ), + model: T.any(String, Symbol), object: Symbol, output: T::Array[ T.any( @@ -501,11 +448,7 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -518,9 +461,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, + status: Symbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), + truncation: T.nilable(Symbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String } @@ -531,59 +474,44 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the response is incomplete. 
- sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def reason end - sig do - params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def reason=(_) end # Details about why the response is incomplete. - sig do - params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - .returns(T.attached_class) - end + sig { params(reason: Symbol).returns(T.attached_class) } def self.new(reason: nil) end - sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } + sig { override.returns({reason: Symbol}) } def to_hash end # The reason why the response is incomplete. - module Reason - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } - - MAX_OUTPUT_TOKENS = - T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + class Reason < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + MAX_OUTPUT_TOKENS = :max_output_tokens + CONTENT_FILTER = :content_filter end end # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - module ToolChoice - extend OpenAI::Union + class ToolChoice < OpenAI::Union + abstract! Variants = type_template(:out) do { - fixed: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) } end end @@ -595,14 +523,13 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - module Truncation - extend OpenAI::Enum + class Truncation < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) - DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) + AUTO = :auto + DISABLED = :disabled end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 42fee896..8b5451ff 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -58,14 +58,11 @@ module OpenAI end # The status of the code interpreter tool call. 
- sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -89,7 +86,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, + status: Symbol, type: Symbol ) .returns(T.attached_class) @@ -109,7 +106,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, + status: Symbol, type: Symbol } ) @@ -118,8 +115,8 @@ module OpenAI end # The output of a code interpreter tool call that is text. - module Result - extend OpenAI::Union + class Result < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -235,20 +232,14 @@ module OpenAI end # The status of the code interpreter tool call. - module Status - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } - - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - INTERPRETING = - T.let(:interpreting, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + INTERPRETING = :interpreting + COMPLETED = :completed end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 63676938..c0e726fe 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -86,26 +86,20 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end # The type of the computer call. Always `computer_call`. 
- sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -128,8 +122,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol + status: Symbol, + type: Symbol ) .returns(T.attached_class) end @@ -154,8 +148,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol + status: Symbol, + type: Symbol } ) end @@ -163,8 +157,8 @@ module OpenAI end # A click action. - module Action - extend OpenAI::Union + class Action < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -186,14 +180,11 @@ module OpenAI class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } + sig { returns(Symbol) } def button end - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def button=(_) end @@ -226,50 +217,26 @@ module OpenAI end # A click action. - sig do - params( - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, - x: Integer, - y_: Integer, - type: Symbol - ) - .returns(T.attached_class) - end + sig { params(button: Symbol, x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } def self.new(button:, x:, y_:, type: :click) end - sig do - override - .returns( - { - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, - type: Symbol, - x: Integer, - y_: Integer - } - ) - end + sig { override.returns({button: Symbol, type: Symbol, x: Integer, y_: Integer}) } def to_hash end # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. 
- module Button - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) } - - LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - RIGHT = - T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - WHEEL = - T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - BACK = T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - FORWARD = - T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + class Button < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + LEFT = :left + RIGHT = :right + WHEEL = :wheel + BACK = :back + FORWARD = :forward end end @@ -638,29 +605,23 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end # The type of the computer call. Always `computer_call`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - COMPUTER_CALL = - T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + COMPUTER_CALL = :computer_call end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 9cdcacd2..4e5be0f8 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -66,14 +66,11 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -83,7 +80,7 @@ module OpenAI call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol, + status: Symbol, type: Symbol ) .returns(T.attached_class) @@ -100,7 +97,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: Symbol, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + status: Symbol } ) end @@ -147,20 +144,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } - - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 0cc16286..92cf9df2 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Multi-modal input and output contents. - module ResponseContent - extend OpenAI::Union + class ResponseContent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index b040d85c..9e79484e 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that was added. - module Part - extend OpenAI::Union + class Part < OpenAI::Union + abstract! 
Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 6f2a4562..5661d776 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that is done. - module Part - extend OpenAI::Union + class Part < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 9c2938bb..dbd9ed7b 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -91,22 +91,11 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig do - returns( - T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) - end + sig { returns(T.any(String, Symbol)) } def model end - sig do - params( - _: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) - .returns( - T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) - end + sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } def model=(_) end @@ -118,14 +107,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) - .returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) - end + sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } def include=(_) end @@ -243,11 +229,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) ) end @@ -256,18 +238,10 @@ module OpenAI sig do params( - _: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) .returns( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) ) end def tool_choice=(_) @@ -349,14 +323,11 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def truncation end - sig do - params(_: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) - end + sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } def truncation=(_) end @@ -391,8 +362,8 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + model: T.any(String, Symbol), + include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -402,11 +373,7 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -416,7 +383,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: T.nilable(Symbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -466,8 +433,8 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + model: T.any(String, Symbol), + include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -477,11 +444,7 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -491,7 +454,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: T.nilable(Symbol), user: String, request_options: OpenAI::RequestOptions } @@ -509,8 +472,8 @@ module OpenAI # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - module Input - extend OpenAI::Union + class Input < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -540,17 +503,13 @@ module OpenAI # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. 
- module ToolChoice - extend OpenAI::Union + class ToolChoice < OpenAI::Union + abstract! Variants = type_template(:out) do { - fixed: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) + fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) } end end @@ -562,16 +521,13 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - module Truncation - extend OpenAI::Enum + class Truncation < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) - DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) + AUTO = :auto + DISABLED = :disabled end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 90d6cf33..7f9b2db2 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -5,14 +5,11 @@ module OpenAI module Responses class ResponseError < OpenAI::BaseModel # The error code for the response. - sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + sig { returns(Symbol) } def code end - sig do - params(_: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def code=(_) end @@ -26,54 +23,38 @@ module OpenAI end # An error object returned when the model fails to generate a Response. - sig do - params(code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String) - .returns(T.attached_class) - end + sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end - sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } + sig { override.returns({code: Symbol, message: String}) } def to_hash end # The error code for the response. 
- module Code - extend OpenAI::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } - - SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_PROMPT = T.let(:invalid_prompt, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - VECTOR_STORE_TIMEOUT = - T.let(:vector_store_timeout, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_IMAGE = T.let(:invalid_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_IMAGE_FORMAT = - T.let(:invalid_image_format, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_BASE64_IMAGE = - T.let(:invalid_base64_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_IMAGE_URL = - T.let(:invalid_image_url, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_TOO_LARGE = T.let(:image_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_TOO_SMALL = T.let(:image_too_small, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_PARSE_ERROR = - T.let(:image_parse_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_CONTENT_POLICY_VIOLATION = - T.let(:image_content_policy_violation, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_IMAGE_MODE = - T.let(:invalid_image_mode, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_FILE_TOO_LARGE = - T.let(:image_file_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - UNSUPPORTED_IMAGE_MEDIA_TYPE = - T.let(:unsupported_image_media_type, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - EMPTY_IMAGE_FILE = T.let(:empty_image_file, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - FAILED_TO_DOWNLOAD_IMAGE = - T.let(:failed_to_download_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_FILE_NOT_FOUND = - T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + class Code < OpenAI::Enum + abstract! 
+ + Value = type_template(:out) { {fixed: Symbol} } + + SERVER_ERROR = :server_error + RATE_LIMIT_EXCEEDED = :rate_limit_exceeded + INVALID_PROMPT = :invalid_prompt + VECTOR_STORE_TIMEOUT = :vector_store_timeout + INVALID_IMAGE = :invalid_image + INVALID_IMAGE_FORMAT = :invalid_image_format + INVALID_BASE64_IMAGE = :invalid_base64_image + INVALID_IMAGE_URL = :invalid_image_url + IMAGE_TOO_LARGE = :image_too_large + IMAGE_TOO_SMALL = :image_too_small + IMAGE_PARSE_ERROR = :image_parse_error + IMAGE_CONTENT_POLICY_VIOLATION = :image_content_policy_violation + INVALID_IMAGE_MODE = :invalid_image_mode + IMAGE_FILE_TOO_LARGE = :image_file_too_large + UNSUPPORTED_IMAGE_MEDIA_TYPE = :unsupported_image_media_type + EMPTY_IMAGE_FILE = :empty_image_file + FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image + IMAGE_FILE_NOT_FOUND = :image_file_not_found end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 439cbcd7..17a4bdb2 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -24,14 +24,11 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -63,7 +60,7 @@ module OpenAI params( id: String, queries: T::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, + status: Symbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]), type: Symbol ) @@ -78,7 +75,7 @@ module OpenAI { id: String, queries: T::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, + status: Symbol, type: Symbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) } @@ -89,20 +86,16 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + IN_PROGRESS = :in_progress + SEARCHING = :searching + COMPLETED = :completed + INCOMPLETE = :incomplete + FAILED = :failed end class Result < OpenAI::BaseModel @@ -186,8 +179,8 @@ module OpenAI def to_hash end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 8c4e9b9a..fea986b1 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -16,8 +16,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - module ResponseFormatTextConfig - extend OpenAI::Union + class ResponseFormatTextConfig < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index e2fb2951..7f65d691 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -51,14 +51,11 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -66,14 +63,7 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. 
sig do - params( - arguments: String, - call_id: String, - name: String, - id: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol, - type: Symbol - ) + params(arguments: String, call_id: String, name: String, id: String, status: Symbol, type: Symbol) .returns(T.attached_class) end def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) @@ -81,33 +71,28 @@ module OpenAI sig do override - .returns( - { - arguments: String, - call_id: String, - name: String, - type: Symbol, - id: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol - } - ) + .returns({ + arguments: String, + call_id: String, + name: String, + type: Symbol, + id: String, + status: Symbol + }) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 4a38931a..89456a9b 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -42,14 +42,11 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -58,45 +55,27 @@ module OpenAI id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol, + status: Symbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) end - sig do - override - .returns( - { - id: String, - call_id: String, - output: String, - type: Symbol, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol - } - ) - end + sig { override.returns({id: String, call_id: String, output: String, type: Symbol, status: Symbol}) } def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 1a4c8366..fa36c718 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -14,14 +14,11 @@ module OpenAI end # The status of the web search tool call. - sig { returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -37,39 +34,24 @@ module OpenAI # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for # more information. - sig do - params( - id: String, - status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, - type: Symbol - ) - .returns(T.attached_class) - end + sig { params(id: String, status: Symbol, type: Symbol).returns(T.attached_class) } def self.new(id:, status:, type: :web_search_call) end - sig do - override - .returns( - {id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol} - ) - end + sig { override.returns({id: String, status: Symbol, type: Symbol}) } def to_hash end # The status of the web search tool call. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + IN_PROGRESS = :in_progress + SEARCHING = :searching + COMPLETED = :completed + FAILED = :failed end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 3b6a4039..363cdad4 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -11,18 +11,14 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - module ResponseIncludable - extend OpenAI::Enum + class ResponseIncludable < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - FILE_SEARCH_CALL_RESULTS = - T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) - MESSAGE_INPUT_IMAGE_IMAGE_URL = - T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = - T.let(:"computer_call_output.output.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" + MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index e6db921a..f49d5163 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -14,14 +14,11 @@ module OpenAI end # The format of the audio data. Currently supported formats are `mp3` and `wav`. - sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) } + sig { returns(Symbol) } def format_ end - sig do - params(_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def format_=(_) end @@ -35,36 +32,22 @@ module OpenAI end # An audio input to the model. 
- sig do - params( - data: String, - format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, - type: Symbol - ) - .returns(T.attached_class) - end + sig { params(data: String, format_: Symbol, type: Symbol).returns(T.attached_class) } def self.new(data:, format_:, type: :input_audio) end - sig do - override - .returns( - {data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol} - ) - end + sig { override.returns({data: String, format_: Symbol, type: Symbol}) } def to_hash end # The format of the audio data. Currently supported formats are `mp3` and `wav`. - module Format - extend OpenAI::Enum + class Format < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + MP3 = :mp3 + WAV = :wav end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index 37ed1a5a..cba404fb 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # A text input to the model. - module ResponseInputContent - extend OpenAI::Union + class ResponseInputContent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index ade87200..f4e450ae 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -6,14 +6,11 @@ module OpenAI class ResponseInputImage < OpenAI::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } + sig { returns(Symbol) } def detail end - sig do - params(_: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def detail=(_) end @@ -48,43 +45,35 @@ module OpenAI # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). sig do - params( - detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, - file_id: T.nilable(String), - image_url: T.nilable(String), - type: Symbol - ) + params(detail: Symbol, file_id: T.nilable(String), image_url: T.nilable(String), type: Symbol) .returns(T.attached_class) end def self.new(detail:, file_id: nil, image_url: nil, type: :input_image) end sig do - override - .returns( - { - detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, - type: Symbol, - file_id: T.nilable(String), - image_url: T.nilable(String) - } - ) + override.returns( + { + detail: Symbol, + type: Symbol, + file_id: T.nilable(String), + image_url: T.nilable(String) + } + ) end def to_hash end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. 
- module Detail - extend OpenAI::Enum + class Detail < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + HIGH = :high + LOW = :low + AUTO = :auto end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 496ac76e..6ccd31a3 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -8,8 +8,8 @@ module OpenAI # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - module ResponseInputItem - extend OpenAI::Union + class ResponseInputItem < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -71,39 +71,30 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end # The type of the message input. Always set to `message`. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -119,9 +110,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, - status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol + role: Symbol, + status: Symbol, + type: Symbol ) .returns(T.attached_class) end @@ -139,9 +130,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, - status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol + role: Symbol, + status: Symbol, + type: Symbol } ) end @@ -149,45 +140,35 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + USER = :user + SYSTEM = :system + DEVELOPER = :developer end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end # The type of the message input. Always set to `message`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + MESSAGE = :message end end @@ -256,14 +237,11 @@ module OpenAI # The status of the message input. 
One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -274,7 +252,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, + status: Symbol, type: Symbol ) .returns(T.attached_class) @@ -291,7 +269,7 @@ module OpenAI type: Symbol, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol + status: Symbol } ) end @@ -338,20 +316,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end @@ -395,14 +367,11 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -412,45 +381,29 @@ module OpenAI call_id: String, output: String, id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol, + status: Symbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output) end sig do - override - .returns( - { - call_id: String, - output: String, - type: Symbol, - id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol - } - ) + override.returns({call_id: String, output: String, type: Symbol, id: String, status: Symbol}) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) } - - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + class Status < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 0aaaacd6..cabf1399 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -53,39 +53,30 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + sig { returns(Symbol) } def role end - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end # The type of the message input. Always set to `message`. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)) } + sig { returns(T.nilable(Symbol)) } def type end - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end @@ -99,9 +90,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol + role: Symbol, + status: Symbol, + type: Symbol ) .returns(T.attached_class) end @@ -120,9 +111,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol + role: Symbol, + status: Symbol, + type: Symbol } ) end @@ -130,44 +121,35 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - module Role - extend OpenAI::Enum + class Role < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + USER = :user + SYSTEM = :system + DEVELOPER = :developer end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end # The type of the message input. Always set to `message`. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + MESSAGE = :message end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index 0d59846c..77bef808 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Content item used to generate a response. - module ResponseItem - extend OpenAI::Union + class ResponseItem < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index f4a81f11..bec1b93e 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # An output message from the model. - module ResponseOutputItem - extend OpenAI::Union + class ResponseOutputItem < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 63f0758f..17bf1ad0 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -44,14 +44,11 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -69,7 +66,7 @@ module OpenAI params( id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], - status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, + status: Symbol, role: Symbol, type: Symbol ) @@ -85,7 +82,7 @@ module OpenAI id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], role: Symbol, - status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, + status: Symbol, type: Symbol } ) @@ -94,8 +91,8 @@ module OpenAI end # A text output from the model. - module Content - extend OpenAI::Union + class Content < OpenAI::Union + abstract! Variants = type_template(:out) do @@ -107,16 +104,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 78b35143..13eb0a87 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -98,8 +98,8 @@ module OpenAI end # A citation to a file. - module Annotation - extend OpenAI::Union + class Annotation < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index aabd22e0..66b69b58 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -36,14 +36,11 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def status end - sig do - params(_: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -53,7 +50,7 @@ module OpenAI params( id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, + status: Symbol, type: Symbol ) .returns(T.attached_class) @@ -68,7 +65,7 @@ module OpenAI id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], type: Symbol, - status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol + status: Symbol } ) end @@ -105,16 +102,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index d2129c7d..8e817f0d 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -9,20 +9,17 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + sig { returns(T.nilable(T::Array[Symbol])) } def include end - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) - .returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) - end + sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } def include=(_) end sig do params( - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + include: T::Array[Symbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -30,15 +27,7 @@ module OpenAI def self.new(include: nil, request_options: {}) end - sig do - override - .returns( - { - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: OpenAI::RequestOptions - } - ) - end + sig { override.returns({include: T::Array[Symbol], request_options: OpenAI::RequestOptions}) } def to_hash end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 95b80ac1..f6a3f6ce 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -5,16 +5,15 @@ module OpenAI module Responses # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - module ResponseStatus - extend OpenAI::Enum + class ResponseStatus < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + COMPLETED = :completed + FAILED = :failed + IN_PROGRESS = :in_progress + INCOMPLETE = :incomplete end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 4c87665c..ed1980bf 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Emitted when there is a partial audio response. - module ResponseStreamEvent - extend OpenAI::Union + class ResponseStreamEvent < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index f7d64bc8..ed354df8 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -128,8 +128,8 @@ module OpenAI end # A citation to a file. - module Annotation - extend OpenAI::Union + class Annotation < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 0d0c2a77..0085ad60 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -6,8 +6,8 @@ module OpenAI # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - module Tool - extend OpenAI::Union + class Tool < OpenAI::Union + abstract! Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index a4acb23d..c047abb7 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -11,15 +11,14 @@ module OpenAI # more tools. # # `required` means the model must call one or more tools. - module ToolChoiceOptions - extend OpenAI::Enum + class ToolChoiceOptions < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + NONE = :none + AUTO = :auto + REQUIRED = :required end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 45b4ecfc..8459293b 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -12,24 +12,21 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). - sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) } + sig { params(type: Symbol).returns(T.attached_class) } def self.new(type:) end - sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) } + sig { override.returns({type: Symbol}) } def to_hash end @@ -41,19 +38,15 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - WEB_SEARCH_PREVIEW = - T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - COMPUTER_USE_PREVIEW = - T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + FILE_SEARCH = :file_search + WEB_SEARCH_PREVIEW = :web_search_preview + COMPUTER_USE_PREVIEW = :computer_use_preview + WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index a0ae2d7c..cf2fb2f6 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -8,27 +8,21 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) - .returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def search_context_size end - sig do - params(_: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - .returns(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def search_context_size=(_) end @@ -48,8 +42,8 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). sig do params( - type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, - search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, + type: Symbol, + search_context_size: Symbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) ) .returns(T.attached_class) @@ -61,8 +55,8 @@ module OpenAI override .returns( { - type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, - search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, + type: Symbol, + search_context_size: Symbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) } ) @@ -74,30 +68,25 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) - WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + WEB_SEARCH_PREVIEW = :web_search_preview + WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - module SearchContextSize - extend OpenAI::Enum + class SearchContextSize < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + LOW = :low + MEDIUM = :medium + HIGH = :high end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index ddb9e74a..ecad7412 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -2,28 +2,20 @@ module OpenAI module Models - module ResponsesModel - extend OpenAI::Union + class ResponsesModel < OpenAI::Union + abstract! - Variants = - type_template(:out) do - { - fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - } - end + Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - module UnionMember2 - extend OpenAI::Enum + class UnionMember2 < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::UnionMember2) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW = - T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 05b6e1e2..1da23f04 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -69,14 +69,11 @@ module OpenAI end # The status of the Upload. 
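The `ResponsesModel` union above fixes its variants to `T.any(String, Symbol)`, so a model can be supplied either as a free-form name or as one of the `UnionMember2` symbols. Both spellings below are illustrative:

model_string = "gpt-4o"   # any model name as a String
model_symbol = :"o1-pro"  # equivalently, ResponsesModel::UnionMember2::O1_PRO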
- sig { returns(OpenAI::Models::Upload::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::Upload::Status::TaggedSymbol) - .returns(OpenAI::Models::Upload::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -98,7 +95,7 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Models::Upload::Status::TaggedSymbol, + status: Symbol, file: T.nilable(OpenAI::Models::FileObject), object: Symbol ) @@ -118,7 +115,7 @@ module OpenAI filename: String, object: Symbol, purpose: String, - status: OpenAI::Models::Upload::Status::TaggedSymbol, + status: Symbol, file: T.nilable(OpenAI::Models::FileObject) } ) @@ -127,16 +124,15 @@ module OpenAI end # The status of the Upload. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Upload::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) + PENDING = :pending + COMPLETED = :completed + CANCELLED = :cancelled + EXPIRED = :expired end end end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 93d701d3..22555f0e 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -40,11 +40,11 @@ module OpenAI # # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). - sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { returns(Symbol) } def purpose end - sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { params(_: Symbol).returns(Symbol) } def purpose=(_) end @@ -53,7 +53,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, + purpose: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -68,7 +68,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, + purpose: Symbol, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 188bfd85..e80d2488 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -73,14 +73,11 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
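With `status` typed as a bare `Symbol`, client code can match on either the enum constants or symbol literals. A hedged example, assuming `upload` is an `OpenAI::Models::Upload` returned by the API:

case upload.status
when OpenAI::Models::Upload::Status::COMPLETED # identical to :completed
  # upload.file should carry the resulting FileObject once the Upload finishes
when :pending, :cancelled, :expired
  # bare symbol literals work equally well under the relaxed signatures
end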
- sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::VectorStore::Status::TaggedSymbol) - .returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -121,7 +118,7 @@ module OpenAI last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: OpenAI::Models::VectorStore::Status::TaggedSymbol, + status: Symbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer), @@ -155,7 +152,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - status: OpenAI::Models::VectorStore::Status::TaggedSymbol, + status: Symbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer) @@ -241,15 +238,14 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStore::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) + EXPIRED = :expired + IN_PROGRESS = :in_progress + COMPLETED = :completed end class ExpiresAfter < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index c2400193..5749ce8a 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -42,14 +42,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -58,7 +55,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -73,7 +70,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -83,14 +80,13 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 1e785923..029b5b83 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -89,8 +89,8 @@ module OpenAI end # A query string for a search - module Query - extend OpenAI::Union + class Query < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } @@ -98,22 +98,19 @@ module OpenAI end # A filter to apply based on file attributes. - module Filters - extend OpenAI::Union + class Filters < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel - sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def ranker end - sig do - params(_: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def ranker=(_) end @@ -126,36 +123,21 @@ module OpenAI end # Ranking options for search. - sig do - params( - ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, - score_threshold: Float - ) - .returns(T.attached_class) - end + sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } def self.new(ranker: nil, score_threshold: nil) end - sig do - override - .returns( - {ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float} - ) - end + sig { override.returns({ranker: Symbol, score_threshold: Float}) } def to_hash end - module Ranker - extend OpenAI::Enum + class Ranker < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) - DEFAULT_2024_11_15 = - T.let(:"default-2024-11-15", OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + AUTO = :auto + DEFAULT_2024_11_15 = :"default-2024-11-15" end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index e4039a7c..a2443437 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -86,8 +86,8 @@ module OpenAI def to_hash end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end @@ -103,40 +103,29 @@ module OpenAI end # The type of content. 
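The `Attribute` union above, repeated for several vector-store models in this patch, fixes its variants to `T.any(String, Float, T::Boolean)`, i.e. the scalar values permitted in an attributes hash. An illustrative literal:

# Keys are attribute names; each value must be a String, Float, or Boolean.
attributes = {
  "author"   => "jane-doe", # String
  "score"    => 0.87,       # Float
  "reviewed" => true        # T::Boolean
}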
- sig { returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + sig { returns(Symbol) } def type end - sig do - params(_: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - .returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def type=(_) end - sig do - params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - .returns(T.attached_class) - end + sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type:) end - sig do - override - .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) - end + sig { override.returns({text: String, type: Symbol}) } def to_hash end # The type of content. - module Type - extend OpenAI::Enum + class Type < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + TEXT = :text end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index f2dd4d99..4be0dcb6 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 22045ec2..c7da2f41 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -40,14 +40,11 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def filter end - sig do - params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def filter=(_) end @@ -63,14 +60,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
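Per the `FileBatchListFilesParams` signatures that follow, `filter` and `order` are now plain symbols. A hedged construction sketch; the vector store ID is a placeholder:

params = OpenAI::Models::VectorStores::FileBatchListFilesParams.new(
  vector_store_id: "vs_abc123", # placeholder ID
  filter: :completed,           # i.e. Filter::COMPLETED
  order: :desc                  # i.e. Order::DESC
)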
- sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -79,9 +73,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + filter: Symbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -96,9 +90,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + filter: Symbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -107,33 +101,26 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - module Filter - extend OpenAI::Enum + class Filter < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + FAILED = :failed + CANCELLED = :cancelled end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index d8ec4fba..57e35c52 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! 
Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 20a4bce5..0f62a65d 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -32,14 +32,11 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def filter end - sig do - params(_: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def filter=(_) end @@ -55,14 +52,11 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } + sig { returns(T.nilable(Symbol)) } def order end - sig do - params(_: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def order=(_) end @@ -70,9 +64,9 @@ module OpenAI params( after: String, before: String, - filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, + filter: Symbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, + order: Symbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -86,9 +80,9 @@ module OpenAI { after: String, before: String, - filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, + filter: Symbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, + order: Symbol, request_options: OpenAI::RequestOptions } ) @@ -97,30 +91,26 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - module Filter - extend OpenAI::Enum + class Filter < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + FAILED = :failed + CANCELLED = :cancelled end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - module Order - extend OpenAI::Enum + class Order < OpenAI::Enum + abstract! 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + ASC = :asc + DESC = :desc end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index fe35965e..e3693815 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -55,8 +55,8 @@ module OpenAI def to_hash end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index fc8ccf5a..4e1a4a36 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -47,14 +47,11 @@ module OpenAI # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - .returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -124,7 +121,7 @@ module OpenAI id: String, created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, + status: Symbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -154,7 +151,7 @@ module OpenAI created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, + status: Symbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -167,14 +164,11 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + sig { returns(Symbol) } def code end - sig do - params(_: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) - .returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def code=(_) end @@ -189,61 +183,42 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
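`LastError` likewise drops to a plain symbol for its `code`, per the signature that follows. A sketch of what a deserialized error looks like; constructing one by hand is purely illustrative, since these normally arrive from the API:

error = OpenAI::Models::VectorStores::VectorStoreFile::LastError.new(
  code: :server_error,                        # i.e. LastError::Code::SERVER_ERROR
  message: "The file could not be processed"  # illustrative message
)
error.code # => :server_error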
- sig do - params( - code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, - message: String - ) - .returns(T.attached_class) - end + sig { params(code: Symbol, message: String).returns(T.attached_class) } def self.new(code:, message:) end - sig do - override - .returns( - {code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, message: String} - ) - end + sig { override.returns({code: Symbol, message: String}) } def to_hash end # One of `server_error` or `rate_limit_exceeded`. - module Code - extend OpenAI::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } - - SERVER_ERROR = - T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) - UNSUPPORTED_FILE = - T.let(:unsupported_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) - INVALID_FILE = - T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + class Code < OpenAI::Enum + abstract! + + Value = type_template(:out) { {fixed: Symbol} } + + SERVER_ERROR = :server_error + UNSUPPORTED_FILE = :unsupported_file + INVALID_FILE = :invalid_file end end # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - module Status - extend OpenAI::Enum + class Status < OpenAI::Enum + abstract! - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + Value = type_template(:out) { {fixed: Symbol} } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + IN_PROGRESS = :in_progress + COMPLETED = :completed + CANCELLED = :cancelled + FAILED = :failed end - module Attribute - extend OpenAI::Union + class Attribute < OpenAI::Union + abstract! Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 4627b63f..2e5ce798 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -45,14 +45,11 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. 
- sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + sig { returns(Symbol) } def status end - sig do - params(_: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - end + sig { params(_: Symbol).returns(Symbol) } def status=(_) end @@ -74,7 +71,7 @@ module OpenAI id: String, created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, + status: Symbol, vector_store_id: String, object: Symbol ) @@ -91,7 +88,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, + status: Symbol, vector_store_id: String } ) @@ -174,18 +171,15 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - module Status - extend OpenAI::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } - - IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + class Status < OpenAI::Enum + abstract! 
+ + Value = type_template(:out) { {fixed: Symbol} } + + IN_PROGRESS = :in_progress + COMPLETED = :completed + CANCELLED = :cancelled + FAILED = :failed end end end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index c434cedb..e2e85216 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -8,10 +8,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + model: T.any(String, Symbol), + voice: Symbol, instructions: String, - response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, speed: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 38f106b4..a1340034 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -8,13 +8,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: T::Array[Symbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -70,13 +70,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, Symbol), + include: T::Array[Symbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: T::Array[Symbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index baf563ad..278e3855 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -8,9 +8,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + model: T.any(String, Symbol), prompt: String, - response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, + response_format: Symbol, temperature: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index 1a12c440..10691913 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -6,8 +6,8 @@ module OpenAI # Creates and executes a batch from an uploaded file of requests sig do params( - completion_window: 
OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, - endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, + completion_window: Symbol, + endpoint: Symbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index a31361cb..00d18547 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -7,12 +7,12 @@ module OpenAI # Create an assistant with a model and instructions. sig do params( - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -129,9 +129,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), + model: T.any(String, Symbol), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -234,7 +234,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 6db707e7..1479fd01 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -109,7 +109,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -121,12 +121,7 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -248,7 +243,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -260,12 +255,7 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, 
OpenAI::Models::Beta::AssistantToolChoice)), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 68fd1790..3ec7bab4 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -19,7 +19,7 @@ module OpenAI ) ] ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, + role: Symbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -104,7 +104,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, + order: Symbol, run_id: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index f5d012a0..cd747220 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -14,16 +14,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -33,12 +33,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), tools: T.nilable( T::Array[ T.any( @@ -176,16 +171,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.nilable( T.any( Symbol, @@ -195,12 +190,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ), + tool_choice: T.nilable(T.any(Symbol, 
OpenAI::Models::Beta::AssistantToolChoice)), tools: T.nilable( T::Array[ T.any( @@ -415,7 +405,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]) diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 5dfa9a8a..4833b5fb 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -12,7 +12,7 @@ module OpenAI step_id: String, thread_id: String, run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) @@ -44,9 +44,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: T::Array[Symbol], limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 8fc9878c..fff5ad69 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -37,40 +37,34 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ), + function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: T.nilable(Symbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any( - 
OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ), + tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -301,40 +295,34 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, Symbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ), + function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: T.nilable(T::Array[Symbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: T.nilable(Symbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: T.nilable(Symbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ), + tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -585,7 +573,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 0667ddbc..3e7c16e2 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -12,7 +12,7 @@ module OpenAI completion_id: String, after: String, limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index b894675b..8e9e52d4 100644 
--- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), prompt: T.nilable( T.any( String, @@ -149,7 +149,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), prompt: T.nilable( T.any( String, diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 65a19f1d..025c37a7 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -7,9 +7,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), + model: T.any(String, Symbol), dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, + encoding_format: Symbol, user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index da0efe9d..441f93e0 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -27,7 +27,7 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - purpose: OpenAI::Models::FilePurpose::OrSymbol, + purpose: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::FileObject) @@ -64,7 +64,7 @@ module OpenAI params( after: String, limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, + order: Symbol, purpose: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 1c55189e..b3f7cc15 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -17,7 +17,7 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), + model: T.any(String, Symbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 1944e5f2..396f2edd 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -7,10 +7,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -47,10 +47,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: 
T.any(IO, StringIO), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -91,12 +91,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, Symbol)), n: T.nilable(Integer), - quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), + quality: Symbol, + response_format: T.nilable(Symbol), + size: T.nilable(Symbol), + style: T.nilable(Symbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index b57441a5..3b9b2bd0 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -12,7 +12,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), + model: T.any(String, Symbol), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::ModerationCreateResponse) diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 701c8406..64ab5c27 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -38,8 +38,8 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + model: T.any(String, Symbol), + include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -49,11 +49,7 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -63,7 +59,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: T.nilable(Symbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -213,8 +209,8 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + model: 
T.any(String, Symbol), + include: T.nilable(T::Array[Symbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -224,11 +220,7 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ), + tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -238,7 +230,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: T.nilable(Symbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -398,7 +390,7 @@ module OpenAI sig do params( response_id: String, - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + include: T::Array[Symbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Responses::Response) diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 7f92008d..44e41eb2 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -11,7 +11,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 6ee12473..98a58dc5 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -31,7 +31,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, + purpose: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Upload) diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 7e24de66..86418315 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -99,7 +99,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index c8a0af8b..ec4e8e7a 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -82,9 +82,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + filter: Symbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + order: Symbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) 
          .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile])
diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi
index 88902be4..2fc3ae57 100644
--- a/rbi/lib/openai/resources/vector_stores/files.rbi
+++ b/rbi/lib/openai/resources/vector_stores/files.rbi
@@ -89,9 +89,9 @@ module OpenAI
           vector_store_id: String,
           after: String,
           before: String,
-          filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol,
+          filter: Symbol,
           limit: Integer,
-          order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol,
+          order: Symbol,
           request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
         )
           .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile])
diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs
index ff01cbf3..ba62241d 100644
--- a/sig/openai/base_model.rbs
+++ b/sig/openai/base_model.rbs
@@ -62,28 +62,28 @@ module OpenAI
     ) -> ([true, top, nil] | [false, bool, Integer])
   end

-  module Enum
-    include OpenAI::Converter
+  class Enum
+    extend OpenAI::Converter

     def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)]

     private def self.finalize!: -> void

-    def ===: (top other) -> bool
+    def self.===: (top other) -> bool

-    def ==: (top other) -> bool
+    def self.==: (top other) -> bool

-    def coerce: (String | Symbol | top value) -> (Symbol | top)
+    def self.coerce: (String | Symbol | top value) -> (Symbol | top)

-    def dump: (Symbol | top value) -> (Symbol | top)
+    def self.dump: (Symbol | top value) -> (Symbol | top)

-    def try_strict_coerce: (
+    def self.try_strict_coerce: (
       top value
     ) -> ([true, top, nil] | [false, bool, Integer])
   end

-  module Union
-    include OpenAI::Converter
+  class Union
+    extend OpenAI::Converter

     private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Converter::input)]]

@@ -105,15 +105,15 @@ module OpenAI
     private def self.resolve_variant: (top value) -> OpenAI::Converter::input?

-    def ===: (top other) -> bool
+    def self.===: (top other) -> bool

-    def ==: (top other) -> bool
+    def self.==: (top other) -> bool

-    def coerce: (top value) -> top
+    def self.coerce: (top value) -> top

-    def dump: (top value) -> top
+    def self.dump: (top value) -> top

-    def try_strict_coerce: (
+    def self.try_strict_coerce: (
       top value
     ) -> ([true, top, nil] | [false, bool, Integer])
   end
diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs
index 7427ba06..817c97a9 100644
--- a/sig/openai/models/all_models.rbs
+++ b/sig/openai/models/all_models.rbs
@@ -5,18 +5,14 @@ module OpenAI
       | OpenAI::Models::chat_model
       | OpenAI::Models::AllModels::union_member2

-    module AllModels
-      extend OpenAI::Union
-
+    class AllModels < OpenAI::Union
       type union_member2 =
         :"o1-pro"
         | :"o1-pro-2025-03-19"
         | :"computer-use-preview"
         | :"computer-use-preview-2025-03-11"

-      module UnionMember2
-        extend OpenAI::Enum
-
+      class UnionMember2 < OpenAI::Enum
         O1_PRO: :"o1-pro"
         O1_PRO_2025_03_19: :"o1-pro-2025-03-19"
         COMPUTER_USE_PREVIEW: :"computer-use-preview"
diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs
index 30772347..eb21a97f 100644
--- a/sig/openai/models/audio/speech_create_params.rbs
+++ b/sig/openai/models/audio/speech_create_params.rbs
@@ -50,9 +50,7 @@ module OpenAI

       type model = String | OpenAI::Models::Audio::speech_model

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         def self.variants: -> [String, OpenAI::Models::Audio::speech_model]
       end

@@ -67,9 +65,7 @@
         | :sage
         | :shimmer

-      module Voice
-        extend OpenAI::Enum
-
+      class Voice < OpenAI::Enum
         ALLOY: :alloy
         ASH: :ash
         CORAL: :coral
@@ -85,9 +81,7 @@

       type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm

-      module ResponseFormat
-        extend OpenAI::Enum
-
+      class ResponseFormat < OpenAI::Enum
         MP3: :mp3
         OPUS: :opus
         AAC: :aac
diff --git a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs
index 51c913a4..357eaa4c 100644
--- a/sig/openai/models/audio/speech_model.rbs
+++ b/sig/openai/models/audio/speech_model.rbs
@@ -3,9 +3,7 @@ module OpenAI
     module Audio
       type speech_model = :"tts-1" | :"tts-1-hd" | :"gpt-4o-mini-tts"

-      module SpeechModel
-        extend OpenAI::Enum
-
+      class SpeechModel < OpenAI::Enum
         TTS_1: :"tts-1"
         TTS_1_HD: :"tts-1-hd"
         GPT_4O_MINI_TTS: :"gpt-4o-mini-tts"
diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs
index 3834d3cc..28f79a92 100644
--- a/sig/openai/models/audio/transcription_create_params.rbs
+++ b/sig/openai/models/audio/transcription_create_params.rbs
@@ -68,17 +68,13 @@ module OpenAI

       type model = String | OpenAI::Models::audio_model

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         def self.variants: -> [String, OpenAI::Models::audio_model]
       end

       type timestamp_granularity = :word | :segment

-      module TimestampGranularity
-        extend OpenAI::Enum
-
+      class TimestampGranularity < OpenAI::Enum
         WORD: :word
         SEGMENT: :segment

diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs
index 32f15ab6..f0179c81 100644
--- a/sig/openai/models/audio/transcription_create_response.rbs
+++ b/sig/openai/models/audio/transcription_create_response.rbs
@@ -5,9 +5,7 @@ module OpenAI
         OpenAI::Models::Audio::Transcription
         | OpenAI::Models::Audio::TranscriptionVerbose

-      module TranscriptionCreateResponse
-        extend OpenAI::Union
-
+      class TranscriptionCreateResponse < OpenAI::Union
         def self.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
       end
     end
diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs
index 1fc83e72..cf06a929 100644
--- a/sig/openai/models/audio/transcription_include.rbs
+++ b/sig/openai/models/audio/transcription_include.rbs
@@ -3,9 +3,7 @@ module OpenAI
     module Audio
       type transcription_include = :logprobs

-      module TranscriptionInclude
-        extend OpenAI::Enum
-
+      class TranscriptionInclude < OpenAI::Enum
         LOGPROBS: :logprobs

         def self.values: -> ::Array[OpenAI::Models::Audio::transcription_include]
diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs
index caffcd1f..b9233feb 100644
--- a/sig/openai/models/audio/transcription_stream_event.rbs
+++ b/sig/openai/models/audio/transcription_stream_event.rbs
@@ -5,9 +5,7 @@ module OpenAI
         OpenAI::Models::Audio::TranscriptionTextDeltaEvent
         | OpenAI::Models::Audio::TranscriptionTextDoneEvent

-      module TranscriptionStreamEvent
-        extend OpenAI::Union
-
+      class TranscriptionStreamEvent < OpenAI::Union
         def self.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent]
       end
     end
diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs
index d5dcc175..83dc3322 100644
--- a/sig/openai/models/audio/translation_create_params.rbs
+++ b/sig/openai/models/audio/translation_create_params.rbs
@@ -46,17 +46,13 @@ module OpenAI

       type model = String | OpenAI::Models::audio_model

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         def self.variants: -> [String, OpenAI::Models::audio_model]
       end

       type response_format = :json | :text | :srt | :verbose_json | :vtt

-      module ResponseFormat
-        extend OpenAI::Enum
-
+      class ResponseFormat < OpenAI::Enum
         JSON: :json
         TEXT: :text
         SRT: :srt
diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs
index 6c26f34d..d80690b9 100644
--- a/sig/openai/models/audio/translation_create_response.rbs
+++ b/sig/openai/models/audio/translation_create_response.rbs
@@ -5,9 +5,7 @@ module OpenAI
         OpenAI::Models::Audio::Translation
         | OpenAI::Models::Audio::TranslationVerbose

-      module TranslationCreateResponse
-        extend OpenAI::Union
-
+      class TranslationCreateResponse < OpenAI::Union
         def self.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]
       end
     end
diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs
index 7c47323a..72b67344 100644
--- a/sig/openai/models/audio_model.rbs
+++ b/sig/openai/models/audio_model.rbs
@@ -3,9 +3,7 @@ module OpenAI
     type audio_model =
       :"whisper-1" | :"gpt-4o-transcribe" | :"gpt-4o-mini-transcribe"

-    module AudioModel
-      extend OpenAI::Enum
-
+    class AudioModel < OpenAI::Enum
       WHISPER_1: :"whisper-1"
       GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe"
       GPT_4O_MINI_TRANSCRIBE: :"gpt-4o-mini-transcribe"
diff --git a/sig/openai/models/audio_response_format.rbs b/sig/openai/models/audio_response_format.rbs
index ee7b583f..e91a52b8 100644
--- a/sig/openai/models/audio_response_format.rbs
+++ b/sig/openai/models/audio_response_format.rbs
@@ -2,9 +2,7 @@ module OpenAI
   module Models
     type audio_response_format = :json | :text | :srt | :verbose_json | :vtt

-    module AudioResponseFormat
-      extend OpenAI::Enum
-
+    class AudioResponseFormat < OpenAI::Enum
       JSON: :json
       TEXT: :text
       SRT: :srt
diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs
index 2a3d4888..278b30f0 100644
--- a/sig/openai/models/batch.rbs
+++ b/sig/openai/models/batch.rbs
@@ -128,9 +128,7 @@ module OpenAI
         | :cancelling
         | :cancelled

-      module Status
-        extend OpenAI::Enum
-
+      class Status < OpenAI::Enum
         VALIDATING: :validating
         FAILED: :failed
         IN_PROGRESS: :in_progress
diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs
index c73264e6..5a459418 100644
--- a/sig/openai/models/batch_create_params.rbs
+++ b/sig/openai/models/batch_create_params.rbs
@@ -33,9 +33,7 @@ module OpenAI

       type completion_window = :"24h"

-      module CompletionWindow
-        extend OpenAI::Enum
-
+      class CompletionWindow < OpenAI::Enum
         NUMBER_24H: :"24h"

         def self.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window]
@@ -47,9 +45,7 @@
         | :"/v1/embeddings"
         | :"/v1/completions"

-      module Endpoint
-        extend OpenAI::Enum
-
+      class Endpoint < OpenAI::Enum
         V1_RESPONSES: :"/v1/responses"
         V1_CHAT_COMPLETIONS: :"/v1/chat/completions"
         V1_EMBEDDINGS: :"/v1/embeddings"
diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs
index 9a108229..d4a0708e 100644
--- a/sig/openai/models/beta/assistant_create_params.rbs
+++ b/sig/openai/models/beta/assistant_create_params.rbs
@@ -66,9 +66,7 @@ module OpenAI

       type model = String | OpenAI::Models::chat_model

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         def self.variants: -> [String, OpenAI::Models::chat_model]
       end

diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs
index c8869670..89fbdc93 100644
--- a/sig/openai/models/beta/assistant_list_params.rbs
+++ b/sig/openai/models/beta/assistant_list_params.rbs
@@ -44,9 +44,7 @@ module OpenAI

       type order = :asc | :desc

-      module Order
-        extend OpenAI::Enum
-
+      class Order < OpenAI::Enum
         ASC: :asc
         DESC: :desc

diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs
index d5b4a073..158a3a03 100644
--- a/sig/openai/models/beta/assistant_response_format_option.rbs
+++ b/sig/openai/models/beta/assistant_response_format_option.rbs
@@ -7,9 +7,7 @@ module OpenAI
         | OpenAI::Models::ResponseFormatJSONObject
         | OpenAI::Models::ResponseFormatJSONSchema

-      module AssistantResponseFormatOption
-        extend OpenAI::Union
-
+      class AssistantResponseFormatOption < OpenAI::Union
         def self.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
       end
     end
diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs
index 4ed62507..3cff7d58 100644
--- a/sig/openai/models/beta/assistant_stream_event.rbs
+++ b/sig/openai/models/beta/assistant_stream_event.rbs
@@ -27,9 +27,7 @@ module OpenAI
         | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete
         | OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent

-      module AssistantStreamEvent
-        extend OpenAI::Union
-
+      class AssistantStreamEvent < OpenAI::Union
         type thread_created =
           {
             data: OpenAI::Models::Beta::Thread,
diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs
index 48827d7f..5421e7bc 100644
--- a/sig/openai/models/beta/assistant_tool.rbs
+++ b/sig/openai/models/beta/assistant_tool.rbs
@@ -6,9 +6,7 @@ module OpenAI
         | OpenAI::Models::Beta::FileSearchTool
         | OpenAI::Models::Beta::FunctionTool

-      module AssistantTool
-        extend OpenAI::Union
-
+      class AssistantTool < OpenAI::Union
         def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
       end
     end
diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs
index ab690c71..eabceb53 100644
--- a/sig/openai/models/beta/assistant_tool_choice.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice.rbs
@@ -25,9 +25,7 @@ module OpenAI

       type type_ = :function | :code_interpreter | :file_search

-      module Type
-        extend OpenAI::Enum
-
+      class Type < OpenAI::Enum
         FUNCTION: :function
         CODE_INTERPRETER: :code_interpreter
         FILE_SEARCH: :file_search
diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs
index ee421612..f7886116 100644
--- a/sig/openai/models/beta/assistant_tool_choice_option.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs
@@ -5,14 +5,10 @@ module OpenAI
         OpenAI::Models::Beta::AssistantToolChoiceOption::auto
         | OpenAI::Models::Beta::AssistantToolChoice

-      module AssistantToolChoiceOption
-        extend OpenAI::Union
-
+      class AssistantToolChoiceOption < OpenAI::Union
         type auto = :none | :auto | :required

-        module Auto
-          extend OpenAI::Enum
-
+        class Auto < OpenAI::Enum
           NONE: :none
           AUTO: :auto
           REQUIRED: :required
diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs
index d3efa3c2..e67f47e5 100644
--- a/sig/openai/models/beta/assistant_update_params.rbs
+++ b/sig/openai/models/beta/assistant_update_params.rbs
@@ -101,9 +101,7 @@ module OpenAI
         | :"gpt-3.5-turbo-0125"
         | :"gpt-3.5-turbo-16k-0613"

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         O3_MINI: :"o3-mini"
         O3_MINI_2025_01_31: :"o3-mini-2025-01-31"
         O1: :o1
diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs
index bd1238f4..b042a2f7 100644
--- a/sig/openai/models/beta/file_search_tool.rbs
+++ b/sig/openai/models/beta/file_search_tool.rbs
@@ -71,9 +71,7 @@ module OpenAI

         type ranker = :auto | :default_2024_08_21

-        module Ranker
-          extend OpenAI::Enum
-
+        class Ranker < OpenAI::Enum
           AUTO: :auto
           DEFAULT_2024_08_21: :default_2024_08_21

diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs
index edd61dcf..ad12cf61 100644
--- a/sig/openai/models/beta/message_stream_event.rbs
+++ b/sig/openai/models/beta/message_stream_event.rbs
@@ -8,9 +8,7 @@ module OpenAI
         | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted
         | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete

-      module MessageStreamEvent
-        extend OpenAI::Union
-
+      class MessageStreamEvent < OpenAI::Union
         type thread_message_created =
           {
             data: OpenAI::Models::Beta::Threads::Message,
diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs
index 9207cecc..51f78cb2 100644
--- a/sig/openai/models/beta/run_step_stream_event.rbs
+++ b/sig/openai/models/beta/run_step_stream_event.rbs
@@ -10,9 +10,7 @@ module OpenAI
         | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled
         | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired

-      module RunStepStreamEvent
-        extend OpenAI::Union
-
+      class RunStepStreamEvent < OpenAI::Union
         type thread_run_step_created =
           {
             data: OpenAI::Models::Beta::Threads::Runs::RunStep,
diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs
index 4081dbf8..46793589 100644
--- a/sig/openai/models/beta/run_stream_event.rbs
+++ b/sig/openai/models/beta/run_stream_event.rbs
@@ -13,9 +13,7 @@ module OpenAI
         | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled
         | OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired

-      module RunStreamEvent
-        extend OpenAI::Union
-
+      class RunStreamEvent < OpenAI::Union
         type thread_run_created =
           {
             data: OpenAI::Models::Beta::Threads::Run,
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index 5c9d3cd3..a7b4922f 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -84,9 +84,7 @@ module OpenAI

       type model = String | OpenAI::Models::chat_model

-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
         def self.variants: -> [String, OpenAI::Models::chat_model]
       end

@@ -146,9 +144,7 @@
             String
             | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]

-          module Content
-            extend OpenAI::Union
-
+          class Content < OpenAI::Union
             type message_content_part_param_array =
               ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]

@@ -159,9 +155,7 @@

           type role = :user | :assistant

-          module Role
-            extend OpenAI::Enum
-
+          class Role < OpenAI::Enum
             USER: :user
             ASSISTANT: :assistant

@@ -196,9 +190,7 @@
               OpenAI::Models::Beta::CodeInterpreterTool
               | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch

-            module Tool
-              extend OpenAI::Union
-
+            class Tool < OpenAI::Union
               type file_search = { type: :file_search }

               class FileSearch < OpenAI::BaseModel
@@ -420,9 +412,7 @@
           | OpenAI::Models::Beta::FileSearchTool
           | OpenAI::Models::Beta::FunctionTool

-        module Tool
-          extend OpenAI::Union
-
+        class Tool < OpenAI::Union
           def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
         end

@@ -446,9 +436,7 @@

         type type_ = :auto | :last_messages

-        module Type
-          extend OpenAI::Enum
-
+        class Type < OpenAI::Enum
           AUTO: :auto
           LAST_MESSAGES: :last_messages

diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs
index fb2c4dc7..5acf3300 100644
--- a/sig/openai/models/beta/thread_create_params.rbs
+++ b/sig/openai/models/beta/thread_create_params.rbs
@@ -62,9 +62,7 @@ module OpenAI
             String
             | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]

-          module Content
-            extend OpenAI::Union
-
+          class Content < OpenAI::Union
             type message_content_part_param_array =
               ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]

@@ -75,9 +73,7 @@

           type role = :user | :assistant

-          module Role
-            extend OpenAI::Enum
-
+          class Role < OpenAI::Enum
             USER: :user
             ASSISTANT: :assistant

@@ -112,9 +108,7 @@
               OpenAI::Models::Beta::CodeInterpreterTool
               | OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch

-            module Tool
-              extend OpenAI::Union
-
+            class Tool < OpenAI::Union
               type file_search = { type: :file_search }

               class FileSearch < OpenAI::BaseModel
diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs
index cde96d26..527a6e58 100644
--- a/sig/openai/models/beta/threads/annotation.rbs
+++ b/sig/openai/models/beta/threads/annotation.rbs
@@ -6,9 +6,7 @@ module OpenAI
           OpenAI::Models::Beta::Threads::FileCitationAnnotation
           | OpenAI::Models::Beta::Threads::FilePathAnnotation

-        module Annotation
-          extend OpenAI::Union
-
+        class Annotation < OpenAI::Union
           def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
         end
       end
diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs
index 22600103..21dee9df 100644
--- a/sig/openai/models/beta/threads/annotation_delta.rbs
+++ b/sig/openai/models/beta/threads/annotation_delta.rbs
@@ -6,9 +6,7 @@ module OpenAI
           OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation
           | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation

-        module AnnotationDelta
-          extend OpenAI::Union
-
+        class AnnotationDelta < OpenAI::Union
           def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
         end
       end
diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs
index 0b0987a7..ce6ca5d5 100644
--- a/sig/openai/models/beta/threads/image_file.rbs
+++ b/sig/openai/models/beta/threads/image_file.rbs
@@ -26,9 +26,7 @@ module OpenAI

          type detail = :auto | :low | :high

-         module Detail
-           extend OpenAI::Enum
-
+         class Detail < OpenAI::Enum
            AUTO: :auto
            LOW: :low
            HIGH: :high
diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs
index d0870c96..3b996b60 100644
--- a/sig/openai/models/beta/threads/image_file_delta.rbs
+++ b/sig/openai/models/beta/threads/image_file_delta.rbs
@@ -28,9 +28,7 @@ module OpenAI

          type detail = :auto | :low | :high

-         module Detail
-           extend OpenAI::Enum
-
+         class Detail < OpenAI::Enum
            AUTO: :auto
            LOW: :low
            HIGH: :high
diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs
index 36929cf8..9aec42d8 100644
--- a/sig/openai/models/beta/threads/image_url.rbs
+++ b/sig/openai/models/beta/threads/image_url.rbs
@@ -26,9 +26,7 @@ module OpenAI

          type detail = :auto | :low | :high

-         module Detail
-           extend OpenAI::Enum
-
+         class Detail < OpenAI::Enum
            AUTO: :auto
            LOW: :low
            HIGH: :high
diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs
index 3f1fcbdc..c7d07d96 100644
--- a/sig/openai/models/beta/threads/image_url_delta.rbs
+++ b/sig/openai/models/beta/threads/image_url_delta.rbs
@@ -28,9 +28,7 @@ module OpenAI

          type detail = :auto | :low | :high

-         module Detail
-           extend OpenAI::Enum
-
+         class Detail < OpenAI::Enum
            AUTO: :auto
            LOW: :low
            HIGH: :high
diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs
index 29fa0135..e6c155a8 100644
--- a/sig/openai/models/beta/threads/message.rbs
+++ b/sig/openai/models/beta/threads/message.rbs
@@ -96,9 +96,7 @@ module OpenAI
              OpenAI::Models::Beta::CodeInterpreterTool
              | OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly

-           module Tool
-             extend OpenAI::Union
-
+           class Tool < OpenAI::Union
             type assistant_tools_file_search_type_only =
               { type: :file_search }

@@ -135,9 +133,7 @@
            | :run_expired
            | :run_failed

-          module Reason
-            extend OpenAI::Enum
-
+          class Reason < OpenAI::Enum
            CONTENT_FILTER: :content_filter
            MAX_TOKENS: :max_tokens
RUN_CANCELLED: :run_cancelled @@ -150,9 +146,7 @@ module OpenAI type role = :user | :assistant - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER: :user ASSISTANT: :assistant @@ -161,9 +155,7 @@ module OpenAI type status = :in_progress | :incomplete | :completed - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS: :in_progress INCOMPLETE: :incomplete COMPLETED: :completed diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index dc2a9215..252bb7ff 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -8,9 +8,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::TextContentBlock | OpenAI::Models::Beta::Threads::RefusalContentBlock - module MessageContent - extend OpenAI::Union - + class MessageContent < OpenAI::Union def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] end end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index 1357cfba..aab10ba6 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -8,9 +8,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::RefusalDeltaBlock | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - module MessageContentDelta - extend OpenAI::Union - + class MessageContentDelta < OpenAI::Union def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] end end diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs index 76a88822..39228e33 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -7,9 +7,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::ImageURLContentBlock | OpenAI::Models::Beta::Threads::TextContentBlockParam - module MessageContentPartParam - extend OpenAI::Union - + class MessageContentPartParam < OpenAI::Union def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 3e6288ac..fb2276f9 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -37,9 +37,7 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - module Content - extend OpenAI::Union - + class Content < OpenAI::Union type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -50,9 +48,7 @@ module OpenAI type role = :user | :assistant - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER: :user ASSISTANT: :assistant @@ -87,9 +83,7 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - module Tool - 
extend OpenAI::Union - + class Tool < OpenAI::Union type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index 0487cbfd..e053ee9b 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -30,9 +30,7 @@ module OpenAI type role = :user | :assistant - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER: :user ASSISTANT: :assistant diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index c8146f4e..dca2c571 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -51,9 +51,7 @@ module OpenAI type order = :asc | :desc - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index e84c343f..deae5170 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -140,9 +140,7 @@ module OpenAI type reason = :max_completion_tokens | :max_prompt_tokens - module Reason - extend OpenAI::Enum - + class Reason < OpenAI::Enum MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens @@ -170,9 +168,7 @@ module OpenAI type code = :server_error | :rate_limit_exceeded | :invalid_prompt - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt @@ -235,9 +231,7 @@ module OpenAI type type_ = :auto | :last_messages - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index 5dfe9de6..c9f5247b 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -121,9 +121,7 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - module Content - extend OpenAI::Union - + class Content < OpenAI::Union type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -134,9 +132,7 @@ module OpenAI type role = :user | :assistant - module Role - extend OpenAI::Enum - + class Role < OpenAI::Enum USER: :user ASSISTANT: :assistant @@ -171,9 +167,7 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - module Tool - extend OpenAI::Union - + class Tool < OpenAI::Union type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -191,9 +185,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model - module Model - extend OpenAI::Union - + class Model < OpenAI::Union def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -217,9 +209,7 @@ module OpenAI type type_ = :auto | :last_messages - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index 54dd93bc..fc43edb9 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ 
b/sig/openai/models/beta/threads/run_list_params.rbs
@@ -45,9 +45,7 @@ module OpenAI
 
           type order = :asc | :desc
 
-          module Order
-            extend OpenAI::Enum
-
+          class Order < OpenAI::Enum
             ASC: :asc
             DESC: :desc
 
diff --git a/sig/openai/models/beta/threads/run_status.rbs b/sig/openai/models/beta/threads/run_status.rbs
index d7433b66..cfc75c7b 100644
--- a/sig/openai/models/beta/threads/run_status.rbs
+++ b/sig/openai/models/beta/threads/run_status.rbs
@@ -13,9 +13,7 @@ module OpenAI
         | :incomplete
         | :expired
 
-      module RunStatus
-        extend OpenAI::Enum
-
+      class RunStatus < OpenAI::Enum
         QUEUED: :queued
         IN_PROGRESS: :in_progress
         REQUIRES_ACTION: :requires_action
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
index ddc03a7d..67d18341 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
@@ -47,9 +47,7 @@ module OpenAI
               OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs
               | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image
 
-            module Output
-              extend OpenAI::Union
-
+            class Output < OpenAI::Union
               type logs = { logs: String, type: :logs }
 
               class Logs < OpenAI::BaseModel
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
index 81324a83..d8884223 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
@@ -63,9 +63,7 @@ module OpenAI
             OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs
             | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage
 
-          module Output
-            extend OpenAI::Union
-
+          class Output < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
           end
         end
diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
index 8952fadb..5b6c9102 100644
--- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
@@ -71,9 +71,7 @@ module OpenAI
 
             type ranker = :auto | :default_2024_08_21
 
-            module Ranker
-              extend OpenAI::Enum
-
+            class Ranker < OpenAI::Enum
               AUTO: :auto
               DEFAULT_2024_08_21: :default_2024_08_21
 
@@ -137,9 +135,7 @@ module OpenAI
 
                 type type_ = :text
 
-                module Type
-                  extend OpenAI::Enum
-
+                class Type < OpenAI::Enum
                   TEXT: :text
 
                   def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_]
diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs
index 36dd9a60..4216d33b 100644
--- a/sig/openai/models/beta/threads/runs/run_step.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step.rbs
@@ -2,6 +2,7 @@ module OpenAI
   module Models
     module Beta
       module Threads
+        class RunStep = Runs::RunStep
 
         module Runs
 
@@ -99,9 +100,7 @@ module OpenAI
 
             type code = :server_error | :rate_limit_exceeded
 
-            module Code
-              extend OpenAI::Enum
-
+            class Code < OpenAI::Enum
               SERVER_ERROR: :server_error
               RATE_LIMIT_EXCEEDED: :rate_limit_exceeded
 
@@ -112,9 +111,7 @@ module OpenAI
           type status =
             :in_progress | :cancelled | :failed | :completed | :expired
 
-          module Status
-            extend OpenAI::Enum
-
+          class Status < OpenAI::Enum
             IN_PROGRESS: :in_progress
             CANCELLED: :cancelled
             FAILED: :failed
@@ -128,17 +125,13 @@ module OpenAI
             OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails
             | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails
 
-          module StepDetails
-            extend OpenAI::Union
-
+          class StepDetails < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
           end
 
           type type_ = :message_creation | :tool_calls
 
-          module Type
-            extend OpenAI::Enum
-
+          class Type < OpenAI::Enum
             MESSAGE_CREATION: :message_creation
             TOOL_CALLS: :tool_calls
 
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
index 171b4303..7f45537b 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
@@ -2,6 +2,7 @@ module OpenAI
   module Models
     module Beta
       module Threads
+        class RunStepDelta = Runs::RunStepDelta
 
         module Runs
 
@@ -27,9 +28,7 @@ module OpenAI
             OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta
             | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject
 
-          module StepDetails
-            extend OpenAI::Union
-
+          class StepDetails < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
           end
         end
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
index 295594a8..f3f75aab 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
@@ -2,6 +2,7 @@ module OpenAI
   module Models
     module Beta
       module Threads
+        class RunStepDeltaEvent = Runs::RunStepDeltaEvent
 
         module Runs
 
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
index d1daf15e..5dd59c8c 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
@@ -2,6 +2,7 @@ module OpenAI
   module Models
     module Beta
       module Threads
+        class RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta
 
         module Runs
 
diff --git a/sig/openai/models/beta/threads/runs/run_step_include.rbs b/sig/openai/models/beta/threads/runs/run_step_include.rbs
index ed1e3934..be00b41f 100644
--- a/sig/openai/models/beta/threads/runs/run_step_include.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_include.rbs
@@ -2,15 +2,14 @@ module OpenAI
   module Models
     module Beta
       module Threads
-        module RunStepInclude = Runs::RunStepInclude
+
+        class RunStepInclude = Runs::RunStepInclude
 
         module Runs
           type run_step_include =
             :"step_details.tool_calls[*].file_search.results[*].content"
 
-          module RunStepInclude
-            extend OpenAI::Enum
-
+          class RunStepInclude < OpenAI::Enum
             STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT: :"step_details.tool_calls[*].file_search.results[*].content"
 
             def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include]
diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs
index 641b3530..bd65efbf 100644
--- a/sig/openai/models/beta/threads/runs/step_list_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs
@@ -58,9 +58,7 @@ module OpenAI
 
           type order = :asc | :desc
 
-          module Order
-            extend OpenAI::Enum
-
+          class Order < OpenAI::Enum
             ASC: :asc
             DESC: :desc
 
diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs
index 081dbbdf..42300b3c 100644
--- a/sig/openai/models/beta/threads/runs/tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call.rbs
@@ -8,9 +8,7 @@ module OpenAI
             | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall
             | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall
 
-          module ToolCall
-            extend OpenAI::Union
-
+          class ToolCall < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]
           end
         end
diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
index 472aee5f..6c3c0ec7 100644
--- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
@@ -8,9 +8,7 @@ module OpenAI
             | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta
             | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta
 
-          module ToolCallDelta
-            extend OpenAI::Union
-
+          class ToolCallDelta < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]
           end
         end
diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs
index ed15fbfe..6c256523 100644
--- a/sig/openai/models/chat/chat_completion.rbs
+++ b/sig/openai/models/chat/chat_completion.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletion = Chat::ChatCompletion
 
     module Chat
 
@@ -80,9 +81,7 @@ module OpenAI
           type finish_reason =
             :stop | :length | :tool_calls | :content_filter | :function_call
 
-          module FinishReason
-            extend OpenAI::Enum
-
+          class FinishReason < OpenAI::Enum
             STOP: :stop
             LENGTH: :length
             TOOL_CALLS: :tool_calls
@@ -114,9 +113,7 @@ module OpenAI
 
         type service_tier = :scale | :default
 
-        module ServiceTier
-          extend OpenAI::Enum
-
+        class ServiceTier < OpenAI::Enum
           SCALE: :scale
           DEFAULT: :default
 
diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
index e05d98b5..fbe3c3f8 100644
--- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam
 
     module Chat
 
@@ -61,9 +62,7 @@ module OpenAI
           String
           | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
          type array_of_content_part_array =
            ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]
 
@@ -73,9 +72,7 @@ module OpenAI
             OpenAI::Models::Chat::ChatCompletionContentPartText
             | OpenAI::Models::Chat::ChatCompletionContentPartRefusal
 
-          module ArrayOfContentPart
-            extend OpenAI::Union
-
+          class ArrayOfContentPart < OpenAI::Union
             def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal]
           end
 
diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs
index a3b3cda4..856fe6c8 100644
--- a/sig/openai/models/chat/chat_completion_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_audio.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionAudio = Chat::ChatCompletionAudio
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs
index 5f2424d0..d90d22b0 100644
--- a/sig/openai/models/chat/chat_completion_audio_param.rbs
+++ b/sig/openai/models/chat/chat_completion_audio_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionAudioParam = Chat::ChatCompletionAudioParam
 
     module Chat
 
@@ -23,9 +24,7 @@ module OpenAI
 
        type format_ = :wav | :mp3 | :flac | :opus | :pcm16
 
-        module Format
-          extend OpenAI::Enum
-
+        class Format < OpenAI::Enum
           WAV: :wav
           MP3: :mp3
           FLAC: :flac
@@ -38,9 +37,7 @@ module OpenAI
         type voice =
           :alloy | :ash | :ballad | :coral | :echo | :sage | :shimmer | :verse
 
-        module Voice
-          extend OpenAI::Enum
-
+        class Voice < OpenAI::Enum
           ALLOY: :alloy
           ASH: :ash
           BALLAD: :ballad
diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs
index c85a596e..fa2494d9 100644
--- a/sig/openai/models/chat/chat_completion_chunk.rbs
+++ b/sig/openai/models/chat/chat_completion_chunk.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionChunk = Chat::ChatCompletionChunk
 
     module Chat
 
@@ -133,9 +134,7 @@ module OpenAI
             type role = :developer | :system | :user | :assistant | :tool
 
-            module Role
-              extend OpenAI::Enum
-
+            class Role < OpenAI::Enum
               DEVELOPER: :developer
               SYSTEM: :system
               USER: :user
@@ -199,9 +198,7 @@ module OpenAI
 
               type type_ = :function
 
-              module Type
-                extend OpenAI::Enum
-
+              class Type < OpenAI::Enum
                 FUNCTION: :function
 
                 def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_]
@@ -212,9 +209,7 @@ module OpenAI
           type finish_reason =
             :stop | :length | :tool_calls | :content_filter | :function_call
 
-          module FinishReason
-            extend OpenAI::Enum
-
+          class FinishReason < OpenAI::Enum
             STOP: :stop
             LENGTH: :length
             TOOL_CALLS: :tool_calls
@@ -246,9 +241,7 @@ module OpenAI
 
        type service_tier = :scale | :default
 
-        module ServiceTier
-          extend OpenAI::Enum
-
+        class ServiceTier < OpenAI::Enum
           SCALE: :scale
           DEFAULT: :default
 
diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs
index 348780aa..01231844 100644
--- a/sig/openai/models/chat/chat_completion_content_part.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part.rbs
@@ -1,6 +1,7 @@
 module OpenAI
   module Models
-    module ChatCompletionContentPart = Chat::ChatCompletionContentPart
+
+    class ChatCompletionContentPart = Chat::ChatCompletionContentPart
 
     module Chat
       type chat_completion_content_part =
@@ -9,9 +10,7 @@ module OpenAI
         | OpenAI::Models::Chat::ChatCompletionContentPartInputAudio
         | OpenAI::Models::Chat::ChatCompletionContentPart::File
 
-      module ChatCompletionContentPart
-        extend OpenAI::Union
-
+      class ChatCompletionContentPart < OpenAI::Union
         type file =
           {
             file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File,
diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs
index 1bde5081..b80ee986 100644
--- a/sig/openai/models/chat/chat_completion_content_part_image.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage
 
     module Chat
 
@@ -45,9 +46,7 @@ module OpenAI
 
          type detail = :auto | :low | :high
 
-          module Detail
-            extend OpenAI::Enum
-
+          class Detail < OpenAI::Enum
             AUTO: :auto
             LOW: :low
             HIGH: :high
diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
index 85902db7..bb409774 100644
--- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio
 
     module Chat
 
@@ -41,9 +42,7 @@ module OpenAI
 
          type format_ = :wav | :mp3
 
-          module Format
-            extend OpenAI::Enum
-
+          class Format < OpenAI::Enum
             WAV: :wav
             MP3: :mp3
 
diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
index e715e480..e69c1ee3 100644
--- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs
index 638f5e4e..9c723c66 100644
--- a/sig/openai/models/chat/chat_completion_content_part_text.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionContentPartText = Chat::ChatCompletionContentPartText
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs
index e776fc71..2d8a2cf3 100644
--- a/sig/openai/models/chat/chat_completion_deleted.rbs
+++ b/sig/openai/models/chat/chat_completion_deleted.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionDeleted = Chat::ChatCompletionDeleted
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
index aa1379e0..e9cbba0a 100644
--- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam
 
     module Chat
 
@@ -30,9 +31,7 @@ module OpenAI
         type content =
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
           type chat_completion_content_part_text_array =
             ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs
index e7067a87..b5e722fb 100644
--- a/sig/openai/models/chat/chat_completion_function_call_option.rbs
+++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs
index fad91c63..b9ac7154 100644
--- a/sig/openai/models/chat/chat_completion_function_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs
index d3668749..b13a6e43 100644
--- a/sig/openai/models/chat/chat_completion_message.rbs
+++ b/sig/openai/models/chat/chat_completion_message.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionMessage = Chat::ChatCompletionMessage
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs
index c8d7612a..a42d88ca 100644
--- a/sig/openai/models/chat/chat_completion_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_message_param.rbs
@@ -1,6 +1,7 @@
 module OpenAI
   module Models
-    module ChatCompletionMessageParam = Chat::ChatCompletionMessageParam
+
+    class ChatCompletionMessageParam = Chat::ChatCompletionMessageParam
 
     module Chat
       type chat_completion_message_param =
@@ -11,9 +12,7 @@ module OpenAI
         | OpenAI::Models::Chat::ChatCompletionToolMessageParam
         | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam
 
-      module ChatCompletionMessageParam
-        extend OpenAI::Union
-
+      class ChatCompletionMessageParam < OpenAI::Union
         def self.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam]
       end
     end
diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
index cd147e2d..4ed4b2d2 100644
--- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs
+++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_modality.rbs b/sig/openai/models/chat/chat_completion_modality.rbs
index 7075d723..e8df5962 100644
--- a/sig/openai/models/chat/chat_completion_modality.rbs
+++ b/sig/openai/models/chat/chat_completion_modality.rbs
@@ -1,13 +1,12 @@
 module OpenAI
   module Models
-    module ChatCompletionModality = Chat::ChatCompletionModality
+
+    class ChatCompletionModality = Chat::ChatCompletionModality
 
     module Chat
       type chat_completion_modality = :text | :audio
 
-      module ChatCompletionModality
-        extend OpenAI::Enum
-
+      class ChatCompletionModality < OpenAI::Enum
         TEXT: :text
         AUDIO: :audio
 
diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
index 62feb1bf..8da9abd2 100644
--- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
+++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs
index f49bc614..e38d1c31 100644
--- a/sig/openai/models/chat/chat_completion_prediction_content.rbs
+++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent
 
     module Chat
 
@@ -24,9 +25,7 @@ module OpenAI
         type content =
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
           type chat_completion_content_part_text_array =
             ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
diff --git a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs
index 28d9e504..0d91a009 100644
--- a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs
+++ b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs
@@ -1,9 +1,11 @@
 module OpenAI
   module Models
-    module ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort
+
+    class ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort
 
     module Chat
-      module ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort
+
+      class ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort
     end
   end
 end
diff --git a/sig/openai/models/chat/chat_completion_role.rbs b/sig/openai/models/chat/chat_completion_role.rbs
index d805ec9e..be395c69 100644
--- a/sig/openai/models/chat/chat_completion_role.rbs
+++ b/sig/openai/models/chat/chat_completion_role.rbs
@@ -1,14 +1,13 @@
 module OpenAI
   module Models
-    module ChatCompletionRole = Chat::ChatCompletionRole
+
+    class ChatCompletionRole = Chat::ChatCompletionRole
 
     module Chat
       type chat_completion_role =
         :developer | :system | :user | :assistant | :tool | :function
 
-      module ChatCompletionRole
-        extend OpenAI::Enum
-
+      class ChatCompletionRole < OpenAI::Enum
         DEVELOPER: :developer
         SYSTEM: :system
         USER: :user
diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs
index 75e77b6c..2719aa2e 100644
--- a/sig/openai/models/chat/chat_completion_store_message.rbs
+++ b/sig/openai/models/chat/chat_completion_store_message.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs
index 7fed3536..ed0721bc 100644
--- a/sig/openai/models/chat/chat_completion_stream_options.rbs
+++ b/sig/openai/models/chat/chat_completion_stream_options.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs
index f889a520..b3ae49f9 100644
--- a/sig/openai/models/chat/chat_completion_system_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam
 
     module Chat
 
@@ -30,9 +31,7 @@ module OpenAI
         type content =
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
           type chat_completion_content_part_text_array =
             ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs
index a076afef..4695ba6a 100644
--- a/sig/openai/models/chat/chat_completion_token_logprob.rbs
+++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs
index d465043d..f690d128 100644
--- a/sig/openai/models/chat/chat_completion_tool.rbs
+++ b/sig/openai/models/chat/chat_completion_tool.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionTool = Chat::ChatCompletionTool
 
     module Chat
 
diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
index 001520b8..e6c246ee 100644
--- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
@@ -1,20 +1,17 @@
 module OpenAI
   module Models
-    module ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption
+
+    class ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption
 
     module Chat
       type chat_completion_tool_choice_option =
         OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto
         | OpenAI::Models::Chat::ChatCompletionNamedToolChoice
 
-      module ChatCompletionToolChoiceOption
-        extend OpenAI::Union
-
+      class ChatCompletionToolChoiceOption < OpenAI::Union
         type auto = :none | :auto | :required
 
-        module Auto
-          extend OpenAI::Enum
-
+        class Auto < OpenAI::Enum
           NONE: :none
           AUTO: :auto
           REQUIRED: :required
diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
index 8dc39541..86a425de 100644
--- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam
 
     module Chat
 
@@ -28,9 +29,7 @@ module OpenAI
         type content =
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
           type chat_completion_content_part_text_array =
             ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
 
diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs
index b359b18e..53192647 100644
--- a/sig/openai/models/chat/chat_completion_user_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam
 
     module Chat
 
@@ -30,9 +31,7 @@ module OpenAI
         type content =
           String | ::Array[OpenAI::Models::Chat::chat_completion_content_part]
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
           type chat_completion_content_part_array =
             ::Array[OpenAI::Models::Chat::chat_completion_content_part]
 
diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs
index 9d777b19..56982b45 100644
--- a/sig/openai/models/chat/completion_create_params.rbs
+++ b/sig/openai/models/chat/completion_create_params.rbs
@@ -166,9 +166,7 @@ module OpenAI
 
        type model = String | OpenAI::Models::chat_model
 
-        module Model
-          extend OpenAI::Union
-
+        class Model < OpenAI::Union
          def self.variants: -> [String, OpenAI::Models::chat_model]
        end
 
@@ -176,14 +174,10 @@ module OpenAI
          OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode
          | OpenAI::Models::Chat::ChatCompletionFunctionCallOption
 
-        module FunctionCall
-          extend OpenAI::Union
-
+        class FunctionCall < OpenAI::Union
          type function_call_mode = :none | :auto
 
-          module FunctionCallMode
-            extend OpenAI::Enum
-
+          class FunctionCallMode < OpenAI::Enum
            NONE: :none
            AUTO: :auto
 
@@ -224,9 +218,7 @@ module OpenAI
 
        type modality = :text | :audio
 
-        module Modality
-          extend OpenAI::Enum
-
+        class Modality < OpenAI::Enum
          TEXT: :text
          AUDIO: :audio
 
@@ -238,17 +230,13 @@ module OpenAI
          | OpenAI::Models::ResponseFormatJSONSchema
          | OpenAI::Models::ResponseFormatJSONObject
 
-        module ResponseFormat
-          extend OpenAI::Union
-
+        class ResponseFormat < OpenAI::Union
          def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject]
        end
 
        type service_tier = :auto | :default
 
-        module ServiceTier
-          extend OpenAI::Enum
-
+        class ServiceTier < OpenAI::Enum
          AUTO: :auto
          DEFAULT: :default
 
@@ -257,9 +245,7 @@ module OpenAI
 
        type stop = (String | ::Array[String])?
 
-        module Stop
-          extend OpenAI::Union
-
+        class Stop < OpenAI::Union
          type string_array = ::Array[String]
 
          StringArray: string_array
 
@@ -291,9 +277,7 @@ module OpenAI
 
          type search_context_size = :low | :medium | :high
 
-          module SearchContextSize
-            extend OpenAI::Enum
-
+          class SearchContextSize < OpenAI::Enum
            LOW: :low
            MEDIUM: :medium
            HIGH: :high
diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs
index bb536c64..9540d790 100644
--- a/sig/openai/models/chat/completion_list_params.rbs
+++ b/sig/openai/models/chat/completion_list_params.rbs
@@ -48,9 +48,7 @@ module OpenAI
 
        type order = :asc | :desc
 
-        module Order
-          extend OpenAI::Enum
-
+        class Order < OpenAI::Enum
          ASC: :asc
          DESC: :desc
 
diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs
index c066a38c..88456945 100644
--- a/sig/openai/models/chat/completions/message_list_params.rbs
+++ b/sig/openai/models/chat/completions/message_list_params.rbs
@@ -39,9 +39,7 @@ module OpenAI
 
          type order = :asc | :desc
 
-          module Order
-            extend OpenAI::Enum
-
+          class Order < OpenAI::Enum
            ASC: :asc
            DESC: :desc
 
diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs
index 1578b25d..75caff71 100644
--- a/sig/openai/models/chat_model.rbs
+++ b/sig/openai/models/chat_model.rbs
@@ -45,9 +45,7 @@ module OpenAI
      | :"gpt-3.5-turbo-0125"
      | :"gpt-3.5-turbo-16k-0613"
 
-    module ChatModel
-      extend OpenAI::Enum
-
+    class ChatModel < OpenAI::Enum
      O3_MINI: :"o3-mini"
      O3_MINI_2025_01_31: :"o3-mini-2025-01-31"
      O1: :o1
diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs
index 20f33540..a3873e2d 100644
--- a/sig/openai/models/comparison_filter.rbs
+++ b/sig/openai/models/comparison_filter.rbs
@@ -24,9 +24,7 @@ module OpenAI
 
      type type_ = :eq | :ne | :gt | :gte | :lt | :lte
 
-      module Type
-        extend OpenAI::Enum
-
+      class Type < OpenAI::Enum
        EQ: :eq
        NE: :ne
        GT: :gt
@@ -39,9 +37,7 @@ module OpenAI
 
      type value = String | Float | bool
 
-      module Value
-        extend OpenAI::Union
-
+      class Value < OpenAI::Union
        def self.variants: -> [String, Float, bool]
      end
    end
diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs
index 700baf43..05e3ac68 100644
--- a/sig/openai/models/completion_choice.rbs
+++ b/sig/openai/models/completion_choice.rbs
@@ -28,9 +28,7 @@ module OpenAI
 
      type finish_reason = :stop | :length | :content_filter
 
-      module FinishReason
-        extend OpenAI::Enum
-
+      class FinishReason < OpenAI::Enum
        STOP: :stop
        LENGTH: :length
        CONTENT_FILTER: :content_filter
diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs
index 358e6503..fd2677d3 100644
--- a/sig/openai/models/completion_create_params.rbs
+++ b/sig/openai/models/completion_create_params.rbs
@@ -88,9 +88,7 @@ module OpenAI
      type model =
        String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002"
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct"
        DAVINCI_002: :"davinci-002"
        BABBAGE_002: :"babbage-002"
@@ -103,9 +101,7 @@ module OpenAI
      type prompt =
        String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]]
 
-      module Prompt
-        extend OpenAI::Union
-
+      class Prompt < OpenAI::Union
        type string_array = ::Array[String]
 
        StringArray: string_array
@@ -123,9 +119,7 @@ module OpenAI
 
      type stop = (String | ::Array[String])?
 
-      module Stop
-        extend OpenAI::Union
-
+      class Stop < OpenAI::Union
        type string_array = ::Array[String]
 
        StringArray: string_array
diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs
index 80a9c41e..35aba042 100644
--- a/sig/openai/models/compound_filter.rbs
+++ b/sig/openai/models/compound_filter.rbs
@@ -20,17 +20,13 @@ module OpenAI
 
      type filter = OpenAI::Models::ComparisonFilter | top
 
-      module Filter
-        extend OpenAI::Union
-
+      class Filter < OpenAI::Union
        def self.variants: -> [OpenAI::Models::ComparisonFilter, top]
      end
 
      type type_ = :and | :or
 
-      module Type
-        extend OpenAI::Enum
-
+      class Type < OpenAI::Enum
        AND: :and
        OR: :or
 
diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs
index d08604d9..59ae7a9f 100644
--- a/sig/openai/models/embedding_create_params.rbs
+++ b/sig/openai/models/embedding_create_params.rbs
@@ -46,9 +46,7 @@ module OpenAI
      type input =
        String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]]
 
-      module Input
-        extend OpenAI::Union
-
+      class Input < OpenAI::Union
        type string_array = ::Array[String]
 
        StringArray: string_array
@@ -66,17 +64,13 @@ module OpenAI
 
      type model = String | OpenAI::Models::embedding_model
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        def self.variants: -> [String, OpenAI::Models::embedding_model]
      end
 
      type encoding_format = :float | :base64
 
-      module EncodingFormat
-        extend OpenAI::Enum
-
+      class EncodingFormat < OpenAI::Enum
        FLOAT: :float
        BASE64: :base64
 
diff --git a/sig/openai/models/embedding_model.rbs b/sig/openai/models/embedding_model.rbs
index ed029cea..c334a8a5 100644
--- a/sig/openai/models/embedding_model.rbs
+++ b/sig/openai/models/embedding_model.rbs
@@ -5,9 +5,7 @@ module OpenAI
      | :"text-embedding-3-small"
      | :"text-embedding-3-large"
 
-    module EmbeddingModel
-      extend OpenAI::Enum
-
+    class EmbeddingModel < OpenAI::Enum
      TEXT_EMBEDDING_ADA_002: :"text-embedding-ada-002"
      TEXT_EMBEDDING_3_SMALL: :"text-embedding-3-small"
      TEXT_EMBEDDING_3_LARGE: :"text-embedding-3-large"
diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs
index 5efd7f51..d287b675 100644
--- a/sig/openai/models/file_chunking_strategy.rbs
+++ b/sig/openai/models/file_chunking_strategy.rbs
@@ -4,9 +4,7 @@ module OpenAI
      OpenAI::Models::StaticFileChunkingStrategyObject
      | OpenAI::Models::OtherFileChunkingStrategyObject
 
-    module FileChunkingStrategy
-      extend OpenAI::Union
-
+    class FileChunkingStrategy < OpenAI::Union
      def self.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject]
    end
  end
diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs
index 818b7c90..f5f9f28a 100644
--- a/sig/openai/models/file_chunking_strategy_param.rbs
+++ b/sig/openai/models/file_chunking_strategy_param.rbs
@@ -4,9 +4,7 @@ module OpenAI
      OpenAI::Models::AutoFileChunkingStrategyParam
      | OpenAI::Models::StaticFileChunkingStrategyObjectParam
 
-    module FileChunkingStrategyParam
-      extend OpenAI::Union
-
+    class FileChunkingStrategyParam < OpenAI::Union
      def self.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
    end
  end
diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs
index 9d793064..2f36b51c 100644
--- a/sig/openai/models/file_list_params.rbs
+++ b/sig/openai/models/file_list_params.rbs
@@ -43,9 +43,7 @@ module OpenAI
 
      type order = :asc | :desc
 
-      module Order
-        extend OpenAI::Enum
-
+      class Order < OpenAI::Enum
        ASC: :asc
        DESC: :desc
 
diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs
index 3bb22b13..f229d68d 100644
--- a/sig/openai/models/file_object.rbs
+++ b/sig/openai/models/file_object.rbs
@@ -59,9 +59,7 @@ module OpenAI
        | :"fine-tune-results"
        | :vision
 
-      module Purpose
-        extend OpenAI::Enum
-
+      class Purpose < OpenAI::Enum
        ASSISTANTS: :assistants
        ASSISTANTS_OUTPUT: :assistants_output
        BATCH: :batch
@@ -75,9 +73,7 @@ module OpenAI
 
      type status = :uploaded | :processed | :error
 
-      module Status
-        extend OpenAI::Enum
-
+      class Status < OpenAI::Enum
        UPLOADED: :uploaded
        PROCESSED: :processed
        ERROR: :error
diff --git a/sig/openai/models/file_purpose.rbs b/sig/openai/models/file_purpose.rbs
index 527e5d11..cf532f5b 100644
--- a/sig/openai/models/file_purpose.rbs
+++ b/sig/openai/models/file_purpose.rbs
@@ -3,9 +3,7 @@ module OpenAI
    type file_purpose =
      :assistants | :batch | :"fine-tune" | :vision | :user_data | :evals
 
-    module FilePurpose
-      extend OpenAI::Enum
-
+    class FilePurpose < OpenAI::Enum
      ASSISTANTS: :assistants
      BATCH: :batch
      FINE_TUNE: :"fine-tune"
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
index b696b25a..3b4f5915 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class FineTuningJob = FineTuning::FineTuningJob
 
     module FineTuning
 
@@ -147,25 +148,19 @@ module OpenAI
 
          type batch_size = :auto | Integer
 
-          module BatchSize
-            extend OpenAI::Union
-
+          class BatchSize < OpenAI::Union
            def self.variants: -> [:auto, Integer]
          end
 
          type learning_rate_multiplier = :auto | Float
 
-          module LearningRateMultiplier
-            extend OpenAI::Union
-
+          class LearningRateMultiplier < OpenAI::Union
            def self.variants: -> [:auto, Float]
          end
 
          type n_epochs = :auto | Integer
 
-          module NEpochs
-            extend OpenAI::Union
-
+          class NEpochs < OpenAI::Union
            def self.variants: -> [:auto, Integer]
          end
        end
@@ -178,9 +173,7 @@ module OpenAI
        | :failed
        | :cancelled
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          VALIDATING_FILES: :validating_files
          QUEUED: :queued
          RUNNING: :running
@@ -287,33 +280,25 @@ module OpenAI
 
            type batch_size = :auto | Integer
 
-            module BatchSize
-              extend OpenAI::Union
-
+            class BatchSize < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
 
            type beta = :auto | Float
 
-            module Beta
-              extend OpenAI::Union
-
+            class Beta < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type learning_rate_multiplier = :auto | Float
 
-            module LearningRateMultiplier
-              extend OpenAI::Union
-
+            class LearningRateMultiplier < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type n_epochs = :auto | Integer
 
-            module NEpochs
-              extend OpenAI::Union
-
+            class NEpochs < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
          end
@@ -373,25 +358,19 @@ module OpenAI
 
            type batch_size = :auto | Integer
 
-            module BatchSize
-              extend OpenAI::Union
-
+            class BatchSize < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
 
            type learning_rate_multiplier = :auto | Float
 
-            module LearningRateMultiplier
-              extend OpenAI::Union
-
+            class LearningRateMultiplier < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type n_epochs = :auto | Integer
 
-            module NEpochs
-              extend OpenAI::Union
-
+            class NEpochs < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
          end
@@ -399,9 +378,7 @@ module OpenAI
 
          type type_ = :supervised | :dpo
 
-          module Type
-            extend OpenAI::Enum
-
+          class Type < OpenAI::Enum
            SUPERVISED: :supervised
            DPO: :dpo
 
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
index e70febc9..239f0331 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class FineTuningJobEvent = FineTuning::FineTuningJobEvent
 
     module FineTuning
 
@@ -49,9 +50,7 @@ module OpenAI
 
        type level = :info | :warn | :error
 
-        module Level
-          extend OpenAI::Enum
-
+        class Level < OpenAI::Enum
          INFO: :info
          WARN: :warn
          ERROR: :error
@@ -61,9 +60,7 @@ module OpenAI
 
        type type_ = :message | :metrics
 
-        module Type
-          extend OpenAI::Enum
-
+        class Type < OpenAI::Enum
          MESSAGE: :message
          METRICS: :metrics
 
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
index ada2b1f0..55ed40a2 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs
@@ -1,8 +1,10 @@
 module OpenAI
   module Models
+    class FineTuningJobIntegration = FineTuning::FineTuningJobIntegration
 
     module FineTuning
+      class FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject
     end
   end
 end
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
index f5fc5cec..bc0a81a5 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration
 
     module FineTuning
 
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
index e156cf05..18b23c37 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject
 
     module FineTuning
 
diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs
index 913d7cd3..8942f62f 100644
--- a/sig/openai/models/fine_tuning/job_create_params.rbs
+++ b/sig/openai/models/fine_tuning/job_create_params.rbs
@@ -67,9 +67,7 @@ module OpenAI
          | :"gpt-3.5-turbo"
          | :"gpt-4o-mini"
 
-        module Model
-          extend OpenAI::Union
-
+        class Model < OpenAI::Union
          BABBAGE_002: :"babbage-002"
          DAVINCI_002: :"davinci-002"
          GPT_3_5_TURBO: :"gpt-3.5-turbo"
@@ -117,25 +115,19 @@ module OpenAI
 
          type batch_size = :auto | Integer
 
-          module BatchSize
-            extend OpenAI::Union
-
+          class BatchSize < OpenAI::Union
            def self.variants: -> [:auto, Integer]
          end
 
          type learning_rate_multiplier = :auto | Float
 
-          module LearningRateMultiplier
-            extend OpenAI::Union
-
+          class LearningRateMultiplier < OpenAI::Union
            def self.variants: -> [:auto, Float]
          end
 
          type n_epochs = :auto | Integer
 
-          module NEpochs
-            extend OpenAI::Union
-
+          class NEpochs < OpenAI::Union
            def self.variants: -> [:auto, Integer]
          end
        end
@@ -284,33 +276,25 @@ module OpenAI
 
            type batch_size = :auto | Integer
 
-            module BatchSize
-              extend OpenAI::Union
-
+            class BatchSize < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
 
            type beta = :auto | Float
 
-            module Beta
-              extend OpenAI::Union
-
+            class Beta < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type learning_rate_multiplier = :auto | Float
 
-            module LearningRateMultiplier
-              extend OpenAI::Union
-
+            class LearningRateMultiplier < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type n_epochs = :auto | Integer
 
-            module NEpochs
-              extend OpenAI::Union
-
+            class NEpochs < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
          end
@@ -370,25 +354,19 @@ module OpenAI
 
            type batch_size = :auto | Integer
 
-            module BatchSize
-              extend OpenAI::Union
-
+            class BatchSize < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
 
            type learning_rate_multiplier = :auto | Float
 
-            module LearningRateMultiplier
-              extend OpenAI::Union
-
+            class LearningRateMultiplier < OpenAI::Union
              def self.variants: -> [:auto, Float]
            end
 
            type n_epochs = :auto | Integer
 
-            module NEpochs
-              extend OpenAI::Union
-
+            class NEpochs < OpenAI::Union
              def self.variants: -> [:auto, Integer]
            end
          end
@@ -396,9 +374,7 @@ module OpenAI
 
          type type_ = :supervised | :dpo
 
-          module Type
-            extend OpenAI::Enum
-
+          class Type < OpenAI::Enum
            SUPERVISED: :supervised
            DPO: :dpo
 
diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs
index 1460783f..7770fc34 100644
--- a/sig/openai/models/image_create_variation_params.rbs
+++ b/sig/openai/models/image_create_variation_params.rbs
@@ -43,17 +43,13 @@ module OpenAI
 
      type model = String | OpenAI::Models::image_model
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        def self.variants: -> [String, OpenAI::Models::image_model]
      end
 
      type response_format = :url | :b64_json
 
-      module ResponseFormat
-        extend OpenAI::Enum
-
+      class ResponseFormat < OpenAI::Enum
        URL: :url
        B64_JSON: :b64_json
 
@@ -62,9 +58,7 @@ module OpenAI
 
      type size = :"256x256" | :"512x512" | :"1024x1024"
 
-      module Size
-        extend OpenAI::Enum
-
+      class Size < OpenAI::Enum
        NUMBER_256X256: :"256x256"
        NUMBER_512X512: :"512x512"
        NUMBER_1024X1024: :"1024x1024"
diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs
index cebbb406..63e0fbf0 100644
--- a/sig/openai/models/image_edit_params.rbs
+++ b/sig/openai/models/image_edit_params.rbs
@@ -53,17 +53,13 @@ module OpenAI
 
      type model = String | OpenAI::Models::image_model
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        def self.variants: -> [String, OpenAI::Models::image_model]
      end
 
      type response_format = :url | :b64_json
 
-      module ResponseFormat
-        extend OpenAI::Enum
-
+      class ResponseFormat < OpenAI::Enum
        URL: :url
        B64_JSON: :b64_json
 
@@ -72,9 +68,7 @@ module OpenAI
 
      type size = :"256x256" | :"512x512" | :"1024x1024"
 
-      module Size
-        extend OpenAI::Enum
-
+      class Size < OpenAI::Enum
        NUMBER_256X256: :"256x256"
        NUMBER_512X512: :"512x512"
        NUMBER_1024X1024: :"1024x1024"
diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs
index ea843f0f..1a3f9162 100644
--- a/sig/openai/models/image_generate_params.rbs
+++ b/sig/openai/models/image_generate_params.rbs
@@ -55,17 +55,13 @@ module OpenAI
 
      type model = String | OpenAI::Models::image_model
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        def self.variants: -> [String, OpenAI::Models::image_model]
      end
 
      type quality = :standard | :hd
 
-      module Quality
-        extend OpenAI::Enum
-
+      class Quality < OpenAI::Enum
        STANDARD: :standard
        HD: :hd
 
@@ -74,9 +70,7 @@ module OpenAI
 
      type response_format = :url | :b64_json
 
-      module ResponseFormat
-        extend OpenAI::Enum
-
+      class ResponseFormat < OpenAI::Enum
        URL: :url
        B64_JSON: :b64_json
 
@@ -86,9 +80,7 @@ module OpenAI
      type size =
        :"256x256" | :"512x512" | :"1024x1024" | :"1792x1024" | :"1024x1792"
 
-      module Size
-        extend OpenAI::Enum
-
+      class Size < OpenAI::Enum
        NUMBER_256X256: :"256x256"
        NUMBER_512X512: :"512x512"
        NUMBER_1024X1024: :"1024x1024"
@@ -100,9 +92,7 @@ module OpenAI
 
      type style = :vivid | :natural
 
-      module Style
-        extend OpenAI::Enum
-
+      class Style < OpenAI::Enum
        VIVID: :vivid
        NATURAL: :natural
 
diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs
index e68710d9..1151fd9b 100644
--- a/sig/openai/models/image_model.rbs
+++ b/sig/openai/models/image_model.rbs
@@ -2,9 +2,7 @@ module OpenAI
  module Models
    type image_model = :"dall-e-2" | :"dall-e-3"
 
-    module ImageModel
-      extend OpenAI::Enum
-
+    class ImageModel < OpenAI::Enum
      DALL_E_2: :"dall-e-2"
      DALL_E_3: :"dall-e-3"
 
diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs
index 9e7a8041..d35f2749 100644
--- a/sig/openai/models/moderation.rbs
+++ b/sig/openai/models/moderation.rbs
@@ -153,9 +153,7 @@ module OpenAI
 
        type harassment = :text
 
-        module Harassment
-          extend OpenAI::Enum
-
+        class Harassment < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment]
@@ -163,9 +161,7 @@ module OpenAI
 
        type harassment_threatening = :text
 
-        module HarassmentThreatening
-          extend OpenAI::Enum
-
+        class HarassmentThreatening < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening]
@@ -173,9 +169,7 @@ module OpenAI
 
        type hate = :text
 
-        module Hate
-          extend OpenAI::Enum
-
+        class Hate < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate]
@@ -183,9 +177,7 @@ module OpenAI
 
        type hate_threatening = :text
 
-        module HateThreatening
-          extend OpenAI::Enum
-
+        class HateThreatening < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening]
@@ -193,9 +185,7 @@ module OpenAI
 
        type illicit = :text
 
-        module Illicit
-          extend OpenAI::Enum
-
+        class Illicit < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit]
@@ -203,9 +193,7 @@ module OpenAI
 
        type illicit_violent = :text
 
-        module IllicitViolent
-          extend OpenAI::Enum
-
+        class IllicitViolent < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent]
@@ -213,9 +201,7 @@ module OpenAI
 
        type self_harm = :text | :image
 
-        module SelfHarm
-          extend OpenAI::Enum
-
+        class SelfHarm < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
@@ -224,9 +210,7 @@ module OpenAI
 
        type self_harm_instruction = :text | :image
 
-        module SelfHarmInstruction
-          extend OpenAI::Enum
-
+        class SelfHarmInstruction < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
@@ -235,9 +219,7 @@ module OpenAI
 
        type self_harm_intent = :text | :image
 
-        module SelfHarmIntent
-          extend OpenAI::Enum
-
+        class SelfHarmIntent < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
@@ -246,9 +228,7 @@ module OpenAI
 
        type sexual = :text | :image
 
-        module Sexual
-          extend OpenAI::Enum
-
+        class Sexual < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
@@ -257,9 +237,7 @@ module OpenAI
 
        type sexual_minor = :text
 
-        module SexualMinor
-          extend OpenAI::Enum
-
+        class SexualMinor < OpenAI::Enum
          TEXT: :text
 
          def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor]
@@ -267,9 +245,7 @@ module OpenAI
 
        type violence = :text | :image
 
-        module Violence
-          extend OpenAI::Enum
-
+        class Violence < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
@@ -278,9 +254,7 @@ module OpenAI
 
        type violence_graphic = :text | :image
 
-        module ViolenceGraphic
-          extend OpenAI::Enum
-
+        class ViolenceGraphic < OpenAI::Enum
          TEXT: :text
          IMAGE: :image
 
diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs
index 41fec7ce..c95e6d3e 100644
--- a/sig/openai/models/moderation_create_params.rbs
+++ b/sig/openai/models/moderation_create_params.rbs
@@ -32,9 +32,7 @@ module OpenAI
      | ::Array[String]
      | ::Array[OpenAI::Models::moderation_multi_modal_input]
 
-      module Input
-        extend OpenAI::Union
-
+      class Input < OpenAI::Union
        type string_array = ::Array[String]
 
        StringArray: string_array
@@ -49,9 +47,7 @@ module OpenAI
 
      type model = String | OpenAI::Models::moderation_model
 
-      module Model
-        extend OpenAI::Union
-
+      class Model < OpenAI::Union
        def self.variants: -> [String, OpenAI::Models::moderation_model]
      end
    end
diff --git a/sig/openai/models/moderation_model.rbs b/sig/openai/models/moderation_model.rbs
index fa7264f2..6fb884f3 100644
--- a/sig/openai/models/moderation_model.rbs
+++ b/sig/openai/models/moderation_model.rbs
@@ -6,9 +6,7 @@ module OpenAI
      | :"text-moderation-latest"
      | :"text-moderation-stable"
 
-    module ModerationModel
-      extend OpenAI::Enum
-
+    class ModerationModel < OpenAI::Enum
      OMNI_MODERATION_LATEST: :"omni-moderation-latest"
      OMNI_MODERATION_2024_09_26: :"omni-moderation-2024-09-26"
      TEXT_MODERATION_LATEST: :"text-moderation-latest"
diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs
index 9388bae0..c98cd3a4 100644
--- a/sig/openai/models/moderation_multi_modal_input.rbs
+++ b/sig/openai/models/moderation_multi_modal_input.rbs
@@ -4,9 +4,7 @@ module OpenAI
      OpenAI::Models::ModerationImageURLInput
      | OpenAI::Models::ModerationTextInput
 
-    module ModerationMultiModalInput
-      extend OpenAI::Union
-
+    class ModerationMultiModalInput < OpenAI::Union
      def self.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]
    end
  end
diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs
index 8452d648..a4184fb6 100644
--- a/sig/openai/models/reasoning.rbs
+++ b/sig/openai/models/reasoning.rbs
@@ -20,9 +20,7 @@ module OpenAI
 
      type generate_summary = :concise | :detailed
 
-      module GenerateSummary
-        extend OpenAI::Enum
-
+      class GenerateSummary < OpenAI::Enum
        CONCISE: :concise
        DETAILED: :detailed
 
diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs
index 27d712d0..57327554 100644
--- a/sig/openai/models/reasoning_effort.rbs
+++ b/sig/openai/models/reasoning_effort.rbs
@@ -2,9 +2,7 @@ module OpenAI
  module Models
    type reasoning_effort = :low | :medium | :high
 
-    module ReasoningEffort
-      extend OpenAI::Enum
-
+    class ReasoningEffort < OpenAI::Enum
      LOW: :low
      MEDIUM: :medium
      HIGH: :high
diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs
index 944fbd51..dbfd8278 100644
--- a/sig/openai/models/responses/computer_tool.rbs
+++ b/sig/openai/models/responses/computer_tool.rbs
@@ -29,9 +29,7 @@ module OpenAI
 
        type environment = :mac | :windows | :ubuntu | :browser
 
-        module Environment
-          extend OpenAI::Enum
-
+        class Environment < OpenAI::Enum
          MAC: :mac
          WINDOWS: :windows
          UBUNTU: :ubuntu
diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs
index 10931508..8e5bc808 100644
--- a/sig/openai/models/responses/easy_input_message.rbs
+++ b/sig/openai/models/responses/easy_input_message.rbs
@@ -31,17 +31,13 @@ module OpenAI
          String
          | OpenAI::Models::Responses::response_input_message_content_list
 
-        module Content
-          extend OpenAI::Union
-
+        class Content < OpenAI::Union
          def self.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list]
        end
 
        type role = :user | :assistant | :system | :developer
 
-        module Role
-          extend OpenAI::Enum
-
+        class Role < OpenAI::Enum
          USER: :user
          ASSISTANT: :assistant
          SYSTEM: :system
@@ -52,9 +48,7 @@ module OpenAI
 
        type type_ = :message
 
-        module Type
-          extend OpenAI::Enum
-
+        class Type < OpenAI::Enum
          MESSAGE: :message
 
          def self.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_]
diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs
index fbea9c27..8c714006 100644
--- a/sig/openai/models/responses/file_search_tool.rbs
+++ b/sig/openai/models/responses/file_search_tool.rbs
@@ -44,9 +44,7 @@ module OpenAI
        type filters =
          OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter
 
-        module Filters
-          extend OpenAI::Union
-
+        class Filters < OpenAI::Union
          def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
        end
 
@@ -76,9 +74,7 @@ module OpenAI
 
          type ranker = :auto | :"default-2024-11-15"
 
-          module Ranker
-            extend OpenAI::Enum
-
+          class Ranker < OpenAI::Enum
            AUTO: :auto
            DEFAULT_2024_11_15: :"default-2024-11-15"
 
diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs
index 633f4ef4..2971462e 100644
--- a/sig/openai/models/responses/input_item_list_params.rbs
+++ b/sig/openai/models/responses/input_item_list_params.rbs
@@ -44,9 +44,7 @@ module OpenAI
 
        type order = :asc | :desc
 
-        module Order
-          extend OpenAI::Enum
-
+        class Order < OpenAI::Enum
          ASC: :asc
          DESC: :desc
 
diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs
index 21f27944..39953b70 100644
--- a/sig/openai/models/responses/response.rbs
+++ b/sig/openai/models/responses/response.rbs
@@ -133,9 +133,7 @@ module OpenAI
 
          type reason = :max_output_tokens | :content_filter
 
-          module Reason
-            extend OpenAI::Enum
-
+          class Reason < OpenAI::Enum
            MAX_OUTPUT_TOKENS: :max_output_tokens
            CONTENT_FILTER: :content_filter
 
@@ -148,17 +146,13 @@ module OpenAI
          | OpenAI::Models::Responses::ToolChoiceTypes
          | OpenAI::Models::Responses::ToolChoiceFunction
 
-        module ToolChoice
-          extend OpenAI::Union
-
+        class ToolChoice < OpenAI::Union
          def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
        end
 
        type truncation = :auto | :disabled
 
-        module Truncation
-          extend OpenAI::Enum
-
+        class Truncation < OpenAI::Enum
          AUTO: :auto
          DISABLED: :disabled
 
diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
index 3d2f285b..1201cdfe 100644
--- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
+++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs
@@ -35,9 +35,7 @@ module OpenAI
          OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs
          | OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
 
-        module Result
-          extend OpenAI::Union
-
+        class Result < OpenAI::Union
          type logs = { logs: String, type: :logs }
 
          class Logs < OpenAI::BaseModel
@@ -86,9 +84,7 @@ module OpenAI
 
        type status = :in_progress | :interpreting | :completed
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          INTERPRETING: :interpreting
          COMPLETED: :completed
diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs
index e6b8fd9e..69f3d3e4 100644
--- a/sig/openai/models/responses/response_computer_tool_call.rbs
+++ b/sig/openai/models/responses/response_computer_tool_call.rbs
@@ -46,9 +46,7 @@ module OpenAI
          | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type
          | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait
 
-        module Action
-          extend OpenAI::Union
-
+        class Action < OpenAI::Union
          type click =
            {
              button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button,
@@ -77,9 +75,7 @@ module OpenAI
 
            type button = :left | :right | :wheel | :back | :forward
 
-            module Button
-              extend OpenAI::Enum
-
+            class Button < OpenAI::Enum
              LEFT: :left
              RIGHT: :right
              WHEEL: :wheel
@@ -248,9 +244,7 @@ module OpenAI
 
        type status = :in_progress | :completed | :incomplete
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          COMPLETED: :completed
          INCOMPLETE: :incomplete
@@ -260,9 +254,7 @@ module OpenAI
 
        type type_ = :computer_call
 
-        module Type
-          extend OpenAI::Enum
-
+        class Type < OpenAI::Enum
          COMPUTER_CALL: :computer_call
 
          def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_]
diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
index c4a48c1d..0e737186 100644
--- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
+++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs
@@ -60,9 +60,7 @@ module OpenAI
 
        type status = :in_progress | :completed | :incomplete
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          COMPLETED: :completed
          INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs
index ebfaa85b..c7bb377e 100644
--- a/sig/openai/models/responses/response_content.rbs
+++ b/sig/openai/models/responses/response_content.rbs
@@ -8,9 +8,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseOutputText
        | OpenAI::Models::Responses::ResponseOutputRefusal
 
-      module ResponseContent
-        extend OpenAI::Union
-
+      class ResponseContent < OpenAI::Union
        def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
      end
    end
diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs
index 2f4c1e05..b221cdc0 100644
--- a/sig/openai/models/responses/response_content_part_added_event.rbs
+++ b/sig/openai/models/responses/response_content_part_added_event.rbs
@@ -35,9 +35,7 @@ module OpenAI
          OpenAI::Models::Responses::ResponseOutputText
          | OpenAI::Models::Responses::ResponseOutputRefusal
 
-        module Part
-          extend OpenAI::Union
-
+        class Part < OpenAI::Union
          def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
        end
      end
diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs
index 792dd89a..f7c71025 100644
--- a/sig/openai/models/responses/response_content_part_done_event.rbs
+++ b/sig/openai/models/responses/response_content_part_done_event.rbs
@@ -35,9 +35,7 @@ module OpenAI
          OpenAI::Models::Responses::ResponseOutputText
          | OpenAI::Models::Responses::ResponseOutputRefusal
 
-        module Part
-          extend OpenAI::Union
-
+        class Part < OpenAI::Union
          def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
        end
      end
diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs
index 7336a20e..697640de 100644
--- a/sig/openai/models/responses/response_create_params.rbs
+++ b/sig/openai/models/responses/response_create_params.rbs
@@ -100,9 +100,7 @@ module OpenAI
 
        type input = String | OpenAI::Models::Responses::response_input
 
-        module Input
-          extend OpenAI::Union
-
+        class Input < OpenAI::Union
          def self.variants: -> [String, OpenAI::Models::Responses::response_input]
        end
 
@@ -111,17 +109,13 @@ module OpenAI
          | OpenAI::Models::Responses::ToolChoiceTypes
          | OpenAI::Models::Responses::ToolChoiceFunction
 
-        module ToolChoice
-          extend OpenAI::Union
-
+        class ToolChoice < OpenAI::Union
          def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
        end
 
        type truncation = :auto | :disabled
 
-        module Truncation
-          extend OpenAI::Enum
-
+        class Truncation < OpenAI::Enum
          AUTO: :auto
          DISABLED: :disabled
 
diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs
index c9461327..5d705a42 100644
--- a/sig/openai/models/responses/response_error.rbs
+++ b/sig/openai/models/responses/response_error.rbs
@@ -39,9 +39,7 @@ module OpenAI
        | :failed_to_download_image
        | :image_file_not_found
 
-      module Code
-        extend OpenAI::Enum
-
+      class Code < OpenAI::Enum
        SERVER_ERROR: :server_error
        RATE_LIMIT_EXCEEDED: :rate_limit_exceeded
        INVALID_PROMPT: :invalid_prompt
diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs
index 102ce635..8bcaf1dc 100644
--- a/sig/openai/models/responses/response_file_search_tool_call.rbs
+++ b/sig/openai/models/responses/response_file_search_tool_call.rbs
@@ -34,9 +34,7 @@ module OpenAI
        type status =
          :in_progress | :searching | :completed | :incomplete | :failed
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          SEARCHING: :searching
          COMPLETED: :completed
@@ -86,9 +84,7 @@ module OpenAI
 
          type attribute = String | Float | bool
 
-          module Attribute
-            extend OpenAI::Union
-
+          class Attribute < OpenAI::Union
            def self.variants: -> [String, Float, bool]
          end
        end
diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs
index ffd41786..7a38cb8e 100644
--- a/sig/openai/models/responses/response_format_text_config.rbs
+++ b/sig/openai/models/responses/response_format_text_config.rbs
@@ -6,9 +6,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig
        | OpenAI::Models::ResponseFormatJSONObject
 
-      module ResponseFormatTextConfig
-        extend OpenAI::Union
-
+      class ResponseFormatTextConfig < OpenAI::Union
        def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
      end
    end
diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs
index a726e572..acd89dee 100644
--- a/sig/openai/models/responses/response_function_tool_call.rbs
+++ b/sig/openai/models/responses/response_function_tool_call.rbs
@@ -43,9 +43,7 @@ module OpenAI
 
        type status = :in_progress | :completed | :incomplete
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          COMPLETED: :completed
          INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
index eea6788a..3265f83f 100644
--- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs
+++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
@@ -37,9 +37,7 @@ module OpenAI
 
        type status = :in_progress | :completed | :incomplete
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          COMPLETED: :completed
          INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs
index 301b596c..7da9b40d 100644
--- a/sig/openai/models/responses/response_function_web_search.rbs
+++ b/sig/openai/models/responses/response_function_web_search.rbs
@@ -25,9 +25,7 @@ module OpenAI
 
        type status = :in_progress | :searching | :completed | :failed
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          SEARCHING: :searching
          COMPLETED: :completed
diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs
index df76bd92..b2a08af5 100644
--- a/sig/openai/models/responses/response_includable.rbs
+++ b/sig/openai/models/responses/response_includable.rbs
@@ -6,9 +6,7 @@ module OpenAI
        | :"message.input_image.image_url"
        | :"computer_call_output.output.image_url"
 
-      module ResponseIncludable
-        extend OpenAI::Enum
-
+      class ResponseIncludable < OpenAI::Enum
        FILE_SEARCH_CALL_RESULTS: :"file_search_call.results"
        MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url"
        COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url"
diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs
index e015e2d8..b41c744b 100644
--- a/sig/openai/models/responses/response_input_audio.rbs
+++ b/sig/openai/models/responses/response_input_audio.rbs
@@ -25,9 +25,7 @@ module OpenAI
 
        type format_ = :mp3 | :wav
 
-        module Format
-          extend OpenAI::Enum
-
+        class Format < OpenAI::Enum
          MP3: :mp3
          WAV: :wav
 
diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs
index 6f2fe81c..004cfa5c 100644
--- a/sig/openai/models/responses/response_input_content.rbs
+++ b/sig/openai/models/responses/response_input_content.rbs
@@ -6,9 +6,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseInputImage
        | OpenAI::Models::Responses::ResponseInputFile
 
-      module ResponseInputContent
-        extend OpenAI::Union
-
+      class ResponseInputContent < OpenAI::Union
        def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
      end
    end
diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs
index fd361d9a..24abed14 100644
--- a/sig/openai/models/responses/response_input_image.rbs
+++ b/sig/openai/models/responses/response_input_image.rbs
@@ -29,9 +29,7 @@ module OpenAI
 
        type detail = :high | :low | :auto
 
-        module Detail
-          extend OpenAI::Enum
-
+        class Detail < OpenAI::Enum
          HIGH: :high
          LOW: :low
          AUTO: :auto
diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs
index 20caf979..869b143f 100644
--- a/sig/openai/models/responses/response_input_item.rbs
+++ b/sig/openai/models/responses/response_input_item.rbs
@@ -14,9 +14,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseReasoningItem
        | OpenAI::Models::Responses::ResponseInputItem::ItemReference
 
-      module ResponseInputItem
-        extend OpenAI::Union
-
+      class ResponseInputItem < OpenAI::Union
        type message =
          {
            content: OpenAI::Models::Responses::response_input_message_content_list,
@@ -53,9 +51,7 @@ module OpenAI
 
          type role = :user | :system | :developer
 
-          module Role
-            extend OpenAI::Enum
-
+          class Role < OpenAI::Enum
            USER: :user
            SYSTEM: :system
            DEVELOPER: :developer
@@ -65,9 +61,7 @@ module OpenAI
 
          type status = :in_progress | :completed | :incomplete
 
-          module Status
-            extend OpenAI::Enum
-
+          class Status < OpenAI::Enum
            IN_PROGRESS: :in_progress
            COMPLETED: :completed
            INCOMPLETE: :incomplete
@@ -77,9 +71,7 @@ module OpenAI
 
          type type_ = :message
 
-          module Type
-            extend OpenAI::Enum
-
+          class Type < OpenAI::Enum
            MESSAGE: :message
 
            def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_]
@@ -147,9 +139,7 @@ module OpenAI
 
          type status = :in_progress | :completed | :incomplete
 
-          module Status
-            extend OpenAI::Enum
-
+          class Status < OpenAI::Enum
            IN_PROGRESS: :in_progress
            COMPLETED: :completed
            INCOMPLETE: :incomplete
@@ -196,9 +186,7 @@ module OpenAI
 
          type status = :in_progress | :completed | :incomplete
 
-          module Status
-            extend OpenAI::Enum
-
+          class Status < OpenAI::Enum
            IN_PROGRESS: :in_progress
            COMPLETED: :completed
            INCOMPLETE: :incomplete
diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs
index 2671fd84..45e1f023 100644
--- a/sig/openai/models/responses/response_input_message_item.rbs
+++ b/sig/openai/models/responses/response_input_message_item.rbs
@@ -41,9 +41,7 @@ module OpenAI
 
        type role = :user | :system | :developer
 
-        module Role
-          extend OpenAI::Enum
-
+        class Role < OpenAI::Enum
          USER: :user
          SYSTEM: :system
          DEVELOPER: :developer
@@ -53,9 +51,7 @@ module OpenAI
 
        type status = :in_progress | :completed | :incomplete
 
-        module Status
-          extend OpenAI::Enum
-
+        class Status < OpenAI::Enum
          IN_PROGRESS: :in_progress
          COMPLETED: :completed
          INCOMPLETE: :incomplete
@@ -65,9 +61,7 @@ module OpenAI
 
        type type_ = :message
 
-        module Type
-          extend OpenAI::Enum
-
+        class Type < OpenAI::Enum
          MESSAGE: :message
 
          def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_]
diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs
index 8d2e807d..8fc80907 100644
--- a/sig/openai/models/responses/response_item.rbs
+++ b/sig/openai/models/responses/response_item.rbs
@@ -11,9 +11,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseFunctionToolCallItem
        | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem
 
-      module ResponseItem
-        extend OpenAI::Union
-
+      class ResponseItem < OpenAI::Union
        def self.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem]
      end
    end
diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs
index dbed0410..37f78a23 100644
--- a/sig/openai/models/responses/response_item_list.rbs
+++ b/sig/openai/models/responses/response_item_list.rbs
@@ -1,5 +1,6 @@
 module OpenAI
   module Models
+    class ResponseItemList = Responses::ResponseItemList
 
     module Responses
 
diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs
index 9868493b..e01fe1b2 100644
--- a/sig/openai/models/responses/response_output_item.rbs
+++ b/sig/openai/models/responses/response_output_item.rbs
@@ -9,9 +9,7 @@ module OpenAI
        | OpenAI::Models::Responses::ResponseComputerToolCall
        | OpenAI::Models::Responses::ResponseReasoningItem
 
-      module ResponseOutputItem
-        extend OpenAI::Union
-
+      class ResponseOutputItem < OpenAI::Union
        def self.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
      end
    end
diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs
index 81eebadd..b003219c 100644
--- a/sig/openai/models/responses/response_output_message.rbs
+++ b/sig/openai/models/responses/response_output_message.rbs
@@ -35,17 +35,13 @@ module OpenAI
          OpenAI::Models::Responses::ResponseOutputText
          |
OpenAI::Models::Responses::ResponseOutputRefusal - module Content - extend OpenAI::Union - + class Content < OpenAI::Union def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end type status = :in_progress | :completed | :incomplete - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index 3d499c72..a40aee39 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -28,9 +28,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation | OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - module Annotation - extend OpenAI::Union - + class Annotation < OpenAI::Union type file_citation = { file_id: String, index: Integer, type: :file_citation } diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 17d8480f..23d2dd02 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -45,9 +45,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index 3ecc2fc1..17349a0c 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -3,9 +3,7 @@ module OpenAI module Responses type response_status = :completed | :failed | :in_progress | :incomplete - module ResponseStatus - extend OpenAI::Enum - + class ResponseStatus < OpenAI::Enum COMPLETED: :completed FAILED: :failed IN_PROGRESS: :in_progress diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 8642b35b..0d48dfd6 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -35,9 +35,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent - module ResponseStreamEvent - extend OpenAI::Union - + class ResponseStreamEvent < OpenAI::Union def self.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, 
OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] end end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 17d56582..d01375b4 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -40,9 +40,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - module Annotation - extend OpenAI::Union - + class Annotation < OpenAI::Union type file_citation = { file_id: String, index: Integer, type: :file_citation } diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 7cf475b8..642f7196 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -7,9 +7,7 @@ module OpenAI | OpenAI::Models::Responses::ComputerTool | OpenAI::Models::Responses::WebSearchTool - module Tool - extend OpenAI::Union - + class Tool < OpenAI::Union def self.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] end end diff --git a/sig/openai/models/responses/tool_choice_options.rbs b/sig/openai/models/responses/tool_choice_options.rbs index 412031bf..e902ea2c 100644 --- a/sig/openai/models/responses/tool_choice_options.rbs +++ b/sig/openai/models/responses/tool_choice_options.rbs @@ -3,9 +3,7 @@ module OpenAI module Responses type tool_choice_options = :none | :auto | :required - module ToolChoiceOptions - extend OpenAI::Enum - + class ToolChoiceOptions < OpenAI::Enum NONE: :none AUTO: :auto REQUIRED: :required diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 73f15517..ea1e958d 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -19,9 +19,7 @@ module OpenAI | :computer_use_preview | :web_search_preview_2025_03_11 - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum FILE_SEARCH: :file_search WEB_SEARCH_PREVIEW: :web_search_preview COMPUTER_USE_PREVIEW: :computer_use_preview diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index 20a3b337..abb3f9a1 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ 
b/sig/openai/models/responses/web_search_tool.rbs @@ -29,9 +29,7 @@ module OpenAI type type_ = :web_search_preview | :web_search_preview_2025_03_11 - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 @@ -40,9 +38,7 @@ module OpenAI type search_context_size = :low | :medium | :high - module SearchContextSize - extend OpenAI::Enum - + class SearchContextSize < OpenAI::Enum LOW: :low MEDIUM: :medium HIGH: :high diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 3bfeacd2..582ad254 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -5,18 +5,14 @@ module OpenAI | OpenAI::Models::chat_model | OpenAI::Models::ResponsesModel::union_member2 - module ResponsesModel - extend OpenAI::Union - + class ResponsesModel < OpenAI::Union type union_member2 = :"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" - module UnionMember2 - extend OpenAI::Enum - + class UnionMember2 < OpenAI::Enum O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW: :"computer-use-preview" diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index a5baefff..eead4b34 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -48,9 +48,7 @@ module OpenAI type status = :pending | :completed | :cancelled | :expired - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum PENDING: :pending COMPLETED: :completed CANCELLED: :cancelled diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 2a1f9e38..94f8bb7b 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -1,5 +1,6 @@ module OpenAI module Models + class UploadPart = Uploads::UploadPart module Uploads diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index ba9a3850..2f4e439b 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -91,9 +91,7 @@ module OpenAI type status = :expired | :in_progress | :completed - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum EXPIRED: :expired IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index b9d2ab90..b2ea6f76 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -43,9 +43,7 @@ module OpenAI type order = :asc | :desc - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 17e51e48..0918e4b6 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -49,9 +49,7 @@ module OpenAI type query = String | ::Array[String] - module Query - extend OpenAI::Union - + class Query < OpenAI::Union type string_array = ::Array[String] StringArray: string_array @@ -62,9 +60,7 @@ module OpenAI type filters = OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter - module Filters - extend OpenAI::Union - + class Filters < OpenAI::Union def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end @@ -94,9 +90,7 @@ module OpenAI type 
ranker = :auto | :"default-2024-11-15" - module Ranker - extend OpenAI::Enum - + class Ranker < OpenAI::Enum AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index 497e9d9d..ae59192d 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -32,9 +32,7 @@ module OpenAI type attribute = String | Float | bool - module Attribute - extend OpenAI::Union - + class Attribute < OpenAI::Union def self.variants: -> [String, Float, bool] end @@ -58,9 +56,7 @@ module OpenAI type type_ = :text - module Type - extend OpenAI::Enum - + class Type < OpenAI::Enum TEXT: :text def self.values: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::Content::type_] diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 78f70166..80029298 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -34,9 +34,7 @@ module OpenAI type attribute = String | Float | bool - module Attribute - extend OpenAI::Union - + class Attribute < OpenAI::Union def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index d5bdb26b..e642c6f9 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -56,9 +56,7 @@ module OpenAI type filter = :in_progress | :completed | :failed | :cancelled - module Filter - extend OpenAI::Enum - + class Filter < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed FAILED: :failed @@ -69,9 +67,7 @@ module OpenAI type order = :asc | :desc - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 8ecec280..ac3e0e2c 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -34,9 +34,7 @@ module OpenAI type attribute = String | Float | bool - module Attribute - extend OpenAI::Union - + class Attribute < OpenAI::Union def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 362141a4..0cc01f91 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -52,9 +52,7 @@ module OpenAI type filter = :in_progress | :completed | :failed | :cancelled - module Filter - extend OpenAI::Enum - + class Filter < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed FAILED: :failed @@ -65,9 +63,7 @@ module OpenAI type order = :asc | :desc - module Order - extend OpenAI::Enum - + class Order < OpenAI::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index b85d9f01..6b2b7029 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -26,9 +26,7 @@ module OpenAI type attribute = String | Float | bool - module Attribute - extend 
OpenAI::Union - + class Attribute < OpenAI::Union def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 79b4b73c..79940901 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -1,5 +1,6 @@ module OpenAI module Models + class VectorStoreFile = VectorStores::VectorStoreFile module VectorStores @@ -73,9 +74,7 @@ module OpenAI type code = :server_error | :unsupported_file | :invalid_file - module Code - extend OpenAI::Enum - + class Code < OpenAI::Enum SERVER_ERROR: :server_error UNSUPPORTED_FILE: :unsupported_file INVALID_FILE: :invalid_file @@ -86,9 +85,7 @@ module OpenAI type status = :in_progress | :completed | :cancelled | :failed - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed CANCELLED: :cancelled @@ -99,9 +96,7 @@ module OpenAI type attribute = String | Float | bool - module Attribute - extend OpenAI::Union - + class Attribute < OpenAI::Union def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index cc689671..497f5dc3 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -1,5 +1,6 @@ module OpenAI module Models + class VectorStoreFileBatch = VectorStores::VectorStoreFileBatch module VectorStores @@ -70,9 +71,7 @@ module OpenAI type status = :in_progress | :completed | :cancelled | :failed - module Status - extend OpenAI::Enum - + class Status < OpenAI::Enum IN_PROGRESS: :in_progress COMPLETED: :completed CANCELLED: :cancelled diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs index 19257a53..dde63be2 100644 --- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs @@ -1,5 +1,6 @@ module OpenAI module Models + class VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted module VectorStores diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index bb5fb2a6..1a3c623e 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -3,9 +3,7 @@ require_relative "test_helper" class OpenAI::Test::BaseModelTest < Minitest::Test - module E1 - extend OpenAI::Enum - + class E1 < OpenAI::Enum A = :a B = :b end @@ -244,17 +242,13 @@ class M3 < M1 optional :b, E1, api_name: :renamed_again end - module U1 - extend OpenAI::Union - + class U1 < OpenAI::Union discriminator :type variant :a, M1 variant :b, M3 end - module U2 - extend OpenAI::Union - + class U2 < OpenAI::Union variant A1 variant A3 end @@ -336,16 +330,12 @@ def test_basic_const end end - module E2 - extend OpenAI::Enum - + class E2 < OpenAI::Enum A = :a B = :b end - module U3 - extend OpenAI::Union - + class U3 < OpenAI::Union discriminator :type variant :a, M1 variant :b, M3 @@ -363,9 +353,7 @@ def test_basic_eql assert_equal(U1, U3) end - module U4 - extend OpenAI::Union - + class U4 < OpenAI::Union variant :a, const: :a variant :b, const: :b end From 17ac3742c22bf9acea8707eb0a99b427bf4e29b4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 22:34:17 +0000 
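The signature hunks above all apply one mechanical rewrite: generated RBS declarations move from the `module X` + `extend OpenAI::Union` / `extend OpenAI::Enum` spelling to plain inheritance, `class X < OpenAI::Union` / `class X < OpenAI::Enum`. A minimal sketch of the matching runtime spelling is below; it is not SDK code, and `Color` and `StringOrColor` are made-up names. It assumes the openai gem at this revision of the series (note that patch 045, which follows, flips the base types back to modules while leaving call sites unchanged).

```ruby
require "openai"

# Hypothetical one-off enum and union written in the inheritance spelling
# that the sig hunks above switch to.
class Color < OpenAI::Enum
  RED = :red
  GREEN = :green
end

class StringOrColor < OpenAI::Union
  variant String
  variant Color
end

p Color.values              # => [:red, :green]
p Color === :red            # => true  (an enum matches its member symbols)
p StringOrColor === "blue"  # => true  (a union delegates `===` to each variant)
```

Either spelling leaves the public surface identical, which is why the sig churn touches so many files but no call sites.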
Subject: [PATCH 045/295] chore(internal): bugfix (#51) --- .stats.yml | 2 +- Rakefile | 21 +- lib/openai/base_model.rb | 388 +++++++++--------- lib/openai/models/all_models.rb | 10 +- .../models/audio/speech_create_params.rb | 18 +- lib/openai/models/audio/speech_model.rb | 5 +- .../audio/transcription_create_params.rb | 11 +- .../audio/transcription_create_response.rb | 6 +- .../models/audio/transcription_include.rb | 5 +- .../audio/transcription_stream_event.rb | 6 +- .../models/audio/translation_create_params.rb | 12 +- .../audio/translation_create_response.rb | 5 +- lib/openai/models/audio_model.rb | 5 +- lib/openai/models/audio_response_format.rb | 6 +- lib/openai/models/batch.rb | 6 +- lib/openai/models/batch_create_params.rb | 12 +- .../models/beta/assistant_create_params.rb | 12 +- .../models/beta/assistant_list_params.rb | 6 +- .../beta/assistant_response_format_option.rb | 6 +- .../models/beta/assistant_stream_event.rb | 6 +- lib/openai/models/beta/assistant_tool.rb | 5 +- .../models/beta/assistant_tool_choice.rb | 6 +- .../beta/assistant_tool_choice_option.rb | 12 +- .../models/beta/assistant_update_params.rb | 6 +- lib/openai/models/beta/file_search_tool.rb | 6 +- .../models/beta/message_stream_event.rb | 6 +- .../models/beta/run_step_stream_event.rb | 6 +- lib/openai/models/beta/run_stream_event.rb | 6 +- .../beta/thread_create_and_run_params.rb | 40 +- .../models/beta/thread_create_params.rb | 23 +- lib/openai/models/beta/threads/annotation.rb | 6 +- .../models/beta/threads/annotation_delta.rb | 6 +- lib/openai/models/beta/threads/image_file.rb | 6 +- .../models/beta/threads/image_file_delta.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 6 +- lib/openai/models/beta/threads/message.rb | 23 +- .../models/beta/threads/message_content.rb | 6 +- .../beta/threads/message_content_delta.rb | 6 +- .../threads/message_content_part_param.rb | 6 +- .../beta/threads/message_create_params.rb | 17 +- .../models/beta/threads/message_delta.rb | 6 +- .../beta/threads/message_list_params.rb | 6 +- lib/openai/models/beta/threads/run.rb | 18 +- .../models/beta/threads/run_create_params.rb | 29 +- .../models/beta/threads/run_list_params.rb | 6 +- lib/openai/models/beta/threads/run_status.rb | 6 +- .../runs/code_interpreter_tool_call.rb | 6 +- .../runs/code_interpreter_tool_call_delta.rb | 6 +- .../threads/runs/file_search_tool_call.rb | 12 +- .../models/beta/threads/runs/run_step.rb | 24 +- .../beta/threads/runs/run_step_delta.rb | 6 +- .../beta/threads/runs/run_step_include.rb | 5 +- .../beta/threads/runs/step_list_params.rb | 6 +- .../models/beta/threads/runs/tool_call.rb | 6 +- .../beta/threads/runs/tool_call_delta.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 12 +- ...chat_completion_assistant_message_param.rb | 12 +- .../chat/chat_completion_audio_param.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 24 +- .../chat/chat_completion_content_part.rb | 6 +- .../chat_completion_content_part_image.rb | 6 +- ...hat_completion_content_part_input_audio.rb | 6 +- ...chat_completion_developer_message_param.rb | 6 +- .../chat/chat_completion_message_param.rb | 6 +- .../models/chat/chat_completion_modality.rb | 5 +- .../chat_completion_prediction_content.rb | 6 +- .../models/chat/chat_completion_role.rb | 6 +- .../chat_completion_system_message_param.rb | 6 +- .../chat_completion_tool_choice_option.rb | 12 +- .../chat_completion_tool_message_param.rb | 6 +- .../chat_completion_user_message_param.rb | 6 +- 
.../models/chat/completion_create_params.rb | 47 +-- .../models/chat/completion_list_params.rb | 6 +- .../chat/completions/message_list_params.rb | 6 +- lib/openai/models/chat_model.rb | 5 +- lib/openai/models/comparison_filter.rb | 12 +- lib/openai/models/completion_choice.rb | 6 +- lib/openai/models/completion_create_params.rb | 18 +- lib/openai/models/compound_filter.rb | 12 +- lib/openai/models/embedding_create_params.rb | 18 +- lib/openai/models/embedding_model.rb | 5 +- lib/openai/models/file_chunking_strategy.rb | 6 +- .../models/file_chunking_strategy_param.rb | 6 +- lib/openai/models/file_list_params.rb | 6 +- lib/openai/models/file_object.rb | 12 +- lib/openai/models/file_purpose.rb | 6 +- .../models/fine_tuning/fine_tuning_job.rb | 72 ++-- .../fine_tuning/fine_tuning_job_event.rb | 12 +- .../models/fine_tuning/job_create_params.rb | 72 ++-- .../models/image_create_variation_params.rb | 18 +- lib/openai/models/image_edit_params.rb | 18 +- lib/openai/models/image_generate_params.rb | 30 +- lib/openai/models/image_model.rb | 5 +- lib/openai/models/moderation.rb | 65 +-- lib/openai/models/moderation_create_params.rb | 12 +- lib/openai/models/moderation_model.rb | 5 +- .../models/moderation_multi_modal_input.rb | 6 +- lib/openai/models/reasoning.rb | 6 +- lib/openai/models/reasoning_effort.rb | 6 +- lib/openai/models/responses/computer_tool.rb | 6 +- .../models/responses/easy_input_message.rb | 18 +- .../models/responses/file_search_tool.rb | 12 +- .../responses/input_item_list_params.rb | 6 +- lib/openai/models/responses/response.rb | 18 +- .../response_code_interpreter_tool_call.rb | 12 +- .../responses/response_computer_tool_call.rb | 24 +- ...response_computer_tool_call_output_item.rb | 6 +- .../models/responses/response_content.rb | 6 +- .../response_content_part_added_event.rb | 6 +- .../response_content_part_done_event.rb | 6 +- .../responses/response_create_params.rb | 18 +- lib/openai/models/responses/response_error.rb | 6 +- .../response_file_search_tool_call.rb | 11 +- .../responses/response_format_text_config.rb | 6 +- .../responses/response_function_tool_call.rb | 6 +- ...response_function_tool_call_output_item.rb | 6 +- .../responses/response_function_web_search.rb | 6 +- .../models/responses/response_includable.rb | 6 +- .../models/responses/response_input_audio.rb | 6 +- .../responses/response_input_content.rb | 6 +- .../models/responses/response_input_image.rb | 6 +- .../models/responses/response_input_item.rb | 36 +- .../responses/response_input_message_item.rb | 18 +- lib/openai/models/responses/response_item.rb | 6 +- .../models/responses/response_output_item.rb | 6 +- .../responses/response_output_message.rb | 12 +- .../models/responses/response_output_text.rb | 6 +- .../responses/response_reasoning_item.rb | 6 +- .../models/responses/response_status.rb | 6 +- .../models/responses/response_stream_event.rb | 6 +- .../response_text_annotation_delta_event.rb | 6 +- lib/openai/models/responses/tool.rb | 6 +- .../models/responses/tool_choice_options.rb | 6 +- .../models/responses/tool_choice_types.rb | 6 +- .../models/responses/web_search_tool.rb | 12 +- lib/openai/models/responses_model.rb | 10 +- lib/openai/models/upload.rb | 6 +- lib/openai/models/vector_store.rb | 6 +- lib/openai/models/vector_store_list_params.rb | 6 +- .../models/vector_store_search_params.rb | 17 +- .../models/vector_store_search_response.rb | 11 +- .../vector_stores/file_batch_create_params.rb | 5 +- .../file_batch_list_files_params.rb | 12 +- .../vector_stores/file_create_params.rb | 5 +- 
.../models/vector_stores/file_list_params.rb | 12 +- .../vector_stores/file_update_params.rb | 5 +- .../models/vector_stores/vector_store_file.rb | 17 +- .../vector_stores/vector_store_file_batch.rb | 6 +- rbi/lib/openai/base_model.rbi | 172 ++++---- rbi/lib/openai/models/all_models.rbi | 27 +- .../models/audio/speech_create_params.rbi | 88 ++-- rbi/lib/openai/models/audio/speech_model.rbi | 13 +- .../audio/transcription_create_params.rbi | 68 +-- .../audio/transcription_create_response.rbi | 4 +- .../models/audio/transcription_include.rbi | 9 +- .../audio/transcription_stream_event.rbi | 4 +- .../audio/translation_create_params.rbi | 52 ++- .../audio/translation_create_response.rbi | 4 +- rbi/lib/openai/models/audio_model.rbi | 13 +- .../openai/models/audio_response_format.rbi | 17 +- rbi/lib/openai/models/batch.rbi | 37 +- rbi/lib/openai/models/batch_create_params.rbi | 48 ++- .../models/beta/assistant_create_params.rbi | 32 +- .../models/beta/assistant_list_params.rbi | 22 +- .../beta/assistant_response_format_option.rbi | 4 +- .../models/beta/assistant_stream_event.rbi | 4 +- rbi/lib/openai/models/beta/assistant_tool.rbi | 4 +- .../models/beta/assistant_tool_choice.rbi | 36 +- .../beta/assistant_tool_choice_option.rbi | 28 +- .../models/beta/assistant_update_params.rbi | 113 +++-- .../openai/models/beta/file_search_tool.rbi | 46 ++- .../models/beta/message_stream_event.rbi | 4 +- .../models/beta/run_step_stream_event.rbi | 4 +- .../openai/models/beta/run_stream_event.rbi | 4 +- .../beta/thread_create_and_run_params.rbi | 140 +++++-- .../models/beta/thread_create_params.rbi | 35 +- .../openai/models/beta/threads/annotation.rbi | 4 +- .../models/beta/threads/annotation_delta.rbi | 4 +- .../openai/models/beta/threads/image_file.rbi | 27 +- .../models/beta/threads/image_file_delta.rbi | 31 +- .../openai/models/beta/threads/image_url.rbi | 27 +- .../models/beta/threads/image_url_delta.rbi | 30 +- .../openai/models/beta/threads/message.rbi | 94 +++-- .../models/beta/threads/message_content.rbi | 4 +- .../beta/threads/message_content_delta.rbi | 4 +- .../threads/message_content_part_param.rbi | 4 +- .../beta/threads/message_create_params.rbi | 31 +- .../models/beta/threads/message_delta.rbi | 23 +- .../beta/threads/message_list_params.rbi | 23 +- rbi/lib/openai/models/beta/threads/run.rbi | 155 +++++-- .../models/beta/threads/run_create_params.rbi | 154 +++++-- .../models/beta/threads/run_list_params.rbi | 23 +- .../openai/models/beta/threads/run_status.rbi | 25 +- .../runs/code_interpreter_tool_call.rbi | 4 +- .../runs/code_interpreter_tool_call_delta.rbi | 4 +- .../threads/runs/file_search_tool_call.rbi | 122 +++++- .../models/beta/threads/runs/run_step.rbi | 95 +++-- .../beta/threads/runs/run_step_delta.rbi | 4 +- .../beta/threads/runs/run_step_include.rbi | 13 +- .../beta/threads/runs/step_list_params.rbi | 34 +- .../threads/runs/step_retrieve_params.rbi | 11 +- .../models/beta/threads/runs/tool_call.rbi | 4 +- .../beta/threads/runs/tool_call_delta.rbi | 4 +- .../openai/models/chat/chat_completion.rbi | 58 +-- ...hat_completion_assistant_message_param.rbi | 8 +- .../chat/chat_completion_audio_param.rbi | 74 ++-- .../models/chat/chat_completion_chunk.rbi | 119 ++++-- .../chat/chat_completion_content_part.rbi | 4 +- .../chat_completion_content_part_image.rbi | 37 +- ...at_completion_content_part_input_audio.rbi | 44 +- ...hat_completion_developer_message_param.rbi | 4 +- .../chat/chat_completion_message_param.rbi | 4 +- .../models/chat/chat_completion_modality.rbi | 11 +- 
.../chat_completion_prediction_content.rbi | 4 +- .../models/chat/chat_completion_role.rbi | 19 +- .../chat_completion_system_message_param.rbi | 4 +- .../chat_completion_tool_choice_option.rbi | 27 +- .../chat_completion_tool_message_param.rbi | 4 +- .../chat_completion_user_message_param.rbi | 4 +- .../models/chat/completion_create_params.rbi | 225 +++++++--- .../models/chat/completion_list_params.rbi | 23 +- .../chat/completions/message_list_params.rbi | 39 +- rbi/lib/openai/models/chat_model.rbi | 98 ++--- rbi/lib/openai/models/comparison_filter.rbi | 44 +- rbi/lib/openai/models/completion_choice.rbi | 24 +- .../models/completion_create_params.rbi | 36 +- rbi/lib/openai/models/compound_filter.rbi | 37 +- .../openai/models/embedding_create_params.rbi | 44 +- rbi/lib/openai/models/embedding_model.rbi | 13 +- .../openai/models/file_chunking_strategy.rbi | 4 +- .../models/file_chunking_strategy_param.rbi | 4 +- rbi/lib/openai/models/file_create_params.rbi | 21 +- rbi/lib/openai/models/file_list_params.rbi | 22 +- rbi/lib/openai/models/file_object.rbi | 56 +-- rbi/lib/openai/models/file_purpose.rbi | 19 +- .../models/fine_tuning/fine_tuning_job.rbi | 95 +++-- .../fine_tuning/fine_tuning_job_event.rbi | 48 ++- .../models/fine_tuning/job_create_params.rbi | 93 +++-- .../models/image_create_variation_params.rbi | 64 +-- rbi/lib/openai/models/image_edit_params.rbi | 63 +-- .../openai/models/image_generate_params.rbi | 112 ++--- rbi/lib/openai/models/image_model.rbi | 11 +- rbi/lib/openai/models/moderation.rbi | 319 +++++++++----- .../models/moderation_create_params.rbi | 21 +- rbi/lib/openai/models/moderation_model.rbi | 16 +- .../models/moderation_multi_modal_input.rbi | 4 +- rbi/lib/openai/models/reasoning.rbi | 43 +- rbi/lib/openai/models/reasoning_effort.rbi | 13 +- .../openai/models/responses/computer_tool.rbi | 40 +- .../models/responses/easy_input_message.rbi | 52 ++- .../models/responses/file_search_tool.rbi | 40 +- .../responses/input_item_list_params.rbi | 23 +- rbi/lib/openai/models/responses/response.rbi | 145 +++++-- .../response_code_interpreter_tool_call.rbi | 37 +- .../responses/response_computer_tool_call.rbi | 107 +++-- ...esponse_computer_tool_call_output_item.rbi | 33 +- .../models/responses/response_content.rbi | 4 +- .../response_content_part_added_event.rbi | 4 +- .../response_content_part_done_event.rbi | 4 +- .../responses/response_create_params.rbi | 98 +++-- .../models/responses/response_error.rbi | 73 ++-- .../response_file_search_tool_call.rbi | 35 +- .../responses/response_format_text_config.rbi | 4 +- .../responses/response_function_tool_call.rbi | 49 ++- ...esponse_function_tool_call_output_item.rbi | 43 +- .../response_function_web_search.rbi | 40 +- .../models/responses/response_includable.rbi | 16 +- .../models/responses/response_input_audio.rbi | 35 +- .../responses/response_input_content.rbi | 4 +- .../models/responses/response_input_image.rbi | 45 +- .../models/responses/response_input_item.rbi | 153 ++++--- .../responses/response_input_message_item.rbi | 74 ++-- .../openai/models/responses/response_item.rbi | 4 +- .../models/responses/response_output_item.rbi | 4 +- .../responses/response_output_message.rbi | 29 +- .../models/responses/response_output_text.rbi | 4 +- .../responses/response_reasoning_item.rbi | 25 +- .../responses/response_retrieve_params.rbi | 19 +- .../models/responses/response_status.rbi | 15 +- .../responses/response_stream_event.rbi | 4 +- .../response_text_annotation_delta_event.rbi | 4 +- rbi/lib/openai/models/responses/tool.rbi | 4 
+- .../models/responses/tool_choice_options.rbi | 13 +- .../models/responses/tool_choice_types.rbi | 29 +- .../models/responses/web_search_tool.rbi | 49 ++- rbi/lib/openai/models/responses_model.rbi | 28 +- rbi/lib/openai/models/upload.rbi | 26 +- .../openai/models/upload_create_params.rbi | 8 +- rbi/lib/openai/models/vector_store.rbi | 24 +- .../models/vector_store_list_params.rbi | 22 +- .../models/vector_store_search_params.rbi | 44 +- .../models/vector_store_search_response.rbi | 31 +- .../file_batch_create_params.rbi | 4 +- .../file_batch_list_files_params.rbi | 53 ++- .../vector_stores/file_create_params.rbi | 4 +- .../models/vector_stores/file_list_params.rbi | 50 ++- .../vector_stores/file_update_params.rbi | 4 +- .../vector_stores/vector_store_file.rbi | 75 ++-- .../vector_stores/vector_store_file_batch.rbi | 32 +- rbi/lib/openai/resources/audio/speech.rbi | 6 +- .../openai/resources/audio/transcriptions.rbi | 16 +- .../openai/resources/audio/translations.rbi | 4 +- rbi/lib/openai/resources/batches.rbi | 4 +- rbi/lib/openai/resources/beta/assistants.rbi | 10 +- rbi/lib/openai/resources/beta/threads.rbi | 18 +- .../resources/beta/threads/messages.rbi | 4 +- .../openai/resources/beta/threads/runs.rbi | 28 +- .../resources/beta/threads/runs/steps.rbi | 6 +- rbi/lib/openai/resources/chat/completions.rbi | 38 +- .../resources/chat/completions/messages.rbi | 2 +- rbi/lib/openai/resources/completions.rbi | 4 +- rbi/lib/openai/resources/embeddings.rbi | 4 +- rbi/lib/openai/resources/files.rbi | 4 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 2 +- rbi/lib/openai/resources/images.rbi | 22 +- rbi/lib/openai/resources/moderations.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 26 +- .../resources/responses/input_items.rbi | 2 +- rbi/lib/openai/resources/uploads.rbi | 2 +- rbi/lib/openai/resources/vector_stores.rbi | 2 +- .../resources/vector_stores/file_batches.rbi | 4 +- .../openai/resources/vector_stores/files.rbi | 4 +- sig/openai/base_model.rbs | 28 +- sig/openai/models/all_models.rbs | 8 +- .../models/audio/speech_create_params.rbs | 12 +- sig/openai/models/audio/speech_model.rbs | 4 +- .../audio/transcription_create_params.rbs | 8 +- .../audio/transcription_create_response.rbs | 4 +- .../models/audio/transcription_include.rbs | 4 +- .../audio/transcription_stream_event.rbs | 4 +- .../audio/translation_create_params.rbs | 8 +- .../audio/translation_create_response.rbs | 4 +- sig/openai/models/audio_model.rbs | 4 +- sig/openai/models/audio_response_format.rbs | 4 +- sig/openai/models/batch.rbs | 4 +- sig/openai/models/batch_create_params.rbs | 8 +- .../models/beta/assistant_create_params.rbs | 8 +- .../models/beta/assistant_list_params.rbs | 4 +- .../beta/assistant_response_format_option.rbs | 4 +- .../models/beta/assistant_stream_event.rbs | 4 +- sig/openai/models/beta/assistant_tool.rbs | 4 +- .../models/beta/assistant_tool_choice.rbs | 4 +- .../beta/assistant_tool_choice_option.rbs | 8 +- .../models/beta/assistant_update_params.rbs | 4 +- sig/openai/models/beta/file_search_tool.rbs | 4 +- .../models/beta/message_stream_event.rbs | 4 +- .../models/beta/run_step_stream_event.rbs | 4 +- sig/openai/models/beta/run_stream_event.rbs | 4 +- .../beta/thread_create_and_run_params.rbs | 28 +- .../models/beta/thread_create_params.rbs | 16 +- sig/openai/models/beta/threads/annotation.rbs | 4 +- .../models/beta/threads/annotation_delta.rbs | 4 +- sig/openai/models/beta/threads/image_file.rbs | 4 +- .../models/beta/threads/image_file_delta.rbs | 4 +- 
sig/openai/models/beta/threads/image_url.rbs | 4 +- .../models/beta/threads/image_url_delta.rbs | 4 +- sig/openai/models/beta/threads/message.rbs | 16 +- .../models/beta/threads/message_content.rbs | 4 +- .../beta/threads/message_content_delta.rbs | 4 +- .../threads/message_content_part_param.rbs | 4 +- .../beta/threads/message_create_params.rbs | 12 +- .../models/beta/threads/message_delta.rbs | 4 +- .../beta/threads/message_list_params.rbs | 4 +- sig/openai/models/beta/threads/run.rbs | 12 +- .../models/beta/threads/run_create_params.rbs | 20 +- .../models/beta/threads/run_list_params.rbs | 4 +- sig/openai/models/beta/threads/run_status.rbs | 4 +- .../runs/code_interpreter_tool_call.rbs | 4 +- .../runs/code_interpreter_tool_call_delta.rbs | 4 +- .../threads/runs/file_search_tool_call.rbs | 8 +- .../models/beta/threads/runs/run_step.rbs | 17 +- .../beta/threads/runs/run_step_delta.rbs | 5 +- .../threads/runs/run_step_delta_event.rbs | 1 - .../runs/run_step_delta_message_delta.rbs | 1 - .../beta/threads/runs/run_step_include.rbs | 7 +- .../beta/threads/runs/step_list_params.rbs | 4 +- .../models/beta/threads/runs/tool_call.rbs | 4 +- .../beta/threads/runs/tool_call_delta.rbs | 4 +- sig/openai/models/chat/chat_completion.rbs | 9 +- ...hat_completion_assistant_message_param.rbs | 9 +- .../models/chat/chat_completion_audio.rbs | 1 - .../chat/chat_completion_audio_param.rbs | 9 +- .../models/chat/chat_completion_chunk.rbs | 17 +- .../chat/chat_completion_content_part.rbs | 7 +- .../chat_completion_content_part_image.rbs | 5 +- ...at_completion_content_part_input_audio.rbs | 5 +- .../chat_completion_content_part_refusal.rbs | 1 - .../chat_completion_content_part_text.rbs | 1 - .../models/chat/chat_completion_deleted.rbs | 1 - ...hat_completion_developer_message_param.rbs | 5 +- .../chat_completion_function_call_option.rbs | 1 - ...chat_completion_function_message_param.rbs | 1 - .../models/chat/chat_completion_message.rbs | 1 - .../chat/chat_completion_message_param.rbs | 7 +- .../chat_completion_message_tool_call.rbs | 1 - .../models/chat/chat_completion_modality.rbs | 7 +- .../chat_completion_named_tool_choice.rbs | 1 - .../chat_completion_prediction_content.rbs | 5 +- .../chat/chat_completion_reasoning_effort.rbs | 6 +- .../models/chat/chat_completion_role.rbs | 7 +- .../chat/chat_completion_store_message.rbs | 1 - .../chat/chat_completion_stream_options.rbs | 1 - .../chat_completion_system_message_param.rbs | 5 +- .../chat/chat_completion_token_logprob.rbs | 1 - .../models/chat/chat_completion_tool.rbs | 1 - .../chat_completion_tool_choice_option.rbs | 11 +- .../chat_completion_tool_message_param.rbs | 5 +- .../chat_completion_user_message_param.rbs | 5 +- .../models/chat/completion_create_params.rbs | 32 +- .../models/chat/completion_list_params.rbs | 4 +- .../chat/completions/message_list_params.rbs | 4 +- sig/openai/models/chat_model.rbs | 4 +- sig/openai/models/comparison_filter.rbs | 8 +- sig/openai/models/completion_choice.rbs | 4 +- .../models/completion_create_params.rbs | 12 +- sig/openai/models/compound_filter.rbs | 8 +- sig/openai/models/embedding_create_params.rbs | 12 +- sig/openai/models/embedding_model.rbs | 4 +- sig/openai/models/file_chunking_strategy.rbs | 4 +- .../models/file_chunking_strategy_param.rbs | 4 +- sig/openai/models/file_list_params.rbs | 4 +- sig/openai/models/file_object.rbs | 8 +- sig/openai/models/file_purpose.rbs | 4 +- .../models/fine_tuning/fine_tuning_job.rbs | 49 ++- .../fine_tuning/fine_tuning_job_event.rbs | 9 +- .../fine_tuning_job_integration.rbs 
| 2 - .../fine_tuning_job_wandb_integration.rbs | 1 - ...ne_tuning_job_wandb_integration_object.rbs | 1 - .../models/fine_tuning/job_create_params.rbs | 48 ++- .../models/image_create_variation_params.rbs | 12 +- sig/openai/models/image_edit_params.rbs | 12 +- sig/openai/models/image_generate_params.rbs | 20 +- sig/openai/models/image_model.rbs | 4 +- sig/openai/models/moderation.rbs | 52 ++- .../models/moderation_create_params.rbs | 8 +- sig/openai/models/moderation_model.rbs | 4 +- .../models/moderation_multi_modal_input.rbs | 4 +- sig/openai/models/reasoning.rbs | 4 +- sig/openai/models/reasoning_effort.rbs | 4 +- sig/openai/models/responses/computer_tool.rbs | 4 +- .../models/responses/easy_input_message.rbs | 12 +- .../models/responses/file_search_tool.rbs | 8 +- .../responses/input_item_list_params.rbs | 4 +- sig/openai/models/responses/response.rbs | 12 +- .../response_code_interpreter_tool_call.rbs | 8 +- .../responses/response_computer_tool_call.rbs | 16 +- ...esponse_computer_tool_call_output_item.rbs | 4 +- .../models/responses/response_content.rbs | 4 +- .../response_content_part_added_event.rbs | 4 +- .../response_content_part_done_event.rbs | 4 +- .../responses/response_create_params.rbs | 12 +- .../models/responses/response_error.rbs | 4 +- .../response_file_search_tool_call.rbs | 8 +- .../responses/response_format_text_config.rbs | 4 +- .../responses/response_function_tool_call.rbs | 4 +- ...esponse_function_tool_call_output_item.rbs | 4 +- .../response_function_web_search.rbs | 4 +- .../models/responses/response_includable.rbs | 4 +- .../models/responses/response_input_audio.rbs | 4 +- .../responses/response_input_content.rbs | 4 +- .../models/responses/response_input_image.rbs | 4 +- .../models/responses/response_input_item.rbs | 24 +- .../responses/response_input_message_item.rbs | 12 +- sig/openai/models/responses/response_item.rbs | 4 +- .../models/responses/response_item_list.rbs | 1 - .../models/responses/response_output_item.rbs | 4 +- .../responses/response_output_message.rbs | 8 +- .../models/responses/response_output_text.rbs | 4 +- .../responses/response_reasoning_item.rbs | 4 +- .../models/responses/response_status.rbs | 4 +- .../responses/response_stream_event.rbs | 4 +- .../response_text_annotation_delta_event.rbs | 4 +- sig/openai/models/responses/tool.rbs | 4 +- .../models/responses/tool_choice_options.rbs | 4 +- .../models/responses/tool_choice_types.rbs | 4 +- .../models/responses/web_search_tool.rbs | 8 +- sig/openai/models/responses_model.rbs | 8 +- sig/openai/models/upload.rbs | 4 +- sig/openai/models/uploads/upload_part.rbs | 1 - sig/openai/models/vector_store.rbs | 4 +- .../models/vector_store_list_params.rbs | 4 +- .../models/vector_store_search_params.rbs | 12 +- .../models/vector_store_search_response.rbs | 8 +- .../file_batch_create_params.rbs | 4 +- .../file_batch_list_files_params.rbs | 8 +- .../vector_stores/file_create_params.rbs | 4 +- .../models/vector_stores/file_list_params.rbs | 8 +- .../vector_stores/file_update_params.rbs | 4 +- .../vector_stores/vector_store_file.rbs | 13 +- .../vector_stores/vector_store_file_batch.rbs | 5 +- .../vector_store_file_deleted.rbs | 1 - test/openai/base_model_test.rb | 24 +- 493 files changed, 5698 insertions(+), 3511 deletions(-) diff --git a/.stats.yml b/.stats.yml index 199d46be..7011b57a 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 80 -openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c22f59c66aec7914b6ee653d3098d1c1c8c16c180d2a158e819c8ddbf476f74b.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5ad6884898c07591750dde560118baf7074a59aecd1f367f930c5e42b04e848a.yml diff --git a/Rakefile b/Rakefile index cde192ee..e97d158b 100644 --- a/Rakefile +++ b/Rakefile @@ -35,24 +35,25 @@ multitask(:syntax_tree) do inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? %w[-i''] : %w[-i] uuid = SecureRandom.uuid - # `syntax_tree` has trouble with `rbs`'s class aliases + # `syntax_tree` has trouble with `rbs`'s class & module aliases sed = xargs + %w[sed -E] + inplace + %w[-e] - # annotate class aliases with a unique comment - pre = sed + ["s/class ([^ ]+) = (.+$)/# #{uuid}\\n\\1: \\2/", "--"] + # annotate unprocessable aliases with a unique comment + pre = sed + ["s/(class|module) ([^ ]+) = (.+$)/# \\1 #{uuid}\\n\\2: \\3/", "--"] fmt = xargs + %w[stree write --plugin=rbs --] - # remove the unique comment and transform class aliases to type aliases + # remove the unique comment and unprocessable aliases to type aliases subst = <<~SED - s/# #{uuid}// + s/# (class|module) #{uuid}/\\1/ t l1 b + : l1 - n - s/([^ :]+): (.+$)/class \\1 = \\2/ + N + s/\\n *([^:]+): (.+)$/ \\1 = \\2/ SED - # 1. delete the unique comment - # 2. if deletion happened, branch to label `l1`, else continue - # 3. transform the class alias to a type alias at label `l1` + # for each line: + # 1. try transform the unique comment into `class | module`, if successful, branch to label `l1`. + # 2. at label `l1`, join previously annotated line with `class | module` information. pst = sed + [subst, "--"] # transform class aliases to type aliases, which syntax tree has no trouble with diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 5a63f4fa..a1f9e29c 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -48,7 +48,7 @@ def type_info(spec) type_info(spec.slice(:const, :enum, :union).first&.last) in Proc spec - in OpenAI::Converter | Class | Symbol + in OpenAI::Converter | Module | Symbol -> { spec } in true | false -> { OpenAI::BooleanModel } @@ -81,7 +81,7 @@ def coerce(target, value) else value end - in Class + in Module case target in -> { _1 <= NilClass } nil @@ -144,7 +144,7 @@ def try_strict_coerce(target, value) else [false, false, 0] end - in Class + in Module case [target, value] in [-> { _1 <= NilClass }, _] [true, nil, value.nil? ? 1 : 0] @@ -276,8 +276,6 @@ def try_strict_coerce(value) # @api private # - # @abstract - # # A value from among a specified list of options. OpenAPI enum values map to Ruby # values in the SDK as follows: # @@ -317,74 +315,70 @@ def try_strict_coerce(value) # puts(chat_model) # end # ``` - class Enum - extend OpenAI::Converter + module Enum + include OpenAI::Converter - class << self - # All of the valid Symbol values for this enum. - # - # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) + # All of the valid Symbol values for this enum. + # + # @return [Array] + def values = (@values ||= constants.map { const_get(_1) }) - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values - end + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + private def finalize! 
= values # @param other [Object] # # @return [Boolean] - def self.===(other) = values.include?(other) + def ===(other) = values.include?(other) # @param other [Object] # # @return [Boolean] - def self.==(other) - other.is_a?(Class) && other <= OpenAI::Enum && other.values.to_set == values.to_set + def ==(other) + other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Enum) && other.values.to_set == values.to_set end - class << self - # @api private - # - # @param value [String, Symbol, Object] - # - # @return [Symbol, Object] - def coerce(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - val - else - value - end + # @api private + # + # @param value [String, Symbol, Object] + # + # @return [Symbol, Object] + def coerce(value) + case value + in Symbol | String if values.include?(val = value.to_sym) + val + else + value end + end - # @!parse - # # @api private - # # - # # @param value [Symbol, Object] - # # - # # @return [Symbol, Object] - # def dump(value) = super + # @!parse + # # @api private + # # + # # @param value [Symbol, Object] + # # + # # @return [Symbol, Object] + # def dump(value) = super - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - return [true, value, 1] if values.include?(value) + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + return [true, value, 1] if values.include?(value) - case value - in Symbol | String if values.include?(val = value.to_sym) - [true, val, 1] + case value + in Symbol | String if values.include?(val = value.to_sym) + [true, val, 1] + else + case [value, values.first] + in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] + [false, true, 0] else - case [value, values.first] - in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] - [false, true, 0] - else - [false, false, 0] - end + [false, false, 0] end end end @@ -392,8 +386,6 @@ def try_strict_coerce(value) # @api private # - # @abstract - # # @example # ```ruby # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` @@ -422,89 +414,87 @@ def try_strict_coerce(value) # puts(chat_completion_content_part) # end # ``` - class Union - extend OpenAI::Converter + module Union + include OpenAI::Converter - class << self - # @api private - # - # All of the specified variant info for this union. - # - # @return [Array] - private def known_variants = (@known_variants ||= []) + # @api private + # + # All of the specified variant info for this union. + # + # @return [Array] + private def known_variants = (@known_variants ||= []) - # @api private - # - # @return [Array] - protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } - end + # @api private + # + # @return [Array] + protected def derefed_variants + @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + end - # All of the specified variants for this union. - # - # @return [Array] - def variants - derefed_variants.map(&:last) + # All of the specified variants for this union. 
+ # + # @return [Array] + def variants + derefed_variants.map(&:last) + end + + # @api private + # + # @param property [Symbol] + private def discriminator(property) + case property + in Symbol + @discriminator = property end + end - # @api private - # - # @param property [Symbol] - private def discriminator(property) - case property + # @api private + # + # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def variant(key, spec = nil) + variant_info = + case key in Symbol - @discriminator = property + [key, OpenAI::Converter.type_info(spec)] + in Proc | OpenAI::Converter | Module | Hash + [nil, OpenAI::Converter.type_info(key)] end - end - # @api private - # - # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def variant(key, spec = nil) - variant_info = - case key - in Symbol - [key, OpenAI::Converter.type_info(spec)] - in Proc | OpenAI::Converter | Class | Hash - [nil, OpenAI::Converter.type_info(key)] - end - - known_variants << variant_info - end + known_variants << variant_info + end - # @api private - # - # @param value [Object] - # - # @return [OpenAI::Converter, Class, nil] - private def resolve_variant(value) - case [@discriminator, value] - in [_, OpenAI::BaseModel] - value.class - in [Symbol, Hash] - key = - if value.key?(@discriminator) - value.fetch(@discriminator) - elsif value.key?((discriminator = @discriminator.to_s)) - value.fetch(discriminator) - end + # @api private + # + # @param value [Object] + # + # @return [OpenAI::Converter, Class, nil] + private def resolve_variant(value) + case [@discriminator, value] + in [_, OpenAI::BaseModel] + value.class + in [Symbol, Hash] + key = + if value.key?(@discriminator) + value.fetch(@discriminator) + elsif value.key?((discriminator = @discriminator.to_s)) + value.fetch(discriminator) + end - key = key.to_sym if key.is_a?(String) - _, resolved = known_variants.find { |k,| k == key } - resolved.nil? ? OpenAI::Unknown : resolved.call - else - nil - end + key = key.to_sym if key.is_a?(String) + _, resolved = known_variants.find { |k,| k == key } + resolved.nil? ? OpenAI::Unknown : resolved.call + else + nil end end @@ -514,7 +504,7 @@ def variants # @param other [Object] # # @return [Boolean] - def self.===(other) + def ===(other) known_variants.any? 
do |_, variant_fn| variant_fn.call === other end @@ -523,90 +513,88 @@ def self.===(other) # @param other [Object] # # @return [Boolean] - def self.==(other) - other.is_a?(Class) && other <= OpenAI::Union && other.derefed_variants == derefed_variants + def ==(other) + other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Union) && other.derefed_variants == derefed_variants end - class << self - # @api private - # - # @param value [Object] - # - # @return [Object] - def coerce(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.coerce(variant, value) - end + # @api private + # + # @param value [Object] + # + # @return [Object] + def coerce(value) + if (variant = resolve_variant(value)) + return OpenAI::Converter.coerce(variant, value) + end - matches = [] + matches = [] - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, _] - return coerced - in [false, true, score] - matches << [score, variant] - in [false, false, _] - nil - end + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, _] + return coerced + in [false, true, score] + matches << [score, variant] + in [false, false, _] + nil end + end + + _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } + variant.nil? ? value : OpenAI::Converter.coerce(variant, value) + end - _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } - variant.nil? ? value : OpenAI::Converter.coerce(variant, value) + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + if (variant = resolve_variant(value)) + return OpenAI::Converter.dump(variant, value) end - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (variant = resolve_variant(value)) + known_variants.each do |_, variant_fn| + variant = variant_fn.call + if variant === value return OpenAI::Converter.dump(variant, value) end - - known_variants.each do |_, variant_fn| - variant = variant_fn.call - if variant === value - return OpenAI::Converter.dump(variant, value) - end - end - value end + value + end - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - # TODO(ruby) this will result in super linear decoding behaviour for nested unions - # follow up with a decoding context that captures current strictness levels - if (variant = resolve_variant(value)) - return Converter.try_strict_coerce(variant, value) - end + # @api private + # + # @param value [Object] + # + # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] + def try_strict_coerce(value) + # TODO(ruby) this will result in super linear decoding behaviour for nested unions + # follow up with a decoding context that captures current strictness levels + if (variant = resolve_variant(value)) + return Converter.try_strict_coerce(variant, value) + end - coercible = false - max_score = 0 + coercible = false + max_score = 0 - known_variants.each do |_, variant_fn| - variant = variant_fn.call + known_variants.each do |_, variant_fn| + variant = variant_fn.call - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, score] - return [true, coerced, score] - in [false, true, score] - coercible = true - max_score = [max_score, score].max - in [false, false, _] 
- nil - end + case OpenAI::Converter.try_strict_coerce(variant, value) + in [true, coerced, score] + return [true, coerced, score] + in [false, true, score] + coercible = true + max_score = [max_score, score].max + in [false, false, _] + nil end - - [false, coercible, max_score] end + + [false, coercible, max_score] end # rubocop:enable Style/CaseEquality @@ -953,7 +941,7 @@ def defaults = (@defaults ||= {}) private def add_field(name_sym, required:, type_info:, spec:) type_fn, info = case type_info - in Proc | Class | OpenAI::Converter + in Proc | Module | OpenAI::Converter [OpenAI::Converter.type_info({**spec, union: type_info}), spec] in Hash [OpenAI::Converter.type_info(type_info), type_info] @@ -1225,7 +1213,7 @@ def initialize(data = {}) type = self.class.fields[mapped]&.fetch(:type) stored = case [type, value] - in [Class, Hash] if type <= OpenAI::BaseModel + in [Module, Hash] if type <= OpenAI::BaseModel type.new(value) in [OpenAI::ArrayOf, Array] | [OpenAI::HashOf, Hash] type.coerce(value) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index edf0c8df..f11ccb01 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -2,16 +2,18 @@ module OpenAI module Models - # @abstract - class AllModels < OpenAI::Union + module AllModels + extend OpenAI::Union + variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::AllModels::UnionMember2 } - # @abstract - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 2477a4ca..b7efc98d 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -76,11 +76,11 @@ class SpeechCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. @@ -93,13 +93,13 @@ class Model < OpenAI::Union # end end - # @abstract - # # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY = :alloy ASH = :ash CORAL = :coral @@ -113,11 +113,11 @@ class Voice < OpenAI::Enum finalize! end - # @abstract - # # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
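Every hunk in this stretch applies the same conversion: an `OpenAI::Enum` subclass becomes a bare module that extends the mixin, keeping its constants and `finalize!` call. A minimal sketch of the resulting shape and behavior, with `Color` and its members invented purely for illustration:

    module Color
      extend OpenAI::Enum

      RED  = :red
      BLUE = :blue

      finalize!
    end

    Color === :red         # => true, `===` checks `values.include?`
    Color === :green       # => false
    Color.coerce("red")    # => :red, member-matching strings coerce to symbols
    Color.coerce("green")  # => "green", anything else passes through unchanged

Because the mixin is extended rather than inherited, the comparison and coercion methods defined at the top of this patch attach to the module's singleton class, which is exactly what the new `==` checks for.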
- class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + MP3 = :mp3 OPUS = :opus AAC = :aac diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 26aae9d6..b85a4f1e 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class SpeechModel < OpenAI::Enum + module SpeechModel + extend OpenAI::Enum + TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 3ff8c770..1ec3b0ac 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -131,12 +131,12 @@ class TranscriptionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). @@ -149,8 +149,9 @@ class Model < OpenAI::Union # end end - # @abstract - class TimestampGranularity < OpenAI::Enum + module TimestampGranularity + extend OpenAI::Enum + WORD = :word SEGMENT = :segment diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 2ad0d933..07d30d90 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Audio - # @abstract - # # Represents a transcription response returned by model, based on the provided # input. - class TranscriptionCreateResponse < OpenAI::Union + module TranscriptionCreateResponse + extend OpenAI::Union + # Represents a transcription response returned by model, based on the provided input. variant -> { OpenAI::Models::Audio::Transcription } diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index 97303675..fc9e716d 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class TranscriptionInclude < OpenAI::Enum + module TranscriptionInclude + extend OpenAI::Enum + LOGPROBS = :logprobs finalize! diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 4bddaa1b..be40014b 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -3,13 +3,13 @@ module OpenAI module Models module Audio - # @abstract - # # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. 
- class TranscriptionStreamEvent < OpenAI::Union + module TranscriptionStreamEvent + extend OpenAI::Union + discriminator :type # Emitted when there is an additional text delta. This is also the first event emitted when the transcription starts. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 0b31b58c..087b2d38 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -72,11 +72,11 @@ class TranslationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. @@ -89,11 +89,11 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 94020236..4d7cf38c 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Audio - # @abstract - class TranslationCreateResponse < OpenAI::Union + module TranslationCreateResponse + extend OpenAI::Union + variant -> { OpenAI::Models::Audio::Translation } variant -> { OpenAI::Models::Audio::TranslationVerbose } diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 88507173..cb25d87b 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class AudioModel < OpenAI::Enum + module AudioModel + extend OpenAI::Enum + WHISPER_1 = :"whisper-1" GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 9593d816..105ac628 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -2,12 +2,12 @@ module OpenAI module Models - # @abstract - # # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - class AudioResponseFormat < OpenAI::Enum + module AudioResponseFormat + extend OpenAI::Enum + JSON = :json TEXT = :text SRT = :srt diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 91778516..dcf830ae 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -224,10 +224,10 @@ class Batch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The current status of the batch. 
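`TranslationCreateResponse` above shows the union counterpart of the same conversion: `extend OpenAI::Union`, then one `variant` call per member, each wrapped in a lambda. Reduced to a sketch with an invented module name:

    module AudioResult
      extend OpenAI::Union

      variant -> { OpenAI::Models::Audio::Translation }
      variant -> { OpenAI::Models::Audio::TranslationVerbose }
    end

    AudioResult.variants
    # => [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose],
    #    dereferenced on demand via `derefed_variants`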
- class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING = :validating FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 593eb7d2..35dc43cb 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -59,23 +59,23 @@ class BatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The time frame within which the batch should be processed. Currently only `24h` # is supported. - class CompletionWindow < OpenAI::Enum + module CompletionWindow + extend OpenAI::Enum + NUMBER_24H = :"24h" finalize! end - # @abstract - # # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - class Endpoint < OpenAI::Enum + module Endpoint + extend OpenAI::Enum + V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" V1_EMBEDDINGS = :"/v1/embeddings" diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index f66edb15..76336aef 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -157,14 +157,14 @@ class AssistantCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. @@ -312,11 +312,11 @@ class VectorStore < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 2deef264..5586ebe7 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -67,11 +67,11 @@ class AssistantListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
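When a union declares no discriminator, `coerce` falls back to the scored sweep shown earlier in this patch: each variant's `try_strict_coerce` either succeeds outright, records a non-zero score as a fallback candidate, or is skipped. A compressed example, both module names invented:

    module SortOrder
      extend OpenAI::Enum

      ASC  = :asc
      DESC = :desc

      finalize!
    end

    module SortInput
      extend OpenAI::Union

      variant enum: -> { SortOrder }
      variant String
    end

    SortInput.coerce("asc")
    # => :asc, the enum coerces strictly ([true, :asc, 1]), so the sweep
    #    returns before the String variant is ever consulted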
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 1e0036f2..da77528c 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), @@ -25,7 +23,9 @@ module Beta # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - class AssistantResponseFormatOption < OpenAI::Union + module AssistantResponseFormatOption + extend OpenAI::Union + # `auto` is the default value variant const: :auto diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index fdc598d7..02fad932 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Represents an event emitted when streaming a Run. # # Each event in a server-sent events stream has an `event` and `data` property: @@ -25,7 +23,9 @@ module Beta # gracefully in your code. See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. - class AssistantStreamEvent < OpenAI::Union + module AssistantStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a new [thread](https://platform.openai.com/docs/api-reference/threads/object) is created. diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 0ea9bc07..ecd6b3db 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Beta - # @abstract - class AssistantTool < OpenAI::Union + module AssistantTool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 9aba0349..78dd540e 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -30,10 +30,10 @@ class AssistantToolChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the tool. If type is `function`, the function name must be set - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION = :function CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 62a2c795..560a4a80 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Beta - # @abstract - # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. 
`auto` is the default value # and means the model can pick between generating a message or calling one or more @@ -12,20 +10,22 @@ module Beta # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - class AssistantToolChoiceOption < OpenAI::Union + module AssistantToolChoiceOption + extend OpenAI::Union + # `none` means the model will not call any tools and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools before responding to the user. variant enum: -> { OpenAI::Models::Beta::AssistantToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific tool. variant -> { OpenAI::Models::Beta::AssistantToolChoice } - # @abstract - # # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 494df652..4a6636f6 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -161,14 +161,14 @@ class AssistantUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group O3_MINI = :"o3-mini" diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 333f1770..102489f1 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -105,11 +105,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index b110bfed..5ea73994 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - class MessageStreamEvent < OpenAI::Union + module MessageStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created. 
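The stream-event unions take the discriminator path instead: `discriminator :event` registers the hash key, and `resolve_variant` looks the incoming tag up against the symbols passed to `variant`, with `OpenAI::Unknown` as the fallback for unrecognized tags. A sketch with stand-in event models (none of these names are in the patch):

    class CreatedEvent < OpenAI::BaseModel; end
    class DeletedEvent < OpenAI::BaseModel; end

    module Event
      extend OpenAI::Union

      discriminator :type

      variant :created, -> { CreatedEvent }
      variant :deleted, -> { DeletedEvent }
    end

    Event.coerce({type: "created", id: "evt_1"})
    # resolves to CreatedEvent, string tags are symbolized before lookup;
    # an unknown tag like {type: "exploded"} coerces through OpenAI::Unknown
    # rather than raising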
diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 82c7266e..45e973df 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - class RunStepStreamEvent < OpenAI::Union + module RunStepStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created. diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index f8548343..f506c9e8 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Beta - # @abstract - # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - class RunStreamEvent < OpenAI::Union + module RunStreamEvent + extend OpenAI::Union + discriminator :event # Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created. diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 7650d56b..4461713d 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -210,13 +210,13 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -323,10 +323,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -343,15 +343,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
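Under the hood, every converter answers `try_strict_coerce` with a `[success, value-or-coercible, score]` triple. Applying the enum implementation from the top of this patch to a two-member enum mirroring the `Role` below:

    module Role
      extend OpenAI::Enum

      USER      = :user
      ASSISTANT = :assistant

      finalize!
    end

    Role.try_strict_coerce(:user)        # => [true, :user, 1]
    Role.try_strict_coerce("assistant")  # => [true, :assistant, 1]
    Role.try_strict_coerce("system")     # => [false, true, 0]   right shape, wrong value
    Role.try_strict_coerce(42)           # => [false, false, 0]  not coercible at all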
- class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -388,8 +388,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -557,11 +558,11 @@ class VectorStore < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. @@ -724,8 +725,9 @@ class FileSearch < OpenAI::BaseModel end end - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::FileSearchTool } @@ -767,13 +769,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 734a53ad..53669757 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -96,10 +96,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -116,15 +116,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -161,8 +161,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -329,11 +330,11 @@ class VectorStore < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + discriminator :type # The default strategy. 
This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 3f0a547d..99110d38 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 6b22d5be..66fbd501 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class AnnotationDelta < OpenAI::Union + module AnnotationDelta + extend OpenAI::Union + discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 2156325f..fab8e1fc 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -32,11 +32,11 @@ class ImageFile < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 81caa23f..e40aa711 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -36,11 +36,11 @@ class ImageFileDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 8c08ac9d..59dcae47 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -31,11 +31,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. 
`low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index ad6177d4..787dd26c 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -35,11 +35,11 @@ class ImageURLDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 0a6b9d1b..da2a8d75 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -172,8 +172,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } @@ -217,10 +218,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the message is incomplete. - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + CONTENT_FILTER = :content_filter MAX_TOKENS = :max_tokens RUN_CANCELLED = :run_cancelled @@ -231,21 +232,21 @@ class Reason < OpenAI::Enum end end - # @abstract - # # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant finalize! end - # @abstract - # # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress INCOMPLETE = :incomplete COMPLETED = :completed diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 7e6e4698..40ea6954 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContent < OpenAI::Union + module MessageContent + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index bcf970c1..634b517c 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
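Since these enums are no longer classes, comparison by identity or ancestry is gone; the `==` redefined at the top of this patch compares value sets instead. Two independently defined modules with the same members now compare equal, as in this invented pair modeled on the image `Detail` enums above:

    module Detail
      extend OpenAI::Enum

      AUTO = :auto
      LOW  = :low
      HIGH = :high

      finalize!
    end

    module DetailCopy
      extend OpenAI::Enum

      HIGH = :high
      LOW  = :low
      AUTO = :auto

      finalize!
    end

    Detail == DetailCopy  # => true, values are compared as Sets, so order is ignored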
- class MessageContentDelta < OpenAI::Union + module MessageContentDelta + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 74766529..4ae935f2 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContentPartParam < OpenAI::Union + module MessageContentPartParam + extend OpenAI::Union + discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 480f9a24..1fcbaa76 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -56,10 +56,10 @@ class MessageCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -76,15 +76,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -121,8 +121,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index f3bc9993..46d5d46f 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -35,10 +35,10 @@ class MessageDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1a302d8c..816ed048 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -79,11 +79,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 02c20e73..2d7b18a8 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -305,11 +305,11 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens @@ -340,10 +340,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt @@ -424,13 +424,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index b6b3564f..f1e8d793 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -285,10 +285,10 @@ class AdditionalMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The text contents of the message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + MessageContentPartParamArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -305,15 +305,15 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the entity that is creating the message. Allowed values include: # # - `user`: Indicates the message is sent by an actual user and should be used in # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant @@ -350,8 +350,9 @@ class Attachment < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -383,13 +384,13 @@ class FileSearch < OpenAI::BaseModel end end - # @abstract - # # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. 
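Note that every cross-file reference in these `variant` calls is wrapped in `-> { ... }`. Variants are stored as `[key, variant_fn]` pairs and the lambda is only called when `variants`, `coerce`, `dump`, or `===` needs it, so Ruby load order between model files stops mattering. The `Model` union below reduces to roughly this shape (the exact enum it references may differ):

    module Model
      extend OpenAI::Union

      variant String                                  # plain type, available immediately
      variant enum: -> { OpenAI::Models::ChatModel }  # thunk, dereferenced on first use
    end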
- class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. @@ -430,13 +431,13 @@ class TruncationStrategy < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index ce011119..fa0aee48 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -68,11 +68,11 @@ class RunListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index b85ac1e3..727fa009 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta module Threads - # @abstract - # # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - class RunStatus < OpenAI::Enum + module RunStatus + extend OpenAI::Enum + QUEUED = :queued IN_PROGRESS = :in_progress REQUIRES_ACTION = :requires_action diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 0313a163..e21e1235 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -63,10 +63,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union + module Output + extend OpenAI::Union + discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 9e9097b3..12321ae7 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -86,10 +86,10 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text output from the Code Interpreter tool call as part of a run step. 
- class Output < OpenAI::Union + module Output + extend OpenAI::Union + discriminator :type # Text output from the Code Interpreter tool call as part of a run step. diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 9011d196..47fd4a37 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -93,11 +93,11 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 @@ -179,10 +179,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the content. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT = :text finalize! diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 4a7aa3e2..d54a639a 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -183,10 +183,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded @@ -194,11 +194,11 @@ class Code < OpenAI::Enum end end - # @abstract - # # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress CANCELLED = :cancelled FAILED = :failed @@ -208,10 +208,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The details of the run step. - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + discriminator :type # Details of the message creation by the run step. @@ -227,10 +227,10 @@ class StepDetails < OpenAI::Union # end end - # @abstract - # # The type of run step, which can be either `message_creation` or `tool_calls`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index c8ba4aa1..d5fc45bb 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -25,10 +25,10 @@ class RunStepDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The details of the run step. - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + discriminator :type # Details of the message creation by the run step. 
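Because `===` is defined on the modules themselves (the reason for the `Style/CaseEquality` rubocop pragmas earlier in the patch), membership tests and case dispatch work directly against these constants. For the run-step status enum converted just above:

    status = :completed

    OpenAI::Models::Beta::Threads::Runs::RunStep::Status === status  # => true

    case status
    when OpenAI::Models::Beta::Threads::Runs::RunStep::Status
      # status is one of the enum's members (:in_progress, :cancelled, ...)
    end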
diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index dd1c5381..32e98757 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -5,8 +5,9 @@ module Models module Beta module Threads module Runs - # @abstract - class RunStepInclude < OpenAI::Enum + module RunStepInclude + extend OpenAI::Enum + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 685e0e75..3a12f3fe 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -92,11 +92,11 @@ class StepListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 20cc29a6..c6e9367f 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs - # @abstract - # # Details of the Code Interpreter tool call the run step was involved in. - class ToolCall < OpenAI::Union + module ToolCall + extend OpenAI::Union + discriminator :type # Details of the Code Interpreter tool call the run step was involved in. diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index 3dae6b9b..f58ae752 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -5,10 +5,10 @@ module Models module Beta module Threads module Runs - # @abstract - # # Details of the Code Interpreter tool call the run step was involved in. - class ToolCallDelta < OpenAI::Union + module ToolCallDelta + extend OpenAI::Union + discriminator :type # Details of the Code Interpreter tool call the run step was involved in. diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index b80607a6..fcb5196d 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -133,15 +133,15 @@ class Choice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -176,10 +176,10 @@ class Logprobs < OpenAI::BaseModel end end - # @abstract - # # The service tier used for processing the request. 
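`dump` is the mirror image of `coerce`: resolve via the discriminator when possible, otherwise hand the value to the first variant whose `===` accepts it, and return it untouched when nothing matches. Assuming the inherited `Converter#dump` passes primitives through (as the `@!parse` stub for enums suggests), an invented union modeled on the `ServiceTier` enum converted just below:

    module Tier
      extend OpenAI::Enum

      SCALE   = :scale
      DEFAULT = :default

      finalize!
    end

    module TierOrLabel
      extend OpenAI::Union

      variant enum: -> { Tier }
      variant String
    end

    TierOrLabel.dump(:scale)    # => :scale   (Tier === :scale matches first)
    TierOrLabel.dump("custom")  # => "custom" (falls through to the String variant)
    TierOrLabel.dump(42)        # => 42       (no variant matches, value passes through)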
- class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 5b2e877b..e9ee4909 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -106,11 +106,11 @@ class Audio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ArrayOfContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] @@ -120,11 +120,11 @@ class Content < OpenAI::Union # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. variant OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray - # @abstract - # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ArrayOfContentPart < OpenAI::Union + module ArrayOfContentPart + extend OpenAI::Union + discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 5d72b2de..5f1ca422 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -30,11 +30,11 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV = :wav MP3 = :mp3 FLAC = :flac @@ -44,11 +44,11 @@ class Format < OpenAI::Enum finalize! end - # @abstract - # # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY = :alloy ASH = :ash BALLAD = :ballad diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 5f0a0fef..fd4648fc 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -233,10 +233,10 @@ class FunctionCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The role of the author of this message. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + DEVELOPER = :developer SYSTEM = :system USER = :user @@ -324,10 +324,10 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The type of the tool. Currently, only `function` is supported. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION = :function finalize! @@ -335,15 +335,15 @@ class Type < OpenAI::Enum end end - # @abstract - # # The reason the model stopped generating tokens. 
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length TOOL_CALLS = :tool_calls @@ -378,10 +378,10 @@ class Logprobs < OpenAI::BaseModel end end - # @abstract - # # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index fd7d3c2c..9b1d2144 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Chat - # @abstract - # # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ChatCompletionContentPart < OpenAI::Union + module ChatCompletionContentPart + extend OpenAI::Union + discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 9990262c..7b165ecb 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -51,11 +51,11 @@ class ImageURL < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO = :auto LOW = :low HIGH = :high diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 0ed3dff8..7e5f53ee 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -48,10 +48,10 @@ class InputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The format of the encoded audio data. Currently supports "wav" and "mp3". - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV = :wav MP3 = :mp3 diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 9e91e405..44c3742f 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the developer message. 
- class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 811b7f8c..c8a3a17d 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Chat - # @abstract - # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - class ChatCompletionMessageParam < OpenAI::Union + module ChatCompletionMessageParam + extend OpenAI::Union + discriminator :role # Developer-provided instructions that the model should follow, regardless of diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index 725b907d..eae25ae3 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -3,8 +3,9 @@ module OpenAI module Models module Chat - # @abstract - class ChatCompletionModality < OpenAI::Enum + module ChatCompletionModality + extend OpenAI::Enum + TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index f9e93347..ed62e69d 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -30,12 +30,12 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 234d78a6..43666654 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Chat - # @abstract - # # The role of the author of a message - class ChatCompletionRole < OpenAI::Enum + module ChatCompletionRole + extend OpenAI::Enum + DEVELOPER = :developer SYSTEM = :system USER = :user diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index a0cc371d..ac99aca3 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -40,10 +40,10 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the system message. 
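# The union migration follows the same shape: `class X < OpenAI::Union`
# becomes `module X; extend OpenAI::Union`. The DSL calls (`discriminator`,
# `variant`) are taken from the hunks above; the dispatch sketch below is a
# hypothetical reading of how such a DSL could resolve a variant, not the
# SDK's actual code.
module Union
  def discriminator(field) = (@field = field)

  def variant(tag, resolver) = ((@variants ||= {})[tag] = resolver)

  # Look up the variant for a raw hash by its discriminator field.
  def resolve(data) = @variants.fetch(data.fetch(@field)).call
end

DeveloperMessage = Class.new
UserMessage = Class.new

module ChatCompletionMessageParam
  extend Union

  discriminator :role
  variant :developer, -> { DeveloperMessage }
  variant :user, -> { UserMessage }
end

ChatCompletionMessageParam.resolve({ role: :user }) # => UserMessage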
- class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 28f7750e..273236d8 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Chat - # @abstract - # # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -14,19 +12,21 @@ module Chat # # `none` is the default when no tools are present. `auto` is the default if tools # are present. - class ChatCompletionToolChoiceOption < OpenAI::Union + module ChatCompletionToolChoiceOption + extend OpenAI::Union + # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. variant enum: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific function. variant -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice } - # @abstract - # # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index ebbca6aa..fc5f1903 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -31,10 +31,10 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the tool message. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartTextArray = OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 4a1eee01..89d87d20 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -39,10 +39,10 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The contents of the user message. 
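# `ChatCompletionToolChoiceOption` pairs an enum variant (`Auto`) with a
# model variant (a named tool). A hypothetical coercion matching the two
# documented shapes, shown only to make the union concrete:
def coerce_tool_choice(choice)
  case choice
  in :none | :auto | :required then choice
  in { type: :function, function: { name: String } } then choice
  else raise ArgumentError, "unsupported tool_choice: #{choice.inspect}"
  end
end

coerce_tool_choice(:auto)
coerce_tool_choice({ type: :function, function: { name: "lookup" } })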
- class Content < OpenAI::Union + module Content + extend OpenAI::Union + ChatCompletionContentPartArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 49e175c1..c326a2d8 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -417,14 +417,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI @@ -440,8 +440,6 @@ class Model < OpenAI::Union # end end - # @abstract - # # @deprecated # # Deprecated in favor of `tool_choice`. @@ -458,19 +456,21 @@ class Model < OpenAI::Union # # `none` is the default when no functions are present. `auto` is the default if # functions are present. - class FunctionCall < OpenAI::Union + module FunctionCall + extend OpenAI::Union + # `none` means the model will not call a function and instead generates a message. `auto` means the model can pick between generating a message or calling a function. variant enum: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } # Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. variant -> { OpenAI::Models::Chat::ChatCompletionFunctionCallOption } - # @abstract - # # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. - class FunctionCallMode < OpenAI::Enum + module FunctionCallMode + extend OpenAI::Enum + NONE = :none AUTO = :auto @@ -530,16 +530,15 @@ class Function < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - class Modality < OpenAI::Enum + module Modality + extend OpenAI::Enum + TEXT = :text AUDIO = :audio finalize! end - # @abstract - # # An object specifying the format that the model must output. # # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured @@ -550,7 +549,9 @@ class Modality < OpenAI::Enum # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormat < OpenAI::Union + module ResponseFormat + extend OpenAI::Union + # Default response format. Used to generate text responses. variant -> { OpenAI::Models::ResponseFormatText } @@ -571,8 +572,6 @@ class ResponseFormat < OpenAI::Union # end end - # @abstract - # # Specifies the latency tier to use for processing the request. This parameter is # relevant for customers subscribed to the scale tier service: # @@ -587,18 +586,20 @@ class ResponseFormat < OpenAI::Union # # When this parameter is set, the response body will include the `service_tier` # utilized. - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + AUTO = :auto DEFAULT = :default finalize! 
end - # @abstract - # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String @@ -645,11 +646,11 @@ class WebSearchOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 81936268..ce2187d8 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -69,11 +69,11 @@ class CompletionListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 66af6ada..7d45da51 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -50,11 +50,11 @@ class MessageListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 8edd5e1a..695dcb50 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ChatModel < OpenAI::Enum + module ChatModel + extend OpenAI::Enum + O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 17be219d..2b00e018 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -41,8 +41,6 @@ class ComparisonFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # - `eq`: equals @@ -51,7 +49,9 @@ class ComparisonFilter < OpenAI::BaseModel # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + EQ = :eq NE = :ne GT = :gt @@ -62,11 +62,11 @@ class Type < OpenAI::Enum finalize! end - # @abstract - # # The value to compare against the attribute key; supports string, number, or # boolean types. 
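# Several unions in these hunks (`Stop` here, `Prompt` below) pair a scalar
# variant with an array alias such as `StringArray = OpenAI::ArrayOf[String]`.
# A hypothetical normalizer showing the shapes `Stop` accepts (illustrative
# only, not SDK code):
def normalize_stop(stop)
  Array(stop).map(&:to_s) # "END" => ["END"]; ["END", "STOP"] passes through
end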
- class Value < OpenAI::Union + module Value + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 6237b56e..df42c59b 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -37,13 +37,13 @@ class CompletionChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP = :stop LENGTH = :length CONTENT_FILTER = :content_filter diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index c1c44876..cbdc341b 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -228,14 +228,14 @@ class CompletionCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" @@ -259,15 +259,15 @@ class Model < OpenAI::Union # end end - # @abstract - # # The prompt(s) to generate completions for, encoded as a string, array of # strings, array of tokens, or array of token arrays. # # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. - class Prompt < OpenAI::Union + module Prompt + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -289,11 +289,11 @@ class Prompt < OpenAI::Union # end end - # @abstract - # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 11452bfe..4223ab0d 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -26,11 +26,11 @@ class CompoundFilter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - class Filter < OpenAI::Union + module Filter + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -43,10 +43,10 @@ class Filter < OpenAI::Union # end end - # @abstract - # # Type of operation: `and` or `or`. 
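# `ComparisonFilter::Type` names the six comparison operators. An
# illustrative mapping onto Ruby comparisons (the table is an assumption for
# demonstration, not SDK code):
COMPARISON_OPS = {
  eq: ->(a, b) { a == b },
  ne: ->(a, b) { a != b },
  gt: ->(a, b) { a > b },
  gte: ->(a, b) { a >= b },
  lt: ->(a, b) { a < b },
  lte: ->(a, b) { a <= b }
}.freeze

COMPARISON_OPS.fetch(:gte).call(3, 2) # => true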
- class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AND = :and OR = :or diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 4d126d18..e215c574 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -76,8 +76,6 @@ class EmbeddingCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for @@ -86,7 +84,9 @@ class EmbeddingCreateParams < OpenAI::BaseModel # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - class Input < OpenAI::Union + module Input + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] IntegerArray = OpenAI::ArrayOf[Integer] @@ -112,14 +112,14 @@ class Input < OpenAI::Union # end end - # @abstract - # # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. @@ -132,11 +132,11 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). - class EncodingFormat < OpenAI::Enum + module EncodingFormat + extend OpenAI::Enum + FLOAT = :float BASE64 = :base64 diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 65247fdf..0eab075d 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class EmbeddingModel < OpenAI::Enum + module EmbeddingModel + extend OpenAI::Enum + TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 24c4dd7e..9e917e35 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -2,10 +2,10 @@ module OpenAI module Models - # @abstract - # # The strategy used to chunk the file. - class FileChunkingStrategy < OpenAI::Union + module FileChunkingStrategy + extend OpenAI::Union + discriminator :type variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObject } diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index d9e6a634..d1943074 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -2,11 +2,11 @@ module OpenAI module Models - # @abstract - # # The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - class FileChunkingStrategyParam < OpenAI::Union + module FileChunkingStrategyParam + extend OpenAI::Union + discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 3a3c6b9b..ae6cf6ed 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -63,11 +63,11 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index f0476f5d..a89db46f 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -99,12 +99,12 @@ class FileObject < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - class Purpose < OpenAI::Enum + module Purpose + extend OpenAI::Enum + ASSISTANTS = :assistants ASSISTANTS_OUTPUT = :assistants_output BATCH = :batch @@ -116,13 +116,13 @@ class Purpose < OpenAI::Enum finalize! end - # @abstract - # # @deprecated # # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + UPLOADED = :uploaded PROCESSED = :processed ERROR = :error diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 8b4f9af2..49c2717d 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -2,13 +2,13 @@ module OpenAI module Models - # @abstract - # # The intended purpose of the uploaded file. One of: - `assistants`: Used in the # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - class FilePurpose < OpenAI::Enum + module FilePurpose + extend OpenAI::Enum + ASSISTANTS = :assistants BATCH = :batch FINE_TUNE = :"fine-tune" diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 68876089..cdfbcc5f 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -272,11 +272,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -288,11 +288,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -304,11 +304,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -321,11 +321,11 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING_FILES = :validating_files QUEUED = :queued RUNNING = :running @@ -459,11 +459,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -475,11 +475,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + variant const: :auto variant Float @@ -491,11 +491,11 @@ class Beta < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -507,11 +507,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -594,11 +594,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -610,11 +610,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -626,11 +626,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -644,10 +644,10 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The type of method. Is either `supervised` or `dpo`. 
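# The hyperparameter unions converted above and below (`BatchSize`, `Beta`,
# `LearningRateMultiplier`, `NEpochs`) all pair `variant const: :auto` with a
# numeric variant. A hypothetical validity check capturing that shape:
def auto_or_number?(value, numeric_type)
  value == :auto || value.is_a?(numeric_type)
end

auto_or_number?(:auto, Integer) # => true  (BatchSize, NEpochs)
auto_or_number?(0.1, Float)     # => true  (Beta, LearningRateMultiplier)
auto_or_number?("8", Integer)   # => false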
- class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 0688b7a8..207444b5 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -69,10 +69,10 @@ class FineTuningJobEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The log level of the event. - class Level < OpenAI::Enum + module Level + extend OpenAI::Enum + INFO = :info WARN = :warn ERROR = :error @@ -80,10 +80,10 @@ class Level < OpenAI::Enum finalize! end - # @abstract - # # The type of event. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message METRICS = :metrics diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 6900ac01..c1cbd747 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -142,11 +142,11 @@ class JobCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Model < OpenAI::Union + module Model + extend OpenAI::Union + # @!group BABBAGE_002 = :"babbage-002" @@ -222,11 +222,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -238,11 +238,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -254,11 +254,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -471,11 +471,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -487,11 +487,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + variant const: :auto variant Float @@ -503,11 +503,11 @@ class Beta < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -519,11 +519,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -606,11 +606,11 @@ class Hyperparameters < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + variant const: :auto variant Integer @@ -622,11 +622,11 @@ class BatchSize < OpenAI::Union # end end - # @abstract - # # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + variant const: :auto variant Float @@ -638,11 +638,11 @@ class LearningRateMultiplier < OpenAI::Union # end end - # @abstract - # # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + variant const: :auto variant Integer @@ -656,10 +656,10 @@ class NEpochs < OpenAI::Union end end - # @abstract - # # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index d4546f63..ff2e6112 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -70,11 +70,11 @@ class ImageCreateVariationParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -87,23 +87,23 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 14c8b8cf..523b95ca 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -101,11 +101,11 @@ class ImageEditParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. Only `dall-e-2` is supported at this # time. 
- class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. @@ -118,23 +118,23 @@ class Model < OpenAI::Union # end end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 577665bd..a4993e78 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -104,10 +104,10 @@ class ImageGenerateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The model to use for image generation. - class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The model to use for image generation. @@ -120,36 +120,36 @@ class Model < OpenAI::Union # end end - # @abstract - # # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - class Quality < OpenAI::Enum + module Quality + extend OpenAI::Enum + STANDARD = :standard HD = :hd finalize! end - # @abstract - # # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL = :url B64_JSON = :b64_json finalize! end - # @abstract - # # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256 = :"256x256" NUMBER_512X512 = :"512x512" NUMBER_1024X1024 = :"1024x1024" @@ -159,13 +159,13 @@ class Size < OpenAI::Enum finalize! end - # @abstract - # # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
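# The `Size` hunks keep their quoted-symbol members (`:"256x256"`) unchanged:
# Ruby requires the quotes because a plain symbol literal cannot start with a
# digit, so only the enclosing declaration moves from class to module.
:"256x256"  # valid symbol
# :256x256  # SyntaxError without the quotes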
- class Style < OpenAI::Enum + module Style + extend OpenAI::Enum + VIVID = :vivid NATURAL = :natural diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index e49e6699..ce36cc6d 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ImageModel < OpenAI::Enum + module ImageModel + extend OpenAI::Enum + DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 0f3c5a90..2b0818e8 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -309,97 +309,110 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Harassment < OpenAI::Enum + module Harassment + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class HarassmentThreatening < OpenAI::Enum + module HarassmentThreatening + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Hate < OpenAI::Enum + module Hate + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class HateThreatening < OpenAI::Enum + module HateThreatening + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Illicit < OpenAI::Enum + module Illicit + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class IllicitViolent < OpenAI::Enum + module IllicitViolent + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class SelfHarm < OpenAI::Enum + module SelfHarm + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SelfHarmInstruction < OpenAI::Enum + module SelfHarmInstruction + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SelfHarmIntent < OpenAI::Enum + module SelfHarmIntent + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class Sexual < OpenAI::Enum + module Sexual + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class SexualMinor < OpenAI::Enum + module SexualMinor + extend OpenAI::Enum + TEXT = :text finalize! end - # @abstract - class Violence < OpenAI::Enum + module Violence + extend OpenAI::Enum + TEXT = :text IMAGE = :image finalize! end - # @abstract - class ViolenceGraphic < OpenAI::Enum + module ViolenceGraphic + extend OpenAI::Enum + TEXT = :text IMAGE = :image diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index e0789618..89acded2 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -36,11 +36,11 @@ class ModerationCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - class Input < OpenAI::Union + module Input + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] @@ -61,13 +61,13 @@ class Input < OpenAI::Union # end end - # @abstract - # # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). 
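# `ModerationCreateParams::Input` accepts a bare string, an array of strings,
# or an array of multi-modal objects. A hypothetical normalization to the
# array-of-objects form (shapes assumed for illustration, not SDK code):
def normalize_moderation_input(input)
  case input
  in String then [{ type: :text, text: input }]
  in Array then input.map { |i| i.is_a?(String) ? { type: :text, text: i } : i }
  end
end

normalize_moderation_input("hello") # => [{type: :text, text: "hello"}]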
- class Model < OpenAI::Union + module Model + extend OpenAI::Union + variant String # The content moderation model you would like to use. Learn more in diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index 4089ad86..aad66a00 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -2,8 +2,9 @@ module OpenAI module Models - # @abstract - class ModerationModel < OpenAI::Enum + module ModerationModel + extend OpenAI::Enum + OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" TEXT_MODERATION_LATEST = :"text-moderation-latest" diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 47271a66..c6441173 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -2,10 +2,10 @@ module OpenAI module Models - # @abstract - # # An object describing an image to classify. - class ModerationMultiModalInput < OpenAI::Union + module ModerationMultiModalInput + extend OpenAI::Union + discriminator :type # An object describing an image to classify. diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index a4ec26de..cfe04696 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -37,14 +37,14 @@ class Reasoning < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # **computer_use_preview only** # # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - class GenerateSummary < OpenAI::Enum + module GenerateSummary + extend OpenAI::Enum + CONCISE = :concise DETAILED = :detailed diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index f1fee21d..e27540e1 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -2,15 +2,15 @@ module OpenAI module Models - # @abstract - # # **o-series models only** # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - class ReasoningEffort < OpenAI::Enum + module ReasoningEffort + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 65a0097a..1359937f 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -41,10 +41,10 @@ class ComputerTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of computer environment to control. 
- class Environment < OpenAI::Enum + module Environment + extend OpenAI::Enum + MAC = :mac WINDOWS = :windows UBUNTU = :ubuntu diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index e4effc89..bd57a3e2 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -43,11 +43,11 @@ class EasyInputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + # A text input to the model. variant String @@ -62,11 +62,11 @@ class Content < OpenAI::Union # end end - # @abstract - # # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user ASSISTANT = :assistant SYSTEM = :system @@ -75,10 +75,10 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index c2ba4177..28ded43b 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -62,10 +62,10 @@ class FileSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A filter to apply based on file attributes. - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } @@ -112,10 +112,10 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The ranker to use for the file search. - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index e0cb2854..adee6e5b 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -63,13 +63,13 @@ class InputItemListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 8b979829..71264a0f 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -289,10 +289,10 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The reason why the response is incomplete. 
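# `FileSearchTool::Filters` unions `ComparisonFilter` with `CompoundFilter`.
# Reusing the COMPARISON_OPS table sketched earlier, a hypothetical recursive
# evaluator over raw hashes (field shapes assumed from the documented
# attributes; not SDK code):
def filter_match?(filter, attributes)
  case filter
  in { filters:, type: } # compound: `and` / `or` over nested filters
    results = filters.map { |f| filter_match?(f, attributes) }
    type == :and ? results.all? : results.any?
  in { key:, type:, value: } # comparison
    COMPARISON_OPS.fetch(type).call(attributes[key], value)
  end
end

filter_match?({ key: :pages, type: :gte, value: 10 }, { pages: 12 }) # => true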
- class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter @@ -300,12 +300,12 @@ class Reason < OpenAI::Enum end end - # @abstract - # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -330,8 +330,6 @@ class ToolChoice < OpenAI::Union # end end - # @abstract - # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -339,7 +337,9 @@ class ToolChoice < OpenAI::Union # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index e93e62cf..bf04793b 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -48,10 +48,10 @@ class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The output of a code interpreter tool call that is text. - class Result < OpenAI::Union + module Result + extend OpenAI::Union + discriminator :type # The output of a code interpreter tool call that is text. @@ -137,10 +137,10 @@ class File < OpenAI::BaseModel # end end - # @abstract - # # The status of the code interpreter tool call. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress INTERPRETING = :interpreting COMPLETED = :completed diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 20bd85ae..07da24be 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -58,10 +58,10 @@ class ResponseComputerToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A click action. - class Action < OpenAI::Union + module Action + extend OpenAI::Union + discriminator :type # A click action. @@ -130,11 +130,11 @@ class Click < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - class Button < OpenAI::Enum + module Button + extend OpenAI::Enum + LEFT = :left RIGHT = :right WHEEL = :wheel @@ -438,11 +438,11 @@ class PendingSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -450,10 +450,10 @@ class Status < OpenAI::Enum finalize! 
end - # @abstract - # # The type of the computer call. Always `computer_call`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + COMPUTER_CALL = :computer_call finalize! diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 252be325..33e5105d 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -94,11 +94,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 72456bac..ad933f5d 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Multi-modal input and output contents. - class ResponseContent < OpenAI::Union + module ResponseContent + extend OpenAI::Union + # A text input to the model. variant -> { OpenAI::Models::Responses::ResponseInputText } diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 1fa8be8b..a1fb23a0 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartAddedEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content part that was added. - class Part < OpenAI::Union + module Part + extend OpenAI::Union + discriminator :type # A text output from the model. diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 0911d697..968c86e1 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -47,10 +47,10 @@ class ResponseContentPartDoneEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The content part that is done. - class Part < OpenAI::Union + module Part + extend OpenAI::Union + discriminator :type # A text output from the model. diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 05ef2120..ebca56e3 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -242,8 +242,6 @@ class ResponseCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Text, image, or file inputs to the model, used to generate a response. 
# # Learn more: @@ -253,7 +251,9 @@ class ResponseCreateParams < OpenAI::BaseModel # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - class Input < OpenAI::Union + module Input + extend OpenAI::Union + # A text input to the model, equivalent to a text input with the # `user` role. variant String @@ -269,12 +269,12 @@ class Input < OpenAI::Union # end end - # @abstract - # # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -299,8 +299,6 @@ class ToolChoice < OpenAI::Union # end end - # @abstract - # # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -308,7 +306,9 @@ class ToolChoice < OpenAI::Union # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 47dd8b4b..21b8b11b 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -26,10 +26,10 @@ class ResponseError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The error code for the response. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 8054ec60..11c8e566 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -52,11 +52,11 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed @@ -130,8 +130,9 @@ class Result < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 6749d6bc..81d6ac10 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # An object specifying the format that the model must output. 
# # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -18,7 +16,9 @@ module Responses # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormatTextConfig < OpenAI::Union + module ResponseFormatTextConfig + extend OpenAI::Union + discriminator :type # Default response format. Used to generate text responses. diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index b2b500c1..2cdffc5f 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -65,11 +65,11 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 503dee7b..283f9afa 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -50,11 +50,11 @@ class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 8afd6c8b..fbcf112a 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -35,10 +35,10 @@ class ResponseFunctionWebSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the web search tool call. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress SEARCHING = :searching COMPLETED = :completed diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 2d318473..7eb0bd14 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # Specify additional output data to include in the model response. Currently # supported values are: # @@ -13,7 +11,9 @@ module Responses # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. 
- class ResponseIncludable < OpenAI::Enum + module ResponseIncludable + extend OpenAI::Enum + FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 24201588..23539df8 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -33,10 +33,10 @@ class ResponseInputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The format of the audio data. Currently supported formats are `mp3` and `wav`. - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + MP3 = :mp3 WAV = :wav diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 8d6226d4..fd4e014d 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # A text input to the model. - class ResponseInputContent < OpenAI::Union + module ResponseInputContent + extend OpenAI::Union + discriminator :type # A text input to the model. diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index e9334fd0..e6d2c0ce 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -43,11 +43,11 @@ class ResponseInputImage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + HIGH = :high LOW = :low AUTO = :auto diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index de2477cf..adb90a0a 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -3,14 +3,14 @@ module OpenAI module Models module Responses - # @abstract - # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - class ResponseInputItem < OpenAI::Union + module ResponseInputItem + extend OpenAI::Union + discriminator :type # A message input to the model with a role indicating instruction following @@ -106,10 +106,10 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user SYSTEM = :system DEVELOPER = :developer @@ -117,11 +117,11 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
- class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -129,10 +129,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! @@ -236,11 +236,11 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -303,11 +303,11 @@ class FunctionCallOutput < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 2a9dd999..4e29d6dd 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -55,10 +55,10 @@ class ResponseInputMessageItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER = :user SYSTEM = :system DEVELOPER = :developer @@ -66,11 +66,11 @@ class Role < OpenAI::Enum finalize! end - # @abstract - # # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete @@ -78,10 +78,10 @@ class Status < OpenAI::Enum finalize! end - # @abstract - # # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE = :message finalize! diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index b3f4f86c..baeff0e5 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Content item used to generate a response. - class ResponseItem < OpenAI::Union + module ResponseItem + extend OpenAI::Union + discriminator :type variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem } diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 2ed933fd..9bdb2a25 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # An output message from the model. - class ResponseOutputItem < OpenAI::Union + module ResponseOutputItem + extend OpenAI::Union + discriminator :type # An output message from the model. 
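Note on the union refactor above: these unions are now plain modules that `extend OpenAI::Union`, so variant registration and `===` matching live on the module itself rather than on an abstract class. As a rough sketch of the resulting shape — `ExampleItem` and the variant keys are illustrative stand-ins, not generated SDK types:

# Illustrative only: mirrors the generated pattern in this patch, but
# `ExampleItem` is a hypothetical stand-in, not a real OpenAI::Models type.
module ExampleItem
  extend OpenAI::Union

  # Route deserialization on the `type` field of the payload.
  discriminator :type

  variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem }
  variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCall }
end

# Since `===` is defined on the extended module, pattern matching works
# with no abstract-class instance ever being created:
#
#   case item
#   in ExampleItem then handle(item)  # `handle` is hypothetical
#   end
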
diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 1dee0300..d9ab6443 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -49,10 +49,10 @@ class ResponseOutputMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A text output from the model. - class Content < OpenAI::Union + module Content + extend OpenAI::Union + discriminator :type # A text output from the model. @@ -68,11 +68,11 @@ class Content < OpenAI::Union # end end - # @abstract - # # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 175d5eda..a5027854 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -34,10 +34,10 @@ class ResponseOutputText < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A citation to a file. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index c4d60d3f..4c410d54 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -68,11 +68,11 @@ class Summary < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed INCOMPLETE = :incomplete diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index e15a7a6e..3ec9d99f 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -3,11 +3,11 @@ module OpenAI module Models module Responses - # @abstract - # # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - class ResponseStatus < OpenAI::Enum + module ResponseStatus + extend OpenAI::Enum + COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 85fe7a25..73a45396 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -3,10 +3,10 @@ module OpenAI module Models module Responses - # @abstract - # # Emitted when there is a partial audio response. - class ResponseStreamEvent < OpenAI::Union + module ResponseStreamEvent + extend OpenAI::Union + discriminator :type # Emitted when there is a partial audio response. 
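The same refactor applies to enums: each is now a module extending OpenAI::Enum, with values as plain Symbol constants and a `finalize!` call that, per the RBI docs later in this patch, guards against thread-safety issues by instantiating `@values` up front. A minimal sketch, again with a hypothetical `ExampleStatus` rather than a generated type:

# Illustrative only: `ExampleStatus` stands in for the generated status
# enums refactored throughout this patch.
module ExampleStatus
  extend OpenAI::Enum

  IN_PROGRESS = :in_progress
  COMPLETED = :completed

  # Instantiate the cached value list eagerly (see the Enum RBI below).
  finalize!
end

# Membership is checked through the module's `===`:
#
#   ExampleStatus === :completed   #=> expected true
#   ExampleStatus === :cancelled   #=> expected false
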
diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 4d980266..a8dbcfba 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -65,10 +65,10 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A citation to a file. - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + discriminator :type # A citation to a file. diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 9093989e..c3d097ce 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - # @abstract - # # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + discriminator :type # A tool that searches for relevant content from uploaded files. diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index e72582b9..9ec1a734 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -3,8 +3,6 @@ module OpenAI module Models module Responses - # @abstract - # # Controls which (if any) tool is called by the model. # # `none` means the model will not call any tool and instead generates a message. @@ -13,7 +11,9 @@ module Responses # more tools. # # `required` means the model must call one or more tools. - class ToolChoiceOptions < OpenAI::Enum + module ToolChoiceOptions + extend OpenAI::Enum + NONE = :none AUTO = :auto REQUIRED = :required diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 3398854c..9d8e05e2 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -27,8 +27,6 @@ class ToolChoiceTypes < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of hosted tool the model should to use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # @@ -37,7 +35,9 @@ class ToolChoiceTypes < OpenAI::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FILE_SEARCH = :file_search WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 6a2acdc0..9341e708 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -42,24 +42,24 @@ class WebSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of the web search tool. One of: # # - `web_search_preview` # - `web_search_preview_2025_03_11` - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + WEB_SEARCH_PREVIEW = :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 finalize! 
end - # @abstract - # # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index d96fdbc5..c80cbfbd 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -2,16 +2,18 @@ module OpenAI module Models - # @abstract - class ResponsesModel < OpenAI::Union + module ResponsesModel + extend OpenAI::Union + variant String variant enum: -> { OpenAI::Models::ChatModel } variant enum: -> { OpenAI::Models::ResponsesModel::UnionMember2 } - # @abstract - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW = :"computer-use-preview" diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 0e6b2be5..507858a6 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -76,10 +76,10 @@ class Upload < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The status of the Upload. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + PENDING = :pending COMPLETED = :completed CANCELLED = :cancelled diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 7593bb95..3c74e1d8 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -157,12 +157,12 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + EXPIRED = :expired IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index ee2a79cf..5effb4ea 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -66,11 +66,11 @@ class VectorStoreListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 30d37581..ea877db0 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -76,10 +76,10 @@ class VectorStoreSearchParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # A query string for a search - class Query < OpenAI::Union + module Query + extend OpenAI::Union + StringArray = OpenAI::ArrayOf[String] variant String @@ -93,10 +93,10 @@ class Query < OpenAI::Union # end end - # @abstract - # # A filter to apply based on file attributes. - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + # A filter used to compare a specified attribute key to a given value using a defined comparison operation. 
variant -> { OpenAI::Models::ComparisonFilter } @@ -139,8 +139,9 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 233cdad0..ebb55560 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -50,8 +50,9 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float @@ -86,10 +87,10 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # The type of content. - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT = :text finalize! diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index e35057be..ad9ef46e 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -49,8 +49,9 @@ class FileBatchCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 40629c3c..20469fe9 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -84,10 +84,10 @@ class FileBatchListFilesParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -96,11 +96,11 @@ class Filter < OpenAI::Enum finalize! end - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 3e7c7817..58fbaa08 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -49,8 +49,9 @@ class FileCreateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index c227d297..2f7ffbbc 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -78,10 +78,10 @@ class FileListParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
- class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed FAILED = :failed @@ -90,11 +90,11 @@ class Filter < OpenAI::Enum finalize! end - # @abstract - # # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 6b9100d4..c88b9a73 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -34,8 +34,9 @@ class FileUpdateParams < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 5aa034c1..c3d22aef 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -129,10 +129,10 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void - # @abstract - # # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR = :server_error UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file @@ -141,12 +141,12 @@ class Code < OpenAI::Enum end end - # @abstract - # # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled @@ -155,8 +155,9 @@ class Status < OpenAI::Enum finalize! end - # @abstract - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + variant String variant Float diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index da82a1f6..a8628b40 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -101,11 +101,11 @@ class FileCounts < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end - # @abstract - # # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS = :in_progress COMPLETED = :completed CANCELLED = :cancelled diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 3312ef84..2fead0cf 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -170,132 +170,116 @@ module OpenAI # # We can therefore convert string values to Symbols, but can't convert other # values safely. - class Enum - extend OpenAI::Converter - - abstract! - - Value = type_template(:out) + module Enum + include OpenAI::Converter - class << self - # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[Value]) } - def values - end + # All of the valid Symbol values for this enum. 
+ sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } + def values + end - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - sig { void } - private def finalize! - end + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + sig { void } + private def finalize! end sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other) + def ===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other) + def ==(other) end - class << self - # @api private - sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def coerce(value) - end + # @api private + sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def coerce(value) + end - # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value) - end + # @api private + sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) end end # @api private - class Union - extend OpenAI::Converter - - abstract! - - Variants = type_template(:out) + module Union + include OpenAI::Converter - class << self - # @api private - # - # All of the specified variant info for this union. - sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(Variants)]]) } - private def known_variants - end + # @api private + # + # All of the specified variant info for this union. + sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Converter::Input)]]) } + private def known_variants + end - # @api private - sig { returns(T::Array[[T.nilable(Symbol), Variants]]) } - protected def derefed_variants - end + # @api private + sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + protected def derefed_variants + end - # All of the specified variants for this union. - sig { overridable.returns(T::Array[Variants]) } - def variants - end + # All of the specified variants for this union. 
+ sig { overridable.returns(T::Array[T.anything]) } + def variants + end - # @api private - sig { params(property: Symbol).void } - private def discriminator(property) - end + # @api private + sig { params(property: Symbol).void } + private def discriminator(property) + end - # @api private - sig do - params( - key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants), - spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(Variants), Variants) - ) - .void - end - private def variant(key, spec = nil) - end + # @api private + sig do + params( + key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything), + spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything) + ) + .void + end + private def variant(key, spec = nil) + end - # @api private - sig { params(value: T.anything).returns(T.nilable(Variants)) } - private def resolve_variant(value) - end + # @api private + sig { params(value: T.anything).returns(T.nilable(T.anything)) } + private def resolve_variant(value) end sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other) + def ===(other) end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other) + def ==(other) end - class << self - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def coerce(value) - end + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def coerce(value) + end - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end + # @api private + sig do + override + .params(value: T.anything) + .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) + end + def try_strict_coerce(value) end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 75d67db0..5fb326c8 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -2,20 +2,27 @@ module OpenAI module Models - class AllModels < OpenAI::Union - abstract! + module AllModels + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = + type_template(:out) do + { + fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::UnionMember2::OrSymbol) + } + end - class UnionMember2 < OpenAI::Enum - abstract! 
+ module UnionMember2 + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::UnionMember2) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::UnionMember2::TaggedSymbol) } - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::UnionMember2::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index e74cec3d..c7babe62 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -18,11 +18,14 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) + end def model=(_) end @@ -30,11 +33,14 @@ module OpenAI # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) } def voice end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + .returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + end def voice=(_) end @@ -50,11 +56,14 @@ module OpenAI # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + .returns(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + end def response_format=(_) end @@ -71,10 +80,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,10 +97,10 @@ module OpenAI .returns( { input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: OpenAI::RequestOptions } @@ -102,45 +111,48 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)} } end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - class Voice < OpenAI::Enum - abstract! + module Voice + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } - ALLOY = :alloy - ASH = :ash - CORAL = :coral - ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova - SAGE = :sage - SHIMMER = :shimmer + ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. - class ResponseFormat < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - MP3 = :mp3 - OPUS = :opus - AAC = :aac - FLAC = :flac - WAV = :wav - PCM = :pcm + module ResponseFormat + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } + + MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 5228e000..d375fcda 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -3,14 +3,15 @@ module OpenAI module Models module Audio - class SpeechModel < OpenAI::Enum - abstract! + module SpeechModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } - TTS_1 = :"tts-1" - TTS_1_HD = :"tts-1-hd" - GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" + TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::OrSymbol) + TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::OrSymbol) + GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 026f8b77..272b6fc5 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -20,11 +20,14 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + end def model=(_) end @@ -33,11 +36,14 @@ module OpenAI # model's confidence in the transcription. `logprobs` only works with # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) + end def include=(_) end @@ -67,11 +73,14 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. 
For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::AudioResponseFormat::OrSymbol) + .returns(OpenAI::Models::AudioResponseFormat::OrSymbol) + end def response_format=(_) end @@ -93,24 +102,31 @@ module OpenAI # Either or both of these options are supported: `word`, or `segment`. Note: There # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. - sig { returns(T.nilable(T::Array[Symbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + ) + end def timestamp_granularities end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + end def timestamp_granularities=(_) end sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -133,13 +149,13 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], request_options: OpenAI::RequestOptions } ) @@ -150,19 +166,23 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } end - class TimestampGranularity < OpenAI::Enum - abstract! 
+ module TimestampGranularity + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) } - WORD = :word - SEGMENT = :segment + WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) + SEGMENT = + T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index 814a204b..e6190f8a 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -5,8 +5,8 @@ module OpenAI module Audio # Represents a transcription response returned by model, based on the provided # input. - class TranscriptionCreateResponse < OpenAI::Union - abstract! + module TranscriptionCreateResponse + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 7a60b02a..95c1bbab 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -3,12 +3,13 @@ module OpenAI module Models module Audio - class TranscriptionInclude < OpenAI::Enum - abstract! + module TranscriptionInclude + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } - LOGPROBS = :logprobs + LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index 4c8cc6bc..4ca09f44 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -7,8 +7,8 @@ module OpenAI # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. - class TranscriptionStreamEvent < OpenAI::Union - abstract! + module TranscriptionStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index ce2e6e77..3e9eeb48 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -19,11 +19,14 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
- sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) + end def model=(_) end @@ -41,11 +44,14 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + .returns(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + end def response_format=(_) end @@ -65,9 +71,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -81,9 +87,9 @@ module OpenAI .returns( { file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: OpenAI::RequestOptions } @@ -94,24 +100,28 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } end # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - class ResponseFormat < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - JSON = :json - TEXT = :text - SRT = :srt - VERBOSE_JSON = :verbose_json - VTT = :vtt + module ResponseFormat + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } + + JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + VERBOSE_JSON = + T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 61e32cae..2d27d701 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Audio - class TranslationCreateResponse < OpenAI::Union - abstract! + module TranslationCreateResponse + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index 917ce7d8..c17c0811 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -2,14 +2,15 @@ module OpenAI module Models - class AudioModel < OpenAI::Enum - abstract! + module AudioModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioModel::TaggedSymbol) } - WHISPER_1 = :"whisper-1" - GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe" - GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" + WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::OrSymbol) + GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::OrSymbol) + GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 405da3e2..fc0693da 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -5,16 +5,17 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - class AudioResponseFormat < OpenAI::Enum - abstract! 
+ module AudioResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } - JSON = :json - TEXT = :text - SRT = :srt - VERBOSE_JSON = :verbose_json - VTT = :vtt + JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::OrSymbol) + TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::OrSymbol) + SRT = T.let(:srt, OpenAI::Models::AudioResponseFormat::OrSymbol) + VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::OrSymbol) + VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 7a7e54b3..75e90393 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -57,11 +57,13 @@ module OpenAI end # The current status of the batch. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Batch::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Batch::Status::TaggedSymbol).returns(OpenAI::Models::Batch::Status::TaggedSymbol) + end def status=(_) end @@ -193,7 +195,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: Symbol, + status: OpenAI::Models::Batch::Status::TaggedSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -245,7 +247,7 @@ module OpenAI endpoint: String, input_file_id: String, object: Symbol, - status: Symbol, + status: OpenAI::Models::Batch::Status::TaggedSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -266,19 +268,20 @@ module OpenAI end # The current status of the batch. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - VALIDATING = :validating - FAILED = :failed - IN_PROGRESS = :in_progress - FINALIZING = :finalizing - COMPLETED = :completed - EXPIRED = :expired - CANCELLING = :cancelling - CANCELLED = :cancelled + module Status + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Batch::Status::TaggedSymbol) } + + VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Batch::Status::TaggedSymbol) + FINALIZING = T.let(:finalizing, OpenAI::Models::Batch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Batch::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Batch::Status::TaggedSymbol) + CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol) end class Errors < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 50216257..62413988 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -8,11 +8,14 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } def completion_window end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + .returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + end def completion_window=(_) end @@ -20,11 +23,14 @@ module OpenAI # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } def endpoint end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + .returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + end def endpoint=(_) end @@ -61,8 +67,8 @@ module OpenAI sig do params( - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -76,8 +82,8 @@ module OpenAI override .returns( { - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -89,27 +95,31 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. - class CompletionWindow < OpenAI::Enum - abstract! + module CompletionWindow + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } - NUMBER_24H = :"24h" + NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) end # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - class Endpoint < OpenAI::Enum - abstract! 
+ module Endpoint + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } - V1_RESPONSES = :"/v1/responses" - V1_CHAT_COMPLETIONS = :"/v1/chat/completions" - V1_EMBEDDINGS = :"/v1/embeddings" - V1_COMPLETIONS = :"/v1/completions" + V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_CHAT_COMPLETIONS = + T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 32cf935a..0e2f75e4 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -12,11 +12,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + end def model=(_) end @@ -68,11 +71,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -219,12 +225,12 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -267,12 +273,12 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -303,10 +309,10 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! 
+ module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class ToolResources < OpenAI::BaseModel @@ -525,8 +531,8 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union - abstract! + module ChunkingStrategy + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index bb11fb1b..7d2b0fef 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -43,11 +43,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + end def order=(_) end @@ -56,7 +59,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -71,7 +74,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -81,13 +84,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index db97be12..d0c034e2 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -23,8 +23,8 @@ module OpenAI # the message content may be partially cut off if `finish_reason="length"`, which # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. - class AssistantResponseFormatOption < OpenAI::Union - abstract! + module AssistantResponseFormatOption + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 7a366c59..31e96276 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -23,8 +23,8 @@ module OpenAI # gracefully in your code. 
See the # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. - class AssistantStreamEvent < OpenAI::Union - abstract! + module AssistantStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index 313f24bc..6efa6193 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -3,8 +3,8 @@ module OpenAI module Models module Beta - class AssistantTool < OpenAI::Union - abstract! + module AssistantTool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 84562850..9a1ac99e 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -5,11 +5,14 @@ module OpenAI module Beta class AssistantToolChoice < OpenAI::BaseModel # The type of the tool. If type is `function`, the function name must be set - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + .returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + end def type=(_) end @@ -27,24 +30,37 @@ module OpenAI # Specifies a tool the model should use. Use to force the model to call a specific # tool. sig do - params(type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction).returns(T.attached_class) + params( + type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Models::Beta::AssistantToolChoiceFunction + ) + .returns(T.attached_class) end def self.new(type:, function: nil) end - sig { override.returns({type: Symbol, function: OpenAI::Models::Beta::AssistantToolChoiceFunction}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Models::Beta::AssistantToolChoiceFunction + } + ) + end def to_hash end # The type of the tool. If type is `function`, the function name must be set - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } - FUNCTION = :function - CODE_INTERPRETER = :code_interpreter - FILE_SEARCH = :file_search + FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + CODE_INTERPRETER = T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) + FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 552ca737..14fac828 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -10,23 +10,33 @@ module OpenAI # to the user. 
Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - class AssistantToolChoiceOption < OpenAI::Union - abstract! + module AssistantToolChoiceOption + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)} } + Variants = + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + } + end # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. - class Auto < OpenAI::Enum - abstract! + module Auto + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index b4ca656b..595f430c 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -45,11 +45,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) + end def model=(_) end @@ -68,11 +71,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -222,9 +228,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -270,9 +276,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -303,41 +309,62 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! - - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } - - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)} } + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + + O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O3_MINI_2025_01_31 = + T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O = T.let(:"gpt-4o", 
OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_11_20 = + T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_08_06 = + T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_2024_05_13 = + T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4O_MINI_2024_07_18 = + T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_5_PREVIEW = T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_5_PREVIEW_2025_02_27 = + T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO_2024_04_09 = + T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0125_PREVIEW = + T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_TURBO_PREVIEW = + T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_1106_PREVIEW = + T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_VISION_PREVIEW = + T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_16K = + T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_0613 = + T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_1106 = + T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_0125 = + T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) + GPT_3_5_TURBO_16K_0613 = + T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol) end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index ce4b782a..310aab2b 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -103,11 +103,14 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -117,23 +120,44 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { params(score_threshold: Float, ranker: Symbol).returns(T.attached_class) } + sig do + params( + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ) + .returns(T.attached_class) + end def self.new(score_threshold:, ranker: nil) end - sig { override.returns({score_threshold: Float, ranker: Symbol}) } + sig do + override + .returns( + { + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + } + ) + end def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - AUTO = :auto - DEFAULT_2024_08_21 = :default_2024_08_21 + module Ranker + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) } + + AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_08_21 = + T.let( + :default_2024_08_21, + OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ) end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 97821d4a..495664c0 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - class MessageStreamEvent < OpenAI::Union - abstract! + module MessageStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 6b1ae66d..51fa7db0 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -6,8 +6,8 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - class RunStepStreamEvent < OpenAI::Union - abstract! + module RunStepStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 5642aefb..e18dd1f2 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - class RunStreamEvent < OpenAI::Union - abstract! 
+ module RunStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 3a5a42f8..91459ba1 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -72,11 +72,14 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + end def model=(_) end @@ -182,13 +185,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -286,7 +312,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -298,7 +324,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -344,7 +375,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -356,7 +387,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + 
OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -380,10 +416,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class Thread < OpenAI::BaseModel @@ -509,11 +545,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + end def role=(_) end @@ -557,7 +596,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -580,7 +619,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -590,8 +629,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -619,13 +658,17 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -707,8 +750,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -964,8 +1007,8 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union - abstract! + module ChunkingStrategy + extend OpenAI::Union Variants = type_template(:out) do @@ -1186,8 +1229,8 @@ module OpenAI end end - class Tool < OpenAI::Union - abstract! 
+ module Tool
+            extend OpenAI::Union

             Variants =
               type_template(:out) do

@@ -1206,11 +1249,14 @@ module OpenAI
       # `last_messages`, the thread will be truncated to the n most recent messages in
       # the thread. When set to `auto`, messages in the middle of the thread will be
       # dropped to fit the context length of the model, `max_prompt_tokens`.
-      sig { returns(Symbol) }
+      sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) }
       def type
       end

-      sig { params(_: Symbol).returns(Symbol) }
+      sig do
+        params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
+          .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
+      end
       def type=(_)
       end

@@ -1226,11 +1272,25 @@ module OpenAI

       # Controls for how a thread will be truncated prior to the run. Use this to
       # control the initial context window of the run.
-      sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) }
+      sig do
+        params(
+          type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
+          last_messages: T.nilable(Integer)
+        )
+          .returns(T.attached_class)
+      end
       def self.new(type:, last_messages: nil)
       end

-      sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) }
+      sig do
+        override
+          .returns(
+            {
+              type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
+              last_messages: T.nilable(Integer)
+            }
+          )
+      end
       def to_hash
       end

@@ -1238,13 +1298,17 @@ module OpenAI
       # `last_messages`, the thread will be truncated to the n most recent messages in
       # the thread. When set to `auto`, messages in the middle of the thread will be
       # dropped to fit the context length of the model, `max_prompt_tokens`.
-      class Type < OpenAI::Enum
-        abstract!
+      module Type
+        extend OpenAI::Enum

-        Value = type_template(:out) { {fixed: Symbol} }
+        TaggedSymbol =
+          T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) }
+        OrSymbol =
+          T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) }

-        AUTO = :auto
-        LAST_MESSAGES = :last_messages
+        AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
+        LAST_MESSAGES =
+          T.let(:last_messages, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi
index 77085982..b02538f9 100644
--- a/rbi/lib/openai/models/beta/thread_create_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_params.rbi
@@ -129,11 +129,14 @@ module OpenAI
       #   most cases to represent user-generated messages.
       # - `assistant`: Indicates the message is generated by the assistant. Use this
       #   value to insert messages from the assistant into the conversation.
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + .returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + end def role=(_) end @@ -175,7 +178,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -198,7 +201,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -208,8 +211,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -237,13 +240,15 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -325,8 +330,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -576,8 +581,8 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. - class ChunkingStrategy < OpenAI::Union - abstract! + module ChunkingStrategy + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 1351f46f..74967cee 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index a388880d..90db2a3c 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -7,8 +7,8 @@ module OpenAI # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. 
- class AnnotationDelta < OpenAI::Union - abstract! + module AnnotationDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 879b0320..61ad33c5 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -18,32 +18,39 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + end def detail=(_) end - sig { params(file_id: String, detail: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + .returns(T.attached_class) + end def self.new(file_id:, detail: nil) end - sig { override.returns({file_id: String, detail: Symbol}) } + sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 89c10a01..efac1cbe 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -7,11 +7,14 @@ module OpenAI class ImageFileDelta < OpenAI::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + end def detail=(_) end @@ -26,24 +29,32 @@ module OpenAI def file_id=(_) end - sig { params(detail: Symbol, file_id: String).returns(T.attached_class) } + sig do + params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String) + .returns(T.attached_class) + end def self.new(detail: nil, file_id: nil) end - sig { override.returns({detail: Symbol, file_id: String}) } + sig do + override + .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) + end def to_hash end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index e4aa98eb..032d894d 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -17,32 +17,39 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + end def detail=(_) end - sig { params(url: String, detail: Symbol).returns(T.attached_class) } + sig do + params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + .returns(T.attached_class) + end def self.new(url:, detail: nil) end - sig { override.returns({url: String, detail: Symbol}) } + sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) } def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - class Detail < OpenAI::Enum - abstract! 
+ module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index a300a59e..6e8e3245 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -7,11 +7,14 @@ module OpenAI class ImageURLDelta < OpenAI::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + end def detail=(_) end @@ -25,24 +28,31 @@ module OpenAI def url=(_) end - sig { params(detail: Symbol, url: String).returns(T.attached_class) } + sig do + params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String) + .returns(T.attached_class) + end def self.new(detail: nil, url: nil) end - sig { override.returns({detail: Symbol, url: String}) } + sig do + override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String}) + end def to_hash end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 01311c44..96603146 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -141,11 +141,14 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + end def role=(_) end @@ -162,11 +165,14 @@ module OpenAI # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + end def status=(_) end @@ -200,9 +206,9 @@ module OpenAI incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), - role: Symbol, + role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, run_id: T.nilable(String), - status: Symbol, + status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, thread_id: String, object: Symbol ) @@ -247,9 +253,9 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, - role: Symbol, + role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, run_id: T.nilable(String), - status: Symbol, + status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, thread_id: String } ) @@ -336,8 +342,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -372,57 +378,75 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason the message is incomplete. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # On an incomplete message, details about why the message is incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason:) end - sig { override.returns({reason: Symbol}) } + sig do + override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) + end def to_hash end # The reason the message is incomplete. - class Reason < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - CONTENT_FILTER = :content_filter - MAX_TOKENS = :max_tokens - RUN_CANCELLED = :run_cancelled - RUN_EXPIRED = :run_expired - RUN_FAILED = :run_failed + module Reason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + MAX_TOKENS = + T.let(:max_tokens, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_CANCELLED = + T.let(:run_cancelled, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_EXPIRED = + T.let(:run_expired, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + RUN_FAILED = + T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) end end # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) end # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - INCOMPLETE = :incomplete - COMPLETED = :completed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 9e3c7f22..97ed19fe 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContent < OpenAI::Union - abstract! + module MessageContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index dce4489b..ec1c6ccc 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
- class MessageContentDelta < OpenAI::Union - abstract! + module MessageContentDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index e4fb311c..effc6e6c 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - class MessageContentPartParam < OpenAI::Union - abstract! + module MessageContentPartParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 6d8913ff..9c0c17d5 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -61,11 +61,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + end def role=(_) end @@ -107,7 +110,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) @@ -131,7 +134,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), request_options: OpenAI::RequestOptions @@ -142,8 +145,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -171,13 +174,15 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -259,8 +264,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! 
+ module Tool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 7883727c..b7bfc61c 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -49,11 +49,14 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + end def role=(_) end @@ -68,7 +71,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: Symbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol ) .returns(T.attached_class) end @@ -87,7 +90,7 @@ module OpenAI OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: Symbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol } ) end @@ -95,13 +98,15 @@ module OpenAI end # The entity that produced the message. One of `user` or `assistant`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 4b93b0c8..d80e8d49 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -44,11 +44,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + end def order=(_) end @@ -66,7 +69,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -82,7 +85,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: OpenAI::RequestOptions } @@ -93,13 +96,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 0ea43a0c..d1587a17 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -256,11 +256,14 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + end def status=(_) end @@ -281,13 +284,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. - sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -405,9 +431,14 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: Symbol, + status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -484,9 +515,14 @@ module OpenAI ) ), started_at: T.nilable(Integer), - status: Symbol, + status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -507,43 +543,57 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason: nil) end - sig { override.returns({reason: Symbol}) } + sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. - class Reason < OpenAI::Enum - abstract! + module Reason + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } - MAX_COMPLETION_TOKENS = :max_completion_tokens - MAX_PROMPT_TOKENS = :max_prompt_tokens + MAX_COMPLETION_TOKENS = + T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + MAX_PROMPT_TOKENS = + T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) end end class LastError < OpenAI::BaseModel # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -557,23 +607,33 @@ module OpenAI end # The last error associated with this run. Will be `null` if there are no errors. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) + end def to_hash end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - class Code < OpenAI::Enum - abstract! 
+ module Code + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - INVALID_PROMPT = :invalid_prompt + SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + INVALID_PROMPT = + T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) end end @@ -652,11 +712,14 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + end def type=(_) end @@ -672,11 +735,25 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } + sig do + params( + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + ) + .returns(T.attached_class) + end def self.new(type:, last_messages: nil) end - sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + } + ) + end def to_hash end @@ -684,13 +761,17 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } - AUTO = :auto - LAST_MESSAGES = :last_messages + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + LAST_MESSAGES = + T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index a54ec011..46e8ddc5 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -26,11 +26,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
- sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -112,11 +115,14 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) + end def model=(_) end @@ -137,11 +143,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -223,13 +232,36 @@ module OpenAI # to the user. Specifying a particular tool like `{"type": "file_search"}` or # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. 
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) - .returns(T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice))) + params( + _: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) + .returns( + T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ) + ) end def tool_choice=(_) end @@ -308,16 +340,16 @@ module OpenAI sig do params( assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -327,7 +359,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -370,16 +407,16 @@ module OpenAI .returns( { assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -389,7 +426,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -462,11 +504,14 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
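The `tool_choice` hunks narrow the old bare `Symbol` arm of the union to `AssistantToolChoiceOption::Auto::OrSymbol`, so only the auto-mode symbols that enum defines (such as `:auto`) type-check alongside a structured `AssistantToolChoice`. A hedged sketch of both spellings at a call site; the exact `runs.create` shape is assumed from the `sig` above, and `client` stands in for a configured `OpenAI::Client`:

# Bare symbol: satisfies the Auto::OrSymbol arm of the union.
client.beta.threads.runs.create("thread_abc", assistant_id: "asst_123", tool_choice: :auto)

# Structured choice: satisfies the AssistantToolChoice arm, forcing one tool.
client.beta.threads.runs.create(
  "thread_abc",
  assistant_id: "asst_123",
  tool_choice: OpenAI::Models::Beta::AssistantToolChoice.new(type: :file_search)
)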
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + end def role=(_) end @@ -516,7 +561,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -539,7 +584,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]) } @@ -549,8 +594,8 @@ module OpenAI end # The text contents of the message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -578,13 +623,17 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant + USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) end class Attachment < OpenAI::BaseModel @@ -666,8 +715,8 @@ module OpenAI def to_hash end - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do @@ -705,10 +754,10 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end class TruncationStrategy < OpenAI::BaseModel @@ -716,11 +765,14 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + end def type=(_) end @@ -736,11 +788,25 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { params(type: Symbol, last_messages: T.nilable(Integer)).returns(T.attached_class) } + sig do + params( + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ) + .returns(T.attached_class) + end def self.new(type:, last_messages: nil) end - sig { override.returns({type: Symbol, last_messages: T.nilable(Integer)}) } + sig do + override + .returns( + { + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end def to_hash end @@ -748,13 +814,17 @@ module OpenAI # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) } - AUTO = :auto - LAST_MESSAGES = :last_messages + AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) + LAST_MESSAGES = + T.let(:last_messages, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 44ca10d2..39cc0ab8 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -44,11 +44,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + end def order=(_) end @@ -57,7 +60,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -72,7 +75,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -82,13 +85,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
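Note the division of labor between the two aliases across these files: response models such as `Run` declare attributes with `TaggedSymbol`, since the SDK promises to hand back branded values, while request-params classes such as `RunCreateParams` and `RunListParams` accept the looser `OrSymbol`, keeping plain symbols valid as inputs. Condensed into one hypothetical stub (names not from this patch):

# Illustrative .rbi fragment.
sig { returns(Status::TaggedSymbol) }          # output position: always branded
def status; end

sig { params(order: Order::OrSymbol).void }    # input position: bare :asc or :desc still accepted
def initialize(order: :asc); end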
- class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index 76ada3f1..a6246d8c 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -7,20 +7,21 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - class RunStatus < OpenAI::Enum - abstract! + module RunStatus + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } - QUEUED = :queued - IN_PROGRESS = :in_progress - REQUIRES_ACTION = :requires_action - CANCELLING = :cancelling - CANCELLED = :cancelled - FAILED = :failed - COMPLETED = :completed - INCOMPLETE = :incomplete - EXPIRED = :expired + QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + REQUIRES_ACTION = T.let(:requires_action, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + CANCELLING = T.let(:cancelling, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 2b204c7c..749ba465 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -143,8 +143,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union - abstract! + module Output + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 6e716d13..e369160c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -156,8 +156,8 @@ module OpenAI end # Text output from the Code Interpreter tool call as part of a run step. - class Output < OpenAI::Union - abstract! 
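Because `OrSymbol` keeps plain `Symbol` in the union, this migration is source-compatible for callers; both spellings below satisfy `RunListParams::Order::OrSymbol`. A sketch, assuming the generated `runs.list` accepts the keywords its params class declares:

runs = client.beta.threads.runs.list("thread_abc", order: :desc)

# Equivalent, using the branded constant instead of a bare symbol.
runs = client.beta.threads.runs.list(
  "thread_abc",
  order: OpenAI::Models::Beta::Threads::RunListParams::Order::DESC
)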
+ module Output + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index b4f818ee..13b22ad3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -108,11 +108,22 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. - sig { returns(Symbol) } + sig do + returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params( + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + .returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end def ranker=(_) end @@ -127,23 +138,53 @@ module OpenAI end # The ranking options for the file search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker:, score_threshold:) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + { + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + score_threshold: Float + } + ) + end def to_hash end # The ranker to use for the file search. If not specified will use the `auto` # ranker. - class Ranker < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - AUTO = :auto - DEFAULT_2024_08_21 = :default_2024_08_21 + module Ranker + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end + + AUTO = + T.let( + :auto, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + DEFAULT_2024_08_21 = + T.let( + :default_2024_08_21, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) end end @@ -231,29 +272,68 @@ module OpenAI end # The type of the content. 
- sig { returns(T.nilable(Symbol)) } + sig do + returns( + T.nilable( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + ) + end def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params( + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + .returns( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + end def type=(_) end - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params( + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + .returns(T.attached_class) + end def self.new(text: nil, type: nil) end - sig { override.returns({text: String, type: Symbol}) } + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + } + ) + end def to_hash end # The type of the content. - class Type < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - TEXT = :text + module Type + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) + end + + TEXT = + T.let( + :text, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index d6fc02be..06e7495e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -120,11 +120,14 @@ module OpenAI # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + end def status=(_) end @@ -168,11 +171,14 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + end def type=(_) end @@ -202,13 +208,13 @@ module OpenAI last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: Symbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: Symbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage), object: Symbol ) @@ -249,13 +255,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), object: Symbol, run_id: String, - status: Symbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: Symbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage) } ) @@ -265,11 +271,14 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -284,42 +293,57 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns( + {code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String} + ) + end def to_hash end # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded + module Code + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + + SERVER_ERROR = + T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) end end # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - CANCELLED = :cancelled - FAILED = :failed - COMPLETED = :completed - EXPIRED = :expired + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) end # The details of the run step. - class StepDetails < OpenAI::Union - abstract! + module StepDetails + extend OpenAI::Union Variants = type_template(:out) do @@ -333,13 +357,16 @@ module OpenAI end # The type of run step, which can be either `message_creation` or `tool_calls`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } - MESSAGE_CREATION = :message_creation - TOOL_CALLS = :tool_calls + MESSAGE_CREATION = + T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) end class Usage < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 6dca9ad3..cbc714ec 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -65,8 +65,8 @@ module OpenAI end # The details of the run step. - class StepDetails < OpenAI::Union - abstract! + module StepDetails + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 566dd76e..9e59ca4b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -5,13 +5,18 @@ module OpenAI module Beta module Threads module Runs - class RunStepInclude < OpenAI::Enum - abstract! 
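The union conversions are purely structural: `class X < OpenAI::Union` plus `abstract!` becomes `module X` plus `extend OpenAI::Union`, and the `Variants` type template is left untouched, so consumers still branch on the concrete variant classes. A hedged sketch of dispatching on a run step, using the variant classes named in the `RunStep` sigs above:

case step.step_details
when OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails
  # the step created a message
when OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails
  # the step invoked one or more tools
end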
+ module RunStepInclude + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = - :"step_details.tool_calls[*].file_search.results[*].content" + T.let( + :"step_details.tool_calls[*].file_search.results[*].content", + OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol + ) end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 73059bac..ef62a39d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -48,11 +48,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -68,11 +71,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + end def order=(_) end @@ -81,9 +87,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -98,9 +104,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -110,13 +116,15 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
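`RunStepInclude` also shows why the branded constants are worth having: the only member is the unwieldy symbol `:"step_details.tool_calls[*].file_search.results[*].content"`, which is easy to mistype as a literal. A sketch of requesting it by constant; the `steps.list` call shape is assumed from `StepListParams`:

client.beta.threads.runs.steps.list(
  "run_abc",
  thread_id: "thread_abc",
  include: [
    OpenAI::Models::Beta::Threads::Runs::RunStepInclude::STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT
  ]
)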
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 71dc2e52..54eb0d50 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -32,11 +32,14 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) + end def include=(_) end @@ -44,7 +47,7 @@ module OpenAI params( thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -58,7 +61,7 @@ module OpenAI { thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 2550325a..85c08734 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - class ToolCall < OpenAI::Union - abstract! + module ToolCall + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 37550da6..0043de5a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. - class ToolCallDelta < OpenAI::Union - abstract! + module ToolCallDelta + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 508ca188..d47b79d1 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -54,11 +54,14 @@ module OpenAI end # The service tier used for processing the request. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) + end def service_tier=(_) end @@ -91,7 +94,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], created: Integer, model: String, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage, object: Symbol @@ -119,7 +122,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: OpenAI::Models::CompletionUsage } @@ -135,11 +138,14 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } def finish_reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + end def finish_reason=(_) end @@ -177,7 +183,7 @@ module OpenAI sig do params( - finish_reason: Symbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -191,7 +197,7 @@ module OpenAI override .returns( { - finish_reason: Symbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), message: OpenAI::Models::Chat::ChatCompletionMessage @@ -207,16 +213,20 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - STOP = :stop - LENGTH = :length - TOOL_CALLS = :tool_calls - CONTENT_FILTER = :content_filter - FUNCTION_CALL = :function_call + module FinishReason + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + + STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + FUNCTION_CALL = + T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel @@ -270,13 +280,15 @@ module OpenAI end # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } - SCALE = :scale - DEFAULT = :default + SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index bdba736c..737d825e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -197,8 +197,8 @@ module OpenAI # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -223,8 +223,8 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ArrayOfContentPart < OpenAI::Union - abstract! + module ArrayOfContentPart + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 215adc55..2cd8fe2e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -6,64 +6,88 @@ module OpenAI class ChatCompletionAudioParam < OpenAI::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + end def format_=(_) end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } def voice end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + end def voice=(_) end # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). - sig { params(format_: Symbol, voice: Symbol).returns(T.attached_class) } + sig do + params( + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + .returns(T.attached_class) + end def self.new(format_:, voice:) end - sig { override.returns({format_: Symbol, voice: Symbol}) } + sig do + override + .returns( + { + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + } + ) + end def to_hash end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - class Format < OpenAI::Enum - abstract! + module Format + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } - WAV = :wav - MP3 = :mp3 - FLAC = :flac - OPUS = :opus - PCM16 = :pcm16 + WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) + PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. - class Voice < OpenAI::Enum - abstract! 
+ module Voice + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } - ALLOY = :alloy - ASH = :ash - BALLAD = :ballad - CORAL = :coral - ECHO = :echo - SAGE = :sage - SHIMMER = :shimmer - VERSE = :verse + ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) + VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 647695a9..f5530b98 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -56,11 +56,14 @@ module OpenAI end # The service tier used for processing the request. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) + end def service_tier=(_) end @@ -99,7 +102,7 @@ module OpenAI choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage), object: Symbol @@ -127,7 +130,7 @@ module OpenAI created: Integer, model: String, object: Symbol, - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), system_fingerprint: String, usage: T.nilable(OpenAI::Models::CompletionUsage) } @@ -155,11 +158,14 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. 
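`ChatCompletionAudioParam` exercises the pattern on a model with two required enum inputs: both keywords now take `OrSymbol`, so construction with bare symbols keeps type-checking, and the branded constants work interchangeably. A sketch using values defined in the `Format` and `Voice` modules above:

audio = OpenAI::Models::Chat::ChatCompletionAudioParam.new(format_: :mp3, voice: :alloy)

# Equivalent, via the branded constants.
audio = OpenAI::Models::Chat::ChatCompletionAudioParam.new(
  format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::MP3,
  voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY
)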
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } def finish_reason end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) + end def finish_reason=(_) end @@ -187,7 +193,7 @@ module OpenAI sig do params( delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(Symbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) ) @@ -201,7 +207,7 @@ module OpenAI .returns( { delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(Symbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), index: Integer, logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) } @@ -243,11 +249,14 @@ module OpenAI end # The role of the author of this message. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + end def role=(_) end @@ -268,7 +277,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: Symbol, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) .returns(T.attached_class) @@ -283,7 +292,7 @@ module OpenAI content: T.nilable(String), function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: T.nilable(String), - role: Symbol, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } ) @@ -325,16 +334,21 @@ module OpenAI end # The role of the author of this message. - class Role < OpenAI::Enum - abstract! 
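# A sketch of reading the nilable typed fields above while streaming, assuming
# `chunk` is a ChatCompletionChunk received from a stream. `role` only arrives
# on the first delta of a message, hence the nil guards:
delta = chunk.choices.first.delta
puts "start of #{delta.role} message" unless delta.role.nil?
print delta.content unless delta.content.nil?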
- - Value = type_template(:out) { {fixed: Symbol} } - - DEVELOPER = :developer - SYSTEM = :system - USER = :user - ASSISTANT = :assistant - TOOL = :tool + module Role + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } + + DEVELOPER = + T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) end class ToolCall < OpenAI::BaseModel @@ -367,11 +381,14 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + end def type=(_) end @@ -380,7 +397,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: Symbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -394,7 +411,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: Symbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol } ) end @@ -433,12 +450,16 @@ module OpenAI end # The type of the tool. Currently, only `function` is supported. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) } - FUNCTION = :function + FUNCTION = + T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) end end end @@ -449,16 +470,22 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - class FinishReason < OpenAI::Enum - abstract! 
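# A sketch of stitching streamed tool calls back together by `index`, assuming
# deltas shaped as declared above and that the (elided) Function payload
# exposes partial `arguments` strings, as it does in the REST API:
buffers = Hash.new { |h, k| h[k] = String.new }
delta.tool_calls&.each do |tool_call|
  buffers[tool_call.index] << (tool_call.function&.arguments || "")
end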
- - Value = type_template(:out) { {fixed: Symbol} } - - STOP = :stop - LENGTH = :length - TOOL_CALLS = :tool_calls - CONTENT_FILTER = :content_filter - FUNCTION_CALL = :function_call + module FinishReason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } + + STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + TOOL_CALLS = + T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + FUNCTION_CALL = + T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel @@ -512,13 +539,15 @@ module OpenAI end # The service tier used for processing the request. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } - SCALE = :scale - DEFAULT = :default + SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index bff95f2d..340e2a97 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -5,8 +5,8 @@ module OpenAI module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). - class ChatCompletionContentPart < OpenAI::Union - abstract! + module ChatCompletionContentPart + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 78ef980f..01893dab 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -50,32 +50,49 @@ module OpenAI # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + end def detail=(_) end - sig { params(url: String, detail: Symbol).returns(T.attached_class) } + sig do + params( + url: String, + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + ) + .returns(T.attached_class) + end def self.new(url:, detail: nil) end - sig { override.returns({url: String, detail: Symbol}) } + sig do + override + .returns( + {url: String, detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol} + ) + end def to_hash end # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) } - AUTO = :auto - LOW = :low - HIGH = :high + AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 87144715..0a14a898 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -52,30 +52,52 @@ module OpenAI end # The format of the encoded audio data. Currently supports "wav" and "mp3". - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + end def format_=(_) end - sig { params(data: String, format_: Symbol).returns(T.attached_class) } + sig do + params( + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + ) + .returns(T.attached_class) + end def self.new(data:, format_:) end - sig { override.returns({data: String, format_: Symbol}) } + sig do + override + .returns( + { + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + } + ) + end def to_hash end # The format of the encoded audio data. Currently supports "wav" and "mp3". - class Format < OpenAI::Enum - abstract! 
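# A minimal sketch of the image content part declared above; the URL is a
# placeholder, and `detail:` may be omitted since it defaults to nil:
image_url = OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL.new(
  url: "https://example.com/diagram.png", # hypothetical URL
  detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::LOW
)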
- - Value = type_template(:out) { {fixed: Symbol} } - - WAV = :wav - MP3 = :mp3 + module Format + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) } + + WAV = + T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) + MP3 = + T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 7c1f2e46..935cbc88 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the developer message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 6d17e25c..7dbb72ea 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -6,8 +6,8 @@ module OpenAI # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - class ChatCompletionMessageParam < OpenAI::Union - abstract! + module ChatCompletionMessageParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 25501740..5e747ff9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -3,13 +3,14 @@ module OpenAI module Models module Chat - class ChatCompletionModality < OpenAI::Enum - abstract! + module ChatCompletionModality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } - TEXT = :text - AUDIO = :audio + TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) + AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol) end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 206b2990..9773677d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -52,8 +52,8 @@ module OpenAI # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - class Content < OpenAI::Union - abstract! 
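# A sketch of the input-audio content part declared above, assuming a local
# WAV file; `data` carries the Base64-encoded bytes and `format_:` takes
# :wav or :mp3 per the enum:
require "base64"

input_audio = OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio.new(
  data: Base64.strict_encode64(File.binread("clip.wav")), # hypothetical path
  format_: :wav
)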
+ module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index e2a2c70e..dd316075 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -4,17 +4,18 @@ module OpenAI module Models module Chat # The role of the author of a message - class ChatCompletionRole < OpenAI::Enum - abstract! + module ChatCompletionRole + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } - DEVELOPER = :developer - SYSTEM = :system - USER = :user - ASSISTANT = :assistant - TOOL = :tool - FUNCTION = :function + DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) + FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol) end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 9d79c62a..ca2dc2e7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -63,8 +63,8 @@ module OpenAI end # The contents of the system message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index c9ad2647..9a79b1ff 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -12,23 +12,32 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. - class ChatCompletionToolChoiceOption < OpenAI::Union - abstract! + module ChatCompletionToolChoiceOption + extend OpenAI::Union Variants = - type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)} } + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + } + end # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. - class Auto < OpenAI::Enum - abstract! 
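# A sketch of the widened union above: `tool_choice` now accepts either an
# Auto enum symbol or a ChatCompletionNamedToolChoice object. The named
# constructor's shape is not shown in this hunk, so it is only indicated:
tool_choice = :required # Auto::OrSymbol arm of the union
# tool_choice = OpenAI::Models::Chat::ChatCompletionNamedToolChoice.new(...) # object arm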
+ module Auto + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 7c447076..6f5f249b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -59,8 +59,8 @@ module OpenAI end # The contents of the tool message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 53c7c3a7..dde9e769 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -120,8 +120,8 @@ module OpenAI end # The contents of the user message. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index f0e77033..851c6948 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -64,11 +64,14 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) + end def model=(_) end @@ -111,13 +114,32 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. 
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) + ) + end def function_call end sig do - params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) - .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)) + params( + _: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) + .returns( + T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + ) end def function_call=(_) end @@ -213,11 +235,14 @@ module OpenAI # this model generate both text and audio responses, you can use: # # `["text", "audio"]` - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } def modalities end - sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } + sig do + params(_: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) + .returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) + end def modalities=(_) end @@ -273,11 +298,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def reasoning_effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def reasoning_effort=(_) end @@ -351,11 +379,14 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } def service_tier end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) + end def service_tier=(_) end @@ -422,13 +453,32 @@ module OpenAI # # `none` is the default when no tools are present. `auto` is the default if tools # are present. 
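# A sketch of building request params with the typed aliases above, assuming
# the generated `self.new` mirrors this params signature and that `messages`
# is prepared elsewhere; `model:` still accepts a plain String:
params = OpenAI::Models::Chat::CompletionCreateParams.new(
  messages: messages, # assumed built elsewhere
  model: OpenAI::Models::ChatModel::GPT_4O,
  modalities: [:text, :audio],
  service_tier: :default
)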
- sig { returns(T.nilable(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice))) } + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) + ) + end def tool_choice end sig do - params(_: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) - .returns(T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)) + params( + _: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) + .returns( + T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ) + ) end def tool_choice=(_) end @@ -508,34 +558,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -594,34 +650,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], 
logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -639,10 +701,10 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } end # Deprecated in favor of `tool_choice`. @@ -659,22 +721,34 @@ module OpenAI # # `none` is the default when no functions are present. `auto` is the default if # functions are present. - class FunctionCall < OpenAI::Union - abstract! + module FunctionCall + extend OpenAI::Union Variants = - type_template(:out) { {fixed: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)} } + type_template(:out) do + { + fixed: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ) + } + end # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. - class FunctionCallMode < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - NONE = :none - AUTO = :auto + module FunctionCallMode + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) } + + NONE = + T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) + AUTO = + T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol) end end @@ -728,13 +802,15 @@ module OpenAI end end - class Modality < OpenAI::Enum - abstract! 
+ module Modality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } - TEXT = :text - AUDIO = :audio + TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) + AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol) end # An object specifying the format that the model must output. @@ -747,8 +823,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormat < OpenAI::Union - abstract! + module ResponseFormat + extend OpenAI::Union Variants = type_template(:out) do @@ -776,19 +852,21 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - class ServiceTier < OpenAI::Enum - abstract! + module ServiceTier + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } - AUTO = :auto - DEFAULT = :default + AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) + DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol) end # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union - abstract! + module Stop + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } @@ -798,11 +876,18 @@ module OpenAI class WebSearchOptions < OpenAI::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(Symbol)) } + sig do + returns( + T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + ) + end def search_context_size end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + end def search_context_size=(_) end @@ -823,7 +908,7 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( - search_context_size: Symbol, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) ) .returns(T.attached_class) @@ -835,7 +920,7 @@ module OpenAI override .returns( { - search_context_size: Symbol, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) } ) @@ -845,14 +930,28 @@ module OpenAI # High level guidance for the amount of context window space to use for the # search. 
One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum - abstract! + module SearchContextSize + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + ) + end - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = + T.let(:low, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + MEDIUM = + T.let( + :medium, + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol + ) + HIGH = + T.let(:high, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index d4ce52ba..096a12f8 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -47,11 +47,14 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + .returns(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + end def order=(_) end @@ -61,7 +64,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -77,7 +80,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -87,13 +90,15 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 014bdbc9..d0cfdba1 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -28,11 +28,14 @@ module OpenAI # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
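# A sketch of the list params declared above, assuming the generated
# `self.new` mirrors the params signature; `order:` accepts the typed
# constant or a bare :asc / :desc symbol:
list_params = OpenAI::Models::Chat::CompletionListParams.new(
  limit: 20,
  order: OpenAI::Models::Chat::CompletionListParams::Order::DESC
)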
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + .returns(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + end def order=(_) end @@ -40,7 +43,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -49,27 +52,31 @@ module OpenAI end sig do - override.returns( - { - after: String, - limit: Integer, - order: Symbol, - request_options: OpenAI::RequestOptions - } - ) + override + .returns( + { + after: String, + limit: Integer, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 9ec815a5..20705fb9 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -2,54 +2,60 @@ module OpenAI module Models - class ChatModel < OpenAI::Enum - abstract! 
+ module ChatModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ChatModel::TaggedSymbol) } - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - O1_PREVIEW = :"o1-preview" - O1_PREVIEW_2024_09_12 = :"o1-preview-2024-09-12" - O1_MINI = :"o1-mini" - O1_MINI_2024_09_12 = :"o1-mini-2024-09-12" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_AUDIO_PREVIEW = :"gpt-4o-audio-preview" - GPT_4O_AUDIO_PREVIEW_2024_10_01 = :"gpt-4o-audio-preview-2024-10-01" - GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" - GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" - GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" - GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" - GPT_4O_MINI_SEARCH_PREVIEW = :"gpt-4o-mini-search-preview" - GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" - GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" - CHATGPT_4O_LATEST = :"chatgpt-4o-latest" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0301 = :"gpt-3.5-turbo-0301" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::OrSymbol) + O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::OrSymbol) + O1 = T.let(:o1, OpenAI::Models::ChatModel::OrSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::OrSymbol) + O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) + O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::OrSymbol) + O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW_2024_10_01 = + T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_AUDIO_PREVIEW_2024_12_17 = + T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + 
GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = + T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_SEARCH_PREVIEW = T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_SEARCH_PREVIEW_2025_03_11 = + T.let(:"gpt-4o-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = + T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) + CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::OrSymbol) + GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::OrSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::OrSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_16K = T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::OrSymbol) + GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index bbb84c5d..8ceb7ea4 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -20,11 +20,14 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - sig { returns(Symbol) } + sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::ComparisonFilter::Type::OrSymbol) + .returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) + end def type=(_) end @@ -41,12 +44,22 @@ module OpenAI # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. 
sig do - params(key: String, type: Symbol, value: T.any(String, Float, T::Boolean)).returns(T.attached_class) + params( + key: String, + type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, + value: T.any(String, Float, T::Boolean) + ) + .returns(T.attached_class) end def self.new(key:, type:, value:) end - sig { override.returns({key: String, type: Symbol, value: T.any(String, Float, T::Boolean)}) } + sig do + override + .returns( + {key: String, type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean)} + ) + end def to_hash end @@ -58,23 +71,24 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } - EQ = :eq - NE = :ne - GT = :gt - GTE = :gte - LT = :lt - LTE = :lte + EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) end # The value to compare against the attribute key; supports string, number, or # boolean types. - class Value < OpenAI::Union - abstract! + module Value + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 93b41ebf..a03a5dd9 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -7,11 +7,14 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } def finish_reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + .returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + end def finish_reason=(_) end @@ -44,7 +47,7 @@ module OpenAI sig do params( - finish_reason: Symbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -58,7 +61,7 @@ module OpenAI override .returns( { - finish_reason: Symbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, index: Integer, logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), text: String @@ -72,14 +75,15 @@ module OpenAI # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - class FinishReason < OpenAI::Enum - abstract! 
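# A minimal sketch of the filter declared above; per the Value union, `value`
# must be a String, Float, or boolean, so a numeric threshold is a Float:
filter = OpenAI::Models::ComparisonFilter.new(
  key: "rating", # hypothetical attribute key
  type: OpenAI::Models::ComparisonFilter::Type::GTE,
  value: 4.0
)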
+ module FinishReason + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } - STOP = :stop - LENGTH = :length - CONTENT_FILTER = :content_filter + STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) end class Logprobs < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index eb7a6ceb..4b26334b 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -11,11 +11,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) + end def model=(_) end @@ -247,7 +250,7 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -301,7 +304,7 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -337,14 +340,19 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)} } - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } - GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" - DAVINCI_002 = :"davinci-002" - BABBAGE_002 = :"babbage-002" + GPT_3_5_TURBO_INSTRUCT = + T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) end # The prompt(s) to generate completions for, encoded as a string, array of @@ -353,8 +361,8 @@ module OpenAI # Note that <|endoftext|> is the document separator that the model sees during # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. - class Prompt < OpenAI::Union - abstract! + module Prompt + extend OpenAI::Union Variants = type_template(:out) do @@ -370,8 +378,8 @@ module OpenAI # Up to 4 sequences where the API will stop generating further tokens. 
The # returned text will not contain the stop sequence. - class Stop < OpenAI::Union - abstract! + module Stop + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 6722f10d..6a79164a 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -17,42 +17,57 @@ module OpenAI end # Type of operation: `and` or `or`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::CompoundFilter::Type::OrSymbol) + .returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) + end def type=(_) end # Combine multiple filters using `and` or `or`. sig do - params(filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol) + params( + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + type: OpenAI::Models::CompoundFilter::Type::OrSymbol + ) .returns(T.attached_class) end def self.new(filters:, type:) end - sig { override.returns({filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], type: Symbol}) } + sig do + override + .returns( + { + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + type: OpenAI::Models::CompoundFilter::Type::OrSymbol + } + ) + end def to_hash end # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. - class Filter < OpenAI::Union - abstract! + module Filter + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } end # Type of operation: `and` or `or`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } - AND = :and - OR = :or + AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::OrSymbol) + OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 3f30f067..bc8012b1 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -30,11 +30,14 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) + end def model=(_) end @@ -50,11 +53,14 @@ module OpenAI # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
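# A sketch of combining filters with the CompoundFilter declared above,
# reusing two hypothetical ComparisonFilter instances built as shown earlier:
compound = OpenAI::Models::CompoundFilter.new(
  filters: [rating_filter, language_filter], # assumed ComparisonFilter instances
  type: OpenAI::Models::CompoundFilter::Type::AND
)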
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } def encoding_format end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + .returns(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + end def encoding_format=(_) end @@ -72,9 +78,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,9 +94,9 @@ module OpenAI .returns( { input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: OpenAI::RequestOptions } @@ -107,8 +113,8 @@ module OpenAI # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -127,21 +133,23 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)} } end # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). - class EncodingFormat < OpenAI::Enum - abstract! + module EncodingFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } - FLOAT = :float - BASE64 = :base64 + FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 1bd2eac2..2b064f56 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -2,14 +2,15 @@ module OpenAI module Models - class EmbeddingModel < OpenAI::Enum - abstract! 
+ module EmbeddingModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingModel::TaggedSymbol) } - TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" - TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" - TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" + TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index aad68d3d..eb1f53b5 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # The strategy used to chunk the file. - class FileChunkingStrategy < OpenAI::Union - abstract! + module FileChunkingStrategy + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 07560304..211e8f69 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -4,8 +4,8 @@ module OpenAI module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. - class FileChunkingStrategyParam < OpenAI::Union - abstract! + module FileChunkingStrategyParam + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index 98619f88..fbd7b9d0 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -19,18 +19,18 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose=(_) end sig do params( file: T.any(IO, StringIO), - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -39,13 +39,14 @@ module OpenAI end sig do - override.returns( - { - file: T.any(IO, StringIO), - purpose: Symbol, - request_options: OpenAI::RequestOptions - } - ) + override + .returns( + { + file: T.any(IO, StringIO), + purpose: OpenAI::Models::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) end def to_hash end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 48b8106e..4af7b2fc 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -30,11 +30,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
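As with the other converted enums, `Order::OrSymbol` admits both the generated constant and a plain symbol. A short usage sketch, assuming the generated `files.list` resource method:

    newest_first = client.files.list(
      order: OpenAI::Models::FileListParams::Order::DESC, # equivalently :desc
      limit: 20
    )
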
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileListParams::Order::OrSymbol) + .returns(OpenAI::Models::FileListParams::Order::OrSymbol) + end def order=(_) end @@ -51,7 +54,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -66,7 +69,7 @@ module OpenAI { after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: OpenAI::RequestOptions } @@ -77,13 +80,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index b31df148..2918d2f0 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -51,21 +51,27 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileObject::Purpose::TaggedSymbol) + .returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) + end def purpose=(_) end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FileObject::Status::TaggedSymbol) + .returns(OpenAI::Models::FileObject::Status::TaggedSymbol) + end def status=(_) end @@ -95,8 +101,8 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: Symbol, - status: Symbol, + purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, + status: OpenAI::Models::FileObject::Status::TaggedSymbol, expires_at: Integer, status_details: String, object: Symbol @@ -125,8 +131,8 @@ module OpenAI created_at: Integer, filename: String, object: Symbol, - purpose: Symbol, - status: Symbol, + purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, + status: OpenAI::Models::FileObject::Status::TaggedSymbol, expires_at: Integer, status_details: String } @@ -138,30 +144,32 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - class Purpose < OpenAI::Enum - abstract! 
+ module Purpose + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } - ASSISTANTS = :assistants - ASSISTANTS_OUTPUT = :assistants_output - BATCH = :batch - BATCH_OUTPUT = :batch_output - FINE_TUNE = :"fine-tune" - FINE_TUNE_RESULTS = :"fine-tune-results" - VISION = :vision + ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + BATCH = T.let(:batch, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + BATCH_OUTPUT = T.let(:batch_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FileObject::Purpose::TaggedSymbol) + FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) + VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) end # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Status::TaggedSymbol) } - UPLOADED = :uploaded - PROCESSED = :processed - ERROR = :error + UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) + PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) + ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 07bd11c5..77bba00f 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -6,17 +6,18 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - class FilePurpose < OpenAI::Enum - abstract! 
+ module FilePurpose + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FilePurpose::TaggedSymbol) } - ASSISTANTS = :assistants - BATCH = :batch - FINE_TUNE = :"fine-tune" - VISION = :vision - USER_DATA = :user_data - EVALS = :evals + ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::OrSymbol) + BATCH = T.let(:batch, OpenAI::Models::FilePurpose::OrSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::OrSymbol) + VISION = T.let(:vision, OpenAI::Models::FilePurpose::OrSymbol) + USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::OrSymbol) + EVALS = T.let(:evals, OpenAI::Models::FilePurpose::OrSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b4326f0b..f688b4b2 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -117,11 +117,14 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + end def status=(_) end @@ -218,7 +221,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: Symbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -268,7 +271,7 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: Symbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), @@ -381,24 +384,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -406,17 +409,20 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } - VALIDATING_FILES = :validating_files - QUEUED = :queued - RUNNING = :running - SUCCEEDED = :succeeded - FAILED = :failed - CANCELLED = :cancelled + VALIDATING_FILES = + T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + QUEUED = T.let(:queued, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + RUNNING = T.let(:running, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + SUCCEEDED = T.let(:succeeded, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) end class Method < OpenAI::BaseModel @@ -445,11 +451,14 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + end def type=(_) end @@ -458,7 +467,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -471,7 +480,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol } ) end @@ -576,32 +585,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union - abstract! + module Beta + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -694,24 +703,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! 
+ module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -719,13 +728,15 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } - SUPERVISED = :supervised - DPO = :dpo + SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index fb1daf42..7bedb937 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -23,11 +23,14 @@ module OpenAI end # The log level of the event. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } def level end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + end def level=(_) end @@ -59,11 +62,14 @@ module OpenAI end # The type of event. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + end def type=(_) end @@ -72,10 +78,10 @@ module OpenAI params( id: String, created_at: Integer, - level: Symbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, message: String, data: T.anything, - type: Symbol, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol, object: Symbol ) .returns(T.attached_class) @@ -89,11 +95,11 @@ module OpenAI { id: String, created_at: Integer, - level: Symbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, message: String, object: Symbol, data: T.anything, - type: Symbol + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol } ) end @@ -101,24 +107,28 @@ module OpenAI end # The log level of the event. - class Level < OpenAI::Enum - abstract! 
+ module Level + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } - INFO = :info - WARN = :warn - ERROR = :error + INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) end # The type of event. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } - MESSAGE = :message - METRICS = :metrics + MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 29a4f7bf..9142e294 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -9,11 +9,14 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig { returns(T.any(String, Symbol)) } + sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) + .returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) + end def model=(_) end @@ -139,7 +142,7 @@ module OpenAI sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -170,7 +173,7 @@ module OpenAI override .returns( { - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), @@ -188,15 +191,20 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - class Model < OpenAI::Union - abstract! 
+ module Model + extend OpenAI::Union + + Variants = + type_template(:out) { {fixed: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)} } - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } - BABBAGE_002 = :"babbage-002" - DAVINCI_002 = :"davinci-002" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_4O_MINI = :"gpt-4o-mini" + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) end class Hyperparameters < OpenAI::BaseModel @@ -258,24 +266,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -416,11 +424,14 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + end def type=(_) end @@ -429,7 +440,7 @@ module OpenAI params( dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol ) .returns(T.attached_class) end @@ -442,7 +453,7 @@ module OpenAI { dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: Symbol + type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol } ) end @@ -547,32 +558,32 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - class Beta < OpenAI::Union - abstract! 
+ module Beta + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -667,24 +678,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - class BatchSize < OpenAI::Union - abstract! + module BatchSize + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - class LearningRateMultiplier < OpenAI::Union - abstract! + module LearningRateMultiplier + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - class NEpochs < OpenAI::Union - abstract! + module NEpochs + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } end @@ -692,13 +703,15 @@ module OpenAI end # The type of method. Is either `supervised` or `dpo`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } - SUPERVISED = :supervised - DPO = :dpo + SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 285e3c70..09479fbe 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -18,11 +18,14 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -39,21 +42,27 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. 
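A sketch of requesting base64 payloads instead of short-lived URLs, assuming the generated `images.create_variation` method and a PNG on disk:

    variation = client.images.create_variation(
      image: File.open("sample.png", "rb"),
      response_format: OpenAI::Models::ImageCreateVariationParams::ResponseFormat::B64_JSON,
      n: 1
    )
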
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) + end def size=(_) end @@ -71,10 +80,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -88,10 +97,10 @@ module OpenAI .returns( { image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -102,34 +111,37 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! + module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum - abstract! 
+ module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index b672e912..02a9dc31 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -39,11 +39,14 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -59,21 +62,27 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
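The same pattern applies to edits; `Size::OrSymbol` accepts the `NUMBER_*` constants, which are plain size symbols at runtime. A sketch, assuming the generated `images.edit` method and image/mask files on disk:

    edited = client.images.edit(
      image: File.open("original.png", "rb"),
      mask: File.open("mask.png", "rb"),
      prompt: "add a lighthouse on the horizon",
      size: OpenAI::Models::ImageEditParams::Size::NUMBER_1024X1024 # :"1024x1024"
    )
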
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) + end def size=(_) end @@ -93,10 +102,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -122,10 +131,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -136,34 +145,36 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! + module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. - class Size < OpenAI::Enum - abstract! 
+ module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 7fccc54c..4754ce0e 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -17,11 +17,14 @@ module OpenAI end # The model to use for image generation. - sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } def model end - sig { params(_: T.nilable(T.any(String, Symbol))).returns(T.nilable(T.any(String, Symbol))) } + sig do + params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) + end def model=(_) end @@ -38,33 +41,42 @@ module OpenAI # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } def quality end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + .returns(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + end def quality=(_) end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } def response_format end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) + end def response_format=(_) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } def size end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) + end def size=(_) end @@ -72,11 +84,14 @@ module OpenAI # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
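A generation sketch combining the `dall-e-3`-only knobs, assuming the generated `images.generate` method:

    images = client.images.generate(
      prompt: "a watercolor fox at dawn",
      model: OpenAI::Models::ImageModel::DALL_E_3,
      quality: OpenAI::Models::ImageGenerateParams::Quality::HD,
      size: :"1024x1792",
      style: OpenAI::Models::ImageGenerateParams::Style::NATURAL
    )
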
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } def style end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) + end def style=(_) end @@ -94,12 +109,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -123,12 +138,12 @@ module OpenAI .returns( { prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -138,62 +153,67 @@ module OpenAI end # The model to use for image generation. - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } end # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. - class Quality < OpenAI::Enum - abstract! + module Quality + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } - STANDARD = :standard - HD = :hd + STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) end # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - class ResponseFormat < OpenAI::Enum - abstract! 
+ module ResponseFormat + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } - URL = :url - B64_JSON = :b64_json + URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) end # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. - class Size < OpenAI::Enum - abstract! + module Size + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" - NUMBER_1792X1024 = :"1792x1024" - NUMBER_1024X1792 = :"1024x1792" + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. - class Style < OpenAI::Enum - abstract! + module Style + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } - VIVID = :vivid - NATURAL = :natural + VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) + NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 6716a390..3cc4d5f4 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -2,13 +2,14 @@ module OpenAI module Models - class ImageModel < OpenAI::Enum - abstract! 
+ module ImageModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageModel::TaggedSymbol) } - DALL_E_2 = :"dall-e-2" - DALL_E_3 = :"dall-e-3" + DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::OrSymbol) + DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index c430dd8a..0ebd7ee8 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -265,138 +265,193 @@ module OpenAI class CategoryAppliedInputTypes < OpenAI::BaseModel # The applied input type(s) for the category 'harassment'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } def harassment end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + end def harassment=(_) end # The applied input type(s) for the category 'harassment/threatening'. - sig { returns(T::Array[Symbol]) } + sig do + returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + end def harassment_threatening end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params( + _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + end def harassment_threatening=(_) end # The applied input type(s) for the category 'hate'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } def hate end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) + end def hate=(_) end # The applied input type(s) for the category 'hate/threatening'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) } def hate_threatening end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + end def hate_threatening=(_) end # The applied input type(s) for the category 'illicit'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } def illicit end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) + end def illicit=(_) end # The applied input type(s) for the category 'illicit/violent'. 
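These accessors return arrays of `TaggedSymbol`s, which are plain symbols at runtime, so ordinary `Array` predicates work on them. A consuming sketch, assuming a result from the generated `moderations.create` method:

    result = client.moderations.create(input: "some user text").results.first
    applied = result.category_applied_input_types
    if applied.illicit_violent.include?(:text)
      # the illicit/violent score was computed over text input
    end
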
- sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) } def illicit_violent end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + end def illicit_violent=(_) end # The applied input type(s) for the category 'self-harm'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } def self_harm end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) + end def self_harm=(_) end # The applied input type(s) for the category 'self-harm/instructions'. - sig { returns(T::Array[Symbol]) } + sig do + returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + end def self_harm_instructions end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params( + _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + end def self_harm_instructions=(_) end # The applied input type(s) for the category 'self-harm/intent'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) } def self_harm_intent end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + end def self_harm_intent=(_) end # The applied input type(s) for the category 'sexual'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } def sexual end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) + end def sexual=(_) end # The applied input type(s) for the category 'sexual/minors'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) } def sexual_minors end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + end def sexual_minors=(_) end # The applied input type(s) for the category 'violence'. 
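Because `to_hash` (further down) surfaces the same tagged arrays, the struct round-trips to plain symbols, which is convenient for logging or serialization. A sketch, reusing `applied` from the previous example:

    applied.to_hash.each do |category, input_types|
      puts "#{category}: #{input_types.join(', ')}" # e.g. "violence: text, image"
    end
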
- sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } def violence end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) + end def violence=(_) end # The applied input type(s) for the category 'violence/graphic'. - sig { returns(T::Array[Symbol]) } + sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) } def violence_graphic end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + end def violence_graphic=(_) end # A list of the categories along with the input type(s) that the score applies to. sig do params( - harassment: T::Array[Symbol], - harassment_threatening: T::Array[Symbol], - hate: T::Array[Symbol], - hate_threatening: T::Array[Symbol], - illicit: T::Array[Symbol], - illicit_violent: T::Array[Symbol], - self_harm: T::Array[Symbol], - self_harm_instructions: T::Array[Symbol], - self_harm_intent: T::Array[Symbol], - sexual: T::Array[Symbol], - sexual_minors: T::Array[Symbol], - violence: T::Array[Symbol], - violence_graphic: T::Array[Symbol] + harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] ) .returns(T.attached_class) end @@ -421,133 +476,177 @@ module OpenAI override .returns( { - harassment: T::Array[Symbol], - harassment_threatening: T::Array[Symbol], - hate: T::Array[Symbol], - hate_threatening: T::Array[Symbol], - illicit: T::Array[Symbol], - illicit_violent: T::Array[Symbol], - self_harm: T::Array[Symbol], - self_harm_instructions: T::Array[Symbol], - self_harm_intent: T::Array[Symbol], - sexual: T::Array[Symbol], - sexual_minors: T::Array[Symbol], - violence: T::Array[Symbol], - violence_graphic: T::Array[Symbol] + harassment: 
T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] } ) end def to_hash end - class Harassment < OpenAI::Enum - abstract! + module Harassment + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) end - class HarassmentThreatening < OpenAI::Enum - abstract! + module HarassmentThreatening + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) } - TEXT = :text + TEXT = + T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) end - class Hate < OpenAI::Enum - abstract! + module Hate + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) end - class HateThreatening < OpenAI::Enum - abstract! + module HateThreatening + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) end - class Illicit < OpenAI::Enum - abstract! 
+ module Illicit + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) end - class IllicitViolent < OpenAI::Enum - abstract! + module IllicitViolent + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) end - class SelfHarm < OpenAI::Enum - abstract! + module SelfHarm + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) end - class SelfHarmInstruction < OpenAI::Enum - abstract! + module SelfHarmInstruction + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = + T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) end - class SelfHarmIntent < OpenAI::Enum - abstract! + module SelfHarmIntent + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) end - class Sexual < OpenAI::Enum - abstract! 
+ module Sexual + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) end - class SexualMinor < OpenAI::Enum - abstract! + module SexualMinor + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) end - class Violence < OpenAI::Enum - abstract! + module Violence + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) + IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) end - class ViolenceGraphic < OpenAI::Enum - abstract! + module ViolenceGraphic + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) } - TEXT = :text - IMAGE = :image + TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + IMAGE = + T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 3a7a01a9..a23b68a0 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -43,11 +43,14 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). 
- sig { returns(T.nilable(T.any(String, Symbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params(_: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) + end def model=(_) end @@ -58,7 +61,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -75,7 +78,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions } ) @@ -85,8 +88,8 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -109,10 +112,10 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). - class Model < OpenAI::Union - abstract! + module Model + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)} } end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index a34a1f36..a08f4a80 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -2,15 +2,17 @@ module OpenAI module Models - class ModerationModel < OpenAI::Enum - abstract! + module ModerationModel + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ModerationModel::TaggedSymbol) } - OMNI_MODERATION_LATEST = :"omni-moderation-latest" - OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" - TEXT_MODERATION_LATEST = :"text-moderation-latest" - TEXT_MODERATION_STABLE = :"text-moderation-stable" + OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) + OMNI_MODERATION_2024_09_26 = + T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::OrSymbol) + TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) + TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::OrSymbol) end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 861ae45d..705b6af9 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # An object describing an image to classify. - class ModerationMultiModalInput < OpenAI::Union - abstract! 
+ module ModerationMultiModalInput + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 38290949..909f27d4 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -9,11 +9,14 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } def effort end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) + end def effort=(_) end @@ -22,11 +25,14 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } def generate_summary end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) + end def generate_summary=(_) end @@ -34,11 +40,25 @@ module OpenAI # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - sig { params(effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)).returns(T.attached_class) } + sig do + params( + effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + ) + .returns(T.attached_class) + end def self.new(effort: nil, generate_summary: nil) end - sig { override.returns({effort: T.nilable(Symbol), generate_summary: T.nilable(Symbol)}) } + sig do + override + .returns( + { + effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + } + ) + end def to_hash end @@ -47,13 +67,14 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. - class GenerateSummary < OpenAI::Enum - abstract! + module GenerateSummary + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } - CONCISE = :concise - DETAILED = :detailed + CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index dcca18c9..2cf29ee5 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -8,14 +8,15 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). 
Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - class ReasoningEffort < OpenAI::Enum - abstract! + module ReasoningEffort + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ReasoningEffort::TaggedSymbol) } - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = T.let(:low, OpenAI::Models::ReasoningEffort::OrSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 40b9918f..638039d3 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -23,11 +23,14 @@ module OpenAI end # The type of computer environment to control. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) } def environment end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + .returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + end def environment=(_) end @@ -43,28 +46,43 @@ module OpenAI # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do - params(display_height: Float, display_width: Float, environment: Symbol, type: Symbol) + params( + display_height: Float, + display_width: Float, + environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, + type: Symbol + ) .returns(T.attached_class) end def self.new(display_height:, display_width:, environment:, type: :computer_use_preview) end sig do - override.returns({display_height: Float, display_width: Float, environment: Symbol, type: Symbol}) + override + .returns( + { + display_height: Float, + display_width: Float, + environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, + type: Symbol + } + ) end def to_hash end # The type of computer environment to control. - class Environment < OpenAI::Enum - abstract! + module Environment + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } - MAC = :mac - WINDOWS = :windows - UBUNTU = :ubuntu - BROWSER = :browser + MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 5b3386f8..42bb10de 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -54,20 +54,26 @@ module OpenAI # The role of the message input. 
One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + .returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + end def role=(_) end # The type of the message input. Always `message`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + .returns(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + end def type=(_) end @@ -88,8 +94,8 @@ module OpenAI ) ] ), - role: Symbol, - type: Symbol + role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol ) .returns(T.attached_class) end @@ -110,8 +116,8 @@ module OpenAI ) ] ), - role: Symbol, - type: Symbol + role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol } ) end @@ -120,8 +126,8 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -142,24 +148,28 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) end # The type of the message input. Always `message`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 4085e59c..71be889d 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -88,8 +88,8 @@ module OpenAI end # A filter to apply based on file attributes. - class Filters < OpenAI::Union - abstract! 
+ module Filters + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } @@ -97,11 +97,14 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -117,22 +120,37 @@ module OpenAI end # Ranking options for search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker: nil, score_threshold: nil) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + {ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float} + ) + end def to_hash end # The ranker to use for the file search. - class Ranker < OpenAI::Enum - abstract! + module Ranker + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } - AUTO = :auto - DEFAULT_2024_11_15 = :"default-2024-11-15" + AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_11_15 = + T.let(:"default-2024-11-15", OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 1162dc6a..dfee9da7 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -39,11 +39,14 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + .returns(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + end def order=(_) end @@ -52,7 +55,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -67,7 +70,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -79,13 +82,15 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 29aba874..e785499b 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -79,11 +79,34 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig do + returns( + T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + end def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params( + _: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + .returns( + T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ) + ) + end def model=(_) end @@ -175,7 +198,11 @@ module OpenAI # can call. sig do returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice @@ -183,10 +210,18 @@ module OpenAI sig do params( - _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + _: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) .returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice=(_) @@ -295,11 +330,14 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + end def status=(_) end @@ -326,11 +364,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } def truncation end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) + .returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) + end def truncation=(_) end @@ -363,7 +404,11 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, @@ -376,7 +421,11 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -389,9 +438,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: Symbol, + status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String, object: Symbol @@ -434,7 +483,11 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol + ), object: Symbol, output: T::Array[ T.any( @@ -448,7 +501,11 @@ module OpenAI ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -461,9 +518,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Models::Reasoning), - status: Symbol, + status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), usage: OpenAI::Models::Responses::ResponseUsage, user: String } @@ -474,44 +531,59 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the response is incomplete. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } def reason end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + end def reason=(_) end # Details about why the response is incomplete. - sig { params(reason: Symbol).returns(T.attached_class) } + sig do + params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + .returns(T.attached_class) + end def self.new(reason: nil) end - sig { override.returns({reason: Symbol}) } + sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash end # The reason why the response is incomplete. - class Reason < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - MAX_OUTPUT_TOKENS = :max_output_tokens - CONTENT_FILTER = :content_filter + module Reason + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } + + MAX_OUTPUT_TOKENS = + T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + CONTENT_FILTER = + T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) end end # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union - abstract! + module ToolChoice + extend OpenAI::Union Variants = type_template(:out) do { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + fixed: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) } end end @@ -523,13 +595,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum - abstract! + module Truncation + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } - AUTO = :auto - DISABLED = :disabled + AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) + DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 8b5451ff..42fee896 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -58,11 +58,14 @@ module OpenAI end # The status of the code interpreter tool call. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + end def status=(_) end @@ -86,7 +89,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: Symbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, type: Symbol ) .returns(T.attached_class) @@ -106,7 +109,7 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: Symbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, type: Symbol } ) @@ -115,8 +118,8 @@ module OpenAI end # The output of a code interpreter tool call that is text. - class Result < OpenAI::Union - abstract! + module Result + extend OpenAI::Union Variants = type_template(:out) do @@ -232,14 +235,20 @@ module OpenAI end # The status of the code interpreter tool call. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - INTERPRETING = :interpreting - COMPLETED = :completed + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + INTERPRETING = + T.let(:interpreting, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index c0e726fe..63676938 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -86,20 +86,26 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + end def status=(_) end # The type of the computer call. Always `computer_call`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + end def type=(_) end @@ -122,8 +128,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: Symbol, - type: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) .returns(T.attached_class) end @@ -148,8 +154,8 @@ module OpenAI ), call_id: String, pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: Symbol, - type: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol } ) end @@ -157,8 +163,8 @@ module OpenAI end # A click action. - class Action < OpenAI::Union - abstract! + module Action + extend OpenAI::Union Variants = type_template(:out) do @@ -180,11 +186,14 @@ module OpenAI class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } def button end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + end def button=(_) end @@ -217,26 +226,50 @@ module OpenAI end # A click action. - sig { params(button: Symbol, x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } + sig do + params( + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + x: Integer, + y_: Integer, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(button:, x:, y_:, type: :click) end - sig { override.returns({button: Symbol, type: Symbol, x: Integer, y_: Integer}) } + sig do + override + .returns( + { + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + type: Symbol, + x: Integer, + y_: Integer + } + ) + end def to_hash end # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - class Button < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - LEFT = :left - RIGHT = :right - WHEEL = :wheel - BACK = :back - FORWARD = :forward + module Button + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) } + + LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + RIGHT = + T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + WHEEL = + T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + BACK = T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + FORWARD = + T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) end end @@ -605,23 +638,29 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) end # The type of the computer call. Always `computer_call`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } - COMPUTER_CALL = :computer_call + COMPUTER_CALL = + T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 4e5be0f8..9cdcacd2 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -66,11 +66,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + end def status=(_) end @@ -80,7 +83,7 @@ module OpenAI call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: Symbol, + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol, type: Symbol ) .returns(T.attached_class) @@ -97,7 +100,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: Symbol, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: Symbol + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol } ) end @@ -144,14 +147,20 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 92cf9df2..0cc16286 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Multi-modal input and output contents. - class ResponseContent < OpenAI::Union - abstract! + module ResponseContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 9e79484e..b040d85c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that was added. - class Part < OpenAI::Union - abstract! 
+ module Part + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 5661d776..6f2a4562 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -90,8 +90,8 @@ module OpenAI end # The content part that is done. - class Part < OpenAI::Union - abstract! + module Part + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index dbd9ed7b..9c2938bb 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -91,11 +91,22 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, Symbol)) } + sig do + returns( + T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + end def model end - sig { params(_: T.any(String, Symbol)).returns(T.any(String, Symbol)) } + sig do + params( + _: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + .returns( + T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + ) + end def model=(_) end @@ -107,11 +118,14 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } def include end - sig { params(_: T.nilable(T::Array[Symbol])).returns(T.nilable(T::Array[Symbol])) } + sig do + params(_: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) + .returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) + end def include=(_) end @@ -229,7 +243,11 @@ module OpenAI sig do returns( T.nilable( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) ) end @@ -238,10 +256,18 @@ module OpenAI sig do params( - _: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + _: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) .returns( - T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) ) end def tool_choice=(_) @@ -323,11 +349,14 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } def truncation end - sig { params(_: T.nilable(Symbol)).returns(T.nilable(Symbol)) } + sig do + params(_: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) + .returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) + end def truncation=(_) end @@ -362,8 +391,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -373,7 +402,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -383,7 +416,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) @@ -433,8 +466,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -444,7 +477,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -454,7 +491,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, request_options: OpenAI::RequestOptions } @@ -472,8 +509,8 @@ module OpenAI # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - class Input < OpenAI::Union - abstract! + module Input + extend OpenAI::Union Variants = type_template(:out) do @@ -503,13 +540,17 @@ module OpenAI # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - class ToolChoice < OpenAI::Union - abstract! 
+ module ToolChoice + extend OpenAI::Union Variants = type_template(:out) do { - fixed: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction) + fixed: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ) } end end @@ -521,13 +562,16 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - class Truncation < OpenAI::Enum - abstract! + module Truncation + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } - AUTO = :auto - DISABLED = :disabled + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) + DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 7f9b2db2..90d6cf33 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -5,11 +5,14 @@ module OpenAI module Responses class ResponseError < OpenAI::BaseModel # The error code for the response. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + end def code=(_) end @@ -23,38 +26,54 @@ module OpenAI end # An error object returned when the model fails to generate a Response. - sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params(code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } def to_hash end # The error code for the response. - class Code < OpenAI::Enum - abstract! 
- - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - INVALID_PROMPT = :invalid_prompt - VECTOR_STORE_TIMEOUT = :vector_store_timeout - INVALID_IMAGE = :invalid_image - INVALID_IMAGE_FORMAT = :invalid_image_format - INVALID_BASE64_IMAGE = :invalid_base64_image - INVALID_IMAGE_URL = :invalid_image_url - IMAGE_TOO_LARGE = :image_too_large - IMAGE_TOO_SMALL = :image_too_small - IMAGE_PARSE_ERROR = :image_parse_error - IMAGE_CONTENT_POLICY_VIOLATION = :image_content_policy_violation - INVALID_IMAGE_MODE = :invalid_image_mode - IMAGE_FILE_TOO_LARGE = :image_file_too_large - UNSUPPORTED_IMAGE_MEDIA_TYPE = :unsupported_image_media_type - EMPTY_IMAGE_FILE = :empty_image_file - FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image - IMAGE_FILE_NOT_FOUND = :image_file_not_found + module Code + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + + SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + RATE_LIMIT_EXCEEDED = + T.let(:rate_limit_exceeded, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_PROMPT = T.let(:invalid_prompt, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + VECTOR_STORE_TIMEOUT = + T.let(:vector_store_timeout, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE = T.let(:invalid_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_FORMAT = + T.let(:invalid_image_format, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_BASE64_IMAGE = + T.let(:invalid_base64_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_URL = + T.let(:invalid_image_url, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_TOO_LARGE = T.let(:image_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_TOO_SMALL = T.let(:image_too_small, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_PARSE_ERROR = + T.let(:image_parse_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_CONTENT_POLICY_VIOLATION = + T.let(:image_content_policy_violation, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + INVALID_IMAGE_MODE = + T.let(:invalid_image_mode, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_FILE_TOO_LARGE = + T.let(:image_file_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + UNSUPPORTED_IMAGE_MEDIA_TYPE = + T.let(:unsupported_image_media_type, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + EMPTY_IMAGE_FILE = T.let(:empty_image_file, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + FAILED_TO_DOWNLOAD_IMAGE = + T.let(:failed_to_download_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + IMAGE_FILE_NOT_FOUND = + T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 17a4bdb2..439cbcd7 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -24,11 +24,14 @@ module OpenAI # The status of 
the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + end def status=(_) end @@ -60,7 +63,7 @@ module OpenAI params( id: String, queries: T::Array[String], - status: Symbol, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]), type: Symbol ) @@ -75,7 +78,7 @@ module OpenAI { id: String, queries: T::Array[String], - status: Symbol, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, type: Symbol, results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) } @@ -86,16 +89,20 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - SEARCHING = :searching - COMPLETED = :completed - INCOMPLETE = :incomplete - FAILED = :failed + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) end class Result < OpenAI::BaseModel @@ -179,8 +186,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index fea986b1..8c4e9b9a 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -16,8 +16,8 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - class ResponseFormatTextConfig < OpenAI::Union - abstract! + module ResponseFormatTextConfig + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 7f65d691..e2fb2951 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -51,11 +51,14 @@ module OpenAI # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + end def status=(_) end @@ -63,7 +66,14 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. sig do - params(arguments: String, call_id: String, name: String, id: String, status: Symbol, type: Symbol) + params( + arguments: String, + call_id: String, + name: String, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol, + type: Symbol + ) .returns(T.attached_class) end def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) @@ -71,28 +81,33 @@ module OpenAI sig do override - .returns({ - arguments: String, - call_id: String, - name: String, - type: Symbol, - id: String, - status: Symbol - }) + .returns( + { + arguments: String, + call_id: String, + name: String, + type: Symbol, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol + } + ) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 89456a9b..4a38931a 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -42,11 +42,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
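Every enum and union hunk in this patch applies the same mechanical rewrite: `class Foo < OpenAI::Enum` with `abstract!` becomes `module Foo` with `extend OpenAI::Enum`, and bare `Symbol` constants become `T.let`-typed constants alongside a `TaggedSymbol`/`OrSymbol` alias pair. Since the pattern repeats through the rest of the patch, here is an annotated sketch of what the two aliases mean. This is illustrative commentary on the generated shape above, not additional patch content.

    # Annotated sketch of the generated .rbi shape (type stubs only; RBI
    # files are never executed, so the T.let here is purely static).
    module Status
      extend OpenAI::Enum

      # An intersection type: at runtime a member is still a plain Symbol;
      # statically it is a Symbol "branded" as belonging to Status.
      TaggedSymbol = T.type_alias { T.all(Symbol, Status) }

      # A union that widens input positions so callers may keep passing
      # bare Symbol literals such as :in_progress.
      OrSymbol = T.type_alias { T.any(Symbol, TaggedSymbol) }

      # T.let brands each constant, so a method declared to return
      # TaggedSymbol can return IN_PROGRESS and still typecheck.
      IN_PROGRESS = T.let(:in_progress, TaggedSymbol)
    end

The signatures in this patch appear to follow one convention throughout: fields the caller supplies are declared with the wide `OrSymbol`, while fields only ever populated by the API (for example `ResponseError#code` and the retrieved `*Item` models) are declared with the narrow `TaggedSymbol`.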
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + end def status=(_) end @@ -55,27 +58,45 @@ module OpenAI id: String, call_id: String, output: String, - status: Symbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol, type: Symbol - ).returns(T.attached_class) + ) + .returns(T.attached_class) end def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) end - sig { override.returns({id: String, call_id: String, output: String, type: Symbol, status: Symbol}) } + sig do + override + .returns( + { + id: String, + call_id: String, + output: String, + type: Symbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + } + ) + end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index fa36c718..1a4c8366 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -14,11 +14,14 @@ module OpenAI end # The status of the web search tool call. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + end def status=(_) end @@ -34,24 +37,39 @@ module OpenAI # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for # more information. 
- sig { params(id: String, status: Symbol, type: Symbol).returns(T.attached_class) } + sig do + params( + id: String, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(id:, status:, type: :web_search_call) end - sig { override.returns({id: String, status: Symbol, type: Symbol}) } + sig do + override + .returns( + {id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol} + ) + end def to_hash end # The status of the web search tool call. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - SEARCHING = :searching - COMPLETED = :completed - FAILED = :failed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 363cdad4..3b6a4039 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -11,14 +11,18 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. - class ResponseIncludable < OpenAI::Enum - abstract! + module ResponseIncludable + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } - FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" - MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" + FILE_SEARCH_CALL_RESULTS = + T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + MESSAGE_INPUT_IMAGE_IMAGE_URL = + T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = + T.let(:"computer_call_output.output.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index f49d5163..e6db921a 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -14,11 +14,14 @@ module OpenAI end # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) } def format_ end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + end def format_=(_) end @@ -32,22 +35,36 @@ module OpenAI end # An audio input to the model. - sig { params(data: String, format_: Symbol, type: Symbol).returns(T.attached_class) } + sig do + params( + data: String, + format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, + type: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, format_:, type: :input_audio) end - sig { override.returns({data: String, format_: Symbol, type: Symbol}) } + sig do + override + .returns( + {data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol} + ) + end def to_hash end # The format of the audio data. Currently supported formats are `mp3` and `wav`. - class Format < OpenAI::Enum - abstract! + module Format + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } - MP3 = :mp3 - WAV = :wav + MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index cba404fb..37ed1a5a 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # A text input to the model. - class ResponseInputContent < OpenAI::Union - abstract! + module ResponseInputContent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index f4e450ae..ade87200 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -6,11 +6,14 @@ module OpenAI class ResponseInputImage < OpenAI::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } def detail end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + end def detail=(_) end @@ -45,35 +48,43 @@ module OpenAI # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). 
sig do - params(detail: Symbol, file_id: T.nilable(String), image_url: T.nilable(String), type: Symbol) + params( + detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + ) .returns(T.attached_class) end def self.new(detail:, file_id: nil, image_url: nil, type: :input_image) end sig do - override.returns( - { - detail: Symbol, - type: Symbol, - file_id: T.nilable(String), - image_url: T.nilable(String) - } - ) + override + .returns( + { + detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, + type: Symbol, + file_id: T.nilable(String), + image_url: T.nilable(String) + } + ) end def to_hash end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - class Detail < OpenAI::Enum - abstract! + module Detail + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } - HIGH = :high - LOW = :low - AUTO = :auto + HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 6ccd31a3..496ac76e 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -8,8 +8,8 @@ module OpenAI # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - class ResponseInputItem < OpenAI::Union - abstract! + module ResponseInputItem + extend OpenAI::Union Variants = type_template(:out) do @@ -71,30 +71,39 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + end def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + end def status=(_) end # The type of the message input. Always set to `message`. 
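Stepping back to the `ResponseInputImage` signature above, the practical effect of widening `detail` to `Detail::OrSymbol` is that both of the following spellings typecheck; a hypothetical usage sketch (the URL and file ID are made up):

    # Hypothetical usage; both forms satisfy detail: Detail::OrSymbol.
    OpenAI::Models::Responses::ResponseInputImage.new(
      detail: :auto,                            # bare Symbol literal
      image_url: "https://example.com/cat.png"  # hypothetical URL
    )
    OpenAI::Models::Responses::ResponseInputImage.new(
      detail: OpenAI::Models::Responses::ResponseInputImage::Detail::HIGH,
      file_id: "file-123"                       # hypothetical file ID
    )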
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + end def type=(_) end @@ -110,9 +119,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol ) .returns(T.attached_class) end @@ -130,9 +139,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol } ) end @@ -140,35 +149,45 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } - USER = :user - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) end # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) end end @@ -237,11 +256,14 @@ module OpenAI # The status of the message input. 
One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + end def status=(_) end @@ -252,7 +274,7 @@ module OpenAI output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol, + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -269,7 +291,7 @@ module OpenAI type: Symbol, id: String, acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: Symbol + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol } ) end @@ -316,14 +338,20 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) end end @@ -367,11 +395,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + end def status=(_) end @@ -381,29 +412,45 @@ module OpenAI call_id: String, output: String, id: String, - status: Symbol, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol, type: Symbol - ).returns(T.attached_class) + ) + .returns(T.attached_class) end def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output) end sig do - override.returns({call_id: String, output: String, type: Symbol, id: String, status: Symbol}) + override + .returns( + { + call_id: String, + output: String, + type: Symbol, + id: String, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + } + ) end def to_hash end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + module Status + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index cabf1399..0aaaacd6 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -53,30 +53,39 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } def role end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + end def role=(_) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + end def status=(_) end # The type of the message input. Always set to `message`. 
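The optional `status:` keyword in the `FunctionCallOutput` constructor above gets the same treatment, so a bare Symbol remains valid input; a hypothetical sketch (the IDs and output are made up):

    # Hypothetical usage of the FunctionCallOutput.new signature above.
    OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput.new(
      call_id: "call_abc123",  # hypothetical call ID
      output: '{"ok":true}',   # hypothetical tool output
      status: :completed       # bare Symbol satisfies Status::OrSymbol
    )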
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + end def type=(_) end @@ -90,9 +99,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol ) .returns(T.attached_class) end @@ -111,9 +120,9 @@ module OpenAI OpenAI::Models::Responses::ResponseInputFile ) ], - role: Symbol, - status: Symbol, - type: Symbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol } ) end @@ -121,35 +130,44 @@ module OpenAI end # The role of the message input. One of `user`, `system`, or `developer`. - class Role < OpenAI::Enum - abstract! + module Role + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } - USER = :user - SYSTEM = :system - DEVELOPER = :developer + USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) end # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) end # The type of the message input. Always set to `message`. - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } - MESSAGE = :message + MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index 77bef808..0d59846c 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Content item used to generate a response. - class ResponseItem < OpenAI::Union - abstract! + module ResponseItem + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index bec1b93e..f4a81f11 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # An output message from the model. - class ResponseOutputItem < OpenAI::Union - abstract! + module ResponseOutputItem + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 17bf1ad0..63f0758f 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -44,11 +44,14 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + end def status=(_) end @@ -66,7 +69,7 @@ module OpenAI params( id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], - status: Symbol, + status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, type: Symbol ) @@ -82,7 +85,7 @@ module OpenAI id: String, content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], role: Symbol, - status: Symbol, + status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, type: Symbol } ) @@ -91,8 +94,8 @@ module OpenAI end # A text output from the model. - class Content < OpenAI::Union - abstract! + module Content + extend OpenAI::Union Variants = type_template(:out) do @@ -104,14 +107,16 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 13eb0a87..78b35143 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -98,8 +98,8 @@ module OpenAI end # A citation to a file. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 66b69b58..aabd22e0 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -36,11 +36,14 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + end def status=(_) end @@ -50,7 +53,7 @@ module OpenAI params( id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - status: Symbol, + status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -65,7 +68,7 @@ module OpenAI id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], type: Symbol, - status: Symbol + status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol } ) end @@ -102,14 +105,16 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - class Status < OpenAI::Enum - abstract! 
+ module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - INCOMPLETE = :incomplete + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 8e817f0d..d2129c7d 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -9,17 +9,20 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. - sig { returns(T.nilable(T::Array[Symbol])) } + sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } def include end - sig { params(_: T::Array[Symbol]).returns(T::Array[Symbol]) } + sig do + params(_: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) + .returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) + end def include=(_) end sig do params( - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -27,7 +30,15 @@ module OpenAI def self.new(include: nil, request_options: {}) end - sig { override.returns({include: T::Array[Symbol], request_options: OpenAI::RequestOptions}) } + sig do + override + .returns( + { + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions + } + ) + end def to_hash end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index f6a3f6ce..95b80ac1 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -5,15 +5,16 @@ module OpenAI module Responses # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. - class ResponseStatus < OpenAI::Enum - abstract! 
+ module ResponseStatus + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } - COMPLETED = :completed - FAILED = :failed - IN_PROGRESS = :in_progress - INCOMPLETE = :incomplete + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index ed1980bf..4c87665c 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # Emitted when there is a partial audio response. - class ResponseStreamEvent < OpenAI::Union - abstract! + module ResponseStreamEvent + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index ed354df8..f7d64bc8 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -128,8 +128,8 @@ module OpenAI end # A citation to a file. - class Annotation < OpenAI::Union - abstract! + module Annotation + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 0085ad60..0d0c2a77 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -6,8 +6,8 @@ module OpenAI # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - class Tool < OpenAI::Union - abstract! + module Tool + extend OpenAI::Union Variants = type_template(:out) do diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index c047abb7..a4acb23d 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -11,14 +11,15 @@ module OpenAI # more tools. # # `required` means the model must call one or more tools. - class ToolChoiceOptions < OpenAI::Enum - abstract! 
+ module ToolChoiceOptions + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } - NONE = :none - AUTO = :auto - REQUIRED = :required + NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 8459293b..45b4ecfc 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -12,21 +12,24 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + end def type=(_) end # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). - sig { params(type: Symbol).returns(T.attached_class) } + sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) } def self.new(type:) end - sig { override.returns({type: Symbol}) } + sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) } def to_hash end @@ -38,15 +41,19 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } - FILE_SEARCH = :file_search - WEB_SEARCH_PREVIEW = :web_search_preview - COMPUTER_USE_PREVIEW = :computer_use_preview - WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + WEB_SEARCH_PREVIEW = + T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + COMPUTER_USE_PREVIEW = + T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + WEB_SEARCH_PREVIEW_2025_03_11 = + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index cf2fb2f6..a0ae2d7c 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -8,21 +8,27 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - sig { returns(Symbol) } + sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + .returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + end def type=(_) end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } def search_context_size end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + .returns(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + end def search_context_size=(_) end @@ -42,8 +48,8 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). sig do params( - type: Symbol, - search_context_size: Symbol, + type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) ) .returns(T.attached_class) @@ -55,8 +61,8 @@ module OpenAI override .returns( { - type: Symbol, - search_context_size: Symbol, + type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) } ) @@ -68,25 +74,30 @@ module OpenAI # # - `web_search_preview` # - `web_search_preview_2025_03_11` - class Type < OpenAI::Enum - abstract! 
+ module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } - WEB_SEARCH_PREVIEW = :web_search_preview - WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + WEB_SEARCH_PREVIEW_2025_03_11 = + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - class SearchContextSize < OpenAI::Enum - abstract! + module SearchContextSize + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } - LOW = :low - MEDIUM = :medium - HIGH = :high + LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index ecad7412..ddb9e74a 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -2,20 +2,28 @@ module OpenAI module Models - class ResponsesModel < OpenAI::Union - abstract! + module ResponsesModel + extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Symbol)} } + Variants = + type_template(:out) do + { + fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + } + end - class UnionMember2 < OpenAI::Enum - abstract! + module UnionMember2 + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::UnionMember2) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol) } - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW = + T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 1da23f04..05b6e1e2 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -69,11 +69,14 @@ module OpenAI end # The status of the Upload. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::Upload::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::Upload::Status::TaggedSymbol) + .returns(OpenAI::Models::Upload::Status::TaggedSymbol) + end def status=(_) end @@ -95,7 +98,7 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: Symbol, + status: OpenAI::Models::Upload::Status::TaggedSymbol, file: T.nilable(OpenAI::Models::FileObject), object: Symbol ) @@ -115,7 +118,7 @@ module OpenAI filename: String, object: Symbol, purpose: String, - status: Symbol, + status: OpenAI::Models::Upload::Status::TaggedSymbol, file: T.nilable(OpenAI::Models::FileObject) } ) @@ -124,15 +127,16 @@ module OpenAI end # The status of the Upload. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Upload::Status::TaggedSymbol) } - PENDING = :pending - COMPLETED = :completed - CANCELLED = :cancelled - EXPIRED = :expired + PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 22555f0e..93d701d3 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -40,11 +40,11 @@ module OpenAI # # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). - sig { returns(Symbol) } + sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose end - sig { params(_: Symbol).returns(Symbol) } + sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } def purpose=(_) end @@ -53,7 +53,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -68,7 +68,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index e80d2488..188bfd85 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -73,11 +73,14 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
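The hunk that follows changes `VectorStore#status` to return the branded `Status::TaggedSymbol` instead of a bare `Symbol`. Because a tagged symbol is still an ordinary Symbol at runtime, existing comparison code is unaffected; a sketch, assuming `vector_store` is a previously fetched `OpenAI::Models::VectorStore`:

    # Runtime behaviour is unchanged: the value is a plain Symbol.
    case vector_store.status
    when :completed   then puts "ready for use"
    when :in_progress then puts "still indexing"
    when :expired     then puts "expired"
    end

    # The typed constant holds the same Symbol, so equality also works:
    vector_store.status == OpenAI::Models::VectorStore::Status::COMPLETED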
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStore::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) + end def status=(_) end @@ -118,7 +121,7 @@ module OpenAI last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: Symbol, + status: OpenAI::Models::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer), @@ -152,7 +155,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, expires_after: OpenAI::Models::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer) @@ -238,14 +241,15 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStore::Status::TaggedSymbol) } - EXPIRED = :expired - IN_PROGRESS = :in_progress - COMPLETED = :completed + EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) end class ExpiresAfter < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 5749ce8a..c2400193 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -42,11 +42,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + end def order=(_) end @@ -55,7 +58,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -70,7 +73,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -80,13 +83,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 029b5b83..1e785923 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -89,8 +89,8 @@ module OpenAI end # A query string for a search - class Query < OpenAI::Union - abstract! + module Query + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } @@ -98,19 +98,22 @@ module OpenAI end # A filter to apply based on file attributes. - class Filters < OpenAI::Union - abstract! + module Filters + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } end class RankingOptions < OpenAI::BaseModel - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } def ranker end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + .returns(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + end def ranker=(_) end @@ -123,21 +126,36 @@ module OpenAI end # Ranking options for search. - sig { params(ranker: Symbol, score_threshold: Float).returns(T.attached_class) } + sig do + params( + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + ) + .returns(T.attached_class) + end def self.new(ranker: nil, score_threshold: nil) end - sig { override.returns({ranker: Symbol, score_threshold: Float}) } + sig do + override + .returns( + {ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float} + ) + end def to_hash end - class Ranker < OpenAI::Enum - abstract! + module Ranker + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } - AUTO = :auto - DEFAULT_2024_11_15 = :"default-2024-11-15" + AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) + DEFAULT_2024_11_15 = + T.let(:"default-2024-11-15", OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index a2443437..e4039a7c 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -86,8 +86,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end @@ -103,29 +103,40 @@ module OpenAI end # The type of content. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } def type end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + .returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + end def type=(_) end - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + .returns(T.attached_class) + end def self.new(text:, type:) end - sig { override.returns({text: String, type: Symbol}) } + sig do + override + .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) + end def to_hash end # The type of content. - class Type < OpenAI::Enum - abstract! + module Type + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } - TEXT = :text + TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 4be0dcb6..f2dd4d99 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index c7da2f41..22045ec2 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -40,11 +40,14 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)) } def filter end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + end def filter=(_) end @@ -60,11 +63,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + end def order=(_) end @@ -73,9 +79,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -90,9 +96,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -101,26 +107,33 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum - abstract! + module Filter + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - FAILED = :failed - CANCELLED = :cancelled + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! + module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 57e35c52..d8ec4fba 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -98,8 +98,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! 
+ module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 0f62a65d..20a4bce5 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -32,11 +32,14 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)) } def filter end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + end def filter=(_) end @@ -52,11 +55,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(Symbol)) } + sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } def order end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + .returns(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + end def order=(_) end @@ -64,9 +70,9 @@ module OpenAI params( after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) ) .returns(T.attached_class) @@ -80,9 +86,9 @@ module OpenAI { after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -91,26 +97,30 @@ module OpenAI end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - class Filter < OpenAI::Enum - abstract! + module Filter + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - FAILED = :failed - CANCELLED = :cancelled + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - class Order < OpenAI::Enum - abstract! 
+ module Order + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } - ASC = :asc - DESC = :desc + ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index e3693815..fe35965e 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -55,8 +55,8 @@ module OpenAI def to_hash end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 4e1a4a36..fc8ccf5a 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -47,11 +47,14 @@ module OpenAI # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + end def status=(_) end @@ -121,7 +124,7 @@ module OpenAI id: String, created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -151,7 +154,7 @@ module OpenAI created_at: Integer, last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), @@ -164,11 +167,14 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. - sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } def code end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + end def code=(_) end @@ -183,42 +189,61 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
- sig { params(code: Symbol, message: String).returns(T.attached_class) } + sig do + params( + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, + message: String + ) + .returns(T.attached_class) + end def self.new(code:, message:) end - sig { override.returns({code: Symbol, message: String}) } + sig do + override + .returns( + {code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, message: String} + ) + end def to_hash end # One of `server_error` or `rate_limit_exceeded`. - class Code < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - SERVER_ERROR = :server_error - UNSUPPORTED_FILE = :unsupported_file - INVALID_FILE = :invalid_file + module Code + extend OpenAI::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + + SERVER_ERROR = + T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + UNSUPPORTED_FILE = + T.let(:unsupported_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + INVALID_FILE = + T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) end end # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - class Status < OpenAI::Enum - abstract! + module Status + extend OpenAI::Enum - Value = type_template(:out) { {fixed: Symbol} } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } - IN_PROGRESS = :in_progress - COMPLETED = :completed - CANCELLED = :cancelled - FAILED = :failed + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) end - class Attribute < OpenAI::Union - abstract! + module Attribute + extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 2e5ce798..4627b63f 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -45,11 +45,14 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. 
- sig { returns(Symbol) } + sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } def status end - sig { params(_: Symbol).returns(Symbol) } + sig do + params(_: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + end def status=(_) end @@ -71,7 +74,7 @@ module OpenAI id: String, created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, vector_store_id: String, object: Symbol ) @@ -88,7 +91,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: Symbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, vector_store_id: String } ) @@ -171,15 +174,18 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. - class Status < OpenAI::Enum - abstract! - - Value = type_template(:out) { {fixed: Symbol} } - - IN_PROGRESS = :in_progress - COMPLETED = :completed - CANCELLED = :cancelled - FAILED = :failed + module Status + extend OpenAI::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) end end end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index e2e85216..c434cedb 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -8,10 +8,10 @@ module OpenAI sig do params( input: String, - model: T.any(String, Symbol), - voice: Symbol, + model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), + voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, instructions: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index a1340034..38f106b4 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -8,13 +8,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: 
T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -70,13 +70,13 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), - include: T::Array[Symbol], + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[Symbol], + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 278e3855..baf563ad 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -8,9 +8,9 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, - response_format: Symbol, + response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index 10691913..1a12c440 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -6,8 +6,8 @@ module OpenAI # Creates and executes a batch from an uploaded file of requests sig do params( - completion_window: Symbol, - endpoint: Symbol, + completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 00d18547..a31361cb 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -7,12 +7,12 @@ module OpenAI # Create an assistant with a model and instructions. 
sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -129,9 +129,9 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), name: T.nilable(String), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -234,7 +234,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant]) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 1479fd01..6db707e7 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -109,7 +109,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -121,7 +121,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ @@ -243,7 +248,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( T.any( @@ -255,7 +260,12 @@ module OpenAI ), temperature: T.nilable(Float), thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 3ec7bab4..68fd1790 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -19,7 +19,7 @@ module OpenAI ) ] ), - role: Symbol, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), 
metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -104,7 +104,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index cd747220..f5d012a0 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -14,16 +14,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -33,7 +33,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -171,16 +176,16 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.nilable( T.any( Symbol, @@ -190,7 +195,12 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_choice: T.nilable(T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice)), + tool_choice: T.nilable( + T.any( + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice + ) + ), tools: T.nilable( T::Array[ T.any( @@ -405,7 +415,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run]) diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 4833b5fb..5dfa9a8a 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ 
b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -12,7 +12,7 @@ module OpenAI step_id: String, thread_id: String, run_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) @@ -44,9 +44,9 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, - order: Symbol, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index fff5ad69..8fc9878c 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -37,34 +37,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -295,34 +301,40 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), - function_call: T.any(Symbol, 
OpenAI::Models::Chat::ChatCompletionFunctionCallOption), + function_call: T.any( + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Models::Chat::ChatCompletionFunctionCallOption + ), functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[Symbol]), + modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(Symbol), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), seed: T.nilable(Integer), - service_tier: T.nilable(Symbol), + service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - tool_choice: T.any(Symbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice), + tool_choice: T.any( + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Chat::ChatCompletionNamedToolChoice + ), tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), @@ -573,7 +585,7 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: Symbol, + order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 3e7c16e2..0667ddbc 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -12,7 +12,7 @@ module OpenAI completion_id: String, after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 8e9e52d4..b894675b 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, @@ -149,7 +149,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. 
sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable( T.any( String, diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 025c37a7..65a19f1d 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -7,9 +7,9 @@ module OpenAI sig do params( input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: Symbol, + encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 441f93e0..da0efe9d 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -27,7 +27,7 @@ module OpenAI sig do params( file: T.any(IO, StringIO), - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::FileObject) @@ -64,7 +64,7 @@ module OpenAI params( after: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index b3f7cc15..1c55189e 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -17,7 +17,7 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 396f2edd..1944e5f2 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -7,10 +7,10 @@ module OpenAI sig do params( image: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -47,10 +47,10 @@ module OpenAI image: T.any(IO, StringIO), prompt: String, mask: T.any(IO, StringIO), - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), + response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, request_options: 
T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) @@ -91,12 +91,12 @@ module OpenAI sig do params( prompt: String, - model: T.nilable(T.any(String, Symbol)), + model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: Symbol, - response_format: T.nilable(Symbol), - size: T.nilable(Symbol), - style: T.nilable(Symbol), + quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 3b9b2bd0..b57441a5 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -12,7 +12,7 @@ module OpenAI T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), - model: T.any(String, Symbol), + model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::ModerationCreateResponse) diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 64ab5c27..701c8406 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -38,8 +38,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -49,7 +49,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -59,7 +63,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -209,8 +213,8 @@ module OpenAI ) ] ), - model: T.any(String, Symbol), - include: T.nilable(T::Array[Symbol]), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), @@ -220,7 +224,11 @@ module OpenAI store: T.nilable(T::Boolean), temperature: T.nilable(Float), text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes, 
OpenAI::Models::Responses::ToolChoiceFunction), + tool_choice: T.any( + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Models::Responses::ToolChoiceFunction + ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, @@ -230,7 +238,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - truncation: T.nilable(Symbol), + truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) @@ -390,7 +398,7 @@ module OpenAI sig do params( response_id: String, - include: T::Array[Symbol], + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Responses::Response) diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 44e41eb2..7f92008d 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -11,7 +11,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns( diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 98a58dc5..6ee12473 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -31,7 +31,7 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: Symbol, + purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::Models::Upload) diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 86418315..7e24de66 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -99,7 +99,7 @@ module OpenAI after: String, before: String, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index ec4e8e7a..c8a0af8b 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -82,9 +82,9 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 2fc3ae57..88902be4 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -89,9 +89,9 @@ module OpenAI vector_store_id: String, after: String, 
before: String, - filter: Symbol, + filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: Symbol, + order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index ba62241d..ff01cbf3 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -62,28 +62,28 @@ module OpenAI ) -> ([true, top, nil] | [false, bool, Integer]) end - class Enum - extend OpenAI::Converter + module Enum + include OpenAI::Converter def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] private def self.finalize!: -> void - def self.===: (top other) -> bool + def ===: (top other) -> bool - def self.==: (top other) -> bool + def ==: (top other) -> bool - def self.coerce: (String | Symbol | top value) -> (Symbol | top) + def coerce: (String | Symbol | top value) -> (Symbol | top) - def self.dump: (Symbol | top value) -> (Symbol | top) + def dump: (Symbol | top value) -> (Symbol | top) - def self.try_strict_coerce: ( + def try_strict_coerce: ( top value ) -> ([true, top, nil] | [false, bool, Integer]) end - class Union - extend OpenAI::Converter + module Union + include OpenAI::Converter private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Converter::input)]] @@ -105,15 +105,15 @@ module OpenAI private def self.resolve_variant: (top value) -> OpenAI::Converter::input? - def self.===: (top other) -> bool + def ===: (top other) -> bool - def self.==: (top other) -> bool + def ==: (top other) -> bool - def self.coerce: (top value) -> top + def coerce: (top value) -> top - def self.dump: (top value) -> top + def dump: (top value) -> top - def self.try_strict_coerce: ( + def try_strict_coerce: ( top value ) -> ([true, top, nil] | [false, bool, Integer]) end diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 817c97a9..7427ba06 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -5,14 +5,18 @@ module OpenAI | OpenAI::Models::chat_model | OpenAI::Models::AllModels::union_member2 - class AllModels < OpenAI::Union + module AllModels + extend OpenAI::Union + type union_member2 = :"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW: :"computer-use-preview" diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index eb21a97f..30772347 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -50,7 +50,9 @@ module OpenAI type model = String | OpenAI::Models::Audio::speech_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::Audio::speech_model] end @@ -65,7 +67,9 @@ module OpenAI | :sage | :shimmer - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY: :alloy ASH: :ash CORAL: :coral @@ -81,7 +85,9 @@ module OpenAI type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + MP3: :mp3 OPUS: :opus AAC: :aac diff --git 
a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs index 357eaa4c..51c913a4 100644 --- a/sig/openai/models/audio/speech_model.rbs +++ b/sig/openai/models/audio/speech_model.rbs @@ -3,7 +3,9 @@ module OpenAI module Audio type speech_model = :"tts-1" | :"tts-1-hd" | :"gpt-4o-mini-tts" - class SpeechModel < OpenAI::Enum + module SpeechModel + extend OpenAI::Enum + TTS_1: :"tts-1" TTS_1_HD: :"tts-1-hd" GPT_4O_MINI_TTS: :"gpt-4o-mini-tts" diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 28f79a92..3834d3cc 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -68,13 +68,17 @@ module OpenAI type model = String | OpenAI::Models::audio_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::audio_model] end type timestamp_granularity = :word | :segment - class TimestampGranularity < OpenAI::Enum + module TimestampGranularity + extend OpenAI::Enum + WORD: :word SEGMENT: :segment diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs index f0179c81..32f15ab6 100644 --- a/sig/openai/models/audio/transcription_create_response.rbs +++ b/sig/openai/models/audio/transcription_create_response.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::Transcription | OpenAI::Models::Audio::TranscriptionVerbose - class TranscriptionCreateResponse < OpenAI::Union + module TranscriptionCreateResponse + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] end end diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs index cf06a929..1fc83e72 100644 --- a/sig/openai/models/audio/transcription_include.rbs +++ b/sig/openai/models/audio/transcription_include.rbs @@ -3,7 +3,9 @@ module OpenAI module Audio type transcription_include = :logprobs - class TranscriptionInclude < OpenAI::Enum + module TranscriptionInclude + extend OpenAI::Enum + LOGPROBS: :logprobs def self.values: -> ::Array[OpenAI::Models::Audio::transcription_include] diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs index b9233feb..caffcd1f 100644 --- a/sig/openai/models/audio/transcription_stream_event.rbs +++ b/sig/openai/models/audio/transcription_stream_event.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::TranscriptionTextDeltaEvent | OpenAI::Models::Audio::TranscriptionTextDoneEvent - class TranscriptionStreamEvent < OpenAI::Union + module TranscriptionStreamEvent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 83dc3322..d5dcc175 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -46,13 +46,17 @@ module OpenAI type model = String | OpenAI::Models::audio_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::audio_model] end type response_format = :json | :text | :srt | :verbose_json | :vtt - class ResponseFormat < 
OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + JSON: :json TEXT: :text SRT: :srt diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs index d80690b9..6c26f34d 100644 --- a/sig/openai/models/audio/translation_create_response.rbs +++ b/sig/openai/models/audio/translation_create_response.rbs @@ -5,7 +5,9 @@ module OpenAI OpenAI::Models::Audio::Translation | OpenAI::Models::Audio::TranslationVerbose - class TranslationCreateResponse < OpenAI::Union + module TranslationCreateResponse + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] end end diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs index 72b67344..7c47323a 100644 --- a/sig/openai/models/audio_model.rbs +++ b/sig/openai/models/audio_model.rbs @@ -3,7 +3,9 @@ module OpenAI type audio_model = :"whisper-1" | :"gpt-4o-transcribe" | :"gpt-4o-mini-transcribe" - class AudioModel < OpenAI::Enum + module AudioModel + extend OpenAI::Enum + WHISPER_1: :"whisper-1" GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE: :"gpt-4o-mini-transcribe" diff --git a/sig/openai/models/audio_response_format.rbs b/sig/openai/models/audio_response_format.rbs index e91a52b8..ee7b583f 100644 --- a/sig/openai/models/audio_response_format.rbs +++ b/sig/openai/models/audio_response_format.rbs @@ -2,7 +2,9 @@ module OpenAI module Models type audio_response_format = :json | :text | :srt | :verbose_json | :vtt - class AudioResponseFormat < OpenAI::Enum + module AudioResponseFormat + extend OpenAI::Enum + JSON: :json TEXT: :text SRT: :srt diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs index 278b30f0..2a3d4888 100644 --- a/sig/openai/models/batch.rbs +++ b/sig/openai/models/batch.rbs @@ -128,7 +128,9 @@ module OpenAI | :cancelling | :cancelled - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING: :validating FAILED: :failed IN_PROGRESS: :in_progress diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 5a459418..c73264e6 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -33,7 +33,9 @@ module OpenAI type completion_window = :"24h" - class CompletionWindow < OpenAI::Enum + module CompletionWindow + extend OpenAI::Enum + NUMBER_24H: :"24h" def self.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window] @@ -45,7 +47,9 @@ module OpenAI | :"/v1/embeddings" | :"/v1/completions" - class Endpoint < OpenAI::Enum + module Endpoint + extend OpenAI::Enum + V1_RESPONSES: :"/v1/responses" V1_CHAT_COMPLETIONS: :"/v1/chat/completions" V1_EMBEDDINGS: :"/v1/embeddings" diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index d4a0708e..2ffe711e 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -66,7 +66,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -164,7 +166,9 @@ module OpenAI OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - class ChunkingStrategy < 
OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + type auto = { type: :auto } class Auto < OpenAI::BaseModel diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 89fbdc93..c8869670 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -44,7 +44,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs index 158a3a03..d5b4a073 100644 --- a/sig/openai/models/beta/assistant_response_format_option.rbs +++ b/sig/openai/models/beta/assistant_response_format_option.rbs @@ -7,7 +7,9 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONObject | OpenAI::Models::ResponseFormatJSONSchema - class AssistantResponseFormatOption < OpenAI::Union + module AssistantResponseFormatOption + extend OpenAI::Union + def self.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index 3cff7d58..4ed62507 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -27,7 +27,9 @@ module OpenAI | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete | OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - class AssistantStreamEvent < OpenAI::Union + module AssistantStreamEvent + extend OpenAI::Union + type thread_created = { data: OpenAI::Models::Beta::Thread, diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs index 5421e7bc..48827d7f 100644 --- a/sig/openai/models/beta/assistant_tool.rbs +++ b/sig/openai/models/beta/assistant_tool.rbs @@ -6,7 +6,9 @@ module OpenAI | OpenAI::Models::Beta::FileSearchTool | OpenAI::Models::Beta::FunctionTool - class AssistantTool < OpenAI::Union + module AssistantTool + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end end diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs index eabceb53..ab690c71 100644 --- a/sig/openai/models/beta/assistant_tool_choice.rbs +++ b/sig/openai/models/beta/assistant_tool_choice.rbs @@ -25,7 +25,9 @@ module OpenAI type type_ = :function | :code_interpreter | :file_search - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION: :function CODE_INTERPRETER: :code_interpreter FILE_SEARCH: :file_search diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs index f7886116..ee421612 100644 --- a/sig/openai/models/beta/assistant_tool_choice_option.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs @@ -5,10 +5,14 @@ module OpenAI OpenAI::Models::Beta::AssistantToolChoiceOption::auto | OpenAI::Models::Beta::AssistantToolChoice - class AssistantToolChoiceOption < OpenAI::Union + module AssistantToolChoiceOption + extend OpenAI::Union + type auto = :none | :auto | :required - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE: :none AUTO: :auto REQUIRED: :required diff --git 
a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index e67f47e5..d3efa3c2 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -101,7 +101,9 @@ module OpenAI | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613" - class Model < OpenAI::Union + module Model + extend OpenAI::Union + O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs index b042a2f7..bd1238f4 100644 --- a/sig/openai/models/beta/file_search_tool.rbs +++ b/sig/openai/models/beta/file_search_tool.rbs @@ -71,7 +71,9 @@ module OpenAI type ranker = :auto | :default_2024_08_21 - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index ad12cf61..edd61dcf 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete - class MessageStreamEvent < OpenAI::Union + module MessageStreamEvent + extend OpenAI::Union + type thread_message_created = { data: OpenAI::Models::Beta::Threads::Message, diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index 51f78cb2..9207cecc 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -10,7 +10,9 @@ module OpenAI | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired - class RunStepStreamEvent < OpenAI::Union + module RunStepStreamEvent + extend OpenAI::Union + type thread_run_step_created = { data: OpenAI::Models::Beta::Threads::Runs::RunStep, diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index 46793589..4081dbf8 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -13,7 +13,9 @@ module OpenAI | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled | OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired - class RunStreamEvent < OpenAI::Union + module RunStreamEvent + extend OpenAI::Union + type thread_run_created = { data: OpenAI::Models::Beta::Threads::Run, diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index a7b4922f..9c1cf73a 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -84,7 +84,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -144,7 +146,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -155,7 +159,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend 
OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -190,7 +196,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -300,7 +308,9 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - class ChunkingStrategy < OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + type auto = { type: :auto } class Auto < OpenAI::BaseModel @@ -412,7 +422,9 @@ module OpenAI | OpenAI::Models::Beta::FileSearchTool | OpenAI::Models::Beta::FunctionTool - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end @@ -436,7 +448,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 5acf3300..f270cee9 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -62,7 +62,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -73,7 +75,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -108,7 +112,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -218,7 +224,9 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - class ChunkingStrategy < OpenAI::Union + module ChunkingStrategy + extend OpenAI::Union + type auto = { type: :auto } class Auto < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs index 527a6e58..cde96d26 100644 --- a/sig/openai/models/beta/threads/annotation.rbs +++ b/sig/openai/models/beta/threads/annotation.rbs @@ -6,7 +6,9 @@ module OpenAI OpenAI::Models::Beta::Threads::FileCitationAnnotation | OpenAI::Models::Beta::Threads::FilePathAnnotation - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] end end diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs index 21dee9df..22600103 100644 --- a/sig/openai/models/beta/threads/annotation_delta.rbs +++ 
b/sig/openai/models/beta/threads/annotation_delta.rbs @@ -6,7 +6,9 @@ module OpenAI OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - class AnnotationDelta < OpenAI::Union + module AnnotationDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] end end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index ce6ca5d5..0b0987a7 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -26,7 +26,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 3b996b60..d0870c96 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -28,7 +28,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 9aec42d8..36929cf8 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -26,7 +26,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index c7d07d96..3f1fcbdc 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -28,7 +28,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index e6c155a8..29fa0135 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -96,7 +96,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type assistant_tools_file_search_type_only = { type: :file_search } @@ -133,7 +135,9 @@ module OpenAI | :run_expired | :run_failed - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + CONTENT_FILTER: :content_filter MAX_TOKENS: :max_tokens RUN_CANCELLED: :run_cancelled @@ -146,7 +150,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -155,7 +161,9 @@ module OpenAI type status = :in_progress | :incomplete | :completed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress INCOMPLETE: :incomplete COMPLETED: :completed diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index 252bb7ff..dc2a9215 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -8,7 
+8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::TextContentBlock | OpenAI::Models::Beta::Threads::RefusalContentBlock - class MessageContent < OpenAI::Union + module MessageContent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] end end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index aab10ba6..1357cfba 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::RefusalDeltaBlock | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - class MessageContentDelta < OpenAI::Union + module MessageContentDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] end end diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs index 39228e33..76a88822 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -7,7 +7,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::ImageURLContentBlock | OpenAI::Models::Beta::Threads::TextContentBlockParam - class MessageContentPartParam < OpenAI::Union + module MessageContentPartParam + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index fb2276f9..3e6288ac 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -37,7 +37,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -48,7 +50,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -83,7 +87,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index e053ee9b..0487cbfd 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -30,7 +30,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index 
dca2c571..c8146f4e 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -51,7 +51,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index deae5170..e84c343f 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -140,7 +140,9 @@ module OpenAI type reason = :max_completion_tokens | :max_prompt_tokens - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens @@ -168,7 +170,9 @@ module OpenAI type code = :server_error | :rate_limit_exceeded | :invalid_prompt - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt @@ -231,7 +235,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index c9f5247b..5dfe9de6 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -121,7 +121,9 @@ module OpenAI String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -132,7 +134,9 @@ module OpenAI type role = :user | :assistant - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant @@ -167,7 +171,9 @@ module OpenAI OpenAI::Models::Beta::CodeInterpreterTool | OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + type file_search = { type: :file_search } class FileSearch < OpenAI::BaseModel @@ -185,7 +191,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -209,7 +217,9 @@ module OpenAI type type_ = :auto | :last_messages - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index fc43edb9..54dd93bc 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -45,7 +45,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run_status.rbs b/sig/openai/models/beta/threads/run_status.rbs index cfc75c7b..d7433b66 100644 --- a/sig/openai/models/beta/threads/run_status.rbs +++ b/sig/openai/models/beta/threads/run_status.rbs @@ -13,7 +13,9 @@ module OpenAI | :incomplete | :expired - class RunStatus < OpenAI::Enum + module RunStatus + extend OpenAI::Enum + QUEUED: :queued IN_PROGRESS: :in_progress REQUIRES_ACTION: :requires_action diff --git 
a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index 67d18341..ddc03a7d 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -47,7 +47,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - class Output < OpenAI::Union + module Output + extend OpenAI::Union + type logs = { logs: String, type: :logs } class Logs < OpenAI::BaseModel diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index d8884223..81324a83 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -63,7 +63,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - class Output < OpenAI::Union + module Output + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index 5b6c9102..8952fadb 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -71,7 +71,9 @@ module OpenAI type ranker = :auto | :default_2024_08_21 - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 @@ -135,7 +137,9 @@ module OpenAI type type_ = :text - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 4216d33b..36dd9a60 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStep = Runs::RunStep module Runs @@ -100,7 +99,9 @@ module OpenAI type code = :server_error | :rate_limit_exceeded - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded @@ -111,7 +112,9 @@ module OpenAI type status = :in_progress | :cancelled | :failed | :completed | :expired - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress CANCELLED: :cancelled FAILED: :failed @@ -125,13 +128,17 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] end type type_ = :message_creation | :tool_calls - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE_CREATION: :message_creation TOOL_CALLS: 
:tool_calls diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index 7f45537b..171b4303 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDelta = Runs::RunStepDelta module Runs @@ -28,7 +27,9 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - class StepDetails < OpenAI::Union + module StepDetails + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index f3f75aab..295594a8 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDeltaEvent = Runs::RunStepDeltaEvent module Runs diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index 5dd59c8c..d1daf15e 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -2,7 +2,6 @@ module OpenAI module Models module Beta module Threads - class RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta module Runs diff --git a/sig/openai/models/beta/threads/runs/run_step_include.rbs b/sig/openai/models/beta/threads/runs/run_step_include.rbs index be00b41f..ed1e3934 100644 --- a/sig/openai/models/beta/threads/runs/run_step_include.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_include.rbs @@ -2,14 +2,15 @@ module OpenAI module Models module Beta module Threads - - class RunStepInclude = Runs::RunStepInclude + module RunStepInclude = Runs::RunStepInclude module Runs type run_step_include = :"step_details.tool_calls[*].file_search.results[*].content" - class RunStepInclude < OpenAI::Enum + module RunStepInclude + extend OpenAI::Enum + STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT: :"step_details.tool_calls[*].file_search.results[*].content" def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include] diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index bd65efbf..641b3530 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -58,7 +58,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs index 42300b3c..081dbbdf 100644 --- a/sig/openai/models/beta/threads/runs/tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - class ToolCall < OpenAI::Union + module ToolCall + extend OpenAI::Union + def self.variants: -> 
[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs index 6c3c0ec7..472aee5f 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - class ToolCallDelta < OpenAI::Union + module ToolCallDelta + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] end end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 6c256523..ed15fbfe 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletion = Chat::ChatCompletion module Chat @@ -81,7 +80,9 @@ module OpenAI type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length TOOL_CALLS: :tool_calls @@ -113,7 +114,9 @@ module OpenAI type service_tier = :scale | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE: :scale DEFAULT: :default diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index fbe3c3f8..e05d98b5 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam module Chat @@ -62,7 +61,9 @@ module OpenAI String | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type array_of_content_part_array = ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] @@ -72,7 +73,9 @@ module OpenAI OpenAI::Models::Chat::ChatCompletionContentPartText | OpenAI::Models::Chat::ChatCompletionContentPartRefusal - class ArrayOfContentPart < OpenAI::Union + module ArrayOfContentPart + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index 856fe6c8..a3b3cda4 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionAudio = Chat::ChatCompletionAudio module Chat diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index d90d22b0..5f2424d0 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -1,6 +1,5 @@ 
module OpenAI module Models - class ChatCompletionAudioParam = Chat::ChatCompletionAudioParam module Chat @@ -24,7 +23,9 @@ module OpenAI type format_ = :wav | :mp3 | :flac | :opus | :pcm16 - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV: :wav MP3: :mp3 FLAC: :flac @@ -37,7 +38,9 @@ module OpenAI type voice = :alloy | :ash | :ballad | :coral | :echo | :sage | :shimmer | :verse - class Voice < OpenAI::Enum + module Voice + extend OpenAI::Enum + ALLOY: :alloy ASH: :ash BALLAD: :ballad diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index fa2494d9..c85a596e 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionChunk = Chat::ChatCompletionChunk module Chat @@ -134,7 +133,9 @@ module OpenAI type role = :developer | :system | :user | :assistant | :tool - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + DEVELOPER: :developer SYSTEM: :system USER: :user @@ -198,7 +199,9 @@ module OpenAI type type_ = :function - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FUNCTION: :function def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] @@ -209,7 +212,9 @@ module OpenAI type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length TOOL_CALLS: :tool_calls @@ -241,7 +246,9 @@ module OpenAI type service_tier = :scale | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + SCALE: :scale DEFAULT: :default diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 01231844..348780aa 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -1,7 +1,6 @@ module OpenAI module Models - - class ChatCompletionContentPart = Chat::ChatCompletionContentPart + module ChatCompletionContentPart = Chat::ChatCompletionContentPart module Chat type chat_completion_content_part = @@ -10,7 +9,9 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionContentPartInputAudio | OpenAI::Models::Chat::ChatCompletionContentPart::File - class ChatCompletionContentPart < OpenAI::Union + module ChatCompletionContentPart + extend OpenAI::Union + type file = { file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index b80ee986..1bde5081 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage module Chat @@ -46,7 +45,9 @@ module OpenAI type detail = :auto | :low | :high - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + AUTO: :auto LOW: :low HIGH: :high diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index bb409774..85902db7 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ 
b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio module Chat @@ -42,7 +41,9 @@ module OpenAI type format_ = :wav | :mp3 - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + WAV: :wav MP3: :mp3 diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs index e69c1ee3..e715e480 100644 --- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal module Chat diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs index 9c723c66..638f5e4e 100644 --- a/sig/openai/models/chat/chat_completion_content_part_text.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionContentPartText = Chat::ChatCompletionContentPartText module Chat diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs index 2d8a2cf3..e776fc71 100644 --- a/sig/openai/models/chat/chat_completion_deleted.rbs +++ b/sig/openai/models/chat/chat_completion_deleted.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionDeleted = Chat::ChatCompletionDeleted module Chat diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index e9cbba0a..aa1379e0 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs index b5e722fb..e7067a87 100644 --- a/sig/openai/models/chat/chat_completion_function_call_option.rbs +++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption module Chat diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs index b9ac7154..fad91c63 100644 --- a/sig/openai/models/chat/chat_completion_function_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam module Chat diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index b13a6e43..d3668749 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ 
b/sig/openai/models/chat/chat_completion_message.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionMessage = Chat::ChatCompletionMessage module Chat diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs index a42d88ca..c8d7612a 100644 --- a/sig/openai/models/chat/chat_completion_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_message_param.rbs @@ -1,7 +1,6 @@ module OpenAI module Models - - class ChatCompletionMessageParam = Chat::ChatCompletionMessageParam + module ChatCompletionMessageParam = Chat::ChatCompletionMessageParam module Chat type chat_completion_message_param = @@ -12,7 +11,9 @@ module OpenAI | OpenAI::Models::Chat::ChatCompletionToolMessageParam | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - class ChatCompletionMessageParam < OpenAI::Union + module ChatCompletionMessageParam + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] end end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index 4ed4b2d2..cd147e2d 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall module Chat diff --git a/sig/openai/models/chat/chat_completion_modality.rbs b/sig/openai/models/chat/chat_completion_modality.rbs index e8df5962..7075d723 100644 --- a/sig/openai/models/chat/chat_completion_modality.rbs +++ b/sig/openai/models/chat/chat_completion_modality.rbs @@ -1,12 +1,13 @@ module OpenAI module Models - - class ChatCompletionModality = Chat::ChatCompletionModality + module ChatCompletionModality = Chat::ChatCompletionModality module Chat type chat_completion_modality = :text | :audio - class ChatCompletionModality < OpenAI::Enum + module ChatCompletionModality + extend OpenAI::Enum + TEXT: :text AUDIO: :audio diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs index 8da9abd2..62feb1bf 100644 --- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs +++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice module Chat diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index e38d1c31..f49bc614 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent module Chat @@ -25,7 +24,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = 
::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs index 0d91a009..28d9e504 100644 --- a/sig/openai/models/chat/chat_completion_reasoning_effort.rbs +++ b/sig/openai/models/chat/chat_completion_reasoning_effort.rbs @@ -1,11 +1,9 @@ module OpenAI module Models - - class ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort + module ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort module Chat - - class ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort + module ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort end end end diff --git a/sig/openai/models/chat/chat_completion_role.rbs b/sig/openai/models/chat/chat_completion_role.rbs index be395c69..d805ec9e 100644 --- a/sig/openai/models/chat/chat_completion_role.rbs +++ b/sig/openai/models/chat/chat_completion_role.rbs @@ -1,13 +1,14 @@ module OpenAI module Models - - class ChatCompletionRole = Chat::ChatCompletionRole + module ChatCompletionRole = Chat::ChatCompletionRole module Chat type chat_completion_role = :developer | :system | :user | :assistant | :tool | :function - class ChatCompletionRole < OpenAI::Enum + module ChatCompletionRole + extend OpenAI::Enum + DEVELOPER: :developer SYSTEM: :system USER: :user diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 2719aa2e..75e77b6c 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage module Chat diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index ed0721bc..7fed3536 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions module Chat diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index b3ae49f9..f889a520 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs index 4695ba6a..a076afef 100644 --- a/sig/openai/models/chat/chat_completion_token_logprob.rbs +++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob module Chat diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index f690d128..d465043d 100644 --- 
a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionTool = Chat::ChatCompletionTool module Chat diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index e6c246ee..001520b8 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -1,17 +1,20 @@ module OpenAI module Models - - class ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption + module ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption module Chat type chat_completion_tool_choice_option = OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto | OpenAI::Models::Chat::ChatCompletionNamedToolChoice - class ChatCompletionToolChoiceOption < OpenAI::Union + module ChatCompletionToolChoiceOption + extend OpenAI::Union + type auto = :none | :auto | :required - class Auto < OpenAI::Enum + module Auto + extend OpenAI::Enum + NONE: :none AUTO: :auto REQUIRED: :required diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 86a425de..8dc39541 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam module Chat @@ -29,7 +28,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index 53192647..b359b18e 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam module Chat @@ -31,7 +30,9 @@ module OpenAI type content = String | ::Array[OpenAI::Models::Chat::chat_completion_content_part] - class Content < OpenAI::Union + module Content + extend OpenAI::Union + type chat_completion_content_part_array = ::Array[OpenAI::Models::Chat::chat_completion_content_part] diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 56982b45..9d777b19 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -166,7 +166,9 @@ module OpenAI type model = String | OpenAI::Models::chat_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::chat_model] end @@ -174,10 +176,14 @@ module OpenAI OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode | OpenAI::Models::Chat::ChatCompletionFunctionCallOption - class FunctionCall < OpenAI::Union + module FunctionCall + extend OpenAI::Union + type function_call_mode = :none | :auto - class FunctionCallMode < OpenAI::Enum + module FunctionCallMode + extend OpenAI::Enum + NONE: :none AUTO: :auto @@ -218,7 +224,9 @@ module 
OpenAI type modality = :text | :audio - class Modality < OpenAI::Enum + module Modality + extend OpenAI::Enum + TEXT: :text AUDIO: :audio @@ -230,13 +238,17 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONSchema | OpenAI::Models::ResponseFormatJSONObject - class ResponseFormat < OpenAI::Union + module ResponseFormat + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] end type service_tier = :auto | :default - class ServiceTier < OpenAI::Enum + module ServiceTier + extend OpenAI::Enum + AUTO: :auto DEFAULT: :default @@ -245,7 +257,9 @@ module OpenAI type stop = (String | ::Array[String])? - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -277,7 +291,9 @@ module OpenAI type search_context_size = :low | :medium | :high - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW: :low MEDIUM: :medium HIGH: :high diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 9540d790..bb536c64 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -48,7 +48,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 88456945..c066a38c 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -39,7 +39,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 75caff71..1578b25d 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -45,7 +45,9 @@ module OpenAI | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613" - class ChatModel < OpenAI::Enum + module ChatModel + extend OpenAI::Enum + O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index a3873e2d..20f33540 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -24,7 +24,9 @@ module OpenAI type type_ = :eq | :ne | :gt | :gte | :lt | :lte - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + EQ: :eq NE: :ne GT: :gt @@ -37,7 +39,9 @@ module OpenAI type value = String | Float | bool - class Value < OpenAI::Union + module Value + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs index 05e3ac68..700baf43 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -28,7 +28,9 @@ module OpenAI type finish_reason = :stop | :length | :content_filter - class FinishReason < OpenAI::Enum + module FinishReason + extend OpenAI::Enum + STOP: :stop LENGTH: :length CONTENT_FILTER: :content_filter diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index fd2677d3..358e6503 100644 --- a/sig/openai/models/completion_create_params.rbs 
+++ b/sig/openai/models/completion_create_params.rbs @@ -88,7 +88,9 @@ module OpenAI type model = String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" - class Model < OpenAI::Union + module Model + extend OpenAI::Union + GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" DAVINCI_002: :"davinci-002" BABBAGE_002: :"babbage-002" @@ -101,7 +103,9 @@ module OpenAI type prompt = String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]] - class Prompt < OpenAI::Union + module Prompt + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -119,7 +123,9 @@ module OpenAI type stop = (String | ::Array[String])? - class Stop < OpenAI::Union + module Stop + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs index 35aba042..80a9c41e 100644 --- a/sig/openai/models/compound_filter.rbs +++ b/sig/openai/models/compound_filter.rbs @@ -20,13 +20,17 @@ module OpenAI type filter = OpenAI::Models::ComparisonFilter | top - class Filter < OpenAI::Union + module Filter + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ComparisonFilter, top] end type type_ = :and | :or - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + AND: :and OR: :or diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 59ae7a9f..d08604d9 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -46,7 +46,9 @@ module OpenAI type input = String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]] - class Input < OpenAI::Union + module Input + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -64,13 +66,17 @@ module OpenAI type model = String | OpenAI::Models::embedding_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::embedding_model] end type encoding_format = :float | :base64 - class EncodingFormat < OpenAI::Enum + module EncodingFormat + extend OpenAI::Enum + FLOAT: :float BASE64: :base64 diff --git a/sig/openai/models/embedding_model.rbs b/sig/openai/models/embedding_model.rbs index c334a8a5..ed029cea 100644 --- a/sig/openai/models/embedding_model.rbs +++ b/sig/openai/models/embedding_model.rbs @@ -5,7 +5,9 @@ module OpenAI | :"text-embedding-3-small" | :"text-embedding-3-large" - class EmbeddingModel < OpenAI::Enum + module EmbeddingModel + extend OpenAI::Enum + TEXT_EMBEDDING_ADA_002: :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL: :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE: :"text-embedding-3-large" diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs index d287b675..5efd7f51 100644 --- a/sig/openai/models/file_chunking_strategy.rbs +++ b/sig/openai/models/file_chunking_strategy.rbs @@ -4,7 +4,9 @@ module OpenAI OpenAI::Models::StaticFileChunkingStrategyObject | OpenAI::Models::OtherFileChunkingStrategyObject - class FileChunkingStrategy < OpenAI::Union + module FileChunkingStrategy + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] end end diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs index f5f9f28a..818b7c90 100644 --- 
a/sig/openai/models/file_chunking_strategy_param.rbs +++ b/sig/openai/models/file_chunking_strategy_param.rbs @@ -4,7 +4,9 @@ module OpenAI OpenAI::Models::AutoFileChunkingStrategyParam | OpenAI::Models::StaticFileChunkingStrategyObjectParam - class FileChunkingStrategyParam < OpenAI::Union + module FileChunkingStrategyParam + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] end end diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 2f36b51c..9d793064 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -43,7 +43,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index f229d68d..3bb22b13 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -59,7 +59,9 @@ module OpenAI | :"fine-tune-results" | :vision - class Purpose < OpenAI::Enum + module Purpose + extend OpenAI::Enum + ASSISTANTS: :assistants ASSISTANTS_OUTPUT: :assistants_output BATCH: :batch @@ -73,7 +75,9 @@ module OpenAI type status = :uploaded | :processed | :error - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + UPLOADED: :uploaded PROCESSED: :processed ERROR: :error diff --git a/sig/openai/models/file_purpose.rbs b/sig/openai/models/file_purpose.rbs index cf532f5b..527e5d11 100644 --- a/sig/openai/models/file_purpose.rbs +++ b/sig/openai/models/file_purpose.rbs @@ -3,7 +3,9 @@ module OpenAI type file_purpose = :assistants | :batch | :"fine-tune" | :vision | :user_data | :evals - class FilePurpose < OpenAI::Enum + module FilePurpose + extend OpenAI::Enum + ASSISTANTS: :assistants BATCH: :batch FINE_TUNE: :"fine-tune" diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index 3b4f5915..b696b25a 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class FineTuningJob = FineTuning::FineTuningJob module FineTuning @@ -148,19 +147,25 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = :auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -173,7 +178,9 @@ module OpenAI | :failed | :cancelled - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + VALIDATING_FILES: :validating_files QUEUED: :queued RUNNING: :running @@ -280,25 +287,33 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type beta = :auto | Float - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = 
:auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -358,19 +373,25 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = :auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -378,7 +399,9 @@ module OpenAI type type_ = :supervised | :dpo - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED: :supervised DPO: :dpo diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index 239f0331..e70febc9 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class FineTuningJobEvent = FineTuning::FineTuningJobEvent module FineTuning @@ -50,7 +49,9 @@ module OpenAI type level = :info | :warn | :error - class Level < OpenAI::Enum + module Level + extend OpenAI::Enum + INFO: :info WARN: :warn ERROR: :error @@ -60,7 +61,9 @@ module OpenAI type type_ = :message | :metrics - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE: :message METRICS: :metrics diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs index 55ed40a2..ada2b1f0 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_integration.rbs @@ -1,10 +1,8 @@ module OpenAI module Models - class FineTuningJobIntegration = FineTuning::FineTuningJobIntegration module FineTuning - class FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs index bc0a81a5..f5fc5cec 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration module FineTuning diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index 18b23c37..e156cf05 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject module FineTuning diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 8942f62f..913d7cd3 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -67,7 +67,9 @@ module OpenAI | :"gpt-3.5-turbo" | :"gpt-4o-mini" - class Model < OpenAI::Union + module Model + extend OpenAI::Union + BABBAGE_002: 
:"babbage-002" DAVINCI_002: :"davinci-002" GPT_3_5_TURBO: :"gpt-3.5-turbo" @@ -115,19 +117,25 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = :auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -276,25 +284,33 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type beta = :auto | Float - class Beta < OpenAI::Union + module Beta + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = :auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -354,19 +370,25 @@ module OpenAI type batch_size = :auto | Integer - class BatchSize < OpenAI::Union + module BatchSize + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float - class LearningRateMultiplier < OpenAI::Union + module LearningRateMultiplier + extend OpenAI::Union + def self.variants: -> [:auto, Float] end type n_epochs = :auto | Integer - class NEpochs < OpenAI::Union + module NEpochs + extend OpenAI::Union + def self.variants: -> [:auto, Integer] end end @@ -374,7 +396,9 @@ module OpenAI type type_ = :supervised | :dpo - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + SUPERVISED: :supervised DPO: :dpo diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 7770fc34..1460783f 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -43,13 +43,17 @@ module OpenAI type model = String | OpenAI::Models::image_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::image_model] end type response_format = :url | :b64_json - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL: :url B64_JSON: :b64_json @@ -58,7 +62,9 @@ module OpenAI type size = :"256x256" | :"512x512" | :"1024x1024" - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256: :"256x256" NUMBER_512X512: :"512x512" NUMBER_1024X1024: :"1024x1024" diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 63e0fbf0..cebbb406 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -53,13 +53,17 @@ module OpenAI type model = String | OpenAI::Models::image_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::image_model] end type response_format = :url | :b64_json - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL: :url B64_JSON: :b64_json @@ -68,7 +72,9 @@ module OpenAI type size = :"256x256" | :"512x512" | :"1024x1024" - class Size < 
OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256: :"256x256" NUMBER_512X512: :"512x512" NUMBER_1024X1024: :"1024x1024" diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index 1a3f9162..ea843f0f 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -55,13 +55,17 @@ module OpenAI type model = String | OpenAI::Models::image_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::image_model] end type quality = :standard | :hd - class Quality < OpenAI::Enum + module Quality + extend OpenAI::Enum + STANDARD: :standard HD: :hd @@ -70,7 +74,9 @@ module OpenAI type response_format = :url | :b64_json - class ResponseFormat < OpenAI::Enum + module ResponseFormat + extend OpenAI::Enum + URL: :url B64_JSON: :b64_json @@ -80,7 +86,9 @@ module OpenAI type size = :"256x256" | :"512x512" | :"1024x1024" | :"1792x1024" | :"1024x1792" - class Size < OpenAI::Enum + module Size + extend OpenAI::Enum + NUMBER_256X256: :"256x256" NUMBER_512X512: :"512x512" NUMBER_1024X1024: :"1024x1024" @@ -92,7 +100,9 @@ module OpenAI type style = :vivid | :natural - class Style < OpenAI::Enum + module Style + extend OpenAI::Enum + VIVID: :vivid NATURAL: :natural diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs index 1151fd9b..e68710d9 100644 --- a/sig/openai/models/image_model.rbs +++ b/sig/openai/models/image_model.rbs @@ -2,7 +2,9 @@ module OpenAI module Models type image_model = :"dall-e-2" | :"dall-e-3" - class ImageModel < OpenAI::Enum + module ImageModel + extend OpenAI::Enum + DALL_E_2: :"dall-e-2" DALL_E_3: :"dall-e-3" diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index d35f2749..9e7a8041 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -153,7 +153,9 @@ module OpenAI type harassment = :text - class Harassment < OpenAI::Enum + module Harassment + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] @@ -161,7 +163,9 @@ module OpenAI type harassment_threatening = :text - class HarassmentThreatening < OpenAI::Enum + module HarassmentThreatening + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] @@ -169,7 +173,9 @@ module OpenAI type hate = :text - class Hate < OpenAI::Enum + module Hate + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] @@ -177,7 +183,9 @@ module OpenAI type hate_threatening = :text - class HateThreatening < OpenAI::Enum + module HateThreatening + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] @@ -185,7 +193,9 @@ module OpenAI type illicit = :text - class Illicit < OpenAI::Enum + module Illicit + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] @@ -193,7 +203,9 @@ module OpenAI type illicit_violent = :text - class IllicitViolent < OpenAI::Enum + module IllicitViolent + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] @@ -201,7 +213,9 @@ module OpenAI type self_harm = :text | :image - class SelfHarm < OpenAI::Enum + module 
SelfHarm + extend OpenAI::Enum + TEXT: :text IMAGE: :image @@ -210,7 +224,9 @@ module OpenAI type self_harm_instruction = :text | :image - class SelfHarmInstruction < OpenAI::Enum + module SelfHarmInstruction + extend OpenAI::Enum + TEXT: :text IMAGE: :image @@ -219,7 +235,9 @@ module OpenAI type self_harm_intent = :text | :image - class SelfHarmIntent < OpenAI::Enum + module SelfHarmIntent + extend OpenAI::Enum + TEXT: :text IMAGE: :image @@ -228,7 +246,9 @@ module OpenAI type sexual = :text | :image - class Sexual < OpenAI::Enum + module Sexual + extend OpenAI::Enum + TEXT: :text IMAGE: :image @@ -237,7 +257,9 @@ module OpenAI type sexual_minor = :text - class SexualMinor < OpenAI::Enum + module SexualMinor + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] @@ -245,7 +267,9 @@ module OpenAI type violence = :text | :image - class Violence < OpenAI::Enum + module Violence + extend OpenAI::Enum + TEXT: :text IMAGE: :image @@ -254,7 +278,9 @@ module OpenAI type violence_graphic = :text | :image - class ViolenceGraphic < OpenAI::Enum + module ViolenceGraphic + extend OpenAI::Enum + TEXT: :text IMAGE: :image diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index c95e6d3e..41fec7ce 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -32,7 +32,9 @@ module OpenAI | ::Array[String] | ::Array[OpenAI::Models::moderation_multi_modal_input] - class Input < OpenAI::Union + module Input + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -47,7 +49,9 @@ module OpenAI type model = String | OpenAI::Models::moderation_model - class Model < OpenAI::Union + module Model + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::moderation_model] end end diff --git a/sig/openai/models/moderation_model.rbs b/sig/openai/models/moderation_model.rbs index 6fb884f3..fa7264f2 100644 --- a/sig/openai/models/moderation_model.rbs +++ b/sig/openai/models/moderation_model.rbs @@ -6,7 +6,9 @@ module OpenAI | :"text-moderation-latest" | :"text-moderation-stable" - class ModerationModel < OpenAI::Enum + module ModerationModel + extend OpenAI::Enum + OMNI_MODERATION_LATEST: :"omni-moderation-latest" OMNI_MODERATION_2024_09_26: :"omni-moderation-2024-09-26" TEXT_MODERATION_LATEST: :"text-moderation-latest" diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs index c98cd3a4..9388bae0 100644 --- a/sig/openai/models/moderation_multi_modal_input.rbs +++ b/sig/openai/models/moderation_multi_modal_input.rbs @@ -4,7 +4,9 @@ module OpenAI OpenAI::Models::ModerationImageURLInput | OpenAI::Models::ModerationTextInput - class ModerationMultiModalInput < OpenAI::Union + module ModerationMultiModalInput + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput] end end diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index a4184fb6..8452d648 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -20,7 +20,9 @@ module OpenAI type generate_summary = :concise | :detailed - class GenerateSummary < OpenAI::Enum + module GenerateSummary + extend OpenAI::Enum + CONCISE: :concise DETAILED: :detailed diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs index 
57327554..27d712d0 100644 --- a/sig/openai/models/reasoning_effort.rbs +++ b/sig/openai/models/reasoning_effort.rbs @@ -2,7 +2,9 @@ module OpenAI module Models type reasoning_effort = :low | :medium | :high - class ReasoningEffort < OpenAI::Enum + module ReasoningEffort + extend OpenAI::Enum + LOW: :low MEDIUM: :medium HIGH: :high diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index dbfd8278..944fbd51 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -29,7 +29,9 @@ module OpenAI type environment = :mac | :windows | :ubuntu | :browser - class Environment < OpenAI::Enum + module Environment + extend OpenAI::Enum + MAC: :mac WINDOWS: :windows UBUNTU: :ubuntu diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 8e5bc808..10931508 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -31,13 +31,17 @@ module OpenAI String | OpenAI::Models::Responses::response_input_message_content_list - class Content < OpenAI::Union + module Content + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list] end type role = :user | :assistant | :system | :developer - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user ASSISTANT: :assistant SYSTEM: :system @@ -48,7 +52,9 @@ module OpenAI type type_ = :message - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE: :message def self.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_] diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 8c714006..fbea9c27 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -44,7 +44,9 @@ module OpenAI type filters = OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end @@ -74,7 +76,9 @@ module OpenAI type ranker = :auto | :"default-2024-11-15" - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 2971462e..633f4ef4 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -44,7 +44,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 39953b70..21f27944 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -133,7 +133,9 @@ module OpenAI type reason = :max_output_tokens | :content_filter - class Reason < OpenAI::Enum + module Reason + extend OpenAI::Enum + MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter @@ -146,13 +148,17 @@ module OpenAI | OpenAI::Models::Responses::ToolChoiceTypes | OpenAI::Models::Responses::ToolChoiceFunction - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + def 
self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end type truncation = :auto | :disabled - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO: :auto DISABLED: :disabled diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index 1201cdfe..3d2f285b 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -35,7 +35,9 @@ module OpenAI OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs | OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files - class Result < OpenAI::Union + module Result + extend OpenAI::Union + type logs = { logs: String, type: :logs } class Logs < OpenAI::BaseModel @@ -84,7 +86,9 @@ module OpenAI type status = :in_progress | :interpreting | :completed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress INTERPRETING: :interpreting COMPLETED: :completed diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 69f3d3e4..e6b8fd9e 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -46,7 +46,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait - class Action < OpenAI::Union + module Action + extend OpenAI::Union + type click = { button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, @@ -75,7 +77,9 @@ module OpenAI type button = :left | :right | :wheel | :back | :forward - class Button < OpenAI::Enum + module Button + extend OpenAI::Enum + LEFT: :left RIGHT: :right WHEEL: :wheel @@ -244,7 +248,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete @@ -254,7 +260,9 @@ module OpenAI type type_ = :computer_call - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + COMPUTER_CALL: :computer_call def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 0e737186..c4a48c1d 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -60,7 +60,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs index c7bb377e..ebfaa85b 100644 --- a/sig/openai/models/responses/response_content.rbs +++ b/sig/openai/models/responses/response_content.rbs @@ -8,7 +8,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputText | OpenAI::Models::Responses::ResponseOutputRefusal - class ResponseContent < OpenAI::Union + module ResponseContent + extend 
OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index b221cdc0..2f4c1e05 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -35,7 +35,9 @@ module OpenAI OpenAI::Models::Responses::ResponseOutputText | OpenAI::Models::Responses::ResponseOutputRefusal - class Part < OpenAI::Union + module Part + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index f7c71025..792dd89a 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -35,7 +35,9 @@ module OpenAI OpenAI::Models::Responses::ResponseOutputText | OpenAI::Models::Responses::ResponseOutputRefusal - class Part < OpenAI::Union + module Part + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 697640de..7336a20e 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -100,7 +100,9 @@ module OpenAI type input = String | OpenAI::Models::Responses::response_input - class Input < OpenAI::Union + module Input + extend OpenAI::Union + def self.variants: -> [String, OpenAI::Models::Responses::response_input] end @@ -109,13 +111,17 @@ module OpenAI | OpenAI::Models::Responses::ToolChoiceTypes | OpenAI::Models::Responses::ToolChoiceFunction - class ToolChoice < OpenAI::Union + module ToolChoice + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end type truncation = :auto | :disabled - class Truncation < OpenAI::Enum + module Truncation + extend OpenAI::Enum + AUTO: :auto DISABLED: :disabled diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index 5d705a42..c9461327 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -39,7 +39,9 @@ module OpenAI | :failed_to_download_image | :image_file_not_found - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index 8bcaf1dc..102ce635 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -34,7 +34,9 @@ module OpenAI type status = :in_progress | :searching | :completed | :incomplete | :failed - class Status 
< OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress SEARCHING: :searching COMPLETED: :completed @@ -84,7 +86,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs index 7a38cb8e..ffd41786 100644 --- a/sig/openai/models/responses/response_format_text_config.rbs +++ b/sig/openai/models/responses/response_format_text_config.rbs @@ -6,7 +6,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig | OpenAI::Models::ResponseFormatJSONObject - class ResponseFormatTextConfig < OpenAI::Union + module ResponseFormatTextConfig + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] end end diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index acd89dee..a726e572 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -43,7 +43,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 3265f83f..eea6788a 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -37,7 +37,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 7da9b40d..301b596c 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -25,7 +25,9 @@ module OpenAI type status = :in_progress | :searching | :completed | :failed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress SEARCHING: :searching COMPLETED: :completed diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index b2a08af5..df76bd92 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -6,7 +6,9 @@ module OpenAI | :"message.input_image.image_url" | :"computer_call_output.output.image_url" - class ResponseIncludable < OpenAI::Enum + module ResponseIncludable + extend OpenAI::Enum + FILE_SEARCH_CALL_RESULTS: :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index b41c744b..e015e2d8 100644 --- 
a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -25,7 +25,9 @@ module OpenAI type format_ = :mp3 | :wav - class Format < OpenAI::Enum + module Format + extend OpenAI::Enum + MP3: :mp3 WAV: :wav diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs index 004cfa5c..6f2fe81c 100644 --- a/sig/openai/models/responses/response_input_content.rbs +++ b/sig/openai/models/responses/response_input_content.rbs @@ -6,7 +6,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseInputImage | OpenAI::Models::Responses::ResponseInputFile - class ResponseInputContent < OpenAI::Union + module ResponseInputContent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] end end diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index 24abed14..fd361d9a 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -29,7 +29,9 @@ module OpenAI type detail = :high | :low | :auto - class Detail < OpenAI::Enum + module Detail + extend OpenAI::Enum + HIGH: :high LOW: :low AUTO: :auto diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index 869b143f..20caf979 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -14,7 +14,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseReasoningItem | OpenAI::Models::Responses::ResponseInputItem::ItemReference - class ResponseInputItem < OpenAI::Union + module ResponseInputItem + extend OpenAI::Union + type message = { content: OpenAI::Models::Responses::response_input_message_content_list, @@ -51,7 +53,9 @@ module OpenAI type role = :user | :system | :developer - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user SYSTEM: :system DEVELOPER: :developer @@ -61,7 +65,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete @@ -71,7 +77,9 @@ module OpenAI type type_ = :message - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE: :message def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_] @@ -139,7 +147,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete @@ -186,7 +196,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index 45e1f023..2671fd84 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -41,7 +41,9 @@ module OpenAI type role = :user | :system | :developer - class Role < OpenAI::Enum + module Role + extend OpenAI::Enum + USER: :user SYSTEM: :system 
DEVELOPER: :developer @@ -51,7 +53,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete @@ -61,7 +65,9 @@ module OpenAI type type_ = :message - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + MESSAGE: :message def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index 8fc80907..8d2e807d 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -11,7 +11,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseFunctionToolCallItem | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - class ResponseItem < OpenAI::Union + module ResponseItem + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] end end diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index 37f78a23..dbed0410 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class ResponseItemList = Responses::ResponseItemList module Responses diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index e01fe1b2..9868493b 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -9,7 +9,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseComputerToolCall | OpenAI::Models::Responses::ResponseReasoningItem - class ResponseOutputItem < OpenAI::Union + module ResponseOutputItem + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] end end diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index b003219c..81eebadd 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -35,13 +35,17 @@ module OpenAI OpenAI::Models::Responses::ResponseOutputText | OpenAI::Models::Responses::ResponseOutputRefusal - class Content < OpenAI::Union + module Content + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_output_text.rbs 
b/sig/openai/models/responses/response_output_text.rbs index a40aee39..3d499c72 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -28,7 +28,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation | OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + type file_citation = { file_id: String, index: Integer, type: :file_citation } diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 23d2dd02..17d8480f 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -45,7 +45,9 @@ module OpenAI type status = :in_progress | :completed | :incomplete - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed INCOMPLETE: :incomplete diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index 17349a0c..3ecc2fc1 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -3,7 +3,9 @@ module OpenAI module Responses type response_status = :completed | :failed | :in_progress | :incomplete - class ResponseStatus < OpenAI::Enum + module ResponseStatus + extend OpenAI::Enum + COMPLETED: :completed FAILED: :failed IN_PROGRESS: :in_progress diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 0d48dfd6..8642b35b 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -35,7 +35,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent - class ResponseStreamEvent < OpenAI::Union + module ResponseStreamEvent + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, 
OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] end end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index d01375b4..17d56582 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -40,7 +40,9 @@ module OpenAI | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - class Annotation < OpenAI::Union + module Annotation + extend OpenAI::Union + type file_citation = { file_id: String, index: Integer, type: :file_citation } diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 642f7196..7cf475b8 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -7,7 +7,9 @@ module OpenAI | OpenAI::Models::Responses::ComputerTool | OpenAI::Models::Responses::WebSearchTool - class Tool < OpenAI::Union + module Tool + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] end end diff --git a/sig/openai/models/responses/tool_choice_options.rbs b/sig/openai/models/responses/tool_choice_options.rbs index e902ea2c..412031bf 100644 --- a/sig/openai/models/responses/tool_choice_options.rbs +++ b/sig/openai/models/responses/tool_choice_options.rbs @@ -3,7 +3,9 @@ module OpenAI module Responses type tool_choice_options = :none | :auto | :required - class ToolChoiceOptions < OpenAI::Enum + module ToolChoiceOptions + extend OpenAI::Enum + NONE: :none AUTO: :auto REQUIRED: :required diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index ea1e958d..73f15517 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -19,7 +19,9 @@ module OpenAI | :computer_use_preview | :web_search_preview_2025_03_11 - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + FILE_SEARCH: :file_search WEB_SEARCH_PREVIEW: :web_search_preview COMPUTER_USE_PREVIEW: :computer_use_preview diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index abb3f9a1..20a3b337 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -29,7 +29,9 @@ module OpenAI type type_ = :web_search_preview | :web_search_preview_2025_03_11 - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 @@ -38,7 +40,9 @@ module OpenAI type search_context_size = :low | :medium | :high - class SearchContextSize < OpenAI::Enum + module SearchContextSize + extend OpenAI::Enum + LOW: :low MEDIUM: :medium 
HIGH: :high diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 582ad254..3bfeacd2 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -5,14 +5,18 @@ module OpenAI | OpenAI::Models::chat_model | OpenAI::Models::ResponsesModel::union_member2 - class ResponsesModel < OpenAI::Union + module ResponsesModel + extend OpenAI::Union + type union_member2 = :"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" - class UnionMember2 < OpenAI::Enum + module UnionMember2 + extend OpenAI::Enum + O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" COMPUTER_USE_PREVIEW: :"computer-use-preview" diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index eead4b34..a5baefff 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -48,7 +48,9 @@ module OpenAI type status = :pending | :completed | :cancelled | :expired - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + PENDING: :pending COMPLETED: :completed CANCELLED: :cancelled diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 94f8bb7b..2a1f9e38 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class UploadPart = Uploads::UploadPart module Uploads diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 2f4e439b..ba9a3850 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -91,7 +91,9 @@ module OpenAI type status = :expired | :in_progress | :completed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + EXPIRED: :expired IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index b2ea6f76..b9d2ab90 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -43,7 +43,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 0918e4b6..17e51e48 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -49,7 +49,9 @@ module OpenAI type query = String | ::Array[String] - class Query < OpenAI::Union + module Query + extend OpenAI::Union + type string_array = ::Array[String] StringArray: string_array @@ -60,7 +62,9 @@ module OpenAI type filters = OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter - class Filters < OpenAI::Union + module Filters + extend OpenAI::Union + def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end @@ -90,7 +94,9 @@ module OpenAI type ranker = :auto | :"default-2024-11-15" - class Ranker < OpenAI::Enum + module Ranker + extend OpenAI::Enum + AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index ae59192d..497e9d9d 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -32,7 +32,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < 
OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end @@ -56,7 +58,9 @@ module OpenAI type type_ = :text - class Type < OpenAI::Enum + module Type + extend OpenAI::Enum + TEXT: :text def self.values: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::Content::type_] diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 80029298..78f70166 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -34,7 +34,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index e642c6f9..d5bdb26b 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -56,7 +56,9 @@ module OpenAI type filter = :in_progress | :completed | :failed | :cancelled - class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed FAILED: :failed @@ -67,7 +69,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index ac3e0e2c..8ecec280 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -34,7 +34,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 0cc01f91..362141a4 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -52,7 +52,9 @@ module OpenAI type filter = :in_progress | :completed | :failed | :cancelled - class Filter < OpenAI::Enum + module Filter + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed FAILED: :failed @@ -63,7 +65,9 @@ module OpenAI type order = :asc | :desc - class Order < OpenAI::Enum + module Order + extend OpenAI::Enum + ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index 6b2b7029..b85d9f01 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -26,7 +26,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 79940901..79b4b73c 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class VectorStoreFile = VectorStores::VectorStoreFile module VectorStores @@ -74,7 +73,9 @@ module 
OpenAI type code = :server_error | :unsupported_file | :invalid_file - class Code < OpenAI::Enum + module Code + extend OpenAI::Enum + SERVER_ERROR: :server_error UNSUPPORTED_FILE: :unsupported_file INVALID_FILE: :invalid_file @@ -85,7 +86,9 @@ module OpenAI type status = :in_progress | :completed | :cancelled | :failed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed CANCELLED: :cancelled @@ -96,7 +99,9 @@ module OpenAI type attribute = String | Float | bool - class Attribute < OpenAI::Union + module Attribute + extend OpenAI::Union + def self.variants: -> [String, Float, bool] end end diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index 497f5dc3..cc689671 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class VectorStoreFileBatch = VectorStores::VectorStoreFileBatch module VectorStores @@ -71,7 +70,9 @@ module OpenAI type status = :in_progress | :completed | :cancelled | :failed - class Status < OpenAI::Enum + module Status + extend OpenAI::Enum + IN_PROGRESS: :in_progress COMPLETED: :completed CANCELLED: :cancelled diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs index dde63be2..19257a53 100644 --- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs @@ -1,6 +1,5 @@ module OpenAI module Models - class VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted module VectorStores diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index 1a3c623e..bb5fb2a6 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -3,7 +3,9 @@ require_relative "test_helper" class OpenAI::Test::BaseModelTest < Minitest::Test - class E1 < OpenAI::Enum + module E1 + extend OpenAI::Enum + A = :a B = :b end @@ -242,13 +244,17 @@ class M3 < M1 optional :b, E1, api_name: :renamed_again end - class U1 < OpenAI::Union + module U1 + extend OpenAI::Union + discriminator :type variant :a, M1 variant :b, M3 end - class U2 < OpenAI::Union + module U2 + extend OpenAI::Union + variant A1 variant A3 end @@ -330,12 +336,16 @@ def test_basic_const end end - class E2 < OpenAI::Enum + module E2 + extend OpenAI::Enum + A = :a B = :b end - class U3 < OpenAI::Union + module U3 + extend OpenAI::Union + discriminator :type variant :a, M1 variant :b, M3 @@ -353,7 +363,9 @@ def test_basic_eql assert_equal(U1, U3) end - class U4 < OpenAI::Union + module U4 + extend OpenAI::Union + variant :a, const: :a variant :b, const: :b end From 52e55e5f3025417d187d4ba37b115202889e52c8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 22:43:08 +0000 Subject: [PATCH 046/295] chore: `BaseModel` fields that are `BaseModel` typed should also accept `Hash` (#52) --- rbi/lib/openai/models/batch.rbi | 10 +- rbi/lib/openai/models/beta/assistant.rbi | 12 +- .../models/beta/assistant_create_params.rbi | 34 ++++-- .../models/beta/assistant_stream_event.rbi | 112 +++++++++++++----- .../models/beta/assistant_tool_choice.rbi | 4 +- .../models/beta/assistant_update_params.rbi | 24 +++- .../openai/models/beta/file_search_tool.rbi | 8 +- 
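# ---------------------------------------------------------------------------
# [Editorial sketch; not part of the patch series.] The preceding patch's sig
# and test changes migrate every enum and union from subclassing
# (`class X < OpenAI::Enum`) to a mixin (`module X; extend OpenAI::Enum`).
# Mirroring the `module E1` test fixture above, a hypothetical enum
# definition now reads (assuming the openai gem is loaded):

module Order
  extend OpenAI::Enum

  ASC = :asc
  DESC = :desc
end

Order.values # assumed to return [:asc, :desc], matching the RBS
             # `def self.values: -> ::Array[...]` signatures above
# ---------------------------------------------------------------------------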
rbi/lib/openai/models/beta/function_tool.rbi | 5 +- .../models/beta/message_stream_event.rbi | 24 +++- .../models/beta/run_step_stream_event.rbi | 28 ++--- .../openai/models/beta/run_stream_event.rbi | 50 ++++++-- rbi/lib/openai/models/beta/thread.rbi | 12 +- .../beta/thread_create_and_run_params.rbi | 96 ++++++++++++--- .../models/beta/thread_create_params.rbi | 30 +++-- .../models/beta/thread_stream_event.rbi | 5 +- .../models/beta/thread_update_params.rbi | 20 +++- .../beta/threads/file_citation_annotation.rbi | 6 +- .../file_citation_delta_annotation.rbi | 8 +- .../beta/threads/file_path_annotation.rbi | 4 +- .../threads/file_path_delta_annotation.rbi | 4 +- .../beta/threads/image_file_content_block.rbi | 5 +- .../beta/threads/image_file_delta_block.rbi | 4 +- .../beta/threads/image_url_content_block.rbi | 5 +- .../beta/threads/image_url_delta_block.rbi | 4 +- .../openai/models/beta/threads/message.rbi | 8 +- .../beta/threads/message_delta_event.rbi | 3 +- .../required_action_function_tool_call.rbi | 8 +- rbi/lib/openai/models/beta/threads/run.rbi | 28 +++-- .../models/beta/threads/run_create_params.rbi | 12 +- .../runs/code_interpreter_output_image.rbi | 8 +- .../runs/code_interpreter_tool_call.rbi | 24 +++- .../runs/code_interpreter_tool_call_delta.rbi | 14 ++- .../threads/runs/file_search_tool_call.rbi | 20 +++- .../beta/threads/runs/function_tool_call.rbi | 4 +- .../threads/runs/function_tool_call_delta.rbi | 8 +- .../runs/message_creation_step_details.rbi | 14 ++- .../models/beta/threads/runs/run_step.rbi | 10 +- .../threads/runs/run_step_delta_event.rbi | 4 +- .../runs/run_step_delta_message_delta.rbi | 14 ++- .../beta/threads/text_content_block.rbi | 5 +- .../models/beta/threads/text_delta_block.rbi | 5 +- .../openai/models/chat/chat_completion.rbi | 12 +- ...hat_completion_assistant_message_param.rbi | 20 +++- .../models/chat/chat_completion_chunk.rbi | 33 ++++-- .../chat/chat_completion_content_part.rbi | 4 +- .../chat_completion_content_part_image.rbi | 4 +- ...at_completion_content_part_input_audio.rbi | 8 +- .../models/chat/chat_completion_message.rbi | 16 ++- .../chat_completion_message_tool_call.rbi | 4 +- .../chat_completion_named_tool_choice.rbi | 4 +- .../models/chat/chat_completion_tool.rbi | 5 +- .../models/chat/completion_create_params.rbi | 42 +++++-- rbi/lib/openai/models/completion.rbi | 5 +- rbi/lib/openai/models/completion_choice.rbi | 4 +- .../models/completion_create_params.rbi | 4 +- rbi/lib/openai/models/completion_usage.rbi | 8 +- .../models/create_embedding_response.rbi | 4 +- .../models/fine_tuning/fine_tuning_job.rbi | 42 ++++--- ...ne_tuning_job_wandb_integration_object.rbi | 4 +- .../models/fine_tuning/job_create_params.rbi | 42 ++++--- .../jobs/fine_tuning_job_checkpoint.rbi | 6 +- rbi/lib/openai/models/moderation.rbi | 14 ++- .../models/moderation_image_url_input.rbi | 4 +- .../models/response_format_json_schema.rbi | 4 +- .../models/responses/file_search_tool.rbi | 4 +- rbi/lib/openai/models/responses/response.rbi | 22 ++-- ..._code_interpreter_call_completed_event.rbi | 4 +- ...ode_interpreter_call_in_progress_event.rbi | 4 +- ...de_interpreter_call_interpreting_event.rbi | 4 +- .../responses/response_completed_event.rbi | 5 +- ...esponse_computer_tool_call_output_item.rbi | 6 +- .../responses/response_create_params.rbi | 9 +- .../responses/response_created_event.rbi | 5 +- .../responses/response_failed_event.rbi | 5 +- .../responses/response_in_progress_event.rbi | 5 +- .../responses/response_incomplete_event.rbi | 5 +- 
.../models/responses/response_input_item.rbi | 6 +- .../models/responses/response_usage.rbi | 8 +- .../models/responses/web_search_tool.rbi | 4 +- .../static_file_chunking_strategy_object.rbi | 5 +- ...ic_file_chunking_strategy_object_param.rbi | 5 +- rbi/lib/openai/models/upload.rbi | 5 +- rbi/lib/openai/models/vector_store.rbi | 10 +- .../models/vector_store_create_params.rbi | 4 +- .../models/vector_store_search_params.rbi | 4 +- .../models/vector_store_update_params.rbi | 4 +- .../vector_stores/vector_store_file.rbi | 6 +- .../vector_stores/vector_store_file_batch.rbi | 4 +- rbi/lib/openai/util.rbi | 2 + 89 files changed, 804 insertions(+), 337 deletions(-) diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 75e90393..7ff79566 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -107,7 +107,10 @@ module OpenAI def errors end - sig { params(_: OpenAI::Models::Batch::Errors).returns(OpenAI::Models::Batch::Errors) } + sig do + params(_: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)) + end def errors=(_) end @@ -184,7 +187,10 @@ module OpenAI def request_counts end - sig { params(_: OpenAI::Models::BatchRequestCounts).returns(OpenAI::Models::BatchRequestCounts) } + sig do + params(_: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)) + end def request_counts=(_) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index 3a393e39..14e315f2 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -206,8 +206,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash))) end def tool_resources=(_) end @@ -316,8 +316,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter) + params(_: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) end def code_interpreter=(_) end @@ -327,8 +327,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch) + params(_: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 0e2f75e4..5a4a056a 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -162,8 +162,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) + params( + _: 
T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) + ) end def tool_resources=(_) end @@ -321,8 +325,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter) + params( + _: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) end def code_interpreter=(_) end @@ -332,8 +340,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch) + params( + _: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) end def file_search=(_) end @@ -576,10 +588,16 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + _: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) .returns( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) end def static=(_) diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 31e96276..59dfcef6 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -65,7 +65,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Thread).returns(OpenAI::Models::Beta::Thread) } + sig do + params(_: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -105,7 +108,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -135,7 +141,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -165,7 +174,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -195,7 +207,10 @@ module OpenAI def data end - sig { params(_: 
OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -225,7 +240,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -255,7 +273,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -285,7 +306,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -315,7 +339,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -345,7 +372,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -375,7 +405,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -405,8 +438,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -438,8 +471,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -472,8 +505,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) end def data=(_) end @@ -508,8 +541,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + 
params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -541,8 +574,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -574,8 +607,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -607,8 +640,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -640,7 +673,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -671,7 +707,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -703,8 +742,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::MessageDeltaEvent) - .returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) + params(_: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) end def data=(_) end @@ -736,7 +775,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -767,7 +809,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -796,7 +841,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::ErrorObject).returns(OpenAI::Models::ErrorObject) } + sig do + params(_: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)) + end def data=(_) end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 9a1ac99e..cb86fbe8 100644 --- 
a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::AssistantToolChoiceFunction) - .returns(OpenAI::Models::Beta::AssistantToolChoiceFunction) + params(_: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)) end def function=(_) end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 595f430c..c1eb48ee 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -162,8 +162,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + ) end def tool_resources=(_) end @@ -373,8 +377,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter) + params( + _: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) end def code_interpreter=(_) end @@ -384,8 +392,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch) + params( + _: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) end def file_search=(_) end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 310aab2b..059290ff 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -19,8 +19,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch) - .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch) + params(_: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end @@ -64,8 +64,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions) - .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions) + params(_: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash)) end def ranking_options=(_) end diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index 645a4c1c..bc5fdf14 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -8,7 
+8,10 @@ module OpenAI def function end - sig { params(_: OpenAI::Models::FunctionDefinition).returns(OpenAI::Models::FunctionDefinition) } + sig do + params(_: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) + end def function=(_) end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 495664c0..98f7e150 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -29,7 +29,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -60,7 +63,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -92,8 +98,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::MessageDeltaEvent) - .returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) + params(_: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) end def data=(_) end @@ -125,7 +131,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -156,7 +165,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Message).returns(OpenAI::Models::Beta::Threads::Message) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) + end def data=(_) end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 51fa7db0..cfd0841f 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -31,8 +31,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -64,8 +64,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -98,8 +98,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) + params(_: 
T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) end def data=(_) end @@ -134,8 +134,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -167,8 +167,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -200,8 +200,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end @@ -233,8 +233,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) end def data=(_) end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index e18dd1f2..37b5fe0c 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -33,7 +33,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -63,7 +66,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -93,7 +99,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -123,7 +132,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -153,7 +165,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ 
-183,7 +198,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -213,7 +231,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -243,7 +264,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -273,7 +297,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end @@ -303,7 +330,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Threads::Run).returns(OpenAI::Models::Beta::Threads::Run) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) + end def data=(_) end diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 3a2ea58e..df3f74a9 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -54,8 +54,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash))) end def tool_resources=(_) end @@ -96,8 +96,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter) + params(_: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) end def code_interpreter=(_) end @@ -107,8 +107,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Thread::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::Thread::ToolResources::FileSearch) + params(_: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 91459ba1..31b70a45 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -172,8 +172,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread) - 
.returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread) + params(_: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)) end def thread=(_) end @@ -228,8 +228,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) + ) end def tool_resources=(_) end @@ -299,8 +303,16 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) - .returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) + params( + _: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ) + ) + .returns( + T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ) + ) end def truncation_strategy=(_) end @@ -459,8 +471,16 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) + params( + _: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) + ) + ) + .returns( + T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) + ) + ) end def tool_resources=(_) end @@ -791,8 +811,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter) + params( + _: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) end def code_interpreter=(_) end @@ -802,8 +832,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch) + params( + _: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, + OpenAI::Util::AnyHash + ) + ) end def file_search=(_) end @@ -1052,10 +1092,16 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + _: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) .returns( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + T.any( + 
OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) end def static=(_) @@ -1142,8 +1188,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter) + params( + _: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) end def code_interpreter=(_) end @@ -1153,8 +1209,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch) + params( + _: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) end def file_search=(_) end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index b02538f9..9539398e 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -43,8 +43,10 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)) + ) + .returns(T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash))) end def tool_resources=(_) end @@ -371,8 +373,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter) + params( + _: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) end def code_interpreter=(_) end @@ -382,8 +388,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch) + params( + _: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end @@ -626,10 +634,16 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + _: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) .returns( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ) ) end def static=(_) diff --git 
a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index b7db3495..95989f48 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -10,7 +10,10 @@ module OpenAI def data end - sig { params(_: OpenAI::Models::Beta::Thread).returns(OpenAI::Models::Beta::Thread) } + sig do + params(_: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) + end def data=(_) end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 2a3c0bc9..dae45106 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -30,8 +30,10 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) - .returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + ) + .returns(T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash))) end def tool_resources=(_) end @@ -66,8 +68,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter) - .returns(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter) + params( + _: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) end def code_interpreter=(_) end @@ -77,8 +83,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch) - .returns(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch) + params( + _: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index 75c0a985..29970857 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -18,8 +18,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation) - .returns(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation) + params( + _: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash)) end def file_citation=(_) end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 7031c979..8902d331 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -36,8 +36,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation) - 
.returns(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation) + params( + _: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) + ) end def file_citation=(_) end diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index c5e902d9..5eaca363 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -18,8 +18,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath) - .returns(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath) + params(_: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash)) end def file_path=(_) end diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index 3da258f2..1239b189 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -36,8 +36,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath) - .returns(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath) + params(_: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash)) end def file_path=(_) end diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 281b7cb7..da91c722 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -9,7 +9,10 @@ module OpenAI def image_file end - sig { params(_: OpenAI::Models::Beta::Threads::ImageFile).returns(OpenAI::Models::Beta::Threads::ImageFile) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)) + end def image_file=(_) end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 9dba68e1..6b7c7867 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageFileDelta) - .returns(OpenAI::Models::Beta::Threads::ImageFileDelta) + params(_: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)) end def image_file=(_) end diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index a6c65020..fa47f814 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -9,7 +9,10 @@ module OpenAI def image_url end - sig 
{ params(_: OpenAI::Models::Beta::Threads::ImageURL).returns(OpenAI::Models::Beta::Threads::ImageURL) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)) + end def image_url=(_) end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index 9f8e8803..d4bec7be 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageURLDelta) - .returns(OpenAI::Models::Beta::Threads::ImageURLDelta) + params(_: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)) end def image_url=(_) end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 96603146..90e7e16f 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -111,8 +111,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) + ) end def incomplete_details=(_) end diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 5df8b179..299d8e92 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -20,7 +20,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::MessageDelta).returns(OpenAI::Models::Beta::Threads::MessageDelta) + params(_: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)) end def delta=(_) end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index ef418354..6cbe1e05 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -23,8 +23,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function) - .returns(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function) + params( + _: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) + ) end def function=(_) end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index d1587a17..3b24496c 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -77,8 +77,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) - 
.returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash))) end def incomplete_details=(_) end @@ -100,8 +100,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Run::LastError)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Run::LastError)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) end def last_error=(_) end @@ -178,8 +178,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash))) end def required_action=(_) end @@ -365,8 +365,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash))) end def truncation_strategy=(_) end @@ -378,8 +378,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))) end def usage=(_) end @@ -644,8 +644,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) - .returns(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) + params( + _: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) + ) end def submit_tool_outputs=(_) end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 46e8ddc5..d744dbfc 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -331,8 +331,16 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) + params( + _: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ) + ) + .returns( + T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ) + ) end def truncation_strategy=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index d598eb87..3b9ea8e2 100644 --- 
a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -29,8 +29,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image) - .returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image) + params( + _: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) + ) end def image=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 749ba465..f4e3da46 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -21,8 +21,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter) - .returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter) + params( + _: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) end def code_interpreter=(_) end @@ -196,10 +206,16 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image + _: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + OpenAI::Util::AnyHash + ) ) .returns( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + OpenAI::Util::AnyHash + ) ) end def image=(_) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index e369160c..292195bb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -40,8 +40,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter) - .returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter) + params( + _: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + OpenAI::Util::AnyHash + ) + ) end def code_interpreter=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 13b22ad3..3bd957cc 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -21,8 +21,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch) - .returns(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch) + params( + _: 
T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash)) end def file_search=(_) end @@ -64,8 +66,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions) - .returns(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions) + params( + _: T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + OpenAI::Util::AnyHash + ) + ) end def ranking_options=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 2fbc10a5..9c94d56e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function) - .returns(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash)) end def function=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index c026f468..50168e95 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -40,8 +40,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function) - .returns(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function) + params( + _: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) + ) end def function=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index ba7932c9..4293b0b6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -11,8 +11,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation) - .returns(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation) + params( + _: T.any( + OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + OpenAI::Util::AnyHash + ) + ) end def message_creation=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 06e7495e..b5d5805b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -79,8 +79,10 @@ module OpenAI end sig do - 
params(_: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) + params( + _: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)) + ) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash))) end def last_error=(_) end @@ -189,8 +191,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) - .returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) + params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash))) end def usage=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index fe5c77a3..62c90cff 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStepDelta) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) + params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)) end def delta=(_) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 6d4b72e8..03db1624 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -20,8 +20,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation) + params( + _: T.any( + OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + OpenAI::Util::AnyHash + ) + ) end def message_creation=(_) end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 5a3f345f..30ab7921 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -9,7 +9,10 @@ module OpenAI def text end - sig { params(_: OpenAI::Models::Beta::Threads::Text).returns(OpenAI::Models::Beta::Threads::Text) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)) + end def text=(_) end diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index 80145b24..c021cb96 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -27,7 +27,10 @@ module OpenAI def text end - sig { params(_: OpenAI::Models::Beta::Threads::TextDelta).returns(OpenAI::Models::Beta::Threads::TextDelta) } + sig do + params(_: T.any(OpenAI::Models::Beta::Threads::TextDelta, 
OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash)) + end def text=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index d47b79d1..3271a675 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -82,7 +82,10 @@ module OpenAI def usage end - sig { params(_: OpenAI::Models::CompletionUsage).returns(OpenAI::Models::CompletionUsage) } + sig do + params(_: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) + end def usage=(_) end @@ -164,8 +167,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash))) end def logprobs=(_) end @@ -176,7 +179,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionMessage).returns(OpenAI::Models::Chat::ChatCompletionMessage) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)) end def message=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 737d825e..fe1dc7f0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -20,8 +20,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) + params( + _: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) + ) end def audio=(_) end @@ -84,8 +88,16 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) + params( + _: T.nilable( + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) + ) + ) + .returns( + T.nilable( + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) + ) + ) end def function_call=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index f5530b98..56cab68c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -89,7 +89,10 @@ module OpenAI def usage end - sig { params(_: T.nilable(OpenAI::Models::CompletionUsage)).returns(T.nilable(OpenAI::Models::CompletionUsage)) } + sig do + params(_: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))) + end def usage=(_) end @@ -146,8 +149,8 @@ module OpenAI end sig do 
- params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)) end def delta=(_) end @@ -184,8 +187,12 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs)) + params( + _: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) + ) + .returns( + T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) + ) end def logprobs=(_) end @@ -233,8 +240,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall) + params( + _: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash) + ) end def function_call=(_) end @@ -374,8 +385,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function) + params( + _: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash) + ) end def function=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 340e2a97..521b44fb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -26,8 +26,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPart::File::File) - .returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash)) end def file=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 01893dab..c59d5e11 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -9,8 +9,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash)) end def image_url=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 0a14a898..1c7e9d96 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi 
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -9,8 +9,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) + params( + _: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash) + ) end def input_audio=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 041b818c..850a6c60 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -52,8 +52,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))) end def audio=(_) end @@ -65,8 +65,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall) - .returns(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash)) end def function_call=(_) end @@ -132,8 +132,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation) - .returns(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation) + params( + _: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash) + ) end def url_citation=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index 7ed40089..09f27b3e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -19,8 +19,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function) - .returns(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash)) end def function=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 421a020f..602ab1d6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -9,8 +9,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) - .returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) + params(_: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, 
OpenAI::Util::AnyHash)) end def function=(_) end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index c25c2648..7cf95402 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -8,7 +8,10 @@ module OpenAI def function end - sig { params(_: OpenAI::Models::FunctionDefinition).returns(OpenAI::Models::FunctionDefinition) } + sig do + params(_: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) + end def function=(_) end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 851c6948..9ce1cd00 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -83,8 +83,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))) end def audio=(_) end @@ -275,8 +275,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash))) end def prediction=(_) end @@ -426,8 +426,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) end def stream_options=(_) end @@ -540,8 +540,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions) - .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions) + params(_: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash)) end def web_search_options=(_) end @@ -897,8 +897,16 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)) - .returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)) + params( + _: T.nilable( + T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash) + ) + ) + .returns( + T.nilable( + T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash) + ) + ) end def user_location=(_) end @@ -961,8 +969,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) - .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) + params( + _: T.any( + 
OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + OpenAI::Util::AnyHash + ) + ) end def approximate=(_) end diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index 5c9f6e01..c4a10497 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -65,7 +65,10 @@ module OpenAI def usage end - sig { params(_: OpenAI::Models::CompletionUsage).returns(OpenAI::Models::CompletionUsage) } + sig do + params(_: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) + end def usage=(_) end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index a03a5dd9..5ecf87af 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -31,8 +31,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::CompletionChoice::Logprobs)) - .returns(T.nilable(OpenAI::Models::CompletionChoice::Logprobs)) + params(_: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))) end def logprobs=(_) end diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 4b26334b..e2edd3e9 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -194,8 +194,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) + params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) end def stream_options=(_) end diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index de91da0f..411d8384 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -36,8 +36,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::CompletionUsage::CompletionTokensDetails) - .returns(OpenAI::Models::CompletionUsage::CompletionTokensDetails) + params(_: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash)) end def completion_tokens_details=(_) end @@ -48,8 +48,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::CompletionUsage::PromptTokensDetails) - .returns(OpenAI::Models::CompletionUsage::PromptTokensDetails) + params(_: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)) end def prompt_tokens_details=(_) end diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index c095b791..aa1ebc37 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -36,8 +36,8 @@ module OpenAI end sig do - params(_: 
OpenAI::Models::CreateEmbeddingResponse::Usage) - .returns(OpenAI::Models::CreateEmbeddingResponse::Usage) + params(_: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)) end def usage=(_) end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index f688b4b2..bc3a62a0 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -29,8 +29,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) - .returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) + params(_: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash))) end def error=(_) end @@ -62,8 +62,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) + params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash)) end def hyperparameters=(_) end @@ -201,8 +201,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method) + params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)) end def method_=(_) end @@ -432,8 +432,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo) + params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)) end def dpo=(_) end @@ -444,8 +444,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised) + params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash)) end def supervised=(_) end @@ -494,8 +494,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters) + params( + _: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + ) end def hyperparameters=(_) end @@ -624,8 +628,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters) + params( + _: T.any( + OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + 
OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, + OpenAI::Util::AnyHash + ) + ) end def hyperparameters=(_) end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index fd3bcf9a..e7898d88 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -22,8 +22,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) - .returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) + params(_: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)) end def wandb=(_) end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 9142e294..eb52be26 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -52,8 +52,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters) + params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash)) end def hyperparameters=(_) end @@ -90,8 +90,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method) + params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)) end def method_=(_) end @@ -309,8 +309,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) + params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash)) end def wandb=(_) end @@ -405,8 +405,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo) + params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)) end def dpo=(_) end @@ -417,8 +417,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised) + params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash)) end def supervised=(_) end @@ -467,8 +467,12 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters) + params( + _: 
T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + ) + .returns( + T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + ) end def hyperparameters=(_) end @@ -597,8 +601,18 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters) + params( + _: T.any( + OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, + OpenAI::Util::AnyHash + ) + ) + .returns( + T.any( + OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, + OpenAI::Util::AnyHash + ) + ) end def hyperparameters=(_) end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index 54fe9d93..a04c9043 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -47,8 +47,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) - .returns(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) + params( + _: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash)) end def metrics=(_) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 0ebd7ee8..4f6d074e 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -8,7 +8,10 @@ module OpenAI def categories end - sig { params(_: OpenAI::Models::Moderation::Categories).returns(OpenAI::Models::Moderation::Categories) } + sig do + params(_: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash)) + end def categories=(_) end @@ -18,8 +21,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Moderation::CategoryAppliedInputTypes) - .returns(OpenAI::Models::Moderation::CategoryAppliedInputTypes) + params(_: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash)) end def category_applied_input_types=(_) end @@ -29,7 +32,10 @@ module OpenAI def category_scores end - sig { params(_: OpenAI::Models::Moderation::CategoryScores).returns(OpenAI::Models::Moderation::CategoryScores) } + sig do + params(_: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash)) + end def category_scores=(_) end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index 222a1447..e5d12ba8 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -9,8 +9,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::ModerationImageURLInput::ImageURL) - .returns(OpenAI::Models::ModerationImageURLInput::ImageURL) + params(_: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)) + 
.returns(T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)) end def image_url=(_) end diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index de32d2a7..219e6d13 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -9,8 +9,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) - .returns(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) + params(_: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) end def json_schema=(_) end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 71be889d..3eba1cb5 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -50,8 +50,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::FileSearchTool::RankingOptions) - .returns(OpenAI::Models::Responses::FileSearchTool::RankingOptions) + params(_: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash)) end def ranking_options=(_) end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index e785499b..b480096f 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Responses::ResponseError)) - .returns(T.nilable(OpenAI::Models::Responses::ResponseError)) + params(_: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash))) end def error=(_) end @@ -40,8 +40,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails)) - .returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails)) + params(_: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash))) end def incomplete_details=(_) end @@ -324,7 +324,10 @@ module OpenAI def reasoning end - sig { params(_: T.nilable(OpenAI::Models::Reasoning)).returns(T.nilable(OpenAI::Models::Reasoning)) } + sig do + params(_: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) + end def reasoning=(_) end @@ -351,8 +354,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseTextConfig) - .returns(OpenAI::Models::Responses::ResponseTextConfig) + params(_: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) end def text=(_) end @@ -381,7 +384,10 @@ module OpenAI def usage end - sig { params(_: OpenAI::Models::Responses::ResponseUsage).returns(OpenAI::Models::Responses::ResponseUsage) } + sig do + params(_: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)) + 
.returns(T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)) + end def usage=(_) end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 389d9f49..7ced83f5 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -10,8 +10,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) - .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) + params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) end def code_interpreter_call=(_) end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 9d0d0524..afa6822d 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -10,8 +10,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) - .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) + params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) end def code_interpreter_call=(_) end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index 4757018f..13bb3219 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -10,8 +10,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) - .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) + params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) end def code_interpreter_call=(_) end diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 7db04649..7129137a 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -9,7 +9,10 @@ module OpenAI def response end - sig { params(_: OpenAI::Models::Responses::Response).returns(OpenAI::Models::Responses::Response) } + sig do + params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + end def response=(_) end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 9cdcacd2..7d10f0bc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ 
b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -28,8 +28,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) + params( + _: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash)) end def output=(_) end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 9c2938bb..5a20a69d 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -196,7 +196,10 @@ module OpenAI def reasoning end - sig { params(_: T.nilable(OpenAI::Models::Reasoning)).returns(T.nilable(OpenAI::Models::Reasoning)) } + sig do + params(_: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) + end def reasoning=(_) end @@ -231,8 +234,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseTextConfig) - .returns(OpenAI::Models::Responses::ResponseTextConfig) + params(_: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) end def text=(_) end diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index c68b3697..84ab75a8 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -9,7 +9,10 @@ module OpenAI def response end - sig { params(_: OpenAI::Models::Responses::Response).returns(OpenAI::Models::Responses::Response) } + sig do + params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + end def response=(_) end diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index c6d9fd32..98d7cb48 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -9,7 +9,10 @@ module OpenAI def response end - sig { params(_: OpenAI::Models::Responses::Response).returns(OpenAI::Models::Responses::Response) } + sig do + params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + end def response=(_) end diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index 57a31950..3f82d5d8 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -9,7 +9,10 @@ module OpenAI def response end - sig { params(_: OpenAI::Models::Responses::Response).returns(OpenAI::Models::Responses::Response) } + sig do + params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + end def response=(_) end diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi 
b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index 9ca0c85e..dfe15923 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -9,7 +9,10 @@ module OpenAI def response end - sig { params(_: OpenAI::Models::Responses::Response).returns(OpenAI::Models::Responses::Response) } + sig do + params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) + end def response=(_) end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 496ac76e..a85418a6 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -207,8 +207,10 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) + params( + _: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + ) + .returns(T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash)) end def output=(_) end diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 90401965..2e0872c8 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -19,8 +19,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) - .returns(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) + params(_: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash)) end def input_tokens_details=(_) end @@ -40,8 +40,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails) - .returns(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails) + params(_: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash)) end def output_tokens_details=(_) end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index a0ae2d7c..7b583956 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -37,8 +37,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)) - .returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)) + params(_: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))) end def user_location=(_) end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index 99645473..1f106d7d 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -7,7 +7,10 @@ module OpenAI def static end - sig { params(_: 
OpenAI::Models::StaticFileChunkingStrategy).returns(OpenAI::Models::StaticFileChunkingStrategy) } + sig do + params(_: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) + end def static=(_) end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index a7ed94e1..617954af 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -7,7 +7,10 @@ module OpenAI def static end - sig { params(_: OpenAI::Models::StaticFileChunkingStrategy).returns(OpenAI::Models::StaticFileChunkingStrategy) } + sig do + params(_: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) + end def static=(_) end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 05b6e1e2..7f0dbe40 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -85,7 +85,10 @@ module OpenAI def file end - sig { params(_: T.nilable(OpenAI::Models::FileObject)).returns(T.nilable(OpenAI::Models::FileObject)) } + sig do + params(_: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash))) + end def file=(_) end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 188bfd85..92aa8be2 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -25,7 +25,10 @@ module OpenAI def file_counts end - sig { params(_: OpenAI::Models::VectorStore::FileCounts).returns(OpenAI::Models::VectorStore::FileCounts) } + sig do + params(_: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)) + end def file_counts=(_) end @@ -98,7 +101,10 @@ module OpenAI def expires_after end - sig { params(_: OpenAI::Models::VectorStore::ExpiresAfter).returns(OpenAI::Models::VectorStore::ExpiresAfter) } + sig do + params(_: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash)) + end def expires_after=(_) end diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 89042cab..b3c4a55a 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -44,8 +44,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter) - .returns(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter) + params(_: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash)) end def expires_after=(_) end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 1e785923..a8bd0484 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -43,8 +43,8 @@ module OpenAI end sig do - params(_: 
OpenAI::Models::VectorStoreSearchParams::RankingOptions) - .returns(OpenAI::Models::VectorStoreSearchParams::RankingOptions) + params(_: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash)) end def ranking_options=(_) end diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 0dbd8974..4e5c605f 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -12,8 +12,8 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) - .returns(T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) + params(_: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash))) + .returns(T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash))) end def expires_after=(_) end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index fc8ccf5a..ea53495c 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -29,8 +29,10 @@ module OpenAI end sig do - params(_: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) - .returns(T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) + params( + _: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)) + ) + .returns(T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash))) end def last_error=(_) end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 4627b63f..61961f17 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts) - .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts) + params(_: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash)) + .returns(T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash)) end def file_counts=(_) end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index c6676a16..3ad41179 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -3,6 +3,8 @@ module OpenAI # @api private module Util + AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + # @api private sig { returns(Float) } def self.monotonic_secs From 0d25ee4190afc4516bba53b96757d9a8fad611cb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 23:05:26 +0000 Subject: [PATCH 047/295] fix: yard example tag formatting (#53) --- lib/openai/base_client.rb | 2 +- lib/openai/base_model.rb | 106 +++++++++++++---------------- lib/openai/base_page.rb | 36 ++++------ lib/openai/base_stream.rb | 26 +++---- lib/openai/cursor_page.rb | 36 ++++------ lib/openai/page.rb | 36 ++++------ lib/openai/request_options.rb | 2 +- lib/openai/stream.rb | 26 +++---- rbi/lib/openai/request_options.rbi | 4 +- sig/openai/request_options.rbs | 
4 +- 10 files changed, 121 insertions(+), 157 deletions(-) diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb index 4178f663..8234a2aa 100644 --- a/lib/openai/base_client.rb +++ b/lib/openai/base_client.rb @@ -220,7 +220,7 @@ def initialize( # # @option opts [Hash{String=>String, nil}, nil] :extra_headers # - # @option opts [Hash{Symbol=>Object}, nil] :extra_body + # @option opts [Object, nil] :extra_body # # @option opts [Integer, nil] :max_retries # diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index a1f9e29c..c12857ab 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -288,33 +288,29 @@ def try_strict_coerce(value) # values safely. # # @example - # ```ruby - # # `chat_model` is a `OpenAI::Models::ChatModel` - # case chat_model - # when OpenAI::Models::ChatModel::O3_MINI - # # ... - # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 - # # ... - # when OpenAI::Models::ChatModel::O1 - # # ... - # else - # puts(chat_model) - # end - # ``` + # # `chat_model` is a `OpenAI::Models::ChatModel` + # case chat_model + # when OpenAI::Models::ChatModel::O3_MINI + # # ... + # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 + # # ... + # when OpenAI::Models::ChatModel::O1 + # # ... + # else + # puts(chat_model) + # end # # @example - # ```ruby - # case chat_model - # in :"o3-mini" - # # ... - # in :"o3-mini-2025-01-31" - # # ... - # in :o1 - # # ... - # else - # puts(chat_model) - # end - # ``` + # case chat_model + # in :"o3-mini" + # # ... + # in :"o3-mini-2025-01-31" + # # ... + # in :o1 + # # ... + # else + # puts(chat_model) + # end module Enum include OpenAI::Converter @@ -387,33 +383,29 @@ def try_strict_coerce(value) # @api private # # @example - # ```ruby - # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` - # case chat_completion_content_part - # when OpenAI::Models::Chat::ChatCompletionContentPartText - # puts(chat_completion_content_part.text) - # when OpenAI::Models::Chat::ChatCompletionContentPartImage - # puts(chat_completion_content_part.image_url) - # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio - # puts(chat_completion_content_part.input_audio) - # else - # puts(chat_completion_content_part) - # end - # ``` + # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` + # case chat_completion_content_part + # when OpenAI::Models::Chat::ChatCompletionContentPartText + # puts(chat_completion_content_part.text) + # when OpenAI::Models::Chat::ChatCompletionContentPartImage + # puts(chat_completion_content_part.image_url) + # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio + # puts(chat_completion_content_part.input_audio) + # else + # puts(chat_completion_content_part) + # end # # @example - # ```ruby - # case chat_completion_content_part - # in {type: :text, text: text} - # puts(text) - # in {type: :image_url, image_url: image_url} - # puts(image_url) - # in {type: :input_audio, input_audio: input_audio} - # puts(input_audio) - # else - # puts(chat_completion_content_part) - # end - # ``` + # case chat_completion_content_part + # in {type: :text, text: text} + # puts(text) + # in {type: :image_url, image_url: image_url} + # puts(image_url) + # in {type: :input_audio, input_audio: input_audio} + # puts(input_audio) + # else + # puts(chat_completion_content_part) + # end module Union include OpenAI::Converter @@ -878,14 +870,12 @@ def initialize(type_info, spec = {}) # @abstract # # @example - # ```ruby - # # 
`comparison_filter` is a `OpenAI::Models::ComparisonFilter` - # comparison_filter => { - # key: key, - # type: type, - # value: value - # } - # ``` + # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` + # comparison_filter => { + # key: key, + # type: type, + # value: value + # } class BaseModel extend OpenAI::Converter diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb index 359bb6c7..3c6c0e71 100644 --- a/lib/openai/base_page.rb +++ b/lib/openai/base_page.rb @@ -2,32 +2,26 @@ module OpenAI # @example - # ```ruby - # if page.has_next? - # page = page.next_page - # end - # ``` + # if page.has_next? + # page = page.next_page + # end # # @example - # ```ruby - # page.auto_paging_each do |completion| - # puts(completion) - # end - # ``` + # page.auto_paging_each do |completion| + # puts(completion) + # end # # @example - # ```ruby - # completions = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a + # completions = + # page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # - # completions => Array - # ``` + # completions => Array module BasePage # rubocop:disable Lint/UnusedMethodArgument diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb index a4dbe0bc..519cf83d 100644 --- a/lib/openai/base_stream.rb +++ b/lib/openai/base_stream.rb @@ -2,24 +2,20 @@ module OpenAI # @example - # ```ruby - # stream.each do |chunk| - # puts(chunk) - # end - # ``` + # stream.each do |chunk| + # puts(chunk) + # end # # @example - # ```ruby - # chunks = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a + # chunks = + # stream + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # - # chunks => Array - # ``` + # chunks => Array module BaseStream include Enumerable diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 341d2479..3ba7ab46 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -2,32 +2,26 @@ module OpenAI # @example - # ```ruby - # if cursor_page.has_next? - # cursor_page = cursor_page.next_page - # end - # ``` + # if cursor_page.has_next? + # cursor_page = cursor_page.next_page + # end # # @example - # ```ruby - # cursor_page.auto_paging_each do |completion| - # puts(completion) - # end - # ``` + # cursor_page.auto_paging_each do |completion| + # puts(completion) + # end # # @example - # ```ruby - # completions = - # cursor_page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a + # completions = + # cursor_page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # - # completions => Array - # ``` + # completions => Array class CursorPage include OpenAI::BasePage diff --git a/lib/openai/page.rb b/lib/openai/page.rb index d8834323..6dd82f08 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -2,32 +2,26 @@ module OpenAI # @example - # ```ruby - # if page.has_next? - # page = page.next_page - # end - # ``` + # if page.has_next? + # page = page.next_page + # end # # @example - # ```ruby - # page.auto_paging_each do |model| - # puts(model) - # end - # ``` + # page.auto_paging_each do |model| + # puts(model) + # end # # @example - # ```ruby - # models = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a + # models = + # page + # .to_enum + # .lazy + # .select { _1.object_id.even? 
} + # .map(&:itself) + # .take(2) + # .to_a # - # models => Array - # ``` + # models => Array class Page include OpenAI::BasePage diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index a3245591..917753db 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -83,7 +83,7 @@ def self.validate!(opts) # Extra data to send with the request. These are deep merged into any data # generated as part of the normal request. # - # @return [Hash{Symbol=>Object}, nil] + # @return [Object, nil] optional :extra_body, OpenAI::HashOf[OpenAI::Unknown] # @!attribute max_retries diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index e510a11d..fc67dadc 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -2,24 +2,20 @@ module OpenAI # @example - # ```ruby - # stream.each do |event| - # puts(event) - # end - # ``` + # stream.each do |event| + # puts(event) + # end # # @example - # ```ruby - # events = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a + # events = + # stream + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a # - # events => Array - # ``` + # events => Array class Stream include OpenAI::BaseStream diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index ded742c2..8c7934b0 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -73,11 +73,11 @@ module OpenAI # Extra data to send with the request. These are deep merged into any data # generated as part of the normal request. - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + sig { returns(T.nilable(T.anything)) } def extra_body end - sig { params(_: T.nilable(T::Hash[Symbol, T.anything])).returns(T.nilable(T::Hash[Symbol, T.anything])) } + sig { params(_: T.nilable(T.anything)).returns(T.nilable(T.anything)) } def extra_body=(_) end diff --git a/sig/openai/request_options.rbs b/sig/openai/request_options.rbs index 97561491..26e1c1fa 100644 --- a/sig/openai/request_options.rbs +++ b/sig/openai/request_options.rbs @@ -17,7 +17,7 @@ module OpenAI idempotency_key: String?, extra_query: ::Hash[String, (::Array[String] | String)?]?, extra_headers: ::Hash[String, String?]?, - extra_body: ::Hash[Symbol, top]?, + extra_body: top?, max_retries: Integer?, timeout: Float? } @@ -31,7 +31,7 @@ module OpenAI attr_accessor extra_headers: ::Hash[String, String?]? - attr_accessor extra_body: ::Hash[Symbol, top]? + attr_accessor extra_body: top? attr_accessor max_retries: Integer? 
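
The `extra_body` loosening in this patch (from `Hash{Symbol=>Object}` to a bare nilable `Object`) means per-request options can carry any JSON-serializable payload, which is then deep merged into the data generated for the normal request. Below is a minimal usage sketch of that behavior; the `OpenAI::Client` entry point and the `request_options:` keyword are assumed from the SDK's public surface and are not established by this patch itself.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: :user, content: "ping"}],
  request_options: {
    # `extra_body` previously had to be a Hash{Symbol=>Object}; after this
    # patch it is typed as `Object, nil` and deep merged into any data
    # generated as part of the normal request.
    extra_body: {metadata: {trace_id: "abc-123"}}
  }
)
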
From faccf2b8e0f6c166476f5ad951667075890aecdc Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 23:23:25 +0000 Subject: [PATCH 048/295] feat: collapse anonymous enum into unions (#54) --- lib/openai/models/all_models.rb | 24 ++++++++------- lib/openai/models/responses/response.rb | 4 +-- .../responses/response_create_params.rb | 4 +-- lib/openai/models/responses_model.rb | 24 ++++++++------- lib/openai/resources/responses.rb | 4 +-- rbi/lib/openai/models/all_models.rbi | 24 +++++---------- rbi/lib/openai/models/responses/response.rbi | 30 ++++--------------- .../responses/response_create_params.rbi | 18 ++++------- rbi/lib/openai/models/responses_model.rbi | 25 +++++----------- rbi/lib/openai/resources/responses.rbi | 4 +-- sig/openai/models/all_models.rbs | 30 ++++++++----------- sig/openai/models/responses_model.rbs | 30 ++++++++----------- 12 files changed, 86 insertions(+), 135 deletions(-) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index f11ccb01..0727842f 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -5,26 +5,30 @@ module Models module AllModels extend OpenAI::Union + # @!group + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + # @!endgroup + variant String variant enum: -> { OpenAI::Models::ChatModel } - variant enum: -> { OpenAI::Models::AllModels::UnionMember2 } + variant const: OpenAI::Models::AllModels::O1_PRO - module UnionMember2 - extend OpenAI::Enum + variant const: OpenAI::Models::AllModels::O1_PRO_2025_03_19 - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + variant const: OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW - finalize! - end + variant const: OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW_2025_03_11 # @!parse # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::UnionMember2)] + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] # def variants; end # end end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 71264a0f..a9ba3f8c 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -57,7 +57,7 @@ class Response < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. 
# - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute object @@ -222,7 +222,7 @@ class Response < OpenAI::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + # # @param model [String, Symbol, OpenAI::Models::ChatModel] # # @param output [Array] # # @param parallel_tool_calls [Boolean] # # @param temperature [Float, nil] diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index ebca56e3..45eb0019 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -29,7 +29,7 @@ class ResponseCreateParams < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute include @@ -198,7 +198,7 @@ class ResponseCreateParams < OpenAI::BaseModel # @!parse # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] + # # @param model [String, Symbol, OpenAI::Models::ChatModel] # # @param include [Array, nil] # # @param instructions [String, nil] # # @param max_output_tokens [Integer, nil] diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index c80cbfbd..51c15860 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -5,26 +5,30 @@ module Models module ResponsesModel extend OpenAI::Union + # @!group + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + # @!endgroup + variant String variant enum: -> { OpenAI::Models::ChatModel } - variant enum: -> { OpenAI::Models::ResponsesModel::UnionMember2 } + variant const: OpenAI::Models::ResponsesModel::O1_PRO - module UnionMember2 - extend OpenAI::Enum + variant const: OpenAI::Models::ResponsesModel::O1_PRO_2025_03_19 - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + variant const: OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW - finalize! 
- end + variant const: OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW_2025_03_11 # @!parse # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::UnionMember2)] + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] # def variants; end # end end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 738b3a39..f12fe46e 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -30,7 +30,7 @@ class Responses # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @option params [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare @@ -166,7 +166,7 @@ def create(params) # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @option params [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::UnionMember2] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. 
Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 5fb326c8..feedfa10 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -6,24 +6,16 @@ module OpenAI extend OpenAI::Union Variants = - type_template(:out) do - { - fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::UnionMember2::OrSymbol) - } - end + type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol)} } - module UnionMember2 - extend OpenAI::Enum + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::TaggedSymbol) } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::UnionMember2) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::UnionMember2::TaggedSymbol) } - - O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::UnionMember2::OrSymbol) - end + O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::OrSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::OrSymbol) end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index b480096f..d36aa43f 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -81,11 +81,7 @@ module OpenAI # available models. 
sig do returns( - T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) + T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) ) end def model @@ -93,18 +89,10 @@ module OpenAI sig do params( - _: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) + _: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) ) .returns( - T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ) + T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) ) end def model=(_) @@ -410,11 +398,7 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ), + model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, @@ -489,11 +473,7 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( - String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol - ), + model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), object: Symbol, output: T::Array[ T.any( diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 5a20a69d..be470d61 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -91,21 +91,13 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. 
- sig do - returns( - T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) - end + sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) } def model end sig do - params( - _: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) - .returns( - T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - ) + params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) + .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) end def model=(_) end @@ -394,7 +386,7 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), @@ -469,7 +461,7 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index ddb9e74a..1c929891 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -6,25 +6,16 @@ module OpenAI extend OpenAI::Union Variants = - type_template(:out) do - { - fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - } - end + type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)} } - module UnionMember2 - extend OpenAI::Enum + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } + OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::TaggedSymbol) } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::UnionMember2) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::UnionMember2::TaggedSymbol) } - - O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW = - T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol) - end + O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::OrSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::OrSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::OrSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::OrSymbol) end end end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 701c8406..3ed27f5a 100644 --- 
a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -38,7 +38,7 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), @@ -213,7 +213,7 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::UnionMember2::OrSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 7427ba06..8a1f89cf 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,29 +3,23 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | OpenAI::Models::AllModels::union_member2 + | :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" module AllModels extend OpenAI::Union - type union_member2 = - :"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11" + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - module UnionMember2 - extend OpenAI::Enum - - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - - def self.values: -> ::Array[OpenAI::Models::AllModels::union_member2] - end - - def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::AllModels::union_member2] + def self.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11")] end end end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 3bfeacd2..70ac005a 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -3,29 +3,23 @@ module OpenAI type responses_model = String | OpenAI::Models::chat_model - | OpenAI::Models::ResponsesModel::union_member2 + | :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" module ResponsesModel extend OpenAI::Union - type union_member2 = - :"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11" + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - module UnionMember2 - extend OpenAI::Enum - - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - - def self.values: -> ::Array[OpenAI::Models::ResponsesModel::union_member2] - end - - def self.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::ResponsesModel::union_member2] + def self.variants: -> [String, 
OpenAI::Models::chat_model, (:"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11")] end end end From 3fc6466bbd970290806879143886ee8cbadc243f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 23:39:08 +0000 Subject: [PATCH 049/295] chore: add type annotations for enum and union member listing methods (#55) --- .../models/audio/speech_create_params.rb | 12 ++ lib/openai/models/audio/speech_model.rb | 6 + .../audio/transcription_create_params.rb | 6 + .../models/audio/transcription_include.rb | 6 + .../models/audio/translation_create_params.rb | 6 + lib/openai/models/audio_model.rb | 6 + lib/openai/models/audio_response_format.rb | 6 + lib/openai/models/batch.rb | 6 + lib/openai/models/batch_create_params.rb | 12 ++ .../models/beta/assistant_list_params.rb | 6 + .../models/beta/assistant_tool_choice.rb | 6 + .../beta/assistant_tool_choice_option.rb | 6 + lib/openai/models/beta/file_search_tool.rb | 6 + .../beta/thread_create_and_run_params.rb | 12 ++ .../models/beta/thread_create_params.rb | 6 + lib/openai/models/beta/threads/image_file.rb | 6 + .../models/beta/threads/image_file_delta.rb | 6 + lib/openai/models/beta/threads/image_url.rb | 6 + .../models/beta/threads/image_url_delta.rb | 6 + lib/openai/models/beta/threads/message.rb | 18 +++ .../beta/threads/message_create_params.rb | 6 + .../models/beta/threads/message_delta.rb | 6 + .../beta/threads/message_list_params.rb | 6 + lib/openai/models/beta/threads/run.rb | 18 +++ .../models/beta/threads/run_create_params.rb | 12 ++ .../models/beta/threads/run_list_params.rb | 6 + lib/openai/models/beta/threads/run_status.rb | 6 + .../threads/runs/file_search_tool_call.rb | 12 ++ .../models/beta/threads/runs/run_step.rb | 18 +++ .../beta/threads/runs/run_step_include.rb | 6 + .../beta/threads/runs/step_list_params.rb | 6 + lib/openai/models/chat/chat_completion.rb | 12 ++ .../chat/chat_completion_audio_param.rb | 12 ++ .../models/chat/chat_completion_chunk.rb | 24 ++++ .../chat_completion_content_part_image.rb | 6 + ...hat_completion_content_part_input_audio.rb | 6 + .../models/chat/chat_completion_modality.rb | 6 + .../models/chat/chat_completion_role.rb | 6 + .../chat_completion_tool_choice_option.rb | 6 + .../models/chat/completion_create_params.rb | 24 ++++ .../models/chat/completion_list_params.rb | 6 + .../chat/completions/message_list_params.rb | 6 + lib/openai/models/chat_model.rb | 6 + lib/openai/models/comparison_filter.rb | 6 + lib/openai/models/completion_choice.rb | 6 + lib/openai/models/compound_filter.rb | 6 + lib/openai/models/embedding_create_params.rb | 6 + lib/openai/models/embedding_model.rb | 6 + lib/openai/models/file_list_params.rb | 6 + lib/openai/models/file_object.rb | 12 ++ lib/openai/models/file_purpose.rb | 6 + .../models/fine_tuning/fine_tuning_job.rb | 12 ++ .../fine_tuning/fine_tuning_job_event.rb | 12 ++ .../models/fine_tuning/job_create_params.rb | 6 + .../models/image_create_variation_params.rb | 12 ++ lib/openai/models/image_edit_params.rb | 12 ++ lib/openai/models/image_generate_params.rb | 24 ++++ lib/openai/models/image_model.rb | 6 + lib/openai/models/moderation.rb | 78 +++++++++++++ lib/openai/models/moderation_model.rb | 6 + lib/openai/models/reasoning.rb | 6 + lib/openai/models/reasoning_effort.rb | 6 + lib/openai/models/responses/computer_tool.rb | 6 + .../models/responses/easy_input_message.rb | 12 ++ .../models/responses/file_search_tool.rb | 6 + 
.../responses/input_item_list_params.rb | 6 + lib/openai/models/responses/response.rb | 12 ++ .../response_code_interpreter_tool_call.rb | 6 + .../responses/response_computer_tool_call.rb | 18 +++ ...response_computer_tool_call_output_item.rb | 6 + .../responses/response_create_params.rb | 6 + lib/openai/models/responses/response_error.rb | 6 + .../response_file_search_tool_call.rb | 6 + .../responses/response_function_tool_call.rb | 6 + ...response_function_tool_call_output_item.rb | 6 + .../responses/response_function_web_search.rb | 6 + .../models/responses/response_includable.rb | 6 + .../models/responses/response_input_audio.rb | 6 + .../models/responses/response_input_image.rb | 6 + .../models/responses/response_input_item.rb | 30 +++++ .../responses/response_input_message_item.rb | 18 +++ .../responses/response_output_message.rb | 6 + .../responses/response_reasoning_item.rb | 6 + .../models/responses/response_status.rb | 6 + .../models/responses/tool_choice_options.rb | 6 + .../models/responses/tool_choice_types.rb | 6 + .../models/responses/web_search_tool.rb | 12 ++ lib/openai/models/upload.rb | 6 + lib/openai/models/vector_store.rb | 6 + lib/openai/models/vector_store_list_params.rb | 6 + .../models/vector_store_search_params.rb | 6 + .../models/vector_store_search_response.rb | 6 + .../file_batch_list_files_params.rb | 12 ++ .../models/vector_stores/file_list_params.rb | 12 ++ .../models/vector_stores/vector_store_file.rb | 12 ++ .../vector_stores/vector_store_file_batch.rb | 6 + rbi/lib/openai/models/all_models.rbi | 14 ++- .../models/audio/speech_create_params.rbi | 48 +++++--- rbi/lib/openai/models/audio/speech_model.rbi | 12 +- .../audio/transcription_create_params.rbi | 19 +++- .../audio/transcription_create_response.rbi | 6 + .../models/audio/transcription_include.rbi | 8 +- .../audio/transcription_stream_event.rbi | 11 ++ .../audio/translation_create_params.rbi | 22 +++- .../audio/translation_create_response.rbi | 6 + rbi/lib/openai/models/audio_model.rbi | 12 +- .../openai/models/audio_response_format.rbi | 16 ++- rbi/lib/openai/models/batch.rbi | 6 + rbi/lib/openai/models/batch_create_params.rbi | 22 +++- .../models/beta/assistant_create_params.rbi | 17 +++ .../models/beta/assistant_list_params.rbi | 10 +- .../beta/assistant_response_format_option.rbi | 11 ++ .../models/beta/assistant_stream_event.rbi | 11 ++ rbi/lib/openai/models/beta/assistant_tool.rbi | 11 ++ .../models/beta/assistant_tool_choice.rbi | 13 ++- .../beta/assistant_tool_choice_option.rbi | 23 +++- .../models/beta/assistant_update_params.rbi | 69 +++++++----- .../openai/models/beta/file_search_tool.rbi | 14 ++- .../models/beta/message_stream_event.rbi | 11 ++ .../models/beta/run_step_stream_event.rbi | 11 ++ .../openai/models/beta/run_stream_event.rbi | 11 ++ .../beta/thread_create_and_run_params.rbi | 89 ++++++++++++++- .../models/beta/thread_create_params.rbi | 52 ++++++++- .../openai/models/beta/threads/annotation.rbi | 11 ++ .../models/beta/threads/annotation_delta.rbi | 11 ++ .../openai/models/beta/threads/image_file.rbi | 12 +- .../models/beta/threads/image_file_delta.rbi | 6 + .../openai/models/beta/threads/image_url.rbi | 12 +- .../models/beta/threads/image_url_delta.rbi | 6 + .../openai/models/beta/threads/message.rbi | 31 ++++++ .../models/beta/threads/message_content.rbi | 11 ++ .../beta/threads/message_content_delta.rbi | 11 ++ .../threads/message_content_part_param.rbi | 11 ++ .../beta/threads/message_create_params.rbi | 41 ++++++- .../models/beta/threads/message_delta.rbi | 6 + 
.../beta/threads/message_list_params.rbi | 10 +- rbi/lib/openai/models/beta/threads/run.rbi | 18 +++ .../models/beta/threads/run_create_params.rbi | 68 +++++++++++- .../models/beta/threads/run_list_params.rbi | 10 +- .../openai/models/beta/threads/run_status.rbi | 6 + .../runs/code_interpreter_tool_call.rbi | 11 ++ .../runs/code_interpreter_tool_call_delta.rbi | 11 ++ .../threads/runs/file_search_tool_call.rbi | 22 ++++ .../models/beta/threads/runs/run_step.rbi | 29 +++++ .../beta/threads/runs/run_step_delta.rbi | 11 ++ .../beta/threads/runs/run_step_include.rbi | 8 +- .../beta/threads/runs/step_list_params.rbi | 10 +- .../models/beta/threads/runs/tool_call.rbi | 11 ++ .../beta/threads/runs/tool_call_delta.rbi | 11 ++ .../openai/models/chat/chat_completion.rbi | 12 ++ ...hat_completion_assistant_message_param.rbi | 30 +++++ .../chat/chat_completion_audio_param.rbi | 38 ++++--- .../models/chat/chat_completion_chunk.rbi | 27 +++++ .../chat/chat_completion_content_part.rbi | 11 ++ .../chat_completion_content_part_image.rbi | 15 ++- ...at_completion_content_part_input_audio.rbi | 15 ++- ...hat_completion_developer_message_param.rbi | 6 + .../chat/chat_completion_message_param.rbi | 11 ++ .../models/chat/chat_completion_modality.rbi | 10 +- .../chat_completion_prediction_content.rbi | 6 + .../models/chat/chat_completion_role.rbi | 18 ++- .../chat_completion_system_message_param.rbi | 6 + .../chat_completion_tool_choice_option.rbi | 23 +++- .../chat_completion_tool_message_param.rbi | 6 + .../chat_completion_user_message_param.rbi | 21 ++++ .../models/chat/completion_create_params.rbi | 92 +++++++++++++-- .../models/chat/completion_list_params.rbi | 10 +- .../chat/completions/message_list_params.rbi | 10 +- rbi/lib/openai/models/chat_model.rbi | 93 +++++++++------- rbi/lib/openai/models/comparison_filter.rbi | 24 +++- rbi/lib/openai/models/completion_choice.rbi | 6 + .../models/completion_create_params.rbi | 24 +++- rbi/lib/openai/models/compound_filter.rbi | 16 ++- .../openai/models/embedding_create_params.rbi | 22 +++- rbi/lib/openai/models/embedding_model.rbi | 12 +- .../openai/models/file_chunking_strategy.rbi | 11 ++ .../models/file_chunking_strategy_param.rbi | 11 ++ rbi/lib/openai/models/file_list_params.rbi | 10 +- rbi/lib/openai/models/file_object.rbi | 12 ++ rbi/lib/openai/models/file_purpose.rbi | 18 ++- .../models/fine_tuning/fine_tuning_job.rbi | 72 ++++++++++++ .../fine_tuning/fine_tuning_job_event.rbi | 12 ++ .../models/fine_tuning/job_create_params.rbi | 84 +++++++++++++- .../models/image_create_variation_params.rbi | 28 ++++- rbi/lib/openai/models/image_edit_params.rbi | 28 ++++- .../openai/models/image_generate_params.rbi | 52 +++++++-- rbi/lib/openai/models/image_model.rbi | 10 +- rbi/lib/openai/models/moderation.rbi | 105 ++++++++++++++++++ .../models/moderation_create_params.rbi | 17 +++ rbi/lib/openai/models/moderation_model.rbi | 14 ++- .../models/moderation_multi_modal_input.rbi | 6 + rbi/lib/openai/models/reasoning.rbi | 10 +- rbi/lib/openai/models/reasoning_effort.rbi | 12 +- .../openai/models/responses/computer_tool.rbi | 14 ++- .../models/responses/easy_input_message.rbi | 42 ++++++- .../models/responses/file_search_tool.rbi | 21 +++- .../responses/input_item_list_params.rbi | 10 +- rbi/lib/openai/models/responses/response.rbi | 23 ++++ .../response_code_interpreter_tool_call.rbi | 20 ++++ .../responses/response_computer_tool_call.rbi | 56 ++++++++-- ...esponse_computer_tool_call_output_item.rbi | 9 ++ .../models/responses/response_content.rbi | 11 ++ 
.../response_content_part_added_event.rbi | 9 ++ .../response_content_part_done_event.rbi | 9 ++ .../responses/response_create_params.rbi | 49 +++++++- .../models/responses/response_error.rbi | 6 + .../response_file_search_tool_call.rbi | 25 ++++- .../responses/response_format_text_config.rbi | 11 ++ .../responses/response_function_tool_call.rbi | 14 ++- ...esponse_function_tool_call_output_item.rbi | 9 ++ .../response_function_web_search.rbi | 15 ++- .../models/responses/response_includable.rbi | 15 ++- .../models/responses/response_input_audio.rbi | 10 +- .../responses/response_input_content.rbi | 11 ++ .../models/responses/response_input_image.rbi | 12 +- .../models/responses/response_input_item.rbi | 81 +++++++++++--- .../responses/response_input_message_item.rbi | 18 +++ .../openai/models/responses/response_item.rbi | 11 ++ .../models/responses/response_output_item.rbi | 11 ++ .../responses/response_output_message.rbi | 21 +++- .../models/responses/response_output_text.rbi | 11 ++ .../responses/response_reasoning_item.rbi | 12 +- .../models/responses/response_status.rbi | 6 + .../responses/response_stream_event.rbi | 11 ++ .../response_text_annotation_delta_event.rbi | 11 ++ rbi/lib/openai/models/responses/tool.rbi | 11 ++ .../models/responses/tool_choice_options.rbi | 12 +- .../models/responses/tool_choice_types.rbi | 14 ++- .../models/responses/web_search_tool.rbi | 23 +++- rbi/lib/openai/models/responses_model.rbi | 14 ++- rbi/lib/openai/models/upload.rbi | 6 + rbi/lib/openai/models/vector_store.rbi | 6 + .../models/vector_store_list_params.rbi | 10 +- .../models/vector_store_search_params.rbi | 25 ++++- .../models/vector_store_search_response.rbi | 12 ++ .../file_batch_create_params.rbi | 6 + .../file_batch_list_files_params.rbi | 26 ++++- .../vector_stores/file_create_params.rbi | 6 + .../models/vector_stores/file_list_params.rbi | 24 +++- .../vector_stores/file_update_params.rbi | 6 + .../vector_stores/vector_store_file.rbi | 18 +++ .../vector_stores/vector_store_file_batch.rbi | 6 + sig/openai/models/all_models.rbs | 2 +- .../models/audio/speech_create_params.rbs | 6 +- sig/openai/models/audio/speech_model.rbs | 2 +- .../audio/transcription_create_params.rbs | 4 +- .../audio/transcription_create_response.rbs | 2 +- .../models/audio/transcription_include.rbs | 2 +- .../audio/transcription_stream_event.rbs | 2 +- .../audio/translation_create_params.rbs | 4 +- .../audio/translation_create_response.rbs | 2 +- sig/openai/models/audio_model.rbs | 2 +- sig/openai/models/audio_response_format.rbs | 2 +- sig/openai/models/batch.rbs | 2 +- sig/openai/models/batch_create_params.rbs | 4 +- .../models/beta/assistant_create_params.rbs | 4 +- .../models/beta/assistant_list_params.rbs | 2 +- .../beta/assistant_response_format_option.rbs | 2 +- .../models/beta/assistant_stream_event.rbs | 2 +- sig/openai/models/beta/assistant_tool.rbs | 2 +- .../models/beta/assistant_tool_choice.rbs | 2 +- .../beta/assistant_tool_choice_option.rbs | 4 +- .../models/beta/assistant_update_params.rbs | 2 +- sig/openai/models/beta/file_search_tool.rbs | 2 +- .../models/beta/message_stream_event.rbs | 2 +- .../models/beta/run_step_stream_event.rbs | 2 +- sig/openai/models/beta/run_stream_event.rbs | 2 +- .../beta/thread_create_and_run_params.rbs | 14 +-- .../models/beta/thread_create_params.rbs | 8 +- sig/openai/models/beta/threads/annotation.rbs | 2 +- .../models/beta/threads/annotation_delta.rbs | 2 +- sig/openai/models/beta/threads/image_file.rbs | 2 +- .../models/beta/threads/image_file_delta.rbs | 2 +- 
sig/openai/models/beta/threads/image_url.rbs | 2 +- .../models/beta/threads/image_url_delta.rbs | 2 +- sig/openai/models/beta/threads/message.rbs | 8 +- .../models/beta/threads/message_content.rbs | 2 +- .../beta/threads/message_content_delta.rbs | 2 +- .../threads/message_content_part_param.rbs | 2 +- .../beta/threads/message_create_params.rbs | 6 +- .../models/beta/threads/message_delta.rbs | 2 +- .../beta/threads/message_list_params.rbs | 2 +- sig/openai/models/beta/threads/run.rbs | 6 +- .../models/beta/threads/run_create_params.rbs | 10 +- .../models/beta/threads/run_list_params.rbs | 2 +- sig/openai/models/beta/threads/run_status.rbs | 2 +- .../runs/code_interpreter_tool_call.rbs | 2 +- .../runs/code_interpreter_tool_call_delta.rbs | 2 +- .../threads/runs/file_search_tool_call.rbs | 4 +- .../models/beta/threads/runs/run_step.rbs | 8 +- .../beta/threads/runs/run_step_delta.rbs | 2 +- .../beta/threads/runs/run_step_include.rbs | 2 +- .../beta/threads/runs/step_list_params.rbs | 2 +- .../models/beta/threads/runs/tool_call.rbs | 2 +- .../beta/threads/runs/tool_call_delta.rbs | 2 +- sig/openai/models/chat/chat_completion.rbs | 4 +- ...hat_completion_assistant_message_param.rbs | 4 +- .../chat/chat_completion_audio_param.rbs | 4 +- .../models/chat/chat_completion_chunk.rbs | 8 +- .../chat/chat_completion_content_part.rbs | 2 +- .../chat_completion_content_part_image.rbs | 2 +- ...at_completion_content_part_input_audio.rbs | 2 +- ...hat_completion_developer_message_param.rbs | 2 +- .../chat/chat_completion_message_param.rbs | 2 +- .../models/chat/chat_completion_modality.rbs | 2 +- .../chat_completion_prediction_content.rbs | 2 +- .../models/chat/chat_completion_role.rbs | 2 +- .../chat_completion_system_message_param.rbs | 2 +- .../chat_completion_tool_choice_option.rbs | 4 +- .../chat_completion_tool_message_param.rbs | 2 +- .../chat_completion_user_message_param.rbs | 2 +- .../models/chat/completion_create_params.rbs | 16 +-- .../models/chat/completion_list_params.rbs | 2 +- .../chat/completions/message_list_params.rbs | 2 +- sig/openai/models/chat_model.rbs | 2 +- sig/openai/models/comparison_filter.rbs | 4 +- sig/openai/models/completion_choice.rbs | 2 +- .../models/completion_create_params.rbs | 6 +- sig/openai/models/compound_filter.rbs | 4 +- sig/openai/models/embedding_create_params.rbs | 6 +- sig/openai/models/embedding_model.rbs | 2 +- sig/openai/models/file_chunking_strategy.rbs | 2 +- .../models/file_chunking_strategy_param.rbs | 2 +- sig/openai/models/file_list_params.rbs | 2 +- sig/openai/models/file_object.rbs | 4 +- sig/openai/models/file_purpose.rbs | 2 +- .../models/fine_tuning/fine_tuning_job.rbs | 24 ++-- .../fine_tuning/fine_tuning_job_event.rbs | 4 +- .../models/fine_tuning/job_create_params.rbs | 24 ++-- .../models/image_create_variation_params.rbs | 6 +- sig/openai/models/image_edit_params.rbs | 6 +- sig/openai/models/image_generate_params.rbs | 10 +- sig/openai/models/image_model.rbs | 2 +- sig/openai/models/moderation.rbs | 26 ++--- .../models/moderation_create_params.rbs | 4 +- sig/openai/models/moderation_model.rbs | 2 +- .../models/moderation_multi_modal_input.rbs | 2 +- sig/openai/models/reasoning.rbs | 2 +- sig/openai/models/reasoning_effort.rbs | 2 +- sig/openai/models/responses/computer_tool.rbs | 2 +- .../models/responses/easy_input_message.rbs | 6 +- .../models/responses/file_search_tool.rbs | 4 +- .../responses/input_item_list_params.rbs | 2 +- sig/openai/models/responses/response.rbs | 6 +- .../response_code_interpreter_tool_call.rbs | 4 +- 
.../responses/response_computer_tool_call.rbs | 8 +- ...esponse_computer_tool_call_output_item.rbs | 2 +- .../models/responses/response_content.rbs | 2 +- .../response_content_part_added_event.rbs | 2 +- .../response_content_part_done_event.rbs | 2 +- .../responses/response_create_params.rbs | 6 +- .../models/responses/response_error.rbs | 2 +- .../response_file_search_tool_call.rbs | 4 +- .../responses/response_format_text_config.rbs | 2 +- .../responses/response_function_tool_call.rbs | 2 +- ...esponse_function_tool_call_output_item.rbs | 2 +- .../response_function_web_search.rbs | 2 +- .../models/responses/response_includable.rbs | 2 +- .../models/responses/response_input_audio.rbs | 2 +- .../responses/response_input_content.rbs | 2 +- .../models/responses/response_input_image.rbs | 2 +- .../models/responses/response_input_item.rbs | 12 +- .../responses/response_input_message_item.rbs | 6 +- sig/openai/models/responses/response_item.rbs | 2 +- .../models/responses/response_output_item.rbs | 2 +- .../responses/response_output_message.rbs | 4 +- .../models/responses/response_output_text.rbs | 2 +- .../responses/response_reasoning_item.rbs | 2 +- .../models/responses/response_status.rbs | 2 +- .../responses/response_stream_event.rbs | 2 +- .../response_text_annotation_delta_event.rbs | 2 +- sig/openai/models/responses/tool.rbs | 2 +- .../models/responses/tool_choice_options.rbs | 2 +- .../models/responses/tool_choice_types.rbs | 2 +- .../models/responses/web_search_tool.rbs | 4 +- sig/openai/models/responses_model.rbs | 2 +- sig/openai/models/upload.rbs | 2 +- sig/openai/models/vector_store.rbs | 2 +- .../models/vector_store_list_params.rbs | 2 +- .../models/vector_store_search_params.rbs | 6 +- .../models/vector_store_search_response.rbs | 4 +- .../file_batch_create_params.rbs | 2 +- .../file_batch_list_files_params.rbs | 4 +- .../vector_stores/file_create_params.rbs | 2 +- .../models/vector_stores/file_list_params.rbs | 4 +- .../vector_stores/file_update_params.rbs | 2 +- .../vector_stores/vector_store_file.rbs | 6 +- .../vector_stores/vector_store_file_batch.rbs | 2 +- 388 files changed, 3669 insertions(+), 620 deletions(-) diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index b7efc98d..3a0c5b33 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -111,6 +111,12 @@ module Voice SHIMMER = :shimmer finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, @@ -126,6 +132,12 @@ module ResponseFormat PCM = :pcm finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index b85a4f1e..b2e5f284 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -11,6 +11,12 @@ module SpeechModel GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 1ec3b0ac..c41eb0d5 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -156,6 +156,12 @@ module TimestampGranularity SEGMENT = :segment finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index fc9e716d..41e2d8cf 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -9,6 +9,12 @@ module TranscriptionInclude LOGPROBS = :logprobs finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 087b2d38..47ce78ce 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -101,6 +101,12 @@ module ResponseFormat VTT = :vtt finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index cb25d87b..d84ec517 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -10,6 +10,12 @@ module AudioModel GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 105ac628..86745cf4 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -15,6 +15,12 @@ module AudioResponseFormat VTT = :vtt finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index dcf830ae..0c1883c2 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -238,6 +238,12 @@ module Status CANCELLED = :cancelled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Errors < OpenAI::BaseModel diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 35dc43cb..85c2bb71 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -67,6 +67,12 @@ module CompletionWindow NUMBER_24H = :"24h" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The endpoint to be used for all requests in the batch. Currently @@ -82,6 +88,12 @@ module Endpoint V1_COMPLETIONS = :"/v1/completions" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 5586ebe7..c7b7b460 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -76,6 +76,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 78dd540e..332a8971 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -39,6 +39,12 @@ module Type FILE_SEARCH = :file_search finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 560a4a80..0f60a7c0 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -31,6 +31,12 @@ module Auto REQUIRED = :required finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # @!parse diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 102489f1..94ce6f78 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -114,6 +114,12 @@ module Ranker DEFAULT_2024_08_21 = :default_2024_08_21 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4461713d..58aea234 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -356,6 +356,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Attachment < OpenAI::BaseModel @@ -780,6 +786,12 @@ module Type LAST_MESSAGES = :last_messages finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 53669757..abadddfc 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -129,6 +129,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Attachment < OpenAI::BaseModel diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index fab8e1fc..873f7306 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -42,6 +42,12 @@ module Detail HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index e40aa711..a07a1a85 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -46,6 +46,12 @@ module Detail HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 59dcae47..36f95249 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -41,6 +41,12 @@ module Detail HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 787dd26c..f6c26510 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -45,6 +45,12 @@ module Detail HIGH = :high finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index da2a8d75..4af15e3b 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -229,6 +229,12 @@ module Reason RUN_FAILED = :run_failed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -240,6 +246,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -252,6 +264,12 @@ module Status COMPLETED = :completed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 1fcbaa76..faca57aa 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -89,6 +89,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Attachment < OpenAI::BaseModel diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 46d5d46f..34586ce6 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -43,6 +43,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 816ed048..8eacf749 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -88,6 +88,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 2d7b18a8..f79c15af 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -314,6 +314,12 @@ module Reason MAX_PROMPT_TOKENS = :max_prompt_tokens finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -349,6 +355,12 @@ module Code INVALID_PROMPT = :invalid_prompt finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -435,6 +447,12 @@ module Type LAST_MESSAGES = :last_messages finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index f1e8d793..c5d6dd63 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -318,6 +318,12 @@ module Role ASSISTANT = :assistant finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Attachment < OpenAI::BaseModel @@ -442,6 +448,12 @@ module Type LAST_MESSAGES = :last_messages finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index fa0aee48..a0286c58 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -77,6 +77,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index 727fa009..f3701e63 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -21,6 +21,12 @@ module RunStatus EXPIRED = :expired finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 47fd4a37..00d5e5ca 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -102,6 +102,12 @@ module Ranker DEFAULT_2024_08_21 = :default_2024_08_21 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -186,6 +192,12 @@ module Type TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index d54a639a..15da4b34 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -191,6 +191,12 @@ module Code RATE_LIMIT_EXCEEDED = :rate_limit_exceeded finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -206,6 +212,12 @@ module Status EXPIRED = :expired finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The details of the run step. @@ -235,6 +247,12 @@ module Type TOOL_CALLS = :tool_calls finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Usage < OpenAI::BaseModel diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 32e98757..9aad4b0e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -12,6 +12,12 @@ module RunStepInclude :"step_details.tool_calls[*].file_search.results[*].content" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 3a12f3fe..c699a8f7 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -101,6 +101,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index fcb5196d..5920c8d3 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -149,6 +149,12 @@ module FinishReason FUNCTION_CALL = :function_call finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Logprobs < OpenAI::BaseModel @@ -184,6 +190,12 @@ module ServiceTier DEFAULT = :default finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 5f1ca422..c03b45ad 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -42,6 +42,12 @@ module Format PCM16 = :pcm16 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, @@ -59,6 +65,12 @@ module Voice VERSE = :verse finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index fd4648fc..ee479086 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -244,6 +244,12 @@ module Role TOOL = :tool finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class ToolCall < OpenAI::BaseModel @@ -331,6 +337,12 @@ module Type FUNCTION = :function finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end @@ -351,6 +363,12 @@ module FinishReason FUNCTION_CALL = :function_call finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Logprobs < OpenAI::BaseModel @@ -386,6 +404,12 @@ module ServiceTier DEFAULT = :default finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 7b165ecb..b2a578df 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -61,6 +61,12 @@ module Detail HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 7e5f53ee..27ce4e32 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -56,6 +56,12 @@ module Format MP3 = :mp3 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index eae25ae3..9d7033f1 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -10,6 +10,12 @@ module ChatCompletionModality AUDIO = :audio finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 43666654..ad95f7d6 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -15,6 +15,12 @@ module ChatCompletionRole FUNCTION = :function finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 273236d8..349d84e1 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -32,6 +32,12 @@ module Auto REQUIRED = :required finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # @!parse diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index c326a2d8..b22818b3 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -475,6 +475,12 @@ module FunctionCallMode AUTO = :auto finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # @!parse @@ -537,6 +543,12 @@ module Modality AUDIO = :audio finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # An object specifying the format that the model must output. @@ -593,6 +605,12 @@ module ServiceTier DEFAULT = :default finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -656,6 +674,12 @@ module SearchContextSize HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class UserLocation < OpenAI::BaseModel diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index ce2187d8..8c9be5ed 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -78,6 +78,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 7d45da51..fb9059c7 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -59,6 +59,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 695dcb50..5c34a108 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -50,6 +50,12 @@ module ChatModel GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2b00e018..3d5e8ba5 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -60,6 +60,12 @@ module Type LTE = :lte finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The value to compare against the attribute key; supports string, number, or diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index df42c59b..26b78056 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -49,6 +49,12 @@ module FinishReason CONTENT_FILTER = :content_filter finalize! 
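# One practical payoff of a documented `values` method: callers can guard
# against enum members the API starts returning before the SDK declares them.
# A hedged sketch -- `choice` and `warn_unknown` are hypothetical stand-ins,
# not part of this library:
reason = choice.finish_reason
known = OpenAI::Models::CompletionChoice::FinishReason.values
warn_unknown(reason) unless known.include?(reason)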
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Logprobs < OpenAI::BaseModel diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 4223ab0d..f27c7333 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -51,6 +51,12 @@ module Type OR = :or finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index e215c574..d41fd3c1 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -141,6 +141,12 @@ module EncodingFormat BASE64 = :base64 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 0eab075d..a3ace03e 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -10,6 +10,12 @@ module EmbeddingModel TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index ae6cf6ed..814201ae 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -72,6 +72,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index a89db46f..0ff33248 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -114,6 +114,12 @@ module Purpose VISION = :vision finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # @deprecated @@ -128,6 +134,12 @@ module Status ERROR = :error finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 49c2717d..5a1df197 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -17,6 +17,12 @@ module FilePurpose EVALS = :evals finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index cdfbcc5f..9a0b6281 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -334,6 +334,12 @@ module Status CANCELLED = :cancelled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Method < OpenAI::BaseModel @@ -652,6 +658,12 @@ module Type DPO = :dpo finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 207444b5..11db3c9d 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -78,6 +78,12 @@ module Level ERROR = :error finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The type of event. @@ -88,6 +94,12 @@ module Type METRICS = :metrics finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index c1cbd747..c75b6b94 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -664,6 +664,12 @@ module Type DPO = :dpo finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index ff2e6112..6b469d42 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -97,6 +97,12 @@ module ResponseFormat B64_JSON = :b64_json finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -109,6 +115,12 @@ module Size NUMBER_1024X1024 = :"1024x1024" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 523b95ca..a2017242 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -128,6 +128,12 @@ module ResponseFormat B64_JSON = :b64_json finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -140,6 +146,12 @@ module Size NUMBER_1024X1024 = :"1024x1024" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index a4993e78..420abb8a 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -130,6 +130,12 @@ module Quality HD = :hd finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The format in which the generated images are returned. Must be one of `url` or @@ -142,6 +148,12 @@ module ResponseFormat B64_JSON = :b64_json finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -157,6 +169,12 @@ module Size NUMBER_1024X1792 = :"1024x1792" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -170,6 +188,12 @@ module Style NATURAL = :natural finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index ce36cc6d..7ebd947c 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -9,6 +9,12 @@ module ImageModel DALL_E_3 = :"dall-e-3" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 2b0818e8..922325b5 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -315,6 +315,12 @@ module Harassment TEXT = :text finalize! 
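# Because members are Symbols, these enums also pattern-match cleanly; the
# moderation input-type modules above each narrow to :text and/or :image.
# A sketch with a hypothetical `input_type` local and handler methods:
case input_type
in :text  then moderate_text   # hypothetical handler
in :image then moderate_image  # hypothetical handler
end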
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end module HarassmentThreatening @@ -323,6 +329,12 @@ module HarassmentThreatening TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module Hate @@ -331,6 +343,12 @@ module Hate TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module HateThreatening @@ -339,6 +357,12 @@ module HateThreatening TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module Illicit @@ -347,6 +371,12 @@ module Illicit TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module IllicitViolent @@ -355,6 +385,12 @@ module IllicitViolent TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module SelfHarm @@ -364,6 +400,12 @@ module SelfHarm IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module SelfHarmInstruction @@ -373,6 +415,12 @@ module SelfHarmInstruction IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module SelfHarmIntent @@ -382,6 +430,12 @@ module SelfHarmIntent IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module Sexual @@ -391,6 +445,12 @@ module Sexual IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module SexualMinor @@ -399,6 +459,12 @@ module SexualMinor TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module Violence @@ -408,6 +474,12 @@ module Violence IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end module ViolenceGraphic @@ -417,6 +489,12 @@ module ViolenceGraphic IMAGE = :image finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index aad66a00..58e33aeb 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -11,6 +11,12 @@ module ModerationModel TEXT_MODERATION_STABLE = :"text-moderation-stable" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index cfe04696..86ed1a42 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -49,6 +49,12 @@ module GenerateSummary DETAILED = :detailed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index e27540e1..e59ef5cb 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -16,6 +16,12 @@ module ReasoningEffort HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 1359937f..86220eb2 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -51,6 +51,12 @@ module Environment BROWSER = :browser finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index bd57a3e2..cbc17798 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -73,6 +73,12 @@ module Role DEVELOPER = :developer finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The type of the message input. Always `message`. @@ -82,6 +88,12 @@ module Type MESSAGE = :message finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 28ded43b..a557be49 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -120,6 +120,12 @@ module Ranker DEFAULT_2024_11_15 = :"default-2024-11-15" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index adee6e5b..c0cd1b8d 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -74,6 +74,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index a9ba3f8c..60bff913 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -297,6 +297,12 @@ module Reason CONTENT_FILTER = :content_filter finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -344,6 +350,12 @@ module Truncation DISABLED = :disabled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index bf04793b..8c1cc921 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -146,6 +146,12 @@ module Status COMPLETED = :completed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 07da24be..0123e679 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -142,6 +142,12 @@ module Button FORWARD = :forward finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -448,6 +454,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The type of the computer call. Always `computer_call`. @@ -457,6 +469,12 @@ module Type COMPUTER_CALL = :computer_call finalize! 
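# A spot check in the test suite keeps these doc stubs honest, since YARD
# never verifies that a documented method exists. A minimal Minitest sketch,
# assuming `finalize!` really does define `values` from the declared
# constants:
require "minitest/autorun"
require "openai"

class EnumValuesTest < Minitest::Test
  def test_values_mirror_declared_constants
    type = OpenAI::Models::Responses::ResponseComputerToolCall::Type
    assert_equal %i[computer_call], type.values
  end
end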
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 33e5105d..cc5bfbe9 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -104,6 +104,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 45eb0019..d49d609c 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -313,6 +313,12 @@ module Truncation DISABLED = :disabled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 21b8b11b..2fcc855b 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -50,6 +50,12 @@ module Code IMAGE_FILE_NOT_FOUND = :image_file_not_found finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 11c8e566..9b5b331c 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -64,6 +64,12 @@ module Status FAILED = :failed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class Result < OpenAI::BaseModel diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 2cdffc5f..99de4281 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -75,6 +75,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 283f9afa..a3113d19 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -60,6 +60,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index fbcf112a..05bb88d9 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -45,6 +45,12 @@ module Status FAILED = :failed finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 7eb0bd14..2ed1572f 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -19,6 +19,12 @@ module ResponseIncludable COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 23539df8..0beb7f70 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -41,6 +41,12 @@ module Format WAV = :wav finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index e6d2c0ce..025c1255 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -53,6 +53,12 @@ module Detail AUTO = :auto finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index adb90a0a..5aade5e3 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -115,6 +115,12 @@ module Role DEVELOPER = :developer finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -127,6 +133,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The type of the message input. Always set to `message`. @@ -136,6 +148,12 @@ module Type MESSAGE = :message finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -246,6 +264,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -313,6 +337,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 4e29d6dd..af7318e2 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -64,6 +64,12 @@ module Role DEVELOPER = :developer finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -76,6 +82,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # The type of the message input. Always set to `message`. @@ -85,6 +97,12 @@ module Type MESSAGE = :message finalize! 
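# Nothing changes for callers at runtime: the constants are plain Symbols, so
# both spellings below produce identical request params. A sketch using the
# list-params enum from earlier in this patch:
a = { order: OpenAI::Models::Responses::InputItemListParams::Order::ASC }
b = { order: :asc }
a == b # => true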
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index d9ab6443..e602d2ea 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -78,6 +78,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 4c410d54..8309c595 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -78,6 +78,12 @@ module Status INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index 3ec9d99f..0dd22d9b 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -14,6 +14,12 @@ module ResponseStatus INCOMPLETE = :incomplete finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 9ec1a734..48a22269 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -19,6 +19,12 @@ module ToolChoiceOptions REQUIRED = :required finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 9d8e05e2..c62ca447 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -44,6 +44,12 @@ module Type WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 9341e708..1826a57b 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -53,6 +53,12 @@ module Type WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # High level guidance for the amount of context window space to use for the @@ -65,6 +71,12 @@ module SearchContextSize HIGH = :high finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end class UserLocation < OpenAI::BaseModel diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 507858a6..3e23d757 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -86,6 +86,12 @@ module Status EXPIRED = :expired finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 3c74e1d8..f86bb7ea 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -168,6 +168,12 @@ module Status COMPLETED = :completed finalize! 
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end class ExpiresAfter < OpenAI::BaseModel diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 5effb4ea..94cfda44 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -75,6 +75,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index ea877db0..38670d94 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -146,6 +146,12 @@ module Ranker DEFAULT_2024_11_15 = :"default-2024-11-15" finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index ebb55560..23f38c59 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -94,6 +94,12 @@ module Type TEXT = :text finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 20469fe9..13e346c1 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -94,6 +94,12 @@ module Filter CANCELLED = :cancelled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -105,6 +111,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 2f7ffbbc..092f40b0 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -88,6 +88,12 @@ module Filter CANCELLED = :cancelled finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -99,6 +105,12 @@ module Order DESC = :desc finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index c3d22aef..ad361605 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -138,6 +138,12 @@ module Code INVALID_FILE = :invalid_file finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end @@ -153,6 +159,12 @@ module Status FAILED = :failed finalize! 
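# The .rbi diffs that follow make two related changes. First, enum constants
# move from `OrSymbol` to `TaggedSymbol`: reading the aliases in those files,
# `TaggedSymbol` is the branded type (`T.all(Symbol, Enum)`) and `OrSymbol`
# the permissive union (`T.any(Symbol, TaggedSymbol)`), so the apparent
# convention is to accept `OrSymbol` in input positions while typing the
# constants and the `values` return precisely. Second, unions gain `variants`
# sigs returning the tuple of member types. A reduced rbi-style sketch of the
# enum shape (not copied from any one file):
module Status
  TaggedSymbol = T.type_alias { T.all(Symbol, Status) }
  OrSymbol = T.type_alias { T.any(Symbol, TaggedSymbol) }

  FAILED = T.let(:failed, TaggedSymbol) # outputs: precise branded type

  sig { returns(T::Array[TaggedSymbol]) }
  def self.values; end # rbi stub; the implementation lives in lib/

  sig { params(status: OrSymbol).void } # inputs: plain Symbols still accepted
  def self.check(status); end           # hypothetical input-position example
end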
+ + class << self + # @!parse + # # @return [Array] + # def values; end + end end module Attribute diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index a8628b40..fbc1cc4f 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -112,6 +112,12 @@ module Status FAILED = :failed finalize! + + class << self + # @!parse + # # @return [Array] + # def values; end + end end end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index feedfa10..c1a126f5 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -11,11 +11,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::TaggedSymbol) } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::OrSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::OrSymbol) + O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::TaggedSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::TaggedSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::TaggedSymbol) COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::OrSymbol) + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::TaggedSymbol) + + class << self + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index c7babe62..49573052 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -115,6 +115,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) } + def variants + end + end end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, @@ -127,15 +133,21 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } - ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) + ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } + def values + end + end end # The format to audio in. 
Supported formats are `mp3`, `opus`, `aac`, `flac`, @@ -147,12 +159,18 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } - MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) + MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index d375fcda..495283eb 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -9,9 +9,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } - TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::OrSymbol) - TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::OrSymbol) - GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::OrSymbol) + TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) + TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) + GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 272b6fc5..0d5d853f 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -170,6 +170,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + def variants + end + end end module TimestampGranularity @@ -180,9 +186,18 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) } - WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) + WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) SEGMENT = - 
T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol) + T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol]) + end + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index e6190f8a..a15d78bf 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -10,6 +10,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } + + class << self + sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 95c1bbab..531eaf5f 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -9,7 +9,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } - LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::OrSymbol) + LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index 4ca09f44..40a5e97b 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -19,6 +19,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 3e9eeb48..b7cda45d 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -104,6 +104,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + def variants + end + end end # The format of the output, in one of these options: `json`, `text`, `srt`, @@ -116,12 +122,18 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } - JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + JSON = T.let(:json, 
OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) VERBOSE_JSON = - T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) + T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 2d27d701..3ee8d56c 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -8,6 +8,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} } + + class << self + sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index c17c0811..5ae6945c 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -8,9 +8,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioModel::TaggedSymbol) } - WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::OrSymbol) - GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::OrSymbol) - GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::OrSymbol) + WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::TaggedSymbol) + GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) + GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::AudioModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index fc0693da..37fec73f 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -11,11 +11,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } - JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::OrSymbol) - TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::OrSymbol) - SRT = T.let(:srt, OpenAI::Models::AudioResponseFormat::OrSymbol) - VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::OrSymbol) - VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::OrSymbol) + JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) + TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::TaggedSymbol) + SRT = T.let(:srt, 
OpenAI::Models::AudioResponseFormat::TaggedSymbol) + VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) + VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::AudioResponseFormat::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 7ff79566..5c383697 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -288,6 +288,12 @@ module OpenAI EXPIRED = T.let(:expired, OpenAI::Models::Batch::Status::TaggedSymbol) CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Batch::Status::TaggedSymbol]) } + def values + end + end end class Errors < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 62413988..90e1afe6 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -102,7 +102,13 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } - NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) + NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) } + def values + end + end end # The endpoint to be used for all requests in the batch. Currently @@ -115,11 +121,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } - V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) V1_CHAT_COMPLETIONS = - T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) + T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) + V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) + V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 5a4a056a..e7046d48 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -317,6 +317,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + def variants + end + end end class ToolResources < OpenAI::BaseModel @@ -670,6 +676,17 @@ 
module OpenAI end end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 7d2b0fef..cf967a08 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -90,8 +90,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } - ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) + ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index d0c034e2..65a440c2 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -37,6 +37,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 59dfcef6..8651ae68 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -867,6 +867,17 @@ module OpenAI def to_hash end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
+            )
+        end
+        def variants
+        end
+      end
     end
   end
 end
diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi
index 6efa6193..aea5d000 100644
--- a/rbi/lib/openai/models/beta/assistant_tool.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool.rbi
@@ -16,6 +16,17 @@ module OpenAI
             )
           }
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
index cb86fbe8..e40ae9b4 100644
--- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
@@ -58,9 +58,16 @@ module OpenAI
           TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) }
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) }
 
-          FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol)
-          CODE_INTERPRETER = T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol)
-          FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol)
+          FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol)
+          CODE_INTERPRETER =
+            T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol)
+          FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
index 14fac828..8afac7a6 100644
--- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
@@ -34,9 +34,26 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) }
 
-          NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol)
-          AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol)
-          REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol)
+          NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol)
+          AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol)
+          REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol]) }
+            def values
+            end
+          end
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi
index c1eb48ee..eb77a1f2 100644
--- a/rbi/lib/openai/models/beta/assistant_update_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi
@@ -323,52 +323,61 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) }
 
-          O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+          O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           O3_MINI_2025_01_31 =
-            T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4O = T.let(:"gpt-4o", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4O = T.let(:"gpt-4o", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4O_2024_11_20 =
-            T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4O_2024_08_06 =
-            T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4O_2024_05_13 =
-            T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4O_MINI_2024_07_18 =
-            T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_5_PREVIEW = T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_5_PREVIEW =
+            T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_5_PREVIEW_2025_02_27 =
-            T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_TURBO_2024_04_09 =
-            T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_0125_PREVIEW =
-            T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_TURBO_PREVIEW =
-            T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_1106_PREVIEW =
-            T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_4_VISION_PREVIEW =
-            T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
-          GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_32K_0314 =
+            T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_4_32K_0613 =
+            T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+          GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_3_5_TURBO_16K =
-            T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_3_5_TURBO_0613 =
-            T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_3_5_TURBO_1106 =
-            T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_3_5_TURBO_0125 =
-            T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
           GPT_3_5_TURBO_16K_0613 =
-            T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)
+            T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)
+
+          class << self
+            sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) }
+            def variants
+            end
+          end
         end
 
         class ToolResources < OpenAI::BaseModel
diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi
index 059290ff..27504367 100644
--- a/rbi/lib/openai/models/beta/file_search_tool.rbi
+++ b/rbi/lib/openai/models/beta/file_search_tool.rbi
@@ -152,12 +152,22 @@ module OpenAI
             OrSymbol =
               T.type_alias { T.any(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) }
 
-            AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)
+            AUTO =
+              T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol)
             DEFAULT_2024_08_21 =
               T.let(
                 :default_2024_08_21,
-                OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol
+                OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol
               )
+
+            class << self
+              sig do
+                override
+                  .returns(T::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol])
+              end
+              def values
+              end
+            end
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi
index 98f7e150..0766b1b4 100644
--- a/rbi/lib/openai/models/beta/message_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/message_stream_event.rbi
@@ -191,6 +191,17 @@ module OpenAI
           def to_hash
           end
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi
index cfd0841f..11b0dbf9 100644
--- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi
@@ -258,6 +258,17 @@ module OpenAI
           def to_hash
           end
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi
index 37b5fe0c..0b347621 100644
--- a/rbi/lib/openai/models/beta/run_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/run_stream_event.rbi
@@ -355,6 +355,17 @@ module OpenAI
           def to_hash
           end
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
index 31b70a45..8e93da0a 100644
--- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
@@ -432,6 +432,12 @@ module OpenAI
         extend OpenAI::Union
 
         Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
+
+        class << self
+          sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+          def variants
+          end
+        end
       end
 
       class Thread < OpenAI::BaseModel
@@ -670,6 +676,26 @@ module OpenAI
             MessageContentPartParamArray =
               T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)
+
+            class << self
+              sig do
+                override
+                  .returns(
+                    [
+                      String,
+                      T::Array[
+                        T.any(
+                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                          OpenAI::Models::Beta::Threads::TextContentBlockParam
+                        )
+                      ]
+                    ]
+                  )
+              end
+              def variants
+              end
+            end
           end
 
           # The role of the entity that is creating the message. Allowed values include:
@@ -686,9 +712,18 @@ module OpenAI
             OrSymbol =
               T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) }
 
-            USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol)
+            USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol)
             ASSISTANT =
-              T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol)
+              T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol)
+
+            class << self
+              sig do
+                override
+                  .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol])
+              end
+              def values
+              end
+            end
           end
 
           class Attachment < OpenAI::BaseModel
@@ -801,6 +836,17 @@ module OpenAI
               def to_hash
               end
             end
+
+            class << self
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
+                  )
+              end
+              def variants
+              end
+            end
           end
         end
       end
@@ -1176,6 +1222,17 @@ module OpenAI
               end
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
@@ -1302,6 +1359,17 @@ module OpenAI
             )
           }
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+              )
+          end
+          def variants
+          end
+        end
       end
 
       class TruncationStrategy < OpenAI::BaseModel
@@ -1366,9 +1434,22 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) }
 
-          AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
+          AUTO =
+            T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol)
           LAST_MESSAGES =
-            T.let(:last_messages, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol)
+            T.let(
+              :last_messages,
+              OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
+            )
+
+          class << self
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol])
+            end
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi
index 9539398e..b546a4c2 100644
--- a/rbi/lib/openai/models/beta/thread_create_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_params.rbi
@@ -234,6 +234,26 @@ module OpenAI
           MessageContentPartParamArray =
             T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [
+                    String,
+                    T::Array[
+                      T.any(
+                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                        OpenAI::Models::Beta::Threads::TextContentBlockParam
+                      )
+                    ]
+                  ]
+                )
+            end
+            def variants
+            end
+          end
         end
 
         # The role of the entity that is creating the message. Allowed values include:
@@ -249,8 +269,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) }
 
-          USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol)
-          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol)
+          USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol)
+          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         class Attachment < OpenAI::BaseModel
@@ -363,6 +389,17 @@ module OpenAI
             def to_hash
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
@@ -716,6 +753,17 @@ module OpenAI
             end
           end
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi
index 74967cee..a260cd30 100644
--- a/rbi/lib/openai/models/beta/threads/annotation.rbi
+++ b/rbi/lib/openai/models/beta/threads/annotation.rbi
@@ -19,6 +19,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
index 90db2a3c..170ebc33 100644
--- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
@@ -19,6 +19,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi
index 61ad33c5..edcd6cf4 100644
--- a/rbi/lib/openai/models/beta/threads/image_file.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file.rbi
@@ -48,9 +48,15 @@ module OpenAI
             TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) }
             OrSymbol =
               T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) }
 
-            AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)
-            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)
-            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)
+            AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol)
+            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol)
+            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol)
+
+            class << self
+              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol]) }
+              def values
+              end
+            end
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
index efac1cbe..2d687b40 100644
--- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
@@ -55,6 +55,12 @@ module OpenAI
             AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)
             LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)
             HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)
+
+            class << self
+              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol]) }
+              def values
+              end
+            end
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi
index 032d894d..9b0d2994 100644
--- a/rbi/lib/openai/models/beta/threads/image_url.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url.rbi
@@ -47,9 +47,15 @@ module OpenAI
             TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) }
             OrSymbol =
               T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) }
 
-            AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)
-            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)
-            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)
+            AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol)
+            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol)
+            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol)
+
+            class << self
+              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol]) }
+              def values
+              end
+            end
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
index 6e8e3245..1363acdb 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
@@ -53,6 +53,12 @@ module OpenAI
             AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)
             LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)
             HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)
+
+            class << self
+              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol]) }
+              def values
+              end
+            end
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi
index 90e7e16f..ca7f31be 100644
--- a/rbi/lib/openai/models/beta/threads/message.rbi
+++ b/rbi/lib/openai/models/beta/threads/message.rbi
@@ -377,6 +377,17 @@ module OpenAI
             def to_hash
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
@@ -426,6 +437,14 @@ module OpenAI
             T.let(:run_expired, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol)
           RUN_FAILED =
             T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol)
+
+          class << self
+            sig do
+              override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol])
+            end
+            def values
+            end
+          end
         end
       end
@@ -438,6 +457,12 @@ module OpenAI
           USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol)
           ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         # The status of the message, which can be either `in_progress`, `incomplete`, or
@@ -451,6 +476,12 @@ module OpenAI
           IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol)
           INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol)
           COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi
index 97ed19fe..58d1462c 100644
--- a/rbi/lib/openai/models/beta/threads/message_content.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content.rbi
@@ -20,6 +20,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
index ec1c6ccc..b4216635 100644
--- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
@@ -20,6 +20,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
index effc6e6c..215aeccb 100644
--- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
@@ -19,6 +19,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi
index 9c0c17d5..2bbad44b 100644
--- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi
@@ -166,6 +166,26 @@ module OpenAI
           MessageContentPartParamArray =
             T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [
+                    String,
+                    T::Array[
+                      T.any(
+                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                        OpenAI::Models::Beta::Threads::TextContentBlockParam
+                      )
+                    ]
+                  ]
+                )
+            end
+            def variants
+            end
+          end
         end
 
        # The role of the entity that is creating the message. Allowed values include:
@@ -181,8 +201,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) }
 
-          USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol)
-          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol)
+          USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol)
+          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         class Attachment < OpenAI::BaseModel
@@ -295,6 +321,17 @@ module OpenAI
             def to_hash
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi
index b7bfc61c..c396bfdb 100644
--- a/rbi/lib/openai/models/beta/threads/message_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi
@@ -107,6 +107,12 @@ module OpenAI
           USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)
           ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi
index d80e8d49..8b7fcfb2 100644
--- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi
@@ -103,8 +103,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi
index 3b24496c..23687e33 100644
--- a/rbi/lib/openai/models/beta/threads/run.rbi
+++ b/rbi/lib/openai/models/beta/threads/run.rbi
@@ -581,6 +581,12 @@ module OpenAI
             T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)
           MAX_PROMPT_TOKENS =
             T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
@@ -634,6 +640,12 @@ module OpenAI
             T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol)
           INVALID_PROMPT =
             T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
@@ -776,6 +788,12 @@ module OpenAI
           AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol)
           LAST_MESSAGES =
             T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi
index d744dbfc..18a61e58 100644
--- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi
@@ -623,6 +623,26 @@ module OpenAI
           MessageContentPartParamArray =
             T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [
+                    String,
+                    T::Array[
+                      T.any(
+                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                        OpenAI::Models::Beta::Threads::TextContentBlockParam
+                      )
+                    ]
+                  ]
+                )
+            end
+            def variants
+            end
+          end
         end
 
         # The role of the entity that is creating the message. Allowed values include:
@@ -639,9 +659,19 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) }
 
-          USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol)
+          USER =
+            T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol)
           ASSISTANT =
-            T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol)
+            T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol)
+
+          class << self
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol])
+            end
+            def values
+            end
+          end
         end
 
         class Attachment < OpenAI::BaseModel
@@ -754,6 +784,17 @@ module OpenAI
             def to_hash
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
@@ -766,6 +807,12 @@ module OpenAI
         extend OpenAI::Union
 
         Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
+
+        class << self
+          sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+          def variants
+          end
+        end
       end
 
       class TruncationStrategy < OpenAI::BaseModel
@@ -830,9 +877,22 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) }
 
-          AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol)
+          AUTO =
+            T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol)
           LAST_MESSAGES =
-            T.let(:last_messages, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol)
+            T.let(
+              :last_messages,
+              OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol
+            )
+
+          class << self
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol])
+            end
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi
index 39cc0ab8..28ea5210 100644
--- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi
@@ -92,8 +92,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi
index a6246d8c..fea1250b 100644
--- a/rbi/lib/openai/models/beta/threads/run_status.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_status.rbi
@@ -22,6 +22,12 @@ module OpenAI
           COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol)
           INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol)
           EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
index f4e3da46..13ecfae5 100644
--- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
@@ -272,6 +272,17 @@ module OpenAI
               end
             end
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
index 292195bb..4b26a9eb 100644
--- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
@@ -178,6 +178,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
index 3bd957cc..0423fad0 100644
--- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
@@ -197,6 +197,17 @@ module OpenAI
                 :default_2024_08_21,
                 OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol
               )
+
+            class << self
+              sig do
+                override
+                  .returns(
+                    T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol]
+                  )
+              end
+              def values
+              end
+            end
           end
         end
@@ -346,6 +357,17 @@ module OpenAI
                   :text,
                   OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol
                 )
+
+              class << self
+                sig do
+                  override
+                    .returns(
+                      T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol]
+                    )
+                end
+                def values
+                end
+              end
             end
           end
         end
diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
index b5d5805b..d55ca19e 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
@@ -324,6 +324,12 @@ module OpenAI
             T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol)
           RATE_LIMIT_EXCEEDED =
             T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
@@ -341,6 +347,12 @@ module OpenAI
           FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol)
           COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol)
           EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         # The details of the run step.
@@ -356,6 +368,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
+                )
+            end
+            def variants
+            end
+          end
         end
 
        # The type of run step, which can be either `message_creation` or `tool_calls`.
@@ -369,6 +392,12 @@ module OpenAI
           MESSAGE_CREATION =
             T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol)
           TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         class Usage < OpenAI::BaseModel
diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
index cbc714ec..3753e97f 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
@@ -77,6 +77,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
index 9e59ca4b..7216c68b 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
@@ -15,8 +15,14 @@ module OpenAI
           STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT =
             T.let(
               :"step_details.tool_calls[*].file_search.results[*].content",
-              OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol
+              OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol
             )
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi
index ef62a39d..8a8e6212 100644
--- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi
@@ -123,8 +123,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi
index 85c08734..d69faf62 100644
--- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi
@@ -19,6 +19,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi
index 0043de5a..50b03360 100644
--- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi
@@ -19,6 +19,17 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]
+                )
+            end
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi
index 3271a675..0131e9f7 100644
--- a/rbi/lib/openai/models/chat/chat_completion.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion.rbi
@@ -231,6 +231,12 @@ module OpenAI
             T.let(:content_filter, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol)
           FUNCTION_CALL =
             T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         class Logprobs < OpenAI::BaseModel
@@ -293,6 +299,12 @@ module OpenAI
         SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
         DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
index fe1dc7f0..02f68760 100644
--- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
@@ -247,6 +247,36 @@ module OpenAI
               )
             }
           end
+
+          class << self
+            sig do
+              override
+                .returns(
+                  [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal]
+                )
+            end
+            def variants
+            end
+          end
+        end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [
+                  String,
+                  T::Array[
+                    T.any(
+                      OpenAI::Models::Chat::ChatCompletionContentPartText,
+                      OpenAI::Models::Chat::ChatCompletionContentPartRefusal
+                    )
+                  ]
+                ]
+              )
+          end
+          def variants
+          end
         end
       end
diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
index 2cd8fe2e..cd667fba 100644
--- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
@@ -64,11 +64,17 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) }
 
-        WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol)
-        MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol)
-        FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol)
-        OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol)
-        PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol)
+        WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+        MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+        FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+        OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+        PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol]) }
+          def values
+          end
+        end
       end
 
       # The voice the model uses to respond. Supported voices are `alloy`, `ash`,
@@ -80,14 +86,20 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) }
 
-        ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
-        VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)
+        ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+        VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
index 56cab68c..dafab50f 100644
--- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
@@ -360,6 +360,12 @@ module OpenAI
             ASSISTANT =
               T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)
             TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)
+
+            class << self
+              sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol]) }
+              def values
+              end
+            end
           end
 
           class ToolCall < OpenAI::BaseModel
@@ -475,6 +481,15 @@ module OpenAI
               FUNCTION =
                 T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)
+
+              class << self
+                sig do
+                  override
+                    .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol])
+                end
+                def values
+                end
+              end
             end
           end
         end
@@ -501,6 +516,12 @@ module OpenAI
             T.let(:content_filter, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)
           FUNCTION_CALL =
             T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         class Logprobs < OpenAI::BaseModel
@@ -563,6 +584,12 @@ module OpenAI
         SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)
         DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
index 521b44fb..baf81fd2 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
@@ -92,6 +92,17 @@ module OpenAI
           end
         end
       end
+
+      class << self
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File]
+            )
+        end
+        def variants
+        end
+      end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
index c59d5e11..5b8ea698 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
@@ -90,9 +90,18 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) }
 
-          AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)
-          LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)
-          HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)
+          AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol)
+          LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol)
+          HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol)
+
+          class << self
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol])
+            end
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
index 1c7e9d96..89a3b585 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
@@ -99,9 +99,20 @@ module OpenAI
            T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) }
 
           WAV =
-            T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol)
+            T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol)
           MP3 =
-            T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol)
+            T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol)
+
+          class << self
+            sig do
+              override
+                .returns(
+                  T::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol]
+                )
+            end
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
index 935cbc88..2ccc5505 100644
--- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
@@ -71,6 +71,12 @@ module OpenAI
         ChatCompletionContentPartTextArray =
           T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter)
+
+        class << self
+          sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) }
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi
index 7dbb72ea..9efb8e97 100644
--- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi
@@ -22,6 +22,17 @@ module OpenAI
             )
           }
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi
index 5e747ff9..efa22f99 100644
--- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi
@@ -9,8 +9,14 @@ module OpenAI
       TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) }
       OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) }
 
-      TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol)
-      AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::OrSymbol)
+      TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol)
+      AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol)
+
+      class << self
+        sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol]) }
+        def values
+        end
+      end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
index 9773677d..4996c9f4 100644
--- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
@@ -60,6 +60,12 @@ module OpenAI
         ChatCompletionContentPartTextArray =
           T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter)
+
+        class << self
+          sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) }
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi
index dd316075..77577cd0 100644
--- a/rbi/lib/openai/models/chat/chat_completion_role.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi
@@ -10,12 +10,18 @@ module OpenAI
       TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) }
       OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) }
 
-      DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
-      SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
-      USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
-      ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
-      TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
-      FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::OrSymbol)
+      DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+      SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+      USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+      ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+      TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+      FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol)
+
+      class << self
+        sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol]) }
+        def values
+        end
+      end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
index ca2dc2e7..f8cb806e 100644
--- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
@@ -71,6 +71,12 @@ module OpenAI
         ChatCompletionContentPartTextArray =
           T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter)
+
+        class << self
+          sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) }
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi
index 9a79b1ff..9da6e2a7 100644
--- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi
@@ -35,9 +35,26 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) }
 
-        NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol)
-        AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol)
-        REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol)
+        NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol)
+        AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol)
+        REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol]) }
+          def values
+          end
+        end
+      end
+
+      class << self
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice]
+            )
+        end
+        def variants
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
index 6f5f249b..42697bfe 100644
--- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
@@ -67,6 +67,12 @@ module OpenAI
         ChatCompletionContentPartTextArray =
           T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter)
+
+        class << self
+          sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) }
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
index dde9e769..58bd1d57 100644
--- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
@@ -142,6 +142,27 @@ module OpenAI
         ChatCompletionContentPartArray =
           T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Converter)
+
+        class << self
+          sig do
+            override
+              .returns(
+                [
+                  String,
+                  T::Array[
+                    T.any(
+                      OpenAI::Models::Chat::ChatCompletionContentPartText,
+                      OpenAI::Models::Chat::ChatCompletionContentPartImage,
+                      OpenAI::Models::Chat::ChatCompletionContentPartInputAudio,
+                      OpenAI::Models::Chat::ChatCompletionContentPart::File
+                    )
+                  ]
+                ]
+              )
+          end
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi
index 9ce1cd00..2bfbf47c 100644
--- a/rbi/lib/openai/models/chat/completion_create_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_create_params.rbi
@@ -705,6 +705,12 @@ module OpenAI
         extend OpenAI::Union
 
        Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
+
+        class << self
+          sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+          def variants
+          end
+        end
       end
 
       # Deprecated in favor of `tool_choice`.
@@ -746,9 +752,31 @@ module OpenAI
            T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) }
 
           NONE =
-            T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol)
+            T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol)
           AUTO =
-            T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol)
+            T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol)
+
+          class << self
+            sig do
+              override
+                .returns(
+                  T::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol]
+                )
+            end
+            def values
+            end
+          end
+        end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]
+              )
+          end
+          def variants
+          end
         end
       end
@@ -809,8 +837,14 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) }
 
-        TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol)
-        AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol)
+        TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol)
+        AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol]) }
+          def values
+          end
+        end
       end
 
       # An object specifying the format that the model must output.
@@ -836,6 +870,17 @@ module OpenAI
             )
           }
         end
+
+        class << self
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject]
+              )
+          end
+          def variants
+          end
+        end
       end
 
      # Specifies the latency tier to use for processing the request. This parameter is
@@ -859,8 +904,14 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) }
 
-        AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)
-        DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)
+        AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol)
+        DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) }
+          def values
+          end
+        end
       end
 
      # Up to 4 sequences where the API will stop generating further tokens. The
@@ -871,6 +922,12 @@ module OpenAI
         Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} }
 
         StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter)
+
+        class << self
+          sig { override.returns([String, T::Array[String]]) }
+          def variants
+          end
+        end
       end
 
       class WebSearchOptions < OpenAI::BaseModel
@@ -952,14 +1009,31 @@ module OpenAI
           end
 
           LOW =
-            T.let(:low, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol)
+            T.let(
+              :low,
+              OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol
+            )
           MEDIUM =
             T.let(
               :medium,
-              OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol
+              OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol
             )
           HIGH =
-            T.let(:high, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol)
+            T.let(
+              :high,
+              OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol
+            )
+
+          class << self
+            sig do
+              override
+                .returns(
+                  T::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol]
+                )
+            end
+            def values
+            end
+          end
         end
 
         class UserLocation < OpenAI::BaseModel
diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi
index 096a12f8..e91a5f65 100644
--- a/rbi/lib/openai/models/chat/completion_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_list_params.rbi
@@ -97,8 +97,14 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) }
 
-        ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)
-        DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)
+        ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol)
+        DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
index d0cfdba1..8bffd407 100644
--- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
@@ -75,8 +75,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi
index 20705fb9..deecbd65 100644
--- a/rbi/lib/openai/models/chat_model.rbi
+++ b/rbi/lib/openai/models/chat_model.rbi
@@ -8,54 +8,61 @@ module OpenAI
       TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) }
       OrSymbol = T.type_alias {
T.any(Symbol, OpenAI::Models::ChatModel::TaggedSymbol) } - O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::OrSymbol) - O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::OrSymbol) - O1 = T.let(:o1, OpenAI::Models::ChatModel::OrSymbol) - O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::OrSymbol) - O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) - O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::OrSymbol) - O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::TaggedSymbol) + O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::TaggedSymbol) + O1 = T.let(:o1, OpenAI::Models::ChatModel::TaggedSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) + O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::TaggedSymbol) + O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::TaggedSymbol) + O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::TaggedSymbol) + O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4O_AUDIO_PREVIEW_2024_10_01 = - T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::OrSymbol) + T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4O_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::OrSymbol) + T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_SEARCH_PREVIEW = T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::OrSymbol) + T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_MINI_SEARCH_PREVIEW = + T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4O_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) + 
T.let(:"gpt-4o-search-preview-2025-03-11", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::OrSymbol) - CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::OrSymbol) - GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::OrSymbol) - GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::OrSymbol) - GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_16K = T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::OrSymbol) - GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::OrSymbol) + T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::TaggedSymbol) + CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_16K 
= T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ChatModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 8ceb7ea4..d9019724 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -77,12 +77,18 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } - EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::OrSymbol) - LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::OrSymbol) + EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ComparisonFilter::Type::TaggedSymbol]) } + def values + end + end end # The value to compare against the attribute key; supports string, number, or @@ -91,6 +97,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } + + class << self + sig { override.returns([String, Float, T::Boolean]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 5ecf87af..7cc42500 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -84,6 +84,12 @@ module OpenAI STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol]) } + def values + end + end end class Logprobs < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index e2edd3e9..d0fbc5a9 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -350,9 +350,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) 
} GPT_3_5_TURBO_INSTRUCT = - T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::OrSymbol) + T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) + + class << self + sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } + def variants + end + end end # The prompt(s) to generate completions for, encoded as a string, array of @@ -374,6 +380,12 @@ module OpenAI IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) + + class << self + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def variants + end + end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -384,6 +396,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + + class << self + sig { override.returns([String, T::Array[String]]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 6a79164a..3f6d0c84 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -57,6 +57,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } + + class << self + sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } + def variants + end + end end # Type of operation: `and` or `or`. @@ -66,8 +72,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } - AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::OrSymbol) - OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::OrSymbol) + AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) + OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::CompoundFilter::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index bc8012b1..8492ccdf 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -126,6 +126,12 @@ module OpenAI IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) + + class << self + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def variants + end + end end # ID of the model to use. 
You can use the @@ -137,6 +143,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } + def variants + end + end end # The format to return the embeddings in. Can be either `float` or @@ -148,8 +160,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } - FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) + FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) + BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 2b064f56..54a2f972 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -8,9 +8,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingModel::TaggedSymbol) } - TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::OrSymbol) - TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::OrSymbol) - TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::OrSymbol) + TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::TaggedSymbol) + TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::TaggedSymbol) + TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::EmbeddingModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index eb1f53b5..4f77f905 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -12,6 +12,17 @@ module OpenAI fixed: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 211e8f69..46711f26 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -16,6 +16,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 4af7b2fc..be376ff6 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ 
b/rbi/lib/openai/models/file_list_params.rbi @@ -86,8 +86,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileListParams::Order::TaggedSymbol) } - ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::OrSymbol) + ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FileListParams::Order::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 2918d2f0..c96e406d 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -157,6 +157,12 @@ module OpenAI FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FileObject::Purpose::TaggedSymbol) FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FileObject::Purpose::TaggedSymbol]) } + def values + end + end end # Deprecated. The current status of the file, which can be either `uploaded`, @@ -170,6 +176,12 @@ module OpenAI UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FileObject::Status::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 77bba00f..0caf4b3f 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -12,12 +12,18 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FilePurpose::TaggedSymbol) } - ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::OrSymbol) - BATCH = T.let(:batch, OpenAI::Models::FilePurpose::OrSymbol) - FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::OrSymbol) - VISION = T.let(:vision, OpenAI::Models::FilePurpose::OrSymbol) - USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::OrSymbol) - EVALS = T.let(:evals, OpenAI::Models::FilePurpose::OrSymbol) + ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::TaggedSymbol) + BATCH = T.let(:batch, OpenAI::Models::FilePurpose::TaggedSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::TaggedSymbol) + VISION = T.let(:vision, OpenAI::Models::FilePurpose::TaggedSymbol) + USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::TaggedSymbol) + EVALS = T.let(:evals, OpenAI::Models::FilePurpose::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FilePurpose::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index bc3a62a0..cce5ee80 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -388,6 +388,12 @@ module OpenAI extend OpenAI::Union Variants = 
type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -396,6 +402,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -404,6 +416,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end @@ -423,6 +441,12 @@ module OpenAI SUCCEEDED = T.let(:succeeded, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol]) } + def values + end + end end class Method < OpenAI::BaseModel @@ -593,6 +617,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -601,6 +631,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -609,6 +645,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -617,6 +659,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end end @@ -721,6 +769,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -729,6 +783,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -737,6 +797,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end end @@ -751,6 +817,12 @@ module OpenAI SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 7bedb937..aeb14b83 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -117,6 +117,12 @@ module OpenAI INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol]) } + def values + end + end end # The type of event. @@ -129,6 +135,12 @@ module OpenAI MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index eb52be26..1f474701 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -201,10 +201,16 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol) + BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) + DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) + + class << self + sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } + def variants + end + end end class Hyperparameters < OpenAI::BaseModel @@ -270,6 +276,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # Scaling factor for the learning rate. 
A smaller learning rate may be useful to @@ -278,6 +290,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -286,6 +304,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end @@ -566,6 +590,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -574,6 +604,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -582,6 +618,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -590,6 +632,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end end @@ -696,6 +744,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -704,6 +758,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } + + class << self + sig { override.returns([Symbol, Float]) } + def variants + end + end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -712,6 +772,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } + + class << self + sig { override.returns([Symbol, Integer]) } + def variants + end + end end end end @@ -724,8 +790,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } - SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) + SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) + DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 09479fbe..821d44c7 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -115,6 +115,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + def variants + end + end end # The format in which the generated images are returned. Must be one of `url` or @@ -127,8 +133,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } - URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol) + URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol]) } + def values + end + end end # The size of the generated images. 
Must be one of `256x256`, `512x512`, or @@ -139,9 +151,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol) + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 02a9dc31..f5b930f4 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -149,6 +149,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + def variants + end + end end # The format in which the generated images are returned. Must be one of `url` or @@ -160,8 +166,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } - URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol) + URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol]) } + def values + end + end end # The size of the generated images. 
Must be one of `256x256`, `512x512`, or @@ -172,9 +184,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::OrSymbol) + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 4754ce0e..9aab13b2 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -157,6 +157,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + def variants + end + end end # The quality of the image that will be generated. `hd` creates images with finer @@ -168,8 +174,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } - STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) + STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) } + def values + end + end end # The format in which the generated images are returned. Must be one of `url` or @@ -182,8 +194,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } - URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol) + URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) + B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol]) } + def values + end + end end # The size of the generated images. 
Must be one of `256x256`, `512x512`, or @@ -195,11 +213,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) - NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::OrSymbol) + NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) } + def values + end + end end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -212,8 +236,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } - VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) - NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::OrSymbol) + VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) + NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 3cc4d5f4..ba80b7d9 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -8,8 +8,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageModel::TaggedSymbol) } - DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::OrSymbol) - DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::OrSymbol) + DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) + DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 4f6d074e..f30ecf4b 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -510,6 +510,14 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) + + class << self + sig do + override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + end + 
def values + end + end end module HarassmentThreatening @@ -522,6 +530,17 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) + + class << self + sig do + override + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + ) + end + def values + end + end end module Hate @@ -533,6 +552,12 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } + def values + end + end end module HateThreatening @@ -544,6 +569,15 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + end + def values + end + end end module Illicit @@ -555,6 +589,12 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } + def values + end + end end module IllicitViolent @@ -566,6 +606,15 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + end + def values + end + end end module SelfHarm @@ -578,6 +627,12 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } + def values + end + end end module SelfHarmInstruction @@ -592,6 +647,17 @@ module OpenAI T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + + class << self + sig do + override + .returns( + T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + ) + end + def values + end + end end module SelfHarmIntent @@ -605,6 +671,15 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + end + def values + end + end end module Sexual @@ -617,6 +692,12 @@ module OpenAI TEXT = T.let(:text, 
OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } + def values + end + end end module SexualMinor @@ -628,6 +709,15 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + end + def values + end + end end module Violence @@ -640,6 +730,12 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } + def values + end + end end module ViolenceGraphic @@ -653,6 +749,15 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + end + def values + end + end end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index a23b68a0..eb0cf50f 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -106,6 +106,17 @@ module OpenAI ModerationMultiModalInputArray = T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter) + + class << self + sig do + override + .returns( + [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]] + ) + end + def variants + end + end end # The content moderation model you would like to use. 
Learn more in @@ -116,6 +127,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)} } + + class << self + sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index a08f4a80..97ae9021 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -8,11 +8,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ModerationModel::TaggedSymbol) } - OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) + OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) OMNI_MODERATION_2024_09_26 = - T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::OrSymbol) - TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::OrSymbol) - TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::OrSymbol) + T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::TaggedSymbol) + TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) + TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ModerationModel::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 705b6af9..5d0b8337 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -8,6 +8,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} } + + class << self + sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 909f27d4..f221616f 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -73,8 +73,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } - CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) - DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) + DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 2cf29ee5..ad236c65 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -14,9 +14,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, 
OpenAI::Models::ReasoningEffort) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ReasoningEffort::TaggedSymbol) } - LOW = T.let(:low, OpenAI::Models::ReasoningEffort::OrSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::OrSymbol) + LOW = T.let(:low, OpenAI::Models::ReasoningEffort::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::ReasoningEffort::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 638039d3..82add0e9 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -79,10 +79,16 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } - MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) + MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 42bb10de..bf8bc082 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -144,6 +144,26 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [ + String, + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) + ] + ] + ) + end + def variants + end + end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -155,10 +175,16 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } - USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) + USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol]) } + def values + end + end end # The type of the message input. Always `message`. @@ -169,7 +195,13 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } - MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) + MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 3eba1cb5..43f9d2b2 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -93,6 +93,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } + + class << self + sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } + def variants + end + end end class RankingOptions < OpenAI::BaseModel @@ -148,9 +154,20 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } - AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = - T.let(:"default-2024-11-15", OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) + T.let( + :"default-2024-11-15", + OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol + ) + + class << self + sig do + override.returns(T::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol]) + end + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index dfee9da7..57d5f60d 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -89,8 +89,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } - ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) - DESC = T.let(:desc, 
OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol) + ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index d36aa43f..c7462537 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -553,6 +553,12 @@ module OpenAI T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol]) } + def values + end + end end end @@ -572,6 +578,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + ) + end + def variants + end + end end # The truncation strategy to use for the model response. @@ -589,6 +606,12 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::Response::Truncation::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 42fee896..cb72f53d 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -232,6 +232,17 @@ module OpenAI end end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] + ) + end + def variants + end + end end # The status of the code interpreter tool call. 
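
A minimal usage sketch of the `values` accessor these hunks declare, assuming the generated runtime methods mirror the signatures; the call site below is illustrative and not part of the patch:

require "openai"

# `values` enumerates every declared member of an enum module; each
# element is typed as that module's TaggedSymbol, so the result can be
# passed anywhere the looser OrSymbol input type is accepted.
OpenAI::Models::Responses::InputItemListParams::Order.values
# => [:asc, :desc]
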
@@ -249,6 +260,15 @@ module OpenAI T.let(:interpreting, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol]) + end + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 63676938..d7f5e2c6 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -262,14 +262,27 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) } - LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + LEFT = + T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) RIGHT = - T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) WHEEL = - T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - BACK = T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + BACK = + T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) FORWARD = - T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) + T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + + class << self + sig do + override + .returns( + T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol] + ) + end + def values + end + end end end @@ -596,6 +609,17 @@ module OpenAI def to_hash end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] + ) + end + def variants + end + end end class PendingSafetyCheck < OpenAI::BaseModel @@ -646,9 +670,17 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) + IN_PROGRESS = + T.let(:in_progress, 
OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol]) } + def values + end + end end # The type of the computer call. Always `computer_call`. @@ -660,7 +692,13 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } COMPUTER_CALL = - T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) + T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 7d10f0bc..332d500c 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -163,6 +163,15 @@ module OpenAI T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol]) + end + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index 0cc16286..adbde5c6 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -19,6 +19,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index b040d85c..ac337005 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -99,6 +99,15 @@ module OpenAI fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) } end + + class << self + sig do + override + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 6f2a4562..d6185fc4 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -99,6 +99,15 @@ module OpenAI fixed: 
T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) } end + + class << self + sig do + override + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index be470d61..ef53ffa4 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -530,6 +530,34 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [ + String, + T::Array[ + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) + ] + ] + ) + end + def variants + end + end end # How the model should select which tool (or tools) to use when generating a @@ -548,6 +576,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + ) + end + def variants + end + end end # The truncation strategy to use for the model response. 
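
A hedged sketch of consuming the ToolChoice union whose `variants` signature is declared above; `describe_tool_choice` is a hypothetical helper, and the `type`/`name` attributes are assumed from the models named in the returned tuple:

def describe_tool_choice(tool_choice)
  case tool_choice
  in Symbol
    "option: #{tool_choice}" # :none, :auto, or :required
  in OpenAI::Models::Responses::ToolChoiceTypes
    "hosted tool: #{tool_choice.type}"
  in OpenAI::Models::Responses::ToolChoiceFunction
    "function: #{tool_choice.name}"
  end
end
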
@@ -565,8 +604,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) - DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) + DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 90d6cf33..b2a69187 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -74,6 +74,12 @@ module OpenAI T.let(:failed_to_download_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) IMAGE_FILE_NOT_FOUND = T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 439cbcd7..e3a77e36 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -98,11 +98,20 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) + T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + SEARCHING = + T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol]) } + def values + end + end end class Result < OpenAI::BaseModel @@ -190,6 +199,12 @@ module OpenAI extend OpenAI::Union Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } + + class << self + sig { override.returns([String, Float, T::Boolean]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 8c4e9b9a..3019db16 100644 --- 
a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -29,6 +29,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index e2fb2951..b721ef25 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -105,9 +105,17 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 4a38931a..bb1f3e0a 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -97,6 +97,15 @@ module OpenAI T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol]) + end + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 1a4c8366..0fa5fadb 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -66,10 +66,17 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) + IN_PROGRESS = + T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) + SEARCHING = T.let(:searching, 
OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 3b6a4039..ca498afe 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -18,11 +18,20 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } FILE_SEARCH_CALL_RESULTS = - T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) MESSAGE_INPUT_IMAGE_IMAGE_URL = - T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = - T.let(:"computer_call_output.output.image_url", OpenAI::Models::Responses::ResponseIncludable::OrSymbol) + T.let( + :"computer_call_output.output.image_url", + OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol + ) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index e6db921a..e0768a09 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -63,8 +63,14 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } - MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) + MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) + WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index 37ed1a5a..b1e862e0 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -17,6 +17,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index ade87200..12174be2 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ 
-82,9 +82,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } - HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) + HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index a85418a6..f99aa262 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -157,9 +157,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } - USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) + USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol]) } + def values + end + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -173,9 +179,17 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) + T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol]) } + def values + end + end end # The type of the message input. Always set to `message`. 
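
One way the new enum introspection might be used at a boundary: a guard that rejects unknown members before a request is built. `coerce_detail` is a hypothetical helper under that assumption, not SDK API:

def coerce_detail(detail)
  allowed = OpenAI::Models::Responses::ResponseInputImage::Detail.values
  unless allowed.include?(detail)
    raise ArgumentError, "detail must be one of #{allowed.inspect}, got #{detail.inspect}"
  end
  detail
end

coerce_detail(:auto) # => :auto
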
@@ -187,7 +201,13 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } - MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) + MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol]) } + def values + end + end end end @@ -349,11 +369,23 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + T.let( + :in_progress, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol]) + end + def values + end + end end end @@ -448,11 +480,23 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + T.let( + :in_progress, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) + T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) + + class << self + sig do + override + .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol]) + end + def values + end + end end end @@ -484,6 +528,17 @@ module OpenAI def to_hash end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 0aaaacd6..1c80f8f2 100644 --- 
a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -140,6 +140,12 @@ module OpenAI USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol]) } + def values + end + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -157,6 +163,12 @@ module OpenAI COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol]) } + def values + end + end end # The type of the message input. Always set to `message`. @@ -168,6 +180,12 @@ module OpenAI T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index 0d59846c..4bfa929a 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -22,6 +22,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index f4a81f11..f0406f67 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -20,6 +20,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 63f0758f..681787d8 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -103,6 +103,15 @@ module OpenAI fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, 
OpenAI::Models::Responses::ResponseOutputRefusal) } end + + class << self + sig do + override + .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + end + def variants + end + end end # The status of the message input. One of `in_progress`, `completed`, or @@ -114,9 +123,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 78b35143..2b88cd02 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -256,6 +256,17 @@ module OpenAI def to_hash end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index aabd22e0..b31a3e74 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -112,9 +112,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) + INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 95b80ac1..c4d55827 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -15,6 +15,12 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) IN_PROGRESS = 
T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseStatus::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 4c87665c..c549a897 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -46,6 +46,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index f7d64bc8..c557f685 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -286,6 +286,17 @@ module OpenAI def to_hash end end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 0d0c2a77..3e2da0c3 100644 --- 
a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -20,6 +20,17 @@ module OpenAI ) } end + + class << self + sig do + override + .returns( + [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] + ) + end + def variants + end + end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index a4acb23d..a3f23a00 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -17,9 +17,15 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } - NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) - AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol) + NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) + REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 45b4ecfc..efede45b 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -47,13 +47,19 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } - FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) WEB_SEARCH_PREVIEW = - T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) COMPUTER_USE_PREVIEW = - T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 7b583956..7d675e8c 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -80,9 +80,16 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } OrSymbol = T.type_alias { T.any(Symbol, 
OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } - WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + WEB_SEARCH_PREVIEW = + T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) + T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol]) } + def values + end + end end # High level guidance for the amount of context window space to use for the @@ -95,9 +102,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } - LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) + LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol]) } + def values + end + end end class UserLocation < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 1c929891..ebec2d7d 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -11,11 +11,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::TaggedSymbol) } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::OrSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::OrSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::OrSymbol) + O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::TaggedSymbol) + O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::TaggedSymbol) + COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::TaggedSymbol) COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::OrSymbol) + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::TaggedSymbol) + + class << self + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } + def variants + end + end end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 7f0dbe40..18dcc179 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -140,6 +140,12 @@ module OpenAI COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::Upload::Status::TaggedSymbol]) 
} + def values + end + end end end end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 92aa8be2..dadca7f7 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -256,6 +256,12 @@ module OpenAI EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::VectorStore::Status::TaggedSymbol]) } + def values + end + end end class ExpiresAfter < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index c2400193..38db6860 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -89,8 +89,14 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } - ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::OrSymbol) + ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) + + class << self + sig { override.returns(T::Array[OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol]) } + def values + end + end end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index a8bd0484..377f1ad9 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -95,6 +95,12 @@ module OpenAI Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + + class << self + sig { override.returns([String, T::Array[String]]) } + def variants + end + end end # A filter to apply based on file attributes. 
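
The Query union above is fixed to T.any(String, T::Array[String]); a minimal normalizer under that assumption (`normalize_query` is a hypothetical helper, not part of the SDK):

def normalize_query(query)
  case query
  in String then [query]
  in Array then query
  end
end

normalize_query("chunked uploads")   # => ["chunked uploads"]
normalize_query(%w[chunked uploads]) # => ["chunked", "uploads"]
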
@@ -103,6 +109,12 @@ module OpenAI
 
       Variants =
         type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} }
+
+      class << self
+        sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) }
+        def variants
+        end
+      end
     end
 
     class RankingOptions < OpenAI::BaseModel
@@ -153,9 +165,18 @@ module OpenAI
         OrSymbol =
           T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) }
 
-        AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)
+        AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol)
         DEFAULT_2024_11_15 =
-          T.let(:"default-2024-11-15", OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)
+          T.let(
+            :"default-2024-11-15",
+            OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol
+          )
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi
index e4039a7c..22a47028 100644
--- a/rbi/lib/openai/models/vector_store_search_response.rbi
+++ b/rbi/lib/openai/models/vector_store_search_response.rbi
@@ -90,6 +90,12 @@ module OpenAI
         extend OpenAI::Union
 
         Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
+
+        class << self
+          sig { override.returns([String, Float, T::Boolean]) }
+          def variants
+          end
+        end
       end
 
       class Content < OpenAI::BaseModel
@@ -137,6 +143,12 @@ module OpenAI
             T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) }
 
           TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
index f2dd4d99..1dfb5480 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
@@ -102,6 +102,12 @@ module OpenAI
           extend OpenAI::Union
 
           Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
+
+          class << self
+            sig { override.returns([String, Float, T::Boolean]) }
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
index 22045ec2..7cb94eb9 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
@@ -116,10 +116,18 @@ module OpenAI
             T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) }
 
           IN_PROGRESS =
-            T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)
-          COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)
-          FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)
-          CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)
+            T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol)
+          COMPLETED =
+            T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol)
+          FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol)
+          CANCELLED =
+            T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
@@ -132,8 +140,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
index d8ec4fba..bf6ee554 100644
--- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
@@ -102,6 +102,12 @@ module OpenAI
           extend OpenAI::Union
 
           Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
+
+          class << self
+            sig { override.returns([String, Float, T::Boolean]) }
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
index 20a4bce5..e4ff05f1 100644
--- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
@@ -104,10 +104,16 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) }
 
-          IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)
-          COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)
-          FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)
-          CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)
+          IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol)
+          COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol)
+          FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol)
+          CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol]) }
+            def values
+            end
+          end
         end
 
         # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
@@ -119,8 +125,14 @@ module OpenAI
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) }
 
-          ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)
-          DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)
+          ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol)
+          DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
index fe35965e..69fe9291 100644
--- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
@@ -59,6 +59,12 @@ module OpenAI
           extend OpenAI::Union
 
           Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
+
+          class << self
+            sig { override.returns([String, Float, T::Boolean]) }
+            def variants
+            end
+          end
         end
       end
     end
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
index ea53495c..8e2181bb 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
@@ -225,6 +225,12 @@ module OpenAI
             T.let(:unsupported_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol)
           INVALID_FILE =
             T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol)
+
+          class << self
+            sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol]) }
+            def values
+            end
+          end
         end
       end
 
@@ -242,12 +248,24 @@ module OpenAI
         COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol)
         CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol)
         FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol]) }
+          def values
+          end
+        end
       end
 
       module Attribute
         extend OpenAI::Union
 
         Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
+
+        class << self
+          sig { override.returns([String, Float, T::Boolean]) }
+          def variants
+          end
+        end
       end
     end
   end
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
index 61961f17..d32c3323 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
@@ -186,6 +186,12 @@ module OpenAI
         COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol)
         CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol)
         FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol)
+
+        class << self
+          sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol]) }
+          def values
+          end
+        end
       end
     end
   end
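The RBI churn above follows one pattern: enum constants are re-typed from the permissive `OrSymbol` input alias to `TaggedSymbol`, and every enum and union gains a declared introspection method (`values` or `variants`). A minimal sketch of what the new signatures promise to callers (illustrative application code, not part of this patch):

    # Hypothetical usage; assumes the gem's generated model classes are loaded.
    filter = OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter
    filter.values  # => [:in_progress, :completed, :failed, :cancelled]
    # Each element is declared as Filter::TaggedSymbol, so Sorbet can check
    # exhaustive case statements over the enum at type-check time.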
diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs
index 8a1f89cf..ef2cc9d6 100644
--- a/sig/openai/models/all_models.rbs
+++ b/sig/openai/models/all_models.rbs
@@ -16,7 +16,7 @@ module OpenAI
       COMPUTER_USE_PREVIEW: :"computer-use-preview"
       COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11"
 
-      def self.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro"
+      def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro"
       | :"o1-pro-2025-03-19"
       | :"computer-use-preview"
       | :"computer-use-preview-2025-03-11")]
diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs
index 30772347..2b0a7895 100644
--- a/sig/openai/models/audio/speech_create_params.rbs
+++ b/sig/openai/models/audio/speech_create_params.rbs
@@ -53,7 +53,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::Audio::speech_model]
+          def self?.variants: -> [String, OpenAI::Models::Audio::speech_model]
         end
 
         type voice =
@@ -80,7 +80,7 @@ module OpenAI
           SAGE: :sage
           SHIMMER: :shimmer
 
-          def self.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::voice]
+          def self?.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::voice]
         end
 
         type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm
@@ -95,7 +95,7 @@ module OpenAI
           WAV: :wav
           PCM: :pcm
 
-          def self.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::response_format]
+          def self?.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::response_format]
         end
       end
     end
diff --git a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs
index 51c913a4..8fc430d6 100644
--- a/sig/openai/models/audio/speech_model.rbs
+++ b/sig/openai/models/audio/speech_model.rbs
@@ -10,7 +10,7 @@ module OpenAI
         TTS_1_HD: :"tts-1-hd"
         GPT_4O_MINI_TTS: :"gpt-4o-mini-tts"
 
-        def self.values: -> ::Array[OpenAI::Models::Audio::speech_model]
+        def self?.values: -> ::Array[OpenAI::Models::Audio::speech_model]
       end
     end
   end
diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs
index 3834d3cc..8bd43f21 100644
--- a/sig/openai/models/audio/transcription_create_params.rbs
+++ b/sig/openai/models/audio/transcription_create_params.rbs
@@ -71,7 +71,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::audio_model]
+          def self?.variants: -> [String, OpenAI::Models::audio_model]
         end
 
         type timestamp_granularity = :word | :segment
@@ -82,7 +82,7 @@ module OpenAI
           WORD: :word
           SEGMENT: :segment
 
-          def self.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]
+          def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]
         end
       end
     end
diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs
index 32f15ab6..49eb0f09 100644
--- a/sig/openai/models/audio/transcription_create_response.rbs
+++ b/sig/openai/models/audio/transcription_create_response.rbs
@@ -8,7 +8,7 @@ module OpenAI
       module TranscriptionCreateResponse
         extend OpenAI::Union
 
-        def self.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
+        def self?.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
       end
     end
   end
diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs
index 1fc83e72..349684b6 100644
--- a/sig/openai/models/audio/transcription_include.rbs
+++ b/sig/openai/models/audio/transcription_include.rbs
@@ -8,7 +8,7 @@ module OpenAI
 
         LOGPROBS: :logprobs
 
-        def self.values: -> ::Array[OpenAI::Models::Audio::transcription_include]
+        def self?.values: -> ::Array[OpenAI::Models::Audio::transcription_include]
       end
     end
   end
diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs
index caffcd1f..8441d201 100644
--- a/sig/openai/models/audio/transcription_stream_event.rbs
+++ b/sig/openai/models/audio/transcription_stream_event.rbs
@@ -8,7 +8,7 @@ module OpenAI
       module TranscriptionStreamEvent
         extend OpenAI::Union
 
-        def self.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent]
+        def self?.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent]
      end
     end
   end
diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs
index d5dcc175..222208b9 100644
--- a/sig/openai/models/audio/translation_create_params.rbs
+++ b/sig/openai/models/audio/translation_create_params.rbs
@@ -49,7 +49,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::audio_model]
+          def self?.variants: -> [String, OpenAI::Models::audio_model]
         end
 
         type response_format = :json | :text | :srt | :verbose_json | :vtt
@@ -63,7 +63,7 @@ module OpenAI
           VERBOSE_JSON: :verbose_json
           VTT: :vtt
 
-          def self.values: -> ::Array[OpenAI::Models::Audio::TranslationCreateParams::response_format]
+          def self?.values: -> ::Array[OpenAI::Models::Audio::TranslationCreateParams::response_format]
         end
       end
     end
diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs
index 6c26f34d..7b88f273 100644
--- a/sig/openai/models/audio/translation_create_response.rbs
+++ b/sig/openai/models/audio/translation_create_response.rbs
@@ -8,7 +8,7 @@ module OpenAI
       module TranslationCreateResponse
         extend OpenAI::Union
 
-        def self.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]
+        def self?.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]
       end
     end
   end
diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs
index 7c47323a..c5a9d284 100644
--- a/sig/openai/models/audio_model.rbs
+++ b/sig/openai/models/audio_model.rbs
@@ -10,7 +10,7 @@ module OpenAI
       GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe"
       GPT_4O_MINI_TRANSCRIBE: :"gpt-4o-mini-transcribe"
 
-      def self.values: -> ::Array[OpenAI::Models::audio_model]
+      def self?.values: -> ::Array[OpenAI::Models::audio_model]
     end
   end
 end
diff --git a/sig/openai/models/audio_response_format.rbs b/sig/openai/models/audio_response_format.rbs
index ee7b583f..7c91cd99 100644
--- a/sig/openai/models/audio_response_format.rbs
+++ b/sig/openai/models/audio_response_format.rbs
@@ -11,7 +11,7 @@ module OpenAI
       VERBOSE_JSON: :verbose_json
       VTT: :vtt
 
-      def self.values: -> ::Array[OpenAI::Models::audio_response_format]
+      def self?.values: -> ::Array[OpenAI::Models::audio_response_format]
     end
   end
 end
diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs
index 2a3d4888..46840a64 100644
--- a/sig/openai/models/batch.rbs
+++ b/sig/openai/models/batch.rbs
@@ -140,7 +140,7 @@ module OpenAI
         CANCELLING: :cancelling
         CANCELLED: :cancelled
 
-        def self.values: -> ::Array[OpenAI::Models::Batch::status]
+        def self?.values: -> ::Array[OpenAI::Models::Batch::status]
       end
 
       type errors =
diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs
index c73264e6..61f2a0a0 100644
--- a/sig/openai/models/batch_create_params.rbs
+++ b/sig/openai/models/batch_create_params.rbs
@@ -38,7 +38,7 @@ module OpenAI
 
         NUMBER_24H: :"24h"
 
-        def self.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window]
+        def self?.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window]
       end
 
       type endpoint =
@@ -55,7 +55,7 @@ module OpenAI
         V1_EMBEDDINGS: :"/v1/embeddings"
         V1_COMPLETIONS: :"/v1/completions"
 
-        def self.values: -> ::Array[OpenAI::Models::BatchCreateParams::endpoint]
+        def self?.values: -> ::Array[OpenAI::Models::BatchCreateParams::endpoint]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs
index 2ffe711e..b25b8eaf 100644
--- a/sig/openai/models/beta/assistant_create_params.rbs
+++ b/sig/openai/models/beta/assistant_create_params.rbs
@@ -69,7 +69,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::chat_model]
+          def self?.variants: -> [String, OpenAI::Models::chat_model]
         end
 
         type tool_resources =
@@ -217,7 +217,7 @@ module OpenAI
               end
             end
 
-            def self.variants: -> [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+            def self?.variants: -> [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
           end
         end
       end
diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs
index c8869670..80fc8f41 100644
--- a/sig/openai/models/beta/assistant_list_params.rbs
+++ b/sig/openai/models/beta/assistant_list_params.rbs
@@ -50,7 +50,7 @@ module OpenAI
         ASC: :asc
         DESC: :desc
 
-        def self.values: -> ::Array[OpenAI::Models::Beta::AssistantListParams::order]
+        def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantListParams::order]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs
index d5b4a073..3b848b00 100644
--- a/sig/openai/models/beta/assistant_response_format_option.rbs
+++ b/sig/openai/models/beta/assistant_response_format_option.rbs
@@ -10,7 +10,7 @@ module OpenAI
       module AssistantResponseFormatOption
         extend OpenAI::Union
 
-        def self.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
+        def self?.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs
index 4ed62507..ae88c40e 100644
--- a/sig/openai/models/beta/assistant_stream_event.rbs
+++ b/sig/openai/models/beta/assistant_stream_event.rbs
@@ -488,7 +488,7 @@ module OpenAI
           def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::error_event
         end
 
-        def self.variants: -> [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
+        def self?.variants: -> [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs
index 48827d7f..d3a43fed 100644
--- a/sig/openai/models/beta/assistant_tool.rbs
+++ b/sig/openai/models/beta/assistant_tool.rbs
@@ -9,7 +9,7 @@ module OpenAI
       module AssistantTool
         extend OpenAI::Union
 
-        def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+        def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs
index ab690c71..69e6e1b5 100644
--- a/sig/openai/models/beta/assistant_tool_choice.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice.rbs
@@ -32,7 +32,7 @@ module OpenAI
         CODE_INTERPRETER: :code_interpreter
         FILE_SEARCH: :file_search
 
-        def self.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoice::type_]
+        def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoice::type_]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs
index ee421612..4b1d9ff3 100644
--- a/sig/openai/models/beta/assistant_tool_choice_option.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs
@@ -17,10 +17,10 @@ module OpenAI
           AUTO: :auto
           REQUIRED: :required
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto]
         end
 
-        def self.variants: -> [OpenAI::Models::Beta::AssistantToolChoiceOption::auto, OpenAI::Models::Beta::AssistantToolChoice]
+        def self?.variants: -> [OpenAI::Models::Beta::AssistantToolChoiceOption::auto, OpenAI::Models::Beta::AssistantToolChoice]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs
index d3efa3c2..490fde16 100644
--- a/sig/openai/models/beta/assistant_update_params.rbs
+++ b/sig/openai/models/beta/assistant_update_params.rbs
@@ -135,7 +135,7 @@ module OpenAI
           GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125"
           GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613"
 
-          def self.variants: -> [String, (:"o3-mini"
+          def self?.variants: -> [String, (:"o3-mini"
           | :"o3-mini-2025-01-31"
           | :o1
           | :"o1-2024-12-17"
diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs
index bd1238f4..c18305c9 100644
--- a/sig/openai/models/beta/file_search_tool.rbs
+++ b/sig/openai/models/beta/file_search_tool.rbs
@@ -77,7 +77,7 @@ module OpenAI
             AUTO: :auto
             DEFAULT_2024_08_21: :default_2024_08_21
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker]
           end
         end
       end
diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs
index edd61dcf..fb0a513a 100644
--- a/sig/openai/models/beta/message_stream_event.rbs
+++ b/sig/openai/models/beta/message_stream_event.rbs
@@ -106,7 +106,7 @@ module OpenAI
           def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete
         end
 
-        def self.variants: -> [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
+        def self?.variants: -> [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
       end
     end
   end
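The signature (`.rbs`) edits above and below are one mechanical change: `def self.values` / `def self.variants` becomes `def self?.values` / `def self?.variants`. In RBS, `def self?.m` declares `m` as both a singleton method and an instance method, the shape Ruby's `module_function` produces. A minimal sketch of a module that satisfies such a signature (names illustrative, not the SDK's actual implementation):

    module Order
      module_function  # each def below is callable as Order.values and as a private instance method

      def values
        [:asc, :desc]
      end
    end

    Order.values  # => [:asc, :desc]

Existing callers of `SomeEnum.values` are unaffected; the looser `self?.` form simply also admits mixin-style use of the same method.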
diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs
index 9207cecc..027b49a0 100644
--- a/sig/openai/models/beta/run_step_stream_event.rbs
+++ b/sig/openai/models/beta/run_step_stream_event.rbs
@@ -146,7 +146,7 @@ module OpenAI
           def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired
         end
 
-        def self.variants: -> [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
+        def self?.variants: -> [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
       end
     end
   end
diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs
index 4081dbf8..87e168ab 100644
--- a/sig/openai/models/beta/run_stream_event.rbs
+++ b/sig/openai/models/beta/run_stream_event.rbs
@@ -206,7 +206,7 @@ module OpenAI
           def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_expired
         end
 
-        def self.variants: -> [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
+        def self?.variants: -> [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
       end
     end
   end
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index 9c1cf73a..f9593b82 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -87,7 +87,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::chat_model]
+          def self?.variants: -> [String, OpenAI::Models::chat_model]
         end
 
         type thread =
@@ -154,7 +154,7 @@ module OpenAI
 
             MessageContentPartParamArray: message_content_part_param_array
 
-            def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
+            def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
           end
 
           type role = :user | :assistant
@@ -165,7 +165,7 @@ module OpenAI
             USER: :user
             ASSISTANT: :assistant
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role]
           end
 
           type attachment =
@@ -209,7 +209,7 @@ module OpenAI
                 def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search
               end
 
-              def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
+              def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
            end
          end
        end
@@ -359,7 +359,7 @@ module OpenAI
                 end
               end
 
-              def self.variants: -> [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+              def self?.variants: -> [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
            end
          end
        end
@@ -425,7 +425,7 @@ module OpenAI
         module Tool
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+          def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
         end
 
         type truncation_strategy =
@@ -454,7 +454,7 @@ module OpenAI
           AUTO: :auto
           LAST_MESSAGES: :last_messages
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_]
         end
       end
     end
diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs
index f270cee9..12ab7a4d 100644
--- a/sig/openai/models/beta/thread_create_params.rbs
+++ b/sig/openai/models/beta/thread_create_params.rbs
@@ -70,7 +70,7 @@ module OpenAI
 
           MessageContentPartParamArray: message_content_part_param_array
 
-          def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
+          def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
         end
 
         type role = :user | :assistant
@@ -81,7 +81,7 @@ module OpenAI
           USER: :user
           ASSISTANT: :assistant
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::role]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::role]
         end
 
         type attachment =
@@ -125,7 +125,7 @@ module OpenAI
               def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search
             end
 
-            def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]
+            def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]
          end
        end
      end
@@ -275,7 +275,7 @@ module OpenAI
               end
             end
 
-            def self.variants: -> [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+            def self?.variants: -> [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs
index cde96d26..00961486 100644
--- a/sig/openai/models/beta/threads/annotation.rbs
+++ b/sig/openai/models/beta/threads/annotation.rbs
@@ -9,7 +9,7 @@ module OpenAI
         module Annotation
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
+          def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs
index 22600103..84b0d6a2 100644
--- a/sig/openai/models/beta/threads/annotation_delta.rbs
+++ b/sig/openai/models/beta/threads/annotation_delta.rbs
@@ -9,7 +9,7 @@ module OpenAI
         module AnnotationDelta
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
+          def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs
index 0b0987a7..0fb507cc 100644
--- a/sig/openai/models/beta/threads/image_file.rbs
+++ b/sig/openai/models/beta/threads/image_file.rbs
@@ -33,7 +33,7 @@ module OpenAI
           LOW: :low
           HIGH: :high
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFile::detail]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFile::detail]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs
index d0870c96..f645c00f 100644
--- a/sig/openai/models/beta/threads/image_file_delta.rbs
+++ b/sig/openai/models/beta/threads/image_file_delta.rbs
@@ -35,7 +35,7 @@ module OpenAI
           LOW: :low
           HIGH: :high
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::detail]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::detail]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs
index 36929cf8..6039ca67 100644
--- a/sig/openai/models/beta/threads/image_url.rbs
+++ b/sig/openai/models/beta/threads/image_url.rbs
@@ -33,7 +33,7 @@ module OpenAI
           LOW: :low
           HIGH: :high
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURL::detail]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURL::detail]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs
index 3f1fcbdc..9cd1c721 100644
--- a/sig/openai/models/beta/threads/image_url_delta.rbs
+++ b/sig/openai/models/beta/threads/image_url_delta.rbs
@@ -35,7 +35,7 @@ module OpenAI
           LOW: :low
           HIGH: :high
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::detail]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::detail]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs
index 29fa0135..e9a83633 100644
--- a/sig/openai/models/beta/threads/message.rbs
+++ b/sig/openai/models/beta/threads/message.rbs
@@ -110,7 +110,7 @@ module OpenAI
               def to_hash: -> OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only
             end
 
-            def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]
+            def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]
           end
         end
 
@@ -144,7 +144,7 @@ module OpenAI
             RUN_EXPIRED: :run_expired
             RUN_FAILED: :run_failed
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason]
           end
         end
 
@@ -156,7 +156,7 @@ module OpenAI
           USER: :user
           ASSISTANT: :assistant
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::role]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::role]
         end
 
         type status = :in_progress | :incomplete | :completed
@@ -168,7 +168,7 @@ module OpenAI
           INCOMPLETE: :incomplete
           COMPLETED: :completed
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::status]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::status]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs
index dc2a9215..fda3bd19 100644
--- a/sig/openai/models/beta/threads/message_content.rbs
+++ b/sig/openai/models/beta/threads/message_content.rbs
@@ -11,7 +11,7 @@ module OpenAI
         module MessageContent
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock]
+          def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs
index 1357cfba..c143f0ac 100644
--- a/sig/openai/models/beta/threads/message_content_delta.rbs
+++ b/sig/openai/models/beta/threads/message_content_delta.rbs
@@ -11,7 +11,7 @@ module OpenAI
         module MessageContentDelta
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]
+          def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs
index 76a88822..9749af26 100644
--- a/sig/openai/models/beta/threads/message_content_part_param.rbs
+++ b/sig/openai/models/beta/threads/message_content_part_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
         module MessageContentPartParam
           extend OpenAI::Union
 
-          def self.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam]
+          def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs
index 3e6288ac..3acdf91c 100644
--- a/sig/openai/models/beta/threads/message_create_params.rbs
+++ b/sig/openai/models/beta/threads/message_create_params.rbs
@@ -45,7 +45,7 @@ module OpenAI
 
           MessageContentPartParamArray: message_content_part_param_array
 
-          def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
+          def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
         end
 
         type role = :user | :assistant
@@ -56,7 +56,7 @@ module OpenAI
           USER: :user
           ASSISTANT: :assistant
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::role]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::role]
         end
 
         type attachment =
@@ -100,7 +100,7 @@ module OpenAI
             def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search
           end
 
-          def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]
+          def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]
        end
      end
    end
diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs
index 0487cbfd..f40f44ea 100644
--- a/sig/openai/models/beta/threads/message_delta.rbs
+++ b/sig/openai/models/beta/threads/message_delta.rbs
@@ -36,7 +36,7 @@ module OpenAI
           USER: :user
           ASSISTANT: :assistant
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageDelta::role]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageDelta::role]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs
index c8146f4e..aa976bd1 100644
--- a/sig/openai/models/beta/threads/message_list_params.rbs
+++ b/sig/openai/models/beta/threads/message_list_params.rbs
@@ -57,7 +57,7 @@ module OpenAI
           ASC: :asc
           DESC: :desc
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageListParams::order]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageListParams::order]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs
index e84c343f..c36d145c 100644
--- a/sig/openai/models/beta/threads/run.rbs
+++ b/sig/openai/models/beta/threads/run.rbs
@@ -146,7 +146,7 @@ module OpenAI
             MAX_COMPLETION_TOKENS: :max_completion_tokens
             MAX_PROMPT_TOKENS: :max_prompt_tokens
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason]
           end
         end
 
@@ -177,7 +177,7 @@ module OpenAI
             RATE_LIMIT_EXCEEDED: :rate_limit_exceeded
             INVALID_PROMPT: :invalid_prompt
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::LastError::code]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::LastError::code]
           end
         end
 
@@ -241,7 +241,7 @@ module OpenAI
           AUTO: :auto
           LAST_MESSAGES: :last_messages
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_]
         end
       end
diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs
index 5dfe9de6..bf78c39f 100644
--- a/sig/openai/models/beta/threads/run_create_params.rbs
+++ b/sig/openai/models/beta/threads/run_create_params.rbs
@@ -129,7 +129,7 @@ module OpenAI
 
           MessageContentPartParamArray: message_content_part_param_array
 
-          def self.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
+          def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
         end
 
         type role = :user | :assistant
@@ -140,7 +140,7 @@ module OpenAI
           USER: :user
           ASSISTANT: :assistant
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role]
         end
 
         type attachment =
@@ -184,7 +184,7 @@ module OpenAI
             def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search
           end
 
-          def self.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]
+          def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]
         end
       end
 
@@ -194,7 +194,7 @@ module OpenAI
         module Model
           extend OpenAI::Union
 
-          def self.variants: -> [String, OpenAI::Models::chat_model]
+          def self?.variants: -> [String, OpenAI::Models::chat_model]
         end
 
         type truncation_strategy =
@@ -223,7 +223,7 @@ module OpenAI
           AUTO: :auto
           LAST_MESSAGES: :last_messages
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs
index 54dd93bc..fe34ea7a 100644
--- a/sig/openai/models/beta/threads/run_list_params.rbs
+++ b/sig/openai/models/beta/threads/run_list_params.rbs
@@ -51,7 +51,7 @@ module OpenAI
           ASC: :asc
           DESC: :desc
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::RunListParams::order]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunListParams::order]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/run_status.rbs b/sig/openai/models/beta/threads/run_status.rbs
index d7433b66..4b0e430b 100644
--- a/sig/openai/models/beta/threads/run_status.rbs
+++ b/sig/openai/models/beta/threads/run_status.rbs
@@ -26,7 +26,7 @@ module OpenAI
           INCOMPLETE: :incomplete
           EXPIRED: :expired
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::run_status]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::run_status]
         end
       end
     end
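Union modules get the matching treatment for `variants`, and under these signatures a caller can enumerate a union's member types at runtime. A short, hypothetical sketch built on the declared return types (the selection logic is illustrative application code, not SDK code):

    # Inspect the members of a polymorphic union declared in runs/tool_call.rbs.
    members = OpenAI::Models::Beta::Threads::Runs::ToolCall.variants
    # => the three declared tool-call variant classes
    function_call = members.find { |m| m.name.end_with?("FunctionToolCall") }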
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
index ddc03a7d..fa6f7865 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs
@@ -91,7 +91,7 @@ module OpenAI
               end
             end
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
index 81324a83..32dc3fb8 100644
--- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs
@@ -66,7 +66,7 @@ module OpenAI
           module Output
             extend OpenAI::Union
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
index 8952fadb..f5a993d3 100644
--- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs
@@ -77,7 +77,7 @@ module OpenAI
             AUTO: :auto
             DEFAULT_2024_08_21: :default_2024_08_21
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker]
           end
 
@@ -142,7 +142,7 @@ module OpenAI
 
               TEXT: :text
 
-              def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_]
+              def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_]
            end
          end
        end
diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs
index 36dd9a60..a60e10a6 100644
--- a/sig/openai/models/beta/threads/runs/run_step.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step.rbs
@@ -105,7 +105,7 @@ module OpenAI
               SERVER_ERROR: :server_error
               RATE_LIMIT_EXCEEDED: :rate_limit_exceeded
 
-              def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code]
+              def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code]
            end
          end
 
@@ -121,7 +121,7 @@ module OpenAI
             COMPLETED: :completed
             EXPIRED: :expired
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::status]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::status]
           end
 
           type step_details =
@@ -131,7 +131,7 @@ module OpenAI
           module StepDetails
             extend OpenAI::Union
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
           end
 
           type type_ = :message_creation | :tool_calls
@@ -142,7 +142,7 @@ module OpenAI
             MESSAGE_CREATION: :message_creation
             TOOL_CALLS: :tool_calls
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::type_]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::type_]
           end
 
           type usage =
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
index 171b4303..45b48449 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs
@@ -30,7 +30,7 @@ module OpenAI
           module StepDetails
             extend OpenAI::Union
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/runs/run_step_include.rbs b/sig/openai/models/beta/threads/runs/run_step_include.rbs
index ed1e3934..529038db 100644
--- a/sig/openai/models/beta/threads/runs/run_step_include.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_include.rbs
@@ -13,7 +13,7 @@ module OpenAI
 
           STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT: :"step_details.tool_calls[*].file_search.results[*].content"
 
-          def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include]
+          def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include]
         end
       end
     end
diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs
index 641b3530..31771eec 100644
--- a/sig/openai/models/beta/threads/runs/step_list_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs
@@ -64,7 +64,7 @@ module OpenAI
             ASC: :asc
             DESC: :desc
 
-            def self.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::order]
+            def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::order]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs
index 081dbbdf..2d13d06d 100644
--- a/sig/openai/models/beta/threads/runs/tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call.rbs
@@ -11,7 +11,7 @@ module OpenAI
           module ToolCall
             extend OpenAI::Union
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]
          end
        end
      end
diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
index 472aee5f..2e349dcd 100644
--- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
@@ -11,7 +11,7 @@ module OpenAI
           module ToolCallDelta
             extend OpenAI::Union
 
-            def self.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]
+            def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]
          end
        end
      end
OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] end end end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index ed15fbfe..b0013010 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -89,7 +89,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason] end type logprobs = @@ -120,7 +120,7 @@ module OpenAI SCALE: :scale DEFAULT: :default - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index e05d98b5..47bcea48 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -76,10 +76,10 @@ module OpenAI module ArrayOfContentPart extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] + def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] end - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] end type function_call = { arguments: String, name: String } diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index 5f2424d0..f5963ec9 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -32,7 +32,7 @@ module OpenAI OPUS: :opus PCM16: :pcm16 - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::format_] end type voice = @@ -50,7 +50,7 @@ module OpenAI SHIMMER: :shimmer VERSE: :verse - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::voice] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::voice] end end end diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index c85a596e..1ec7aadf 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -142,7 +142,7 @@ module OpenAI ASSISTANT: :assistant TOOL: :tool - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role] end type tool_call = @@ -204,7 +204,7 @@ module OpenAI FUNCTION: :function - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] end end end @@ -221,7 +221,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self.values: 
-> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason] end type logprobs = @@ -252,7 +252,7 @@ module OpenAI SCALE: :scale DEFAULT: :default - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 348780aa..df907b34 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -55,7 +55,7 @@ module OpenAI end end - def self.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] + def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index 1bde5081..d8ca22d9 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -52,7 +52,7 @@ module OpenAI LOW: :low HIGH: :high - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index 85902db7..8d40f203 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -47,7 +47,7 @@ module OpenAI WAV: :wav MP3: :mp3 - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index aa1379e0..ccf0ea3d 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -38,7 +38,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs index c8d7612a..557d3c2c 100644 --- a/sig/openai/models/chat/chat_completion_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_message_param.rbs @@ -14,7 +14,7 @@ module OpenAI module ChatCompletionMessageParam extend OpenAI::Union - def self.variants: -> 
[OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] + def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] end end end diff --git a/sig/openai/models/chat/chat_completion_modality.rbs b/sig/openai/models/chat/chat_completion_modality.rbs index 7075d723..b634c024 100644 --- a/sig/openai/models/chat/chat_completion_modality.rbs +++ b/sig/openai/models/chat/chat_completion_modality.rbs @@ -11,7 +11,7 @@ module OpenAI TEXT: :text AUDIO: :audio - def self.values: -> ::Array[OpenAI::Models::Chat::chat_completion_modality] + def self?.values: -> ::Array[OpenAI::Models::Chat::chat_completion_modality] end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index f49bc614..ccfde6db 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -32,7 +32,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_role.rbs b/sig/openai/models/chat/chat_completion_role.rbs index d805ec9e..4744870b 100644 --- a/sig/openai/models/chat/chat_completion_role.rbs +++ b/sig/openai/models/chat/chat_completion_role.rbs @@ -16,7 +16,7 @@ module OpenAI TOOL: :tool FUNCTION: :function - def self.values: -> ::Array[OpenAI::Models::Chat::chat_completion_role] + def self?.values: -> ::Array[OpenAI::Models::Chat::chat_completion_role] end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index f889a520..5c0e475c 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -38,7 +38,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index 001520b8..114b7588 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -19,10 +19,10 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] end - def self.variants: -> 
[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 8dc39541..6f295779 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -36,7 +36,7 @@ module OpenAI ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index b359b18e..7724c406 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -38,7 +38,7 @@ module OpenAI ChatCompletionContentPartArray: chat_completion_content_part_array - def self.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 9d777b19..4bcc300d 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -169,7 +169,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::chat_model] + def self?.variants: -> [String, OpenAI::Models::chat_model] end type function_call = @@ -187,10 +187,10 @@ module OpenAI NONE: :none AUTO: :auto - def self.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode] end - def self.variants: -> [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + def self?.variants: -> [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] end type function = @@ -230,7 +230,7 @@ module OpenAI TEXT: :text AUDIO: :audio - def self.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality] end type response_format = @@ -241,7 +241,7 @@ module OpenAI module ResponseFormat extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] end type service_tier = :auto | :default @@ -252,7 +252,7 @@ module OpenAI AUTO: :auto DEFAULT: :default - def self.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::service_tier] end type stop = (String | 
::Array[String])? @@ -264,7 +264,7 @@ module OpenAI StringArray: string_array - def self.variants: -> [String, ::Array[String]] + def self?.variants: -> [String, ::Array[String]] end type web_search_options = @@ -298,7 +298,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] end type user_location = diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index bb536c64..8ea7b59f 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -54,7 +54,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::Chat::CompletionListParams::order] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionListParams::order] end end end diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index c066a38c..96ebdaed 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -45,7 +45,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::Chat::Completions::MessageListParams::order] + def self?.values: -> ::Array[OpenAI::Models::Chat::Completions::MessageListParams::order] end end end diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 1578b25d..d0aa3240 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -92,7 +92,7 @@ module OpenAI GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125" GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613" - def self.values: -> ::Array[OpenAI::Models::chat_model] + def self?.values: -> ::Array[OpenAI::Models::chat_model] end end end diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index 20f33540..8d14f63b 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -34,7 +34,7 @@ module OpenAI LT: :lt LTE: :lte - def self.values: -> ::Array[OpenAI::Models::ComparisonFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::ComparisonFilter::type_] end type value = String | Float | bool @@ -42,7 +42,7 @@ module OpenAI module Value extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs index 700baf43..e506a455 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -35,7 +35,7 @@ module OpenAI LENGTH: :length CONTENT_FILTER: :content_filter - def self.values: -> ::Array[OpenAI::Models::CompletionChoice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::CompletionChoice::finish_reason] end type logprobs = diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 358e6503..b6138203 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -95,7 +95,7 @@ module OpenAI DAVINCI_002: :"davinci-002" BABBAGE_002: :"babbage-002" - def self.variants: -> [String, (:"gpt-3.5-turbo-instruct" + def self?.variants: -> [String, 
(:"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002")] end @@ -118,7 +118,7 @@ module OpenAI ArrayOfToken2DArray: array_of_token2_d_array - def self.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] + def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type stop = (String | ::Array[String])? @@ -130,7 +130,7 @@ module OpenAI StringArray: string_array - def self.variants: -> [String, ::Array[String]] + def self?.variants: -> [String, ::Array[String]] end end end diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs index 80a9c41e..211de484 100644 --- a/sig/openai/models/compound_filter.rbs +++ b/sig/openai/models/compound_filter.rbs @@ -23,7 +23,7 @@ module OpenAI module Filter extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ComparisonFilter, top] + def self?.variants: -> [OpenAI::Models::ComparisonFilter, top] end type type_ = :and | :or @@ -34,7 +34,7 @@ module OpenAI AND: :and OR: :or - def self.values: -> ::Array[OpenAI::Models::CompoundFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::CompoundFilter::type_] end end end diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index d08604d9..423f85e6 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -61,7 +61,7 @@ module OpenAI ArrayOfToken2DArray: array_of_token2_d_array - def self.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] + def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type model = String | OpenAI::Models::embedding_model @@ -69,7 +69,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::embedding_model] + def self?.variants: -> [String, OpenAI::Models::embedding_model] end type encoding_format = :float | :base64 @@ -80,7 +80,7 @@ module OpenAI FLOAT: :float BASE64: :base64 - def self.values: -> ::Array[OpenAI::Models::EmbeddingCreateParams::encoding_format] + def self?.values: -> ::Array[OpenAI::Models::EmbeddingCreateParams::encoding_format] end end end diff --git a/sig/openai/models/embedding_model.rbs b/sig/openai/models/embedding_model.rbs index ed029cea..30fd2a8a 100644 --- a/sig/openai/models/embedding_model.rbs +++ b/sig/openai/models/embedding_model.rbs @@ -12,7 +12,7 @@ module OpenAI TEXT_EMBEDDING_3_SMALL: :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE: :"text-embedding-3-large" - def self.values: -> ::Array[OpenAI::Models::embedding_model] + def self?.values: -> ::Array[OpenAI::Models::embedding_model] end end end diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs index 5efd7f51..7ac3728a 100644 --- a/sig/openai/models/file_chunking_strategy.rbs +++ b/sig/openai/models/file_chunking_strategy.rbs @@ -7,7 +7,7 @@ module OpenAI module FileChunkingStrategy extend OpenAI::Union - def self.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + def self?.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] end end end diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs index 818b7c90..df5528b1 100644 --- a/sig/openai/models/file_chunking_strategy_param.rbs +++ 
b/sig/openai/models/file_chunking_strategy_param.rbs @@ -7,7 +7,7 @@ module OpenAI module FileChunkingStrategyParam extend OpenAI::Union - def self.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + def self?.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] end end end diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 9d793064..1cdd93d3 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -49,7 +49,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::FileListParams::order] + def self?.values: -> ::Array[OpenAI::Models::FileListParams::order] end end end diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index 3bb22b13..c4b2cd6b 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -70,7 +70,7 @@ module OpenAI FINE_TUNE_RESULTS: :"fine-tune-results" VISION: :vision - def self.values: -> ::Array[OpenAI::Models::FileObject::purpose] + def self?.values: -> ::Array[OpenAI::Models::FileObject::purpose] end type status = :uploaded | :processed | :error @@ -82,7 +82,7 @@ module OpenAI PROCESSED: :processed ERROR: :error - def self.values: -> ::Array[OpenAI::Models::FileObject::status] + def self?.values: -> ::Array[OpenAI::Models::FileObject::status] end end end diff --git a/sig/openai/models/file_purpose.rbs b/sig/openai/models/file_purpose.rbs index 527e5d11..84067b2c 100644 --- a/sig/openai/models/file_purpose.rbs +++ b/sig/openai/models/file_purpose.rbs @@ -13,7 +13,7 @@ module OpenAI USER_DATA: :user_data EVALS: :evals - def self.values: -> ::Array[OpenAI::Models::file_purpose] + def self?.values: -> ::Array[OpenAI::Models::file_purpose] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index b696b25a..b939849a 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -150,7 +150,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float @@ -158,7 +158,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto | Integer @@ -166,7 +166,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end @@ -188,7 +188,7 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::status] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::status] end type method_ = @@ -290,7 +290,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type beta = :auto | Float @@ -298,7 +298,7 @@ module OpenAI module Beta extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type learning_rate_multiplier = :auto | Float @@ -306,7 +306,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto 
| Integer @@ -314,7 +314,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end end @@ -376,7 +376,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float @@ -384,7 +384,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto | Integer @@ -392,7 +392,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end end @@ -405,7 +405,7 @@ module OpenAI SUPERVISED: :supervised DPO: :dpo - def self.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index e70febc9..e9f208ef 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -56,7 +56,7 @@ module OpenAI WARN: :warn ERROR: :error - def self.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::level] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::level] end type type_ = :message | :metrics @@ -67,7 +67,7 @@ module OpenAI MESSAGE: :message METRICS: :metrics - def self.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::type_] end end end diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 913d7cd3..e0b090b5 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -75,7 +75,7 @@ module OpenAI GPT_3_5_TURBO: :"gpt-3.5-turbo" GPT_4O_MINI: :"gpt-4o-mini" - def self.variants: -> [String, (:"babbage-002" + def self?.variants: -> [String, (:"babbage-002" | :"davinci-002" | :"gpt-3.5-turbo" | :"gpt-4o-mini")] @@ -120,7 +120,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float @@ -128,7 +128,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto | Integer @@ -136,7 +136,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end @@ -287,7 +287,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type beta = :auto | Float @@ -295,7 +295,7 @@ module OpenAI module Beta extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type learning_rate_multiplier = :auto | Float @@ -303,7 +303,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto | Integer @@ -311,7 +311,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: 
-> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end end @@ -373,7 +373,7 @@ module OpenAI module BatchSize extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end type learning_rate_multiplier = :auto | Float @@ -381,7 +381,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - def self.variants: -> [:auto, Float] + def self?.variants: -> [:auto, Float] end type n_epochs = :auto | Integer @@ -389,7 +389,7 @@ module OpenAI module NEpochs extend OpenAI::Union - def self.variants: -> [:auto, Integer] + def self?.variants: -> [:auto, Integer] end end end @@ -402,7 +402,7 @@ module OpenAI SUPERVISED: :supervised DPO: :dpo - def self.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::type_] end end end diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 1460783f..60907fc8 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -46,7 +46,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> [String, OpenAI::Models::image_model] end type response_format = :url | :b64_json @@ -57,7 +57,7 @@ module OpenAI URL: :url B64_JSON: :b64_json - def self.values: -> ::Array[OpenAI::Models::ImageCreateVariationParams::response_format] + def self?.values: -> ::Array[OpenAI::Models::ImageCreateVariationParams::response_format] end type size = :"256x256" | :"512x512" | :"1024x1024" @@ -69,7 +69,7 @@ module OpenAI NUMBER_512X512: :"512x512" NUMBER_1024X1024: :"1024x1024" - def self.values: -> ::Array[OpenAI::Models::ImageCreateVariationParams::size] + def self?.values: -> ::Array[OpenAI::Models::ImageCreateVariationParams::size] end end end diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index cebbb406..d205ee7c 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -56,7 +56,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> [String, OpenAI::Models::image_model] end type response_format = :url | :b64_json @@ -67,7 +67,7 @@ module OpenAI URL: :url B64_JSON: :b64_json - def self.values: -> ::Array[OpenAI::Models::ImageEditParams::response_format] + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::response_format] end type size = :"256x256" | :"512x512" | :"1024x1024" @@ -79,7 +79,7 @@ module OpenAI NUMBER_512X512: :"512x512" NUMBER_1024X1024: :"1024x1024" - def self.values: -> ::Array[OpenAI::Models::ImageEditParams::size] + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::size] end end end diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index ea843f0f..2c18ac73 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -58,7 +58,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> [String, OpenAI::Models::image_model] end type quality = :standard | :hd @@ -69,7 +69,7 @@ module OpenAI STANDARD: :standard HD: :hd - def self.values: -> ::Array[OpenAI::Models::ImageGenerateParams::quality] + def 
self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::quality] end type response_format = :url | :b64_json @@ -80,7 +80,7 @@ module OpenAI URL: :url B64_JSON: :b64_json - def self.values: -> ::Array[OpenAI::Models::ImageGenerateParams::response_format] + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::response_format] end type size = @@ -95,7 +95,7 @@ module OpenAI NUMBER_1792X1024: :"1792x1024" NUMBER_1024X1792: :"1024x1792" - def self.values: -> ::Array[OpenAI::Models::ImageGenerateParams::size] + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::size] end type style = :vivid | :natural @@ -106,7 +106,7 @@ module OpenAI VIVID: :vivid NATURAL: :natural - def self.values: -> ::Array[OpenAI::Models::ImageGenerateParams::style] + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::style] end end end diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs index e68710d9..97e2e64f 100644 --- a/sig/openai/models/image_model.rbs +++ b/sig/openai/models/image_model.rbs @@ -8,7 +8,7 @@ module OpenAI DALL_E_2: :"dall-e-2" DALL_E_3: :"dall-e-3" - def self.values: -> ::Array[OpenAI::Models::image_model] + def self?.values: -> ::Array[OpenAI::Models::image_model] end end end diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index 9e7a8041..9967761f 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -158,7 +158,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] end type harassment_threatening = :text @@ -168,7 +168,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] end type hate = :text @@ -178,7 +178,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] end type hate_threatening = :text @@ -188,7 +188,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] end type illicit = :text @@ -198,7 +198,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] end type illicit_violent = :text @@ -208,7 +208,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] end type self_harm = :text | :image @@ -219,7 +219,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] end type self_harm_instruction = :text | :image @@ -230,7 +230,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] end type self_harm_intent = :text | :image @@ -241,7 +241,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] end type sexual = :text | :image @@ -252,7 +252,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] end type sexual_minor = :text @@ -262,7 +262,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] end type violence = :text | :image @@ -273,7 +273,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] end type violence_graphic = :text | :image @@ -284,7 +284,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] end end diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index 41fec7ce..266f6beb 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -44,7 +44,7 @@ module OpenAI ModerationMultiModalInputArray: moderation_multi_modal_input_array - def self.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] + def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] end type model = String | OpenAI::Models::moderation_model @@ -52,7 +52,7 @@ module OpenAI module Model extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::moderation_model] + def self?.variants: -> [String, OpenAI::Models::moderation_model] end end end diff --git a/sig/openai/models/moderation_model.rbs b/sig/openai/models/moderation_model.rbs index fa7264f2..1f6ab3fd 100644 --- a/sig/openai/models/moderation_model.rbs +++ b/sig/openai/models/moderation_model.rbs @@ -14,7 +14,7 @@ module OpenAI TEXT_MODERATION_LATEST: :"text-moderation-latest" TEXT_MODERATION_STABLE: :"text-moderation-stable" - def self.values: -> ::Array[OpenAI::Models::moderation_model] + def self?.values: -> ::Array[OpenAI::Models::moderation_model] end end end diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs index 9388bae0..967234fc 100644 --- a/sig/openai/models/moderation_multi_modal_input.rbs +++ b/sig/openai/models/moderation_multi_modal_input.rbs @@ -7,7 +7,7 @@ module OpenAI module ModerationMultiModalInput extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput] + def self?.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput] end end end diff 
--git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index 8452d648..d4060722 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -26,7 +26,7 @@ module OpenAI CONCISE: :concise DETAILED: :detailed - def self.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary] + def self?.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary] end end end diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs index 27d712d0..7ee7ea11 100644 --- a/sig/openai/models/reasoning_effort.rbs +++ b/sig/openai/models/reasoning_effort.rbs @@ -9,7 +9,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self.values: -> ::Array[OpenAI::Models::reasoning_effort] + def self?.values: -> ::Array[OpenAI::Models::reasoning_effort] end end end diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index 944fbd51..0b984fde 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -37,7 +37,7 @@ module OpenAI UBUNTU: :ubuntu BROWSER: :browser - def self.values: -> ::Array[OpenAI::Models::Responses::ComputerTool::environment] + def self?.values: -> ::Array[OpenAI::Models::Responses::ComputerTool::environment] end end end diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 10931508..04936217 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -34,7 +34,7 @@ module OpenAI module Content extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list] + def self?.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list] end type role = :user | :assistant | :system | :developer @@ -47,7 +47,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::role] end type type_ = :message @@ -57,7 +57,7 @@ module OpenAI MESSAGE: :message - def self.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_] end end end diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index fbea9c27..69476cbd 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -47,7 +47,7 @@ module OpenAI module Filters extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end type ranking_options = @@ -82,7 +82,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self.values: -> ::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker] end end end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 633f4ef4..1a91bd7e 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -50,7 +50,7 @@ module OpenAI ASC: :asc DESC: :desc - def 
self.values: -> ::Array[OpenAI::Models::Responses::InputItemListParams::order] + def self?.values: -> ::Array[OpenAI::Models::Responses::InputItemListParams::order] end end end diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 21f27944..254aa7e1 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -139,7 +139,7 @@ module OpenAI MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter - def self.values: -> ::Array[OpenAI::Models::Responses::Response::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::IncompleteDetails::reason] end end @@ -151,7 +151,7 @@ module OpenAI module ToolChoice extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end type truncation = :auto | :disabled @@ -162,7 +162,7 @@ module OpenAI AUTO: :auto DISABLED: :disabled - def self.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] end end end diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index 3d2f285b..0a64a853 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -81,7 +81,7 @@ module OpenAI end end - def self.variants: -> [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] + def self?.variants: -> [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] end type status = :in_progress | :interpreting | :completed @@ -93,7 +93,7 @@ module OpenAI INTERPRETING: :interpreting COMPLETED: :completed - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index e6b8fd9e..1cf4a066 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -86,7 +86,7 @@ module OpenAI BACK: :back FORWARD: :forward - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button] end end @@ -228,7 +228,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait end - def self.variants: -> [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, 
OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] + def self?.variants: -> [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] end type pending_safety_check = @@ -255,7 +255,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::status] end type type_ = :computer_call @@ -265,7 +265,7 @@ module OpenAI COMPUTER_CALL: :computer_call - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index c4a48c1d..793ed85e 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -67,7 +67,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs index ebfaa85b..d8469d84 100644 --- a/sig/openai/models/responses/response_content.rbs +++ b/sig/openai/models/responses/response_content.rbs @@ -11,7 +11,7 @@ module OpenAI module ResponseContent extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 2f4c1e05..f4ca687c 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -38,7 +38,7 @@ module OpenAI module Part extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> 
[OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 792dd89a..4ffbc22b 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -38,7 +38,7 @@ module OpenAI module Part extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 7336a20e..ec23db60 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -103,7 +103,7 @@ module OpenAI module Input extend OpenAI::Union - def self.variants: -> [String, OpenAI::Models::Responses::response_input] + def self?.variants: -> [String, OpenAI::Models::Responses::response_input] end type tool_choice = @@ -114,7 +114,7 @@ module OpenAI module ToolChoice extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end type truncation = :auto | :disabled @@ -125,7 +125,7 @@ module OpenAI AUTO: :auto DISABLED: :disabled - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::truncation] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::truncation] end end end diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index c9461327..4187ffad 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -61,7 +61,7 @@ module OpenAI FAILED_TO_DOWNLOAD_IMAGE: :failed_to_download_image IMAGE_FILE_NOT_FOUND: :image_file_not_found - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseError::code] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseError::code] end end end diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index 102ce635..c42ff6ab 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -43,7 +43,7 @@ module OpenAI INCOMPLETE: :incomplete FAILED: :failed - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::status] end type result = @@ -89,7 +89,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs index ffd41786..a78a0964 100644 --- a/sig/openai/models/responses/response_format_text_config.rbs +++ 
b/sig/openai/models/responses/response_format_text_config.rbs @@ -9,7 +9,7 @@ module OpenAI module ResponseFormatTextConfig extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] + def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] end end end diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index a726e572..af876b35 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -50,7 +50,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::status] end end end diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index eea6788a..8feafadc 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -44,7 +44,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 301b596c..4b1b18f8 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -33,7 +33,7 @@ module OpenAI COMPLETED: :completed FAILED: :failed - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::status] end end end diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index df76bd92..17f0b9d0 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -13,7 +13,7 @@ module OpenAI MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" - def self.values: -> ::Array[OpenAI::Models::Responses::response_includable] + def self?.values: -> ::Array[OpenAI::Models::Responses::response_includable] end end end diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index e015e2d8..cebca75a 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -31,7 +31,7 @@ module OpenAI MP3: :mp3 WAV: :wav - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputAudio::format_] end end end diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs index 6f2fe81c..b94f1f20 100644 --- 
a/sig/openai/models/responses/response_input_content.rbs +++ b/sig/openai/models/responses/response_input_content.rbs @@ -9,7 +9,7 @@ module OpenAI module ResponseInputContent extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] + def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] end end end diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index fd361d9a..18b3631c 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -36,7 +36,7 @@ module OpenAI LOW: :low AUTO: :auto - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail] end end end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index 20caf979..ec441979 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -60,7 +60,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::role] end type status = :in_progress | :completed | :incomplete @@ -72,7 +72,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::status] end type type_ = :message @@ -82,7 +82,7 @@ module OpenAI MESSAGE: :message - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_] end end @@ -154,7 +154,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status] end end @@ -203,7 +203,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status] end end @@ -219,7 +219,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::item_reference end - def self.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] + def self?.variants: -> 
[OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] end end end diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index 2671fd84..e0a6c449 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -48,7 +48,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role] end type status = :in_progress | :completed | :incomplete @@ -60,7 +60,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status] end type type_ = :message @@ -70,7 +70,7 @@ module OpenAI MESSAGE: :message - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] end end end diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index 8d2e807d..59b6a1f5 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -14,7 +14,7 @@ module OpenAI module ResponseItem extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] + def self?.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] end end end diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index 9868493b..fb0c190a 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -12,7 +12,7 @@ module OpenAI module ResponseOutputItem extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, 
OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] end end end diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index 81eebadd..dd1c2a28 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -38,7 +38,7 @@ module OpenAI module Content extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end type status = :in_progress | :completed | :incomplete @@ -50,7 +50,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::status] end end end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index 3d499c72..51f31ba9 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -99,7 +99,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path end - def self.variants: -> [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] + def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] end end end diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 17d8480f..9c17d2fd 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -52,7 +52,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::ResponseReasoningItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseReasoningItem::status] end end end diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index 3ecc2fc1..cc7c73f5 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -11,7 +11,7 @@ module OpenAI IN_PROGRESS: :in_progress INCOMPLETE: :incomplete - def self.values: -> ::Array[OpenAI::Models::Responses::response_status] + def self?.values: -> ::Array[OpenAI::Models::Responses::response_status] end end end diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 
8642b35b..a73d12f4 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -38,7 +38,7 @@ module OpenAI module ResponseStreamEvent extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + def self?.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, 
OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] end end end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 17d56582..71096d81 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -111,7 +111,7 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path end - def self.variants: -> [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + def self?.variants: -> [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] end end end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 7cf475b8..c3b27fe5 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -10,7 +10,7 @@ module OpenAI module Tool extend OpenAI::Union - def self.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] + def self?.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] end end end diff --git a/sig/openai/models/responses/tool_choice_options.rbs b/sig/openai/models/responses/tool_choice_options.rbs index 412031bf..44974540 100644 --- a/sig/openai/models/responses/tool_choice_options.rbs +++ b/sig/openai/models/responses/tool_choice_options.rbs @@ -10,7 +10,7 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self.values: -> ::Array[OpenAI::Models::Responses::tool_choice_options] + def self?.values: -> ::Array[OpenAI::Models::Responses::tool_choice_options] end end end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 73f15517..b0562635 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -27,7 +27,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 - def self.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_] end end end diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index 20a3b337..cb6606be 100644 --- 
a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -35,7 +35,7 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 - def self.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::type_] end type search_context_size = :low | :medium | :high @@ -47,7 +47,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::search_context_size] end type user_location = diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 70ac005a..2f676c59 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -16,7 +16,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" + def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11")] diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index a5baefff..ff4fec3b 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -56,7 +56,7 @@ module OpenAI CANCELLED: :cancelled EXPIRED: :expired - def self.values: -> ::Array[OpenAI::Models::Upload::status] + def self?.values: -> ::Array[OpenAI::Models::Upload::status] end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index ba9a3850..02c2f02f 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -98,7 +98,7 @@ module OpenAI IN_PROGRESS: :in_progress COMPLETED: :completed - def self.values: -> ::Array[OpenAI::Models::VectorStore::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStore::status] end type expires_after = { anchor: :last_active_at, days: Integer } diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index b9d2ab90..113f4b27 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -49,7 +49,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::VectorStoreListParams::order] + def self?.values: -> ::Array[OpenAI::Models::VectorStoreListParams::order] end end end diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 17e51e48..24619a7e 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -56,7 +56,7 @@ module OpenAI StringArray: string_array - def self.variants: -> [String, ::Array[String]] + def self?.variants: -> [String, ::Array[String]] end type filters = @@ -65,7 +65,7 @@ module OpenAI module Filters extend OpenAI::Union - def self.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end type ranking_options = @@ -100,7 +100,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self.values: -> ::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker] + def 
self?.values: -> ::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker] end end end diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index 497e9d9d..a5ba79e3 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -35,7 +35,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end type content = @@ -63,7 +63,7 @@ module OpenAI TEXT: :text - def self.values: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::Content::type_] + def self?.values: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::Content::type_] end end end diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 78f70166..f53e80ea 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -37,7 +37,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index d5bdb26b..f4378efa 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -64,7 +64,7 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self.values: -> ::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::filter] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::filter] end type order = :asc | :desc @@ -75,7 +75,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::order] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::order] end end end diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 8ecec280..c0b0ba41 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -37,7 +37,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 362141a4..179d8472 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -60,7 +60,7 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self.values: -> ::Array[OpenAI::Models::VectorStores::FileListParams::filter] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::FileListParams::filter] end type order = :asc | :desc @@ -71,7 +71,7 @@ module OpenAI ASC: :asc DESC: :desc - def self.values: -> ::Array[OpenAI::Models::VectorStores::FileListParams::order] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::FileListParams::order] end end end diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index b85d9f01..fe03cee6 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -29,7 +29,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 79b4b73c..200981e8 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -80,7 +80,7 @@ module OpenAI UNSUPPORTED_FILE: :unsupported_file INVALID_FILE: :invalid_file - def self.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::code] end end @@ -94,7 +94,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::status] end type attribute = String | Float | bool @@ -102,7 +102,7 @@ module OpenAI module Attribute extend OpenAI::Union - def self.variants: -> [String, Float, bool] + def self?.variants: -> [String, Float, bool] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index cc689671..d5ae501f 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -78,7 +78,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::status] end end end
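The sweep of `- def self.` → `+ def self?.` changes above is the entire point of this patch. In RBS, `def self?.foo` declares `foo` as both a singleton method and an instance method — the signature-level counterpart of Ruby's `module_function` — so both `Order.values` and an unqualified `values` call from inside a class that mixes the module in will type-check. A minimal sketch, using a hypothetical `Order` enum module shaped like the ones in these diffs (illustrative only, not a file from this repo):

# order.rbs — hypothetical
module Order
  ASC: :asc
  DESC: :desc

  # `self?.` covers both `Order.values` and an unqualified `values`
  # call inside an including class.
  def self?.values: -> ::Array[::Symbol]
end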
From ccfaeaa52810f6a95c6daae5ec86cd8d5f6419ba Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 21 Mar 2025 01:02:54 +0000
Subject: [PATCH 050/295] feat: prevent tapioca from introspecting the gem internals (#56)

---
 README.md     |  2 ++
 lib/openai.rb | 15 +++++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/README.md b/README.md
index ff557c2d..2c5d6001 100644
--- a/README.md
+++ b/README.md
@@ -166,6 +166,8 @@ openai.chat.completions.create(
 
 ## Sorbet Support
 
+**This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality.
+
 This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`.
 
 What this means is that while you can use Sorbet to type check your code statically, and benefit from the [Sorbet Language Server](https://sorbet.org/docs/lsp) in your editor, there is no runtime type checking and execution overhead from Sorbet itself.
diff --git a/lib/openai.rb b/lib/openai.rb
index 3a9e6cd2..64c67b2f 100644
--- a/lib/openai.rb
+++ b/lib/openai.rb
@@ -1,5 +1,20 @@
 # frozen_string_literal: true
 
+# We already ship the preferred sorbet manifests in the package itself.
+# `tapioca` currently does not offer us a way to opt out of unnecessary compilation.
+if defined?(Tapioca) && caller_locations.any? { _1.path.end_with?("tapioca/cli.rb") }
+  Warning.warn(
+    <<~WARN
+      \n
+      ⚠️ skipped loading of "openai" gem under `tapioca`.
+
+      This message is normal and expected if you are running a `tapioca` command, and does not impact `.rbi` generation.
+      \n
+    WARN
+  )
+  return
+end
+
 # Standard libraries.
 require "cgi"
 require "date"
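The guard added above works because `require` executes a file's top-level code: when the `Tapioca` constant is present and the call stack shows `tapioca/cli.rb`, the gem prints a warning and bails out with a top-level `return`, which (legal since Ruby 2.4) aborts loading of just that one file while leaving the process running. A minimal sketch of the same pattern, with hypothetical names (`SomeTool` and `some_tool/cli.rb` are illustrative, not real):

# guard_sketch.rb — hypothetical stand-in for the lib/openai.rb guard.
# Skip loading this file when it is being required by "some_tool".
if defined?(SomeTool) && caller_locations.any? { _1.path.end_with?("some_tool/cli.rb") }
  Warning.warn("skipped loading under some_tool\n")
  return # a top-level `return` in a required file stops loading only that file
end

puts "loaded normally" # reached on every other load path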
From 125681eb13b3b939e3c2e1581849345a134c4879 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 21 Mar 2025 01:28:03 +0000
Subject: [PATCH 051/295] feat: consistently accept `AnyHash` types in parameter positions in sorbet (#57)

---
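The subject line summarizes the pattern repeated through the rest of this patch: wherever a sorbet signature previously required a concrete model instance (or spelled out `T::Hash[Symbol, T.anything]`), it now accepts `T.any(<Model>, OpenAI::Util::AnyHash)`. Judging from the replacements below, `OpenAI::Util::AnyHash` is the SDK's alias for a `Symbol`-keyed hash, so callers may pass plain hashes in parameter positions and let the SDK coerce them into models. A hedged sketch of what this buys at a call site (the constructor and field names follow shapes in this diff, but are not verified against a released SDK version):

# Before: sorbet accepted only a typed model instance here.
tool = OpenAI::Models::Beta::FunctionTool.new(
  function: OpenAI::Models::FunctionDefinition.new(name: "lookup")
)

# After: a bare hash type-checks in the same parameter position.
tool = OpenAI::Models::Beta::FunctionTool.new(function: {name: "lookup"})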
rbi/lib/openai/base_client.rbi | 6 +- rbi/lib/openai/base_model.rbi | 40 +++--- .../models/audio/speech_create_params.rbi | 2 +- .../audio/transcription_create_params.rbi | 2 +- .../audio/translation_create_params.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 4 +- rbi/lib/openai/models/batch_cancel_params.rbi | 8 +- rbi/lib/openai/models/batch_create_params.rbi | 2 +- rbi/lib/openai/models/batch_list_params.rbi | 2 +- .../openai/models/batch_retrieve_params.rbi | 8 +- rbi/lib/openai/models/beta/assistant.rbi | 6 +- .../models/beta/assistant_create_params.rbi | 13 +- .../models/beta/assistant_delete_params.rbi | 8 +- .../models/beta/assistant_list_params.rbi | 2 +- .../models/beta/assistant_retrieve_params.rbi | 8 +- .../models/beta/assistant_stream_event.rbi | 127 ++++++++++++++---- .../models/beta/assistant_tool_choice.rbi | 2 +- .../models/beta/assistant_update_params.rbi | 8 +- .../openai/models/beta/file_search_tool.rbi | 7 +- rbi/lib/openai/models/beta/function_tool.rbi | 5 +- .../models/beta/message_stream_event.rbi | 28 +++- .../models/beta/run_step_stream_event.rbi | 35 ++++- .../openai/models/beta/run_stream_event.rbi | 50 +++++-- rbi/lib/openai/models/beta/thread.rbi | 6 +- .../beta/thread_create_and_run_params.rbi | 36 +++-- .../models/beta/thread_create_params.rbi | 13 +- .../models/beta/thread_delete_params.rbi | 8 +- .../models/beta/thread_retrieve_params.rbi | 8 +- .../models/beta/thread_stream_event.rbi | 9 +- .../models/beta/thread_update_params.rbi | 8 +- .../beta/threads/file_citation_annotation.rbi | 2 +- .../file_citation_delta_annotation.rbi | 2 +- .../beta/threads/file_path_annotation.rbi | 2 +- .../threads/file_path_delta_annotation.rbi | 2 +- .../beta/threads/image_file_content_block.rbi | 5 +- .../beta/threads/image_file_delta_block.rbi | 6 +- .../beta/threads/image_url_content_block.rbi | 5 +- .../beta/threads/image_url_delta_block.rbi | 6 +- .../openai/models/beta/threads/message.rbi | 2 +- .../beta/threads/message_create_params.rbi | 2 +- .../beta/threads/message_delete_params.rbi | 8 +- .../beta/threads/message_delta_event.rbi | 6 +- .../beta/threads/message_list_params.rbi | 2 +- .../beta/threads/message_retrieve_params.rbi | 8 +- .../beta/threads/message_update_params.rbi | 2 +- .../required_action_function_tool_call.rbi | 2 +- rbi/lib/openai/models/beta/threads/run.rbi | 12 +- .../models/beta/threads/run_cancel_params.rbi | 8 +- .../models/beta/threads/run_create_params.rbi | 6 +- .../models/beta/threads/run_list_params.rbi | 2 +- .../beta/threads/run_retrieve_params.rbi | 8 +- .../run_submit_tool_outputs_params.rbi | 2 +- .../models/beta/threads/run_update_params.rbi | 2 +- .../runs/code_interpreter_output_image.rbi | 2 +- .../runs/code_interpreter_tool_call.rbi | 10 +- .../runs/code_interpreter_tool_call_delta.rbi | 5 +- .../threads/runs/file_search_tool_call.rbi | 7 +- .../beta/threads/runs/function_tool_call.rbi | 2 +- .../threads/runs/function_tool_call_delta.rbi | 2 +- .../runs/message_creation_step_details.rbi | 5 +- .../models/beta/threads/runs/run_step.rbi | 4 +- .../threads/runs/run_step_delta_event.rbi | 6 +- .../runs/run_step_delta_message_delta.rbi | 5 +- .../beta/threads/runs/step_list_params.rbi | 2 +- .../threads/runs/step_retrieve_params.rbi | 2 +- .../beta/threads/text_content_block.rbi | 5 +- .../models/beta/threads/text_delta_block.rbi | 6 +- .../openai/models/chat/chat_completion.rbi | 6 +- ...hat_completion_assistant_message_param.rbi | 6 +- .../models/chat/chat_completion_chunk.rbi | 10 +- .../chat/chat_completion_content_part.rbi | 5 +- .../chat_completion_content_part_image.rbi | 5 +- ...at_completion_content_part_input_audio.rbi | 5 +- .../models/chat/chat_completion_message.rbi | 9 +- .../chat_completion_message_tool_call.rbi | 6 +- .../chat_completion_named_tool_choice.rbi | 5 +- .../models/chat/chat_completion_tool.rbi | 5 +- .../models/chat/completion_create_params.rbi | 19 ++- .../models/chat/completion_delete_params.rbi | 8 +- .../models/chat/completion_list_params.rbi | 2 +- .../chat/completion_retrieve_params.rbi | 8 +- .../models/chat/completion_update_params.rbi | 2 +- .../chat/completions/message_list_params.rbi | 2 +- rbi/lib/openai/models/completion.rbi | 2 +- rbi/lib/openai/models/completion_choice.rbi | 2 +- .../models/completion_create_params.rbi | 4 +- rbi/lib/openai/models/completion_usage.rbi | 4 +- .../models/create_embedding_response.rbi | 2 +- .../openai/models/embedding_create_params.rbi | 2 +- rbi/lib/openai/models/file_content_params.rbi | 8 +- rbi/lib/openai/models/file_create_params.rbi | 2 +- rbi/lib/openai/models/file_delete_params.rbi | 8 +- rbi/lib/openai/models/file_list_params.rbi | 2 +- .../openai/models/file_retrieve_params.rbi | 8 +- .../models/fine_tuning/fine_tuning_job.rbi | 21 ++- ...ne_tuning_job_wandb_integration_object.rbi | 5 +- .../models/fine_tuning/job_cancel_params.rbi | 8 +- .../models/fine_tuning/job_create_params.rbi | 26 ++-- .../fine_tuning/job_list_events_params.rbi | 2 +- .../models/fine_tuning/job_list_params.rbi | 2 +- .../fine_tuning/job_retrieve_params.rbi | 8 +- .../jobs/checkpoint_list_params.rbi | 2 +- .../jobs/fine_tuning_job_checkpoint.rbi | 2 +- .../models/image_create_variation_params.rbi | 2 +- rbi/lib/openai/models/image_edit_params.rbi | 2 +- .../openai/models/image_generate_params.rbi | 2 +- rbi/lib/openai/models/model_delete_params.rbi | 8 +- rbi/lib/openai/models/model_list_params.rbi | 8 +- .../openai/models/model_retrieve_params.rbi | 8 +- rbi/lib/openai/models/moderation.rbi | 6 +- .../models/moderation_create_params.rbi | 2 +- .../models/moderation_image_url_input.rbi | 5 +- .../models/response_format_json_schema.rbi | 5 +- .../models/responses/file_search_tool.rbi | 2 +- .../responses/input_item_list_params.rbi | 2 +- rbi/lib/openai/models/responses/response.rbi | 10 +- ..._code_interpreter_call_completed_event.rbi | 2 +- ...ode_interpreter_call_in_progress_event.rbi | 2 +- ...de_interpreter_call_interpreting_event.rbi | 2 +- .../responses/response_completed_event.rbi | 5 +- ...esponse_computer_tool_call_output_item.rbi | 2 +- .../responses/response_create_params.rbi | 6 +- .../responses/response_created_event.rbi | 5 +- .../responses/response_delete_params.rbi | 8 +- .../responses/response_failed_event.rbi | 5 +- .../responses/response_in_progress_event.rbi | 5 +- .../responses/response_incomplete_event.rbi | 5 +- .../models/responses/response_input_item.rbi | 2 +- .../responses/response_retrieve_params.rbi | 2 +- .../models/responses/response_usage.rbi | 4 +- .../models/responses/web_search_tool.rbi | 2 +-
.../static_file_chunking_strategy_object.rbi | 5 +- ...ic_file_chunking_strategy_object_param.rbi | 5 +- rbi/lib/openai/models/upload.rbi | 2 +- .../openai/models/upload_cancel_params.rbi | 8 +- .../openai/models/upload_complete_params.rbi | 2 +- .../openai/models/upload_create_params.rbi | 2 +- .../models/uploads/part_create_params.rbi | 2 +- rbi/lib/openai/models/vector_store.rbi | 4 +- .../models/vector_store_create_params.rbi | 4 +- .../models/vector_store_delete_params.rbi | 8 +- .../models/vector_store_list_params.rbi | 2 +- .../models/vector_store_retrieve_params.rbi | 8 +- .../models/vector_store_search_params.rbi | 4 +- .../models/vector_store_update_params.rbi | 4 +- .../file_batch_cancel_params.rbi | 2 +- .../file_batch_create_params.rbi | 2 +- .../file_batch_list_files_params.rbi | 2 +- .../file_batch_retrieve_params.rbi | 2 +- .../vector_stores/file_content_params.rbi | 2 +- .../vector_stores/file_create_params.rbi | 2 +- .../vector_stores/file_delete_params.rbi | 2 +- .../models/vector_stores/file_list_params.rbi | 2 +- .../vector_stores/file_retrieve_params.rbi | 2 +- .../vector_stores/file_update_params.rbi | 2 +- .../vector_stores/vector_store_file.rbi | 2 +- .../vector_stores/vector_store_file_batch.rbi | 2 +- rbi/lib/openai/request_options.rbi | 10 +- rbi/lib/openai/resources/audio/speech.rbi | 2 +- .../openai/resources/audio/transcriptions.rbi | 4 +- .../openai/resources/audio/translations.rbi | 2 +- rbi/lib/openai/resources/batches.rbi | 18 ++- rbi/lib/openai/resources/beta/assistants.rbi | 10 +- rbi/lib/openai/resources/beta/threads.rbi | 12 +- .../resources/beta/threads/messages.rbi | 10 +- .../openai/resources/beta/threads/runs.rbi | 16 +-- .../resources/beta/threads/runs/steps.rbi | 4 +- rbi/lib/openai/resources/chat/completions.rbi | 12 +- .../resources/chat/completions/messages.rbi | 2 +- rbi/lib/openai/resources/completions.rbi | 4 +- rbi/lib/openai/resources/embeddings.rbi | 2 +- rbi/lib/openai/resources/files.rbi | 25 +++- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 10 +- .../fine_tuning/jobs/checkpoints.rbi | 2 +- rbi/lib/openai/resources/images.rbi | 6 +- rbi/lib/openai/resources/models.rbi | 16 ++- rbi/lib/openai/resources/moderations.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 8 +- .../resources/responses/input_items.rbi | 2 +- rbi/lib/openai/resources/uploads.rbi | 6 +- rbi/lib/openai/resources/uploads/parts.rbi | 2 +- rbi/lib/openai/resources/vector_stores.rbi | 12 +- .../resources/vector_stores/file_batches.rbi | 8 +- .../openai/resources/vector_stores/files.rbi | 12 +- rbi/lib/openai/util.rbi | 4 +- 185 files changed, 839 insertions(+), 438 deletions(-) diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index bb9872fa..56d1eb77 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -26,7 +26,7 @@ module OpenAI page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), model: T.nilable(OpenAI::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) } end @@ -129,7 +129,7 @@ module OpenAI # @api private sig do overridable - .params(req: OpenAI::BaseClient::RequestComponentsShape, opts: T::Hash[Symbol, T.anything]) + .params(req: OpenAI::BaseClient::RequestComponentsShape, opts: OpenAI::Util::AnyHash) .returns(OpenAI::BaseClient::RequestInputShape) end private def build_request(req, opts) 
@@ -175,7 +175,7 @@ module OpenAI page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), model: T.nilable(OpenAI::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(T.anything) end diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 2fead0cf..c99d19ab 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -242,8 +242,8 @@ module OpenAI # @api private sig do params( - key: T.any(Symbol, T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything), - spec: T.any(T::Hash[Symbol, T.anything], T.proc.returns(T.anything), T.anything) + key: T.any(Symbol, OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything), + spec: T.any(OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything) ) .void end @@ -295,11 +295,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .returns(T.attached_class) end @@ -350,11 +350,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .void end @@ -374,11 +374,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .returns(T.attached_class) end @@ -397,7 +397,7 @@ module OpenAI sig(:final) do override .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) - .returns(T.any(T::Hash[Symbol, T.anything], T.anything)) + .returns(T.any(OpenAI::Util::AnyHash, T.anything)) end def coerce(value) end @@ -406,7 +406,7 @@ module OpenAI sig(:final) do override .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) - .returns(T.any(T::Hash[Symbol, T.anything], T.anything)) + .returns(T.any(OpenAI::Util::AnyHash, T.anything)) end def dump(value) end @@ -429,11 +429,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .void end @@ -498,7 +498,7 @@ module OpenAI T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .void end @@ -510,11 +510,11 @@ module OpenAI params( name_sym: Symbol, type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .void end @@ -526,11 +526,11 @@ module OpenAI params( name_sym: Symbol, type_info: T.any( - T::Hash[Symbol, T.anything], + OpenAI::Util::AnyHash, T.proc.returns(OpenAI::Converter::Input), OpenAI::Converter::Input ), - spec: T::Hash[Symbol, T.anything] + spec: OpenAI::Util::AnyHash ) .void end @@ -604,7 +604,7 @@ module OpenAI # # This method is not recursive. 
The returned value is shared by the object, so it # should not be mutated. - sig { overridable.returns(T::Hash[Symbol, T.anything]) } + sig { overridable.returns(OpenAI::Util::AnyHash) } def to_h end @@ -616,11 +616,11 @@ module OpenAI # # This method is not recursive. The returned value is shared by the object, so it # should not be mutated. - sig { overridable.returns(T::Hash[Symbol, T.anything]) } + sig { overridable.returns(OpenAI::Util::AnyHash) } def to_hash end - sig { params(keys: T.nilable(T::Array[Symbol])).returns(T::Hash[Symbol, T.anything]) } + sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Util::AnyHash) } def deconstruct_keys(keys) end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 49573052..0e0c0f1e 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -85,7 +85,7 @@ module OpenAI instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 0d5d853f..21e8d1ad 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -127,7 +127,7 @@ module OpenAI response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index b7cda45d..824eab32 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -75,7 +75,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 5c383697..acb1dcad 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -206,7 +206,7 @@ module OpenAI cancelling_at: Integer, completed_at: Integer, error_file_id: String, - errors: OpenAI::Models::Batch::Errors, + errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash), expired_at: Integer, expires_at: Integer, failed_at: Integer, @@ -214,7 +214,7 @@ module OpenAI in_progress_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, - request_counts: OpenAI::Models::BatchRequestCounts, + request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/lib/openai/models/batch_cancel_params.rbi index fba65e6e..4a71901d 100644 --- 
a/rbi/lib/openai/models/batch_cancel_params.rbi +++ b/rbi/lib/openai/models/batch_cancel_params.rbi @@ -7,8 +7,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 90e1afe6..46c3a392 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -71,7 +71,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 8a8368b8..460fe7ca 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -32,7 +32,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/lib/openai/models/batch_retrieve_params.rbi index 56cbcb05..cb2b72ff 100644 --- a/rbi/lib/openai/models/batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/batch_retrieve_params.rbi @@ -7,8 +7,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index 14e315f2..a15e202a 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -251,7 +251,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::Assistant::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash)), top_p: T.nilable(Float), object: Symbol ) @@ -339,8 +339,8 @@ module OpenAI # IDs. 
sig do params( - code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index e7046d48..8e5c3a9b 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -244,7 +244,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -253,7 +253,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -362,8 +362,8 @@ module OpenAI # IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -620,7 +620,10 @@ module OpenAI sig do params( - static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/lib/openai/models/beta/assistant_delete_params.rbi index f6e02e32..ef0b02c4 100644 --- a/rbi/lib/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_delete_params.rbi @@ -8,8 +8,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index cf967a08..76427298 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -60,7 +60,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi index 
493dc3a9..fcad6743 100644 --- a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi @@ -8,8 +8,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 8651ae68..449d6397 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -92,7 +92,14 @@ module OpenAI # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. - sig { params(data: OpenAI::Models::Beta::Thread, enabled: T::Boolean, event: Symbol).returns(T.attached_class) } + sig do + params( + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash), + enabled: T::Boolean, + event: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, enabled: nil, event: :"thread.created") end @@ -125,7 +132,10 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.created") end @@ -158,7 +168,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.queued") end @@ -191,7 +204,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.in_progress") end @@ -224,7 +240,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.requires_action") end @@ -257,7 +276,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. 
- sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.completed") end @@ -290,7 +312,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.incomplete") end @@ -323,7 +348,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.failed") end @@ -356,7 +384,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.cancelling") end @@ -389,7 +420,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.cancelled") end @@ -422,7 +456,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.expired") end @@ -455,7 +492,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.created") end @@ -488,7 +528,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. 
- sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.in_progress") end @@ -523,7 +566,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. sig do - params(data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.delta") @@ -558,7 +604,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.completed") end @@ -591,7 +640,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.failed") end @@ -624,7 +676,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.cancelled") end @@ -657,7 +712,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.expired") end @@ -691,7 +749,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.created") end @@ -725,7 +786,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. 
- sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.in_progress") end @@ -759,7 +823,13 @@ module OpenAI # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. - sig { params(data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol).returns(T.attached_class) } + sig do + params( + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash), + event: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.delta") end @@ -793,7 +863,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.completed") end @@ -827,7 +900,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.incomplete") end @@ -859,7 +935,10 @@ module OpenAI # Occurs when an # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. 
- sig { params(data: OpenAI::Models::ErrorObject, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :error) end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index e40ae9b4..1d11f89d 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -32,7 +32,7 @@ module OpenAI sig do params( type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: OpenAI::Models::Beta::AssistantToolChoiceFunction + function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index eb77a1f2..8fc4069c 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -244,7 +244,7 @@ module OpenAI ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, @@ -253,7 +253,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -417,8 +417,8 @@ module OpenAI # IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 27504367..e44328a2 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -26,7 +26,10 @@ module OpenAI end sig do - params(file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch, type: Symbol) + params( + file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(file_search: nil, type: :file_search) @@ -74,7 +77,7 @@ module OpenAI sig do params( max_num_results: Integer, - ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index bc5fdf14..c69fa0dd 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -24,7 +24,10 @@ module OpenAI def type=(_) end - sig { params(function: OpenAI::Models::FunctionDefinition, type: Symbol).returns(T.attached_class) } + sig do + params(function: 
T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol) + .returns(T.attached_class) + end def self.new(function:, type: :function) end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 0766b1b4..8eb06277 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -47,7 +47,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.created") end @@ -81,7 +84,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.in_progress") end @@ -115,7 +121,13 @@ module OpenAI # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. - sig { params(data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol).returns(T.attached_class) } + sig do + params( + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash), + event: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.delta") end @@ -149,7 +161,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.completed") end @@ -183,7 +198,10 @@ module OpenAI # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. - sig { params(data: OpenAI::Models::Beta::Threads::Message, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.message.incomplete") end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 11b0dbf9..283216f5 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -48,7 +48,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. 
- sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.created") end @@ -81,7 +84,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.in_progress") end @@ -116,7 +122,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. sig do - params(data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.delta") @@ -151,7 +160,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.completed") end @@ -184,7 +196,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.failed") end @@ -217,7 +232,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.cancelled") end @@ -250,7 +268,10 @@ module OpenAI # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. - sig { params(data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.step.expired") end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 0b347621..de3392b0 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -50,7 +50,10 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
- sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.created") end @@ -83,7 +86,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.queued") end @@ -116,7 +122,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.in_progress") end @@ -149,7 +158,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.requires_action") end @@ -182,7 +194,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.completed") end @@ -215,7 +230,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.incomplete") end @@ -248,7 +266,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.failed") end @@ -281,7 +302,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.cancelling") end @@ -314,7 +338,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. 
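The run-event hunks around this point follow the same pattern. Only the sig bodies appear in the diff, so the enclosing class name in this sketch is an assumption; the keyword signature `new(data:, event: :"thread.run.cancelled")` is taken from the hunk below:

    # Hypothetical: ThreadRunCancelled is a guess at the nested class name,
    # which this diff does not show.
    OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled.new(
      data: {id: "run_123", status: "cancelled"} # AnyHash in place of a Run model
    )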
- sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.cancelled") end @@ -347,7 +374,10 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. - sig { params(data: OpenAI::Models::Beta::Threads::Run, event: Symbol).returns(T.attached_class) } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + .returns(T.attached_class) + end def self.new(data:, event: :"thread.run.expired") end diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index df3f74a9..3cd4db69 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -67,7 +67,7 @@ module OpenAI id: String, created_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::Thread::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -119,8 +119,8 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 8e93da0a..dc7a1570 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -335,14 +335,14 @@ module OpenAI ) ), temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), tools: T.nilable( T::Array[ T.any( @@ -353,8 +353,10 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -497,7 +499,9 @@ module OpenAI params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources) + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, 
OpenAI::Util::AnyHash) + ) ) .returns(T.attached_class) end @@ -900,8 +904,14 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ), + file_search: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, + OpenAI::Util::AnyHash + ) ) .returns(T.attached_class) end @@ -1164,7 +1174,10 @@ module OpenAI sig do params( - static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) @@ -1282,8 +1295,11 @@ module OpenAI # IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + OpenAI::Util::AnyHash + ), + file_search: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index b546a4c2..165cc27d 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -55,8 +55,8 @@ module OpenAI params( messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -439,8 +439,8 @@ module OpenAI # tool requires a list of vector store IDs. 
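ThreadCreateParams shows how the widening composes for nested resources. A sketch of the call it enables, assuming the other keywords are optional as in the surrounding params classes (the field names follow the public Assistants API; the IDs are illustrative):

    # tool_resources, and each resource inside it, may now be plain hashes
    # instead of pre-built ToolResources / CodeInterpreter / FileSearch models.
    OpenAI::Models::Beta::ThreadCreateParams.new(
      tool_resources: {
        code_interpreter: {file_ids: ["file-abc123"]},
        file_search: {vector_store_ids: ["vs_abc123"]}
      }
    )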
sig do params( - code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -697,7 +697,10 @@ module OpenAI sig do params( - static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/lib/openai/models/beta/thread_delete_params.rbi index a47b1e53..a4ea2d8b 100644 --- a/rbi/lib/openai/models/beta/thread_delete_params.rbi +++ b/rbi/lib/openai/models/beta/thread_delete_params.rbi @@ -8,8 +8,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi index 07f1e477..666d7bb1 100644 --- a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi @@ -8,8 +8,12 @@ module OpenAI include OpenAI::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index 95989f48..84e72f6e 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -37,7 +37,14 @@ module OpenAI # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. 
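The *DeleteParams and *RetrieveParams hunks above swap the spelled-out T::Hash[Symbol, T.anything] for the shared OpenAI::Util::AnyHash alias, so the accepted values are the same union written once. Both call styles in this sketch satisfy the new sig (the timeout key and the no-argument RequestOptions constructor are assumptions, not shown in this diff):

    OpenAI::Models::Beta::ThreadDeleteParams.new(request_options: {timeout: 30})
    OpenAI::Models::Beta::ThreadDeleteParams.new(
      request_options: OpenAI::RequestOptions.new # assumed default constructor
    )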
- sig { params(data: OpenAI::Models::Beta::Thread, enabled: T::Boolean, event: Symbol).returns(T.attached_class) } + sig do + params( + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash), + enabled: T::Boolean, + event: Symbol + ) + .returns(T.attached_class) + end def self.new(data:, enabled: nil, event: :"thread.created") end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index dae45106..97e08dd0 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -41,8 +41,8 @@ module OpenAI sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end @@ -97,8 +97,8 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch + code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index 29970857..e41f739a 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -58,7 +58,7 @@ module OpenAI sig do params( end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 8902d331..c825f31b 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -70,7 +70,7 @@ module OpenAI params( index: Integer, end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 5eaca363..dd673abd 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -55,7 +55,7 @@ module OpenAI sig do params( end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git 
a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index 1239b189..b154c4d8 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -65,7 +65,7 @@ module OpenAI params( index: Integer, end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index da91c722..4c2d8b93 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -27,7 +27,10 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. - sig { params(image_file: OpenAI::Models::Beta::Threads::ImageFile, type: Symbol).returns(T.attached_class) } + sig do + params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash), type: Symbol) + .returns(T.attached_class) + end def self.new(image_file:, type: :image_file) end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 6b7c7867..6aa45d0d 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -37,7 +37,11 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. sig do - params(index: Integer, image_file: OpenAI::Models::Beta::Threads::ImageFileDelta, type: Symbol) + params( + index: Integer, + image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(index:, image_file: nil, type: :image_file) diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index fa47f814..7a20b522 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -26,7 +26,10 @@ module OpenAI end # References an image URL in the content of a message. - sig { params(image_url: OpenAI::Models::Beta::Threads::ImageURL, type: Symbol).returns(T.attached_class) } + sig do + params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash), type: Symbol) + .returns(T.attached_class) + end def self.new(image_url:, type: :image_url) end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index d4bec7be..5b278b0a 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -36,7 +36,11 @@ module OpenAI # References an image URL in the content of a message. 
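For the content-block hunks above, a sketch of the hash form (class name inferred from image_url_content_block.rbi; the URL is illustrative):

    # The widened sig accepts AnyHash where an ImageURL model was required;
    # type: defaults to :image_url per the hunk above.
    OpenAI::Models::Beta::Threads::ImageURLContentBlock.new(
      image_url: {url: "https://example.com/diagram.png"}
    )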
sig do - params(index: Integer, image_url: OpenAI::Models::Beta::Threads::ImageURLDelta, type: Symbol) + params( + index: Integer, + image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(index:, image_url: nil, type: :image_url) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index ca7f31be..6ea4f061 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -208,7 +208,7 @@ module OpenAI ], created_at: Integer, incomplete_at: T.nilable(Integer), - incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, run_id: T.nilable(String), diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 2bbad44b..74d7c35b 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -113,7 +113,7 @@ module OpenAI role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index 8ef23891..74583e5c 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -17,13 +17,7 @@ module OpenAI end sig do - params( - thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - T::Hash[Symbol, T.anything] - ) - ) + params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 299d8e92..e52c4656 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -38,7 +38,11 @@ module OpenAI # Represents a message delta i.e. any changed fields on a message during # streaming. 
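The delta-event sig below follows suit; a sketch (class name inferred from message_delta_event.rbi; the delta payload is illustrative):

    # object: defaults to :"thread.message.delta" per the hunk below.
    OpenAI::Models::Beta::Threads::MessageDeltaEvent.new(
      id: "msg_123",
      delta: {content: []} # AnyHash in place of a MessageDelta instance
    )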
sig do - params(id: String, delta: OpenAI::Models::Beta::Threads::MessageDelta, object: Symbol) + params( + id: String, + delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash), + object: Symbol + ) .returns(T.attached_class) end def self.new(id:, delta:, object: :"thread.message.delta") diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 8b7fcfb2..aecbbbaa 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -71,7 +71,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index 19dca144..ecc15e69 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -17,13 +17,7 @@ module OpenAI end sig do - params( - thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - T::Hash[Symbol, T.anything] - ) - ) + params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 26ec576d..e25f66a7 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -34,7 +34,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 6cbe1e05..ce2250e5 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -47,7 +47,7 @@ module OpenAI sig do params( id: String, - function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 23687e33..a89a0fe4 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -413,15 +413,15 @@ module OpenAI created_at: Integer, expires_at: T.nilable(Integer), failed_at: T.nilable(Integer), - incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails), + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash)), instructions: String, - last_error: T.nilable(OpenAI::Models::Beta::Threads::Run::LastError), + last_error: 
T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash)), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: String, parallel_tool_calls: T::Boolean, - required_action: T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction), + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash)), response_format: T.nilable( T.any( Symbol, @@ -446,8 +446,8 @@ module OpenAI OpenAI::Models::Beta::FunctionTool ) ], - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy), - usage: T.nilable(OpenAI::Models::Beta::Threads::Run::Usage), + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash)), temperature: T.nilable(Float), top_p: T.nilable(Float), object: Symbol @@ -679,7 +679,7 @@ module OpenAI # is required. sig do params( - submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index 119500b5..fd833391 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -17,13 +17,7 @@ module OpenAI end sig do - params( - thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - T::Hash[Symbol, T.anything] - ) - ) + params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 18a61e58..83e052a8 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -383,8 +383,10 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 28ea5210..8021be63 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -61,7 +61,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index fa5675d0..90546b16 100644 --- 
a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -17,13 +17,7 @@ module OpenAI end sig do - params( - thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - T::Hash[Symbol, T.anything] - ) - ) + params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 0a39a132..ad431d15 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -32,7 +32,7 @@ module OpenAI params( thread_id: String, tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 732bd936..41fc9c62 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -34,7 +34,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 3b9ea8e2..97783911 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -42,7 +42,7 @@ module OpenAI sig do params( index: Integer, - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 13ecfae5..98f88637 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -51,7 +51,10 @@ module OpenAI sig do params( id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + code_interpreter: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) @@ -232,7 +235,10 @@ module OpenAI sig do params( - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + image: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi 
b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 4b26a9eb..1b18df27 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -61,7 +61,10 @@ module OpenAI params( index: Integer, id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + code_interpreter: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 0423fad0..432b6002 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -42,7 +42,7 @@ module OpenAI sig do params( id: String, - file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -97,7 +97,10 @@ module OpenAI # For now, this is always going to be an empty object. sig do params( - ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + ranking_options: T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + OpenAI::Util::AnyHash + ), results: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 9c94d56e..4c691bf9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -40,7 +40,7 @@ module OpenAI sig do params( id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 50168e95..8819f4f0 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -54,7 +54,7 @@ module OpenAI params( index: Integer, id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 4293b0b6..6c36a326 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -39,7 +39,10 @@ module OpenAI # Details of the message creation by the run step. 
sig do params( - message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + message_creation: T.any( + OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index d55ca19e..1a008d84 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -207,7 +207,7 @@ module OpenAI created_at: Integer, expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), - last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, @@ -217,7 +217,7 @@ module OpenAI ), thread_id: String, type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, - usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 62c90cff..bd418312 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -39,7 +39,11 @@ module OpenAI # Represents a run step delta i.e. any changed fields on a run step during # streaming. sig do - params(id: String, delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, object: Symbol) + params( + id: String, + delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash), + object: Symbol + ) .returns(T.attached_class) end def self.new(id:, delta:, object: :"thread.run.step.delta") diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 03db1624..3df236cc 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -39,7 +39,10 @@ module OpenAI # Details of the message creation by the run step. 
sig do params( - message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + message_creation: T.any( + OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + OpenAI::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 8a8e6212..fefcb8ac 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -90,7 +90,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 54eb0d50..42271a87 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -48,7 +48,7 @@ module OpenAI thread_id: String, run_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 30ab7921..2ba0d542 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -26,7 +26,10 @@ module OpenAI end # The text content that is part of a message. - sig { params(text: OpenAI::Models::Beta::Threads::Text, type: Symbol).returns(T.attached_class) } + sig do + params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash), type: Symbol) + .returns(T.attached_class) + end def self.new(text:, type: :text) end diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index c021cb96..b694fadd 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -36,7 +36,11 @@ module OpenAI # The text content that is part of a message. 
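Same pattern for the text blocks above; a sketch (class name inferred from text_content_block.rbi; value and annotations follow the public message schema):

    # type: defaults to :text per the hunk above.
    OpenAI::Models::Beta::Threads::TextContentBlock.new(
      text: {value: "hello", annotations: []} # AnyHash in place of a Text model
    )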
sig do - params(index: Integer, text: OpenAI::Models::Beta::Threads::TextDelta, type: Symbol) + params( + index: Integer, + text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(index:, text: nil, type: :text) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 0131e9f7..6e2cd6d4 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -99,7 +99,7 @@ module OpenAI model: String, service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage, + usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -189,8 +189,8 @@ module OpenAI params( finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, index: Integer, - logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), - message: OpenAI::Models::Chat::ChatCompletionMessage + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)), + message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 02f68760..de9d2442 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -136,7 +136,7 @@ module OpenAI # Messages sent by the model in response to user messages. 
sig do params( - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)), content: T.nilable( T.any( String, @@ -148,7 +148,9 @@ module OpenAI ] ) ), - function_call: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall), + function_call: T.nilable( + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) + ), name: String, refusal: T.nilable(String), tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index dafab50f..802b0de2 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -107,7 +107,7 @@ module OpenAI model: String, service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), system_fingerprint: String, - usage: T.nilable(OpenAI::Models::CompletionUsage), + usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -199,10 +199,10 @@ module OpenAI sig do params( - delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, + delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash), finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), index: Integer, - logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) ) .returns(T.attached_class) end @@ -286,7 +286,7 @@ module OpenAI sig do params( content: T.nilable(String), - function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash), refusal: T.nilable(String), role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] @@ -417,7 +417,7 @@ module OpenAI params( index: Integer, id: String, - function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash), type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index baf81fd2..2392c0a6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -44,7 +44,10 @@ module OpenAI # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text # generation. 
sig do - params(file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, type: Symbol) + params( + file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(file:, type: :file) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 5b8ea698..af0de7f9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -26,7 +26,10 @@ module OpenAI # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). sig do - params(image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, type: Symbol) + params( + image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(image_url:, type: :image_url) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 89a3b585..2abced72 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -30,7 +30,10 @@ module OpenAI # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). sig do - params(input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, type: Symbol) + params( + input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(input_audio:, type: :input_audio) diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 850a6c60..a2ea14b8 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -89,8 +89,8 @@ module OpenAI content: T.nilable(String), refusal: T.nilable(String), annotations: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudio), - function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash)), + function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash), tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], role: Symbol ) @@ -144,7 +144,10 @@ module OpenAI # A URL citation when using web search. 
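On the chat side the same widening applies to tool calls. The keyword signature `new(id:, function:, type: :function)` is visible in the hunk below, while the function name and arguments string here are illustrative:

    OpenAI::Models::Chat::ChatCompletionMessageToolCall.new(
      id: "call_123",
      function: {name: "get_weather", arguments: '{"city":"Oslo"}'} # AnyHash
    )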
sig do - params(url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, type: Symbol) + params( + url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(url_citation:, type: :url_citation) diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index 09f27b3e..b4dcff08 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -35,7 +35,11 @@ module OpenAI end sig do - params(id: String, function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, type: Symbol) + params( + id: String, + function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(id:, function:, type: :function) diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 602ab1d6..a03dfbeb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -27,7 +27,10 @@ module OpenAI # Specifies a tool the model should use. Use to force the model to call a specific # function. sig do - params(function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, type: Symbol) + params( + function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(function:, type: :function) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index 7cf95402..8cd4fe09 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -24,7 +24,10 @@ module OpenAI def type=(_) end - sig { params(function: OpenAI::Models::FunctionDefinition, type: Symbol).returns(T.attached_class) } + sig do + params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol) + .returns(T.attached_class) + end def self.new(function:, type: :function) end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 2bfbf47c..6f7c3948 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -559,7 +559,7 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, @@ -574,7 +574,7 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: 
             T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
           response_format: T.any(
@@ -586,7 +586,7 @@ module OpenAI
           service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol),
           stop: T.nilable(T.any(String, T::Array[String])),
           store: T.nilable(T::Boolean),
-          stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
           temperature: T.nilable(Float),
           tool_choice: T.any(
             OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
@@ -596,8 +596,8 @@ module OpenAI
           top_logprobs: T.nilable(Integer),
           top_p: T.nilable(Float),
           user: String,
-          web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash),
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
@@ -974,7 +974,9 @@ module OpenAI
         sig do
           params(
             search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol,
-            user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)
+            user_location: T.nilable(
+              T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash)
+            )
           )
             .returns(T.attached_class)
         end
@@ -1071,7 +1073,10 @@ module OpenAI
           # Approximate location parameters for the search.
           sig do
             params(
-              approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
+              approximate: T.any(
+                OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
+                OpenAI::Util::AnyHash
+              ),
               type: Symbol
             )
               .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/lib/openai/models/chat/completion_delete_params.rbi
index 6682081d..5aeae8b6 100644
--- a/rbi/lib/openai/models/chat/completion_delete_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_delete_params.rbi
@@ -8,8 +8,12 @@ module OpenAI
         include OpenAI::RequestParameters

         sig do
-          params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-            .returns(T.attached_class)
+          params(
+            request_options: T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          ).returns(T.attached_class)
         end
         def self.new(request_options: {})
         end
diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi
index e91a5f65..eb6f581d 100644
--- a/rbi/lib/openai/models/chat/completion_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_list_params.rbi
@@ -65,7 +65,7 @@ module OpenAI
           metadata: T.nilable(T::Hash[Symbol, String]),
           model: String,
           order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
index 1422fe6c..a8822b3a 100644
--- a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
@@ -8,8 +8,12 @@ module OpenAI
         include OpenAI::RequestParameters

         sig do
-          params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-            .returns(T.attached_class)
+          params(
+            request_options: T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          ).returns(T.attached_class)
         end
         def self.new(request_options: {})
         end
diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi
index 64b6d477..0b68282e 100644
--- a/rbi/lib/openai/models/chat/completion_update_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_update_params.rbi
@@ -24,7 +24,7 @@ module OpenAI
         sig do
           params(
             metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
index 8bffd407..0a60c940 100644
--- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
@@ -44,7 +44,7 @@ module OpenAI
             after: String,
             limit: Integer,
             order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi
index c4a10497..a6536d37 100644
--- a/rbi/lib/openai/models/completion.rbi
+++ b/rbi/lib/openai/models/completion.rbi
@@ -81,7 +81,7 @@ module OpenAI
           created: Integer,
           model: String,
           system_fingerprint: String,
-          usage: OpenAI::Models::CompletionUsage,
+          usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash),
           object: Symbol
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi
index 7cc42500..cbf7bf7d 100644
--- a/rbi/lib/openai/models/completion_choice.rbi
+++ b/rbi/lib/openai/models/completion_choice.rbi
@@ -49,7 +49,7 @@ module OpenAI
         params(
           finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol,
           index: Integer,
-          logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs),
+          logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash)),
           text: String
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi
index d0fbc5a9..d2c76ed8 100644
--- a/rbi/lib/openai/models/completion_create_params.rbi
+++ b/rbi/lib/openai/models/completion_create_params.rbi
@@ -269,12 +269,12 @@ module OpenAI
           presence_penalty: T.nilable(Float),
           seed: T.nilable(Integer),
           stop: T.nilable(T.any(String, T::Array[String])),
-          stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions),
+          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
           suffix: T.nilable(String),
           temperature: T.nilable(Float),
           top_p: T.nilable(Float),
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi
index 411d8384..d18a0c5d 100644
--- a/rbi/lib/openai/models/completion_usage.rbi
+++ b/rbi/lib/openai/models/completion_usage.rbi
@@ -60,8 +60,8 @@ module OpenAI
           completion_tokens: Integer,
           prompt_tokens: Integer,
           total_tokens: Integer,
-          completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails,
-          prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
+          completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash),
+          prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi
index aa1ebc37..e6a54b86 100644
--- a/rbi/lib/openai/models/create_embedding_response.rbi
+++ b/rbi/lib/openai/models/create_embedding_response.rbi
@@ -46,7 +46,7 @@ module OpenAI
         params(
           data: T::Array[OpenAI::Models::Embedding],
           model: String,
-          usage: OpenAI::Models::CreateEmbeddingResponse::Usage,
+          usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash),
           object: Symbol
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi
index 8492ccdf..69cd0415 100644
--- a/rbi/lib/openai/models/embedding_create_params.rbi
+++ b/rbi/lib/openai/models/embedding_create_params.rbi
@@ -82,7 +82,7 @@ module OpenAI
           dimensions: Integer,
           encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol,
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/lib/openai/models/file_content_params.rbi
index 71e0eaaa..6d0e6bb7 100644
--- a/rbi/lib/openai/models/file_content_params.rbi
+++ b/rbi/lib/openai/models/file_content_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi
index fbd7b9d0..93746f38 100644
--- a/rbi/lib/openai/models/file_create_params.rbi
+++ b/rbi/lib/openai/models/file_create_params.rbi
@@ -31,7 +31,7 @@ module OpenAI
         params(
           file: T.any(IO, StringIO),
           purpose: OpenAI::Models::FilePurpose::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/lib/openai/models/file_delete_params.rbi
index 5c6407ba..c934741d 100644
--- a/rbi/lib/openai/models/file_delete_params.rbi
+++ b/rbi/lib/openai/models/file_delete_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi
index be376ff6..795e6044 100644
--- a/rbi/lib/openai/models/file_list_params.rbi
+++ b/rbi/lib/openai/models/file_list_params.rbi
@@ -56,7 +56,7 @@ module OpenAI
           limit: Integer,
           order: OpenAI::Models::FileListParams::Order::OrSymbol,
           purpose: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi b/rbi/lib/openai/models/file_retrieve_params.rbi
index a1f9e075..261382f7 100644
--- a/rbi/lib/openai/models/file_retrieve_params.rbi
+++ b/rbi/lib/openai/models/file_retrieve_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
index cce5ee80..85b64f51 100644
--- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
+++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
@@ -213,10 +213,10 @@ module OpenAI
           params(
             id: String,
             created_at: Integer,
-            error: T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error),
+            error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash)),
             fine_tuned_model: T.nilable(String),
             finished_at: T.nilable(Integer),
-            hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters,
+            hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash),
             model: String,
             organization_id: String,
             result_files: T::Array[String],
@@ -228,7 +228,7 @@ module OpenAI
             estimated_finish: T.nilable(Integer),
             integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]),
             metadata: T.nilable(T::Hash[Symbol, String]),
-            method_: OpenAI::Models::FineTuning::FineTuningJob::Method,
+            method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash),
             object: Symbol
           )
             .returns(T.attached_class)
@@ -489,8 +489,8 @@ module OpenAI
           # The method used for fine-tuning.
           sig do
             params(
-              dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo,
-              supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised,
+              dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash),
+              supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash),
               type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol
             )
               .returns(T.attached_class)
@@ -530,7 +530,9 @@ module OpenAI
             # Configuration for the DPO fine-tuning method.
             sig do
-              params(hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters)
+              params(
+                hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash)
+              )
                 .returns(T.attached_class)
             end
             def self.new(hyperparameters: nil)
@@ -694,7 +696,12 @@ module OpenAI
             # Configuration for the supervised fine-tuning method.
             sig do
-              params(hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters)
+              params(
+                hyperparameters: T.any(
+                  OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters,
+                  OpenAI::Util::AnyHash
+                )
+              )
                 .returns(T.attached_class)
             end
             def self.new(hyperparameters: nil)
diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
index e7898d88..de6aec82 100644
--- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
+++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
@@ -29,7 +29,10 @@ module OpenAI
         end

         sig do
-          params(wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, type: Symbol)
+          params(
+            wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash),
+            type: Symbol
+          )
             .returns(T.attached_class)
         end
         def self.new(wandb:, type: :wandb)
diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
index bdc32d2b..529378d5 100644
--- a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
@@ -8,8 +8,12 @@ module OpenAI
         include OpenAI::RequestParameters

         sig do
-          params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-            .returns(T.attached_class)
+          params(
+            request_options: T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          ).returns(T.attached_class)
         end
         def self.new(request_options: {})
         end
diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
index 1f474701..cc5d4db7 100644
--- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
@@ -144,14 +144,14 @@ module OpenAI
           params(
             model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol),
             training_file: String,
-            hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters,
+            hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash),
             integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]),
             metadata: T.nilable(T::Hash[Symbol, String]),
-            method_: OpenAI::Models::FineTuning::JobCreateParams::Method,
+            method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash),
             seed: T.nilable(Integer),
             suffix: T.nilable(String),
             validation_file: T.nilable(String),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
@@ -340,7 +340,10 @@ module OpenAI
           end

           sig do
-            params(wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, type: Symbol)
+            params(
+              wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash),
+              type: Symbol
+            )
               .returns(T.attached_class)
           end
           def self.new(wandb:, type: :wandb)
@@ -462,8 +465,8 @@ module OpenAI
           # The method used for fine-tuning.
           sig do
             params(
-              dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo,
-              supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised,
+              dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash),
+              supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash),
               type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol
             )
               .returns(T.attached_class)
@@ -503,7 +506,9 @@ module OpenAI
             # Configuration for the DPO fine-tuning method.
             sig do
-              params(hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters)
+              params(
+                hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash)
+              )
                 .returns(T.attached_class)
             end
             def self.new(hyperparameters: nil)
@@ -667,7 +672,12 @@ module OpenAI
             # Configuration for the supervised fine-tuning method.
             sig do
-              params(hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters)
+              params(
+                hyperparameters: T.any(
+                  OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters,
+                  OpenAI::Util::AnyHash
+                )
+              )
                 .returns(T.attached_class)
             end
             def self.new(hyperparameters: nil)
diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi
index fc4ede17..253cd1c0 100644
--- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi
@@ -29,7 +29,7 @@ module OpenAI
           params(
             after: String,
             limit: Integer,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi
index 6e667d46..4102ced5 100644
--- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi
@@ -40,7 +40,7 @@ module OpenAI
             after: String,
             limit: Integer,
             metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi
index 238e3029..9c617afd 100644
--- a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi
@@ -8,8 +8,12 @@ module OpenAI
         include OpenAI::RequestParameters

         sig do
-          params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-            .returns(T.attached_class)
+          params(
+            request_options: T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          ).returns(T.attached_class)
         end
         def self.new(request_options: {})
         end
diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
index 27b1407f..36ce4669 100644
--- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
@@ -30,7 +30,7 @@ module OpenAI
           params(
             after: String,
             limit: Integer,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
index a04c9043..faed91db 100644
--- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
+++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
@@ -81,7 +81,7 @@ module OpenAI
             created_at: Integer,
             fine_tuned_model_checkpoint: String,
             fine_tuning_job_id: String,
-            metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics,
+            metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash),
             step_number: Integer,
             object: Symbol
           )
diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi
index 821d44c7..2a711dc2 100644
--- a/rbi/lib/openai/models/image_create_variation_params.rbi
+++ b/rbi/lib/openai/models/image_create_variation_params.rbi
@@ -85,7 +85,7 @@ module OpenAI
           response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol),
           size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol),
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi
index f5b930f4..3fad72eb 100644
--- a/rbi/lib/openai/models/image_edit_params.rbi
+++ b/rbi/lib/openai/models/image_edit_params.rbi
@@ -107,7 +107,7 @@ module OpenAI
           response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol),
           size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol),
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi
index 9aab13b2..513ce790 100644
--- a/rbi/lib/openai/models/image_generate_params.rbi
+++ b/rbi/lib/openai/models/image_generate_params.rbi
@@ -116,7 +116,7 @@ module OpenAI
           size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol),
           style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol),
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/lib/openai/models/model_delete_params.rbi
index ad6f9bd8..5e653702 100644
--- a/rbi/lib/openai/models/model_delete_params.rbi
+++ b/rbi/lib/openai/models/model_delete_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/lib/openai/models/model_list_params.rbi
index 90fef299..d3992e7a 100644
--- a/rbi/lib/openai/models/model_list_params.rbi
+++ b/rbi/lib/openai/models/model_list_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
      include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/lib/openai/models/model_retrieve_params.rbi
index dc7fc8c5..7be752d3 100644
--- a/rbi/lib/openai/models/model_retrieve_params.rbi
+++ b/rbi/lib/openai/models/model_retrieve_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi
index f30ecf4b..b589c1b7 100644
--- a/rbi/lib/openai/models/moderation.rbi
+++ b/rbi/lib/openai/models/moderation.rbi
@@ -50,9 +50,9 @@ module OpenAI

       sig do
         params(
-          categories: OpenAI::Models::Moderation::Categories,
-          category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes,
-          category_scores: OpenAI::Models::Moderation::CategoryScores,
+          categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash),
+          category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash),
+          category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash),
           flagged: T::Boolean
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi
index eb0cf50f..282a33b9 100644
--- a/rbi/lib/openai/models/moderation_create_params.rbi
+++ b/rbi/lib/openai/models/moderation_create_params.rbi
@@ -62,7 +62,7 @@ module OpenAI
             T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]
           ),
           model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol),
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi
index e5d12ba8..52ba6b82 100644
--- a/rbi/lib/openai/models/moderation_image_url_input.rbi
+++ b/rbi/lib/openai/models/moderation_image_url_input.rbi
@@ -26,7 +26,10 @@ module OpenAI
       # An object describing an image to classify.
       sig do
-        params(image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, type: Symbol)
+        params(
+          image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash),
+          type: Symbol
+        )
           .returns(T.attached_class)
       end
       def self.new(image_url:, type: :image_url)
diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi
index 219e6d13..01da50a3 100644
--- a/rbi/lib/openai/models/response_format_json_schema.rbi
+++ b/rbi/lib/openai/models/response_format_json_schema.rbi
@@ -28,7 +28,10 @@ module OpenAI
       # more about
       # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs).
       sig do
-        params(json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, type: Symbol)
+        params(
+          json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash),
+          type: Symbol
+        )
           .returns(T.attached_class)
       end
       def self.new(json_schema:, type: :json_schema)
diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi
index 43f9d2b2..e2d118da 100644
--- a/rbi/lib/openai/models/responses/file_search_tool.rbi
+++ b/rbi/lib/openai/models/responses/file_search_tool.rbi
@@ -64,7 +64,7 @@ module OpenAI
             vector_store_ids: T::Array[String],
             filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter),
             max_num_results: Integer,
-            ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions,
+            ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash),
             type: Symbol
           )
             .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi
index 57d5f60d..b43cde65 100644
--- a/rbi/lib/openai/models/responses/input_item_list_params.rbi
+++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi
@@ -56,7 +56,7 @@ module OpenAI
             before: String,
             limit: Integer,
             order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi
index c7462537..bf617bc9 100644
--- a/rbi/lib/openai/models/responses/response.rbi
+++ b/rbi/lib/openai/models/responses/response.rbi
@@ -394,8 +394,8 @@ module OpenAI
           params(
             id: String,
             created_at: Float,
-            error: T.nilable(OpenAI::Models::Responses::ResponseError),
-            incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails),
+            error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash)),
+            incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)),
             instructions: T.nilable(String),
             metadata: T.nilable(T::Hash[Symbol, String]),
             model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol),
@@ -427,11 +427,11 @@ module OpenAI
             top_p: T.nilable(Float),
             max_output_tokens: T.nilable(Integer),
             previous_response_id: T.nilable(String),
-            reasoning: T.nilable(OpenAI::Models::Reasoning),
+            reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)),
             status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol,
-            text: OpenAI::Models::Responses::ResponseTextConfig,
+            text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash),
             truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol),
-            usage: OpenAI::Models::Responses::ResponseUsage,
+            usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash),
             user: String,
             object: Symbol
           )
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
index 7ced83f5..7001c9c4 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
@@ -37,7 +37,7 @@ module OpenAI
         # Emitted when the code interpreter call is completed.
         sig do
           params(
-            code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall,
+            code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash),
             output_index: Integer,
             type: Symbol
           )
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
index afa6822d..8d4c4c6f 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
@@ -37,7 +37,7 @@ module OpenAI
         # Emitted when a code interpreter call is in progress.
         sig do
           params(
-            code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall,
+            code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash),
             output_index: Integer,
             type: Symbol
           )
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
index 13bb3219..533e28c5 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
@@ -37,7 +37,7 @@ module OpenAI
         # Emitted when the code interpreter is actively interpreting the code snippet.
         sig do
           params(
-            code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall,
+            code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash),
             output_index: Integer,
             type: Symbol
           )
diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi
index 7129137a..8cbd9bea 100644
--- a/rbi/lib/openai/models/responses/response_completed_event.rbi
+++ b/rbi/lib/openai/models/responses/response_completed_event.rbi
@@ -26,7 +26,10 @@ module OpenAI
         end

         # Emitted when the model response is complete.
-        sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) }
+        sig do
+          params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol)
+            .returns(T.attached_class)
+        end
         def self.new(response:, type: :"response.completed")
         end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
index 332d500c..9e28ddee 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
@@ -83,7 +83,7 @@ module OpenAI
           params(
             id: String,
             call_id: String,
-            output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+            output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash),
             acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck],
             status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol,
             type: Symbol
diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi
index ef53ffa4..7e1109eb 100644
--- a/rbi/lib/openai/models/responses/response_create_params.rbi
+++ b/rbi/lib/openai/models/responses/response_create_params.rbi
@@ -393,10 +393,10 @@ module OpenAI
           metadata: T.nilable(T::Hash[Symbol, String]),
           parallel_tool_calls: T.nilable(T::Boolean),
           previous_response_id: T.nilable(String),
-          reasoning: T.nilable(OpenAI::Models::Reasoning),
+          reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)),
           store: T.nilable(T::Boolean),
           temperature: T.nilable(Float),
-          text: OpenAI::Models::Responses::ResponseTextConfig,
+          text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash),
           tool_choice: T.any(
             OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
             OpenAI::Models::Responses::ToolChoiceTypes,
@@ -413,7 +413,7 @@ module OpenAI
           top_p: T.nilable(Float),
           truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol),
           user: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi
index 84ab75a8..58257a17 100644
--- a/rbi/lib/openai/models/responses/response_created_event.rbi
+++ b/rbi/lib/openai/models/responses/response_created_event.rbi
@@ -26,7 +26,10 @@ module OpenAI
         end

         # An event that is emitted when a response is created.
-        sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) }
+        sig do
+          params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol)
+            .returns(T.attached_class)
+        end
         def self.new(response:, type: :"response.created")
         end
diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/lib/openai/models/responses/response_delete_params.rbi
index b268b24b..0dfb6fe2 100644
--- a/rbi/lib/openai/models/responses/response_delete_params.rbi
+++ b/rbi/lib/openai/models/responses/response_delete_params.rbi
@@ -8,8 +8,12 @@ module OpenAI
         include OpenAI::RequestParameters

         sig do
-          params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-            .returns(T.attached_class)
+          params(
+            request_options: T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          ).returns(T.attached_class)
         end
         def self.new(request_options: {})
         end
diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi
index 98d7cb48..7409c35c 100644
--- a/rbi/lib/openai/models/responses/response_failed_event.rbi
+++ b/rbi/lib/openai/models/responses/response_failed_event.rbi
@@ -26,7 +26,10 @@ module OpenAI
         end

         # An event that is emitted when a response fails.
-        sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) }
+        sig do
+          params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol)
+            .returns(T.attached_class)
+        end
         def self.new(response:, type: :"response.failed")
         end
diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi
index 3f82d5d8..f694955a 100644
--- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi
+++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi
@@ -26,7 +26,10 @@ module OpenAI
         end

         # Emitted when the response is in progress.
-        sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) }
+        sig do
+          params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol)
+            .returns(T.attached_class)
+        end
         def self.new(response:, type: :"response.in_progress")
         end
diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi
index dfe15923..07ca324b 100644
--- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi
+++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi
@@ -26,7 +26,10 @@ module OpenAI
         end

         # An event that is emitted when a response finishes as incomplete.
-        sig { params(response: OpenAI::Models::Responses::Response, type: Symbol).returns(T.attached_class) }
+        sig do
+          params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol)
+            .returns(T.attached_class)
+        end
         def self.new(response:, type: :"response.incomplete")
         end
diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi
index f99aa262..9d56daf4 100644
--- a/rbi/lib/openai/models/responses/response_input_item.rbi
+++ b/rbi/lib/openai/models/responses/response_input_item.rbi
@@ -293,7 +293,7 @@ module OpenAI
           sig do
             params(
               call_id: String,
-              output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+              output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash),
               id: String,
               acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck],
               status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol,
diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi
index d2129c7d..91c155d7 100644
--- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi
+++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi
@@ -23,7 +23,7 @@ module OpenAI
         sig do
           params(
             include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol],
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi
index 2e0872c8..10397181 100644
--- a/rbi/lib/openai/models/responses/response_usage.rbi
+++ b/rbi/lib/openai/models/responses/response_usage.rbi
@@ -60,9 +60,9 @@ module OpenAI
         sig do
           params(
             input_tokens: Integer,
-            input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails,
+            input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash),
             output_tokens: Integer,
-            output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails,
+            output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash),
             total_tokens: Integer
           )
             .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi
index 7d675e8c..ae1af675 100644
--- a/rbi/lib/openai/models/responses/web_search_tool.rbi
+++ b/rbi/lib/openai/models/responses/web_search_tool.rbi
@@ -50,7 +50,7 @@ module OpenAI
           params(
             type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol,
             search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol,
-            user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)
+            user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
index 1f106d7d..deafa473 100644
--- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
+++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
@@ -23,7 +23,10 @@ module OpenAI
       def type=(_)
       end

-      sig { params(static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol).returns(T.attached_class) }
+      sig do
+        params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash), type: Symbol)
+          .returns(T.attached_class)
+      end
       def self.new(static:, type: :static)
       end
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
index 617954af..4c4a6100 100644
--- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
+++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
@@ -24,7 +24,10 @@ module OpenAI
       end

       # Customize your own chunking strategy by setting chunk size and chunk overlap.
-      sig { params(static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol).returns(T.attached_class) }
+      sig do
+        params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash), type: Symbol)
+          .returns(T.attached_class)
+      end
       def self.new(static:, type: :static)
       end
diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi
index 18dcc179..2ce22a6c 100644
--- a/rbi/lib/openai/models/upload.rbi
+++ b/rbi/lib/openai/models/upload.rbi
@@ -102,7 +102,7 @@ module OpenAI
           filename: String,
           purpose: String,
           status: OpenAI::Models::Upload::Status::TaggedSymbol,
-          file: T.nilable(OpenAI::Models::FileObject),
+          file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)),
           object: Symbol
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/lib/openai/models/upload_cancel_params.rbi
index 6db61397..fac39581 100644
--- a/rbi/lib/openai/models/upload_cancel_params.rbi
+++ b/rbi/lib/openai/models/upload_cancel_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi
index f8550617..4503bab7 100644
--- a/rbi/lib/openai/models/upload_complete_params.rbi
+++ b/rbi/lib/openai/models/upload_complete_params.rbi
@@ -29,7 +29,7 @@ module OpenAI
         params(
           part_ids: T::Array[String],
           md5: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi
index 93d701d3..d6b9c299 100644
--- a/rbi/lib/openai/models/upload_create_params.rbi
+++ b/rbi/lib/openai/models/upload_create_params.rbi
@@ -54,7 +54,7 @@ module OpenAI
           filename: String,
           mime_type: String,
           purpose: OpenAI::Models::FilePurpose::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi
index 7ef6052a..d52036fc 100644
--- a/rbi/lib/openai/models/uploads/part_create_params.rbi
+++ b/rbi/lib/openai/models/uploads/part_create_params.rbi
@@ -19,7 +19,7 @@ module OpenAI
         sig do
           params(
             data: T.any(IO, StringIO),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi
index dadca7f7..2cb7c946 100644
--- a/rbi/lib/openai/models/vector_store.rbi
+++ b/rbi/lib/openai/models/vector_store.rbi
@@ -123,13 +123,13 @@ module OpenAI
         params(
           id: String,
           created_at: Integer,
-          file_counts: OpenAI::Models::VectorStore::FileCounts,
+          file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash),
           last_active_at: T.nilable(Integer),
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: String,
           status: OpenAI::Models::VectorStore::Status::TaggedSymbol,
           usage_bytes: Integer,
-          expires_after: OpenAI::Models::VectorStore::ExpiresAfter,
+          expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash),
           expires_at: T.nilable(Integer),
           object: Symbol
         )
diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi
index b3c4a55a..f6aad6ef 100644
--- a/rbi/lib/openai/models/vector_store_create_params.rbi
+++ b/rbi/lib/openai/models/vector_store_create_params.rbi
@@ -90,11 +90,11 @@ module OpenAI
             OpenAI::Models::AutoFileChunkingStrategyParam,
             OpenAI::Models::StaticFileChunkingStrategyObjectParam
           ),
-          expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter,
+          expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash),
          file_ids: T::Array[String],
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: String,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/lib/openai/models/vector_store_delete_params.rbi
index 3fa908fe..a7ddf5aa 100644
--- a/rbi/lib/openai/models/vector_store_delete_params.rbi
+++ b/rbi/lib/openai/models/vector_store_delete_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi
index 38db6860..c0360d6f 100644
--- a/rbi/lib/openai/models/vector_store_list_params.rbi
+++ b/rbi/lib/openai/models/vector_store_list_params.rbi
@@ -59,7 +59,7 @@ module OpenAI
           before: String,
           limit: Integer,
           order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/lib/openai/models/vector_store_retrieve_params.rbi
index 88a00415..dd7b68d3 100644
--- a/rbi/lib/openai/models/vector_store_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_store_retrieve_params.rbi
@@ -7,8 +7,12 @@ module OpenAI
       include OpenAI::RequestParameters

       sig do
-        params(request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-          .returns(T.attached_class)
+        params(
+          request_options: T.any(
+            OpenAI::RequestOptions,
+            OpenAI::Util::AnyHash
+          )
+        ).returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi
index 377f1ad9..4145160a 100644
--- a/rbi/lib/openai/models/vector_store_search_params.rbi
+++ b/rbi/lib/openai/models/vector_store_search_params.rbi
@@ -63,9 +63,9 @@ module OpenAI
           query: T.any(String, T::Array[String]),
           filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter),
           max_num_results: Integer,
-          ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions,
+          ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash),
           rewrite_query: T::Boolean,
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi
index 4e5c605f..2415457b 100644
--- a/rbi/lib/openai/models/vector_store_update_params.rbi
+++ b/rbi/lib/openai/models/vector_store_update_params.rbi
@@ -43,10 +43,10 @@ module OpenAI

       sig do
         params(
-          expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter),
+          expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)),
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: T.nilable(String),
-          request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
index cc6cad3f..c1334d3d 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
index 1dfb5480..2f203007 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
@@ -74,7 +74,7 @@ module OpenAI
               OpenAI::Models::AutoFileChunkingStrategyParam,
               OpenAI::Models::StaticFileChunkingStrategyObjectParam
             ),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
index 7cb94eb9..367b7a22 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
@@ -82,7 +82,7 @@ module OpenAI
             filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol,
             limit: Integer,
             order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
index 20edec10..0935490b 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi
index e9c610a6..d87f0929 100644
--- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
index bf6ee554..dac1b7e6 100644
--- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
@@ -74,7 +74,7 @@ module OpenAI
               OpenAI::Models::AutoFileChunkingStrategyParam,
               OpenAI::Models::StaticFileChunkingStrategyObjectParam
             ),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
index f7fe2e55..8277a73f 100644
--- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
index e4ff05f1..f123f36a 100644
--- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
@@ -73,7 +73,7 @@ module OpenAI
             filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol,
             limit: Integer,
             order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
index 4f1f1768..a69049b2 100644
--- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
index 69fe9291..b74c3292 100644
--- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
@@ -35,7 +35,7 @@ module OpenAI
           params(
             vector_store_id: String,
             attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
-            request_options: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
index 8e2181bb..6ddab8f0 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
@@ -125,7 +125,7 @@ module OpenAI
           params(
             id: String,
             created_at: Integer,
-            last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError),
+            last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)),
             status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol,
             usage_bytes: Integer,
             vector_store_id: String,
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
index d32c3323..47124b6e 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
@@ -73,7 +73,7 @@ module OpenAI
           params(
             id: String,
             created_at: Integer,
-            file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
+            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash),
             status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol,
             vector_store_id: String,
             object: Symbol
diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi
index 8c7934b0..4f198f23 100644
--- a/rbi/lib/openai/request_options.rbi
+++ b/rbi/lib/openai/request_options.rbi
@@ -4,13 +4,13 @@ module OpenAI
   # @api private
   module RequestParameters
     # Options to specify HTTP behaviour for this request.
-    sig { returns(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) }
+    sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) }
     def request_options
     end

     sig do
-      params(_: T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
-        .returns(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+      params(_: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+        .returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
     end
     def request_options=(_)
     end
@@ -18,7 +18,7 @@ module OpenAI
     # @api private
     module Converter
       # @api private
-      sig { params(params: T.anything).returns([T.anything, T::Hash[Symbol, T.anything]]) }
+      sig { params(params: T.anything).returns([T.anything, OpenAI::Util::AnyHash]) }
      def dump_request(params)
       end
     end
@@ -100,7 +100,7 @@ module OpenAI
     end

     # Returns a new instance of RequestOptions.
-    sig { params(values: T::Hash[Symbol, T.anything]).returns(T.attached_class) }
+    sig { params(values: OpenAI::Util::AnyHash).returns(T.attached_class) }
     def self.new(values = {})
     end
   end
diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi
index c434cedb..5d7311e5 100644
--- a/rbi/lib/openai/resources/audio/speech.rbi
+++ b/rbi/lib/openai/resources/audio/speech.rbi
@@ -13,7 +13,7 @@ module OpenAI
           instructions: String,
           response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol,
           speed: Float,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(T.anything)
       end
diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi
index 38f106b4..0a0205db 100644
--- a/rbi/lib/openai/resources/audio/transcriptions.rbi
+++ b/rbi/lib/openai/resources/audio/transcriptions.rbi
@@ -16,7 +16,7 @@ module OpenAI
           temperature: Float,
           timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol],
           stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose))
       end
@@ -78,7 +78,7 @@ module OpenAI
           temperature: Float,
           timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol],
           stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(
             OpenAI::Stream[
diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi
index baf563ad..c99220ed 100644
--- a/rbi/lib/openai/resources/audio/translations.rbi
+++ b/rbi/lib/openai/resources/audio/translations.rbi
@@ -12,7 +12,7 @@ module OpenAI
           prompt: String,
           response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol,
           temperature: Float,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose))
       end
diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi
index 1a12c440..0e76d86b 100644
--- a/rbi/lib/openai/resources/batches.rbi
+++ b/rbi/lib/openai/resources/batches.rbi
@@ -10,7 +10,7 @@ module OpenAI
           endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol,
           input_file_id: String,
           metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Batch)
       end
@@ -48,7 +48,12 @@ module OpenAI
       sig do
         params(
           batch_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(
+            T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          )
         )
           .returns(OpenAI::Models::Batch)
       end
@@ -64,7 +69,7 @@ module OpenAI
         params(
           after: String,
           limit: Integer,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::CursorPage[OpenAI::Models::Batch])
       end
@@ -87,7 +92,12 @@ module OpenAI
       sig do
         params(
           batch_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(
+            T.any(
+              OpenAI::RequestOptions,
+              OpenAI::Util::AnyHash
+            )
+          )
         )
           .returns(OpenAI::Models::Batch)
       end
diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi
index a31361cb..3205f05d 100644
--- a/rbi/lib/openai/resources/beta/assistants.rbi
+++ b/rbi/lib/openai/resources/beta/assistants.rbi
@@ -31,7 +31,7 @@ module OpenAI
             )
           ],
           top_p: T.nilable(Float),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Assistant)
       end
@@ -111,7 +111,7 @@ module OpenAI
       sig do
         params(
           assistant_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Assistant)
       end
@@ -150,7 +150,7 @@ module OpenAI
             )
           ],
           top_p: T.nilable(Float),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Assistant)
       end
@@ -235,7 +235,7 @@ module OpenAI
           before: String,
           limit: Integer,
           order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant])
       end
@@ -264,7 +264,7 @@ module OpenAI
       sig do
         params(
           assistant_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::AssistantDeleted)
       end
diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi
index 6db707e7..65c67e5a 100644
--- a/rbi/lib/openai/resources/beta/threads.rbi
+++ b/rbi/lib/openai/resources/beta/threads.rbi
@@ -18,7 +18,7 @@ module OpenAI
           messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message],
           metadata: T.nilable(T::Hash[Symbol, String]),
           tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Thread)
       end
@@ -46,7 +46,7 @@ module OpenAI
       sig do
         params(
           thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Thread)
       end
@@ -63,7 +63,7 @@ module OpenAI
           thread_id: String,
           metadata: T.nilable(T::Hash[Symbol, String]),
           tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Thread)
       end
@@ -90,7 +90,7 @@ module OpenAI
       sig do
         params(
           thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::ThreadDeleted)
       end
@@ -140,7 +140,7 @@ module OpenAI
           top_p: T.nilable(Float),
           truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy),
           stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(OpenAI::Models::Beta::Threads::Run)
       end
@@ -279,7 +279,7 @@ module OpenAI
           top_p: T.nilable(Float),
           truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy),
           stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
         )
           .returns(
             OpenAI::Stream[
diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi
index 68fd1790..d83a3d17 100644
--- a/rbi/lib/openai/resources/beta/threads/messages.rbi
+++ b/rbi/lib/openai/resources/beta/threads/messages.rbi
@@ -22,7 +22,7 @@ module OpenAI
             role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol,
             attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]),
             metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Message)
         end
@@ -57,7 +57,7 @@ module OpenAI
           params(
             message_id: String,
             thread_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Message)
         end
@@ -77,7 +77,7 @@ module OpenAI
             message_id: String,
             thread_id: String,
             metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Message)
         end
@@ -106,7 +106,7 @@ module OpenAI
             limit: Integer,
             order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol,
             run_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message])
         end
@@ -141,7 +141,7 @@ module OpenAI
           params(
             message_id: String,
             thread_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::MessageDeleted)
         end
diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi
index f5d012a0..44b7f356 100644
--- a/rbi/lib/openai/resources/beta/threads/runs.rbi
+++ b/rbi/lib/openai/resources/beta/threads/runs.rbi
@@ -51,7 +51,7 @@ module OpenAI
             top_p: T.nilable(Float),
             truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy),
             stream: T.noreturn,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Run)
         end
@@ -213,7 +213,7 @@ module OpenAI
             top_p: T.nilable(Float),
             truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy),
             stream: T.noreturn,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
          )
            .returns(
              OpenAI::Stream[
@@ -367,7 +367,7 @@ module OpenAI
           params(
             run_id: String,
             thread_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Run)
         end
@@ -387,7 +387,7 @@ module OpenAI
             run_id: String,
             thread_id: String,
             metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Run)
         end
@@ -416,7 +416,7 @@ module OpenAI
             before: String,
             limit: Integer,
             order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run])
         end
@@ -448,7 +448,7 @@ module OpenAI
           params(
             run_id: String,
             thread_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Run)
         end
@@ -471,7 +471,7 @@ module OpenAI
             thread_id: String,
             tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
             stream: T.noreturn,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(OpenAI::Models::Beta::Threads::Run)
         end
@@ -502,7 +502,7 @@ module OpenAI
             thread_id: String,
             tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput],
             stream: T.noreturn,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
           )
             .returns(
               OpenAI::Stream[
diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
index 5dfa9a8a..36fc8b92 100644
--- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
+++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
@@ -13,7 +13,7 @@ module OpenAI
               thread_id: String,
               run_id: String,
               include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
-              request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))
+              request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
             )
               .returns(OpenAI::Models::Beta::Threads::Runs::RunStep)
           end
@@ -47,7 +47,7 @@ module OpenAI
               include:
T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 8fc9878c..00b8c661 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -77,7 +77,7 @@ module OpenAI user: String, web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -341,7 +341,7 @@ module OpenAI user: String, web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end @@ -541,7 +541,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -559,7 +559,7 @@ module OpenAI params( completion_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -586,7 +586,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) end @@ -613,7 +613,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletionDeleted) end diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 0667ddbc..f86371f9 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -13,7 +13,7 @@ module OpenAI after: String, limit: Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi 
b/rbi/lib/openai/resources/completions.rbi
index b894675b..62ca185f 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -31,7 +31,7 @@ module OpenAI top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Completion) end @@ -174,7 +174,7 @@ module OpenAI top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Stream[OpenAI::Models::Completion]) end diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 65a19f1d..9c05cf33 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -11,7 +11,7 @@ module OpenAI dimensions: Integer, encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::CreateEmbeddingResponse) end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index da0efe9d..a8fa8604 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -28,7 +28,7 @@ module OpenAI params( file: T.any(IO, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::FileObject) end @@ -48,7 +48,12 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable( + T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ) ) .returns(OpenAI::Models::FileObject) end @@ -66,7 +71,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::FileObject]) end @@ -92,7 +97,12 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable( + T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ) ) .returns(OpenAI::Models::FileDeleted) end @@ -107,7 +117,12 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable( + T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ) ) .returns(T.anything) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 1c55189e..6e2fc880 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -26,7 +26,7 @@ module OpenAI seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, 
T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -99,7 +99,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -116,7 +116,7 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end @@ -136,7 +136,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -153,7 +153,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index 24509186..5986546b 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -11,7 +11,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 1944e5f2..d36dd357 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -12,7 +12,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -52,7 +52,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -98,7 +98,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) 
.returns(OpenAI::Models::ImagesResponse) end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 52e65f29..505640bc 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -8,7 +8,12 @@ module OpenAI sig do params( model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable( + T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ) ) .returns(OpenAI::Models::Model) end @@ -22,7 +27,7 @@ module OpenAI # Lists the currently available models, and provides basic information about each # one such as the owner and availability. sig do - params(request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything]))) + params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))) .returns(OpenAI::Page[OpenAI::Models::Model]) end def list(request_options: {}) @@ -33,7 +38,12 @@ module OpenAI sig do params( model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable( + T.any( + OpenAI::RequestOptions, + OpenAI::Util::AnyHash + ) + ) ) .returns(OpenAI::Models::ModelDeleted) end diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index b57441a5..b6a3164f 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -13,7 +13,7 @@ module OpenAI T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 3ed27f5a..7f02edbe 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -66,7 +66,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Responses::Response) end @@ -241,7 +241,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns( OpenAI::Stream[ @@ -399,7 +399,7 @@ module OpenAI params( response_id: String, include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Responses::Response) end @@ -417,7 +417,7 @@ module OpenAI sig do params( response_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .void end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi 
b/rbi/lib/openai/resources/responses/input_items.rbi index 7f92008d..d1663648 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -12,7 +12,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns( OpenAI::CursorPage[ diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 6ee12473..863418f5 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -32,7 +32,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end @@ -59,7 +59,7 @@ module OpenAI sig do params( upload_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end @@ -88,7 +88,7 @@ module OpenAI upload_id: String, part_ids: T::Array[String], md5: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 6e52432f..b90746ff 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -19,7 +19,7 @@ module OpenAI params( upload_id: String, data: T.any(IO, StringIO), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Uploads::UploadPart) end diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 7e24de66..dd0b6e05 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -22,7 +22,7 @@ module OpenAI file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -53,7 +53,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -71,7 +71,7 @@ module OpenAI expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -100,7 +100,7 @@ module OpenAI before: String, limit: Integer, order: 
OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) end @@ -129,7 +129,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStoreDeleted) end @@ -150,7 +150,7 @@ module OpenAI max_num_results: Integer, ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, rewrite_query: T::Boolean, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse]) end diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index c8a0af8b..b7a976f9 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -14,7 +14,7 @@ module OpenAI OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -43,7 +43,7 @@ module OpenAI params( batch_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -62,7 +62,7 @@ module OpenAI params( batch_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -85,7 +85,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 88902be4..753f7397 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -16,7 +16,7 @@ module OpenAI OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -45,7 +45,7 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, 
T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -64,7 +64,7 @@ module OpenAI file_id: String, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -92,7 +92,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end @@ -129,7 +129,7 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted) end @@ -147,7 +147,7 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, T::Hash[Symbol, T.anything])) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse]) end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 3ad41179..79ce090a 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -3,6 +3,8 @@ module OpenAI # @api private module Util + # Due to the current WIP status of Shapes support in Sorbet, types referencing + # this alias might be refined in the future. 
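# A minimal illustration, not taken from this patch: because `AnyHash` below is
# just `T::Hash[Symbol, T.anything]`, a plain Symbol-keyed hash type-checks
# anywhere `OpenAI::Util::AnyHash` is accepted, e.g. the hypothetical call
#
#   client.models.list(request_options: {extra_headers: {"x-debug" => "1"}})
#
# passes without constructing an `OpenAI::RequestOptions`; the option keys are
# assumptions — only the hash-for-object substitution is the point.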
AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } # @api private @@ -87,7 +89,7 @@ module OpenAI # @api private sig do params( - data: T.any(T::Hash[Symbol, T.anything], T::Array[T.anything], T.anything), + data: T.any(OpenAI::Util::AnyHash, T::Array[T.anything], T.anything), pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])), sentinel: T.nilable(T.anything), blk: T.nilable(T.proc.returns(T.anything)) From 42b510f64343b8e1ba6fa1ea51d163a9b7bd2068 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 08:18:49 +0000 Subject: [PATCH 052/295] chore: recursively accept `AnyHash` for `BaseModel`s in arrays and hashes (#58) --- rbi/lib/openai/models/audio/transcription.rbi | 9 ++-- .../audio/transcription_text_delta_event.rbi | 10 ++-- .../audio/transcription_text_done_event.rbi | 10 ++-- .../models/audio/transcription_verbose.rbi | 12 ++--- .../models/audio/translation_verbose.rbi | 6 +-- rbi/lib/openai/models/batch.rbi | 12 +++-- rbi/lib/openai/models/beta/assistant.rbi | 2 + .../models/beta/assistant_create_params.rbi | 32 +++++++++++-- .../models/beta/assistant_update_params.rbi | 4 ++ .../beta/thread_create_and_run_params.rbi | 47 +++++++++++++++---- .../models/beta/thread_create_params.rbi | 42 ++++++++++++++--- .../models/beta/threads/image_file_delta.rbi | 6 +-- .../models/beta/threads/image_url_delta.rbi | 6 +-- .../openai/models/beta/threads/message.rbi | 12 +++-- .../beta/threads/message_create_params.rbi | 8 +++- .../models/beta/threads/message_delta.rbi | 9 ++-- rbi/lib/openai/models/beta/threads/run.rbi | 23 +++++---- .../models/beta/threads/run_create_params.rbi | 22 +++++++-- .../run_submit_tool_outputs_params.rbi | 2 +- .../runs/code_interpreter_tool_call.rbi | 1 + .../runs/code_interpreter_tool_call_delta.rbi | 3 ++ .../threads/runs/file_search_tool_call.rbi | 43 +++++++++++++---- .../models/beta/threads/runs/run_step.rbi | 7 +-- .../beta/threads/runs/run_step_delta.rbi | 3 ++ .../threads/runs/tool_call_delta_object.rbi | 3 ++ .../threads/runs/tool_calls_step_details.rbi | 1 + rbi/lib/openai/models/beta/threads/text.rbi | 1 + .../openai/models/beta/threads/text_delta.rbi | 3 ++ .../openai/models/chat/chat_completion.rbi | 10 ++-- ...hat_completion_assistant_message_param.rbi | 7 +-- .../models/chat/chat_completion_chunk.rbi | 32 +++++++------ ...hat_completion_developer_message_param.rbi | 2 +- .../models/chat/chat_completion_message.rbi | 12 ++--- .../chat_completion_prediction_content.rbi | 2 +- .../chat_completion_system_message_param.rbi | 2 +- .../chat/chat_completion_token_logprob.rbi | 2 +- .../chat_completion_tool_message_param.rbi | 2 +- .../chat_completion_user_message_param.rbi | 1 + .../models/chat/completion_create_params.rbi | 34 +++++++++----- rbi/lib/openai/models/completion.rbi | 2 +- rbi/lib/openai/models/completion_choice.rbi | 2 +- rbi/lib/openai/models/compound_filter.rbi | 2 +- .../models/create_embedding_response.rbi | 2 +- rbi/lib/openai/models/file_object.rbi | 4 +- .../models/fine_tuning/fine_tuning_job.rbi | 12 +++-- .../fine_tuning/fine_tuning_job_event.rbi | 8 ++-- .../models/fine_tuning/job_create_params.rbi | 4 +- rbi/lib/openai/models/images_response.rbi | 5 +- rbi/lib/openai/models/moderation.rbi | 26 +++++----- .../models/moderation_create_params.rbi | 2 +- .../models/moderation_create_response.rbi | 9 +++- .../models/responses/easy_input_message.rbi | 1 + .../models/responses/file_search_tool.rbi | 6 +-- 
rbi/lib/openai/models/responses/response.rbi | 21 +++++---- .../response_code_interpreter_tool_call.rbi | 10 +++- .../responses/response_computer_tool_call.rbi | 5 +- ...esponse_computer_tool_call_output_item.rbi | 29 +++++++++--- .../response_content_part_added_event.rbi | 6 ++- .../response_content_part_done_event.rbi | 6 ++- .../responses/response_create_params.rbi | 7 +++ .../models/responses/response_error.rbi | 2 +- .../response_file_search_tool_call.rbi | 4 +- ...esponse_function_tool_call_output_item.rbi | 6 +-- .../models/responses/response_input_item.rbi | 22 +++++++-- .../responses/response_input_message_item.rbi | 15 +++--- .../models/responses/response_item_list.rbi | 1 + .../response_output_item_added_event.rbi | 1 + .../response_output_item_done_event.rbi | 1 + .../responses/response_output_message.rbi | 8 +++- .../models/responses/response_output_text.rbi | 1 + .../responses/response_reasoning_item.rbi | 2 +- .../response_text_annotation_delta_event.rbi | 1 + .../models/responses/response_text_config.rbi | 3 ++ rbi/lib/openai/models/upload.rbi | 2 +- rbi/lib/openai/models/vector_store.rbi | 2 +- .../models/vector_store_create_params.rbi | 3 ++ .../models/vector_store_search_params.rbi | 6 +-- .../models/vector_store_search_response.rbi | 4 +- .../file_batch_create_params.rbi | 3 ++ .../vector_stores/file_create_params.rbi | 3 ++ .../vector_stores/vector_store_file.rbi | 25 ++++++---- .../vector_stores/vector_store_file_batch.rbi | 2 +- rbi/lib/openai/resources/beta/assistants.rbi | 8 +++- rbi/lib/openai/resources/beta/threads.rbi | 32 ++++++++----- .../resources/beta/threads/messages.rbi | 5 +- .../openai/resources/beta/threads/runs.rbi | 30 ++++++++---- rbi/lib/openai/resources/chat/completions.rbi | 40 +++++++++------- rbi/lib/openai/resources/completions.rbi | 4 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 8 ++-- rbi/lib/openai/resources/moderations.rbi | 2 +- rbi/lib/openai/resources/responses.rbi | 14 ++++-- rbi/lib/openai/resources/vector_stores.rbi | 9 ++-- .../resources/vector_stores/file_batches.rbi | 1 + .../openai/resources/vector_stores/files.rbi | 1 + 94 files changed, 620 insertions(+), 265 deletions(-) diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index ac4346e0..bc60292b 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + params(_: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) end def logprobs=(_) end @@ -30,7 +30,10 @@ module OpenAI # Represents a transcription response returned by model, based on the provided # input. 
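# A short sketch of what the widened `logprobs` signature below permits; the
# field values are assumptions, not taken from this patch:
#
#   OpenAI::Models::Audio::Transcription.new(
#     text: "hello",
#     logprobs: [{token: "hello", logprob: -0.1}] # plain hash in place of a Logprob model
#   )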
sig do - params(text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]) + params( + text: String, + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)] + ) .returns(T.attached_class) end def self.new(text:, logprobs: nil) diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi index 6c73838b..0f931bf2 100644 --- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -30,8 +30,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]) + params( + _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + ) end def logprobs=(_) end @@ -43,7 +47,7 @@ module OpenAI sig do params( delta: String, - logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index fb616718..044cbf7c 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -31,8 +31,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]) + params( + _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + ) end def logprobs=(_) end @@ -44,7 +48,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 55eb1b23..aba3ac2a 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -37,8 +37,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionSegment]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionSegment]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) end def segments=(_) end @@ -49,8 +49,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionWord]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionWord]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) end def words=(_) end @@ -62,8 +62,8 @@ module OpenAI duration: Float, language: 
String, text: String, - segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment], - words: T::Array[OpenAI::Models::Audio::TranscriptionWord] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)], + words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index bceb7944..8bceffaa 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -37,8 +37,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionSegment]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionSegment]) + params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) end def segments=(_) end @@ -48,7 +48,7 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index acb1dcad..c86180e1 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -201,7 +201,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Models::Batch::Status::TaggedSymbol, + status: OpenAI::Models::Batch::Status::OrSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -301,7 +301,10 @@ module OpenAI def data end - sig { params(_: T::Array[OpenAI::Models::BatchError]).returns(T::Array[OpenAI::Models::BatchError]) } + sig do + params(_: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) + end def data=(_) end @@ -314,7 +317,10 @@ module OpenAI def object=(_) end - sig { params(data: T::Array[OpenAI::Models::BatchError], object: String).returns(T.attached_class) } + sig do + params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)], object: String) + .returns(T.attached_class) + end def self.new(data: nil, object: nil) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index a15e202a..8dd6289e 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -238,6 +238,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -246,6 +247,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 8e5c3a9b..0251dc51 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -196,6 +196,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -205,6 
+206,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -239,6 +241,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -248,6 +251,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -429,8 +433,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) - .returns(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) end def vector_stores=(_) end @@ -438,7 +456,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -477,12 +500,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -521,6 +546,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 8fc4069c..b266b2e2 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -196,6 +196,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -205,6 +206,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -239,6 +241,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -248,6 +251,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) diff --git 
a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index dc7a1570..b44e4bcf 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -330,6 +330,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -339,7 +340,8 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), @@ -347,6 +349,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -450,8 +453,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]) + params( + _: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + ) end def messages=(_) end @@ -497,7 +504,7 @@ module OpenAI # an empty thread will be created. sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable( T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) @@ -621,13 +628,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) @@ -761,6 +771,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -769,6 +780,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -783,6 +795,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -980,10 +993,20 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + _: T::Array[ + T.any( + 
OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns( - T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) end def vector_stores=(_) @@ -992,7 +1015,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -1031,12 +1059,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -1075,6 +1105,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 165cc27d..4fab7a4b 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -14,8 +14,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]) + params(_: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) end def messages=(_) end @@ -53,7 +53,7 @@ module OpenAI sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) @@ -175,13 +175,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) 
.returns(T.attached_class) @@ -310,6 +313,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -318,6 +322,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -332,6 +337,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -506,8 +512,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] + ) end def vector_stores=(_) end @@ -515,7 +535,12 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -554,12 +579,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) .returns( T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -598,6 +625,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 2d687b40..3d650791 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -12,8 +12,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) end def detail=(_) end @@ -30,7 +30,7 @@ module OpenAI end sig do - params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String) + params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) .returns(T.attached_class) end def self.new(detail: nil, file_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi 
b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 1363acdb..ac2e9bb6 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -12,8 +12,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) end def detail=(_) end @@ -29,7 +29,7 @@ module OpenAI end sig do - params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String) + params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String) .returns(T.attached_class) end def self.new(detail: nil, url: nil) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 6ea4f061..63ead9a1 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -196,11 +196,12 @@ module OpenAI params( id: String, assistant_id: T.nilable(String), - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment]), + attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Util::AnyHash)]), completed_at: T.nilable(Integer), content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock @@ -210,9 +211,9 @@ module OpenAI incomplete_at: T.nilable(Integer), incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), - role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, + role: OpenAI::Models::Beta::Threads::Message::Role::OrSymbol, run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::Message::Status::OrSymbol, thread_id: String, object: Symbol ) @@ -298,6 +299,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -306,6 +308,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -320,6 +323,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -406,7 +410,7 @@ module OpenAI # On an incomplete message, details about why the message is incomplete. 
sig do - params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason:) diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 74d7c35b..350fd7ae 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -105,13 +105,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) @@ -242,6 +245,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -250,6 +254,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -264,6 +269,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index c396bfdb..0e044cb6 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -28,6 +28,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -38,6 +39,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -54,8 +56,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) end def role=(_) end @@ -66,12 +68,13 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock ) ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol + role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi 
b/rbi/lib/openai/models/beta/threads/run.rbi index a89a0fe4..c6efb6d4 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -426,22 +426,25 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::RunStatus::OrSymbol, thread_id: String, tool_choice: T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -548,8 +551,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) + .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) end def reason=(_) end @@ -557,7 +560,7 @@ module OpenAI # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. sig do - params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason: nil) @@ -614,7 +617,7 @@ module OpenAI # The last error associated with this run. Will be `null` if there are no errors. sig do - params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) @@ -711,7 +714,7 @@ module OpenAI # Details on the tool outputs needed for this run to continue. sig do - params(tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) + params( + tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Util::AnyHash)] + ) .returns(T.attached_class) end def self.new(tool_calls:) @@ -753,7 +758,7 @@ module OpenAI # control the initial context window of the run. 
sig do params( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, last_messages: T.nilable(Integer) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 83e052a8..49b45721 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -350,7 +350,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -362,6 +364,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -370,13 +373,15 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -566,13 +571,21 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), + attachments: T.nilable( + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, + OpenAI::Util::AnyHash + ) + ] + ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) @@ -707,6 +720,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -715,6 +729,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -729,6 +744,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index ad431d15..aad29cff 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -31,7 +31,7 @@ module OpenAI sig do params( thread_id: String, - tool_outputs: 
T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 98f88637..7dcb0cba 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -129,6 +129,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 1b18df27..244ab852 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -119,6 +119,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -127,6 +128,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -142,6 +144,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 432b6002..ae88c21f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -88,8 +88,12 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]) + params( + _: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + ) end def results=(_) end @@ -101,7 +105,7 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, OpenAI::Util::AnyHash ), - results: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + results: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end @@ -155,7 +159,7 @@ module OpenAI # The ranking options for the file search. 
sig do params( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::OrSymbol, score_threshold: Float ) .returns(T.attached_class) @@ -254,8 +258,22 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) + params( + _: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] + ) + .returns( + T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] + ) end def content=(_) end @@ -266,7 +284,12 @@ module OpenAI file_id: String, file_name: String, score: Float, - content: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + content: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -310,10 +333,10 @@ module OpenAI sig do params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) end def type=(_) @@ -322,7 +345,7 @@ module OpenAI sig do params( text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 1a008d84..c2d4d482 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -210,13 +210,14 @@ module OpenAI last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::OrSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::OrSymbol, usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)), object: Symbol ) @@ -296,7 +297,7 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. 
sig do - params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 3753e97f..c291b78b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -24,12 +24,14 @@ module OpenAI params( _: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) .returns( T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) @@ -42,6 +44,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 3258b9d6..17d54160 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -39,6 +39,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -48,6 +49,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -63,6 +65,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 3ad8cc7f..d244c946 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -61,6 +61,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall ) diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 0ba8015c..eda70b49 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -53,6 +53,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathAnnotation ) ], diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 12996683..d7051ba2 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ 
b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -25,6 +25,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -33,6 +34,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -55,6 +57,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ], diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 6e2cd6d4..45ff9cca 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -94,10 +94,10 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Util::AnyHash)], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::OrSymbol), system_fingerprint: String, usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash), object: Symbol @@ -187,7 +187,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)), message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash) @@ -267,8 +267,8 @@ module OpenAI # Log probability information for the choice. 
sig do params( - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index de9d2442..260791cc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -127,8 +127,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) end def tool_calls=(_) end @@ -143,6 +143,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartRefusal ) ] @@ -153,7 +154,7 @@ module OpenAI ), name: String, refusal: T.nilable(String), - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)], role: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 802b0de2..e64d0cbc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -102,10 +102,10 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Util::AnyHash)], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), + service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::OrSymbol), system_fingerprint: String, usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)), object: Symbol @@ -200,7 +200,7 @@ module OpenAI sig do params( delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash), - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), + finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol), index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) ) @@ -265,8 +265,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) end def role=(_) end @@ -276,8 +276,12 @@ module OpenAI end sig do - params(_: 
T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]) + params( + _: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] + ) + .returns( + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] + ) end def tool_calls=(_) end @@ -288,8 +292,8 @@ module OpenAI content: T.nilable(String), function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash), refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol, + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end @@ -407,8 +411,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) + .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) end def type=(_) end @@ -418,7 +422,7 @@ module OpenAI index: Integer, id: String, function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash), - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol ) .returns(T.attached_class) end @@ -552,8 +556,8 @@ module OpenAI # Log probability information for the choice. sig do params( - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 2ccc5505..216c218d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -40,7 +40,7 @@ module OpenAI # replace the previous `system` messages. 
sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), name: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index a2ea14b8..12d3f9c5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -38,8 +38,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) end def annotations=(_) end @@ -77,8 +77,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) end def tool_calls=(_) end @@ -88,10 +88,10 @@ module OpenAI params( content: T.nilable(String), refusal: T.nilable(String), - annotations: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], + annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)], audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash)), function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash), - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)], role: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 4996c9f4..472440af 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -32,7 +32,7 @@ module OpenAI # being regenerated. sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index f8cb806e..6c3b80e1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -40,7 +40,7 @@ module OpenAI # for this purpose instead. 
sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), name: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 1eae4294..ea88fd66 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -55,7 +55,7 @@ module OpenAI token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float, - top_logprobs: T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] + top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 42697bfe..bd333634 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -36,7 +36,7 @@ module OpenAI sig do params( - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), + content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]), tool_call_id: String, role: Symbol ) diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 58bd1d57..a9e388f6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -82,6 +82,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 6f7c3948..cf85872a 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -131,13 +131,15 @@ module OpenAI params( _: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ) ) .returns( T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ) ) end @@ -152,8 +154,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]) - .returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]) + params(_: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) end def functions=(_) end @@ -337,6 +339,7 @@ module OpenAI params( _: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, 
OpenAI::Models::ResponseFormatJSONObject ) @@ -344,6 +347,7 @@ module OpenAI .returns( T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ) @@ -470,13 +474,15 @@ module OpenAI params( _: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ) ) .returns( T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ) ) end @@ -491,8 +497,8 @@ module OpenAI end sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionTool]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionTool]) + params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) + .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) end def tools=(_) end @@ -551,6 +557,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -563,9 +570,10 @@ module OpenAI frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -579,6 +587,7 @@ module OpenAI reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -590,9 +599,10 @@ module OpenAI temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ), - tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index a6536d37..e270ee5f 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -77,7 +77,7 @@ module OpenAI sig do params( id: String, - choices: T::Array[OpenAI::Models::CompletionChoice], + choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Util::AnyHash)], created: Integer, model: String, system_fingerprint: String, diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index cbf7bf7d..bdd63223 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ 
b/rbi/lib/openai/models/completion_choice.rbi @@ -47,7 +47,7 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, + finish_reason: OpenAI::Models::CompletionChoice::FinishReason::OrSymbol, index: Integer, logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash)), text: String diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 3f6d0c84..eaf9891d 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -31,7 +31,7 @@ module OpenAI # Combine multiple filters using `and` or `or`. sig do params( - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, T.anything)], type: OpenAI::Models::CompoundFilter::Type::OrSymbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index e6a54b86..e9a4af7f 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -44,7 +44,7 @@ module OpenAI sig do params( - data: T::Array[OpenAI::Models::Embedding], + data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Util::AnyHash)], model: String, usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash), object: Symbol diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index c96e406d..00ace88e 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -101,8 +101,8 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, - status: OpenAI::Models::FileObject::Status::TaggedSymbol, + purpose: OpenAI::Models::FileObject::Purpose::OrSymbol, + status: OpenAI::Models::FileObject::Status::OrSymbol, expires_at: Integer, status_details: String, object: Symbol diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 85b64f51..04cf51bd 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -221,12 +221,14 @@ module OpenAI organization_id: String, result_files: T::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, + status: OpenAI::Models::FineTuning::FineTuningJob::Status::OrSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]), + integrations: T.nilable( + T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash), object: Symbol @@ -480,8 +482,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) end def type=(_) end @@ -491,7 +493,7 @@ 
module OpenAI params( dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash), supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash), - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index aeb14b83..372492b9 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -67,8 +67,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) + .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) end def type=(_) end @@ -78,10 +78,10 @@ module OpenAI params( id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::OrSymbol, message: String, data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol, object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index cc5d4db7..5c524237 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -145,7 +145,9 @@ module OpenAI model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash), - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), + integrations: T.nilable( + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash), seed: T.nilable(Integer), diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index e9537809..724883f8 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -19,7 +19,10 @@ module OpenAI def data=(_) end - sig { params(created: Integer, data: T::Array[OpenAI::Models::Image]).returns(T.attached_class) } + sig do + params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Util::AnyHash)]) + .returns(T.attached_class) + end def self.new(created:, data:) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index b589c1b7..26fc4f28 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -445,19 +445,19 @@ module OpenAI # A list of the categories along with the input type(s) that the score applies to. 
sig do params( - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], - sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] + harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::OrSymbol], + harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::OrSymbol], + hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::OrSymbol], + hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::OrSymbol], + illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::OrSymbol], + illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::OrSymbol], + self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::OrSymbol], + self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::OrSymbol], + self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::OrSymbol], + sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::OrSymbol], + sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::OrSymbol], + violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::OrSymbol], + violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::OrSymbol] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 282a33b9..591f644a 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -59,7 +59,7 @@ module OpenAI input: T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git 
a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index bf831d82..c3a9c5d8 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -31,7 +31,14 @@ module OpenAI end # Represents if a given text input is potentially harmful. - sig { params(id: String, model: String, results: T::Array[OpenAI::Models::Moderation]).returns(T.attached_class) } + sig do + params( + id: String, + model: String, + results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Util::AnyHash)] + ) + .returns(T.attached_class) + end def self.new(id:, model:, results:) end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index bf8bc082..18a3e4ee 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -89,6 +89,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index e2d118da..4380f086 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -28,8 +28,8 @@ module OpenAI end sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) - .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) + params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) + .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) end def filters=(_) end @@ -62,7 +62,7 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash), type: Symbol diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index bf617bc9..6aabb7ec 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -326,8 +326,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseStatus::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseStatus::OrSymbol) end def status=(_) end @@ -398,10 +398,11 @@ module OpenAI incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), + model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, 
OpenAI::Models::Responses::ResponseFunctionWebSearch, @@ -412,13 +413,15 @@ module OpenAI parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -428,9 +431,9 @@ module OpenAI max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), - status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseStatus::OrSymbol, text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), + truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol), usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash), user: String, object: Symbol @@ -522,15 +525,15 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) + .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) end def reason=(_) end # Details about why the response is incomplete. sig do - params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end def self.new(reason: nil) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index cb72f53d..0e9fda65 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -86,10 +86,11 @@ module OpenAI results: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) @@ -184,7 +185,12 @@ module OpenAI # The output of a code interpreter tool call that is a file. 
sig do params( - files: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + files: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, + OpenAI::Util::AnyHash + ) + ], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index d7f5e2c6..f3845301 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -117,6 +117,7 @@ module OpenAI id: String, action: T.any( OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, @@ -127,7 +128,7 @@ module OpenAI OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait ), call_id: String, - pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], + pending_safety_checks: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, OpenAI::Util::AnyHash)], status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) @@ -359,7 +360,7 @@ module OpenAI # A drag action. sig do params( - path: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], + path: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, OpenAI::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 9e28ddee..93d2029f 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -59,9 +59,21 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] ) - .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) + .returns( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] + ) end def acknowledged_safety_checks=(_) end @@ -73,8 +85,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) end def status=(_) end @@ -84,8 +96,13 @@ module OpenAI id: String, call_id: String, output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: 
OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol, + acknowledged_safety_checks: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index ac337005..b7e8bd20 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -66,7 +66,11 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), + part: T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index d6185fc4..566da131 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -66,7 +66,11 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), + part: T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 7e1109eb..9cf8f37d 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -254,6 +254,7 @@ module OpenAI _: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -261,6 +262,7 @@ module OpenAI T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -304,6 +306,7 @@ module OpenAI _: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -314,6 +317,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -373,6 +377,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -400,11 +405,13 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, 
OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index b2a69187..8b28aae4 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -27,7 +27,7 @@ module OpenAI # An error object returned when the model fails to generate a Response. sig do - params(code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String) + params(code: OpenAI::Models::Responses::ResponseError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index e3a77e36..957f8061 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -64,7 +64,9 @@ module OpenAI id: String, queries: T::Array[String], status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, - results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]), + results: T.nilable( + T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Util::AnyHash)] + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index bb1f3e0a..f78c8d84 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -47,8 +47,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) end def status=(_) end @@ -58,7 +58,7 @@ module OpenAI id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol, type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 9d56daf4..74e9e516 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -115,6 +115,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -267,10 +268,20 @@ module OpenAI sig do params( - _: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + _: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] 
) .returns( - T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ] ) end def acknowledged_safety_checks=(_) @@ -295,7 +306,12 @@ module OpenAI call_id: String, output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), id: String, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], + acknowledged_safety_checks: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Util::AnyHash + ) + ], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 1c80f8f2..e6579bda 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -71,8 +71,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) end def status=(_) end @@ -83,8 +83,8 @@ module OpenAI end sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) + .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) end def type=(_) end @@ -95,13 +95,14 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol + role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::OrSymbol, + status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol, + type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 301dcc40..afdd9b6d 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -99,6 +99,7 @@ module OpenAI data: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 0e49a206..42d8f23b 100644 --- 
a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -68,6 +68,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index e53adef5..0e58ab9d 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -68,6 +68,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 681787d8..9b6bf634 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -68,7 +68,13 @@ module OpenAI sig do params( id: String, - content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], + content: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Util::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ) + ], status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, type: Symbol diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 2b88cd02..5c365bc5 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -66,6 +66,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath ) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index b31a3e74..12f95019 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -52,7 +52,7 @@ module OpenAI sig do params( id: String, - summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], + summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Util::AnyHash)], status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index c557f685..2be76360 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -86,6 +86,7 @@ module OpenAI params( annotation: T.any( OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + 
OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath ), diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index 2287e496..a04a62b1 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -35,6 +35,7 @@ module OpenAI params( _: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) @@ -42,6 +43,7 @@ module OpenAI .returns( T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) @@ -59,6 +61,7 @@ module OpenAI params( format_: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 2ce22a6c..949a6347 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -101,7 +101,7 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Models::Upload::Status::TaggedSymbol, + status: OpenAI::Models::Upload::Status::OrSymbol, file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)), object: Symbol ) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 2cb7c946..a3dc0af1 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -127,7 +127,7 @@ module OpenAI last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: OpenAI::Models::VectorStore::Status::TaggedSymbol, + status: OpenAI::Models::VectorStore::Status::OrSymbol, usage_bytes: Integer, expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash), expires_at: T.nilable(Integer), diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index f6aad6ef..54ca2316 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -25,12 +25,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -88,6 +90,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash), diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 4145160a..40bccf18 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -21,8 +21,8 @@ module OpenAI end sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) - 
.returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)) + params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) + .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) end def filters=(_) end @@ -61,7 +61,7 @@ module OpenAI sig do params( query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash), rewrite_query: T::Boolean, diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 22a47028..1847ed36 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -61,7 +61,7 @@ module OpenAI sig do params( attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - content: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], + content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Util::AnyHash)], file_id: String, filename: String, score: Float @@ -121,7 +121,7 @@ module OpenAI end sig do - params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) .returns(T.attached_class) end def self.new(text:, type:) diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 2f203007..1a242819 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -53,12 +53,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -72,6 +74,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index dac1b7e6..e84068ca 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -53,12 +53,14 @@ module OpenAI params( _: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) .returns( T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -72,6 +74,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), 
request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 6ddab8f0..7d337918 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -111,10 +111,18 @@ module OpenAI sig do params( - _: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + _: T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ) ) .returns( - T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ) ) end def chunking_strategy=(_) @@ -126,11 +134,15 @@ module OpenAI id: String, created_at: Integer, last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)), - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, + status: OpenAI::Models::VectorStores::VectorStoreFile::Status::OrSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject), + chunking_strategy: T.any( + OpenAI::Models::StaticFileChunkingStrategyObject, + OpenAI::Util::AnyHash, + OpenAI::Models::OtherFileChunkingStrategyObject + ), object: Symbol ) .returns(T.attached_class) @@ -192,10 +204,7 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
sig do - params( - code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, - message: String - ) + params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end def self.new(code:, message:) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 47124b6e..e2b512ef 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -74,7 +74,7 @@ module OpenAI id: String, created_at: Integer, file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash), - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::OrSymbol, vector_store_id: String, object: Symbol ) diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 3205f05d..5b415b26 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -17,15 +17,17 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -136,15 +138,17 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 65c67e5a..9e668eee 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -15,9 +15,9 @@ module OpenAI # Create a thread. 
sig do params( - messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) @@ -62,7 +62,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) @@ -115,30 +115,35 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -254,30 +259,35 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), + truncation_strategy: T.nilable( + 
T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index d83a3d17..24507cc6 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -14,13 +14,16 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 44b7f356..d797090c 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -16,7 +16,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -28,6 +30,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -36,20 +39,24 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -178,7 +185,9 @@ module OpenAI assistant_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), + additional_messages: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), @@ -190,6 +199,7 @@ module OpenAI T.any( Symbol, 
OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -198,20 +208,24 @@ module OpenAI tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Models::Beta::AssistantToolChoice, + OpenAI::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), + truncation_strategy: T.nilable( + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + ), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -469,7 +483,7 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -500,7 +514,7 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 00b8c661..91d89d3d 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -30,6 +30,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -38,13 +39,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -53,11 +55,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), 
presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -65,17 +68,18 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ), - tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) @@ -294,6 +298,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -302,13 +307,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Models::Chat::ChatCompletionFunctionCallOption, + OpenAI::Util::AnyHash ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -317,11 +323,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, + OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -329,17 +336,18 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: 
T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Models::Chat::ChatCompletionNamedToolChoice, + OpenAI::Util::AnyHash ), - tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash), stream: T.noreturn, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 62ca185f..64b4c2ef 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -25,7 +25,7 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), @@ -168,7 +168,7 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 6e2fc880..df149226 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -19,10 +19,12 @@ module OpenAI params( model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash), + integrations: T.nilable( + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)] + ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash), seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index b6a3164f..0fd74373 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -10,7 +10,7 @@ module OpenAI input: T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + 
T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 7f02edbe..61ac293c 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -25,6 +25,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -45,18 +46,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(OpenAI::Models::Reasoning), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: OpenAI::Models::Responses::ResponseTextConfig, + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -200,6 +203,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -220,18 +224,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(OpenAI::Models::Reasoning), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: OpenAI::Models::Responses::ResponseTextConfig, + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, + OpenAI::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index dd0b6e05..6f4b48c7 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -16,9 +16,10 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -68,7 
+69,7 @@ module OpenAI sig do params( vector_store_id: String, - expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), + expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) @@ -146,9 +147,9 @@ module OpenAI params( vector_store_id: String, query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, - ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash), rewrite_query: T::Boolean, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index b7a976f9..62bd27b1 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -12,6 +12,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 753f7397..37afdaf7 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -14,6 +14,7 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, + OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) From 4829bc8f96c48aa3642daf0b132b60a10049a9d4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 08:25:47 +0000 Subject: [PATCH 053/295] chore: switch to prettier looking sorbet annotations (#59) --- rbi/lib/openai/base_client.rbi | 8 +- rbi/lib/openai/client.rbi | 51 +- rbi/lib/openai/cursor_page.rbi | 14 +- rbi/lib/openai/errors.rbi | 112 +--- .../models/audio/speech_create_params.rbi | 54 +- rbi/lib/openai/models/audio/transcription.rbi | 41 +- .../audio/transcription_create_params.rbi | 75 +-- .../models/audio/transcription_segment.rbi | 70 +-- .../audio/transcription_text_delta_event.rbi | 50 +- .../audio/transcription_text_done_event.rbi | 50 +- .../models/audio/transcription_verbose.rbi | 43 +- .../models/audio/transcription_word.rbi | 21 +- rbi/lib/openai/models/audio/translation.rbi | 7 +- .../audio/translation_create_params.rbi | 44 +- .../models/audio/translation_verbose.rbi | 32 +- .../auto_file_chunking_strategy_param.rbi | 7 +- rbi/lib/openai/models/batch.rbi | 179 ++---- rbi/lib/openai/models/batch_create_params.rbi | 34 +- rbi/lib/openai/models/batch_error.rbi | 30 +- rbi/lib/openai/models/batch_list_params.rbi | 16 +- .../openai/models/batch_request_counts.rbi | 21 +- 
rbi/lib/openai/models/beta/assistant.rbi | 176 ++---- .../models/beta/assistant_create_params.rbi | 254 ++------- .../openai/models/beta/assistant_deleted.rbi | 21 +- .../models/beta/assistant_list_params.rbi | 35 +- .../models/beta/assistant_stream_event.rbi | 440 ++++----------- .../models/beta/assistant_tool_choice.rbi | 21 +- .../beta/assistant_tool_choice_function.rbi | 7 +- .../models/beta/assistant_update_params.rbi | 162 ++---- .../models/beta/code_interpreter_tool.rbi | 7 +- .../openai/models/beta/file_search_tool.rbi | 56 +- rbi/lib/openai/models/beta/function_tool.rbi | 18 +- .../models/beta/message_stream_event.rbi | 90 +-- .../models/beta/run_step_stream_event.rbi | 126 +---- .../openai/models/beta/run_stream_event.rbi | 180 ++---- rbi/lib/openai/models/beta/thread.rbi | 78 +-- .../beta/thread_create_and_run_params.rbi | 512 ++++-------------- .../models/beta/thread_create_params.rbi | 252 ++------- rbi/lib/openai/models/beta/thread_deleted.rbi | 21 +- .../models/beta/thread_stream_event.rbi | 26 +- .../models/beta/thread_update_params.rbi | 55 +- .../beta/threads/file_citation_annotation.rbi | 45 +- .../file_citation_delta_annotation.rbi | 66 +-- .../beta/threads/file_path_annotation.rbi | 47 +- .../threads/file_path_delta_annotation.rbi | 58 +- .../openai/models/beta/threads/image_file.rbi | 18 +- .../beta/threads/image_file_content_block.rbi | 18 +- .../models/beta/threads/image_file_delta.rbi | 19 +- .../beta/threads/image_file_delta_block.rbi | 25 +- .../openai/models/beta/threads/image_url.rbi | 18 +- .../beta/threads/image_url_content_block.rbi | 18 +- .../models/beta/threads/image_url_delta.rbi | 19 +- .../beta/threads/image_url_delta_block.rbi | 25 +- .../openai/models/beta/threads/message.rbi | 176 +----- .../beta/threads/message_create_params.rbi | 92 +--- .../beta/threads/message_delete_params.rbi | 7 +- .../models/beta/threads/message_deleted.rbi | 21 +- .../models/beta/threads/message_delta.rbi | 31 +- .../beta/threads/message_delta_event.rbi | 25 +- .../beta/threads/message_list_params.rbi | 43 +- .../beta/threads/message_retrieve_params.rbi | 7 +- .../beta/threads/message_update_params.rbi | 14 +- .../beta/threads/refusal_content_block.rbi | 14 +- .../beta/threads/refusal_delta_block.rbi | 22 +- .../required_action_function_tool_call.rbi | 40 +- rbi/lib/openai/models/beta/threads/run.rbi | 366 +++---------- .../models/beta/threads/run_cancel_params.rbi | 7 +- .../models/beta/threads/run_create_params.rbi | 314 ++--------- .../models/beta/threads/run_list_params.rbi | 35 +- .../beta/threads/run_retrieve_params.rbi | 7 +- .../run_submit_tool_outputs_params.rbi | 33 +- .../models/beta/threads/run_update_params.rbi | 14 +- .../threads/runs/code_interpreter_logs.rbi | 22 +- .../runs/code_interpreter_output_image.rbi | 34 +- .../runs/code_interpreter_tool_call.rbi | 103 +--- .../runs/code_interpreter_tool_call_delta.rbi | 63 +-- .../threads/runs/file_search_tool_call.rbi | 130 +---- .../runs/file_search_tool_call_delta.rbi | 29 +- .../beta/threads/runs/function_tool_call.rbi | 47 +- .../threads/runs/function_tool_call_delta.rbi | 57 +- .../runs/message_creation_step_details.rbi | 29 +- .../models/beta/threads/runs/run_step.rbi | 177 +----- .../beta/threads/runs/run_step_delta.rbi | 16 +- .../threads/runs/run_step_delta_event.rbi | 25 +- .../runs/run_step_delta_message_delta.rbi | 30 +- .../beta/threads/runs/step_list_params.rbi | 53 +- .../threads/runs/step_retrieve_params.rbi | 25 +- .../threads/runs/tool_call_delta_object.rbi | 26 +- 
.../threads/runs/tool_calls_step_details.rbi | 33 +- rbi/lib/openai/models/beta/threads/text.rbi | 31 +- .../beta/threads/text_content_block.rbi | 18 +- .../beta/threads/text_content_block_param.rbi | 14 +- .../openai/models/beta/threads/text_delta.rbi | 26 +- .../models/beta/threads/text_delta_block.rbi | 25 +- .../openai/models/chat/chat_completion.rbi | 127 +---- ...hat_completion_assistant_message_param.rbi | 115 +--- .../models/chat/chat_completion_audio.rbi | 28 +- .../chat/chat_completion_audio_param.rbi | 20 +- .../models/chat/chat_completion_chunk.rbi | 246 ++------- .../chat/chat_completion_content_part.rbi | 41 +- .../chat_completion_content_part_image.rbi | 37 +- ...at_completion_content_part_input_audio.rbi | 36 +- .../chat_completion_content_part_refusal.rbi | 14 +- .../chat_completion_content_part_text.rbi | 14 +- .../models/chat/chat_completion_deleted.rbi | 21 +- ...hat_completion_developer_message_param.rbi | 25 +- .../chat_completion_function_call_option.rbi | 7 +- ...chat_completion_function_message_param.rbi | 21 +- .../models/chat/chat_completion_message.rbi | 129 ++--- .../chat_completion_message_tool_call.rbi | 40 +- .../chat_completion_named_tool_choice.rbi | 26 +- .../chat_completion_prediction_content.rbi | 17 +- .../chat/chat_completion_store_message.rbi | 7 +- .../chat/chat_completion_stream_options.rbi | 8 +- .../chat_completion_system_message_param.rbi | 25 +- .../chat/chat_completion_token_logprob.rbi | 52 +- .../models/chat/chat_completion_tool.rbi | 18 +- .../chat_completion_tool_message_param.rbi | 24 +- .../chat_completion_user_message_param.rbi | 49 +- .../models/chat/completion_create_params.rbi | 417 ++++---------- .../models/chat/completion_list_params.rbi | 42 +- .../models/chat/completion_update_params.rbi | 7 +- .../chat/completions/message_list_params.rbi | 27 +- rbi/lib/openai/models/comparison_filter.rbi | 24 +- rbi/lib/openai/models/completion.rbi | 54 +- rbi/lib/openai/models/completion_choice.rbi | 67 +-- .../models/completion_create_params.rbi | 140 +---- rbi/lib/openai/models/completion_usage.rbi | 93 ++-- rbi/lib/openai/models/compound_filter.rbi | 20 +- .../models/create_embedding_response.rbi | 46 +- rbi/lib/openai/models/embedding.rbi | 21 +- .../openai/models/embedding_create_params.rbi | 47 +- rbi/lib/openai/models/error_object.rbi | 28 +- rbi/lib/openai/models/file_create_params.rbi | 14 +- rbi/lib/openai/models/file_deleted.rbi | 21 +- rbi/lib/openai/models/file_list_params.rbi | 35 +- rbi/lib/openai/models/file_object.rbi | 71 +-- .../models/fine_tuning/fine_tuning_job.rbi | 313 +++-------- .../fine_tuning/fine_tuning_job_event.rbi | 57 +- .../fine_tuning_job_wandb_integration.rbi | 29 +- ...ne_tuning_job_wandb_integration_object.rbi | 18 +- .../models/fine_tuning/job_create_params.rbi | 267 +++------ .../fine_tuning/job_list_events_params.rbi | 16 +- .../models/fine_tuning/job_list_params.rbi | 23 +- .../jobs/checkpoint_list_params.rbi | 16 +- .../jobs/fine_tuning_job_checkpoint.rbi | 108 ++-- rbi/lib/openai/models/function_definition.rbi | 30 +- rbi/lib/openai/models/image.rbi | 24 +- .../models/image_create_variation_params.rbi | 52 +- rbi/lib/openai/models/image_edit_params.rbi | 67 +-- .../openai/models/image_generate_params.rbi | 73 +-- rbi/lib/openai/models/images_response.rbi | 14 +- rbi/lib/openai/models/model.rbi | 28 +- rbi/lib/openai/models/model_deleted.rbi | 21 +- rbi/lib/openai/models/moderation.rbi | 361 ++---------- .../models/moderation_create_params.rbi | 33 +- .../models/moderation_create_response.rbi | 21 +- 
.../models/moderation_image_url_input.rbi | 25 +- .../openai/models/moderation_text_input.rbi | 14 +- .../other_file_chunking_strategy_object.rbi | 7 +- rbi/lib/openai/models/reasoning.rbi | 20 +- .../models/response_format_json_object.rbi | 7 +- .../models/response_format_json_schema.rbi | 47 +- .../openai/models/response_format_text.rbi | 7 +- .../openai/models/responses/computer_tool.rbi | 31 +- .../models/responses/easy_input_message.rbi | 53 +- .../models/responses/file_search_tool.rbi | 65 +-- .../openai/models/responses/function_tool.rbi | 35 +- .../responses/input_item_list_params.rbi | 35 +- rbi/lib/openai/models/responses/response.rbi | 262 ++------- .../responses/response_audio_delta_event.rbi | 14 +- .../responses/response_audio_done_event.rbi | 7 +- .../response_audio_transcript_delta_event.rbi | 14 +- .../response_audio_transcript_done_event.rbi | 7 +- ...code_interpreter_call_code_delta_event.rbi | 21 +- ..._code_interpreter_call_code_done_event.rbi | 21 +- ..._code_interpreter_call_completed_event.rbi | 26 +- ...ode_interpreter_call_in_progress_event.rbi | 26 +- ...de_interpreter_call_interpreting_event.rbi | 26 +- .../response_code_interpreter_tool_call.rbi | 100 +--- .../responses/response_completed_event.rbi | 18 +- .../responses/response_computer_tool_call.rbi | 280 ++-------- ...esponse_computer_tool_call_output_item.rbi | 80 +-- ...e_computer_tool_call_output_screenshot.rbi | 23 +- .../response_content_part_added_event.rbi | 42 +- .../response_content_part_done_event.rbi | 42 +- .../responses/response_create_params.rbi | 201 +------ .../responses/response_created_event.rbi | 18 +- .../models/responses/response_error.rbi | 17 +- .../models/responses/response_error_event.rbi | 28 +- .../responses/response_failed_event.rbi | 18 +- ...ponse_file_search_call_completed_event.rbi | 21 +- ...nse_file_search_call_in_progress_event.rbi | 21 +- ...ponse_file_search_call_searching_event.rbi | 21 +- .../response_file_search_tool_call.rbi | 83 +-- ...esponse_format_text_json_schema_config.rbi | 37 +- ...se_function_call_arguments_delta_event.rbi | 28 +- ...nse_function_call_arguments_done_event.rbi | 28 +- .../responses/response_function_tool_call.rbi | 47 +- .../response_function_tool_call_item.rbi | 7 +- ...esponse_function_tool_call_output_item.rbi | 39 +- .../response_function_web_search.rbi | 24 +- .../responses/response_in_progress_event.rbi | 18 +- .../responses/response_incomplete_event.rbi | 18 +- .../models/responses/response_input_audio.rbi | 24 +- .../models/responses/response_input_file.rbi | 31 +- .../models/responses/response_input_image.rbi | 31 +- .../models/responses/response_input_item.rbi | 193 ++----- .../responses/response_input_message_item.rbi | 65 +-- .../models/responses/response_input_text.rbi | 14 +- .../models/responses/response_item_list.rbi | 64 +-- .../responses/response_output_audio.rbi | 21 +- .../response_output_item_added_event.rbi | 42 +- .../response_output_item_done_event.rbi | 42 +- .../responses/response_output_message.rbi | 45 +- .../responses/response_output_refusal.rbi | 14 +- .../models/responses/response_output_text.rbi | 117 +--- .../responses/response_reasoning_item.rbi | 49 +- .../response_refusal_delta_event.rbi | 35 +- .../responses/response_refusal_done_event.rbi | 35 +- .../responses/response_retrieve_params.rbi | 11 +- .../response_text_annotation_delta_event.rbi | 134 +---- .../models/responses/response_text_config.rbi | 17 +- .../responses/response_text_delta_event.rbi | 35 +- .../responses/response_text_done_event.rbi | 35 +- 
.../models/responses/response_usage.rbi | 59 +- ...sponse_web_search_call_completed_event.rbi | 21 +- ...onse_web_search_call_in_progress_event.rbi | 21 +- ...sponse_web_search_call_searching_event.rbi | 21 +- .../models/responses/tool_choice_function.rbi | 14 +- .../models/responses/tool_choice_types.rbi | 10 +- .../models/responses/web_search_tool.rbi | 72 +-- .../models/static_file_chunking_strategy.rbi | 14 +- .../static_file_chunking_strategy_object.rbi | 18 +- ...ic_file_chunking_strategy_object_param.rbi | 18 +- rbi/lib/openai/models/upload.rbi | 70 +-- .../openai/models/upload_complete_params.rbi | 15 +- .../openai/models/upload_create_params.rbi | 28 +- .../models/uploads/part_create_params.rbi | 7 +- rbi/lib/openai/models/uploads/upload_part.rbi | 28 +- rbi/lib/openai/models/vector_store.rbi | 137 +---- .../models/vector_store_create_params.rbi | 63 +-- .../openai/models/vector_store_deleted.rbi | 21 +- .../models/vector_store_list_params.rbi | 35 +- .../models/vector_store_search_params.rbi | 66 +-- .../models/vector_store_search_response.rbi | 58 +- .../models/vector_store_update_params.rbi | 40 +- .../file_batch_cancel_params.rbi | 7 +- .../file_batch_create_params.rbi | 33 +- .../file_batch_list_files_params.rbi | 53 +- .../file_batch_retrieve_params.rbi | 7 +- .../vector_stores/file_content_params.rbi | 7 +- .../vector_stores/file_content_response.rbi | 16 +- .../vector_stores/file_create_params.rbi | 33 +- .../vector_stores/file_delete_params.rbi | 7 +- .../models/vector_stores/file_list_params.rbi | 46 +- .../vector_stores/file_retrieve_params.rbi | 7 +- .../vector_stores/file_update_params.rbi | 17 +- .../vector_stores/vector_store_file.rbi | 98 +--- .../vector_stores/vector_store_file_batch.rbi | 85 +-- .../vector_store_file_deleted.rbi | 21 +- rbi/lib/openai/page.rbi | 14 +- rbi/lib/openai/request_options.rbi | 58 +- rbi/lib/openai/resources/audio.rbi | 9 +- rbi/lib/openai/resources/beta.rbi | 6 +- rbi/lib/openai/resources/beta/threads.rbi | 6 +- .../openai/resources/beta/threads/runs.rbi | 3 +- rbi/lib/openai/resources/chat.rbi | 3 +- rbi/lib/openai/resources/chat/completions.rbi | 3 +- rbi/lib/openai/resources/fine_tuning.rbi | 3 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 3 +- rbi/lib/openai/resources/responses.rbi | 3 +- rbi/lib/openai/resources/uploads.rbi | 3 +- rbi/lib/openai/resources/vector_stores.rbi | 6 +- 274 files changed, 2972 insertions(+), 11549 deletions(-) diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index 56d1eb77..92d2484d 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -83,13 +83,9 @@ module OpenAI end end + # @api private sig { returns(T.anything) } - def requester - end - - sig { params(_: T.anything).returns(T.anything) } - def requester=(_) - end + attr_accessor :requester # @api private sig do diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 2d8c31bf..556aaeec 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -11,72 +11,55 @@ module OpenAI DEFAULT_MAX_RETRY_DELAY = T.let(8.0, Float) sig { returns(String) } - def api_key - end + attr_reader :api_key sig { returns(T.nilable(String)) } - def organization - end + attr_reader :organization sig { returns(T.nilable(String)) } - def project - end + attr_reader :project sig { returns(OpenAI::Resources::Completions) } - def completions - end + attr_reader :completions sig { returns(OpenAI::Resources::Chat) } - def chat - end + attr_reader :chat sig { 
returns(OpenAI::Resources::Embeddings) } - def embeddings - end + attr_reader :embeddings sig { returns(OpenAI::Resources::Files) } - def files - end + attr_reader :files sig { returns(OpenAI::Resources::Images) } - def images - end + attr_reader :images sig { returns(OpenAI::Resources::Audio) } - def audio - end + attr_reader :audio sig { returns(OpenAI::Resources::Moderations) } - def moderations - end + attr_reader :moderations sig { returns(OpenAI::Resources::Models) } - def models - end + attr_reader :models sig { returns(OpenAI::Resources::FineTuning) } - def fine_tuning - end + attr_reader :fine_tuning sig { returns(OpenAI::Resources::VectorStores) } - def vector_stores - end + attr_reader :vector_stores sig { returns(OpenAI::Resources::Beta) } - def beta - end + attr_reader :beta sig { returns(OpenAI::Resources::Batches) } - def batches - end + attr_reader :batches sig { returns(OpenAI::Resources::Uploads) } - def uploads - end + attr_reader :uploads sig { returns(OpenAI::Resources::Responses) } - def responses - end + attr_reader :responses # @api private sig { override.returns(T::Hash[String, String]) } diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index 261ed7d6..126a899e 100644 --- a/rbi/lib/openai/cursor_page.rbi +++ b/rbi/lib/openai/cursor_page.rbi @@ -7,20 +7,10 @@ module OpenAI Elem = type_member sig { returns(T.nilable(T::Array[Elem])) } - def data - end - - sig { params(_: T.nilable(T::Array[Elem])).returns(T.nilable(T::Array[Elem])) } - def data=(_) - end + attr_accessor :data sig { returns(T::Boolean) } - def has_more - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def has_more=(_) - end + attr_accessor :has_more sig { returns(String) } def inspect diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi index 56d79f0b..e010e283 100644 --- a/rbi/lib/openai/errors.rbi +++ b/rbi/lib/openai/errors.rbi @@ -3,12 +3,7 @@ module OpenAI class Error < StandardError sig { returns(T.nilable(StandardError)) } - def cause - end - - sig { params(_: T.nilable(StandardError)).returns(T.nilable(StandardError)) } - def cause=(_) - end + attr_accessor :cause end class ConversionError < OpenAI::Error @@ -16,52 +11,22 @@ module OpenAI class APIError < OpenAI::Error sig { returns(URI::Generic) } - def url - end - - sig { params(_: URI::Generic).returns(URI::Generic) } - def url=(_) - end + attr_accessor :url sig { returns(T.nilable(Integer)) } - def status - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def status=(_) - end + attr_accessor :status sig { returns(T.nilable(T.anything)) } - def body - end - - sig { params(_: T.nilable(T.anything)).returns(T.nilable(T.anything)) } - def body=(_) - end + attr_accessor :body sig { returns(T.nilable(String)) } - def code - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def code=(_) - end + attr_accessor :code sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param sig { returns(T.nilable(String)) } - def type - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def type=(_) - end + attr_accessor :type # @api private sig do @@ -81,44 +46,19 @@ module OpenAI class APIConnectionError < OpenAI::APIError sig { void } - def status - end - - sig { params(_: NilClass).void } - def status=(_) - end + attr_accessor :status sig { void } - def body - end - - sig { params(_: NilClass).void } - def body=(_) - end + 
attr_accessor :body sig { void } - def code - end - - sig { params(_: NilClass).void } - def code=(_) - end + attr_accessor :code sig { void } - def param - end - - sig { params(_: NilClass).void } - def param=(_) - end + attr_accessor :param sig { void } - def type - end - - sig { params(_: NilClass).void } - def type=(_) - end + attr_accessor :type # @api private sig do @@ -170,36 +110,16 @@ module OpenAI end sig { returns(Integer) } - def status - end - - sig { params(_: Integer).returns(Integer) } - def status=(_) - end + attr_accessor :status sig { returns(T.nilable(String)) } - def code - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def code=(_) - end + attr_accessor :code sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param sig { returns(T.nilable(String)) } - def type - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def type=(_) - end + attr_accessor :type # @api private sig do diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 0e0c0f1e..5c59d0c7 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -9,73 +9,43 @@ module OpenAI # The text to generate audio for. The maximum length is 4096 characters. sig { returns(String) } - def input - end - - sig { params(_: String).returns(String) } - def input=(_) - end + attr_accessor :input # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the # voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). sig { returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) } - def voice - end - - sig do - params(_: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - .returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) - end - def voice=(_) - end + attr_accessor :voice # Control the voice of your generated audio with additional instructions. Does not # work with `tts-1` or `tts-1-hd`. sig { returns(T.nilable(String)) } - def instructions - end + attr_reader :instructions - sig { params(_: String).returns(String) } - def instructions=(_) - end + sig { params(instructions: String).void } + attr_writer :instructions # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } - def response_format - end + attr_reader :response_format - sig do - params(_: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - .returns(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol) - end - def response_format=(_) - end + sig { params(response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol).void } + attr_writer :response_format # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # the default. sig { returns(T.nilable(Float)) } - def speed - end + attr_reader :speed - sig { params(_: Float).returns(Float) } - def speed=(_) - end + sig { params(speed: Float).void } + attr_writer :speed sig do params( diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index bc60292b..92154d33 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -6,26 +6,19 @@ module OpenAI class Transcription < OpenAI::BaseModel # The transcribed text. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The log probabilities of the tokens in the transcription. Only returned with the # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::Transcription::Logprob])) } - def logprobs - end + attr_reader :logprobs sig do - params(_: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) - end - def logprobs=(_) + params(logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) + .void end + attr_writer :logprobs # Represents a transcription response returned by model, based on the provided # input. @@ -46,30 +39,24 @@ module OpenAI class Logprob < OpenAI::BaseModel # The token in the transcription. sig { returns(T.nilable(String)) } - def token - end + attr_reader :token - sig { params(_: String).returns(String) } - def token=(_) - end + sig { params(token: String).void } + attr_writer :token # The bytes of the token. sig { returns(T.nilable(T::Array[Float])) } - def bytes - end + attr_reader :bytes - sig { params(_: T::Array[Float]).returns(T::Array[Float]) } - def bytes=(_) - end + sig { params(bytes: T::Array[Float]).void } + attr_writer :bytes # The log probability of the token. sig { returns(T.nilable(Float)) } - def logprob - end + attr_reader :logprob - sig { params(_: Float).returns(Float) } - def logprob=(_) - end + sig { params(logprob: Float).void } + attr_writer :logprob sig { params(token: String, bytes: T::Array[Float], logprob: Float).returns(T.attached_class) } def self.new(token: nil, bytes: nil, logprob: nil) diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 21e8d1ad..93b5c317 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -10,26 +10,13 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. 
sig { returns(T.any(IO, StringIO)) } - def file - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def file=(_) - end + attr_accessor :file # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the @@ -37,52 +24,38 @@ module OpenAI # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } - def include - end + attr_reader :include - sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]) - end - def include=(_) - end + sig { params(include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]).void } + attr_writer :include # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) # format will improve accuracy and latency. sig { returns(T.nilable(String)) } - def language - end + attr_reader :language - sig { params(_: String).returns(String) } - def language=(_) - end + sig { params(language: String).void } + attr_writer :language # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) # should match the audio language. sig { returns(T.nilable(String)) } - def prompt - end + attr_reader :prompt - sig { params(_: String).returns(String) } - def prompt=(_) - end + sig { params(prompt: String).void } + attr_writer :prompt # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } - def response_format - end + attr_reader :response_format - sig do - params(_: OpenAI::Models::AudioResponseFormat::OrSymbol) - .returns(OpenAI::Models::AudioResponseFormat::OrSymbol) - end - def response_format=(_) - end + sig { params(response_format: OpenAI::Models::AudioResponseFormat::OrSymbol).void } + attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and @@ -90,12 +63,10 @@ module OpenAI # [log probability](https://en.wikipedia.org/wiki/Log_probability) to # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } - def temperature - end + attr_reader :temperature - sig { params(_: Float).returns(Float) } - def temperature=(_) - end + sig { params(temperature: Float).void } + attr_writer :temperature # The timestamp granularities to populate for this transcription. # `response_format` must be set `verbose_json` to use timestamp granularities. 
@@ -107,15 +78,15 @@ module OpenAI T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) ) end - def timestamp_granularities - end + attr_reader :timestamp_granularities sig do - params(_: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) - .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) - end - def timestamp_granularities=(_) + params( + timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol] + ) + .void end + attr_writer :timestamp_granularities sig do params( diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi index 92a534a2..8bc3e5b2 100644 --- a/rbi/lib/openai/models/audio/transcription_segment.rbi +++ b/rbi/lib/openai/models/audio/transcription_segment.rbi @@ -6,96 +6,46 @@ module OpenAI class TranscriptionSegment < OpenAI::BaseModel # Unique identifier of the segment. sig { returns(Integer) } - def id - end - - sig { params(_: Integer).returns(Integer) } - def id=(_) - end + attr_accessor :id # Average logprob of the segment. If the value is lower than -1, consider the # logprobs failed. sig { returns(Float) } - def avg_logprob - end - - sig { params(_: Float).returns(Float) } - def avg_logprob=(_) - end + attr_accessor :avg_logprob # Compression ratio of the segment. If the value is greater than 2.4, consider the # compression failed. sig { returns(Float) } - def compression_ratio - end - - sig { params(_: Float).returns(Float) } - def compression_ratio=(_) - end + attr_accessor :compression_ratio # End time of the segment in seconds. sig { returns(Float) } - def end_ - end - - sig { params(_: Float).returns(Float) } - def end_=(_) - end + attr_accessor :end_ # Probability of no speech in the segment. If the value is higher than 1.0 and the # `avg_logprob` is below -1, consider this segment silent. sig { returns(Float) } - def no_speech_prob - end - - sig { params(_: Float).returns(Float) } - def no_speech_prob=(_) - end + attr_accessor :no_speech_prob # Seek offset of the segment. sig { returns(Integer) } - def seek - end - - sig { params(_: Integer).returns(Integer) } - def seek=(_) - end + attr_accessor :seek # Start time of the segment in seconds. sig { returns(Float) } - def start - end - - sig { params(_: Float).returns(Float) } - def start=(_) - end + attr_accessor :start # Temperature parameter used for generating the segment. sig { returns(Float) } - def temperature - end - - sig { params(_: Float).returns(Float) } - def temperature=(_) - end + attr_accessor :temperature # Text content of the segment. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Array of token IDs for the text content. sig { returns(T::Array[Integer]) } - def tokens - end - - sig { params(_: T::Array[Integer]).returns(T::Array[Integer]) } - def tokens=(_) - end + attr_accessor :tokens sig do params( diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi index 0f931bf2..0e61bfb0 100644 --- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -6,39 +6,25 @@ module OpenAI class TranscriptionTextDeltaEvent < OpenAI::BaseModel # The text delta that was additionally transcribed. 
sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The type of the event. Always `transcript.text.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The log probabilities of the delta. Only included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob])) } - def logprobs - end + attr_reader :logprobs sig do params( - _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] ) - .returns( - T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] - ) - end - def logprobs=(_) + .void end + attr_writer :logprobs # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you @@ -67,30 +53,24 @@ module OpenAI class Logprob < OpenAI::BaseModel # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } - def token - end + attr_reader :token - sig { params(_: String).returns(String) } - def token=(_) - end + sig { params(token: String).void } + attr_writer :token # The bytes that were used to generate the log probability. sig { returns(T.nilable(T::Array[T.anything])) } - def bytes - end + attr_reader :bytes - sig { params(_: T::Array[T.anything]).returns(T::Array[T.anything]) } - def bytes=(_) - end + sig { params(bytes: T::Array[T.anything]).void } + attr_writer :bytes # The log probability of the token. sig { returns(T.nilable(Float)) } - def logprob - end + attr_reader :logprob - sig { params(_: Float).returns(Float) } - def logprob=(_) - end + sig { params(logprob: Float).void } + attr_writer :logprob sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } def self.new(token: nil, bytes: nil, logprob: nil) diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index 044cbf7c..ecfd6f55 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -6,40 +6,26 @@ module OpenAI class TranscriptionTextDoneEvent < OpenAI::BaseModel # The text that was transcribed. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the event. Always `transcript.text.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The log probabilities of the individual tokens in the transcription. Only # included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob])) } - def logprobs - end + attr_reader :logprobs sig do params( - _: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] ) - .returns( - T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] - ) - end - def logprobs=(_) + .void end + attr_writer :logprobs # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you @@ -68,30 +54,24 @@ module OpenAI class Logprob < OpenAI::BaseModel # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } - def token - end + attr_reader :token - sig { params(_: String).returns(String) } - def token=(_) - end + sig { params(token: String).void } + attr_writer :token # The bytes that were used to generate the log probability. sig { returns(T.nilable(T::Array[T.anything])) } - def bytes - end + attr_reader :bytes - sig { params(_: T::Array[T.anything]).returns(T::Array[T.anything]) } - def bytes=(_) - end + sig { params(bytes: T::Array[T.anything]).void } + attr_writer :bytes # The log probability of the token. sig { returns(T.nilable(Float)) } - def logprob - end + attr_reader :logprob - sig { params(_: Float).returns(Float) } - def logprob=(_) - end + sig { params(logprob: Float).void } + attr_writer :logprob sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } def self.new(token: nil, bytes: nil, logprob: nil) diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index aba3ac2a..004b1ec3 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -6,54 +6,29 @@ module OpenAI class TranscriptionVerbose < OpenAI::BaseModel # The duration of the input audio. sig { returns(Float) } - def duration - end - - sig { params(_: Float).returns(Float) } - def duration=(_) - end + attr_accessor :duration # The language of the input audio. sig { returns(String) } - def language - end - - sig { params(_: String).returns(String) } - def language=(_) - end + attr_accessor :language # The transcribed text. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Segments of the transcribed text and their corresponding details. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } - def segments - end + attr_reader :segments - sig do - params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) - end - def segments=(_) - end + sig { params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]).void } + attr_writer :segments # Extracted words and their corresponding timestamps. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionWord])) } - def words - end + attr_reader :words - sig do - params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]) - end - def words=(_) - end + sig { params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]).void } + attr_writer :words # Represents a verbose json transcription response returned by model, based on the # provided input. diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/lib/openai/models/audio/transcription_word.rbi index a3be4b46..c6adc3b1 100644 --- a/rbi/lib/openai/models/audio/transcription_word.rbi +++ b/rbi/lib/openai/models/audio/transcription_word.rbi @@ -6,30 +6,15 @@ module OpenAI class TranscriptionWord < OpenAI::BaseModel # End time of the word in seconds. sig { returns(Float) } - def end_ - end - - sig { params(_: Float).returns(Float) } - def end_=(_) - end + attr_accessor :end_ # Start time of the word in seconds. sig { returns(Float) } - def start - end - - sig { params(_: Float).returns(Float) } - def start=(_) - end + attr_accessor :start # The text content of the word. sig { returns(String) } - def word - end - - sig { params(_: String).returns(String) } - def word=(_) - end + attr_accessor :word sig { params(end_: Float, start: Float, word: String).returns(T.attached_class) } def self.new(end_:, start:, word:) diff --git a/rbi/lib/openai/models/audio/translation.rbi b/rbi/lib/openai/models/audio/translation.rbi index b9650fa4..07767362 100644 --- a/rbi/lib/openai/models/audio/translation.rbi +++ b/rbi/lib/openai/models/audio/translation.rbi @@ -5,12 +5,7 @@ module OpenAI module Audio class Translation < OpenAI::BaseModel sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text sig { params(text: String).returns(T.attached_class) } def self.new(text:) diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 824eab32..6f8d3e8b 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -10,50 +10,30 @@ module OpenAI # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(IO, StringIO)) } - def file - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def file=(_) - end + attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) # should be in English. 
sig { returns(T.nilable(String)) } - def prompt - end + attr_reader :prompt - sig { params(_: String).returns(String) } - def prompt=(_) - end + sig { params(prompt: String).void } + attr_writer :prompt # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } - def response_format - end + attr_reader :response_format - sig do - params(_: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - .returns(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol) - end - def response_format=(_) - end + sig { params(response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol).void } + attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and @@ -61,12 +41,10 @@ module OpenAI # [log probability](https://en.wikipedia.org/wiki/Log_probability) to # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } - def temperature - end + attr_reader :temperature - sig { params(_: Float).returns(Float) } - def temperature=(_) - end + sig { params(temperature: Float).void } + attr_writer :temperature sig do params( diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index 8bceffaa..f12cba37 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -6,42 +6,22 @@ module OpenAI class TranslationVerbose < OpenAI::BaseModel # The duration of the input audio. sig { returns(Float) } - def duration - end - - sig { params(_: Float).returns(Float) } - def duration=(_) - end + attr_accessor :duration # The language of the output translation (always `english`). sig { returns(String) } - def language - end - - sig { params(_: String).returns(String) } - def language=(_) - end + attr_accessor :language # The translated text. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Segments of the translated text and their corresponding details. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } - def segments - end + attr_reader :segments - sig do - params(_: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]) - end - def segments=(_) - end + sig { params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]).void } + attr_writer :segments sig do params( diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi index b1d97ad3..43d54634 100644 --- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi @@ -5,12 +5,7 @@ module OpenAI class AutoFileChunkingStrategyParam < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. 
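The transformation patch 053 applies across all 274 files is mechanical: every hand-written Sorbet reader/writer pair (a `sig` over an empty `def foo; end` plus a second `sig` over `def foo=(_); end`) collapses into a single annotated `attr_accessor`, or, for optional fields, an `attr_reader` paired with a typed `attr_writer`. A minimal before/after sketch in `.rbi` form, using a hypothetical `name` field rather than any model field from the SDK:

    # Before: explicit stub methods, one sig per getter and setter.
    sig { returns(T.nilable(String)) }
    def name
    end

    sig { params(_: String).returns(String) }
    def name=(_)
    end

    # After: the same typed surface declared with attr_* keywords.
    sig { returns(T.nilable(String)) }
    attr_reader :name

    sig { params(name: String).void }
    attr_writer :name

The writer sig names its parameter after the attribute and returns `void`, which is why the hunks below drop every `.returns(...)` clause from setter signatures; fields whose type explicitly admits `nil` (such as `metadata`) keep a single `attr_accessor` instead of the reader/writer split.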
diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index c86180e1..c8c01aac 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -4,160 +4,100 @@ module OpenAI module Models class Batch < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The time frame within which the batch should be processed. sig { returns(String) } - def completion_window - end - - sig { params(_: String).returns(String) } - def completion_window=(_) - end + attr_accessor :completion_window # The Unix timestamp (in seconds) for when the batch was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The OpenAI API endpoint used by the batch. sig { returns(String) } - def endpoint - end - - sig { params(_: String).returns(String) } - def endpoint=(_) - end + attr_accessor :endpoint # The ID of the input file for the batch. sig { returns(String) } - def input_file_id - end - - sig { params(_: String).returns(String) } - def input_file_id=(_) - end + attr_accessor :input_file_id # The object type, which is always `batch`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The current status of the batch. sig { returns(OpenAI::Models::Batch::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Batch::Status::TaggedSymbol).returns(OpenAI::Models::Batch::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The Unix timestamp (in seconds) for when the batch was cancelled. sig { returns(T.nilable(Integer)) } - def cancelled_at - end + attr_reader :cancelled_at - sig { params(_: Integer).returns(Integer) } - def cancelled_at=(_) - end + sig { params(cancelled_at: Integer).void } + attr_writer :cancelled_at # The Unix timestamp (in seconds) for when the batch started cancelling. sig { returns(T.nilable(Integer)) } - def cancelling_at - end + attr_reader :cancelling_at - sig { params(_: Integer).returns(Integer) } - def cancelling_at=(_) - end + sig { params(cancelling_at: Integer).void } + attr_writer :cancelling_at # The Unix timestamp (in seconds) for when the batch was completed. sig { returns(T.nilable(Integer)) } - def completed_at - end + attr_reader :completed_at - sig { params(_: Integer).returns(Integer) } - def completed_at=(_) - end + sig { params(completed_at: Integer).void } + attr_writer :completed_at # The ID of the file containing the outputs of requests with errors. sig { returns(T.nilable(String)) } - def error_file_id - end + attr_reader :error_file_id - sig { params(_: String).returns(String) } - def error_file_id=(_) - end + sig { params(error_file_id: String).void } + attr_writer :error_file_id sig { returns(T.nilable(OpenAI::Models::Batch::Errors)) } - def errors - end + attr_reader :errors - sig do - params(_: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)) - end - def errors=(_) - end + sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)).void } + attr_writer :errors # The Unix timestamp (in seconds) for when the batch expired. 
sig { returns(T.nilable(Integer)) } - def expired_at - end + attr_reader :expired_at - sig { params(_: Integer).returns(Integer) } - def expired_at=(_) - end + sig { params(expired_at: Integer).void } + attr_writer :expired_at # The Unix timestamp (in seconds) for when the batch will expire. sig { returns(T.nilable(Integer)) } - def expires_at - end + attr_reader :expires_at - sig { params(_: Integer).returns(Integer) } - def expires_at=(_) - end + sig { params(expires_at: Integer).void } + attr_writer :expires_at # The Unix timestamp (in seconds) for when the batch failed. sig { returns(T.nilable(Integer)) } - def failed_at - end + attr_reader :failed_at - sig { params(_: Integer).returns(Integer) } - def failed_at=(_) - end + sig { params(failed_at: Integer).void } + attr_writer :failed_at # The Unix timestamp (in seconds) for when the batch started finalizing. sig { returns(T.nilable(Integer)) } - def finalizing_at - end + attr_reader :finalizing_at - sig { params(_: Integer).returns(Integer) } - def finalizing_at=(_) - end + sig { params(finalizing_at: Integer).void } + attr_writer :finalizing_at # The Unix timestamp (in seconds) for when the batch started processing. sig { returns(T.nilable(Integer)) } - def in_progress_at - end + attr_reader :in_progress_at - sig { params(_: Integer).returns(Integer) } - def in_progress_at=(_) - end + sig { params(in_progress_at: Integer).void } + attr_writer :in_progress_at # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -166,33 +106,21 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The ID of the file containing the outputs of successfully executed requests. sig { returns(T.nilable(String)) } - def output_file_id - end + attr_reader :output_file_id - sig { params(_: String).returns(String) } - def output_file_id=(_) - end + sig { params(output_file_id: String).void } + attr_writer :output_file_id # The request counts for different statuses within the batch. sig { returns(T.nilable(OpenAI::Models::BatchRequestCounts)) } - def request_counts - end + attr_reader :request_counts - sig do - params(_: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)) - end - def request_counts=(_) - end + sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)).void } + attr_writer :request_counts sig do params( @@ -298,24 +226,17 @@ module OpenAI class Errors < OpenAI::BaseModel sig { returns(T.nilable(T::Array[OpenAI::Models::BatchError])) } - def data - end + attr_reader :data - sig do - params(_: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]) - end - def data=(_) - end + sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]).void } + attr_writer :data # The object type, which is always `list`. 
sig { returns(T.nilable(String)) } - def object - end + attr_reader :object - sig { params(_: String).returns(String) } - def object=(_) - end + sig { params(object: String).void } + attr_writer :object sig do params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)], object: String) diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 46c3a392..25f982eb 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -9,30 +9,14 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } - def completion_window - end - - sig do - params(_: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) - .returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) - end - def completion_window=(_) - end + attr_accessor :completion_window # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } - def endpoint - end - - sig do - params(_: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - .returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) - end - def endpoint=(_) - end + attr_accessor :endpoint # The ID of an uploaded file that contains requests for the new batch. # @@ -44,12 +28,7 @@ module OpenAI # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 # requests, and can be up to 200 MB in size. sig { returns(String) } - def input_file_id - end - - sig { params(_: String).returns(String) } - def input_file_id=(_) - end + attr_accessor :input_file_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -58,12 +37,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/lib/openai/models/batch_error.rbi index ded1e3bb..261a3ac1 100644 --- a/rbi/lib/openai/models/batch_error.rbi +++ b/rbi/lib/openai/models/batch_error.rbi @@ -5,39 +5,25 @@ module OpenAI class BatchError < OpenAI::BaseModel # An error code identifying the error type. sig { returns(T.nilable(String)) } - def code - end + attr_reader :code - sig { params(_: String).returns(String) } - def code=(_) - end + sig { params(code: String).void } + attr_writer :code # The line number of the input file where the error occurred, if applicable. sig { returns(T.nilable(Integer)) } - def line - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def line=(_) - end + attr_accessor :line # A human-readable message providing more details about the error. 
sig { returns(T.nilable(String)) } - def message - end + attr_reader :message - sig { params(_: String).returns(String) } - def message=(_) - end + sig { params(message: String).void } + attr_writer :message # The name of the parameter that caused the error, if applicable. sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param sig do params(code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)) diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 460fe7ca..8fb64add 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -11,22 +11,18 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit sig do params( diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/lib/openai/models/batch_request_counts.rbi index df746c81..03b6cecc 100644 --- a/rbi/lib/openai/models/batch_request_counts.rbi +++ b/rbi/lib/openai/models/batch_request_counts.rbi @@ -5,30 +5,15 @@ module OpenAI class BatchRequestCounts < OpenAI::BaseModel # Number of requests that have been completed successfully. sig { returns(Integer) } - def completed - end - - sig { params(_: Integer).returns(Integer) } - def completed=(_) - end + attr_accessor :completed # Number of requests that have failed. sig { returns(Integer) } - def failed - end - - sig { params(_: Integer).returns(Integer) } - def failed=(_) - end + attr_accessor :failed # Total number of requests in the batch. sig { returns(Integer) } - def total - end - - sig { params(_: Integer).returns(Integer) } - def total=(_) - end + attr_accessor :total # The request counts for different statuses within the batch. sig { params(completed: Integer, failed: Integer, total: Integer).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index 8dd6289e..e5fcf3bc 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -6,40 +6,20 @@ module OpenAI class Assistant < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the assistant was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } - def description - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def description=(_) - end + attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 # characters. 
sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -48,12 +28,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -61,30 +36,15 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name # The object type, which is always `assistant`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or @@ -100,31 +60,7 @@ module OpenAI ] ) end - def tools - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - end - def tools=(_) - end + attr_accessor :tools # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -158,59 +94,28 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash))) - end - def tool_resources=(_) + params( + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :tool_resources # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -218,12 +123,7 @@ module OpenAI # # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # Represents an `assistant` that can call the model and use tools. sig do @@ -314,26 +214,26 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do - params(_: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) - end - def code_interpreter=(_) + params( + code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do - params(_: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) + params( + file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -366,12 +266,10 @@ module OpenAI # available to the `code_interpreter`` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -388,12 +286,10 @@ module OpenAI # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. 
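For model-typed optional fields such as `tool_resources` above, the rewritten writer sigs accept `T.any(Model, OpenAI::Util::AnyHash)`, so a caller may assign either a constructed model or a bare hash, while reads stay nilable. A hypothetical caller-side sketch (the `assistant` variable and the zero-argument constructor are assumptions, not shown in this patch):

    # Either form satisfies the new writer sig.
    assistant.tool_resources = OpenAI::Models::Beta::Assistant::ToolResources.new
    assistant.tool_resources = {code_interpreter: {file_ids: ["file_abc123"]}}

    # The reader remains T.nilable, so guard before chaining.
    ids = assistant.tool_resources&.code_interpreter&.file_ids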
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } def self.new(vector_store_ids: nil) diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 0251dc51..24382098 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -13,34 +13,16 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } - def description - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def description=(_) - end + attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 # characters. sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -49,21 +31,11 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name # **o-series models only** # @@ -72,15 +44,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } - def reasoning_effort - end - - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end - def reasoning_effort=(_) - end + attr_accessor :reasoning_effort # Specifies the format that the model must output. 
Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -114,63 +78,28 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) - ) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or @@ -188,12 +117,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -202,19 +130,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -222,12 +140,7 @@ module OpenAI # # We generally recommend altering this or temperature but not both. 
sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p sig do params( @@ -331,34 +244,26 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) - ) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -391,12 +296,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -413,12 +316,10 @@ module OpenAI # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. 
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -429,29 +330,20 @@ module OpenAI T.nilable(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) ) end - def vector_stores - end + attr_reader :vector_stores sig do params( - _: T::Array[ + vector_stores: T::Array[ T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash - ) - ] - ) - end - def vector_stores=(_) + .void end + attr_writer :vector_stores sig do params( @@ -493,38 +385,28 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) - .returns( - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -533,14 +415,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig do - params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) - end - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -591,12 +466,7 @@ module OpenAI class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. 
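`vector_stores` above takes an array of hashes via the same `AnyHash` union, and each store's `chunking_strategy` is a union of the `Auto` and `Static` classes that follow. A hypothetical request fragment (keyword names inferred from the accessors; IDs and metadata values are placeholders):

    file_search = {
      vector_stores: [
        {
          # Auto currently means 800-token chunks with a 400-token overlap.
          chunking_strategy: {type: :auto},
          file_ids: ["file_abc123"],
          metadata: {project: "demo"}
        }
      ]
    }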
@@ -615,34 +485,22 @@ module OpenAI OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end - def static - end + attr_reader :static sig do params( - _: T.any( + static: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash - ) - ) - end - def static=(_) + .void end + attr_writer :static # Always `static`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -674,22 +532,12 @@ module OpenAI # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } - def chunk_overlap_tokens - end - - sig { params(_: Integer).returns(Integer) } - def chunk_overlap_tokens=(_) - end + attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } - def max_chunk_size_tokens - end - - sig { params(_: Integer).returns(Integer) } - def max_chunk_size_tokens=(_) - end + attr_accessor :max_chunk_size_tokens sig do params( diff --git a/rbi/lib/openai/models/beta/assistant_deleted.rbi b/rbi/lib/openai/models/beta/assistant_deleted.rbi index bf4cde08..c5b56819 100644 --- a/rbi/lib/openai/models/beta/assistant_deleted.rbi +++ b/rbi/lib/openai/models/beta/assistant_deleted.rbi @@ -5,28 +5,13 @@ module OpenAI module Beta class AssistantDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"assistant.deleted") diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 76427298..845086ec 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -12,47 +12,36 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. 
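`AssistantDeleted` above keeps `object` as a plain `Symbol` accessor but defaults it in `self.new`, so callers never spell out the discriminator. A small usage sketch (the id is a placeholder value):

    deleted = OpenAI::Models::Beta::AssistantDeleted.new(id: "asst_abc123", deleted: true)
    deleted.object # => :"assistant.deleted"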
sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 449d6397..eb08519f 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -62,32 +62,20 @@ module OpenAI # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Whether to enable input audio transcription. sig { returns(T.nilable(T::Boolean)) } - def enabled - end + attr_reader :enabled - sig { params(_: T::Boolean).returns(T::Boolean) } - def enabled=(_) - end + sig { params(enabled: T::Boolean).void } + attr_writer :enabled # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is @@ -112,23 +100,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. @@ -148,23 +126,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. @@ -184,23 +152,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. @@ -220,23 +178,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. @@ -256,23 +204,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. @@ -292,23 +230,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
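Every stream-event hunk in this file reduces to the same three-member shape once the rewrite lands. A sketch of one resulting class, assembled from the `+` lines above (the class name and event literal follow the surrounding comments but are not shown verbatim in these hunks, so treat them as assumptions):

    class ThreadRunCompleted < OpenAI::BaseModel
      # Represents an execution run on a thread.
      sig { returns(OpenAI::Models::Beta::Threads::Run) }
      attr_reader :data

      sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void }
      attr_writer :data

      # The event name, e.g. :"thread.run.completed" (assumed literal).
      sig { returns(Symbol) }
      attr_accessor :event
    end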
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. @@ -328,23 +256,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. @@ -364,23 +282,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. @@ -400,23 +308,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. @@ -436,23 +334,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. @@ -471,23 +359,13 @@ module OpenAI class ThreadRunStepCreated < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -507,23 +385,13 @@ module OpenAI class ThreadRunStepInProgress < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -544,23 +412,13 @@ module OpenAI # Represents a run step delta i.e. any changed fields on a run step during # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -583,23 +441,13 @@ module OpenAI class ThreadRunStepCompleted < OpenAI::BaseModel # Represents a step in execution of a run. 
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -619,23 +467,13 @@ module OpenAI class ThreadRunStepFailed < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -655,23 +493,13 @@ module OpenAI class ThreadRunStepCancelled < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -691,23 +519,13 @@ module OpenAI class ThreadRunStepExpired < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -728,23 +546,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is @@ -765,23 +573,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves @@ -802,23 +600,13 @@ module OpenAI # Represents a message delta i.e. any changed fields on a message during # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are @@ -842,23 +630,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is @@ -879,23 +657,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends @@ -914,23 +682,13 @@ module OpenAI class ErrorEvent < OpenAI::BaseModel sig { returns(OpenAI::Models::ErrorObject) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when an # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 1d11f89d..47eef189 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -6,26 +6,13 @@ module OpenAI class AssistantToolChoice < OpenAI::BaseModel # The type of the tool. If type is `function`, the function name must be set sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - .returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::AssistantToolChoiceFunction)) } - def function - end + attr_reader :function - sig do - params(_: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)) - end - def function=(_) - end + sig { params(function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)).void } + attr_writer :function # Specifies a tool the model should use. Use to force the model to call a specific # tool. diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi index 6e594ce1..084e57a8 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi @@ -6,12 +6,7 @@ module OpenAI class AssistantToolChoiceFunction < OpenAI::BaseModel # The name of the function to call. 
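`AssistantToolChoice` above shows both conventions side by side: the required `type` enum becomes an `attr_accessor` over its `OrSymbol` alias, while the optional `function` keeps the reader/writer split with the `AnyHash` union. A hypothetical construction sketch (keyword names inferred from the accessors; the constructor sig is not shown in this hunk):

    choice = OpenAI::Models::Beta::AssistantToolChoice.new(
      type: :function,                 # enum member implied by the doc comment
      function: {name: "get_weather"}  # a bare hash, accepted via AnyHash
    )
    choice.type # => :function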
sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name sig { params(name: String).returns(T.attached_class) } def self.new(name:) diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index b266b2e2..09cfa6aa 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -9,22 +9,12 @@ module OpenAI # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } - def description - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def description=(_) - end + attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 # characters. sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -33,12 +23,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -46,24 +31,14 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } - def model - end + attr_reader :model - sig do - params(_: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)) - end - def model=(_) - end + sig { params(model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)).void } + attr_writer :model # The name of the assistant. The maximum length is 256 characters. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name # **o-series models only** # @@ -72,15 +47,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } - def reasoning_effort - end - - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end - def reasoning_effort=(_) - end + attr_accessor :reasoning_effort # Specifies the format that the model must output. 
Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -114,63 +81,28 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) - ) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or @@ -188,12 +120,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -202,19 +133,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -222,12 +143,7 @@ module OpenAI # # We generally recommend altering this or temperature but not both. 
sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p sig do params( @@ -386,34 +302,26 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) - ) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -447,12 +355,10 @@ module OpenAI # to the `code_interpreter` tool. There can be a maximum of 20 files associated # with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -469,12 +375,10 @@ module OpenAI # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. 
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } def self.new(vector_store_ids: nil) diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi index ba0e9924..3e68afee 100644 --- a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi @@ -6,12 +6,7 @@ module OpenAI class CodeInterpreterTool < OpenAI::BaseModel # The type of tool being defined: `code_interpreter` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :code_interpreter) diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index e44328a2..cc00d4a9 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -6,24 +6,14 @@ module OpenAI class FileSearchTool < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Overrides for the file search tool. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch)) } - def file_search - end + attr_reader :file_search - sig do - params(_: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) - end + sig { params(file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)).void } + attr_writer :file_search sig do params( @@ -49,12 +39,10 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. sig { returns(T.nilable(Integer)) } - def max_num_results - end + attr_reader :max_num_results - sig { params(_: Integer).returns(Integer) } - def max_num_results=(_) - end + sig { params(max_num_results: Integer).void } + attr_writer :max_num_results # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. @@ -63,15 +51,15 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions)) } - def ranking_options - end + attr_reader :ranking_options sig do - params(_: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash)) - end - def ranking_options=(_) + params( + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :ranking_options # Overrides for the file search tool. sig do @@ -97,25 +85,15 @@ module OpenAI # The score threshold for the file search. 
All values must be a floating point # number between 0 and 1. sig { returns(Float) } - def score_threshold - end - - sig { params(_: Float).returns(Float) } - def score_threshold=(_) - end + attr_accessor :score_threshold # The ranker to use for the file search. If not specified will use the `auto` # ranker. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } - def ranker - end + attr_reader :ranker - sig do - params(_: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol) - end - def ranker=(_) - end + sig { params(ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol).void } + attr_writer :ranker # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index c69fa0dd..b12da233 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -5,24 +5,14 @@ module OpenAI module Beta class FunctionTool < OpenAI::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } - def function - end + attr_reader :function - sig do - params(_: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) - end - def function=(_) - end + sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)).void } + attr_writer :function # The type of tool being defined: `function` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol) diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 8eb06277..6487db0b 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -26,23 +26,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is @@ -63,23 +53,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
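The `RankingOptions` hunk above makes `score_threshold` a required accessor while `ranker` stays optional. A hypothetical tool definition exercising both (values are placeholders; `:auto` is inferred from the doc comment about the default ranker):

    tool = {
      type: :file_search,
      file_search: {
        max_num_results: 10,
        ranking_options: {score_threshold: 0.5, ranker: :auto}
      }
    }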
sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves @@ -100,23 +80,13 @@ module OpenAI # Represents a message delta i.e. any changed fields on a message during # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are @@ -140,23 +110,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is @@ -177,23 +137,13 @@ module OpenAI # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 283216f5..f39da267 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -27,23 +27,13 @@ module OpenAI class ThreadRunStepCreated < OpenAI::BaseModel # Represents a step in execution of a run. 
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -63,23 +53,13 @@ module OpenAI class ThreadRunStepInProgress < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -100,23 +80,13 @@ module OpenAI # Represents a run step delta i.e. any changed fields on a run step during # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -139,23 +109,13 @@ module OpenAI class ThreadRunStepCompleted < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -175,23 +135,13 @@ module OpenAI class ThreadRunStepFailed < OpenAI::BaseModel # Represents a step in execution of a run. 
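Since every event class above exposes the same `data`/`event` pair, a consumer can dispatch on `event` alone. A hypothetical handler for the run-step events typed in this hunk; the event symbols are assumed to mirror the class names, and the stream that yields these objects comes from elsewhere in the SDK:

    # Hypothetical dispatcher; assumes each yielded event responds to
    # `event` (a Symbol) and `data` (a RunStep or RunStepDeltaEvent).
    def handle_run_step_event(event)
      case event.event
      when :"thread.run.step.created"
        puts "step #{event.data.id} started"
      when :"thread.run.step.delta"
        # `data` is a RunStepDeltaEvent here, carrying only changed fields.
      when :"thread.run.step.failed"
        warn "step #{event.data.id} failed"
      end
    end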
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -211,23 +161,13 @@ module OpenAI class ThreadRunStepCancelled < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) @@ -247,23 +187,13 @@ module OpenAI class ThreadRunStepExpired < OpenAI::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index de3392b0..03f0c58b 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -30,23 +30,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. @@ -66,23 +56,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. @@ -102,23 +82,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. @@ -138,23 +108,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. @@ -174,23 +134,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. @@ -210,23 +160,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. @@ -246,23 +186,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. @@ -282,23 +212,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. @@ -318,23 +238,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. @@ -354,23 +264,13 @@ module OpenAI # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 3cd4db69..9061c27f 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -6,21 +6,11 @@ module OpenAI class Thread < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the thread was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -29,36 +19,26 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The object type, which is always `thread`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash))) - end - def tool_resources=(_) + params( + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :tool_resources # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). 
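For a model like `Thread`, the net effect of the new accessors is that instances read and write like plain Ruby structs. A sketch with illustrative values, assuming the generated constructor accepts one keyword per field as elsewhere in these models:

    thread = OpenAI::Models::Beta::Thread.new(
      id: "thread_abc123",          # illustrative ID
      created_at: Time.now.to_i,
      metadata: {purpose: "demo"},  # symbol keys, string values, per the docs above
      tool_resources: nil
    )

    thread.metadata        #=> {purpose: "demo"}
    thread.tool_resources  #=> nil until assigned via the attr_writer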
@@ -92,26 +72,24 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do - params(_: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash)) - end - def code_interpreter=(_) + params( + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do - params(_: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) + params(file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) + .void end + attr_writer :file_search # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -144,12 +122,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -166,12 +142,10 @@ module OpenAI # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } def self.new(vector_store_ids: nil) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index b44e4bcf..ed08bc73 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -11,22 +11,12 @@ module OpenAI # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. sig { returns(String) } - def assistant_id - end - - sig { params(_: String).returns(String) } - def assistant_id=(_) - end + attr_accessor :assistant_id # Override the default system message of the assistant. This is useful for # modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # The maximum number of completion tokens that may be used over the course of the # run. The run will make a best effort to use only the number of completion tokens @@ -34,12 +24,7 @@ module OpenAI # completion tokens specified, the run will end with status `incomplete`. See # `incomplete_details` for more info. 
sig { returns(T.nilable(Integer)) } - def max_completion_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_completion_tokens=(_) - end + attr_accessor :max_completion_tokens # The maximum number of prompt tokens that may be used over the course of the run. # The run will make a best effort to use only the number of prompt tokens @@ -47,12 +32,7 @@ module OpenAI # prompt tokens specified, the run will end with status `incomplete`. See # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } - def max_prompt_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_prompt_tokens=(_) - end + attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -61,38 +41,23 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } - def model - end - - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - end - def model=(_) - end + attr_accessor :model # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. sig { returns(T.nilable(T::Boolean)) } - def parallel_tool_calls - end + attr_reader :parallel_tool_calls - sig { params(_: T::Boolean).returns(T::Boolean) } - def parallel_tool_calls=(_) - end + sig { params(parallel_tool_calls: T::Boolean).void } + attr_writer :parallel_tool_calls # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -126,57 +91,21 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. 
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread)) } - def thread - end + attr_reader :thread - sig do - params(_: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)) - end - def thread=(_) - end + sig { params(thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)).void } + attr_writer :thread # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value @@ -195,48 +124,22 @@ module OpenAI ) ) end - def tool_choice - end - - sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end - def tool_choice=(_) - end + attr_accessor :tool_choice # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) - ) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. @@ -253,35 +156,7 @@ module OpenAI ) ) end - def tools - end - - sig do - params( - _: T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - ) - .returns( - T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - ) - end - def tools=(_) - end + attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -289,33 +164,22 @@ module OpenAI # # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
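Taken together, the parameters above type a single create-and-run request. A minimal sketch, assuming the client exposes the usual `beta.threads` resource layout; the assistant ID is a placeholder, and the `truncation_strategy` hash matches the type declared just below:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    run = client.beta.threads.create_and_run(
      assistant_id: "asst_abc123",        # placeholder
      instructions: "Answer in one paragraph.",
      max_completion_tokens: 256,
      temperature: 0.2,
      truncation_strategy: {type: :auto}
    )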
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) } - def truncation_strategy - end + attr_reader :truncation_strategy sig do params( - _: T.nilable( + truncation_strategy: T.nilable( T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) ) ) - .returns( - T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) - ) - ) - end - def truncation_strategy=(_) + .void end + attr_writer :truncation_strategy sig do params( @@ -449,19 +313,15 @@ module OpenAI # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message])) } - def messages - end + attr_reader :messages sig do params( - _: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] ) - .returns( - T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] - ) - end - def messages=(_) + .void end + attr_writer :messages # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -470,35 +330,24 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable( + tool_resources: T.nilable( T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) ) ) - .returns( - T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) - ) - ) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. @@ -544,37 +393,7 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the entity that is creating the message. Allowed values include: # @@ -583,29 +402,11 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. 
Use this # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # A list of files attached to the message, and the tools they should be added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment])) } - def attachments - end - - sig do - params( - _: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]) - ) - .returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment])) - end - def attachments=(_) - end + attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -614,12 +415,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -743,12 +539,10 @@ module OpenAI class Attachment < OpenAI::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The tools to add this file to. 
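In request position these attachment models are commonly written as hashes, which the sigs above permit via `OpenAI::Util::AnyHash`. An illustrative message attaching one file to the `file_search` tool; the file ID is a placeholder:

    message = {
      role: :user,
      content: "Summarize the attached report.",
      attachments: [
        {
          file_id: "file_abc123",        # placeholder
          tools: [{type: :file_search}]  # the FileSearch tool typed below
        }
      ]
    }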
sig do @@ -763,12 +557,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -776,18 +569,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools sig do params( @@ -838,12 +622,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :file_search) @@ -870,46 +649,32 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any( + code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash - ) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any( + file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Util::AnyHash - ) - ) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -948,12 +713,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -970,12 +733,10 @@ module OpenAI # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. 
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -988,29 +749,20 @@ module OpenAI ) ) end - def vector_stores - end + attr_reader :vector_stores sig do params( - _: T::Array[ + vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash - ) - ] - ) - end - def vector_stores=(_) + .void end + attr_writer :vector_stores sig do params( @@ -1052,38 +804,28 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -1092,14 +834,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig do - params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) - end - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -1150,12 +885,7 @@ module OpenAI class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. 
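The static variant that follows makes the auto strategy's two knobs explicit. A sketch of both forms, honoring the documented constraint that the overlap may not exceed half of the chunk size:

    # Default behavior: max_chunk_size_tokens 800, chunk_overlap_tokens 400.
    auto_chunking = {type: :auto}

    static_chunking = {
      type: :static,
      static: {
        max_chunk_size_tokens: 512,  # allowed range is 100..4096
        chunk_overlap_tokens: 128    # must not exceed max_chunk_size_tokens / 2
      }
    }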
@@ -1174,34 +904,22 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end - def static - end + attr_reader :static sig do params( - _: T.any( + static: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash - ) - ) - end - def static=(_) + .void end + attr_writer :static # Always `static`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -1233,22 +951,12 @@ module OpenAI # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } - def chunk_overlap_tokens - end - - sig { params(_: Integer).returns(Integer) } - def chunk_overlap_tokens=(_) - end + attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } - def max_chunk_size_tokens - end - - sig { params(_: Integer).returns(Integer) } - def max_chunk_size_tokens=(_) - end + attr_accessor :max_chunk_size_tokens sig do params( @@ -1285,40 +993,29 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any( + code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash - ) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) - ) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -1354,12 +1051,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -1376,12 +1071,10 @@ module OpenAI # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. 
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } def self.new(vector_store_ids: nil) @@ -1425,25 +1118,12 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # The number of most recent messages from the thread when constructing the context # for the run. sig { returns(T.nilable(Integer)) } - def last_messages - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def last_messages=(_) - end + attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 4fab7a4b..fe346edf 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -10,15 +10,15 @@ module OpenAI # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message])) } - def messages - end + attr_reader :messages sig do - params(_: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)]) - end - def messages=(_) + params( + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)] + ) + .void end + attr_writer :messages # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -27,29 +27,22 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
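These create-params mirror the thread portion of `create_and_run`, but as a standalone call. A minimal sketch, again assuming the standard `beta.threads` resource layout:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    thread = client.beta.threads.create(
      messages: [{role: :user, content: "Hello!"}],
      metadata: {source: "docs-example"}  # string values, per the docs above
    )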
sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)) ) - .returns(T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash))) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources sig do params( @@ -93,37 +86,7 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the entity that is creating the message. Allowed values include: # @@ -132,27 +95,11 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) - .returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # A list of files attached to the message, and the tools they should be added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment])) } - def attachments - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment])) - .returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment])) - end - def attachments=(_) - end + attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -161,12 +108,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -285,12 +227,10 @@ module OpenAI class Attachment < OpenAI::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The tools to add this file to. 
sig do @@ -305,12 +245,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -318,18 +257,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools sig do params( @@ -380,12 +310,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :file_search) @@ -412,32 +337,26 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -470,12 +389,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -492,12 +409,10 @@ module OpenAI # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. 
sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -508,29 +423,20 @@ module OpenAI T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) ) end - def vector_stores - end + attr_reader :vector_stores sig do params( - _: T::Array[ + vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash - ) - ] - ) - end - def vector_stores=(_) + .void end + attr_writer :vector_stores sig do params( @@ -572,38 +478,28 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -612,14 +508,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig do - params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) - end - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -670,12 +559,7 @@ module OpenAI class Auto < OpenAI::BaseModel # Always `auto`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. 
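The `vector_stores` helper above builds a store inline instead of referencing an existing one by ID. An illustrative `tool_resources` payload in the hash form the sigs accept; the file IDs are placeholders:

    tool_resources = {
      file_search: {
        vector_stores: [
          {
            file_ids: ["file_abc123", "file_def456"],  # placeholders
            chunking_strategy: {type: :auto},
            metadata: {project: "demo"}
          }
        ]
      }
    }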
@@ -694,34 +578,22 @@ module OpenAI OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end - def static - end + attr_reader :static sig do params( - _: T.any( + static: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash - ) - ) - end - def static=(_) + .void end + attr_writer :static # Always `static`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -753,22 +625,12 @@ module OpenAI # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } - def chunk_overlap_tokens - end - - sig { params(_: Integer).returns(Integer) } - def chunk_overlap_tokens=(_) - end + attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } - def max_chunk_size_tokens - end - - sig { params(_: Integer).returns(Integer) } - def max_chunk_size_tokens=(_) - end + attr_accessor :max_chunk_size_tokens sig do params( diff --git a/rbi/lib/openai/models/beta/thread_deleted.rbi b/rbi/lib/openai/models/beta/thread_deleted.rbi index 1ca71e29..cc908f49 100644 --- a/rbi/lib/openai/models/beta/thread_deleted.rbi +++ b/rbi/lib/openai/models/beta/thread_deleted.rbi @@ -5,28 +5,13 @@ module OpenAI module Beta class ThreadDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"thread.deleted") diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index 84e72f6e..53d58531 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -7,32 +7,20 @@ module OpenAI # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } - def data - end + attr_reader :data - sig do - params(_: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)) - end - def data=(_) - end + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)).void } + attr_writer :data sig { returns(Symbol) } - def event - end - - sig { params(_: Symbol).returns(Symbol) } - def event=(_) - end + attr_accessor :event # Whether to enable input audio transcription. 
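Deletion responses are typed by the small `ThreadDeleted` model above. A sketch of the round trip, assuming the usual `delete` resource method; the thread ID is a placeholder:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    deleted = client.beta.threads.delete("thread_abc123")  # placeholder ID
    deleted.deleted  #=> true
    deleted.object   #=> :"thread.deleted"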
sig { returns(T.nilable(T::Boolean)) } - def enabled - end + attr_reader :enabled - sig { params(_: T::Boolean).returns(T::Boolean) } - def enabled=(_) - end + sig { params(enabled: T::Boolean).void } + attr_writer :enabled # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 97e08dd0..d5156fc0 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -14,29 +14,22 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) } - def tool_resources - end + attr_reader :tool_resources sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)) ) - .returns(T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash))) - end - def tool_resources=(_) + .void end + attr_writer :tool_resources sig do params( @@ -64,32 +57,26 @@ module OpenAI class ToolResources < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch)) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) + .void end + attr_writer :file_search # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -122,12 +109,10 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. 
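`ThreadUpdateParams` only ever changes `metadata` and `tool_resources`. A minimal sketch of an update call under the same client assumptions as above; the thread ID is a placeholder:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    client.beta.threads.update(
      "thread_abc123",              # placeholder ID
      metadata: {reviewed: "true"}  # values must be strings, per the docs above
    )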
sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } def self.new(file_ids: nil) @@ -144,12 +129,10 @@ module OpenAI # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. sig { returns(T.nilable(T::Array[String])) } - def vector_store_ids - end + attr_reader :vector_store_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + sig { params(vector_store_ids: T::Array[String]).void } + attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } def self.new(vector_store_ids: nil) diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index e41f739a..c8f84631 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -6,51 +6,29 @@ module OpenAI module Threads class FileCitationAnnotation < OpenAI::BaseModel sig { returns(Integer) } - def end_index - end - - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + attr_accessor :end_index sig { returns(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation) } - def file_citation - end + attr_reader :file_citation sig do params( - _: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash) + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash)) - end - def file_citation=(_) + .void end + attr_writer :file_citation sig { returns(Integer) } - def start_index - end - - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + attr_accessor :start_index # The text in the message content that needs to be replaced. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Always `file_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant @@ -86,12 +64,7 @@ module OpenAI class FileCitation < OpenAI::BaseModel # The ID of the specific File the citation is from. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } def self.new(file_id:) diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index c825f31b..666708db 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -7,61 +7,41 @@ module OpenAI class FileCitationDeltaAnnotation < OpenAI::BaseModel # The index of the annotation in the text content part. 
sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `file_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(Integer)) } - def end_index - end + attr_reader :end_index - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + sig { params(end_index: Integer).void } + attr_writer :end_index sig { returns(T.nilable(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation)) } - def file_citation - end + attr_reader :file_citation sig do params( - _: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) - ) - end - def file_citation=(_) + .void end + attr_writer :file_citation sig { returns(T.nilable(Integer)) } - def start_index - end + attr_reader :start_index - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + sig { params(start_index: Integer).void } + attr_writer :start_index # The text in the message content that needs to be replaced. sig { returns(T.nilable(String)) } - def text - end + attr_reader :text - sig { params(_: String).returns(String) } - def text=(_) - end + sig { params(text: String).void } + attr_writer :text # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant @@ -99,21 +79,17 @@ module OpenAI class FileCitation < OpenAI::BaseModel # The ID of the specific File the citation is from. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The specific quote in the file. 
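             #
             # Both fields are optional here, so a partial delta constructs
             # cleanly (sketch; the file ID is a placeholder):
             #
             #   citation = FileCitation.new(file_id: "file_abc123")
             #   citation.quote  # => nil until a later delta supplies it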
sig { returns(T.nilable(String)) } - def quote - end + attr_reader :quote - sig { params(_: String).returns(String) } - def quote=(_) - end + sig { params(quote: String).void } + attr_writer :quote sig { params(file_id: String, quote: String).returns(T.attached_class) } def self.new(file_id: nil, quote: nil) diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index dd673abd..33a5f4e1 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -6,49 +6,29 @@ module OpenAI module Threads class FilePathAnnotation < OpenAI::BaseModel sig { returns(Integer) } - def end_index - end - - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + attr_accessor :end_index sig { returns(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath) } - def file_path - end + attr_reader :file_path sig do - params(_: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash)) - end - def file_path=(_) + params( + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :file_path sig { returns(Integer) } - def start_index - end - - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + attr_accessor :start_index # The text in the message content that needs to be replaced. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Always `file_path`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A URL for the file that's generated when the assistant used the # `code_interpreter` tool to generate a file. @@ -83,12 +63,7 @@ module OpenAI class FilePath < OpenAI::BaseModel # The ID of the file that was generated. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } def self.new(file_id:) diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index b154c4d8..7c79aae6 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -7,57 +7,41 @@ module OpenAI class FilePathDeltaAnnotation < OpenAI::BaseModel # The index of the annotation in the text content part. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `file_path`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(Integer)) } - def end_index - end + attr_reader :end_index - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + sig { params(end_index: Integer).void } + attr_writer :end_index sig { returns(T.nilable(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath)) } - def file_path - end + attr_reader :file_path sig do - params(_: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash)) - end - def file_path=(_) + params( + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :file_path sig { returns(T.nilable(Integer)) } - def start_index - end + attr_reader :start_index - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + sig { params(start_index: Integer).void } + attr_writer :start_index # The text in the message content that needs to be replaced. sig { returns(T.nilable(String)) } - def text - end + attr_reader :text - sig { params(_: String).returns(String) } - def text=(_) - end + sig { params(text: String).void } + attr_writer :text # A URL for the file that's generated when the assistant used the # `code_interpreter` tool to generate a file. @@ -94,12 +78,10 @@ module OpenAI class FilePath < OpenAI::BaseModel # The ID of the file that was generated. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } def self.new(file_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index edcd6cf4..e7cc0c6e 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -9,25 +9,15 @@ module OpenAI # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
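           #
           # `OrSymbol` enum parameters accept plain Ruby symbols, so
           # construction is direct (sketch; the file ID is a placeholder):
           #
           #   image = ImageFile.new(file_id: "file_abc123", detail: :low)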
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } - def detail - end + attr_reader :detail - sig do - params(_: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - end - def detail=(_) - end + sig { params(detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol).void } + attr_writer :detail sig do params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 4c2d8b93..18986902 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -6,24 +6,14 @@ module OpenAI module Threads class ImageFileContentBlock < OpenAI::BaseModel sig { returns(OpenAI::Models::Beta::Threads::ImageFile) } - def image_file - end + attr_reader :image_file - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)) - end - def image_file=(_) - end + sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)).void } + attr_writer :image_file # Always `image_file`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 3d650791..a560c68b 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -8,26 +8,19 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } - def detail - end + attr_reader :detail - sig do - params(_: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol) - end - def detail=(_) - end + sig { params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol).void } + attr_writer :detail # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id sig do params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 6aa45d0d..3dcd3939 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -7,32 +7,17 @@ module OpenAI class ImageFileDeltaBlock < OpenAI::BaseModel # The index of the content part in the message. 
sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `image_file`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta)) } - def image_file - end + attr_reader :image_file - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)) - end - def image_file=(_) - end + sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)).void } + attr_writer :image_file # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 9b0d2994..c27cc50c 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -8,25 +8,15 @@ module OpenAI # The external URL of the image, must be a supported image types: jpeg, jpg, png, # gif, webp. sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } - def detail - end + attr_reader :detail - sig do - params(_: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - end - def detail=(_) - end + sig { params(detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol).void } + attr_writer :detail sig do params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index 7a20b522..2c1839ed 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -6,24 +6,14 @@ module OpenAI module Threads class ImageURLContentBlock < OpenAI::BaseModel sig { returns(OpenAI::Models::Beta::Threads::ImageURL) } - def image_url - end + attr_reader :image_url - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)) - end - def image_url=(_) - end + sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)).void } + attr_writer :image_url # The type of the content part. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # References an image URL in the content of a message. sig do diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index ac2e9bb6..e317b878 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -8,25 +8,18 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } - def detail - end + attr_reader :detail - sig do - params(_: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol) - end - def detail=(_) - end + sig { params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol).void } + attr_writer :detail # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, # webp. sig { returns(T.nilable(String)) } - def url - end + attr_reader :url - sig { params(_: String).returns(String) } - def url=(_) - end + sig { params(url: String).void } + attr_writer :url sig do params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index 5b278b0a..664de983 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -7,32 +7,17 @@ module OpenAI class ImageURLDeltaBlock < OpenAI::BaseModel # The index of the content part in the message. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `image_url`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta)) } - def image_url - end + attr_reader :image_url - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)) - end - def image_url=(_) - end + sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)).void } + attr_writer :image_url # References an image URL in the content of a message. sig do diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 63ead9a1..bf9c7500 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -7,44 +7,21 @@ module OpenAI class Message < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # If applicable, the ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) that # authored this message. sig { returns(T.nilable(String)) } - def assistant_id - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def assistant_id=(_) - end + attr_accessor :assistant_id # A list of files attached to the message, and the tools they were added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment])) } - def attachments - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment])) - .returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment])) - end - def attachments=(_) - end + attr_accessor :attachments # The Unix timestamp (in seconds) for when the message was completed. 
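           #
           # Being nullable, it stays an `attr_accessor`; converting when present
           # (sketch, using Ruby's core `Time`):
           #
           #   Time.at(message.completed_at) if message.completed_at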
sig { returns(T.nilable(Integer)) } - def completed_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def completed_at=(_) - end + attr_accessor :completed_at # The content of the message in array of text and/or images. sig do @@ -59,67 +36,27 @@ module OpenAI ] ) end - def content - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) - ] - ) - end - def content=(_) - end + attr_accessor :content # The Unix timestamp (in seconds) for when the message was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The Unix timestamp (in seconds) for when the message was marked as incomplete. sig { returns(T.nilable(Integer)) } - def incomplete_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def incomplete_at=(_) - end + attr_accessor :incomplete_at # On an incomplete message, details about why the message is incomplete. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails)) } - def incomplete_details - end + attr_reader :incomplete_details sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) - ) - end - def incomplete_details=(_) + .void end + attr_writer :incomplete_details # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -128,67 +65,31 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The object type, which is always `thread.message`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The entity that produced the message. One of `user` or `assistant`. sig { returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - end - def role=(_) - end + attr_accessor :role # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) # associated with the creation of this message. Value is `null` when messages are # created manually using the create message or create thread endpoints. 
sig { returns(T.nilable(String)) } - def run_id - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def run_id=(_) - end + attr_accessor :run_id # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that # this message belongs to. sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -271,12 +172,10 @@ module OpenAI class Attachment < OpenAI::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The tools to add this file to. sig do @@ -291,12 +190,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -304,18 +202,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools sig do params( @@ -366,12 +255,7 @@ module OpenAI class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :file_search) @@ -398,15 +282,7 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason the message is incomplete. sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } - def reason - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) - end - def reason=(_) - end + attr_accessor :reason # On an incomplete message, details about why the message is incomplete. 
sig do diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 350fd7ae..f65aa74e 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -23,37 +23,7 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the entity that is creating the message. Allowed values include: # @@ -62,27 +32,11 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # A list of files attached to the message, and the tools they should be added to. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment])) } - def attachments - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment])) - .returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment])) - end - def attachments=(_) - end + attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -91,12 +45,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -217,12 +166,10 @@ module OpenAI class Attachment < OpenAI::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The tools to add this file to. 
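               #
               # Since `OpenAI::Util::AnyHash` is part of the union, a bare hash
               # should also be accepted for each tool (sketch; the file ID is a
               # placeholder):
               #
               #   Attachment.new(
               #     file_id: "file_abc123",
               #     tools: [{type: :code_interpreter}]
               #   )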
sig do @@ -237,12 +184,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -250,18 +196,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools sig do params( @@ -312,12 +249,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :file_search) diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index 74583e5c..dd6b3c5a 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id sig do params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/models/beta/threads/message_deleted.rbi b/rbi/lib/openai/models/beta/threads/message_deleted.rbi index 9684886b..df1dc2e2 100644 --- a/rbi/lib/openai/models/beta/threads/message_deleted.rbi +++ b/rbi/lib/openai/models/beta/threads/message_deleted.rbi @@ -6,28 +6,13 @@ module OpenAI module Threads class MessageDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"thread.message.deleted") diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 0e044cb6..22b4e446 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -20,12 +20,11 @@ module OpenAI ) ) end - def content - end + attr_reader :content sig do params( - _: T::Array[ + content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Util::AnyHash, @@ -35,32 +34,16 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) - ] - ) - end - def content=(_) + .void end + attr_writer :content # The entity that produced the message. One of `user` or `assistant`. 
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)) } - def role - end + attr_reader :role - sig do - params(_: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol) - end - def role=(_) - end + sig { params(role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol).void } + attr_writer :role # The delta containing the fields that have changed on the Message. sig do diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index e52c4656..5ac1d6f1 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -7,33 +7,18 @@ module OpenAI class MessageDeltaEvent < OpenAI::BaseModel # The identifier of the message, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The delta containing the fields that have changed on the Message. sig { returns(OpenAI::Models::Beta::Threads::MessageDelta) } - def delta - end + attr_reader :delta - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)) - end - def delta=(_) - end + sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)).void } + attr_writer :delta # The object type, which is always `thread.message.delta`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # Represents a message delta i.e. any changed fields on a message during # streaming. diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index aecbbbaa..be115f45 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -13,56 +13,43 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
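           #
           # A cursor-pagination sketch, reusing the `obj_foo` placeholder from
           # the docstrings above:
           #
           #   params = MessageListParams.new(after: "obj_foo", limit: 20, order: :desc)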
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol).void } + attr_writer :order # Filter messages by the run ID that generated them. sig { returns(T.nilable(String)) } - def run_id - end + attr_reader :run_id - sig { params(_: String).returns(String) } - def run_id=(_) - end + sig { params(run_id: String).void } + attr_writer :run_id sig do params( diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index ecc15e69..3c2f8ecd 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id sig do params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index e25f66a7..fde62361 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -23,12 +18,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi index c2179549..b9350051 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi @@ -6,21 +6,11 @@ module OpenAI module Threads class RefusalContentBlock < OpenAI::BaseModel sig { returns(String) } - def refusal - end - - sig { params(_: String).returns(String) } - def refusal=(_) - end + attr_accessor :refusal # Always `refusal`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The refusal content generated by the assistant. 
sig { params(refusal: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi index 87dd9957..72b6092e 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi @@ -7,29 +7,17 @@ module OpenAI class RefusalDeltaBlock < OpenAI::BaseModel # The index of the refusal part in the message. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `refusal`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(String)) } - def refusal - end + attr_reader :refusal - sig { params(_: String).returns(String) } - def refusal=(_) - end + sig { params(refusal: String).void } + attr_writer :refusal # The refusal content that is part of a message. sig { params(index: Integer, refusal: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index ce2250e5..f8511f4f 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -10,38 +10,24 @@ module OpenAI # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) # endpoint. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The function definition. sig { returns(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function) } - def function - end + attr_reader :function sig do params( - _: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) - ) - end - def function=(_) + .void end + attr_writer :function # The type of tool call the output is required for. For now, this is always # `function`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Tool call objects sig do @@ -67,21 +53,11 @@ module OpenAI class Function < OpenAI::BaseModel # The arguments that the model expects you to pass to the function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The name of the function. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The function definition. sig { params(arguments: String, name: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index c6efb6d4..5be2cbe4 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -7,124 +7,72 @@ module OpenAI class Run < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. 
sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # execution of this run. sig { returns(String) } - def assistant_id - end - - sig { params(_: String).returns(String) } - def assistant_id=(_) - end + attr_accessor :assistant_id # The Unix timestamp (in seconds) for when the run was cancelled. sig { returns(T.nilable(Integer)) } - def cancelled_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def cancelled_at=(_) - end + attr_accessor :cancelled_at # The Unix timestamp (in seconds) for when the run was completed. sig { returns(T.nilable(Integer)) } - def completed_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def completed_at=(_) - end + attr_accessor :completed_at # The Unix timestamp (in seconds) for when the run was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The Unix timestamp (in seconds) for when the run will expire. sig { returns(T.nilable(Integer)) } - def expires_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def expires_at=(_) - end + attr_accessor :expires_at # The Unix timestamp (in seconds) for when the run failed. sig { returns(T.nilable(Integer)) } - def failed_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def failed_at=(_) - end + attr_accessor :failed_at # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) } - def incomplete_details - end + attr_reader :incomplete_details sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash))) - end - def incomplete_details=(_) + params( + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :incomplete_details # The instructions that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. sig { returns(String) } - def instructions - end - - sig { params(_: String).returns(String) } - def instructions=(_) - end + attr_accessor :instructions # The last error associated with this run. Will be `null` if there are no errors. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::LastError)) } - def last_error - end + attr_reader :last_error sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) - end - def last_error=(_) + params(last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) + .void end + attr_writer :last_error # The maximum number of completion tokens specified to have been used over the # course of the run. 
sig { returns(T.nilable(Integer)) } - def max_completion_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_completion_tokens=(_) - end + attr_accessor :max_completion_tokens # The maximum number of prompt tokens specified to have been used over the course # of the run. sig { returns(T.nilable(Integer)) } - def max_prompt_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_prompt_tokens=(_) - end + attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -133,56 +81,36 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The model that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always `thread.run`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. sig { returns(T::Boolean) } - def parallel_tool_calls - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def parallel_tool_calls=(_) - end + attr_accessor :parallel_tool_calls # Details on the action required to continue the run. Will be `null` if no action # is required. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) } - def required_action - end + attr_reader :required_action sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash))) - end - def required_action=(_) + params( + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :required_action # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -216,66 +144,22 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # The Unix timestamp (in seconds) for when the run was started. 
sig { returns(T.nilable(Integer)) } - def started_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def started_at=(_) - end + attr_accessor :started_at # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) # that was executed on as a part of this run. sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value @@ -294,29 +178,7 @@ module OpenAI ) ) end - def tool_choice - end - - sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end - def tool_choice=(_) - end + attr_accessor :tool_choice # The list of tools that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for @@ -332,75 +194,36 @@ module OpenAI ] ) end - def tools - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - end - def tools=(_) - end + attr_accessor :tools # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) } - def truncation_strategy - end + attr_reader :truncation_strategy sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash))) - end - def truncation_strategy=(_) + params( + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :truncation_strategy # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } - def usage - end + attr_reader :usage - sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))) - end - def usage=(_) - end + sig { params(usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))).void } + attr_writer :usage # The sampling temperature used for this run. If not set, defaults to 1. 
sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # The nucleus sampling value used for this run. If not set, defaults to 1. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -547,15 +370,10 @@ module OpenAI # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } - def reason - end + attr_reader :reason - sig do - params(_: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) - end - def reason=(_) - end + sig { params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol).void } + attr_writer :reason # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. @@ -596,24 +414,11 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } - def code - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) - end - def code=(_) - end + attr_accessor :code # A human-readable description of the error. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # The last error associated with this run. Will be `null` if there are no errors. sig do @@ -655,28 +460,19 @@ module OpenAI class RequiredAction < OpenAI::BaseModel # Details on the tool outputs needed for this run to continue. sig { returns(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) } - def submit_tool_outputs - end + attr_reader :submit_tool_outputs sig do params( - _: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) + submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) - ) - end - def submit_tool_outputs=(_) + .void end + attr_writer :submit_tool_outputs # For now, this is always `submit_tool_outputs`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Details on the action required to continue the run. Will be `null` if no action # is required. @@ -702,15 +498,7 @@ module OpenAI class SubmitToolOutputs < OpenAI::BaseModel # A list of the relevant tool calls. sig { returns(T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) } - def tool_calls - end - - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) - .returns(T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) - end - def tool_calls=(_) - end + attr_accessor :tool_calls # Details on the tool outputs needed for this run to continue. 
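                 #
                 # Draining the pending calls when a run is in `requires_action`
                 # (sketch; assumes a retrieved `run` and Ruby's stdlib JSON):
                 #
                 #   run.required_action.submit_tool_outputs.tool_calls.each do |call|
                 #     args = JSON.parse(call.function.arguments)
                 #     # ... invoke the tool named by call.function.name with args ...
                 #   end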
sig do @@ -734,25 +522,12 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) - end - def type=(_) - end + attr_accessor :type # The number of most recent messages from the thread when constructing the context # for the run. sig { returns(T.nilable(Integer)) } - def last_messages - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def last_messages=(_) - end + attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. @@ -805,30 +580,15 @@ module OpenAI class Usage < OpenAI::BaseModel # Number of completion tokens used over the course of the run. sig { returns(Integer) } - def completion_tokens - end - - sig { params(_: Integer).returns(Integer) } - def completion_tokens=(_) - end + attr_accessor :completion_tokens # Number of prompt tokens used over the course of the run. sig { returns(Integer) } - def prompt_tokens - end - - sig { params(_: Integer).returns(Integer) } - def prompt_tokens=(_) - end + attr_accessor :prompt_tokens # Total number of tokens used (prompt + completion). sig { returns(Integer) } - def total_tokens - end - - sig { params(_: Integer).returns(Integer) } - def total_tokens=(_) - end + attr_accessor :total_tokens # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index fd833391..d7bb357e 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id sig do params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 49b45721..096ace11 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -12,12 +12,7 @@ module OpenAI # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. sig { returns(String) } - def assistant_id - end - - sig { params(_: String).returns(String) } - def assistant_id=(_) - end + attr_accessor :assistant_id # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` @@ -27,49 +22,26 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
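        # A usage sketch (the call shape and the ID here are assumptions made for
        # illustration, not part of this file):
        #
        #   client.beta.threads.runs.create(
        #     thread_id,
        #     assistant_id: "asst_abc123",
        #     include: [:"step_details.tool_calls[*].file_search.results[*].content"]
        #   )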
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } - def include - end + attr_reader :include - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end - def include=(_) - end + sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + attr_writer :include # Appends additional instructions at the end of the instructions for the run. This # is useful for modifying the behavior on a per-run basis without overriding other # instructions. sig { returns(T.nilable(String)) } - def additional_instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def additional_instructions=(_) - end + attr_accessor :additional_instructions # Adds additional messages to the thread before creating the run. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage])) } - def additional_messages - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage])) - .returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage])) - end - def additional_messages=(_) - end + attr_accessor :additional_messages # Overrides the # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) # of the assistant. This is useful for modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # The maximum number of completion tokens that may be used over the course of the # run. The run will make a best effort to use only the number of completion tokens @@ -77,12 +49,7 @@ module OpenAI # completion tokens specified, the run will end with status `incomplete`. See # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } - def max_completion_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_completion_tokens=(_) - end + attr_accessor :max_completion_tokens # The maximum number of prompt tokens that may be used over the course of the run. # The run will make a best effort to use only the number of prompt tokens @@ -90,12 +57,7 @@ module OpenAI # prompt tokens specified, the run will end with status `incomplete`. See # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } - def max_prompt_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_prompt_tokens=(_) - end + attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -104,38 +66,23 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. 
If not, the model associated with the # assistant will be used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } - def model - end - - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) - end - def model=(_) - end + attr_accessor :model # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. sig { returns(T.nilable(T::Boolean)) } - def parallel_tool_calls - end + attr_reader :parallel_tool_calls - sig { params(_: T::Boolean).returns(T::Boolean) } - def parallel_tool_calls=(_) - end + sig { params(parallel_tool_calls: T::Boolean).void } + attr_writer :parallel_tool_calls # **o-series models only** # @@ -144,15 +91,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } - def reasoning_effort - end - - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end - def reasoning_effort=(_) - end + attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -186,44 +125,13 @@ module OpenAI ) ) end - def response_format - end - - sig do - params( - _: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - .returns( - T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ) - ) - end - def response_format=(_) - end + attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value @@ -242,29 +150,7 @@ module OpenAI ) ) end - def tool_choice - end - - sig do - params( - _: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - .returns( - T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice - ) - ) - ) - end - def tool_choice=(_) - end + attr_accessor :tool_choice # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. 
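        # For example, a single run could be limited to file search (a sketch; the
        # plain hash form is assumed to be coerced by the SDK, as the `AnyHash`
        # unions elsewhere in this patch suggest):
        #
        #   tools: [{type: :file_search}]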
@@ -281,35 +167,7 @@ module OpenAI ) ) end - def tools - end - - sig do - params( - _: T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - ) - .returns( - T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - ) - end - def tools=(_) - end + attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -317,33 +175,22 @@ module OpenAI # # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) } - def truncation_strategy - end + attr_reader :truncation_strategy sig do params( - _: T.nilable( + truncation_strategy: T.nilable( T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) ) ) - .returns( - T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) - ) - ) - end - def truncation_strategy=(_) + .void end + attr_writer :truncation_strategy sig do params( @@ -481,37 +328,7 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the entity that is creating the message. Allowed values include: # @@ -520,15 +337,7 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # A list of files attached to the message, and the tools they should be added to. sig do @@ -536,19 +345,7 @@ module OpenAI T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]) ) end - def attachments - end - - sig do - params( - _: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]) - ) - .returns( - T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]) - ) - end - def attachments=(_) - end + attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -557,12 +354,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( @@ -692,12 +484,10 @@ module OpenAI class Attachment < OpenAI::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The tools to add this file to. sig do @@ -712,12 +502,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Util::AnyHash, @@ -725,18 +514,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools sig do params( @@ -787,12 +567,7 @@ module OpenAI class FileSearch < OpenAI::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :file_search) @@ -839,25 +614,12 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # The number of most recent messages from the thread when constructing the context # for the run. sig { returns(T.nilable(Integer)) } - def last_messages - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def last_messages=(_) - end + attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 8021be63..54ec8f4c 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -13,47 +13,36 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index 90546b16..56e89445 100644 --- a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id sig do params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index aad29cff..48f5b347 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -9,24 +9,11 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # A list of tools for which the outputs are being submitted. sig { returns(T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput]) } - def tool_outputs - end - - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput]) - .returns(T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput]) - end - def tool_outputs=(_) - end + attr_accessor :tool_outputs sig do params( @@ -55,22 +42,18 @@ module OpenAI class ToolOutput < OpenAI::BaseModel # The output of the tool call to be submitted to continue the run. sig { returns(T.nilable(String)) } - def output - end + attr_reader :output - sig { params(_: String).returns(String) } - def output=(_) - end + sig { params(output: String).void } + attr_writer :output # The ID of the tool call in the `required_action` object within the run object # the output is being submitted for. 
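          # An illustrative entry (the ID and output values are made up):
          #
          #   {tool_call_id: "call_abc123", output: "22C, sunny"}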
sig { returns(T.nilable(String)) } - def tool_call_id - end + attr_reader :tool_call_id - sig { params(_: String).returns(String) } - def tool_call_id=(_) - end + sig { params(tool_call_id: String).void } + attr_writer :tool_call_id sig { params(output: String, tool_call_id: String).returns(T.attached_class) } def self.new(output: nil, tool_call_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 41fc9c62..65bfc898 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -9,12 +9,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -23,12 +18,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi index a6c26d19..76b660b5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -8,30 +8,18 @@ module OpenAI class CodeInterpreterLogs < OpenAI::BaseModel # The index of the output in the outputs array. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `logs`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The text output from the Code Interpreter tool call. sig { returns(T.nilable(String)) } - def logs - end + attr_reader :logs - sig { params(_: String).returns(String) } - def logs=(_) - end + sig { params(logs: String).void } + attr_writer :logs # Text output from the Code Interpreter tool call as part of a run step. sig { params(index: Integer, logs: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 97783911..3d0e6331 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -8,36 +8,22 @@ module OpenAI class CodeInterpreterOutputImage < OpenAI::BaseModel # The index of the output in the outputs array. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `image`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image)) } - def image - end + attr_reader :image sig do params( - _: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) + image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) - ) - end - def image=(_) + .void end + attr_writer :image sig do params( @@ -63,12 +49,10 @@ module OpenAI # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } def self.new(file_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 7dcb0cba..8d3d6522 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -8,44 +8,27 @@ module OpenAI class CodeInterpreterToolCall < OpenAI::BaseModel # The ID of the tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Code Interpreter tool call definition. sig { returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any( + code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Util::AnyHash - ) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter # The type of tool call. This is always going to be `code_interpreter` for this # type of tool call. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Details of the Code Interpreter tool call the run step was involved in. sig do @@ -78,12 +61,7 @@ module OpenAI class CodeInterpreter < OpenAI::BaseModel # The input to the Code Interpreter tool call. sig { returns(String) } - def input - end - - sig { params(_: String).returns(String) } - def input=(_) - end + attr_accessor :input # The outputs from the Code Interpreter tool call. Code Interpreter can output one # or more items, including text (`logs`) or images (`image`). 
Each of these are @@ -98,29 +76,7 @@ module OpenAI ] ) end - def outputs - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - ] - ) - end - def outputs=(_) - end + attr_accessor :outputs # The Code Interpreter tool call definition. sig do @@ -173,21 +129,11 @@ module OpenAI class Logs < OpenAI::BaseModel # The text output from the Code Interpreter tool call. sig { returns(String) } - def logs - end - - sig { params(_: String).returns(String) } - def logs=(_) - end + attr_accessor :logs # Always `logs`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Text output from the Code Interpreter tool call as part of a run step. sig { params(logs: String, type: Symbol).returns(T.attached_class) } @@ -205,34 +151,22 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image ) end - def image - end + attr_reader :image sig do params( - _: T.any( + image: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Util::AnyHash - ) - ) - end - def image=(_) + .void end + attr_writer :image # Always `image`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -263,12 +197,7 @@ module OpenAI # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } def self.new(file_id:) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 244ab852..f8b5b1e9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -8,53 +8,34 @@ module OpenAI class CodeInterpreterToolCallDelta < OpenAI::BaseModel # The index of the tool call in the tool calls array. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of tool call. This is always going to be `code_interpreter` for this # type of tool call. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The ID of the tool call. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id # The Code Interpreter tool call definition. 
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter)) } - def code_interpreter - end + attr_reader :code_interpreter sig do params( - _: T.any( + code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Util::AnyHash - ) - ) - end - def code_interpreter=(_) + .void end + attr_writer :code_interpreter # Details of the Code Interpreter tool call the run step was involved in. sig do @@ -89,12 +70,10 @@ module OpenAI class CodeInterpreter < OpenAI::BaseModel # The input to the Code Interpreter tool call. sig { returns(T.nilable(String)) } - def input - end + attr_reader :input - sig { params(_: String).returns(String) } - def input=(_) - end + sig { params(input: String).void } + attr_writer :input # The outputs from the Code Interpreter tool call. Code Interpreter can output one # or more items, including text (`logs`) or images (`image`). Each of these are @@ -111,12 +90,11 @@ module OpenAI ) ) end - def outputs - end + attr_reader :outputs sig do params( - _: T::Array[ + outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Util::AnyHash, @@ -124,18 +102,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - ] - ) - end - def outputs=(_) + .void end + attr_writer :outputs # The Code Interpreter tool call definition. sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index ae88c21f..4b92c053 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -8,36 +8,24 @@ module OpenAI class FileSearchToolCall < OpenAI::BaseModel # The ID of the tool call object. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # For now, this is always going to be an empty object. sig { returns(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch) } - def file_search - end + attr_reader :file_search sig do params( - _: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash)) - end - def file_search=(_) + .void end + attr_writer :file_search # The type of tool call. This is always going to be `file_search` for this type of # tool call. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -62,41 +50,30 @@ module OpenAI class FileSearch < OpenAI::BaseModel # The ranking options for the file search. 
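            # Reading them from a parsed tool call might look like this (the value is
            # illustrative, not a default taken from this file):
            #
            #   opts = file_search.ranking_options
            #   opts.score_threshold # => e.g. 0.5, always between 0 and 1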
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions)) } - def ranking_options - end + attr_reader :ranking_options sig do params( - _: T.any( + ranking_options: T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Util::AnyHash - ) - ) - end - def ranking_options=(_) + .void end + attr_writer :ranking_options # The results of the file search. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result])) } - def results - end + attr_reader :results sig do params( - _: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + results: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] ) - .returns( - T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] - ) - end - def results=(_) + .void end + attr_writer :results # For now, this is always going to be an empty object. sig do @@ -132,29 +109,12 @@ module OpenAI OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol ) end - def ranker - end - - sig do - params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end - def ranker=(_) - end + attr_accessor :ranker # The score threshold for the file search. All values must be a floating point # number between 0 and 1. sig { returns(Float) } - def score_threshold - end - - sig { params(_: Float).returns(Float) } - def score_threshold=(_) - end + attr_accessor :score_threshold # The ranking options for the file search. sig do @@ -221,31 +181,16 @@ module OpenAI class Result < OpenAI::BaseModel # The ID of the file that result was found in. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The name of the file that result was found in. sig { returns(String) } - def file_name - end - - sig { params(_: String).returns(String) } - def file_name=(_) - end + attr_accessor :file_name # The score of the result. All values must be a floating point number between 0 # and 1. sig { returns(Float) } - def score - end - - sig { params(_: Float).returns(Float) } - def score=(_) - end + attr_accessor :score # The content of the result that was found. The content is only included if # requested via the include query parameter. @@ -254,29 +199,20 @@ module OpenAI T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) ) end - def content - end + attr_reader :content sig do params( - _: T::Array[ + content: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Util::AnyHash - ) - ] - ) - end - def content=(_) + .void end + attr_writer :content # A result instance of the file search. 
sig do @@ -313,12 +249,10 @@ module OpenAI class Content < OpenAI::BaseModel # The text content of the file. sig { returns(T.nilable(String)) } - def text - end + attr_reader :text - sig { params(_: String).returns(String) } - def text=(_) - end + sig { params(text: String).void } + attr_writer :text # The type of the content. sig do @@ -328,19 +262,15 @@ module OpenAI ) ) end - def type - end + attr_reader :type sig do params( - _: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol ) - .returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol - ) - end - def type=(_) + .void end + attr_writer :type sig do params( diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 4986a86f..8dda0ce6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -8,40 +8,23 @@ module OpenAI class FileSearchToolCallDelta < OpenAI::BaseModel # For now, this is always going to be an empty object. sig { returns(T.anything) } - def file_search - end - - sig { params(_: T.anything).returns(T.anything) } - def file_search=(_) - end + attr_accessor :file_search # The index of the tool call in the tool calls array. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of tool call. This is always going to be `file_search` for this type of # tool call. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The ID of the tool call object. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id sig do params( diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 4c691bf9..3e337e83 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -8,34 +8,24 @@ module OpenAI class FunctionToolCall < OpenAI::BaseModel # The ID of the tool call object. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The definition of the function that was called. sig { returns(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function) } - def function - end + attr_reader :function sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash)) - end - def function=(_) + params( + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :function # The type of tool call. This is always going to be `function` for this type of # tool call. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -60,32 +50,17 @@ module OpenAI class Function < OpenAI::BaseModel # The arguments passed to the function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The name of the function. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The output of the function. This will be `null` if the outputs have not been # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) # yet. sig { returns(T.nilable(String)) } - def output - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def output=(_) - end + attr_accessor :output # The definition of the function that was called. sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 8819f4f0..04b5a69e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -8,47 +8,31 @@ module OpenAI class FunctionToolCallDelta < OpenAI::BaseModel # The index of the tool call in the tool calls array. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of tool call. This is always going to be `function` for this type of # tool call. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The ID of the tool call object. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id # The definition of the function that was called. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function)) } - def function - end + attr_reader :function sig do params( - _: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) - ) - end - def function=(_) + .void end + attr_writer :function sig do params( @@ -79,32 +63,23 @@ module OpenAI class Function < OpenAI::BaseModel # The arguments passed to the function. sig { returns(T.nilable(String)) } - def arguments - end + attr_reader :arguments - sig { params(_: String).returns(String) } - def arguments=(_) - end + sig { params(arguments: String).void } + attr_writer :arguments # The name of the function. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # The output of the function. This will be `null` if the outputs have not been # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) # yet. sig { returns(T.nilable(String)) } - def output - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def output=(_) - end + attr_accessor :output # The definition of the function that was called. 
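            # `arguments` typically holds JSON-encoded text; in a delta it may be a
            # partial fragment, so decoding only makes sense once the call is complete
            # (a sketch, with a made-up payload):
            #
            #   require "json"
            #   args = JSON.parse(function.arguments) # => {"location" => "Berlin"}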
sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 6c36a326..8a929854 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -7,34 +7,22 @@ module OpenAI module Runs class MessageCreationStepDetails < OpenAI::BaseModel sig { returns(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation) } - def message_creation - end + attr_reader :message_creation sig do params( - _: T.any( + message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Util::AnyHash - ) - ) - end - def message_creation=(_) + .void end + attr_writer :message_creation # Always `message_creation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Details of the message creation by the run step. sig do @@ -65,12 +53,7 @@ module OpenAI class MessageCreation < OpenAI::BaseModel # The ID of the message that was created by this run step. sig { returns(String) } - def message_id - end - - sig { params(_: String).returns(String) } - def message_id=(_) - end + attr_accessor :message_id sig { params(message_id: String).returns(T.attached_class) } def self.new(message_id:) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index c2d4d482..03af7f5d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -8,84 +8,47 @@ module OpenAI class RunStep < OpenAI::BaseModel # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) # associated with the run step. sig { returns(String) } - def assistant_id - end - - sig { params(_: String).returns(String) } - def assistant_id=(_) - end + attr_accessor :assistant_id # The Unix timestamp (in seconds) for when the run step was cancelled. sig { returns(T.nilable(Integer)) } - def cancelled_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def cancelled_at=(_) - end + attr_accessor :cancelled_at # The Unix timestamp (in seconds) for when the run step completed. sig { returns(T.nilable(Integer)) } - def completed_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def completed_at=(_) - end + attr_accessor :completed_at # The Unix timestamp (in seconds) for when the run step was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The Unix timestamp (in seconds) for when the run step expired. A step is # considered expired if the parent run is expired. sig { returns(T.nilable(Integer)) } - def expired_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def expired_at=(_) - end + attr_accessor :expired_at # The Unix timestamp (in seconds) for when the run step failed. 
sig { returns(T.nilable(Integer)) } - def failed_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def failed_at=(_) - end + attr_accessor :failed_at # The last error associated with this run step. Will be `null` if there are no # errors. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) } - def last_error - end + attr_reader :last_error sig do params( - _: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)) + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)) ) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash))) - end - def last_error=(_) + .void end + attr_writer :last_error # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -94,44 +57,21 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The object type, which is always `thread.run.step`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that # this run step is a part of. sig { returns(String) } - def run_id - end - - sig { params(_: String).returns(String) } - def run_id=(_) - end + attr_accessor :run_id # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The details of the run step. sig do @@ -142,60 +82,29 @@ module OpenAI ) ) end - def step_details - end - - sig do - params( - _: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - ) - ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - ) - ) - end - def step_details=(_) - end + attr_accessor :step_details # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) # that was run. sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # The type of run step, which can be either `message_creation` or `tool_calls`. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - end - def type=(_) - end + attr_accessor :type # Usage statistics related to the run step. This value will be `null` while the # run step's status is `in_progress`. 
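        # Given the field docs below, a finished step's totals reconcile as follows
        # (illustrative):
        #
        #   if (u = step.usage)
        #     u.total_tokens == u.prompt_tokens + u.completion_tokens # => true
        #   end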
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) } - def usage - end + attr_reader :usage sig do - params(_: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash))) - end - def usage=(_) + params( + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :usage # Represents a step in execution of a run. sig do @@ -275,24 +184,11 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } - def code - end - - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) - end - def code=(_) - end + attr_accessor :code # A human-readable description of the error. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # The last error associated with this run step. Will be `null` if there are no # errors. @@ -404,30 +300,15 @@ module OpenAI class Usage < OpenAI::BaseModel # Number of completion tokens used over the course of the run step. sig { returns(Integer) } - def completion_tokens - end - - sig { params(_: Integer).returns(Integer) } - def completion_tokens=(_) - end + attr_accessor :completion_tokens # Number of prompt tokens used over the course of the run step. sig { returns(Integer) } - def prompt_tokens - end - - sig { params(_: Integer).returns(Integer) } - def prompt_tokens=(_) - end + attr_accessor :prompt_tokens # Total number of tokens used (prompt + completion). sig { returns(Integer) } - def total_tokens - end - - sig { params(_: Integer).returns(Integer) } - def total_tokens=(_) - end + attr_accessor :total_tokens # Usage statistics related to the run step. This value will be `null` while the # run step's status is `in_progress`. diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index c291b78b..ecd4fba6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -17,27 +17,19 @@ module OpenAI ) ) end - def step_details - end + attr_reader :step_details sig do params( - _: T.any( + step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - ) - ) - end - def step_details=(_) + .void end + attr_writer :step_details # The delta containing the fields that have changed on the run step. sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index bd418312..08c7adf8 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -8,33 +8,18 @@ module OpenAI class RunStepDeltaEvent < OpenAI::BaseModel # The identifier of the run step, which can be referenced in API endpoints. 
sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The delta containing the fields that have changed on the run step. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) } - def delta - end + attr_reader :delta - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)) - end - def delta=(_) - end + sig { params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)).void } + attr_writer :delta # The object type, which is always `thread.run.step.delta`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # Represents a run step delta i.e. any changed fields on a run step during # streaming. diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 3df236cc..a13b01a2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -8,33 +8,21 @@ module OpenAI class RunStepDeltaMessageDelta < OpenAI::BaseModel # Always `message_creation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation)) } - def message_creation - end + attr_reader :message_creation sig do params( - _: T.any( + message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Util::AnyHash - ) - ) - end - def message_creation=(_) + .void end + attr_writer :message_creation # Details of the message creation by the run step. sig do @@ -65,12 +53,10 @@ module OpenAI class MessageCreation < OpenAI::BaseModel # The ID of the message that was created by this run step. sig { returns(T.nilable(String)) } - def message_id - end + attr_reader :message_id - sig { params(_: String).returns(String) } - def message_id=(_) - end + sig { params(message_id: String).void } + attr_writer :message_id sig { params(message_id: String).returns(T.attached_class) } def self.new(message_id: nil) diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index fefcb8ac..be9ab4d9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -10,36 +10,27 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. 
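          # A pagination sketch (the client call shape and the `.data` accessor are
          # assumptions for illustration):
          #
          #   page = client.beta.threads.runs.steps.list(run_id, thread_id: thread_id, limit: 20)
          #   page = client.beta.threads.runs.steps.list(run_id, thread_id: thread_id, after: page.data.last.id)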
sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` @@ -49,38 +40,26 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } - def include - end + attr_reader :include - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end - def include=(_) - end + sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + attr_writer :include # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) - .returns(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 42271a87..c65e2b00 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -10,20 +10,10 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def thread_id - end - - sig { params(_: String).returns(String) } - def thread_id=(_) - end + attr_accessor :thread_id sig { returns(String) } - def run_id - end - - sig { params(_: String).returns(String) } - def run_id=(_) - end + attr_accessor :run_id # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` @@ -33,15 +23,10 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
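As a usage sketch of the cursor parameters documented above; the client call shape and the page interface are assumptions for illustration, not definitions from this patch:

    # Hypothetical pagination loop over run steps, assuming a configured
    # `client` exposing `beta.threads.runs.steps.list` and a page object
    # with `data` and `has_more`.
    after = nil
    loop do
      page = client.beta.threads.runs.steps.list(
        "run_456",               # placeholder run ID
        thread_id: "thread_123", # placeholder thread ID
        limit: 20,               # 1..100, default 20
        order: :asc,             # sorted by the created_at timestamp
        **(after ? {after: after} : {})
      )
      page.data.each { |step| puts step.id }
      break unless page.has_more
      after = page.data.last.id  # cursor: last object ID of this page
    end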
sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } - def include - end + attr_reader :include - sig do - params(_: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - .returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]) - end - def include=(_) - end + sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + attr_writer :include sig do params( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 17d54160..c4833e7c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -8,12 +8,7 @@ module OpenAI class ToolCallDeltaObject < OpenAI::BaseModel # Always `tool_calls`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or @@ -31,12 +26,11 @@ module OpenAI ) ) end - def tool_calls - end + attr_reader :tool_calls sig do params( - _: T::Array[ + tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Util::AnyHash, @@ -45,19 +39,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) - ] - ) - end - def tool_calls=(_) + .void end + attr_writer :tool_calls # Details of the tool call. sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index d244c946..0dade5cb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -20,40 +20,11 @@ module OpenAI ] ) end - def tool_calls - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) - ] - ) - end - def tool_calls=(_) - end + attr_accessor :tool_calls # Always `tool_calls`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Details of the tool call. 
sig do diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index eda70b49..9dd44292 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -15,38 +15,11 @@ module OpenAI ] ) end - def annotations - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) - ] - ) - end - def annotations=(_) - end + attr_accessor :annotations # The data that makes up the text. sig { returns(String) } - def value - end - - sig { params(_: String).returns(String) } - def value=(_) - end + attr_accessor :value sig do params( diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 2ba0d542..18c16ea3 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -6,24 +6,14 @@ module OpenAI module Threads class TextContentBlock < OpenAI::BaseModel sig { returns(OpenAI::Models::Beta::Threads::Text) } - def text - end + attr_reader :text - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)) - end - def text=(_) - end + sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)).void } + attr_writer :text # Always `text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The text content that is part of a message. sig do diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi index 58764a6f..f59cbbd5 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi @@ -7,21 +7,11 @@ module OpenAI class TextContentBlockParam < OpenAI::BaseModel # Text content to be sent to the model sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Always `text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The text content that is part of a message. sig { params(text: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index d7051ba2..2f225af3 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -17,12 +17,11 @@ module OpenAI ) ) end - def annotations - end + attr_reader :annotations sig do params( - _: T::Array[ + annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Util::AnyHash, @@ -30,27 +29,16 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Util::AnyHash, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) - ] - ) - end - def annotations=(_) + .void end + attr_writer :annotations # The data that makes up the text. 
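The `annotations` accessor above mixes two annotation variants; a sketch of filtering for file citations. The nested `file_citation.file_id` shape is an assumption taken from the public API, since those annotation classes are defined elsewhere:

    # Collect the file IDs cited by a Text value.
    # Assumes `text` is an OpenAI::Models::Beta::Threads::Text.
    file_ids = text.annotations.filter_map do |ann|
      case ann
      when OpenAI::Models::Beta::Threads::FileCitationAnnotation
        ann.file_citation.file_id # nested field names assumed
      end
    end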
sig { returns(T.nilable(String)) } - def value - end + attr_reader :value - sig { params(_: String).returns(String) } - def value=(_) - end + sig { params(value: String).void } + attr_writer :value sig do params( diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index b694fadd..0e37546f 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -7,32 +7,17 @@ module OpenAI class TextDeltaBlock < OpenAI::BaseModel # The index of the content part in the message. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Always `text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { returns(T.nilable(OpenAI::Models::Beta::Threads::TextDelta)) } - def text - end + attr_reader :text - sig do - params(_: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash)) - end - def text=(_) - end + sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash)).void } + attr_writer :text # The text content that is part of a message. sig do diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 45ff9cca..f48fc10a 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -6,88 +6,45 @@ module OpenAI class ChatCompletion < OpenAI::BaseModel # A unique identifier for the chat completion. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A list of chat completion choices. Can be more than one if `n` is greater # than 1. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) } - def choices - end - - sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) - end - def choices=(_) - end + attr_accessor :choices # The Unix timestamp (in seconds) of when the chat completion was created. sig { returns(Integer) } - def created - end - - sig { params(_: Integer).returns(Integer) } - def created=(_) - end + attr_accessor :created # The model used for the chat completion. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always `chat.completion`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The service tier used for processing the request. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) } - def service_tier - end - - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) - end - def service_tier=(_) - end + attr_accessor :service_tier # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. 
sig { returns(T.nilable(String)) } - def system_fingerprint - end + attr_reader :system_fingerprint - sig { params(_: String).returns(String) } - def system_fingerprint=(_) - end + sig { params(system_fingerprint: String).void } + attr_writer :system_fingerprint # Usage statistics for the completion request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } - def usage - end + attr_reader :usage - sig do - params(_: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) - end - def usage=(_) - end + sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)).void } + attr_writer :usage # Represents a chat completion response returned by model, based on the provided # input. @@ -142,48 +99,30 @@ module OpenAI # model called a tool, or `function_call` (deprecated) if the model called a # function. sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } - def finish_reason - end - - sig do - params(_: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - .returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - end - def finish_reason=(_) - end + attr_accessor :finish_reason # The index of the choice in the list of choices. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Log probability information for the choice. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs)) } - def logprobs - end + attr_reader :logprobs sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash))) - end - def logprobs=(_) + params( + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :logprobs # A chat completion message generated by the model. sig { returns(OpenAI::Models::Chat::ChatCompletionMessage) } - def message - end + attr_reader :message - sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)) - end - def message=(_) - end + sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)).void } + attr_writer :message sig do params( @@ -242,27 +181,11 @@ module OpenAI class Logprobs < OpenAI::BaseModel # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } - def content - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - end - def content=(_) - end + attr_accessor :content # A list of message refusal tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } - def refusal - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - end - def refusal=(_) - end + attr_accessor :refusal # Log probability information for the choice. 
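A small sketch of reading the accessors declared above from a non-streaming response; the `completion` object is assumed to come from a chat completions request:

    # Assumes `completion` is an OpenAI::Models::Chat::ChatCompletion.
    choice = completion.choices.first
    puts "id=#{completion.id} model=#{completion.model}"
    puts "finish_reason=#{choice.finish_reason}" # e.g. :stop, :length, :tool_calls
    puts choice.message.content unless choice.message.content.nil?
    puts "total_tokens=#{completion.usage.total_tokens}" if completion.usage # nilable reader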
sig do diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 260791cc..c36302c3 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -6,29 +6,20 @@ module OpenAI class ChatCompletionAssistantMessageParam < OpenAI::BaseModel # The role of the messages author, in this case `assistant`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) } - def audio - end + attr_reader :audio sig do params( - _: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) - ) - end - def audio=(_) + .void end + attr_writer :audio # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. @@ -47,91 +38,46 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ) - ) - ) - .returns( - T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ) - ) - ) - end - def content=(_) - end + attr_accessor :content # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) } - def function_call - end + attr_reader :function_call sig do params( - _: T.nilable( + function_call: T.nilable( T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) ) ) - .returns( - T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) - ) - ) - end - def function_call=(_) + .void end + attr_writer :function_call # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # The refusal message by the assistant. sig { returns(T.nilable(String)) } - def refusal - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def refusal=(_) - end + attr_accessor :refusal # The tool calls generated by the model, such as function calls. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } - def tool_calls - end + attr_reader :tool_calls sig do - params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) - end - def tool_calls=(_) + params( + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)] + ) + .void end + attr_writer :tool_calls # Messages sent by the model in response to user messages. sig do @@ -192,12 +138,7 @@ module OpenAI class Audio < OpenAI::BaseModel # Unique identifier for a previous audio response from the model. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). @@ -289,21 +230,11 @@ module OpenAI # hallucinate parameters not defined by your function schema. Validate the # arguments in your code before calling your function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index 5b7b1eae..7cd9cc72 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -6,41 +6,21 @@ module OpenAI class ChatCompletionAudio < OpenAI::BaseModel # Unique identifier for this audio response. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # Base64 encoded audio bytes generated by the model, in the format specified in # the request. sig { returns(String) } - def data - end - - sig { params(_: String).returns(String) } - def data=(_) - end + attr_accessor :data # The Unix timestamp (in seconds) for when this audio response will no longer be # accessible on the server for use in multi-turn conversations. sig { returns(Integer) } - def expires_at - end - - sig { params(_: Integer).returns(Integer) } - def expires_at=(_) - end + attr_accessor :expires_at # Transcript of the audio generated by the model. sig { returns(String) } - def transcript - end - - sig { params(_: String).returns(String) } - def transcript=(_) - end + attr_accessor :transcript # If the audio output modality is requested, this object contains data about the # audio response from the model. diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index cd667fba..3555a183 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -7,28 +7,12 @@ module OpenAI # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. 
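Per the comments above, `data` is Base64-encoded audio in the requested format and `expires_at` is a Unix timestamp; a sketch of persisting it (the file name and the mp3 format are assumptions):

    require "base64"

    # Assumes `audio` is an OpenAI::Models::Chat::ChatCompletionAudio and the
    # request asked for mp3 output.
    File.binwrite("reply.mp3", Base64.decode64(audio.data))
    puts audio.transcript
    warn "audio response expires soon" if audio.expires_at < Time.now.to_i + 60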
sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } - def format_ - end - - sig do - params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) - end - def format_=(_) - end + attr_accessor :format_ # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } - def voice - end - - sig do - params(_: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - end - def voice=(_) - end + attr_accessor :voice # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index e64d0cbc..9564a77e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -6,77 +6,39 @@ module OpenAI class ChatCompletionChunk < OpenAI::BaseModel # A unique identifier for the chat completion. Each chunk has the same ID. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A list of chat completion choices. Can contain more than one elements if `n` is # greater than 1. Can also be empty for the last chunk if you set # `stream_options: {"include_usage": true}`. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) } - def choices - end - - sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) - end - def choices=(_) - end + attr_accessor :choices # The Unix timestamp (in seconds) of when the chat completion was created. Each # chunk has the same timestamp. sig { returns(Integer) } - def created - end - - sig { params(_: Integer).returns(Integer) } - def created=(_) - end + attr_accessor :created # The model to generate the completion. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always `chat.completion.chunk`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The service tier used for processing the request. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) } - def service_tier - end - - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) - end - def service_tier=(_) - end + attr_accessor :service_tier # This fingerprint represents the backend configuration that the model runs with. # Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. 
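One naming detail in the audio param accessors above: the output-format field is `format_`, presumably (an assumption, not stated in the patch) to avoid shadowing Ruby's `Kernel#format`. Constructing the param might look like:

    # Sketch; the symbol values are the documented ones, the constructor
    # shape is assumed from the declared accessors.
    audio_param = OpenAI::Models::Chat::ChatCompletionAudioParam.new(
      format_: :mp3,  # one of :wav, :mp3, :flac, :opus, :pcm16
      voice: :alloy   # e.g. :alloy, :ash, :ballad, :coral, :echo, :sage, :shimmer
    )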
sig { returns(T.nilable(String)) } - def system_fingerprint - end + attr_reader :system_fingerprint - sig { params(_: String).returns(String) } - def system_fingerprint=(_) - end + sig { params(system_fingerprint: String).void } + attr_writer :system_fingerprint # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it @@ -86,15 +48,10 @@ module OpenAI # **NOTE:** If the stream is interrupted or cancelled, you may not receive the # final usage chunk which contains the total token usage for the request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } - def usage - end + attr_reader :usage - sig do - params(_: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))) - end - def usage=(_) - end + sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))).void } + attr_writer :usage # Represents a streamed chunk of a chat completion response returned by the model, # based on the provided input. @@ -145,15 +102,10 @@ module OpenAI class Choice < OpenAI::BaseModel # A chat completion delta generated by streamed model responses. sig { returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) } - def delta - end + attr_reader :delta - sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)) - end - def delta=(_) - end + sig { params(delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)).void } + attr_writer :delta # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -162,40 +114,23 @@ module OpenAI # model called a tool, or `function_call` (deprecated) if the model called a # function. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } - def finish_reason - end - - sig do - params(_: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) - end - def finish_reason=(_) - end + attr_accessor :finish_reason # The index of the choice in the list of choices. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # Log probability information for the choice. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs)) } - def logprobs - end + attr_reader :logprobs sig do params( - _: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) ) - .returns( - T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash)) - ) - end - def logprobs=(_) + .void end + attr_writer :logprobs sig do params( @@ -226,65 +161,42 @@ module OpenAI class Delta < OpenAI::BaseModel # The contents of the chunk message. 
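The usage note above implies a detection pattern for the final chunk; a sketch, assuming `stream` yields `ChatCompletionChunk` objects:

    # With `stream_options: {include_usage: true}`, the final chunk carries
    # usage and, per the docs above, an empty choices array.
    stream.each do |chunk|
      if chunk.choices.empty? && chunk.usage
        puts "prompt=#{chunk.usage.prompt_tokens} completion=#{chunk.usage.completion_tokens}"
      end
    end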
sig { returns(T.nilable(String)) } - def content - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def content=(_) - end + attr_accessor :content # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall)) } - def function_call - end + attr_reader :function_call sig do params( - _: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash) + function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash) - ) - end - def function_call=(_) + .void end + attr_writer :function_call # The refusal message generated by the model. sig { returns(T.nilable(String)) } - def refusal - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def refusal=(_) - end + attr_accessor :refusal # The role of the author of this message. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)) } - def role - end + attr_reader :role - sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol) - end - def role=(_) - end + sig { params(role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol).void } + attr_writer :role sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall])) } - def tool_calls - end + attr_reader :tool_calls sig do params( - _: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] ) - .returns( - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)] - ) - end - def tool_calls=(_) + .void end + attr_writer :tool_calls # A chat completion delta generated by streamed model responses. sig do @@ -321,21 +233,17 @@ module OpenAI # hallucinate parameters not defined by your function schema. Validate the # arguments in your code before calling your function. sig { returns(T.nilable(String)) } - def arguments - end + attr_reader :arguments - sig { params(_: String).returns(String) } - def arguments=(_) - end + sig { params(arguments: String).void } + attr_writer :arguments # The name of the function to call. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. @@ -374,48 +282,32 @@ module OpenAI class ToolCall < OpenAI::BaseModel sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The ID of the tool call. 
sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function)) } - def function - end + attr_reader :function sig do params( - _: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash) + function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash) - ) - end - def function=(_) + .void end + attr_writer :function # The type of the tool. Currently, only `function` is supported. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol).void } + attr_writer :type sig do params( @@ -449,21 +341,17 @@ module OpenAI # hallucinate parameters not defined by your function schema. Validate the # arguments in your code before calling your function. sig { returns(T.nilable(String)) } - def arguments - end + attr_reader :arguments - sig { params(_: String).returns(String) } - def arguments=(_) - end + sig { params(arguments: String).void } + attr_writer :arguments # The name of the function to call. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments: nil, name: nil) @@ -531,27 +419,11 @@ module OpenAI class Logprobs < OpenAI::BaseModel # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } - def content - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - end - def content=(_) - end + attr_accessor :content # A list of message refusal tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } - def refusal - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) - end - def refusal=(_) - end + attr_accessor :refusal # Log probability information for the choice. 
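The nilable readers above reflect that a streamed tool call arrives in fragments keyed by `index`; a sketch of stitching the fragments back together (the streaming loop itself is assumed):

    # Assumes `stream` yields OpenAI::Models::Chat::ChatCompletionChunk objects.
    calls = Hash.new { |h, k| h[k] = {id: nil, name: nil, arguments: +""} }
    stream.each do |chunk|
      chunk.choices.each do |choice|
        (choice.delta.tool_calls || []).each do |tc|
          acc = calls[tc.index]  # index is always present on a fragment
          acc[:id] ||= tc.id     # id arrives on the first fragment only
          acc[:name] ||= tc.function&.name
          acc[:arguments] << (tc.function&.arguments || "") # streams in pieces
        end
      end
    end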
sig do diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 2392c0a6..ea7cbd9d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -22,24 +22,17 @@ module OpenAI class File < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } - def file - end + attr_reader :file sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash)) - end - def file=(_) + params(file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash)) + .void end + attr_writer :file # The type of the content part. Always `file`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text # generation. @@ -61,30 +54,24 @@ module OpenAI # The base64 encoded file data, used when passing the file to the model as a # string. sig { returns(T.nilable(String)) } - def file_data - end + attr_reader :file_data - sig { params(_: String).returns(String) } - def file_data=(_) - end + sig { params(file_data: String).void } + attr_writer :file_data # The ID of an uploaded file to use as input. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The name of the file, used when passing the file to the model as a string. sig { returns(T.nilable(String)) } - def filename - end + attr_reader :filename - sig { params(_: String).returns(String) } - def filename=(_) - end + sig { params(filename: String).void } + attr_writer :filename sig { params(file_data: String, file_id: String, filename: String).returns(T.attached_class) } def self.new(file_data: nil, file_id: nil, filename: nil) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index af0de7f9..f856dce5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -5,24 +5,19 @@ module OpenAI module Chat class ChatCompletionContentPartImage < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) } - def image_url - end + attr_reader :image_url sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash)) - end - def image_url=(_) + params( + image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :image_url # The type of the content part. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). sig do @@ -44,25 +39,15 @@ module OpenAI class ImageURL < OpenAI::BaseModel # Either a URL of the image or the base64 encoded image data. 
sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } - def detail - end + attr_reader :detail - sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol) - end - def detail=(_) - end + sig { params(detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol).void } + attr_writer :detail sig do params( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 2abced72..318356fb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -5,28 +5,19 @@ module OpenAI module Chat class ChatCompletionContentPartInputAudio < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) } - def input_audio - end + attr_reader :input_audio sig do params( - _: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash) + input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash) - ) - end - def input_audio=(_) + .void end + attr_writer :input_audio # The type of the content part. Always `input_audio`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). sig do @@ -51,24 +42,11 @@ module OpenAI class InputAudio < OpenAI::BaseModel # Base64 encoded audio data. sig { returns(String) } - def data - end - - sig { params(_: String).returns(String) } - def data=(_) - end + attr_accessor :data # The format of the encoded audio data. Currently supports "wav" and "mp3". sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) } - def format_ - end - - sig do - params(_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) - .returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) - end - def format_=(_) - end + attr_accessor :format_ sig do params( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 1cae47c2..788cfecf 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -6,21 +6,11 @@ module OpenAI class ChatCompletionContentPartRefusal < OpenAI::BaseModel # The refusal message generated by the model. sig { returns(String) } - def refusal - end - - sig { params(_: String).returns(String) } - def refusal=(_) - end + attr_accessor :refusal # The type of the content part. 
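A construction sketch for the image part declared above; passing a plain hash for `image_url` leans on the `AnyHash` half of the writer's union and assumes the base model coerces hashes into the nested type:

    part = OpenAI::Models::Chat::ChatCompletionContentPartImage.new(
      image_url: {
        url: "https://example.com/cat.png", # or a base64 data URL
        detail: :low                        # detail values assumed: :low, :high, :auto
      }
    )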
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(refusal: String, type: Symbol).returns(T.attached_class) } def self.new(refusal:, type: :refusal) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index 2fcd20fa..1cb1a46c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -6,21 +6,11 @@ module OpenAI class ChatCompletionContentPartText < OpenAI::BaseModel # The text content. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the content part. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 8ef5a9d5..9affbd35 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -6,30 +6,15 @@ module OpenAI class ChatCompletionDeleted < OpenAI::BaseModel # The ID of the chat completion that was deleted. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # Whether the chat completion was deleted. sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted # The type of object being deleted. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"chat.completion.deleted") diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 216c218d..ddab2743 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -6,34 +6,19 @@ module OpenAI class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # The contents of the developer message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } - def content - end - - sig do - params(_: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - .returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - end - def content=(_) - end + attr_accessor :content # The role of the messages author, in this case `developer`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. 
sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index e1d6c904..11ce4c45 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -6,12 +6,7 @@ module OpenAI class ChatCompletionFunctionCallOption < OpenAI::BaseModel # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # Specifying a particular function via `{"name": "my_function"}` forces the model # to call that function. diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 665339f5..43ae6ea4 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -6,30 +6,15 @@ module OpenAI class ChatCompletionFunctionMessageParam < OpenAI::BaseModel # The contents of the function message. sig { returns(T.nilable(String)) } - def content - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def content=(_) - end + attr_accessor :content # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The role of the messages author, in this case `function`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role sig { params(content: T.nilable(String), name: String, role: Symbol).returns(T.attached_class) } def self.new(content:, name:, role: :function) diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 12d3f9c5..3d52bbbb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -6,82 +6,62 @@ module OpenAI class ChatCompletionMessage < OpenAI::BaseModel # The contents of the message. sig { returns(T.nilable(String)) } - def content - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def content=(_) - end + attr_accessor :content # The refusal message generated by the model. sig { returns(T.nilable(String)) } - def refusal - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def refusal=(_) - end + attr_accessor :refusal # The role of the author of this message. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation])) } - def annotations - end + attr_reader :annotations sig do - params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]) - end - def annotations=(_) + params( + annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)] + ) + .void end + attr_writer :annotations # If the audio output modality is requested, this object contains data about the # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) } - def audio - end + attr_reader :audio - sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))) - end - def audio=(_) - end + sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))).void } + attr_writer :audio # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall)) } - def function_call - end + attr_reader :function_call sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash)) - end - def function_call=(_) + params( + function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :function_call # The tool calls generated by the model, such as function calls. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } - def tool_calls - end + attr_reader :tool_calls sig do - params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]) - end - def tool_calls=(_) + params( + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)] + ) + .void end + attr_writer :tool_calls # A chat completion message generated by the model. sig do @@ -119,28 +99,19 @@ module OpenAI class Annotation < OpenAI::BaseModel # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A URL citation when using web search. sig { returns(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation) } - def url_citation - end + attr_reader :url_citation sig do params( - _: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash) + url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash) - ) - end - def url_citation=(_) + .void end + attr_writer :url_citation # A URL citation when using web search. 
sig do @@ -165,39 +136,19 @@ module OpenAI class URLCitation < OpenAI::BaseModel # The index of the last character of the URL citation in the message. sig { returns(Integer) } - def end_index - end - - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + attr_accessor :end_index # The index of the first character of the URL citation in the message. sig { returns(Integer) } - def start_index - end - - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + attr_accessor :start_index # The title of the web resource. sig { returns(String) } - def title - end - - sig { params(_: String).returns(String) } - def title=(_) - end + attr_accessor :title # The URL of the web resource. sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # A URL citation when using web search. sig do @@ -223,21 +174,11 @@ module OpenAI # hallucinate parameters not defined by your function schema. Validate the # arguments in your code before calling your function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index b4dcff08..851dff7f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -6,33 +6,23 @@ module OpenAI class ChatCompletionMessageToolCall < OpenAI::BaseModel # The ID of the tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The function that the model called. sig { returns(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function) } - def function - end + attr_reader :function sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash)) - end - def function=(_) + params( + function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :function # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params( @@ -60,21 +50,11 @@ module OpenAI # hallucinate parameters not defined by your function schema. Validate the # arguments in your code before calling your function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The function that the model called. 
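The `arguments` comments above warn that the model may emit invalid JSON or hallucinated parameters; a defensive parsing sketch:

    require "json"

    # Returns a Hash of arguments, or nil when the model emitted malformed or
    # non-object JSON. Assumes `tool_call` is an
    # OpenAI::Models::Chat::ChatCompletionMessageToolCall.
    def parse_tool_arguments(tool_call)
      parsed = JSON.parse(tool_call.function.arguments)
      parsed.is_a?(Hash) ? parsed : nil
    rescue JSON::ParserError
      nil # validate before dispatching to your own function
    end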
sig { params(arguments: String, name: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index a03dfbeb..4c4f4e58 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -5,24 +5,19 @@ module OpenAI module Chat class ChatCompletionNamedToolChoice < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) } - def function - end + attr_reader :function sig do - params(_: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash)) - end - def function=(_) + params( + function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :function # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Specifies a tool the model should use. Use to force the model to call a specific # function. @@ -43,12 +38,7 @@ module OpenAI class Function < OpenAI::BaseModel # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name sig { params(name: String).returns(T.attached_class) } def self.new(name:) diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 472440af..34ebf6a5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -8,25 +8,12 @@ module OpenAI # generated tokens would match this content, the entire model response can be # returned much more quickly. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } - def content - end - - sig do - params(_: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - .returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - end - def content=(_) - end + attr_accessor :content # The type of the predicted content you want to provide. This type is currently # always `content`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Static predicted output content, such as the content of a text file that is # being regenerated. diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi index 50654ac0..78838f7f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi @@ -6,12 +6,7 @@ module OpenAI class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # The identifier of the chat message. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A chat completion message generated by the model. 
sig { params(id: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index fe3e980e..bb0ee0d9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -12,12 +12,10 @@ module OpenAI # **NOTE:** If the stream is interrupted, you may not receive the final usage # chunk which contains the total token usage for the request. sig { returns(T.nilable(T::Boolean)) } - def include_usage - end + attr_reader :include_usage - sig { params(_: T::Boolean).returns(T::Boolean) } - def include_usage=(_) - end + sig { params(include_usage: T::Boolean).void } + attr_writer :include_usage # Options for streaming response. Only set this when you set `stream: true`. sig { params(include_usage: T::Boolean).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 6c3b80e1..d3cf9fc5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -6,34 +6,19 @@ module OpenAI class ChatCompletionSystemMessageParam < OpenAI::BaseModel # The contents of the system message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } - def content - end - - sig do - params(_: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - .returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - end - def content=(_) - end + attr_accessor :content # The role of the messages author, in this case `system`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index ea88fd66..33ce4b9e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -6,49 +6,26 @@ module OpenAI class ChatCompletionTokenLogprob < OpenAI::BaseModel # The token. sig { returns(String) } - def token - end - - sig { params(_: String).returns(String) } - def token=(_) - end + attr_accessor :token # A list of integers representing the UTF-8 bytes representation of the token. # Useful in instances where characters are represented by multiple tokens and # their byte representations must be combined to generate the correct text # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } - def bytes - end - - sig { params(_: T.nilable(T::Array[Integer])).returns(T.nilable(T::Array[Integer])) } - def bytes=(_) - end + attr_accessor :bytes # The log probability of this token, if it is within the top 20 most likely # tokens. 
Otherwise, the value `-9999.0` is used to signify that the token is very # unlikely. sig { returns(Float) } - def logprob - end - - sig { params(_: Float).returns(Float) } - def logprob=(_) - end + attr_accessor :logprob # List of the most likely tokens and their log probability, at this token # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) } - def top_logprobs - end - - sig do - params(_: T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) - end - def top_logprobs=(_) - end + attr_accessor :top_logprobs sig do params( @@ -79,35 +56,20 @@ module OpenAI class TopLogprob < OpenAI::BaseModel # The token. sig { returns(String) } - def token - end - - sig { params(_: String).returns(String) } - def token=(_) - end + attr_accessor :token # A list of integers representing the UTF-8 bytes representation of the token. # Useful in instances where characters are represented by multiple tokens and # their byte representations must be combined to generate the correct text # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } - def bytes - end - - sig { params(_: T.nilable(T::Array[Integer])).returns(T.nilable(T::Array[Integer])) } - def bytes=(_) - end + attr_accessor :bytes # The log probability of this token, if it is within the top 20 most likely # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very # unlikely. sig { returns(Float) } - def logprob - end - - sig { params(_: Float).returns(Float) } - def logprob=(_) - end + attr_accessor :logprob sig do params( diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index 8cd4fe09..03b8ecd1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -5,24 +5,14 @@ module OpenAI module Chat class ChatCompletionTool < OpenAI::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } - def function - end + attr_reader :function - sig do - params(_: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)) - end - def function=(_) - end + sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)).void } + attr_writer :function # The type of the tool. Currently, only `function` is supported. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index bd333634..8a16e044 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -6,33 +6,15 @@ module OpenAI class ChatCompletionToolMessageParam < OpenAI::BaseModel # The contents of the tool message. 
sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } - def content - end - - sig do - params(_: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - .returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) - end - def content=(_) - end + attr_accessor :content # The role of the messages author, in this case `tool`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # Tool call that this message is responding to. sig { returns(String) } - def tool_call_id - end - - sig { params(_: String).returns(String) } - def tool_call_id=(_) - end + attr_accessor :tool_call_id sig do params( diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index a9e388f6..96a94d0d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -20,58 +20,19 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the messages author, in this case `user`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # Messages sent by an end user, containing prompts or additional context # information. 
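Throughout these RBI files the hand-written reader/writer stubs collapse into a sigged `attr_accessor` for required fields and a split `attr_reader`/`attr_writer` pair for optional ones, where the reader is nilable but the writer takes only a concrete value. A minimal runnable sketch of that shape, assuming `sorbet-runtime` is installed — the class and field names here are illustrative, not from this SDK:

    # typed: strict
    require "sorbet-runtime"

    # Illustrative class; not part of the patch.
    class ExampleMessageParam
      extend T::Sig

      # Required field: a single accessor, same type on read and write.
      sig { returns(String) }
      attr_accessor :content

      # Optional field: the reader may return nil (unset), but the writer
      # only accepts a concrete String, so each half gets its own sig.
      sig { returns(T.nilable(String)) }
      attr_reader :name

      sig { params(name: String).void }
      attr_writer :name

      sig { params(content: String, name: T.nilable(String)).void }
      def initialize(content:, name: nil)
        @content = content
        @name = name
      end
    end

    msg = ExampleMessageParam.new(content: "hello")
    msg.name = "alice" # type-checked by sorbet-runtime at call time

At runtime the attr_* forms define the same `#name`/`#name=` methods as the deleted `def` pairs; only the declaration style changes.
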
diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index cf85872a..cd89b96c 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -27,37 +27,7 @@ module OpenAI ] ) end - def messages - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - ] - ) - end - def messages=(_) - end + attr_accessor :messages # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -65,40 +35,22 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) } - def audio - end + attr_reader :audio - sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))) - end - def audio=(_) - end + sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))).void } + attr_writer :audio # Number between -2.0 and 2.0. Positive values penalize new tokens based on their # existing frequency in the text so far, decreasing the model's likelihood to # repeat the same line verbatim. sig { returns(T.nilable(Float)) } - def frequency_penalty - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def frequency_penalty=(_) - end + attr_accessor :frequency_penalty # Deprecated in favor of `tool_choice`. # @@ -124,41 +76,33 @@ module OpenAI ) ) end - def function_call - end + attr_reader :function_call sig do params( - _: T.any( + function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Util::AnyHash - ) - ) - end - def function_call=(_) + .void end + attr_writer :function_call # Deprecated in favor of `tools`. 
# # A list of functions the model may generate JSON inputs for. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function])) } - def functions - end + attr_reader :functions sig do - params(_: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]) - end - def functions=(_) + params( + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)] + ) + .void end + attr_writer :functions # Modify the likelihood of specified tokens appearing in the completion. # @@ -169,34 +113,19 @@ module OpenAI # increase likelihood of selection; values like -100 or 100 should result in a ban # or exclusive selection of the relevant token. sig { returns(T.nilable(T::Hash[Symbol, Integer])) } - def logit_bias - end - - sig { params(_: T.nilable(T::Hash[Symbol, Integer])).returns(T.nilable(T::Hash[Symbol, Integer])) } - def logit_bias=(_) - end + attr_accessor :logit_bias # Whether to return log probabilities of the output tokens or not. If true, # returns the log probabilities of each output token returned in the `content` of # `message`. sig { returns(T.nilable(T::Boolean)) } - def logprobs - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def logprobs=(_) - end + attr_accessor :logprobs # An upper bound for the number of tokens that can be generated for a completion, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } - def max_completion_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_completion_tokens=(_) - end + attr_accessor :max_completion_tokens # The maximum number of [tokens](/tokenizer) that can be generated in the chat # completion. This value can be used to control @@ -206,12 +135,7 @@ module OpenAI # compatible with # [o1 series models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } - def max_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_tokens=(_) - end + attr_accessor :max_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -220,12 +144,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # Output types that you would like the model to generate. Most models are capable # of generating text, which is the default: @@ -238,61 +157,41 @@ module OpenAI # # `["text", "audio"]` sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } - def modalities - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) - .returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) - end - def modalities=(_) - end + attr_accessor :modalities # How many chat completion choices to generate for each input message. 
Note that # you will be charged based on the number of generated tokens across all of the # choices. Keep `n` as `1` to minimize costs. sig { returns(T.nilable(Integer)) } - def n - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def n=(_) - end + attr_accessor :n # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. sig { returns(T.nilable(T::Boolean)) } - def parallel_tool_calls - end + attr_reader :parallel_tool_calls - sig { params(_: T::Boolean).returns(T::Boolean) } - def parallel_tool_calls=(_) - end + sig { params(parallel_tool_calls: T::Boolean).void } + attr_writer :parallel_tool_calls # Static predicted output content, such as the content of a text file that is # being regenerated. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) } - def prediction - end + attr_reader :prediction sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash))) - end - def prediction=(_) + params( + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :prediction # Number between -2.0 and 2.0. Positive values penalize new tokens based on # whether they appear in the text so far, increasing the model's likelihood to # talk about new topics. sig { returns(T.nilable(Float)) } - def presence_penalty - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def presence_penalty=(_) - end + attr_accessor :presence_penalty # **o-series models only** # @@ -301,15 +200,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } - def reasoning_effort - end - - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end - def reasoning_effort=(_) - end + attr_accessor :reasoning_effort # An object specifying the format that the model must output. # @@ -332,29 +223,20 @@ module OpenAI ) ) end - def response_format - end + attr_reader :response_format sig do params( - _: T.any( + response_format: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ) ) - .returns( - T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ) - ) - end - def response_format=(_) + .void end + attr_writer :response_format # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and @@ -362,12 +244,7 @@ module OpenAI # should refer to the `system_fingerprint` response parameter to monitor changes # in the backend. sig { returns(T.nilable(Integer)) } - def seed - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def seed=(_) - end + attr_accessor :seed # Specifies the latency tier to use for processing the request. 
This parameter is # relevant for customers subscribed to the scale tier service: @@ -384,69 +261,37 @@ module OpenAI # When this parameter is set, the response body will include the `service_tier` # utilized. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } - def service_tier - end - - sig do - params(_: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) - end - def service_tier=(_) - end + attr_accessor :service_tier # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } - def stop - end - - sig do - params( - _: T.nilable( - T.any( - String, - T::Array[String] - ) - ) - ).returns(T.nilable(T.any(String, T::Array[String]))) - end - def stop=(_) - end + attr_accessor :stop # Whether or not to store the output of this chat completion request for use in # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. sig { returns(T.nilable(T::Boolean)) } - def store - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def store=(_) - end + attr_accessor :store # Options for streaming response. Only set this when you set `stream: true`. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) } - def stream_options - end + attr_reader :stream_options sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) - end - def stream_options=(_) + params( + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :stream_options # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but # not both. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can @@ -467,52 +312,34 @@ module OpenAI ) ) end - def tool_choice - end + attr_reader :tool_choice sig do params( - _: T.any( + tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Util::AnyHash - ) - ) - end - def tool_choice=(_) + .void end + attr_writer :tool_choice # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) } - def tools - end + attr_reader :tools - sig do - params(_: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) - .returns(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]) - end - def tools=(_) - end + sig { params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]).void } + attr_writer :tools # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. # `logprobs` must be set to `true` if this parameter is used. sig { returns(T.nilable(Integer)) } - def top_logprobs - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def top_logprobs=(_) - end + attr_accessor :top_logprobs # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -520,37 +347,30 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions)) } - def web_search_options - end + attr_reader :web_search_options sig do - params(_: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash)) - end - def web_search_options=(_) + params( + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :web_search_options sig do params( @@ -794,22 +614,15 @@ module OpenAI # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # A description of what the function does, used by the model to choose when and # how to call the function. sig { returns(T.nilable(String)) } - def description - end + attr_reader :description - sig { params(_: String).returns(String) } - def description=(_) - end + sig { params(description: String).void } + attr_writer :description # The parameters the functions accepts, described as a JSON Schema object. See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, @@ -819,12 +632,10 @@ module OpenAI # # Omitting `parameters` defines a function with an empty parameter list. 
sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - def parameters - end + attr_reader :parameters - sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } - def parameters=(_) - end + sig { params(parameters: T::Hash[Symbol, T.anything]).void } + attr_writer :parameters sig do params(name: String, description: String, parameters: T::Hash[Symbol, T.anything]) @@ -948,35 +759,29 @@ module OpenAI T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) ) end - def search_context_size - end + attr_reader :search_context_size sig do - params(_: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - .returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) - end - def search_context_size=(_) + params( + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol + ) + .void end + attr_writer :search_context_size # Approximate location parameters for the search. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)) } - def user_location - end + attr_reader :user_location sig do params( - _: T.nilable( + user_location: T.nilable( T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash) ) ) - .returns( - T.nilable( - T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash) - ) - ) - end - def user_location=(_) + .void end + attr_writer :user_location # This tool searches the web for relevant results to use in a response. Learn more # about the @@ -1051,34 +856,22 @@ module OpenAI class UserLocation < OpenAI::BaseModel # Approximate location parameters for the search. sig { returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) } - def approximate - end + attr_reader :approximate sig do params( - _: T.any( + approximate: T.any( OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - OpenAI::Util::AnyHash - ) - ) - end - def approximate=(_) + .void end + attr_writer :approximate # The type of location approximation. Always `approximate`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Approximate location parameters for the search. sig do @@ -1109,41 +902,33 @@ module OpenAI class Approximate < OpenAI::BaseModel # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } - def city - end + attr_reader :city - sig { params(_: String).returns(String) } - def city=(_) - end + sig { params(city: String).void } + attr_writer :city # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. sig { returns(T.nilable(String)) } - def country - end + attr_reader :country - sig { params(_: String).returns(String) } - def country=(_) - end + sig { params(country: String).void } + attr_writer :country # Free text input for the region of the user, e.g. `California`. 
sig { returns(T.nilable(String)) } - def region - end + attr_reader :region - sig { params(_: String).returns(String) } - def region=(_) - end + sig { params(region: String).void } + attr_writer :region # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } - def timezone - end + attr_reader :timezone - sig { params(_: String).returns(String) } - def timezone=(_) - end + sig { params(timezone: String).void } + attr_writer :timezone # Approximate location parameters for the search. sig do diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index eb6f581d..b4b185ec 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -9,54 +9,38 @@ module OpenAI # Identifier for the last chat completion from the previous pagination request. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # Number of Chat Completions to retrieve. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # A list of metadata keys to filter the Chat Completions by. Example: # # `metadata[key1]=value1&metadata[key2]=value2` sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The model used to generate the Chat Completions. sig { returns(T.nilable(String)) } - def model - end + attr_reader :model - sig { params(_: String).returns(String) } - def model=(_) - end + sig { params(model: String).void } + attr_writer :model # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) - .returns(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index 0b68282e..c039b140 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -14,12 +14,7 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 0a60c940..382d3dd1 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -10,34 +10,25 @@ module OpenAI # Identifier for the last message from the previous pagination request. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # Number of messages to retrieve. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) - .returns(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index d9019724..58689eef 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -5,12 +5,7 @@ module OpenAI class ComparisonFilter < OpenAI::BaseModel # The key to compare against the value. sig { returns(String) } - def key - end - - sig { params(_: String).returns(String) } - def key=(_) - end + attr_accessor :key # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -21,25 +16,12 @@ module OpenAI # - `lt`: less than # - `lte`: less than or equal sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::ComparisonFilter::Type::OrSymbol) - .returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # The value to compare against the attribute key; supports string, number, or # boolean types. sig { returns(T.any(String, Float, T::Boolean)) } - def value - end - - sig { params(_: T.any(String, Float, T::Boolean)).returns(T.any(String, Float, T::Boolean)) } - def value=(_) - end + attr_accessor :value # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index e270ee5f..0f9d002f 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -5,72 +5,40 @@ module OpenAI class Completion < OpenAI::BaseModel # A unique identifier for the completion. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The list of completion choices the model generated for the input prompt. 
sig { returns(T::Array[OpenAI::Models::CompletionChoice]) } - def choices - end - - sig { params(_: T::Array[OpenAI::Models::CompletionChoice]).returns(T::Array[OpenAI::Models::CompletionChoice]) } - def choices=(_) - end + attr_accessor :choices # The Unix timestamp (in seconds) of when the completion was created. sig { returns(Integer) } - def created - end - - sig { params(_: Integer).returns(Integer) } - def created=(_) - end + attr_accessor :created # The model used for completion. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always "text_completion" sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. sig { returns(T.nilable(String)) } - def system_fingerprint - end + attr_reader :system_fingerprint - sig { params(_: String).returns(String) } - def system_fingerprint=(_) - end + sig { params(system_fingerprint: String).void } + attr_writer :system_fingerprint # Usage statistics for the completion request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } - def usage - end + attr_reader :usage - sig do - params(_: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)) - end - def usage=(_) - end + sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)).void } + attr_writer :usage # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index bdd63223..8e9b757d 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -8,42 +8,19 @@ module OpenAI # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. 
sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } - def finish_reason - end - - sig do - params(_: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - .returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - end - def finish_reason=(_) - end + attr_accessor :finish_reason sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index sig { returns(T.nilable(OpenAI::Models::CompletionChoice::Logprobs)) } - def logprobs - end + attr_reader :logprobs - sig do - params(_: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))) - end - def logprobs=(_) - end + sig { params(logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))).void } + attr_writer :logprobs sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text sig do params( @@ -94,36 +71,28 @@ module OpenAI class Logprobs < OpenAI::BaseModel sig { returns(T.nilable(T::Array[Integer])) } - def text_offset - end + attr_reader :text_offset - sig { params(_: T::Array[Integer]).returns(T::Array[Integer]) } - def text_offset=(_) - end + sig { params(text_offset: T::Array[Integer]).void } + attr_writer :text_offset sig { returns(T.nilable(T::Array[Float])) } - def token_logprobs - end + attr_reader :token_logprobs - sig { params(_: T::Array[Float]).returns(T::Array[Float]) } - def token_logprobs=(_) - end + sig { params(token_logprobs: T::Array[Float]).void } + attr_writer :token_logprobs sig { returns(T.nilable(T::Array[String])) } - def tokens - end + attr_reader :tokens - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def tokens=(_) - end + sig { params(tokens: T::Array[String]).void } + attr_writer :tokens sig { returns(T.nilable(T::Array[T::Hash[Symbol, Float]])) } - def top_logprobs - end + attr_reader :top_logprobs - sig { params(_: T::Array[T::Hash[Symbol, Float]]).returns(T::Array[T::Hash[Symbol, Float]]) } - def top_logprobs=(_) - end + sig { params(top_logprobs: T::Array[T::Hash[Symbol, Float]]).void } + attr_writer :top_logprobs sig do params( diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index d2c76ed8..74e9a9a9 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -12,15 +12,7 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # The prompt(s) to generate completions for, encoded as a string, array of # strings, array of tokens, or array of token arrays. 
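The hunks around here suggest a convention worth spelling out: request fields that may be sent as an explicit JSON null (`seed`, `stop`, `temperature`) keep a single nilable `attr_accessor`, while fields that are merely omittable (`user`, `tools`) split into a nilable reader and a non-nil writer. That reading is inferred from the sigs, not stated in the patch; a sketch under that assumption, again with `sorbet-runtime` and hypothetical names:

    # typed: strict
    require "sorbet-runtime"

    # Hypothetical class; field names borrowed from the hunks above.
    class CompletionParamsSketch
      extend T::Sig

      # Nullable field (e.g. `seed`): nil is itself a settable value, so
      # one nilable accessor covers both directions.
      sig { returns(T.nilable(Integer)) }
      attr_accessor :seed

      # Omittable field (e.g. `user`): nil only ever means "not set"; the
      # writer refuses nil, so reader and writer are declared separately.
      sig { returns(T.nilable(String)) }
      attr_reader :user

      sig { params(user: String).void }
      attr_writer :user

      sig { void }
      def initialize
        @seed = T.let(nil, T.nilable(Integer))
        @user = T.let(nil, T.nilable(String))
      end
    end

    params = CompletionParamsSketch.new
    params.seed = nil          # fine: an explicit null is meaningful here
    params.user = "user-1234"  # fine: a concrete String
    # params.user = nil        # would fail sorbet-runtime's sig check
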
@@ -31,15 +23,7 @@ module OpenAI sig do returns(T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]))) end - def prompt - end - - sig do - params(_: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]))) - .returns(T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]))) - end - def prompt=(_) - end + attr_accessor :prompt # Generates `best_of` completions server-side and returns the "best" (the one with # the highest log probability per token). Results cannot be streamed. @@ -51,21 +35,11 @@ module OpenAI # consume your token quota. Use carefully and ensure that you have reasonable # settings for `max_tokens` and `stop`. sig { returns(T.nilable(Integer)) } - def best_of - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def best_of=(_) - end + attr_accessor :best_of # Echo back the prompt in addition to the completion sig { returns(T.nilable(T::Boolean)) } - def echo - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def echo=(_) - end + attr_accessor :echo # Number between -2.0 and 2.0. Positive values penalize new tokens based on their # existing frequency in the text so far, decreasing the model's likelihood to @@ -73,12 +47,7 @@ module OpenAI # # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) sig { returns(T.nilable(Float)) } - def frequency_penalty - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def frequency_penalty=(_) - end + attr_accessor :frequency_penalty # Modify the likelihood of specified tokens appearing in the completion. # @@ -93,12 +62,7 @@ module OpenAI # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token # from being generated. sig { returns(T.nilable(T::Hash[Symbol, Integer])) } - def logit_bias - end - - sig { params(_: T.nilable(T::Hash[Symbol, Integer])).returns(T.nilable(T::Hash[Symbol, Integer])) } - def logit_bias=(_) - end + attr_accessor :logit_bias # Include the log probabilities on the `logprobs` most likely output tokens, as # well the chosen tokens. For example, if `logprobs` is 5, the API will return a @@ -107,12 +71,7 @@ module OpenAI # # The maximum value for `logprobs` is 5. sig { returns(T.nilable(Integer)) } - def logprobs - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def logprobs=(_) - end + attr_accessor :logprobs # The maximum number of [tokens](/tokenizer) that can be generated in the # completion. @@ -122,12 +81,7 @@ module OpenAI # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. sig { returns(T.nilable(Integer)) } - def max_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_tokens=(_) - end + attr_accessor :max_tokens # How many completions to generate for each prompt. # @@ -135,12 +89,7 @@ module OpenAI # consume your token quota. Use carefully and ensure that you have reasonable # settings for `max_tokens` and `stop`. sig { returns(T.nilable(Integer)) } - def n - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def n=(_) - end + attr_accessor :n # Number between -2.0 and 2.0. 
Positive values penalize new tokens based on # whether they appear in the text so far, increasing the model's likelihood to @@ -148,12 +97,7 @@ module OpenAI # # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) sig { returns(T.nilable(Float)) } - def presence_penalty - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def presence_penalty=(_) - end + attr_accessor :presence_penalty # If specified, our system will make a best effort to sample deterministically, # such that repeated requests with the same `seed` and parameters should return @@ -162,54 +106,30 @@ module OpenAI # Determinism is not guaranteed, and you should refer to the `system_fingerprint` # response parameter to monitor changes in the backend. sig { returns(T.nilable(Integer)) } - def seed - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def seed=(_) - end + attr_accessor :seed # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } - def stop - end - - sig do - params( - _: T.nilable( - T.any( - String, - T::Array[String] - ) - ) - ).returns(T.nilable(T.any(String, T::Array[String]))) - end - def stop=(_) - end + attr_accessor :stop # Options for streaming response. Only set this when you set `stream: true`. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) } - def stream_options - end + attr_reader :stream_options sig do - params(_: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))) - end - def stream_options=(_) + params( + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :stream_options # The suffix that comes after a completion of inserted text. # # This parameter is only supported for `gpt-3.5-turbo-instruct`. sig { returns(T.nilable(String)) } - def suffix - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def suffix=(_) - end + attr_accessor :suffix # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more @@ -217,12 +137,7 @@ module OpenAI # # We generally recommend altering this or `top_p` but not both. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -230,23 +145,16 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index d18a0c5d..bc7a3e80 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -5,54 +5,39 @@ module OpenAI class CompletionUsage < OpenAI::BaseModel # Number of tokens in the generated completion. sig { returns(Integer) } - def completion_tokens - end - - sig { params(_: Integer).returns(Integer) } - def completion_tokens=(_) - end + attr_accessor :completion_tokens # Number of tokens in the prompt. sig { returns(Integer) } - def prompt_tokens - end - - sig { params(_: Integer).returns(Integer) } - def prompt_tokens=(_) - end + attr_accessor :prompt_tokens # Total number of tokens used in the request (prompt + completion). sig { returns(Integer) } - def total_tokens - end - - sig { params(_: Integer).returns(Integer) } - def total_tokens=(_) - end + attr_accessor :total_tokens # Breakdown of tokens used in a completion. sig { returns(T.nilable(OpenAI::Models::CompletionUsage::CompletionTokensDetails)) } - def completion_tokens_details - end + attr_reader :completion_tokens_details sig do - params(_: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash)) - end - def completion_tokens_details=(_) + params( + completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :completion_tokens_details # Breakdown of tokens used in the prompt. sig { returns(T.nilable(OpenAI::Models::CompletionUsage::PromptTokensDetails)) } - def prompt_tokens_details - end + attr_reader :prompt_tokens_details sig do - params(_: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)) - end - def prompt_tokens_details=(_) + params( + prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :prompt_tokens_details # Usage statistics for the completion request. sig do @@ -93,42 +78,34 @@ module OpenAI # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. sig { returns(T.nilable(Integer)) } - def accepted_prediction_tokens - end + attr_reader :accepted_prediction_tokens - sig { params(_: Integer).returns(Integer) } - def accepted_prediction_tokens=(_) - end + sig { params(accepted_prediction_tokens: Integer).void } + attr_writer :accepted_prediction_tokens # Audio input tokens generated by the model. sig { returns(T.nilable(Integer)) } - def audio_tokens - end + attr_reader :audio_tokens - sig { params(_: Integer).returns(Integer) } - def audio_tokens=(_) - end + sig { params(audio_tokens: Integer).void } + attr_writer :audio_tokens # Tokens generated by the model for reasoning. 
sig { returns(T.nilable(Integer)) } - def reasoning_tokens - end + attr_reader :reasoning_tokens - sig { params(_: Integer).returns(Integer) } - def reasoning_tokens=(_) - end + sig { params(reasoning_tokens: Integer).void } + attr_writer :reasoning_tokens # When using Predicted Outputs, the number of tokens in the prediction that did # not appear in the completion. However, like reasoning tokens, these tokens are # still counted in the total completion tokens for purposes of billing, output, # and context window limits. sig { returns(T.nilable(Integer)) } - def rejected_prediction_tokens - end + attr_reader :rejected_prediction_tokens - sig { params(_: Integer).returns(Integer) } - def rejected_prediction_tokens=(_) - end + sig { params(rejected_prediction_tokens: Integer).void } + attr_writer :rejected_prediction_tokens # Breakdown of tokens used in a completion. sig do @@ -161,21 +138,17 @@ module OpenAI class PromptTokensDetails < OpenAI::BaseModel # Audio input tokens present in the prompt. sig { returns(T.nilable(Integer)) } - def audio_tokens - end + attr_reader :audio_tokens - sig { params(_: Integer).returns(Integer) } - def audio_tokens=(_) - end + sig { params(audio_tokens: Integer).void } + attr_writer :audio_tokens # Cached tokens present in the prompt. sig { returns(T.nilable(Integer)) } - def cached_tokens - end + attr_reader :cached_tokens - sig { params(_: Integer).returns(Integer) } - def cached_tokens=(_) - end + sig { params(cached_tokens: Integer).void } + attr_writer :cached_tokens # Breakdown of tokens used in the prompt. sig { params(audio_tokens: Integer, cached_tokens: Integer).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index eaf9891d..f36b780a 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -6,27 +6,11 @@ module OpenAI # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. sig { returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) } - def filters - end - - sig do - params(_: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) - .returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) - end - def filters=(_) - end + attr_accessor :filters # Type of operation: `and` or `or`. sig { returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::CompoundFilter::Type::OrSymbol) - .returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # Combine multiple filters using `and` or `or`. sig do diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index e9a4af7f..d057718b 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -5,42 +5,22 @@ module OpenAI class CreateEmbeddingResponse < OpenAI::BaseModel # The list of embeddings generated by the model. sig { returns(T::Array[OpenAI::Models::Embedding]) } - def data - end - - sig { params(_: T::Array[OpenAI::Models::Embedding]).returns(T::Array[OpenAI::Models::Embedding]) } - def data=(_) - end + attr_accessor :data # The name of the model used to generate the embedding. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always "list". 
sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The usage information for the request. sig { returns(OpenAI::Models::CreateEmbeddingResponse::Usage) } - def usage - end + attr_reader :usage - sig do - params(_: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)) - end - def usage=(_) - end + sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)).void } + attr_writer :usage sig do params( @@ -71,21 +51,11 @@ module OpenAI class Usage < OpenAI::BaseModel # The number of tokens used by the prompt. sig { returns(Integer) } - def prompt_tokens - end - - sig { params(_: Integer).returns(Integer) } - def prompt_tokens=(_) - end + attr_accessor :prompt_tokens # The total number of tokens used by the request. sig { returns(Integer) } - def total_tokens - end - - sig { params(_: Integer).returns(Integer) } - def total_tokens=(_) - end + attr_accessor :total_tokens # The usage information for the request. sig { params(prompt_tokens: Integer, total_tokens: Integer).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index 4218f3b9..90ad0f45 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -7,30 +7,15 @@ module OpenAI # the model as listed in the # [embedding guide](https://platform.openai.com/docs/guides/embeddings). sig { returns(T::Array[Float]) } - def embedding - end - - sig { params(_: T::Array[Float]).returns(T::Array[Float]) } - def embedding=(_) - end + attr_accessor :embedding # The index of the embedding in the list of embeddings. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The object type, which is always "embedding". sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # Represents an embedding vector returned by embedding endpoint. sig { params(embedding: T::Array[Float], index: Integer, object: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 69cd0415..0f90d8b7 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -15,15 +15,7 @@ module OpenAI # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. sig { returns(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) } - def input - end - - sig do - params(_: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) - .returns(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) - end - def input=(_) - end + attr_accessor :input # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -31,49 +23,32 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # The number of dimensions the resulting output embeddings should have. Only # supported in `text-embedding-3` and later models. sig { returns(T.nilable(Integer)) } - def dimensions - end + attr_reader :dimensions - sig { params(_: Integer).returns(Integer) } - def dimensions=(_) - end + sig { params(dimensions: Integer).void } + attr_writer :dimensions # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } - def encoding_format - end + attr_reader :encoding_format - sig do - params(_: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - .returns(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol) - end - def encoding_format=(_) - end + sig { params(encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol).void } + attr_writer :encoding_format # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/error_object.rbi b/rbi/lib/openai/models/error_object.rbi index f7390e71..134b6d66 100644 --- a/rbi/lib/openai/models/error_object.rbi +++ b/rbi/lib/openai/models/error_object.rbi @@ -4,36 +4,16 @@ module OpenAI module Models class ErrorObject < OpenAI::BaseModel sig { returns(T.nilable(String)) } - def code - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def code=(_) - end + attr_accessor :code sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param sig { returns(String) } - def type - end - - sig { params(_: String).returns(String) } - def type=(_) - end + attr_accessor :type sig do params(code: T.nilable(String), message: String, param: T.nilable(String), type: String) diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index 93746f38..c25ae968 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -8,24 +8,14 @@ module OpenAI # The File object (not file name) to be uploaded. sig { returns(T.any(IO, StringIO)) } - def file - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def file=(_) - end + attr_accessor :file # The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } - def purpose - end - - sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } - def purpose=(_) - end + attr_accessor :purpose sig do params( diff --git a/rbi/lib/openai/models/file_deleted.rbi b/rbi/lib/openai/models/file_deleted.rbi index 83029526..fd6fac2b 100644 --- a/rbi/lib/openai/models/file_deleted.rbi +++ b/rbi/lib/openai/models/file_deleted.rbi @@ -4,28 +4,13 @@ module OpenAI module Models class FileDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :file) diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 795e6044..7cf867a4 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -11,44 +11,33 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A limit on the number of objects to be returned. Limit can range between 1 and # 10,000, and the default is 10,000. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::FileListParams::Order::OrSymbol) - .returns(OpenAI::Models::FileListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::FileListParams::Order::OrSymbol).void } + attr_writer :order # Only return files with the given purpose. sig { returns(T.nilable(String)) } - def purpose - end + attr_reader :purpose - sig { params(_: String).returns(String) } - def purpose=(_) - end + sig { params(purpose: String).void } + attr_writer :purpose sig do params( diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 00ace88e..85e51b7d 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -5,94 +5,49 @@ module OpenAI class FileObject < OpenAI::BaseModel # The file identifier, which can be referenced in the API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The size of the file, in bytes. 
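
The `after`/`limit`/`order` trio above is the standard cursor-pagination surface. A hedged sketch of how it is typically driven, assuming the client forwards these params through a `files.list` method and that `ENV["OPENAI_API_KEY"]` is set:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # first page: newest 100 files uploaded for assistants
    page = client.files.list(limit: 100, order: :desc, purpose: "assistants")

    # next page: cursor on the last object ID from the previous call
    client.files.list(limit: 100, after: "file-abc123")

The `"file-abc123"` cursor is a placeholder; in real code it comes from the last item of the previous page.
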
sig { returns(Integer) } - def bytes - end - - sig { params(_: Integer).returns(Integer) } - def bytes=(_) - end + attr_accessor :bytes # The Unix timestamp (in seconds) for when the file was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The name of the file. sig { returns(String) } - def filename - end - - sig { params(_: String).returns(String) } - def filename=(_) - end + attr_accessor :filename # The object type, which is always `file`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } - def purpose - end - - sig do - params(_: OpenAI::Models::FileObject::Purpose::TaggedSymbol) - .returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) - end - def purpose=(_) - end + attr_accessor :purpose # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::FileObject::Status::TaggedSymbol) - .returns(OpenAI::Models::FileObject::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The Unix timestamp (in seconds) for when the file will expire. sig { returns(T.nilable(Integer)) } - def expires_at - end + attr_reader :expires_at - sig { params(_: Integer).returns(Integer) } - def expires_at=(_) - end + sig { params(expires_at: Integer).void } + attr_writer :expires_at # Deprecated. For details on why a fine-tuning training file failed validation, # see the `error` field on `fine_tuning.job`. sig { returns(T.nilable(String)) } - def status_details - end + attr_reader :status_details - sig { params(_: String).returns(String) } - def status_details=(_) - end + sig { params(status_details: String).void } + attr_writer :status_details # The `File` object represents a document that has been uploaded to OpenAI. sig do diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 04cf51bd..b7a0df18 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -6,180 +6,97 @@ module OpenAI class FineTuningJob < OpenAI::BaseModel # The object identifier, which can be referenced in the API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the fine-tuning job was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. 
sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) } - def error - end + attr_reader :error sig do - params(_: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash))) - end - def error=(_) + params(error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash))) + .void end + attr_writer :error # The name of the fine-tuned model that is being created. The value will be null # if the fine-tuning job is still running. sig { returns(T.nilable(String)) } - def fine_tuned_model - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def fine_tuned_model=(_) - end + attr_accessor :fine_tuned_model # The Unix timestamp (in seconds) for when the fine-tuning job was finished. The # value will be null if the fine-tuning job is still running. sig { returns(T.nilable(Integer)) } - def finished_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def finished_at=(_) - end + attr_accessor :finished_at # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) } - def hyperparameters - end + attr_reader :hyperparameters sig do - params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash)) - end - def hyperparameters=(_) + params( + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :hyperparameters # The base model that is being fine-tuned. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # The object type, which is always "fine_tuning.job". sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The organization that owns the fine-tuning job. sig { returns(String) } - def organization_id - end - - sig { params(_: String).returns(String) } - def organization_id=(_) - end + attr_accessor :organization_id # The compiled results file ID(s) for the fine-tuning job. You can retrieve the # results with the # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T::Array[String]) } - def result_files - end - - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def result_files=(_) - end + attr_accessor :result_files # The seed used for the fine-tuning job. sig { returns(Integer) } - def seed - end - - sig { params(_: Integer).returns(Integer) } - def seed=(_) - end + attr_accessor :seed # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The total number of billable tokens processed by this fine-tuning job. The value # will be null if the fine-tuning job is still running. 
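
Worth noting while reading this model: fields the API always returns but may set to an explicit `null` (`fine_tuned_model`, `finished_at`, `trained_tokens`) keep a nilable `attr_accessor`, whereas fields that can be missing from the payload entirely (`error`, `hyperparameters`) get the reader/writer pair. For a caller the difference looks roughly like this, with `job` standing in for a parsed `FineTuningJob`:

    # nullable: present in the JSON, but null until the job finishes
    job.fine_tuned_model # => nil, or e.g. "ft:gpt-4o-mini:acme::abc123"

    # optional: only populated when the job has failed
    if (err = job.error)
      warn "fine-tune failed (#{err.code}): #{err.message}"
    end
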
sig { returns(T.nilable(Integer)) } - def trained_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def trained_tokens=(_) - end + attr_accessor :trained_tokens # The file ID used for training. You can retrieve the training data with the # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(String) } - def training_file - end - - sig { params(_: String).returns(String) } - def training_file=(_) - end + attr_accessor :training_file # The file ID used for validation. You can retrieve the validation results with # the # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T.nilable(String)) } - def validation_file - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def validation_file=(_) - end + attr_accessor :validation_file # The Unix timestamp (in seconds) for when the fine-tuning job is estimated to # finish. The value will be null if the fine-tuning job is not running. sig { returns(T.nilable(Integer)) } - def estimated_finish - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def estimated_finish=(_) - end + attr_accessor :estimated_finish # A list of integrations to enable for this fine-tuning job. sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject])) } - def integrations - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject])) - .returns(T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject])) - end - def integrations=(_) - end + attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -188,24 +105,14 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The method used for fine-tuning. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method)) } - def method_ - end + attr_reader :method_ - sig do - params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)) - end - def method_=(_) - end + sig { params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)).void } + attr_writer :method_ # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. @@ -290,31 +197,16 @@ module OpenAI class Error < OpenAI::BaseModel # A machine-readable error code. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # A human-readable error message. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # The parameter that was invalid, usually `training_file` or `validation_file`. # This field will be null if the failure was not parameter-specific. 
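
One naming quirk in this model: the fine-tuning method lives under `method_` with a trailing underscore so that the generated reader does not shadow Ruby's built-in `Object#method`; the wire field is still named `method`. With `job` as in the sketch above, both stay usable:

    job.method_          # the fine-tuning method configuration from the API
    job.method(:inspect) # Ruby's reflection API, unshadowed
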
sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. @@ -331,32 +223,26 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. @@ -454,39 +340,29 @@ module OpenAI class Method < OpenAI::BaseModel # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo)) } - def dpo - end + attr_reader :dpo - sig do - params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)) - end - def dpo=(_) - end + sig { params(dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)).void } + attr_writer :dpo # Configuration for the supervised fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised)) } - def supervised - end + attr_reader :supervised sig do - params(_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash)) - end - def supervised=(_) + params( + supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :supervised # The type of method. Is either `supervised` or `dpo`. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol).void } + attr_writer :type # The method used for fine-tuning. sig do @@ -516,19 +392,15 @@ module OpenAI class Dpo < OpenAI::BaseModel # The hyperparameters used for the fine-tuning job. 
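
These hyperparameter sigs are unions (`T.any(Symbol, Integer)`, `T.any(Symbol, Float)`) because each value is either the literal `auto` or an explicit number. A sketch of what that allows, assuming `auto` is spelled as the symbol `:auto` on the Ruby side (that spelling is an assumption, not taken from this patch):

    hp = OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters.new(
      batch_size: :auto,              # assumed spelling: let the service choose
      learning_rate_multiplier: 0.05, # or pin an explicit Float
      n_epochs: 3                     # or an explicit Integer
    )
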
sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters)) } - def hyperparameters - end + attr_reader :hyperparameters sig do params( - _: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) - ) - end - def hyperparameters=(_) + .void end + attr_writer :hyperparameters # Configuration for the DPO fine-tuning method. sig do @@ -551,42 +423,34 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } - def beta - end + attr_reader :beta - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def beta=(_) - end + sig { params(beta: T.any(Symbol, Float)).void } + attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do @@ -676,25 +540,18 @@ module OpenAI class Supervised < OpenAI::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters)) } - def hyperparameters - end + attr_reader :hyperparameters sig do params( - _: T.any( + hyperparameters: T.any( OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, - OpenAI::Util::AnyHash - ) - ) - end - def hyperparameters=(_) + .void end + attr_writer :hyperparameters # Configuration for the supervised fine-tuning method. sig do @@ -720,32 +577,26 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 372492b9..61fee31f 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -6,72 +6,37 @@ module OpenAI class FineTuningJobEvent < OpenAI::BaseModel # The object identifier. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the fine-tuning job was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The log level of the event. sig { returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } - def level - end - - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - end - def level=(_) - end + attr_accessor :level # The message of the event. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # The object type, which is always "fine_tuning.job.event". sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The data associated with the event. sig { returns(T.nilable(T.anything)) } - def data - end + attr_reader :data - sig { params(_: T.anything).returns(T.anything) } - def data=(_) - end + sig { params(data: T.anything).void } + attr_writer :data # The type of event. 
sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) - .returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol).void } + attr_writer :type # Fine-tuning job event object sig do diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index 22879caa..315342d0 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -6,44 +6,27 @@ module OpenAI class FineTuningJobWandbIntegration < OpenAI::BaseModel # The name of the project that the new run will be created under. sig { returns(String) } - def project - end - - sig { params(_: String).returns(String) } - def project=(_) - end + attr_accessor :project # The entity to use for the run. This allows you to set the team or username of # the WandB user that you would like associated with the run. If not set, the # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } - def entity - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def entity=(_) - end + attr_accessor :entity # A display name to set for the run. If not set, we will use the Job ID as the # name. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name # A list of tags to be attached to the newly created run. These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } - def tags - end + attr_reader :tags - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def tags=(_) - end + sig { params(tags: T::Array[String]).void } + attr_writer :tags # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index de6aec82..79b69c52 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -6,27 +6,17 @@ module OpenAI class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel # The type of the integration being enabled for the fine-tuning job sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. 
sig { returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) } - def wandb - end + attr_reader :wandb - sig do - params(_: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)) - end - def wandb=(_) - end + sig { params(wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)).void } + attr_writer :wandb sig do params( diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 5c524237..d9c7959b 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -10,15 +10,7 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) - .returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # The ID of an uploaded file that contains training data. # @@ -38,37 +30,24 @@ module OpenAI # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) # for more details. sig { returns(String) } - def training_file - end - - sig { params(_: String).returns(String) } - def training_file=(_) - end + attr_accessor :training_file # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters)) } - def hyperparameters - end + attr_reader :hyperparameters sig do - params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash)) - end - def hyperparameters=(_) + params( + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :hyperparameters # A list of integrations to enable for your fine-tuning job. sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration])) } - def integrations - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration])) - .returns(T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration])) - end - def integrations=(_) - end + attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -77,35 +56,20 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The method used for fine-tuning. 
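
Pulling the create params together, a hedged end-to-end sketch; the `fine_tuning.jobs.create` path mirrors this SDK's resource layout, and all IDs and names are placeholders:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini-2024-07-18",
      training_file: "file-abc123", # a previously uploaded JSONL file
      metadata: {team: "search"},
      integrations: [
        {type: "wandb", wandb: {project: "my-finetunes"}}
      ]
    )
    puts job.status
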
sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) } - def method_ - end + attr_reader :method_ - sig do - params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)) - end - def method_=(_) - end + sig { params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)).void } + attr_writer :method_ # The seed controls the reproducibility of the job. Passing in the same seed and # job parameters should produce the same results, but may differ in rare cases. If # a seed is not specified, one will be generated for you. sig { returns(T.nilable(Integer)) } - def seed - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def seed=(_) - end + attr_accessor :seed # A string of up to 64 characters that will be added to your fine-tuned model # name. @@ -113,12 +77,7 @@ module OpenAI # For example, a `suffix` of "custom-model-name" would produce a model name like # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. sig { returns(T.nilable(String)) } - def suffix - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def suffix=(_) - end + attr_accessor :suffix # The ID of an uploaded file that contains validation data. # @@ -133,12 +92,7 @@ module OpenAI # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) # for more details. sig { returns(T.nilable(String)) } - def validation_file - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def validation_file=(_) - end + attr_accessor :validation_file sig do params( @@ -219,32 +173,26 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. @@ -319,27 +267,22 @@ module OpenAI # The type of integration to enable. Currently, only "wandb" (Weights and Biases) # is supported. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The settings for your integration with Weights and Biases. 
This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. sig { returns(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) } - def wandb - end + attr_reader :wandb sig do - params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash)) - end - def wandb=(_) + params( + wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :wandb sig do params( @@ -358,44 +301,27 @@ module OpenAI class Wandb < OpenAI::BaseModel # The name of the project that the new run will be created under. sig { returns(String) } - def project - end - - sig { params(_: String).returns(String) } - def project=(_) - end + attr_accessor :project # The entity to use for the run. This allows you to set the team or username of # the WandB user that you would like associated with the run. If not set, the # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } - def entity - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def entity=(_) - end + attr_accessor :entity # A display name to set for the run. If not set, we will use the Job ID as the # name. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name # A list of tags to be attached to the newly created run. These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } - def tags - end + attr_reader :tags - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def tags=(_) - end + sig { params(tags: T::Array[String]).void } + attr_writer :tags # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an @@ -430,39 +356,29 @@ module OpenAI class Method < OpenAI::BaseModel # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo)) } - def dpo - end + attr_reader :dpo - sig do - params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)) - end - def dpo=(_) - end + sig { params(dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)).void } + attr_writer :dpo # Configuration for the supervised fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised)) } - def supervised - end + attr_reader :supervised sig do - params(_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash)) - end - def supervised=(_) + params( + supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :supervised # The type of method. Is either `supervised` or `dpo`. 
sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - .returns(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol).void } + attr_writer :type # The method used for fine-tuning. sig do @@ -492,19 +408,15 @@ module OpenAI class Dpo < OpenAI::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters)) } - def hyperparameters - end + attr_reader :hyperparameters sig do params( - _: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) ) - .returns( - T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) - ) - end - def hyperparameters=(_) + .void end + attr_writer :hyperparameters # Configuration for the DPO fine-tuning method. sig do @@ -527,42 +439,34 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } - def beta - end + attr_reader :beta - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def beta=(_) - end + sig { params(beta: T.any(Symbol, Float)).void } + attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do @@ -652,25 +556,18 @@ module OpenAI class Supervised < OpenAI::BaseModel # The hyperparameters used for the fine-tuning job. 
sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters)) } - def hyperparameters - end + attr_reader :hyperparameters sig do params( - _: T.any( + hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, OpenAI::Util::AnyHash ) ) - .returns( - T.any( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Util::AnyHash - ) - ) - end - def hyperparameters=(_) + .void end + attr_writer :hyperparameters # Configuration for the supervised fine-tuning method. sig do @@ -698,32 +595,26 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def batch_size - end + attr_reader :batch_size - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def batch_size=(_) - end + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } - def learning_rate_multiplier - end + attr_reader :learning_rate_multiplier - sig { params(_: T.any(Symbol, Float)).returns(T.any(Symbol, Float)) } - def learning_rate_multiplier=(_) - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } - def n_epochs - end + attr_reader :n_epochs - sig { params(_: T.any(Symbol, Integer)).returns(T.any(Symbol, Integer)) } - def n_epochs=(_) - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 253cd1c0..52f286e3 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -9,21 +9,17 @@ module OpenAI # Identifier for the last event from the previous pagination request. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # Number of events to retrieve. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit sig do params( diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 4102ced5..52610539 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -9,31 +9,22 @@ module OpenAI # Identifier for the last job from the previous pagination request. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # Number of fine-tuning jobs to retrieve. 
sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. # Alternatively, set `metadata=null` to indicate no metadata. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata sig do params( diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index 36ce4669..793f2f71 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -10,21 +10,17 @@ module OpenAI # Identifier for the last checkpoint ID from the previous pagination request. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # Number of checkpoints to retrieve. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit sig do params( diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index faed91db..de47d120 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -7,71 +7,39 @@ module OpenAI class FineTuningJobCheckpoint < OpenAI::BaseModel # The checkpoint identifier, which can be referenced in the API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the checkpoint was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The name of the fine-tuned checkpoint model that is created. sig { returns(String) } - def fine_tuned_model_checkpoint - end - - sig { params(_: String).returns(String) } - def fine_tuned_model_checkpoint=(_) - end + attr_accessor :fine_tuned_model_checkpoint # The name of the fine-tuning job that this checkpoint was created from. sig { returns(String) } - def fine_tuning_job_id - end - - sig { params(_: String).returns(String) } - def fine_tuning_job_id=(_) - end + attr_accessor :fine_tuning_job_id # Metrics at the step number during the fine-tuning job. sig { returns(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) } - def metrics - end + attr_reader :metrics sig do params( - _: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash) + metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash)) - end - def metrics=(_) + .void end + attr_writer :metrics # The object type, which is always "fine_tuning.job.checkpoint". 
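
The `metadata[k]=v` filter described above surfaces in Ruby as a plain hash param, with `nil` meaning "jobs that carry no metadata"; reusing the client from the earlier sketches:

    # only jobs tagged team=search
    client.fine_tuning.jobs.list(limit: 10, metadata: {team: "search"})

    # only jobs with no metadata at all
    client.fine_tuning.jobs.list(metadata: nil)
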
sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The step number that the checkpoint was created at. sig { returns(Integer) } - def step_number - end - - sig { params(_: Integer).returns(Integer) } - def step_number=(_) - end + attr_accessor :step_number # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a # fine-tuning job that is ready to use. @@ -117,60 +85,46 @@ module OpenAI class Metrics < OpenAI::BaseModel sig { returns(T.nilable(Float)) } - def full_valid_loss - end + attr_reader :full_valid_loss - sig { params(_: Float).returns(Float) } - def full_valid_loss=(_) - end + sig { params(full_valid_loss: Float).void } + attr_writer :full_valid_loss sig { returns(T.nilable(Float)) } - def full_valid_mean_token_accuracy - end + attr_reader :full_valid_mean_token_accuracy - sig { params(_: Float).returns(Float) } - def full_valid_mean_token_accuracy=(_) - end + sig { params(full_valid_mean_token_accuracy: Float).void } + attr_writer :full_valid_mean_token_accuracy sig { returns(T.nilable(Float)) } - def step - end + attr_reader :step - sig { params(_: Float).returns(Float) } - def step=(_) - end + sig { params(step: Float).void } + attr_writer :step sig { returns(T.nilable(Float)) } - def train_loss - end + attr_reader :train_loss - sig { params(_: Float).returns(Float) } - def train_loss=(_) - end + sig { params(train_loss: Float).void } + attr_writer :train_loss sig { returns(T.nilable(Float)) } - def train_mean_token_accuracy - end + attr_reader :train_mean_token_accuracy - sig { params(_: Float).returns(Float) } - def train_mean_token_accuracy=(_) - end + sig { params(train_mean_token_accuracy: Float).void } + attr_writer :train_mean_token_accuracy sig { returns(T.nilable(Float)) } - def valid_loss - end + attr_reader :valid_loss - sig { params(_: Float).returns(Float) } - def valid_loss=(_) - end + sig { params(valid_loss: Float).void } + attr_writer :valid_loss sig { returns(T.nilable(Float)) } - def valid_mean_token_accuracy - end + attr_reader :valid_mean_token_accuracy - sig { params(_: Float).returns(Float) } - def valid_mean_token_accuracy=(_) - end + sig { params(valid_mean_token_accuracy: Float).void } + attr_writer :valid_mean_token_accuracy # Metrics at the step number during the fine-tuning job. sig do diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index 542b14aa..f1bad3a8 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -6,22 +6,15 @@ module OpenAI # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # A description of what the function does, used by the model to choose when and # how to call the function. sig { returns(T.nilable(String)) } - def description - end + attr_reader :description - sig { params(_: String).returns(String) } - def description=(_) - end + sig { params(description: String).void } + attr_writer :description # The parameters the functions accepts, described as a JSON Schema object. See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, @@ -31,12 +24,10 @@ module OpenAI # # Omitting `parameters` defines a function with an empty parameter list. 
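
A small sketch of a definition that satisfies these sigs, with `parameters` as a plain JSON Schema hash; the weather function is invented for illustration:

    fn = OpenAI::Models::FunctionDefinition.new(
      name: "get_weather",
      description: "Look up the current weather for a city.",
      parameters: {
        type: "object",
        properties: {city: {type: "string"}},
        required: ["city"],
        additionalProperties: false
      },
      strict: true # exact-schema mode; see the `strict` field below
    )
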
sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - def parameters - end + attr_reader :parameters - sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } - def parameters=(_) - end + sig { params(parameters: T::Hash[Symbol, T.anything]).void } + attr_writer :parameters # Whether to enable strict schema adherence when generating the function call. If # set to true, the model will follow the exact schema defined in the `parameters` @@ -44,12 +35,7 @@ module OpenAI # more about Structured Outputs in the # [function calling guide](docs/guides/function-calling). sig { returns(T.nilable(T::Boolean)) } - def strict - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def strict=(_) - end + attr_accessor :strict sig do params( diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index 3c0fc4fa..5af93081 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -6,31 +6,25 @@ module OpenAI # The base64-encoded JSON of the generated image, if `response_format` is # `b64_json`. sig { returns(T.nilable(String)) } - def b64_json - end + attr_reader :b64_json - sig { params(_: String).returns(String) } - def b64_json=(_) - end + sig { params(b64_json: String).void } + attr_writer :b64_json # The prompt that was used to generate the image, if there was any revision to the # prompt. sig { returns(T.nilable(String)) } - def revised_prompt - end + attr_reader :revised_prompt - sig { params(_: String).returns(String) } - def revised_prompt=(_) - end + sig { params(revised_prompt: String).void } + attr_writer :revised_prompt # The URL of the generated image, if `response_format` is `url` (default). sig { returns(T.nilable(String)) } - def url - end + attr_reader :url - sig { params(_: String).returns(String) } - def url=(_) - end + sig { params(url: String).void } + attr_writer :url # Represents the url or the content of an image generated by the OpenAI API. sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 2a711dc2..5e353218 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -9,73 +9,37 @@ module OpenAI # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. sig { returns(T.any(IO, StringIO)) } - def image - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def image=(_) - end + attr_accessor :image # The model to use for image generation. Only `dall-e-2` is supported at this # time. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } - def model - end - - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end - def model=(_) - end + attr_accessor :model # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # `n=1` is supported. sig { returns(T.nilable(Integer)) } - def n - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def n=(_) - end + attr_accessor :n # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. 
sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } - def response_format - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) - end - def response_format=(_) - end + attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } - def size - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) - end - def size=(_) - end + attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 3fad72eb..5752e130 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -9,93 +9,50 @@ module OpenAI # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask # is not provided, image must have transparency, which will be used as the mask. sig { returns(T.any(IO, StringIO)) } - def image - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def image=(_) - end + attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 # characters. sig { returns(String) } - def prompt - end - - sig { params(_: String).returns(String) } - def prompt=(_) - end + attr_accessor :prompt # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. sig { returns(T.nilable(T.any(IO, StringIO))) } - def mask - end + attr_reader :mask - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def mask=(_) - end + sig { params(mask: T.any(IO, StringIO)).void } + attr_writer :mask # The model to use for image generation. Only `dall-e-2` is supported at this # time. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } - def model - end - - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end - def model=(_) - end + attr_accessor :model # The number of images to generate. Must be between 1 and 10. sig { returns(T.nilable(Integer)) } - def n - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def n=(_) - end + attr_accessor :n # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. 
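# [Editor's note] Per the sigs above, binary image parameters are typed
# `T.any(IO, StringIO)`. A minimal, hypothetical call sketch; the client and
# method names are assumptions, not defined in this patch:
#
#   client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
#   client.images.edit(
#     image: File.open("input.png", "rb"),  # an IO satisfies the sig
#     mask: StringIO.new(mask_bytes),       # so does an in-memory StringIO
#     prompt: "Remove the background"
#   )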
sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } - def response_format - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) - end - def response_format=(_) - end + attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } - def size - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) - end - def size=(_) - end + attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 513ce790..97f922c3 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -9,102 +9,53 @@ module OpenAI # A text description of the desired image(s). The maximum length is 1000 # characters for `dall-e-2` and 4000 characters for `dall-e-3`. sig { returns(String) } - def prompt - end - - sig { params(_: String).returns(String) } - def prompt=(_) - end + attr_accessor :prompt # The model to use for image generation. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } - def model - end - - sig do - params(_: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - .returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) - end - def model=(_) - end + attr_accessor :model # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # `n=1` is supported. sig { returns(T.nilable(Integer)) } - def n - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def n=(_) - end + attr_accessor :n # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } - def quality - end + attr_reader :quality - sig do - params(_: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - .returns(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol) - end - def quality=(_) - end + sig { params(quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol).void } + attr_writer :quality # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } - def response_format - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) - end - def response_format=(_) - end + attr_accessor :response_format # The size of the generated images. 
Must be one of `256x256`, `512x512`, or # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } - def size - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) - end - def size=(_) - end + attr_accessor :size # The style of the generated images. Must be one of `vivid` or `natural`. Vivid # causes the model to lean towards generating hyper-real and dramatic images. # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } - def style - end - - sig do - params(_: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) - end - def style=(_) - end + attr_accessor :style # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index 724883f8..5395968e 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -4,20 +4,10 @@ module OpenAI module Models class ImagesResponse < OpenAI::BaseModel sig { returns(Integer) } - def created - end - - sig { params(_: Integer).returns(Integer) } - def created=(_) - end + attr_accessor :created sig { returns(T::Array[OpenAI::Models::Image]) } - def data - end - - sig { params(_: T::Array[OpenAI::Models::Image]).returns(T::Array[OpenAI::Models::Image]) } - def data=(_) - end + attr_accessor :data sig do params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Util::AnyHash)]) diff --git a/rbi/lib/openai/models/model.rbi b/rbi/lib/openai/models/model.rbi index 07b59908..556bb5fc 100644 --- a/rbi/lib/openai/models/model.rbi +++ b/rbi/lib/openai/models/model.rbi @@ -5,39 +5,19 @@ module OpenAI class Model < OpenAI::BaseModel # The model identifier, which can be referenced in the API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) when the model was created. sig { returns(Integer) } - def created - end - - sig { params(_: Integer).returns(Integer) } - def created=(_) - end + attr_accessor :created # The object type, which is always "model". sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The organization that owns the model. sig { returns(String) } - def owned_by - end - - sig { params(_: String).returns(String) } - def owned_by=(_) - end + attr_accessor :owned_by # Describes an OpenAI model offering that can be used with the API. 
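# [Editor's note] With `attr_accessor`, response models read like plain
# structs. A small sketch, assuming generate/retrieve-style calls that return
# the `ImagesResponse` and `Model` objects above (the calls themselves are
# assumptions):
#
#   res = client.images.generate(prompt: "a watercolor fox", n: 1)
#   res.created                  # => Integer (Unix timestamp)
#   res.data.first.url           # => String when `response_format` is `url`
#   client.models.retrieve("gpt-4o").owned_by   # Model#owned_by reader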
      sig { params(id: String, created: Integer, owned_by: String, object: Symbol).returns(T.attached_class) }
diff --git a/rbi/lib/openai/models/model_deleted.rbi b/rbi/lib/openai/models/model_deleted.rbi
index 6ed7605b..9888bb84 100644
--- a/rbi/lib/openai/models/model_deleted.rbi
+++ b/rbi/lib/openai/models/model_deleted.rbi
@@ -4,28 +4,13 @@ module OpenAI
   module Models
     class ModelDeleted < OpenAI::BaseModel
       sig { returns(String) }
-      def id
-      end
-
-      sig { params(_: String).returns(String) }
-      def id=(_)
-      end
+      attr_accessor :id

       sig { returns(T::Boolean) }
-      def deleted
-      end
-
-      sig { params(_: T::Boolean).returns(T::Boolean) }
-      def deleted=(_)
-      end
+      attr_accessor :deleted

       sig { returns(String) }
-      def object
-      end
-
-      sig { params(_: String).returns(String) }
-      def object=(_)
-      end
+      attr_accessor :object

       sig { params(id: String, deleted: T::Boolean, object: String).returns(T.attached_class) }
       def self.new(id:, deleted:, object:)
diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi
index 26fc4f28..72848671 100644
--- a/rbi/lib/openai/models/moderation.rbi
+++ b/rbi/lib/openai/models/moderation.rbi
@@ -5,48 +5,33 @@ module OpenAI
     class Moderation < OpenAI::BaseModel
       # A list of the categories, and whether they are flagged or not.
       sig { returns(OpenAI::Models::Moderation::Categories) }
-      def categories
-      end
+      attr_reader :categories

-      sig do
-        params(_: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash))
-          .returns(T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash))
-      end
-      def categories=(_)
-      end
+      sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash)).void }
+      attr_writer :categories

       # A list of the categories along with the input type(s) that the score applies to.
       sig { returns(OpenAI::Models::Moderation::CategoryAppliedInputTypes) }
-      def category_applied_input_types
-      end
+      attr_reader :category_applied_input_types

       sig do
-        params(_: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash))
-          .returns(T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash))
-      end
-      def category_applied_input_types=(_)
+        params(
+          category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash)
+        )
+          .void
       end
+      attr_writer :category_applied_input_types

       # A list of the categories along with their scores as predicted by the model.
       sig { returns(OpenAI::Models::Moderation::CategoryScores) }
-      def category_scores
-      end
+      attr_reader :category_scores

-      sig do
-        params(_: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash))
-          .returns(T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash))
-      end
-      def category_scores=(_)
-      end
+      sig { params(category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash)).void }
+      attr_writer :category_scores

       # Whether any of the below categories are flagged.
       sig { returns(T::Boolean) }
-      def flagged
-      end
-
-      sig { params(_: T::Boolean).returns(T::Boolean) }
-      def flagged=(_)
-      end
+      attr_accessor :flagged

       sig do
         params(
@@ -78,136 +63,71 @@ module OpenAI
         # Content that expresses, incites, or promotes harassing language towards any
         # target.
         sig { returns(T::Boolean) }
-        def harassment
-        end
-
-        sig { params(_: T::Boolean).returns(T::Boolean) }
-        def harassment=(_)
-        end
+        attr_accessor :harassment

         # Harassment content that also includes violence or serious harm towards any
         # target.
sig { returns(T::Boolean) } - def harassment_threatening - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def harassment_threatening=(_) - end + attr_accessor :harassment_threatening # Content that expresses, incites, or promotes hate based on race, gender, # ethnicity, religion, nationality, sexual orientation, disability status, or # caste. Hateful content aimed at non-protected groups (e.g., chess players) is # harassment. sig { returns(T::Boolean) } - def hate - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def hate=(_) - end + attr_accessor :hate # Hateful content that also includes violence or serious harm towards the targeted # group based on race, gender, ethnicity, religion, nationality, sexual # orientation, disability status, or caste. sig { returns(T::Boolean) } - def hate_threatening - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def hate_threatening=(_) - end + attr_accessor :hate_threatening # Content that includes instructions or advice that facilitate the planning or # execution of wrongdoing, or that gives advice or instruction on how to commit # illicit acts. For example, "how to shoplift" would fit this category. sig { returns(T.nilable(T::Boolean)) } - def illicit - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def illicit=(_) - end + attr_accessor :illicit # Content that includes instructions or advice that facilitate the planning or # execution of wrongdoing that also includes violence, or that gives advice or # instruction on the procurement of any weapon. sig { returns(T.nilable(T::Boolean)) } - def illicit_violent - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def illicit_violent=(_) - end + attr_accessor :illicit_violent # Content that promotes, encourages, or depicts acts of self-harm, such as # suicide, cutting, and eating disorders. sig { returns(T::Boolean) } - def self_harm - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def self_harm=(_) - end + attr_accessor :self_harm # Content that encourages performing acts of self-harm, such as suicide, cutting, # and eating disorders, or that gives instructions or advice on how to commit such # acts. sig { returns(T::Boolean) } - def self_harm_instructions - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def self_harm_instructions=(_) - end + attr_accessor :self_harm_instructions # Content where the speaker expresses that they are engaging or intend to engage # in acts of self-harm, such as suicide, cutting, and eating disorders. sig { returns(T::Boolean) } - def self_harm_intent - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def self_harm_intent=(_) - end + attr_accessor :self_harm_intent # Content meant to arouse sexual excitement, such as the description of sexual # activity, or that promotes sexual services (excluding sex education and # wellness). sig { returns(T::Boolean) } - def sexual - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def sexual=(_) - end + attr_accessor :sexual # Sexual content that includes an individual who is under 18 years old. sig { returns(T::Boolean) } - def sexual_minors - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def sexual_minors=(_) - end + attr_accessor :sexual_minors # Content that depicts death, violence, or physical injury. 
sig { returns(T::Boolean) } - def violence - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def violence=(_) - end + attr_accessor :violence # Content that depicts death, violence, or physical injury in graphic detail. sig { returns(T::Boolean) } - def violence_graphic - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def violence_graphic=(_) - end + attr_accessor :violence_graphic # A list of the categories, and whether they are flagged or not. sig do @@ -272,15 +192,7 @@ module OpenAI class CategoryAppliedInputTypes < OpenAI::BaseModel # The applied input type(s) for the category 'harassment'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } - def harassment - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) - end - def harassment=(_) - end + attr_accessor :harassment # The applied input type(s) for the category 'harassment/threatening'. sig do @@ -288,79 +200,27 @@ module OpenAI T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] ) end - def harassment_threatening - end - - sig do - params( - _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) - end - def harassment_threatening=(_) - end + attr_accessor :harassment_threatening # The applied input type(s) for the category 'hate'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } - def hate - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) - end - def hate=(_) - end + attr_accessor :hate # The applied input type(s) for the category 'hate/threatening'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) } - def hate_threatening - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) - end - def hate_threatening=(_) - end + attr_accessor :hate_threatening # The applied input type(s) for the category 'illicit'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } - def illicit - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) - end - def illicit=(_) - end + attr_accessor :illicit # The applied input type(s) for the category 'illicit/violent'. 
sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) } - def illicit_violent - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) - end - def illicit_violent=(_) - end + attr_accessor :illicit_violent # The applied input type(s) for the category 'self-harm'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } - def self_harm - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) - end - def self_harm=(_) - end + attr_accessor :self_harm # The applied input type(s) for the category 'self-harm/instructions'. sig do @@ -368,79 +228,27 @@ module OpenAI T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] ) end - def self_harm_instructions - end - - sig do - params( - _: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) - end - def self_harm_instructions=(_) - end + attr_accessor :self_harm_instructions # The applied input type(s) for the category 'self-harm/intent'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) } - def self_harm_intent - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) - end - def self_harm_intent=(_) - end + attr_accessor :self_harm_intent # The applied input type(s) for the category 'sexual'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } - def sexual - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) - end - def sexual=(_) - end + attr_accessor :sexual # The applied input type(s) for the category 'sexual/minors'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) } - def sexual_minors - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) - end - def sexual_minors=(_) - end + attr_accessor :sexual_minors # The applied input type(s) for the category 'violence'. sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } - def violence - end - - sig do - params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) - end - def violence=(_) - end + attr_accessor :violence # The applied input type(s) for the category 'violence/graphic'. 
        sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) }
-        def violence_graphic
-        end
-
-        sig do
-          params(_: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol])
-            .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol])
-        end
-        def violence_graphic=(_)
-        end
+        attr_accessor :violence_graphic

         # A list of the categories along with the input type(s) that the score applies to.
         sig do
@@ -764,120 +572,55 @@ module OpenAI
       class CategoryScores < OpenAI::BaseModel
         # The score for the category 'harassment'.
         sig { returns(Float) }
-        def harassment
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def harassment=(_)
-        end
+        attr_accessor :harassment

         # The score for the category 'harassment/threatening'.
         sig { returns(Float) }
-        def harassment_threatening
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def harassment_threatening=(_)
-        end
+        attr_accessor :harassment_threatening

         # The score for the category 'hate'.
         sig { returns(Float) }
-        def hate
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def hate=(_)
-        end
+        attr_accessor :hate

         # The score for the category 'hate/threatening'.
         sig { returns(Float) }
-        def hate_threatening
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def hate_threatening=(_)
-        end
+        attr_accessor :hate_threatening

         # The score for the category 'illicit'.
         sig { returns(Float) }
-        def illicit
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def illicit=(_)
-        end
+        attr_accessor :illicit

         # The score for the category 'illicit/violent'.
         sig { returns(Float) }
-        def illicit_violent
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def illicit_violent=(_)
-        end
+        attr_accessor :illicit_violent

         # The score for the category 'self-harm'.
         sig { returns(Float) }
-        def self_harm
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def self_harm=(_)
-        end
+        attr_accessor :self_harm

         # The score for the category 'self-harm/instructions'.
         sig { returns(Float) }
-        def self_harm_instructions
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def self_harm_instructions=(_)
-        end
+        attr_accessor :self_harm_instructions

         # The score for the category 'self-harm/intent'.
         sig { returns(Float) }
-        def self_harm_intent
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def self_harm_intent=(_)
-        end
+        attr_accessor :self_harm_intent

         # The score for the category 'sexual'.
         sig { returns(Float) }
-        def sexual
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def sexual=(_)
-        end
+        attr_accessor :sexual

         # The score for the category 'sexual/minors'.
         sig { returns(Float) }
-        def sexual_minors
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def sexual_minors=(_)
-        end
+        attr_accessor :sexual_minors

         # The score for the category 'violence'.
         sig { returns(Float) }
-        def violence
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def violence=(_)
-        end
+        attr_accessor :violence

         # The score for the category 'violence/graphic'.
         sig { returns(Float) }
-        def violence_graphic
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def violence_graphic=(_)
-        end
+        attr_accessor :violence_graphic

         # A list of the categories along with their scores as predicted by the model.
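# [Editor's note] A consumption sketch for the moderation accessors above,
# assuming a `moderations.create`-style call whose results expose the
# `categories` and `category_scores` objects (the call shape is an
# assumption, not shown in this patch):
#
#   result = client.moderations.create(input: "some text").results.first
#   result.flagged                      # => T::Boolean
#   result.categories.harassment        # => T::Boolean
#   result.category_scores.harassment   # => Float between 0 and 1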
sig do diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 591f644a..5487b934 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -17,42 +17,17 @@ module OpenAI ) ) end - def input - end - - sig do - params( - _: T.any( - String, - T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - ) - ) - .returns( - T.any( - String, - T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - ) - ) - end - def input=(_) - end + attr_accessor :input # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } - def model - end + attr_reader :model - sig do - params(_: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ModerationModel::OrSymbol)) - end - def model=(_) - end + sig { params(model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)).void } + attr_writer :model sig do params( diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index c3a9c5d8..0de2d5b0 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -5,30 +5,15 @@ module OpenAI class ModerationCreateResponse < OpenAI::BaseModel # The unique identifier for the moderation request. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The model used to generate the moderation results. sig { returns(String) } - def model - end - - sig { params(_: String).returns(String) } - def model=(_) - end + attr_accessor :model # A list of moderation objects. sig { returns(T::Array[OpenAI::Models::Moderation]) } - def results - end - - sig { params(_: T::Array[OpenAI::Models::Moderation]).returns(T::Array[OpenAI::Models::Moderation]) } - def results=(_) - end + attr_accessor :results # Represents if a given text input is potentially harmful. sig do diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index 52ba6b82..5d3fb30a 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -5,24 +5,14 @@ module OpenAI class ModerationImageURLInput < OpenAI::BaseModel # Contains either an image URL or a data URL for a base64 encoded image. sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } - def image_url - end + attr_reader :image_url - sig do - params(_: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)) - end - def image_url=(_) - end + sig { params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)).void } + attr_writer :image_url # Always `image_url`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An object describing an image to classify. 
sig do @@ -42,12 +32,7 @@ module OpenAI class ImageURL < OpenAI::BaseModel # Either a URL of the image or the base64 encoded image data. sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # Contains either an image URL or a data URL for a base64 encoded image. sig { params(url: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/lib/openai/models/moderation_text_input.rbi index 41888533..dddf6d0b 100644 --- a/rbi/lib/openai/models/moderation_text_input.rbi +++ b/rbi/lib/openai/models/moderation_text_input.rbi @@ -5,21 +5,11 @@ module OpenAI class ModerationTextInput < OpenAI::BaseModel # A string of text to classify. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Always `text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An object describing text to classify. sig { params(text: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index db3ddb71..14da388e 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -5,12 +5,7 @@ module OpenAI class OtherFileChunkingStrategyObject < OpenAI::BaseModel # Always `other`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # This is returned when the chunking strategy is unknown. Typically, this is # because the file was indexed before the `chunking_strategy` concept was diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index f221616f..18587255 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -10,15 +10,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } - def effort - end - - sig do - params(_: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - .returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) - end - def effort=(_) - end + attr_accessor :effort # **computer_use_preview only** # @@ -26,15 +18,7 @@ module OpenAI # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } - def generate_summary - end - - sig do - params(_: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) - end - def generate_summary=(_) - end + attr_accessor :generate_summary # **o-series models only** # diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index 044c6ff6..331cb19e 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -5,12 +5,7 @@ module OpenAI class ResponseFormatJSONObject < OpenAI::BaseModel # The type of response format being defined. Always `json_object`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # JSON object response format. An older method of generating JSON responses. Using # `json_schema` is recommended for models that support it. Note that the model diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index 01da50a3..ea87d33f 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -5,24 +5,17 @@ module OpenAI class ResponseFormatJSONSchema < OpenAI::BaseModel # Structured Outputs configuration options, including a JSON Schema. sig { returns(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) } - def json_schema - end + attr_reader :json_schema sig do - params(_: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) - end - def json_schema=(_) + params(json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) + .void end + attr_writer :json_schema # The type of response format being defined. Always `json_schema`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # JSON Schema response format. Used to generate structured JSON responses. Learn # more about @@ -45,32 +38,23 @@ module OpenAI # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # A description of what the response format is for, used by the model to determine # how to respond in the format. sig { returns(T.nilable(String)) } - def description - end + attr_reader :description - sig { params(_: String).returns(String) } - def description=(_) - end + sig { params(description: String).void } + attr_writer :description # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - def schema - end + attr_reader :schema - sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } - def schema=(_) - end + sig { params(schema: T::Hash[Symbol, T.anything]).void } + attr_writer :schema # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` @@ -78,12 +62,7 @@ module OpenAI # learn more, read the # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } - def strict - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def strict=(_) - end + attr_accessor :strict # Structured Outputs configuration options, including a JSON Schema. sig do diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/lib/openai/models/response_format_text.rbi index 2894efdf..a4b2db5e 100644 --- a/rbi/lib/openai/models/response_format_text.rbi +++ b/rbi/lib/openai/models/response_format_text.rbi @@ -5,12 +5,7 @@ module OpenAI class ResponseFormatText < OpenAI::BaseModel # The type of response format being defined. Always `text`. 
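# [Editor's note] A hypothetical sketch of the payload described by the
# `JSONSchema` sigs above; the field names come from this file, while the
# hash wiring is an assumption:
#
#   response_format = {
#     type: :json_schema,
#     json_schema: {
#       name: "weather_report",                                           # required
#       schema: {type: "object", properties: {temp_c: {type: "number"}}}, # nilable
#       strict: true                                                      # nilable
#     }
#   }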
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Default response format. Used to generate text responses. sig { params(type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 82add0e9..84a8caab 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -6,42 +6,19 @@ module OpenAI class ComputerTool < OpenAI::BaseModel # The height of the computer display. sig { returns(Float) } - def display_height - end - - sig { params(_: Float).returns(Float) } - def display_height=(_) - end + attr_accessor :display_height # The width of the computer display. sig { returns(Float) } - def display_width - end - - sig { params(_: Float).returns(Float) } - def display_width=(_) - end + attr_accessor :display_width # The type of computer environment to control. sig { returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) } - def environment - end - - sig do - params(_: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - .returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) - end - def environment=(_) - end + attr_accessor :environment # The type of the computer use tool. Always `computer_use_preview`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 18a3e4ee..926e6805 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -20,62 +20,19 @@ module OpenAI ) ) end - def content - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - ) - end - def content=(_) - end + attr_accessor :content # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - .returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # The type of the message input. Always `message`. sig { returns(T.nilable(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) - .returns(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol).void } + attr_writer :type # A message input to the model with a role indicating instruction following # hierarchy. 
Instructions given with the `developer` or `system` role take diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 4380f086..716402c5 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -6,55 +6,43 @@ module OpenAI class FileSearchTool < OpenAI::BaseModel # The type of the file search tool. Always `file_search`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The IDs of the vector stores to search. sig { returns(T::Array[String]) } - def vector_store_ids - end - - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def vector_store_ids=(_) - end + attr_accessor :vector_store_ids # A filter to apply based on file attributes. sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } - def filters - end + attr_reader :filters sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) - .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) - end - def filters=(_) + params( + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter) + ) + .void end + attr_writer :filters # The maximum number of results to return. This number should be between 1 and 50 # inclusive. sig { returns(T.nilable(Integer)) } - def max_num_results - end + attr_reader :max_num_results - sig { params(_: Integer).returns(Integer) } - def max_num_results=(_) - end + sig { params(max_num_results: Integer).void } + attr_writer :max_num_results # Ranking options for search. sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions)) } - def ranking_options - end + attr_reader :ranking_options sig do - params(_: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash)) - end - def ranking_options=(_) + params( + ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :ranking_options # A tool that searches for relevant content from uploaded files. Learn more about # the @@ -104,26 +92,19 @@ module OpenAI class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } - def ranker - end + attr_reader :ranker - sig do - params(_: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol) - end - def ranker=(_) - end + sig { params(ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol).void } + attr_writer :ranker # The score threshold for the file search, a number between 0 and 1. Numbers # closer to 1 will attempt to return only the most relevant results, but may # return fewer results. sig { returns(T.nilable(Float)) } - def score_threshold - end + attr_reader :score_threshold - sig { params(_: Float).returns(Float) } - def score_threshold=(_) - end + sig { params(score_threshold: Float).void } + attr_writer :score_threshold # Ranking options for search. 
sig do diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index 8513be94..7462dada 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -6,49 +6,24 @@ module OpenAI class FunctionTool < OpenAI::BaseModel # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # A JSON schema object describing the parameters of the function. sig { returns(T::Hash[Symbol, T.anything]) } - def parameters - end - - sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } - def parameters=(_) - end + attr_accessor :parameters # Whether to enforce strict parameter validation. Default `true`. sig { returns(T::Boolean) } - def strict - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def strict=(_) - end + attr_accessor :strict # The type of the function tool. Always `function`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A description of the function. Used by the model to determine whether or not to # call the function. sig { returns(T.nilable(String)) } - def description - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def description=(_) - end + attr_accessor :description # Defines a function in your own code the model can choose to call. Learn more # about diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index b43cde65..0f383412 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -9,46 +9,35 @@ module OpenAI # An item ID to list items after, used in pagination. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # An item ID to list items before, used in pagination. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. 
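# [Editor's note] How the pagination params above compose; a minimal sketch
# assuming an `input_items.list`-style method (the method name and ID format
# are assumptions, not shown in this patch):
#
#   client.responses.input_items.list(
#     "resp_123",         # hypothetical response ID
#     limit: 20,          # 1..100, default 20
#     order: :asc,        # or :desc
#     after: "item_abc"   # cursor from a previous page
#   )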
        sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) }
-        def order
-        end
+        attr_reader :order

-        sig do
-          params(_: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)
-            .returns(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)
-        end
-        def order=(_)
-        end
+        sig { params(order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol).void }
+        attr_writer :order

        sig do
          params(
diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi
index 6aabb7ec..366aa274 100644
--- a/rbi/lib/openai/models/responses/response.rbi
+++ b/rbi/lib/openai/models/responses/response.rbi
@@ -6,45 +6,30 @@ module OpenAI
      class Response < OpenAI::BaseModel
        # Unique identifier for this Response.
        sig { returns(String) }
-        def id
-        end
-
-        sig { params(_: String).returns(String) }
-        def id=(_)
-        end
+        attr_accessor :id

        # Unix timestamp (in seconds) of when this Response was created.
        sig { returns(Float) }
-        def created_at
-        end
-
-        sig { params(_: Float).returns(Float) }
-        def created_at=(_)
-        end
+        attr_accessor :created_at

        # An error object returned when the model fails to generate a Response.
        sig { returns(T.nilable(OpenAI::Models::Responses::ResponseError)) }
-        def error
-        end
+        attr_reader :error

-        sig do
-          params(_: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash)))
-            .returns(T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash)))
-        end
-        def error=(_)
-        end
+        sig { params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash))).void }
+        attr_writer :error

        # Details about why the response is incomplete.
        sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails)) }
-        def incomplete_details
-        end
+        attr_reader :incomplete_details

        sig do
-          params(_: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)))
-            .returns(T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)))
-        end
-        def incomplete_details=(_)
+          params(
+            incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash))
+          )
+            .void
        end
+        attr_writer :incomplete_details

        # Inserts a system (or developer) message as the first item in the model's
        # context.
        #
        # When used along with `previous_response_id`, the instructions from a previous
        # response will not be carried over to the next response. This makes it simple
        # to swap out system (or developer) messages in new responses.
        sig { returns(T.nilable(String)) }
-        def instructions
-        end
-
-        sig { params(_: T.nilable(String)).returns(T.nilable(String)) }
-        def instructions=(_)
-        end
+        attr_accessor :instructions

        # Set of 16 key-value pairs that can be attached to an object. This can be useful
        # for storing additional information about the object in a structured format, and
        # querying for objects via API or the dashboard.
        #
        # Keys are strings with a maximum length of 64 characters. Values are strings with
        # a maximum length of 512 characters.
        sig { returns(T.nilable(T::Hash[Symbol, String])) }
-        def metadata
-        end
-
-        sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) }
-        def metadata=(_)
-        end
+        attr_accessor :metadata

        # Model ID used to generate the response, like `gpt-4o` or `o1`.
OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -84,28 +59,11 @@ module OpenAI T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) ) end - def model - end - - sig do - params( - _: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) - ) - .returns( - T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) - ) - end - def model=(_) - end + attr_accessor :model # The object type of this resource - always set to `response`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # An array of content items generated by the model. # @@ -128,58 +86,18 @@ module OpenAI ] ) end - def output - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ] - ) - end - def output=(_) - end + attr_accessor :output # Whether to allow the model to run tool calls in parallel. sig { returns(T::Boolean) } - def parallel_tool_calls - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def parallel_tool_calls=(_) - end + attr_accessor :parallel_tool_calls # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but # not both. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model @@ -193,27 +111,7 @@ module OpenAI ) ) end - def tool_choice - end - - sig do - params( - _: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) - ) - .returns( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction - ) - ) - end - def tool_choice=(_) - end + attr_accessor :tool_choice # An array of tools the model may call while generating a response. You can # specify which tool to use by setting the `tool_choice` parameter. 
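# [Editor's note] A hedged sketch of how `tools` and `tool_choice` pair up in
# a request; the create call and its argument shapes are assumptions inferred
# from these sigs, not defined in this patch:
#
#   client.responses.create(
#     model: "gpt-4o",
#     input: "What's the weather in Oslo?",
#     tools: [{
#       type: :function,
#       name: "get_weather",
#       parameters: {type: "object", properties: {city: {type: "string"}}},
#       strict: true
#     }],
#     tool_choice: :auto
#   )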
@@ -241,33 +139,7 @@ module OpenAI ] ) end - def tools - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ] - ) - end - def tools=(_) - end + attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -275,62 +147,37 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } - def max_output_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_output_tokens=(_) - end + attr_accessor :max_output_tokens # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). sig { returns(T.nilable(String)) } - def previous_response_id - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def previous_response_id=(_) - end + attr_accessor :previous_response_id # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } - def reasoning - end + attr_reader :reasoning - sig do - params(_: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) - end - def reasoning=(_) - end + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))).void } + attr_writer :reasoning # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseStatus::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseStatus::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseStatus::OrSymbol).void } + attr_writer :status # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: @@ -338,15 +185,10 @@ module OpenAI # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } - def text - end + attr_reader :text - sig do - params(_: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) - end - def text=(_) - end + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)).void } + attr_writer :text # The truncation strategy to use for the model response. # @@ -356,39 +198,24 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } - def truncation - end - - sig do - params(_: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) - .returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) - end - def truncation=(_) - end + attr_accessor :truncation # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } - def usage - end + attr_reader :usage - sig do - params(_: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)) - end - def usage=(_) - end + sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)).void } + attr_writer :usage # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( @@ -521,15 +348,10 @@ module OpenAI class IncompleteDetails < OpenAI::BaseModel # The reason why the response is incomplete. sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } - def reason - end + attr_reader :reason - sig do - params(_: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) - .returns(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) - end - def reason=(_) - end + sig { params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol).void } + attr_writer :reason # Details about why the response is incomplete. sig do diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi index 54ec9c86..af937de8 100644 --- a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi @@ -6,21 +6,11 @@ module OpenAI class ResponseAudioDeltaEvent < OpenAI::BaseModel # A chunk of Base64 encoded response audio bytes. sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The type of the event. Always `response.audio.delta`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when there is a partial audio response. sig { params(delta: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_done_event.rbi index d60d8ffe..2f302c28 100644 --- a/rbi/lib/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_done_event.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseAudioDoneEvent < OpenAI::BaseModel # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the audio response is complete. sig { params(type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi index 072b6541..f2206761 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -6,21 +6,11 @@ module OpenAI class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel # The partial transcript of the audio response. sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The type of the event. Always `response.audio.transcript.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when there is a partial transcript of audio. sig { params(delta: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi index 940f3497..cb69a888 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel # The type of the event. Always `response.audio.transcript.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the full audio transcript is completed. sig { params(type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 21d55044..26d256d8 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel # The partial code snippet added by the code interpreter. sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The index of the output item that the code interpreter call is in progress. 
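# A sketch of consuming this stream of deltas (hypothetical `events`
# enumerable of parsed streaming events; the `...CodeDoneEvent` counterpart
# is defined just below):
#
# ```ruby
# code = +""
# events.each do |event|
#   case event
#   in OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent
#     code << event.delta
#   in OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent
#     code = event.code # the finalized snippet supersedes accumulated deltas
#   else
#     nil # other event types are ignored in this sketch
#   end
# end
# ```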
sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.code_interpreter_call.code.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a partial code snippet is added by the code interpreter. sig { params(delta: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 294664ab..c1512e6a 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel # The final code snippet output by the code interpreter. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # The index of the output item that the code interpreter call is in progress. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.code_interpreter_call.code.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when code snippet output is finalized by the code interpreter. sig { params(code: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 7001c9c4..ea6117b4 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -6,33 +6,23 @@ module OpenAI class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } - def code_interpreter_call - end + attr_reader :code_interpreter_call sig do - params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - end - def code_interpreter_call=(_) + params( + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :code_interpreter_call # The index of the output item that the code interpreter call is in progress. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.code_interpreter_call.completed`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the code interpreter call is completed. 
sig do diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 8d4c4c6f..7bc859d9 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -6,33 +6,23 @@ module OpenAI class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } - def code_interpreter_call - end + attr_reader :code_interpreter_call sig do - params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - end - def code_interpreter_call=(_) + params( + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :code_interpreter_call # The index of the output item that the code interpreter call is in progress. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.code_interpreter_call.in_progress`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a code interpreter call is in progress. sig do diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index 533e28c5..9788b54f 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -6,33 +6,23 @@ module OpenAI class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } - def code_interpreter_call - end + attr_reader :code_interpreter_call sig do - params(_: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash)) - end - def code_interpreter_call=(_) + params( + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :code_interpreter_call # The index of the output item that the code interpreter call is in progress. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.code_interpreter_call.interpreting`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the code interpreter is actively interpreting the code snippet. 
sig do diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 0e9fda65..9ddb15ba 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -6,21 +6,11 @@ module OpenAI class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # The unique ID of the code interpreter tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The code to run. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # The results of the code interpreter tool call. sig do @@ -33,50 +23,15 @@ module OpenAI ] ) end - def results - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - ] - ) - end - def results=(_) - end + attr_accessor :results # The status of the code interpreter tool call. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The type of the code interpreter tool call. Always `code_interpreter_call`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A tool call to run code. sig do @@ -135,21 +90,11 @@ module OpenAI class Logs < OpenAI::BaseModel # The logs of the code interpreter tool call. sig { returns(String) } - def logs - end - - sig { params(_: String).returns(String) } - def logs=(_) - end + attr_accessor :logs # The type of the code interpreter text output. Always `logs`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The output of a code interpreter tool call that is text. sig { params(logs: String, type: Symbol).returns(T.attached_class) } @@ -163,24 +108,11 @@ module OpenAI class Files < OpenAI::BaseModel sig { returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]) } - def files - end - - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]) - .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]) - end - def files=(_) - end + attr_accessor :files # The type of the code interpreter file output. Always `files`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The output of a code interpreter tool call that is a file. sig do @@ -213,21 +145,11 @@ module OpenAI class File < OpenAI::BaseModel # The ID of the file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The MIME type of the file. 
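# A sketch of consuming the Logs/Files result union above, given a populated
# tool call in a local `call`; the union has exactly these two variants, so
# Ruby pattern matching covers it:
#
# ```ruby
# call.results.each do |result|
#   case result
#   in OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs
#     puts result.logs
#   in OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
#     result.files.each { |f| puts "#{f.file_id} (#{f.mime_type})" }
#   end
# end
# ```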
sig { returns(String) } - def mime_type - end - - sig { params(_: String).returns(String) } - def mime_type=(_) - end + attr_accessor :mime_type sig { params(file_id: String, mime_type: String).returns(T.attached_class) } def self.new(file_id:, mime_type:) diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 8cbd9bea..4b78bb8f 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -6,24 +6,14 @@ module OpenAI class ResponseCompletedEvent < OpenAI::BaseModel # Properties of the completed response. sig { returns(OpenAI::Models::Responses::Response) } - def response - end + attr_reader :response - sig do - params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - end - def response=(_) - end + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + attr_writer :response # The type of the event. Always `response.completed`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the model response is complete. sig do diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index f3845301..205f7f16 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseComputerToolCall < OpenAI::BaseModel # The unique ID of the computer call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A click action. sig do @@ -29,85 +24,24 @@ module OpenAI ) ) end - def action - end - - sig do - params( - _: T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait - ) - ) - .returns( - T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait - ) - ) - end - def action=(_) - end + attr_accessor :action # An identifier used when responding to the tool call with output. 
sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # The pending safety checks for the computer call. sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]) } - def pending_safety_checks - end - - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]) - .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]) - end - def pending_safety_checks=(_) - end + attr_accessor :pending_safety_checks # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) - end - def status=(_) - end + attr_accessor :status # The type of the computer call. Always `computer_call`. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) @@ -188,43 +122,20 @@ module OpenAI # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } - def button - end - - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) - end - def button=(_) - end + attr_accessor :button # Specifies the event type. For a click action, this property is always set to # `click`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The x-coordinate where the click occurred. sig { returns(Integer) } - def x - end - - sig { params(_: Integer).returns(Integer) } - def x=(_) - end + attr_accessor :x # The y-coordinate where the click occurred. sig { returns(Integer) } - def y_ - end - - sig { params(_: Integer).returns(Integer) } - def y_=(_) - end + attr_accessor :y_ # A click action. sig do @@ -291,30 +202,15 @@ module OpenAI # Specifies the event type. For a double click action, this property is always set # to `double_click`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The x-coordinate where the double click occurred. sig { returns(Integer) } - def x - end - - sig { params(_: Integer).returns(Integer) } - def x=(_) - end + attr_accessor :x # The y-coordinate where the double click occurred. sig { returns(Integer) } - def y_ - end - - sig { params(_: Integer).returns(Integer) } - def y_=(_) - end + attr_accessor :y_ # A double click action. 
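# With every variant reduced to plain accessors, dispatching on the Action
# union is ordinary pattern matching (`tool_call` and `perform_click` are
# hypothetical). Note the API's `y` property surfaces as `y_`, presumably to
# avoid a bare single-letter method name:
#
# ```ruby
# case tool_call.action
# in OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click => click
#   perform_click(click.x, click.y_, button: click.button)
# in OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait
#   sleep 1
# else
#   # Drag, DoubleClick, Keypress, Move, Screenshot, Scroll, Type: similar
# end
# ```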
sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } @@ -337,25 +233,12 @@ module OpenAI # ] # ``` sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) } - def path - end - - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) - .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) - end - def path=(_) - end + attr_accessor :path # Specifies the event type. For a drag action, this property is always set to # `drag`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A drag action. sig do @@ -380,21 +263,11 @@ module OpenAI class Path < OpenAI::BaseModel # The x-coordinate. sig { returns(Integer) } - def x - end - - sig { params(_: Integer).returns(Integer) } - def x=(_) - end + attr_accessor :x # The y-coordinate. sig { returns(Integer) } - def y_ - end - - sig { params(_: Integer).returns(Integer) } - def y_=(_) - end + attr_accessor :y_ # A series of x/y coordinate pairs in the drag path. sig { params(x: Integer, y_: Integer).returns(T.attached_class) } @@ -411,22 +284,12 @@ module OpenAI # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. sig { returns(T::Array[String]) } - def keys - end - - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def keys=(_) - end + attr_accessor :keys # Specifies the event type. For a keypress action, this property is always set to # `keypress`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A collection of keypresses the model would like to perform. sig { params(keys: T::Array[String], type: Symbol).returns(T.attached_class) } @@ -442,30 +305,15 @@ module OpenAI # Specifies the event type. For a move action, this property is always set to # `move`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The x-coordinate to move to. sig { returns(Integer) } - def x - end - - sig { params(_: Integer).returns(Integer) } - def x=(_) - end + attr_accessor :x # The y-coordinate to move to. sig { returns(Integer) } - def y_ - end - - sig { params(_: Integer).returns(Integer) } - def y_=(_) - end + attr_accessor :y_ # A mouse move action. sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } @@ -481,12 +329,7 @@ module OpenAI # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A screenshot action. sig { params(type: Symbol).returns(T.attached_class) } @@ -501,49 +344,24 @@ module OpenAI class Scroll < OpenAI::BaseModel # The horizontal scroll distance. sig { returns(Integer) } - def scroll_x - end - - sig { params(_: Integer).returns(Integer) } - def scroll_x=(_) - end + attr_accessor :scroll_x # The vertical scroll distance. sig { returns(Integer) } - def scroll_y - end - - sig { params(_: Integer).returns(Integer) } - def scroll_y=(_) - end + attr_accessor :scroll_y # Specifies the event type. For a scroll action, this property is always set to # `scroll`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The x-coordinate where the scroll occurred. sig { returns(Integer) } - def x - end - - sig { params(_: Integer).returns(Integer) } - def x=(_) - end + attr_accessor :x # The y-coordinate where the scroll occurred. sig { returns(Integer) } - def y_ - end - - sig { params(_: Integer).returns(Integer) } - def y_=(_) - end + attr_accessor :y_ # A scroll action. sig do @@ -563,22 +381,12 @@ module OpenAI class Type < OpenAI::BaseModel # The text to type. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # Specifies the event type. For a type action, this property is always set to # `type`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An action to type in text. sig { params(text: String, type: Symbol).returns(T.attached_class) } @@ -594,12 +402,7 @@ module OpenAI # Specifies the event type. For a wait action, this property is always set to # `wait`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A wait action. sig { params(type: Symbol).returns(T.attached_class) } @@ -626,30 +429,15 @@ module OpenAI class PendingSafetyCheck < OpenAI::BaseModel # The ID of the pending safety check. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The type of the pending safety check. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # Details about the pending safety check. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 93d2029f..84261381 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -6,44 +6,27 @@ module OpenAI class ResponseComputerToolCallOutputItem < OpenAI::BaseModel # The unique ID of the computer call tool output. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The ID of the computer tool call that produced the output. sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # A computer screenshot image used with the computer use tool. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) } - def output - end + attr_reader :output sig do params( - _: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash)) - end - def output=(_) + .void end + attr_writer :output # The type of the computer tool call output. 
Always `computer_call_output`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The safety checks reported by the API that have been acknowledged by the # developer. @@ -54,42 +37,28 @@ module OpenAI ) ) end - def acknowledged_safety_checks - end + attr_reader :acknowledged_safety_checks sig do params( - _: T::Array[ + acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash - ) - ] - ) - end - def acknowledged_safety_checks=(_) + .void end + attr_writer :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol).void } + attr_writer :status sig do params( @@ -129,30 +98,15 @@ module OpenAI class AcknowledgedSafetyCheck < OpenAI::BaseModel # The ID of the pending safety check. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The type of the pending safety check. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # Details about the pending safety check. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index 7b4d757f..1246a3c0 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -7,30 +7,21 @@ module OpenAI # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The identifier of an uploaded file that contains the screenshot. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The URL of the screenshot image. sig { returns(T.nilable(String)) } - def image_url - end + attr_reader :image_url - sig { params(_: String).returns(String) } - def image_url=(_) - end + sig { params(image_url: String).void } + attr_writer :image_url # A computer screenshot image used with the computer use tool. 
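# The split above illustrates the convention this patch applies throughout:
# fields where the API distinguishes "absent" from explicit `null` keep a
# nilable `attr_accessor`, while merely-optional fields pair a nilable
# `attr_reader` with a non-nilable, typed `attr_writer`. A sketch (assuming
# omitted keywords take their runtime defaults):
#
# ```ruby
# shot = OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot.new
# shot.file_id              # => nil (optional field, never sent)
# shot.file_id = "file-abc" # OK: the writer accepts a plain String
# shot.file_id = nil        # flagged by `srb tc`: the writer param is not nilable
# ```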
sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index b7e8bd20..9b96ae2c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseContentPartAddedEvent < OpenAI::BaseModel # The index of the content part that was added. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The ID of the output item that the content part was added to. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the content part was added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The content part that was added. sig do @@ -37,28 +22,11 @@ module OpenAI T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) ) end - def part - end - - sig do - params( - _: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - ) - .returns( - T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - ) - end - def part=(_) - end + attr_accessor :part # The type of the event. Always `response.content_part.added`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a new content part is added. sig do diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 566da131..5c70029a 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseContentPartDoneEvent < OpenAI::BaseModel # The index of the content part that is done. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The ID of the output item that the content part was added to. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the content part was added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The content part that is done. sig do @@ -37,28 +22,11 @@ module OpenAI T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) ) end - def part - end - - sig do - params( - _: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - ) - .returns( - T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) - ) - end - def part=(_) - end + attr_accessor :part # The type of the event. Always `response.content_part.done`. 
sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a content part is done. sig do diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 9cf8f37d..10786b61 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -38,53 +38,7 @@ module OpenAI ) ) end - def input - end - - sig do - params( - _: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - ) - ) - .returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - ) - ) - end - def input=(_) - end + attr_accessor :input # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -92,15 +46,7 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) } - def model - end - - sig do - params(_: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) - .returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) - end - def model=(_) - end + attr_accessor :model # Specify additional output data to include in the model response. Currently # supported values are: @@ -111,15 +57,7 @@ module OpenAI # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } - def include - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) - .returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) - end - def include=(_) - end + attr_accessor :include # Inserts a system (or developer) message as the first item in the model's # context. @@ -128,23 +66,13 @@ module OpenAI # response will be not be carried over to the next response. This makes it simple # to swap out system (or developer) messages in new responses. 
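# The ~50-line setter signature deleted above collapses into
# `attr_accessor :input`, with the String-or-array union surviving in the
# reader sig, so both assignments below still type-check (sketch, given a
# ResponseCreateParams in `params`; the `EasyInputMessage` keywords are an
# assumption):
#
# ```ruby
# params.input = "Tell me a three sentence bedtime story about a unicorn."
# params.input = [
#   OpenAI::Models::Responses::EasyInputMessage.new(role: :user, content: "Hi")
# ]
# ```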
sig { returns(T.nilable(String)) } - def instructions - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def instructions=(_) - end + attr_accessor :instructions # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } - def max_output_tokens - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_output_tokens=(_) - end + attr_accessor :max_output_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -153,68 +81,38 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # Whether to allow the model to run tool calls in parallel. sig { returns(T.nilable(T::Boolean)) } - def parallel_tool_calls - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def parallel_tool_calls=(_) - end + attr_accessor :parallel_tool_calls # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). sig { returns(T.nilable(String)) } - def previous_response_id - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def previous_response_id=(_) - end + attr_accessor :previous_response_id # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } - def reasoning - end + attr_reader :reasoning - sig do - params(_: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))) - end - def reasoning=(_) - end + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))).void } + attr_writer :reasoning # Whether to store the generated model response for later retrieval via API. sig { returns(T.nilable(T::Boolean)) } - def store - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def store=(_) - end + attr_accessor :store # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but # not both. sig { returns(T.nilable(Float)) } - def temperature - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def temperature=(_) - end + attr_accessor :temperature # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: @@ -222,15 +120,10 @@ module OpenAI # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } - def text - end + attr_reader :text - sig do - params(_: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)) - end - def text=(_) - end + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)).void } + attr_writer :text # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model @@ -246,29 +139,20 @@ module OpenAI ) ) end - def tool_choice - end + attr_reader :tool_choice sig do params( - _: T.any( + tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) - .returns( - T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ) - ) - end - def tool_choice=(_) + .void end + attr_writer :tool_choice # An array of tools the model may call while generating a response. You can # specify which tool to use by setting the `tool_choice` parameter. @@ -298,12 +182,11 @@ module OpenAI ) ) end - def tools - end + attr_reader :tools sig do params( - _: T::Array[ + tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, OpenAI::Util::AnyHash, @@ -313,20 +196,9 @@ module OpenAI ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ] - ) - end - def tools=(_) + .void end + attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -334,12 +206,7 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } - def top_p - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def top_p=(_) - end + attr_accessor :top_p # The truncation strategy to use for the model response. # @@ -349,26 +216,16 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } - def truncation - end - - sig do - params(_: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) - .returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) - end - def truncation=(_) - end + attr_accessor :truncation # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
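# Tying the sampling knobs together, a hypothetical end-to-end request (the
# client and resource method names are an assumption, not part of this diff):
#
# ```ruby
# client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
# client.responses.create(
#   model: "gpt-4o",
#   input: "Summarize nucleus sampling in one sentence.",
#   top_p: 0.1 # only tokens in the top 10% of probability mass are sampled
# )
# ```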
sig { returns(T.nilable(String)) } - def user - end + attr_reader :user - sig { params(_: String).returns(String) } - def user=(_) - end + sig { params(user: String).void } + attr_writer :user sig do params( diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index 58257a17..0ff42037 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -6,24 +6,14 @@ module OpenAI class ResponseCreatedEvent < OpenAI::BaseModel # The response that was created. sig { returns(OpenAI::Models::Responses::Response) } - def response - end + attr_reader :response - sig do - params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - end - def response=(_) - end + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + attr_writer :response # The type of the event. Always `response.created`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An event that is emitted when a response is created. sig do diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 8b28aae4..18462364 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -6,24 +6,11 @@ module OpenAI class ResponseError < OpenAI::BaseModel # The error code for the response. sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } - def code - end - - sig do - params(_: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - end - def code=(_) - end + attr_accessor :code # A human-readable description of the error. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # An error object returned when the model fails to generate a Response. sig do diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/lib/openai/models/responses/response_error_event.rbi index f4c0e9f0..303a0210 100644 --- a/rbi/lib/openai/models/responses/response_error_event.rbi +++ b/rbi/lib/openai/models/responses/response_error_event.rbi @@ -6,39 +6,19 @@ module OpenAI class ResponseErrorEvent < OpenAI::BaseModel # The error code. sig { returns(T.nilable(String)) } - def code - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def code=(_) - end + attr_accessor :code # The error message. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # The error parameter. sig { returns(T.nilable(String)) } - def param - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def param=(_) - end + attr_accessor :param # The type of the event. Always `error`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when an error occurs. 
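# Since `code` and `param` are nullable accessors while `message` is
# required, a consumer can rely on the message and guard the rest (sketch):
#
# ```ruby
# case event
# in OpenAI::Models::Responses::ResponseErrorEvent => err
#   warn "response error #{err.code || "unknown"}: #{err.message}"
# else
#   nil
# end
# ```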
sig do diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index 7409c35c..8a3b97e1 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -6,24 +6,14 @@ module OpenAI class ResponseFailedEvent < OpenAI::BaseModel # The response that failed. sig { returns(OpenAI::Models::Responses::Response) } - def response - end + attr_reader :response - sig do - params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - end - def response=(_) - end + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + attr_writer :response # The type of the event. Always `response.failed`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An event that is emitted when a response fails. sig do diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi index ffb5cae7..fd8d4c62 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel # The ID of the output item that the file search call is initiated. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the file search call is initiated. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.file_search_call.completed`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a file search call is completed (results found). sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi index 2feeebed..621c0706 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel # The ID of the output item that the file search call is initiated. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the file search call is initiated. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.file_search_call.in_progress`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a file search call is initiated. 
sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi index b340e2ff..849e6603 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel # The ID of the output item that the file search call is initiated. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the file search call is searching. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.file_search_call.searching`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a file search is currently searching. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 957f8061..7355cae9 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -6,55 +6,24 @@ module OpenAI class ResponseFileSearchToolCall < OpenAI::BaseModel # The unique ID of the file search tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The queries used to search for files. sig { returns(T::Array[String]) } - def queries - end - - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def queries=(_) - end + attr_accessor :queries # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) - end - def status=(_) - end + attr_accessor :status # The type of the file search tool call. Always `file_search_call`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The results of the file search tool call. sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result])) } - def results - end - - sig do - params(_: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result])) - .returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result])) - end - def results=(_) - end + attr_accessor :results # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) @@ -123,51 +92,35 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
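# With `results` a nilable accessor and each result's `score` an optional
# reader (declared just below), defensive consumption looks like this
# (sketch, given a ResponseFileSearchToolCall in `call`):
#
# ```ruby
# hits = (call.results || []).select { |r| (r.score || 0.0) >= 0.5 }
# hits.map { |r| r.filename }
# ```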
sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes # The unique ID of the file. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The name of the file. sig { returns(T.nilable(String)) } - def filename - end + attr_reader :filename - sig { params(_: String).returns(String) } - def filename=(_) - end + sig { params(filename: String).void } + attr_writer :filename # The relevance score of the file - a value between 0 and 1. sig { returns(T.nilable(Float)) } - def score - end + attr_reader :score - sig { params(_: Float).returns(Float) } - def score=(_) - end + sig { params(score: Float).void } + attr_writer :score # The text that was retrieved from the file. sig { returns(T.nilable(String)) } - def text - end + attr_reader :text - sig { params(_: String).returns(String) } - def text=(_) - end + sig { params(text: String).void } + attr_writer :text sig do params( diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index aab69ea5..8b4a57ef 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -7,41 +7,27 @@ module OpenAI # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } - def schema - end - - sig { params(_: T::Hash[Symbol, T.anything]).returns(T::Hash[Symbol, T.anything]) } - def schema=(_) - end + attr_accessor :schema # The type of response format being defined. Always `json_schema`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A description of what the response format is for, used by the model to determine # how to respond in the format. sig { returns(T.nilable(String)) } - def description - end + attr_reader :description - sig { params(_: String).returns(String) } - def description=(_) - end + sig { params(description: String).void } + attr_writer :description # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` @@ -49,12 +35,7 @@ module OpenAI # learn more, read the # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } - def strict - end - - sig { params(_: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) } - def strict=(_) - end + attr_accessor :strict # JSON Schema response format. Used to generate structured JSON responses. 
Learn # more about diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi index bd790e94..5aada226 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -6,39 +6,19 @@ module OpenAI class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel # The function-call arguments delta that is added. sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The ID of the output item that the function-call arguments delta is added to. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the function-call arguments delta is added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.function_call_arguments.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when there is a partial function-call arguments delta. sig do diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi index 48684e3e..ba8b13d1 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -6,38 +6,18 @@ module OpenAI class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel # The function-call arguments. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The ID of the item. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when function-call arguments are finalized. sig do diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index b721ef25..aab3c789 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -6,61 +6,34 @@ module OpenAI class ResponseFunctionToolCall < OpenAI::BaseModel # A JSON string of the arguments to pass to the function. sig { returns(String) } - def arguments - end - - sig { params(_: String).returns(String) } - def arguments=(_) - end + attr_accessor :arguments # The unique ID of the function tool call generated by the model. sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # The name of the function to run. 
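
The two argument-event models above are consumed as a stream: deltas arrive incrementally and the done event carries the final string. A minimal sketch of rebuilding the arguments per output item, where `stream` stands in for a hypothetical Enumerable of already-parsed event objects (not an API shown in this patch):

    args = Hash.new { |h, k| h[k] = +"" }

    stream.each do |event|
      case event
      when OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
        args[event.item_id] << event.delta
      when OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent
        # The done event carries the complete arguments, so prefer it over
        # the accumulated deltas.
        args[event.item_id] = event.arguments
      end
    end
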
sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The type of the function tool call. Always `function_call`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The unique ID of the function tool call. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol).void } + attr_writer :status # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi index d0e917fc..8a6699fb 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall # The unique ID of the function tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index f78c8d84..fcbdc3a6 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -6,52 +6,27 @@ module OpenAI class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # The unique ID of the function call tool output. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The unique ID of the function tool call generated by the model. sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # A JSON string of the output of the function tool call. sig { returns(String) } - def output - end - - sig { params(_: String).returns(String) } - def output=(_) - end + attr_accessor :output # The type of the function tool call output. Always `function_call_output`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
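
ResponseFunctionToolCall above also shows how the refactor distinguishes required from optional properties. Required fields keep a plain accessor; fields that are nilable only because the API may omit them get a nilable reader paired with a writer that refuses nil:

    # Required: non-nil in both directions.
    sig { returns(String) }
    attr_accessor :call_id

    # Optional: reading may yield nil when the API omitted the field, but
    # the writer only accepts a concrete String, so nil cannot be assigned.
    sig { returns(T.nilable(String)) }
    attr_reader :id

    sig { params(id: String).void }
    attr_writer :id

Fields that are required but genuinely nullable on the wire (for example `file_id` on ResponseInputImage later in this patch) instead keep attr_accessor with a T.nilable sig, since nil is a legal value to write back.
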
sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol).void } + attr_writer :status sig do params( diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 0fa5fadb..647bbe36 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -6,33 +6,15 @@ module OpenAI class ResponseFunctionWebSearch < OpenAI::BaseModel # The unique ID of the web search tool call. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The status of the web search tool call. sig { returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) - end - def status=(_) - end + attr_accessor :status # The type of the web search tool call. Always `web_search_call`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index f694955a..7895e6a8 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -6,24 +6,14 @@ module OpenAI class ResponseInProgressEvent < OpenAI::BaseModel # The response that is in progress. sig { returns(OpenAI::Models::Responses::Response) } - def response - end + attr_reader :response - sig do - params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - end - def response=(_) - end + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + attr_writer :response # The type of the event. Always `response.in_progress`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when the response is in progress. sig do diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index 07ca324b..ea974ede 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -6,24 +6,14 @@ module OpenAI class ResponseIncompleteEvent < OpenAI::BaseModel # The response that was incomplete. 
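
The `status` pair at the top of this hunk (on ResponseFunctionToolCallOutputItem) shows the enum convention: the reader returns Status::TaggedSymbol, the validated member type, while the writer accepts the wider Status::OrSymbol, which, going by the naming used across this gem, should admit either a tagged member or a plain Symbol. The aliases themselves are defined elsewhere, so this is the pattern as it reads here, with `item` standing for some instance:

    sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) }
    attr_reader :status

    sig { params(status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol).void }
    attr_writer :status

    # So a call site can assign a bare symbol and still type-check:
    item.status = :in_progress
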
sig { returns(OpenAI::Models::Responses::Response) } - def response - end + attr_reader :response - sig do - params(_: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)) - end - def response=(_) - end + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + attr_writer :response # The type of the event. Always `response.incomplete`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An event that is emitted when a response finishes as incomplete. sig do diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index e0768a09..c48ea4f2 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -6,33 +6,15 @@ module OpenAI class ResponseInputAudio < OpenAI::BaseModel # Base64-encoded audio data. sig { returns(String) } - def data - end - - sig { params(_: String).returns(String) } - def data=(_) - end + attr_accessor :data # The format of the audio data. Currently supported formats are `mp3` and `wav`. sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) } - def format_ - end - - sig do - params(_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) - end - def format_=(_) - end + attr_accessor :format_ # The type of the input item. Always `input_audio`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An audio input to the model. sig do diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/lib/openai/models/responses/response_input_file.rbi index cedf90ec..05c60269 100644 --- a/rbi/lib/openai/models/responses/response_input_file.rbi +++ b/rbi/lib/openai/models/responses/response_input_file.rbi @@ -6,39 +6,28 @@ module OpenAI class ResponseInputFile < OpenAI::BaseModel # The type of the input item. Always `input_file`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The content of the file to be sent to the model. sig { returns(T.nilable(String)) } - def file_data - end + attr_reader :file_data - sig { params(_: String).returns(String) } - def file_data=(_) - end + sig { params(file_data: String).void } + attr_writer :file_data # The ID of the file to be sent to the model. sig { returns(T.nilable(String)) } - def file_id - end + attr_reader :file_id - sig { params(_: String).returns(String) } - def file_id=(_) - end + sig { params(file_id: String).void } + attr_writer :file_id # The name of the file to be sent to the model. sig { returns(T.nilable(String)) } - def filename - end + attr_reader :filename - sig { params(_: String).returns(String) } - def filename=(_) - end + sig { params(filename: String).void } + attr_writer :filename # A file input to the model. 
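
Model-valued properties, like `response` on the in-progress and incomplete events above, get a third variant: the reader keeps the precise model type while the writer is widened to a union with OpenAI::Util::AnyHash, so callers can assign a plain hash and, presumably via the SDK's coercion layer (which this patch does not show), have it converted to the model:

    sig { returns(OpenAI::Models::Responses::Response) }
    attr_reader :response

    sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void }
    attr_writer :response
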
sig do diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 12174be2..677129df 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -7,43 +7,20 @@ module OpenAI # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } - def detail - end - - sig do - params(_: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) - end - def detail=(_) - end + attr_accessor :detail # The type of the input item. Always `input_image`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The ID of the file to be sent to the model. sig { returns(T.nilable(String)) } - def file_id - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def file_id=(_) - end + attr_accessor :file_id # The URL of the image to be sent to the model. A fully qualified URL or base64 # encoded image in a data URL. sig { returns(T.nilable(String)) } - def image_url - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def image_url=(_) - end + attr_accessor :image_url # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 74e9e516..9c3341e5 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -44,68 +44,26 @@ module OpenAI ] ) end - def content - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - end - def content=(_) - end + attr_accessor :content # The role of the message input. One of `user`, `system`, or `developer`. sig { returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) - end - def role=(_) - end + attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol).void } + attr_writer :status # The type of the message input. Always set to `message`. 
sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol).void } + attr_writer :type # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -215,44 +173,30 @@ module OpenAI class ComputerCallOutput < OpenAI::BaseModel # The ID of the computer tool call that produced the output. sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # A computer screenshot image used with the computer use tool. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) } - def output - end + attr_reader :output sig do params( - _: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) ) - .returns(T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash)) - end - def output=(_) + .void end + attr_writer :output # The type of the computer tool call output. Always `computer_call_output`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The ID of the computer tool call output. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id # The safety checks reported by the API that have been acknowledged by the # developer. @@ -263,42 +207,28 @@ module OpenAI ) ) end - def acknowledged_safety_checks - end + attr_reader :acknowledged_safety_checks sig do params( - _: T::Array[ + acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, OpenAI::Util::AnyHash ) ] ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash - ) - ] - ) - end - def acknowledged_safety_checks=(_) + .void end + attr_writer :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol).void } + attr_writer :status # The output of a computer tool call. sig do @@ -339,30 +269,15 @@ module OpenAI class AcknowledgedSafetyCheck < OpenAI::BaseModel # The ID of the pending safety check. 
sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The type of the pending safety check. sig { returns(String) } - def code - end - - sig { params(_: String).returns(String) } - def code=(_) - end + attr_accessor :code # Details about the pending safety check. sig { returns(String) } - def message - end - - sig { params(_: String).returns(String) } - def message=(_) - end + attr_accessor :message # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } @@ -408,53 +323,31 @@ module OpenAI class FunctionCallOutput < OpenAI::BaseModel # The unique ID of the function tool call generated by the model. sig { returns(String) } - def call_id - end - - sig { params(_: String).returns(String) } - def call_id=(_) - end + attr_accessor :call_id # A JSON string of the output of the function tool call. sig { returns(String) } - def output - end - - sig { params(_: String).returns(String) } - def output=(_) - end + attr_accessor :output # The type of the function tool call output. Always `function_call_output`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The unique ID of the function tool call output. Populated when this item is # returned via API. sig { returns(T.nilable(String)) } - def id - end + attr_reader :id - sig { params(_: String).returns(String) } - def id=(_) - end + sig { params(id: String).void } + attr_writer :id # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol).void } + attr_writer :status # The output of a function tool call. sig do @@ -519,21 +412,11 @@ module OpenAI class ItemReference < OpenAI::BaseModel # The ID of the item to reference. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The type of item to reference. Always `item_reference`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An internal identifier for an item to reference. sig { params(id: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index e6579bda..18f699d6 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseInputMessageItem < OpenAI::BaseModel # The unique ID of the message input. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # A list of one or many input items to the model, containing different content # types. 
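
ResponseInputItem::FunctionCallOutput above is the item a caller sends back after executing a function call. A sketch of constructing one, assuming the keyword-argument constructor these models use elsewhere in this patch and assuming `type` defaults to :function_call_output as its comment states (the constructor body is not shown in these hunks):

    require "json"
    require "openai"

    result = {temperature_c: 21}
    output_item = OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput.new(
      call_id: "call_abc123",        # echo the model's call_id back
      output: JSON.generate(result)  # output is a JSON string, not a Hash
    )
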
@@ -26,68 +21,26 @@ module OpenAI ] ) end - def content - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ) - end - def content=(_) - end + attr_accessor :content # The role of the message input. One of `user`, `system`, or `developer`. sig { returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } - def role - end - - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - end - def role=(_) - end + attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol).void } + attr_writer :status # The type of the message input. Always set to `message`. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)) } - def type - end + attr_reader :type - sig do - params(_: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol) - end - def type=(_) - end + sig { params(type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol).void } + attr_writer :type sig do params( diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/lib/openai/models/responses/response_input_text.rbi index 16fc4040..5c0838ec 100644 --- a/rbi/lib/openai/models/responses/response_input_text.rbi +++ b/rbi/lib/openai/models/responses/response_input_text.rbi @@ -6,21 +6,11 @@ module OpenAI class ResponseInputText < OpenAI::BaseModel # The text input to the model. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the input item. Always `input_text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A text input to the model. 
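
With the content union above, an input message mixes text, image, and file parts in one array. A sketch, again assuming keyword constructors and the documented `type` defaults, with the role passed as a bare symbol as the OrSymbol accessor permits:

    require "openai"

    message = OpenAI::Models::Responses::ResponseInputItem::Message.new(
      role: :user,
      content: [
        OpenAI::Models::Responses::ResponseInputText.new(text: "Summarize this file:"),
        OpenAI::Models::Responses::ResponseInputFile.new(file_id: "file-abc123")
      ]
    )
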
sig { params(text: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index afdd9b6d..a7e4d918 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -21,77 +21,23 @@ module OpenAI ] ) end - def data - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) - ] - ) - end - def data=(_) - end + attr_accessor :data # The ID of the first item in the list. sig { returns(String) } - def first_id - end - - sig { params(_: String).returns(String) } - def first_id=(_) - end + attr_accessor :first_id # Whether there are more items available. sig { returns(T::Boolean) } - def has_more - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def has_more=(_) - end + attr_accessor :has_more # The ID of the last item in the list. sig { returns(String) } - def last_id - end - - sig { params(_: String).returns(String) } - def last_id=(_) - end + attr_accessor :last_id # The type of object returned, must be `list`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # A list of Response items. sig do diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/lib/openai/models/responses/response_output_audio.rbi index 162e5138..c5a26f54 100644 --- a/rbi/lib/openai/models/responses/response_output_audio.rbi +++ b/rbi/lib/openai/models/responses/response_output_audio.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseOutputAudio < OpenAI::BaseModel # Base64-encoded audio data from the model. sig { returns(String) } - def data - end - - sig { params(_: String).returns(String) } - def data=(_) - end + attr_accessor :data # The transcript of the audio data from the model. sig { returns(String) } - def transcript - end - - sig { params(_: String).returns(String) } - def transcript=(_) - end + attr_accessor :transcript # The type of the output audio. Always `output_audio`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An audio output from the model. 
sig { params(data: String, transcript: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 42d8f23b..648d48d9 100644 --- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -17,51 +17,15 @@ module OpenAI ) ) end - def item - end - - sig do - params( - _: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ) - .returns( - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ) - end - def item=(_) - end + attr_accessor :item # The index of the output item that was added. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.output_item.added`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a new output item is added. sig do diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index 0e58ab9d..1d8ebd83 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -17,51 +17,15 @@ module OpenAI ) ) end - def item - end - - sig do - params( - _: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ) - .returns( - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ) - end - def item=(_) - end + attr_accessor :item # The index of the output item that was marked done. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.output_item.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when an output item is marked done. 
sig do diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 9b6bf634..dabf3215 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -6,12 +6,7 @@ module OpenAI class ResponseOutputMessage < OpenAI::BaseModel # The unique ID of the output message. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The content of the output message. sig do @@ -19,50 +14,20 @@ module OpenAI T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] ) end - def content - end - - sig do - params( - _: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - ) - .returns( - T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - ) - end - def content=(_) - end + attr_accessor :content # The role of the output message. Always `assistant`. sig { returns(Symbol) } - def role - end - - sig { params(_: Symbol).returns(Symbol) } - def role=(_) - end + attr_accessor :role # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) - end - def status=(_) - end + attr_accessor :status # The type of the output message. Always `message`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # An output message from the model. sig do diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/lib/openai/models/responses/response_output_refusal.rbi index 1db3c101..5e416cf9 100644 --- a/rbi/lib/openai/models/responses/response_output_refusal.rbi +++ b/rbi/lib/openai/models/responses/response_output_refusal.rbi @@ -6,21 +6,11 @@ module OpenAI class ResponseOutputRefusal < OpenAI::BaseModel # The refusal explanationfrom the model. sig { returns(String) } - def refusal - end - - sig { params(_: String).returns(String) } - def refusal=(_) - end + attr_accessor :refusal # The type of the refusal. Always `refusal`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A refusal from the model. 
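
On the output side, ResponseOutputMessage#content is an array of the ResponseOutputText | ResponseOutputRefusal union above, so consumers branch on the concrete class. A short sketch, with `message` standing for a received ResponseOutputMessage:

    message.content.each do |part|
      case part
      when OpenAI::Models::Responses::ResponseOutputText
        print(part.text)
      when OpenAI::Models::Responses::ResponseOutputRefusal
        warn("refused: #{part.refusal}")
      end
    end
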
sig { params(refusal: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 5c365bc5..36a04fa3 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -16,49 +16,15 @@ module OpenAI ] ) end - def annotations - end - - sig do - params( - _: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) - ] - ) - .returns( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) - ] - ) - end - def annotations=(_) - end + attr_accessor :annotations # The text output from the model. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the output text. Always `output_text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A text output from the model. sig do @@ -116,30 +82,15 @@ module OpenAI class FileCitation < OpenAI::BaseModel # The ID of the file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The index of the file in the list of files. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of the file citation. Always `file_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } @@ -154,48 +105,23 @@ module OpenAI class URLCitation < OpenAI::BaseModel # The index of the last character of the URL citation in the message. sig { returns(Integer) } - def end_index - end - - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + attr_accessor :end_index # The index of the first character of the URL citation in the message. sig { returns(Integer) } - def start_index - end - - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + attr_accessor :start_index # The title of the web resource. sig { returns(String) } - def title - end - - sig { params(_: String).returns(String) } - def title=(_) - end + attr_accessor :title # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The URL of the web resource. sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # A citation for a web resource used to generate a model response. sig do @@ -223,30 +149,15 @@ module OpenAI class FilePath < OpenAI::BaseModel # The ID of the file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The index of the file in the list of files. 
sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of the file path. Always `file_path`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 12f95019..7c7b4827 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -6,46 +6,23 @@ module OpenAI class ResponseReasoningItem < OpenAI::BaseModel # The unique identifier of the reasoning content. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # Reasoning text contents. sig { returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]) } - def summary - end - - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]) - .returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]) - end - def summary=(_) - end + attr_accessor :summary # The type of the object. Always `reasoning`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } - def status - end + attr_reader :status - sig do - params(_: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - .returns(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol) - end - def status=(_) - end + sig { params(status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol).void } + attr_writer :status # A description of the chain of thought used by a reasoning model while generating # a response. @@ -78,21 +55,11 @@ module OpenAI class Summary < OpenAI::BaseModel # A short summary of the reasoning used by the model when generating the response. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the object. Always `summary_text`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :summary_text) diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi index f4a53f02..cbbd66ed 100644 --- a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi @@ -6,48 +6,23 @@ module OpenAI class ResponseRefusalDeltaEvent < OpenAI::BaseModel # The index of the content part that the refusal text is added to. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The refusal text that is added. 
sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The ID of the output item that the refusal text is added to. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the refusal text is added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.refusal.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when there is a partial refusal text. sig do diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi index 1e4bf80b..1f24f6df 100644 --- a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi @@ -6,48 +6,23 @@ module OpenAI class ResponseRefusalDoneEvent < OpenAI::BaseModel # The index of the content part that the refusal text is finalized. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The ID of the output item that the refusal text is finalized. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the refusal text is finalized. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The refusal text that is finalized. sig { returns(String) } - def refusal - end - - sig { params(_: String).returns(String) } - def refusal=(_) - end + attr_accessor :refusal # The type of the event. Always `response.refusal.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when refusal text is finalized. sig do diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 91c155d7..da5deb7b 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -10,15 +10,10 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } - def include - end + attr_reader :include - sig do - params(_: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) - .returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]) - end - def include=(_) - end + sig { params(include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]).void } + attr_writer :include sig do params( diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 2be76360..55d9384e 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -14,72 +14,27 @@ module OpenAI ) ) end - def annotation - end - - sig do - params( - _: T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - ) - .returns( - T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - ) - end - def annotation=(_) - end + attr_accessor :annotation # The index of the annotation that was added. sig { returns(Integer) } - def annotation_index - end - - sig { params(_: Integer).returns(Integer) } - def annotation_index=(_) - end + attr_accessor :annotation_index # The index of the content part that the text annotation was added to. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The ID of the output item that the text annotation was added to. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the text annotation was added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.output_text.annotation.added`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a text annotation is added. sig do @@ -146,30 +101,15 @@ module OpenAI class FileCitation < OpenAI::BaseModel # The ID of the file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The index of the file in the list of files. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of the file citation. Always `file_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } @@ -184,48 +124,23 @@ module OpenAI class URLCitation < OpenAI::BaseModel # The index of the last character of the URL citation in the message. 
sig { returns(Integer) } - def end_index - end - - sig { params(_: Integer).returns(Integer) } - def end_index=(_) - end + attr_accessor :end_index # The index of the first character of the URL citation in the message. sig { returns(Integer) } - def start_index - end - - sig { params(_: Integer).returns(Integer) } - def start_index=(_) - end + attr_accessor :start_index # The title of the web resource. sig { returns(String) } - def title - end - - sig { params(_: String).returns(String) } - def title=(_) - end + attr_accessor :title # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # The URL of the web resource. sig { returns(String) } - def url - end - - sig { params(_: String).returns(String) } - def url=(_) - end + attr_accessor :url # A citation for a web resource used to generate a model response. sig do @@ -253,30 +168,15 @@ module OpenAI class FilePath < OpenAI::BaseModel # The ID of the file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The index of the file in the list of files. sig { returns(Integer) } - def index - end - - sig { params(_: Integer).returns(Integer) } - def index=(_) - end + attr_accessor :index # The type of the file path. Always `file_path`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index a04a62b1..240a7e92 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -28,29 +28,20 @@ module OpenAI ) ) end - def format_ - end + attr_reader :format_ sig do params( - _: T.any( + format_: T.any( OpenAI::Models::ResponseFormatText, OpenAI::Util::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) ) - .returns( - T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject - ) - ) - end - def format_=(_) + .void end + attr_writer :format_ # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_delta_event.rbi index 50307018..efb4c979 100644 --- a/rbi/lib/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_delta_event.rbi @@ -6,48 +6,23 @@ module OpenAI class ResponseTextDeltaEvent < OpenAI::BaseModel # The index of the content part that the text delta was added to. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The text delta that was added. sig { returns(String) } - def delta - end - - sig { params(_: String).returns(String) } - def delta=(_) - end + attr_accessor :delta # The ID of the output item that the text delta was added to. 
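
One naming detail recurs in this patch: properties whose wire name is `format` (ResponseInputAudio earlier, ResponseTextConfig above) surface in Ruby as `format_`, presumably to avoid shadowing Kernel#format; how the trailing underscore maps back to the `format` JSON key is handled elsewhere in the gem and is not shown in these hunks. The accessor itself is otherwise the standard shape:

    sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) }
    attr_accessor :format_
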
sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the text delta was added to. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.output_text.delta`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when there is an additional text delta. sig do diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/lib/openai/models/responses/response_text_done_event.rbi index 56e6ddd7..b8e7bf9a 100644 --- a/rbi/lib/openai/models/responses/response_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_done_event.rbi @@ -6,48 +6,23 @@ module OpenAI class ResponseTextDoneEvent < OpenAI::BaseModel # The index of the content part that the text content is finalized. sig { returns(Integer) } - def content_index - end - - sig { params(_: Integer).returns(Integer) } - def content_index=(_) - end + attr_accessor :content_index # The ID of the output item that the text content is finalized. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the text content is finalized. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The text content that is finalized. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of the event. Always `response.output_text.done`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when text content is finalized. sig do diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 10397181..b3158710 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -6,54 +6,39 @@ module OpenAI class ResponseUsage < OpenAI::BaseModel # The number of input tokens. sig { returns(Integer) } - def input_tokens - end - - sig { params(_: Integer).returns(Integer) } - def input_tokens=(_) - end + attr_accessor :input_tokens # A detailed breakdown of the input tokens. sig { returns(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) } - def input_tokens_details - end + attr_reader :input_tokens_details sig do - params(_: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash)) - end - def input_tokens_details=(_) + params( + input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :input_tokens_details # The number of output tokens. sig { returns(Integer) } - def output_tokens - end - - sig { params(_: Integer).returns(Integer) } - def output_tokens=(_) - end + attr_accessor :output_tokens # A detailed breakdown of the output tokens. 
sig { returns(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails) } - def output_tokens_details - end + attr_reader :output_tokens_details sig do - params(_: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash)) - end - def output_tokens_details=(_) + params( + output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :output_tokens_details # The total number of tokens used. sig { returns(Integer) } - def total_tokens - end - - sig { params(_: Integer).returns(Integer) } - def total_tokens=(_) - end + attr_accessor :total_tokens # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. @@ -89,12 +74,7 @@ module OpenAI # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). sig { returns(Integer) } - def cached_tokens - end - - sig { params(_: Integer).returns(Integer) } - def cached_tokens=(_) - end + attr_accessor :cached_tokens # A detailed breakdown of the input tokens. sig { params(cached_tokens: Integer).returns(T.attached_class) } @@ -109,12 +89,7 @@ module OpenAI class OutputTokensDetails < OpenAI::BaseModel # The number of reasoning tokens. sig { returns(Integer) } - def reasoning_tokens - end - - sig { params(_: Integer).returns(Integer) } - def reasoning_tokens=(_) - end + attr_accessor :reasoning_tokens # A detailed breakdown of the output tokens. sig { params(reasoning_tokens: Integer).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi index 1348fded..b02a5b1f 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel # Unique ID for the output item associated with the web search call. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the web search call is associated with. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.web_search_call.completed`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a web search call is completed. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi index 891725d6..11aa3340 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel # Unique ID for the output item associated with the web search call. 
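
ResponseUsage above composes its nested detail models with the same reader-plus-AnyHash-writer pattern. A sketch of building one and deriving the uncached input count, assuming the keyword constructor implied by the sigs (passing the nested details as hashes relies on the AnyHash coercion described earlier):

    require "openai"

    usage = OpenAI::Models::Responses::ResponseUsage.new(
      input_tokens: 1_200,
      input_tokens_details: {cached_tokens: 1_024},
      output_tokens: 300,
      output_tokens_details: {reasoning_tokens: 128},
      total_tokens: 1_500
    )

    uncached = usage.input_tokens - usage.input_tokens_details.cached_tokens
    # => 176
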
sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the web search call is associated with. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.web_search_call.in_progress`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a web search call is initiated. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi index a2f0a421..c97724d2 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi @@ -6,30 +6,15 @@ module OpenAI class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel # Unique ID for the output item associated with the web search call. sig { returns(String) } - def item_id - end - - sig { params(_: String).returns(String) } - def item_id=(_) - end + attr_accessor :item_id # The index of the output item that the web search call is associated with. sig { returns(Integer) } - def output_index - end - - sig { params(_: Integer).returns(Integer) } - def output_index=(_) - end + attr_accessor :output_index # The type of the event. Always `response.web_search_call.searching`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Emitted when a web search call is executing. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/lib/openai/models/responses/tool_choice_function.rbi index a8afd2d4..29aa1b6c 100644 --- a/rbi/lib/openai/models/responses/tool_choice_function.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_function.rbi @@ -6,21 +6,11 @@ module OpenAI class ToolChoiceFunction < OpenAI::BaseModel # The name of the function to call. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # For function calling, the type is always `function`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Use this option to force the model to call a specific function. sig { params(name: String, type: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index efede45b..3332abb3 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -13,15 +13,7 @@ module OpenAI # - `web_search_preview` # - `computer_use_preview` sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - .returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # Indicates that the model should use a built-in tool to generate a response. 
# [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index ae1af675..c100a08a 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -9,39 +9,26 @@ module OpenAI # - `web_search_preview` # - `web_search_preview_2025_03_11` sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) - .returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) - end - def type=(_) - end + attr_accessor :type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } - def search_context_size - end + attr_reader :search_context_size - sig do - params(_: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - .returns(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol) - end - def search_context_size=(_) - end + sig { params(search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol).void } + attr_writer :search_context_size sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)) } - def user_location - end + attr_reader :user_location sig do - params(_: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))) - end - def user_location=(_) + params( + user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :user_location # This tool searches the web for relevant results to use in a response. Learn more # about the @@ -116,50 +103,37 @@ module OpenAI class UserLocation < OpenAI::BaseModel # The type of location approximation. Always `approximate`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } - def city - end + attr_reader :city - sig { params(_: String).returns(String) } - def city=(_) - end + sig { params(city: String).void } + attr_writer :city # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. sig { returns(T.nilable(String)) } - def country - end + attr_reader :country - sig { params(_: String).returns(String) } - def country=(_) - end + sig { params(country: String).void } + attr_writer :country # Free text input for the region of the user, e.g. `California`. sig { returns(T.nilable(String)) } - def region - end + attr_reader :region - sig { params(_: String).returns(String) } - def region=(_) - end + sig { params(region: String).void } + attr_writer :region # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. 
sig { returns(T.nilable(String)) } - def timezone - end + attr_reader :timezone - sig { params(_: String).returns(String) } - def timezone=(_) - end + sig { params(timezone: String).void } + attr_writer :timezone sig do params(city: String, country: String, region: String, timezone: String, type: Symbol) diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index 1d8d219c..bb5abe4d 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -7,22 +7,12 @@ module OpenAI # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } - def chunk_overlap_tokens - end - - sig { params(_: Integer).returns(Integer) } - def chunk_overlap_tokens=(_) - end + attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } - def max_chunk_size_tokens - end - - sig { params(_: Integer).returns(Integer) } - def max_chunk_size_tokens=(_) - end + attr_accessor :max_chunk_size_tokens sig { params(chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer).returns(T.attached_class) } def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:) diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index deafa473..eac14e48 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -4,24 +4,14 @@ module OpenAI module Models class StaticFileChunkingStrategyObject < OpenAI::BaseModel sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } - def static - end + attr_reader :static - sig do - params(_: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) - end - def static=(_) - end + sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)).void } + attr_writer :static # Always `static`. sig { returns(Symbol) } - def type - end - - sig { params(_: Symbol).returns(Symbol) } - def type=(_) - end + attr_accessor :type sig do params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash), type: Symbol) diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index 4c4a6100..5387db39 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -4,24 +4,14 @@ module OpenAI module Models class StaticFileChunkingStrategyObjectParam < OpenAI::BaseModel sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } - def static - end + attr_reader :static - sig do - params(_: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)) - end - def static=(_) - end + sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)).void } + attr_writer :static # Always `static`. 
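The static chunking strategy above is the one spot in these models with explicit numeric invariants: `chunk_overlap_tokens` must not exceed half of `max_chunk_size_tokens`, and the chunk size itself must stay between 100 and 4096 tokens (default 800). A dependency-free sketch of those documented checks (the helper is illustrative, not SDK code):

# Illustrative validation of the documented static-chunking invariants.
def validate_static_chunking(chunk_overlap_tokens:, max_chunk_size_tokens: 800)
  unless (100..4096).cover?(max_chunk_size_tokens)
    raise ArgumentError, "max_chunk_size_tokens must be between 100 and 4096"
  end
  if chunk_overlap_tokens > max_chunk_size_tokens / 2
    raise ArgumentError, "chunk_overlap_tokens must not exceed half of max_chunk_size_tokens"
  end
  { chunk_overlap_tokens: chunk_overlap_tokens, max_chunk_size_tokens: max_chunk_size_tokens }
end

validate_static_chunking(chunk_overlap_tokens: 400) # ok: 400 <= 800 / 2
begin
  validate_static_chunking(chunk_overlap_tokens: 500)
rescue ArgumentError => e
  puts e.message # the overlap exceeds half of the default chunk size
end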
      sig { returns(Symbol) }
-      def type
-      end
-
-      sig { params(_: Symbol).returns(Symbol) }
-      def type=(_)
-      end
+      attr_accessor :type

      # Customize your own chunking strategy by setting chunk size and chunk overlap.
      sig do
diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi
index 949a6347..49ff6e17 100644
--- a/rbi/lib/openai/models/upload.rbi
+++ b/rbi/lib/openai/models/upload.rbi
@@ -5,92 +5,44 @@ module OpenAI
    class Upload < OpenAI::BaseModel
      # The Upload unique identifier, which can be referenced in API endpoints.
      sig { returns(String) }
-      def id
-      end
-
-      sig { params(_: String).returns(String) }
-      def id=(_)
-      end
+      attr_accessor :id

      # The intended number of bytes to be uploaded.
      sig { returns(Integer) }
-      def bytes
-      end
-
-      sig { params(_: Integer).returns(Integer) }
-      def bytes=(_)
-      end
+      attr_accessor :bytes

      # The Unix timestamp (in seconds) for when the Upload was created.
      sig { returns(Integer) }
-      def created_at
-      end
-
-      sig { params(_: Integer).returns(Integer) }
-      def created_at=(_)
-      end
+      attr_accessor :created_at

      # The Unix timestamp (in seconds) for when the Upload will expire.
      sig { returns(Integer) }
-      def expires_at
-      end
-
-      sig { params(_: Integer).returns(Integer) }
-      def expires_at=(_)
-      end
+      attr_accessor :expires_at

      # The name of the file to be uploaded.
      sig { returns(String) }
-      def filename
-      end
-
-      sig { params(_: String).returns(String) }
-      def filename=(_)
-      end
+      attr_accessor :filename

      # The object type, which is always "upload".
      sig { returns(Symbol) }
-      def object
-      end
-
-      sig { params(_: Symbol).returns(Symbol) }
-      def object=(_)
-      end
+      attr_accessor :object

      # The intended purpose of the file.
      # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose)
      # for acceptable values.
      sig { returns(String) }
-      def purpose
-      end
-
-      sig { params(_: String).returns(String) }
-      def purpose=(_)
-      end
+      attr_accessor :purpose

      # The status of the Upload.
      sig { returns(OpenAI::Models::Upload::Status::TaggedSymbol) }
-      def status
-      end
-
-      sig do
-        params(_: OpenAI::Models::Upload::Status::TaggedSymbol)
-          .returns(OpenAI::Models::Upload::Status::TaggedSymbol)
-      end
-      def status=(_)
-      end
+      attr_accessor :status

      # The `File` object represents a document that has been uploaded to OpenAI.
      sig { returns(T.nilable(OpenAI::Models::FileObject)) }
-      def file
-      end
+      attr_reader :file

-      sig do
-        params(_: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)))
-          .returns(T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)))
-      end
-      def file=(_)
-      end
+      sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash))).void }
+      attr_writer :file

      # The Upload object can accept byte chunks in the form of Parts.
      sig do
diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi
index 4503bab7..676ce576 100644
--- a/rbi/lib/openai/models/upload_complete_params.rbi
+++ b/rbi/lib/openai/models/upload_complete_params.rbi
@@ -8,22 +8,15 @@ module OpenAI

      # The ordered list of Part IDs.
      sig { returns(T::Array[String]) }
-      def part_ids
-      end
-
-      sig { params(_: T::Array[String]).returns(T::Array[String]) }
-      def part_ids=(_)
-      end
+      attr_accessor :part_ids

      # The optional md5 checksum for the file contents to verify if the bytes uploaded
      # match what you expect.
sig { returns(T.nilable(String)) } - def md5 - end + attr_reader :md5 - sig { params(_: String).returns(String) } - def md5=(_) - end + sig { params(md5: String).void } + attr_writer :md5 sig do params( diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index d6b9c299..9fdac8ce 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -8,45 +8,25 @@ module OpenAI # The number of bytes in the file you are uploading. sig { returns(Integer) } - def bytes - end - - sig { params(_: Integer).returns(Integer) } - def bytes=(_) - end + attr_accessor :bytes # The name of the file to upload. sig { returns(String) } - def filename - end - - sig { params(_: String).returns(String) } - def filename=(_) - end + attr_accessor :filename # The MIME type of the file. # # This must fall within the supported MIME types for your file purpose. See the # supported MIME types for assistants and vision. sig { returns(String) } - def mime_type - end - - sig { params(_: String).returns(String) } - def mime_type=(_) - end + attr_accessor :mime_type # The intended purpose of the uploaded file. # # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } - def purpose - end - - sig { params(_: OpenAI::Models::FilePurpose::OrSymbol).returns(OpenAI::Models::FilePurpose::OrSymbol) } - def purpose=(_) - end + attr_accessor :purpose sig do params( diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index d52036fc..a73f5e78 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -9,12 +9,7 @@ module OpenAI # The chunk of bytes for this Part. sig { returns(T.any(IO, StringIO)) } - def data - end - - sig { params(_: T.any(IO, StringIO)).returns(T.any(IO, StringIO)) } - def data=(_) - end + attr_accessor :data sig do params( diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index 59ae7dda..43223349 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -6,39 +6,19 @@ module OpenAI class UploadPart < OpenAI::BaseModel # The upload Part unique identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the Part was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The object type, which is always `upload.part`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The ID of the Upload object that this Part was added to. sig { returns(String) } - def upload_id - end - - sig { params(_: String).returns(String) } - def upload_id=(_) - end + attr_accessor :upload_id # The upload Part represents a chunk of bytes we can add to an Upload object. 
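Read together, the parameter classes above describe a three-step flow: create an Upload sized with `bytes`, add the data as Parts, then complete with the ordered `part_ids`, optionally guarded by the `md5` checksum. A sketch of that flow; `client` and the exact call shapes are assumptions for illustration, not documented SDK methods:

require "digest"
require "stringio"

# Hypothetical driver for the three-step Uploads flow sketched above.
def upload_in_parts(client, path, mime_type:, purpose:, chunk_size: 8 * 1024 * 1024)
  data = File.binread(path)

  upload = client.uploads.create(
    bytes: data.bytesize,
    filename: File.basename(path),
    mime_type: mime_type,
    purpose: purpose
  )

  # Each chunk of bytes becomes one upload Part.
  part_ids = (0...data.bytesize).step(chunk_size).map do |offset|
    chunk = StringIO.new(data.byteslice(offset, chunk_size))
    client.uploads.parts.create(upload.id, data: chunk).id
  end

  # Complete with the ordered Part IDs; md5 is the optional integrity check.
  client.uploads.complete(upload.id, part_ids: part_ids, md5: Digest::MD5.hexdigest(data))
end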
sig do diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index a3dc0af1..aeb5522d 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -5,41 +5,21 @@ module OpenAI class VectorStore < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the vector store was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at sig { returns(OpenAI::Models::VectorStore::FileCounts) } - def file_counts - end + attr_reader :file_counts - sig do - params(_: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)) - end - def file_counts=(_) - end + sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)).void } + attr_writer :file_counts # The Unix timestamp (in seconds) for when the vector store was last active. sig { returns(T.nilable(Integer)) } - def last_active_at - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def last_active_at=(_) - end + attr_accessor :last_active_at # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -48,74 +28,36 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The name of the vector store. sig { returns(String) } - def name - end - - sig { params(_: String).returns(String) } - def name=(_) - end + attr_accessor :name # The object type, which is always `vector_store`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::VectorStore::Status::TaggedSymbol) - .returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The total number of bytes used by the files in the vector store. sig { returns(Integer) } - def usage_bytes - end - - sig { params(_: Integer).returns(Integer) } - def usage_bytes=(_) - end + attr_accessor :usage_bytes # The expiration policy for a vector store. 
      sig { returns(T.nilable(OpenAI::Models::VectorStore::ExpiresAfter)) }
-      def expires_after
-      end
+      attr_reader :expires_after

-      sig do
-        params(_: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash))
-          .returns(T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash))
-      end
-      def expires_after=(_)
-      end
+      sig { params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash)).void }
+      attr_writer :expires_after

      # The Unix timestamp (in seconds) for when the vector store will expire.
      sig { returns(T.nilable(Integer)) }
-      def expires_at
-      end
-
-      sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) }
-      def expires_at=(_)
-      end
+      attr_accessor :expires_at

      # A vector store is a collection of processed files that can be used by the
      # `file_search` tool.
@@ -174,48 +116,23 @@ module OpenAI
      class FileCounts < OpenAI::BaseModel
        # The number of files that were cancelled.
        sig { returns(Integer) }
-        def cancelled
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def cancelled=(_)
-        end
+        attr_accessor :cancelled

        # The number of files that have been successfully processed.
        sig { returns(Integer) }
-        def completed
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def completed=(_)
-        end
+        attr_accessor :completed

        # The number of files that have failed to process.
        sig { returns(Integer) }
-        def failed
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def failed=(_)
-        end
+        attr_accessor :failed

        # The number of files that are currently being processed.
        sig { returns(Integer) }
-        def in_progress
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def in_progress=(_)
-        end
+        attr_accessor :in_progress

        # The total number of files.
        sig { returns(Integer) }
-        def total
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def total=(_)
-        end
+        attr_accessor :total

        sig do
          params(
@@ -268,21 +185,11 @@ module OpenAI
        # Anchor timestamp after which the expiration policy applies. Supported anchors:
        # `last_active_at`.
        sig { returns(Symbol) }
-        def anchor
-        end
-
-        sig { params(_: Symbol).returns(Symbol) }
-        def anchor=(_)
-        end
+        attr_accessor :anchor

        # The number of days after the anchor time that the vector store will expire.
        sig { returns(Integer) }
-        def days
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def days=(_)
-        end
+        attr_accessor :days

        # The expiration policy for a vector store.
        sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) }
diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi
index 54ca2316..3799a1d8 100644
--- a/rbi/lib/openai/models/vector_store_create_params.rbi
+++ b/rbi/lib/openai/models/vector_store_create_params.rbi
@@ -18,50 +18,38 @@ module OpenAI
          )
        )
      end
-      def chunking_strategy
-      end
+      attr_reader :chunking_strategy

      sig do
        params(
-          _: T.any(
+          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
            OpenAI::Util::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          )
        )
-          .returns(
-            T.any(
-              OpenAI::Models::AutoFileChunkingStrategyParam,
-              OpenAI::Util::AnyHash,
-              OpenAI::Models::StaticFileChunkingStrategyObjectParam
-            )
-          )
-      end
-      def chunking_strategy=(_)
+          .void
      end
+      attr_writer :chunking_strategy

      # The expiration policy for a vector store.
sig { returns(T.nilable(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter)) } - def expires_after - end + attr_reader :expires_after sig do - params(_: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash)) - end - def expires_after=(_) + params(expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash)) + .void end + attr_writer :expires_after # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # the vector store should use. Useful for tools like `file_search` that can access # files. sig { returns(T.nilable(T::Array[String])) } - def file_ids - end + attr_reader :file_ids - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -70,21 +58,14 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The name of the vector store. sig { returns(T.nilable(String)) } - def name - end + attr_reader :name - sig { params(_: String).returns(String) } - def name=(_) - end + sig { params(name: String).void } + attr_writer :name sig do params( @@ -127,21 +108,11 @@ module OpenAI # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } - def anchor - end - - sig { params(_: Symbol).returns(Symbol) } - def anchor=(_) - end + attr_accessor :anchor # The number of days after the anchor time that the vector store will expire. sig { returns(Integer) } - def days - end - - sig { params(_: Integer).returns(Integer) } - def days=(_) - end + attr_accessor :days # The expiration policy for a vector store. 
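Since `last_active_at` is the only supported anchor, the effective expiry is simply the anchor's Unix timestamp plus `days` worth of seconds. A worked example (the helper is illustrative, not SDK code):

# expiry = anchor timestamp + days * 86_400 seconds
def vector_store_expiry(last_active_at:, days:)
  last_active_at + days * 86_400
end

last_active_at = Time.utc(2025, 3, 12).to_i # => 1741737600
p vector_store_expiry(last_active_at: last_active_at, days: 7)
# => 1742342400, exactly seven days later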
sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/vector_store_deleted.rbi b/rbi/lib/openai/models/vector_store_deleted.rbi index ede60489..bd0e83c0 100644 --- a/rbi/lib/openai/models/vector_store_deleted.rbi +++ b/rbi/lib/openai/models/vector_store_deleted.rbi @@ -4,28 +4,13 @@ module OpenAI module Models class VectorStoreDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"vector_store.deleted") diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index c0360d6f..0a1e94a2 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -11,47 +11,36 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStoreListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 40bccf18..0e4d0c31 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -8,55 +8,46 @@ module OpenAI # A query string for a search sig { returns(T.any(String, T::Array[String])) } - def query - end - - sig { params(_: T.any(String, T::Array[String])).returns(T.any(String, T::Array[String])) } - def query=(_) - end + attr_accessor :query # A filter to apply based on file attributes. 
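The `filters` writer just below, like every writer in this patch typed as `T.any(SomeModel, OpenAI::Util::AnyHash)`, accepts either a constructed model or a bare hash of the same shape. A minimal sketch of that pattern, assuming the `sorbet-runtime` gem and placeholder class names:

require "sorbet-runtime"

class SomeFilter; end # stand-in for a real filter model

class SearchParams
  extend T::Sig

  sig { returns(T.nilable(T.any(SomeFilter, T::Hash[Symbol, T.untyped]))) }
  attr_reader :filters

  # A `.void` writer, mirroring the signatures introduced in this patch.
  sig { params(filters: T.any(SomeFilter, T::Hash[Symbol, T.untyped])).void }
  attr_writer :filters
end

params = SearchParams.new
params.filters = SomeFilter.new # a typed model...
params.filters = { type: :eq }  # ...or a bare hash; both satisfy the union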
sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } - def filters - end + attr_reader :filters sig do - params(_: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) - .returns(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)) - end - def filters=(_) + params( + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter) + ) + .void end + attr_writer :filters # The maximum number of results to return. This number should be between 1 and 50 # inclusive. sig { returns(T.nilable(Integer)) } - def max_num_results - end + attr_reader :max_num_results - sig { params(_: Integer).returns(Integer) } - def max_num_results=(_) - end + sig { params(max_num_results: Integer).void } + attr_writer :max_num_results # Ranking options for search. sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions)) } - def ranking_options - end + attr_reader :ranking_options sig do - params(_: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash)) - end - def ranking_options=(_) + params( + ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash) + ) + .void end + attr_writer :ranking_options # Whether to rewrite the natural language query for vector search. sig { returns(T.nilable(T::Boolean)) } - def rewrite_query - end + attr_reader :rewrite_query - sig { params(_: T::Boolean).returns(T::Boolean) } - def rewrite_query=(_) - end + sig { params(rewrite_query: T::Boolean).void } + attr_writer :rewrite_query sig do params( @@ -119,23 +110,16 @@ module OpenAI class RankingOptions < OpenAI::BaseModel sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } - def ranker - end + attr_reader :ranker - sig do - params(_: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) - .returns(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol) - end - def ranker=(_) - end + sig { params(ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol).void } + attr_writer :ranker sig { returns(T.nilable(Float)) } - def score_threshold - end + attr_reader :score_threshold - sig { params(_: Float).returns(Float) } - def score_threshold=(_) - end + sig { params(score_threshold: Float).void } + attr_writer :score_threshold # Ranking options for search. sig do diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 1847ed36..a9a8d0fc 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -9,54 +9,23 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes # Content chunks from the file. 
sig { returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) } - def content - end - - sig do - params(_: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) - .returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) - end - def content=(_) - end + attr_accessor :content # The ID of the vector store file. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # The name of the vector store file. sig { returns(String) } - def filename - end - - sig { params(_: String).returns(String) } - def filename=(_) - end + attr_accessor :filename # The similarity score for the result. sig { returns(Float) } - def score - end - - sig { params(_: Float).returns(Float) } - def score=(_) - end + attr_accessor :score sig do params( @@ -101,24 +70,11 @@ module OpenAI class Content < OpenAI::BaseModel # The text content returned from search. sig { returns(String) } - def text - end - - sig { params(_: String).returns(String) } - def text=(_) - end + attr_accessor :text # The type of content. sig { returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } - def type - end - - sig do - params(_: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - .returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - end - def type=(_) - end + attr_accessor :type sig do params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 2415457b..e7c42739 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -8,15 +8,15 @@ module OpenAI # The expiration policy for a vector store. sig { returns(T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) } - def expires_after - end + attr_reader :expires_after sig do - params(_: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash))) - .returns(T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash))) - end - def expires_after=(_) + params( + expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)) + ) + .void end + attr_writer :expires_after # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -25,21 +25,11 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } - def metadata - end - - sig { params(_: T.nilable(T::Hash[Symbol, String])).returns(T.nilable(T::Hash[Symbol, String])) } - def metadata=(_) - end + attr_accessor :metadata # The name of the vector store. sig { returns(T.nilable(String)) } - def name - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def name=(_) - end + attr_accessor :name sig do params( @@ -71,21 +61,11 @@ module OpenAI # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } - def anchor - end - - sig { params(_: Symbol).returns(Symbol) } - def anchor=(_) - end + attr_accessor :anchor # The number of days after the anchor time that the vector store will expire. 
sig { returns(Integer) } - def days - end - - sig { params(_: Integer).returns(Integer) } - def days=(_) - end + attr_accessor :days # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi index c1334d3d..764c2bb2 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 1a242819..c8da7a14 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -11,12 +11,7 @@ module OpenAI # the vector store should use. Useful for tools like `file_search` that can access # files. sig { returns(T::Array[String]) } - def file_ids - end - - sig { params(_: T::Array[String]).returns(T::Array[String]) } - def file_ids=(_) - end + attr_accessor :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -24,15 +19,7 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. @@ -46,27 +33,19 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) - .returns( - T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Util::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 367b7a22..65b17181 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -8,71 +8,50 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)) } - def filter - end + attr_reader :filter - sig do - params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol) - end - def filter=(_) - end + sig { params(filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol).void } + attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi index 0935490b..c637f31e 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi index d87f0929..6d9ffc4f 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/lib/openai/models/vector_stores/file_content_response.rbi index 7986b030..1fad3ed9 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_response.rbi @@ -6,21 +6,17 @@ module OpenAI class FileContentResponse < OpenAI::BaseModel # The text content sig { returns(T.nilable(String)) } - def text - end + attr_reader :text - sig { params(_: String).returns(String) } - def text=(_) - end + sig { params(text: String).void } + attr_writer :text # The content type (currently only `"text"`) sig { returns(T.nilable(String)) } - def type - end + attr_reader :type - sig { params(_: String).returns(String) } - def type=(_) - end + sig { params(type: String).void } + attr_writer :type sig { params(text: String, type: String).returns(T.attached_class) } def self.new(text: nil, type: nil) diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index e84068ca..e0b2f3f4 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -11,12 +11,7 @@ module OpenAI # vector store should use. Useful for tools like `file_search` that can access # files. sig { returns(String) } - def file_id - end - - sig { params(_: String).returns(String) } - def file_id=(_) - end + attr_accessor :file_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -24,15 +19,7 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. @@ -46,27 +33,19 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) - .returns( - T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Util::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi index 8277a73f..4c8aa466 100644 --- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index f123f36a..edce777d 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -12,59 +12,43 @@ module OpenAI # ending with obj_foo, your subsequent call can include after=obj_foo in order to # fetch the next page of the list. sig { returns(T.nilable(String)) } - def after - end + attr_reader :after - sig { params(_: String).returns(String) } - def after=(_) - end + sig { params(after: String).void } + attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order # to fetch the previous page of the list. sig { returns(T.nilable(String)) } - def before - end + attr_reader :before - sig { params(_: String).returns(String) } - def before=(_) - end + sig { params(before: String).void } + attr_writer :before # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)) } - def filter - end + attr_reader :filter - sig do - params(_: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol) - end - def filter=(_) - end + sig { params(filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol).void } + attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. 
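The `after`/`before` cursors and `limit` above describe plain cursor pagination. A sketch of walking forward through every page; `client` and the exact call shape are assumptions for illustration, not documented SDK methods:

# Yields every vector store file by following the `after` cursor.
def each_vector_store_file(client, vector_store_id, limit: 20)
  after = nil
  loop do
    params = { limit: limit, after: after }.compact
    files = client.vector_stores.files.list(vector_store_id, **params).data.to_a
    files.each { |file| yield file }
    break if files.length < limit # a short page means nothing further to fetch
    after = files.last.id # the cursor is the last object ID of this page
  end
end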
sig { returns(T.nilable(Integer)) } - def limit - end + attr_reader :limit - sig { params(_: Integer).returns(Integer) } - def limit=(_) - end + sig { params(limit: Integer).void } + attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } - def order - end + attr_reader :order - sig do - params(_: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) - .returns(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol) - end - def order=(_) - end + sig { params(order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol).void } + attr_writer :order sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi index a69049b2..a7d5c319 100644 --- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id sig do params( diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index b74c3292..3c552da3 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -8,12 +8,7 @@ module OpenAI include OpenAI::RequestParameters sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -21,15 +16,7 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes sig do params( diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 7d337918..3e32084e 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -6,81 +6,46 @@ module OpenAI class VectorStoreFile < OpenAI::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id # The Unix timestamp (in seconds) for when the vector store file was created. sig { returns(Integer) } - def created_at - end - - sig { params(_: Integer).returns(Integer) } - def created_at=(_) - end + attr_accessor :created_at # The last error associated with this vector store file. Will be `null` if there # are no errors. 
sig { returns(T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) } - def last_error - end + attr_reader :last_error sig do params( - _: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)) + last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)) ) - .returns(T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash))) - end - def last_error=(_) + .void end + attr_writer :last_error # The object type, which is always `vector_store.file`. sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } - def status - end - - sig do - params(_: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - .returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - end - def status=(_) - end + attr_accessor :status # The total vector store usage in bytes. Note that this may be different from the # original file size. sig { returns(Integer) } - def usage_bytes - end - - sig { params(_: Integer).returns(Integer) } - def usage_bytes=(_) - end + attr_accessor :usage_bytes # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # that the [File](https://platform.openai.com/docs/api-reference/files) is # attached to. sig { returns(String) } - def vector_store_id - end - - sig { params(_: String).returns(String) } - def vector_store_id=(_) - end + attr_accessor :vector_store_id # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -88,15 +53,7 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } - def attributes - end - - sig do - params(_: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - .returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - def attributes=(_) - end + attr_accessor :attributes # The strategy used to chunk the file. sig do @@ -106,27 +63,19 @@ module OpenAI ) ) end - def chunking_strategy - end + attr_reader :chunking_strategy sig do params( - _: T.any( + chunking_strategy: T.any( OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Util::AnyHash, OpenAI::Models::OtherFileChunkingStrategyObject ) ) - .returns( - T.any( - OpenAI::Models::StaticFileChunkingStrategyObject, - OpenAI::Util::AnyHash, - OpenAI::Models::OtherFileChunkingStrategyObject - ) - ) - end - def chunking_strategy=(_) + .void end + attr_writer :chunking_strategy # A list of files attached to a vector store. sig do @@ -182,24 +131,11 @@ module OpenAI class LastError < OpenAI::BaseModel # One of `server_error` or `rate_limit_exceeded`. 
          sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) }
-          def code
-          end
-
-          sig do
-            params(_: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol)
-              .returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol)
-          end
-          def code=(_)
-          end
+          attr_accessor :code

          # A human-readable description of the error.
          sig { returns(String) }
-          def message
-          end
-
-          sig { params(_: String).returns(String) }
-          def message=(_)
-          end
+          attr_accessor :message

          # The last error associated with this vector store file. Will be `null` if there
          # are no errors.
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
index e2b512ef..dd1ee3e5 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
@@ -6,67 +6,39 @@ module OpenAI
      class VectorStoreFileBatch < OpenAI::BaseModel
        # The identifier, which can be referenced in API endpoints.
        sig { returns(String) }
-        def id
-        end
-
-        sig { params(_: String).returns(String) }
-        def id=(_)
-        end
+        attr_accessor :id

        # The Unix timestamp (in seconds) for when the vector store files batch was
        # created.
        sig { returns(Integer) }
-        def created_at
-        end
-
-        sig { params(_: Integer).returns(Integer) }
-        def created_at=(_)
-        end
+        attr_accessor :created_at

        sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts) }
-        def file_counts
-        end
+        attr_reader :file_counts

        sig do
-          params(_: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash))
-            .returns(T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash))
-        end
-        def file_counts=(_)
+          params(
+            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash)
+          )
+            .void
        end
+        attr_writer :file_counts

        # The object type, which is always `vector_store.file_batch`.
        sig { returns(Symbol) }
-        def object
-        end
-
-        sig { params(_: Symbol).returns(Symbol) }
-        def object=(_)
-        end
+        attr_accessor :object

        # The status of the vector store files batch, which can be either `in_progress`,
        # `completed`, `cancelled` or `failed`.
        sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) }
-        def status
-        end
-
-        sig do
-          params(_: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol)
-            .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol)
-        end
-        def status=(_)
-        end
+        attr_accessor :status

        # The ID of the
        # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
        # that the [File](https://platform.openai.com/docs/api-reference/files) is
        # attached to.
        sig { returns(String) }
-        def vector_store_id
-        end
-
-        sig { params(_: String).returns(String) }
-        def vector_store_id=(_)
-        end
+        attr_accessor :vector_store_id

        # A batch of files attached to a vector store.
        sig do
@@ -102,48 +74,23 @@ module OpenAI
        class FileCounts < OpenAI::BaseModel
          # The number of files that were cancelled.
          sig { returns(Integer) }
-          def cancelled
-          end
-
-          sig { params(_: Integer).returns(Integer) }
-          def cancelled=(_)
-          end
+          attr_accessor :cancelled

          # The number of files that have been processed.
sig { returns(Integer) } - def completed - end - - sig { params(_: Integer).returns(Integer) } - def completed=(_) - end + attr_accessor :completed # The number of files that have failed to process. sig { returns(Integer) } - def failed - end - - sig { params(_: Integer).returns(Integer) } - def failed=(_) - end + attr_accessor :failed # The number of files that are currently being processed. sig { returns(Integer) } - def in_progress - end - - sig { params(_: Integer).returns(Integer) } - def in_progress=(_) - end + attr_accessor :in_progress # The total number of files. sig { returns(Integer) } - def total - end - - sig { params(_: Integer).returns(Integer) } - def total=(_) - end + attr_accessor :total sig do params( diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi index d7a5a707..6c4d25fa 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -5,28 +5,13 @@ module OpenAI module VectorStores class VectorStoreFileDeleted < OpenAI::BaseModel sig { returns(String) } - def id - end - - sig { params(_: String).returns(String) } - def id=(_) - end + attr_accessor :id sig { returns(T::Boolean) } - def deleted - end - - sig { params(_: T::Boolean).returns(T::Boolean) } - def deleted=(_) - end + attr_accessor :deleted sig { returns(Symbol) } - def object - end - - sig { params(_: Symbol).returns(Symbol) } - def object=(_) - end + attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } def self.new(id:, deleted:, object: :"vector_store.file.deleted") diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi index 41e78793..5ef61d62 100644 --- a/rbi/lib/openai/page.rbi +++ b/rbi/lib/openai/page.rbi @@ -7,20 +7,10 @@ module OpenAI Elem = type_member sig { returns(T.nilable(T::Array[Elem])) } - def data - end - - sig { params(_: T.nilable(T::Array[Elem])).returns(T.nilable(T::Array[Elem])) } - def data=(_) - end + attr_accessor :data sig { returns(String) } - def object - end - - sig { params(_: String).returns(String) } - def object=(_) - end + attr_accessor :object sig { returns(String) } def inspect diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index 4f198f23..51fb5512 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -5,15 +5,7 @@ module OpenAI module RequestParameters # Options to specify HTTP behaviour for this request. sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) } - def request_options - end - - sig do - params(_: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - .returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - end - def request_options=(_) - end + attr_accessor :request_options # @api private module Converter @@ -38,66 +30,30 @@ module OpenAI # Idempotency key to send with request and all associated retries. Will only be # sent for write requests. sig { returns(T.nilable(String)) } - def idempotency_key - end - - sig { params(_: T.nilable(String)).returns(T.nilable(String)) } - def idempotency_key=(_) - end + attr_accessor :idempotency_key # Extra query params to send with the request. These are `.merge`’d into any # `query` given at the client level. 
sig { returns(T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) } - def extra_query - end - - sig do - params(_: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) - .returns(T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) - end - def extra_query=(_) - end + attr_accessor :extra_query # Extra headers to send with the request. These are `.merged`’d into any # `extra_headers` given at the client level. sig { returns(T.nilable(T::Hash[String, T.nilable(String)])) } - def extra_headers - end - - sig do - params(_: T.nilable(T::Hash[String, T.nilable(String)])) - .returns(T.nilable(T::Hash[String, T.nilable(String)])) - end - def extra_headers=(_) - end + attr_accessor :extra_headers # Extra data to send with the request. These are deep merged into any data # generated as part of the normal request. sig { returns(T.nilable(T.anything)) } - def extra_body - end - - sig { params(_: T.nilable(T.anything)).returns(T.nilable(T.anything)) } - def extra_body=(_) - end + attr_accessor :extra_body # Maximum number of retries to attempt after a failed initial request. sig { returns(T.nilable(Integer)) } - def max_retries - end - - sig { params(_: T.nilable(Integer)).returns(T.nilable(Integer)) } - def max_retries=(_) - end + attr_accessor :max_retries # Request timeout in seconds. sig { returns(T.nilable(Float)) } - def timeout - end - - sig { params(_: T.nilable(Float)).returns(T.nilable(Float)) } - def timeout=(_) - end + attr_accessor :timeout # Returns a new instance of RequestOptions. sig { params(values: OpenAI::Util::AnyHash).returns(T.attached_class) } diff --git a/rbi/lib/openai/resources/audio.rbi b/rbi/lib/openai/resources/audio.rbi index c3381f2e..353012c6 100644 --- a/rbi/lib/openai/resources/audio.rbi +++ b/rbi/lib/openai/resources/audio.rbi @@ -4,16 +4,13 @@ module OpenAI module Resources class Audio sig { returns(OpenAI::Resources::Audio::Transcriptions) } - def transcriptions - end + attr_reader :transcriptions sig { returns(OpenAI::Resources::Audio::Translations) } - def translations - end + attr_reader :translations sig { returns(OpenAI::Resources::Audio::Speech) } - def speech - end + attr_reader :speech sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/lib/openai/resources/beta.rbi b/rbi/lib/openai/resources/beta.rbi index 3e97021d..eebedd02 100644 --- a/rbi/lib/openai/resources/beta.rbi +++ b/rbi/lib/openai/resources/beta.rbi @@ -4,12 +4,10 @@ module OpenAI module Resources class Beta sig { returns(OpenAI::Resources::Beta::Assistants) } - def assistants - end + attr_reader :assistants sig { returns(OpenAI::Resources::Beta::Threads) } - def threads - end + attr_reader :threads sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 9e668eee..2c2ac7ef 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -5,12 +5,10 @@ module OpenAI class Beta class Threads sig { returns(OpenAI::Resources::Beta::Threads::Runs) } - def runs - end + attr_reader :runs sig { returns(OpenAI::Resources::Beta::Threads::Messages) } - def messages - end + attr_reader :messages # Create a thread. 
sig do diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index d797090c..71cf02b1 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -6,8 +6,7 @@ module OpenAI class Threads class Runs sig { returns(OpenAI::Resources::Beta::Threads::Runs::Steps) } - def steps - end + attr_reader :steps # Create a run. sig do diff --git a/rbi/lib/openai/resources/chat.rbi b/rbi/lib/openai/resources/chat.rbi index 4d090d3f..98440959 100644 --- a/rbi/lib/openai/resources/chat.rbi +++ b/rbi/lib/openai/resources/chat.rbi @@ -4,8 +4,7 @@ module OpenAI module Resources class Chat sig { returns(OpenAI::Resources::Chat::Completions) } - def completions - end + attr_reader :completions sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 91d89d3d..58bc7161 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -5,8 +5,7 @@ module OpenAI class Chat class Completions sig { returns(OpenAI::Resources::Chat::Completions::Messages) } - def messages - end + attr_reader :messages # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take diff --git a/rbi/lib/openai/resources/fine_tuning.rbi b/rbi/lib/openai/resources/fine_tuning.rbi index 0699b0ee..f2743caa 100644 --- a/rbi/lib/openai/resources/fine_tuning.rbi +++ b/rbi/lib/openai/resources/fine_tuning.rbi @@ -4,8 +4,7 @@ module OpenAI module Resources class FineTuning sig { returns(OpenAI::Resources::FineTuning::Jobs) } - def jobs - end + attr_reader :jobs sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index df149226..7d8544da 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -5,8 +5,7 @@ module OpenAI class FineTuning class Jobs sig { returns(OpenAI::Resources::FineTuning::Jobs::Checkpoints) } - def checkpoints - end + attr_reader :checkpoints # Creates a fine-tuning job which begins the process of creating a new model from # a given dataset. diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 61ac293c..6ddca1da 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -4,8 +4,7 @@ module OpenAI module Resources class Responses sig { returns(OpenAI::Resources::Responses::InputItems) } - def input_items - end + attr_reader :input_items # Creates a model response. 
Provide # [text](https://platform.openai.com/docs/guides/text) or diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 863418f5..55b2b24e 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -4,8 +4,7 @@ module OpenAI module Resources class Uploads sig { returns(OpenAI::Resources::Uploads::Parts) } - def parts - end + attr_reader :parts # Creates an intermediate # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 6f4b48c7..0c24ebc9 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -4,12 +4,10 @@ module OpenAI module Resources class VectorStores sig { returns(OpenAI::Resources::VectorStores::Files) } - def files - end + attr_reader :files sig { returns(OpenAI::Resources::VectorStores::FileBatches) } - def file_batches - end + attr_reader :file_batches # Create a vector store. sig do From cacbdd2ffe3036985951c11593234bfce0250f7c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 12:32:29 +0000 Subject: [PATCH 054/295] fix: pages should be able to accept non-converter models (#60) --- lib/openai/cursor_page.rb | 2 +- lib/openai/page.rb | 2 +- test/openai/resources/batches_test.rb | 2 ++ test/openai/resources/beta/assistants_test.rb | 2 ++ test/openai/resources/beta/threads/messages_test.rb | 2 ++ test/openai/resources/beta/threads/runs/steps_test.rb | 2 ++ test/openai/resources/beta/threads/runs_test.rb | 2 ++ test/openai/resources/chat/completions/messages_test.rb | 2 ++ test/openai/resources/chat/completions_test.rb | 2 ++ test/openai/resources/files_test.rb | 2 ++ test/openai/resources/fine_tuning/jobs/checkpoints_test.rb | 2 ++ test/openai/resources/fine_tuning/jobs_test.rb | 4 ++++ test/openai/resources/models_test.rb | 2 ++ test/openai/resources/responses/input_items_test.rb | 2 ++ test/openai/resources/vector_stores/file_batches_test.rb | 2 ++ test/openai/resources/vector_stores/files_test.rb | 4 ++++ test/openai/resources/vector_stores_test.rb | 4 ++++ 17 files changed, 38 insertions(+), 2 deletions(-) diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index 3ba7ab46..fa0e9fc7 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -43,7 +43,7 @@ def initialize(client:, req:, headers:, page_data:) case page_data in {data: Array | nil => data} - @data = data&.map { model.coerce(_1) } + @data = data&.map { OpenAI::Converter.coerce(model, _1) } else end diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 6dd82f08..3e00417d 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -43,7 +43,7 @@ def initialize(client:, req:, headers:, page_data:) case page_data in {data: Array | nil => data} - @data = data&.map { model.coerce(_1) } + @data = data&.map { OpenAI::Converter.coerce(model, _1) } else end diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb index 08893c6c..59e25d4f 100644 --- a/test/openai/resources/batches_test.rb +++ b/test/openai/resources/batches_test.rb @@ -82,6 +82,8 @@ def test_list end row = response.to_enum.first + return if row.nil? 
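A sketch of the accessor rewrite the preceding .rbi hunks apply repeatedly: each hand-written reader/writer pair with two `sig` blocks collapses into a single Sorbet attribute declaration. The `id` field below is hypothetical, chosen only to show the shape of the change:

    # before: two method stubs, two sigs
    sig { returns(String) }
    def id
    end

    sig { params(_: String).returns(String) }
    def id=(_)
    end

    # after: one sig, one declaration
    sig { returns(String) }
    attr_accessor :id

Read-only accessors (the resource getters such as `transcriptions` or `files`) get the same treatment with a bare `attr_reader`, and fields that accept coerced hashes (e.g. `OpenAI::Util::AnyHash`) become an `attr_reader` plus an `attr_writer` whose sig ends in `.void`, matching the hunks above.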
+ assert_pattern do row => OpenAI::Models::Batch end diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index e0ffb63a..f6363319 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -89,6 +89,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Beta::Assistant end diff --git a/test/openai/resources/beta/threads/messages_test.rb b/test/openai/resources/beta/threads/messages_test.rb index 42f89d03..bd430759 100644 --- a/test/openai/resources/beta/threads/messages_test.rb +++ b/test/openai/resources/beta/threads/messages_test.rb @@ -92,6 +92,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Beta::Threads::Message end diff --git a/test/openai/resources/beta/threads/runs/steps_test.rb b/test/openai/resources/beta/threads/runs/steps_test.rb index e18e9336..97fa3d2b 100644 --- a/test/openai/resources/beta/threads/runs/steps_test.rb +++ b/test/openai/resources/beta/threads/runs/steps_test.rb @@ -40,6 +40,8 @@ def test_list_required_params end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Beta::Threads::Runs::RunStep end diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 9341955e..919cb65f 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -131,6 +131,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Beta::Threads::Run end diff --git a/test/openai/resources/chat/completions/messages_test.rb b/test/openai/resources/chat/completions/messages_test.rb index 72c10f96..e9fac539 100644 --- a/test/openai/resources/chat/completions/messages_test.rb +++ b/test/openai/resources/chat/completions/messages_test.rb @@ -11,6 +11,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Chat::ChatCompletionStoreMessage end diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index ffbd392e..f4d9b7f7 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -75,6 +75,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Chat::ChatCompletion end diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index c517f42f..6384982e 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -55,6 +55,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::FileObject end diff --git a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb index 96ee1164..feb9c651 100644 --- a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb +++ b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb @@ -11,6 +11,8 @@ def test_list end row = response.to_enum.first + return if row.nil? 
+ assert_pattern do row => OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint end diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb index 2b35089a..7df07711 100644 --- a/test/openai/resources/fine_tuning/jobs_test.rb +++ b/test/openai/resources/fine_tuning/jobs_test.rb @@ -75,6 +75,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::FineTuning::FineTuningJob end @@ -144,6 +146,8 @@ def test_list_events end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::FineTuning::FineTuningJobEvent end diff --git a/test/openai/resources/models_test.rb b/test/openai/resources/models_test.rb index d91063a2..33384138 100644 --- a/test/openai/resources/models_test.rb +++ b/test/openai/resources/models_test.rb @@ -28,6 +28,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Model end diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 9ae8f8e4..1250f3bd 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -11,6 +11,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::Responses::ResponseItem end diff --git a/test/openai/resources/vector_stores/file_batches_test.rb b/test/openai/resources/vector_stores/file_batches_test.rb index d9d66e4d..f4bcecf5 100644 --- a/test/openai/resources/vector_stores/file_batches_test.rb +++ b/test/openai/resources/vector_stores/file_batches_test.rb @@ -68,6 +68,8 @@ def test_list_files_required_params end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::VectorStores::VectorStoreFile end diff --git a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb index 6e38e8a7..2b789caa 100644 --- a/test/openai/resources/vector_stores/files_test.rb +++ b/test/openai/resources/vector_stores/files_test.rb @@ -82,6 +82,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::VectorStores::VectorStoreFile end @@ -125,6 +127,8 @@ def test_content_required_params end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::VectorStores::FileContentResponse end diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index 2296e034..c7ae05cb 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -83,6 +83,8 @@ def test_list end row = response.to_enum.first + return if row.nil? + assert_pattern do row => OpenAI::Models::VectorStore end @@ -128,6 +130,8 @@ def test_search_required_params end row = response.to_enum.first + return if row.nil? 
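Why the pagination fix in PATCH 054 works: `model.coerce(_1)` required every page `model` to respond to `coerce`, i.e. to be an OpenAI converter type. Routing through the module-level helper, as the lib/openai/cursor_page.rb and lib/openai/page.rb hunks do, lets plain Ruby classes serve as page elements as well:

    # dispatches on the target type instead of assuming it is a Converter
    @data = data&.map { OpenAI::Converter.coerce(model, _1) }

The accompanying `return if row.nil?` guards in the list tests are a separate hardening: when a page comes back empty, `response.to_enum.first` is nil, and skipping the `assert_pattern` beats failing the pattern match. This reading is inferred from the commit title and the hunks shown here.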
+ assert_pattern do row => OpenAI::Models::VectorStoreSearchResponse end From 1999b84a55b50d6a2699bacf5c2b116eae2a7202 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 20:14:16 +0000 Subject: [PATCH 055/295] chore: reduce verbosity in type declarations (#61) --- lib/openai.rb | 2 +- lib/openai/models/all_models.rb | 6 +- .../models/audio/speech_create_params.rb | 22 +-- lib/openai/models/audio/speech_model.rb | 8 +- .../audio/transcription_create_params.rb | 14 +- .../audio/transcription_create_response.rb | 6 +- .../models/audio/transcription_include.rb | 8 +- .../audio/transcription_stream_event.rb | 6 +- .../models/audio/translation_create_params.rb | 14 +- .../audio/translation_create_response.rb | 6 +- lib/openai/models/audio_model.rb | 8 +- lib/openai/models/audio_response_format.rb | 8 +- lib/openai/models/batch.rb | 8 +- lib/openai/models/batch_create_params.rb | 16 +- .../models/beta/assistant_create_params.rb | 12 +- .../models/beta/assistant_list_params.rb | 8 +- .../beta/assistant_response_format_option.rb | 6 +- .../models/beta/assistant_stream_event.rb | 6 +- lib/openai/models/beta/assistant_tool.rb | 6 +- .../models/beta/assistant_tool_choice.rb | 8 +- .../beta/assistant_tool_choice_option.rb | 14 +- .../models/beta/assistant_update_params.rb | 6 +- lib/openai/models/beta/file_search_tool.rb | 8 +- .../models/beta/message_stream_event.rb | 6 +- .../models/beta/run_step_stream_event.rb | 6 +- lib/openai/models/beta/run_stream_event.rb | 6 +- .../beta/thread_create_and_run_params.rb | 46 ++--- .../models/beta/thread_create_params.rb | 26 +-- lib/openai/models/beta/threads/annotation.rb | 6 +- .../models/beta/threads/annotation_delta.rb | 6 +- lib/openai/models/beta/threads/image_file.rb | 8 +- .../models/beta/threads/image_file_delta.rb | 8 +- lib/openai/models/beta/threads/image_url.rb | 8 +- .../models/beta/threads/image_url_delta.rb | 8 +- lib/openai/models/beta/threads/message.rb | 30 ++-- .../models/beta/threads/message_content.rb | 6 +- .../beta/threads/message_content_delta.rb | 6 +- .../threads/message_content_part_param.rb | 6 +- .../beta/threads/message_create_params.rb | 20 +-- .../models/beta/threads/message_delta.rb | 8 +- .../beta/threads/message_list_params.rb | 8 +- lib/openai/models/beta/threads/run.rb | 24 +-- .../models/beta/threads/run_create_params.rb | 34 ++-- .../models/beta/threads/run_list_params.rb | 8 +- lib/openai/models/beta/threads/run_status.rb | 8 +- .../runs/code_interpreter_tool_call.rb | 6 +- .../runs/code_interpreter_tool_call_delta.rb | 6 +- .../threads/runs/file_search_tool_call.rb | 16 +- .../models/beta/threads/runs/run_step.rb | 30 ++-- .../beta/threads/runs/run_step_delta.rb | 6 +- .../beta/threads/runs/run_step_include.rb | 8 +- .../beta/threads/runs/step_list_params.rb | 8 +- .../models/beta/threads/runs/tool_call.rb | 6 +- .../beta/threads/runs/tool_call_delta.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 16 +- ...chat_completion_assistant_message_param.rb | 12 +- .../chat/chat_completion_audio_param.rb | 16 +- .../models/chat/chat_completion_chunk.rb | 32 ++-- .../chat/chat_completion_content_part.rb | 6 +- .../chat_completion_content_part_image.rb | 8 +- ...hat_completion_content_part_input_audio.rb | 8 +- ...chat_completion_developer_message_param.rb | 6 +- .../chat/chat_completion_message_param.rb | 6 +- .../models/chat/chat_completion_modality.rb | 8 +- .../chat_completion_prediction_content.rb | 6 +- 
.../models/chat/chat_completion_role.rb | 8 +- .../chat_completion_system_message_param.rb | 6 +- .../chat_completion_tool_choice_option.rb | 14 +- .../chat_completion_tool_message_param.rb | 6 +- .../chat_completion_user_message_param.rb | 6 +- .../models/chat/completion_create_params.rb | 56 +++--- .../models/chat/completion_list_params.rb | 8 +- .../chat/completions/message_list_params.rb | 8 +- lib/openai/models/chat_model.rb | 8 +- lib/openai/models/comparison_filter.rb | 14 +- lib/openai/models/completion_choice.rb | 8 +- lib/openai/models/completion_create_params.rb | 18 +- lib/openai/models/compound_filter.rb | 14 +- lib/openai/models/embedding_create_params.rb | 20 +-- lib/openai/models/embedding_model.rb | 8 +- lib/openai/models/file_chunking_strategy.rb | 6 +- .../models/file_chunking_strategy_param.rb | 6 +- lib/openai/models/file_list_params.rb | 8 +- lib/openai/models/file_object.rb | 16 +- lib/openai/models/file_purpose.rb | 8 +- .../models/fine_tuning/fine_tuning_job.rb | 76 +++------ .../fine_tuning/fine_tuning_job_event.rb | 16 +- .../models/fine_tuning/job_create_params.rb | 74 +++----- .../models/image_create_variation_params.rb | 22 +-- lib/openai/models/image_edit_params.rb | 22 +-- lib/openai/models/image_generate_params.rb | 38 ++--- lib/openai/models/image_model.rb | 8 +- lib/openai/models/moderation.rb | 104 +++++------ lib/openai/models/moderation_create_params.rb | 12 +- lib/openai/models/moderation_model.rb | 8 +- .../models/moderation_multi_modal_input.rb | 6 +- lib/openai/models/reasoning.rb | 8 +- lib/openai/models/reasoning_effort.rb | 8 +- lib/openai/models/responses/computer_tool.rb | 8 +- .../models/responses/easy_input_message.rb | 22 +-- .../models/responses/file_search_tool.rb | 14 +- .../responses/input_item_list_params.rb | 8 +- lib/openai/models/responses/response.rb | 22 +-- .../response_code_interpreter_tool_call.rb | 14 +- .../responses/response_computer_tool_call.rb | 30 ++-- ...response_computer_tool_call_output_item.rb | 8 +- .../models/responses/response_content.rb | 6 +- .../response_content_part_added_event.rb | 6 +- .../response_content_part_done_event.rb | 6 +- .../responses/response_create_params.rb | 20 +-- lib/openai/models/responses/response_error.rb | 8 +- .../response_file_search_tool_call.rb | 14 +- .../responses/response_format_text_config.rb | 6 +- .../responses/response_function_tool_call.rb | 8 +- ...response_function_tool_call_output_item.rb | 8 +- .../responses/response_function_web_search.rb | 8 +- .../models/responses/response_includable.rb | 8 +- .../models/responses/response_input_audio.rb | 8 +- .../responses/response_input_content.rb | 6 +- .../models/responses/response_input_image.rb | 8 +- .../models/responses/response_input_item.rb | 46 ++--- .../responses/response_input_message_item.rb | 24 +-- lib/openai/models/responses/response_item.rb | 6 +- .../models/responses/response_output_item.rb | 6 +- .../responses/response_output_message.rb | 14 +- .../models/responses/response_output_text.rb | 6 +- .../responses/response_reasoning_item.rb | 8 +- .../models/responses/response_status.rb | 8 +- .../models/responses/response_stream_event.rb | 6 +- .../response_text_annotation_delta_event.rb | 6 +- lib/openai/models/responses/tool.rb | 6 +- .../models/responses/tool_choice_options.rb | 8 +- .../models/responses/tool_choice_types.rb | 8 +- .../models/responses/web_search_tool.rb | 16 +- lib/openai/models/responses_model.rb | 6 +- lib/openai/models/upload.rb | 8 +- lib/openai/models/vector_store.rb | 8 +- 
lib/openai/models/vector_store_list_params.rb | 8 +- .../models/vector_store_search_params.rb | 20 +-- .../models/vector_store_search_response.rb | 14 +- .../vector_stores/file_batch_create_params.rb | 6 +- .../file_batch_list_files_params.rb | 16 +- .../vector_stores/file_create_params.rb | 6 +- .../models/vector_stores/file_list_params.rb | 16 +- .../vector_stores/file_update_params.rb | 6 +- .../models/vector_stores/vector_store_file.rb | 22 +-- .../vector_stores/vector_store_file_batch.rb | 8 +- rbi/lib/openai/models/all_models.rbi | 9 +- .../models/audio/speech_create_params.rbi | 20 +-- rbi/lib/openai/models/audio/speech_model.rbi | 6 +- .../audio/transcription_create_params.rbi | 20 +-- .../audio/transcription_create_response.rbi | 9 +- .../models/audio/transcription_include.rbi | 6 +- .../audio/transcription_stream_event.rbi | 26 +-- .../audio/translation_create_params.rbi | 14 +- .../audio/translation_create_response.rbi | 9 +- rbi/lib/openai/models/audio_model.rbi | 6 +- .../openai/models/audio_response_format.rbi | 6 +- rbi/lib/openai/models/batch.rbi | 6 +- rbi/lib/openai/models/batch_create_params.rbi | 12 +- .../models/beta/assistant_create_params.rbi | 34 +--- .../models/beta/assistant_list_params.rbi | 6 +- .../beta/assistant_response_format_option.rbi | 28 +-- .../models/beta/assistant_stream_event.rbi | 48 +----- rbi/lib/openai/models/beta/assistant_tool.rbi | 27 +-- .../models/beta/assistant_tool_choice.rbi | 6 +- .../beta/assistant_tool_choice_option.rbi | 32 +--- .../models/beta/assistant_update_params.rbi | 9 +- .../openai/models/beta/file_search_tool.rbi | 12 +- .../models/beta/message_stream_event.rbi | 29 +--- .../models/beta/run_step_stream_event.rbi | 31 +--- .../openai/models/beta/run_stream_event.rbi | 34 +--- .../beta/thread_create_and_run_params.rbi | 161 ++++++------------ .../models/beta/thread_create_params.rbi | 108 ++++-------- .../openai/models/beta/threads/annotation.rbi | 26 +-- .../models/beta/threads/annotation_delta.rbi | 26 +-- .../openai/models/beta/threads/image_file.rbi | 6 +- .../models/beta/threads/image_file_delta.rbi | 6 +- .../openai/models/beta/threads/image_url.rbi | 6 +- .../models/beta/threads/image_url_delta.rbi | 6 +- .../openai/models/beta/threads/message.rbi | 48 ++---- .../models/beta/threads/message_content.rbi | 28 +-- .../beta/threads/message_content_delta.rbi | 28 +-- .../threads/message_content_part_param.rbi | 27 +-- .../beta/threads/message_create_params.rbi | 82 +++------ .../models/beta/threads/message_delta.rbi | 6 +- .../beta/threads/message_list_params.rbi | 6 +- rbi/lib/openai/models/beta/threads/run.rbi | 18 +- .../models/beta/threads/run_create_params.rbi | 108 ++++-------- .../models/beta/threads/run_list_params.rbi | 6 +- .../openai/models/beta/threads/run_status.rbi | 6 +- .../runs/code_interpreter_tool_call.rbi | 26 +-- .../runs/code_interpreter_tool_call_delta.rbi | 26 +-- .../threads/runs/file_search_tool_call.rbi | 32 ++-- .../models/beta/threads/runs/run_step.rbi | 44 ++--- .../beta/threads/runs/run_step_delta.rbi | 26 +-- .../beta/threads/runs/run_step_include.rbi | 6 +- .../beta/threads/runs/step_list_params.rbi | 6 +- .../models/beta/threads/runs/tool_call.rbi | 27 +-- .../beta/threads/runs/tool_call_delta.rbi | 27 +-- .../openai/models/chat/chat_completion.rbi | 12 +- ...hat_completion_assistant_message_param.rbi | 67 +++----- .../chat/chat_completion_audio_param.rbi | 12 +- .../models/chat/chat_completion_chunk.rbi | 30 ++-- .../chat/chat_completion_content_part.rbi | 28 +-- 
.../chat_completion_content_part_image.rbi | 12 +- ...at_completion_content_part_input_audio.rbi | 16 +- ...hat_completion_developer_message_param.rbi | 9 +- .../chat/chat_completion_message_param.rbi | 30 +--- .../models/chat/chat_completion_modality.rbi | 6 +- .../chat_completion_prediction_content.rbi | 9 +- .../models/chat/chat_completion_role.rbi | 6 +- .../chat_completion_system_message_param.rbi | 9 +- .../chat_completion_tool_choice_option.rbi | 32 +--- .../chat_completion_tool_message_param.rbi | 9 +- .../chat_completion_user_message_param.rbi | 53 ++---- .../models/chat/completion_create_params.rbi | 107 ++++-------- .../models/chat/completion_list_params.rbi | 6 +- .../chat/completions/message_list_params.rbi | 6 +- rbi/lib/openai/models/chat_model.rbi | 6 +- rbi/lib/openai/models/comparison_filter.rbi | 14 +- rbi/lib/openai/models/completion_choice.rbi | 6 +- .../models/completion_create_params.rbi | 28 +-- rbi/lib/openai/models/compound_filter.rbi | 14 +- .../openai/models/embedding_create_params.rbi | 25 +-- rbi/lib/openai/models/embedding_model.rbi | 6 +- .../openai/models/file_chunking_strategy.rbi | 23 +-- .../models/file_chunking_strategy_param.rbi | 26 +-- rbi/lib/openai/models/file_list_params.rbi | 6 +- rbi/lib/openai/models/file_object.rbi | 12 +- rbi/lib/openai/models/file_purpose.rbi | 6 +- .../models/fine_tuning/fine_tuning_job.rbi | 92 +++------- .../fine_tuning/fine_tuning_job_event.rbi | 12 +- .../models/fine_tuning/job_create_params.rbi | 95 +++-------- .../models/image_create_variation_params.rbi | 20 +-- rbi/lib/openai/models/image_edit_params.rbi | 20 +-- .../openai/models/image_generate_params.rbi | 32 ++-- rbi/lib/openai/models/image_model.rbi | 6 +- rbi/lib/openai/models/moderation.rbi | 132 ++++++-------- .../models/moderation_create_params.rbi | 35 +--- rbi/lib/openai/models/moderation_model.rbi | 6 +- .../models/moderation_multi_modal_input.rbi | 9 +- rbi/lib/openai/models/reasoning.rbi | 6 +- rbi/lib/openai/models/reasoning_effort.rbi | 6 +- .../openai/models/responses/computer_tool.rbi | 6 +- .../models/responses/easy_input_message.rbi | 58 ++----- .../models/responses/file_search_tool.rbi | 19 +-- .../responses/input_item_list_params.rbi | 6 +- rbi/lib/openai/models/responses/response.rbi | 39 ++--- .../response_code_interpreter_tool_call.rbi | 38 ++--- .../responses/response_computer_tool_call.rbi | 61 ++----- ...esponse_computer_tool_call_output_item.rbi | 12 +- .../models/responses/response_content.rbi | 29 +--- .../response_content_part_added_event.rbi | 19 +-- .../response_content_part_done_event.rbi | 19 +-- .../responses/response_create_params.rbi | 103 ++++------- .../models/responses/response_error.rbi | 6 +- .../response_file_search_tool_call.rbi | 14 +- .../responses/response_format_text_config.rbi | 27 +-- .../responses/response_function_tool_call.rbi | 6 +- ...esponse_function_tool_call_output_item.rbi | 12 +- .../response_function_web_search.rbi | 6 +- .../models/responses/response_includable.rbi | 6 +- .../models/responses/response_input_audio.rbi | 6 +- .../responses/response_input_content.rbi | 27 +-- .../models/responses/response_input_image.rbi | 6 +- .../models/responses/response_input_item.rbi | 77 +++------ .../responses/response_input_message_item.rbi | 18 +- .../openai/models/responses/response_item.rbi | 32 +--- .../models/responses/response_output_item.rbi | 30 +--- .../responses/response_output_message.rbi | 25 +-- .../models/responses/response_output_text.rbi | 27 +-- .../responses/response_reasoning_item.rbi | 6 +- 
.../models/responses/response_status.rbi | 6 +- .../responses/response_stream_event.rbi | 56 +----- .../response_text_annotation_delta_event.rbi | 27 +-- rbi/lib/openai/models/responses/tool.rbi | 28 +-- .../models/responses/tool_choice_options.rbi | 6 +- .../models/responses/tool_choice_types.rbi | 6 +- .../models/responses/web_search_tool.rbi | 12 +- rbi/lib/openai/models/responses_model.rbi | 9 +- rbi/lib/openai/models/upload.rbi | 6 +- rbi/lib/openai/models/vector_store.rbi | 6 +- .../models/vector_store_list_params.rbi | 6 +- .../models/vector_store_search_params.rbi | 23 +-- .../models/vector_store_search_response.rbi | 14 +- .../file_batch_create_params.rbi | 8 +- .../file_batch_list_files_params.rbi | 12 +- .../vector_stores/file_create_params.rbi | 8 +- .../models/vector_stores/file_list_params.rbi | 12 +- .../vector_stores/file_update_params.rbi | 8 +- .../vector_stores/vector_store_file.rbi | 20 +-- .../vector_stores/vector_store_file_batch.rbi | 6 +- 293 files changed, 1704 insertions(+), 3710 deletions(-) diff --git a/lib/openai.rb b/lib/openai.rb index 64c67b2f..ac44c4d7 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -2,7 +2,7 @@ # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. -if defined?(:Tapioca) && caller_locations.any? { _1.path.end_with?("tapioca/cli.rb") } +if Object.const_defined?(:Tapioca) && caller_locations.any? { _1.path.end_with?("tapioca/cli.rb") } Warning.warn( <<~WARN \n diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 0727842f..135bffac 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -27,10 +27,8 @@ module AllModels variant const: OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW_2025_03_11 # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] + # def self.variants; end end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 3a0c5b33..1c0a806a 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -87,10 +87,8 @@ module Model variant enum: -> { OpenAI::Models::Audio::SpeechModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] + # def self.variants; end end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, @@ -112,11 +110,9 @@ module Voice finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, @@ -133,11 +129,9 @@ module ResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index b2e5f284..fad7aa94 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -12,11 +12,9 @@ module SpeechModel finalize! 
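The lib/openai.rb hunk above fixes a real predicate bug: `defined?(:Tapioca)` tests the symbol literal `:Tapioca`, which is always defined, so that half of the guard was a no-op and the `caller_locations` check was doing all the work. `Object.const_defined?(:Tapioca)` makes the first clause meaningful by asking whether the Tapioca constant is actually loaded:

    # only warn when Tapioca is loaded and its CLI is on the call stack
    if Object.const_defined?(:Tapioca) &&
       caller_locations.any? { _1.path.end_with?("tapioca/cli.rb") }
      # emit the "unnecessary compilation" warning shown in the hunk
    end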
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index c41eb0d5..bbbd2ada 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -143,10 +143,8 @@ module Model variant enum: -> { OpenAI::Models::AudioModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # def self.variants; end end module TimestampGranularity @@ -157,11 +155,9 @@ module TimestampGranularity finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 07d30d90..fe9d6a25 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -15,10 +15,8 @@ module TranscriptionCreateResponse variant -> { OpenAI::Models::Audio::TranscriptionVerbose } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] + # def self.variants; end end end end diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index 41e2d8cf..7c76eb59 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -10,11 +10,9 @@ module TranscriptionInclude finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index be40014b..40437586 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -19,10 +19,8 @@ module TranscriptionStreamEvent variant :"transcript.text.done", -> { OpenAI::Models::Audio::TranscriptionTextDoneEvent } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] + # def self.variants; end end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 47ce78ce..e610b092 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -83,10 +83,8 @@ module Model variant enum: -> { OpenAI::Models::AudioModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # def self.variants; end end # The format of the output, in one of these options: `json`, `text`, `srt`, @@ -102,11 +100,9 @@ module ResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 4d7cf38c..2511f059 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -11,10 +11,8 @@ module TranslationCreateResponse variant -> { OpenAI::Models::Audio::TranslationVerbose } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] + # def self.variants; end end end end diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index d84ec517..d0ffd0c5 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -11,11 +11,9 @@ module AudioModel finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 86745cf4..0b6e75b3 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -16,11 +16,9 @@ module AudioResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 0c1883c2..96e290bb 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -239,11 +239,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Errors < OpenAI::BaseModel diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 85c2bb71..4c141b0e 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -68,11 +68,9 @@ module CompletionWindow finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The endpoint to be used for all requests in the batch. Currently @@ -89,11 +87,9 @@ module Endpoint finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 76336aef..6dcdbe59 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -171,10 +171,8 @@ module Model variant enum: -> { OpenAI::Models::ChatModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # def self.variants; end end class ToolResources < OpenAI::BaseModel @@ -392,10 +390,8 @@ class Static < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index c7b7b460..e6b3a31d 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -77,11 +77,9 @@ module Order finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index da77528c..48b15aaa 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -43,10 +43,8 @@ module AssistantResponseFormatOption variant -> { OpenAI::Models::ResponseFormatJSONSchema } # @!parse - # class << self - # # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 02fad932..734ad6c4 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -727,10 +727,8 @@ class ErrorEvent < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index ecd6b3db..2a2c655d 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -15,10 +15,8 @@ module AssistantTool variant :function, -> { OpenAI::Models::Beta::FunctionTool } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 332a8971..b7cb38c3 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -40,11 +40,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 0f60a7c0..807dc2eb 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -32,18 +32,14 @@ module Auto finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # @!parse - # class << self - # # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] - # def variants; end - # end + # # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 4a6636f6..0db4e4cd 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -267,10 +267,8 @@ module Model variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 # @!parse - # class << self - # # @return [Array(String, Symbol)] - # def variants; end - # end + # # @return [Array(String, Symbol)] + # def self.variants; end end class ToolResources < OpenAI::BaseModel diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 94ce6f78..98e95ab3 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -115,11 +115,9 @@ module Ranker finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 5ea73994..6f0bdf04 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -160,10 +160,8 @@ class ThreadMessageIncomplete < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 45e973df..a3dc27d6 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -212,10 +212,8 @@ class ThreadRunStepExpired < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] - # def variants; end - # end + # # @return 
[Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index f506c9e8..058cde20 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -292,10 +292,8 @@ class ThreadRunExpired < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 58aea234..afa35bb4 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -223,10 +223,8 @@ module Model variant enum: -> { OpenAI::Models::ChatModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # def self.variants; end end class Thread < OpenAI::BaseModel @@ -337,10 +335,8 @@ module Content variant OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # The role of the entity that is creating the message. Allowed values include: @@ -357,11 +353,9 @@ module Role finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Attachment < OpenAI::BaseModel @@ -420,10 +414,8 @@ class FileSearch < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] + # def self.variants; end end end end @@ -644,10 +636,8 @@ class Static < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def self.variants; end end end end @@ -741,10 +731,8 @@ module Tool variant -> { OpenAI::Models::Beta::FunctionTool } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] + # def self.variants; end end class TruncationStrategy < OpenAI::BaseModel @@ -787,11 +775,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index abadddfc..24ff443e 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -110,10 +110,8 @@ module Content variant OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # The role of the entity that is creating the message. Allowed values include: @@ -130,11 +128,9 @@ module Role finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Attachment < OpenAI::BaseModel @@ -193,10 +189,8 @@ class FileSearch < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] + # def self.variants; end end end end @@ -416,10 +410,8 @@ class Static < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 99110d38..b7822e85 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -19,10 +19,8 @@ module Annotation variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 66fbd501..8bbb2ee1 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -19,10 +19,8 @@ module AnnotationDelta variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 873f7306..2a06f605 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -43,11 +43,9 @@ module Detail finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index a07a1a85..9f702f21 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -47,11 +47,9 @@ module Detail finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 36f95249..e6830beb 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -42,11 +42,9 @@ module Detail finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index f6c26510..d47277f4 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -46,11 +46,9 @@ module Detail finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 4af15e3b..b3a7faab 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -195,10 +195,8 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] + # def self.variants; end end end @@ -230,11 +228,9 @@ module Reason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -247,11 +243,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -265,11 +259,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 40ea6954..4e4280f1 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -24,10 +24,8 @@ module MessageContent variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalContentBlock } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 634b517c..4cbf92ef 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -24,10 +24,8 @@ module MessageContentDelta variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDeltaBlock } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 4ae935f2..20ea5568 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -21,10 +21,8 @@ module MessageContentPartParam variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlockParam } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index faca57aa..f9082a79 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -70,10 +70,8 @@ module Content variant OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # The role of the entity that is creating the message. 
Allowed values include: @@ -90,11 +88,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Attachment < OpenAI::BaseModel @@ -153,10 +149,8 @@ class FileSearch < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 34586ce6..301bea9e 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -44,11 +44,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 8eacf749..eef2c21b 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -89,11 +89,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index f79c15af..05dd765d 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -315,11 +315,9 @@ module Reason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -356,11 +354,9 @@ module Code finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -448,11 +444,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index c5d6dd63..a3791a25 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -299,10 +299,8 @@ module Content variant OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # The role of the entity that is creating the message. Allowed values include: @@ -319,11 +317,9 @@ module Role finalize! 
-          class << self
-            # @!parse
-            #   # @return [Array<Symbol>]
-            #   def values; end
-          end
+          # @!parse
+          #   # @return [Array<Symbol>]
+          #   def self.values; end
         end
 
         class Attachment < OpenAI::BaseModel
@@ -382,10 +378,8 @@ class FileSearch < OpenAI::BaseModel
             end
 
             # @!parse
-            #   class << self
-            #     # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)]
-            #     def variants; end
-            #   end
+            #   # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)]
+            #   def self.variants; end
           end
         end
       end
@@ -403,10 +397,8 @@ module Model
         variant enum: -> { OpenAI::Models::ChatModel }
 
         # @!parse
-        #   class << self
-        #     # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
-        #     def variants; end
-        #   end
+        #   # @return [Array(String, Symbol, OpenAI::Models::ChatModel)]
+        #   def self.variants; end
       end
 
       class TruncationStrategy < OpenAI::BaseModel
@@ -449,11 +441,9 @@ module Type
 
           finalize!
 
-          class << self
-            # @!parse
-            #   # @return [Array<Symbol>]
-            #   def values; end
-          end
+          # @!parse
+          #   # @return [Array<Symbol>]
+          #   def self.values; end
         end
       end
     end
diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb
index a0286c58..24096507 100644
--- a/lib/openai/models/beta/threads/run_list_params.rb
+++ b/lib/openai/models/beta/threads/run_list_params.rb
@@ -78,11 +78,9 @@ module Order
 
         finalize!
 
-        class << self
-          # @!parse
-          #   # @return [Array<Symbol>]
-          #   def values; end
-        end
+        # @!parse
+        #   # @return [Array<Symbol>]
+        #   def self.values; end
       end
     end
   end
diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb
index f3701e63..351e323f 100644
--- a/lib/openai/models/beta/threads/run_status.rb
+++ b/lib/openai/models/beta/threads/run_status.rb
@@ -22,11 +22,9 @@ module RunStatus
 
       finalize!
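A hypothetical usage sketch, not part of the patch: `values` is the runtime reader the SDK's enum helper provides, so callers see no difference after the comment move (example members illustrative):

    require "openai"

    statuses = OpenAI::Models::Beta::Threads::RunStatus.values
    # Enum members are plain symbols, e.g. :queued, :in_progress, :completed.
    raise "expected symbols" unless statuses.all? { |s| s.is_a?(Symbol) }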
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index e21e1235..4cfc8daf 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -139,10 +139,8 @@ class Image < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 12321ae7..8b00e21b 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -98,10 +98,8 @@ module Output variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 00d5e5ca..282a9534 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -103,11 +103,9 @@ module Ranker finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -193,11 +191,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 15da4b34..acd1f507 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -192,11 +192,9 @@ module Code finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -213,11 +211,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The details of the run step. 
@@ -233,10 +229,8 @@ module StepDetails variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] + # def self.variants; end end # The type of run step, which can be either `message_creation` or `tool_calls`. @@ -248,11 +242,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Usage < OpenAI::BaseModel diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index d5fc45bb..84f2aa92 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -38,10 +38,8 @@ module StepDetails variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 9aad4b0e..f2f05bbe 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -13,11 +13,9 @@ module RunStepInclude finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index c699a8f7..c162f2ba 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -102,11 +102,9 @@ module Order finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index c6e9367f..12ba7d73 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -19,10 +19,8 @@ module ToolCall variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] + # def self.variants; end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index f58ae752..c8c796f3 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -19,10 +19,8 @@ module ToolCallDelta variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 5920c8d3..bc94f59d 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -150,11 +150,9 @@ module FinishReason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Logprobs < OpenAI::BaseModel @@ -191,11 +189,9 @@ module ServiceTier finalize! 
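For union modules such as `OpenAI::Models::Beta::Threads::Runs::ToolCall` above, the relocated directive documents `variants` instead. A sketch, assuming the lazily declared variants resolve to model classes:

    require "openai"

    # Lists the concrete classes a tool call may deserialize into.
    OpenAI::Models::Beta::Threads::Runs::ToolCall.variants.each { |v| puts v }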
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index e9ee4909..5264bab1 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -133,17 +133,13 @@ module ArrayOfContentPart variant :refusal, -> { OpenAI::Models::Chat::ChatCompletionContentPartRefusal } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] + # def self.variants; end end # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # @deprecated diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index c03b45ad..7de20292 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -43,11 +43,9 @@ module Format finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, @@ -66,11 +64,9 @@ module Voice finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index ee479086..dde734e3 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -245,11 +245,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class ToolCall < OpenAI::BaseModel @@ -338,11 +336,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end @@ -364,11 +360,9 @@ module FinishReason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Logprobs < OpenAI::BaseModel @@ -405,11 +399,9 @@ module ServiceTier finalize! 
-        class << self
-          # @!parse
-          #   # @return [Array<Symbol>]
-          #   def values; end
-        end
+        # @!parse
+        #   # @return [Array<Symbol>]
+        #   def self.values; end
       end
     end
   end
diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb
index 9b1d2144..72530466 100644
--- a/lib/openai/models/chat/chat_completion_content_part.rb
+++ b/lib/openai/models/chat/chat_completion_content_part.rb
@@ -89,10 +89,8 @@ class File < OpenAI::BaseModel
       end
 
       # @!parse
-      #   class << self
-      #     # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)]
-      #     def variants; end
-      #   end
+      #   # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)]
+      #   def self.variants; end
     end
   end
 
diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb
index b2a578df..3fd2baeb 100644
--- a/lib/openai/models/chat/chat_completion_content_part_image.rb
+++ b/lib/openai/models/chat/chat_completion_content_part_image.rb
@@ -62,11 +62,9 @@ module Detail
 
           finalize!
 
-          class << self
-            # @!parse
-            #   # @return [Array<Symbol>]
-            #   def values; end
-          end
+          # @!parse
+          #   # @return [Array<Symbol>]
+          #   def self.values; end
         end
       end
     end
diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb
index 27ce4e32..269e74c7 100644
--- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb
+++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb
@@ -57,11 +57,9 @@ module Format
 
         finalize!
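A usage sketch for the image content part edited above (hypothetical construction; the nested hash is assumed to coerce into the `ImageURL` model, and `detail:` takes one of the `Detail` symbols):

    part = OpenAI::Models::Chat::ChatCompletionContentPartImage.new(
      image_url: {url: "https://example.com/cat.png", detail: :high}
    )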
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 44c3742f..1ec9bc97 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -54,10 +54,8 @@ module Content variant OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index c8a3a17d..d55d9826 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -33,10 +33,8 @@ module ChatCompletionMessageParam variant :function, -> { OpenAI::Models::Chat::ChatCompletionFunctionMessageParam } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] + # def self.variants; end end end diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index 9d7033f1..f759271a 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -11,11 +11,9 @@ module ChatCompletionModality finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index ed62e69d..b71a21fb 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -47,10 +47,8 @@ module Content variant OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index ad95f7d6..33673904 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -16,11 +16,9 @@ module ChatCompletionRole finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index ac99aca3..7544935c 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -54,10 +54,8 @@ module Content variant OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 349d84e1..7b6d3ff8 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -33,18 +33,14 @@ module Auto finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # @!parse - # class << self - # # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] - # def variants; end - # end + # # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] + # def self.variants; end end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index fc5f1903..0eb97603 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -45,10 +45,8 @@ module Content variant OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 89d87d20..fd78d809 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -53,10 +53,8 @@ module Content variant OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index b22818b3..6953d7a6 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -434,10 +434,8 @@ module Model variant enum: -> { OpenAI::Models::ChatModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # def self.variants; end end # @deprecated @@ -476,18 +474,14 @@ module FunctionCallMode finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # @!parse - # class << self - # # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] - # def variants; end - # end + # # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] + # def self.variants; end end # @deprecated @@ -544,11 +538,9 @@ module Modality finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # An object specifying the format that the model must output. @@ -578,10 +570,8 @@ module ResponseFormat variant -> { OpenAI::Models::ResponseFormatJSONObject } # @!parse - # class << self - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] + # def self.variants; end end # Specifies the latency tier to use for processing the request. This parameter is @@ -606,11 +596,9 @@ module ServiceTier finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -625,10 +613,8 @@ module Stop variant OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end class WebSearchOptions < OpenAI::BaseModel @@ -675,11 +661,9 @@ module SearchContextSize finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class UserLocation < OpenAI::BaseModel diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 8c9be5ed..9c964a04 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -79,11 +79,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index fb9059c7..797d4ed2 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -60,11 +60,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 5c34a108..3da0931e 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -51,11 +51,9 @@ module ChatModel finalize! 
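The `Model` union in `CompletionCreateParams` above accepts either a raw string or a `ChatModel` enum member. A sketch (model name illustrative, construction shape assumed):

    params = OpenAI::Models::Chat::CompletionCreateParams.new(
      messages: [{role: :user, content: "Hello!"}],
      model: "gpt-4o" # or a Symbol drawn from OpenAI::Models::ChatModel.values
    )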
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 3d5e8ba5..e79a3435 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -61,11 +61,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The value to compare against the attribute key; supports string, number, or @@ -80,10 +78,8 @@ module Value variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 26b78056..5e4a549b 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -50,11 +50,9 @@ module FinishReason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Logprobs < OpenAI::BaseModel diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index cbdc341b..6e791a6b 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -253,10 +253,8 @@ module Model variant const: OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 # @!parse - # class << self - # # @return [Array(String, Symbol)] - # def variants; end - # end + # # @return [Array(String, Symbol)] + # def self.variants; end end # The prompt(s) to generate completions for, encoded as a string, array of @@ -283,10 +281,8 @@ module Prompt variant OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray # @!parse - # class << self - # # @return [Array(String, Array, Array, Array>)] - # def variants; end - # end + # # @return [Array(String, Array, Array, Array>)] + # def self.variants; end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -301,10 +297,8 @@ module Stop variant OpenAI::Models::CompletionCreateParams::Stop::StringArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end end end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index f27c7333..7fd45e2c 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -37,10 +37,8 @@ module Filter variant OpenAI::Unknown # @!parse - # class << self - # # @return [Array(OpenAI::Models::ComparisonFilter, Object)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ComparisonFilter, Object)] + # def self.variants; end end # Type of operation: `and` or `or`. @@ -52,11 +50,9 @@ module Type finalize! 
-        class << self
-          # @!parse
-          #   # @return [Array<Symbol>]
-          #   def values; end
-        end
+        # @!parse
+        #   # @return [Array<Symbol>]
+        #   def self.values; end
       end
     end
   end
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb
index d41fd3c1..03177d2a 100644
--- a/lib/openai/models/embedding_create_params.rb
+++ b/lib/openai/models/embedding_create_params.rb
@@ -106,10 +106,8 @@ module Input
       variant OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray
 
       # @!parse
-      #   class << self
-      #     # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
-      #     def variants; end
-      #   end
+      #   # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
+      #   def self.variants; end
     end
 
     # ID of the model to use. You can use the
@@ -126,10 +124,8 @@ module Model
       variant enum: -> { OpenAI::Models::EmbeddingModel }
 
      # @!parse
-      #   class << self
-      #     # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)]
-      #     def variants; end
-      #   end
+      #   # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)]
+      #   def self.variants; end
     end
 
     # The format to return the embeddings in. Can be either `float` or
@@ -142,11 +138,9 @@ module EncodingFormat
 
       finalize!
 
-      class << self
-        # @!parse
-        #   # @return [Array<Symbol>]
-        #   def values; end
-      end
+      # @!parse
+      #   # @return [Array<Symbol>]
+      #   def self.values; end
     end
   end
 
diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb
index a3ace03e..e2e7df6a 100644
--- a/lib/openai/models/embedding_model.rb
+++ b/lib/openai/models/embedding_model.rb
@@ -11,11 +11,9 @@ module EmbeddingModel
 
     finalize!
 
-    class << self
-      # @!parse
-      #   # @return [Array<Symbol>]
-      #   def values; end
-    end
+    # @!parse
+    #   # @return [Array<Symbol>]
+    #   def self.values; end
   end
 end
 
diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb
index 9e917e35..5cfb6edf 100644
--- a/lib/openai/models/file_chunking_strategy.rb
+++ b/lib/openai/models/file_chunking_strategy.rb
@@ -14,10 +14,8 @@ module FileChunkingStrategy
     variant :other, -> { OpenAI::Models::OtherFileChunkingStrategyObject }
 
     # @!parse
-    #   class << self
-    #     # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)]
-    #     def variants; end
-    #   end
+    #   # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)]
+    #   def self.variants; end
   end
 end
 
diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb
index d1943074..38682a15 100644
--- a/lib/openai/models/file_chunking_strategy_param.rb
+++ b/lib/openai/models/file_chunking_strategy_param.rb
@@ -16,10 +16,8 @@ module FileChunkingStrategyParam
     variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObjectParam }
 
     # @!parse
-    #   class << self
-    #     # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)]
-    #     def variants; end
-    #   end
+    #   # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)]
+    #   def self.variants; end
   end
 end
 
diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb
index 814201ae..9aa3afb3 100644
--- a/lib/openai/models/file_list_params.rb
+++ b/lib/openai/models/file_list_params.rb
@@ -73,11 +73,9 @@ module Order
 
       finalize!
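The restored `Input` signature above covers four payload shapes; each element below is one possible `input:` value, shown together only for brevity (token IDs illustrative):

    inputs = [
      "The food was delicious.",       # String
      ["first text", "second text"],   # Array<String>
      [1212, 318, 257, 1332],          # Array<Integer>
      [[1212, 318], [257, 1332]]       # Array<Array<Integer>>
    ]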
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index 0ff33248..7b08ec97 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -115,11 +115,9 @@ module Purpose finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # @deprecated @@ -135,11 +133,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 5a1df197..73d23787 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -18,11 +18,9 @@ module FilePurpose finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 9a0b6281..743bd260 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -282,10 +282,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -298,10 +296,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -314,10 +310,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end @@ -335,11 +329,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Method < OpenAI::BaseModel @@ -475,10 +467,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -491,10 +481,8 @@ module Beta variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -507,10 +495,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -523,10 +509,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end end @@ -610,10 +594,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -626,10 +608,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -642,10 +622,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end end @@ -659,11 +637,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 11db3c9d..92a9a009 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -79,11 +79,9 @@ module Level finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The type of event. @@ -95,11 +93,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index c75b6b94..0f224244 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -167,10 +167,8 @@ module Model variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI # @!parse - # class << self - # # @return [Array(String, Symbol)] - # def variants; end - # end + # # @return [Array(String, Symbol)] + # def self.variants; end end # @deprecated @@ -232,10 +230,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -248,10 +244,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -264,10 +258,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end @@ -481,10 +473,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -497,10 +487,8 @@ module Beta variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -513,10 +501,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -529,10 +515,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end end @@ -616,10 +600,8 @@ module BatchSize variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -632,10 +614,8 @@ module LearningRateMultiplier variant Float # @!parse - # class << self - # # @return [Array(Symbol, :auto, Float)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Float)] + # def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -648,10 +628,8 @@ module NEpochs variant Integer # @!parse - # class << self - # # @return [Array(Symbol, :auto, Integer)] - # def variants; end - # end + # # @return [Array(Symbol, :auto, Integer)] + # def self.variants; end end end end @@ -665,11 +643,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 6b469d42..19e34407 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -81,10 +81,8 @@ module Model variant enum: -> { OpenAI::Models::ImageModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # def self.variants; end end # The format in which the generated images are returned. Must be one of `url` or @@ -98,11 +96,9 @@ module ResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -116,11 +112,9 @@ module Size finalize! 
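The fine-tuning hyperparameter unions above all share the same `:auto`-or-number shape, so a request may mix the variants freely (values illustrative):

    hyperparameters = {
      batch_size: :auto,             # Symbol variant
      learning_rate_multiplier: 1.8, # Float variant
      n_epochs: 3                    # Integer variant
    }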
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index a2017242..459ed49c 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -112,10 +112,8 @@ module Model variant enum: -> { OpenAI::Models::ImageModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # def self.variants; end end # The format in which the generated images are returned. Must be one of `url` or @@ -129,11 +127,9 @@ module ResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -147,11 +143,9 @@ module Size finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 420abb8a..377d17b9 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -114,10 +114,8 @@ module Model variant enum: -> { OpenAI::Models::ImageModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # def self.variants; end end # The quality of the image that will be generated. `hd` creates images with finer @@ -131,11 +129,9 @@ module Quality finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The format in which the generated images are returned. Must be one of `url` or @@ -149,11 +145,9 @@ module ResponseFormat finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -170,11 +164,9 @@ module Size finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -189,11 +181,9 @@ module Style finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index 7ebd947c..5f3d315c 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -10,11 +10,9 @@ module ImageModel finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 922325b5..b1bb2ec1 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -316,11 +316,9 @@ module Harassment finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module HarassmentThreatening @@ -330,11 +328,9 @@ module HarassmentThreatening finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module Hate @@ -344,11 +340,9 @@ module Hate finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module HateThreatening @@ -358,11 +352,9 @@ module HateThreatening finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module Illicit @@ -372,11 +364,9 @@ module Illicit finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module IllicitViolent @@ -386,11 +376,9 @@ module IllicitViolent finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module SelfHarm @@ -401,11 +389,9 @@ module SelfHarm finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module SelfHarmInstruction @@ -416,11 +402,9 @@ module SelfHarmInstruction finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module SelfHarmIntent @@ -431,11 +415,9 @@ module SelfHarmIntent finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module Sexual @@ -446,11 +428,9 @@ module Sexual finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module SexualMinor @@ -460,11 +440,9 @@ module SexualMinor finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module Violence @@ -475,11 +453,9 @@ module Violence finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module ViolenceGraphic @@ -490,11 +466,9 @@ module ViolenceGraphic finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 89acded2..a3c068ef 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -55,10 +55,8 @@ module Input variant OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray # @!parse - # class << self - # # @return [Array(String, Array, Array)] - # def variants; end - # end + # # @return [Array(String, Array, Array)] + # def self.variants; end end # The content moderation model you would like to use. 
Learn more in @@ -76,10 +74,8 @@ module Model variant enum: -> { OpenAI::Models::ModerationModel } # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] + # def self.variants; end end end end diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index 58e33aeb..8d50271d 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -12,11 +12,9 @@ module ModerationModel finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index c6441173..732739a6 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -15,10 +15,8 @@ module ModerationMultiModalInput variant :text, -> { OpenAI::Models::ModerationTextInput } # @!parse - # class << self - # # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + # def self.variants; end end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 86ed1a42..64a9d430 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -50,11 +50,9 @@ module GenerateSummary finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index e59ef5cb..f123316a 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -17,11 +17,9 @@ module ReasoningEffort finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 86220eb2..6802a365 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -52,11 +52,9 @@ module Environment finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index cbc17798..3c79d015 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -56,10 +56,8 @@ module Content variant -> { OpenAI::Models::Responses::ResponseInputMessageContentList } # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -74,11 +72,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The type of the message input. Always `message`. @@ -89,11 +85,9 @@ module Type finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index a557be49..6c9f5511 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -73,10 +73,8 @@ module Filters variant -> { OpenAI::Models::CompoundFilter } # @!parse - # class << self - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] + # def self.variants; end end class RankingOptions < OpenAI::BaseModel @@ -121,11 +119,9 @@ module Ranker finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index c0cd1b8d..d55641a5 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -75,11 +75,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 60bff913..35ed1c06 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -298,11 +298,9 @@ module Reason finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -330,10 +328,8 @@ module ToolChoice variant -> { OpenAI::Models::Responses::ToolChoiceFunction } # @!parse - # class << self - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def variants; end - # end + # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] + # def self.variants; end end # The truncation strategy to use for the model response. @@ -351,11 +347,9 @@ module Truncation finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 8c1cc921..0d7ae42f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -131,10 +131,8 @@ class File < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] + # def self.variants; end end # The status of the code interpreter tool call. @@ -147,11 +145,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 0123e679..6e5128d4 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -143,11 +143,9 @@ module Button finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -407,10 +405,8 @@ class Wait < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] + # def self.variants; end end class PendingSafetyCheck < OpenAI::BaseModel @@ -455,11 +451,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The type of the computer call. Always `computer_call`. @@ -470,11 +464,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index cc5bfbe9..856d3c99 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -105,11 +105,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index ad933f5d..c3e86848 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -23,10 +23,8 @@ module ResponseContent variant -> { OpenAI::Models::Responses::ResponseOutputRefusal } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index a1fb23a0..d1242f76 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -60,10 +60,8 @@ module Part variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 968c86e1..8481a853 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -60,10 +60,8 @@ module Part variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index d49d609c..7557e478 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -263,10 +263,8 @@ module Input variant -> { OpenAI::Models::Responses::ResponseInput } # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # How the model should select which tool (or tools) to use when generating a @@ -293,10 +291,8 @@ module ToolChoice variant -> { OpenAI::Models::Responses::ToolChoiceFunction } # @!parse - # class << self - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def variants; end - 
# end + # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] + # def self.variants; end end # The truncation strategy to use for the model response. @@ -314,11 +310,9 @@ module Truncation finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 2fcc855b..4b7b4755 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -51,11 +51,9 @@ module Code finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 9b5b331c..20201242 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -65,11 +65,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class Result < OpenAI::BaseModel @@ -146,10 +144,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 81d6ac10..ee665d54 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -35,10 +35,8 @@ module ResponseFormatTextConfig variant :json_object, -> { OpenAI::Models::ResponseFormatJSONObject } # @!parse - # class << self - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 99de4281..6d337c8f 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -76,11 +76,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index a3113d19..83d109ab 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -61,11 +61,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 05bb88d9..78f572ca 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -46,11 +46,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 2ed1572f..39763a87 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -20,11 +20,9 @@ module ResponseIncludable finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 0beb7f70..a039c35c 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -42,11 +42,9 @@ module Format finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index fd4e014d..41f9a7d7 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -19,10 +19,8 @@ module ResponseInputContent variant :input_file, -> { OpenAI::Models::Responses::ResponseInputFile } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 025c1255..2e770199 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -54,11 +54,9 @@ module Detail finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 5aade5e3..d2e09cef 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -116,11 +116,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -134,11 +132,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The type of the message input. Always set to `message`. @@ -149,11 +145,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -265,11 +259,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -338,11 +330,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -371,10 +361,8 @@ class ItemReference < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index af7318e2..3962d98d 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -65,11 +65,9 @@ module Role finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -83,11 +81,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # The type of the message input. Always set to `message`. @@ -98,11 +94,9 @@ module Type finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index baeff0e5..ca1c9ee7 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -35,10 +35,8 @@ module ResponseItem variant :function_call_output, -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 9bdb2a25..2666a922 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -33,10 +33,8 @@ module ResponseOutputItem variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index e602d2ea..04b05a0d 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -62,10 +62,8 @@ module Content variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # def self.variants; end end # The status of the message input. One of `in_progress`, `completed`, or @@ -79,11 +77,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index a5027854..e6c46e68 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -157,10 +157,8 @@ class FilePath < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 8309c595..bb1bc110 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -79,11 +79,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index 0dd22d9b..31187039 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -15,11 +15,9 @@ module ResponseStatus finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 73a45396..5dddc41f 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -122,10 +122,8 @@ module ResponseStreamEvent -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, 
OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index a8dbcfba..f796c035 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -191,10 +191,8 @@ class FilePath < OpenAI::BaseModel end # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index c3d097ce..3072312f 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -28,10 +28,8 @@ module Tool variant -> { OpenAI::Models::Responses::WebSearchTool } # @!parse - # class << self - # # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 48a22269..00c16a94 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -20,11 +20,9 @@ module ToolChoiceOptions finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index c62ca447..b6041f87 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -45,11 +45,9 @@ module Type finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 1826a57b..31542421 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -54,11 +54,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # High level guidance for the amount of context window space to use for the @@ -72,11 +70,9 @@ module SearchContextSize finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class UserLocation < OpenAI::BaseModel diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 51c15860..3dff1960 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -27,10 +27,8 @@ module ResponsesModel variant const: OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW_2025_03_11 # @!parse - # class << self - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] - # def variants; end - # end + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] + # def self.variants; end end end end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 3e23d757..4f319b8c 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -87,11 +87,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index f86bb7ea..dd5df66d 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -169,11 +169,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end class ExpiresAfter < OpenAI::BaseModel diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 94cfda44..74392037 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -76,11 +76,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 38670d94..24e8a53e 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -87,10 +87,8 @@ module Query variant OpenAI::Models::VectorStoreSearchParams::Query::StringArray # @!parse - # class << self - # # @return [Array(String, Array)] - # def variants; end - # end + # # @return [Array(String, Array)] + # def self.variants; end end # A filter to apply based on file attributes. 
@@ -104,10 +102,8 @@ module Filters variant -> { OpenAI::Models::CompoundFilter } # @!parse - # class << self - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def variants; end - # end + # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] + # def self.variants; end end class RankingOptions < OpenAI::BaseModel @@ -147,11 +143,9 @@ module Ranker finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 23f38c59..11a8b9e3 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -60,10 +60,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end class Content < OpenAI::BaseModel @@ -95,11 +93,9 @@ module Type finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index ad9ef46e..dcf7edc1 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -59,10 +59,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 13e346c1..9ea2e0db 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -95,11 +95,9 @@ module Filter finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -112,11 +110,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 58fbaa08..44994550 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -59,10 +59,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 092f40b0..7d603af5 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -89,11 +89,9 @@ module Filter finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -106,11 +104,9 @@ module Order finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index c88b9a73..5e67af85 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -44,10 +44,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index ad361605..12c6f576 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -139,11 +139,9 @@ module Code finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end @@ -160,11 +158,9 @@ module Status finalize! - class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end module Attribute @@ -177,10 +173,8 @@ module Attribute variant OpenAI::BooleanModel # @!parse - # class << self - # # @return [Array(String, Float, Boolean)] - # def variants; end - # end + # # @return [Array(String, Float, Boolean)] + # def self.variants; end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index fbc1cc4f..8fc405d8 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -113,11 +113,9 @@ module Status finalize! 
- class << self - # @!parse - # # @return [Array] - # def values; end - end + # @!parse + # # @return [Array] + # def self.values; end end end end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index c1a126f5..16ff78e8 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -5,9 +5,6 @@ module OpenAI module AllModels extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol)} } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::TaggedSymbol) } @@ -17,10 +14,8 @@ module OpenAI COMPUTER_USE_PREVIEW_2025_03_11 = T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::TaggedSymbol) - class << self - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 5c59d0c7..f5f11b88 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -84,12 +84,8 @@ module OpenAI module Model extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)} } - - class << self - sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) } + def self.variants end end @@ -113,10 +109,8 @@ module OpenAI SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } + def self.values end end @@ -136,10 +130,8 @@ module OpenAI WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 495283eb..14159fd5 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -13,10 +13,8 @@ module OpenAI TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } + def self.values end end end diff --git 
a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 93b5c317..664bc188 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -140,12 +140,8 @@ module OpenAI module Model extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} } - - class << self - sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + def self.variants end end @@ -161,13 +157,11 @@ module OpenAI SEGMENT = T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) - class << self - sig do - override - .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol]) - end - def values - end + sig do + override + .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol]) + end + def self.values end end end diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index a15d78bf..482495e5 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -8,13 +8,8 @@ module OpenAI module TranscriptionCreateResponse extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)} } - - class << self - sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) } - def variants - end + sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 531eaf5f..b9eb4918 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -11,10 +11,8 @@ module OpenAI LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index 40a5e97b..82ef83d4 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -10,25 +10,13 @@ module OpenAI module TranscriptionStreamEvent extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Audio::TranscriptionTextDeltaEvent, - OpenAI::Models::Audio::TranscriptionTextDoneEvent - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] + ) + end + def self.variants end end 
     end
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi
index 6f8d3e8b..a6f7d249 100644
--- a/rbi/lib/openai/models/audio/translation_create_params.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_params.rbi
@@ -81,12 +81,8 @@ module OpenAI
         module Model
           extend OpenAI::Union

-          Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::AudioModel::OrSymbol)} }
-
-          class << self
-            sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) }
-            def variants
-            end
+          sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) }
+          def self.variants
           end
         end

@@ -107,10 +103,8 @@ module OpenAI
             T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol)
           VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol]) }
+          def self.values
           end
         end
       end
diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi
index 3ee8d56c..8c9c9851 100644
--- a/rbi/lib/openai/models/audio/translation_create_response.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_response.rbi
@@ -6,13 +6,8 @@ module OpenAI
       module TranslationCreateResponse
         extend OpenAI::Union

-        Variants =
-          type_template(:out) { {fixed: T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)} }
-
-        class << self
-          sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) }
-          def variants
-          end
+        sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) }
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi
index 5ae6945c..eb369760 100644
--- a/rbi/lib/openai/models/audio_model.rbi
+++ b/rbi/lib/openai/models/audio_model.rbi
@@ -12,10 +12,8 @@ module OpenAI
       GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol)
       GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::AudioModel::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::AudioModel::TaggedSymbol]) }
+      def self.values
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi
index 37fec73f..dc544cfd 100644
--- a/rbi/lib/openai/models/audio_response_format.rbi
+++ b/rbi/lib/openai/models/audio_response_format.rbi
@@ -17,10 +17,8 @@ module OpenAI
       VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::TaggedSymbol)
       VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::AudioResponseFormat::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::AudioResponseFormat::TaggedSymbol]) }
+      def self.values
       end
     end
   end
diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi
index c8c01aac..6e8237c7 100644
--- a/rbi/lib/openai/models/batch.rbi
+++ b/rbi/lib/openai/models/batch.rbi
@@ -217,10 +217,8 @@ module OpenAI
         CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol)
         CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::Batch::Status::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::Batch::Status::TaggedSymbol]) }
+        def self.values
         end
       end

diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi
index 25f982eb..0cdcccff 100644
--- a/rbi/lib/openai/models/batch_create_params.rbi
+++ b/rbi/lib/openai/models/batch_create_params.rbi
@@ -78,10 +78,8 @@ module OpenAI

         NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) }
+        def self.values
         end
       end

@@ -101,10 +99,8 @@ module OpenAI
         V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol)
         V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol]) }
+        def self.values
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi
index 24382098..c4111c93 100644
--- a/rbi/lib/openai/models/beta/assistant_create_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi
@@ -233,12 +233,8 @@ module OpenAI
         module Model
           extend OpenAI::Union

-          Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
-
-          class << self
-            sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
-            def variants
-            end
+          sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+          def self.variants
           end
         end

@@ -453,16 +449,6 @@ module OpenAI
               module ChunkingStrategy
                 extend OpenAI::Union

-                Variants =
-                  type_template(:out) do
-                    {
-                      fixed: T.any(
-                        OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto,
-                        OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-                      )
-                    }
-                  end
-
                 class Auto < OpenAI::BaseModel
                   # Always `auto`.
                   sig { returns(Symbol) }
@@ -554,15 +540,13 @@ module OpenAI
                   end
                 end

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
-                      )
-                  end
-                  def variants
-                  end
+                sig do
+                  override
+                    .returns(
+                      [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+                    )
+                end
+                def self.variants
                 end
               end
             end
diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi
index 845086ec..4cbb19dc 100644
--- a/rbi/lib/openai/models/beta/assistant_list_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi
@@ -82,10 +82,8 @@ module OpenAI
         ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol)
         DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol]) }
+        def self.values
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
index 65a440c2..a44a0adc 100644
--- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
+++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi
@@ -26,27 +26,13 @@ module OpenAI
       module AssistantResponseFormatOption
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                Symbol,
-                OpenAI::Models::ResponseFormatText,
-                OpenAI::Models::ResponseFormatJSONObject,
-                OpenAI::Models::ResponseFormatJSONSchema
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi
index eb08519f..5aa7ac81 100644
--- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi
@@ -26,38 +26,6 @@ module OpenAI
       module AssistantStreamEvent
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted,
-                OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete,
-                OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
-              )
-            }
-          end
-
         class ThreadCreated < OpenAI::BaseModel
           # Represents a thread that contains
           # [messages](https://platform.openai.com/docs/api-reference/messages).
@@ -705,15 +673,13 @@ module OpenAI
           end
         end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi
index aea5d000..f9ca4d29 100644
--- a/rbi/lib/openai/models/beta/assistant_tool.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool.rbi
@@ -6,26 +6,13 @@ module OpenAI
       module AssistantTool
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::CodeInterpreterTool,
-                OpenAI::Models::Beta::FileSearchTool,
-                OpenAI::Models::Beta::FunctionTool
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
index 47eef189..eaf93e66 100644
--- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi
@@ -50,10 +50,8 @@ module OpenAI
             T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol)
           FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol]) }
+          def self.values
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
index 8afac7a6..0df3bce3 100644
--- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
+++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
@@ -13,16 +13,6 @@ module OpenAI
       module AssistantToolChoiceOption
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
-                OpenAI::Models::Beta::AssistantToolChoice
-              )
-            }
-          end
-
         # `none` means the model will not call any tools and instead generates a message.
         # `auto` means the model can pick between generating a message or calling one or
         # more tools. `required` means the model must call one or more tools before
@@ -38,22 +28,18 @@ module OpenAI
           AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol)
           REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol]) }
+          def self.values
           end
         end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi
index 09cfa6aa..bd4b57f2 100644
--- a/rbi/lib/openai/models/beta/assistant_update_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi
@@ -236,9 +236,6 @@ module OpenAI
         module Model
           extend OpenAI::Union

-          Variants =
-            type_template(:out) { {fixed: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)} }
-
           TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) }
           OrSymbol =
             T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) }
@@ -293,10 +290,8 @@ module OpenAI
           GPT_3_5_TURBO_16K_0613 =
             T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol)

-          class << self
-            sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) }
-            def variants
-            end
+          sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) }
+          def self.variants
           end
         end

diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi
index cc00d4a9..4823e922 100644
--- a/rbi/lib/openai/models/beta/file_search_tool.rbi
+++ b/rbi/lib/openai/models/beta/file_search_tool.rbi
@@ -141,13 +141,11 @@ module OpenAI
               OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol
             )

-            class << self
-              sig do
-                override
-                  .returns(T::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol])
+            end
+            def self.values
             end
           end
         end
diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi
index 6487db0b..d141e77e 100644
--- a/rbi/lib/openai/models/beta/message_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/message_stream_event.rbi
@@ -9,19 +9,6 @@ module OpenAI
       module MessageStreamEvent
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated,
-                OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress,
-                OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta,
-                OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted,
-                OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete
-              )
-            }
-          end
-
         class ThreadMessageCreated < OpenAI::BaseModel
           # Represents a message within a
           # [thread](https://platform.openai.com/docs/api-reference/threads).
@@ -160,15 +147,13 @@ module OpenAI
           end
         end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi
index f39da267..18316a2b 100644
--- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi
@@ -9,21 +9,6 @@ module OpenAI
       module RunStepStreamEvent
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled,
-                OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired
-              )
-            }
-          end
-
         class ThreadRunStepCreated < OpenAI::BaseModel
           # Represents a step in execution of a run.
          sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) }
@@ -210,15 +195,13 @@ module OpenAI
           end
         end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi
index 03f0c58b..48a1e1c2 100644
--- a/rbi/lib/openai/models/beta/run_stream_event.rbi
+++ b/rbi/lib/openai/models/beta/run_stream_event.rbi
@@ -8,24 +8,6 @@ module OpenAI
       module RunStreamEvent
         extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled,
-                OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired
-              )
-            }
-          end
-
         class ThreadRunCreated < OpenAI::BaseModel
           # Represents an execution run on a
           # [thread](https://platform.openai.com/docs/api-reference/threads).
@@ -286,15 +268,13 @@ module OpenAI
           end
         end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
+            )
+        end
+        def self.variants
         end
       end
     end
diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
index ed08bc73..eeeb28a0 100644
--- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
@@ -300,12 +300,8 @@ module OpenAI
         module Model
           extend OpenAI::Union

-          Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
-
-          class << self
-            sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
-            def variants
-            end
+          sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+          def self.variants
           end
         end

@@ -468,43 +464,25 @@ module OpenAI
             module Content
               extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      String,
-                      T::Array[
-                        T.any(
-                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                          OpenAI::Models::Beta::Threads::TextContentBlockParam
-                        )
-                      ]
-                    )
-                  }
-                end
-
               MessageContentPartParamArray =
                 T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [
-                        String,
-                        T::Array[
-                          T.any(
-                            OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                            OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                            OpenAI::Models::Beta::Threads::TextContentBlockParam
-                          )
-                        ]
-                      ]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [
+                      String,
+                      T::Array[
+                        T.any(
+                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                          OpenAI::Models::Beta::Threads::TextContentBlockParam
+                        )
+                      ]
+                    ]
+                  )
+              end
+              def self.variants
               end
             end

@@ -526,13 +504,11 @@ module OpenAI
               ASSISTANT =
                 T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol)

-              class << self
-                sig do
-                  override
-                    .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol])
-                end
-                def values
-                end
+              sig do
+                override
+                  .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol])
+              end
+              def self.values
              end
            end

@@ -609,16 +585,6 @@ module OpenAI
               module Tool
                 extend OpenAI::Union

-                Variants =
-                  type_template(:out) do
-                    {
-                      fixed: T.any(
-                        OpenAI::Models::Beta::CodeInterpreterTool,
-                        OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch
-                      )
-                    }
-                  end
-
                 class FileSearch < OpenAI::BaseModel
                   # The type of tool being defined: `file_search`
                   sig { returns(Symbol) }
@@ -633,15 +599,13 @@ module OpenAI
                   end
                 end

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
-                      )
-                  end
-                  def variants
-                  end
+                sig do
+                  override
+                    .returns(
+                      [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
+                    )
+                end
+                def self.variants
                 end
               end
             end

@@ -872,16 +836,6 @@ module OpenAI
               module ChunkingStrategy
                 extend OpenAI::Union

-                Variants =
-                  type_template(:out) do
-                    {
-                      fixed: T.any(
-                        OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto,
-                        OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-                      )
-                    }
-                  end
-
                 class Auto < OpenAI::BaseModel
                   # Always `auto`.
                   sig { returns(Symbol) }
@@ -975,15 +929,13 @@ module OpenAI
                   end
                 end

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
-                      )
-                  end
-                  def variants
-                  end
+                sig do
+                  override
+                    .returns(
+                      [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+                    )
+                end
+                def self.variants
                 end
               end
             end

@@ -1089,26 +1041,13 @@ module OpenAI
         module Tool
           extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::CodeInterpreterTool,
-                  OpenAI::Models::Beta::FileSearchTool,
-                  OpenAI::Models::Beta::FunctionTool
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+              )
+          end
+          def self.variants
           end
         end

@@ -1169,13 +1108,11 @@ module OpenAI
              OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
            )

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol])
+          end
+          def self.values
           end
         end
       end
diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi
index fe346edf..753f8c43 100644
--- a/rbi/lib/openai/models/beta/thread_create_params.rbi
+++ b/rbi/lib/openai/models/beta/thread_create_params.rbi
@@ -161,43 +161,25 @@ module OpenAI
           module Content
             extend OpenAI::Union

-            Variants =
-              type_template(:out) do
-                {
-                  fixed: T.any(
-                    String,
-                    T::Array[
-                      T.any(
-                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                        OpenAI::Models::Beta::Threads::TextContentBlockParam
-                      )
-                    ]
-                  )
-                }
-              end
-
             MessageContentPartParamArray =
               T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)

-            class << self
-              sig do
-                override
-                  .returns(
-                    [
-                      String,
-                      T::Array[
-                        T.any(
-                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                          OpenAI::Models::Beta::Threads::TextContentBlockParam
-                        )
-                      ]
-                    ]
-                  )
-              end
-              def variants
-              end
+            sig do
+              override
+                .returns(
+                  [
+                    String,
+                    T::Array[
+                      T.any(
+                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                        OpenAI::Models::Beta::Threads::TextContentBlockParam
+                      )
+                    ]
+                  ]
+                )
+            end
+            def self.variants
             end
           end

@@ -217,10 +199,8 @@ module OpenAI
           USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol)
           ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol]) }
+            def self.values
            end
          end

@@ -297,16 +277,6 @@ module OpenAI
             module Tool
               extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      OpenAI::Models::Beta::CodeInterpreterTool,
-                      OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch
-                    )
-                  }
-                end
-
               class FileSearch < OpenAI::BaseModel
                 # The type of tool being defined: `file_search`
                 sig { returns(Symbol) }
@@ -321,15 +291,13 @@ module OpenAI
                 end
               end

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch]
+                  )
+              end
+              def self.variants
               end
             end
           end

@@ -546,16 +514,6 @@ module OpenAI
               module ChunkingStrategy
                 extend OpenAI::Union

-                Variants =
-                  type_template(:out) do
-                    {
-                      fixed: T.any(
-                        OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto,
-                        OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
-                      )
-                    }
-                  end
-
                 class Auto < OpenAI::BaseModel
                   # Always `auto`.
                   sig { returns(Symbol) }
@@ -647,15 +605,13 @@ module OpenAI
                   end
                 end

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
-                      )
-                  end
-                  def variants
-                  end
+                sig do
+                  override
+                    .returns(
+                      [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+                    )
+                end
+                def self.variants
                 end
               end
             end
diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi
index a260cd30..02f8a33e 100644
--- a/rbi/lib/openai/models/beta/threads/annotation.rbi
+++ b/rbi/lib/openai/models/beta/threads/annotation.rbi
@@ -10,25 +10,13 @@ module OpenAI
        module Annotation
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::FileCitationAnnotation,
-                  OpenAI::Models::Beta::Threads::FilePathAnnotation
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
index 170ebc33..133c007f 100644
--- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi
@@ -10,25 +10,13 @@ module OpenAI
        module AnnotationDelta
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation,
-                  OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi
index e7cc0c6e..4a97f32e 100644
--- a/rbi/lib/openai/models/beta/threads/image_file.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file.rbi
@@ -42,10 +42,8 @@ module OpenAI
            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol)
            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
index a560c68b..f2b98ec3 100644
--- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi
@@ -49,10 +49,8 @@ module OpenAI
            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)
            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi
index c27cc50c..732b0f05 100644
--- a/rbi/lib/openai/models/beta/threads/image_url.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url.rbi
@@ -41,10 +41,8 @@ module OpenAI
            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol)
            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
index e317b878..281a3d35 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
@@ -47,10 +47,8 @@ module OpenAI
            LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)
            HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi
index bf9c7500..875f4bcc 100644
--- a/rbi/lib/openai/models/beta/threads/message.rbi
+++ b/rbi/lib/openai/models/beta/threads/message.rbi
@@ -242,16 +242,6 @@ module OpenAI
            module Tool
              extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      OpenAI::Models::Beta::CodeInterpreterTool,
-                      OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly
-                    )
-                  }
-                end
-
              class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel
                # The type of tool being defined: `file_search`
                sig { returns(Symbol) }
@@ -266,15 +256,13 @@ module OpenAI
                end
              end

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly]
+                  )
+              end
+              def self.variants
              end
            end
          end

@@ -318,12 +306,10 @@ module OpenAI
            RUN_FAILED =
              T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol)

-            class << self
-              sig do
-                override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol])
+            end
+            def self.values
            end
          end
        end

@@ -338,10 +324,8 @@ module OpenAI
          USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol)
          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -357,10 +341,8 @@ module OpenAI
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol)
          COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi
index 58d1462c..3b799135 100644
--- a/rbi/lib/openai/models/beta/threads/message_content.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content.rbi
@@ -9,27 +9,13 @@ module OpenAI
        module MessageContent
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                  OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                  OpenAI::Models::Beta::Threads::TextContentBlock,
-                  OpenAI::Models::Beta::Threads::RefusalContentBlock
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
index b4216635..7cfeab9e 100644
--- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi
@@ -9,27 +9,13 @@ module OpenAI
        module MessageContentDelta
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::ImageFileDeltaBlock,
-                  OpenAI::Models::Beta::Threads::TextDeltaBlock,
-                  OpenAI::Models::Beta::Threads::RefusalDeltaBlock,
-                  OpenAI::Models::Beta::Threads::ImageURLDeltaBlock
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
index 215aeccb..e2e3c952 100644
--- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi
@@ -9,26 +9,13 @@ module OpenAI
        module MessageContentPartParam
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                  OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                  OpenAI::Models::Beta::Threads::TextContentBlockParam
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi
index f65aa74e..72b27b6e 100644
--- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi
@@ -100,43 +100,25 @@ module OpenAI
          module Content
            extend OpenAI::Union

-            Variants =
-              type_template(:out) do
-                {
-                  fixed: T.any(
-                    String,
-                    T::Array[
-                      T.any(
-                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                        OpenAI::Models::Beta::Threads::TextContentBlockParam
-                      )
-                    ]
-                  )
-                }
-              end
-
            MessageContentPartParamArray =
              T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)

-            class << self
-              sig do
-                override
-                  .returns(
-                    [
-                      String,
-                      T::Array[
-                        T.any(
-                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                          OpenAI::Models::Beta::Threads::TextContentBlockParam
-                        )
-                      ]
-                    ]
-                  )
-              end
-              def variants
-              end
+            sig do
+              override
+                .returns(
+                  [
+                    String,
+                    T::Array[
+                      T.any(
+                        OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                        OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                        OpenAI::Models::Beta::Threads::TextContentBlockParam
+                      )
+                    ]
+                  ]
+                )
+            end
+            def self.variants
            end
          end

@@ -156,10 +138,8 @@ module OpenAI
          USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol)
          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -236,16 +216,6 @@ module OpenAI
            module Tool
              extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      OpenAI::Models::Beta::CodeInterpreterTool,
-                      OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch
-                    )
-                  }
-                end
-
              class FileSearch < OpenAI::BaseModel
                # The type of tool being defined: `file_search`
                sig { returns(Symbol) }
@@ -260,15 +230,13 @@ module OpenAI
                end
              end

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch]
+                  )
+              end
+              def self.variants
              end
            end
          end
diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi
index 22b4e446..a89cb0f3 100644
--- a/rbi/lib/openai/models/beta/threads/message_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi
@@ -94,10 +94,8 @@ module OpenAI
          USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)
          ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi
index be115f45..32f85d3e 100644
--- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi
@@ -93,10 +93,8 @@ module OpenAI
          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol)
          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi
index 5be2cbe4..9a5ebeb1 100644
--- a/rbi/lib/openai/models/beta/threads/run.rbi
+++ b/rbi/lib/openai/models/beta/threads/run.rbi
@@ -403,10 +403,8 @@ module OpenAI
            MAX_PROMPT_TOKENS =
              T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol]) }
+            def self.values
            end
          end
        end

@@ -449,10 +447,8 @@ module OpenAI
            INVALID_PROMPT = T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol]) }
+            def self.values
            end
          end
        end

@@ -569,10 +565,8 @@ module OpenAI
            LAST_MESSAGES =
              T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi
index 096ace11..85c3c07f 100644
--- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi
@@ -412,43 +412,25 @@ module OpenAI
            module Content
              extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      String,
-                      T::Array[
-                        T.any(
-                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                          OpenAI::Models::Beta::Threads::TextContentBlockParam
-                        )
-                      ]
-                    )
-                  }
-                end
-
              MessageContentPartParamArray =
                T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter)

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [
-                        String,
-                        T::Array[
-                          T.any(
-                            OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                            OpenAI::Models::Beta::Threads::ImageURLContentBlock,
-                            OpenAI::Models::Beta::Threads::TextContentBlockParam
-                          )
-                        ]
-                      ]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [
+                      String,
+                      T::Array[
+                        T.any(
+                          OpenAI::Models::Beta::Threads::ImageFileContentBlock,
+                          OpenAI::Models::Beta::Threads::ImageURLContentBlock,
+                          OpenAI::Models::Beta::Threads::TextContentBlockParam
+                        )
+                      ]
+                    ]
+                  )
+              end
+              def self.variants
              end
            end

@@ -471,13 +453,11 @@ module OpenAI
              ASSISTANT =
                T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol)

-              class << self
-                sig do
-                  override
-                    .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol])
-                end
-                def values
-                end
+              sig do
+                override
+                  .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol])
+              end
+              def self.values
              end
            end

@@ -554,16 +534,6 @@ module OpenAI
              module Tool
                extend OpenAI::Union

-                Variants =
-                  type_template(:out) do
-                    {
-                      fixed: T.any(
-                        OpenAI::Models::Beta::CodeInterpreterTool,
-                        OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch
-                      )
-                    }
-                  end
-
                class FileSearch < OpenAI::BaseModel
                  # The type of tool being defined: `file_search`
                  sig { returns(Symbol) }
@@ -578,15 +548,13 @@ module OpenAI
                  end
                end

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]
-                      )
-                  end
-                  def variants
-                  end
+                sig do
+                  override
+                    .returns(
+                      [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch]
+                    )
+                end
+                def self.variants
                end
              end
            end

@@ -599,12 +567,8 @@ module OpenAI
          module Model
            extend OpenAI::Union

-            Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} }
-
-            class << self
-              sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
-              def variants
-              end
+            sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) }
+            def self.variants
            end
          end

@@ -665,13 +629,11 @@ module OpenAI
              OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol
            )

-            class << self
-              sig do
-                override
-                  .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol])
+            end
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi
index 54ec8f4c..daca9195 100644
--- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi
@@ -84,10 +84,8 @@ module OpenAI
          ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol)
          DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi
index fea1250b..243f07b9 100644
--- a/rbi/lib/openai/models/beta/threads/run_status.rbi
+++ b/rbi/lib/openai/models/beta/threads/run_status.rbi
@@ -23,10 +23,8 @@ module OpenAI
        INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol)
        EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol]) }
+        def self.values
        end
      end
    end
diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
index 8d3d6522..6720ae9a 100644
--- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi
@@ -116,16 +116,6 @@ module OpenAI
            module Output
              extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs,
-                      OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image
-                    )
-                  }
-                end
-
              class Logs < OpenAI::BaseModel
                # The text output from the Code Interpreter tool call.
                sig { returns(String) }
@@ -209,15 +199,13 @@ module OpenAI
                end
              end

-              class << self
-                sig do
-                  override
-                    .returns(
-                      [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image]
+                  )
+              end
+              def self.variants
              end
            end
          end
diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
index f8b5b1e9..627f5f07 100644
--- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi
@@ -144,25 +144,13 @@ module OpenAI
            module Output
              extend OpenAI::Union

-              Variants =
-                type_template(:out) do
-                  {
-                    fixed: T.any(
-                      OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs,
-                      OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage
-                    )
-                  }
-                end
-
-              class << self
-                sig do
-                  override
-                    .returns(
-                      [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
-                    )
-                end
-                def variants
-                end
+              sig do
+                override
+                  .returns(
+                    [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage]
+                  )
+              end
+              def self.variants
              end
            end
          end
diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
index 4b92c053..ee1a666e 100644
--- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi
@@ -165,13 +165,11 @@ module OpenAI
                OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol
              )

-              class << self
-                sig do
-                  override
-                    .returns(
-                      T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol]
-                    )
-                end
-                def values
-                end
+              sig do
+                override
+                  .returns(
+                    T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol]
+                  )
+              end
+              def self.values
              end
            end
          end

@@ -314,15 +312,13 @@ module OpenAI
                  OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol
                )

-                class << self
-                  sig do
-                    override
-                      .returns(
-                        T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol]
-                      )
-                  end
-                  def values
-                  end
+                sig do
+                  override
+                    .returns(
+                      T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol]
+                    )
+                end
+                def self.values
                end
              end
            end
diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
index 03af7f5d..79a32996 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi
@@ -222,10 +222,8 @@ module OpenAI
            RATE_LIMIT_EXCEEDED =
              T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol]) }
+            def self.values
            end
          end
        end

@@ -245,10 +243,8 @@ module OpenAI
          COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol)
          EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -256,25 +252,13 @@ module OpenAI
        module StepDetails
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails,
-                  OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails]
+              )
+          end
+          def self.variants
          end
        end

@@ -290,10 +274,8 @@ module OpenAI
            T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol)
          TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol]) }
+          def self.values
          end
        end

diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
index ecd4fba6..19089bfb 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi
@@ -63,25 +63,13 @@ module OpenAI
        module StepDetails
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta,
-                  OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
index 7216c68b..23cf6a3b 100644
--- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
+++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi
@@ -18,10 +18,8 @@ module OpenAI
            OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol
          )

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol]) }
+
def self.values end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index be9ab4d9..b978312e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -105,10 +105,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index d69faf62..22b596d5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -9,26 +9,13 @@ module OpenAI module ToolCall extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 50b03360..8af6cb90 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -9,26 +9,13 @@ module OpenAI module ToolCallDelta extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index f48fc10a..1fea8214 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -171,10 +171,8 @@ module OpenAI FUNCTION_CALL = T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - class << self - sig { 
override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol]) } + def self.values end end @@ -223,10 +221,8 @@ module OpenAI SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index c36302c3..b46a9dd7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -156,21 +156,6 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ) - } - end - ArrayOfContentPartArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], @@ -182,46 +167,32 @@ module OpenAI module ArrayOfContentPart extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] - ) - end - def variants - end - end - end - - class << self sig do override .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ] + [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] ) end - def variants + def self.variants end end + + sig do + override + .returns( + [ + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) + ] + ] + ) + end + def self.variants + end end class FunctionCall < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 3555a183..1b96a01d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -54,10 +54,8 @@ module OpenAI OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol]) } + def self.values end end @@ -79,10 +77,8 @@ module OpenAI SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) VERSE = 
T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 9564a77e..56e76f39 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -273,10 +273,8 @@ module OpenAI T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol]) } + def self.values end end @@ -374,13 +372,11 @@ module OpenAI FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) - class << self - sig do - override - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol]) - end - def values - end + sig do + override + .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol]) + end + def self.values end end end @@ -409,10 +405,8 @@ module OpenAI FUNCTION_CALL = T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol]) } + def self.values end end @@ -461,10 +455,8 @@ module OpenAI SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index ea7cbd9d..bf1e89a6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -8,18 +8,6 @@ module OpenAI module ChatCompletionContentPart extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - } - end - class File < OpenAI::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } attr_reader :file @@ -83,15 +71,13 @@ module OpenAI end end - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, 
OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index f856dce5..4a3b9252 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -82,13 +82,11 @@ module OpenAI LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) - class << self - sig do - override - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol]) - end - def values - end + sig do + override + .returns(T::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol]) + end + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 318356fb..d3e09c2d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -84,15 +84,13 @@ module OpenAI MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) - class << self - sig do - override - .returns( - T::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol] - ) - end - def values - end + sig do + override + .returns( + T::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol] + ) + end + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index ddab2743..267e8628 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -51,16 +51,11 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 9efb8e97..a5792ca5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -9,29 +9,13 @@ module OpenAI module ChatCompletionMessageParam 
extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index efa22f99..8c780c0f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -12,10 +12,8 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 34ebf6a5..c476a4e6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -42,16 +42,11 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index 77577cd0..0fad6644 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -17,10 +17,8 @@ module OpenAI TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol]) } - def values - end + sig { 
override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index d3cf9fc5..183c3c03 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -51,16 +51,11 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 9da6e2a7..ad5c2dd6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -15,16 +15,6 @@ module OpenAI module ChatCompletionToolChoiceOption extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice - ) - } - end - # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. 
@@ -39,22 +29,18 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol]) } + def self.values end end - class << self - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 8a16e044..8f868442 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -44,16 +44,11 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])} } - ChatCompletionContentPartTextArray = T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def variants - end + sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 96a94d0d..73db3fd6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -85,45 +85,26 @@ module OpenAI module Content extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ) - } - end - ChatCompletionContentPartArray = T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Converter) - class << self - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ] - ) - end - def variants - end + sig do + override + .returns( + [ + String, + T::Array[ + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) + ] + ] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 
cd89b96c..df28493e 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -534,12 +534,8 @@ module OpenAI module Model extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol)} } - - class << self - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + def self.variants end end @@ -560,16 +556,6 @@ module OpenAI module FunctionCall extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption - ) - } - end - # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. @@ -586,28 +572,24 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) - class << self - sig do - override - .returns( - T::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol] - ) - end - def values - end - end - end - - class << self sig do override .returns( - [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + T::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol] ) end - def variants + def self.values end end + + sig do + override + .returns( + [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + ) + end + def self.variants + end end class Function < OpenAI::BaseModel @@ -661,10 +643,8 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol]) } + def self.values end end @@ -681,26 +661,13 @@ module OpenAI module ResponseFormat extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + ) + end + def self.variants end end @@ -728,10 +695,8 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) } - def values - end + sig { 
override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) } + def self.values end end @@ -740,14 +705,10 @@ module OpenAI module Stop extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - end + sig { override.returns([String, T::Array[String]]) } + def self.variants end end @@ -841,15 +802,13 @@ module OpenAI OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol ) - class << self - sig do - override - .returns( - T::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol] - ) - end - def values - end + sig do + override + .returns( + T::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol] + ) + end + def self.values end end diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index b4b185ec..3cf1ef88 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -84,10 +84,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 382d3dd1..950d7ce7 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -69,10 +69,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index deecbd65..df9c03d2 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -58,10 +58,8 @@ module OpenAI GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::TaggedSymbol) GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::ChatModel::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::ChatModel::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 58689eef..971045f3 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -66,10 +66,8 @@ module OpenAI LT = T.let(:lt, 
OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::ComparisonFilter::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::ComparisonFilter::Type::TaggedSymbol]) } + def self.values end end @@ -78,12 +76,8 @@ module OpenAI module Value extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 8e9b757d..c37142f2 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -62,10 +62,8 @@ module OpenAI LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol]) } + def self.values end end diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 74e9a9a9..65892e81 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -251,9 +251,6 @@ module OpenAI module Model extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)} } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } @@ -262,10 +259,8 @@ module OpenAI DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) - class << self - sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } + def self.variants end end @@ -278,21 +273,14 @@ module OpenAI module Prompt extend OpenAI::Union - Variants = - type_template(:out) do - {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} - end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def variants - end + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def self.variants end end @@ -301,14 +289,10 @@ module OpenAI module Stop extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.nilable(T.any(String, T::Array[String]))} } - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - 
end + sig { override.returns([String, T::Array[String]]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index f36b780a..e070a53b 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -40,12 +40,8 @@ module OpenAI module Filter extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, T.anything)} } - - class << self - sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } - def variants - end + sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } + def self.variants end end @@ -59,10 +55,8 @@ module OpenAI AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::CompoundFilter::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::CompoundFilter::Type::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 0f90d8b7..a81280ca 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -91,21 +91,14 @@ module OpenAI module Input extend OpenAI::Union - Variants = - type_template(:out) do - {fixed: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])} - end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def variants - end + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def self.variants end end @@ -117,12 +110,8 @@ module OpenAI module Model extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)} } - - class << self - sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } + def self.variants end end @@ -138,10 +127,8 @@ module OpenAI FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 54a2f972..d1fc3704 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -12,10 +12,8 @@ module OpenAI TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::TaggedSymbol) TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::EmbeddingModel::TaggedSymbol]) } - def values - end + sig 
{ override.returns(T::Array[OpenAI::Models::EmbeddingModel::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index 4f77f905..4e9e85c9 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -6,22 +6,13 @@ module OpenAI module FileChunkingStrategy extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 46711f26..17f0755f 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -7,25 +7,13 @@ module OpenAI module FileChunkingStrategyParam extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 7cf867a4..e479a1d7 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -78,10 +78,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FileListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FileListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 85e51b7d..80273345 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -113,10 +113,8 @@ module OpenAI FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FileObject::Purpose::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FileObject::Purpose::TaggedSymbol]) } + def self.values end end @@ -132,10 +130,8 @@ module OpenAI PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FileObject::Status::TaggedSymbol]) } - def values - end + sig { 
override.returns(T::Array[OpenAI::Models::FileObject::Status::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 0caf4b3f..3fa6743d 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -19,10 +19,8 @@ module OpenAI USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::TaggedSymbol) EVALS = T.let(:evals, OpenAI::Models::FilePurpose::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FilePurpose::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FilePurpose::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b7a0df18..5aa6d868 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -275,12 +275,8 @@ module OpenAI module BatchSize extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, Integer]) } + def self.variants end end @@ -289,12 +285,8 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } - - class << self - sig { override.returns([Symbol, Float]) } - def variants - end + sig { override.returns([Symbol, Float]) } + def self.variants end end @@ -303,12 +295,8 @@ module OpenAI module NEpochs extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, Integer]) } + def self.variants end end end @@ -330,10 +318,8 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol]) } + def self.values end end @@ -484,12 +470,8 @@ module OpenAI module BatchSize extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, Integer]) } + def self.variants end end @@ -498,12 +480,8 @@ module OpenAI module Beta extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } - - class << self - sig { override.returns([Symbol, Float]) } - def variants - end + sig { override.returns([Symbol, Float]) } + def self.variants end end @@ -512,12 +490,8 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } - - class << self - sig { override.returns([Symbol, Float]) } - def variants - end + sig { override.returns([Symbol, Float]) } + def self.variants end end @@ -526,12 +500,8 @@ module OpenAI module NEpochs extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, 
Integer]) } + def self.variants end end end @@ -628,12 +598,8 @@ module OpenAI module BatchSize extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, Integer]) } + def self.variants end end @@ -642,12 +608,8 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} } - - class << self - sig { override.returns([Symbol, Float]) } - def variants - end + sig { override.returns([Symbol, Float]) } + def self.variants end end @@ -656,12 +618,8 @@ module OpenAI module NEpochs extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} } - - class << self - sig { override.returns([Symbol, Integer]) } - def variants - end + sig { override.returns([Symbol, Integer]) } + def self.variants end end end @@ -678,10 +636,8 @@ module OpenAI SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 61fee31f..2963e637 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -83,10 +83,8 @@ module OpenAI WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol]) } + def self.values end end @@ -101,10 +99,8 @@ module OpenAI MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index d9c7959b..7c0d6d5b 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -150,9 +150,6 @@ module OpenAI module Model extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)} } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } @@ -162,10 +159,8 @@ module OpenAI GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", 
OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol)
          GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol)

-          class << self
-            sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) }
-            def variants
-            end
+          sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) }
+          def self.variants
           end
         end
@@ -225,12 +220,8 @@ module OpenAI
          module BatchSize
            extend OpenAI::Union

-            Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-            class << self
-              sig { override.returns([Symbol, Integer]) }
-              def variants
-              end
+            sig { override.returns([Symbol, Integer]) }
+            def self.variants
            end
          end

@@ -239,12 +230,8 @@ module OpenAI
          module LearningRateMultiplier
            extend OpenAI::Union

-            Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} }
-
-            class << self
-              sig { override.returns([Symbol, Float]) }
-              def variants
-              end
+            sig { override.returns([Symbol, Float]) }
+            def self.variants
            end
          end

@@ -253,12 +240,8 @@ module OpenAI
          module NEpochs
            extend OpenAI::Union

-            Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-            class << self
-              sig { override.returns([Symbol, Integer]) }
-              def variants
-              end
+            sig { override.returns([Symbol, Integer]) }
+            def self.variants
            end
          end
        end
@@ -500,12 +483,8 @@ module OpenAI
              module BatchSize
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-                class << self
-                  sig { override.returns([Symbol, Integer]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Integer]) }
+                def self.variants
                end
              end

@@ -514,12 +493,8 @@ module OpenAI
              module Beta
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} }
-
-                class << self
-                  sig { override.returns([Symbol, Float]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Float]) }
+                def self.variants
                end
              end

@@ -528,12 +503,8 @@ module OpenAI
              module LearningRateMultiplier
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} }
-
-                class << self
-                  sig { override.returns([Symbol, Float]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Float]) }
+                def self.variants
                end
              end

@@ -542,12 +513,8 @@ module OpenAI
              module NEpochs
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-                class << self
-                  sig { override.returns([Symbol, Integer]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Integer]) }
+                def self.variants
                end
              end
            end
@@ -646,12 +613,8 @@ module OpenAI
              module BatchSize
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-                class << self
-                  sig { override.returns([Symbol, Integer]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Integer]) }
+                def self.variants
                end
              end

@@ -660,12 +623,8 @@ module OpenAI
              module LearningRateMultiplier
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Float)} }
-
-                class << self
-                  sig { override.returns([Symbol, Float]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Float]) }
+                def self.variants
                end
              end

@@ -674,12 +633,8 @@ module OpenAI
              module NEpochs
                extend OpenAI::Union

-                Variants = type_template(:out) { {fixed: T.any(Symbol, Integer)} }
-
-                class << self
-                  sig { override.returns([Symbol, Integer]) }
-                  def variants
-                  end
+                sig { override.returns([Symbol, Integer]) }
+                def self.variants
                end
              end
            end
@@ -696,10 +651,8 @@ module OpenAI
            SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol)
            DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol]) }
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi
index 5e353218..182d0c91 100644
--- a/rbi/lib/openai/models/image_create_variation_params.rbi
+++ b/rbi/lib/openai/models/image_create_variation_params.rbi
@@ -78,12 +78,8 @@ module OpenAI
      module Model
        extend OpenAI::Union

-        Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} }
-
-        class << self
-          sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
-          def variants
-          end
+        sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
+        def self.variants
        end
      end

@@ -100,10 +96,8 @@ module OpenAI
        URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol)
        B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol]) }
+        def self.values
        end
      end

@@ -119,10 +113,8 @@ module OpenAI
        NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol)
        NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) }
+        def self.values
        end
      end
    end
diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi
index 5752e130..8d058a7a 100644
--- a/rbi/lib/openai/models/image_edit_params.rbi
+++ b/rbi/lib/openai/models/image_edit_params.rbi
@@ -105,12 +105,8 @@ module OpenAI
      module Model
        extend OpenAI::Union

-        Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} }
-
-        class << self
-          sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
-          def variants
-          end
+        sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
+        def self.variants
        end
      end

@@ -126,10 +122,8 @@ module OpenAI
        URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol)
        B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol]) }
+        def self.values
        end
      end

@@ -145,10 +139,8 @@ module OpenAI
        NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol)
        NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) }
+        def self.values
        end
      end
    end
diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi
index 97f922c3..79804bf5 100644
--- a/rbi/lib/openai/models/image_generate_params.rbi
+++ b/rbi/lib/openai/models/image_generate_params.rbi
@@ -107,12 +107,8 @@ module OpenAI
      module Model
        extend OpenAI::Union

-        Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ImageModel::OrSymbol)} }
-
-        class << self
-          sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
-          def variants
-          end
+        sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) }
+        def self.variants
        end
      end

@@ -128,10 +124,8 @@ module OpenAI
        STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol)
        HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) }
+        def self.values
        end
      end

@@ -148,10 +142,8 @@ module OpenAI
        URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol)
        B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol]) }
+        def self.values
        end
      end

@@ -170,10 +162,8 @@ module OpenAI
        NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol)
        NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) }
+        def self.values
        end
      end

@@ -190,10 +180,8 @@ module OpenAI
        VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol)
        NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol]) }
+        def self.values
        end
      end
    end
diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi
index ba80b7d9..6e7b06d9 100644
--- a/rbi/lib/openai/models/image_model.rbi
+++ b/rbi/lib/openai/models/image_model.rbi
@@ -11,10 +11,8 @@ module OpenAI
      DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol)
      DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) }
+      def self.values
      end
    end
  end
diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi
index 72848671..bcf566f6 100644
--- a/rbi/lib/openai/models/moderation.rbi
+++ b/rbi/lib/openai/models/moderation.rbi
@@ -319,12 +319,10 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol)

-          class << self
-            sig do
-              override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol])
+          end
+          def self.values
          end
        end

@@ -339,15 +337,13 @@ module OpenAI
          TEXT =
            T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(
-                  T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol]
-                )
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(
+                T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol]
+              )
+          end
+          def self.values
          end
        end

@@ -361,10 +357,8 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -378,13 +372,11 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol])
+          end
+          def self.values
          end
        end

@@ -398,10 +390,8 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -415,13 +405,11 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol])
+          end
+          def self.values
          end
        end

@@ -436,10 +424,8 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol)
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -456,15 +442,13 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol)
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(
-                  T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol]
-                )
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(
+                T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol]
+              )
+          end
+          def self.values
          end
        end

@@ -480,13 +464,11 @@ module OpenAI
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol])
+          end
+          def self.values
          end
        end

@@ -501,10 +483,8 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol)
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -518,13 +498,11 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol])
+          end
+          def self.values
          end
        end

@@ -539,10 +517,8 @@ module OpenAI
          TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol)
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -558,13 +534,11 @@ module OpenAI
          IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol])
+          end
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi
index 5487b934..8ead2c0c 100644
--- a/rbi/lib/openai/models/moderation_create_params.rbi
+++ b/rbi/lib/openai/models/moderation_create_params.rbi
@@ -66,31 +66,18 @@ module OpenAI
      module Input
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                String,
-                T::Array[String],
-                T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]
-              )
-            }
-          end
-
        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter)

        ModerationMultiModalInputArray =
          T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter)

-        class << self
-          sig do
-            override
-              .returns(
-                [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]]
+            )
+        end
+        def self.variants
        end
      end

@@ -101,12 +88,8 @@ module OpenAI
      module Model
        extend OpenAI::Union

-        Variants = type_template(:out) { {fixed: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)} }
-
-        class << self
-          sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) }
-          def variants
-          end
+        sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) }
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi
index 97ae9021..d3f8dff4 100644
--- a/rbi/lib/openai/models/moderation_model.rbi
+++ b/rbi/lib/openai/models/moderation_model.rbi
@@ -14,10 +14,8 @@ module OpenAI
      TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol)
      TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::ModerationModel::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::ModerationModel::TaggedSymbol]) }
+      def self.values
      end
    end
  end
diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi
index 5d0b8337..9c96d7b0 100644
--- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi
+++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi
@@ -6,13 +6,8 @@ module OpenAI
    module ModerationMultiModalInput
      extend OpenAI::Union

-      Variants =
-        type_template(:out) { {fixed: T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)} }
-
-      class << self
-        sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) }
-        def variants
-        end
+      sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) }
+      def self.variants
      end
    end
  end
diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi
index 18587255..aef3d79c 100644
--- a/rbi/lib/openai/models/reasoning.rbi
+++ b/rbi/lib/openai/models/reasoning.rbi
@@ -60,10 +60,8 @@ module OpenAI
        CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol)
        DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol)

-        class << self
-          sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) }
-          def values
-          end
+        sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) }
+        def self.values
        end
      end
    end
diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi
index ad236c65..0563845c 100644
--- a/rbi/lib/openai/models/reasoning_effort.rbi
+++ b/rbi/lib/openai/models/reasoning_effort.rbi
@@ -18,10 +18,8 @@ module OpenAI
      MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol)
      HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::ReasoningEffort::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::ReasoningEffort::TaggedSymbol]) }
+      def self.values
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi
index 84a8caab..10f57a60 100644
--- a/rbi/lib/openai/models/responses/computer_tool.rbi
+++ b/rbi/lib/openai/models/responses/computer_tool.rbi
@@ -61,10 +61,8 @@ module OpenAI
          UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol)
          BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi
index 926e6805..d8597a18 100644
--- a/rbi/lib/openai/models/responses/easy_input_message.rbi
+++ b/rbi/lib/openai/models/responses/easy_input_message.rbi
@@ -87,40 +87,22 @@ module OpenAI
        module Content
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
+          sig do
+            override
+              .returns(
+                [
                  String,
                  T::Array[
-                    T.any(
-                      OpenAI::Models::Responses::ResponseInputText,
-                      OpenAI::Models::Responses::ResponseInputImage,
-                      OpenAI::Models::Responses::ResponseInputFile
-                    )
-                  ]
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [
-                    String,
-                    T::Array[
-                      T.any(
-                        OpenAI::Models::Responses::ResponseInputText,
-                        OpenAI::Models::Responses::ResponseInputImage,
-                        OpenAI::Models::Responses::ResponseInputFile
-                      )
-                    ]
-                  ]
-                )
-            end
-            def variants
-            end
+                    T.any(
+                      OpenAI::Models::Responses::ResponseInputText,
+                      OpenAI::Models::Responses::ResponseInputImage,
+                      OpenAI::Models::Responses::ResponseInputFile
+                    )
+                  ]
+                ]
+              )
+          end
+          def self.variants
          end
        end

@@ -138,10 +120,8 @@ module OpenAI
          SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol)
          DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -155,10 +135,8 @@ module OpenAI
          MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi
index 716402c5..a62e15dd 100644
--- a/rbi/lib/openai/models/responses/file_search_tool.rbi
+++ b/rbi/lib/openai/models/responses/file_search_tool.rbi
@@ -79,13 +79,8 @@ module OpenAI
        module Filters
          extend OpenAI::Union

-          Variants =
-            type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} }
-
-          class << self
-            sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) }
-            def variants
-            end
+          sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) }
+          def self.variants
          end
        end

@@ -142,12 +137,10 @@ module OpenAI
                OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol
              )

-            class << self
-              sig do
-                override.returns(T::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override.returns(T::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol])
+            end
+            def self.values
            end
          end
        end
diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi
index 0f383412..be14d1bd 100644
--- a/rbi/lib/openai/models/responses/input_item_list_params.rbi
+++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi
@@ -81,10 +81,8 @@ module OpenAI
          ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol)
          DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi
index 366aa274..140d38a3 100644
--- a/rbi/lib/openai/models/responses/response.rbi
+++ b/rbi/lib/openai/models/responses/response.rbi
@@ -379,10 +379,8 @@ module OpenAI
            CONTENT_FILTER =
              T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol]) }
+            def self.values
            end
          end
        end

@@ -393,26 +391,13 @@ module OpenAI
        module ToolChoice
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol,
-                  OpenAI::Models::Responses::ToolChoiceTypes,
-                  OpenAI::Models::Responses::ToolChoiceFunction
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
+              )
+          end
+          def self.variants
          end
        end

@@ -432,10 +417,8 @@ module OpenAI
          AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)
          DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::Response::Truncation::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::Response::Truncation::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
index 9ddb15ba..91dbb3cd 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
@@ -77,16 +77,6 @@ module OpenAI
        module Result
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
-                  OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
-                )
-              }
-            end
-
          class Logs < OpenAI::BaseModel
            # The logs of the code interpreter tool call.
            sig { returns(String) }
@@ -161,15 +151,13 @@ module OpenAI
            end
          end

-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]
+              )
+          end
+          def self.variants
          end
        end

@@ -189,13 +177,11 @@ module OpenAI
          COMPLETED =
            T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol])
+          end
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
index 205f7f16..298263bd 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
@@ -101,23 +101,6 @@ module OpenAI
        module Action
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type,
-                  OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait
-                )
-              }
-            end
-
          class Click < OpenAI::BaseModel
            # Indicates which mouse button was pressed during the click. One of `left`,
            # `right`, `wheel`, `back`, or `forward`.
@@ -185,15 +168,13 @@ module OpenAI
              FORWARD =
                T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol)

-              class << self
-                sig do
-                  override
-                    .returns(
-                      T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol]
-                    )
-                end
-                def values
-                end
+              sig do
+                override
+                  .returns(
+                    T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol]
+                  )
+              end
+              def self.values
              end
            end
          end
@@ -414,15 +395,13 @@ module OpenAI
            end
          end

-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]
+              )
+          end
+          def self.variants
          end
        end

@@ -465,10 +444,8 @@ module OpenAI
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -483,10 +460,8 @@ module OpenAI
          COMPUTER_CALL = T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
index 84261381..a995f82d 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
@@ -135,13 +135,11 @@ module OpenAI
          INCOMPLETE =
            T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol])
+          end
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi
index adbde5c6..ef91ffdf 100644
--- a/rbi/lib/openai/models/responses/response_content.rbi
+++ b/rbi/lib/openai/models/responses/response_content.rbi
@@ -7,28 +7,13 @@ module OpenAI
      module ResponseContent
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::ResponseInputText,
-                OpenAI::Models::Responses::ResponseInputImage,
-                OpenAI::Models::Responses::ResponseInputFile,
-                OpenAI::Models::Responses::ResponseOutputText,
-                OpenAI::Models::Responses::ResponseOutputRefusal
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
index 9b96ae2c..2c961d2d 100644
--- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
+++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
@@ -65,20 +65,11 @@ module OpenAI
        module Part
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
index 5c70029a..b5948c6d 100644
--- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
@@ -65,20 +65,11 @@ module OpenAI
        module Part
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi
index 10786b61..ace43a29 100644
--- a/rbi/lib/openai/models/responses/response_create_params.rbi
+++ b/rbi/lib/openai/models/responses/response_create_params.rbi
@@ -371,56 +371,30 @@ module OpenAI
        module Input
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
+          sig do
+            override
+              .returns(
+                [
                  String,
                  T::Array[
-                    T.any(
-                      OpenAI::Models::Responses::EasyInputMessage,
-                      OpenAI::Models::Responses::ResponseInputItem::Message,
-                      OpenAI::Models::Responses::ResponseOutputMessage,
-                      OpenAI::Models::Responses::ResponseFileSearchToolCall,
-                      OpenAI::Models::Responses::ResponseComputerToolCall,
-                      OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput,
-                      OpenAI::Models::Responses::ResponseFunctionWebSearch,
-                      OpenAI::Models::Responses::ResponseFunctionToolCall,
-                      OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput,
-                      OpenAI::Models::Responses::ResponseReasoningItem,
-                      OpenAI::Models::Responses::ResponseInputItem::ItemReference
-                    )
-                  ]
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [
-                    String,
-                    T::Array[
-                      T.any(
-                        OpenAI::Models::Responses::EasyInputMessage,
-                        OpenAI::Models::Responses::ResponseInputItem::Message,
-                        OpenAI::Models::Responses::ResponseOutputMessage,
-                        OpenAI::Models::Responses::ResponseFileSearchToolCall,
-                        OpenAI::Models::Responses::ResponseComputerToolCall,
-                        OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput,
-                        OpenAI::Models::Responses::ResponseFunctionWebSearch,
-                        OpenAI::Models::Responses::ResponseFunctionToolCall,
-                        OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput,
-                        OpenAI::Models::Responses::ResponseReasoningItem,
-                        OpenAI::Models::Responses::ResponseInputItem::ItemReference
-                      )
-                    ]
-                  ]
-                )
-            end
-            def variants
-            end
+                    T.any(
+                      OpenAI::Models::Responses::EasyInputMessage,
+                      OpenAI::Models::Responses::ResponseInputItem::Message,
+                      OpenAI::Models::Responses::ResponseOutputMessage,
+                      OpenAI::Models::Responses::ResponseFileSearchToolCall,
+                      OpenAI::Models::Responses::ResponseComputerToolCall,
+                      OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput,
+                      OpenAI::Models::Responses::ResponseFunctionWebSearch,
+                      OpenAI::Models::Responses::ResponseFunctionToolCall,
+                      OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput,
+                      OpenAI::Models::Responses::ResponseReasoningItem,
+                      OpenAI::Models::Responses::ResponseInputItem::ItemReference
+                    )
+                  ]
+                ]
+              )
+          end
+          def self.variants
          end
        end

@@ -430,26 +404,13 @@ module OpenAI
        module ToolChoice
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
-                  OpenAI::Models::Responses::ToolChoiceTypes,
-                  OpenAI::Models::Responses::ToolChoiceFunction
-                )
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
+              )
+          end
+          def self.variants
          end
        end

@@ -471,10 +432,8 @@ module OpenAI
          AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol)
          DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi
index 18462364..a000c309 100644
--- a/rbi/lib/openai/models/responses/response_error.rbi
+++ b/rbi/lib/openai/models/responses/response_error.rbi
@@ -62,10 +62,8 @@ module OpenAI
          IMAGE_FILE_NOT_FOUND =
            T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
index 7355cae9..55b6067b 100644
--- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
@@ -78,10 +78,8 @@ module OpenAI
            T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol)
          FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -153,12 +151,8 @@ module OpenAI
          module Attribute
            extend OpenAI::Union

-            Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} }
-
-            class << self
-              sig { override.returns([String, Float, T::Boolean]) }
-              def variants
-              end
+            sig { override.returns([String, Float, T::Boolean]) }
+            def self.variants
            end
          end
        end
diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi
index 3019db16..24160f1c 100644
--- a/rbi/lib/openai/models/responses/response_format_text_config.rbi
+++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi
@@ -19,26 +19,13 @@ module OpenAI
      module ResponseFormatTextConfig
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::ResponseFormatText,
-                OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig,
-                OpenAI::Models::ResponseFormatJSONObject
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi
index aab3c789..0135732e 100644
--- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi
@@ -84,10 +84,8 @@ module OpenAI
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi
index fcbdc3a6..a3ae3675 100644
--- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi
+++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi
@@ -73,13 +73,11 @@ module OpenAI
          INCOMPLETE =
            T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)

-          class << self
-            sig do
-              override
-                .returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol])
-            end
-            def values
-            end
+          sig do
+            override
+              .returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol])
+          end
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi
index 647bbe36..0dfe2413 100644
--- a/rbi/lib/openai/models/responses/response_function_web_search.rbi
+++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi
@@ -54,10 +54,8 @@ module OpenAI
          COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol)
          FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi
index ca498afe..9b325a79 100644
--- a/rbi/lib/openai/models/responses/response_includable.rbi
+++ b/rbi/lib/openai/models/responses/response_includable.rbi
@@ -27,10 +27,8 @@ module OpenAI
          OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol
        )

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) }
+      def self.values
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi
index c48ea4f2..98923bc5 100644
--- a/rbi/lib/openai/models/responses/response_input_audio.rbi
+++ b/rbi/lib/openai/models/responses/response_input_audio.rbi
@@ -48,10 +48,8 @@ module OpenAI
          MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol)
          WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi
index b1e862e0..d63620b1 100644
--- a/rbi/lib/openai/models/responses/response_input_content.rbi
+++ b/rbi/lib/openai/models/responses/response_input_content.rbi
@@ -7,26 +7,13 @@ module OpenAI
      module ResponseInputContent
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::ResponseInputText,
-                OpenAI::Models::Responses::ResponseInputImage,
-                OpenAI::Models::Responses::ResponseInputFile
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi
index 677129df..4d92987a 100644
--- a/rbi/lib/openai/models/responses/response_input_image.rbi
+++ b/rbi/lib/openai/models/responses/response_input_image.rbi
@@ -63,10 +63,8 @@ module OpenAI
          LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol)
          AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi
index 9c3341e5..51053e58 100644
--- a/rbi/lib/openai/models/responses/response_input_item.rbi
+++ b/rbi/lib/openai/models/responses/response_input_item.rbi
@@ -11,25 +11,6 @@ module OpenAI
      module ResponseInputItem
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::EasyInputMessage,
-                OpenAI::Models::Responses::ResponseInputItem::Message,
-                OpenAI::Models::Responses::ResponseOutputMessage,
-                OpenAI::Models::Responses::ResponseFileSearchToolCall,
-                OpenAI::Models::Responses::ResponseComputerToolCall,
-                OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput,
-                OpenAI::Models::Responses::ResponseFunctionWebSearch,
-                OpenAI::Models::Responses::ResponseFunctionToolCall,
-                OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput,
-                OpenAI::Models::Responses::ResponseReasoningItem,
-                OpenAI::Models::Responses::ResponseInputItem::ItemReference
-              )
-            }
-          end
-
        class Message < OpenAI::BaseModel
          # A list of one or many input items to the model, containing different content types.
@@ -120,10 +101,8 @@ module OpenAI
            SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol)
            DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol]) }
+            def self.values
            end
          end

@@ -144,10 +123,8 @@ module OpenAI
            INCOMPLETE =
              T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol]) }
+            def self.values
            end
          end

@@ -162,10 +139,8 @@ module OpenAI
            MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol)

-            class << self
-              sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol]) }
-              def values
-              end
+            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol]) }
+            def self.values
            end
          end
        end
@@ -309,13 +284,11 @@ module OpenAI
            INCOMPLETE =
              T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol)

-            class << self
-              sig do
-                override
-                  .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol])
+            end
+            def self.values
            end
          end
        end
@@ -398,13 +371,11 @@ module OpenAI
            INCOMPLETE =
              T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol)

-            class << self
-              sig do
-                override
-                  .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol])
-              end
-              def values
-              end
+            sig do
+              override
+                .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol])
+            end
+            def self.values
            end
          end
        end
@@ -428,15 +399,13 @@ module OpenAI
          end
        end

-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi
index 18f699d6..345adad7 100644
--- a/rbi/lib/openai/models/responses/response_input_message_item.rbi
+++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi
@@ -95,10 +95,8 @@ module OpenAI
          SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol)
          DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -118,10 +116,8 @@ module OpenAI
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol]) }
+          def self.values
          end
        end

@@ -135,10 +131,8 @@ module OpenAI
          MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi
index 4bfa929a..b57094c5 100644
--- a/rbi/lib/openai/models/responses/response_item.rbi
+++ b/rbi/lib/openai/models/responses/response_item.rbi
@@ -7,31 +7,13 @@ module OpenAI
      module ResponseItem
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::ResponseInputMessageItem,
-                OpenAI::Models::Responses::ResponseOutputMessage,
-                OpenAI::Models::Responses::ResponseFileSearchToolCall,
-                OpenAI::Models::Responses::ResponseComputerToolCall,
-                OpenAI::Models::Responses::ResponseComputerToolCallOutputItem,
-                OpenAI::Models::Responses::ResponseFunctionWebSearch,
-                OpenAI::Models::Responses::ResponseFunctionToolCallItem,
-                OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi
index f0406f67..9d893172 100644
--- a/rbi/lib/openai/models/responses/response_output_item.rbi
+++ b/rbi/lib/openai/models/responses/response_output_item.rbi
@@ -7,29 +7,13 @@ module OpenAI
      module ResponseOutputItem
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::ResponseOutputMessage,
-                OpenAI::Models::Responses::ResponseFileSearchToolCall,
-                OpenAI::Models::Responses::ResponseFunctionToolCall,
-                OpenAI::Models::Responses::ResponseFunctionWebSearch,
-                OpenAI::Models::Responses::ResponseComputerToolCall,
-                OpenAI::Models::Responses::ResponseReasoningItem
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
-              )
-          end
-          def variants
-          end
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
+            )
+        end
+        def self.variants
        end
      end
    end
diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi
index dabf3215..d91e17f1 100644
--- a/rbi/lib/openai/models/responses/response_output_message.rbi
+++ b/rbi/lib/openai/models/responses/response_output_message.rbi
@@ -68,20 +68,11 @@ module OpenAI
        module Content
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)
-              }
-            end
-
-          class << self
-            sig do
-              override
-                .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
+          end
+          def self.variants
          end
        end

@@ -98,10 +89,8 @@ module OpenAI
          COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol)
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi
index 36a04fa3..561c4b12 100644
--- a/rbi/lib/openai/models/responses/response_output_text.rbi
+++ b/rbi/lib/openai/models/responses/response_output_text.rbi
@@ -68,17 +68,6 @@ module OpenAI
        module Annotation
          extend OpenAI::Union

-          Variants =
-            type_template(:out) do
-              {
-                fixed: T.any(
-                  OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation,
-                  OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation,
-                  OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath
-                )
-              }
-            end
-
          class FileCitation < OpenAI::BaseModel
            # The ID of the file.
            sig { returns(String) }
@@ -169,15 +158,13 @@ module OpenAI
            end
          end

-          class << self
-            sig do
-              override
-                .returns(
-                  [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]
-                )
-            end
-            def variants
-            end
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]
+              )
+          end
+          def self.variants
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi
index 7c7b4827..ba09b049 100644
--- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi
+++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi
@@ -83,10 +83,8 @@ module OpenAI
          COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol)
          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol)

-          class << self
-            sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol]) }
-            def values
-            end
+          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol]) }
+          def self.values
          end
        end
      end
diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi
index c4d55827..a46776c9 100644
--- a/rbi/lib/openai/models/responses/response_status.rbi
+++ b/rbi/lib/openai/models/responses/response_status.rbi
@@ -16,10 +16,8 @@ module OpenAI
      IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)
      INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)

-      class << self
-        sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseStatus::TaggedSymbol]) }
-        def values
-        end
+      sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseStatus::TaggedSymbol]) }
+      def self.values
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi
index c549a897..e04029c8 100644
--- a/rbi/lib/openai/models/responses/response_stream_event.rbi
+++ b/rbi/lib/openai/models/responses/response_stream_event.rbi
@@ -7,55 +7,13 @@ module OpenAI
      module ResponseStreamEvent
        extend OpenAI::Union

-        Variants =
-          type_template(:out) do
-            {
-              fixed: T.any(
-                OpenAI::Models::Responses::ResponseAudioDeltaEvent,
-                OpenAI::Models::Responses::ResponseAudioDoneEvent,
-                OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent,
-                OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent,
-                OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent,
-                OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent,
-                OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent,
-                OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent,
-                OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent,
-                OpenAI::Models::Responses::ResponseCompletedEvent,
-                OpenAI::Models::Responses::ResponseContentPartAddedEvent,
-                OpenAI::Models::Responses::ResponseContentPartDoneEvent,
-                OpenAI::Models::Responses::ResponseCreatedEvent,
-                OpenAI::Models::Responses::ResponseErrorEvent,
-                OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent,
-                OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent,
-                OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent,
-                OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent,
-                OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent,
-                OpenAI::Models::Responses::ResponseInProgressEvent,
-                OpenAI::Models::Responses::ResponseFailedEvent,
-                OpenAI::Models::Responses::ResponseIncompleteEvent,
-                OpenAI::Models::Responses::ResponseOutputItemAddedEvent,
-                OpenAI::Models::Responses::ResponseOutputItemDoneEvent,
-                OpenAI::Models::Responses::ResponseRefusalDeltaEvent,
-                OpenAI::Models::Responses::ResponseRefusalDoneEvent,
-                OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent,
-                OpenAI::Models::Responses::ResponseTextDeltaEvent,
-                OpenAI::Models::Responses::ResponseTextDoneEvent,
-                OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent,
-                OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent,
-                OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent
-              )
-            }
-          end
-
-        class << self
-          sig do
-            override
-              .returns(
-                [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]
-              )
-
end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 55d9384e..4ad5009e 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -87,17 +87,6 @@ module OpenAI module Annotation extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - } - end - class FileCitation < OpenAI::BaseModel # The ID of the file. 
sig { returns(String) } @@ -188,15 +177,13 @@ module OpenAI end end - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 3e2da0c3..0f080b1b 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -9,27 +9,13 @@ module OpenAI module Tool extend OpenAI::Union - Variants = - type_template(:out) do - { - fixed: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - } - end - - class << self - sig do - override - .returns( - [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] - ) - end - def variants - end + sig do + override + .returns( + [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] + ) + end + def self.variants end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index a3f23a00..27f51058 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -21,10 +21,8 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 3332abb3..6119c6e4 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -47,10 +47,8 @@ module OpenAI WEB_SEARCH_PREVIEW_2025_03_11 = T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index c100a08a..86e24a9b 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -72,10 +72,8 @@ module OpenAI WEB_SEARCH_PREVIEW_2025_03_11 = 
T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol]) } + def self.values end end @@ -93,10 +91,8 @@ module OpenAI MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol]) } + def self.values end end diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index ebec2d7d..63a4193b 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -5,9 +5,6 @@ module OpenAI module ResponsesModel extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)} } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::TaggedSymbol) } @@ -17,10 +14,8 @@ module OpenAI COMPUTER_USE_PREVIEW_2025_03_11 = T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::TaggedSymbol) - class << self - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } - def variants - end + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 49ff6e17..08940cb9 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -93,10 +93,8 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::Upload::Status::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::Upload::Status::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index aeb5522d..fae6b032 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -174,10 +174,8 @@ module OpenAI IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStore::Status::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStore::Status::TaggedSymbol]) } + def self.values end end diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 0a1e94a2..aa0fd8d8 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -81,10 +81,8 @@ module OpenAI ASC = T.let(:asc, 
OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 0e4d0c31..57283a5e 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -83,14 +83,10 @@ module OpenAI module Query extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, T::Array[String])} } - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - class << self - sig { override.returns([String, T::Array[String]]) } - def variants - end + sig { override.returns([String, T::Array[String]]) } + def self.variants end end @@ -98,13 +94,8 @@ module OpenAI module Filters extend OpenAI::Union - Variants = - type_template(:out) { {fixed: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)} } - - class << self - sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def variants - end + sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } + def self.variants end end @@ -156,10 +147,8 @@ module OpenAI OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol ) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index a9a8d0fc..4baabfad 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -58,12 +58,8 @@ module OpenAI module Attribute extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + def self.variants end end @@ -100,10 +96,8 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index c8da7a14..089e78d9 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -83,12 +83,8 @@ module OpenAI module Attribute extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + 
def self.variants end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 65b17181..582e82a0 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -102,10 +102,8 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol]) } + def self.values end end @@ -122,10 +120,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index e0b2f3f4..d032eda8 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -83,12 +83,8 @@ module OpenAI module Attribute extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index edce777d..7aaf42a7 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -93,10 +93,8 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol]) } + def self.values end end @@ -112,10 +110,8 @@ module OpenAI ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol]) } + def self.values end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 3c552da3..228b23aa 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -45,12 +45,8 @@ module OpenAI module Attribute 
extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 3e32084e..ad610fcb 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -171,10 +171,8 @@ module OpenAI INVALID_FILE = T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol]) } + def self.values end end end @@ -194,22 +192,16 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol]) } + def self.values end end module Attribute extend OpenAI::Union - Variants = type_template(:out) { {fixed: T.any(String, Float, T::Boolean)} } - - class << self - sig { override.returns([String, Float, T::Boolean]) } - def variants - end + sig { override.returns([String, Float, T::Boolean]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index dd1ee3e5..0d7f33b3 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -134,10 +134,8 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - class << self - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol]) } - def values - end + sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol]) } + def self.values end end end From b842d36ddf4d5d1a5bc16f3146722a02a8c12357 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 22:08:37 +0000 Subject: [PATCH 056/295] chore: disable dangerous rubocop auto correct rule (#62) --- .rubocop.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.rubocop.yml b/.rubocop.yml index 6dbfa8bf..1352fdb0 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -84,6 +84,12 @@ Lint/MissingSuper: Lint/UnusedMethodArgument: AutoCorrect: false +# This option is prone to causing accidental bugs. 
+Lint/UselessAssignment:
+  AutoCorrect: false
+  Exclude:
+    - "examples/**/*.rb"
+
 Metrics/AbcSize:
   Enabled: false

From 68077ff10b92ba974263d518f84bbd38d9f44168 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 24 Mar 2025 16:33:32 +0000
Subject: [PATCH 057/295] chore: more readable output when tests fail (#63)

---
 Rakefile | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/Rakefile b/Rakefile
index e97d158b..cf2e280f 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,20 +1,25 @@
 # frozen_string_literal: true

+require "securerandom"
+require "shellwords"
+
 require "minitest/test_task"
 require "rake/clean"
 require "rubocop/rake_task"
-require "securerandom"
-require "shellwords"

 CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/])

 xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --]

-task(default: [:test, :format])
+task(default: [:test])
+
+multitask(:test) do
+  rb =
+    FileList[ENV.fetch("TEST", "./test/**/*_test.rb")]
+      .map { "require_relative(#{_1.dump});" }
+      .join

-Minitest::TestTask.create do |t|
-  t.libs = %w[.]
-  t.test_globs = ENV.fetch("TEST", "./test/**/*_test.rb")
+  ruby(*%w[-w -e], rb, verbose: false) { fail unless _1 }
 end

 RuboCop::RakeTask.new(:rubocop) do |t|

From 8c8bb1af690e8f3d43c6e119b402cf936cc86755 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 26 Mar 2025 17:33:34 +0000
Subject: [PATCH 058/295] chore: add hash of OpenAPI spec/config inputs to
 .stats.yml

---
 .stats.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.stats.yml b/.stats.yml
index 7011b57a..766090ae 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,4 @@
 configured_endpoints: 80
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5ad6884898c07591750dde560118baf7074a59aecd1f367f930c5e42b04e848a.yml
+openapi_spec_hash: 0c255269b89767eae26f4d4dc22d3cbd
+config_hash: d36e491b0afc4f79e3afad4b3c9bec70

From 7a245a2c6ce1f8069e7ec56d8eac4af8dfcd9213 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 26 Mar 2025 20:33:24 +0000
Subject: [PATCH 059/295] chore(api): updates to supported Voice IDs (#64)

---
 .stats.yml | 4 +-
 .../models/audio/speech_create_params.rb | 50 +++++++++++++++----
 .../chat/chat_completion_audio_param.rb | 43 +++++++++++++---
 .../responses/input_item_list_params.rb | 14 +++++-
 lib/openai/models/responses/response.rb | 4 +-
 .../responses/response_create_params.rb | 4 +-
 ...response_format_text_json_schema_config.rb | 22 ++++----
 lib/openai/resources/audio/speech.rb | 6 +--
 lib/openai/resources/responses.rb | 8 +--
 lib/openai/resources/responses/input_items.rb | 3 ++
 .../models/audio/speech_create_params.rbi | 22 ++++----
 .../chat/chat_completion_audio_param.rbi | 15 +++---
 .../responses/input_item_list_params.rbi | 12 ++++-
 rbi/lib/openai/models/responses/response.rbi | 4 +-
 .../responses/response_create_params.rbi | 4 +-
 ...esponse_format_text_json_schema_config.rbi | 19 +++----
 rbi/lib/openai/resources/audio/speech.rbi | 6 +--
 rbi/lib/openai/resources/responses.rbi | 8 +--
 .../resources/responses/input_items.rbi | 4 ++
 .../models/audio/speech_create_params.rbs | 21 ++++++--
 .../chat/chat_completion_audio_param.rbs | 30 +++++++++--
 .../responses/input_item_list_params.rbs | 8 +++
 ...esponse_format_text_json_schema_config.rbs | 10 ++--
 .../resources/responses/input_items.rbs | 1 +
 24 files changed, 226
insertions(+), 96 deletions(-) diff --git a/.stats.yml b/.stats.yml index 766090ae..940027f0 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5ad6884898c07591750dde560118baf7074a59aecd1f367f930c5e42b04e848a.yml -openapi_spec_hash: 0c255269b89767eae26f4d4dc22d3cbd +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6663c59193eb95b201e492de17dcbd5e126ba03d18ce66287a3e2c632ca56fe7.yml +openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e config_hash: d36e491b0afc4f79e3afad4b3c9bec70 diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 1c0a806a..2c0c03cc 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -23,12 +23,12 @@ class SpeechCreateParams < OpenAI::BaseModel # @!attribute voice # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # - # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] - required :voice, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } + # @return [String, Symbol] + required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } # @!attribute [r] instructions # Control the voice of your generated audio with additional instructions. Does not @@ -66,7 +66,7 @@ class SpeechCreateParams < OpenAI::BaseModel # @!parse # # @param input [String] # # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # # @param voice [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] + # # @param voice [String, Symbol] # # @param instructions [String] # # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] # # @param speed [Float] @@ -92,14 +92,17 @@ module Model end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). module Voice - extend OpenAI::Enum + extend OpenAI::Union + + # @!group ALLOY = :alloy ASH = :ash + BALLAD = :ballad CORAL = :coral ECHO = :echo FABLE = :fable @@ -107,12 +110,37 @@ module Voice NOVA = :nova SAGE = :sage SHIMMER = :shimmer + VERSE = :verse - finalize! 
+ # @!endgroup + + variant String + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER + + variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE # @!parse - # # @return [Array] - # def self.values; end + # # @return [Array(String, Symbol)] + # def self.variants; end end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 7de20292..f3800a67 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -15,8 +15,8 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] - required :voice, enum: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } + # @return [String, Symbol] + required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } # @!parse # # Parameters for audio output. Required when audio output is requested with @@ -24,7 +24,7 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # # [Learn more](https://platform.openai.com/docs/guides/audio). # # # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - # # @param voice [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] + # # @param voice [String, Symbol] # # # def initialize(format_:, voice:, **) = super @@ -51,22 +51,51 @@ module Format # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. module Voice - extend OpenAI::Enum + extend OpenAI::Union + + # @!group ALLOY = :alloy ASH = :ash BALLAD = :ballad CORAL = :coral ECHO = :echo + FABLE = :fable + ONYX = :onyx + NOVA = :nova SAGE = :sage SHIMMER = :shimmer VERSE = :verse - finalize! 
+ # @!endgroup + + variant String + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER + + variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE # @!parse - # # @return [Array] - # def self.values; end + # # @return [Array(String, Symbol)] + # def self.variants; end end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index d55641a5..d50fc83d 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -28,6 +28,17 @@ class InputItemListParams < OpenAI::BaseModel # # @return [String] # attr_writer :before + # @!attribute [r] include + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + # + # @return [Array, nil] + optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } + + # @!parse + # # @return [Array] + # attr_writer :include + # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. @@ -55,11 +66,12 @@ class InputItemListParams < OpenAI::BaseModel # @!parse # # @param after [String] # # @param before [String] + # # @param include [Array] # # @param limit [Integer] # # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super + # def initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 35ed1c06..296c2d41 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -33,8 +33,8 @@ class Response < OpenAI::BaseModel # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. 
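The doc-comment fix above pins down the `instructions` semantics: they apply to the current response only and are not inherited through `previous_response_id`. A minimal sketch of what that allows, assuming a configured `OpenAI::Client` in `client` (the client setup, model name, and inputs are illustrative, not taken from this patch):

    first = client.responses.create(
      model: "gpt-4o",
      input: "Summarize our launch plan.",
      instructions: "Answer as a pirate."
    )
    # The pirate instruction is dropped here; only the new one applies.
    second = client.responses.create(
      model: "gpt-4o",
      previous_response_id: first.id,
      input: "Now list the risks.",
      instructions: "Answer formally."
    )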
# # @return [String, nil] required :instructions, String, nil?: true diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 7557e478..a4469d5c 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -50,8 +50,8 @@ class ResponseCreateParams < OpenAI::BaseModel # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. # # @return [String, nil] optional :instructions, String, nil?: true diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 34aefbc9..06e3bf5e 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -4,6 +4,13 @@ module OpenAI module Models module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + # @!attribute name + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. + # + # @return [String] + required :name, String + # @!attribute schema # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). @@ -28,17 +35,6 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel # # @return [String] # attr_writer :description - # @!attribute [r] name - # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. - # - # @return [String, nil] - optional :name, String - - # @!parse - # # @return [String] - # attr_writer :name - # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` @@ -54,13 +50,13 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel # # more about # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # # + # # @param name [String] # # @param schema [Hash{Symbol=>Object}] # # @param description [String] - # # @param name [String] # # @param strict [Boolean, nil] # # @param type [Symbol, :json_schema] # # - # def initialize(schema:, description: nil, name: nil, strict: nil, type: :json_schema, **) = super + # def initialize(name:, schema:, description: nil, strict: nil, type: :json_schema, **) = super # def initialize: (Hash | OpenAI::BaseModel) -> void end diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index b5b584fe..cbffafe3 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -13,9 +13,9 @@ class Speech # @option params [String, Symbol, OpenAI::Models::Audio::SpeechModel] :model One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # - # @option params [Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] :voice The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # @option params [String, Symbol] :voice The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # # @option params [String] :instructions Control the voice of your generated audio with additional instructions. Does not diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index f12fe46e..e0a7bc26 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -49,8 +49,8 @@ class Responses # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. # # @option params [Integer, nil] :max_output_tokens An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and @@ -185,8 +185,8 @@ def create(params) # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. # # @option params [Integer, nil] :max_output_tokens An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index ff8ff1d0..ece1f2a1 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -14,6 +14,9 @@ class InputItems # # @option params [String] :before An item ID to list items before, used in pagination. # + # @option params [Array] :include Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + # # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index f5f11b88..014716e8 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -17,10 +17,10 @@ module OpenAI attr_accessor :model # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). 
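With `Voice` now an `OpenAI::Union` over `String` and the tagged symbols, callers are no longer restricted to the enum members. A usage sketch, assuming a configured `OpenAI::Client` in `client` (the client setup is illustrative; `gpt-4o-mini-tts` comes from the doc comment above):

    # A known constant still works, including the newly added ones...
    client.audio.speech.create(model: "gpt-4o-mini-tts", voice: :ballad, input: "Ahoy!")
    # ...and a plain String now passes type checks too, via the `variant String` arm.
    client.audio.speech.create(model: "gpt-4o-mini-tts", voice: "verse", input: "Ahoy!")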
- sig { returns(OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol) } + sig { returns(T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol)) } attr_accessor :voice # Control the voice of your generated audio with additional instructions. Does not @@ -51,7 +51,7 @@ module OpenAI params( input: String, model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, @@ -68,7 +68,7 @@ module OpenAI { input: String, model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, @@ -90,17 +90,18 @@ module OpenAI end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). module Voice - extend OpenAI::Enum + extend OpenAI::Union TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + BALLAD = T.let(:ballad, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) @@ -108,9 +109,10 @@ module OpenAI NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + VERSE = T.let(:verse, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } - def self.values + sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) } + def self.variants end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 1b96a01d..abac1261 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -11,7 +11,7 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. 
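The chat audio parameter gets the same enum-to-union treatment. A construction sketch (the `format_` value is an assumed member of the `Format` enum, which this hunk does not list):

    audio = OpenAI::Models::Chat::ChatCompletionAudioParam.new(
      format_: :mp3,   # assumed Format member; not shown in this hunk
      voice: "verse"   # a plain String, or a symbol such as :verse
    )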
- sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } + sig { returns(T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)) } attr_accessor :voice # Parameters for audio output. Required when audio output is requested with @@ -20,7 +20,7 @@ module OpenAI sig do params( format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + voice: T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) ) .returns(T.attached_class) end @@ -32,7 +32,7 @@ module OpenAI .returns( { format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol + voice: T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) } ) end @@ -62,7 +62,7 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. module Voice - extend OpenAI::Enum + extend OpenAI::Union TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } OrSymbol = @@ -73,12 +73,15 @@ module OpenAI BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) + FABLE = T.let(:fable, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) + ONYX = T.let(:onyx, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) + NOVA = T.let(:nova, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } - def self.values + sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } + def self.variants end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index be14d1bd..36ee8426 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -21,6 +21,14 @@ module OpenAI sig { params(before: String).void } attr_writer :before + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + attr_reader :include + + sig { params(include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]).void } + attr_writer :include + # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. 
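Combining the new `include` parameter with the existing cursor options, a list call might look like the following sketch, assuming a configured client (the response ID and the includable value are hypothetical; consult `OpenAI::Models::Responses::ResponseIncludable` for the real members):

    page = client.responses.input_items.list(
      "resp_123",                              # hypothetical response ID
      include: [:"file_search_call.results"],  # hypothetical includable value
      limit: 20,                               # 1..100, default 20 per the doc above
      order: :asc
    )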
sig { returns(T.nilable(Integer)) } @@ -43,13 +51,14 @@ module OpenAI params( after: String, before: String, + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + def self.new(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) end sig do @@ -58,6 +67,7 @@ module OpenAI { after: String, before: String, + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: OpenAI::RequestOptions diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 140d38a3..db73d16a 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -35,8 +35,8 @@ module OpenAI # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } attr_accessor :instructions diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index ace43a29..3204e5db 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -63,8 +63,8 @@ module OpenAI # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } attr_accessor :instructions diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 8b4a57ef..8d657f44 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -4,6 +4,11 @@ module OpenAI module Models module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. + sig { returns(String) } + attr_accessor :name + # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } @@ -21,14 +26,6 @@ module OpenAI sig { params(description: String).void } attr_writer :description - # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. 
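Since `name` moves to the front as a required field, constructing the config now reads like this sketch (the schema contents are illustrative):

    config = OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig.new(
      name: "math_answer",   # required: a-z, A-Z, 0-9, underscores and dashes
      schema: {
        type: "object",
        properties: {answer: {type: "string"}},
        required: ["answer"]
      },
      strict: true           # enforce exact schema adherence in the output
    )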
- sig { returns(T.nilable(String)) } - attr_reader :name - - sig { params(name: String).void } - attr_writer :name - # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` # field. Only a subset of JSON Schema is supported when `strict` is `true`. To @@ -42,25 +39,25 @@ module OpenAI # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( + name: String, schema: T::Hash[Symbol, T.anything], description: String, - name: String, strict: T.nilable(T::Boolean), type: Symbol ) .returns(T.attached_class) end - def self.new(schema:, description: nil, name: nil, strict: nil, type: :json_schema) + def self.new(name:, schema:, description: nil, strict: nil, type: :json_schema) end sig do override .returns( { + name: String, schema: T::Hash[Symbol, T.anything], type: Symbol, description: String, - name: String, strict: T.nilable(T::Boolean) } ) diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index 5d7311e5..2c23fe1f 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -9,7 +9,7 @@ module OpenAI params( input: String, model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol, + voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, @@ -24,8 +24,8 @@ module OpenAI # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. model:, # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the - # voices are available in the + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). voice:, # Control the voice of your generated audio with additional instructions. Does not diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 6ddca1da..1c7a99ac 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -102,8 +102,8 @@ module OpenAI # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. instructions: nil, # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and @@ -317,8 +317,8 @@ module OpenAI # context. # # When using along with `previous_response_id`, the instructions from a previous - # response will be not be carried over to the next response. This makes it simple - # to swap out system (or developer) messages in new responses. + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. 
instructions: nil, # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index d1663648..35fb21c7 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -10,6 +10,7 @@ module OpenAI response_id: String, after: String, before: String, + include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) @@ -36,6 +37,9 @@ module OpenAI after: nil, # An item ID to list items before, used in pagination. before: nil, + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + include: nil, # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. limit: nil, diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 2b0a7895..99ceae76 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -57,8 +57,10 @@ module OpenAI end type voice = - :alloy + String + | :alloy | :ash + | :ballad | :coral | :echo | :fable @@ -66,12 +68,14 @@ module OpenAI | :nova | :sage | :shimmer + | :verse module Voice - extend OpenAI::Enum + extend OpenAI::Union ALLOY: :alloy ASH: :ash + BALLAD: :ballad CORAL: :coral ECHO: :echo FABLE: :fable @@ -79,8 +83,19 @@ module OpenAI NOVA: :nova SAGE: :sage SHIMMER: :shimmer + VERSE: :verse - def self?.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::voice] + def self?.variants: -> [String, (:alloy + | :ash + | :ballad + | :coral + | :echo + | :fable + | :onyx + | :nova + | :sage + | :shimmer + | :verse)] end type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index f5963ec9..3f277ae1 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -36,21 +36,45 @@ module OpenAI end type voice = - :alloy | :ash | :ballad | :coral | :echo | :sage | :shimmer | :verse + String + | :alloy + | :ash + | :ballad + | :coral + | :echo + | :fable + | :onyx + | :nova + | :sage + | :shimmer + | :verse module Voice - extend OpenAI::Enum + extend OpenAI::Union ALLOY: :alloy ASH: :ash BALLAD: :ballad CORAL: :coral ECHO: :echo + FABLE: :fable + ONYX: :onyx + NOVA: :nova SAGE: :sage SHIMMER: :shimmer VERSE: :verse - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::voice] + def self?.variants: -> [String, (:alloy + | :ash + | :ballad + | :coral + | :echo + | :fable + | :onyx + | :nova + | :sage + | :shimmer + | :verse)] end end end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 1a91bd7e..da58932d 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -5,6 +5,7 @@ module OpenAI { after: String, before: String, + include: ::Array[OpenAI::Models::Responses::response_includable], limit: Integer, order: 
OpenAI::Models::Responses::InputItemListParams::order } @@ -22,6 +23,12 @@ module OpenAI def before=: (String) -> String + attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? + + def include=: ( + ::Array[OpenAI::Models::Responses::response_includable] + ) -> ::Array[OpenAI::Models::Responses::response_includable] + attr_reader limit: Integer? def limit=: (Integer) -> Integer @@ -35,6 +42,7 @@ module OpenAI def initialize: ( ?after: String, ?before: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], ?limit: Integer, ?order: OpenAI::Models::Responses::InputItemListParams::order, ?request_options: OpenAI::request_opts diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index a4435499..899ff7bb 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -3,14 +3,16 @@ module OpenAI module Responses type response_format_text_json_schema_config = { + name: String, schema: ::Hash[Symbol, top], type: :json_schema, description: String, - name: String, strict: bool? } class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + attr_accessor name: String + attr_accessor schema: ::Hash[Symbol, top] attr_accessor type: :json_schema @@ -19,16 +21,12 @@ module OpenAI def description=: (String) -> String - attr_reader name: String? - - def name=: (String) -> String - attr_accessor strict: bool? def initialize: ( + name: String, schema: ::Hash[Symbol, top], ?description: String, - ?name: String, ?strict: bool?, ?type: :json_schema ) -> void diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index 63b4ed3c..031857d2 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -6,6 +6,7 @@ module OpenAI String response_id, ?after: String, ?before: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], ?limit: Integer, ?order: OpenAI::Models::Responses::InputItemListParams::order, ?request_options: OpenAI::request_opts From 1b3c2c9ae0e7b7af2be0b67cd2fa99c8d8056fb0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 06:09:41 +0000 Subject: [PATCH 060/295] chore: more aggressive tapioca detection logic for skipping compiler introspection (#65) --- lib/openai.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/openai.rb b/lib/openai.rb index ac44c4d7..7a735871 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -2,7 +2,7 @@ # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. -if Object.const_defined?(:Tapioca) && caller_locations.any? 
{ _1.path.end_with?("tapioca/cli.rb") } +if Object.const_defined?(:Tapioca) && caller.chain([$0]).chain(ARGV).grep(/tapioca/) Warning.warn( <<~WARN \n From b562319b42210cd0fa0690d788b3094e5836e9ff Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 15:29:52 +0000 Subject: [PATCH 061/295] chore: re-order assignment lines to make unions easier to read (#66) --- lib/openai/models/all_models.rb | 26 ++-- .../models/audio/speech_create_params.rb | 54 ++++---- .../models/beta/assistant_update_params.rb | 130 +++++++++--------- .../beta/thread_create_and_run_params.rb | 8 +- .../models/beta/thread_create_params.rb | 8 +- .../beta/threads/message_create_params.rb | 8 +- .../models/beta/threads/run_create_params.rb | 8 +- ...chat_completion_assistant_message_param.rb | 8 +- .../chat/chat_completion_audio_param.rb | 54 ++++---- ...chat_completion_developer_message_param.rb | 8 +- .../chat_completion_prediction_content.rb | 8 +- .../chat_completion_system_message_param.rb | 8 +- .../chat_completion_tool_message_param.rb | 8 +- .../chat_completion_user_message_param.rb | 8 +- .../models/chat/completion_create_params.rb | 6 +- lib/openai/models/completion_create_params.rb | 46 +++---- lib/openai/models/embedding_create_params.rb | 18 +-- .../models/fine_tuning/job_create_params.rb | 26 ++-- lib/openai/models/moderation_create_params.rb | 12 +- lib/openai/models/responses_model.rb | 26 ++-- .../models/vector_store_search_params.rb | 6 +- rbi/lib/openai/models/all_models.rbi | 8 +- .../models/audio/speech_create_params.rbi | 8 +- .../models/beta/assistant_update_params.rbi | 8 +- .../beta/thread_create_and_run_params.rbi | 6 +- .../models/beta/thread_create_params.rbi | 6 +- .../beta/threads/message_create_params.rbi | 6 +- .../models/beta/threads/run_create_params.rbi | 6 +- ...hat_completion_assistant_message_param.rbi | 12 +- .../chat/chat_completion_audio_param.rbi | 8 +- ...hat_completion_developer_message_param.rbi | 6 +- .../chat_completion_prediction_content.rbi | 6 +- .../chat_completion_system_message_param.rbi | 6 +- .../chat_completion_tool_message_param.rbi | 6 +- .../chat_completion_user_message_param.rbi | 6 +- .../models/chat/completion_create_params.rbi | 4 +- .../models/completion_create_params.rbi | 20 +-- .../openai/models/embedding_create_params.rbi | 8 +- .../models/fine_tuning/job_create_params.rbi | 8 +- .../models/moderation_create_params.rbi | 10 +- rbi/lib/openai/models/responses_model.rbi | 8 +- .../models/vector_store_search_params.rbi | 4 +- sig/openai/models/all_models.rbs | 10 +- .../models/audio/speech_create_params.rbs | 24 ++-- .../models/beta/assistant_update_params.rbs | 62 ++++----- .../beta/thread_create_and_run_params.rbs | 4 +- .../models/beta/thread_create_params.rbs | 4 +- .../beta/threads/message_create_params.rbs | 4 +- .../models/beta/threads/run_create_params.rbs | 4 +- ...hat_completion_assistant_message_param.rbs | 10 +- .../chat/chat_completion_audio_param.rbs | 24 ++-- ...hat_completion_developer_message_param.rbs | 4 +- .../chat_completion_prediction_content.rbs | 4 +- .../chat_completion_system_message_param.rbs | 4 +- .../chat_completion_tool_message_param.rbs | 4 +- .../chat_completion_user_message_param.rbs | 4 +- .../models/chat/completion_create_params.rbs | 4 +- .../models/completion_create_params.rbs | 16 +-- sig/openai/models/embedding_create_params.rbs | 4 +- .../models/fine_tuning/job_create_params.rbs | 10 +- .../models/moderation_create_params.rbs | 4 +- 
sig/openai/models/responses_model.rbs | 10 +- .../models/vector_store_search_params.rbs | 4 +- 63 files changed, 431 insertions(+), 431 deletions(-) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 135bffac..f09c1abd 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -5,30 +5,30 @@ module Models module AllModels extend OpenAI::Union - # @!group - - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - - # @!endgroup - variant String variant enum: -> { OpenAI::Models::ChatModel } - variant const: OpenAI::Models::AllModels::O1_PRO + variant const: -> { OpenAI::Models::AllModels::O1_PRO } - variant const: OpenAI::Models::AllModels::O1_PRO_2025_03_19 + variant const: -> { OpenAI::Models::AllModels::O1_PRO_2025_03_19 } - variant const: OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW + variant const: -> { OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW } - variant const: OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW_2025_03_11 + variant const: -> { OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW_2025_03_11 } # @!parse # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] # def self.variants; end + + # @!group + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + # @!endgroup end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 2c0c03cc..8db71ca4 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -98,49 +98,49 @@ module Model module Voice extend OpenAI::Union - # @!group - - ALLOY = :alloy - ASH = :ash - BALLAD = :ballad - CORAL = :coral - ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova - SAGE = :sage - SHIMMER = :shimmer - VERSE = :verse - - # @!endgroup - variant String - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER + variant 
const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER } - variant const: OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE } # @!parse # # @return [Array(String, Symbol)] # def self.variants; end + + # @!group + + ALLOY = :alloy + ASH = :ash + BALLAD = :ballad + CORAL = :coral + ECHO = :echo + FABLE = :fable + ONYX = :onyx + NOVA = :nova + SAGE = :sage + SHIMMER = :shimmer + VERSE = :verse + + # @!endgroup end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 0db4e4cd..d178a24b 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -169,106 +169,106 @@ class AssistantUpdateParams < OpenAI::BaseModel module Model extend OpenAI::Union - # @!group - - O3_MINI = :"o3-mini" - O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" - O1 = :o1 - O1_2024_12_17 = :"o1-2024-12-17" - GPT_4O = :"gpt-4o" - GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" - GPT_4O_MINI = :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW = :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO = :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" - GPT_4 = :"gpt-4" - GPT_4_0314 = :"gpt-4-0314" - GPT_4_0613 = :"gpt-4-0613" - GPT_4_32K = :"gpt-4-32k" - GPT_4_32K_0314 = :"gpt-4-32k-0314" - GPT_4_32K_0613 = :"gpt-4-32k-0613" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - - # @!endgroup - variant String - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } - variant const: 
OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } - variant 
const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } - variant const: OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } # @!parse # # @return [Array(String, Symbol)] # def self.variants; end + + # @!group + + O3_MINI = :"o3-mini" + O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" + O1 = :o1 + O1_2024_12_17 = :"o1-2024-12-17" + GPT_4O = :"gpt-4o" + GPT_4O_2024_11_20 = :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06 = :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13 = :"gpt-4o-2024-05-13" + GPT_4O_MINI = :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW = :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27 = :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO = :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09 = :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW = :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW = :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW = :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW = :"gpt-4-vision-preview" + GPT_4 = :"gpt-4" + GPT_4_0314 = :"gpt-4-0314" + GPT_4_0613 = :"gpt-4-0613" + GPT_4_32K = :"gpt-4-32k" + GPT_4_32K_0314 = :"gpt-4-32k-0314" + GPT_4_32K_0613 = :"gpt-4-32k-0613" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K = :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613 = :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106 = :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" + + # @!endgroup end class ToolResources < OpenAI::BaseModel diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index afa35bb4..e3891370 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -325,18 +325,18 @@ class Message < OpenAI::BaseModel module Content extend OpenAI::Union - MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] - # The text contents of the message. variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray + variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 24ff443e..62984f11 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -100,18 +100,18 @@ class Message < OpenAI::BaseModel module Content extend OpenAI::Union - MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] - # The text contents of the message. 
variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray + variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index f9082a79..73ea83f7 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -60,18 +60,18 @@ class MessageCreateParams < OpenAI::BaseModel module Content extend OpenAI::Union - MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] - # The text contents of the message. variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray + variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index a3791a25..129ed90f 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -289,18 +289,18 @@ class AdditionalMessage < OpenAI::BaseModel module Content extend OpenAI::Union - MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] - # The text contents of the message. variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray + variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + MessageContentPartParamArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. 
Allowed values include: diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 5264bab1..c6a293dd 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -111,14 +111,11 @@ class Audio < OpenAI::BaseModel module Content extend OpenAI::Union - ArrayOfContentPartArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] - # The contents of the assistant message. variant String # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. - variant OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray + variant -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). @@ -140,6 +137,9 @@ module ArrayOfContentPart # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ArrayOfContentPartArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] end # @deprecated diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index f3800a67..638a7c7e 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -53,49 +53,49 @@ module Format module Voice extend OpenAI::Union - # @!group - - ALLOY = :alloy - ASH = :ash - BALLAD = :ballad - CORAL = :coral - ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova - SAGE = :sage - SHIMMER = :shimmer - VERSE = :verse - - # @!endgroup - variant String - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE } - variant const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER } - variant 
const: OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } # @!parse # # @return [Array(String, Symbol)] # def self.variants; end + + # @!group + + ALLOY = :alloy + ASH = :ash + BALLAD = :ballad + CORAL = :coral + ECHO = :echo + FABLE = :fable + ONYX = :onyx + NOVA = :nova + SAGE = :sage + SHIMMER = :shimmer + VERSE = :verse + + # @!endgroup end end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 1ec9bc97..98b66dd7 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -44,18 +44,18 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] - # The contents of the developer message. variant String # An array of content parts with a defined type. For developer messages, only type `text` is supported. - variant OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray + variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index b71a21fb..e170c46d 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -36,19 +36,19 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] - # The content used for a Predicted Output. This is often the # text of a file you are regenerating with minor changes. variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. - variant OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray + variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 7544935c..b311ca43 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -44,18 +44,18 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] - # The contents of the system message. variant String # An array of content parts with a defined type. 
For system messages, only type `text` is supported. - variant OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray + variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 0eb97603..31644647 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -35,18 +35,18 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] - # The contents of the tool message. variant String # An array of content parts with a defined type. For tool messages, only type `text` is supported. - variant OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray + variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ChatCompletionContentPartTextArray = + OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index fd78d809..b938d473 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -43,18 +43,18 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel module Content extend OpenAI::Union - ChatCompletionContentPartArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] - # The text contents of the message. variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. 
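The mechanical change in this commit — constants moved below the `variant` calls, targets wrapped in lambdas — works because a lambda defers constant lookup until the union actually coerces a value, so definition order inside the module no longer matters. A stripped-down sketch of the idea; `MiniUnion` here is a stand-in for illustration, not the SDK's real `OpenAI::Union` implementation:

    module MiniUnion
      def variant(target) = (@variants ||= []) << target

      # Resolve lazily: lambdas are only called at this point.
      def resolve = @variants.map { _1.is_a?(Proc) ? _1.call : _1 }
    end

    module Content
      extend MiniUnion

      variant String
      # Stored as a lambda, so StringArray need not exist yet ...
      variant -> { Content::StringArray }

      # ... and can be assigned afterwards, keeping the variant list
      # readable at the top of the module.
      StringArray = [String]
    end

    Content.resolve # => [String, [String]]
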
- variant OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray + variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + ChatCompletionContentPartArray = + OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] end end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 6953d7a6..0db1c9bf 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -606,15 +606,15 @@ module ServiceTier module Stop extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - variant String - variant OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray + variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] end class WebSearchOptions < OpenAI::BaseModel diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 6e791a6b..02ee2c45 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -236,25 +236,25 @@ class CompletionCreateParams < OpenAI::BaseModel module Model extend OpenAI::Union - # @!group - - GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" - DAVINCI_002 = :"davinci-002" - BABBAGE_002 = :"babbage-002" - - # @!endgroup - variant String - variant const: OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } - variant const: OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002 + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002 } - variant const: OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } # @!parse # # @return [Array(String, Symbol)] # def self.variants; end + + # @!group + + GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" + DAVINCI_002 = :"davinci-002" + BABBAGE_002 = :"babbage-002" + + # @!endgroup end # The prompt(s) to generate completions for, encoded as a string, array of @@ -266,23 +266,23 @@ module Model module Prompt extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - - IntegerArray = OpenAI::ArrayOf[Integer] - - ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] - variant String - variant OpenAI::Models::CompletionCreateParams::Prompt::StringArray + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::StringArray } - variant OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray } - variant OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } # @!parse # # @return [Array(String, Array, Array, Array>)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] + + IntegerArray = OpenAI::ArrayOf[Integer] + + ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] end # Up to 4 sequences where the API will stop generating further tokens. 
The @@ -290,15 +290,15 @@ module Prompt module Stop extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - variant String - variant OpenAI::Models::CompletionCreateParams::Stop::StringArray + variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 03177d2a..3bb5d983 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -87,27 +87,27 @@ class EmbeddingCreateParams < OpenAI::BaseModel module Input extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - - IntegerArray = OpenAI::ArrayOf[Integer] - - ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] - # The string that will be turned into an embedding. variant String # The array of strings that will be turned into an embedding. - variant OpenAI::Models::EmbeddingCreateParams::Input::StringArray + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::StringArray } # The array of integers that will be turned into an embedding. - variant OpenAI::Models::EmbeddingCreateParams::Input::IntegerArray + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::IntegerArray } # The array of arrays containing integers that will be turned into an embedding. - variant OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray } # @!parse # # @return [Array(String, Array, Array, Array>)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] + + IntegerArray = OpenAI::ArrayOf[Integer] + + ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] end # ID of the model to use. 
You can use the diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 0f224244..a7189227 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -147,28 +147,28 @@ class JobCreateParams < OpenAI::BaseModel module Model extend OpenAI::Union - # @!group - - BABBAGE_002 = :"babbage-002" - DAVINCI_002 = :"davinci-002" - GPT_3_5_TURBO = :"gpt-3.5-turbo" - GPT_4O_MINI = :"gpt-4o-mini" - - # @!endgroup - variant String - variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 } - variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 } - variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } - variant const: OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } # @!parse # # @return [Array(String, Symbol)] # def self.variants; end + + # @!group + + BABBAGE_002 = :"babbage-002" + DAVINCI_002 = :"davinci-002" + GPT_3_5_TURBO = :"gpt-3.5-turbo" + GPT_4O_MINI = :"gpt-4o-mini" + + # @!endgroup end # @deprecated diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index a3c068ef..5273a9d8 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -41,22 +41,22 @@ class ModerationCreateParams < OpenAI::BaseModel module Input extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - - ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] - # A string of text to classify for moderation. variant String # An array of strings to classify for moderation. - variant OpenAI::Models::ModerationCreateParams::Input::StringArray + variant -> { OpenAI::Models::ModerationCreateParams::Input::StringArray } # An array of multi-modal inputs to the moderation model. - variant OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray + variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray } # @!parse # # @return [Array(String, Array, Array)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] + + ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] end # The content moderation model you would like to use. 
Learn more in diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 3dff1960..a7995455 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -5,30 +5,30 @@ module Models module ResponsesModel extend OpenAI::Union - # @!group - - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - - # @!endgroup - variant String variant enum: -> { OpenAI::Models::ChatModel } - variant const: OpenAI::Models::ResponsesModel::O1_PRO + variant const: -> { OpenAI::Models::ResponsesModel::O1_PRO } - variant const: OpenAI::Models::ResponsesModel::O1_PRO_2025_03_19 + variant const: -> { OpenAI::Models::ResponsesModel::O1_PRO_2025_03_19 } - variant const: OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW + variant const: -> { OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW } - variant const: OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW_2025_03_11 + variant const: -> { OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW_2025_03_11 } # @!parse # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] # def self.variants; end + + # @!group + + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" + + # @!endgroup end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 24e8a53e..48c3d614 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -80,15 +80,15 @@ class VectorStoreSearchParams < OpenAI::BaseModel module Query extend OpenAI::Union - StringArray = OpenAI::ArrayOf[String] - variant String - variant OpenAI::Models::VectorStoreSearchParams::Query::StringArray + variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } # @!parse # # @return [Array(String, Array)] # def self.variants; end + + StringArray = OpenAI::ArrayOf[String] end # A filter to apply based on file attributes. 
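With the Query union reordered just above, both accepted shapes still coerce identically. An illustrative call, assuming a configured client and that the vector store search resource forwards params as the model declares (the ID and queries are placeholders, and the method name follows the resource layout rather than a verified signature):

    # Single-query form (String variant)
    client.vector_stores.search("vs_abc123", query: "return policy")

    # Multi-query form (Query::StringArray variant)
    client.vector_stores.search("vs_abc123", query: ["return policy", "refund window"])
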
diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 16ff78e8..f6a5bf8e 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -5,6 +5,10 @@ module OpenAI module AllModels extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::TaggedSymbol) } @@ -13,10 +17,6 @@ module OpenAI COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::TaggedSymbol) COMPUTER_USE_PREVIEW_2025_03_11 = T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } - def self.variants - end end end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 014716e8..626ade1f 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -96,6 +96,10 @@ module OpenAI module Voice extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } @@ -110,10 +114,6 @@ module OpenAI SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) VERSE = T.let(:verse, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) } - def self.variants - end end # The format to audio in. 
Supported formats are `mp3`, `opus`, `aac`, `flac`, diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index bd4b57f2..2cb5a2dd 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -236,6 +236,10 @@ module OpenAI module Model extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } @@ -289,10 +293,6 @@ module OpenAI T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } - def self.variants - end end class ToolResources < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index eeeb28a0..e88a0645 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -464,9 +464,6 @@ module OpenAI module Content extend OpenAI::Union - MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) - sig do override .returns( @@ -484,6 +481,9 @@ module OpenAI end def self.variants end + + MessageContentPartParamArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 753f8c43..4029540a 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -161,9 +161,6 @@ module OpenAI module Content extend OpenAI::Union - MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) - sig do override .returns( @@ -181,6 +178,9 @@ module OpenAI end def self.variants end + + MessageContentPartParamArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. 
Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 72b27b6e..07e24f42 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -100,9 +100,6 @@ module OpenAI module Content extend OpenAI::Union - MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) - sig do override .returns( @@ -120,6 +117,9 @@ module OpenAI end def self.variants end + + MessageContentPartParamArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 85c3c07f..cc320d8b 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -412,9 +412,6 @@ module OpenAI module Content extend OpenAI::Union - MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) - sig do override .returns( @@ -432,6 +429,9 @@ module OpenAI end def self.variants end + + MessageContentPartParamArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index b46a9dd7..887423f7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -156,12 +156,6 @@ module OpenAI module Content extend OpenAI::Union - ArrayOfContentPartArray = - T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], - OpenAI::Converter - ) - # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
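The Content unions in this hunk mean that message `content` may be either a plain string or an array of typed parts. A hedged sketch of both forms, assuming `chat.completions.create` accepts plain hashes for message params as it does elsewhere in the SDK:

    client.chat.completions.create(
      model: "gpt-4o",
      messages: [
        # String variant
        {role: :user, content: "Summarize this thread."},
        # Array-of-parts variant, matching the ArrayOf converters above
        {role: :user, content: [{type: :text, text: "Summarize this thread."}]}
      ]
    )
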
module ArrayOfContentPart @@ -193,6 +187,12 @@ module OpenAI end def self.variants end + + ArrayOfContentPartArray = + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], + OpenAI::Converter + ) end class FunctionCall < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index abac1261..d59ea97b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -64,6 +64,10 @@ module OpenAI module Voice extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } @@ -79,10 +83,6 @@ module OpenAI SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } - def self.variants - end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 267e8628..3efb534b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -51,12 +51,12 @@ module OpenAI module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants end + + ChatCompletionContentPartTextArray = + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index c476a4e6..2b80637d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -42,12 +42,12 @@ module OpenAI module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants end + + ChatCompletionContentPartTextArray = + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 183c3c03..edf0ff22 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -51,12 +51,12 @@ module OpenAI module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - 
T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants end + + ChatCompletionContentPartTextArray = + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 8f868442..bfb45b0a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -44,12 +44,12 @@ module OpenAI module Content extend OpenAI::Union - ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants end + + ChatCompletionContentPartTextArray = + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 73db3fd6..64034045 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -85,9 +85,6 @@ module OpenAI module Content extend OpenAI::Union - ChatCompletionContentPartArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Converter) - sig do override .returns( @@ -106,6 +103,9 @@ module OpenAI end def self.variants end + + ChatCompletionContentPartArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index df28493e..08d3dc99 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -705,11 +705,11 @@ module OpenAI module Stop extend OpenAI::Union - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - sig { override.returns([String, T::Array[String]]) } def self.variants end + + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end class WebSearchOptions < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 65892e81..4c623a0b 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -251,6 +251,10 @@ module OpenAI module Model extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } @@ -258,10 +262,6 @@ module OpenAI T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) - - sig { override.returns([String, 
OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } - def self.variants - end end # The prompt(s) to generate completions for, encoded as a string, array of @@ -273,15 +273,15 @@ module OpenAI module Prompt extend OpenAI::Union + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def self.variants + end + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) - - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants - end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -289,11 +289,11 @@ module OpenAI module Stop extend OpenAI::Union - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - sig { override.returns([String, T::Array[String]]) } def self.variants end + + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index a81280ca..aab30870 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -91,15 +91,15 @@ module OpenAI module Input extend OpenAI::Union + sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } + def self.variants + end + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) - - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants - end end # ID of the model to use. 
You can use the diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 7c0d6d5b..604cf39f 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -150,6 +150,10 @@ module OpenAI module Model extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } @@ -158,10 +162,6 @@ module OpenAI DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } - def self.variants - end end class Hyperparameters < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 8ead2c0c..b410d597 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -66,11 +66,6 @@ module OpenAI module Input extend OpenAI::Union - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - - ModerationMultiModalInputArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter) - sig do override .returns( @@ -79,6 +74,11 @@ module OpenAI end def self.variants end + + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + + ModerationMultiModalInputArray = + T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter) end # The content moderation model you would like to use. 
Learn more in diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 63a4193b..4e2b1c2b 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -5,6 +5,10 @@ module OpenAI module ResponsesModel extend OpenAI::Union + sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } + def self.variants + end + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::TaggedSymbol) } @@ -13,10 +17,6 @@ module OpenAI COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::TaggedSymbol) COMPUTER_USE_PREVIEW_2025_03_11 = T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::TaggedSymbol) - - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } - def self.variants - end end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 57283a5e..4996c14f 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -83,11 +83,11 @@ module OpenAI module Query extend OpenAI::Union - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) - sig { override.returns([String, T::Array[String]]) } def self.variants end + + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) end # A filter to apply based on file attributes. diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index ef2cc9d6..77c8f9b8 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -11,15 +11,15 @@ module OpenAI module AllModels extend OpenAI::Union - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11")] + + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" end end end diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 99ceae76..bd14d768 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -73,18 +73,6 @@ module OpenAI module Voice extend OpenAI::Union - ALLOY: :alloy - ASH: :ash - BALLAD: :ballad - CORAL: :coral - ECHO: :echo - FABLE: :fable - ONYX: :onyx - NOVA: :nova - SAGE: :sage - SHIMMER: :shimmer - VERSE: :verse - def self?.variants: -> [String, (:alloy | :ash | :ballad @@ -96,6 +84,18 @@ module OpenAI | :sage | :shimmer | :verse)] + + ALLOY: :alloy + ASH: :ash + BALLAD: :ballad + CORAL: :coral + ECHO: :echo + FABLE: :fable + ONYX: :onyx + NOVA: :nova + SAGE: :sage + SHIMMER: :shimmer + VERSE: :verse end type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index 490fde16..cdd077dd 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ 
-104,37 +104,6 @@ module OpenAI module Model extend OpenAI::Union - O3_MINI: :"o3-mini" - O3_MINI_2025_01_31: :"o3-mini-2025-01-31" - O1: :o1 - O1_2024_12_17: :"o1-2024-12-17" - GPT_4O: :"gpt-4o" - GPT_4O_2024_11_20: :"gpt-4o-2024-11-20" - GPT_4O_2024_08_06: :"gpt-4o-2024-08-06" - GPT_4O_2024_05_13: :"gpt-4o-2024-05-13" - GPT_4O_MINI: :"gpt-4o-mini" - GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" - GPT_4_5_PREVIEW: :"gpt-4.5-preview" - GPT_4_5_PREVIEW_2025_02_27: :"gpt-4.5-preview-2025-02-27" - GPT_4_TURBO: :"gpt-4-turbo" - GPT_4_TURBO_2024_04_09: :"gpt-4-turbo-2024-04-09" - GPT_4_0125_PREVIEW: :"gpt-4-0125-preview" - GPT_4_TURBO_PREVIEW: :"gpt-4-turbo-preview" - GPT_4_1106_PREVIEW: :"gpt-4-1106-preview" - GPT_4_VISION_PREVIEW: :"gpt-4-vision-preview" - GPT_4: :"gpt-4" - GPT_4_0314: :"gpt-4-0314" - GPT_4_0613: :"gpt-4-0613" - GPT_4_32K: :"gpt-4-32k" - GPT_4_32K_0314: :"gpt-4-32k-0314" - GPT_4_32K_0613: :"gpt-4-32k-0613" - GPT_3_5_TURBO: :"gpt-3.5-turbo" - GPT_3_5_TURBO_16K: :"gpt-3.5-turbo-16k" - GPT_3_5_TURBO_0613: :"gpt-3.5-turbo-0613" - GPT_3_5_TURBO_1106: :"gpt-3.5-turbo-1106" - GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125" - GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613" - def self?.variants: -> [String, (:"o3-mini" | :"o3-mini-2025-01-31" | :o1 @@ -165,6 +134,37 @@ module OpenAI | :"gpt-3.5-turbo-1106" | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613")] + + O3_MINI: :"o3-mini" + O3_MINI_2025_01_31: :"o3-mini-2025-01-31" + O1: :o1 + O1_2024_12_17: :"o1-2024-12-17" + GPT_4O: :"gpt-4o" + GPT_4O_2024_11_20: :"gpt-4o-2024-11-20" + GPT_4O_2024_08_06: :"gpt-4o-2024-08-06" + GPT_4O_2024_05_13: :"gpt-4o-2024-05-13" + GPT_4O_MINI: :"gpt-4o-mini" + GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" + GPT_4_5_PREVIEW: :"gpt-4.5-preview" + GPT_4_5_PREVIEW_2025_02_27: :"gpt-4.5-preview-2025-02-27" + GPT_4_TURBO: :"gpt-4-turbo" + GPT_4_TURBO_2024_04_09: :"gpt-4-turbo-2024-04-09" + GPT_4_0125_PREVIEW: :"gpt-4-0125-preview" + GPT_4_TURBO_PREVIEW: :"gpt-4-turbo-preview" + GPT_4_1106_PREVIEW: :"gpt-4-1106-preview" + GPT_4_VISION_PREVIEW: :"gpt-4-vision-preview" + GPT_4: :"gpt-4" + GPT_4_0314: :"gpt-4-0314" + GPT_4_0613: :"gpt-4-0613" + GPT_4_32K: :"gpt-4-32k" + GPT_4_32K_0314: :"gpt-4-32k-0314" + GPT_4_32K_0613: :"gpt-4-32k-0613" + GPT_3_5_TURBO: :"gpt-3.5-turbo" + GPT_3_5_TURBO_16K: :"gpt-3.5-turbo-16k" + GPT_3_5_TURBO_0613: :"gpt-3.5-turbo-0613" + GPT_3_5_TURBO_1106: :"gpt-3.5-turbo-1106" + GPT_3_5_TURBO_0125: :"gpt-3.5-turbo-0125" + GPT_3_5_TURBO_16K_0613: :"gpt-3.5-turbo-16k-0613" end type tool_resources = diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index f9593b82..1cb60a89 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -149,12 +149,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] MessageContentPartParamArray: message_content_part_param_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 12ab7a4d..bd37a0ce 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ 
b/sig/openai/models/beta/thread_create_params.rbs @@ -65,12 +65,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] MessageContentPartParamArray: message_content_part_param_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 3acdf91c..76c17e62 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -40,12 +40,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] MessageContentPartParamArray: message_content_part_param_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index bf78c39f..694f1fb2 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -124,12 +124,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + type message_content_part_param_array = ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] MessageContentPartParamArray: message_content_part_param_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] end type role = :user | :assistant diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index 47bcea48..968d73be 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -64,11 +64,6 @@ module OpenAI module Content extend OpenAI::Union - type array_of_content_part_array = - ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] - - ArrayOfContentPartArray: array_of_content_part_array - type array_of_content_part = OpenAI::Models::Chat::ChatCompletionContentPartText | OpenAI::Models::Chat::ChatCompletionContentPartRefusal @@ -80,6 +75,11 @@ module OpenAI end def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] + + type array_of_content_part_array = + ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + + ArrayOfContentPartArray: array_of_content_part_array end type function_call = { arguments: String, name: String } diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index 3f277ae1..dc4be41d 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -52,18 +52,6 @@ module OpenAI module Voice extend 
OpenAI::Union - ALLOY: :alloy - ASH: :ash - BALLAD: :ballad - CORAL: :coral - ECHO: :echo - FABLE: :fable - ONYX: :onyx - NOVA: :nova - SAGE: :sage - SHIMMER: :shimmer - VERSE: :verse - def self?.variants: -> [String, (:alloy | :ash | :ballad @@ -75,6 +63,18 @@ module OpenAI | :sage | :shimmer | :verse)] + + ALLOY: :alloy + ASH: :ash + BALLAD: :ballad + CORAL: :coral + ECHO: :echo + FABLE: :fable + ONYX: :onyx + NOVA: :nova + SAGE: :sage + SHIMMER: :shimmer + VERSE: :verse end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index ccf0ea3d..9bd2727c 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -33,12 +33,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index ccfde6db..cbf6347c 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -27,12 +27,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index 5c0e475c..56a477ec 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -33,12 +33,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 6f295779..e0e31f90 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -31,12 +31,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + type chat_completion_content_part_text_array = ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] ChatCompletionContentPartTextArray: chat_completion_content_part_text_array - - def self?.variants: -> [String, 
::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] end end end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index 7724c406..308744d9 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -33,12 +33,12 @@ module OpenAI module Content extend OpenAI::Union + def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] + type chat_completion_content_part_array = ::Array[OpenAI::Models::Chat::chat_completion_content_part] ChatCompletionContentPartArray: chat_completion_content_part_array - - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 4bcc300d..915923c4 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -260,11 +260,11 @@ module OpenAI module Stop extend OpenAI::Union + def self?.variants: -> [String, ::Array[String]] + type string_array = ::Array[String] StringArray: string_array - - def self?.variants: -> [String, ::Array[String]] end type web_search_options = diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index b6138203..520e21d5 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -91,13 +91,13 @@ module OpenAI module Model extend OpenAI::Union - GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" - DAVINCI_002: :"davinci-002" - BABBAGE_002: :"babbage-002" - def self?.variants: -> [String, (:"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002")] + + GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct" + DAVINCI_002: :"davinci-002" + BABBAGE_002: :"babbage-002" end type prompt = @@ -106,6 +106,8 @@ module OpenAI module Prompt extend OpenAI::Union + def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] + type string_array = ::Array[String] StringArray: string_array @@ -117,8 +119,6 @@ module OpenAI type array_of_token2_d_array = ::Array[::Array[Integer]] ArrayOfToken2DArray: array_of_token2_d_array - - def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type stop = (String | ::Array[String])? 
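The RBS hunks in this patch are purely positional: `def self?.variants` moves ahead of the alias constants, with no change in behavior. As a minimal, illustrative sketch (argument values are hypothetical), either shape still satisfies the `stop` union:

    # Illustrative only: both variants of the `stop` union above are accepted.
    OpenAI::Models::CompletionCreateParams.new(
      model: :"gpt-3.5-turbo-instruct",
      prompt: "Say hi",
      stop: "\n"            # String variant
    )
    OpenAI::Models::CompletionCreateParams.new(
      model: :"gpt-3.5-turbo-instruct",
      prompt: "Say hi",
      stop: ["\n", "END"]   # ::Array[String] variant
    )
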
@@ -126,11 +126,11 @@ module OpenAI module Stop extend OpenAI::Union + def self?.variants: -> [String, ::Array[String]] + type string_array = ::Array[String] StringArray: string_array - - def self?.variants: -> [String, ::Array[String]] end end end diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 423f85e6..484259cf 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -49,6 +49,8 @@ module OpenAI module Input extend OpenAI::Union + def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] + type string_array = ::Array[String] StringArray: string_array @@ -60,8 +62,6 @@ module OpenAI type array_of_token2_d_array = ::Array[::Array[Integer]] ArrayOfToken2DArray: array_of_token2_d_array - - def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] end type model = String | OpenAI::Models::embedding_model diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index e0b090b5..04733621 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -70,15 +70,15 @@ module OpenAI module Model extend OpenAI::Union - BABBAGE_002: :"babbage-002" - DAVINCI_002: :"davinci-002" - GPT_3_5_TURBO: :"gpt-3.5-turbo" - GPT_4O_MINI: :"gpt-4o-mini" - def self?.variants: -> [String, (:"babbage-002" | :"davinci-002" | :"gpt-3.5-turbo" | :"gpt-4o-mini")] + + BABBAGE_002: :"babbage-002" + DAVINCI_002: :"davinci-002" + GPT_3_5_TURBO: :"gpt-3.5-turbo" + GPT_4O_MINI: :"gpt-4o-mini" end type hyperparameters = diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index 266f6beb..edf5e5f5 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -35,6 +35,8 @@ module OpenAI module Input extend OpenAI::Union + def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] + type string_array = ::Array[String] StringArray: string_array @@ -43,8 +45,6 @@ module OpenAI ::Array[OpenAI::Models::moderation_multi_modal_input] ModerationMultiModalInputArray: moderation_multi_modal_input_array - - def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] end type model = String | OpenAI::Models::moderation_model diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 2f676c59..5e0a2660 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -11,15 +11,15 @@ module OpenAI module ResponsesModel extend OpenAI::Union - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" | :"o1-pro-2025-03-19" | :"computer-use-preview" | :"computer-use-preview-2025-03-11")] + + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" end end end diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 24619a7e..9b476ac0 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ 
b/sig/openai/models/vector_store_search_params.rbs @@ -52,11 +52,11 @@ module OpenAI module Query extend OpenAI::Union + def self?.variants: -> [String, ::Array[String]] + type string_array = ::Array[String] StringArray: string_array - - def self?.variants: -> [String, ::Array[String]] end type filters = From 6a6b015104a543ed244744a962c4005806a98663 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 16:08:09 +0000 Subject: [PATCH 062/295] feat(api): add `get /chat/completions` endpoint --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 940027f0..923a6960 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6663c59193eb95b201e492de17dcbd5e126ba03d18ce66287a3e2c632ca56fe7.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: d36e491b0afc4f79e3afad4b3c9bec70 +config_hash: 9351ea829c2b41da3b48a38c934c92ee From cf573e840a43346dd9a61698b31626ec5708bbf4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 19:42:41 +0000 Subject: [PATCH 063/295] feat(api): add `get /responses/{response_id}/input_items` endpoint --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 923a6960..426a77c6 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6663c59193eb95b201e492de17dcbd5e126ba03d18ce66287a3e2c632ca56fe7.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: 9351ea829c2b41da3b48a38c934c92ee +config_hash: e25e31d8446b6bc0e3ef7103b6993cce From 11f3a7bac3b94a0f1faf732b8aa4db041cbc2fb3 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 27 Mar 2025 23:54:53 +0000 Subject: [PATCH 064/295] chore(internal): minor refactoring of utils (#67) --- Rakefile | 2 +- lib/openai/util.rb | 8 +++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/Rakefile b/Rakefile index cf2e280f..f8c899c1 100644 --- a/Rakefile +++ b/Rakefile @@ -11,7 +11,7 @@ CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/]) xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] -task(default: [:test]) +multitask(default: [:test]) multitask(:test) do rb = diff --git a/lib/openai/util.rb b/lib/openai/util.rb index a06aa76e..f19004c2 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -75,7 +75,7 @@ def primitive?(input) def coerce_boolean(input) case input.is_a?(String) ? input.downcase : input in Numeric - !input.zero? + input.nonzero? 
in "true" true in "false" @@ -165,14 +165,12 @@ class << self private def deep_merge_lr(lhs, rhs, concat: false) case [lhs, rhs, concat] in [Hash, Hash, _] - # rubocop:disable Style/YodaCondition - rhs_cleaned = rhs.reject { |_, val| OMIT == val } + rhs_cleaned = rhs.reject { _2 == OpenAI::Util::OMIT } lhs - .reject { |key, _| OMIT == rhs[key] } + .reject { |key, _| rhs[key] == OpenAI::Util::OMIT } .merge(rhs_cleaned) do |_, old_val, new_val| deep_merge_lr(old_val, new_val, concat: concat) end - # rubocop:enable Style/YodaCondition in [Array, Array, true] lhs.concat(rhs) else From fa67394f8e6e97268dc84a043a3585f5b5690dd2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 28 Mar 2025 19:59:10 +0000 Subject: [PATCH 065/295] feat(internal): converter interface should recurse without schema (#68) --- lib/openai/base_model.rb | 891 ++++++++++++++++----------------- rbi/lib/openai/base_model.rbi | 194 +++---- sig/openai/base_model.rbs | 92 ++-- test/openai/base_model_test.rb | 666 +++++++++++++++--------- 4 files changed, 974 insertions(+), 869 deletions(-) diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index c12857ab..2d37a563 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -9,22 +9,34 @@ module Converter # # @param value [Object] # - # @return [Object] - def coerce(value) = value - - # @api private + # @param state [Hash{Symbol=>Object}] . # - # @param value [Object] + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched # # @return [Object] - def dump(value) = value + def coerce(value, state:) = (raise NotImplementedError) # @api private # # @param value [Object] # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) = (raise NotImplementedError) + # @return [Object] + def dump(value) + case value + in Array + value.map { OpenAI::Unknown.dump(_1) } + in Hash + value.transform_values { OpenAI::Unknown.dump(_1) } + in OpenAI::BaseModel + value.class.dump(value) + else + value + end + end # rubocop:enable Lint/UnusedMethodArgument @@ -44,14 +56,14 @@ class << self # @return [Proc] def type_info(spec) case spec - in Hash - type_info(spec.slice(:const, :enum, :union).first&.last) in Proc spec - in OpenAI::Converter | Module | Symbol - -> { spec } + in Hash + type_info(spec.slice(:const, :enum, :union).first&.last) in true | false -> { OpenAI::BooleanModel } + in OpenAI::Converter | Class | Symbol + -> { spec } in NilClass | Integer | Float -> { spec.class } end @@ -66,108 +78,127 @@ def type_info(spec) # converted value # 3. otherwise, the given `value` unaltered # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + # # @param target [OpenAI::Converter, Class] + # # @param value [Object] # + # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. 
For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [Object] - def coerce(target, value) + def coerce(target, value, state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}) + strictness, exactness = state.fetch_values(:strictness, :exactness) + case target in OpenAI::Converter - target.coerce(value) - in Symbol - case value - in Symbol | String if (val = value.to_sym) == target - val - else - value + return target.coerce(value, state: state) + in Class + if value.is_a?(target) + exactness[:yes] += 1 + return value end - in Module + case target in -> { _1 <= NilClass } - nil + exactness[value.nil? ? :yes : :maybe] += 1 + return nil in -> { _1 <= Integer } - value.is_a?(Numeric) ? Integer(value) : value + if value.is_a?(Integer) + exactness[:yes] += 1 + return value + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Integer(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end in -> { _1 <= Float } - value.is_a?(Numeric) ? Float(value) : value - in -> { _1 <= Symbol } - value.is_a?(String) ? value.to_sym : value + if value.is_a?(Numeric) + exactness[:yes] += 1 + return Float(value) + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Float(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end in -> { _1 <= String } - value.is_a?(Symbol) ? value.to_s : value + case value + in String | Symbol | Numeric + exactness[value.is_a?(Numeric) ? :maybe : :yes] += 1 + return value.to_s + else + if strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + end + end in -> { _1 <= Date || _1 <= Time } - value.is_a?(String) ? target.parse(value) : value - in -> { _1 <= IO } - value.is_a?(String) ? StringIO.new(value) : value + Kernel.then do + return target.parse(value).tap { exactness[:yes] += 1 } + rescue ArgumentError, TypeError => e + raise e if strictness == :strong + end + in -> { _1 <= IO } if value.is_a?(String) + exactness[:yes] += 1 + return StringIO.new(value.b) else - value end - end - end - - # @api private - # - # @param target [OpenAI::Converter, Class] - # @param value [Object] - # - # @return [Object] - def dump(target, value) - case target - in OpenAI::Converter - target.dump(value) + in Symbol + if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target + exactness[:yes] += 1 + return target + elsif strictness == :strong + message = "cannot convert non-matching #{value.class} into #{target.inspect}" + raise ArgumentError.new(message) + end else - value end + + exactness[:no] += 1 + value end # @api private # - # The underlying algorithm for computing maximal compatibility is subject to - # future improvements. 
- # - # Similar to `#.coerce`, used to determine the best union variant to decode into. - # - # 1. determine if strict-ish coercion is possible - # 2. return either result of successful coercion or if loose coercion is possible - # 3. return a score for recursively tallied count for fields that can be coerced - # # @param target [OpenAI::Converter, Class] # @param value [Object] # # @return [Object] - def try_strict_coerce(target, value) - case target - in OpenAI::Converter - target.try_strict_coerce(value) - in Symbol - case value - in Symbol | String if (val = value.to_sym) == target - [true, val, 1] - else - [false, false, 0] - end - in Module - case [target, value] - in [-> { _1 <= NilClass }, _] - [true, nil, value.nil? ? 1 : 0] - in [-> { _1 <= Integer }, Numeric] - [true, Integer(value), 1] - in [-> { _1 <= Float }, Numeric] - [true, Float(value), 1] - in [-> { _1 <= Symbol }, String] - [true, value.to_sym, 1] - in [-> { _1 <= String }, Symbol] - [true, value.to_s, 1] - in [-> { _1 <= Date || _1 <= Time }, String] - Kernel.then do - [true, target.parse(value), 1] - rescue ArgumentError - [false, false, 0] - end - in [_, ^target] - [true, value, 1] - else - [false, false, 0] - end - end + def dump(target, value) + target.is_a?(OpenAI::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value) end end end @@ -193,13 +224,23 @@ def self.===(other) = true def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown class << self - # @!parse - # # @api private - # # - # # @param value [Object] - # # - # # @return [Object] - # def coerce(value) = super + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) + state.fetch(:exactness)[:yes] += 1 + value + end # @!parse # # @api private @@ -208,16 +249,6 @@ class << self # # # # @return [Object] # def dump(value) = super - - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - # prevent unknown variant from being chosen during the first coercion pass - [false, true, 0] - end end # rubocop:enable Lint/UnusedMethodArgument @@ -242,13 +273,23 @@ def self.===(other) = other == true || other == false def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel class << self - # @!parse - # # @api private - # # - # # @param value [Boolean, Object] - # # - # # @return [Boolean, Object] - # def coerce(value) = super + # @api private + # + # @param value [Boolean, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Boolean, Object] + def coerce(value, state:) + state.fetch(:exactness)[value == true || value == false ? 
:yes : :no] += 1 + value + end # @!parse # # @api private @@ -257,20 +298,6 @@ class << self # # # # @return [Boolean, Object] # def dump(value) = super - - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - case value - in true | false - [true, value, 1] - else - [false, false, 0] - end - end end end @@ -333,19 +360,34 @@ def ===(other) = values.include?(other) # # @return [Boolean] def ==(other) - other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Enum) && other.values.to_set == values.to_set + other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set end # @api private # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. + # # @param value [String, Symbol, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [Symbol, Object] - def coerce(value) - case value - in Symbol | String if values.include?(val = value.to_sym) + def coerce(value, state:) + exactness = state.fetch(:exactness) + val = value.is_a?(String) ? value.to_sym : value + + if values.include?(val) + exactness[:yes] += 1 val else + exactness[values.first&.class == val.class ? :maybe : :no] += 1 value end end @@ -357,27 +399,6 @@ def coerce(value) # # # # @return [Symbol, Object] # def dump(value) = super - - # @api private - # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - return [true, value, 1] if values.include?(value) - - case value - in Symbol | String if values.include?(val = value.to_sym) - [true, val, 1] - else - case [value, values.first] - in [true | false, true | false] | [Integer, Integer] | [Symbol | String, Symbol] - [false, true, 0] - else - [false, false, 0] - end - end - end end # @api private @@ -426,9 +447,7 @@ module Union # All of the specified variants for this union. # # @return [Array] - def variants - derefed_variants.map(&:last) - end + def variants = derefed_variants.map(&:last) # @api private # @@ -458,7 +477,7 @@ def variants case key in Symbol [key, OpenAI::Converter.type_info(spec)] - in Proc | OpenAI::Converter | Module | Hash + in Proc | OpenAI::Converter | Class | Hash [nil, OpenAI::Converter.type_info(key)] end @@ -475,16 +494,14 @@ def variants in [_, OpenAI::BaseModel] value.class in [Symbol, Hash] - key = - if value.key?(@discriminator) - value.fetch(@discriminator) - elsif value.key?((discriminator = @discriminator.to_s)) - value.fetch(discriminator) - end + key = value.fetch(@discriminator) do + value.fetch(@discriminator.to_s, OpenAI::Util::OMIT) + end + + return nil if key == OpenAI::Util::OMIT key = key.to_sym if key.is_a?(String) - _, resolved = known_variants.find { |k,| k == key } - resolved.nil? ? OpenAI::Unknown : resolved.call + known_variants.find { |k,| k == key }&.last&.call else nil end @@ -506,87 +523,81 @@ def ===(other) # # @return [Boolean] def ==(other) - other.is_a?(Module) && other.singleton_class.ancestors.include?(OpenAI::Union) && other.derefed_variants == derefed_variants + other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants end # @api private # # @param value [Object] # + # @param state [Hash{Symbol=>Object}] . 
+ # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [Object] - def coerce(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.coerce(variant, value) + def coerce(value, state:) + if (target = resolve_variant(value)) + return OpenAI::Converter.coerce(target, value, state: state) end - matches = [] + strictness = state.fetch(:strictness) + exactness = state.fetch(:exactness) + state[:strictness] = strictness == :strong ? true : strictness + alternatives = [] known_variants.each do |_, variant_fn| - variant = variant_fn.call - - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, _] + target = variant_fn.call + exact = state[:exactness] = {yes: 0, no: 0, maybe: 0} + state[:branched] += 1 + + coerced = OpenAI::Converter.coerce(target, value, state: state) + yes, no, maybe = exact.values + if (no + maybe).zero? || (!strictness && yes.positive?) + exact.each { exactness[_1] += _2 } + state[:exactness] = exactness return coerced - in [false, true, score] - matches << [score, variant] - in [false, false, _] - nil + elsif maybe.positive? + alternatives << [[-yes, -maybe, no], exact, coerced] end end - _, variant = matches.sort! { _2.first <=> _1.first }.find { |score,| !score.zero? } - variant.nil? ? value : OpenAI::Converter.coerce(variant, value) - end - - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (variant = resolve_variant(value)) - return OpenAI::Converter.dump(variant, value) - end - - known_variants.each do |_, variant_fn| - variant = variant_fn.call - if variant === value - return OpenAI::Converter.dump(variant, value) + case alternatives.sort_by(&:first) + in [] + exactness[:no] += 1 + if strictness == :strong + message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" + raise ArgumentError.new(message) end + value + in [[_, exact, coerced], *] + exact.each { exactness[_1] += _2 } + coerced end - value + .tap { state[:exactness] = exactness } + ensure + state[:strictness] = strictness end # @api private # # @param value [Object] # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - # TODO(ruby) this will result in super linear decoding behaviour for nested unions - # follow up with a decoding context that captures current strictness levels - if (variant = resolve_variant(value)) - return Converter.try_strict_coerce(variant, value) + # @return [Object] + def dump(value) + if (target = resolve_variant(value)) + return OpenAI::Converter.dump(target, value) end - coercible = false - max_score = 0 - - known_variants.each do |_, variant_fn| - variant = variant_fn.call - - case OpenAI::Converter.try_strict_coerce(variant, value) - in [true, coerced, score] - return [true, coerced, score] - in [false, true, score] - coercible = true - max_score = [max_score, score].max - in [false, false, _] - nil - end + known_variants.each do + target = _2.call + return OpenAI::Converter.dump(target, value) if target === value end - [false, coercible, max_score] + super end # rubocop:enable Style/CaseEquality @@ -617,36 +628,46 @@ def self.[](type_info, spec = {}) = new(type_info, spec) # @param other [Object] # # @return [Boolean] - def ===(other) - type = item_type - case other - in Array - # rubocop:disable Style/CaseEquality - other.all? 
{ type === _1 } - # rubocop:enable Style/CaseEquality - else - false - end - end + def ===(other) = other.is_a?(Array) && other.all?(item_type) # @param other [Object] # # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.item_type == item_type + def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type # @api private # # @param value [Enumerable, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [Array, Object] - def coerce(value) - type = item_type - case value - in Enumerable unless value.is_a?(Hash) - value.map { OpenAI::Converter.coerce(type, _1) } - else - value + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Array) + exactness[:no] += 1 + return value end + + target = item_type + exactness[:yes] += 1 + value + .map do |item| + case [nilable?, item] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Converter.coerce(target, item, state: state) + end + end end # @api private @@ -655,57 +676,19 @@ def coerce(value) # # @return [Array, Object] def dump(value) - type = item_type - case value - in Enumerable unless value.is_a?(Hash) - value.map { OpenAI::Converter.dump(type, _1) }.to_a - else - value - end + target = item_type + value.is_a?(Array) ? value.map { OpenAI::Converter.dump(target, _1) } : super end # @api private # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - case value - in Array - type = item_type - great_success = true - tally = 0 - - mapped = - value.map do |item| - case OpenAI::Converter.try_strict_coerce(type, item) - in [true, coerced, score] - tally += score - coerced - in [false, true, score] - great_success = false - tally += score - item - in [false, false, _] - great_success &&= item.nil? - item - end - end - - if great_success - [true, mapped, tally] - else - [false, true, tally] - end - else - [false, false, 0] - end - end + # @return [OpenAI::Converter, Class] + protected def item_type = @item_type_fn.call # @api private # - # @return [OpenAI::Converter, Class] - protected def item_type = @item_type_fn.call + # @return [Boolean] + protected def nilable? = @nilable # @api private # @@ -722,6 +705,7 @@ def try_strict_coerce(value) # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] end end @@ -769,24 +753,46 @@ def ===(other) # @param other [Object] # # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::HashOf) && other.item_type == item_type + def ==(other) = other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type # @api private # # @param value [Hash{Object=>Object}, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [Hash{Symbol=>Object}, Object] - def coerce(value) - type = item_type - case value - in Hash - value.to_h do |key, val| - coerced = OpenAI::Converter.coerce(type, val) - [key.is_a?(String) ? 
key.to_sym : key, coerced] - end - else - value + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Hash) + exactness[:no] += 1 + return value end + + target = item_type + exactness[:yes] += 1 + value + .to_h do |key, val| + k = key.is_a?(String) ? key.to_sym : key + v = + case [nilable?, val] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Converter.coerce(target, val, state: state) + end + + exactness[:no] += 1 unless k.is_a?(Symbol) + [k, v] + end end # @api private @@ -795,59 +801,19 @@ def coerce(value) # # @return [Hash{Symbol=>Object}, Object] def dump(value) - type = item_type - case value - in Hash - value.transform_values do |val| - OpenAI::Converter.dump(type, val) - end - else - value - end + target = item_type + value.is_a?(Hash) ? value.transform_values { OpenAI::Converter.dump(target, _1) } : super end # @api private # - # @param value [Object] - # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - case value - in Hash - type = item_type - great_success = true - tally = 0 - - mapped = - value.transform_values do |val| - case OpenAI::Converter.try_strict_coerce(type, val) - in [true, coerced, score] - tally += score - coerced - in [false, true, score] - great_success = false - tally += score - val - in [false, false, _] - great_success &&= val.nil? - val - end - end - - if great_success - [true, mapped, tally] - else - [false, true, tally] - end - else - [false, false, 0] - end - end + # @return [OpenAI::Converter, Class] + protected def item_type = @item_type_fn.call # @api private # - # @return [OpenAI::Converter, Class] - protected def item_type = @item_type_fn.call + # @return [Boolean] + protected def nilable? = @nilable # @api private # @@ -864,6 +830,7 @@ def try_strict_coerce(value) # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] end end @@ -890,13 +857,6 @@ def known_fields @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) end - # @api private - # - # @return [Hash{Symbol=>Symbol}] - def reverse_map - @reverse_map ||= (self < OpenAI::BaseModel ? superclass.reverse_map.dup : {}) - end - # @api private # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] @@ -906,11 +866,6 @@ def fields end end - # @api private - # - # @return [Hash{Symbol=>Proc}] - def defaults = (@defaults ||= {}) - # @api private # # @param name_sym [Symbol] @@ -931,38 +886,40 @@ def defaults = (@defaults ||= {}) private def add_field(name_sym, required:, type_info:, spec:) type_fn, info = case type_info - in Proc | Module | OpenAI::Converter + in Proc | OpenAI::Converter | Class [OpenAI::Converter.type_info({**spec, union: type_info}), spec] in Hash [OpenAI::Converter.type_info(type_info), type_info] end - fallback = info[:const] - defaults[name_sym] = fallback if required && !info[:nil?] && info.key?(:const) - - key = info[:api_name]&.tap { reverse_map[_1] = name_sym } || name_sym setter = "#{name_sym}=" + api_name = info.fetch(:api_name, name_sym) + nilable = info[:nil?] + const = required && !nilable ? 
info.fetch(:const, OpenAI::Util::OMIT) : OpenAI::Util::OMIT - if known_fields.key?(name_sym) - [name_sym, setter].each { undef_method(_1) } - end + [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) - known_fields[name_sym] = {mode: @mode, key: key, required: required, type_fn: type_fn} + known_fields[name_sym] = + { + mode: @mode, + api_name: api_name, + required: required, + nilable: nilable, + const: const, + type_fn: type_fn + } - define_method(setter) do |val| - @data[key] = val - end + define_method(setter) { @data.store(name_sym, _1) } define_method(name_sym) do - field_type = type_fn.call - value = @data.fetch(key) { self.class.defaults[key] } - OpenAI::Converter.coerce(field_type, value) + target = type_fn.call + value = @data.fetch(name_sym) { const == OpenAI::Util::OMIT ? nil : const } + state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + (nilable || !required) && value.nil? ? nil : OpenAI::Converter.coerce(target, value, state: state) rescue StandardError - name = self.class.name.split("::").last - raise OpenAI::ConversionError.new( - "Failed to parse #{name}.#{name_sym} as #{field_type.inspect}. " \ - "To get the unparsed API response, use #{name}[:#{key}]." - ) + cls = self.class.name.split("::").last + message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." + raise OpenAI::ConversionError.new(message) end end @@ -1028,120 +985,124 @@ def optional(name_sym, type_info, spec = {}) ensure @mode = nil end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields end # @param other [Object] # # @return [Boolean] - def ==(other) - case other - in OpenAI::BaseModel - self.class.fields == other.class.fields && @data == other.to_h - else - false - end - end + def ==(other) = self.class == other.class && @data == other.to_h class << self # @api private # # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # # @return [OpenAI::BaseModel, Object] - def coerce(value) - case OpenAI::Util.coerce_hash(value) - in Hash => coerced - new(coerced) - else - value + def coerce(value, state:) + exactness = state.fetch(:exactness) + + if value.is_a?(self.class) + exactness[:yes] += 1 + return value end - end - # @api private - # - # @param value [OpenAI::BaseModel, Object] - # - # @return [Hash{Object=>Object}, Object] - def dump(value) - unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) + unless (val = OpenAI::Util.coerce_hash(value)).is_a?(Hash) + exactness[:no] += 1 return value end + exactness[:yes] += 1 - values = coerced.filter_map do |key, val| - name = key.to_sym - case (field = known_fields[name]) - in nil - [name, val] - else - mode, type_fn, api_name = field.fetch_values(:mode, :type_fn, :key) - case mode - in :coerce - next + keys = val.keys.to_set + instance = new + data = instance.to_h + + fields.each do |name, field| + mode, required, target = field.fetch_values(:mode, :required, :type) + api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) + + unless val.key?(api_name) + if const != OpenAI::Util::OMIT + exactness[:yes] += 1 + elsif required && mode != :dump + exactness[nilable ? 
:maybe : :no] += 1 else - target = type_fn.call - [api_name, OpenAI::Converter.dump(target, val)] + exactness[:yes] += 1 end + next end - end.to_h - defaults.each do |key, val| - next if values.key?(key) + item = val.fetch(api_name) + keys.delete(api_name) - values[key] = val + converted = + if item.nil? && (nilable || !required) + exactness[nilable ? :yes : :maybe] += 1 + nil + else + coerced = OpenAI::Converter.coerce(target, item, state: state) + case target + in OpenAI::Converter | Symbol + coerced + else + item + end + end + data.store(name, converted) end - values + keys.each { data.store(_1, val.fetch(_1)) } + instance end # @api private # - # @param value [Object] + # @param value [OpenAI::BaseModel, Object] # - # @return [Array(true, Object, nil), Array(false, Boolean, Integer)] - def try_strict_coerce(value) - case value - in Hash | OpenAI::BaseModel - value = value.to_h - else - return [false, false, 0] + # @return [Hash{Object=>Object}, Object] + def dump(value) + unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) + return super end - keys = value.keys.to_set - great_success = true - tally = 0 acc = {} - known_fields.each_value do |field| - mode, required, type_fn, api_name = field.fetch_values(:mode, :required, :type_fn, :key) - keys.delete(api_name) - - case [required && mode != :dump, value.key?(api_name)] - in [_, true] - target = type_fn.call - item = value.fetch(api_name) - case OpenAI::Converter.try_strict_coerce(target, item) - in [true, coerced, score] - tally += score - acc[api_name] = coerced - in [false, true, score] - great_success = false - tally += score - acc[api_name] = item - in [false, false, _] - great_success &&= item.nil? + coerced.each do |key, val| + name = key.is_a?(String) ? key.to_sym : key + case (field = known_fields[name]) + in nil + acc.store(name, super(val)) + else + mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) + case mode + in :coerce + next + else + target = type_fn.call + acc.store(api_name, OpenAI::Converter.dump(target, val)) end - in [true, false] - great_success = false - in [false, false] - nil end end - keys.each do |key| - acc[key] = value.fetch(key) + known_fields.each_value do |field| + mode, api_name, const = field.fetch_values(:mode, :api_name, :const) + next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Util::OMIT + acc.store(api_name, const) end - great_success ? [true, new(acc), tally] : [false, true, tally] + acc end end @@ -1181,14 +1142,15 @@ def to_h = @data # # @return [Hash{Symbol=>Object}] def deconstruct_keys(keys) - (keys || self.class.known_fields.keys).filter_map do |k| - unless self.class.known_fields.key?(k) - next - end + (keys || self.class.known_fields.keys) + .filter_map do |k| + unless self.class.known_fields.key?(k) + next + end - [k, method(k).call] - end - .to_h + [k, public_send(k)] + end + .to_h end # Create a new instance of a model. 
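As an orientation sketch (not part of the patch), the threaded state can be exercised directly; the tally below follows the `BooleanModel.coerce` branch shown earlier in this diff:

    state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
    OpenAI::Converter.coerce(OpenAI::BooleanModel, "true", state: state)
    # => "true": the String passes through unchanged, and since it is not a
    # literal true/false, the attempt is tallied as a miss.
    state[:exactness] # => {yes: 0, no: 1, maybe: 0}
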
@@ -1197,21 +1159,7 @@ def deconstruct_keys(keys) def initialize(data = {}) case OpenAI::Util.coerce_hash(data) in Hash => coerced - @data = coerced.to_h do |key, value| - name = key.to_sym - mapped = self.class.reverse_map.fetch(name, name) - type = self.class.fields[mapped]&.fetch(:type) - stored = - case [type, value] - in [Module, Hash] if type <= OpenAI::BaseModel - type.new(value) - in [OpenAI::ArrayOf, Array] | [OpenAI::HashOf, Hash] - type.coerce(value) - else - value - end - [name, stored] - end + @data = coerced else raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") end @@ -1222,9 +1170,12 @@ def to_s = @data.to_s # @return [String] def inspect - "#<#{self.class.name}:0x#{object_id.to_s(16)} #{deconstruct_keys(nil).map do |k, v| - "#{k}=#{v.inspect}" - end.join(' ')}>" + rows = self.class.known_fields.keys.map do + "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" + rescue OpenAI::ConversionError + "#{_1}=#{@data.fetch(_1)}" + end + "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" end end end diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index c99d19ab..0f1d214f 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -5,9 +5,18 @@ module OpenAI module Converter Input = T.type_alias { T.any(OpenAI::Converter, T::Class[T.anything]) } + State = + T.type_alias do + { + strictness: T.any(T::Boolean, Symbol), + exactness: {yes: Integer, no: Integer, maybe: Integer}, + branched: Integer + } + end + # @api private - sig { overridable.params(value: T.anything).returns(T.anything) } - def coerce(value) + sig { overridable.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } + def coerce(value, state:) end # @api private @@ -15,15 +24,6 @@ module OpenAI def dump(value) end - # @api private - sig do - overridable - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end - class << self # @api private sig do @@ -51,28 +51,43 @@ module OpenAI # 2. if it's possible and safe to convert the given `value` to `target`, then the # converted value # 3. otherwise, the given `value` unaltered - sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } - def self.coerce(target, value) + # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + sig do + params(target: OpenAI::Converter::Input, value: T.anything, state: OpenAI::Converter::State) + .returns(T.anything) + end + def self.coerce( + target, + value, + # The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. 
+ # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. + state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + ) end # @api private sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } def self.dump(target, value) end - - # @api private - # - # The underlying algorithm for computing maximal compatibility is subject to - # future improvements. - # - # Similar to `#.coerce`, used to determine the best union variant to decode into. - # - # 1. determine if strict-ish coercion is possible - # 2. return either result of successful coercion or if loose coercion is possible - # 3. return a score for recursively tallied count for fields that can be coerced - sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } - def self.try_strict_coerce(target, value) - end end end @@ -95,23 +110,14 @@ module OpenAI class << self # @api private - sig(:final) { override.params(value: T.anything).returns(T.anything) } - def coerce(value) + sig(:final) { override.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } + def coerce(value, state:) end # @api private sig(:final) { override.params(value: T.anything).returns(T.anything) } def dump(value) end - - # @api private - sig(:final) do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end end end @@ -135,9 +141,11 @@ module OpenAI class << self # @api private sig(:final) do - override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) + override + .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Converter::State) + .returns(T.any(T::Boolean, T.anything)) end - def coerce(value) + def coerce(value, state:) end # @api private @@ -146,15 +154,6 @@ module OpenAI end def dump(value) end - - # @api private - sig(:final) do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end end end @@ -194,23 +193,21 @@ module OpenAI end # @api private - sig { override.params(value: T.any(String, Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def coerce(value) + # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. 
+ sig do + override + .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Converter::State) + .returns(T.any(Symbol, T.anything)) + end + def coerce(value, state:) end # @api private sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } def dump(value) end - - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end end # @api private @@ -264,23 +261,14 @@ module OpenAI end # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def coerce(value) + sig { override.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } + def coerce(value, state:) end # @api private sig { override.params(value: T.anything).returns(T.anything) } def dump(value) end - - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end end # @api private @@ -317,10 +305,10 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Enumerable[T.anything], T.anything)) + .params(value: T.any(T::Enumerable[T.anything], T.anything), state: OpenAI::Converter::State) .returns(T.any(T::Array[T.anything], T.anything)) end - def coerce(value) + def coerce(value, state:) end # @api private @@ -333,17 +321,13 @@ module OpenAI end # @api private - sig(:final) do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) + sig(:final) { returns(T.anything) } + protected def item_type end # @api private - sig(:final) { returns(T.anything) } - protected def item_type + sig(:final) { returns(T::Boolean) } + protected def nilable? end # @api private @@ -396,10 +380,10 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) + .params(value: T.any(T::Hash[T.anything, T.anything], T.anything), state: OpenAI::Converter::State) .returns(T.any(OpenAI::Util::AnyHash, T.anything)) end - def coerce(value) + def coerce(value, state:) end # @api private @@ -412,17 +396,13 @@ module OpenAI end # @api private - sig(:final) do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) + sig(:final) { returns(T.anything) } + protected def item_type end # @api private - sig(:final) { returns(T.anything) } - protected def item_type + sig(:final) { returns(T::Boolean) } + protected def nilable? end # @api private @@ -446,7 +426,7 @@ module OpenAI abstract! 
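These RBI updates only re-declare signatures; the behavioral contract sits in the Ruby source earlier in the patch, where model accessors coerce lazily with `strictness: :strong` and fall back to `model[:field]` for raw access. A standalone sketch of that read pattern, assuming nothing from the SDK (`LazyModel` and `Event` are invented names here):

```ruby
require "time"

# Keep the raw payload; coerce per field on read; expose raw access via #[].
class LazyModel
  def self.field(name, &coercer)
    define_method(name) do
      coercer.call(@data[name])
    rescue StandardError
      raise ArgumentError,
            "Failed to parse #{self.class.name}.#{name}. " \
            "To get the unparsed API response, use #{self.class.name}[:#{name}]."
    end
  end

  def initialize(data)
    @data = data
  end

  def [](key) = @data[key]
end

class Event < LazyModel
  field(:at) { |raw| Time.parse(raw) }
end

event = Event.new(at: "not-a-timestamp")
event[:at] # => "not-a-timestamp" (raw value, never raises)

begin
  event.at # typed read: coercion happens here, not at construction time
rescue ArgumentError => e
  puts e.message
end
```

Deferring coercion to read time means a response with one malformed field still constructs, and only the code that actually touches that field pays for the failure.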
- KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean} } + KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } class << self # @api private @@ -465,11 +445,6 @@ module OpenAI def known_fields end - # @api private - sig { returns(T::Hash[Symbol, Symbol]) } - def reverse_map - end - # @api private sig do returns(T::Hash[Symbol, T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Converter::Input})]) @@ -477,11 +452,6 @@ module OpenAI def fields end - # @api private - sig { returns(T::Hash[Symbol, T.proc.returns(T::Class[T.anything])]) } - def defaults - end - # @api private sig do params( @@ -551,6 +521,10 @@ module OpenAI sig { params(blk: T.proc.void).void } private def response_only(&blk) end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end end sig { params(other: T.anything).returns(T::Boolean) } @@ -561,10 +535,13 @@ module OpenAI # @api private sig do override - .params(value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything)) + .params( + value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), + state: OpenAI::Converter::State + ) .returns(T.any(T.attached_class, T.anything)) end - def coerce(value) + def coerce(value, state:) end # @api private @@ -575,15 +552,6 @@ module OpenAI end def dump(value) end - - # @api private - sig do - override - .params(value: T.anything) - .returns(T.any([T::Boolean, T.anything, NilClass], [T::Boolean, T::Boolean, Integer])) - end - def try_strict_coerce(value) - end end # Returns the raw value associated with the given key, if found. Otherwise, nil is diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index ff01cbf3..7f741683 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -2,13 +2,16 @@ module OpenAI module Converter type input = OpenAI::Converter | Class - def coerce: (top value) -> top + type state = + { + strictness: bool | :strong, + exactness: { yes: Integer, no: Integer, maybe: Integer }, + branched: Integer + } - def dump: (top value) -> top + def coerce: (top value, state: OpenAI::Converter::state) -> top - def try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) + def dump: (top value) -> top def self.type_info: ( { @@ -20,14 +23,13 @@ module OpenAI | OpenAI::Converter::input spec ) -> (^-> top) - def self.coerce: (OpenAI::Converter::input target, top value) -> top - - def self.dump: (OpenAI::Converter::input target, top value) -> top - - def self.try_strict_coerce: ( + def self.coerce: ( OpenAI::Converter::input target, - top value + top value, + ?state: OpenAI::Converter::state ) -> top + + def self.dump: (OpenAI::Converter::input target, top value) -> top end class Unknown @@ -37,13 +39,9 @@ module OpenAI def self.==: (top other) -> bool - def self.coerce: (top value) -> top + def self.coerce: (top value, state: OpenAI::Converter::state) -> top def self.dump: (top value) -> top - - def self.try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) end class BooleanModel @@ -53,13 +51,12 @@ module OpenAI def self.==: (top other) -> bool - def self.coerce: (bool | top value) -> (bool | top) + def self.coerce: ( + bool | top value, + state: OpenAI::Converter::state + ) -> (bool | top) def self.dump: (bool | top value) -> (bool | top) - - def self.try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) end module Enum @@ -73,13 +70,12 @@ module OpenAI def ==: 
(top other) -> bool - def coerce: (String | Symbol | top value) -> (Symbol | top) + def coerce: ( + String | Symbol | top value, + state: OpenAI::Converter::state + ) -> (Symbol | top) def dump: (Symbol | top value) -> (Symbol | top) - - def try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) end module Union @@ -109,13 +105,9 @@ module OpenAI def ==: (top other) -> bool - def coerce: (top value) -> top + def coerce: (top value, state: OpenAI::Converter::state) -> top def dump: (top value) -> top - - def try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) end class ArrayOf @@ -132,16 +124,17 @@ module OpenAI def ==: (top other) -> bool - def coerce: (Enumerable[top] | top value) -> (::Array[top] | top) + def coerce: ( + Enumerable[top] | top value, + state: OpenAI::Converter::state + ) -> (::Array[top] | top) def dump: (Enumerable[top] | top value) -> (::Array[top] | top) - def try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> top + def nilable?: -> bool + def initialize: ( ::Hash[Symbol, top] | ^-> OpenAI::Converter::input @@ -164,16 +157,17 @@ module OpenAI def ==: (top other) -> bool - def coerce: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) + def coerce: ( + ::Hash[top, top] | top value, + state: OpenAI::Converter::state + ) -> (::Hash[Symbol, top] | top) def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) - def try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) - def item_type: -> top + def nilable?: -> bool + def initialize: ( ::Hash[Symbol, top] | ^-> OpenAI::Converter::input @@ -185,18 +179,15 @@ module OpenAI class BaseModel extend OpenAI::Converter - type known_field = { mode: (:coerce | :dump)?, required: bool } + type known_field = + { mode: (:coerce | :dump)?, required: bool, nilable: bool } def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field & { type_fn: (^-> OpenAI::Converter::input) })] - def self.reverse_map: -> ::Hash[Symbol, Symbol] - def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field & { type: OpenAI::Converter::input })] - def self.defaults: -> ::Hash[Symbol, (^-> Class)] - private def self.add_field: ( Symbol name_sym, required: bool, @@ -231,18 +222,17 @@ module OpenAI private def self.response_only: { -> void } -> void + def self.==: (top other) -> bool + def ==: (top other) -> bool def self.coerce: ( - OpenAI::BaseModel | ::Hash[top, top] | top value + OpenAI::BaseModel | ::Hash[top, top] | top value, + state: OpenAI::Converter::state ) -> (instance | top) def self.dump: (instance | top value) -> (::Hash[top, top] | top) - def self.try_strict_coerce: ( - top value - ) -> ([true, top, nil] | [false, bool, Integer]) - def []: (Symbol key) -> top? 
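The `dump` signature above is unchanged in shape, but the tests later in this patch pin down its less obvious rules: response-only fields are never serialized, and constant fields can be injected into the output even when absent from the instance. A condensed standalone sketch of those two rules, using an invented field-spec shorthand rather than the SDK's `known_fields` layout:

```ruby
# Serialize a model hash: drop response-only fields, inject missing consts.
def dump_fields(data, specs)
  out = {}
  data.each do |key, value|
    spec = specs[key]
    next if spec && spec[:mode] == :response_only # never echoed back to the API
    out[key] = value
  end
  specs.each do |key, spec|
    next if spec[:mode] == :response_only || out.key?(key) || !spec.key?(:const)
    out[key] = spec[:const] # constant discriminators always serialize
  end
  out
end

specs = {a: {}, c: {mode: :response_only}, d: {const: :d}}
p dump_fields({a: 1, c: 2}, specs) # => {a: 1, d: :d}
```

That matches the `test_dump` cases further down, where `M5.new` dumps to `{c: :c}`: the request-only const `c` is filled in and the response-only const `d` is dropped.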
def to_h: -> ::Hash[Symbol, top] diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index bb5fb2a6..f1c37432 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -2,380 +2,576 @@ require_relative "test_helper" -class OpenAI::Test::BaseModelTest < Minitest::Test - module E1 - extend OpenAI::Enum +class OpenAI::Test::PrimitiveModelTest < Minitest::Test + A = OpenAI::ArrayOf[-> { Integer }] + H = OpenAI::HashOf[-> { Integer }, nil?: true] - A = :a - B = :b + module E + extend OpenAI::Enum end - A1 = OpenAI::ArrayOf[-> { Integer }] - A2 = OpenAI::ArrayOf[enum: -> { E1 }] + module U + extend OpenAI::Union + end - def test_basic - assert(E1.is_a?(OpenAI::Converter)) - assert(A1.is_a?(OpenAI::Converter)) + class B < OpenAI::BaseModel + optional :a, Integer + optional :b, B end - def test_basic_coerce - assert_pattern do - OpenAI::Converter.coerce(A1, [1.0, 2.0, 3.0]) => [1, 2, 3] - end + def test_typing + converters = [ + OpenAI::Unknown, + OpenAI::BooleanModel, + A, + H, + E, + U, + B + ] - assert_pattern do - OpenAI::Converter.coerce(A2, %w[a b c]) => [:a, :b, "c"] + converters.each do |conv| + assert_pattern do + conv => OpenAI::Converter + end end end - def test_basic_dump - assert_pattern do - OpenAI::Converter.dump(A1, [1.0, 2.0, 3.0]) => [1, 2, 3] - end + def test_coerce + cases = { + [OpenAI::Unknown, :a] => [{yes: 1}, :a], + [NilClass, :a] => [{maybe: 1}, nil], + [NilClass, nil] => [{yes: 1}, nil], + [OpenAI::BooleanModel, true] => [{yes: 1}, true], + [OpenAI::BooleanModel, "true"] => [{no: 1}, "true"], + [Integer, 1] => [{yes: 1}, 1], + [Integer, 1.0] => [{maybe: 1}, 1], + [Integer, "1"] => [{maybe: 1}, 1], + [Integer, "one"] => [{no: 1}, "one"], + [Float, 1] => [{yes: 1}, 1.0], + [Float, "1"] => [{maybe: 1}, 1.0], + [Float, :one] => [{no: 1}, :one], + [String, :str] => [{yes: 1}, "str"], + [String, "str"] => [{yes: 1}, "str"], + [String, 1] => [{maybe: 1}, "1"], + [:a, "a"] => [{yes: 1}, :a], + [Date, "1990-09-19"] => [{yes: 1}, Date.new(1990, 9, 19)], + [Date, Date.new(1990, 9, 19)] => [{yes: 1}, Date.new(1990, 9, 19)], + [Date, "one"] => [{no: 1}, "one"], + [Time, "1990-09-19"] => [{yes: 1}, Time.new(1990, 9, 19)], + [Time, Time.new(1990, 9, 19)] => [{yes: 1}, Time.new(1990, 9, 19)], + [Time, "one"] => [{no: 1}, "one"] + } - assert_pattern do - OpenAI::Converter.dump(A2, %w[a b c]) => %w[a b c] + cases.each do |lhs, rhs| + target, input = lhs + exactness, expect = rhs + state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + assert_pattern do + OpenAI::Converter.coerce(target, input, state: state) => ^expect + state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness + end end end - def test_primitive_try_strict_coerce - d_now = Date.today - t_now = Time.now - + def test_dump cases = { - [NilClass, :a] => [true, nil, 0], - [NilClass, nil] => [true, nil, 1], - [Integer, 1.0] => [true, 1, 1], - [Float, 1] => [true, 1.0, 1], - [Date, d_now] => [true, d_now, 1], - [Time, t_now] => [true, t_now, 1] + [OpenAI::Unknown, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [A, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [H, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [E, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [U, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [B, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [String, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [:b, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [nil, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, + [OpenAI::BooleanModel, true] => true, + [OpenAI::BooleanModel, "true"] => "true", + [Integer, "1"] => "1", + [Float, 1] => 1, + [String, "one"] => "one", + [String, :one] => :one, + [:a, :b] => :b, + [:a, "a"] => "a" } - cases.each do |test, expect| - type, input = test + cases.each do + target, input = _1 + expect = _2 assert_pattern do - OpenAI::Converter.try_strict_coerce(type, input) => ^expect + OpenAI::Converter.dump(target, input) => ^expect end end end - def test_basic_enum_try_strict_coerce + def test_coerce_errors cases = { - :a => [true, :a, 1], - "a" => [true, :a, 1], - :c => [false, true, 0], - 1 => [false, false, 0] + [Integer, "one"] => TypeError, + [Float, "one"] => TypeError, + [String, Time] => TypeError, + [:a, "one"] => ArgumentError, + [Date, "one"] => ArgumentError, + [Time, "one"] => ArgumentError } - cases.each do |input, expect| - assert_pattern do - OpenAI::Converter.try_strict_coerce(E1, input) => ^expect + cases.each do + target, input = _1 + state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + assert_raises(_2) do + OpenAI::Converter.coerce(target, input, state: state) end end end +end + +class OpenAI::Test::EnumModelTest < Minitest::Test + module E1 + extend OpenAI::Enum + + TRUE = true + end + + module E2 + extend OpenAI::Enum + + ONE = 1 + TWO = 2 + end - def test_basic_array_try_strict_coerce + module E3 + extend OpenAI::Enum + + ONE = 1.0 + TWO = 2.0 + end + + module E4 + extend OpenAI::Enum + + ONE = :one + TWO = :two + end + + def test_coerce cases = { - [] => [true, [], 0], - nil => [false, false, 0], - [1, 2, 3] => [true, [1, 2, 3], 3], - [1.0, 2.0, 3.0] => [true, [1, 2, 3], 3], - [1, nil, 3] => [true, [1, nil, 3], 2], - [1, nil, nil] => [true, [1, nil, nil], 1], - [1, "two", 3] => [false, true, 2] + [E1, true] => [{yes: 1}, true], + [E1, false] => [{no: 1}, false], + [E1, :true] => [{no: 1}, :true], + + [E2, 1] => [{yes: 1}, 1], + [E2, 1.0] => [{yes: 1}, 1], + [E2, 1.2] => [{no: 1}, 1.2], + [E2, "1"] => [{no: 1}, "1"], + + [E3, 1.0] => [{yes: 1}, 1.0], + [E3, 1] => [{yes: 1}, 1.0], + [E3, "one"] => [{no: 1}, "one"], + + [E4, :one] => [{yes: 1}, :one], + [E4, "one"] => [{yes: 1}, :one], + [E4, "1"] => [{maybe: 1}, "1"], + [E4, :"1"] => [{maybe: 1}, :"1"], + [E4, 1] => [{no: 1}, 1] } - cases.each do |input, expect| + cases.each do |lhs, rhs| + target, input = lhs + exactness, expect = rhs + state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.try_strict_coerce(A1, input) => ^expect + 
OpenAI::Converter.coerce(target, input, state: state) => ^expect + state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness end end end - def test_nested_array_try_strict_coerce + def test_dump cases = { - %w[a b] => [true, [:a, :b], 2], - %w[a b c] => [false, true, 2] + [E1, true] => true, + [E1, "true"] => "true", + + [E2, 1.0] => 1.0, + [E2, 3] => 3, + [E2, "1.0"] => "1.0", + + [E3, 1.0] => 1.0, + [E3, 3] => 3, + [E3, "1.0"] => "1.0", + + [E4, :one] => :one, + [E4, "one"] => "one", + [E4, "1.0"] => "1.0" } - cases.each do |input, expect| + cases.each do + target, input = _1 + expect = _2 assert_pattern do - OpenAI::Converter.try_strict_coerce(A2, input) => ^expect + OpenAI::Converter.dump(target, input) => ^expect end end end +end - class M1 < OpenAI::BaseModel - required :a, Time - optional :b, E1, api_name: :renamed - required :c, A1 +class OpenAI::Test::CollectionModelTest < Minitest::Test + A1 = OpenAI::ArrayOf[-> { Integer }] + H1 = OpenAI::HashOf[Integer] - request_only do - required :w, Integer - optional :x, String - end + A2 = OpenAI::ArrayOf[H1] + H2 = OpenAI::HashOf[-> { A1 }] - response_only do - required :y, Integer - optional :z, String + A3 = OpenAI::ArrayOf[Integer, nil?: true] + H3 = OpenAI::HashOf[Integer, nil?: true] + + def test_coerce + cases = { + [A1, []] => [{yes: 1}, []], + [A1, {}] => [{no: 1}, {}], + [A1, [1, 2.0]] => [{yes: 2, maybe: 1}, [1, 2]], + [A1, ["1", 2.0]] => [{yes: 1, maybe: 2}, [1, 2]], + [H1, {}] => [{yes: 1}, {}], + [H1, []] => [{no: 1}, []], + [H1, {a: 1, b: 2}] => [{yes: 3}, {a: 1, b: 2}], + [H1, {"a" => 1, "b" => 2}] => [{yes: 3}, {a: 1, b: 2}], + [H1, {[] => 1}] => [{yes: 2, no: 1}, {[] => 1}], + [H1, {a: 1.5}] => [{yes: 1, maybe: 1}, {a: 1}], + + [A2, [{}, {"a" => 1}]] => [{yes: 4}, [{}, {a: 1}]], + [A2, [{"a" => "1"}]] => [{yes: 2, maybe: 1}, [{a: 1}]], + [H2, {a: [1, 2]}] => [{yes: 4}, {a: [1, 2]}], + [H2, {"a" => ["1", 2]}] => [{yes: 3, maybe: 1}, {a: [1, 2]}], + [H2, {"a" => ["one", 2]}] => [{yes: 3, no: 1}, {a: ["one", 2]}], + + [A3, [nil, 1]] => [{yes: 3}, [nil, 1]], + [A3, [nil, "1"]] => [{yes: 2, maybe: 1}, [nil, 1]], + [H3, {a: nil, b: "1"}] => [{yes: 2, maybe: 1}, {a: nil, b: 1}], + [H3, {a: nil}] => [{yes: 2}, {a: nil}] + } + + cases.each do |lhs, rhs| + target, input = lhs + exactness, expect = rhs + state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + assert_pattern do + OpenAI::Converter.coerce(target, input, state: state) => ^expect + state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness + end end end +end - class M2 < M1 - required :c, M1 +class OpenAI::Test::BaseModelTest < Minitest::Test + class M1 < OpenAI::BaseModel + required :a, Integer end - def test_model_accessors - now = Time.now.round(0) - model = M2.new(a: now.to_s, b: "b", renamed: "a", c: [1.0, 2.0, 3.0], w: 1, y: 1) + class M2 < M1 + required :a, Time + required :b, Integer, nil?: true + optional :c, String + end - cases = [ - [model.a, now], - [model.b, :a], - [model.c, [1, 2, 3]], - [model.w, 1], - [model.y, 1] - ] + class M3 < OpenAI::BaseModel + optional :c, const: :c + required :d, const: :d + end - cases.each do |input, expect| - assert_pattern do - input => ^expect - end + class M4 < M1 + request_only do + required :a, Integer + optional :b, String end - end - def test_model_conversion_accessor - model = M2.new(c: {}) - assert_pattern do - model.c => M1 + response_only do + required :c, Integer + optional :d, String end end - def test_model_equality - now = Time.now - model1 = M2.new(a: now, b: "b", renamed: "a", c: M1.new, w: 1, y: 1) - model2 = M2.new(a: now, b: "b", renamed: "a", c: M1.new, w: 1, y: 1) + class M5 < OpenAI::BaseModel + request_only do + required :c, const: :c + end - assert_pattern do - model2 => ^model1 + response_only do + required :d, const: :d end end - def test_basic_model_coerce + class M6 < M1 + required :a, OpenAI::ArrayOf[M6] + end + + def test_coerce cases = { - {} => M2.new, - {a: nil, b: :a, c: [1.0, 2.0, 3.0], w: 1} => M2.new(a: nil, b: :a, c: [1.0, 2.0, 3.0], w: 1) + [M1, {}] => [{yes: 1, no: 1}, {}], + [M1, :m1] => [{no: 1}, :m1], + + [M2, {}] => [{yes: 2, no: 1, maybe: 1}, {}], + [M2, {a: "1990-09-19", b: nil}] => [{yes: 4}, {a: "1990-09-19", b: nil}], + [M2, {a: "1990-09-19", b: "1"}] => [{yes: 3, maybe: 1}, {a: "1990-09-19", b: "1"}], + [M2, {a: "1990-09-19"}] => [{yes: 3, maybe: 1}, {a: "1990-09-19"}], + [M2, {a: "1990-09-19", c: nil}] => [{yes: 2, maybe: 2}, {a: "1990-09-19", c: nil}], + + [M3, {c: "c", d: "d"}] => [{yes: 3}, {c: :c, d: :d}], + [M3, {c: "d", d: "c"}] => [{yes: 1, no: 2}, {c: "d", d: "c"}], + + [M4, {c: 2}] => [{yes: 5}, {c: 2}], + [M4, {a: "1", c: 2}] => [{yes: 4, maybe: 1}, {a: "1", c: 2}], + [M4, {b: nil, c: 2}] => [{yes: 4, maybe: 1}, {b: nil, c: 2}], + + [M5, {}] => [{yes: 3}, {}], + [M5, {c: "c"}] => [{yes: 3}, {c: :c}], + [M5, {d: "d"}] => [{yes: 3}, {d: :d}], + [M5, {d: nil}] => [{yes: 2, no: 1}, {d: nil}], + + [M6, {a: [{a: []}]}] => [{yes: 4}, -> { _1 in {a: [M6]} }] } - cases.each do |input, expect| + cases.each do |lhs, rhs| + target, input = lhs + exactness, expect = rhs + state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.coerce(M2, input) => ^expect + coerced = OpenAI::Converter.coerce(target, input, state: state) + assert_equal(coerced, coerced) + if coerced.is_a?(OpenAI::BaseModel) + coerced.to_h => ^expect + else + coerced => ^expect + end + state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness end end end - def test_basic_model_dump + def test_dump cases = { - nil => nil, - {} => {}, - {w: 1, x: "x", y: 1, z: "z"} => {w: 1, x: "x"}, - [1, 2, 3] => [1, 2, 3] + [M3, M3.new] => {d: :d}, + [M3, {}] => {d: :d}, + [M3, {d: 1}] => {d: 1}, + + [M4, M4.new(a: 1, b: "b", c: 2, d: "d")] => {a: 1, b: "b"}, + [M4, {a: 1, b: "b", c: 2, d: "d"}] => {a: 1, b: "b"}, + + [M5, M5.new] => {c: :c}, + [M5, {}] => {c: :c}, + [M5, {c: 1}] => {c: 1} } - cases.each do |input, expect| + cases.each do + target, input = _1 + expect = _2 assert_pattern do - OpenAI::Converter.dump(M2, input) => ^expect + OpenAI::Converter.dump(target, input) => ^expect end end end - def test_basic_model_try_strict_coerce - raw = {a: Time.now, c: [2], y: 1} - addn = {x: "x", n: "n"} - expect_exact = M1.new(raw) - expect_addn = M1.new(**raw, **addn) - + def test_accessors cases = { - {} => [false, true, 0], - raw => [true, expect_exact, 3], - {**raw, **addn} => [true, expect_addn, 4] + M2.new({a: "1990-09-19", b: "1"}) => {a: Time.new(1990, 9, 19), b: TypeError}, + M2.new(a: "one", b: "one") => {a: ArgumentError, b: TypeError}, + M2.new(a: nil, b: 2.0) => {a: TypeError, b: TypeError}, + + M3.new => {d: :d}, + M3.new(d: 1) => {d: ArgumentError}, + + M5.new => {c: :c, d: :d} } - cases.each do |input, expect| - assert_pattern do - OpenAI::Converter.try_strict_coerce(M1, input) => ^expect + cases.each do + target = _1 + _2.each do |accessor, expect| + case expect + in Class if expect <= StandardError + tap do + target.public_send(accessor) + flunk + rescue OpenAI::ConversionError => e + assert_kind_of(expect, e.cause) + end + else + assert_pattern { target.public_send(accessor) => ^expect } + end end end end +end - def test_nested_model_dump - now = Time.now - models = [M1, M2] - inputs = [ - M1.new(a: now, b: "a", c: [1.0, 2.0, 3.0], y: 1), - {a: now, b: "a", c: [1.0, 2.0, 3.0], y: 1}, - {"a" => now, b: "", "b" => "a", "c" => [], :c => [1.0, 2.0, 3.0], "y" => 1} - ] +class OpenAI::Test::UnionTest < Minitest::Test + module U0 + extend OpenAI::Union + end - models.product(inputs).each do |model, input| - assert_pattern do - OpenAI::Converter.dump(model, input) => {a: now, renamed: "a", c: [1, 2, 3]} - end - end + module U1 + extend OpenAI::Union + variant const: :a + variant const: 2 end - class M4 < M2 - required :c, M1 - required :d, OpenAI::ArrayOf[M4] - required :e, M2, api_name: :f + class M1 < OpenAI::BaseModel + required :t, const: :a, api_name: :type + optional :c, String end - def test_model_to_h - model = M4.new(a: "wow", c: {}, d: [{}, 2, {c: {}}], f: {}) - assert_pattern do - model.to_h => {a: "wow", c: M1, d: [M4, 2, M4 => child], f: M2} - assert_equal({c: M1.new}, child.to_h) - end + class M2 < OpenAI::BaseModel + required :type, const: :b + optional :c, String end - A3 = OpenAI::ArrayOf[A1] + module U2 + extend OpenAI::Union + discriminator :type - class M3 < M1 - optional :b, E1, api_name: :renamed_again + variant :a, M1 + variant :b, M2 end - module U1 + module U3 extend OpenAI::Union - discriminator :type + variant :a, M1 - variant :b, M3 + variant String end - module U2 + module U4 extend OpenAI::Union + discriminator :type - variant A1 - variant A3 + variant String + variant :a, M1 end - def test_basic_union - assert(U1.is_a?(OpenAI::Converter)) + class M3 < OpenAI::BaseModel + optional :recur, -> { U5 } + required :a, Integer + end - assert_pattern do - M1.new => U1 - M3.new => U1 - end + class M4 < OpenAI::BaseModel + optional :recur, -> { U5 } + required :a, OpenAI::ArrayOf[-> { U5 }] end 
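The `-> { M3 }` thunks in `M3`, `M4`, and `U5` above are what make mutually recursive definitions possible: `M3` references `U5` before `U5` exists, so the union has to defer constant lookup from class-body time to coercion time. A toy illustration of the mechanism follows; the `Union`/`Tree`/`Leaf`/`Branch` classes are invented, and this version takes the first matching variant instead of scoring branches the way the SDK does:

```ruby
# Variants are stored as thunks; the constant inside each thunk is resolved
# only when coercion first needs it, so load order stops mattering.
class Union
  def self.variants = (@variants ||= [])
  def self.variant(&thunk) = variants << thunk

  def self.coerce(value)
    variants.each do |thunk|
      target = thunk.call # constant lookup happens here, lazily
      return target.coerce(value) if target.match?(value)
    end
    value
  end
end

class Tree < Union
  variant { Leaf }   # Leaf and Branch are not defined yet; that's the point
  variant { Branch }
end

class Leaf
  def self.match?(value) = value.is_a?(Integer)
  def self.coerce(value) = value
end

class Branch
  def self.match?(value) = value.is_a?(Array)
  def self.coerce(value) = value.map { Tree.coerce(_1) }
end

p Tree.coerce([1, [2, 3]]) # => [1, [2, 3]]
```

The `branched` counter in the test expectations below tracks how many variants a real coercion had to evaluate, which is why deeply recursive inputs such as `{a: [{a: []}]}` report larger `branched` values than shallow ones.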
- def test_basic_discriminated_union_coerce - common = {a: Time.now, c: [], w: 1} - cases = { - nil => nil, - {type: "a", **common} => M1.new(type: "a", **common), - {type: :b, **common} => M3.new(type: :b, **common), - {type: :c, xyz: 1} => {type: :c, xyz: 1} - } + class M5 < OpenAI::BaseModel + optional :recur, -> { U5 } + required :b, OpenAI::ArrayOf[-> { U5 }] + end - cases.each do |input, expect| - assert_pattern do - OpenAI::Converter.coerce(U1, input) => ^expect - end - end + module U5 + extend OpenAI::Union + + variant -> { M3 } + variant -> { M4 } end - def test_basic_discriminated_union_dump - now = Time.now - cases = { - nil => nil, - M1.new(a: now, b: :a, c: [1.0, 2.0, 3.0], y: 1) => {a: now, renamed: :a, c: [1, 2, 3]}, - M3.new(b: "a", y: 1) => {renamed_again: "a"}, - {type: :a, b: "a", y: 1} => {type: :a, renamed: "a"}, - {type: "b", b: "a", y: 1} => {type: "b", renamed_again: "a"}, - {type: :c, xyz: 1} => {type: :c, xyz: 1} - } + module U6 + extend OpenAI::Union - cases.each do |input, expect| - assert_pattern do - OpenAI::Converter.dump(U1, input) => ^expect - end + variant -> { M3 } + variant -> { M5 } + end + + def test_accessors + model = M3.new(recur: []) + tap do + model.recur + flunk + rescue OpenAI::ConversionError => e + assert_kind_of(ArgumentError, e.cause) end end - def test_basic_undifferentiated_union_try_strict_coerce + def test_coerce cases = { - [] => [true, [], 0], - [[]] => [true, [[]], 0], - # [nil] => [false, true, 0], - [1, 2, 3] => [true, [1, 2, 3], 3], - [[1, 2, 3], [4, 5, 6]] => [true, [[1, 2, 3], [4, 5, 6]], 6] + [U0, :""] => [{no: 1}, 0, :""], + + [U1, "a"] => [{yes: 1}, 1, :a], + [U1, "2"] => [{maybe: 1}, 2, 2], + [U1, :b] => [{no: 1}, 2, :b], + + [U2, {type: :a}] => [{yes: 3}, 0, {t: :a}], + [U2, {type: "b"}] => [{yes: 3}, 0, {type: :b}], + + [U3, "one"] => [{yes: 1}, 2, "one"], + [U4, "one"] => [{yes: 1}, 1, "one"], + + [U5, {a: []}] => [{yes: 3}, 2, {a: []}], + [U6, {b: []}] => [{yes: 3}, 2, {b: []}], + + [U5, {a: [{a: []}]}] => [{yes: 6}, 4, {a: [M4.new(a: [])]}], + [U5, {a: [{a: [{a: []}]}]}] => [{yes: 9}, 6, {a: [M4.new(a: [M4.new(a: [])])]}] } - cases.each do |input, expect| + cases.each do |lhs, rhs| + target, input = lhs + exactness, branched, expect = rhs + state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.try_strict_coerce(U2, input) => ^expect + coerced = OpenAI::Converter.coerce(target, input, state: state) + assert_equal(coerced, coerced) + if coerced.is_a?(OpenAI::BaseModel) + coerced.to_h => ^expect + else + coerced => ^expect + end + state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness + state => {branched: ^branched} end end end +end - class C1 < OpenAI::BaseModel - required :a, const: :a - required :b, const: :b, nil?: true - optional :c, const: :c - end +class OpenAI::Test::BaseModelQoLTest < Minitest::Test + module E1 + extend OpenAI::Enum - def test_basic_const - assert_pattern do - C1.dump(C1.new) => {a: :a} - C1.new => {a: :a} - C1.new(a: "a") => {a: :a} - C1.new(b: 2) => {b: 2} - C1.new.a => :a - C1.new.b => nil - C1.new.c => nil - end + A = 1 end module E2 extend OpenAI::Enum - A = :a - B = :b + A = 1 end - module U3 - extend OpenAI::Union + module E3 + extend OpenAI::Enum - discriminator :type - variant :a, M1 - variant :b, M3 + A = 2 + B = 3 end - def test_basic_eql - assert_equal(OpenAI::Unknown, OpenAI::Unknown) - refute_equal(OpenAI::Unknown, OpenAI::BooleanModel) - assert_equal(OpenAI::BooleanModel, OpenAI::BooleanModel) - - assert_equal(E1, E2) - assert_equal(E1, E2) - - refute_equal(U1, U2) - assert_equal(U1, U3) + class M1 < OpenAI::BaseModel + required :a, Integer end - module U4 - extend OpenAI::Union + class M2 < OpenAI::BaseModel + required :a, Integer, nil?: true + end - variant :a, const: :a - variant :b, const: :b + class M3 < M2 + required :a, Integer end - def test_basic_const_union - assert_pattern do - U4.coerce(nil) => nil - U4.coerce("") => "" - U4.coerce(:a) => :a - U4.coerce("a") => :a + def test_equality + cases = { + [OpenAI::Unknown, OpenAI::Unknown] => true, + [OpenAI::BooleanModel, OpenAI::BooleanModel] => true, + [OpenAI::Unknown, OpenAI::BooleanModel] => false, + [E1, E2] => true, + [E1, E3] => false, + [M1, M2] => false, + [M1, M3] => true + } + + cases.each do + if _2 + assert_equal(*_1) + else + refute_equal(*_1) + end end end end From a04f6a18529cdf4708e7810657960f260096c252 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 29 Mar 2025 13:35:28 +0000 Subject: [PATCH 066/295] chore: remove unnecessary & confusing module (#69) --- lib/openai.rb | 1 - lib/openai/base_model.rb | 3 --- lib/openai/extern.rb | 7 ------- rbi/lib/openai/base_client.rbi | 2 +- rbi/lib/openai/base_model.rbi | 6 +----- rbi/lib/openai/extern.rbi | 7 ------- sig/openai/base_client.rbs | 2 +- sig/openai/base_model.rbs | 2 -- sig/openai/extern.rbs | 4 ---- 9 files changed, 3 insertions(+), 31 deletions(-) delete mode 100644 lib/openai/extern.rb delete mode 100644 rbi/lib/openai/extern.rbi delete mode 100644 sig/openai/extern.rbs diff --git a/lib/openai.rb b/lib/openai.rb index 7a735871..3106b191 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -36,7 +36,6 @@ # Package files. 
require_relative "openai/version" require_relative "openai/util" -require_relative "openai/extern" require_relative "openai/base_model" require_relative "openai/base_page" require_relative "openai/base_stream" diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb index 2d37a563..20a05eb9 100644 --- a/lib/openai/base_model.rb +++ b/lib/openai/base_model.rb @@ -1165,9 +1165,6 @@ def initialize(data = {}) end end - # @return [String] - def to_s = @data.to_s - # @return [String] def inspect rows = self.class.known_fields.keys.map do diff --git a/lib/openai/extern.rb b/lib/openai/extern.rb deleted file mode 100644 index 1ab41492..00000000 --- a/lib/openai/extern.rb +++ /dev/null @@ -1,7 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @api private - module Extern - end -end diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi index 92d2484d..580d6b5b 100644 --- a/rbi/lib/openai/base_client.rbi +++ b/rbi/lib/openai/base_client.rbi @@ -84,7 +84,7 @@ module OpenAI end # @api private - sig { returns(T.anything) } + sig { returns(OpenAI::PooledNetRequester) } attr_accessor :requester # @api private diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi index 0f1d214f..412a31e7 100644 --- a/rbi/lib/openai/base_model.rbi +++ b/rbi/lib/openai/base_model.rbi @@ -38,7 +38,7 @@ module OpenAI OpenAI::Converter::Input ) ) - .returns(T.proc.returns(T.anything).void) + .returns(T.proc.returns(T.anything)) end def self.type_info(spec) end @@ -597,10 +597,6 @@ module OpenAI def self.new(data = {}) end - sig { returns(String) } - def to_s - end - sig { returns(String) } def inspect end diff --git a/rbi/lib/openai/extern.rbi b/rbi/lib/openai/extern.rbi deleted file mode 100644 index e5e18a8d..00000000 --- a/rbi/lib/openai/extern.rbi +++ /dev/null @@ -1,7 +0,0 @@ -# typed: strong - -module OpenAI - # @api private - module Extern - end -end diff --git a/sig/openai/base_client.rbs b/sig/openai/base_client.rbs index 38596607..319bd07e 100644 --- a/sig/openai/base_client.rbs +++ b/sig/openai/base_client.rbs @@ -49,7 +49,7 @@ module OpenAI ) -> void # @api private - attr_accessor requester: top + attr_accessor requester: OpenAI::PooledNetRequester def initialize: ( base_url: String, diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs index 7f741683..a15f603e 100644 --- a/sig/openai/base_model.rbs +++ b/sig/openai/base_model.rbs @@ -243,8 +243,6 @@ module OpenAI def initialize: (?::Hash[Symbol, top] | self data) -> void - def to_s: -> String - def inspect: -> String end end diff --git a/sig/openai/extern.rbs b/sig/openai/extern.rbs deleted file mode 100644 index 23069f69..00000000 --- a/sig/openai/extern.rbs +++ /dev/null @@ -1,4 +0,0 @@ -module OpenAI - module Extern - end -end From 703fc8880b0c7cb7626e93b62ff4ccddf8ad619f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 17:18:48 +0000 Subject: [PATCH 067/295] chore: relocate internal modules (#70) --- lib/openai.rb | 19 +- lib/openai/base_client.rb | 457 ------- lib/openai/base_model.rb | 1178 ----------------- lib/openai/base_page.rb | 59 - lib/openai/base_stream.rb | 61 - lib/openai/client.rb | 2 +- lib/openai/cursor_page.rb | 6 +- lib/openai/errors.rb | 6 +- .../models/audio/speech_create_params.rb | 2 +- .../audio/transcription_create_params.rb | 2 +- .../models/audio/translation_create_params.rb | 2 +- lib/openai/models/batch_cancel_params.rb | 2 +- lib/openai/models/batch_create_params.rb | 2 +- 
lib/openai/models/batch_list_params.rb | 2 +- lib/openai/models/batch_retrieve_params.rb | 2 +- .../models/beta/assistant_create_params.rb | 2 +- .../models/beta/assistant_delete_params.rb | 2 +- .../models/beta/assistant_list_params.rb | 2 +- .../models/beta/assistant_retrieve_params.rb | 2 +- .../models/beta/assistant_update_params.rb | 2 +- .../beta/thread_create_and_run_params.rb | 2 +- .../models/beta/thread_create_params.rb | 2 +- .../models/beta/thread_delete_params.rb | 2 +- .../models/beta/thread_retrieve_params.rb | 2 +- .../models/beta/thread_update_params.rb | 2 +- .../beta/threads/message_create_params.rb | 2 +- .../beta/threads/message_delete_params.rb | 2 +- .../beta/threads/message_list_params.rb | 2 +- .../beta/threads/message_retrieve_params.rb | 2 +- .../beta/threads/message_update_params.rb | 2 +- .../models/beta/threads/run_cancel_params.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../models/beta/threads/run_list_params.rb | 2 +- .../beta/threads/run_retrieve_params.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- .../models/beta/threads/run_update_params.rb | 2 +- .../beta/threads/runs/step_list_params.rb | 2 +- .../beta/threads/runs/step_retrieve_params.rb | 2 +- .../models/chat/completion_create_params.rb | 2 +- .../models/chat/completion_delete_params.rb | 2 +- .../models/chat/completion_list_params.rb | 2 +- .../models/chat/completion_retrieve_params.rb | 2 +- .../models/chat/completion_update_params.rb | 2 +- .../chat/completions/message_list_params.rb | 2 +- lib/openai/models/completion_create_params.rb | 2 +- lib/openai/models/embedding_create_params.rb | 2 +- lib/openai/models/file_content_params.rb | 2 +- lib/openai/models/file_create_params.rb | 2 +- lib/openai/models/file_delete_params.rb | 2 +- lib/openai/models/file_list_params.rb | 2 +- lib/openai/models/file_retrieve_params.rb | 2 +- .../models/fine_tuning/job_cancel_params.rb | 2 +- .../models/fine_tuning/job_create_params.rb | 2 +- .../fine_tuning/job_list_events_params.rb | 2 +- .../models/fine_tuning/job_list_params.rb | 2 +- .../models/fine_tuning/job_retrieve_params.rb | 2 +- .../jobs/checkpoint_list_params.rb | 2 +- .../models/image_create_variation_params.rb | 2 +- lib/openai/models/image_edit_params.rb | 2 +- lib/openai/models/image_generate_params.rb | 2 +- lib/openai/models/model_delete_params.rb | 2 +- lib/openai/models/model_list_params.rb | 2 +- lib/openai/models/model_retrieve_params.rb | 2 +- lib/openai/models/moderation_create_params.rb | 2 +- .../responses/input_item_list_params.rb | 2 +- .../responses/response_create_params.rb | 2 +- .../responses/response_delete_params.rb | 2 +- .../responses/response_retrieve_params.rb | 2 +- lib/openai/models/upload_cancel_params.rb | 2 +- lib/openai/models/upload_complete_params.rb | 2 +- lib/openai/models/upload_create_params.rb | 2 +- .../models/uploads/part_create_params.rb | 2 +- .../models/vector_store_create_params.rb | 2 +- .../models/vector_store_delete_params.rb | 2 +- lib/openai/models/vector_store_list_params.rb | 2 +- .../models/vector_store_retrieve_params.rb | 2 +- .../models/vector_store_search_params.rb | 2 +- .../models/vector_store_update_params.rb | 2 +- .../vector_stores/file_batch_cancel_params.rb | 2 +- .../vector_stores/file_batch_create_params.rb | 2 +- .../file_batch_list_files_params.rb | 2 +- .../file_batch_retrieve_params.rb | 2 +- .../vector_stores/file_content_params.rb | 2 +- .../vector_stores/file_create_params.rb | 2 +- .../vector_stores/file_delete_params.rb | 2 +- 
.../models/vector_stores/file_list_params.rb | 2 +- .../vector_stores/file_retrieve_params.rb | 2 +- .../vector_stores/file_update_params.rb | 2 +- lib/openai/page.rb | 6 +- lib/openai/pooled_net_requester.rb | 180 --- lib/openai/request_options.rb | 33 - lib/openai/stream.rb | 4 +- lib/openai/transport/base_client.rb | 459 +++++++ lib/openai/transport/pooled_net_requester.rb | 182 +++ lib/openai/type.rb | 23 + lib/openai/type/array_of.rb | 110 ++ lib/openai/type/base_model.rb | 355 +++++ lib/openai/type/base_page.rb | 61 + lib/openai/type/base_stream.rb | 63 + lib/openai/type/boolean_model.rb | 52 + lib/openai/type/converter.rb | 211 +++ lib/openai/type/enum.rb | 105 ++ lib/openai/type/hash_of.rb | 130 ++ lib/openai/type/request_parameters.rb | 38 + lib/openai/type/union.rb | 208 +++ lib/openai/type/unknown.rb | 56 + rbi/lib/openai/base_client.rbi | 196 --- rbi/lib/openai/base_model.rbi | 604 --------- rbi/lib/openai/base_page.rbi | 36 - rbi/lib/openai/base_stream.rbi | 41 - rbi/lib/openai/client.rbi | 2 +- rbi/lib/openai/cursor_page.rbi | 2 +- .../models/audio/speech_create_params.rbi | 2 +- .../audio/transcription_create_params.rbi | 2 +- .../audio/translation_create_params.rbi | 2 +- rbi/lib/openai/models/batch_cancel_params.rbi | 2 +- rbi/lib/openai/models/batch_create_params.rbi | 2 +- rbi/lib/openai/models/batch_list_params.rbi | 2 +- .../openai/models/batch_retrieve_params.rbi | 2 +- .../models/beta/assistant_create_params.rbi | 2 +- .../models/beta/assistant_delete_params.rbi | 2 +- .../models/beta/assistant_list_params.rbi | 2 +- .../models/beta/assistant_retrieve_params.rbi | 2 +- .../models/beta/assistant_update_params.rbi | 2 +- .../beta/thread_create_and_run_params.rbi | 7 +- .../models/beta/thread_create_params.rbi | 7 +- .../models/beta/thread_delete_params.rbi | 2 +- .../models/beta/thread_retrieve_params.rbi | 2 +- .../models/beta/thread_update_params.rbi | 2 +- .../beta/threads/message_create_params.rbi | 7 +- .../beta/threads/message_delete_params.rbi | 2 +- .../beta/threads/message_list_params.rbi | 2 +- .../beta/threads/message_retrieve_params.rbi | 2 +- .../beta/threads/message_update_params.rbi | 2 +- .../models/beta/threads/run_cancel_params.rbi | 2 +- .../models/beta/threads/run_create_params.rbi | 7 +- .../models/beta/threads/run_list_params.rbi | 2 +- .../beta/threads/run_retrieve_params.rbi | 2 +- .../run_submit_tool_outputs_params.rbi | 2 +- .../models/beta/threads/run_update_params.rbi | 2 +- .../beta/threads/runs/step_list_params.rbi | 2 +- .../threads/runs/step_retrieve_params.rbi | 2 +- ...hat_completion_assistant_message_param.rbi | 2 +- ...hat_completion_developer_message_param.rbi | 2 +- .../chat_completion_prediction_content.rbi | 2 +- .../chat_completion_system_message_param.rbi | 2 +- .../chat_completion_tool_message_param.rbi | 2 +- .../chat_completion_user_message_param.rbi | 2 +- .../models/chat/completion_create_params.rbi | 4 +- .../models/chat/completion_delete_params.rbi | 2 +- .../models/chat/completion_list_params.rbi | 2 +- .../chat/completion_retrieve_params.rbi | 2 +- .../models/chat/completion_update_params.rbi | 2 +- .../chat/completions/message_list_params.rbi | 2 +- .../models/completion_create_params.rbi | 10 +- .../openai/models/embedding_create_params.rbi | 8 +- rbi/lib/openai/models/file_content_params.rbi | 2 +- rbi/lib/openai/models/file_create_params.rbi | 2 +- rbi/lib/openai/models/file_delete_params.rbi | 2 +- rbi/lib/openai/models/file_list_params.rbi | 2 +- .../openai/models/file_retrieve_params.rbi | 2 +- 
.../models/fine_tuning/job_cancel_params.rbi | 2 +- .../models/fine_tuning/job_create_params.rbi | 2 +- .../fine_tuning/job_list_events_params.rbi | 2 +- .../models/fine_tuning/job_list_params.rbi | 2 +- .../fine_tuning/job_retrieve_params.rbi | 2 +- .../jobs/checkpoint_list_params.rbi | 2 +- rbi/lib/openai/models/function_parameters.rbi | 2 +- .../models/image_create_variation_params.rbi | 2 +- rbi/lib/openai/models/image_edit_params.rbi | 2 +- .../openai/models/image_generate_params.rbi | 2 +- rbi/lib/openai/models/metadata.rbi | 2 +- rbi/lib/openai/models/model_delete_params.rbi | 2 +- rbi/lib/openai/models/model_list_params.rbi | 2 +- .../openai/models/model_retrieve_params.rbi | 2 +- .../models/moderation_create_params.rbi | 6 +- .../responses/input_item_list_params.rbi | 2 +- .../responses/response_create_params.rbi | 2 +- .../responses/response_delete_params.rbi | 2 +- .../models/responses/response_input.rbi | 2 +- .../response_input_message_content_list.rbi | 2 +- .../responses/response_retrieve_params.rbi | 2 +- .../openai/models/upload_cancel_params.rbi | 2 +- .../openai/models/upload_complete_params.rbi | 2 +- .../openai/models/upload_create_params.rbi | 2 +- .../models/uploads/part_create_params.rbi | 2 +- .../models/vector_store_create_params.rbi | 2 +- .../models/vector_store_delete_params.rbi | 2 +- .../models/vector_store_list_params.rbi | 2 +- .../models/vector_store_retrieve_params.rbi | 2 +- .../models/vector_store_search_params.rbi | 4 +- .../models/vector_store_update_params.rbi | 2 +- .../file_batch_cancel_params.rbi | 2 +- .../file_batch_create_params.rbi | 2 +- .../file_batch_list_files_params.rbi | 2 +- .../file_batch_retrieve_params.rbi | 2 +- .../vector_stores/file_content_params.rbi | 2 +- .../vector_stores/file_create_params.rbi | 2 +- .../vector_stores/file_delete_params.rbi | 2 +- .../models/vector_stores/file_list_params.rbi | 2 +- .../vector_stores/file_retrieve_params.rbi | 2 +- .../vector_stores/file_update_params.rbi | 2 +- rbi/lib/openai/page.rbi | 2 +- rbi/lib/openai/pooled_net_requester.rbi | 59 - rbi/lib/openai/request_options.rbi | 15 - rbi/lib/openai/stream.rbi | 2 +- rbi/lib/openai/transport/base_client.rbi | 204 +++ .../openai/transport/pooled_net_requester.rbi | 64 + rbi/lib/openai/type.rbi | 23 + rbi/lib/openai/type/array_of.rbi | 80 ++ rbi/lib/openai/type/base_model.rbi | 189 +++ rbi/lib/openai/type/base_page.rbi | 38 + rbi/lib/openai/type/base_stream.rbi | 43 + rbi/lib/openai/type/boolean_model.rbi | 41 + rbi/lib/openai/type/converter.rbi | 99 ++ rbi/lib/openai/type/enum.rbi | 58 + rbi/lib/openai/type/hash_of.rbi | 84 ++ rbi/lib/openai/type/request_parameters.rbi | 20 + rbi/lib/openai/type/union.rbi | 66 + rbi/lib/openai/type/unknown.rbi | 37 + sig/openai/base_client.rbs | 106 -- sig/openai/base_model.rbs | 248 ---- sig/openai/base_page.rbs | 20 - sig/openai/base_stream.rbs | 23 - sig/openai/client.rbs | 2 +- sig/openai/cursor_page.rbs | 2 +- .../models/audio/speech_create_params.rbs | 2 +- .../audio/transcription_create_params.rbs | 2 +- .../audio/translation_create_params.rbs | 2 +- sig/openai/models/batch_cancel_params.rbs | 2 +- sig/openai/models/batch_create_params.rbs | 2 +- sig/openai/models/batch_list_params.rbs | 2 +- sig/openai/models/batch_retrieve_params.rbs | 2 +- .../models/beta/assistant_create_params.rbs | 2 +- .../models/beta/assistant_delete_params.rbs | 2 +- .../models/beta/assistant_list_params.rbs | 2 +- .../models/beta/assistant_retrieve_params.rbs | 2 +- .../models/beta/assistant_update_params.rbs | 2 +- 
.../beta/thread_create_and_run_params.rbs | 2 +- .../models/beta/thread_create_params.rbs | 2 +- .../models/beta/thread_delete_params.rbs | 2 +- .../models/beta/thread_retrieve_params.rbs | 2 +- .../models/beta/thread_update_params.rbs | 2 +- .../beta/threads/message_create_params.rbs | 2 +- .../beta/threads/message_delete_params.rbs | 2 +- .../beta/threads/message_list_params.rbs | 2 +- .../beta/threads/message_retrieve_params.rbs | 2 +- .../beta/threads/message_update_params.rbs | 2 +- .../models/beta/threads/run_cancel_params.rbs | 2 +- .../models/beta/threads/run_create_params.rbs | 2 +- .../models/beta/threads/run_list_params.rbs | 2 +- .../beta/threads/run_retrieve_params.rbs | 2 +- .../run_submit_tool_outputs_params.rbs | 2 +- .../models/beta/threads/run_update_params.rbs | 2 +- .../beta/threads/runs/step_list_params.rbs | 2 +- .../threads/runs/step_retrieve_params.rbs | 2 +- .../models/chat/completion_create_params.rbs | 2 +- .../models/chat/completion_delete_params.rbs | 2 +- .../models/chat/completion_list_params.rbs | 2 +- .../chat/completion_retrieve_params.rbs | 2 +- .../models/chat/completion_update_params.rbs | 2 +- .../chat/completions/message_list_params.rbs | 2 +- .../models/completion_create_params.rbs | 2 +- sig/openai/models/embedding_create_params.rbs | 2 +- sig/openai/models/file_content_params.rbs | 2 +- sig/openai/models/file_create_params.rbs | 2 +- sig/openai/models/file_delete_params.rbs | 2 +- sig/openai/models/file_list_params.rbs | 2 +- sig/openai/models/file_retrieve_params.rbs | 2 +- .../models/fine_tuning/job_cancel_params.rbs | 2 +- .../models/fine_tuning/job_create_params.rbs | 2 +- .../fine_tuning/job_list_events_params.rbs | 2 +- .../models/fine_tuning/job_list_params.rbs | 2 +- .../fine_tuning/job_retrieve_params.rbs | 2 +- .../jobs/checkpoint_list_params.rbs | 2 +- .../models/image_create_variation_params.rbs | 2 +- sig/openai/models/image_edit_params.rbs | 2 +- sig/openai/models/image_generate_params.rbs | 2 +- sig/openai/models/model_delete_params.rbs | 2 +- sig/openai/models/model_list_params.rbs | 2 +- sig/openai/models/model_retrieve_params.rbs | 2 +- .../models/moderation_create_params.rbs | 2 +- .../responses/input_item_list_params.rbs | 2 +- .../responses/response_create_params.rbs | 2 +- .../responses/response_delete_params.rbs | 2 +- .../responses/response_retrieve_params.rbs | 2 +- sig/openai/models/upload_cancel_params.rbs | 2 +- sig/openai/models/upload_complete_params.rbs | 2 +- sig/openai/models/upload_create_params.rbs | 2 +- .../models/uploads/part_create_params.rbs | 2 +- .../models/vector_store_create_params.rbs | 2 +- .../models/vector_store_delete_params.rbs | 2 +- .../models/vector_store_list_params.rbs | 2 +- .../models/vector_store_retrieve_params.rbs | 2 +- .../models/vector_store_search_params.rbs | 2 +- .../models/vector_store_update_params.rbs | 2 +- .../file_batch_cancel_params.rbs | 2 +- .../file_batch_create_params.rbs | 2 +- .../file_batch_list_files_params.rbs | 2 +- .../file_batch_retrieve_params.rbs | 2 +- .../vector_stores/file_content_params.rbs | 2 +- .../vector_stores/file_create_params.rbs | 2 +- .../vector_stores/file_delete_params.rbs | 2 +- .../models/vector_stores/file_list_params.rbs | 2 +- .../vector_stores/file_retrieve_params.rbs | 2 +- .../vector_stores/file_update_params.rbs | 2 +- sig/openai/page.rbs | 2 +- sig/openai/pooled_net_requester.rbs | 37 - sig/openai/request_options.rbs | 10 - sig/openai/stream.rbs | 2 +- sig/openai/transport/base_client.rbs | 110 ++ 
sig/openai/transport/pooled_net_requester.rbs | 39 + sig/openai/type.rbs | 22 + sig/openai/type/array_of.rbs | 36 + sig/openai/type/base_model.rbs | 73 + sig/openai/type/base_page.rbs | 22 + sig/openai/type/base_stream.rbs | 25 + sig/openai/type/boolean_model.rbs | 18 + sig/openai/type/converter.rbs | 36 + sig/openai/type/enum.rbs | 22 + sig/openai/type/hash_of.rbs | 36 + sig/openai/type/request_parameters.rbs | 13 + sig/openai/type/union.rbs | 37 + sig/openai/type/unknown.rbs | 15 + test/openai/base_model_test.rb | 20 +- 325 files changed, 3924 insertions(+), 3663 deletions(-) delete mode 100644 lib/openai/base_client.rb delete mode 100644 lib/openai/base_model.rb delete mode 100644 lib/openai/base_page.rb delete mode 100644 lib/openai/base_stream.rb delete mode 100644 lib/openai/pooled_net_requester.rb create mode 100644 lib/openai/transport/base_client.rb create mode 100644 lib/openai/transport/pooled_net_requester.rb create mode 100644 lib/openai/type.rb create mode 100644 lib/openai/type/array_of.rb create mode 100644 lib/openai/type/base_model.rb create mode 100644 lib/openai/type/base_page.rb create mode 100644 lib/openai/type/base_stream.rb create mode 100644 lib/openai/type/boolean_model.rb create mode 100644 lib/openai/type/converter.rb create mode 100644 lib/openai/type/enum.rb create mode 100644 lib/openai/type/hash_of.rb create mode 100644 lib/openai/type/request_parameters.rb create mode 100644 lib/openai/type/union.rb create mode 100644 lib/openai/type/unknown.rb delete mode 100644 rbi/lib/openai/base_client.rbi delete mode 100644 rbi/lib/openai/base_model.rbi delete mode 100644 rbi/lib/openai/base_page.rbi delete mode 100644 rbi/lib/openai/base_stream.rbi delete mode 100644 rbi/lib/openai/pooled_net_requester.rbi create mode 100644 rbi/lib/openai/transport/base_client.rbi create mode 100644 rbi/lib/openai/transport/pooled_net_requester.rbi create mode 100644 rbi/lib/openai/type.rbi create mode 100644 rbi/lib/openai/type/array_of.rbi create mode 100644 rbi/lib/openai/type/base_model.rbi create mode 100644 rbi/lib/openai/type/base_page.rbi create mode 100644 rbi/lib/openai/type/base_stream.rbi create mode 100644 rbi/lib/openai/type/boolean_model.rbi create mode 100644 rbi/lib/openai/type/converter.rbi create mode 100644 rbi/lib/openai/type/enum.rbi create mode 100644 rbi/lib/openai/type/hash_of.rbi create mode 100644 rbi/lib/openai/type/request_parameters.rbi create mode 100644 rbi/lib/openai/type/union.rbi create mode 100644 rbi/lib/openai/type/unknown.rbi delete mode 100644 sig/openai/base_client.rbs delete mode 100644 sig/openai/base_model.rbs delete mode 100644 sig/openai/base_page.rbs delete mode 100644 sig/openai/base_stream.rbs delete mode 100644 sig/openai/pooled_net_requester.rbs create mode 100644 sig/openai/transport/base_client.rbs create mode 100644 sig/openai/transport/pooled_net_requester.rbs create mode 100644 sig/openai/type.rbs create mode 100644 sig/openai/type/array_of.rbs create mode 100644 sig/openai/type/base_model.rbs create mode 100644 sig/openai/type/base_page.rbs create mode 100644 sig/openai/type/base_stream.rbs create mode 100644 sig/openai/type/boolean_model.rbs create mode 100644 sig/openai/type/converter.rbs create mode 100644 sig/openai/type/enum.rbs create mode 100644 sig/openai/type/hash_of.rbs create mode 100644 sig/openai/type/request_parameters.rbs create mode 100644 sig/openai/type/union.rbs create mode 100644 sig/openai/type/unknown.rbs diff --git a/lib/openai.rb b/lib/openai.rb index 3106b191..9231c014 100644 --- a/lib/openai.rb +++ 
b/lib/openai.rb @@ -36,13 +36,22 @@ # Package files. require_relative "openai/version" require_relative "openai/util" -require_relative "openai/base_model" -require_relative "openai/base_page" -require_relative "openai/base_stream" +require_relative "openai/type/converter" +require_relative "openai/type/unknown" +require_relative "openai/type/boolean_model" +require_relative "openai/type/enum" +require_relative "openai/type/union" +require_relative "openai/type/array_of" +require_relative "openai/type/hash_of" +require_relative "openai/type/base_model" +require_relative "openai/type/base_page" +require_relative "openai/type/base_stream" +require_relative "openai/type/request_parameters" +require_relative "openai/type" require_relative "openai/request_options" require_relative "openai/errors" -require_relative "openai/base_client" -require_relative "openai/pooled_net_requester" +require_relative "openai/transport/base_client" +require_relative "openai/transport/pooled_net_requester" require_relative "openai/client" require_relative "openai/stream" require_relative "openai/cursor_page" diff --git a/lib/openai/base_client.rb b/lib/openai/base_client.rb deleted file mode 100644 index 8234a2aa..00000000 --- a/lib/openai/base_client.rb +++ /dev/null @@ -1,457 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @api private - # - # @abstract - class BaseClient - # from whatwg fetch spec - MAX_REDIRECTS = 20 - - # rubocop:disable Style/MutableConstant - PLATFORM_HEADERS = - { - "x-stainless-arch" => OpenAI::Util.arch, - "x-stainless-lang" => "ruby", - "x-stainless-os" => OpenAI::Util.os, - "x-stainless-package-version" => OpenAI::VERSION, - "x-stainless-runtime" => ::RUBY_ENGINE, - "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION - } - # rubocop:enable Style/MutableConstant - - class << self - # @api private - # - # @param req [Hash{Symbol=>Object}] - # - # @raise [ArgumentError] - def validate!(req) - keys = [:method, :path, :query, :headers, :body, :unwrap, :page, :stream, :model, :options] - case req - in Hash - req.each_key do |k| - unless keys.include?(k) - raise ArgumentError.new("Request `req` keys must be one of #{keys}, got #{k.inspect}") - end - end - else - raise ArgumentError.new("Request `req` must be a Hash or RequestOptions, got #{req.inspect}") - end - end - - # @api private - # - # @param status [Integer] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # - # @return [Boolean] - def should_retry?(status, headers:) - coerced = OpenAI::Util.coerce_boolean(headers["x-should-retry"]) - case [coerced, status] - in [true | false, _] - coerced - in [_, 408 | 409 | 429 | (500..)] - # retry on: - # 408: timeouts - # 409: locks - # 429: rate limits - # 500+: unknown errors - true - else - false - end - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . 
- # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Integer] :max_retries - # - # @option request [Float] :timeout - # - # @param status [Integer] - # - # @param response_headers [Hash{String=>String}, Net::HTTPHeader] - # - # @return [Hash{Symbol=>Object}] - def follow_redirect(request, status:, response_headers:) - method, url, headers = request.fetch_values(:method, :url, :headers) - location = - Kernel.then do - URI.join(url, response_headers["location"]) - rescue ArgumentError - message = "Server responded with status #{status} but no valid location header." - raise OpenAI::APIConnectionError.new(url: url, message: message) - end - - request = {**request, url: location} - - case [url.scheme, location.scheme] - in ["https", "http"] - message = "Tried to redirect to an insecure URL" - raise OpenAI::APIConnectionError.new(url: url, message: message) - else - nil - end - - # from whatwg fetch spec - case [status, method] - in [301 | 302, :post] | [303, _] - drop = %w[content-encoding content-language content-length content-location content-type] - request = { - **request, - method: method == :head ? :head : :get, - headers: headers.except(*drop), - body: nil - } - else - end - - # from undici - if OpenAI::Util.uri_origin(url) != OpenAI::Util.uri_origin(location) - drop = %w[authorization cookie host proxy-authorization] - request = {**request, headers: request.fetch(:headers).except(*drop)} - end - - request - end - - # @api private - # - # @param status [Integer, OpenAI::APIConnectionError] - # @param stream [Enumerable, nil] - def reap_connection!(status, stream:) - case status - in (..199) | (300..499) - stream&.each { next } - in OpenAI::APIConnectionError | (500..) - OpenAI::Util.close_fused!(stream) - else - end - end - end - - # @api private - # @return [OpenAI::PooledNetRequester] - attr_accessor :requester - - # @api private - # - # @param base_url [String] - # @param timeout [Float] - # @param max_retries [Integer] - # @param initial_retry_delay [Float] - # @param max_retry_delay [Float] - # @param headers [Hash{String=>String, Integer, Array, nil}] - # @param idempotency_header [String, nil] - def initialize( - base_url:, - timeout: 0.0, - max_retries: 0, - initial_retry_delay: 0.0, - max_retry_delay: 0.0, - headers: {}, - idempotency_header: nil - ) - @requester = OpenAI::PooledNetRequester.new - @headers = OpenAI::Util.normalized_headers( - self.class::PLATFORM_HEADERS, - { - "accept" => "application/json", - "content-type" => "application/json" - }, - headers - ) - @base_url = OpenAI::Util.parse_uri(base_url) - @idempotency_header = idempotency_header&.to_s&.downcase - @max_retries = max_retries - @timeout = timeout - @initial_retry_delay = initial_retry_delay - @max_retry_delay = max_retry_delay - end - - # @api private - # - # @return [Hash{String=>String}] - private def auth_headers = {} - - # @api private - # - # @return [String] - private def generate_idempotency_key = "stainless-ruby-retry-#{SecureRandom.uuid}" - - # @api private - # - # @param req [Hash{Symbol=>Object}] .
- # - # @option req [Symbol] :method - # - # @option req [String, Array] :path - # - # @option req [Hash{String=>Array, String, nil}, nil] :query - # - # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers - # - # @option req [Object, nil] :body - # - # @option req [Symbol, nil] :unwrap - # - # @option req [Class, nil] :page - # - # @option req [Class, nil] :stream - # - # @option req [OpenAI::Converter, Class, nil] :model - # - # @param opts [Hash{Symbol=>Object}] . - # - # @option opts [String, nil] :idempotency_key - # - # @option opts [Hash{String=>Array, String, nil}, nil] :extra_query - # - # @option opts [Hash{String=>String, nil}, nil] :extra_headers - # - # @option opts [Object, nil] :extra_body - # - # @option opts [Integer, nil] :max_retries - # - # @option opts [Float, nil] :timeout - # - # @return [Hash{Symbol=>Object}] - private def build_request(req, opts) - method, uninterpolated_path = req.fetch_values(:method, :path) - - path = OpenAI::Util.interpolate_path(uninterpolated_path) - - query = OpenAI::Util.deep_merge(req[:query].to_h, opts[:extra_query].to_h) - - headers = OpenAI::Util.normalized_headers( - @headers, - auth_headers, - req[:headers].to_h, - opts[:extra_headers].to_h - ) - - if @idempotency_header && - !headers.key?(@idempotency_header) && - !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) - headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } - end - - unless headers.key?("x-stainless-retry-count") - headers["x-stainless-retry-count"] = "0" - end - - timeout = opts.fetch(:timeout, @timeout).to_f.clamp((0..)) - unless headers.key?("x-stainless-timeout") || timeout.zero? - headers["x-stainless-timeout"] = timeout.to_s - end - - headers.reject! { |_, v| v.to_s.empty? } - - body = - case method - in :get | :head | :options | :trace - nil - else - OpenAI::Util.deep_merge(*[req[:body], opts[:extra_body]].compact) - end - - headers, encoded = OpenAI::Util.encode_content(headers, body) - { - method: method, - url: OpenAI::Util.join_parsed_uri(@base_url, {**req, path: path, query: query}), - headers: headers, - body: encoded, - max_retries: opts.fetch(:max_retries, @max_retries), - timeout: timeout - } - end - - # @api private - # - # @param headers [Hash{String=>String}] - # @param retry_count [Integer] - # - # @return [Float] - private def retry_delay(headers, retry_count:) - # Non-standard extension - span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 } - return span if span - - retry_header = headers["retry-after"] - return span if (span = Float(retry_header, exception: false)) - - span = retry_header&.then do - Time.httpdate(_1) - Time.now - rescue ArgumentError - nil - end - return span if span - - scale = retry_count**2 - jitter = 1 - (0.25 * rand) - (@initial_retry_delay * scale * jitter).clamp(0, @max_retry_delay) - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . 
- # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Integer] :max_retries - # - # @option request [Float] :timeout - # - # @param redirect_count [Integer] - # - # @param retry_count [Integer] - # - # @param send_retry_header [Boolean] - # - # @raise [OpenAI::APIError] - # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - private def send_request(request, redirect_count:, retry_count:, send_retry_header:) - url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) - input = {**request.except(:timeout), deadline: OpenAI::Util.monotonic_secs + timeout} - - if send_retry_header - headers["x-stainless-retry-count"] = retry_count.to_s - end - - begin - status, response, stream = @requester.execute(input) - rescue OpenAI::APIConnectionError => e - status = e - end - - case status - in ..299 - [status, response, stream] - in 300..399 if redirect_count >= self.class::MAX_REDIRECTS - self.class.reap_connection!(status, stream: stream) - - message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." - raise OpenAI::APIConnectionError.new(url: url, message: message) - in 300..399 - self.class.reap_connection!(status, stream: stream) - - request = self.class.follow_redirect(request, status: status, response_headers: response) - send_request( - request, - redirect_count: redirect_count + 1, - retry_count: retry_count, - send_retry_header: send_retry_header - ) - in OpenAI::APIConnectionError if retry_count >= max_retries - raise status - in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response) - decoded = Kernel.then do - OpenAI::Util.decode_content(response, stream: stream, suppress_error: true) - ensure - self.class.reap_connection!(status, stream: stream) - end - - raise OpenAI::APIStatusError.for( - url: url, - status: status, - body: decoded, - request: nil, - response: response - ) - in (400..) | OpenAI::APIConnectionError - self.class.reap_connection!(status, stream: stream) - - delay = retry_delay(response, retry_count: retry_count) - sleep(delay) - - send_request( - request, - redirect_count: redirect_count, - retry_count: retry_count + 1, - send_retry_header: send_retry_header - ) - end - end - - # Execute the request specified by `req`. This is the method that all resource - # methods call into. - # - # @param req [Hash{Symbol=>Object}] . - # - # @option req [Symbol] :method - # - # @option req [String, Array] :path - # - # @option req [Hash{String=>Array, String, nil}, nil] :query - # - # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers - # - # @option req [Object, nil] :body - # - # @option req [Symbol, nil] :unwrap - # - # @option req [Class, nil] :page - # - # @option req [Class, nil] :stream - # - # @option req [OpenAI::Converter, Class, nil] :model - # - # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options - # - # @raise [OpenAI::APIError] - # @return [Object] - def request(req) - self.class.validate!(req) - model = req.fetch(:model) { OpenAI::Unknown } - opts = req[:options].to_h - OpenAI::RequestOptions.validate!(opts) - request = build_request(req.except(:options), opts) - url = request.fetch(:url) - - # Don't send the current retry count in the headers if the caller modified the header defaults. 
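A rough illustration of the quadratic backoff computed by `retry_delay` above, ignoring jitter and using hypothetical settings of `initial_retry_delay: 0.5` and `max_retry_delay: 8.0`; header-supplied delays ("retry-after-ms", then "retry-after") always take precedence over this computation:

    initial_retry_delay, max_retry_delay = 0.5, 8.0 # assumed values for illustration, not SDK defaults
    (0..4).map { |n| (initial_retry_delay * n**2).clamp(0, max_retry_delay) }
    # => [0.0, 0.5, 2.0, 4.5, 8.0]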
- send_retry_header = request.fetch(:headers)["x-stainless-retry-count"] == "0" - status, response, stream = send_request( - request, - redirect_count: 0, - retry_count: 0, - send_retry_header: send_retry_header - ) - - decoded = OpenAI::Util.decode_content(response, stream: stream) - case req - in { stream: Class => st } - st.new(model: model, url: url, status: status, response: response, stream: decoded) - in { page: Class => page } - page.new(client: self, req: req, headers: response, page_data: decoded) - else - unwrapped = OpenAI::Util.dig(decoded, req[:unwrap]) - OpenAI::Converter.coerce(model, unwrapped) - end - end - - # @return [String] - def inspect - # rubocop:disable Layout/LineLength - base_url = OpenAI::Util.unparse_uri(@base_url) - "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" - # rubocop:enable Layout/LineLength - end - end -end diff --git a/lib/openai/base_model.rb b/lib/openai/base_model.rb deleted file mode 100644 index 20a05eb9..00000000 --- a/lib/openai/base_model.rb +++ /dev/null @@ -1,1178 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @api private - module Converter - # rubocop:disable Lint/UnusedMethodArgument - - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) = (raise NotImplementedError) - - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - case value - in Array - value.map { OpenAI::Unknown.dump(_1) } - in Hash - value.transform_values { OpenAI::Unknown.dump(_1) } - in OpenAI::BaseModel - value.class.dump(value) - else - value - end - end - - # rubocop:enable Lint/UnusedMethodArgument - - class << self - # @api private - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - # - # @return [Proc] - def type_info(spec) - case spec - in Proc - spec - in Hash - type_info(spec.slice(:const, :enum, :union).first&.last) - in true | false - -> { OpenAI::BooleanModel } - in OpenAI::Converter | Class | Symbol - -> { spec } - in NilClass | Integer | Float - -> { spec.class } - end - end - - # @api private - # - # Based on `target`, transform `value` into `target`, to the extent possible: - # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered - # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode - # - # @param target [OpenAI::Converter, Class] - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: - # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. 
- # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. - # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: - # - # - `yes`: the value can be converted to the target type with minimum coercion. - # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. - # - # See implementation below for more details. - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(target, value, state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}) - strictness, exactness = state.fetch_values(:strictness, :exactness) - - case target - in OpenAI::Converter - return target.coerce(value, state: state) - in Class - if value.is_a?(target) - exactness[:yes] += 1 - return value - end - - case target - in -> { _1 <= NilClass } - exactness[value.nil? ? :yes : :maybe] += 1 - return nil - in -> { _1 <= Integer } - if value.is_a?(Integer) - exactness[:yes] += 1 - return value - elsif strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - else - Kernel.then do - return Integer(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError - end - end - in -> { _1 <= Float } - if value.is_a?(Numeric) - exactness[:yes] += 1 - return Float(value) - elsif strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - else - Kernel.then do - return Float(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError - end - end - in -> { _1 <= String } - case value - in String | Symbol | Numeric - exactness[value.is_a?(Numeric) ? :maybe : :yes] += 1 - return value.to_s - else - if strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - end - end - in -> { _1 <= Date || _1 <= Time } - Kernel.then do - return target.parse(value).tap { exactness[:yes] += 1 } - rescue ArgumentError, TypeError => e - raise e if strictness == :strong - end - in -> { _1 <= IO } if value.is_a?(String) - exactness[:yes] += 1 - return StringIO.new(value.b) - else - end - in Symbol - if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target - exactness[:yes] += 1 - return target - elsif strictness == :strong - message = "cannot convert non-matching #{value.class} into #{target.inspect}" - raise ArgumentError.new(message) - end - else - end - - exactness[:no] += 1 - value - end - - # @api private - # - # @param target [OpenAI::Converter, Class] - # @param value [Object] - # - # @return [Object] - def dump(target, value) - target.is_a?(OpenAI::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value) - end - end - end - - # @api private - # - # @abstract - # - # When we don't know what to expect for the value. 
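As a minimal sketch of the primitive coercion rules implemented above (standalone calls against this pre-move `OpenAI::Converter`, using the default `state`; each return value follows the branch noted in the comment):

    OpenAI::Converter.coerce(Float, 1)     # => 1.0 (Numeric input counts as :yes)
    OpenAI::Converter.coerce(Integer, "2") # => 2   (parsed string counts as :maybe)
    OpenAI::Converter.coerce(:o1, "o1")    # => :o1 (String matching a Symbol constant)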
- class Unknown - extend OpenAI::Converter - - # rubocop:disable Lint/UnusedMethodArgument - - # @param other [Object] - # - # @return [Boolean] - def self.===(other) = true - - # @param other [Object] - # - # @return [Boolean] - def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown - - class << self - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) - state.fetch(:exactness)[:yes] += 1 - value - end - - # @!parse - # # @api private - # # - # # @param value [Object] - # # - # # @return [Object] - # def dump(value) = super - end - - # rubocop:enable Lint/UnusedMethodArgument - end - - # @api private - # - # @abstract - # - # Ruby has no Boolean class; this is something for models to refer to. - class BooleanModel - extend OpenAI::Converter - - # @param other [Object] - # - # @return [Boolean] - def self.===(other) = other == true || other == false - - # @param other [Object] - # - # @return [Boolean] - def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel - - class << self - # @api private - # - # @param value [Boolean, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Boolean, Object] - def coerce(value, state:) - state.fetch(:exactness)[value == true || value == false ? :yes : :no] += 1 - value - end - - # @!parse - # # @api private - # # - # # @param value [Boolean, Object] - # # - # # @return [Boolean, Object] - # def dump(value) = super - end - end - - # @api private - # - # A value from among a specified list of options. OpenAPI enum values map to Ruby - # values in the SDK as follows: - # - # 1. boolean => true | false - # 2. integer => Integer - # 3. float => Float - # 4. string => Symbol - # - # We can therefore convert string values to Symbols, but can't convert other - # values safely. - # - # @example - # # `chat_model` is a `OpenAI::Models::ChatModel` - # case chat_model - # when OpenAI::Models::ChatModel::O3_MINI - # # ... - # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 - # # ... - # when OpenAI::Models::ChatModel::O1 - # # ... - # else - # puts(chat_model) - # end - # - # @example - # case chat_model - # in :"o3-mini" - # # ... - # in :"o3-mini-2025-01-31" - # # ... - # in :o1 - # # ... - # else - # puts(chat_model) - # end - module Enum - include OpenAI::Converter - - # All of the valid Symbol values for this enum. - # - # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values - - # @param other [Object] - # - # @return [Boolean] - def ===(other) = values.include?(other) - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set - end - - # @api private - # - # Unlike with primitives, `Enum` additionally validates that the value is a member - # of the enum. - # - # @param value [String, Symbol, Object] - # - # @param state [Hash{Symbol=>Object}] . 
- # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Symbol, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - val = value.is_a?(String) ? value.to_sym : value - - if values.include?(val) - exactness[:yes] += 1 - val - else - exactness[values.first&.class == val.class ? :maybe : :no] += 1 - value - end - end - - # @!parse - # # @api private - # # - # # @param value [Symbol, Object] - # # - # # @return [Symbol, Object] - # def dump(value) = super - end - - # @api private - # - # @example - # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` - # case chat_completion_content_part - # when OpenAI::Models::Chat::ChatCompletionContentPartText - # puts(chat_completion_content_part.text) - # when OpenAI::Models::Chat::ChatCompletionContentPartImage - # puts(chat_completion_content_part.image_url) - # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio - # puts(chat_completion_content_part.input_audio) - # else - # puts(chat_completion_content_part) - # end - # - # @example - # case chat_completion_content_part - # in {type: :text, text: text} - # puts(text) - # in {type: :image_url, image_url: image_url} - # puts(image_url) - # in {type: :input_audio, input_audio: input_audio} - # puts(input_audio) - # else - # puts(chat_completion_content_part) - # end - module Union - include OpenAI::Converter - - # @api private - # - # All of the specified variant info for this union. - # - # @return [Array] - private def known_variants = (@known_variants ||= []) - - # @api private - # - # @return [Array] - protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } - end - - # All of the specified variants for this union. - # - # @return [Array] - def variants = derefed_variants.map(&:last) - - # @api private - # - # @param property [Symbol] - private def discriminator(property) - case property - in Symbol - @discriminator = property - end - end - - # @api private - # - # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def variant(key, spec = nil) - variant_info = - case key - in Symbol - [key, OpenAI::Converter.type_info(spec)] - in Proc | OpenAI::Converter | Class | Hash - [nil, OpenAI::Converter.type_info(key)] - end - - known_variants << variant_info - end - - # @api private - # - # @param value [Object] - # - # @return [OpenAI::Converter, Class, nil] - private def resolve_variant(value) - case [@discriminator, value] - in [_, OpenAI::BaseModel] - value.class - in [Symbol, Hash] - key = value.fetch(@discriminator) do - value.fetch(@discriminator.to_s, OpenAI::Util::OMIT) - end - - return nil if key == OpenAI::Util::OMIT - - key = key.to_sym if key.is_a?(String) - known_variants.find { |k,| k == key }&.last&.call - else - nil - end - end - - # rubocop:disable Style/HashEachMethods - # rubocop:disable Style/CaseEquality - - # @param other [Object] - # - # @return [Boolean] - def ===(other) - known_variants.any? 
do |_, variant_fn| - variant_fn.call === other - end - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants - end - - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) - if (target = resolve_variant(value)) - return OpenAI::Converter.coerce(target, value, state: state) - end - - strictness = state.fetch(:strictness) - exactness = state.fetch(:exactness) - state[:strictness] = strictness == :strong ? true : strictness - - alternatives = [] - known_variants.each do |_, variant_fn| - target = variant_fn.call - exact = state[:exactness] = {yes: 0, no: 0, maybe: 0} - state[:branched] += 1 - - coerced = OpenAI::Converter.coerce(target, value, state: state) - yes, no, maybe = exact.values - if (no + maybe).zero? || (!strictness && yes.positive?) - exact.each { exactness[_1] += _2 } - state[:exactness] = exactness - return coerced - elsif maybe.positive? - alternatives << [[-yes, -maybe, no], exact, coerced] - end - end - - case alternatives.sort_by(&:first) - in [] - exactness[:no] += 1 - if strictness == :strong - message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" - raise ArgumentError.new(message) - end - value - in [[_, exact, coerced], *] - exact.each { exactness[_1] += _2 } - coerced - end - .tap { state[:exactness] = exactness } - ensure - state[:strictness] = strictness - end - - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (target = resolve_variant(value)) - return OpenAI::Converter.dump(target, value) - end - - known_variants.each do - target = _2.call - return OpenAI::Converter.dump(target, value) if target === value - end - - super - end - - # rubocop:enable Style/CaseEquality - # rubocop:enable Style/HashEachMethods - end - - # @api private - # - # @abstract - # - # Array of items of a given type. - class ArrayOf - include OpenAI::Converter - - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) - - # @param other [Object] - # - # @return [Boolean] - def ===(other) = other.is_a?(Array) && other.all?(item_type) - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type - - # @api private - # - # @param value [Enumerable, Object] - # - # @param state [Hash{Symbol=>Object}] . 
- # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Array, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - unless value.is_a?(Array) - exactness[:no] += 1 - return value - end - - target = item_type - exactness[:yes] += 1 - value - .map do |item| - case [nilable?, item] - in [true, nil] - exactness[:yes] += 1 - nil - else - OpenAI::Converter.coerce(target, item, state: state) - end - end - end - - # @api private - # - # @param value [Enumerable, Object] - # - # @return [Array, Object] - def dump(value) - target = item_type - value.is_a?(Array) ? value.map { OpenAI::Converter.dump(target, _1) } : super - end - - # @api private - # - # @return [OpenAI::Converter, Class] - protected def item_type = @item_type_fn.call - - # @api private - # - # @return [Boolean] - protected def nilable? = @nilable - - # @api private - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def initialize(type_info, spec = {}) - @item_type_fn = OpenAI::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] - end - end - - # @api private - # - # @abstract - # - # Hash of items of a given type. - class HashOf - include OpenAI::Converter - - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) - - # @param other [Object] - # - # @return [Boolean] - def ===(other) - type = item_type - case other - in Hash - other.all? do |key, val| - case [key, val] - in [Symbol | String, ^type] - true - else - false - end - end - else - false - end - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type - - # @api private - # - # @param value [Hash{Object=>Object}, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Hash{Symbol=>Object}, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - unless value.is_a?(Hash) - exactness[:no] += 1 - return value - end - - target = item_type - exactness[:yes] += 1 - value - .to_h do |key, val| - k = key.is_a?(String) ? key.to_sym : key - v = - case [nilable?, val] - in [true, nil] - exactness[:yes] += 1 - nil - else - OpenAI::Converter.coerce(target, val, state: state) - end - - exactness[:no] += 1 unless k.is_a?(Symbol) - [k, v] - end - end - - # @api private - # - # @param value [Hash{Object=>Object}, Object] - # - # @return [Hash{Symbol=>Object}, Object] - def dump(value) - target = item_type - value.is_a?(Hash) ? value.transform_values { OpenAI::Converter.dump(target, _1) } : super - end - - # @api private - # - # @return [OpenAI::Converter, Class] - protected def item_type = @item_type_fn.call - - # @api private - # - # @return [Boolean] - protected def nilable? 
= @nilable - - # @api private - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def initialize(type_info, spec = {}) - @item_type_fn = OpenAI::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] - end - end - - # @abstract - # - # @example - # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` - # comparison_filter => { - # key: key, - # type: type, - # value: value - # } - class BaseModel - extend OpenAI::Converter - - class << self - # @api private - # - # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. - # - # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - def known_fields - @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) - end - - # @api private - # - # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - def fields - known_fields.transform_values do |field| - {**field.except(:type_fn), type: field.fetch(:type_fn).call} - end - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param required [Boolean] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def add_field(name_sym, required:, type_info:, spec:) - type_fn, info = - case type_info - in Proc | OpenAI::Converter | Class - [OpenAI::Converter.type_info({**spec, union: type_info}), spec] - in Hash - [OpenAI::Converter.type_info(type_info), type_info] - end - - setter = "#{name_sym}=" - api_name = info.fetch(:api_name, name_sym) - nilable = info[:nil?] - const = required && !nilable ? info.fetch(:const, OpenAI::Util::OMIT) : OpenAI::Util::OMIT - - [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) - - known_fields[name_sym] = - { - mode: @mode, - api_name: api_name, - required: required, - nilable: nilable, - const: const, - type_fn: type_fn - } - - define_method(setter) { @data.store(name_sym, _1) } - - define_method(name_sym) do - target = type_fn.call - value = @data.fetch(name_sym) { const == OpenAI::Util::OMIT ? nil : const } - state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - (nilable || !required) && value.nil? ? nil : OpenAI::Converter.coerce(target, value, state: state) - rescue StandardError - cls = self.class.name.split("::").last - message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." - raise OpenAI::ConversionError.new(message) - end - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" 
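A hypothetical model sketch showing how the `required`/`optional` helpers defined next are meant to be used (all names invented for illustration; `nil?: true` marks the field nilable per the spec options above):

    class Example < OpenAI::BaseModel # hypothetical model, not part of the SDK
      required :id, String
      optional :score, Float, nil?: true
    end

    Example.new(id: "abc").id # => "abc"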
- def required(name_sym, type_info, spec = {}) - add_field(name_sym, required: true, type_info: type_info, spec: spec) - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def optional(name_sym, type_info, spec = {}) - add_field(name_sym, required: false, type_info: type_info, spec: spec) - end - - # @api private - # - # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them - # - # @param blk [Proc] - private def request_only(&blk) - @mode = :dump - blk.call - ensure - @mode = nil - end - - # @api private - # - # `response_only` attributes are omitted from `.#dump` when making requests - # - # @param blk [Proc] - private def response_only(&blk) - @mode = :coerce - blk.call - ensure - @mode = nil - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = self.class == other.class && @data == other.to_h - - class << self - # @api private - # - # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [OpenAI::BaseModel, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - if value.is_a?(self.class) - exactness[:yes] += 1 - return value - end - - unless (val = OpenAI::Util.coerce_hash(value)).is_a?(Hash) - exactness[:no] += 1 - return value - end - exactness[:yes] += 1 - - keys = val.keys.to_set - instance = new - data = instance.to_h - - fields.each do |name, field| - mode, required, target = field.fetch_values(:mode, :required, :type) - api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) - - unless val.key?(api_name) - if const != OpenAI::Util::OMIT - exactness[:yes] += 1 - elsif required && mode != :dump - exactness[nilable ? :maybe : :no] += 1 - else - exactness[:yes] += 1 - end - next - end - - item = val.fetch(api_name) - keys.delete(api_name) - - converted = - if item.nil? && (nilable || !required) - exactness[nilable ? :yes : :maybe] += 1 - nil - else - coerced = OpenAI::Converter.coerce(target, item, state: state) - case target - in OpenAI::Converter | Symbol - coerced - else - item - end - end - data.store(name, converted) - end - - keys.each { data.store(_1, val.fetch(_1)) } - instance - end - - # @api private - # - # @param value [OpenAI::BaseModel, Object] - # - # @return [Hash{Object=>Object}, Object] - def dump(value) - unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) - return super - end - - acc = {} - - coerced.each do |key, val| - name = key.is_a?(String) ? 
key.to_sym : key - case (field = known_fields[name]) - in nil - acc.store(name, super(val)) - else - mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) - case mode - in :coerce - next - else - target = type_fn.call - acc.store(api_name, OpenAI::Converter.dump(target, val)) - end - end - end - - known_fields.each_value do |field| - mode, api_name, const = field.fetch_values(:mode, :api_name, :const) - next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Util::OMIT - acc.store(api_name, const) - end - - acc - end - end - - # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. - # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. - # - # @param key [Symbol] - # - # @return [Object, nil] - def [](key) - unless key.instance_of?(Symbol) - raise ArgumentError.new("Expected symbol key for lookup, got #{key.inspect}") - end - - @data[key] - end - - # Returns a Hash of the data underlying this object. O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - # - # @return [Hash{Symbol=>Object}] - def to_h = @data - - alias_method :to_hash, :to_h - - # @param keys [Array, nil] - # - # @return [Hash{Symbol=>Object}] - def deconstruct_keys(keys) - (keys || self.class.known_fields.keys) - .filter_map do |k| - unless self.class.known_fields.key?(k) - next - end - - [k, public_send(k)] - end - .to_h - end - - # Create a new instance of a model. - # - # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] - def initialize(data = {}) - case OpenAI::Util.coerce_hash(data) - in Hash => coerced - @data = coerced - else - raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") - end - end - - # @return [String] - def inspect - rows = self.class.known_fields.keys.map do - "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" - rescue OpenAI::ConversionError - "#{_1}=#{@data.fetch(_1)}" - end - "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" - end - end -end diff --git a/lib/openai/base_page.rb b/lib/openai/base_page.rb deleted file mode 100644 index 3c6c0e71..00000000 --- a/lib/openai/base_page.rb +++ /dev/null @@ -1,59 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @example - # if page.has_next? - # page = page.next_page - # end - # - # @example - # page.auto_paging_each do |completion| - # puts(completion) - # end - # - # @example - # completions = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # completions => Array - module BasePage - # rubocop:disable Lint/UnusedMethodArgument - - # @return [Boolean] - def next_page? 
= (raise NotImplementedError) - - # @raise [OpenAI::APIError] - # @return [OpenAI::BasePage] - def next_page = (raise NotImplementedError) - - # @param blk [Proc] - # - # @return [void] - def auto_paging_each(&) = (raise NotImplementedError) - - # @return [Enumerable] - def to_enum = super(:auto_paging_each) - - alias_method :enum_for, :to_enum - - # @api private - # - # @param client [OpenAI::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Object] - def initialize(client:, req:, headers:, page_data:) - @client = client - @req = req - super() - end - - # rubocop:enable Lint/UnusedMethodArgument - end -end diff --git a/lib/openai/base_stream.rb b/lib/openai/base_stream.rb deleted file mode 100644 index 519cf83d..00000000 --- a/lib/openai/base_stream.rb +++ /dev/null @@ -1,61 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @example - # stream.each do |chunk| - # puts(chunk) - # end - # - # @example - # chunks = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # chunks => Array - module BaseStream - include Enumerable - - # @return [void] - def close = OpenAI::Util.close_fused!(@iterator) - - # @api private - # - # @return [Enumerable] - private def iterator = (raise NotImplementedError) - - # @param blk [Proc] - # - # @return [void] - def each(&) - unless block_given? - raise ArgumentError.new("A block must be given to ##{__method__}") - end - @iterator.each(&) - end - - # @return [Enumerator] - def to_enum = @iterator - - alias_method :enum_for, :to_enum - - # @api private - # - # @param model [Class, OpenAI::Converter] - # @param url [URI::Generic] - # @param status [Integer] - # @param response [Net::HTTPResponse] - # @param stream [Enumerable] - def initialize(model:, url:, status:, response:, stream:) - @model = model - @url = url - @status = status - @response = response - @stream = stream - @iterator = iterator - end - end -end diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 126da608..02db6417 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module OpenAI - class Client < OpenAI::BaseClient + class Client < OpenAI::Transport::BaseClient # Default max number of retries to attempt after a failed retryable request. 
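For downstream code, the constant moves in this refactor track the file moves listed in the diffstat above; the renames visible in the hunks below include (old names on the right, some of which may remain aliased at the top level):

    OpenAI::Transport::BaseClient              # was OpenAI::BaseClient
    OpenAI::Transport::PooledNetRequester      # was OpenAI::PooledNetRequester
    OpenAI::Type::Converter                    # was OpenAI::Converter
    OpenAI::Type::BasePage                     # was OpenAI::BasePage
    OpenAI::Type::RequestParameters::Converter # was OpenAI::RequestParameters::Converter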
DEFAULT_MAX_RETRIES = 2 diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb index fa0e9fc7..640aa03b 100644 --- a/lib/openai/cursor_page.rb +++ b/lib/openai/cursor_page.rb @@ -23,7 +23,7 @@ module OpenAI # # completions => Array class CursorPage - include OpenAI::BasePage + include OpenAI::Type::BasePage # @return [Array, nil] attr_accessor :data @@ -33,7 +33,7 @@ class CursorPage # @api private # - # @param client [OpenAI::BaseClient] + # @param client [OpenAI::Transport::BaseClient] # @param req [Hash{Symbol=>Object}] # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param page_data [Hash{Symbol=>Object}] @@ -43,7 +43,7 @@ def initialize(client:, req:, headers:, page_data:) case page_data in {data: Array | nil => data} - @data = data&.map { OpenAI::Converter.coerce(model, _1) } + @data = data&.map { OpenAI::Type::Converter.coerce(model, _1) } else end diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index 7a4228e8..ad1e9852 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -171,9 +171,9 @@ def self.for(url:, status:, body:, request:, response:, message: nil) # @param message [String, nil] def initialize(url:, status:, body:, request:, response:, message: nil) message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } - @code = OpenAI::Converter.coerce(String, OpenAI::Util.dig(body, :code)) - @param = OpenAI::Converter.coerce(String, OpenAI::Util.dig(body, :param)) - @type = OpenAI::Converter.coerce(String, OpenAI::Util.dig(body, :type)) + @code = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :code)) + @param = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :param)) + @type = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :type)) super( url: url, status: status, diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 8db71ca4..f59e9311 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -5,7 +5,7 @@ module Models module Audio class SpeechCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute input diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index bbbd2ada..bcd93827 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -5,7 +5,7 @@ module Models module Audio class TranscriptionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute file diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index e610b092..5f5c90ad 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -5,7 +5,7 @@ module Models module Audio class TranslationCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute file diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 3e1d39cd..6772ee41 100644 --- 
a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class BatchCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 4c141b0e..b58fb416 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class BatchCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute completion_window diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 368bc08b..5bd6a019 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class BatchListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index 252ed4ca..12c4186e 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class BatchRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 6dcdbe59..dab3186e 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -5,7 +5,7 @@ module Models module Beta class AssistantCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute model diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index 9262c91e..939c39ee 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -5,7 +5,7 @@ module Models module Beta class AssistantDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index e6b3a31d..601a694b 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -5,7 +5,7 @@ module Models module Beta class AssistantListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index d61babe9..40ba9b41 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ 
b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Beta class AssistantRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index d178a24b..6b2bbaf8 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -5,7 +5,7 @@ module Models module Beta class AssistantUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute description diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index e3891370..56c776fa 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -5,7 +5,7 @@ module Models module Beta class ThreadCreateAndRunParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute assistant_id diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 62984f11..3095b908 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -5,7 +5,7 @@ module Models module Beta class ThreadCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] messages diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index bff19a8a..f81ea4ec 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -5,7 +5,7 @@ module Models module Beta class ThreadDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index cd0bc3c8..4df984d4 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Beta class ThreadRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 2d9eeab2..e4d3d234 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -5,7 +5,7 @@ module Models module Beta class ThreadUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute metadata diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 73ea83f7..6738bb76 100644 --- 
a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class MessageCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute content diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 8055fbe0..abe4fb7f 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class MessageDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index eef2c21b..cc3f0ae0 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class MessageListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index 0280a2b9..c51fa75c 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class MessageRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index 5e7ff7f0..46ffdaa4 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class MessageUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 230c1059..14a4bc2b 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 129ed90f..ad9f9698 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include 
OpenAI::RequestParameters # @!attribute assistant_id diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 24096507..8f3dfdb6 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index bd9194cf..4bc9a2aa 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 5c747924..c2d753ad 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 9d1f9b01..6e213a7f 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -6,7 +6,7 @@ module Beta module Threads class RunUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index c162f2ba..eb22d47e 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -7,7 +7,7 @@ module Threads module Runs class StepListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 5329f9d4..ff4b6ccd 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -7,7 +7,7 @@ module Threads module Runs class StepRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 0db1c9bf..8eb609ab 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ 
b/lib/openai/models/chat/completion_create_params.rb @@ -5,7 +5,7 @@ module Models module Chat class CompletionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute messages diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index d3a43ac0..cbcb2c22 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -5,7 +5,7 @@ module Models module Chat class CompletionDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 9c964a04..8b2e8ef7 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -5,7 +5,7 @@ module Models module Chat class CompletionListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index 248e8caf..e8ee1b1c 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Chat class CompletionRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index bc642e24..32797eb4 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -5,7 +5,7 @@ module Models module Chat class CompletionUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute metadata diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 797d4ed2..74ccbdc7 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -6,7 +6,7 @@ module Chat module Completions class MessageListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 02ee2c45..d4935347 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class CompletionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute model diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 
3bb5d983..7bcadd1d 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class EmbeddingCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute input diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 38b39c6c..597cc103 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class FileContentParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 23e108a1..c37dba0c 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class FileCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute file diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index 54238ca0..22f10a38 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class FileDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 9aa3afb3..9d825d43 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class FileListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index 391639a3..a4f815c3 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class FileRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 7427d15d..979023cf 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning class JobCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index a7189227..1e8ddf18 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning class 
JobCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute model diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index ad4de4f5..bbb67fa8 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning class JobListEventsParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index 6c53cafd..f9c13fc9 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning class JobListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index 5808c0a0..7735a847 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning class JobRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index ef0571df..ee8722ea 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -6,7 +6,7 @@ module FineTuning module Jobs class CheckpointListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 19e34407..87a15c79 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ImageCreateVariationParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute image diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 459ed49c..cf4ca6e4 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ImageEditParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute image diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 377d17b9..43ed870f 100644 --- a/lib/openai/models/image_generate_params.rb +++ 
b/lib/openai/models/image_generate_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ImageGenerateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute prompt diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index c5414ed7..3076a522 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ModelDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index 37bd5c67..1718546b 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ModelListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index a8779854..335e5c97 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ModelRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 5273a9d8..93150dbd 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class ModerationCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute input diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index d50fc83d..3bf3624b 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -5,7 +5,7 @@ module Models module Responses class InputItemListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index a4469d5c..e5c04cf7 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -5,7 +5,7 @@ module Models module Responses class ResponseCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute input diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index 0ecfdf28..ee46f30c 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb 
@@ -5,7 +5,7 @@ module Models module Responses class ResponseDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 4ca65d89..6d3028c3 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Responses class ResponseRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] include diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 164c052a..62d1e116 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class UploadCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 1a2b22be..866303ce 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class UploadCompleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute part_ids diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 6e279c41..3650d2dd 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class UploadCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute bytes diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index a25b50cc..2d3a2fb7 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -5,7 +5,7 @@ module Models module Uploads class PartCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute data diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 9197db12..43b4e601 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class VectorStoreCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] chunking_strategy diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index 0d599c50..25113370 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -4,7 +4,7 @@ module 
OpenAI module Models class VectorStoreDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 74392037..614dc937 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class VectorStoreListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index 8051213c..60ede4d7 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class VectorStoreRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!parse diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 48c3d614..c970dbc4 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class VectorStoreSearchParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute query diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 8dcdbb59..7e6fb3a6 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models class VectorStoreUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute expires_after diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index 045c91ff..3e5971ff 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileBatchCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index dcf7edc1..69286492 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileBatchCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute file_ids diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb 
index 9ea2e0db..1dbc17b1 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileBatchListFilesParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 9856a452..34ccee34 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileBatchRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index c5d82123..8e88a4ff 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileContentParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 44994550..79ef17c3 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute file_id diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index 3a22fe46..8baa3964 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 7d603af5..78acf207 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute [r] after diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index af408c6c..bfb3d370 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileRetrieveParams < 
OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 5e67af85..e1a477eb 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -5,7 +5,7 @@ module Models module VectorStores class FileUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::RequestParameters::Converter + # extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/page.rb b/lib/openai/page.rb index 3e00417d..55f85755 100644 --- a/lib/openai/page.rb +++ b/lib/openai/page.rb @@ -23,7 +23,7 @@ module OpenAI # # models => Array class Page - include OpenAI::BasePage + include OpenAI::Type::BasePage # @return [Array, nil] attr_accessor :data @@ -33,7 +33,7 @@ class Page # @api private # - # @param client [OpenAI::BaseClient] + # @param client [OpenAI::Transport::BaseClient] # @param req [Hash{Symbol=>Object}] # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param page_data [Array] @@ -43,7 +43,7 @@ def initialize(client:, req:, headers:, page_data:) case page_data in {data: Array | nil => data} - @data = data&.map { OpenAI::Converter.coerce(model, _1) } + @data = data&.map { OpenAI::Type::Converter.coerce(model, _1) } else end diff --git a/lib/openai/pooled_net_requester.rb b/lib/openai/pooled_net_requester.rb deleted file mode 100644 index a7e5242a..00000000 --- a/lib/openai/pooled_net_requester.rb +++ /dev/null @@ -1,180 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @api private - class PooledNetRequester - # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 - KEEP_ALIVE_TIMEOUT = 30 - - class << self - # @api private - # - # @param url [URI::Generic] - # - # @return [Net::HTTP] - def connect(url) - port = - case [url.port, url.scheme] - in [Integer, _] - url.port - in [nil, "http" | "ws"] - Net::HTTP.http_default_port - in [nil, "https" | "wss"] - Net::HTTP.https_default_port - end - - Net::HTTP.new(url.host, port).tap do - _1.use_ssl = %w[https wss].include?(url.scheme) - _1.max_retries = 0 - end - end - - # @api private - # - # @param conn [Net::HTTP] - # @param deadline [Float] - def calibrate_socket_timeout(conn, deadline) - timeout = deadline - OpenAI::Util.monotonic_secs - conn.open_timeout = conn.read_timeout = conn.write_timeout = conn.continue_timeout = timeout - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . 
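The sweep of hunks above is purely mechanical: every `*_params` model keeps its shape, and only the mixin's namespace moves under `OpenAI::Type`. A minimal sketch of the resulting pattern (the class name and field here are hypothetical, not from this patch); `dump_request`, whose removal from lib/openai/request_options.rb appears below, still splits the dumped hash into wire parameters and per-request options:

class ExampleListParams < OpenAI::BaseModel
  # @!parse
  #   extend OpenAI::Type::RequestParameters::Converter
  include OpenAI::RequestParameters

  optional :after, String
end

params = ExampleListParams.new(after: "obj_123", request_options: {timeout: 5})
dumped, options = ExampleListParams.dump_request(params)
dumped  # => {after: "obj_123"} (roughly; wire parameters only)
options # => {timeout: 5}       (per-request options, split off)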
- # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @param blk [Proc] - # - # @yieldparam [String] - # @return [Net::HTTPGenericRequest] - def build_request(request, &) - method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) - req = Net::HTTPGenericRequest.new( - method.to_s.upcase, - !body.nil?, - method != :head, - url.to_s - ) - - headers.each { req[_1] = _2 } - - case body - in nil - nil - in String - req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) - in StringIO - req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) - in IO | Enumerator - req["transfer-encoding"] ||= "chunked" unless req["content-length"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) - end - - req - end - end - - # @api private - # - # @param url [URI::Generic] - # @param deadline [Float] - # @param blk [Proc] - # - # @raise [Timeout::Error] - # @yieldparam [Net::HTTP] - private def with_pool(url, deadline:, &blk) - origin = OpenAI::Util.uri_origin(url) - timeout = deadline - OpenAI::Util.monotonic_secs - pool = - @mutex.synchronize do - @pools[origin] ||= ConnectionPool.new(size: @size) do - self.class.connect(url) - end - end - - pool.with(timeout: timeout, &blk) - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . - # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Float] :deadline - # - # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - def execute(request) - url, deadline = request.fetch_values(:url, :deadline) - - eof = false - finished = false - enum = Enumerator.new do |y| - with_pool(url, deadline: deadline) do |conn| - next if finished - - req = self.class.build_request(request) do - self.class.calibrate_socket_timeout(conn, deadline) - end - - self.class.calibrate_socket_timeout(conn, deadline) - unless conn.started? - conn.keep_alive_timeout = self.class::KEEP_ALIVE_TIMEOUT - conn.start - end - - self.class.calibrate_socket_timeout(conn, deadline) - conn.request(req) do |rsp| - y << [conn, req, rsp] - break if finished - - rsp.read_body do |bytes| - y << bytes - break if finished - - self.class.calibrate_socket_timeout(conn, deadline) - end - eof = true - end - end - rescue Timeout::Error - raise OpenAI::APITimeoutError - end - - conn, _, response = enum.next - body = OpenAI::Util.fused_enum(enum, external: true) do - finished = true - tap do - enum.next - rescue StopIteration - nil - end - conn.finish if !eof && conn&.started? - end - [Integer(response.code), response, (response.body = body)] - end - - # @api private - # - # @param size [Integer] - def initialize(size: Etc.nprocessors) - @mutex = Mutex.new - @size = size - @pools = {} - end - end -end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index 917753db..279c2936 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -1,39 +1,6 @@ # frozen_string_literal: true module OpenAI - # @api private - module RequestParameters - # @!parse - # # Options to specify HTTP behaviour for this request. 
- # # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # attr_accessor :request_options - - # @param mod [Module] - def self.included(mod) - return unless mod <= OpenAI::BaseModel - - mod.extend(OpenAI::RequestParameters::Converter) - mod.optional(:request_options, OpenAI::RequestOptions) - end - - # @api private - module Converter - # @api private - # - # @param params [Object] - # - # @return [Array(Object, Hash{Symbol=>Object})] - def dump_request(params) - case (dumped = dump(params)) - in Hash - [dumped.except(:request_options), dumped[:request_options]] - else - [dumped, nil] - end - end - end - end - # Specify HTTP behaviour to use for a specific request. These options supplement # or override those provided at the client level. # diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index fc67dadc..d3be1bd5 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -17,7 +17,7 @@ module OpenAI # # events => Array class Stream - include OpenAI::BaseStream + include OpenAI::Type::BaseStream # @api private # @@ -55,7 +55,7 @@ class Stream message: message ) in decoded - y << OpenAI::Converter.coerce(@model, decoded) + y << OpenAI::Type::Converter.coerce(@model, decoded) end else end diff --git a/lib/openai/transport/base_client.rb b/lib/openai/transport/base_client.rb new file mode 100644 index 00000000..fb9321c1 --- /dev/null +++ b/lib/openai/transport/base_client.rb @@ -0,0 +1,459 @@ +# frozen_string_literal: true + +module OpenAI + module Transport + # @api private + # + # @abstract + class BaseClient + # from whatwg fetch spec + MAX_REDIRECTS = 20 + + # rubocop:disable Style/MutableConstant + PLATFORM_HEADERS = + { + "x-stainless-arch" => OpenAI::Util.arch, + "x-stainless-lang" => "ruby", + "x-stainless-os" => OpenAI::Util.os, + "x-stainless-package-version" => OpenAI::VERSION, + "x-stainless-runtime" => ::RUBY_ENGINE, + "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION + } + # rubocop:enable Style/MutableConstant + + class << self + # @api private + # + # @param req [Hash{Symbol=>Object}] + # + # @raise [ArgumentError] + def validate!(req) + keys = [:method, :path, :query, :headers, :body, :unwrap, :page, :stream, :model, :options] + case req + in Hash + req.each_key do |k| + unless keys.include?(k) + raise ArgumentError.new("Request `req` keys must be one of #{keys}, got #{k.inspect}") + end + end + else + raise ArgumentError.new("Request `req` must be a Hash or RequestOptions, got #{req.inspect}") + end + end + + # @api private + # + # @param status [Integer] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # + # @return [Boolean] + def should_retry?(status, headers:) + coerced = OpenAI::Util.coerce_boolean(headers["x-should-retry"]) + case [coerced, status] + in [true | false, _] + coerced + in [_, 408 | 409 | 429 | (500..)] + # retry on: + # 408: timeouts + # 409: locks + # 429: rate limits + # 500+: unknown errors + true + else + false + end + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
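The retry predicate above gives an explicit `x-should-retry` header absolute precedence, then falls back to a status-code allow list. A behaviour sketch (illustrative calls against the `@api private` method, not from the patch):

client_class = OpenAI::Transport::BaseClient
client_class.should_retry?(429, headers: {}) # => true: rate limited
client_class.should_retry?(503, headers: {}) # => true: server error
client_class.should_retry?(400, headers: {}) # => false: plain client error
client_class.should_retry?(400, headers: {"x-should-retry" => "true"}) # => true: header wins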
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Integer] :max_retries + # + # @option request [Float] :timeout + # + # @param status [Integer] + # + # @param response_headers [Hash{String=>String}, Net::HTTPHeader] + # + # @return [Hash{Symbol=>Object}] + def follow_redirect(request, status:, response_headers:) + method, url, headers = request.fetch_values(:method, :url, :headers) + location = + Kernel.then do + URI.join(url, response_headers["location"]) + rescue ArgumentError + message = "Server responded with status #{status} but no valid location header." + raise OpenAI::APIConnectionError.new(url: url, message: message) + end + + request = {**request, url: location} + + case [url.scheme, location.scheme] + in ["https", "http"] + message = "Tried to redirect to an insecure URL" + raise OpenAI::APIConnectionError.new(url: url, message: message) + else + nil + end + + # from whatwg fetch spec + case [status, method] + in [301 | 302, :post] | [303, _] + drop = %w[content-encoding content-language content-length content-location content-type] + request = { + **request, + method: method == :head ? :head : :get, + headers: headers.except(*drop), + body: nil + } + else + end + + # from undici + if OpenAI::Util.uri_origin(url) != OpenAI::Util.uri_origin(location) + drop = %w[authorization cookie host proxy-authorization] + request = {**request, headers: request.fetch(:headers).except(*drop)} + end + + request + end + + # @api private + # + # @param status [Integer, OpenAI::APIConnectionError] + # @param stream [Enumerable, nil] + def reap_connection!(status, stream:) + case status + in (..199) | (300..499) + stream&.each { next } + in OpenAI::APIConnectionError | (500..) + OpenAI::Util.close_fused!(stream) + else + end + end + end + + # @api private + # @return [OpenAI::Transport::PooledNetRequester] + attr_accessor :requester + + # @api private + # + # @param base_url [String] + # @param timeout [Float] + # @param max_retries [Integer] + # @param initial_retry_delay [Float] + # @param max_retry_delay [Float] + # @param headers [Hash{String=>String, Integer, Array, nil}] + # @param idempotency_header [String, nil] + def initialize( + base_url:, + timeout: 0.0, + max_retries: 0, + initial_retry_delay: 0.0, + max_retry_delay: 0.0, + headers: {}, + idempotency_header: nil + ) + @requester = OpenAI::Transport::PooledNetRequester.new + @headers = OpenAI::Util.normalized_headers( + self.class::PLATFORM_HEADERS, + { + "accept" => "application/json", + "content-type" => "application/json" + }, + headers + ) + @base_url = OpenAI::Util.parse_uri(base_url) + @idempotency_header = idempotency_header&.to_s&.downcase + @max_retries = max_retries + @timeout = timeout + @initial_retry_delay = initial_retry_delay + @max_retry_delay = max_retry_delay + end + + # @api private + # + # @return [Hash{String=>String}] + private def auth_headers = {} + + # @api private + # + # @return [String] + private def generate_idempotency_key = "stainless-ruby-retry-#{SecureRandom.uuid}" + + # @api private + # + # @param req [Hash{Symbol=>Object}] .
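A behaviour sketch for `follow_redirect` above (URLs hypothetical, not from the patch): a 303 rewrites the verb to GET and strips body-describing headers, and a cross-origin hop additionally drops credentials:

request = {
  method: :post,
  url: URI("https://api.example.test/a"),
  headers: {"authorization" => "Bearer ...", "content-type" => "application/json"},
  body: "{}"
}
redirected = OpenAI::Transport::BaseClient.follow_redirect(
  request,
  status: 303,
  response_headers: {"location" => "https://other.example.test/b"}
)
redirected.fetch(:method)                        # => :get
redirected.fetch(:body)                          # => nil
redirected.fetch(:headers).key?("authorization") # => false: different origin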
+ # + # @option req [Symbol] :method + # + # @option req [String, Array] :path + # + # @option req [Hash{String=>Array, String, nil}, nil] :query + # + # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers + # + # @option req [Object, nil] :body + # + # @option req [Symbol, nil] :unwrap + # + # @option req [Class, nil] :page + # + # @option req [Class, nil] :stream + # + # @option req [OpenAI::Type::Converter, Class, nil] :model + # + # @param opts [Hash{Symbol=>Object}] . + # + # @option opts [String, nil] :idempotency_key + # + # @option opts [Hash{String=>Array, String, nil}, nil] :extra_query + # + # @option opts [Hash{String=>String, nil}, nil] :extra_headers + # + # @option opts [Object, nil] :extra_body + # + # @option opts [Integer, nil] :max_retries + # + # @option opts [Float, nil] :timeout + # + # @return [Hash{Symbol=>Object}] + private def build_request(req, opts) + method, uninterpolated_path = req.fetch_values(:method, :path) + + path = OpenAI::Util.interpolate_path(uninterpolated_path) + + query = OpenAI::Util.deep_merge(req[:query].to_h, opts[:extra_query].to_h) + + headers = OpenAI::Util.normalized_headers( + @headers, + auth_headers, + req[:headers].to_h, + opts[:extra_headers].to_h + ) + + if @idempotency_header && + !headers.key?(@idempotency_header) && + !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) + headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } + end + + unless headers.key?("x-stainless-retry-count") + headers["x-stainless-retry-count"] = "0" + end + + timeout = opts.fetch(:timeout, @timeout).to_f.clamp((0..)) + unless headers.key?("x-stainless-timeout") || timeout.zero? + headers["x-stainless-timeout"] = timeout.to_s + end + + headers.reject! { |_, v| v.to_s.empty? } + + body = + case method + in :get | :head | :options | :trace + nil + else + OpenAI::Util.deep_merge(*[req[:body], opts[:extra_body]].compact) + end + + headers, encoded = OpenAI::Util.encode_content(headers, body) + { + method: method, + url: OpenAI::Util.join_parsed_uri(@base_url, {**req, path: path, query: query}), + headers: headers, + body: encoded, + max_retries: opts.fetch(:max_retries, @max_retries), + timeout: timeout + } + end + + # @api private + # + # @param headers [Hash{String=>String}] + # @param retry_count [Integer] + # + # @return [Float] + private def retry_delay(headers, retry_count:) + # Non-standard extension + span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 } + return span if span + + retry_header = headers["retry-after"] + return span if (span = Float(retry_header, exception: false)) + + span = retry_header&.then do + Time.httpdate(_1) - Time.now + rescue ArgumentError + nil + end + return span if span + + scale = retry_count**2 + jitter = 1 - (0.25 * rand) + (@initial_retry_delay * scale * jitter).clamp(0, @max_retry_delay) + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
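The backoff in `retry_delay` above prefers a server-provided hint — the non-standard `retry-after-ms`, then `retry-after` as seconds or an HTTP-date — and only then computes a jittered polynomial delay. A rough walk-through, assuming a client built with initial_retry_delay: 0.5 and max_retry_delay: 8.0 (illustrative numbers only):

retry_delay({"retry-after-ms" => "1500"}, retry_count: 2) # => 1.5: millisecond hint wins
retry_delay({"retry-after" => "3"}, retry_count: 2)       # => 3.0: seconds hint
retry_delay({}, retry_count: 3)                           # => 0.5 * 3**2 * jitter, i.e. roughly 3.4..4.5, clamped to at most 8.0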
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Integer] :max_retries + # + # @option request [Float] :timeout + # + # @param redirect_count [Integer] + # + # @param retry_count [Integer] + # + # @param send_retry_header [Boolean] + # + # @raise [OpenAI::APIError] + # @return [Array(Integer, Net::HTTPResponse, Enumerable)] + private def send_request(request, redirect_count:, retry_count:, send_retry_header:) + url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) + input = {**request.except(:timeout), deadline: OpenAI::Util.monotonic_secs + timeout} + + if send_retry_header + headers["x-stainless-retry-count"] = retry_count.to_s + end + + begin + status, response, stream = @requester.execute(input) + rescue OpenAI::APIConnectionError => e + status = e + end + + case status + in ..299 + [status, response, stream] + in 300..399 if redirect_count >= self.class::MAX_REDIRECTS + self.class.reap_connection!(status, stream: stream) + + message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." + raise OpenAI::APIConnectionError.new(url: url, message: message) + in 300..399 + self.class.reap_connection!(status, stream: stream) + + request = self.class.follow_redirect(request, status: status, response_headers: response) + send_request( + request, + redirect_count: redirect_count + 1, + retry_count: retry_count, + send_retry_header: send_retry_header + ) + in OpenAI::APIConnectionError if retry_count >= max_retries + raise status + in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response) + decoded = Kernel.then do + OpenAI::Util.decode_content(response, stream: stream, suppress_error: true) + ensure + self.class.reap_connection!(status, stream: stream) + end + + raise OpenAI::APIStatusError.for( + url: url, + status: status, + body: decoded, + request: nil, + response: response + ) + in (400..) | OpenAI::APIConnectionError + self.class.reap_connection!(status, stream: stream) + + delay = retry_delay(response, retry_count: retry_count) + sleep(delay) + + send_request( + request, + redirect_count: redirect_count, + retry_count: retry_count + 1, + send_retry_header: send_retry_header + ) + end + end + + # Execute the request specified by `req`. This is the method that all resource + # methods call into. + # + # @param req [Hash{Symbol=>Object}] . + # + # @option req [Symbol] :method + # + # @option req [String, Array] :path + # + # @option req [Hash{String=>Array, String, nil}, nil] :query + # + # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers + # + # @option req [Object, nil] :body + # + # @option req [Symbol, nil] :unwrap + # + # @option req [Class, nil] :page + # + # @option req [Class, nil] :stream + # + # @option req [OpenAI::Type::Converter, Class, nil] :model + # + # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options + # + # @raise [OpenAI::APIError] + # @return [Object] + def request(req) + self.class.validate!(req) + model = req.fetch(:model) { OpenAI::Unknown } + opts = req[:options].to_h + OpenAI::RequestOptions.validate!(opts) + request = build_request(req.except(:options), opts) + url = request.fetch(:url) + + # Don't send the current retry count in the headers if the caller modified the header defaults. 
+ send_retry_header = request.fetch(:headers)["x-stainless-retry-count"] == "0" + status, response, stream = send_request( + request, + redirect_count: 0, + retry_count: 0, + send_retry_header: send_retry_header + ) + + decoded = OpenAI::Util.decode_content(response, stream: stream) + case req + in { stream: Class => st } + st.new(model: model, url: url, status: status, response: response, stream: decoded) + in { page: Class => page } + page.new(client: self, req: req, headers: response, page_data: decoded) + else + unwrapped = OpenAI::Util.dig(decoded, req[:unwrap]) + OpenAI::Type::Converter.coerce(model, unwrapped) + end + end + + # @return [String] + def inspect + # rubocop:disable Layout/LineLength + base_url = OpenAI::Util.unparse_uri(@base_url) + "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" + # rubocop:enable Layout/LineLength + end + end + end +end diff --git a/lib/openai/transport/pooled_net_requester.rb b/lib/openai/transport/pooled_net_requester.rb new file mode 100644 index 00000000..c3a4260a --- /dev/null +++ b/lib/openai/transport/pooled_net_requester.rb @@ -0,0 +1,182 @@ +# frozen_string_literal: true + +module OpenAI + module Transport + # @api private + class PooledNetRequester + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + + class << self + # @api private + # + # @param url [URI::Generic] + # + # @return [Net::HTTP] + def connect(url) + port = + case [url.port, url.scheme] + in [Integer, _] + url.port + in [nil, "http" | "ws"] + Net::HTTP.http_default_port + in [nil, "https" | "wss"] + Net::HTTP.https_default_port + end + + Net::HTTP.new(url.host, port).tap do + _1.use_ssl = %w[https wss].include?(url.scheme) + _1.max_retries = 0 + end + end + + # @api private + # + # @param conn [Net::HTTP] + # @param deadline [Float] + def calibrate_socket_timeout(conn, deadline) + timeout = deadline - OpenAI::Util.monotonic_secs + conn.open_timeout = conn.read_timeout = conn.write_timeout = conn.continue_timeout = timeout + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
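`build_request`, reproduced next, picks the framing from the body's type: a String or StringIO gets an explicit content-length, while an IO or Enumerator is sent chunked; all three are streamed through `ReadIOAdapter`, and the block is invoked once per chunk read (`execute` uses it to re-arm socket timeouts). An illustration (file name hypothetical):

req = OpenAI::Transport::PooledNetRequester.build_request(
  {
    method: :post,
    url: URI("https://api.example.test/upload"),
    headers: {},
    body: File.open("data.bin", "rb")
  }
) { |chunk| } # called with each String chunk as it is read
req["transfer-encoding"] # => "chunked"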
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @param blk [Proc] + # + # @yieldparam [String] + # @return [Net::HTTPGenericRequest] + def build_request(request, &) + method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) + req = Net::HTTPGenericRequest.new( + method.to_s.upcase, + !body.nil?, + method != :head, + url.to_s + ) + + headers.each { req[_1] = _2 } + + case body + in nil + nil + in String + req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + in StringIO + req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + in IO | Enumerator + req["transfer-encoding"] ||= "chunked" unless req["content-length"] + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + end + + req + end + end + + # @api private + # + # @param url [URI::Generic] + # @param deadline [Float] + # @param blk [Proc] + # + # @raise [Timeout::Error] + # @yieldparam [Net::HTTP] + private def with_pool(url, deadline:, &blk) + origin = OpenAI::Util.uri_origin(url) + timeout = deadline - OpenAI::Util.monotonic_secs + pool = + @mutex.synchronize do + @pools[origin] ||= ConnectionPool.new(size: @size) do + self.class.connect(url) + end + end + + pool.with(timeout: timeout, &blk) + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . + # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Float] :deadline + # + # @return [Array(Integer, Net::HTTPResponse, Enumerable)] + def execute(request) + url, deadline = request.fetch_values(:url, :deadline) + + eof = false + finished = false + enum = Enumerator.new do |y| + with_pool(url, deadline: deadline) do |conn| + next if finished + + req = self.class.build_request(request) do + self.class.calibrate_socket_timeout(conn, deadline) + end + + self.class.calibrate_socket_timeout(conn, deadline) + unless conn.started? + conn.keep_alive_timeout = self.class::KEEP_ALIVE_TIMEOUT + conn.start + end + + self.class.calibrate_socket_timeout(conn, deadline) + conn.request(req) do |rsp| + y << [conn, req, rsp] + break if finished + + rsp.read_body do |bytes| + y << bytes + break if finished + + self.class.calibrate_socket_timeout(conn, deadline) + end + eof = true + end + end + rescue Timeout::Error + raise OpenAI::APITimeoutError + end + + conn, _, response = enum.next + body = OpenAI::Util.fused_enum(enum, external: true) do + finished = true + tap do + enum.next + rescue StopIteration + nil + end + conn.finish if !eof && conn&.started? 
+ end + [Integer(response.code), response, (response.body = body)] + end + + # @api private + # + # @param size [Integer] + def initialize(size: Etc.nprocessors) + @mutex = Mutex.new + @size = size + @pools = {} + end + end + end +end diff --git a/lib/openai/type.rb b/lib/openai/type.rb new file mode 100644 index 00000000..f4b8345c --- /dev/null +++ b/lib/openai/type.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module OpenAI + Unknown = OpenAI::Type::Unknown + + BooleanModel = OpenAI::Type::BooleanModel + + Enum = OpenAI::Type::Enum + + Union = OpenAI::Type::Union + + ArrayOf = OpenAI::Type::ArrayOf + + HashOf = OpenAI::Type::HashOf + + BaseModel = OpenAI::Type::BaseModel + + RequestParameters = OpenAI::Type::RequestParameters + + # This module contains various type declarations. + module Type + end +end diff --git a/lib/openai/type/array_of.rb b/lib/openai/type/array_of.rb new file mode 100644 index 00000000..0e6f3227 --- /dev/null +++ b/lib/openai/type/array_of.rb @@ -0,0 +1,110 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # @abstract + # + # Array of items of a given type. + class ArrayOf + include OpenAI::Type::Converter + + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) + + # @param other [Object] + # + # @return [Boolean] + def ===(other) = other.is_a?(Array) && other.all?(item_type) + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type + + # @api private + # + # @param value [Enumerable, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Array, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Array) + exactness[:no] += 1 + return value + end + + target = item_type + exactness[:yes] += 1 + value + .map do |item| + case [nilable?, item] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Type::Converter.coerce(target, item, state: state) + end + end + end + + # @api private + # + # @param value [Enumerable, Object] + # + # @return [Array, Object] + def dump(value) + target = item_type + value.is_a?(Array) ? value.map { OpenAI::Type::Converter.dump(target, _1) } : super + end + + # @api private + # + # @return [OpenAI::Type::Converter, Class] + protected def item_type = @item_type_fn.call + + # @api private + # + # @return [Boolean] + protected def nilable? = @nilable + + # @api private + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def initialize(type_info, spec = {}) + @item_type_fn = OpenAI::Type::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] 
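A usage sketch for `ArrayOf` (the `StringList` constant is hypothetical): `===` checks element membership, while `coerce` applies the item converter element-wise, letting `nil` through when declared `nil?: true` and tallying exactness for union resolution:

StringList = OpenAI::ArrayOf[String, nil?: true]

StringList === %w[a b]  # => true
StringList === ["a", 1] # => false

state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
StringList.coerce(["a", nil], state: state) # => ["a", nil]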
+ end + end + end +end diff --git a/lib/openai/type/base_model.rb b/lib/openai/type/base_model.rb new file mode 100644 index 00000000..6bbed764 --- /dev/null +++ b/lib/openai/type/base_model.rb @@ -0,0 +1,355 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @abstract + # + # @example + # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` + # comparison_filter => { + # key: key, + # type: type, + # value: value + # } + class BaseModel + extend OpenAI::Type::Converter + + class << self + # @api private + # + # Assumes superclass fields are totally defined before fields are accessed / + # defined on subclasses. + # + # @return [Hash{Symbol=>Hash{Symbol=>Object}}] + def known_fields + @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) + end + + # @api private + # + # @return [Hash{Symbol=>Hash{Symbol=>Object}}] + def fields + known_fields.transform_values do |field| + {**field.except(:type_fn), type: field.fetch(:type_fn).call} + end + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param required [Boolean] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def add_field(name_sym, required:, type_info:, spec:) + type_fn, info = + case type_info + in Proc | OpenAI::Type::Converter | Class + [OpenAI::Type::Converter.type_info({**spec, union: type_info}), spec] + in Hash + [OpenAI::Type::Converter.type_info(type_info), type_info] + end + + setter = "#{name_sym}=" + api_name = info.fetch(:api_name, name_sym) + nilable = info[:nil?] + const = required && !nilable ? info.fetch(:const, OpenAI::Util::OMIT) : OpenAI::Util::OMIT + + [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) + + known_fields[name_sym] = + { + mode: @mode, + api_name: api_name, + required: required, + nilable: nilable, + const: const, + type_fn: type_fn + } + + define_method(setter) { @data.store(name_sym, _1) } + + define_method(name_sym) do + target = type_fn.call + value = @data.fetch(name_sym) { const == OpenAI::Util::OMIT ? nil : const } + state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + if (nilable || !required) && value.nil? + nil + else + OpenAI::Type::Converter.coerce( + target, + value, + state: state + ) + end + rescue StandardError + cls = self.class.name.split("::").last + message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." + raise OpenAI::ConversionError.new(message) + end + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def required(name_sym, type_info, spec = {}) + add_field(name_sym, required: true, type_info: type_info, spec: spec) + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . 
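`required` and `optional` above both funnel into `add_field`, which defines a coercing reader plus a raw setter per field. A hypothetical model showing the DSL end to end:

class Widget < OpenAI::BaseModel
  required :name, String
  optional :size, Integer, nil?: true
end

w = Widget.new(name: "probe", size: nil)
w.name   # => "probe": coerced through the generated reader
w.size   # => nil: nilable, so nil passes through
w[:name] # => "probe": raw lookup, no coercion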
+ # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def optional(name_sym, type_info, spec = {}) + add_field(name_sym, required: false, type_info: type_info, spec: spec) + end + + # @api private + # + # `request_only` attributes not excluded from `.#coerce` when receiving responses + # even if well behaved servers should not send them + # + # @param blk [Proc] + private def request_only(&blk) + @mode = :dump + blk.call + ensure + @mode = nil + end + + # @api private + # + # `response_only` attributes are omitted from `.#dump` when making requests + # + # @param blk [Proc] + private def response_only(&blk) + @mode = :coerce + blk.call + ensure + @mode = nil + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = self.class == other.class && @data == other.to_h + + class << self + # @api private + # + # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [OpenAI::BaseModel, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + if value.is_a?(self.class) + exactness[:yes] += 1 + return value + end + + unless (val = OpenAI::Util.coerce_hash(value)).is_a?(Hash) + exactness[:no] += 1 + return value + end + exactness[:yes] += 1 + + keys = val.keys.to_set + instance = new + data = instance.to_h + + fields.each do |name, field| + mode, required, target = field.fetch_values(:mode, :required, :type) + api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) + + unless val.key?(api_name) + if const != OpenAI::Util::OMIT + exactness[:yes] += 1 + elsif required && mode != :dump + exactness[nilable ? :maybe : :no] += 1 + else + exactness[:yes] += 1 + end + next + end + + item = val.fetch(api_name) + keys.delete(api_name) + + converted = + if item.nil? && (nilable || !required) + exactness[nilable ? :yes : :maybe] += 1 + nil + else + coerced = OpenAI::Type::Converter.coerce(target, item, state: state) + case target + in OpenAI::Type::Converter | Symbol + coerced + else + item + end + end + data.store(name, converted) + end + + keys.each { data.store(_1, val.fetch(_1)) } + instance + end + + # @api private + # + # @param value [OpenAI::BaseModel, Object] + # + # @return [Hash{Object=>Object}, Object] + def dump(value) + unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) + return super + end + + acc = {} + + coerced.each do |key, val| + name = key.is_a?(String) ? key.to_sym : key + case (field = known_fields[name]) + in nil + acc.store(name, super(val)) + else + mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) + case mode + in :coerce + next + else + target = type_fn.call + acc.store(api_name, OpenAI::Type::Converter.dump(target, val)) + end + end + end + + known_fields.each_value do |field| + mode, api_name, const = field.fetch_values(:mode, :api_name, :const) + next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Util::OMIT + acc.store(api_name, const) + end + + acc + end + end + + # Returns the raw value associated with the given key, if found. Otherwise, nil is + # returned. 
+ # + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. + # + # @param key [Symbol] + # + # @return [Object, nil] + def [](key) + unless key.instance_of?(Symbol) + raise ArgumentError.new("Expected symbol key for lookup, got #{key.inspect}") + end + + @data[key] + end + + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. + # + # @return [Hash{Symbol=>Object}] + def to_h = @data + + alias_method :to_hash, :to_h + + # @param keys [Array, nil] + # + # @return [Hash{Symbol=>Object}] + def deconstruct_keys(keys) + (keys || self.class.known_fields.keys) + .filter_map do |k| + unless self.class.known_fields.key?(k) + next + end + + [k, public_send(k)] + end + .to_h + end + + # Create a new instance of a model. + # + # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] + def initialize(data = {}) + case OpenAI::Util.coerce_hash(data) + in Hash => coerced + @data = coerced + else + raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") + end + end + + # @return [String] + def inspect + rows = self.class.known_fields.keys.map do + "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" + rescue OpenAI::ConversionError + "#{_1}=#{@data.fetch(_1)}" + end + "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" + end + end + end +end diff --git a/lib/openai/type/base_page.rb b/lib/openai/type/base_page.rb new file mode 100644 index 00000000..33648d3c --- /dev/null +++ b/lib/openai/type/base_page.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @example + # if page.has_next? + # page = page.next_page + # end + # + # @example + # page.auto_paging_each do |completion| + # puts(completion) + # end + # + # @example + # completions = + # page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # completions => Array + module BasePage + # rubocop:disable Lint/UnusedMethodArgument + + # @return [Boolean] + def next_page? 
= (raise NotImplementedError) + + # @raise [OpenAI::APIError] + # @return [OpenAI::Type::BasePage] + def next_page = (raise NotImplementedError) + + # @param blk [Proc] + # + # @return [void] + def auto_paging_each(&) = (raise NotImplementedError) + + # @return [Enumerable] + def to_enum = super(:auto_paging_each) + + alias_method :enum_for, :to_enum + + # @api private + # + # @param client [OpenAI::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Object] + def initialize(client:, req:, headers:, page_data:) + @client = client + @req = req + super() + end + + # rubocop:enable Lint/UnusedMethodArgument + end + end +end diff --git a/lib/openai/type/base_stream.rb b/lib/openai/type/base_stream.rb new file mode 100644 index 00000000..4d62568e --- /dev/null +++ b/lib/openai/type/base_stream.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @example + # stream.each do |chunk| + # puts(chunk) + # end + # + # @example + # chunks = + # stream + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # chunks => Array + module BaseStream + include Enumerable + + # @return [void] + def close = OpenAI::Util.close_fused!(@iterator) + + # @api private + # + # @return [Enumerable] + private def iterator = (raise NotImplementedError) + + # @param blk [Proc] + # + # @return [void] + def each(&) + unless block_given? + raise ArgumentError.new("A block must be given to ##{__method__}") + end + @iterator.each(&) + end + + # @return [Enumerator] + def to_enum = @iterator + + alias_method :enum_for, :to_enum + + # @api private + # + # @param model [Class, OpenAI::Type::Converter] + # @param url [URI::Generic] + # @param status [Integer] + # @param response [Net::HTTPResponse] + # @param stream [Enumerable] + def initialize(model:, url:, status:, response:, stream:) + @model = model + @url = url + @status = status + @response = response + @stream = stream + @iterator = iterator + end + end + end +end diff --git a/lib/openai/type/boolean_model.rb b/lib/openai/type/boolean_model.rb new file mode 100644 index 00000000..9ee84edd --- /dev/null +++ b/lib/openai/type/boolean_model.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # @abstract + # + # Ruby has no Boolean class; this is something for models to refer to. + class BooleanModel + extend OpenAI::Type::Converter + + # @param other [Object] + # + # @return [Boolean] + def self.===(other) = other == true || other == false + + # @param other [Object] + # + # @return [Boolean] + def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel + + class << self + # @api private + # + # @param value [Boolean, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Boolean, Object] + def coerce(value, state:) + state.fetch(:exactness)[value == true || value == false ? 
:yes : :no] += 1 + value + end + + # @!parse + # # @api private + # # + # # @param value [Boolean, Object] + # # + # # @return [Boolean, Object] + # def dump(value) = super + end + end + end +end diff --git a/lib/openai/type/converter.rb b/lib/openai/type/converter.rb new file mode 100644 index 00000000..ffb349aa --- /dev/null +++ b/lib/openai/type/converter.rb @@ -0,0 +1,211 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + module Converter + # rubocop:disable Lint/UnusedMethodArgument + + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) = (raise NotImplementedError) + + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + case value + in Array + value.map { OpenAI::Unknown.dump(_1) } + in Hash + value.transform_values { OpenAI::Unknown.dump(_1) } + in OpenAI::BaseModel + value.class.dump(value) + else + value + end + end + + # rubocop:enable Lint/UnusedMethodArgument + + class << self + # @api private + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + # + # @return [Proc] + def type_info(spec) + case spec + in Proc + spec + in Hash + type_info(spec.slice(:const, :enum, :union).first&.last) + in true | false + -> { OpenAI::BooleanModel } + in OpenAI::Type::Converter | Class | Symbol + -> { spec } + in NilClass | Integer | Float + -> { spec.class } + end + end + + # @api private + # + # Based on `target`, transform `value` into `target`, to the extent possible: + # + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered + # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + # + # @param target [OpenAI::Type::Converter, Class] + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. 
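+        #
+        # A worked example (illustrative only): with the default
+        # `strictness: true`, coercing `"1"` into `Integer` succeeds via
+        # `Integer("1")` and tallies `exactness[:maybe] += 1`; coercing `"x"`
+        # rescues the `ArgumentError`, tallies `exactness[:no] += 1`, and
+        # returns `"x"` unchanged.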
+ # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce( + target, + value, + state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + ) + strictness, exactness = state.fetch_values(:strictness, :exactness) + + case target + in OpenAI::Type::Converter + return target.coerce(value, state: state) + in Class + if value.is_a?(target) + exactness[:yes] += 1 + return value + end + + case target + in -> { _1 <= NilClass } + exactness[value.nil? ? :yes : :maybe] += 1 + return nil + in -> { _1 <= Integer } + if value.is_a?(Integer) + exactness[:yes] += 1 + return value + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Integer(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end + in -> { _1 <= Float } + if value.is_a?(Numeric) + exactness[:yes] += 1 + return Float(value) + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Float(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end + in -> { _1 <= String } + case value + in String | Symbol | Numeric + exactness[value.is_a?(Numeric) ? :maybe : :yes] += 1 + return value.to_s + else + if strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + end + end + in -> { _1 <= Date || _1 <= Time } + Kernel.then do + return target.parse(value).tap { exactness[:yes] += 1 } + rescue ArgumentError, TypeError => e + raise e if strictness == :strong + end + in -> { _1 <= IO } if value.is_a?(String) + exactness[:yes] += 1 + return StringIO.new(value.b) + else + end + in Symbol + if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target + exactness[:yes] += 1 + return target + elsif strictness == :strong + message = "cannot convert non-matching #{value.class} into #{target.inspect}" + raise ArgumentError.new(message) + end + else + end + + exactness[:no] += 1 + value + end + + # @api private + # + # @param target [OpenAI::Type::Converter, Class] + # @param value [Object] + # + # @return [Object] + def dump(target, value) + target.is_a?(OpenAI::Type::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value) + end + end + end + end +end diff --git a/lib/openai/type/enum.rb b/lib/openai/type/enum.rb new file mode 100644 index 00000000..2518da97 --- /dev/null +++ b/lib/openai/type/enum.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # A value from among a specified list of options. OpenAPI enum values map to Ruby + # values in the SDK as follows: + # + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol + # + # We can therefore convert string values to Symbols, but can't convert other + # values safely. + # + # @example + # # `chat_model` is a `OpenAI::Models::ChatModel` + # case chat_model + # when OpenAI::Models::ChatModel::O3_MINI + # # ... + # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 + # # ... + # when OpenAI::Models::ChatModel::O1 + # # ... + # else + # puts(chat_model) + # end + # + # @example + # case chat_model + # in :"o3-mini" + # # ... + # in :"o3-mini-2025-01-31" + # # ... + # in :o1 + # # ... 
+ # else + # puts(chat_model) + # end + module Enum + include OpenAI::Type::Converter + + # All of the valid Symbol values for this enum. + # + # @return [Array] + def values = (@values ||= constants.map { const_get(_1) }) + + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + private def finalize! = values + + # @param other [Object] + # + # @return [Boolean] + def ===(other) = values.include?(other) + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set + end + + # @api private + # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. + # + # @param value [String, Symbol, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Symbol, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + val = value.is_a?(String) ? value.to_sym : value + + if values.include?(val) + exactness[:yes] += 1 + val + else + exactness[values.first&.class == val.class ? :maybe : :no] += 1 + value + end + end + + # @!parse + # # @api private + # # + # # @param value [Symbol, Object] + # # + # # @return [Symbol, Object] + # def dump(value) = super + end + end +end diff --git a/lib/openai/type/hash_of.rb b/lib/openai/type/hash_of.rb new file mode 100644 index 00000000..cc066188 --- /dev/null +++ b/lib/openai/type/hash_of.rb @@ -0,0 +1,130 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # @abstract + # + # Hash of items of a given type. + class HashOf + include OpenAI::Type::Converter + + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) + + # @param other [Object] + # + # @return [Boolean] + def ===(other) + type = item_type + case other + in Hash + other.all? do |key, val| + case [key, val] + in [Symbol | String, ^type] + true + else + false + end + end + else + false + end + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type + + # @api private + # + # @param value [Hash{Object=>Object}, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Hash{Symbol=>Object}, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Hash) + exactness[:no] += 1 + return value + end + + target = item_type + exactness[:yes] += 1 + value + .to_h do |key, val| + k = key.is_a?(String) ? 
key.to_sym : key + v = + case [nilable?, val] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Type::Converter.coerce(target, val, state: state) + end + + exactness[:no] += 1 unless k.is_a?(Symbol) + [k, v] + end + end + + # @api private + # + # @param value [Hash{Object=>Object}, Object] + # + # @return [Hash{Symbol=>Object}, Object] + def dump(value) + target = item_type + value.is_a?(Hash) ? value.transform_values { OpenAI::Type::Converter.dump(target, _1) } : super + end + + # @api private + # + # @return [OpenAI::Type::Converter, Class] + protected def item_type = @item_type_fn.call + + # @api private + # + # @return [Boolean] + protected def nilable? = @nilable + + # @api private + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def initialize(type_info, spec = {}) + @item_type_fn = OpenAI::Type::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] + end + end + end +end diff --git a/lib/openai/type/request_parameters.rb b/lib/openai/type/request_parameters.rb new file mode 100644 index 00000000..3d4e267e --- /dev/null +++ b/lib/openai/type/request_parameters.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + module RequestParameters + # @!parse + # # Options to specify HTTP behaviour for this request. + # # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # attr_accessor :request_options + + # @param mod [Module] + def self.included(mod) + return unless mod <= OpenAI::BaseModel + + mod.extend(OpenAI::Type::RequestParameters::Converter) + mod.optional(:request_options, OpenAI::RequestOptions) + end + + # @api private + module Converter + # @api private + # + # @param params [Object] + # + # @return [Array(Object, Hash{Symbol=>Object})] + def dump_request(params) + case (dumped = dump(params)) + in Hash + [dumped.except(:request_options), dumped[:request_options]] + else + [dumped, nil] + end + end + end + end + end +end diff --git a/lib/openai/type/union.rb b/lib/openai/type/union.rb new file mode 100644 index 00000000..c96ad89d --- /dev/null +++ b/lib/openai/type/union.rb @@ -0,0 +1,208 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # @example + # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` + # case chat_completion_content_part + # when OpenAI::Models::Chat::ChatCompletionContentPartText + # puts(chat_completion_content_part.text) + # when OpenAI::Models::Chat::ChatCompletionContentPartImage + # puts(chat_completion_content_part.image_url) + # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio + # puts(chat_completion_content_part.input_audio) + # else + # puts(chat_completion_content_part) + # end + # + # @example + # case chat_completion_content_part + # in {type: :text, text: text} + # puts(text) + # in {type: :image_url, image_url: image_url} + # puts(image_url) + # in {type: :input_audio, input_audio: input_audio} + # puts(input_audio) + # else + # puts(chat_completion_content_part) + # end + module Union + include OpenAI::Type::Converter + + # @api private + # + # All of the specified variant info for this union. 
+ # + # @return [Array] + private def known_variants = (@known_variants ||= []) + + # @api private + # + # @return [Array] + protected def derefed_variants + @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + end + + # All of the specified variants for this union. + # + # @return [Array] + def variants = derefed_variants.map(&:last) + + # @api private + # + # @param property [Symbol] + private def discriminator(property) + case property + in Symbol + @discriminator = property + end + end + + # @api private + # + # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def variant(key, spec = nil) + variant_info = + case key + in Symbol + [key, OpenAI::Type::Converter.type_info(spec)] + in Proc | OpenAI::Type::Converter | Class | Hash + [nil, OpenAI::Type::Converter.type_info(key)] + end + + known_variants << variant_info + end + + # @api private + # + # @param value [Object] + # + # @return [OpenAI::Type::Converter, Class, nil] + private def resolve_variant(value) + case [@discriminator, value] + in [_, OpenAI::BaseModel] + value.class + in [Symbol, Hash] + key = value.fetch(@discriminator) do + value.fetch(@discriminator.to_s, OpenAI::Util::OMIT) + end + + return nil if key == OpenAI::Util::OMIT + + key = key.to_sym if key.is_a?(String) + known_variants.find { |k,| k == key }&.last&.call + else + nil + end + end + + # rubocop:disable Style/HashEachMethods + # rubocop:disable Style/CaseEquality + + # @param other [Object] + # + # @return [Boolean] + def ===(other) + known_variants.any? do |_, variant_fn| + variant_fn.call === other + end + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants + end + + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) + if (target = resolve_variant(value)) + return OpenAI::Type::Converter.coerce(target, value, state: state) + end + + strictness = state.fetch(:strictness) + exactness = state.fetch(:exactness) + state[:strictness] = strictness == :strong ? true : strictness + + alternatives = [] + known_variants.each do |_, variant_fn| + target = variant_fn.call + exact = state[:exactness] = {yes: 0, no: 0, maybe: 0} + state[:branched] += 1 + + coerced = OpenAI::Type::Converter.coerce(target, value, state: state) + yes, no, maybe = exact.values + if (no + maybe).zero? || (!strictness && yes.positive?) + exact.each { exactness[_1] += _2 } + state[:exactness] = exactness + return coerced + elsif maybe.positive? 
+ alternatives << [[-yes, -maybe, no], exact, coerced] + end + end + + case alternatives.sort_by(&:first) + in [] + exactness[:no] += 1 + if strictness == :strong + message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" + raise ArgumentError.new(message) + end + value + in [[_, exact, coerced], *] + exact.each { exactness[_1] += _2 } + coerced + end + .tap { state[:exactness] = exactness } + ensure + state[:strictness] = strictness + end + + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + if (target = resolve_variant(value)) + return OpenAI::Type::Converter.dump(target, value) + end + + known_variants.each do + target = _2.call + return OpenAI::Type::Converter.dump(target, value) if target === value + end + + super + end + + # rubocop:enable Style/CaseEquality + # rubocop:enable Style/HashEachMethods + end + end +end diff --git a/lib/openai/type/unknown.rb b/lib/openai/type/unknown.rb new file mode 100644 index 00000000..7cb2567d --- /dev/null +++ b/lib/openai/type/unknown.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +module OpenAI + module Type + # @api private + # + # @abstract + # + # When we don't know what to expect for the value. + class Unknown + extend OpenAI::Type::Converter + + # rubocop:disable Lint/UnusedMethodArgument + + # @param other [Object] + # + # @return [Boolean] + def self.===(other) = true + + # @param other [Object] + # + # @return [Boolean] + def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown + + class << self + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) + state.fetch(:exactness)[:yes] += 1 + value + end + + # @!parse + # # @api private + # # + # # @param value [Object] + # # + # # @return [Object] + # def dump(value) = super + end + + # rubocop:enable Lint/UnusedMethodArgument + end + end +end diff --git a/rbi/lib/openai/base_client.rbi b/rbi/lib/openai/base_client.rbi deleted file mode 100644 index 580d6b5b..00000000 --- a/rbi/lib/openai/base_client.rbi +++ /dev/null @@ -1,196 +0,0 @@ -# typed: strong - -module OpenAI - # @api private - class BaseClient - abstract! 
- - RequestComponentsShape = - T.type_alias do - { - method: Symbol, - path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), - body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), - model: T.nilable(OpenAI::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - } - end - - RequestInputShape = - T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - max_retries: Integer, - timeout: Float - } - end - - # from whatwg fetch spec - MAX_REDIRECTS = 20 - - PLATFORM_HEADERS = T::Hash[String, String] - - class << self - # @api private - sig { params(req: OpenAI::BaseClient::RequestComponentsShape).void } - def validate!(req) - end - - # @api private - sig do - params(status: Integer, headers: T.any(T::Hash[String, String], Net::HTTPHeader)).returns(T::Boolean) - end - def should_retry?(status, headers:) - end - - # @api private - sig do - params( - request: OpenAI::BaseClient::RequestInputShape, - status: Integer, - response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) - ) - .returns(OpenAI::BaseClient::RequestInputShape) - end - def follow_redirect(request, status:, response_headers:) - end - - # @api private - sig do - params( - status: T.any(Integer, OpenAI::APIConnectionError), - stream: T.nilable(T::Enumerable[String]) - ).void - end - def reap_connection!(status, stream:) - end - end - - # @api private - sig { returns(OpenAI::PooledNetRequester) } - attr_accessor :requester - - # @api private - sig do - params( - base_url: String, - timeout: Float, - max_retries: Integer, - initial_retry_delay: Float, - max_retry_delay: Float, - headers: T::Hash[String, - T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))], - idempotency_header: T.nilable(String) - ) - .returns(T.attached_class) - end - def self.new( - base_url:, - timeout: 0.0, - max_retries: 0, - initial_retry_delay: 0.0, - max_retry_delay: 0.0, - headers: {}, - idempotency_header: nil - ) - end - - # @api private - sig { overridable.returns(T::Hash[String, String]) } - private def auth_headers - end - - # @api private - sig { returns(String) } - private def generate_idempotency_key - end - - # @api private - sig do - overridable - .params(req: OpenAI::BaseClient::RequestComponentsShape, opts: OpenAI::Util::AnyHash) - .returns(OpenAI::BaseClient::RequestInputShape) - end - private def build_request(req, opts) - end - - # @api private - sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } - private def retry_delay(headers, retry_count:) - end - - # @api private - sig do - params( - request: OpenAI::BaseClient::RequestInputShape, - redirect_count: Integer, - retry_count: Integer, - send_retry_header: T::Boolean - ) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) - end - private def send_request(request, redirect_count:, retry_count:, send_retry_header:) - end - - # Execute the request specified by `req`. This is the method that all resource - # methods call into. 
- sig do - params( - method: Symbol, - path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), - body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::BaseStream[T.anything, OpenAI::BaseModel]]), - model: T.nilable(OpenAI::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - ) - .returns(T.anything) - end - def request( - method, - path, - query: {}, - headers: {}, - body: nil, - unwrap: nil, - page: nil, - stream: nil, - model: OpenAI::Unknown, - options: {} - ) - end - - sig { returns(String) } - def inspect - end - end -end diff --git a/rbi/lib/openai/base_model.rbi b/rbi/lib/openai/base_model.rbi deleted file mode 100644 index 412a31e7..00000000 --- a/rbi/lib/openai/base_model.rbi +++ /dev/null @@ -1,604 +0,0 @@ -# typed: strong - -module OpenAI - # @api private - module Converter - Input = T.type_alias { T.any(OpenAI::Converter, T::Class[T.anything]) } - - State = - T.type_alias do - { - strictness: T.any(T::Boolean, Symbol), - exactness: {yes: Integer, no: Integer, maybe: Integer}, - branched: Integer - } - end - - # @api private - sig { overridable.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } - def coerce(value, state:) - end - - # @api private - sig { overridable.params(value: T.anything).returns(T.anything) } - def dump(value) - end - - class << self - # @api private - sig do - params( - spec: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Converter::Input)) - }, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ) - ) - .returns(T.proc.returns(T.anything)) - end - def self.type_info(spec) - end - - # @api private - # - # Based on `target`, transform `value` into `target`, to the extent possible: - # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered - # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode - sig do - params(target: OpenAI::Converter::Input, value: T.anything, state: OpenAI::Converter::State) - .returns(T.anything) - end - def self.coerce( - target, - value, - # The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: - # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. - # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: - # - # - `yes`: the value can be converted to the target type with minimum coercion. 
- # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. - # - # See implementation below for more details. - state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - ) - end - - # @api private - sig { params(target: OpenAI::Converter::Input, value: T.anything).returns(T.anything) } - def self.dump(target, value) - end - end - end - - # @api private - # - # When we don't know what to expect for the value. - class Unknown - extend OpenAI::Converter - - abstract! - final! - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.==(other) - end - - class << self - # @api private - sig(:final) { override.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } - def coerce(value, state:) - end - - # @api private - sig(:final) { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end - end - end - - # @api private - # - # Ruby has no Boolean class; this is something for models to refer to. - class BooleanModel - extend OpenAI::Converter - - abstract! - final! - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.==(other) - end - - class << self - # @api private - sig(:final) do - override - .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Converter::State) - .returns(T.any(T::Boolean, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) - end - def dump(value) - end - end - end - - # @api private - # - # A value from among a specified list of options. OpenAPI enum values map to Ruby - # values in the SDK as follows: - # - # 1. boolean => true | false - # 2. integer => Integer - # 3. float => Float - # 4. string => Symbol - # - # We can therefore convert string values to Symbols, but can't convert other - # values safely. - module Enum - include OpenAI::Converter - - # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } - def values - end - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - sig { void } - private def finalize! - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - # - # Unlike with primitives, `Enum` additionally validates that the value is a member - # of the enum. - sig do - override - .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Converter::State) - .returns(T.any(Symbol, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value) - end - end - - # @api private - module Union - include OpenAI::Converter - - # @api private - # - # All of the specified variant info for this union. 
- sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Converter::Input)]]) } - private def known_variants - end - - # @api private - sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } - protected def derefed_variants - end - - # All of the specified variants for this union. - sig { overridable.returns(T::Array[T.anything]) } - def variants - end - - # @api private - sig { params(property: Symbol).void } - private def discriminator(property) - end - - # @api private - sig do - params( - key: T.any(Symbol, OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything), - spec: T.any(OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything) - ) - .void - end - private def variant(key, spec = nil) - end - - # @api private - sig { params(value: T.anything).returns(T.nilable(T.anything)) } - private def resolve_variant(value) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig { override.params(value: T.anything, state: OpenAI::Converter::State).returns(T.anything) } - def coerce(value, state:) - end - - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end - end - - # @api private - # - # Array of items of a given type. - class ArrayOf - include OpenAI::Converter - - abstract! - final! - - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .returns(T.attached_class) - end - def self.[](type_info, spec = {}) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Enumerable[T.anything], T.anything), state: OpenAI::Converter::State) - .returns(T.any(T::Array[T.anything], T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Enumerable[T.anything], T.anything)) - .returns(T.any(T::Array[T.anything], T.anything)) - end - def dump(value) - end - - # @api private - sig(:final) { returns(T.anything) } - protected def item_type - end - - # @api private - sig(:final) { returns(T::Boolean) } - protected def nilable? - end - - # @api private - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def initialize(type_info, spec = {}) - end - end - - # @api private - # - # Hash of items of a given type. - class HashOf - include OpenAI::Converter - - abstract! - final! 
- - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .returns(T.attached_class) - end - def self.[](type_info, spec = {}) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Hash[T.anything, T.anything], T.anything), state: OpenAI::Converter::State) - .returns(T.any(OpenAI::Util::AnyHash, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) - .returns(T.any(OpenAI::Util::AnyHash, T.anything)) - end - def dump(value) - end - - # @api private - sig(:final) { returns(T.anything) } - protected def item_type - end - - # @api private - sig(:final) { returns(T::Boolean) } - protected def nilable? - end - - # @api private - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def initialize(type_info, spec = {}) - end - end - - class BaseModel - extend OpenAI::Converter - - abstract! - - KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } - - class << self - # @api private - # - # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. - sig do - returns( - T::Hash[Symbol, - T.all( - OpenAI::BaseModel::KnownFieldShape, - {type_fn: T.proc.returns(OpenAI::Converter::Input)} - )] - ) - end - def known_fields - end - - # @api private - sig do - returns(T::Hash[Symbol, T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Converter::Input})]) - end - def fields - end - - # @api private - sig do - params( - name_sym: Symbol, - required: T::Boolean, - type_info: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Converter::Input)), - api_name: Symbol, - nil?: T::Boolean - }, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - private def add_field(name_sym, required:, type_info:, spec:) - end - - # @api private - sig do - params( - name_sym: Symbol, - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def required(name_sym, type_info, spec = {}) - end - - # @api private - sig do - params( - name_sym: Symbol, - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Converter::Input), - OpenAI::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def optional(name_sym, type_info, spec = {}) - end - - # @api private - # - # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them - sig { params(blk: T.proc.void).void } - private def request_only(&blk) - end - - # @api private - # - # `response_only` attributes are omitted from `.#dump` when making requests - sig { params(blk: T.proc.void).void } - private def response_only(&blk) - end - - sig { params(other: T.anything).returns(T::Boolean) } - 
def ==(other) - end - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - class << self - # @api private - sig do - override - .params( - value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), - state: OpenAI::Converter::State - ) - .returns(T.any(T.attached_class, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig do - override - .params(value: T.any(T.attached_class, T.anything)) - .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) - end - def dump(value) - end - end - - # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. - # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. - sig { params(key: Symbol).returns(T.nilable(T.anything)) } - def [](key) - end - - # Returns a Hash of the data underlying this object. O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - sig { overridable.returns(OpenAI::Util::AnyHash) } - def to_h - end - - # Returns a Hash of the data underlying this object. O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - sig { overridable.returns(OpenAI::Util::AnyHash) } - def to_hash - end - - sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Util::AnyHash) } - def deconstruct_keys(keys) - end - - # Create a new instance of a model. - sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } - def self.new(data = {}) - end - - sig { returns(String) } - def inspect - end - end -end diff --git a/rbi/lib/openai/base_page.rbi b/rbi/lib/openai/base_page.rbi deleted file mode 100644 index 6c91ffcc..00000000 --- a/rbi/lib/openai/base_page.rbi +++ /dev/null @@ -1,36 +0,0 @@ -# typed: strong - -module OpenAI - module BasePage - Elem = type_member(:out) - - sig { overridable.returns(T::Boolean) } - def next_page? 
- end - - sig { overridable.returns(T.self_type) } - def next_page - end - - sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void } - def auto_paging_each(&blk) - end - - sig { returns(T::Enumerable[Elem]) } - def to_enum - end - - # @api private - sig do - params( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::RequestComponentsShape, - headers: T.any(T::Hash[String, String], Net::HTTPHeader), - page_data: T.anything - ) - .void - end - def initialize(client:, req:, headers:, page_data:) - end - end -end diff --git a/rbi/lib/openai/base_stream.rbi b/rbi/lib/openai/base_stream.rbi deleted file mode 100644 index dee8cc83..00000000 --- a/rbi/lib/openai/base_stream.rbi +++ /dev/null @@ -1,41 +0,0 @@ -# typed: strong - -module OpenAI - module BaseStream - include Enumerable - - Message = type_member(:in) - Elem = type_member(:out) - - sig { void } - def close - end - - # @api private - sig { overridable.returns(T::Enumerable[Elem]) } - private def iterator - end - - sig { params(blk: T.proc.params(arg0: Elem).void).void } - def each(&blk) - end - - sig { returns(T::Enumerator[Elem]) } - def to_enum - end - - # @api private - sig do - params( - model: T.any(T::Class[T.anything], OpenAI::Converter), - url: URI::Generic, - status: Integer, - response: Net::HTTPResponse, - stream: T::Enumerable[Message] - ) - .void - end - def initialize(model:, url:, status:, response:, stream:) - end - end -end diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 556aaeec..5f320546 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -1,7 +1,7 @@ # typed: strong module OpenAI - class Client < OpenAI::BaseClient + class Client < OpenAI::Transport::BaseClient DEFAULT_MAX_RETRIES = 2 DEFAULT_TIMEOUT_IN_SECONDS = T.let(600.0, Float) diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi index 126a899e..e167d210 100644 --- a/rbi/lib/openai/cursor_page.rbi +++ b/rbi/lib/openai/cursor_page.rbi @@ -2,7 +2,7 @@ module OpenAI class CursorPage - include OpenAI::BasePage + include OpenAI::Type::BasePage Elem = type_member diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 626ade1f..8110c846 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio class SpeechCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The text to generate audio for. The maximum length is 4096 characters. 
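The `*_params.rbi` hunks from here on are mechanical renames of `OpenAI::RequestParameters::Converter` to its new home, `OpenAI::Type::RequestParameters::Converter`. For orientation, a rough sketch of what this pair of mixins gives a params model, based on the `lib/openai/type/request_parameters.rb` added above; the class name and `limit` field are hypothetical:

    class ExampleListParams < OpenAI::BaseModel
      extend OpenAI::Type::RequestParameters::Converter
      include OpenAI::RequestParameters # its `included` hook adds the `request_options` field

      optional :limit, Integer
    end

    params = ExampleListParams.new(limit: 10, request_options: {timeout: 5})
    # `dump_request` splits the serialized body from the per-request options:
    body, opts = ExampleListParams.dump_request(params)
    body # => {limit: 10} (expected)
    opts # => {timeout: 5} (expected)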
diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 664bc188..a4cfde33 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio class TranscriptionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The audio file object (not file name) to transcribe, in one of these formats: diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index a6f7d249..1f264107 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio class TranslationCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The audio file object (not file name) translate, in one of these formats: flac, diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/lib/openai/models/batch_cancel_params.rbi index 4a71901d..3abd1e78 100644 --- a/rbi/lib/openai/models/batch_cancel_params.rbi +++ b/rbi/lib/openai/models/batch_cancel_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class BatchCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 0cdcccff..bc8dd159 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class BatchCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The time frame within which the batch should be processed. Currently only `24h` diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 8fb64add..571dd654 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class BatchListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/lib/openai/models/batch_retrieve_params.rbi index cb2b72ff..b984cd5d 100644 --- a/rbi/lib/openai/models/batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/batch_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class BatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index c4111c93..75169d1b 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class AssistantCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # ID of the model to use. You can use the diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/lib/openai/models/beta/assistant_delete_params.rbi index ef0b02c4..ca945e48 100644 --- a/rbi/lib/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class AssistantDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 4cbb19dc..1657931e 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class AssistantListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi index fcad6743..cb2206a7 100644 --- a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class AssistantRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 2cb5a2dd..874e84be 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class AssistantUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The description of the assistant. The maximum length is 512 characters. 
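Several hunks below also re-declare array aliases such as `MessageContentPartParamArray` as `T.let(..., OpenAI::Type::Converter)`; only the Sorbet annotation changes, and at runtime these remain `OpenAI::ArrayOf` converters from `lib/openai/type/array_of.rb`. A small sketch of how such a converter behaves, with a made-up `Integer` element type standing in for the real `MessageContentPartParam` union:

    ids = OpenAI::ArrayOf[Integer]
    state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}

    ids === [1, 2, 3]                  # => true: every element matches Integer
    ids === [1, "2"]                   # => false
    # Coercion converts element-wise, tallying exactness along the way:
    ids.coerce(["1", 2], state: state) # => [1, 2] (expected; "1" tallies :maybe)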
diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index e88a0645..5d33277e 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class ThreadCreateAndRunParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The ID of the @@ -483,7 +483,10 @@ module OpenAI end MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Type::Converter + ) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 4029540a..5115f982 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class ThreadCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to @@ -180,7 +180,10 @@ module OpenAI end MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Type::Converter + ) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/lib/openai/models/beta/thread_delete_params.rbi index a4ea2d8b..0b43e22c 100644 --- a/rbi/lib/openai/models/beta/thread_delete_params.rbi +++ b/rbi/lib/openai/models/beta/thread_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class ThreadDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi index 666d7bb1..436ff67f 100644 --- a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class ThreadRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index d5156fc0..227c5099 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta class ThreadUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Set of 16 key-value pairs that can be attached to an object. 
This can be useful diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 07e24f42..2045cd26 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class MessageCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The text contents of the message. @@ -119,7 +119,10 @@ module OpenAI end MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Type::Converter + ) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index dd6b3c5a..1149a983 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class MessageDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 32f85d3e..545445aa 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class MessageListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index 3c2f8ecd..e2f6c363 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class MessageRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index fde62361..6af1915e 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class MessageUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index d7bb357e..c84678a8 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index cc320d8b..1fb7e4b3 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The ID of the @@ -431,7 +431,10 @@ module OpenAI end MessageContentPartParamArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Type::Converter + ) end # The role of the entity that is creating the message. Allowed values include: diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index daca9195..d6f27922 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index 56e89445..24cfa0e3 100644 --- a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 48f5b347..98e8df09 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunSubmitToolOutputsParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 65bfc898..1067298a 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads class RunUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index b978312e..52605110 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -6,7 +6,7 @@ module OpenAI module Threads module Runs class StepListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index c65e2b00..15b2fdd2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -6,7 +6,7 @@ module OpenAI module Threads module Runs class StepRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 887423f7..84e2f441 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -191,7 +191,7 @@ module OpenAI ArrayOfContentPartArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], - OpenAI::Converter + OpenAI::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi 
b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 3efb534b..ef698edc 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -56,7 +56,7 @@ module OpenAI end ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 2b80637d..5c908db7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -47,7 +47,7 @@ module OpenAI end ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index edf0ff22..f04bdf4a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -56,7 +56,7 @@ module OpenAI end ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index bfb45b0a..4956394b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -49,7 +49,7 @@ module OpenAI end ChatCompletionContentPartTextArray = - T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Converter) + T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 64034045..c234e478 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -105,7 +105,7 @@ module OpenAI end ChatCompletionContentPartArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Converter) + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 08d3dc99..72a30f17 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A list of messages comprising the 
conversation so far. Depending on the @@ -709,7 +709,7 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) end class WebSearchOptions < OpenAI::BaseModel diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/lib/openai/models/chat/completion_delete_params.rbi index 5aeae8b6..9bc3d10e 100644 --- a/rbi/lib/openai/models/chat/completion_delete_params.rbi +++ b/rbi/lib/openai/models/chat/completion_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class CompletionDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 3cf1ef88..e488c018 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class CompletionListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Identifier for the last chat completion from the previous pagination request. diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi index a8822b3a..fe74c4a3 100644 --- a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi +++ b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class CompletionRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index c039b140..d9801164 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class CompletionUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Set of 16 key-value pairs that can be attached to an object. This can be useful diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 950d7ce7..57006149 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Chat module Completions class MessageListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Identifier for the last message from the previous pagination request. 
diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 4c623a0b..76ecc07b 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # ID of the model to use. You can use the @@ -277,11 +277,11 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) - IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) + IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Type::Converter) - ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) + ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Type::Converter) end # Up to 4 sequences where the API will stop generating further tokens. The @@ -293,7 +293,7 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index aab30870..9aac2355 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class EmbeddingCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Input text to embed, encoded as a string or array of tokens. To embed multiple @@ -95,11 +95,11 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) - IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Converter) + IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Type::Converter) - ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Converter) + ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Type::Converter) end # ID of the model to use. You can use the diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/lib/openai/models/file_content_params.rbi index 6d0e6bb7..2a195cd2 100644 --- a/rbi/lib/openai/models/file_content_params.rbi +++ b/rbi/lib/openai/models/file_content_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class FileContentParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index c25ae968..8813232a 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class FileCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The File object (not file name) to be uploaded. 
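The `StringArray`, `IntegerArray`, and `ArrayOfToken2DArray` aliases above show the other half of the rename: values built with `OpenAI::ArrayOf` are now annotated as `OpenAI::Type::Converter` rather than `OpenAI::Converter`. A rough sketch of the pattern, using a hypothetical alias name:

require "openai"

module OpenAI
  module Models
    # Hypothetical alias mirroring the annotations in this patch. T.let only
    # informs Sorbet; the runtime value is still the ArrayOf converter itself.
    TokenRowArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Type::Converter)
  end
end

Per the `===` signature declared in the new `type/array_of.rbi`, such an alias should also work in case/in pattern matching against candidate values.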
diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/lib/openai/models/file_delete_params.rbi index c934741d..3d675373 100644 --- a/rbi/lib/openai/models/file_delete_params.rbi +++ b/rbi/lib/openai/models/file_delete_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index e479a1d7..027ccc87 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class FileListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi b/rbi/lib/openai/models/file_retrieve_params.rbi index 261382f7..579f51ba 100644 --- a/rbi/lib/openai/models/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/file_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi index 529378d5..d9429091 100644 --- a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning class JobCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 604cf39f..3844e2ce 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning class JobCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The name of the model to fine-tune. You can select one of the diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 52f286e3..0bfecb02 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning class JobListEventsParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Identifier for the last event from the previous pagination request. 
diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 52610539..62697ccd 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning class JobListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Identifier for the last job from the previous pagination request. diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi index 9c617afd..4fadf667 100644 --- a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning class JobRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index 793f2f71..70dcc5c6 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -5,7 +5,7 @@ module OpenAI module FineTuning module Jobs class CheckpointListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Identifier for the last checkpoint ID from the previous pagination request. diff --git a/rbi/lib/openai/models/function_parameters.rbi b/rbi/lib/openai/models/function_parameters.rbi index 7f79a305..8b20436c 100644 --- a/rbi/lib/openai/models/function_parameters.rbi +++ b/rbi/lib/openai/models/function_parameters.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Converter) + FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Type::Converter) end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 182d0c91..364651ff 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ImageCreateVariationParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The image to use as the basis for the variation(s). Must be a valid PNG file, diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 8d058a7a..130b145b 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ImageEditParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The image to edit. Must be a valid PNG file, less than 4MB, and square. 
If mask diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 79804bf5..5cdd5c58 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ImageGenerateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A text description of the desired image(s). The maximum length is 1000 diff --git a/rbi/lib/openai/models/metadata.rbi b/rbi/lib/openai/models/metadata.rbi index e09a2c38..9b99c6d0 100644 --- a/rbi/lib/openai/models/metadata.rbi +++ b/rbi/lib/openai/models/metadata.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - Metadata = T.let(OpenAI::HashOf[String], OpenAI::Converter) + Metadata = T.let(OpenAI::HashOf[String], OpenAI::Type::Converter) end end diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/lib/openai/models/model_delete_params.rbi index 5e653702..53253e49 100644 --- a/rbi/lib/openai/models/model_delete_params.rbi +++ b/rbi/lib/openai/models/model_delete_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ModelDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/lib/openai/models/model_list_params.rbi index d3992e7a..ccea7e8a 100644 --- a/rbi/lib/openai/models/model_list_params.rbi +++ b/rbi/lib/openai/models/model_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ModelListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/lib/openai/models/model_retrieve_params.rbi index 7be752d3..c6b05f8c 100644 --- a/rbi/lib/openai/models/model_retrieve_params.rbi +++ b/rbi/lib/openai/models/model_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ModelRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index b410d597..0f79bac9 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class ModerationCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Input (or inputs) to classify. Can be a single string, an array of strings, or @@ -75,10 +75,10 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) ModerationMultiModalInputArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Converter) + T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Type::Converter) end # The content moderation model you would like to use. 
Learn more in diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 36ee8426..bbd299bd 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class InputItemListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # An item ID to list items after, used in pagination. diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 3204e5db..305eb10e 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Text, image, or file inputs to the model, used to generate a response. diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/lib/openai/models/responses/response_delete_params.rbi index 0dfb6fe2..92e2ddb7 100644 --- a/rbi/lib/openai/models/responses/response_delete_params.rbi +++ b/rbi/lib/openai/models/responses/response_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/lib/openai/models/responses/response_input.rbi index 5b6d68df..d903e04f 100644 --- a/rbi/lib/openai/models/responses/response_input.rbi +++ b/rbi/lib/openai/models/responses/response_input.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses ResponseInput = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], OpenAI::Converter) + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi index 14a913ae..174cada3 100644 --- a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses ResponseInputMessageContentList = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], OpenAI::Converter) + T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], OpenAI::Type::Converter) end end end diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index da5deb7b..df3a5dc9 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # Additional fields to include in the 
response. See the `include` parameter for diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/lib/openai/models/upload_cancel_params.rbi index fac39581..7fb18dd3 100644 --- a/rbi/lib/openai/models/upload_cancel_params.rbi +++ b/rbi/lib/openai/models/upload_cancel_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class UploadCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index 676ce576..24f09fa4 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class UploadCompleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The ordered list of Part IDs. diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 9fdac8ce..1e0390bf 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class UploadCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The number of bytes in the file you are uploading. diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index a73f5e78..c6f213d5 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Uploads class PartCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The chunk of bytes for this Part. diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 3799a1d8..e3f9c4b1 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/lib/openai/models/vector_store_delete_params.rbi index a7ddf5aa..d01c30e0 100644 --- a/rbi/lib/openai/models/vector_store_delete_params.rbi +++ b/rbi/lib/openai/models/vector_store_delete_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index aa0fd8d8..a46b89ef 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/lib/openai/models/vector_store_retrieve_params.rbi index dd7b68d3..7e591036 100644 --- a/rbi/lib/openai/models/vector_store_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_store_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig do diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 4996c14f..c4ff79e9 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreSearchParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A query string for a search @@ -87,7 +87,7 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) end # A filter to apply based on file attributes. diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index e7c42739..04887f7e 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class VectorStoreUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # The expiration policy for a vector store. 
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi index 764c2bb2..0a18aa15 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileBatchCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 089e78d9..2f1c7b95 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileBatchCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 582e82a0..531c3d57 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileBatchListFilesParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi index c637f31e..16e593c7 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileBatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi index 6d9ffc4f..9558d156 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileContentParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index d032eda8..1acbeb21 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A 
[File](https://platform.openai.com/docs/api-reference/files) ID that the diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi index 4c8aa466..9ffd9fb3 100644 --- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 7aaf42a7..0043569a 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi index a7d5c319..8ac6b042 100644 --- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 228b23aa..a8c04b54 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores class FileUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters sig { returns(String) } diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi index 5ef61d62..8a668d1c 100644 --- a/rbi/lib/openai/page.rbi +++ b/rbi/lib/openai/page.rbi @@ -2,7 +2,7 @@ module OpenAI class Page - include OpenAI::BasePage + include OpenAI::Type::BasePage Elem = type_member diff --git a/rbi/lib/openai/pooled_net_requester.rbi b/rbi/lib/openai/pooled_net_requester.rbi deleted file mode 100644 index 43651130..00000000 --- a/rbi/lib/openai/pooled_net_requester.rbi +++ /dev/null @@ -1,59 +0,0 @@ -# typed: strong - -module OpenAI - # @api private - class PooledNetRequester - RequestShape = - T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - deadline: Float - } - end - - # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 - KEEP_ALIVE_TIMEOUT = 30 - - class << self - # @api private - sig { params(url: URI::Generic).returns(Net::HTTP) } - def connect(url) - end - - # @api private - sig { params(conn: Net::HTTP, deadline: Float).void } - def calibrate_socket_timeout(conn, deadline) - end - - # @api private - sig do - params(request: 
OpenAI::PooledNetRequester::RequestShape, blk: T.proc.params(arg0: String).void) - .returns(Net::HTTPGenericRequest) - end - def build_request(request, &blk) - end - end - - # @api private - sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } - private def with_pool(url, deadline:, &blk) - end - - # @api private - sig do - params(request: OpenAI::PooledNetRequester::RequestShape) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) - end - def execute(request) - end - - # @api private - sig { params(size: Integer).returns(T.attached_class) } - def self.new(size: Etc.nprocessors) - end - end -end diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index 51fb5512..4e963f12 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -1,21 +1,6 @@ # typed: strong module OpenAI - # @api private - module RequestParameters - # Options to specify HTTP behaviour for this request. - sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) } - attr_accessor :request_options - - # @api private - module Converter - # @api private - sig { params(params: T.anything).returns([T.anything, OpenAI::Util::AnyHash]) } - def dump_request(params) - end - end - end - # Specify HTTP behaviour to use for a specific request. These options supplement # or override those provided at the client level. # diff --git a/rbi/lib/openai/stream.rbi b/rbi/lib/openai/stream.rbi index 75e469ce..ba0c2f8a 100644 --- a/rbi/lib/openai/stream.rbi +++ b/rbi/lib/openai/stream.rbi @@ -2,7 +2,7 @@ module OpenAI class Stream - include OpenAI::BaseStream + include OpenAI::Type::BaseStream Message = type_member(:in) { {fixed: OpenAI::Util::ServerSentEvent} } Elem = type_member(:out) diff --git a/rbi/lib/openai/transport/base_client.rbi b/rbi/lib/openai/transport/base_client.rbi new file mode 100644 index 00000000..7deae432 --- /dev/null +++ b/rbi/lib/openai/transport/base_client.rbi @@ -0,0 +1,204 @@ +# typed: strong + +module OpenAI + module Transport + # @api private + class BaseClient + abstract! 
+ + RequestComponentsShape = + T.type_alias do + { + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable(Symbol), + page: T.nilable(T::Class[OpenAI::Type::BasePage[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + model: T.nilable(OpenAI::Type::Converter::Input), + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + } + end + + RequestInputShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + max_retries: Integer, + timeout: Float + } + end + + # from whatwg fetch spec + MAX_REDIRECTS = 20 + + PLATFORM_HEADERS = T::Hash[String, String] + + class << self + # @api private + sig { params(req: OpenAI::Transport::BaseClient::RequestComponentsShape).void } + def validate!(req) + end + + # @api private + sig do + params( + status: Integer, + headers: T.any( + T::Hash[String, String], + Net::HTTPHeader + ) + ).returns(T::Boolean) + end + def should_retry?(status, headers:) + end + + # @api private + sig do + params( + request: OpenAI::Transport::BaseClient::RequestInputShape, + status: Integer, + response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) + ) + .returns(OpenAI::Transport::BaseClient::RequestInputShape) + end + def follow_redirect(request, status:, response_headers:) + end + + # @api private + sig do + params( + status: T.any(Integer, OpenAI::APIConnectionError), + stream: T.nilable(T::Enumerable[String]) + ).void + end + def reap_connection!(status, stream:) + end + end + + # @api private + sig { returns(OpenAI::Transport::PooledNetRequester) } + attr_accessor :requester + + # @api private + sig do + params( + base_url: String, + timeout: Float, + max_retries: Integer, + initial_retry_delay: Float, + max_retry_delay: Float, + headers: T::Hash[String, + T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))], + idempotency_header: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new( + base_url:, + timeout: 0.0, + max_retries: 0, + initial_retry_delay: 0.0, + max_retry_delay: 0.0, + headers: {}, + idempotency_header: nil + ) + end + + # @api private + sig { overridable.returns(T::Hash[String, String]) } + private def auth_headers + end + + # @api private + sig { returns(String) } + private def generate_idempotency_key + end + + # @api private + sig do + overridable + .params(req: OpenAI::Transport::BaseClient::RequestComponentsShape, opts: OpenAI::Util::AnyHash) + .returns(OpenAI::Transport::BaseClient::RequestInputShape) + end + private def build_request(req, opts) + end + + # @api private + sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } + private def retry_delay(headers, retry_count:) + end + + # @api private + sig do + params( + request: OpenAI::Transport::BaseClient::RequestInputShape, + redirect_count: Integer, + retry_count: Integer, + send_retry_header: T::Boolean + ) + .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + private def send_request(request, redirect_count:, retry_count:, send_retry_header:) + end + + # Execute the request specified by `req`. This is the method that all resource + # methods call into. 
+ sig do + params( + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable(Symbol), + page: T.nilable(T::Class[OpenAI::Type::BasePage[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + model: T.nilable(OpenAI::Type::Converter::Input), + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + ) + .returns(T.anything) + end + def request( + method, + path, + query: {}, + headers: {}, + body: nil, + unwrap: nil, + page: nil, + stream: nil, + model: OpenAI::Unknown, + options: {} + ) + end + + sig { returns(String) } + def inspect + end + end + end +end diff --git a/rbi/lib/openai/transport/pooled_net_requester.rbi b/rbi/lib/openai/transport/pooled_net_requester.rbi new file mode 100644 index 00000000..d90c1881 --- /dev/null +++ b/rbi/lib/openai/transport/pooled_net_requester.rbi @@ -0,0 +1,64 @@ +# typed: strong + +module OpenAI + module Transport + # @api private + class PooledNetRequester + RequestShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + deadline: Float + } + end + + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + + class << self + # @api private + sig { params(url: URI::Generic).returns(Net::HTTP) } + def connect(url) + end + + # @api private + sig { params(conn: Net::HTTP, deadline: Float).void } + def calibrate_socket_timeout(conn, deadline) + end + + # @api private + sig do + params( + request: OpenAI::Transport::PooledNetRequester::RequestShape, + blk: T.proc.params(arg0: String).void + ) + .returns(Net::HTTPGenericRequest) + end + def build_request(request, &blk) + end + end + + # @api private + sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } + private def with_pool(url, deadline:, &blk) + end + + # @api private + sig do + params(request: OpenAI::Transport::PooledNetRequester::RequestShape) + .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + def execute(request) + end + + # @api private + sig { params(size: Integer).returns(T.attached_class) } + def self.new(size: Etc.nprocessors) + end + end + end +end diff --git a/rbi/lib/openai/type.rbi b/rbi/lib/openai/type.rbi new file mode 100644 index 00000000..a52a8d3b --- /dev/null +++ b/rbi/lib/openai/type.rbi @@ -0,0 +1,23 @@ +# typed: strong + +module OpenAI + Unknown = OpenAI::Type::Unknown + + BooleanModel = OpenAI::Type::BooleanModel + + Enum = OpenAI::Type::Enum + + Union = OpenAI::Type::Union + + ArrayOf = OpenAI::Type::ArrayOf + + HashOf = OpenAI::Type::HashOf + + BaseModel = OpenAI::Type::BaseModel + + RequestParameters = OpenAI::Type::RequestParameters + + # This module contains various type declarations. + module Type + end +end diff --git a/rbi/lib/openai/type/array_of.rbi b/rbi/lib/openai/type/array_of.rbi new file mode 100644 index 00000000..4c6fe0a1 --- /dev/null +++ b/rbi/lib/openai/type/array_of.rbi @@ -0,0 +1,80 @@ +# typed: strong + +module OpenAI + module Type + # @api private + # + # Array of items of a given type. + class ArrayOf + include OpenAI::Type::Converter + + abstract! + final! 
+ + sig(:final) do + params( + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig(:final) do + override + .params(value: T.any(T::Enumerable[T.anything], T.anything), state: OpenAI::Type::Converter::State) + .returns(T.any(T::Array[T.anything], T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override + .params(value: T.any(T::Enumerable[T.anything], T.anything)) + .returns(T.any(T::Array[T.anything], T.anything)) + end + def dump(value) + end + + # @api private + sig(:final) { returns(T.anything) } + protected def item_type + end + + # @api private + sig(:final) { returns(T::Boolean) } + protected def nilable? + end + + # @api private + sig(:final) do + params( + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .void + end + def initialize(type_info, spec = {}) + end + end + end +end diff --git a/rbi/lib/openai/type/base_model.rbi b/rbi/lib/openai/type/base_model.rbi new file mode 100644 index 00000000..e379903e --- /dev/null +++ b/rbi/lib/openai/type/base_model.rbi @@ -0,0 +1,189 @@ +# typed: strong + +module OpenAI + module Type + class BaseModel + extend OpenAI::Type::Converter + + abstract! + + KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } + + class << self + # @api private + # + # Assumes superclass fields are totally defined before fields are accessed / + # defined on subclasses. 
+ sig do + returns( + T::Hash[Symbol, + T.all( + OpenAI::BaseModel::KnownFieldShape, + {type_fn: T.proc.returns(OpenAI::Type::Converter::Input)} + )] + ) + end + def known_fields + end + + # @api private + sig do + returns( + T::Hash[Symbol, + T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Type::Converter::Input})] + ) + end + def fields + end + + # @api private + sig do + params( + name_sym: Symbol, + required: T::Boolean, + type_info: T.any( + { + const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), + enum: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), + union: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), + api_name: Symbol, + nil?: T::Boolean + }, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .void + end + private def add_field(name_sym, required:, type_info:, spec:) + end + + # @api private + sig do + params( + name_sym: Symbol, + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .void + end + def required(name_sym, type_info, spec = {}) + end + + # @api private + sig do + params( + name_sym: Symbol, + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .void + end + def optional(name_sym, type_info, spec = {}) + end + + # @api private + # + # `request_only` attributes are not excluded from `.#coerce` when receiving responses, + # even though well-behaved servers should not send them + sig { params(blk: T.proc.void).void } + private def request_only(&blk) + end + + # @api private + # + # `response_only` attributes are omitted from `.#dump` when making requests + sig { params(blk: T.proc.void).void } + private def response_only(&blk) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + class << self + # @api private + sig do + override + .params( + value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), + state: OpenAI::Type::Converter::State + ) + .returns(T.any(T.attached_class, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig do + override + .params(value: T.any(T.attached_class, T.anything)) + .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) + end + def dump(value) + end + end + + # Returns the raw value associated with the given key, if found. Otherwise, nil is + # returned. + # + # It is valid to look up keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol raises an ArgumentError. + sig { params(key: Symbol).returns(T.nilable(T.anything)) } + def [](key) + end + + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. + sig { overridable.returns(OpenAI::Util::AnyHash) } + def to_h + end + + # Returns a Hash of the data underlying this object.
O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. + sig { overridable.returns(OpenAI::Util::AnyHash) } + def to_hash + end + + sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Util::AnyHash) } + def deconstruct_keys(keys) + end + + # Create a new instance of a model. + sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } + def self.new(data = {}) + end + + sig { returns(String) } + def inspect + end + end + end +end diff --git a/rbi/lib/openai/type/base_page.rbi b/rbi/lib/openai/type/base_page.rbi new file mode 100644 index 00000000..03abe2c1 --- /dev/null +++ b/rbi/lib/openai/type/base_page.rbi @@ -0,0 +1,38 @@ +# typed: strong + +module OpenAI + module Type + module BasePage + Elem = type_member(:out) + + sig { overridable.returns(T::Boolean) } + def next_page? + end + + sig { overridable.returns(T.self_type) } + def next_page + end + + sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void } + def auto_paging_each(&blk) + end + + sig { returns(T::Enumerable[Elem]) } + def to_enum + end + + # @api private + sig do + params( + client: OpenAI::Transport::BaseClient, + req: OpenAI::Transport::BaseClient::RequestComponentsShape, + headers: T.any(T::Hash[String, String], Net::HTTPHeader), + page_data: T.anything + ) + .void + end + def initialize(client:, req:, headers:, page_data:) + end + end + end +end diff --git a/rbi/lib/openai/type/base_stream.rbi b/rbi/lib/openai/type/base_stream.rbi new file mode 100644 index 00000000..8fa7098c --- /dev/null +++ b/rbi/lib/openai/type/base_stream.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Type + module BaseStream + include Enumerable + + Message = type_member(:in) + Elem = type_member(:out) + + sig { void } + def close + end + + # @api private + sig { overridable.returns(T::Enumerable[Elem]) } + private def iterator + end + + sig { params(blk: T.proc.params(arg0: Elem).void).void } + def each(&blk) + end + + sig { returns(T::Enumerator[Elem]) } + def to_enum + end + + # @api private + sig do + params( + model: T.any(T::Class[T.anything], OpenAI::Type::Converter), + url: URI::Generic, + status: Integer, + response: Net::HTTPResponse, + stream: T::Enumerable[Message] + ) + .void + end + def initialize(model:, url:, status:, response:, stream:) + end + end + end +end diff --git a/rbi/lib/openai/type/boolean_model.rbi b/rbi/lib/openai/type/boolean_model.rbi new file mode 100644 index 00000000..96efcadd --- /dev/null +++ b/rbi/lib/openai/type/boolean_model.rbi @@ -0,0 +1,41 @@ +# typed: strong + +module OpenAI + module Type + # @api private + # + # Ruby has no Boolean class; this is something for models to refer to. + class BooleanModel + extend OpenAI::Type::Converter + + abstract! + final! 
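+
+      # Note: `===` is expected to match exactly `true` and `false`, so this
+      # class can stand in for a boolean type in `case` comparisons and
+      # pattern matches.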
+ + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.==(other) + end + + class << self + # @api private + sig(:final) do + override + .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Type::Converter::State) + .returns(T.any(T::Boolean, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) + end + def dump(value) + end + end + end + end +end diff --git a/rbi/lib/openai/type/converter.rbi b/rbi/lib/openai/type/converter.rbi new file mode 100644 index 00000000..979159e8 --- /dev/null +++ b/rbi/lib/openai/type/converter.rbi @@ -0,0 +1,99 @@ +# typed: strong + +module OpenAI + module Type + # @api private + module Converter + Input = T.type_alias { T.any(OpenAI::Type::Converter, T::Class[T.anything]) } + + State = + T.type_alias do + { + strictness: T.any(T::Boolean, Symbol), + exactness: {yes: Integer, no: Integer, maybe: Integer}, + branched: Integer + } + end + + # @api private + sig { overridable.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) } + def coerce(value, state:) + end + + # @api private + sig { overridable.params(value: T.anything).returns(T.anything) } + def dump(value) + end + + class << self + # @api private + sig do + params( + spec: T.any( + { + const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), + enum: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), + union: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)) + }, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ) + ) + .returns(T.proc.returns(T.anything)) + end + def self.type_info(spec) + end + + # @api private + # + # Based on `target`, transform `value` into `target`, to the extent possible: + # + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered + # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + sig do + params( + target: OpenAI::Type::Converter::Input, + value: T.anything, + state: OpenAI::Type::Converter::State + ) + .returns(T.anything) + end + def self.coerce( + target, + value, + # The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. 
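+          #
+          # As an illustrative (non-normative) sketch: coercing the string "1"
+          # to an Integer target returns 1 and counts toward `maybe`, while
+          # coercing the integer 1 counts toward `yes`:
+          #
+          #   state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
+          #   OpenAI::Type::Converter.coerce(Integer, "1", state: state)  # => 1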
+ state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + ) + end + + # @api private + sig { params(target: OpenAI::Type::Converter::Input, value: T.anything).returns(T.anything) } + def self.dump(target, value) + end + end + end + end +end diff --git a/rbi/lib/openai/type/enum.rbi b/rbi/lib/openai/type/enum.rbi new file mode 100644 index 00000000..7f6fdacd --- /dev/null +++ b/rbi/lib/openai/type/enum.rbi @@ -0,0 +1,58 @@ +# typed: strong + +module OpenAI + module Type + # @api private + # + # A value from among a specified list of options. OpenAPI enum values map to Ruby + # values in the SDK as follows: + # + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol + # + # We can therefore convert string values to Symbols, but can't convert other + # values safely. + module Enum + include OpenAI::Type::Converter + + # All of the valid Symbol values for this enum. + sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } + def values + end + + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + sig { void } + private def finalize! + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. + sig do + override + .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Type::Converter::State) + .returns(T.any(Symbol, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def dump(value) + end + end + end +end diff --git a/rbi/lib/openai/type/hash_of.rbi b/rbi/lib/openai/type/hash_of.rbi new file mode 100644 index 00000000..6d6bc6ec --- /dev/null +++ b/rbi/lib/openai/type/hash_of.rbi @@ -0,0 +1,84 @@ +# typed: strong + +module OpenAI + module Type + # @api private + # + # Hash of items of a given type. + class HashOf + include OpenAI::Type::Converter + + abstract! + final! + + sig(:final) do + params( + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig(:final) do + override + .params(value: T.any( + T::Hash[T.anything, T.anything], + T.anything + ), + state: OpenAI::Type::Converter::State) + .returns(T.any(OpenAI::Util::AnyHash, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override + .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) + .returns(T.any(OpenAI::Util::AnyHash, T.anything)) + end + def dump(value) + end + + # @api private + sig(:final) { returns(T.anything) } + protected def item_type + end + + # @api private + sig(:final) { returns(T::Boolean) } + protected def nilable? 
+ end + + # @api private + sig(:final) do + params( + type_info: T.any( + OpenAI::Util::AnyHash, + T.proc.returns(OpenAI::Type::Converter::Input), + OpenAI::Type::Converter::Input + ), + spec: OpenAI::Util::AnyHash + ) + .void + end + def initialize(type_info, spec = {}) + end + end + end +end diff --git a/rbi/lib/openai/type/request_parameters.rbi b/rbi/lib/openai/type/request_parameters.rbi new file mode 100644 index 00000000..3d5a7c0d --- /dev/null +++ b/rbi/lib/openai/type/request_parameters.rbi @@ -0,0 +1,20 @@ +# typed: strong + +module OpenAI + module Type + # @api private + module RequestParameters + # Options to specify HTTP behaviour for this request. + sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) } + attr_accessor :request_options + + # @api private + module Converter + # @api private + sig { params(params: T.anything).returns([T.anything, OpenAI::Util::AnyHash]) } + def dump_request(params) + end + end + end + end +end diff --git a/rbi/lib/openai/type/union.rbi b/rbi/lib/openai/type/union.rbi new file mode 100644 index 00000000..bfdbfa87 --- /dev/null +++ b/rbi/lib/openai/type/union.rbi @@ -0,0 +1,66 @@ +# typed: strong + +module OpenAI + module Type + # @api private + module Union + include OpenAI::Type::Converter + + # @api private + # + # All of the specified variant info for this union. + sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Type::Converter::Input)]]) } + private def known_variants + end + + # @api private + sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + protected def derefed_variants + end + + # All of the specified variants for this union. + sig { overridable.returns(T::Array[T.anything]) } + def variants + end + + # @api private + sig { params(property: Symbol).void } + private def discriminator(property) + end + + # @api private + sig do + params( + key: T.any(Symbol, OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything), + spec: T.any(OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything) + ) + .void + end + private def variant(key, spec = nil) + end + + # @api private + sig { params(value: T.anything).returns(T.nilable(T.anything)) } + private def resolve_variant(value) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig { override.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) } + def coerce(value, state:) + end + + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end + end + end +end diff --git a/rbi/lib/openai/type/unknown.rbi b/rbi/lib/openai/type/unknown.rbi new file mode 100644 index 00000000..1df209be --- /dev/null +++ b/rbi/lib/openai/type/unknown.rbi @@ -0,0 +1,37 @@ +# typed: strong + +module OpenAI + module Type + # @api private + # + # When we don't know what to expect for the value. + class Unknown + extend OpenAI::Type::Converter + + abstract! + final! 
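+
+      # Note: both `coerce` and `dump` are expected to behave as identity
+      # functions here; any value passes through unchanged.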
+ + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.==(other) + end + + class << self + # @api private + sig(:final) do + override.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) + end + def coerce(value, state:) + end + + # @api private + sig(:final) { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end + end + end + end +end diff --git a/sig/openai/base_client.rbs b/sig/openai/base_client.rbs deleted file mode 100644 index 319bd07e..00000000 --- a/sig/openai/base_client.rbs +++ /dev/null @@ -1,106 +0,0 @@ -module OpenAI - class BaseClient - type request_components = - { - method: Symbol, - path: String | ::Array[String], - query: ::Hash[String, (::Array[String] | String)?]?, - headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?]?, - body: top?, - unwrap: Symbol?, - page: Class?, - stream: Class?, - model: OpenAI::Converter::input?, - options: OpenAI::request_opts? - } - - type request_input = - { - method: Symbol, - url: URI::Generic, - headers: ::Hash[String, String], - body: top, - max_retries: Integer, - timeout: Float - } - - MAX_REDIRECTS: 20 - - PLATFORM_HEADERS: ::Hash[String, String] - - def self.validate!: (OpenAI::BaseClient::request_components req) -> void - - def self.should_retry?: ( - Integer status, - headers: ::Hash[String, String] - ) -> bool - - def self.follow_redirect: ( - OpenAI::BaseClient::request_input request, - status: Integer, - response_headers: ::Hash[String, String] - ) -> OpenAI::BaseClient::request_input - - def self.reap_connection!: ( - Integer | OpenAI::APIConnectionError status, - stream: Enumerable[String]? - ) -> void - - # @api private - attr_accessor requester: OpenAI::PooledNetRequester - - def initialize: ( - base_url: String, - ?timeout: Float, - ?max_retries: Integer, - ?initial_retry_delay: Float, - ?max_retry_delay: Float, - ?headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?], - ?idempotency_header: String? - ) -> void - - private def auth_headers: -> ::Hash[String, String] - - private def generate_idempotency_key: -> String - - private def build_request: ( - OpenAI::BaseClient::request_components req, - OpenAI::request_options opts - ) -> OpenAI::BaseClient::request_input - - private def retry_delay: ( - ::Hash[String, String] headers, - retry_count: Integer - ) -> Float - - private def send_request: ( - OpenAI::BaseClient::request_input request, - redirect_count: Integer, - retry_count: Integer, - send_retry_header: bool - ) -> [Integer, top, Enumerable[String]] - - def request: - ( - Symbol method, - String | ::Array[String] path, - ?query: ::Hash[String, (::Array[String] | String)?]?, - ?headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?]?, - ?body: top?, - ?unwrap: Symbol?, - ?page: Class?, - ?stream: Class?, - ?model: OpenAI::Converter::input?, - ?options: OpenAI::request_opts? 
- ) -> top - | (OpenAI::BaseClient::request_components req) -> top - - def inspect: -> String - end -end diff --git a/sig/openai/base_model.rbs b/sig/openai/base_model.rbs deleted file mode 100644 index a15f603e..00000000 --- a/sig/openai/base_model.rbs +++ /dev/null @@ -1,248 +0,0 @@ -module OpenAI - module Converter - type input = OpenAI::Converter | Class - - type state = - { - strictness: bool | :strong, - exactness: { yes: Integer, no: Integer, maybe: Integer }, - branched: Integer - } - - def coerce: (top value, state: OpenAI::Converter::state) -> top - - def dump: (top value) -> top - - def self.type_info: ( - { - const: (nil | bool | Integer | Float | Symbol)?, - enum: ^-> OpenAI::Converter::input?, - union: ^-> OpenAI::Converter::input? - } - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input spec - ) -> (^-> top) - - def self.coerce: ( - OpenAI::Converter::input target, - top value, - ?state: OpenAI::Converter::state - ) -> top - - def self.dump: (OpenAI::Converter::input target, top value) -> top - end - - class Unknown - extend OpenAI::Converter - - def self.===: (top other) -> bool - - def self.==: (top other) -> bool - - def self.coerce: (top value, state: OpenAI::Converter::state) -> top - - def self.dump: (top value) -> top - end - - class BooleanModel - extend OpenAI::Converter - - def self.===: (top other) -> bool - - def self.==: (top other) -> bool - - def self.coerce: ( - bool | top value, - state: OpenAI::Converter::state - ) -> (bool | top) - - def self.dump: (bool | top value) -> (bool | top) - end - - module Enum - include OpenAI::Converter - - def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] - - private def self.finalize!: -> void - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - String | Symbol | top value, - state: OpenAI::Converter::state - ) -> (Symbol | top) - - def dump: (Symbol | top value) -> (Symbol | top) - end - - module Union - include OpenAI::Converter - - private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Converter::input)]] - - def self.derefed_variants: -> ::Array[[Symbol?, top]] - - def self.variants: -> ::Array[top] - - private def self.discriminator: (Symbol property) -> void - - private def self.variant: ( - Symbol - | ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input key, - ?::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input spec - ) -> void - - private def self.resolve_variant: (top value) -> OpenAI::Converter::input? 
- - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: (top value, state: OpenAI::Converter::state) -> top - - def dump: (top value) -> top - end - - class ArrayOf - include OpenAI::Converter - - def self.[]: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> instance - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - Enumerable[top] | top value, - state: OpenAI::Converter::state - ) -> (::Array[top] | top) - - def dump: (Enumerable[top] | top value) -> (::Array[top] | top) - - def item_type: -> top - - def nilable?: -> bool - - def initialize: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - end - - class HashOf - include OpenAI::Converter - - def self.[]: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> instance - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - ::Hash[top, top] | top value, - state: OpenAI::Converter::state - ) -> (::Hash[Symbol, top] | top) - - def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) - - def item_type: -> top - - def nilable?: -> bool - - def initialize: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - end - - class BaseModel - extend OpenAI::Converter - - type known_field = - { mode: (:coerce | :dump)?, required: bool, nilable: bool } - - def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field - & { type_fn: (^-> OpenAI::Converter::input) })] - - def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field - & { type: OpenAI::Converter::input })] - - private def self.add_field: ( - Symbol name_sym, - required: bool, - type_info: { - const: (nil | bool | Integer | Float | Symbol)?, - enum: ^-> OpenAI::Converter::input?, - union: ^-> OpenAI::Converter::input?, - api_name: Symbol - } - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input, - spec: ::Hash[Symbol, top] - ) -> void - - def self.required: ( - Symbol name_sym, - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - - def self.optional: ( - Symbol name_sym, - ::Hash[Symbol, top] - | ^-> OpenAI::Converter::input - | OpenAI::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - - private def self.request_only: { -> void } -> void - - private def self.response_only: { -> void } -> void - - def self.==: (top other) -> bool - - def ==: (top other) -> bool - - def self.coerce: ( - OpenAI::BaseModel | ::Hash[top, top] | top value, - state: OpenAI::Converter::state - ) -> (instance | top) - - def self.dump: (instance | top value) -> (::Hash[top, top] | top) - - def []: (Symbol key) -> top? - - def to_h: -> ::Hash[Symbol, top] - - alias to_hash to_h - - def deconstruct_keys: (::Array[Symbol]? 
keys) -> ::Hash[Symbol, top] - - def initialize: (?::Hash[Symbol, top] | self data) -> void - - def inspect: -> String - end -end diff --git a/sig/openai/base_page.rbs b/sig/openai/base_page.rbs deleted file mode 100644 index 384b9d9b..00000000 --- a/sig/openai/base_page.rbs +++ /dev/null @@ -1,20 +0,0 @@ -module OpenAI - module BasePage[Elem] - def next_page?: -> bool - - def next_page: -> self - - def auto_paging_each: { (Elem arg0) -> void } -> void - - def to_enum: -> Enumerable[Elem] - - alias enum_for to_enum - - def initialize: ( - client: OpenAI::BaseClient, - req: OpenAI::BaseClient::request_components, - headers: ::Hash[String, String], - page_data: top - ) -> void - end -end diff --git a/sig/openai/base_stream.rbs b/sig/openai/base_stream.rbs deleted file mode 100644 index caa21732..00000000 --- a/sig/openai/base_stream.rbs +++ /dev/null @@ -1,23 +0,0 @@ -module OpenAI - module BaseStream[Message, Elem] - include Enumerable[Elem] - - def close: -> void - - private def iterator: -> Enumerable[Elem] - - def each: { (Elem arg0) -> void } -> void - - def to_enum: -> Enumerator[Elem] - - alias enum_for to_enum - - def initialize: ( - model: Class | OpenAI::Converter, - url: URI::Generic, - status: Integer, - response: top, - stream: Enumerable[Message] - ) -> void - end -end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index f97f86d3..a7a4533d 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -1,5 +1,5 @@ module OpenAI - class Client < OpenAI::BaseClient + class Client < OpenAI::Transport::BaseClient DEFAULT_MAX_RETRIES: 2 DEFAULT_TIMEOUT_IN_SECONDS: Float diff --git a/sig/openai/cursor_page.rbs b/sig/openai/cursor_page.rbs index 4bf87e3a..3b8ed120 100644 --- a/sig/openai/cursor_page.rbs +++ b/sig/openai/cursor_page.rbs @@ -1,6 +1,6 @@ module OpenAI class CursorPage[Elem] - include OpenAI::BasePage[Elem] + include OpenAI::Type::BasePage[Elem] attr_accessor data: ::Array[Elem]? 
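
As context for the `BasePage` / `CursorPage` signatures above, a hedged usage
sketch (assumes a configured client; the resource and field names are
illustrative rather than prescribed by this patch):

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    page = client.batches.list(limit: 20)
    # auto_paging_each walks next_page lazily, yielding one element at a time.
    page.auto_paging_each do |batch|
      puts batch.id
    end
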
diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index bd14d768..06dd0109 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -13,7 +13,7 @@ module OpenAI & OpenAI::request_parameters class SpeechCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor input: String diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 8bd43f21..5abf1747 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -15,7 +15,7 @@ module OpenAI & OpenAI::request_parameters class TranscriptionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor file: IO | StringIO diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 222208b9..3be91750 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class TranslationCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor file: IO | StringIO diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs index 89722862..887d319a 100644 --- a/sig/openai/models/batch_cancel_params.rbs +++ b/sig/openai/models/batch_cancel_params.rbs @@ -3,7 +3,7 @@ module OpenAI type batch_cancel_params = { } & OpenAI::request_parameters class BatchCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 61f2a0a0..a660d80f 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class BatchCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor completion_window: OpenAI::Models::BatchCreateParams::completion_window diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index 16b65628..7dfa318e 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -4,7 +4,7 @@ module OpenAI { after: String, limit: Integer } & OpenAI::request_parameters class BatchListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? 
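
Each `*Params` class in these hunks follows the same pattern: typed attributes
plus the `request_options` accessor mixed in from `OpenAI::RequestParameters`.
A minimal construction sketch (the cursor value is hypothetical):

    params = OpenAI::Models::BatchListParams.new(
      after: "batch_abc123",          # hypothetical cursor from a prior page
      limit: 20,
      request_options: {timeout: 30}  # per-request HTTP behaviour
    )
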
diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index a7d75dad..c6dcd6ae 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type batch_retrieve_params = { } & OpenAI::request_parameters class BatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index b25b8eaf..e1cf13a2 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -18,7 +18,7 @@ module OpenAI & OpenAI::request_parameters class AssistantCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor model: OpenAI::Models::Beta::AssistantCreateParams::model diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index 391db3a6..cbab3d6b 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type assistant_delete_params = { } & OpenAI::request_parameters class AssistantDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 80fc8f41..42c3c994 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -11,7 +11,7 @@ module OpenAI & OpenAI::request_parameters class AssistantListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index bb0b45bb..8f5e9d53 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type assistant_retrieve_params = { } & OpenAI::request_parameters class AssistantRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index cdd077dd..eec6b058 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -18,7 +18,7 @@ module OpenAI & OpenAI::request_parameters class AssistantUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor description: String? 
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 1cb60a89..28e49216 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -22,7 +22,7 @@ module OpenAI & OpenAI::request_parameters class ThreadCreateAndRunParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor assistant_id: String diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index bd37a0ce..ba9059a3 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class ThreadCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]? diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index c6749d4e..9440978b 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_delete_params = { } & OpenAI::request_parameters class ThreadDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs index ef480309..e259fb2d 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_retrieve_params = { } & OpenAI::request_parameters class ThreadRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index d5e29904..f725515f 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -9,7 +9,7 @@ module OpenAI & OpenAI::request_parameters class ThreadUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor metadata: OpenAI::Models::metadata? 
diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 76c17e62..0c2147d8 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class MessageCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor content: OpenAI::Models::Beta::Threads::MessageCreateParams::content diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index d7f60e8a..fb823dfa 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String } & OpenAI::request_parameters class MessageDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index aa976bd1..66fd88db 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -13,7 +13,7 @@ module OpenAI & OpenAI::request_parameters class MessageListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index 5e2772e6..29cfa699 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String } & OpenAI::request_parameters class MessageRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index a6b12a9a..5ccea8ed 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -7,7 +7,7 @@ module OpenAI & OpenAI::request_parameters class MessageUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 7c5a325c..93ad8eb6 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String } & OpenAI::request_parameters class RunCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs 
index 694f1fb2..3ed348c1 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -25,7 +25,7 @@ module OpenAI & OpenAI::request_parameters class RunCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor assistant_id: String diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index fe34ea7a..15e3c05c 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class RunListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index 1409bd1a..ab99f4b4 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String } & OpenAI::request_parameters class RunRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 67893bc6..6619a57c 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class RunSubmitToolOutputsParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index d466458d..2e8e547d 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -7,7 +7,7 @@ module OpenAI & OpenAI::request_parameters class RunUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index 31771eec..ee7ffc2d 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -15,7 +15,7 @@ module OpenAI & OpenAI::request_parameters class StepListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index 3ae1e68e..08140619 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -12,7 +12,7 @@ module OpenAI & 
OpenAI::request_parameters class StepRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 915923c4..19e7aa49 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -37,7 +37,7 @@ module OpenAI & OpenAI::request_parameters class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param] diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs index e32e018f..b5abb861 100644 --- a/sig/openai/models/chat/completion_delete_params.rbs +++ b/sig/openai/models/chat/completion_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type completion_delete_params = { } & OpenAI::request_parameters class CompletionDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 8ea7b59f..533ec07c 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class CompletionListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs index 89d6e6d5..8b07c987 100644 --- a/sig/openai/models/chat/completion_retrieve_params.rbs +++ b/sig/openai/models/chat/completion_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type completion_retrieve_params = { } & OpenAI::request_parameters class CompletionRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs index ea20f161..31894b01 100644 --- a/sig/openai/models/chat/completion_update_params.rbs +++ b/sig/openai/models/chat/completion_update_params.rbs @@ -5,7 +5,7 @@ module OpenAI { metadata: OpenAI::Models::metadata? } & OpenAI::request_parameters class CompletionUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor metadata: OpenAI::Models::metadata? 
diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 96ebdaed..08d092c8 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -11,7 +11,7 @@ module OpenAI & OpenAI::request_parameters class MessageListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 520e21d5..99d092d2 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -23,7 +23,7 @@ module OpenAI & OpenAI::request_parameters class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor model: OpenAI::Models::CompletionCreateParams::model diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 484259cf..31ad63d5 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI & OpenAI::request_parameters class EmbeddingCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor input: OpenAI::Models::EmbeddingCreateParams::input diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index 133e1f62..e875dcbb 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -3,7 +3,7 @@ module OpenAI type file_content_params = { } & OpenAI::request_parameters class FileContentParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 9426d743..fb34d5b0 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -5,7 +5,7 @@ module OpenAI & OpenAI::request_parameters class FileCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor file: IO | StringIO diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index 36ce2488..88f704ae 100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -3,7 +3,7 @@ module OpenAI type file_delete_params = { } & OpenAI::request_parameters class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 1cdd93d3..d4b2cc4c 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class 
FileListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index 79f137bc..28a27398 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type file_retrieve_params = { } & OpenAI::request_parameters class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index d3a98211..adc3eb00 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -4,7 +4,7 @@ module OpenAI type job_cancel_params = { } & OpenAI::request_parameters class JobCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 04733621..faedfe50 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -16,7 +16,7 @@ module OpenAI & OpenAI::request_parameters class JobCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor model: OpenAI::Models::FineTuning::JobCreateParams::model diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index 0700828c..8159d7f4 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -5,7 +5,7 @@ module OpenAI { after: String, limit: Integer } & OpenAI::request_parameters class JobListEventsParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index 26f60cf4..fe78d5ad 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -6,7 +6,7 @@ module OpenAI & OpenAI::request_parameters class JobListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 5a155e22..5306f28a 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type job_retrieve_params = { } & OpenAI::request_parameters class JobRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 23b73542..e5377848 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -6,7 +6,7 @@ module OpenAI { after: String, limit: Integer } & OpenAI::request_parameters class CheckpointListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 60907fc8..23a37ecd 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class ImageCreateVariationParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor image: IO | StringIO diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index d205ee7c..12dc8ce9 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -14,7 +14,7 @@ module OpenAI & OpenAI::request_parameters class ImageEditParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor image: IO | StringIO diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index 2c18ac73..b9283099 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -14,7 +14,7 @@ module OpenAI & OpenAI::request_parameters class ImageGenerateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor prompt: String diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index 72705871..a4ac1b91 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -3,7 +3,7 @@ module OpenAI type model_delete_params = { } & OpenAI::request_parameters class ModelDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 1c3127a8..15ed4c2f 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -3,7 +3,7 @@ module 
OpenAI type model_list_params = { } & OpenAI::request_parameters class ModelListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index 7a36fdcb..5293679d 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type model_retrieve_params = { } & OpenAI::request_parameters class ModelRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index edf5e5f5..7067fe66 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -8,7 +8,7 @@ module OpenAI & OpenAI::request_parameters class ModerationCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor input: OpenAI::Models::ModerationCreateParams::input diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index da58932d..d4b3d62b 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class InputItemListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index ec23db60..c5fb416a 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -24,7 +24,7 @@ module OpenAI & OpenAI::request_parameters class ResponseCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor input: OpenAI::Models::Responses::ResponseCreateParams::input diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index 269f94f6..48a494f2 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type response_delete_params = { } & OpenAI::request_parameters class ResponseDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 2b845fc5..76a68008 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -6,7 +6,7 @@ module OpenAI & OpenAI::request_parameters class ResponseRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? 
diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index 67e76b0d..df6f7b0c 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -3,7 +3,7 @@ module OpenAI type upload_cancel_params = { } & OpenAI::request_parameters class UploadCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index c7df9dc0..eb3cbcf1 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -4,7 +4,7 @@ module OpenAI { part_ids: ::Array[String], :md5 => String } & OpenAI::request_parameters class UploadCompleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor part_ids: ::Array[String] diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 0ea5b497..431fc356 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class UploadCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor bytes: Integer diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index dfd0732f..cc960f1c 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -5,7 +5,7 @@ module OpenAI { data: (IO | StringIO) } & OpenAI::request_parameters class PartCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor data: IO | StringIO diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 2072c316..b0bdecc8 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI & OpenAI::request_parameters class VectorStoreCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? 
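`PartCreateParams#data` above is typed `IO | StringIO`, so an in-memory buffer and a real file handle are interchangeable when staging upload parts. A sketch under that assumption (the filenames and contents are placeholders):

```ruby
require "stringio"
require "openai"

# An in-memory chunk:
in_memory = OpenAI::Models::Uploads::PartCreateParams.new(
  data: StringIO.new("chunk-1 bytes")
)

# A chunk read from disk; keep the handle open while the part is in use.
File.open("part-2.bin", "rb") do |file|
  from_disk = OpenAI::Models::Uploads::PartCreateParams.new(data: file)
  # ... hand `from_disk` to the uploads API before the block closes `file`
end
```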
diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index a65d42cf..65b00bf3 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_delete_params = { } & OpenAI::request_parameters class VectorStoreDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index 113f4b27..932f1b34 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class VectorStoreListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index 072120dd..39d412f5 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_retrieve_params = { } & OpenAI::request_parameters class VectorStoreRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 9b476ac0..3c87c94f 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -11,7 +11,7 @@ module OpenAI & OpenAI::request_parameters class VectorStoreSearchParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor query: OpenAI::Models::VectorStoreSearchParams::query diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index f0b56795..734ce7fd 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -9,7 +9,7 @@ module OpenAI & OpenAI::request_parameters class VectorStoreUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter? 
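These params classes are all `OpenAI::BaseModel` subclasses, so they also inherit the `[]`, `to_h`, and `deconstruct_keys` surface declared in `sig/openai/type/base_model.rbs` further down in this patch. A small illustration (the field values are invented):

```ruby
require "openai"

params = OpenAI::Models::VectorStoreUpdateParams.new(expires_after: nil)

params.to_h            # hash of the explicitly set fields
params[:expires_after] # raw access via BaseModel#[]

# deconstruct_keys is what makes rightward pattern matching work:
params => {expires_after: nil}
```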
diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index b83566f3..7400302e 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -5,7 +5,7 @@ module OpenAI { vector_store_id: String } & OpenAI::request_parameters class FileBatchCancelParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index f53e80ea..ec58551e 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class FileBatchCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor file_ids: ::Array[String] diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index f4378efa..b19f61ad 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -13,7 +13,7 @@ module OpenAI & OpenAI::request_parameters class FileBatchListFilesParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index ac22b295..2adbda41 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -5,7 +5,7 @@ module OpenAI { vector_store_id: String } & OpenAI::request_parameters class FileBatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index 95606785..608c3d89 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -5,7 +5,7 @@ module OpenAI { vector_store_id: String } & OpenAI::request_parameters class FileContentParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index c0b0ba41..82145f08 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI & OpenAI::request_parameters class FileCreateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor file_id: String diff --git 
a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index 634054fa..c1b36f86 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -5,7 +5,7 @@ module OpenAI { vector_store_id: String } & OpenAI::request_parameters class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 179d8472..88f6eb73 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -12,7 +12,7 @@ module OpenAI & OpenAI::request_parameters class FileListParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index 0e6f1b85..dafdc50e 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -5,7 +5,7 @@ module OpenAI { vector_store_id: String } & OpenAI::request_parameters class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index fe03cee6..dfa006a4 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -9,7 +9,7 @@ module OpenAI & OpenAI::request_parameters class FileUpdateParams < OpenAI::BaseModel - extend OpenAI::RequestParameters::Converter + extend OpenAI::Type::RequestParameters::Converter include OpenAI::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/page.rbs b/sig/openai/page.rbs index 0159e75d..48cd508b 100644 --- a/sig/openai/page.rbs +++ b/sig/openai/page.rbs @@ -1,6 +1,6 @@ module OpenAI class Page[Elem] - include OpenAI::BasePage[Elem] + include OpenAI::Type::BasePage[Elem] attr_accessor data: ::Array[Elem]? 
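`Page` now pulls its pagination contract from `OpenAI::Type::BasePage`, whose signatures (`next_page?`, `next_page`, `auto_paging_each`, `to_enum`) appear later in this patch. A usage sketch; the client construction and the `models.list` call are assumptions about the surrounding SDK rather than part of this diff:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))
page = client.models.list # assumed to return an OpenAI::Page

page.auto_paging_each { |model| puts model.id } # fetches pages as needed
first_three = page.to_enum.first(3)             # or treat it as an Enumerable
page = page.next_page if page.next_page?        # manual stepping also works
```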
diff --git a/sig/openai/pooled_net_requester.rbs b/sig/openai/pooled_net_requester.rbs deleted file mode 100644 index 58f0fcce..00000000 --- a/sig/openai/pooled_net_requester.rbs +++ /dev/null @@ -1,37 +0,0 @@ -module OpenAI - class PooledNetRequester - type request = - { - method: Symbol, - url: URI::Generic, - headers: ::Hash[String, String], - body: top, - deadline: Float - } - - KEEP_ALIVE_TIMEOUT: 30 - - def self.connect: (URI::Generic url) -> top - - def self.calibrate_socket_timeout: (top conn, Float deadline) -> void - - def self.build_request: ( - OpenAI::PooledNetRequester::request request - ) { - (String arg0) -> void - } -> top - - private def with_pool: ( - URI::Generic url, - deadline: Float - ) { - (top arg0) -> void - } -> void - - def execute: ( - OpenAI::PooledNetRequester::request request - ) -> [Integer, top, Enumerable[String]] - - def initialize: (?size: Integer) -> void - end -end diff --git a/sig/openai/request_options.rbs b/sig/openai/request_options.rbs index 26e1c1fa..51674ffe 100644 --- a/sig/openai/request_options.rbs +++ b/sig/openai/request_options.rbs @@ -2,16 +2,6 @@ module OpenAI type request_opts = OpenAI::RequestOptions | OpenAI::request_options | ::Hash[Symbol, top] - type request_parameters = { request_options: OpenAI::request_opts } - - module RequestParameters - attr_accessor request_options: OpenAI::request_opts - - module Converter - def dump_request: (top params) -> [top, ::Hash[Symbol, top]] - end - end - type request_options = { idempotency_key: String?, diff --git a/sig/openai/stream.rbs b/sig/openai/stream.rbs index 7474463b..a566119e 100644 --- a/sig/openai/stream.rbs +++ b/sig/openai/stream.rbs @@ -1,6 +1,6 @@ module OpenAI class Stream[Elem] - include OpenAI::BaseStream[OpenAI::Util::server_sent_event, Elem] + include OpenAI::Type::BaseStream[OpenAI::Util::server_sent_event, Elem] private def iterator: -> Enumerable[Elem] end diff --git a/sig/openai/transport/base_client.rbs b/sig/openai/transport/base_client.rbs new file mode 100644 index 00000000..4ae59a76 --- /dev/null +++ b/sig/openai/transport/base_client.rbs @@ -0,0 +1,110 @@ +module OpenAI + module Transport + class BaseClient + type request_components = + { + method: Symbol, + path: String | ::Array[String], + query: ::Hash[String, (::Array[String] | String)?]?, + headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?]?, + body: top?, + unwrap: Symbol?, + page: Class?, + stream: Class?, + model: OpenAI::Type::Converter::input?, + options: OpenAI::request_opts? + } + + type request_input = + { + method: Symbol, + url: URI::Generic, + headers: ::Hash[String, String], + body: top, + max_retries: Integer, + timeout: Float + } + + MAX_REDIRECTS: 20 + + PLATFORM_HEADERS: ::Hash[String, String] + + def self.validate!: ( + OpenAI::Transport::BaseClient::request_components req + ) -> void + + def self.should_retry?: ( + Integer status, + headers: ::Hash[String, String] + ) -> bool + + def self.follow_redirect: ( + OpenAI::Transport::BaseClient::request_input request, + status: Integer, + response_headers: ::Hash[String, String] + ) -> OpenAI::Transport::BaseClient::request_input + + def self.reap_connection!: ( + Integer | OpenAI::APIConnectionError status, + stream: Enumerable[String]? 
+ ) -> void + + # @api private + attr_accessor requester: OpenAI::Transport::PooledNetRequester + + def initialize: ( + base_url: String, + ?timeout: Float, + ?max_retries: Integer, + ?initial_retry_delay: Float, + ?max_retry_delay: Float, + ?headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?], + ?idempotency_header: String? + ) -> void + + private def auth_headers: -> ::Hash[String, String] + + private def generate_idempotency_key: -> String + + private def build_request: ( + OpenAI::Transport::BaseClient::request_components req, + OpenAI::request_options opts + ) -> OpenAI::Transport::BaseClient::request_input + + private def retry_delay: ( + ::Hash[String, String] headers, + retry_count: Integer + ) -> Float + + private def send_request: ( + OpenAI::Transport::BaseClient::request_input request, + redirect_count: Integer, + retry_count: Integer, + send_retry_header: bool + ) -> [Integer, top, Enumerable[String]] + + def request: + ( + Symbol method, + String | ::Array[String] path, + ?query: ::Hash[String, (::Array[String] | String)?]?, + ?headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?]?, + ?body: top?, + ?unwrap: Symbol?, + ?page: Class?, + ?stream: Class?, + ?model: OpenAI::Type::Converter::input?, + ?options: OpenAI::request_opts? + ) -> top + | (OpenAI::Transport::BaseClient::request_components req) -> top + + def inspect: -> String + end + end +end diff --git a/sig/openai/transport/pooled_net_requester.rbs b/sig/openai/transport/pooled_net_requester.rbs new file mode 100644 index 00000000..d29e1f7e --- /dev/null +++ b/sig/openai/transport/pooled_net_requester.rbs @@ -0,0 +1,39 @@ +module OpenAI + module Transport + class PooledNetRequester + type request = + { + method: Symbol, + url: URI::Generic, + headers: ::Hash[String, String], + body: top, + deadline: Float + } + + KEEP_ALIVE_TIMEOUT: 30 + + def self.connect: (URI::Generic url) -> top + + def self.calibrate_socket_timeout: (top conn, Float deadline) -> void + + def self.build_request: ( + OpenAI::Transport::PooledNetRequester::request request + ) { + (String arg0) -> void + } -> top + + private def with_pool: ( + URI::Generic url, + deadline: Float + ) { + (top arg0) -> void + } -> void + + def execute: ( + OpenAI::Transport::PooledNetRequester::request request + ) -> [Integer, top, Enumerable[String]] + + def initialize: (?size: Integer) -> void + end + end +end diff --git a/sig/openai/type.rbs b/sig/openai/type.rbs new file mode 100644 index 00000000..61ed895e --- /dev/null +++ b/sig/openai/type.rbs @@ -0,0 +1,22 @@ +module OpenAI + class Unknown = OpenAI::Type::Unknown + + class BooleanModel = OpenAI::Type::BooleanModel + + module Enum = OpenAI::Type::Enum + + module Union = OpenAI::Type::Union + + class ArrayOf = OpenAI::Type::ArrayOf + + class HashOf = OpenAI::Type::HashOf + + class BaseModel = OpenAI::Type::BaseModel + + type request_parameters = OpenAI::Type::request_parameters + + module RequestParameters = OpenAI::Type::RequestParameters + + module Type + end +end diff --git a/sig/openai/type/array_of.rbs b/sig/openai/type/array_of.rbs new file mode 100644 index 00000000..6441c9d4 --- /dev/null +++ b/sig/openai/type/array_of.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Type + class ArrayOf + include OpenAI::Type::Converter + + def self.[]: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> instance + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + 
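`sig/openai/type.rbs` above models the top-level names (`OpenAI::ArrayOf`, `OpenAI::BaseModel`, and friends) as aliases of their new `OpenAI::Type` homes, and `ArrayOf.[]` plus `#===` give container converters a case-equality contract. A sketch; the `===` behavior in the comments matches the implementation that appears later in this series (`other.is_a?(Array) && other.all?(item_type)`):

```ruby
require "openai"

strings = OpenAI::ArrayOf[String] # the alias resolves to OpenAI::Type::ArrayOf

strings === %w[a b]  # => true: an Array whose elements all match String
strings === ["a", 1] # => false

%w[a b] => ^strings # the pin reuses #=== during pattern matching
```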
def coerce: ( + Enumerable[top] | top value, + state: OpenAI::Type::Converter::state + ) -> (::Array[top] | top) + + def dump: (Enumerable[top] | top value) -> (::Array[top] | top) + + def item_type: -> top + + def nilable?: -> bool + + def initialize: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + end + end +end diff --git a/sig/openai/type/base_model.rbs b/sig/openai/type/base_model.rbs new file mode 100644 index 00000000..a929a421 --- /dev/null +++ b/sig/openai/type/base_model.rbs @@ -0,0 +1,73 @@ +module OpenAI + module Type + class BaseModel + extend OpenAI::Type::Converter + + type known_field = + { mode: (:coerce | :dump)?, required: bool, nilable: bool } + + def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field + & { type_fn: (^-> OpenAI::Type::Converter::input) })] + + def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field + & { type: OpenAI::Type::Converter::input })] + + private def self.add_field: ( + Symbol name_sym, + required: bool, + type_info: { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Type::Converter::input?, + union: ^-> OpenAI::Type::Converter::input?, + api_name: Symbol + } + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input, + spec: ::Hash[Symbol, top] + ) -> void + + def self.required: ( + Symbol name_sym, + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + + def self.optional: ( + Symbol name_sym, + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + + private def self.request_only: { -> void } -> void + + private def self.response_only: { -> void } -> void + + def self.==: (top other) -> bool + + def ==: (top other) -> bool + + def self.coerce: ( + OpenAI::BaseModel | ::Hash[top, top] | top value, + state: OpenAI::Type::Converter::state + ) -> (instance | top) + + def self.dump: (instance | top value) -> (::Hash[top, top] | top) + + def []: (Symbol key) -> top? + + def to_h: -> ::Hash[Symbol, top] + + alias to_hash to_h + + def deconstruct_keys: (::Array[Symbol]? 
keys) -> ::Hash[Symbol, top] + + def initialize: (?::Hash[Symbol, top] | self data) -> void + + def inspect: -> String + end + end +end diff --git a/sig/openai/type/base_page.rbs b/sig/openai/type/base_page.rbs new file mode 100644 index 00000000..db9e8a49 --- /dev/null +++ b/sig/openai/type/base_page.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Type + module BasePage[Elem] + def next_page?: -> bool + + def next_page: -> self + + def auto_paging_each: { (Elem arg0) -> void } -> void + + def to_enum: -> Enumerable[Elem] + + alias enum_for to_enum + + def initialize: ( + client: OpenAI::Transport::BaseClient, + req: OpenAI::Transport::BaseClient::request_components, + headers: ::Hash[String, String], + page_data: top + ) -> void + end + end +end diff --git a/sig/openai/type/base_stream.rbs b/sig/openai/type/base_stream.rbs new file mode 100644 index 00000000..de541575 --- /dev/null +++ b/sig/openai/type/base_stream.rbs @@ -0,0 +1,25 @@ +module OpenAI + module Type + module BaseStream[Message, Elem] + include Enumerable[Elem] + + def close: -> void + + private def iterator: -> Enumerable[Elem] + + def each: { (Elem arg0) -> void } -> void + + def to_enum: -> Enumerator[Elem] + + alias enum_for to_enum + + def initialize: ( + model: Class | OpenAI::Type::Converter, + url: URI::Generic, + status: Integer, + response: top, + stream: Enumerable[Message] + ) -> void + end + end +end diff --git a/sig/openai/type/boolean_model.rbs b/sig/openai/type/boolean_model.rbs new file mode 100644 index 00000000..00ed0300 --- /dev/null +++ b/sig/openai/type/boolean_model.rbs @@ -0,0 +1,18 @@ +module OpenAI + module Type + class BooleanModel + extend OpenAI::Type::Converter + + def self.===: (top other) -> bool + + def self.==: (top other) -> bool + + def self.coerce: ( + bool | top value, + state: OpenAI::Type::Converter::state + ) -> (bool | top) + + def self.dump: (bool | top value) -> (bool | top) + end + end +end diff --git a/sig/openai/type/converter.rbs b/sig/openai/type/converter.rbs new file mode 100644 index 00000000..3785d489 --- /dev/null +++ b/sig/openai/type/converter.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Type + module Converter + type input = OpenAI::Type::Converter | Class + + type state = + { + strictness: bool | :strong, + exactness: { yes: Integer, no: Integer, maybe: Integer }, + branched: Integer + } + + def coerce: (top value, state: OpenAI::Type::Converter::state) -> top + + def dump: (top value) -> top + + def self.type_info: ( + { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Type::Converter::input?, + union: ^-> OpenAI::Type::Converter::input? 
+ } + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input spec + ) -> (^-> top) + + def self.coerce: ( + OpenAI::Type::Converter::input target, + top value, + ?state: OpenAI::Type::Converter::state + ) -> top + + def self.dump: (OpenAI::Type::Converter::input target, top value) -> top + end + end +end diff --git a/sig/openai/type/enum.rbs b/sig/openai/type/enum.rbs new file mode 100644 index 00000000..fb65841f --- /dev/null +++ b/sig/openai/type/enum.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Type + module Enum + include OpenAI::Type::Converter + + def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] + + private def self.finalize!: -> void + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + String | Symbol | top value, + state: OpenAI::Type::Converter::state + ) -> (Symbol | top) + + def dump: (Symbol | top value) -> (Symbol | top) + end + end +end diff --git a/sig/openai/type/hash_of.rbs b/sig/openai/type/hash_of.rbs new file mode 100644 index 00000000..a31f87aa --- /dev/null +++ b/sig/openai/type/hash_of.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Type + class HashOf + include OpenAI::Type::Converter + + def self.[]: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> instance + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + ::Hash[top, top] | top value, + state: OpenAI::Type::Converter::state + ) -> (::Hash[Symbol, top] | top) + + def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) + + def item_type: -> top + + def nilable?: -> bool + + def initialize: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + end + end +end diff --git a/sig/openai/type/request_parameters.rbs b/sig/openai/type/request_parameters.rbs new file mode 100644 index 00000000..b92ee909 --- /dev/null +++ b/sig/openai/type/request_parameters.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Type + type request_parameters = { request_options: OpenAI::request_opts } + + module RequestParameters + attr_accessor request_options: OpenAI::request_opts + + module Converter + def dump_request: (top params) -> [top, ::Hash[Symbol, top]] + end + end + end +end diff --git a/sig/openai/type/union.rbs b/sig/openai/type/union.rbs new file mode 100644 index 00000000..372932ce --- /dev/null +++ b/sig/openai/type/union.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Type + module Union + include OpenAI::Type::Converter + + private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Type::Converter::input)]] + + def self.derefed_variants: -> ::Array[[Symbol?, top]] + + def self.variants: -> ::Array[top] + + private def self.discriminator: (Symbol property) -> void + + private def self.variant: ( + Symbol + | ::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input key, + ?::Hash[Symbol, top] + | ^-> OpenAI::Type::Converter::input + | OpenAI::Type::Converter::input spec + ) -> void + + private def self.resolve_variant: ( + top value + ) -> OpenAI::Type::Converter::input? 
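The `Union` signatures above amount to a small DSL: `variant` registers members (each behind a lazily resolved lambda), `discriminator` names the routing key, and `resolve_variant` picks a member during coercion. A hypothetical union written against those signatures; `Cat`, `Dog`, and the `:type` tag are invented for illustration:

```ruby
require "openai"

class Cat < OpenAI::BaseModel
  required :type, const: :cat
end

class Dog < OpenAI::BaseModel
  required :type, const: :dog
end

module Pet
  extend OpenAI::Type::Union

  discriminator :type      # route on the :type field when coercing hashes
  variant :cat, -> { Cat } # lambdas defer constant resolution
  variant :dog, -> { Dog }
end

Pet.variants # should resolve to [Cat, Dog]
```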
+ + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: (top value, state: OpenAI::Type::Converter::state) -> top + + def dump: (top value) -> top + end + end +end diff --git a/sig/openai/type/unknown.rbs b/sig/openai/type/unknown.rbs new file mode 100644 index 00000000..b3139352 --- /dev/null +++ b/sig/openai/type/unknown.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Type + class Unknown + extend OpenAI::Type::Converter + + def self.===: (top other) -> bool + + def self.==: (top other) -> bool + + def self.coerce: (top value, state: OpenAI::Type::Converter::state) -> top + + def self.dump: (top value) -> top + end + end +end diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index f1c37432..90012091 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -32,7 +32,7 @@ def test_typing converters.each do |conv| assert_pattern do - conv => OpenAI::Converter + conv => OpenAI::Type::Converter end end end @@ -68,7 +68,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness end end @@ -99,7 +99,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Converter.dump(target, input) => ^expect + OpenAI::Type::Converter.dump(target, input) => ^expect end end end @@ -118,7 +118,7 @@ def test_coerce_errors target, input = _1 state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_raises(_2) do - OpenAI::Converter.coerce(target, input, state: state) + OpenAI::Type::Converter.coerce(target, input, state: state) end end end @@ -179,7 +179,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness end end @@ -207,7 +207,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Converter.dump(target, input) => ^expect + OpenAI::Type::Converter.dump(target, input) => ^expect end end end @@ -253,7 +253,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness end end @@ -333,7 +333,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - coerced = OpenAI::Converter.coerce(target, input, state: state) + coerced = OpenAI::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) if coerced.is_a?(OpenAI::BaseModel) coerced.to_h => ^expect @@ -363,7 +363,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Converter.dump(target, input) => ^expect + OpenAI::Type::Converter.dump(target, input) => ^expect end end end @@ -509,7 +509,7 @@ def test_coerce exactness, branched, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - coerced = OpenAI::Converter.coerce(target, input, state: state) + coerced = OpenAI::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) if coerced.is_a?(OpenAI::BaseModel) coerced.to_h => ^expect From ec69933eb7a601e7f1f7c6991ed5344013ee4451 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 17:39:30 +0000 Subject: [PATCH 068/295] chore: more accurate type annotations for SDK internals (#71) --- rbi/lib/openai/type/array_of.rbi | 8 +++++--- rbi/lib/openai/type/hash_of.rbi | 4 +++- sig/openai/type/array_of.rbs | 8 ++++---- sig/openai/type/hash_of.rbs | 4 ++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/rbi/lib/openai/type/array_of.rbi b/rbi/lib/openai/type/array_of.rbi index 4c6fe0a1..2f6ace65 100644 --- a/rbi/lib/openai/type/array_of.rbi +++ b/rbi/lib/openai/type/array_of.rbi @@ -11,6 +11,8 @@ module OpenAI abstract! final! + Elem = type_member(:out) + sig(:final) do params( type_info: T.any( @@ -36,7 +38,7 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Enumerable[T.anything], T.anything), state: OpenAI::Type::Converter::State) + .params(value: T.any(T::Enumerable[Elem], T.anything), state: OpenAI::Type::Converter::State) .returns(T.any(T::Array[T.anything], T.anything)) end def coerce(value, state:) @@ -45,14 +47,14 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Enumerable[T.anything], T.anything)) + .params(value: T.any(T::Enumerable[Elem], T.anything)) .returns(T.any(T::Array[T.anything], T.anything)) end def dump(value) end # @api private - sig(:final) { returns(T.anything) } + sig(:final) { returns(Elem) } protected def item_type end diff --git a/rbi/lib/openai/type/hash_of.rbi b/rbi/lib/openai/type/hash_of.rbi index 6d6bc6ec..66bd1160 100644 --- a/rbi/lib/openai/type/hash_of.rbi +++ b/rbi/lib/openai/type/hash_of.rbi @@ -11,6 +11,8 @@ module OpenAI abstract! final! 
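`ArrayOf` gains a covariant `Elem` type member here, and `HashOf` receives the identical treatment just below. For readers new to Sorbet generics, a standalone illustration of what `type_member(:out)` means; `Box` is not SDK code:

```ruby
# typed: true
require "sorbet-runtime"

class Box
  extend T::Sig
  extend T::Generic

  Elem = type_member(:out) # :out marks Elem covariant

  sig { returns(Elem) }
  attr_reader :value

  sig { params(value: Elem).void }
  def initialize(value)
    @value = value # constructors are exempt from variance checks
  end
end

# Because Elem only flows out, a Box[Integer] is usable where a Box[Numeric]
# is expected:
numeric_box = T.let(Box[Integer].new(1), Box[Numeric])
```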
+ Elem = type_member(:out) + sig(:final) do params( type_info: T.any( @@ -56,7 +58,7 @@ module OpenAI end # @api private - sig(:final) { returns(T.anything) } + sig(:final) { returns(Elem) } protected def item_type end diff --git a/sig/openai/type/array_of.rbs b/sig/openai/type/array_of.rbs index 6441c9d4..7e8cb67c 100644 --- a/sig/openai/type/array_of.rbs +++ b/sig/openai/type/array_of.rbs @@ -1,6 +1,6 @@ module OpenAI module Type - class ArrayOf + class ArrayOf[Elem] include OpenAI::Type::Converter def self.[]: ( @@ -15,13 +15,13 @@ module OpenAI def ==: (top other) -> bool def coerce: ( - Enumerable[top] | top value, + Enumerable[Elem] | top value, state: OpenAI::Type::Converter::state ) -> (::Array[top] | top) - def dump: (Enumerable[top] | top value) -> (::Array[top] | top) + def dump: (Enumerable[Elem] | top value) -> (::Array[top] | top) - def item_type: -> top + def item_type: -> Elem def nilable?: -> bool diff --git a/sig/openai/type/hash_of.rbs b/sig/openai/type/hash_of.rbs index a31f87aa..dea00a66 100644 --- a/sig/openai/type/hash_of.rbs +++ b/sig/openai/type/hash_of.rbs @@ -1,6 +1,6 @@ module OpenAI module Type - class HashOf + class HashOf[Elem] include OpenAI::Type::Converter def self.[]: ( @@ -21,7 +21,7 @@ module OpenAI def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) - def item_type: -> top + def item_type: -> Elem def nilable?: -> bool From 45c83052be164d066fcfce79b14361d78f453fda Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 18:46:10 +0000 Subject: [PATCH 069/295] chore: update readme (#72) --- README.md | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 2c5d6001..63cf0c1f 100644 --- a/README.md +++ b/README.md @@ -10,10 +10,10 @@ The underlying REST API documentation can be found on [platform.openai.com](http ## Installation -To use this gem during the beta, install directly from GitHub with Bundler by adding the following to your application's `Gemfile`: +To use this gem, install via Bundler by adding the following to your application's `Gemfile`: ```ruby -gem "openai", git: "https://github.com/openai/openai-ruby", branch: "main" +gem "openai", "~> 0.1.0.pre.alpha.2" ``` To fetch an initial copy of the gem: @@ -22,12 +22,6 @@ To fetch an initial copy of the gem: bundle install ``` -To update the version used by your application when updates are pushed to GitHub: - -```sh -bundle update openai -``` - ## Usage ```ruby From d87606f539e61fef6c8dc409f0057e1da790d60a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 00:27:14 +0000 Subject: [PATCH 070/295] chore: Remove deprecated/unused remote spec feature --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 426a77c6..b21d5dae 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6663c59193eb95b201e492de17dcbd5e126ba03d18ce66287a3e2c632ca56fe7.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: e25e31d8446b6bc0e3ef7103b6993cce +config_hash: 2daae06cc598821ccf87201de0861e40 From c6b737c010732bb31bb7a087830552c80f34b6f1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 01:07:58 +0000 Subject: [PATCH 071/295] fix: 
switch to github compatible markdown engine (#73) --- .rubocop.yml | 4 ++ .yardopts | 1 + Gemfile | 19 +++-- Gemfile.lock | 72 ++++++++++--------- lib/openai/type/converter.rb | 6 ++ .../models/audio/speech_create_params.rbi | 10 ++- .../models/audio/transcription_segment.rbi | 13 +++- .../file_citation_delta_annotation.rbi | 9 ++- .../beta/threads/runs/step_list_params.rbi | 10 ++- ...hat_completion_assistant_message_param.rbi | 10 ++- .../models/chat/chat_completion_message.rbi | 10 ++- rbi/lib/openai/models/completion.rbi | 10 ++- rbi/lib/openai/models/completion_usage.rbi | 7 +- .../fine_tuning/fine_tuning_job_event.rbi | 10 ++- .../models/image_create_variation_params.rbi | 10 ++- .../models/responses/file_search_tool.rbi | 8 ++- ...ode_interpreter_call_in_progress_event.rbi | 6 +- ...de_interpreter_call_interpreting_event.rbi | 6 +- ...esponse_computer_tool_call_output_item.rbi | 9 ++- .../models/responses/response_input_item.rbi | 9 ++- .../models/responses/response_usage.rbi | 8 ++- rbi/lib/openai/models/upload.rbi | 12 +++- .../models/vector_store_create_params.rbi | 9 ++- .../models/vector_store_search_params.rbi | 9 ++- .../file_batch_list_files_params.rbi | 10 ++- .../vector_stores/vector_store_file_batch.rbi | 9 ++- test/openai/base_model_test.rb | 2 + 27 files changed, 239 insertions(+), 59 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 1352fdb0..c4eda68d 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -235,6 +235,10 @@ Style/RegexpLiteral: Style/SafeNavigation: Enabled: false +Style/SignalException: + Exclude: + - Rakefile + # We use these sparingly, where we anticipate future branches for the # inner conditional. Style/SoleNestedConditional: diff --git a/.yardopts b/.yardopts index c7c3301d..004c697b 100644 --- a/.yardopts +++ b/.yardopts @@ -1,3 +1,4 @@ --markup markdown +--markup-provider redcarpet --exclude /rbi --exclude /sig diff --git a/Gemfile b/Gemfile index b064fc5a..e5ec01e9 100644 --- a/Gemfile +++ b/Gemfile @@ -5,12 +5,6 @@ source "https://rubygems.org" gemspec group :development do - gem "async" - gem "minitest" - gem "minitest-focus" - gem "minitest-hooks" - gem "minitest-proveit" - gem "minitest-rg" gem "rake" gem "rbs" gem "rubocop" @@ -20,6 +14,19 @@ group :development do # TODO: using a fork for now, the prettier below has a bug gem "syntax_tree-rbs", github: "stainless-api/syntax_tree-rbs", branch: "main" gem "tapioca" +end + +group :development, :test do + gem "async" + gem "minitest" + gem "minitest-focus" + gem "minitest-hooks" + gem "minitest-proveit" + gem "minitest-rg" +end + +group :development, :docs do + gem "redcarpet" gem "webrick" gem "yard" end diff --git a/Gemfile.lock b/Gemfile.lock index 333b95fb..4d2964c7 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -29,8 +29,8 @@ GEM minitest (>= 5.1) securerandom (>= 0.3) tzinfo (~> 2.0, >= 2.0.5) - ast (2.4.2) - async (2.23.0) + ast (2.4.3) + async (2.23.1) console (~> 1.29) fiber-annotation io-event (~> 1.9) @@ -41,11 +41,11 @@ GEM bigdecimal (3.1.9) concurrent-ruby (1.3.5) connection_pool (2.5.0) - console (1.29.3) + console (1.30.2) fiber-annotation fiber-local (~> 1.1) json - csv (3.3.2) + csv (3.3.3) drb (2.2.1) erubi (1.13.1) ffi (1.17.1) @@ -56,16 +56,16 @@ GEM fileutils (1.7.3) i18n (1.14.7) concurrent-ruby (~> 1.0) - io-event (1.9.0) - json (2.10.1) + io-event (1.10.0) + json (2.10.2) language_server-protocol (3.17.0.4) lint_roller (1.1.0) listen (3.9.0) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) - logger (1.6.6) - metrics (0.12.1) - minitest (5.25.4) + 
logger (1.7.0) + metrics (0.12.2) + minitest (5.25.5) minitest-focus (1.4.0) minitest (>= 4, < 6) minitest-hooks (1.5.2) @@ -74,26 +74,29 @@ GEM minitest (> 5, < 7) minitest-rg (5.3.0) minitest (~> 5.0) + mutex_m (0.3.0) netrc (0.11.0) parallel (1.26.3) - parser (3.3.7.1) + parser (3.3.7.4) ast (~> 2.4.1) racc prettier_print (1.2.1) - prism (1.3.0) + prism (1.4.0) racc (1.8.1) rainbow (3.1.1) rake (13.2.1) rb-fsevent (0.11.2) rb-inotify (0.11.1) ffi (~> 1.0) - rbi (0.2.4) + rbi (0.3.1) prism (~> 1.0) + rbs (>= 3.4.4) sorbet-runtime (>= 0.5.9204) - rbs (3.8.1) + rbs (3.9.2) logger + redcarpet (3.6.1) regexp_parser (2.10.0) - rubocop (1.73.2) + rubocop (1.75.1) json (~> 2.3) language_server-protocol (~> 3.17.0.2) lint_roller (~> 1.1.0) @@ -101,41 +104,43 @@ GEM parser (>= 3.3.0.2) rainbow (>= 2.2.2, < 4.0) regexp_parser (>= 2.9.3, < 3.0) - rubocop-ast (>= 1.38.0, < 2.0) + rubocop-ast (>= 1.43.0, < 2.0) ruby-progressbar (~> 1.7) unicode-display_width (>= 2.4.0, < 4.0) - rubocop-ast (1.38.1) - parser (>= 3.3.1.0) + rubocop-ast (1.43.0) + parser (>= 3.3.7.2) + prism (~> 1.4) ruby-progressbar (1.13.0) securerandom (0.4.1) - sorbet (0.5.11888) - sorbet-static (= 0.5.11888) - sorbet-runtime (0.5.11888) - sorbet-static (0.5.11888-x86_64-linux) - sorbet-static-and-runtime (0.5.11888) - sorbet (= 0.5.11888) - sorbet-runtime (= 0.5.11888) - spoom (1.5.4) + sorbet (0.5.11966) + sorbet-static (= 0.5.11966) + sorbet-runtime (0.5.11966) + sorbet-static (0.5.11966-x86_64-linux) + sorbet-static-and-runtime (0.5.11966) + sorbet (= 0.5.11966) + sorbet-runtime (= 0.5.11966) + spoom (1.6.1) erubi (>= 1.10.0) prism (>= 0.28.0) rbi (>= 0.2.3) sorbet-static-and-runtime (>= 0.5.10187) thor (>= 0.19.2) - steep (1.9.4) + steep (1.10.0) activesupport (>= 5.1) concurrent-ruby (>= 1.1.10) csv (>= 3.0.9) fileutils (>= 1.1.0) json (>= 2.1.0) - language_server-protocol (>= 3.15, < 4.0) + language_server-protocol (>= 3.17.0.4, < 4.0) listen (~> 3.0) logger (>= 1.3.0) + mutex_m (>= 0.3.0) parser (>= 3.1) rainbow (>= 2.2.2, < 4.0) - rbs (~> 3.8) + rbs (~> 3.9) securerandom (>= 0.1) strscan (>= 1.0.0) - terminal-table (>= 2, < 4) + terminal-table (>= 2, < 5) uri (>= 0.12.0) strscan (3.1.2) syntax_tree (6.2.0) @@ -150,13 +155,15 @@ GEM spoom (>= 1.2.0) thor (>= 1.2.0) yard-sorbet - terminal-table (3.0.2) - unicode-display_width (>= 1.1.1, < 3) + terminal-table (4.0.0) + unicode-display_width (>= 1.1.1, < 4) thor (1.3.2) traces (0.15.2) tzinfo (2.0.6) concurrent-ruby (~> 1.0) - unicode-display_width (2.6.0) + unicode-display_width (3.1.4) + unicode-emoji (~> 4.0, >= 4.0.4) + unicode-emoji (4.0.4) uri (1.0.3) webrick (1.9.1) yard (0.9.37) @@ -177,6 +184,7 @@ DEPENDENCIES openai! 
rake rbs + redcarpet rubocop sorbet steep diff --git a/lib/openai/type/converter.rb b/lib/openai/type/converter.rb index ffb349aa..8a4739ea 100644 --- a/lib/openai/type/converter.rb +++ b/lib/openai/type/converter.rb @@ -2,6 +2,7 @@ module OpenAI module Type + # rubocop:disable Metrics/ModuleLength # @api private module Converter # rubocop:disable Lint/UnusedMethodArgument @@ -118,6 +119,8 @@ def coerce( value, state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} ) + # rubocop:disable Lint/SuppressedException + # rubocop:disable Metrics/BlockNesting strictness, exactness = state.fetch_values(:strictness, :exactness) case target @@ -194,6 +197,8 @@ def coerce( exactness[:no] += 1 value + # rubocop:enable Metrics/BlockNesting + # rubocop:enable Lint/SuppressedException end # @api private @@ -207,5 +212,6 @@ def dump(target, value) end end end + # rubocop:enable Metrics/ModuleLength end end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 8110c846..cd1f2343 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -59,7 +59,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + def self.new( + input:, + model:, + voice:, + instructions: nil, + response_format: nil, + speed: nil, + request_options: {} + ) end sig do diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi index 8bc3e5b2..96cdfd4e 100644 --- a/rbi/lib/openai/models/audio/transcription_segment.rbi +++ b/rbi/lib/openai/models/audio/transcription_segment.rbi @@ -62,7 +62,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) + def self.new( + id:, + avg_logprob:, + compression_ratio:, + end_:, + no_speech_prob:, + seek:, + start:, + temperature:, + text:, + tokens: + ) end sig do diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 666708db..dd7bb7b4 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -57,7 +57,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation) + def self.new( + index:, + end_index: nil, + file_citation: nil, + start_index: nil, + text: nil, + type: :file_citation + ) end sig do diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 52605110..bdb4424d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -73,7 +73,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + def self.new( + thread_id:, + after: nil, + before: nil, + include: nil, + limit: nil, + order: nil, + request_options: {} + ) end sig do diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi 
b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 84e2f441..65806b1f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -105,7 +105,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) + def self.new( + audio: nil, + content: nil, + function_call: nil, + name: nil, + refusal: nil, + tool_calls: nil, + role: :assistant + ) end sig do diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 3d52bbbb..190244ae 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -76,7 +76,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) + def self.new( + content:, + refusal:, + annotations: nil, + audio: nil, + function_call: nil, + tool_calls: nil, + role: :assistant + ) end sig do diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index 0f9d002f..e877536d 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -54,7 +54,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) + def self.new( + id:, + choices:, + created:, + model:, + system_fingerprint: nil, + usage: nil, + object: :text_completion + ) end sig do diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index bc7a3e80..8daa195d 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -117,7 +117,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) + def self.new( + accepted_prediction_tokens: nil, + audio_tokens: nil, + reasoning_tokens: nil, + rejected_prediction_tokens: nil + ) end sig do diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 2963e637..73e56f74 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -51,7 +51,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") + def self.new( + id:, + created_at:, + level:, + message:, + data: nil, + type: nil, + object: :"fine_tuning.job.event" + ) end sig do diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 364651ff..80245291 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -53,7 +53,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + def self.new( + image:, + model: nil, + n: nil, + response_format: nil, + size: nil, + user: nil, + request_options: {} + ) end sig do diff --git 
a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index a62e15dd..62517b8d 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -57,7 +57,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) + def self.new( + vector_store_ids:, + filters: nil, + max_num_results: nil, + ranking_options: nil, + type: :file_search + ) end sig do diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 7bc859d9..e49fec75 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -33,7 +33,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + def self.new( + code_interpreter_call:, + output_index:, + type: :"response.code_interpreter_call.in_progress" + ) end sig do diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index 9788b54f..a19cad6d 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -33,7 +33,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + def self.new( + code_interpreter_call:, + output_index:, + type: :"response.code_interpreter_call.interpreting" + ) end sig do diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index a995f82d..a1ef5e14 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -76,7 +76,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + def self.new( + id:, + call_id:, + output:, + acknowledged_safety_checks: nil, + status: nil, + type: :computer_call_output + ) end sig do diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 51053e58..7f12e53f 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -222,7 +222,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + def self.new( + call_id:, + output:, + id: nil, + acknowledged_safety_checks: nil, + status: nil, + type: :computer_call_output + ) end sig do diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index b3158710..1ec163da 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -52,7 
+52,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:) + def self.new( + input_tokens:, + input_tokens_details:, + output_tokens:, + output_tokens_details:, + total_tokens: + ) end sig do diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 08940cb9..129170d8 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -59,7 +59,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) + def self.new( + id:, + bytes:, + created_at:, + expires_at:, + filename:, + purpose:, + status:, + file: nil, + object: :upload + ) end sig do diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index e3f9c4b1..4c0acebf 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -82,7 +82,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) + def self.new( + chunking_strategy: nil, + expires_after: nil, + file_ids: nil, + metadata: nil, + name: nil, + request_options: {} + ) end sig do diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index c4ff79e9..c6d6cc94 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -60,7 +60,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) + def self.new( + query:, + filters: nil, + max_num_results: nil, + ranking_options: nil, + rewrite_query: nil, + request_options: {} + ) end sig do diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 531c3d57..4cba5c1a 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -65,7 +65,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + def self.new( + vector_store_id:, + after: nil, + before: nil, + filter: nil, + limit: nil, + order: nil, + request_options: {} + ) end sig do diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 0d7f33b3..09d4693b 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -52,7 +52,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") + def self.new( + id:, + created_at:, + file_counts:, + status:, + vector_store_id:, + object: :"vector_store.files_batch" + ) end sig do diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index 90012091..7238cd06 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -154,9 +154,11 @@ module E4 def test_coerce cases 
= { + # rubocop:disable Style/BooleanSymbol [E1, true] => [{yes: 1}, true], [E1, false] => [{no: 1}, false], [E1, :true] => [{no: 1}, :true], + # rubocop:enable Style/BooleanSymbol [E2, 1] => [{yes: 1}, 1], [E2, 1.0] => [{yes: 1}, 1], From 3c6b4b9db780bb65061fe67f07e77d775ab146b8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 01:33:10 +0000 Subject: [PATCH 072/295] chore: fix misc rubocop errors (#74) --- .rubocop.yml | 7 +++-- Rakefile | 2 +- Steepfile | 2 +- lib/openai.rb | 31 ++++++++++---------- lib/openai/transport/pooled_net_requester.rb | 8 ++--- lib/openai/type/array_of.rb | 4 ++- lib/openai/type/base_model.rb | 8 +++-- lib/openai/type/base_page.rb | 2 +- lib/openai/type/base_stream.rb | 4 +-- lib/openai/type/hash_of.rb | 4 ++- lib/openai/type/union.rb | 2 ++ manifest.yaml | 1 + 12 files changed, 44 insertions(+), 31 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index c4eda68d..df60b4df 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -118,9 +118,9 @@ Metrics/ParameterLists: Metrics/PerceivedComplexity: Enabled: false +# Need to preserve block identifier for documentation. Naming/BlockForwarding: - Exclude: - - "**/*.rbi" + Enabled: false Naming/ClassAndModuleCamelCase: Exclude: @@ -153,6 +153,9 @@ Style/Alias: Style/AndOr: EnforcedStyle: always +Style/ArgumentsForwarding: + Enabled: false + Style/BisectedAttrAccessor: Exclude: - "**/*.rbi" diff --git a/Rakefile b/Rakefile index f8c899c1..92963d6a 100644 --- a/Rakefile +++ b/Rakefile @@ -14,7 +14,7 @@ xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] multitask(default: [:test]) multitask(:test) do - rb = + rb = FileList[ENV.fetch("TEST", "./test/**/*_test.rb")] .map { "require_relative(#{_1.dump});" } .join diff --git a/Steepfile b/Steepfile index 48667fe7..6e5d0ac5 100644 --- a/Steepfile +++ b/Steepfile @@ -9,7 +9,7 @@ target :lib do YAML.safe_load_file("./manifest.yaml", symbolize_names: true) => { dependencies: } # currently these libraries lack the `*.rbs` annotations required by `steep` - stdlibs = dependencies - %w[etc net/http rbconfig set stringio] + stdlibs = dependencies - %w[English etc net/http rbconfig set stringio] stdlibs.each { library(_1) } end diff --git a/lib/openai.rb b/lib/openai.rb index 9231c014..a2330683 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -1,21 +1,7 @@ # frozen_string_literal: true -# We already ship the preferred sorbet manifests in the package itself. -# `tapioca` currently does not offer us a way to opt out of unnecessary compilation. -if Object.const_defined?(:Tapioca) && caller.chain([$0]).chain(ARGV).grep(/tapioca/) - Warning.warn( - <<~WARN - \n - ⚠️ skipped loading of "openai" gem under `tapioca`. - - This message is normal and expected if you are running a `tapioca` command, and does not impact `.rbi` generation. - \n - WARN - ) - return -end - # Standard libraries. +require "English" require "cgi" require "date" require "erb" @@ -30,6 +16,21 @@ require "time" require "uri" +# We already ship the preferred sorbet manifests in the package itself. +# `tapioca` currently does not offer us a way to opt out of unnecessary compilation. +if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) + Warning.warn( + <<~WARN + \n + ⚠️ skipped loading of "openai" gem under `tapioca`. + + This message is normal and expected if you are running a `tapioca` command, and does not impact `.rbi` generation. + \n + WARN + ) + return +end + # Gems. 
require "connection_pool" diff --git a/lib/openai/transport/pooled_net_requester.rb b/lib/openai/transport/pooled_net_requester.rb index c3a4260a..57c8bf5e 100644 --- a/lib/openai/transport/pooled_net_requester.rb +++ b/lib/openai/transport/pooled_net_requester.rb @@ -54,7 +54,7 @@ def calibrate_socket_timeout(conn, deadline) # # @yieldparam [String] # @return [Net::HTTPGenericRequest] - def build_request(request, &) + def build_request(request, &blk) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) req = Net::HTTPGenericRequest.new( method.to_s.upcase, @@ -70,13 +70,13 @@ def build_request(request, &) nil in String req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) in StringIO req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) in IO | Enumerator req["transfer-encoding"] ||= "chunked" unless req["content-length"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &) + req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) end req diff --git a/lib/openai/type/array_of.rb b/lib/openai/type/array_of.rb index 0e6f3227..35bb520d 100644 --- a/lib/openai/type/array_of.rb +++ b/lib/openai/type/array_of.rb @@ -31,7 +31,9 @@ def ===(other) = other.is_a?(Array) && other.all?(item_type) # @param other [Object] # # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type + def ==(other) + other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type + end # @api private # diff --git a/lib/openai/type/base_model.rb b/lib/openai/type/base_model.rb index 6bbed764..57f628f0 100644 --- a/lib/openai/type/base_model.rb +++ b/lib/openai/type/base_model.rb @@ -94,7 +94,9 @@ def fields end rescue StandardError cls = self.class.name.split("::").last + # rubocop:disable Layout/LineLength message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." + # rubocop:enable Layout/LineLength raise OpenAI::ConversionError.new(message) end end @@ -205,14 +207,13 @@ def coerce(value, state:) instance = new data = instance.to_h + # rubocop:disable Metrics/BlockLength fields.each do |name, field| mode, required, target = field.fetch_values(:mode, :required, :type) api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) unless val.key?(api_name) - if const != OpenAI::Util::OMIT - exactness[:yes] += 1 - elsif required && mode != :dump + if required && mode != :dump && const == OpenAI::Util::OMIT exactness[nilable ? 
:maybe : :no] += 1 else exactness[:yes] += 1 @@ -238,6 +239,7 @@ def coerce(value, state:) end data.store(name, converted) end + # rubocop:enable Metrics/BlockLength keys.each { data.store(_1, val.fetch(_1)) } instance diff --git a/lib/openai/type/base_page.rb b/lib/openai/type/base_page.rb index 33648d3c..5f9ee0fa 100644 --- a/lib/openai/type/base_page.rb +++ b/lib/openai/type/base_page.rb @@ -36,7 +36,7 @@ def next_page = (raise NotImplementedError) # @param blk [Proc] # # @return [void] - def auto_paging_each(&) = (raise NotImplementedError) + def auto_paging_each(&blk) = (raise NotImplementedError) # @return [Enumerable] def to_enum = super(:auto_paging_each) diff --git a/lib/openai/type/base_stream.rb b/lib/openai/type/base_stream.rb index 4d62568e..98f2b576 100644 --- a/lib/openai/type/base_stream.rb +++ b/lib/openai/type/base_stream.rb @@ -31,11 +31,11 @@ def close = OpenAI::Util.close_fused!(@iterator) # @param blk [Proc] # # @return [void] - def each(&) + def each(&blk) unless block_given? raise ArgumentError.new("A block must be given to ##{__method__}") end - @iterator.each(&) + @iterator.each(&blk) end # @return [Enumerator] diff --git a/lib/openai/type/hash_of.rb b/lib/openai/type/hash_of.rb index cc066188..b6760886 100644 --- a/lib/openai/type/hash_of.rb +++ b/lib/openai/type/hash_of.rb @@ -46,7 +46,9 @@ def ===(other) # @param other [Object] # # @return [Boolean] - def ==(other) = other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type + def ==(other) + other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type + end # @api private # diff --git a/lib/openai/type/union.rb b/lib/openai/type/union.rb index c96ad89d..da0bde31 100644 --- a/lib/openai/type/union.rb +++ b/lib/openai/type/union.rb @@ -124,7 +124,9 @@ def ===(other) # # @return [Boolean] def ==(other) + # rubocop:disable Layout/LineLength other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants + # rubocop:enable Layout/LineLength end # @api private diff --git a/manifest.yaml b/manifest.yaml index fa9c3e5e..556686f5 100644 --- a/manifest.yaml +++ b/manifest.yaml @@ -1,4 +1,5 @@ dependencies: + - English - cgi - date - erb From d4881ec00f9d1203ec263b4dcf09ca8f01cca0e5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 08:19:06 +0000 Subject: [PATCH 073/295] chore: use fully qualified name in sorbet README example (#75) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 63cf0c1f..a2b1e0e8 100644 --- a/README.md +++ b/README.md @@ -171,7 +171,7 @@ Due to limitations with the Sorbet type system, where a method otherwise can tak Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience. 
```ruby -model = CompletionCreateParams.new( +model = OpenAI::Models::Chat::CompletionCreateParams.new( messages: [{ role: "user", content: "Say this is a test" From ffc7ad0d48ddc5712ad14c8c3e159ecd2f99b246 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 08:25:27 +0000 Subject: [PATCH 074/295] chore: support binary responses (#76) --- lib/openai/resources/audio/speech.rb | 4 ++-- lib/openai/resources/files.rb | 4 ++-- rbi/lib/openai/resources/audio/speech.rbi | 2 +- rbi/lib/openai/resources/files.rbi | 2 +- sig/openai/resources/audio/speech.rbs | 2 +- sig/openai/resources/files.rbs | 2 +- test/openai/resources/audio/speech_test.rb | 2 +- test/openai/resources/files_test.rb | 2 +- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index cbffafe3..0b5bb925 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -29,7 +29,7 @@ class Speech # # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # - # @return [Object] + # @return [StringIO] def create(params) parsed, options = OpenAI::Models::Audio::SpeechCreateParams.dump_request(params) @client.request( @@ -37,7 +37,7 @@ def create(params) path: "audio/speech", headers: {"accept" => "application/octet-stream"}, body: parsed, - model: OpenAI::Unknown, + model: StringIO, options: options ) end diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 191387e3..63a4f893 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -125,13 +125,13 @@ def delete(file_id, params = {}) # # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # - # @return [Object] + # @return [StringIO] def content(file_id, params = {}) @client.request( method: :get, path: ["files/%0s/content", file_id], headers: {"accept" => "application/binary"}, - model: OpenAI::Unknown, + model: StringIO, options: params[:request_options] ) end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index 2c23fe1f..d17427d8 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -15,7 +15,7 @@ module OpenAI speed: Float, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) ) - .returns(T.anything) + .returns(StringIO) end def create( # The text to generate audio for. The maximum length is 4096 characters. diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index a8fa8604..9ddc3a8e 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -124,7 +124,7 @@ module OpenAI ) ) ) - .returns(T.anything) + .returns(StringIO) end def content( # The ID of the file to use for this request. 
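This patch narrows the binary endpoints from an untyped value to `StringIO`, which makes the raw bytes directly consumable. A minimal sketch of the resulting call pattern, assuming a client configured with a valid API key; the constructor argument, file ID, and output file names here are illustrative, not taken from the patch:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Speech synthesis now returns a StringIO, so the audio bytes can be
# written straight to disk instead of going through an opaque value.
speech = client.audio.speech.create(input: "Hello!", model: :"tts-1", voice: :alloy)
File.binwrite("hello.mp3", speech.string)

# The same pattern applies to file content downloads.
contents = client.files.content("file-abc123")
File.binwrite("download.bin", contents.string)
```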
diff --git a/sig/openai/resources/audio/speech.rbs b/sig/openai/resources/audio/speech.rbs index 7e7d117c..ad93110a 100644 --- a/sig/openai/resources/audio/speech.rbs +++ b/sig/openai/resources/audio/speech.rbs @@ -10,7 +10,7 @@ module OpenAI ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, ?speed: Float, ?request_options: OpenAI::request_opts - ) -> top + ) -> StringIO def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index f3913739..62988179 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -28,7 +28,7 @@ module OpenAI def content: ( String file_id, ?request_options: OpenAI::request_opts - ) -> top + ) -> StringIO def initialize: (client: OpenAI::Client) -> void end diff --git a/test/openai/resources/audio/speech_test.rb b/test/openai/resources/audio/speech_test.rb index 9718aa1a..e1588426 100644 --- a/test/openai/resources/audio/speech_test.rb +++ b/test/openai/resources/audio/speech_test.rb @@ -7,7 +7,7 @@ def test_create_required_params response = @openai.audio.speech.create(input: "input", model: :"tts-1", voice: :alloy) assert_pattern do - response => OpenAI::Unknown + response => StringIO end end end diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index 6384982e..58bcbf2b 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -98,7 +98,7 @@ def test_content response = @openai.files.content("file_id") assert_pattern do - response => OpenAI::Unknown + response => StringIO end end end From 5b635a46aec44be7e446c640f87827f343c5d13b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 08:30:25 +0000 Subject: [PATCH 075/295] fix: path interpolation template strings (#77) --- lib/openai/resources/batches.rb | 4 ++-- lib/openai/resources/beta/assistants.rb | 6 +++--- lib/openai/resources/beta/threads.rb | 6 +++--- lib/openai/resources/beta/threads/messages.rb | 10 +++++----- lib/openai/resources/beta/threads/runs.rb | 16 ++++++++-------- lib/openai/resources/beta/threads/runs/steps.rb | 4 ++-- lib/openai/resources/chat/completions.rb | 6 +++--- .../resources/chat/completions/messages.rb | 2 +- lib/openai/resources/files.rb | 6 +++--- lib/openai/resources/fine_tuning/jobs.rb | 6 +++--- .../resources/fine_tuning/jobs/checkpoints.rb | 2 +- lib/openai/resources/models.rb | 4 ++-- lib/openai/resources/responses.rb | 4 ++-- lib/openai/resources/responses/input_items.rb | 2 +- lib/openai/resources/uploads.rb | 4 ++-- lib/openai/resources/uploads/parts.rb | 2 +- lib/openai/resources/vector_stores.rb | 8 ++++---- .../resources/vector_stores/file_batches.rb | 8 ++++---- lib/openai/resources/vector_stores/files.rb | 12 ++++++------ lib/openai/util.rb | 4 ++-- 20 files changed, 58 insertions(+), 58 deletions(-) diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 62883c2e..143a05ee 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -58,7 +58,7 @@ def create(params) def retrieve(batch_id, params = {}) @client.request( method: :get, - path: ["batches/%0s", batch_id], + path: ["batches/%1$s", batch_id], model: OpenAI::Models::Batch, options: params[:request_options] ) @@ -105,7 +105,7 @@ def list(params = {}) def cancel(batch_id, params = {}) @client.request( method: :post, - path: ["batches/%0s/cancel", batch_id], + path: ["batches/%1$s/cancel", 
batch_id], model: OpenAI::Models::Batch, options: params[:request_options] ) diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 8fbb01b9..1557a813 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -101,7 +101,7 @@ def create(params) def retrieve(assistant_id, params = {}) @client.request( method: :get, - path: ["assistants/%0s", assistant_id], + path: ["assistants/%1$s", assistant_id], model: OpenAI::Models::Beta::Assistant, options: params[:request_options] ) @@ -187,7 +187,7 @@ def update(assistant_id, params = {}) parsed, options = OpenAI::Models::Beta::AssistantUpdateParams.dump_request(params) @client.request( method: :post, - path: ["assistants/%0s", assistant_id], + path: ["assistants/%1$s", assistant_id], body: parsed, model: OpenAI::Models::Beta::Assistant, options: options @@ -241,7 +241,7 @@ def list(params = {}) def delete(assistant_id, params = {}) @client.request( method: :delete, - path: ["assistants/%0s", assistant_id], + path: ["assistants/%1$s", assistant_id], model: OpenAI::Models::Beta::AssistantDeleted, options: params[:request_options] ) diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 27b67c15..767bd13c 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -55,7 +55,7 @@ def create(params = {}) def retrieve(thread_id, params = {}) @client.request( method: :get, - path: ["threads/%0s", thread_id], + path: ["threads/%1$s", thread_id], model: OpenAI::Models::Beta::Thread, options: params[:request_options] ) @@ -86,7 +86,7 @@ def update(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::ThreadUpdateParams.dump_request(params) @client.request( method: :post, - path: ["threads/%0s", thread_id], + path: ["threads/%1$s", thread_id], body: parsed, model: OpenAI::Models::Beta::Thread, options: options @@ -105,7 +105,7 @@ def update(thread_id, params = {}) def delete(thread_id, params = {}) @client.request( method: :delete, - path: ["threads/%0s", thread_id], + path: ["threads/%1$s", thread_id], model: OpenAI::Models::Beta::ThreadDeleted, options: params[:request_options] ) diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 5b87f5f5..fdcef361 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -37,7 +37,7 @@ def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageCreateParams.dump_request(params) @client.request( method: :post, - path: ["threads/%0s/messages", thread_id], + path: ["threads/%1$s/messages", thread_id], body: parsed, model: OpenAI::Models::Beta::Threads::Message, options: options @@ -64,7 +64,7 @@ def retrieve(message_id, params) end @client.request( method: :get, - path: ["threads/%0s/messages/%1s", thread_id, message_id], + path: ["threads/%1$s/messages/%2$s", thread_id, message_id], model: OpenAI::Models::Beta::Threads::Message, options: options ) @@ -96,7 +96,7 @@ def update(message_id, params) end @client.request( method: :post, - path: ["threads/%0s/messages/%1s", thread_id, message_id], + path: ["threads/%1$s/messages/%2$s", thread_id, message_id], body: parsed, model: OpenAI::Models::Beta::Threads::Message, options: options @@ -135,7 +135,7 @@ def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::MessageListParams.dump_request(params) @client.request( method: :get, - path: 
["threads/%0s/messages", thread_id], + path: ["threads/%1$s/messages", thread_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::Beta::Threads::Message, @@ -162,7 +162,7 @@ def delete(message_id, params) end @client.request( method: :delete, - path: ["threads/%0s/messages/%1s", thread_id, message_id], + path: ["threads/%1$s/messages/%2$s", thread_id, message_id], model: OpenAI::Models::Beta::Threads::MessageDeleted, options: options ) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index fa61373d..d67d31ee 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -132,7 +132,7 @@ def create(thread_id, params) query_params = [:include] @client.request( method: :post, - path: ["threads/%0s/runs", thread_id], + path: ["threads/%1$s/runs", thread_id], query: parsed.slice(*query_params), body: parsed.except(*query_params), model: OpenAI::Models::Beta::Threads::Run, @@ -265,7 +265,7 @@ def create_streaming(thread_id, params) query_params = [:include] @client.request( method: :post, - path: ["threads/%0s/runs", thread_id], + path: ["threads/%1$s/runs", thread_id], query: parsed.slice(*query_params), headers: {"accept" => "text/event-stream"}, body: parsed.except(*query_params), @@ -295,7 +295,7 @@ def retrieve(run_id, params) end @client.request( method: :get, - path: ["threads/%0s/runs/%1s", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s", thread_id, run_id], model: OpenAI::Models::Beta::Threads::Run, options: options ) @@ -328,7 +328,7 @@ def update(run_id, params) end @client.request( method: :post, - path: ["threads/%0s/runs/%1s", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s", thread_id, run_id], body: parsed, model: OpenAI::Models::Beta::Threads::Run, options: options @@ -364,7 +364,7 @@ def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::RunListParams.dump_request(params) @client.request( method: :get, - path: ["threads/%0s/runs", thread_id], + path: ["threads/%1$s/runs", thread_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::Beta::Threads::Run, @@ -391,7 +391,7 @@ def cancel(run_id, params) end @client.request( method: :post, - path: ["threads/%0s/runs/%1s/cancel", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s/cancel", thread_id, run_id], model: OpenAI::Models::Beta::Threads::Run, options: options ) @@ -427,7 +427,7 @@ def submit_tool_outputs(run_id, params) end @client.request( method: :post, - path: ["threads/%0s/runs/%1s/submit_tool_outputs", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id], body: parsed, model: OpenAI::Models::Beta::Threads::Run, options: options @@ -465,7 +465,7 @@ def submit_tool_outputs_streaming(run_id, params) end @client.request( method: :post, - path: ["threads/%0s/runs/%1s/submit_tool_outputs", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id], headers: {"accept" => "text/event-stream"}, body: parsed, stream: OpenAI::Stream, diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 0dc70121..9d087222 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -40,7 +40,7 @@ def retrieve(step_id, params) end @client.request( method: :get, - path: ["threads/%0s/runs/%1s/steps/%2s", thread_id, run_id, step_id], + path: ["threads/%1$s/runs/%2$s/steps/%3$s", thread_id, run_id, 
step_id], query: parsed, model: OpenAI::Models::Beta::Threads::Runs::RunStep, options: options @@ -91,7 +91,7 @@ def list(run_id, params) end @client.request( method: :get, - path: ["threads/%0s/runs/%1s/steps", thread_id, run_id], + path: ["threads/%1$s/runs/%2$s/steps", thread_id, run_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::Beta::Threads::Runs::RunStep, diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 52e05866..86c7172c 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -465,7 +465,7 @@ def create_streaming(params) def retrieve(completion_id, params = {}) @client.request( method: :get, - path: ["chat/completions/%0s", completion_id], + path: ["chat/completions/%1$s", completion_id], model: OpenAI::Models::Chat::ChatCompletion, options: params[:request_options] ) @@ -493,7 +493,7 @@ def update(completion_id, params) parsed, options = OpenAI::Models::Chat::CompletionUpdateParams.dump_request(params) @client.request( method: :post, - path: ["chat/completions/%0s", completion_id], + path: ["chat/completions/%1$s", completion_id], body: parsed, model: OpenAI::Models::Chat::ChatCompletion, options: options @@ -546,7 +546,7 @@ def list(params = {}) def delete(completion_id, params = {}) @client.request( method: :delete, - path: ["chat/completions/%0s", completion_id], + path: ["chat/completions/%1$s", completion_id], model: OpenAI::Models::Chat::ChatCompletionDeleted, options: params[:request_options] ) diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index decc122d..e40262ce 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -26,7 +26,7 @@ def list(completion_id, params = {}) parsed, options = OpenAI::Models::Chat::Completions::MessageListParams.dump_request(params) @client.request( method: :get, - path: ["chat/completions/%0s/messages", completion_id], + path: ["chat/completions/%1$s/messages", completion_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::Chat::ChatCompletionStoreMessage, diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 63a4f893..d0253a42 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -61,7 +61,7 @@ def create(params) def retrieve(file_id, params = {}) @client.request( method: :get, - path: ["files/%0s", file_id], + path: ["files/%1$s", file_id], model: OpenAI::Models::FileObject, options: params[:request_options] ) @@ -111,7 +111,7 @@ def list(params = {}) def delete(file_id, params = {}) @client.request( method: :delete, - path: ["files/%0s", file_id], + path: ["files/%1$s", file_id], model: OpenAI::Models::FileDeleted, options: params[:request_options] ) @@ -129,7 +129,7 @@ def delete(file_id, params = {}) def content(file_id, params = {}) @client.request( method: :get, - path: ["files/%0s/content", file_id], + path: ["files/%1$s/content", file_id], headers: {"accept" => "application/binary"}, model: StringIO, options: params[:request_options] diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 9d00777f..baf7a2cc 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -103,7 +103,7 @@ def create(params) def retrieve(fine_tuning_job_id, params = {}) @client.request( method: :get, - path: ["fine_tuning/jobs/%0s", 
fine_tuning_job_id], + path: ["fine_tuning/jobs/%1$s", fine_tuning_job_id], model: OpenAI::Models::FineTuning::FineTuningJob, options: params[:request_options] ) @@ -147,7 +147,7 @@ def list(params = {}) def cancel(fine_tuning_job_id, params = {}) @client.request( method: :post, - path: ["fine_tuning/jobs/%0s/cancel", fine_tuning_job_id], + path: ["fine_tuning/jobs/%1$s/cancel", fine_tuning_job_id], model: OpenAI::Models::FineTuning::FineTuningJob, options: params[:request_options] ) @@ -170,7 +170,7 @@ def list_events(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::JobListEventsParams.dump_request(params) @client.request( method: :get, - path: ["fine_tuning/jobs/%0s/events", fine_tuning_job_id], + path: ["fine_tuning/jobs/%1$s/events", fine_tuning_job_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::FineTuning::FineTuningJobEvent, diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index cb4b3c18..ced20ee9 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -22,7 +22,7 @@ def list(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::Jobs::CheckpointListParams.dump_request(params) @client.request( method: :get, - path: ["fine_tuning/jobs/%0s/checkpoints", fine_tuning_job_id], + path: ["fine_tuning/jobs/%1$s/checkpoints", fine_tuning_job_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint, diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index cff4bf06..937381d2 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -16,7 +16,7 @@ class Models def retrieve(model, params = {}) @client.request( method: :get, - path: ["models/%0s", model], + path: ["models/%1$s", model], model: OpenAI::Models::Model, options: params[:request_options] ) @@ -53,7 +53,7 @@ def list(params = {}) def delete(model, params = {}) @client.request( method: :delete, - path: ["models/%0s", model], + path: ["models/%1$s", model], model: OpenAI::Models::ModelDeleted, options: params[:request_options] ) diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index e0a7bc26..99f01ccb 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -297,7 +297,7 @@ def retrieve(response_id, params = {}) parsed, options = OpenAI::Models::Responses::ResponseRetrieveParams.dump_request(params) @client.request( method: :get, - path: ["responses/%0s", response_id], + path: ["responses/%1$s", response_id], query: parsed, model: OpenAI::Models::Responses::Response, options: options @@ -316,7 +316,7 @@ def retrieve(response_id, params = {}) def delete(response_id, params = {}) @client.request( method: :delete, - path: ["responses/%0s", response_id], + path: ["responses/%1$s", response_id], model: NilClass, options: params[:request_options] ) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index ece1f2a1..30f343fd 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -32,7 +32,7 @@ def list(response_id, params = {}) parsed, options = OpenAI::Models::Responses::InputItemListParams.dump_request(params) @client.request( method: :get, - path: ["responses/%0s/input_items", response_id], + path: ["responses/%1$s/input_items", response_id], 
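# The `%1$s`, `%2$s`, ... segments in these path templates are 1-indexed
# positional specifiers consumed by Kernel#format, e.g.
#   format("responses/%1$s/input_items", "resp_123")
#   # => "responses/resp_123/input_items"
# and, per the `interpolate_path` change below, each argument is URL-encoded
# before substitution. The old `%0s`/`%1s` forms appear to parse as
# flag/width syntax rather than argument positions, which is what this fix
# corrects.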
query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::Responses::ResponseItem, diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index f72e4255..f50e95ed 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -68,7 +68,7 @@ def create(params) def cancel(upload_id, params = {}) @client.request( method: :post, - path: ["uploads/%0s/cancel", upload_id], + path: ["uploads/%1$s/cancel", upload_id], model: OpenAI::Models::Upload, options: params[:request_options] ) @@ -104,7 +104,7 @@ def complete(upload_id, params) parsed, options = OpenAI::Models::UploadCompleteParams.dump_request(params) @client.request( method: :post, - path: ["uploads/%0s/complete", upload_id], + path: ["uploads/%1$s/complete", upload_id], body: parsed, model: OpenAI::Models::Upload, options: options diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index b90eef15..ea9e8a5f 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -29,7 +29,7 @@ def create(upload_id, params) parsed, options = OpenAI::Models::Uploads::PartCreateParams.dump_request(params) @client.request( method: :post, - path: ["uploads/%0s/parts", upload_id], + path: ["uploads/%1$s/parts", upload_id], headers: {"content-type" => "multipart/form-data"}, body: parsed, model: OpenAI::Models::Uploads::UploadPart, diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index e60a1592..5d0fe33d 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -57,7 +57,7 @@ def create(params = {}) def retrieve(vector_store_id, params = {}) @client.request( method: :get, - path: ["vector_stores/%0s", vector_store_id], + path: ["vector_stores/%1$s", vector_store_id], model: OpenAI::Models::VectorStore, options: params[:request_options] ) @@ -87,7 +87,7 @@ def update(vector_store_id, params = {}) parsed, options = OpenAI::Models::VectorStoreUpdateParams.dump_request(params) @client.request( method: :post, - path: ["vector_stores/%0s", vector_store_id], + path: ["vector_stores/%1$s", vector_store_id], body: parsed, model: OpenAI::Models::VectorStore, options: options @@ -141,7 +141,7 @@ def list(params = {}) def delete(vector_store_id, params = {}) @client.request( method: :delete, - path: ["vector_stores/%0s", vector_store_id], + path: ["vector_stores/%1$s", vector_store_id], model: OpenAI::Models::VectorStoreDeleted, options: params[:request_options] ) @@ -172,7 +172,7 @@ def search(vector_store_id, params) parsed, options = OpenAI::Models::VectorStoreSearchParams.dump_request(params) @client.request( method: :post, - path: ["vector_stores/%0s/search", vector_store_id], + path: ["vector_stores/%1$s/search", vector_store_id], body: parsed, page: OpenAI::Page, model: OpenAI::Models::VectorStoreSearchResponse, diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index b3523d38..1a9d1dae 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -30,7 +30,7 @@ def create(vector_store_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCreateParams.dump_request(params) @client.request( method: :post, - path: ["vector_stores/%0s/file_batches", vector_store_id], + path: ["vector_stores/%1$s/file_batches", vector_store_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFileBatch, options: options @@ 
-56,7 +56,7 @@ def retrieve(batch_id, params) end @client.request( method: :get, - path: ["vector_stores/%0s/file_batches/%1s", vector_store_id, batch_id], + path: ["vector_stores/%1$s/file_batches/%2$s", vector_store_id, batch_id], model: OpenAI::Models::VectorStores::VectorStoreFileBatch, options: options ) @@ -82,7 +82,7 @@ def cancel(batch_id, params) end @client.request( method: :post, - path: ["vector_stores/%0s/file_batches/%1s/cancel", vector_store_id, batch_id], + path: ["vector_stores/%1$s/file_batches/%2$s/cancel", vector_store_id, batch_id], model: OpenAI::Models::VectorStores::VectorStoreFileBatch, options: options ) @@ -126,7 +126,7 @@ def list_files(batch_id, params) end @client.request( method: :get, - path: ["vector_stores/%0s/file_batches/%1s/files", vector_store_id, batch_id], + path: ["vector_stores/%1$s/file_batches/%2$s/files", vector_store_id, batch_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 54b806c0..57e9af60 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -32,7 +32,7 @@ def create(vector_store_id, params) parsed, options = OpenAI::Models::VectorStores::FileCreateParams.dump_request(params) @client.request( method: :post, - path: ["vector_stores/%0s/files", vector_store_id], + path: ["vector_stores/%1$s/files", vector_store_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFile, options: options @@ -58,7 +58,7 @@ def retrieve(file_id, params) end @client.request( method: :get, - path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], + path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], model: OpenAI::Models::VectorStores::VectorStoreFile, options: options ) @@ -89,7 +89,7 @@ def update(file_id, params) end @client.request( method: :post, - path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], + path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFile, options: options @@ -127,7 +127,7 @@ def list(vector_store_id, params = {}) parsed, options = OpenAI::Models::VectorStores::FileListParams.dump_request(params) @client.request( method: :get, - path: ["vector_stores/%0s/files", vector_store_id], + path: ["vector_stores/%1$s/files", vector_store_id], query: parsed, page: OpenAI::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, @@ -157,7 +157,7 @@ def delete(file_id, params) end @client.request( method: :delete, - path: ["vector_stores/%0s/files/%1s", vector_store_id, file_id], + path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], model: OpenAI::Models::VectorStores::VectorStoreFileDeleted, options: options ) @@ -182,7 +182,7 @@ def content(file_id, params) end @client.request( method: :get, - path: ["vector_stores/%0s/files/%1s/content", vector_store_id, file_id], + path: ["vector_stores/%1$s/files/%2$s/content", vector_store_id, file_id], page: OpenAI::Page, model: OpenAI::Models::VectorStores::FileContentResponse, options: options diff --git a/lib/openai/util.rb b/lib/openai/util.rb index f19004c2..8a4b693f 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -253,9 +253,9 @@ def interpolate_path(path) path in [] "" - in [String, *interpolations] + in [String => p, *interpolations] encoded = interpolations.map { ERB::Util.url_encode(_1) } - path.first % encoded + format(p, 
*encoded) end end end From dc95c08a77fe912e8a5352de1f69857ee3e79056 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 14:27:40 +0000 Subject: [PATCH 076/295] chore(internal): version bump (#78) --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- rbi/lib/openai/version.rbi | 2 +- sig/openai/version.rbs | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index f14b480a..aaf968a1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.2" + ".": "0.1.0-alpha.3" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 4d2964c7..0bcf3d3b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.1.0.pre.alpha.2) + openai (0.1.0.pre.alpha.3) connection_pool GEM diff --git a/README.md b/README.md index a2b1e0e8..98c8fe21 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ The underlying REST API documentation can be found on [platform.openai.com](http To use this gem, install via Bundler by adding the following to your application's `Gemfile`: ```ruby -gem "openai", "~> 0.1.0.pre.alpha.2" +gem "openai", "~> 0.1.0.pre.alpha.3" ``` To fetch an initial copy of the gem: diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 8175d17f..148d7830 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0-alpha.2" + VERSION = "0.1.0-alpha.3" end diff --git a/rbi/lib/openai/version.rbi b/rbi/lib/openai/version.rbi index 1f42bc59..5886c95b 100644 --- a/rbi/lib/openai/version.rbi +++ b/rbi/lib/openai/version.rbi @@ -1,5 +1,5 @@ # typed: strong module OpenAI - VERSION = "0.1.0-alpha.2" + VERSION = "0.1.0-alpha.3" end diff --git a/sig/openai/version.rbs b/sig/openai/version.rbs index b4a8f46d..d670f686 100644 --- a/sig/openai/version.rbs +++ b/sig/openai/version.rbs @@ -1,3 +1,3 @@ module OpenAI - VERSION: "0.1.0-alpha.2" + VERSION: "0.1.0-alpha.3" end From c7369324759d26504f5c65a6ecefb7a0d30a67cf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 15:30:54 +0000 Subject: [PATCH 077/295] feat: add reference links in yard (#79) --- .../models/audio/speech_create_params.rb | 1 + .../audio/transcription_create_params.rb | 3 ++ .../models/audio/translation_create_params.rb | 1 + lib/openai/models/batch.rb | 3 ++ lib/openai/models/batch_cancel_params.rb | 1 + lib/openai/models/batch_create_params.rb | 1 + lib/openai/models/batch_list_params.rb | 1 + lib/openai/models/batch_retrieve_params.rb | 1 + lib/openai/models/beta/assistant.rb | 3 ++ .../models/beta/assistant_create_params.rb | 6 ++++ .../models/beta/assistant_delete_params.rb | 1 + .../models/beta/assistant_list_params.rb | 1 + .../models/beta/assistant_retrieve_params.rb | 1 + .../models/beta/assistant_tool_choice.rb | 2 ++ .../models/beta/assistant_update_params.rb | 3 ++ lib/openai/models/beta/file_search_tool.rb | 4 +++ lib/openai/models/beta/thread.rb | 3 ++ .../beta/thread_create_and_run_params.rb | 17 ++++++++++ .../models/beta/thread_create_params.rb | 10 ++++++ .../models/beta/thread_delete_params.rb | 1 + .../models/beta/thread_retrieve_params.rb | 1 + .../models/beta/thread_update_params.rb | 3 ++ .../beta/threads/file_citation_annotation.rb | 1 
+ .../threads/file_citation_delta_annotation.rb | 1 + .../beta/threads/file_path_annotation.rb | 1 + .../threads/file_path_delta_annotation.rb | 1 + lib/openai/models/beta/threads/image_file.rb | 2 ++ .../models/beta/threads/image_file_delta.rb | 2 ++ lib/openai/models/beta/threads/image_url.rb | 2 ++ .../models/beta/threads/image_url_delta.rb | 2 ++ lib/openai/models/beta/threads/message.rb | 7 +++++ .../beta/threads/message_create_params.rb | 1 + .../beta/threads/message_delete_params.rb | 1 + .../models/beta/threads/message_delta.rb | 2 ++ .../beta/threads/message_list_params.rb | 1 + .../beta/threads/message_retrieve_params.rb | 1 + .../beta/threads/message_update_params.rb | 1 + .../required_action_function_tool_call.rb | 1 + lib/openai/models/beta/threads/run.rb | 12 +++++++ .../models/beta/threads/run_cancel_params.rb | 1 + .../models/beta/threads/run_create_params.rb | 9 ++++++ .../models/beta/threads/run_list_params.rb | 1 + .../beta/threads/run_retrieve_params.rb | 1 + .../threads/run_submit_tool_outputs_params.rb | 3 ++ .../models/beta/threads/run_update_params.rb | 1 + .../runs/code_interpreter_output_image.rb | 1 + .../runs/code_interpreter_tool_call.rb | 2 ++ .../runs/code_interpreter_tool_call_delta.rb | 1 + .../threads/runs/file_search_tool_call.rb | 6 ++++ .../beta/threads/runs/function_tool_call.rb | 1 + .../threads/runs/function_tool_call_delta.rb | 1 + .../runs/message_creation_step_details.rb | 1 + .../models/beta/threads/runs/run_step.rb | 10 ++++++ .../beta/threads/runs/run_step_delta.rb | 2 ++ .../runs/run_step_delta_message_delta.rb | 1 + .../beta/threads/runs/step_list_params.rb | 1 + .../beta/threads/runs/step_retrieve_params.rb | 1 + lib/openai/models/chat/chat_completion.rb | 5 +++ ...chat_completion_assistant_message_param.rb | 5 +++ .../chat/chat_completion_audio_param.rb | 4 +++ .../models/chat/chat_completion_chunk.rb | 13 ++++++++ .../chat/chat_completion_content_part.rb | 1 + .../chat_completion_content_part_image.rb | 3 ++ ...hat_completion_content_part_input_audio.rb | 3 ++ ...chat_completion_developer_message_param.rb | 2 ++ .../models/chat/chat_completion_message.rb | 3 ++ .../chat/chat_completion_message_tool_call.rb | 1 + .../chat/chat_completion_named_tool_choice.rb | 1 + .../chat_completion_prediction_content.rb | 2 ++ .../chat_completion_system_message_param.rb | 2 ++ .../chat_completion_tool_message_param.rb | 2 ++ .../chat_completion_user_message_param.rb | 2 ++ .../models/chat/completion_create_params.rb | 7 +++++ .../models/chat/completion_delete_params.rb | 1 + .../models/chat/completion_list_params.rb | 1 + .../models/chat/completion_retrieve_params.rb | 1 + .../models/chat/completion_update_params.rb | 1 + .../chat/completions/message_list_params.rb | 1 + lib/openai/models/comparison_filter.rb | 4 +++ lib/openai/models/completion_choice.rb | 3 ++ lib/openai/models/completion_create_params.rb | 3 ++ lib/openai/models/completion_usage.rb | 2 ++ lib/openai/models/compound_filter.rb | 2 ++ .../models/create_embedding_response.rb | 1 + lib/openai/models/embedding_create_params.rb | 1 + lib/openai/models/file_content_params.rb | 1 + lib/openai/models/file_create_params.rb | 1 + lib/openai/models/file_delete_params.rb | 1 + lib/openai/models/file_list_params.rb | 1 + lib/openai/models/file_object.rb | 4 +++ lib/openai/models/file_retrieve_params.rb | 1 + .../models/fine_tuning/fine_tuning_job.rb | 31 +++++++++++++++++++ .../fine_tuning/fine_tuning_job_event.rb | 4 +++ .../models/fine_tuning/job_cancel_params.rb | 1 + 
.../models/fine_tuning/job_create_params.rb | 28 +++++++++++++++++ .../fine_tuning/job_list_events_params.rb | 1 + .../models/fine_tuning/job_list_params.rb | 1 + .../models/fine_tuning/job_retrieve_params.rb | 1 + .../jobs/checkpoint_list_params.rb | 1 + .../jobs/fine_tuning_job_checkpoint.rb | 1 + .../models/image_create_variation_params.rb | 1 + lib/openai/models/image_edit_params.rb | 1 + lib/openai/models/image_generate_params.rb | 1 + lib/openai/models/model_delete_params.rb | 1 + lib/openai/models/model_list_params.rb | 1 + lib/openai/models/model_retrieve_params.rb | 1 + lib/openai/models/moderation.rb | 3 ++ lib/openai/models/moderation_create_params.rb | 1 + .../models/moderation_image_url_input.rb | 1 + lib/openai/models/reasoning.rb | 2 ++ .../models/response_format_json_schema.rb | 1 + lib/openai/models/responses/computer_tool.rb | 2 ++ .../models/responses/easy_input_message.rb | 6 ++++ .../models/responses/file_search_tool.rb | 5 +++ .../responses/input_item_list_params.rb | 1 + lib/openai/models/responses/response.rb | 7 +++++ .../response_code_interpreter_tool_call.rb | 2 ++ .../responses/response_computer_tool_call.rb | 8 +++++ ...response_computer_tool_call_output_item.rb | 2 ++ .../response_content_part_added_event.rb | 2 ++ .../response_content_part_done_event.rb | 2 ++ .../responses/response_create_params.rb | 3 ++ .../responses/response_delete_params.rb | 1 + lib/openai/models/responses/response_error.rb | 2 ++ .../response_file_search_tool_call.rb | 2 ++ .../responses/response_function_tool_call.rb | 2 ++ ...response_function_tool_call_output_item.rb | 2 ++ .../responses/response_function_web_search.rb | 2 ++ .../models/responses/response_input_audio.rb | 2 ++ .../models/responses/response_input_image.rb | 2 ++ .../models/responses/response_input_item.rb | 10 ++++++ .../responses/response_input_message_item.rb | 6 ++++ .../responses/response_output_message.rb | 2 ++ .../responses/response_reasoning_item.rb | 2 ++ .../responses/response_retrieve_params.rb | 1 + .../response_text_annotation_delta_event.rb | 2 ++ lib/openai/models/responses/response_usage.rb | 2 ++ .../models/responses/tool_choice_types.rb | 2 ++ .../models/responses/web_search_tool.rb | 5 +++ lib/openai/models/upload.rb | 2 ++ lib/openai/models/upload_cancel_params.rb | 1 + lib/openai/models/upload_complete_params.rb | 1 + lib/openai/models/upload_create_params.rb | 1 + .../models/uploads/part_create_params.rb | 1 + lib/openai/models/vector_store.rb | 4 +++ .../models/vector_store_create_params.rb | 1 + .../models/vector_store_delete_params.rb | 1 + lib/openai/models/vector_store_list_params.rb | 1 + .../models/vector_store_retrieve_params.rb | 1 + .../models/vector_store_search_params.rb | 2 ++ .../models/vector_store_search_response.rb | 2 ++ .../models/vector_store_update_params.rb | 1 + .../vector_stores/file_batch_cancel_params.rb | 1 + .../vector_stores/file_batch_create_params.rb | 1 + .../file_batch_list_files_params.rb | 1 + .../file_batch_retrieve_params.rb | 1 + .../vector_stores/file_content_params.rb | 1 + .../vector_stores/file_create_params.rb | 1 + .../vector_stores/file_delete_params.rb | 1 + .../models/vector_stores/file_list_params.rb | 1 + .../vector_stores/file_retrieve_params.rb | 1 + .../vector_stores/file_update_params.rb | 1 + .../models/vector_stores/vector_store_file.rb | 5 +++ .../vector_stores/vector_store_file_batch.rb | 3 ++ lib/openai/resources/audio.rb | 2 ++ lib/openai/resources/audio/speech.rb | 4 +++ lib/openai/resources/audio/transcriptions.rb | 6 ++++ 
lib/openai/resources/audio/translations.rb | 4 +++ lib/openai/resources/batches.rb | 10 ++++++ lib/openai/resources/beta.rb | 2 ++ lib/openai/resources/beta/assistants.rb | 12 +++++++ lib/openai/resources/beta/threads.rb | 14 +++++++++ lib/openai/resources/beta/threads/messages.rb | 12 +++++++ lib/openai/resources/beta/threads/runs.rb | 18 +++++++++++ .../resources/beta/threads/runs/steps.rb | 6 ++++ lib/openai/resources/chat.rb | 2 ++ lib/openai/resources/chat/completions.rb | 14 +++++++++ .../resources/chat/completions/messages.rb | 4 +++ lib/openai/resources/completions.rb | 6 ++++ lib/openai/resources/embeddings.rb | 4 +++ lib/openai/resources/files.rb | 12 +++++++ lib/openai/resources/fine_tuning.rb | 2 ++ lib/openai/resources/fine_tuning/jobs.rb | 12 +++++++ .../resources/fine_tuning/jobs/checkpoints.rb | 4 +++ lib/openai/resources/images.rb | 8 +++++ lib/openai/resources/models.rb | 8 +++++ lib/openai/resources/moderations.rb | 4 +++ lib/openai/resources/responses.rb | 10 ++++++ lib/openai/resources/responses/input_items.rb | 4 +++ lib/openai/resources/uploads.rb | 8 +++++ lib/openai/resources/uploads/parts.rb | 4 +++ lib/openai/resources/vector_stores.rb | 14 +++++++++ .../resources/vector_stores/file_batches.rb | 10 ++++++ lib/openai/resources/vector_stores/files.rb | 14 +++++++++ rbi/lib/openai/resources/audio.rbi | 1 + rbi/lib/openai/resources/audio/speech.rbi | 1 + .../openai/resources/audio/transcriptions.rbi | 1 + .../openai/resources/audio/translations.rbi | 1 + rbi/lib/openai/resources/batches.rbi | 1 + rbi/lib/openai/resources/beta.rbi | 1 + rbi/lib/openai/resources/beta/assistants.rbi | 1 + rbi/lib/openai/resources/beta/threads.rbi | 1 + .../resources/beta/threads/messages.rbi | 1 + .../openai/resources/beta/threads/runs.rbi | 1 + .../resources/beta/threads/runs/steps.rbi | 1 + rbi/lib/openai/resources/chat.rbi | 1 + rbi/lib/openai/resources/chat/completions.rbi | 1 + .../resources/chat/completions/messages.rbi | 1 + rbi/lib/openai/resources/completions.rbi | 1 + rbi/lib/openai/resources/embeddings.rbi | 1 + rbi/lib/openai/resources/files.rbi | 1 + rbi/lib/openai/resources/fine_tuning.rbi | 1 + rbi/lib/openai/resources/fine_tuning/jobs.rbi | 1 + .../fine_tuning/jobs/checkpoints.rbi | 1 + rbi/lib/openai/resources/images.rbi | 1 + rbi/lib/openai/resources/models.rbi | 1 + rbi/lib/openai/resources/moderations.rbi | 1 + rbi/lib/openai/resources/responses.rbi | 1 + .../resources/responses/input_items.rbi | 1 + rbi/lib/openai/resources/uploads.rbi | 1 + rbi/lib/openai/resources/uploads/parts.rbi | 1 + rbi/lib/openai/resources/vector_stores.rbi | 1 + .../resources/vector_stores/file_batches.rbi | 1 + .../openai/resources/vector_stores/files.rbi | 1 + 224 files changed, 708 insertions(+) diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index f59e9311..e7ebf9d7 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Audio + # @see OpenAI::Resources::Audio::Speech#create class SpeechCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index bcd93827..9326a51f 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Audio + # 
@see OpenAI::Resources::Audio::Transcriptions#create + # + # @see OpenAI::Resources::Audio::Transcriptions#create_streaming class TranscriptionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 5f5c90ad..80c1292c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Audio + # @see OpenAI::Resources::Audio::Translations#create class TranslationCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 96e290bb..e41efffb 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -225,6 +225,8 @@ class Batch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The current status of the batch. + # + # @see OpenAI::Models::Batch#status module Status extend OpenAI::Enum @@ -244,6 +246,7 @@ module Status # def self.values; end end + # @see OpenAI::Models::Batch#errors class Errors < OpenAI::BaseModel # @!attribute [r] data # diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 6772ee41..1b94b154 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Batches#cancel class BatchCancelParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index b58fb416..91a8de4e 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Batches#create class BatchCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 5bd6a019..0515bbc5 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Batches#list class BatchListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index 12c4186e..fae80854 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Batches#retrieve class BatchRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index c7a250d4..5b56453e 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -160,6 +160,7 @@ class Assistant < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::BaseModel # @!attribute [r] code_interpreter # @@ -192,6 +193,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see 
OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -213,6 +215,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The ID of the diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index dab3186e..881fd6ee 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#create class AssistantCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -208,6 +209,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -229,6 +231,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The @@ -312,6 +315,8 @@ class VectorStore < OpenAI::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. + # + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Union @@ -363,6 +368,7 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. 
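The shape of the annotation is uniform throughout this patch: params classes gain a `@see` tag pointing at the resource method that consumes them, and nested models gain one pointing at the accessor that returns them, so the rendered YARD docs cross-link in both directions. A hypothetical model annotated the same way; the `Gadgets` names are invented for illustration:

```ruby
require "openai"

module OpenAI
  module Models
    # @see OpenAI::Resources::Gadgets#create
    class GadgetCreateParams < OpenAI::BaseModel
      # @see OpenAI::Models::GadgetCreateParams#settings
      class Settings < OpenAI::BaseModel
      end
    end
  end
end
```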
diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index 939c39ee..08dbb202 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#delete class AssistantDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 601a694b..2ccfe616 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#list class AssistantListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 40ba9b41..7fe50f31 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#retrieve class AssistantRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index b7cb38c3..1caf13f7 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -31,6 +31,8 @@ class AssistantToolChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The type of the tool. 
If type is `function`, the function name must be set + # + # @see OpenAI::Models::Beta::AssistantToolChoice#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 6b2bbaf8..3c3bdd37 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#update class AssistantUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -304,6 +305,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # Overrides the list of @@ -326,6 +328,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # Overrides the diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 98e95ab3..9a23f3f2 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -28,6 +28,7 @@ class FileSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] max_num_results # The maximum number of results the file search tool should output. The default is @@ -71,6 +72,7 @@ class FileSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::BaseModel # @!attribute score_threshold # The score threshold for the file search. All values must be a floating point @@ -107,6 +109,8 @@ class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
+ # + # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Enum diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 3de11c4c..737cbf81 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -56,6 +56,7 @@ class Thread < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::BaseModel # @!attribute [r] code_interpreter # @@ -88,6 +89,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -109,6 +111,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::Thread::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 56c776fa..d1a26adb 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#create_and_run + # + # @see OpenAI::Resources::Beta::Threads#create_and_run_streaming class ThreadCreateAndRunParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -322,6 +325,8 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The text contents of the message. + # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content module Content extend OpenAI::Union @@ -345,6 +350,8 @@ module Content # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. + # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role extend OpenAI::Enum @@ -420,6 +427,7 @@ class FileSearch < OpenAI::BaseModel end end + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::BaseModel # @!attribute [r] code_interpreter # @@ -454,6 +462,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -475,6 +484,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The @@ -558,6 +568,8 @@ class VectorStore < OpenAI::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
+ # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Union @@ -609,6 +621,7 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -677,6 +690,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -698,6 +712,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The ID of the @@ -767,6 +782,8 @@ class TruncationStrategy < OpenAI::BaseModel # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 3095b908..d778808c 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#create class ThreadCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -97,6 +98,8 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The text contents of the message. + # + # @see OpenAI::Models::Beta::ThreadCreateParams::Message#content module Content extend OpenAI::Union @@ -120,6 +123,8 @@ module Content # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. + # + # @see OpenAI::Models::Beta::ThreadCreateParams::Message#role module Role extend OpenAI::Enum @@ -228,6 +233,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -249,6 +255,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The @@ -332,6 +339,8 @@ class VectorStore < OpenAI::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
+ # + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Union @@ -383,6 +392,7 @@ class Static < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index f81ea4ec..71afbb41 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#delete class ThreadDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index 4df984d4..c12d6b56 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#retrieve class ThreadRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index e4d3d234..e80088af 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#update class ThreadUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -70,6 +71,7 @@ class ToolResources < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -91,6 +93,7 @@ class CodeInterpreter < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] vector_store_ids # The diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index 0c3116f3..e471d257 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -47,6 +47,7 @@ class FileCitationAnnotation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. 
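# An illustrative aside on the cross-references added above: each params model
# is now tagged with the resource method that consumes it, e.g.
# OpenAI::Models::Beta::ThreadUpdateParams -> OpenAI::Resources::Beta::Threads#update.
# A minimal sketch of that pairing follows; the API key env var, thread ID, and
# file ID are placeholder assumptions, not values from this patch.
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Keyword arguments here correspond one-to-one with ThreadUpdateParams attributes.
thread = client.beta.threads.update(
  "thread_abc123",
  tool_resources: {code_interpreter: {file_ids: ["file-abc123"]}}
)
puts thread.tool_resources&.code_interpreter&.file_ids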
diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 512bc7ad..5d2bf50f 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -70,6 +70,7 @@ class FileCitationDeltaAnnotation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::BaseModel # @!attribute [r] file_id # The ID of the specific File the citation is from. diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index a04561c4..fbe60a09 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -46,6 +46,7 @@ class FilePathAnnotation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::BaseModel # @!attribute file_id # The ID of the file that was generated. diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 1ef92d3a..8fe4a8e7 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -69,6 +69,7 @@ class FilePathDeltaAnnotation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::BaseModel # @!attribute [r] file_id # The ID of the file that was generated. diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 2a06f605..8f66bf22 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -34,6 +34,8 @@ class ImageFile < OpenAI::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. + # + # @see OpenAI::Models::Beta::Threads::ImageFile#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 9f702f21..cb52ad4b 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -38,6 +38,8 @@ class ImageFileDelta < OpenAI::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. + # + # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index e6830beb..a38ccdbd 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -33,6 +33,8 @@ class ImageURL < OpenAI::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` + # + # @see OpenAI::Models::Beta::Threads::ImageURL#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index d47277f4..6c80622a 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -37,6 +37,8 @@ class ImageURLDelta < OpenAI::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. + # + # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index b3a7faab..3d07bbd7 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -200,6 +200,7 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel end end + # @see OpenAI::Models::Beta::Threads::Message#incomplete_details class IncompleteDetails < OpenAI::BaseModel # @!attribute reason # The reason the message is incomplete. @@ -217,6 +218,8 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The reason the message is incomplete. + # + # @see OpenAI::Models::Beta::Threads::Message::IncompleteDetails#reason module Reason extend OpenAI::Enum @@ -235,6 +238,8 @@ module Reason end # The entity that produced the message. One of `user` or `assistant`. + # + # @see OpenAI::Models::Beta::Threads::Message#role module Role extend OpenAI::Enum @@ -250,6 +255,8 @@ module Role # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. + # + # @see OpenAI::Models::Beta::Threads::Message#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 6738bb76..340c113a 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#create class MessageCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index abe4fb7f..2a93e0d9 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#delete class MessageDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 301bea9e..c13c0e39 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -36,6 +36,8 @@ class MessageDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The entity that produced the message. One of `user` or `assistant`. 
+ # + # @see OpenAI::Models::Beta::Threads::MessageDelta#role module Role extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index cc3f0ae0..1c5efce5 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#list class MessageListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index c51fa75c..9a0811a6 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#retrieve class MessageRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index 46ffdaa4..ba22a469 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#update class MessageUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index 51e0aba3..d9f3e412 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -38,6 +38,7 @@ class RequiredActionFunctionToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::BaseModel # @!attribute arguments # The arguments that the model expects you to pass to the function. diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 05dd765d..f104ddeb 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -283,6 +283,7 @@ class Run < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::BaseModel # @!attribute [r] reason # The reason why the run is incomplete. This will point to which specific token @@ -307,6 +308,8 @@ class IncompleteDetails < OpenAI::BaseModel # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. + # + # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason module Reason extend OpenAI::Enum @@ -321,6 +324,7 @@ module Reason end end + # @see OpenAI::Models::Beta::Threads::Run#last_error class LastError < OpenAI::BaseModel # @!attribute code # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. @@ -345,6 +349,8 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. 
+ # + # @see OpenAI::Models::Beta::Threads::Run::LastError#code module Code extend OpenAI::Enum @@ -360,6 +366,7 @@ module Code end end + # @see OpenAI::Models::Beta::Threads::Run#required_action class RequiredAction < OpenAI::BaseModel # @!attribute submit_tool_outputs # Details on the tool outputs needed for this run to continue. @@ -385,6 +392,7 @@ class RequiredAction < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::BaseModel # @!attribute tool_calls # A list of the relevant tool calls. @@ -404,6 +412,7 @@ class SubmitToolOutputs < OpenAI::BaseModel end end + # @see OpenAI::Models::Beta::Threads::Run#truncation_strategy class TruncationStrategy < OpenAI::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to @@ -436,6 +445,8 @@ class TruncationStrategy < OpenAI::BaseModel # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type module Type extend OpenAI::Enum @@ -450,6 +461,7 @@ module Type end end + # @see OpenAI::Models::Beta::Threads::Run#usage class Usage < OpenAI::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run. diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 14a4bc2b..f2fe43f6 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#cancel class RunCancelParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index ad9f9698..d0c690be 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -4,6 +4,9 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#create + # + # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming class RunCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -286,6 +289,8 @@ class AdditionalMessage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The text contents of the message. + # + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#content module Content extend OpenAI::Union @@ -309,6 +314,8 @@ module Content # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. + # + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role extend OpenAI::Enum @@ -433,6 +440,8 @@ class TruncationStrategy < OpenAI::BaseModel # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
+ # + # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 8f3dfdb6..43e8bd9d 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#list class RunListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index 4bc9a2aa..c75947e3 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#retrieve class RunRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index c2d753ad..8ed57b07 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -4,6 +4,9 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs + # + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 6e213a7f..54971ac1 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#update class RunUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index 89ebfd97..42262679 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -36,6 +36,7 @@ class CodeInterpreterOutputImage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::BaseModel # @!attribute [r] file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 4cfc8daf..62aeaf0c 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -37,6 +37,7 @@ class CodeInterpreterToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute input # The input to the Code 
Interpreter tool call. @@ -121,6 +122,7 @@ class Image < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 8b00e21b..5e5b0369 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -52,6 +52,7 @@ class CodeInterpreterToolCallDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::BaseModel # @!attribute [r] input # The input to the Code Interpreter tool call. diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 282a9534..09267fd0 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -34,6 +34,7 @@ class FileSearchToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::BaseModel # @!attribute [r] ranking_options # The ranking options for the file search. @@ -67,6 +68,7 @@ class FileSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` @@ -95,6 +97,8 @@ class RankingOptions < OpenAI::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. + # + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Enum @@ -184,6 +188,8 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The type of the content. + # + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index f3e392ba..9d5784e9 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -34,6 +34,7 @@ class FunctionToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::BaseModel # @!attribute arguments # The arguments passed to the function. 
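# A hedged sketch of reading the function tool-call models annotated above:
# when a run pauses with status `requires_action`, each tool call carries a
# `function` whose `arguments` field arrives as a JSON-encoded string. The IDs
# are placeholders, and the retrieve signature follows this SDK's resource
# layout (final path segment positional, earlier ones as keywords).
require "json"
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

run = client.beta.threads.runs.retrieve("run_abc123", thread_id: "thread_abc123")
run.required_action&.submit_tool_outputs&.tool_calls&.each do |tool_call|
  # RequiredActionFunctionToolCall::Function#arguments is a JSON string, not a Hash.
  args = JSON.parse(tool_call.function.arguments)
  puts "#{tool_call.function.name} called with #{args}"
end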
diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 02069f3b..d77981e5 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -49,6 +49,7 @@ class FunctionToolCallDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::BaseModel # @!attribute [r] arguments # The arguments passed to the function. diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index 459f5bdc..f7bde3db 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -28,6 +28,7 @@ class MessageCreationStepDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index acd1f507..2c9449fe 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -159,6 +159,7 @@ class RunStep < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. @@ -184,6 +185,8 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # One of `server_error` or `rate_limit_exceeded`. + # + # @see OpenAI::Models::Beta::Threads::Runs::RunStep::LastError#code module Code extend OpenAI::Enum @@ -200,6 +203,8 @@ module Code # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. + # + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status module Status extend OpenAI::Enum @@ -217,6 +222,8 @@ module Status end # The details of the run step. + # + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#step_details module StepDetails extend OpenAI::Union @@ -234,6 +241,8 @@ module StepDetails end # The type of run step, which can be either `message_creation` or `tool_calls`. + # + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#type module Type extend OpenAI::Enum @@ -247,6 +256,7 @@ module Type # def self.values; end end + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage class Usage < OpenAI::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 84f2aa92..929339da 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -26,6 +26,8 @@ class RunStepDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The details of the run step. 
+ # + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDelta#step_details module StepDetails extend OpenAI::Union diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 4c2d9170..7acf2e7e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -32,6 +32,7 @@ class RunStepDeltaMessageDelta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::BaseModel # @!attribute [r] message_id # The ID of the message that was created by this run step. diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index eb22d47e..08c4b854 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -5,6 +5,7 @@ module Models module Beta module Threads module Runs + # @see OpenAI::Resources::Beta::Threads::Runs::Steps#list class StepListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index ff4b6ccd..9f539603 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -5,6 +5,7 @@ module Models module Beta module Threads module Runs + # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve class StepRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index bc94f59d..2cdec571 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -139,6 +139,8 @@ class Choice < OpenAI::BaseModel # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. + # + # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason module FinishReason extend OpenAI::Enum @@ -155,6 +157,7 @@ module FinishReason # def self.values; end end + # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs class Logprobs < OpenAI::BaseModel # @!attribute content # A list of message content tokens with log probability information. @@ -181,6 +184,8 @@ class Logprobs < OpenAI::BaseModel end # The service tier used for processing the request. + # + # @see OpenAI::Models::Chat::ChatCompletion#service_tier module ServiceTier extend OpenAI::Enum diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index c6a293dd..adf7a92f 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -88,6 +88,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::BaseModel # @!attribute id # Unique identifier for a previous audio response from the model. 
@@ -108,6 +109,8 @@ class Audio < OpenAI::BaseModel # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. + # + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content module Content extend OpenAI::Union @@ -143,6 +146,8 @@ module ArrayOfContentPart end # @deprecated + # + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#function_call class FunctionCall < OpenAI::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 638a7c7e..558fb748 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -32,6 +32,8 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. + # + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ module Format extend OpenAI::Enum @@ -50,6 +52,8 @@ module Format # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice extend OpenAI::Union diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index dde734e3..3e09badb 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -139,6 +139,7 @@ class Choice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::BaseModel # @!attribute content # The contents of the chunk message. @@ -197,6 +198,8 @@ class Delta < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # @deprecated + # + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON @@ -234,6 +237,8 @@ class FunctionCall < OpenAI::BaseModel end # The role of the author of this message. + # + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#role module Role extend OpenAI::Enum @@ -295,6 +300,7 @@ class ToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON @@ -329,6 +335,8 @@ class Function < OpenAI::BaseModel end # The type of the tool. Currently, only `function` is supported. + # + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type module Type extend OpenAI::Enum @@ -349,6 +357,8 @@ module Type # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. 
+ # + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason extend OpenAI::Enum @@ -365,6 +375,7 @@ module FinishReason # def self.values; end end + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs class Logprobs < OpenAI::BaseModel # @!attribute content # A list of message content tokens with log probability information. @@ -391,6 +402,8 @@ class Logprobs < OpenAI::BaseModel end # The service tier used for processing the request. + # + # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier module ServiceTier extend OpenAI::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 72530466..dfa175c4 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -45,6 +45,7 @@ class File < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::BaseModel # @!attribute [r] file_data # The base64 encoded file data, used when passing the file to the model as a diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 3fd2baeb..107b95af 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -25,6 +25,7 @@ class ChatCompletionContentPartImage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. @@ -53,6 +54,8 @@ class ImageURL < OpenAI::BaseModel # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + # + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 269e74c7..4be021ce 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -25,6 +25,7 @@ class ChatCompletionContentPartInputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::BaseModel # @!attribute data # Base64 encoded audio data. @@ -49,6 +50,8 @@ class InputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The format of the encoded audio data. Currently supports "wav" and "mp3". 
+ # + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ module Format extend OpenAI::Enum diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 98b66dd7..6a5af05b 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -41,6 +41,8 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The contents of the developer message. + # + # @see OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam#content module Content extend OpenAI::Union diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 228616ad..f153dbe8 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -111,6 +111,7 @@ class Annotation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. @@ -151,6 +152,8 @@ class URLCitation < OpenAI::BaseModel end # @deprecated + # + # @see OpenAI::Models::Chat::ChatCompletionMessage#function_call class FunctionCall < OpenAI::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index f4628729..e114fcfe 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -31,6 +31,7 @@ class ChatCompletionMessageToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index 84744f2a..6d8c1713 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -26,6 +26,7 @@ class ChatCompletionNamedToolChoice < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::BaseModel # @!attribute name # The name of the function to call. diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index e170c46d..5865f161 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -33,6 +33,8 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. 
+ # + # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content module Content extend OpenAI::Union diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index b311ca43..6426eaad 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -41,6 +41,8 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The contents of the system message. + # + # @see OpenAI::Models::Chat::ChatCompletionSystemMessageParam#content module Content extend OpenAI::Union diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 31644647..37d6d123 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -32,6 +32,8 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The contents of the tool message. + # + # @see OpenAI::Models::Chat::ChatCompletionToolMessageParam#content module Content extend OpenAI::Union diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index b938d473..eb640b44 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -40,6 +40,8 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The contents of the user message. + # + # @see OpenAI::Models::Chat::ChatCompletionUserMessageParam#content module Content extend OpenAI::Union diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 8eb609ab..b612cb26 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#create + # + # @see OpenAI::Resources::Chat::Completions#create_streaming class CompletionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -652,6 +655,8 @@ class WebSearchOptions < OpenAI::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. + # + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize extend OpenAI::Enum @@ -666,6 +671,7 @@ module SearchContextSize # def self.values; end end + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location class UserLocation < OpenAI::BaseModel # @!attribute approximate # Approximate location parameters for the search. @@ -690,6 +696,7 @@ class UserLocation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::BaseModel # @!attribute [r] city # Free text input for the city of the user, e.g. `San Francisco`. 
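# A minimal sketch of the resource method the tags above point at
# (OpenAI::Resources::Chat::Completions#create), exercising the
# WebSearchOptions -> UserLocation -> Approximate nesting documented in this
# hunk. The model name, prompt, and city are illustrative assumptions.
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

completion = client.chat.completions.create(
  model: "gpt-4o-search-preview",
  messages: [{role: "user", content: "Find a lunch spot near the office."}],
  web_search_options: {
    # `approximate` mirrors CompletionCreateParams::WebSearchOptions::UserLocation::Approximate
    user_location: {type: "approximate", approximate: {city: "San Francisco"}}
  }
)
puts completion.choices.first.message.content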
diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index cbcb2c22..70e85073 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#delete class CompletionDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 8b2e8ef7..905fe4ea 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#list class CompletionListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index e8ee1b1c..3465af46 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#retrieve class CompletionRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 32797eb4..4e7909e9 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#update class CompletionUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 74ccbdc7..d72b3255 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Chat module Completions + # @see OpenAI::Resources::Chat::Completions::Messages#list class MessageListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index e79a3435..313ae677 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -49,6 +49,8 @@ class ComparisonFilter < OpenAI::BaseModel # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal + # + # @see OpenAI::Models::ComparisonFilter#type module Type extend OpenAI::Enum @@ -68,6 +70,8 @@ module Type # The value to compare against the attribute key; supports string, number, or # boolean types. 
+ # + # @see OpenAI::Models::ComparisonFilter#value module Value extend OpenAI::Union diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 5e4a549b..66f575ee 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -41,6 +41,8 @@ class CompletionChoice < OpenAI::BaseModel # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. + # + # @see OpenAI::Models::CompletionChoice#finish_reason module FinishReason extend OpenAI::Enum @@ -55,6 +57,7 @@ module FinishReason # def self.values; end end + # @see OpenAI::Models::CompletionChoice#logprobs class Logprobs < OpenAI::BaseModel # @!attribute [r] text_offset # diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index d4935347..18e6bdb2 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -2,6 +2,9 @@ module OpenAI module Models + # @see OpenAI::Resources::Completions#create + # + # @see OpenAI::Resources::Completions#create_streaming class CompletionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 5eebf968..553021f3 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -63,6 +63,7 @@ class CompletionUsage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::BaseModel # @!attribute [r] accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that @@ -129,6 +130,7 @@ class CompletionTokensDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::BaseModel # @!attribute [r] audio_tokens # Audio input tokens present in the prompt. diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 7fd45e2c..0a5c34a4 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -42,6 +42,8 @@ module Filter end # Type of operation: `and` or `or`. + # + # @see OpenAI::Models::CompoundFilter#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index a47238ef..2679c15d 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -37,6 +37,7 @@ class CreateEmbeddingResponse < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::CreateEmbeddingResponse#usage class Usage < OpenAI::BaseModel # @!attribute prompt_tokens # The number of tokens used by the prompt. 
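# A short sketch pairing OpenAI::Resources::Embeddings#create with the
# CreateEmbeddingResponse and Usage models annotated above; the embedding
# model name and input text are assumptions for illustration.
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.embeddings.create(
  model: "text-embedding-3-small",
  input: "The food was delicious and the waiter was friendly."
)
vector = response.data.first.embedding # Array of Floats
puts "#{vector.length} dimensions, #{response.usage.prompt_tokens} prompt tokens"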
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 7bcadd1d..9bae0793 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Embeddings#create class EmbeddingCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 597cc103..c1e2edd4 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#content class FileContentParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index c37dba0c..da8f43df 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#create class FileCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index 22f10a38..c8b31252 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#delete class FileDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 9d825d43..997d1fa1 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#list class FileListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index 7b08ec97..e85f9017 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -102,6 +102,8 @@ class FileObject < OpenAI::BaseModel # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. + # + # @see OpenAI::Models::FileObject#purpose module Purpose extend OpenAI::Enum @@ -124,6 +126,8 @@ module Purpose # # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. 
+ # + # @see OpenAI::Models::FileObject#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index a4f815c3..c1729f9e 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#retrieve class FileRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 743bd260..07a3dd60 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -192,6 +192,7 @@ class FineTuningJob < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::BaseModel # @!attribute code # A machine-readable error code. @@ -225,6 +226,7 @@ class Error < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model @@ -274,6 +276,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -288,6 +292,8 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -302,6 +308,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -317,6 +325,8 @@ module NEpochs # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + # + # @see OpenAI::Models::FineTuning::FineTuningJob#status module Status extend OpenAI::Enum @@ -334,6 +344,7 @@ module Status # def self.values; end end + # @see OpenAI::Models::FineTuning::FineTuningJob#method_ class Method < OpenAI::BaseModel # @!attribute [r] dpo # Configuration for the DPO fine-tuning method. @@ -376,6 +387,7 @@ class Method < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. @@ -396,6 +408,7 @@ class Dpo < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model @@ -459,6 +472,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
+ # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -473,6 +488,8 @@ module BatchSize # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta module Beta extend OpenAI::Union @@ -487,6 +504,8 @@ module Beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -501,6 +520,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -515,6 +536,7 @@ module NEpochs end end + # @see OpenAI::Models::FineTuning::FineTuningJob::Method#supervised class Supervised < OpenAI::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. @@ -536,6 +558,7 @@ class Supervised < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model @@ -586,6 +609,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -600,6 +625,8 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -614,6 +641,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -629,6 +658,8 @@ module NEpochs end # The type of method. Is either `supervised` or `dpo`. + # + # @see OpenAI::Models::FineTuning::FineTuningJob::Method#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 92a9a009..c6bd2025 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -70,6 +70,8 @@ class FineTuningJobEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The log level of the event. + # + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#level module Level extend OpenAI::Enum @@ -85,6 +87,8 @@ module Level end # The type of event. 
+ # + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 979023cf..e1830988 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#cancel class JobCancelParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 1e8ddf18..f5770f51 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#create class JobCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -222,6 +223,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -236,6 +239,8 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -250,6 +255,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -288,6 +295,7 @@ class Integration < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::BaseModel # @!attribute project # The name of the project that the new run will be created under. @@ -381,6 +389,7 @@ class Method < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. @@ -402,6 +411,7 @@ class Dpo < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model @@ -465,6 +475,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -479,6 +491,8 @@ module BatchSize # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. 
+ # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta module Beta extend OpenAI::Union @@ -493,6 +507,8 @@ module Beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -507,6 +523,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -521,6 +539,7 @@ module NEpochs end end + # @see OpenAI::Models::FineTuning::JobCreateParams::Method#supervised class Supervised < OpenAI::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. @@ -542,6 +561,7 @@ class Supervised < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model @@ -592,6 +612,8 @@ class Hyperparameters < OpenAI::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size module BatchSize extend OpenAI::Union @@ -606,6 +628,8 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Union @@ -620,6 +644,8 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. + # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs module NEpochs extend OpenAI::Union @@ -635,6 +661,8 @@ module NEpochs end # The type of method. Is either `supervised` or `dpo`. 
+ # + # @see OpenAI::Models::FineTuning::JobCreateParams::Method#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index bbb67fa8..167b9242 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#list_events class JobListEventsParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index f9c13fc9..c3abbcd5 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#list class JobListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index 7735a847..e62d1b6f 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#retrieve class JobRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index ee8722ea..1b5657d6 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -4,6 +4,7 @@ module OpenAI module Models module FineTuning module Jobs + # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list class CheckpointListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 11dfff02..0390e0c6 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -74,6 +74,7 @@ class FineTuningJobCheckpoint < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::BaseModel # @!attribute [r] full_valid_loss # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 87a15c79..b958af96 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Images#create_variation class ImageCreateVariationParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index cf4ca6e4..d9678ace 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Images#edit class ImageEditParams < OpenAI::BaseModel # @!parse # extend 
OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 43ed870f..52810d19 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Images#generate class ImageGenerateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index 3076a522..186515a2 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Models#delete class ModelDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index 1718546b..191cb53f 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Models#list class ModelListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index 335e5c97..6c19bb2b 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Models#retrieve class ModelRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index b1bb2ec1..9d8bf7a0 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -37,6 +37,7 @@ class Moderation < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any @@ -172,6 +173,7 @@ class Categories < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Moderation#category_applied_input_types class CategoryAppliedInputTypes < OpenAI::BaseModel # @!attribute harassment # The applied input type(s) for the category 'harassment'. @@ -472,6 +474,7 @@ module ViolenceGraphic end end + # @see OpenAI::Models::Moderation#category_scores class CategoryScores < OpenAI::BaseModel # @!attribute harassment # The score for the category 'harassment'. 
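The fine-tuning hyperparameter unions annotated above (`BatchSize`, `LearningRateMultiplier`, `NEpochs`, and DPO's `Beta`) each accept either the literal `:auto` or an explicit number. A hedged sketch of a caller exercising them through `OpenAI::Resources::FineTuning::Jobs#create`, assuming the same configured `client` as the sketch above; the model name and training file ID are placeholders:

# Sketch only: each hyperparameter follows the union documented above
# (:auto | Integer for batch_size and n_epochs, :auto | Float for
# learning_rate_multiplier).
job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini-2024-07-18", # placeholder model name
  training_file: "file-abc123",    # placeholder file ID
  hyperparameters: {
    batch_size: :auto,
    learning_rate_multiplier: 1.8,
    n_epochs: 3
  }
)

# FineTuningJob#status is documented above as one of :validating_files,
# :queued, :running, :succeeded, :failed, or :cancelled.
puts job.status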
diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 93150dbd..6d03f271 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Moderations#create class ModerationCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index de867149..c02f0627 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -25,6 +25,7 @@ class ModerationImageURLInput < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 64a9d430..af30c4e0 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -42,6 +42,8 @@ class Reasoning < OpenAI::BaseModel # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. + # + # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary extend OpenAI::Enum diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 907a4666..8dac86a9 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -27,6 +27,7 @@ class ResponseFormatJSONSchema < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 6802a365..57d62fd4 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -42,6 +42,8 @@ class ComputerTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The type of computer environment to control. + # + # @see OpenAI::Models::Responses::ComputerTool#environment module Environment extend OpenAI::Enum diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 3c79d015..715855e0 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -45,6 +45,8 @@ class EasyInputMessage < OpenAI::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. + # + # @see OpenAI::Models::Responses::EasyInputMessage#content module Content extend OpenAI::Union @@ -62,6 +64,8 @@ module Content # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. + # + # @see OpenAI::Models::Responses::EasyInputMessage#role module Role extend OpenAI::Enum @@ -78,6 +82,8 @@ module Role end # The type of the message input. Always `message`. 
+ # + # @see OpenAI::Models::Responses::EasyInputMessage#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 6c9f5511..18833fef 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -63,6 +63,8 @@ class FileSearchTool < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # A filter to apply based on file attributes. + # + # @see OpenAI::Models::Responses::FileSearchTool#filters module Filters extend OpenAI::Union @@ -77,6 +79,7 @@ module Filters # def self.variants; end end + # @see OpenAI::Models::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::BaseModel # @!attribute [r] ranker # The ranker to use for the file search. @@ -111,6 +114,8 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The ranker to use for the file search. + # + # @see OpenAI::Models::Responses::FileSearchTool::RankingOptions#ranker module Ranker extend OpenAI::Enum diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index 3bf3624b..82362ce6 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Responses + # @see OpenAI::Resources::Responses::InputItems#list class InputItemListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 296c2d41..9f71b7aa 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -269,6 +269,7 @@ class Response < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::BaseModel # @!attribute [r] reason # The reason why the response is incomplete. @@ -290,6 +291,8 @@ class IncompleteDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The reason why the response is incomplete. + # + # @see OpenAI::Models::Responses::Response::IncompleteDetails#reason module Reason extend OpenAI::Enum @@ -307,6 +310,8 @@ module Reason # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. + # + # @see OpenAI::Models::Responses::Response#tool_choice module ToolChoice extend OpenAI::Union @@ -339,6 +344,8 @@ module ToolChoice # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. + # + # @see OpenAI::Models::Responses::Response#truncation module Truncation extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 0d7ae42f..5ce7e550 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -136,6 +136,8 @@ class File < OpenAI::BaseModel end # The status of the code interpreter tool call. 
+ # + # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 6e5128d4..207a2584 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -59,6 +59,8 @@ class ResponseComputerToolCall < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # A click action. + # + # @see OpenAI::Models::Responses::ResponseComputerToolCall#action module Action extend OpenAI::Union @@ -132,6 +134,8 @@ class Click < OpenAI::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. + # + # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button module Button extend OpenAI::Enum @@ -442,6 +446,8 @@ class PendingSafetyCheck < OpenAI::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseComputerToolCall#status module Status extend OpenAI::Enum @@ -457,6 +463,8 @@ module Status end # The type of the computer call. Always `computer_call`. + # + # @see OpenAI::Models::Responses::ResponseComputerToolCall#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 856d3c99..467d41e0 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -96,6 +96,8 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index d1242f76..66590848 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -48,6 +48,8 @@ class ResponseContentPartAddedEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The content part that was added. + # + # @see OpenAI::Models::Responses::ResponseContentPartAddedEvent#part module Part extend OpenAI::Union diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 8481a853..988c11d1 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -48,6 +48,8 @@ class ResponseContentPartDoneEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The content part that is done. 
+ # + # @see OpenAI::Models::Responses::ResponseContentPartDoneEvent#part module Part extend OpenAI::Union diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index e5c04cf7..41049b5b 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Responses + # @see OpenAI::Resources::Responses#create + # + # @see OpenAI::Resources::Responses#create_streaming class ResponseCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index ee46f30c..389d812a 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Responses + # @see OpenAI::Resources::Responses#delete class ResponseDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 4b7b4755..465b571a 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -27,6 +27,8 @@ class ResponseError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The error code for the response. + # + # @see OpenAI::Models::Responses::ResponseError#code module Code extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 20201242..3606686b 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -54,6 +54,8 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`. + # + # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 6d337c8f..92fbd817 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -67,6 +67,8 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 83d109ab..310a8685 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -52,6 +52,8 @@ class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API.
+ # + # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 78f572ca..41ecb9f4 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -36,6 +36,8 @@ class ResponseFunctionWebSearch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The status of the web search tool call. + # + # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index a039c35c..e6caa2ed 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -34,6 +34,8 @@ class ResponseInputAudio < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The format of the audio data. Currently supported formats are `mp3` and `wav`. + # + # @see OpenAI::Models::Responses::ResponseInputAudio#format_ module Format extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 2e770199..5c5bb8ce 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -45,6 +45,8 @@ class ResponseInputImage < OpenAI::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. + # + # @see OpenAI::Models::Responses::ResponseInputImage#detail module Detail extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index d2e09cef..206650eb 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -107,6 +107,8 @@ class Message < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The role of the message input. One of `user`, `system`, or `developer`. + # + # @see OpenAI::Models::Responses::ResponseInputItem::Message#role module Role extend OpenAI::Enum @@ -123,6 +125,8 @@ module Role # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseInputItem::Message#status module Status extend OpenAI::Enum @@ -138,6 +142,8 @@ module Status end # The type of the message input. Always set to `message`. + # + # @see OpenAI::Models::Responses::ResponseInputItem::Message#type module Type extend OpenAI::Enum @@ -250,6 +256,8 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status module Status extend OpenAI::Enum @@ -321,6 +329,8 @@ class FunctionCallOutput < OpenAI::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API.
+ # + # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 3962d98d..65bfbf76 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -56,6 +56,8 @@ class ResponseInputMessageItem < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The role of the message input. One of `user`, `system`, or `developer`. + # + # @see OpenAI::Models::Responses::ResponseInputMessageItem#role module Role extend OpenAI::Enum @@ -72,6 +74,8 @@ module Role # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseInputMessageItem#status module Status extend OpenAI::Enum @@ -87,6 +91,8 @@ module Status end # The type of the message input. Always set to `message`. + # + # @see OpenAI::Models::Responses::ResponseInputMessageItem#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 04b05a0d..373721b0 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -68,6 +68,8 @@ module Content # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseOutputMessage#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index bb1bc110..e5539f3d 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -70,6 +70,8 @@ class Summary < OpenAI::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. + # + # @see OpenAI::Models::Responses::ResponseReasoningItem#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 6d3028c3..36f30f22 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Responses + # @see OpenAI::Resources::Responses#retrieve class ResponseRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index f796c035..c6291cf6 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -66,6 +66,8 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # A citation to a file.
+ # + # @see OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent#annotation module Annotation extend OpenAI::Union diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 8e4ccddd..316d5b3f 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -48,6 +48,7 @@ class ResponseUsage < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. @@ -66,6 +67,7 @@ class InputTokensDetails < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void end + # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details class OutputTokensDetails < OpenAI::BaseModel # @!attribute reasoning_tokens # The number of reasoning tokens. diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index b6041f87..cddbcc56 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -35,6 +35,8 @@ class ToolChoiceTypes < OpenAI::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # + # @see OpenAI::Models::Responses::ToolChoiceTypes#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 31542421..58f833d7 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -46,6 +46,8 @@ class WebSearchTool < OpenAI::BaseModel # # - `web_search_preview` # - `web_search_preview_2025_03_11` + # + # @see OpenAI::Models::Responses::WebSearchTool#type module Type extend OpenAI::Enum @@ -61,6 +63,8 @@ module Type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. + # + # @see OpenAI::Models::Responses::WebSearchTool#search_context_size module SearchContextSize extend OpenAI::Enum @@ -75,6 +79,7 @@ module SearchContextSize # def self.values; end end + # @see OpenAI::Models::Responses::WebSearchTool#user_location class UserLocation < OpenAI::BaseModel # @!attribute type # The type of location approximation. Always `approximate`. diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 4f319b8c..a1652e79 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -77,6 +77,8 @@ class Upload < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The status of the Upload. 
+ # + # @see OpenAI::Models::Upload#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 62d1e116..e509a7b1 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Uploads#cancel class UploadCancelParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 866303ce..1d3ff0d8 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Uploads#complete class UploadCompleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 3650d2dd..68511cf4 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Uploads#create class UploadCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 2d3a2fb7..636b73b1 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Uploads + # @see OpenAI::Resources::Uploads::Parts#create class PartCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index dd5df66d..8832fca7 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -114,6 +114,7 @@ class VectorStore < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::BaseModel # @!attribute cancelled # The number of files that were cancelled. @@ -160,6 +161,8 @@ class FileCounts < OpenAI::BaseModel # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. + # + # @see OpenAI::Models::VectorStore#status module Status extend OpenAI::Enum @@ -174,6 +177,7 @@ module Status # def self.values; end end + # @see OpenAI::Models::VectorStore#expires_after class ExpiresAfter < OpenAI::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. 
Supported anchors: diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 43b4e601..a4633e39 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#create class VectorStoreCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index 25113370..a6973a49 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#delete class VectorStoreDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 614dc937..90569e9b 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#list class VectorStoreListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index 60ede4d7..8328d0a1 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#retrieve class VectorStoreRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index c970dbc4..bde42131 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#search class VectorStoreSearchParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter @@ -135,6 +136,7 @@ class RankingOptions < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker extend OpenAI::Enum diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 11a8b9e3..c4914147 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -86,6 +86,8 @@ class Content < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # The type of content. 
+ # + # @see OpenAI::Models::VectorStoreSearchResponse::Content#type module Type extend OpenAI::Enum diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 7e6fb3a6..62b9c58d 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#update class VectorStoreUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index 3e5971ff..b9d746ca 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::FileBatches#cancel class FileBatchCancelParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 69286492..ce0dbcf7 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::FileBatches#create class FileBatchCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 1dbc17b1..a5e43403 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::FileBatches#list_files class FileBatchListFilesParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 34ccee34..37c6f122 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::FileBatches#retrieve class FileBatchRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index 8e88a4ff..52f123e2 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#content class FileContentParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 79ef17c3..514d5c8b 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models 
module VectorStores + # @see OpenAI::Resources::VectorStores::Files#create class FileCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index 8baa3964..8ed88cd8 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#delete class FileDeleteParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 78acf207..34115039 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#list class FileListParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index bfb3d370..65d9dcbe 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#retrieve class FileRetrieveParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index e1a477eb..8d952ea8 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#update class FileUpdateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 12c6f576..a1a631ed 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -105,6 +105,7 @@ class VectorStoreFile < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. @@ -130,6 +131,8 @@ class LastError < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void # One of `server_error` or `rate_limit_exceeded`. + # + # @see OpenAI::Models::VectorStores::VectorStoreFile::LastError#code module Code extend OpenAI::Enum @@ -148,6 +151,8 @@ module Code # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. 
+ # + # @see OpenAI::Models::VectorStores::VectorStoreFile#status module Status extend OpenAI::Enum diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index 8fc405d8..dfc5192d 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -58,6 +58,7 @@ class VectorStoreFileBatch < OpenAI::BaseModel # def initialize: (Hash | OpenAI::BaseModel) -> void + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::BaseModel # @!attribute cancelled # The number of files that were cancelled. @@ -103,6 +104,8 @@ class FileCounts < OpenAI::BaseModel # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. + # + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status module Status extend OpenAI::Enum diff --git a/lib/openai/resources/audio.rb b/lib/openai/resources/audio.rb index e82c41f4..2ddf1970 100644 --- a/lib/openai/resources/audio.rb +++ b/lib/openai/resources/audio.rb @@ -12,6 +12,8 @@ class Audio # @return [OpenAI::Resources::Audio::Speech] attr_reader :speech + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 0b5bb925..23e2ee76 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -30,6 +30,8 @@ class Speech # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [StringIO] + # + # @see OpenAI::Models::Audio::SpeechCreateParams def create(params) parsed, options = OpenAI::Models::Audio::SpeechCreateParams.dump_request(params) @client.request( method: :post, path: "audio/speech", headers: {"accept" => "application/octet-stream"}, body: parsed, options: options ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 3c7238bf..cfbc82a3 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -49,6 +49,8 @@ class Transcriptions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] + # + # @see OpenAI::Models::Audio::TranscriptionCreateParams def create(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) if parsed[:stream] @@ -110,6 +112,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Audio::TranscriptionCreateParams def create_streaming(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -128,6 +132,8 @@ def create_streaming(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index ea8e0e4a..e3f7a9f2 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -31,6 +31,8 @@ class Translations # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return
[OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] + # + # @see OpenAI::Models::Audio::TranslationCreateParams def create(params) parsed, options = OpenAI::Models::Audio::TranslationCreateParams.dump_request(params) @client.request( @@ -43,6 +45,8 @@ def create(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 143a05ee..b9f05f65 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -35,6 +35,8 @@ class Batches # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] + # + # @see OpenAI::Models::BatchCreateParams def create(params) parsed, options = OpenAI::Models::BatchCreateParams.dump_request(params) @client.request( @@ -55,6 +57,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] + # + # @see OpenAI::Models::BatchRetrieveParams def retrieve(batch_id, params = {}) @client.request( method: :get, @@ -79,6 +83,8 @@ def retrieve(batch_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::BatchListParams def list(params = {}) parsed, options = OpenAI::Models::BatchListParams.dump_request(params) @client.request( @@ -102,6 +108,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Batch] + # + # @see OpenAI::Models::BatchCancelParams def cancel(batch_id, params = {}) @client.request( method: :post, @@ -111,6 +119,8 @@ def cancel(batch_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta.rb b/lib/openai/resources/beta.rb index c1b7273a..62d4e049 100644 --- a/lib/openai/resources/beta.rb +++ b/lib/openai/resources/beta.rb @@ -9,6 +9,8 @@ class Beta # @return [OpenAI::Resources::Beta::Threads] attr_reader :threads + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 1557a813..928cc77e 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -78,6 +78,8 @@ class Assistants # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] + # + # @see OpenAI::Models::Beta::AssistantCreateParams def create(params) parsed, options = OpenAI::Models::Beta::AssistantCreateParams.dump_request(params) @client.request( @@ -98,6 +100,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] + # + # @see OpenAI::Models::Beta::AssistantRetrieveParams def retrieve(assistant_id, params = {}) @client.request( method: :get, @@ -183,6 +187,8 @@ def retrieve(assistant_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Assistant] + # + # @see OpenAI::Models::Beta::AssistantUpdateParams def update(assistant_id, params = {}) parsed, options = OpenAI::Models::Beta::AssistantUpdateParams.dump_request(params) @client.request( @@ -217,6 
+223,8 @@ def update(assistant_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Beta::AssistantListParams def list(params = {}) parsed, options = OpenAI::Models::Beta::AssistantListParams.dump_request(params) @client.request( @@ -238,6 +246,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::AssistantDeleted] + # + # @see OpenAI::Models::Beta::AssistantDeleteParams def delete(assistant_id, params = {}) @client.request( method: :delete, @@ -247,6 +257,8 @@ def delete(assistant_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 767bd13c..587f677e 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -32,6 +32,8 @@ class Threads # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] + # + # @see OpenAI::Models::Beta::ThreadCreateParams def create(params = {}) parsed, options = OpenAI::Models::Beta::ThreadCreateParams.dump_request(params) @client.request( @@ -52,6 +54,8 @@ def create(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] + # + # @see OpenAI::Models::Beta::ThreadRetrieveParams def retrieve(thread_id, params = {}) @client.request( method: :get, @@ -82,6 +86,8 @@ def retrieve(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Thread] + # + # @see OpenAI::Models::Beta::ThreadUpdateParams def update(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::ThreadUpdateParams.dump_request(params) @client.request( @@ -102,6 +108,8 @@ def update(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::ThreadDeleted] + # + # @see OpenAI::Models::Beta::ThreadDeleteParams def delete(thread_id, params = {}) @client.request( method: :delete, @@ -206,6 +214,8 @@ def delete(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) if parsed[:stream] @@ -316,6 +326,8 @@ def create_and_run(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def create_and_run_streaming(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -334,6 +346,8 @@ def create_and_run_streaming(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index fdcef361..0e86c04e 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -33,6 +33,8 @@ class Messages # @option params 
[OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] + # + # @see OpenAI::Models::Beta::Threads::MessageCreateParams def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageCreateParams.dump_request(params) @client.request( @@ -56,6 +58,8 @@ def create(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] + # + # @see OpenAI::Models::Beta::Threads::MessageRetrieveParams def retrieve(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageRetrieveParams.dump_request(params) thread_id = @@ -88,6 +92,8 @@ def retrieve(message_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Message] + # + # @see OpenAI::Models::Beta::Threads::MessageUpdateParams def update(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageUpdateParams.dump_request(params) thread_id = @@ -131,6 +137,8 @@ def update(message_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Beta::Threads::MessageListParams def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::MessageListParams.dump_request(params) @client.request( @@ -154,6 +162,8 @@ def list(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::MessageDeleted] + # + # @see OpenAI::Models::Beta::Threads::MessageDeleteParams def delete(message_id, params) parsed, options = OpenAI::Models::Beta::Threads::MessageDeleteParams.dump_request(params) thread_id = @@ -168,6 +178,8 @@ def delete(message_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index d67d31ee..a36e8c81 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -123,6 +123,8 @@ class Runs # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::Threads::RunCreateParams def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) if parsed[:stream] @@ -255,6 +257,8 @@ def create(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Beta::Threads::RunCreateParams def create_streaming(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -287,6 +291,8 @@ def create_streaming(thread_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::Threads::RunRetrieveParams def retrieve(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunRetrieveParams.dump_request(params) thread_id = @@ -320,6 +326,8 @@ def retrieve(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return 
[OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::Threads::RunUpdateParams def update(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunUpdateParams.dump_request(params) thread_id = @@ -360,6 +368,8 @@ def update(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Beta::Threads::RunListParams def list(thread_id, params = {}) parsed, options = OpenAI::Models::Beta::Threads::RunListParams.dump_request(params) @client.request( @@ -383,6 +393,8 @@ def list(thread_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::Threads::RunCancelParams def cancel(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCancelParams.dump_request(params) thread_id = @@ -415,6 +427,8 @@ def cancel(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Run] + # + # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] @@ -452,6 +466,8 @@ def submit_tool_outputs(run_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs_streaming(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -474,6 +490,8 @@ def submit_tool_outputs_streaming(run_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 9d087222..5d5559cd 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -28,6 +28,8 @@ class Steps # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] + # + # @see OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams def retrieve(step_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams.dump_request(params) thread_id = @@ -83,6 +85,8 @@ def retrieve(step_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Beta::Threads::Runs::StepListParams def list(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::Runs::StepListParams.dump_request(params) thread_id = @@ -99,6 +103,8 @@ def list(run_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/chat.rb b/lib/openai/resources/chat.rb index d5bf1e2e..9bebeb10 100644 --- a/lib/openai/resources/chat.rb +++ b/lib/openai/resources/chat.rb @@ -6,6 +6,8 @@ class Chat # @return [OpenAI::Resources::Chat::Completions] attr_reader :completions + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/chat/completions.rb 
b/lib/openai/resources/chat/completions.rb index 86c7172c..516866e9 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -213,6 +213,8 @@ class Completions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] + # + # @see OpenAI::Models::Chat::CompletionCreateParams def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) if parsed[:stream] @@ -434,6 +436,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Chat::CompletionCreateParams def create_streaming(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -462,6 +466,8 @@ def create_streaming(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] + # + # @see OpenAI::Models::Chat::CompletionRetrieveParams def retrieve(completion_id, params = {}) @client.request( method: :get, @@ -489,6 +495,8 @@ def retrieve(completion_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletion] + # + # @see OpenAI::Models::Chat::CompletionUpdateParams def update(completion_id, params) parsed, options = OpenAI::Models::Chat::CompletionUpdateParams.dump_request(params) @client.request( @@ -521,6 +529,8 @@ def update(completion_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Chat::CompletionListParams def list(params = {}) parsed, options = OpenAI::Models::Chat::CompletionListParams.dump_request(params) @client.request( @@ -543,6 +553,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Chat::ChatCompletionDeleted] + # + # @see OpenAI::Models::Chat::CompletionDeleteParams def delete(completion_id, params = {}) @client.request( method: :delete, @@ -552,6 +564,8 @@ def delete(completion_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index e40262ce..c55835f8 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -22,6 +22,8 @@ class Messages # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Chat::Completions::MessageListParams def list(completion_id, params = {}) parsed, options = OpenAI::Models::Chat::Completions::MessageListParams.dump_request(params) @client.request( @@ -34,6 +36,8 @@ def list(completion_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 2cf80ec1..8a22b8c0 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -113,6 +113,8 @@ class Completions # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return 
[OpenAI::Models::Completion] + # + # @see OpenAI::Models::CompletionCreateParams def create(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) if parsed[:stream] @@ -238,6 +240,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::CompletionCreateParams def create_streaming(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -256,6 +260,8 @@ def create_streaming(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index b70c1ef5..dd58f2c8 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -35,6 +35,8 @@ class Embeddings # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::CreateEmbeddingResponse] + # + # @see OpenAI::Models::EmbeddingCreateParams def create(params) parsed, options = OpenAI::Models::EmbeddingCreateParams.dump_request(params) @client.request( @@ -46,6 +48,8 @@ def create(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index d0253a42..48756391 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -37,6 +37,8 @@ class Files # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileObject] + # + # @see OpenAI::Models::FileCreateParams def create(params) parsed, options = OpenAI::Models::FileCreateParams.dump_request(params) @client.request( @@ -58,6 +60,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileObject] + # + # @see OpenAI::Models::FileRetrieveParams def retrieve(file_id, params = {}) @client.request( method: :get, @@ -87,6 +91,8 @@ def retrieve(file_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::FileListParams def list(params = {}) parsed, options = OpenAI::Models::FileListParams.dump_request(params) @client.request( @@ -108,6 +114,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FileDeleted] + # + # @see OpenAI::Models::FileDeleteParams def delete(file_id, params = {}) @client.request( method: :delete, @@ -126,6 +134,8 @@ def delete(file_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [StringIO] + # + # @see OpenAI::Models::FileContentParams def content(file_id, params = {}) @client.request( method: :get, @@ -136,6 +146,8 @@ def content(file_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/fine_tuning.rb b/lib/openai/resources/fine_tuning.rb index 61663e79..c2f263c6 100644 --- a/lib/openai/resources/fine_tuning.rb +++ b/lib/openai/resources/fine_tuning.rb @@ -6,6 +6,8 @@ class FineTuning # @return [OpenAI::Resources::FineTuning::Jobs] attr_reader :jobs + # @api private + # # @param client [OpenAI::Client] def 
initialize(client:) @client = client diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index baf7a2cc..fd4eb576 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -78,6 +78,8 @@ class Jobs # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] + # + # @see OpenAI::Models::FineTuning::JobCreateParams def create(params) parsed, options = OpenAI::Models::FineTuning::JobCreateParams.dump_request(params) @client.request( @@ -100,6 +102,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] + # + # @see OpenAI::Models::FineTuning::JobRetrieveParams def retrieve(fine_tuning_job_id, params = {}) @client.request( method: :get, @@ -123,6 +127,8 @@ def retrieve(fine_tuning_job_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::FineTuning::JobListParams def list(params = {}) parsed, options = OpenAI::Models::FineTuning::JobListParams.dump_request(params) @client.request( @@ -144,6 +150,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::FineTuning::FineTuningJob] + # + # @see OpenAI::Models::FineTuning::JobCancelParams def cancel(fine_tuning_job_id, params = {}) @client.request( method: :post, @@ -166,6 +174,8 @@ def cancel(fine_tuning_job_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::FineTuning::JobListEventsParams def list_events(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::JobListEventsParams.dump_request(params) @client.request( @@ -178,6 +188,8 @@ def list_events(fine_tuning_job_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index ced20ee9..de62c140 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -18,6 +18,8 @@ class Checkpoints # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::FineTuning::Jobs::CheckpointListParams def list(fine_tuning_job_id, params = {}) parsed, options = OpenAI::Models::FineTuning::Jobs::CheckpointListParams.dump_request(params) @client.request( @@ -30,6 +32,8 @@ def list(fine_tuning_job_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 48b2ac87..0464b4d4 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -30,6 +30,8 @@ class Images # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] + # + # @see OpenAI::Models::ImageCreateVariationParams def create_variation(params) parsed, options = OpenAI::Models::ImageCreateVariationParams.dump_request(params) @client.request( @@ -75,6 +77,8 @@ def create_variation(params) # 
@option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] + # + # @see OpenAI::Models::ImageEditParams def edit(params) parsed, options = OpenAI::Models::ImageEditParams.dump_request(params) @client.request( @@ -123,6 +127,8 @@ def edit(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ImagesResponse] + # + # @see OpenAI::Models::ImageGenerateParams def generate(params) parsed, options = OpenAI::Models::ImageGenerateParams.dump_request(params) @client.request( @@ -134,6 +140,8 @@ def generate(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index 937381d2..044d32d7 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -13,6 +13,8 @@ class Models # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Model] + # + # @see OpenAI::Models::ModelRetrieveParams def retrieve(model, params = {}) @client.request( method: :get, @@ -30,6 +32,8 @@ def retrieve(model, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] + # + # @see OpenAI::Models::ModelListParams def list(params = {}) @client.request( method: :get, @@ -50,6 +54,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ModelDeleted] + # + # @see OpenAI::Models::ModelDeleteParams def delete(model, params = {}) @client.request( method: :delete, @@ -59,6 +65,8 @@ def delete(model, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 85d8cd8f..aa98d908 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -19,6 +19,8 @@ class Moderations # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::ModerationCreateResponse] + # + # @see OpenAI::Models::ModerationCreateParams def create(params) parsed, options = OpenAI::Models::ModerationCreateParams.dump_request(params) @client.request( @@ -30,6 +32,8 @@ def create(params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 99f01ccb..7bd97e6e 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -127,6 +127,8 @@ class Responses # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Responses::Response] + # + # @see OpenAI::Models::Responses::ResponseCreateParams def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] @@ -263,6 +265,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Stream] + # + # @see OpenAI::Models::Responses::ResponseCreateParams def create_streaming(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) @@ -293,6 +297,8 @@ def create_streaming(params) # @option 
params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Responses::Response] + # + # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) parsed, options = OpenAI::Models::Responses::ResponseRetrieveParams.dump_request(params) @client.request( @@ -313,6 +319,8 @@ def retrieve(response_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [nil] + # + # @see OpenAI::Models::Responses::ResponseDeleteParams def delete(response_id, params = {}) @client.request( method: :delete, @@ -322,6 +330,8 @@ def delete(response_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 30f343fd..15d5fd19 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -28,6 +28,8 @@ class InputItems # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) parsed, options = OpenAI::Models::Responses::InputItemListParams.dump_request(params) @client.request( @@ -40,6 +42,8 @@ def list(response_id, params = {}) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index f50e95ed..490e742a 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -45,6 +45,8 @@ class Uploads # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] + # + # @see OpenAI::Models::UploadCreateParams def create(params) parsed, options = OpenAI::Models::UploadCreateParams.dump_request(params) @client.request( @@ -65,6 +67,8 @@ def create(params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] + # + # @see OpenAI::Models::UploadCancelParams def cancel(upload_id, params = {}) @client.request( method: :post, @@ -100,6 +104,8 @@ def cancel(upload_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Upload] + # + # @see OpenAI::Models::UploadCompleteParams def complete(upload_id, params) parsed, options = OpenAI::Models::UploadCompleteParams.dump_request(params) @client.request( @@ -111,6 +117,8 @@ def complete(upload_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index ea9e8a5f..bc06cf70 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -25,6 +25,8 @@ class Parts # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::Uploads::UploadPart] + # + # @see OpenAI::Models::Uploads::PartCreateParams def create(upload_id, params) parsed, options = OpenAI::Models::Uploads::PartCreateParams.dump_request(params) @client.request( @@ -37,6 +39,8 @@ def create(upload_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git 
a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index 5d0fe33d..644fbec4 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -34,6 +34,8 @@ class VectorStores # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] + # + # @see OpenAI::Models::VectorStoreCreateParams def create(params = {}) parsed, options = OpenAI::Models::VectorStoreCreateParams.dump_request(params) @client.request( @@ -54,6 +56,8 @@ def create(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] + # + # @see OpenAI::Models::VectorStoreRetrieveParams def retrieve(vector_store_id, params = {}) @client.request( method: :get, @@ -83,6 +87,8 @@ def retrieve(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStore] + # + # @see OpenAI::Models::VectorStoreUpdateParams def update(vector_store_id, params = {}) parsed, options = OpenAI::Models::VectorStoreUpdateParams.dump_request(params) @client.request( @@ -117,6 +123,8 @@ def update(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::VectorStoreListParams def list(params = {}) parsed, options = OpenAI::Models::VectorStoreListParams.dump_request(params) @client.request( @@ -138,6 +146,8 @@ def list(params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStoreDeleted] + # + # @see OpenAI::Models::VectorStoreDeleteParams def delete(vector_store_id, params = {}) @client.request( method: :delete, @@ -168,6 +178,8 @@ def delete(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] + # + # @see OpenAI::Models::VectorStoreSearchParams def search(vector_store_id, params) parsed, options = OpenAI::Models::VectorStoreSearchParams.dump_request(params) @client.request( @@ -180,6 +192,8 @@ def search(vector_store_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 1a9d1dae..e2b1e6ac 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -26,6 +26,8 @@ class FileBatches # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # + # @see OpenAI::Models::VectorStores::FileBatchCreateParams def create(vector_store_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCreateParams.dump_request(params) @client.request( @@ -48,6 +50,8 @@ def create(vector_store_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # + # @see OpenAI::Models::VectorStores::FileBatchRetrieveParams def retrieve(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchRetrieveParams.dump_request(params) vector_store_id = @@ -74,6 +78,8 @@ def retrieve(batch_id, params) # @option params [OpenAI::RequestOptions, 
Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # + # @see OpenAI::Models::VectorStores::FileBatchCancelParams def cancel(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchCancelParams.dump_request(params) vector_store_id = @@ -118,6 +124,8 @@ def cancel(batch_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::VectorStores::FileBatchListFilesParams def list_files(batch_id, params) parsed, options = OpenAI::Models::VectorStores::FileBatchListFilesParams.dump_request(params) vector_store_id = @@ -134,6 +142,8 @@ def list_files(batch_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 57e9af60..6d94283d 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -28,6 +28,8 @@ class Files # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # + # @see OpenAI::Models::VectorStores::FileCreateParams def create(vector_store_id, params) parsed, options = OpenAI::Models::VectorStores::FileCreateParams.dump_request(params) @client.request( @@ -50,6 +52,8 @@ def create(vector_store_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # + # @see OpenAI::Models::VectorStores::FileRetrieveParams def retrieve(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileRetrieveParams.dump_request(params) vector_store_id = @@ -81,6 +85,8 @@ def retrieve(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # + # @see OpenAI::Models::VectorStores::FileUpdateParams def update(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileUpdateParams.dump_request(params) vector_store_id = @@ -123,6 +129,8 @@ def update(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::CursorPage] + # + # @see OpenAI::Models::VectorStores::FileListParams def list(vector_store_id, params = {}) parsed, options = OpenAI::Models::VectorStores::FileListParams.dump_request(params) @client.request( @@ -149,6 +157,8 @@ def list(vector_store_id, params = {}) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] + # + # @see OpenAI::Models::VectorStores::FileDeleteParams def delete(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileDeleteParams.dump_request(params) vector_store_id = @@ -174,6 +184,8 @@ def delete(file_id, params) # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options # # @return [OpenAI::Page] + # + # @see OpenAI::Models::VectorStores::FileContentParams def content(file_id, params) parsed, options = OpenAI::Models::VectorStores::FileContentParams.dump_request(params) vector_store_id = @@ -189,6 +201,8 @@ def content(file_id, params) ) end + # @api private + # # @param client [OpenAI::Client] def initialize(client:) @client = client diff --git 
a/rbi/lib/openai/resources/audio.rbi b/rbi/lib/openai/resources/audio.rbi index 353012c6..1747c22c 100644 --- a/rbi/lib/openai/resources/audio.rbi +++ b/rbi/lib/openai/resources/audio.rbi @@ -12,6 +12,7 @@ module OpenAI sig { returns(OpenAI::Resources::Audio::Speech) } attr_reader :speech + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index d17427d8..b662b954 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -41,6 +41,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 0a0205db..688f6c9f 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -135,6 +135,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index c99220ed..178eaa7e 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -41,6 +41,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index 0e76d86b..fe814b47 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -108,6 +108,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/beta.rbi b/rbi/lib/openai/resources/beta.rbi index eebedd02..c53baf2b 100644 --- a/rbi/lib/openai/resources/beta.rbi +++ b/rbi/lib/openai/resources/beta.rbi @@ -9,6 +9,7 @@ module OpenAI sig { returns(OpenAI::Resources::Beta::Threads) } attr_reader :threads + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 5b415b26..58bd9abe 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -279,6 +279,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 2c2ac7ef..d5a0da4b 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -416,6 +416,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 24507cc6..b30ffe2f 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -157,6 +157,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git 
a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 71cf02b1..296929f4 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -565,6 +565,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 36fc8b92..a048f9ce 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -85,6 +85,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/chat.rbi b/rbi/lib/openai/resources/chat.rbi index 98440959..45c92d55 100644 --- a/rbi/lib/openai/resources/chat.rbi +++ b/rbi/lib/openai/resources/chat.rbi @@ -6,6 +6,7 @@ module OpenAI sig { returns(OpenAI::Resources::Chat::Completions) } attr_reader :completions + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 58bc7161..595d5865 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -631,6 +631,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index f86371f9..15b7617c 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -31,6 +31,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 64b4c2ef..b5b3feeb 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -289,6 +289,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 9c05cf33..992bfec4 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -45,6 +45,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 9ddc3a8e..3a276b74 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -133,6 +133,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/fine_tuning.rbi b/rbi/lib/openai/resources/fine_tuning.rbi index f2743caa..d6d0b987 100644 --- a/rbi/lib/openai/resources/fine_tuning.rbi +++ b/rbi/lib/openai/resources/fine_tuning.rbi @@ -6,6 +6,7 @@ module OpenAI sig { returns(OpenAI::Resources::FineTuning::Jobs) } attr_reader :jobs + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git 
a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 7d8544da..57cec0cd 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -169,6 +169,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index 5986546b..9049855c 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -26,6 +26,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index d36dd357..ea7194a0 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -136,6 +136,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 505640bc..16962ff9 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -54,6 +54,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 0fd74373..85abe41d 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -30,6 +30,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 1c7a99ac..8a9352b4 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -433,6 +433,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 35fb21c7..822e8463 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -52,6 +52,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index 55b2b24e..7677a58a 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -103,6 +103,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index b90746ff..14dffcb9 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -32,6 +32,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 0c24ebc9..4aef839d 100644 --- 
a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -171,6 +171,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index 62bd27b1..ec63dcb2 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -118,6 +118,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 37afdaf7..0cb2f053 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -161,6 +161,7 @@ module OpenAI ) end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) end From cfdb55862dddf901ba8956da21aeab26fa79df1c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 16:10:55 +0000 Subject: [PATCH 078/295] chore: improve yard docs readability (#80) --- lib/openai/resources/audio/speech.rb | 31 +- lib/openai/resources/audio/transcriptions.rb | 104 +--- lib/openai/resources/audio/translations.rb | 31 +- lib/openai/resources/batches.rb | 61 +-- lib/openai/resources/beta/assistants.rb | 209 ++------ lib/openai/resources/beta/threads.rb | 275 ++-------- lib/openai/resources/beta/threads/messages.rb | 100 +--- lib/openai/resources/beta/threads/runs.rb | 361 +++---------- .../resources/beta/threads/runs/steps.rb | 63 +-- lib/openai/resources/chat/completions.rb | 489 +++--------------- .../resources/chat/completions/messages.rb | 17 +- lib/openai/resources/completions.rb | 252 ++------- lib/openai/resources/embeddings.rb | 36 +- lib/openai/resources/files.rb | 55 +- lib/openai/resources/fine_tuning/jobs.rb | 114 +--- .../resources/fine_tuning/jobs/checkpoints.rb | 13 +- lib/openai/resources/images.rb | 111 ++-- lib/openai/resources/models.rb | 18 +- lib/openai/resources/moderations.rb | 14 +- lib/openai/resources/responses.rb | 272 ++-------- lib/openai/resources/responses/input_items.rb | 29 +- lib/openai/resources/uploads.rb | 43 +- lib/openai/resources/uploads/parts.rb | 10 +- lib/openai/resources/vector_stores.rb | 109 ++-- .../resources/vector_stores/file_batches.rb | 77 +-- lib/openai/resources/vector_stores/files.rb | 100 ++-- 26 files changed, 654 insertions(+), 2340 deletions(-) diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 23e2ee76..8264c108 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -6,28 +6,15 @@ class Audio class Speech # Generates audio from the input text. # - # @param params [OpenAI::Models::Audio::SpeechCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :input The text to generate audio for. The maximum length is 4096 characters. - # - # @option params [String, Symbol, OpenAI::Models::Audio::SpeechModel] :model One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - # - # @option params [String, Symbol] :voice The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and - # `verse`. Previews of the voices are available in the - # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - # - # @option params [String] :instructions Control the voice of your generated audio with additional instructions. Does not - # work with `tts-1` or `tts-1-hd`. - # - # @option params [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] :response_format The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, - # `wav`, and `pcm`. - # - # @option params [Float] :speed The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + # + # @param input [String] + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] + # @param voice [String, Symbol] + # @param instructions [String] + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] + # @param speed [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [StringIO] # diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index cfbc82a3..c9bf8f4f 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -6,47 +6,17 @@ class Audio class Transcriptions # Transcribes audio into the input language. # - # @param params [OpenAI::Models::Audio::TranscriptionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [IO, StringIO] :file The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - # - # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). - # - # @option params [Array] :include Additional information to include in the transcription response. `logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. - # - # @option params [String] :language The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. - # - # @option params [String] :prompt An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. - # - # @option params [Symbol, OpenAI::Models::AudioResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. - # - # @option params [Float] :temperature The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. - # - # @option params [Array] :timestamp_granularities The timestamp granularities to populate for this transcription. - # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # + # @param file [IO, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param include [Array] + # @param language [String] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] + # @param temperature [Float] + # @param timestamp_granularities [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] # @@ -69,47 +39,17 @@ def create(params) # Transcribes audio into the input language. # - # @param params [OpenAI::Models::Audio::TranscriptionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [IO, StringIO] :file The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - # - # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). - # - # @option params [Array] :include Additional information to include in the transcription response. `logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. - # - # @option params [String] :language The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. - # - # @option params [String] :prompt An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. - # - # @option params [Symbol, OpenAI::Models::AudioResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. - # - # @option params [Float] :temperature The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. 
If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. - # - # @option params [Array] :timestamp_granularities The timestamp granularities to populate for this transcription. - # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # + # @param file [IO, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param include [Array] + # @param language [String] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] + # @param temperature [Float] + # @param timestamp_granularities [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Stream] # diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index e3f7a9f2..c1540307 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -6,29 +6,14 @@ class Audio class Translations # Translates audio into English. # - # @param params [OpenAI::Models::Audio::TranslationCreateParams, Hash{Symbol=>Object}] . - # - # @option params [IO, StringIO] :file The audio file object (not file name) translate, in one of these formats: flac, - # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - # - # @option params [String, Symbol, OpenAI::Models::AudioModel] :model ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. - # - # @option params [String] :prompt An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should be in English. - # - # @option params [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] :response_format The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. - # - # @option params [Float] :temperature The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. 
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) + # + # @param file [IO, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] + # @param temperature [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] # diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index b9f05f65..89f67bb1 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -5,34 +5,13 @@ module Resources class Batches # Creates and executes a batch from an uploaded file of requests # - # @param params [OpenAI::Models::BatchCreateParams, Hash{Symbol=>Object}] . + # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) # - # @option params [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] :completion_window The time frame within which the batch should be processed. Currently only `24h` - # is supported. - # - # @option params [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] :endpoint The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. - # - # @option params [String] :input_file_id The ID of an uploaded file that contains requests for the new batch. - # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. - # - # Your input file must be formatted as a - # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), - # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 - # requests, and can be up to 200 MB in size. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] + # @param input_file_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] # @@ -50,11 +29,10 @@ def create(params) # Retrieves a batch. # - # @param batch_id [String] The ID of the batch to retrieve. - # - # @param params [OpenAI::Models::BatchRetrieveParams, Hash{Symbol=>Object}] . 
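# A minimal usage sketch for the `Audio::Translations#create` overload earlier
# in this hunk, assuming a configured client and a local audio file; the path
# and option values are illustrative, and the API key is assumed to be read
# from the environment.
#
#   require "openai"
#
#   client = OpenAI::Client.new # assumes OPENAI_API_KEY is set
#   translation = File.open("speech.mp3", "rb") do |audio|
#     client.audio.translations.create(
#       file: audio,        # IO or StringIO, per the new signature
#       model: "whisper-1", # the only model currently documented for translations
#       temperature: 0.2
#     )
#   end
#   puts translation.text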
+ # @overload retrieve(batch_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param batch_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] # @@ -70,17 +48,11 @@ def retrieve(batch_id, params = {}) # List your organization's batches. # - # @param params [OpenAI::Models::BatchListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # @overload list(after: nil, limit: nil, request_options: {}) # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -101,11 +73,10 @@ def list(params = {}) # 10 minutes, before changing to `cancelled`, where it will have partial results # (if any) available in the output file. # - # @param batch_id [String] The ID of the batch to cancel. - # - # @param params [OpenAI::Models::BatchCancelParams, Hash{Symbol=>Object}] . + # @overload cancel(batch_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param batch_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] # diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 928cc77e..08b62ef0 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -6,76 +6,20 @@ class Beta class Assistants # Create an assistant with a model and instructions. # - # @param params [OpenAI::Models::Beta::AssistantCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String, Symbol, OpenAI::Models::ChatModel] :model ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - # - # @option params [String, nil] :description The description of the assistant. The maximum length is 512 characters. - # - # @option params [String, nil] :instructions The system instructions that the assistant uses. The maximum length is 256,000 - # characters. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, nil] :name The name of the assistant. The maximum length is 256 characters. 
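# The `Batches` overloads earlier in this hunk reduce to a short
# create/poll/cancel loop; a hedged sketch reusing `client` from the earlier
# example (the input file ID and the terminal statuses are illustrative):
#
#   batch = client.batches.create(
#     completion_window: :"24h",          # only `24h` is documented
#     endpoint: :"/v1/chat/completions",
#     input_file_id: "file-abc123",       # a JSONL upload with purpose `batch`
#     metadata: {project: "nightly-evals"}
#   )
#
#   until %w[completed failed expired cancelled].include?(batch.status.to_s)
#     sleep(10)
#     batch = client.batches.retrieve(batch.id)
#   end
#
#   page = client.batches.list(limit: 20) # returns an OpenAI::CursorPage
#   client.batches.cancel(batch.id)       # may take up to 10 minutes to finish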
- #
- # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort **o-series models only**
- #
- # Constrains effort on reasoning for
- # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
- # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can
- # result in faster responses and fewer tokens used on reasoning in a response.
- #
- # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Specifies the format that the model must output. Compatible with
- # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
- # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
- # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
- #
- # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
- # Outputs which ensures the model will match your supplied JSON schema. Learn more
- # in the
- # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
- #
- # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
- # message the model generates is valid JSON.
- #
- # **Important:** when using JSON mode, you **must** also instruct the model to
- # produce JSON yourself via a system or user message. Without this, the model may
- # generate an unending stream of whitespace until the generation reaches the token
- # limit, resulting in a long-running and seemingly "stuck" request. Also note that
- # the message content may be partially cut off if `finish_reason="length"`, which
- # indicates the generation exceeded `max_tokens` or the conversation exceeded the
- # max context length.
- #
- # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
- #
- # @option params [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] :tool_resources A set of resources that are used by the assistant's tools. The resources are
- # specific to the type of tool. For example, the `code_interpreter` tool requires
- # a list of file IDs, while the `file_search` tool requires a list of vector store
- # IDs.
- #
- # @option params [Array] :tools A list of tools enabled on the assistant. There can be a maximum of 128 tools per
- # assistant. Tools can be of types `code_interpreter`, `file_search`, or
- # `function`.
- #
- # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
- #
- # We generally recommend altering this or temperature but not both.
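# The `response_format` union above also spells out the two JSON wire shapes;
# a hedged sketch of each as a plain hash (the schema itself is made up, and
# hash-to-model coercion is assumed):
#
#   json_mode = {type: "json_object"}
#
#   structured_outputs = {
#     type: "json_schema",
#     json_schema: {
#       name: "weather_report", # illustrative schema name
#       schema: {
#         type: "object",
#         properties: {city: {type: "string"}, temp_c: {type: "number"}},
#         required: ["city", "temp_c"]
#       }
#     }
#   }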
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Assistant] # @@ -93,11 +37,10 @@ def create(params) # Retrieves an assistant. # - # @param assistant_id [String] The ID of the assistant to retrieve. - # - # @param params [OpenAI::Models::Beta::AssistantRetrieveParams, Hash{Symbol=>Object}] . + # @overload retrieve(assistant_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param assistant_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Assistant] # @@ -113,78 +56,21 @@ def retrieve(assistant_id, params = {}) # Modifies an assistant. # - # @param assistant_id [String] The ID of the assistant to modify. - # - # @param params [OpenAI::Models::Beta::AssistantUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [String, nil] :description The description of the assistant. The maximum length is 512 characters. - # - # @option params [String, nil] :instructions The system instructions that the assistant uses. The maximum length is 256,000 - # characters. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, Symbol] :model ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - # - # @option params [String, nil] :name The name of the assistant. The maximum length is 256 characters. - # - # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort **o-series models only** - # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. 
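# A short sketch for the `Assistants#create` and `#retrieve` overloads above,
# assuming the resource is reachable as `client.beta.assistants` and that tool
# hashes are coerced into the tool models:
#
#   assistant = client.beta.assistants.create(
#     model: "gpt-4o",
#     name: "Math Tutor",
#     instructions: "You are a personal math tutor. Keep answers brief.",
#     tools: [{type: "code_interpreter"}]
#   )
#   assistant = client.beta.assistants.retrieve(assistant.id)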
- # - # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. - # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. - # - # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. - # - # @option params [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] :tool_resources A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. - # - # @option params [Array] :tools A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. - # - # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. - # - # We generally recommend altering this or temperature but not both. 
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload update(assistant_id, description: nil, instructions: nil, metadata: nil, model: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # + # @param assistant_id [String] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Assistant] # @@ -202,25 +88,13 @@ def update(assistant_id, params = {}) # Returns a list of assistants. # - # @param params [OpenAI::Models::Beta::AssistantListParams, Hash{Symbol=>Object}] . + # @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. - # - # @option params [String] :before A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. - # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -239,11 +113,10 @@ def list(params = {}) # Delete an assistant. # - # @param assistant_id [String] The ID of the assistant to delete. - # - # @param params [OpenAI::Models::Beta::AssistantDeleteParams, Hash{Symbol=>Object}] . + # @overload delete(assistant_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param assistant_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::AssistantDeleted] # diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 587f677e..6b028b1d 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -12,24 +12,12 @@ class Threads # Create a thread. 
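# Continuing the assistants sketch for the update/list/delete overloads earlier
# in this hunk, under the same assumptions:
#
#   client.beta.assistants.update(assistant.id, name: "Math Tutor v2")
#   client.beta.assistants.list(limit: 10, order: :desc) # newest first by `created_at`
#   client.beta.assistants.delete(assistant.id)          # => Beta::AssistantDeleted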
# - # @param params [OpenAI::Models::Beta::ThreadCreateParams, Hash{Symbol=>Object}] . + # @overload create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # - # @option params [Array] :messages A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] :tool_resources A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param messages [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] # @@ -47,11 +35,10 @@ def create(params = {}) # Retrieves a thread. # - # @param thread_id [String] The ID of the thread to retrieve. + # @overload retrieve(thread_id, request_options: {}) # - # @param params [OpenAI::Models::Beta::ThreadRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] # @@ -67,23 +54,12 @@ def retrieve(thread_id, params = {}) # Modifies a thread. # - # @param thread_id [String] The ID of the thread to modify. Only the `metadata` can be modified. - # - # @param params [OpenAI::Models::Beta::ThreadUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # @overload update(thread_id, metadata: nil, tool_resources: nil, request_options: {}) # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] :tool_resources A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. 
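# A hedged sketch for the `Threads#create` and `#retrieve` overloads above,
# assuming a `client.beta.threads` accessor and hash-to-model coercion for
# messages:
#
#   thread = client.beta.threads.create(
#     messages: [{role: :user, content: "What is 3 + 11?"}],
#     metadata: {topic: "arithmetic"}
#   )
#   thread = client.beta.threads.retrieve(thread.id)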
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] # @@ -101,11 +77,10 @@ def update(thread_id, params = {}) # Delete a thread. # - # @param thread_id [String] The ID of the thread to delete. - # - # @param params [OpenAI::Models::Beta::ThreadDeleteParams, Hash{Symbol=>Object}] . + # @overload delete(thread_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::ThreadDeleted] # @@ -121,97 +96,24 @@ def delete(thread_id, params = {}) # Create a thread and run it in one request. # - # @param params [OpenAI::Models::Beta::ThreadCreateAndRunParams, Hash{Symbol=>Object}] . - # - # @option params [String] :assistant_id The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. - # - # @option params [String, nil] :instructions Override the default system message of the assistant. This is useful for - # modifying the behavior on a per-run basis. - # - # @option params [Integer, nil] :max_completion_tokens The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. - # - # @option params [Integer, nil] :max_prompt_tokens The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, Symbol, OpenAI::Models::ChatModel, nil] :model The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. - # - # @option params [Boolean] :parallel_tool_calls Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Specifies the format that the model must output. 
Compatible with
- # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
- # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
- # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
- #
- # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
- # Outputs which ensures the model will match your supplied JSON schema. Learn more
- # in the
- # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
- #
- # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
- # message the model generates is valid JSON.
- #
- # **Important:** when using JSON mode, you **must** also instruct the model to
- # produce JSON yourself via a system or user message. Without this, the model may
- # generate an unending stream of whitespace until the generation reaches the token
- # limit, resulting in a long-running and seemingly "stuck" request. Also note that
- # the message content may be partially cut off if `finish_reason="length"`, which
- # indicates the generation exceeded `max_tokens` or the conversation exceeded the
- # max context length.
- #
- # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
- #
- # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] :thread Options to create a new thread. If no thread is provided when running a request,
- # an empty thread will be created.
- #
- # @option params [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] :tool_choice Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tools and instead generates a message. `auto` is the default value
- # and means the model can pick between generating a message or calling one or more
- # tools. `required` means the model must call one or more tools before responding
- # to the user. Specifying a particular tool like `{"type": "file_search"}` or
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] :tool_resources A set of resources that are used by the assistant's tools. The resources are
- # specific to the type of tool. For example, the `code_interpreter` tool requires
- # a list of file IDs, while the `file_search` tool requires a list of vector store
- # IDs.
- #
- # @option params [Array, nil] :tools Override the tools the assistant can use for this run. This is useful for
- # modifying the behavior on a per-run basis.
- #
- # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
- #
- # We generally recommend altering this or temperature but not both.
- #
- # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] :truncation_strategy Controls for how a thread will be truncated prior to the run. Use this to
- # control the initial context window of the run.
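# The thread update/delete overloads a little earlier follow the same pattern;
# a sketch under the same assumptions:
#
#   client.beta.threads.update(thread.id, metadata: {topic: "algebra"})
#   client.beta.threads.delete(thread.id) # => Beta::ThreadDeleted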
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create_and_run(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # + # @param assistant_id [String] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -233,97 +135,24 @@ def create_and_run(params) # Create a thread and run it in one request. # - # @param params [OpenAI::Models::Beta::ThreadCreateAndRunParams, Hash{Symbol=>Object}] . - # - # @option params [String] :assistant_id The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. - # - # @option params [String, nil] :instructions Override the default system message of the assistant. This is useful for - # modifying the behavior on a per-run basis. - # - # @option params [Integer, nil] :max_completion_tokens The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. - # - # @option params [Integer, nil] :max_prompt_tokens The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, Symbol, OpenAI::Models::ChatModel, nil] :model The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. 
If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. - # - # @option params [Boolean] :parallel_tool_calls Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. - # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. - # - # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. - # - # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] :thread Options to create a new thread. If no thread is provided when running a request, - # an empty thread will be created. - # - # @option params [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] :tool_choice Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. - # - # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] :tool_resources A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. - # - # @option params [Array, nil] :tools Override the tools the assistant can use for this run. This is useful for - # modifying the behavior on a per-run basis. 
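# For the blocking `create_and_run` overload above, a minimal sketch under the
# same assumptions (`thread:` is assumed to accept the same hash shape as
# `Threads#create` params; the assistant ID is a placeholder):
#
#   run = client.beta.threads.create_and_run(
#     assistant_id: "asst_abc123",
#     thread: {messages: [{role: :user, content: "Summarize our conversation."}]},
#     max_completion_tokens: 256
#   )
#   puts run.status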
- # - # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. - # - # We generally recommend altering this or temperature but not both. - # - # @option params [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] :truncation_strategy Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create_and_run_streaming(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # + # @param assistant_id [String] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Stream] # diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 0e86c04e..c8900898 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -7,30 +7,14 @@ class Threads class Messages # Create a message. # - # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # to create a message for. + # @overload create(thread_id, content:, role:, attachments: nil, metadata: nil, request_options: {}) # - # @param params [OpenAI::Models::Beta::Threads::MessageCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String, Array] :content The text contents of the message. - # - # @option params [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] :role The role of the entity that is creating the message. Allowed values include: - # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. - # - # @option params [Array, nil] :attachments A list of files attached to the message, and the tools they should be added to. 
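# The streaming variant above returns an OpenAI::Stream rather than a Run; a
# hedged sketch that simply inspects each yielded event, assuming the stream
# is enumerable (the event shape is not shown in this patch):
#
#   stream = client.beta.threads.create_and_run_streaming(
#     assistant_id: "asst_abc123",
#     thread: {messages: [{role: :user, content: "Stream me an answer."}]}
#   )
#   stream.each { |event| p event }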
- # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] # @@ -48,14 +32,11 @@ def create(thread_id, params) # Retrieve a message. # - # @param message_id [String] The ID of the message to retrieve. - # - # @param params [OpenAI::Models::Beta::Threads::MessageRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # to which this message belongs. + # @overload retrieve(message_id, thread_id:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param message_id [String] + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] # @@ -76,20 +57,12 @@ def retrieve(message_id, params) # Modifies a message. # - # @param message_id [String] Path param: The ID of the message to modify. - # - # @param params [OpenAI::Models::Beta::Threads::MessageUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the thread to which this message belongs. + # @overload update(message_id, thread_id:, metadata: nil, request_options: {}) # - # @option params [Hash{Symbol=>String}, nil] :metadata Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param message_id [String] + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] # @@ -111,30 +84,15 @@ def update(message_id, params) # Returns a list of messages for a given thread. # - # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # the messages belong to. - # - # @param params [OpenAI::Models::Beta::Threads::MessageListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. - # - # @option params [String] :before A cursor for use in pagination. 
`before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # @overload list(thread_id, after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. - # - # @option params [String] :run_id Filter messages by the run ID that generated them. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -153,13 +111,11 @@ def list(thread_id, params = {}) # Deletes a message. # - # @param message_id [String] The ID of the message to delete. - # - # @param params [OpenAI::Models::Beta::Threads::MessageDeleteParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id The ID of the thread to which this message belongs. + # @overload delete(message_id, thread_id:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param message_id [String] + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::MessageDeleted] # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index a36e8c81..45bfd146 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -10,117 +10,27 @@ class Runs # Create a run. # - # @param thread_id [String] Path param: The ID of the thread to run. - # - # @param params [OpenAI::Models::Beta::Threads::RunCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :assistant_id Body param: The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. - # - # @option params [Array] :include Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. - # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. - # - # @option params [String, nil] :additional_instructions Body param: Appends additional instructions at the end of the instructions for - # the run. This is useful for modifying the behavior on a per-run basis without - # overriding other instructions. - # - # @option params [Array, nil] :additional_messages Body param: Adds additional messages to the thread before creating the run. 
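# A sketch for the message overloads above, assuming a
# `client.beta.threads.messages` accessor and reusing `thread` from the earlier
# sketches; note that retrieve/update/delete take the message ID positionally
# and the thread ID as a keyword, per the new signatures:
#
#   message = client.beta.threads.messages.create(
#     thread.id,
#     content: "Thanks, that helps!",
#     role: :user
#   )
#   client.beta.threads.messages.list(thread.id, order: :asc, limit: 20)
#   client.beta.threads.messages.delete(message.id, thread_id: thread.id)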
- # - # @option params [String, nil] :instructions Body param: Overrides the - # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) - # of the assistant. This is useful for modifying the behavior on a per-run basis. - # - # @option params [Integer, nil] :max_completion_tokens Body param: The maximum number of completion tokens that may be used over the - # course of the run. The run will make a best effort to use only the number of - # completion tokens specified, across multiple turns of the run. If the run - # exceeds the number of completion tokens specified, the run will end with status - # `incomplete`. See `incomplete_details` for more info. - # - # @option params [Integer, nil] :max_prompt_tokens Body param: The maximum number of prompt tokens that may be used over the course - # of the run. The run will make a best effort to use only the number of prompt - # tokens specified, across multiple turns of the run. If the run exceeds the - # number of prompt tokens specified, the run will end with status `incomplete`. - # See `incomplete_details` for more info. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, Symbol, OpenAI::Models::ChatModel, nil] :model Body param: The ID of the - # [Model](https://platform.openai.com/docs/api-reference/models) to be used to - # execute this run. If a value is provided here, it will override the model - # associated with the assistant. If not, the model associated with the assistant - # will be used. - # - # @option params [Boolean] :parallel_tool_calls Body param: Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort Body param: **o-series models only** - # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. - # - # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Body param: Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. 
- #
- # **Important:** when using JSON mode, you **must** also instruct the model to
- # produce JSON yourself via a system or user message. Without this, the model may
- # generate an unending stream of whitespace until the generation reaches the token
- # limit, resulting in a long-running and seemingly "stuck" request. Also note that
- # the message content may be partially cut off if `finish_reason="length"`, which
- # indicates the generation exceeded `max_tokens` or the conversation exceeded the
- # max context length.
- #
- # @option params [Float, nil] :temperature Body param: What sampling temperature to use, between 0 and 2. Higher values
- # like 0.8 will make the output more random, while lower values like 0.2 will make
- # it more focused and deterministic.
- #
- # @option params [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] :tool_choice Body param: Controls which (if any) tool is called by the model. `none` means
- # the model will not call any tools and instead generates a message. `auto` is the
- # default value and means the model can pick between generating a message or
- # calling one or more tools. `required` means the model must call one or more
- # tools before responding to the user. Specifying a particular tool like
- # `{"type": "file_search"}` or
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # @option params [Array, nil] :tools Body param: Override the tools the assistant can use for this run. This is
- # useful for modifying the behavior on a per-run basis.
- #
- # @option params [Float, nil] :top_p Body param: An alternative to sampling with temperature, called nucleus
- # sampling, where the model considers the results of the tokens with top_p
- # probability mass. So 0.1 means only the tokens comprising the top 10%
- # probability mass are considered.
- #
- # We generally recommend altering this or temperature but not both.
- #
- # @option params [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] :truncation_strategy Body param: Controls for how a thread will be truncated prior to the run. Use
- # this to control the initial context window of the run.
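# The `tool_choice` and `truncation_strategy` bodies above are easiest to read
# as wire shapes; a hedged sketch (the function shapes are quoted from the doc
# text above, while the `last_messages` strategy is an assumption not shown in
# this patch):
#
#   tool_choice         = {type: "function", function: {name: "my_function"}}
#   truncation_strategy = {type: "last_messages", last_messages: 10}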
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # + # @param thread_id [String] + # @param assistant_id [String] + # @param include [Array] + # @param additional_instructions [String, nil] + # @param additional_messages [Array, nil] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -144,117 +54,27 @@ def create(thread_id, params) # Create a run. # - # @param thread_id [String] Path param: The ID of the thread to run. - # - # @param params [OpenAI::Models::Beta::Threads::RunCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :assistant_id Body param: The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. - # - # @option params [Array] :include Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. - # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. - # - # @option params [String, nil] :additional_instructions Body param: Appends additional instructions at the end of the instructions for - # the run. This is useful for modifying the behavior on a per-run basis without - # overriding other instructions. - # - # @option params [Array, nil] :additional_messages Body param: Adds additional messages to the thread before creating the run. - # - # @option params [String, nil] :instructions Body param: Overrides the - # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) - # of the assistant. This is useful for modifying the behavior on a per-run basis. - # - # @option params [Integer, nil] :max_completion_tokens Body param: The maximum number of completion tokens that may be used over the - # course of the run. The run will make a best effort to use only the number of - # completion tokens specified, across multiple turns of the run. 
If the run - # exceeds the number of completion tokens specified, the run will end with status - # `incomplete`. See `incomplete_details` for more info. - # - # @option params [Integer, nil] :max_prompt_tokens Body param: The maximum number of prompt tokens that may be used over the course - # of the run. The run will make a best effort to use only the number of prompt - # tokens specified, across multiple turns of the run. If the run exceeds the - # number of prompt tokens specified, the run will end with status `incomplete`. - # See `incomplete_details` for more info. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, Symbol, OpenAI::Models::ChatModel, nil] :model Body param: The ID of the - # [Model](https://platform.openai.com/docs/api-reference/models) to be used to - # execute this run. If a value is provided here, it will override the model - # associated with the assistant. If not, the model associated with the assistant - # will be used. - # - # @option params [Boolean] :parallel_tool_calls Body param: Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort Body param: **o-series models only** - # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. - # - # @option params [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] :response_format Body param: Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. - # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. - # - # @option params [Float, nil] :temperature Body param: What sampling temperature to use, between 0 and 2. 
Higher values
- # like 0.8 will make the output more random, while lower values like 0.2 will make
- # it more focused and deterministic.
- #
- # @option params [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] :tool_choice Body param: Controls which (if any) tool is called by the model. `none` means
- # the model will not call any tools and instead generates a message. `auto` is the
- # default value and means the model can pick between generating a message or
- # calling one or more tools. `required` means the model must call one or more
- # tools before responding to the user. Specifying a particular tool like
- # `{"type": "file_search"}` or
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # @option params [Array, nil] :tools Body param: Override the tools the assistant can use for this run. This is
- # useful for modifying the behavior on a per-run basis.
- #
- # @option params [Float, nil] :top_p Body param: An alternative to sampling with temperature, called nucleus
- # sampling, where the model considers the results of the tokens with top_p
- # probability mass. So 0.1 means only the tokens comprising the top 10%
- # probability mass are considered.
- #
- # We generally recommend altering this or temperature but not both.
- #
- # @option params [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] :truncation_strategy Body param: Controls for how a thread will be truncated prior to the run. Use
- # this to control the initial context window of the run.
- #
- # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+ # @overload create_streaming(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
+ #
+ # @param thread_id [String]
+ # @param assistant_id [String]
+ # @param include [Array]
+ # @param additional_instructions [String, nil]
+ # @param additional_messages [Array, nil]
+ # @param instructions [String, nil]
+ # @param max_completion_tokens [Integer, nil]
+ # @param max_prompt_tokens [Integer, nil]
+ # @param metadata [Hash{Symbol=>String}, nil]
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, nil]
+ # @param parallel_tool_calls [Boolean]
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil]
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
+ # @param temperature [Float, nil]
+ # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil]
+ # @param tools [Array, nil]
+ # @param top_p [Float, nil]
+ # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil]
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Stream]
 #
@@ -281,14 +101,11 @@ def create_streaming(thread_id, params)
 # Retrieves a run.
 #
- # @param run_id [String] The ID of the run to retrieve.
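The streaming variant above returns an `OpenAI::Stream` rather than a finished run. A consumption sketch, assuming the stream mixes in `Enumerable` and yields event objects (an assumption about `OpenAI::Stream`, which this patch does not define):

  stream = client.beta.threads.runs.create_streaming(
    "thread_abc123",            # hypothetical thread ID
    assistant_id: "asst_abc123" # hypothetical assistant ID
  )
  stream.each do |event|
    # each yielded item is one server-sent event in the run's lifecycle
    puts event.inspect
  end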
+ # @overload retrieve(run_id, thread_id:, request_options: {}) # - # @param params [OpenAI::Models::Beta::Threads::RunRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # that was run. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -309,21 +126,12 @@ def retrieve(run_id, params) # Modifies a run. # - # @param run_id [String] Path param: The ID of the run to modify. - # - # @param params [OpenAI::Models::Beta::Threads::RunUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the - # [thread](https://platform.openai.com/docs/api-reference/threads) that was run. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. + # @overload update(run_id, thread_id:, metadata: nil, request_options: {}) # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -345,27 +153,14 @@ def update(run_id, params) # Returns a list of runs belonging to a thread. # - # @param thread_id [String] The ID of the thread the run belongs to. - # - # @param params [OpenAI::Models::Beta::Threads::RunListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. - # - # @option params [String] :before A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. - # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # @overload list(thread_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) # - # @option params [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. 
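The `after`/`before` cursors documented above compose into the usual pagination loop. A sketch that fetches one page and then the next; `#data` and `#id` are assumed accessors on `OpenAI::CursorPage` and the run objects, not something this hunk establishes:

  page = client.beta.threads.runs.list("thread_abc123", limit: 20, order: :desc)
  last_id = page.data.last&.id
  # Pass the last object ID back as `after` to fetch the following page.
  next_page = client.beta.threads.runs.list("thread_abc123", limit: 20, after: last_id)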
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param thread_id [String] + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -384,13 +179,11 @@ def list(thread_id, params = {}) # Cancels a run that is `in_progress`. # - # @param run_id [String] The ID of the run to cancel. - # - # @param params [OpenAI::Models::Beta::Threads::RunCancelParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id The ID of the thread to which this run belongs. + # @overload cancel(run_id, thread_id:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -414,17 +207,12 @@ def cancel(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @param run_id [String] Path param: The ID of the run that requires the tool output submission. - # - # @param params [OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the - # [thread](https://platform.openai.com/docs/api-reference/threads) to which this - # run belongs. + # @overload submit_tool_outputs(run_id, thread_id:, tool_outputs:, request_options: {}) # - # @option params [Array] :tool_outputs Body param: A list of tools for which the outputs are being submitted. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param tool_outputs [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] # @@ -453,17 +241,12 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @param run_id [String] Path param: The ID of the run that requires the tool output submission. - # - # @param params [OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the - # [thread](https://platform.openai.com/docs/api-reference/threads) to which this - # run belongs. - # - # @option params [Array] :tool_outputs Body param: A list of tools for which the outputs are being submitted. + # @overload submit_tool_outputs_streaming(run_id, thread_id:, tool_outputs:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param tool_outputs [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Stream] # diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 5d5559cd..e22cea6d 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -8,24 +8,13 @@ class Runs class Steps # Retrieves a run step. # - # @param step_id [String] Path param: The ID of the run step to retrieve. 
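For the tool-output submission endpoints above, a sketch; the `{tool_call_id:, output:}` element shape follows the public API reference rather than anything shown in this hunk, and the IDs are hypothetical:

  run = client.beta.threads.runs.submit_tool_outputs(
    "run_abc123",
    thread_id: "thread_abc123",
    tool_outputs: [
      {tool_call_id: "call_abc123", output: "22"} # one entry per pending tool call
    ]
  )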
+ # @overload retrieve(step_id, thread_id:, run_id:, include: nil, request_options: {}) # - # @param params [OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the thread to which the run and run step belongs. - # - # @option params [String] :run_id Path param: The ID of the run to which the run step belongs. - # - # @option params [Array] :include Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. - # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param step_id [String] + # @param thread_id [String] + # @param run_id [String] + # @param include [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] # @@ -51,38 +40,16 @@ def retrieve(step_id, params) # Returns a list of run steps belonging to a run. # - # @param run_id [String] Path param: The ID of the run the run steps belong to. - # - # @param params [OpenAI::Models::Beta::Threads::Runs::StepListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :thread_id Path param: The ID of the thread the run and run steps belong to. - # - # @option params [String] :after Query param: A cursor for use in pagination. `after` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, ending with obj_foo, your subsequent call can include - # after=obj_foo in order to fetch the next page of the list. - # - # @option params [String] :before Query param: A cursor for use in pagination. `before` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, starting with obj_foo, your subsequent call can include - # before=obj_foo in order to fetch the previous page of the list. - # - # @option params [Array] :include Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. - # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. - # - # @option params [Integer] :limit Query param: A limit on the number of objects to be returned. Limit can range - # between 1 and 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] :order Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ascending order and `desc` for descending order. 
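A retrieval sketch for the `include` query parameter documented above; the single supported value is passed as a bare string here, though the generated SDK may expect an enum constant instead (an assumption):

  step = client.beta.threads.runs.steps.retrieve(
    "step_abc123",              # hypothetical step ID
    thread_id: "thread_abc123",
    run_id: "run_abc123",
    include: ["step_details.tool_calls[*].file_search.results[*].content"]
  )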
+ # @overload list(run_id, thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param run_id [String] + # @param thread_id [String] + # @param after [String] + # @param before [String] + # @param include [Array] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 516866e9..41ffd171 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -25,192 +25,39 @@ class Completions # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @param params [OpenAI::Models::Chat::CompletionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [Array] :messages A list of messages comprising the conversation so far. Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). - # - # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - # - # @option params [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] :audio Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). - # - # @option params [Float, nil] :frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. - # - # @option params [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] :function_call Deprecated in favor of `tool_choice`. - # - # Controls which (if any) function is called by the model. - # - # `none` means the model will not call a function and instead generates a message. - # - # `auto` means the model can pick between generating a message or calling a - # function. - # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. - # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. - # - # @option params [Array] :functions Deprecated in favor of `tools`. - # - # A list of functions the model may generate JSON inputs for. - # - # @option params [Hash{Symbol=>Integer}, nil] :logit_bias Modify the likelihood of specified tokens appearing in the completion. - # - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. 
Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. - # - # @option params [Boolean, nil] :logprobs Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. - # - # @option params [Integer, nil] :max_completion_tokens An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). - # - # @option params [Integer, nil] :max_tokens The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. - # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [Array, nil] :modalities Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: - # - # `["text"]` - # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). To request that - # this model generate both text and audio responses, you can use: - # - # `["text", "audio"]` - # - # @option params [Integer, nil] :n How many chat completion choices to generate for each input message. Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. - # - # @option params [Boolean] :parallel_tool_calls Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] :prediction Static predicted output content, such as the content of a text file that is - # being regenerated. - # - # @option params [Float, nil] :presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. - # - # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort **o-series models only** - # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. 
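The `response_format` entry just below covers JSON mode and Structured Outputs. A JSON-mode sketch; the model is also told to emit JSON in a system message, since JSON mode alone does not prompt for it (model name and message contents are hypothetical):

  completion = client.chat.completions.create(
    model: "gpt-4o",
    response_format: {type: "json_object"}, # JSON mode, per the entry below
    messages: [
      {role: :system, content: "Reply with a single JSON object."},
      {role: :user, content: "Name one color."}
    ]
  )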
- #
- # @option params [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] :response_format An object specifying the format that the model must output.
- #
- # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
- # Outputs which ensures the model will match your supplied JSON schema. Learn more
- # in the
- # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
- #
- # Setting to `{ "type": "json_object" }` enables the older JSON mode, which
- # ensures the message the model generates is valid JSON. Using `json_schema` is
- # preferred for models that support it.
- #
- # @option params [Integer, nil] :seed This feature is in Beta. If specified, our system will make a best effort to
- # sample deterministically, such that repeated requests with the same `seed` and
- # parameters should return the same result. Determinism is not guaranteed, and you
- # should refer to the `system_fingerprint` response parameter to monitor changes
- # in the backend.
- #
- # @option params [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] :service_tier Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - When not set, the default behavior is 'auto'.
- #
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
- #
- # @option params [String, Array, nil] :stop Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
- #
- # @option params [Boolean, nil] :store Whether or not to store the output of this chat completion request for use in
- # our [model distillation](https://platform.openai.com/docs/guides/distillation)
- # or [evals](https://platform.openai.com/docs/guides/evals) products.
- #
- # @option params [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] :stream_options Options for streaming response. Only set this when you set `stream: true`.
- #
- # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic. We generally recommend altering this or `top_p` but
- # not both.
- #
- # @option params [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] :tool_choice Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tool and instead generates a message. `auto` means the model can
- # pick between generating a message or calling one or more tools. `required` means
- # the model must call one or more tools. Specifying a particular tool via
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # `none` is the default when no tools are present.
`auto` is the default if tools - # are present. - # - # @option params [Array] :tools A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. - # - # @option params [Integer, nil] :top_logprobs An integer between 0 and 20 specifying the number of most likely tokens to - # return at each token position, each with an associated log probability. - # `logprobs` must be set to `true` if this parameter is used. - # - # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. - # - # We generally recommend altering this or `temperature` but not both. - # - # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). - # - # @option params [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] :web_search_options This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # + # @param messages [Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] + # @param frequency_penalty [Float, nil] + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + # @param functions [Array] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Boolean, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param modalities [Array, nil] + # @param n [Integer, nil] + # @param parallel_tool_calls [Boolean] + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] + # @param presence_penalty [Float, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + # @param seed [Integer, nil] + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] + # @param stop [String, Array, nil] + # @param store [Boolean, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, 
OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + # @param tools [Array] + # @param top_logprobs [Integer, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] # @@ -248,192 +95,39 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @param params [OpenAI::Models::Chat::CompletionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [Array] :messages A list of messages comprising the conversation so far. Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). - # - # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - # - # @option params [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] :audio Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). - # - # @option params [Float, nil] :frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. - # - # @option params [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] :function_call Deprecated in favor of `tool_choice`. - # - # Controls which (if any) function is called by the model. - # - # `none` means the model will not call a function and instead generates a message. - # - # `auto` means the model can pick between generating a message or calling a - # function. - # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. - # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. - # - # @option params [Array] :functions Deprecated in favor of `tools`. - # - # A list of functions the model may generate JSON inputs for. - # - # @option params [Hash{Symbol=>Integer}, nil] :logit_bias Modify the likelihood of specified tokens appearing in the completion. - # - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. 
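The `@overload` for `create` above compresses a couple hundred `@option` lines into one signature. A plain, non-streaming usage sketch against it; the model name, message shape, and response accessors are assumptions rather than anything fixed by this patch:

  completion = client.chat.completions.create(
    model: "gpt-4o",
    messages: [
      {role: :user, content: "Say hello in one word."}
    ],
    temperature: 0.2 # keep the reply focused
  )
  puts completion.choices.first.message.content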
- # - # @option params [Boolean, nil] :logprobs Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. - # - # @option params [Integer, nil] :max_completion_tokens An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). - # - # @option params [Integer, nil] :max_tokens The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. - # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [Array, nil] :modalities Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: - # - # `["text"]` - # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). To request that - # this model generate both text and audio responses, you can use: - # - # `["text", "audio"]` - # - # @option params [Integer, nil] :n How many chat completion choices to generate for each input message. Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. - # - # @option params [Boolean] :parallel_tool_calls Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. - # - # @option params [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] :prediction Static predicted output content, such as the content of a text file that is - # being regenerated. - # - # @option params [Float, nil] :presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. - # - # @option params [Symbol, OpenAI::Models::ReasoningEffort, nil] :reasoning_effort **o-series models only** - # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. - # - # @option params [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] :response_format An object specifying the format that the model must output. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
- #
- # Setting to `{ "type": "json_object" }` enables the older JSON mode, which
- # ensures the message the model generates is valid JSON. Using `json_schema` is
- # preferred for models that support it.
- #
- # @option params [Integer, nil] :seed This feature is in Beta. If specified, our system will make a best effort to
- # sample deterministically, such that repeated requests with the same `seed` and
- # parameters should return the same result. Determinism is not guaranteed, and you
- # should refer to the `system_fingerprint` response parameter to monitor changes
- # in the backend.
- #
- # @option params [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] :service_tier Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - When not set, the default behavior is 'auto'.
- #
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
- #
- # @option params [String, Array, nil] :stop Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
- #
- # @option params [Boolean, nil] :store Whether or not to store the output of this chat completion request for use in
- # our [model distillation](https://platform.openai.com/docs/guides/distillation)
- # or [evals](https://platform.openai.com/docs/guides/evals) products.
- #
- # @option params [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] :stream_options Options for streaming response. Only set this when you set `stream: true`.
- #
- # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic. We generally recommend altering this or `top_p` but
- # not both.
- #
- # @option params [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] :tool_choice Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tool and instead generates a message. `auto` means the model can
- # pick between generating a message or calling one or more tools. `required` means
- # the model must call one or more tools. Specifying a particular tool via
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # `none` is the default when no tools are present. `auto` is the default if tools
- # are present.
- #
- # @option params [Array] :tools A list of tools the model may call. Currently, only functions are supported as a
- # tool. Use this to provide a list of functions the model may generate JSON inputs
- # for. A max of 128 functions are supported.
- #
- # @option params [Integer, nil] :top_logprobs An integer between 0 and 20 specifying the number of most likely tokens to
- # return at each token position, each with an associated log probability.
- # `logprobs` must be set to `true` if this parameter is used. - # - # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. - # - # We generally recommend altering this or `temperature` but not both. - # - # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). - # - # @option params [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] :web_search_options This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create_streaming(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # + # @param messages [Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] + # @param frequency_penalty [Float, nil] + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + # @param functions [Array] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Boolean, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param modalities [Array, nil] + # @param n [Integer, nil] + # @param parallel_tool_calls [Boolean] + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] + # @param presence_penalty [Float, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + # @param seed [Integer, nil] + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] + # @param stop [String, Array, nil] + # @param store [Boolean, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + # @param tools [Array] + # @param top_logprobs [Integer, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Stream] # @@ -459,11 +153,10 @@ def create_streaming(params) # Get a stored chat completion. 
Only Chat Completions that have been created with # the `store` parameter set to `true` will be returned. # - # @param completion_id [String] The ID of the chat completion to retrieve. + # @overload retrieve(completion_id, request_options: {}) # - # @param params [OpenAI::Models::Chat::CompletionRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param completion_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] # @@ -481,18 +174,11 @@ def retrieve(completion_id, params = {}) # with the `store` parameter set to `true` can be modified. Currently, the only # supported modification is to update the `metadata` field. # - # @param completion_id [String] The ID of the chat completion to update. - # - # @param params [OpenAI::Models::Chat::CompletionUpdateParams, Hash{Symbol=>Object}] . + # @overload update(completion_id, metadata:, request_options: {}) # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param completion_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] # @@ -511,22 +197,14 @@ def update(completion_id, params) # List stored Chat Completions. Only Chat Completions that have been stored with # the `store` parameter set to `true` will be returned. # - # @param params [OpenAI::Models::Chat::CompletionListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :after Identifier for the last chat completion from the previous pagination request. + # @overload list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # - # @option params [Integer] :limit Number of Chat Completions to retrieve. - # - # @option params [Hash{Symbol=>String}, nil] :metadata A list of metadata keys to filter the Chat Completions by. Example: - # - # `metadata[key1]=value1&metadata[key2]=value2` - # - # @option params [String] :model The model used to generate the Chat Completions. - # - # @option params [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] :order Sort order for Chat Completions by timestamp. Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param after [String] + # @param limit [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -546,11 +224,10 @@ def list(params = {}) # Delete a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be deleted. # - # @param completion_id [String] The ID of the chat completion to delete. 
- # - # @param params [OpenAI::Models::Chat::CompletionDeleteParams, Hash{Symbol=>Object}] . + # @overload delete(completion_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param completion_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletionDeleted] # diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index c55835f8..56dd9e36 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -8,18 +8,13 @@ class Messages # Get the messages in a stored chat completion. Only Chat Completions that have # been created with the `store` parameter set to `true` will be returned. # - # @param completion_id [String] The ID of the chat completion to retrieve messages from. + # @overload list(completion_id, after: nil, limit: nil, order: nil, request_options: {}) # - # @param params [OpenAI::Models::Chat::Completions::MessageListParams, Hash{Symbol=>Object}] . - # - # @option params [String] :after Identifier for the last message from the previous pagination request. - # - # @option params [Integer] :limit Number of messages to retrieve. - # - # @option params [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] :order Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param completion_id [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 8a22b8c0..7a58a20a 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -5,112 +5,26 @@ module Resources class Completions # Creates a completion for the provided prompt and parameters. # - # @param params [OpenAI::Models::CompletionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String, Symbol] :model ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - # - # @option params [String, Array, Array, Array>, nil] :prompt The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. - # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. - # - # @option params [Integer, nil] :best_of Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. - # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. - # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. 
Use carefully and ensure that you have reasonable
- # settings for `max_tokens` and `stop`.
- #
- # @option params [Boolean, nil] :echo Echo back the prompt in addition to the completion
- #
- # @option params [Float, nil] :frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on their
- # existing frequency in the text so far, decreasing the model's likelihood to
- # repeat the same line verbatim.
- #
- # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
- #
- # @option params [Hash{Symbol=>Integer}, nil] :logit_bias Modify the likelihood of specified tokens appearing in the completion.
- #
- # Accepts a JSON object that maps tokens (specified by their token ID in the GPT
- # tokenizer) to an associated bias value from -100 to 100. You can use this
- # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs.
- # Mathematically, the bias is added to the logits generated by the model prior to
- # sampling. The exact effect will vary per model, but values between -1 and 1
- # should decrease or increase likelihood of selection; values like -100 or 100
- # should result in a ban or exclusive selection of the relevant token.
- #
- # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
- # from being generated.
- #
- # @option params [Integer, nil] :logprobs Include the log probabilities on the `logprobs` most likely output tokens, as
- # well as the chosen tokens. For example, if `logprobs` is 5, the API will return a
- # list of the 5 most likely tokens. The API will always return the `logprob` of
- # the sampled token, so there may be up to `logprobs+1` elements in the response.
- #
- # The maximum value for `logprobs` is 5.
- #
- # @option params [Integer, nil] :max_tokens The maximum number of [tokens](/tokenizer) that can be generated in the
- # completion.
- #
- # The token count of your prompt plus `max_tokens` cannot exceed the model's
- # context length.
- # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
- # for counting tokens.
- #
- # @option params [Integer, nil] :n How many completions to generate for each prompt.
- #
- # **Note:** Because this parameter generates many completions, it can quickly
- # consume your token quota. Use carefully and ensure that you have reasonable
- # settings for `max_tokens` and `stop`.
- #
- # @option params [Float, nil] :presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on
- # whether they appear in the text so far, increasing the model's likelihood to
- # talk about new topics.
- #
- # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
- #
- # @option params [Integer, nil] :seed If specified, our system will make a best effort to sample deterministically,
- # such that repeated requests with the same `seed` and parameters should return
- # the same result.
- #
- # Determinism is not guaranteed, and you should refer to the `system_fingerprint`
- # response parameter to monitor changes in the backend.
- #
- # @option params [String, Array, nil] :stop Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
- #
- # @option params [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] :stream_options Options for streaming response. Only set this when you set `stream: true`.
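The `logit_bias` entry above gives the canonical ban example. Rendered through this SDK's `Hash{Symbol=>Integer}` type, a sketch (the model follows the `suffix` note below; the prompt is hypothetical):

  completion = client.completions.create(
    model: "gpt-3.5-turbo-instruct",
    prompt: "Once upon a time",
    max_tokens: 32,
    logit_bias: {"50256": -100} # bans <|endoftext|>, per the entry above
  )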
- # - # @option params [String, nil] :suffix The suffix that comes after a completion of inserted text. - # - # This parameter is only supported for `gpt-3.5-turbo-instruct`. - # - # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. - # - # We generally recommend altering this or `top_p` but not both. - # - # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. - # - # We generally recommend altering this or `temperature` but not both. - # - # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) + # + # @param model [String, Symbol] + # @param prompt [String, Array, Array, Array>, nil] + # @param best_of [Integer, nil] + # @param echo [Boolean, nil] + # @param frequency_penalty [Float, nil] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Integer, nil] + # @param max_tokens [Integer, nil] + # @param n [Integer, nil] + # @param presence_penalty [Float, nil] + # @param seed [Integer, nil] + # @param stop [String, Array, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param suffix [String, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Completion] # @@ -132,112 +46,26 @@ def create(params) # Creates a completion for the provided prompt and parameters. # - # @param params [OpenAI::Models::CompletionCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String, Symbol] :model ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. - # - # @option params [String, Array, Array, Array>, nil] :prompt The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. - # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. - # - # @option params [Integer, nil] :best_of Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. - # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. 
@@ -132,112 +46,26 @@ def create(params)
       # Creates a completion for the provided prompt and parameters.
       #
-      # @param params [OpenAI::Models::CompletionCreateParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String, Symbol] :model ID of the model to use. You can use the
-      #   [List models](https://platform.openai.com/docs/api-reference/models/list) API to
-      #   see all of your available models, or see our
-      #   [Model overview](https://platform.openai.com/docs/models) for descriptions of
-      #   them.
-      #
-      # @option params [String, Array<String>, Array<Integer>, Array<Array<Integer>>, nil] :prompt The prompt(s) to generate completions for, encoded as a string, array of
-      #   strings, array of tokens, or array of token arrays.
-      #
-      #   Note that <|endoftext|> is the document separator that the model sees during
-      #   training, so if a prompt is not specified the model will generate as if from the
-      #   beginning of a new document.
-      #
-      # @option params [Integer, nil] :best_of Generates `best_of` completions server-side and returns the "best" (the one with
-      #   the highest log probability per token). Results cannot be streamed.
-      #
-      #   When used with `n`, `best_of` controls the number of candidate completions and
-      #   `n` specifies how many to return – `best_of` must be greater than `n`.
-      #
-      #   **Note:** Because this parameter generates many completions, it can quickly
-      #   consume your token quota. Use carefully and ensure that you have reasonable
-      #   settings for `max_tokens` and `stop`.
-      #
-      # @option params [Boolean, nil] :echo Echo back the prompt in addition to the completion
-      #
-      # @option params [Float, nil] :frequency_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on their
-      #   existing frequency in the text so far, decreasing the model's likelihood to
-      #   repeat the same line verbatim.
-      #
-      #   [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
-      #
-      # @option params [Hash{Symbol=>Integer}, nil] :logit_bias Modify the likelihood of specified tokens appearing in the completion.
-      #
-      #   Accepts a JSON object that maps tokens (specified by their token ID in the GPT
-      #   tokenizer) to an associated bias value from -100 to 100. You can use this
-      #   [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs.
-      #   Mathematically, the bias is added to the logits generated by the model prior to
-      #   sampling. The exact effect will vary per model, but values between -1 and 1
-      #   should decrease or increase likelihood of selection; values like -100 or 100
-      #   should result in a ban or exclusive selection of the relevant token.
-      #
-      #   As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
-      #   from being generated.
-      #
-      # @option params [Integer, nil] :logprobs Include the log probabilities on the `logprobs` most likely output tokens, as
-      #   well the chosen tokens. For example, if `logprobs` is 5, the API will return a
-      #   list of the 5 most likely tokens. The API will always return the `logprob` of
-      #   the sampled token, so there may be up to `logprobs+1` elements in the response.
-      #
-      #   The maximum value for `logprobs` is 5.
-      #
-      # @option params [Integer, nil] :max_tokens The maximum number of [tokens](/tokenizer) that can be generated in the
-      #   completion.
-      #
-      #   The token count of your prompt plus `max_tokens` cannot exceed the model's
-      #   context length.
-      #   [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-      #   for counting tokens.
-      #
-      # @option params [Integer, nil] :n How many completions to generate for each prompt.
-      #
-      #   **Note:** Because this parameter generates many completions, it can quickly
-      #   consume your token quota. Use carefully and ensure that you have reasonable
-      #   settings for `max_tokens` and `stop`.
-      #
-      # @option params [Float, nil] :presence_penalty Number between -2.0 and 2.0. Positive values penalize new tokens based on
-      #   whether they appear in the text so far, increasing the model's likelihood to
-      #   talk about new topics.
-      #
-      #   [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
-      #
-      # @option params [Integer, nil] :seed If specified, our system will make a best effort to sample deterministically,
-      #   such that repeated requests with the same `seed` and parameters should return
-      #   the same result.
-      #
-      #   Determinism is not guaranteed, and you should refer to the `system_fingerprint`
-      #   response parameter to monitor changes in the backend.
-      #
-      # @option params [String, Array, nil] :stop Up to 4 sequences where the API will stop generating further tokens. The
-      #   returned text will not contain the stop sequence.
-      #
-      # @option params [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] :stream_options Options for streaming response. Only set this when you set `stream: true`.
-      #
-      # @option params [String, nil] :suffix The suffix that comes after a completion of inserted text.
-      #
-      #   This parameter is only supported for `gpt-3.5-turbo-instruct`.
-      #
-      # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-      #   make the output more random, while lower values like 0.2 will make it more
-      #   focused and deterministic.
-      #
-      #   We generally recommend altering this or `top_p` but not both.
-      #
-      # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the
-      #   model considers the results of the tokens with top_p probability mass. So 0.1
-      #   means only the tokens comprising the top 10% probability mass are considered.
-      #
-      #   We generally recommend altering this or `temperature` but not both.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {})
+      #
+      # @param model [String, Symbol]
+      # @param prompt [String, Array<String>, Array<Integer>, Array<Array<Integer>>, nil]
+      # @param best_of [Integer, nil]
+      # @param echo [Boolean, nil]
+      # @param frequency_penalty [Float, nil]
+      # @param logit_bias [Hash{Symbol=>Integer}, nil]
+      # @param logprobs [Integer, nil]
+      # @param max_tokens [Integer, nil]
+      # @param n [Integer, nil]
+      # @param presence_penalty [Float, nil]
+      # @param seed [Integer, nil]
+      # @param stop [String, Array, nil]
+      # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil]
+      # @param suffix [String, nil]
+      # @param temperature [Float, nil]
+      # @param top_p [Float, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Stream]
       #
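For the streaming variant the parameters are identical but the return value is an `OpenAI::Stream`. A sketch, assuming the stream is enumerable chunk by chunk:

    stream = client.completions.create_streaming(
      model: "gpt-3.5-turbo-instruct",
      prompt: "Write one sentence about Ruby."
    )
    # Each yielded item is a partial Completion; print text deltas as they arrive.
    stream.each do |chunk|
      print chunk.choices.first.text
    end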
diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb
index dd58f2c8..f9a2281d 100644
--- a/lib/openai/resources/embeddings.rb
+++ b/lib/openai/resources/embeddings.rb
@@ -5,34 +5,14 @@ module Resources
     class Embeddings
       # Creates an embedding vector representing the input text.
       #
-      # @param params [OpenAI::Models::EmbeddingCreateParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String, Array<String>, Array<Integer>, Array<Array<Integer>>] :input Input text to embed, encoded as a string or array of tokens. To embed multiple
-      #   inputs in a single request, pass an array of strings or array of token arrays.
-      #   The input must not exceed the max input tokens for the model (8192 tokens for
-      #   `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
-      #   dimensions or less.
-      #   [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-      #   for counting tokens. Some models may also impose a limit on total number of
-      #   tokens summed across inputs.
-      #
-      # @option params [String, Symbol, OpenAI::Models::EmbeddingModel] :model ID of the model to use. You can use the
-      #   [List models](https://platform.openai.com/docs/api-reference/models/list) API to
-      #   see all of your available models, or see our
-      #   [Model overview](https://platform.openai.com/docs/models) for descriptions of
-      #   them.
-      #
-      # @option params [Integer] :dimensions The number of dimensions the resulting output embeddings should have. Only
-      #   supported in `text-embedding-3` and later models.
-      #
-      # @option params [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] :encoding_format The format to return the embeddings in. Can be either `float` or
-      #   [`base64`](https://pypi.org/project/pybase64/).
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @overload create(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {})
+      #
+      # @param input [String, Array<String>, Array<Integer>, Array<Array<Integer>>]
+      # @param model [String, Symbol, OpenAI::Models::EmbeddingModel]
+      # @param dimensions [Integer]
+      # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::CreateEmbeddingResponse]
       #
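A usage sketch for the embeddings signature (the model name is illustrative, and the response readers follow the standard `CreateEmbeddingResponse` shape):

    embedding = client.embeddings.create(
      model: "text-embedding-3-small",
      input: "Sample text to embed"
    )
    # The response carries one vector per input.
    vector = embedding.data.first.embedding
    puts vector.length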
diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb
index 48756391..2d8cf4e3 100644
--- a/lib/openai/resources/files.rb
+++ b/lib/openai/resources/files.rb
@@ -25,16 +25,11 @@ class Files
       # Please [contact us](https://help.openai.com/) if you need to increase these
       # storage limits.
       #
-      # @param params [OpenAI::Models::FileCreateParams, Hash{Symbol=>Object}] .
+      # @overload create(file:, purpose:, request_options: {})
       #
-      # @option params [IO, StringIO] :file The File object (not file name) to be uploaded.
-      #
-      # @option params [Symbol, OpenAI::Models::FilePurpose] :purpose The intended purpose of the uploaded file. One of: - `assistants`: Used in the
-      #   Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for
-      #   fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`:
-      #   Flexible file type for any purpose - `evals`: Used for eval data sets
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param file [IO, StringIO]
+      # @param purpose [Symbol, OpenAI::Models::FilePurpose]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::FileObject]
       #
@@ -53,11 +48,10 @@ def create(params)
       # Returns information about a specific file.
       #
-      # @param file_id [String] The ID of the file to use for this request.
-      #
-      # @param params [OpenAI::Models::FileRetrieveParams, Hash{Symbol=>Object}] .
+      # @overload retrieve(file_id, request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param file_id [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::FileObject]
       #
@@ -73,22 +67,13 @@ def retrieve(file_id, params = {})
       # Returns a list of files.
       #
-      # @param params [OpenAI::Models::FileListParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place
-      #   in the list. For instance, if you make a list request and receive 100 objects,
-      #   ending with obj_foo, your subsequent call can include after=obj_foo in order to
-      #   fetch the next page of the list.
-      #
-      # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and
-      #   10,000, and the default is 10,000.
+      # @overload list(after: nil, limit: nil, order: nil, purpose: nil, request_options: {})
       #
-      # @option params [Symbol, OpenAI::Models::FileListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending
-      #   order and `desc` for descending order.
-      #
-      # @option params [String] :purpose Only return files with the given purpose.
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param after [String]
+      # @param limit [Integer]
+      # @param order [Symbol, OpenAI::Models::FileListParams::Order]
+      # @param purpose [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::CursorPage]
       #
@@ -107,11 +92,10 @@ def list(params = {})
       # Delete a file.
       #
-      # @param file_id [String] The ID of the file to use for this request.
+      # @overload delete(file_id, request_options: {})
       #
-      # @param params [OpenAI::Models::FileDeleteParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param file_id [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::FileDeleted]
       #
@@ -127,11 +111,10 @@ def delete(file_id, params = {})
       # Returns the contents of the specified file.
       #
-      # @param file_id [String] The ID of the file to use for this request.
-      #
-      # @param params [OpenAI::Models::FileContentParams, Hash{Symbol=>Object}] .
+      # @overload content(file_id, request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param file_id [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [StringIO]
       #
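A sketch of the file upload call; any `IO` (here a `File`) satisfies the `file:` param, and `purpose:` takes a `FilePurpose` value such as `:"fine-tune"` (the filename is a placeholder):

    jsonl = File.open("train.jsonl", "rb")
    file = client.files.create(file: jsonl, purpose: :"fine-tune")
    puts file.id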
diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb
index fd4eb576..e09a74fe 100644
--- a/lib/openai/resources/fine_tuning/jobs.rb
+++ b/lib/openai/resources/fine_tuning/jobs.rb
@@ -15,67 +15,18 @@ class Jobs
         #
         # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
         #
-        # @param params [OpenAI::Models::FineTuning::JobCreateParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [String, Symbol] :model The name of the model to fine-tune. You can select one of the
-        #   [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
-        #
-        # @option params [String] :training_file The ID of an uploaded file that contains training data.
-        #
-        #   See [upload file](https://platform.openai.com/docs/api-reference/files/create)
-        #   for how to upload a file.
-        #
-        #   Your dataset must be formatted as a JSONL file. Additionally, you must upload
-        #   your file with the purpose `fine-tune`.
-        #
-        #   The contents of the file should differ depending on if the model uses the
-        #   [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input),
-        #   [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
-        #   format, or if the fine-tuning method uses the
-        #   [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input)
-        #   format.
-        #
-        #   See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
-        #   for more details.
-        #
-        # @option params [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] :hyperparameters The hyperparameters used for the fine-tuning job. This value is now deprecated
-        #   in favor of `method`, and should be passed in under the `method` parameter.
-        #
-        # @option params [Array, nil] :integrations A list of integrations to enable for your fine-tuning job.
-        #
-        # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful
-        #   for storing additional information about the object in a structured format, and
-        #   querying for objects via API or the dashboard.
-        #
-        #   Keys are strings with a maximum length of 64 characters. Values are strings with
-        #   a maximum length of 512 characters.
-        #
-        # @option params [OpenAI::Models::FineTuning::JobCreateParams::Method] :method_ The method used for fine-tuning.
-        #
-        # @option params [Integer, nil] :seed The seed controls the reproducibility of the job. Passing in the same seed and
-        #   job parameters should produce the same results, but may differ in rare cases. If
-        #   a seed is not specified, one will be generated for you.
-        #
-        # @option params [String, nil] :suffix A string of up to 64 characters that will be added to your fine-tuned model
-        #   name.
-        #
-        #   For example, a `suffix` of "custom-model-name" would produce a model name like
-        #   `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`.
-        #
-        # @option params [String, nil] :validation_file The ID of an uploaded file that contains validation data.
-        #
-        #   If you provide this file, the data is used to generate validation metrics
-        #   periodically during fine-tuning. These metrics can be viewed in the fine-tuning
-        #   results file. The same data should not be present in both train and validation
-        #   files.
-        #
-        #   Your dataset must be formatted as a JSONL file. You must upload your file with
-        #   the purpose `fine-tune`.
-        #
-        #   See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning)
-        #   for more details.
-        #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {})
+        #
+        # @param model [String, Symbol]
+        # @param training_file [String]
+        # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters]
+        # @param integrations [Array, nil]
+        # @param metadata [Hash{Symbol=>String}, nil]
+        # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method]
+        # @param seed [Integer, nil]
+        # @param suffix [String, nil]
+        # @param validation_file [String, nil]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::Models::FineTuning::FineTuningJob]
         #
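A sketch of kicking off a fine-tuning job with the new signature. The file ID is a placeholder, and note that the `method_` keyword keeps its trailing underscore to avoid clashing with Ruby's `Object#method`:

    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini",
      training_file: "file-abc123",   # placeholder ID from a prior files.create
      suffix: "custom-model-name"
    )
    puts job.status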
@@ -95,11 +46,10 @@ def create(params)
         #
         # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)
         #
-        # @param fine_tuning_job_id [String] The ID of the fine-tuning job.
-        #
-        # @param params [OpenAI::Models::FineTuning::JobRetrieveParams, Hash{Symbol=>Object}] .
+        # @overload retrieve(fine_tuning_job_id, request_options: {})
         #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @param fine_tuning_job_id [String]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::Models::FineTuning::FineTuningJob]
         #
@@ -115,16 +65,12 @@ def retrieve(fine_tuning_job_id, params = {})
         # List your organization's fine-tuning jobs
         #
-        # @param params [OpenAI::Models::FineTuning::JobListParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [String] :after Identifier for the last job from the previous pagination request.
+        # @overload list(after: nil, limit: nil, metadata: nil, request_options: {})
         #
-        # @option params [Integer] :limit Number of fine-tuning jobs to retrieve.
-        #
-        # @option params [Hash{Symbol=>String}, nil] :metadata Optional metadata filter. To filter, use the syntax `metadata[k]=v`.
-        #   Alternatively, set `metadata=null` to indicate no metadata.
-        #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @param after [String]
+        # @param limit [Integer]
+        # @param metadata [Hash{Symbol=>String}, nil]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::CursorPage]
         #
@@ -143,11 +89,10 @@ def list(params = {})
         # Immediately cancel a fine-tune job.
         #
-        # @param fine_tuning_job_id [String] The ID of the fine-tuning job to cancel.
+        # @overload cancel(fine_tuning_job_id, request_options: {})
         #
-        # @param params [OpenAI::Models::FineTuning::JobCancelParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @param fine_tuning_job_id [String]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::Models::FineTuning::FineTuningJob]
        #
@@ -163,15 +108,12 @@ def cancel(fine_tuning_job_id, params = {})
         # Get status updates for a fine-tuning job.
         #
-        # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get events for.
-        #
-        # @param params [OpenAI::Models::FineTuning::JobListEventsParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [String] :after Identifier for the last event from the previous pagination request.
-        #
-        # @option params [Integer] :limit Number of events to retrieve.
+        # @overload list_events(fine_tuning_job_id, after: nil, limit: nil, request_options: {})
         #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @param fine_tuning_job_id [String]
+        # @param after [String]
+        # @param limit [Integer]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::CursorPage]
         #
diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb
index de62c140..0aa37907 100644
--- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb
+++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb
@@ -7,15 +7,12 @@ class Jobs
         class Checkpoints
           # List checkpoints for a fine-tuning job.
           #
-          # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get checkpoints for.
+          # @overload list(fine_tuning_job_id, after: nil, limit: nil, request_options: {})
           #
-          # @param params [OpenAI::Models::FineTuning::Jobs::CheckpointListParams, Hash{Symbol=>Object}] .
-          #
-          # @option params [String] :after Identifier for the last checkpoint ID from the previous pagination request.
-          #
-          # @option params [Integer] :limit Number of checkpoints to retrieve.
-          #
-          # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+          # @param fine_tuning_job_id [String]
+          # @param after [String]
+          # @param limit [Integer]
+          # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
           #
           # @return [OpenAI::CursorPage]
           #
diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb
index 0464b4d4..ba4e4984 100644
--- a/lib/openai/resources/images.rb
+++ b/lib/openai/resources/images.rb
@@ -5,29 +5,15 @@ module Resources
     class Images
       # Creates a variation of a given image.
       #
-      # @param params [OpenAI::Models::ImageCreateVariationParams, Hash{Symbol=>Object}] .
+      # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {})
       #
-      # @option params [IO, StringIO] :image The image to use as the basis for the variation(s). Must be a valid PNG file,
-      #   less than 4MB, and square.
-      #
-      # @option params [String, Symbol, OpenAI::Models::ImageModel, nil] :model The model to use for image generation. Only `dall-e-2` is supported at this
-      #   time.
-      #
-      # @option params [Integer, nil] :n The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
-      #   `n=1` is supported.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] :response_format The format in which the generated images are returned. Must be one of `url` or
-      #   `b64_json`. URLs are only valid for 60 minutes after the image has been
-      #   generated.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] :size The size of the generated images. Must be one of `256x256`, `512x512`, or
-      #   `1024x1024`.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param image [IO, StringIO]
+      # @param model [String, Symbol, OpenAI::Models::ImageModel, nil]
+      # @param n [Integer, nil]
+      # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil]
+      # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::ImagesResponse]
       #
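A sketch for image variations; the size value is one of the documented enum strings, passed here as a symbol (enum coercion and the PNG filename are assumptions):

    variation = client.images.create_variation(
      image: File.open("otter.png", "rb"),
      n: 1,
      size: :"1024x1024"
    )
    puts variation.data.first.url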
@@ -46,35 +32,17 @@ def create_variation(params)
       # Creates an edited or extended image given an original image and a prompt.
       #
-      # @param params [OpenAI::Models::ImageEditParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [IO, StringIO] :image The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask
-      #   is not provided, image must have transparency, which will be used as the mask.
-      #
-      # @option params [String] :prompt A text description of the desired image(s). The maximum length is 1000
-      #   characters.
-      #
-      # @option params [IO, StringIO] :mask An additional image whose fully transparent areas (e.g. where alpha is zero)
-      #   indicate where `image` should be edited. Must be a valid PNG file, less than
-      #   4MB, and have the same dimensions as `image`.
-      #
-      # @option params [String, Symbol, OpenAI::Models::ImageModel, nil] :model The model to use for image generation. Only `dall-e-2` is supported at this
-      #   time.
-      #
-      # @option params [Integer, nil] :n The number of images to generate. Must be between 1 and 10.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] :response_format The format in which the generated images are returned. Must be one of `url` or
-      #   `b64_json`. URLs are only valid for 60 minutes after the image has been
-      #   generated.
+      # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {})
       #
-      # @option params [Symbol, OpenAI::Models::ImageEditParams::Size, nil] :size The size of the generated images. Must be one of `256x256`, `512x512`, or
-      #   `1024x1024`.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param image [IO, StringIO]
+      # @param prompt [String]
+      # @param mask [IO, StringIO]
+      # @param model [String, Symbol, OpenAI::Models::ImageModel, nil]
+      # @param n [Integer, nil]
+      # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil]
+      # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::ImagesResponse]
       #
@@ -93,38 +61,17 @@ def edit(params)
       # Creates an image given a prompt.
       #
-      # @param params [OpenAI::Models::ImageGenerateParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String] :prompt A text description of the desired image(s). The maximum length is 1000
-      #   characters for `dall-e-2` and 4000 characters for `dall-e-3`.
-      #
-      # @option params [String, Symbol, OpenAI::Models::ImageModel, nil] :model The model to use for image generation.
-      #
-      # @option params [Integer, nil] :n The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
-      #   `n=1` is supported.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageGenerateParams::Quality] :quality The quality of the image that will be generated. `hd` creates images with finer
-      #   details and greater consistency across the image. This param is only supported
-      #   for `dall-e-3`.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] :response_format The format in which the generated images are returned. Must be one of `url` or
-      #   `b64_json`. URLs are only valid for 60 minutes after the image has been
-      #   generated.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] :size The size of the generated images. Must be one of `256x256`, `512x512`, or
-      #   `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or
-      #   `1024x1792` for `dall-e-3` models.
-      #
-      # @option params [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] :style The style of the generated images. Must be one of `vivid` or `natural`. Vivid
-      #   causes the model to lean towards generating hyper-real and dramatic images.
-      #   Natural causes the model to produce more natural, less hyper-real looking
-      #   images. This param is only supported for `dall-e-3`.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @overload generate(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {})
+      #
+      # @param prompt [String]
+      # @param model [String, Symbol, OpenAI::Models::ImageModel, nil]
+      # @param n [Integer, nil]
+      # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality]
+      # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil]
+      # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil]
+      # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::ImagesResponse]
       #
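And a sketch for plain generation with the compact signature (model and size values taken from the documented enums):

    images = client.images.generate(
      prompt: "A watercolor lighthouse at dusk",
      model: "dall-e-3",
      size: :"1024x1024"
    )
    puts images.data.first.url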
diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb
index 044d32d7..c00abfbc 100644
--- a/lib/openai/resources/models.rb
+++ b/lib/openai/resources/models.rb
@@ -6,11 +6,10 @@ class Models
       # Retrieves a model instance, providing basic information about the model such as
       # the owner and permissioning.
       #
-      # @param model [String] The ID of the model to use for this request
+      # @overload retrieve(model, request_options: {})
       #
-      # @param params [OpenAI::Models::ModelRetrieveParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param model [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Model]
       #
@@ -27,9 +26,9 @@ def retrieve(model, params = {})
       # Lists the currently available models, and provides basic information about each
       # one such as the owner and availability.
       #
-      # @param params [OpenAI::Models::ModelListParams, Hash{Symbol=>Object}] .
+      # @overload list(request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Page]
       #
@@ -47,11 +46,10 @@ def list(params = {})
       # Delete a fine-tuned model. You must have the Owner role in your organization to
       # delete a model.
       #
-      # @param model [String] The model to delete
+      # @overload delete(model, request_options: {})
       #
-      # @param params [OpenAI::Models::ModelDeleteParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param model [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::ModelDeleted]
       #
diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb
index aa98d908..802ccbd1 100644
--- a/lib/openai/resources/moderations.rb
+++ b/lib/openai/resources/moderations.rb
@@ -6,17 +6,11 @@ class Moderations
       # Classifies if text and/or image inputs are potentially harmful. Learn more in
       # the [moderation guide](https://platform.openai.com/docs/guides/moderation).
       #
-      # @param params [OpenAI::Models::ModerationCreateParams, Hash{Symbol=>Object}] .
+      # @overload create(input:, model: nil, request_options: {})
       #
-      # @option params [String, Array, Array] :input Input (or inputs) to classify. Can be a single string, an array of strings, or
-      #   an array of multi-modal input objects similar to other models.
-      #
-      # @option params [String, Symbol, OpenAI::Models::ModerationModel] :model The content moderation model you would like to use. Learn more in
-      #   [the moderation guide](https://platform.openai.com/docs/guides/moderation), and
-      #   learn about available models
-      #   [here](https://platform.openai.com/docs/models#moderation).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param input [String, Array, Array]
+      # @param model [String, Symbol, OpenAI::Models::ModerationModel]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::ModerationCreateResponse]
       #
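A sketch of a moderation call; only `input:` is required, and the result readers follow the standard `ModerationCreateResponse` shape:

    moderation = client.moderations.create(input: "Some user-supplied text")
    result = moderation.results.first
    puts result.flagged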
diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb
index 7bd97e6e..7b1561d9 100644
--- a/lib/openai/resources/responses.rb
+++ b/lib/openai/resources/responses.rb
@@ -18,113 +18,26 @@ class Responses
       # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
       # your own data as input for the model's response.
       #
-      # @param params [OpenAI::Models::Responses::ResponseCreateParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String, Array] :input Text, image, or file inputs to the model, used to generate a response.
-      #
-      #   Learn more:
-      #
-      #   - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-      #   - [Image inputs](https://platform.openai.com/docs/guides/images)
-      #   - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
-      #   - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
-      #   - [Function calling](https://platform.openai.com/docs/guides/function-calling)
-      #
-      # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
-      #   wide range of models with different capabilities, performance characteristics,
-      #   and price points. Refer to the
-      #   [model guide](https://platform.openai.com/docs/models) to browse and compare
-      #   available models.
-      #
-      # @option params [Array, nil] :include Specify additional output data to include in the model response. Currently
-      #   supported values are:
-      #
-      #   - `file_search_call.results`: Include the search results of the file search tool
-      #     call.
-      #   - `message.input_image.image_url`: Include image urls from the input message.
-      #   - `computer_call_output.output.image_url`: Include image urls from the computer
-      #     call output.
-      #
-      # @option params [String, nil] :instructions Inserts a system (or developer) message as the first item in the model's
-      #   context.
-      #
-      #   When using along with `previous_response_id`, the instructions from a previous
-      #   response will not be carried over to the next response. This makes it simple to
-      #   swap out system (or developer) messages in new responses.
-      #
-      # @option params [Integer, nil] :max_output_tokens An upper bound for the number of tokens that can be generated for a response,
-      #   including visible output tokens and
-      #   [reasoning tokens](https://platform.openai.com/docs/guides/reasoning).
-      #
-      # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful
-      #   for storing additional information about the object in a structured format, and
-      #   querying for objects via API or the dashboard.
-      #
-      #   Keys are strings with a maximum length of 64 characters. Values are strings with
-      #   a maximum length of 512 characters.
-      #
-      # @option params [Boolean, nil] :parallel_tool_calls Whether to allow the model to run tool calls in parallel.
-      #
-      # @option params [String, nil] :previous_response_id The unique ID of the previous response to the model. Use this to create
-      #   multi-turn conversations. Learn more about
-      #   [conversation state](https://platform.openai.com/docs/guides/conversation-state).
-      #
-      # @option params [OpenAI::Models::Reasoning, nil] :reasoning **o-series models only**
-      #
-      #   Configuration options for
-      #   [reasoning models](https://platform.openai.com/docs/guides/reasoning).
-      #
-      # @option params [Boolean, nil] :store Whether to store the generated model response for later retrieval via API.
-      #
-      # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-      #   make the output more random, while lower values like 0.2 will make it more
-      #   focused and deterministic. We generally recommend altering this or `top_p` but
-      #   not both.
-      #
-      # @option params [OpenAI::Models::Responses::ResponseTextConfig] :text Configuration options for a text response from the model. Can be plain text or
-      #   structured JSON data. Learn more:
-      #
-      #   - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-      #   - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
-      #
-      # @option params [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] :tool_choice How the model should select which tool (or tools) to use when generating a
-      #   response. See the `tools` parameter to see how to specify which tools the model
-      #   can call.
-      #
-      # @option params [Array] :tools An array of tools the model may call while generating a response. You can
-      #   specify which tool to use by setting the `tool_choice` parameter.
-      #
-      #   The two categories of tools you can provide the model are:
-      #
-      #   - **Built-in tools**: Tools that are provided by OpenAI that extend the model's
-      #     capabilities, like
-      #     [web search](https://platform.openai.com/docs/guides/tools-web-search) or
-      #     [file search](https://platform.openai.com/docs/guides/tools-file-search).
-      #     Learn more about
-      #     [built-in tools](https://platform.openai.com/docs/guides/tools).
-      #   - **Function calls (custom tools)**: Functions that are defined by you, enabling
-      #     the model to call your own code. Learn more about
-      #     [function calling](https://platform.openai.com/docs/guides/function-calling).
-      #
-      # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the
-      #   model considers the results of the tokens with top_p probability mass. So 0.1
-      #   means only the tokens comprising the top 10% probability mass are considered.
-      #
-      #   We generally recommend altering this or `temperature` but not both.
-      #
-      # @option params [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] :truncation The truncation strategy to use for the model response.
-      #
-      #   - `auto`: If the context of this response and previous ones exceeds the model's
-      #     context window size, the model will truncate the response to fit the context
-      #     window by dropping input items in the middle of the conversation.
-      #   - `disabled` (default): If a model response will exceed the context window size
-      #     for a model, the request will fail with a 400 error.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+      #
+      # @param input [String, Array]
+      # @param model [String, Symbol, OpenAI::Models::ChatModel]
+      # @param include [Array, nil]
+      # @param instructions [String, nil]
+      # @param max_output_tokens [Integer, nil]
+      # @param metadata [Hash{Symbol=>String}, nil]
+      # @param parallel_tool_calls [Boolean, nil]
+      # @param previous_response_id [String, nil]
+      # @param reasoning [OpenAI::Models::Reasoning, nil]
+      # @param store [Boolean, nil]
+      # @param temperature [Float, nil]
+      # @param text [OpenAI::Models::Responses::ResponseTextConfig]
+      # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
+      # @param tools [Array]
+      # @param top_p [Float, nil]
+      # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Responses::Response]
       #
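A sketch of the Responses call shape; the `output` reader used below is the standard Response model accessor and is assumed here:

    response = client.responses.create(
      model: "gpt-4o",
      input: "Summarize Hamlet in one sentence.",
      temperature: 0.2
    )
    # Output is a list of typed items (messages, tool calls, and so on).
    response.output.each { |item| puts item.inspect }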
@@ -156,113 +69,26 @@ def create(params)
       # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
       # your own data as input for the model's response.
       #
-      # @param params [OpenAI::Models::Responses::ResponseCreateParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [String, Array] :input Text, image, or file inputs to the model, used to generate a response.
-      #
-      #   Learn more:
-      #
-      #   - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-      #   - [Image inputs](https://platform.openai.com/docs/guides/images)
-      #   - [File inputs](https://platform.openai.com/docs/guides/pdf-files)
-      #   - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
-      #   - [Function calling](https://platform.openai.com/docs/guides/function-calling)
-      #
-      # @option params [String, Symbol, OpenAI::Models::ChatModel] :model Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
-      #   wide range of models with different capabilities, performance characteristics,
-      #   and price points. Refer to the
-      #   [model guide](https://platform.openai.com/docs/models) to browse and compare
-      #   available models.
-      #
-      # @option params [Array, nil] :include Specify additional output data to include in the model response. Currently
-      #   supported values are:
-      #
-      #   - `file_search_call.results`: Include the search results of the file search tool
-      #     call.
-      #   - `message.input_image.image_url`: Include image urls from the input message.
-      #   - `computer_call_output.output.image_url`: Include image urls from the computer
-      #     call output.
-      #
-      # @option params [String, nil] :instructions Inserts a system (or developer) message as the first item in the model's
-      #   context.
-      #
-      #   When using along with `previous_response_id`, the instructions from a previous
-      #   response will not be carried over to the next response. This makes it simple to
-      #   swap out system (or developer) messages in new responses.
-      #
-      # @option params [Integer, nil] :max_output_tokens An upper bound for the number of tokens that can be generated for a response,
-      #   including visible output tokens and
-      #   [reasoning tokens](https://platform.openai.com/docs/guides/reasoning).
-      #
-      # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful
-      #   for storing additional information about the object in a structured format, and
-      #   querying for objects via API or the dashboard.
-      #
-      #   Keys are strings with a maximum length of 64 characters. Values are strings with
-      #   a maximum length of 512 characters.
-      #
-      # @option params [Boolean, nil] :parallel_tool_calls Whether to allow the model to run tool calls in parallel.
-      #
-      # @option params [String, nil] :previous_response_id The unique ID of the previous response to the model. Use this to create
-      #   multi-turn conversations. Learn more about
-      #   [conversation state](https://platform.openai.com/docs/guides/conversation-state).
-      #
-      # @option params [OpenAI::Models::Reasoning, nil] :reasoning **o-series models only**
-      #
-      #   Configuration options for
-      #   [reasoning models](https://platform.openai.com/docs/guides/reasoning).
-      #
-      # @option params [Boolean, nil] :store Whether to store the generated model response for later retrieval via API.
-      #
-      # @option params [Float, nil] :temperature What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-      #   make the output more random, while lower values like 0.2 will make it more
-      #   focused and deterministic. We generally recommend altering this or `top_p` but
-      #   not both.
-      #
-      # @option params [OpenAI::Models::Responses::ResponseTextConfig] :text Configuration options for a text response from the model. Can be plain text or
-      #   structured JSON data. Learn more:
-      #
-      #   - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
-      #   - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
-      #
-      # @option params [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] :tool_choice How the model should select which tool (or tools) to use when generating a
-      #   response. See the `tools` parameter to see how to specify which tools the model
-      #   can call.
-      #
-      # @option params [Array] :tools An array of tools the model may call while generating a response. You can
-      #   specify which tool to use by setting the `tool_choice` parameter.
-      #
-      #   The two categories of tools you can provide the model are:
-      #
-      #   - **Built-in tools**: Tools that are provided by OpenAI that extend the model's
-      #     capabilities, like
-      #     [web search](https://platform.openai.com/docs/guides/tools-web-search) or
-      #     [file search](https://platform.openai.com/docs/guides/tools-file-search).
-      #     Learn more about
-      #     [built-in tools](https://platform.openai.com/docs/guides/tools).
-      #   - **Function calls (custom tools)**: Functions that are defined by you, enabling
-      #     the model to call your own code. Learn more about
-      #     [function calling](https://platform.openai.com/docs/guides/function-calling).
-      #
-      # @option params [Float, nil] :top_p An alternative to sampling with temperature, called nucleus sampling, where the
-      #   model considers the results of the tokens with top_p probability mass. So 0.1
-      #   means only the tokens comprising the top 10% probability mass are considered.
-      #
-      #   We generally recommend altering this or `temperature` but not both.
-      #
-      # @option params [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] :truncation The truncation strategy to use for the model response.
-      #
-      #   - `auto`: If the context of this response and previous ones exceeds the model's
-      #     context window size, the model will truncate the response to fit the context
-      #     window by dropping input items in the middle of the conversation.
-      #   - `disabled` (default): If a model response will exceed the context window size
-      #     for a model, the request will fail with a 400 error.
-      #
-      # @option params [String] :user A unique identifier representing your end-user, which can help OpenAI to monitor
-      #   and detect abuse.
-      #   [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @overload create_streaming(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+      #
+      # @param input [String, Array]
+      # @param model [String, Symbol, OpenAI::Models::ChatModel]
+      # @param include [Array, nil]
+      # @param instructions [String, nil]
+      # @param max_output_tokens [Integer, nil]
+      # @param metadata [Hash{Symbol=>String}, nil]
+      # @param parallel_tool_calls [Boolean, nil]
+      # @param previous_response_id [String, nil]
+      # @param reasoning [OpenAI::Models::Reasoning, nil]
+      # @param store [Boolean, nil]
+      # @param temperature [Float, nil]
+      # @param text [OpenAI::Models::Responses::ResponseTextConfig]
+      # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
+      # @param tools [Array]
+      # @param top_p [Float, nil]
+      # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil]
+      # @param user [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Stream]
       #
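The streaming variant returns an `OpenAI::Stream` of typed server-sent events; a sketch (event shapes assumed, so the example just inspects them):

    stream = client.responses.create_streaming(
      model: "gpt-4o",
      input: "Write a limerick about HTTP."
    )
    stream.each do |event|
      puts event.inspect
    end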
@@ -287,14 +113,11 @@ def create_streaming(params)
       # Retrieves a model response with the given ID.
       #
-      # @param response_id [String] The ID of the response to retrieve.
+      # @overload retrieve(response_id, include: nil, request_options: {})
       #
-      # @param params [OpenAI::Models::Responses::ResponseRetrieveParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [Array] :include Additional fields to include in the response. See the `include` parameter for
-      #   Response creation above for more information.
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param response_id [String]
+      # @param include [Array]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Responses::Response]
       #
@@ -312,11 +135,10 @@ def retrieve(response_id, params = {})
       # Deletes a model response with the given ID.
       #
-      # @param response_id [String] The ID of the response to delete.
-      #
-      # @param params [OpenAI::Models::Responses::ResponseDeleteParams, Hash{Symbol=>Object}] .
+      # @overload delete(response_id, request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param response_id [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [nil]
       #
diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb
index 15d5fd19..42b9fb43 100644
--- a/lib/openai/resources/responses/input_items.rb
+++ b/lib/openai/resources/responses/input_items.rb
@@ -6,26 +6,15 @@ class Responses
       class InputItems
         # Returns a list of input items for a given response.
         #
-        # @param response_id [String] The ID of the response to retrieve input items for.
-        #
-        # @param params [OpenAI::Models::Responses::InputItemListParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [String] :after An item ID to list items after, used in pagination.
-        #
-        # @option params [String] :before An item ID to list items before, used in pagination.
-        #
-        # @option params [Array] :include Additional fields to include in the response. See the `include` parameter for
-        #   Response creation above for more information.
-        #
-        # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and
-        #   100, and the default is 20.
-        #
-        # @option params [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] :order The order to return the input items in. Default is `asc`.
-        #
-        #   - `asc`: Return the input items in ascending order.
-        #   - `desc`: Return the input items in descending order.
-        #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @overload list(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {})
+        #
+        # @param response_id [String]
+        # @param after [String]
+        # @param before [String]
+        # @param include [Array]
+        # @param limit [Integer]
+        # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
         # @return [OpenAI::CursorPage]
         #
diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb
index 490e742a..1f4fc2bf 100644
--- a/lib/openai/resources/uploads.rb
+++ b/lib/openai/resources/uploads.rb
@@ -26,23 +26,13 @@ class Uploads
       # the documentation on
       # [creating a File](https://platform.openai.com/docs/api-reference/files/create).
       #
-      # @param params [OpenAI::Models::UploadCreateParams, Hash{Symbol=>Object}] .
+      # @overload create(bytes:, filename:, mime_type:, purpose:, request_options: {})
       #
-      # @option params [Integer] :bytes The number of bytes in the file you are uploading.
-      #
-      # @option params [String] :filename The name of the file to upload.
-      #
-      # @option params [String] :mime_type The MIME type of the file.
-      #
-      #   This must fall within the supported MIME types for your file purpose. See the
-      #   supported MIME types for assistants and vision.
-      #
-      # @option params [Symbol, OpenAI::Models::FilePurpose] :purpose The intended purpose of the uploaded file.
-      #
-      #   See the
-      #   [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose).
-      #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param bytes [Integer]
+      # @param filename [String]
+      # @param mime_type [String]
+      # @param purpose [Symbol, OpenAI::Models::FilePurpose]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Upload]
       #
@@ -60,11 +50,10 @@ def create(params)
       # Cancels the Upload. No Parts may be added after an Upload is cancelled.
       #
-      # @param upload_id [String] The ID of the Upload.
-      #
-      # @param params [OpenAI::Models::UploadCancelParams, Hash{Symbol=>Object}] .
+      # @overload cancel(upload_id, request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param upload_id [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Upload]
       #
@@ -92,16 +81,12 @@ def cancel(upload_id, params = {})
       # initially specified when creating the Upload object. No Parts may be added after
       # an Upload is completed.
       #
-      # @param upload_id [String] The ID of the Upload.
-      #
-      # @param params [OpenAI::Models::UploadCompleteParams, Hash{Symbol=>Object}] .
-      #
-      # @option params [Array] :part_ids The ordered list of Part IDs.
-      #
-      # @option params [String] :md5 The optional md5 checksum for the file contents to verify if the bytes uploaded
-      #   matches what you expect.
+      # @overload complete(upload_id, part_ids:, md5: nil, request_options: {})
       #
-      # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+      # @param upload_id [String]
+      # @param part_ids [Array]
+      # @param md5 [String]
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
       #
       # @return [OpenAI::Models::Upload]
       #
diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb
index bc06cf70..11eaeb11 100644
--- a/lib/openai/resources/uploads/parts.rb
+++ b/lib/openai/resources/uploads/parts.rb
@@ -16,13 +16,11 @@ class Parts
         # order of the Parts when you
         # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete).
         #
-        # @param upload_id [String] The ID of the Upload.
+        # @overload create(upload_id, data:, request_options: {})
         #
-        # @param params [OpenAI::Models::Uploads::PartCreateParams, Hash{Symbol=>Object}] .
-        #
-        # @option params [IO, StringIO] :data The chunk of bytes for this Part.
-        #
-        # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options
+        # @param upload_id [String]
+        # @param data [IO, StringIO]
+        # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
         #
        # @return [OpenAI::Models::Uploads::UploadPart]
         #
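The three upload methods compose into one flow: create an Upload sized up front, add Parts, then complete with the ordered Part IDs. A sketch (filename and MIME type are illustrative):

    require "stringio"

    data = File.read("train.jsonl")

    upload = client.uploads.create(
      bytes: data.bytesize,
      filename: "train.jsonl",
      mime_type: "application/jsonl",
      purpose: :"fine-tune"
    )
    part = client.uploads.parts.create(upload.id, data: StringIO.new(data))
    done = client.uploads.complete(upload.id, part_ids: [part.id])
    puts done.status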
- # - # @option params [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] :expires_after The expiration policy for a vector store. - # - # @option params [Array] :file_ids A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String] :name The name of the vector store. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStore] # @@ -49,11 +36,10 @@ def create(params = {}) # Retrieves a vector store. # - # @param vector_store_id [String] The ID of the vector store to retrieve. + # @overload retrieve(vector_store_id, request_options: {}) # - # @param params [OpenAI::Models::VectorStoreRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStore] # @@ -69,22 +55,13 @@ def retrieve(vector_store_id, params = {}) # Modifies a vector store. # - # @param vector_store_id [String] The ID of the vector store to modify. - # - # @param params [OpenAI::Models::VectorStoreUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] :expires_after The expiration policy for a vector store. - # - # @option params [Hash{Symbol=>String}, nil] :metadata Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @option params [String, nil] :name The name of the vector store. + # @overload update(vector_store_id, expires_after: nil, metadata: nil, name: nil, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStore] # @@ -102,25 +79,13 @@ def update(vector_store_id, params = {}) # Returns a list of vector stores. # - # @param params [OpenAI::Models::VectorStoreListParams, Hash{Symbol=>Object}] . 
+ # @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. - # - # @option params [String] :before A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. - # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::VectorStoreListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -139,11 +104,10 @@ def list(params = {}) # Delete a vector store. # - # @param vector_store_id [String] The ID of the vector store to delete. - # - # @param params [OpenAI::Models::VectorStoreDeleteParams, Hash{Symbol=>Object}] . + # @overload delete(vector_store_id, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStoreDeleted] # @@ -160,22 +124,15 @@ def delete(vector_store_id, params = {}) # Search a vector store for relevant chunks based on a query and file attributes # filter. # - # @param vector_store_id [String] The ID of the vector store to search. - # - # @param params [OpenAI::Models::VectorStoreSearchParams, Hash{Symbol=>Object}] . - # - # @option params [String, Array] :query A query string for a search - # - # @option params [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] :filters A filter to apply based on file attributes. - # - # @option params [Integer] :max_num_results The maximum number of results to return. This number should be between 1 and 50 - # inclusive. - # - # @option params [OpenAI::Models::VectorStoreSearchParams::RankingOptions] :ranking_options Ranking options for search. - # - # @option params [Boolean] :rewrite_query Whether to rewrite the natural language query for vector search. 
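
Illustrative calls for the `list` and `delete` overloads above; the limit, order, and ID values are placeholders:

    page = client.vector_stores.list(limit: 20, order: :desc)
    # => OpenAI::CursorPage
    client.vector_stores.delete("vs_abc123")
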
+ # @overload search(vector_store_id, query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param query [String, Array] + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] + # @param rewrite_query [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Page] # diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index e2b1e6ac..67f78837 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -6,24 +6,13 @@ class VectorStores class FileBatches # Create a vector store file batch. # - # @param vector_store_id [String] The ID of the vector store for which to create a File Batch. + # @overload create(vector_store_id, file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) # - # @param params [OpenAI::Models::VectorStores::FileBatchCreateParams, Hash{Symbol=>Object}] . - # - # @option params [Array] :file_ids A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. - # - # @option params [Hash{Symbol=>String, Float, Boolean}, nil] :attributes Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. - # - # @option params [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] :chunking_strategy The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param file_ids [Array] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # @@ -41,13 +30,11 @@ def create(vector_store_id, params) # Retrieves a vector store file batch. # - # @param batch_id [String] The ID of the file batch being retrieved. + # @overload retrieve(batch_id, vector_store_id:, request_options: {}) # - # @param params [OpenAI::Models::VectorStores::FileBatchRetrieveParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id The ID of the vector store that the file batch belongs to. 
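
A sketch of the `search` overload above (hypothetical client; the ID and query are placeholders):

    results = client.vector_stores.search(
      "vs_abc123",
      query: "refund policy",
      max_num_results: 5
    )
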
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param batch_id [String] + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # @@ -69,13 +56,11 @@ def retrieve(batch_id, params) # Cancel a vector store file batch. This attempts to cancel the processing of # files in this batch as soon as possible. # - # @param batch_id [String] The ID of the file batch to cancel. - # - # @param params [OpenAI::Models::VectorStores::FileBatchCancelParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id The ID of the vector store that the file batch belongs to. + # @overload cancel(batch_id, vector_store_id:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param batch_id [String] + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # @@ -96,32 +81,16 @@ def cancel(batch_id, params) # Returns a list of vector store files in a batch. # - # @param batch_id [String] Path param: The ID of the file batch that the files belong to. - # - # @param params [OpenAI::Models::VectorStores::FileBatchListFilesParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id Path param: The ID of the vector store that the files belong to. - # - # @option params [String] :after Query param: A cursor for use in pagination. `after` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, ending with obj_foo, your subsequent call can include - # after=obj_foo in order to fetch the next page of the list. - # - # @option params [String] :before Query param: A cursor for use in pagination. `before` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, starting with obj_foo, your subsequent call can include - # before=obj_foo in order to fetch the previous page of the list. - # - # @option params [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] :filter Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, - # `cancelled`. - # - # @option params [Integer] :limit Query param: A limit on the number of objects to be returned. Limit can range - # between 1 and 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] :order Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ascending order and `desc` for descending order. 
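
The batch-scoped overloads above take the batch ID positionally and the owning vector store as a keyword; assuming the resource is exposed as `client.vector_stores.file_batches` (IDs are placeholders):

    client.vector_stores.file_batches.cancel("vsfb_123", vector_store_id: "vs_abc123")
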
+ # @overload list_files(batch_id, vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param batch_id [String] + # @param vector_store_id [String] + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 6d94283d..b296ddcd 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -8,24 +8,13 @@ class Files # [File](https://platform.openai.com/docs/api-reference/files) to a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). # - # @param vector_store_id [String] The ID of the vector store for which to create a File. + # @overload create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # - # @param params [OpenAI::Models::VectorStores::FileCreateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :file_id A [File](https://platform.openai.com/docs/api-reference/files) ID that the - # vector store should use. Useful for tools like `file_search` that can access - # files. - # - # @option params [Hash{Symbol=>String, Float, Boolean}, nil] :attributes Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. - # - # @option params [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] :chunking_strategy The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param file_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFile] # @@ -43,13 +32,11 @@ def create(vector_store_id, params) # Retrieves a vector store file. # - # @param file_id [String] The ID of the file being retrieved. - # - # @param params [OpenAI::Models::VectorStores::FileRetrieveParams, Hash{Symbol=>Object}] . + # @overload retrieve(file_id, vector_store_id:, request_options: {}) # - # @option params [String] :vector_store_id The ID of the vector store that the file belongs to. 
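
Attaching a file with the `create` overload above, assuming the resource is exposed as `client.vector_stores.files` (placeholder IDs):

    client.vector_stores.files.create("vs_abc123", file_id: "file-abc123")
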
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param file_id [String] + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFile] # @@ -70,19 +57,12 @@ def retrieve(file_id, params) # Update attributes on a vector store file. # - # @param file_id [String] Path param: The ID of the file to update attributes. - # - # @param params [OpenAI::Models::VectorStores::FileUpdateParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id Path param: The ID of the vector store the file belongs to. + # @overload update(file_id, vector_store_id:, attributes:, request_options: {}) # - # @option params [Hash{Symbol=>String, Float, Boolean}, nil] :attributes Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. Keys are - # strings with a maximum length of 64 characters. Values are strings with a - # maximum length of 512 characters, booleans, or numbers. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param file_id [String] + # @param vector_store_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFile] # @@ -104,29 +84,15 @@ def update(file_id, params) # Returns a list of vector store files. # - # @param vector_store_id [String] The ID of the vector store that the files belong to. - # - # @param params [OpenAI::Models::VectorStores::FileListParams, Hash{Symbol=>Object}] . + # @overload list(vector_store_id, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # - # @option params [String] :after A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. - # - # @option params [String] :before A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. - # - # @option params [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] :filter Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - # - # @option params [Integer] :limit A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. - # - # @option params [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] :order Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. 
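
Note that in the `update` overload above, `attributes:` is a required keyword. A minimal sketch, with placeholder IDs and made-up attribute keys:

    client.vector_stores.files.update(
      "file-abc123",
      vector_store_id: "vs_abc123",
      attributes: {category: "faq", reviewed: true}
    )
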
- # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param vector_store_id [String] + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::CursorPage] # @@ -148,13 +114,11 @@ def list(vector_store_id, params = {}) # [delete file](https://platform.openai.com/docs/api-reference/files/delete) # endpoint. # - # @param file_id [String] The ID of the file to delete. + # @overload delete(file_id, vector_store_id:, request_options: {}) # - # @param params [OpenAI::Models::VectorStores::FileDeleteParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id The ID of the vector store that the file belongs to. - # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param file_id [String] + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] # @@ -175,13 +139,11 @@ def delete(file_id, params) # Retrieve the parsed contents of a vector store file. # - # @param file_id [String] The ID of the file within the vector store. - # - # @param params [OpenAI::Models::VectorStores::FileContentParams, Hash{Symbol=>Object}] . - # - # @option params [String] :vector_store_id The ID of the vector store. + # @overload content(file_id, vector_store_id:, request_options: {}) # - # @option params [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :request_options + # @param file_id [String] + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Page] # From 4d5b0a712dc99a9c6ff1cabc18c911c8181e896b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 17:46:59 +0000 Subject: [PATCH 079/295] feat: link response models to their methods in yard doc (#81) --- lib/openai/models/audio/transcription_create_response.rb | 4 ++++ lib/openai/models/audio/translation_create_response.rb | 1 + lib/openai/models/batch.rb | 1 + lib/openai/models/beta/assistant.rb | 1 + lib/openai/models/beta/assistant_deleted.rb | 1 + lib/openai/models/beta/thread.rb | 1 + lib/openai/models/beta/thread_deleted.rb | 1 + lib/openai/models/beta/threads/message.rb | 1 + lib/openai/models/beta/threads/message_deleted.rb | 1 + lib/openai/models/beta/threads/run.rb | 3 +++ lib/openai/models/beta/threads/runs/run_step.rb | 1 + lib/openai/models/chat/chat_completion.rb | 3 +++ lib/openai/models/chat/chat_completion_deleted.rb | 1 + lib/openai/models/completion.rb | 3 +++ lib/openai/models/create_embedding_response.rb | 1 + lib/openai/models/file_deleted.rb | 1 + lib/openai/models/file_object.rb | 1 + lib/openai/models/fine_tuning/fine_tuning_job.rb | 1 + lib/openai/models/fine_tuning/fine_tuning_job_event.rb | 1 + .../models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb | 1 + lib/openai/models/images_response.rb | 1 + lib/openai/models/model.rb | 1 + lib/openai/models/model_deleted.rb | 1 + lib/openai/models/moderation_create_response.rb | 1 + lib/openai/models/responses/response.rb | 3 +++ lib/openai/models/upload.rb | 1 + lib/openai/models/uploads/upload_part.rb | 1 + 
lib/openai/models/vector_store.rb | 1 + lib/openai/models/vector_store_deleted.rb | 1 + lib/openai/models/vector_store_search_response.rb | 1 + lib/openai/models/vector_stores/file_content_response.rb | 1 + lib/openai/models/vector_stores/vector_store_file.rb | 1 + lib/openai/models/vector_stores/vector_store_file_batch.rb | 1 + lib/openai/models/vector_stores/vector_store_file_deleted.rb | 1 + 34 files changed, 45 insertions(+) diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index fe9d6a25..be3c624c 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -5,6 +5,10 @@ module Models module Audio # Represents a transcription response returned by model, based on the provided # input. + # + # @see OpenAI::Resources::Audio::Transcriptions#create + # + # @see OpenAI::Resources::Audio::Transcriptions#create_streaming module TranscriptionCreateResponse extend OpenAI::Union diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 2511f059..3beab227 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Audio + # @see OpenAI::Resources::Audio::Translations#create module TranslationCreateResponse extend OpenAI::Union diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index e41efffb..5d59baff 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Batches#create class Batch < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 5b56453e..9adc23a7 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#create class Assistant < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/beta/assistant_deleted.rb b/lib/openai/models/beta/assistant_deleted.rb index 1e02cf88..4793b0ae 100644 --- a/lib/openai/models/beta/assistant_deleted.rb +++ b/lib/openai/models/beta/assistant_deleted.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Assistants#delete class AssistantDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 737cbf81..c651e7c2 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#create class Thread < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/beta/thread_deleted.rb b/lib/openai/models/beta/thread_deleted.rb index f27336b9..35289d1b 100644 --- a/lib/openai/models/beta/thread_deleted.rb +++ b/lib/openai/models/beta/thread_deleted.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Beta + # @see OpenAI::Resources::Beta::Threads#delete class ThreadDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 3d07bbd7..ded2bdd4 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#create class Message < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/beta/threads/message_deleted.rb b/lib/openai/models/beta/threads/message_deleted.rb index a3329fce..488ddb42 100644 --- a/lib/openai/models/beta/threads/message_deleted.rb +++ b/lib/openai/models/beta/threads/message_deleted.rb @@ -4,6 +4,7 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Messages#delete class MessageDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index f104ddeb..66e2e6a8 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -4,6 +4,9 @@ module OpenAI module Models module Beta module Threads + # @see OpenAI::Resources::Beta::Threads::Runs#create + # + # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming class Run < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 2c9449fe..21084d23 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -5,6 +5,7 @@ module Models module Beta module Threads module Runs + # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve class RunStep < OpenAI::BaseModel # @!attribute id # The identifier of the run step, which can be referenced in API endpoints. diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 2cdec571..2dcdfd05 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#create + # + # @see OpenAI::Resources::Chat::Completions#create_streaming class ChatCompletion < OpenAI::BaseModel # @!attribute id # A unique identifier for the chat completion. diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index 22274c71..844e2cab 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Chat + # @see OpenAI::Resources::Chat::Completions#delete class ChatCompletionDeleted < OpenAI::BaseModel # @!attribute id # The ID of the chat completion that was deleted. 
diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 751e0dc8..8e3ad52a 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -2,6 +2,9 @@ module OpenAI module Models + # @see OpenAI::Resources::Completions#create + # + # @see OpenAI::Resources::Completions#create_streaming class Completion < OpenAI::BaseModel # @!attribute id # A unique identifier for the completion. diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index 2679c15d..3fbd7bc4 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Embeddings#create class CreateEmbeddingResponse < OpenAI::BaseModel # @!attribute data # The list of embeddings generated by the model. diff --git a/lib/openai/models/file_deleted.rb b/lib/openai/models/file_deleted.rb index 9eb014a2..c9841f7e 100644 --- a/lib/openai/models/file_deleted.rb +++ b/lib/openai/models/file_deleted.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#delete class FileDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index e85f9017..1b6ca589 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Files#create class FileObject < OpenAI::BaseModel # @!attribute id # The file identifier, which can be referenced in the API endpoints. diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 07a3dd60..cce986bc 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#create class FineTuningJob < OpenAI::BaseModel # @!attribute id # The object identifier, which can be referenced in the API endpoints. diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index c6bd2025..60995c3b 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -3,6 +3,7 @@ module OpenAI module Models module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#list_events class FineTuningJobEvent < OpenAI::BaseModel # @!attribute id # The object identifier. diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 0390e0c6..75874793 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -4,6 +4,7 @@ module OpenAI module Models module FineTuning module Jobs + # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list class FineTuningJobCheckpoint < OpenAI::BaseModel # @!attribute id # The checkpoint identifier, which can be referenced in the API endpoints. 
diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index 3f3f3b00..fb015bb2 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Images#create_variation class ImagesResponse < OpenAI::BaseModel # @!attribute created # diff --git a/lib/openai/models/model.rb b/lib/openai/models/model.rb index 1827ed7b..3e17e2d3 100644 --- a/lib/openai/models/model.rb +++ b/lib/openai/models/model.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Models#retrieve class Model < OpenAI::BaseModel # @!attribute id # The model identifier, which can be referenced in the API endpoints. diff --git a/lib/openai/models/model_deleted.rb b/lib/openai/models/model_deleted.rb index 72d0b807..f269390a 100644 --- a/lib/openai/models/model_deleted.rb +++ b/lib/openai/models/model_deleted.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Models#delete class ModelDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 2e9e3828..c06f6c57 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Moderations#create class ModerationCreateResponse < OpenAI::BaseModel # @!attribute id # The unique identifier for the moderation request. diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 9f71b7aa..280ce639 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -3,6 +3,9 @@ module OpenAI module Models module Responses + # @see OpenAI::Resources::Responses#create + # + # @see OpenAI::Resources::Responses#create_streaming class Response < OpenAI::BaseModel # @!attribute id # Unique identifier for this Response. diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index a1652e79..a905f9ff 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::Uploads#create class Upload < OpenAI::BaseModel # @!attribute id # The Upload unique identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/uploads/upload_part.rb b/lib/openai/models/uploads/upload_part.rb index 8edf74ce..e0566eb9 100644 --- a/lib/openai/models/uploads/upload_part.rb +++ b/lib/openai/models/uploads/upload_part.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Uploads + # @see OpenAI::Resources::Uploads::Parts#create class UploadPart < OpenAI::BaseModel # @!attribute id # The upload Part unique identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 8832fca7..29391d3a 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#create class VectorStore < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/vector_store_deleted.rb b/lib/openai/models/vector_store_deleted.rb index 018837f0..84da9461 100644 --- a/lib/openai/models/vector_store_deleted.rb +++ b/lib/openai/models/vector_store_deleted.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#delete class VectorStoreDeleted < OpenAI::BaseModel # @!attribute id # diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index c4914147..71b724a2 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @see OpenAI::Resources::VectorStores#search class VectorStoreSearchResponse < OpenAI::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index a2450a19..6f667726 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#content class FileContentResponse < OpenAI::BaseModel # @!attribute [r] text # The text content diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index a1a631ed..7c64487b 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#create class VectorStoreFile < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index dfc5192d..b08f672c 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::FileBatches#create class VectorStoreFileBatch < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/vector_stores/vector_store_file_deleted.rb b/lib/openai/models/vector_stores/vector_store_file_deleted.rb index 9a226ad7..26fa7214 100644 --- a/lib/openai/models/vector_stores/vector_store_file_deleted.rb +++ b/lib/openai/models/vector_stores/vector_store_file_deleted.rb @@ -3,6 +3,7 @@ module OpenAI module Models module VectorStores + # @see OpenAI::Resources::VectorStores::Files#delete class VectorStoreFileDeleted < OpenAI::BaseModel # @!attribute id # From 30dbf7cda5257092fe289b33321d8e73979d97e0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 22:45:56 +0000 Subject: [PATCH 080/295] chore: relax sorbet enum parameters to allow `String` in addition to `Symbol` (#82) --- .../models/audio/speech_create_params.rb | 4 +- .../models/beta/assistant_update_params.rb | 6 +-- .../chat/chat_completion_audio_param.rb | 4 +- lib/openai/models/completion_create_params.rb | 4 +- .../models/fine_tuning/job_create_params.rb | 4 +- lib/openai/models/responses/response.rb | 4 +- .../responses/response_create_params.rb | 4 +- lib/openai/resources/audio/speech.rb | 2 +- lib/openai/resources/beta/assistants.rb | 2 +- lib/openai/resources/completions.rb | 4 +- lib/openai/resources/fine_tuning/jobs.rb | 2 +- lib/openai/resources/responses.rb | 4 +- rbi/lib/openai/models/all_models.rbi | 2 +- .../models/audio/speech_create_params.rbi | 5 +- rbi/lib/openai/models/audio/speech_model.rbi | 2 +- .../audio/transcription_create_params.rbi | 8 ++- .../models/audio/transcription_include.rbi | 3 +- .../audio/translation_create_params.rbi | 2 +- rbi/lib/openai/models/audio_model.rbi | 2 +- .../openai/models/audio_response_format.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 2 +- rbi/lib/openai/models/batch_create_params.rbi | 5 +- .../models/beta/assistant_list_params.rbi | 3 +- .../models/beta/assistant_tool_choice.rbi | 3 +- .../beta/assistant_tool_choice_option.rbi | 2 +- .../models/beta/assistant_update_params.rbi | 2 +- .../openai/models/beta/file_search_tool.rbi | 8 ++- .../beta/thread_create_and_run_params.rbi | 10 +++- .../models/beta/thread_create_params.rbi | 2 +- .../openai/models/beta/threads/image_file.rbi | 3 +- .../models/beta/threads/image_file_delta.rbi | 2 +- .../openai/models/beta/threads/image_url.rbi | 3 +- .../models/beta/threads/image_url_delta.rbi | 2 +- .../openai/models/beta/threads/message.rbi | 8 +-- .../beta/threads/message_create_params.rbi | 2 +- .../models/beta/threads/message_delta.rbi | 2 +- .../beta/threads/message_list_params.rbi | 2 +- rbi/lib/openai/models/beta/threads/run.rbi | 6 +-- .../models/beta/threads/run_create_params.rbi | 16 +++++- .../models/beta/threads/run_list_params.rbi | 2 +- .../openai/models/beta/threads/run_status.rbi | 2 +- .../threads/runs/file_search_tool_call.rbi | 2 + .../models/beta/threads/runs/run_step.rbi | 6 +-- .../beta/threads/runs/run_step_include.rbi | 2 +- .../beta/threads/runs/step_list_params.rbi | 2 +- .../openai/models/chat/chat_completion.rbi | 4 +- .../chat/chat_completion_audio_param.rbi | 4 +- .../models/chat/chat_completion_chunk.rbi | 14 ++++-- .../chat_completion_content_part_image.rbi | 8 ++- ...at_completion_content_part_input_audio.rbi | 8 ++- .../models/chat/chat_completion_modality.rbi | 3 +- .../models/chat/chat_completion_role.rbi | 2 +- .../chat_completion_tool_choice_option.rbi | 2 +- .../models/chat/completion_create_params.rbi | 13 +++-- .../models/chat/completion_list_params.rbi | 2 +- 
.../chat/completions/message_list_params.rbi | 2 +- rbi/lib/openai/models/chat_model.rbi | 2 +- rbi/lib/openai/models/comparison_filter.rbi | 2 +- rbi/lib/openai/models/completion_choice.rbi | 3 +- .../models/completion_create_params.rbi | 3 +- rbi/lib/openai/models/compound_filter.rbi | 2 +- .../openai/models/embedding_create_params.rbi | 2 +- rbi/lib/openai/models/embedding_model.rbi | 2 +- rbi/lib/openai/models/file_list_params.rbi | 2 +- rbi/lib/openai/models/file_object.rbi | 4 +- rbi/lib/openai/models/file_purpose.rbi | 2 +- .../models/fine_tuning/fine_tuning_job.rbi | 4 +- .../fine_tuning/fine_tuning_job_event.rbi | 4 +- .../models/fine_tuning/job_create_params.rbi | 4 +- .../models/image_create_variation_params.rbi | 5 +- rbi/lib/openai/models/image_edit_params.rbi | 5 +- .../openai/models/image_generate_params.rbi | 11 ++-- rbi/lib/openai/models/image_model.rbi | 2 +- rbi/lib/openai/models/moderation.rbi | 50 ++++++++++++++----- rbi/lib/openai/models/moderation_model.rbi | 2 +- rbi/lib/openai/models/reasoning.rbi | 3 +- rbi/lib/openai/models/reasoning_effort.rbi | 2 +- .../openai/models/responses/computer_tool.rbi | 2 +- .../models/responses/easy_input_message.rbi | 4 +- .../models/responses/file_search_tool.rbi | 2 +- .../responses/input_item_list_params.rbi | 2 +- rbi/lib/openai/models/responses/response.rbi | 5 +- .../response_code_interpreter_tool_call.rbi | 2 +- .../responses/response_computer_tool_call.rbi | 12 +++-- ...esponse_computer_tool_call_output_item.rbi | 2 +- .../responses/response_create_params.rbi | 2 +- .../models/responses/response_error.rbi | 3 +- .../response_file_search_tool_call.rbi | 2 +- .../responses/response_function_tool_call.rbi | 2 +- ...esponse_function_tool_call_output_item.rbi | 2 +- .../response_function_web_search.rbi | 2 +- .../models/responses/response_includable.rbi | 3 +- .../models/responses/response_input_audio.rbi | 2 +- .../models/responses/response_input_image.rbi | 2 +- .../models/responses/response_input_item.rbi | 22 ++++++-- .../responses/response_input_message_item.rbi | 6 +-- .../responses/response_output_message.rbi | 2 +- .../responses/response_reasoning_item.rbi | 2 +- .../models/responses/response_status.rbi | 3 +- .../models/responses/tool_choice_options.rbi | 3 +- .../models/responses/tool_choice_types.rbi | 3 +- .../models/responses/web_search_tool.rbi | 5 +- rbi/lib/openai/models/responses_model.rbi | 2 +- rbi/lib/openai/models/upload.rbi | 2 +- rbi/lib/openai/models/vector_store.rbi | 2 +- .../models/vector_store_list_params.rbi | 3 +- .../models/vector_store_search_params.rbi | 2 +- .../models/vector_store_search_response.rbi | 2 +- .../file_batch_list_files_params.rbi | 4 +- .../models/vector_stores/file_list_params.rbi | 4 +- .../vector_stores/vector_store_file.rbi | 4 +- .../vector_stores/vector_store_file_batch.rbi | 2 +- 112 files changed, 298 insertions(+), 173 deletions(-) diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index e7ebf9d7..081c6175 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -28,7 +28,7 @@ class SpeechCreateParams < OpenAI::BaseModel # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). 
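
The hunks below widen the `voice` union so that a plain string is accepted alongside the enum symbols; in practice both of these calls should be equivalent (hypothetical client):

    client.audio.speech.create(model: "tts-1", voice: :alloy, input: "Hello")
    client.audio.speech.create(model: "tts-1", voice: "alloy", input: "Hello")
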
# - # @return [String, Symbol] + # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } # @!attribute [r] instructions @@ -67,7 +67,7 @@ class SpeechCreateParams < OpenAI::BaseModel # @!parse # # @param input [String] # # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # # @param voice [String, Symbol] + # # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] # # @param instructions [String] # # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] # # @param speed [Float] diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 3c3bdd37..2c59bf2d 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -40,11 +40,11 @@ class AssistantUpdateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, nil] + # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } # @!parse - # # @return [String, Symbol] + # # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] # attr_writer :model # @!attribute name @@ -132,7 +132,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # # @param description [String, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol] + # # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] # # @param name [String, nil] # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 558fb748..9260b6ff 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -15,7 +15,7 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. # - # @return [String, Symbol] + # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } # @!parse @@ -24,7 +24,7 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # # [Learn more](https://platform.openai.com/docs/guides/audio). # # # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - # # @param voice [String, Symbol] + # # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] # # # def initialize(format_:, voice:, **) = super diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 18e6bdb2..974ee482 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -17,7 +17,7 @@ class CompletionCreateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
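
For instance, with the widened assistant `model` union above, passing a bare model string should pass both the type check and runtime coercion (placeholder assistant ID, hypothetical client):

    client.beta.assistants.update("asst_abc123", model: "gpt-4o")
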
# - # @return [String, Symbol] + # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] required :model, union: -> { OpenAI::Models::CompletionCreateParams::Model } # @!attribute prompt @@ -186,7 +186,7 @@ class CompletionCreateParams < OpenAI::BaseModel # attr_writer :user # @!parse - # # @param model [String, Symbol] + # # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # # @param prompt [String, Array, Array, Array>, nil] # # @param best_of [Integer, nil] # # @param echo [Boolean, nil] diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index f5770f51..7649c07b 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -13,7 +13,7 @@ class JobCreateParams < OpenAI::BaseModel # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). # - # @return [String, Symbol] + # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] required :model, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Model } # @!attribute training_file @@ -114,7 +114,7 @@ class JobCreateParams < OpenAI::BaseModel optional :validation_file, String, nil?: true # @!parse - # # @param model [String, Symbol] + # # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] # # @param training_file [String] # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] # # @param integrations [Array, nil] diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 280ce639..f53f3fc5 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -60,7 +60,7 @@ class Response < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute object @@ -225,7 +225,7 @@ class Response < OpenAI::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] # # @param output [Array] # # @param parallel_tool_calls [Boolean] # # @param temperature [Float, nil] diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 41049b5b..29abdd2c 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -32,7 +32,7 @@ class ResponseCreateParams < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. 
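
Likewise for the Responses API, where `model:` now types as a String, a `ChatModel` symbol, or a `ResponsesModel` symbol; both spellings below should be accepted (hypothetical client):

    client.responses.create(model: "gpt-4o", input: "Say hi")
    client.responses.create(model: :"gpt-4o", input: "Say hi")
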
# - # @return [String, Symbol, OpenAI::Models::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute include @@ -201,7 +201,7 @@ class ResponseCreateParams < OpenAI::BaseModel # @!parse # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] # # @param include [Array, nil] # # @param instructions [String, nil] # # @param max_output_tokens [Integer, nil] diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 8264c108..6a31ca99 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -10,7 +10,7 @@ class Speech # # @param input [String] # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # @param voice [String, Symbol] + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] # @param instructions [String] # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] # @param speed [Float] diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 08b62ef0..da45a68f 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -62,7 +62,7 @@ def retrieve(assistant_id, params = {}) # @param description [String, nil] # @param instructions [String, nil] # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol] + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] # @param name [String, nil] # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 7a58a20a..f27ec1e4 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -7,7 +7,7 @@ class Completions # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol] + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] # @param best_of [Integer, nil] # @param echo [Boolean, nil] @@ -48,7 +48,7 @@ def create(params) # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol] + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] # @param best_of [Integer, nil] # @param echo [Boolean, nil] diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index e09a74fe..391f59b9 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -17,7 +17,7 @@ class 
Jobs # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # - # @param model [String, Symbol] + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] # @param training_file [String] # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] # @param integrations [Array, nil] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 7b1561d9..2e39f489 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -21,7 +21,7 @@ class Responses # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] # @param include [Array, nil] # @param instructions [String, nil] # @param max_output_tokens [Integer, nil] @@ -72,7 +72,7 @@ def create(params) # @overload create_streaming(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] # @param include [Array, nil] # @param instructions [String, nil] # @param max_output_tokens [Integer, nil] diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index f6a5bf8e..3d16cbd5 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -10,7 +10,7 @@ module OpenAI end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AllModels::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::TaggedSymbol) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::TaggedSymbol) O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index cd1f2343..ed9a79de 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -109,7 +109,8 @@ module OpenAI end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) @@ -131,7 +132,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } OrSymbol = - 
T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 14159fd5..ed1acea8 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -7,7 +7,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index a4cfde33..f20bc4f6 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -151,7 +151,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol + ) + end WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) SEGMENT = diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index b9eb4918..878b37d7 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -7,7 +7,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 1f264107..e0a7e1ea 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -94,7 +94,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) 
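
At the RBI level the change is mechanical: every enum's `OrSymbol` alias gains `String` as a member, so Sorbet accepts either spelling at call sites. A minimal sketch (the `Example` class and `transcode` method are hypothetical, for illustration only):

    # typed: true
    class Example
      extend T::Sig

      sig { params(fmt: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol).void }
      def transcode(fmt); end
    end

    Example.new.transcode(:json)   # accepted before and after this change
    Example.new.transcode("json")  # accepted only after this change
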
diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index eb369760..656d0083 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioModel::TaggedSymbol) } WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::TaggedSymbol) GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index dc544cfd..1246abca 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -9,7 +9,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 6e8237c7..f82137fc 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -206,7 +206,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Batch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Batch::Status::TaggedSymbol) } VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index bc8dd159..b5f6c9e7 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -74,7 +74,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) @@ -91,7 +91,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) V1_CHAT_COMPLETIONS = diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 1657931e..57461148 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -77,7 +77,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol 
= T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index eaf93e66..e1475eed 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -43,7 +43,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) CODE_INTERPRETER = diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 0df3bce3..44079027 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -22,7 +22,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 874e84be..bb5f00ab 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -242,7 +242,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) O3_MINI_2025_01_31 = diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 4823e922..61181130 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -131,7 +131,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) + end AUTO = T.let(:auto, 
OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 5d33277e..963ba806 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -501,7 +501,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) ASSISTANT = @@ -1101,7 +1101,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ) + end AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 5115f982..e1977394 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -197,7 +197,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 4a97f32e..2ce2e93b 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -36,7 +36,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index f2b98ec3..a0e96760 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -43,7 +43,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } OrSymbol = - T.type_alias { T.any(Symbol, 
OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 732b0f05..0795a0a5 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -35,7 +35,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 281a3d35..aa4152eb 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -41,7 +41,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 875f4bcc..87967f32 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -293,7 +293,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) @@ -319,7 +319,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) @@ -335,7 +336,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 2045cd26..f2b3ca52 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -136,7 +136,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index a89cb0f3..8f88410b 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -89,7 +89,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 545445aa..c1edc5f6 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -88,7 +88,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 9a5ebeb1..407e6777 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -396,7 +396,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } MAX_COMPLETION_TOKENS = T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) @@ -439,7 +439,7 @@ module OpenAI TaggedSymbol = 
T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = @@ -559,7 +559,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) LAST_MESSAGES = diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 1fb7e4b3..3e1cea2e 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -449,7 +449,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol + ) + end USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) @@ -622,7 +628,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) + end AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index d6f27922..94b4495a 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -79,7 +79,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index 243f07b9..59df355d 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -11,7 +11,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } - OrSymbol = T.type_alias { T.any(Symbol, 
OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index ee1a666e..96fb0416 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -150,6 +150,7 @@ module OpenAI T.type_alias do T.any( Symbol, + String, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol ) end @@ -302,6 +303,7 @@ module OpenAI T.type_alias do T.any( Symbol, + String, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol ) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 79a32996..734e99a5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -215,7 +215,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) @@ -235,7 +235,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) @@ -268,7 +268,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } MESSAGE_CREATION = T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 23cf6a3b..8741e9f7 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -10,7 +10,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = T.let( diff --git 
a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index bdb4424d..75db421b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -108,7 +108,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 1fea8214..32ff4ea3 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -161,7 +161,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) @@ -216,7 +216,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index d59ea97b..938f0366 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -46,7 +46,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) @@ -70,7 +70,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) 
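The mechanics behind the alias pair: TaggedSymbol (T.all(Symbol, <enum>)) remains the narrow type the SDK produces, while OrSymbol is the wide type it accepts, and adding String to the OrSymbol union is what lets srb tc admit raw strings. A runnable, self-contained sketch of the same pattern under sorbet-runtime (simplified stand-ins, not the generated SDK code):

    # typed: true
    require "sorbet-runtime"

    module ResponseFormat
      extend T::Sig

      # The real TaggedSymbol is T.all(Symbol, <enum module>); a bare
      # Symbol keeps this example runnable without the codegen machinery.
      TaggedSymbol = T.type_alias { Symbol }
      OrSymbol = T.type_alias { T.any(Symbol, String, TaggedSymbol) }

      sig { params(value: OrSymbol).returns(TaggedSymbol) }
      def self.coerce(value)
        value.to_sym
      end
    end

    ResponseFormat.coerce(:mp3)  # accepted before and after this change
    ResponseFormat.coerce("mp3") # typechecks only once String joins the union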
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 56e76f39..ca1adf7b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -263,7 +263,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) @@ -367,7 +367,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + ) + end FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) @@ -394,7 +400,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) @@ -450,7 +456,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 4a3b9252..5c838086 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -76,7 +76,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol + ) + end AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi 
b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index d3e09c2d..588aba54 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -77,7 +77,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol + ) + end WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 8c780c0f..785b8db7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -7,7 +7,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index 0fad6644..61b9c3a5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -8,7 +8,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index ad5c2dd6..48048594 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -23,7 +23,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 72a30f17..a841bba5 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ 
b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -565,7 +565,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol + ) + end NONE = T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) @@ -638,7 +644,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) @@ -690,7 +696,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) @@ -782,6 +788,7 @@ module OpenAI T.type_alias do T.any( Symbol, + String, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol ) end diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index e488c018..334e3fcd 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -79,7 +79,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 57006149..9accd714 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -64,7 +64,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, 
OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index df9c03d2..4494fe86 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ChatModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ChatModel::TaggedSymbol) } O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::TaggedSymbol) O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 971045f3..3549a566 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -57,7 +57,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index c37142f2..cfd36710 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -56,7 +56,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 76ecc07b..cde7b429 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -256,7 +256,8 @@ module OpenAI end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } GPT_3_5_TURBO_INSTRUCT = T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index e070a53b..b73e46bf 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -50,7 +50,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } AND = T.let(:and, 
OpenAI::Models::CompoundFilter::Type::TaggedSymbol) OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 9aac2355..89a4ff35 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -122,7 +122,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index d1fc3704..94781b12 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::EmbeddingModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingModel::TaggedSymbol) } TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::TaggedSymbol) TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 027ccc87..c3cd0d00 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -73,7 +73,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 80273345..e8452219 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -103,7 +103,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) @@ -124,7 +124,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FileObject::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Status::TaggedSymbol) } UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) PROCESSED = T.let(:processed, 
OpenAI::Models::FileObject::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 3fa6743d..1f7bb4f9 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -10,7 +10,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::FilePurpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FilePurpose::TaggedSymbol) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::TaggedSymbol) BATCH = T.let(:batch, OpenAI::Models::FilePurpose::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 5aa6d868..91218d14 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -308,7 +308,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } VALIDATING_FILES = T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) @@ -631,7 +631,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 73e56f74..811f8518 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -85,7 +85,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) @@ -102,7 +102,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 3844e2ce..5091008f 100644 --- 
a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -156,7 +156,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) @@ -646,7 +646,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 80245291..735f07ef 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -99,7 +99,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) @@ -115,7 +115,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 130b145b..62dcb5bc 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -117,7 +117,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) @@ -133,7 +134,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, 
OpenAI::Models::ImageEditParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 5cdd5c58..9e3de432 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -119,7 +119,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) @@ -137,7 +138,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) @@ -154,7 +155,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) @@ -175,7 +177,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 6e7b06d9..6ba143ea 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ImageModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageModel::TaggedSymbol) } DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 
bcf566f6..be0ff0fa 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -315,7 +315,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) @@ -332,7 +332,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol + ) + end TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) @@ -353,7 +359,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) @@ -368,7 +374,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol + ) + end TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) @@ -386,7 +398,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) @@ -401,7 +413,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) @@ -419,7 +431,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } TEXT 
= T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) @@ -435,7 +447,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ) + end TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) @@ -458,7 +476,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) IMAGE = @@ -478,7 +496,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) @@ -494,7 +512,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) @@ -512,7 +530,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) @@ -528,7 +546,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ) + end TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) IMAGE = diff --git a/rbi/lib/openai/models/moderation_model.rbi 
b/rbi/lib/openai/models/moderation_model.rbi index d3f8dff4..e422afd9 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ModerationModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ModerationModel::TaggedSymbol) } OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) OMNI_MODERATION_2024_09_26 = diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index aef3d79c..0bcde269 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -55,7 +55,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 0563845c..e95a9cdc 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -12,7 +12,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ReasoningEffort::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ReasoningEffort::TaggedSymbol) } LOW = T.let(:low, OpenAI::Models::ReasoningEffort::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 10f57a60..90cabbf7 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -54,7 +54,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index d8597a18..e0e2b2ed 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -113,7 +113,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) 
ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) @@ -131,7 +131,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 62517b8d..07f8b017 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -134,7 +134,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index bbd299bd..445f655c 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -86,7 +86,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index db73d16a..4ce11535 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -372,7 +372,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } MAX_OUTPUT_TOKENS = T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) @@ -412,7 +412,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi 
b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 91dbb3cd..cb88444a 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -168,7 +168,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 298263bd..80a4e800 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -155,7 +155,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) + end LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) @@ -436,7 +442,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) @@ -455,7 +461,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } COMPUTER_CALL = T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index a1ef5e14..f10df142 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -133,7 +133,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) diff --git 
a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 305eb10e..9bf4d295 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -427,7 +427,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index a000c309..4f682bd4 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -29,7 +29,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 55b6067b..01b2899c 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -66,7 +66,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 0135732e..f7b0b0a7 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -76,7 +76,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index a3ae3675..b6e0956c 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ 
b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -64,7 +64,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 0dfe2413..080ec4db 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -46,7 +46,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 9b325a79..f2015ad7 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -15,7 +15,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } FILE_SEARCH_CALL_RESULTS = T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index 98923bc5..7135e479 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -43,7 +43,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 4d92987a..11947564 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -57,7 +57,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } + T.type_alias { 
T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 7f12e53f..eb5e163c 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -95,7 +95,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) @@ -114,7 +114,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) @@ -135,7 +135,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) @@ -279,7 +279,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + ) + end IN_PROGRESS = T.let( @@ -366,7 +372,13 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) } + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + ) + end IN_PROGRESS = T.let( diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 345adad7..11580c3f 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -89,7 +89,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } OrSymbol = - T.type_alias { T.any(Symbol, 
OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) @@ -108,7 +108,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) @@ -127,7 +127,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index d91e17f1..81c36c47 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -83,7 +83,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index ba09b049..acb3183e 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -77,7 +77,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index a46776c9..138c0188 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -9,7 +9,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } - OrSymbol = T.type_alias { T.any(Symbol, 
OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index 27f51058..722831af 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -15,7 +15,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 6119c6e4..75251556 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -37,7 +37,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) WEB_SEARCH_PREVIEW = diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 86e24a9b..310805df 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -65,7 +65,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) @@ -85,7 +86,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 4e2b1c2b..0c8b5a11 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -10,7 +10,7 @@ module OpenAI end TaggedSymbol = T.type_alias { T.all(Symbol, 
OpenAI::Models::ResponsesModel) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::ResponsesModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ResponsesModel::TaggedSymbol) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::TaggedSymbol) O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 129170d8..fc61f0b1 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -96,7 +96,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::Upload::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Upload::Status::TaggedSymbol) } PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index fae6b032..e57a4fb8 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -168,7 +168,7 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStore::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStore::Status::TaggedSymbol) } EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index a46b89ef..819d2785 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -76,7 +76,8 @@ module OpenAI extend OpenAI::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index c6d6cc94..85d03442 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -145,7 +145,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 4baabfad..eefd75ff 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ 
b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -92,7 +92,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 4cba5c1a..311c5591 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -100,7 +100,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) @@ -123,7 +123,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 0043569a..791c45a1 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -86,7 +86,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) @@ -105,7 +105,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index ad610fcb..7707a463 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ 
b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -162,7 +162,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) @@ -185,7 +185,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 09d4693b..c16243ce 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -133,7 +133,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } OrSymbol = - T.type_alias { T.any(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) From 54442e81d84d4c44d81b23ca9d92d643e98a2b88 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 23:13:44 +0000 Subject: [PATCH 081/295] feat(api): manual updates (#83) --- .stats.yml | 4 ++-- README.md | 2 +- .../audio/transcription_create_params.rb | 2 +- .../audio/transcription_create_response.rb | 2 +- .../beta/thread_create_and_run_params.rb | 2 +- lib/openai/models/beta/threads/run.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/models/chat/chat_completion.rb | 2 +- .../models/chat/completion_create_params.rb | 2 +- lib/openai/models/completion.rb | 2 +- lib/openai/models/completion_create_params.rb | 2 +- lib/openai/models/responses/response.rb | 2 +- .../responses/response_create_params.rb | 2 +- lib/openai/resources/audio/transcriptions.rb | 6 ++--- lib/openai/resources/beta/threads.rb | 6 ++--- lib/openai/resources/beta/threads/runs.rb | 12 +++++----- lib/openai/resources/chat/completions.rb | 6 ++--- lib/openai/resources/completions.rb | 6 ++--- lib/openai/resources/responses.rb | 6 ++--- .../openai/resources/audio/transcriptions.rbi | 10 ++++----- rbi/lib/openai/resources/beta/threads.rbi | 10 ++++----- .../openai/resources/beta/threads/runs.rbi | 22 +++++++++---------- rbi/lib/openai/resources/chat/completions.rbi | 10 ++++----- rbi/lib/openai/resources/completions.rbi | 10 ++++----- rbi/lib/openai/resources/responses.rbi | 10 ++++----- sig/openai/resources/audio/transcriptions.rbs | 2 +- sig/openai/resources/beta/threads.rbs | 2 +- 
sig/openai/resources/beta/threads/runs.rbs | 4 ++-- sig/openai/resources/chat/completions.rbs | 2 +- sig/openai/resources/completions.rbs | 2 +- sig/openai/resources/responses.rbs | 2 +- 32 files changed, 78 insertions(+), 80 deletions(-) diff --git a/.stats.yml b/.stats.yml index b21d5dae..c16913f8 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6663c59193eb95b201e492de17dcbd5e126ba03d18ce66287a3e2c632ca56fe7.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: 2daae06cc598821ccf87201de0861e40 +config_hash: 31a12443afeef2933b34e2de23c40954 diff --git a/README.md b/README.md index 98c8fe21..316ac3f6 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ end We provide support for streaming responses using Server Side Events (SSE). ```ruby -stream = openai.chat.completions.create_streaming( +stream = openai.chat.completions.stream_raw( messages: [{ role: "user", content: "Say this is a test" diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 9326a51f..ee1640c6 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -5,7 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Transcriptions#create # - # @see OpenAI::Resources::Audio::Transcriptions#create_streaming + # @see OpenAI::Resources::Audio::Transcriptions#stream_raw class TranscriptionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index be3c624c..efebb7c6 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -8,7 +8,7 @@ module Audio # # @see OpenAI::Resources::Audio::Transcriptions#create # - # @see OpenAI::Resources::Audio::Transcriptions#create_streaming + # @see OpenAI::Resources::Audio::Transcriptions#stream_raw module TranscriptionCreateResponse extend OpenAI::Union diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index d1a26adb..495e3cd1 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -5,7 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#create_and_run # - # @see OpenAI::Resources::Beta::Threads#create_and_run_streaming + # @see OpenAI::Resources::Beta::Threads#stream_raw class ThreadCreateAndRunParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 66e2e6a8..9b21a4a0 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw class Run < OpenAI::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index d0c690be..a52ec57a 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw class RunCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 8ed57b07..1ab6c2f2 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 2dcdfd05..5c66a291 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#create_streaming + # @see OpenAI::Resources::Chat::Completions#stream_raw class ChatCompletion < OpenAI::BaseModel # @!attribute id # A unique identifier for the chat completion. diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index b612cb26..f5cf3bd0 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#create_streaming + # @see OpenAI::Resources::Chat::Completions#stream_raw class CompletionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 8e3ad52a..791a65a3 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Completions#create # - # @see OpenAI::Resources::Completions#create_streaming + # @see OpenAI::Resources::Completions#stream_raw class Completion < OpenAI::BaseModel # @!attribute id # A unique identifier for the completion. 
diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 974ee482..cc824656 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Completions#create # - # @see OpenAI::Resources::Completions#create_streaming + # @see OpenAI::Resources::Completions#stream_raw class CompletionCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index f53f3fc5..b6e167a0 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#create_streaming + # @see OpenAI::Resources::Responses#stream_raw class Response < OpenAI::BaseModel # @!attribute id # Unique identifier for this Response. diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 29abdd2c..55ec73cc 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#create_streaming + # @see OpenAI::Resources::Responses#stream_raw class ResponseCreateParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index c9bf8f4f..2dc49631 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -24,7 +24,7 @@ class Transcriptions def create(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -39,7 +39,7 @@ def create(params) # Transcribes audio into the input language. # - # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @overload stream_raw(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # # @param file [IO, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] @@ -54,7 +54,7 @@ def create(params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Audio::TranscriptionCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." 
diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 6b028b1d..82c8c329 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -121,7 +121,7 @@ def delete(thread_id, params = {}) def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_and_run_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -135,7 +135,7 @@ def create_and_run(params) # Create a thread and run it in one request. # - # @overload create_and_run_streaming(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param assistant_id [String] # @param instructions [String, nil] @@ -157,7 +157,7 @@ def create_and_run(params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams - def create_and_run_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create_and_run` for the non-streaming use case." diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 45bfd146..d662fbb2 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -38,7 +38,7 @@ class Runs def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end query_params = [:include] @@ -54,7 +54,7 @@ def create(thread_id, params) # Create a run. 
# - # @overload create_streaming(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param thread_id [String] # @param assistant_id [String] @@ -79,7 +79,7 @@ def create(thread_id, params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams - def create_streaming(thread_id, params) + def stream_raw(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." @@ -220,7 +220,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#submit_tool_outputs_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -241,7 +241,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @overload submit_tool_outputs_streaming(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -251,7 +251,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def submit_tool_outputs_streaming(run_id, params) + def stream_raw(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 41ffd171..c000fad2 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -65,7 +65,7 @@ class Completions def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -95,7 +95,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
# - # @overload create_streaming(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] # @param model [String, Symbol, OpenAI::Models::ChatModel] @@ -132,7 +132,7 @@ def create(params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index f27ec1e4..62a26a53 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -32,7 +32,7 @@ class Completions def create(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -46,7 +46,7 @@ def create(params) # Creates a completion for the provided prompt and parameters. # - # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) + # @overload stream_raw(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] @@ -70,7 +70,7 @@ def create(params) # @return [OpenAI::Stream] # # @see OpenAI::Models::CompletionCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." 
diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 2e39f489..46de5e29 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -45,7 +45,7 @@ class Responses def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -69,7 +69,7 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create_streaming(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] @@ -93,7 +93,7 @@ def create(params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 688f6c9f..476b325a 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -59,8 +59,8 @@ module OpenAI # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. timestamp_granularities: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -89,7 +89,7 @@ module OpenAI ] ) end - def create_streaming( + def stream_raw( # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, @@ -128,8 +128,8 @@ module OpenAI # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. timestamp_granularities: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. 
stream: true, request_options: {} ) diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index d5a0da4b..114b3406 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -236,8 +236,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` - # or `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -320,7 +320,7 @@ module OpenAI ] ) end - def create_and_run_streaming( + def stream_raw( # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. @@ -409,8 +409,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` - # or `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 296929f4..704f83ce 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -170,8 +170,8 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -259,7 +259,7 @@ module OpenAI ] ) end - def create_streaming( + def stream_raw( # Path param: The ID of the thread to run. thread_id, # Body param: The ID of the @@ -368,8 +368,8 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) @@ -497,9 +497,8 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and - # non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. 
stream: false, request_options: {} ) @@ -548,7 +547,7 @@ module OpenAI ] ) end - def submit_tool_outputs_streaming( + def stream_raw( # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the @@ -557,9 +556,8 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and - # non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 595d5865..0471b372 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -268,8 +268,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -352,7 +352,7 @@ module OpenAI ) .returns(OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end - def create_streaming( + def stream_raw( # A list of messages comprising the conversation so far. Depending on the # [model](https://platform.openai.com/docs/models) you use, different message # types (modalities) are supported, like @@ -536,8 +536,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index b5b3feeb..7c97579b 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -139,8 +139,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -178,7 +178,7 @@ module OpenAI ) .returns(OpenAI::Stream[OpenAI::Models::Completion]) end - def create_streaming( + def stream_raw( # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -282,8 +282,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. 
Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 8a9352b4..4d305614 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -177,8 +177,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -287,7 +287,7 @@ module OpenAI ] ) end - def create_streaming( + def stream_raw( # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -392,8 +392,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index b52531d0..6f8acd12 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -14,7 +14,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response - def create_streaming: ( + def stream_raw: ( file: IO | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 10e58839..7ee78295 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -49,7 +49,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def create_and_run_streaming: ( + def stream_raw: ( assistant_id: String, ?instructions: String?, ?max_completion_tokens: Integer?, diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 24c1f8c5..dae2f06c 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -27,7 +27,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def create_streaming: ( + def stream_raw: ( String thread_id, assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def submit_tool_outputs_streaming: ( + def stream_raw: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], diff --git a/sig/openai/resources/chat/completions.rbs 
b/sig/openai/resources/chat/completions.rbs index 5bd7a8db..b699bc30 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -38,7 +38,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion - def create_streaming: ( + def stream_raw: ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index 42f91241..b48f77df 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -22,7 +22,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Completion - def create_streaming: ( + def stream_raw: ( model: OpenAI::Models::CompletionCreateParams::model, prompt: OpenAI::Models::CompletionCreateParams::prompt?, ?best_of: Integer?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index ba80da78..51041a3b 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -24,7 +24,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Responses::Response - def create_streaming: ( + def stream_raw: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, From fc3a2f73cffbaef1b7c427dd12e1a0273c2e7e98 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 23:19:25 +0000 Subject: [PATCH 082/295] feat(api): manual updates (#84) --- .stats.yml | 2 +- .../models/beta/threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/resources/beta/threads/runs.rb | 6 +++--- rbi/lib/openai/resources/beta/threads/runs.rbi | 6 +++--- sig/openai/resources/beta/threads/runs.rbs | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.stats.yml b/.stats.yml index c16913f8..1f1a1736 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: 31a12443afeef2933b34e2de23c40954 +config_hash: 178ba1bfb1237bf6b94abb3408072aa7 diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 1ab6c2f2..63891c18 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#submit_stream_raw class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index d662fbb2..ebf0f81b 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -220,7 +220,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = 
OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#submit_stream_raw` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -241,7 +241,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @overload stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload submit_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -251,7 +251,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def stream_raw(run_id, params) + def submit_stream_raw(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 704f83ce..ea37f159 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -497,7 +497,7 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # There is no need to provide `stream:`. Instead, use `#submit_stream_raw` or # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} @@ -547,7 +547,7 @@ module OpenAI ] ) end - def stream_raw( + def submit_stream_raw( # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the @@ -556,7 +556,7 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # There is no need to provide `stream:`. Instead, use `#submit_stream_raw` or # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. 
stream: true, request_options: {} diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index dae2f06c..3d21ae09 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def stream_raw: ( + def submit_stream_raw: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], From dc6ab508e24bafbfa1cc6735de3c25f8c92d04f4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 23:51:20 +0000 Subject: [PATCH 083/295] chore: order client variables by "importance" (#85) --- lib/openai/client.rb | 8 ++++---- rbi/lib/openai/client.rbi | 6 +++--- sig/openai/client.rbs | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 02db6417..2bfddd02 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -77,14 +77,14 @@ class Client < OpenAI::Transport::BaseClient # Creates and returns a new client for interacting with the API. # - # @param base_url [String, nil] Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` - # # @param api_key [String, nil] Defaults to `ENV["OPENAI_API_KEY"]` # # @param organization [String, nil] Defaults to `ENV["OPENAI_ORG_ID"]` # # @param project [String, nil] Defaults to `ENV["OPENAI_PROJECT_ID"]` # + # @param base_url [String, nil] Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` + # # @param max_retries [Integer] Max number of retries to attempt after a failed retryable request. # # @param timeout [Float] @@ -93,10 +93,10 @@ class Client < OpenAI::Transport::BaseClient # # @param max_retry_delay [Float] def initialize( - base_url: nil, api_key: ENV["OPENAI_API_KEY"], organization: ENV["OPENAI_ORG_ID"], project: ENV["OPENAI_PROJECT_ID"], + base_url: nil, max_retries: DEFAULT_MAX_RETRIES, timeout: DEFAULT_TIMEOUT_IN_SECONDS, initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY, @@ -105,7 +105,7 @@ def initialize( base_url ||= "https://api.openai.com/v1" if api_key.nil? - raise ArgumentError.new("api_key is required") + raise ArgumentError.new("api_key is required, and can be set via environ: \"OPENAI_API_KEY\"") end headers = { diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 5f320546..41d82819 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -69,10 +69,10 @@ module OpenAI # Creates and returns a new client for interacting with the API. sig do params( - base_url: T.nilable(String), api_key: T.nilable(String), organization: T.nilable(String), project: T.nilable(String), + base_url: T.nilable(String), max_retries: Integer, timeout: Float, initial_retry_delay: Float, @@ -81,14 +81,14 @@ module OpenAI .returns(T.attached_class) end def self.new( - # Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` - base_url: nil, # Defaults to `ENV["OPENAI_API_KEY"]` api_key: ENV["OPENAI_API_KEY"], # Defaults to `ENV["OPENAI_ORG_ID"]` organization: ENV["OPENAI_ORG_ID"], # Defaults to `ENV["OPENAI_PROJECT_ID"]` project: ENV["OPENAI_PROJECT_ID"], + # Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` + base_url: nil, # Max number of retries to attempt after a failed retryable request. 
max_retries: DEFAULT_MAX_RETRIES, timeout: DEFAULT_TIMEOUT_IN_SECONDS, diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index a7a4533d..6b95a137 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -45,10 +45,10 @@ module OpenAI private def auth_headers: -> ::Hash[String, String] def initialize: ( - ?base_url: String?, ?api_key: String?, ?organization: String?, ?project: String?, + ?base_url: String?, ?max_retries: Integer, ?timeout: Float, ?initial_retry_delay: Float, From 1ce256c1fa77fcfe556bc69d82599d4188684603 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 00:31:46 +0000 Subject: [PATCH 084/295] feat: implement `to_json` for base model (#86) --- .rubocop.yml | 3 +-- lib/openai/type/base_model.rb | 10 ++++++++++ lib/openai/util.rb | 16 ++++++---------- rbi/lib/openai/type/base_model.rbi | 8 ++++++++ rbi/lib/openai/util.rbi | 4 ++-- sig/openai/type/base_model.rbs | 4 ++++ sig/openai/util.rbs | 4 ++-- test/openai/util_test.rb | 2 +- 8 files changed, 34 insertions(+), 17 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index df60b4df..d66784a3 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -127,8 +127,7 @@ Naming/ClassAndModuleCamelCase: - "**/*.rbi" Naming/MethodParameterName: - Exclude: - - "**/*.rbi" + Enabled: false Naming/PredicateName: Exclude: diff --git a/lib/openai/type/base_model.rb b/lib/openai/type/base_model.rb index 57f628f0..4162bd9a 100644 --- a/lib/openai/type/base_model.rb +++ b/lib/openai/type/base_model.rb @@ -331,6 +331,16 @@ def deconstruct_keys(keys) .to_h end + # @param a [Object] + # + # @return [String] + def to_json(*a) = self.class.dump(self).to_json(*a) + + # @param a [Object] + # + # @return [String] + def to_yaml(*a) = self.class.dump(self).to_yaml(*a) + # Create a new instance of a model. # # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] diff --git a/lib/openai/util.rb b/lib/openai/util.rb index 8a4b693f..379dddc4 100644 --- a/lib/openai/util.rb +++ b/lib/openai/util.rb @@ -441,7 +441,7 @@ class << self # # @yieldparam [Enumerator::Yielder] # @return [Enumerable] - def string_io(&blk) + def writable_enum(&blk) Enumerator.new do |y| y.define_singleton_method(:write) do self << _1.clone @@ -454,15 +454,13 @@ def string_io(&blk) end class << self - # rubocop:disable Naming/MethodParameterName - # @api private # # @param y [Enumerator::Yielder] # @param boundary [String] # @param key [Symbol, String] # @param val [Object] - private def encode_multipart_formdata(y, boundary:, key:, val:) + private def write_multipart_chunk(y, boundary:, key:, val:) y << "--#{boundary}\r\n" y << "Content-Disposition: form-data" unless key.nil? @@ -494,8 +492,6 @@ class << self y << "\r\n" end - # rubocop:enable Naming/MethodParameterName - # @api private # # @param body [Object] @@ -504,21 +500,21 @@ class << self private def encode_multipart_streaming(body) boundary = SecureRandom.urlsafe_base64(60) - strio = string_io do |y| + strio = writable_enum do |y| case body in Hash body.each do |key, val| case val in Array if val.all? 
{ primitive?(_1) } val.each do |v| - encode_multipart_formdata(y, boundary: boundary, key: key, val: v) + write_multipart_chunk(y, boundary: boundary, key: key, val: v) end else - encode_multipart_formdata(y, boundary: boundary, key: key, val: val) + write_multipart_chunk(y, boundary: boundary, key: key, val: val) end end else - encode_multipart_formdata(y, boundary: boundary, key: nil, val: body) + write_multipart_chunk(y, boundary: boundary, key: nil, val: body) end y << "--#{boundary}--\r\n" end diff --git a/rbi/lib/openai/type/base_model.rbi b/rbi/lib/openai/type/base_model.rbi index e379903e..a35f37f5 100644 --- a/rbi/lib/openai/type/base_model.rbi +++ b/rbi/lib/openai/type/base_model.rbi @@ -176,6 +176,14 @@ module OpenAI def deconstruct_keys(keys) end + sig { params(a: T.anything).returns(String) } + def to_json(*a) + end + + sig { params(a: T.anything).returns(String) } + def to_yaml(*a) + end + # Create a new instance of a model. sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } def self.new(data = {}) diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi index 79ce090a..57b25a19 100644 --- a/rbi/lib/openai/util.rbi +++ b/rbi/lib/openai/util.rbi @@ -198,7 +198,7 @@ module OpenAI class << self sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) } - def string_io(&blk) + def writable_enum(&blk) end end @@ -207,7 +207,7 @@ module OpenAI sig do params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void end - private def encode_multipart_formdata(y, boundary:, key:, val:) + private def write_multipart_chunk(y, boundary:, key:, val:) end # @api private diff --git a/sig/openai/type/base_model.rbs b/sig/openai/type/base_model.rbs index a929a421..cf3041f5 100644 --- a/sig/openai/type/base_model.rbs +++ b/sig/openai/type/base_model.rbs @@ -65,6 +65,10 @@ module OpenAI def deconstruct_keys: (::Array[Symbol]? 
keys) -> ::Hash[Symbol, top] + def to_json: (*top a) -> String + + def to_yaml: (*top a) -> String + def initialize: (?::Hash[Symbol, top] | self data) -> void def inspect: -> String diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs index 97336cd4..1a93a427 100644 --- a/sig/openai/util.rbs +++ b/sig/openai/util.rbs @@ -82,11 +82,11 @@ module OpenAI } -> void end - def self?.string_io: { + def self?.writable_enum: { (Enumerator::Yielder y) -> void } -> Enumerable[String] - def self?.encode_multipart_formdata: ( + def self?.write_multipart_chunk: ( Enumerator::Yielder y, boundary: String, key: Symbol | String, diff --git a/test/openai/util_test.rb b/test/openai/util_test.rb index 476e16af..5d4c1b0c 100644 --- a/test/openai/util_test.rb +++ b/test/openai/util_test.rb @@ -233,7 +233,7 @@ def test_copy_write StringIO.new("abc") => "abc" } cases.each do |input, expected| - enum = OpenAI::Util.string_io do |y| + enum = OpenAI::Util.writable_enum do |y| IO.copy_stream(input, y) end assert_equal(expected, enum.to_a.join) From 62d60a281d6507ae16f18c443dc574df7ec71a1f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 13:09:41 +0000 Subject: [PATCH 085/295] chore: extract error classes into own module (#87) --- README.md | 4 +- lib/openai/errors.rb | 425 ++++++++++--------- lib/openai/stream.rb | 2 +- lib/openai/transport/base_client.rb | 18 +- lib/openai/transport/pooled_net_requester.rb | 2 +- lib/openai/type/base_page.rb | 2 +- rbi/lib/openai/errors.rbi | 310 ++++++++------ rbi/lib/openai/transport/base_client.rbi | 5 +- sig/openai/errors.rbs | 206 +++++---- sig/openai/transport/base_client.rbs | 2 +- test/openai/base_model_test.rb | 2 +- test/openai/client_test.rb | 8 +- 12 files changed, 542 insertions(+), 444 deletions(-) diff --git a/README.md b/README.md index 316ac3f6..fa326448 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ When the library is unable to connect to the API, or if the API returns a non-su ```ruby begin job = openai.fine_tuning.jobs.create(model: "gpt-4o", training_file: "file-abc123") -rescue OpenAI::Error => e +rescue OpenAI::Errors::APIError => e puts(e.status) # 400 end ``` @@ -103,7 +103,7 @@ Error codes are as followed: | HTTP 409 | `ConflictError` | | HTTP 422 | `UnprocessableEntityError` | | HTTP 429 | `RateLimitError` | -| HTTP >=500 | `InternalServerError` | +| HTTP >= 500 | `InternalServerError` | | Other HTTP error | `APIStatusError` | | Timeout | `APITimeoutError` | | Network error | `APIConnectionError` | diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index ad1e9852..bc67421d 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -1,219 +1,256 @@ # frozen_string_literal: true module OpenAI - class Error < StandardError - # @!parse - # # @return [StandardError, nil] - # attr_accessor :cause - end + module Errors + class Error < StandardError + # @!parse + # # @return [StandardError, nil] + # attr_accessor :cause + end - class ConversionError < OpenAI::Error - end + class ConversionError < OpenAI::Errors::Error + end + + class APIError < OpenAI::Errors::Error + # @return [URI::Generic] + attr_accessor :url + + # @return [Integer, nil] + attr_accessor :status + + # @return [Object, nil] + attr_accessor :body + + # @return [String, nil] + attr_accessor :code - class APIError < OpenAI::Error - # @return [URI::Generic] - attr_accessor :url - - # @return [Integer, nil] - attr_accessor :status - - # @return [Object, nil] - attr_accessor :body - - # @return 
[String, nil] - attr_accessor :code - - # @return [String, nil] - attr_accessor :param - - # @return [String, nil] - attr_accessor :type - - # @api private - # - # @param url [URI::Generic] - # @param status [Integer, nil] - # @param body [Object, nil] - # @param request [nil] - # @param response [nil] - # @param message [String, nil] - def initialize(url:, status: nil, body: nil, request: nil, response: nil, message: nil) - @url = url - @status = status - @body = body - @request = request - @response = response - super(message) + # @return [String, nil] + attr_accessor :param + + # @return [String, nil] + attr_accessor :type + + # @api private + # + # @param url [URI::Generic] + # @param status [Integer, nil] + # @param body [Object, nil] + # @param request [nil] + # @param response [nil] + # @param message [String, nil] + def initialize(url:, status: nil, body: nil, request: nil, response: nil, message: nil) + @url = url + @status = status + @body = body + @request = request + @response = response + super(message) + end end - end - class APIConnectionError < OpenAI::APIError - # @!parse - # # @return [nil] - # attr_accessor :status - - # @!parse - # # @return [nil] - # attr_accessor :body - - # @!parse - # # @return [nil] - # attr_accessor :code - - # @!parse - # # @return [nil] - # attr_accessor :param - - # @!parse - # # @return [nil] - # attr_accessor :type - - # @api private - # - # @param url [URI::Generic] - # @param status [nil] - # @param body [nil] - # @param request [nil] - # @param response [nil] - # @param message [String, nil] - def initialize( - url:, - status: nil, - body: nil, - request: nil, - response: nil, - message: "Connection error." - ) - super + class APIConnectionError < OpenAI::Errors::APIError + # @!parse + # # @return [nil] + # attr_accessor :status + + # @!parse + # # @return [nil] + # attr_accessor :body + + # @!parse + # # @return [nil] + # attr_accessor :code + + # @!parse + # # @return [nil] + # attr_accessor :param + + # @!parse + # # @return [nil] + # attr_accessor :type + + # @api private + # + # @param url [URI::Generic] + # @param status [nil] + # @param body [nil] + # @param request [nil] + # @param response [nil] + # @param message [String, nil] + def initialize( + url:, + status: nil, + body: nil, + request: nil, + response: nil, + message: "Connection error." + ) + super + end end - end - class APITimeoutError < OpenAI::APIConnectionError - # @api private - # - # @param url [URI::Generic] - # @param status [nil] - # @param body [nil] - # @param request [nil] - # @param response [nil] - # @param message [String, nil] - def initialize( - url:, - status: nil, - body: nil, - request: nil, - response: nil, - message: "Request timed out." - ) - super + class APITimeoutError < OpenAI::Errors::APIConnectionError + # @api private + # + # @param url [URI::Generic] + # @param status [nil] + # @param body [nil] + # @param request [nil] + # @param response [nil] + # @param message [String, nil] + def initialize( + url:, + status: nil, + body: nil, + request: nil, + response: nil, + message: "Request timed out." 
+ ) + super + end end - end - class APIStatusError < OpenAI::APIError - # @api private - # - # @param url [URI::Generic] - # @param status [Integer] - # @param body [Object, nil] - # @param request [nil] - # @param response [nil] - # @param message [String, nil] - # - # @return [OpenAI::APIStatusError] - def self.for(url:, status:, body:, request:, response:, message: nil) - kwargs = {url: url, status: status, body: body, request: request, response: response, message: message} - - case status - in 400 - OpenAI::BadRequestError.new(**kwargs) - in 401 - OpenAI::AuthenticationError.new(**kwargs) - in 403 - OpenAI::PermissionDeniedError.new(**kwargs) - in 404 - OpenAI::NotFoundError.new(**kwargs) - in 409 - OpenAI::ConflictError.new(**kwargs) - in 422 - OpenAI::UnprocessableEntityError.new(**kwargs) - in 429 - OpenAI::RateLimitError.new(**kwargs) - in (500..) - OpenAI::InternalServerError.new(**kwargs) - else - OpenAI::APIStatusError.new(**kwargs) + class APIStatusError < OpenAI::Errors::APIError + # @api private + # + # @param url [URI::Generic] + # @param status [Integer] + # @param body [Object, nil] + # @param request [nil] + # @param response [nil] + # @param message [String, nil] + # + # @return [OpenAI::Errors::APIStatusError] + def self.for(url:, status:, body:, request:, response:, message: nil) + kwargs = { + url: url, + status: status, + body: body, + request: request, + response: response, + message: message + } + + case status + in 400 + OpenAI::Errors::BadRequestError.new(**kwargs) + in 401 + OpenAI::Errors::AuthenticationError.new(**kwargs) + in 403 + OpenAI::Errors::PermissionDeniedError.new(**kwargs) + in 404 + OpenAI::Errors::NotFoundError.new(**kwargs) + in 409 + OpenAI::Errors::ConflictError.new(**kwargs) + in 422 + OpenAI::Errors::UnprocessableEntityError.new(**kwargs) + in 429 + OpenAI::Errors::RateLimitError.new(**kwargs) + in (500..) 
+ OpenAI::Errors::InternalServerError.new(**kwargs) + else + OpenAI::Errors::APIStatusError.new(**kwargs) + end + end + + # @!parse + # # @return [Integer] + # attr_accessor :status + + # @!parse + # # @return [String, nil] + # attr_accessor :code + + # @!parse + # # @return [String, nil] + # attr_accessor :param + + # @!parse + # # @return [String, nil] + # attr_accessor :type + + # @api private + # + # @param url [URI::Generic] + # @param status [Integer] + # @param body [Object, nil] + # @param request [nil] + # @param response [nil] + # @param message [String, nil] + def initialize(url:, status:, body:, request:, response:, message: nil) + message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } + @code = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :code)) + @param = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :param)) + @type = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :type)) + super( + url: url, + status: status, + body: body, + request: request, + response: response, + message: message&.to_s + ) end end - # @!parse - # # @return [Integer] - # attr_accessor :status - - # @!parse - # # @return [String, nil] - # attr_accessor :code - - # @!parse - # # @return [String, nil] - # attr_accessor :param - - # @!parse - # # @return [String, nil] - # attr_accessor :type - - # @api private - # - # @param url [URI::Generic] - # @param status [Integer] - # @param body [Object, nil] - # @param request [nil] - # @param response [nil] - # @param message [String, nil] - def initialize(url:, status:, body:, request:, response:, message: nil) - message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } - @code = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :code)) - @param = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :param)) - @type = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :type)) - super( - url: url, - status: status, - body: body, - request: request, - response: response, - message: message&.to_s - ) + class BadRequestError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 400 end - end - class BadRequestError < OpenAI::APIStatusError - HTTP_STATUS = 400 - end + class AuthenticationError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 401 + end - class AuthenticationError < OpenAI::APIStatusError - HTTP_STATUS = 401 - end + class PermissionDeniedError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 403 + end - class PermissionDeniedError < OpenAI::APIStatusError - HTTP_STATUS = 403 - end + class NotFoundError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 404 + end - class NotFoundError < OpenAI::APIStatusError - HTTP_STATUS = 404 - end + class ConflictError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 409 + end - class ConflictError < OpenAI::APIStatusError - HTTP_STATUS = 409 - end + class UnprocessableEntityError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 422 + end - class UnprocessableEntityError < OpenAI::APIStatusError - HTTP_STATUS = 422 - end + class RateLimitError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 429 + end - class RateLimitError < OpenAI::APIStatusError - HTTP_STATUS = 429 + class InternalServerError < OpenAI::Errors::APIStatusError + HTTP_STATUS = (500..) + end end - class InternalServerError < OpenAI::APIStatusError - HTTP_STATUS = (500..) 
- end + Error = OpenAI::Errors::Error + + ConversionError = OpenAI::Errors::ConversionError + + APIError = OpenAI::Errors::APIError + + APIStatusError = OpenAI::Errors::APIStatusError + + APIConnectionError = OpenAI::Errors::APIConnectionError + + APITimeoutError = OpenAI::Errors::APITimeoutError + + BadRequestError = OpenAI::Errors::BadRequestError + + AuthenticationError = OpenAI::Errors::AuthenticationError + + PermissionDeniedError = OpenAI::Errors::PermissionDeniedError + + NotFoundError = OpenAI::Errors::NotFoundError + + ConflictError = OpenAI::Errors::ConflictError + + UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError + + RateLimitError = OpenAI::Errors::RateLimitError + + InternalServerError = OpenAI::Errors::InternalServerError end diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb index d3be1bd5..9c6f4244 100644 --- a/lib/openai/stream.rb +++ b/lib/openai/stream.rb @@ -46,7 +46,7 @@ class Stream else "An error occurred during streaming" end - OpenAI::APIError.for( + OpenAI::Errors::APIError.for( url: @url, status: @status, body: body, diff --git a/lib/openai/transport/base_client.rb b/lib/openai/transport/base_client.rb index fb9321c1..3679bf42 100644 --- a/lib/openai/transport/base_client.rb +++ b/lib/openai/transport/base_client.rb @@ -92,7 +92,7 @@ def follow_redirect(request, status:, response_headers:) URI.join(url, response_headers["location"]) rescue ArgumentError message = "Server responded with status #{status} but no valid location header." - raise OpenAI::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) end request = {**request, url: location} @@ -100,7 +100,7 @@ def follow_redirect(request, status:, response_headers:) case [url.scheme, location.scheme] in ["https", "http"] message = "Tried to redirect to a insecure URL" - raise OpenAI::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) else nil end @@ -129,13 +129,13 @@ def follow_redirect(request, status:, response_headers:) # @api private # - # @param status [Integer, OpenAI::APIConnectionError] + # @param status [Integer, OpenAI::Errors::APIConnectionError] # @param stream [Enumerable, nil] def reap_connection!(status, stream:) case status in (..199) | (300..499) stream&.each { next } - in OpenAI::APIConnectionError | (500..) + in OpenAI::Errors::APIConnectionError | (500..) OpenAI::Util.close_fused!(stream) else end @@ -326,7 +326,7 @@ def initialize( # # @param send_retry_header [Boolean] # - # @raise [OpenAI::APIError] + # @raise [OpenAI::Errors::APIError] # @return [Array(Integer, Net::HTTPResponse, Enumerable)] private def send_request(request, redirect_count:, retry_count:, send_retry_header:) url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) @@ -349,7 +349,7 @@ def initialize( self.class.reap_connection!(status, stream: stream) message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." - raise OpenAI::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) in 300..399 self.class.reap_connection!(status, stream: stream) @@ -369,14 +369,14 @@ def initialize( self.class.reap_connection!(status, stream: stream) end - raise OpenAI::APIStatusError.for( + raise OpenAI::Errors::APIStatusError.for( url: url, status: status, body: decoded, request: nil, response: response ) - in (400..) 
| OpenAI::APIConnectionError + in (400..) | OpenAI::Errors::APIConnectionError self.class.reap_connection!(status, stream: stream) delay = retry_delay(response, retry_count: retry_count) @@ -416,7 +416,7 @@ def initialize( # # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options # - # @raise [OpenAI::APIError] + # @raise [OpenAI::Errors::APIError] # @return [Object] def request(req) self.class.validate!(req) diff --git a/lib/openai/transport/pooled_net_requester.rb b/lib/openai/transport/pooled_net_requester.rb index 57c8bf5e..b3259684 100644 --- a/lib/openai/transport/pooled_net_requester.rb +++ b/lib/openai/transport/pooled_net_requester.rb @@ -153,7 +153,7 @@ def execute(request) end end rescue Timeout::Error - raise OpenAI::APITimeoutError + raise OpenAI::Errors::APITimeoutError end conn, _, response = enum.next diff --git a/lib/openai/type/base_page.rb b/lib/openai/type/base_page.rb index 5f9ee0fa..a98b0624 100644 --- a/lib/openai/type/base_page.rb +++ b/lib/openai/type/base_page.rb @@ -29,7 +29,7 @@ module BasePage # @return [Boolean] def next_page? = (raise NotImplementedError) - # @raise [OpenAI::APIError] + # @raise [OpenAI::Errors::APIError] # @return [OpenAI::Type::BasePage] def next_page = (raise NotImplementedError) diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi index e010e283..d04fc102 100644 --- a/rbi/lib/openai/errors.rbi +++ b/rbi/lib/openai/errors.rbi @@ -1,171 +1,201 @@ # typed: strong module OpenAI - class Error < StandardError - sig { returns(T.nilable(StandardError)) } - attr_accessor :cause - end - - class ConversionError < OpenAI::Error - end - - class APIError < OpenAI::Error - sig { returns(URI::Generic) } - attr_accessor :url - - sig { returns(T.nilable(Integer)) } - attr_accessor :status - - sig { returns(T.nilable(T.anything)) } - attr_accessor :body + module Errors + class Error < StandardError + sig { returns(T.nilable(StandardError)) } + attr_accessor :cause + end - sig { returns(T.nilable(String)) } - attr_accessor :code + class ConversionError < OpenAI::Errors::Error + end - sig { returns(T.nilable(String)) } - attr_accessor :param + class APIError < OpenAI::Errors::Error + sig { returns(URI::Generic) } + attr_accessor :url + + sig { returns(T.nilable(Integer)) } + attr_accessor :status + + sig { returns(T.nilable(T.anything)) } + attr_accessor :body + + sig { returns(T.nilable(String)) } + attr_accessor :code + + sig { returns(T.nilable(String)) } + attr_accessor :param + + sig { returns(T.nilable(String)) } + attr_accessor :type + + # @api private + sig do + params( + url: URI::Generic, + status: T.nilable(Integer), + body: T.nilable(Object), + request: NilClass, + response: NilClass, + message: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: nil) + end + end - sig { returns(T.nilable(String)) } - attr_accessor :type + class APIConnectionError < OpenAI::Errors::APIError + sig { void } + attr_accessor :status + + sig { void } + attr_accessor :body + + sig { void } + attr_accessor :code + + sig { void } + attr_accessor :param + + sig { void } + attr_accessor :type + + # @api private + sig do + params( + url: URI::Generic, + status: NilClass, + body: NilClass, + request: NilClass, + response: NilClass, + message: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Connection error.") + end + end - # @api private - sig do - params( - url: 
URI::Generic, - status: T.nilable(Integer), - body: T.nilable(Object), - request: NilClass, - response: NilClass, - message: T.nilable(String) - ) - .returns(T.attached_class) + class APITimeoutError < OpenAI::Errors::APIConnectionError + # @api private + sig do + params( + url: URI::Generic, + status: NilClass, + body: NilClass, + request: NilClass, + response: NilClass, + message: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Request timed out.") + end end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: nil) + + class APIStatusError < OpenAI::Errors::APIError + # @api private + sig do + params( + url: URI::Generic, + status: Integer, + body: T.nilable(Object), + request: NilClass, + response: NilClass, + message: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.for(url:, status:, body:, request:, response:, message: nil) + end + + sig { returns(Integer) } + attr_accessor :status + + sig { returns(T.nilable(String)) } + attr_accessor :code + + sig { returns(T.nilable(String)) } + attr_accessor :param + + sig { returns(T.nilable(String)) } + attr_accessor :type + + # @api private + sig do + params( + url: URI::Generic, + status: Integer, + body: T.nilable(Object), + request: NilClass, + response: NilClass, + message: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new(url:, status:, body:, request:, response:, message: nil) + end end - end - class APIConnectionError < OpenAI::APIError - sig { void } - attr_accessor :status - - sig { void } - attr_accessor :body - - sig { void } - attr_accessor :code - - sig { void } - attr_accessor :param - - sig { void } - attr_accessor :type - - # @api private - sig do - params( - url: URI::Generic, - status: NilClass, - body: NilClass, - request: NilClass, - response: NilClass, - message: T.nilable(String) - ) - .returns(T.attached_class) + class BadRequestError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 400 end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Connection error.") + + class AuthenticationError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 401 end - end - class APITimeoutError < OpenAI::APIConnectionError - # @api private - sig do - params( - url: URI::Generic, - status: NilClass, - body: NilClass, - request: NilClass, - response: NilClass, - message: T.nilable(String) - ) - .returns(T.attached_class) + class PermissionDeniedError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 403 end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Request timed out.") + + class NotFoundError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 404 end - end - class APIStatusError < OpenAI::APIError - # @api private - sig do - params( - url: URI::Generic, - status: Integer, - body: T.nilable(Object), - request: NilClass, - response: NilClass, - message: T.nilable(String) - ) - .returns(T.attached_class) + class ConflictError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 409 end - def self.for(url:, status:, body:, request:, response:, message: nil) + + class UnprocessableEntityError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 422 end - sig { returns(Integer) } - attr_accessor :status - - sig { returns(T.nilable(String)) } - attr_accessor :code - - sig { returns(T.nilable(String)) } - attr_accessor :param - - sig { returns(T.nilable(String)) } - attr_accessor :type - - # @api private - sig do - params( - 
url: URI::Generic, - status: Integer, - body: T.nilable(Object), - request: NilClass, - response: NilClass, - message: T.nilable(String) - ) - .returns(T.attached_class) + class RateLimitError < OpenAI::Errors::APIStatusError + HTTP_STATUS = 429 end - def self.new(url:, status:, body:, request:, response:, message: nil) + + class InternalServerError < OpenAI::Errors::APIStatusError + HTTP_STATUS = T.let((500..), T::Range[Integer]) end end - class BadRequestError < OpenAI::APIStatusError - HTTP_STATUS = 400 - end + Error = OpenAI::Errors::Error - class AuthenticationError < OpenAI::APIStatusError - HTTP_STATUS = 401 - end + ConversionError = OpenAI::Errors::ConversionError - class PermissionDeniedError < OpenAI::APIStatusError - HTTP_STATUS = 403 - end + APIError = OpenAI::Errors::APIError - class NotFoundError < OpenAI::APIStatusError - HTTP_STATUS = 404 - end + APIStatusError = OpenAI::Errors::APIStatusError - class ConflictError < OpenAI::APIStatusError - HTTP_STATUS = 409 - end + APIConnectionError = OpenAI::Errors::APIConnectionError - class UnprocessableEntityError < OpenAI::APIStatusError - HTTP_STATUS = 422 - end + APITimeoutError = OpenAI::Errors::APITimeoutError - class RateLimitError < OpenAI::APIStatusError - HTTP_STATUS = 429 - end + BadRequestError = OpenAI::Errors::BadRequestError - class InternalServerError < OpenAI::APIStatusError - HTTP_STATUS = T.let((500..), T::Range[Integer]) - end + AuthenticationError = OpenAI::Errors::AuthenticationError + + PermissionDeniedError = OpenAI::Errors::PermissionDeniedError + + NotFoundError = OpenAI::Errors::NotFoundError + + ConflictError = OpenAI::Errors::ConflictError + + UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError + + RateLimitError = OpenAI::Errors::RateLimitError + + InternalServerError = OpenAI::Errors::InternalServerError end diff --git a/rbi/lib/openai/transport/base_client.rbi b/rbi/lib/openai/transport/base_client.rbi index 7deae432..c8afd87e 100644 --- a/rbi/lib/openai/transport/base_client.rbi +++ b/rbi/lib/openai/transport/base_client.rbi @@ -82,9 +82,10 @@ module OpenAI # @api private sig do params( - status: T.any(Integer, OpenAI::APIConnectionError), + status: T.any(Integer, OpenAI::Errors::APIConnectionError), stream: T.nilable(T::Enumerable[String]) - ).void + ) + .void end def reap_connection!(status, stream:) end diff --git a/sig/openai/errors.rbs b/sig/openai/errors.rbs index 6d5804f3..8dc6afce 100644 --- a/sig/openai/errors.rbs +++ b/sig/openai/errors.rbs @@ -1,105 +1,135 @@ module OpenAI - class Error < StandardError - attr_accessor cause: StandardError? + module Errors + class Error < StandardError + attr_accessor cause: StandardError? + end + + class ConversionError < OpenAI::Errors::Error + end + + class APIError < OpenAI::Errors::Error + attr_accessor url: URI::Generic + + attr_accessor status: Integer? + + attr_accessor body: top? + + attr_accessor code: String? + + attr_accessor param: String? + + attr_accessor type: String? + + def initialize: ( + url: URI::Generic, + ?status: Integer?, + ?body: Object?, + ?request: nil, + ?response: nil, + ?message: String? + ) -> void + end + + class APIConnectionError < OpenAI::Errors::APIError + def initialize: ( + url: URI::Generic, + ?status: nil, + ?body: nil, + ?request: nil, + ?response: nil, + ?message: String? + ) -> void + end + + class APITimeoutError < OpenAI::Errors::APIConnectionError + def initialize: ( + url: URI::Generic, + ?status: nil, + ?body: nil, + ?request: nil, + ?response: nil, + ?message: String? 
+ ) -> void + end + + class APIStatusError < OpenAI::Errors::APIError + def self.for: ( + url: URI::Generic, + status: Integer, + body: Object?, + request: nil, + response: nil, + ?message: String? + ) -> instance + + def initialize: ( + url: URI::Generic, + status: Integer, + body: Object?, + request: nil, + response: nil, + ?message: String? + ) -> void + end + + class BadRequestError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 400 + end + + class AuthenticationError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 401 + end + + class PermissionDeniedError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 403 + end + + class NotFoundError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 404 + end + + class ConflictError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 409 + end + + class UnprocessableEntityError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 422 + end + + class RateLimitError < OpenAI::Errors::APIStatusError + HTTP_STATUS: 429 + end + + class InternalServerError < OpenAI::Errors::APIStatusError + HTTP_STATUS: Range[Integer] + end end - class ConversionError < OpenAI::Error - end - - class APIError < OpenAI::Error - attr_accessor url: URI::Generic - - attr_accessor status: Integer? + class Error = OpenAI::Errors::Error - attr_accessor body: top? + class ConversionError = OpenAI::Errors::ConversionError - attr_accessor code: String? + class APIError = OpenAI::Errors::APIError - attr_accessor param: String? + class APIStatusError = OpenAI::Errors::APIStatusError - attr_accessor type: String? - - def initialize: ( - url: URI::Generic, - ?status: Integer?, - ?body: Object?, - ?request: nil, - ?response: nil, - ?message: String? - ) -> void - end - - class APIConnectionError < OpenAI::APIError - def initialize: ( - url: URI::Generic, - ?status: nil, - ?body: nil, - ?request: nil, - ?response: nil, - ?message: String? - ) -> void - end - - class APITimeoutError < OpenAI::APIConnectionError - def initialize: ( - url: URI::Generic, - ?status: nil, - ?body: nil, - ?request: nil, - ?response: nil, - ?message: String? - ) -> void - end + class APIConnectionError = OpenAI::Errors::APIConnectionError - class APIStatusError < OpenAI::APIError - def self.for: ( - url: URI::Generic, - status: Integer, - body: Object?, - request: nil, - response: nil, - ?message: String? - ) -> instance - - def initialize: ( - url: URI::Generic, - status: Integer, - body: Object?, - request: nil, - response: nil, - ?message: String? 
- ) -> void - end + class APITimeoutError = OpenAI::Errors::APITimeoutError - class BadRequestError < OpenAI::APIStatusError - HTTP_STATUS: 400 - end + class BadRequestError = OpenAI::Errors::BadRequestError - class AuthenticationError < OpenAI::APIStatusError - HTTP_STATUS: 401 - end + class AuthenticationError = OpenAI::Errors::AuthenticationError - class PermissionDeniedError < OpenAI::APIStatusError - HTTP_STATUS: 403 - end + class PermissionDeniedError = OpenAI::Errors::PermissionDeniedError - class NotFoundError < OpenAI::APIStatusError - HTTP_STATUS: 404 - end + class NotFoundError = OpenAI::Errors::NotFoundError - class ConflictError < OpenAI::APIStatusError - HTTP_STATUS: 409 - end + class ConflictError = OpenAI::Errors::ConflictError - class UnprocessableEntityError < OpenAI::APIStatusError - HTTP_STATUS: 422 - end + class UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError - class RateLimitError < OpenAI::APIStatusError - HTTP_STATUS: 429 - end + class RateLimitError = OpenAI::Errors::RateLimitError - class InternalServerError < OpenAI::APIStatusError - HTTP_STATUS: Range[Integer] - end + class InternalServerError = OpenAI::Errors::InternalServerError end diff --git a/sig/openai/transport/base_client.rbs b/sig/openai/transport/base_client.rbs index 4ae59a76..724f235c 100644 --- a/sig/openai/transport/base_client.rbs +++ b/sig/openai/transport/base_client.rbs @@ -47,7 +47,7 @@ module OpenAI ) -> OpenAI::Transport::BaseClient::request_input def self.reap_connection!: ( - Integer | OpenAI::APIConnectionError status, + Integer | OpenAI::Errors::APIConnectionError status, stream: Enumerable[String]? ) -> void diff --git a/test/openai/base_model_test.rb b/test/openai/base_model_test.rb index 7238cd06..737cb8fa 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/base_model_test.rb @@ -390,7 +390,7 @@ def test_accessors tap do target.public_send(accessor) flunk - rescue OpenAI::ConversionError => e + rescue OpenAI::Errors::ConversionError => e assert_kind_of(expect, e.cause) end else diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index cbb0c21d..6b35af97 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -199,7 +199,7 @@ def test_client_redirect_307 requester = MockRequester.new(307, {"location" => "/redirected"}, {}) openai.requester = requester - assert_raises(OpenAI::APIConnectionError) do + assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", @@ -221,7 +221,7 @@ def test_client_redirect_303 requester = MockRequester.new(303, {"location" => "/redirected"}, {}) openai.requester = requester - assert_raises(OpenAI::APIConnectionError) do + assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", @@ -240,7 +240,7 @@ def test_client_redirect_auth_keep_same_origin requester = MockRequester.new(307, {"location" => "/redirected"}, {}) openai.requester = requester - assert_raises(OpenAI::APIConnectionError) do + assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", @@ -259,7 +259,7 @@ def test_client_redirect_auth_strip_cross_origin requester = MockRequester.new(307, {"location" => "https://example.com/redirected"}, {}) openai.requester = requester - assert_raises(OpenAI::APIConnectionError) do + 
assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], model: :"o3-mini", From 54d827044b19da6a3b450f403fd0de0766b74e90 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 14:50:48 +0000 Subject: [PATCH 086/295] feat(api): manual updates (#88) --- .stats.yml | 2 +- .../models/beta/threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/resources/beta/threads/runs.rb | 6 +++--- rbi/lib/openai/resources/beta/threads/runs.rbi | 6 +++--- sig/openai/resources/beta/threads/runs.rbs | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.stats.yml b/.stats.yml index 1f1a1736..b80d385d 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: 178ba1bfb1237bf6b94abb3408072aa7 +config_hash: 578c5bff4208d560c0c280f13324409f diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 63891c18..168c457d 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#submit_stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_stream_raw class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse # extend OpenAI::Type::RequestParameters::Converter diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index ebf0f81b..aa7f72c0 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -220,7 +220,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#submit_stream_raw` for the streaming use case." + message = "Please use `#submit_tool_stream_raw` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -241,7 +241,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @overload submit_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload submit_tool_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -251,7 +251,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def submit_stream_raw(run_id, params) + def submit_tool_stream_raw(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." 
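For callers migrating across this rename, only the method name changes; the parameters and the streaming return value are the same. A minimal usage sketch, assuming an already-constructed client, placeholder IDs, a `tool_call_id`/`output` hash matching the `ToolOutput` params model, and that the returned `OpenAI::Stream` is enumerable as elsewhere in this SDK:

    # previously: openai.beta.threads.runs.submit_stream_raw(...)
    stream = openai.beta.threads.runs.submit_tool_stream_raw(
      "run_123",                # hypothetical run ID
      thread_id: "thread_123",  # hypothetical thread ID
      tool_outputs: [{tool_call_id: "call_123", output: "70 degrees and sunny"}]
    )
    stream.each { |event| puts(event) }  # consume run events as they arrive
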
diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index ea37f159..dd25ce47 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -497,7 +497,7 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use `#submit_stream_raw` or + # There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} @@ -547,7 +547,7 @@ module OpenAI ] ) end - def submit_stream_raw( + def submit_tool_stream_raw( # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the @@ -556,7 +556,7 @@ module OpenAI thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, - # There is no need to provide `stream:`. Instead, use `#submit_stream_raw` or + # There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 3d21ae09..0b24cbc7 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def submit_stream_raw: ( + def submit_tool_stream_raw: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], From 698a5cfc09bba820e368d62861dbe4f89685ecd4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 18:38:35 +0000 Subject: [PATCH 087/295] fix!: use descriptive prefixes for enum names that start with otherwise illegal identifiers (#89) --- lib/openai/models/batch_create_params.rb | 2 +- lib/openai/models/image_create_variation_params.rb | 6 +++--- lib/openai/models/image_edit_params.rb | 6 +++--- lib/openai/models/image_generate_params.rb | 10 +++++----- rbi/lib/openai/models/batch_create_params.rbi | 2 +- .../openai/models/image_create_variation_params.rbi | 6 +++--- rbi/lib/openai/models/image_edit_params.rbi | 6 +++--- rbi/lib/openai/models/image_generate_params.rbi | 10 +++++----- sig/openai/models/batch_create_params.rbs | 2 +- sig/openai/models/image_create_variation_params.rbs | 6 +++--- sig/openai/models/image_edit_params.rbs | 6 +++--- sig/openai/models/image_generate_params.rbs | 10 +++++----- 12 files changed, 36 insertions(+), 36 deletions(-) diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 91a8de4e..fcb80ecf 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -65,7 +65,7 @@ class BatchCreateParams < OpenAI::BaseModel module CompletionWindow extend OpenAI::Enum - NUMBER_24H = :"24h" + COMPLETION_WINDOW_24H = :"24h" finalize! 
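The breaking rename in this patch exists because enum values such as `"24h"` and `"256x256"` begin with digits, which cannot start a Ruby constant, so the generator had been falling back to a generic `NUMBER_` prefix; the new scheme prefixes with the enum's own name instead (`COMPLETION_WINDOW_24H` above, with the image `Size` enums below getting the same treatment). The underlying symbol values are unchanged, so only constant references need updating. A brief sketch of the caller-visible difference, with hypothetical IDs:

    # before: OpenAI::Models::BatchCreateParams::CompletionWindow::NUMBER_24H   # => :"24h"
    OpenAI::Models::BatchCreateParams::CompletionWindow::COMPLETION_WINDOW_24H  # => :"24h"

    # passing the raw symbol still works and sidesteps the rename entirely:
    openai.batches.create(
      completion_window: :"24h",
      endpoint: "/v1/chat/completions",
      input_file_id: "file_123"  # hypothetical file ID
    )
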
diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index b958af96..fd6b93ba 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -107,9 +107,9 @@ module ResponseFormat module Size extend OpenAI::Enum - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + SIZE_256X256 = :"256x256" + SIZE_512X512 = :"512x512" + SIZE_1024X1024 = :"1024x1024" finalize! diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index d9678ace..77954285 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -138,9 +138,9 @@ module ResponseFormat module Size extend OpenAI::Enum - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" + SIZE_256X256 = :"256x256" + SIZE_512X512 = :"512x512" + SIZE_1024X1024 = :"1024x1024" finalize! diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 52810d19..fbd3db67 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -157,11 +157,11 @@ module ResponseFormat module Size extend OpenAI::Enum - NUMBER_256X256 = :"256x256" - NUMBER_512X512 = :"512x512" - NUMBER_1024X1024 = :"1024x1024" - NUMBER_1792X1024 = :"1792x1024" - NUMBER_1024X1792 = :"1024x1792" + SIZE_256X256 = :"256x256" + SIZE_512X512 = :"512x512" + SIZE_1024X1024 = :"1024x1024" + SIZE_1792X1024 = :"1792x1024" + SIZE_1024X1792 = :"1024x1792" finalize! diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index b5f6c9e7..ebc9f457 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -76,7 +76,7 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } - NUMBER_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) + COMPLETION_WINDOW_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) } def self.values diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 735f07ef..39818554 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -118,9 +118,9 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) } def self.values diff --git 
a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 62dcb5bc..1305d68a 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -136,9 +136,9 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) } def self.values diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 9e3de432..78364258 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -158,11 +158,11 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } - NUMBER_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - NUMBER_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - NUMBER_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - NUMBER_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - NUMBER_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) } def self.values diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index a660d80f..1220fcdc 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -36,7 +36,7 @@ module OpenAI module CompletionWindow extend OpenAI::Enum - NUMBER_24H: :"24h" + COMPLETION_WINDOW_24H: :"24h" def self?.values: -> ::Array[OpenAI::Models::BatchCreateParams::completion_window] end diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 23a37ecd..0fcca83b 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -65,9 +65,9 @@ module OpenAI module Size extend OpenAI::Enum - NUMBER_256X256: :"256x256" - NUMBER_512X512: :"512x512" - NUMBER_1024X1024: :"1024x1024" + SIZE_256X256: :"256x256" + SIZE_512X512: :"512x512" + SIZE_1024X1024: :"1024x1024" def self?.values: -> 
::Array[OpenAI::Models::ImageCreateVariationParams::size] end diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 12dc8ce9..3da9c277 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -75,9 +75,9 @@ module OpenAI module Size extend OpenAI::Enum - NUMBER_256X256: :"256x256" - NUMBER_512X512: :"512x512" - NUMBER_1024X1024: :"1024x1024" + SIZE_256X256: :"256x256" + SIZE_512X512: :"512x512" + SIZE_1024X1024: :"1024x1024" def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::size] end diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index b9283099..ca757dea 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -89,11 +89,11 @@ module OpenAI module Size extend OpenAI::Enum - NUMBER_256X256: :"256x256" - NUMBER_512X512: :"512x512" - NUMBER_1024X1024: :"1024x1024" - NUMBER_1792X1024: :"1792x1024" - NUMBER_1024X1792: :"1024x1792" + SIZE_256X256: :"256x256" + SIZE_512X512: :"512x512" + SIZE_1024X1024: :"1024x1024" + SIZE_1792X1024: :"1792x1024" + SIZE_1024X1792: :"1024x1792" def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::size] end From 2a6776f2028028bd98be8d44dd8fcc02f9384137 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 16:23:49 +0000 Subject: [PATCH 088/295] chore: move private classes into internal module (#90) --- lib/openai.rb | 36 +- lib/openai/aliases.rb | 19 + lib/openai/client.rb | 2 +- lib/openai/cursor_page.rb | 92 --- lib/openai/errors.rb | 8 +- lib/openai/internal/cursor_page.rb | 94 +++ lib/openai/internal/page.rb | 88 +++ lib/openai/internal/stream.rb | 69 ++ lib/openai/internal/transport/base_client.rb | 461 +++++++++++ .../transport/pooled_net_requester.rb | 184 +++++ lib/openai/internal/type/array_of.rb | 114 +++ lib/openai/internal/type/base_model.rb | 376 +++++++++ lib/openai/internal/type/base_page.rb | 63 ++ lib/openai/internal/type/base_stream.rb | 65 ++ lib/openai/internal/type/boolean_model.rb | 54 ++ lib/openai/internal/type/converter.rb | 219 ++++++ lib/openai/internal/type/enum.rb | 107 +++ lib/openai/internal/type/hash_of.rb | 140 ++++ .../internal/type/request_parameters.rb | 40 + lib/openai/internal/type/union.rb | 212 +++++ lib/openai/internal/type/unknown.rb | 58 ++ lib/openai/internal/util.rb | 724 ++++++++++++++++++ .../models/audio/speech_create_params.rb | 4 +- .../audio/transcription_create_params.rb | 4 +- .../models/audio/translation_create_params.rb | 4 +- lib/openai/models/batch_cancel_params.rb | 4 +- lib/openai/models/batch_create_params.rb | 4 +- lib/openai/models/batch_list_params.rb | 4 +- lib/openai/models/batch_retrieve_params.rb | 4 +- .../models/beta/assistant_create_params.rb | 4 +- .../models/beta/assistant_delete_params.rb | 4 +- .../models/beta/assistant_list_params.rb | 4 +- .../models/beta/assistant_retrieve_params.rb | 4 +- .../models/beta/assistant_update_params.rb | 4 +- .../beta/thread_create_and_run_params.rb | 4 +- .../models/beta/thread_create_params.rb | 4 +- .../models/beta/thread_delete_params.rb | 4 +- .../models/beta/thread_retrieve_params.rb | 4 +- .../models/beta/thread_update_params.rb | 4 +- .../beta/threads/message_create_params.rb | 4 +- .../beta/threads/message_delete_params.rb | 4 +- .../beta/threads/message_list_params.rb | 4 +- .../beta/threads/message_retrieve_params.rb | 4 +- 
.../beta/threads/message_update_params.rb | 4 +- .../models/beta/threads/run_cancel_params.rb | 4 +- .../models/beta/threads/run_create_params.rb | 4 +- .../models/beta/threads/run_list_params.rb | 4 +- .../beta/threads/run_retrieve_params.rb | 4 +- .../threads/run_submit_tool_outputs_params.rb | 4 +- .../models/beta/threads/run_update_params.rb | 4 +- .../beta/threads/runs/step_list_params.rb | 4 +- .../beta/threads/runs/step_retrieve_params.rb | 4 +- .../models/chat/completion_create_params.rb | 4 +- .../models/chat/completion_delete_params.rb | 4 +- .../models/chat/completion_list_params.rb | 4 +- .../models/chat/completion_retrieve_params.rb | 4 +- .../models/chat/completion_update_params.rb | 4 +- .../chat/completions/message_list_params.rb | 4 +- lib/openai/models/completion_create_params.rb | 4 +- lib/openai/models/embedding_create_params.rb | 4 +- lib/openai/models/file_content_params.rb | 4 +- lib/openai/models/file_create_params.rb | 4 +- lib/openai/models/file_delete_params.rb | 4 +- lib/openai/models/file_list_params.rb | 4 +- lib/openai/models/file_retrieve_params.rb | 4 +- .../models/fine_tuning/job_cancel_params.rb | 4 +- .../models/fine_tuning/job_create_params.rb | 4 +- .../fine_tuning/job_list_events_params.rb | 4 +- .../models/fine_tuning/job_list_params.rb | 4 +- .../models/fine_tuning/job_retrieve_params.rb | 4 +- .../jobs/checkpoint_list_params.rb | 4 +- .../models/image_create_variation_params.rb | 4 +- lib/openai/models/image_edit_params.rb | 4 +- lib/openai/models/image_generate_params.rb | 4 +- lib/openai/models/model_delete_params.rb | 4 +- lib/openai/models/model_list_params.rb | 4 +- lib/openai/models/model_retrieve_params.rb | 4 +- lib/openai/models/moderation_create_params.rb | 4 +- .../responses/input_item_list_params.rb | 4 +- .../responses/response_create_params.rb | 4 +- .../responses/response_delete_params.rb | 4 +- .../responses/response_retrieve_params.rb | 4 +- lib/openai/models/upload_cancel_params.rb | 4 +- lib/openai/models/upload_complete_params.rb | 4 +- lib/openai/models/upload_create_params.rb | 4 +- .../models/uploads/part_create_params.rb | 4 +- .../models/vector_store_create_params.rb | 4 +- .../models/vector_store_delete_params.rb | 4 +- lib/openai/models/vector_store_list_params.rb | 4 +- .../models/vector_store_retrieve_params.rb | 4 +- .../models/vector_store_search_params.rb | 4 +- .../models/vector_store_update_params.rb | 4 +- .../vector_stores/file_batch_cancel_params.rb | 4 +- .../vector_stores/file_batch_create_params.rb | 4 +- .../file_batch_list_files_params.rb | 4 +- .../file_batch_retrieve_params.rb | 4 +- .../vector_stores/file_content_params.rb | 4 +- .../vector_stores/file_create_params.rb | 4 +- .../vector_stores/file_delete_params.rb | 4 +- .../models/vector_stores/file_list_params.rb | 4 +- .../vector_stores/file_retrieve_params.rb | 4 +- .../vector_stores/file_update_params.rb | 4 +- lib/openai/page.rb | 86 --- lib/openai/resources/audio/transcriptions.rb | 4 +- lib/openai/resources/batches.rb | 4 +- lib/openai/resources/beta/assistants.rb | 4 +- lib/openai/resources/beta/threads.rb | 4 +- lib/openai/resources/beta/threads/messages.rb | 4 +- lib/openai/resources/beta/threads/runs.rb | 12 +- .../resources/beta/threads/runs/steps.rb | 4 +- lib/openai/resources/chat/completions.rb | 8 +- .../resources/chat/completions/messages.rb | 4 +- lib/openai/resources/completions.rb | 4 +- lib/openai/resources/files.rb | 4 +- lib/openai/resources/fine_tuning/jobs.rb | 8 +- .../resources/fine_tuning/jobs/checkpoints.rb | 4 +- 
lib/openai/resources/models.rb | 4 +- lib/openai/resources/responses.rb | 4 +- lib/openai/resources/responses/input_items.rb | 4 +- lib/openai/resources/vector_stores.rb | 8 +- .../resources/vector_stores/file_batches.rb | 4 +- lib/openai/resources/vector_stores/files.rb | 8 +- lib/openai/stream.rb | 67 -- lib/openai/transport/base_client.rb | 459 ----------- lib/openai/transport/pooled_net_requester.rb | 182 ----- lib/openai/type.rb | 23 - lib/openai/type/array_of.rb | 112 --- lib/openai/type/base_model.rb | 367 --------- lib/openai/type/base_page.rb | 61 -- lib/openai/type/base_stream.rb | 63 -- lib/openai/type/boolean_model.rb | 52 -- lib/openai/type/converter.rb | 217 ------ lib/openai/type/enum.rb | 105 --- lib/openai/type/hash_of.rb | 132 ---- lib/openai/type/request_parameters.rb | 38 - lib/openai/type/union.rb | 210 ----- lib/openai/type/unknown.rb | 56 -- lib/openai/util.rb | 722 ----------------- rbi/lib/openai/aliases.rbi | 19 + rbi/lib/openai/client.rbi | 2 +- rbi/lib/openai/cursor_page.rbi | 19 - rbi/lib/openai/internal/cursor_page.rbi | 21 + rbi/lib/openai/internal/page.rbi | 21 + rbi/lib/openai/internal/stream.rbi | 17 + .../openai/internal/transport/base_client.rbi | 210 +++++ .../transport/pooled_net_requester.rbi | 66 ++ rbi/lib/openai/internal/type/array_of.rbi | 88 +++ rbi/lib/openai/internal/type/base_model.rbi | 206 +++++ rbi/lib/openai/internal/type/base_page.rbi | 40 + rbi/lib/openai/internal/type/base_stream.rbi | 45 ++ .../openai/internal/type/boolean_model.rbi | 43 ++ rbi/lib/openai/internal/type/converter.rbi | 108 +++ rbi/lib/openai/internal/type/enum.rbi | 60 ++ rbi/lib/openai/internal/type/hash_of.rbi | 87 +++ .../internal/type/request_parameters.rbi | 22 + rbi/lib/openai/internal/type/union.rbi | 75 ++ rbi/lib/openai/internal/type/unknown.rbi | 42 + rbi/lib/openai/internal/util.rbi | 291 +++++++ .../models/audio/speech_create_params.rbi | 6 +- rbi/lib/openai/models/audio/transcription.rbi | 6 +- .../audio/transcription_create_params.rbi | 6 +- .../audio/transcription_text_delta_event.rbi | 4 +- .../audio/transcription_text_done_event.rbi | 4 +- .../models/audio/transcription_verbose.rbi | 16 +- .../audio/translation_create_params.rbi | 6 +- .../models/audio/translation_verbose.rbi | 9 +- rbi/lib/openai/models/batch.rbi | 12 +- rbi/lib/openai/models/batch_cancel_params.rbi | 12 +- rbi/lib/openai/models/batch_create_params.rbi | 6 +- rbi/lib/openai/models/batch_list_params.rbi | 6 +- .../openai/models/batch_retrieve_params.rbi | 12 +- rbi/lib/openai/models/beta/assistant.rbi | 16 +- .../models/beta/assistant_create_params.rbi | 52 +- .../models/beta/assistant_delete_params.rbi | 12 +- .../models/beta/assistant_list_params.rbi | 6 +- .../models/beta/assistant_retrieve_params.rbi | 12 +- .../models/beta/assistant_stream_event.rbi | 131 ++-- .../models/beta/assistant_tool_choice.rbi | 9 +- .../models/beta/assistant_update_params.rbi | 40 +- .../openai/models/beta/file_search_tool.rbi | 13 +- rbi/lib/openai/models/beta/function_tool.rbi | 4 +- .../models/beta/message_stream_event.rbi | 32 +- .../models/beta/run_step_stream_event.rbi | 51 +- .../openai/models/beta/run_stream_event.rbi | 40 +- rbi/lib/openai/models/beta/thread.rbi | 14 +- .../beta/thread_create_and_run_params.rbi | 96 ++- .../models/beta/thread_create_params.rbi | 56 +- .../models/beta/thread_delete_params.rbi | 12 +- .../models/beta/thread_retrieve_params.rbi | 12 +- .../models/beta/thread_stream_event.rbi | 4 +- .../models/beta/thread_update_params.rbi | 30 +- 
.../beta/threads/file_citation_annotation.rbi | 10 +- .../file_citation_delta_annotation.rbi | 10 +- .../beta/threads/file_path_annotation.rbi | 4 +- .../threads/file_path_delta_annotation.rbi | 4 +- .../beta/threads/image_file_content_block.rbi | 7 +- .../beta/threads/image_file_delta_block.rbi | 7 +- .../beta/threads/image_url_content_block.rbi | 7 +- .../beta/threads/image_url_delta_block.rbi | 7 +- .../openai/models/beta/threads/message.rbi | 18 +- .../beta/threads/message_create_params.rbi | 16 +- .../beta/threads/message_delete_params.rbi | 12 +- .../models/beta/threads/message_delta.rbi | 4 +- .../beta/threads/message_delta_event.rbi | 4 +- .../beta/threads/message_list_params.rbi | 6 +- .../beta/threads/message_retrieve_params.rbi | 12 +- .../beta/threads/message_update_params.rbi | 6 +- .../required_action_function_tool_call.rbi | 10 +- rbi/lib/openai/models/beta/threads/run.rbi | 45 +- .../models/beta/threads/run_cancel_params.rbi | 12 +- .../models/beta/threads/run_create_params.rbi | 28 +- .../models/beta/threads/run_list_params.rbi | 6 +- .../beta/threads/run_retrieve_params.rbi | 12 +- .../run_submit_tool_outputs_params.rbi | 13 +- .../models/beta/threads/run_update_params.rbi | 6 +- .../runs/code_interpreter_output_image.rbi | 10 +- .../runs/code_interpreter_tool_call.rbi | 10 +- .../runs/code_interpreter_tool_call_delta.rbi | 8 +- .../threads/runs/file_search_tool_call.rbi | 32 +- .../beta/threads/runs/function_tool_call.rbi | 4 +- .../threads/runs/function_tool_call_delta.rbi | 10 +- .../runs/message_creation_step_details.rbi | 4 +- .../models/beta/threads/runs/run_step.rbi | 10 +- .../beta/threads/runs/run_step_delta.rbi | 4 +- .../threads/runs/run_step_delta_event.rbi | 7 +- .../runs/run_step_delta_message_delta.rbi | 4 +- .../beta/threads/runs/step_list_params.rbi | 6 +- .../threads/runs/step_retrieve_params.rbi | 6 +- .../threads/runs/tool_call_delta_object.rbi | 4 +- .../threads/runs/tool_calls_step_details.rbi | 2 +- rbi/lib/openai/models/beta/threads/text.rbi | 2 +- .../beta/threads/text_content_block.rbi | 4 +- .../openai/models/beta/threads/text_delta.rbi | 4 +- .../models/beta/threads/text_delta_block.rbi | 4 +- .../openai/models/chat/chat_completion.rbi | 22 +- ...hat_completion_assistant_message_param.rbi | 26 +- .../models/chat/chat_completion_chunk.rbi | 55 +- .../chat/chat_completion_content_part.rbi | 6 +- .../chat_completion_content_part_image.rbi | 4 +- ...at_completion_content_part_input_audio.rbi | 10 +- ...hat_completion_developer_message_param.rbi | 10 +- .../models/chat/chat_completion_message.rbi | 31 +- .../chat_completion_message_tool_call.rbi | 4 +- .../chat_completion_named_tool_choice.rbi | 4 +- .../chat_completion_prediction_content.rbi | 10 +- .../chat_completion_system_message_param.rbi | 10 +- .../chat/chat_completion_token_logprob.rbi | 2 +- .../models/chat/chat_completion_tool.rbi | 4 +- .../chat_completion_tool_message_param.rbi | 10 +- .../chat_completion_user_message_param.rbi | 7 +- .../models/chat/completion_create_params.rbi | 68 +- .../models/chat/completion_delete_params.rbi | 12 +- .../models/chat/completion_list_params.rbi | 6 +- .../chat/completion_retrieve_params.rbi | 12 +- .../models/chat/completion_update_params.rbi | 6 +- .../chat/completions/message_list_params.rbi | 6 +- rbi/lib/openai/models/completion.rbi | 6 +- rbi/lib/openai/models/completion_choice.rbi | 9 +- .../models/completion_create_params.rbi | 21 +- rbi/lib/openai/models/completion_usage.rbi | 8 +- rbi/lib/openai/models/compound_filter.rbi | 2 +- 
.../models/create_embedding_response.rbi | 6 +- .../openai/models/embedding_create_params.rbi | 15 +- rbi/lib/openai/models/file_content_params.rbi | 12 +- rbi/lib/openai/models/file_create_params.rbi | 6 +- rbi/lib/openai/models/file_delete_params.rbi | 12 +- rbi/lib/openai/models/file_list_params.rbi | 6 +- .../openai/models/file_retrieve_params.rbi | 12 +- .../models/fine_tuning/fine_tuning_job.rbi | 46 +- ...ne_tuning_job_wandb_integration_object.rbi | 9 +- .../models/fine_tuning/job_cancel_params.rbi | 12 +- .../models/fine_tuning/job_create_params.rbi | 52 +- .../fine_tuning/job_list_events_params.rbi | 6 +- .../models/fine_tuning/job_list_params.rbi | 6 +- .../fine_tuning/job_retrieve_params.rbi | 12 +- .../jobs/checkpoint_list_params.rbi | 6 +- .../jobs/fine_tuning_job_checkpoint.rbi | 4 +- rbi/lib/openai/models/function_parameters.rbi | 2 +- .../models/image_create_variation_params.rbi | 6 +- rbi/lib/openai/models/image_edit_params.rbi | 6 +- .../openai/models/image_generate_params.rbi | 6 +- rbi/lib/openai/models/images_response.rbi | 2 +- rbi/lib/openai/models/metadata.rbi | 2 +- rbi/lib/openai/models/model_delete_params.rbi | 12 +- rbi/lib/openai/models/model_list_params.rbi | 12 +- .../openai/models/model_retrieve_params.rbi | 12 +- rbi/lib/openai/models/moderation.rbi | 17 +- .../models/moderation_create_params.rbi | 21 +- .../models/moderation_create_response.rbi | 2 +- .../models/moderation_image_url_input.rbi | 9 +- .../models/response_format_json_schema.rbi | 6 +- .../models/responses/easy_input_message.rbi | 2 +- .../models/responses/file_search_tool.rbi | 8 +- .../responses/input_item_list_params.rbi | 6 +- rbi/lib/openai/models/responses/response.rbi | 29 +- ..._code_interpreter_call_completed_event.rbi | 4 +- ...ode_interpreter_call_in_progress_event.rbi | 4 +- ...de_interpreter_call_interpreting_event.rbi | 4 +- .../response_code_interpreter_tool_call.rbi | 4 +- .../responses/response_completed_event.rbi | 7 +- .../responses/response_computer_tool_call.rbi | 16 +- ...esponse_computer_tool_call_output_item.rbi | 14 +- .../response_content_part_added_event.rbi | 2 +- .../response_content_part_done_event.rbi | 2 +- .../responses/response_create_params.rbi | 24 +- .../responses/response_created_event.rbi | 7 +- .../responses/response_delete_params.rbi | 12 +- .../responses/response_failed_event.rbi | 7 +- .../response_file_search_tool_call.rbi | 2 +- .../responses/response_in_progress_event.rbi | 7 +- .../responses/response_incomplete_event.rbi | 7 +- .../models/responses/response_input.rbi | 5 +- .../models/responses/response_input_item.rbi | 16 +- .../response_input_message_content_list.rbi | 5 +- .../responses/response_input_message_item.rbi | 2 +- .../models/responses/response_item_list.rbi | 2 +- .../response_output_item_added_event.rbi | 2 +- .../response_output_item_done_event.rbi | 2 +- .../responses/response_output_message.rbi | 2 +- .../models/responses/response_output_text.rbi | 2 +- .../responses/response_reasoning_item.rbi | 2 +- .../responses/response_retrieve_params.rbi | 6 +- .../response_text_annotation_delta_event.rbi | 2 +- .../models/responses/response_text_config.rbi | 4 +- .../models/responses/response_usage.rbi | 8 +- .../models/responses/web_search_tool.rbi | 4 +- .../static_file_chunking_strategy_object.rbi | 7 +- ...ic_file_chunking_strategy_object_param.rbi | 7 +- rbi/lib/openai/models/upload.rbi | 4 +- .../openai/models/upload_cancel_params.rbi | 12 +- .../openai/models/upload_complete_params.rbi | 6 +- 
.../openai/models/upload_create_params.rbi | 6 +- .../models/uploads/part_create_params.rbi | 6 +- rbi/lib/openai/models/vector_store.rbi | 11 +- .../models/vector_store_create_params.rbi | 16 +- .../models/vector_store_delete_params.rbi | 12 +- .../models/vector_store_list_params.rbi | 6 +- .../models/vector_store_retrieve_params.rbi | 12 +- .../models/vector_store_search_params.rbi | 16 +- .../models/vector_store_search_response.rbi | 2 +- .../models/vector_store_update_params.rbi | 10 +- .../file_batch_cancel_params.rbi | 6 +- .../file_batch_create_params.rbi | 10 +- .../file_batch_list_files_params.rbi | 6 +- .../file_batch_retrieve_params.rbi | 6 +- .../vector_stores/file_content_params.rbi | 6 +- .../vector_stores/file_create_params.rbi | 10 +- .../vector_stores/file_delete_params.rbi | 6 +- .../models/vector_stores/file_list_params.rbi | 6 +- .../vector_stores/file_retrieve_params.rbi | 6 +- .../vector_stores/file_update_params.rbi | 6 +- .../vector_stores/vector_store_file.rbi | 12 +- .../vector_stores/vector_store_file_batch.rbi | 4 +- rbi/lib/openai/page.rbi | 19 - rbi/lib/openai/request_options.rbi | 2 +- rbi/lib/openai/resources/audio/speech.rbi | 2 +- .../openai/resources/audio/transcriptions.rbi | 6 +- .../openai/resources/audio/translations.rbi | 2 +- rbi/lib/openai/resources/batches.rbi | 20 +- rbi/lib/openai/resources/beta/assistants.rbi | 28 +- rbi/lib/openai/resources/beta/threads.rbi | 48 +- .../resources/beta/threads/messages.rbi | 16 +- .../openai/resources/beta/threads/runs.rbi | 56 +- .../resources/beta/threads/runs/steps.rbi | 6 +- rbi/lib/openai/resources/chat/completions.rbi | 56 +- .../resources/chat/completions/messages.rbi | 4 +- rbi/lib/openai/resources/completions.rbi | 10 +- rbi/lib/openai/resources/embeddings.rbi | 2 +- rbi/lib/openai/resources/files.rbi | 27 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 20 +- .../fine_tuning/jobs/checkpoints.rbi | 4 +- rbi/lib/openai/resources/images.rbi | 6 +- rbi/lib/openai/resources/models.rbi | 18 +- rbi/lib/openai/resources/moderations.rbi | 10 +- rbi/lib/openai/resources/responses.rbi | 30 +- .../resources/responses/input_items.rbi | 4 +- rbi/lib/openai/resources/uploads.rbi | 6 +- rbi/lib/openai/resources/uploads/parts.rbi | 2 +- rbi/lib/openai/resources/vector_stores.rbi | 26 +- .../resources/vector_stores/file_batches.rbi | 12 +- .../openai/resources/vector_stores/files.rbi | 18 +- rbi/lib/openai/stream.rbi | 15 - rbi/lib/openai/transport/base_client.rbi | 205 ----- .../openai/transport/pooled_net_requester.rbi | 64 -- rbi/lib/openai/type.rbi | 23 - rbi/lib/openai/type/array_of.rbi | 82 -- rbi/lib/openai/type/base_model.rbi | 197 ----- rbi/lib/openai/type/base_page.rbi | 38 - rbi/lib/openai/type/base_stream.rbi | 43 -- rbi/lib/openai/type/boolean_model.rbi | 41 - rbi/lib/openai/type/converter.rbi | 99 --- rbi/lib/openai/type/enum.rbi | 58 -- rbi/lib/openai/type/hash_of.rbi | 86 --- rbi/lib/openai/type/request_parameters.rbi | 20 - rbi/lib/openai/type/union.rbi | 66 -- rbi/lib/openai/type/unknown.rbi | 37 - rbi/lib/openai/util.rbi | 280 ------- sig/openai/aliases.rbs | 19 + sig/openai/client.rbs | 2 +- sig/openai/cursor_page.rbs | 11 - sig/openai/internal/cursor_page.rbs | 13 + sig/openai/internal/page.rbs | 13 + sig/openai/internal/stream.rbs | 9 + sig/openai/internal/transport/base_client.rbs | 114 +++ .../transport/pooled_net_requester.rbs | 41 + sig/openai/internal/type/array_of.rbs | 38 + sig/openai/internal/type/base_model.rbs | 79 ++ sig/openai/internal/type/base_page.rbs | 24 + 
sig/openai/internal/type/base_stream.rbs | 27 + sig/openai/internal/type/boolean_model.rbs | 20 + sig/openai/internal/type/converter.rbs | 44 ++ sig/openai/internal/type/enum.rbs | 24 + sig/openai/internal/type/hash_of.rbs | 38 + .../internal/type/request_parameters.rbs | 15 + sig/openai/internal/type/union.rbs | 42 + sig/openai/internal/type/unknown.rbs | 20 + sig/openai/internal/util.rbs | 141 ++++ .../models/audio/speech_create_params.rbs | 6 +- .../audio/transcription_create_params.rbs | 6 +- .../audio/translation_create_params.rbs | 6 +- sig/openai/models/batch_cancel_params.rbs | 6 +- sig/openai/models/batch_create_params.rbs | 6 +- sig/openai/models/batch_list_params.rbs | 7 +- sig/openai/models/batch_retrieve_params.rbs | 7 +- .../models/beta/assistant_create_params.rbs | 6 +- .../models/beta/assistant_delete_params.rbs | 7 +- .../models/beta/assistant_list_params.rbs | 6 +- .../models/beta/assistant_retrieve_params.rbs | 7 +- .../models/beta/assistant_update_params.rbs | 6 +- .../beta/thread_create_and_run_params.rbs | 6 +- .../models/beta/thread_create_params.rbs | 6 +- .../models/beta/thread_delete_params.rbs | 7 +- .../models/beta/thread_retrieve_params.rbs | 7 +- .../models/beta/thread_update_params.rbs | 6 +- .../beta/threads/message_create_params.rbs | 6 +- .../beta/threads/message_delete_params.rbs | 6 +- .../beta/threads/message_list_params.rbs | 6 +- .../beta/threads/message_retrieve_params.rbs | 6 +- .../beta/threads/message_update_params.rbs | 6 +- .../models/beta/threads/run_cancel_params.rbs | 6 +- .../models/beta/threads/run_create_params.rbs | 6 +- .../models/beta/threads/run_list_params.rbs | 6 +- .../beta/threads/run_retrieve_params.rbs | 6 +- .../run_submit_tool_outputs_params.rbs | 6 +- .../models/beta/threads/run_update_params.rbs | 6 +- .../beta/threads/runs/step_list_params.rbs | 6 +- .../threads/runs/step_retrieve_params.rbs | 6 +- .../models/chat/completion_create_params.rbs | 6 +- .../models/chat/completion_delete_params.rbs | 7 +- .../models/chat/completion_list_params.rbs | 6 +- .../chat/completion_retrieve_params.rbs | 7 +- .../models/chat/completion_update_params.rbs | 7 +- .../chat/completions/message_list_params.rbs | 6 +- .../models/completion_create_params.rbs | 6 +- sig/openai/models/embedding_create_params.rbs | 6 +- sig/openai/models/file_content_params.rbs | 6 +- sig/openai/models/file_create_params.rbs | 6 +- sig/openai/models/file_delete_params.rbs | 6 +- sig/openai/models/file_list_params.rbs | 6 +- sig/openai/models/file_retrieve_params.rbs | 7 +- .../models/fine_tuning/job_cancel_params.rbs | 6 +- .../models/fine_tuning/job_create_params.rbs | 6 +- .../fine_tuning/job_list_events_params.rbs | 7 +- .../models/fine_tuning/job_list_params.rbs | 6 +- .../fine_tuning/job_retrieve_params.rbs | 7 +- .../jobs/checkpoint_list_params.rbs | 7 +- .../models/image_create_variation_params.rbs | 6 +- sig/openai/models/image_edit_params.rbs | 6 +- sig/openai/models/image_generate_params.rbs | 6 +- sig/openai/models/model_delete_params.rbs | 6 +- sig/openai/models/model_list_params.rbs | 6 +- sig/openai/models/model_retrieve_params.rbs | 7 +- .../models/moderation_create_params.rbs | 6 +- .../responses/input_item_list_params.rbs | 6 +- .../responses/response_create_params.rbs | 6 +- .../responses/response_delete_params.rbs | 7 +- .../responses/response_retrieve_params.rbs | 6 +- sig/openai/models/upload_cancel_params.rbs | 7 +- sig/openai/models/upload_complete_params.rbs | 7 +- sig/openai/models/upload_create_params.rbs | 6 +- 
.../models/uploads/part_create_params.rbs | 6 +- .../models/vector_store_create_params.rbs | 6 +- .../models/vector_store_delete_params.rbs | 7 +- .../models/vector_store_list_params.rbs | 6 +- .../models/vector_store_retrieve_params.rbs | 7 +- .../models/vector_store_search_params.rbs | 6 +- .../models/vector_store_update_params.rbs | 6 +- .../file_batch_cancel_params.rbs | 6 +- .../file_batch_create_params.rbs | 6 +- .../file_batch_list_files_params.rbs | 6 +- .../file_batch_retrieve_params.rbs | 6 +- .../vector_stores/file_content_params.rbs | 6 +- .../vector_stores/file_create_params.rbs | 6 +- .../vector_stores/file_delete_params.rbs | 6 +- .../models/vector_stores/file_list_params.rbs | 6 +- .../vector_stores/file_retrieve_params.rbs | 6 +- .../vector_stores/file_update_params.rbs | 6 +- sig/openai/page.rbs | 11 - sig/openai/resources/audio/transcriptions.rbs | 2 +- sig/openai/resources/batches.rbs | 2 +- sig/openai/resources/beta/assistants.rbs | 2 +- sig/openai/resources/beta/threads.rbs | 2 +- .../resources/beta/threads/messages.rbs | 2 +- sig/openai/resources/beta/threads/runs.rbs | 6 +- .../resources/beta/threads/runs/steps.rbs | 2 +- sig/openai/resources/chat/completions.rbs | 4 +- .../resources/chat/completions/messages.rbs | 2 +- sig/openai/resources/completions.rbs | 2 +- sig/openai/resources/files.rbs | 2 +- sig/openai/resources/fine_tuning/jobs.rbs | 4 +- .../fine_tuning/jobs/checkpoints.rbs | 2 +- sig/openai/resources/models.rbs | 2 +- sig/openai/resources/responses.rbs | 2 +- .../resources/responses/input_items.rbs | 2 +- sig/openai/resources/vector_stores.rbs | 4 +- .../resources/vector_stores/file_batches.rbs | 2 +- sig/openai/resources/vector_stores/files.rbs | 4 +- sig/openai/stream.rbs | 7 - sig/openai/transport/base_client.rbs | 110 --- sig/openai/transport/pooled_net_requester.rbs | 39 - sig/openai/type.rbs | 22 - sig/openai/type/array_of.rbs | 36 - sig/openai/type/base_model.rbs | 77 -- sig/openai/type/base_page.rbs | 22 - sig/openai/type/base_stream.rbs | 25 - sig/openai/type/boolean_model.rbs | 18 - sig/openai/type/converter.rbs | 36 - sig/openai/type/enum.rbs | 22 - sig/openai/type/hash_of.rbs | 36 - sig/openai/type/request_parameters.rbs | 13 - sig/openai/type/union.rbs | 37 - sig/openai/type/unknown.rbs | 15 - sig/openai/util.rbs | 132 ---- .../{ => internal/type}/base_model_test.rb | 22 +- test/openai/{ => internal}/util_test.rb | 110 +-- test/openai/resources/batches_test.rb | 2 +- test/openai/resources/beta/assistants_test.rb | 2 +- .../resources/beta/threads/messages_test.rb | 2 +- .../resources/beta/threads/runs/steps_test.rb | 2 +- .../resources/beta/threads/runs_test.rb | 2 +- .../chat/completions/messages_test.rb | 2 +- .../openai/resources/chat/completions_test.rb | 2 +- test/openai/resources/files_test.rb | 2 +- .../fine_tuning/jobs/checkpoints_test.rb | 2 +- .../openai/resources/fine_tuning/jobs_test.rb | 4 +- test/openai/resources/models_test.rb | 2 +- .../resources/responses/input_items_test.rb | 2 +- .../vector_stores/file_batches_test.rb | 2 +- .../resources/vector_stores/files_test.rb | 4 +- test/openai/resources/vector_stores_test.rb | 4 +- 550 files changed, 7458 insertions(+), 6849 deletions(-) create mode 100644 lib/openai/aliases.rb delete mode 100644 lib/openai/cursor_page.rb create mode 100644 lib/openai/internal/cursor_page.rb create mode 100644 lib/openai/internal/page.rb create mode 100644 lib/openai/internal/stream.rb create mode 100644 lib/openai/internal/transport/base_client.rb create mode 100644 
lib/openai/internal/transport/pooled_net_requester.rb create mode 100644 lib/openai/internal/type/array_of.rb create mode 100644 lib/openai/internal/type/base_model.rb create mode 100644 lib/openai/internal/type/base_page.rb create mode 100644 lib/openai/internal/type/base_stream.rb create mode 100644 lib/openai/internal/type/boolean_model.rb create mode 100644 lib/openai/internal/type/converter.rb create mode 100644 lib/openai/internal/type/enum.rb create mode 100644 lib/openai/internal/type/hash_of.rb create mode 100644 lib/openai/internal/type/request_parameters.rb create mode 100644 lib/openai/internal/type/union.rb create mode 100644 lib/openai/internal/type/unknown.rb create mode 100644 lib/openai/internal/util.rb delete mode 100644 lib/openai/page.rb delete mode 100644 lib/openai/stream.rb delete mode 100644 lib/openai/transport/base_client.rb delete mode 100644 lib/openai/transport/pooled_net_requester.rb delete mode 100644 lib/openai/type.rb delete mode 100644 lib/openai/type/array_of.rb delete mode 100644 lib/openai/type/base_model.rb delete mode 100644 lib/openai/type/base_page.rb delete mode 100644 lib/openai/type/base_stream.rb delete mode 100644 lib/openai/type/boolean_model.rb delete mode 100644 lib/openai/type/converter.rb delete mode 100644 lib/openai/type/enum.rb delete mode 100644 lib/openai/type/hash_of.rb delete mode 100644 lib/openai/type/request_parameters.rb delete mode 100644 lib/openai/type/union.rb delete mode 100644 lib/openai/type/unknown.rb delete mode 100644 lib/openai/util.rb create mode 100644 rbi/lib/openai/aliases.rbi delete mode 100644 rbi/lib/openai/cursor_page.rbi create mode 100644 rbi/lib/openai/internal/cursor_page.rbi create mode 100644 rbi/lib/openai/internal/page.rbi create mode 100644 rbi/lib/openai/internal/stream.rbi create mode 100644 rbi/lib/openai/internal/transport/base_client.rbi create mode 100644 rbi/lib/openai/internal/transport/pooled_net_requester.rbi create mode 100644 rbi/lib/openai/internal/type/array_of.rbi create mode 100644 rbi/lib/openai/internal/type/base_model.rbi create mode 100644 rbi/lib/openai/internal/type/base_page.rbi create mode 100644 rbi/lib/openai/internal/type/base_stream.rbi create mode 100644 rbi/lib/openai/internal/type/boolean_model.rbi create mode 100644 rbi/lib/openai/internal/type/converter.rbi create mode 100644 rbi/lib/openai/internal/type/enum.rbi create mode 100644 rbi/lib/openai/internal/type/hash_of.rbi create mode 100644 rbi/lib/openai/internal/type/request_parameters.rbi create mode 100644 rbi/lib/openai/internal/type/union.rbi create mode 100644 rbi/lib/openai/internal/type/unknown.rbi create mode 100644 rbi/lib/openai/internal/util.rbi delete mode 100644 rbi/lib/openai/page.rbi delete mode 100644 rbi/lib/openai/stream.rbi delete mode 100644 rbi/lib/openai/transport/base_client.rbi delete mode 100644 rbi/lib/openai/transport/pooled_net_requester.rbi delete mode 100644 rbi/lib/openai/type.rbi delete mode 100644 rbi/lib/openai/type/array_of.rbi delete mode 100644 rbi/lib/openai/type/base_model.rbi delete mode 100644 rbi/lib/openai/type/base_page.rbi delete mode 100644 rbi/lib/openai/type/base_stream.rbi delete mode 100644 rbi/lib/openai/type/boolean_model.rbi delete mode 100644 rbi/lib/openai/type/converter.rbi delete mode 100644 rbi/lib/openai/type/enum.rbi delete mode 100644 rbi/lib/openai/type/hash_of.rbi delete mode 100644 rbi/lib/openai/type/request_parameters.rbi delete mode 100644 rbi/lib/openai/type/union.rbi delete mode 100644 rbi/lib/openai/type/unknown.rbi delete mode 100644 
rbi/lib/openai/util.rbi create mode 100644 sig/openai/aliases.rbs delete mode 100644 sig/openai/cursor_page.rbs create mode 100644 sig/openai/internal/cursor_page.rbs create mode 100644 sig/openai/internal/page.rbs create mode 100644 sig/openai/internal/stream.rbs create mode 100644 sig/openai/internal/transport/base_client.rbs create mode 100644 sig/openai/internal/transport/pooled_net_requester.rbs create mode 100644 sig/openai/internal/type/array_of.rbs create mode 100644 sig/openai/internal/type/base_model.rbs create mode 100644 sig/openai/internal/type/base_page.rbs create mode 100644 sig/openai/internal/type/base_stream.rbs create mode 100644 sig/openai/internal/type/boolean_model.rbs create mode 100644 sig/openai/internal/type/converter.rbs create mode 100644 sig/openai/internal/type/enum.rbs create mode 100644 sig/openai/internal/type/hash_of.rbs create mode 100644 sig/openai/internal/type/request_parameters.rbs create mode 100644 sig/openai/internal/type/union.rbs create mode 100644 sig/openai/internal/type/unknown.rbs create mode 100644 sig/openai/internal/util.rbs delete mode 100644 sig/openai/page.rbs delete mode 100644 sig/openai/stream.rbs delete mode 100644 sig/openai/transport/base_client.rbs delete mode 100644 sig/openai/transport/pooled_net_requester.rbs delete mode 100644 sig/openai/type.rbs delete mode 100644 sig/openai/type/array_of.rbs delete mode 100644 sig/openai/type/base_model.rbs delete mode 100644 sig/openai/type/base_page.rbs delete mode 100644 sig/openai/type/base_stream.rbs delete mode 100644 sig/openai/type/boolean_model.rbs delete mode 100644 sig/openai/type/converter.rbs delete mode 100644 sig/openai/type/enum.rbs delete mode 100644 sig/openai/type/hash_of.rbs delete mode 100644 sig/openai/type/request_parameters.rbs delete mode 100644 sig/openai/type/union.rbs delete mode 100644 sig/openai/type/unknown.rbs delete mode 100644 sig/openai/util.rbs rename test/openai/{ => internal/type}/base_model_test.rb (94%) rename test/openai/{ => internal}/util_test.rb (75%) diff --git a/lib/openai.rb b/lib/openai.rb index a2330683..8e87e568 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -36,27 +36,27 @@ # Package files. 
require_relative "openai/version" -require_relative "openai/util" -require_relative "openai/type/converter" -require_relative "openai/type/unknown" -require_relative "openai/type/boolean_model" -require_relative "openai/type/enum" -require_relative "openai/type/union" -require_relative "openai/type/array_of" -require_relative "openai/type/hash_of" -require_relative "openai/type/base_model" -require_relative "openai/type/base_page" -require_relative "openai/type/base_stream" -require_relative "openai/type/request_parameters" -require_relative "openai/type" +require_relative "openai/internal/util" +require_relative "openai/internal/type/converter" +require_relative "openai/internal/type/unknown" +require_relative "openai/internal/type/boolean_model" +require_relative "openai/internal/type/enum" +require_relative "openai/internal/type/union" +require_relative "openai/internal/type/array_of" +require_relative "openai/internal/type/hash_of" +require_relative "openai/internal/type/base_model" +require_relative "openai/internal/type/base_page" +require_relative "openai/internal/type/base_stream" +require_relative "openai/internal/type/request_parameters" +require_relative "openai/aliases" require_relative "openai/request_options" require_relative "openai/errors" -require_relative "openai/transport/base_client" -require_relative "openai/transport/pooled_net_requester" +require_relative "openai/internal/transport/base_client" +require_relative "openai/internal/transport/pooled_net_requester" require_relative "openai/client" -require_relative "openai/stream" -require_relative "openai/cursor_page" -require_relative "openai/page" +require_relative "openai/internal/stream" +require_relative "openai/internal/cursor_page" +require_relative "openai/internal/page" require_relative "openai/models/reasoning_effort" require_relative "openai/models/chat/chat_completion_message" require_relative "openai/models/fine_tuning/fine_tuning_job_wandb_integration_object" diff --git a/lib/openai/aliases.rb b/lib/openai/aliases.rb new file mode 100644 index 00000000..da013a3e --- /dev/null +++ b/lib/openai/aliases.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module OpenAI + Unknown = OpenAI::Internal::Type::Unknown + + BooleanModel = OpenAI::Internal::Type::BooleanModel + + Enum = OpenAI::Internal::Type::Enum + + Union = OpenAI::Internal::Type::Union + + ArrayOf = OpenAI::Internal::Type::ArrayOf + + HashOf = OpenAI::Internal::Type::HashOf + + BaseModel = OpenAI::Internal::Type::BaseModel + + RequestParameters = OpenAI::Internal::Type::RequestParameters +end diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 2bfddd02..5781780a 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true module OpenAI - class Client < OpenAI::Transport::BaseClient + class Client < OpenAI::Internal::Transport::BaseClient # Default max number of retries to attempt after a failed retryable request. DEFAULT_MAX_RETRIES = 2 diff --git a/lib/openai/cursor_page.rb b/lib/openai/cursor_page.rb deleted file mode 100644 index 640aa03b..00000000 --- a/lib/openai/cursor_page.rb +++ /dev/null @@ -1,92 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @example - # if cursor_page.has_next? - # cursor_page = cursor_page.next_page - # end - # - # @example - # cursor_page.auto_paging_each do |completion| - # puts(completion) - # end - # - # @example - # completions = - # cursor_page - # .to_enum - # .lazy - # .select { _1.object_id.even? 
} - # .map(&:itself) - # .take(2) - # .to_a - # - # completions => Array - class CursorPage - include OpenAI::Type::BasePage - - # @return [Array, nil] - attr_accessor :data - - # @return [Boolean] - attr_accessor :has_more - - # @api private - # - # @param client [OpenAI::Transport::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Hash{Symbol=>Object}] - def initialize(client:, req:, headers:, page_data:) - super - model = req.fetch(:model) - - case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Type::Converter.coerce(model, _1) } - else - end - - case page_data - in {has_more: true | false | nil => has_more} - @has_more = has_more - else - end - end - - # @return [Boolean] - def next_page? - has_more - end - - # @raise [OpenAI::HTTP::Error] - # @return [OpenAI::CursorPage] - def next_page - unless next_page? - message = "No more pages available. Please check #next_page? before calling ##{__method__}" - raise RuntimeError.new(message) - end - - req = OpenAI::Util.deep_merge(@req, {query: {after: data&.last&.id}}) - @client.request(req) - end - - # @param blk [Proc] - def auto_paging_each(&blk) - unless block_given? - raise ArgumentError.new("A block must be given to ##{__method__}") - end - page = self - loop do - page.data&.each { blk.call(_1) } - break unless page.next_page? - page = page.next_page - end - end - - # @return [String] - def inspect - "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} has_more=#{has_more.inspect}>" - end - end -end diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index bc67421d..4b5fc99f 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -178,10 +178,10 @@ def self.for(url:, status:, body:, request:, response:, message: nil) # @param response [nil] # @param message [String, nil] def initialize(url:, status:, body:, request:, response:, message: nil) - message ||= OpenAI::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } - @code = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :code)) - @param = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :param)) - @type = OpenAI::Type::Converter.coerce(String, OpenAI::Util.dig(body, :type)) + message ||= OpenAI::Internal::Util.dig(body, :message) { {url: url.to_s, status: status, body: body} } + @code = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :code)) + @param = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :param)) + @type = OpenAI::Internal::Type::Converter.coerce(String, OpenAI::Internal::Util.dig(body, :type)) super( url: url, status: status, diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb new file mode 100644 index 00000000..ba3b1b74 --- /dev/null +++ b/lib/openai/internal/cursor_page.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + # @example + # if cursor_page.has_next? + # cursor_page = cursor_page.next_page + # end + # + # @example + # cursor_page.auto_paging_each do |completion| + # puts(completion) + # end + # + # @example + # completions = + # cursor_page + # .to_enum + # .lazy + # .select { _1.object_id.even? 
} + # .map(&:itself) + # .take(2) + # .to_a + # + # completions => Array + class CursorPage + include OpenAI::Internal::Type::BasePage + + # @return [Array, nil] + attr_accessor :data + + # @return [Boolean] + attr_accessor :has_more + + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Hash{Symbol=>Object}] + def initialize(client:, req:, headers:, page_data:) + super + model = req.fetch(:model) + + case page_data + in {data: Array | nil => data} + @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) } + else + end + + case page_data + in {has_more: true | false | nil => has_more} + @has_more = has_more + else + end + end + + # @return [Boolean] + def next_page? + has_more + end + + # @raise [OpenAI::HTTP::Error] + # @return [OpenAI::Internal::CursorPage] + def next_page + unless next_page? + message = "No more pages available. Please check #next_page? before calling ##{__method__}" + raise RuntimeError.new(message) + end + + req = OpenAI::Internal::Util.deep_merge(@req, {query: {after: data&.last&.id}}) + @client.request(req) + end + + # @param blk [Proc] + def auto_paging_each(&blk) + unless block_given? + raise ArgumentError.new("A block must be given to ##{__method__}") + end + page = self + loop do + page.data&.each { blk.call(_1) } + break unless page.next_page? + page = page.next_page + end + end + + # @return [String] + def inspect + "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} has_more=#{has_more.inspect}>" + end + end + end +end diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb new file mode 100644 index 00000000..867cd496 --- /dev/null +++ b/lib/openai/internal/page.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + # @example + # if page.has_next? + # page = page.next_page + # end + # + # @example + # page.auto_paging_each do |model| + # puts(model) + # end + # + # @example + # models = + # page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # models => Array + class Page + include OpenAI::Internal::Type::BasePage + + # @return [Array, nil] + attr_accessor :data + + # @return [String] + attr_accessor :object + + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Array] + def initialize(client:, req:, headers:, page_data:) + super + model = req.fetch(:model) + + case page_data + in {data: Array | nil => data} + @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) } + else + end + + case page_data + in {object: String => object} + @object = object + else + end + end + + # @return [Boolean] + def next_page? + false + end + + # @raise [OpenAI::HTTP::Error] + # @return [OpenAI::Internal::Page] + def next_page + RuntimeError.new("No more pages available.") + end + + # @param blk [Proc] + def auto_paging_each(&blk) + unless block_given? + raise ArgumentError.new("A block must be given to ##{__method__}") + end + page = self + loop do + page.data&.each { blk.call(_1) } + break unless page.next_page? 
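+        # Only reached when `next_page?` reported more data, so each
+        # iteration performs at most one additional API request.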
+ page = page.next_page + end + end + + # @return [String] + def inspect + "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} object=#{object.inspect}>" + end + end + end +end diff --git a/lib/openai/internal/stream.rb b/lib/openai/internal/stream.rb new file mode 100644 index 00000000..3c9e0748 --- /dev/null +++ b/lib/openai/internal/stream.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + # @example + # stream.each do |event| + # puts(event) + # end + # + # @example + # events = + # stream + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # events => Array + class Stream + include OpenAI::Internal::Type::BaseStream + + # @api private + # + # @return [Enumerable] + private def iterator + # rubocop:disable Metrics/BlockLength + @iterator ||= OpenAI::Internal::Util.chain_fused(@stream) do |y| + consume = false + + @stream.each do |msg| + next if consume + + case msg + in { data: String => data } if data.start_with?("[DONE]") + consume = true + next + in { data: String => data } + case JSON.parse(data, symbolize_names: true) + in { error: error } + message = + case error + in String + error + in { message: String => m } + m + else + "An error occurred during streaming" + end + OpenAI::Errors::APIError.for( + url: @url, + status: @status, + body: body, + request: nil, + response: @response, + message: message + ) + in decoded + y << OpenAI::Internal::Type::Converter.coerce(@model, decoded) + end + else + end + end + end + # rubocop:enable Metrics/BlockLength + end + end + end +end diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb new file mode 100644 index 00000000..013e2d64 --- /dev/null +++ b/lib/openai/internal/transport/base_client.rb @@ -0,0 +1,461 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Transport + # @api private + # + # @abstract + class BaseClient + # from whatwg fetch spec + MAX_REDIRECTS = 20 + + # rubocop:disable Style/MutableConstant + PLATFORM_HEADERS = + { + "x-stainless-arch" => OpenAI::Internal::Util.arch, + "x-stainless-lang" => "ruby", + "x-stainless-os" => OpenAI::Internal::Util.os, + "x-stainless-package-version" => OpenAI::VERSION, + "x-stainless-runtime" => ::RUBY_ENGINE, + "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION + } + # rubocop:enable Style/MutableConstant + + class << self + # @api private + # + # @param req [Hash{Symbol=>Object}] + # + # @raise [ArgumentError] + def validate!(req) + keys = [:method, :path, :query, :headers, :body, :unwrap, :page, :stream, :model, :options] + case req + in Hash + req.each_key do |k| + unless keys.include?(k) + raise ArgumentError.new("Request `req` keys must be one of #{keys}, got #{k.inspect}") + end + end + else + raise ArgumentError.new("Request `req` must be a Hash or RequestOptions, got #{req.inspect}") + end + end + + # @api private + # + # @param status [Integer] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # + # @return [Boolean] + def should_retry?(status, headers:) + coerced = OpenAI::Internal::Util.coerce_boolean(headers["x-should-retry"]) + case [coerced, status] + in [true | false, _] + coerced + in [_, 408 | 409 | 429 | (500..)] + # retry on: + # 408: timeouts + # 409: locks + # 429: rate limits + # 500+: unknown errors + true + else + false + end + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Integer] :max_retries + # + # @option request [Float] :timeout + # + # @param status [Integer] + # + # @param response_headers [Hash{String=>String}, Net::HTTPHeader] + # + # @return [Hash{Symbol=>Object}] + def follow_redirect(request, status:, response_headers:) + method, url, headers = request.fetch_values(:method, :url, :headers) + location = + Kernel.then do + URI.join(url, response_headers["location"]) + rescue ArgumentError + message = "Server responded with status #{status} but no valid location header." + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + end + + request = {**request, url: location} + + case [url.scheme, location.scheme] + in ["https", "http"] + message = "Tried to redirect to a insecure URL" + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + else + nil + end + + # from whatwg fetch spec + case [status, method] + in [301 | 302, :post] | [303, _] + drop = %w[content-encoding content-language content-length content-location content-type] + request = { + **request, + method: method == :head ? :head : :get, + headers: headers.except(*drop), + body: nil + } + else + end + + # from undici + if OpenAI::Internal::Util.uri_origin(url) != OpenAI::Internal::Util.uri_origin(location) + drop = %w[authorization cookie host proxy-authorization] + request = {**request, headers: request.fetch(:headers).except(*drop)} + end + + request + end + + # @api private + # + # @param status [Integer, OpenAI::Errors::APIConnectionError] + # @param stream [Enumerable, nil] + def reap_connection!(status, stream:) + case status + in (..199) | (300..499) + stream&.each { next } + in OpenAI::Errors::APIConnectionError | (500..) + OpenAI::Internal::Util.close_fused!(stream) + else + end + end + end + + # @api private + # @return [OpenAI::Internal::Transport::PooledNetRequester] + attr_accessor :requester + + # @api private + # + # @param base_url [String] + # @param timeout [Float] + # @param max_retries [Integer] + # @param initial_retry_delay [Float] + # @param max_retry_delay [Float] + # @param headers [Hash{String=>String, Integer, Array, nil}] + # @param idempotency_header [String, nil] + def initialize( + base_url:, + timeout: 0.0, + max_retries: 0, + initial_retry_delay: 0.0, + max_retry_delay: 0.0, + headers: {}, + idempotency_header: nil + ) + @requester = OpenAI::Internal::Transport::PooledNetRequester.new + @headers = OpenAI::Internal::Util.normalized_headers( + self.class::PLATFORM_HEADERS, + { + "accept" => "application/json", + "content-type" => "application/json" + }, + headers + ) + @base_url = OpenAI::Internal::Util.parse_uri(base_url) + @idempotency_header = idempotency_header&.to_s&.downcase + @max_retries = max_retries + @timeout = timeout + @initial_retry_delay = initial_retry_delay + @max_retry_delay = max_retry_delay + end + + # @api private + # + # @return [Hash{String=>String}] + private def auth_headers = {} + + # @api private + # + # @return [String] + private def generate_idempotency_key = "stainless-ruby-retry-#{SecureRandom.uuid}" + + # @api private + # + # @param req [Hash{Symbol=>Object}] . 
+ # + # @option req [Symbol] :method + # + # @option req [String, Array] :path + # + # @option req [Hash{String=>Array, String, nil}, nil] :query + # + # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers + # + # @option req [Object, nil] :body + # + # @option req [Symbol, nil] :unwrap + # + # @option req [Class, nil] :page + # + # @option req [Class, nil] :stream + # + # @option req [OpenAI::Internal::Type::Converter, Class, nil] :model + # + # @param opts [Hash{Symbol=>Object}] . + # + # @option opts [String, nil] :idempotency_key + # + # @option opts [Hash{String=>Array, String, nil}, nil] :extra_query + # + # @option opts [Hash{String=>String, nil}, nil] :extra_headers + # + # @option opts [Object, nil] :extra_body + # + # @option opts [Integer, nil] :max_retries + # + # @option opts [Float, nil] :timeout + # + # @return [Hash{Symbol=>Object}] + private def build_request(req, opts) + method, uninterpolated_path = req.fetch_values(:method, :path) + + path = OpenAI::Internal::Util.interpolate_path(uninterpolated_path) + + query = OpenAI::Internal::Util.deep_merge(req[:query].to_h, opts[:extra_query].to_h) + + headers = OpenAI::Internal::Util.normalized_headers( + @headers, + auth_headers, + req[:headers].to_h, + opts[:extra_headers].to_h + ) + + if @idempotency_header && + !headers.key?(@idempotency_header) && + !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) + headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } + end + + unless headers.key?("x-stainless-retry-count") + headers["x-stainless-retry-count"] = "0" + end + + timeout = opts.fetch(:timeout, @timeout).to_f.clamp((0..)) + unless headers.key?("x-stainless-timeout") || timeout.zero? + headers["x-stainless-timeout"] = timeout.to_s + end + + headers.reject! { |_, v| v.to_s.empty? } + + body = + case method + in :get | :head | :options | :trace + nil + else + OpenAI::Internal::Util.deep_merge(*[req[:body], opts[:extra_body]].compact) + end + + headers, encoded = OpenAI::Internal::Util.encode_content(headers, body) + { + method: method, + url: OpenAI::Internal::Util.join_parsed_uri(@base_url, {**req, path: path, query: query}), + headers: headers, + body: encoded, + max_retries: opts.fetch(:max_retries, @max_retries), + timeout: timeout + } + end + + # @api private + # + # @param headers [Hash{String=>String}] + # @param retry_count [Integer] + # + # @return [Float] + private def retry_delay(headers, retry_count:) + # Non-standard extension + span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 } + return span if span + + retry_header = headers["retry-after"] + return span if (span = Float(retry_header, exception: false)) + + span = retry_header&.then do + Time.httpdate(_1) - Time.now + rescue ArgumentError + nil + end + return span if span + + scale = retry_count**2 + jitter = 1 - (0.25 * rand) + (@initial_retry_delay * scale * jitter).clamp(0, @max_retry_delay) + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Integer] :max_retries + # + # @option request [Float] :timeout + # + # @param redirect_count [Integer] + # + # @param retry_count [Integer] + # + # @param send_retry_header [Boolean] + # + # @raise [OpenAI::Errors::APIError] + # @return [Array(Integer, Net::HTTPResponse, Enumerable)] + private def send_request(request, redirect_count:, retry_count:, send_retry_header:) + url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) + input = {**request.except(:timeout), deadline: OpenAI::Internal::Util.monotonic_secs + timeout} + + if send_retry_header + headers["x-stainless-retry-count"] = retry_count.to_s + end + + begin + status, response, stream = @requester.execute(input) + rescue OpenAI::APIConnectionError => e + status = e + end + + case status + in ..299 + [status, response, stream] + in 300..399 if redirect_count >= self.class::MAX_REDIRECTS + self.class.reap_connection!(status, stream: stream) + + message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." + raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + in 300..399 + self.class.reap_connection!(status, stream: stream) + + request = self.class.follow_redirect(request, status: status, response_headers: response) + send_request( + request, + redirect_count: redirect_count + 1, + retry_count: retry_count, + send_retry_header: send_retry_header + ) + in OpenAI::APIConnectionError if retry_count >= max_retries + raise status + in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response) + decoded = Kernel.then do + OpenAI::Internal::Util.decode_content(response, stream: stream, suppress_error: true) + ensure + self.class.reap_connection!(status, stream: stream) + end + + raise OpenAI::Errors::APIStatusError.for( + url: url, + status: status, + body: decoded, + request: nil, + response: response + ) + in (400..) | OpenAI::Errors::APIConnectionError + self.class.reap_connection!(status, stream: stream) + + delay = retry_delay(response, retry_count: retry_count) + sleep(delay) + + send_request( + request, + redirect_count: redirect_count, + retry_count: retry_count + 1, + send_retry_header: send_retry_header + ) + end + end + + # Execute the request specified by `req`. This is the method that all resource + # methods call into. + # + # @param req [Hash{Symbol=>Object}] . + # + # @option req [Symbol] :method + # + # @option req [String, Array] :path + # + # @option req [Hash{String=>Array, String, nil}, nil] :query + # + # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers + # + # @option req [Object, nil] :body + # + # @option req [Symbol, nil] :unwrap + # + # @option req [Class, nil] :page + # + # @option req [Class, nil] :stream + # + # @option req [OpenAI::Internal::Type::Converter, Class, nil] :model + # + # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options + # + # @raise [OpenAI::Errors::APIError] + # @return [Object] + def request(req) + self.class.validate!(req) + model = req.fetch(:model) { OpenAI::Unknown } + opts = req[:options].to_h + OpenAI::RequestOptions.validate!(opts) + request = build_request(req.except(:options), opts) + url = request.fetch(:url) + + # Don't send the current retry count in the headers if the caller modified the header defaults. 
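+          # `build_request` seeds "x-stainless-retry-count" with "0" whenever
+          # the caller did not set it, so any other value here indicates a
+          # manual override that retries must leave untouched.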
+ send_retry_header = request.fetch(:headers)["x-stainless-retry-count"] == "0" + status, response, stream = send_request( + request, + redirect_count: 0, + retry_count: 0, + send_retry_header: send_retry_header + ) + + decoded = OpenAI::Internal::Util.decode_content(response, stream: stream) + case req + in { stream: Class => st } + st.new(model: model, url: url, status: status, response: response, stream: decoded) + in { page: Class => page } + page.new(client: self, req: req, headers: response, page_data: decoded) + else + unwrapped = OpenAI::Internal::Util.dig(decoded, req[:unwrap]) + OpenAI::Internal::Type::Converter.coerce(model, unwrapped) + end + end + + # @return [String] + def inspect + # rubocop:disable Layout/LineLength + base_url = OpenAI::Internal::Util.unparse_uri(@base_url) + "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" + # rubocop:enable Layout/LineLength + end + end + end + end +end diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb new file mode 100644 index 00000000..c5ee96d9 --- /dev/null +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -0,0 +1,184 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Transport + # @api private + class PooledNetRequester + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + + class << self + # @api private + # + # @param url [URI::Generic] + # + # @return [Net::HTTP] + def connect(url) + port = + case [url.port, url.scheme] + in [Integer, _] + url.port + in [nil, "http" | "ws"] + Net::HTTP.http_default_port + in [nil, "https" | "wss"] + Net::HTTP.https_default_port + end + + Net::HTTP.new(url.host, port).tap do + _1.use_ssl = %w[https wss].include?(url.scheme) + _1.max_retries = 0 + end + end + + # @api private + # + # @param conn [Net::HTTP] + # @param deadline [Float] + def calibrate_socket_timeout(conn, deadline) + timeout = deadline - OpenAI::Internal::Util.monotonic_secs + conn.open_timeout = conn.read_timeout = conn.write_timeout = conn.continue_timeout = timeout + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . 
+ # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @param blk [Proc] + # + # @yieldparam [String] + # @return [Net::HTTPGenericRequest] + def build_request(request, &blk) + method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) + req = Net::HTTPGenericRequest.new( + method.to_s.upcase, + !body.nil?, + method != :head, + url.to_s + ) + + headers.each { req[_1] = _2 } + + case body + in nil + nil + in String + req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Internal::Util::ReadIOAdapter.new(body, &blk) + in StringIO + req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] + req.body_stream = OpenAI::Internal::Util::ReadIOAdapter.new(body, &blk) + in IO | Enumerator + req["transfer-encoding"] ||= "chunked" unless req["content-length"] + req.body_stream = OpenAI::Internal::Util::ReadIOAdapter.new(body, &blk) + end + + req + end + end + + # @api private + # + # @param url [URI::Generic] + # @param deadline [Float] + # @param blk [Proc] + # + # @raise [Timeout::Error] + # @yieldparam [Net::HTTP] + private def with_pool(url, deadline:, &blk) + origin = OpenAI::Internal::Util.uri_origin(url) + timeout = deadline - OpenAI::Internal::Util.monotonic_secs + pool = + @mutex.synchronize do + @pools[origin] ||= ConnectionPool.new(size: @size) do + self.class.connect(url) + end + end + + pool.with(timeout: timeout, &blk) + end + + # @api private + # + # @param request [Hash{Symbol=>Object}] . + # + # @option request [Symbol] :method + # + # @option request [URI::Generic] :url + # + # @option request [Hash{String=>String}] :headers + # + # @option request [Object] :body + # + # @option request [Float] :deadline + # + # @return [Array(Integer, Net::HTTPResponse, Enumerable)] + def execute(request) + url, deadline = request.fetch_values(:url, :deadline) + + eof = false + finished = false + enum = Enumerator.new do |y| + with_pool(url, deadline: deadline) do |conn| + next if finished + + req = self.class.build_request(request) do + self.class.calibrate_socket_timeout(conn, deadline) + end + + self.class.calibrate_socket_timeout(conn, deadline) + unless conn.started? + conn.keep_alive_timeout = self.class::KEEP_ALIVE_TIMEOUT + conn.start + end + + self.class.calibrate_socket_timeout(conn, deadline) + conn.request(req) do |rsp| + y << [conn, req, rsp] + break if finished + + rsp.read_body do |bytes| + y << bytes + break if finished + + self.class.calibrate_socket_timeout(conn, deadline) + end + eof = true + end + end + rescue Timeout::Error + raise OpenAI::Errors::APITimeoutError + end + + conn, _, response = enum.next + body = OpenAI::Internal::Util.fused_enum(enum, external: true) do + finished = true + tap do + enum.next + rescue StopIteration + nil + end + conn.finish if !eof && conn&.started? + end + [Integer(response.code), response, (response.body = body)] + end + + # @api private + # + # @param size [Integer] + def initialize(size: Etc.nprocessors) + @mutex = Mutex.new + @size = size + @pools = {} + end + end + end + end +end diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb new file mode 100644 index 00000000..65fa4ccb --- /dev/null +++ b/lib/openai/internal/type/array_of.rb @@ -0,0 +1,114 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @abstract + # + # Array of items of a given type. 
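+      #
+      # @example
+      # # illustrative sketch only, not part of this change
+      # OpenAI::ArrayOf[Integer] === [1, 2, 3] # => true
+      # OpenAI::ArrayOf[Integer] === [1, "two"] # => false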
+ class ArrayOf + include OpenAI::Internal::Type::Converter + + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) + + # @param other [Object] + # + # @return [Boolean] + def ===(other) = other.is_a?(Array) && other.all?(item_type) + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type + end + + # @api private + # + # @param value [Enumerable, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Array, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Array) + exactness[:no] += 1 + return value + end + + target = item_type + exactness[:yes] += 1 + value + .map do |item| + case [nilable?, item] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Internal::Type::Converter.coerce(target, item, state: state) + end + end + end + + # @api private + # + # @param value [Enumerable, Object] + # + # @return [Array, Object] + def dump(value) + target = item_type + value.is_a?(Array) ? value.map { OpenAI::Internal::Type::Converter.dump(target, _1) } : super + end + + # @api private + # + # @return [OpenAI::Internal::Type::Converter, Class] + protected def item_type = @item_type_fn.call + + # @api private + # + # @return [Boolean] + protected def nilable? = @nilable + + # @api private + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def initialize(type_info, spec = {}) + @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] + end + end + end + end +end diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb new file mode 100644 index 00000000..8984ef64 --- /dev/null +++ b/lib/openai/internal/type/base_model.rb @@ -0,0 +1,376 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @abstract + # + # @example + # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` + # comparison_filter => { + # key: key, + # type: type, + # value: value + # } + class BaseModel + extend OpenAI::Internal::Type::Converter + + class << self + # @api private + # + # Assumes superclass fields are totally defined before fields are accessed / + # defined on subclasses. + # + # @return [Hash{Symbol=>Hash{Symbol=>Object}}] + def known_fields + @known_fields ||= (self < OpenAI::BaseModel ? 
superclass.known_fields.dup : {}) + end + + # @api private + # + # @return [Hash{Symbol=>Hash{Symbol=>Object}}] + def fields + known_fields.transform_values do |field| + {**field.except(:type_fn), type: field.fetch(:type_fn).call} + end + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param required [Boolean] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + private def add_field(name_sym, required:, type_info:, spec:) + type_fn, info = + case type_info + in Proc | OpenAI::Internal::Type::Converter | Class + [OpenAI::Internal::Type::Converter.type_info({**spec, union: type_info}), spec] + in Hash + [OpenAI::Internal::Type::Converter.type_info(type_info), type_info] + end + + setter = "#{name_sym}=" + api_name = info.fetch(:api_name, name_sym) + nilable = info[:nil?] + const = if required && !nilable + info.fetch( + :const, + OpenAI::Internal::Util::OMIT + ) + else + OpenAI::Internal::Util::OMIT + end + + [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) + + known_fields[name_sym] = + { + mode: @mode, + api_name: api_name, + required: required, + nilable: nilable, + const: const, + type_fn: type_fn + } + + define_method(setter) { @data.store(name_sym, _1) } + + define_method(name_sym) do + target = type_fn.call + value = @data.fetch(name_sym) { const == OpenAI::Internal::Util::OMIT ? nil : const } + state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + if (nilable || !required) && value.nil? + nil + else + OpenAI::Internal::Type::Converter.coerce( + target, + value, + state: state + ) + end + rescue StandardError + cls = self.class.name.split("::").last + # rubocop:disable Layout/LineLength + message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." + # rubocop:enable Layout/LineLength + raise OpenAI::ConversionError.new(message) + end + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def required(name_sym, type_info, spec = {}) + add_field(name_sym, required: true, type_info: type_info, spec: spec) + end + + # @api private + # + # @param name_sym [Symbol] + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" 
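+          #
+          # @example
+          # # illustrative sketch only; `Pet` is a hypothetical model
+          # class Pet < OpenAI::BaseModel
+          # required :name, String
+          # optional :age, Integer
+          # end
+          # Pet.new(name: "Rex")[:name] # => "Rex"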
+ def optional(name_sym, type_info, spec = {}) + add_field(name_sym, required: false, type_info: type_info, spec: spec) + end + + # @api private + # + # `request_only` attributes not excluded from `.#coerce` when receiving responses + # even if well behaved servers should not send them + # + # @param blk [Proc] + private def request_only(&blk) + @mode = :dump + blk.call + ensure + @mode = nil + end + + # @api private + # + # `response_only` attributes are omitted from `.#dump` when making requests + # + # @param blk [Proc] + private def response_only(&blk) + @mode = :coerce + blk.call + ensure + @mode = nil + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) = self.class == other.class && @data == other.to_h + + class << self + # @api private + # + # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [OpenAI::BaseModel, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + if value.is_a?(self.class) + exactness[:yes] += 1 + return value + end + + unless (val = OpenAI::Internal::Util.coerce_hash(value)).is_a?(Hash) + exactness[:no] += 1 + return value + end + exactness[:yes] += 1 + + keys = val.keys.to_set + instance = new + data = instance.to_h + + # rubocop:disable Metrics/BlockLength + fields.each do |name, field| + mode, required, target = field.fetch_values(:mode, :required, :type) + api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) + + unless val.key?(api_name) + if required && mode != :dump && const == OpenAI::Internal::Util::OMIT + exactness[nilable ? :maybe : :no] += 1 + else + exactness[:yes] += 1 + end + next + end + + item = val.fetch(api_name) + keys.delete(api_name) + + converted = + if item.nil? && (nilable || !required) + exactness[nilable ? :yes : :maybe] += 1 + nil + else + coerced = OpenAI::Internal::Type::Converter.coerce(target, item, state: state) + case target + in OpenAI::Internal::Type::Converter | Symbol + coerced + else + item + end + end + data.store(name, converted) + end + # rubocop:enable Metrics/BlockLength + + keys.each { data.store(_1, val.fetch(_1)) } + instance + end + + # @api private + # + # @param value [OpenAI::BaseModel, Object] + # + # @return [Hash{Object=>Object}, Object] + def dump(value) + unless (coerced = OpenAI::Internal::Util.coerce_hash(value)).is_a?(Hash) + return super + end + + acc = {} + + coerced.each do |key, val| + name = key.is_a?(String) ? key.to_sym : key + case (field = known_fields[name]) + in nil + acc.store(name, super(val)) + else + mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) + case mode + in :coerce + next + else + target = type_fn.call + acc.store(api_name, OpenAI::Internal::Type::Converter.dump(target, val)) + end + end + end + + known_fields.each_value do |field| + mode, api_name, const = field.fetch_values(:mode, :api_name, :const) + next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Internal::Util::OMIT + acc.store(api_name, const) + end + + acc + end + end + + # Returns the raw value associated with the given key, if found. Otherwise, nil is + # returned. 
+ # + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. + # + # @param key [Symbol] + # + # @return [Object, nil] + def [](key) + unless key.instance_of?(Symbol) + raise ArgumentError.new("Expected symbol key for lookup, got #{key.inspect}") + end + + @data[key] + end + + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. + # + # @return [Hash{Symbol=>Object}] + def to_h = @data + + alias_method :to_hash, :to_h + + # @param keys [Array, nil] + # + # @return [Hash{Symbol=>Object}] + def deconstruct_keys(keys) + (keys || self.class.known_fields.keys) + .filter_map do |k| + unless self.class.known_fields.key?(k) + next + end + + [k, public_send(k)] + end + .to_h + end + + # @param a [Object] + # + # @return [String] + def to_json(*a) = self.class.dump(self).to_json(*a) + + # @param a [Object] + # + # @return [String] + def to_yaml(*a) = self.class.dump(self).to_yaml(*a) + + # Create a new instance of a model. + # + # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] + def initialize(data = {}) + case OpenAI::Internal::Util.coerce_hash(data) + in Hash => coerced + @data = coerced + else + raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") + end + end + + # @return [String] + def inspect + rows = self.class.known_fields.keys.map do + "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" + rescue OpenAI::ConversionError + "#{_1}=#{@data.fetch(_1)}" + end + "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" + end + end + end + end +end diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb new file mode 100644 index 00000000..63882c48 --- /dev/null +++ b/lib/openai/internal/type/base_page.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @example + # if page.has_next? + # page = page.next_page + # end + # + # @example + # page.auto_paging_each do |completion| + # puts(completion) + # end + # + # @example + # completions = + # page + # .to_enum + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # completions => Array + module BasePage + # rubocop:disable Lint/UnusedMethodArgument + + # @return [Boolean] + def next_page? 
= (raise NotImplementedError) + + # @raise [OpenAI::Errors::APIError] + # @return [OpenAI::Internal::Type::BasePage] + def next_page = (raise NotImplementedError) + + # @param blk [Proc] + # + # @return [void] + def auto_paging_each(&blk) = (raise NotImplementedError) + + # @return [Enumerable] + def to_enum = super(:auto_paging_each) + + alias_method :enum_for, :to_enum + + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Object] + def initialize(client:, req:, headers:, page_data:) + @client = client + @req = req + super() + end + + # rubocop:enable Lint/UnusedMethodArgument + end + end + end +end diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb new file mode 100644 index 00000000..064878ba --- /dev/null +++ b/lib/openai/internal/type/base_stream.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @example + # stream.each do |chunk| + # puts(chunk) + # end + # + # @example + # chunks = + # stream + # .lazy + # .select { _1.object_id.even? } + # .map(&:itself) + # .take(2) + # .to_a + # + # chunks => Array + module BaseStream + include Enumerable + + # @return [void] + def close = OpenAI::Internal::Util.close_fused!(@iterator) + + # @api private + # + # @return [Enumerable] + private def iterator = (raise NotImplementedError) + + # @param blk [Proc] + # + # @return [void] + def each(&blk) + unless block_given? + raise ArgumentError.new("A block must be given to ##{__method__}") + end + @iterator.each(&blk) + end + + # @return [Enumerator] + def to_enum = @iterator + + alias_method :enum_for, :to_enum + + # @api private + # + # @param model [Class, OpenAI::Internal::Type::Converter] + # @param url [URI::Generic] + # @param status [Integer] + # @param response [Net::HTTPResponse] + # @param stream [Enumerable] + def initialize(model:, url:, status:, response:, stream:) + @model = model + @url = url + @status = status + @response = response + @stream = stream + @iterator = iterator + end + end + end + end +end diff --git a/lib/openai/internal/type/boolean_model.rb b/lib/openai/internal/type/boolean_model.rb new file mode 100644 index 00000000..369fd741 --- /dev/null +++ b/lib/openai/internal/type/boolean_model.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @abstract + # + # Ruby has no Boolean class; this is something for models to refer to. + class BooleanModel + extend OpenAI::Internal::Type::Converter + + # @param other [Object] + # + # @return [Boolean] + def self.===(other) = other == true || other == false + + # @param other [Object] + # + # @return [Boolean] + def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel + + class << self + # @api private + # + # @param value [Boolean, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Boolean, Object] + def coerce(value, state:) + state.fetch(:exactness)[value == true || value == false ? 
:yes : :no] += 1 + value + end + + # @!parse + # # @api private + # # + # # @param value [Boolean, Object] + # # + # # @return [Boolean, Object] + # def dump(value) = super + end + end + end + end +end diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb new file mode 100644 index 00000000..29aad06d --- /dev/null +++ b/lib/openai/internal/type/converter.rb @@ -0,0 +1,219 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # rubocop:disable Metrics/ModuleLength + # @api private + module Converter + # rubocop:disable Lint/UnusedMethodArgument + + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) = (raise NotImplementedError) + + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + case value + in Array + value.map { OpenAI::Unknown.dump(_1) } + in Hash + value.transform_values { OpenAI::Unknown.dump(_1) } + in OpenAI::BaseModel + value.class.dump(value) + else + value + end + end + + # rubocop:enable Lint/UnusedMethodArgument + + class << self + # @api private + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + # + # @return [Proc] + def type_info(spec) + case spec + in Proc + spec + in Hash + type_info(spec.slice(:const, :enum, :union).first&.last) + in true | false + -> { OpenAI::BooleanModel } + in OpenAI::Internal::Type::Converter | Class | Symbol + -> { spec } + in NilClass | Integer | Float + -> { spec.class } + end + end + + # @api private + # + # Based on `target`, transform `value` into `target`, to the extent possible: + # + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered + # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + # + # @param target [OpenAI::Internal::Type::Converter, Class] + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. 
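+          #
+          # @example
+          # # illustrative sketch only: coercing "1" into an Integer
+          # # succeeds, but is only counted as a `:maybe` match
+          # state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
+          # OpenAI::Internal::Type::Converter.coerce(Integer, "1", state: state) # => 1
+          # state[:exactness] # => {yes: 0, no: 0, maybe: 1}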
+ # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce( + target, + value, + state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + ) + # rubocop:disable Lint/SuppressedException + # rubocop:disable Metrics/BlockNesting + strictness, exactness = state.fetch_values(:strictness, :exactness) + + case target + in OpenAI::Internal::Type::Converter + return target.coerce(value, state: state) + in Class + if value.is_a?(target) + exactness[:yes] += 1 + return value + end + + case target + in -> { _1 <= NilClass } + exactness[value.nil? ? :yes : :maybe] += 1 + return nil + in -> { _1 <= Integer } + if value.is_a?(Integer) + exactness[:yes] += 1 + return value + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Integer(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end + in -> { _1 <= Float } + if value.is_a?(Numeric) + exactness[:yes] += 1 + return Float(value) + elsif strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + else + Kernel.then do + return Float(value).tap { exactness[:maybe] += 1 } + rescue ArgumentError, TypeError + end + end + in -> { _1 <= String } + case value + in String | Symbol | Numeric + exactness[value.is_a?(Numeric) ? :maybe : :yes] += 1 + return value.to_s + else + if strictness == :strong + message = "no implicit conversion of #{value.class} into #{target.inspect}" + raise TypeError.new(message) + end + end + in -> { _1 <= Date || _1 <= Time } + Kernel.then do + return target.parse(value).tap { exactness[:yes] += 1 } + rescue ArgumentError, TypeError => e + raise e if strictness == :strong + end + in -> { _1 <= IO } if value.is_a?(String) + exactness[:yes] += 1 + return StringIO.new(value.b) + else + end + in Symbol + if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target + exactness[:yes] += 1 + return target + elsif strictness == :strong + message = "cannot convert non-matching #{value.class} into #{target.inspect}" + raise ArgumentError.new(message) + end + else + end + + exactness[:no] += 1 + value + # rubocop:enable Metrics/BlockNesting + # rubocop:enable Lint/SuppressedException + end + + # @api private + # + # @param target [OpenAI::Internal::Type::Converter, Class] + # @param value [Object] + # + # @return [Object] + def dump(target, value) + target.is_a?(OpenAI::Internal::Type::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value) + end + end + end + # rubocop:enable Metrics/ModuleLength + end + end +end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb new file mode 100644 index 00000000..861f0ac9 --- /dev/null +++ b/lib/openai/internal/type/enum.rb @@ -0,0 +1,107 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # A value from among a specified list of options. OpenAPI enum values map to Ruby + # values in the SDK as follows: + # + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol + # + # We can therefore convert string values to Symbols, but can't convert other + # values safely. 
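+      #
+      # @example
+      # # illustrative sketch only: strings matching a member coerce to
+      # # the Symbol, anything else passes through for the caller to inspect
+      # OpenAI::Internal::Type::Converter.coerce(OpenAI::Models::ChatModel, "o1") # => :o1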
+ # + # @example + # # `chat_model` is a `OpenAI::Models::ChatModel` + # case chat_model + # when OpenAI::Models::ChatModel::O3_MINI + # # ... + # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 + # # ... + # when OpenAI::Models::ChatModel::O1 + # # ... + # else + # puts(chat_model) + # end + # + # @example + # case chat_model + # in :"o3-mini" + # # ... + # in :"o3-mini-2025-01-31" + # # ... + # in :o1 + # # ... + # else + # puts(chat_model) + # end + module Enum + include OpenAI::Internal::Type::Converter + + # All of the valid Symbol values for this enum. + # + # @return [Array] + def values = (@values ||= constants.map { const_get(_1) }) + + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + private def finalize! = values + + # @param other [Object] + # + # @return [Boolean] + def ===(other) = values.include?(other) + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set + end + + # @api private + # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. + # + # @param value [String, Symbol, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Symbol, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + val = value.is_a?(String) ? value.to_sym : value + + if values.include?(val) + exactness[:yes] += 1 + val + else + exactness[values.first&.class == val.class ? :maybe : :no] += 1 + value + end + end + + # @!parse + # # @api private + # # + # # @param value [Symbol, Object] + # # + # # @return [Symbol, Object] + # def dump(value) = super + end + end + end +end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb new file mode 100644 index 00000000..d07f7ccd --- /dev/null +++ b/lib/openai/internal/type/hash_of.rb @@ -0,0 +1,140 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @abstract + # + # Hash of items of a given type. + class HashOf + include OpenAI::Internal::Type::Converter + + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def self.[](type_info, spec = {}) = new(type_info, spec) + + # @param other [Object] + # + # @return [Boolean] + def ===(other) + type = item_type + case other + in Hash + other.all? do |key, val| + case [key, val] + in [Symbol | String, ^type] + true + else + false + end + end + else + false + end + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type + end + + # @api private + # + # @param value [Hash{Object=>Object}, Object] + # + # @param state [Hash{Symbol=>Object}] . 
+ # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Hash{Symbol=>Object}, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + + unless value.is_a?(Hash) + exactness[:no] += 1 + return value + end + + target = item_type + exactness[:yes] += 1 + value + .to_h do |key, val| + k = key.is_a?(String) ? key.to_sym : key + v = + case [nilable?, val] + in [true, nil] + exactness[:yes] += 1 + nil + else + OpenAI::Internal::Type::Converter.coerce(target, val, state: state) + end + + exactness[:no] += 1 unless k.is_a?(Symbol) + [k, v] + end + end + + # @api private + # + # @param value [Hash{Object=>Object}, Object] + # + # @return [Hash{Symbol=>Object}, Object] + def dump(value) + target = item_type + if value.is_a?(Hash) + value.transform_values do + OpenAI::Internal::Type::Converter.dump(target, _1) + end + else + super + end + end + + # @api private + # + # @return [OpenAI::Internal::Type::Converter, Class] + protected def item_type = @item_type_fn.call + + # @api private + # + # @return [Boolean] + protected def nilable? = @nilable + + # @api private + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + def initialize(type_info, spec = {}) + @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) + @nilable = spec[:nil?] + end + end + end + end +end diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb new file mode 100644 index 00000000..8a693e53 --- /dev/null +++ b/lib/openai/internal/type/request_parameters.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + module RequestParameters + # @!parse + # # Options to specify HTTP behaviour for this request. 
+ # # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # attr_accessor :request_options + + # @param mod [Module] + def self.included(mod) + return unless mod <= OpenAI::BaseModel + + mod.extend(OpenAI::Internal::Type::RequestParameters::Converter) + mod.optional(:request_options, OpenAI::RequestOptions) + end + + # @api private + module Converter + # @api private + # + # @param params [Object] + # + # @return [Array(Object, Hash{Symbol=>Object})] + def dump_request(params) + case (dumped = dump(params)) + in Hash + [dumped.except(:request_options), dumped[:request_options]] + else + [dumped, nil] + end + end + end + end + end + end +end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb new file mode 100644 index 00000000..bf2b376e --- /dev/null +++ b/lib/openai/internal/type/union.rb @@ -0,0 +1,212 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @example + # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` + # case chat_completion_content_part + # when OpenAI::Models::Chat::ChatCompletionContentPartText + # puts(chat_completion_content_part.text) + # when OpenAI::Models::Chat::ChatCompletionContentPartImage + # puts(chat_completion_content_part.image_url) + # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio + # puts(chat_completion_content_part.input_audio) + # else + # puts(chat_completion_content_part) + # end + # + # @example + # case chat_completion_content_part + # in {type: :text, text: text} + # puts(text) + # in {type: :image_url, image_url: image_url} + # puts(image_url) + # in {type: :input_audio, input_audio: input_audio} + # puts(input_audio) + # else + # puts(chat_completion_content_part) + # end + module Union + include OpenAI::Internal::Type::Converter + + # @api private + # + # All of the specified variant info for this union. + # + # @return [Array] + private def known_variants = (@known_variants ||= []) + + # @api private + # + # @return [Array] + protected def derefed_variants + @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + end + + # All of the specified variants for this union. + # + # @return [Array] + def variants = derefed_variants.map(&:last) + + # @api private + # + # @param property [Symbol] + private def discriminator(property) + case property + in Symbol + @discriminator = property + end + end + + # @api private + # + # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" 
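+        #
+        # @example
+        # # illustrative sketch only; `TextPart` and `ImagePart` are
+        # # hypothetical models discriminated by their `:type` field
+        # module ContentPart
+        # extend OpenAI::Internal::Type::Union
+        # discriminator :type
+        # variant :text, -> { TextPart }
+        # variant :image_url, -> { ImagePart }
+        # end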
+ private def variant(key, spec = nil) + variant_info = + case key + in Symbol + [key, OpenAI::Internal::Type::Converter.type_info(spec)] + in Proc | OpenAI::Internal::Type::Converter | Class | Hash + [nil, OpenAI::Internal::Type::Converter.type_info(key)] + end + + known_variants << variant_info + end + + # @api private + # + # @param value [Object] + # + # @return [OpenAI::Internal::Type::Converter, Class, nil] + private def resolve_variant(value) + case [@discriminator, value] + in [_, OpenAI::BaseModel] + value.class + in [Symbol, Hash] + key = value.fetch(@discriminator) do + value.fetch(@discriminator.to_s, OpenAI::Internal::Util::OMIT) + end + + return nil if key == OpenAI::Internal::Util::OMIT + + key = key.to_sym if key.is_a?(String) + known_variants.find { |k,| k == key }&.last&.call + else + nil + end + end + + # rubocop:disable Style/HashEachMethods + # rubocop:disable Style/CaseEquality + + # @param other [Object] + # + # @return [Boolean] + def ===(other) + known_variants.any? do |_, variant_fn| + variant_fn.call === other + end + end + + # @param other [Object] + # + # @return [Boolean] + def ==(other) + # rubocop:disable Layout/LineLength + other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants + # rubocop:enable Layout/LineLength + end + + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) + if (target = resolve_variant(value)) + return OpenAI::Internal::Type::Converter.coerce(target, value, state: state) + end + + strictness = state.fetch(:strictness) + exactness = state.fetch(:exactness) + state[:strictness] = strictness == :strong ? true : strictness + + alternatives = [] + known_variants.each do |_, variant_fn| + target = variant_fn.call + exact = state[:exactness] = {yes: 0, no: 0, maybe: 0} + state[:branched] += 1 + + coerced = OpenAI::Internal::Type::Converter.coerce(target, value, state: state) + yes, no, maybe = exact.values + if (no + maybe).zero? || (!strictness && yes.positive?) + exact.each { exactness[_1] += _2 } + state[:exactness] = exactness + return coerced + elsif maybe.positive? 
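+              # Rank near-misses by most `yes`, then most `maybe`, then
+              # fewest `no`, so the closest variant is chosen when nothing
+              # matches exactly.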
+ alternatives << [[-yes, -maybe, no], exact, coerced] + end + end + + case alternatives.sort_by(&:first) + in [] + exactness[:no] += 1 + if strictness == :strong + message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" + raise ArgumentError.new(message) + end + value + in [[_, exact, coerced], *] + exact.each { exactness[_1] += _2 } + coerced + end + .tap { state[:exactness] = exactness } + ensure + state[:strictness] = strictness + end + + # @api private + # + # @param value [Object] + # + # @return [Object] + def dump(value) + if (target = resolve_variant(value)) + return OpenAI::Internal::Type::Converter.dump(target, value) + end + + known_variants.each do + target = _2.call + return OpenAI::Internal::Type::Converter.dump(target, value) if target === value + end + + super + end + + # rubocop:enable Style/CaseEquality + # rubocop:enable Style/HashEachMethods + end + end + end +end diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb new file mode 100644 index 00000000..589bd2ba --- /dev/null +++ b/lib/openai/internal/type/unknown.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @abstract + # + # When we don't know what to expect for the value. + class Unknown + extend OpenAI::Internal::Type::Converter + + # rubocop:disable Lint/UnusedMethodArgument + + # @param other [Object] + # + # @return [Boolean] + def self.===(other) = true + + # @param other [Object] + # + # @return [Boolean] + def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown + + class << self + # @api private + # + # @param value [Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [Object] + def coerce(value, state:) + state.fetch(:exactness)[:yes] += 1 + value + end + + # @!parse + # # @api private + # # + # # @param value [Object] + # # + # # @return [Object] + # def dump(value) = super + end + + # rubocop:enable Lint/UnusedMethodArgument + end + end + end +end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb new file mode 100644 index 00000000..6fb201e3 --- /dev/null +++ b/lib/openai/internal/util.rb @@ -0,0 +1,724 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + # rubocop:disable Metrics/ModuleLength + + # @api private + module Util + # @api private + # + # @return [Float] + def self.monotonic_secs = Process.clock_gettime(Process::CLOCK_MONOTONIC) + + class << self + # @api private + # + # @return [String] + def arch + case (arch = RbConfig::CONFIG["arch"])&.downcase + in nil + "unknown" + in /aarch64|arm64/ + "arm64" + in /x86_64/ + "x64" + in /arm/ + "arm" + else + "other:#{arch}" + end + end + + # @api private + # + # @return [String] + def os + case (host = RbConfig::CONFIG["host_os"])&.downcase + in nil + "Unknown" + in /linux/ + "Linux" + in /darwin/ + "MacOS" + in /freebsd/ + "FreeBSD" + in /openbsd/ + "OpenBSD" + in /mswin|mingw|cygwin|ucrt/ + "Windows" + else + "Other:#{host}" + end + end + end + + class << self + # @api private + # + # @param input [Object] + # + # @return [Boolean] + def primitive?(input) + case input + in true | false | Integer | Float | Symbol | String + true + else + false + end + end + + # @api private + # + # @param input [Object] + # + # @return [Boolean, Object] + def coerce_boolean(input) + case input.is_a?(String) ? 
input.downcase : input + in Numeric + input.nonzero? + in "true" + true + in "false" + false + else + input + end + end + + # @api private + # + # @param input [Object] + # + # @raise [ArgumentError] + # @return [Boolean, nil] + def coerce_boolean!(input) + case coerce_boolean(input) + in true | false | nil => coerced + coerced + else + raise ArgumentError.new("Unable to coerce #{input.inspect} into boolean value") + end + end + + # @api private + # + # @param input [Object] + # + # @return [Integer, Object] + def coerce_integer(input) + case input + in true + 1 + in false + 0 + else + Integer(input, exception: false) || input + end + end + + # @api private + # + # @param input [Object] + # + # @return [Float, Object] + def coerce_float(input) + case input + in true + 1.0 + in false + 0.0 + else + Float(input, exception: false) || input + end + end + + # @api private + # + # @param input [Object] + # + # @return [Hash{Object=>Object}, Object] + def coerce_hash(input) + case input + in NilClass | Array | Set | Enumerator + input + else + input.respond_to?(:to_h) ? input.to_h : input + end + end + end + + # Use this to indicate that a value should be explicitly removed from a data + # structure when using `OpenAI::Internal::Util.deep_merge`. + # + # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging + # `{a: 1}` and `{}` would produce `{a: 1}`. + OMIT = Object.new.freeze + + class << self + # @api private + # + # @param lhs [Object] + # @param rhs [Object] + # @param concat [Boolean] + # + # @return [Object] + private def deep_merge_lr(lhs, rhs, concat: false) + case [lhs, rhs, concat] + in [Hash, Hash, _] + rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::Util::OMIT } + lhs + .reject { |key, _| rhs[key] == OpenAI::Internal::Util::OMIT } + .merge(rhs_cleaned) do |_, old_val, new_val| + deep_merge_lr(old_val, new_val, concat: concat) + end + in [Array, Array, true] + lhs.concat(rhs) + else + rhs + end + end + + # @api private + # + # Recursively merge one hash with another. If the values at a given key are not + # both hashes, just take the new value. + # + # @param values [Array] + # + # @param sentinel [Object, nil] the value to return if no values are provided. + # + # @param concat [Boolean] whether to merge sequences by concatenation. + # + # @return [Object] + def deep_merge(*values, sentinel: nil, concat: false) + case values + in [value, *values] + values.reduce(value) do |acc, val| + deep_merge_lr(acc, val, concat: concat) + end + else + sentinel + end + end + + # @api private + # + # @param data [Hash{Symbol=>Object}, Array, Object] + # @param pick [Symbol, Integer, Array, nil] + # @param sentinel [Object, nil] + # @param blk [Proc, nil] + # + # @return [Object, nil] + def dig(data, pick, sentinel = nil, &blk) + case [data, pick, blk] + in [_, nil, nil] + data + in [Hash, Symbol, _] | [Array, Integer, _] + blk.nil? ? data.fetch(pick, sentinel) : data.fetch(pick, &blk) + in [Hash | Array, Array, _] + pick.reduce(data) do |acc, key| + case acc + in Hash if acc.key?(key) + acc.fetch(key) + in Array if key.is_a?(Integer) && key < acc.length + acc[key] + else + return blk.nil? ? sentinel : blk.call + end + end + in _ + blk.nil? ? sentinel : blk.call + end + end + end + + class << self + # @api private + # + # @param uri [URI::Generic] + # + # @return [String] + def uri_origin(uri) + "#{uri.scheme}://#{uri.host}#{uri.port == uri.default_port ? 
'' : ":#{uri.port}"}" + end + + # @api private + # + # @param path [String, Array] + # + # @return [String] + def interpolate_path(path) + case path + in String + path + in [] + "" + in [String => p, *interpolations] + encoded = interpolations.map { ERB::Util.url_encode(_1) } + format(p, *encoded) + end + end + end + + class << self + # @api private + # + # @param query [String, nil] + # + # @return [Hash{String=>Array}] + def decode_query(query) + CGI.parse(query.to_s) + end + + # @api private + # + # @param query [Hash{String=>Array, String, nil}, nil] + # + # @return [String, nil] + def encode_query(query) + query.to_h.empty? ? nil : URI.encode_www_form(query) + end + end + + class << self + # @api private + # + # @param url [URI::Generic, String] + # + # @return [Hash{Symbol=>String, Integer, nil}] + def parse_uri(url) + parsed = URI::Generic.component.zip(URI.split(url)).to_h + {**parsed, query: decode_query(parsed.fetch(:query))} + end + + # @api private + # + # @param parsed [Hash{Symbol=>String, Integer, nil}] . + # + # @option parsed [String, nil] :scheme + # + # @option parsed [String, nil] :host + # + # @option parsed [Integer, nil] :port + # + # @option parsed [String, nil] :path + # + # @option parsed [Hash{String=>Array}] :query + # + # @return [URI::Generic] + def unparse_uri(parsed) + URI::Generic.build(**parsed, query: encode_query(parsed.fetch(:query))) + end + + # @api private + # + # @param lhs [Hash{Symbol=>String, Integer, nil}] . + # + # @option lhs [String, nil] :scheme + # + # @option lhs [String, nil] :host + # + # @option lhs [Integer, nil] :port + # + # @option lhs [String, nil] :path + # + # @option lhs [Hash{String=>Array}] :query + # + # @param rhs [Hash{Symbol=>String, Integer, nil}] . + # + # @option rhs [String, nil] :scheme + # + # @option rhs [String, nil] :host + # + # @option rhs [Integer, nil] :port + # + # @option rhs [String, nil] :path + # + # @option rhs [Hash{String=>Array}] :query + # + # @return [URI::Generic] + def join_parsed_uri(lhs, rhs) + base_path, base_query = lhs.fetch_values(:path, :query) + slashed = base_path.end_with?("/") ? base_path : "#{base_path}/" + + parsed_path, parsed_query = parse_uri(rhs.fetch(:path)).fetch_values(:path, :query) + override = URI::Generic.build(**rhs.slice(:scheme, :host, :port), path: parsed_path) + + joined = URI.join(URI::Generic.build(lhs.except(:path, :query)), slashed, override) + query = deep_merge( + joined.path == base_path ? base_query : {}, + parsed_query, + rhs[:query].to_h, + concat: true + ) + + joined.query = encode_query(query) + joined + end + end + + class << self + # @api private + # + # @param headers [Hash{String=>String, Integer, Array, nil}] + # + # @return [Hash{String=>String}] + def normalized_headers(*headers) + {}.merge(*headers.compact).to_h do |key, val| + value = + case val + in Array + val.map { _1.to_s.strip }.join(", ") + else + val&.to_s&.strip + end + [key.downcase, value] + end + end + end + + # @api private + # + # An adapter that satisfies the IO interface required by `::IO.copy_stream` + class ReadIOAdapter + # @api private + # + # @param max_len [Integer, nil] + # + # @return [String] + private def read_enum(max_len) + case max_len + in nil + @stream.to_a.join + in Integer + @buf << @stream.next while @buf.length < max_len + @buf.slice!(..max_len) + end + rescue StopIteration + @stream = nil + @buf.slice!(0..) 
+ end + + # @api private + # + # @param max_len [Integer, nil] + # @param out_string [String, nil] + # + # @return [String, nil] + def read(max_len = nil, out_string = nil) + case @stream + in nil + nil + in IO | StringIO + @stream.read(max_len, out_string) + in Enumerator + read = read_enum(max_len) + case out_string + in String + out_string.replace(read) + in nil + read + end + end + .tap(&@blk) + end + + # @api private + # + # @param stream [String, IO, StringIO, Enumerable] + # @param blk [Proc] + # + # @yieldparam [String] + def initialize(stream, &blk) + @stream = stream.is_a?(String) ? StringIO.new(stream) : stream + @buf = String.new.b + @blk = blk + end + end + + class << self + # @param blk [Proc] + # + # @yieldparam [Enumerator::Yielder] + # @return [Enumerable] + def writable_enum(&blk) + Enumerator.new do |y| + y.define_singleton_method(:write) do + self << _1.clone + _1.bytesize + end + + blk.call(y) + end + end + end + + class << self + # @api private + # + # @param y [Enumerator::Yielder] + # @param boundary [String] + # @param key [Symbol, String] + # @param val [Object] + private def write_multipart_chunk(y, boundary:, key:, val:) + y << "--#{boundary}\r\n" + y << "Content-Disposition: form-data" + unless key.nil? + name = ERB::Util.url_encode(key.to_s) + y << "; name=\"#{name}\"" + end + if val.is_a?(IO) + filename = ERB::Util.url_encode(File.basename(val.to_path)) + y << "; filename=\"#{filename}\"" + end + y << "\r\n" + case val + in IO + y << "Content-Type: application/octet-stream\r\n\r\n" + IO.copy_stream(val, y) + in StringIO + y << "Content-Type: application/octet-stream\r\n\r\n" + y << val.string + in String + y << "Content-Type: application/octet-stream\r\n\r\n" + y << val.to_s + in true | false | Integer | Float | Symbol + y << "Content-Type: text/plain\r\n\r\n" + y << val.to_s + else + y << "Content-Type: application/json\r\n\r\n" + y << JSON.fast_generate(val) + end + y << "\r\n" + end + + # @api private + # + # @param body [Object] + # + # @return [Array(String, Enumerable)] + private def encode_multipart_streaming(body) + boundary = SecureRandom.urlsafe_base64(60) + + strio = writable_enum do |y| + case body + in Hash + body.each do |key, val| + case val + in Array if val.all? 
{ primitive?(_1) } + val.each do |v| + write_multipart_chunk(y, boundary: boundary, key: key, val: v) + end + else + write_multipart_chunk(y, boundary: boundary, key: key, val: val) + end + end + else + write_multipart_chunk(y, boundary: boundary, key: nil, val: body) + end + y << "--#{boundary}--\r\n" + end + + [boundary, strio] + end + + # @api private + # + # @param headers [Hash{String=>String}] + # @param body [Object] + # + # @return [Object] + def encode_content(headers, body) + content_type = headers["content-type"] + case [content_type, body] + in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array] + [headers, JSON.fast_generate(body)] + in [%r{^application/(?:x-)?jsonl}, Enumerable] + [headers, body.lazy.map { JSON.fast_generate(_1) }] + in [%r{^multipart/form-data}, Hash | IO | StringIO] + boundary, strio = encode_multipart_streaming(body) + headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} + [headers, strio] + in [_, StringIO] + [headers, body.string] + else + [headers, body] + end + end + + # @api private + # + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param stream [Enumerable] + # @param suppress_error [Boolean] + # + # @raise [JSON::ParserError] + # @return [Object] + def decode_content(headers, stream:, suppress_error: false) + case headers["content-type"] + in %r{^application/(?:vnd\.api\+)?json} + json = stream.to_a.join + begin + JSON.parse(json, symbolize_names: true) + rescue JSON::ParserError => e + raise e unless suppress_error + json + end + in %r{^application/(?:x-)?jsonl} + lines = decode_lines(stream) + chain_fused(lines) do |y| + lines.each { y << JSON.parse(_1, symbolize_names: true) } + end + in %r{^text/event-stream} + lines = decode_lines(stream) + decode_sse(lines) + in %r{^text/} + stream.to_a.join + else + # TODO: parsing other response types + StringIO.new(stream.to_a.join) + end + end + end + + class << self + # @api private + # + # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html + # + # @param enum [Enumerable] + # @param external [Boolean] + # @param close [Proc] + # + # @return [Enumerable] + def fused_enum(enum, external: false, &close) + fused = false + iter = Enumerator.new do |y| + next if fused + + fused = true + if external + loop { y << enum.next } + else + enum.each(&y) + end + ensure + close&.call + close = nil + end + + iter.define_singleton_method(:rewind) do + fused = true + self + end + iter + end + + # @api private + # + # @param enum [Enumerable, nil] + def close_fused!(enum) + return unless enum.is_a?(Enumerator) + + # rubocop:disable Lint/UnreachableLoop + enum.rewind.each { break } + # rubocop:enable Lint/UnreachableLoop + end + + # @api private + # + # @param enum [Enumerable, nil] + # @param blk [Proc] + # + # @yieldparam [Enumerator::Yielder] + # @return [Enumerable] + def chain_fused(enum, &blk) + iter = Enumerator.new { blk.call(_1) } + fused_enum(iter) { close_fused!(enum) } + end + end + + class << self + # @api private + # + # @param enum [Enumerable] + # + # @return [Enumerable] + def decode_lines(enum) + re = /(\r\n|\r|\n)/ + buffer = String.new.b + cr_seen = nil + + chain_fused(enum) do |y| + enum.each do |row| + offset = buffer.bytesize + buffer << row + while (match = re.match(buffer, cr_seen&.to_i || offset)) + case [match.captures.first, cr_seen] + in ["\r", nil] + cr_seen = match.end(1) + next + in ["\r" | "\r\n", Integer] + y << buffer.slice!(..(cr_seen.pred)) + else + y << buffer.slice!(..(match.end(1).pred)) + end + offset = 0 + cr_seen = nil + end + 
end + + y << buffer.slice!(..(cr_seen.pred)) unless cr_seen.nil? + y << buffer unless buffer.empty? + end + end + + # @api private + # + # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream + # + # @param lines [Enumerable] + # + # @return [Hash{Symbol=>Object}] + def decode_sse(lines) + # rubocop:disable Metrics/BlockLength + chain_fused(lines) do |y| + blank = {event: nil, data: nil, id: nil, retry: nil} + current = {} + + lines.each do |line| + case line.sub(/\R$/, "") + in "" + next if current.empty? + y << {**blank, **current} + current = {} + in /^:/ + next + in /^([^:]+):\s?(.*)$/ + field, value = Regexp.last_match.captures + case field + in "event" + current.merge!(event: value) + in "data" + (current[:data] ||= String.new.b) << (value << "\n") + in "id" unless value.include?("\0") + current.merge!(id: value) + in "retry" if /^\d+$/ =~ value + current.merge!(retry: Integer(value)) + else + end + else + end + end + # rubocop:enable Metrics/BlockLength + + y << {**blank, **current} unless current.empty? + end + end + end + end + + # rubocop:enable Metrics/ModuleLength + end +end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 081c6175..0729f543 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -6,8 +6,8 @@ module Audio # @see OpenAI::Resources::Audio::Speech#create class SpeechCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute input # The text to generate audio for. The maximum length is 4096 characters. 
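A minimal sketch of how the `decode_lines` and `decode_sse` helpers defined in `lib/openai/internal/util.rb` above compose, assuming the gem loads standalone. The chunk boundaries are illustrative, and the printed events are traced by hand from the hunk rather than captured from a run:

    require "openai"

    # Network reads arrive at arbitrary boundaries; decode_lines reassembles
    # them into complete lines, and decode_sse folds those lines into events
    # per the WHATWG event-stream parsing rules cited in the hunk above.
    chunks = [
      "event: message\nda",
      "ta: {\"ok\":true}\n\n",
      ": comment lines are skipped\n",
      "data: done\n\n"
    ].each

    lines  = OpenAI::Internal::Util.decode_lines(chunks)
    events = OpenAI::Internal::Util.decode_sse(lines)

    events.each { |event| p event }
    # {event: "message", data: "{\"ok\":true}\n", id: nil, retry: nil}
    # {event: nil, data: "done\n", id: nil, retry: nil}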
diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index ee1640c6..063af4b9 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -8,8 +8,8 @@ module Audio # @see OpenAI::Resources::Audio::Transcriptions#stream_raw class TranscriptionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute file # The audio file object (not file name) to transcribe, in one of these formats: diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 80c1292c..57b2727c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -6,8 +6,8 @@ module Audio # @see OpenAI::Resources::Audio::Translations#create class TranslationCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute file # The audio file object (not file name) translate, in one of these formats: flac, diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 1b94b154..a9fb8c08 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Batches#cancel class BatchCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index fcb80ecf..04139051 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Batches#create class BatchCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute completion_window # The time frame within which the batch should be processed. Currently only `24h` diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 0515bbc5..0dc270ae 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Batches#list class BatchListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index fae80854..695466e8 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Batches#retrieve class BatchRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 881fd6ee..d6443c05 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Assistants#create class AssistantCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute model # ID of the model to use. You can use the diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index 08dbb202..1414cb2b 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Assistants#delete class AssistantDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 2ccfe616..1385f253 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Assistants#list class AssistantListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 7fe50f31..1c58ceea 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Assistants#retrieve class AssistantRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 2c59bf2d..11f30474 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Assistants#update class AssistantUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute description # The description of the assistant. The maximum length is 512 characters. diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 495e3cd1..4425ba3c 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -8,8 +8,8 @@ module Beta # @see OpenAI::Resources::Beta::Threads#stream_raw class ThreadCreateAndRunParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id # The ID of the diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index d778808c..434c28c5 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Threads#create class ThreadCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index 71afbb41..0febd443 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Threads#delete class ThreadDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/thread_retrieve_params.rb 
b/lib/openai/models/beta/thread_retrieve_params.rb index c12d6b56..7b3c99a3 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Threads#retrieve class ThreadRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index e80088af..4ae11357 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -6,8 +6,8 @@ module Beta # @see OpenAI::Resources::Beta::Threads#update class ThreadUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 340c113a..5a15f269 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Messages#create class MessageCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute content # The text contents of the message. diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 2a93e0d9..64604e89 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Messages#delete class MessageDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1c5efce5..1cbbff8e 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Messages#list class MessageListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index 9a0811a6..996cb629 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Messages#retrieve class MessageRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index ba22a469..f77a34f4 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Messages#update class MessageUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index f2fe43f6..bc13b744 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#cancel class RunCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index a52ec57a..4b60bf0b 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -9,8 +9,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw class RunCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id # The ID of the diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 43e8bd9d..7e05072e 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#list class RunListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index c75947e3..ddca0649 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#retrieve class RunRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 168c457d..e3579635 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -9,8 +9,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_stream_raw class RunSubmitToolOutputsParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 54971ac1..0b5a69c2 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -7,8 +7,8 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#update class RunUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 08c4b854..d978c19f 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -8,8 +8,8 @@ module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#list class StepListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 9f539603..5349ca7f 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -8,8 +8,8 @@ module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve class StepRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id # diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index f5cf3bd0..6b8c53a3 100644 --- 
a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -8,8 +8,8 @@ module Chat # @see OpenAI::Resources::Chat::Completions#stream_raw class CompletionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute messages # A list of messages comprising the conversation so far. Depending on the diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index 70e85073..69c7c1aa 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -6,8 +6,8 @@ module Chat # @see OpenAI::Resources::Chat::Completions#delete class CompletionDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 905fe4ea..c369717e 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -6,8 +6,8 @@ module Chat # @see OpenAI::Resources::Chat::Completions#list class CompletionListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # Identifier for the last chat completion from the previous pagination request. diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index 3465af46..42086b40 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -6,8 +6,8 @@ module Chat # @see OpenAI::Resources::Chat::Completions#retrieve class CompletionRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 4e7909e9..87539dd0 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -6,8 +6,8 @@ module Chat # @see OpenAI::Resources::Chat::Completions#update class CompletionUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. 
This can be useful diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index d72b3255..271056ab 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -7,8 +7,8 @@ module Completions # @see OpenAI::Resources::Chat::Completions::Messages#list class MessageListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # Identifier for the last message from the previous pagination request. diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index cc824656..993c28d2 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -7,8 +7,8 @@ module Models # @see OpenAI::Resources::Completions#stream_raw class CompletionCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute model # ID of the model to use. You can use the diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 9bae0793..12977ee8 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Embeddings#create class EmbeddingCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute input # Input text to embed, encoded as a string or array of tokens. To embed multiple diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index c1e2edd4..dcb5cd2b 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Files#content class FileContentParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index da8f43df..7049b873 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Files#create class FileCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute file # The File object (not file name) to be uploaded. 
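Each params hunk in this patch is the same mechanical swap to the relocated `OpenAI::Internal::Type::RequestParameters`. A rough sketch of what the included module provides, using `FileCreateParams` from the hunk above; the constructor call and the dumped shapes are assumptions about `OpenAI::BaseModel`, not verified output:

    require "openai"
    require "stringio"

    params = OpenAI::Models::FileCreateParams.new(
      file: StringIO.new("{\"prompt\":\"hi\"}\n"),  # stand-in for a real file
      purpose: :"fine-tune",
      request_options: {timeout: 30}
    )

    # `dump_request`, from the Converter module that `included` extends onto
    # the class, splits the dumped params into the wire payload and the
    # per-request transport options:
    payload, opts = OpenAI::Models::FileCreateParams.dump_request(params)
    opts  # => {timeout: 30}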
diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index c8b31252..cba48c40 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Files#delete class FileDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 997d1fa1..53f26749 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Files#list class FileListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index c1729f9e..187535fd 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Files#retrieve class FileRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index e1830988..465bc7c2 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -6,8 +6,8 @@ module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#cancel class JobCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 7649c07b..bd2f2f88 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -6,8 +6,8 @@ module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#create class JobCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute model # The name of the model to fine-tune. 
You can select one of the diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index 167b9242..0e9abe08 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -6,8 +6,8 @@ module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list_events class JobListEventsParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # Identifier for the last event from the previous pagination request. diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index c3abbcd5..4922697f 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -6,8 +6,8 @@ module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list class JobListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # Identifier for the last job from the previous pagination request. diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index e62d1b6f..17337029 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -6,8 +6,8 @@ module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#retrieve class JobRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index 1b5657d6..7a7b3c86 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -7,8 +7,8 @@ module Jobs # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list class CheckpointListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # Identifier for the last checkpoint ID from the previous pagination request. 
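A hedged sketch of the `deep_merge` and `OMIT` semantics defined in `lib/openai/internal/util.rb` above; the commented results are traced by hand from the hunk:

    require "openai"

    util = OpenAI::Internal::Util

    # OMIT deletes a key during a merge instead of overwriting it:
    base     = {retries: 2, headers: {"x-a" => "1", "x-b" => "2"}}
    override = {headers: {"x-b" => util::OMIT, "x-c" => "3"}}
    util.deep_merge(base, override)
    # => {retries: 2, headers: {"x-a" => "1", "x-c" => "3"}}

    # Arrays are concatenated only when concat: is set; otherwise the
    # right-hand side wins, as with any other non-hash value:
    util.deep_merge({tags: [1]}, {tags: [2]}, concat: true)  # => {tags: [1, 2]}
    util.deep_merge({tags: [1]}, {tags: [2]})                # => {tags: [2]}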
diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index fd6b93ba..fdc94a1e 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Images#create_variation class ImageCreateVariationParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute image # The image to use as the basis for the variation(s). Must be a valid PNG file, diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 77954285..f14ec178 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Images#edit class ImageEditParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute image # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index fbd3db67..38a73865 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Images#generate class ImageGenerateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute prompt # A text description of the desired image(s). 
The maximum length is 1000 diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index 186515a2..7a3e61da 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Models#delete class ModelDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index 191cb53f..eb621c9b 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Models#list class ModelListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index 6c19bb2b..330c276b 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Models#retrieve class ModelRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 6d03f271..95492506 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Moderations#create class ModerationCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute input # Input (or inputs) to classify. Can be a single string, an array of strings, or diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index 82362ce6..b807321f 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -6,8 +6,8 @@ module Responses # @see OpenAI::Resources::Responses::InputItems#list class InputItemListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # An item ID to list items after, used in pagination. 
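The multipart writer above never buffers a whole request body. A sketch of `encode_content`'s observable behavior for a `multipart/form-data` payload, with a `StringIO` standing in for a real upload and `$stdout` for the socket:

    require "openai"
    require "stringio"

    util = OpenAI::Internal::Util

    headers = {"content-type" => "multipart/form-data"}
    body = {purpose: "fine-tune", file: StringIO.new("{\"a\":1}\n")}

    headers, stream = util.encode_content(headers, body)
    headers["content-type"]  # => "multipart/form-data; boundary=<random>"

    # `stream` is the writable_enum Enumerable: chunks are produced lazily as
    # they are consumed, so a large file is copied piecewise, never slurped.
    stream.each { |chunk| $stdout.write(chunk) }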
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 55ec73cc..8253c846 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -8,8 +8,8 @@ module Responses # @see OpenAI::Resources::Responses#stream_raw class ResponseCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute input # Text, image, or file inputs to the model, used to generate a response. diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index 389d812a..311adb57 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -6,8 +6,8 @@ module Responses # @see OpenAI::Resources::Responses#delete class ResponseDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 36f30f22..ea7d0882 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -6,8 +6,8 @@ module Responses # @see OpenAI::Resources::Responses#retrieve class ResponseRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] include # Additional fields to include in the response. See the `include` parameter for diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index e509a7b1..5602bb3f 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Uploads#cancel class UploadCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 1d3ff0d8..4b5b8cc3 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Uploads#complete class UploadCompleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute part_ids # The ordered list of Part IDs. 
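`ReadIOAdapter`, also from the `lib/openai/internal/util.rb` hunk, is what lets an Enumerator of chunks stand in for a file handle. A sketch assuming `IO.copy_stream` falls back to the adapter's `read` method; note that the adapter taps every read result through the block, including the terminal nil, so the block must tolerate nil:

    require "openai"
    require "stringio"

    chunks = ["hello ", "streaming ", "world"].each
    seen = 0
    adapter = OpenAI::Internal::Util::ReadIOAdapter.new(chunks) do |str|
      seen += str.bytesize unless str.nil?  # per-read progress callback
    end

    dest = StringIO.new
    IO.copy_stream(adapter, dest)
    dest.string  # => "hello streaming world"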
diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 68511cf4..b77c4093 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::Uploads#create class UploadCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute bytes # The number of bytes in the file you are uploading. diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 636b73b1..12a0fcaf 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -6,8 +6,8 @@ module Uploads # @see OpenAI::Resources::Uploads::Parts#create class PartCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute data # The chunk of bytes for this Part. diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index a4633e39..a1e61412 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#create class VectorStoreCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index a6973a49..42f0e18c 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#delete class VectorStoreDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 90569e9b..12ea7117 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#list class VectorStoreListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index 8328d0a1..096ded4f 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#retrieve class VectorStoreRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!parse # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index bde42131..acf25855 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#search class VectorStoreSearchParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute query # A query string for a search diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 62b9c58d..43edb6ab 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -5,8 +5,8 @@ module Models # @see OpenAI::Resources::VectorStores#update class VectorStoreUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute expires_after # The expiration policy for a vector store. 
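The `after` cursor documented above is what drives the relocated page classes (`OpenAI::Internal::Page` and `OpenAI::Internal::CursorPage`) that the resource diffs below switch to. A short usage sketch built only from methods visible in this patch (`data`, `next_page?`, `next_page`, `auto_paging_each`); the `client` accessor name is assumed to match the resource path:

    page = client.vector_stores.list(limit: 20)

    # Walk the cursor manually, mirroring auto_paging_each's own loop...
    loop do
      page.data&.each { puts(_1) }
      break unless page.next_page?
      page = page.next_page
    end

    # ...or let the page class drive the `after` cursor itself.
    client.vector_stores.list(limit: 20).auto_paging_each do |store|
      puts(store)
    end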
diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index b9d746ca..c87c2b85 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#cancel class FileBatchCancelParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index ce0dbcf7..19b69c38 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#create class FileBatchCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index a5e43403..d19506f4 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#list_files class FileBatchListFilesParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 37c6f122..a05da932 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#retrieve class FileBatchRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index 52f123e2..9b6fefcc 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#content class FileContentParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git 
a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 514d5c8b..429f1ebc 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#create class FileCreateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute file_id # A [File](https://platform.openai.com/docs/api-reference/files) ID that the diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index 8ed88cd8..bf6c7241 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#delete class FileDeleteParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 34115039..01da7de3 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#list class FileListParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute [r] after # A cursor for use in pagination. 
`after` is an object ID that defines your place diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 65d9dcbe..75f2cc40 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#retrieve class FileRetrieveParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 8d952ea8..a14267b5 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -6,8 +6,8 @@ module VectorStores # @see OpenAI::Resources::VectorStores::Files#update class FileUpdateParams < OpenAI::BaseModel # @!parse - # extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id # diff --git a/lib/openai/page.rb b/lib/openai/page.rb deleted file mode 100644 index 55f85755..00000000 --- a/lib/openai/page.rb +++ /dev/null @@ -1,86 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @example - # if page.has_next? - # page = page.next_page - # end - # - # @example - # page.auto_paging_each do |model| - # puts(model) - # end - # - # @example - # models = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # models => Array - class Page - include OpenAI::Type::BasePage - - # @return [Array, nil] - attr_accessor :data - - # @return [String] - attr_accessor :object - - # @api private - # - # @param client [OpenAI::Transport::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Array] - def initialize(client:, req:, headers:, page_data:) - super - model = req.fetch(:model) - - case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Type::Converter.coerce(model, _1) } - else - end - - case page_data - in {object: String => object} - @object = object - else - end - end - - # @return [Boolean] - def next_page? - false - end - - # @raise [OpenAI::HTTP::Error] - # @return [OpenAI::Page] - def next_page - RuntimeError.new("No more pages available.") - end - - # @param blk [Proc] - def auto_paging_each(&blk) - unless block_given? - raise ArgumentError.new("A block must be given to ##{__method__}") - end - page = self - loop do - page.data&.each { blk.call(_1) } - break unless page.next_page? 
- page = page.next_page - end - end - - # @return [String] - def inspect - "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} object=#{object.inspect}>" - end - end -end diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 2dc49631..677b18f1 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -51,7 +51,7 @@ def create(params) # @param timestamp_granularities [Array] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Audio::TranscriptionCreateParams def stream_raw(params) @@ -66,7 +66,7 @@ def stream_raw(params) path: "audio/transcriptions", headers: {"content-type" => "multipart/form-data", "accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Audio::TranscriptionStreamEvent, options: options ) diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 89f67bb1..fdd3806a 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -54,7 +54,7 @@ def retrieve(batch_id, params = {}) # @param limit [Integer] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::BatchListParams def list(params = {}) @@ -63,7 +63,7 @@ def list(params = {}) method: :get, path: "batches", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Batch, options: options ) diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index da45a68f..a7f952e7 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -96,7 +96,7 @@ def update(assistant_id, params = {}) # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::AssistantListParams def list(params = {}) @@ -105,7 +105,7 @@ def list(params = {}) method: :get, path: "assistants", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Assistant, options: options ) diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 82c8c329..f581f130 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -154,7 +154,7 @@ def create_and_run(params) # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def stream_raw(params) @@ -169,7 +169,7 @@ def stream_raw(params) path: "threads/runs", headers: {"accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Beta::AssistantStreamEvent, options: options ) diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index c8900898..81de13fc 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ 
b/lib/openai/resources/beta/threads/messages.rb @@ -94,7 +94,7 @@ def update(message_id, params) # @param run_id [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::MessageListParams def list(thread_id, params = {}) @@ -103,7 +103,7 @@ def list(thread_id, params = {}) method: :get, path: ["threads/%1$s/messages", thread_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Message, options: options ) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index aa7f72c0..aa61011c 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -76,7 +76,7 @@ def create(thread_id, params) # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams def stream_raw(thread_id, params) @@ -93,7 +93,7 @@ def stream_raw(thread_id, params) query: parsed.slice(*query_params), headers: {"accept" => "text/event-stream"}, body: parsed.except(*query_params), - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Beta::AssistantStreamEvent, options: options ) @@ -162,7 +162,7 @@ def update(run_id, params) # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::RunListParams def list(thread_id, params = {}) @@ -171,7 +171,7 @@ def list(thread_id, params = {}) method: :get, path: ["threads/%1$s/runs", thread_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Run, options: options ) @@ -248,7 +248,7 @@ def submit_tool_outputs(run_id, params) # @param tool_outputs [Array] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_stream_raw(run_id, params) @@ -267,7 +267,7 @@ def submit_tool_stream_raw(run_id, params) path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id], headers: {"accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Beta::AssistantStreamEvent, options: options ) diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index e22cea6d..65972e48 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -51,7 +51,7 @@ def retrieve(step_id, params) # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::Runs::StepListParams def list(run_id, params) @@ -64,7 +64,7 @@ def list(run_id, params) method: :get, path: ["threads/%1$s/runs/%2$s/steps", 
thread_id, run_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Runs::RunStep, options: options ) diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index c000fad2..52736d64 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -129,7 +129,7 @@ def create(params) # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams def stream_raw(params) @@ -144,7 +144,7 @@ def stream_raw(params) path: "chat/completions", headers: {"accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Chat::ChatCompletionChunk, options: options ) @@ -206,7 +206,7 @@ def update(completion_id, params) # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::CompletionListParams def list(params = {}) @@ -215,7 +215,7 @@ def list(params = {}) method: :get, path: "chat/completions", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Chat::ChatCompletion, options: options ) diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index 56dd9e36..489df23b 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -16,7 +16,7 @@ class Messages # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::Completions::MessageListParams def list(completion_id, params = {}) @@ -25,7 +25,7 @@ def list(completion_id, params = {}) method: :get, path: ["chat/completions/%1$s/messages", completion_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Chat::ChatCompletionStoreMessage, options: options ) diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 62a26a53..861ab13a 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -67,7 +67,7 @@ def create(params) # @param user [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::CompletionCreateParams def stream_raw(params) @@ -82,7 +82,7 @@ def stream_raw(params) path: "completions", headers: {"accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Completion, options: options ) diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 2d8cf4e3..2c1b995e 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -75,7 +75,7 @@ def retrieve(file_id, params = {}) # @param purpose [String] # @param request_options [OpenAI::RequestOptions, 
Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FileListParams def list(params = {}) @@ -84,7 +84,7 @@ def list(params = {}) method: :get, path: "files", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FileObject, options: options ) diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 391f59b9..8e28f8c7 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -72,7 +72,7 @@ def retrieve(fine_tuning_job_id, params = {}) # @param metadata [Hash{Symbol=>String}, nil] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListParams def list(params = {}) @@ -81,7 +81,7 @@ def list(params = {}) method: :get, path: "fine_tuning/jobs", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FineTuning::FineTuningJob, options: options ) @@ -115,7 +115,7 @@ def cancel(fine_tuning_job_id, params = {}) # @param limit [Integer] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListEventsParams def list_events(fine_tuning_job_id, params = {}) @@ -124,7 +124,7 @@ def list_events(fine_tuning_job_id, params = {}) method: :get, path: ["fine_tuning/jobs/%1$s/events", fine_tuning_job_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FineTuning::FineTuningJobEvent, options: options ) diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index 0aa37907..b8195545 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -14,7 +14,7 @@ class Checkpoints # @param limit [Integer] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::Jobs::CheckpointListParams def list(fine_tuning_job_id, params = {}) @@ -23,7 +23,7 @@ def list(fine_tuning_job_id, params = {}) method: :get, path: ["fine_tuning/jobs/%1$s/checkpoints", fine_tuning_job_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint, options: options ) diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index c00abfbc..3ab368d3 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -30,14 +30,14 @@ def retrieve(model, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::ModelListParams def list(params = {}) @client.request( method: :get, path: "models", - page: OpenAI::Page, + page: OpenAI::Internal::Page, model: OpenAI::Models::Model, options: params[:request_options] ) diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 46de5e29..3eee10b4 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -90,7 +90,7 @@ def create(params) # 
@param user [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) @@ -105,7 +105,7 @@ def stream_raw(params) path: "responses", headers: {"accept" => "text/event-stream"}, body: parsed, - stream: OpenAI::Stream, + stream: OpenAI::Internal::Stream, model: OpenAI::Models::Responses::ResponseStreamEvent, options: options ) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 42b9fb43..5e875778 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -16,7 +16,7 @@ class InputItems # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) @@ -25,7 +25,7 @@ def list(response_id, params = {}) method: :get, path: ["responses/%1$s/input_items", response_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Responses::ResponseItem, options: options ) diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index beb5fa09..1fc69777 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -87,7 +87,7 @@ def update(vector_store_id, params = {}) # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStoreListParams def list(params = {}) @@ -96,7 +96,7 @@ def list(params = {}) method: :get, path: "vector_stores", query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStore, options: options ) @@ -134,7 +134,7 @@ def delete(vector_store_id, params = {}) # @param rewrite_query [Boolean] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::VectorStoreSearchParams def search(vector_store_id, params) @@ -143,7 +143,7 @@ def search(vector_store_id, params) method: :post, path: ["vector_stores/%1$s/search", vector_store_id], body: parsed, - page: OpenAI::Page, + page: OpenAI::Internal::Page, model: OpenAI::Models::VectorStoreSearchResponse, options: options ) diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 67f78837..5772d514 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -92,7 +92,7 @@ def cancel(batch_id, params) # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileBatchListFilesParams def list_files(batch_id, params) @@ -105,7 +105,7 @@ def list_files(batch_id, params) method: :get, path: ["vector_stores/%1$s/file_batches/%2$s/files", vector_store_id, batch_id], query: parsed, - page: 
OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, options: options ) diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index b296ddcd..0c044e34 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -94,7 +94,7 @@ def update(file_id, params) # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileListParams def list(vector_store_id, params = {}) @@ -103,7 +103,7 @@ def list(vector_store_id, params = {}) method: :get, path: ["vector_stores/%1$s/files", vector_store_id], query: parsed, - page: OpenAI::CursorPage, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, options: options ) @@ -145,7 +145,7 @@ def delete(file_id, params) # @param vector_store_id [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::VectorStores::FileContentParams def content(file_id, params) @@ -157,7 +157,7 @@ def content(file_id, params) @client.request( method: :get, path: ["vector_stores/%1$s/files/%2$s/content", vector_store_id, file_id], - page: OpenAI::Page, + page: OpenAI::Internal::Page, model: OpenAI::Models::VectorStores::FileContentResponse, options: options ) diff --git a/lib/openai/stream.rb b/lib/openai/stream.rb deleted file mode 100644 index 9c6f4244..00000000 --- a/lib/openai/stream.rb +++ /dev/null @@ -1,67 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # @example - # stream.each do |event| - # puts(event) - # end - # - # @example - # events = - # stream - # .lazy - # .select { _1.object_id.even? 
} - # .map(&:itself) - # .take(2) - # .to_a - # - # events => Array - class Stream - include OpenAI::Type::BaseStream - - # @api private - # - # @return [Enumerable] - private def iterator - # rubocop:disable Metrics/BlockLength - @iterator ||= OpenAI::Util.chain_fused(@stream) do |y| - consume = false - - @stream.each do |msg| - next if consume - - case msg - in { data: String => data } if data.start_with?("[DONE]") - consume = true - next - in { data: String => data } - case JSON.parse(data, symbolize_names: true) - in { error: error } - message = - case error - in String - error - in { message: String => m } - m - else - "An error occurred during streaming" - end - OpenAI::Errors::APIError.for( - url: @url, - status: @status, - body: body, - request: nil, - response: @response, - message: message - ) - in decoded - y << OpenAI::Type::Converter.coerce(@model, decoded) - end - else - end - end - end - # rubocop:enable Metrics/BlockLength - end - end -end diff --git a/lib/openai/transport/base_client.rb b/lib/openai/transport/base_client.rb deleted file mode 100644 index 3679bf42..00000000 --- a/lib/openai/transport/base_client.rb +++ /dev/null @@ -1,459 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Transport - # @api private - # - # @abstract - class BaseClient - # from whatwg fetch spec - MAX_REDIRECTS = 20 - - # rubocop:disable Style/MutableConstant - PLATFORM_HEADERS = - { - "x-stainless-arch" => OpenAI::Util.arch, - "x-stainless-lang" => "ruby", - "x-stainless-os" => OpenAI::Util.os, - "x-stainless-package-version" => OpenAI::VERSION, - "x-stainless-runtime" => ::RUBY_ENGINE, - "x-stainless-runtime-version" => ::RUBY_ENGINE_VERSION - } - # rubocop:enable Style/MutableConstant - - class << self - # @api private - # - # @param req [Hash{Symbol=>Object}] - # - # @raise [ArgumentError] - def validate!(req) - keys = [:method, :path, :query, :headers, :body, :unwrap, :page, :stream, :model, :options] - case req - in Hash - req.each_key do |k| - unless keys.include?(k) - raise ArgumentError.new("Request `req` keys must be one of #{keys}, got #{k.inspect}") - end - end - else - raise ArgumentError.new("Request `req` must be a Hash or RequestOptions, got #{req.inspect}") - end - end - - # @api private - # - # @param status [Integer] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # - # @return [Boolean] - def should_retry?(status, headers:) - coerced = OpenAI::Util.coerce_boolean(headers["x-should-retry"]) - case [coerced, status] - in [true | false, _] - coerced - in [_, 408 | 409 | 429 | (500..)] - # retry on: - # 408: timeouts - # 409: locks - # 429: rate limits - # 500+: unknown errors - true - else - false - end - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . - # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Integer] :max_retries - # - # @option request [Float] :timeout - # - # @param status [Integer] - # - # @param response_headers [Hash{String=>String}, Net::HTTPHeader] - # - # @return [Hash{Symbol=>Object}] - def follow_redirect(request, status:, response_headers:) - method, url, headers = request.fetch_values(:method, :url, :headers) - location = - Kernel.then do - URI.join(url, response_headers["location"]) - rescue ArgumentError - message = "Server responded with status #{status} but no valid location header." 
- raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) - end - - request = {**request, url: location} - - case [url.scheme, location.scheme] - in ["https", "http"] - message = "Tried to redirect to a insecure URL" - raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) - else - nil - end - - # from whatwg fetch spec - case [status, method] - in [301 | 302, :post] | [303, _] - drop = %w[content-encoding content-language content-length content-location content-type] - request = { - **request, - method: method == :head ? :head : :get, - headers: headers.except(*drop), - body: nil - } - else - end - - # from undici - if OpenAI::Util.uri_origin(url) != OpenAI::Util.uri_origin(location) - drop = %w[authorization cookie host proxy-authorization] - request = {**request, headers: request.fetch(:headers).except(*drop)} - end - - request - end - - # @api private - # - # @param status [Integer, OpenAI::Errors::APIConnectionError] - # @param stream [Enumerable, nil] - def reap_connection!(status, stream:) - case status - in (..199) | (300..499) - stream&.each { next } - in OpenAI::Errors::APIConnectionError | (500..) - OpenAI::Util.close_fused!(stream) - else - end - end - end - - # @api private - # @return [OpenAI::Transport::PooledNetRequester] - attr_accessor :requester - - # @api private - # - # @param base_url [String] - # @param timeout [Float] - # @param max_retries [Integer] - # @param initial_retry_delay [Float] - # @param max_retry_delay [Float] - # @param headers [Hash{String=>String, Integer, Array, nil}] - # @param idempotency_header [String, nil] - def initialize( - base_url:, - timeout: 0.0, - max_retries: 0, - initial_retry_delay: 0.0, - max_retry_delay: 0.0, - headers: {}, - idempotency_header: nil - ) - @requester = OpenAI::Transport::PooledNetRequester.new - @headers = OpenAI::Util.normalized_headers( - self.class::PLATFORM_HEADERS, - { - "accept" => "application/json", - "content-type" => "application/json" - }, - headers - ) - @base_url = OpenAI::Util.parse_uri(base_url) - @idempotency_header = idempotency_header&.to_s&.downcase - @max_retries = max_retries - @timeout = timeout - @initial_retry_delay = initial_retry_delay - @max_retry_delay = max_retry_delay - end - - # @api private - # - # @return [Hash{String=>String}] - private def auth_headers = {} - - # @api private - # - # @return [String] - private def generate_idempotency_key = "stainless-ruby-retry-#{SecureRandom.uuid}" - - # @api private - # - # @param req [Hash{Symbol=>Object}] . - # - # @option req [Symbol] :method - # - # @option req [String, Array] :path - # - # @option req [Hash{String=>Array, String, nil}, nil] :query - # - # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers - # - # @option req [Object, nil] :body - # - # @option req [Symbol, nil] :unwrap - # - # @option req [Class, nil] :page - # - # @option req [Class, nil] :stream - # - # @option req [OpenAI::Type::Converter, Class, nil] :model - # - # @param opts [Hash{Symbol=>Object}] . 
- # - # @option opts [String, nil] :idempotency_key - # - # @option opts [Hash{String=>Array, String, nil}, nil] :extra_query - # - # @option opts [Hash{String=>String, nil}, nil] :extra_headers - # - # @option opts [Object, nil] :extra_body - # - # @option opts [Integer, nil] :max_retries - # - # @option opts [Float, nil] :timeout - # - # @return [Hash{Symbol=>Object}] - private def build_request(req, opts) - method, uninterpolated_path = req.fetch_values(:method, :path) - - path = OpenAI::Util.interpolate_path(uninterpolated_path) - - query = OpenAI::Util.deep_merge(req[:query].to_h, opts[:extra_query].to_h) - - headers = OpenAI::Util.normalized_headers( - @headers, - auth_headers, - req[:headers].to_h, - opts[:extra_headers].to_h - ) - - if @idempotency_header && - !headers.key?(@idempotency_header) && - !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) - headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } - end - - unless headers.key?("x-stainless-retry-count") - headers["x-stainless-retry-count"] = "0" - end - - timeout = opts.fetch(:timeout, @timeout).to_f.clamp((0..)) - unless headers.key?("x-stainless-timeout") || timeout.zero? - headers["x-stainless-timeout"] = timeout.to_s - end - - headers.reject! { |_, v| v.to_s.empty? } - - body = - case method - in :get | :head | :options | :trace - nil - else - OpenAI::Util.deep_merge(*[req[:body], opts[:extra_body]].compact) - end - - headers, encoded = OpenAI::Util.encode_content(headers, body) - { - method: method, - url: OpenAI::Util.join_parsed_uri(@base_url, {**req, path: path, query: query}), - headers: headers, - body: encoded, - max_retries: opts.fetch(:max_retries, @max_retries), - timeout: timeout - } - end - - # @api private - # - # @param headers [Hash{String=>String}] - # @param retry_count [Integer] - # - # @return [Float] - private def retry_delay(headers, retry_count:) - # Non-standard extension - span = Float(headers["retry-after-ms"], exception: false)&.then { _1 / 1000 } - return span if span - - retry_header = headers["retry-after"] - return span if (span = Float(retry_header, exception: false)) - - span = retry_header&.then do - Time.httpdate(_1) - Time.now - rescue ArgumentError - nil - end - return span if span - - scale = retry_count**2 - jitter = 1 - (0.25 * rand) - (@initial_retry_delay * scale * jitter).clamp(0, @max_retry_delay) - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . 
- # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Integer] :max_retries - # - # @option request [Float] :timeout - # - # @param redirect_count [Integer] - # - # @param retry_count [Integer] - # - # @param send_retry_header [Boolean] - # - # @raise [OpenAI::Errors::APIError] - # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - private def send_request(request, redirect_count:, retry_count:, send_retry_header:) - url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) - input = {**request.except(:timeout), deadline: OpenAI::Util.monotonic_secs + timeout} - - if send_retry_header - headers["x-stainless-retry-count"] = retry_count.to_s - end - - begin - status, response, stream = @requester.execute(input) - rescue OpenAI::APIConnectionError => e - status = e - end - - case status - in ..299 - [status, response, stream] - in 300..399 if redirect_count >= self.class::MAX_REDIRECTS - self.class.reap_connection!(status, stream: stream) - - message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." - raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) - in 300..399 - self.class.reap_connection!(status, stream: stream) - - request = self.class.follow_redirect(request, status: status, response_headers: response) - send_request( - request, - redirect_count: redirect_count + 1, - retry_count: retry_count, - send_retry_header: send_retry_header - ) - in OpenAI::APIConnectionError if retry_count >= max_retries - raise status - in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response) - decoded = Kernel.then do - OpenAI::Util.decode_content(response, stream: stream, suppress_error: true) - ensure - self.class.reap_connection!(status, stream: stream) - end - - raise OpenAI::Errors::APIStatusError.for( - url: url, - status: status, - body: decoded, - request: nil, - response: response - ) - in (400..) | OpenAI::Errors::APIConnectionError - self.class.reap_connection!(status, stream: stream) - - delay = retry_delay(response, retry_count: retry_count) - sleep(delay) - - send_request( - request, - redirect_count: redirect_count, - retry_count: retry_count + 1, - send_retry_header: send_retry_header - ) - end - end - - # Execute the request specified by `req`. This is the method that all resource - # methods call into. - # - # @param req [Hash{Symbol=>Object}] . - # - # @option req [Symbol] :method - # - # @option req [String, Array] :path - # - # @option req [Hash{String=>Array, String, nil}, nil] :query - # - # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers - # - # @option req [Object, nil] :body - # - # @option req [Symbol, nil] :unwrap - # - # @option req [Class, nil] :page - # - # @option req [Class, nil] :stream - # - # @option req [OpenAI::Type::Converter, Class, nil] :model - # - # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options - # - # @raise [OpenAI::Errors::APIError] - # @return [Object] - def request(req) - self.class.validate!(req) - model = req.fetch(:model) { OpenAI::Unknown } - opts = req[:options].to_h - OpenAI::RequestOptions.validate!(opts) - request = build_request(req.except(:options), opts) - url = request.fetch(:url) - - # Don't send the current retry count in the headers if the caller modified the header defaults. 
- send_retry_header = request.fetch(:headers)["x-stainless-retry-count"] == "0" - status, response, stream = send_request( - request, - redirect_count: 0, - retry_count: 0, - send_retry_header: send_retry_header - ) - - decoded = OpenAI::Util.decode_content(response, stream: stream) - case req - in { stream: Class => st } - st.new(model: model, url: url, status: status, response: response, stream: decoded) - in { page: Class => page } - page.new(client: self, req: req, headers: response, page_data: decoded) - else - unwrapped = OpenAI::Util.dig(decoded, req[:unwrap]) - OpenAI::Type::Converter.coerce(model, unwrapped) - end - end - - # @return [String] - def inspect - # rubocop:disable Layout/LineLength - base_url = OpenAI::Util.unparse_uri(@base_url) - "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" - # rubocop:enable Layout/LineLength - end - end - end -end diff --git a/lib/openai/transport/pooled_net_requester.rb b/lib/openai/transport/pooled_net_requester.rb deleted file mode 100644 index b3259684..00000000 --- a/lib/openai/transport/pooled_net_requester.rb +++ /dev/null @@ -1,182 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Transport - # @api private - class PooledNetRequester - # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 - KEEP_ALIVE_TIMEOUT = 30 - - class << self - # @api private - # - # @param url [URI::Generic] - # - # @return [Net::HTTP] - def connect(url) - port = - case [url.port, url.scheme] - in [Integer, _] - url.port - in [nil, "http" | "ws"] - Net::HTTP.http_default_port - in [nil, "https" | "wss"] - Net::HTTP.https_default_port - end - - Net::HTTP.new(url.host, port).tap do - _1.use_ssl = %w[https wss].include?(url.scheme) - _1.max_retries = 0 - end - end - - # @api private - # - # @param conn [Net::HTTP] - # @param deadline [Float] - def calibrate_socket_timeout(conn, deadline) - timeout = deadline - OpenAI::Util.monotonic_secs - conn.open_timeout = conn.read_timeout = conn.write_timeout = conn.continue_timeout = timeout - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . 
- # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @param blk [Proc] - # - # @yieldparam [String] - # @return [Net::HTTPGenericRequest] - def build_request(request, &blk) - method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) - req = Net::HTTPGenericRequest.new( - method.to_s.upcase, - !body.nil?, - method != :head, - url.to_s - ) - - headers.each { req[_1] = _2 } - - case body - in nil - nil - in String - req["content-length"] ||= body.bytesize.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) - in StringIO - req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) - in IO | Enumerator - req["transfer-encoding"] ||= "chunked" unless req["content-length"] - req.body_stream = OpenAI::Util::ReadIOAdapter.new(body, &blk) - end - - req - end - end - - # @api private - # - # @param url [URI::Generic] - # @param deadline [Float] - # @param blk [Proc] - # - # @raise [Timeout::Error] - # @yieldparam [Net::HTTP] - private def with_pool(url, deadline:, &blk) - origin = OpenAI::Util.uri_origin(url) - timeout = deadline - OpenAI::Util.monotonic_secs - pool = - @mutex.synchronize do - @pools[origin] ||= ConnectionPool.new(size: @size) do - self.class.connect(url) - end - end - - pool.with(timeout: timeout, &blk) - end - - # @api private - # - # @param request [Hash{Symbol=>Object}] . - # - # @option request [Symbol] :method - # - # @option request [URI::Generic] :url - # - # @option request [Hash{String=>String}] :headers - # - # @option request [Object] :body - # - # @option request [Float] :deadline - # - # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - def execute(request) - url, deadline = request.fetch_values(:url, :deadline) - - eof = false - finished = false - enum = Enumerator.new do |y| - with_pool(url, deadline: deadline) do |conn| - next if finished - - req = self.class.build_request(request) do - self.class.calibrate_socket_timeout(conn, deadline) - end - - self.class.calibrate_socket_timeout(conn, deadline) - unless conn.started? - conn.keep_alive_timeout = self.class::KEEP_ALIVE_TIMEOUT - conn.start - end - - self.class.calibrate_socket_timeout(conn, deadline) - conn.request(req) do |rsp| - y << [conn, req, rsp] - break if finished - - rsp.read_body do |bytes| - y << bytes - break if finished - - self.class.calibrate_socket_timeout(conn, deadline) - end - eof = true - end - end - rescue Timeout::Error - raise OpenAI::Errors::APITimeoutError - end - - conn, _, response = enum.next - body = OpenAI::Util.fused_enum(enum, external: true) do - finished = true - tap do - enum.next - rescue StopIteration - nil - end - conn.finish if !eof && conn&.started? 
- end - [Integer(response.code), response, (response.body = body)] - end - - # @api private - # - # @param size [Integer] - def initialize(size: Etc.nprocessors) - @mutex = Mutex.new - @size = size - @pools = {} - end - end - end -end diff --git a/lib/openai/type.rb b/lib/openai/type.rb deleted file mode 100644 index f4b8345c..00000000 --- a/lib/openai/type.rb +++ /dev/null @@ -1,23 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - Unknown = OpenAI::Type::Unknown - - BooleanModel = OpenAI::Type::BooleanModel - - Enum = OpenAI::Type::Enum - - Union = OpenAI::Type::Union - - ArrayOf = OpenAI::Type::ArrayOf - - HashOf = OpenAI::Type::HashOf - - BaseModel = OpenAI::Type::BaseModel - - RequestParameters = OpenAI::Type::RequestParameters - - # This module contains various type declarations. - module Type - end -end diff --git a/lib/openai/type/array_of.rb b/lib/openai/type/array_of.rb deleted file mode 100644 index 35bb520d..00000000 --- a/lib/openai/type/array_of.rb +++ /dev/null @@ -1,112 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # @abstract - # - # Array of items of a given type. - class ArrayOf - include OpenAI::Type::Converter - - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) - - # @param other [Object] - # - # @return [Boolean] - def ===(other) = other.is_a?(Array) && other.all?(item_type) - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type - end - - # @api private - # - # @param value [Enumerable, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Array, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - unless value.is_a?(Array) - exactness[:no] += 1 - return value - end - - target = item_type - exactness[:yes] += 1 - value - .map do |item| - case [nilable?, item] - in [true, nil] - exactness[:yes] += 1 - nil - else - OpenAI::Type::Converter.coerce(target, item, state: state) - end - end - end - - # @api private - # - # @param value [Enumerable, Object] - # - # @return [Array, Object] - def dump(value) - target = item_type - value.is_a?(Array) ? value.map { OpenAI::Type::Converter.dump(target, _1) } : super - end - - # @api private - # - # @return [OpenAI::Type::Converter, Class] - protected def item_type = @item_type_fn.call - - # @api private - # - # @return [Boolean] - protected def nilable? = @nilable - - # @api private - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def initialize(type_info, spec = {}) - @item_type_fn = OpenAI::Type::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] 
- end - end - end -end diff --git a/lib/openai/type/base_model.rb b/lib/openai/type/base_model.rb deleted file mode 100644 index 4162bd9a..00000000 --- a/lib/openai/type/base_model.rb +++ /dev/null @@ -1,367 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @abstract - # - # @example - # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` - # comparison_filter => { - # key: key, - # type: type, - # value: value - # } - class BaseModel - extend OpenAI::Type::Converter - - class << self - # @api private - # - # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. - # - # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - def known_fields - @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {}) - end - - # @api private - # - # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - def fields - known_fields.transform_values do |field| - {**field.except(:type_fn), type: field.fetch(:type_fn).call} - end - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param required [Boolean] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def add_field(name_sym, required:, type_info:, spec:) - type_fn, info = - case type_info - in Proc | OpenAI::Type::Converter | Class - [OpenAI::Type::Converter.type_info({**spec, union: type_info}), spec] - in Hash - [OpenAI::Type::Converter.type_info(type_info), type_info] - end - - setter = "#{name_sym}=" - api_name = info.fetch(:api_name, name_sym) - nilable = info[:nil?] - const = required && !nilable ? info.fetch(:const, OpenAI::Util::OMIT) : OpenAI::Util::OMIT - - [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) - - known_fields[name_sym] = - { - mode: @mode, - api_name: api_name, - required: required, - nilable: nilable, - const: const, - type_fn: type_fn - } - - define_method(setter) { @data.store(name_sym, _1) } - - define_method(name_sym) do - target = type_fn.call - value = @data.fetch(name_sym) { const == OpenAI::Util::OMIT ? nil : const } - state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - if (nilable || !required) && value.nil? - nil - else - OpenAI::Type::Converter.coerce( - target, - value, - state: state - ) - end - rescue StandardError - cls = self.class.name.split("::").last - # rubocop:disable Layout/LineLength - message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." - # rubocop:enable Layout/LineLength - raise OpenAI::ConversionError.new(message) - end - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" 
- def required(name_sym, type_info, spec = {}) - add_field(name_sym, required: true, type_info: type_info, spec: spec) - end - - # @api private - # - # @param name_sym [Symbol] - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def optional(name_sym, type_info, spec = {}) - add_field(name_sym, required: false, type_info: type_info, spec: spec) - end - - # @api private - # - # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them - # - # @param blk [Proc] - private def request_only(&blk) - @mode = :dump - blk.call - ensure - @mode = nil - end - - # @api private - # - # `response_only` attributes are omitted from `.#dump` when making requests - # - # @param blk [Proc] - private def response_only(&blk) - @mode = :coerce - blk.call - ensure - @mode = nil - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) = self.class == other.class && @data == other.to_h - - class << self - # @api private - # - # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [OpenAI::BaseModel, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - if value.is_a?(self.class) - exactness[:yes] += 1 - return value - end - - unless (val = OpenAI::Util.coerce_hash(value)).is_a?(Hash) - exactness[:no] += 1 - return value - end - exactness[:yes] += 1 - - keys = val.keys.to_set - instance = new - data = instance.to_h - - # rubocop:disable Metrics/BlockLength - fields.each do |name, field| - mode, required, target = field.fetch_values(:mode, :required, :type) - api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) - - unless val.key?(api_name) - if required && mode != :dump && const == OpenAI::Util::OMIT - exactness[nilable ? :maybe : :no] += 1 - else - exactness[:yes] += 1 - end - next - end - - item = val.fetch(api_name) - keys.delete(api_name) - - converted = - if item.nil? && (nilable || !required) - exactness[nilable ? :yes : :maybe] += 1 - nil - else - coerced = OpenAI::Type::Converter.coerce(target, item, state: state) - case target - in OpenAI::Type::Converter | Symbol - coerced - else - item - end - end - data.store(name, converted) - end - # rubocop:enable Metrics/BlockLength - - keys.each { data.store(_1, val.fetch(_1)) } - instance - end - - # @api private - # - # @param value [OpenAI::BaseModel, Object] - # - # @return [Hash{Object=>Object}, Object] - def dump(value) - unless (coerced = OpenAI::Util.coerce_hash(value)).is_a?(Hash) - return super - end - - acc = {} - - coerced.each do |key, val| - name = key.is_a?(String) ? 
key.to_sym : key - case (field = known_fields[name]) - in nil - acc.store(name, super(val)) - else - mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) - case mode - in :coerce - next - else - target = type_fn.call - acc.store(api_name, OpenAI::Type::Converter.dump(target, val)) - end - end - end - - known_fields.each_value do |field| - mode, api_name, const = field.fetch_values(:mode, :api_name, :const) - next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Util::OMIT - acc.store(api_name, const) - end - - acc - end - end - - # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. - # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. - # - # @param key [Symbol] - # - # @return [Object, nil] - def [](key) - unless key.instance_of?(Symbol) - raise ArgumentError.new("Expected symbol key for lookup, got #{key.inspect}") - end - - @data[key] - end - - # Returns a Hash of the data underlying this object. O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - # - # @return [Hash{Symbol=>Object}] - def to_h = @data - - alias_method :to_hash, :to_h - - # @param keys [Array, nil] - # - # @return [Hash{Symbol=>Object}] - def deconstruct_keys(keys) - (keys || self.class.known_fields.keys) - .filter_map do |k| - unless self.class.known_fields.key?(k) - next - end - - [k, public_send(k)] - end - .to_h - end - - # @param a [Object] - # - # @return [String] - def to_json(*a) = self.class.dump(self).to_json(*a) - - # @param a [Object] - # - # @return [String] - def to_yaml(*a) = self.class.dump(self).to_yaml(*a) - - # Create a new instance of a model. - # - # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel] - def initialize(data = {}) - case OpenAI::Util.coerce_hash(data) - in Hash => coerced - @data = coerced - else - raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}") - end - end - - # @return [String] - def inspect - rows = self.class.known_fields.keys.map do - "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" - rescue OpenAI::ConversionError - "#{_1}=#{@data.fetch(_1)}" - end - "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" - end - end - end -end diff --git a/lib/openai/type/base_page.rb b/lib/openai/type/base_page.rb deleted file mode 100644 index a98b0624..00000000 --- a/lib/openai/type/base_page.rb +++ /dev/null @@ -1,61 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @example - # if page.has_next? - # page = page.next_page - # end - # - # @example - # page.auto_paging_each do |completion| - # puts(completion) - # end - # - # @example - # completions = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # completions => Array - module BasePage - # rubocop:disable Lint/UnusedMethodArgument - - # @return [Boolean] - def next_page? 
= (raise NotImplementedError) - - # @raise [OpenAI::Errors::APIError] - # @return [OpenAI::Type::BasePage] - def next_page = (raise NotImplementedError) - - # @param blk [Proc] - # - # @return [void] - def auto_paging_each(&blk) = (raise NotImplementedError) - - # @return [Enumerable] - def to_enum = super(:auto_paging_each) - - alias_method :enum_for, :to_enum - - # @api private - # - # @param client [OpenAI::Transport::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Object] - def initialize(client:, req:, headers:, page_data:) - @client = client - @req = req - super() - end - - # rubocop:enable Lint/UnusedMethodArgument - end - end -end diff --git a/lib/openai/type/base_stream.rb b/lib/openai/type/base_stream.rb deleted file mode 100644 index 98f2b576..00000000 --- a/lib/openai/type/base_stream.rb +++ /dev/null @@ -1,63 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @example - # stream.each do |chunk| - # puts(chunk) - # end - # - # @example - # chunks = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # chunks => Array - module BaseStream - include Enumerable - - # @return [void] - def close = OpenAI::Util.close_fused!(@iterator) - - # @api private - # - # @return [Enumerable] - private def iterator = (raise NotImplementedError) - - # @param blk [Proc] - # - # @return [void] - def each(&blk) - unless block_given? - raise ArgumentError.new("A block must be given to ##{__method__}") - end - @iterator.each(&blk) - end - - # @return [Enumerator] - def to_enum = @iterator - - alias_method :enum_for, :to_enum - - # @api private - # - # @param model [Class, OpenAI::Type::Converter] - # @param url [URI::Generic] - # @param status [Integer] - # @param response [Net::HTTPResponse] - # @param stream [Enumerable] - def initialize(model:, url:, status:, response:, stream:) - @model = model - @url = url - @status = status - @response = response - @stream = stream - @iterator = iterator - end - end - end -end diff --git a/lib/openai/type/boolean_model.rb b/lib/openai/type/boolean_model.rb deleted file mode 100644 index 9ee84edd..00000000 --- a/lib/openai/type/boolean_model.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # @abstract - # - # Ruby has no Boolean class; this is something for models to refer to. - class BooleanModel - extend OpenAI::Type::Converter - - # @param other [Object] - # - # @return [Boolean] - def self.===(other) = other == true || other == false - - # @param other [Object] - # - # @return [Boolean] - def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel - - class << self - # @api private - # - # @param value [Boolean, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Boolean, Object] - def coerce(value, state:) - state.fetch(:exactness)[value == true || value == false ? 
:yes : :no] += 1 - value - end - - # @!parse - # # @api private - # # - # # @param value [Boolean, Object] - # # - # # @return [Boolean, Object] - # def dump(value) = super - end - end - end -end diff --git a/lib/openai/type/converter.rb b/lib/openai/type/converter.rb deleted file mode 100644 index 8a4739ea..00000000 --- a/lib/openai/type/converter.rb +++ /dev/null @@ -1,217 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # rubocop:disable Metrics/ModuleLength - # @api private - module Converter - # rubocop:disable Lint/UnusedMethodArgument - - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) = (raise NotImplementedError) - - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - case value - in Array - value.map { OpenAI::Unknown.dump(_1) } - in Hash - value.transform_values { OpenAI::Unknown.dump(_1) } - in OpenAI::BaseModel - value.class.dump(value) - else - value - end - end - - # rubocop:enable Lint/UnusedMethodArgument - - class << self - # @api private - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - # - # @return [Proc] - def type_info(spec) - case spec - in Proc - spec - in Hash - type_info(spec.slice(:const, :enum, :union).first&.last) - in true | false - -> { OpenAI::BooleanModel } - in OpenAI::Type::Converter | Class | Symbol - -> { spec } - in NilClass | Integer | Float - -> { spec.class } - end - end - - # @api private - # - # Based on `target`, transform `value` into `target`, to the extent possible: - # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered - # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode - # - # @param target [OpenAI::Type::Converter, Class] - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: - # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. - # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: - # - # - `yes`: the value can be converted to the target type with minimum coercion. - # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. - # - # See implementation below for more details. 
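# [editor's sketch; not part of the patch] -----------------------------------
# The strictness modes described above, in practice (implementation follows):
OpenAI::Type::Converter.coerce(Integer, "42") # => 42, tallied under :maybe
OpenAI::Type::Converter.coerce(Float, 1)      # => 1.0, tallied under :yes
strong = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
OpenAI::Type::Converter.coerce(Integer, "42", state: strong) # raises TypeError
# [end sketch] ----------------------------------------------------------------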
- # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce( - target, - value, - state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - ) - # rubocop:disable Lint/SuppressedException - # rubocop:disable Metrics/BlockNesting - strictness, exactness = state.fetch_values(:strictness, :exactness) - - case target - in OpenAI::Type::Converter - return target.coerce(value, state: state) - in Class - if value.is_a?(target) - exactness[:yes] += 1 - return value - end - - case target - in -> { _1 <= NilClass } - exactness[value.nil? ? :yes : :maybe] += 1 - return nil - in -> { _1 <= Integer } - if value.is_a?(Integer) - exactness[:yes] += 1 - return value - elsif strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - else - Kernel.then do - return Integer(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError - end - end - in -> { _1 <= Float } - if value.is_a?(Numeric) - exactness[:yes] += 1 - return Float(value) - elsif strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - else - Kernel.then do - return Float(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError - end - end - in -> { _1 <= String } - case value - in String | Symbol | Numeric - exactness[value.is_a?(Numeric) ? :maybe : :yes] += 1 - return value.to_s - else - if strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - end - end - in -> { _1 <= Date || _1 <= Time } - Kernel.then do - return target.parse(value).tap { exactness[:yes] += 1 } - rescue ArgumentError, TypeError => e - raise e if strictness == :strong - end - in -> { _1 <= IO } if value.is_a?(String) - exactness[:yes] += 1 - return StringIO.new(value.b) - else - end - in Symbol - if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target - exactness[:yes] += 1 - return target - elsif strictness == :strong - message = "cannot convert non-matching #{value.class} into #{target.inspect}" - raise ArgumentError.new(message) - end - else - end - - exactness[:no] += 1 - value - # rubocop:enable Metrics/BlockNesting - # rubocop:enable Lint/SuppressedException - end - - # @api private - # - # @param target [OpenAI::Type::Converter, Class] - # @param value [Object] - # - # @return [Object] - def dump(target, value) - target.is_a?(OpenAI::Type::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value) - end - end - end - # rubocop:enable Metrics/ModuleLength - end -end diff --git a/lib/openai/type/enum.rb b/lib/openai/type/enum.rb deleted file mode 100644 index 2518da97..00000000 --- a/lib/openai/type/enum.rb +++ /dev/null @@ -1,105 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # A value from among a specified list of options. OpenAPI enum values map to Ruby - # values in the SDK as follows: - # - # 1. boolean => true | false - # 2. integer => Integer - # 3. float => Float - # 4. string => Symbol - # - # We can therefore convert string values to Symbols, but can't convert other - # values safely. - # - # @example - # # `chat_model` is a `OpenAI::Models::ChatModel` - # case chat_model - # when OpenAI::Models::ChatModel::O3_MINI - # # ... - # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 - # # ... 
- # when OpenAI::Models::ChatModel::O1 - # # ... - # else - # puts(chat_model) - # end - # - # @example - # case chat_model - # in :"o3-mini" - # # ... - # in :"o3-mini-2025-01-31" - # # ... - # in :o1 - # # ... - # else - # puts(chat_model) - # end - module Enum - include OpenAI::Type::Converter - - # All of the valid Symbol values for this enum. - # - # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values - - # @param other [Object] - # - # @return [Boolean] - def ===(other) = values.include?(other) - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set - end - - # @api private - # - # Unlike with primitives, `Enum` additionally validates that the value is a member - # of the enum. - # - # @param value [String, Symbol, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Symbol, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - val = value.is_a?(String) ? value.to_sym : value - - if values.include?(val) - exactness[:yes] += 1 - val - else - exactness[values.first&.class == val.class ? :maybe : :no] += 1 - value - end - end - - # @!parse - # # @api private - # # - # # @param value [Symbol, Object] - # # - # # @return [Symbol, Object] - # def dump(value) = super - end - end -end diff --git a/lib/openai/type/hash_of.rb b/lib/openai/type/hash_of.rb deleted file mode 100644 index b6760886..00000000 --- a/lib/openai/type/hash_of.rb +++ /dev/null @@ -1,132 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # @abstract - # - # Hash of items of a given type. - class HashOf - include OpenAI::Type::Converter - - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) - - # @param other [Object] - # - # @return [Boolean] - def ===(other) - type = item_type - case other - in Hash - other.all? do |key, val| - case [key, val] - in [Symbol | String, ^type] - true - else - false - end - end - else - false - end - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type - end - - # @api private - # - # @param value [Hash{Object=>Object}, Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Hash{Symbol=>Object}, Object] - def coerce(value, state:) - exactness = state.fetch(:exactness) - - unless value.is_a?(Hash) - exactness[:no] += 1 - return value - end - - target = item_type - exactness[:yes] += 1 - value - .to_h do |key, val| - k = key.is_a?(String) ? 
key.to_sym : key - v = - case [nilable?, val] - in [true, nil] - exactness[:yes] += 1 - nil - else - OpenAI::Type::Converter.coerce(target, val, state: state) - end - - exactness[:no] += 1 unless k.is_a?(Symbol) - [k, v] - end - end - - # @api private - # - # @param value [Hash{Object=>Object}, Object] - # - # @return [Hash{Symbol=>Object}, Object] - def dump(value) - target = item_type - value.is_a?(Hash) ? value.transform_values { OpenAI::Type::Converter.dump(target, _1) } : super - end - - # @api private - # - # @return [OpenAI::Type::Converter, Class] - protected def item_type = @item_type_fn.call - - # @api private - # - # @return [Boolean] - protected def nilable? = @nilable - - # @api private - # - # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - def initialize(type_info, spec = {}) - @item_type_fn = OpenAI::Type::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] - end - end - end -end diff --git a/lib/openai/type/request_parameters.rb b/lib/openai/type/request_parameters.rb deleted file mode 100644 index 3d4e267e..00000000 --- a/lib/openai/type/request_parameters.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - module RequestParameters - # @!parse - # # Options to specify HTTP behaviour for this request. - # # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # attr_accessor :request_options - - # @param mod [Module] - def self.included(mod) - return unless mod <= OpenAI::BaseModel - - mod.extend(OpenAI::Type::RequestParameters::Converter) - mod.optional(:request_options, OpenAI::RequestOptions) - end - - # @api private - module Converter - # @api private - # - # @param params [Object] - # - # @return [Array(Object, Hash{Symbol=>Object})] - def dump_request(params) - case (dumped = dump(params)) - in Hash - [dumped.except(:request_options), dumped[:request_options]] - else - [dumped, nil] - end - end - end - end - end -end diff --git a/lib/openai/type/union.rb b/lib/openai/type/union.rb deleted file mode 100644 index da0bde31..00000000 --- a/lib/openai/type/union.rb +++ /dev/null @@ -1,210 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # @example - # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` - # case chat_completion_content_part - # when OpenAI::Models::Chat::ChatCompletionContentPartText - # puts(chat_completion_content_part.text) - # when OpenAI::Models::Chat::ChatCompletionContentPartImage - # puts(chat_completion_content_part.image_url) - # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio - # puts(chat_completion_content_part.input_audio) - # else - # puts(chat_completion_content_part) - # end - # - # @example - # case chat_completion_content_part - # in {type: :text, text: text} - # puts(text) - # in {type: :image_url, image_url: image_url} - # puts(image_url) - # in {type: :input_audio, input_audio: input_audio} - # puts(input_audio) - # else - # puts(chat_completion_content_part) - # end - module Union - include OpenAI::Type::Converter - - # @api private - # - # All of the specified variant info for this union. 
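# [editor's sketch; not part of the patch] -----------------------------------
# The union DSL whose internals follow below, on a hypothetical `IdOrIndex`
# union of two primitive variants:
module IdOrIndex
  extend OpenAI::Union
  variant String
  variant Integer
end

IdOrIndex === 7 # => true
state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
OpenAI::Type::Converter.coerce(IdOrIndex, 7, state: state) # => 7
# the exact Integer variant returns early, before alternatives are scored
# [end sketch] ----------------------------------------------------------------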
- # - # @return [Array] - private def known_variants = (@known_variants ||= []) - - # @api private - # - # @return [Array] - protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } - end - - # All of the specified variants for this union. - # - # @return [Array] - def variants = derefed_variants.map(&:last) - - # @api private - # - # @param property [Symbol] - private def discriminator(property) - case property - in Symbol - @discriminator = property - end - end - - # @api private - # - # @param key [Symbol, Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] - # - # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Type::Converter, Class] . - # - # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const - # - # @option spec [Proc] :enum - # - # @option spec [Proc] :union - # - # @option spec [Boolean] :"nil?" - private def variant(key, spec = nil) - variant_info = - case key - in Symbol - [key, OpenAI::Type::Converter.type_info(spec)] - in Proc | OpenAI::Type::Converter | Class | Hash - [nil, OpenAI::Type::Converter.type_info(key)] - end - - known_variants << variant_info - end - - # @api private - # - # @param value [Object] - # - # @return [OpenAI::Type::Converter, Class, nil] - private def resolve_variant(value) - case [@discriminator, value] - in [_, OpenAI::BaseModel] - value.class - in [Symbol, Hash] - key = value.fetch(@discriminator) do - value.fetch(@discriminator.to_s, OpenAI::Util::OMIT) - end - - return nil if key == OpenAI::Util::OMIT - - key = key.to_sym if key.is_a?(String) - known_variants.find { |k,| k == key }&.last&.call - else - nil - end - end - - # rubocop:disable Style/HashEachMethods - # rubocop:disable Style/CaseEquality - - # @param other [Object] - # - # @return [Boolean] - def ===(other) - known_variants.any? do |_, variant_fn| - variant_fn.call === other - end - end - - # @param other [Object] - # - # @return [Boolean] - def ==(other) - # rubocop:disable Layout/LineLength - other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants - # rubocop:enable Layout/LineLength - end - - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) - if (target = resolve_variant(value)) - return OpenAI::Type::Converter.coerce(target, value, state: state) - end - - strictness = state.fetch(:strictness) - exactness = state.fetch(:exactness) - state[:strictness] = strictness == :strong ? true : strictness - - alternatives = [] - known_variants.each do |_, variant_fn| - target = variant_fn.call - exact = state[:exactness] = {yes: 0, no: 0, maybe: 0} - state[:branched] += 1 - - coerced = OpenAI::Type::Converter.coerce(target, value, state: state) - yes, no, maybe = exact.values - if (no + maybe).zero? || (!strictness && yes.positive?) - exact.each { exactness[_1] += _2 } - state[:exactness] = exactness - return coerced - elsif maybe.positive? 
- alternatives << [[-yes, -maybe, no], exact, coerced] - end - end - - case alternatives.sort_by(&:first) - in [] - exactness[:no] += 1 - if strictness == :strong - message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" - raise ArgumentError.new(message) - end - value - in [[_, exact, coerced], *] - exact.each { exactness[_1] += _2 } - coerced - end - .tap { state[:exactness] = exactness } - ensure - state[:strictness] = strictness - end - - # @api private - # - # @param value [Object] - # - # @return [Object] - def dump(value) - if (target = resolve_variant(value)) - return OpenAI::Type::Converter.dump(target, value) - end - - known_variants.each do - target = _2.call - return OpenAI::Type::Converter.dump(target, value) if target === value - end - - super - end - - # rubocop:enable Style/CaseEquality - # rubocop:enable Style/HashEachMethods - end - end -end diff --git a/lib/openai/type/unknown.rb b/lib/openai/type/unknown.rb deleted file mode 100644 index 7cb2567d..00000000 --- a/lib/openai/type/unknown.rb +++ /dev/null @@ -1,56 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Type - # @api private - # - # @abstract - # - # When we don't know what to expect for the value. - class Unknown - extend OpenAI::Type::Converter - - # rubocop:disable Lint/UnusedMethodArgument - - # @param other [Object] - # - # @return [Boolean] - def self.===(other) = true - - # @param other [Object] - # - # @return [Boolean] - def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown - - class << self - # @api private - # - # @param value [Object] - # - # @param state [Hash{Symbol=>Object}] . - # - # @option state [Boolean, :strong] :strictness - # - # @option state [Hash{Symbol=>Object}] :exactness - # - # @option state [Integer] :branched - # - # @return [Object] - def coerce(value, state:) - state.fetch(:exactness)[:yes] += 1 - value - end - - # @!parse - # # @api private - # # - # # @param value [Object] - # # - # # @return [Object] - # def dump(value) = super - end - - # rubocop:enable Lint/UnusedMethodArgument - end - end -end diff --git a/lib/openai/util.rb b/lib/openai/util.rb deleted file mode 100644 index 379dddc4..00000000 --- a/lib/openai/util.rb +++ /dev/null @@ -1,722 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - # rubocop:disable Metrics/ModuleLength - - # @api private - module Util - # @api private - # - # @return [Float] - def self.monotonic_secs = Process.clock_gettime(Process::CLOCK_MONOTONIC) - - class << self - # @api private - # - # @return [String] - def arch - case (arch = RbConfig::CONFIG["arch"])&.downcase - in nil - "unknown" - in /aarch64|arm64/ - "arm64" - in /x86_64/ - "x64" - in /arm/ - "arm" - else - "other:#{arch}" - end - end - - # @api private - # - # @return [String] - def os - case (host = RbConfig::CONFIG["host_os"])&.downcase - in nil - "Unknown" - in /linux/ - "Linux" - in /darwin/ - "MacOS" - in /freebsd/ - "FreeBSD" - in /openbsd/ - "OpenBSD" - in /mswin|mingw|cygwin|ucrt/ - "Windows" - else - "Other:#{host}" - end - end - end - - class << self - # @api private - # - # @param input [Object] - # - # @return [Boolean] - def primitive?(input) - case input - in true | false | Integer | Float | Symbol | String - true - else - false - end - end - - # @api private - # - # @param input [Object] - # - # @return [Boolean, Object] - def coerce_boolean(input) - case input.is_a?(String) ? input.downcase : input - in Numeric - input.nonzero? 
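# [editor's sketch; not part of the patch] -----------------------------------
# Examples for `coerce_boolean` (this method) and `coerce_boolean!` (next):
#   OpenAI::Util.coerce_boolean("TRUE") # => true ("TRUE" is downcased first)
#   OpenAI::Util.coerce_boolean(0)      # => nil  (0.nonzero? is nil)
#   OpenAI::Util.coerce_boolean!("yes") # raises ArgumentError: not coercible
# [end sketch] ----------------------------------------------------------------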
- in "true" - true - in "false" - false - else - input - end - end - - # @api private - # - # @param input [Object] - # - # @raise [ArgumentError] - # @return [Boolean, nil] - def coerce_boolean!(input) - case coerce_boolean(input) - in true | false | nil => coerced - coerced - else - raise ArgumentError.new("Unable to coerce #{input.inspect} into boolean value") - end - end - - # @api private - # - # @param input [Object] - # - # @return [Integer, Object] - def coerce_integer(input) - case input - in true - 1 - in false - 0 - else - Integer(input, exception: false) || input - end - end - - # @api private - # - # @param input [Object] - # - # @return [Float, Object] - def coerce_float(input) - case input - in true - 1.0 - in false - 0.0 - else - Float(input, exception: false) || input - end - end - - # @api private - # - # @param input [Object] - # - # @return [Hash{Object=>Object}, Object] - def coerce_hash(input) - case input - in NilClass | Array | Set | Enumerator - input - else - input.respond_to?(:to_h) ? input.to_h : input - end - end - end - - # Use this to indicate that a value should be explicitly removed from a data - # structure when using `OpenAI::Util.deep_merge`. - # - # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging - # `{a: 1}` and `{}` would produce `{a: 1}`. - OMIT = Object.new.freeze - - class << self - # @api private - # - # @param lhs [Object] - # @param rhs [Object] - # @param concat [Boolean] - # - # @return [Object] - private def deep_merge_lr(lhs, rhs, concat: false) - case [lhs, rhs, concat] - in [Hash, Hash, _] - rhs_cleaned = rhs.reject { _2 == OpenAI::Util::OMIT } - lhs - .reject { |key, _| rhs[key] == OpenAI::Util::OMIT } - .merge(rhs_cleaned) do |_, old_val, new_val| - deep_merge_lr(old_val, new_val, concat: concat) - end - in [Array, Array, true] - lhs.concat(rhs) - else - rhs - end - end - - # @api private - # - # Recursively merge one hash with another. If the values at a given key are not - # both hashes, just take the new value. - # - # @param values [Array] - # - # @param sentinel [Object, nil] the value to return if no values are provided. - # - # @param concat [Boolean] whether to merge sequences by concatenation. - # - # @return [Object] - def deep_merge(*values, sentinel: nil, concat: false) - case values - in [value, *values] - values.reduce(value) do |acc, val| - deep_merge_lr(acc, val, concat: concat) - end - else - sentinel - end - end - - # @api private - # - # @param data [Hash{Symbol=>Object}, Array, Object] - # @param pick [Symbol, Integer, Array, nil] - # @param sentinel [Object, nil] - # @param blk [Proc, nil] - # - # @return [Object, nil] - def dig(data, pick, sentinel = nil, &blk) - case [data, pick, blk] - in [_, nil, nil] - data - in [Hash, Symbol, _] | [Array, Integer, _] - blk.nil? ? data.fetch(pick, sentinel) : data.fetch(pick, &blk) - in [Hash | Array, Array, _] - pick.reduce(data) do |acc, key| - case acc - in Hash if acc.key?(key) - acc.fetch(key) - in Array if key.is_a?(Integer) && key < acc.length - acc[key] - else - return blk.nil? ? sentinel : blk.call - end - end - in _ - blk.nil? ? sentinel : blk.call - end - end - end - - class << self - # @api private - # - # @param uri [URI::Generic] - # - # @return [String] - def uri_origin(uri) - "#{uri.scheme}://#{uri.host}#{uri.port == uri.default_port ? 
'' : ":#{uri.port}"}" - end - - # @api private - # - # @param path [String, Array] - # - # @return [String] - def interpolate_path(path) - case path - in String - path - in [] - "" - in [String => p, *interpolations] - encoded = interpolations.map { ERB::Util.url_encode(_1) } - format(p, *encoded) - end - end - end - - class << self - # @api private - # - # @param query [String, nil] - # - # @return [Hash{String=>Array}] - def decode_query(query) - CGI.parse(query.to_s) - end - - # @api private - # - # @param query [Hash{String=>Array, String, nil}, nil] - # - # @return [String, nil] - def encode_query(query) - query.to_h.empty? ? nil : URI.encode_www_form(query) - end - end - - class << self - # @api private - # - # @param url [URI::Generic, String] - # - # @return [Hash{Symbol=>String, Integer, nil}] - def parse_uri(url) - parsed = URI::Generic.component.zip(URI.split(url)).to_h - {**parsed, query: decode_query(parsed.fetch(:query))} - end - - # @api private - # - # @param parsed [Hash{Symbol=>String, Integer, nil}] . - # - # @option parsed [String, nil] :scheme - # - # @option parsed [String, nil] :host - # - # @option parsed [Integer, nil] :port - # - # @option parsed [String, nil] :path - # - # @option parsed [Hash{String=>Array}] :query - # - # @return [URI::Generic] - def unparse_uri(parsed) - URI::Generic.build(**parsed, query: encode_query(parsed.fetch(:query))) - end - - # @api private - # - # @param lhs [Hash{Symbol=>String, Integer, nil}] . - # - # @option lhs [String, nil] :scheme - # - # @option lhs [String, nil] :host - # - # @option lhs [Integer, nil] :port - # - # @option lhs [String, nil] :path - # - # @option lhs [Hash{String=>Array}] :query - # - # @param rhs [Hash{Symbol=>String, Integer, nil}] . - # - # @option rhs [String, nil] :scheme - # - # @option rhs [String, nil] :host - # - # @option rhs [Integer, nil] :port - # - # @option rhs [String, nil] :path - # - # @option rhs [Hash{String=>Array}] :query - # - # @return [URI::Generic] - def join_parsed_uri(lhs, rhs) - base_path, base_query = lhs.fetch_values(:path, :query) - slashed = base_path.end_with?("/") ? base_path : "#{base_path}/" - - parsed_path, parsed_query = parse_uri(rhs.fetch(:path)).fetch_values(:path, :query) - override = URI::Generic.build(**rhs.slice(:scheme, :host, :port), path: parsed_path) - - joined = URI.join(URI::Generic.build(lhs.except(:path, :query)), slashed, override) - query = deep_merge( - joined.path == base_path ? base_query : {}, - parsed_query, - rhs[:query].to_h, - concat: true - ) - - joined.query = encode_query(query) - joined - end - end - - class << self - # @api private - # - # @param headers [Hash{String=>String, Integer, Array, nil}] - # - # @return [Hash{String=>String}] - def normalized_headers(*headers) - {}.merge(*headers.compact).to_h do |key, val| - value = - case val - in Array - val.map { _1.to_s.strip }.join(", ") - else - val&.to_s&.strip - end - [key.downcase, value] - end - end - end - - # @api private - # - # An adapter that satisfies the IO interface required by `::IO.copy_stream` - class ReadIOAdapter - # @api private - # - # @param max_len [Integer, nil] - # - # @return [String] - private def read_enum(max_len) - case max_len - in nil - @stream.to_a.join - in Integer - @buf << @stream.next while @buf.length < max_len - @buf.slice!(..max_len) - end - rescue StopIteration - @stream = nil - @buf.slice!(0..) 
- end - - # @api private - # - # @param max_len [Integer, nil] - # @param out_string [String, nil] - # - # @return [String, nil] - def read(max_len = nil, out_string = nil) - case @stream - in nil - nil - in IO | StringIO - @stream.read(max_len, out_string) - in Enumerator - read = read_enum(max_len) - case out_string - in String - out_string.replace(read) - in nil - read - end - end - .tap(&@blk) - end - - # @api private - # - # @param stream [String, IO, StringIO, Enumerable] - # @param blk [Proc] - # - # @yieldparam [String] - def initialize(stream, &blk) - @stream = stream.is_a?(String) ? StringIO.new(stream) : stream - @buf = String.new.b - @blk = blk - end - end - - class << self - # @param blk [Proc] - # - # @yieldparam [Enumerator::Yielder] - # @return [Enumerable] - def writable_enum(&blk) - Enumerator.new do |y| - y.define_singleton_method(:write) do - self << _1.clone - _1.bytesize - end - - blk.call(y) - end - end - end - - class << self - # @api private - # - # @param y [Enumerator::Yielder] - # @param boundary [String] - # @param key [Symbol, String] - # @param val [Object] - private def write_multipart_chunk(y, boundary:, key:, val:) - y << "--#{boundary}\r\n" - y << "Content-Disposition: form-data" - unless key.nil? - name = ERB::Util.url_encode(key.to_s) - y << "; name=\"#{name}\"" - end - if val.is_a?(IO) - filename = ERB::Util.url_encode(File.basename(val.to_path)) - y << "; filename=\"#{filename}\"" - end - y << "\r\n" - case val - in IO - y << "Content-Type: application/octet-stream\r\n\r\n" - IO.copy_stream(val, y) - in StringIO - y << "Content-Type: application/octet-stream\r\n\r\n" - y << val.string - in String - y << "Content-Type: application/octet-stream\r\n\r\n" - y << val.to_s - in true | false | Integer | Float | Symbol - y << "Content-Type: text/plain\r\n\r\n" - y << val.to_s - else - y << "Content-Type: application/json\r\n\r\n" - y << JSON.fast_generate(val) - end - y << "\r\n" - end - - # @api private - # - # @param body [Object] - # - # @return [Array(String, Enumerable)] - private def encode_multipart_streaming(body) - boundary = SecureRandom.urlsafe_base64(60) - - strio = writable_enum do |y| - case body - in Hash - body.each do |key, val| - case val - in Array if val.all? 
{ primitive?(_1) } - val.each do |v| - write_multipart_chunk(y, boundary: boundary, key: key, val: v) - end - else - write_multipart_chunk(y, boundary: boundary, key: key, val: val) - end - end - else - write_multipart_chunk(y, boundary: boundary, key: nil, val: body) - end - y << "--#{boundary}--\r\n" - end - - [boundary, strio] - end - - # @api private - # - # @param headers [Hash{String=>String}] - # @param body [Object] - # - # @return [Object] - def encode_content(headers, body) - content_type = headers["content-type"] - case [content_type, body] - in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array] - [headers, JSON.fast_generate(body)] - in [%r{^application/(?:x-)?jsonl}, Enumerable] - [headers, body.lazy.map { JSON.fast_generate(_1) }] - in [%r{^multipart/form-data}, Hash | IO | StringIO] - boundary, strio = encode_multipart_streaming(body) - headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} - [headers, strio] - in [_, StringIO] - [headers, body.string] - else - [headers, body] - end - end - - # @api private - # - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param stream [Enumerable] - # @param suppress_error [Boolean] - # - # @raise [JSON::ParserError] - # @return [Object] - def decode_content(headers, stream:, suppress_error: false) - case headers["content-type"] - in %r{^application/(?:vnd\.api\+)?json} - json = stream.to_a.join - begin - JSON.parse(json, symbolize_names: true) - rescue JSON::ParserError => e - raise e unless suppress_error - json - end - in %r{^application/(?:x-)?jsonl} - lines = decode_lines(stream) - chain_fused(lines) do |y| - lines.each { y << JSON.parse(_1, symbolize_names: true) } - end - in %r{^text/event-stream} - lines = decode_lines(stream) - decode_sse(lines) - in %r{^text/} - stream.to_a.join - else - # TODO: parsing other response types - StringIO.new(stream.to_a.join) - end - end - end - - class << self - # @api private - # - # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html - # - # @param enum [Enumerable] - # @param external [Boolean] - # @param close [Proc] - # - # @return [Enumerable] - def fused_enum(enum, external: false, &close) - fused = false - iter = Enumerator.new do |y| - next if fused - - fused = true - if external - loop { y << enum.next } - else - enum.each(&y) - end - ensure - close&.call - close = nil - end - - iter.define_singleton_method(:rewind) do - fused = true - self - end - iter - end - - # @api private - # - # @param enum [Enumerable, nil] - def close_fused!(enum) - return unless enum.is_a?(Enumerator) - - # rubocop:disable Lint/UnreachableLoop - enum.rewind.each { break } - # rubocop:enable Lint/UnreachableLoop - end - - # @api private - # - # @param enum [Enumerable, nil] - # @param blk [Proc] - # - # @yieldparam [Enumerator::Yielder] - # @return [Enumerable] - def chain_fused(enum, &blk) - iter = Enumerator.new { blk.call(_1) } - fused_enum(iter) { close_fused!(enum) } - end - end - - class << self - # @api private - # - # @param enum [Enumerable] - # - # @return [Enumerable] - def decode_lines(enum) - re = /(\r\n|\r|\n)/ - buffer = String.new.b - cr_seen = nil - - chain_fused(enum) do |y| - enum.each do |row| - offset = buffer.bytesize - buffer << row - while (match = re.match(buffer, cr_seen&.to_i || offset)) - case [match.captures.first, cr_seen] - in ["\r", nil] - cr_seen = match.end(1) - next - in ["\r" | "\r\n", Integer] - y << buffer.slice!(..(cr_seen.pred)) - else - y << buffer.slice!(..(match.end(1).pred)) - end - offset = 0 - cr_seen = nil - end - 
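# [editor's sketch; not part of the patch] -----------------------------------
# `decode_lines` re-chunks a byte stream on \r, \n, or \r\n (handling a \r\n
# pair split across chunks); `decode_sse` below folds those lines into
# server-sent events:
lines = OpenAI::Util.decode_lines(["data: hel", "lo\n\ndata: bye\n\n"])
OpenAI::Util.decode_sse(lines).map { _1[:data] } # => ["hello\n", "bye\n"]
# [end sketch] ----------------------------------------------------------------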
end - - y << buffer.slice!(..(cr_seen.pred)) unless cr_seen.nil? - y << buffer unless buffer.empty? - end - end - - # @api private - # - # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream - # - # @param lines [Enumerable] - # - # @return [Hash{Symbol=>Object}] - def decode_sse(lines) - # rubocop:disable Metrics/BlockLength - chain_fused(lines) do |y| - blank = {event: nil, data: nil, id: nil, retry: nil} - current = {} - - lines.each do |line| - case line.sub(/\R$/, "") - in "" - next if current.empty? - y << {**blank, **current} - current = {} - in /^:/ - next - in /^([^:]+):\s?(.*)$/ - field, value = Regexp.last_match.captures - case field - in "event" - current.merge!(event: value) - in "data" - (current[:data] ||= String.new.b) << (value << "\n") - in "id" unless value.include?("\0") - current.merge!(id: value) - in "retry" if /^\d+$/ =~ value - current.merge!(retry: Integer(value)) - else - end - else - end - end - # rubocop:enable Metrics/BlockLength - - y << {**blank, **current} unless current.empty? - end - end - end - end - - # rubocop:enable Metrics/ModuleLength -end diff --git a/rbi/lib/openai/aliases.rbi b/rbi/lib/openai/aliases.rbi new file mode 100644 index 00000000..5641bd39 --- /dev/null +++ b/rbi/lib/openai/aliases.rbi @@ -0,0 +1,19 @@ +# typed: strong + +module OpenAI + Unknown = OpenAI::Internal::Type::Unknown + + BooleanModel = OpenAI::Internal::Type::BooleanModel + + Enum = OpenAI::Internal::Type::Enum + + Union = OpenAI::Internal::Type::Union + + ArrayOf = OpenAI::Internal::Type::ArrayOf + + HashOf = OpenAI::Internal::Type::HashOf + + BaseModel = OpenAI::Internal::Type::BaseModel + + RequestParameters = OpenAI::Internal::Type::RequestParameters +end diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 41d82819..518920af 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -1,7 +1,7 @@ # typed: strong module OpenAI - class Client < OpenAI::Transport::BaseClient + class Client < OpenAI::Internal::Transport::BaseClient DEFAULT_MAX_RETRIES = 2 DEFAULT_TIMEOUT_IN_SECONDS = T.let(600.0, Float) diff --git a/rbi/lib/openai/cursor_page.rbi b/rbi/lib/openai/cursor_page.rbi deleted file mode 100644 index e167d210..00000000 --- a/rbi/lib/openai/cursor_page.rbi +++ /dev/null @@ -1,19 +0,0 @@ -# typed: strong - -module OpenAI - class CursorPage - include OpenAI::Type::BasePage - - Elem = type_member - - sig { returns(T.nilable(T::Array[Elem])) } - attr_accessor :data - - sig { returns(T::Boolean) } - attr_accessor :has_more - - sig { returns(String) } - def inspect - end - end -end diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/lib/openai/internal/cursor_page.rbi new file mode 100644 index 00000000..7d728a09 --- /dev/null +++ b/rbi/lib/openai/internal/cursor_page.rbi @@ -0,0 +1,21 @@ +# typed: strong + +module OpenAI + module Internal + class CursorPage + include OpenAI::Internal::Type::BasePage + + Elem = type_member + + sig { returns(T.nilable(T::Array[Elem])) } + attr_accessor :data + + sig { returns(T::Boolean) } + attr_accessor :has_more + + sig { returns(String) } + def inspect + end + end + end +end diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/lib/openai/internal/page.rbi new file mode 100644 index 00000000..bb47228b --- /dev/null +++ b/rbi/lib/openai/internal/page.rbi @@ -0,0 +1,21 @@ +# typed: strong + +module OpenAI + module Internal + class Page + include OpenAI::Internal::Type::BasePage + + Elem = type_member + + sig { returns(T.nilable(T::Array[Elem])) } + attr_accessor 
:data + + sig { returns(String) } + attr_accessor :object + + sig { returns(String) } + def inspect + end + end + end +end diff --git a/rbi/lib/openai/internal/stream.rbi b/rbi/lib/openai/internal/stream.rbi new file mode 100644 index 00000000..14bab753 --- /dev/null +++ b/rbi/lib/openai/internal/stream.rbi @@ -0,0 +1,17 @@ +# typed: strong + +module OpenAI + module Internal + class Stream + include OpenAI::Internal::Type::BaseStream + + Message = type_member(:in) { {fixed: OpenAI::Internal::Util::ServerSentEvent} } + Elem = type_member(:out) + + # @api private + sig { override.returns(T::Enumerable[Elem]) } + private def iterator + end + end + end +end diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi new file mode 100644 index 00000000..abc94734 --- /dev/null +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -0,0 +1,210 @@ +# typed: strong + +module OpenAI + module Internal + module Transport + # @api private + class BaseClient + abstract! + + RequestComponentsShape = + T.type_alias do + { + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable(Symbol), + page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + model: T.nilable(OpenAI::Internal::Type::Converter::Input), + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + } + end + + RequestInputShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + max_retries: Integer, + timeout: Float + } + end + + # from whatwg fetch spec + MAX_REDIRECTS = 20 + + PLATFORM_HEADERS = T::Hash[String, String] + + class << self + # @api private + sig { params(req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape).void } + def validate!(req) + end + + # @api private + sig do + params( + status: Integer, + headers: T.any( + T::Hash[String, String], + Net::HTTPHeader + ) + ).returns(T::Boolean) + end + def should_retry?(status, headers:) + end + + # @api private + sig do + params( + request: OpenAI::Internal::Transport::BaseClient::RequestInputShape, + status: Integer, + response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) + ) + .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) + end + def follow_redirect(request, status:, response_headers:) + end + + # @api private + sig do + params( + status: T.any(Integer, OpenAI::Errors::APIConnectionError), + stream: T.nilable(T::Enumerable[String]) + ) + .void + end + def reap_connection!(status, stream:) + end + end + + # @api private + sig { returns(OpenAI::Internal::Transport::PooledNetRequester) } + attr_accessor :requester + + # @api private + sig do + params( + base_url: String, + timeout: Float, + max_retries: Integer, + initial_retry_delay: Float, + max_retry_delay: Float, + headers: T::Hash[String, + T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))], + idempotency_header: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new( + base_url:, + timeout: 0.0, + max_retries: 0, + initial_retry_delay: 0.0, + max_retry_delay: 0.0, + headers: {}, + 
idempotency_header: nil + ) + end + + # @api private + sig { overridable.returns(T::Hash[String, String]) } + private def auth_headers + end + + # @api private + sig { returns(String) } + private def generate_idempotency_key + end + + # @api private + sig do + overridable + .params( + req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + opts: OpenAI::Internal::Util::AnyHash + ) + .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) + end + private def build_request(req, opts) + end + + # @api private + sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } + private def retry_delay(headers, retry_count:) + end + + # @api private + sig do + params( + request: OpenAI::Internal::Transport::BaseClient::RequestInputShape, + redirect_count: Integer, + retry_count: Integer, + send_retry_header: T::Boolean + ) + .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + private def send_request(request, redirect_count:, retry_count:, send_retry_header:) + end + + # Execute the request specified by `req`. This is the method that all resource + # methods call into. + sig do + params( + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable(Symbol), + page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::BaseModel]]), + stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + model: T.nilable(OpenAI::Internal::Type::Converter::Input), + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + ) + .returns(T.anything) + end + def request( + method, + path, + query: {}, + headers: {}, + body: nil, + unwrap: nil, + page: nil, + stream: nil, + model: OpenAI::Unknown, + options: {} + ) + end + + sig { returns(String) } + def inspect + end + end + end + end +end diff --git a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi new file mode 100644 index 00000000..1db6bc50 --- /dev/null +++ b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi @@ -0,0 +1,66 @@ +# typed: strong + +module OpenAI + module Internal + module Transport + # @api private + class PooledNetRequester + RequestShape = + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + deadline: Float + } + end + + # from the golang stdlib + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + KEEP_ALIVE_TIMEOUT = 30 + + class << self + # @api private + sig { params(url: URI::Generic).returns(Net::HTTP) } + def connect(url) + end + + # @api private + sig { params(conn: Net::HTTP, deadline: Float).void } + def calibrate_socket_timeout(conn, deadline) + end + + # @api private + sig do + params( + request: OpenAI::Internal::Transport::PooledNetRequester::RequestShape, + blk: T.proc.params(arg0: String).void + ) + .returns(Net::HTTPGenericRequest) + end + def build_request(request, &blk) + end + end + + # @api private + sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } + private def with_pool(url, deadline:, &blk) + end + + # @api private + sig do + params(request: 
OpenAI::Internal::Transport::PooledNetRequester::RequestShape) + .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + def execute(request) + end + + # @api private + sig { params(size: Integer).returns(T.attached_class) } + def self.new(size: Etc.nprocessors) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi new file mode 100644 index 00000000..54f032b7 --- /dev/null +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -0,0 +1,88 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # Array of items of a given type. + class ArrayOf + include OpenAI::Internal::Type::Converter + + abstract! + final! + + Elem = type_member(:out) + + sig(:final) do + params( + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig(:final) do + override + .params(value: T.any( + T::Enumerable[Elem], + T.anything + ), + state: OpenAI::Internal::Type::Converter::State) + .returns(T.any(T::Array[T.anything], T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override + .params(value: T.any(T::Enumerable[Elem], T.anything)) + .returns(T.any(T::Array[T.anything], T.anything)) + end + def dump(value) + end + + # @api private + sig(:final) { returns(Elem) } + protected def item_type + end + + # @api private + sig(:final) { returns(T::Boolean) } + protected def nilable? + end + + # @api private + sig(:final) do + params( + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .void + end + def initialize(type_info, spec = {}) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi new file mode 100644 index 00000000..6a8646a9 --- /dev/null +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -0,0 +1,206 @@ +# typed: strong + +module OpenAI + module Internal + module Type + class BaseModel + extend OpenAI::Internal::Type::Converter + + abstract! + + KnownFieldShape = T.type_alias do + {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} + end + + class << self + # @api private + # + # Assumes superclass fields are totally defined before fields are accessed / + # defined on subclasses. 
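# [editor's sketch; not part of the patch] -----------------------------------
# `KnownFieldShape` (declared above) lets Sorbet check the per-field metadata
# hashes that `known_fields` returns, e.g.:
field = T.let(
  {mode: nil, required: true, nilable: false},
  OpenAI::BaseModel::KnownFieldShape
)
# [end sketch] ----------------------------------------------------------------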
+ sig do + returns( + T::Hash[ + Symbol, + T.all( + OpenAI::BaseModel::KnownFieldShape, + {type_fn: T.proc.returns(OpenAI::Internal::Type::Converter::Input)} + ) + ] + ) + end + def known_fields + end + + # @api private + sig do + returns( + T::Hash[Symbol, + T.all( + OpenAI::BaseModel::KnownFieldShape, + {type: OpenAI::Internal::Type::Converter::Input} + )] + ) + end + def fields + end + + # @api private + sig do + params( + name_sym: Symbol, + required: T::Boolean, + type_info: T.any( + { + const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), + enum: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), + union: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), + api_name: Symbol, + nil?: T::Boolean + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .void + end + private def add_field(name_sym, required:, type_info:, spec:) + end + + # @api private + sig do + params( + name_sym: Symbol, + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .void + end + def required(name_sym, type_info, spec = {}) + end + + # @api private + sig do + params( + name_sym: Symbol, + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .void + end + def optional(name_sym, type_info, spec = {}) + end + + # @api private + # + # `request_only` attributes not excluded from `.#coerce` when receiving responses + # even if well behaved servers should not send them + sig { params(blk: T.proc.void).void } + private def request_only(&blk) + end + + # @api private + # + # `response_only` attributes are omitted from `.#dump` when making requests + sig { params(blk: T.proc.void).void } + private def response_only(&blk) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + class << self + # @api private + sig do + override + .params( + value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), + state: OpenAI::Internal::Type::Converter::State + ) + .returns(T.any(T.attached_class, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig do + override + .params(value: T.any(T.attached_class, T.anything)) + .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) + end + def dump(value) + end + end + + # Returns the raw value associated with the given key, if found. Otherwise, nil is + # returned. + # + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. + sig { params(key: Symbol).returns(T.nilable(T.anything)) } + def [](key) + end + + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. 
The returned value is shared by the object, so it + # should not be mutated. + sig { overridable.returns(OpenAI::Internal::Util::AnyHash) } + def to_h + end + + # Returns a Hash of the data underlying this object. O(1) + # + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. + # + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. + sig { overridable.returns(OpenAI::Internal::Util::AnyHash) } + def to_hash + end + + sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::Util::AnyHash) } + def deconstruct_keys(keys) + end + + sig { params(a: T.anything).returns(String) } + def to_json(*a) + end + + sig { params(a: T.anything).returns(String) } + def to_yaml(*a) + end + + # Create a new instance of a model. + sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } + def self.new(data = {}) + end + + sig { returns(String) } + def inspect + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/base_page.rbi b/rbi/lib/openai/internal/type/base_page.rbi new file mode 100644 index 00000000..1125a5c8 --- /dev/null +++ b/rbi/lib/openai/internal/type/base_page.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Internal + module Type + module BasePage + Elem = type_member(:out) + + sig { overridable.returns(T::Boolean) } + def next_page? + end + + sig { overridable.returns(T.self_type) } + def next_page + end + + sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void } + def auto_paging_each(&blk) + end + + sig { returns(T::Enumerable[Elem]) } + def to_enum + end + + # @api private + sig do + params( + client: OpenAI::Internal::Transport::BaseClient, + req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + headers: T.any(T::Hash[String, String], Net::HTTPHeader), + page_data: T.anything + ) + .void + end + def initialize(client:, req:, headers:, page_data:) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi new file mode 100644 index 00000000..10e8da7e --- /dev/null +++ b/rbi/lib/openai/internal/type/base_stream.rbi @@ -0,0 +1,45 @@ +# typed: strong + +module OpenAI + module Internal + module Type + module BaseStream + include Enumerable + + Message = type_member(:in) + Elem = type_member(:out) + + sig { void } + def close + end + + # @api private + sig { overridable.returns(T::Enumerable[Elem]) } + private def iterator + end + + sig { params(blk: T.proc.params(arg0: Elem).void).void } + def each(&blk) + end + + sig { returns(T::Enumerator[Elem]) } + def to_enum + end + + # @api private + sig do + params( + model: T.any(T::Class[T.anything], OpenAI::Internal::Type::Converter), + url: URI::Generic, + status: Integer, + response: Net::HTTPResponse, + stream: T::Enumerable[Message] + ) + .void + end + def initialize(model:, url:, status:, response:, stream:) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/boolean_model.rbi b/rbi/lib/openai/internal/type/boolean_model.rbi new file mode 100644 index 00000000..ab09cc33 --- /dev/null +++ b/rbi/lib/openai/internal/type/boolean_model.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # Ruby has no Boolean class; this is something for models to 
refer to. + class BooleanModel + extend OpenAI::Internal::Type::Converter + + abstract! + final! + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.==(other) + end + + class << self + # @api private + sig(:final) do + override + .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Internal::Type::Converter::State) + .returns(T.any(T::Boolean, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) + end + def dump(value) + end + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/lib/openai/internal/type/converter.rbi new file mode 100644 index 00000000..e529f225 --- /dev/null +++ b/rbi/lib/openai/internal/type/converter.rbi @@ -0,0 +1,108 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + module Converter + Input = T.type_alias { T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) } + + State = + T.type_alias do + { + strictness: T.any(T::Boolean, Symbol), + exactness: {yes: Integer, no: Integer, maybe: Integer}, + branched: Integer + } + end + + # @api private + sig do + overridable.params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::State + ).returns(T.anything) + end + def coerce(value, state:) + end + + # @api private + sig { overridable.params(value: T.anything).returns(T.anything) } + def dump(value) + end + + class << self + # @api private + sig do + params( + spec: T.any( + { + const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), + enum: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), + union: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)) + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ) + ) + .returns(T.proc.returns(T.anything)) + end + def self.type_info(spec) + end + + # @api private + # + # Based on `target`, transform `value` into `target`, to the extent possible: + # + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered + # + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + sig do + params( + target: OpenAI::Internal::Type::Converter::Input, + value: T.anything, + state: OpenAI::Internal::Type::Converter::State + ) + .returns(T.anything) + end + def self.coerce( + target, + value, + # The `strictness` is one of `true`, `false`, or `:strong`. This informs the + # coercion strategy when we have to decide between multiple possible conversion + # targets: + # + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. + # + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: + # + # - `yes`: the value can be converted to the target type with minimum coercion. 
+ # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. + # + # See implementation below for more details. + state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + ) + end + + # @api private + sig do + params(target: OpenAI::Internal::Type::Converter::Input, value: T.anything).returns(T.anything) + end + def self.dump(target, value) + end + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi new file mode 100644 index 00000000..d64ea741 --- /dev/null +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -0,0 +1,60 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # A value from among a specified list of options. OpenAPI enum values map to Ruby + # values in the SDK as follows: + # + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol + # + # We can therefore convert string values to Symbols, but can't convert other + # values safely. + module Enum + include OpenAI::Internal::Type::Converter + + # All of the valid Symbol values for this enum. + sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } + def values + end + + # @api private + # + # Guard against thread safety issues by instantiating `@values`. + sig { void } + private def finalize! + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + # + # Unlike with primitives, `Enum` additionally validates that the value is a member + # of the enum. + sig do + override + .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Internal::Type::Converter::State) + .returns(T.any(Symbol, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } + def dump(value) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi new file mode 100644 index 00000000..207ff5c9 --- /dev/null +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -0,0 +1,87 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # Hash of items of a given type. + class HashOf + include OpenAI::Internal::Type::Converter + + abstract! + final! 
+ + Elem = type_member(:out) + + sig(:final) do + params( + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .returns(T.attached_class) + end + def self.[](type_info, spec = {}) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig(:final) do + override + .params( + value: T.any(T::Hash[T.anything, T.anything], T.anything), + state: OpenAI::Internal::Type::Converter::State + ) + .returns(T.any(OpenAI::Internal::Util::AnyHash, T.anything)) + end + def coerce(value, state:) + end + + # @api private + sig(:final) do + override + .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) + .returns(T.any(OpenAI::Internal::Util::AnyHash, T.anything)) + end + def dump(value) + end + + # @api private + sig(:final) { returns(Elem) } + protected def item_type + end + + # @api private + sig(:final) { returns(T::Boolean) } + protected def nilable? + end + + # @api private + sig(:final) do + params( + type_info: T.any( + OpenAI::Internal::Util::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: OpenAI::Internal::Util::AnyHash + ) + .void + end + def initialize(type_info, spec = {}) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/request_parameters.rbi b/rbi/lib/openai/internal/type/request_parameters.rbi new file mode 100644 index 00000000..6170a658 --- /dev/null +++ b/rbi/lib/openai/internal/type/request_parameters.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + module RequestParameters + # Options to specify HTTP behaviour for this request. + sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) } + attr_accessor :request_options + + # @api private + module Converter + # @api private + sig { params(params: T.anything).returns([T.anything, OpenAI::Internal::Util::AnyHash]) } + def dump_request(params) + end + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi new file mode 100644 index 00000000..d96795ec --- /dev/null +++ b/rbi/lib/openai/internal/type/union.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + module Union + include OpenAI::Internal::Type::Converter + + # @api private + # + # All of the specified variant info for this union. + sig do + returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Internal::Type::Converter::Input)]]) + end + private def known_variants + end + + # @api private + sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + protected def derefed_variants + end + + # All of the specified variants for this union. 
+ sig { overridable.returns(T::Array[T.anything]) } + def variants + end + + # @api private + sig { params(property: Symbol).void } + private def discriminator(property) + end + + # @api private + sig do + params( + key: T.any(Symbol, OpenAI::Internal::Util::AnyHash, T.proc.returns(T.anything), T.anything), + spec: T.any(OpenAI::Internal::Util::AnyHash, T.proc.returns(T.anything), T.anything) + ) + .void + end + private def variant(key, spec = nil) + end + + # @api private + sig { params(value: T.anything).returns(T.nilable(T.anything)) } + private def resolve_variant(value) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ===(other) + end + + sig { params(other: T.anything).returns(T::Boolean) } + def ==(other) + end + + # @api private + sig do + override.params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::State + ).returns(T.anything) + end + def coerce(value, state:) + end + + # @api private + sig { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/unknown.rbi b/rbi/lib/openai/internal/type/unknown.rbi new file mode 100644 index 00000000..96c59fe6 --- /dev/null +++ b/rbi/lib/openai/internal/type/unknown.rbi @@ -0,0 +1,42 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # When we don't know what to expect for the value. + class Unknown + extend OpenAI::Internal::Type::Converter + + abstract! + final! + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.===(other) + end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.==(other) + end + + class << self + # @api private + sig(:final) do + override.params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::State + ).returns(T.anything) + end + def coerce(value, state:) + end + + # @api private + sig(:final) { override.params(value: T.anything).returns(T.anything) } + def dump(value) + end + end + end + end + end +end diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi new file mode 100644 index 00000000..5414927e --- /dev/null +++ b/rbi/lib/openai/internal/util.rbi @@ -0,0 +1,291 @@ +# typed: strong + +module OpenAI + module Internal + # @api private + module Util + # Due to the current WIP status of Shapes support in Sorbet, types referencing + # this alias might be refined in the future. 
+ AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + + # @api private + sig { returns(Float) } + def self.monotonic_secs + end + + class << self + # @api private + sig { returns(String) } + def arch + end + + # @api private + sig { returns(String) } + def os + end + end + + class << self + # @api private + sig { params(input: T.anything).returns(T::Boolean) } + def primitive?(input) + end + + # @api private + sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } + def coerce_boolean(input) + end + + # @api private + sig { params(input: T.anything).returns(T.nilable(T::Boolean)) } + def coerce_boolean!(input) + end + + # @api private + sig { params(input: T.anything).returns(T.any(Integer, T.anything)) } + def coerce_integer(input) + end + + # @api private + sig { params(input: T.anything).returns(T.any(Float, T.anything)) } + def coerce_float(input) + end + + # @api private + sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) } + def coerce_hash(input) + end + end + + # Use this to indicate that a value should be explicitly removed from a data + # structure when using `OpenAI::Internal::Util.deep_merge`. + # + # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging + # `{a: 1}` and `{}` would produce `{a: 1}`. + OMIT = T.let(T.anything, T.anything) + + class << self + # @api private + sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) } + private def deep_merge_lr(lhs, rhs, concat: false) + end + + # @api private + # + # Recursively merge one hash with another. If the values at a given key are not + # both hashes, just take the new value. + sig do + params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean) + .returns(T.anything) + end + def deep_merge( + *values, + # the value to return if no values are provided. + sentinel: nil, + # whether to merge sequences by concatenation. 
+ concat: false + ) + end + + # @api private + sig do + params( + data: T.any(OpenAI::Internal::Util::AnyHash, T::Array[T.anything], T.anything), + pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])), + sentinel: T.nilable(T.anything), + blk: T.nilable(T.proc.returns(T.anything)) + ) + .returns(T.nilable(T.anything)) + end + def dig(data, pick, sentinel = nil, &blk) + end + end + + class << self + # @api private + sig { params(uri: URI::Generic).returns(String) } + def uri_origin(uri) + end + + # @api private + sig { params(path: T.any(String, T::Array[String])).returns(String) } + def interpolate_path(path) + end + end + + class << self + # @api private + sig { params(query: T.nilable(String)).returns(T::Hash[String, T::Array[String]]) } + def decode_query(query) + end + + # @api private + sig do + params(query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) + .returns(T.nilable(String)) + end + def encode_query(query) + end + end + + ParsedUriShape = + T.type_alias do + { + scheme: T.nilable(String), + host: T.nilable(String), + port: T.nilable(Integer), + path: T.nilable(String), + query: T::Hash[String, T::Array[String]] + } + end + + class << self + # @api private + sig { params(url: T.any(URI::Generic, String)).returns(OpenAI::Internal::Util::ParsedUriShape) } + def parse_uri(url) + end + + # @api private + sig { params(parsed: OpenAI::Internal::Util::ParsedUriShape).returns(URI::Generic) } + def unparse_uri(parsed) + end + + # @api private + sig do + params(lhs: OpenAI::Internal::Util::ParsedUriShape, rhs: OpenAI::Internal::Util::ParsedUriShape) + .returns(URI::Generic) + end + def join_parsed_uri(lhs, rhs) + end + end + + class << self + # @api private + sig do + params( + headers: T::Hash[String, + T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))] + ) + .returns(T::Hash[String, String]) + end + def normalized_headers(*headers) + end + end + + # @api private + # + # An adapter that satisfies the IO interface required by `::IO.copy_stream` + class ReadIOAdapter + # @api private + sig { params(max_len: T.nilable(Integer)).returns(String) } + private def read_enum(max_len) + end + + # @api private + sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) } + def read(max_len = nil, out_string = nil) + end + + # @api private + sig do + params( + stream: T.any(String, IO, StringIO, T::Enumerable[String]), + blk: T.proc.params(arg0: String).void + ) + .returns(T.attached_class) + end + def self.new(stream, &blk) + end + end + + class << self + sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) } + def writable_enum(&blk) + end + end + + class << self + # @api private + sig do + params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void + end + private def write_multipart_chunk(y, boundary:, key:, val:) + end + + # @api private + sig { params(body: T.anything).returns([String, T::Enumerable[String]]) } + private def encode_multipart_streaming(body) + end + + # @api private + sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) } + def encode_content(headers, body) + end + + # @api private + sig do + params( + headers: T.any(T::Hash[String, String], Net::HTTPHeader), + stream: T::Enumerable[String], + suppress_error: T::Boolean + ) + .returns(T.anything) + end + def decode_content(headers, stream:, suppress_error: false) + end + end + + class << self + # @api 
private + # + # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html + sig do + params(enum: T::Enumerable[T.anything], external: T::Boolean, close: T.proc.void) + .returns(T::Enumerable[T.anything]) + end + def fused_enum(enum, external: false, &close) + end + + # @api private + sig { params(enum: T.nilable(T::Enumerable[T.anything])).void } + def close_fused!(enum) + end + + # @api private + sig do + params( + enum: T.nilable(T::Enumerable[T.anything]), + blk: T.proc.params(arg0: Enumerator::Yielder).void + ) + .returns(T::Enumerable[T.anything]) + end + def chain_fused(enum, &blk) + end + end + + ServerSentEvent = + T.type_alias do + { + event: T.nilable(String), + data: T.nilable(String), + id: T.nilable(String), + retry: T.nilable(Integer) + } + end + + class << self + # @api private + sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) } + def decode_lines(enum) + end + + # @api private + # + # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream + sig { params(lines: T::Enumerable[String]).returns(OpenAI::Internal::Util::ServerSentEvent) } + def decode_sse(lines) + end + end + end + end +end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index ed9a79de..4f531df7 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Audio class SpeechCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The text to generate audio for. The maximum length is 4096 characters. 
sig { returns(String) } @@ -55,7 +55,7 @@ module OpenAI instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index 92154d33..f6d6ac1a 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -15,7 +15,9 @@ module OpenAI attr_reader :logprobs sig do - params(logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)]) + params( + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::Util::AnyHash)] + ) .void end attr_writer :logprobs @@ -25,7 +27,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index f20bc4f6..2db27d93 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Audio class TranscriptionCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. 
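
Note on the hunks above: they only relocate internal plumbing under `OpenAI::Internal`; the public transcription surface is unchanged. A minimal usage sketch of the call these params back, assuming a valid API key in the environment and a local audio file (neither is part of this patch):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # `file` can be a plain IO handle; per the streaming-upload change in
    # this patch series, IO bodies are sent chunked rather than buffered
    # into memory up front.
    transcription = client.audio.transcriptions.create(
      file: File.open("speech.mp3", "rb"),
      model: "whisper-1"
    )
    puts transcription.text
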
@@ -98,7 +98,7 @@ module OpenAI response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi index 0e61bfb0..7612edef 100644 --- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -20,7 +20,7 @@ module OpenAI sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::Util::AnyHash)] ) .void end @@ -33,7 +33,7 @@ module OpenAI sig do params( delta: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Util::AnyHash)], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index ecfd6f55..fd8ce791 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -21,7 +21,7 @@ module OpenAI sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::Util::AnyHash)] ) .void end @@ -34,7 +34,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Util::AnyHash)], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::Util::AnyHash)], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 004b1ec3..6500d39d 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -20,14 +20,22 @@ module OpenAI sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } attr_reader :segments - sig { params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]).void } + sig do + params( + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)] + ) + .void + end attr_writer :segments # Extracted words and their corresponding timestamps. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionWord])) } attr_reader :words - sig { params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)]).void } + sig do + params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::Util::AnyHash)]) + .void + end attr_writer :words # Represents a verbose json transcription response returned by model, based on the @@ -37,8 +45,8 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)], - words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Util::AnyHash)] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)], + words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index e0a7e1ea..6c2cdb3a 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Audio class TranslationCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. @@ -53,7 +53,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index f12cba37..fee32692 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -20,7 +20,12 @@ module OpenAI sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } attr_reader :segments - sig { params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)]).void } + sig do + params( + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)] + ) + .void + end attr_writer :segments sig do @@ -28,7 +33,7 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Util::AnyHash)] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index f82137fc..ddeafb8b 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -61,7 +61,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Batch::Errors)) } attr_reader :errors - sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash)).void } + sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::Util::AnyHash)).void } attr_writer :errors # The Unix 
timestamp (in seconds) for when the batch expired. @@ -119,7 +119,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::BatchRequestCounts)) } attr_reader :request_counts - sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash)).void } + sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::Util::AnyHash)).void } attr_writer :request_counts sig do @@ -134,7 +134,7 @@ module OpenAI cancelling_at: Integer, completed_at: Integer, error_file_id: String, - errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Util::AnyHash), + errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::Util::AnyHash), expired_at: Integer, expires_at: Integer, failed_at: Integer, @@ -142,7 +142,7 @@ module OpenAI in_progress_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, - request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Util::AnyHash), + request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::Util::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -226,7 +226,7 @@ module OpenAI sig { returns(T.nilable(T::Array[OpenAI::Models::BatchError])) } attr_reader :data - sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)]).void } + sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::Util::AnyHash)]).void } attr_writer :data # The object type, which is always `list`. @@ -237,7 +237,7 @@ module OpenAI attr_writer :object sig do - params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Util::AnyHash)], object: String) + params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::Util::AnyHash)], object: String) .returns(T.attached_class) end def self.new(data: nil, object: nil) diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/lib/openai/models/batch_cancel_params.rbi index 3abd1e78..dfdfdb51 100644 --- a/rbi/lib/openai/models/batch_cancel_params.rbi +++ b/rbi/lib/openai/models/batch_cancel_params.rbi @@ -3,16 +3,12 @@ module OpenAI module Models class BatchCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index ebc9f457..4a38d820 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class BatchCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The time frame within which the batch should be processed. Currently only `24h` # is supported. 
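
As the doc comment above notes, `24h` is currently the only accepted `completion_window`. A hedged sketch of creating a batch through these params; the input file ID below is a placeholder, not a real object:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # "file-abc123" stands in for an already-uploaded JSONL batch file.
    batch = client.batches.create(
      completion_window: :"24h",
      endpoint: :"/v1/chat/completions",
      input_file_id: "file-abc123"
    )
    puts batch.status
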
@@ -45,7 +45,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 571dd654..498b22b2 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class BatchListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, @@ -28,7 +28,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/lib/openai/models/batch_retrieve_params.rbi index b984cd5d..ced68a22 100644 --- a/rbi/lib/openai/models/batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/batch_retrieve_params.rbi @@ -3,16 +3,12 @@ module OpenAI module Models class BatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index e5fcf3bc..de41edc8 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -111,7 +111,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -138,7 +138,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -147,13 +147,13 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Util::AnyHash)), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::Util::AnyHash)), top_p: T.nilable(Float), object: Symbol ) @@ -218,7 +218,7 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, 
OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash) ) .void end @@ -229,7 +229,7 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) ) .void end @@ -241,8 +241,8 @@ module OpenAI # IDs. sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 75169d1b..19dabe18 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta class AssistantCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -95,7 +95,9 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ) ) .void end @@ -124,7 +126,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -154,23 +156,25 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)), + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -244,7 +248,10 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -255,7 +262,10 @@ module OpenAI sig do 
params( - file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -267,8 +277,14 @@ module OpenAI # IDs. sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ), + file_search: T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end @@ -333,7 +349,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -347,7 +363,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -387,7 +403,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -417,7 +433,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -477,7 +493,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -492,7 +508,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/lib/openai/models/beta/assistant_delete_params.rbi index ca945e48..31fbc46f 100644 --- a/rbi/lib/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_delete_params.rbi @@ -4,16 +4,12 @@ module OpenAI module Models module Beta class AssistantDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 
57461148..c8a24c36 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta class AssistantListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, @@ -49,7 +49,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi index cb2206a7..3e5121e0 100644 --- a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi @@ -4,16 +4,12 @@ module OpenAI module Models module Beta class AssistantRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 5aa7ac81..e3b90b3b 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -32,7 +32,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -50,7 +50,7 @@ module OpenAI # created. sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash), enabled: T::Boolean, event: Symbol ) @@ -70,7 +70,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -79,7 +79,7 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.created") @@ -96,7 +96,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -105,7 +105,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.queued") @@ -122,7 +122,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -131,7 +131,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.in_progress") @@ -148,7 +148,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -157,7 +157,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.requires_action") @@ -174,7 +174,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -183,7 +183,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.completed") @@ -200,7 +200,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -209,7 +209,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.incomplete") @@ -226,7 +226,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -235,7 +235,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.failed") @@ -252,7 +252,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -261,7 +261,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelling") @@ -278,7 +278,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -287,7 +287,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelled") @@ -304,7 +304,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -313,7 +313,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.expired") @@ -329,7 +329,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -339,7 +339,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.created") @@ -355,7 +358,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -365,7 +368,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.in_progress") @@ -382,7 +388,12 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)).void } + sig do + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :data sig { returns(Symbol) } @@ -393,7 +404,7 @@ module OpenAI # are being streamed. 
sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -411,7 +422,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -421,7 +432,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.completed") @@ -437,7 +451,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -447,7 +461,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.failed") @@ -463,7 +480,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -473,7 +490,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.cancelled") @@ -489,7 +509,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -499,7 +519,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.expired") @@ -516,7 +539,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -526,7 +549,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.created") @@ -543,7 +569,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -553,7 +579,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.in_progress") @@ -570,7 +599,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -581,7 +610,7 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -600,7 +629,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -610,7 +639,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.completed") @@ -627,7 +659,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -637,7 +669,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.incomplete") @@ -652,7 +687,7 @@ module OpenAI sig { returns(OpenAI::Models::ErrorObject) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -662,7 +697,7 @@ module OpenAI # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. sig do - params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :error) diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index e1475eed..36c345d4 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -11,7 +11,12 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::AssistantToolChoiceFunction)) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash)).void } + sig do + params( + function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :function # Specifies a tool the model should use. 
Use to force the model to call a specific @@ -19,7 +24,7 @@ module OpenAI sig do params( type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index bb5f00ab..929c5b35 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta class AssistantUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } @@ -98,7 +98,9 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ) ) .void end @@ -127,7 +129,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -157,23 +159,25 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)), + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -301,7 +305,10 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -312,7 +319,10 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -324,8 +334,14 @@ module OpenAI # IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ), + file_search: T.any( + OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 61181130..e13ceaf5 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -12,12 +12,17 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch)) } attr_reader :file_search - sig { params(file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash)).void } + sig do + params( + file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -55,7 +60,7 @@ module OpenAI sig do params( - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::Util::AnyHash) ) .void end @@ -65,7 +70,7 @@ module OpenAI sig do params( max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index b12da233..8d337d24 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -7,7 +7,7 @@ module OpenAI sig { returns(OpenAI::Models::FunctionDefinition) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)).void } + sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash)).void } attr_writer :function # The type of tool being defined: `function` @@ -15,7 +15,7 @@ module OpenAI attr_accessor :type sig do - params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol) + params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(function:, type: :function) diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index d141e77e..0c56b760 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -15,7 +15,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, 
OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -25,7 +25,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.created") @@ -42,7 +45,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -52,7 +55,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.in_progress") @@ -69,7 +75,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -80,7 +86,7 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -99,7 +105,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -109,7 +115,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.completed") @@ -126,7 +135,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -136,7 +145,10 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.incomplete") diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 18316a2b..dddd0b7e 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -14,7 +14,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -24,7 +24,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.created") @@ -40,7 +43,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -50,7 +53,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.in_progress") @@ -67,7 +73,12 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash)).void } + sig do + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :data sig { returns(Symbol) } @@ -78,7 +89,7 @@ module OpenAI # are being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -96,7 +107,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -106,7 +117,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.completed") @@ -122,7 +136,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -132,7 +146,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.failed") @@ -148,7 +165,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -158,7 +175,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.cancelled") @@ -174,7 +194,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -184,7 +204,10 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Util::AnyHash), event: Symbol) + params( + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + event: Symbol + ) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.step.expired") diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 48a1e1c2..2b6188fc 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -14,7 +14,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -23,7 +23,7 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.created") @@ -40,7 +40,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -49,7 +49,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.queued") @@ -66,7 +66,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -75,7 +75,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.in_progress") @@ -92,7 +92,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -101,7 +101,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.requires_action") @@ -118,7 +118,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -127,7 +127,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.completed") @@ -144,7 +144,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -153,7 +153,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.incomplete") @@ -170,7 +170,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -179,7 +179,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.failed") @@ -196,7 +196,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -205,7 +205,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelling") @@ -222,7 +222,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -231,7 +231,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelled") @@ -248,7 +248,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -257,7 +257,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.expired") diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 9061c27f..052a7fc3 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -34,7 +34,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -47,7 +47,7 @@ module OpenAI id: String, created_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Util::AnyHash)), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::Util::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -76,7 +76,7 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash) ) .void end @@ -86,7 +86,9 @@ module OpenAI attr_reader :file_search sig do - params(file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash)) + params( + file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) + ) .void end attr_writer :file_search @@ -97,8 +99,8 @@ module OpenAI # tool requires a list of vector store IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 963ba806..c99ab223 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta class ThreadCreateAndRunParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to @@ -104,7 +104,12 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread)) } attr_reader :thread - sig { params(thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash)).void } + sig do + params( + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :thread # Controls which (if any) tool is called by the model. `none` means the model will @@ -135,7 +140,9 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ) ) .void end @@ -174,7 +181,7 @@ module OpenAI sig do params( truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) ) ) .void @@ -194,26 +201,28 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash), + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)), + tool_resources: T.nilable( + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) + ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -221,9 +230,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( 
- T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -313,7 +322,7 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)] + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::Util::AnyHash)] ) .void end @@ -338,7 +347,10 @@ module OpenAI sig do params( tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, + OpenAI::Internal::Util::AnyHash + ) ) ) .void @@ -349,10 +361,13 @@ module OpenAI # an empty thread will be created. sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Util::AnyHash)], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Util::AnyHash) + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, + OpenAI::Internal::Util::AnyHash + ) ) ) .returns(T.attached_class) @@ -420,7 +435,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -428,7 +443,12 @@ module OpenAI ), role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, OpenAI::Util::AnyHash)] + T::Array[ + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, + OpenAI::Internal::Util::AnyHash + ) + ] ), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -485,7 +505,7 @@ module OpenAI MessageContentPartParamArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], - OpenAI::Type::Converter + OpenAI::Internal::Type::Converter ) end @@ -543,7 +563,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -558,7 +578,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -622,7 +642,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -636,7 +656,7 @@ module OpenAI params( file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Util::AnyHash + 
OpenAI::Internal::Util::AnyHash ) ) .void @@ -651,11 +671,11 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .returns(T.attached_class) @@ -723,7 +743,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -737,7 +757,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -777,7 +797,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -807,7 +827,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -867,7 +887,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -882,7 +902,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) @@ -954,7 +974,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -966,7 +986,10 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -980,9 +1003,12 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - file_search: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index e1977394..2e45ad3f 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta 
class ThreadCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. @@ -14,7 +14,7 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)] + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)] ) .void end @@ -38,7 +38,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -46,10 +46,10 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -117,7 +117,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -125,7 +125,7 @@ module OpenAI ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Internal::Util::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -182,7 +182,7 @@ module OpenAI MessageContentPartParamArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], - OpenAI::Type::Converter + OpenAI::Internal::Type::Converter ) end @@ -235,7 +235,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -250,7 +250,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -312,7 +312,10 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -323,7 +326,10 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + 
file_search: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -335,8 +341,14 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ), + file_search: T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end @@ -401,7 +413,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -415,7 +427,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -455,7 +467,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -485,7 +497,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -545,7 +557,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -560,7 +572,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/lib/openai/models/beta/thread_delete_params.rbi index 0b43e22c..0a775418 100644 --- a/rbi/lib/openai/models/beta/thread_delete_params.rbi +++ b/rbi/lib/openai/models/beta/thread_delete_params.rbi @@ -4,16 +4,12 @@ module OpenAI module Models module Beta class ThreadDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi index 436ff67f..502dbe33 100644 --- a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi @@ -4,16 
+4,12 @@ module OpenAI module Models module Beta class ThreadRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index 53d58531..34ddbb64 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -9,7 +9,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -27,7 +27,7 @@ module OpenAI # created. sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash), enabled: T::Boolean, event: Symbol ) diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 227c5099..0f78f9cb 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta class ThreadUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -25,7 +25,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -34,8 +34,8 @@ module OpenAI sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -61,7 +61,10 @@ module OpenAI sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -72,7 +75,10 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -84,8 +90,14 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Util::AnyHash) + code_interpreter: T.any( + OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::Util::AnyHash + ), + file_search: T.any( + OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index c8f84631..2aaa8fe0 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -13,7 +13,10 @@ module OpenAI sig do params( - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash) + file_citation: T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -36,7 +39,10 @@ module OpenAI sig do params( end_index: Integer, - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Util::AnyHash), + file_citation: T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + OpenAI::Internal::Util::AnyHash + ), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index dd7bb7b4..9de4eb01 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -24,7 +24,10 @@ module OpenAI sig do 
params( - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash) + file_citation: T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -50,7 +53,10 @@ module OpenAI params( index: Integer, end_index: Integer, - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Util::AnyHash), + file_citation: T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + OpenAI::Internal::Util::AnyHash + ), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 33a5f4e1..7e8487a3 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -13,7 +13,7 @@ module OpenAI sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash) + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::Util::AnyHash) ) .void end @@ -35,7 +35,7 @@ module OpenAI sig do params( end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Util::AnyHash), + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index 7c79aae6..51269c89 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -24,7 +24,7 @@ module OpenAI sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash) + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::Util::AnyHash) ) .void end @@ -49,7 +49,7 @@ module OpenAI params( index: Integer, end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Util::AnyHash), + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::Util::AnyHash), start_index: Integer, text: String, type: Symbol diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 18986902..301843bc 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::ImageFile) } attr_reader :image_file - sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash)).void } + sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::Util::AnyHash)).void } attr_writer :image_file # Always `image_file`. @@ -18,7 +18,10 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
sig do - params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Util::AnyHash), type: Symbol) + params( + image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(image_file:, type: :image_file) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 3dcd3939..72d55cbb 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -16,7 +16,10 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta)) } attr_reader :image_file - sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash)).void } + sig do + params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::Util::AnyHash)) + .void + end attr_writer :image_file # References an image [File](https://platform.openai.com/docs/api-reference/files) @@ -24,7 +27,7 @@ module OpenAI sig do params( index: Integer, - image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Util::AnyHash), + image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index 2c1839ed..ec7592c9 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::ImageURL) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash)).void } + sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::Util::AnyHash)).void } attr_writer :image_url # The type of the content part. @@ -17,7 +17,10 @@ module OpenAI # References an image URL in the content of a message. sig do - params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Util::AnyHash), type: Symbol) + params( + image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(image_url:, type: :image_url) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index 664de983..e337c120 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -16,14 +16,17 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta)) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash)).void } + sig do + params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::Util::AnyHash)) + .void + end attr_writer :image_url # References an image URL in the content of a message. 
sig do params( index: Integer, - image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Util::AnyHash), + image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 87967f32..03266cc5 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -52,7 +52,9 @@ module OpenAI sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)) + incomplete_details: T.nilable( + T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::Util::AnyHash) + ) ) .void end @@ -97,12 +99,14 @@ module OpenAI params( id: String, assistant_id: T.nilable(String), - attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Util::AnyHash)]), + attachments: T.nilable( + T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Internal::Util::AnyHash)] + ), completed_at: T.nilable(Integer), content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock @@ -110,7 +114,9 @@ module OpenAI ], created_at: Integer, incomplete_at: T.nilable(Integer), - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Util::AnyHash)), + incomplete_details: T.nilable( + T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::Util::AnyHash) + ), metadata: T.nilable(T::Hash[Symbol, String]), role: OpenAI::Models::Beta::Threads::Message::Role::OrSymbol, run_id: T.nilable(String), @@ -197,7 +203,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -212,7 +218,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index f2b3ca52..9ad49558 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class MessageCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The text contents of the message. 
sig do @@ -54,7 +54,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -62,10 +62,10 @@ module OpenAI ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::Util::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -121,7 +121,7 @@ module OpenAI MessageContentPartParamArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], - OpenAI::Type::Converter + OpenAI::Internal::Type::Converter ) end @@ -174,7 +174,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -189,7 +189,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index 1149a983..71feebb1 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -5,14 +5,20 @@ module OpenAI module Beta module Threads class MessageDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id sig do - params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + params( + thread_id: String, + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::Util::AnyHash + ) + ) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 8f88410b..151f4efe 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -27,7 +27,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -51,7 +51,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi 
b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 5ac1d6f1..bae39ef8 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -13,7 +13,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::MessageDelta) } attr_reader :delta - sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash)).void } + sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::Util::AnyHash)).void } attr_writer :delta # The object type, which is always `thread.message.delta`. @@ -25,7 +25,7 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Util::AnyHash), + delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::Util::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index c1edc5f6..3664eaeb 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class MessageListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, @@ -58,7 +58,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index e2f6c363..bc78f6e0 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -5,14 +5,20 @@ module OpenAI module Beta module Threads class MessageRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id sig do - params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + params( + thread_id: String, + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::Util::AnyHash + ) + ) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 6af1915e..0d2c98de 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class MessageUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id @@ -24,7 +24,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index f8511f4f..762191b7 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -18,7 +18,10 @@ module OpenAI sig do params( - function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash) + function: T.any( + OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -33,7 +36,10 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Util::AnyHash), + function: T.any( + OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + OpenAI::Internal::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 407e6777..d19d330f 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -42,7 +42,7 @@ module OpenAI sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash)) + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -59,7 +59,9 @@ module OpenAI attr_reader :last_error sig do - params(last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash))) + params( + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::Util::AnyHash)) + ) .void end attr_writer :last_error @@ -106,7 +108,7 @@ module OpenAI sig do params( - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash)) + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -203,7 +205,7 @@ module OpenAI sig do params( - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash)) + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -214,7 +216,12 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } attr_reader :usage - sig { params(usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash))).void } + sig do + params( + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::Util::AnyHash)) + ) + .void + end attr_writer :usage # The sampling temperature used for this run. If not set, defaults to 1. 
@@ -236,20 +243,20 @@ module OpenAI created_at: Integer, expires_at: T.nilable(Integer), failed_at: T.nilable(Integer), - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Util::AnyHash)), + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::Util::AnyHash)), instructions: String, - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Util::AnyHash)), + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::Util::AnyHash)), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: String, parallel_tool_calls: T::Boolean, - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Util::AnyHash)), + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::Util::AnyHash)), response_format: T.nilable( T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -261,19 +268,19 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Util::AnyHash)), - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Util::AnyHash)), + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::Util::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::Util::AnyHash)), temperature: T.nilable(Float), top_p: T.nilable(Float), object: Symbol @@ -460,7 +467,10 @@ module OpenAI sig do params( - submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash) + submit_tool_outputs: T.any( + OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -474,7 +484,10 @@ module OpenAI # is required. sig do params( - submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Util::AnyHash), + submit_tool_outputs: T.any( + OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + OpenAI::Internal::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) @@ -499,7 +512,7 @@ module OpenAI # Details on the tool outputs needed for this run to continue. 
sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Internal::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index c84678a8..92c6ca3b 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -5,14 +5,20 @@ module OpenAI module Beta module Threads class RunCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id sig do - params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + params( + thread_id: String, + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::Util::AnyHash + ) + ) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 3e1cea2e..e7854e85 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class RunCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to @@ -185,7 +185,7 @@ module OpenAI sig do params( truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) ) ) .void @@ -198,7 +198,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)] ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), @@ -211,7 +211,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -221,14 +221,14 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -236,9 +236,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, 
OpenAI::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -363,7 +363,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -374,7 +374,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ), @@ -433,7 +433,7 @@ module OpenAI MessageContentPartParamArray = T.let( OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], - OpenAI::Type::Converter + OpenAI::Internal::Type::Converter ) end @@ -498,7 +498,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -513,7 +513,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 94b4495a..da3c8bc7 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class RunListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, @@ -50,7 +50,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index 24cfa0e3..f976ec26 100644 --- a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -5,14 +5,20 @@ module OpenAI module Beta module Threads class RunRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id sig do - params(thread_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + params( + thread_id: String, + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::Util::AnyHash + ) + ) .returns(T.attached_class) end def self.new(thread_id:, request_options: {}) diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 98e8df09..43e56a5e 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class RunSubmitToolOutputsParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id @@ -18,8 +18,13 @@ module OpenAI sig do params( thread_id: String, - tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + tool_outputs: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, + OpenAI::Internal::Util::AnyHash + ) + ], + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 1067298a..d4946a6e 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads class RunUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id @@ -24,7 +24,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git 
a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 3d0e6331..3b1be064 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -19,7 +19,10 @@ module OpenAI sig do params( - image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash) + image: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -28,7 +31,10 @@ module OpenAI sig do params( index: Integer, - image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Util::AnyHash), + image: T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + OpenAI::Internal::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 6720ae9a..f0524366 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -18,7 +18,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -36,7 +36,7 @@ module OpenAI id: String, code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) @@ -85,7 +85,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] @@ -147,7 +147,7 @@ module OpenAI params( image: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -162,7 +162,7 @@ module OpenAI params( image: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 627f5f07..999ac980 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -30,7 +30,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -44,7 +44,7 @@ module OpenAI id: String, code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) @@ -97,7 +97,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, 
OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -113,7 +113,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 96fb0416..cbbf08ee 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -16,7 +16,10 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash) + file_search: T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -30,7 +33,10 @@ module OpenAI sig do params( id: String, - file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Util::AnyHash), + file_search: T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + OpenAI::Internal::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) @@ -56,7 +62,7 @@ module OpenAI params( ranking_options: T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -69,7 +75,12 @@ module OpenAI sig do params( - results: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + results: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, + OpenAI::Internal::Util::AnyHash + ) + ] ) .void end @@ -80,9 +91,14 @@ module OpenAI params( ranking_options: T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - results: T::Array[T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, OpenAI::Util::AnyHash)] + results: T::Array[ + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, + OpenAI::Internal::Util::AnyHash + ) + ] ) .returns(T.attached_class) end @@ -205,7 +221,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -222,7 +238,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 3e337e83..f4189ea3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -16,7 +16,7 @@ module OpenAI sig do params( - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::Util::AnyHash) ) .void end @@ -30,7 +30,7 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Util::AnyHash), + 
function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 04b5a69e..b478742b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -28,7 +28,10 @@ module OpenAI sig do params( - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash) + function: T.any( + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -38,7 +41,10 @@ module OpenAI params( index: Integer, id: String, - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Util::AnyHash), + function: T.any( + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, + OpenAI::Internal::Util::AnyHash + ), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 8a929854..2f3d4925 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -13,7 +13,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -29,7 +29,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 734e99a5..c068ad2f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -44,7 +44,7 @@ module OpenAI sig do params( - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)) + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -100,7 +100,7 @@ module OpenAI sig do params( - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)) + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -116,18 +116,18 @@ module OpenAI created_at: Integer, expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Util::AnyHash)), + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::OrSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, type: 
OpenAI::Models::Beta::Threads::Runs::RunStep::Type::OrSymbol, - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Util::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::Util::AnyHash)), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 19089bfb..fa423211 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -23,7 +23,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) @@ -36,7 +36,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 08c7adf8..1369d183 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -14,7 +14,10 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) } attr_reader :delta - sig { params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash)).void } + sig do + params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::Util::AnyHash)) + .void + end attr_writer :delta # The object type, which is always `thread.run.step.delta`. 
@@ -26,7 +29,7 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Util::AnyHash), + delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::Util::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index a13b01a2..3408ca06 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -17,7 +17,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -29,7 +29,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), type: Symbol ) diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 75db421b..6b012ab4 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs class StepListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id @@ -69,7 +69,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 15b2fdd2..fd32d2e8 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -6,8 +6,8 @@ module OpenAI module Threads module Runs class StepRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig { returns(String) } attr_accessor :thread_id @@ -33,7 +33,7 @@ module OpenAI thread_id: String, run_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index c4833e7c..783946aa 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -33,7 +33,7 @@ module OpenAI tool_calls: T::Array[ T.any( 
OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -49,7 +49,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 0dade5cb..3b5ca709 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -32,7 +32,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall ) diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 9dd44292..96e9162b 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -26,7 +26,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathAnnotation ) ], diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 18c16ea3..2293760e 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Text) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::Util::AnyHash)).void } attr_writer :text # Always `text`. @@ -17,7 +17,7 @@ module OpenAI # The text content that is part of a message. 
sig do - params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Util::AnyHash), type: Symbol) + params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::Util::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(text:, type: :text) diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 2f225af3..a5aea637 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -24,7 +24,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -45,7 +45,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ], diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index 0e37546f..8d353d81 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -16,14 +16,14 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::TextDelta)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::Util::AnyHash)).void } attr_writer :text # The text content that is part of a message. sig do params( index: Integer, - text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Util::AnyHash), + text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 32ff4ea3..7086df0f 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -43,7 +43,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)).void } attr_writer :usage # Represents a chat completion response returned by model, based on the provided @@ -51,12 +51,12 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Util::AnyHash)], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Internal::Util::AnyHash)], created: Integer, model: String, service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::OrSymbol), system_fingerprint: String, - usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash), + usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -111,7 +111,7 @@ module OpenAI sig do params( - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -121,15 +121,15 @@ module OpenAI sig { 
returns(OpenAI::Models::Chat::ChatCompletionMessage) } attr_reader :message - sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash)).void } + sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::Util::AnyHash)).void } attr_writer :message sig do params( finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Util::AnyHash)), - message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Util::AnyHash) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)), + message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -188,8 +188,12 @@ module OpenAI # Log probability information for the choice. sig do params( - content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]), - refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]) + content: T.nilable( + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] + ), + refusal: T.nilable( + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] + ) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 65806b1f..40188371 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -15,7 +15,9 @@ module OpenAI sig do params( - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)) + audio: T.nilable( + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::Util::AnyHash) + ) ) .void end @@ -48,7 +50,10 @@ module OpenAI sig do params( function_call: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash) + T.any( + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, + OpenAI::Internal::Util::AnyHash + ) ) ) .void @@ -73,7 +78,7 @@ module OpenAI sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)] ) .void end @@ -82,25 +87,30 @@ module OpenAI # Messages sent by the model in response to user messages. 
        sig do
          params(
-            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Util::AnyHash)),
+            audio: T.nilable(
+              T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::Util::AnyHash)
+            ),
             content: T.nilable(
               T.any(
                 String,
                 T::Array[
                   T.any(
                     OpenAI::Models::Chat::ChatCompletionContentPartText,
-                    OpenAI::Util::AnyHash,
+                    OpenAI::Internal::Util::AnyHash,
                     OpenAI::Models::Chat::ChatCompletionContentPartRefusal
                   )
                 ]
               )
             ),
             function_call: T.nilable(
-              T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Util::AnyHash)
+              T.any(
+                OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall,
+                OpenAI::Internal::Util::AnyHash
+              )
             ),
             name: String,
             refusal: T.nilable(String),
-            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)],
+            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)],
             role: Symbol
           )
             .returns(T.attached_class)
@@ -199,7 +209,7 @@ module OpenAI
          ArrayOfContentPartArray =
            T.let(
              OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart],
-              OpenAI::Type::Converter
+              OpenAI::Internal::Type::Converter
            )
        end
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
index ca1adf7b..719f97f8 100644
--- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
@@ -50,7 +50,7 @@ module OpenAI
         sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) }
         attr_reader :usage

-        sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash))).void }
+        sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash))).void }
         attr_writer :usage

         # Represents a streamed chunk of a chat completion response returned by the model,
@@ -59,12 +59,12 @@ module OpenAI
         sig do
           params(
             id: String,
-            choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Util::AnyHash)],
+            choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Internal::Util::AnyHash)],
             created: Integer,
             model: String,
             service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::OrSymbol),
             system_fingerprint: String,
-            usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)),
+            usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)),
             object: Symbol
           )
             .returns(T.attached_class)
@@ -104,7 +104,12 @@ module OpenAI
           sig { returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) }
           attr_reader :delta

-          sig { params(delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash)).void }
+          sig do
+            params(
+              delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::Util::AnyHash)
+            )
+              .void
+          end
           attr_writer :delta

           # The reason the model stopped generating tokens. This will be `stop` if the model
@@ -126,7 +131,9 @@ module OpenAI
           sig do
             params(
-              logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash))
+              logprobs: T.nilable(
+                T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)
+              )
             )
               .void
           end
@@ -134,10 +141,12 @@ module OpenAI
           sig do
             params(
-              delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Util::AnyHash),
+              delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::Util::AnyHash),
               finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol),
               index: Integer,
-              logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Util::AnyHash))
+              logprobs: T.nilable(
+                T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)
+              )
             )
               .returns(T.attached_class)
           end
@@ -170,7 +179,10 @@ module OpenAI
            sig do
              params(
-                function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash)
+                function_call: T.any(
+                  OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall,
+                  OpenAI::Internal::Util::AnyHash
+                )
              )
                .void
            end
@@ -192,7 +204,7 @@ module OpenAI
            sig do
              params(
-                tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)]
+                tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::Util::AnyHash)]
              )
                .void
            end
@@ -202,10 +214,13 @@ module OpenAI
            sig do
              params(
                content: T.nilable(String),
-                function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Util::AnyHash),
+                function_call: T.any(
+                  OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall,
+                  OpenAI::Internal::Util::AnyHash
+                ),
                refusal: T.nilable(String),
                role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol,
-                tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Util::AnyHash)]
+                tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::Util::AnyHash)]
              )
                .returns(T.attached_class)
            end
@@ -294,7 +309,10 @@ module OpenAI
              sig do
                params(
-                  function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash)
+                  function: T.any(
+                    OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function,
+                    OpenAI::Internal::Util::AnyHash
+                  )
                )
                  .void
              end
@@ -311,7 +329,10 @@ module OpenAI
                params(
                  index: Integer,
                  id: String,
-                  function: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, OpenAI::Util::AnyHash),
+                  function: T.any(
+                    OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function,
+                    OpenAI::Internal::Util::AnyHash
+                  ),
                  type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol
                )
                  .returns(T.attached_class)
@@ -428,8 +449,12 @@ module OpenAI
            # Log probability information for the choice.
            sig do
              params(
-                content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)]),
-                refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Util::AnyHash)])
+                content: T.nilable(
+                  T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)]
+                ),
+                refusal: T.nilable(
+                  T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)]
+                )
              )
                .returns(T.attached_class)
            end
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
index bf1e89a6..df2d9e47 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi
@@ -13,7 +13,9 @@ module OpenAI
          attr_reader :file

          sig do
-            params(file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash))
+            params(
+              file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::Util::AnyHash)
+            )
              .void
          end
          attr_writer :file
@@ -26,7 +28,7 @@ module OpenAI
          # generation.
          sig do
            params(
-              file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Util::AnyHash),
+              file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::Util::AnyHash),
              type: Symbol
            )
              .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
index 5c838086..4ba1cace 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi
@@ -9,7 +9,7 @@ module OpenAI
        sig do
          params(
-            image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash)
+            image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -22,7 +22,7 @@ module OpenAI
        # Learn about [image inputs](https://platform.openai.com/docs/guides/vision).
        sig do
          params(
-            image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Util::AnyHash),
+            image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::Util::AnyHash),
            type: Symbol
          )
            .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
index 588aba54..61ce5857 100644
--- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi
@@ -9,7 +9,10 @@ module OpenAI
        sig do
          params(
-            input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash)
+            input_audio: T.any(
+              OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio,
+              OpenAI::Internal::Util::AnyHash
+            )
          )
            .void
        end
@@ -22,7 +25,10 @@ module OpenAI
        # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio).
        sig do
          params(
-            input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Util::AnyHash),
+            input_audio: T.any(
+              OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio,
+              OpenAI::Internal::Util::AnyHash
+            ),
            type: Symbol
          )
            .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
index ef698edc..ebf50ef3 100644
--- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi
@@ -25,7 +25,10 @@ module OpenAI
        # replace the previous `system` messages.
        sig do
          params(
-            content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]),
+            content: T.any(
+              String,
+              T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)]
+            ),
            name: String,
            role: Symbol
          )
            .returns(T.attached_class)
@@ -56,7 +59,10 @@ module OpenAI
        end

        ChatCompletionContentPartTextArray =
-          T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter)
+          T.let(
+            OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText],
+            OpenAI::Internal::Type::Converter
+          )
      end
    end
  end
diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi
index 190244ae..10aee914 100644
--- a/rbi/lib/openai/models/chat/chat_completion_message.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi
@@ -23,7 +23,7 @@ module OpenAI
        sig do
          params(
-            annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)]
+            annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::Util::AnyHash)]
          )
            .void
        end
@@ -35,7 +35,12 @@ module OpenAI
        sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) }
        attr_reader :audio

-          sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash))).void }
+          sig do
+            params(
+              audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::Util::AnyHash))
+            )
+              .void
+          end
        attr_writer :audio

        # Deprecated and replaced by `tool_calls`. The name and arguments of a function
@@ -45,7 +50,7 @@ module OpenAI
        sig do
          params(
-            function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash)
+            function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -57,7 +62,7 @@ module OpenAI
        sig do
          params(
-            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)]
+            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)]
          )
            .void
        end
@@ -68,10 +73,10 @@ module OpenAI
          params(
            content: T.nilable(String),
            refusal: T.nilable(String),
-            annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Util::AnyHash)],
-            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Util::AnyHash)),
-            function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Util::AnyHash),
-            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Util::AnyHash)],
+            annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::Util::AnyHash)],
+            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::Util::AnyHash)),
+            function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::Util::AnyHash),
+            tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)],
            role: Symbol
          )
            .returns(T.attached_class)
@@ -115,7 +120,10 @@ module OpenAI
          sig do
            params(
-              url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash)
+              url_citation: T.any(
+                OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation,
+                OpenAI::Internal::Util::AnyHash
+              )
            )
              .void
          end
@@ -124,7 +132,10 @@ module OpenAI
          # A URL citation when using web search.
          sig do
            params(
-              url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Util::AnyHash),
+              url_citation: T.any(
+                OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation,
+                OpenAI::Internal::Util::AnyHash
+              ),
              type: Symbol
            )
              .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi
index 851dff7f..496d91cb 100644
--- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi
@@ -14,7 +14,7 @@ module OpenAI
        sig do
          params(
-            function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash)
+            function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -27,7 +27,7 @@ module OpenAI
        sig do
          params(
            id: String,
-            function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Util::AnyHash),
+            function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::Util::AnyHash),
            type: Symbol
          )
            .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi
index 4c4f4e58..6a992cda 100644
--- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi
@@ -9,7 +9,7 @@ module OpenAI
        sig do
          params(
-            function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash)
+            function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -23,7 +23,7 @@ module OpenAI
        # function.
        sig do
          params(
-            function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Util::AnyHash),
+            function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::Util::AnyHash),
            type: Symbol
          )
            .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
index 5c908db7..ac3a3840 100644
--- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi
@@ -19,7 +19,10 @@ module OpenAI
        # being regenerated.
        sig do
          params(
-            content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]),
+            content: T.any(
+              String,
+              T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)]
+            ),
            type: Symbol
          )
            .returns(T.attached_class)
@@ -47,7 +50,10 @@ module OpenAI
        end

        ChatCompletionContentPartTextArray =
-          T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter)
+          T.let(
+            OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText],
+            OpenAI::Internal::Type::Converter
+          )
      end
    end
  end
diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
index f04bdf4a..6ef22108 100644
--- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi
@@ -25,7 +25,10 @@ module OpenAI
        # for this purpose instead.
        sig do
          params(
-            content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]),
+            content: T.any(
+              String,
+              T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)]
+            ),
            name: String,
            role: Symbol
          )
            .returns(T.attached_class)
@@ -56,7 +59,10 @@ module OpenAI
        end

        ChatCompletionContentPartTextArray =
-          T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter)
+          T.let(
+            OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText],
+            OpenAI::Internal::Type::Converter
+          )
      end
    end
  end
diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi
index 33ce4b9e..131c3a45 100644
--- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi
@@ -32,7 +32,7 @@ module OpenAI
            token: String,
            bytes: T.nilable(T::Array[Integer]),
            logprob: Float,
-            top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Util::AnyHash)]
+            top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Internal::Util::AnyHash)]
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi
index 03b8ecd1..dd162d31 100644
--- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi
@@ -7,7 +7,7 @@ module OpenAI
      sig { returns(OpenAI::Models::FunctionDefinition) }
      attr_reader :function

-      sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash)).void }
+      sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash)).void }
      attr_writer :function

      # The type of the tool. Currently, only `function` is supported.
@@ -15,7 +15,7 @@ module OpenAI
      attr_accessor :type

      sig do
-        params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Util::AnyHash), type: Symbol)
+        params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash), type: Symbol)
          .returns(T.attached_class)
      end
      def self.new(function:, type: :function)
diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
index 4956394b..e49bdc4a 100644
--- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi
@@ -18,7 +18,10 @@ module OpenAI
        sig do
          params(
-            content: T.any(String, T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Util::AnyHash)]),
+            content: T.any(
+              String,
+              T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)]
+            ),
            tool_call_id: String,
            role: Symbol
          )
            .returns(T.attached_class)
@@ -49,7 +52,10 @@ module OpenAI
        end

        ChatCompletionContentPartTextArray =
-          T.let(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Type::Converter)
+          T.let(
+            OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText],
+            OpenAI::Internal::Type::Converter
+          )
      end
    end
  end
diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
index c234e478..73d892eb 100644
--- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi
@@ -43,7 +43,7 @@ module OpenAI
              T::Array[
                T.any(
                  OpenAI::Models::Chat::ChatCompletionContentPartText,
-                  OpenAI::Util::AnyHash,
+                  OpenAI::Internal::Util::AnyHash,
                  OpenAI::Models::Chat::ChatCompletionContentPartImage,
                  OpenAI::Models::Chat::ChatCompletionContentPartInputAudio,
                  OpenAI::Models::Chat::ChatCompletionContentPart::File
@@ -105,7 +105,10 @@ module OpenAI
        end

        ChatCompletionContentPartArray =
-          T.let(OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Type::Converter)
+          T.let(
+            OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart],
+            OpenAI::Internal::Type::Converter
+          )
      end
    end
  end
diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi
index a841bba5..1d25cd61 100644
--- a/rbi/lib/openai/models/chat/completion_create_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_create_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
  module Models
    module Chat
      class CompletionCreateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # A list of messages comprising the conversation so far. Depending on the
        # [model](https://platform.openai.com/docs/models) you use, different message
@@ -43,7 +43,12 @@ module OpenAI
        sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) }
        attr_reader :audio

-        sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash))).void }
+        sig do
+          params(
+            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash))
+          )
+            .void
+        end
        attr_writer :audio

        # Number between -2.0 and 2.0. Positive values penalize new tokens based on their
@@ -83,7 +88,7 @@ module OpenAI
            function_call: T.any(
              OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionFunctionCallOption,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          )
            .void
@@ -98,7 +103,7 @@ module OpenAI
        sig do
          params(
-            functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)]
+            functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)]
          )
            .void
        end
@@ -181,7 +186,7 @@ module OpenAI
        sig do
          params(
-            prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash))
+            prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash))
          )
            .void
        end
@@ -229,7 +234,7 @@ module OpenAI
          params(
            response_format: T.any(
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONSchema,
              OpenAI::Models::ResponseFormatJSONObject
            )
@@ -280,7 +285,7 @@ module OpenAI
        sig do
          params(
-            stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))
+            stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash))
          )
            .void
        end
@@ -319,7 +324,7 @@ module OpenAI
            tool_choice: T.any(
              OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionNamedToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          )
            .void
@@ -332,7 +337,10 @@ module OpenAI
        sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) }
        attr_reader :tools

-          sig { params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)]).void }
+          sig do
+            params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)])
+              .void
+          end
        attr_writer :tools

        # An integer between 0 and 20 specifying the number of most likely tokens to
@@ -366,7 +374,7 @@ module OpenAI
        sig do
          params(
-            web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash)
+            web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -377,7 +385,7 @@ module OpenAI
            messages: T::Array[
              T.any(
                OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Chat::ChatCompletionSystemMessageParam,
                OpenAI::Models::Chat::ChatCompletionUserMessageParam,
                OpenAI::Models::Chat::ChatCompletionAssistantMessageParam,
@@ -386,14 +394,14 @@ module OpenAI
              )
            ],
            model: T.any(String, OpenAI::Models::ChatModel::OrSymbol),
-            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)),
+            audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)),
            frequency_penalty: T.nilable(Float),
            function_call: T.any(
              OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionFunctionCallOption,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            ),
-            functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)],
+            functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)],
            logit_bias: T.nilable(T::Hash[Symbol, Integer]),
            logprobs: T.nilable(T::Boolean),
            max_completion_tokens: T.nilable(Integer),
@@ -402,12 +410,12 @@ module OpenAI
            modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]),
            n: T.nilable(Integer),
            parallel_tool_calls: T::Boolean,
-            prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)),
+            prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)),
            presence_penalty: T.nilable(Float),
            reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
            response_format: T.any(
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONSchema,
              OpenAI::Models::ResponseFormatJSONObject
            ),
@@ -415,19 +423,19 @@ module OpenAI
            service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol),
            stop: T.nilable(T.any(String, T::Array[String])),
            store: T.nilable(T::Boolean),
-            stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
+            stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)),
            temperature: T.nilable(Float),
            tool_choice: T.any(
              OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Chat::ChatCompletionNamedToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            ),
-            tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)],
+            tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)],
            top_logprobs: T.nilable(Integer),
            top_p: T.nilable(Float),
            user: String,
-            web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash),
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
@@ -715,7 +723,7 @@ module OpenAI
          def self.variants
          end

-          StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter)
+          StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter)
        end

        class WebSearchOptions < OpenAI::BaseModel
@@ -743,7 +751,10 @@ module OpenAI
          sig do
            params(
              user_location: T.nilable(
-                T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash)
+                T.any(
+                  OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation,
+                  OpenAI::Internal::Util::AnyHash
+                )
              )
            )
              .void
          end
@@ -757,7 +768,10 @@ module OpenAI
            params(
              search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol,
              user_location: T.nilable(
-                T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, OpenAI::Util::AnyHash)
+                T.any(
+                  OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation,
+                  OpenAI::Internal::Util::AnyHash
+                )
              )
            )
              .returns(T.attached_class)
@@ -828,7 +842,7 @@ module OpenAI
            params(
              approximate: T.any(
                OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
-                OpenAI::Util::AnyHash
+                OpenAI::Internal::Util::AnyHash
              )
            )
              .void
@@ -844,7 +858,7 @@ module OpenAI
            params(
              approximate: T.any(
                OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
-                OpenAI::Util::AnyHash
+                OpenAI::Internal::Util::AnyHash
              ),
              type: Symbol
            )
diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/lib/openai/models/chat/completion_delete_params.rbi
index 9bc3d10e..b03964f8 100644
--- a/rbi/lib/openai/models/chat/completion_delete_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_delete_params.rbi
@@ -4,16 +4,12 @@ module OpenAI
  module Models
    module Chat
      class CompletionDeleteParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig do
-          params(
-            request_options: T.any(
-              OpenAI::RequestOptions,
-              OpenAI::Util::AnyHash
-            )
-          ).returns(T.attached_class)
+          params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+            .returns(T.attached_class)
        end
        def self.new(request_options: {})
        end
diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi
index 334e3fcd..a9fb2282 100644
--- a/rbi/lib/openai/models/chat/completion_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_list_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
  module Models
    module Chat
      class CompletionListParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # Identifier for the last chat completion from the previous pagination request.
        sig { returns(T.nilable(String)) }
@@ -49,7 +49,7 @@ module OpenAI
            metadata: T.nilable(T::Hash[Symbol, String]),
            model: String,
            order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
index fe74c4a3..ec5ec147 100644
--- a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi
@@ -4,16 +4,12 @@ module OpenAI
  module Models
    module Chat
      class CompletionRetrieveParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig do
-          params(
-            request_options: T.any(
-              OpenAI::RequestOptions,
-              OpenAI::Util::AnyHash
-            )
-          ).returns(T.attached_class)
+          params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+            .returns(T.attached_class)
        end
        def self.new(request_options: {})
        end
diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi
index d9801164..d895f848 100644
--- a/rbi/lib/openai/models/chat/completion_update_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_update_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
  module Models
    module Chat
      class CompletionUpdateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # Set of 16 key-value pairs that can be attached to an object. This can be useful
        # for storing additional information about the object in a structured format, and
@@ -19,7 +19,7 @@ module OpenAI
        sig do
          params(
            metadata: T.nilable(T::Hash[Symbol, String]),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
index 9accd714..5415b3ca 100644
--- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi
+++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi
@@ -5,8 +5,8 @@ module OpenAI
    module Chat
      module Completions
        class MessageListParams < OpenAI::BaseModel
-          extend OpenAI::Type::RequestParameters::Converter
-          include OpenAI::RequestParameters
+          extend OpenAI::Internal::Type::RequestParameters::Converter
+          include OpenAI::Internal::Type::RequestParameters

          # Identifier for the last message from the previous pagination request.
          sig { returns(T.nilable(String)) }
@@ -35,7 +35,7 @@ module OpenAI
              after: String,
              limit: Integer,
              order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol,
-              request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+              request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
            )
              .returns(T.attached_class)
          end
diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi
index e877536d..cb4efb9a 100644
--- a/rbi/lib/openai/models/completion.rbi
+++ b/rbi/lib/openai/models/completion.rbi
@@ -37,7 +37,7 @@ module OpenAI
      sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) }
      attr_reader :usage

-      sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash)).void }
+      sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)).void }
      attr_writer :usage

      # Represents a completion response from the API. Note: both the streamed and
@@ -45,11 +45,11 @@ module OpenAI
      sig do
        params(
          id: String,
-          choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Util::AnyHash)],
+          choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Internal::Util::AnyHash)],
          created: Integer,
          model: String,
          system_fingerprint: String,
-          usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Util::AnyHash),
+          usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash),
          object: Symbol
        )
          .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi
index cfd36710..516615be 100644
--- a/rbi/lib/openai/models/completion_choice.rbi
+++ b/rbi/lib/openai/models/completion_choice.rbi
@@ -16,7 +16,12 @@ module OpenAI
      sig { returns(T.nilable(OpenAI::Models::CompletionChoice::Logprobs)) }
      attr_reader :logprobs

-      sig { params(logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash))).void }
+      sig do
+        params(
+          logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::Util::AnyHash))
+        )
+          .void
+      end
      attr_writer :logprobs

      sig { returns(String) }
@@ -26,7 +31,7 @@ module OpenAI
        params(
          finish_reason: OpenAI::Models::CompletionChoice::FinishReason::OrSymbol,
          index: Integer,
-        logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Util::AnyHash)),
+        logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::Util::AnyHash)),
          text: String
        )
          .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi
index cde7b429..b71c3965 100644
--- a/rbi/lib/openai/models/completion_create_params.rbi
+++ b/rbi/lib/openai/models/completion_create_params.rbi
@@ -3,8 +3,8 @@ module OpenAI
  module Models
    class CompletionCreateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      # ID of the model to use. You can use the
      # [List models](https://platform.openai.com/docs/api-reference/models/list) API to
@@ -119,7 +119,7 @@ module OpenAI
      sig do
        params(
-          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash))
+          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash))
        )
          .void
      end
@@ -177,12 +177,12 @@ module OpenAI
          presence_penalty: T.nilable(Float),
          seed: T.nilable(Integer),
          stop: T.nilable(T.any(String, T::Array[String])),
-          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)),
+          stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)),
          suffix: T.nilable(String),
          temperature: T.nilable(Float),
          top_p: T.nilable(Float),
          user: String,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
        )
          .returns(T.attached_class)
      end
@@ -278,11 +278,14 @@ module OpenAI
        def self.variants
        end

-        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter)
+        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter)

-        IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Type::Converter)
+        IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Internal::Type::Converter)

-        ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Type::Converter)
+        ArrayOfToken2DArray = T.let(
+          OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]],
+          OpenAI::Internal::Type::Converter
+        )
      end

      # Up to 4 sequences where the API will stop generating further tokens. The
@@ -294,7 +297,7 @@ module OpenAI
        def self.variants
        end

-        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter)
+        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter)
      end
    end
  end
diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi
index 8daa195d..f059d80b 100644
--- a/rbi/lib/openai/models/completion_usage.rbi
+++ b/rbi/lib/openai/models/completion_usage.rbi
@@ -21,7 +21,7 @@ module OpenAI
      sig do
        params(
-        completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash)
+        completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::Util::AnyHash)
        )
          .void
      end
@@ -33,7 +33,7 @@ module OpenAI
      sig do
        params(
-        prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)
+        prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::Util::AnyHash)
        )
          .void
      end
@@ -45,8 +45,8 @@ module OpenAI
          completion_tokens: Integer,
          prompt_tokens: Integer,
          total_tokens: Integer,
-        completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Util::AnyHash),
-        prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Util::AnyHash)
+        completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::Util::AnyHash),
+        prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::Util::AnyHash)
        )
          .returns(T.attached_class)
      end
diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi
index b73e46bf..7377b320 100644
--- a/rbi/lib/openai/models/compound_filter.rbi
+++ b/rbi/lib/openai/models/compound_filter.rbi
@@ -15,7 +15,7 @@ module OpenAI
      # Combine multiple filters using `and` or `or`.
      sig do
        params(
-        filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, T.anything)],
+        filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, T.anything)],
          type: OpenAI::Models::CompoundFilter::Type::OrSymbol
        )
          .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi
index d057718b..d970b614 100644
--- a/rbi/lib/openai/models/create_embedding_response.rbi
+++ b/rbi/lib/openai/models/create_embedding_response.rbi
@@ -19,14 +19,14 @@ module OpenAI
      sig { returns(OpenAI::Models::CreateEmbeddingResponse::Usage) }
      attr_reader :usage

-      sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash)).void }
+      sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::Util::AnyHash)).void }
      attr_writer :usage

      sig do
        params(
-        data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Util::AnyHash)],
+        data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Internal::Util::AnyHash)],
          model: String,
-        usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Util::AnyHash),
+        usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::Util::AnyHash),
          object: Symbol
        )
          .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi
index 89a4ff35..47c96aeb 100644
--- a/rbi/lib/openai/models/embedding_create_params.rbi
+++ b/rbi/lib/openai/models/embedding_create_params.rbi
@@ -3,8 +3,8 @@ module OpenAI
  module Models
    class EmbeddingCreateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      # Input text to embed, encoded as a string or array of tokens. To embed multiple
      # inputs in a single request, pass an array of strings or array of token arrays.
@@ -57,7 +57,7 @@ module OpenAI
          dimensions: Integer,
          encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol,
          user: String,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
        )
          .returns(T.attached_class)
      end
@@ -95,11 +95,14 @@ module OpenAI
        def self.variants
        end

-        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter)
+        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter)

-        IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Type::Converter)
+        IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Internal::Type::Converter)

-        ArrayOfToken2DArray = T.let(OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], OpenAI::Type::Converter)
+        ArrayOfToken2DArray = T.let(
+          OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]],
+          OpenAI::Internal::Type::Converter
+        )
      end

      # ID of the model to use. You can use the
diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/lib/openai/models/file_content_params.rbi
index 2a195cd2..773bdb83 100644
--- a/rbi/lib/openai/models/file_content_params.rbi
+++ b/rbi/lib/openai/models/file_content_params.rbi
@@ -3,16 +3,12 @@ module OpenAI
  module Models
    class FileContentParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
      end
      def self.new(request_options: {})
      end
diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi
index 8813232a..f665b3dc 100644
--- a/rbi/lib/openai/models/file_create_params.rbi
+++ b/rbi/lib/openai/models/file_create_params.rbi
@@ -3,8 +3,8 @@ module OpenAI
  module Models
    class FileCreateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      # The File object (not file name) to be uploaded.
      sig { returns(T.any(IO, StringIO)) }
@@ -21,7 +21,7 @@ module OpenAI
        params(
          file: T.any(IO, StringIO),
          purpose: OpenAI::Models::FilePurpose::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
        )
          .returns(T.attached_class)
      end
diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/lib/openai/models/file_delete_params.rbi
index 3d675373..354f6382 100644
--- a/rbi/lib/openai/models/file_delete_params.rbi
+++ b/rbi/lib/openai/models/file_delete_params.rbi
@@ -3,16 +3,12 @@ module OpenAI
  module Models
    class FileDeleteParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
      end
      def self.new(request_options: {})
      end
diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi
index c3cd0d00..c1cc0765 100644
--- a/rbi/lib/openai/models/file_list_params.rbi
+++ b/rbi/lib/openai/models/file_list_params.rbi
@@ -3,8 +3,8 @@ module OpenAI
  module Models
    class FileListParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      # A cursor for use in pagination. `after` is an object ID that defines your place
      # in the list. For instance, if you make a list request and receive 100 objects,
@@ -45,7 +45,7 @@ module OpenAI
          limit: Integer,
          order: OpenAI::Models::FileListParams::Order::OrSymbol,
          purpose: String,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
        )
          .returns(T.attached_class)
      end
diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi b/rbi/lib/openai/models/file_retrieve_params.rbi
index 579f51ba..d34e1631 100644
--- a/rbi/lib/openai/models/file_retrieve_params.rbi
+++ b/rbi/lib/openai/models/file_retrieve_params.rbi
@@ -3,16 +3,12 @@ module OpenAI
  module Models
    class FileRetrieveParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

      sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
      end
      def self.new(request_options: {})
      end
diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
index 91218d14..c6541e22 100644
--- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
+++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi
@@ -18,7 +18,9 @@ module OpenAI
        attr_reader :error

        sig do
-          params(error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash)))
+          params(
+            error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::Util::AnyHash))
+          )
            .void
        end
        attr_writer :error
@@ -40,7 +42,7 @@ module OpenAI
        sig do
          params(
-            hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash)
+            hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -111,7 +113,10 @@ module OpenAI
        sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method)) }
        attr_reader :method_

-          sig { params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash)).void }
+          sig do
+            params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::Util::AnyHash))
+              .void
+          end
        attr_writer :method_

        # The `fine_tuning.job` object represents a fine-tuning job that has been created
@@ -120,10 +125,10 @@ module OpenAI
          params(
            id: String,
            created_at: Integer,
-            error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Util::AnyHash)),
+            error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::Util::AnyHash)),
            fine_tuned_model: T.nilable(String),
            finished_at: T.nilable(Integer),
-            hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Util::AnyHash),
+            hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::Util::AnyHash),
            model: String,
            organization_id: String,
            result_files: T::Array[String],
            seed: Integer,
            status: OpenAI::Models::FineTuning::FineTuningJob::Status::OrSymbol,
            trained_tokens: T.nilable(Integer),
            training_file: String,
            validation_file: T.nilable(String),
            estimated_finish: T.nilable(Integer),
            integrations: T.nilable(
-              T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Util::AnyHash)]
+              T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Internal::Util::AnyHash)]
            ),
            metadata: T.nilable(T::Hash[Symbol, String]),
-            method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Util::AnyHash),
+            method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::Util::AnyHash),
            object: Symbol
          )
            .returns(T.attached_class)
@@ -328,7 +333,12 @@ module OpenAI
          sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo)) }
          attr_reader :dpo

-            sig { params(dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash)).void }
+            sig do
+              params(
+                dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::Util::AnyHash)
+              )
+                .void
+            end
          attr_writer :dpo

          # Configuration for the supervised fine-tuning method.
@@ -337,7 +347,7 @@ module OpenAI
          sig do
            params(
-              supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash)
+              supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::Util::AnyHash)
            )
              .void
          end
@@ -353,8 +363,8 @@ module OpenAI
          # The method used for fine-tuning.
          sig do
            params(
-              dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Util::AnyHash),
-              supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Util::AnyHash),
+              dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::Util::AnyHash),
+              supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::Util::AnyHash),
              type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol
            )
              .returns(T.attached_class)
@@ -382,7 +392,10 @@ module OpenAI
            sig do
              params(
-                hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash)
+                hyperparameters: T.any(
+                  OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters,
+                  OpenAI::Internal::Util::AnyHash
+                )
              )
                .void
            end
@@ -391,7 +404,10 @@ module OpenAI
            # Configuration for the DPO fine-tuning method.
            sig do
              params(
-                hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash)
+                hyperparameters: T.any(
+                  OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters,
+                  OpenAI::Internal::Util::AnyHash
+                )
              )
                .returns(T.attached_class)
            end
@@ -516,7 +532,7 @@ module OpenAI
              params(
                hyperparameters: T.any(
                  OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters,
-                  OpenAI::Util::AnyHash
+                  OpenAI::Internal::Util::AnyHash
                )
              )
                .void
@@ -528,7 +544,7 @@ module OpenAI
              params(
                hyperparameters: T.any(
                  OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters,
-                  OpenAI::Util::AnyHash
+                  OpenAI::Internal::Util::AnyHash
                )
              )
                .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
index 79b69c52..78bec198 100644
--- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
+++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
@@ -15,12 +15,17 @@ module OpenAI
        sig { returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) }
        attr_reader :wandb

-        sig { params(wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash)).void }
+        sig do
+          params(
+            wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::Util::AnyHash)
+          )
+            .void
+        end
        attr_writer :wandb

        sig do
          params(
-          wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Util::AnyHash),
+          wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::Util::AnyHash),
            type: Symbol
          )
            .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
index d9429091..4180a266 100644
--- a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi
@@ -4,16 +4,12 @@ module OpenAI
  module Models
    module FineTuning
      class JobCancelParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig do
-          params(
-            request_options: T.any(
-              OpenAI::RequestOptions,
-              OpenAI::Util::AnyHash
-            )
-          ).returns(T.attached_class)
+          params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+            .returns(T.attached_class)
        end
        def self.new(request_options: {})
        end
diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
index 5091008f..68089ea2 100644
--- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
  module Models
    module FineTuning
      class JobCreateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # The name of the model to fine-tune. You can select one of the
        # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
@@ -39,7 +39,7 @@ module OpenAI
        sig do
          params(
-            hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash)
+            hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -62,7 +62,12 @@ module OpenAI
        sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) }
        attr_reader :method_

-          sig { params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash)).void }
+          sig do
+            params(
+              method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash)
+            )
+              .void
+          end
        attr_writer :method_

        # The seed controls the reproducibility of the job. Passing in the same seed and
@@ -98,16 +103,16 @@ module OpenAI
          params(
            model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol),
            training_file: String,
-            hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash),
+            hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash),
            integrations: T.nilable(
-              T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)]
+              T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::Util::AnyHash)]
            ),
            metadata: T.nilable(T::Hash[Symbol, String]),
-            method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash),
+            method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash),
            seed: T.nilable(Integer),
            suffix: T.nilable(String),
            validation_file: T.nilable(String),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
@@ -261,7 +266,7 @@ module OpenAI
          sig do
            params(
-              wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash)
+              wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::Util::AnyHash)
            )
              .void
          end
@@ -269,7 +274,7 @@ module OpenAI
          sig do
            params(
-              wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Util::AnyHash),
+              wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::Util::AnyHash),
              type: Symbol
            )
              .returns(T.attached_class)
@@ -341,7 +346,12 @@ module OpenAI
          sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo)) }
          attr_reader :dpo

-            sig { params(dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash)).void }
+            sig do
+              params(
+                dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::Util::AnyHash)
+              )
+                .void
+            end
          attr_writer :dpo

          # Configuration for the supervised fine-tuning method.
@@ -350,7 +360,7 @@ module OpenAI
          sig do
            params(
-              supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash)
+              supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::Util::AnyHash)
            )
              .void
          end
@@ -366,8 +376,8 @@ module OpenAI
          # The method used for fine-tuning.
sig do params( - dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Util::AnyHash), - supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Util::AnyHash), + dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::Util::AnyHash), + supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::Util::AnyHash), type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol ) .returns(T.attached_class) @@ -395,7 +405,10 @@ module OpenAI sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + hyperparameters: T.any( + OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -404,7 +417,10 @@ module OpenAI # Configuration for the DPO fine-tuning method. sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, OpenAI::Util::AnyHash) + hyperparameters: T.any( + OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, + OpenAI::Internal::Util::AnyHash + ) ) .returns(T.attached_class) end @@ -529,7 +545,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .void @@ -541,7 +557,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 0bfecb02..5a289277 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module FineTuning class JobListEventsParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # Identifier for the last event from the previous pagination request. sig { returns(T.nilable(String)) } @@ -25,7 +25,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 62697ccd..d3d055fc 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module FineTuning class JobListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # Identifier for the last job from the previous pagination request. 
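Taken together, the JobCreateParams hunks above widen every nested-model position to also accept a plain `OpenAI::Internal::Util::AnyHash`. A minimal call-site sketch of what that permits, assuming a configured client and placeholder IDs (the `fine_tuning.jobs.create` resource path is the generated service layout, not part of this diff):

    require "openai"

    client = OpenAI::Client.new  # assumed to read OPENAI_API_KEY from the environment

    # Nested params passed as plain hashes rather than model instances;
    # both forms satisfy the T.any(Model, AnyHash) sigs above.
    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini",            # placeholder model name
      training_file: "file-abc123",    # placeholder file ID
      hyperparameters: {n_epochs: 3},  # hash in place of Hyperparameters
      method_: {type: :supervised}     # hash in place of Method
    )
    puts job.id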
sig { returns(T.nilable(String)) } @@ -31,7 +31,7 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi index 4fadf667..119b93f5 100644 --- a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi @@ -4,16 +4,12 @@ module OpenAI module Models module FineTuning class JobRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index 70dcc5c6..e9dabed3 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -5,8 +5,8 @@ module OpenAI module FineTuning module Jobs class CheckpointListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # Identifier for the last checkpoint ID from the previous pagination request. 
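The list-params classes above follow the same RequestParameters pattern; a sketch of cursor pagination with `request_options` supplied as a hash, reusing the `client` from the earlier sketch (method name assumed from the generated services):

    events = client.fine_tuning.jobs.list_events(
      "ftjob-abc123",                 # placeholder job ID
      after: "ft-event-xyz",          # resume after this event ID
      limit: 20,
      request_options: {timeout: 30}  # hash in place of OpenAI::RequestOptions
    )
    events.each { |event| puts event.message }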
sig { returns(T.nilable(String)) } @@ -26,7 +26,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index de47d120..f51faa86 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -27,7 +27,7 @@ module OpenAI sig do params( - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash) + metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::Util::AnyHash) ) .void end @@ -49,7 +49,7 @@ module OpenAI created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Util::AnyHash), + metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::Util::AnyHash), step_number: Integer, object: Symbol ) diff --git a/rbi/lib/openai/models/function_parameters.rbi b/rbi/lib/openai/models/function_parameters.rbi index 8b20436c..5920964f 100644 --- a/rbi/lib/openai/models/function_parameters.rbi +++ b/rbi/lib/openai/models/function_parameters.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Type::Converter) + FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Internal::Type::Converter) end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 39818554..ea1298ad 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class ImageCreateVariationParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. @@ -49,7 +49,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 1305d68a..80ac6fdf 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class ImageEditParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # The image to edit. Must be a valid PNG file, less than 4MB, and square. 
If mask # is not provided, image must have transparency, which will be used as the mask. @@ -64,7 +64,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 78364258..5b0720c6 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class ImageGenerateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # A text description of the desired image(s). The maximum length is 1000 # characters for `dall-e-2` and 4000 characters for `dall-e-3`. @@ -67,7 +67,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index 5395968e..412cb40b 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -10,7 +10,7 @@ module OpenAI attr_accessor :data sig do - params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Util::AnyHash)]) + params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::Util::AnyHash)]) .returns(T.attached_class) end def self.new(created:, data:) diff --git a/rbi/lib/openai/models/metadata.rbi b/rbi/lib/openai/models/metadata.rbi index 9b99c6d0..20e8347e 100644 --- a/rbi/lib/openai/models/metadata.rbi +++ b/rbi/lib/openai/models/metadata.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - Metadata = T.let(OpenAI::HashOf[String], OpenAI::Type::Converter) + Metadata = T.let(OpenAI::HashOf[String], OpenAI::Internal::Type::Converter) end end diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/lib/openai/models/model_delete_params.rbi index 53253e49..3ace3c62 100644 --- a/rbi/lib/openai/models/model_delete_params.rbi +++ b/rbi/lib/openai/models/model_delete_params.rbi @@ -3,16 +3,12 @@ module OpenAI module Models class ModelDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/lib/openai/models/model_list_params.rbi index ccea7e8a..ec503d0e 100644 --- a/rbi/lib/openai/models/model_list_params.rbi +++ b/rbi/lib/openai/models/model_list_params.rbi @@ -3,16 +3,12 @@ 
module OpenAI module Models class ModelListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/lib/openai/models/model_retrieve_params.rbi index c6b05f8c..48ab1cf7 100644 --- a/rbi/lib/openai/models/model_retrieve_params.rbi +++ b/rbi/lib/openai/models/model_retrieve_params.rbi @@ -3,16 +3,12 @@ module OpenAI module Models class ModelRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index be0ff0fa..256ad93b 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -7,7 +7,7 @@ module OpenAI sig { returns(OpenAI::Models::Moderation::Categories) } attr_reader :categories - sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash)).void } + sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::Util::AnyHash)).void } attr_writer :categories # A list of the categories along with the input type(s) that the score applies to. @@ -16,7 +16,7 @@ module OpenAI sig do params( - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash) + category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::Util::AnyHash) ) .void end @@ -26,7 +26,12 @@ module OpenAI sig { returns(OpenAI::Models::Moderation::CategoryScores) } attr_reader :category_scores - sig { params(category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash)).void } + sig do + params( + category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :category_scores # Whether any of the below categories are flagged. 
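ModelListParams and its siblings carry nothing but `request_options`, so a per-call override reduces to a single hash; a sketch, reusing the `client` above:

    # max_retries/timeout are assumed RequestOptions fields.
    models = client.models.list(request_options: {max_retries: 2})
    models.each { |model| puts model.id }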
@@ -35,9 +40,9 @@ module OpenAI sig do params( - categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Util::AnyHash), - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Util::AnyHash), - category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Util::AnyHash), + categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::Util::AnyHash), + category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::Util::AnyHash), + category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::Util::AnyHash), flagged: T::Boolean ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 0f79bac9..c66af1c0 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class ModerationCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. @@ -34,10 +34,16 @@ module OpenAI input: T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)] + T::Array[ + T.any( + OpenAI::Models::ModerationImageURLInput, + OpenAI::Internal::Util::AnyHash, + OpenAI::Models::ModerationTextInput + ) + ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end @@ -75,10 +81,13 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter) + StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) ModerationMultiModalInputArray = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Type::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], + OpenAI::Internal::Type::Converter + ) end # The content moderation model you would like to use. 
Learn more in diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index 0de2d5b0..4c1b0187 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -20,7 +20,7 @@ module OpenAI params( id: String, model: String, - results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Util::AnyHash)] + results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Internal::Util::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index 5d3fb30a..77f4cfc0 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -7,7 +7,12 @@ module OpenAI sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash)).void } + sig do + params( + image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::Util::AnyHash) + ) + .void + end attr_writer :image_url # Always `image_url`. @@ -17,7 +22,7 @@ module OpenAI # An object describing an image to classify. sig do params( - image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Util::AnyHash), + image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index ea87d33f..95e6df84 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -8,7 +8,9 @@ module OpenAI attr_reader :json_schema sig do - params(json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash)) + params( + json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::Util::AnyHash) + ) .void end attr_writer :json_schema @@ -22,7 +24,7 @@ module OpenAI # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
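For the ModerationCreateParams union above, `input` may be a single string, an array of strings, or mixed multi-modal hashes; a sketch with a placeholder image URL:

    result = client.moderations.create(
      model: "omni-moderation-latest",
      input: [
        {type: :text, text: "some text to classify"},
        {type: :image_url, image_url: {url: "https://example.com/image.png"}}
      ]
    )
    puts result.results.first.flagged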
sig do params( - json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Util::AnyHash), + json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index e0e2b2ed..67ac236f 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -46,7 +46,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 07f8b017..94dca043 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -18,7 +18,7 @@ module OpenAI sig do params( - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter) + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter) ) .void end @@ -38,7 +38,7 @@ module OpenAI sig do params( - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::Util::AnyHash) ) .void end @@ -50,9 +50,9 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Util::AnyHash), + ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::Util::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 445f655c..20120ad6 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses class InputItemListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters # An item ID to list items after, used in pagination. 
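A sketch of the InputItemListParams above in use, paginating a stored response's input items (the `responses.input_items` resource path is assumed):

    items = client.responses.input_items.list(
      "resp_abc123",  # placeholder response ID
      limit: 50,
      order: :asc
    )
    items.each { |item| puts item.class }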
sig { returns(T.nilable(String)) } @@ -54,7 +54,7 @@ module OpenAI include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 4ce11535..c2c8686f 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -16,7 +16,10 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseError)) } attr_reader :error - sig { params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash))).void } + sig do + params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::Util::AnyHash))) + .void + end attr_writer :error # Details about why the response is incomplete. @@ -25,7 +28,7 @@ module OpenAI sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)) + incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::Util::AnyHash)) ) .void end @@ -168,7 +171,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))).void } + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash))).void } attr_writer :reasoning # The status of the response generation. One of `completed`, `failed`, @@ -187,7 +190,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash)).void } attr_writer :text # The truncation strategy to use for the model response. 
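On the read side, the nilable unions in the Response hunks above mean accessors such as `usage` and `incomplete_details` can return nil; a defensive sketch:

    resp = client.responses.retrieve("resp_abc123")  # placeholder ID
    puts resp.status
    puts resp.usage&.total_tokens                    # usage is nilable
    puts resp.incomplete_details&.reason if resp.status == :incomplete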
@@ -205,7 +208,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::Util::AnyHash)).void } attr_writer :usage # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -221,15 +224,15 @@ module OpenAI params( id: String, created_at: Float, - error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Util::AnyHash)), - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Util::AnyHash)), + error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::Util::AnyHash)), + incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::Util::AnyHash)), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, @@ -242,13 +245,13 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -257,11 +260,11 @@ module OpenAI top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), status: OpenAI::Models::Responses::ResponseStatus::OrSymbol, - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol), - usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Util::AnyHash), + usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::Util::AnyHash), user: String, object: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index ea6117b4..d5e6e2f9 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -10,7 +10,7 @@ module OpenAI sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when the code interpreter call is 
completed. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index e49fec75..5d0d90a1 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -10,7 +10,7 @@ module OpenAI sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when a code interpreter call is in progress. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index a19cad6d..f2f0f442 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -10,7 +10,7 @@ module OpenAI sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when the code interpreter is actively interpreting the code snippet. 
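The three code-interpreter events above (in progress / interpreting / completed) wrap the same tool-call payload and differ only in their `type` tag; a sketch of dispatching on them while consuming a raw event stream (`stream_raw` is an assumed helper name; any SSE event enumerator would do):

    stream = client.responses.stream_raw(model: "gpt-4o", input: "run some code")
    stream.each do |event|
      case event.type
      when :"response.code_interpreter_call.in_progress",
           :"response.code_interpreter_call.interpreting",
           :"response.code_interpreter_call.completed"
        # each lifecycle event carries the output_index of the tool call
        puts "#{event.type}: output_index=#{event.output_index}"
      end
    end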
sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index cb88444a..2f23866c 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -41,7 +41,7 @@ module OpenAI results: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], @@ -110,7 +110,7 @@ module OpenAI files: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ], type: Symbol diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 4b78bb8f..5ea9fdfe 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } attr_writer :response # The type of the event. Always `response.completed`. @@ -17,7 +17,10 @@ module OpenAI # Emitted when the model response is complete. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol) + params( + response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(response:, type: :"response.completed") diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 80a4e800..18ad0198 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -51,7 +51,7 @@ module OpenAI id: String, action: T.any( OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, @@ -62,7 +62,12 @@ module OpenAI OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait ), call_id: String, - pending_safety_checks: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, OpenAI::Util::AnyHash)], + pending_safety_checks: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, + OpenAI::Internal::Util::AnyHash + ) + ], status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) @@ -230,7 +235,12 @@ module OpenAI # A drag action. 
sig do params( - path: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, OpenAI::Util::AnyHash)], + path: T::Array[ + T.any( + OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, + OpenAI::Internal::Util::AnyHash + ) + ], type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index f10df142..5f0a22d5 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -18,7 +18,10 @@ module OpenAI sig do params( - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + output: T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -44,7 +47,7 @@ module OpenAI acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -64,11 +67,14 @@ module OpenAI params( id: String, call_id: String, - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), + output: T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + OpenAI::Internal::Util::AnyHash + ), acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ], status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 2c961d2d..9065af5e 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -36,7 +36,7 @@ module OpenAI output_index: Integer, part: T.any( OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputRefusal ), type: Symbol diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index b5948c6d..1b8ece5c 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -36,7 +36,7 @@ module OpenAI output_index: Integer, part: T.any( OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputRefusal ), type: Symbol diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 9bf4d295..ef7c1ca5 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses class ResponseCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters # Text, image, or file inputs to the model, used to generate a response. # @@ -100,7 +100,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash))).void } + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash))).void } attr_writer :reasoning # Whether to store the generated model response for later retrieval via API. @@ -122,7 +122,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash)).void } attr_writer :text # How the model should select which tool (or tools) to use when generating a @@ -146,7 +146,7 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -189,7 +189,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -234,7 +234,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -255,20 +255,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -277,7 +277,7 @@ module OpenAI top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index 0ff42037..0ce79c50 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi 
@@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } attr_writer :response # The type of the event. Always `response.created`. @@ -17,7 +17,10 @@ module OpenAI # An event that is emitted when a response is created. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol) + params( + response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(response:, type: :"response.created") diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/lib/openai/models/responses/response_delete_params.rbi index 92e2ddb7..008c4bd9 100644 --- a/rbi/lib/openai/models/responses/response_delete_params.rbi +++ b/rbi/lib/openai/models/responses/response_delete_params.rbi @@ -4,16 +4,12 @@ module OpenAI module Models module Responses class ResponseDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ).returns(T.attached_class) + params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + .returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index 8a3b97e1..8d43369b 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } attr_writer :response # The type of the event. Always `response.failed`. @@ -17,7 +17,10 @@ module OpenAI # An event that is emitted when a response fails. 
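Pulling the ResponseCreateParams hunks above together: `tools`, `text`, and `tool_choice` can each be supplied as hashes or enum symbols rather than model instances; a sketch with assumed placeholder values:

    resp = client.responses.create(
      model: "gpt-4o",
      input: "What happened in tech news today?",
      tools: [{type: :web_search_preview}],  # hash in place of WebSearchTool
      tool_choice: :auto,
      truncation: :auto
    )
    resp.output.each { |item| puts item.type }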
sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol) + params( + response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(response:, type: :"response.failed") diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 01b2899c..050fdfc1 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -34,7 +34,7 @@ module OpenAI queries: T::Array[String], status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, results: T.nilable( - T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Internal::Util::AnyHash)] ), type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index 7895e6a8..21962a06 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } attr_writer :response # The type of the event. Always `response.in_progress`. @@ -17,7 +17,10 @@ module OpenAI # Emitted when the response is in progress. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol) + params( + response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(response:, type: :"response.in_progress") diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index ea974ede..ab06154d 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } attr_writer :response # The type of the event. Always `response.incomplete`. @@ -17,7 +17,10 @@ module OpenAI # An event that is emitted when a response finishes as incomplete. 
sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Util::AnyHash), type: Symbol) + params( + response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), + type: Symbol + ) .returns(T.attached_class) end def self.new(response:, type: :"response.incomplete") diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/lib/openai/models/responses/response_input.rbi index d903e04f..f86dda51 100644 --- a/rbi/lib/openai/models/responses/response_input.rbi +++ b/rbi/lib/openai/models/responses/response_input.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses ResponseInput = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], OpenAI::Type::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], + OpenAI::Internal::Type::Converter + ) end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index eb5e163c..ab326c72 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -54,7 +54,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -156,7 +156,10 @@ module OpenAI sig do params( - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash) + output: T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + OpenAI::Internal::Util::AnyHash + ) ) .void end @@ -189,7 +192,7 @@ module OpenAI acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ] ) @@ -209,12 +212,15 @@ module OpenAI sig do params( call_id: String, - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Util::AnyHash), + output: T.any( + OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + OpenAI::Internal::Util::AnyHash + ), id: String, acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ) ], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi index 174cada3..b06ad65b 100644 --- a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses ResponseInputMessageContentList = - T.let(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], OpenAI::Type::Converter) + T.let( + OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], + OpenAI::Internal::Type::Converter + ) end end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 11580c3f..60b55701 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ 
b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -48,7 +48,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index a7e4d918..93cf118a 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -45,7 +45,7 @@ module OpenAI data: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 648d48d9..735922c6 100644 --- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -32,7 +32,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index 1d8ebd83..8dde062b 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -32,7 +32,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 81c36c47..8b07428a 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -36,7 +36,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputRefusal ) ], diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 561c4b12..d8aeb7e6 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -32,7 +32,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath ) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index acb3183e..bd3303b1 100644 
--- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi
+++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi
@@ -29,7 +29,7 @@ module OpenAI
         sig do
           params(
             id: String,
-            summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Util::AnyHash)],
+            summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Internal::Util::AnyHash)],
             status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol,
             type: Symbol
           )
diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi
index df3a5dc9..a6c61a0f 100644
--- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi
+++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module Responses
       class ResponseRetrieveParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

         # Additional fields to include in the response. See the `include` parameter for
         # Response creation above for more information.
@@ -18,7 +18,7 @@ module OpenAI
         sig do
           params(
             include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol],
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi
index 4ad5009e..689a3028 100644
--- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi
+++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi
@@ -41,7 +41,7 @@ module OpenAI
           params(
             annotation: T.any(
               OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
               OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation,
               OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath
             ),
diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi
index 240a7e92..1aad6df7 100644
--- a/rbi/lib/openai/models/responses/response_text_config.rbi
+++ b/rbi/lib/openai/models/responses/response_text_config.rbi
@@ -34,7 +34,7 @@ module OpenAI
           params(
             format_: T.any(
               OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
               OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig,
               OpenAI::Models::ResponseFormatJSONObject
             )
@@ -52,7 +52,7 @@ module OpenAI
           params(
             format_: T.any(
               OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
               OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig,
               OpenAI::Models::ResponseFormatJSONObject
             )
diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi
index 1ec163da..c9ac5ea8 100644
--- a/rbi/lib/openai/models/responses/response_usage.rbi
+++ b/rbi/lib/openai/models/responses/response_usage.rbi
@@ -14,7 +14,7 @@ module OpenAI
       sig do
         params(
-          input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash)
+          input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::Util::AnyHash)
         )
           .void
       end
@@ -30,7 +30,7 @@ module OpenAI
       sig do
         params(
-          output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash)
+          output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::Util::AnyHash)
         )
           .void
       end
@@ -45,9 +45,9 @@ module OpenAI
       sig do
         params(
           input_tokens: Integer,
-          input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Util::AnyHash),
+          input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::Util::AnyHash),
           output_tokens: Integer,
-          output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Util::AnyHash),
+          output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::Util::AnyHash),
           total_tokens: Integer
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi
index 310805df..ba497fb4 100644
--- a/rbi/lib/openai/models/responses/web_search_tool.rbi
+++ b/rbi/lib/openai/models/responses/web_search_tool.rbi
@@ -24,7 +24,7 @@ module OpenAI
       sig do
         params(
-          user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))
+          user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::Util::AnyHash))
         )
           .void
       end
@@ -37,7 +37,7 @@ module OpenAI
         params(
           type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol,
           search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol,
-          user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Util::AnyHash))
+          user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::Util::AnyHash))
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
index eac14e48..8525371f 100644
--- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
+++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
@@ -6,7 +6,7 @@ module OpenAI
       sig { returns(OpenAI::Models::StaticFileChunkingStrategy) }
       attr_reader :static

-      sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)).void }
+      sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash)).void }
       attr_writer :static

       # Always `static`.
@@ -14,7 +14,10 @@ module OpenAI
       attr_accessor :type

       sig do
-        params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash), type: Symbol)
+        params(
+          static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash),
+          type: Symbol
+        )
           .returns(T.attached_class)
       end
       def self.new(static:, type: :static)
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
index 5387db39..fe747e98 100644
--- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
+++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
@@ -6,7 +6,7 @@ module OpenAI
       sig { returns(OpenAI::Models::StaticFileChunkingStrategy) }
       attr_reader :static

-      sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash)).void }
+      sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash)).void }
       attr_writer :static

       # Always `static`.
@@ -15,7 +15,10 @@ module OpenAI
       # Customize your own chunking strategy by setting chunk size and chunk overlap.
       sig do
-        params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Util::AnyHash), type: Symbol)
+        params(
+          static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash),
+          type: Symbol
+        )
           .returns(T.attached_class)
       end
       def self.new(static:, type: :static)
diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi
index fc61f0b1..feefd876 100644
--- a/rbi/lib/openai/models/upload.rbi
+++ b/rbi/lib/openai/models/upload.rbi
@@ -41,7 +41,7 @@ module OpenAI
       sig { returns(T.nilable(OpenAI::Models::FileObject)) }
       attr_reader :file

-      sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash))).void }
+      sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::Util::AnyHash))).void }
       attr_writer :file

       # The Upload object can accept byte chunks in the form of Parts.
@@ -54,7 +54,7 @@ module OpenAI
           filename: String,
           purpose: String,
           status: OpenAI::Models::Upload::Status::OrSymbol,
-          file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Util::AnyHash)),
+          file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::Util::AnyHash)),
           object: Symbol
         )
           .returns(T.attached_class)
diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/lib/openai/models/upload_cancel_params.rbi
index 7fb18dd3..706ec1dd 100644
--- a/rbi/lib/openai/models/upload_cancel_params.rbi
+++ b/rbi/lib/openai/models/upload_cancel_params.rbi
@@ -3,16 +3,12 @@
 module OpenAI
   module Models
     class UploadCancelParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi
index 24f09fa4..9558d974 100644
--- a/rbi/lib/openai/models/upload_complete_params.rbi
+++ b/rbi/lib/openai/models/upload_complete_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class UploadCompleteParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # The ordered list of Part IDs.
       sig { returns(T::Array[String]) }
@@ -22,7 +22,7 @@ module OpenAI
         params(
           part_ids: T::Array[String],
           md5: String,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi
index 1e0390bf..50f627d5 100644
--- a/rbi/lib/openai/models/upload_create_params.rbi
+++ b/rbi/lib/openai/models/upload_create_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class UploadCreateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # The number of bytes in the file you are uploading.
       sig { returns(Integer) }
@@ -34,7 +34,7 @@ module OpenAI
           filename: String,
           mime_type: String,
           purpose: OpenAI::Models::FilePurpose::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi
index c6f213d5..b840d72b 100644
--- a/rbi/lib/openai/models/uploads/part_create_params.rbi
+++ b/rbi/lib/openai/models/uploads/part_create_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module Uploads
       class PartCreateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

         # The chunk of bytes for this Part.
         sig { returns(T.any(IO, StringIO)) }
@@ -14,7 +14,7 @@ module OpenAI
         sig do
           params(
             data: T.any(IO, StringIO),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
           )
             .returns(T.attached_class)
         end
diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi
index e57a4fb8..e0277e39 100644
--- a/rbi/lib/openai/models/vector_store.rbi
+++ b/rbi/lib/openai/models/vector_store.rbi
@@ -14,7 +14,7 @@ module OpenAI
       sig { returns(OpenAI::Models::VectorStore::FileCounts) }
       attr_reader :file_counts

-      sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash)).void }
+      sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::Util::AnyHash)).void }
       attr_writer :file_counts

       # The Unix timestamp (in seconds) for when the vector store was last active.
@@ -52,7 +52,10 @@ module OpenAI
       sig { returns(T.nilable(OpenAI::Models::VectorStore::ExpiresAfter)) }
       attr_reader :expires_after

-      sig { params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash)).void }
+      sig do
+        params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::Util::AnyHash))
+          .void
+      end
       attr_writer :expires_after

       # The Unix timestamp (in seconds) for when the vector store will expire.
@@ -65,13 +68,13 @@ module OpenAI
         params(
           id: String,
           created_at: Integer,
-          file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Util::AnyHash),
+          file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::Util::AnyHash),
           last_active_at: T.nilable(Integer),
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: String,
           status: OpenAI::Models::VectorStore::Status::OrSymbol,
           usage_bytes: Integer,
-          expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Util::AnyHash),
+          expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::Util::AnyHash),
           expires_at: T.nilable(Integer),
           object: Symbol
         )
diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi
index 4c0acebf..86965352 100644
--- a/rbi/lib/openai/models/vector_store_create_params.rbi
+++ b/rbi/lib/openai/models/vector_store_create_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class VectorStoreCreateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # The chunking strategy used to chunk the file(s). If not set, will use the `auto`
       # strategy. Only applicable if `file_ids` is non-empty.
@@ -24,7 +24,7 @@ module OpenAI
         params(
           chunking_strategy: T.any(
             OpenAI::Models::AutoFileChunkingStrategyParam,
-            OpenAI::Util::AnyHash,
+            OpenAI::Internal::Util::AnyHash,
             OpenAI::Models::StaticFileChunkingStrategyObjectParam
           )
         )
@@ -37,7 +37,9 @@ module OpenAI
       attr_reader :expires_after

       sig do
-        params(expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash))
+        params(
+          expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)
+        )
           .void
       end
       attr_writer :expires_after
@@ -71,14 +73,14 @@ module OpenAI
         params(
           chunking_strategy: T.any(
             OpenAI::Models::AutoFileChunkingStrategyParam,
-            OpenAI::Util::AnyHash,
+            OpenAI::Internal::Util::AnyHash,
             OpenAI::Models::StaticFileChunkingStrategyObjectParam
           ),
-          expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash),
+          expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash),
           file_ids: T::Array[String],
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: String,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/lib/openai/models/vector_store_delete_params.rbi
index d01c30e0..9ea9ee73 100644
--- a/rbi/lib/openai/models/vector_store_delete_params.rbi
+++ b/rbi/lib/openai/models/vector_store_delete_params.rbi
@@ -3,16 +3,12 @@
 module OpenAI
   module Models
     class VectorStoreDeleteParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi
index 819d2785..ef5af052 100644
--- a/rbi/lib/openai/models/vector_store_list_params.rbi
+++ b/rbi/lib/openai/models/vector_store_list_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class VectorStoreListParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # A cursor for use in pagination. `after` is an object ID that defines your place
       # in the list. For instance, if you make a list request and receive 100 objects,
@@ -48,7 +48,7 @@ module OpenAI
           before: String,
           limit: Integer,
           order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/lib/openai/models/vector_store_retrieve_params.rbi
index 7e591036..f73d277f 100644
--- a/rbi/lib/openai/models/vector_store_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_store_retrieve_params.rbi
@@ -3,16 +3,12 @@
 module OpenAI
   module Models
     class VectorStoreRetrieveParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       sig do
-        params(
-          request_options: T.any(
-            OpenAI::RequestOptions,
-            OpenAI::Util::AnyHash
-          )
-        ).returns(T.attached_class)
+        params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+          .returns(T.attached_class)
       end
       def self.new(request_options: {})
       end
diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi
index 85d03442..15dfc27f 100644
--- a/rbi/lib/openai/models/vector_store_search_params.rbi
+++ b/rbi/lib/openai/models/vector_store_search_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class VectorStoreSearchParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # A query string for a search
       sig { returns(T.any(String, T::Array[String])) }
@@ -16,7 +16,7 @@ module OpenAI
       sig do
         params(
-          filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter)
+          filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter)
         )
           .void
       end
@@ -36,7 +36,7 @@ module OpenAI
       sig do
         params(
-          ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash)
+          ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash)
         )
           .void
       end
@@ -52,11 +52,11 @@ module OpenAI
       sig do
         params(
           query: T.any(String, T::Array[String]),
-          filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter),
+          filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter),
           max_num_results: Integer,
-          ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash),
+          ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash),
           rewrite_query: T::Boolean,
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
@@ -94,7 +94,7 @@ module OpenAI
         def self.variants
         end

-        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Type::Converter)
+        StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter)
       end

       # A filter to apply based on file attributes.
diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi
index eefd75ff..05fd7444 100644
--- a/rbi/lib/openai/models/vector_store_search_response.rbi
+++ b/rbi/lib/openai/models/vector_store_search_response.rbi
@@ -30,7 +30,7 @@ module OpenAI
       sig do
         params(
           attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
-          content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Util::AnyHash)],
+          content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Internal::Util::AnyHash)],
           file_id: String,
           filename: String,
           score: Float
diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi
index 04887f7e..1982bdfd 100644
--- a/rbi/lib/openai/models/vector_store_update_params.rbi
+++ b/rbi/lib/openai/models/vector_store_update_params.rbi
@@ -3,8 +3,8 @@
 module OpenAI
   module Models
     class VectorStoreUpdateParams < OpenAI::BaseModel
-      extend OpenAI::Type::RequestParameters::Converter
-      include OpenAI::RequestParameters
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters

       # The expiration policy for a vector store.
       sig { returns(T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) }
@@ -12,7 +12,7 @@ module OpenAI
       sig do
         params(
-          expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash))
+          expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash))
         )
           .void
       end
@@ -33,10 +33,10 @@ module OpenAI
       sig do
         params(
-          expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)),
+          expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)),
           metadata: T.nilable(T::Hash[Symbol, String]),
           name: T.nilable(String),
-          request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+          request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
         )
           .returns(T.attached_class)
       end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
index 0a18aa15..b8985360 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileBatchCancelParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -13,7 +13,7 @@ module OpenAI
        sig do
          params(
            vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
index 2f1c7b95..447f5e35 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileBatchCreateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
        # the vector store should use. Useful for tools like `file_search` that can access
@@ -39,7 +39,7 @@ module OpenAI
          params(
            chunking_strategy: T.any(
              OpenAI::Models::AutoFileChunkingStrategyParam,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::StaticFileChunkingStrategyObjectParam
            )
          )
@@ -53,10 +53,10 @@ module OpenAI
            attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
            chunking_strategy: T.any(
              OpenAI::Models::AutoFileChunkingStrategyParam,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::StaticFileChunkingStrategyObjectParam
            ),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
index 311c5591..07693f31 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileBatchListFilesParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -61,7 +61,7 @@ module OpenAI
            filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol,
            limit: Integer,
            order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
index 16e593c7..1d9ae1fd 100644
--- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileBatchRetrieveParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -13,7 +13,7 @@ module OpenAI
        sig do
          params(
            vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi
index 9558d156..3889fd16 100644
--- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileContentParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -13,7 +13,7 @@ module OpenAI
        sig do
          params(
            vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
index 1acbeb21..221d7a92 100644
--- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileCreateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # A [File](https://platform.openai.com/docs/api-reference/files) ID that the
        # vector store should use. Useful for tools like `file_search` that can access
@@ -39,7 +39,7 @@ module OpenAI
          params(
            chunking_strategy: T.any(
              OpenAI::Models::AutoFileChunkingStrategyParam,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::StaticFileChunkingStrategyObjectParam
            )
          )
@@ -53,10 +53,10 @@ module OpenAI
            attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
            chunking_strategy: T.any(
              OpenAI::Models::AutoFileChunkingStrategyParam,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::StaticFileChunkingStrategyObjectParam
            ),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
index 9ffd9fb3..572a5963 100644
--- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileDeleteParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -13,7 +13,7 @@ module OpenAI
        sig do
          params(
            vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
index 791c45a1..9f4a5c71 100644
--- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileListParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        # A cursor for use in pagination. `after` is an object ID that defines your place
        # in the list. For instance, if you make a list request and receive 100 objects,
@@ -57,7 +57,7 @@ module OpenAI
            filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol,
            limit: Integer,
            order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
index 8ac6b042..48f7edba 100644
--- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileRetrieveParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -13,7 +13,7 @@ module OpenAI
        sig do
          params(
            vector_store_id: String,
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
index a8c04b54..294449c4 100644
--- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi
+++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi
@@ -4,8 +4,8 @@ module OpenAI
   module Models
     module VectorStores
       class FileUpdateParams < OpenAI::BaseModel
-        extend OpenAI::Type::RequestParameters::Converter
-        include OpenAI::RequestParameters
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters

        sig { returns(String) }
        attr_accessor :vector_store_id
@@ -22,7 +22,7 @@ module OpenAI
          params(
            vector_store_id: String,
            attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
-            request_options: T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)
+            request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
          )
            .returns(T.attached_class)
        end
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
index 7707a463..30c4c023 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi
@@ -19,7 +19,9 @@ module OpenAI
        sig do
          params(
-            last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash))
+            last_error: T.nilable(
+              T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::Util::AnyHash)
+            )
          )
            .void
        end
@@ -69,7 +71,7 @@ module OpenAI
          params(
            chunking_strategy: T.any(
              OpenAI::Models::StaticFileChunkingStrategyObject,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::OtherFileChunkingStrategyObject
            )
          )
@@ -82,14 +84,16 @@ module OpenAI
          params(
            id: String,
            created_at: Integer,
-            last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Util::AnyHash)),
+            last_error: T.nilable(
+              T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::Util::AnyHash)
+            ),
            status: OpenAI::Models::VectorStores::VectorStoreFile::Status::OrSymbol,
            usage_bytes: Integer,
            vector_store_id: String,
            attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
            chunking_strategy: T.any(
              OpenAI::Models::StaticFileChunkingStrategyObject,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::OtherFileChunkingStrategyObject
            ),
            object: Symbol
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
index c16243ce..b71f8619 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
@@ -18,7 +18,7 @@ module OpenAI
        sig do
          params(
-            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash)
+            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::Util::AnyHash)
          )
            .void
        end
@@ -45,7 +45,7 @@ module OpenAI
          params(
            id: String,
            created_at: Integer,
-            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Util::AnyHash),
+            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::Util::AnyHash),
            status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::OrSymbol,
            vector_store_id: String,
            object: Symbol
diff --git a/rbi/lib/openai/page.rbi b/rbi/lib/openai/page.rbi
deleted file mode 100644
index 8a668d1c..00000000
--- a/rbi/lib/openai/page.rbi
+++ /dev/null
@@ -1,19 +0,0 @@
-# typed: strong
-
-module OpenAI
-  class Page
-    include OpenAI::Type::BasePage
-
-    Elem = type_member
-
-    sig { returns(T.nilable(T::Array[Elem])) }
-    attr_accessor :data
-
-    sig { returns(String) }
-    attr_accessor :object
-
-    sig { returns(String) }
-    def inspect
-    end
-  end
-end
diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi
index 4e963f12..9c665553 100644
--- a/rbi/lib/openai/request_options.rbi
+++ b/rbi/lib/openai/request_options.rbi
@@ -41,7 +41,7 @@ module OpenAI
    attr_accessor :timeout

    # Returns a new instance of RequestOptions.
-    sig { params(values: OpenAI::Util::AnyHash).returns(T.attached_class) }
+    sig { params(values: OpenAI::Internal::Util::AnyHash).returns(T.attached_class) }
    def self.new(values = {})
    end
  end
diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi
index b662b954..9b5892ad 100644
--- a/rbi/lib/openai/resources/audio/speech.rbi
+++ b/rbi/lib/openai/resources/audio/speech.rbi
@@ -13,7 +13,7 @@ module OpenAI
          instructions: String,
          response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol,
          speed: Float,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(StringIO)
      end
diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi
index 476b325a..8ec0dc0f 100644
--- a/rbi/lib/openai/resources/audio/transcriptions.rbi
+++ b/rbi/lib/openai/resources/audio/transcriptions.rbi
@@ -16,7 +16,7 @@ module OpenAI
          temperature: Float,
          timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol],
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose))
      end
@@ -78,10 +78,10 @@ module OpenAI
          temperature: Float,
          timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol],
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(
-            OpenAI::Stream[
+            OpenAI::Internal::Stream[
              T.any(
                OpenAI::Models::Audio::TranscriptionTextDeltaEvent,
                OpenAI::Models::Audio::TranscriptionTextDoneEvent
diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi
index 178eaa7e..2e07bd58 100644
--- a/rbi/lib/openai/resources/audio/translations.rbi
+++ b/rbi/lib/openai/resources/audio/translations.rbi
@@ -12,7 +12,7 @@ module OpenAI
          prompt: String,
          response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol,
          temperature: Float,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose))
      end
diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi
index fe814b47..acaa4e55 100644
--- a/rbi/lib/openai/resources/batches.rbi
+++ b/rbi/lib/openai/resources/batches.rbi
@@ -10,7 +10,7 @@ module OpenAI
          endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol,
          input_file_id: String,
          metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Batch)
      end
@@ -48,12 +48,7 @@ module OpenAI
      sig do
        params(
          batch_id: String,
-          request_options: T.nilable(
-            T.any(
-              OpenAI::RequestOptions,
-              OpenAI::Util::AnyHash
-            )
-          )
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Batch)
      end
@@ -69,9 +64,9 @@ module OpenAI
        params(
          after: String,
          limit: Integer,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
-          .returns(OpenAI::CursorPage[OpenAI::Models::Batch])
+          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Batch])
      end
      def list(
        # A cursor for use in pagination. `after` is an object ID that defines your place
@@ -92,12 +87,7 @@ module OpenAI
      sig do
        params(
          batch_id: String,
-          request_options: T.nilable(
-            T.any(
-              OpenAI::RequestOptions,
-              OpenAI::Util::AnyHash
-            )
-          )
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Batch)
      end
diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi
index 58bd9abe..00296adc 100644
--- a/rbi/lib/openai/resources/beta/assistants.rbi
+++ b/rbi/lib/openai/resources/beta/assistants.rbi
@@ -17,23 +17,25 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
          ),
          temperature: T.nilable(Float),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Util::AnyHash)),
+          tool_resources: T.nilable(
+            T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)
+          ),
          tools: T::Array[
            T.any(
              OpenAI::Models::Beta::CodeInterpreterTool,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::Beta::FileSearchTool,
              OpenAI::Models::Beta::FunctionTool
            )
          ],
          top_p: T.nilable(Float),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Assistant)
      end
@@ -113,7 +115,7 @@ module OpenAI
      sig do
        params(
          assistant_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Assistant)
      end
@@ -138,23 +140,25 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
          ),
          temperature: T.nilable(Float),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Util::AnyHash)),
+          tool_resources: T.nilable(
+            T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)
+          ),
          tools: T::Array[
            T.any(
              OpenAI::Models::Beta::CodeInterpreterTool,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::Beta::FileSearchTool,
              OpenAI::Models::Beta::FunctionTool
            )
          ],
          top_p: T.nilable(Float),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Assistant)
      end
@@ -239,9 +243,9 @@ module OpenAI
          before: String,
          limit: Integer,
          order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
-          .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Assistant])
+          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant])
      end
      def list(
        # A cursor for use in pagination. `after` is an object ID that defines your place
@@ -268,7 +272,7 @@ module OpenAI
      sig do
        params(
          assistant_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::AssistantDeleted)
      end
diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi
index 114b3406..64816d6d 100644
--- a/rbi/lib/openai/resources/beta/threads.rbi
+++ b/rbi/lib/openai/resources/beta/threads.rbi
@@ -13,10 +13,10 @@ module OpenAI
      # Create a thread.
      sig do
        params(
-          messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Util::AnyHash)],
+          messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)],
          metadata: T.nilable(T::Hash[Symbol, String]),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Util::AnyHash)),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)),
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Thread)
      end
@@ -44,7 +44,7 @@ module OpenAI
      sig do
        params(
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Thread)
      end
@@ -60,8 +60,8 @@ module OpenAI
        params(
          thread_id: String,
          metadata: T.nilable(T::Hash[Symbol, String]),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Util::AnyHash)),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)),
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Thread)
      end
@@ -88,7 +88,7 @@ module OpenAI
      sig do
        params(
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::ThreadDeleted)
      end
@@ -113,26 +113,28 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
          ),
          temperature: T.nilable(Float),
-          thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash),
+          thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash),
          tool_choice: T.nilable(
            T.any(
              OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Beta::AssistantToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          ),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)),
+          tool_resources: T.nilable(
+            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash)
+          ),
          tools: T.nilable(
            T::Array[
              T.any(
                OpenAI::Models::Beta::CodeInterpreterTool,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Beta::FileSearchTool,
                OpenAI::Models::Beta::FunctionTool
              )
@@ -140,10 +142,10 @@ module OpenAI
          ),
          top_p: T.nilable(Float),
          truncation_strategy: T.nilable(
-            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash)
+            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash)
          ),
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -257,26 +259,28 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
          ),
          temperature: T.nilable(Float),
-          thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Util::AnyHash),
+          thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash),
          tool_choice: T.nilable(
            T.any(
              OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Beta::AssistantToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          ),
-          tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Util::AnyHash)),
+          tool_resources: T.nilable(
+            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash)
+          ),
          tools: T.nilable(
            T::Array[
              T.any(
                OpenAI::Models::Beta::CodeInterpreterTool,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Beta::FileSearchTool,
                OpenAI::Models::Beta::FunctionTool
              )
@@ -284,13 +288,13 @@ module OpenAI
          ),
          top_p: T.nilable(Float),
          truncation_strategy: T.nilable(
-            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Util::AnyHash)
+            T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash)
          ),
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(
-            OpenAI::Stream[
+            OpenAI::Internal::Stream[
              T.any(
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated,
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated,
diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi
index b30ffe2f..a412e38b 100644
--- a/rbi/lib/openai/resources/beta/threads/messages.rbi
+++ b/rbi/lib/openai/resources/beta/threads/messages.rbi
@@ -14,7 +14,7 @@ module OpenAI
            T::Array[
              T.any(
                OpenAI::Models::Beta::Threads::ImageFileContentBlock,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Beta::Threads::ImageURLContentBlock,
                OpenAI::Models::Beta::Threads::TextContentBlockParam
              )
@@ -22,10 +22,10 @@ module OpenAI
          ),
          role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol,
          attachments: T.nilable(
-            T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Util::AnyHash)]
+            T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::Util::AnyHash)]
          ),
          metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Message)
      end
@@ -60,7 +60,7 @@ module OpenAI
        params(
          message_id: String,
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Message)
      end
@@ -80,7 +80,7 @@ module OpenAI
          message_id: String,
          thread_id: String,
          metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Message)
      end
@@ -109,9 +109,9 @@ module OpenAI
          limit: Integer,
          order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol,
          run_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
-          .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message])
+          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message])
      end
      def list(
        # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads)
@@ -144,7 +144,7 @@ module OpenAI
        params(
          message_id: String,
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::MessageDeleted)
      end
diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi
index dd25ce47..43be9231 100644
--- a/rbi/lib/openai/resources/beta/threads/runs.rbi
+++ b/rbi/lib/openai/resources/beta/threads/runs.rbi
@@ -16,7 +16,7 @@ module OpenAI
          include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
          additional_instructions: T.nilable(String),
          additional_messages: T.nilable(
-            T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)]
+            T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)]
          ),
          instructions: T.nilable(String),
          max_completion_tokens: T.nilable(Integer),
@@ -29,7 +29,7 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
@@ -39,14 +39,14 @@ module OpenAI
            T.any(
              OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Beta::AssistantToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          ),
          tools: T.nilable(
            T::Array[
              T.any(
                OpenAI::Models::Beta::CodeInterpreterTool,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Beta::FileSearchTool,
                OpenAI::Models::Beta::FunctionTool
              )
@@ -54,10 +54,10 @@ module OpenAI
          ),
          top_p: T.nilable(Float),
          truncation_strategy: T.nilable(
-            T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash)
+            T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash)
          ),
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -185,7 +185,7 @@ module OpenAI
          include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
          additional_instructions: T.nilable(String),
          additional_messages: T.nilable(
-            T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Util::AnyHash)]
+            T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)]
          ),
          instructions: T.nilable(String),
          max_completion_tokens: T.nilable(Integer),
@@ -198,7 +198,7 @@ module OpenAI
            T.any(
              Symbol,
              OpenAI::Models::ResponseFormatText,
-              OpenAI::Util::AnyHash,
+              OpenAI::Internal::Util::AnyHash,
              OpenAI::Models::ResponseFormatJSONObject,
              OpenAI::Models::ResponseFormatJSONSchema
            )
@@ -208,14 +208,14 @@ module OpenAI
            T.any(
              OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol,
              OpenAI::Models::Beta::AssistantToolChoice,
-              OpenAI::Util::AnyHash
+              OpenAI::Internal::Util::AnyHash
            )
          ),
          tools: T.nilable(
            T::Array[
              T.any(
                OpenAI::Models::Beta::CodeInterpreterTool,
-                OpenAI::Util::AnyHash,
+                OpenAI::Internal::Util::AnyHash,
                OpenAI::Models::Beta::FileSearchTool,
                OpenAI::Models::Beta::FunctionTool
              )
@@ -223,13 +223,13 @@ module OpenAI
          ),
          top_p: T.nilable(Float),
          truncation_strategy: T.nilable(
-            T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Util::AnyHash)
+            T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash)
          ),
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(
-            OpenAI::Stream[
+            OpenAI::Internal::Stream[
              T.any(
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated,
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated,
@@ -380,7 +380,7 @@ module OpenAI
        params(
          run_id: String,
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -400,7 +400,7 @@ module OpenAI
          run_id: String,
          thread_id: String,
          metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -429,9 +429,9 @@ module OpenAI
          before: String,
          limit: Integer,
          order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
-          .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run])
+          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run])
      end
      def list(
        # The ID of the thread the run belongs to.
@@ -461,7 +461,7 @@ module OpenAI
        params(
          run_id: String,
          thread_id: String,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -482,9 +482,14 @@ module OpenAI
        params(
          run_id: String,
          thread_id: String,
-          tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)],
+          tool_outputs: T::Array[
+            T.any(
+              OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput,
+              OpenAI::Internal::Util::AnyHash
+            )
+          ],
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Run)
      end
@@ -512,12 +517,17 @@ module OpenAI
        params(
          run_id: String,
          thread_id: String,
-          tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Util::AnyHash)],
+          tool_outputs: T::Array[
+            T.any(
+              OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput,
+              OpenAI::Internal::Util::AnyHash
+            )
+          ],
          stream: T.noreturn,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(
-            OpenAI::Stream[
+            OpenAI::Internal::Stream[
              T.any(
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated,
                OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated,
diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
index a048f9ce..d035232e 100644
--- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
+++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi
@@ -13,7 +13,7 @@ module OpenAI
          thread_id: String,
          run_id: String,
          include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
          .returns(OpenAI::Models::Beta::Threads::Runs::RunStep)
      end
@@ -47,9 +47,9 @@ module OpenAI
          include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol],
          limit: Integer,
          order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol,
-          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))
+          request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
        )
-          .returns(OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep])
+          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep])
      end
      def list(
        # Path param: The ID of the run the run steps belong to.
diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 0471b372..ebf756a5 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -29,7 +29,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -38,14 +38,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -54,12 +54,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -67,20 +67,20 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash), + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: 
T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -297,7 +297,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -306,14 +306,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Util::AnyHash)), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Util::AnyHash)], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -322,12 +322,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Util::AnyHash)), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -335,22 +335,22 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Util::AnyHash + OpenAI::Internal::Util::AnyHash ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Util::AnyHash)], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Util::AnyHash), + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - 
.returns(OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) + .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end def stream_raw( # A list of messages comprising the conversation so far. Depending on the @@ -548,7 +548,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -566,7 +566,7 @@ module OpenAI params( completion_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -593,9 +593,9 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion]) end def list( # Identifier for the last chat completion from the previous pagination request. @@ -620,7 +620,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletionDeleted) end diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 15b7617c..447ff6c0 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -13,9 +13,9 @@ module OpenAI after: String, limit: Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end def list( # The ID of the chat completion to retrieve messages from. 
diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 7c97579b..804db29b 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -25,13 +25,13 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Completion) end @@ -168,15 +168,15 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::Stream[OpenAI::Models::Completion]) + .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion]) end def stream_raw( # ID of the model to use. You can use the diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 992bfec4..57d7fdd6 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -11,7 +11,7 @@ module OpenAI dimensions: Integer, encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::CreateEmbeddingResponse) end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 3a276b74..18c23250 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -28,7 +28,7 @@ module OpenAI params( file: T.any(IO, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FileObject) end @@ -48,12 +48,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable( - T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FileObject) end @@ -71,9 +66,9 @@ module OpenAI limit: Integer, order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - 
.returns(OpenAI::CursorPage[OpenAI::Models::FileObject]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FileObject]) end def list( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -97,12 +92,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable( - T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FileDeleted) end @@ -117,12 +107,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable( - T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(StringIO) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 57cec0cd..81ceb5d7 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -18,16 +18,16 @@ module OpenAI params( model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Util::AnyHash), + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash), integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::Util::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Util::AnyHash), + method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash), seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -100,7 +100,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -117,9 +117,9 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end def list( # Identifier for the last job from the previous pagination request. 
@@ -137,7 +137,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -154,9 +154,9 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end def list_events( # The ID of the fine-tuning job to get events for. diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index 9049855c..3274da29 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -11,9 +11,9 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end def list( # The ID of the fine-tuning job to get checkpoints for. diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index ea7194a0..58353329 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -12,7 +12,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -52,7 +52,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -98,7 +98,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 16962ff9..c78bca31 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -8,12 +8,7 @@ module OpenAI sig do params( model: String, - request_options: T.nilable( - T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ) + request_options: T.nilable(T.any(OpenAI::RequestOptions, 
OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Model) end @@ -27,8 +22,8 @@ module OpenAI # Lists the currently available models, and provides basic information about each # one such as the owner and availability. sig do - params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash))) - .returns(OpenAI::Page[OpenAI::Models::Model]) + params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))) + .returns(OpenAI::Internal::Page[OpenAI::Models::Model]) end def list(request_options: {}) end @@ -38,12 +33,7 @@ module OpenAI sig do params( model: String, - request_options: T.nilable( - T.any( - OpenAI::RequestOptions, - OpenAI::Util::AnyHash - ) - ) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::ModelDeleted) end diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 85abe41d..1b180936 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -10,10 +10,16 @@ module OpenAI input: T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Util::AnyHash, OpenAI::Models::ModerationTextInput)] + T::Array[ + T.any( + OpenAI::Models::ModerationImageURLInput, + OpenAI::Internal::Util::AnyHash, + OpenAI::Models::ModerationTextInput + ) + ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 4d305614..585d0875 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -24,7 +24,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -45,20 +45,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -68,7 +68,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, 
OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Responses::Response) end @@ -202,7 +202,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -223,20 +223,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -246,10 +246,10 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns( - OpenAI::Stream[ + OpenAI::Internal::Stream[ T.any( OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, @@ -404,7 +404,7 @@ module OpenAI params( response_id: String, include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Responses::Response) end @@ -422,7 +422,7 @@ module OpenAI sig do params( response_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .void end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 822e8463..92e7016f 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -13,10 +13,10 @@ module OpenAI include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns( - OpenAI::CursorPage[ + OpenAI::Internal::CursorPage[ T.any( OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, diff --git a/rbi/lib/openai/resources/uploads.rbi 
b/rbi/lib/openai/resources/uploads.rbi index 7677a58a..88ab62ad 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -31,7 +31,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end @@ -58,7 +58,7 @@ module OpenAI sig do params( upload_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end @@ -87,7 +87,7 @@ module OpenAI upload_id: String, part_ids: T::Array[String], md5: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Upload) end diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 14dffcb9..6d53630a 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -19,7 +19,7 @@ module OpenAI params( upload_id: String, data: T.any(IO, StringIO), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::Uploads::UploadPart) end diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index 4aef839d..74b9b2a3 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -14,14 +14,14 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Util::AnyHash), + expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -52,7 +52,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -67,10 +67,10 @@ module OpenAI sig do params( vector_store_id: String, - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Util::AnyHash)), + expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStore) end @@ -99,9 +99,9 @@ module OpenAI before: String, limit: 
Integer, order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::VectorStore]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore]) end def list( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -128,7 +128,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStoreDeleted) end @@ -145,13 +145,13 @@ module OpenAI params( vector_store_id: String, query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Util::AnyHash, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Util::AnyHash), + ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash), rewrite_query: T::Boolean, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse]) + .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse]) end def search( # The ID of the vector store to search. diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index ec63dcb2..40c66e29 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -12,10 +12,10 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -44,7 +44,7 @@ module OpenAI params( batch_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -63,7 +63,7 @@ module OpenAI params( batch_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -86,9 +86,9 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: 
T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list_files( # Path param: The ID of the file batch that the files belong to. diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 0cb2f053..fe472d5a 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -14,10 +14,10 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Util::AnyHash, + OpenAI::Internal::Util::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -46,7 +46,7 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -65,7 +65,7 @@ module OpenAI file_id: String, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -93,9 +93,9 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list( # The ID of the vector store that the files belong to. @@ -130,7 +130,7 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted) end @@ -148,9 +148,9 @@ module OpenAI params( file_id: String, vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) ) - .returns(OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse]) + .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStores::FileContentResponse]) end def content( # The ID of the file within the vector store. 
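NOTE: The signature churn above is mechanical: every public surface that previously referenced OpenAI::Util::AnyHash, OpenAI::Stream, OpenAI::Page, or OpenAI::CursorPage now points at the equivalent constants under OpenAI::Internal, and the file deletions that follow remove the old top-level RBI declarations in favour of their OpenAI::Internal counterparts. Runtime behaviour is unchanged; only the namespaces in the Sorbet signatures move. As a hedged illustration only (the client setup, model name, and IDs below are assumptions, not part of this patch), pagination and streaming call sites keep working because the same objects come back, now typed against the Internal namespace:

    # Hypothetical usage sketch; `client`, the model name, and the message
    # content are assumptions for illustration.
    require "openai"

    client = OpenAI::Client.new

    # `#list` is now typed as returning OpenAI::Internal::CursorPage[...];
    # auto_paging_each still walks across page boundaries transparently.
    page = client.fine_tuning.jobs.list(limit: 20)
    page.auto_paging_each { |job| puts job.id }

    # Streaming endpoints are now typed as OpenAI::Internal::Stream[...];
    # the stream is still a plain Enumerable of chunk events.
    stream = client.chat.completions.stream_raw(
      model: "gpt-4o",
      messages: [{role: :user, content: "hello"}]
    )
    stream.each { |chunk| print chunk.choices.first&.delta&.content }
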
diff --git a/rbi/lib/openai/stream.rbi b/rbi/lib/openai/stream.rbi deleted file mode 100644 index ba0c2f8a..00000000 --- a/rbi/lib/openai/stream.rbi +++ /dev/null @@ -1,15 +0,0 @@ -# typed: strong - -module OpenAI - class Stream - include OpenAI::Type::BaseStream - - Message = type_member(:in) { {fixed: OpenAI::Util::ServerSentEvent} } - Elem = type_member(:out) - - # @api private - sig { override.returns(T::Enumerable[Elem]) } - private def iterator - end - end -end diff --git a/rbi/lib/openai/transport/base_client.rbi b/rbi/lib/openai/transport/base_client.rbi deleted file mode 100644 index c8afd87e..00000000 --- a/rbi/lib/openai/transport/base_client.rbi +++ /dev/null @@ -1,205 +0,0 @@ -# typed: strong - -module OpenAI - module Transport - # @api private - class BaseClient - abstract! - - RequestComponentsShape = - T.type_alias do - { - method: Symbol, - path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), - body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::Type::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::Type::BaseStream[T.anything, OpenAI::BaseModel]]), - model: T.nilable(OpenAI::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - } - end - - RequestInputShape = - T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - max_retries: Integer, - timeout: Float - } - end - - # from whatwg fetch spec - MAX_REDIRECTS = 20 - - PLATFORM_HEADERS = T::Hash[String, String] - - class << self - # @api private - sig { params(req: OpenAI::Transport::BaseClient::RequestComponentsShape).void } - def validate!(req) - end - - # @api private - sig do - params( - status: Integer, - headers: T.any( - T::Hash[String, String], - Net::HTTPHeader - ) - ).returns(T::Boolean) - end - def should_retry?(status, headers:) - end - - # @api private - sig do - params( - request: OpenAI::Transport::BaseClient::RequestInputShape, - status: Integer, - response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) - ) - .returns(OpenAI::Transport::BaseClient::RequestInputShape) - end - def follow_redirect(request, status:, response_headers:) - end - - # @api private - sig do - params( - status: T.any(Integer, OpenAI::Errors::APIConnectionError), - stream: T.nilable(T::Enumerable[String]) - ) - .void - end - def reap_connection!(status, stream:) - end - end - - # @api private - sig { returns(OpenAI::Transport::PooledNetRequester) } - attr_accessor :requester - - # @api private - sig do - params( - base_url: String, - timeout: Float, - max_retries: Integer, - initial_retry_delay: Float, - max_retry_delay: Float, - headers: T::Hash[String, - T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))], - idempotency_header: T.nilable(String) - ) - .returns(T.attached_class) - end - def self.new( - base_url:, - timeout: 0.0, - max_retries: 0, - initial_retry_delay: 0.0, - max_retry_delay: 0.0, - headers: {}, - idempotency_header: nil - ) - end - - # @api private - sig { overridable.returns(T::Hash[String, String]) } - private def auth_headers - end - - # @api private - sig { returns(String) } - private def generate_idempotency_key - end - - # @api private - sig do - overridable - .params(req: 
OpenAI::Transport::BaseClient::RequestComponentsShape, opts: OpenAI::Util::AnyHash) - .returns(OpenAI::Transport::BaseClient::RequestInputShape) - end - private def build_request(req, opts) - end - - # @api private - sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } - private def retry_delay(headers, retry_count:) - end - - # @api private - sig do - params( - request: OpenAI::Transport::BaseClient::RequestInputShape, - redirect_count: Integer, - retry_count: Integer, - send_retry_header: T::Boolean - ) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) - end - private def send_request(request, redirect_count:, retry_count:, send_retry_header:) - end - - # Execute the request specified by `req`. This is the method that all resource - # methods call into. - sig do - params( - method: Symbol, - path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), - body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::Type::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::Type::BaseStream[T.anything, OpenAI::BaseModel]]), - model: T.nilable(OpenAI::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) - ) - .returns(T.anything) - end - def request( - method, - path, - query: {}, - headers: {}, - body: nil, - unwrap: nil, - page: nil, - stream: nil, - model: OpenAI::Unknown, - options: {} - ) - end - - sig { returns(String) } - def inspect - end - end - end -end diff --git a/rbi/lib/openai/transport/pooled_net_requester.rbi b/rbi/lib/openai/transport/pooled_net_requester.rbi deleted file mode 100644 index d90c1881..00000000 --- a/rbi/lib/openai/transport/pooled_net_requester.rbi +++ /dev/null @@ -1,64 +0,0 @@ -# typed: strong - -module OpenAI - module Transport - # @api private - class PooledNetRequester - RequestShape = - T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - deadline: Float - } - end - - # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 - KEEP_ALIVE_TIMEOUT = 30 - - class << self - # @api private - sig { params(url: URI::Generic).returns(Net::HTTP) } - def connect(url) - end - - # @api private - sig { params(conn: Net::HTTP, deadline: Float).void } - def calibrate_socket_timeout(conn, deadline) - end - - # @api private - sig do - params( - request: OpenAI::Transport::PooledNetRequester::RequestShape, - blk: T.proc.params(arg0: String).void - ) - .returns(Net::HTTPGenericRequest) - end - def build_request(request, &blk) - end - end - - # @api private - sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } - private def with_pool(url, deadline:, &blk) - end - - # @api private - sig do - params(request: OpenAI::Transport::PooledNetRequester::RequestShape) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) - end - def execute(request) - end - - # @api private - sig { params(size: Integer).returns(T.attached_class) } - def self.new(size: Etc.nprocessors) - end - end - end -end diff --git a/rbi/lib/openai/type.rbi b/rbi/lib/openai/type.rbi deleted file mode 100644 index a52a8d3b..00000000 --- a/rbi/lib/openai/type.rbi +++ /dev/null @@ -1,23 
+0,0 @@ -# typed: strong - -module OpenAI - Unknown = OpenAI::Type::Unknown - - BooleanModel = OpenAI::Type::BooleanModel - - Enum = OpenAI::Type::Enum - - Union = OpenAI::Type::Union - - ArrayOf = OpenAI::Type::ArrayOf - - HashOf = OpenAI::Type::HashOf - - BaseModel = OpenAI::Type::BaseModel - - RequestParameters = OpenAI::Type::RequestParameters - - # This module contains various type declarations. - module Type - end -end diff --git a/rbi/lib/openai/type/array_of.rbi b/rbi/lib/openai/type/array_of.rbi deleted file mode 100644 index 2f6ace65..00000000 --- a/rbi/lib/openai/type/array_of.rbi +++ /dev/null @@ -1,82 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - # - # Array of items of a given type. - class ArrayOf - include OpenAI::Type::Converter - - abstract! - final! - - Elem = type_member(:out) - - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .returns(T.attached_class) - end - def self.[](type_info, spec = {}) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Enumerable[Elem], T.anything), state: OpenAI::Type::Converter::State) - .returns(T.any(T::Array[T.anything], T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Enumerable[Elem], T.anything)) - .returns(T.any(T::Array[T.anything], T.anything)) - end - def dump(value) - end - - # @api private - sig(:final) { returns(Elem) } - protected def item_type - end - - # @api private - sig(:final) { returns(T::Boolean) } - protected def nilable? - end - - # @api private - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def initialize(type_info, spec = {}) - end - end - end -end diff --git a/rbi/lib/openai/type/base_model.rbi b/rbi/lib/openai/type/base_model.rbi deleted file mode 100644 index a35f37f5..00000000 --- a/rbi/lib/openai/type/base_model.rbi +++ /dev/null @@ -1,197 +0,0 @@ -# typed: strong - -module OpenAI - module Type - class BaseModel - extend OpenAI::Type::Converter - - abstract! - - KnownFieldShape = T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } - - class << self - # @api private - # - # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. 
- sig do - returns( - T::Hash[Symbol, - T.all( - OpenAI::BaseModel::KnownFieldShape, - {type_fn: T.proc.returns(OpenAI::Type::Converter::Input)} - )] - ) - end - def known_fields - end - - # @api private - sig do - returns( - T::Hash[Symbol, - T.all(OpenAI::BaseModel::KnownFieldShape, {type: OpenAI::Type::Converter::Input})] - ) - end - def fields - end - - # @api private - sig do - params( - name_sym: Symbol, - required: T::Boolean, - type_info: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), - api_name: Symbol, - nil?: T::Boolean - }, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - private def add_field(name_sym, required:, type_info:, spec:) - end - - # @api private - sig do - params( - name_sym: Symbol, - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def required(name_sym, type_info, spec = {}) - end - - # @api private - sig do - params( - name_sym: Symbol, - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def optional(name_sym, type_info, spec = {}) - end - - # @api private - # - # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them - sig { params(blk: T.proc.void).void } - private def request_only(&blk) - end - - # @api private - # - # `response_only` attributes are omitted from `.#dump` when making requests - sig { params(blk: T.proc.void).void } - private def response_only(&blk) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - class << self - # @api private - sig do - override - .params( - value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), - state: OpenAI::Type::Converter::State - ) - .returns(T.any(T.attached_class, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig do - override - .params(value: T.any(T.attached_class, T.anything)) - .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) - end - def dump(value) - end - end - - # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. - # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. - sig { params(key: Symbol).returns(T.nilable(T.anything)) } - def [](key) - end - - # Returns a Hash of the data underlying this object. O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - sig { overridable.returns(OpenAI::Util::AnyHash) } - def to_h - end - - # Returns a Hash of the data underlying this object. 
O(1) - # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. - # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. - sig { overridable.returns(OpenAI::Util::AnyHash) } - def to_hash - end - - sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Util::AnyHash) } - def deconstruct_keys(keys) - end - - sig { params(a: T.anything).returns(String) } - def to_json(*a) - end - - sig { params(a: T.anything).returns(String) } - def to_yaml(*a) - end - - # Create a new instance of a model. - sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } - def self.new(data = {}) - end - - sig { returns(String) } - def inspect - end - end - end -end diff --git a/rbi/lib/openai/type/base_page.rbi b/rbi/lib/openai/type/base_page.rbi deleted file mode 100644 index 03abe2c1..00000000 --- a/rbi/lib/openai/type/base_page.rbi +++ /dev/null @@ -1,38 +0,0 @@ -# typed: strong - -module OpenAI - module Type - module BasePage - Elem = type_member(:out) - - sig { overridable.returns(T::Boolean) } - def next_page? - end - - sig { overridable.returns(T.self_type) } - def next_page - end - - sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void } - def auto_paging_each(&blk) - end - - sig { returns(T::Enumerable[Elem]) } - def to_enum - end - - # @api private - sig do - params( - client: OpenAI::Transport::BaseClient, - req: OpenAI::Transport::BaseClient::RequestComponentsShape, - headers: T.any(T::Hash[String, String], Net::HTTPHeader), - page_data: T.anything - ) - .void - end - def initialize(client:, req:, headers:, page_data:) - end - end - end -end diff --git a/rbi/lib/openai/type/base_stream.rbi b/rbi/lib/openai/type/base_stream.rbi deleted file mode 100644 index 8fa7098c..00000000 --- a/rbi/lib/openai/type/base_stream.rbi +++ /dev/null @@ -1,43 +0,0 @@ -# typed: strong - -module OpenAI - module Type - module BaseStream - include Enumerable - - Message = type_member(:in) - Elem = type_member(:out) - - sig { void } - def close - end - - # @api private - sig { overridable.returns(T::Enumerable[Elem]) } - private def iterator - end - - sig { params(blk: T.proc.params(arg0: Elem).void).void } - def each(&blk) - end - - sig { returns(T::Enumerator[Elem]) } - def to_enum - end - - # @api private - sig do - params( - model: T.any(T::Class[T.anything], OpenAI::Type::Converter), - url: URI::Generic, - status: Integer, - response: Net::HTTPResponse, - stream: T::Enumerable[Message] - ) - .void - end - def initialize(model:, url:, status:, response:, stream:) - end - end - end -end diff --git a/rbi/lib/openai/type/boolean_model.rbi b/rbi/lib/openai/type/boolean_model.rbi deleted file mode 100644 index 96efcadd..00000000 --- a/rbi/lib/openai/type/boolean_model.rbi +++ /dev/null @@ -1,41 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - # - # Ruby has no Boolean class; this is something for models to refer to. - class BooleanModel - extend OpenAI::Type::Converter - - abstract! - final! 
- - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.==(other) - end - - class << self - # @api private - sig(:final) do - override - .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Type::Converter::State) - .returns(T.any(T::Boolean, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) - end - def dump(value) - end - end - end - end -end diff --git a/rbi/lib/openai/type/converter.rbi b/rbi/lib/openai/type/converter.rbi deleted file mode 100644 index 979159e8..00000000 --- a/rbi/lib/openai/type/converter.rbi +++ /dev/null @@ -1,99 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - module Converter - Input = T.type_alias { T.any(OpenAI::Type::Converter, T::Class[T.anything]) } - - State = - T.type_alias do - { - strictness: T.any(T::Boolean, Symbol), - exactness: {yes: Integer, no: Integer, maybe: Integer}, - branched: Integer - } - end - - # @api private - sig { overridable.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) } - def coerce(value, state:) - end - - # @api private - sig { overridable.params(value: T.anything).returns(T.anything) } - def dump(value) - end - - class << self - # @api private - sig do - params( - spec: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Type::Converter::Input)) - }, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ) - ) - .returns(T.proc.returns(T.anything)) - end - def self.type_info(spec) - end - - # @api private - # - # Based on `target`, transform `value` into `target`, to the extent possible: - # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered - # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode - sig do - params( - target: OpenAI::Type::Converter::Input, - value: T.anything, - state: OpenAI::Type::Converter::State - ) - .returns(T.anything) - end - def self.coerce( - target, - value, - # The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: - # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. - # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: - # - # - `yes`: the value can be converted to the target type with minimum coercion. - # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. - # - # See implementation below for more details. 
- state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - ) - end - - # @api private - sig { params(target: OpenAI::Type::Converter::Input, value: T.anything).returns(T.anything) } - def self.dump(target, value) - end - end - end - end -end diff --git a/rbi/lib/openai/type/enum.rbi b/rbi/lib/openai/type/enum.rbi deleted file mode 100644 index 7f6fdacd..00000000 --- a/rbi/lib/openai/type/enum.rbi +++ /dev/null @@ -1,58 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - # - # A value from among a specified list of options. OpenAPI enum values map to Ruby - # values in the SDK as follows: - # - # 1. boolean => true | false - # 2. integer => Integer - # 3. float => Float - # 4. string => Symbol - # - # We can therefore convert string values to Symbols, but can't convert other - # values safely. - module Enum - include OpenAI::Type::Converter - - # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } - def values - end - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - sig { void } - private def finalize! - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - # - # Unlike with primitives, `Enum` additionally validates that the value is a member - # of the enum. - sig do - override - .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Type::Converter::State) - .returns(T.any(Symbol, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value) - end - end - end -end diff --git a/rbi/lib/openai/type/hash_of.rbi b/rbi/lib/openai/type/hash_of.rbi deleted file mode 100644 index 66bd1160..00000000 --- a/rbi/lib/openai/type/hash_of.rbi +++ /dev/null @@ -1,86 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - # - # Hash of items of a given type. - class HashOf - include OpenAI::Type::Converter - - abstract! - final! - - Elem = type_member(:out) - - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .returns(T.attached_class) - end - def self.[](type_info, spec = {}) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig(:final) do - override - .params(value: T.any( - T::Hash[T.anything, T.anything], - T.anything - ), - state: OpenAI::Type::Converter::State) - .returns(T.any(OpenAI::Util::AnyHash, T.anything)) - end - def coerce(value, state:) - end - - # @api private - sig(:final) do - override - .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) - .returns(T.any(OpenAI::Util::AnyHash, T.anything)) - end - def dump(value) - end - - # @api private - sig(:final) { returns(Elem) } - protected def item_type - end - - # @api private - sig(:final) { returns(T::Boolean) } - protected def nilable? 
- end - - # @api private - sig(:final) do - params( - type_info: T.any( - OpenAI::Util::AnyHash, - T.proc.returns(OpenAI::Type::Converter::Input), - OpenAI::Type::Converter::Input - ), - spec: OpenAI::Util::AnyHash - ) - .void - end - def initialize(type_info, spec = {}) - end - end - end -end diff --git a/rbi/lib/openai/type/request_parameters.rbi b/rbi/lib/openai/type/request_parameters.rbi deleted file mode 100644 index 3d5a7c0d..00000000 --- a/rbi/lib/openai/type/request_parameters.rbi +++ /dev/null @@ -1,20 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - module RequestParameters - # Options to specify HTTP behaviour for this request. - sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Util::AnyHash)) } - attr_accessor :request_options - - # @api private - module Converter - # @api private - sig { params(params: T.anything).returns([T.anything, OpenAI::Util::AnyHash]) } - def dump_request(params) - end - end - end - end -end diff --git a/rbi/lib/openai/type/union.rbi b/rbi/lib/openai/type/union.rbi deleted file mode 100644 index bfdbfa87..00000000 --- a/rbi/lib/openai/type/union.rbi +++ /dev/null @@ -1,66 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - module Union - include OpenAI::Type::Converter - - # @api private - # - # All of the specified variant info for this union. - sig { returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Type::Converter::Input)]]) } - private def known_variants - end - - # @api private - sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } - protected def derefed_variants - end - - # All of the specified variants for this union. - sig { overridable.returns(T::Array[T.anything]) } - def variants - end - - # @api private - sig { params(property: Symbol).void } - private def discriminator(property) - end - - # @api private - sig do - params( - key: T.any(Symbol, OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything), - spec: T.any(OpenAI::Util::AnyHash, T.proc.returns(T.anything), T.anything) - ) - .void - end - private def variant(key, spec = nil) - end - - # @api private - sig { params(value: T.anything).returns(T.nilable(T.anything)) } - private def resolve_variant(value) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ===(other) - end - - sig { params(other: T.anything).returns(T::Boolean) } - def ==(other) - end - - # @api private - sig { override.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) } - def coerce(value, state:) - end - - # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end - end - end -end diff --git a/rbi/lib/openai/type/unknown.rbi b/rbi/lib/openai/type/unknown.rbi deleted file mode 100644 index 1df209be..00000000 --- a/rbi/lib/openai/type/unknown.rbi +++ /dev/null @@ -1,37 +0,0 @@ -# typed: strong - -module OpenAI - module Type - # @api private - # - # When we don't know what to expect for the value. - class Unknown - extend OpenAI::Type::Converter - - abstract! - final! 
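# --- Illustrative sketch of the `Converter` / `Enum` / `Union` machinery
# declared above. `Color`, `ShapeUnion`, and `Circle` are invented names;
# real enums and unions in this SDK are code-generated, so the definition
# style shown here is an assumption.

module Color
  extend OpenAI::Type::Enum

  RED = :red
  GREEN = :green

  # Assumption: generated enums expose their members via `values`.
  def self.values = [:red, :green]
end

# Coercion threads a `state` hash through every attempt; `exactness` tallies
# how closely values matched (`yes` / `maybe` / `no`, per the docs above) so
# unions can pick the best-fitting variant.
state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
Color.coerce("red", state: state)   # member strings map onto the Symbol values
Color.coerce("blue", state: state)  # non-members pass through and count against exactness

# A tagged union built with the private `discriminator` / `variant` DSL:
module ShapeUnion
  extend OpenAI::Type::Union

  discriminator :type
  variant :circle, -> { Circle }  # `Circle` is a placeholder model class
end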
- - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.===(other) - end - - sig(:final) { params(other: T.anything).returns(T::Boolean) } - def self.==(other) - end - - class << self - # @api private - sig(:final) do - override.params(value: T.anything, state: OpenAI::Type::Converter::State).returns(T.anything) - end - def coerce(value, state:) - end - - # @api private - sig(:final) { override.params(value: T.anything).returns(T.anything) } - def dump(value) - end - end - end - end -end diff --git a/rbi/lib/openai/util.rbi b/rbi/lib/openai/util.rbi deleted file mode 100644 index 57b25a19..00000000 --- a/rbi/lib/openai/util.rbi +++ /dev/null @@ -1,280 +0,0 @@ -# typed: strong - -module OpenAI - # @api private - module Util - # Due to the current WIP status of Shapes support in Sorbet, types referencing - # this alias might be refined in the future. - AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } - - # @api private - sig { returns(Float) } - def self.monotonic_secs - end - - class << self - # @api private - sig { returns(String) } - def arch - end - - # @api private - sig { returns(String) } - def os - end - end - - class << self - # @api private - sig { params(input: T.anything).returns(T::Boolean) } - def primitive?(input) - end - - # @api private - sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } - def coerce_boolean(input) - end - - # @api private - sig { params(input: T.anything).returns(T.nilable(T::Boolean)) } - def coerce_boolean!(input) - end - - # @api private - sig { params(input: T.anything).returns(T.any(Integer, T.anything)) } - def coerce_integer(input) - end - - # @api private - sig { params(input: T.anything).returns(T.any(Float, T.anything)) } - def coerce_float(input) - end - - # @api private - sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) } - def coerce_hash(input) - end - end - - # Use this to indicate that a value should be explicitly removed from a data - # structure when using `OpenAI::Util.deep_merge`. - # - # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging - # `{a: 1}` and `{}` would produce `{a: 1}`. - OMIT = T.let(T.anything, T.anything) - - class << self - # @api private - sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) } - private def deep_merge_lr(lhs, rhs, concat: false) - end - - # @api private - # - # Recursively merge one hash with another. If the values at a given key are not - # both hashes, just take the new value. - sig do - params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean) - .returns(T.anything) - end - def deep_merge( - *values, - # the value to return if no values are provided. - sentinel: nil, - # whether to merge sequences by concatenation. 
- concat: false - ) - end - - # @api private - sig do - params( - data: T.any(OpenAI::Util::AnyHash, T::Array[T.anything], T.anything), - pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])), - sentinel: T.nilable(T.anything), - blk: T.nilable(T.proc.returns(T.anything)) - ) - .returns(T.nilable(T.anything)) - end - def dig(data, pick, sentinel = nil, &blk) - end - end - - class << self - # @api private - sig { params(uri: URI::Generic).returns(String) } - def uri_origin(uri) - end - - # @api private - sig { params(path: T.any(String, T::Array[String])).returns(String) } - def interpolate_path(path) - end - end - - class << self - # @api private - sig { params(query: T.nilable(String)).returns(T::Hash[String, T::Array[String]]) } - def decode_query(query) - end - - # @api private - sig do - params(query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) - .returns(T.nilable(String)) - end - def encode_query(query) - end - end - - ParsedUriShape = - T.type_alias do - { - scheme: T.nilable(String), - host: T.nilable(String), - port: T.nilable(Integer), - path: T.nilable(String), - query: T::Hash[String, T::Array[String]] - } - end - - class << self - # @api private - sig { params(url: T.any(URI::Generic, String)).returns(OpenAI::Util::ParsedUriShape) } - def parse_uri(url) - end - - # @api private - sig { params(parsed: OpenAI::Util::ParsedUriShape).returns(URI::Generic) } - def unparse_uri(parsed) - end - - # @api private - sig do - params(lhs: OpenAI::Util::ParsedUriShape, rhs: OpenAI::Util::ParsedUriShape).returns(URI::Generic) - end - def join_parsed_uri(lhs, rhs) - end - end - - class << self - # @api private - sig do - params( - headers: T::Hash[String, - T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))] - ) - .returns(T::Hash[String, String]) - end - def normalized_headers(*headers) - end - end - - # @api private - # - # An adapter that satisfies the IO interface required by `::IO.copy_stream` - class ReadIOAdapter - # @api private - sig { params(max_len: T.nilable(Integer)).returns(String) } - private def read_enum(max_len) - end - - # @api private - sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) } - def read(max_len = nil, out_string = nil) - end - - # @api private - sig do - params( - stream: T.any(String, IO, StringIO, T::Enumerable[String]), - blk: T.proc.params(arg0: String).void - ) - .returns(T.attached_class) - end - def self.new(stream, &blk) - end - end - - class << self - sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) } - def writable_enum(&blk) - end - end - - class << self - # @api private - sig do - params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void - end - private def write_multipart_chunk(y, boundary:, key:, val:) - end - - # @api private - sig { params(body: T.anything).returns([String, T::Enumerable[String]]) } - private def encode_multipart_streaming(body) - end - - # @api private - sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) } - def encode_content(headers, body) - end - - # @api private - sig do - params( - headers: T.any(T::Hash[String, String], Net::HTTPHeader), - stream: T::Enumerable[String], - suppress_error: T::Boolean - ) - .returns(T.anything) - end - def decode_content(headers, stream:, suppress_error: false) - end - end - - class << self - # @api private - # - # 
https://doc.rust-lang.org/std/iter/trait.FusedIterator.html - sig do - params(enum: T::Enumerable[T.anything], external: T::Boolean, close: T.proc.void) - .returns(T::Enumerable[T.anything]) - end - def fused_enum(enum, external: false, &close) - end - - # @api private - sig { params(enum: T.nilable(T::Enumerable[T.anything])).void } - def close_fused!(enum) - end - - # @api private - sig do - params(enum: T.nilable(T::Enumerable[T.anything]), blk: T.proc.params(arg0: Enumerator::Yielder).void) - .returns(T::Enumerable[T.anything]) - end - def chain_fused(enum, &blk) - end - end - - ServerSentEvent = - T.type_alias do - {event: T.nilable(String), data: T.nilable(String), id: T.nilable(String), retry: T.nilable(Integer)} - end - - class << self - # @api private - sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) } - def decode_lines(enum) - end - - # @api private - # - # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream - sig { params(lines: T::Enumerable[String]).returns(OpenAI::Util::ServerSentEvent) } - def decode_sse(lines) - end - end - end -end diff --git a/sig/openai/aliases.rbs b/sig/openai/aliases.rbs new file mode 100644 index 00000000..b99ae58a --- /dev/null +++ b/sig/openai/aliases.rbs @@ -0,0 +1,19 @@ +module OpenAI + class Unknown = OpenAI::Internal::Type::Unknown + + class BooleanModel = OpenAI::Internal::Type::BooleanModel + + module Enum = OpenAI::Internal::Type::Enum + + module Union = OpenAI::Internal::Type::Union + + class ArrayOf = OpenAI::Internal::Type::ArrayOf + + class HashOf = OpenAI::Internal::Type::HashOf + + class BaseModel = OpenAI::Internal::Type::BaseModel + + type request_parameters = OpenAI::Internal::Type::request_parameters + + module RequestParameters = OpenAI::Internal::Type::RequestParameters +end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 6b95a137..7b991e2c 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -1,5 +1,5 @@ module OpenAI - class Client < OpenAI::Transport::BaseClient + class Client < OpenAI::Internal::Transport::BaseClient DEFAULT_MAX_RETRIES: 2 DEFAULT_TIMEOUT_IN_SECONDS: Float diff --git a/sig/openai/cursor_page.rbs b/sig/openai/cursor_page.rbs deleted file mode 100644 index 3b8ed120..00000000 --- a/sig/openai/cursor_page.rbs +++ /dev/null @@ -1,11 +0,0 @@ -module OpenAI - class CursorPage[Elem] - include OpenAI::Type::BasePage[Elem] - - attr_accessor data: ::Array[Elem]? - - attr_accessor has_more: bool - - def inspect: -> String - end -end diff --git a/sig/openai/internal/cursor_page.rbs b/sig/openai/internal/cursor_page.rbs new file mode 100644 index 00000000..3a79ad6a --- /dev/null +++ b/sig/openai/internal/cursor_page.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Internal + class CursorPage[Elem] + include OpenAI::Internal::Type::BasePage[Elem] + + attr_accessor data: ::Array[Elem]? + + attr_accessor has_more: bool + + def inspect: -> String + end + end +end diff --git a/sig/openai/internal/page.rbs b/sig/openai/internal/page.rbs new file mode 100644 index 00000000..9cad7eed --- /dev/null +++ b/sig/openai/internal/page.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Internal + class Page[Elem] + include OpenAI::Internal::Type::BasePage[Elem] + + attr_accessor data: ::Array[Elem]? 
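# --- Worked examples for the `Util.deep_merge` and SSE helpers whose
# signatures appear above, shown against the pre-rename `OpenAI::Util`
# namespace; `raw_chunks` is an illustrative placeholder stream.

OpenAI::Util.deep_merge({a: 1}, {})                       # => {a: 1}
OpenAI::Util.deep_merge({a: 1}, {a: OpenAI::Util::OMIT})  # => {}, explicit removal
OpenAI::Util.deep_merge({a: [1]}, {a: [2]}, concat: true) # => {a: [1, 2]}

# `decode_lines` re-chunks a byte stream into lines; `decode_sse` folds those
# lines into {event:, data:, id:, retry:} records per the WHATWG algorithm
# linked above.
events = OpenAI::Util.decode_sse(OpenAI::Util.decode_lines(raw_chunks))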
+ + attr_accessor object: String + + def inspect: -> String + end + end +end diff --git a/sig/openai/internal/stream.rbs b/sig/openai/internal/stream.rbs new file mode 100644 index 00000000..b723e314 --- /dev/null +++ b/sig/openai/internal/stream.rbs @@ -0,0 +1,9 @@ +module OpenAI + module Internal + class Stream[Elem] + include OpenAI::Internal::Type::BaseStream[OpenAI::Internal::Util::server_sent_event, Elem] + + private def iterator: -> Enumerable[Elem] + end + end +end diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs new file mode 100644 index 00000000..abdc7471 --- /dev/null +++ b/sig/openai/internal/transport/base_client.rbs @@ -0,0 +1,114 @@ +module OpenAI + module Internal + module Transport + class BaseClient + type request_components = + { + method: Symbol, + path: String | ::Array[String], + query: ::Hash[String, (::Array[String] | String)?]?, + headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?]?, + body: top?, + unwrap: Symbol?, + page: Class?, + stream: Class?, + model: OpenAI::Internal::Type::Converter::input?, + options: OpenAI::request_opts? + } + + type request_input = + { + method: Symbol, + url: URI::Generic, + headers: ::Hash[String, String], + body: top, + max_retries: Integer, + timeout: Float + } + + MAX_REDIRECTS: 20 + + PLATFORM_HEADERS: ::Hash[String, String] + + def self.validate!: ( + OpenAI::Internal::Transport::BaseClient::request_components req + ) -> void + + def self.should_retry?: ( + Integer status, + headers: ::Hash[String, String] + ) -> bool + + def self.follow_redirect: ( + OpenAI::Internal::Transport::BaseClient::request_input request, + status: Integer, + response_headers: ::Hash[String, String] + ) -> OpenAI::Internal::Transport::BaseClient::request_input + + def self.reap_connection!: ( + Integer | OpenAI::Errors::APIConnectionError status, + stream: Enumerable[String]? + ) -> void + + # @api private + attr_accessor requester: OpenAI::Internal::Transport::PooledNetRequester + + def initialize: ( + base_url: String, + ?timeout: Float, + ?max_retries: Integer, + ?initial_retry_delay: Float, + ?max_retry_delay: Float, + ?headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?], + ?idempotency_header: String? + ) -> void + + private def auth_headers: -> ::Hash[String, String] + + private def generate_idempotency_key: -> String + + private def build_request: ( + OpenAI::Internal::Transport::BaseClient::request_components req, + OpenAI::request_options opts + ) -> OpenAI::Internal::Transport::BaseClient::request_input + + private def retry_delay: ( + ::Hash[String, String] headers, + retry_count: Integer + ) -> Float + + private def send_request: ( + OpenAI::Internal::Transport::BaseClient::request_input request, + redirect_count: Integer, + retry_count: Integer, + send_retry_header: bool + ) -> [Integer, top, Enumerable[String]] + + def request: + ( + Symbol method, + String | ::Array[String] path, + ?query: ::Hash[String, (::Array[String] | String)?]?, + ?headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?]?, + ?body: top?, + ?unwrap: Symbol?, + ?page: Class?, + ?stream: Class?, + ?model: OpenAI::Internal::Type::Converter::input?, + ?options: OpenAI::request_opts? 
+ ) -> top + | ( + OpenAI::Internal::Transport::BaseClient::request_components req + ) -> top + + def inspect: -> String + end + end + end +end diff --git a/sig/openai/internal/transport/pooled_net_requester.rbs b/sig/openai/internal/transport/pooled_net_requester.rbs new file mode 100644 index 00000000..d12703cb --- /dev/null +++ b/sig/openai/internal/transport/pooled_net_requester.rbs @@ -0,0 +1,41 @@ +module OpenAI + module Internal + module Transport + class PooledNetRequester + type request = + { + method: Symbol, + url: URI::Generic, + headers: ::Hash[String, String], + body: top, + deadline: Float + } + + KEEP_ALIVE_TIMEOUT: 30 + + def self.connect: (URI::Generic url) -> top + + def self.calibrate_socket_timeout: (top conn, Float deadline) -> void + + def self.build_request: ( + OpenAI::Internal::Transport::PooledNetRequester::request request + ) { + (String arg0) -> void + } -> top + + private def with_pool: ( + URI::Generic url, + deadline: Float + ) { + (top arg0) -> void + } -> void + + def execute: ( + OpenAI::Internal::Transport::PooledNetRequester::request request + ) -> [Integer, top, Enumerable[String]] + + def initialize: (?size: Integer) -> void + end + end + end +end diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs new file mode 100644 index 00000000..617458ed --- /dev/null +++ b/sig/openai/internal/type/array_of.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Internal + module Type + class ArrayOf[Elem] + include OpenAI::Internal::Type::Converter + + def self.[]: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> instance + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + Enumerable[Elem] | top value, + state: OpenAI::Internal::Type::Converter::state + ) -> (::Array[top] | top) + + def dump: (Enumerable[Elem] | top value) -> (::Array[top] | top) + + def item_type: -> Elem + + def nilable?: -> bool + + def initialize: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + end + end + end +end diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs new file mode 100644 index 00000000..fd068625 --- /dev/null +++ b/sig/openai/internal/type/base_model.rbs @@ -0,0 +1,79 @@ +module OpenAI + module Internal + module Type + class BaseModel + extend OpenAI::Internal::Type::Converter + + type known_field = + { mode: (:coerce | :dump)?, required: bool, nilable: bool } + + def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field + & { type_fn: (^-> OpenAI::Internal::Type::Converter::input) })] + + def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field + & { type: OpenAI::Internal::Type::Converter::input })] + + private def self.add_field: ( + Symbol name_sym, + required: bool, + type_info: { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Internal::Type::Converter::input?, + union: ^-> OpenAI::Internal::Type::Converter::input?, + api_name: Symbol + } + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input, + spec: ::Hash[Symbol, top] + ) -> void + + def self.required: ( + Symbol name_sym, + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + + def 
self.optional: ( + Symbol name_sym, + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + + private def self.request_only: { -> void } -> void + + private def self.response_only: { -> void } -> void + + def self.==: (top other) -> bool + + def ==: (top other) -> bool + + def self.coerce: ( + OpenAI::BaseModel | ::Hash[top, top] | top value, + state: OpenAI::Internal::Type::Converter::state + ) -> (instance | top) + + def self.dump: (instance | top value) -> (::Hash[top, top] | top) + + def []: (Symbol key) -> top? + + def to_h: -> ::Hash[Symbol, top] + + alias to_hash to_h + + def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] + + def to_json: (*top a) -> String + + def to_yaml: (*top a) -> String + + def initialize: (?::Hash[Symbol, top] | self data) -> void + + def inspect: -> String + end + end + end +end diff --git a/sig/openai/internal/type/base_page.rbs b/sig/openai/internal/type/base_page.rbs new file mode 100644 index 00000000..216a4e0b --- /dev/null +++ b/sig/openai/internal/type/base_page.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Internal + module Type + module BasePage[Elem] + def next_page?: -> bool + + def next_page: -> self + + def auto_paging_each: { (Elem arg0) -> void } -> void + + def to_enum: -> Enumerable[Elem] + + alias enum_for to_enum + + def initialize: ( + client: OpenAI::Internal::Transport::BaseClient, + req: OpenAI::Internal::Transport::BaseClient::request_components, + headers: ::Hash[String, String], + page_data: top + ) -> void + end + end + end +end diff --git a/sig/openai/internal/type/base_stream.rbs b/sig/openai/internal/type/base_stream.rbs new file mode 100644 index 00000000..902e7720 --- /dev/null +++ b/sig/openai/internal/type/base_stream.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Internal + module Type + module BaseStream[Message, Elem] + include Enumerable[Elem] + + def close: -> void + + private def iterator: -> Enumerable[Elem] + + def each: { (Elem arg0) -> void } -> void + + def to_enum: -> Enumerator[Elem] + + alias enum_for to_enum + + def initialize: ( + model: Class | OpenAI::Internal::Type::Converter, + url: URI::Generic, + status: Integer, + response: top, + stream: Enumerable[Message] + ) -> void + end + end + end +end diff --git a/sig/openai/internal/type/boolean_model.rbs b/sig/openai/internal/type/boolean_model.rbs new file mode 100644 index 00000000..3bfa2a59 --- /dev/null +++ b/sig/openai/internal/type/boolean_model.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Internal + module Type + class BooleanModel + extend OpenAI::Internal::Type::Converter + + def self.===: (top other) -> bool + + def self.==: (top other) -> bool + + def self.coerce: ( + bool | top value, + state: OpenAI::Internal::Type::Converter::state + ) -> (bool | top) + + def self.dump: (bool | top value) -> (bool | top) + end + end + end +end diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs new file mode 100644 index 00000000..56921a18 --- /dev/null +++ b/sig/openai/internal/type/converter.rbs @@ -0,0 +1,44 @@ +module OpenAI + module Internal + module Type + module Converter + type input = OpenAI::Internal::Type::Converter | Class + + type state = + { + strictness: bool | :strong, + exactness: { yes: Integer, no: Integer, maybe: Integer }, + branched: Integer + } + + def coerce: ( + top value, + state: OpenAI::Internal::Type::Converter::state + ) -> top + + def dump: (top value) -> top + + 
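# --- A sketch of the `BaseModel` field DSL typed above; `Widget` and its
# fields are invented for illustration.

class Widget < OpenAI::Internal::Type::BaseModel
  required :id, String
  optional :label, String
end

w = Widget.new(id: "w_123")
w.to_h
# => {id: "w_123"}; only fields that were ever set appear, even if set to nil

# `deconstruct_keys` makes models pattern-matchable:
case w
in {id: String => id}
  puts id
end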
def self.type_info: ( + { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Internal::Type::Converter::input?, + union: ^-> OpenAI::Internal::Type::Converter::input? + } + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input spec + ) -> (^-> top) + + def self.coerce: ( + OpenAI::Internal::Type::Converter::input target, + top value, + ?state: OpenAI::Internal::Type::Converter::state + ) -> top + + def self.dump: ( + OpenAI::Internal::Type::Converter::input target, + top value + ) -> top + end + end + end +end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs new file mode 100644 index 00000000..0a35ac11 --- /dev/null +++ b/sig/openai/internal/type/enum.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Internal + module Type + module Enum + include OpenAI::Internal::Type::Converter + + def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] + + private def self.finalize!: -> void + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + String | Symbol | top value, + state: OpenAI::Internal::Type::Converter::state + ) -> (Symbol | top) + + def dump: (Symbol | top value) -> (Symbol | top) + end + end + end +end diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs new file mode 100644 index 00000000..79743eb5 --- /dev/null +++ b/sig/openai/internal/type/hash_of.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Internal + module Type + class HashOf[Elem] + include OpenAI::Internal::Type::Converter + + def self.[]: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> instance + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + ::Hash[top, top] | top value, + state: OpenAI::Internal::Type::Converter::state + ) -> (::Hash[Symbol, top] | top) + + def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) + + def item_type: -> Elem + + def nilable?: -> bool + + def initialize: ( + ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + ?::Hash[Symbol, top] spec + ) -> void + end + end + end +end diff --git a/sig/openai/internal/type/request_parameters.rbs b/sig/openai/internal/type/request_parameters.rbs new file mode 100644 index 00000000..da025b59 --- /dev/null +++ b/sig/openai/internal/type/request_parameters.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Internal + module Type + type request_parameters = { request_options: OpenAI::request_opts } + + module RequestParameters + attr_accessor request_options: OpenAI::request_opts + + module Converter + def dump_request: (top params) -> [top, ::Hash[Symbol, top]] + end + end + end + end +end diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs new file mode 100644 index 00000000..9d749433 --- /dev/null +++ b/sig/openai/internal/type/union.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Internal + module Type + module Union + include OpenAI::Internal::Type::Converter + + private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Internal::Type::Converter::input)]] + + def self.derefed_variants: -> ::Array[[Symbol?, top]] + + def self.variants: -> ::Array[top] + + private def self.discriminator: (Symbol property) -> void + + private def self.variant: ( + Symbol + | ::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | 
OpenAI::Internal::Type::Converter::input key, + ?::Hash[Symbol, top] + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input spec + ) -> void + + private def self.resolve_variant: ( + top value + ) -> OpenAI::Internal::Type::Converter::input? + + def ===: (top other) -> bool + + def ==: (top other) -> bool + + def coerce: ( + top value, + state: OpenAI::Internal::Type::Converter::state + ) -> top + + def dump: (top value) -> top + end + end + end +end diff --git a/sig/openai/internal/type/unknown.rbs b/sig/openai/internal/type/unknown.rbs new file mode 100644 index 00000000..571a5cd7 --- /dev/null +++ b/sig/openai/internal/type/unknown.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Internal + module Type + class Unknown + extend OpenAI::Internal::Type::Converter + + def self.===: (top other) -> bool + + def self.==: (top other) -> bool + + def self.coerce: ( + top value, + state: OpenAI::Internal::Type::Converter::state + ) -> top + + def self.dump: (top value) -> top + end + end + end +end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs new file mode 100644 index 00000000..21bf07da --- /dev/null +++ b/sig/openai/internal/util.rbs @@ -0,0 +1,141 @@ +module OpenAI + module Internal + module Util + def self?.monotonic_secs: -> Float + + def self?.arch: -> String + + def self?.os: -> String + + def self?.primitive?: (top input) -> bool + + def self?.coerce_boolean: (top input) -> (bool | top) + + def self?.coerce_boolean!: (top input) -> bool? + + def self?.coerce_integer: (top input) -> (Integer | top) + + def self?.coerce_float: (top input) -> (Float | top) + + def self?.coerce_hash: (top input) -> (::Hash[top, top] | top) + + OMIT: top + + def self?.deep_merge_lr: (top lhs, top rhs, ?concat: bool) -> top + + def self?.deep_merge: ( + *::Array[top] values, + ?sentinel: top?, + ?concat: bool + ) -> top + + def self?.dig: ( + ::Hash[Symbol, top] | ::Array[top] | top data, + (Symbol | Integer | ::Array[(Symbol | Integer)])? pick, + ?top? sentinel + ) { + -> top? + } -> top? + + def self?.uri_origin: (URI::Generic uri) -> String + + def self?.interpolate_path: (String | ::Array[String] path) -> String + + def self?.decode_query: (String? query) -> ::Hash[String, ::Array[String]] + + def self?.encode_query: ( + ::Hash[String, (::Array[String] | String)?]? query + ) -> String? + + type parsed_uri = + { + scheme: String?, + host: String?, + port: Integer?, + path: String?, + query: ::Hash[String, ::Array[String]] + } + + def self?.parse_uri: ( + URI::Generic | String url + ) -> OpenAI::Internal::Util::parsed_uri + + def self?.unparse_uri: ( + OpenAI::Internal::Util::parsed_uri parsed + ) -> URI::Generic + + def self?.join_parsed_uri: ( + OpenAI::Internal::Util::parsed_uri lhs, + OpenAI::Internal::Util::parsed_uri rhs + ) -> URI::Generic + + def self?.normalized_headers: ( + *::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?] headers + ) -> ::Hash[String, String] + + class ReadIOAdapter + private def read_enum: (Integer? max_len) -> String + + def read: (?Integer? max_len, ?String? out_string) -> String? 
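# --- Round-trip sketch for the query and `dig` helpers typed above; the
# concrete return values shown are assumptions consistent with the declared
# shapes.

q = OpenAI::Internal::Util.decode_query("a=1&a=2&b=3")
# => {"a" => ["1", "2"], "b" => ["3"]}
OpenAI::Internal::Util.encode_query(q)
# => "a=1&a=2&b=3"

# `dig` walks nested hashes/arrays by a Symbol/Integer path, falling back to
# the sentinel (or block) when the path is absent:
OpenAI::Internal::Util.dig({a: [{b: 1}]}, [:a, 0, :b])  # => 1
OpenAI::Internal::Util.dig({}, [:missing], :fallback)   # => :fallback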
+ + def initialize: ( + String | IO | StringIO | Enumerable[String] stream + ) { + (String arg0) -> void + } -> void + end + + def self?.writable_enum: { + (Enumerator::Yielder y) -> void + } -> Enumerable[String] + + def self?.write_multipart_chunk: ( + Enumerator::Yielder y, + boundary: String, + key: Symbol | String, + val: top + ) -> void + + def self?.encode_multipart_streaming: ( + top body + ) -> [String, Enumerable[String]] + + def self?.encode_content: ( + ::Hash[String, String] headers, + top body + ) -> top + + def self?.decode_content: ( + ::Hash[String, String] headers, + stream: Enumerable[String], + ?suppress_error: bool + ) -> top + + def self?.fused_enum: ( + Enumerable[top] enum, + ?external: bool + ) { + -> void + } -> Enumerable[top] + + def self?.close_fused!: (Enumerable[top]? enum) -> void + + def self?.chain_fused: ( + Enumerable[top]? enum + ) { + (Enumerator::Yielder arg0) -> void + } -> Enumerable[top] + + type server_sent_event = + { event: String?, data: String?, id: String?, retry: Integer? } + + def self?.decode_lines: (Enumerable[String] enum) -> Enumerable[String] + + def self?.decode_sse: ( + Enumerable[String] lines + ) -> OpenAI::Internal::Util::server_sent_event + end + end +end diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 06dd0109..1b50fd38 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -10,11 +10,11 @@ module OpenAI response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, speed: Float } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class SpeechCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor input: String diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 5abf1747..b46cde10 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -12,11 +12,11 @@ module OpenAI temperature: Float, timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class TranscriptionCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor file: IO | StringIO diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 3be91750..800cd86d 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -9,11 +9,11 @@ module OpenAI response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, temperature: Float } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class TranslationCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor file: IO | StringIO 
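# --- Streaming-upload sketch tying `ReadIOAdapter` and `writable_enum`
# (typed above) to params such as `file: IO | StringIO`. The chunk source,
# block body, and file name are illustrative assumptions.

chunks = ["part-1\n", "part-2\n"].each  # any Enumerable[String] source
adapter = OpenAI::Internal::Util::ReadIOAdapter.new(chunks) do |chunk|
  # invoked with each piece of data as it is actually read downstream
end
File.open("upload.tmp", "wb") { |f| IO.copy_stream(adapter, f) }

# `writable_enum` goes the other way: it hands the block an
# Enumerator::Yielder, so a writer-style producer becomes a lazily consumed
# Enumerable[String].
body = OpenAI::Internal::Util.writable_enum do |y|
  y << "streamed request body"
end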
diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs index 887d319a..fdb4fc66 100644 --- a/sig/openai/models/batch_cancel_params.rbs +++ b/sig/openai/models/batch_cancel_params.rbs @@ -1,10 +1,10 @@ module OpenAI module Models - type batch_cancel_params = { } & OpenAI::request_parameters + type batch_cancel_params = { } & OpenAI::Internal::Type::request_parameters class BatchCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 1220fcdc..19fe80ff 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -7,11 +7,11 @@ module OpenAI input_file_id: String, metadata: OpenAI::Models::metadata? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class BatchCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor completion_window: OpenAI::Models::BatchCreateParams::completion_window diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index 7dfa318e..3ca61260 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models type batch_list_params = - { after: String, limit: Integer } & OpenAI::request_parameters + { after: String, limit: Integer } + & OpenAI::Internal::Type::request_parameters class BatchListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index c6dcd6ae..4b17656d 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type batch_retrieve_params = { } & OpenAI::request_parameters + type batch_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class BatchRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index e1cf13a2..10663a9c 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -15,11 +15,11 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::assistant_tool], top_p: Float? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class AssistantCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor model: OpenAI::Models::Beta::AssistantCreateParams::model diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index cbab3d6b..297d2fdb 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Beta - type assistant_delete_params = { } & OpenAI::request_parameters + type assistant_delete_params = + { } & OpenAI::Internal::Type::request_parameters class AssistantDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 42c3c994..c3040fc8 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -8,11 +8,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class AssistantListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index 8f5e9d53..d39312a3 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Beta - type assistant_retrieve_params = { } & OpenAI::request_parameters + type assistant_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class AssistantRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index eec6b058..f84dde39 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -15,11 +15,11 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::assistant_tool], top_p: Float? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class AssistantUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor description: String? 
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 28e49216..4ca7db99 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -19,11 +19,11 @@ module OpenAI top_p: Float?, truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ThreadCreateAndRunParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor assistant_id: String diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index ba9059a3..6caf45d4 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -7,11 +7,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ThreadCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]? diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index 9440978b..27afc166 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Beta - type thread_delete_params = { } & OpenAI::request_parameters + type thread_delete_params = + { } & OpenAI::Internal::Type::request_parameters class ThreadDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs index e259fb2d..4db38c39 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Beta - type thread_retrieve_params = { } & OpenAI::request_parameters + type thread_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class ThreadRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index f725515f..83b3ecc8 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -6,11 +6,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ThreadUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor metadata: OpenAI::Models::metadata? diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 0c2147d8..0091df98 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -9,11 +9,11 @@ module OpenAI attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, metadata: OpenAI::Models::metadata? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class MessageCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor content: OpenAI::Models::Beta::Threads::MessageCreateParams::content diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index fb823dfa..f400a51c 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -3,11 +3,11 @@ module OpenAI module Beta module Threads type message_delete_params = - { thread_id: String } & OpenAI::request_parameters + { thread_id: String } & OpenAI::Internal::Type::request_parameters class MessageDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index 66fd88db..9b331b46 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -10,11 +10,11 @@ module OpenAI order: OpenAI::Models::Beta::Threads::MessageListParams::order, run_id: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class MessageListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index 29cfa699..58434310 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -3,11 +3,11 @@ module OpenAI module Beta module Threads type message_retrieve_params = - { thread_id: String } & OpenAI::request_parameters + { thread_id: String } & OpenAI::Internal::Type::request_parameters class MessageRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index 5ccea8ed..7d14cb66 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -4,11 +4,11 @@ module OpenAI module Threads type message_update_params = { thread_id: String, metadata: OpenAI::Models::metadata? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class MessageUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 93ad8eb6..5460b5ad 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -3,11 +3,11 @@ module OpenAI module Beta module Threads type run_cancel_params = - { thread_id: String } & OpenAI::request_parameters + { thread_id: String } & OpenAI::Internal::Type::request_parameters class RunCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index 3ed348c1..360db47b 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -22,11 +22,11 @@ module OpenAI top_p: Float?, truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class RunCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor assistant_id: String diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index 15e3c05c..525ad66f 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -9,11 +9,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class RunListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index ab99f4b4..a76a9131 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -3,11 +3,11 @@ module OpenAI module Beta module Threads type run_retrieve_params = - { thread_id: String } & OpenAI::request_parameters + { thread_id: String } & OpenAI::Internal::Type::request_parameters class RunRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 6619a57c..48e05b57 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -7,11 +7,11 @@ module OpenAI thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class RunSubmitToolOutputsParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index 2e8e547d..6e46178a 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -4,11 +4,11 @@ module OpenAI module Threads type run_update_params = { thread_id: String, metadata: OpenAI::Models::metadata? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class RunUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index ee7ffc2d..04b7cf53 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -12,11 +12,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class StepListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index 08140619..06b14341 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -9,11 +9,11 @@ module OpenAI run_id: String, include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include] } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class StepRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor thread_id: String diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 19e7aa49..1515d670 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -34,11 +34,11 @@ module OpenAI user: String, web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param] diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs index b5abb861..e4206b21 100644 --- a/sig/openai/models/chat/completion_delete_params.rbs +++ b/sig/openai/models/chat/completion_delete_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Chat - type completion_delete_params = { } & OpenAI::request_parameters + type completion_delete_params = + { } & OpenAI::Internal::Type::request_parameters class CompletionDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git 
a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 533ec07c..7f171502 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -9,11 +9,11 @@ module OpenAI model: String, order: OpenAI::Models::Chat::CompletionListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class CompletionListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs index 8b07c987..3c64e3fb 100644 --- a/sig/openai/models/chat/completion_retrieve_params.rbs +++ b/sig/openai/models/chat/completion_retrieve_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Chat - type completion_retrieve_params = { } & OpenAI::request_parameters + type completion_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class CompletionRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs index 31894b01..dbfdd87d 100644 --- a/sig/openai/models/chat/completion_update_params.rbs +++ b/sig/openai/models/chat/completion_update_params.rbs @@ -2,11 +2,12 @@ module OpenAI module Models module Chat type completion_update_params = - { metadata: OpenAI::Models::metadata? } & OpenAI::request_parameters + { metadata: OpenAI::Models::metadata? } + & OpenAI::Internal::Type::request_parameters class CompletionUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor metadata: OpenAI::Models::metadata? diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 08d092c8..1d5f30a2 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -8,11 +8,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class MessageListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index 99d092d2..3bbe8da3 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -20,11 +20,11 @@ module OpenAI top_p: Float?, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class CompletionCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor model: OpenAI::Models::CompletionCreateParams::model diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 31ad63d5..1c00db8b 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -8,11 +8,11 @@ module OpenAI encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class EmbeddingCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor input: OpenAI::Models::EmbeddingCreateParams::input diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index e875dcbb..c9adfe43 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -1,10 +1,10 @@ module OpenAI module Models - type file_content_params = { } & OpenAI::request_parameters + type file_content_params = { } & OpenAI::Internal::Type::request_parameters class FileContentParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index fb34d5b0..f08a1328 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models type file_create_params = { file: (IO | StringIO), purpose: OpenAI::Models::file_purpose } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor file: IO | StringIO diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index 88f704ae..993174ef 100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -1,10 +1,10 @@ module OpenAI module Models - type file_delete_params = { } & OpenAI::request_parameters + type file_delete_params = { } & OpenAI::Internal::Type::request_parameters class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index d4b2cc4c..89b0392a 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -7,11 +7,11 @@ module OpenAI order: OpenAI::Models::FileListParams::order, purpose: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index 28a27398..e81b21d8 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type file_retrieve_params = { } & OpenAI::request_parameters + type file_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index adc3eb00..097c08c0 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -1,11 +1,11 @@ module OpenAI module Models module FineTuning - type job_cancel_params = { } & OpenAI::request_parameters + type job_cancel_params = { } & OpenAI::Internal::Type::request_parameters class JobCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index faedfe50..fd4f66bb 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -13,11 +13,11 @@ module OpenAI suffix: String?, validation_file: String? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class JobCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor model: OpenAI::Models::FineTuning::JobCreateParams::model diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index 8159d7f4..2bd10d63 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -2,11 +2,12 @@ module OpenAI module Models module FineTuning type job_list_events_params = - { after: String, limit: Integer } & OpenAI::request_parameters + { after: String, limit: Integer } + & OpenAI::Internal::Type::request_parameters class JobListEventsParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index fe78d5ad..a09f5864 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -3,11 +3,11 @@ module OpenAI module FineTuning type job_list_params = { after: String, limit: Integer, metadata: ::Hash[Symbol, String]? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class JobListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 5306f28a..466850ae 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module FineTuning - type job_retrieve_params = { } & OpenAI::request_parameters + type job_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class JobRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index e5377848..dc5aa5a0 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -3,11 +3,12 @@ module OpenAI module FineTuning module Jobs type checkpoint_list_params = - { after: String, limit: Integer } & OpenAI::request_parameters + { after: String, limit: Integer } + & OpenAI::Internal::Type::request_parameters class CheckpointListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 0fcca83b..ebbc6451 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -9,11 +9,11 @@ module OpenAI size: OpenAI::Models::ImageCreateVariationParams::size?, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ImageCreateVariationParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor image: IO | StringIO diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 3da9c277..616d4a05 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -11,11 +11,11 @@ module OpenAI size: OpenAI::Models::ImageEditParams::size?, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ImageEditParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor image: IO | StringIO diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index ca757dea..5af7ffe3 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -11,11 +11,11 @@ module OpenAI style: OpenAI::Models::ImageGenerateParams::style?, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ImageGenerateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include 
OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor prompt: String diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index a4ac1b91..2f5cfd40 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -1,10 +1,10 @@ module OpenAI module Models - type model_delete_params = { } & OpenAI::request_parameters + type model_delete_params = { } & OpenAI::Internal::Type::request_parameters class ModelDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 15ed4c2f..4511e755 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -1,10 +1,10 @@ module OpenAI module Models - type model_list_params = { } & OpenAI::request_parameters + type model_list_params = { } & OpenAI::Internal::Type::request_parameters class ModelListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index 5293679d..dfcd6daa 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type model_retrieve_params = { } & OpenAI::request_parameters + type model_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class ModelRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index 7067fe66..833c5ff4 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -5,11 +5,11 @@ module OpenAI input: OpenAI::Models::ModerationCreateParams::input, model: OpenAI::Models::ModerationCreateParams::model } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ModerationCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor input: OpenAI::Models::ModerationCreateParams::input diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index d4b3d62b..18327460 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -9,11 +9,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::order } - & OpenAI::request_parameters + & 
OpenAI::Internal::Type::request_parameters class InputItemListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index c5fb416a..d4c12563 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -21,11 +21,11 @@ module OpenAI truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, user: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ResponseCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor input: OpenAI::Models::Responses::ResponseCreateParams::input diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index 48a494f2..ff2fd40f 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models module Responses - type response_delete_params = { } & OpenAI::request_parameters + type response_delete_params = + { } & OpenAI::Internal::Type::request_parameters class ResponseDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 76a68008..d91ca0ed 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -3,11 +3,11 @@ module OpenAI module Responses type response_retrieve_params = { include: ::Array[OpenAI::Models::Responses::response_includable] } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class ResponseRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? 
diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index df6f7b0c..f7d03f06 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type upload_cancel_params = { } & OpenAI::request_parameters + type upload_cancel_params = + { } & OpenAI::Internal::Type::request_parameters class UploadCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index eb3cbcf1..50d22376 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -1,11 +1,12 @@ module OpenAI module Models type upload_complete_params = - { part_ids: ::Array[String], :md5 => String } & OpenAI::request_parameters + { part_ids: ::Array[String], :md5 => String } + & OpenAI::Internal::Type::request_parameters class UploadCompleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor part_ids: ::Array[String] diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 431fc356..94f2cc72 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -7,11 +7,11 @@ module OpenAI mime_type: String, purpose: OpenAI::Models::file_purpose } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class UploadCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor bytes: Integer diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index cc960f1c..a36ae697 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module Uploads type part_create_params = - { data: (IO | StringIO) } & OpenAI::request_parameters + { data: (IO | StringIO) } & OpenAI::Internal::Type::request_parameters class PartCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor data: IO | StringIO diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index b0bdecc8..3a387fa9 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -8,11 +8,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class VectorStoreCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + 
include OpenAI::Internal::Type::RequestParameters attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index 65b00bf3..b2f2436c 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type vector_store_delete_params = { } & OpenAI::request_parameters + type vector_store_delete_params = + { } & OpenAI::Internal::Type::request_parameters class VectorStoreDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index 932f1b34..45da51d8 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -7,11 +7,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::VectorStoreListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class VectorStoreListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index 39d412f5..7f875a4d 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type vector_store_retrieve_params = { } & OpenAI::request_parameters + type vector_store_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters class VectorStoreRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 3c87c94f..10fb00d0 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -8,11 +8,11 @@ module OpenAI ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, rewrite_query: bool } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class VectorStoreSearchParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor query: OpenAI::Models::VectorStoreSearchParams::query diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index 734ce7fd..a25ec5db 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -6,11 +6,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String? 
} - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class VectorStoreUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter? diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index 7400302e..eb2088fd 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module VectorStores type file_batch_cancel_params = - { vector_store_id: String } & OpenAI::request_parameters + { vector_store_id: String } & OpenAI::Internal::Type::request_parameters class FileBatchCancelParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index ec58551e..e8e0ddca 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -7,11 +7,11 @@ module OpenAI attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileBatchCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor file_ids: ::Array[String] diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index b19f61ad..e9f2d88b 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -10,11 +10,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileBatchListFilesParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index 2adbda41..a9efa644 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module VectorStores type file_batch_retrieve_params = - { vector_store_id: String } & OpenAI::request_parameters + { vector_store_id: String } & OpenAI::Internal::Type::request_parameters class FileBatchRetrieveParams < OpenAI::BaseModel - extend 
OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index 608c3d89..c7f2dfc8 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module VectorStores type file_content_params = - { vector_store_id: String } & OpenAI::request_parameters + { vector_store_id: String } & OpenAI::Internal::Type::request_parameters class FileContentParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 82145f08..93f6ca13 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -7,11 +7,11 @@ module OpenAI attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileCreateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor file_id: String diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index c1b36f86..486986b3 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module VectorStores type file_delete_params = - { vector_store_id: String } & OpenAI::request_parameters + { vector_store_id: String } & OpenAI::Internal::Type::request_parameters class FileDeleteParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 88f6eb73..6501043b 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -9,11 +9,11 @@ module OpenAI limit: Integer, order: OpenAI::Models::VectorStores::FileListParams::order } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileListParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_reader after: String? 
diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index dafdc50e..f9c4eac7 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -2,11 +2,11 @@ module OpenAI module Models module VectorStores type file_retrieve_params = - { vector_store_id: String } & OpenAI::request_parameters + { vector_store_id: String } & OpenAI::Internal::Type::request_parameters class FileRetrieveParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index dfa006a4..3155411a 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -6,11 +6,11 @@ module OpenAI vector_store_id: String, attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? } - & OpenAI::request_parameters + & OpenAI::Internal::Type::request_parameters class FileUpdateParams < OpenAI::BaseModel - extend OpenAI::Type::RequestParameters::Converter - include OpenAI::RequestParameters + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters attr_accessor vector_store_id: String diff --git a/sig/openai/page.rbs b/sig/openai/page.rbs deleted file mode 100644 index 48cd508b..00000000 --- a/sig/openai/page.rbs +++ /dev/null @@ -1,11 +0,0 @@ -module OpenAI - class Page[Elem] - include OpenAI::Type::BasePage[Elem] - - attr_accessor data: ::Array[Elem]? 
- - attr_accessor object: String - - def inspect: -> String - end -end diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 6f8acd12..e1577759 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -24,7 +24,7 @@ module OpenAI ?temperature: Float, ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Audio::transcription_stream_event] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Audio::transcription_stream_event] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/batches.rbs b/sig/openai/resources/batches.rbs index 37dbb707..338c2671 100644 --- a/sig/openai/resources/batches.rbs +++ b/sig/openai/resources/batches.rbs @@ -18,7 +18,7 @@ module OpenAI ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Batch] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Batch] def cancel: ( String batch_id, diff --git a/sig/openai/resources/beta/assistants.rbs b/sig/openai/resources/beta/assistants.rbs index 84ea0af9..fa36413e 100644 --- a/sig/openai/resources/beta/assistants.rbs +++ b/sig/openai/resources/beta/assistants.rbs @@ -44,7 +44,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Beta::AssistantListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Assistant] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant] def delete: ( String assistant_id, diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 7ee78295..50be0608 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -66,7 +66,7 @@ module OpenAI ?top_p: Float?, ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/beta/threads/messages.rbs b/sig/openai/resources/beta/threads/messages.rbs index f5421f58..560d8eb7 100644 --- a/sig/openai/resources/beta/threads/messages.rbs +++ b/sig/openai/resources/beta/threads/messages.rbs @@ -33,7 +33,7 @@ module OpenAI ?order: OpenAI::Models::Beta::Threads::MessageListParams::order, ?run_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Message] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message] def delete: ( String message_id, diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 0b24cbc7..5a1bb9fd 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -47,7 +47,7 @@ module OpenAI ?top_p: Float?, ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] def retrieve: ( String run_id, @@ -69,7 +69,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Beta::Threads::RunListParams::order, 
?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Run] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run] def cancel: ( String run_id, @@ -89,7 +89,7 @@ module OpenAI thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Beta::assistant_stream_event] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/beta/threads/runs/steps.rbs b/sig/openai/resources/beta/threads/runs/steps.rbs index 262179eb..7cdcbff4 100644 --- a/sig/openai/resources/beta/threads/runs/steps.rbs +++ b/sig/openai/resources/beta/threads/runs/steps.rbs @@ -21,7 +21,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index b699bc30..0e8a88ef 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -70,7 +70,7 @@ module OpenAI ?user: String, ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Chat::ChatCompletionChunk] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk] def retrieve: ( String completion_id, @@ -90,7 +90,7 @@ module OpenAI ?model: String, ?order: OpenAI::Models::Chat::CompletionListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletion] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion] def delete: ( String completion_id, diff --git a/sig/openai/resources/chat/completions/messages.rbs b/sig/openai/resources/chat/completions/messages.rbs index f2fc47a1..6af4409f 100644 --- a/sig/openai/resources/chat/completions/messages.rbs +++ b/sig/openai/resources/chat/completions/messages.rbs @@ -9,7 +9,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Chat::Completions::MessageListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index b48f77df..b786a2e1 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -41,7 +41,7 @@ module OpenAI ?top_p: Float?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Completion] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Completion] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index 62988179..16e295dd 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -18,7 +18,7 @@ module OpenAI ?order: OpenAI::Models::FileListParams::order, ?purpose: String, ?request_options: OpenAI::request_opts - ) -> 
OpenAI::CursorPage[OpenAI::Models::FileObject] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FileObject] def delete: ( String file_id, diff --git a/sig/openai/resources/fine_tuning/jobs.rbs b/sig/openai/resources/fine_tuning/jobs.rbs index 339d5f85..4264a1e9 100644 --- a/sig/openai/resources/fine_tuning/jobs.rbs +++ b/sig/openai/resources/fine_tuning/jobs.rbs @@ -27,7 +27,7 @@ module OpenAI ?limit: Integer, ?metadata: ::Hash[Symbol, String]?, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] def cancel: ( String fine_tuning_job_id, @@ -39,7 +39,7 @@ module OpenAI ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs index 9912513d..45bec94e 100644 --- a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs +++ b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs @@ -8,7 +8,7 @@ module OpenAI ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/models.rbs b/sig/openai/resources/models.rbs index cb91fa06..042eaed1 100644 --- a/sig/openai/resources/models.rbs +++ b/sig/openai/resources/models.rbs @@ -8,7 +8,7 @@ module OpenAI def list: ( ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::Model] + ) -> OpenAI::Internal::Page[OpenAI::Models::Model] def delete: ( String model, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 51041a3b..2dfcf44d 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -43,7 +43,7 @@ module OpenAI ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Stream[OpenAI::Models::Responses::response_stream_event] + ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] def retrieve: ( String response_id, diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index 031857d2..9fcece7a 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -10,7 +10,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Responses::InputItemListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::Responses::response_item] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Responses::response_item] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index 707af947..c60ff766 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -33,7 +33,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStoreListParams::order, ?request_options: OpenAI::request_opts - ) -> 
OpenAI::CursorPage[OpenAI::Models::VectorStore] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore] def delete: ( String vector_store_id, @@ -48,7 +48,7 @@ module OpenAI ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, ?rewrite_query: bool, ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::VectorStoreSearchResponse] + ) -> OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores/file_batches.rbs b/sig/openai/resources/vector_stores/file_batches.rbs index 5985e792..39606ed6 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -31,7 +31,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index b0e11ad0..3669f6a7 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -31,7 +31,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStores::FileListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] def delete: ( String file_id, @@ -43,7 +43,7 @@ module OpenAI String file_id, vector_store_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Page[OpenAI::Models::VectorStores::FileContentResponse] + ) -> OpenAI::Internal::Page[OpenAI::Models::VectorStores::FileContentResponse] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/stream.rbs b/sig/openai/stream.rbs deleted file mode 100644 index a566119e..00000000 --- a/sig/openai/stream.rbs +++ /dev/null @@ -1,7 +0,0 @@ -module OpenAI - class Stream[Elem] - include OpenAI::Type::BaseStream[OpenAI::Util::server_sent_event, Elem] - - private def iterator: -> Enumerable[Elem] - end -end diff --git a/sig/openai/transport/base_client.rbs b/sig/openai/transport/base_client.rbs deleted file mode 100644 index 724f235c..00000000 --- a/sig/openai/transport/base_client.rbs +++ /dev/null @@ -1,110 +0,0 @@ -module OpenAI - module Transport - class BaseClient - type request_components = - { - method: Symbol, - path: String | ::Array[String], - query: ::Hash[String, (::Array[String] | String)?]?, - headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?]?, - body: top?, - unwrap: Symbol?, - page: Class?, - stream: Class?, - model: OpenAI::Type::Converter::input?, - options: OpenAI::request_opts? 
- } - - type request_input = - { - method: Symbol, - url: URI::Generic, - headers: ::Hash[String, String], - body: top, - max_retries: Integer, - timeout: Float - } - - MAX_REDIRECTS: 20 - - PLATFORM_HEADERS: ::Hash[String, String] - - def self.validate!: ( - OpenAI::Transport::BaseClient::request_components req - ) -> void - - def self.should_retry?: ( - Integer status, - headers: ::Hash[String, String] - ) -> bool - - def self.follow_redirect: ( - OpenAI::Transport::BaseClient::request_input request, - status: Integer, - response_headers: ::Hash[String, String] - ) -> OpenAI::Transport::BaseClient::request_input - - def self.reap_connection!: ( - Integer | OpenAI::Errors::APIConnectionError status, - stream: Enumerable[String]? - ) -> void - - # @api private - attr_accessor requester: OpenAI::Transport::PooledNetRequester - - def initialize: ( - base_url: String, - ?timeout: Float, - ?max_retries: Integer, - ?initial_retry_delay: Float, - ?max_retry_delay: Float, - ?headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?], - ?idempotency_header: String? - ) -> void - - private def auth_headers: -> ::Hash[String, String] - - private def generate_idempotency_key: -> String - - private def build_request: ( - OpenAI::Transport::BaseClient::request_components req, - OpenAI::request_options opts - ) -> OpenAI::Transport::BaseClient::request_input - - private def retry_delay: ( - ::Hash[String, String] headers, - retry_count: Integer - ) -> Float - - private def send_request: ( - OpenAI::Transport::BaseClient::request_input request, - redirect_count: Integer, - retry_count: Integer, - send_retry_header: bool - ) -> [Integer, top, Enumerable[String]] - - def request: - ( - Symbol method, - String | ::Array[String] path, - ?query: ::Hash[String, (::Array[String] | String)?]?, - ?headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?]?, - ?body: top?, - ?unwrap: Symbol?, - ?page: Class?, - ?stream: Class?, - ?model: OpenAI::Type::Converter::input?, - ?options: OpenAI::request_opts? 
- ) -> top - | (OpenAI::Transport::BaseClient::request_components req) -> top - - def inspect: -> String - end - end -end diff --git a/sig/openai/transport/pooled_net_requester.rbs b/sig/openai/transport/pooled_net_requester.rbs deleted file mode 100644 index d29e1f7e..00000000 --- a/sig/openai/transport/pooled_net_requester.rbs +++ /dev/null @@ -1,39 +0,0 @@ -module OpenAI - module Transport - class PooledNetRequester - type request = - { - method: Symbol, - url: URI::Generic, - headers: ::Hash[String, String], - body: top, - deadline: Float - } - - KEEP_ALIVE_TIMEOUT: 30 - - def self.connect: (URI::Generic url) -> top - - def self.calibrate_socket_timeout: (top conn, Float deadline) -> void - - def self.build_request: ( - OpenAI::Transport::PooledNetRequester::request request - ) { - (String arg0) -> void - } -> top - - private def with_pool: ( - URI::Generic url, - deadline: Float - ) { - (top arg0) -> void - } -> void - - def execute: ( - OpenAI::Transport::PooledNetRequester::request request - ) -> [Integer, top, Enumerable[String]] - - def initialize: (?size: Integer) -> void - end - end -end diff --git a/sig/openai/type.rbs b/sig/openai/type.rbs deleted file mode 100644 index 61ed895e..00000000 --- a/sig/openai/type.rbs +++ /dev/null @@ -1,22 +0,0 @@ -module OpenAI - class Unknown = OpenAI::Type::Unknown - - class BooleanModel = OpenAI::Type::BooleanModel - - module Enum = OpenAI::Type::Enum - - module Union = OpenAI::Type::Union - - class ArrayOf = OpenAI::Type::ArrayOf - - class HashOf = OpenAI::Type::HashOf - - class BaseModel = OpenAI::Type::BaseModel - - type request_parameters = OpenAI::Type::request_parameters - - module RequestParameters = OpenAI::Type::RequestParameters - - module Type - end -end diff --git a/sig/openai/type/array_of.rbs b/sig/openai/type/array_of.rbs deleted file mode 100644 index 7e8cb67c..00000000 --- a/sig/openai/type/array_of.rbs +++ /dev/null @@ -1,36 +0,0 @@ -module OpenAI - module Type - class ArrayOf[Elem] - include OpenAI::Type::Converter - - def self.[]: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> instance - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - Enumerable[Elem] | top value, - state: OpenAI::Type::Converter::state - ) -> (::Array[top] | top) - - def dump: (Enumerable[Elem] | top value) -> (::Array[top] | top) - - def item_type: -> Elem - - def nilable?: -> bool - - def initialize: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - end - end -end diff --git a/sig/openai/type/base_model.rbs b/sig/openai/type/base_model.rbs deleted file mode 100644 index cf3041f5..00000000 --- a/sig/openai/type/base_model.rbs +++ /dev/null @@ -1,77 +0,0 @@ -module OpenAI - module Type - class BaseModel - extend OpenAI::Type::Converter - - type known_field = - { mode: (:coerce | :dump)?, required: bool, nilable: bool } - - def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field - & { type_fn: (^-> OpenAI::Type::Converter::input) })] - - def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field - & { type: OpenAI::Type::Converter::input })] - - private def self.add_field: ( - Symbol name_sym, - required: bool, - type_info: { - const: (nil | bool | Integer | Float | Symbol)?, - enum: ^-> OpenAI::Type::Converter::input?, - union: ^-> OpenAI::Type::Converter::input?, - api_name: Symbol - } - | ^-> 
OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input, - spec: ::Hash[Symbol, top] - ) -> void - - def self.required: ( - Symbol name_sym, - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - - def self.optional: ( - Symbol name_sym, - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - - private def self.request_only: { -> void } -> void - - private def self.response_only: { -> void } -> void - - def self.==: (top other) -> bool - - def ==: (top other) -> bool - - def self.coerce: ( - OpenAI::BaseModel | ::Hash[top, top] | top value, - state: OpenAI::Type::Converter::state - ) -> (instance | top) - - def self.dump: (instance | top value) -> (::Hash[top, top] | top) - - def []: (Symbol key) -> top? - - def to_h: -> ::Hash[Symbol, top] - - alias to_hash to_h - - def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] - - def to_json: (*top a) -> String - - def to_yaml: (*top a) -> String - - def initialize: (?::Hash[Symbol, top] | self data) -> void - - def inspect: -> String - end - end -end diff --git a/sig/openai/type/base_page.rbs b/sig/openai/type/base_page.rbs deleted file mode 100644 index db9e8a49..00000000 --- a/sig/openai/type/base_page.rbs +++ /dev/null @@ -1,22 +0,0 @@ -module OpenAI - module Type - module BasePage[Elem] - def next_page?: -> bool - - def next_page: -> self - - def auto_paging_each: { (Elem arg0) -> void } -> void - - def to_enum: -> Enumerable[Elem] - - alias enum_for to_enum - - def initialize: ( - client: OpenAI::Transport::BaseClient, - req: OpenAI::Transport::BaseClient::request_components, - headers: ::Hash[String, String], - page_data: top - ) -> void - end - end -end diff --git a/sig/openai/type/base_stream.rbs b/sig/openai/type/base_stream.rbs deleted file mode 100644 index de541575..00000000 --- a/sig/openai/type/base_stream.rbs +++ /dev/null @@ -1,25 +0,0 @@ -module OpenAI - module Type - module BaseStream[Message, Elem] - include Enumerable[Elem] - - def close: -> void - - private def iterator: -> Enumerable[Elem] - - def each: { (Elem arg0) -> void } -> void - - def to_enum: -> Enumerator[Elem] - - alias enum_for to_enum - - def initialize: ( - model: Class | OpenAI::Type::Converter, - url: URI::Generic, - status: Integer, - response: top, - stream: Enumerable[Message] - ) -> void - end - end -end diff --git a/sig/openai/type/boolean_model.rbs b/sig/openai/type/boolean_model.rbs deleted file mode 100644 index 00ed0300..00000000 --- a/sig/openai/type/boolean_model.rbs +++ /dev/null @@ -1,18 +0,0 @@ -module OpenAI - module Type - class BooleanModel - extend OpenAI::Type::Converter - - def self.===: (top other) -> bool - - def self.==: (top other) -> bool - - def self.coerce: ( - bool | top value, - state: OpenAI::Type::Converter::state - ) -> (bool | top) - - def self.dump: (bool | top value) -> (bool | top) - end - end -end diff --git a/sig/openai/type/converter.rbs b/sig/openai/type/converter.rbs deleted file mode 100644 index 3785d489..00000000 --- a/sig/openai/type/converter.rbs +++ /dev/null @@ -1,36 +0,0 @@ -module OpenAI - module Type - module Converter - type input = OpenAI::Type::Converter | Class - - type state = - { - strictness: bool | :strong, - exactness: { yes: Integer, no: Integer, maybe: Integer }, - branched: Integer - } - - def coerce: (top value, state: OpenAI::Type::Converter::state) -> top - - def dump: (top value) -> 
top - - def self.type_info: ( - { - const: (nil | bool | Integer | Float | Symbol)?, - enum: ^-> OpenAI::Type::Converter::input?, - union: ^-> OpenAI::Type::Converter::input? - } - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input spec - ) -> (^-> top) - - def self.coerce: ( - OpenAI::Type::Converter::input target, - top value, - ?state: OpenAI::Type::Converter::state - ) -> top - - def self.dump: (OpenAI::Type::Converter::input target, top value) -> top - end - end -end diff --git a/sig/openai/type/enum.rbs b/sig/openai/type/enum.rbs deleted file mode 100644 index fb65841f..00000000 --- a/sig/openai/type/enum.rbs +++ /dev/null @@ -1,22 +0,0 @@ -module OpenAI - module Type - module Enum - include OpenAI::Type::Converter - - def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] - - private def self.finalize!: -> void - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - String | Symbol | top value, - state: OpenAI::Type::Converter::state - ) -> (Symbol | top) - - def dump: (Symbol | top value) -> (Symbol | top) - end - end -end diff --git a/sig/openai/type/hash_of.rbs b/sig/openai/type/hash_of.rbs deleted file mode 100644 index dea00a66..00000000 --- a/sig/openai/type/hash_of.rbs +++ /dev/null @@ -1,36 +0,0 @@ -module OpenAI - module Type - class HashOf[Elem] - include OpenAI::Type::Converter - - def self.[]: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> instance - - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: ( - ::Hash[top, top] | top value, - state: OpenAI::Type::Converter::state - ) -> (::Hash[Symbol, top] | top) - - def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) - - def item_type: -> Elem - - def nilable?: -> bool - - def initialize: ( - ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input type_info, - ?::Hash[Symbol, top] spec - ) -> void - end - end -end diff --git a/sig/openai/type/request_parameters.rbs b/sig/openai/type/request_parameters.rbs deleted file mode 100644 index b92ee909..00000000 --- a/sig/openai/type/request_parameters.rbs +++ /dev/null @@ -1,13 +0,0 @@ -module OpenAI - module Type - type request_parameters = { request_options: OpenAI::request_opts } - - module RequestParameters - attr_accessor request_options: OpenAI::request_opts - - module Converter - def dump_request: (top params) -> [top, ::Hash[Symbol, top]] - end - end - end -end diff --git a/sig/openai/type/union.rbs b/sig/openai/type/union.rbs deleted file mode 100644 index 372932ce..00000000 --- a/sig/openai/type/union.rbs +++ /dev/null @@ -1,37 +0,0 @@ -module OpenAI - module Type - module Union - include OpenAI::Type::Converter - - private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Type::Converter::input)]] - - def self.derefed_variants: -> ::Array[[Symbol?, top]] - - def self.variants: -> ::Array[top] - - private def self.discriminator: (Symbol property) -> void - - private def self.variant: ( - Symbol - | ::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input key, - ?::Hash[Symbol, top] - | ^-> OpenAI::Type::Converter::input - | OpenAI::Type::Converter::input spec - ) -> void - - private def self.resolve_variant: ( - top value - ) -> OpenAI::Type::Converter::input? 
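The signatures being deleted here describe the SDK's type-conversion framework, which this patch series relocates under OpenAI::Internal::Type. Below is a minimal sketch of how the two module-level entry points are driven, mirroring the call shape used by the renamed tests further down; the Integer target and string input are illustrative assumptions, and the snippet presumes a gem version in which the relocation has landed.

  require "openai"

  # `state` carries the coercion strictness flag plus an exactness tally,
  # matching the `Converter::state` record type in the signatures above.
  state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}

  # Per `type input = OpenAI::Type::Converter | Class` above, a plain class is
  # a valid coercion target; the exact result depends on the strictness rules.
  coerced = OpenAI::Internal::Type::Converter.coerce(Integer, "1", state: state)

  # `dump` maps a Ruby value back toward its wire representation.
  OpenAI::Internal::Type::Converter.dump(Integer, coerced)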
- - def ===: (top other) -> bool - - def ==: (top other) -> bool - - def coerce: (top value, state: OpenAI::Type::Converter::state) -> top - - def dump: (top value) -> top - end - end -end diff --git a/sig/openai/type/unknown.rbs b/sig/openai/type/unknown.rbs deleted file mode 100644 index b3139352..00000000 --- a/sig/openai/type/unknown.rbs +++ /dev/null @@ -1,15 +0,0 @@ -module OpenAI - module Type - class Unknown - extend OpenAI::Type::Converter - - def self.===: (top other) -> bool - - def self.==: (top other) -> bool - - def self.coerce: (top value, state: OpenAI::Type::Converter::state) -> top - - def self.dump: (top value) -> top - end - end -end diff --git a/sig/openai/util.rbs b/sig/openai/util.rbs deleted file mode 100644 index 1a93a427..00000000 --- a/sig/openai/util.rbs +++ /dev/null @@ -1,132 +0,0 @@ -module OpenAI - module Util - def self?.monotonic_secs: -> Float - - def self?.arch: -> String - - def self?.os: -> String - - def self?.primitive?: (top input) -> bool - - def self?.coerce_boolean: (top input) -> (bool | top) - - def self?.coerce_boolean!: (top input) -> bool? - - def self?.coerce_integer: (top input) -> (Integer | top) - - def self?.coerce_float: (top input) -> (Float | top) - - def self?.coerce_hash: (top input) -> (::Hash[top, top] | top) - - OMIT: top - - def self?.deep_merge_lr: (top lhs, top rhs, ?concat: bool) -> top - - def self?.deep_merge: ( - *::Array[top] values, - ?sentinel: top?, - ?concat: bool - ) -> top - - def self?.dig: ( - ::Hash[Symbol, top] | ::Array[top] | top data, - (Symbol | Integer | ::Array[(Symbol | Integer)])? pick, - ?top? sentinel - ) { - -> top? - } -> top? - - def self?.uri_origin: (URI::Generic uri) -> String - - def self?.interpolate_path: (String | ::Array[String] path) -> String - - def self?.decode_query: (String? query) -> ::Hash[String, ::Array[String]] - - def self?.encode_query: ( - ::Hash[String, (::Array[String] | String)?]? query - ) -> String? - - type parsed_uri = - { - scheme: String?, - host: String?, - port: Integer?, - path: String?, - query: ::Hash[String, ::Array[String]] - } - - def self?.parse_uri: (URI::Generic | String url) -> OpenAI::Util::parsed_uri - - def self?.unparse_uri: (OpenAI::Util::parsed_uri parsed) -> URI::Generic - - def self?.join_parsed_uri: ( - OpenAI::Util::parsed_uri lhs, - OpenAI::Util::parsed_uri rhs - ) -> URI::Generic - - def self?.normalized_headers: ( - *::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?] headers - ) -> ::Hash[String, String] - - class ReadIOAdapter - private def read_enum: (Integer? max_len) -> String - - def read: (?Integer? max_len, ?String? out_string) -> String? - - def initialize: ( - String | IO | StringIO | Enumerable[String] stream - ) { - (String arg0) -> void - } -> void - end - - def self?.writable_enum: { - (Enumerator::Yielder y) -> void - } -> Enumerable[String] - - def self?.write_multipart_chunk: ( - Enumerator::Yielder y, - boundary: String, - key: Symbol | String, - val: top - ) -> void - - def self?.encode_multipart_streaming: ( - top body - ) -> [String, Enumerable[String]] - - def self?.encode_content: (::Hash[String, String] headers, top body) -> top - - def self?.decode_content: ( - ::Hash[String, String] headers, - stream: Enumerable[String], - ?suppress_error: bool - ) -> top - - def self?.fused_enum: ( - Enumerable[top] enum, - ?external: bool - ) { - -> void - } -> Enumerable[top] - - def self?.close_fused!: (Enumerable[top]? enum) -> void - - def self?.chain_fused: ( - Enumerable[top]? 
enum - ) { - (Enumerator::Yielder arg0) -> void - } -> Enumerable[top] - - type server_sent_event = - { event: String?, data: String?, id: String?, retry: Integer? } - - def self?.decode_lines: (Enumerable[String] enum) -> Enumerable[String] - - def self?.decode_sse: ( - Enumerable[String] lines - ) -> OpenAI::Util::server_sent_event - end -end diff --git a/test/openai/base_model_test.rb b/test/openai/internal/type/base_model_test.rb similarity index 94% rename from test/openai/base_model_test.rb rename to test/openai/internal/type/base_model_test.rb index 737cb8fa..25c1322c 100644 --- a/test/openai/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require_relative "test_helper" +require_relative "../../test_helper" class OpenAI::Test::PrimitiveModelTest < Minitest::Test A = OpenAI::ArrayOf[-> { Integer }] @@ -32,7 +32,7 @@ def test_typing converters.each do |conv| assert_pattern do - conv => OpenAI::Type::Converter + conv => OpenAI::Internal::Type::Converter end end end @@ -68,7 +68,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness end end @@ -99,7 +99,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Type::Converter.dump(target, input) => ^expect + OpenAI::Internal::Type::Converter.dump(target, input) => ^expect end end end @@ -118,7 +118,7 @@ def test_coerce_errors target, input = _1 state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_raises(_2) do - OpenAI::Type::Converter.coerce(target, input, state: state) + OpenAI::Internal::Type::Converter.coerce(target, input, state: state) end end end @@ -181,7 +181,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness end end @@ -209,7 +209,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Type::Converter.dump(target, input) => ^expect + OpenAI::Internal::Type::Converter.dump(target, input) => ^expect end end end @@ -255,7 +255,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - OpenAI::Type::Converter.coerce(target, input, state: state) => ^expect + OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness end end @@ -335,7 +335,7 @@ def test_coerce exactness, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - coerced = OpenAI::Type::Converter.coerce(target, input, state: state) + coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) if coerced.is_a?(OpenAI::BaseModel) coerced.to_h => ^expect @@ -365,7 +365,7 @@ def test_dump target, input = _1 expect = _2 assert_pattern do - OpenAI::Type::Converter.dump(target, input) => ^expect + OpenAI::Internal::Type::Converter.dump(target, input) => ^expect end end end @@ -511,7 +511,7 @@ def test_coerce exactness, branched, expect = rhs state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} assert_pattern do - coerced = OpenAI::Type::Converter.coerce(target, input, state: state) + coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) if coerced.is_a?(OpenAI::BaseModel) coerced.to_h => ^expect diff --git a/test/openai/util_test.rb b/test/openai/internal/util_test.rb similarity index 75% rename from test/openai/util_test.rb rename to test/openai/internal/util_test.rb index 5d4c1b0c..ed50d991 100644 --- a/test/openai/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -1,48 +1,48 @@ # frozen_string_literal: true -require_relative "test_helper" +require_relative "../test_helper" class OpenAI::Test::UtilDataHandlingTest < Minitest::Test def test_left_map assert_pattern do - OpenAI::Util.deep_merge({a: 1}, nil) => nil + OpenAI::Internal::Util.deep_merge({a: 1}, nil) => nil end end def test_right_map assert_pattern do - OpenAI::Util.deep_merge(nil, {a: 1}) => {a: 1} + OpenAI::Internal::Util.deep_merge(nil, {a: 1}) => {a: 1} end end def test_disjoint_maps assert_pattern do - OpenAI::Util.deep_merge({b: 2}, {a: 1}) => {a: 1, b: 2} + OpenAI::Internal::Util.deep_merge({b: 2}, {a: 1}) => {a: 1, b: 2} end end def test_overlapping_maps assert_pattern do - OpenAI::Util.deep_merge({b: 2, c: 3}, {a: 1, c: 4}) => {a: 1, b: 2, c: 4} + OpenAI::Internal::Util.deep_merge({b: 2, c: 3}, {a: 1, c: 4}) => {a: 1, b: 2, c: 4} end end def test_nested assert_pattern do - OpenAI::Util.deep_merge({b: {b2: 1}}, {b: {b2: 2}}) => {b: {b2: 2}} + OpenAI::Internal::Util.deep_merge({b: {b2: 1}}, {b: {b2: 2}}) => {b: {b2: 2}} end end def test_nested_left_map assert_pattern do - OpenAI::Util.deep_merge({b: {b2: 1}}, {b: 6}) => {b: 6} + OpenAI::Internal::Util.deep_merge({b: {b2: 1}}, {b: 6}) => {b: 6} end end def test_omission - merged = OpenAI::Util.deep_merge( + merged = OpenAI::Internal::Util.deep_merge( {b: {b2: 1, b3: {c: 4, d: 5}}}, - {b: {b2: 1, b3: {c: OpenAI::Util::OMIT, d: 5}}} + {b: {b2: 1, b3: {c: OpenAI::Internal::Util::OMIT, d: 5}}} ) assert_pattern do @@ -51,7 +51,7 @@ def test_omission end def test_concat - merged = OpenAI::Util.deep_merge( + merged = OpenAI::Internal::Util.deep_merge( {a: {b: [1, 2]}}, {a: {b: [3, 4]}}, concat: true @@ -63,7 +63,7 @@ def test_concat end def test_concat_false - merged = OpenAI::Util.deep_merge( + merged = OpenAI::Internal::Util.deep_merge( {a: {b: [1, 2]}}, {a: {b: [3, 4]}}, concat: false @@ -76,19 +76,19 @@ def test_concat_false def test_dig assert_pattern do - OpenAI::Util.dig(1, nil) => 1 - OpenAI::Util.dig({a: 1}, :b) => nil - OpenAI::Util.dig({a: 1}, :a) => 1 - OpenAI::Util.dig({a: {b: 1}}, [:a, :b]) => 1 - - OpenAI::Util.dig([], 1) => nil - OpenAI::Util.dig([nil, [nil, 1]], [1, 1]) => 1 - OpenAI::Util.dig({a: [nil, 1]}, 
[:a, 1]) => 1 - OpenAI::Util.dig([], 1.0) => nil - - OpenAI::Util.dig(Object, 1) => nil - OpenAI::Util.dig([], 1.0, 2) => 2 - OpenAI::Util.dig([], 1.0) { 2 } => 2 + OpenAI::Internal::Util.dig(1, nil) => 1 + OpenAI::Internal::Util.dig({a: 1}, :b) => nil + OpenAI::Internal::Util.dig({a: 1}, :a) => 1 + OpenAI::Internal::Util.dig({a: {b: 1}}, [:a, :b]) => 1 + + OpenAI::Internal::Util.dig([], 1) => nil + OpenAI::Internal::Util.dig([nil, [nil, 1]], [1, 1]) => 1 + OpenAI::Internal::Util.dig({a: [nil, 1]}, [:a, 1]) => 1 + OpenAI::Internal::Util.dig([], 1.0) => nil + + OpenAI::Internal::Util.dig(Object, 1) => nil + OpenAI::Internal::Util.dig([], 1.0, 2) => 2 + OpenAI::Internal::Util.dig([], 1.0) { 2 } => 2 end end end @@ -100,11 +100,11 @@ def test_parsing https://example.com/ https://example.com:443/example?e1=e1&e2=e2&e= ].each do |url| - parsed = OpenAI::Util.parse_uri(url) - unparsed = OpenAI::Util.unparse_uri(parsed).to_s + parsed = OpenAI::Internal::Util.parse_uri(url) + unparsed = OpenAI::Internal::Util.unparse_uri(parsed).to_s assert_equal(url, unparsed) - assert_equal(parsed, OpenAI::Util.parse_uri(unparsed)) + assert_equal(parsed, OpenAI::Internal::Util.parse_uri(unparsed)) end end @@ -113,7 +113,7 @@ def test_joining [ "h://a.b/c?d=e", "h://nope/ignored", - OpenAI::Util.parse_uri("h://a.b/c?d=e") + OpenAI::Internal::Util.parse_uri("h://a.b/c?d=e") ], [ "h://a.b/c?d=e", @@ -129,8 +129,8 @@ def test_joining cases.each do |expect, lhs, rhs| assert_equal( URI.parse(expect), - OpenAI::Util.join_parsed_uri( - OpenAI::Util.parse_uri(lhs), + OpenAI::Internal::Util.join_parsed_uri( + OpenAI::Internal::Util.parse_uri(lhs), rhs ) ) @@ -148,8 +148,8 @@ def test_joining_queries cases.each do |path, expected| assert_equal( URI.parse(expected), - OpenAI::Util.join_parsed_uri( - OpenAI::Util.parse_uri(base_url), + OpenAI::Internal::Util.join_parsed_uri( + OpenAI::Internal::Util.parse_uri(base_url), {path: path} ) ) @@ -162,7 +162,7 @@ class FakeCGI < CGI def initialize(headers, io) @ctype = headers["content-type"] # rubocop:disable Lint/EmptyBlock - @io = OpenAI::Util::ReadIOAdapter.new(io) {} + @io = OpenAI::Internal::Util::ReadIOAdapter.new(io) {} # rubocop:enable Lint/EmptyBlock @c_len = io.to_a.join.bytesize.to_s super() @@ -185,7 +185,7 @@ def test_file_encode StringIO.new("abc") => "abc" } cases.each do |body, val| - encoded = OpenAI::Util.encode_content(headers, body) + encoded = OpenAI::Internal::Util.encode_content(headers, body) cgi = FakeCGI.new(*encoded) assert_pattern do cgi[""] => ^val @@ -202,7 +202,7 @@ def test_hash_encode {file: StringIO.new("a")} => {"file" => "a"} } cases.each do |body, testcase| - encoded = OpenAI::Util.encode_content(headers, body) + encoded = OpenAI::Internal::Util.encode_content(headers, body) cgi = FakeCGI.new(*encoded) testcase.each do |key, val| assert_equal(val, cgi[key]) @@ -220,7 +220,7 @@ def test_copy_read cases.each do |input, expected| io = StringIO.new # rubocop:disable Lint/EmptyBlock - adapter = OpenAI::Util::ReadIOAdapter.new(input) {} + adapter = OpenAI::Internal::Util::ReadIOAdapter.new(input) {} # rubocop:enable Lint/EmptyBlock IO.copy_stream(adapter, io) assert_equal(expected, io.string) @@ -233,7 +233,7 @@ def test_copy_write StringIO.new("abc") => "abc" } cases.each do |input, expected| - enum = OpenAI::Util.writable_enum do |y| + enum = OpenAI::Internal::Util.writable_enum do |y| IO.copy_stream(input, y) end assert_equal(expected, enum.to_a.join) @@ -245,7 +245,7 @@ class OpenAI::Test::UtilFusedEnumTest < Minitest::Test def test_closing arr = 
[1, 2, 3] once = 0 - fused = OpenAI::Util.fused_enum(arr.to_enum) do + fused = OpenAI::Internal::Util.fused_enum(arr.to_enum) do once = once.succ end @@ -260,7 +260,7 @@ def test_closing def test_rewind_chain once = 0 - fused = OpenAI::Util.fused_enum([1, 2, 3].to_enum) do + fused = OpenAI::Internal::Util.fused_enum([1, 2, 3].to_enum) do once = once.succ end .lazy @@ -277,7 +277,7 @@ def test_rewind_chain def test_external_iteration it = [1, 2, 3].to_enum first = it.next - fused = OpenAI::Util.fused_enum(it, external: true) + fused = OpenAI::Internal::Util.fused_enum(it, external: true) assert_equal(1, first) assert_equal([2, 3], fused.to_a) @@ -285,11 +285,11 @@ def test_external_iteration def test_close_fused once = 0 - fused = OpenAI::Util.fused_enum([1, 2, 3].to_enum) do + fused = OpenAI::Internal::Util.fused_enum([1, 2, 3].to_enum) do once = once.succ end - OpenAI::Util.close_fused!(fused) + OpenAI::Internal::Util.close_fused!(fused) assert_equal(1, once) assert_equal([], fused.to_a) @@ -302,11 +302,11 @@ def test_closed_fused_extern_iteration taken = taken.succ _1 end - fused = OpenAI::Util.fused_enum(enum) + fused = OpenAI::Internal::Util.fused_enum(enum) first = fused.next assert_equal(1, first) - OpenAI::Util.close_fused!(fused) + OpenAI::Internal::Util.close_fused!(fused) assert_equal(1, taken) end @@ -318,10 +318,10 @@ def test_closed_fused_taken_count end .map(&:succ) .filter(&:odd?) - fused = OpenAI::Util.fused_enum(enum) + fused = OpenAI::Internal::Util.fused_enum(enum) assert_equal(0, taken) - OpenAI::Util.close_fused!(fused) + OpenAI::Internal::Util.close_fused!(fused) assert_equal(0, taken) end @@ -337,8 +337,8 @@ def test_closed_fused_extern_iter_taken_count assert_equal(2, first) assert_equal(1, taken) - fused = OpenAI::Util.fused_enum(enum) - OpenAI::Util.close_fused!(fused) + fused = OpenAI::Internal::Util.fused_enum(enum) + OpenAI::Internal::Util.close_fused!(fused) assert_equal(1, taken) end @@ -352,12 +352,12 @@ def test_close_fused_sse_chain .filter(&:odd?) 
.map(&:to_s) - fused_1 = OpenAI::Util.fused_enum(enum) - fused_2 = OpenAI::Util.decode_lines(fused_1) - fused_3 = OpenAI::Util.decode_sse(fused_2) + fused_1 = OpenAI::Internal::Util.fused_enum(enum) + fused_2 = OpenAI::Internal::Util.decode_lines(fused_1) + fused_3 = OpenAI::Internal::Util.decode_sse(fused_2) assert_equal(0, taken) - OpenAI::Util.close_fused!(fused_3) + OpenAI::Internal::Util.close_fused!(fused_3) assert_equal(0, taken) end end @@ -380,7 +380,7 @@ def test_decode_lines eols = %W[\n \r \r\n] cases.each do |enum, expected| eols.each do |eol| - lines = OpenAI::Util.decode_lines(enum.map { _1.gsub("\n", eol) }) + lines = OpenAI::Internal::Util.decode_lines(enum.map { _1.gsub("\n", eol) }) assert_equal(expected.map { _1.gsub("\n", eol) }, lines.to_a, "eol=#{JSON.generate(eol)}") end end @@ -398,7 +398,7 @@ def test_mixed_decode_lines %W[\n\r] => %W[\n \r] } cases.each do |enum, expected| - lines = OpenAI::Util.decode_lines(enum) + lines = OpenAI::Internal::Util.decode_lines(enum) assert_equal(expected, lines.to_a) end end @@ -521,7 +521,7 @@ def test_decode_sse cases.each do |name, test_cases| test_cases.each do |input, expected| - actual = OpenAI::Util.decode_sse(input).map(&:compact) + actual = OpenAI::Internal::Util.decode_sse(input).map(&:compact) assert_equal(expected, actual, name) end end diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb index 59e25d4f..76c25672 100644 --- a/test/openai/resources/batches_test.rb +++ b/test/openai/resources/batches_test.rb @@ -78,7 +78,7 @@ def test_list response = @openai.batches.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index f6363319..f618bea7 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -85,7 +85,7 @@ def test_list response = @openai.beta.assistants.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/beta/threads/messages_test.rb b/test/openai/resources/beta/threads/messages_test.rb index bd430759..7a8fdc90 100644 --- a/test/openai/resources/beta/threads/messages_test.rb +++ b/test/openai/resources/beta/threads/messages_test.rb @@ -88,7 +88,7 @@ def test_list response = @openai.beta.threads.messages.list("thread_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/beta/threads/runs/steps_test.rb b/test/openai/resources/beta/threads/runs/steps_test.rb index 97fa3d2b..c05de77f 100644 --- a/test/openai/resources/beta/threads/runs/steps_test.rb +++ b/test/openai/resources/beta/threads/runs/steps_test.rb @@ -36,7 +36,7 @@ def test_list_required_params response = @openai.beta.threads.runs.steps.list("run_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 919cb65f..1147966b 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -127,7 +127,7 @@ def test_list response = @openai.beta.threads.runs.list("thread_id") assert_pattern do - 
response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/chat/completions/messages_test.rb b/test/openai/resources/chat/completions/messages_test.rb index e9fac539..1b68a1b0 100644 --- a/test/openai/resources/chat/completions/messages_test.rb +++ b/test/openai/resources/chat/completions/messages_test.rb @@ -7,7 +7,7 @@ def test_list response = @openai.chat.completions.messages.list("completion_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index f4d9b7f7..00d91a0a 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -71,7 +71,7 @@ def test_list response = @openai.chat.completions.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index 58bcbf2b..c4a14255 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -51,7 +51,7 @@ def test_list response = @openai.files.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb index feb9c651..08154e71 100644 --- a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb +++ b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb @@ -7,7 +7,7 @@ def test_list response = @openai.fine_tuning.jobs.checkpoints.list("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb index 7df07711..a953af3d 100644 --- a/test/openai/resources/fine_tuning/jobs_test.rb +++ b/test/openai/resources/fine_tuning/jobs_test.rb @@ -71,7 +71,7 @@ def test_list response = @openai.fine_tuning.jobs.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first @@ -142,7 +142,7 @@ def test_list_events response = @openai.fine_tuning.jobs.list_events("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/models_test.rb b/test/openai/resources/models_test.rb index 33384138..6316652e 100644 --- a/test/openai/resources/models_test.rb +++ b/test/openai/resources/models_test.rb @@ -24,7 +24,7 @@ def test_list response = @openai.models.list assert_pattern do - response => OpenAI::Page + response => OpenAI::Internal::Page end row = response.to_enum.first diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 1250f3bd..59a1cc5e 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -7,7 +7,7 @@ def test_list response = @openai.responses.input_items.list("response_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first 
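The hunks on either side of this point all make the same mechanical substitution: pagination classes matched in `assert_pattern` move from the old top-level aliases to `OpenAI::Internal::CursorPage` and `OpenAI::Internal::Page`. A minimal sketch of the shape these tests assert, assuming a client configured via `OPENAI_API_KEY` and a gem version at or after the relocation:

  require "openai"

  client = OpenAI::Client.new

  # List endpoints return a cursor page rather than a bare array.
  page = client.files.list
  page => OpenAI::Internal::CursorPage

  # Per the BasePage interface whose signatures are deleted above, a page
  # exposes `to_enum`, `next_page?`, and `auto_paging_each` for lazy paging.
  row = page.to_enum.first
  puts row&.id
  page.auto_paging_each { |file| puts file.id }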
diff --git a/test/openai/resources/vector_stores/file_batches_test.rb b/test/openai/resources/vector_stores/file_batches_test.rb index f4bcecf5..538469fe 100644 --- a/test/openai/resources/vector_stores/file_batches_test.rb +++ b/test/openai/resources/vector_stores/file_batches_test.rb @@ -64,7 +64,7 @@ def test_list_files_required_params response = @openai.vector_stores.file_batches.list_files("batch_id", vector_store_id: "vector_store_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first diff --git a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb index 2b789caa..5d806b95 100644 --- a/test/openai/resources/vector_stores/files_test.rb +++ b/test/openai/resources/vector_stores/files_test.rb @@ -78,7 +78,7 @@ def test_list response = @openai.vector_stores.files.list("vector_store_id") assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first @@ -123,7 +123,7 @@ def test_content_required_params response = @openai.vector_stores.files.content("file-abc123", vector_store_id: "vs_abc123") assert_pattern do - response => OpenAI::Page + response => OpenAI::Internal::Page end row = response.to_enum.first diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index c7ae05cb..c12e8413 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -79,7 +79,7 @@ def test_list response = @openai.vector_stores.list assert_pattern do - response => OpenAI::CursorPage + response => OpenAI::Internal::CursorPage end row = response.to_enum.first @@ -126,7 +126,7 @@ def test_search_required_params response = @openai.vector_stores.search("vs_abc123", query: "string") assert_pattern do - response => OpenAI::Page + response => OpenAI::Internal::Page end row = response.to_enum.first From 4d951694889f6b989bf16b641ae9908e6463629a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 17:28:18 +0000 Subject: [PATCH 089/295] feat!: remove top level type aliases to relocated classes (#91) --- lib/openai.rb | 2 +- lib/openai/aliases.rb | 19 -- lib/openai/errors.rb | 28 --- lib/openai/internal.rb | 8 + lib/openai/internal/transport/base_client.rb | 6 +- lib/openai/internal/type/array_of.rb | 4 +- lib/openai/internal/type/base_model.rb | 31 ++-- lib/openai/internal/type/boolean_model.rb | 2 +- lib/openai/internal/type/converter.rb | 10 +- lib/openai/internal/type/enum.rb | 2 +- lib/openai/internal/type/hash_of.rb | 4 +- .../internal/type/request_parameters.rb | 2 +- lib/openai/internal/type/union.rb | 8 +- lib/openai/internal/type/unknown.rb | 2 +- lib/openai/internal/util.rb | 11 +- lib/openai/models/all_models.rb | 2 +- .../models/audio/speech_create_params.rb | 10 +- lib/openai/models/audio/speech_model.rb | 2 +- lib/openai/models/audio/transcription.rb | 12 +- .../audio/transcription_create_params.rb | 13 +- .../audio/transcription_create_response.rb | 2 +- .../models/audio/transcription_include.rb | 2 +- .../models/audio/transcription_segment.rb | 6 +- .../audio/transcription_stream_event.rb | 2 +- .../audio/transcription_text_delta_event.rb | 13 +- .../audio/transcription_text_done_event.rb | 13 +- .../models/audio/transcription_verbose.rb | 8 +- lib/openai/models/audio/transcription_word.rb | 4 +- lib/openai/models/audio/translation.rb | 4 +- 
.../models/audio/translation_create_params.rb | 8 +- .../audio/translation_create_response.rb | 2 +- .../models/audio/translation_verbose.rb | 6 +- lib/openai/models/audio_model.rb | 2 +- lib/openai/models/audio_response_format.rb | 2 +- .../auto_file_chunking_strategy_param.rb | 4 +- lib/openai/models/batch.rb | 14 +- lib/openai/models/batch_cancel_params.rb | 4 +- lib/openai/models/batch_create_params.rb | 10 +- lib/openai/models/batch_error.rb | 4 +- lib/openai/models/batch_list_params.rb | 4 +- lib/openai/models/batch_request_counts.rb | 4 +- lib/openai/models/batch_retrieve_params.rb | 4 +- lib/openai/models/beta/assistant.rb | 24 +-- .../models/beta/assistant_create_params.rb | 50 +++--- .../models/beta/assistant_delete_params.rb | 4 +- lib/openai/models/beta/assistant_deleted.rb | 6 +- .../models/beta/assistant_list_params.rb | 6 +- .../beta/assistant_response_format_option.rb | 2 +- .../models/beta/assistant_retrieve_params.rb | 4 +- .../models/beta/assistant_stream_event.rb | 100 +++++------ lib/openai/models/beta/assistant_tool.rb | 2 +- .../models/beta/assistant_tool_choice.rb | 6 +- .../beta/assistant_tool_choice_function.rb | 4 +- .../beta/assistant_tool_choice_option.rb | 4 +- .../models/beta/assistant_update_params.rb | 26 +-- .../models/beta/code_interpreter_tool.rb | 4 +- lib/openai/models/beta/file_search_tool.rb | 14 +- lib/openai/models/beta/function_tool.rb | 4 +- .../models/beta/message_stream_event.rb | 22 +-- .../models/beta/run_step_stream_event.rb | 30 ++-- lib/openai/models/beta/run_stream_event.rb | 42 ++--- lib/openai/models/beta/thread.rb | 22 +-- .../beta/thread_create_and_run_params.rb | 110 ++++++------ .../models/beta/thread_create_params.rb | 75 ++++---- .../models/beta/thread_delete_params.rb | 4 +- lib/openai/models/beta/thread_deleted.rb | 6 +- .../models/beta/thread_retrieve_params.rb | 4 +- lib/openai/models/beta/thread_stream_event.rb | 6 +- .../models/beta/thread_update_params.rb | 22 +-- lib/openai/models/beta/threads/annotation.rb | 2 +- .../models/beta/threads/annotation_delta.rb | 2 +- .../beta/threads/file_citation_annotation.rb | 8 +- .../threads/file_citation_delta_annotation.rb | 8 +- .../beta/threads/file_path_annotation.rb | 8 +- .../threads/file_path_delta_annotation.rb | 8 +- lib/openai/models/beta/threads/image_file.rb | 6 +- .../beta/threads/image_file_content_block.rb | 4 +- .../models/beta/threads/image_file_delta.rb | 6 +- .../beta/threads/image_file_delta_block.rb | 4 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../beta/threads/image_url_content_block.rb | 4 +- .../models/beta/threads/image_url_delta.rb | 6 +- .../beta/threads/image_url_delta_block.rb | 4 +- lib/openai/models/beta/threads/message.rb | 34 ++-- .../models/beta/threads/message_content.rb | 2 +- .../beta/threads/message_content_delta.rb | 2 +- .../threads/message_content_part_param.rb | 2 +- .../beta/threads/message_create_params.rb | 26 +-- .../beta/threads/message_delete_params.rb | 4 +- .../models/beta/threads/message_deleted.rb | 6 +- .../models/beta/threads/message_delta.rb | 9 +- .../beta/threads/message_delta_event.rb | 4 +- .../beta/threads/message_list_params.rb | 6 +- .../beta/threads/message_retrieve_params.rb | 4 +- .../beta/threads/message_update_params.rb | 6 +- .../beta/threads/refusal_content_block.rb | 4 +- .../beta/threads/refusal_delta_block.rb | 4 +- .../required_action_function_tool_call.rb | 8 +- lib/openai/models/beta/threads/run.rb | 42 ++--- .../models/beta/threads/run_cancel_params.rb | 4 +- 
.../models/beta/threads/run_create_params.rb | 51 +++--- .../models/beta/threads/run_list_params.rb | 6 +- .../beta/threads/run_retrieve_params.rb | 4 +- lib/openai/models/beta/threads/run_status.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 10 +- .../models/beta/threads/run_update_params.rb | 6 +- .../threads/runs/code_interpreter_logs.rb | 4 +- .../runs/code_interpreter_output_image.rb | 8 +- .../runs/code_interpreter_tool_call.rb | 24 +-- .../runs/code_interpreter_tool_call_delta.rb | 12 +- .../threads/runs/file_search_tool_call.rb | 28 +-- .../runs/file_search_tool_call_delta.rb | 6 +- .../beta/threads/runs/function_tool_call.rb | 8 +- .../threads/runs/function_tool_call_delta.rb | 8 +- .../runs/message_creation_step_details.rb | 8 +- .../models/beta/threads/runs/run_step.rb | 22 +-- .../beta/threads/runs/run_step_delta.rb | 6 +- .../beta/threads/runs/run_step_delta_event.rb | 4 +- .../runs/run_step_delta_message_delta.rb | 8 +- .../beta/threads/runs/run_step_include.rb | 2 +- .../beta/threads/runs/step_list_params.rb | 9 +- .../beta/threads/runs/step_retrieve_params.rb | 7 +- .../models/beta/threads/runs/tool_call.rb | 2 +- .../beta/threads/runs/tool_call_delta.rb | 2 +- .../threads/runs/tool_call_delta_object.rb | 7 +- .../threads/runs/tool_calls_step_details.rb | 7 +- lib/openai/models/beta/threads/text.rb | 7 +- .../models/beta/threads/text_content_block.rb | 4 +- .../beta/threads/text_content_block_param.rb | 4 +- lib/openai/models/beta/threads/text_delta.rb | 7 +- .../models/beta/threads/text_delta_block.rb | 4 +- lib/openai/models/chat/chat_completion.rb | 26 +-- ...chat_completion_assistant_message_param.rb | 21 +-- .../models/chat/chat_completion_audio.rb | 4 +- .../chat/chat_completion_audio_param.rb | 8 +- .../models/chat/chat_completion_chunk.rb | 49 +++--- .../chat/chat_completion_content_part.rb | 10 +- .../chat_completion_content_part_image.rb | 10 +- ...hat_completion_content_part_input_audio.rb | 10 +- .../chat_completion_content_part_refusal.rb | 4 +- .../chat/chat_completion_content_part_text.rb | 4 +- .../models/chat/chat_completion_deleted.rb | 6 +- ...chat_completion_developer_message_param.rb | 8 +- .../chat_completion_function_call_option.rb | 4 +- .../chat_completion_function_message_param.rb | 4 +- .../models/chat/chat_completion_message.rb | 22 +-- .../chat/chat_completion_message_param.rb | 2 +- .../chat/chat_completion_message_tool_call.rb | 8 +- .../models/chat/chat_completion_modality.rb | 2 +- .../chat/chat_completion_named_tool_choice.rb | 8 +- .../chat_completion_prediction_content.rb | 8 +- .../models/chat/chat_completion_role.rb | 2 +- .../chat/chat_completion_store_message.rb | 2 +- .../chat/chat_completion_stream_options.rb | 6 +- .../chat_completion_system_message_param.rb | 8 +- .../chat/chat_completion_token_logprob.rb | 14 +- .../models/chat/chat_completion_tool.rb | 4 +- .../chat_completion_tool_choice_option.rb | 4 +- .../chat_completion_tool_message_param.rb | 8 +- .../chat_completion_user_message_param.rb | 8 +- .../models/chat/completion_create_params.rb | 60 +++---- .../models/chat/completion_delete_params.rb | 4 +- .../models/chat/completion_list_params.rb | 8 +- .../models/chat/completion_retrieve_params.rb | 4 +- .../models/chat/completion_update_params.rb | 6 +- .../chat/completions/message_list_params.rb | 6 +- lib/openai/models/chat_model.rb | 2 +- lib/openai/models/comparison_filter.rb | 10 +- lib/openai/models/completion.rb | 6 +- lib/openai/models/completion_choice.rb | 18 +- 
lib/openai/models/completion_create_params.rb | 22 +-- lib/openai/models/completion_usage.rb | 12 +- lib/openai/models/compound_filter.rb | 12 +- .../models/create_embedding_response.rb | 10 +- lib/openai/models/embedding.rb | 6 +- lib/openai/models/embedding_create_params.rb | 16 +- lib/openai/models/embedding_model.rb | 2 +- lib/openai/models/error_object.rb | 4 +- lib/openai/models/file_chunking_strategy.rb | 2 +- .../models/file_chunking_strategy_param.rb | 2 +- lib/openai/models/file_content_params.rb | 4 +- lib/openai/models/file_create_params.rb | 4 +- lib/openai/models/file_delete_params.rb | 4 +- lib/openai/models/file_deleted.rb | 6 +- lib/openai/models/file_list_params.rb | 6 +- lib/openai/models/file_object.rb | 8 +- lib/openai/models/file_purpose.rb | 2 +- lib/openai/models/file_retrieve_params.rb | 4 +- .../models/fine_tuning/fine_tuning_job.rb | 62 +++---- .../fine_tuning/fine_tuning_job_event.rb | 10 +- .../fine_tuning_job_wandb_integration.rb | 6 +- ...ine_tuning_job_wandb_integration_object.rb | 4 +- .../models/fine_tuning/job_cancel_params.rb | 4 +- .../models/fine_tuning/job_create_params.rb | 66 +++---- .../fine_tuning/job_list_events_params.rb | 4 +- .../models/fine_tuning/job_list_params.rb | 6 +- .../models/fine_tuning/job_retrieve_params.rb | 4 +- .../jobs/checkpoint_list_params.rb | 4 +- .../jobs/fine_tuning_job_checkpoint.rb | 8 +- lib/openai/models/function_definition.rb | 8 +- lib/openai/models/function_parameters.rb | 2 +- lib/openai/models/image.rb | 4 +- .../models/image_create_variation_params.rb | 10 +- lib/openai/models/image_edit_params.rb | 10 +- lib/openai/models/image_generate_params.rb | 14 +- lib/openai/models/image_model.rb | 2 +- lib/openai/models/images_response.rb | 6 +- lib/openai/models/metadata.rb | 2 +- lib/openai/models/model.rb | 4 +- lib/openai/models/model_delete_params.rb | 4 +- lib/openai/models/model_deleted.rb | 6 +- lib/openai/models/model_list_params.rb | 4 +- lib/openai/models/model_retrieve_params.rb | 4 +- lib/openai/models/moderation.rb | 104 +++++------ lib/openai/models/moderation_create_params.rb | 13 +- .../models/moderation_create_response.rb | 6 +- .../models/moderation_image_url_input.rb | 8 +- lib/openai/models/moderation_model.rb | 2 +- .../models/moderation_multi_modal_input.rb | 2 +- lib/openai/models/moderation_text_input.rb | 4 +- .../other_file_chunking_strategy_object.rb | 4 +- lib/openai/models/reasoning.rb | 6 +- lib/openai/models/reasoning_effort.rb | 2 +- .../models/response_format_json_object.rb | 4 +- .../models/response_format_json_schema.rb | 12 +- lib/openai/models/response_format_text.rb | 4 +- lib/openai/models/responses/computer_tool.rb | 6 +- .../models/responses/easy_input_message.rb | 10 +- .../models/responses/file_search_tool.rb | 14 +- lib/openai/models/responses/function_tool.rb | 8 +- .../responses/input_item_list_params.rb | 9 +- lib/openai/models/responses/response.rb | 23 +-- .../responses/response_audio_delta_event.rb | 4 +- .../responses/response_audio_done_event.rb | 4 +- .../response_audio_transcript_delta_event.rb | 4 +- .../response_audio_transcript_done_event.rb | 4 +- ..._code_interpreter_call_code_delta_event.rb | 4 +- ...e_code_interpreter_call_code_done_event.rb | 4 +- ...e_code_interpreter_call_completed_event.rb | 4 +- ...code_interpreter_call_in_progress_event.rb | 4 +- ...ode_interpreter_call_interpreting_event.rb | 4 +- .../response_code_interpreter_tool_call.rb | 24 +-- .../responses/response_completed_event.rb | 4 +- .../responses/response_computer_tool_call.rb | 62 
+++---- ...response_computer_tool_call_output_item.rb | 12 +- ...se_computer_tool_call_output_screenshot.rb | 4 +- .../models/responses/response_content.rb | 2 +- .../response_content_part_added_event.rb | 6 +- .../response_content_part_done_event.rb | 6 +- .../responses/response_create_params.rb | 22 +-- .../responses/response_created_event.rb | 4 +- .../responses/response_delete_params.rb | 4 +- lib/openai/models/responses/response_error.rb | 6 +- .../models/responses/response_error_event.rb | 4 +- .../models/responses/response_failed_event.rb | 4 +- ...sponse_file_search_call_completed_event.rb | 4 +- ...onse_file_search_call_in_progress_event.rb | 4 +- ...sponse_file_search_call_searching_event.rb | 4 +- .../response_file_search_tool_call.rb | 20 +-- .../responses/response_format_text_config.rb | 2 +- ...response_format_text_json_schema_config.rb | 8 +- ...nse_function_call_arguments_delta_event.rb | 4 +- ...onse_function_call_arguments_done_event.rb | 4 +- .../responses/response_function_tool_call.rb | 6 +- .../response_function_tool_call_item.rb | 2 +- ...response_function_tool_call_output_item.rb | 6 +- .../responses/response_function_web_search.rb | 6 +- .../responses/response_in_progress_event.rb | 4 +- .../models/responses/response_includable.rb | 2 +- .../responses/response_incomplete_event.rb | 4 +- lib/openai/models/responses/response_input.rb | 3 +- .../models/responses/response_input_audio.rb | 6 +- .../responses/response_input_content.rb | 2 +- .../models/responses/response_input_file.rb | 4 +- .../models/responses/response_input_image.rb | 6 +- .../models/responses/response_input_item.rb | 37 ++-- .../response_input_message_content_list.rb | 2 +- .../responses/response_input_message_item.rb | 13 +- .../models/responses/response_input_text.rb | 4 +- lib/openai/models/responses/response_item.rb | 2 +- .../models/responses/response_item_list.rb | 8 +- .../models/responses/response_output_audio.rb | 4 +- .../models/responses/response_output_item.rb | 2 +- .../response_output_item_added_event.rb | 4 +- .../response_output_item_done_event.rb | 4 +- .../responses/response_output_message.rb | 10 +- .../responses/response_output_refusal.rb | 4 +- .../models/responses/response_output_text.rb | 20 +-- .../responses/response_reasoning_item.rb | 13 +- .../responses/response_refusal_delta_event.rb | 4 +- .../responses/response_refusal_done_event.rb | 4 +- .../responses/response_retrieve_params.rb | 7 +- .../models/responses/response_status.rb | 2 +- .../models/responses/response_stream_event.rb | 2 +- .../response_text_annotation_delta_event.rb | 18 +- .../models/responses/response_text_config.rb | 4 +- .../responses/response_text_delta_event.rb | 4 +- .../responses/response_text_done_event.rb | 4 +- lib/openai/models/responses/response_usage.rb | 12 +- ...esponse_web_search_call_completed_event.rb | 4 +- ...ponse_web_search_call_in_progress_event.rb | 4 +- ...esponse_web_search_call_searching_event.rb | 4 +- lib/openai/models/responses/tool.rb | 2 +- .../models/responses/tool_choice_function.rb | 4 +- .../models/responses/tool_choice_options.rb | 2 +- .../models/responses/tool_choice_types.rb | 6 +- .../models/responses/web_search_tool.rb | 12 +- lib/openai/models/responses_model.rb | 2 +- .../models/static_file_chunking_strategy.rb | 4 +- .../static_file_chunking_strategy_object.rb | 4 +- ...tic_file_chunking_strategy_object_param.rb | 4 +- lib/openai/models/upload.rb | 6 +- lib/openai/models/upload_cancel_params.rb | 4 +- lib/openai/models/upload_complete_params.rb | 6 +- 
lib/openai/models/upload_create_params.rb | 4 +- .../models/uploads/part_create_params.rb | 4 +- lib/openai/models/uploads/upload_part.rb | 4 +- lib/openai/models/vector_store.rb | 16 +- .../models/vector_store_create_params.rb | 12 +- .../models/vector_store_delete_params.rb | 4 +- lib/openai/models/vector_store_deleted.rb | 6 +- lib/openai/models/vector_store_list_params.rb | 6 +- .../models/vector_store_retrieve_params.rb | 4 +- .../models/vector_store_search_params.rb | 18 +- .../models/vector_store_search_response.rb | 19 +- .../models/vector_store_update_params.rb | 10 +- .../vector_stores/file_batch_cancel_params.rb | 4 +- .../vector_stores/file_batch_create_params.rb | 12 +- .../file_batch_list_files_params.rb | 8 +- .../file_batch_retrieve_params.rb | 4 +- .../vector_stores/file_content_params.rb | 4 +- .../vector_stores/file_content_response.rb | 4 +- .../vector_stores/file_create_params.rb | 10 +- .../vector_stores/file_delete_params.rb | 4 +- .../models/vector_stores/file_list_params.rb | 8 +- .../vector_stores/file_retrieve_params.rb | 4 +- .../vector_stores/file_update_params.rb | 10 +- .../models/vector_stores/vector_store_file.rb | 18 +- .../vector_stores/vector_store_file_batch.rb | 10 +- .../vector_store_file_deleted.rb | 6 +- lib/openai/request_options.rb | 8 +- rbi/lib/openai/aliases.rbi | 19 -- rbi/lib/openai/errors.rbi | 28 --- rbi/lib/openai/internal.rbi | 10 ++ .../openai/internal/transport/base_client.rbi | 22 ++- rbi/lib/openai/internal/type/array_of.rbi | 8 +- rbi/lib/openai/internal/type/base_model.rbi | 32 ++-- rbi/lib/openai/internal/type/hash_of.rbi | 12 +- .../internal/type/request_parameters.rbi | 4 +- rbi/lib/openai/internal/type/union.rbi | 4 +- rbi/lib/openai/internal/util.rbi | 13 +- rbi/lib/openai/models/all_models.rbi | 2 +- .../models/audio/speech_create_params.rbi | 10 +- rbi/lib/openai/models/audio/speech_model.rbi | 2 +- rbi/lib/openai/models/audio/transcription.rbi | 8 +- .../audio/transcription_create_params.rbi | 8 +- .../audio/transcription_create_response.rbi | 2 +- .../models/audio/transcription_include.rbi | 2 +- .../models/audio/transcription_segment.rbi | 2 +- .../audio/transcription_stream_event.rbi | 2 +- .../audio/transcription_text_delta_event.rbi | 8 +- .../audio/transcription_text_done_event.rbi | 8 +- .../models/audio/transcription_verbose.rbi | 15 +- .../models/audio/transcription_word.rbi | 2 +- rbi/lib/openai/models/audio/translation.rbi | 2 +- .../audio/translation_create_params.rbi | 8 +- .../audio/translation_create_response.rbi | 2 +- .../models/audio/translation_verbose.rbi | 8 +- rbi/lib/openai/models/audio_model.rbi | 2 +- .../openai/models/audio_response_format.rbi | 2 +- .../auto_file_chunking_strategy_param.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 18 +- rbi/lib/openai/models/batch_cancel_params.rbi | 10 +- rbi/lib/openai/models/batch_create_params.rbi | 8 +- rbi/lib/openai/models/batch_error.rbi | 2 +- rbi/lib/openai/models/batch_list_params.rbi | 4 +- .../openai/models/batch_request_counts.rbi | 2 +- .../openai/models/batch_retrieve_params.rbi | 10 +- rbi/lib/openai/models/beta/assistant.rbi | 24 +-- .../models/beta/assistant_create_params.rbi | 62 +++---- .../models/beta/assistant_delete_params.rbi | 10 +- .../openai/models/beta/assistant_deleted.rbi | 2 +- .../models/beta/assistant_list_params.rbi | 6 +- .../beta/assistant_response_format_option.rbi | 2 +- .../models/beta/assistant_retrieve_params.rbi | 10 +- .../models/beta/assistant_stream_event.rbi | 163 ++++++++---------- 
rbi/lib/openai/models/beta/assistant_tool.rbi | 2 +- .../models/beta/assistant_tool_choice.rbi | 13 +- .../beta/assistant_tool_choice_function.rbi | 2 +- .../beta/assistant_tool_choice_option.rbi | 4 +- .../models/beta/assistant_update_params.rbi | 40 ++--- .../models/beta/code_interpreter_tool.rbi | 2 +- .../openai/models/beta/file_search_tool.rbi | 18 +- rbi/lib/openai/models/beta/function_tool.rbi | 6 +- .../models/beta/message_stream_event.rbi | 44 ++--- .../models/beta/run_step_stream_event.rbi | 49 +++--- .../openai/models/beta/run_stream_event.rbi | 62 +++---- rbi/lib/openai/models/beta/thread.rbi | 20 +-- .../beta/thread_create_and_run_params.rbi | 124 ++++++------- .../models/beta/thread_create_params.rbi | 82 ++++----- .../models/beta/thread_delete_params.rbi | 10 +- rbi/lib/openai/models/beta/thread_deleted.rbi | 2 +- .../models/beta/thread_retrieve_params.rbi | 10 +- .../models/beta/thread_stream_event.rbi | 6 +- .../models/beta/thread_update_params.rbi | 34 ++-- .../openai/models/beta/threads/annotation.rbi | 2 +- .../models/beta/threads/annotation_delta.rbi | 2 +- .../beta/threads/file_citation_annotation.rbi | 14 +- .../file_citation_delta_annotation.rbi | 14 +- .../beta/threads/file_path_annotation.rbi | 8 +- .../threads/file_path_delta_annotation.rbi | 8 +- .../openai/models/beta/threads/image_file.rbi | 4 +- .../beta/threads/image_file_content_block.rbi | 6 +- .../models/beta/threads/image_file_delta.rbi | 4 +- .../beta/threads/image_file_delta_block.rbi | 9 +- .../openai/models/beta/threads/image_url.rbi | 4 +- .../beta/threads/image_url_content_block.rbi | 9 +- .../models/beta/threads/image_url_delta.rbi | 4 +- .../beta/threads/image_url_delta_block.rbi | 9 +- .../openai/models/beta/threads/message.rbi | 34 ++-- .../models/beta/threads/message_content.rbi | 2 +- .../beta/threads/message_content_delta.rbi | 2 +- .../threads/message_content_part_param.rbi | 2 +- .../beta/threads/message_create_params.rbi | 24 +-- .../beta/threads/message_delete_params.rbi | 4 +- .../models/beta/threads/message_deleted.rbi | 2 +- .../models/beta/threads/message_delta.rbi | 8 +- .../beta/threads/message_delta_event.rbi | 6 +- .../beta/threads/message_list_params.rbi | 6 +- .../beta/threads/message_retrieve_params.rbi | 4 +- .../beta/threads/message_update_params.rbi | 4 +- .../beta/threads/refusal_content_block.rbi | 2 +- .../beta/threads/refusal_delta_block.rbi | 2 +- .../required_action_function_tool_call.rbi | 14 +- rbi/lib/openai/models/beta/threads/run.rbi | 63 +++---- .../models/beta/threads/run_cancel_params.rbi | 4 +- .../models/beta/threads/run_create_params.rbi | 44 ++--- .../models/beta/threads/run_list_params.rbi | 6 +- .../beta/threads/run_retrieve_params.rbi | 4 +- .../openai/models/beta/threads/run_status.rbi | 2 +- .../run_submit_tool_outputs_params.rbi | 13 +- .../models/beta/threads/run_update_params.rbi | 4 +- .../threads/runs/code_interpreter_logs.rbi | 2 +- .../runs/code_interpreter_output_image.rbi | 14 +- .../runs/code_interpreter_tool_call.rbi | 22 +-- .../runs/code_interpreter_tool_call_delta.rbi | 14 +- .../threads/runs/file_search_tool_call.rbi | 36 ++-- .../runs/file_search_tool_call_delta.rbi | 2 +- .../beta/threads/runs/function_tool_call.rbi | 8 +- .../threads/runs/function_tool_call_delta.rbi | 14 +- .../runs/message_creation_step_details.rbi | 8 +- .../models/beta/threads/runs/run_step.rbi | 24 +-- .../beta/threads/runs/run_step_delta.rbi | 8 +- .../threads/runs/run_step_delta_event.rbi | 9 +- .../runs/run_step_delta_message_delta.rbi | 8 +- 
 .../beta/threads/runs/run_step_include.rbi | 2 +-
 .../beta/threads/runs/step_list_params.rbi | 6 +-
 .../threads/runs/step_retrieve_params.rbi | 4 +-
 .../models/beta/threads/runs/tool_call.rbi | 2 +-
 .../beta/threads/runs/tool_call_delta.rbi | 2 +-
 .../threads/runs/tool_call_delta_object.rbi | 6 +-
 .../threads/runs/tool_calls_step_details.rbi | 4 +-
 rbi/lib/openai/models/beta/threads/text.rbi | 4 +-
 .../beta/threads/text_content_block.rbi | 6 +-
 .../beta/threads/text_content_block_param.rbi | 2 +-
 .../openai/models/beta/threads/text_delta.rbi | 6 +-
 .../models/beta/threads/text_delta_block.rbi | 6 +-
 .../openai/models/chat/chat_completion.rbi | 32 ++--
 ...hat_completion_assistant_message_param.rbi | 32 ++--
 .../models/chat/chat_completion_audio.rbi | 2 +-
 .../chat/chat_completion_audio_param.rbi | 6 +-
 .../models/chat/chat_completion_chunk.rbi | 68 +++----
 .../chat/chat_completion_content_part.rbi | 10 +-
 .../chat_completion_content_part_image.rbi | 10 +-
 ...at_completion_content_part_input_audio.rbi | 16 +-
 .../chat_completion_content_part_refusal.rbi | 2 +-
 .../chat_completion_content_part_text.rbi | 2 +-
 .../models/chat/chat_completion_deleted.rbi | 2 +-
 ...hat_completion_developer_message_param.rbi | 8 +-
 .../chat_completion_function_call_option.rbi | 2 +-
 ...chat_completion_function_message_param.rbi | 2 +-
 .../models/chat/chat_completion_message.rbi | 39 ++---
 .../chat/chat_completion_message_param.rbi | 2 +-
 .../chat_completion_message_tool_call.rbi | 8 +-
 .../models/chat/chat_completion_modality.rbi | 2 +-
 .../chat_completion_named_tool_choice.rbi | 8 +-
 .../chat_completion_prediction_content.rbi | 8 +-
 .../models/chat/chat_completion_role.rbi | 2 +-
 .../chat/chat_completion_stream_options.rbi | 2 +-
 .../chat_completion_system_message_param.rbi | 8 +-
 .../chat/chat_completion_token_logprob.rbi | 6 +-
 .../models/chat/chat_completion_tool.rbi | 6 +-
 .../chat_completion_tool_choice_option.rbi | 4 +-
 .../chat_completion_tool_message_param.rbi | 8 +-
 .../chat_completion_user_message_param.rbi | 8 +-
 .../models/chat/completion_create_params.rbi | 81 ++++-----
 .../models/chat/completion_delete_params.rbi | 10 +-
 .../models/chat/completion_list_params.rbi | 6 +-
 .../chat/completion_retrieve_params.rbi | 10 +-
 .../models/chat/completion_update_params.rbi | 4 +-
 .../chat/completions/message_list_params.rbi | 6 +-
 rbi/lib/openai/models/chat_model.rbi | 2 +-
 rbi/lib/openai/models/comparison_filter.rbi | 6 +-
 rbi/lib/openai/models/completion.rbi | 8 +-
 rbi/lib/openai/models/completion_choice.rbi | 12 +-
 .../models/completion_create_params.rbi | 29 ++--
 rbi/lib/openai/models/completion_usage.rbi | 14 +-
 rbi/lib/openai/models/compound_filter.rbi | 8 +-
 .../models/create_embedding_response.rbi | 10 +-
 rbi/lib/openai/models/embedding.rbi | 2 +-
 .../openai/models/embedding_create_params.rbi | 23 +--
 rbi/lib/openai/models/embedding_model.rbi | 2 +-
 rbi/lib/openai/models/error_object.rbi | 2 +-
 .../openai/models/file_chunking_strategy.rbi | 2 +-
 .../models/file_chunking_strategy_param.rbi | 2 +-
 rbi/lib/openai/models/file_content_params.rbi | 10 +-
 rbi/lib/openai/models/file_create_params.rbi | 4 +-
 rbi/lib/openai/models/file_delete_params.rbi | 10 +-
 rbi/lib/openai/models/file_deleted.rbi | 2 +-
 rbi/lib/openai/models/file_list_params.rbi | 6 +-
 rbi/lib/openai/models/file_object.rbi | 6 +-
 rbi/lib/openai/models/file_purpose.rbi | 2 +-
 .../openai/models/file_retrieve_params.rbi | 10 +-
 .../models/fine_tuning/fine_tuning_job.rbi | 84 ++++-----
 .../fine_tuning/fine_tuning_job_event.rbi | 6 +-
 .../fine_tuning_job_wandb_integration.rbi | 2 +-
 ...ne_tuning_job_wandb_integration_object.rbi | 8 +-
 .../models/fine_tuning/job_cancel_params.rbi | 10 +-
 .../models/fine_tuning/job_create_params.rbi | 81 ++++-----
 .../fine_tuning/job_list_events_params.rbi | 4 +-
 .../models/fine_tuning/job_list_params.rbi | 4 +-
 .../fine_tuning/job_retrieve_params.rbi | 10 +-
 .../jobs/checkpoint_list_params.rbi | 4 +-
 .../jobs/fine_tuning_job_checkpoint.rbi | 8 +-
 rbi/lib/openai/models/function_definition.rbi | 2 +-
 rbi/lib/openai/models/function_parameters.rbi | 6 +-
 rbi/lib/openai/models/image.rbi | 2 +-
 .../models/image_create_variation_params.rbi | 10 +-
 rbi/lib/openai/models/image_edit_params.rbi | 10 +-
 .../openai/models/image_generate_params.rbi | 14 +-
 rbi/lib/openai/models/image_model.rbi | 2 +-
 rbi/lib/openai/models/images_response.rbi | 4 +-
 rbi/lib/openai/models/metadata.rbi | 2 +-
 rbi/lib/openai/models/model.rbi | 2 +-
 rbi/lib/openai/models/model_delete_params.rbi | 10 +-
 rbi/lib/openai/models/model_deleted.rbi | 2 +-
 rbi/lib/openai/models/model_list_params.rbi | 10 +-
 .../openai/models/model_retrieve_params.rbi | 10 +-
 rbi/lib/openai/models/moderation.rbi | 51 +++---
 .../models/moderation_create_params.rbi | 14 +-
 .../models/moderation_create_response.rbi | 4 +-
 .../models/moderation_image_url_input.rbi | 13 +-
 rbi/lib/openai/models/moderation_model.rbi | 2 +-
 .../models/moderation_multi_modal_input.rbi | 2 +-
 .../openai/models/moderation_text_input.rbi | 2 +-
 .../other_file_chunking_strategy_object.rbi | 2 +-
 rbi/lib/openai/models/reasoning.rbi | 4 +-
 rbi/lib/openai/models/reasoning_effort.rbi | 2 +-
 .../models/response_format_json_object.rbi | 2 +-
 .../models/response_format_json_schema.rbi | 8 +-
 .../openai/models/response_format_text.rbi | 2 +-
 .../openai/models/responses/computer_tool.rbi | 4 +-
 .../models/responses/easy_input_message.rbi | 10 +-
 .../models/responses/file_search_tool.rbi | 16 +-
 .../openai/models/responses/function_tool.rbi | 2 +-
 .../responses/input_item_list_params.rbi | 6 +-
 rbi/lib/openai/models/responses/response.rbi | 39 ++---
 .../responses/response_audio_delta_event.rbi | 2 +-
 .../responses/response_audio_done_event.rbi | 2 +-
 .../response_audio_transcript_delta_event.rbi | 2 +-
 .../response_audio_transcript_done_event.rbi | 2 +-
 ...code_interpreter_call_code_delta_event.rbi | 2 +-
 ..._code_interpreter_call_code_done_event.rbi | 2 +-
 ..._code_interpreter_call_completed_event.rbi | 6 +-
 ...ode_interpreter_call_in_progress_event.rbi | 6 +-
 ...de_interpreter_call_interpreting_event.rbi | 6 +-
 .../response_code_interpreter_tool_call.rbi | 16 +-
 .../responses/response_completed_event.rbi | 9 +-
 .../responses/response_computer_tool_call.rbi | 48 ++----
 ...esponse_computer_tool_call_output_item.rbi | 20 +--
 ...e_computer_tool_call_output_screenshot.rbi | 2 +-
 .../models/responses/response_content.rbi | 2 +-
 .../response_content_part_added_event.rbi | 6 +-
 .../response_content_part_done_event.rbi | 6 +-
 .../responses/response_create_params.rbi | 28 +--
 .../responses/response_created_event.rbi | 9 +-
 .../responses/response_delete_params.rbi | 10 +-
 .../models/responses/response_error.rbi | 4 +-
 .../models/responses/response_error_event.rbi | 2 +-
 .../responses/response_failed_event.rbi | 9 +-
 ...ponse_file_search_call_completed_event.rbi | 2 +-
 ...nse_file_search_call_in_progress_event.rbi | 2 +-
 ...ponse_file_search_call_searching_event.rbi | 2 +-
 .../response_file_search_tool_call.rbi | 10 +-
 .../responses/response_format_text_config.rbi | 2 +-
 ...esponse_format_text_json_schema_config.rbi | 2 +-
 ...se_function_call_arguments_delta_event.rbi | 2 +-
 ...nse_function_call_arguments_done_event.rbi | 2 +-
 .../responses/response_function_tool_call.rbi | 4 +-
 ...esponse_function_tool_call_output_item.rbi | 4 +-
 .../response_function_web_search.rbi | 4 +-
 .../responses/response_in_progress_event.rbi | 9 +-
 .../models/responses/response_includable.rbi | 2 +-
 .../responses/response_incomplete_event.rbi | 9 +-
 .../models/responses/response_input.rbi | 2 +-
 .../models/responses/response_input_audio.rbi | 4 +-
 .../responses/response_input_content.rbi | 2 +-
 .../models/responses/response_input_file.rbi | 2 +-
 .../models/responses/response_input_image.rbi | 4 +-
 .../models/responses/response_input_item.rbi | 38 ++--
 .../response_input_message_content_list.rbi | 2 +-
 .../responses/response_input_message_item.rbi | 10 +-
 .../models/responses/response_input_text.rbi | 2 +-
 .../openai/models/responses/response_item.rbi | 2 +-
 .../models/responses/response_item_list.rbi | 4 +-
 .../responses/response_output_audio.rbi | 2 +-
 .../models/responses/response_output_item.rbi | 2 +-
 .../response_output_item_added_event.rbi | 4 +-
 .../response_output_item_done_event.rbi | 4 +-
 .../responses/response_output_message.rbi | 8 +-
 .../responses/response_output_refusal.rbi | 2 +-
 .../models/responses/response_output_text.rbi | 12 +-
 .../responses/response_reasoning_item.rbi | 8 +-
 .../response_refusal_delta_event.rbi | 2 +-
 .../responses/response_refusal_done_event.rbi | 2 +-
 .../responses/response_retrieve_params.rbi | 4 +-
 .../models/responses/response_status.rbi | 2 +-
 .../responses/response_stream_event.rbi | 2 +-
 .../response_text_annotation_delta_event.rbi | 12 +-
 .../models/responses/response_text_config.rbi | 6 +-
 .../responses/response_text_delta_event.rbi | 2 +-
 .../responses/response_text_done_event.rbi | 2 +-
 .../models/responses/response_usage.rbi | 14 +-
 ...sponse_web_search_call_completed_event.rbi | 2 +-
 ...onse_web_search_call_in_progress_event.rbi | 2 +-
 ...sponse_web_search_call_searching_event.rbi | 2 +-
 rbi/lib/openai/models/responses/tool.rbi | 2 +-
 .../models/responses/tool_choice_function.rbi | 2 +-
 .../models/responses/tool_choice_options.rbi | 2 +-
 .../models/responses/tool_choice_types.rbi | 4 +-
 .../models/responses/web_search_tool.rbi | 12 +-
 rbi/lib/openai/models/responses_model.rbi | 2 +-
 .../models/static_file_chunking_strategy.rbi | 2 +-
 .../static_file_chunking_strategy_object.rbi | 9 +-
 ...ic_file_chunking_strategy_object_param.rbi | 9 +-
 rbi/lib/openai/models/upload.rbi | 8 +-
 .../openai/models/upload_cancel_params.rbi | 10 +-
 .../openai/models/upload_complete_params.rbi | 4 +-
 .../openai/models/upload_create_params.rbi | 4 +-
 .../models/uploads/part_create_params.rbi | 4 +-
 rbi/lib/openai/models/uploads/upload_part.rbi | 2 +-
 rbi/lib/openai/models/vector_store.rbi | 19 +-
 .../models/vector_store_create_params.rbi | 14 +-
 .../models/vector_store_delete_params.rbi | 10 +-
 .../openai/models/vector_store_deleted.rbi | 2 +-
 .../models/vector_store_list_params.rbi | 6 +-
 .../models/vector_store_retrieve_params.rbi | 10 +-
 .../models/vector_store_search_params.rbi | 22 +--
 .../models/vector_store_search_response.rbi | 10 +-
 .../models/vector_store_update_params.rbi | 10 +-
 .../file_batch_cancel_params.rbi | 4 +-
 .../file_batch_create_params.rbi | 10 +-
 .../file_batch_list_files_params.rbi | 8 +-
 .../file_batch_retrieve_params.rbi | 4 +-
 .../vector_stores/file_content_params.rbi | 4 +-
 .../vector_stores/file_content_response.rbi | 2 +-
 .../vector_stores/file_create_params.rbi | 10 +-
 .../vector_stores/file_delete_params.rbi | 4 +-
 .../models/vector_stores/file_list_params.rbi | 8 +-
 .../vector_stores/file_retrieve_params.rbi | 4 +-
 .../vector_stores/file_update_params.rbi | 6 +-
 .../vector_stores/vector_store_file.rbi | 22 +--
 .../vector_stores/vector_store_file_batch.rbi | 10 +-
 .../vector_store_file_deleted.rbi | 2 +-
 rbi/lib/openai/request_options.rbi | 4 +-
 rbi/lib/openai/resources/audio/speech.rbi | 2 +-
 .../openai/resources/audio/transcriptions.rbi | 4 +-
 .../openai/resources/audio/translations.rbi | 2 +-
 rbi/lib/openai/resources/batches.rbi | 8 +-
 rbi/lib/openai/resources/beta/assistants.rbi | 26 ++-
 rbi/lib/openai/resources/beta/threads.rbi | 46 +++--
 .../resources/beta/threads/messages.rbi | 14 +-
 .../openai/resources/beta/threads/runs.rbi | 50 +++---
 .../resources/beta/threads/runs/steps.rbi | 4 +-
 rbi/lib/openai/resources/chat/completions.rbi | 52 +++---
 .../resources/chat/completions/messages.rbi | 2 +-
 rbi/lib/openai/resources/completions.rbi | 8 +-
 rbi/lib/openai/resources/embeddings.rbi | 2 +-
 rbi/lib/openai/resources/files.rbi | 10 +-
 rbi/lib/openai/resources/fine_tuning/jobs.rbi | 16 +-
 .../fine_tuning/jobs/checkpoints.rbi | 2 +-
 rbi/lib/openai/resources/images.rbi | 6 +-
 rbi/lib/openai/resources/models.rbi | 6 +-
 rbi/lib/openai/resources/moderations.rbi | 4 +-
 rbi/lib/openai/resources/responses.rbi | 28 +--
 .../resources/responses/input_items.rbi | 2 +-
 rbi/lib/openai/resources/uploads.rbi | 6 +-
 rbi/lib/openai/resources/uploads/parts.rbi | 2 +-
 rbi/lib/openai/resources/vector_stores.rbi | 22 +--
 .../resources/vector_stores/file_batches.rbi | 10 +-
 .../openai/resources/vector_stores/files.rbi | 14 +-
 sig/openai/aliases.rbs | 19 --
 sig/openai/errors.rbs | 28 ---
 sig/openai/internal.rbs | 4 +
 sig/openai/internal/type/base_model.rbs | 6 +-
 sig/openai/internal/util.rbs | 2 -
 sig/openai/models/all_models.rbs | 2 +-
 .../models/audio/speech_create_params.rbs | 8 +-
 sig/openai/models/audio/speech_model.rbs | 2 +-
 sig/openai/models/audio/transcription.rbs | 4 +-
 .../audio/transcription_create_params.rbs | 6 +-
 .../audio/transcription_create_response.rbs | 2 +-
 .../models/audio/transcription_include.rbs | 2 +-
 .../models/audio/transcription_segment.rbs | 2 +-
 .../audio/transcription_stream_event.rbs | 2 +-
 .../audio/transcription_text_delta_event.rbs | 4 +-
 .../audio/transcription_text_done_event.rbs | 4 +-
 .../models/audio/transcription_verbose.rbs | 2 +-
 .../models/audio/transcription_word.rbs | 2 +-
 sig/openai/models/audio/translation.rbs | 2 +-
 .../audio/translation_create_params.rbs | 6 +-
 .../audio/translation_create_response.rbs | 2 +-
 .../models/audio/translation_verbose.rbs | 2 +-
 sig/openai/models/audio_model.rbs | 2 +-
 sig/openai/models/audio_response_format.rbs | 2 +-
 .../auto_file_chunking_strategy_param.rbs | 2 +-
 sig/openai/models/batch.rbs | 6 +-
 sig/openai/models/batch_cancel_params.rbs | 2 +-
 sig/openai/models/batch_create_params.rbs | 6 +-
 sig/openai/models/batch_error.rbs | 2 +-
 sig/openai/models/batch_list_params.rbs | 2 +-
 sig/openai/models/batch_request_counts.rbs | 2 +-
 sig/openai/models/batch_retrieve_params.rbs | 2 +-
 sig/openai/models/beta/assistant.rbs | 8 +-
 .../models/beta/assistant_create_params.rbs | 20 +--
 .../models/beta/assistant_delete_params.rbs | 2 +-
 sig/openai/models/beta/assistant_deleted.rbs | 2 +-
 .../models/beta/assistant_list_params.rbs | 4 +-
 .../beta/assistant_response_format_option.rbs | 2 +-
 .../models/beta/assistant_retrieve_params.rbs | 2 +-
 .../models/beta/assistant_stream_event.rbs | 50 +++---
 sig/openai/models/beta/assistant_tool.rbs | 2 +-
 .../models/beta/assistant_tool_choice.rbs | 4 +-
 .../beta/assistant_tool_choice_function.rbs | 2 +-
 .../beta/assistant_tool_choice_option.rbs | 4 +-
 .../models/beta/assistant_update_params.rbs | 10 +-
 .../models/beta/code_interpreter_tool.rbs | 2 +-
 sig/openai/models/beta/file_search_tool.rbs | 8 +-
 sig/openai/models/beta/function_tool.rbs | 2 +-
 .../models/beta/message_stream_event.rbs | 12 +-
 .../models/beta/run_step_stream_event.rbs | 16 +-
 sig/openai/models/beta/run_stream_event.rbs | 22 +--
 sig/openai/models/beta/thread.rbs | 8 +-
 .../beta/thread_create_and_run_params.rbs | 46 ++---
 .../models/beta/thread_create_params.rbs | 30 ++--
 .../models/beta/thread_delete_params.rbs | 2 +-
 sig/openai/models/beta/thread_deleted.rbs | 2 +-
 .../models/beta/thread_retrieve_params.rbs | 2 +-
 .../models/beta/thread_stream_event.rbs | 2 +-
 .../models/beta/thread_update_params.rbs | 8 +-
 sig/openai/models/beta/threads/annotation.rbs | 2 +-
 .../models/beta/threads/annotation_delta.rbs | 2 +-
 .../beta/threads/file_citation_annotation.rbs | 4 +-
 .../file_citation_delta_annotation.rbs | 4 +-
 .../beta/threads/file_path_annotation.rbs | 4 +-
 .../threads/file_path_delta_annotation.rbs | 4 +-
 sig/openai/models/beta/threads/image_file.rbs | 4 +-
 .../beta/threads/image_file_content_block.rbs | 2 +-
 .../models/beta/threads/image_file_delta.rbs | 4 +-
 .../beta/threads/image_file_delta_block.rbs | 2 +-
 sig/openai/models/beta/threads/image_url.rbs | 4 +-
 .../beta/threads/image_url_content_block.rbs | 2 +-
 .../models/beta/threads/image_url_delta.rbs | 4 +-
 .../beta/threads/image_url_delta_block.rbs | 2 +-
 sig/openai/models/beta/threads/message.rbs | 16 +-
 .../models/beta/threads/message_content.rbs | 2 +-
 .../beta/threads/message_content_delta.rbs | 2 +-
 .../threads/message_content_part_param.rbs | 2 +-
 .../beta/threads/message_create_params.rbs | 12 +-
 .../beta/threads/message_delete_params.rbs | 2 +-
 .../models/beta/threads/message_deleted.rbs | 2 +-
 .../models/beta/threads/message_delta.rbs | 4 +-
 .../beta/threads/message_delta_event.rbs | 2 +-
 .../beta/threads/message_list_params.rbs | 4 +-
 .../beta/threads/message_retrieve_params.rbs | 2 +-
 .../beta/threads/message_update_params.rbs | 2 +-
 .../beta/threads/refusal_content_block.rbs | 2 +-
 .../beta/threads/refusal_delta_block.rbs | 2 +-
 .../required_action_function_tool_call.rbs | 4 +-
 sig/openai/models/beta/threads/run.rbs | 20 +--
 .../models/beta/threads/run_cancel_params.rbs | 2 +-
 .../models/beta/threads/run_create_params.rbs | 20 +--
 .../models/beta/threads/run_list_params.rbs | 4 +-
 .../beta/threads/run_retrieve_params.rbs | 2 +-
 sig/openai/models/beta/threads/run_status.rbs | 2 +-
 .../run_submit_tool_outputs_params.rbs | 4 +-
 .../models/beta/threads/run_update_params.rbs | 2 +-
 .../threads/runs/code_interpreter_logs.rbs | 2 +-
 .../runs/code_interpreter_output_image.rbs | 4 +-
 .../runs/code_interpreter_tool_call.rbs | 12 +-
 .../runs/code_interpreter_tool_call_delta.rbs | 6 +-
 .../threads/runs/file_search_tool_call.rbs | 14 +-
 .../runs/file_search_tool_call_delta.rbs | 2 +-
 .../beta/threads/runs/function_tool_call.rbs | 4 +-
 .../threads/runs/function_tool_call_delta.rbs | 4 +-
 .../runs/message_creation_step_details.rbs | 4 +-
 .../models/beta/threads/runs/run_step.rbs | 14 +-
 .../beta/threads/runs/run_step_delta.rbs | 4 +-
 .../threads/runs/run_step_delta_event.rbs | 2 +-
 .../runs/run_step_delta_message_delta.rbs | 4 +-
 .../beta/threads/runs/run_step_include.rbs | 2 +-
 .../beta/threads/runs/step_list_params.rbs | 4 +-
 .../threads/runs/step_retrieve_params.rbs | 2 +-
 .../models/beta/threads/runs/tool_call.rbs | 2 +-
 .../beta/threads/runs/tool_call_delta.rbs | 2 +-
 .../threads/runs/tool_call_delta_object.rbs | 2 +-
 .../threads/runs/tool_calls_step_details.rbs | 2 +-
 sig/openai/models/beta/threads/text.rbs | 2 +-
 .../beta/threads/text_content_block.rbs | 2 +-
 .../beta/threads/text_content_block_param.rbs | 2 +-
 sig/openai/models/beta/threads/text_delta.rbs | 2 +-
 .../models/beta/threads/text_delta_block.rbs | 2 +-
 sig/openai/models/chat/chat_completion.rbs | 10 +-
 ...hat_completion_assistant_message_param.rbs | 10 +-
 .../models/chat/chat_completion_audio.rbs | 2 +-
 .../chat/chat_completion_audio_param.rbs | 6 +-
 .../models/chat/chat_completion_chunk.rbs | 22 +--
 .../chat/chat_completion_content_part.rbs | 6 +-
 .../chat_completion_content_part_image.rbs | 6 +-
 ...at_completion_content_part_input_audio.rbs | 6 +-
 .../chat_completion_content_part_refusal.rbs | 2 +-
 .../chat_completion_content_part_text.rbs | 2 +-
 .../models/chat/chat_completion_deleted.rbs | 2 +-
 ...hat_completion_developer_message_param.rbs | 4 +-
 .../chat_completion_function_call_option.rbs | 2 +-
 ...chat_completion_function_message_param.rbs | 2 +-
 .../models/chat/chat_completion_message.rbs | 8 +-
 .../chat/chat_completion_message_param.rbs | 2 +-
 .../chat_completion_message_tool_call.rbs | 4 +-
 .../models/chat/chat_completion_modality.rbs | 2 +-
 .../chat_completion_named_tool_choice.rbs | 4 +-
 .../chat_completion_prediction_content.rbs | 4 +-
 .../models/chat/chat_completion_role.rbs | 2 +-
 .../chat/chat_completion_stream_options.rbs | 2 +-
 .../chat_completion_system_message_param.rbs | 4 +-
 .../chat/chat_completion_token_logprob.rbs | 4 +-
 .../models/chat/chat_completion_tool.rbs | 2 +-
 .../chat_completion_tool_choice_option.rbs | 4 +-
 .../chat_completion_tool_message_param.rbs | 4 +-
 .../chat_completion_user_message_param.rbs | 4 +-
 .../models/chat/completion_create_params.rbs | 26 +--
 .../models/chat/completion_delete_params.rbs | 2 +-
 .../models/chat/completion_list_params.rbs | 4 +-
 .../chat/completion_retrieve_params.rbs | 2 +-
 .../models/chat/completion_update_params.rbs | 2 +-
 .../chat/completions/message_list_params.rbs | 4 +-
 sig/openai/models/chat_model.rbs | 2 +-
 sig/openai/models/comparison_filter.rbs | 6 +-
 sig/openai/models/completion.rbs | 2 +-
 sig/openai/models/completion_choice.rbs | 6 +-
 .../models/completion_create_params.rbs | 8 +-
 sig/openai/models/completion_usage.rbs | 6 +-
 sig/openai/models/compound_filter.rbs | 6 +-
 .../models/create_embedding_response.rbs | 4 +-
 sig/openai/models/embedding.rbs | 2 +-
 sig/openai/models/embedding_create_params.rbs | 8 +-
 sig/openai/models/embedding_model.rbs | 2 +-
 sig/openai/models/error_object.rbs | 2 +-
 sig/openai/models/file_chunking_strategy.rbs | 2 +-
 .../models/file_chunking_strategy_param.rbs | 2 +-
 sig/openai/models/file_content_params.rbs | 2 +-
 sig/openai/models/file_create_params.rbs | 2 +-
 sig/openai/models/file_delete_params.rbs | 2 +-
 sig/openai/models/file_deleted.rbs | 2 +-
 sig/openai/models/file_list_params.rbs | 4 +-
 sig/openai/models/file_object.rbs | 6 +-
 sig/openai/models/file_purpose.rbs | 2 +-
 sig/openai/models/file_retrieve_params.rbs | 2 +-
 .../models/fine_tuning/fine_tuning_job.rbs | 40 ++---
 .../fine_tuning/fine_tuning_job_event.rbs | 6 +-
 .../fine_tuning_job_wandb_integration.rbs | 2 +-
 ...ne_tuning_job_wandb_integration_object.rbs | 2 +-
 .../models/fine_tuning/job_cancel_params.rbs | 2 +-
 .../models/fine_tuning/job_create_params.rbs | 42 ++---
 .../fine_tuning/job_list_events_params.rbs | 2 +-
 .../models/fine_tuning/job_list_params.rbs | 2 +-
 .../fine_tuning/job_retrieve_params.rbs | 2 +-
 .../jobs/checkpoint_list_params.rbs | 2 +-
 .../jobs/fine_tuning_job_checkpoint.rbs | 4 +-
 sig/openai/models/function_definition.rbs | 2 +-
 sig/openai/models/image.rbs | 2 +-
 .../models/image_create_variation_params.rbs | 8 +-
 sig/openai/models/image_edit_params.rbs | 8 +-
 sig/openai/models/image_generate_params.rbs | 12 +-
 sig/openai/models/image_model.rbs | 2 +-
 sig/openai/models/images_response.rbs | 2 +-
 sig/openai/models/model.rbs | 2 +-
 sig/openai/models/model_delete_params.rbs | 2 +-
 sig/openai/models/model_deleted.rbs | 2 +-
 sig/openai/models/model_list_params.rbs | 2 +-
 sig/openai/models/model_retrieve_params.rbs | 2 +-
 sig/openai/models/moderation.rbs | 34 ++--
 .../models/moderation_create_params.rbs | 6 +-
 .../models/moderation_create_response.rbs | 2 +-
 .../models/moderation_image_url_input.rbs | 4 +-
 sig/openai/models/moderation_model.rbs | 2 +-
 .../models/moderation_multi_modal_input.rbs | 2 +-
 sig/openai/models/moderation_text_input.rbs | 2 +-
 .../other_file_chunking_strategy_object.rbs | 2 +-
 sig/openai/models/reasoning.rbs | 4 +-
 sig/openai/models/reasoning_effort.rbs | 2 +-
 .../models/response_format_json_object.rbs | 2 +-
 .../models/response_format_json_schema.rbs | 4 +-
 sig/openai/models/response_format_text.rbs | 2 +-
 sig/openai/models/responses/computer_tool.rbs | 4 +-
 .../models/responses/easy_input_message.rbs | 8 +-
 .../models/responses/file_search_tool.rbs | 8 +-
 sig/openai/models/responses/function_tool.rbs | 2 +-
 .../responses/input_item_list_params.rbs | 4 +-
 sig/openai/models/responses/response.rbs | 10 +-
 .../responses/response_audio_delta_event.rbs | 2 +-
 .../responses/response_audio_done_event.rbs | 2 +-
 .../response_audio_transcript_delta_event.rbs | 2 +-
 .../response_audio_transcript_done_event.rbs | 2 +-
 ...code_interpreter_call_code_delta_event.rbs | 2 +-
 ..._code_interpreter_call_code_done_event.rbs | 2 +-
 ..._code_interpreter_call_completed_event.rbs | 2 +-
 ...ode_interpreter_call_in_progress_event.rbs | 2 +-
 ...de_interpreter_call_interpreting_event.rbs | 2 +-
 .../response_code_interpreter_tool_call.rbs | 12 +-
 .../responses/response_completed_event.rbs | 2 +-
 .../responses/response_computer_tool_call.rbs | 32 ++--
 ...esponse_computer_tool_call_output_item.rbs | 6 +-
 ...e_computer_tool_call_output_screenshot.rbs | 2 +-
 .../models/responses/response_content.rbs | 2 +-
 .../response_content_part_added_event.rbs | 4 +-
 .../response_content_part_done_event.rbs | 4 +-
 .../responses/response_create_params.rbs | 8 +-
 .../responses/response_created_event.rbs | 2 +-
 .../responses/response_delete_params.rbs | 2 +-
 .../models/responses/response_error.rbs | 4 +-
 .../models/responses/response_error_event.rbs | 2 +-
 .../responses/response_failed_event.rbs | 2 +-
 ...ponse_file_search_call_completed_event.rbs | 2 +-
 ...nse_file_search_call_in_progress_event.rbs | 2 +-
 ...ponse_file_search_call_searching_event.rbs | 2 +-
 .../response_file_search_tool_call.rbs | 8 +-
 .../responses/response_format_text_config.rbs | 2 +-
 ...esponse_format_text_json_schema_config.rbs | 2 +-
 ...se_function_call_arguments_delta_event.rbs | 2 +-
 ...nse_function_call_arguments_done_event.rbs | 2 +-
 .../responses/response_function_tool_call.rbs | 4 +-
 ...esponse_function_tool_call_output_item.rbs | 4 +-
 .../response_function_web_search.rbs | 4 +-
 .../responses/response_in_progress_event.rbs | 2 +-
 .../models/responses/response_includable.rbs | 2 +-
 .../responses/response_incomplete_event.rbs | 2 +-
 .../models/responses/response_input_audio.rbs | 4 +-
 .../responses/response_input_content.rbs | 2 +-
 .../models/responses/response_input_file.rbs | 2 +-
 .../models/responses/response_input_image.rbs | 4 +-
 .../models/responses/response_input_item.rbs | 22 +--
 .../responses/response_input_message_item.rbs | 8 +-
 .../models/responses/response_input_text.rbs | 2 +-
 sig/openai/models/responses/response_item.rbs | 2 +-
 .../models/responses/response_item_list.rbs | 2 +-
 .../responses/response_output_audio.rbs | 2 +-
 .../models/responses/response_output_item.rbs | 2 +-
 .../response_output_item_added_event.rbs | 2 +-
 .../response_output_item_done_event.rbs | 2 +-
 .../responses/response_output_message.rbs | 6 +-
 .../responses/response_output_refusal.rbs | 2 +-
 .../models/responses/response_output_text.rbs | 10 +-
 .../responses/response_reasoning_item.rbs | 6 +-
 .../response_refusal_delta_event.rbs | 2 +-
 .../responses/response_refusal_done_event.rbs | 2 +-
 .../responses/response_retrieve_params.rbs | 2 +-
 .../models/responses/response_status.rbs | 2 +-
 .../responses/response_stream_event.rbs | 2 +-
 .../response_text_annotation_delta_event.rbs | 10 +-
 .../models/responses/response_text_config.rbs | 2 +-
 .../responses/response_text_delta_event.rbs | 2 +-
 .../responses/response_text_done_event.rbs | 2 +-
 .../models/responses/response_usage.rbs | 6 +-
 ...sponse_web_search_call_completed_event.rbs | 2 +-
 ...onse_web_search_call_in_progress_event.rbs | 2 +-
 ...sponse_web_search_call_searching_event.rbs | 2 +-
 sig/openai/models/responses/tool.rbs | 2 +-
 .../models/responses/tool_choice_function.rbs | 2 +-
 .../models/responses/tool_choice_options.rbs | 2 +-
 .../models/responses/tool_choice_types.rbs | 4 +-
 .../models/responses/web_search_tool.rbs | 8 +-
 sig/openai/models/responses_model.rbs | 2 +-
 .../models/static_file_chunking_strategy.rbs | 2 +-
 .../static_file_chunking_strategy_object.rbs | 2 +-
 ...ic_file_chunking_strategy_object_param.rbs | 2 +-
 sig/openai/models/upload.rbs | 4 +-
 sig/openai/models/upload_cancel_params.rbs | 2 +-
 sig/openai/models/upload_complete_params.rbs | 2 +-
 sig/openai/models/upload_create_params.rbs | 2 +-
 .../models/uploads/part_create_params.rbs | 2 +-
 sig/openai/models/uploads/upload_part.rbs | 2 +-
 sig/openai/models/vector_store.rbs | 8 +-
 .../models/vector_store_create_params.rbs | 4 +-
 .../models/vector_store_delete_params.rbs | 2 +-
 sig/openai/models/vector_store_deleted.rbs | 2 +-
 .../models/vector_store_list_params.rbs | 4 +-
 .../models/vector_store_retrieve_params.rbs | 2 +-
 .../models/vector_store_search_params.rbs | 10 +-
 .../models/vector_store_search_response.rbs | 8 +-
 .../models/vector_store_update_params.rbs | 4 +-
 .../file_batch_cancel_params.rbs | 2 +-
 .../file_batch_create_params.rbs | 4 +-
 .../file_batch_list_files_params.rbs | 6 +-
 .../file_batch_retrieve_params.rbs | 2 +-
 .../vector_stores/file_content_params.rbs | 2 +-
 .../vector_stores/file_content_response.rbs | 2 +-
 .../vector_stores/file_create_params.rbs | 4 +-
 .../vector_stores/file_delete_params.rbs | 2 +-
 .../models/vector_stores/file_list_params.rbs | 6 +-
 .../vector_stores/file_retrieve_params.rbs | 2 +-
 .../vector_stores/file_update_params.rbs | 4 +-
 .../vector_stores/vector_store_file.rbs | 10 +-
 .../vector_stores/vector_store_file_batch.rbs | 6 +-
 .../vector_store_file_deleted.rbs | 2 +-
 sig/openai/request_options.rbs | 2 +-
 test/openai/client_test.rb | 20 +--
 test/openai/internal/type/base_model_test.rb | 104 +++++------
 test/openai/internal/util_test.rb | 2 +-
 test/openai/resources/batches_test.rb | 8 +-
 test/openai/resources/beta/assistants_test.rb | 18 +-
 .../resources/beta/threads/messages_test.rb | 26 +--
 .../resources/beta/threads/runs/steps_test.rb | 4 +-
 .../resources/beta/threads/runs_test.rb | 36 ++--
 test/openai/resources/beta/threads_test.rb | 14 +-
 .../openai/resources/chat/completions_test.rb | 10 +-
 test/openai/resources/completions_test.rb | 2 +-
 test/openai/resources/embeddings_test.rb | 2 +-
 test/openai/resources/files_test.rb | 2 +-
 .../openai/resources/fine_tuning/jobs_test.rb | 26 +--
 test/openai/resources/images_test.rb | 6 +-
 test/openai/resources/models_test.rb | 2 +-
 test/openai/resources/moderations_test.rb | 2 +-
 .../resources/responses/input_items_test.rb | 12 +-
 test/openai/resources/responses_test.rb | 16 +-
 .../vector_stores/file_batches_test.rb | 2 +-
 .../resources/vector_stores/files_test.rb | 10 +-
 test/openai/resources/vector_stores_test.rb | 14 +-
 1048 files changed, 4327 insertions(+), 4654 deletions(-)
 delete mode 100644 lib/openai/aliases.rb
 create mode 100644 lib/openai/internal.rb
 delete mode 100644 rbi/lib/openai/aliases.rbi
 create mode 100644 rbi/lib/openai/internal.rbi
 delete mode 100644 sig/openai/aliases.rbs
 create mode 100644 sig/openai/internal.rbs

diff --git a/lib/openai.rb b/lib/openai.rb
index 8e87e568..9b3ac42c 100644
--- a/lib/openai.rb
+++ b/lib/openai.rb
@@ -48,7 +48,7 @@
 require_relative "openai/internal/type/base_page"
 require_relative "openai/internal/type/base_stream"
 require_relative "openai/internal/type/request_parameters"
-require_relative "openai/aliases"
+require_relative "openai/internal"
 require_relative "openai/request_options"
 require_relative "openai/errors"
 require_relative "openai/internal/transport/base_client"
diff --git a/lib/openai/aliases.rb b/lib/openai/aliases.rb
deleted file mode 100644
index da013a3e..00000000
--- a/lib/openai/aliases.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-# frozen_string_literal: true
-
-module OpenAI
-  Unknown = OpenAI::Internal::Type::Unknown
-
-  BooleanModel = OpenAI::Internal::Type::BooleanModel
-
-  Enum = OpenAI::Internal::Type::Enum
-
-  Union = OpenAI::Internal::Type::Union
-
-  ArrayOf = OpenAI::Internal::Type::ArrayOf
-
-  HashOf = OpenAI::Internal::Type::HashOf
-
-  BaseModel = OpenAI::Internal::Type::BaseModel
-
-  RequestParameters = OpenAI::Internal::Type::RequestParameters
-end
diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb
index 4b5fc99f..73e74f52 100644
--- a/lib/openai/errors.rb
+++ b/lib/openai/errors.rb
@@ -225,32 +225,4 @@ class InternalServerError < OpenAI::Errors::APIStatusError
       HTTP_STATUS = (500..)
     end
   end
-
-  Error = OpenAI::Errors::Error
-
-  ConversionError = OpenAI::Errors::ConversionError
-
-  APIError = OpenAI::Errors::APIError
-
-  APIStatusError = OpenAI::Errors::APIStatusError
-
-  APIConnectionError = OpenAI::Errors::APIConnectionError
-
-  APITimeoutError = OpenAI::Errors::APITimeoutError
-
-  BadRequestError = OpenAI::Errors::BadRequestError
-
-  AuthenticationError = OpenAI::Errors::AuthenticationError
-
-  PermissionDeniedError = OpenAI::Errors::PermissionDeniedError
-
-  NotFoundError = OpenAI::Errors::NotFoundError
-
-  ConflictError = OpenAI::Errors::ConflictError
-
-  UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError
-
-  RateLimitError = OpenAI::Errors::RateLimitError
-
-  InternalServerError = OpenAI::Errors::InternalServerError
 end
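With the top-level re-exports above deleted, caller code must rescue the namespaced error classes directly. A minimal usage sketch, not part of the patch; it assumes a `client` exposing the `models` resource, and rescues the more specific class first:

    begin
      client.models.list
    rescue OpenAI::Errors::APITimeoutError
      retry
    rescue OpenAI::Errors::APIConnectionError => e
      # the short constant OpenAI::APIConnectionError no longer exists
      warn("connection failed: #{e.message}")
    end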
diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb
new file mode 100644
index 00000000..42050499
--- /dev/null
+++ b/lib/openai/internal.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+module OpenAI
+  # @api private
+  module Internal
+    OMIT = Object.new.freeze
+  end
+end
diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb
index 013e2d64..cd7979be 100644
--- a/lib/openai/internal/transport/base_client.rb
+++ b/lib/openai/internal/transport/base_client.rb
@@ -339,7 +339,7 @@ def initialize(
       begin
         status, response, stream = @requester.execute(input)
-      rescue OpenAI::APIConnectionError => e
+      rescue OpenAI::Errors::APIConnectionError => e
         status = e
       end

@@ -361,7 +361,7 @@ def initialize(
           retry_count: retry_count,
           send_retry_header: send_retry_header
         )
-      in OpenAI::APIConnectionError if retry_count >= max_retries
+      in OpenAI::Errors::APIConnectionError if retry_count >= max_retries
         raise status
       in (400..) if retry_count >= max_retries || !self.class.should_retry?(status, headers: response)
         decoded = Kernel.then do
@@ -421,7 +421,7 @@ def initialize(
     # @return [Object]
     def request(req)
       self.class.validate!(req)
-      model = req.fetch(:model) { OpenAI::Unknown }
+      model = req.fetch(:model) { OpenAI::Internal::Type::Unknown }
       opts = req[:options].to_h
       OpenAI::RequestOptions.validate!(opts)
       request = build_request(req.except(:options), opts)
diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb
index 65fa4ccb..5c67fc15 100644
--- a/lib/openai/internal/type/array_of.rb
+++ b/lib/openai/internal/type/array_of.rb
@@ -33,7 +33,9 @@ def ===(other) = other.is_a?(Array) && other.all?(item_type)
     #
     # @return [Boolean]
     def ==(other)
-      other.is_a?(OpenAI::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type
+      # rubocop:disable Layout/LineLength
+      other.is_a?(OpenAI::Internal::Type::ArrayOf) && other.nilable? == nilable? && other.item_type == item_type
+      # rubocop:enable Layout/LineLength
     end

     # @api private
diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb
index 8984ef64..5dde350e 100644
--- a/lib/openai/internal/type/base_model.rb
+++ b/lib/openai/internal/type/base_model.rb
@@ -23,7 +23,7 @@ class << self
       #
       # @return [Hash{Symbol=>Hash{Symbol=>Object}}]
       def known_fields
-        @known_fields ||= (self < OpenAI::BaseModel ? superclass.known_fields.dup : {})
+        @known_fields ||= (self < OpenAI::Internal::Type::BaseModel ? superclass.known_fields.dup : {})
       end

       # @api private
@@ -64,14 +64,7 @@ def fields
         setter = "#{name_sym}="
         api_name = info.fetch(:api_name, name_sym)
         nilable = info[:nil?]
-        const = if required && !nilable
-          info.fetch(
-            :const,
-            OpenAI::Internal::Util::OMIT
-          )
-        else
-          OpenAI::Internal::Util::OMIT
-        end
+        const = required && !nilable ? info.fetch(:const, OpenAI::Internal::OMIT) : OpenAI::Internal::OMIT

         [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym)
@@ -89,7 +82,7 @@ def fields
         define_method(name_sym) do
           target = type_fn.call
-          value = @data.fetch(name_sym) { const == OpenAI::Internal::Util::OMIT ? nil : const }
+          value = @data.fetch(name_sym) { const == OpenAI::Internal::OMIT ? nil : const }
           state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
           if (nilable || !required) && value.nil?
             nil
@@ -105,7 +98,7 @@ def fields
             # rubocop:disable Layout/LineLength
             message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]."
             # rubocop:enable Layout/LineLength
-            raise OpenAI::ConversionError.new(message)
+            raise OpenAI::Errors::ConversionError.new(message)
           end
         end
@@ -175,7 +168,7 @@ def optional(name_sym, type_info, spec = {})
       # @param other [Object]
       #
       # @return [Boolean]
-      def ==(other) = other.is_a?(Class) && other <= OpenAI::BaseModel && other.fields == fields
+      def ==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields
     end

     # @param other [Object]
@@ -186,7 +179,7 @@ def ==(other) = self.class == other.class && @data == other.to_h
     class << self
       # @api private
       #
-      # @param value [OpenAI::BaseModel, Hash{Object=>Object}, Object]
+      # @param value [OpenAI::Internal::Type::BaseModel, Hash{Object=>Object}, Object]
      #
       # @param state [Hash{Symbol=>Object}] .
       #
       # @option state [Integer] :branched
       #
-      # @return [OpenAI::BaseModel, Object]
+      # @return [OpenAI::Internal::Type::BaseModel, Object]
       def coerce(value, state:)
         exactness = state.fetch(:exactness)
@@ -221,7 +214,7 @@ def coerce(value, state:)
           api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const)

           unless val.key?(api_name)
-            if required && mode != :dump && const == OpenAI::Internal::Util::OMIT
+            if required && mode != :dump && const == OpenAI::Internal::OMIT
               exactness[nilable ? :maybe : :no] += 1
             else
               exactness[:yes] += 1
@@ -255,7 +248,7 @@ def coerce(value, state:)
       # @api private
       #
-      # @param value [OpenAI::BaseModel, Object]
+      # @param value [OpenAI::Internal::Type::BaseModel, Object]
       #
       # @return [Hash{Object=>Object}, Object]
       def dump(value)
@@ -284,7 +277,7 @@ def dump(value)
         known_fields.each_value do |field|
           mode, api_name, const = field.fetch_values(:mode, :api_name, :const)
-          next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Internal::Util::OMIT
+          next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Internal::OMIT

           acc.store(api_name, const)
         end
@@ -351,13 +344,13 @@ def to_yaml(*a) = self.class.dump(self).to_yaml(*a)
     # Create a new instance of a model.
     #
-    # @param data [Hash{Symbol=>Object}, OpenAI::BaseModel]
+    # @param data [Hash{Symbol=>Object}, OpenAI::Internal::Type::BaseModel]
     def initialize(data = {})
       case OpenAI::Internal::Util.coerce_hash(data)
       in Hash => coerced
         @data = coerced
       else
-        raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::BaseModel}, got #{data.inspect}")
+        raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::Internal::Type::BaseModel}, got #{data.inspect}")
       end
     end
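The collapsed `const` logic above defaults absent fields through the relocated `OpenAI::Internal::OMIT` sentinel, which lets readers distinguish "key never supplied" from an explicit `nil`. A minimal sketch of declaring a model against the fully qualified base class; `Widget` is hypothetical and not part of the SDK:

    class Widget < OpenAI::Internal::Type::BaseModel
      # required field: a failed coercion surfaces as OpenAI::Errors::ConversionError on access
      required :name, String

      # optional field: reads back as nil when the key was never supplied
      optional :tags, OpenAI::Internal::Type::ArrayOf[String]
    end

    w = Widget.new(name: "sprocket")
    w.name # => "sprocket"
    w.tags # => nil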
diff --git a/lib/openai/internal/type/boolean_model.rb b/lib/openai/internal/type/boolean_model.rb
index 369fd741..cf1dcf00 100644
--- a/lib/openai/internal/type/boolean_model.rb
+++ b/lib/openai/internal/type/boolean_model.rb
@@ -19,7 +19,7 @@ def self.===(other) = other == true || other == false
     # @param other [Object]
     #
     # @return [Boolean]
-    def self.==(other) = other.is_a?(Class) && other <= OpenAI::BooleanModel
+    def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::BooleanModel

     class << self
       # @api private
diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb
index 29aad06d..ee4589cf 100644
--- a/lib/openai/internal/type/converter.rb
+++ b/lib/openai/internal/type/converter.rb
@@ -31,10 +31,10 @@ def coerce(value, state:) = (raise NotImplementedError)
     def dump(value)
       case value
       in Array
-        value.map { OpenAI::Unknown.dump(_1) }
+        value.map { OpenAI::Internal::Type::Unknown.dump(_1) }
       in Hash
-        value.transform_values { OpenAI::Unknown.dump(_1) }
-      in OpenAI::BaseModel
+        value.transform_values { OpenAI::Internal::Type::Unknown.dump(_1) }
+      in OpenAI::Internal::Type::BaseModel
         value.class.dump(value)
       else
         value
@@ -64,7 +64,7 @@ def type_info(spec)
       in Hash
         type_info(spec.slice(:const, :enum, :union).first&.last)
       in true | false
-        -> { OpenAI::BooleanModel }
+        -> { OpenAI::Internal::Type::BooleanModel }
       in OpenAI::Internal::Type::Converter | Class | Symbol
         -> { spec }
       in NilClass | Integer | Float
@@ -209,7 +209,7 @@ def coerce(
       #
       # @return [Object]
       def dump(target, value)
-        target.is_a?(OpenAI::Internal::Type::Converter) ? target.dump(value) : OpenAI::Unknown.dump(value)
+        target.is_a?(OpenAI::Internal::Type::Converter) ? target.dump(value) : OpenAI::Internal::Type::Unknown.dump(value)
       end
     end
   end
diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb
index 861f0ac9..de4e7a7a 100644
--- a/lib/openai/internal/type/enum.rb
+++ b/lib/openai/internal/type/enum.rb
@@ -62,7 +62,7 @@ def ===(other) = values.include?(other)
     #
     # @return [Boolean]
     def ==(other)
-      other.is_a?(Module) && other.singleton_class <= OpenAI::Enum && other.values.to_set == values.to_set
+      other.is_a?(Module) && other.singleton_class <= OpenAI::Internal::Type::Enum && other.values.to_set == values.to_set
     end

     # @api private
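Enum modules test membership with `===` (`values.include?(other)`, per the hunk context above) and compare by value set. A quick sketch of the pattern under the new namespace; `Color` is hypothetical:

    module Color
      extend OpenAI::Internal::Type::Enum

      RED = :red
      BLUE = :blue
    end

    Color === :red  # => true, membership via case-equality
    Color === :teal # => false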
diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb
index d07f7ccd..eb5b546e 100644
--- a/lib/openai/internal/type/hash_of.rb
+++ b/lib/openai/internal/type/hash_of.rb
@@ -48,7 +48,9 @@ def ===(other)
     #
     # @return [Boolean]
     def ==(other)
-      other.is_a?(OpenAI::HashOf) && other.nilable? == nilable? && other.item_type == item_type
+      # rubocop:disable Layout/LineLength
+      other.is_a?(OpenAI::Internal::Type::HashOf) && other.nilable? == nilable? && other.item_type == item_type
+      # rubocop:enable Layout/LineLength
     end

     # @api private
diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb
index 8a693e53..93ebb068 100644
--- a/lib/openai/internal/type/request_parameters.rb
+++ b/lib/openai/internal/type/request_parameters.rb
@@ -12,7 +12,7 @@ module RequestParameters
       # @param mod [Module]
       def self.included(mod)
-        return unless mod <= OpenAI::BaseModel
+        return unless mod <= OpenAI::Internal::Type::BaseModel

         mod.extend(OpenAI::Internal::Type::RequestParameters::Converter)
         mod.optional(:request_options, OpenAI::RequestOptions)
diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb
index bf2b376e..18c6cf0e 100644
--- a/lib/openai/internal/type/union.rb
+++ b/lib/openai/internal/type/union.rb
@@ -93,14 +93,14 @@ def variants = derefed_variants.map(&:last)
     # @return [OpenAI::Internal::Type::Converter, Class, nil]
     private def resolve_variant(value)
       case [@discriminator, value]
-      in [_, OpenAI::BaseModel]
+      in [_, OpenAI::Internal::Type::BaseModel]
         value.class
       in [Symbol, Hash]
         key = value.fetch(@discriminator) do
-          value.fetch(@discriminator.to_s, OpenAI::Internal::Util::OMIT)
+          value.fetch(@discriminator.to_s, OpenAI::Internal::OMIT)
         end

-        return nil if key == OpenAI::Internal::Util::OMIT
+        return nil if key == OpenAI::Internal::OMIT

         key = key.to_sym if key.is_a?(String)
         known_variants.find { |k,| k == key }&.last&.call
@@ -126,7 +126,7 @@ def ===(other)
     # @return [Boolean]
     def ==(other)
       # rubocop:disable Layout/LineLength
-      other.is_a?(Module) && other.singleton_class <= OpenAI::Union && other.derefed_variants == derefed_variants
+      other.is_a?(Module) && other.singleton_class <= OpenAI::Internal::Type::Union && other.derefed_variants == derefed_variants
       # rubocop:enable Layout/LineLength
     end
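In `resolve_variant` above, a hash's discriminator key is now read with `OpenAI::Internal::OMIT` as the "key absent" marker, and string keys are symbolized before the variant lookup. A sketch of the discriminated form; `Shape`, `Circle`, and `Square` are hypothetical, and the keyed `variant` call mirrors how generated unions register variants:

    module Shape
      extend OpenAI::Internal::Type::Union

      discriminator :type

      variant :circle, -> { Circle }
      variant :square, -> { Square }
    end

    # {type: "circle", ...} or {"type" => "circle", ...} resolves to Circle;
    # a hash with no :type key resolves to no variant at all.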
diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb
index 589bd2ba..5d06c449 100644
--- a/lib/openai/internal/type/unknown.rb
+++ b/lib/openai/internal/type/unknown.rb
@@ -21,7 +21,7 @@ def self.===(other) = true
     # @param other [Object]
     #
     # @return [Boolean]
-    def self.==(other) = other.is_a?(Class) && other <= OpenAI::Unknown
+    def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::Unknown

     class << self
       # @api private
diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb
index 6fb201e3..da915e57 100644
--- a/lib/openai/internal/util.rb
+++ b/lib/openai/internal/util.rb
@@ -148,13 +148,6 @@ def coerce_hash(input)
       end
     end

-    # Use this to indicate that a value should be explicitly removed from a data
-    # structure when using `OpenAI::Internal::Util.deep_merge`.
-    #
-    # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging
-    # `{a: 1}` and `{}` would produce `{a: 1}`.
-    OMIT = Object.new.freeze
-
     class << self
       # @api private
       #
@@ -166,9 +159,9 @@ class << self
       private def deep_merge_lr(lhs, rhs, concat: false)
         case [lhs, rhs, concat]
         in [Hash, Hash, _]
-          rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::Util::OMIT }
+          rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::OMIT }
           lhs
-            .reject { |key, _| rhs[key] == OpenAI::Internal::Util::OMIT }
+            .reject { |key, _| rhs[key] == OpenAI::Internal::OMIT }
             .merge(rhs_cleaned) do |_, old_val, new_val|
               deep_merge_lr(old_val, new_val, concat: concat)
             end
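The deleted comment above still states the sentinel's contract; only its home moves to `lib/openai/internal.rb`. A quick check of that contract, assuming `deep_merge` remains public on `OpenAI::Internal::Util`:

    OpenAI::Internal::Util.deep_merge({a: 1}, {a: OpenAI::Internal::OMIT}) # => {}
    OpenAI::Internal::Util.deep_merge({a: 1}, {})                          # => {a: 1}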
- OMIT = Object.new.freeze - class << self # @api private # @@ -166,9 +159,9 @@ class << self private def deep_merge_lr(lhs, rhs, concat: false) case [lhs, rhs, concat] in [Hash, Hash, _] - rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::Util::OMIT } + rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::OMIT } lhs - .reject { |key, _| rhs[key] == OpenAI::Internal::Util::OMIT } + .reject { |key, _| rhs[key] == OpenAI::Internal::OMIT } .merge(rhs_cleaned) do |_, old_val, new_val| deep_merge_lr(old_val, new_val, concat: concat) end diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index f09c1abd..6ea63fb0 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -3,7 +3,7 @@ module OpenAI module Models module AllModels - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 0729f543..94b31a52 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Audio # @see OpenAI::Resources::Audio::Speech#create - class SpeechCreateParams < OpenAI::BaseModel + class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -75,12 +75,12 @@ class SpeechCreateParams < OpenAI::BaseModel # # # def initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -97,7 +97,7 @@ module Model # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). module Voice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -147,7 +147,7 @@ module Voice # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MP3 = :mp3 OPUS = :opus diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index fad7aa94..504b7319 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Audio module SpeechModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TTS_1 = :"tts-1" TTS_1_HD = :"tts-1-hd" diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 8185ea6c..abe567ce 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class Transcription < OpenAI::BaseModel + class Transcription < OpenAI::Internal::Type::BaseModel # @!attribute text # The transcribed text. # @@ -16,7 +16,7 @@ class Transcription < OpenAI::BaseModel # to the `include` array. 
diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb
index 063af4b9..f3dc4d0d 100644
--- a/lib/openai/models/audio/transcription_create_params.rb
+++ b/lib/openai/models/audio/transcription_create_params.rb
@@ -6,7 +6,7 @@ module Audio
       # @see OpenAI::Resources::Audio::Transcriptions#create
       #
       # @see OpenAI::Resources::Audio::Transcriptions#stream_raw
-      class TranscriptionCreateParams < OpenAI::BaseModel
+      class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel
         # @!parse
         #   extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters
@@ -34,7 +34,8 @@ class TranscriptionCreateParams < OpenAI::BaseModel
         #   `gpt-4o-mini-transcribe`.
         #
         #   @return [Array, nil]
-        optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] }
+        optional :include,
+                 -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] }

         # @!parse
         #   # @return [Array]
@@ -100,7 +101,7 @@ class TranscriptionCreateParams < OpenAI::BaseModel
         #
         #   @return [Array, nil]
         optional :timestamp_granularities,
-                 -> { OpenAI::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] }
+                 -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] }

         # @!parse
         #   # @return [Array]
@@ -132,13 +133,13 @@ class TranscriptionCreateParams < OpenAI::BaseModel
         #   #   super
         #   # end

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

         # ID of the model to use. The options are `gpt-4o-transcribe`,
         # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source
         # Whisper V2 model).
         module Model
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           variant String
@@ -151,7 +152,7 @@ module Model
         end

         module TimestampGranularity
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           WORD = :word
           SEGMENT = :segment
diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb
index efebb7c6..a8026630 100644
--- a/lib/openai/models/audio/transcription_create_response.rb
+++ b/lib/openai/models/audio/transcription_create_response.rb
@@ -10,7 +10,7 @@ module Audio
       #
       # @see OpenAI::Resources::Audio::Transcriptions#stream_raw
       module TranscriptionCreateResponse
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         # Represents a transcription response returned by model, based on the provided input.
         variant -> { OpenAI::Models::Audio::Transcription }
diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb
index 7c76eb59..5cde821b 100644
--- a/lib/openai/models/audio/transcription_include.rb
+++ b/lib/openai/models/audio/transcription_include.rb
@@ -4,7 +4,7 @@
 module OpenAI
   module Models
     module Audio
       module TranscriptionInclude
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         LOGPROBS = :logprobs
diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb
index b219af36..02990b46 100644
--- a/lib/openai/models/audio/transcription_segment.rb
+++ b/lib/openai/models/audio/transcription_segment.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranscriptionSegment < OpenAI::BaseModel
+      class TranscriptionSegment < OpenAI::Internal::Type::BaseModel
         # @!attribute id
         #   Unique identifier of the segment.
         #
@@ -65,7 +65,7 @@ class TranscriptionSegment < OpenAI::BaseModel
         #   Array of token IDs for the text content.
         #
         #   @return [Array]
-        required :tokens, OpenAI::ArrayOf[Integer]
+        required :tokens, OpenAI::Internal::Type::ArrayOf[Integer]

         # @!parse
         #   # @param id [Integer]
@@ -95,7 +95,7 @@ class TranscriptionSegment < OpenAI::BaseModel
         #   #   super
         #   # end

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
       end
     end
   end
diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb
index 40437586..171cfd23 100644
--- a/lib/openai/models/audio/transcription_stream_event.rb
+++ b/lib/openai/models/audio/transcription_stream_event.rb
@@ -8,7 +8,7 @@ module Audio
       # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription)
       # with the `Stream` parameter set to `true`.
       module TranscriptionStreamEvent
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         discriminator :type
diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb
index ec8ca4f4..4a858738 100644
--- a/lib/openai/models/audio/transcription_text_delta_event.rb
+++ b/lib/openai/models/audio/transcription_text_delta_event.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranscriptionTextDeltaEvent < OpenAI::BaseModel
+      class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel
         # @!attribute delta
         #   The text delta that was additionally transcribed.
         #
@@ -22,7 +22,8 @@ class TranscriptionTextDeltaEvent < OpenAI::BaseModel
         #   with the `include[]` parameter set to `logprobs`.
         #
         #   @return [Array, nil]
-        optional :logprobs, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] }
+        optional :logprobs,
+                 -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] }

         # @!parse
         #   # @return [Array]
@@ -40,9 +41,9 @@ class TranscriptionTextDeltaEvent < OpenAI::BaseModel
         #   #
         #   def initialize(delta:, logprobs: nil, type: :"transcript.text.delta", **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

-        class Logprob < OpenAI::BaseModel
+        class Logprob < OpenAI::Internal::Type::BaseModel
           # @!attribute [r] token
           #   The token that was used to generate the log probability.
           #
@@ -57,7 +58,7 @@ class Logprob < OpenAI::BaseModel
           #   The bytes that were used to generate the log probability.
           #
           #   @return [Array, nil]
-          optional :bytes, OpenAI::ArrayOf[OpenAI::Unknown]
+          optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown]

           # @!parse
           #   # @return [Array]
@@ -80,7 +81,7 @@ class Logprob < OpenAI::BaseModel
           #   #
           #   def initialize(token: nil, bytes: nil, logprob: nil, **) = super

-          # def initialize: (Hash | OpenAI::BaseModel) -> void
+          # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
        end
      end
    end
diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb
index b2a78b25..9795995a 100644
--- a/lib/openai/models/audio/transcription_text_done_event.rb
+++ b/lib/openai/models/audio/transcription_text_done_event.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranscriptionTextDoneEvent < OpenAI::BaseModel
+      class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel
         # @!attribute text
         #   The text that was transcribed.
         #
@@ -23,7 +23,8 @@ class TranscriptionTextDoneEvent < OpenAI::BaseModel
         #   with the `include[]` parameter set to `logprobs`.
         #
         #   @return [Array, nil]
-        optional :logprobs, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] }
+        optional :logprobs,
+                 -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] }

         # @!parse
         #   # @return [Array]
@@ -41,9 +42,9 @@ class TranscriptionTextDoneEvent < OpenAI::BaseModel
         #   #
         #   def initialize(text:, logprobs: nil, type: :"transcript.text.done", **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

-        class Logprob < OpenAI::BaseModel
+        class Logprob < OpenAI::Internal::Type::BaseModel
           # @!attribute [r] token
           #   The token that was used to generate the log probability.
           #
@@ -57,7 +58,7 @@ class Logprob < OpenAI::BaseModel
           #   The bytes that were used to generate the log probability.
           #
           #   @return [Array, nil]
-          optional :bytes, OpenAI::ArrayOf[OpenAI::Unknown]
+          optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown]

           # @!parse
           #   # @return [Array]
@@ -80,7 +81,7 @@ class Logprob < OpenAI::BaseModel
           #   #
           #   def initialize(token: nil, bytes: nil, logprob: nil, **) = super

-          # def initialize: (Hash | OpenAI::BaseModel) -> void
+          # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
        end
      end
    end
diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb
index 3b66f702..1dfb5931 100644
--- a/lib/openai/models/audio/transcription_verbose.rb
+++ b/lib/openai/models/audio/transcription_verbose.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranscriptionVerbose < OpenAI::BaseModel
+      class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel
         # @!attribute duration
         #   The duration of the input audio.
         #
@@ -26,7 +26,7 @@ class TranscriptionVerbose < OpenAI::BaseModel
         #   Segments of the transcribed text and their corresponding details.
         #
         #   @return [Array, nil]
-        optional :segments, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] }
+        optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] }

         # @!parse
         #   # @return [Array]
@@ -36,7 +36,7 @@ class TranscriptionVerbose < OpenAI::BaseModel
         #   Extracted words and their corresponding timestamps.
         #
         #   @return [Array, nil]
-        optional :words, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionWord] }
+        optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionWord] }

         # @!parse
         #   # @return [Array]
@@ -54,7 +54,7 @@ class TranscriptionVerbose < OpenAI::BaseModel
         #   #
         #   def initialize(duration:, language:, text:, segments: nil, words: nil, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
      end
    end
  end
diff --git a/lib/openai/models/audio/transcription_word.rb b/lib/openai/models/audio/transcription_word.rb
index f331b86c..5df30687 100644
--- a/lib/openai/models/audio/transcription_word.rb
+++ b/lib/openai/models/audio/transcription_word.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranscriptionWord < OpenAI::BaseModel
+      class TranscriptionWord < OpenAI::Internal::Type::BaseModel
         # @!attribute end_
         #   End time of the word in seconds.
         #
@@ -29,7 +29,7 @@ class TranscriptionWord < OpenAI::BaseModel
         #   #
         #   def initialize(end_:, start:, word:, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
      end
    end
  end
diff --git a/lib/openai/models/audio/translation.rb b/lib/openai/models/audio/translation.rb
index 7b9597ca..c9631757 100644
--- a/lib/openai/models/audio/translation.rb
+++ b/lib/openai/models/audio/translation.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class Translation < OpenAI::BaseModel
+      class Translation < OpenAI::Internal::Type::BaseModel
         # @!attribute text
         #
         #   @return [String]
@@ -14,7 +14,7 @@ class Translation < OpenAI::BaseModel
         #   #
         #   def initialize(text:, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
      end
    end
  end
diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb
index 57b2727c..93833eb2 100644
--- a/lib/openai/models/audio/translation_create_params.rb
+++ b/lib/openai/models/audio/translation_create_params.rb
@@ -4,7 +4,7 @@
 module OpenAI
   module Models
     module Audio
       # @see OpenAI::Resources::Audio::Translations#create
-      class TranslationCreateParams < OpenAI::BaseModel
+      class TranslationCreateParams < OpenAI::Internal::Type::BaseModel
         # @!parse
         #   extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters
@@ -71,12 +71,12 @@ class TranslationCreateParams < OpenAI::BaseModel
         #   #
         #   def initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

         # ID of the model to use. Only `whisper-1` (which is powered by our open source
         # Whisper V2 model) is currently available.
         module Model
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           variant String
@@ -91,7 +91,7 @@ module Model
         # The format of the output, in one of these options: `json`, `text`, `srt`,
         # `verbose_json`, or `vtt`.
         module ResponseFormat
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           JSON = :json
           TEXT = :text
diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb
index 3beab227..f50f6c86 100644
--- a/lib/openai/models/audio/translation_create_response.rb
+++ b/lib/openai/models/audio/translation_create_response.rb
@@ -5,7 +5,7 @@ module Models
     module Audio
       # @see OpenAI::Resources::Audio::Translations#create
       module TranslationCreateResponse
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         variant -> { OpenAI::Models::Audio::Translation }
diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb
index eb6ecfdd..b8a970d0 100644
--- a/lib/openai/models/audio/translation_verbose.rb
+++ b/lib/openai/models/audio/translation_verbose.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Audio
-      class TranslationVerbose < OpenAI::BaseModel
+      class TranslationVerbose < OpenAI::Internal::Type::BaseModel
         # @!attribute duration
         #   The duration of the input audio.
         #
@@ -26,7 +26,7 @@ class TranslationVerbose < OpenAI::BaseModel
         #   Segments of the translated text and their corresponding details.
         #
         #   @return [Array, nil]
-        optional :segments, -> { OpenAI::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] }
+        optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] }

         # @!parse
         #   # @return [Array]
@@ -40,7 +40,7 @@ class TranslationVerbose < OpenAI::BaseModel
         #   #
         #   def initialize(duration:, language:, text:, segments: nil, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
      end
    end
  end
diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb
index d0ffd0c5..4373ee6b 100644
--- a/lib/openai/models/audio_model.rb
+++ b/lib/openai/models/audio_model.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module AudioModel
-      extend OpenAI::Enum
+      extend OpenAI::Internal::Type::Enum

       WHISPER_1 = :"whisper-1"
       GPT_4O_TRANSCRIBE = :"gpt-4o-transcribe"
diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb
index 0b6e75b3..0904c657 100644
--- a/lib/openai/models/audio_response_format.rb
+++ b/lib/openai/models/audio_response_format.rb
@@ -6,7 +6,7 @@ module Models
     # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`,
     # the only supported format is `json`.
     module AudioResponseFormat
-      extend OpenAI::Enum
+      extend OpenAI::Internal::Type::Enum

       JSON = :json
       TEXT = :text
diff --git a/lib/openai/models/auto_file_chunking_strategy_param.rb b/lib/openai/models/auto_file_chunking_strategy_param.rb
index 124d43f7..15d03f11 100644
--- a/lib/openai/models/auto_file_chunking_strategy_param.rb
+++ b/lib/openai/models/auto_file_chunking_strategy_param.rb
@@ -2,7 +2,7 @@

 module OpenAI
   module Models
-    class AutoFileChunkingStrategyParam < OpenAI::BaseModel
+    class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel
       # @!attribute type
       #   Always `auto`.
       #
@@ -17,7 +17,7 @@ class AutoFileChunkingStrategyParam < OpenAI::BaseModel
       #   #
       #   def initialize(type: :auto, **) = super

-      # def initialize: (Hash | OpenAI::BaseModel) -> void
+      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
    end
  end
diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb
index 5d59baff..be9ede39 100644
--- a/lib/openai/models/batch.rb
+++ b/lib/openai/models/batch.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     # @see OpenAI::Resources::Batches#create
-    class Batch < OpenAI::BaseModel
+    class Batch < OpenAI::Internal::Type::BaseModel
       # @!attribute id
       #
       #   @return [String]
@@ -153,7 +153,7 @@ class Batch < OpenAI::BaseModel
       #   a maximum length of 512 characters.
       #
       #   @return [Hash{Symbol=>String}, nil]
-      optional :metadata, OpenAI::HashOf[String], nil?: true
+      optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true

       # @!attribute [r] output_file_id
       #   The ID of the file containing the outputs of successfully executed requests.
@@ -223,13 +223,13 @@ class Batch < OpenAI::BaseModel
       #   #   super
       #   # end

-      # def initialize: (Hash | OpenAI::BaseModel) -> void
+      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

       # The current status of the batch.
       #
       # @see OpenAI::Models::Batch#status
       module Status
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         VALIDATING = :validating
         FAILED = :failed
@@ -248,11 +248,11 @@ module Status
       end

       # @see OpenAI::Models::Batch#errors
-      class Errors < OpenAI::BaseModel
+      class Errors < OpenAI::Internal::Type::BaseModel
         # @!attribute [r] data
         #
         #   @return [Array, nil]
-        optional :data, -> { OpenAI::ArrayOf[OpenAI::Models::BatchError] }
+        optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::BatchError] }

         # @!parse
         #   # @return [Array]
@@ -274,7 +274,7 @@ class Errors < OpenAI::BaseModel
         #   #
         #   def initialize(data: nil, object: nil, **) = super

-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
      end
    end
  end
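The `metadata` field above is a nullable string-to-string map: `OpenAI::Internal::Type::HashOf[String]` with `nil?: true`. A small sketch of the type's case-equality, assuming `HashOf#===` checks value types the way the `ArrayOf#===` hunk earlier does:

    metadata_type = OpenAI::Internal::Type::HashOf[String]

    metadata_type === {"team" => "billing"} # => true
    metadata_type === {"count" => 42}       # => false, values must be Strings
    # nil is accepted at the field level via nil?: true, not by the type itself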
# # @see OpenAI::Models::Batch#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum VALIDATING = :validating FAILED = :failed @@ -248,11 +248,11 @@ module Status end # @see OpenAI::Models::Batch#errors - class Errors < OpenAI::BaseModel + class Errors < OpenAI::Internal::Type::BaseModel # @!attribute [r] data # # @return [Array, nil] - optional :data, -> { OpenAI::ArrayOf[OpenAI::Models::BatchError] } + optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::BatchError] } # @!parse # # @return [Array] @@ -274,7 +274,7 @@ class Errors < OpenAI::BaseModel # # # def initialize(data: nil, object: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index a9fb8c08..4e1871e0 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#cancel - class BatchCancelParams < OpenAI::BaseModel + class BatchCancelParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class BatchCancelParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 04139051..1549f66e 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#create - class BatchCreateParams < OpenAI::BaseModel + class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -47,7 +47,7 @@ class BatchCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] @@ -58,12 +58,12 @@ class BatchCreateParams < OpenAI::BaseModel # # # def initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The time frame within which the batch should be processed. Currently only `24h` # is supported. module CompletionWindow - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum COMPLETION_WINDOW_24H = :"24h" @@ -79,7 +79,7 @@ module CompletionWindow # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. 
module Endpoint - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum V1_RESPONSES = :"/v1/responses" V1_CHAT_COMPLETIONS = :"/v1/chat/completions" diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 5333a1ab..3d31ed4a 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchError < OpenAI::BaseModel + class BatchError < OpenAI::Internal::Type::BaseModel # @!attribute [r] code # An error code identifying the error type. # @@ -43,7 +43,7 @@ class BatchError < OpenAI::BaseModel # # # def initialize(code: nil, line: nil, message: nil, param: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 0dc270ae..651111c7 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#list - class BatchListParams < OpenAI::BaseModel + class BatchListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -39,7 +39,7 @@ class BatchListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/batch_request_counts.rb b/lib/openai/models/batch_request_counts.rb index 2029a404..92ab5316 100644 --- a/lib/openai/models/batch_request_counts.rb +++ b/lib/openai/models/batch_request_counts.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchRequestCounts < OpenAI::BaseModel + class BatchRequestCounts < OpenAI::Internal::Type::BaseModel # @!attribute completed # Number of requests that have been completed successfully. 
# @@ -30,7 +30,7 @@ class BatchRequestCounts < OpenAI::BaseModel # # # def initialize(completed:, failed:, total:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index 695466e8..fac34345 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#retrieve - class BatchRetrieveParams < OpenAI::BaseModel + class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class BatchRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 9adc23a7..727f24f2 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#create - class Assistant < OpenAI::BaseModel + class Assistant < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. # @@ -39,7 +39,7 @@ class Assistant < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # ID of the model to use. You can use the @@ -69,7 +69,7 @@ class Assistant < OpenAI::BaseModel # `function`. # # @return [Array] - required :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -159,10 +159,10 @@ class Assistant < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Assistant#tool_resources - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] @@ -192,17 +192,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter`` tool. There can be a maximum of 20 files # associated with the tool. 
# # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -213,11 +213,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -225,7 +225,7 @@ class FileSearch < OpenAI::BaseModel # the assistant. # # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -236,7 +236,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index d6443c05..2b426970 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#create - class AssistantCreateParams < OpenAI::BaseModel + class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -41,7 +41,7 @@ class AssistantCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute name # The name of the assistant. The maximum length is 256 characters. @@ -108,7 +108,7 @@ class AssistantCreateParams < OpenAI::BaseModel # `function`. # # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } # @!parse # # @return [Array] @@ -156,7 +156,7 @@ class AssistantCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -164,7 +164,7 @@ class AssistantCreateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -176,7 +176,7 @@ module Model # def self.variants; end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] @@ -207,17 +207,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -228,11 +228,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -240,7 +240,7 @@ class FileSearch < OpenAI::BaseModel # the assistant. # # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -254,7 +254,7 @@ class FileSearch < OpenAI::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } # @!parse # # @return [Array] @@ -266,9 +266,9 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -287,7 +287,7 @@ class VectorStore < OpenAI::BaseModel # store. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -302,7 +302,7 @@ class VectorStore < OpenAI::BaseModel # a maximum length of 512 characters. 
# # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] @@ -311,14 +311,14 @@ class VectorStore < OpenAI::BaseModel # # # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -329,7 +329,7 @@ module ChunkingStrategy variant :static, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `auto`. # @@ -344,10 +344,10 @@ class Auto < OpenAI::BaseModel # # # def initialize(type: :auto, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] @@ -366,10 +366,10 @@ class Static < OpenAI::BaseModel # # # def initialize(static:, type: :static, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. 
# @@ -391,7 +391,7 @@ class Static < OpenAI::BaseModel # # # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index 1414cb2b..aed0abc7 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#delete - class AssistantDeleteParams < OpenAI::BaseModel + class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class AssistantDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant_deleted.rb b/lib/openai/models/beta/assistant_deleted.rb index 4793b0ae..f4af5c72 100644 --- a/lib/openai/models/beta/assistant_deleted.rb +++ b/lib/openai/models/beta/assistant_deleted.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#delete - class AssistantDeleted < OpenAI::BaseModel + class AssistantDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -13,7 +13,7 @@ class AssistantDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -27,7 +27,7 @@ class AssistantDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"assistant.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 1385f253..c01b9f64 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#list - class AssistantListParams < OpenAI::BaseModel + class AssistantListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -66,12 +66,12 @@ class AssistantListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 48b15aaa..2c797ac5 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -24,7 +24,7 @@ module Beta # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. module AssistantResponseFormatOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `auto` is the default value variant const: :auto diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 1c58ceea..1a86f690 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#retrieve - class AssistantRetrieveParams < OpenAI::BaseModel + class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class AssistantRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 734ad6c4..9bd4aec3 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -24,7 +24,7 @@ module Beta # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. module AssistantStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :event @@ -109,7 +109,7 @@ module AssistantStreamEvent # Occurs when an [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. This can happen due to an internal server error or a timeout. variant :error, -> { OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent } - class ThreadCreated < OpenAI::BaseModel + class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). @@ -126,7 +126,7 @@ class ThreadCreated < OpenAI::BaseModel # Whether to enable input audio transcription. # # @return [Boolean, nil] - optional :enabled, OpenAI::BooleanModel + optional :enabled, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -143,10 +143,10 @@ class ThreadCreated < OpenAI::BaseModel # # # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCreated < OpenAI::BaseModel + class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -168,10 +168,10 @@ class ThreadRunCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunQueued < OpenAI::BaseModel + class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -193,10 +193,10 @@ class ThreadRunQueued < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.queued", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunInProgress < OpenAI::BaseModel + class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -218,10 +218,10 @@ class ThreadRunInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunRequiresAction < OpenAI::BaseModel + class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -243,10 +243,10 @@ class ThreadRunRequiresAction < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.requires_action", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCompleted < OpenAI::BaseModel + class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -268,10 +268,10 @@ class ThreadRunCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunIncomplete < OpenAI::BaseModel + class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -293,10 +293,10 @@ class ThreadRunIncomplete < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.incomplete", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunFailed < OpenAI::BaseModel + class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -318,10 +318,10 @@ class ThreadRunFailed < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.failed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCancelling < OpenAI::BaseModel + class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -343,10 +343,10 @@ class ThreadRunCancelling < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.cancelling", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCancelled < OpenAI::BaseModel + class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -368,10 +368,10 @@ class ThreadRunCancelled < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.cancelled", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunExpired < OpenAI::BaseModel + class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -393,10 +393,10 @@ class ThreadRunExpired < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.expired", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepCreated < OpenAI::BaseModel + class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -418,10 +418,10 @@ class ThreadRunStepCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepInProgress < OpenAI::BaseModel + class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -443,10 +443,10 @@ class ThreadRunStepInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepDelta < OpenAI::BaseModel + class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a run step delta i.e. any changed fields on a run step during # streaming. @@ -469,10 +469,10 @@ class ThreadRunStepDelta < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepCompleted < OpenAI::BaseModel + class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -494,10 +494,10 @@ class ThreadRunStepCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepFailed < OpenAI::BaseModel + class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# @@ -519,10 +519,10 @@ class ThreadRunStepFailed < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.failed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepCancelled < OpenAI::BaseModel + class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -544,10 +544,10 @@ class ThreadRunStepCancelled < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepExpired < OpenAI::BaseModel + class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -569,10 +569,10 @@ class ThreadRunStepExpired < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.expired", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageCreated < OpenAI::BaseModel + class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -595,10 +595,10 @@ class ThreadMessageCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageInProgress < OpenAI::BaseModel + class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -621,10 +621,10 @@ class ThreadMessageInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageDelta < OpenAI::BaseModel + class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message delta i.e. any changed fields on a message during # streaming. @@ -647,10 +647,10 @@ class ThreadMessageDelta < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageCompleted < OpenAI::BaseModel + class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -673,10 +673,10 @@ class ThreadMessageCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageIncomplete < OpenAI::BaseModel + class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -699,10 +699,10 @@ class ThreadMessageIncomplete < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.incomplete", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ErrorEvent < OpenAI::BaseModel + class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # # @return [OpenAI::Models::ErrorObject] @@ -723,7 +723,7 @@ class ErrorEvent < OpenAI::BaseModel # # # def initialize(data:, event: :error, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 2a2c655d..0921b4bb 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module AssistantTool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 1caf13f7..0c1c8cb9 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantToolChoice < OpenAI::BaseModel + class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the tool. If type is `function`, the function name must be set # @@ -28,13 +28,13 @@ class AssistantToolChoice < OpenAI::BaseModel # # # def initialize(type:, function: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The type of the tool. If type is `function`, the function name must be set # # @see OpenAI::Models::Beta::AssistantToolChoice#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FUNCTION = :function CODE_INTERPRETER = :code_interpreter diff --git a/lib/openai/models/beta/assistant_tool_choice_function.rb b/lib/openai/models/beta/assistant_tool_choice_function.rb index 4e127718..19ca8d48 100644 --- a/lib/openai/models/beta/assistant_tool_choice_function.rb +++ b/lib/openai/models/beta/assistant_tool_choice_function.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantToolChoiceFunction < OpenAI::BaseModel + class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. # @@ -15,7 +15,7 @@ class AssistantToolChoiceFunction < OpenAI::BaseModel # # # def initialize(name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 807dc2eb..5445f1a6 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -11,7 +11,7 @@ module Beta # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. module AssistantToolChoiceOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call any tools and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. 
`required` means the model must call one or more tools before responding to the user. variant enum: -> { OpenAI::Models::Beta::AssistantToolChoiceOption::Auto } @@ -24,7 +24,7 @@ module AssistantToolChoiceOption # more tools. `required` means the model must call one or more tools before # responding to the user. module Auto - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum NONE = :none AUTO = :auto diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 11f30474..2fe97acb 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Assistants#update - class AssistantUpdateParams < OpenAI::BaseModel + class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -31,7 +31,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] model # ID of the model to use. You can use the @@ -112,7 +112,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # `function`. # # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } # @!parse # # @return [Array] @@ -160,7 +160,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -168,7 +168,7 @@ class AssistantUpdateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -272,7 +272,7 @@ module Model # @!endgroup end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] @@ -303,10 +303,10 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available @@ -314,7 +314,7 @@ class CodeInterpreter < OpenAI::BaseModel # with the tool. 
# # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -325,11 +325,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -337,7 +337,7 @@ class FileSearch < OpenAI::BaseModel # the assistant. # # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -348,7 +348,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/code_interpreter_tool.rb b/lib/openai/models/beta/code_interpreter_tool.rb index 0e226273..3f3013ad 100644 --- a/lib/openai/models/beta/code_interpreter_tool.rb +++ b/lib/openai/models/beta/code_interpreter_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class CodeInterpreterTool < OpenAI::BaseModel + class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `code_interpreter` # @@ -15,7 +15,7 @@ class CodeInterpreterTool < OpenAI::BaseModel # # # def initialize(type: :code_interpreter, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 9a23f3f2..2428a1a0 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class FileSearchTool < OpenAI::BaseModel + class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -26,10 +26,10 @@ class FileSearchTool < OpenAI::BaseModel # # # def initialize(file_search: nil, type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::FileSearchTool#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] max_num_results # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between @@ -70,10 +70,10 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(max_num_results: nil, ranking_options: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute score_threshold # The score threshold for the file search. 
All values must be a floating point # number between 0 and 1. @@ -105,14 +105,14 @@ class RankingOptions < OpenAI::BaseModel # # # def initialize(score_threshold:, ranker: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ranker to use for the file search. If not specified will use the `auto` # ranker. # # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index 5b9cec7a..ed879754 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class FunctionTool < OpenAI::BaseModel + class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # # @return [OpenAI::Models::FunctionDefinition] @@ -21,7 +21,7 @@ class FunctionTool < OpenAI::BaseModel # # # def initialize(function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 6f0bdf04..0657f158 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -7,7 +7,7 @@ module Beta # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. module MessageStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :event @@ -29,7 +29,7 @@ module MessageStreamEvent variant :"thread.message.incomplete", -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete } - class ThreadMessageCreated < OpenAI::BaseModel + class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -52,10 +52,10 @@ class ThreadMessageCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageInProgress < OpenAI::BaseModel + class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -78,10 +78,10 @@ class ThreadMessageInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageDelta < OpenAI::BaseModel + class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message delta i.e. any changed fields on a message during # streaming. 
@@ -104,10 +104,10 @@ class ThreadMessageDelta < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageCompleted < OpenAI::BaseModel + class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -130,10 +130,10 @@ class ThreadMessageCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadMessageIncomplete < OpenAI::BaseModel + class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -156,7 +156,7 @@ class ThreadMessageIncomplete < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.message.incomplete", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index a3dc27d6..93f240f7 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -7,7 +7,7 @@ module Beta # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. module RunStepStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :event @@ -35,7 +35,7 @@ module RunStepStreamEvent # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires. variant :"thread.run.step.expired", -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired } - class ThreadRunStepCreated < OpenAI::BaseModel + class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -57,10 +57,10 @@ class ThreadRunStepCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepInProgress < OpenAI::BaseModel + class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -82,10 +82,10 @@ class ThreadRunStepInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepDelta < OpenAI::BaseModel + class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a run step delta i.e. any changed fields on a run step during # streaming. 
@@ -108,10 +108,10 @@ class ThreadRunStepDelta < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepCompleted < OpenAI::BaseModel + class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -133,10 +133,10 @@ class ThreadRunStepCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepFailed < OpenAI::BaseModel + class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -158,10 +158,10 @@ class ThreadRunStepFailed < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.failed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepCancelled < OpenAI::BaseModel + class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -183,10 +183,10 @@ class ThreadRunStepCancelled < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunStepExpired < OpenAI::BaseModel + class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # @@ -208,7 +208,7 @@ class ThreadRunStepExpired < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.step.expired", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 058cde20..14e5177f 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -6,7 +6,7 @@ module Beta # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. module RunStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :event @@ -41,7 +41,7 @@ module RunStreamEvent # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) expires. variant :"thread.run.expired", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired } - class ThreadRunCreated < OpenAI::BaseModel + class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -63,10 +63,10 @@ class ThreadRunCreated < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunQueued < OpenAI::BaseModel + class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -88,10 +88,10 @@ class ThreadRunQueued < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.queued", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunInProgress < OpenAI::BaseModel + class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -113,10 +113,10 @@ class ThreadRunInProgress < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunRequiresAction < OpenAI::BaseModel + class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -138,10 +138,10 @@ class ThreadRunRequiresAction < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.requires_action", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCompleted < OpenAI::BaseModel + class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -163,10 +163,10 @@ class ThreadRunCompleted < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunIncomplete < OpenAI::BaseModel + class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -188,10 +188,10 @@ class ThreadRunIncomplete < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.incomplete", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunFailed < OpenAI::BaseModel + class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -213,10 +213,10 @@ class ThreadRunFailed < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.failed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCancelling < OpenAI::BaseModel + class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -238,10 +238,10 @@ class ThreadRunCancelling < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.cancelling", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunCancelled < OpenAI::BaseModel + class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -263,10 +263,10 @@ class ThreadRunCancelled < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.cancelled", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class ThreadRunExpired < OpenAI::BaseModel + class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -288,7 +288,7 @@ class ThreadRunExpired < OpenAI::BaseModel # # # def initialize(data:, event: :"thread.run.expired", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index c651e7c2..ab9384f2 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#create - class Thread < OpenAI::BaseModel + class Thread < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. # @@ -26,7 +26,7 @@ class Thread < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute object # The object type, which is always `thread`. @@ -55,10 +55,10 @@ class Thread < OpenAI::BaseModel # # # def initialize(id:, created_at:, metadata:, tool_resources:, object: :thread, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Thread#tool_resources - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil] @@ -88,17 +88,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -109,11 +109,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -121,7 +121,7 @@ class FileSearch < OpenAI::BaseModel # the thread. 
# # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -132,7 +132,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4425ba3c..03e401db 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -6,7 +6,7 @@ module Beta # @see OpenAI::Resources::Beta::Threads#create_and_run # # @see OpenAI::Resources::Beta::Threads#stream_raw - class ThreadCreateAndRunParams < OpenAI::BaseModel + class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -55,7 +55,7 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to @@ -72,7 +72,7 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -149,7 +149,7 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # # @return [Array, nil] optional :tools, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Tool] }, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Tool] }, nil?: true # @!attribute top_p @@ -211,14 +211,14 @@ class ThreadCreateAndRunParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -230,14 +230,14 @@ module Model # def self.variants; end end - class Thread < OpenAI::BaseModel + class Thread < OpenAI::Internal::Type::BaseModel # @!attribute [r] messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # # @return [Array, nil] optional :messages, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message] } # @!parse # # @return [Array] @@ -252,7 +252,7 @@ class Thread < OpenAI::BaseModel # a maximum length of 512 characters. 
# # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this @@ -275,9 +275,9 @@ class Thread < OpenAI::BaseModel # # # def initialize(messages: nil, metadata: nil, tool_resources: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # @@ -300,7 +300,7 @@ class Message < OpenAI::BaseModel # # @return [Array, nil] optional :attachments, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] }, nil?: true # @!attribute metadata @@ -312,7 +312,7 @@ class Message < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param content [String, Array] @@ -322,13 +322,13 @@ class Message < OpenAI::BaseModel # # # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The text contents of the message. # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The text contents of the message. variant String @@ -341,7 +341,7 @@ module Content # def self.variants; end MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -353,7 +353,7 @@ module Content # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -365,7 +365,7 @@ module Role # def self.values; end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file to attach to the message. 
# @@ -381,7 +381,7 @@ class Attachment < OpenAI::BaseModel # # @return [Array, nil] optional :tools, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } # @!parse # # @return [Array] @@ -393,10 +393,10 @@ class Attachment < OpenAI::BaseModel # # # def initialize(file_id: nil, tools: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -405,7 +405,7 @@ module Tool variant :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -417,7 +417,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse @@ -428,7 +428,7 @@ class FileSearch < OpenAI::BaseModel end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] @@ -460,17 +460,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -481,11 +481,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -493,7 +493,7 @@ class FileSearch < OpenAI::BaseModel # the thread. 
# # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -507,7 +507,7 @@ class FileSearch < OpenAI::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } # @!parse # # @return [Array] @@ -519,9 +519,9 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -540,7 +540,7 @@ class VectorStore < OpenAI::BaseModel # store. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -555,7 +555,7 @@ class VectorStore < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] @@ -564,14 +564,14 @@ class VectorStore < OpenAI::BaseModel # # # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -582,7 +582,7 @@ module ChunkingStrategy variant :static, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `auto`. 
# @@ -597,10 +597,10 @@ class Auto < OpenAI::BaseModel # # # def initialize(type: :auto, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] @@ -619,10 +619,10 @@ class Static < OpenAI::BaseModel # # # def initialize(static:, type: :static, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # @@ -644,7 +644,7 @@ class Static < OpenAI::BaseModel # # # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end @@ -657,7 +657,7 @@ class Static < OpenAI::BaseModel end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] @@ -688,17 +688,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -709,11 +709,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -721,7 +721,7 @@ class FileSearch < OpenAI::BaseModel # the assistant. 
# # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -732,12 +732,12 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } @@ -750,7 +750,7 @@ module Tool # def self.variants; end end - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -776,7 +776,7 @@ class TruncationStrategy < OpenAI::BaseModel # # # def initialize(type:, last_messages: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -785,7 +785,7 @@ class TruncationStrategy < OpenAI::BaseModel # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 434c28c5..6e46487b 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#create - class ThreadCreateParams < OpenAI::BaseModel + class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,8 @@ class ThreadCreateParams < OpenAI::BaseModel # start the thread with. # # @return [Array, nil] - optional :messages, -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message] } + optional :messages, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message] } # @!parse # # @return [Array] @@ -29,7 +30,7 @@ class ThreadCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this @@ -48,9 +49,9 @@ class ThreadCreateParams < OpenAI::BaseModel # # # def initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. 
# @@ -73,7 +74,7 @@ class Message < OpenAI::BaseModel # # @return [Array, nil] optional :attachments, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment] }, nil?: true # @!attribute metadata @@ -85,7 +86,7 @@ class Message < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param content [String, Array] @@ -95,13 +96,13 @@ class Message < OpenAI::BaseModel # # # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The text contents of the message. # # @see OpenAI::Models::Beta::ThreadCreateParams::Message#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The text contents of the message. variant String @@ -114,7 +115,7 @@ module Content # def self.variants; end MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -126,7 +127,7 @@ module Content # # @see OpenAI::Models::Beta::ThreadCreateParams::Message#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -138,7 +139,7 @@ module Role # def self.values; end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file to attach to the message. 
# @@ -154,7 +155,7 @@ class Attachment < OpenAI::BaseModel # # @return [Array, nil] optional :tools, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } # @!parse # # @return [Array] @@ -166,10 +167,10 @@ class Attachment < OpenAI::BaseModel # # # def initialize(file_id: nil, tools: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -178,7 +179,7 @@ module Tool variant :file_search, -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -190,7 +191,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse @@ -200,7 +201,7 @@ class FileSearch < OpenAI::BaseModel end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] @@ -231,17 +232,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -252,11 +253,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -264,7 +265,7 @@ class FileSearch < OpenAI::BaseModel # the thread. 
# # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -278,7 +279,7 @@ class FileSearch < OpenAI::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } # @!parse # # @return [Array] @@ -290,9 +291,9 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -311,7 +312,7 @@ class VectorStore < OpenAI::BaseModel # store. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -326,7 +327,7 @@ class VectorStore < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] @@ -335,14 +336,14 @@ class VectorStore < OpenAI::BaseModel # # # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -353,7 +354,7 @@ module ChunkingStrategy variant :static, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `auto`. 
# @@ -368,10 +369,10 @@ class Auto < OpenAI::BaseModel # # # def initialize(type: :auto, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] @@ -390,10 +391,10 @@ class Static < OpenAI::BaseModel # # # def initialize(static:, type: :static, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # @@ -415,7 +416,7 @@ class Static < OpenAI::BaseModel # # # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index 0febd443..aa6b5341 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#delete - class ThreadDeleteParams < OpenAI::BaseModel + class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class ThreadDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/thread_deleted.rb b/lib/openai/models/beta/thread_deleted.rb index 35289d1b..5a290344 100644 --- a/lib/openai/models/beta/thread_deleted.rb +++ b/lib/openai/models/beta/thread_deleted.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#delete - class ThreadDeleted < OpenAI::BaseModel + class ThreadDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -13,7 +13,7 @@ class ThreadDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -27,7 +27,7 @@ class ThreadDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"thread.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index 7b3c99a3..db2128ba 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#retrieve - class ThreadRetrieveParams < OpenAI::BaseModel + class ThreadRetrieveParams < 
OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class ThreadRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index f24a473b..195b3b77 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class ThreadStreamEvent < OpenAI::BaseModel + class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). @@ -20,7 +20,7 @@ class ThreadStreamEvent < OpenAI::BaseModel # Whether to enable input audio transcription. # # @return [Boolean, nil] - optional :enabled, OpenAI::BooleanModel + optional :enabled, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -37,7 +37,7 @@ class ThreadStreamEvent < OpenAI::BaseModel # # # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 4ae11357..10ec2820 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # @see OpenAI::Resources::Beta::Threads#update - class ThreadUpdateParams < OpenAI::BaseModel + class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -18,7 +18,7 @@ class ThreadUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. 
# # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this @@ -36,9 +36,9 @@ class ThreadUpdateParams < OpenAI::BaseModel # # # def initialize(metadata: nil, tool_resources: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] @@ -69,17 +69,17 @@ class ToolResources < OpenAI::BaseModel # # # def initialize(code_interpreter: nil, file_search: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -90,11 +90,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(file_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -102,7 +102,7 @@ class FileSearch < OpenAI::BaseModel # the thread. # # @return [Array, nil] - optional :vector_store_ids, OpenAI::ArrayOf[String] + optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -113,7 +113,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(vector_store_ids: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index b7822e85..0086d3c1 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -8,7 +8,7 @@ module Threads # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 8bbb2ee1..b5a749fd 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -8,7 +8,7 @@ module Threads # File associated with the assistant or the message. 
Generated when the assistant # uses the "file_search" tool to search files. module AnnotationDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index e471d257..404d9340 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FileCitationAnnotation < OpenAI::BaseModel + class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # # @return [Integer] @@ -45,10 +45,10 @@ class FileCitationAnnotation < OpenAI::BaseModel # # # def initialize(end_index:, file_citation:, start_index:, text:, type: :file_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. # @@ -60,7 +60,7 @@ class FileCitation < OpenAI::BaseModel # # # def initialize(file_id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 5d2bf50f..5077a210 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FileCitationDeltaAnnotation < OpenAI::BaseModel + class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the annotation in the text content part. # @@ -68,10 +68,10 @@ class FileCitationDeltaAnnotation < OpenAI::BaseModel # # # def initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the specific File the citation is from. 
# @@ -98,7 +98,7 @@ class FileCitation < OpenAI::BaseModel # # # def initialize(file_id: nil, quote: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index fbe60a09..9bf12486 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FilePathAnnotation < OpenAI::BaseModel + class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # # @return [Integer] @@ -44,10 +44,10 @@ class FilePathAnnotation < OpenAI::BaseModel # # # def initialize(end_index:, file_path:, start_index:, text:, type: :file_path, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. # @@ -59,7 +59,7 @@ class FilePath < OpenAI::BaseModel # # # def initialize(file_id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 8fe4a8e7..cb1c00a6 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FilePathDeltaAnnotation < OpenAI::BaseModel + class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the annotation in the text content part. # @@ -67,10 +67,10 @@ class FilePathDeltaAnnotation < OpenAI::BaseModel # # # def initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file that was generated. # @@ -86,7 +86,7 @@ class FilePath < OpenAI::BaseModel # # # def initialize(file_id: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 8f66bf22..bd2e1f25 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFile < OpenAI::BaseModel + class ImageFile < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. 
Set `purpose="vision"` when uploading the File if you @@ -30,14 +30,14 @@ class ImageFile < OpenAI::BaseModel # # # def initialize(file_id:, detail: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # # @see OpenAI::Models::Beta::Threads::ImageFile#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LOW = :low diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 00121cfe..60fc0073 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFileContentBlock < OpenAI::BaseModel + class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # # @return [OpenAI::Models::Beta::Threads::ImageFile] @@ -25,7 +25,7 @@ class ImageFileContentBlock < OpenAI::BaseModel # # # def initialize(image_file:, type: :image_file, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index cb52ad4b..4d654fe4 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFileDelta < OpenAI::BaseModel + class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] detail # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -34,14 +34,14 @@ class ImageFileDelta < OpenAI::BaseModel # # # def initialize(detail: nil, file_id: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LOW = :low diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index c33babbb..51118d76 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFileDeltaBlock < OpenAI::BaseModel + class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the content part in the message. 
# @@ -36,7 +36,7 @@ class ImageFileDeltaBlock < OpenAI::BaseModel # # # def initialize(index:, image_file: nil, type: :image_file, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index a38ccdbd..c5db8e23 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # The external URL of the image, must be a supported image types: jpeg, jpg, png, # gif, webp. @@ -29,14 +29,14 @@ class ImageURL < OpenAI::BaseModel # # # def initialize(url:, detail: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # # @see OpenAI::Models::Beta::Threads::ImageURL#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LOW = :low diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index b800af73..326f1c1d 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURLContentBlock < OpenAI::BaseModel + class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # # @return [OpenAI::Models::Beta::Threads::ImageURL] @@ -24,7 +24,7 @@ class ImageURLContentBlock < OpenAI::BaseModel # # # def initialize(image_url:, type: :image_url, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 6c80622a..0c69ce94 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURLDelta < OpenAI::BaseModel + class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. @@ -33,14 +33,14 @@ class ImageURLDelta < OpenAI::BaseModel # # # def initialize(detail: nil, url: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
# # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LOW = :low diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index 59f49efb..950a87c1 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURLDeltaBlock < OpenAI::BaseModel + class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the content part in the message. # @@ -35,7 +35,7 @@ class ImageURLDeltaBlock < OpenAI::BaseModel # # # def initialize(index:, image_url: nil, type: :image_url, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index ded2bdd4..4987ff90 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#create - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. # @@ -25,7 +25,7 @@ class Message < OpenAI::BaseModel # # @return [Array, nil] required :attachments, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment] }, nil?: true # @!attribute completed_at @@ -38,7 +38,8 @@ class Message < OpenAI::BaseModel # The content of the message in array of text and/or images. # # @return [Array] - required :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent] } + required :content, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent] } # @!attribute created_at # The Unix timestamp (in seconds) for when the message was created. @@ -67,7 +68,7 @@ class Message < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute object # The object type, which is always `thread.message`. @@ -142,9 +143,9 @@ class Message < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file to attach to the message. # @@ -159,7 +160,8 @@ class Attachment < OpenAI::BaseModel # The tools to add this file to. 
# # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] } + optional :tools, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] } # @!parse # # @return [Array] @@ -171,16 +173,16 @@ class Attachment < OpenAI::BaseModel # # # def initialize(file_id: nil, tools: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant -> { OpenAI::Models::Beta::CodeInterpreterTool } variant -> { OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } - class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel + class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -192,7 +194,7 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel # # # def initialize(type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse @@ -202,7 +204,7 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel end # @see OpenAI::Models::Beta::Threads::Message#incomplete_details - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason the message is incomplete. # @@ -216,13 +218,13 @@ class IncompleteDetails < OpenAI::BaseModel # # # def initialize(reason:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason the message is incomplete. # # @see OpenAI::Models::Beta::Threads::Message::IncompleteDetails#reason module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum CONTENT_FILTER = :content_filter MAX_TOKENS = :max_tokens @@ -242,7 +244,7 @@ module Reason # # @see OpenAI::Models::Beta::Threads::Message#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -259,7 +261,7 @@ module Role # # @see OpenAI::Models::Beta::Threads::Message#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress INCOMPLETE = :incomplete diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 4e4280f1..793bbb1d 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -7,7 +7,7 @@ module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. module MessageContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 4cbf92ef..0d6aafd8 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -7,7 +7,7 @@ module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
module MessageContentDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 20ea5568..33049950 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -7,7 +7,7 @@ module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. module MessageContentPartParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 5a15f269..dfabf003 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#create - class MessageCreateParams < OpenAI::BaseModel + class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -32,7 +32,7 @@ class MessageCreateParams < OpenAI::BaseModel # # @return [Array, nil] optional :attachments, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment] }, nil?: true # @!attribute metadata @@ -44,7 +44,7 @@ class MessageCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param content [String, Array] @@ -55,11 +55,11 @@ class MessageCreateParams < OpenAI::BaseModel # # # def initialize(content:, role:, attachments: nil, metadata: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The text contents of the message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The text contents of the message. variant String @@ -72,7 +72,7 @@ module Content # def self.variants; end MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -82,7 +82,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -94,7 +94,7 @@ module Role # def self.values; end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file to attach to the message. 
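# A construction sketch for the params model above; the keywords mirror the
# documented initialize signature, and `role:` takes one of the Role
# constants (USER or ASSISTANT) from this hunk:
#
#   params = OpenAI::Models::Beta::Threads::MessageCreateParams.new(
#     content: "Summarize the attached file.",
#     role: :user,
#     metadata: {topic: "quarterly-report"}  # HashOf[String]: string values only
#   )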
# @@ -110,7 +110,7 @@ class Attachment < OpenAI::BaseModel # # @return [Array, nil] optional :tools, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] } # @!parse # # @return [Array] @@ -122,10 +122,10 @@ class Attachment < OpenAI::BaseModel # # # def initialize(file_id: nil, tools: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -134,7 +134,7 @@ module Tool variant :file_search, -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -146,7 +146,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 64604e89..759a3108 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#delete - class MessageDeleteParams < OpenAI::BaseModel + class MessageDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ class MessageDeleteParams < OpenAI::BaseModel # # # def initialize(thread_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/message_deleted.rb b/lib/openai/models/beta/threads/message_deleted.rb index 488ddb42..da791175 100644 --- a/lib/openai/models/beta/threads/message_deleted.rb +++ b/lib/openai/models/beta/threads/message_deleted.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#delete - class MessageDeleted < OpenAI::BaseModel + class MessageDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -14,7 +14,7 @@ class MessageDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -28,7 +28,7 @@ class MessageDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"thread.message.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index c13c0e39..b63f29bf 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -4,12 +4,13 @@ module OpenAI module Models module Beta module Threads - class MessageDelta < OpenAI::BaseModel + class MessageDelta < 
OpenAI::Internal::Type::BaseModel # @!attribute [r] content # The content of the message in array of text and/or images. # # @return [Array, nil] - optional :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentDelta] } + optional :content, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentDelta] } # @!parse # # @return [Array] @@ -33,13 +34,13 @@ class MessageDelta < OpenAI::BaseModel # # # def initialize(content: nil, role: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The entity that produced the message. One of `user` or `assistant`. # # @see OpenAI::Models::Beta::Threads::MessageDelta#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index a008085e..11ba7a44 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageDeltaEvent < OpenAI::BaseModel + class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier of the message, which can be referenced in API endpoints. # @@ -33,7 +33,7 @@ class MessageDeltaEvent < OpenAI::BaseModel # # # def initialize(id:, delta:, object: :"thread.message.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1cbbff8e..9a7d62f5 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#list - class MessageListParams < OpenAI::BaseModel + class MessageListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -78,12 +78,12 @@ class MessageListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
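# A construction sketch for the list-params model above; the keywords mirror
# the documented initialize signature, and `order:` takes a constant from
# the Order enum declared just below:
#
#   params = OpenAI::Models::Beta::Threads::MessageListParams.new(
#     limit: 20,
#     order: :desc
#   )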
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index 996cb629..e2bc129b 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#retrieve - class MessageRetrieveParams < OpenAI::BaseModel + class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ class MessageRetrieveParams < OpenAI::BaseModel # # # def initialize(thread_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index f77a34f4..c83b973f 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#update - class MessageUpdateParams < OpenAI::BaseModel + class MessageUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -24,7 +24,7 @@ class MessageUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param thread_id [String] @@ -33,7 +33,7 @@ class MessageUpdateParams < OpenAI::BaseModel # # # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/refusal_content_block.rb b/lib/openai/models/beta/threads/refusal_content_block.rb index d174b062..58ab5010 100644 --- a/lib/openai/models/beta/threads/refusal_content_block.rb +++ b/lib/openai/models/beta/threads/refusal_content_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RefusalContentBlock < OpenAI::BaseModel + class RefusalContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute refusal # # @return [String] @@ -24,7 +24,7 @@ class RefusalContentBlock < OpenAI::BaseModel # # # def initialize(refusal:, type: :refusal, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb index e65375c1..e762e97d 100644 --- a/lib/openai/models/beta/threads/refusal_delta_block.rb +++ b/lib/openai/models/beta/threads/refusal_delta_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RefusalDeltaBlock < OpenAI::BaseModel + class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the refusal part in the message. 
# @@ -35,7 +35,7 @@ class RefusalDeltaBlock < OpenAI::BaseModel # # # def initialize(index:, refusal: nil, type: :refusal, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index d9f3e412..d79d1249 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RequiredActionFunctionToolCall < OpenAI::BaseModel + class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call. This ID must be referenced when you submit the tool # outputs in using the @@ -36,10 +36,10 @@ class RequiredActionFunctionToolCall < OpenAI::BaseModel # # # def initialize(id:, function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments that the model expects you to pass to the function. # @@ -60,7 +60,7 @@ class Function < OpenAI::BaseModel # # # def initialize(arguments:, name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 9b21a4a0..8af69bee 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -7,7 +7,7 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw - class Run < OpenAI::BaseModel + class Run < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. # @@ -96,7 +96,7 @@ class Run < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # The model that the @@ -118,7 +118,7 @@ class Run < OpenAI::BaseModel # during tool use. # # @return [Boolean] - required :parallel_tool_calls, OpenAI::BooleanModel + required :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel # @!attribute required_action # Details on the action required to continue the run. Will be `null` if no action @@ -191,7 +191,7 @@ class Run < OpenAI::BaseModel # this run. # # @return [Array] - required :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. 
Use this to @@ -284,10 +284,10 @@ class Run < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Run#incomplete_details - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -307,14 +307,14 @@ class IncompleteDetails < OpenAI::BaseModel # # # def initialize(reason: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens @@ -328,7 +328,7 @@ module Reason end # @see OpenAI::Models::Beta::Threads::Run#last_error - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # @@ -349,13 +349,13 @@ class LastError < OpenAI::BaseModel # # # def initialize(code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # # @see OpenAI::Models::Beta::Threads::Run::LastError#code module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded @@ -370,7 +370,7 @@ module Code end # @see OpenAI::Models::Beta::Threads::Run#required_action - class RequiredAction < OpenAI::BaseModel + class RequiredAction < OpenAI::Internal::Type::BaseModel # @!attribute submit_tool_outputs # Details on the tool outputs needed for this run to continue. # @@ -393,16 +393,16 @@ class RequiredAction < OpenAI::BaseModel # # # def initialize(submit_tool_outputs:, type: :submit_tool_outputs, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs - class SubmitToolOutputs < OpenAI::BaseModel + class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # A list of the relevant tool calls. # # @return [Array] required :tool_calls, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } # @!parse # # Details on the tool outputs needed for this run to continue. @@ -411,12 +411,12 @@ class SubmitToolOutputs < OpenAI::BaseModel # # # def initialize(tool_calls:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end # @see OpenAI::Models::Beta::Threads::Run#truncation_strategy - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. 
If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -442,7 +442,7 @@ class TruncationStrategy < OpenAI::BaseModel # # # def initialize(type:, last_messages: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -451,7 +451,7 @@ class TruncationStrategy < OpenAI::BaseModel # # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LAST_MESSAGES = :last_messages @@ -465,7 +465,7 @@ module Type end # @see OpenAI::Models::Beta::Threads::Run#usage - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run. # @@ -494,7 +494,7 @@ class Usage < OpenAI::BaseModel # # # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index bc13b744..0e9b76a0 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#cancel - class RunCancelParams < OpenAI::BaseModel + class RunCancelParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ class RunCancelParams < OpenAI::BaseModel # # # def initialize(thread_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 4b60bf0b..51a64adc 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -7,7 +7,7 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw - class RunCreateParams < OpenAI::BaseModel + class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -30,7 +30,8 @@ class RunCreateParams < OpenAI::BaseModel # for more information. 
# # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } # @!parse # # @return [Array] @@ -49,7 +50,7 @@ class RunCreateParams < OpenAI::BaseModel # # @return [Array, nil] optional :additional_messages, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage] }, nil?: true # @!attribute instructions @@ -89,7 +90,7 @@ class RunCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to @@ -106,7 +107,7 @@ class RunCreateParams < OpenAI::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -173,7 +174,9 @@ class RunCreateParams < OpenAI::BaseModel # modifying the behavior on a per-run basis. # # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] }, nil?: true + optional :tools, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] }, + nil?: true # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -238,9 +241,9 @@ class RunCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class AdditionalMessage < OpenAI::BaseModel + class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # @@ -264,7 +267,7 @@ class AdditionalMessage < OpenAI::BaseModel # # @return [Array, nil] optional :attachments, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] }, nil?: true # @!attribute metadata @@ -276,7 +279,7 @@ class AdditionalMessage < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param content [String, Array] @@ -286,13 +289,13 @@ class AdditionalMessage < OpenAI::BaseModel # # # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The text contents of the message. # # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The text contents of the message. 
variant String @@ -305,7 +308,7 @@ module Content # def self.variants; end MessageContentPartParamArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -317,7 +320,7 @@ module Content # # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -329,7 +332,7 @@ module Role # def self.values; end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The ID of the file to attach to the message. # @@ -345,7 +348,7 @@ class Attachment < OpenAI::BaseModel # # @return [Array, nil] optional :tools, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } # @!parse # # @return [Array] @@ -357,10 +360,10 @@ class Attachment < OpenAI::BaseModel # # # def initialize(file_id: nil, tools: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -369,7 +372,7 @@ module Tool variant :file_search, -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool being defined: `file_search` # @@ -381,7 +384,7 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse @@ -396,7 +399,7 @@ class FileSearch < OpenAI::BaseModel # model associated with the assistant. If not, the model associated with the # assistant will be used. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -408,7 +411,7 @@ module Model # def self.variants; end end - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -434,7 +437,7 @@ class TruncationStrategy < OpenAI::BaseModel # # # def initialize(type:, last_messages: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The truncation strategy to use for the thread. The default is `auto`. 
If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -443,7 +446,7 @@ class TruncationStrategy < OpenAI::BaseModel # # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LAST_MESSAGES = :last_messages diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 7e05072e..f2b9f759 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#list - class RunListParams < OpenAI::BaseModel + class RunListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -67,12 +67,12 @@ class RunListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index ddca0649..ca7aaf1d 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#retrieve - class RunRetrieveParams < OpenAI::BaseModel + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ class RunRetrieveParams < OpenAI::BaseModel # # # def initialize(thread_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index 351e323f..efb9f2b6 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -8,7 +8,7 @@ module Threads # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. 
module RunStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum QUEUED = :queued IN_PROGRESS = :in_progress diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index e3579635..85d3e4d9 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -7,7 +7,7 @@ module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_stream_raw - class RunSubmitToolOutputsParams < OpenAI::BaseModel + class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -22,7 +22,7 @@ class RunSubmitToolOutputsParams < OpenAI::BaseModel # # @return [Array] required :tool_outputs, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } # @!parse # # @param thread_id [String] @@ -31,9 +31,9 @@ class RunSubmitToolOutputsParams < OpenAI::BaseModel # # # def initialize(thread_id:, tool_outputs:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class ToolOutput < OpenAI::BaseModel + class ToolOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] output # The output of the tool call to be submitted to continue the run. # @@ -61,7 +61,7 @@ class ToolOutput < OpenAI::BaseModel # # # def initialize(output: nil, tool_call_id: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 0b5a69c2..97c387fc 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#update - class RunUpdateParams < OpenAI::BaseModel + class RunUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -24,7 +24,7 @@ class RunUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. 
# # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param thread_id [String] @@ -33,7 +33,7 @@ class RunUpdateParams < OpenAI::BaseModel # # # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb index db4bce17..9d329390 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class CodeInterpreterLogs < OpenAI::BaseModel + class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the output in the outputs array. # @@ -37,7 +37,7 @@ class CodeInterpreterLogs < OpenAI::BaseModel # # # def initialize(index:, logs: nil, type: :logs, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index 42262679..1fbdc55b 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class CodeInterpreterOutputImage < OpenAI::BaseModel + class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the output in the outputs array. # @@ -34,10 +34,10 @@ class CodeInterpreterOutputImage < OpenAI::BaseModel # # # def initialize(index:, image: nil, type: :image, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. @@ -54,7 +54,7 @@ class Image < OpenAI::BaseModel # # # def initialize(file_id: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 62aeaf0c..4daf4c57 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class CodeInterpreterToolCall < OpenAI::BaseModel + class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call. 
# @@ -35,10 +35,10 @@ class CodeInterpreterToolCall < OpenAI::BaseModel # # # def initialize(id:, code_interpreter:, type: :code_interpreter, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. # @@ -52,7 +52,7 @@ class CodeInterpreter < OpenAI::BaseModel # # @return [Array] required :outputs, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } # @!parse # # The Code Interpreter tool call definition. @@ -62,11 +62,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(input:, outputs:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Text output from the Code Interpreter tool call as part of a run step. module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -77,7 +77,7 @@ module Output variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image } - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs # The text output from the Code Interpreter tool call. # @@ -98,10 +98,10 @@ class Logs < OpenAI::BaseModel # # # def initialize(logs:, type: :logs, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # @!attribute image # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] @@ -120,10 +120,10 @@ class Image < OpenAI::BaseModel # # # def initialize(image:, type: :image, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. @@ -136,7 +136,7 @@ class Image < OpenAI::BaseModel # # # def initialize(file_id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 5e5b0369..6573f533 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class CodeInterpreterToolCallDelta < OpenAI::BaseModel + class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the tool call in the tool calls array. 
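# A dispatch sketch over the CodeInterpreterToolCall output union defined
# above, using Ruby pattern matching against the generated classes:
#
#   output = tool_call.code_interpreter.outputs.first
#   case output
#   in OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs
#     puts output.logs            # text output from the interpreter
#   in OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image
#     puts output.image.file_id   # file ID of the rendered image
#   end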
# @@ -50,10 +50,10 @@ class CodeInterpreterToolCallDelta < OpenAI::BaseModel # # # def initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] input # The input to the Code Interpreter tool call. # @@ -71,7 +71,7 @@ class CodeInterpreter < OpenAI::BaseModel # # @return [Array, nil] optional :outputs, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } # @!parse # # @return [Array] @@ -85,11 +85,11 @@ class CodeInterpreter < OpenAI::BaseModel # # # def initialize(input: nil, outputs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Text output from the Code Interpreter tool call as part of a run step. module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 09267fd0..c8331a7b 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class FileSearchToolCall < OpenAI::BaseModel + class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call object. # @@ -32,10 +32,10 @@ class FileSearchToolCall < OpenAI::BaseModel # # # def initialize(id:, file_search:, type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] ranking_options # The ranking options for the file search. # @@ -52,7 +52,7 @@ class FileSearch < OpenAI::BaseModel # # @return [Array, nil] optional :results, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } # @!parse # # @return [Array] @@ -66,10 +66,10 @@ class FileSearch < OpenAI::BaseModel # # # def initialize(ranking_options: nil, results: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
@@ -93,14 +93,14 @@ class RankingOptions < OpenAI::BaseModel # # # def initialize(ranker:, score_threshold:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ranker to use for the file search. If not specified will use the `auto` # ranker. # # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 @@ -113,7 +113,7 @@ module Ranker end end - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that result was found in. # @@ -139,7 +139,7 @@ class Result < OpenAI::BaseModel # # @return [Array, nil] optional :content, - -> { OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } # @!parse # # @return [Array] @@ -155,9 +155,9 @@ class Result < OpenAI::BaseModel # # # def initialize(file_id:, file_name:, score:, content: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel # @!attribute [r] text # The text content of the file. # @@ -185,13 +185,13 @@ class Content < OpenAI::BaseModel # # # def initialize(text: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The type of the content. # # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index 86dd6012..eb5776d0 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -5,12 +5,12 @@ module Models module Beta module Threads module Runs - class FileSearchToolCallDelta < OpenAI::BaseModel + class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute file_search # For now, this is always going to be an empty object. # # @return [Object] - required :file_search, OpenAI::Unknown + required :file_search, OpenAI::Internal::Type::Unknown # @!attribute index # The index of the tool call in the tool calls array. 
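# A hedged read-side sketch for the file-search results modeled above;
# `results` is optional, so it is guarded here:
#
#   tool_call.file_search.results&.each do |result|
#     puts format("%s scored %.2f", result.file_name, result.score)
#   end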
@@ -43,7 +43,7 @@ class FileSearchToolCallDelta < OpenAI::BaseModel # # # def initialize(file_search:, index:, id: nil, type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index 9d5784e9..f0677ac7 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class FunctionToolCall < OpenAI::BaseModel + class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call object. # @@ -32,10 +32,10 @@ class FunctionToolCall < OpenAI::BaseModel # # # def initialize(id:, function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. # @@ -65,7 +65,7 @@ class Function < OpenAI::BaseModel # # # def initialize(arguments:, name:, output:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index d77981e5..c4bc767e 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class FunctionToolCallDelta < OpenAI::BaseModel + class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the tool call in the tool calls array. # @@ -47,10 +47,10 @@ class FunctionToolCallDelta < OpenAI::BaseModel # # # def initialize(index:, id: nil, function: nil, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute [r] arguments # The arguments passed to the function. 
# @@ -88,7 +88,7 @@ class Function < OpenAI::BaseModel # # # def initialize(arguments: nil, name: nil, output: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index f7bde3db..12b7b685 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class MessageCreationStepDetails < OpenAI::BaseModel + class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] @@ -26,10 +26,10 @@ class MessageCreationStepDetails < OpenAI::BaseModel # # # def initialize(message_creation:, type: :message_creation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation - class MessageCreation < OpenAI::BaseModel + class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. # @@ -41,7 +41,7 @@ class MessageCreation < OpenAI::BaseModel # # # def initialize(message_id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 21084d23..9261d7c5 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -6,7 +6,7 @@ module Beta module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve - class RunStep < OpenAI::BaseModel + class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier of the run step, which can be referenced in API endpoints. # @@ -68,7 +68,7 @@ class RunStep < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute object # The object type, which is always `thread.run.step`. @@ -158,10 +158,10 @@ class RunStep < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. # @@ -183,13 +183,13 @@ class LastError < OpenAI::BaseModel # # # def initialize(code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # One of `server_error` or `rate_limit_exceeded`. 
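# A hedged error-inspection sketch; `last_error` is assumed nil while the
# step has not failed, and `code` is a constant from the Code enum declared
# just below:
#
#   if (err = run_step.last_error)
#     warn "#{err.code}: #{err.message}"
#     retryable = err.code == :rate_limit_exceeded
#   end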
# # @see OpenAI::Models::Beta::Threads::Runs::RunStep::LastError#code module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded @@ -207,7 +207,7 @@ module Code # # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress CANCELLED = :cancelled @@ -226,7 +226,7 @@ module Status # # @see OpenAI::Models::Beta::Threads::Runs::RunStep#step_details module StepDetails - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -245,7 +245,7 @@ module StepDetails # # @see OpenAI::Models::Beta::Threads::Runs::RunStep#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls @@ -258,7 +258,7 @@ module Type end # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run step. # @@ -287,7 +287,7 @@ class Usage < OpenAI::BaseModel # # # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 929339da..15e16864 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class RunStepDelta < OpenAI::BaseModel + class RunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] step_details # The details of the run step. # @@ -23,13 +23,13 @@ class RunStepDelta < OpenAI::BaseModel # # # def initialize(step_details: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The details of the run step. # # @see OpenAI::Models::Beta::Threads::Runs::RunStepDelta#step_details module StepDetails - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index fd2e6642..027d0490 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class RunStepDeltaEvent < OpenAI::BaseModel + class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier of the run step, which can be referenced in API endpoints. 
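# A hedged streaming sketch for the delta event modeled here; the event
# source is assumed to come from the Runs#stream_raw helper referenced by
# the @see tags in this patch:
#
#   if event.object == :"thread.run.step.delta"
#     details = event.delta.step_details
#     # => a ToolCallDeltaObject or RunStepDeltaMessageDelta, per the union
#   end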
# @@ -34,7 +34,7 @@ class RunStepDeltaEvent < OpenAI::BaseModel # # # def initialize(id:, delta:, object: :"thread.run.step.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 7acf2e7e..178763ff 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class RunStepDeltaMessageDelta < OpenAI::BaseModel + class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `message_creation`. # @@ -30,10 +30,10 @@ class RunStepDeltaMessageDelta < OpenAI::BaseModel # # # def initialize(message_creation: nil, type: :message_creation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation - class MessageCreation < OpenAI::BaseModel + class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute [r] message_id # The ID of the message that was created by this run step. # @@ -49,7 +49,7 @@ class MessageCreation < OpenAI::BaseModel # # # def initialize(message_id: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index f2f05bbe..4c7fe791 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -6,7 +6,7 @@ module Beta module Threads module Runs module RunStepInclude - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index d978c19f..a7c48d03 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -6,7 +6,7 @@ module Beta module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#list - class StepListParams < OpenAI::BaseModel + class StepListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -52,7 +52,8 @@ class StepListParams < OpenAI::BaseModel # for more information. # # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } # @!parse # # @return [Array] @@ -91,12 +92,12 @@ class StepListParams < OpenAI::BaseModel # # # def initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 5349ca7f..efbf4aaf 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -6,7 +6,7 @@ module Beta module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve - class StepRetrieveParams < OpenAI::BaseModel + class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -31,7 +31,8 @@ class StepRetrieveParams < OpenAI::BaseModel # for more information. # # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } # @!parse # # @return [Array] @@ -45,7 +46,7 @@ class StepRetrieveParams < OpenAI::BaseModel # # # def initialize(thread_id:, run_id:, include: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 12ba7d73..eaee8252 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -7,7 +7,7 @@ module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. module ToolCall - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index c8c796f3..a99db2d3 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -7,7 +7,7 @@ module Threads module Runs # Details of the Code Interpreter tool call the run step was involved in. module ToolCallDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index b9e864cd..7f61ee05 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads module Runs - class ToolCallDeltaObject < OpenAI::BaseModel + class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `tool_calls`. # @@ -18,7 +18,8 @@ class ToolCallDeltaObject < OpenAI::BaseModel # `function`. 
# # @return [Array, nil] - optional :tool_calls, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } + optional :tool_calls, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } # @!parse # # @return [Array] @@ -32,7 +33,7 @@ class ToolCallDeltaObject < OpenAI::BaseModel # # # def initialize(tool_calls: nil, type: :tool_calls, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index b833e0ff..bd9aa901 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -5,14 +5,15 @@ module Models module Beta module Threads module Runs - class ToolCallsStepDetails < OpenAI::BaseModel + class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. # # @return [Array] - required :tool_calls, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCall] } + required :tool_calls, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCall] } # @!attribute type # Always `tool_calls`. @@ -28,7 +29,7 @@ class ToolCallsStepDetails < OpenAI::BaseModel # # # def initialize(tool_calls:, type: :tool_calls, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index f3b930f5..35eb23bc 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -4,11 +4,12 @@ module OpenAI module Models module Beta module Threads - class Text < OpenAI::BaseModel + class Text < OpenAI::Internal::Type::BaseModel # @!attribute annotations # # @return [Array] - required :annotations, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::Annotation] } + required :annotations, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Annotation] } # @!attribute value # The data that makes up the text. 
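`Text` pairs the raw `value` string with an `annotations` array whose elements are variants of the `Annotation` union. A hedged reading sketch, assuming `message` is a thread message retrieved elsewhere and its first content block is a `TextContentBlock`:

    text = message.content.first.text # => OpenAI::Models::Beta::Threads::Text
    puts text.value                   # the data that makes up the text
    text.annotations.each do |annotation|
      # each element is one Annotation variant, discriminated by its #type
      puts annotation.type
    end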
@@ -22,7 +23,7 @@ class Text < OpenAI::BaseModel # # # def initialize(annotations:, value:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index b70b93fc..0e90d460 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextContentBlock < OpenAI::BaseModel + class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # # @return [OpenAI::Models::Beta::Threads::Text] @@ -24,7 +24,7 @@ class TextContentBlock < OpenAI::BaseModel # # # def initialize(text:, type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/text_content_block_param.rb b/lib/openai/models/beta/threads/text_content_block_param.rb index ce067a8c..9cbf24b3 100644 --- a/lib/openai/models/beta/threads/text_content_block_param.rb +++ b/lib/openai/models/beta/threads/text_content_block_param.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextContentBlockParam < OpenAI::BaseModel + class TextContentBlockParam < OpenAI::Internal::Type::BaseModel # @!attribute text # Text content to be sent to the model # @@ -25,7 +25,7 @@ class TextContentBlockParam < OpenAI::BaseModel # # # def initialize(text:, type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index ee2b9db4..978ecd2c 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -4,11 +4,12 @@ module OpenAI module Models module Beta module Threads - class TextDelta < OpenAI::BaseModel + class TextDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] annotations # # @return [Array, nil] - optional :annotations, -> { OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::AnnotationDelta] } + optional :annotations, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::AnnotationDelta] } # @!parse # # @return [Array] @@ -30,7 +31,7 @@ class TextDelta < OpenAI::BaseModel # # # def initialize(annotations: nil, value: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index a06f3595..7c1b9ecc 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextDeltaBlock < OpenAI::BaseModel + class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute index # The index of the content part in the message. 
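The `*Param` classes above are the request-side counterparts of the response models, so they are the ones user code constructs. A minimal sketch based on the initializer documented for `TextContentBlockParam` (the prompt text is illustrative):

    require "openai"

    param = OpenAI::Models::Beta::Threads::TextContentBlockParam.new(
      text: "Summarize the attached report."
    )
    param.type # => :text, filled in by the initializer's default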
# @@ -35,7 +35,7 @@ class TextDeltaBlock < OpenAI::BaseModel # # # def initialize(index:, text: nil, type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 5c66a291..0f9f79a6 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -6,7 +6,7 @@ module Chat # @see OpenAI::Resources::Chat::Completions#create # # @see OpenAI::Resources::Chat::Completions#stream_raw - class ChatCompletion < OpenAI::BaseModel + class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the chat completion. # @@ -18,7 +18,7 @@ class ChatCompletion < OpenAI::BaseModel # than 1. # # @return [Array] - required :choices, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice] } + required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice] } # @!attribute created # The Unix timestamp (in seconds) of when the chat completion was created. @@ -94,9 +94,9 @@ class ChatCompletion < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Choice < OpenAI::BaseModel + class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -134,7 +134,7 @@ class Choice < OpenAI::BaseModel # # # def initialize(finish_reason:, index:, logprobs:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -145,7 +145,7 @@ class Choice < OpenAI::BaseModel # # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STOP = :stop LENGTH = :length @@ -161,18 +161,22 @@ module FinishReason end # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # # @return [Array, nil] - required :content, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + nil?: true # @!attribute refusal # A list of message refusal tokens with log probability information. # # @return [Array, nil] - required :refusal, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true + required :refusal, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + nil?: true # @!parse # # Log probability information for the choice. 
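Reading a response follows the attribute layout above: a `ChatCompletion` carries one `Choice` per requested completion, and each choice records why generation stopped. A sketch, assuming `completion` was returned by the `#create` method referenced in the `@see` tag:

    choice = completion.choices.first
    puts choice.message.content # assistant text; may be nil, e.g. for tool calls
    puts choice.finish_reason   # one of the FinishReason symbols, e.g. :stop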
@@ -182,7 +186,7 @@ class Logprobs < OpenAI::BaseModel # # # def initialize(content:, refusal:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end @@ -190,7 +194,7 @@ class Logprobs < OpenAI::BaseModel # # @see OpenAI::Models::Chat::ChatCompletion#service_tier module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index adf7a92f..5ac2d838 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAssistantMessageParam < OpenAI::BaseModel + class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the messages author, in this case `assistant`. # @@ -56,7 +56,8 @@ class ChatCompletionAssistantMessageParam < OpenAI::BaseModel # The tool calls generated by the model, such as function calls. # # @return [Array, nil] - optional :tool_calls, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } + optional :tool_calls, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } # @!parse # # @return [Array] @@ -86,10 +87,10 @@ class ChatCompletionAssistantMessageParam < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio - class Audio < OpenAI::BaseModel + class Audio < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for a previous audio response from the model. # @@ -104,7 +105,7 @@ class Audio < OpenAI::BaseModel # # # def initialize(id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The contents of the assistant message. Required unless `tool_calls` or @@ -112,7 +113,7 @@ class Audio < OpenAI::BaseModel # # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The contents of the assistant message. variant String @@ -123,7 +124,7 @@ module Content # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ArrayOfContentPart - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -142,13 +143,13 @@ module ArrayOfContentPart # def self.variants; end ArrayOfContentPartArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] end # @deprecated # # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#function_call - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. 
Note that the model does not always generate valid JSON, and may @@ -173,7 +174,7 @@ class FunctionCall < OpenAI::BaseModel # # # def initialize(arguments:, name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 2a8b5bca..e5c8bb97 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAudio < OpenAI::BaseModel + class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for this audio response. # @@ -42,7 +42,7 @@ class ChatCompletionAudio < OpenAI::BaseModel # # # def initialize(id:, data:, expires_at:, transcript:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 9260b6ff..b1ede0ce 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAudioParam < OpenAI::BaseModel + class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # @!attribute format_ # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. @@ -28,14 +28,14 @@ class ChatCompletionAudioParam < OpenAI::BaseModel # # # def initialize(format_:, voice:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum WAV = :wav MP3 = :mp3 @@ -55,7 +55,7 @@ module Format # # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 3e09badb..5c693f05 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionChunk < OpenAI::BaseModel + class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the chat completion. Each chunk has the same ID. # @@ -16,7 +16,8 @@ class ChatCompletionChunk < OpenAI::BaseModel # `stream_options: {"include_usage": true}`. # # @return [Array] - required :choices, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice] } + required :choices, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice] } # @!attribute created # The Unix timestamp (in seconds) of when the chat completion was created. 
Each @@ -95,9 +96,9 @@ class ChatCompletionChunk < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Choice < OpenAI::BaseModel + class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta # A chat completion delta generated by streamed model responses. # @@ -137,10 +138,10 @@ class Choice < OpenAI::BaseModel # # # def initialize(delta:, finish_reason:, index:, logprobs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta - class Delta < OpenAI::BaseModel + class Delta < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the chunk message. # @@ -178,7 +179,7 @@ class Delta < OpenAI::BaseModel # # @return [Array, nil] optional :tool_calls, - -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } # @!parse # # @return [Array] @@ -195,12 +196,12 @@ class Delta < OpenAI::BaseModel # # # def initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @deprecated # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -233,14 +234,14 @@ class FunctionCall < OpenAI::BaseModel # # # def initialize(arguments: nil, name: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The role of the author of this message. # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum DEVELOPER = :developer SYSTEM = :system @@ -255,7 +256,7 @@ module Role # def self.values; end end - class ToolCall < OpenAI::BaseModel + class ToolCall < OpenAI::Internal::Type::BaseModel # @!attribute index # # @return [Integer] @@ -298,10 +299,10 @@ class ToolCall < OpenAI::BaseModel # # # def initialize(index:, id: nil, function: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -331,14 +332,14 @@ class Function < OpenAI::BaseModel # # # def initialize(arguments: nil, name: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The type of the tool. Currently, only `function` is supported. 
# # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FUNCTION = :function @@ -360,7 +361,7 @@ module Type # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STOP = :stop LENGTH = :length @@ -376,18 +377,22 @@ module FinishReason end # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # # @return [Array, nil] - required :content, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + nil?: true # @!attribute refusal # A list of message refusal tokens with log probability information. # # @return [Array, nil] - required :refusal, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true + required :refusal, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + nil?: true # @!parse # # Log probability information for the choice. @@ -397,7 +402,7 @@ class Logprobs < OpenAI::BaseModel # # # def initialize(content:, refusal:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end @@ -405,7 +410,7 @@ class Logprobs < OpenAI::BaseModel # # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SCALE = :scale DEFAULT = :default diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index dfa175c4..30da9605 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -6,7 +6,7 @@ module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ChatCompletionContentPart - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -22,7 +22,7 @@ module ChatCompletionContentPart # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text generation. variant :file, -> { OpenAI::Models::Chat::ChatCompletionContentPart::File } - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel # @!attribute file # # @return [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] @@ -43,10 +43,10 @@ class File < OpenAI::BaseModel # # # def initialize(file:, type: :file, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_data # The base64 encoded file data, used when passing the file to the model as a # string. 
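`File::File` exposes `file_data`, `file_id`, and `filename`, which suggests referencing either inline base64 data or a previously uploaded file. A construction sketch using an uploaded-file ID ("file-abc123" is a placeholder):

    require "openai"

    file_part = OpenAI::Models::Chat::ChatCompletionContentPart::File.new(
      file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File.new(
        file_id: "file-abc123" # placeholder ID of a previously uploaded file
      )
    )
    file_part.type # => :file, filled in by the initializer's default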
@@ -85,7 +85,7 @@ class File < OpenAI::BaseModel # # # def initialize(file_data: nil, file_id: nil, filename: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 107b95af..04a9152b 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartImage < OpenAI::BaseModel + class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!attribute image_url # # @return [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] @@ -23,10 +23,10 @@ class ChatCompletionContentPartImage < OpenAI::BaseModel # # # def initialize(image_url:, type: :image_url, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. # @@ -50,14 +50,14 @@ class ImageURL < OpenAI::BaseModel # # # def initialize(url:, detail: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto LOW = :low diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 4be021ce..46b3b077 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartInputAudio < OpenAI::BaseModel + class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute input_audio # # @return [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] @@ -23,10 +23,10 @@ class ChatCompletionContentPartInputAudio < OpenAI::BaseModel # # # def initialize(input_audio:, type: :input_audio, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio - class InputAudio < OpenAI::BaseModel + class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64 encoded audio data. # @@ -47,13 +47,13 @@ class InputAudio < OpenAI::BaseModel # # # def initialize(data:, format_:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The format of the encoded audio data. Currently supports "wav" and "mp3". 
# # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum WAV = :wav MP3 = :mp3 diff --git a/lib/openai/models/chat/chat_completion_content_part_refusal.rb b/lib/openai/models/chat/chat_completion_content_part_refusal.rb index 83e6e914..20e3bc5d 100644 --- a/lib/openai/models/chat/chat_completion_content_part_refusal.rb +++ b/lib/openai/models/chat/chat_completion_content_part_refusal.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartRefusal < OpenAI::BaseModel + class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel # @!attribute refusal # The refusal message generated by the model. # @@ -22,7 +22,7 @@ class ChatCompletionContentPartRefusal < OpenAI::BaseModel # # # def initialize(refusal:, type: :refusal, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_content_part_text.rb b/lib/openai/models/chat/chat_completion_content_part_text.rb index 1fa4cbdb..e286368a 100644 --- a/lib/openai/models/chat/chat_completion_content_part_text.rb +++ b/lib/openai/models/chat/chat_completion_content_part_text.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartText < OpenAI::BaseModel + class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text content. # @@ -25,7 +25,7 @@ class ChatCompletionContentPartText < OpenAI::BaseModel # # # def initialize(text:, type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index 844e2cab..a743476a 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @see OpenAI::Resources::Chat::Completions#delete - class ChatCompletionDeleted < OpenAI::BaseModel + class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the chat completion that was deleted. # @@ -15,7 +15,7 @@ class ChatCompletionDeleted < OpenAI::BaseModel # Whether the chat completion was deleted. # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # The type of object being deleted. @@ -30,7 +30,7 @@ class ChatCompletionDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"chat.completion.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 6a5af05b..1fe2ecc1 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel + class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the developer message. 
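Per the `Content` union that follows, a developer message accepts either a plain string or an array of text parts. A minimal construction sketch (the instruction text is illustrative):

    require "openai"

    dev_message = OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam.new(
      content: "Answer concisely." # the Content union also allows an array of text parts
    )
    dev_message.role # => :developer, filled in by the initializer's default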
# @@ -38,13 +38,13 @@ class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel # # # def initialize(content:, name: nil, role: :developer, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The contents of the developer message. # # @see OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The contents of the developer message. variant String @@ -57,7 +57,7 @@ module Content # def self.variants; end ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_function_call_option.rb b/lib/openai/models/chat/chat_completion_function_call_option.rb index 8cc72a51..9434599e 100644 --- a/lib/openai/models/chat/chat_completion_function_call_option.rb +++ b/lib/openai/models/chat/chat_completion_function_call_option.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionFunctionCallOption < OpenAI::BaseModel + class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. # @@ -18,7 +18,7 @@ class ChatCompletionFunctionCallOption < OpenAI::BaseModel # # # def initialize(name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_function_message_param.rb b/lib/openai/models/chat/chat_completion_function_message_param.rb index 1da70875..7e00e030 100644 --- a/lib/openai/models/chat/chat_completion_function_message_param.rb +++ b/lib/openai/models/chat/chat_completion_function_message_param.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @deprecated - class ChatCompletionFunctionMessageParam < OpenAI::BaseModel + class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the function message. # @@ -30,7 +30,7 @@ class ChatCompletionFunctionMessageParam < OpenAI::BaseModel # # # def initialize(content:, name:, role: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index f153dbe8..9b1828e2 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionMessage < OpenAI::BaseModel + class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the message. # @@ -27,7 +27,8 @@ class ChatCompletionMessage < OpenAI::BaseModel # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
# # @return [Array, nil] - optional :annotations, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] } + optional :annotations, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] } # @!parse # # @return [Array] @@ -56,7 +57,8 @@ class ChatCompletionMessage < OpenAI::BaseModel # The tool calls generated by the model, such as function calls. # # @return [Array, nil] - optional :tool_calls, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } + optional :tool_calls, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } # @!parse # # @return [Array] @@ -86,9 +88,9 @@ class ChatCompletionMessage < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Annotation < OpenAI::BaseModel + class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the URL citation. Always `url_citation`. # @@ -109,10 +111,10 @@ class Annotation < OpenAI::BaseModel # # # def initialize(url_citation:, type: :url_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. # @@ -147,14 +149,14 @@ class URLCitation < OpenAI::BaseModel # # # def initialize(end_index:, start_index:, title:, url:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end # @deprecated # # @see OpenAI::Models::Chat::ChatCompletionMessage#function_call - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -179,7 +181,7 @@ class FunctionCall < OpenAI::BaseModel # # # def initialize(arguments:, name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index d55d9826..6796ea10 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -7,7 +7,7 @@ module Chat # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. module ChatCompletionMessageParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :role diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index e114fcfe..508bc7ca 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionMessageToolCall < OpenAI::BaseModel + class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call. 
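When the model elects to call tools, the assistant message carries them in `tool_calls`; as the comments above warn, the generated `arguments` string is not guaranteed to be valid JSON. A defensive reading sketch, again assuming `completion` was returned by `#create`:

    require "json"

    calls = completion.choices.first.message.tool_calls || []
    calls.each do |call|
      args =
        begin
          JSON.parse(call.function.arguments)
        rescue JSON::ParserError
          nil # the model may emit invalid JSON; validate before use
        end
      puts "#{call.id}: #{call.function.name}(#{args.inspect})"
    end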
# @@ -29,10 +29,10 @@ class ChatCompletionMessageToolCall < OpenAI::BaseModel # # # def initialize(id:, function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -56,7 +56,7 @@ class Function < OpenAI::BaseModel # # # def initialize(arguments:, name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index f759271a..05e0e087 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat module ChatCompletionModality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text AUDIO = :audio diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index 6d8c1713..9058c083 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionNamedToolChoice < OpenAI::BaseModel + class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute function # # @return [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] @@ -24,10 +24,10 @@ class ChatCompletionNamedToolChoice < OpenAI::BaseModel # # # def initialize(function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. # @@ -39,7 +39,7 @@ class Function < OpenAI::BaseModel # # # def initialize(name:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index 5865f161..dd3150fd 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionPredictionContent < OpenAI::BaseModel + class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content that should be matched when generating a model response. 
If # generated tokens would match this content, the entire model response can be @@ -28,7 +28,7 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # # # def initialize(content:, type: :content, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be @@ -36,7 +36,7 @@ class ChatCompletionPredictionContent < OpenAI::BaseModel # # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The content used for a Predicted Output. This is often the # text of a file you are regenerating with minor changes. @@ -50,7 +50,7 @@ module Content # def self.variants; end ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 33673904..87bd08a8 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -5,7 +5,7 @@ module Models module Chat # The role of the author of a message module ChatCompletionRole - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum DEVELOPER = :developer SYSTEM = :system diff --git a/lib/openai/models/chat/chat_completion_store_message.rb b/lib/openai/models/chat/chat_completion_store_message.rb index 6dc8cc30..3ae2370c 100644 --- a/lib/openai/models/chat/chat_completion_store_message.rb +++ b/lib/openai/models/chat/chat_completion_store_message.rb @@ -17,7 +17,7 @@ class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # # # def initialize(id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index 0d66702d..e7b99c89 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionStreamOptions < OpenAI::BaseModel + class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire @@ -14,7 +14,7 @@ class ChatCompletionStreamOptions < OpenAI::BaseModel # chunk which contains the total token usage for the request. 
# # @return [Boolean, nil] - optional :include_usage, OpenAI::BooleanModel + optional :include_usage, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -27,7 +27,7 @@ class ChatCompletionStreamOptions < OpenAI::BaseModel # # # def initialize(include_usage: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 6426eaad..d1d9b2f7 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionSystemMessageParam < OpenAI::BaseModel + class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the system message. # @@ -38,13 +38,13 @@ class ChatCompletionSystemMessageParam < OpenAI::BaseModel # # # def initialize(content:, name: nil, role: :system, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The contents of the system message. # # @see OpenAI::Models::Chat::ChatCompletionSystemMessageParam#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The contents of the system message. variant String @@ -57,7 +57,7 @@ module Content # def self.variants; end ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index d4faf321..8b6d0019 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionTokenLogprob < OpenAI::BaseModel + class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token # The token. # @@ -17,7 +17,7 @@ class ChatCompletionTokenLogprob < OpenAI::BaseModel # representation. Can be `null` if there is no bytes representation for the token. # # @return [Array, nil] - required :bytes, OpenAI::ArrayOf[Integer], nil?: true + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer], nil?: true # @!attribute logprob # The log probability of this token, if it is within the top 20 most likely @@ -34,7 +34,7 @@ class ChatCompletionTokenLogprob < OpenAI::BaseModel # # @return [Array] required :top_logprobs, - -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } # @!parse # # @param token [String] @@ -44,9 +44,9 @@ class ChatCompletionTokenLogprob < OpenAI::BaseModel # # # def initialize(token:, bytes:, logprob:, top_logprobs:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class TopLogprob < OpenAI::BaseModel + class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token # The token. # @@ -60,7 +60,7 @@ class TopLogprob < OpenAI::BaseModel # representation. 
Can be `null` if there is no bytes representation for the token. # # @return [Array, nil] - required :bytes, OpenAI::ArrayOf[Integer], nil?: true + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer], nil?: true # @!attribute logprob # The log probability of this token, if it is within the top 20 most likely @@ -77,7 +77,7 @@ class TopLogprob < OpenAI::BaseModel # # # def initialize(token:, bytes:, logprob:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index f027725f..8737a603 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionTool < OpenAI::BaseModel + class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # # @return [OpenAI::Models::FunctionDefinition] @@ -21,7 +21,7 @@ class ChatCompletionTool < OpenAI::BaseModel # # # def initialize(function:, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 7b6d3ff8..bb04bbc8 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -13,7 +13,7 @@ module Chat # `none` is the default when no tools are present. `auto` is the default if tools # are present. module ChatCompletionToolChoiceOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. variant enum: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto } @@ -25,7 +25,7 @@ module ChatCompletionToolChoiceOption # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. module Auto - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum NONE = :none AUTO = :auto diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 37d6d123..20c3a014 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionToolMessageParam < OpenAI::BaseModel + class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the tool message. # @@ -29,13 +29,13 @@ class ChatCompletionToolMessageParam < OpenAI::BaseModel # # # def initialize(content:, tool_call_id:, role: :tool, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The contents of the tool message. # # @see OpenAI::Models::Chat::ChatCompletionToolMessageParam#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The contents of the tool message. 
variant String @@ -48,7 +48,7 @@ module Content # def self.variants; end ChatCompletionContentPartTextArray = - OpenAI::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index eb640b44..ba2c2dbe 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionUserMessageParam < OpenAI::BaseModel + class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the user message. # @@ -37,13 +37,13 @@ class ChatCompletionUserMessageParam < OpenAI::BaseModel # # # def initialize(content:, name: nil, role: :user, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The contents of the user message. # # @see OpenAI::Models::Chat::ChatCompletionUserMessageParam#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The text contents of the message. variant String @@ -56,7 +56,7 @@ module Content # def self.variants; end ChatCompletionContentPartArray = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] end end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 6b8c53a3..792691a9 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -6,7 +6,7 @@ module Chat # @see OpenAI::Resources::Chat::Completions#create # # @see OpenAI::Resources::Chat::Completions#stream_raw - class CompletionCreateParams < OpenAI::BaseModel + class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,8 @@ class CompletionCreateParams < OpenAI::BaseModel # [audio](https://platform.openai.com/docs/guides/audio). # # @return [Array] - required :messages, -> { OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionMessageParam] } + required :messages, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionMessageParam] } # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a @@ -77,7 +78,8 @@ class CompletionCreateParams < OpenAI::BaseModel # A list of functions the model may generate JSON inputs for. # # @return [Array, nil] - optional :functions, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::CompletionCreateParams::Function] } + optional :functions, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::CompletionCreateParams::Function] } # @!parse # # @return [Array] @@ -94,7 +96,7 @@ class CompletionCreateParams < OpenAI::BaseModel # or exclusive selection of the relevant token. # # @return [Hash{Symbol=>Integer}, nil] - optional :logit_bias, OpenAI::HashOf[Integer], nil?: true + optional :logit_bias, OpenAI::Internal::Type::HashOf[Integer], nil?: true # @!attribute logprobs # Whether to return log probabilities of the output tokens or not. 
If true, @@ -102,7 +104,7 @@ class CompletionCreateParams < OpenAI::BaseModel # `message`. # # @return [Boolean, nil] - optional :logprobs, OpenAI::BooleanModel, nil?: true + optional :logprobs, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute max_completion_tokens # An upper bound for the number of tokens that can be generated for a completion, @@ -133,7 +135,7 @@ class CompletionCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute modalities # Output types that you would like the model to generate. Most models are capable @@ -149,7 +151,7 @@ class CompletionCreateParams < OpenAI::BaseModel # # @return [Array, nil] optional :modalities, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Chat::CompletionCreateParams::Modality] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Chat::CompletionCreateParams::Modality] }, nil?: true # @!attribute n @@ -166,7 +168,7 @@ class CompletionCreateParams < OpenAI::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -259,7 +261,7 @@ class CompletionCreateParams < OpenAI::BaseModel # or [evals](https://platform.openai.com/docs/guides/evals) products. # # @return [Boolean, nil] - optional :store, OpenAI::BooleanModel, nil?: true + optional :store, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. @@ -300,7 +302,7 @@ class CompletionCreateParams < OpenAI::BaseModel # for. A max of 128 functions are supported. # # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } # @!parse # # @return [Array] @@ -418,7 +420,7 @@ class CompletionCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -426,7 +428,7 @@ class CompletionCreateParams < OpenAI::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -458,7 +460,7 @@ module Model # `none` is the default when no functions are present. `auto` is the default if # functions are present. module FunctionCall - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call a function and instead generates a message. `auto` means the model can pick between generating a message or calling a function. variant enum: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } @@ -470,7 +472,7 @@ module FunctionCall # `auto` means the model can pick between generating a message or calling a # function. 
module FunctionCallMode - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum NONE = :none AUTO = :auto @@ -488,7 +490,7 @@ module FunctionCallMode end # @deprecated - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. @@ -517,7 +519,7 @@ class Function < OpenAI::BaseModel # Omitting `parameters` defines a function with an empty parameter list. # # @return [Hash{Symbol=>Object}, nil] - optional :parameters, OpenAI::HashOf[OpenAI::Unknown] + optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!parse # # @return [Hash{Symbol=>Object}] @@ -530,11 +532,11 @@ class Function < OpenAI::BaseModel # # # def initialize(name:, description: nil, parameters: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end module Modality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text AUDIO = :audio @@ -557,7 +559,7 @@ module Modality # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. module ResponseFormat - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # Default response format. Used to generate text responses. variant -> { OpenAI::Models::ResponseFormatText } @@ -592,7 +594,7 @@ module ResponseFormat # When this parameter is set, the response body will include the `service_tier` # utilized. module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DEFAULT = :default @@ -607,7 +609,7 @@ module ServiceTier # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -617,10 +619,10 @@ module Stop # # @return [Array(String, Array)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] end - class WebSearchOptions < OpenAI::BaseModel + class WebSearchOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. @@ -651,14 +653,14 @@ class WebSearchOptions < OpenAI::BaseModel # # # def initialize(search_context_size: nil, user_location: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LOW = :low MEDIUM = :medium @@ -672,7 +674,7 @@ module SearchContextSize end # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location - class UserLocation < OpenAI::BaseModel + class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute approximate # Approximate location parameters for the search. 
# @@ -694,10 +696,10 @@ class UserLocation < OpenAI::BaseModel # # # def initialize(approximate:, type: :approximate, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate - class Approximate < OpenAI::BaseModel + class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute [r] city # Free text input for the city of the user, e.g. `San Francisco`. # @@ -750,7 +752,7 @@ class Approximate < OpenAI::BaseModel # # # def initialize(city: nil, country: nil, region: nil, timezone: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index 69c7c1aa..a7441968 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @see OpenAI::Resources::Chat::Completions#delete - class CompletionDeleteParams < OpenAI::BaseModel + class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class CompletionDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index c369717e..5e2f39ec 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @see OpenAI::Resources::Chat::Completions#list - class CompletionListParams < OpenAI::BaseModel + class CompletionListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -35,7 +35,7 @@ class CompletionListParams < OpenAI::BaseModel # `metadata[key1]=value1&metadata[key2]=value2` # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] model # The model used to generate the Chat Completions. @@ -68,12 +68,12 @@ class CompletionListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
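      # A hedged usage sketch for the `Order` enum that follows; the method
      # shape is assumed from the @see OpenAI::Resources::Chat::Completions#list
      # reference in this file:
      #
      #   client.chat.completions.list(limit: 20, order: :desc, model: "gpt-4o")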
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index 42086b40..29df8685 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @see OpenAI::Resources::Chat::Completions#retrieve - class CompletionRetrieveParams < OpenAI::BaseModel + class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class CompletionRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 87539dd0..54a9a688 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # @see OpenAI::Resources::Chat::Completions#update - class CompletionUpdateParams < OpenAI::BaseModel + class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -18,7 +18,7 @@ class CompletionUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param metadata [Hash{Symbol=>String}, nil] @@ -26,7 +26,7 @@ class CompletionUpdateParams < OpenAI::BaseModel # # # def initialize(metadata:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 271056ab..18970d11 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -5,7 +5,7 @@ module Models module Chat module Completions # @see OpenAI::Resources::Chat::Completions::Messages#list - class MessageListParams < OpenAI::BaseModel + class MessageListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,12 +49,12 @@ class MessageListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
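        # Similarly, a hedged sketch for the per-completion message listing
        # below; the resource path is assumed from the @see
        # OpenAI::Resources::Chat::Completions::Messages#list reference, and
        # the completion ID is a placeholder:
        #
        #   client.chat.completions.messages.list("chatcmpl_abc123", order: :asc, limit: 50)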
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 3da0931e..a3021397 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -3,7 +3,7 @@ module OpenAI module Models module ChatModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 313ae677..2a1f6511 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ComparisonFilter < OpenAI::BaseModel + class ComparisonFilter < OpenAI::Internal::Type::BaseModel # @!attribute key # The key to compare against the value. # @@ -39,7 +39,7 @@ class ComparisonFilter < OpenAI::BaseModel # # # def initialize(key:, type:, value:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -52,7 +52,7 @@ class ComparisonFilter < OpenAI::BaseModel # # @see OpenAI::Models::ComparisonFilter#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum EQ = :eq NE = :ne @@ -73,13 +73,13 @@ module Type # # @see OpenAI::Models::ComparisonFilter#value module Value - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 791a65a3..5d7fd8b0 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -5,7 +5,7 @@ module Models # @see OpenAI::Resources::Completions#create # # @see OpenAI::Resources::Completions#stream_raw - class Completion < OpenAI::BaseModel + class Completion < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the completion. # @@ -16,7 +16,7 @@ class Completion < OpenAI::BaseModel # The list of completion choices the model generated for the input prompt. # # @return [Array] - required :choices, -> { OpenAI::ArrayOf[OpenAI::Models::CompletionChoice] } + required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::CompletionChoice] } # @!attribute created # The Unix timestamp (in seconds) of when the completion was created. @@ -73,7 +73,7 @@ class Completion < OpenAI::BaseModel # # # def initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 66f575ee..1a2084f4 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class CompletionChoice < OpenAI::BaseModel + class CompletionChoice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason # The reason the model stopped generating tokens. 
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -35,7 +35,7 @@ class CompletionChoice < OpenAI::BaseModel # # # def initialize(finish_reason:, index:, logprobs:, text:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -44,7 +44,7 @@ class CompletionChoice < OpenAI::BaseModel # # @see OpenAI::Models::CompletionChoice#finish_reason module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STOP = :stop LENGTH = :length @@ -58,11 +58,11 @@ module FinishReason end # @see OpenAI::Models::CompletionChoice#logprobs - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute [r] text_offset # # @return [Array, nil] - optional :text_offset, OpenAI::ArrayOf[Integer] + optional :text_offset, OpenAI::Internal::Type::ArrayOf[Integer] # @!parse # # @return [Array] @@ -71,7 +71,7 @@ class Logprobs < OpenAI::BaseModel # @!attribute [r] token_logprobs # # @return [Array, nil] - optional :token_logprobs, OpenAI::ArrayOf[Float] + optional :token_logprobs, OpenAI::Internal::Type::ArrayOf[Float] # @!parse # # @return [Array] @@ -80,7 +80,7 @@ class Logprobs < OpenAI::BaseModel # @!attribute [r] tokens # # @return [Array, nil] - optional :tokens, OpenAI::ArrayOf[String] + optional :tokens, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -89,7 +89,7 @@ class Logprobs < OpenAI::BaseModel # @!attribute [r] top_logprobs # # @return [ArrayFloat}>, nil] - optional :top_logprobs, OpenAI::ArrayOf[OpenAI::HashOf[Float]] + optional :top_logprobs, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[Float]] # @!parse # # @return [ArrayFloat}>] @@ -103,7 +103,7 @@ class Logprobs < OpenAI::BaseModel # # # def initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 993c28d2..e17ce7d4 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -5,7 +5,7 @@ module Models # @see OpenAI::Resources::Completions#create # # @see OpenAI::Resources::Completions#stream_raw - class CompletionCreateParams < OpenAI::BaseModel + class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ class CompletionCreateParams < OpenAI::BaseModel # Echo back the prompt in addition to the completion # # @return [Boolean, nil] - optional :echo, OpenAI::BooleanModel, nil?: true + optional :echo, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute frequency_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on their @@ -76,7 +76,7 @@ class CompletionCreateParams < OpenAI::BaseModel # from being generated. 
# # @return [Hash{Symbol=>Integer}, nil] - optional :logit_bias, OpenAI::HashOf[Integer], nil?: true + optional :logit_bias, OpenAI::Internal::Type::HashOf[Integer], nil?: true # @!attribute logprobs # Include the log probabilities on the `logprobs` most likely output tokens, as @@ -229,7 +229,7 @@ class CompletionCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -237,7 +237,7 @@ class CompletionCreateParams < OpenAI::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -267,7 +267,7 @@ module Model # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. module Prompt - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -281,17 +281,17 @@ module Prompt # # @return [Array(String, Array, Array, Array>)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] - IntegerArray = OpenAI::ArrayOf[Integer] + IntegerArray = OpenAI::Internal::Type::ArrayOf[Integer] - ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] + ArrayOfToken2DArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]] end # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -301,7 +301,7 @@ module Stop # # @return [Array(String, Array)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] end end end diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 553021f3..bdcff97d 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class CompletionUsage < OpenAI::BaseModel + class CompletionUsage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of tokens in the generated completion. # @@ -61,10 +61,10 @@ class CompletionUsage < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::CompletionUsage#completion_tokens_details - class CompletionTokensDetails < OpenAI::BaseModel + class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. @@ -127,11 +127,11 @@ class CompletionTokensDetails < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::CompletionUsage#prompt_tokens_details - class PromptTokensDetails < OpenAI::BaseModel + class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] audio_tokens # Audio input tokens present in the prompt. 
# @@ -160,7 +160,7 @@ class PromptTokensDetails < OpenAI::BaseModel # # # def initialize(audio_tokens: nil, cached_tokens: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 0a5c34a4..fa9859e2 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -2,13 +2,13 @@ module OpenAI module Models - class CompoundFilter < OpenAI::BaseModel + class CompoundFilter < OpenAI::Internal::Type::BaseModel # @!attribute filters # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. # # @return [Array] - required :filters, -> { OpenAI::ArrayOf[union: OpenAI::Models::CompoundFilter::Filter] } + required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::CompoundFilter::Filter] } # @!attribute type # Type of operation: `and` or `or`. @@ -24,17 +24,17 @@ class CompoundFilter < OpenAI::BaseModel # # # def initialize(filters:, type:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. module Filter - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. variant -> { OpenAI::Models::ComparisonFilter } - variant OpenAI::Unknown + variant OpenAI::Internal::Type::Unknown # @!parse # # @return [Array(OpenAI::Models::ComparisonFilter, Object)] @@ -45,7 +45,7 @@ module Filter # # @see OpenAI::Models::CompoundFilter#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AND = :and OR = :or diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index 3fbd7bc4..bde0fa74 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -3,12 +3,12 @@ module OpenAI module Models # @see OpenAI::Resources::Embeddings#create - class CreateEmbeddingResponse < OpenAI::BaseModel + class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of embeddings generated by the model. # # @return [Array] - required :data, -> { OpenAI::ArrayOf[OpenAI::Models::Embedding] } + required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Embedding] } # @!attribute model # The name of the model used to generate the embedding. @@ -36,10 +36,10 @@ class CreateEmbeddingResponse < OpenAI::BaseModel # # # def initialize(data:, model:, usage:, object: :list, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::CreateEmbeddingResponse#usage - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # @!attribute prompt_tokens # The number of tokens used by the prompt. 
# @@ -60,7 +60,7 @@ class Usage < OpenAI::BaseModel # # # def initialize(prompt_tokens:, total_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index 5ac4c6c9..d71f4752 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -2,14 +2,14 @@ module OpenAI module Models - class Embedding < OpenAI::BaseModel + class Embedding < OpenAI::Internal::Type::BaseModel # @!attribute embedding # The embedding vector, which is a list of floats. The length of vector depends on # the model as listed in the # [embedding guide](https://platform.openai.com/docs/guides/embeddings). # # @return [Array] - required :embedding, OpenAI::ArrayOf[Float] + required :embedding, OpenAI::Internal::Type::ArrayOf[Float] # @!attribute index # The index of the embedding in the list of embeddings. @@ -32,7 +32,7 @@ class Embedding < OpenAI::BaseModel # # # def initialize(embedding:, index:, object: :embedding, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 12977ee8..4705cd60 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Embeddings#create - class EmbeddingCreateParams < OpenAI::BaseModel + class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -75,7 +75,7 @@ class EmbeddingCreateParams < OpenAI::BaseModel # # # def initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. @@ -86,7 +86,7 @@ class EmbeddingCreateParams < OpenAI::BaseModel # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # The string that will be turned into an embedding. variant String @@ -104,11 +104,11 @@ module Input # # @return [Array(String, Array, Array, Array>)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] - IntegerArray = OpenAI::ArrayOf[Integer] + IntegerArray = OpenAI::Internal::Type::ArrayOf[Integer] - ArrayOfToken2DArray = OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]] + ArrayOfToken2DArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]] end # ID of the model to use. You can use the @@ -117,7 +117,7 @@ module Input # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -132,7 +132,7 @@ module Model # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
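      # A hedged sketch of selecting the encoding format defined below; the
      # resource method is assumed from the @see
      # OpenAI::Resources::Embeddings#create reference in this file:
      #
      #   response = client.embeddings.create(
      #     input: "The quick brown fox",
      #     model: "text-embedding-3-small",
      #     encoding_format: :base64
      #   )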
module EncodingFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FLOAT = :float BASE64 = :base64 diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index e2e7df6a..33ab9d7f 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -3,7 +3,7 @@ module OpenAI module Models module EmbeddingModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT_EMBEDDING_ADA_002 = :"text-embedding-ada-002" TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" diff --git a/lib/openai/models/error_object.rb b/lib/openai/models/error_object.rb index c6aaddc6..37031a55 100644 --- a/lib/openai/models/error_object.rb +++ b/lib/openai/models/error_object.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ErrorObject < OpenAI::BaseModel + class ErrorObject < OpenAI::Internal::Type::BaseModel # @!attribute code # # @return [String, nil] @@ -31,7 +31,7 @@ class ErrorObject < OpenAI::BaseModel # # # def initialize(code:, message:, param:, type:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 5cfb6edf..0c37933e 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -4,7 +4,7 @@ module OpenAI module Models # The strategy used to chunk the file. module FileChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 38682a15..6652fa42 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -5,7 +5,7 @@ module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
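    # A hedged sketch of the discriminated union that follows:
    # `discriminator :type` routes a plain hash to the matching variant. The
    # field names inside the static strategy are assumptions, not established
    # by this patch:
    #
    #   auto_strategy   = {type: :auto}
    #   static_strategy = {
    #     type: :static,
    #     static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}  # assumed fields
    #   }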
module FileChunkingStrategyParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index dcb5cd2b..96b979af 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#content - class FileContentParams < OpenAI::BaseModel + class FileContentParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class FileContentParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 7049b873..43683821 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#create - class FileCreateParams < OpenAI::BaseModel + class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -30,7 +30,7 @@ class FileCreateParams < OpenAI::BaseModel # # # def initialize(file:, purpose:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index cba48c40..fda911ad 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#delete - class FileDeleteParams < OpenAI::BaseModel + class FileDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class FileDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/file_deleted.rb b/lib/openai/models/file_deleted.rb index c9841f7e..a391a385 100644 --- a/lib/openai/models/file_deleted.rb +++ b/lib/openai/models/file_deleted.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#delete - class FileDeleted < OpenAI::BaseModel + class FileDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -12,7 +12,7 @@ class FileDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -26,7 +26,7 @@ class FileDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :file, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 53f26749..d5a7bce4 100644 --- a/lib/openai/models/file_list_params.rb +++ 
b/lib/openai/models/file_list_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#list - class FileListParams < OpenAI::BaseModel + class FileListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -62,12 +62,12 @@ class FileListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index 1b6ca589..d25bcfa7 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#create - class FileObject < OpenAI::BaseModel + class FileObject < OpenAI::Internal::Type::BaseModel # @!attribute id # The file identifier, which can be referenced in the API endpoints. # @@ -98,7 +98,7 @@ class FileObject < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` @@ -106,7 +106,7 @@ class FileObject < OpenAI::BaseModel # # @see OpenAI::Models::FileObject#purpose module Purpose - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASSISTANTS = :assistants ASSISTANTS_OUTPUT = :assistants_output @@ -130,7 +130,7 @@ module Purpose # # @see OpenAI::Models::FileObject#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum UPLOADED = :uploaded PROCESSED = :processed diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 73d23787..3b9a9976 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -7,7 +7,7 @@ module Models # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets module FilePurpose - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASSISTANTS = :assistants BATCH = :batch diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index 187535fd..ec128c3d 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#retrieve - class FileRetrieveParams < OpenAI::BaseModel + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class FileRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index cce986bc..9b72f23f 100644 --- 
a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#create - class FineTuningJob < OpenAI::BaseModel + class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute id # The object identifier, which can be referenced in the API endpoints. # @@ -69,7 +69,7 @@ class FineTuningJob < OpenAI::BaseModel # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). # # @return [Array] - required :result_files, OpenAI::ArrayOf[String] + required :result_files, OpenAI::Internal::Type::ArrayOf[String] # @!attribute seed # The seed used for the fine-tuning job. @@ -118,7 +118,7 @@ class FineTuningJob < OpenAI::BaseModel # # @return [Array, nil] optional :integrations, - -> { OpenAI::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject] }, nil?: true # @!attribute metadata @@ -130,7 +130,7 @@ class FineTuningJob < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] method_ # The method used for fine-tuning. @@ -191,10 +191,10 @@ class FineTuningJob < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::FineTuningJob#error - class Error < OpenAI::BaseModel + class Error < OpenAI::Internal::Type::BaseModel # @!attribute code # A machine-readable error code. # @@ -224,11 +224,11 @@ class Error < OpenAI::BaseModel # # # def initialize(code:, message:, param:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -273,14 +273,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
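        # A hedged sketch of the `auto`-or-number unions declared below: each
        # hyperparameter accepts the `:auto` constant variant or a concrete
        # value (names mirror the attributes documented in this file):
        #
        #   hyperparameters = {
        #     batch_size: :auto,              # const variant
        #     learning_rate_multiplier: 1.8,  # numeric variant
        #     n_epochs: 3                     # numeric variant
        #   }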
# # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -296,7 +296,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -312,7 +312,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -329,7 +329,7 @@ module NEpochs # # @see OpenAI::Models::FineTuning::FineTuningJob#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum VALIDATING_FILES = :validating_files QUEUED = :queued @@ -346,7 +346,7 @@ module Status end # @see OpenAI::Models::FineTuning::FineTuningJob#method_ - class Method < OpenAI::BaseModel + class Method < OpenAI::Internal::Type::BaseModel # @!attribute [r] dpo # Configuration for the DPO fine-tuning method. # @@ -386,10 +386,10 @@ class Method < OpenAI::BaseModel # # # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo - class Dpo < OpenAI::BaseModel + class Dpo < OpenAI::Internal::Type::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. # @@ -407,10 +407,10 @@ class Dpo < OpenAI::BaseModel # # # def initialize(hyperparameters: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -469,14 +469,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
# # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -492,7 +492,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta module Beta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -508,7 +508,7 @@ module Beta # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -524,7 +524,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -538,7 +538,7 @@ module NEpochs end # @see OpenAI::Models::FineTuning::FineTuningJob::Method#supervised - class Supervised < OpenAI::BaseModel + class Supervised < OpenAI::Internal::Type::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. # @@ -557,10 +557,10 @@ class Supervised < OpenAI::BaseModel # # # def initialize(hyperparameters: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -606,14 +606,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
# # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -629,7 +629,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -645,7 +645,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -662,7 +662,7 @@ module NEpochs # # @see OpenAI::Models::FineTuning::FineTuningJob::Method#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 60995c3b..a354bb5a 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list_events - class FineTuningJobEvent < OpenAI::BaseModel + class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute id # The object identifier. # @@ -39,7 +39,7 @@ class FineTuningJobEvent < OpenAI::BaseModel # The data associated with the event. # # @return [Object, nil] - optional :data, OpenAI::Unknown + optional :data, OpenAI::Internal::Type::Unknown # @!parse # # @return [Object] @@ -68,13 +68,13 @@ class FineTuningJobEvent < OpenAI::BaseModel # # # def initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The log level of the event. # # @see OpenAI::Models::FineTuning::FineTuningJobEvent#level module Level - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum INFO = :info WARN = :warn @@ -91,7 +91,7 @@ module Level # # @see OpenAI::Models::FineTuning::FineTuningJobEvent#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE = :message METRICS = :metrics diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 3d4ee69a..5b1dc8c8 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJobWandbIntegration < OpenAI::BaseModel + class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!attribute project # The name of the project that the new run will be created under. # @@ -31,7 +31,7 @@ class FineTuningJobWandbIntegration < OpenAI::BaseModel # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". 
# # @return [Array, nil] - optional :tags, OpenAI::ArrayOf[String] + optional :tags, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -50,7 +50,7 @@ class FineTuningJobWandbIntegration < OpenAI::BaseModel # # # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 9d1a4377..05920c94 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel + class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the integration being enabled for the fine-tuning job # @@ -25,7 +25,7 @@ class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel # # # def initialize(wandb:, type: :wandb, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 465bc7c2..92eda537 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#cancel - class JobCancelParams < OpenAI::BaseModel + class JobCancelParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class JobCancelParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index bd2f2f88..89dcb87d 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#create - class JobCreateParams < OpenAI::BaseModel + class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -54,7 +54,7 @@ class JobCreateParams < OpenAI::BaseModel # # @return [Array, nil] optional :integrations, - -> { OpenAI::ArrayOf[OpenAI::Models::FineTuning::JobCreateParams::Integration] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::JobCreateParams::Integration] }, nil?: true # @!attribute metadata @@ -66,7 +66,7 @@ class JobCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] method_ # The method used for fine-tuning. 
@@ -141,12 +141,12 @@ class JobCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -173,7 +173,7 @@ module Model end # @deprecated - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -219,14 +219,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -242,7 +242,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -258,7 +258,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -270,7 +270,7 @@ module NEpochs end end - class Integration < OpenAI::BaseModel + class Integration < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of integration to enable. Currently, only "wandb" (Weights and Biases) # is supported. @@ -293,10 +293,10 @@ class Integration < OpenAI::BaseModel # # # def initialize(wandb:, type: :wandb, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb - class Wandb < OpenAI::BaseModel + class Wandb < OpenAI::Internal::Type::BaseModel # @!attribute project # The name of the project that the new run will be created under. # @@ -324,7 +324,7 @@ class Wandb < OpenAI::BaseModel # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". # # @return [Array, nil] - optional :tags, OpenAI::ArrayOf[String] + optional :tags, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -343,11 +343,11 @@ class Wandb < OpenAI::BaseModel # # # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end - class Method < OpenAI::BaseModel + class Method < OpenAI::Internal::Type::BaseModel # @!attribute [r] dpo # Configuration for the DPO fine-tuning method. 
# @@ -387,10 +387,10 @@ class Method < OpenAI::BaseModel # # # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo - class Dpo < OpenAI::BaseModel + class Dpo < OpenAI::Internal::Type::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. # @@ -409,10 +409,10 @@ class Dpo < OpenAI::BaseModel # # # def initialize(hyperparameters: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -471,14 +471,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -494,7 +494,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta module Beta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -510,7 +510,7 @@ module Beta # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -526,7 +526,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -540,7 +540,7 @@ module NEpochs end # @see OpenAI::Models::FineTuning::JobCreateParams::Method#supervised - class Supervised < OpenAI::BaseModel + class Supervised < OpenAI::Internal::Type::BaseModel # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. # @@ -559,10 +559,10 @@ class Supervised < OpenAI::BaseModel # # # def initialize(hyperparameters: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
@@ -608,14 +608,14 @@ class Hyperparameters < OpenAI::BaseModel # # # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -631,7 +631,7 @@ module BatchSize # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -647,7 +647,7 @@ module LearningRateMultiplier # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant const: :auto @@ -664,7 +664,7 @@ module NEpochs # # @see OpenAI::Models::FineTuning::JobCreateParams::Method#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SUPERVISED = :supervised DPO = :dpo diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index 0e9abe08..d69f7e6e 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list_events - class JobListEventsParams < OpenAI::BaseModel + class JobListEventsParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -36,7 +36,7 @@ class JobListEventsParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index 4922697f..e0ffb31f 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list - class JobListParams < OpenAI::BaseModel + class JobListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -34,7 +34,7 @@ class JobListParams < OpenAI::BaseModel # Alternatively, set `metadata=null` to indicate no metadata. 
# # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!parse # # @param after [String] @@ -44,7 +44,7 @@ class JobListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, metadata: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index 17337029..3fa511ac 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#retrieve - class JobRetrieveParams < OpenAI::BaseModel + class JobRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class JobRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index 7a7b3c86..fc91cb4e 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -5,7 +5,7 @@ module Models module FineTuning module Jobs # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list - class CheckpointListParams < OpenAI::BaseModel + class CheckpointListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -37,7 +37,7 @@ class CheckpointListParams < OpenAI::BaseModel # # # def initialize(after: nil, limit: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 75874793..0af58cbe 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -5,7 +5,7 @@ module Models module FineTuning module Jobs # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list - class FineTuningJobCheckpoint < OpenAI::BaseModel + class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @!attribute id # The checkpoint identifier, which can be referenced in the API endpoints. 
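 # A hedged usage sketch for the list-params classes above. The resource
 # accessors and positional job id are assumed from the @see references
 # (`OpenAI::Resources::FineTuning::Jobs#list_events`,
 # `OpenAI::Resources::FineTuning::Jobs#list`); they are not part of this
 # patch, and the ids are placeholders:
 #
 #   client.fine_tuning.jobs.list_events("ftjob-abc123", after: "ft-event-xyz", limit: 10)
 #
 #   # `nil?: true` on `metadata` permits an explicit null, i.e. "no metadata":
 #   client.fine_tuning.jobs.list(limit: 20, metadata: nil)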
# @@ -73,10 +73,10 @@ class FineTuningJobCheckpoint < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics - class Metrics < OpenAI::BaseModel + class Metrics < OpenAI::Internal::Type::BaseModel # @!attribute [r] full_valid_loss # # @return [Float, nil] @@ -164,7 +164,7 @@ class Metrics < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 8d805ee6..c261b034 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class FunctionDefinition < OpenAI::BaseModel + class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. @@ -31,7 +31,7 @@ class FunctionDefinition < OpenAI::BaseModel # Omitting `parameters` defines a function with an empty parameter list. # # @return [Hash{Symbol=>Object}, nil] - optional :parameters, OpenAI::HashOf[OpenAI::Unknown] + optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!parse # # @return [Hash{Symbol=>Object}] @@ -45,7 +45,7 @@ class FunctionDefinition < OpenAI::BaseModel # [function calling guide](docs/guides/function-calling). # # @return [Boolean, nil] - optional :strict, OpenAI::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true # @!parse # # @param name [String] @@ -55,7 +55,7 @@ class FunctionDefinition < OpenAI::BaseModel # # # def initialize(name:, description: nil, parameters: nil, strict: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/function_parameters.rb b/lib/openai/models/function_parameters.rb index a443f9b9..c32b337a 100644 --- a/lib/openai/models/function_parameters.rb +++ b/lib/openai/models/function_parameters.rb @@ -2,6 +2,6 @@ module OpenAI module Models - FunctionParameters = OpenAI::HashOf[OpenAI::Unknown] + FunctionParameters = OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] end end diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index c8ad72d4..e3842efd 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # @!attribute [r] b64_json # The base64-encoded JSON of the generated image, if `response_format` is # `b64_json`. 
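 # A minimal construction sketch for FunctionDefinition above; the function
 # name and JSON schema are hypothetical placeholders:
 #
 #   fn = OpenAI::Models::FunctionDefinition.new(
 #     name: "get_weather",
 #     description: "Look up the current weather for a city.",
 #     parameters: {
 #       type: "object",
 #       properties: {city: {type: "string"}},
 #       required: ["city"]
 #     },
 #     strict: true   # validated by OpenAI::Internal::Type::BooleanModel
 #   )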
@@ -44,7 +44,7 @@ class Image < OpenAI::BaseModel # # # def initialize(b64_json: nil, revised_prompt: nil, url: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index fdc94a1e..06bd032f 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#create_variation - class ImageCreateVariationParams < OpenAI::BaseModel + class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -69,12 +69,12 @@ class ImageCreateVariationParams < OpenAI::BaseModel # # # def initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The model to use for image generation. Only `dall-e-2` is supported at this # time. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -90,7 +90,7 @@ module Model # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL = :url B64_JSON = :b64_json @@ -105,7 +105,7 @@ module ResponseFormat # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256 = :"256x256" SIZE_512X512 = :"512x512" diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index f14ec178..43a5120c 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#edit - class ImageEditParams < OpenAI::BaseModel + class ImageEditParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -100,12 +100,12 @@ class ImageEditParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The model to use for image generation. Only `dall-e-2` is supported at this # time. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -121,7 +121,7 @@ module Model # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL = :url B64_JSON = :b64_json @@ -136,7 +136,7 @@ module ResponseFormat # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
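 # A hedged usage sketch for the variation params above (see
 # OpenAI::Resources::Images#create_variation). The file name is a
 # placeholder, and `image:` accepting an open IO is assumed here:
 #
 #   client.images.create_variation(
 #     image: File.open("otter.png", "rb"),
 #     n: 2,
 #     response_format: :url,   # ResponseFormat enum value
 #     size: :"1024x1024"       # Size enum value
 #   )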
module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256 = :"256x256" SIZE_512X512 = :"512x512" diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 38a73865..30a2eaa3 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#generate - class ImageGenerateParams < OpenAI::BaseModel + class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -103,11 +103,11 @@ class ImageGenerateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The model to use for image generation. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -123,7 +123,7 @@ module Model # details and greater consistency across the image. This param is only supported # for `dall-e-3`. module Quality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STANDARD = :standard HD = :hd @@ -139,7 +139,7 @@ module Quality # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL = :url B64_JSON = :b64_json @@ -155,7 +155,7 @@ module ResponseFormat # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256 = :"256x256" SIZE_512X512 = :"512x512" @@ -175,7 +175,7 @@ module Size # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
module Style - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum VIVID = :vivid NATURAL = :natural diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index 5f3d315c..83fd2f56 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -3,7 +3,7 @@ module OpenAI module Models module ImageModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index fb015bb2..108b9120 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#create_variation - class ImagesResponse < OpenAI::BaseModel + class ImagesResponse < OpenAI::Internal::Type::BaseModel # @!attribute created # # @return [Integer] @@ -12,7 +12,7 @@ class ImagesResponse < OpenAI::BaseModel # @!attribute data # # @return [Array] - required :data, -> { OpenAI::ArrayOf[OpenAI::Models::Image] } + required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] } # @!parse # # @param created [Integer] @@ -20,7 +20,7 @@ class ImagesResponse < OpenAI::BaseModel # # # def initialize(created:, data:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/metadata.rb b/lib/openai/models/metadata.rb index fea222e0..c6a6935d 100644 --- a/lib/openai/models/metadata.rb +++ b/lib/openai/models/metadata.rb @@ -2,6 +2,6 @@ module OpenAI module Models - Metadata = OpenAI::HashOf[String] + Metadata = OpenAI::Internal::Type::HashOf[String] end end diff --git a/lib/openai/models/model.rb b/lib/openai/models/model.rb index 3e17e2d3..a3d362fb 100644 --- a/lib/openai/models/model.rb +++ b/lib/openai/models/model.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#retrieve - class Model < OpenAI::BaseModel + class Model < OpenAI::Internal::Type::BaseModel # @!attribute id # The model identifier, which can be referenced in the API endpoints. 
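 # A usage sketch tying together the image enums and response model above. The
 # prompt is a placeholder and the `prompt:` keyword is assumed; the resource
 # accessor follows the @see reference (OpenAI::Resources::Images#generate):
 #
 #   response = client.images.generate(
 #     prompt: "a watercolor lighthouse",
 #     model: :"dall-e-3",   # ImageModel enum value, or any String
 #     quality: :hd,
 #     size: :"1024x1792",
 #     style: :natural
 #   )
 #   response.data.first.url   # Array coerced via ArrayOf[OpenAI::Models::Image]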
# @@ -38,7 +38,7 @@ class Model < OpenAI::BaseModel # # # def initialize(id:, created:, owned_by:, object: :model, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index 7a3e61da..3f4036d0 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#delete - class ModelDeleteParams < OpenAI::BaseModel + class ModelDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class ModelDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/model_deleted.rb b/lib/openai/models/model_deleted.rb index f269390a..b35f6781 100644 --- a/lib/openai/models/model_deleted.rb +++ b/lib/openai/models/model_deleted.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#delete - class ModelDeleted < OpenAI::BaseModel + class ModelDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -12,7 +12,7 @@ class ModelDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -26,7 +26,7 @@ class ModelDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index eb621c9b..c7c05067 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#list - class ModelListParams < OpenAI::BaseModel + class ModelListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class ModelListParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index 330c276b..b1384fc8 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#retrieve - class ModelRetrieveParams < OpenAI::BaseModel + class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class ModelRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git 
a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 9d8bf7a0..88fd634b 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class Moderation < OpenAI::BaseModel + class Moderation < OpenAI::Internal::Type::BaseModel # @!attribute categories # A list of the categories, and whether they are flagged or not. # @@ -25,7 +25,7 @@ class Moderation < OpenAI::BaseModel # Whether any of the below categories are flagged. # # @return [Boolean] - required :flagged, OpenAI::BooleanModel + required :flagged, OpenAI::Internal::Type::BooleanModel # @!parse # # @param categories [OpenAI::Models::Moderation::Categories] @@ -35,23 +35,25 @@ class Moderation < OpenAI::BaseModel # # # def initialize(categories:, category_applied_input_types:, category_scores:, flagged:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Moderation#categories - class Categories < OpenAI::BaseModel + class Categories < OpenAI::Internal::Type::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any # target. # # @return [Boolean] - required :harassment, OpenAI::BooleanModel + required :harassment, OpenAI::Internal::Type::BooleanModel # @!attribute harassment_threatening # Harassment content that also includes violence or serious harm towards any # target. # # @return [Boolean] - required :harassment_threatening, OpenAI::BooleanModel, api_name: :"harassment/threatening" + required :harassment_threatening, + OpenAI::Internal::Type::BooleanModel, + api_name: :"harassment/threatening" # @!attribute hate # Content that expresses, incites, or promotes hate based on race, gender, @@ -60,7 +62,7 @@ class Categories < OpenAI::BaseModel # harassment. # # @return [Boolean] - required :hate, OpenAI::BooleanModel + required :hate, OpenAI::Internal::Type::BooleanModel # @!attribute hate_threatening # Hateful content that also includes violence or serious harm towards the targeted @@ -68,7 +70,7 @@ class Categories < OpenAI::BaseModel # orientation, disability status, or caste. # # @return [Boolean] - required :hate_threatening, OpenAI::BooleanModel, api_name: :"hate/threatening" + required :hate_threatening, OpenAI::Internal::Type::BooleanModel, api_name: :"hate/threatening" # @!attribute illicit # Content that includes instructions or advice that facilitate the planning or @@ -76,7 +78,7 @@ class Categories < OpenAI::BaseModel # illicit acts. For example, "how to shoplift" would fit this category. # # @return [Boolean, nil] - required :illicit, OpenAI::BooleanModel, nil?: true + required :illicit, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute illicit_violent # Content that includes instructions or advice that facilitate the planning or @@ -84,14 +86,17 @@ class Categories < OpenAI::BaseModel # instruction on the procurement of any weapon. # # @return [Boolean, nil] - required :illicit_violent, OpenAI::BooleanModel, api_name: :"illicit/violent", nil?: true + required :illicit_violent, + OpenAI::Internal::Type::BooleanModel, + api_name: :"illicit/violent", + nil?: true # @!attribute self_harm # Content that promotes, encourages, or depicts acts of self-harm, such as # suicide, cutting, and eating disorders. 
# # @return [Boolean] - required :self_harm, OpenAI::BooleanModel, api_name: :"self-harm" + required :self_harm, OpenAI::Internal::Type::BooleanModel, api_name: :"self-harm" # @!attribute self_harm_instructions # Content that encourages performing acts of self-harm, such as suicide, cutting, @@ -99,14 +104,16 @@ class Categories < OpenAI::BaseModel # acts. # # @return [Boolean] - required :self_harm_instructions, OpenAI::BooleanModel, api_name: :"self-harm/instructions" + required :self_harm_instructions, + OpenAI::Internal::Type::BooleanModel, + api_name: :"self-harm/instructions" # @!attribute self_harm_intent # Content where the speaker expresses that they are engaging or intend to engage # in acts of self-harm, such as suicide, cutting, and eating disorders. # # @return [Boolean] - required :self_harm_intent, OpenAI::BooleanModel, api_name: :"self-harm/intent" + required :self_harm_intent, OpenAI::Internal::Type::BooleanModel, api_name: :"self-harm/intent" # @!attribute sexual # Content meant to arouse sexual excitement, such as the description of sexual @@ -114,25 +121,25 @@ class Categories < OpenAI::BaseModel # wellness). # # @return [Boolean] - required :sexual, OpenAI::BooleanModel + required :sexual, OpenAI::Internal::Type::BooleanModel # @!attribute sexual_minors # Sexual content that includes an individual who is under 18 years old. # # @return [Boolean] - required :sexual_minors, OpenAI::BooleanModel, api_name: :"sexual/minors" + required :sexual_minors, OpenAI::Internal::Type::BooleanModel, api_name: :"sexual/minors" # @!attribute violence # Content that depicts death, violence, or physical injury. # # @return [Boolean] - required :violence, OpenAI::BooleanModel + required :violence, OpenAI::Internal::Type::BooleanModel # @!attribute violence_graphic # Content that depicts death, violence, or physical injury in graphic detail. # # @return [Boolean] - required :violence_graphic, OpenAI::BooleanModel, api_name: :"violence/graphic" + required :violence_graphic, OpenAI::Internal::Type::BooleanModel, api_name: :"violence/graphic" # @!parse # # A list of the categories, and whether they are flagged or not. @@ -170,38 +177,39 @@ class Categories < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Moderation#category_applied_input_types - class CategoryAppliedInputTypes < OpenAI::BaseModel + class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The applied input type(s) for the category 'harassment'. # # @return [Array] required :harassment, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment] } # @!attribute harassment_threatening # The applied input type(s) for the category 'harassment/threatening'. # # @return [Array] required :harassment_threatening, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] }, api_name: :"harassment/threatening" # @!attribute hate # The applied input type(s) for the category 'hate'. 
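 # The `api_name:` option above maps JSON wire keys such as "self-harm/intent"
 # onto Ruby-friendly attribute names. A hedged reading sketch, assuming a
 # parsed Moderation instance `m`:
 #
 #   m.categories.self_harm_intent   # reads the "self-harm/intent" key
 #   m.category_scores               # per-category Float scores
 #   m.flagged                       # true if any category is flagged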
# # @return [Array] - required :hate, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate] } + required :hate, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate] } # @!attribute hate_threatening # The applied input type(s) for the category 'hate/threatening'. # # @return [Array] required :hate_threatening, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening] }, api_name: :"hate/threatening" # @!attribute illicit @@ -209,14 +217,14 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :illicit, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit] } # @!attribute illicit_violent # The applied input type(s) for the category 'illicit/violent'. # # @return [Array] required :illicit_violent, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent] }, api_name: :"illicit/violent" # @!attribute self_harm @@ -224,7 +232,7 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :self_harm, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm] }, api_name: :"self-harm" # @!attribute self_harm_instructions @@ -232,7 +240,7 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :self_harm_instructions, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] }, api_name: :"self-harm/instructions" # @!attribute self_harm_intent @@ -240,7 +248,7 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :self_harm_intent, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] }, api_name: :"self-harm/intent" # @!attribute sexual @@ -248,14 +256,14 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :sexual, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual] } # @!attribute sexual_minors # The applied input type(s) for the category 'sexual/minors'. 
# # @return [Array] required :sexual_minors, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor] }, api_name: :"sexual/minors" # @!attribute violence @@ -263,14 +271,14 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # # @return [Array] required :violence, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence] } # @!attribute violence_graphic # The applied input type(s) for the category 'violence/graphic'. # # @return [Array] required :violence_graphic, - -> { OpenAI::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] }, api_name: :"violence/graphic" # @!parse @@ -309,10 +317,10 @@ class CategoryAppliedInputTypes < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Harassment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -324,7 +332,7 @@ module Harassment end module HarassmentThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -336,7 +344,7 @@ module HarassmentThreatening end module Hate - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -348,7 +356,7 @@ module Hate end module HateThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -360,7 +368,7 @@ module HateThreatening end module Illicit - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -372,7 +380,7 @@ module Illicit end module IllicitViolent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -384,7 +392,7 @@ module IllicitViolent end module SelfHarm - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -397,7 +405,7 @@ module SelfHarm end module SelfHarmInstruction - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -410,7 +418,7 @@ module SelfHarmInstruction end module SelfHarmIntent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -423,7 +431,7 @@ module SelfHarmIntent end module Sexual - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -436,7 +444,7 @@ module Sexual end module SexualMinor - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text @@ -448,7 +456,7 @@ module SexualMinor end module Violence - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -461,7 +469,7 @@ module Violence end module ViolenceGraphic - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text IMAGE = :image @@ -475,7 +483,7 @@ module ViolenceGraphic end # @see OpenAI::Models::Moderation#category_scores - class CategoryScores < OpenAI::BaseModel + class CategoryScores < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The score for the category 'harassment'. 
# @@ -590,7 +598,7 @@ class CategoryScores < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 95492506..c5e3ff4a 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Moderations#create - class ModerationCreateParams < OpenAI::BaseModel + class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -35,12 +35,12 @@ class ModerationCreateParams < OpenAI::BaseModel # # # def initialize(input:, model: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A string of text to classify for moderation. variant String @@ -55,9 +55,10 @@ module Input # # @return [Array(String, Array, Array)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] - ModerationMultiModalInputArray = OpenAI::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] + ModerationMultiModalInputArray = + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }] end # The content moderation model you would like to use. Learn more in @@ -65,7 +66,7 @@ module Input # learn about available models # [here](https://platform.openai.com/docs/models#moderation). module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index c06f6c57..922bca1e 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Moderations#create - class ModerationCreateResponse < OpenAI::BaseModel + class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique identifier for the moderation request. # @@ -20,7 +20,7 @@ class ModerationCreateResponse < OpenAI::BaseModel # A list of moderation objects. # # @return [Array] - required :results, -> { OpenAI::ArrayOf[OpenAI::Models::Moderation] } + required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation] } # @!parse # # Represents if a given text input is potentially harmful. 
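 # A usage sketch for the Input union above: a bare String, an array of
 # Strings, or an array of multi-modal objects all satisfy the same parameter
 # (see OpenAI::Resources::Moderations#create). The multi-modal hash shapes
 # match ModerationTextInput and ModerationImageURLInput below; the inputs are
 # placeholders:
 #
 #   client.moderations.create(input: "is this text safe?")
 #
 #   client.moderations.create(
 #     input: [
 #       {type: :text, text: "describe this"},
 #       {type: :image_url, image_url: {url: "https://example.com/cat.png"}}
 #     ],
 #     model: :"omni-moderation-latest"   # ModerationModel enum value, or any String
 #   )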
@@ -31,7 +31,7 @@ class ModerationCreateResponse < OpenAI::BaseModel # # # def initialize(id:, model:, results:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index c02f0627..25b0835a 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ModerationImageURLInput < OpenAI::BaseModel + class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!attribute image_url # Contains either an image URL or a data URL for a base64 encoded image. # @@ -23,10 +23,10 @@ class ModerationImageURLInput < OpenAI::BaseModel # # # def initialize(image_url:, type: :image_url, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::ModerationImageURLInput#image_url - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. # @@ -40,7 +40,7 @@ class ImageURL < OpenAI::BaseModel # # # def initialize(url:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index 8d50271d..fa606daa 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -3,7 +3,7 @@ module OpenAI module Models module ModerationModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum OMNI_MODERATION_LATEST = :"omni-moderation-latest" OMNI_MODERATION_2024_09_26 = :"omni-moderation-2024-09-26" diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 732739a6..ca2979e7 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -4,7 +4,7 @@ module OpenAI module Models # An object describing an image to classify. module ModerationMultiModalInput - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/moderation_text_input.rb b/lib/openai/models/moderation_text_input.rb index fbc192e8..ec5fe7cb 100644 --- a/lib/openai/models/moderation_text_input.rb +++ b/lib/openai/models/moderation_text_input.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ModerationTextInput < OpenAI::BaseModel + class ModerationTextInput < OpenAI::Internal::Type::BaseModel # @!attribute text # A string of text to classify. 
# @@ -23,7 +23,7 @@ class ModerationTextInput < OpenAI::BaseModel # # # def initialize(text:, type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/other_file_chunking_strategy_object.rb b/lib/openai/models/other_file_chunking_strategy_object.rb index 0f7c6224..9b28e285 100644 --- a/lib/openai/models/other_file_chunking_strategy_object.rb +++ b/lib/openai/models/other_file_chunking_strategy_object.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class OtherFileChunkingStrategyObject < OpenAI::BaseModel + class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!attribute type # Always `other`. # @@ -18,7 +18,7 @@ class OtherFileChunkingStrategyObject < OpenAI::BaseModel # # # def initialize(type: :other, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index af30c4e0..0727d5c1 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class Reasoning < OpenAI::BaseModel + class Reasoning < OpenAI::Internal::Type::BaseModel # @!attribute effort # **o-series models only** # @@ -35,7 +35,7 @@ class Reasoning < OpenAI::BaseModel # # # def initialize(effort: nil, generate_summary: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # **computer_use_preview only** # @@ -45,7 +45,7 @@ class Reasoning < OpenAI::BaseModel # # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum CONCISE = :concise DETAILED = :detailed diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index f123316a..737f9d5f 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -9,7 +9,7 @@ module Models # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. module ReasoningEffort - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LOW = :low MEDIUM = :medium diff --git a/lib/openai/models/response_format_json_object.rb b/lib/openai/models/response_format_json_object.rb index 2996332d..bcd20439 100644 --- a/lib/openai/models/response_format_json_object.rb +++ b/lib/openai/models/response_format_json_object.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ResponseFormatJSONObject < OpenAI::BaseModel + class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of response format being defined. Always `json_object`. 
# @@ -18,7 +18,7 @@ class ResponseFormatJSONObject < OpenAI::BaseModel # # # def initialize(type: :json_object, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 8dac86a9..71050de4 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ResponseFormatJSONSchema < OpenAI::BaseModel + class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute json_schema # Structured Outputs configuration options, including a JSON Schema. # @@ -25,10 +25,10 @@ class ResponseFormatJSONSchema < OpenAI::BaseModel # # # def initialize(json_schema:, type: :json_schema, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema - class JSONSchema < OpenAI::BaseModel + class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. @@ -52,7 +52,7 @@ class JSONSchema < OpenAI::BaseModel # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}, nil] - optional :schema, OpenAI::HashOf[OpenAI::Unknown] + optional :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!parse # # @return [Hash{Symbol=>Object}] @@ -66,7 +66,7 @@ class JSONSchema < OpenAI::BaseModel # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] - optional :strict, OpenAI::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true # @!parse # # Structured Outputs configuration options, including a JSON Schema. @@ -78,7 +78,7 @@ class JSONSchema < OpenAI::BaseModel # # # def initialize(name:, description: nil, schema: nil, strict: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/response_format_text.rb b/lib/openai/models/response_format_text.rb index 3821c9a0..609679d6 100644 --- a/lib/openai/models/response_format_text.rb +++ b/lib/openai/models/response_format_text.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class ResponseFormatText < OpenAI::BaseModel + class ResponseFormatText < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of response format being defined. Always `text`. # @@ -16,7 +16,7 @@ class ResponseFormatText < OpenAI::BaseModel # # # def initialize(type: :text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 57d62fd4..dcbbba28 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ComputerTool < OpenAI::BaseModel + class ComputerTool < OpenAI::Internal::Type::BaseModel # @!attribute display_height # The height of the computer display. 
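 # A minimal construction sketch for the response-format models above; the
 # schema name and body are hypothetical:
 #
 #   format = OpenAI::Models::ResponseFormatJSONSchema.new(
 #     json_schema: {
 #       name: "weather_report",
 #       schema: {type: "object", properties: {temp: {type: "number"}}},
 #       strict: true
 #     }
 #   )   # `type:` defaults to :json_schema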
# @@ -39,13 +39,13 @@ class ComputerTool < OpenAI::BaseModel # # # def initialize(display_height:, display_width:, environment:, type: :computer_use_preview, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The type of computer environment to control. # # @see OpenAI::Models::Responses::ComputerTool#environment module Environment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAC = :mac WINDOWS = :windows diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 715855e0..d7c78f66 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class EasyInputMessage < OpenAI::BaseModel + class EasyInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. @@ -41,14 +41,14 @@ class EasyInputMessage < OpenAI::BaseModel # # # def initialize(content:, role:, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. # # @see OpenAI::Models::Responses::EasyInputMessage#content module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A text input to the model. variant String @@ -67,7 +67,7 @@ module Content # # @see OpenAI::Models::Responses::EasyInputMessage#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user ASSISTANT = :assistant @@ -85,7 +85,7 @@ module Role # # @see OpenAI::Models::Responses::EasyInputMessage#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE = :message diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 18833fef..62d497ad 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class FileSearchTool < OpenAI::BaseModel + class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the file search tool. Always `file_search`. # @@ -14,7 +14,7 @@ class FileSearchTool < OpenAI::BaseModel # The IDs of the vector stores to search. # # @return [Array] - required :vector_store_ids, OpenAI::ArrayOf[String] + required :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!attribute [r] filters # A filter to apply based on file attributes. @@ -60,13 +60,13 @@ class FileSearchTool < OpenAI::BaseModel # # # def initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A filter to apply based on file attributes. # # @see OpenAI::Models::Responses::FileSearchTool#filters module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. 
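 # A minimal construction sketch for EasyInputMessage above, exercising the
 # String variant of its Content union and the Role enum; the text is a
 # placeholder:
 #
 #   OpenAI::Models::Responses::EasyInputMessage.new(
 #     role: :user,
 #     content: "What is the weather like today?"
 #   )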
variant -> { OpenAI::Models::ComparisonFilter } @@ -80,7 +80,7 @@ module Filters end # @see OpenAI::Models::Responses::FileSearchTool#ranking_options - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] ranker # The ranker to use for the file search. # @@ -111,13 +111,13 @@ class RankingOptions < OpenAI::BaseModel # # # def initialize(ranker: nil, score_threshold: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ranker to use for the file search. # # @see OpenAI::Models::Responses::FileSearchTool::RankingOptions#ranker module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 5e1906c6..ce5b34a5 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class FunctionTool < OpenAI::BaseModel + class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. # @@ -14,13 +14,13 @@ class FunctionTool < OpenAI::BaseModel # A JSON schema object describing the parameters of the function. # # @return [Hash{Symbol=>Object}] - required :parameters, OpenAI::HashOf[OpenAI::Unknown] + required :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!attribute strict # Whether to enforce strict parameter validation. Default `true`. # # @return [Boolean] - required :strict, OpenAI::BooleanModel + required :strict, OpenAI::Internal::Type::BooleanModel # @!attribute type # The type of the function tool. Always `function`. @@ -48,7 +48,7 @@ class FunctionTool < OpenAI::BaseModel # # # def initialize(name:, parameters:, strict:, description: nil, type: :function, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index b807321f..a6a7a77f 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # @see OpenAI::Resources::Responses::InputItems#list - class InputItemListParams < OpenAI::BaseModel + class InputItemListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -34,7 +34,8 @@ class InputItemListParams < OpenAI::BaseModel # Response creation above for more information. # # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } # @!parse # # @return [Array] @@ -74,14 +75,14 @@ class InputItemListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The order to return the input items in. Default is `asc`. 
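 # Construction sketches for the tool models above, following the keyword
 # signatures shown in their @!parse blocks; names and values are placeholders:
 #
 #   OpenAI::Models::Responses::FileSearchTool.new(
 #     vector_store_ids: ["vs_abc123"],
 #     max_num_results: 5
 #   )   # `type:` defaults to :file_search
 #
 #   OpenAI::Models::Responses::FunctionTool.new(
 #     name: "get_weather",
 #     parameters: {type: "object", properties: {}},
 #     strict: true
 #   )   # `type:` defaults to :function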
# # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index b6e167a0..afcc7eec 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -6,7 +6,7 @@ module Responses # @see OpenAI::Resources::Responses#create # # @see OpenAI::Resources::Responses#stream_raw - class Response < OpenAI::BaseModel + class Response < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for this Response. # @@ -51,7 +51,7 @@ class Response < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a @@ -79,13 +79,14 @@ class Response < OpenAI::BaseModel # consider using the `output_text` property where supported in SDKs. # # @return [Array] - required :output, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem] } + required :output, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls # Whether to allow the model to run tool calls in parallel. # # @return [Boolean] - required :parallel_tool_calls, OpenAI::BooleanModel + required :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -121,7 +122,7 @@ class Response < OpenAI::BaseModel # [function calling](https://platform.openai.com/docs/guides/function-calling). # # @return [Array] - required :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::Tool] } + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -270,10 +271,10 @@ class Response < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Responses::Response#incomplete_details - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] reason # The reason why the response is incomplete. # @@ -291,13 +292,13 @@ class IncompleteDetails < OpenAI::BaseModel # # # def initialize(reason: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason why the response is incomplete. # # @see OpenAI::Models::Responses::Response::IncompleteDetails#reason module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter @@ -316,7 +317,7 @@ module Reason # # @see OpenAI::Models::Responses::Response#tool_choice module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # Controls which (if any) tool is called by the model. 
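 # A hedged usage sketch for InputItemListParams above; the resource accessor
 # is assumed from the @see reference
 # (OpenAI::Resources::Responses::InputItems#list), and the response id is a
 # placeholder:
 #
 #   client.responses.input_items.list("resp_abc123", order: :desc, limit: 20)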
# @@ -350,7 +351,7 @@ module ToolChoice # # @see OpenAI::Models::Responses::Response#truncation module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index 8c690aef..72fd5781 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioDeltaEvent < OpenAI::BaseModel + class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # A chunk of Base64 encoded response audio bytes. # @@ -24,7 +24,7 @@ class ResponseAudioDeltaEvent < OpenAI::BaseModel # # # def initialize(delta:, type: :"response.audio.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index e0f3632f..6dd92e68 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioDoneEvent < OpenAI::BaseModel + class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the event. Always `response.audio.done`. # @@ -17,7 +17,7 @@ class ResponseAudioDoneEvent < OpenAI::BaseModel # # # def initialize(type: :"response.audio.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 476cb265..062c5a76 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The partial transcript of the audio response. # @@ -24,7 +24,7 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel # # # def initialize(delta:, type: :"response.audio.transcript.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 89ee6d86..98399c6e 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the event. Always `response.audio.transcript.done`. 
# @@ -17,7 +17,7 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel # # # def initialize(type: :"response.audio.transcript.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 3ec0bcba..2fa390fe 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The partial code snippet added by the code interpreter. # @@ -31,7 +31,7 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel # # # def initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 19de0973..caa291b9 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute code # The final code snippet output by the code interpreter. # @@ -31,7 +31,7 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel # # # def initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 3843f408..0e13a5e9 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter_call # A tool call to run code. 
# @@ -31,7 +31,7 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel # # # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 549d7eba..66aa4fce 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter_call # A tool call to run code. # @@ -31,7 +31,7 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel # # # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index bf8bbe13..7a87f2c2 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter_call # A tool call to run code. # @@ -31,7 +31,7 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel # # # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 5ce7e550..9d7260d9 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterToolCall < OpenAI::BaseModel + class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the code interpreter tool call. # @@ -21,7 +21,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # # @return [Array] required :results, - -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result] } # @!attribute status # The status of the code interpreter tool call. 
@@ -46,11 +46,11 @@ class ResponseCodeInterpreterToolCall < OpenAI::BaseModel # # # def initialize(id:, code:, results:, status:, type: :code_interpreter_call, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The output of a code interpreter tool call that is text. module Result - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -60,7 +60,7 @@ module Result # The output of a code interpreter tool call that is a file. variant :files, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files } - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs # The logs of the code interpreter tool call. # @@ -81,15 +81,15 @@ class Logs < OpenAI::BaseModel # # # def initialize(logs:, type: :logs, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Files < OpenAI::BaseModel + class Files < OpenAI::Internal::Type::BaseModel # @!attribute files # # @return [Array] required :files, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] } # @!attribute type # The type of the code interpreter file output. Always `files`. @@ -105,9 +105,9 @@ class Files < OpenAI::BaseModel # # # def initialize(files:, type: :files, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. # @@ -126,7 +126,7 @@ class File < OpenAI::BaseModel # # # def initialize(file_id:, mime_type:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end @@ -139,7 +139,7 @@ class File < OpenAI::BaseModel # # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress INTERPRETING = :interpreting diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 8328ff58..883a7de9 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCompletedEvent < OpenAI::BaseModel + class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # Properties of the completed response. 
# @@ -24,7 +24,7 @@ class ResponseCompletedEvent < OpenAI::BaseModel # # # def initialize(response:, type: :"response.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 207a2584..7eafa499 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCall < OpenAI::BaseModel + class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the computer call. # @@ -27,7 +27,7 @@ class ResponseComputerToolCall < OpenAI::BaseModel # # @return [Array] required :pending_safety_checks, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck] } # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -56,13 +56,13 @@ class ResponseComputerToolCall < OpenAI::BaseModel # # # def initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A click action. # # @see OpenAI::Models::Responses::ResponseComputerToolCall#action module Action - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -93,7 +93,7 @@ module Action # A wait action. variant :wait, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait } - class Click < OpenAI::BaseModel + class Click < OpenAI::Internal::Type::BaseModel # @!attribute button # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -130,14 +130,14 @@ class Click < OpenAI::BaseModel # # # def initialize(button:, x:, y_:, type: :click, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button module Button - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LEFT = :left RIGHT = :right @@ -153,7 +153,7 @@ module Button end end - class DoubleClick < OpenAI::BaseModel + class DoubleClick < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a double click action, this property is always set # to `double_click`. @@ -182,10 +182,10 @@ class DoubleClick < OpenAI::BaseModel # # # def initialize(x:, y_:, type: :double_click, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Drag < OpenAI::BaseModel + class Drag < OpenAI::Internal::Type::BaseModel # @!attribute path # An array of coordinates representing the path of the drag action. 
Coordinates # will appear as an array of objects, eg @@ -199,7 +199,7 @@ class Drag < OpenAI::BaseModel # # @return [Array] required :path, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path] } # @!attribute type # Specifies the event type. For a drag action, this property is always set to @@ -216,9 +216,9 @@ class Drag < OpenAI::BaseModel # # # def initialize(path:, type: :drag, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Path < OpenAI::BaseModel + class Path < OpenAI::Internal::Type::BaseModel # @!attribute x # The x-coordinate. # @@ -239,17 +239,17 @@ class Path < OpenAI::BaseModel # # # def initialize(x:, y_:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end - class Keypress < OpenAI::BaseModel + class Keypress < OpenAI::Internal::Type::BaseModel # @!attribute keys # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. # # @return [Array] - required :keys, OpenAI::ArrayOf[String] + required :keys, OpenAI::Internal::Type::ArrayOf[String] # @!attribute type # Specifies the event type. For a keypress action, this property is always set to @@ -266,10 +266,10 @@ class Keypress < OpenAI::BaseModel # # # def initialize(keys:, type: :keypress, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Move < OpenAI::BaseModel + class Move < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a move action, this property is always set to # `move`. @@ -298,10 +298,10 @@ class Move < OpenAI::BaseModel # # # def initialize(x:, y_:, type: :move, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Screenshot < OpenAI::BaseModel + class Screenshot < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. @@ -316,10 +316,10 @@ class Screenshot < OpenAI::BaseModel # # # def initialize(type: :screenshot, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Scroll < OpenAI::BaseModel + class Scroll < OpenAI::Internal::Type::BaseModel # @!attribute scroll_x # The horizontal scroll distance. # @@ -362,10 +362,10 @@ class Scroll < OpenAI::BaseModel # # # def initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Type < OpenAI::BaseModel + class Type < OpenAI::Internal::Type::BaseModel # @!attribute text # The text to type. # @@ -387,10 +387,10 @@ class Type < OpenAI::BaseModel # # # def initialize(text:, type: :type, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class Wait < OpenAI::BaseModel + class Wait < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. 
For a wait action, this property is always set to # `wait`. @@ -405,7 +405,7 @@ class Wait < OpenAI::BaseModel # # # def initialize(type: :wait, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse @@ -413,7 +413,7 @@ class Wait < OpenAI::BaseModel # def self.variants; end end - class PendingSafetyCheck < OpenAI::BaseModel + class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the pending safety check. # @@ -441,7 +441,7 @@ class PendingSafetyCheck < OpenAI::BaseModel # # # def initialize(id:, code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -449,7 +449,7 @@ class PendingSafetyCheck < OpenAI::BaseModel # # @see OpenAI::Models::Responses::ResponseComputerToolCall#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -466,7 +466,7 @@ module Status # # @see OpenAI::Models::Responses::ResponseComputerToolCall#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum COMPUTER_CALL = :computer_call diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 467d41e0..0db1e5cc 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCallOutputItem < OpenAI::BaseModel + class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the computer call tool output. # @@ -34,7 +34,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::BaseModel # # @return [Array, nil] optional :acknowledged_safety_checks, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } # @!parse # # @return [Array] @@ -61,9 +61,9 @@ class ResponseComputerToolCallOutputItem < OpenAI::BaseModel # # # def initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the pending safety check. # @@ -91,7 +91,7 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # # # def initialize(id:, code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the message input. 
One of `in_progress`, `completed`, or @@ -99,7 +99,7 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 4dc426ff..1ac341fb 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. @@ -40,7 +40,7 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel # # # def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index c3e86848..3c306ddb 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -5,7 +5,7 @@ module Models module Responses # Multi-modal input and output contents. module ResponseContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A text input to the model. variant -> { OpenAI::Models::Responses::ResponseInputText } diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 66590848..2d78d1ff 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseContentPartAddedEvent < OpenAI::BaseModel + class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that was added. # @@ -45,13 +45,13 @@ class ResponseContentPartAddedEvent < OpenAI::BaseModel # # # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The content part that was added. # # @see OpenAI::Models::Responses::ResponseContentPartAddedEvent#part module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 988c11d1..d28334f9 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseContentPartDoneEvent < OpenAI::BaseModel + class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that is done. 
# @@ -45,13 +45,13 @@ class ResponseContentPartDoneEvent < OpenAI::BaseModel # # # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The content part that is done. # # @see OpenAI::Models::Responses::ResponseContentPartDoneEvent#part module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 8253c846..7015d62b 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -6,7 +6,7 @@ module Responses # @see OpenAI::Resources::Responses#create # # @see OpenAI::Resources::Responses#stream_raw - class ResponseCreateParams < OpenAI::BaseModel + class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -46,7 +46,9 @@ class ResponseCreateParams < OpenAI::BaseModel # call output. # # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] }, nil?: true + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] }, + nil?: true # @!attribute instructions # Inserts a system (or developer) message as the first item in the model's @@ -76,13 +78,13 @@ class ResponseCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute parallel_tool_calls # Whether to allow the model to run tool calls in parallel. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::BooleanModel, nil?: true + optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute previous_response_id # The unique ID of the previous response to the model. Use this to create @@ -105,7 +107,7 @@ class ResponseCreateParams < OpenAI::BaseModel # Whether to store the generated model response for later retrieval via API. # # @return [Boolean, nil] - optional :store, OpenAI::BooleanModel, nil?: true + optional :store, OpenAI::Internal::Type::BooleanModel, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -159,7 +161,7 @@ class ResponseCreateParams < OpenAI::BaseModel # [function calling](https://platform.openai.com/docs/guides/function-calling). # # @return [Array, nil] - optional :tools, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::Tool] } + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } # @!parse # # @return [Array] @@ -243,7 +245,7 @@ class ResponseCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Text, image, or file inputs to the model, used to generate a response. 
# @@ -255,7 +257,7 @@ class ResponseCreateParams < OpenAI::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A text input to the model, equivalent to a text input with the # `user` role. @@ -274,7 +276,7 @@ module Input # response. See the `tools` parameter to see how to specify which tools the model # can call. module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # Controls which (if any) tool is called by the model. # @@ -306,7 +308,7 @@ module ToolChoice # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DISABLED = :disabled diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index 9a4cc890..2eb916c1 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCreatedEvent < OpenAI::BaseModel + class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was created. # @@ -24,7 +24,7 @@ class ResponseCreatedEvent < OpenAI::BaseModel # # # def initialize(response:, type: :"response.created", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index 311adb57..516661d0 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # @see OpenAI::Resources::Responses#delete - class ResponseDeleteParams < OpenAI::BaseModel + class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ class ResponseDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 465b571a..a42f876f 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseError < OpenAI::BaseModel + class ResponseError < OpenAI::Internal::Type::BaseModel # @!attribute code # The error code for the response. # @@ -24,13 +24,13 @@ class ResponseError < OpenAI::BaseModel # # # def initialize(code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The error code for the response. 
# # @see OpenAI::Models::Responses::ResponseError#code module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 057c30e1..5558c8c0 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseErrorEvent < OpenAI::BaseModel + class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @!attribute code # The error code. # @@ -38,7 +38,7 @@ class ResponseErrorEvent < OpenAI::BaseModel # # # def initialize(code:, message:, param:, type: :error, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index 2b4d5471..aa99a457 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFailedEvent < OpenAI::BaseModel + class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that failed. # @@ -24,7 +24,7 @@ class ResponseFailedEvent < OpenAI::BaseModel # # # def initialize(response:, type: :"response.failed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 8aaa0427..458771a8 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # The ID of the output item that the file search call is initiated. # @@ -31,7 +31,7 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.file_search_call.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 09237200..4671a7e6 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # The ID of the output item that the file search call is initiated. 
# @@ -31,7 +31,7 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index bc87ce2c..880e33e1 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel + class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # The ID of the output item that the file search call is initiated. # @@ -31,7 +31,7 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.file_search_call.searching", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 3606686b..dec92ac0 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchToolCall < OpenAI::BaseModel + class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the file search tool call. # @@ -14,7 +14,7 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # The queries used to search for files. # # @return [Array] - required :queries, OpenAI::ArrayOf[String] + required :queries, OpenAI::Internal::Type::ArrayOf[String] # @!attribute status # The status of the file search tool call. One of `in_progress`, `searching`, @@ -34,7 +34,7 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # # @return [Array, nil] optional :results, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result] }, nil?: true # @!parse @@ -50,14 +50,14 @@ class ResponseFileSearchToolCall < OpenAI::BaseModel # # # def initialize(id:, queries:, status:, results: nil, type: :file_search_call, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress SEARCHING = :searching @@ -72,7 +72,7 @@ module Status # def self.values; end end - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -82,7 +82,7 @@ class Result < OpenAI::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::HashOf[union: OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::Attribute] }, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::Attribute] }, nil?: true # @!attribute [r] file_id @@ -134,16 +134,16 @@ class Result < OpenAI::BaseModel # # # def initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index ee665d54..bdacd77c 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -17,7 +17,7 @@ module Responses # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. module ResponseFormatTextConfig - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 06e3bf5e..f0404403 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. @@ -16,7 +16,7 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}] - required :schema, OpenAI::HashOf[OpenAI::Unknown] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!attribute type # The type of response format being defined. Always `json_schema`. @@ -43,7 +43,7 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] - optional :strict, OpenAI::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true # @!parse # # JSON Schema response format. Used to generate structured JSON responses. 
Learn @@ -58,7 +58,7 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel # # # def initialize(name:, schema:, description: nil, strict: nil, type: :json_schema, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 51e4b411..6005ba84 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The function-call arguments delta that is added. # @@ -38,7 +38,7 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel # # # def initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index 379a1df5..a9280f46 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The function-call arguments. # @@ -37,7 +37,7 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel # # # def initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 92fbd817..61ce02d2 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionToolCall < OpenAI::BaseModel + class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # A JSON string of the arguments to pass to the function. # @@ -63,14 +63,14 @@ class ResponseFunctionToolCall < OpenAI::BaseModel # # # def initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index 9317cee6..8315cb64 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -19,7 +19,7 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # # # def initialize(id:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 310a8685..081ef864 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the function call tool output. # @@ -48,14 +48,14 @@ class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel # # # def initialize(id:, call_id:, output:, status: nil, type: :function_call_output, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 41ecb9f4..c2cdfa12 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionWebSearch < OpenAI::BaseModel + class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the web search tool call. # @@ -33,13 +33,13 @@ class ResponseFunctionWebSearch < OpenAI::BaseModel # # # def initialize(id:, status:, type: :web_search_call, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the web search tool call. 
# # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress SEARCHING = :searching diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index 5f61d802..a26c5659 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInProgressEvent < OpenAI::BaseModel + class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that is in progress. # @@ -24,7 +24,7 @@ class ResponseInProgressEvent < OpenAI::BaseModel # # # def initialize(response:, type: :"response.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 39763a87..26bd124c 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -12,7 +12,7 @@ module Responses # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. module ResponseIncludable - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index 8f0f240c..08fb757a 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseIncompleteEvent < OpenAI::BaseModel + class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was incomplete. # @@ -24,7 +24,7 @@ class ResponseIncompleteEvent < OpenAI::BaseModel # # # def initialize(response:, type: :"response.incomplete", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_input.rb b/lib/openai/models/responses/response_input.rb index d8565c95..591bab05 100644 --- a/lib/openai/models/responses/response_input.rb +++ b/lib/openai/models/responses/response_input.rb @@ -3,7 +3,8 @@ module OpenAI module Models module Responses - ResponseInput = OpenAI::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputItem }] + ResponseInput = + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputItem }] end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index e6caa2ed..90dcdee6 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputAudio < OpenAI::BaseModel + class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64-encoded audio data. 
# @@ -31,13 +31,13 @@ class ResponseInputAudio < OpenAI::BaseModel # # # def initialize(data:, format_:, type: :input_audio, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The format of the audio data. Currently supported formats are `mp3` and `wav`. # # @see OpenAI::Models::Responses::ResponseInputAudio#format_ module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MP3 = :mp3 WAV = :wav diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 41f9a7d7..901a5159 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -5,7 +5,7 @@ module Models module Responses # A text input to the model. module ResponseInputContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index a6e595f9..0e2a3c73 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputFile < OpenAI::BaseModel + class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the input item. Always `input_file`. # @@ -50,7 +50,7 @@ class ResponseInputFile < OpenAI::BaseModel # # # def initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 5c5bb8ce..883f8491 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputImage < OpenAI::BaseModel + class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!attribute detail # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. @@ -41,14 +41,14 @@ class ResponseInputImage < OpenAI::BaseModel # # # def initialize(detail:, file_id: nil, image_url: nil, type: :input_image, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # # @see OpenAI::Models::Responses::ResponseInputImage#detail module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum HIGH = :high LOW = :low diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 206650eb..62862e0e 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -9,7 +9,7 @@ module Responses # `assistant` role are presumed to have been generated by the model in previous # interactions. module ResponseInputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -57,13 +57,14 @@ module ResponseInputItem # An internal identifier for an item to reference. 
variant :item_reference, -> { OpenAI::Models::Responses::ResponseInputItem::ItemReference } - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of one or many input items to the model, containing different content # types. # # @return [Array] - required :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } + required :content, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. @@ -104,13 +105,13 @@ class Message < OpenAI::BaseModel # # # def initialize(content:, role:, status: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The role of the message input. One of `user`, `system`, or `developer`. # # @see OpenAI::Models::Responses::ResponseInputItem::Message#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user SYSTEM = :system @@ -128,7 +129,7 @@ module Role # # @see OpenAI::Models::Responses::ResponseInputItem::Message#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -145,7 +146,7 @@ module Status # # @see OpenAI::Models::Responses::ResponseInputItem::Message#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE = :message @@ -157,7 +158,7 @@ module Type end end - class ComputerCallOutput < OpenAI::BaseModel + class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute call_id # The ID of the computer tool call that produced the output. # @@ -192,7 +193,7 @@ class ComputerCallOutput < OpenAI::BaseModel # # @return [Array, nil] optional :acknowledged_safety_checks, - -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] } # @!parse # # @return [Array] @@ -221,9 +222,9 @@ class ComputerCallOutput < OpenAI::BaseModel # # # def initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the pending safety check. # @@ -251,7 +252,7 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # # # def initialize(id:, code:, message:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the message input. One of `in_progress`, `completed`, or @@ -259,7 +260,7 @@ class AcknowledgedSafetyCheck < OpenAI::BaseModel # # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -273,7 +274,7 @@ module Status end end - class FunctionCallOutput < OpenAI::BaseModel + class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute call_id # The unique ID of the function tool call generated by the model. 
# @@ -325,14 +326,14 @@ class FunctionCallOutput < OpenAI::BaseModel # # # def initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -346,7 +347,7 @@ module Status end end - class ItemReference < OpenAI::BaseModel + class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the item to reference. # @@ -367,7 +368,7 @@ class ItemReference < OpenAI::BaseModel # # # def initialize(id:, type: :item_reference, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/responses/response_input_message_content_list.rb b/lib/openai/models/responses/response_input_message_content_list.rb index 98901e68..a1362365 100644 --- a/lib/openai/models/responses/response_input_message_content_list.rb +++ b/lib/openai/models/responses/response_input_message_content_list.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses ResponseInputMessageContentList = - OpenAI::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 65bfbf76..3bb170a9 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputMessageItem < OpenAI::BaseModel + class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the message input. # @@ -15,7 +15,8 @@ class ResponseInputMessageItem < OpenAI::BaseModel # types. # # @return [Array] - required :content, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } + required :content, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. @@ -53,13 +54,13 @@ class ResponseInputMessageItem < OpenAI::BaseModel # # # def initialize(id:, content:, role:, status: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The role of the message input. One of `user`, `system`, or `developer`. 
# # @see OpenAI::Models::Responses::ResponseInputMessageItem#role module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER = :user SYSTEM = :system @@ -77,7 +78,7 @@ module Role # # @see OpenAI::Models::Responses::ResponseInputMessageItem#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -94,7 +95,7 @@ module Status # # @see OpenAI::Models::Responses::ResponseInputMessageItem#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE = :message diff --git a/lib/openai/models/responses/response_input_text.rb b/lib/openai/models/responses/response_input_text.rb index c38be5e5..d8ed6f2c 100644 --- a/lib/openai/models/responses/response_input_text.rb +++ b/lib/openai/models/responses/response_input_text.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputText < OpenAI::BaseModel + class ResponseInputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text input to the model. # @@ -24,7 +24,7 @@ class ResponseInputText < OpenAI::BaseModel # # # def initialize(text:, type: :input_text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index ca1c9ee7..3fe0074e 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -5,7 +5,7 @@ module Models module Responses # Content item used to generate a response. module ResponseItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 34ca03ad..5c05dab2 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseItemList < OpenAI::BaseModel + class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!attribute data # A list of items used to generate this response. # # @return [Array] - required :data, -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseItem] } + required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseItem] } # @!attribute first_id # The ID of the first item in the list. @@ -20,7 +20,7 @@ class ResponseItemList < OpenAI::BaseModel # Whether there are more items available. # # @return [Boolean] - required :has_more, OpenAI::BooleanModel + required :has_more, OpenAI::Internal::Type::BooleanModel # @!attribute last_id # The ID of the last item in the list. 
@@ -45,7 +45,7 @@ class ResponseItemList < OpenAI::BaseModel # # # def initialize(data:, first_id:, has_more:, last_id:, object: :list, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index da147435..d8fb8c61 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputAudio < OpenAI::BaseModel + class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64-encoded audio data from the model. # @@ -31,7 +31,7 @@ class ResponseOutputAudio < OpenAI::BaseModel # # # def initialize(data:, transcript:, type: :output_audio, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 2666a922..5751bb93 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -5,7 +5,7 @@ module Models module Responses # An output message from the model. module ResponseOutputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 9885b0a9..6dfbd4d9 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputItemAddedEvent < OpenAI::BaseModel + class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. # @@ -31,7 +31,7 @@ class ResponseOutputItemAddedEvent < OpenAI::BaseModel # # # def initialize(item:, output_index:, type: :"response.output_item.added", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 6840bbd7..904c8eae 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputItemDoneEvent < OpenAI::BaseModel + class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. 
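`ResponseOutputItem` above is declared as a union with `discriminator :type`, which suggests the concrete variant is selected by reading the `type` field of an incoming payload. A rough sketch of that dispatch, with hypothetical variant classes rather than the SDK's internals:

    Message      = Struct.new(:payload)
    FunctionCall = Struct.new(:payload)

    VARIANTS = { "message" => Message, "function_call" => FunctionCall }.freeze

    def resolve(payload)
      VARIANTS.fetch(payload.fetch("type")).new(payload)
    end

    resolve({ "type" => "message", "content" => [] }) # => #<struct Message payload=...>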
         #
@@ -31,7 +31,7 @@ class ResponseOutputItemDoneEvent < OpenAI::BaseModel
         #
         #   # def initialize(item:, output_index:, type: :"response.output_item.done", **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
       end
     end
   end
diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb
index 373721b0..272c8ab1 100644
--- a/lib/openai/models/responses/response_output_message.rb
+++ b/lib/openai/models/responses/response_output_message.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputMessage < OpenAI::BaseModel
+      class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel
         # @!attribute id
         #   The unique ID of the output message.
         #
@@ -15,7 +15,7 @@ class ResponseOutputMessage < OpenAI::BaseModel
         #
         #   @return [Array]
         required :content,
-                 -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content] }
+                 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content] }
 
         # @!attribute role
         #   The role of the output message. Always `assistant`.
@@ -47,11 +47,11 @@ class ResponseOutputMessage < OpenAI::BaseModel
         #
         #   # def initialize(id:, content:, status:, role: :assistant, type: :message, **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
 
         # A text output from the model.
         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union
 
           discriminator :type
 
@@ -71,7 +71,7 @@ module Content
         #
         # @see OpenAI::Models::Responses::ResponseOutputMessage#status
         module Status
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum
 
           IN_PROGRESS = :in_progress
           COMPLETED = :completed
diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb
index bd44ae1e..0e050ce2 100644
--- a/lib/openai/models/responses/response_output_refusal.rb
+++ b/lib/openai/models/responses/response_output_refusal.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputRefusal < OpenAI::BaseModel
+      class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel
         # @!attribute refusal
         #   The refusal explanation from the model.
         #
@@ -24,7 +24,7 @@ class ResponseOutputRefusal < OpenAI::BaseModel
         #
         #   # def initialize(refusal:, type: :refusal, **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
       end
     end
   end
diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb
index e6c46e68..9766a767 100644
--- a/lib/openai/models/responses/response_output_text.rb
+++ b/lib/openai/models/responses/response_output_text.rb
@@ -3,13 +3,13 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputText < OpenAI::BaseModel
+      class ResponseOutputText < OpenAI::Internal::Type::BaseModel
         # @!attribute annotations
         #   The annotations of the text output.
         #
         #   @return [Array]
         required :annotations,
-                 -> { OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputText::Annotation] }
+                 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputText::Annotation] }
 
         # @!attribute text
         #   The text output from the model.
@@ -32,11 +32,11 @@ class ResponseOutputText < OpenAI::BaseModel # # # def initialize(annotations:, text:, type: :output_text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A citation to a file. module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -49,7 +49,7 @@ module Annotation # A path to a file. variant :file_path, -> { OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. # @@ -77,10 +77,10 @@ class FileCitation < OpenAI::BaseModel # # # def initialize(file_id:, index:, type: :file_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. # @@ -122,10 +122,10 @@ class URLCitation < OpenAI::BaseModel # # # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. # @@ -153,7 +153,7 @@ class FilePath < OpenAI::BaseModel # # # def initialize(file_id:, index:, type: :file_path, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index e5539f3d..fa4d428c 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseReasoningItem < OpenAI::BaseModel + class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique identifier of the reasoning content. # @@ -14,7 +14,8 @@ class ResponseReasoningItem < OpenAI::BaseModel # Reasoning text contents. # # @return [Array] - required :summary, -> { OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseReasoningItem::Summary] } + required :summary, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseReasoningItem::Summary] } # @!attribute type # The type of the object. Always `reasoning`. @@ -44,9 +45,9 @@ class ResponseReasoningItem < OpenAI::BaseModel # # # def initialize(id:, summary:, status: nil, type: :reasoning, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class Summary < OpenAI::BaseModel + class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text # A short summary of the reasoning used by the model when generating the response. # @@ -65,7 +66,7 @@ class Summary < OpenAI::BaseModel # # # def initialize(text:, type: :summary_text, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
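The recurring `def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void` signatures indicate each model can be constructed from a plain hash as well as from another model instance. Under that reading, building a `ResponseReasoningItem` with nested hashes would look roughly like this (the ID and text are invented):

    # Usage sketch; assumes the nested hash is coerced into a Summary model,
    # as the documented initialize signatures suggest.
    item = OpenAI::Models::Responses::ResponseReasoningItem.new(
      id: "rs_123",
      summary: [{text: "Compared both code paths before answering.", type: :summary_text}]
    )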
@@ -73,7 +74,7 @@ class Summary < OpenAI::BaseModel # # @see OpenAI::Models::Responses::ResponseReasoningItem#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index b09b8ef2..35247a7d 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseRefusalDeltaEvent < OpenAI::BaseModel + class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that the refusal text is added to. # @@ -45,7 +45,7 @@ class ResponseRefusalDeltaEvent < OpenAI::BaseModel # # # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index 662705f7..7f6cd16b 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseRefusalDoneEvent < OpenAI::BaseModel + class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that the refusal text is finalized. # @@ -45,7 +45,7 @@ class ResponseRefusalDoneEvent < OpenAI::BaseModel # # # def initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index ea7d0882..eaef643f 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # @see OpenAI::Resources::Responses#retrieve - class ResponseRetrieveParams < OpenAI::BaseModel + class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,8 @@ class ResponseRetrieveParams < OpenAI::BaseModel # Response creation above for more information. 
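The `response.refusal.delta` / `response.refusal.done` pair above follows the usual streaming shape: deltas are appended per `(item_id, content_index)`, and the done event carries the finalized text. A consumption sketch under that assumption, with illustrative event hashes:

    buffers = Hash.new { |h, k| h[k] = +"" }

    events = [
      {type: "response.refusal.delta", item_id: "msg_1", content_index: 0, delta: "I can't "},
      {type: "response.refusal.delta", item_id: "msg_1", content_index: 0, delta: "help with that."},
      {type: "response.refusal.done", item_id: "msg_1", content_index: 0, refusal: "I can't help with that."}
    ]

    events.each do |event|
      key = [event[:item_id], event[:content_index]]
      case event[:type]
      when "response.refusal.delta" then buffers[key] << event[:delta]
      when "response.refusal.done" then buffers[key] == event[:refusal] or warn "stream mismatch"
      end
    end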
# # @return [Array, nil] - optional :include, -> { OpenAI::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } + optional :include, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } # @!parse # # @return [Array] @@ -26,7 +27,7 @@ class ResponseRetrieveParams < OpenAI::BaseModel # # # def initialize(include: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index 31187039..15876663 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -6,7 +6,7 @@ module Responses # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. module ResponseStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum COMPLETED = :completed FAILED = :failed diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 5dddc41f..db86c410 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -5,7 +5,7 @@ module Models module Responses # Emitted when there is a partial audio response. module ResponseStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index c6291cf6..bc38658a 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel + class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute annotation # A citation to a file. # @@ -63,13 +63,13 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A citation to a file. # # @see OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent#annotation module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type @@ -85,7 +85,7 @@ module Annotation variant :file_path, -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. # @@ -113,10 +113,10 @@ class FileCitation < OpenAI::BaseModel # # # def initialize(file_id:, index:, type: :file_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. 
# @@ -158,10 +158,10 @@ class URLCitation < OpenAI::BaseModel # # # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. # @@ -189,7 +189,7 @@ class FilePath < OpenAI::BaseModel # # # def initialize(file_id:, index:, type: :file_path, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @!parse diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 77a2d9b0..65f31b36 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextConfig < OpenAI::BaseModel + class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @!attribute [r] format_ # An object specifying the format that the model must output. # @@ -37,7 +37,7 @@ class ResponseTextConfig < OpenAI::BaseModel # # # def initialize(format_: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 5dee2ee1..8b6c4b75 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextDeltaEvent < OpenAI::BaseModel + class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that the text delta was added to. # @@ -45,7 +45,7 @@ class ResponseTextDeltaEvent < OpenAI::BaseModel # # # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index cd3d0faf..45a3267a 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextDoneEvent < OpenAI::BaseModel + class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute content_index # The index of the content part that the text content is finalized. 
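`ResponseTextConfig` above exposes the wire field `format` as `format_`. The trailing underscore is presumably there because `format` is already a `Kernel` method in Ruby and a bare reader named `format` would shadow it; the Ruby-side name diverges while serialization can keep the API's key. A hypothetical illustration:

    class TextConfig
      attr_accessor :format_ # avoids shadowing Kernel#format

      def to_h
        {format: format_} # serialized under the API's field name
      end
    end

    cfg = TextConfig.new
    cfg.format_ = {type: :json_schema}
    cfg.to_h # => {format: {type: :json_schema}}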
# @@ -45,7 +45,7 @@ class ResponseTextDoneEvent < OpenAI::BaseModel # # # def initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 316d5b3f..8d6bee6d 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseUsage < OpenAI::BaseModel + class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens # The number of input tokens. # @@ -46,10 +46,10 @@ class ResponseUsage < OpenAI::BaseModel # # # def initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details - class InputTokensDetails < OpenAI::BaseModel + class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). @@ -64,11 +64,11 @@ class InputTokensDetails < OpenAI::BaseModel # # # def initialize(cached_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details - class OutputTokensDetails < OpenAI::BaseModel + class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_tokens # The number of reasoning tokens. # @@ -82,7 +82,7 @@ class OutputTokensDetails < OpenAI::BaseModel # # # def initialize(reasoning_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 08386de1..70e0dc19 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # Unique ID for the output item associated with the web search call. 
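`ResponseUsage` above reports `input_tokens`, `output_tokens`, and `total_tokens`, with cached and reasoning tokens broken out in the two detail objects. Taking the field names at face value, the totals relate as in this sketch (numbers invented):

    usage = {
      input_tokens: 120, input_tokens_details: {cached_tokens: 64},
      output_tokens: 35, output_tokens_details: {reasoning_tokens: 20},
      total_tokens: 155
    }

    # total is input plus output; the detail counts describe subsets of each.
    usage[:total_tokens] == usage[:input_tokens] + usage[:output_tokens] # => true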
# @@ -31,7 +31,7 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.web_search_call.completed", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index 0770e80c..fc8f006a 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # Unique ID for the output item associated with the web search call. # @@ -31,7 +31,7 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index 6d037de9..39d6ae9f 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel + class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @!attribute item_id # Unique ID for the output item associated with the web search call. # @@ -31,7 +31,7 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel # # # def initialize(item_id:, output_index:, type: :"response.web_search_call.searching", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 3072312f..9c696f1f 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -7,7 +7,7 @@ module Responses # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union discriminator :type diff --git a/lib/openai/models/responses/tool_choice_function.rb b/lib/openai/models/responses/tool_choice_function.rb index 08f9d77e..ca4d89b6 100644 --- a/lib/openai/models/responses/tool_choice_function.rb +++ b/lib/openai/models/responses/tool_choice_function.rb @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ToolChoiceFunction < OpenAI::BaseModel + class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. 
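The three web-search events in these hunks (`in_progress`, `searching`, `completed`) read as a per-call lifecycle keyed by `item_id`. That ordering is an inference from the event names, not something the patch states; a tracking sketch under the assumption:

    STAGES = %w[
      response.web_search_call.in_progress
      response.web_search_call.searching
      response.web_search_call.completed
    ].freeze

    def advance(states, item_id, type)
      stage = STAGES.index(type) or return
      warn "out-of-order event for #{item_id}" if stage != states[item_id] + 1
      states[item_id] = stage
    end

    states = Hash.new(-1)
    advance(states, "ws_1", "response.web_search_call.in_progress")
    advance(states, "ws_1", "response.web_search_call.searching")
    advance(states, "ws_1", "response.web_search_call.completed")
    states["ws_1"] # => 2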
         #
@@ -24,7 +24,7 @@ class ToolChoiceFunction < OpenAI::BaseModel
         #
         #   # def initialize(name:, type: :function, **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
       end
     end
   end
diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb
index 00c16a94..7b08e8c7 100644
--- a/lib/openai/models/responses/tool_choice_options.rb
+++ b/lib/openai/models/responses/tool_choice_options.rb
@@ -12,7 +12,7 @@ module Responses
       #
       # `required` means the model must call one or more tools.
       module ToolChoiceOptions
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum
 
        NONE = :none
        AUTO = :auto
diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb
index cddbcc56..fe1d606d 100644
--- a/lib/openai/models/responses/tool_choice_types.rb
+++ b/lib/openai/models/responses/tool_choice_types.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ToolChoiceTypes < OpenAI::BaseModel
+      class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel
         # @!attribute type
         #   The type of hosted tool the model should use. Learn more about
         #   [built-in tools](https://platform.openai.com/docs/guides/tools).
@@ -25,7 +25,7 @@ class ToolChoiceTypes < OpenAI::BaseModel
         #
         #   # def initialize(type:, **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
 
         # The type of hosted tool the model should use. Learn more about
         # [built-in tools](https://platform.openai.com/docs/guides/tools).
@@ -38,7 +38,7 @@ class ToolChoiceTypes < OpenAI::BaseModel
         #
         # @see OpenAI::Models::Responses::ToolChoiceTypes#type
         module Type
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum
 
           FILE_SEARCH = :file_search
           WEB_SEARCH_PREVIEW = :web_search_preview
diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb
index 58f833d7..504bcab1 100644
--- a/lib/openai/models/responses/web_search_tool.rb
+++ b/lib/openai/models/responses/web_search_tool.rb
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class WebSearchTool < OpenAI::BaseModel
+      class WebSearchTool < OpenAI::Internal::Type::BaseModel
         # @!attribute type
         #   The type of the web search tool. One of:
         #
@@ -40,7 +40,7 @@ class WebSearchTool < OpenAI::BaseModel
         #
         #   # def initialize(type:, search_context_size: nil, user_location: nil, **) = super
 
-        # def initialize: (Hash | OpenAI::BaseModel) -> void
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
 
         # The type of the web search tool. One of:
         #
@@ -49,7 +49,7 @@ class WebSearchTool < OpenAI::BaseModel
         #
         # @see OpenAI::Models::Responses::WebSearchTool#type
         module Type
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum
 
           WEB_SEARCH_PREVIEW = :web_search_preview
           WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11
@@ -66,7 +66,7 @@ module Type
         #
         # @see OpenAI::Models::Responses::WebSearchTool#search_context_size
         module SearchContextSize
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum
 
           LOW = :low
           MEDIUM = :medium
@@ -80,7 +80,7 @@ module SearchContextSize
         end
 
         # @see OpenAI::Models::Responses::WebSearchTool#user_location
-        class UserLocation < OpenAI::BaseModel
+        class UserLocation < OpenAI::Internal::Type::BaseModel
           # @!attribute type
           #   The type of location approximation. Always `approximate`.
# @@ -138,7 +138,7 @@ class UserLocation < OpenAI::BaseModel # # # def initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index a7995455..154ea09f 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -3,7 +3,7 @@ module OpenAI module Models module ResponsesModel - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 730e687e..9f466cc0 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class StaticFileChunkingStrategy < OpenAI::BaseModel + class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # @@ -24,7 +24,7 @@ class StaticFileChunkingStrategy < OpenAI::BaseModel # # # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index be6299c5..1655679f 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class StaticFileChunkingStrategyObject < OpenAI::BaseModel + class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!attribute static # # @return [OpenAI::Models::StaticFileChunkingStrategy] @@ -20,7 +20,7 @@ class StaticFileChunkingStrategyObject < OpenAI::BaseModel # # # def initialize(static:, type: :static, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb index 21b6d0ac..f64fff68 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -2,7 +2,7 @@ module OpenAI module Models - class StaticFileChunkingStrategyObjectParam < OpenAI::BaseModel + class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!attribute static # # @return [OpenAI::Models::StaticFileChunkingStrategy] @@ -22,7 +22,7 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::BaseModel # # # def initialize(static:, type: :static, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index a905f9ff..d0da7a8f 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#create - class Upload < OpenAI::BaseModel + class Upload < OpenAI::Internal::Type::BaseModel # @!attribute id # The Upload 
unique identifier, which can be referenced in API endpoints. # @@ -75,13 +75,13 @@ class Upload < OpenAI::BaseModel # # # def initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the Upload. # # @see OpenAI::Models::Upload#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum PENDING = :pending COMPLETED = :completed diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 5602bb3f..7c44f8c9 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#cancel - class UploadCancelParams < OpenAI::BaseModel + class UploadCancelParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class UploadCancelParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 4b5b8cc3..378f2269 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#complete - class UploadCompleteParams < OpenAI::BaseModel + class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -12,7 +12,7 @@ class UploadCompleteParams < OpenAI::BaseModel # The ordered list of Part IDs. 
# # @return [Array] - required :part_ids, OpenAI::ArrayOf[String] + required :part_ids, OpenAI::Internal::Type::ArrayOf[String] # @!attribute [r] md5 # The optional md5 checksum for the file contents to verify if the bytes uploaded @@ -32,7 +32,7 @@ class UploadCompleteParams < OpenAI::BaseModel # # # def initialize(part_ids:, md5: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index b77c4093..9b199b9d 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#create - class UploadCreateParams < OpenAI::BaseModel + class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -47,7 +47,7 @@ class UploadCreateParams < OpenAI::BaseModel # # # def initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 12a0fcaf..ff3805ca 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Uploads # @see OpenAI::Resources::Uploads::Parts#create - class PartCreateParams < OpenAI::BaseModel + class PartCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ class PartCreateParams < OpenAI::BaseModel # # # def initialize(data:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/uploads/upload_part.rb b/lib/openai/models/uploads/upload_part.rb index e0566eb9..4f839461 100644 --- a/lib/openai/models/uploads/upload_part.rb +++ b/lib/openai/models/uploads/upload_part.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Uploads # @see OpenAI::Resources::Uploads::Parts#create - class UploadPart < OpenAI::BaseModel + class UploadPart < OpenAI::Internal::Type::BaseModel # @!attribute id # The upload Part unique identifier, which can be referenced in API endpoints. # @@ -39,7 +39,7 @@ class UploadPart < OpenAI::BaseModel # # # def initialize(id:, created_at:, upload_id:, object: :"upload.part", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 29391d3a..4b214ef5 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#create - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. # @@ -36,7 +36,7 @@ class VectorStore < OpenAI::BaseModel # a maximum length of 512 characters. 
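Taken together, `UploadCreateParams`, `PartCreateParams`, and `UploadCompleteParams` describe a three-step flow: create an Upload with the total byte count, add Parts, then complete with the ordered Part IDs, optionally passing an `md5` checksum. The sketch below follows the `@see` references; the exact method signatures and the `client` variable are assumptions, and only the parameter names come from this patch.

    require "digest"

    data = File.binread("train.jsonl") # `client`: an already-constructed OpenAI client

    upload = client.uploads.create(
      bytes: data.bytesize,
      filename: "train.jsonl",
      mime_type: "application/jsonl",
      purpose: "fine-tune"
    )
    part = client.uploads.parts.create(upload.id, data: data)
    client.uploads.complete(upload.id, part_ids: [part.id], md5: Digest::MD5.hexdigest(data))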
# # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::HashOf[String], nil?: true + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute name # The name of the vector store. @@ -113,10 +113,10 @@ class VectorStore < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::VectorStore#file_counts - class FileCounts < OpenAI::BaseModel + class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that were cancelled. # @@ -156,7 +156,7 @@ class FileCounts < OpenAI::BaseModel # # # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -165,7 +165,7 @@ class FileCounts < OpenAI::BaseModel # # @see OpenAI::Models::VectorStore#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum EXPIRED = :expired IN_PROGRESS = :in_progress @@ -179,7 +179,7 @@ module Status end # @see OpenAI::Models::VectorStore#expires_after - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. @@ -201,7 +201,7 @@ class ExpiresAfter < OpenAI::BaseModel # # # def initialize(days:, anchor: :last_active_at, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index a1e61412..42739728 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#create - class VectorStoreCreateParams < OpenAI::BaseModel + class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -35,7 +35,7 @@ class VectorStoreCreateParams < OpenAI::BaseModel # files. # # @return [Array, nil] - optional :file_ids, OpenAI::ArrayOf[String] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!parse # # @return [Array] @@ -50,7 +50,7 @@ class VectorStoreCreateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] name # The name of the vector store. @@ -82,9 +82,9 @@ class VectorStoreCreateParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
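The `metadata` fields in these hunks are `HashOf[String]`, capped per the doc comments at 16 key-value pairs with 512-character values (and 64-character keys, per the same API reference). A small validator sketch of those limits:

    def valid_metadata?(metadata)
      metadata.size <= 16 &&
        metadata.all? { |k, v| k.to_s.length <= 64 && v.is_a?(String) && v.length <= 512 }
    end

    valid_metadata?({"team" => "search", "env" => "prod"}) # => true
    valid_metadata?({"blob" => "x" * 513})                 # => false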
@@ -106,7 +106,7 @@ class ExpiresAfter < OpenAI::BaseModel # # # def initialize(days:, anchor: :last_active_at, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index 42f0e18c..5beedec7 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#delete - class VectorStoreDeleteParams < OpenAI::BaseModel + class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class VectorStoreDeleteParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_store_deleted.rb b/lib/openai/models/vector_store_deleted.rb index 84da9461..c3b6e2ab 100644 --- a/lib/openai/models/vector_store_deleted.rb +++ b/lib/openai/models/vector_store_deleted.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#delete - class VectorStoreDeleted < OpenAI::BaseModel + class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -12,7 +12,7 @@ class VectorStoreDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -26,7 +26,7 @@ class VectorStoreDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"vector_store.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 12ea7117..eaea91f5 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#list - class VectorStoreListParams < OpenAI::BaseModel + class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -65,12 +65,12 @@ class VectorStoreListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
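`ExpiresAfter` above pairs an `anchor` (only `last_active_at` is listed) with a day count, so the natural reading is that a store expires `days` after it was last active. In round numbers:

    require "time"

    last_active_at = Time.parse("2025-03-01T00:00:00Z")
    days = 7

    expires_at = last_active_at + days * 24 * 60 * 60
    expires_at.utc.iso8601 # => "2025-03-08T00:00:00Z"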
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index 096ded4f..f3b3b098 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#retrieve - class VectorStoreRetrieveParams < OpenAI::BaseModel + class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ class VectorStoreRetrieveParams < OpenAI::BaseModel # # # def initialize(request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index acf25855..e967edbe 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#search - class VectorStoreSearchParams < OpenAI::BaseModel + class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ class VectorStoreSearchParams < OpenAI::BaseModel # Whether to rewrite the natural language query for vector search. # # @return [Boolean, nil] - optional :rewrite_query, OpenAI::BooleanModel + optional :rewrite_query, OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Boolean] @@ -75,11 +75,11 @@ class VectorStoreSearchParams < OpenAI::BaseModel # super # end - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A query string for a search module Query - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String @@ -89,12 +89,12 @@ module Query # # @return [Array(String, Array)] # def self.variants; end - StringArray = OpenAI::ArrayOf[String] + StringArray = OpenAI::Internal::Type::ArrayOf[String] end # A filter to apply based on file attributes. module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. 
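`VectorStoreSearchParams::Query` above accepts either a single `String` or the `StringArray` alias. Downstream code typically normalizes the two shapes to one; a small sketch:

    def normalize_query(query)
      case query
      in String then [query]
      in Array => list then list.map(&:to_s)
      end
    end

    normalize_query("chunking strategy")      # => ["chunking strategy"]
    normalize_query(["chunking", "strategy"]) # => ["chunking", "strategy"]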
variant -> { OpenAI::Models::ComparisonFilter } @@ -107,7 +107,7 @@ module Filters # def self.variants; end end - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] ranker # # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] @@ -134,11 +134,11 @@ class RankingOptions < OpenAI::BaseModel # # # def initialize(ranker: nil, score_threshold: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 71b724a2..785e82a6 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#search - class VectorStoreSearchResponse < OpenAI::BaseModel + class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -13,14 +13,15 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] required :attributes, - -> { OpenAI::HashOf[union: OpenAI::Models::VectorStoreSearchResponse::Attribute] }, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStoreSearchResponse::Attribute] }, nil?: true # @!attribute content # Content chunks from the file. # # @return [Array] - required :content, -> { OpenAI::ArrayOf[OpenAI::Models::VectorStoreSearchResponse::Content] } + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::VectorStoreSearchResponse::Content] } # @!attribute file_id # The ID of the vector store file. @@ -49,23 +50,23 @@ class VectorStoreSearchResponse < OpenAI::BaseModel # # # def initialize(attributes:, content:, file_id:, filename:, score:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] # def self.variants; end end - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel # @!attribute text # The text content returned from search. # @@ -84,13 +85,13 @@ class Content < OpenAI::BaseModel # # # def initialize(text:, type:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The type of content. 
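`RankingOptions` above exposes a `ranker` choice plus a `score_threshold`. The natural reading of a threshold over ranked results is a floor on each hit's relevance `score`; a toy illustration (the cutoff semantics are an assumption, and the scores are invented):

    results = [
      {file_id: "file_a", score: 0.91},
      {file_id: "file_b", score: 0.42}
    ]

    score_threshold = 0.5
    results.select { |r| r[:score] >= score_threshold }
    # => [{file_id: "file_a", score: 0.91}]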
# # @see OpenAI::Models::VectorStoreSearchResponse::Content#type module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT = :text diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 43edb6ab..46a09e25 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -3,7 +3,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#update - class VectorStoreUpdateParams < OpenAI::BaseModel + class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -23,7 +23,7 @@ class VectorStoreUpdateParams < OpenAI::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute name # The name of the vector store. @@ -39,9 +39,9 @@ class VectorStoreUpdateParams < OpenAI::BaseModel # # # def initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. @@ -63,7 +63,7 @@ class ExpiresAfter < OpenAI::BaseModel # # # def initialize(days:, anchor: :last_active_at, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index c87c2b85..a64a4b9c 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#cancel - class FileBatchCancelParams < OpenAI::BaseModel + class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,7 @@ class FileBatchCancelParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 19b69c38..4140d9b9 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#create - class FileBatchCreateParams < OpenAI::BaseModel + class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -15,7 +15,7 @@ class FileBatchCreateParams < OpenAI::BaseModel # files. 
# # @return [Array] - required :file_ids, OpenAI::ArrayOf[String] + required :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -26,7 +26,7 @@ class FileBatchCreateParams < OpenAI::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::HashOf[union: OpenAI::Models::VectorStores::FileBatchCreateParams::Attribute] }, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileBatchCreateParams::Attribute] }, nil?: true # @!attribute [r] chunking_strategy @@ -48,16 +48,16 @@ class FileBatchCreateParams < OpenAI::BaseModel # # # def initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index d19506f4..991ac9cc 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#list_files - class FileBatchListFilesParams < OpenAI::BaseModel + class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -83,11 +83,11 @@ class FileBatchListFilesParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -104,7 +104,7 @@ module Filter # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
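The `Attribute` union above (and its twins elsewhere in this patch) admits `String`, `Float`, or boolean values for the 16-entry `attributes` hash. A sketch of checking a hash against that shape:

    def valid_attributes?(attributes)
      return false unless attributes.size <= 16
      attributes.values.all? do |v|
        v.is_a?(String) || v.is_a?(Float) || v == true || v == false
      end
    end

    valid_attributes?({"source" => "crawler", "confidence" => 0.87, "reviewed" => false}) # => true
    valid_attributes?({"count" => 3}) # => false: Integer is not a declared variant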
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index a05da932..55499481 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#retrieve - class FileBatchRetrieveParams < OpenAI::BaseModel + class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,7 @@ class FileBatchRetrieveParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index 9b6fefcc..26be8b94 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content - class FileContentParams < OpenAI::BaseModel + class FileContentParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,7 @@ class FileContentParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index 6f667726..01c19380 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content - class FileContentResponse < OpenAI::BaseModel + class FileContentResponse < OpenAI::Internal::Type::BaseModel # @!attribute [r] text # The text content # @@ -31,7 +31,7 @@ class FileContentResponse < OpenAI::BaseModel # # # def initialize(text: nil, type: nil, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 429f1ebc..f74f4ce2 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#create - class FileCreateParams < OpenAI::BaseModel + class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -26,7 +26,7 @@ class FileCreateParams < OpenAI::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::HashOf[union: 
OpenAI::Models::VectorStores::FileCreateParams::Attribute] }, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileCreateParams::Attribute] }, nil?: true # @!attribute [r] chunking_strategy @@ -48,16 +48,16 @@ class FileCreateParams < OpenAI::BaseModel # # # def initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index bf6c7241..e4bee072 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#delete - class FileDeleteParams < OpenAI::BaseModel + class FileDeleteParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,7 @@ class FileDeleteParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 01da7de3..0bcfb9d3 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#list - class FileListParams < OpenAI::BaseModel + class FileListParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -77,11 +77,11 @@ class FileListParams < OpenAI::BaseModel # # # def initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed @@ -98,7 +98,7 @@ module Filter # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC = :asc DESC = :desc diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 75f2cc40..9d9c26a7 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#retrieve - class FileRetrieveParams < OpenAI::BaseModel + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -20,7 +20,7 @@ class FileRetrieveParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index a14267b5..0cea5737 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#update - class FileUpdateParams < OpenAI::BaseModel + class FileUpdateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -23,7 +23,7 @@ class FileUpdateParams < OpenAI::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] required :attributes, - -> { OpenAI::HashOf[union: OpenAI::Models::VectorStores::FileUpdateParams::Attribute] }, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileUpdateParams::Attribute] }, nil?: true # @!parse @@ -33,16 +33,16 @@ class FileUpdateParams < OpenAI::BaseModel # # # def initialize(vector_store_id:, attributes:, request_options: {}, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union variant String variant Float - variant OpenAI::BooleanModel + variant OpenAI::Internal::Type::BooleanModel # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 7c64487b..12ed8106 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#create - class VectorStoreFile < OpenAI::BaseModel + class VectorStoreFile < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
#
@@ -63,7 +63,7 @@ class VectorStoreFile < OpenAI::BaseModel
 #
 #   @return [Hash{Symbol=>String, Float, Boolean}, nil]
 optional :attributes,
- -> { OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute] },
+ -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute] },
 nil?: true

 # @!attribute [r] chunking_strategy
@@ -104,10 +104,10 @@ class VectorStoreFile < OpenAI::BaseModel
 #   #     super
 #   #   end

- # def initialize: (Hash | OpenAI::BaseModel) -> void
+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

 # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error
- class LastError < OpenAI::BaseModel
+ class LastError < OpenAI::Internal::Type::BaseModel
 # @!attribute code
 #   One of `server_error` or `rate_limit_exceeded`.
 #
@@ -129,13 +129,13 @@ class LastError < OpenAI::BaseModel
 #   #
 #   # def initialize(code:, message:, **) = super

- # def initialize: (Hash | OpenAI::BaseModel) -> void
+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

 # One of `server_error` or `rate_limit_exceeded`.
 #
 # @see OpenAI::Models::VectorStores::VectorStoreFile::LastError#code
 module Code
- extend OpenAI::Enum
+ extend OpenAI::Internal::Type::Enum

 SERVER_ERROR = :server_error
 UNSUPPORTED_FILE = :unsupported_file
@@ -155,7 +155,7 @@ module Code
 #
 # @see OpenAI::Models::VectorStores::VectorStoreFile#status
 module Status
- extend OpenAI::Enum
+ extend OpenAI::Internal::Type::Enum

 IN_PROGRESS = :in_progress
 COMPLETED = :completed
@@ -170,13 +170,13 @@ module Status
 end

 module Attribute
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 variant String

 variant Float

- variant OpenAI::BooleanModel
+ variant OpenAI::Internal::Type::BooleanModel

 # @!parse
 #   # @return [Array(String, Float, Boolean)]
diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb
index b08f672c..ebcd7390 100644
--- a/lib/openai/models/vector_stores/vector_store_file_batch.rb
+++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb
@@ -4,7 +4,7 @@ module OpenAI
 module Models
 module VectorStores
 # @see OpenAI::Resources::VectorStores::FileBatches#create
- class VectorStoreFileBatch < OpenAI::BaseModel
+ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel
 # @!attribute id
 #   The identifier, which can be referenced in API endpoints.
 #
@@ -57,10 +57,10 @@ class VectorStoreFileBatch < OpenAI::BaseModel
 #   #
 #   # def initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch", **) = super

- # def initialize: (Hash | OpenAI::BaseModel) -> void
+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

 # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts
- class FileCounts < OpenAI::BaseModel
+ class FileCounts < OpenAI::Internal::Type::BaseModel
 # @!attribute cancelled
 #   The number of files that were cancelled.
# @@ -100,7 +100,7 @@ class FileCounts < OpenAI::BaseModel # # # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end # The status of the vector store files batch, which can be either `in_progress`, @@ -108,7 +108,7 @@ class FileCounts < OpenAI::BaseModel # # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress COMPLETED = :completed diff --git a/lib/openai/models/vector_stores/vector_store_file_deleted.rb b/lib/openai/models/vector_stores/vector_store_file_deleted.rb index 26fa7214..13f5c954 100644 --- a/lib/openai/models/vector_stores/vector_store_file_deleted.rb +++ b/lib/openai/models/vector_stores/vector_store_file_deleted.rb @@ -4,7 +4,7 @@ module OpenAI module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#delete - class VectorStoreFileDeleted < OpenAI::BaseModel + class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel # @!attribute id # # @return [String] @@ -13,7 +13,7 @@ class VectorStoreFileDeleted < OpenAI::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::BooleanModel + required :deleted, OpenAI::Internal::Type::BooleanModel # @!attribute object # @@ -27,7 +27,7 @@ class VectorStoreFileDeleted < OpenAI::BaseModel # # # def initialize(id:, deleted:, object: :"vector_store.file.deleted", **) = super - # def initialize: (Hash | OpenAI::BaseModel) -> void + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void end end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index 279c2936..5f374fb2 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -6,7 +6,7 @@ module OpenAI # # When making a request, you can pass an actual {RequestOptions} instance, or # simply pass a Hash with symbol keys matching the attributes on this class. - class RequestOptions < OpenAI::BaseModel + class RequestOptions < OpenAI::Internal::Type::BaseModel # @api private # # @param opts [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -37,21 +37,21 @@ def self.validate!(opts) # `query` given at the client level. # # @return [Hash{String=>Array, String, nil}, nil] - optional :extra_query, OpenAI::HashOf[OpenAI::ArrayOf[String]] + optional :extra_query, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::ArrayOf[String]] # @!attribute extra_headers # Extra headers to send with the request. These are `.merged`’d into any # `extra_headers` given at the client level. # # @return [Hash{String=>String, nil}, nil] - optional :extra_headers, OpenAI::HashOf[String, nil?: true] + optional :extra_headers, OpenAI::Internal::Type::HashOf[String, nil?: true] # @!attribute extra_body # Extra data to send with the request. These are deep merged into any data # generated as part of the normal request. # # @return [Object, nil] - optional :extra_body, OpenAI::HashOf[OpenAI::Unknown] + optional :extra_body, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!attribute max_retries # Maximum number of retries to attempt after a failed initial request. 
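For reference, the `extra_query`, `extra_headers`, and `extra_body` fields above are plain
hashes whose shapes are fixed by the `HashOf`/`ArrayOf` declarations. A minimal sketch of a
per-call options hash satisfying those shapes, assuming a constructed `client` whose resource
methods accept `request_options:` as the class comment describes (the query key, header name,
and the `client.batches.list` call are illustrative, not part of this patch):

    # Hedged sketch: field names come from RequestOptions above; values are examples.
    request_options = {
      extra_query: {"include" => ["usage"]},      # Hash{String=>Array[String]}
      extra_headers: {"x-example-debug" => nil},  # values may be nil, per the nilable type above
      extra_body: {metadata: {source: "sketch"}}, # deep merged into the generated payload
      max_retries: 2
    }
    client.batches.list(limit: 10, request_options: request_options)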
diff --git a/rbi/lib/openai/aliases.rbi b/rbi/lib/openai/aliases.rbi deleted file mode 100644 index 5641bd39..00000000 --- a/rbi/lib/openai/aliases.rbi +++ /dev/null @@ -1,19 +0,0 @@ -# typed: strong - -module OpenAI - Unknown = OpenAI::Internal::Type::Unknown - - BooleanModel = OpenAI::Internal::Type::BooleanModel - - Enum = OpenAI::Internal::Type::Enum - - Union = OpenAI::Internal::Type::Union - - ArrayOf = OpenAI::Internal::Type::ArrayOf - - HashOf = OpenAI::Internal::Type::HashOf - - BaseModel = OpenAI::Internal::Type::BaseModel - - RequestParameters = OpenAI::Internal::Type::RequestParameters -end diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi index d04fc102..a0f3280e 100644 --- a/rbi/lib/openai/errors.rbi +++ b/rbi/lib/openai/errors.rbi @@ -170,32 +170,4 @@ module OpenAI HTTP_STATUS = T.let((500..), T::Range[Integer]) end end - - Error = OpenAI::Errors::Error - - ConversionError = OpenAI::Errors::ConversionError - - APIError = OpenAI::Errors::APIError - - APIStatusError = OpenAI::Errors::APIStatusError - - APIConnectionError = OpenAI::Errors::APIConnectionError - - APITimeoutError = OpenAI::Errors::APITimeoutError - - BadRequestError = OpenAI::Errors::BadRequestError - - AuthenticationError = OpenAI::Errors::AuthenticationError - - PermissionDeniedError = OpenAI::Errors::PermissionDeniedError - - NotFoundError = OpenAI::Errors::NotFoundError - - ConflictError = OpenAI::Errors::ConflictError - - UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError - - RateLimitError = OpenAI::Errors::RateLimitError - - InternalServerError = OpenAI::Errors::InternalServerError end diff --git a/rbi/lib/openai/internal.rbi b/rbi/lib/openai/internal.rbi new file mode 100644 index 00000000..ff17c5f5 --- /dev/null +++ b/rbi/lib/openai/internal.rbi @@ -0,0 +1,10 @@ +# typed: strong + +module OpenAI + # @api private + module Internal + # Due to the current WIP status of Shapes support in Sorbet, types referencing + # this alias might be refined in the future. 
+ AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + end +end diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index abc94734..f27429e0 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -25,10 +25,13 @@ module OpenAI ), body: T.nilable(T.anything), unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), + stream: T.nilable( + T::Class[OpenAI::Internal::Type::BaseStream[T.anything, + OpenAI::Internal::Type::BaseModel]] + ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) } end @@ -136,7 +139,7 @@ module OpenAI overridable .params( req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, - opts: OpenAI::Internal::Util::AnyHash + opts: OpenAI::Internal::AnyHash ) .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) end @@ -180,10 +183,13 @@ module OpenAI ), body: T.nilable(T.anything), unwrap: T.nilable(Symbol), - page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::BaseModel]]), - stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::BaseModel]]), + page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), + stream: T.nilable( + T::Class[OpenAI::Internal::Type::BaseStream[T.anything, + OpenAI::Internal::Type::BaseModel]] + ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(T.anything) end @@ -196,7 +202,7 @@ module OpenAI unwrap: nil, page: nil, stream: nil, - model: OpenAI::Unknown, + model: OpenAI::Internal::Type::Unknown, options: {} ) end diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index 54f032b7..d01b1212 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -17,11 +17,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .returns(T.attached_class) end @@ -72,11 +72,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .void end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 6a8646a9..f5cf4f9c 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -22,7 +22,7 @@ module OpenAI T::Hash[ Symbol, T.all( - OpenAI::BaseModel::KnownFieldShape, + OpenAI::Internal::Type::BaseModel::KnownFieldShape, {type_fn: 
T.proc.returns(OpenAI::Internal::Type::Converter::Input)} ) ] @@ -34,11 +34,13 @@ module OpenAI # @api private sig do returns( - T::Hash[Symbol, - T.all( - OpenAI::BaseModel::KnownFieldShape, - {type: OpenAI::Internal::Type::Converter::Input} - )] + T::Hash[ + Symbol, + T.all( + OpenAI::Internal::Type::BaseModel::KnownFieldShape, + {type: OpenAI::Internal::Type::Converter::Input} + ) + ] ) end def fields @@ -60,7 +62,7 @@ module OpenAI T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .void end @@ -72,11 +74,11 @@ module OpenAI params( name_sym: Symbol, type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .void end @@ -88,11 +90,11 @@ module OpenAI params( name_sym: Symbol, type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .void end @@ -128,7 +130,7 @@ module OpenAI sig do override .params( - value: T.any(OpenAI::BaseModel, T::Hash[T.anything, T.anything], T.anything), + value: T.any(OpenAI::Internal::Type::BaseModel, T::Hash[T.anything, T.anything], T.anything), state: OpenAI::Internal::Type::Converter::State ) .returns(T.any(T.attached_class, T.anything)) @@ -164,7 +166,7 @@ module OpenAI # # This method is not recursive. The returned value is shared by the object, so it # should not be mutated. - sig { overridable.returns(OpenAI::Internal::Util::AnyHash) } + sig { overridable.returns(OpenAI::Internal::AnyHash) } def to_h end @@ -176,11 +178,11 @@ module OpenAI # # This method is not recursive. The returned value is shared by the object, so it # should not be mutated. 
- sig { overridable.returns(OpenAI::Internal::Util::AnyHash) } + sig { overridable.returns(OpenAI::Internal::AnyHash) } def to_hash end - sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::Util::AnyHash) } + sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::AnyHash) } def deconstruct_keys(keys) end diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index 207ff5c9..d63a83f4 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -17,11 +17,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .returns(T.attached_class) end @@ -43,7 +43,7 @@ module OpenAI value: T.any(T::Hash[T.anything, T.anything], T.anything), state: OpenAI::Internal::Type::Converter::State ) - .returns(T.any(OpenAI::Internal::Util::AnyHash, T.anything)) + .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end def coerce(value, state:) end @@ -52,7 +52,7 @@ module OpenAI sig(:final) do override .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) - .returns(T.any(OpenAI::Internal::Util::AnyHash, T.anything)) + .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end def dump(value) end @@ -71,11 +71,11 @@ module OpenAI sig(:final) do params( type_info: T.any( - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, T.proc.returns(OpenAI::Internal::Type::Converter::Input), OpenAI::Internal::Type::Converter::Input ), - spec: OpenAI::Internal::Util::AnyHash + spec: OpenAI::Internal::AnyHash ) .void end diff --git a/rbi/lib/openai/internal/type/request_parameters.rbi b/rbi/lib/openai/internal/type/request_parameters.rbi index 6170a658..18a79320 100644 --- a/rbi/lib/openai/internal/type/request_parameters.rbi +++ b/rbi/lib/openai/internal/type/request_parameters.rbi @@ -6,13 +6,13 @@ module OpenAI # @api private module RequestParameters # Options to specify HTTP behaviour for this request. 
- sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) } + sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) } attr_accessor :request_options # @api private module Converter # @api private - sig { params(params: T.anything).returns([T.anything, OpenAI::Internal::Util::AnyHash]) } + sig { params(params: T.anything).returns([T.anything, OpenAI::Internal::AnyHash]) } def dump_request(params) end end diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi index d96795ec..3d1486f0 100644 --- a/rbi/lib/openai/internal/type/union.rbi +++ b/rbi/lib/openai/internal/type/union.rbi @@ -34,8 +34,8 @@ module OpenAI # @api private sig do params( - key: T.any(Symbol, OpenAI::Internal::Util::AnyHash, T.proc.returns(T.anything), T.anything), - spec: T.any(OpenAI::Internal::Util::AnyHash, T.proc.returns(T.anything), T.anything) + key: T.any(Symbol, OpenAI::Internal::AnyHash, T.proc.returns(T.anything), T.anything), + spec: T.any(OpenAI::Internal::AnyHash, T.proc.returns(T.anything), T.anything) ) .void end diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index 5414927e..7abcdc1a 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -4,10 +4,6 @@ module OpenAI module Internal # @api private module Util - # Due to the current WIP status of Shapes support in Sorbet, types referencing - # this alias might be refined in the future. - AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } - # @api private sig { returns(Float) } def self.monotonic_secs @@ -57,13 +53,6 @@ module OpenAI end end - # Use this to indicate that a value should be explicitly removed from a data - # structure when using `OpenAI::Internal::Util.deep_merge`. - # - # e.g. merging `{a: 1}` and `{a: OMIT}` should produce `{}`, where merging - # `{a: 1}` and `{}` would produce `{a: 1}`. 
- OMIT = T.let(T.anything, T.anything)
-
 class << self
 # @api private
 sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) }
@@ -90,7 +79,7 @@ module OpenAI
 # @api private
 sig do
 params(
- data: T.any(OpenAI::Internal::Util::AnyHash, T::Array[T.anything], T.anything),
+ data: T.any(OpenAI::Internal::AnyHash, T::Array[T.anything], T.anything),
 pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])),
 sentinel: T.nilable(T.anything),
 blk: T.nilable(T.proc.returns(T.anything))
diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi
index 3d16cbd5..edc0d8c5 100644
--- a/rbi/lib/openai/models/all_models.rbi
+++ b/rbi/lib/openai/models/all_models.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module AllModels
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) }
 def self.variants
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi
index 4f531df7..08cd04a2 100644
--- a/rbi/lib/openai/models/audio/speech_create_params.rbi
+++ b/rbi/lib/openai/models/audio/speech_create_params.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class SpeechCreateParams < OpenAI::BaseModel
+ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel
 extend OpenAI::Internal::Type::RequestParameters::Converter
 include OpenAI::Internal::Type::RequestParameters

@@ -55,7 +55,7 @@ module OpenAI
 instructions: String,
 response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol,
 speed: Float,
- request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
+ request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)
 )
 .returns(T.attached_class)
 end
@@ -90,7 +90,7 @@ module OpenAI
 # One of the available [TTS models](https://platform.openai.com/docs/models#tts):
 # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`.
 module Model
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) }
 def self.variants
@@ -102,7 +102,7 @@ module OpenAI
 # `verse`. Previews of the voices are available in the
 # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 module Voice
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) }
 def self.variants
@@ -128,7 +128,7 @@ module OpenAI
 # The format to return audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
 # `wav`, and `pcm`.
module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } OrSymbol = diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index ed1acea8..a64b1353 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ b/rbi/lib/openai/models/audio/speech_model.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio module SpeechModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index f6d6ac1a..5310fa78 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class Transcription < OpenAI::BaseModel + class Transcription < OpenAI::Internal::Type::BaseModel # The transcribed text. sig { returns(String) } attr_accessor :text @@ -16,7 +16,7 @@ module OpenAI sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::AnyHash)] ) .void end @@ -27,7 +27,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end @@ -38,7 +38,7 @@ module OpenAI def to_hash end - class Logprob < OpenAI::BaseModel + class Logprob < OpenAI::Internal::Type::BaseModel # The token in the transcription. sig { returns(T.nilable(String)) } attr_reader :token diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 2db27d93..951cde7b 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class TranscriptionCreateParams < OpenAI::BaseModel + class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -98,7 +98,7 @@ module OpenAI response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -138,7 +138,7 @@ module OpenAI # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). 
module Model
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) }
 def self.variants
@@ -146,7 +146,7 @@ end

 module TimestampGranularity
- extend OpenAI::Enum
+ extend OpenAI::Internal::Type::Enum

 TaggedSymbol =
 T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) }
diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi
index 482495e5..c2a3aad6 100644
--- a/rbi/lib/openai/models/audio/transcription_create_response.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi
@@ -6,7 +6,7 @@ module OpenAI
 # Represents a transcription response returned by the model, based on the provided
 # input.
 module TranscriptionCreateResponse
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) }
 def self.variants
diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi
index 878b37d7..e1de4abe 100644
--- a/rbi/lib/openai/models/audio/transcription_include.rbi
+++ b/rbi/lib/openai/models/audio/transcription_include.rbi
@@ -4,7 +4,7 @@ module OpenAI
 module Models
 module Audio
 module TranscriptionInclude
- extend OpenAI::Enum
+ extend OpenAI::Internal::Type::Enum

 TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) }
 OrSymbol =
diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi
index 96cdfd4e..b4b718a4 100644
--- a/rbi/lib/openai/models/audio/transcription_segment.rbi
+++ b/rbi/lib/openai/models/audio/transcription_segment.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class TranscriptionSegment < OpenAI::BaseModel
+ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel
 # Unique identifier of the segment.
 sig { returns(Integer) }
 attr_accessor :id
diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi
index 82ef83d4..32426872 100644
--- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi
@@ -8,7 +8,7 @@ module OpenAI
 # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription)
 # with the `Stream` parameter set to `true`.
 module TranscriptionStreamEvent
- extend OpenAI::Union
+ extend OpenAI::Internal::Type::Union

 sig do
 override
diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
index 7612edef..aeddfb85 100644
--- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class TranscriptionTextDeltaEvent < OpenAI::BaseModel
+ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel
 # The text delta that was additionally transcribed.
sig { returns(String) } attr_accessor :delta @@ -20,7 +20,7 @@ module OpenAI sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::AnyHash)] ) .void end @@ -33,7 +33,7 @@ module OpenAI sig do params( delta: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::Util::AnyHash)], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::AnyHash)], type: Symbol ) .returns(T.attached_class) @@ -50,7 +50,7 @@ module OpenAI def to_hash end - class Logprob < OpenAI::BaseModel + class Logprob < OpenAI::Internal::Type::BaseModel # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } attr_reader :token diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index fd8ce791..c422dcf3 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class TranscriptionTextDoneEvent < OpenAI::BaseModel + class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # The text that was transcribed. sig { returns(String) } attr_accessor :text @@ -21,7 +21,7 @@ module OpenAI sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::Util::AnyHash)] + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::AnyHash)] ) .void end @@ -34,7 +34,7 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::Util::AnyHash)], + logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::AnyHash)], type: Symbol ) .returns(T.attached_class) @@ -51,7 +51,7 @@ module OpenAI def to_hash end - class Logprob < OpenAI::BaseModel + class Logprob < OpenAI::Internal::Type::BaseModel # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } attr_reader :token diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 6500d39d..cde99b0d 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class TranscriptionVerbose < OpenAI::BaseModel + class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # The duration of the input audio. 
sig { returns(Float) }
 attr_accessor :duration

@@ -21,9 +21,7 @@ module OpenAI
 attr_reader :segments

 sig do
- params(
- segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)]
- )
+ params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)])
 .void
 end
 attr_writer :segments

@@ -32,10 +30,7 @@ module OpenAI
 sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionWord])) }
 attr_reader :words

- sig do
- params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::Util::AnyHash)])
- .void
- end
+ sig { params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::AnyHash)]).void }
 attr_writer :words

 # Represents a verbose json transcription response returned by the model, based on the
@@ -45,8 +40,8 @@ module OpenAI
 duration: Float,
 language: String,
 text: String,
- segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)],
- words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::Util::AnyHash)]
+ segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)],
+ words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::AnyHash)]
 )
 .returns(T.attached_class)
 end
diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/lib/openai/models/audio/transcription_word.rbi
index c6adc3b1..5907efa6 100644
--- a/rbi/lib/openai/models/audio/transcription_word.rbi
+++ b/rbi/lib/openai/models/audio/transcription_word.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class TranscriptionWord < OpenAI::BaseModel
+ class TranscriptionWord < OpenAI::Internal::Type::BaseModel
 # End time of the word in seconds.
 sig { returns(Float) }
 attr_accessor :end_
diff --git a/rbi/lib/openai/models/audio/translation.rbi b/rbi/lib/openai/models/audio/translation.rbi
index 07767362..a2735384 100644
--- a/rbi/lib/openai/models/audio/translation.rbi
+++ b/rbi/lib/openai/models/audio/translation.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class Translation < OpenAI::BaseModel
+ class Translation < OpenAI::Internal::Type::BaseModel
 sig { returns(String) }
 attr_accessor :text
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi
index 6c2cdb3a..aa1ac9b3 100644
--- a/rbi/lib/openai/models/audio/translation_create_params.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_params.rbi
@@ -3,7 +3,7 @@
 module OpenAI
 module Models
 module Audio
- class TranslationCreateParams < OpenAI::BaseModel
+ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel
 extend OpenAI::Internal::Type::RequestParameters::Converter
 include OpenAI::Internal::Type::RequestParameters

@@ -53,7 +53,7 @@ module OpenAI
 prompt: String,
 response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol,
 temperature: Float,
- request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)
+ request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)
 )
 .returns(T.attached_class)
 end
@@ -79,7 +79,7 @@ module OpenAI
 # ID of the model to use. Only `whisper-1` (which is powered by our open source
 # Whisper V2 model) is currently available.
module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } def self.variants @@ -89,7 +89,7 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi index 8c9c9851..8c96ec6e 100644 --- a/rbi/lib/openai/models/audio/translation_create_response.rbi +++ b/rbi/lib/openai/models/audio/translation_create_response.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio module TranslationCreateResponse - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) } def self.variants diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi index fee32692..90251977 100644 --- a/rbi/lib/openai/models/audio/translation_verbose.rbi +++ b/rbi/lib/openai/models/audio/translation_verbose.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Audio - class TranslationVerbose < OpenAI::BaseModel + class TranslationVerbose < OpenAI::Internal::Type::BaseModel # The duration of the input audio. sig { returns(Float) } attr_accessor :duration @@ -21,9 +21,7 @@ module OpenAI attr_reader :segments sig do - params( - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)] - ) + params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)]) .void end attr_writer :segments @@ -33,7 +31,7 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::Util::AnyHash)] + segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index 656d0083..09edaa97 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module AudioModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 1246abca..ac5e8f58 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -6,7 +6,7 @@ module OpenAI # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. 
module AudioResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi index 43d54634..58bd0717 100644 --- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class AutoFileChunkingStrategyParam < OpenAI::BaseModel + class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel # Always `auto`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index ddeafb8b..b34e5a45 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Batch < OpenAI::BaseModel + class Batch < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id @@ -61,7 +61,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Batch::Errors)) } attr_reader :errors - sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::Util::AnyHash)).void } + sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::AnyHash)).void } attr_writer :errors # The Unix timestamp (in seconds) for when the batch expired. @@ -119,7 +119,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::BatchRequestCounts)) } attr_reader :request_counts - sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::Util::AnyHash)).void } + sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::AnyHash)).void } attr_writer :request_counts sig do @@ -134,7 +134,7 @@ module OpenAI cancelling_at: Integer, completed_at: Integer, error_file_id: String, - errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::Util::AnyHash), + errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::AnyHash), expired_at: Integer, expires_at: Integer, failed_at: Integer, @@ -142,7 +142,7 @@ module OpenAI in_progress_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, - request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::Util::AnyHash), + request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -203,7 +203,7 @@ module OpenAI # The current status of the batch. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Batch::Status::TaggedSymbol) } @@ -222,11 +222,11 @@ module OpenAI end end - class Errors < OpenAI::BaseModel + class Errors < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(T::Array[OpenAI::Models::BatchError])) } attr_reader :data - sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::Util::AnyHash)]).void } + sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)]).void } attr_writer :data # The object type, which is always `list`. 
@@ -237,7 +237,7 @@ module OpenAI attr_writer :object sig do - params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::Util::AnyHash)], object: String) + params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)], object: String) .returns(T.attached_class) end def self.new(data: nil, object: nil) diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/lib/openai/models/batch_cancel_params.rbi index dfdfdb51..e5ef1114 100644 --- a/rbi/lib/openai/models/batch_cancel_params.rbi +++ b/rbi/lib/openai/models/batch_cancel_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class BatchCancelParams < OpenAI::BaseModel + class BatchCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 4a38d820..d27284f7 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchCreateParams < OpenAI::BaseModel + class BatchCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -45,7 +45,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -70,7 +70,7 @@ module OpenAI # The time frame within which the batch should be processed. Currently only `24h` # is supported. module CompletionWindow - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } OrSymbol = @@ -88,7 +88,7 @@ module OpenAI # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. module Endpoint - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } OrSymbol = diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/lib/openai/models/batch_error.rbi index 261a3ac1..157787c8 100644 --- a/rbi/lib/openai/models/batch_error.rbi +++ b/rbi/lib/openai/models/batch_error.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchError < OpenAI::BaseModel + class BatchError < OpenAI::Internal::Type::BaseModel # An error code identifying the error type. 
sig { returns(T.nilable(String)) } attr_reader :code diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 498b22b2..479dcdff 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchListParams < OpenAI::BaseModel + class BatchListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -28,7 +28,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/lib/openai/models/batch_request_counts.rbi index 03b6cecc..6b688106 100644 --- a/rbi/lib/openai/models/batch_request_counts.rbi +++ b/rbi/lib/openai/models/batch_request_counts.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class BatchRequestCounts < OpenAI::BaseModel + class BatchRequestCounts < OpenAI::Internal::Type::BaseModel # Number of requests that have been completed successfully. sig { returns(Integer) } attr_accessor :completed diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/lib/openai/models/batch_retrieve_params.rbi index ced68a22..002b5af2 100644 --- a/rbi/lib/openai/models/batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/batch_retrieve_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class BatchRetrieveParams < OpenAI::BaseModel + class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index de41edc8..3ea623d1 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class Assistant < OpenAI::BaseModel + class Assistant < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. 
sig { returns(String) }
 attr_accessor :id

@@ -111,7 +111,7 @@ module OpenAI

 sig do
 params(
- tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::Util::AnyHash))
+ tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::AnyHash))
 )
 .void
 end
@@ -138,7 +138,7 @@ module OpenAI
 tools: T::Array[
 T.any(
 OpenAI::Models::Beta::CodeInterpreterTool,
- OpenAI::Internal::Util::AnyHash,
+ OpenAI::Internal::AnyHash,
 OpenAI::Models::Beta::FileSearchTool,
 OpenAI::Models::Beta::FunctionTool
 )
 ],
 response_format: T.nilable(
 T.any(
 Symbol,
 OpenAI::Models::ResponseFormatText,
- OpenAI::Internal::Util::AnyHash,
+ OpenAI::Internal::AnyHash,
 OpenAI::Models::ResponseFormatJSONObject,
 OpenAI::Models::ResponseFormatJSONSchema
 )
 ),
 temperature: T.nilable(Float),
- tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::Util::AnyHash)),
+ tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::AnyHash)),
 top_p: T.nilable(Float),
 object: Symbol
 )
@@ -212,13 +212,13 @@ module OpenAI
 def to_hash
 end

- class ToolResources < OpenAI::BaseModel
+ class ToolResources < OpenAI::Internal::Type::BaseModel
 sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter)) }
 attr_reader :code_interpreter

 sig do
 params(
- code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash)
+ code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash)
 )
 .void
 end
 attr_writer :code_interpreter

 sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch)) }
 attr_reader :file_search

 sig do
 params(
- file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash)
+ file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::AnyHash)
 )
 .void
 end
 attr_writer :file_search

 # A set of resources that are used by the assistant's tools. The resources are
 # specific to the type of tool. For example, the `code_interpreter` tool requires
 # a list of file IDs, while the `file_search` tool requires a list of vector store
 # IDs.
 sig do
 params(
- code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash),
- file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash)
+ code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash),
+ file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::AnyHash)
 )
 .returns(T.attached_class)
 end
 def self.new(code_interpreter: nil, file_search: nil)
 end

 sig do
 override
 .returns(
 {
 code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter,
 file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
 }
 )
 end
 def to_hash
 end

- class CodeInterpreter < OpenAI::BaseModel
+ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
 # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
 # available to the `code_interpreter` tool. There can be a maximum of 20 files
 # associated with the tool.
 sig { returns(T.nilable(T::Array[String])) }
 attr_reader :file_ids

 sig { params(file_ids: T::Array[String]).void }
 attr_writer :file_ids

 sig { params(file_ids: T::Array[String]).returns(T.attached_class) }
 def self.new(file_ids: nil)
 end

 sig { override.returns({file_ids: T::Array[String]}) }
 def to_hash
 end
 end

- class FileSearch < OpenAI::BaseModel
+ class FileSearch < OpenAI::Internal::Type::BaseModel
 # The ID of the
 # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
 # attached to this assistant. 
There can be a maximum of 1 vector store attached to diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index 19dabe18..a75932ff 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantCreateParams < OpenAI::BaseModel + class AssistantCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -95,9 +95,7 @@ module OpenAI sig do params( - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -126,7 +124,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -156,25 +154,23 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -235,14 +231,14 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } def self.variants end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter @@ -250,7 +246,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -262,10 +258,7 @@ module OpenAI sig do params( - file_search: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -279,12 +272,9 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - file_search: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -303,7 +293,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -322,7 +312,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -349,7 +339,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -363,7 +353,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -384,7 +374,7 @@ module OpenAI def to_hash end - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
sig do @@ -403,7 +393,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -433,7 +423,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -463,9 +453,9 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -481,7 +471,7 @@ module OpenAI end end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel sig do returns( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -493,7 +483,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -508,7 +498,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -529,7 +519,7 @@ module OpenAI def to_hash end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. 
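As a minimal sketch of what the relocated `OpenAI::Internal::AnyHash` signatures above permit (the model name and payload values here are illustrative assumptions, not taken from the patch), a caller may pass plain hashes wherever the typed models appear in a `T.any(...)`:

  # Minimal sketch, assuming the generated models in this patch are loaded.
  params = OpenAI::Models::Beta::AssistantCreateParams.new(
    model: "gpt-4o", # assumption: `Model` is a union of String and OpenAI::Models::ChatModel::OrSymbol
    tool_resources: { # plain Hash stands in for ToolResources via OpenAI::Internal::AnyHash
      file_search: {
        # hashes are likewise accepted for VectorStore and ChunkingStrategy::Auto
        vector_stores: [{chunking_strategy: {type: :auto}}]
      }
    }
  )
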
diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/lib/openai/models/beta/assistant_delete_params.rbi index 31fbc46f..b929b1f8 100644 --- a/rbi/lib/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_delete_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Beta - class AssistantDeleteParams < OpenAI::BaseModel + class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_deleted.rbi b/rbi/lib/openai/models/beta/assistant_deleted.rbi index c5b56819..08a9c254 100644 --- a/rbi/lib/openai/models/beta/assistant_deleted.rbi +++ b/rbi/lib/openai/models/beta/assistant_deleted.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantDeleted < OpenAI::BaseModel + class AssistantDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index c8a24c36..7da7715a 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantListParams < OpenAI::BaseModel + class AssistantListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -74,7 +74,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index a44a0adc..0b95306f 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -24,7 +24,7 @@ module OpenAI # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. 
module AssistantResponseFormatOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi index 3e5121e0..f118b741 100644 --- a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Beta - class AssistantRetrieveParams < OpenAI::BaseModel + class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index e3b90b3b..86e113fd 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -24,15 +24,15 @@ module OpenAI # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) # to learn how to integrate the Assistants API with streaming. module AssistantStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class ThreadCreated < OpenAI::BaseModel + class ThreadCreated < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -50,7 +50,7 @@ module OpenAI # created. sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), enabled: T::Boolean, event: Symbol ) @@ -64,13 +64,13 @@ module OpenAI end end - class ThreadRunCreated < OpenAI::BaseModel + class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -79,7 +79,7 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.created") @@ -90,13 +90,13 @@ module OpenAI end end - class ThreadRunQueued < OpenAI::BaseModel + class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -105,7 +105,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.queued") @@ -116,13 +116,13 @@ module OpenAI end end - class ThreadRunInProgress < OpenAI::BaseModel + class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -131,7 +131,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.in_progress") @@ -142,13 +142,13 @@ module OpenAI end end - class ThreadRunRequiresAction < OpenAI::BaseModel + class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -157,7 +157,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.requires_action") @@ -168,13 +168,13 @@ module OpenAI end end - class ThreadRunCompleted < OpenAI::BaseModel + class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -183,7 +183,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.completed") @@ -194,13 +194,13 @@ module OpenAI end end - class ThreadRunIncomplete < OpenAI::BaseModel + class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -209,7 +209,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.incomplete") @@ -220,13 +220,13 @@ module OpenAI end end - class ThreadRunFailed < OpenAI::BaseModel + class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -235,7 +235,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.failed") @@ -246,13 +246,13 @@ module OpenAI end end - class ThreadRunCancelling < OpenAI::BaseModel + class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -261,7 +261,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelling") @@ -272,13 +272,13 @@ module OpenAI end end - class ThreadRunCancelled < OpenAI::BaseModel + class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -287,7 +287,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelled") @@ -298,13 +298,13 @@ module OpenAI end end - class ThreadRunExpired < OpenAI::BaseModel + class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -313,7 +313,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.expired") @@ -324,12 +324,12 @@ module OpenAI end end - class ThreadRunStepCreated < OpenAI::BaseModel + class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -340,7 +340,7 @@ module OpenAI # is created. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -353,12 +353,12 @@ module OpenAI end end - class ThreadRunStepInProgress < OpenAI::BaseModel + class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. 
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -369,7 +369,7 @@ module OpenAI # moves to an `in_progress` state. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -382,18 +382,13 @@ module OpenAI end end - class ThreadRunStepDelta < OpenAI::BaseModel + class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -404,7 +399,7 @@ module OpenAI # are being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -417,12 +412,12 @@ module OpenAI end end - class ThreadRunStepCompleted < OpenAI::BaseModel + class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -433,7 +428,7 @@ module OpenAI # is completed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -446,12 +441,12 @@ module OpenAI end end - class ThreadRunStepFailed < OpenAI::BaseModel + class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -462,7 +457,7 @@ module OpenAI # fails. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -475,12 +470,12 @@ module OpenAI end end - class ThreadRunStepCancelled < OpenAI::BaseModel + class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. 
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -491,7 +486,7 @@ module OpenAI # is cancelled. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -504,12 +499,12 @@ module OpenAI end end - class ThreadRunStepExpired < OpenAI::BaseModel + class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -520,7 +515,7 @@ module OpenAI # expires. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -533,13 +528,13 @@ module OpenAI end end - class ThreadMessageCreated < OpenAI::BaseModel + class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -549,10 +544,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.created") @@ -563,13 +555,13 @@ module OpenAI end end - class ThreadMessageInProgress < OpenAI::BaseModel + class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -579,10 +571,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. 
sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.in_progress") @@ -593,13 +582,13 @@ module OpenAI end end - class ThreadMessageDelta < OpenAI::BaseModel + class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -610,7 +599,7 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -623,13 +612,13 @@ module OpenAI end end - class ThreadMessageCompleted < OpenAI::BaseModel + class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -639,10 +628,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.completed") @@ -653,13 +639,13 @@ module OpenAI end end - class ThreadMessageIncomplete < OpenAI::BaseModel + class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -669,10 +655,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. 
sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.incomplete") @@ -683,11 +666,11 @@ module OpenAI end end - class ErrorEvent < OpenAI::BaseModel + class ErrorEvent < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::ErrorObject) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -697,7 +680,7 @@ module OpenAI # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. sig do - params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :error) diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index f9ca4d29..e2eaccc9 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module AssistantTool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 36c345d4..79dcf2c8 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantToolChoice < OpenAI::BaseModel + class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # The type of the tool. If type is `function`, the function name must be set sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } attr_accessor :type @@ -11,12 +11,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::AssistantToolChoiceFunction)) } attr_reader :function - sig do - params( - function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::AnyHash)).void } attr_writer :function # Specifies a tool the model should use. Use to force the model to call a specific @@ -24,7 +19,7 @@ module OpenAI sig do params( type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -45,7 +40,7 @@ module OpenAI # The type of the tool. 
If type is `function`, the function name must be set module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi index 084e57a8..bbf08c85 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantToolChoiceFunction < OpenAI::BaseModel + class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel # The name of the function to call. sig { returns(String) } attr_accessor :name diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 44079027..3a3a1f7c 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -11,14 +11,14 @@ module OpenAI # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. module AssistantToolChoiceOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools before # responding to the user. module Auto - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 929c5b35..f14621d3 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class AssistantUpdateParams < OpenAI::BaseModel + class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -98,9 +98,7 @@ module OpenAI sig do params( - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -129,7 +127,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -159,25 +157,23 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, 
OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -238,7 +234,7 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } def self.variants @@ -299,7 +295,7 @@ module OpenAI T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter @@ -307,7 +303,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -319,10 +315,7 @@ module OpenAI sig do params( - file_search: T.any( - OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -336,12 +329,9 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - file_search: T.any( - OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -360,7 +350,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available # to the `code_interpreter` tool. There can be a maximum of 20 files associated @@ -380,7 +370,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. 
There can be a maximum of 1 vector store attached to diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi index 3e68afee..f629a549 100644 --- a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class CodeInterpreterTool < OpenAI::BaseModel + class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `code_interpreter` sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index e13ceaf5..7388dc69 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class FileSearchTool < OpenAI::BaseModel + class FileSearchTool < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -13,16 +13,14 @@ module OpenAI attr_reader :file_search sig do - params( - file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::Util::AnyHash) - ) + params(file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::AnyHash)) .void end attr_writer :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::Util::AnyHash), + file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -34,7 +32,7 @@ module OpenAI def to_hash end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between # 1 and 50 inclusive. @@ -60,7 +58,7 @@ module OpenAI sig do params( - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::AnyHash) ) .void end @@ -70,7 +68,7 @@ module OpenAI sig do params( max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -86,7 +84,7 @@ module OpenAI def to_hash end - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # The score threshold for the file search. All values must be a floating point # number between 0 and 1. sig { returns(Float) } @@ -131,7 +129,7 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index 8d337d24..c1b5305c 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -3,11 +3,11 @@ module OpenAI module Models module Beta - class FunctionTool < OpenAI::BaseModel + class FunctionTool < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash)).void } + sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash)).void } attr_writer :function # The type of tool being defined: `function` @@ -15,7 +15,7 @@ module OpenAI attr_accessor :type sig do - params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash), type: Symbol) + params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(function:, type: :function) diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 0c56b760..fe25c2b0 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -7,15 +7,15 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. module MessageStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class ThreadMessageCreated < OpenAI::BaseModel + class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -25,10 +25,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.created") @@ -39,13 +36,13 @@ module OpenAI end end - class ThreadMessageInProgress < OpenAI::BaseModel + class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -55,10 +52,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. 
sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.in_progress") @@ -69,13 +63,13 @@ module OpenAI end end - class ThreadMessageDelta < OpenAI::BaseModel + class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -86,7 +80,7 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -99,13 +93,13 @@ module OpenAI end end - class ThreadMessageCompleted < OpenAI::BaseModel + class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -115,10 +109,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.completed") @@ -129,13 +120,13 @@ module OpenAI end end - class ThreadMessageIncomplete < OpenAI::BaseModel + class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -145,10 +136,7 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. 
sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::Util::AnyHash), - event: Symbol - ) + params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.message.incomplete") diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index dddd0b7e..0b610488 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -7,14 +7,14 @@ module OpenAI # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. module RunStepStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class ThreadRunStepCreated < OpenAI::BaseModel + class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -25,7 +25,7 @@ module OpenAI # is created. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -38,12 +38,12 @@ module OpenAI end end - class ThreadRunStepInProgress < OpenAI::BaseModel + class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -54,7 +54,7 @@ module OpenAI # moves to an `in_progress` state. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -67,18 +67,13 @@ module OpenAI end end - class ThreadRunStepDelta < OpenAI::BaseModel + class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig do - params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -89,7 +84,7 @@ module OpenAI # are being streamed. 
sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -102,12 +97,12 @@ module OpenAI end end - class ThreadRunStepCompleted < OpenAI::BaseModel + class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -118,7 +113,7 @@ module OpenAI # is completed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -131,12 +126,12 @@ module OpenAI end end - class ThreadRunStepFailed < OpenAI::BaseModel + class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -147,7 +142,7 @@ module OpenAI # fails. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -160,12 +155,12 @@ module OpenAI end end - class ThreadRunStepCancelled < OpenAI::BaseModel + class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -176,7 +171,7 @@ module OpenAI # is cancelled. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) @@ -189,12 +184,12 @@ module OpenAI end end - class ThreadRunStepExpired < OpenAI::BaseModel + class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # Represents a step in execution of a run. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -205,7 +200,7 @@ module OpenAI # expires. 
sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), event: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 2b6188fc..cedd2400 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -6,15 +6,15 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. module RunStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class ThreadRunCreated < OpenAI::BaseModel + class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -23,7 +23,7 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.created") @@ -34,13 +34,13 @@ module OpenAI end end - class ThreadRunQueued < OpenAI::BaseModel + class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -49,7 +49,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.queued") @@ -60,13 +60,13 @@ module OpenAI end end - class ThreadRunInProgress < OpenAI::BaseModel + class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -75,7 +75,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.in_progress") @@ -86,13 +86,13 @@ module OpenAI end end - class ThreadRunRequiresAction < OpenAI::BaseModel + class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -101,7 +101,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.requires_action") @@ -112,13 +112,13 @@ module OpenAI end end - class ThreadRunCompleted < OpenAI::BaseModel + class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -127,7 +127,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.completed") @@ -138,13 +138,13 @@ module OpenAI end end - class ThreadRunIncomplete < OpenAI::BaseModel + class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -153,7 +153,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. 
sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.incomplete") @@ -164,13 +164,13 @@ module OpenAI end end - class ThreadRunFailed < OpenAI::BaseModel + class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -179,7 +179,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.failed") @@ -190,13 +190,13 @@ module OpenAI end end - class ThreadRunCancelling < OpenAI::BaseModel + class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -205,7 +205,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelling") @@ -216,13 +216,13 @@ module OpenAI end end - class ThreadRunCancelled < OpenAI::BaseModel + class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -231,7 +231,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.cancelled") @@ -242,13 +242,13 @@ module OpenAI end end - class ThreadRunExpired < OpenAI::BaseModel + class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -257,7 +257,7 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::Util::AnyHash), event: Symbol) + params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end def self.new(data:, event: :"thread.run.expired") diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 052a7fc3..b6dd7fda 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class Thread < OpenAI::BaseModel + class Thread < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -34,7 +34,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -47,7 +47,7 @@ module OpenAI id: String, created_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::Util::AnyHash)), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -70,13 +70,13 @@ module OpenAI def to_hash end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) ) .void end @@ -87,7 +87,7 @@ module OpenAI sig do params( - file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) + file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -99,8 +99,8 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::Util::AnyHash), - file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::Util::AnyHash) + code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), + file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -119,7 +119,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. 
There can be a maximum of 20 files # associated with the tool. @@ -138,7 +138,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index c99ab223..c0aab016 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class ThreadCreateAndRunParams < OpenAI::BaseModel + class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -105,9 +105,7 @@ module OpenAI attr_reader :thread sig do - params( - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash) - ) + params(thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash)) .void end attr_writer :thread @@ -140,9 +138,7 @@ module OpenAI sig do params( - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -181,7 +177,7 @@ module OpenAI sig do params( truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ) ) .void @@ -201,28 +197,26 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash), + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -230,9 +224,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) 
.returns(T.attached_class) end @@ -307,14 +301,14 @@ module OpenAI # model associated with the assistant. If not, the model associated with the # assistant will be used. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } def self.variants end end - class Thread < OpenAI::BaseModel + class Thread < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message])) } @@ -322,7 +316,7 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::Util::AnyHash)] + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::AnyHash)] ) .void end @@ -347,10 +341,7 @@ module OpenAI sig do params( tool_resources: T.nilable( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, - OpenAI::Internal::Util::AnyHash - ) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Internal::AnyHash) ) ) .void @@ -361,13 +352,10 @@ module OpenAI # an empty thread will be created. sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::Util::AnyHash)], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable( - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, - OpenAI::Internal::Util::AnyHash - ) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Internal::AnyHash) ) ) .returns(T.attached_class) @@ -388,7 +376,7 @@ module OpenAI def to_hash end - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # The text contents of the message. sig do returns( @@ -435,7 +423,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -446,7 +434,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ), @@ -482,7 +470,7 @@ module OpenAI # The text contents of the message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -504,7 +492,7 @@ module OpenAI MessageContentPartParamArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Internal::Type::Converter ) end @@ -516,7 +504,7 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } @@ -535,7 +523,7 @@ module OpenAI end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -563,7 +551,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -578,7 +566,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] @@ -606,9 +594,9 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -634,7 +622,7 @@ module OpenAI end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter @@ -642,7 +630,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -656,7 +644,7 @@ module OpenAI params( file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -671,11 +659,11 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) @@ -695,7 +683,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -714,7 +702,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -743,7 +731,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -757,7 +745,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -778,7 +766,7 @@ module OpenAI def to_hash end - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. sig do @@ -797,7 +785,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -827,7 +815,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -857,9 +845,9 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -875,7 +863,7 @@ module OpenAI end end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel sig do returns( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -887,7 +875,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -902,7 +890,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -923,7 +911,7 @@ module OpenAI def to_hash end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. 
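As a usage sketch of what the relaxed sigs above buy: every position that previously demanded a model instance now also admits an `OpenAI::Internal::AnyHash`, so a nested structure such as a vector store's chunking strategy can be written as literal hashes. The constant path and the `chunking_strategy`/`file_ids` parameter names come from the sigs in this hunk; the token counts are illustrative, and the `chunk_overlap_tokens` attribute name is inferred from the `max_chunk_size_tokens` comment above rather than shown here.

# Minimal sketch: plain hashes flow through the T.any(..., OpenAI::Internal::AnyHash) unions.
vector_store =
  OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore.new(
    chunking_strategy: {
      type: :static,
      static: {
        max_chunk_size_tokens: 800, # upper bound on chunk size
        chunk_overlap_tokens: 400   # default overlap; must not exceed half the bound
      }
    },
    file_ids: []
  )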
@@ -966,7 +954,7 @@ module OpenAI end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter @@ -974,7 +962,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -988,7 +976,7 @@ module OpenAI params( file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -1003,11 +991,11 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), file_search: T.any( OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) @@ -1027,7 +1015,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -1046,7 +1034,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -1068,7 +1056,7 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -1080,7 +1068,7 @@ module OpenAI end end - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be @@ -1122,7 +1110,7 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 2e45ad3f..2f0b0063 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class ThreadCreateParams < OpenAI::BaseModel + class ThreadCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)] + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)] ) .void end @@ -38,7 +38,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -46,10 +46,10 @@ module OpenAI sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -70,7 +70,7 @@ module OpenAI def to_hash end - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # The text contents of the message. sig do returns( @@ -117,7 +117,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -125,7 +125,7 @@ module OpenAI ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]) ) @@ -159,7 +159,7 @@ module OpenAI # The text contents of the message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -181,7 +181,7 @@ module OpenAI MessageContentPartParamArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Internal::Type::Converter ) end @@ -193,7 +193,7 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. 
Use this # value to insert messages from the assistant into the conversation. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } OrSymbol = @@ -207,7 +207,7 @@ module OpenAI end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -235,7 +235,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -250,7 +250,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] @@ -278,9 +278,9 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -306,16 +306,13 @@ module OpenAI end end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash - ) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) ) .void end @@ -326,10 +323,7 @@ module OpenAI sig do params( - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -341,14 +335,8 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash - ), - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), + file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -367,7 +355,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -386,7 +374,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -413,7 +401,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -427,7 +415,7 @@ module OpenAI vector_stores: T::Array[ T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -448,7 +436,7 @@ module OpenAI def to_hash end - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. sig do @@ -467,7 +455,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) @@ -497,7 +485,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), file_ids: T::Array[String], @@ -527,9 +515,9 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -545,7 +533,7 @@ module OpenAI end end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel sig do returns( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -557,7 +545,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -572,7 +560,7 @@ module OpenAI params( static: T.any( OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -593,7 +581,7 @@ module OpenAI def to_hash end - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. 
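The same hash shorthand applies to `ThreadCreateParams` as rewritten above. A hedged sketch, assuming the `role` enum takes the documented `:user`/`:assistant` symbols and using a placeholder file ID:

params = OpenAI::Models::Beta::ThreadCreateParams.new(
  messages: [
    {
      role: :user,                                # OrSymbol: bare symbols are accepted
      content: "Summarise the attached report.",  # Content union: a String or an array of content parts
      attachments: [
        {file_id: "file-abc123", tools: [{type: :code_interpreter}]} # placeholder ID
      ]
    }
  ],
  metadata: {origin: "sketch"}                    # T::Hash[Symbol, String]
)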
diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/lib/openai/models/beta/thread_delete_params.rbi index 0a775418..be5184db 100644 --- a/rbi/lib/openai/models/beta/thread_delete_params.rbi +++ b/rbi/lib/openai/models/beta/thread_delete_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Beta - class ThreadDeleteParams < OpenAI::BaseModel + class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_deleted.rbi b/rbi/lib/openai/models/beta/thread_deleted.rbi index cc908f49..68939994 100644 --- a/rbi/lib/openai/models/beta/thread_deleted.rbi +++ b/rbi/lib/openai/models/beta/thread_deleted.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class ThreadDeleted < OpenAI::BaseModel + class ThreadDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi index 502dbe33..e42b6057 100644 --- a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Beta - class ThreadRetrieveParams < OpenAI::BaseModel + class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index 34ddbb64..1f9f6531 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -3,13 +3,13 @@ module OpenAI module Models module Beta - class ThreadStreamEvent < OpenAI::BaseModel + class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash)).void } + sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash)).void } attr_writer :data sig { returns(Symbol) } @@ -27,7 +27,7 @@ module OpenAI # created. 
sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::Util::AnyHash), + data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), enabled: T::Boolean, event: Symbol ) diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 0f78f9cb..a1f2bf11 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Beta - class ThreadUpdateParams < OpenAI::BaseModel + class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -25,7 +25,7 @@ module OpenAI sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)) ) .void end @@ -34,8 +34,8 @@ module OpenAI sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -55,16 +55,13 @@ module OpenAI def to_hash end - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter)) } attr_reader :code_interpreter sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash - ) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) ) .void end @@ -75,10 +72,7 @@ module OpenAI sig do params( - file_search: T.any( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -90,14 +84,8 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::Util::AnyHash - ), - file_search: T.any( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), + file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -116,7 +104,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. 
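`ThreadUpdateParams` follows suit. A rough sketch under the same assumptions, with placeholder IDs, and with the `file_ids`/`vector_store_ids` attribute names taken from the public tool-resources API rather than from this hunk, which only shows the class headers:

update = OpenAI::Models::Beta::ThreadUpdateParams.new(
  metadata: {project: "demo"},
  tool_resources: {
    code_interpreter: {file_ids: ["file-abc123"]},  # at most 20 files, per the comment above
    file_search: {vector_store_ids: ["vs_abc123"]}  # at most 1 vector store per thread
  }
)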
@@ -135,7 +123,7 @@ module OpenAI end end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 02f8a33e..2579d047 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -8,7 +8,7 @@ module OpenAI # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index 133c007f..a0c01cad 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -8,7 +8,7 @@ module OpenAI # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. module AnnotationDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index 2aaa8fe0..b87f7920 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FileCitationAnnotation < OpenAI::BaseModel + class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel sig { returns(Integer) } attr_accessor :end_index @@ -13,10 +13,7 @@ module OpenAI sig do params( - file_citation: T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, - OpenAI::Internal::Util::AnyHash - ) + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Internal::AnyHash) ) .void end @@ -39,10 +36,7 @@ module OpenAI sig do params( end_index: Integer, - file_citation: T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, - OpenAI::Internal::Util::AnyHash - ), + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Internal::AnyHash), start_index: Integer, text: String, type: Symbol @@ -67,7 +61,7 @@ module OpenAI def to_hash end - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # The ID of the specific File the citation is from. sig { returns(String) } attr_accessor :file_id diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 9de4eb01..c8a41dac 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FileCitationDeltaAnnotation < OpenAI::BaseModel + class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # The index of the annotation in the text content part. 
sig { returns(Integer) } attr_accessor :index @@ -24,10 +24,7 @@ module OpenAI sig do params( - file_citation: T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, - OpenAI::Internal::Util::AnyHash - ) + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Internal::AnyHash) ) .void end @@ -53,10 +50,7 @@ module OpenAI params( index: Integer, end_index: Integer, - file_citation: T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, - OpenAI::Internal::Util::AnyHash - ), + file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Internal::AnyHash), start_index: Integer, text: String, type: Symbol @@ -89,7 +83,7 @@ module OpenAI def to_hash end - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # The ID of the specific File the citation is from. sig { returns(T.nilable(String)) } attr_reader :file_id diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 7e8487a3..d3f54f98 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FilePathAnnotation < OpenAI::BaseModel + class FilePathAnnotation < OpenAI::Internal::Type::BaseModel sig { returns(Integer) } attr_accessor :end_index @@ -13,7 +13,7 @@ module OpenAI sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::Util::AnyHash) + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::AnyHash) ) .void end @@ -35,7 +35,7 @@ module OpenAI sig do params( end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::Util::AnyHash), + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::AnyHash), start_index: Integer, text: String, type: Symbol @@ -60,7 +60,7 @@ module OpenAI def to_hash end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file that was generated. sig { returns(String) } attr_accessor :file_id diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index 51269c89..a47885b8 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class FilePathDeltaAnnotation < OpenAI::BaseModel + class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # The index of the annotation in the text content part. 
sig { returns(Integer) } attr_accessor :index @@ -24,7 +24,7 @@ module OpenAI sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::Util::AnyHash) + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::AnyHash) ) .void end @@ -49,7 +49,7 @@ module OpenAI params( index: Integer, end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::Util::AnyHash), + file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::AnyHash), start_index: Integer, text: String, type: Symbol @@ -75,7 +75,7 @@ module OpenAI def to_hash end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file that was generated. sig { returns(T.nilable(String)) } attr_reader :file_id diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 2ce2e93b..f16895d6 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFile < OpenAI::BaseModel + class ImageFile < OpenAI::Internal::Type::BaseModel # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. @@ -33,7 +33,7 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 301843bc..133cd72c 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - class ImageFileContentBlock < OpenAI::BaseModel + class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Beta::Threads::ImageFile) } attr_reader :image_file - sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::Util::AnyHash)).void } + sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash)).void } attr_writer :image_file # Always `image_file`. @@ -19,7 +19,7 @@ module OpenAI # in the content of a message. 
sig do params( - image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::Util::AnyHash), + image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index a0e96760..6d41ec50 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFileDelta < OpenAI::BaseModel + class ImageFileDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } @@ -39,7 +39,7 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 72d55cbb..7cb2f9a0 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageFileDeltaBlock < OpenAI::BaseModel + class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # The index of the content part in the message. sig { returns(Integer) } attr_accessor :index @@ -16,10 +16,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta)) } attr_reader :image_file - sig do - params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::Util::AnyHash)) - .void - end + sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::AnyHash)).void } attr_writer :image_file # References an image [File](https://platform.openai.com/docs/api-reference/files) @@ -27,7 +24,7 @@ module OpenAI sig do params( index: Integer, - image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::Util::AnyHash), + image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 0795a0a5..7bfaa664 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # The external URL of the image, must be a supported image types: jpeg, jpg, png, # gif, webp. sig { returns(String) } @@ -32,7 +32,7 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index ec7592c9..2dff246b 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - class ImageURLContentBlock < OpenAI::BaseModel + class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Beta::Threads::ImageURL) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::Util::AnyHash)).void } + sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash)).void } attr_writer :image_url # The type of the content part. @@ -17,10 +17,7 @@ module OpenAI # References an image URL in the content of a message. sig do - params( - image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(image_url:, type: :image_url) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index aa4152eb..0fbc73d8 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURLDelta < OpenAI::BaseModel + class ImageURLDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } @@ -37,7 +37,7 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index e337c120..75462fba 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class ImageURLDeltaBlock < OpenAI::BaseModel + class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # The index of the content part in the message. sig { returns(Integer) } attr_accessor :index @@ -16,17 +16,14 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta)) } attr_reader :image_url - sig do - params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::Util::AnyHash)) - .void - end + sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::AnyHash)).void } attr_writer :image_url # References an image URL in the content of a message. 
sig do params( index: Integer, - image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::Util::AnyHash), + image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 03266cc5..6c7e5c86 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -52,9 +52,7 @@ module OpenAI sig do params( - incomplete_details: T.nilable( - T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::Util::AnyHash) - ) + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::AnyHash)) ) .void end @@ -99,14 +97,12 @@ module OpenAI params( id: String, assistant_id: T.nilable(String), - attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Internal::Util::AnyHash)] - ), + attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Internal::AnyHash)]), completed_at: T.nilable(Integer), content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock @@ -114,9 +110,7 @@ module OpenAI ], created_at: Integer, incomplete_at: T.nilable(Integer), - incomplete_details: T.nilable( - T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::Util::AnyHash) - ), + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), role: OpenAI::Models::Beta::Threads::Message::Role::OrSymbol, run_id: T.nilable(String), @@ -175,7 +169,7 @@ module OpenAI def to_hash end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -203,7 +197,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -218,7 +212,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] @@ -246,9 +240,9 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel + class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -273,7 +267,7 @@ module OpenAI end end - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason the message is incomplete. sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } attr_accessor :reason @@ -294,7 +288,7 @@ module OpenAI # The reason the message is incomplete. module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } @@ -322,7 +316,7 @@ module OpenAI # The entity that produced the message. One of `user` or `assistant`. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } OrSymbol = @@ -339,7 +333,7 @@ module OpenAI # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 3b799135..eae2a6d4 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -7,7 +7,7 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. module MessageContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index 7cfeab9e..7be07281 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -7,7 +7,7 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
module MessageContentDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index e2e3c952..a6e0c71f 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -7,7 +7,7 @@ module OpenAI # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. module MessageContentPartParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 9ad49558..5ed1214c 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageCreateParams < OpenAI::BaseModel + class MessageCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -54,7 +54,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -62,10 +62,10 @@ module OpenAI ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -98,7 +98,7 @@ module OpenAI # The text contents of the message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -120,7 +120,7 @@ module OpenAI MessageContentPartParamArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Internal::Type::Converter ) end @@ -132,7 +132,7 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } OrSymbol = @@ -146,7 +146,7 @@ module OpenAI end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -174,7 +174,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -189,7 +189,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] @@ -217,9 +217,9 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index 71feebb1..0dfa827d 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageDeleteParams < OpenAI::BaseModel + class MessageDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -16,7 +16,7 @@ module OpenAI thread_id: String, request_options: T.any( OpenAI::RequestOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message_deleted.rbi b/rbi/lib/openai/models/beta/threads/message_deleted.rbi index df1dc2e2..fd6cfd06 100644 --- a/rbi/lib/openai/models/beta/threads/message_deleted.rbi +++ b/rbi/lib/openai/models/beta/threads/message_deleted.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageDeleted < OpenAI::BaseModel + class MessageDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 151f4efe..5ae1fdd1 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageDelta < OpenAI::BaseModel + class MessageDelta < OpenAI::Internal::Type::BaseModel # The content of the message in array of text and/or images. sig do returns( @@ -27,7 +27,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -51,7 +51,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock @@ -85,7 +85,7 @@ module OpenAI # The entity that produced the message. One of `user` or `assistant`. 
module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index bae39ef8..3fd98304 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageDeltaEvent < OpenAI::BaseModel + class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # The identifier of the message, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -13,7 +13,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::MessageDelta) } attr_reader :delta - sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::Util::AnyHash)).void } + sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::AnyHash)).void } attr_writer :delta # The object type, which is always `thread.message.delta`. @@ -25,7 +25,7 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::Util::AnyHash), + delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 3664eaeb..2a25adec 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageListParams < OpenAI::BaseModel + class MessageListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -58,7 +58,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -84,7 +84,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index bc78f6e0..62cc19a2 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageRetrieveParams < OpenAI::BaseModel + class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -16,7 +16,7 @@ module OpenAI thread_id: String, request_options: T.any( OpenAI::RequestOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 0d2c98de..51074aac 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class MessageUpdateParams < OpenAI::BaseModel + class MessageUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -24,7 +24,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi index b9350051..334ce796 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RefusalContentBlock < OpenAI::BaseModel + class RefusalContentBlock < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :refusal diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi index 72b6092e..2caef1c7 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RefusalDeltaBlock < OpenAI::BaseModel + class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # The index of the refusal part in the message. 
sig { returns(Integer) } attr_accessor :index diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 762191b7..795f0789 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RequiredActionFunctionToolCall < OpenAI::BaseModel + class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call. This ID must be referenced when you submit the tool # outputs in using the # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) @@ -18,10 +18,7 @@ module OpenAI sig do params( - function: T.any( - OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, - OpenAI::Internal::Util::AnyHash - ) + function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Internal::AnyHash) ) .void end @@ -36,10 +33,7 @@ module OpenAI sig do params( id: String, - function: T.any( - OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, - OpenAI::Internal::Util::AnyHash - ), + function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -56,7 +50,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The arguments that the model expects you to pass to the function. sig { returns(String) } attr_accessor :arguments diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index d19d330f..47341501 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class Run < OpenAI::BaseModel + class Run < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. 
sig { returns(String) } attr_accessor :id @@ -42,7 +42,7 @@ module OpenAI sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::Util::AnyHash)) + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::AnyHash)) ) .void end @@ -60,7 +60,7 @@ module OpenAI sig do params( - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::Util::AnyHash)) + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::AnyHash)) ) .void end @@ -108,7 +108,7 @@ module OpenAI sig do params( - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::Util::AnyHash)) + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::AnyHash)) ) .void end @@ -205,7 +205,7 @@ module OpenAI sig do params( - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::Util::AnyHash)) + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::AnyHash)) ) .void end @@ -216,12 +216,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } attr_reader :usage - sig do - params( - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::Util::AnyHash)) - ) - .void - end + sig { params(usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::AnyHash))).void } attr_writer :usage # The sampling temperature used for this run. If not set, defaults to 1. @@ -243,20 +238,20 @@ module OpenAI created_at: Integer, expires_at: T.nilable(Integer), failed_at: T.nilable(Integer), - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::Util::AnyHash)), + incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::AnyHash)), instructions: String, - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::Util::AnyHash)), + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::AnyHash)), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: String, parallel_tool_calls: T::Boolean, - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::Util::AnyHash)), + required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::AnyHash)), response_format: T.nilable( T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -268,19 +263,19 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::Util::AnyHash)), - usage: 
T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::Util::AnyHash)), + truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::AnyHash)), temperature: T.nilable(Float), top_p: T.nilable(Float), object: Symbol @@ -373,7 +368,7 @@ module OpenAI def to_hash end - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } @@ -398,7 +393,7 @@ module OpenAI # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } @@ -416,7 +411,7 @@ module OpenAI end end - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } attr_accessor :code @@ -442,7 +437,7 @@ module OpenAI # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } OrSymbol = @@ -460,17 +455,14 @@ module OpenAI end end - class RequiredAction < OpenAI::BaseModel + class RequiredAction < OpenAI::Internal::Type::BaseModel # Details on the tool outputs needed for this run to continue. sig { returns(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) } attr_reader :submit_tool_outputs sig do params( - submit_tool_outputs: T.any( - OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, - OpenAI::Internal::Util::AnyHash - ) + submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Internal::AnyHash) ) .void end @@ -484,10 +476,7 @@ module OpenAI # is required. sig do params( - submit_tool_outputs: T.any( - OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, - OpenAI::Internal::Util::AnyHash - ), + submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -504,7 +493,7 @@ module OpenAI def to_hash end - class SubmitToolOutputs < OpenAI::BaseModel + class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # A list of the relevant tool calls. sig { returns(T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) } attr_accessor :tool_calls @@ -512,7 +501,7 @@ module OpenAI # Details on the tool outputs needed for this run to continue. 
sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Internal::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end @@ -525,7 +514,7 @@ module OpenAI end end - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be @@ -567,7 +556,7 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } @@ -584,7 +573,7 @@ module OpenAI end end - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # Number of completion tokens used over the course of the run. sig { returns(Integer) } attr_accessor :completion_tokens diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index 92c6ca3b..d82ac9df 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunCancelParams < OpenAI::BaseModel + class RunCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -16,7 +16,7 @@ module OpenAI thread_id: String, request_options: T.any( OpenAI::RequestOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index e7854e85..a34ebb8a 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunCreateParams < OpenAI::BaseModel + class RunCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -185,7 +185,7 @@ module OpenAI sig do params( truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ) ) .void @@ -198,7 +198,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), @@ -211,7 +211,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - 
OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -221,14 +221,14 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -236,9 +236,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -312,7 +312,7 @@ module OpenAI def to_hash end - class AdditionalMessage < OpenAI::BaseModel + class AdditionalMessage < OpenAI::Internal::Type::BaseModel # The text contents of the message. sig do returns( @@ -363,7 +363,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -374,7 +374,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ), @@ -410,7 +410,7 @@ module OpenAI # The text contents of the message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -432,7 +432,7 @@ module OpenAI MessageContentPartParamArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], OpenAI::Internal::Type::Converter ) end @@ -444,7 +444,7 @@ module OpenAI # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } @@ -470,7 +470,7 @@ module OpenAI end end - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -498,7 +498,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -513,7 +513,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] @@ -541,9 +541,9 @@ module OpenAI end module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -574,14 +574,14 @@ module OpenAI # model associated with the assistant. If not, the model associated with the # assistant will be used. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } def self.variants end end - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be @@ -623,7 +623,7 @@ module OpenAI # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index da3c8bc7..17f72cc4 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunListParams < OpenAI::BaseModel + class RunListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -50,7 +50,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -75,7 +75,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index f976ec26..60f2a769 100644 --- a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunRetrieveParams < OpenAI::BaseModel + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -16,7 +16,7 @@ module OpenAI thread_id: String, request_options: T.any( OpenAI::RequestOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index 59df355d..e9d84d19 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -8,7 +8,7 @@ module OpenAI # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. module RunStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 43e56a5e..1c8321a5 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunSubmitToolOutputsParams < OpenAI::BaseModel + class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -18,13 +18,8 @@ module OpenAI sig do params( thread_id: String, - tool_outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, - OpenAI::Internal::Util::AnyHash - ) - ], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -44,7 +39,7 @@ module OpenAI def to_hash end - class ToolOutput < OpenAI::BaseModel + class ToolOutput < OpenAI::Internal::Type::BaseModel # The output of the tool call to be submitted to continue the run. 
sig { returns(T.nilable(String)) } attr_reader :output diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index d4946a6e..a1a1a651 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class RunUpdateParams < OpenAI::BaseModel + class RunUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -24,7 +24,7 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi index 76b660b5..20f4803a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class CodeInterpreterLogs < OpenAI::BaseModel + class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # The index of the output in the outputs array. sig { returns(Integer) } attr_accessor :index diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 3b1be064..bff183df 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class CodeInterpreterOutputImage < OpenAI::BaseModel + class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # The index of the output in the outputs array. sig { returns(Integer) } attr_accessor :index @@ -19,10 +19,7 @@ module OpenAI sig do params( - image: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, - OpenAI::Internal::Util::AnyHash - ) + image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Internal::AnyHash) ) .void end @@ -31,10 +28,7 @@ module OpenAI sig do params( index: Integer, - image: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, - OpenAI::Internal::Util::AnyHash - ), + image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -51,7 +45,7 @@ module OpenAI def to_hash end - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. 
sig { returns(T.nilable(String)) } diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index f0524366..dcf372d2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class CodeInterpreterToolCall < OpenAI::BaseModel + class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call. sig { returns(String) } attr_accessor :id @@ -18,7 +18,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -36,7 +36,7 @@ module OpenAI id: String, code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -58,7 +58,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # The input to the Code Interpreter tool call. sig { returns(String) } attr_accessor :input @@ -85,7 +85,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] @@ -114,9 +114,9 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel # The text output from the Code Interpreter tool call. sig { returns(String) } attr_accessor :logs @@ -135,7 +135,7 @@ module OpenAI end end - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel sig do returns( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image @@ -147,7 +147,7 @@ module OpenAI params( image: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -162,7 +162,7 @@ module OpenAI params( image: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -183,7 +183,7 @@ module OpenAI def to_hash end - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. 
sig { returns(String) } diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 999ac980..a3b42808 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class CodeInterpreterToolCallDelta < OpenAI::BaseModel + class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # The index of the tool call in the tool calls array. sig { returns(Integer) } attr_accessor :index @@ -30,7 +30,7 @@ module OpenAI params( code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -44,7 +44,7 @@ module OpenAI id: String, code_interpreter: T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -67,7 +67,7 @@ module OpenAI def to_hash end - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel # The input to the Code Interpreter tool call. sig { returns(T.nilable(String)) } attr_reader :input @@ -97,7 +97,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -113,7 +113,7 @@ module OpenAI outputs: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] @@ -142,7 +142,7 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index cbbf08ee..bdd404fb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class FileSearchToolCall < OpenAI::BaseModel + class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call object. sig { returns(String) } attr_accessor :id @@ -16,10 +16,7 @@ module OpenAI sig do params( - file_search: T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, - OpenAI::Internal::Util::AnyHash - ) + file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Internal::AnyHash) ) .void end @@ -33,10 +30,7 @@ module OpenAI sig do params( id: String, - file_search: T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, - OpenAI::Internal::Util::AnyHash - ), + file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -53,7 +47,7 @@ module OpenAI def to_hash end - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel # The ranking options for the file search. 
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions)) } attr_reader :ranking_options @@ -62,7 +56,7 @@ module OpenAI params( ranking_options: T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -78,7 +72,7 @@ module OpenAI results: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -91,12 +85,12 @@ module OpenAI params( ranking_options: T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), results: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -117,7 +111,7 @@ module OpenAI def to_hash end - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. sig do @@ -158,7 +152,7 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } @@ -193,7 +187,7 @@ module OpenAI end end - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel # The ID of the file that result was found in. sig { returns(String) } attr_accessor :file_id @@ -221,7 +215,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -238,7 +232,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -261,7 +255,7 @@ module OpenAI def to_hash end - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel # The text content of the file. sig { returns(T.nilable(String)) } attr_reader :text @@ -311,7 +305,7 @@ module OpenAI # The type of the content. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 8dda0ce6..5474e165 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class FileSearchToolCallDelta < OpenAI::BaseModel + class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # For now, this is always going to be an empty object. 
sig { returns(T.anything) } attr_accessor :file_search diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index f4189ea3..2d1aa5a6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class FunctionToolCall < OpenAI::BaseModel + class FunctionToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call object. sig { returns(String) } attr_accessor :id @@ -16,7 +16,7 @@ module OpenAI sig do params( - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::Util::AnyHash) + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::AnyHash) ) .void end @@ -30,7 +30,7 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::Util::AnyHash), + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -47,7 +47,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The arguments passed to the function. sig { returns(String) } attr_accessor :arguments diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index b478742b..22b29632 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class FunctionToolCallDelta < OpenAI::BaseModel + class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # The index of the tool call in the tool calls array. sig { returns(Integer) } attr_accessor :index @@ -28,10 +28,7 @@ module OpenAI sig do params( - function: T.any( - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, - OpenAI::Internal::Util::AnyHash - ) + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Internal::AnyHash) ) .void end @@ -41,10 +38,7 @@ module OpenAI params( index: Integer, id: String, - function: T.any( - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, - OpenAI::Internal::Util::AnyHash - ), + function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -66,7 +60,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The arguments passed to the function. 
sig { returns(T.nilable(String)) } attr_reader :arguments diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 2f3d4925..f279cb12 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class MessageCreationStepDetails < OpenAI::BaseModel + class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation) } attr_reader :message_creation @@ -13,7 +13,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -29,7 +29,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -50,7 +50,7 @@ module OpenAI def to_hash end - class MessageCreation < OpenAI::BaseModel + class MessageCreation < OpenAI::Internal::Type::BaseModel # The ID of the message that was created by this run step. sig { returns(String) } attr_accessor :message_id diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index c068ad2f..af9b15a8 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class RunStep < OpenAI::BaseModel + class RunStep < OpenAI::Internal::Type::BaseModel # The identifier of the run step, which can be referenced in API endpoints. 
sig { returns(String) } attr_accessor :id @@ -44,7 +44,7 @@ module OpenAI sig do params( - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::Util::AnyHash)) + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::AnyHash)) ) .void end @@ -100,7 +100,7 @@ module OpenAI sig do params( - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::Util::AnyHash)) + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::AnyHash)) ) .void end @@ -116,18 +116,18 @@ module OpenAI created_at: Integer, expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::Util::AnyHash)), + last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::OrSymbol, step_details: T.any( OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails ), thread_id: String, type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::OrSymbol, - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::Util::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -181,7 +181,7 @@ module OpenAI def to_hash end - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel # One of `server_error` or `rate_limit_exceeded`. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } attr_accessor :code @@ -210,7 +210,7 @@ module OpenAI # One of `server_error` or `rate_limit_exceeded`. module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } @@ -231,7 +231,7 @@ module OpenAI # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } OrSymbol = @@ -250,7 +250,7 @@ module OpenAI # The details of the run step. module StepDetails - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -264,7 +264,7 @@ module OpenAI # The type of run step, which can be either `message_creation` or `tool_calls`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } OrSymbol = @@ -279,7 +279,7 @@ module OpenAI end end - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # Number of completion tokens used over the course of the run step. 
sig { returns(Integer) } attr_accessor :completion_tokens diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index fa423211..a599ca89 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class RunStepDelta < OpenAI::BaseModel + class RunStepDelta < OpenAI::Internal::Type::BaseModel # The details of the run step. sig do returns( @@ -23,7 +23,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) @@ -36,7 +36,7 @@ module OpenAI params( step_details: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject ) ) @@ -61,7 +61,7 @@ module OpenAI # The details of the run step. module StepDetails - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 1369d183..fbaa33df 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class RunStepDeltaEvent < OpenAI::BaseModel + class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -14,10 +14,7 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) } attr_reader :delta - sig do - params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::Util::AnyHash)) - .void - end + sig { params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::AnyHash)).void } attr_writer :delta # The object type, which is always `thread.run.step.delta`. @@ -29,7 +26,7 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::Util::AnyHash), + delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 3408ca06..d4dd14e5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class RunStepDeltaMessageDelta < OpenAI::BaseModel + class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # Always `message_creation`. 
sig { returns(Symbol) } attr_accessor :type @@ -17,7 +17,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -29,7 +29,7 @@ module OpenAI params( message_creation: T.any( OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -50,7 +50,7 @@ module OpenAI def to_hash end - class MessageCreation < OpenAI::BaseModel + class MessageCreation < OpenAI::Internal::Type::BaseModel # The ID of the message that was created by this run step. sig { returns(T.nilable(String)) } attr_reader :message_id diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 8741e9f7..bee1919e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -6,7 +6,7 @@ module OpenAI module Threads module Runs module RunStepInclude - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 6b012ab4..e48d41bb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class StepListParams < OpenAI::BaseModel + class StepListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -69,7 +69,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -104,7 +104,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
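A sketch of building these list params with the `Order` values documented above, assuming the unset fields are optional as in the other generated params classes in this patch; `limit: 20` is an arbitrary illustrative value:

    require "openai"

    # `order:` is typed as OrSymbol, so a plain symbol works here.
    params = OpenAI::Models::Beta::Threads::Runs::StepListParams.new(
      order: :desc, # newest `created_at` first
      limit: 20
    )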
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index fd32d2e8..bd43d059 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class StepRetrieveParams < OpenAI::BaseModel + class StepRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -33,7 +33,7 @@ module OpenAI thread_id: String, run_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 22b596d5..8bdd948f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -7,7 +7,7 @@ module OpenAI module Runs # Details of the Code Interpreter tool call the run step was involved in. module ToolCall - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index 8af6cb90..a1e18b64 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -7,7 +7,7 @@ module OpenAI module Runs # Details of the Code Interpreter tool call the run step was involved in. module ToolCallDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 783946aa..d9051b8e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class ToolCallDeltaObject < OpenAI::BaseModel + class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # Always `tool_calls`. 
sig { returns(Symbol) } attr_accessor :type @@ -33,7 +33,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) @@ -49,7 +49,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta ) diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 3b5ca709..b85467c4 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads module Runs - class ToolCallsStepDetails < OpenAI::BaseModel + class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. @@ -32,7 +32,7 @@ module OpenAI tool_calls: T::Array[ T.any( OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall ) diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 96e9162b..933e4630 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class Text < OpenAI::BaseModel + class Text < OpenAI::Internal::Type::BaseModel sig do returns( T::Array[ @@ -26,7 +26,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::FilePathAnnotation ) ], diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 2293760e..3838ff04 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -4,11 +4,11 @@ module OpenAI module Models module Beta module Threads - class TextContentBlock < OpenAI::BaseModel + class TextContentBlock < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Beta::Threads::Text) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash)).void } attr_writer :text # Always `text`. @@ -17,7 +17,7 @@ module OpenAI # The text content that is part of a message. 
sig do - params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::Util::AnyHash), type: Symbol) + params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(text:, type: :text) diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi index f59cbbd5..94f3431d 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextContentBlockParam < OpenAI::BaseModel + class TextContentBlockParam < OpenAI::Internal::Type::BaseModel # Text content to be sent to the model sig { returns(String) } attr_accessor :text diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index a5aea637..8fda3e45 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextDelta < OpenAI::BaseModel + class TextDelta < OpenAI::Internal::Type::BaseModel sig do returns( T.nilable( @@ -24,7 +24,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ] @@ -45,7 +45,7 @@ module OpenAI annotations: T::Array[ T.any( OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation ) ], diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index 8d353d81..d3d54de5 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Beta module Threads - class TextDeltaBlock < OpenAI::BaseModel + class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # The index of the content part in the message. sig { returns(Integer) } attr_accessor :index @@ -16,14 +16,14 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Beta::Threads::TextDelta)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::AnyHash)).void } attr_writer :text # The text content that is part of a message. sig do params( index: Integer, - text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::Util::AnyHash), + text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 7086df0f..829cff8c 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletion < OpenAI::BaseModel + class ChatCompletion < OpenAI::Internal::Type::BaseModel # A unique identifier for the chat completion. 
sig { returns(String) } attr_accessor :id @@ -43,7 +43,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)).void } attr_writer :usage # Represents a chat completion response returned by model, based on the provided @@ -51,12 +51,12 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Internal::Util::AnyHash)], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Internal::AnyHash)], created: Integer, model: String, service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::OrSymbol), system_fingerprint: String, - usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash), + usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -91,7 +91,7 @@ module OpenAI def to_hash end - class Choice < OpenAI::BaseModel + class Choice < OpenAI::Internal::Type::BaseModel # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if @@ -111,7 +111,7 @@ module OpenAI sig do params( - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::AnyHash)) ) .void end @@ -121,15 +121,15 @@ module OpenAI sig { returns(OpenAI::Models::Chat::ChatCompletionMessage) } attr_reader :message - sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::Util::AnyHash)).void } + sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::AnyHash)).void } attr_writer :message sig do params( finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::Util::AnyHash)), - message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::Util::AnyHash) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::AnyHash)), + message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -157,7 +157,7 @@ module OpenAI # model called a tool, or `function_call` (deprecated) if the model called a # function. module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } OrSymbol = @@ -176,7 +176,7 @@ module OpenAI end end - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } attr_accessor :content @@ -188,12 +188,8 @@ module OpenAI # Log probability information for the choice. 
sig do params( - content: T.nilable( - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] - ), - refusal: T.nilable( - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] - ) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]) ) .returns(T.attached_class) end @@ -216,7 +212,7 @@ module OpenAI # The service tier used for processing the request. module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 40188371..a6f8cec6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAssistantMessageParam < OpenAI::BaseModel + class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # The role of the messages author, in this case `assistant`. sig { returns(Symbol) } attr_accessor :role @@ -16,7 +16,7 @@ module OpenAI sig do params( audio: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::AnyHash) ) ) .void @@ -50,10 +50,7 @@ module OpenAI sig do params( function_call: T.nilable( - T.any( - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, - OpenAI::Internal::Util::AnyHash - ) + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Internal::AnyHash) ) ) .void @@ -78,7 +75,7 @@ module OpenAI sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)] ) .void end @@ -88,7 +85,7 @@ module OpenAI sig do params( audio: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::AnyHash) ), content: T.nilable( T.any( @@ -96,21 +93,18 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartRefusal ) ] ) ), function_call: T.nilable( - T.any( - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, - OpenAI::Internal::Util::AnyHash - ) + T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Internal::AnyHash) ), name: String, refusal: T.nilable(String), - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)], + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)], role: Symbol ) .returns(T.attached_class) @@ -153,7 +147,7 @@ module OpenAI def to_hash end - class Audio < OpenAI::BaseModel + class Audio < 
OpenAI::Internal::Type::BaseModel # Unique identifier for a previous audio response from the model. sig { returns(String) } attr_accessor :id @@ -172,12 +166,12 @@ module OpenAI # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ArrayOfContentPart - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -208,12 +202,12 @@ module OpenAI ArrayOfContentPartArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], OpenAI::Internal::Type::Converter ) end - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index 7cd9cc72..fa10bf84 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAudio < OpenAI::BaseModel + class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # Unique identifier for this audio response. sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 938f0366..29b450f1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionAudioParam < OpenAI::BaseModel + class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } @@ -42,7 +42,7 @@ module OpenAI # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } OrSymbol = @@ -62,7 +62,7 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. 
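A sketch pairing the `Format` and `Voice` values documented above, assuming the usual generated keyword constructor:

    require "openai"

    audio = OpenAI::Models::Chat::ChatCompletionAudioParam.new(
      format: :wav,  # one of :wav, :mp3, :flac, :opus, :pcm16
      voice: :alloy  # one of the supported voices listed above
    )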
module Voice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } def self.variants diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index 719f97f8..d93d792a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionChunk < OpenAI::BaseModel + class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # A unique identifier for the chat completion. Each chunk has the same ID. sig { returns(String) } attr_accessor :id @@ -50,7 +50,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash))).void } + sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash))).void } attr_writer :usage # Represents a streamed chunk of a chat completion response returned by the model, @@ -59,12 +59,12 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Internal::Util::AnyHash)], + choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Internal::AnyHash)], created: Integer, model: String, service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::OrSymbol), system_fingerprint: String, - usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)), + usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -99,15 +99,13 @@ module OpenAI def to_hash end - class Choice < OpenAI::BaseModel + class Choice < OpenAI::Internal::Type::BaseModel # A chat completion delta generated by streamed model responses. sig { returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) } attr_reader :delta sig do - params( - delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::Util::AnyHash) - ) + params(delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::AnyHash)) .void end attr_writer :delta @@ -131,9 +129,7 @@ module OpenAI sig do params( - logprobs: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::Util::AnyHash) - ) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::AnyHash)) ) .void end @@ -141,12 +137,10 @@ module OpenAI sig do params( - delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::Util::AnyHash), + delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::AnyHash), finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol), index: Integer, - logprobs: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::Util::AnyHash) - ) + logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::AnyHash)) ) .returns(T.attached_class) end @@ -167,7 +161,7 @@ module OpenAI def to_hash end - class Delta < OpenAI::BaseModel + class Delta < OpenAI::Internal::Type::BaseModel # The contents of the chunk message. 
sig { returns(T.nilable(String)) } attr_accessor :content @@ -179,10 +173,7 @@ module OpenAI sig do params( - function_call: T.any( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, - OpenAI::Internal::Util::AnyHash - ) + function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Internal::AnyHash) ) .void end @@ -204,7 +195,7 @@ module OpenAI sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::AnyHash)] ) .void end @@ -214,13 +205,10 @@ module OpenAI sig do params( content: T.nilable(String), - function_call: T.any( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, - OpenAI::Internal::Util::AnyHash - ), + function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Internal::AnyHash), refusal: T.nilable(String), role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol, - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end @@ -242,7 +230,7 @@ module OpenAI def to_hash end - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -273,7 +261,7 @@ module OpenAI # The role of the author of this message. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } @@ -293,7 +281,7 @@ module OpenAI end end - class ToolCall < OpenAI::BaseModel + class ToolCall < OpenAI::Internal::Type::BaseModel sig { returns(Integer) } attr_accessor :index @@ -311,7 +299,7 @@ module OpenAI params( function: T.any( OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -331,7 +319,7 @@ module OpenAI id: String, function: T.any( OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol ) @@ -354,7 +342,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -383,7 +371,7 @@ module OpenAI # The type of the tool. Currently, only `function` is supported. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } @@ -416,7 +404,7 @@ module OpenAI # model called a tool, or `function_call` (deprecated) if the model called a # function. 
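The `FinishReason` tagged symbols above are ordinary Ruby symbols at runtime, so a streamed choice can be dispatched with a plain `case`. A sketch; the returned classification names are hypothetical:

    # `choice` is an OpenAI::Models::Chat::ChatCompletionChunk::Choice;
    # `finish_reason` stays nil until the model stops generating.
    def classify_finish(choice)
      case choice.finish_reason
      when :stop           then :natural_stop    # stop point or stop sequence hit
      when :length         then :truncated       # requested token limit reached
      when :content_filter then :filtered        # content omitted by a filter
      when :tool_calls, :function_call then :invoke_tools
      when nil             then :still_streaming
      end
    end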
module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } @@ -437,7 +425,7 @@ module OpenAI end end - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel # A list of message content tokens with log probability information. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } attr_accessor :content @@ -449,12 +437,8 @@ module OpenAI # Log probability information for the choice. sig do params( - content: T.nilable( - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] - ), - refusal: T.nilable( - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::Util::AnyHash)] - ) + content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]), + refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]) ) .returns(T.attached_class) end @@ -477,7 +461,7 @@ module OpenAI # The service tier used for processing the request. module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index df2d9e47..f53e1e23 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -6,15 +6,15 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ChatCompletionContentPart - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } attr_reader :file sig do params( - file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::Util::AnyHash) + file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::AnyHash) ) .void end @@ -28,7 +28,7 @@ module OpenAI # generation. sig do params( - file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::Util::AnyHash), + file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -40,7 +40,7 @@ module OpenAI def to_hash end - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel # The base64 encoded file data, used when passing the file to the model as a # string. 
sig { returns(T.nilable(String)) } diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 4ba1cace..3bb8586b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -3,13 +3,13 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartImage < OpenAI::BaseModel + class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) } attr_reader :image_url sig do params( - image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::Util::AnyHash) + image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::AnyHash) ) .void end @@ -22,7 +22,7 @@ module OpenAI # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). sig do params( - image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::Util::AnyHash), + image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -36,7 +36,7 @@ module OpenAI def to_hash end - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # Either a URL of the image or the base64 encoded image data. sig { returns(String) } attr_accessor :url @@ -71,7 +71,7 @@ module OpenAI # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 61ce5857..19fda6e5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -3,16 +3,13 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartInputAudio < OpenAI::BaseModel + class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) } attr_reader :input_audio sig do params( - input_audio: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, - OpenAI::Internal::Util::AnyHash - ) + input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Internal::AnyHash) ) .void end @@ -25,10 +22,7 @@ module OpenAI # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). sig do params( - input_audio: T.any( - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, - OpenAI::Internal::Util::AnyHash - ), + input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -45,7 +39,7 @@ module OpenAI def to_hash end - class InputAudio < OpenAI::BaseModel + class InputAudio < OpenAI::Internal::Type::BaseModel # Base64 encoded audio data. 
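A sketch of constructing this content part from a local file, assuming a plain hash is accepted for `input_audio` per the sig above; the filename is hypothetical:

    require "base64"
    require "openai"

    part = OpenAI::Models::Chat::ChatCompletionContentPartInputAudio.new(
      input_audio: {
        data: Base64.strict_encode64(File.binread("clip.wav")), # hypothetical file
        format: :wav # "wav" and "mp3" are the supported formats
      }
    )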
sig { returns(String) } attr_accessor :data @@ -78,7 +72,7 @@ module OpenAI # The format of the encoded audio data. Currently supports "wav" and "mp3". module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 788cfecf..7431214c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartRefusal < OpenAI::BaseModel + class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel # The refusal message generated by the model. sig { returns(String) } attr_accessor :refusal diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index 1cb1a46c..d9189ad1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionContentPartText < OpenAI::BaseModel + class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # The text content. sig { returns(String) } attr_accessor :text diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 9affbd35..617a2ea5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionDeleted < OpenAI::BaseModel + class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # The ID of the chat completion that was deleted. sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index ebf50ef3..a026c7f1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel + class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the developer message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } attr_accessor :content @@ -27,7 +27,7 @@ module OpenAI params( content: T.any( String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] ), name: String, role: Symbol @@ -52,7 +52,7 @@ module OpenAI # The contents of the developer message. 
module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants @@ -60,7 +60,7 @@ module OpenAI ChatCompletionContentPartTextArray = T.let( - OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index 11ce4c45..45e13985 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionFunctionCallOption < OpenAI::BaseModel + class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # The name of the function to call. sig { returns(String) } attr_accessor :name diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 43ae6ea4..41e66779 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionFunctionMessageParam < OpenAI::BaseModel + class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the function message. sig { returns(T.nilable(String)) } attr_accessor :content diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 10aee914..922e88f6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionMessage < OpenAI::BaseModel + class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # The contents of the message. sig { returns(T.nilable(String)) } attr_accessor :content @@ -23,7 +23,7 @@ module OpenAI sig do params( - annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::Util::AnyHash)] + annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::AnyHash)] ) .void end @@ -35,12 +35,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) } attr_reader :audio - sig do - params( - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::Util::AnyHash)) - ) - .void - end + sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::AnyHash))).void } attr_writer :audio # Deprecated and replaced by `tool_calls`. 
The name and arguments of a function @@ -50,7 +45,7 @@ module OpenAI sig do params( - function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::Util::AnyHash) + function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::AnyHash) ) .void end @@ -62,7 +57,7 @@ module OpenAI sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)] + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)] ) .void end @@ -73,10 +68,10 @@ module OpenAI params( content: T.nilable(String), refusal: T.nilable(String), - annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::Util::AnyHash)], - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::Util::AnyHash)), - function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::Util::AnyHash), - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::Util::AnyHash)], + annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::AnyHash)], + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::AnyHash)), + function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::AnyHash), + tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)], role: Symbol ) .returns(T.attached_class) @@ -109,7 +104,7 @@ module OpenAI def to_hash end - class Annotation < OpenAI::BaseModel + class Annotation < OpenAI::Internal::Type::BaseModel # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } attr_accessor :type @@ -120,10 +115,7 @@ module OpenAI sig do params( - url_citation: T.any( - OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, - OpenAI::Internal::Util::AnyHash - ) + url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Internal::AnyHash) ) .void end @@ -132,10 +124,7 @@ module OpenAI # A URL citation when using web search. sig do params( - url_citation: T.any( - OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, - OpenAI::Internal::Util::AnyHash - ), + url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -152,7 +141,7 @@ module OpenAI def to_hash end - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # The index of the last character of the URL citation in the message. sig { returns(Integer) } attr_accessor :end_index @@ -187,7 +176,7 @@ module OpenAI end end - class FunctionCall < OpenAI::BaseModel + class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. 
Validate the diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index a5792ca5..1a4a2cc0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -7,7 +7,7 @@ module OpenAI # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. module ChatCompletionMessageParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index 496d91cb..4104c3ca 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionMessageToolCall < OpenAI::BaseModel + class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call. sig { returns(String) } attr_accessor :id @@ -14,7 +14,7 @@ module OpenAI sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::Util::AnyHash) + function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::Util::AnyHash), + function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -44,7 +44,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. 
Validate the diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 785b8db7..5e04a7e7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat module ChatCompletionModality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 6a992cda..566f6c99 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -3,13 +3,13 @@ module OpenAI module Models module Chat - class ChatCompletionNamedToolChoice < OpenAI::BaseModel + class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) } attr_reader :function sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::Util::AnyHash) + function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::AnyHash) ) .void end @@ -23,7 +23,7 @@ module OpenAI # function. sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::Util::AnyHash), + function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -35,7 +35,7 @@ module OpenAI def to_hash end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The name of the function to call. sig { returns(String) } attr_accessor :name diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index ac3a3840..674775a6 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionPredictionContent < OpenAI::BaseModel + class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. @@ -21,7 +21,7 @@ module OpenAI params( content: T.any( String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] ), type: Symbol ) @@ -43,7 +43,7 @@ module OpenAI # generated tokens would match this content, the entire model response can be # returned much more quickly. 
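A sketch of the predicted-outputs param described above, assuming `type` defaults to `:content` as in the other single-type models in this patch; the source file is hypothetical:

    require "openai"

    current_file = File.read("app/models/user.rb") # hypothetical file whose
                                                   # content the response is
                                                   # expected to largely repeat

    # Per the Content union below, `content` may be a String or an array of
    # text content parts.
    prediction = OpenAI::Models::Chat::ChatCompletionPredictionContent.new(
      content: current_file
    )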
module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants @@ -51,7 +51,7 @@ module OpenAI ChatCompletionContentPartTextArray = T.let( - OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index 61b9c3a5..e75ae72c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -5,7 +5,7 @@ module OpenAI module Chat # The role of the author of a message module ChatCompletionRole - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index bb0ee0d9..26683cfa 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionStreamOptions < OpenAI::BaseModel + class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 6ef22108..bb766e74 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionSystemMessageParam < OpenAI::BaseModel + class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the system message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } attr_accessor :content @@ -27,7 +27,7 @@ module OpenAI params( content: T.any( String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] ), name: String, role: Symbol @@ -52,7 +52,7 @@ module OpenAI # The contents of the system message. 
module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants @@ -60,7 +60,7 @@ module OpenAI ChatCompletionContentPartTextArray = T.let( - OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 131c3a45..17cc3821 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionTokenLogprob < OpenAI::BaseModel + class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # The token. sig { returns(String) } attr_accessor :token @@ -32,7 +32,7 @@ module OpenAI token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float, - top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Internal::Util::AnyHash)] + top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end @@ -53,7 +53,7 @@ module OpenAI def to_hash end - class TopLogprob < OpenAI::BaseModel + class TopLogprob < OpenAI::Internal::Type::BaseModel # The token. sig { returns(String) } attr_accessor :token diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index dd162d31..e35bbd32 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -3,11 +3,11 @@ module OpenAI module Models module Chat - class ChatCompletionTool < OpenAI::BaseModel + class ChatCompletionTool < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash)).void } + sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash)).void } attr_writer :function # The type of the tool. Currently, only `function` is supported. @@ -15,7 +15,7 @@ module OpenAI attr_accessor :type sig do - params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::Util::AnyHash), type: Symbol) + params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(function:, type: :function) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index 48048594..b739c519 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -13,13 +13,13 @@ module OpenAI # `none` is the default when no tools are present. `auto` is the default if tools # are present. module ChatCompletionToolChoiceOption - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. 
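A sketch of the two shapes this union accepts: one of the `Auto` symbols documented below, or a named tool choice forcing a specific function (assuming `type` defaults to `:function`; the function name is hypothetical):

    require "openai"

    tool_choice = :required # or :none / :auto, per the Auto enum below

    # Forcing a particular tool; a plain hash stands in for Function since
    # the sig admits OpenAI::Internal::AnyHash.
    forced = OpenAI::Models::Chat::ChatCompletionNamedToolChoice.new(
      function: {name: "lookup_weather"} # hypothetical function name
    )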
module Auto - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index e49bdc4a..3c34a0a4 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionToolMessageParam < OpenAI::BaseModel + class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the tool message. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } attr_accessor :content @@ -20,7 +20,7 @@ module OpenAI params( content: T.any( String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] ), tool_call_id: String, role: Symbol @@ -45,7 +45,7 @@ module OpenAI # The contents of the tool message. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } def self.variants @@ -53,7 +53,7 @@ module OpenAI ChatCompletionContentPartTextArray = T.let( - OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 73d892eb..f4a11939 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class ChatCompletionUserMessageParam < OpenAI::BaseModel + class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the user message. sig do returns( @@ -43,7 +43,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File @@ -83,7 +83,7 @@ module OpenAI # The contents of the user message. 
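# [editor's sketch] The user-message `Content` union below also accepts a mixed
# array of content parts (text, image, input audio, file). The hash shapes are
# assumed to mirror the API's JSON and pass through the renamed AnyHash arm; the
# URL and question are illustrative.
msg = OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(
  content: [
    {type: :text, text: "What is in this image?"},                       # text part
    {type: :image_url, image_url: {url: "https://example.com/cat.png"}}  # image part (assumed shape)
  ]
)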
module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -106,7 +106,7 @@ module OpenAI ChatCompletionContentPartArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 1d25cd61..cdbe6807 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class CompletionCreateParams < OpenAI::BaseModel + class CompletionCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -44,9 +44,7 @@ module OpenAI attr_reader :audio sig do - params( - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)) - ) + params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash))) .void end attr_writer :audio @@ -88,7 +86,7 @@ module OpenAI function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -103,7 +101,7 @@ module OpenAI sig do params( - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)] + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)] ) .void end @@ -186,7 +184,7 @@ module OpenAI sig do params( - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)) + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)) ) .void end @@ -234,7 +232,7 @@ module OpenAI params( response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ) @@ -285,7 +283,7 @@ module OpenAI sig do params( - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)) + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)) ) .void end @@ -324,7 +322,7 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -337,10 +335,7 @@ module OpenAI sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) } attr_reader :tools - sig do - params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)]) - .void - end + sig { params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)]).void } attr_writer :tools # An integer between 0 and 20 specifying the number of most likely tokens to @@ -374,7 +369,7 @@ module OpenAI sig do params( - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash) + 
web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash) ) .void end @@ -385,7 +380,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -394,14 +389,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -410,12 +405,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -423,19 +418,19 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ 
-540,7 +535,7 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } def self.variants @@ -562,13 +557,13 @@ module OpenAI # `none` is the default when no functions are present. `auto` is the default if # functions are present. module FunctionCall - extend OpenAI::Union + extend OpenAI::Internal::Type::Union # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. module FunctionCallMode - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } @@ -606,7 +601,7 @@ module OpenAI end end - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } @@ -648,7 +643,7 @@ module OpenAI end module Modality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } OrSymbol = @@ -673,7 +668,7 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. module ResponseFormat - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -700,7 +695,7 @@ module OpenAI # When this parameter is set, the response body will include the `service_tier` # utilized. module ServiceTier - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } OrSymbol = @@ -717,16 +712,16 @@ module OpenAI # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end - class WebSearchOptions < OpenAI::BaseModel + class WebSearchOptions < OpenAI::Internal::Type::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. sig do @@ -753,7 +748,7 @@ module OpenAI user_location: T.nilable( T.any( OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) ) @@ -770,7 +765,7 @@ module OpenAI user_location: T.nilable( T.any( OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) ) @@ -794,7 +789,7 @@ module OpenAI # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. 
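# [editor's sketch] Exercising two of the unions above: `Stop` takes one sequence
# or an array (StringArray is the coercion converter for the latter), and
# `ResponseFormat` accepts any of the three response-format models, here supplied
# as a hash via AnyHash. Message text and model name are illustrative.
params = OpenAI::Models::Chat::CompletionCreateParams.new(
  messages: [{role: :user, content: "Reply in JSON."}], # AnyHash arm of the message union
  model: "gpt-4o",                                      # Model union: String or ChatModel symbol
  response_format: {type: :json_object},                # ResponseFormatJSONObject shape (assumed)
  stop: ["\n\n", "END"]                                 # Stop: T::Array[String] variant
)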
module SearchContextSize - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } @@ -833,7 +828,7 @@ module OpenAI end end - class UserLocation < OpenAI::BaseModel + class UserLocation < OpenAI::Internal::Type::BaseModel # Approximate location parameters for the search. sig { returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) } attr_reader :approximate @@ -842,7 +837,7 @@ module OpenAI params( approximate: T.any( OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -858,7 +853,7 @@ module OpenAI params( approximate: T.any( OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), type: Symbol ) @@ -879,7 +874,7 @@ module OpenAI def to_hash end - class Approximate < OpenAI::BaseModel + class Approximate < OpenAI::Internal::Type::BaseModel # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } attr_reader :city diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/lib/openai/models/chat/completion_delete_params.rbi index b03964f8..e92e61fa 100644 --- a/rbi/lib/openai/models/chat/completion_delete_params.rbi +++ b/rbi/lib/openai/models/chat/completion_delete_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Chat - class CompletionDeleteParams < OpenAI::BaseModel + class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index a9fb2282..1759bb11 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class CompletionListParams < OpenAI::BaseModel + class CompletionListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -75,7 +75,7 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
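# [editor's sketch] WebSearchOptions from the hunk above; "San Francisco" is the
# doc comment's own example city, and `UserLocation#type` is assumed to default
# to :approximate (the sig only shows it as a Symbol).
opts = OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions.new(
  search_context_size: :medium,                          # low / medium / high
  user_location: {approximate: {city: "San Francisco"}}  # AnyHash stands in for the nested models
)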
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi index ec5ec147..921f208a 100644 --- a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi +++ b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Chat - class CompletionRetrieveParams < OpenAI::BaseModel + class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index d895f848..7a044c5b 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Chat - class CompletionUpdateParams < OpenAI::BaseModel + class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -19,7 +19,7 @@ module OpenAI sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 5415b3ca..4f3218d7 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat module Completions - class MessageListParams < OpenAI::BaseModel + class MessageListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -35,7 +35,7 @@ module OpenAI after: String, limit: Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -59,7 +59,7 @@ module OpenAI # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
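# [editor's sketch] The Order enums above at work: OrSymbol aliases accept bare
# Symbols (or Strings), so callers never touch the TaggedSymbol constants
# directly. The message ID is hypothetical.
completions = OpenAI::Models::Chat::CompletionListParams.new(limit: 20, order: :desc)
messages = OpenAI::Models::Chat::Completions::MessageListParams.new(after: "msg_123", order: :asc)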
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 4494fe86..17dc0d14 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module ChatModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ChatModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 3549a566..1b06ab72 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ComparisonFilter < OpenAI::BaseModel + class ComparisonFilter < OpenAI::Internal::Type::BaseModel # The key to compare against the value. sig { returns(String) } attr_accessor :key @@ -54,7 +54,7 @@ module OpenAI # - `lt`: less than # - `lte`: less than or equal module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } @@ -74,7 +74,7 @@ module OpenAI # The value to compare against the attribute key; supports string, number, or # boolean types. module Value - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index cb4efb9a..98496acd 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Completion < OpenAI::BaseModel + class Completion < OpenAI::Internal::Type::BaseModel # A unique identifier for the completion. sig { returns(String) } attr_accessor :id @@ -37,7 +37,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)).void } attr_writer :usage # Represents a completion response from the API. Note: both the streamed and @@ -45,11 +45,11 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Internal::Util::AnyHash)], + choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Internal::AnyHash)], created: Integer, model: String, system_fingerprint: String, - usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::Util::AnyHash), + usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 516615be..6cd7e70d 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class CompletionChoice < OpenAI::BaseModel + class CompletionChoice < OpenAI::Internal::Type::BaseModel # The reason the model stopped generating tokens. 
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if @@ -17,9 +17,7 @@ module OpenAI attr_reader :logprobs sig do - params( - logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::Util::AnyHash)) - ) + params(logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::AnyHash))) .void end attr_writer :logprobs @@ -31,7 +29,7 @@ module OpenAI params( finish_reason: OpenAI::Models::CompletionChoice::FinishReason::OrSymbol, index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::Util::AnyHash)), + logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::AnyHash)), text: String ) .returns(T.attached_class) @@ -58,7 +56,7 @@ module OpenAI # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. module FinishReason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } OrSymbol = @@ -73,7 +71,7 @@ module OpenAI end end - class Logprobs < OpenAI::BaseModel + class Logprobs < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(T::Array[Integer])) } attr_reader :text_offset diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index b71c3965..26802234 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class CompletionCreateParams < OpenAI::BaseModel + class CompletionCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -119,7 +119,7 @@ module OpenAI sig do params( - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)) + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)) ) .void end @@ -177,12 +177,12 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -249,7 +249,7 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } def self.variants @@ -272,32 +272,33 @@ module OpenAI # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. 
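# [editor's sketch] TaggedSymbol is T.all(Symbol, ...), so enum values are plain
# Ruby Symbols at runtime; the constructor keywords below come straight from the
# sig in this hunk, with illustrative values.
choice = OpenAI::Models::CompletionChoice.new(finish_reason: :stop, index: 0, logprobs: nil, text: "Hi")
choice.finish_reason == :stop # => true; enum members compare as bare Symbols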
module Prompt - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) - IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Internal::Type::Converter) + IntegerArray = T.let(OpenAI::Internal::Type::ArrayOf[Integer], OpenAI::Internal::Type::Converter) - ArrayOfToken2DArray = T.let( - OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], - OpenAI::Internal::Type::Converter - ) + ArrayOfToken2DArray = + T.let( + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]], + OpenAI::Internal::Type::Converter + ) end # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end end end diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index f059d80b..fb3951e4 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class CompletionUsage < OpenAI::BaseModel + class CompletionUsage < OpenAI::Internal::Type::BaseModel # Number of tokens in the generated completion. sig { returns(Integer) } attr_accessor :completion_tokens @@ -21,7 +21,7 @@ module OpenAI sig do params( - completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::Util::AnyHash) + completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::AnyHash) ) .void end @@ -33,7 +33,7 @@ module OpenAI sig do params( - prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::Util::AnyHash) + prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::AnyHash) ) .void end @@ -45,8 +45,8 @@ module OpenAI completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer, - completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::Util::AnyHash), - prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::Util::AnyHash) + completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::AnyHash), + prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -74,7 +74,7 @@ module OpenAI def to_hash end - class CompletionTokensDetails < OpenAI::BaseModel + class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. sig { returns(T.nilable(Integer)) } @@ -140,7 +140,7 @@ module OpenAI end end - class PromptTokensDetails < OpenAI::BaseModel + class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # Audio input tokens present in the prompt. 
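# [editor's sketch] The four `Prompt` variants above, side by side; the token IDs
# are illustrative. StringArray, IntegerArray, and ArrayOfToken2DArray are the
# ArrayOf converters that coerce each array form.
prompt = "Say this is a test"       # String
prompt = ["First", "Second"]        # T::Array[String]
prompt = [1212, 318, 257, 1332]     # T::Array[Integer]: one tokenized prompt
prompt = [[1212, 318], [257, 1332]] # T::Array[T::Array[Integer]]: a batch of token prompts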
sig { returns(T.nilable(Integer)) } attr_reader :audio_tokens diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 7377b320..056c00fe 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class CompoundFilter < OpenAI::BaseModel + class CompoundFilter < OpenAI::Internal::Type::BaseModel # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. sig { returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) } @@ -15,7 +15,7 @@ module OpenAI # Combine multiple filters using `and` or `or`. sig do params( - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, T.anything)], + filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, T.anything)], type: OpenAI::Models::CompoundFilter::Type::OrSymbol ) .returns(T.attached_class) @@ -38,7 +38,7 @@ module OpenAI # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. module Filter - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } def self.variants @@ -47,7 +47,7 @@ module OpenAI # Type of operation: `and` or `or`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index d970b614..e76ac6c3 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class CreateEmbeddingResponse < OpenAI::BaseModel + class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # The list of embeddings generated by the model. sig { returns(T::Array[OpenAI::Models::Embedding]) } attr_accessor :data @@ -19,14 +19,14 @@ module OpenAI sig { returns(OpenAI::Models::CreateEmbeddingResponse::Usage) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::AnyHash)).void } attr_writer :usage sig do params( - data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Internal::Util::AnyHash)], + data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Internal::AnyHash)], model: String, - usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::Util::AnyHash), + usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -48,7 +48,7 @@ module OpenAI def to_hash end - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel # The number of tokens used by the prompt. 
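# [editor's sketch] Composing the filter models above. Comparison `type` comes
# from the eq/ne/gt/gte/lt/lte enum and compound `type` from the and/or enum; the
# keys are hypothetical metadata fields, and the year is a Float because the
# Value union is [String, Float, T::Boolean].
region = OpenAI::Models::ComparisonFilter.new(key: "region", type: :eq, value: "us")
year = OpenAI::Models::ComparisonFilter.new(key: "year", type: :gte, value: 2023.0)
both = OpenAI::Models::CompoundFilter.new(filters: [region, year], type: :and)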
sig { returns(Integer) } attr_accessor :prompt_tokens diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index 90ad0f45..1f6e9020 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Embedding < OpenAI::BaseModel + class Embedding < OpenAI::Internal::Type::BaseModel # The embedding vector, which is a list of floats. The length of vector depends on # the model as listed in the # [embedding guide](https://platform.openai.com/docs/guides/embeddings). diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 47c96aeb..5d8acf9b 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class EmbeddingCreateParams < OpenAI::BaseModel + class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -57,7 +57,7 @@ module OpenAI dimensions: Integer, encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -89,20 +89,21 @@ module OpenAI # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) - IntegerArray = T.let(OpenAI::ArrayOf[Integer], OpenAI::Internal::Type::Converter) + IntegerArray = T.let(OpenAI::Internal::Type::ArrayOf[Integer], OpenAI::Internal::Type::Converter) - ArrayOfToken2DArray = T.let( - OpenAI::ArrayOf[OpenAI::ArrayOf[Integer]], - OpenAI::Internal::Type::Converter - ) + ArrayOfToken2DArray = + T.let( + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]], + OpenAI::Internal::Type::Converter + ) end # ID of the model to use. You can use the @@ -111,7 +112,7 @@ module OpenAI # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } def self.variants @@ -121,7 +122,7 @@ module OpenAI # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
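# [editor's sketch] `Input` mirrors the completions `Prompt` union, down to the
# same three ArrayOf converters. A minimal params sketch; the model name is
# illustrative, and encoding_format is :float or :base64 per the doc comment.
embed = OpenAI::Models::EmbeddingCreateParams.new(
  input: ["First document", "Second document"], # StringArray variant
  model: "text-embedding-3-small",              # Model union: String or EmbeddingModel symbol
  encoding_format: :float
)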
module EncodingFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } OrSymbol = diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 94781b12..fb2fd9ed 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module EmbeddingModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/error_object.rbi b/rbi/lib/openai/models/error_object.rbi index 134b6d66..dfea9b56 100644 --- a/rbi/lib/openai/models/error_object.rbi +++ b/rbi/lib/openai/models/error_object.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ErrorObject < OpenAI::BaseModel + class ErrorObject < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(String)) } attr_accessor :code diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index 4e9e85c9..8e896180 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -4,7 +4,7 @@ module OpenAI module Models # The strategy used to chunk the file. module FileChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 17f0755f..9f69a517 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -5,7 +5,7 @@ module OpenAI # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
module FileChunkingStrategyParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/lib/openai/models/file_content_params.rbi index 773bdb83..f753c7d2 100644 --- a/rbi/lib/openai/models/file_content_params.rbi +++ b/rbi/lib/openai/models/file_content_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class FileContentParams < OpenAI::BaseModel + class FileContentParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index f665b3dc..c2a7e47f 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class FileCreateParams < OpenAI::BaseModel + class FileCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -21,7 +21,7 @@ module OpenAI params( file: T.any(IO, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/lib/openai/models/file_delete_params.rbi index 354f6382..af5aa65f 100644 --- a/rbi/lib/openai/models/file_delete_params.rbi +++ b/rbi/lib/openai/models/file_delete_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class FileDeleteParams < OpenAI::BaseModel + class FileDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/file_deleted.rbi b/rbi/lib/openai/models/file_deleted.rbi index fd6fac2b..6b362f3b 100644 --- a/rbi/lib/openai/models/file_deleted.rbi +++ b/rbi/lib/openai/models/file_deleted.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class FileDeleted < OpenAI::BaseModel + class FileDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index c1cc0765..035e32d3 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class FileListParams < OpenAI::BaseModel + class FileListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -45,7 +45,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, - request_options: 
T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -70,7 +70,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileListParams::Order::TaggedSymbol) } diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index e8452219..c7f26a14 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class FileObject < OpenAI::BaseModel + class FileObject < OpenAI::Internal::Type::BaseModel # The file identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -100,7 +100,7 @@ module OpenAI # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. module Purpose - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } @@ -121,7 +121,7 @@ module OpenAI # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Status::TaggedSymbol) } diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 1f7bb4f9..5514076c 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -7,7 +7,7 @@ module OpenAI # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets module FilePurpose - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FilePurpose::TaggedSymbol) } diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi b/rbi/lib/openai/models/file_retrieve_params.rbi index d34e1631..25676e12 100644 --- a/rbi/lib/openai/models/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/file_retrieve_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class FileRetrieveParams < OpenAI::BaseModel + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index c6541e22..810bf8fd 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ 
-3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJob < OpenAI::BaseModel + class FineTuningJob < OpenAI::Internal::Type::BaseModel # The object identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -19,7 +19,7 @@ module OpenAI sig do params( - error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::Util::AnyHash)) + error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::AnyHash)) ) .void end @@ -42,7 +42,7 @@ module OpenAI sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::Util::AnyHash) + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::AnyHash) ) .void end @@ -113,10 +113,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method)) } attr_reader :method_ - sig do - params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::Util::AnyHash)) - .void - end + sig { params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::AnyHash)).void } attr_writer :method_ # The `fine_tuning.job` object represents a fine-tuning job that has been created @@ -125,10 +122,10 @@ module OpenAI params( id: String, created_at: Integer, - error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::Util::AnyHash)), + error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::AnyHash)), fine_tuned_model: T.nilable(String), finished_at: T.nilable(Integer), - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::Util::AnyHash), + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::AnyHash), model: String, organization_id: String, result_files: T::Array[String], @@ -139,10 +136,10 @@ module OpenAI validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::Util::AnyHash), + method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::AnyHash), object: Symbol ) .returns(T.attached_class) @@ -199,7 +196,7 @@ module OpenAI def to_hash end - class Error < OpenAI::BaseModel + class Error < OpenAI::Internal::Type::BaseModel # A machine-readable error code. sig { returns(String) } attr_accessor :code @@ -224,7 +221,7 @@ module OpenAI end end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -278,7 +275,7 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
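# [editor's sketch] Each hyperparameter union here pairs a Symbol arm with a
# numeric arm; the Symbol is assumed to be the API's :auto sentinel, the number
# an explicit setting. Values are illustrative.
hp = OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters.new(
  batch_size: :auto,             # assumed :auto sentinel (Symbol arm)
  learning_rate_multiplier: 0.1, # explicit Float
  n_epochs: 4                    # explicit Integer
)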
module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -288,7 +285,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -298,7 +295,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -309,7 +306,7 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } OrSymbol = @@ -328,17 +325,12 @@ module OpenAI end end - class Method < OpenAI::BaseModel + class Method < OpenAI::Internal::Type::BaseModel # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo)) } attr_reader :dpo - sig do - params( - dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::AnyHash)).void } attr_writer :dpo # Configuration for the supervised fine-tuning method. @@ -347,7 +339,7 @@ module OpenAI sig do params( - supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::Util::AnyHash) + supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::AnyHash) ) .void end @@ -363,8 +355,8 @@ module OpenAI # The method used for fine-tuning. sig do params( - dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::Util::AnyHash), - supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::Util::AnyHash), + dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::AnyHash), + supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::AnyHash), type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol ) .returns(T.attached_class) @@ -385,17 +377,14 @@ module OpenAI def to_hash end - class Dpo < OpenAI::BaseModel + class Dpo < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters)) } attr_reader :hyperparameters sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, - OpenAI::Internal::Util::AnyHash - ) + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Internal::AnyHash) ) .void end @@ -404,10 +393,7 @@ module OpenAI # Configuration for the DPO fine-tuning method. 
sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, - OpenAI::Internal::Util::AnyHash - ) + hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -421,7 +407,7 @@ module OpenAI def to_hash end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -484,7 +470,7 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -494,7 +480,7 @@ module OpenAI # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. module Beta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -504,7 +490,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -514,7 +500,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -523,7 +509,7 @@ module OpenAI end end - class Supervised < OpenAI::BaseModel + class Supervised < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters)) } attr_reader :hyperparameters @@ -532,7 +518,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -544,7 +530,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) @@ -559,7 +545,7 @@ module OpenAI def to_hash end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -612,7 +598,7 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -622,7 +608,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -632,7 +618,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -643,7 +629,7 @@ module OpenAI # The type of method. Is either `supervised` or `dpo`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 811f8518..f9a16f3d 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJobEvent < OpenAI::BaseModel + class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # The object identifier. sig { returns(String) } attr_accessor :id @@ -81,7 +81,7 @@ module OpenAI # The log level of the event. module Level - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } OrSymbol = @@ -98,7 +98,7 @@ module OpenAI # The type of event. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index 315342d0..561dd861 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJobWandbIntegration < OpenAI::BaseModel + class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # The name of the project that the new run will be created under. 
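# [editor's sketch] The W&B pairing in this and the following hunk: the
# integration object wraps a FineTuningJobWandbIntegration under `wandb`, and its
# `type` is assumed to default to :wandb (the sig only shows a Symbol). The
# project name is hypothetical.
wandb = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject.new(
  wandb: {project: "my-finetune-runs"} # AnyHash stands in for the nested model
)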
sig { returns(String) } attr_accessor :project diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index 78bec198..bb2e26b6 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel + class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # The type of the integration being enabled for the fine-tuning job sig { returns(Symbol) } attr_accessor :type @@ -16,16 +16,14 @@ module OpenAI attr_reader :wandb sig do - params( - wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::Util::AnyHash) - ) + params(wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::AnyHash)) .void end attr_writer :wandb sig do params( - wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::Util::AnyHash), + wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi index 4180a266..2c2b8d46 100644 --- a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module FineTuning - class JobCancelParams < OpenAI::BaseModel + class JobCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 68089ea2..351f2408 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class JobCreateParams < OpenAI::BaseModel + class JobCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -39,7 +39,7 @@ module OpenAI sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash) + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash) ) .void end @@ -62,12 +62,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) } attr_reader :method_ - sig do - params( - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash)).void } attr_writer :method_ # The seed controls the reproducibility of the job. 
Passing in the same seed and @@ -103,16 +98,16 @@ module OpenAI params( model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash), + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash), integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash), + method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash), seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -153,7 +148,7 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } def self.variants @@ -169,7 +164,7 @@ module OpenAI GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -223,7 +218,7 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -233,7 +228,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -243,7 +238,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -251,7 +246,7 @@ module OpenAI end end - class Integration < OpenAI::BaseModel + class Integration < OpenAI::Internal::Type::BaseModel # The type of integration to enable. Currently, only "wandb" (Weights and Biases) # is supported. 
sig { returns(Symbol) } @@ -266,7 +261,7 @@ module OpenAI sig do params( - wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::Util::AnyHash) + wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::AnyHash) ) .void end @@ -274,7 +269,7 @@ module OpenAI sig do params( - wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::Util::AnyHash), + wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -286,7 +281,7 @@ module OpenAI def to_hash end - class Wandb < OpenAI::BaseModel + class Wandb < OpenAI::Internal::Type::BaseModel # The name of the project that the new run will be created under. sig { returns(String) } attr_accessor :project @@ -341,15 +336,13 @@ module OpenAI end end - class Method < OpenAI::BaseModel + class Method < OpenAI::Internal::Type::BaseModel # Configuration for the DPO fine-tuning method. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo)) } attr_reader :dpo sig do - params( - dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::Util::AnyHash) - ) + params(dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::AnyHash)) .void end attr_writer :dpo @@ -360,7 +353,7 @@ module OpenAI sig do params( - supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::Util::AnyHash) + supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::AnyHash) ) .void end @@ -376,8 +369,8 @@ module OpenAI # The method used for fine-tuning. sig do params( - dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::Util::AnyHash), - supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::Util::AnyHash), + dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::AnyHash), + supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::AnyHash), type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol ) .returns(T.attached_class) @@ -398,7 +391,7 @@ module OpenAI def to_hash end - class Dpo < OpenAI::BaseModel + class Dpo < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters)) } attr_reader :hyperparameters @@ -407,7 +400,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -419,7 +412,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) @@ -434,7 +427,7 @@ module OpenAI def to_hash end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -497,7 +490,7 @@ module OpenAI # Number of examples in each batch. 
A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -507,7 +500,7 @@ module OpenAI # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. module Beta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -517,7 +510,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -527,7 +520,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -536,7 +529,7 @@ module OpenAI end end - class Supervised < OpenAI::BaseModel + class Supervised < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters)) } attr_reader :hyperparameters @@ -545,7 +538,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .void @@ -557,7 +550,7 @@ module OpenAI params( hyperparameters: T.any( OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ) .returns(T.attached_class) @@ -574,7 +567,7 @@ module OpenAI def to_hash end - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -627,7 +620,7 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -637,7 +630,7 @@ module OpenAI # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } def self.variants @@ -647,7 +640,7 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } def self.variants @@ -658,7 +651,7 @@ module OpenAI # The type of method. Is either `supervised` or `dpo`. 
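A usage sketch for the retyped signatures above, assuming the omitted keywords have defaults and that `:auto` is the hyperparameter sentinel; the file id is hypothetical. Plain hashes satisfy the `T.any(..., OpenAI::Internal::AnyHash)` positions, so nested models need not be instantiated explicitly:

    params = OpenAI::Models::FineTuning::JobCreateParams.new(
      model: :"gpt-4o-mini",            # Model union: a String or an enum symbol
      training_file: "file-abc123",     # hypothetical uploaded-file id
      method_: {
        type: :supervised,              # "Is either `supervised` or `dpo`"
        supervised: {hyperparameters: {n_epochs: :auto}}
      }
    )
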
module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 5a289277..9ad33668 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class JobListEventsParams < OpenAI::BaseModel + class JobListEventsParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -25,7 +25,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index d3d055fc..94aa1f34 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module FineTuning - class JobListParams < OpenAI::BaseModel + class JobListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -31,7 +31,7 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi index 119b93f5..ae440b7a 100644 --- a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module FineTuning - class JobRetrieveParams < OpenAI::BaseModel + class JobRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index e9dabed3..279608d3 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning module Jobs - class CheckpointListParams < OpenAI::BaseModel + class CheckpointListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -26,7 +26,7 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: 
T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index f51faa86..cd3931c5 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module FineTuning module Jobs - class FineTuningJobCheckpoint < OpenAI::BaseModel + class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # The checkpoint identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -27,7 +27,7 @@ module OpenAI sig do params( - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::Util::AnyHash) + metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::AnyHash) ) .void end @@ -49,7 +49,7 @@ module OpenAI created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::Util::AnyHash), + metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::AnyHash), step_number: Integer, object: Symbol ) @@ -83,7 +83,7 @@ module OpenAI def to_hash end - class Metrics < OpenAI::BaseModel + class Metrics < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(Float)) } attr_reader :full_valid_loss diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index f1bad3a8..6a4dc07a 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class FunctionDefinition < OpenAI::BaseModel + class FunctionDefinition < OpenAI::Internal::Type::BaseModel # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } diff --git a/rbi/lib/openai/models/function_parameters.rbi b/rbi/lib/openai/models/function_parameters.rbi index 5920964f..f7b78f39 100644 --- a/rbi/lib/openai/models/function_parameters.rbi +++ b/rbi/lib/openai/models/function_parameters.rbi @@ -2,6 +2,10 @@ module OpenAI module Models - FunctionParameters = T.let(OpenAI::HashOf[OpenAI::Unknown], OpenAI::Internal::Type::Converter) + FunctionParameters = + T.let( + OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown], + OpenAI::Internal::Type::Converter + ) end end diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index 5af93081..9f14f13d 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel # The base64-encoded JSON of the generated image, if `response_format` is # `b64_json`. 
sig { returns(T.nilable(String)) } diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index ea1298ad..eb1c580e 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ImageCreateVariationParams < OpenAI::BaseModel + class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -84,7 +84,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } def self.variants @@ -95,7 +95,7 @@ module OpenAI # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } OrSymbol = @@ -112,7 +112,7 @@ module OpenAI # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } OrSymbol = diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 80ac6fdf..5c428b9a 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ImageEditParams < OpenAI::BaseModel + class ImageEditParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -64,7 +64,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -103,7 +103,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } def self.variants @@ -114,7 +114,7 @@ module OpenAI # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } OrSymbol = @@ -131,7 +131,7 @@ module OpenAI # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 5b0720c6..880ba871 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ImageGenerateParams < OpenAI::BaseModel + class ImageGenerateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -67,7 +67,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -105,7 +105,7 @@ module OpenAI # The model to use for image generation. module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } def self.variants @@ -116,7 +116,7 @@ module OpenAI # details and greater consistency across the image. This param is only supported # for `dall-e-3`. module Quality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } OrSymbol = @@ -134,7 +134,7 @@ module OpenAI # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } OrSymbol = @@ -152,7 +152,7 @@ module OpenAI # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or # `1024x1792` for `dall-e-3` models. module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } OrSymbol = @@ -174,7 +174,7 @@ module OpenAI # Natural causes the model to produce more natural, less hyper-real looking # images. This param is only supported for `dall-e-3`. 
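A sketch of how the `OrSymbol` aliases above behave at call sites: callers may pass a bare Symbol (or String), while only values the SDK itself produces are typed as `TaggedSymbol`. The prompt text is hypothetical, and `prompt:` plus any omitted keywords are assumed here:

    params = OpenAI::Models::ImageGenerateParams.new(
      prompt: "a watercolor fox",   # hypothetical prompt (assumed keyword)
      size: :"1024x1024",           # Size::OrSymbol accepts a plain Symbol
      response_format: :b64_json    # ResponseFormat::OrSymbol likewise
    )
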
module Style - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } OrSymbol = diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 6ba143ea..20027c2b 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module ImageModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index 412cb40b..796ce49c 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ImagesResponse < OpenAI::BaseModel + class ImagesResponse < OpenAI::Internal::Type::BaseModel sig { returns(Integer) } attr_accessor :created @@ -10,7 +10,7 @@ module OpenAI attr_accessor :data sig do - params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::Util::AnyHash)]) + params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)]) .returns(T.attached_class) end def self.new(created:, data:) diff --git a/rbi/lib/openai/models/metadata.rbi b/rbi/lib/openai/models/metadata.rbi index 20e8347e..04102855 100644 --- a/rbi/lib/openai/models/metadata.rbi +++ b/rbi/lib/openai/models/metadata.rbi @@ -2,6 +2,6 @@ module OpenAI module Models - Metadata = T.let(OpenAI::HashOf[String], OpenAI::Internal::Type::Converter) + Metadata = T.let(OpenAI::Internal::Type::HashOf[String], OpenAI::Internal::Type::Converter) end end diff --git a/rbi/lib/openai/models/model.rbi b/rbi/lib/openai/models/model.rbi index 556bb5fc..ad55cfee 100644 --- a/rbi/lib/openai/models/model.rbi +++ b/rbi/lib/openai/models/model.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Model < OpenAI::BaseModel + class Model < OpenAI::Internal::Type::BaseModel # The model identifier, which can be referenced in the API endpoints. 
sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/lib/openai/models/model_delete_params.rbi index 3ace3c62..42095a36 100644 --- a/rbi/lib/openai/models/model_delete_params.rbi +++ b/rbi/lib/openai/models/model_delete_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class ModelDeleteParams < OpenAI::BaseModel + class ModelDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/model_deleted.rbi b/rbi/lib/openai/models/model_deleted.rbi index 9888bb84..7044add0 100644 --- a/rbi/lib/openai/models/model_deleted.rbi +++ b/rbi/lib/openai/models/model_deleted.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ModelDeleted < OpenAI::BaseModel + class ModelDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/lib/openai/models/model_list_params.rbi index ec503d0e..6570ac6f 100644 --- a/rbi/lib/openai/models/model_list_params.rbi +++ b/rbi/lib/openai/models/model_list_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class ModelListParams < OpenAI::BaseModel + class ModelListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/lib/openai/models/model_retrieve_params.rbi index 48ab1cf7..273f1b0e 100644 --- a/rbi/lib/openai/models/model_retrieve_params.rbi +++ b/rbi/lib/openai/models/model_retrieve_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class ModelRetrieveParams < OpenAI::BaseModel + class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 256ad93b..70df9dfa 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -2,12 +2,12 @@ module OpenAI module Models - class Moderation < OpenAI::BaseModel + class Moderation < OpenAI::Internal::Type::BaseModel # A list of the categories, and whether they are flagged or not. 
sig { returns(OpenAI::Models::Moderation::Categories) } attr_reader :categories - sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::Util::AnyHash)).void } + sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::AnyHash)).void } attr_writer :categories # A list of the categories along with the input type(s) that the score applies to. @@ -16,7 +16,7 @@ module OpenAI sig do params( - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::Util::AnyHash) + category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::AnyHash) ) .void end @@ -26,12 +26,7 @@ module OpenAI sig { returns(OpenAI::Models::Moderation::CategoryScores) } attr_reader :category_scores - sig do - params( - category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::AnyHash)).void } attr_writer :category_scores # Whether any of the below categories are flagged. @@ -40,9 +35,9 @@ module OpenAI sig do params( - categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::Util::AnyHash), - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::Util::AnyHash), - category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::Util::AnyHash), + categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::AnyHash), + category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::AnyHash), + category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::AnyHash), flagged: T::Boolean ) .returns(T.attached_class) @@ -64,7 +59,7 @@ module OpenAI def to_hash end - class Categories < OpenAI::BaseModel + class Categories < OpenAI::Internal::Type::BaseModel # Content that expresses, incites, or promotes harassing language towards any # target. sig { returns(T::Boolean) } @@ -194,7 +189,7 @@ module OpenAI end end - class CategoryAppliedInputTypes < OpenAI::BaseModel + class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # The applied input type(s) for the category 'harassment'. 
sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } attr_accessor :harassment @@ -315,7 +310,7 @@ module OpenAI end module Harassment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } @@ -332,7 +327,7 @@ module OpenAI end module HarassmentThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } @@ -359,7 +354,7 @@ module OpenAI end module Hate - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } @@ -374,7 +369,7 @@ module OpenAI end module HateThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } @@ -398,7 +393,7 @@ module OpenAI end module Illicit - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } @@ -413,7 +408,7 @@ module OpenAI end module IllicitViolent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } @@ -431,7 +426,7 @@ module OpenAI end module SelfHarm - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } @@ -447,7 +442,7 @@ module OpenAI end module SelfHarmInstruction - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } @@ -476,7 +471,7 @@ module OpenAI end module SelfHarmIntent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } @@ -496,7 +491,7 @@ module OpenAI end module Sexual - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } @@ -512,7 +507,7 @@ module OpenAI end module SexualMinor - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } @@ -530,7 +525,7 @@ module OpenAI end module Violence - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } @@ -546,7 +541,7 @@ module OpenAI end module ViolenceGraphic - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } @@ -572,7 +567,7 @@ module OpenAI end end - class CategoryScores < OpenAI::BaseModel + class CategoryScores < OpenAI::Internal::Type::BaseModel # The score for the category 'harassment'. 
sig { returns(Float) } attr_accessor :harassment diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index c66af1c0..4fa9650a 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ModerationCreateParams < OpenAI::BaseModel + class ModerationCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -37,13 +37,13 @@ module OpenAI T::Array[ T.any( OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ModerationTextInput ) ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -70,7 +70,7 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -81,11 +81,11 @@ module OpenAI def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) ModerationMultiModalInputArray = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], OpenAI::Internal::Type::Converter ) end @@ -95,7 +95,7 @@ module OpenAI # learn about available models # [here](https://platform.openai.com/docs/models#moderation). module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) } def self.variants diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index 4c1b0187..fc12e123 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ModerationCreateResponse < OpenAI::BaseModel + class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # The unique identifier for the moderation request. sig { returns(String) } attr_accessor :id @@ -20,7 +20,7 @@ module OpenAI params( id: String, model: String, - results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Internal::Util::AnyHash)] + results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Internal::AnyHash)] ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index 77f4cfc0..128f5062 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -2,17 +2,12 @@ module OpenAI module Models - class ModerationImageURLInput < OpenAI::BaseModel + class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # Contains either an image URL or a data URL for a base64 encoded image. 
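A sketch of the `Input` union on `ModerationCreateParams` from the hunk above: a single string, an array of strings, or an array of multi-modal objects, where plain hashes again stand in via `OpenAI::Internal::AnyHash`. The URL is hypothetical, and `model:` is assumed to default when omitted:

    text_only  = OpenAI::Models::ModerationCreateParams.new(input: "text to classify")
    multimodal = OpenAI::Models::ModerationCreateParams.new(
      input: [
        {type: :text, text: "text to classify"},
        {type: :image_url, image_url: {url: "https://example.com/cat.png"}}
      ]
    )
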
sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } attr_reader :image_url - sig do - params( - image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::Util::AnyHash) - ) - .void - end + sig { params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash)).void } attr_writer :image_url # Always `image_url`. @@ -22,7 +17,7 @@ module OpenAI # An object describing an image to classify. sig do params( - image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::Util::AnyHash), + image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -34,7 +29,7 @@ module OpenAI def to_hash end - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel # Either a URL of the image or the base64 encoded image data. sig { returns(String) } attr_accessor :url diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index e422afd9..3b1d564a 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module ModerationModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ModerationModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 9c96d7b0..247b0857 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -4,7 +4,7 @@ module OpenAI module Models # An object describing an image to classify. module ModerationMultiModalInput - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } def self.variants diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/lib/openai/models/moderation_text_input.rbi index dddf6d0b..d117af19 100644 --- a/rbi/lib/openai/models/moderation_text_input.rbi +++ b/rbi/lib/openai/models/moderation_text_input.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ModerationTextInput < OpenAI::BaseModel + class ModerationTextInput < OpenAI::Internal::Type::BaseModel # A string of text to classify. sig { returns(String) } attr_accessor :text diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index 14da388e..24704550 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class OtherFileChunkingStrategyObject < OpenAI::BaseModel + class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # Always `other`. 
sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 0bcde269..13f6e5ae 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Reasoning < OpenAI::BaseModel + class Reasoning < OpenAI::Internal::Type::BaseModel # **o-series models only** # # Constrains effort on reasoning for @@ -52,7 +52,7 @@ module OpenAI # debugging and understanding the model's reasoning process. One of `concise` or # `detailed`. module GenerateSummary - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } OrSymbol = diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index e95a9cdc..d46cfdf9 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -9,7 +9,7 @@ module OpenAI # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. module ReasoningEffort - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ReasoningEffort::TaggedSymbol) } diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index 331cb19e..3a1e8d74 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ResponseFormatJSONObject < OpenAI::BaseModel + class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # The type of response format being defined. Always `json_object`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index 95e6df84..8f5dbd5c 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -2,14 +2,14 @@ module OpenAI module Models - class ResponseFormatJSONSchema < OpenAI::BaseModel + class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # Structured Outputs configuration options, including a JSON Schema. sig { returns(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) } attr_reader :json_schema sig do params( - json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::Util::AnyHash) + json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::AnyHash) ) .void end @@ -24,7 +24,7 @@ module OpenAI # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( - json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::Util::AnyHash), + json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -36,7 +36,7 @@ module OpenAI def to_hash end - class JSONSchema < OpenAI::BaseModel + class JSONSchema < OpenAI::Internal::Type::BaseModel # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. 
sig { returns(String) } diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/lib/openai/models/response_format_text.rbi index a4b2db5e..2cee5b0b 100644 --- a/rbi/lib/openai/models/response_format_text.rbi +++ b/rbi/lib/openai/models/response_format_text.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class ResponseFormatText < OpenAI::BaseModel + class ResponseFormatText < OpenAI::Internal::Type::BaseModel # The type of response format being defined. Always `text`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 90cabbf7..b4329d44 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ComputerTool < OpenAI::BaseModel + class ComputerTool < OpenAI::Internal::Type::BaseModel # The height of the computer display. sig { returns(Float) } attr_accessor :display_height @@ -50,7 +50,7 @@ module OpenAI # The type of computer environment to control. module Environment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 67ac236f..6b25a120 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class EasyInputMessage < OpenAI::BaseModel + class EasyInputMessage < OpenAI::Internal::Type::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. sig do @@ -46,7 +46,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -85,7 +85,7 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -109,7 +109,7 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } OrSymbol = @@ -127,7 +127,7 @@ module OpenAI # The type of the message input. Always `message`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 94dca043..7ca909fa 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class FileSearchTool < OpenAI::BaseModel + class FileSearchTool < OpenAI::Internal::Type::BaseModel # The type of the file search tool. Always `file_search`. 
sig { returns(Symbol) } attr_accessor :type @@ -18,7 +18,7 @@ module OpenAI sig do params( - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter) + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) ) .void end @@ -38,7 +38,7 @@ module OpenAI sig do params( - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::AnyHash) ) .void end @@ -50,9 +50,9 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::Util::AnyHash), + ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::AnyHash), type: Symbol ) .returns(T.attached_class) @@ -83,14 +83,14 @@ module OpenAI # A filter to apply based on file attributes. module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } def self.variants end end - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } attr_reader :ranker @@ -129,7 +129,7 @@ module OpenAI # The ranker to use for the file search. module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index 7462dada..e329cc11 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class FunctionTool < OpenAI::BaseModel + class FunctionTool < OpenAI::Internal::Type::BaseModel # The name of the function to call. 
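A sketch against the `FileSearchTool` signatures in the hunk above; the vector store id is hypothetical, and omitted keywords are assumed to default. Note that the `filters:` position accepts a `ComparisonFilter`, a `CompoundFilter`, or a plain hash, since the union includes `OpenAI::Internal::AnyHash`:

    tool = OpenAI::Models::Responses::FileSearchTool.new(
      vector_store_ids: ["vs_abc123"],   # hypothetical vector store id
      max_num_results: 5
    )
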
sig { returns(String) } attr_accessor :name diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 20120ad6..baf516d6 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class InputItemListParams < OpenAI::BaseModel + class InputItemListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -54,7 +54,7 @@ module OpenAI include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -82,7 +82,7 @@ module OpenAI # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index c2c8686f..4a3056e9 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class Response < OpenAI::BaseModel + class Response < OpenAI::Internal::Type::BaseModel # Unique identifier for this Response. sig { returns(String) } attr_accessor :id @@ -16,10 +16,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseError)) } attr_reader :error - sig do - params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::Util::AnyHash))) - .void - end + sig { params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::AnyHash))).void } attr_writer :error # Details about why the response is incomplete. @@ -28,7 +25,7 @@ module OpenAI sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::Util::AnyHash)) + incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::AnyHash)) ) .void end @@ -171,7 +168,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash))).void } + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void } attr_writer :reasoning # The status of the response generation. One of `completed`, `failed`, @@ -190,7 +187,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash)).void } attr_writer :text # The truncation strategy to use for the model response. 
@@ -208,7 +205,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::Util::AnyHash)).void } + sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::AnyHash)).void } attr_writer :usage # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -224,15 +221,15 @@ module OpenAI params( id: String, created_at: Float, - error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::Util::AnyHash)), - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::Util::AnyHash)), + error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::AnyHash)), + incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::AnyHash)), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, @@ -245,13 +242,13 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -260,11 +257,11 @@ module OpenAI top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), status: OpenAI::Models::Responses::ResponseStatus::OrSymbol, - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol), - usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::Util::AnyHash), + usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::AnyHash), user: String, object: Symbol ) @@ -348,7 +345,7 @@ module OpenAI def to_hash end - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason why the response is incomplete. sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } attr_reader :reason @@ -370,7 +367,7 @@ module OpenAI # The reason why the response is incomplete. module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } @@ -392,7 +389,7 @@ module OpenAI # response. See the `tools` parameter to see how to specify which tools the model # can call. 
module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -412,7 +409,7 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi index af937de8..17fa0ac5 100644 --- a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioDeltaEvent < OpenAI::BaseModel + class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # A chunk of Base64 encoded response audio bytes. sig { returns(String) } attr_accessor :delta diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_done_event.rbi index 2f302c28..b25bf971 100644 --- a/rbi/lib/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioDoneEvent < OpenAI::BaseModel + class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi index f2206761..8c6021af 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # The partial transcript of the audio response. sig { returns(String) } attr_accessor :delta diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi index cb69a888..52a596c6 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # The type of the event. Always `response.audio.transcript.done`. 
sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 26d256d8..4c8bd0e9 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel # The partial code snippet added by the code interpreter. sig { returns(String) } attr_accessor :delta diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index c1512e6a..d1ec38b7 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel # The final code snippet output by the code interpreter. sig { returns(String) } attr_accessor :code diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index d5e6e2f9..5d1c5965 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -3,14 +3,14 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when the code interpreter call is completed. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 5d0d90a1..02d423ad 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -3,14 +3,14 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel # A tool call to run code. 
sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when a code interpreter call is in progress. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index f2f0f442..47f2b1a2 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -3,14 +3,14 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel # A tool call to run code. sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash) + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) ) .void end @@ -27,7 +27,7 @@ module OpenAI # Emitted when the code interpreter is actively interpreting the code snippet. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::Util::AnyHash), + code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), output_index: Integer, type: Symbol ) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 2f23866c..a6157f98 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCodeInterpreterToolCall < OpenAI::BaseModel + class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # The unique ID of the code interpreter tool call. sig { returns(String) } attr_accessor :id @@ -41,7 +41,7 @@ module OpenAI results: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], @@ -75,9 +75,9 @@ module OpenAI # The output of a code interpreter tool call that is text. module Result - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel # The logs of the code interpreter tool call. 
sig { returns(String) } attr_accessor :logs @@ -96,7 +96,7 @@ module OpenAI end end - class Files < OpenAI::BaseModel + class Files < OpenAI::Internal::Type::BaseModel sig { returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]) } attr_accessor :files @@ -110,7 +110,7 @@ module OpenAI files: T::Array[ T.any( OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ], type: Symbol @@ -132,7 +132,7 @@ module OpenAI def to_hash end - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -163,7 +163,7 @@ module OpenAI # The status of the code interpreter tool call. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 5ea9fdfe..c7774329 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseCompletedEvent < OpenAI::BaseModel + class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # Properties of the completed response. sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } attr_writer :response # The type of the event. Always `response.completed`. @@ -17,10 +17,7 @@ module OpenAI # Emitted when the model response is complete. sig do - params( - response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(response:, type: :"response.completed") diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index 18ad0198..8ced5e39 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCall < OpenAI::BaseModel + class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # The unique ID of the computer call. 
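Because `Result` merely extends the relocated `OpenAI::Internal::Type::Union`, its variants stay ordinary classes and can be distinguished with Ruby's `case`/`in` pattern matching. A sketch; the local `call` (a parsed `ResponseCodeInterpreterToolCall`) is assumed:

    case call.results&.first
    in OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs => logs
      puts logs.logs # the captured log output
    in OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files => files
      files.files.each { |f| puts f.file_id }
    in nil
      puts "no results"
    end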
sig { returns(String) } attr_accessor :id @@ -51,7 +51,7 @@ module OpenAI id: String, action: T.any( OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, @@ -62,12 +62,7 @@ module OpenAI OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait ), call_id: String, - pending_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, - OpenAI::Internal::Util::AnyHash - ) - ], + pending_safety_checks: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, OpenAI::Internal::AnyHash)], status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol ) @@ -104,9 +99,9 @@ module OpenAI # A click action. module Action - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Click < OpenAI::BaseModel + class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } @@ -155,7 +150,7 @@ module OpenAI # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. module Button - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } @@ -190,7 +185,7 @@ module OpenAI end end - class DoubleClick < OpenAI::BaseModel + class DoubleClick < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a double click action, this property is always set # to `double_click`. sig { returns(Symbol) } @@ -214,7 +209,7 @@ module OpenAI end end - class Drag < OpenAI::BaseModel + class Drag < OpenAI::Internal::Type::BaseModel # An array of coordinates representing the path of the drag action. Coordinates # will appear as an array of objects, eg # @@ -235,12 +230,7 @@ module OpenAI # A drag action. sig do params( - path: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, - OpenAI::Internal::Util::AnyHash - ) - ], + path: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, OpenAI::Internal::AnyHash)], type: Symbol ) .returns(T.attached_class) @@ -257,7 +247,7 @@ module OpenAI def to_hash end - class Path < OpenAI::BaseModel + class Path < OpenAI::Internal::Type::BaseModel # The x-coordinate. sig { returns(Integer) } attr_accessor :x @@ -277,7 +267,7 @@ module OpenAI end end - class Keypress < OpenAI::BaseModel + class Keypress < OpenAI::Internal::Type::BaseModel # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. sig { returns(T::Array[String]) } @@ -298,7 +288,7 @@ module OpenAI end end - class Move < OpenAI::BaseModel + class Move < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a move action, this property is always set to # `move`. 
sig { returns(Symbol) } @@ -322,7 +312,7 @@ module OpenAI end end - class Screenshot < OpenAI::BaseModel + class Screenshot < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. sig { returns(Symbol) } @@ -338,7 +328,7 @@ module OpenAI end end - class Scroll < OpenAI::BaseModel + class Scroll < OpenAI::Internal::Type::BaseModel # The horizontal scroll distance. sig { returns(Integer) } attr_accessor :scroll_x @@ -375,7 +365,7 @@ module OpenAI end end - class Type < OpenAI::BaseModel + class Type < OpenAI::Internal::Type::BaseModel # The text to type. sig { returns(String) } attr_accessor :text @@ -395,7 +385,7 @@ module OpenAI end end - class Wait < OpenAI::BaseModel + class Wait < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a wait action, this property is always set to # `wait`. sig { returns(Symbol) } @@ -421,7 +411,7 @@ module OpenAI end end - class PendingSafetyCheck < OpenAI::BaseModel + class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -447,7 +437,7 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } @@ -467,7 +457,7 @@ module OpenAI # The type of the computer call. Always `computer_call`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 5f0a22d5..fbfa11b5 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCallOutputItem < OpenAI::BaseModel + class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The unique ID of the computer call tool output. 
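With every `Action` variant now under `OpenAI::Internal::Type::BaseModel`, a drag path can be built from typed `Path` models or from bare `{x:, y:}` hashes via `AnyHash`. A sketch; that `type:` defaults to `:drag` is an assumption inferred from the tag defaults on the other variants:

    drag = OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag.new(
      # AnyHash stand-ins for Action::Drag::Path models (x/y Integer coordinates)
      path: [{x: 0, y: 0}, {x: 120, y: 80}]
    )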
sig { returns(String) } attr_accessor :id @@ -18,10 +18,7 @@ module OpenAI sig do params( - output: T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - OpenAI::Internal::Util::AnyHash - ) + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash) ) .void end @@ -47,7 +44,7 @@ module OpenAI acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -67,14 +64,11 @@ module OpenAI params( id: String, call_id: String, - output: T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - OpenAI::Internal::Util::AnyHash - ), + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ], status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, @@ -108,7 +102,7 @@ module OpenAI def to_hash end - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -134,7 +128,7 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index 1246a3c0..11d1b8d6 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. sig { returns(Symbol) } diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi index ef91ffdf..5bb9113e 100644 --- a/rbi/lib/openai/models/responses/response_content.rbi +++ b/rbi/lib/openai/models/responses/response_content.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses # Multi-modal input and output contents. 
module ResponseContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 9065af5e..669ce321 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseContentPartAddedEvent < OpenAI::BaseModel + class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that was added. sig { returns(Integer) } attr_accessor :content_index @@ -36,7 +36,7 @@ module OpenAI output_index: Integer, part: T.any( OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseOutputRefusal ), type: Symbol @@ -63,7 +63,7 @@ module OpenAI # The content part that was added. module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 1b8ece5c..7d1b7312 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseContentPartDoneEvent < OpenAI::BaseModel + class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that is done. sig { returns(Integer) } attr_accessor :content_index @@ -36,7 +36,7 @@ module OpenAI output_index: Integer, part: T.any( OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseOutputRefusal ), type: Symbol @@ -63,7 +63,7 @@ module OpenAI # The content part that is done. module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index ef7c1ca5..5fe13ae5 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseCreateParams < OpenAI::BaseModel + class ResponseCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -100,7 +100,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash))).void } + sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void } attr_writer :reasoning # Whether to store the generated model response for later retrieval via API. 
@@ -122,7 +122,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash)).void } + sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash)).void } attr_writer :text # How the model should select which tool (or tools) to use when generating a @@ -146,7 +146,7 @@ module OpenAI tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ) ) @@ -189,7 +189,7 @@ module OpenAI tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -234,7 +234,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -255,20 +255,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -277,7 +277,7 @@ module OpenAI top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -369,7 +369,7 @@ module OpenAI # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -402,7 +402,7 @@ module OpenAI # response. See the `tools` parameter to see how to specify which tools the model # can call. module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override @@ -422,7 +422,7 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
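A sketch of the `ResponseCreateParams` constructor whose signature appears above; the keyword defaults are assumptions, and `"gpt-4o"` is only a placeholder model name:

    params = OpenAI::Models::Responses::ResponseCreateParams.new(
      input: "Write a haiku about type signatures.", # String | item array
      model: "gpt-4o",   # bare String is accepted alongside ChatModel::OrSymbol
      temperature: 0.2,  # T.nilable(Float)
      truncation: :auto  # Truncation::OrSymbol (`auto` or `disabled`)
    )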
module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index 0ce79c50..ee2e69fc 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseCreatedEvent < OpenAI::BaseModel + class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # The response that was created. sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } attr_writer :response # The type of the event. Always `response.created`. @@ -17,10 +17,7 @@ module OpenAI # An event that is emitted when a response is created. sig do - params( - response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(response:, type: :"response.created") diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/lib/openai/models/responses/response_delete_params.rbi index 008c4bd9..ac0c636e 100644 --- a/rbi/lib/openai/models/responses/response_delete_params.rbi +++ b/rbi/lib/openai/models/responses/response_delete_params.rbi @@ -3,13 +3,17 @@ module OpenAI module Models module Responses - class ResponseDeleteParams < OpenAI::BaseModel + class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 4f682bd4..a6cb9d7e 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseError < OpenAI::BaseModel + class ResponseError < OpenAI::Internal::Type::BaseModel # The error code for the response. sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } attr_accessor :code @@ -26,7 +26,7 @@ module OpenAI # The error code for the response. 
module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/lib/openai/models/responses/response_error_event.rbi index 303a0210..55680bad 100644 --- a/rbi/lib/openai/models/responses/response_error_event.rbi +++ b/rbi/lib/openai/models/responses/response_error_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseErrorEvent < OpenAI::BaseModel + class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # The error code. sig { returns(T.nilable(String)) } attr_accessor :code diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index 8d43369b..fdbaf6f0 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseFailedEvent < OpenAI::BaseModel + class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # The response that failed. sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } attr_writer :response # The type of the event. Always `response.failed`. @@ -17,10 +17,7 @@ module OpenAI # An event that is emitted when a response fails. sig do - params( - response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(response:, type: :"response.failed") diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi index fd8d4c62..285df33d 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # The ID of the output item that the file search call is initiated. sig { returns(String) } attr_accessor :item_id diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi index 621c0706..e6d95bd6 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # The ID of the output item that the file search call is initiated. 
sig { returns(String) } attr_accessor :item_id diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi index 849e6603..a6f01d00 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel + class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # The ID of the output item that the file search call is initiated. sig { returns(String) } attr_accessor :item_id diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 050fdfc1..98e75a9c 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFileSearchToolCall < OpenAI::BaseModel + class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # The unique ID of the file search tool call. sig { returns(String) } attr_accessor :id @@ -34,7 +34,7 @@ module OpenAI queries: T::Array[String], status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, results: T.nilable( - T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Internal::AnyHash)] ), type: Symbol ) @@ -61,7 +61,7 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } @@ -83,7 +83,7 @@ module OpenAI end end - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. Keys are strings with a maximum @@ -149,7 +149,7 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 24160f1c..f05a3373 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -17,7 +17,7 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. 
module ResponseFormatTextConfig - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 8d657f44..294d5b32 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. sig { returns(String) } diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi index 5aada226..8d5a0f74 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # The function-call arguments delta that is added. sig { returns(String) } attr_accessor :delta diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi index ba8b13d1..49d5fc83 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # The function-call arguments. sig { returns(String) } attr_accessor :arguments diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index f7b0b0a7..32854d1a 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionToolCall < OpenAI::BaseModel + class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # A JSON string of the arguments to pass to the function. sig { returns(String) } attr_accessor :arguments @@ -71,7 +71,7 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index b6e0956c..f2d23ee3 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The unique ID of the function call tool output. sig { returns(String) } attr_accessor :id @@ -59,7 +59,7 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 080ec4db..b479dcf8 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseFunctionWebSearch < OpenAI::BaseModel + class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # The unique ID of the web search tool call. sig { returns(String) } attr_accessor :id @@ -41,7 +41,7 @@ module OpenAI # The status of the web search tool call. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index 21962a06..187ca04d 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseInProgressEvent < OpenAI::BaseModel + class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # The response that is in progress. sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } attr_writer :response # The type of the event. Always `response.in_progress`. @@ -17,10 +17,7 @@ module OpenAI # Emitted when the response is in progress. 
sig do - params( - response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(response:, type: :"response.in_progress") diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index f2015ad7..de36828b 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -12,7 +12,7 @@ module OpenAI # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. module ResponseIncludable - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index ab06154d..305b7c65 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -3,12 +3,12 @@ module OpenAI module Models module Responses - class ResponseIncompleteEvent < OpenAI::BaseModel + class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # The response that was incomplete. sig { returns(OpenAI::Models::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash)).void } + sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } attr_writer :response # The type of the event. Always `response.incomplete`. @@ -17,10 +17,7 @@ module OpenAI # An event that is emitted when a response finishes as incomplete. sig do - params( - response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(response:, type: :"response.incomplete") diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/lib/openai/models/responses/response_input.rbi index f86dda51..77a9217a 100644 --- a/rbi/lib/openai/models/responses/response_input.rbi +++ b/rbi/lib/openai/models/responses/response_input.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses ResponseInput = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index 7135e479..cbcdd1d1 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputAudio < OpenAI::BaseModel + class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # Base64-encoded audio data. sig { returns(String) } attr_accessor :data @@ -39,7 +39,7 @@ module OpenAI # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index d63620b1..e1a66fb0 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses # A text input to the model. module ResponseInputContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/lib/openai/models/responses/response_input_file.rbi index 05c60269..81323132 100644 --- a/rbi/lib/openai/models/responses/response_input_file.rbi +++ b/rbi/lib/openai/models/responses/response_input_file.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputFile < OpenAI::BaseModel + class ResponseInputFile < OpenAI::Internal::Type::BaseModel # The type of the input item. Always `input_file`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 11947564..a15a6bc9 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputImage < OpenAI::BaseModel + class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } @@ -53,7 +53,7 @@ module OpenAI # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index ab326c72..c01e2b4f 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -9,9 +9,9 @@ module OpenAI # `assistant` role are presumed to have been generated by the model in previous # interactions. module ResponseInputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. sig do @@ -54,7 +54,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -90,7 +90,7 @@ module OpenAI # The role of the message input. One of `user`, `system`, or `developer`. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } @@ -109,7 +109,7 @@ module OpenAI # The status of item. One of `in_progress`, `completed`, or `incomplete`. 
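The hash-or-model flexibility extends to message content: the `content` array in `ResponseInputItem::Message` takes typed parts or `AnyHash` literals interchangeably. A sketch; the `type:` tag defaults and the exact hash keys for the image part are assumptions:

    message = OpenAI::Models::Responses::ResponseInputItem::Message.new(
      role: :user, # Role::OrSymbol (`user`, `system`, or `developer`)
      content: [
        OpenAI::Models::Responses::ResponseInputText.new(text: "Describe this image."),
        {type: :input_image, detail: :auto} # AnyHash stand-in for ResponseInputImage
      ]
    )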
# Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } @@ -130,7 +130,7 @@ module OpenAI # The type of the message input. Always set to `message`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } @@ -145,7 +145,7 @@ module OpenAI end end - class ComputerCallOutput < OpenAI::BaseModel + class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The ID of the computer tool call that produced the output. sig { returns(String) } attr_accessor :call_id @@ -156,10 +156,7 @@ module OpenAI sig do params( - output: T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - OpenAI::Internal::Util::AnyHash - ) + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash) ) .void end @@ -192,7 +189,7 @@ module OpenAI acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ] ) @@ -212,15 +209,12 @@ module OpenAI sig do params( call_id: String, - output: T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - OpenAI::Internal::Util::AnyHash - ), + output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), id: String, acknowledged_safety_checks: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, @@ -254,7 +248,7 @@ module OpenAI def to_hash end - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -280,7 +274,7 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } @@ -312,7 +306,7 @@ module OpenAI end end - class FunctionCallOutput < OpenAI::BaseModel + class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # The unique ID of the function tool call generated by the model. sig { returns(String) } attr_accessor :call_id @@ -373,7 +367,7 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } @@ -405,7 +399,7 @@ module OpenAI end end - class ItemReference < OpenAI::BaseModel + class ItemReference < OpenAI::Internal::Type::BaseModel # The ID of the item to reference. 
sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi index b06ad65b..f25328f7 100644 --- a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_content_list.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses ResponseInputMessageContentList = T.let( - OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 60b55701..90c6a533 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputMessageItem < OpenAI::BaseModel + class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # The unique ID of the message input. sig { returns(String) } attr_accessor :id @@ -48,7 +48,7 @@ module OpenAI content: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile ) @@ -85,7 +85,7 @@ module OpenAI # The role of the message input. One of `user`, `system`, or `developer`. module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } OrSymbol = @@ -103,7 +103,7 @@ module OpenAI # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } @@ -123,7 +123,7 @@ module OpenAI # The type of the message input. Always set to `message`. module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/lib/openai/models/responses/response_input_text.rbi index 5c0838ec..94dd8553 100644 --- a/rbi/lib/openai/models/responses/response_input_text.rbi +++ b/rbi/lib/openai/models/responses/response_input_text.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseInputText < OpenAI::BaseModel + class ResponseInputText < OpenAI::Internal::Type::BaseModel # The text input to the model. sig { returns(String) } attr_accessor :text diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index b57094c5..3a0b1c92 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses # Content item used to generate a response. 
module ResponseItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 93cf118a..5b5f1583 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseItemList < OpenAI::BaseModel + class ResponseItemList < OpenAI::Internal::Type::BaseModel # A list of items used to generate this response. sig do returns( @@ -45,7 +45,7 @@ module OpenAI data: T::Array[ T.any( OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/lib/openai/models/responses/response_output_audio.rbi index c5a26f54..53e9b4c3 100644 --- a/rbi/lib/openai/models/responses/response_output_audio.rbi +++ b/rbi/lib/openai/models/responses/response_output_audio.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputAudio < OpenAI::BaseModel + class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # Base64-encoded audio data from the model. sig { returns(String) } attr_accessor :data diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index 9d893172..5827fb3d 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses # An output message from the model. module ResponseOutputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 735922c6..3e5daa2e 100644 --- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputItemAddedEvent < OpenAI::BaseModel + class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # The output item that was added. sig do returns( @@ -32,7 +32,7 @@ module OpenAI params( item: T.any( OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index 8dde062b..e7625f50 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseOutputItemDoneEvent < OpenAI::BaseModel + class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # The output item that was marked done. 
        sig do
          returns(
@@ -32,7 +32,7 @@ module OpenAI
         params(
           item: T.any(
             OpenAI::Models::Responses::ResponseOutputMessage,
-            OpenAI::Internal::Util::AnyHash,
+            OpenAI::Internal::AnyHash,
             OpenAI::Models::Responses::ResponseFileSearchToolCall,
             OpenAI::Models::Responses::ResponseFunctionToolCall,
             OpenAI::Models::Responses::ResponseFunctionWebSearch,
diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi
index 8b07428a..0a39a8eb 100644
--- a/rbi/lib/openai/models/responses/response_output_message.rbi
+++ b/rbi/lib/openai/models/responses/response_output_message.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputMessage < OpenAI::BaseModel
+      class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel
         # The unique ID of the output message.
         sig { returns(String) }
         attr_accessor :id
@@ -36,7 +36,7 @@ module OpenAI
           content: T::Array[
           T.any(
             OpenAI::Models::Responses::ResponseOutputText,
-            OpenAI::Internal::Util::AnyHash,
+            OpenAI::Internal::AnyHash,
             OpenAI::Models::Responses::ResponseOutputRefusal
           )
           ],
@@ -66,7 +66,7 @@ module OpenAI
         # A text output from the model.
         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           sig do
             override
@@ -79,7 +79,7 @@ module OpenAI
         # The status of the message input. One of `in_progress`, `completed`, or
         # `incomplete`. Populated when input items are returned via API.
         module Status
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) }
           OrSymbol =
diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/lib/openai/models/responses/response_output_refusal.rbi
index 5e416cf9..9fff39df 100644
--- a/rbi/lib/openai/models/responses/response_output_refusal.rbi
+++ b/rbi/lib/openai/models/responses/response_output_refusal.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputRefusal < OpenAI::BaseModel
+      class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel
         # The refusal explanation from the model.
         sig { returns(String) }
         attr_accessor :refusal
diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi
index d8aeb7e6..e8092e12 100644
--- a/rbi/lib/openai/models/responses/response_output_text.rbi
+++ b/rbi/lib/openai/models/responses/response_output_text.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseOutputText < OpenAI::BaseModel
+      class ResponseOutputText < OpenAI::Internal::Type::BaseModel
         # The annotations of the text output.
         sig do
           returns(
@@ -32,7 +32,7 @@ module OpenAI
           annotations: T::Array[
           T.any(
             OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation,
-              OpenAI::Internal::Util::AnyHash,
+              OpenAI::Internal::AnyHash,
             OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation,
             OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath
           )
           ],
@@ -66,9 +66,9 @@ module OpenAI

         # A citation to a file.
         module Annotation
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

-          class FileCitation < OpenAI::BaseModel
+          class FileCitation < OpenAI::Internal::Type::BaseModel
             # The ID of the file.
sig { returns(String) } attr_accessor :file_id @@ -91,7 +91,7 @@ module OpenAI end end - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # The index of the last character of the URL citation in the message. sig { returns(Integer) } attr_accessor :end_index @@ -135,7 +135,7 @@ module OpenAI end end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file. sig { returns(String) } attr_accessor :file_id diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index bd3303b1..3359bd3e 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseReasoningItem < OpenAI::BaseModel + class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # The unique identifier of the reasoning content. sig { returns(String) } attr_accessor :id @@ -29,7 +29,7 @@ module OpenAI sig do params( id: String, - summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Internal::Util::AnyHash)], + summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Internal::AnyHash)], status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) @@ -52,7 +52,7 @@ module OpenAI def to_hash end - class Summary < OpenAI::BaseModel + class Summary < OpenAI::Internal::Type::BaseModel # A short summary of the reasoning used by the model when generating the response. sig { returns(String) } attr_accessor :text @@ -73,7 +73,7 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi index cbbd66ed..1fffadb6 100644 --- a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseRefusalDeltaEvent < OpenAI::BaseModel + class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that the refusal text is added to. sig { returns(Integer) } attr_accessor :content_index diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi index 1f24f6df..6b9582e9 100644 --- a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseRefusalDoneEvent < OpenAI::BaseModel + class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that the refusal text is finalized. 
sig { returns(Integer) } attr_accessor :content_index diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index a6c61a0f..d1308648 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseRetrieveParams < OpenAI::BaseModel + class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -18,7 +18,7 @@ module OpenAI sig do params( include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 138c0188..c69a253e 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -6,7 +6,7 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. module ResponseStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index e04029c8..ced70ddc 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses # Emitted when there is a partial audio response. module ResponseStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig do override diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 689a3028..e448a3e2 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel + class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # A citation to a file. sig do returns( @@ -41,7 +41,7 @@ module OpenAI params( annotation: T.any( OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath ), @@ -85,9 +85,9 @@ module OpenAI # A citation to a file. module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -110,7 +110,7 @@ module OpenAI end end - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel # The index of the last character of the URL citation in the message. 
sig { returns(Integer) } attr_accessor :end_index @@ -154,7 +154,7 @@ module OpenAI end end - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file. sig { returns(String) } attr_accessor :file_id diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index 1aad6df7..b5d1a866 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextConfig < OpenAI::BaseModel + class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # An object specifying the format that the model must output. # # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -34,7 +34,7 @@ module OpenAI params( format_: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) @@ -52,7 +52,7 @@ module OpenAI params( format_: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject ) diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_delta_event.rbi index efb4c979..7bbc55cd 100644 --- a/rbi/lib/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_delta_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextDeltaEvent < OpenAI::BaseModel + class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that the text delta was added to. sig { returns(Integer) } attr_accessor :content_index diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/lib/openai/models/responses/response_text_done_event.rbi index b8e7bf9a..743e9572 100644 --- a/rbi/lib/openai/models/responses/response_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_done_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseTextDoneEvent < OpenAI::BaseModel + class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # The index of the content part that the text content is finalized. sig { returns(Integer) } attr_accessor :content_index diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index c9ac5ea8..47ce8244 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseUsage < OpenAI::BaseModel + class ResponseUsage < OpenAI::Internal::Type::BaseModel # The number of input tokens. 
sig { returns(Integer) } attr_accessor :input_tokens @@ -14,7 +14,7 @@ module OpenAI sig do params( - input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::Util::AnyHash) + input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::AnyHash) ) .void end @@ -30,7 +30,7 @@ module OpenAI sig do params( - output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::Util::AnyHash) + output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::AnyHash) ) .void end @@ -45,9 +45,9 @@ module OpenAI sig do params( input_tokens: Integer, - input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::Util::AnyHash), + input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::AnyHash), output_tokens: Integer, - output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::Util::AnyHash), + output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::AnyHash), total_tokens: Integer ) .returns(T.attached_class) @@ -76,7 +76,7 @@ module OpenAI def to_hash end - class InputTokensDetails < OpenAI::BaseModel + class InputTokensDetails < OpenAI::Internal::Type::BaseModel # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). sig { returns(Integer) } @@ -92,7 +92,7 @@ module OpenAI end end - class OutputTokensDetails < OpenAI::BaseModel + class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # The number of reasoning tokens. sig { returns(Integer) } attr_accessor :reasoning_tokens diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi index b02a5b1f..160537d1 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # Unique ID for the output item associated with the web search call. sig { returns(String) } attr_accessor :item_id diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi index 11aa3340..428a1aa1 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # Unique ID for the output item associated with the web search call. 
        sig { returns(String) }
        attr_accessor :item_id
diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi
index c97724d2..6edd9004 100644
--- a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi
+++ b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel
+      class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel
         # Unique ID for the output item associated with the web search call.
         sig { returns(String) }
         attr_accessor :item_id
diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi
index 0f080b1b..edc55ed0 100644
--- a/rbi/lib/openai/models/responses/tool.rbi
+++ b/rbi/lib/openai/models/responses/tool.rbi
@@ -7,7 +7,7 @@ module OpenAI
       # the
       # [file search tool](https://platform.openai.com/docs/guides/tools-file-search).
       module Tool
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         sig do
           override
diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/lib/openai/models/responses/tool_choice_function.rbi
index 29aa1b6c..d22afe4e 100644
--- a/rbi/lib/openai/models/responses/tool_choice_function.rbi
+++ b/rbi/lib/openai/models/responses/tool_choice_function.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ToolChoiceFunction < OpenAI::BaseModel
+      class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel
         # The name of the function to call.
         sig { returns(String) }
         attr_accessor :name
diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi
index 722831af..9eeb2f91 100644
--- a/rbi/lib/openai/models/responses/tool_choice_options.rbi
+++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi
@@ -12,7 +12,7 @@ module OpenAI
       #
       # `required` means the model must call one or more tools.
       module ToolChoiceOptions
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) }
         OrSymbol =
diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi
index 75251556..9969a8c5 100644
--- a/rbi/lib/openai/models/responses/tool_choice_types.rbi
+++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi
@@ -3,7 +3,7 @@
 module OpenAI
   module Models
     module Responses
-      class ToolChoiceTypes < OpenAI::BaseModel
+      class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel
         # The type of hosted tool the model should use. Learn more about
         # [built-in tools](https://platform.openai.com/docs/guides/tools).
# @@ -34,7 +34,7 @@ module OpenAI # - `web_search_preview` # - `computer_use_preview` module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index ba497fb4..5558e18f 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Responses - class WebSearchTool < OpenAI::BaseModel + class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. One of: # # - `web_search_preview` @@ -24,7 +24,7 @@ module OpenAI sig do params( - user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::Util::AnyHash)) + user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::AnyHash)) ) .void end @@ -37,7 +37,7 @@ module OpenAI params( type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, - user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::Util::AnyHash)) + user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::AnyHash)) ) .returns(T.attached_class) end @@ -62,7 +62,7 @@ module OpenAI # - `web_search_preview` # - `web_search_preview_2025_03_11` module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } OrSymbol = @@ -81,7 +81,7 @@ module OpenAI # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. module SearchContextSize - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } @@ -97,7 +97,7 @@ module OpenAI end end - class UserLocation < OpenAI::BaseModel + class UserLocation < OpenAI::Internal::Type::BaseModel # The type of location approximation. Always `approximate`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 0c8b5a11..841c3432 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module ResponsesModel - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } def self.variants diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index bb5abe4d..8ac37465 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class StaticFileChunkingStrategy < OpenAI::BaseModel + class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. 
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index 8525371f..4976ca0e 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -2,11 +2,11 @@ module OpenAI module Models - class StaticFileChunkingStrategyObject < OpenAI::BaseModel + class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } attr_reader :static - sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash)).void } + sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash)).void } attr_writer :static # Always `static`. @@ -14,10 +14,7 @@ module OpenAI attr_accessor :type sig do - params( - static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(static:, type: :static) diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index fe747e98..b3f1b26c 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -2,11 +2,11 @@ module OpenAI module Models - class StaticFileChunkingStrategyObjectParam < OpenAI::BaseModel + class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } attr_reader :static - sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash)).void } + sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash)).void } attr_writer :static # Always `static`. @@ -15,10 +15,7 @@ module OpenAI # Customize your own chunking strategy by setting chunk size and chunk overlap. sig do - params( - static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::Util::AnyHash), - type: Symbol - ) + params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end def self.new(static:, type: :static) diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index feefd876..e3342969 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class Upload < OpenAI::BaseModel + class Upload < OpenAI::Internal::Type::BaseModel # The Upload unique identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -41,7 +41,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::FileObject)) } attr_reader :file - sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::Util::AnyHash))).void } + sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::AnyHash))).void } attr_writer :file # The Upload object can accept byte chunks in the form of Parts. 
@@ -54,7 +54,7 @@ module OpenAI filename: String, purpose: String, status: OpenAI::Models::Upload::Status::OrSymbol, - file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::Util::AnyHash)), + file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::AnyHash)), object: Symbol ) .returns(T.attached_class) @@ -93,7 +93,7 @@ module OpenAI # The status of the Upload. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Upload::Status::TaggedSymbol) } diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/lib/openai/models/upload_cancel_params.rbi index 706ec1dd..f58f71bc 100644 --- a/rbi/lib/openai/models/upload_cancel_params.rbi +++ b/rbi/lib/openai/models/upload_cancel_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class UploadCancelParams < OpenAI::BaseModel + class UploadCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index 9558d974..3478dd28 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class UploadCompleteParams < OpenAI::BaseModel + class UploadCompleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -22,7 +22,7 @@ module OpenAI params( part_ids: T::Array[String], md5: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 50f627d5..aacc2ab0 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class UploadCreateParams < OpenAI::BaseModel + class UploadCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -34,7 +34,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index b840d72b..43f97062 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Uploads - class PartCreateParams < OpenAI::BaseModel + class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter 
include OpenAI::Internal::Type::RequestParameters @@ -14,7 +14,7 @@ module OpenAI sig do params( data: T.any(IO, StringIO), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index 43223349..5e5b8819 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module Uploads - class UploadPart < OpenAI::BaseModel + class UploadPart < OpenAI::Internal::Type::BaseModel # The upload Part unique identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index e0277e39..0c48914f 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -14,7 +14,7 @@ module OpenAI sig { returns(OpenAI::Models::VectorStore::FileCounts) } attr_reader :file_counts - sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::Util::AnyHash)).void } + sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::AnyHash)).void } attr_writer :file_counts # The Unix timestamp (in seconds) for when the vector store was last active. @@ -52,10 +52,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::VectorStore::ExpiresAfter)) } attr_reader :expires_after - sig do - params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::Util::AnyHash)) - .void - end + sig { params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash)).void } attr_writer :expires_after # The Unix timestamp (in seconds) for when the vector store will expire. @@ -68,13 +65,13 @@ module OpenAI params( id: String, created_at: Integer, - file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::Util::AnyHash), + file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::AnyHash), last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, status: OpenAI::Models::VectorStore::Status::OrSymbol, usage_bytes: Integer, - expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::Util::AnyHash), + expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash), expires_at: T.nilable(Integer), object: Symbol ) @@ -116,7 +113,7 @@ module OpenAI def to_hash end - class FileCounts < OpenAI::BaseModel + class FileCounts < OpenAI::Internal::Type::BaseModel # The number of files that were cancelled. sig { returns(Integer) } attr_accessor :cancelled @@ -168,7 +165,7 @@ module OpenAI # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStore::Status::TaggedSymbol) } @@ -182,7 +179,7 @@ module OpenAI end end - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 86965352..36bee1e6 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreCreateParams < OpenAI::BaseModel + class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -24,7 +24,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -38,7 +38,7 @@ module OpenAI sig do params( - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash) + expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash) ) .void end @@ -73,14 +73,14 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash), + expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -113,7 +113,7 @@ module OpenAI def to_hash end - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
sig { returns(Symbol) } diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/lib/openai/models/vector_store_delete_params.rbi index 9ea9ee73..9f646bad 100644 --- a/rbi/lib/openai/models/vector_store_delete_params.rbi +++ b/rbi/lib/openai/models/vector_store_delete_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class VectorStoreDeleteParams < OpenAI::BaseModel + class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/vector_store_deleted.rbi b/rbi/lib/openai/models/vector_store_deleted.rbi index bd0e83c0..7143c3a2 100644 --- a/rbi/lib/openai/models/vector_store_deleted.rbi +++ b/rbi/lib/openai/models/vector_store_deleted.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreDeleted < OpenAI::BaseModel + class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index ef5af052..bbe5e54f 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreListParams < OpenAI::BaseModel + class VectorStoreListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -48,7 +48,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -73,7 +73,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/lib/openai/models/vector_store_retrieve_params.rbi index f73d277f..a8ea5ee3 100644 --- a/rbi/lib/openai/models/vector_store_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_store_retrieve_params.rbi @@ -2,13 +2,17 @@ module OpenAI module Models - class VectorStoreRetrieveParams < OpenAI::BaseModel + class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters sig do - params(request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) - .returns(T.attached_class) + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) end def self.new(request_options: {}) end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 15dfc27f..7019fb58 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreSearchParams < OpenAI::BaseModel + class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -16,7 +16,7 @@ module OpenAI sig do params( - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter) + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) ) .void end @@ -36,7 +36,7 @@ module OpenAI sig do params( - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash) + ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash) ) .void end @@ -52,11 +52,11 @@ module OpenAI sig do params( query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter), + filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash), + ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash), rewrite_query: T::Boolean, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -88,25 +88,25 @@ module OpenAI # A query string for a search module Query - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } def self.variants end - StringArray = T.let(OpenAI::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end # A filter to apply based on file attributes. 
module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } def self.variants end end - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } attr_reader :ranker @@ -140,7 +140,7 @@ module OpenAI end module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 05fd7444..a024e03c 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreSearchResponse < OpenAI::BaseModel + class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. Keys are strings with a maximum @@ -30,7 +30,7 @@ module OpenAI sig do params( attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Internal::Util::AnyHash)], + content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Internal::AnyHash)], file_id: String, filename: String, score: Float @@ -56,14 +56,14 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants end end - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel # The text content returned from search. sig { returns(String) } attr_accessor :text @@ -88,7 +88,7 @@ module OpenAI # The type of content. 
module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } OrSymbol = diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 1982bdfd..7f3e0224 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -2,7 +2,7 @@ module OpenAI module Models - class VectorStoreUpdateParams < OpenAI::BaseModel + class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -12,7 +12,7 @@ module OpenAI sig do params( - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)) + expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)) ) .void end @@ -33,10 +33,10 @@ module OpenAI sig do params( - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)), + expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -57,7 +57,7 @@ module OpenAI def to_hash end - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
sig { returns(Symbol) } diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi index b8985360..3528e39d 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileBatchCancelParams < OpenAI::BaseModel + class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 447f5e35..2d21b5fc 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileBatchCreateParams < OpenAI::BaseModel + class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -39,7 +39,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -53,10 +53,10 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -81,7 +81,7 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 07693f31..242a405d 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileBatchListFilesParams < OpenAI::BaseModel + class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -61,7 +61,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -95,7 +95,7 @@ module OpenAI # Filter by file status. 
One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } @@ -118,7 +118,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi index 1d9ae1fd..465c7bdb 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileBatchRetrieveParams < OpenAI::BaseModel + class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi index 3889fd16..98263089 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileContentParams < OpenAI::BaseModel + class FileContentParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/lib/openai/models/vector_stores/file_content_response.rbi index 1fad3ed9..9b48e97a 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_response.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileContentResponse < OpenAI::BaseModel + class FileContentResponse < OpenAI::Internal::Type::BaseModel # The text content sig { returns(T.nilable(String)) } attr_reader :text diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 221d7a92..dc5f40b1 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileCreateParams < OpenAI::BaseModel + class FileCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -39,7 +39,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - 
OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ) ) @@ -53,10 +53,10 @@ module OpenAI attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -81,7 +81,7 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi index 572a5963..940154a6 100644 --- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileDeleteParams < OpenAI::BaseModel + class FileDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 9f4a5c71..a13740f0 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileListParams < OpenAI::BaseModel + class FileListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -57,7 +57,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -82,7 +82,7 @@ module OpenAI # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } OrSymbol = @@ -101,7 +101,7 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } OrSymbol = diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi index 48f7edba..553291b7 100644 --- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileRetrieveParams < OpenAI::BaseModel + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -13,7 +13,7 @@ module OpenAI sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 294449c4..eefd1019 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class FileUpdateParams < OpenAI::BaseModel + class FileUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -22,7 +22,7 @@ module OpenAI params( vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash) + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -43,7 +43,7 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 30c4c023..81d9bc52 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class VectorStoreFile < OpenAI::BaseModel + class VectorStoreFile < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. 
sig { returns(String) } attr_accessor :id @@ -19,9 +19,7 @@ module OpenAI sig do params( - last_error: T.nilable( - T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::Util::AnyHash) - ) + last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::AnyHash)) ) .void end @@ -71,7 +69,7 @@ module OpenAI params( chunking_strategy: T.any( OpenAI::Models::StaticFileChunkingStrategyObject, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::OtherFileChunkingStrategyObject ) ) @@ -84,16 +82,14 @@ module OpenAI params( id: String, created_at: Integer, - last_error: T.nilable( - T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::Util::AnyHash) - ), + last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::AnyHash)), status: OpenAI::Models::VectorStores::VectorStoreFile::Status::OrSymbol, usage_bytes: Integer, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), chunking_strategy: T.any( OpenAI::Models::StaticFileChunkingStrategyObject, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::OtherFileChunkingStrategyObject ), object: Symbol @@ -132,7 +128,7 @@ module OpenAI def to_hash end - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel # One of `server_error` or `rate_limit_exceeded`. sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } attr_accessor :code @@ -161,7 +157,7 @@ module OpenAI # One of `server_error` or `rate_limit_exceeded`. module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } @@ -185,7 +181,7 @@ module OpenAI # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } OrSymbol = @@ -202,7 +198,7 @@ module OpenAI end module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } def self.variants diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index b71f8619..6f9eb44c 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class VectorStoreFileBatch < OpenAI::BaseModel + class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. 
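The batch object typed below aggregates progress in `file_counts`. A hedged polling sketch: the retrieval call shape mirrors the `file_batches.retrieve` signature later in this patch, the IDs are placeholders, and the count field names beyond `cancelled` are assumed from the API shape rather than shown in this hunk:

    batch = client.vector_stores.file_batches.retrieve("vsfb_123", vector_store_id: "vs_123")

    counts = batch.file_counts
    puts format(
      "%d/%d files processed (%d failed, %d cancelled)",
      counts.completed, counts.total, counts.failed, counts.cancelled
    )
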
sig { returns(String) } attr_accessor :id @@ -18,7 +18,7 @@ module OpenAI sig do params( - file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::Util::AnyHash) + file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::AnyHash) ) .void end @@ -45,7 +45,7 @@ module OpenAI params( id: String, created_at: Integer, - file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::Util::AnyHash), + file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::AnyHash), status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::OrSymbol, vector_store_id: String, object: Symbol @@ -78,7 +78,7 @@ module OpenAI def to_hash end - class FileCounts < OpenAI::BaseModel + class FileCounts < OpenAI::Internal::Type::BaseModel # The number of files that were cancelled. sig { returns(Integer) } attr_accessor :cancelled @@ -129,7 +129,7 @@ module OpenAI # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } OrSymbol = diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi index 6c4d25fa..702d956c 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -3,7 +3,7 @@ module OpenAI module Models module VectorStores - class VectorStoreFileDeleted < OpenAI::BaseModel + class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } attr_accessor :id diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index 9c665553..017b80bf 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -6,7 +6,7 @@ module OpenAI # # When making a request, you can pass an actual {RequestOptions} instance, or # simply pass a Hash with symbol keys matching the attributes on this class. - class RequestOptions < OpenAI::BaseModel + class RequestOptions < OpenAI::Internal::Type::BaseModel # @api private sig { params(opts: T.any(T.self_type, T::Hash[Symbol, T.anything])).void } def self.validate!(opts) @@ -41,7 +41,7 @@ module OpenAI attr_accessor :timeout # Returns a new instance of RequestOptions.
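Per the constructor signature just below, `RequestOptions.new` now takes a plain symbol-keyed hash (`OpenAI::Internal::AnyHash`), and every `request_options:` parameter in the resource signatures accepts either the typed object or a bare hash. A small sketch, with the model ID and timeout value chosen arbitrarily:

    # Equivalent per-request overrides:
    opts = OpenAI::RequestOptions.new(timeout: 30)
    client.models.retrieve("gpt-4o", request_options: opts)
    client.models.retrieve("gpt-4o", request_options: {timeout: 30})
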
- sig { params(values: OpenAI::Internal::Util::AnyHash).returns(T.attached_class) } + sig { params(values: OpenAI::Internal::AnyHash).returns(T.attached_class) } def self.new(values = {}) end end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index 9b5892ad..43967cf5 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -13,7 +13,7 @@ module OpenAI instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(StringIO) end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 8ec0dc0f..d0f71924 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -16,7 +16,7 @@ module OpenAI temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)) end @@ -78,7 +78,7 @@ module OpenAI temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 2e07bd58..448c96b3 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -12,7 +12,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)) end diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index acaa4e55..f723b248 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -10,7 +10,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Batch) end @@ -48,7 +48,7 @@ module OpenAI sig do params( batch_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Batch) end @@ -64,7 +64,7 @@ module OpenAI params( after: String, limit: 
Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Batch]) end @@ -87,7 +87,7 @@ module OpenAI sig do params( batch_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Batch) end diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 00296adc..bbbe925b 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -17,25 +17,23 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Assistant) end @@ -115,7 +113,7 @@ module OpenAI sig do params( assistant_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Assistant) end @@ -140,25 +138,23 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) ], top_p: T.nilable(Float), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Assistant) end @@ -243,7 +239,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant]) end @@ -272,7 +268,7 @@ module OpenAI sig do params( assistant_id: String, - request_options: 
T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::AssistantDeleted) end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 64816d6d..4a22f258 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -13,10 +13,10 @@ module OpenAI # Create a thread. sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::Util::AnyHash)], + messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)), + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) end @@ -44,7 +44,7 @@ module OpenAI sig do params( thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) end @@ -60,8 +60,8 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::Util::AnyHash)), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Thread) end @@ -88,7 +88,7 @@ module OpenAI sig do params( thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::ThreadDeleted) end @@ -113,28 +113,26 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash), + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, 
OpenAI::Models::Beta::FunctionTool ) @@ -142,10 +140,10 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -259,28 +257,26 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::Util::AnyHash), + thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), tool_choice: T.nilable( T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::Util::AnyHash) - ), + tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -288,10 +284,10 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index a412e38b..8585935d 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -14,7 +14,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam ) @@ -22,10 +22,10 @@ module OpenAI ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Message) end @@ -60,7 +60,7 @@ module OpenAI params( message_id: String, 
thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Message) end @@ -80,7 +80,7 @@ module OpenAI message_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Message) end @@ -109,7 +109,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message]) end @@ -144,7 +144,7 @@ module OpenAI params( message_id: String, thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::MessageDeleted) end diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 43be9231..a1f66f91 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -16,7 +16,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), @@ -29,7 +29,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -39,14 +39,14 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -54,10 +54,10 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -185,7 +185,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), additional_messages: T.nilable( - 
T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), @@ -198,7 +198,7 @@ module OpenAI T.any( Symbol, OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema ) @@ -208,14 +208,14 @@ module OpenAI T.any( OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ) ), tools: T.nilable( T::Array[ T.any( OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool ) @@ -223,10 +223,10 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::Util::AnyHash) + T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns( OpenAI::Internal::Stream[ @@ -380,7 +380,7 @@ module OpenAI params( run_id: String, thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -400,7 +400,7 @@ module OpenAI run_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -429,7 +429,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run]) end @@ -461,7 +461,7 @@ module OpenAI params( run_id: String, thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -482,14 +482,9 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, - OpenAI::Internal::Util::AnyHash - ) - ], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -517,14 +512,9 @@ module OpenAI params( run_id: String, 
thread_id: String, - tool_outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, - OpenAI::Internal::Util::AnyHash - ) - ], + tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index d035232e..81727780 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -13,7 +13,7 @@ module OpenAI thread_id: String, run_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) end @@ -47,7 +47,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index ebf756a5..7a170eb6 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -29,7 +29,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -38,14 +38,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -54,12 +54,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)), + prediction: 
T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -67,20 +67,20 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash), + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -297,7 +297,7 @@ module OpenAI messages: T::Array[ T.any( OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, @@ -306,14 +306,14 @@ module OpenAI ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::Util::AnyHash)), + audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), frequency_penalty: T.nilable(Float), function_call: T.any( OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::Util::AnyHash)], + functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), @@ -322,12 +322,12 @@ module OpenAI modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::Util::AnyHash)), + prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, 
OpenAI::Internal::AnyHash)), presence_penalty: T.nilable(Float), reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), response_format: T.any( OpenAI::Models::ResponseFormatText, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject ), @@ -335,20 +335,20 @@ module OpenAI service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), temperature: T.nilable(Float), tool_choice: T.any( OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::Util::AnyHash + OpenAI::Internal::AnyHash ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::Util::AnyHash)], + tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::Util::AnyHash), + web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end @@ -548,7 +548,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -566,7 +566,7 @@ module OpenAI params( completion_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -593,7 +593,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion]) end @@ -620,7 +620,7 @@ module OpenAI sig do params( completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Chat::ChatCompletionDeleted) end diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 447ff6c0..440c076c 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -13,7 +13,7 @@ module OpenAI after: String, limit: 
Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 804db29b..08f12a97 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -25,13 +25,13 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Completion) end @@ -168,13 +168,13 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::Util::AnyHash)), + stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion]) end diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 57d7fdd6..182cc4d0 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -11,7 +11,7 @@ module OpenAI dimensions: Integer, encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::CreateEmbeddingResponse) end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 18c23250..322f5e32 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -28,7 +28,7 @@ module OpenAI params( file: T.any(IO, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FileObject) end @@ -48,7 +48,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FileObject) end @@ -66,7 +66,7 @@ module OpenAI limit: Integer, order: 
OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FileObject]) end @@ -92,7 +92,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FileDeleted) end @@ -107,7 +107,7 @@ module OpenAI sig do params( file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(StringIO) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 81ceb5d7..2370a56a 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -18,16 +18,16 @@ module OpenAI params( model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), training_file: String, - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::Util::AnyHash), + hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash), integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::Util::AnyHash)] + T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::Util::AnyHash), + method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash), seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -100,7 +100,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -117,7 +117,7 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end @@ -137,7 +137,7 @@ module OpenAI sig do params( fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -154,7 +154,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: 
T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index 3274da29..d165ca24 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -11,7 +11,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 58353329..515ec04c 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -12,7 +12,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -52,7 +52,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end @@ -98,7 +98,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::ImagesResponse) end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index c78bca31..8a21caa0 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -8,7 +8,7 @@ module OpenAI sig do params( model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Model) end @@ -22,7 +22,7 @@ module OpenAI # Lists the currently available models, and provides basic information about each # one such as the owner and availability. 
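A short sketch of the model endpoints typed above and below, again reusing the earlier `client`; the `gpt-4o` ID is an arbitrary example, and the pagination helper is assumed from the `Page` return type:

    model = client.models.retrieve("gpt-4o")
    puts "#{model.id}, owned by #{model.owned_by}"

    # `list` returns OpenAI::Internal::Page; iterate it without manual cursors
    client.models.list.auto_paging_each { |m| puts m.id }
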
sig do - params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))) + params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))) .returns(OpenAI::Internal::Page[OpenAI::Models::Model]) end def list(request_options: {}) @@ -33,7 +33,7 @@ module OpenAI sig do params( model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::ModelDeleted) end diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 1b180936..8b4b2f36 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -13,13 +13,13 @@ module OpenAI T::Array[ T.any( OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::ModerationTextInput ) ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 585d0875..1999f3c5 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -24,7 +24,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, @@ -45,20 +45,20 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)), + reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash), + text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), tool_choice: T.any( OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ T.any( OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool @@ -68,7 +68,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash)) + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Responses::Response) end @@ -202,7 +202,7 @@ module OpenAI T::Array[ T.any( OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::Util::AnyHash, + OpenAI::Internal::AnyHash, OpenAI::Models::Responses::ResponseInputItem::Message, 
            OpenAI::Models::Responses::ResponseOutputMessage,
            OpenAI::Models::Responses::ResponseFileSearchToolCall,
@@ -223,20 +223,20 @@ module OpenAI
          metadata: T.nilable(T::Hash[Symbol, String]),
          parallel_tool_calls: T.nilable(T::Boolean),
          previous_response_id: T.nilable(String),
-         reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::Util::AnyHash)),
+         reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)),
          store: T.nilable(T::Boolean),
          temperature: T.nilable(Float),
-         text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::Util::AnyHash),
+         text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash),
          tool_choice: T.any(
            OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol,
            OpenAI::Models::Responses::ToolChoiceTypes,
-           OpenAI::Internal::Util::AnyHash,
+           OpenAI::Internal::AnyHash,
            OpenAI::Models::Responses::ToolChoiceFunction
          ),
          tools: T::Array[
            T.any(
              OpenAI::Models::Responses::FileSearchTool,
-             OpenAI::Internal::Util::AnyHash,
+             OpenAI::Internal::AnyHash,
              OpenAI::Models::Responses::FunctionTool,
              OpenAI::Models::Responses::ComputerTool,
              OpenAI::Models::Responses::WebSearchTool
@@ -246,7 +246,7 @@ module OpenAI
          truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol),
          user: String,
          stream: T.noreturn,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(
            OpenAI::Internal::Stream[
@@ -404,7 +404,7 @@ module OpenAI
        params(
          response_id: String,
          include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol],
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::Responses::Response)
      end
@@ -422,7 +422,7 @@ module OpenAI
      sig do
        params(
          response_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .void
      end
diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi
index 92e7016f..5c6dfbeb 100644
--- a/rbi/lib/openai/resources/responses/input_items.rbi
+++ b/rbi/lib/openai/resources/responses/input_items.rbi
@@ -13,7 +13,7 @@ module OpenAI
          include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol],
          limit: Integer,
          order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(
            OpenAI::Internal::CursorPage[
diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi
index 88ab62ad..ed222226 100644
--- a/rbi/lib/openai/resources/uploads.rbi
+++ b/rbi/lib/openai/resources/uploads.rbi
@@ -31,7 +31,7 @@ module OpenAI
          filename: String,
          mime_type: String,
          purpose: OpenAI::Models::FilePurpose::OrSymbol,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::Upload)
      end
@@ -58,7 +58,7 @@ module OpenAI
      sig do
        params(
          upload_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::Upload)
      end
@@ -87,7 +87,7 @@ module OpenAI
          upload_id: String,
          part_ids: T::Array[String],
          md5: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::Upload)
      end
diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi
index 6d53630a..7aec6363 100644
--- a/rbi/lib/openai/resources/uploads/parts.rbi
+++ b/rbi/lib/openai/resources/uploads/parts.rbi
@@ -19,7 +19,7 @@ module OpenAI
        params(
          upload_id: String,
          data: T.any(IO, StringIO),
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::Uploads::UploadPart)
      end
diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi
index 74b9b2a3..e64c7552 100644
--- a/rbi/lib/openai/resources/vector_stores.rbi
+++ b/rbi/lib/openai/resources/vector_stores.rbi
@@ -14,14 +14,14 @@ module OpenAI
        params(
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
-           OpenAI::Internal::Util::AnyHash,
+           OpenAI::Internal::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
-         expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash),
+         expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash),
          file_ids: T::Array[String],
          metadata: T.nilable(T::Hash[Symbol, String]),
          name: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStore)
      end
@@ -52,7 +52,7 @@ module OpenAI
      sig do
        params(
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStore)
      end
@@ -67,10 +67,10 @@ module OpenAI
      sig do
        params(
          vector_store_id: String,
-         expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::Util::AnyHash)),
+         expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)),
          metadata: T.nilable(T::Hash[Symbol, String]),
          name: T.nilable(String),
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStore)
      end
@@ -99,7 +99,7 @@ module OpenAI
          before: String,
          limit: Integer,
          order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore])
      end
@@ -128,7 +128,7 @@ module OpenAI
      sig do
        params(
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStoreDeleted)
      end
@@ -145,11 +145,11 @@ module OpenAI
        params(
          vector_store_id: String,
          query: T.any(String, T::Array[String]),
-         filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::Util::AnyHash, OpenAI::Models::CompoundFilter),
+         filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter),
          max_num_results: Integer,
-         ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::Util::AnyHash),
+         ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash),
          rewrite_query: T::Boolean,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse])
      end
diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi
index 40c66e29..5bb347bf 100644
--- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi
+++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi
@@ -12,10 +12,10 @@ module OpenAI
          attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
-           OpenAI::Internal::Util::AnyHash,
+           OpenAI::Internal::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch)
      end
@@ -44,7 +44,7 @@ module OpenAI
        params(
          batch_id: String,
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch)
      end
@@ -63,7 +63,7 @@ module OpenAI
        params(
          batch_id: String,
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch)
      end
@@ -86,7 +86,7 @@ module OpenAI
          filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol,
          limit: Integer,
          order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile])
      end
diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi
index fe472d5a..145d752b 100644
--- a/rbi/lib/openai/resources/vector_stores/files.rbi
+++ b/rbi/lib/openai/resources/vector_stores/files.rbi
@@ -14,10 +14,10 @@ module OpenAI
          attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
          chunking_strategy: T.any(
            OpenAI::Models::AutoFileChunkingStrategyParam,
-           OpenAI::Internal::Util::AnyHash,
+           OpenAI::Internal::AnyHash,
            OpenAI::Models::StaticFileChunkingStrategyObjectParam
          ),
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFile)
      end
@@ -46,7 +46,7 @@ module OpenAI
        params(
          file_id: String,
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFile)
      end
@@ -65,7 +65,7 @@ module OpenAI
          file_id: String,
          vector_store_id: String,
          attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]),
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFile)
      end
@@ -93,7 +93,7 @@ module OpenAI
          filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol,
          limit: Integer,
          order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile])
      end
@@ -130,7 +130,7 @@ module OpenAI
        params(
          file_id: String,
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted)
      end
@@ -148,7 +148,7 @@ module OpenAI
        params(
          file_id: String,
          vector_store_id: String,
-         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::Util::AnyHash))
+         request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
        )
          .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStores::FileContentResponse])
      end
diff --git a/sig/openai/aliases.rbs b/sig/openai/aliases.rbs
deleted file mode 100644
index b99ae58a..00000000
--- a/sig/openai/aliases.rbs
+++ /dev/null
@@ -1,19 +0,0 @@
-module OpenAI
-  class Unknown = OpenAI::Internal::Type::Unknown
-
-  class BooleanModel = OpenAI::Internal::Type::BooleanModel
-
-  module Enum = OpenAI::Internal::Type::Enum
-
-  module Union = OpenAI::Internal::Type::Union
-
-  class ArrayOf = OpenAI::Internal::Type::ArrayOf
-
-  class HashOf = OpenAI::Internal::Type::HashOf
-
-  class BaseModel = OpenAI::Internal::Type::BaseModel
-
-  type request_parameters = OpenAI::Internal::Type::request_parameters
-
-  module RequestParameters = OpenAI::Internal::Type::RequestParameters
-end
diff --git a/sig/openai/errors.rbs b/sig/openai/errors.rbs
index 8dc6afce..19583ccb 100644
--- a/sig/openai/errors.rbs
+++ b/sig/openai/errors.rbs
@@ -104,32 +104,4 @@ module OpenAI
       HTTP_STATUS: Range[Integer]
     end
   end
-
-  class Error = OpenAI::Errors::Error
-
-  class ConversionError = OpenAI::Errors::ConversionError
-
-  class APIError = OpenAI::Errors::APIError
-
-  class APIStatusError = OpenAI::Errors::APIStatusError
-
-  class APIConnectionError = OpenAI::Errors::APIConnectionError
-
-  class APITimeoutError = OpenAI::Errors::APITimeoutError
-
-  class BadRequestError = OpenAI::Errors::BadRequestError
-
-  class AuthenticationError = OpenAI::Errors::AuthenticationError
-
-  class PermissionDeniedError = OpenAI::Errors::PermissionDeniedError
-
-  class NotFoundError = OpenAI::Errors::NotFoundError
-
-  class ConflictError = OpenAI::Errors::ConflictError
-
-  class UnprocessableEntityError = OpenAI::Errors::UnprocessableEntityError
-
-  class RateLimitError = OpenAI::Errors::RateLimitError
-
-  class InternalServerError = OpenAI::Errors::InternalServerError
 end
diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs
new file mode 100644
index 00000000..ff72a6b0
--- /dev/null
+++ b/sig/openai/internal.rbs
@@ -0,0 +1,4 @@
+module OpenAI
+  module Internal
+  end
+end
diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs
index fd068625..69aca270 100644
--- a/sig/openai/internal/type/base_model.rbs
+++ b/sig/openai/internal/type/base_model.rbs
@@ -7,10 +7,10 @@ module OpenAI
        type known_field =
          { mode: (:coerce | :dump)?, required: bool, nilable: bool }
 
-       def self.known_fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field
+       def self.known_fields: -> ::Hash[Symbol, (OpenAI::Internal::Type::BaseModel::known_field
        & { type_fn: (^-> OpenAI::Internal::Type::Converter::input) })]
 
-       def self.fields: -> ::Hash[Symbol, (OpenAI::BaseModel::known_field
+       def self.fields: -> ::Hash[Symbol, (OpenAI::Internal::Type::BaseModel::known_field
        & { type: OpenAI::Internal::Type::Converter::input })]
 
        private def self.add_field: (
@@ -52,7 +52,7 @@ module OpenAI
        def ==: (top other) -> bool
 
        def self.coerce: (
-         OpenAI::BaseModel | ::Hash[top, top] | top value,
+         OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value,
          state: OpenAI::Internal::Type::Converter::state
        ) -> (instance | top)
diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs
index 21bf07da..07e02294 100644
--- a/sig/openai/internal/util.rbs
+++ b/sig/openai/internal/util.rbs
@@ -19,8 +19,6 @@ module OpenAI
 
      def self?.coerce_hash: (top input) -> (::Hash[top, top] | top)
 
-      OMIT: top
-
      def self?.deep_merge_lr: (top lhs, top rhs, ?concat: bool) -> top
 
      def self?.deep_merge: (
diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs
index 77c8f9b8..836c7e7b 100644
--- a/sig/openai/models/all_models.rbs
+++ b/sig/openai/models/all_models.rbs
@@ -9,7 +9,7 @@ module OpenAI
      | :"computer-use-preview-2025-03-11"
 
    module AllModels
-     extend OpenAI::Union
+     extend OpenAI::Internal::Type::Union
 
      def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro"
        | :"o1-pro-2025-03-19"
diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs
index 1b50fd38..97aff0df 100644
--- a/sig/openai/models/audio/speech_create_params.rbs
+++ b/sig/openai/models/audio/speech_create_params.rbs
@@ -12,7 +12,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class SpeechCreateParams < OpenAI::BaseModel
+      class SpeechCreateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -51,7 +51,7 @@ module OpenAI
        type model = String | OpenAI::Models::Audio::speech_model
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, OpenAI::Models::Audio::speech_model]
        end
@@ -71,7 +71,7 @@ module OpenAI
          | :verse
 
        module Voice
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, (:alloy
            | :ash
@@ -101,7 +101,7 @@ module OpenAI
        type response_format = :mp3 | :opus | :aac | :flac | :wav | :pcm
 
        module ResponseFormat
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          MP3: :mp3
          OPUS: :opus
diff --git a/sig/openai/models/audio/speech_model.rbs b/sig/openai/models/audio/speech_model.rbs
index 8fc430d6..1dfa36a8 100644
--- a/sig/openai/models/audio/speech_model.rbs
+++ b/sig/openai/models/audio/speech_model.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type speech_model = :"tts-1" | :"tts-1-hd" | :"gpt-4o-mini-tts"
 
      module SpeechModel
-       extend OpenAI::Enum
+       extend OpenAI::Internal::Type::Enum
 
        TTS_1: :"tts-1"
        TTS_1_HD: :"tts-1-hd"
diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs
index 3f9bf1d4..12d41fc2 100644
--- a/sig/openai/models/audio/transcription.rbs
+++ b/sig/openai/models/audio/transcription.rbs
@@ -7,7 +7,7 @@ module OpenAI
          logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob]
        }
 
-      class Transcription < OpenAI::BaseModel
+      class Transcription < OpenAI::Internal::Type::BaseModel
        attr_accessor text: String
 
        attr_reader logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob]?
@@ -25,7 +25,7 @@ module OpenAI
        type logprob = { token: String, bytes: ::Array[Float], logprob: Float }
 
-        class Logprob < OpenAI::BaseModel
+        class Logprob < OpenAI::Internal::Type::BaseModel
          attr_reader token: String?
 
          def token=: (String) -> String
diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs
index b46cde10..f0ae2daa 100644
--- a/sig/openai/models/audio/transcription_create_params.rbs
+++ b/sig/openai/models/audio/transcription_create_params.rbs
@@ -14,7 +14,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class TranscriptionCreateParams < OpenAI::BaseModel
+      class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -69,7 +69,7 @@ module OpenAI
        type model = String | OpenAI::Models::audio_model
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, OpenAI::Models::audio_model]
        end
 
@@ -77,7 +77,7 @@ module OpenAI
        type timestamp_granularity = :word | :segment
 
        module TimestampGranularity
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          WORD: :word
          SEGMENT: :segment
diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs
index 49eb0f09..f4931f11 100644
--- a/sig/openai/models/audio/transcription_create_response.rbs
+++ b/sig/openai/models/audio/transcription_create_response.rbs
@@ -6,7 +6,7 @@ module OpenAI
        | OpenAI::Models::Audio::TranscriptionVerbose
 
      module TranscriptionCreateResponse
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        def self?.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
      end
diff --git a/sig/openai/models/audio/transcription_include.rbs b/sig/openai/models/audio/transcription_include.rbs
index 349684b6..de5322fa 100644
--- a/sig/openai/models/audio/transcription_include.rbs
+++ b/sig/openai/models/audio/transcription_include.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type transcription_include = :logprobs
 
      module TranscriptionInclude
-       extend OpenAI::Enum
+       extend OpenAI::Internal::Type::Enum
 
        LOGPROBS: :logprobs
diff --git a/sig/openai/models/audio/transcription_segment.rbs b/sig/openai/models/audio/transcription_segment.rbs
index 85af1669..b0748151 100644
--- a/sig/openai/models/audio/transcription_segment.rbs
+++ b/sig/openai/models/audio/transcription_segment.rbs
@@ -15,7 +15,7 @@ module OpenAI
          tokens: ::Array[Integer]
        }
 
-      class TranscriptionSegment < OpenAI::BaseModel
+      class TranscriptionSegment < OpenAI::Internal::Type::BaseModel
        attr_accessor id: Integer
 
        attr_accessor avg_logprob: Float
diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs
index 8441d201..158d4540 100644
--- a/sig/openai/models/audio/transcription_stream_event.rbs
+++ b/sig/openai/models/audio/transcription_stream_event.rbs
@@ -6,7 +6,7 @@ module OpenAI
        | OpenAI::Models::Audio::TranscriptionTextDoneEvent
 
      module TranscriptionStreamEvent
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        def self?.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent]
      end
diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs
index 373c6aed..c34d5fcd 100644
--- a/sig/openai/models/audio/transcription_text_delta_event.rbs
+++ b/sig/openai/models/audio/transcription_text_delta_event.rbs
@@ -8,7 +8,7 @@ module OpenAI
          logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]
        }
 
-      class TranscriptionTextDeltaEvent < OpenAI::BaseModel
+      class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel
        attr_accessor delta: String
 
        attr_accessor type: :"transcript.text.delta"
@@ -29,7 +29,7 @@ module OpenAI
        type logprob = { token: String, bytes: ::Array[top], logprob: Float }
 
-        class Logprob < OpenAI::BaseModel
+        class Logprob < OpenAI::Internal::Type::BaseModel
          attr_reader token: String?
 
          def token=: (String) -> String
diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs
index f1f1dd1d..f6216cee 100644
--- a/sig/openai/models/audio/transcription_text_done_event.rbs
+++ b/sig/openai/models/audio/transcription_text_done_event.rbs
@@ -8,7 +8,7 @@ module OpenAI
          logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]
        }
 
-      class TranscriptionTextDoneEvent < OpenAI::BaseModel
+      class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel
        attr_accessor text: String
 
        attr_accessor type: :"transcript.text.done"
@@ -29,7 +29,7 @@ module OpenAI
        type logprob = { token: String, bytes: ::Array[top], logprob: Float }
 
-        class Logprob < OpenAI::BaseModel
+        class Logprob < OpenAI::Internal::Type::BaseModel
          attr_reader token: String?
 
          def token=: (String) -> String
diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs
index 9546e704..34e8fe7a 100644
--- a/sig/openai/models/audio/transcription_verbose.rbs
+++ b/sig/openai/models/audio/transcription_verbose.rbs
@@ -10,7 +10,7 @@ module OpenAI
          words: ::Array[OpenAI::Models::Audio::TranscriptionWord]
        }
 
-      class TranscriptionVerbose < OpenAI::BaseModel
+      class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel
        attr_accessor duration: Float
 
        attr_accessor language: String
diff --git a/sig/openai/models/audio/transcription_word.rbs b/sig/openai/models/audio/transcription_word.rbs
index 5b9eee66..e9332bbd 100644
--- a/sig/openai/models/audio/transcription_word.rbs
+++ b/sig/openai/models/audio/transcription_word.rbs
@@ -3,7 +3,7 @@ module OpenAI
    module Audio
      type transcription_word = { end_: Float, start: Float, word: String }
 
-      class TranscriptionWord < OpenAI::BaseModel
+      class TranscriptionWord < OpenAI::Internal::Type::BaseModel
        attr_accessor end_: Float
 
        attr_accessor start: Float
diff --git a/sig/openai/models/audio/translation.rbs b/sig/openai/models/audio/translation.rbs
index 4a88fe3f..43eeef91 100644
--- a/sig/openai/models/audio/translation.rbs
+++ b/sig/openai/models/audio/translation.rbs
@@ -3,7 +3,7 @@ module OpenAI
    module Audio
      type translation = { text: String }
 
-      class Translation < OpenAI::BaseModel
+      class Translation < OpenAI::Internal::Type::BaseModel
        attr_accessor text: String
 
        def initialize: (text: String) -> void
diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs
index 800cd86d..588ac2cf 100644
--- a/sig/openai/models/audio/translation_create_params.rbs
+++ b/sig/openai/models/audio/translation_create_params.rbs
@@ -11,7 +11,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class TranslationCreateParams < OpenAI::BaseModel
+      class TranslationCreateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -47,7 +47,7 @@ module OpenAI
        type model = String | OpenAI::Models::audio_model
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, OpenAI::Models::audio_model]
        end
 
@@ -55,7 +55,7 @@ module OpenAI
        type response_format = :json | :text | :srt | :verbose_json | :vtt
 
        module ResponseFormat
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          JSON: :json
          TEXT: :text
diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs
index 7b88f273..785dfce2 100644
--- a/sig/openai/models/audio/translation_create_response.rbs
+++ b/sig/openai/models/audio/translation_create_response.rbs
@@ -6,7 +6,7 @@ module OpenAI
        | OpenAI::Models::Audio::TranslationVerbose
 
      module TranslationCreateResponse
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        def self?.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]
      end
diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs
index 0a805f53..3f69b2ca 100644
--- a/sig/openai/models/audio/translation_verbose.rbs
+++ b/sig/openai/models/audio/translation_verbose.rbs
@@ -9,7 +9,7 @@ module OpenAI
          segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]
        }
 
-      class TranslationVerbose < OpenAI::BaseModel
+      class TranslationVerbose < OpenAI::Internal::Type::BaseModel
        attr_accessor duration: Float
 
        attr_accessor language: String
diff --git a/sig/openai/models/audio_model.rbs b/sig/openai/models/audio_model.rbs
index c5a9d284..4a294e19 100644
--- a/sig/openai/models/audio_model.rbs
+++ b/sig/openai/models/audio_model.rbs
@@ -4,7 +4,7 @@ module OpenAI
      :"whisper-1" | :"gpt-4o-transcribe" | :"gpt-4o-mini-transcribe"
 
    module AudioModel
-     extend OpenAI::Enum
+     extend OpenAI::Internal::Type::Enum
 
      WHISPER_1: :"whisper-1"
      GPT_4O_TRANSCRIBE: :"gpt-4o-transcribe"
diff --git a/sig/openai/models/audio_response_format.rbs b/sig/openai/models/audio_response_format.rbs
index 7c91cd99..39091918 100644
--- a/sig/openai/models/audio_response_format.rbs
+++ b/sig/openai/models/audio_response_format.rbs
@@ -3,7 +3,7 @@ module OpenAI
    type audio_response_format = :json | :text | :srt | :verbose_json | :vtt
 
    module AudioResponseFormat
-     extend OpenAI::Enum
+     extend OpenAI::Internal::Type::Enum
 
      JSON: :json
      TEXT: :text
diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs
index 49630c41..a5285e7b 100644
--- a/sig/openai/models/auto_file_chunking_strategy_param.rbs
+++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs
@@ -2,7 +2,7 @@ module OpenAI
  module Models
    type auto_file_chunking_strategy_param = { type: :auto }
 
-    class AutoFileChunkingStrategyParam < OpenAI::BaseModel
+    class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel
      attr_accessor type: :auto
 
      def initialize: (?type: :auto) -> void
diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs
index 46840a64..75a90985 100644
--- a/sig/openai/models/batch.rbs
+++ b/sig/openai/models/batch.rbs
@@ -24,7 +24,7 @@ module OpenAI
        request_counts: OpenAI::Models::BatchRequestCounts
      }
 
-    class Batch < OpenAI::BaseModel
+    class Batch < OpenAI::Internal::Type::BaseModel
      attr_accessor id: String
 
      attr_accessor completion_window: String
@@ -129,7 +129,7 @@ module OpenAI
        | :cancelled
 
      module Status
-       extend OpenAI::Enum
+       extend OpenAI::Internal::Type::Enum
 
        VALIDATING: :validating
        FAILED: :failed
@@ -146,7 +146,7 @@ module OpenAI
      type errors =
        { data: ::Array[OpenAI::Models::BatchError], object: String }
 
-      class Errors < OpenAI::BaseModel
+      class Errors < OpenAI::Internal::Type::BaseModel
        attr_reader data: ::Array[OpenAI::Models::BatchError]?
 
        def data=: (
diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs
index fdb4fc66..c8b34f9f 100644
--- a/sig/openai/models/batch_cancel_params.rbs
+++ b/sig/openai/models/batch_cancel_params.rbs
@@ -2,7 +2,7 @@ module OpenAI
  module Models
    type batch_cancel_params = { } & OpenAI::Internal::Type::request_parameters
 
-    class BatchCancelParams < OpenAI::BaseModel
+    class BatchCancelParams < OpenAI::Internal::Type::BaseModel
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs
index 19fe80ff..cacafcf2 100644
--- a/sig/openai/models/batch_create_params.rbs
+++ b/sig/openai/models/batch_create_params.rbs
@@ -9,7 +9,7 @@ module OpenAI
      }
      & OpenAI::Internal::Type::request_parameters
 
-    class BatchCreateParams < OpenAI::BaseModel
+    class BatchCreateParams < OpenAI::Internal::Type::BaseModel
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
@@ -34,7 +34,7 @@ module OpenAI
      type completion_window = :"24h"
 
      module CompletionWindow
-       extend OpenAI::Enum
+       extend OpenAI::Internal::Type::Enum
 
        COMPLETION_WINDOW_24H: :"24h"
 
@@ -48,7 +48,7 @@ module OpenAI
        | :"/v1/completions"
 
      module Endpoint
-       extend OpenAI::Enum
+       extend OpenAI::Internal::Type::Enum
 
        V1_RESPONSES: :"/v1/responses"
        V1_CHAT_COMPLETIONS: :"/v1/chat/completions"
diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs
index b2aadd8e..75e828eb 100644
--- a/sig/openai/models/batch_error.rbs
+++ b/sig/openai/models/batch_error.rbs
@@ -3,7 +3,7 @@ module OpenAI
    type batch_error =
      { code: String, line: Integer?, message: String, param: String? }
 
-    class BatchError < OpenAI::BaseModel
+    class BatchError < OpenAI::Internal::Type::BaseModel
      attr_reader code: String?
 
      def code=: (String) -> String
diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs
index 3ca61260..9cb84ad1 100644
--- a/sig/openai/models/batch_list_params.rbs
+++ b/sig/openai/models/batch_list_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      { after: String, limit: Integer }
      & OpenAI::Internal::Type::request_parameters
 
-    class BatchListParams < OpenAI::BaseModel
+    class BatchListParams < OpenAI::Internal::Type::BaseModel
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/batch_request_counts.rbs b/sig/openai/models/batch_request_counts.rbs
index 0020eb38..56860e0e 100644
--- a/sig/openai/models/batch_request_counts.rbs
+++ b/sig/openai/models/batch_request_counts.rbs
@@ -3,7 +3,7 @@ module OpenAI
    type batch_request_counts =
      { completed: Integer, failed: Integer, total: Integer }
 
-    class BatchRequestCounts < OpenAI::BaseModel
+    class BatchRequestCounts < OpenAI::Internal::Type::BaseModel
      attr_accessor completed: Integer
 
      attr_accessor failed: Integer
diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs
index 4b17656d..681750c7 100644
--- a/sig/openai/models/batch_retrieve_params.rbs
+++ b/sig/openai/models/batch_retrieve_params.rbs
@@ -3,7 +3,7 @@ module OpenAI
    type batch_retrieve_params = { } & OpenAI::Internal::Type::request_parameters
 
-    class BatchRetrieveParams < OpenAI::BaseModel
+    class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs
index 91f1b5ee..557bcacf 100644
--- a/sig/openai/models/beta/assistant.rbs
+++ b/sig/openai/models/beta/assistant.rbs
@@ -18,7 +18,7 @@ module OpenAI
          top_p: Float?
        }
 
-      class Assistant < OpenAI::BaseModel
+      class Assistant < OpenAI::Internal::Type::BaseModel
        attr_accessor id: String
 
        attr_accessor created_at: Integer
@@ -69,7 +69,7 @@ module OpenAI
          file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch
        }
 
-        class ToolResources < OpenAI::BaseModel
+        class ToolResources < OpenAI::Internal::Type::BaseModel
          attr_reader code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter?
 
          def code_interpreter=: (
@@ -91,7 +91,7 @@ module OpenAI
          type code_interpreter = { file_ids: ::Array[String] }
 
-          class CodeInterpreter < OpenAI::BaseModel
+          class CodeInterpreter < OpenAI::Internal::Type::BaseModel
            attr_reader file_ids: ::Array[String]?
 
            def file_ids=: (::Array[String]) -> ::Array[String]
@@ -103,7 +103,7 @@ module OpenAI
          type file_search = { vector_store_ids: ::Array[String] }
 
-          class FileSearch < OpenAI::BaseModel
+          class FileSearch < OpenAI::Internal::Type::BaseModel
            attr_reader vector_store_ids: ::Array[String]?
 
            def vector_store_ids=: (::Array[String]) -> ::Array[String]
diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs
index 10663a9c..83167a35 100644
--- a/sig/openai/models/beta/assistant_create_params.rbs
+++ b/sig/openai/models/beta/assistant_create_params.rbs
@@ -17,7 +17,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class AssistantCreateParams < OpenAI::BaseModel
+      class AssistantCreateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -67,7 +67,7 @@ module OpenAI
        type model = String | OpenAI::Models::chat_model
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, OpenAI::Models::chat_model]
        end
 
@@ -78,7 +78,7 @@ module OpenAI
          file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch
        }
 
-        class ToolResources < OpenAI::BaseModel
+        class ToolResources < OpenAI::Internal::Type::BaseModel
          attr_reader code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter?
 
          def code_interpreter=: (
@@ -100,7 +100,7 @@ module OpenAI
          type code_interpreter = { file_ids: ::Array[String] }
 
-          class CodeInterpreter < OpenAI::BaseModel
+          class CodeInterpreter < OpenAI::Internal::Type::BaseModel
            attr_reader file_ids: ::Array[String]?
 
            def file_ids=: (::Array[String]) -> ::Array[String]
@@ -116,7 +116,7 @@ module OpenAI
            vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]
          }
 
-          class FileSearch < OpenAI::BaseModel
+          class FileSearch < OpenAI::Internal::Type::BaseModel
            attr_reader vector_store_ids: ::Array[String]?
 
            def vector_store_ids=: (::Array[String]) -> ::Array[String]
@@ -141,7 +141,7 @@ module OpenAI
              metadata: OpenAI::Models::metadata?
            }
 
-            class VectorStore < OpenAI::BaseModel
+            class VectorStore < OpenAI::Internal::Type::BaseModel
              attr_reader chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy?
 
              def chunking_strategy=: (
@@ -167,11 +167,11 @@ module OpenAI
                | OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
 
              module ChunkingStrategy
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union
 
                type auto = { type: :auto }
 
-                class Auto < OpenAI::BaseModel
+                class Auto < OpenAI::Internal::Type::BaseModel
                  attr_accessor type: :auto
 
                  def initialize: (?type: :auto) -> void
@@ -185,7 +185,7 @@ module OpenAI
                    type: :static
                  }
 
-                class Static < OpenAI::BaseModel
+                class Static < OpenAI::Internal::Type::BaseModel
                  attr_accessor static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static
 
                  attr_accessor type: :static
@@ -203,7 +203,7 @@ module OpenAI
                    max_chunk_size_tokens: Integer
                  }
 
-                  class Static < OpenAI::BaseModel
+                  class Static < OpenAI::Internal::Type::BaseModel
                    attr_accessor chunk_overlap_tokens: Integer
 
                    attr_accessor max_chunk_size_tokens: Integer
diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs
index 297d2fdb..d6a42d1d 100644
--- a/sig/openai/models/beta/assistant_delete_params.rbs
+++ b/sig/openai/models/beta/assistant_delete_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type assistant_delete_params = { } & OpenAI::Internal::Type::request_parameters
 
-      class AssistantDeleteParams < OpenAI::BaseModel
+      class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs
index 39b34057..8c913e60 100644
--- a/sig/openai/models/beta/assistant_deleted.rbs
+++ b/sig/openai/models/beta/assistant_deleted.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type assistant_deleted =
        { id: String, deleted: bool, object: :"assistant.deleted" }
 
-      class AssistantDeleted < OpenAI::BaseModel
+      class AssistantDeleted < OpenAI::Internal::Type::BaseModel
        attr_accessor id: String
 
        attr_accessor deleted: bool
diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs
index c3040fc8..c0481b7d 100644
--- a/sig/openai/models/beta/assistant_list_params.rbs
+++ b/sig/openai/models/beta/assistant_list_params.rbs
@@ -10,7 +10,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class AssistantListParams < OpenAI::BaseModel
+      class AssistantListParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -45,7 +45,7 @@ module OpenAI
        type order = :asc | :desc
 
        module Order
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          ASC: :asc
          DESC: :desc
diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs
index 3b848b00..e5da5be7 100644
--- a/sig/openai/models/beta/assistant_response_format_option.rbs
+++ b/sig/openai/models/beta/assistant_response_format_option.rbs
@@ -8,7 +8,7 @@ module OpenAI
        | OpenAI::Models::ResponseFormatJSONSchema
 
      module AssistantResponseFormatOption
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        def self?.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema]
      end
diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs
index d39312a3..53274daa 100644
--- a/sig/openai/models/beta/assistant_retrieve_params.rbs
+++ b/sig/openai/models/beta/assistant_retrieve_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type assistant_retrieve_params = { } & OpenAI::Internal::Type::request_parameters
 
-      class AssistantRetrieveParams < OpenAI::BaseModel
+      class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs
index ae88c40e..915e637d 100644
--- a/sig/openai/models/beta/assistant_stream_event.rbs
+++ b/sig/openai/models/beta/assistant_stream_event.rbs
@@ -28,7 +28,7 @@ module OpenAI
        | OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent
 
      module AssistantStreamEvent
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        type thread_created =
          {
            data: OpenAI::Models::Beta::Thread,
            event: :"thread.created",
            enabled: bool
          }
 
-        class ThreadCreated < OpenAI::BaseModel
+        class ThreadCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Thread
 
          attr_accessor event: :"thread.created"
@@ -61,7 +61,7 @@ module OpenAI
          event: :"thread.run.created"
        }
 
-        class ThreadRunCreated < OpenAI::BaseModel
+        class ThreadRunCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.created"
@@ -80,7 +80,7 @@ module OpenAI
          event: :"thread.run.queued"
        }
 
-        class ThreadRunQueued < OpenAI::BaseModel
+        class ThreadRunQueued < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.queued"
@@ -99,7 +99,7 @@ module OpenAI
          event: :"thread.run.in_progress"
        }
 
-        class ThreadRunInProgress < OpenAI::BaseModel
+        class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.in_progress"
@@ -118,7 +118,7 @@ module OpenAI
          event: :"thread.run.requires_action"
        }
 
-        class ThreadRunRequiresAction < OpenAI::BaseModel
+        class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.requires_action"
@@ -137,7 +137,7 @@ module OpenAI
          event: :"thread.run.completed"
        }
 
-        class ThreadRunCompleted < OpenAI::BaseModel
+        class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.completed"
@@ -156,7 +156,7 @@ module OpenAI
          event: :"thread.run.incomplete"
        }
 
-        class ThreadRunIncomplete < OpenAI::BaseModel
+        class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.incomplete"
@@ -175,7 +175,7 @@ module OpenAI
          event: :"thread.run.failed"
        }
 
-        class ThreadRunFailed < OpenAI::BaseModel
+        class ThreadRunFailed < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.failed"
@@ -194,7 +194,7 @@ module OpenAI
          event: :"thread.run.cancelling"
        }
 
-        class ThreadRunCancelling < OpenAI::BaseModel
+        class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.cancelling"
@@ -213,7 +213,7 @@ module OpenAI
          event: :"thread.run.cancelled"
        }
 
-        class ThreadRunCancelled < OpenAI::BaseModel
+        class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.cancelled"
@@ -232,7 +232,7 @@ module OpenAI
          event: :"thread.run.expired"
        }
 
-        class ThreadRunExpired < OpenAI::BaseModel
+        class ThreadRunExpired < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.expired"
@@ -251,7 +251,7 @@ module OpenAI
          event: :"thread.run.step.created"
        }
 
-        class ThreadRunStepCreated < OpenAI::BaseModel
+        class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.created"
@@ -270,7 +270,7 @@ module OpenAI
          event: :"thread.run.step.in_progress"
        }
 
-        class ThreadRunStepInProgress < OpenAI::BaseModel
+        class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.in_progress"
@@ -289,7 +289,7 @@ module OpenAI
          event: :"thread.run.step.delta"
        }
 
-        class ThreadRunStepDelta < OpenAI::BaseModel
+        class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent
 
          attr_accessor event: :"thread.run.step.delta"
@@ -308,7 +308,7 @@ module OpenAI
          event: :"thread.run.step.completed"
        }
 
-        class ThreadRunStepCompleted < OpenAI::BaseModel
+        class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.completed"
@@ -327,7 +327,7 @@ module OpenAI
          event: :"thread.run.step.failed"
        }
 
-        class ThreadRunStepFailed < OpenAI::BaseModel
+        class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.failed"
@@ -346,7 +346,7 @@ module OpenAI
          event: :"thread.run.step.cancelled"
        }
 
-        class ThreadRunStepCancelled < OpenAI::BaseModel
+        class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.cancelled"
@@ -365,7 +365,7 @@ module OpenAI
          event: :"thread.run.step.expired"
        }
 
-        class ThreadRunStepExpired < OpenAI::BaseModel
+        class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.expired"
@@ -384,7 +384,7 @@ module OpenAI
          event: :"thread.message.created"
        }
 
-        class ThreadMessageCreated < OpenAI::BaseModel
+        class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.created"
@@ -403,7 +403,7 @@ module OpenAI
          event: :"thread.message.in_progress"
        }
 
-        class ThreadMessageInProgress < OpenAI::BaseModel
+        class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.in_progress"
@@ -422,7 +422,7 @@ module OpenAI
          event: :"thread.message.delta"
        }
 
-        class ThreadMessageDelta < OpenAI::BaseModel
+        class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::MessageDeltaEvent
 
          attr_accessor event: :"thread.message.delta"
@@ -441,7 +441,7 @@ module OpenAI
          event: :"thread.message.completed"
        }
 
-        class ThreadMessageCompleted < OpenAI::BaseModel
+        class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.completed"
@@ -460,7 +460,7 @@ module OpenAI
          event: :"thread.message.incomplete"
        }
 
-        class ThreadMessageIncomplete < OpenAI::BaseModel
+        class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.incomplete"
@@ -475,7 +475,7 @@ module OpenAI
        type error_event = { data: OpenAI::Models::ErrorObject, event: :error }
 
-        class ErrorEvent < OpenAI::BaseModel
+        class ErrorEvent < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::ErrorObject
 
          attr_accessor event: :error
diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs
index d3a43fed..abdfdcff 100644
--- a/sig/openai/models/beta/assistant_tool.rbs
+++ b/sig/openai/models/beta/assistant_tool.rbs
@@ -7,7 +7,7 @@ module OpenAI
        | OpenAI::Models::Beta::FunctionTool
 
      module AssistantTool
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
      end
diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs
index 69e6e1b5..92b87de5 100644
--- a/sig/openai/models/beta/assistant_tool_choice.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice.rbs
@@ -7,7 +7,7 @@ module OpenAI
          function: OpenAI::Models::Beta::AssistantToolChoiceFunction
        }
 
-      class AssistantToolChoice < OpenAI::BaseModel
+      class AssistantToolChoice < OpenAI::Internal::Type::BaseModel
        attr_accessor type: OpenAI::Models::Beta::AssistantToolChoice::type_
 
        attr_reader function: OpenAI::Models::Beta::AssistantToolChoiceFunction?
@@ -26,7 +26,7 @@ module OpenAI
        type type_ = :function | :code_interpreter | :file_search
 
        module Type
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          FUNCTION: :function
          CODE_INTERPRETER: :code_interpreter
diff --git a/sig/openai/models/beta/assistant_tool_choice_function.rbs b/sig/openai/models/beta/assistant_tool_choice_function.rbs
index ab112b3d..bd41ba31 100644
--- a/sig/openai/models/beta/assistant_tool_choice_function.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_function.rbs
@@ -3,7 +3,7 @@ module OpenAI
    module Beta
      type assistant_tool_choice_function = { name: String }
 
-      class AssistantToolChoiceFunction < OpenAI::BaseModel
+      class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel
        attr_accessor name: String
 
        def initialize: (name: String) -> void
diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs
index 4b1d9ff3..54a6f6c5 100644
--- a/sig/openai/models/beta/assistant_tool_choice_option.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs
@@ -6,12 +6,12 @@ module OpenAI
        | OpenAI::Models::Beta::AssistantToolChoice
 
      module AssistantToolChoiceOption
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        type auto = :none | :auto | :required
 
        module Auto
-         extend OpenAI::Enum
+         extend OpenAI::Internal::Type::Enum
 
          NONE: :none
          AUTO: :auto
diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs
index f84dde39..de95efb8 100644
--- a/sig/openai/models/beta/assistant_update_params.rbs
+++ b/sig/openai/models/beta/assistant_update_params.rbs
@@ -17,7 +17,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class AssistantUpdateParams < OpenAI::BaseModel
+      class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -102,7 +102,7 @@ module OpenAI
          | :"gpt-3.5-turbo-16k-0613"
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, (:"o3-mini"
            | :"o3-mini-2025-01-31"
@@ -173,7 +173,7 @@ module OpenAI
          file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
        }
 
-        class ToolResources < OpenAI::BaseModel
+        class ToolResources < OpenAI::Internal::Type::BaseModel
          attr_reader code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter?
 
          def code_interpreter=: (
@@ -195,7 +195,7 @@ module OpenAI
          type code_interpreter = { file_ids: ::Array[String] }
 
-          class CodeInterpreter < OpenAI::BaseModel
+          class CodeInterpreter < OpenAI::Internal::Type::BaseModel
            attr_reader file_ids: ::Array[String]?
 
            def file_ids=: (::Array[String]) -> ::Array[String]
@@ -207,7 +207,7 @@ module OpenAI
          type file_search = { vector_store_ids: ::Array[String] }
 
-          class FileSearch < OpenAI::BaseModel
+          class FileSearch < OpenAI::Internal::Type::BaseModel
            attr_reader vector_store_ids: ::Array[String]?
 
            def vector_store_ids=: (::Array[String]) -> ::Array[String]
diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs
index 3855b043..f986bb74 100644
--- a/sig/openai/models/beta/code_interpreter_tool.rbs
+++ b/sig/openai/models/beta/code_interpreter_tool.rbs
@@ -3,7 +3,7 @@ module OpenAI
    module Beta
      type code_interpreter_tool = { type: :code_interpreter }
 
-      class CodeInterpreterTool < OpenAI::BaseModel
+      class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel
        attr_accessor type: :code_interpreter
 
        def initialize: (?type: :code_interpreter) -> void
diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs
index c18305c9..f3bec757 100644
--- a/sig/openai/models/beta/file_search_tool.rbs
+++ b/sig/openai/models/beta/file_search_tool.rbs
@@ -7,7 +7,7 @@ module OpenAI
          file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch
        }
 
-      class FileSearchTool < OpenAI::BaseModel
+      class FileSearchTool < OpenAI::Internal::Type::BaseModel
        attr_accessor type: :file_search
 
        attr_reader file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch?
@@ -29,7 +29,7 @@ module OpenAI
          ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
        }
 
-        class FileSearch < OpenAI::BaseModel
+        class FileSearch < OpenAI::Internal::Type::BaseModel
          attr_reader max_num_results: Integer?
 
          def max_num_results=: (Integer) -> Integer
@@ -53,7 +53,7 @@ module OpenAI
            ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
          }
 
-          class RankingOptions < OpenAI::BaseModel
+          class RankingOptions < OpenAI::Internal::Type::BaseModel
            attr_accessor score_threshold: Float
 
            attr_reader ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker?
@@ -72,7 +72,7 @@ module OpenAI
          type ranker = :auto | :default_2024_08_21
 
          module Ranker
-           extend OpenAI::Enum
+           extend OpenAI::Internal::Type::Enum
 
            AUTO: :auto
            DEFAULT_2024_08_21: :default_2024_08_21
diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs
index 72724066..cdb1900f 100644
--- a/sig/openai/models/beta/function_tool.rbs
+++ b/sig/openai/models/beta/function_tool.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type function_tool =
        { function: OpenAI::Models::FunctionDefinition, type: :function }
 
-      class FunctionTool < OpenAI::BaseModel
+      class FunctionTool < OpenAI::Internal::Type::BaseModel
        attr_accessor function: OpenAI::Models::FunctionDefinition
 
        attr_accessor type: :function
diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs
index fb0a513a..80b9ef55 100644
--- a/sig/openai/models/beta/message_stream_event.rbs
+++ b/sig/openai/models/beta/message_stream_event.rbs
@@ -9,7 +9,7 @@ module OpenAI
        | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete
 
      module MessageStreamEvent
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        type thread_message_created =
          {
            data: OpenAI::Models::Beta::Threads::Message,
            event: :"thread.message.created"
          }
 
-        class ThreadMessageCreated < OpenAI::BaseModel
+        class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.created"
@@ -36,7 +36,7 @@ module OpenAI
          event: :"thread.message.in_progress"
        }
 
-        class ThreadMessageInProgress < OpenAI::BaseModel
+        class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.in_progress"
@@ -55,7 +55,7 @@ module OpenAI
          event: :"thread.message.delta"
        }
 
-        class ThreadMessageDelta < OpenAI::BaseModel
+        class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::MessageDeltaEvent
 
          attr_accessor event: :"thread.message.delta"
@@ -74,7 +74,7 @@ module OpenAI
          event: :"thread.message.completed"
        }
 
-        class ThreadMessageCompleted < OpenAI::BaseModel
+        class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.completed"
@@ -93,7 +93,7 @@ module OpenAI
          event: :"thread.message.incomplete"
        }
 
-        class ThreadMessageIncomplete < OpenAI::BaseModel
+        class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Message
 
          attr_accessor event: :"thread.message.incomplete"
diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs
index 027b49a0..6cd64a35 100644
--- a/sig/openai/models/beta/run_step_stream_event.rbs
+++ b/sig/openai/models/beta/run_step_stream_event.rbs
@@ -11,7 +11,7 @@ module OpenAI
        | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired
 
      module RunStepStreamEvent
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        type thread_run_step_created =
          {
            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.created"
          }
 
-        class ThreadRunStepCreated < OpenAI::BaseModel
+        class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.created"
@@ -38,7 +38,7 @@ module OpenAI
          event: :"thread.run.step.in_progress"
        }
 
-        class ThreadRunStepInProgress < OpenAI::BaseModel
+        class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.in_progress"
@@ -57,7 +57,7 @@ module OpenAI
          event: :"thread.run.step.delta"
        }
 
-        class ThreadRunStepDelta < OpenAI::BaseModel
+        class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent
 
          attr_accessor event: :"thread.run.step.delta"
@@ -76,7 +76,7 @@ module OpenAI
          event: :"thread.run.step.completed"
        }
 
-        class ThreadRunStepCompleted < OpenAI::BaseModel
+        class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.completed"
@@ -95,7 +95,7 @@ module OpenAI
          event: :"thread.run.step.failed"
        }
 
-        class ThreadRunStepFailed < OpenAI::BaseModel
+        class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.failed"
@@ -114,7 +114,7 @@ module OpenAI
          event: :"thread.run.step.cancelled"
        }
 
-        class ThreadRunStepCancelled < OpenAI::BaseModel
+        class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.cancelled"
@@ -133,7 +133,7 @@ module OpenAI
          event: :"thread.run.step.expired"
        }
 
-        class ThreadRunStepExpired < OpenAI::BaseModel
+        class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
 
          attr_accessor event: :"thread.run.step.expired"
diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs
index 87e168ab..3e2604b4 100644
--- a/sig/openai/models/beta/run_stream_event.rbs
+++ b/sig/openai/models/beta/run_stream_event.rbs
@@ -14,7 +14,7 @@ module OpenAI
        | OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired
 
      module RunStreamEvent
-       extend OpenAI::Union
+       extend OpenAI::Internal::Type::Union
 
        type thread_run_created =
          {
            data: OpenAI::Models::Beta::Threads::Run,
            event: :"thread.run.created"
          }
 
-        class ThreadRunCreated < OpenAI::BaseModel
+        class ThreadRunCreated < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.created"
@@ -41,7 +41,7 @@ module OpenAI
          event: :"thread.run.queued"
        }
 
-        class ThreadRunQueued < OpenAI::BaseModel
+        class ThreadRunQueued < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.queued"
@@ -60,7 +60,7 @@ module OpenAI
          event: :"thread.run.in_progress"
        }
 
-        class ThreadRunInProgress < OpenAI::BaseModel
+        class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.in_progress"
@@ -79,7 +79,7 @@ module OpenAI
          event: :"thread.run.requires_action"
        }
 
-        class ThreadRunRequiresAction < OpenAI::BaseModel
+        class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.requires_action"
@@ -98,7 +98,7 @@ module OpenAI
          event: :"thread.run.completed"
        }
 
-        class ThreadRunCompleted < OpenAI::BaseModel
+        class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel
          attr_accessor data: OpenAI::Models::Beta::Threads::Run
 
          attr_accessor event: :"thread.run.completed"
@@ -117,7 +117,7 @@ module OpenAI
event: :"thread.run.incomplete" } - class ThreadRunIncomplete < OpenAI::BaseModel + class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Threads::Run attr_accessor event: :"thread.run.incomplete" @@ -136,7 +136,7 @@ module OpenAI event: :"thread.run.failed" } - class ThreadRunFailed < OpenAI::BaseModel + class ThreadRunFailed < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Threads::Run attr_accessor event: :"thread.run.failed" @@ -155,7 +155,7 @@ module OpenAI event: :"thread.run.cancelling" } - class ThreadRunCancelling < OpenAI::BaseModel + class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Threads::Run attr_accessor event: :"thread.run.cancelling" @@ -174,7 +174,7 @@ module OpenAI event: :"thread.run.cancelled" } - class ThreadRunCancelled < OpenAI::BaseModel + class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Threads::Run attr_accessor event: :"thread.run.cancelled" @@ -193,7 +193,7 @@ module OpenAI event: :"thread.run.expired" } - class ThreadRunExpired < OpenAI::BaseModel + class ThreadRunExpired < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Threads::Run attr_accessor event: :"thread.run.expired" diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs index 1fdf76c8..e0418d02 100644 --- a/sig/openai/models/beta/thread.rbs +++ b/sig/openai/models/beta/thread.rbs @@ -10,7 +10,7 @@ module OpenAI tool_resources: OpenAI::Models::Beta::Thread::ToolResources? } - class Thread < OpenAI::BaseModel + class Thread < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created_at: Integer @@ -37,7 +37,7 @@ module OpenAI file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch } - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel attr_reader code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter? def code_interpreter=: ( @@ -59,7 +59,7 @@ module OpenAI type code_interpreter = { file_ids: ::Array[String] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_reader file_ids: ::Array[String]? def file_ids=: (::Array[String]) -> ::Array[String] @@ -71,7 +71,7 @@ module OpenAI type file_search = { vector_store_ids: ::Array[String] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader vector_store_ids: ::Array[String]? 
 
            def vector_store_ids=: (::Array[String]) -> ::Array[String]
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index 4ca7db99..aeb818ab 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -21,7 +21,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters
 
-      class ThreadCreateAndRunParams < OpenAI::BaseModel
+      class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
@@ -85,7 +85,7 @@ module OpenAI
        type model = String | OpenAI::Models::chat_model
 
        module Model
-         extend OpenAI::Union
+         extend OpenAI::Internal::Type::Union
 
          def self?.variants: -> [String, OpenAI::Models::chat_model]
        end
 
@@ -97,7 +97,7 @@ module OpenAI
          tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
        }
 
-        class Thread < OpenAI::BaseModel
+        class Thread < OpenAI::Internal::Type::BaseModel
          attr_reader messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]?
 
          def messages=: (
@@ -124,7 +124,7 @@ module OpenAI
            metadata: OpenAI::Models::metadata?
          }
 
-          class Message < OpenAI::BaseModel
+          class Message < OpenAI::Internal::Type::BaseModel
            attr_accessor content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content
 
            attr_accessor role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role
@@ -147,7 +147,7 @@ module OpenAI
              | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
 
            module Content
-             extend OpenAI::Union
+             extend OpenAI::Internal::Type::Union
 
              def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
 
@@ -160,7 +160,7 @@ module OpenAI
            type role = :user | :assistant
 
            module Role
-             extend OpenAI::Enum
+             extend OpenAI::Internal::Type::Enum
 
              USER: :user
              ASSISTANT: :assistant
@@ -174,7 +174,7 @@ module OpenAI
              tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
            }
 
-            class Attachment < OpenAI::BaseModel
+            class Attachment < OpenAI::Internal::Type::BaseModel
              attr_reader file_id: String?
 
              def file_id=: (String) -> String
@@ -197,11 +197,11 @@ module OpenAI
              | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch
 
            module Tool
-             extend OpenAI::Union
+             extend OpenAI::Internal::Type::Union
 
              type file_search = { type: :file_search }
 
-              class FileSearch < OpenAI::BaseModel
+              class FileSearch < OpenAI::Internal::Type::BaseModel
                attr_accessor type: :file_search
 
                def initialize: (?type: :file_search) -> void
@@ -220,7 +220,7 @@ module OpenAI
            file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
          }
 
-          class ToolResources < OpenAI::BaseModel
+          class ToolResources < OpenAI::Internal::Type::BaseModel
            attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter?
 
            def code_interpreter=: (
@@ -242,7 +242,7 @@ module OpenAI
            type code_interpreter = { file_ids: ::Array[String] }
 
-            class CodeInterpreter < OpenAI::BaseModel
+            class CodeInterpreter < OpenAI::Internal::Type::BaseModel
              attr_reader file_ids: ::Array[String]?
def file_ids=: (::Array[String]) -> ::Array[String] @@ -258,7 +258,7 @@ module OpenAI vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader vector_store_ids: ::Array[String]? def vector_store_ids=: (::Array[String]) -> ::Array[String] @@ -283,7 +283,7 @@ module OpenAI metadata: OpenAI::Models::metadata? } - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy? def chunking_strategy=: ( @@ -309,11 +309,11 @@ module OpenAI | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type auto = { type: :auto } - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel attr_accessor type: :auto def initialize: (?type: :auto) -> void @@ -327,7 +327,7 @@ module OpenAI type: :static } - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel attr_accessor static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static attr_accessor type: :static @@ -345,7 +345,7 @@ module OpenAI max_chunk_size_tokens: Integer } - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel attr_accessor chunk_overlap_tokens: Integer attr_accessor max_chunk_size_tokens: Integer @@ -372,7 +372,7 @@ module OpenAI file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter? def code_interpreter=: ( @@ -394,7 +394,7 @@ module OpenAI type code_interpreter = { file_ids: ::Array[String] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_reader file_ids: ::Array[String]? def file_ids=: (::Array[String]) -> ::Array[String] @@ -406,7 +406,7 @@ module OpenAI type file_search = { vector_store_ids: ::Array[String] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader vector_store_ids: ::Array[String]? def vector_store_ids=: (::Array[String]) -> ::Array[String] @@ -423,7 +423,7 @@ module OpenAI | OpenAI::Models::Beta::FunctionTool module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] end @@ -434,7 +434,7 @@ module OpenAI last_messages: Integer? } - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel attr_accessor type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ attr_accessor last_messages: Integer? 
@@ -449,7 +449,7 @@ module OpenAI type type_ = :auto | :last_messages module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 6caf45d4..96d0d366 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -9,7 +9,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ThreadCreateParams < OpenAI::BaseModel + class ThreadCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -40,7 +40,7 @@ module OpenAI metadata: OpenAI::Models::metadata? } - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Beta::ThreadCreateParams::Message::content attr_accessor role: OpenAI::Models::Beta::ThreadCreateParams::Message::role @@ -63,7 +63,7 @@ module OpenAI | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] @@ -76,7 +76,7 @@ module OpenAI type role = :user | :assistant module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant @@ -90,7 +90,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] } - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String @@ -113,11 +113,11 @@ module OpenAI | OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type file_search = { type: :file_search } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_accessor type: :file_search def initialize: (?type: :file_search) -> void @@ -136,7 +136,7 @@ module OpenAI file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch } - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter? def code_interpreter=: ( @@ -158,7 +158,7 @@ module OpenAI type code_interpreter = { file_ids: ::Array[String] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_reader file_ids: ::Array[String]? def file_ids=: (::Array[String]) -> ::Array[String] @@ -174,7 +174,7 @@ module OpenAI vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader vector_store_ids: ::Array[String]? def vector_store_ids=: (::Array[String]) -> ::Array[String] @@ -199,7 +199,7 @@ module OpenAI metadata: OpenAI::Models::metadata? } - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? 
def chunking_strategy=: ( @@ -225,11 +225,11 @@ module OpenAI | OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static module ChunkingStrategy - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type auto = { type: :auto } - class Auto < OpenAI::BaseModel + class Auto < OpenAI::Internal::Type::BaseModel attr_accessor type: :auto def initialize: (?type: :auto) -> void @@ -243,7 +243,7 @@ module OpenAI type: :static } - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel attr_accessor static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static attr_accessor type: :static @@ -261,7 +261,7 @@ module OpenAI max_chunk_size_tokens: Integer } - class Static < OpenAI::BaseModel + class Static < OpenAI::Internal::Type::BaseModel attr_accessor chunk_overlap_tokens: Integer attr_accessor max_chunk_size_tokens: Integer diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index 27afc166..485a121a 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_delete_params = { } & OpenAI::Internal::Type::request_parameters - class ThreadDeleteParams < OpenAI::BaseModel + class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs index c3b1ff2b..1734e860 100644 --- a/sig/openai/models/beta/thread_deleted.rbs +++ b/sig/openai/models/beta/thread_deleted.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_deleted = { id: String, deleted: bool, object: :"thread.deleted" } - class ThreadDeleted < OpenAI::BaseModel + class ThreadDeleted < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor deleted: bool diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs index 4db38c39..90d1a84a 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_retrieve_params = { } & OpenAI::Internal::Type::request_parameters - class ThreadRetrieveParams < OpenAI::BaseModel + class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs index 672ef1d6..74c61286 100644 --- a/sig/openai/models/beta/thread_stream_event.rbs +++ b/sig/openai/models/beta/thread_stream_event.rbs @@ -8,7 +8,7 @@ module OpenAI enabled: bool } - class ThreadStreamEvent < OpenAI::BaseModel + class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel attr_accessor data: OpenAI::Models::Beta::Thread attr_accessor event: :"thread.created" diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index 83b3ecc8..81883e74 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -8,7 +8,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ThreadUpdateParams < OpenAI::BaseModel + class ThreadUpdateParams < 
OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -30,7 +30,7 @@ module OpenAI file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch } - class ToolResources < OpenAI::BaseModel + class ToolResources < OpenAI::Internal::Type::BaseModel attr_reader code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter? def code_interpreter=: ( @@ -52,7 +52,7 @@ module OpenAI type code_interpreter = { file_ids: ::Array[String] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_reader file_ids: ::Array[String]? def file_ids=: (::Array[String]) -> ::Array[String] @@ -64,7 +64,7 @@ module OpenAI type file_search = { vector_store_ids: ::Array[String] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader vector_store_ids: ::Array[String]? def vector_store_ids=: (::Array[String]) -> ::Array[String] diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs index 00961486..72c5b0ec 100644 --- a/sig/openai/models/beta/threads/annotation.rbs +++ b/sig/openai/models/beta/threads/annotation.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::FilePathAnnotation module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] end diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs index 84b0d6a2..0840b591 100644 --- a/sig/openai/models/beta/threads/annotation_delta.rbs +++ b/sig/openai/models/beta/threads/annotation_delta.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation module AnnotationDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] end diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs index da859605..b22f592c 100644 --- a/sig/openai/models/beta/threads/file_citation_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs @@ -11,7 +11,7 @@ module OpenAI type: :file_citation } - class FileCitationAnnotation < OpenAI::BaseModel + class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer attr_accessor file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation @@ -34,7 +34,7 @@ module OpenAI type file_citation = { file_id: String } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs index 09994a1b..eff94443 100644 --- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs @@ -12,7 +12,7 @@ module OpenAI text: String } - class FileCitationDeltaAnnotation < OpenAI::BaseModel + class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer 
attr_accessor type: :file_citation @@ -48,7 +48,7 @@ module OpenAI type file_citation = { file_id: String, quote: String } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs index d3e7acc5..4234a0fd 100644 --- a/sig/openai/models/beta/threads/file_path_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_annotation.rbs @@ -11,7 +11,7 @@ module OpenAI type: :file_path } - class FilePathAnnotation < OpenAI::BaseModel + class FilePathAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer attr_accessor file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath @@ -34,7 +34,7 @@ module OpenAI type file_path = { file_id: String } - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs index 6c8e2380..671f758d 100644 --- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs @@ -12,7 +12,7 @@ module OpenAI text: String } - class FilePathDeltaAnnotation < OpenAI::BaseModel + class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :file_path @@ -48,7 +48,7 @@ module OpenAI type file_path = { file_id: String } - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index 0fb507cc..8c594ae4 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -8,7 +8,7 @@ module OpenAI detail: OpenAI::Models::Beta::Threads::ImageFile::detail } - class ImageFile < OpenAI::BaseModel + class ImageFile < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_reader detail: OpenAI::Models::Beta::Threads::ImageFile::detail? 
@@ -27,7 +27,7 @@ module OpenAI type detail = :auto | :low | :high module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LOW: :low diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs index 7f0e99ee..85cbd424 100644 --- a/sig/openai/models/beta/threads/image_file_content_block.rbs +++ b/sig/openai/models/beta/threads/image_file_content_block.rbs @@ -8,7 +8,7 @@ module OpenAI type: :image_file } - class ImageFileContentBlock < OpenAI::BaseModel + class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel attr_accessor image_file: OpenAI::Models::Beta::Threads::ImageFile attr_accessor type: :image_file diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index f645c00f..1360958e 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -8,7 +8,7 @@ module OpenAI file_id: String } - class ImageFileDelta < OpenAI::BaseModel + class ImageFileDelta < OpenAI::Internal::Type::BaseModel attr_reader detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail? def detail=: ( @@ -29,7 +29,7 @@ module OpenAI type detail = :auto | :low | :high module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LOW: :low diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs index e7e85e2b..cdb0e087 100644 --- a/sig/openai/models/beta/threads/image_file_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs @@ -9,7 +9,7 @@ module OpenAI image_file: OpenAI::Models::Beta::Threads::ImageFileDelta } - class ImageFileDeltaBlock < OpenAI::BaseModel + class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :image_file diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 6039ca67..90adafe5 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -8,7 +8,7 @@ module OpenAI detail: OpenAI::Models::Beta::Threads::ImageURL::detail } - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String attr_reader detail: OpenAI::Models::Beta::Threads::ImageURL::detail? 
@@ -27,7 +27,7 @@ module OpenAI type detail = :auto | :low | :high module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LOW: :low diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs index bd4eb8a7..a8e5425f 100644 --- a/sig/openai/models/beta/threads/image_url_content_block.rbs +++ b/sig/openai/models/beta/threads/image_url_content_block.rbs @@ -8,7 +8,7 @@ module OpenAI type: :image_url } - class ImageURLContentBlock < OpenAI::BaseModel + class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel attr_accessor image_url: OpenAI::Models::Beta::Threads::ImageURL attr_accessor type: :image_url diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index 9cd1c721..b85ba1e9 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -8,7 +8,7 @@ module OpenAI url: String } - class ImageURLDelta < OpenAI::BaseModel + class ImageURLDelta < OpenAI::Internal::Type::BaseModel attr_reader detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail? def detail=: ( @@ -29,7 +29,7 @@ module OpenAI type detail = :auto | :low | :high module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LOW: :low diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs index 7e548334..4269c159 100644 --- a/sig/openai/models/beta/threads/image_url_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs @@ -9,7 +9,7 @@ module OpenAI image_url: OpenAI::Models::Beta::Threads::ImageURLDelta } - class ImageURLDeltaBlock < OpenAI::BaseModel + class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :image_url diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index e9a83633..02263056 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -20,7 +20,7 @@ module OpenAI thread_id: String } - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor assistant_id: String? @@ -74,7 +74,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] } - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? 
def file_id=: (String) -> String @@ -97,12 +97,12 @@ module OpenAI | OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type assistant_tools_file_search_type_only = { type: :file_search } - class AssistantToolsFileSearchTypeOnly < OpenAI::BaseModel + class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel attr_accessor type: :file_search def initialize: (?type: :file_search) -> void @@ -119,7 +119,7 @@ module OpenAI reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason } - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel attr_accessor reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason def initialize: ( @@ -136,7 +136,7 @@ module OpenAI | :run_failed module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum CONTENT_FILTER: :content_filter MAX_TOKENS: :max_tokens @@ -151,7 +151,7 @@ module OpenAI type role = :user | :assistant module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant @@ -162,7 +162,7 @@ module OpenAI type status = :in_progress | :incomplete | :completed module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress INCOMPLETE: :incomplete diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index fda3bd19..e3bc2805 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::RefusalContentBlock module MessageContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index c143f0ac..f06072ee 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock module MessageContentDelta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] end diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs index 9749af26..1e8a7d91 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -8,7 +8,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::TextContentBlockParam module MessageContentPartParam - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs 
b/sig/openai/models/beta/threads/message_create_params.rbs index 0091df98..0b4397bf 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class MessageCreateParams < OpenAI::BaseModel + class MessageCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -38,7 +38,7 @@ module OpenAI | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] @@ -51,7 +51,7 @@ module OpenAI type role = :user | :assistant module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant @@ -65,7 +65,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] } - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String @@ -88,11 +88,11 @@ module OpenAI | OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type file_search = { type: :file_search } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_accessor type: :file_search def initialize: (?type: :file_search) -> void diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index f400a51c..9abbe1c5 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -5,7 +5,7 @@ module OpenAI type message_delete_params = { thread_id: String } & OpenAI::Internal::Type::request_parameters - class MessageDeleteParams < OpenAI::BaseModel + class MessageDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/message_deleted.rbs b/sig/openai/models/beta/threads/message_deleted.rbs index f6d3a96e..d1356267 100644 --- a/sig/openai/models/beta/threads/message_deleted.rbs +++ b/sig/openai/models/beta/threads/message_deleted.rbs @@ -5,7 +5,7 @@ module OpenAI type message_deleted = { id: String, deleted: bool, object: :"thread.message.deleted" } - class MessageDeleted < OpenAI::BaseModel + class MessageDeleted < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor deleted: bool diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index f40f44ea..e74b69e1 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -8,7 +8,7 @@ module OpenAI role: OpenAI::Models::Beta::Threads::MessageDelta::role } - class MessageDelta < OpenAI::BaseModel + class MessageDelta < OpenAI::Internal::Type::BaseModel attr_reader content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta]? 
def content=: ( @@ -31,7 +31,7 @@ module OpenAI type role = :user | :assistant module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs index d34c13f6..553d6fef 100644 --- a/sig/openai/models/beta/threads/message_delta_event.rbs +++ b/sig/openai/models/beta/threads/message_delta_event.rbs @@ -9,7 +9,7 @@ module OpenAI object: :"thread.message.delta" } - class MessageDeltaEvent < OpenAI::BaseModel + class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor delta: OpenAI::Models::Beta::Threads::MessageDelta diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index 9b331b46..2cdda317 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -12,7 +12,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class MessageListParams < OpenAI::BaseModel + class MessageListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -52,7 +52,7 @@ module OpenAI type order = :asc | :desc module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index 58434310..c6ff2a71 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -5,7 +5,7 @@ module OpenAI type message_retrieve_params = { thread_id: String } & OpenAI::Internal::Type::request_parameters - class MessageRetrieveParams < OpenAI::BaseModel + class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index 7d14cb66..4f14f212 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String, metadata: OpenAI::Models::metadata? 
} & OpenAI::Internal::Type::request_parameters - class MessageUpdateParams < OpenAI::BaseModel + class MessageUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs index 041435a5..c91e1603 100644 --- a/sig/openai/models/beta/threads/refusal_content_block.rbs +++ b/sig/openai/models/beta/threads/refusal_content_block.rbs @@ -4,7 +4,7 @@ module OpenAI module Threads type refusal_content_block = { refusal: String, type: :refusal } - class RefusalContentBlock < OpenAI::BaseModel + class RefusalContentBlock < OpenAI::Internal::Type::BaseModel attr_accessor refusal: String attr_accessor type: :refusal diff --git a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs index 635407d5..6fc97486 100644 --- a/sig/openai/models/beta/threads/refusal_delta_block.rbs +++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs @@ -5,7 +5,7 @@ module OpenAI type refusal_delta_block = { index: Integer, type: :refusal, refusal: String } - class RefusalDeltaBlock < OpenAI::BaseModel + class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :refusal diff --git a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs index 9429819f..b2cd22a2 100644 --- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs +++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs @@ -9,7 +9,7 @@ module OpenAI type: :function } - class RequiredActionFunctionToolCall < OpenAI::BaseModel + class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function @@ -26,7 +26,7 @@ module OpenAI type function = { arguments: String, name: String } - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel attr_accessor arguments: String attr_accessor name: String diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index c36d145c..6fb8ca5b 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -33,7 +33,7 @@ module OpenAI top_p: Float? } - class Run < OpenAI::BaseModel + class Run < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor assistant_id: String @@ -125,7 +125,7 @@ module OpenAI reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason } - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel attr_reader reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason? 
def reason=: ( @@ -141,7 +141,7 @@ module OpenAI type reason = :max_completion_tokens | :max_prompt_tokens module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens @@ -156,7 +156,7 @@ module OpenAI message: String } - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel attr_accessor code: OpenAI::Models::Beta::Threads::Run::LastError::code attr_accessor message: String @@ -171,7 +171,7 @@ module OpenAI type code = :server_error | :rate_limit_exceeded | :invalid_prompt module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded @@ -187,7 +187,7 @@ module OpenAI type: :submit_tool_outputs } - class RequiredAction < OpenAI::BaseModel + class RequiredAction < OpenAI::Internal::Type::BaseModel attr_accessor submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs attr_accessor type: :submit_tool_outputs @@ -204,7 +204,7 @@ module OpenAI tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } - class SubmitToolOutputs < OpenAI::BaseModel + class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel attr_accessor tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] def initialize: ( @@ -221,7 +221,7 @@ module OpenAI last_messages: Integer? } - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel attr_accessor type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_ attr_accessor last_messages: Integer? @@ -236,7 +236,7 @@ module OpenAI type type_ = :auto | :last_messages module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LAST_MESSAGES: :last_messages @@ -252,7 +252,7 @@ module OpenAI total_tokens: Integer } - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel attr_accessor completion_tokens: Integer attr_accessor prompt_tokens: Integer diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 5460b5ad..4c384a32 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -5,7 +5,7 @@ module OpenAI type run_cancel_params = { thread_id: String } & OpenAI::Internal::Type::request_parameters - class RunCancelParams < OpenAI::BaseModel + class RunCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index 360db47b..f5462f0c 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -24,7 +24,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class RunCreateParams < OpenAI::BaseModel + class RunCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -99,7 +99,7 @@ module OpenAI metadata: OpenAI::Models::metadata? 
} - class AdditionalMessage < OpenAI::BaseModel + class AdditionalMessage < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content attr_accessor role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role @@ -122,7 +122,7 @@ module OpenAI | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] @@ -135,7 +135,7 @@ module OpenAI type role = :user | :assistant module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant @@ -149,7 +149,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] } - class Attachment < OpenAI::BaseModel + class Attachment < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String @@ -172,11 +172,11 @@ module OpenAI | OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type file_search = { type: :file_search } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_accessor type: :file_search def initialize: (?type: :file_search) -> void @@ -192,7 +192,7 @@ module OpenAI type model = String | OpenAI::Models::chat_model module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::chat_model] end @@ -203,7 +203,7 @@ module OpenAI last_messages: Integer? } - class TruncationStrategy < OpenAI::BaseModel + class TruncationStrategy < OpenAI::Internal::Type::BaseModel attr_accessor type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_ attr_accessor last_messages: Integer? 
@@ -218,7 +218,7 @@ module OpenAI type type_ = :auto | :last_messages module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto LAST_MESSAGES: :last_messages diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index 525ad66f..42825950 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -11,7 +11,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class RunListParams < OpenAI::BaseModel + class RunListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -46,7 +46,7 @@ module OpenAI type order = :asc | :desc module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index a76a9131..ddbeaf2b 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -5,7 +5,7 @@ module OpenAI type run_retrieve_params = { thread_id: String } & OpenAI::Internal::Type::request_parameters - class RunRetrieveParams < OpenAI::BaseModel + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/run_status.rbs b/sig/openai/models/beta/threads/run_status.rbs index 4b0e430b..4a106ac0 100644 --- a/sig/openai/models/beta/threads/run_status.rbs +++ b/sig/openai/models/beta/threads/run_status.rbs @@ -14,7 +14,7 @@ module OpenAI | :expired module RunStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum QUEUED: :queued IN_PROGRESS: :in_progress diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 48e05b57..294478bc 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -9,7 +9,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class RunSubmitToolOutputsParams < OpenAI::BaseModel + class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -27,7 +27,7 @@ module OpenAI type tool_output = { output: String, tool_call_id: String } - class ToolOutput < OpenAI::BaseModel + class ToolOutput < OpenAI::Internal::Type::BaseModel attr_reader output: String? def output=: (String) -> String diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index 6e46178a..8ea338f3 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -6,7 +6,7 @@ module OpenAI { thread_id: String, metadata: OpenAI::Models::metadata? 
} & OpenAI::Internal::Type::request_parameters - class RunUpdateParams < OpenAI::BaseModel + class RunUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs index 85c320b0..e002868b 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs @@ -6,7 +6,7 @@ module OpenAI type code_interpreter_logs = { index: Integer, type: :logs, logs: String } - class CodeInterpreterLogs < OpenAI::BaseModel + class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :logs diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs index f640b74e..e05b9326 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs @@ -10,7 +10,7 @@ module OpenAI image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } - class CodeInterpreterOutputImage < OpenAI::BaseModel + class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :image @@ -31,7 +31,7 @@ module OpenAI type image = { file_id: String } - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel attr_reader file_id: String? def file_id=: (String) -> String diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index fa6f7865..6ded6a40 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -10,7 +10,7 @@ module OpenAI type: :code_interpreter } - class CodeInterpreterToolCall < OpenAI::BaseModel + class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter @@ -31,7 +31,7 @@ module OpenAI outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_accessor input: String attr_accessor outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] @@ -48,11 +48,11 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type logs = { logs: String, type: :logs } - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel attr_accessor logs: String attr_accessor type: :logs @@ -68,7 +68,7 @@ module OpenAI type: :image } - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel attr_accessor image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image attr_accessor type: :image @@ -82,7 +82,7 @@ module OpenAI type image = { file_id: String } - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String 
def initialize: (file_id: String) -> void diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index 32dc3fb8..632bed4c 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -11,7 +11,7 @@ module OpenAI code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } - class CodeInterpreterToolCallDelta < OpenAI::BaseModel + class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :code_interpreter @@ -41,7 +41,7 @@ module OpenAI outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] } - class CodeInterpreter < OpenAI::BaseModel + class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_reader input: String? def input=: (String) -> String @@ -64,7 +64,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage module Output - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index f5a993d3..c0437f96 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -10,7 +10,7 @@ module OpenAI type: :file_search } - class FileSearchToolCall < OpenAI::BaseModel + class FileSearchToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch @@ -31,7 +31,7 @@ module OpenAI results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } - class FileSearch < OpenAI::BaseModel + class FileSearch < OpenAI::Internal::Type::BaseModel attr_reader ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions? def ranking_options=: ( @@ -57,7 +57,7 @@ module OpenAI score_threshold: Float } - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel attr_accessor ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker attr_accessor score_threshold: Float @@ -72,7 +72,7 @@ module OpenAI type ranker = :auto | :default_2024_08_21 module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 @@ -89,7 +89,7 @@ module OpenAI content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor file_name: String @@ -117,7 +117,7 @@ module OpenAI type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ } - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel attr_reader text: String? 
def text=: (String) -> String @@ -138,7 +138,7 @@ module OpenAI type type_ = :text module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs index 11ae3dc7..563a191d 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs @@ -6,7 +6,7 @@ module OpenAI type file_search_tool_call_delta = { file_search: top, index: Integer, type: :file_search, id: String } - class FileSearchToolCallDelta < OpenAI::BaseModel + class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel attr_accessor file_search: top attr_accessor index: Integer diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs index c3e9168a..324286aa 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs @@ -10,7 +10,7 @@ module OpenAI type: :function } - class FunctionToolCall < OpenAI::BaseModel + class FunctionToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function @@ -27,7 +27,7 @@ module OpenAI type function = { arguments: String, name: String, output: String? } - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel attr_accessor arguments: String attr_accessor name: String diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs index f6a01247..0f96b130 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs @@ -11,7 +11,7 @@ module OpenAI function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function } - class FunctionToolCallDelta < OpenAI::BaseModel + class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel attr_accessor index: Integer attr_accessor type: :function @@ -37,7 +37,7 @@ module OpenAI type function = { arguments: String, name: String, output: String? } - class Function < OpenAI::BaseModel + class Function < OpenAI::Internal::Type::BaseModel attr_reader arguments: String? 
def arguments=: (String) -> String diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs index 3ddacf7b..f7300f72 100644 --- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs @@ -9,7 +9,7 @@ module OpenAI type: :message_creation } - class MessageCreationStepDetails < OpenAI::BaseModel + class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel attr_accessor message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation attr_accessor type: :message_creation @@ -23,7 +23,7 @@ module OpenAI type message_creation = { message_id: String } - class MessageCreation < OpenAI::BaseModel + class MessageCreation < OpenAI::Internal::Type::BaseModel attr_accessor message_id: String def initialize: (message_id: String) -> void diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index a60e10a6..952766af 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -25,7 +25,7 @@ module OpenAI usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage? } - class RunStep < OpenAI::BaseModel + class RunStep < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor assistant_id: String @@ -85,7 +85,7 @@ module OpenAI message: String } - class LastError < OpenAI::BaseModel + class LastError < OpenAI::Internal::Type::BaseModel attr_accessor code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code attr_accessor message: String @@ -100,7 +100,7 @@ module OpenAI type code = :server_error | :rate_limit_exceeded module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded @@ -113,7 +113,7 @@ module OpenAI :in_progress | :cancelled | :failed | :completed | :expired module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress CANCELLED: :cancelled @@ -129,7 +129,7 @@ module OpenAI | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails module StepDetails - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] end @@ -137,7 +137,7 @@ module OpenAI type type_ = :message_creation | :tool_calls module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE_CREATION: :message_creation TOOL_CALLS: :tool_calls @@ -152,7 +152,7 @@ module OpenAI total_tokens: Integer } - class Usage < OpenAI::BaseModel + class Usage < OpenAI::Internal::Type::BaseModel attr_accessor completion_tokens: Integer attr_accessor prompt_tokens: Integer diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index 45b48449..1f3143ee 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -10,7 +10,7 @@ module OpenAI step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details } - class RunStepDelta < OpenAI::BaseModel + class RunStepDelta < OpenAI::Internal::Type::BaseModel attr_reader step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details? 
           def step_details=: (
@@ -28,7 +28,7 @@ module OpenAI
             | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject

         module StepDetails
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject]
         end
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
index 295594a8..0300ea33 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs
@@ -12,7 +12,7 @@ module OpenAI
             object: :"thread.run.step.delta"
           }

-        class RunStepDeltaEvent < OpenAI::BaseModel
+        class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel
           attr_accessor id: String

           attr_accessor delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta
diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
index d1daf15e..b397b858 100644
--- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs
@@ -11,7 +11,7 @@ module OpenAI
             message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation
           }

-        class RunStepDeltaMessageDelta < OpenAI::BaseModel
+        class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel
           attr_accessor type: :message_creation

           attr_reader message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation?
@@ -29,7 +29,7 @@ module OpenAI

           type message_creation = { message_id: String }

-          class MessageCreation < OpenAI::BaseModel
+          class MessageCreation < OpenAI::Internal::Type::BaseModel
             attr_reader message_id: String?

             def message_id=: (String) -> String
diff --git a/sig/openai/models/beta/threads/runs/run_step_include.rbs b/sig/openai/models/beta/threads/runs/run_step_include.rbs
index 529038db..30d9eb94 100644
--- a/sig/openai/models/beta/threads/runs/run_step_include.rbs
+++ b/sig/openai/models/beta/threads/runs/run_step_include.rbs
@@ -9,7 +9,7 @@ module OpenAI
           :"step_details.tool_calls[*].file_search.results[*].content"

         module RunStepInclude
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT: :"step_details.tool_calls[*].file_search.results[*].content"

diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs
index 04b7cf53..5fe683ff 100644
--- a/sig/openai/models/beta/threads/runs/step_list_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs
@@ -14,7 +14,7 @@ module OpenAI
           }
           & OpenAI::Internal::Type::request_parameters

-        class StepListParams < OpenAI::BaseModel
+        class StepListParams < OpenAI::Internal::Type::BaseModel
           extend OpenAI::Internal::Type::RequestParameters::Converter
           include OpenAI::Internal::Type::RequestParameters

@@ -59,7 +59,7 @@ module OpenAI
           type order = :asc | :desc

           module Order
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             ASC: :asc
             DESC: :desc
diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
index 06b14341..c759864c 100644
--- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
+++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs
@@ -11,7 +11,7 @@ module OpenAI
           }
           & OpenAI::Internal::Type::request_parameters

-        class StepRetrieveParams < OpenAI::BaseModel
+        class StepRetrieveParams < OpenAI::Internal::Type::BaseModel
           extend OpenAI::Internal::Type::RequestParameters::Converter
           include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs
index 2d13d06d..f3604833 100644
--- a/sig/openai/models/beta/threads/runs/tool_call.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call.rbs
@@ -9,7 +9,7 @@ module OpenAI
           | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall

         module ToolCall
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall]
         end
diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
index 2e349dcd..f5159374 100644
--- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs
@@ -9,7 +9,7 @@ module OpenAI
           | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta

         module ToolCallDelta
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta]
         end
diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
index 50619aae..f693e116 100644
--- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs
@@ -9,7 +9,7 @@ module OpenAI
             tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta]
           }

-        class ToolCallDeltaObject < OpenAI::BaseModel
+        class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel
           attr_accessor type: :tool_calls

           attr_reader tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta]?
diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
index 7f5c8851..c08bece5 100644
--- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
+++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs
@@ -9,7 +9,7 @@ module OpenAI
             type: :tool_calls
           }

-        class ToolCallsStepDetails < OpenAI::BaseModel
+        class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel
           attr_accessor tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call]

           attr_accessor type: :tool_calls
diff --git a/sig/openai/models/beta/threads/text.rbs b/sig/openai/models/beta/threads/text.rbs
index 1c4ebeb3..a4aa3141 100644
--- a/sig/openai/models/beta/threads/text.rbs
+++ b/sig/openai/models/beta/threads/text.rbs
@@ -8,7 +8,7 @@ module OpenAI
           value: String
         }

-      class Text < OpenAI::BaseModel
+      class Text < OpenAI::Internal::Type::BaseModel
         attr_accessor annotations: ::Array[OpenAI::Models::Beta::Threads::annotation]

         attr_accessor value: String
diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs
index b83f4585..2bcf7f2a 100644
--- a/sig/openai/models/beta/threads/text_content_block.rbs
+++ b/sig/openai/models/beta/threads/text_content_block.rbs
@@ -5,7 +5,7 @@ module OpenAI
       type text_content_block =
         { text: OpenAI::Models::Beta::Threads::Text, type: :text }

-      class TextContentBlock < OpenAI::BaseModel
+      class TextContentBlock < OpenAI::Internal::Type::BaseModel
         attr_accessor text: OpenAI::Models::Beta::Threads::Text

         attr_accessor type: :text
diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs
index bf0b5b7e..291be17d 100644
--- a/sig/openai/models/beta/threads/text_content_block_param.rbs
+++ b/sig/openai/models/beta/threads/text_content_block_param.rbs
@@ -4,7 +4,7 @@ module OpenAI
     module Threads
       type text_content_block_param = { text: String, type: :text }

-      class TextContentBlockParam < OpenAI::BaseModel
+      class TextContentBlockParam < OpenAI::Internal::Type::BaseModel
         attr_accessor text: String

         attr_accessor type: :text
diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs
index 721b012e..0b5ac945 100644
--- a/sig/openai/models/beta/threads/text_delta.rbs
+++ b/sig/openai/models/beta/threads/text_delta.rbs
@@ -8,7 +8,7 @@ module OpenAI
           value: String
         }

-      class TextDelta < OpenAI::BaseModel
+      class TextDelta < OpenAI::Internal::Type::BaseModel
         attr_reader annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta]?

         def annotations=: (
diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs
index 2a03d985..8d70d40b 100644
--- a/sig/openai/models/beta/threads/text_delta_block.rbs
+++ b/sig/openai/models/beta/threads/text_delta_block.rbs
@@ -9,7 +9,7 @@ module OpenAI
           text: OpenAI::Models::Beta::Threads::TextDelta
         }

-      class TextDeltaBlock < OpenAI::BaseModel
+      class TextDeltaBlock < OpenAI::Internal::Type::BaseModel
         attr_accessor index: Integer

         attr_accessor type: :text
diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs
index b0013010..54e0d554 100644
--- a/sig/openai/models/chat/chat_completion.rbs
+++ b/sig/openai/models/chat/chat_completion.rbs
@@ -15,7 +15,7 @@ module OpenAI
           usage: OpenAI::Models::CompletionUsage
         }

-      class ChatCompletion < OpenAI::BaseModel
+      class ChatCompletion < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice]
@@ -59,7 +59,7 @@ module OpenAI
             message: OpenAI::Models::Chat::ChatCompletionMessage
           }

-        class Choice < OpenAI::BaseModel
+        class Choice < OpenAI::Internal::Type::BaseModel
           attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason

           attr_accessor index: Integer
@@ -81,7 +81,7 @@ module OpenAI
             :stop | :length | :tool_calls | :content_filter | :function_call

           module FinishReason
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             STOP: :stop
             LENGTH: :length
@@ -98,7 +98,7 @@ module OpenAI
               refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?
             }

-          class Logprobs < OpenAI::BaseModel
+          class Logprobs < OpenAI::Internal::Type::BaseModel
            attr_accessor content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?

            attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?
@@ -115,7 +115,7 @@ module OpenAI
         type service_tier = :scale | :default

         module ServiceTier
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           SCALE: :scale
           DEFAULT: :default
diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
index 968d73be..4f92813c 100644
--- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs
@@ -14,7 +14,7 @@ module OpenAI
           tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]
         }

-      class ChatCompletionAssistantMessageParam < OpenAI::BaseModel
+      class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor role: :assistant

         attr_accessor audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?
@@ -49,7 +49,7 @@ module OpenAI

         type audio = { id: String }

-        class Audio < OpenAI::BaseModel
+        class Audio < OpenAI::Internal::Type::BaseModel
           attr_accessor id: String

           def initialize: (id: String) -> void
@@ -62,14 +62,14 @@ module OpenAI
             | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           type array_of_content_part =
             OpenAI::Models::Chat::ChatCompletionContentPartText
             | OpenAI::Models::Chat::ChatCompletionContentPartRefusal

           module ArrayOfContentPart
-            extend OpenAI::Union
+            extend OpenAI::Internal::Type::Union

             def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal]
           end
@@ -84,7 +84,7 @@ module OpenAI

         type function_call = { arguments: String, name: String }

-        class FunctionCall < OpenAI::BaseModel
+        class FunctionCall < OpenAI::Internal::Type::BaseModel
           attr_accessor arguments: String

           attr_accessor name: String
diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs
index a3b3cda4..18d4ba70 100644
--- a/sig/openai/models/chat/chat_completion_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_audio.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_audio =
         { id: String, data: String, expires_at: Integer, transcript: String }

-      class ChatCompletionAudio < OpenAI::BaseModel
+      class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor data: String
diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs
index dc4be41d..3fb9c17d 100644
--- a/sig/openai/models/chat/chat_completion_audio_param.rbs
+++ b/sig/openai/models/chat/chat_completion_audio_param.rbs
@@ -9,7 +9,7 @@ module OpenAI
           voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice
         }

-      class ChatCompletionAudioParam < OpenAI::BaseModel
+      class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel
         attr_accessor format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_

         attr_accessor voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice
@@ -24,7 +24,7 @@ module OpenAI
         type format_ = :wav | :mp3 | :flac | :opus | :pcm16

         module Format
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           WAV: :wav
           MP3: :mp3
@@ -50,7 +50,7 @@ module OpenAI
             | :verse

         module Voice
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, (:alloy
             | :ash
diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs
index 1ec7aadf..7cba70ad 100644
--- a/sig/openai/models/chat/chat_completion_chunk.rbs
+++ b/sig/openai/models/chat/chat_completion_chunk.rbs
@@ -15,7 +15,7 @@ module OpenAI
           usage: OpenAI::Models::CompletionUsage?
         }

-      class ChatCompletionChunk < OpenAI::BaseModel
+      class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]
@@ -55,7 +55,7 @@ module OpenAI
             logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs?
           }

-        class Choice < OpenAI::BaseModel
+        class Choice < OpenAI::Internal::Type::BaseModel
           attr_accessor delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta

           attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?
@@ -82,7 +82,7 @@ module OpenAI
               tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]
             }

-          class Delta < OpenAI::BaseModel
+          class Delta < OpenAI::Internal::Type::BaseModel
            attr_accessor content: String?

            attr_reader function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall?
@@ -117,7 +117,7 @@ module OpenAI

            type function_call = { arguments: String, name: String }

-            class FunctionCall < OpenAI::BaseModel
+            class FunctionCall < OpenAI::Internal::Type::BaseModel
              attr_reader arguments: String?

              def arguments=: (String) -> String
@@ -134,7 +134,7 @@ module OpenAI
            type role = :developer | :system | :user | :assistant | :tool

            module Role
-              extend OpenAI::Enum
+              extend OpenAI::Internal::Type::Enum

              DEVELOPER: :developer
              SYSTEM: :system
@@ -153,7 +153,7 @@ module OpenAI
                type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_
              }

-            class ToolCall < OpenAI::BaseModel
+            class ToolCall < OpenAI::Internal::Type::BaseModel
              attr_accessor index: Integer

              attr_reader id: String?
@@ -183,7 +183,7 @@ module OpenAI

              type function = { arguments: String, name: String }

-              class Function < OpenAI::BaseModel
+              class Function < OpenAI::Internal::Type::BaseModel
                attr_reader arguments: String?

                def arguments=: (String) -> String
@@ -200,7 +200,7 @@ module OpenAI
              type type_ = :function

              module Type
-                extend OpenAI::Enum
+                extend OpenAI::Internal::Type::Enum

                FUNCTION: :function

@@ -213,7 +213,7 @@ module OpenAI
             :stop | :length | :tool_calls | :content_filter | :function_call

           module FinishReason
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             STOP: :stop
             LENGTH: :length
@@ -230,7 +230,7 @@ module OpenAI
               refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?
             }

-          class Logprobs < OpenAI::BaseModel
+          class Logprobs < OpenAI::Internal::Type::BaseModel
            attr_accessor content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?

            attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?
@@ -247,7 +247,7 @@ module OpenAI
         type service_tier = :scale | :default

         module ServiceTier
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           SCALE: :scale
           DEFAULT: :default
diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs
index df907b34..470cda35 100644
--- a/sig/openai/models/chat/chat_completion_content_part.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part.rbs
@@ -10,7 +10,7 @@ module OpenAI
         | OpenAI::Models::Chat::ChatCompletionContentPart::File

       module ChatCompletionContentPart
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         type file =
           {
@@ -18,7 +18,7 @@ module OpenAI
             type: :file
           }

-        class File < OpenAI::BaseModel
+        class File < OpenAI::Internal::Type::BaseModel
           attr_accessor file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File

           attr_accessor type: :file
@@ -32,7 +32,7 @@ module OpenAI

           type file = { file_data: String, file_id: String, filename: String }

-          class File < OpenAI::BaseModel
+          class File < OpenAI::Internal::Type::BaseModel
            attr_reader file_data: String?

            def file_data=: (String) -> String
diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs
index d8ca22d9..6aa104fb 100644
--- a/sig/openai/models/chat/chat_completion_content_part_image.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs
@@ -9,7 +9,7 @@ module OpenAI
           type: :image_url
         }

-      class ChatCompletionContentPartImage < OpenAI::BaseModel
+      class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel
         attr_accessor image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL

         attr_accessor type: :image_url
@@ -27,7 +27,7 @@ module OpenAI
             detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail
           }

-        class ImageURL < OpenAI::BaseModel
+        class ImageURL < OpenAI::Internal::Type::BaseModel
           attr_accessor url: String

           attr_reader detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail?
@@ -46,7 +46,7 @@ module OpenAI
           type detail = :auto | :low | :high

           module Detail
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             AUTO: :auto
             LOW: :low
diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
index 8d40f203..e3267b32 100644
--- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs
@@ -9,7 +9,7 @@ module OpenAI
           type: :input_audio
         }

-      class ChatCompletionContentPartInputAudio < OpenAI::BaseModel
+      class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel
         attr_accessor input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio

         attr_accessor type: :input_audio
@@ -27,7 +27,7 @@ module OpenAI
             format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_
           }

-        class InputAudio < OpenAI::BaseModel
+        class InputAudio < OpenAI::Internal::Type::BaseModel
           attr_accessor data: String

           attr_accessor format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_
@@ -42,7 +42,7 @@ module OpenAI
           type format_ = :wav | :mp3

           module Format
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             WAV: :wav
             MP3: :mp3
diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
index e715e480..e7b62c6b 100644
--- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_content_part_refusal =
         { refusal: String, type: :refusal }

-      class ChatCompletionContentPartRefusal < OpenAI::BaseModel
+      class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel
         attr_accessor refusal: String

         attr_accessor type: :refusal
diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs
index 638f5e4e..b8a43400 100644
--- a/sig/openai/models/chat/chat_completion_content_part_text.rbs
+++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs
@@ -5,7 +5,7 @@ module OpenAI
     module Chat
       type chat_completion_content_part_text = { text: String, type: :text }

-      class ChatCompletionContentPartText < OpenAI::BaseModel
+      class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel
         attr_accessor text: String

         attr_accessor type: :text
diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs
index e776fc71..6d7e613c 100644
--- a/sig/openai/models/chat/chat_completion_deleted.rbs
+++ b/sig/openai/models/chat/chat_completion_deleted.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_deleted =
         { id: String, deleted: bool, object: :"chat.completion.deleted" }

-      class ChatCompletionDeleted < OpenAI::BaseModel
+      class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor deleted: bool
diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
index 9bd2727c..b0dfdf86 100644
--- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
           name: String
         }

-      class ChatCompletionDeveloperMessageParam < OpenAI::BaseModel
+      class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content

         attr_accessor role: :developer
@@ -31,7 +31,7 @@ module OpenAI
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]

diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs
index e7067a87..fd033d17 100644
--- a/sig/openai/models/chat/chat_completion_function_call_option.rbs
+++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs
@@ -5,7 +5,7 @@ module OpenAI
     module Chat
       type chat_completion_function_call_option = { name: String }

-      class ChatCompletionFunctionCallOption < OpenAI::BaseModel
+      class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel
         attr_accessor name: String

         def initialize: (name: String) -> void
diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs
index fad91c63..e83ffd82 100644
--- a/sig/openai/models/chat/chat_completion_function_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_function_message_param =
         { content: String?, name: String, role: :function }

-      class ChatCompletionFunctionMessageParam < OpenAI::BaseModel
+      class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor content: String?

         attr_accessor name: String
diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs
index d3668749..7fa0b355 100644
--- a/sig/openai/models/chat/chat_completion_message.rbs
+++ b/sig/openai/models/chat/chat_completion_message.rbs
@@ -14,7 +14,7 @@ module OpenAI
           tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]
         }

-      class ChatCompletionMessage < OpenAI::BaseModel
+      class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel
         attr_accessor content: String?

         attr_accessor refusal: String?
@@ -59,7 +59,7 @@ module OpenAI
             url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation
           }

-        class Annotation < OpenAI::BaseModel
+        class Annotation < OpenAI::Internal::Type::BaseModel
           attr_accessor type: :url_citation

           attr_accessor url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation
@@ -79,7 +79,7 @@ module OpenAI
               url: String
             }

-          class URLCitation < OpenAI::BaseModel
+          class URLCitation < OpenAI::Internal::Type::BaseModel
            attr_accessor end_index: Integer

            attr_accessor start_index: Integer
@@ -101,7 +101,7 @@ module OpenAI

         type function_call = { arguments: String, name: String }

-        class FunctionCall < OpenAI::BaseModel
+        class FunctionCall < OpenAI::Internal::Type::BaseModel
           attr_accessor arguments: String

           attr_accessor name: String
diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs
index 557d3c2c..e30fd658 100644
--- a/sig/openai/models/chat/chat_completion_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_message_param.rbs
@@ -12,7 +12,7 @@ module OpenAI
         | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam

       module ChatCompletionMessageParam
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam]
       end
diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
index cd147e2d..ca90663e 100644
--- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs
+++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs
@@ -10,7 +10,7 @@ module OpenAI
           type: :function
         }

-      class ChatCompletionMessageToolCall < OpenAI::BaseModel
+      class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function
@@ -27,7 +27,7 @@ module OpenAI

         type function = { arguments: String, name: String }

-        class Function < OpenAI::BaseModel
+        class Function < OpenAI::Internal::Type::BaseModel
           attr_accessor arguments: String

           attr_accessor name: String
diff --git a/sig/openai/models/chat/chat_completion_modality.rbs b/sig/openai/models/chat/chat_completion_modality.rbs
index b634c024..c0b279d7 100644
--- a/sig/openai/models/chat/chat_completion_modality.rbs
+++ b/sig/openai/models/chat/chat_completion_modality.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_modality = :text | :audio

       module ChatCompletionModality
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         TEXT: :text
         AUDIO: :audio
diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
index 62feb1bf..77b79236 100644
--- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
+++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs
@@ -9,7 +9,7 @@ module OpenAI
           type: :function
         }

-      class ChatCompletionNamedToolChoice < OpenAI::BaseModel
+      class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel
         attr_accessor function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function

         attr_accessor type: :function
@@ -23,7 +23,7 @@ module OpenAI

         type function = { name: String }

-        class Function < OpenAI::BaseModel
+        class Function < OpenAI::Internal::Type::BaseModel
           attr_accessor name: String

           def initialize: (name: String) -> void
diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs
index cbf6347c..1f5c681a 100644
--- a/sig/openai/models/chat/chat_completion_prediction_content.rbs
+++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs
@@ -9,7 +9,7 @@ module OpenAI
           type: :content
         }

-      class ChatCompletionPredictionContent < OpenAI::BaseModel
+      class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel
         attr_accessor content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content

         attr_accessor type: :content
@@ -25,7 +25,7 @@ module OpenAI
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]

diff --git a/sig/openai/models/chat/chat_completion_role.rbs b/sig/openai/models/chat/chat_completion_role.rbs
index 4744870b..96104afd 100644
--- a/sig/openai/models/chat/chat_completion_role.rbs
+++ b/sig/openai/models/chat/chat_completion_role.rbs
@@ -7,7 +7,7 @@ module OpenAI
         :developer | :system | :user | :assistant | :tool | :function

       module ChatCompletionRole
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         DEVELOPER: :developer
         SYSTEM: :system
diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs
index 7fed3536..bf86a211 100644
--- a/sig/openai/models/chat/chat_completion_stream_options.rbs
+++ b/sig/openai/models/chat/chat_completion_stream_options.rbs
@@ -5,7 +5,7 @@ module OpenAI
     module Chat
       type chat_completion_stream_options = { include_usage: bool }

-      class ChatCompletionStreamOptions < OpenAI::BaseModel
+      class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel
         attr_reader include_usage: bool?

         def include_usage=: (bool) -> bool
diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs
index 56a477ec..12811868 100644
--- a/sig/openai/models/chat/chat_completion_system_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
           name: String
         }

-      class ChatCompletionSystemMessageParam < OpenAI::BaseModel
+      class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content

         attr_accessor role: :system
@@ -31,7 +31,7 @@ module OpenAI
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]

diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs
index a076afef..968dd8a5 100644
--- a/sig/openai/models/chat/chat_completion_token_logprob.rbs
+++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs
@@ -11,7 +11,7 @@ module OpenAI
           top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]
         }

-      class ChatCompletionTokenLogprob < OpenAI::BaseModel
+      class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel
         attr_accessor token: String

         attr_accessor bytes: ::Array[Integer]?
@@ -32,7 +32,7 @@ module OpenAI
         type top_logprob =
           { token: String, bytes: ::Array[Integer]?, logprob: Float }

-        class TopLogprob < OpenAI::BaseModel
+        class TopLogprob < OpenAI::Internal::Type::BaseModel
           attr_accessor token: String

           attr_accessor bytes: ::Array[Integer]?
diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs
index d465043d..1fd217b1 100644
--- a/sig/openai/models/chat/chat_completion_tool.rbs
+++ b/sig/openai/models/chat/chat_completion_tool.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type chat_completion_tool =
         { function: OpenAI::Models::FunctionDefinition, type: :function }

-      class ChatCompletionTool < OpenAI::BaseModel
+      class ChatCompletionTool < OpenAI::Internal::Type::BaseModel
         attr_accessor function: OpenAI::Models::FunctionDefinition

         attr_accessor type: :function
diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
index 114b7588..aadc1e00 100644
--- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs
@@ -8,12 +8,12 @@ module OpenAI
         | OpenAI::Models::Chat::ChatCompletionNamedToolChoice

       module ChatCompletionToolChoiceOption
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         type auto = :none | :auto | :required

         module Auto
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           NONE: :none
           AUTO: :auto
diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
index e0e31f90..7808c0a9 100644
--- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
           tool_call_id: String
         }

-      class ChatCompletionToolMessageParam < OpenAI::BaseModel
+      class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content

         attr_accessor role: :tool
@@ -29,7 +29,7 @@ module OpenAI
           String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]

diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs
index 308744d9..ea9b16d6 100644
--- a/sig/openai/models/chat/chat_completion_user_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs
@@ -10,7 +10,7 @@ module OpenAI
           name: String
         }

-      class ChatCompletionUserMessageParam < OpenAI::BaseModel
+      class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel
         attr_accessor content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content

         attr_accessor role: :user
@@ -31,7 +31,7 @@ module OpenAI
           String | ::Array[OpenAI::Models::Chat::chat_completion_content_part]

         module Content
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]]

diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs
index 1515d670..d10cf21d 100644
--- a/sig/openai/models/chat/completion_create_params.rbs
+++ b/sig/openai/models/chat/completion_create_params.rbs
@@ -36,7 +36,7 @@ module OpenAI
         }
         & OpenAI::Internal::Type::request_parameters

-      class CompletionCreateParams < OpenAI::BaseModel
+      class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
         extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters

@@ -167,7 +167,7 @@ module OpenAI
         type model = String | OpenAI::Models::chat_model

         module Model
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, OpenAI::Models::chat_model]
         end
@@ -177,12 +177,12 @@ module OpenAI
             | OpenAI::Models::Chat::ChatCompletionFunctionCallOption

         module FunctionCall
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           type function_call_mode = :none | :auto

           module FunctionCallMode
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             NONE: :none
             AUTO: :auto
@@ -200,7 +200,7 @@ module OpenAI
             parameters: OpenAI::Models::function_parameters
           }

-        class Function < OpenAI::BaseModel
+        class Function < OpenAI::Internal::Type::BaseModel
           attr_accessor name: String

           attr_reader description: String?
@@ -225,7 +225,7 @@ module OpenAI
         type modality = :text | :audio

         module Modality
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           TEXT: :text
           AUDIO: :audio
@@ -239,7 +239,7 @@ module OpenAI
           | OpenAI::Models::ResponseFormatJSONObject

         module ResponseFormat
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject]
         end
@@ -247,7 +247,7 @@ module OpenAI
         type service_tier = :auto | :default

         module ServiceTier
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           AUTO: :auto
           DEFAULT: :default
@@ -258,7 +258,7 @@ module OpenAI
         type stop = (String | ::Array[String])?

         module Stop
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

           def self?.variants: -> [String, ::Array[String]]

@@ -273,7 +273,7 @@ module OpenAI
             user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
           }

-        class WebSearchOptions < OpenAI::BaseModel
+        class WebSearchOptions < OpenAI::Internal::Type::BaseModel
           attr_reader search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size?

           def search_context_size=: (
@@ -292,7 +292,7 @@ module OpenAI
           type search_context_size = :low | :medium | :high

           module SearchContextSize
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             LOW: :low
             MEDIUM: :medium
@@ -307,7 +307,7 @@ module OpenAI
               type: :approximate
             }

-          class UserLocation < OpenAI::BaseModel
+          class UserLocation < OpenAI::Internal::Type::BaseModel
            attr_accessor approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate

            attr_accessor type: :approximate
@@ -327,7 +327,7 @@ module OpenAI
                timezone: String
              }

-            class Approximate < OpenAI::BaseModel
+            class Approximate < OpenAI::Internal::Type::BaseModel
              attr_reader city: String?

              def city=: (String) -> String
diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs
index e4206b21..324fe253 100644
--- a/sig/openai/models/chat/completion_delete_params.rbs
+++ b/sig/openai/models/chat/completion_delete_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
       type completion_delete_params =
         { } & OpenAI::Internal::Type::request_parameters

-      class CompletionDeleteParams < OpenAI::BaseModel
+      class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel
         extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs
index 7f171502..e7bbb7d5 100644
--- a/sig/openai/models/chat/completion_list_params.rbs
+++ b/sig/openai/models/chat/completion_list_params.rbs
@@ -11,7 +11,7 @@ module OpenAI
         }
         & OpenAI::Internal::Type::request_parameters

-      class CompletionListParams < OpenAI::BaseModel
+      class CompletionListParams < OpenAI::Internal::Type::BaseModel
         extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters

@@ -49,7 +49,7 @@ module OpenAI
         type order = :asc | :desc

         module Order
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           ASC: :asc
           DESC: :desc
diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs
index 3c64e3fb..7d97ef11 100644
--- a/sig/openai/models/chat/completion_retrieve_params.rbs
+++ b/sig/openai/models/chat/completion_retrieve_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
       type completion_retrieve_params =
         { } & OpenAI::Internal::Type::request_parameters

-      class CompletionRetrieveParams < OpenAI::BaseModel
+      class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel
         extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs
index dbfdd87d..dc63a22a 100644
--- a/sig/openai/models/chat/completion_update_params.rbs
+++ b/sig/openai/models/chat/completion_update_params.rbs
@@ -5,7 +5,7 @@ module OpenAI
         {
           metadata: OpenAI::Models::metadata?
         }
         & OpenAI::Internal::Type::request_parameters

-      class CompletionUpdateParams < OpenAI::BaseModel
+      class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel
         extend OpenAI::Internal::Type::RequestParameters::Converter
         include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs
index 1d5f30a2..73db541f 100644
--- a/sig/openai/models/chat/completions/message_list_params.rbs
+++ b/sig/openai/models/chat/completions/message_list_params.rbs
@@ -10,7 +10,7 @@ module OpenAI
           }
           & OpenAI::Internal::Type::request_parameters

-        class MessageListParams < OpenAI::BaseModel
+        class MessageListParams < OpenAI::Internal::Type::BaseModel
           extend OpenAI::Internal::Type::RequestParameters::Converter
           include OpenAI::Internal::Type::RequestParameters

@@ -40,7 +40,7 @@ module OpenAI
           type order = :asc | :desc

           module Order
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

             ASC: :asc
             DESC: :desc
diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs
index d0aa3240..0f571b30 100644
--- a/sig/openai/models/chat_model.rbs
+++ b/sig/openai/models/chat_model.rbs
@@ -46,7 +46,7 @@ module OpenAI
       | :"gpt-3.5-turbo-16k-0613"

     module ChatModel
-      extend OpenAI::Enum
+      extend OpenAI::Internal::Type::Enum

       O3_MINI: :"o3-mini"
       O3_MINI_2025_01_31: :"o3-mini-2025-01-31"
diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs
index 8d14f63b..e4a99bcf 100644
--- a/sig/openai/models/comparison_filter.rbs
+++ b/sig/openai/models/comparison_filter.rbs
@@ -7,7 +7,7 @@ module OpenAI
         value: OpenAI::Models::ComparisonFilter::value
       }

-    class ComparisonFilter < OpenAI::BaseModel
+    class ComparisonFilter < OpenAI::Internal::Type::BaseModel
       attr_accessor key: String

       attr_accessor type: OpenAI::Models::ComparisonFilter::type_
@@ -25,7 +25,7 @@ module OpenAI
       type type_ = :eq | :ne | :gt | :gte | :lt | :lte

       module Type
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         EQ: :eq
         NE: :ne
@@ -40,7 +40,7 @@ module OpenAI
       type value = String | Float | bool

       module Value
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, Float, bool]
       end
diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs
index 0e189869..ed2ca5d6 100644
--- a/sig/openai/models/completion.rbs
+++ b/sig/openai/models/completion.rbs
@@ -11,7 +11,7 @@ module OpenAI
         usage: OpenAI::Models::CompletionUsage
       }

-    class Completion < OpenAI::BaseModel
+    class Completion < OpenAI::Internal::Type::BaseModel
       attr_accessor id: String

       attr_accessor choices: ::Array[OpenAI::Models::CompletionChoice]
diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs
index e506a455..5e50fc5f 100644
--- a/sig/openai/models/completion_choice.rbs
+++ b/sig/openai/models/completion_choice.rbs
@@ -8,7 +8,7 @@ module OpenAI
         text: String
       }

-    class CompletionChoice < OpenAI::BaseModel
+    class CompletionChoice < OpenAI::Internal::Type::BaseModel
       attr_accessor finish_reason: OpenAI::Models::CompletionChoice::finish_reason

       attr_accessor index: Integer
@@ -29,7 +29,7 @@ module OpenAI
       type finish_reason = :stop | :length | :content_filter

       module FinishReason
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         STOP: :stop
         LENGTH: :length
@@ -46,7 +46,7 @@ module OpenAI
           top_logprobs: ::Array[::Hash[Symbol, Float]]
         }

-      class Logprobs < OpenAI::BaseModel
+      class Logprobs < OpenAI::Internal::Type::BaseModel
         attr_reader text_offset: ::Array[Integer]?

         def text_offset=: (::Array[Integer]) -> ::Array[Integer]
diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs
index 3bbe8da3..d4b9afbe 100644
--- a/sig/openai/models/completion_create_params.rbs
+++ b/sig/openai/models/completion_create_params.rbs
@@ -22,7 +22,7 @@ module OpenAI
       }
       & OpenAI::Internal::Type::request_parameters

-    class CompletionCreateParams < OpenAI::BaseModel
+    class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

@@ -89,7 +89,7 @@ module OpenAI
         String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002"

       module Model
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, (:"gpt-3.5-turbo-instruct"
           | :"davinci-002"
@@ -104,7 +104,7 @@ module OpenAI
         String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]]

       module Prompt
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]]

@@ -124,7 +124,7 @@ module OpenAI
       type stop = (String | ::Array[String])?

       module Stop
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, ::Array[String]]

diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs
index fe20a24b..5a98db8b 100644
--- a/sig/openai/models/completion_usage.rbs
+++ b/sig/openai/models/completion_usage.rbs
@@ -9,7 +9,7 @@ module OpenAI
         prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
       }

-    class CompletionUsage < OpenAI::BaseModel
+    class CompletionUsage < OpenAI::Internal::Type::BaseModel
       attr_accessor completion_tokens: Integer

       attr_accessor prompt_tokens: Integer
@@ -46,7 +46,7 @@ module OpenAI
           rejected_prediction_tokens: Integer
         }

-      class CompletionTokensDetails < OpenAI::BaseModel
+      class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel
         attr_reader accepted_prediction_tokens: Integer?

         def accepted_prediction_tokens=: (Integer) -> Integer
@@ -76,7 +76,7 @@ module OpenAI
       type prompt_tokens_details =
         { audio_tokens: Integer, cached_tokens: Integer }

-      class PromptTokensDetails < OpenAI::BaseModel
+      class PromptTokensDetails < OpenAI::Internal::Type::BaseModel
        attr_reader audio_tokens: Integer?

        def audio_tokens=: (Integer) -> Integer
diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs
index 211de484..2f98c0d3 100644
--- a/sig/openai/models/compound_filter.rbs
+++ b/sig/openai/models/compound_filter.rbs
@@ -6,7 +6,7 @@ module OpenAI
         type: OpenAI::Models::CompoundFilter::type_
       }

-    class CompoundFilter < OpenAI::BaseModel
+    class CompoundFilter < OpenAI::Internal::Type::BaseModel
       attr_accessor filters: ::Array[OpenAI::Models::CompoundFilter::filter]

       attr_accessor type: OpenAI::Models::CompoundFilter::type_
@@ -21,7 +21,7 @@ module OpenAI
       type filter = OpenAI::Models::ComparisonFilter | top

       module Filter
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [OpenAI::Models::ComparisonFilter, top]
       end
@@ -29,7 +29,7 @@ module OpenAI
       type type_ = :and | :or

       module Type
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         AND: :and
         OR: :or
diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs
index e9ae13dc..d9d813c3 100644
--- a/sig/openai/models/create_embedding_response.rbs
+++ b/sig/openai/models/create_embedding_response.rbs
@@ -8,7 +8,7 @@ module OpenAI
         usage: OpenAI::Models::CreateEmbeddingResponse::Usage
       }

-    class CreateEmbeddingResponse < OpenAI::BaseModel
+    class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel
       attr_accessor data: ::Array[OpenAI::Models::Embedding]

       attr_accessor model: String
@@ -28,7 +28,7 @@ module OpenAI

      type usage = { prompt_tokens: Integer, total_tokens: Integer }

-      class Usage < OpenAI::BaseModel
+      class Usage < OpenAI::Internal::Type::BaseModel
        attr_accessor prompt_tokens: Integer

        attr_accessor total_tokens: Integer
diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs
index 08eb26a7..c0787d7b 100644
--- a/sig/openai/models/embedding.rbs
+++ b/sig/openai/models/embedding.rbs
@@ -3,7 +3,7 @@ module OpenAI
     type embedding =
       { embedding: ::Array[Float], index: Integer, object: :embedding }

-    class Embedding < OpenAI::BaseModel
+    class Embedding < OpenAI::Internal::Type::BaseModel
       attr_accessor embedding: ::Array[Float]

       attr_accessor index: Integer
diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs
index 1c00db8b..fc247341 100644
--- a/sig/openai/models/embedding_create_params.rbs
+++ b/sig/openai/models/embedding_create_params.rbs
@@ -10,7 +10,7 @@ module OpenAI
       }
       & OpenAI::Internal::Type::request_parameters

-    class EmbeddingCreateParams < OpenAI::BaseModel
+    class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

@@ -47,7 +47,7 @@ module OpenAI
         String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]]

       module Input
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]]

@@ -67,7 +67,7 @@ module OpenAI
       type model = String | OpenAI::Models::embedding_model

       module Model
-        extend OpenAI::Union
+        extend OpenAI::Internal::Type::Union

         def self?.variants: -> [String, OpenAI::Models::embedding_model]
       end
@@ -75,7 +75,7 @@ module OpenAI
       type encoding_format = :float | :base64

       module EncodingFormat
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         FLOAT: :float
         BASE64: :base64
diff --git a/sig/openai/models/embedding_model.rbs b/sig/openai/models/embedding_model.rbs
index 30fd2a8a..eebc0c7b 100644
--- a/sig/openai/models/embedding_model.rbs
+++ b/sig/openai/models/embedding_model.rbs
@@ -6,7 +6,7 @@ module OpenAI
       | :"text-embedding-3-large"

     module EmbeddingModel
-      extend OpenAI::Enum
+      extend OpenAI::Internal::Type::Enum

       TEXT_EMBEDDING_ADA_002: :"text-embedding-ada-002"
       TEXT_EMBEDDING_3_SMALL: :"text-embedding-3-small"
diff --git a/sig/openai/models/error_object.rbs b/sig/openai/models/error_object.rbs
index bd0ae3a7..0a526b8a 100644
--- a/sig/openai/models/error_object.rbs
+++ b/sig/openai/models/error_object.rbs
@@ -3,7 +3,7 @@ module OpenAI
     type error_object =
       { code: String?, message: String, param: String?, type: String }

-    class ErrorObject < OpenAI::BaseModel
+    class ErrorObject < OpenAI::Internal::Type::BaseModel
       attr_accessor code: String?

       attr_accessor message: String
diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs
index 7ac3728a..ebe4dcb5 100644
--- a/sig/openai/models/file_chunking_strategy.rbs
+++ b/sig/openai/models/file_chunking_strategy.rbs
@@ -5,7 +5,7 @@ module OpenAI
       | OpenAI::Models::OtherFileChunkingStrategyObject

     module FileChunkingStrategy
-      extend OpenAI::Union
+      extend OpenAI::Internal::Type::Union

       def self?.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject]
     end
diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs
index df5528b1..22233a43 100644
--- a/sig/openai/models/file_chunking_strategy_param.rbs
+++ b/sig/openai/models/file_chunking_strategy_param.rbs
@@ -5,7 +5,7 @@ module OpenAI
       | OpenAI::Models::StaticFileChunkingStrategyObjectParam

     module FileChunkingStrategyParam
-      extend OpenAI::Union
+      extend OpenAI::Internal::Type::Union

       def self?.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
     end
diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs
index c9adfe43..d0eb084a 100644
--- a/sig/openai/models/file_content_params.rbs
+++ b/sig/openai/models/file_content_params.rbs
@@ -2,7 +2,7 @@ module OpenAI
   module Models
     type file_content_params = { } & OpenAI::Internal::Type::request_parameters

-    class FileContentParams < OpenAI::BaseModel
+    class FileContentParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs
index f08a1328..46d8450e 100644
--- a/sig/openai/models/file_create_params.rbs
+++ b/sig/openai/models/file_create_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
       { file: (IO | StringIO), purpose: OpenAI::Models::file_purpose }
       & OpenAI::Internal::Type::request_parameters

-    class FileCreateParams < OpenAI::BaseModel
+    class FileCreateParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs
index 993174ef..bd9575f3 100644
--- a/sig/openai/models/file_delete_params.rbs
+++ b/sig/openai/models/file_delete_params.rbs
@@ -2,7 +2,7 @@ module OpenAI
   module Models
     type file_delete_params = { } & OpenAI::Internal::Type::request_parameters

-    class FileDeleteParams < OpenAI::BaseModel
+    class FileDeleteParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs
index 04eef616..e4134f28 100644
--- a/sig/openai/models/file_deleted.rbs
+++ b/sig/openai/models/file_deleted.rbs
@@ -2,7 +2,7 @@ module OpenAI
   module Models
     type file_deleted = { id: String, deleted: bool, object: :file }

-    class FileDeleted < OpenAI::BaseModel
+    class FileDeleted < OpenAI::Internal::Type::BaseModel
       attr_accessor id: String

       attr_accessor deleted: bool
diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs
index 89b0392a..8cc477d7 100644
--- a/sig/openai/models/file_list_params.rbs
+++ b/sig/openai/models/file_list_params.rbs
@@ -9,7 +9,7 @@ module OpenAI
       }
       & OpenAI::Internal::Type::request_parameters

-    class FileListParams < OpenAI::BaseModel
+    class FileListParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

@@ -44,7 +44,7 @@ module OpenAI
       type order = :asc | :desc

       module Order
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         ASC: :asc
         DESC: :desc
diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs
index c4b2cd6b..2ea4ec93 100644
--- a/sig/openai/models/file_object.rbs
+++ b/sig/openai/models/file_object.rbs
@@ -13,7 +13,7 @@ module OpenAI
         status_details: String
       }

-    class FileObject < OpenAI::BaseModel
+    class FileObject < OpenAI::Internal::Type::BaseModel
       attr_accessor id: String

       attr_accessor bytes: Integer
@@ -60,7 +60,7 @@ module OpenAI
         | :vision

       module Purpose
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         ASSISTANTS: :assistants
         ASSISTANTS_OUTPUT: :assistants_output
@@ -76,7 +76,7 @@ module OpenAI
       type status = :uploaded | :processed | :error

       module Status
-        extend OpenAI::Enum
+        extend OpenAI::Internal::Type::Enum

         UPLOADED: :uploaded
         PROCESSED: :processed
diff --git a/sig/openai/models/file_purpose.rbs b/sig/openai/models/file_purpose.rbs
index 84067b2c..9b17621d 100644
--- a/sig/openai/models/file_purpose.rbs
+++ b/sig/openai/models/file_purpose.rbs
@@ -4,7 +4,7 @@ module OpenAI
       :assistants | :batch | :"fine-tune" | :vision | :user_data | :evals

     module FilePurpose
-      extend OpenAI::Enum
+      extend OpenAI::Internal::Type::Enum

       ASSISTANTS: :assistants
       BATCH: :batch
diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs
index e81b21d8..818e9432 100644
--- a/sig/openai/models/file_retrieve_params.rbs
+++ b/sig/openai/models/file_retrieve_params.rbs
@@ -3,7 +3,7 @@ module OpenAI
     type file_retrieve_params =
       { } & OpenAI::Internal::Type::request_parameters

-    class FileRetrieveParams < OpenAI::BaseModel
+    class FileRetrieveParams < OpenAI::Internal::Type::BaseModel
       extend OpenAI::Internal::Type::RequestParameters::Converter
       include OpenAI::Internal::Type::RequestParameters

diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
index b939849a..8a66e5a9 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs
@@ -26,7 +26,7 @@ module OpenAI
           method_: OpenAI::Models::FineTuning::FineTuningJob::Method
         }

-      class FineTuningJob < OpenAI::BaseModel
+      class FineTuningJob < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor created_at: Integer
@@ -95,7 +95,7 @@ module OpenAI

        type error =
          { code: String, message: String, param: String? }

-        class Error < OpenAI::BaseModel
+        class Error < OpenAI::Internal::Type::BaseModel
           attr_accessor code: String

           attr_accessor message: String
@@ -118,7 +118,7 @@ module OpenAI
             n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs
           }

-        class Hyperparameters < OpenAI::BaseModel
+        class Hyperparameters < OpenAI::Internal::Type::BaseModel
           attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?

           def batch_size=: (
@@ -148,7 +148,7 @@ module OpenAI
           type batch_size = :auto | Integer

           module BatchSize
-            extend OpenAI::Union
+            extend OpenAI::Internal::Type::Union

             def self?.variants: -> [:auto, Integer]
           end
@@ -156,7 +156,7 @@ module OpenAI
           type learning_rate_multiplier = :auto | Float

           module LearningRateMultiplier
-            extend OpenAI::Union
+            extend OpenAI::Internal::Type::Union

             def self?.variants: -> [:auto, Float]
           end
@@ -164,7 +164,7 @@ module OpenAI
           type n_epochs = :auto | Integer

           module NEpochs
-            extend OpenAI::Union
+            extend OpenAI::Internal::Type::Union

             def self?.variants: -> [:auto, Integer]
           end
@@ -179,7 +179,7 @@ module OpenAI
           | :cancelled

         module Status
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           VALIDATING_FILES: :validating_files
           QUEUED: :queued
@@ -198,7 +198,7 @@ module OpenAI
             type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_
           }

-        class Method < OpenAI::BaseModel
+        class Method < OpenAI::Internal::Type::BaseModel
           attr_reader dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo?

           def dpo=: (
@@ -230,7 +230,7 @@ module OpenAI
              hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters
            }

-          class Dpo < OpenAI::BaseModel
+          class Dpo < OpenAI::Internal::Type::BaseModel
            attr_reader hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters?

            def hyperparameters=: (
@@ -251,7 +251,7 @@ module OpenAI
                n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs
              }

-            class Hyperparameters < OpenAI::BaseModel
+            class Hyperparameters < OpenAI::Internal::Type::BaseModel
              attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size?

              def batch_size=: (
@@ -288,7 +288,7 @@ module OpenAI
              type batch_size = :auto | Integer

              module BatchSize
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Integer]
              end
@@ -296,7 +296,7 @@ module OpenAI
              type beta = :auto | Float

              module Beta
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Float]
              end
@@ -304,7 +304,7 @@ module OpenAI
              type learning_rate_multiplier = :auto | Float

              module LearningRateMultiplier
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Float]
              end
@@ -312,7 +312,7 @@ module OpenAI
              type n_epochs = :auto | Integer

              module NEpochs
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Integer]
              end
@@ -324,7 +324,7 @@ module OpenAI
              hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters
            }

-          class Supervised < OpenAI::BaseModel
+          class Supervised < OpenAI::Internal::Type::BaseModel
            attr_reader hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters?

            def hyperparameters=: (
@@ -344,7 +344,7 @@ module OpenAI
                n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs
              }

-            class Hyperparameters < OpenAI::BaseModel
+            class Hyperparameters < OpenAI::Internal::Type::BaseModel
              attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size?

              def batch_size=: (
@@ -374,7 +374,7 @@ module OpenAI
              type batch_size = :auto | Integer

              module BatchSize
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Integer]
              end
@@ -382,7 +382,7 @@ module OpenAI
              type learning_rate_multiplier = :auto | Float

              module LearningRateMultiplier
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Float]
              end
@@ -390,7 +390,7 @@ module OpenAI
              type n_epochs = :auto | Integer

              module NEpochs
-                extend OpenAI::Union
+                extend OpenAI::Internal::Type::Union

                def self?.variants: -> [:auto, Integer]
              end
@@ -400,7 +400,7 @@ module OpenAI
            type type_ = :supervised | :dpo

            module Type
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

              SUPERVISED: :supervised
              DPO: :dpo
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
index e9f208ef..dfc567a3 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs
@@ -14,7 +14,7 @@ module OpenAI
           type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_
         }

-      class FineTuningJobEvent < OpenAI::BaseModel
+      class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String

         attr_accessor created_at: Integer
@@ -50,7 +50,7 @@ module OpenAI
         type level = :info | :warn | :error

         module Level
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           INFO: :info
           WARN: :warn
@@ -62,7 +62,7 @@ module OpenAI
         type type_ = :message | :metrics

         module Type
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

           MESSAGE: :message
           METRICS: :metrics
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
index f5fc5cec..b952d550 100644
--- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
+++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs
@@ -11,7 +11,7 @@ module OpenAI
           tags: ::Array[String]
         }

-      class FineTuningJobWandbIntegration < OpenAI::BaseModel
+      class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel
         attr_accessor project: String

         attr_accessor entity: String?
diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index e156cf05..559090c6 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -9,7 +9,7 @@ module OpenAI wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } - class FineTuningJobWandbIntegrationObject < OpenAI::BaseModel + class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel attr_accessor type: :wandb attr_accessor wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index 097c08c0..46fadbe0 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -3,7 +3,7 @@ module OpenAI module FineTuning type job_cancel_params = { } & OpenAI::Internal::Type::request_parameters - class JobCancelParams < OpenAI::BaseModel + class JobCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index fd4f66bb..86773182 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -15,7 +15,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class JobCreateParams < OpenAI::BaseModel + class JobCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -68,7 +68,7 @@ module OpenAI | :"gpt-4o-mini" module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, (:"babbage-002" | :"davinci-002" @@ -88,7 +88,7 @@ module OpenAI n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs } - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size? def batch_size=: ( @@ -118,7 +118,7 @@ module OpenAI type batch_size = :auto | Integer module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -126,7 +126,7 @@ module OpenAI type learning_rate_multiplier = :auto | Float module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Float] end @@ -134,7 +134,7 @@ module OpenAI type n_epochs = :auto | Integer module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -146,7 +146,7 @@ module OpenAI wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } - class Integration < OpenAI::BaseModel + class Integration < OpenAI::Internal::Type::BaseModel attr_accessor type: :wandb attr_accessor wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb @@ -166,7 +166,7 @@ module OpenAI tags: ::Array[String] } - class Wandb < OpenAI::BaseModel + class Wandb < OpenAI::Internal::Type::BaseModel attr_accessor project: String attr_accessor entity: String? 
@@ -195,7 +195,7 @@ module OpenAI type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ } - class Method < OpenAI::BaseModel + class Method < OpenAI::Internal::Type::BaseModel attr_reader dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo? def dpo=: ( @@ -227,7 +227,7 @@ module OpenAI hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters } - class Dpo < OpenAI::BaseModel + class Dpo < OpenAI::Internal::Type::BaseModel attr_reader hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters? def hyperparameters=: ( @@ -248,7 +248,7 @@ module OpenAI n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs } - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size? def batch_size=: ( @@ -285,7 +285,7 @@ module OpenAI type batch_size = :auto | Integer module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -293,7 +293,7 @@ module OpenAI type beta = :auto | Float module Beta - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Float] end @@ -301,7 +301,7 @@ module OpenAI type learning_rate_multiplier = :auto | Float module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Float] end @@ -309,7 +309,7 @@ module OpenAI type n_epochs = :auto | Integer module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -321,7 +321,7 @@ module OpenAI hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters } - class Supervised < OpenAI::BaseModel + class Supervised < OpenAI::Internal::Type::BaseModel attr_reader hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters? def hyperparameters=: ( @@ -341,7 +341,7 @@ module OpenAI n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs } - class Hyperparameters < OpenAI::BaseModel + class Hyperparameters < OpenAI::Internal::Type::BaseModel attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size? 
def batch_size=: ( @@ -371,7 +371,7 @@ module OpenAI type batch_size = :auto | Integer module BatchSize - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -379,7 +379,7 @@ module OpenAI type learning_rate_multiplier = :auto | Float module LearningRateMultiplier - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Float] end @@ -387,7 +387,7 @@ module OpenAI type n_epochs = :auto | Integer module NEpochs - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [:auto, Integer] end @@ -397,7 +397,7 @@ module OpenAI type type_ = :supervised | :dpo module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SUPERVISED: :supervised DPO: :dpo diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index 2bd10d63..79968e83 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -5,7 +5,7 @@ module OpenAI { after: String, limit: Integer } & OpenAI::Internal::Type::request_parameters - class JobListEventsParams < OpenAI::BaseModel + class JobListEventsParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index a09f5864..b32ac587 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -5,7 +5,7 @@ module OpenAI { after: String, limit: Integer, metadata: ::Hash[Symbol, String]? } & OpenAI::Internal::Type::request_parameters - class JobListParams < OpenAI::BaseModel + class JobListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 466850ae..179216b1 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -4,7 +4,7 @@ module OpenAI type job_retrieve_params = { } & OpenAI::Internal::Type::request_parameters - class JobRetrieveParams < OpenAI::BaseModel + class JobRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index dc5aa5a0..012833d8 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -6,7 +6,7 @@ module OpenAI { after: String, limit: Integer } & OpenAI::Internal::Type::request_parameters - class CheckpointListParams < OpenAI::BaseModel + class CheckpointListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index 4f7cf1b7..1f11cb97 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ 
b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -13,7 +13,7 @@ module OpenAI step_number: Integer } - class FineTuningJobCheckpoint < OpenAI::BaseModel + class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created_at: Integer @@ -51,7 +51,7 @@ module OpenAI valid_mean_token_accuracy: Float } - class Metrics < OpenAI::BaseModel + class Metrics < OpenAI::Internal::Type::BaseModel attr_reader full_valid_loss: Float? def full_valid_loss=: (Float) -> Float diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs index 194ec56f..c8acb986 100644 --- a/sig/openai/models/function_definition.rbs +++ b/sig/openai/models/function_definition.rbs @@ -8,7 +8,7 @@ module OpenAI strict: bool? } - class FunctionDefinition < OpenAI::BaseModel + class FunctionDefinition < OpenAI::Internal::Type::BaseModel attr_accessor name: String attr_reader description: String? diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs index ed120c3b..f95aa74f 100644 --- a/sig/openai/models/image.rbs +++ b/sig/openai/models/image.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type image = { :b64_json => String, revised_prompt: String, url: String } - class Image < OpenAI::BaseModel + class Image < OpenAI::Internal::Type::BaseModel attr_reader b64_json: String? def b64_json=: (String) -> String diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index ebbc6451..7b3071d3 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -11,7 +11,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ImageCreateVariationParams < OpenAI::BaseModel + class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -44,7 +44,7 @@ module OpenAI type model = String | OpenAI::Models::image_model module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::image_model] end @@ -52,7 +52,7 @@ module OpenAI type response_format = :url | :b64_json module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL: :url B64_JSON: :b64_json @@ -63,7 +63,7 @@ module OpenAI type size = :"256x256" | :"512x512" | :"1024x1024" module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256: :"256x256" SIZE_512X512: :"512x512" diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 616d4a05..9cb884a7 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -13,7 +13,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ImageEditParams < OpenAI::BaseModel + class ImageEditParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -54,7 +54,7 @@ module OpenAI type model = String | OpenAI::Models::image_model module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::image_model] end @@ -62,7 +62,7 @@ module OpenAI type response_format = :url | :b64_json module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL: :url B64_JSON: :b64_json @@ -73,7 +73,7 @@ module OpenAI 
type size = :"256x256" | :"512x512" | :"1024x1024" module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256: :"256x256" SIZE_512X512: :"512x512" diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index 5af7ffe3..869a9475 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -13,7 +13,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ImageGenerateParams < OpenAI::BaseModel + class ImageGenerateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -56,7 +56,7 @@ module OpenAI type model = String | OpenAI::Models::image_model module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::image_model] end @@ -64,7 +64,7 @@ module OpenAI type quality = :standard | :hd module Quality - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum STANDARD: :standard HD: :hd @@ -75,7 +75,7 @@ module OpenAI type response_format = :url | :b64_json module ResponseFormat - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum URL: :url B64_JSON: :b64_json @@ -87,7 +87,7 @@ module OpenAI :"256x256" | :"512x512" | :"1024x1024" | :"1792x1024" | :"1024x1792" module Size - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SIZE_256X256: :"256x256" SIZE_512X512: :"512x512" @@ -101,7 +101,7 @@ module OpenAI type style = :vivid | :natural module Style - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum VIVID: :vivid NATURAL: :natural diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs index 97e2e64f..31f6927a 100644 --- a/sig/openai/models/image_model.rbs +++ b/sig/openai/models/image_model.rbs @@ -3,7 +3,7 @@ module OpenAI type image_model = :"dall-e-2" | :"dall-e-3" module ImageModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum DALL_E_2: :"dall-e-2" DALL_E_3: :"dall-e-3" diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index cafb98c6..e5608584 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -3,7 +3,7 @@ module OpenAI type images_response = { created: Integer, data: ::Array[OpenAI::Models::Image] } - class ImagesResponse < OpenAI::BaseModel + class ImagesResponse < OpenAI::Internal::Type::BaseModel attr_accessor created: Integer attr_accessor data: ::Array[OpenAI::Models::Image] diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs index 3540d0d1..88000ee1 100644 --- a/sig/openai/models/model.rbs +++ b/sig/openai/models/model.rbs @@ -3,7 +3,7 @@ module OpenAI type model = { id: String, created: Integer, object: :model, owned_by: String } - class Model < OpenAI::BaseModel + class Model < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created: Integer diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index 2f5cfd40..ab28b6e7 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type model_delete_params = { } & OpenAI::Internal::Type::request_parameters - class ModelDeleteParams < OpenAI::BaseModel + class ModelDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include 
OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/model_deleted.rbs b/sig/openai/models/model_deleted.rbs index 6297c483..f1c1b5f9 100644 --- a/sig/openai/models/model_deleted.rbs +++ b/sig/openai/models/model_deleted.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type model_deleted = { id: String, deleted: bool, object: String } - class ModelDeleted < OpenAI::BaseModel + class ModelDeleted < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor deleted: bool diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 4511e755..ffbe02ed 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type model_list_params = { } & OpenAI::Internal::Type::request_parameters - class ModelListParams < OpenAI::BaseModel + class ModelListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index dfcd6daa..8cf3afab 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type model_retrieve_params = { } & OpenAI::Internal::Type::request_parameters - class ModelRetrieveParams < OpenAI::BaseModel + class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index 9967761f..2a38be49 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -8,7 +8,7 @@ module OpenAI flagged: bool } - class Moderation < OpenAI::BaseModel + class Moderation < OpenAI::Internal::Type::BaseModel attr_accessor categories: OpenAI::Models::Moderation::Categories attr_accessor category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes @@ -43,7 +43,7 @@ module OpenAI violence_graphic: bool } - class Categories < OpenAI::BaseModel + class Categories < OpenAI::Internal::Type::BaseModel attr_accessor harassment: bool attr_accessor harassment_threatening: bool @@ -106,7 +106,7 @@ module OpenAI violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] } - class CategoryAppliedInputTypes < OpenAI::BaseModel + class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel attr_accessor harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] attr_accessor harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] @@ -154,7 +154,7 @@ module OpenAI type harassment = :text module Harassment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -164,7 +164,7 @@ module OpenAI type harassment_threatening = :text module HarassmentThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -174,7 +174,7 @@ module OpenAI type hate = :text module Hate - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -184,7 +184,7 @@ module OpenAI type hate_threatening = :text module HateThreatening - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -194,7 +194,7 @@ module OpenAI type illicit = :text module Illicit - extend OpenAI::Enum + extend 
OpenAI::Internal::Type::Enum TEXT: :text @@ -204,7 +204,7 @@ module OpenAI type illicit_violent = :text module IllicitViolent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -214,7 +214,7 @@ module OpenAI type self_harm = :text | :image module SelfHarm - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -225,7 +225,7 @@ module OpenAI type self_harm_instruction = :text | :image module SelfHarmInstruction - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -236,7 +236,7 @@ module OpenAI type self_harm_intent = :text | :image module SelfHarmIntent - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -247,7 +247,7 @@ module OpenAI type sexual = :text | :image module Sexual - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -258,7 +258,7 @@ module OpenAI type sexual_minor = :text module SexualMinor - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text @@ -268,7 +268,7 @@ module OpenAI type violence = :text | :image module Violence - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -279,7 +279,7 @@ module OpenAI type violence_graphic = :text | :image module ViolenceGraphic - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text IMAGE: :image @@ -305,7 +305,7 @@ module OpenAI violence_graphic: Float } - class CategoryScores < OpenAI::BaseModel + class CategoryScores < OpenAI::Internal::Type::BaseModel attr_accessor harassment: Float attr_accessor harassment_threatening: Float diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index 833c5ff4..e8c1e576 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -7,7 +7,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ModerationCreateParams < OpenAI::BaseModel + class ModerationCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -33,7 +33,7 @@ module OpenAI | ::Array[OpenAI::Models::moderation_multi_modal_input] module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] @@ -50,7 +50,7 @@ module OpenAI type model = String | OpenAI::Models::moderation_model module Model - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::moderation_model] end diff --git a/sig/openai/models/moderation_create_response.rbs b/sig/openai/models/moderation_create_response.rbs index 5934337f..d6432898 100644 --- a/sig/openai/models/moderation_create_response.rbs +++ b/sig/openai/models/moderation_create_response.rbs @@ -7,7 +7,7 @@ module OpenAI results: ::Array[OpenAI::Models::Moderation] } - class ModerationCreateResponse < OpenAI::BaseModel + class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor model: String diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs index e23f649d..4aa41a71 100644 --- a/sig/openai/models/moderation_image_url_input.rbs +++ b/sig/openai/models/moderation_image_url_input.rbs @@ -6,7 +6,7 @@ module OpenAI type: :image_url } - class ModerationImageURLInput < OpenAI::BaseModel + 
class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel attr_accessor image_url: OpenAI::Models::ModerationImageURLInput::ImageURL attr_accessor type: :image_url @@ -20,7 +20,7 @@ module OpenAI type image_url = { url: String } - class ImageURL < OpenAI::BaseModel + class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String def initialize: (url: String) -> void diff --git a/sig/openai/models/moderation_model.rbs b/sig/openai/models/moderation_model.rbs index 1f6ab3fd..228342be 100644 --- a/sig/openai/models/moderation_model.rbs +++ b/sig/openai/models/moderation_model.rbs @@ -7,7 +7,7 @@ module OpenAI | :"text-moderation-stable" module ModerationModel - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum OMNI_MODERATION_LATEST: :"omni-moderation-latest" OMNI_MODERATION_2024_09_26: :"omni-moderation-2024-09-26" diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs index 967234fc..61d672c1 100644 --- a/sig/openai/models/moderation_multi_modal_input.rbs +++ b/sig/openai/models/moderation_multi_modal_input.rbs @@ -5,7 +5,7 @@ module OpenAI | OpenAI::Models::ModerationTextInput module ModerationMultiModalInput - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput] end diff --git a/sig/openai/models/moderation_text_input.rbs b/sig/openai/models/moderation_text_input.rbs index 45143b65..c6d70b96 100644 --- a/sig/openai/models/moderation_text_input.rbs +++ b/sig/openai/models/moderation_text_input.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type moderation_text_input = { text: String, type: :text } - class ModerationTextInput < OpenAI::BaseModel + class ModerationTextInput < OpenAI::Internal::Type::BaseModel attr_accessor text: String attr_accessor type: :text diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs index 60c24b65..01290f70 100644 --- a/sig/openai/models/other_file_chunking_strategy_object.rbs +++ b/sig/openai/models/other_file_chunking_strategy_object.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type other_file_chunking_strategy_object = { type: :other } - class OtherFileChunkingStrategyObject < OpenAI::BaseModel + class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel attr_accessor type: :other def initialize: (?type: :other) -> void diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index d4060722..c299e779 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -6,7 +6,7 @@ module OpenAI generate_summary: OpenAI::Models::Reasoning::generate_summary? } - class Reasoning < OpenAI::BaseModel + class Reasoning < OpenAI::Internal::Type::BaseModel attr_accessor effort: OpenAI::Models::reasoning_effort? attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary? 
@@ -21,7 +21,7 @@ module OpenAI type generate_summary = :concise | :detailed module GenerateSummary - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum CONCISE: :concise DETAILED: :detailed diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs index 7ee7ea11..d449d634 100644 --- a/sig/openai/models/reasoning_effort.rbs +++ b/sig/openai/models/reasoning_effort.rbs @@ -3,7 +3,7 @@ module OpenAI type reasoning_effort = :low | :medium | :high module ReasoningEffort - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LOW: :low MEDIUM: :medium diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs index f228d234..09c67374 100644 --- a/sig/openai/models/response_format_json_object.rbs +++ b/sig/openai/models/response_format_json_object.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type response_format_json_object = { type: :json_object } - class ResponseFormatJSONObject < OpenAI::BaseModel + class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel attr_accessor type: :json_object def initialize: (?type: :json_object) -> void diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs index 05e4a025..4d9f4d87 100644 --- a/sig/openai/models/response_format_json_schema.rbs +++ b/sig/openai/models/response_format_json_schema.rbs @@ -6,7 +6,7 @@ module OpenAI type: :json_schema } - class ResponseFormatJSONSchema < OpenAI::BaseModel + class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel attr_accessor json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema attr_accessor type: :json_schema @@ -26,7 +26,7 @@ module OpenAI strict: bool? } - class JSONSchema < OpenAI::BaseModel + class JSONSchema < OpenAI::Internal::Type::BaseModel attr_accessor name: String attr_reader description: String? 
diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs index 691bace2..7b892f07 100644 --- a/sig/openai/models/response_format_text.rbs +++ b/sig/openai/models/response_format_text.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type response_format_text = { type: :text } - class ResponseFormatText < OpenAI::BaseModel + class ResponseFormatText < OpenAI::Internal::Type::BaseModel attr_accessor type: :text def initialize: (?type: :text) -> void diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index 0b984fde..f30d8909 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -9,7 +9,7 @@ module OpenAI type: :computer_use_preview } - class ComputerTool < OpenAI::BaseModel + class ComputerTool < OpenAI::Internal::Type::BaseModel attr_accessor display_height: Float attr_accessor display_width: Float @@ -30,7 +30,7 @@ module OpenAI type environment = :mac | :windows | :ubuntu | :browser module Environment - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAC: :mac WINDOWS: :windows diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 04936217..1120ea2c 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -8,7 +8,7 @@ module OpenAI type: OpenAI::Models::Responses::EasyInputMessage::type_ } - class EasyInputMessage < OpenAI::BaseModel + class EasyInputMessage < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Responses::EasyInputMessage::content attr_accessor role: OpenAI::Models::Responses::EasyInputMessage::role @@ -32,7 +32,7 @@ module OpenAI | OpenAI::Models::Responses::response_input_message_content_list module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list] end @@ -40,7 +40,7 @@ module OpenAI type role = :user | :assistant | :system | :developer module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user ASSISTANT: :assistant @@ -53,7 +53,7 @@ module OpenAI type type_ = :message module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE: :message diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 69476cbd..6c174053 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -10,7 +10,7 @@ module OpenAI ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions } - class FileSearchTool < OpenAI::BaseModel + class FileSearchTool < OpenAI::Internal::Type::BaseModel attr_accessor type: :file_search attr_accessor vector_store_ids: ::Array[String] @@ -45,7 +45,7 @@ module OpenAI OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end @@ -56,7 +56,7 @@ module OpenAI score_threshold: Float } - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel attr_reader ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker? 
def ranker=: ( @@ -77,7 +77,7 @@ module OpenAI type ranker = :auto | :"default-2024-11-15" module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index b0e5aa03..f69630ac 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -10,7 +10,7 @@ module OpenAI description: String? } - class FunctionTool < OpenAI::BaseModel + class FunctionTool < OpenAI::Internal::Type::BaseModel attr_accessor name: String attr_accessor parameters: ::Hash[Symbol, top] diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 18327460..3e047f28 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -11,7 +11,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class InputItemListParams < OpenAI::BaseModel + class InputItemListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -53,7 +53,7 @@ module OpenAI type order = :asc | :desc module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 254aa7e1..86acfc4c 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -27,7 +27,7 @@ module OpenAI user: String } - class Response < OpenAI::BaseModel + class Response < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created_at: Float @@ -118,7 +118,7 @@ module OpenAI reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason } - class IncompleteDetails < OpenAI::BaseModel + class IncompleteDetails < OpenAI::Internal::Type::BaseModel attr_reader reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason? 
def reason=: ( @@ -134,7 +134,7 @@ module OpenAI type reason = :max_output_tokens | :content_filter module Reason - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter @@ -149,7 +149,7 @@ module OpenAI | OpenAI::Models::Responses::ToolChoiceFunction module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end @@ -157,7 +157,7 @@ module OpenAI type truncation = :auto | :disabled module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto DISABLED: :disabled diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index f6a9c6fd..7a218696 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -4,7 +4,7 @@ module OpenAI type response_audio_delta_event = { delta: String, type: :"response.audio.delta" } - class ResponseAudioDeltaEvent < OpenAI::BaseModel + class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String attr_accessor type: :"response.audio.delta" diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 9449f329..cbff6d81 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_audio_done_event = { type: :"response.audio.done" } - class ResponseAudioDoneEvent < OpenAI::BaseModel + class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor type: :"response.audio.done" def initialize: (?type: :"response.audio.done") -> void diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index b1e7a534..6f8fc8cb 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -4,7 +4,7 @@ module OpenAI type response_audio_transcript_delta_event = { delta: String, type: :"response.audio.transcript.delta" } - class ResponseAudioTranscriptDeltaEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String attr_accessor type: :"response.audio.transcript.delta" diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index ca5e5241..9d7072d8 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -4,7 +4,7 @@ module OpenAI type response_audio_transcript_done_event = { type: :"response.audio.transcript.done" } - class ResponseAudioTranscriptDoneEvent < OpenAI::BaseModel + class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor type: :"response.audio.transcript.done" def initialize: (?type: :"response.audio.transcript.done") -> void diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs 
b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index a330e86c..23837b29 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.code_interpreter_call.code.delta" } - class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 45e0a1fd..d1358071 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.code_interpreter_call.code.done" } - class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor code: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index 46aae17c..840dabb1 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.code_interpreter_call.completed" } - class ResponseCodeInterpreterCallCompletedEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index a5384a8c..f19814d0 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.code_interpreter_call.in_progress" } - class ResponseCodeInterpreterCallInProgressEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index 3fcff8bf..fdaebc1e 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.code_interpreter_call.interpreting" } - class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::BaseModel + class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer diff --git 
a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index 0a64a853..b6aa4db6 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -10,7 +10,7 @@ module OpenAI type: :code_interpreter_call } - class ResponseCodeInterpreterToolCall < OpenAI::BaseModel + class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor code: String @@ -36,11 +36,11 @@ module OpenAI | OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files module Result - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type logs = { logs: String, type: :logs } - class Logs < OpenAI::BaseModel + class Logs < OpenAI::Internal::Type::BaseModel attr_accessor logs: String attr_accessor type: :logs @@ -56,7 +56,7 @@ module OpenAI type: :files } - class Files < OpenAI::BaseModel + class Files < OpenAI::Internal::Type::BaseModel attr_accessor files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] attr_accessor type: :files @@ -70,7 +70,7 @@ module OpenAI type file = { file_id: String, mime_type: String } - class File < OpenAI::BaseModel + class File < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor mime_type: String @@ -87,7 +87,7 @@ module OpenAI type status = :in_progress | :interpreting | :completed module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress INTERPRETING: :interpreting diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 372c8eb9..5ef101ac 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -7,7 +7,7 @@ module OpenAI type: :"response.completed" } - class ResponseCompletedEvent < OpenAI::BaseModel + class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Models::Responses::Response attr_accessor type: :"response.completed" diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 1cf4a066..ab81a03e 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -11,7 +11,7 @@ module OpenAI type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ } - class ResponseComputerToolCall < OpenAI::BaseModel + class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor action: OpenAI::Models::Responses::ResponseComputerToolCall::action @@ -47,7 +47,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait module Action - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type click = { @@ -57,7 +57,7 @@ module OpenAI y_: Integer } - class Click < OpenAI::BaseModel + class Click < OpenAI::Internal::Type::BaseModel attr_accessor button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button attr_accessor type: :click @@ -78,7 +78,7 @@ module OpenAI type button = :left | :right | :wheel | :back | :forward module Button - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LEFT: :left RIGHT: :right @@ -92,7 +92,7 @@ module OpenAI type double_click = { type: :double_click, x: 
Integer, y_: Integer } - class DoubleClick < OpenAI::BaseModel + class DoubleClick < OpenAI::Internal::Type::BaseModel attr_accessor type: :double_click attr_accessor x: Integer @@ -114,7 +114,7 @@ module OpenAI type: :drag } - class Drag < OpenAI::BaseModel + class Drag < OpenAI::Internal::Type::BaseModel attr_accessor path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path] attr_accessor type: :drag @@ -128,7 +128,7 @@ module OpenAI type path = { x: Integer, y_: Integer } - class Path < OpenAI::BaseModel + class Path < OpenAI::Internal::Type::BaseModel attr_accessor x: Integer attr_accessor y_: Integer @@ -141,7 +141,7 @@ module OpenAI type keypress = { keys: ::Array[String], type: :keypress } - class Keypress < OpenAI::BaseModel + class Keypress < OpenAI::Internal::Type::BaseModel attr_accessor keys: ::Array[String] attr_accessor type: :keypress @@ -153,7 +153,7 @@ module OpenAI type move = { type: :move, x: Integer, y_: Integer } - class Move < OpenAI::BaseModel + class Move < OpenAI::Internal::Type::BaseModel attr_accessor type: :move attr_accessor x: Integer @@ -167,7 +167,7 @@ module OpenAI type screenshot = { type: :screenshot } - class Screenshot < OpenAI::BaseModel + class Screenshot < OpenAI::Internal::Type::BaseModel attr_accessor type: :screenshot def initialize: (?type: :screenshot) -> void @@ -184,7 +184,7 @@ module OpenAI y_: Integer } - class Scroll < OpenAI::BaseModel + class Scroll < OpenAI::Internal::Type::BaseModel attr_accessor scroll_x: Integer attr_accessor scroll_y: Integer @@ -208,7 +208,7 @@ module OpenAI type type_ = { text: String, type: :type } - class Type < OpenAI::BaseModel + class Type < OpenAI::Internal::Type::BaseModel attr_accessor text: String attr_accessor type: :type @@ -220,7 +220,7 @@ module OpenAI type wait = { type: :wait } - class Wait < OpenAI::BaseModel + class Wait < OpenAI::Internal::Type::BaseModel attr_accessor type: :wait def initialize: (?type: :wait) -> void @@ -234,7 +234,7 @@ module OpenAI type pending_safety_check = { id: String, code: String, message: String } - class PendingSafetyCheck < OpenAI::BaseModel + class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor code: String @@ -249,7 +249,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed @@ -261,7 +261,7 @@ module OpenAI type type_ = :computer_call module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum COMPUTER_CALL: :computer_call diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 793ed85e..32edd0b4 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -11,7 +11,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status } - class ResponseComputerToolCallOutputItem < OpenAI::BaseModel + class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor call_id: String @@ -46,7 +46,7 @@ module OpenAI type acknowledged_safety_check = { id: String, code: String, message: String } - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor code: 
String @@ -61,7 +61,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index 3d8cc77b..eae3e223 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -4,7 +4,7 @@ module OpenAI type response_computer_tool_call_output_screenshot = { type: :computer_screenshot, file_id: String, image_url: String } - class ResponseComputerToolCallOutputScreenshot < OpenAI::BaseModel + class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel attr_accessor type: :computer_screenshot attr_reader file_id: String? diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs index d8469d84..4e48827c 100644 --- a/sig/openai/models/responses/response_content.rbs +++ b/sig/openai/models/responses/response_content.rbs @@ -9,7 +9,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputRefusal module ResponseContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index f4ca687c..e4c3e4bf 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -10,7 +10,7 @@ module OpenAI type: :"response.content_part.added" } - class ResponseContentPartAddedEvent < OpenAI::BaseModel + class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor item_id: String @@ -36,7 +36,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputRefusal module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 4ffbc22b..ff72b2b3 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -10,7 +10,7 @@ module OpenAI type: :"response.content_part.done" } - class ResponseContentPartDoneEvent < OpenAI::BaseModel + class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor item_id: String @@ -36,7 +36,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputRefusal module Part - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index d4c12563..5e04dd5a 100644 --- 
a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -23,7 +23,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class ResponseCreateParams < OpenAI::BaseModel + class ResponseCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -101,7 +101,7 @@ module OpenAI type input = String | OpenAI::Models::Responses::response_input module Input - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::Responses::response_input] end @@ -112,7 +112,7 @@ module OpenAI | OpenAI::Models::Responses::ToolChoiceFunction module ToolChoice - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end @@ -120,7 +120,7 @@ module OpenAI type truncation = :auto | :disabled module Truncation - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto DISABLED: :disabled diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index bd5a386a..6848ecf7 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -7,7 +7,7 @@ module OpenAI type: :"response.created" } - class ResponseCreatedEvent < OpenAI::BaseModel + class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Models::Responses::Response attr_accessor type: :"response.created" diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index ff2fd40f..1cedd65a 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -4,7 +4,7 @@ module OpenAI type response_delete_params = { } & OpenAI::Internal::Type::request_parameters - class ResponseDeleteParams < OpenAI::BaseModel + class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index 4187ffad..f9721f01 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -7,7 +7,7 @@ module OpenAI message: String } - class ResponseError < OpenAI::BaseModel + class ResponseError < OpenAI::Internal::Type::BaseModel attr_accessor code: OpenAI::Models::Responses::ResponseError::code attr_accessor message: String @@ -40,7 +40,7 @@ module OpenAI | :image_file_not_found module Code - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index 2c037b8a..b64588ae 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -4,7 +4,7 @@ module OpenAI type response_error_event = { code: String?, message: String, param: String?, type: :error } - class ResponseErrorEvent < OpenAI::BaseModel + class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel attr_accessor 
code: String? attr_accessor message: String diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index 5747ae10..f6f26208 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -7,7 +7,7 @@ module OpenAI type: :"response.failed" } - class ResponseFailedEvent < OpenAI::BaseModel + class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Models::Responses::Response attr_accessor type: :"response.failed" diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index 069463fe..8249564b 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.file_search_call.completed" } - class ResponseFileSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index e8163f14..5008567b 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.file_search_call.in_progress" } - class ResponseFileSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index 1745ca78..ba475249 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.file_search_call.searching" } - class ResponseFileSearchCallSearchingEvent < OpenAI::BaseModel + class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index c42ff6ab..6e6bfb5c 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -10,7 +10,7 @@ module OpenAI results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]? 
} - class ResponseFileSearchToolCall < OpenAI::BaseModel + class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor queries: ::Array[String] @@ -35,7 +35,7 @@ module OpenAI :in_progress | :searching | :completed | :incomplete | :failed module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress SEARCHING: :searching @@ -55,7 +55,7 @@ module OpenAI text: String } - class Result < OpenAI::BaseModel + class Result < OpenAI::Internal::Type::BaseModel attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]? attr_reader file_id: String? @@ -87,7 +87,7 @@ module OpenAI type attribute = String | Float | bool module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, Float, bool] end diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs index a78a0964..11e70474 100644 --- a/sig/openai/models/responses/response_format_text_config.rbs +++ b/sig/openai/models/responses/response_format_text_config.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::ResponseFormatJSONObject module ResponseFormatTextConfig - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] end diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index 899ff7bb..f6a08df7 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -10,7 +10,7 @@ module OpenAI strict: bool? 
} - class ResponseFormatTextJSONSchemaConfig < OpenAI::BaseModel + class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel attr_accessor name: String attr_accessor schema: ::Hash[Symbol, top] diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index afcf88ef..19067caf 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -9,7 +9,7 @@ module OpenAI type: :"response.function_call_arguments.delta" } - class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String attr_accessor item_id: String diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index 5a0dc613..9a6c9073 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -9,7 +9,7 @@ module OpenAI type: :"response.function_call_arguments.done" } - class ResponseFunctionCallArgumentsDoneEvent < OpenAI::BaseModel + class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor arguments: String attr_accessor item_id: String diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index af876b35..ec454c7d 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -11,7 +11,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseFunctionToolCall::status } - class ResponseFunctionToolCall < OpenAI::BaseModel + class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel attr_accessor arguments: String attr_accessor call_id: String @@ -44,7 +44,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 8feafadc..7fd2c526 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -10,7 +10,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status } - class ResponseFunctionToolCallOutputItem < OpenAI::BaseModel + class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor call_id: String @@ -38,7 +38,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 4b1b18f8..3a2241fe 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -8,7 +8,7 @@ module OpenAI type: :web_search_call } - class 
ResponseFunctionWebSearch < OpenAI::BaseModel + class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status @@ -26,7 +26,7 @@ module OpenAI type status = :in_progress | :searching | :completed | :failed module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress SEARCHING: :searching diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 35845e80..fb29e454 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -7,7 +7,7 @@ module OpenAI type: :"response.in_progress" } - class ResponseInProgressEvent < OpenAI::BaseModel + class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Models::Responses::Response attr_accessor type: :"response.in_progress" diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index 17f0b9d0..0634f434 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -7,7 +7,7 @@ module OpenAI | :"computer_call_output.output.image_url" module ResponseIncludable - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FILE_SEARCH_CALL_RESULTS: :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index 24d172fe..8e4251ac 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ b/sig/openai/models/responses/response_incomplete_event.rbs @@ -7,7 +7,7 @@ module OpenAI type: :"response.incomplete" } - class ResponseIncompleteEvent < OpenAI::BaseModel + class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Models::Responses::Response attr_accessor type: :"response.incomplete" diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index cebca75a..605fd682 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -8,7 +8,7 @@ module OpenAI type: :input_audio } - class ResponseInputAudio < OpenAI::BaseModel + class ResponseInputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String attr_accessor format_: OpenAI::Models::Responses::ResponseInputAudio::format_ @@ -26,7 +26,7 @@ module OpenAI type format_ = :mp3 | :wav module Format - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MP3: :mp3 WAV: :wav diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs index b94f1f20..04d4f7b9 100644 --- a/sig/openai/models/responses/response_input_content.rbs +++ b/sig/openai/models/responses/response_input_content.rbs @@ -7,7 +7,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseInputFile module ResponseInputContent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] end diff --git a/sig/openai/models/responses/response_input_file.rbs 
b/sig/openai/models/responses/response_input_file.rbs index 4c336cd6..75f0306c 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -9,7 +9,7 @@ module OpenAI filename: String } - class ResponseInputFile < OpenAI::BaseModel + class ResponseInputFile < OpenAI::Internal::Type::BaseModel attr_accessor type: :input_file attr_reader file_data: String? diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index 18b3631c..f6e47720 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -9,7 +9,7 @@ module OpenAI image_url: String? } - class ResponseInputImage < OpenAI::BaseModel + class ResponseInputImage < OpenAI::Internal::Type::BaseModel attr_accessor detail: OpenAI::Models::Responses::ResponseInputImage::detail attr_accessor type: :input_image @@ -30,7 +30,7 @@ module OpenAI type detail = :high | :low | :auto module Detail - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum HIGH: :high LOW: :low diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index ec441979..35ad1d08 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -15,7 +15,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseInputItem::ItemReference module ResponseInputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type message = { @@ -25,7 +25,7 @@ module OpenAI type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ } - class Message < OpenAI::BaseModel + class Message < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list attr_accessor role: OpenAI::Models::Responses::ResponseInputItem::Message::role @@ -54,7 +54,7 @@ module OpenAI type role = :user | :system | :developer module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user SYSTEM: :system @@ -66,7 +66,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed @@ -78,7 +78,7 @@ module OpenAI type type_ = :message module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE: :message @@ -96,7 +96,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status } - class ComputerCallOutput < OpenAI::BaseModel + class ComputerCallOutput < OpenAI::Internal::Type::BaseModel attr_accessor call_id: String attr_accessor output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot @@ -133,7 +133,7 @@ module OpenAI type acknowledged_safety_check = { id: String, code: String, message: String } - class AcknowledgedSafetyCheck < OpenAI::BaseModel + class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor code: String @@ -148,7 +148,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed @@ -167,7 +167,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status } - class FunctionCallOutput < OpenAI::BaseModel + class FunctionCallOutput < OpenAI::Internal::Type::BaseModel attr_accessor 
call_id: String attr_accessor output: String @@ -197,7 +197,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed @@ -209,7 +209,7 @@ module OpenAI type item_reference = { id: String, type: :item_reference } - class ItemReference < OpenAI::BaseModel + class ItemReference < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor type: :item_reference diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index e0a6c449..96c1aa3d 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -10,7 +10,7 @@ module OpenAI type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ } - class ResponseInputMessageItem < OpenAI::BaseModel + class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list @@ -42,7 +42,7 @@ module OpenAI type role = :user | :system | :developer module Role - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum USER: :user SYSTEM: :system @@ -54,7 +54,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed @@ -66,7 +66,7 @@ module OpenAI type type_ = :message module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum MESSAGE: :message diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs index c41c7537..bb0b03f9 100644 --- a/sig/openai/models/responses/response_input_text.rbs +++ b/sig/openai/models/responses/response_input_text.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_input_text = { text: String, type: :input_text } - class ResponseInputText < OpenAI::BaseModel + class ResponseInputText < OpenAI::Internal::Type::BaseModel attr_accessor text: String attr_accessor type: :input_text diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index 59b6a1f5..a84d2504 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -12,7 +12,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem module ResponseItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] end diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index dbed0410..196112a5 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -12,7 +12,7 @@ module OpenAI object: :list } - class ResponseItemList < OpenAI::BaseModel + class ResponseItemList < OpenAI::Internal::Type::BaseModel attr_accessor data: 
::Array[OpenAI::Models::Responses::response_item] attr_accessor first_id: String diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs index 1f90e75d..23c68656 100644 --- a/sig/openai/models/responses/response_output_audio.rbs +++ b/sig/openai/models/responses/response_output_audio.rbs @@ -4,7 +4,7 @@ module OpenAI type response_output_audio = { data: String, transcript: String, type: :output_audio } - class ResponseOutputAudio < OpenAI::BaseModel + class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String attr_accessor transcript: String diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index fb0c190a..f3a5b7b5 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -10,7 +10,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseReasoningItem module ResponseOutputItem - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] end diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index e59d12f3..bdbcfb2e 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.output_item.added" } - class ResponseOutputItemAddedEvent < OpenAI::BaseModel + class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel attr_accessor item: OpenAI::Models::Responses::response_output_item attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index c4df8604..076a3f09 100644 --- a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.output_item.done" } - class ResponseOutputItemDoneEvent < OpenAI::BaseModel + class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor item: OpenAI::Models::Responses::response_output_item attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index dd1c2a28..8662cc68 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -10,7 +10,7 @@ module OpenAI type: :message } - class ResponseOutputMessage < OpenAI::BaseModel + class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content] @@ -36,7 +36,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputRefusal module Content - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] end @@ -44,7 +44,7 @@ module OpenAI type 
status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs index f1743e42..69e08eed 100644 --- a/sig/openai/models/responses/response_output_refusal.rbs +++ b/sig/openai/models/responses/response_output_refusal.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_output_refusal = { refusal: String, type: :refusal } - class ResponseOutputRefusal < OpenAI::BaseModel + class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel attr_accessor refusal: String attr_accessor type: :refusal diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index 51f31ba9..e3814bc4 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -8,7 +8,7 @@ module OpenAI type: :output_text } - class ResponseOutputText < OpenAI::BaseModel + class ResponseOutputText < OpenAI::Internal::Type::BaseModel attr_accessor annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] attr_accessor text: String @@ -29,12 +29,12 @@ module OpenAI | OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type file_citation = { file_id: String, index: Integer, type: :file_citation } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor index: Integer @@ -59,7 +59,7 @@ module OpenAI url: String } - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer attr_accessor start_index: Integer @@ -83,7 +83,7 @@ module OpenAI type file_path = { file_id: String, index: Integer, type: :file_path } - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor index: Integer diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 9c17d2fd..e83fbe06 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -9,7 +9,7 @@ module OpenAI status: OpenAI::Models::Responses::ResponseReasoningItem::status } - class ResponseReasoningItem < OpenAI::BaseModel + class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary] @@ -33,7 +33,7 @@ module OpenAI type summary = { text: String, type: :summary_text } - class Summary < OpenAI::BaseModel + class Summary < OpenAI::Internal::Type::BaseModel attr_accessor text: String attr_accessor type: :summary_text @@ -46,7 +46,7 @@ module OpenAI type status = :in_progress | :completed | :incomplete module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress COMPLETED: :completed diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index 6d26e2d3..0ab5e996 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -10,7 +10,7 @@ 
module OpenAI type: :"response.refusal.delta" } - class ResponseRefusalDeltaEvent < OpenAI::BaseModel + class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor delta: String diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 693e0216..761e42a0 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -10,7 +10,7 @@ module OpenAI type: :"response.refusal.done" } - class ResponseRefusalDoneEvent < OpenAI::BaseModel + class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor item_id: String diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index d91ca0ed..9a99b74c 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -5,7 +5,7 @@ module OpenAI { include: ::Array[OpenAI::Models::Responses::response_includable] } & OpenAI::Internal::Type::request_parameters - class ResponseRetrieveParams < OpenAI::BaseModel + class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index cc7c73f5..d78fb3a2 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -4,7 +4,7 @@ module OpenAI type response_status = :completed | :failed | :in_progress | :incomplete module ResponseStatus - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum COMPLETED: :completed FAILED: :failed diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index a73d12f4..18708efb 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -36,7 +36,7 @@ module OpenAI | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent module ResponseStreamEvent - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, 
OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 71096d81..54d32aa3 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -11,7 +11,7 @@ module OpenAI type: :"response.output_text.annotation.added" } - class ResponseTextAnnotationDeltaEvent < OpenAI::BaseModel + class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation attr_accessor annotation_index: Integer @@ -41,12 +41,12 @@ module OpenAI | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath module Annotation - extend OpenAI::Union + extend OpenAI::Internal::Type::Union type file_citation = { file_id: String, index: Integer, type: :file_citation } - class FileCitation < OpenAI::BaseModel + class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor index: Integer @@ -71,7 +71,7 @@ module OpenAI url: String } - class URLCitation < OpenAI::BaseModel + class URLCitation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer attr_accessor start_index: Integer @@ -95,7 +95,7 @@ module OpenAI type file_path = { file_id: String, index: Integer, type: :file_path } - class FilePath < OpenAI::BaseModel + class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String attr_accessor index: Integer diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index 2429e169..30ff82f7 100644 --- a/sig/openai/models/responses/response_text_config.rbs +++ b/sig/openai/models/responses/response_text_config.rbs @@ -4,7 +4,7 @@ module OpenAI type response_text_config = { format_: OpenAI::Models::Responses::response_format_text_config } - class ResponseTextConfig < OpenAI::BaseModel + class ResponseTextConfig < OpenAI::Internal::Type::BaseModel attr_reader format_: OpenAI::Models::Responses::response_format_text_config? 
def format_=: ( diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index 45cfac57..2c36a1bf 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -10,7 +10,7 @@ module OpenAI type: :"response.output_text.delta" } - class ResponseTextDeltaEvent < OpenAI::BaseModel + class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor delta: String diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index bc4c94d4..86dc66db 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -10,7 +10,7 @@ module OpenAI type: :"response.output_text.done" } - class ResponseTextDoneEvent < OpenAI::BaseModel + class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor content_index: Integer attr_accessor item_id: String diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs index 8f4d0dac..28b094d0 100644 --- a/sig/openai/models/responses/response_usage.rbs +++ b/sig/openai/models/responses/response_usage.rbs @@ -10,7 +10,7 @@ module OpenAI total_tokens: Integer } - class ResponseUsage < OpenAI::BaseModel + class ResponseUsage < OpenAI::Internal::Type::BaseModel attr_accessor input_tokens: Integer attr_accessor input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails @@ -33,7 +33,7 @@ module OpenAI type input_tokens_details = { cached_tokens: Integer } - class InputTokensDetails < OpenAI::BaseModel + class InputTokensDetails < OpenAI::Internal::Type::BaseModel attr_accessor cached_tokens: Integer def initialize: (cached_tokens: Integer) -> void @@ -43,7 +43,7 @@ module OpenAI type output_tokens_details = { reasoning_tokens: Integer } - class OutputTokensDetails < OpenAI::BaseModel + class OutputTokensDetails < OpenAI::Internal::Type::BaseModel attr_accessor reasoning_tokens: Integer def initialize: (reasoning_tokens: Integer) -> void diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index bc8d0e77..d5772178 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.web_search_call.completed" } - class ResponseWebSearchCallCompletedEvent < OpenAI::BaseModel + class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index 34cdc0c4..56dfa277 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.web_search_call.in_progress" } - class ResponseWebSearchCallInProgressEvent < OpenAI::BaseModel + class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git 
a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index f03cf1a9..3a70544f 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -8,7 +8,7 @@ module OpenAI type: :"response.web_search_call.searching" } - class ResponseWebSearchCallSearchingEvent < OpenAI::BaseModel + class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel attr_accessor item_id: String attr_accessor output_index: Integer diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index c3b27fe5..0d3a572d 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -8,7 +8,7 @@ module OpenAI | OpenAI::Models::Responses::WebSearchTool module Tool - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] end diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs index bf83f1a7..a1881b61 100644 --- a/sig/openai/models/responses/tool_choice_function.rbs +++ b/sig/openai/models/responses/tool_choice_function.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type tool_choice_function = { name: String, type: :function } - class ToolChoiceFunction < OpenAI::BaseModel + class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel attr_accessor name: String attr_accessor type: :function diff --git a/sig/openai/models/responses/tool_choice_options.rbs b/sig/openai/models/responses/tool_choice_options.rbs index 44974540..f07ff227 100644 --- a/sig/openai/models/responses/tool_choice_options.rbs +++ b/sig/openai/models/responses/tool_choice_options.rbs @@ -4,7 +4,7 @@ module OpenAI type tool_choice_options = :none | :auto | :required module ToolChoiceOptions - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum NONE: :none AUTO: :auto diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index b0562635..588a58ac 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -4,7 +4,7 @@ module OpenAI type tool_choice_types = { type: OpenAI::Models::Responses::ToolChoiceTypes::type_ } - class ToolChoiceTypes < OpenAI::BaseModel + class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel attr_accessor type: OpenAI::Models::Responses::ToolChoiceTypes::type_ def initialize: ( @@ -20,7 +20,7 @@ module OpenAI | :web_search_preview_2025_03_11 module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum FILE_SEARCH: :file_search WEB_SEARCH_PREVIEW: :web_search_preview diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index cb6606be..92f25bd0 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -8,7 +8,7 @@ module OpenAI user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation? 
} - class WebSearchTool < OpenAI::BaseModel + class WebSearchTool < OpenAI::Internal::Type::BaseModel attr_accessor type: OpenAI::Models::Responses::WebSearchTool::type_ attr_reader search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size? @@ -30,7 +30,7 @@ module OpenAI type type_ = :web_search_preview | :web_search_preview_2025_03_11 module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 @@ -41,7 +41,7 @@ module OpenAI type search_context_size = :low | :medium | :high module SearchContextSize - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum LOW: :low MEDIUM: :medium @@ -59,7 +59,7 @@ module OpenAI timezone: String } - class UserLocation < OpenAI::BaseModel + class UserLocation < OpenAI::Internal::Type::BaseModel attr_accessor type: :approximate attr_reader city: String? diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 5e0a2660..2cb153e7 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -9,7 +9,7 @@ module OpenAI | :"computer-use-preview-2025-03-11" module ResponsesModel - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" | :"o1-pro-2025-03-19" diff --git a/sig/openai/models/static_file_chunking_strategy.rbs b/sig/openai/models/static_file_chunking_strategy.rbs index e77bd4ac..7c00abb0 100644 --- a/sig/openai/models/static_file_chunking_strategy.rbs +++ b/sig/openai/models/static_file_chunking_strategy.rbs @@ -3,7 +3,7 @@ module OpenAI type static_file_chunking_strategy = { chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer } - class StaticFileChunkingStrategy < OpenAI::BaseModel + class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel attr_accessor chunk_overlap_tokens: Integer attr_accessor max_chunk_size_tokens: Integer diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index 62e506a3..1122e914 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -3,7 +3,7 @@ module OpenAI type static_file_chunking_strategy_object = { static: OpenAI::Models::StaticFileChunkingStrategy, type: :static } - class StaticFileChunkingStrategyObject < OpenAI::BaseModel + class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel attr_accessor static: OpenAI::Models::StaticFileChunkingStrategy attr_accessor type: :static diff --git a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index a86f96ab..639a7ea1 100644 --- a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -3,7 +3,7 @@ module OpenAI type static_file_chunking_strategy_object_param = { static: OpenAI::Models::StaticFileChunkingStrategy, type: :static } - class StaticFileChunkingStrategyObjectParam < OpenAI::BaseModel + class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel attr_accessor static: OpenAI::Models::StaticFileChunkingStrategy attr_accessor type: :static diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index ff4fec3b..0a50164d 100644 --- a/sig/openai/models/upload.rbs +++ 
b/sig/openai/models/upload.rbs @@ -13,7 +13,7 @@ module OpenAI file: OpenAI::Models::FileObject? } - class Upload < OpenAI::BaseModel + class Upload < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor bytes: Integer @@ -49,7 +49,7 @@ module OpenAI type status = :pending | :completed | :cancelled | :expired module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum PENDING: :pending COMPLETED: :completed diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index f7d03f06..669ffc83 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -3,7 +3,7 @@ module OpenAI type upload_cancel_params = { } & OpenAI::Internal::Type::request_parameters - class UploadCancelParams < OpenAI::BaseModel + class UploadCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index 50d22376..5cfff932 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -4,7 +4,7 @@ module OpenAI { part_ids: ::Array[String], :md5 => String } & OpenAI::Internal::Type::request_parameters - class UploadCompleteParams < OpenAI::BaseModel + class UploadCompleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 94f2cc72..9b68eb00 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -9,7 +9,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class UploadCreateParams < OpenAI::BaseModel + class UploadCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index a36ae697..d6014118 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type part_create_params = { data: (IO | StringIO) } & OpenAI::Internal::Type::request_parameters - class PartCreateParams < OpenAI::BaseModel + class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 2a1f9e38..93530d14 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -11,7 +11,7 @@ module OpenAI upload_id: String } - class UploadPart < OpenAI::BaseModel + class UploadPart < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created_at: Integer diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 02c2f02f..a29d28eb 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -15,7 +15,7 @@ module OpenAI expires_at: Integer? 
} - class VectorStore < OpenAI::BaseModel + class VectorStore < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor created_at: Integer @@ -67,7 +67,7 @@ module OpenAI total: Integer } - class FileCounts < OpenAI::BaseModel + class FileCounts < OpenAI::Internal::Type::BaseModel attr_accessor cancelled: Integer attr_accessor completed: Integer @@ -92,7 +92,7 @@ module OpenAI type status = :expired | :in_progress | :completed module Status - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum EXPIRED: :expired IN_PROGRESS: :in_progress @@ -103,7 +103,7 @@ module OpenAI type expires_after = { anchor: :last_active_at, days: Integer } - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel attr_accessor anchor: :last_active_at attr_accessor days: Integer diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 3a387fa9..accb9698 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class VectorStoreCreateParams < OpenAI::BaseModel + class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -49,7 +49,7 @@ module OpenAI type expires_after = { anchor: :last_active_at, days: Integer } - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel attr_accessor anchor: :last_active_at attr_accessor days: Integer diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index b2f2436c..1dfb12b4 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_delete_params = { } & OpenAI::Internal::Type::request_parameters - class VectorStoreDeleteParams < OpenAI::BaseModel + class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs index 3403e058..1c8ca3ae 100644 --- a/sig/openai/models/vector_store_deleted.rbs +++ b/sig/openai/models/vector_store_deleted.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_deleted = { id: String, deleted: bool, object: :"vector_store.deleted" } - class VectorStoreDeleted < OpenAI::BaseModel + class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor deleted: bool diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index 45da51d8..5c2aa3f9 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -9,7 +9,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class VectorStoreListParams < OpenAI::BaseModel + class VectorStoreListParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -44,7 +44,7 @@ module OpenAI type order = :asc | :desc module Order - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum ASC: :asc DESC: :desc diff --git a/sig/openai/models/vector_store_retrieve_params.rbs 
b/sig/openai/models/vector_store_retrieve_params.rbs index 7f875a4d..51b51332 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_retrieve_params = { } & OpenAI::Internal::Type::request_parameters - class VectorStoreRetrieveParams < OpenAI::BaseModel + class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 10fb00d0..823cd7bc 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -10,7 +10,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class VectorStoreSearchParams < OpenAI::BaseModel + class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -50,7 +50,7 @@ module OpenAI type query = String | ::Array[String] module Query - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, ::Array[String]] @@ -63,7 +63,7 @@ module OpenAI OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter module Filters - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] end @@ -74,7 +74,7 @@ module OpenAI score_threshold: Float } - class RankingOptions < OpenAI::BaseModel + class RankingOptions < OpenAI::Internal::Type::BaseModel attr_reader ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker? def ranker=: ( @@ -95,7 +95,7 @@ module OpenAI type ranker = :auto | :"default-2024-11-15" module Ranker - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index a5ba79e3..d7c590bd 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -9,7 +9,7 @@ module OpenAI score: Float } - class VectorStoreSearchResponse < OpenAI::BaseModel + class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStoreSearchResponse::attribute]? 
attr_accessor content: ::Array[OpenAI::Models::VectorStoreSearchResponse::Content] @@ -33,7 +33,7 @@ module OpenAI type attribute = String | Float | bool module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, Float, bool] end @@ -44,7 +44,7 @@ module OpenAI type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ } - class Content < OpenAI::BaseModel + class Content < OpenAI::Internal::Type::BaseModel attr_accessor text: String attr_accessor type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ @@ -59,7 +59,7 @@ module OpenAI type type_ = :text module Type - extend OpenAI::Enum + extend OpenAI::Internal::Type::Enum TEXT: :text diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index a25ec5db..f8258d41 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -8,7 +8,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class VectorStoreUpdateParams < OpenAI::BaseModel + class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -29,7 +29,7 @@ module OpenAI type expires_after = { anchor: :last_active_at, days: Integer } - class ExpiresAfter < OpenAI::BaseModel + class ExpiresAfter < OpenAI::Internal::Type::BaseModel attr_accessor anchor: :last_active_at attr_accessor days: Integer diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index eb2088fd..26c9d893 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_batch_cancel_params = { vector_store_id: String } & OpenAI::Internal::Type::request_parameters - class FileBatchCancelParams < OpenAI::BaseModel + class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index e8e0ddca..03b56b5a 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -9,7 +9,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class FileBatchCreateParams < OpenAI::BaseModel + class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -35,7 +35,7 @@ module OpenAI type attribute = String | Float | bool module Attribute - extend OpenAI::Union + extend OpenAI::Internal::Type::Union def self?.variants: -> [String, Float, bool] end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index e9f2d88b..87868689 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -12,7 +12,7 @@ module OpenAI } & OpenAI::Internal::Type::request_parameters - class FileBatchListFilesParams < OpenAI::BaseModel + class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel extend 
OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
@@ -57,7 +57,7 @@ module OpenAI
        type filter = :in_progress | :completed | :failed | :cancelled

        module Filter
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          IN_PROGRESS: :in_progress
          COMPLETED: :completed
@@ -70,7 +70,7 @@ module OpenAI
        type order = :asc | :desc

        module Order
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          ASC: :asc
          DESC: :desc
diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
index a9efa644..b43f76b8 100644
--- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
+++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type file_batch_retrieve_params =
        { vector_store_id: String } & OpenAI::Internal::Type::request_parameters

-      class FileBatchRetrieveParams < OpenAI::BaseModel
+      class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs
index c7f2dfc8..a3f37f30 100644
--- a/sig/openai/models/vector_stores/file_content_params.rbs
+++ b/sig/openai/models/vector_stores/file_content_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type file_content_params =
        { vector_store_id: String } & OpenAI::Internal::Type::request_parameters

-      class FileContentParams < OpenAI::BaseModel
+      class FileContentParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs
index 11f49799..df2aa45a 100644
--- a/sig/openai/models/vector_stores/file_content_response.rbs
+++ b/sig/openai/models/vector_stores/file_content_response.rbs
@@ -3,7 +3,7 @@ module OpenAI
    module VectorStores
      type file_content_response = { text: String, type: String }

-      class FileContentResponse < OpenAI::BaseModel
+      class FileContentResponse < OpenAI::Internal::Type::BaseModel
        attr_reader text: String?

        def text=: (String) -> String
diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs
index 93f6ca13..9a396ee0 100644
--- a/sig/openai/models/vector_stores/file_create_params.rbs
+++ b/sig/openai/models/vector_stores/file_create_params.rbs
@@ -9,7 +9,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters

-      class FileCreateParams < OpenAI::BaseModel
+      class FileCreateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
@@ -35,7 +35,7 @@ module OpenAI
        type attribute = String | Float | bool

        module Attribute
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

          def self?.variants: -> [String, Float, bool]
        end
diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs
index 486986b3..a80ed26d 100644
--- a/sig/openai/models/vector_stores/file_delete_params.rbs
+++ b/sig/openai/models/vector_stores/file_delete_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type file_delete_params =
        { vector_store_id: String } & OpenAI::Internal::Type::request_parameters

-      class FileDeleteParams < OpenAI::BaseModel
+      class FileDeleteParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs
index 6501043b..7b916738 100644
--- a/sig/openai/models/vector_stores/file_list_params.rbs
+++ b/sig/openai/models/vector_stores/file_list_params.rbs
@@ -11,7 +11,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters

-      class FileListParams < OpenAI::BaseModel
+      class FileListParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
@@ -53,7 +53,7 @@ module OpenAI
        type filter = :in_progress | :completed | :failed | :cancelled

        module Filter
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          IN_PROGRESS: :in_progress
          COMPLETED: :completed
@@ -66,7 +66,7 @@ module OpenAI
        type order = :asc | :desc

        module Order
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          ASC: :asc
          DESC: :desc
diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs
index f9c4eac7..266f9e3c 100644
--- a/sig/openai/models/vector_stores/file_retrieve_params.rbs
+++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs
@@ -4,7 +4,7 @@ module OpenAI
      type file_retrieve_params =
        { vector_store_id: String } & OpenAI::Internal::Type::request_parameters

-      class FileRetrieveParams < OpenAI::BaseModel
+      class FileRetrieveParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs
index 3155411a..7b75e4f2 100644
--- a/sig/openai/models/vector_stores/file_update_params.rbs
+++ b/sig/openai/models/vector_stores/file_update_params.rbs
@@ -8,7 +8,7 @@ module OpenAI
        }
        & OpenAI::Internal::Type::request_parameters

-      class FileUpdateParams < OpenAI::BaseModel
+      class FileUpdateParams < OpenAI::Internal::Type::BaseModel
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
@@ -27,7 +27,7 @@ module OpenAI
        type attribute = String | Float | bool

        module Attribute
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

          def self?.variants: -> [String, Float, bool]
        end
diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs
index 200981e8..7039aa57 100644
--- a/sig/openai/models/vector_stores/vector_store_file.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file.rbs
@@ -16,7 +16,7 @@ module OpenAI
          chunking_strategy: OpenAI::Models::file_chunking_strategy
        }

-      class VectorStoreFile < OpenAI::BaseModel
+      class VectorStoreFile < OpenAI::Internal::Type::BaseModel
        attr_accessor id: String

        attr_accessor created_at: Integer
@@ -59,7 +59,7 @@ module OpenAI
            message: String
          }

-        class LastError < OpenAI::BaseModel
+        class LastError < OpenAI::Internal::Type::BaseModel
          attr_accessor code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code

          attr_accessor message: String
@@ -74,7 +74,7 @@ module OpenAI
          type code = :server_error | :unsupported_file | :invalid_file

          module Code
-            extend OpenAI::Enum
+            extend OpenAI::Internal::Type::Enum

            SERVER_ERROR: :server_error
            UNSUPPORTED_FILE: :unsupported_file
@@ -87,7 +87,7 @@ module OpenAI
        type status = :in_progress | :completed | :cancelled | :failed

        module Status
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          IN_PROGRESS: :in_progress
          COMPLETED: :completed
@@ -100,7 +100,7 @@ module OpenAI
        type attribute = String | Float | bool

        module Attribute
-          extend OpenAI::Union
+          extend OpenAI::Internal::Type::Union

          def self?.variants: -> [String, Float, bool]
        end
diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
index d5ae501f..6c286435 100644
--- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
@@ -13,7 +13,7 @@ module OpenAI
          vector_store_id: String
        }

-      class VectorStoreFileBatch < OpenAI::BaseModel
+      class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel
        attr_accessor id: String

        attr_accessor created_at: Integer
@@ -46,7 +46,7 @@ module OpenAI
            total: Integer
          }

-        class FileCounts < OpenAI::BaseModel
+        class FileCounts < OpenAI::Internal::Type::BaseModel
          attr_accessor cancelled: Integer

          attr_accessor completed: Integer
@@ -71,7 +71,7 @@ module OpenAI
        type status = :in_progress | :completed | :cancelled | :failed

        module Status
-          extend OpenAI::Enum
+          extend OpenAI::Internal::Type::Enum

          IN_PROGRESS: :in_progress
          COMPLETED: :completed
diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
index 19257a53..15ae080f 100644
--- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
@@ -6,7 +6,7 @@ module OpenAI
      type vector_store_file_deleted =
        { id: String, deleted: bool, object: :"vector_store.file.deleted" }

-      class VectorStoreFileDeleted < OpenAI::BaseModel
+      class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel
        attr_accessor id: String

        attr_accessor deleted: bool
diff --git a/sig/openai/request_options.rbs b/sig/openai/request_options.rbs
index 51674ffe..a3ac246f 100644
--- a/sig/openai/request_options.rbs
+++ b/sig/openai/request_options.rbs
@@ -12,7 +12,7 @@ module OpenAI
      timeout: Float?
    }

-  class RequestOptions < OpenAI::BaseModel
+  class RequestOptions < OpenAI::Internal::Type::BaseModel
    def self.validate!: (self | ::Hash[Symbol, top] opts) -> void

    attr_accessor idempotency_key: String?
diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb
index 6b35af97..dd6ec3ea 100644
--- a/test/openai/client_test.rb
+++ b/test/openai/client_test.rb
@@ -55,7 +55,7 @@ def test_client_default_request_default_retry_attempts
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
    end

@@ -67,7 +67,7 @@ def test_client_given_request_default_retry_attempts
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
    end

@@ -79,7 +79,7 @@ def test_client_default_request_given_retry_attempts
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(
        messages: [{content: "string", role: :developer}],
        model: :"o3-mini",
@@ -95,7 +95,7 @@ def test_client_given_request_given_retry_attempts
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(
        messages: [{content: "string", role: :developer}],
        model: :"o3-mini",
@@ -111,7 +111,7 @@ def test_client_retry_after_seconds
    requester = MockRequester.new(500, {"retry-after" => "1.3"}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
    end

@@ -124,7 +124,7 @@ def test_client_retry_after_date
    requester = MockRequester.new(500, {"retry-after" => (Time.now + 10).httpdate}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      Thread.current.thread_variable_set(:time_now, Time.now)
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
      Thread.current.thread_variable_set(:time_now, nil)
@@ -139,7 +139,7 @@ def test_client_retry_after_ms
    requester = MockRequester.new(500, {"retry-after-ms" => "1300"}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
    end

@@ -152,7 +152,7 @@ def test_retry_count_header
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
    end

@@ -165,7 +165,7 @@ def test_omit_retry_count_header
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(
        messages: [{content: "string", role: :developer}],
        model: :"o3-mini",
@@ -182,7 +182,7 @@ def test_overwrite_retry_count_header
    requester = MockRequester.new(500, {}, {})
    openai.requester = requester

-    assert_raises(OpenAI::InternalServerError) do
+    assert_raises(OpenAI::Errors::InternalServerError) do
      openai.chat.completions.create(
        messages: [{content: "string", role: :developer}],
        model: :"o3-mini",
diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb
index 25c1322c..0b854098 100644
--- a/test/openai/internal/type/base_model_test.rb
+++ b/test/openai/internal/type/base_model_test.rb
@@ -3,26 +3,26 @@
 require_relative "../../test_helper"

 class OpenAI::Test::PrimitiveModelTest < Minitest::Test
-  A = OpenAI::ArrayOf[-> { Integer }]
-  H = OpenAI::HashOf[-> { Integer }, nil?: true]
+  A = OpenAI::Internal::Type::ArrayOf[-> { Integer }]
+  H = OpenAI::Internal::Type::HashOf[-> { Integer }, nil?: true]

   module E
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum
   end

   module U
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union
   end

-  class B < OpenAI::BaseModel
+  class B < OpenAI::Internal::Type::BaseModel
     optional :a, Integer
     optional :b, B
   end

   def test_typing
     converters = [
-      OpenAI::Unknown,
-      OpenAI::BooleanModel,
+      OpenAI::Internal::Type::Unknown,
+      OpenAI::Internal::Type::BooleanModel,
       A,
       H,
       E,
@@ -39,11 +39,11 @@ def test_typing
   def test_coerce
     cases = {
-      [OpenAI::Unknown, :a] => [{yes: 1}, :a],
+      [OpenAI::Internal::Type::Unknown, :a] => [{yes: 1}, :a],
       [NilClass, :a] => [{maybe: 1}, nil],
       [NilClass, nil] => [{yes: 1}, nil],
-      [OpenAI::BooleanModel, true] => [{yes: 1}, true],
-      [OpenAI::BooleanModel, "true"] => [{no: 1}, "true"],
+      [OpenAI::Internal::Type::BooleanModel, true] => [{yes: 1}, true],
+      [OpenAI::Internal::Type::BooleanModel, "true"] => [{no: 1}, "true"],
       [Integer, 1] => [{yes: 1}, 1],
       [Integer, 1.0] => [{maybe: 1}, 1],
       [Integer, "1"] => [{maybe: 1}, 1],
@@ -76,7 +76,7 @@ def test_coerce
   def test_dump
     cases = {
-      [OpenAI::Unknown, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
+      [OpenAI::Internal::Type::Unknown, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
       [A, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
       [H, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
       [E, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
@@ -85,8 +85,8 @@ def test_dump
       [String, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
       [:b, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
       [nil, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}},
-      [OpenAI::BooleanModel, true] => true,
-      [OpenAI::BooleanModel, "true"] => "true",
+      [OpenAI::Internal::Type::BooleanModel, true] => true,
+      [OpenAI::Internal::Type::BooleanModel, "true"] => "true",
       [Integer, "1"] => "1",
       [Float, 1] => 1,
       [String, "one"] => "one",
@@ -126,27 +126,27 @@ def test_coerce_errors

 class OpenAI::Test::EnumModelTest < Minitest::Test
   module E1
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     TRUE = true
   end

   module E2
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     ONE = 1
     TWO = 2
   end

   module E3
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     ONE = 1.0
     TWO = 2.0
   end

   module E4
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     ONE = :one
     TWO = :two
@@ -216,14 +216,14 @@ def test_dump
 end

 class OpenAI::Test::CollectionModelTest < Minitest::Test
-  A1 = OpenAI::ArrayOf[-> { Integer }]
-  H1 = OpenAI::HashOf[Integer]
+  A1 = OpenAI::Internal::Type::ArrayOf[-> { Integer }]
+  H1 = OpenAI::Internal::Type::HashOf[Integer]

-  A2 = OpenAI::ArrayOf[H1]
-  H2 = OpenAI::HashOf[-> { A1 }]
+  A2 = OpenAI::Internal::Type::ArrayOf[H1]
+  H2 = OpenAI::Internal::Type::HashOf[-> { A1 }]

-  A3 = OpenAI::ArrayOf[Integer, nil?: true]
-  H3 = OpenAI::HashOf[Integer, nil?: true]
+  A3 = OpenAI::Internal::Type::ArrayOf[Integer, nil?: true]
+  H3 = OpenAI::Internal::Type::HashOf[Integer, nil?: true]

   def test_coerce
     cases = {
@@ -263,7 +263,7 @@ def test_coerce
 end

 class OpenAI::Test::BaseModelTest < Minitest::Test
-  class M1 < OpenAI::BaseModel
+  class M1 < OpenAI::Internal::Type::BaseModel
     required :a, Integer
   end

@@ -273,7 +273,7 @@ class M2 < M1
     optional :c, String
   end

-  class M3 < OpenAI::BaseModel
+  class M3 < OpenAI::Internal::Type::BaseModel
     optional :c, const: :c
     required :d, const: :d
   end
@@ -290,7 +290,7 @@ class M4 < M1
     end
   end

-  class M5 < OpenAI::BaseModel
+  class M5 < OpenAI::Internal::Type::BaseModel
     request_only do
       required :c, const: :c
     end
@@ -301,7 +301,7 @@ class M5 < OpenAI::BaseModel
   end

   class M6 < M1
-    required :a, OpenAI::ArrayOf[M6]
+    required :a, OpenAI::Internal::Type::ArrayOf[M6]
   end

   def test_coerce
@@ -337,7 +337,7 @@ def test_coerce
       assert_pattern do
         coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state)
         assert_equal(coerced, coerced)
-        if coerced.is_a?(OpenAI::BaseModel)
+        if coerced.is_a?(OpenAI::Internal::Type::BaseModel)
           coerced.to_h => ^expect
         else
           coerced => ^expect
@@ -403,27 +403,27 @@ def test_accessors

 class OpenAI::Test::UnionTest < Minitest::Test
   module U0
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union
   end

   module U1
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     variant const: :a
     variant const: 2
   end

-  class M1 < OpenAI::BaseModel
+  class M1 < OpenAI::Internal::Type::BaseModel
     required :t, const: :a, api_name: :type
     optional :c, String
   end

-  class M2 < OpenAI::BaseModel
+  class M2 < OpenAI::Internal::Type::BaseModel
     required :type, const: :b
     optional :c, String
   end

   module U2
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     discriminator :type
     variant :a, M1
@@ -431,7 +431,7 @@ module U2
   end

   module U3
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     discriminator :type
     variant :a, M1
@@ -439,37 +439,37 @@ module U4
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     discriminator :type
     variant String
     variant :a, M1
   end

-  class M3 < OpenAI::BaseModel
+  class M3 < OpenAI::Internal::Type::BaseModel
     optional :recur, -> { U5 }
     required :a, Integer
   end

-  class M4 < OpenAI::BaseModel
+  class M4 < OpenAI::Internal::Type::BaseModel
     optional :recur, -> { U5 }
-    required :a, OpenAI::ArrayOf[-> { U5 }]
+    required :a, OpenAI::Internal::Type::ArrayOf[-> { U5 }]
   end

-  class M5 < OpenAI::BaseModel
+  class M5 < OpenAI::Internal::Type::BaseModel
     optional :recur, -> { U5 }
-    required :b, OpenAI::ArrayOf[-> { U5 }]
+    required :b, OpenAI::Internal::Type::ArrayOf[-> { U5 }]
   end

   module U5
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     variant -> { M3 }
     variant -> { M4 }
   end

   module U6
-    extend OpenAI::Union
+    extend OpenAI::Internal::Type::Union

     variant -> { M3 }
     variant -> { M5 }
@@ -480,7 +480,7 @@ def test_accessors
     tap do
       model.recur
       flunk
-    rescue OpenAI::ConversionError => e
+    rescue OpenAI::Errors::ConversionError => e
       assert_kind_of(ArgumentError, e.cause)
     end
   end
@@ -513,7 +513,7 @@ def test_coerce
       assert_pattern do
         coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state)
         assert_equal(coerced, coerced)
-        if coerced.is_a?(OpenAI::BaseModel)
+        if coerced.is_a?(OpenAI::Internal::Type::BaseModel)
           coerced.to_h => ^expect
         else
           coerced => ^expect
@@ -527,29 +527,29 @@ def test_coerce

 class OpenAI::Test::BaseModelQoLTest < Minitest::Test
   module E1
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     A = 1
   end

   module E2
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     A = 1
   end

   module E3
-    extend OpenAI::Enum
+    extend OpenAI::Internal::Type::Enum

     A = 2
     B = 3
   end

-  class M1 < OpenAI::BaseModel
+  class M1 < OpenAI::Internal::Type::BaseModel
     required :a, Integer
   end

-  class M2 < OpenAI::BaseModel
+  class M2 < OpenAI::Internal::Type::BaseModel
     required :a, Integer, nil?: true
   end

@@ -559,9 +559,9 @@ class M3 < M2
   def test_equality
     cases = {
-      [OpenAI::Unknown, OpenAI::Unknown] => true,
-      [OpenAI::BooleanModel, OpenAI::BooleanModel] => true,
-      [OpenAI::Unknown, OpenAI::BooleanModel] => false,
+      [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::Unknown] => true,
+      [OpenAI::Internal::Type::BooleanModel, OpenAI::Internal::Type::BooleanModel] => true,
+      [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::BooleanModel] => false,
       [E1, E2] => true,
       [E1, E3] => false,
       [M1, M2] => false,
diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb
index ed50d991..71ba50ed 100644
--- a/test/openai/internal/util_test.rb
+++ b/test/openai/internal/util_test.rb
@@ -42,7 +42,7 @@ def test_nested_left_map
   def test_omission
     merged = OpenAI::Internal::Util.deep_merge(
       {b: {b2: 1, b3: {c: 4, d: 5}}},
-      {b: {b2: 1, b3: {c: OpenAI::Internal::Util::OMIT, d: 5}}}
+      {b: {b2: 1, b3: {c: OpenAI::Internal::OMIT, d: 5}}}
     )

     assert_pattern do
diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb
index 76c25672..8ba0d03e 100644
--- a/test/openai/resources/batches_test.rb
+++ b/test/openai/resources/batches_test.rb
@@ -34,7 +34,7 @@ def test_create_required_params
        failed_at: Integer | nil,
        finalizing_at: Integer | nil,
        in_progress_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        output_file_id: String | nil,
        request_counts: OpenAI::Models::BatchRequestCounts | nil
      }
@@ -67,7 +67,7 @@ def test_retrieve
        failed_at: Integer | nil,
        finalizing_at: Integer | nil,
        in_progress_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        output_file_id: String | nil,
        request_counts: OpenAI::Models::BatchRequestCounts | nil
      }
@@ -107,7 +107,7 @@ def test_list
        failed_at: Integer | nil,
        finalizing_at: Integer | nil,
        in_progress_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        output_file_id: String | nil,
        request_counts: OpenAI::Models::BatchRequestCounts | nil
      }
@@ -140,7 +140,7 @@ def test_cancel
        failed_at: Integer | nil,
        finalizing_at: Integer | nil,
        in_progress_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        output_file_id: String | nil,
        request_counts: OpenAI::Models::BatchRequestCounts | nil
      }
diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb
index f618bea7..0e89f80e 100644
--- a/test/openai/resources/beta/assistants_test.rb
+++ b/test/openai/resources/beta/assistants_test.rb
@@ -16,11 +16,11 @@ def test_create_required_params
        created_at: Integer,
        description: String | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        name: String | nil,
        object: Symbol,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        temperature: Float | nil,
        tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil,
@@ -42,11 +42,11 @@ def test_retrieve
        created_at: Integer,
        description: String | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        name: String | nil,
        object: Symbol,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        temperature: Float | nil,
        tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil,
@@ -68,11 +68,11 @@ def test_update
        created_at: Integer,
        description: String | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        name: String | nil,
        object: Symbol,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        temperature: Float | nil,
        tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil,
@@ -101,11 +101,11 @@ def test_list
        created_at: Integer,
        description: String | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        name: String | nil,
        object: Symbol,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        temperature: Float | nil,
        tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil,
@@ -124,7 +124,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
diff --git a/test/openai/resources/beta/threads/messages_test.rb b/test/openai/resources/beta/threads/messages_test.rb
index 7a8fdc90..5dec984b 100644
--- a/test/openai/resources/beta/threads/messages_test.rb
+++ b/test/openai/resources/beta/threads/messages_test.rb
@@ -14,13 +14,13 @@ def test_create_required_params
      response => {
        id: String,
        assistant_id: String | nil,
-        attachments: ^(OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
+        attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
        completed_at: Integer | nil,
-        content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
+        content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
        created_at: Integer,
        incomplete_at: Integer | nil,
        incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        role: OpenAI::Models::Beta::Threads::Message::Role,
        run_id: String | nil,
@@ -41,13 +41,13 @@ def test_retrieve_required_params
      response => {
        id: String,
        assistant_id: String | nil,
-        attachments: ^(OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
+        attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
        completed_at: Integer | nil,
-        content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
+        content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
        created_at: Integer,
        incomplete_at: Integer | nil,
        incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        role: OpenAI::Models::Beta::Threads::Message::Role,
        run_id: String | nil,
@@ -68,13 +68,13 @@ def test_update_required_params
      response => {
        id: String,
        assistant_id: String | nil,
-        attachments: ^(OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
+        attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
        completed_at: Integer | nil,
-        content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
+        content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
        created_at: Integer,
        incomplete_at: Integer | nil,
        incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        role: OpenAI::Models::Beta::Threads::Message::Role,
        run_id: String | nil,
@@ -102,13 +102,13 @@ def test_list
      row => {
        id: String,
        assistant_id: String | nil,
-        attachments: ^(OpenAI::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
+        attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil,
        completed_at: Integer | nil,
-        content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
+        content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]),
        created_at: Integer,
        incomplete_at: Integer | nil,
        incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        role: OpenAI::Models::Beta::Threads::Message::Role,
        run_id: String | nil,
@@ -128,7 +128,7 @@ def test_delete_required_params
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
diff --git a/test/openai/resources/beta/threads/runs/steps_test.rb b/test/openai/resources/beta/threads/runs/steps_test.rb
index c05de77f..200fc186 100644
--- a/test/openai/resources/beta/threads/runs/steps_test.rb
+++ b/test/openai/resources/beta/threads/runs/steps_test.rb
@@ -20,7 +20,7 @@ def test_retrieve_required_params
        expired_at: Integer | nil,
        failed_at: Integer | nil,
        last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        run_id: String,
        status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status,
@@ -56,7 +56,7 @@ def test_list_required_params
        expired_at: Integer | nil,
        failed_at: Integer | nil,
        last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        run_id: String,
        status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status,
diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb
index 1147966b..749bb638 100644
--- a/test/openai/resources/beta/threads/runs_test.rb
+++ b/test/openai/resources/beta/threads/runs_test.rb
@@ -24,17 +24,17 @@ def test_create_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
@@ -64,17 +64,17 @@ def test_retrieve_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
@@ -104,17 +104,17 @@ def test_update_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
@@ -151,17 +151,17 @@ def test_list
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
@@ -191,17 +191,17 @@ def test_cancel_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
@@ -232,17 +232,17 @@ def test_submit_tool_outputs_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb
index f7f1021f..f702b5ce 100644
--- a/test/openai/resources/beta/threads_test.rb
+++ b/test/openai/resources/beta/threads_test.rb
@@ -14,7 +14,7 @@ def test_create
      response => {
        id: String,
        created_at: Integer,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil
      }
@@ -32,7 +32,7 @@ def test_retrieve
      response => {
        id: String,
        created_at: Integer,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil
      }
@@ -50,7 +50,7 @@ def test_update
      response => {
        id: String,
        created_at: Integer,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        object: Symbol,
        tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil
      }
@@ -67,7 +67,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
@@ -94,17 +94,17 @@ def test_create_and_run_required_params
        last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil,
        max_completion_tokens: Integer | nil,
        max_prompt_tokens: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: String,
        object: Symbol,
-        parallel_tool_calls: OpenAI::BooleanModel,
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil,
        response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil,
        started_at: Integer | nil,
        status: OpenAI::Models::Beta::Threads::RunStatus,
        thread_id: String,
        tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]),
        truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil,
        usage: OpenAI::Models::Beta::Threads::Run::Usage | nil,
        temperature: Float | nil,
diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb
index 00d91a0a..8eab790c 100644
--- a/test/openai/resources/chat/completions_test.rb
+++ b/test/openai/resources/chat/completions_test.rb
@@ -14,7 +14,7 @@ def test_create_required_params
    assert_pattern do
      response => {
        id: String,
-        choices: ^(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
+        choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
        created: Integer,
        model: String,
        object: Symbol,
@@ -35,7 +35,7 @@ def test_retrieve
    assert_pattern do
      response => {
        id: String,
-        choices: ^(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
+        choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
        created: Integer,
        model: String,
        object: Symbol,
@@ -56,7 +56,7 @@ def test_update_required_params
    assert_pattern do
      response => {
        id: String,
-        choices: ^(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
+        choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
        created: Integer,
        model: String,
        object: Symbol,
@@ -84,7 +84,7 @@ def test_list
    assert_pattern do
      row => {
        id: String,
-        choices: ^(OpenAI::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
+        choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]),
        created: Integer,
        model: String,
        object: Symbol,
@@ -105,7 +105,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
diff --git a/test/openai/resources/completions_test.rb b/test/openai/resources/completions_test.rb
index f7d94ab8..e37c502a 100644
--- a/test/openai/resources/completions_test.rb
+++ b/test/openai/resources/completions_test.rb
@@ -13,7 +13,7 @@ def test_create_required_params
    assert_pattern do
      response => {
        id: String,
-        choices: ^(OpenAI::ArrayOf[OpenAI::Models::CompletionChoice]),
+        choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::CompletionChoice]),
        created: Integer,
        model: String,
        object: Symbol,
diff --git a/test/openai/resources/embeddings_test.rb b/test/openai/resources/embeddings_test.rb
index d9eb182b..a280c031 100644
--- a/test/openai/resources/embeddings_test.rb
+++ b/test/openai/resources/embeddings_test.rb
@@ -16,7 +16,7 @@ def test_create_required_params
    assert_pattern do
      response => {
-        data: ^(OpenAI::ArrayOf[OpenAI::Models::Embedding]),
+        data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Embedding]),
        model: String,
        object: Symbol,
        usage: OpenAI::Models::CreateEmbeddingResponse::Usage
diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb
index c4a14255..78bfa904 100644
--- a/test/openai/resources/files_test.rb
+++ b/test/openai/resources/files_test.rb
@@ -86,7 +86,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb
index a953af3d..9f164929 100644
--- a/test/openai/resources/fine_tuning/jobs_test.rb
+++ b/test/openai/resources/fine_tuning/jobs_test.rb
@@ -21,15 +21,15 @@ def test_create_required_params
        model: String,
        object: Symbol,
        organization_id: String,
-        result_files: ^(OpenAI::ArrayOf[String]),
+        result_files: ^(OpenAI::Internal::Type::ArrayOf[String]),
        seed: Integer,
        status: OpenAI::Models::FineTuning::FineTuningJob::Status,
        trained_tokens: Integer | nil,
        training_file: String,
        validation_file: String | nil,
        estimated_finish: Integer | nil,
-        integrations: ^(OpenAI::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil
      }
    end
@@ -53,15 +53,15 @@ def test_retrieve
        model: String,
        object: Symbol,
        organization_id: String,
-        result_files: ^(OpenAI::ArrayOf[String]),
+        result_files: ^(OpenAI::Internal::Type::ArrayOf[String]),
        seed: Integer,
        status: OpenAI::Models::FineTuning::FineTuningJob::Status,
        trained_tokens: Integer | nil,
        training_file: String,
        validation_file: String | nil,
        estimated_finish: Integer | nil,
-        integrations: ^(OpenAI::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil
      }
    end
@@ -92,15 +92,15 @@ def test_list
        model: String,
        object: Symbol,
        organization_id: String,
-        result_files: ^(OpenAI::ArrayOf[String]),
+        result_files: ^(OpenAI::Internal::Type::ArrayOf[String]),
        seed: Integer,
        status: OpenAI::Models::FineTuning::FineTuningJob::Status,
        trained_tokens: Integer | nil,
        training_file: String,
        validation_file: String | nil,
        estimated_finish: Integer | nil,
-        integrations: ^(OpenAI::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil
      }
    end
@@ -124,15 +124,15 @@ def test_cancel
        model: String,
        object: Symbol,
        organization_id: String,
-        result_files: ^(OpenAI::ArrayOf[String]),
+        result_files: ^(OpenAI::Internal::Type::ArrayOf[String]),
        seed: Integer,
        status: OpenAI::Models::FineTuning::FineTuningJob::Status,
        trained_tokens: Integer | nil,
        training_file: String,
        validation_file: String | nil,
        estimated_finish: Integer | nil,
-        integrations: ^(OpenAI::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil
      }
    end
@@ -159,7 +159,7 @@ def test_list_events
        level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level,
        message: String,
        object: Symbol,
-        data: OpenAI::Unknown | nil,
+        data: OpenAI::Internal::Type::Unknown | nil,
        type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type | nil
      }
    end
diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb
index c935d0c6..a9bb70a4 100644
--- a/test/openai/resources/images_test.rb
+++ b/test/openai/resources/images_test.rb
@@ -13,7 +13,7 @@ def test_create_variation_required_params
    assert_pattern do
      response => {
        created: Integer,
-        data: ^(OpenAI::ArrayOf[OpenAI::Models::Image])
+        data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image])
      }
    end
  end
@@ -32,7 +32,7 @@ def test_edit_required_params
    assert_pattern do
      response => {
        created: Integer,
-        data: ^(OpenAI::ArrayOf[OpenAI::Models::Image])
+        data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image])
      }
    end
  end
@@ -47,7 +47,7 @@ def test_generate_required_params
    assert_pattern do
      response => {
        created: Integer,
-        data: ^(OpenAI::ArrayOf[OpenAI::Models::Image])
+        data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image])
      }
    end
  end
diff --git a/test/openai/resources/models_test.rb b/test/openai/resources/models_test.rb
index 6316652e..06dac749 100644
--- a/test/openai/resources/models_test.rb
+++ b/test/openai/resources/models_test.rb
@@ -54,7 +54,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: String
      }
    end
diff --git a/test/openai/resources/moderations_test.rb b/test/openai/resources/moderations_test.rb
index cbd3c69f..d77f7d0b 100644
--- a/test/openai/resources/moderations_test.rb
+++ b/test/openai/resources/moderations_test.rb
@@ -14,7 +14,7 @@ def test_create_required_params
      response => {
        id: String,
        model: String,
-        results: ^(OpenAI::ArrayOf[OpenAI::Models::Moderation])
+        results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation])
      }
    end
  end
diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb
index 59a1cc5e..705de078 100644
--- a/test/openai/resources/responses/input_items_test.rb
+++ b/test/openai/resources/responses/input_items_test.rb
@@ -35,30 +35,30 @@ def test_list
          in {
            type: :message,
            id: String,
-            content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent]),
+            content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent]),
            role: OpenAI::Models::Responses::ResponseInputMessageItem::Role,
            status: OpenAI::Models::Responses::ResponseInputMessageItem::Status | nil
          }
          in {
            type: :message,
            id: String,
-            content: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content]),
+            content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content]),
            role: Symbol,
            status: OpenAI::Models::Responses::ResponseOutputMessage::Status
          }
          in {
            type: :file_search_call,
            id: String,
-            queries: ^(OpenAI::ArrayOf[String]),
+            queries: ^(OpenAI::Internal::Type::ArrayOf[String]),
            status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status,
-            results: ^(OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) | nil
+            results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) | nil
          }
          in {
            type: :computer_call,
            id: String,
            action: OpenAI::Models::Responses::ResponseComputerToolCall::Action,
            call_id: String,
-            pending_safety_checks: ^(OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]),
+            pending_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]),
            status: OpenAI::Models::Responses::ResponseComputerToolCall::Status
          }
          in {
@@ -66,7 +66,7 @@ def test_list
            id: String,
            call_id: String,
            output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
-            acknowledged_safety_checks: ^(OpenAI::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil,
+            acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil,
            status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status | nil
          }
          in {type: :web_search_call, id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status}
diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb
index d0e0157e..3b452659 100644
--- a/test/openai/resources/responses_test.rb
+++ b/test/openai/resources/responses_test.rb
@@ -17,14 +17,14 @@ def test_create_required_params
        error: OpenAI::Models::Responses::ResponseError | nil,
        incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: OpenAI::Models::ResponsesModel,
        object: Symbol,
-        output: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]),
-        parallel_tool_calls: OpenAI::BooleanModel,
+        output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]),
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        temperature: Float | nil,
        tool_choice: OpenAI::Models::Responses::Response::ToolChoice,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::Tool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]),
        top_p: Float | nil,
        max_output_tokens: Integer | nil,
        previous_response_id: String | nil,
@@ -52,14 +52,14 @@ def test_retrieve
        error: OpenAI::Models::Responses::ResponseError | nil,
        incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil,
        instructions: String | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        model: OpenAI::Models::ResponsesModel,
        object: Symbol,
-        output: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]),
-        parallel_tool_calls: OpenAI::BooleanModel,
+        output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]),
+        parallel_tool_calls: OpenAI::Internal::Type::BooleanModel,
        temperature: Float | nil,
        tool_choice: OpenAI::Models::Responses::Response::ToolChoice,
-        tools: ^(OpenAI::ArrayOf[union: OpenAI::Models::Responses::Tool]),
+        tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]),
        top_p: Float | nil,
        max_output_tokens: Integer | nil,
        previous_response_id: String | nil,
diff --git a/test/openai/resources/vector_stores/file_batches_test.rb b/test/openai/resources/vector_stores/file_batches_test.rb
index 538469fe..55dce57e 100644
--- a/test/openai/resources/vector_stores/file_batches_test.rb
+++ b/test/openai/resources/vector_stores/file_batches_test.rb
@@ -83,7 +83,7 @@ def test_list_files_required_params
        status: OpenAI::Models::VectorStores::VectorStoreFile::Status,
        usage_bytes: Integer,
        vector_store_id: String,
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
        chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil
      }
    end
diff --git a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb
index 5d806b95..279fda94 100644
--- a/test/openai/resources/vector_stores/files_test.rb
+++ b/test/openai/resources/vector_stores/files_test.rb
@@ -19,7 +19,7 @@ def test_create_required_params
        status: OpenAI::Models::VectorStores::VectorStoreFile::Status,
        usage_bytes: Integer,
        vector_store_id: String,
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
        chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil
      }
    end
@@ -41,7 +41,7 @@ def test_retrieve_required_params
        status: OpenAI::Models::VectorStores::VectorStoreFile::Status,
        usage_bytes: Integer,
        vector_store_id: String,
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
        chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil
      }
    end
@@ -68,7 +68,7 @@ def test_update_required_params
        status: OpenAI::Models::VectorStores::VectorStoreFile::Status,
        usage_bytes: Integer,
        vector_store_id: String,
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
        chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil
      }
    end
@@ -97,7 +97,7 @@ def test_list
        status: OpenAI::Models::VectorStores::VectorStoreFile::Status,
        usage_bytes: Integer,
        vector_store_id: String,
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil,
        chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil
      }
    end
@@ -113,7 +113,7 @@ def test_delete_required_params
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb
index c12e8413..1a45528c 100644
--- a/test/openai/resources/vector_stores_test.rb
+++ b/test/openai/resources/vector_stores_test.rb
@@ -16,7 +16,7 @@ def test_create
        created_at: Integer,
        file_counts: OpenAI::Models::VectorStore::FileCounts,
        last_active_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        name: String,
        object: Symbol,
        status: OpenAI::Models::VectorStore::Status,
@@ -40,7 +40,7 @@ def test_retrieve
        created_at: Integer,
        file_counts: OpenAI::Models::VectorStore::FileCounts,
        last_active_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        name: String,
        object: Symbol,
        status: OpenAI::Models::VectorStore::Status,
@@ -64,7 +64,7 @@ def test_update
        created_at: Integer,
        file_counts: OpenAI::Models::VectorStore::FileCounts,
        last_active_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        name: String,
        object: Symbol,
        status: OpenAI::Models::VectorStore::Status,
@@ -95,7 +95,7 @@ def test_list
        created_at: Integer,
        file_counts: OpenAI::Models::VectorStore::FileCounts,
        last_active_at: Integer | nil,
-        metadata: ^(OpenAI::HashOf[String]) | nil,
+        metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil,
        name: String,
        object: Symbol,
        status: OpenAI::Models::VectorStore::Status,
@@ -116,7 +116,7 @@ def test_delete
    assert_pattern do
      response => {
        id: String,
-        deleted: OpenAI::BooleanModel,
+        deleted: OpenAI::Internal::Type::BooleanModel,
        object: Symbol
      }
    end
@@ -138,8 +138,8 @@ def test_search_required_params
    assert_pattern do
      row => {
-        attributes: ^(OpenAI::HashOf[union: OpenAI::Models::VectorStoreSearchResponse::Attribute]) | nil,
-        content: ^(OpenAI::ArrayOf[OpenAI::Models::VectorStoreSearchResponse::Content]),
+        attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStoreSearchResponse::Attribute]) | nil,
+        content: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::VectorStoreSearchResponse::Content]),
        file_id: String,
        filename: String,
        score: Float

From 074bf151fb4e9de40b2565eaaf8cf81339f12576 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 3 Apr 2025 18:38:43 +0000
Subject: [PATCH 090/295] feat(api): manual updates (#92)

---
 .stats.yml                                    | 2 +-
 .../audio/transcription_create_params.rb      | 2 +-
 .../audio/transcription_create_response.rb    | 2 +-
lib/openai/models/beta/threads/run.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/models/completion.rb | 2 +- lib/openai/models/completion_create_params.rb | 2 +- lib/openai/resources/audio/transcriptions.rb | 6 ++--- lib/openai/resources/beta/threads/runs.rb | 12 +++++----- lib/openai/resources/completions.rb | 6 ++--- .../openai/resources/audio/transcriptions.rbi | 10 ++++----- .../openai/resources/beta/threads/runs.rbi | 22 ++++++++++--------- rbi/lib/openai/resources/completions.rbi | 10 ++++----- sig/openai/resources/audio/transcriptions.rbs | 2 +- sig/openai/resources/beta/threads/runs.rbs | 4 ++-- sig/openai/resources/completions.rbs | 2 +- 17 files changed, 46 insertions(+), 44 deletions(-) diff --git a/.stats.yml b/.stats.yml index b80d385d..e0e1a71e 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 80 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: 578c5bff4208d560c0c280f13324409f +config_hash: bcd2cacdcb9fae9938f273cd167f613c diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index f3dc4d0d..0cd4367b 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -5,7 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Transcriptions#create # - # @see OpenAI::Resources::Audio::Transcriptions#stream_raw + # @see OpenAI::Resources::Audio::Transcriptions#create_streaming class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index a8026630..75cfc88b 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -8,7 +8,7 @@ module Audio # # @see OpenAI::Resources::Audio::Transcriptions#create # - # @see OpenAI::Resources::Audio::Transcriptions#stream_raw + # @see OpenAI::Resources::Audio::Transcriptions#create_streaming module TranscriptionCreateResponse extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 8af69bee..9377b199 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class Run < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 51a64adc..612590b0 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 85d3e4d9..4e4733e6 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 5d7fd8b0..49f1d4c1 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Completions#create # - # @see OpenAI::Resources::Completions#stream_raw + # @see OpenAI::Resources::Completions#create_streaming class Completion < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the completion. diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index e17ce7d4..63d127ca 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Completions#create # - # @see OpenAI::Resources::Completions#stream_raw + # @see OpenAI::Resources::Completions#create_streaming class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 677b18f1..bd918fe4 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -24,7 +24,7 @@ class Transcriptions def create(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_streaming` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -39,7 +39,7 @@ def create(params) # Transcribes audio into the input language. 
# - # @overload stream_raw(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # # @param file [IO, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] @@ -54,7 +54,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Audio::TranscriptionCreateParams - def stream_raw(params) + def create_streaming(params) parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index aa61011c..bce39bba 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -38,7 +38,7 @@ class Runs def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_stream_raw` for the streaming use case." raise ArgumentError.new(message) end query_params = [:include] @@ -54,7 +54,7 @@ def create(thread_id, params) # Create a run. # - # @overload stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param thread_id [String] # @param assistant_id [String] @@ -79,7 +79,7 @@ def create(thread_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams - def stream_raw(thread_id, params) + def create_stream_raw(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." @@ -220,7 +220,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#submit_tool_stream_raw` for the streaming use case." + message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -241,7 +241,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. 
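      #
      # A hypothetical sketch of the renamed method in use; the IDs and output
      # value are illustrative placeholders:
      #
      # @example
      #   stream = openai.beta.threads.runs.submit_tool_outputs_stream_raw(
      #     "run_abc123",
      #     thread_id: "thread_abc123",
      #     tool_outputs: [{tool_call_id: "call_abc123", output: "70 degrees"}]
      #   )
      #   stream.each { |event| puts(event) }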
# - # @overload submit_tool_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -251,7 +251,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def submit_tool_stream_raw(run_id, params) + def submit_tool_outputs_stream_raw(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 861ab13a..06c5a7dd 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -32,7 +32,7 @@ class Completions def create(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_streaming` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -46,7 +46,7 @@ def create(params) # Creates a completion for the provided prompt and parameters. # - # @overload stream_raw(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) + # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] @@ -70,7 +70,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::CompletionCreateParams - def stream_raw(params) + def create_streaming(params) parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index d0f71924..047d7619 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -59,8 +59,8 @@ module OpenAI # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. timestamp_granularities: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -89,7 +89,7 @@ module OpenAI ] ) end - def stream_raw( + def create_streaming( # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. 
file:,
@@ -128,8 +128,8 @@ module OpenAI
      #   is no additional latency for segment timestamps, but generating word timestamps
      #   incurs additional latency.
      timestamp_granularities: nil,
-      # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
-      # for streaming and non-streaming use cases, respectively.
+      # There is no need to provide `stream:`. Instead, use `#create_streaming` or
+      # `#create` for streaming and non-streaming use cases, respectively.
      stream: true,
      request_options: {}
    )
diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi
index a1f66f91..68622174 100644
--- a/rbi/lib/openai/resources/beta/threads/runs.rbi
+++ b/rbi/lib/openai/resources/beta/threads/runs.rbi
@@ -170,8 +170,8 @@ module OpenAI
      #   Body param: Controls for how a thread will be truncated prior to the run. Use
      #   this to control the initial context window of the run.
      truncation_strategy: nil,
-      # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
-      # for streaming and non-streaming use cases, respectively.
+      # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
+      # `#create` for streaming and non-streaming use cases, respectively.
      stream: false,
      request_options: {}
    )
@@ -259,7 +259,7 @@ module OpenAI
        ]
      )
    end
-    def stream_raw(
+    def create_stream_raw(
      # Path param: The ID of the thread to run.
      thread_id,
      # Body param: The ID of the
@@ -368,8 +368,8 @@ module OpenAI
      #   Body param: Controls for how a thread will be truncated prior to the run. Use
      #   this to control the initial context window of the run.
      truncation_strategy: nil,
-      # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create`
-      # for streaming and non-streaming use cases, respectively.
+      # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
+      # `#create` for streaming and non-streaming use cases, respectively.
      stream: true,
      request_options: {}
    )
@@ -497,8 +497,9 @@ module OpenAI
      thread_id:,
      # Body param: A list of tools for which the outputs are being submitted.
      tool_outputs:,
-      # There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or
-      # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively.
+      # There is no need to provide `stream:`. Instead, use
+      # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and
+      # non-streaming use cases, respectively.
      stream: false,
      request_options: {}
    )
@@ -547,7 +548,7 @@ module OpenAI
        ]
      )
    end
-    def submit_tool_stream_raw(
+    def submit_tool_outputs_stream_raw(
      # Path param: The ID of the run that requires the tool output submission.
      run_id,
      # Path param: The ID of the
@@ -556,8 +557,9 @@ module OpenAI
      thread_id:,
      # Body param: A list of tools for which the outputs are being submitted.
      tool_outputs:,
-      # There is no need to provide `stream:`. Instead, use `#submit_tool_stream_raw` or
-      # `#submit_tool_outputs` for streaming and non-streaming use cases, respectively.
+      # There is no need to provide `stream:`. Instead, use
+      # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and
+      # non-streaming use cases, respectively.
      stream: true,
      request_options: {}
    )
diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi
index 08f12a97..d064199d 100644
--- a/rbi/lib/openai/resources/completions.rbi
+++ b/rbi/lib/openai/resources/completions.rbi
@@ -139,8 +139,8 @@ module OpenAI
      #   and detect abuse.
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -178,7 +178,7 @@ module OpenAI ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion]) end - def stream_raw( + def create_streaming( # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -282,8 +282,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index e1577759..6506a498 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -14,7 +14,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response - def stream_raw: ( + def create_streaming: ( file: IO | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 5a1bb9fd..df8265f2 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -27,7 +27,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def stream_raw: ( + def create_stream_raw: ( String thread_id, assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def submit_tool_stream_raw: ( + def submit_tool_outputs_stream_raw: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index b786a2e1..c266af82 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -22,7 +22,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Completion - def stream_raw: ( + def create_streaming: ( model: OpenAI::Models::CompletionCreateParams::model, prompt: OpenAI::Models::CompletionCreateParams::prompt?, ?best_of: Integer?, From c2518ef7c807be857ef77f29e005c036ef6460eb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 19:21:07 +0000 Subject: [PATCH 091/295] feat!: bump min supported ruby version to 3.1 (oldest non-EOL) (#93) --- README.md | 4 ++-- lib/openai/internal/cursor_page.rb | 12 ----------- lib/openai/internal/page.rb | 12 ----------- lib/openai/internal/stream.rb | 11 
---------- lib/openai/internal/type/base_page.rb | 22 +------------------- lib/openai/internal/type/base_stream.rb | 16 ++------------ rbi/lib/openai/internal.rbi | 2 ++ rbi/lib/openai/internal/type/base_page.rbi | 1 + rbi/lib/openai/internal/type/base_stream.rbi | 1 + sig/openai/internal.rbs | 1 + 10 files changed, 10 insertions(+), 72 deletions(-) diff --git a/README.md b/README.md index fa326448..3a2e94a6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # OpenAI Ruby API library -The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.0.0+ application. +The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.1.0+ application. ## Documentation @@ -202,4 +202,4 @@ This package considers improvements to the (non-runtime) `*.rbi` and `*.rbs` typ ## Requirements -Ruby 3.0.0 or higher. +Ruby 3.1.0 or higher. diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb index ba3b1b74..90625f49 100644 --- a/lib/openai/internal/cursor_page.rb +++ b/lib/openai/internal/cursor_page.rb @@ -11,18 +11,6 @@ module Internal # cursor_page.auto_paging_each do |completion| # puts(completion) # end - # - # @example - # completions = - # cursor_page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # completions => Array class CursorPage include OpenAI::Internal::Type::BasePage diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb index 867cd496..3fd199ef 100644 --- a/lib/openai/internal/page.rb +++ b/lib/openai/internal/page.rb @@ -11,18 +11,6 @@ module Internal # page.auto_paging_each do |model| # puts(model) # end - # - # @example - # models = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # models => Array class Page include OpenAI::Internal::Type::BasePage diff --git a/lib/openai/internal/stream.rb b/lib/openai/internal/stream.rb index 3c9e0748..bef131b1 100644 --- a/lib/openai/internal/stream.rb +++ b/lib/openai/internal/stream.rb @@ -6,17 +6,6 @@ module Internal # stream.each do |event| # puts(event) # end - # - # @example - # events = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # events => Array class Stream include OpenAI::Internal::Type::BaseStream diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb index 63882c48..a530d34b 100644 --- a/lib/openai/internal/type/base_page.rb +++ b/lib/openai/internal/type/base_page.rb @@ -3,27 +3,7 @@ module OpenAI module Internal module Type - # @example - # if page.has_next? - # page = page.next_page - # end - # - # @example - # page.auto_paging_each do |completion| - # puts(completion) - # end - # - # @example - # completions = - # page - # .to_enum - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # completions => Array + # This module provides a base implementation for paginated responses in the SDK. module BasePage # rubocop:disable Lint/UnusedMethodArgument diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index 064878ba..53bbcecb 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -3,21 +3,9 @@ module OpenAI module Internal module Type - # @example - # stream.each do |chunk| - # puts(chunk) - # end + # This module provides a base implementation for streaming responses in the SDK. 
# - # @example - # chunks = - # stream - # .lazy - # .select { _1.object_id.even? } - # .map(&:itself) - # .take(2) - # .to_a - # - # chunks => Array + # @see https://rubyapi.org/3.1/o/enumerable module BaseStream include Enumerable diff --git a/rbi/lib/openai/internal.rbi b/rbi/lib/openai/internal.rbi index ff17c5f5..42138966 100644 --- a/rbi/lib/openai/internal.rbi +++ b/rbi/lib/openai/internal.rbi @@ -6,5 +6,7 @@ module OpenAI # Due to the current WIP status of Shapes support in Sorbet, types referencing # this alias might be refined in the future. AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + + OMIT = T.let(T.anything, T.anything) end end diff --git a/rbi/lib/openai/internal/type/base_page.rbi b/rbi/lib/openai/internal/type/base_page.rbi index 1125a5c8..b40068ca 100644 --- a/rbi/lib/openai/internal/type/base_page.rbi +++ b/rbi/lib/openai/internal/type/base_page.rbi @@ -3,6 +3,7 @@ module OpenAI module Internal module Type + # This module provides a base implementation for paginated responses in the SDK. module BasePage Elem = type_member(:out) diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi index 10e8da7e..1e0940f8 100644 --- a/rbi/lib/openai/internal/type/base_stream.rbi +++ b/rbi/lib/openai/internal/type/base_stream.rbi @@ -3,6 +3,7 @@ module OpenAI module Internal module Type + # This module provides a base implementation for streaming responses in the SDK. module BaseStream include Enumerable diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs index ff72a6b0..1da6dd75 100644 --- a/sig/openai/internal.rbs +++ b/sig/openai/internal.rbs @@ -1,4 +1,5 @@ module OpenAI module Internal + OMIT: top end end From c2a0c8be1817b79abaf1079a710b707a2ccd0d9f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 3 Apr 2025 20:29:33 +0000 Subject: [PATCH 092/295] chore: demonstrate how to make undocumented requests in README (#94) --- README.md | 29 +++++++++++++++- lib/openai/internal/transport/base_client.rb | 34 +++++++++++++------ lib/openai/resources/audio/transcriptions.rb | 10 ++++-- lib/openai/resources/beta/threads.rb | 9 +++-- lib/openai/resources/beta/threads/runs.rb | 20 ++++++++--- lib/openai/resources/chat/completions.rb | 8 +++-- lib/openai/resources/completions.rb | 8 +++-- lib/openai/resources/responses.rb | 8 +++-- .../openai/internal/transport/base_client.rbi | 2 ++ .../openai/resources/audio/transcriptions.rbi | 10 ++++-- rbi/lib/openai/resources/beta/threads.rbi | 9 +++-- .../openai/resources/beta/threads/runs.rbi | 20 ++++++++--- rbi/lib/openai/resources/chat/completions.rbi | 8 +++-- rbi/lib/openai/resources/completions.rbi | 8 +++-- rbi/lib/openai/resources/responses.rbi | 8 +++-- sig/openai/internal/transport/base_client.rbs | 32 ++++++++--------- 16 files changed, 165 insertions(+), 58 deletions(-) diff --git a/README.md b/README.md index 3a2e94a6..a915e37f 100644 --- a/README.md +++ b/README.md @@ -158,7 +158,9 @@ openai.chat.completions.create( ) ``` -## Sorbet Support +## LSP Support + +### Sorbet **This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality. @@ -184,6 +186,31 @@ openai.chat.completions.create(**model) ## Advanced +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. 
+
+If you need to access undocumented endpoints, params, or response properties, the library can still be used.
+
+#### Undocumented request params
+
+If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request, as seen in the examples above.
+
+#### Undocumented endpoints
+
+To make requests to undocumented endpoints, use `client.request`. Options on the client, such as retries, will be respected when making this request.
+
+```ruby
+response =
+  client.request(
+    method: :post,
+    path: '/undocumented/endpoint',
+    query: {"dog": "woof"},
+    headers: {"useful-header": "interesting-value"},
+    body: {"he": "llo"},
+  )
+```
+
 ### Concurrency & Connection Pooling

 The `OpenAI::Client` instances are thread-safe and should be re-used across multiple threads. By default, each `Client` has its own HTTP connection pool, with a maximum number of connections equal to thread count.

diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb
index cd7979be..c23b9986 100644
--- a/lib/openai/internal/transport/base_client.rb
+++ b/lib/openai/internal/transport/base_client.rb
@@ -395,27 +395,39 @@ def initialize(
      # Execute the request specified by `req`. This is the method that all resource
      # methods call into.
      #
-      # @param req [Hash{Symbol=>Object}] .
+      # @overload request(method, path, query: {}, headers: {}, body: nil, unwrap: nil, page: nil, stream: nil, model: OpenAI::Internal::Type::Unknown, options: {})
      #
-      # @option req [Symbol] :method
+      # @param method [Symbol]
      #
-      # @option req [String, Array] :path
+      # @param path [String, Array]
      #
-      # @option req [Hash{String=>Array, String, nil}, nil] :query
+      # @param query [Hash{String=>Array, String, nil}, nil]
      #
-      # @option req [Hash{String=>String, Integer, Array, nil}, nil] :headers
+      # @param headers [Hash{String=>String, Integer, Array, nil}, nil]
      #
-      # @option req [Object, nil] :body
+      # @param body [Object, nil]
      #
-      # @option req [Symbol, nil] :unwrap
+      # @param unwrap [Symbol, nil]
      #
-      # @option req [Class, nil] :page
+      # @param page [Class, nil]
      #
-      # @option req [Class, nil] :stream
+      # @param stream [Class, nil]
      #
-      # @option req [OpenAI::Internal::Type::Converter, Class, nil] :model
+      # @param model [OpenAI::Internal::Type::Converter, Class, nil]
+      #
+      # @param options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] .
+      #
+      # @option options [String, nil] :idempotency_key
+      #
+      # @option options [Hash{String=>Array, String, nil}, nil] :extra_query
+      #
+      # @option options [Hash{String=>String, nil}, nil] :extra_headers
+      #
+      # @option options [Object, nil] :extra_body
+      #
+      # @option options [Integer, nil] :max_retries
      #
-      # @option req [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] :options
+      # @option options [Float, nil] :timeout
      #
      # @raise [OpenAI::Errors::APIError]
      # @return [Object]
diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb
index bd918fe4..2a669ca4 100644
--- a/lib/openai/resources/audio/transcriptions.rb
+++ b/lib/openai/resources/audio/transcriptions.rb
@@ -4,7 +4,10 @@ module OpenAI
  module Resources
    class Audio
      class Transcriptions
-        # Transcribes audio into the input language.
+        # See {OpenAI::Resources::Audio::Transcriptions#create_streaming} for streaming
+        # counterpart.
+        #
+        # Transcribes audio into the input language.
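+        #
+        # A hypothetical non-streaming call; the client variable, file path, and
+        # model are illustrative assumptions:
+        #
+        # @example
+        #   transcription = openai.audio.transcriptions.create(
+        #     file: File.open("speech.mp3", "rb"),
+        #     model: "whisper-1"
+        #   )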
# # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # @@ -37,7 +40,10 @@ def create(params) ) end - # Transcribes audio into the input language. + # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming + # counterpart. + # + # Transcribes audio into the input language. # # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index f581f130..3eea6fe8 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -94,7 +94,9 @@ def delete(thread_id, params = {}) ) end - # Create a thread and run it in one request. + # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. + # + # Create a thread and run it in one request. # # @overload create_and_run(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -133,7 +135,10 @@ def create_and_run(params) ) end - # Create a thread and run it in one request. + # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming + # counterpart. + # + # Create a thread and run it in one request. # # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index bce39bba..cd12bbcc 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -8,7 +8,10 @@ class Runs # @return [OpenAI::Resources::Beta::Threads::Runs::Steps] attr_reader :steps - # Create a run. + # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming + # counterpart. + # + # Create a run. # # @overload create(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -52,7 +55,10 @@ def create(thread_id, params) ) end - # Create a run. + # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming + # counterpart. + # + # Create a run. 
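+        #
+        # A hypothetical streaming sketch; the thread and assistant IDs are
+        # illustrative placeholders:
+        #
+        # @example
+        #   stream = openai.beta.threads.runs.create_stream_raw(
+        #     "thread_abc123",
+        #     assistant_id: "asst_abc123"
+        #   )
+        #   stream.each { |event| puts(event) }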
# # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -202,7 +208,10 @@ def cancel(run_id, params) ) end - # When a run has the `status: "requires_action"` and `required_action.type` is + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for + # streaming counterpart. + # + # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single # request. @@ -236,7 +245,10 @@ def submit_tool_outputs(run_id, params) ) end - # When a run has the `status: "requires_action"` and `required_action.type` is + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for + # non-streaming counterpart. + # + # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single # request. diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 52736d64..8d816acc 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -7,7 +7,9 @@ class Completions # @return [OpenAI::Resources::Chat::Completions::Messages] attr_reader :messages - # **Starting a new project?** We recommend trying + # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. + # + # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). @@ -77,7 +79,9 @@ def create(params) ) end - # **Starting a new project?** We recommend trying + # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. + # + # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 06c5a7dd..3440bd65 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -3,7 +3,9 @@ module OpenAI module Resources class Completions - # Creates a completion for the provided prompt and parameters. + # See {OpenAI::Resources::Completions#create_streaming} for streaming counterpart. + # + # Creates a completion for the provided prompt and parameters. 
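+      #
+      # A hypothetical non-streaming call; the model and prompt are illustrative
+      # assumptions:
+      #
+      # @example
+      #   completion = openai.completions.create(
+      #     model: "gpt-3.5-turbo-instruct",
+      #     prompt: "Say this is a test"
+      #   )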
# # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # @@ -44,7 +46,9 @@ def create(params) ) end - # Creates a completion for the provided prompt and parameters. + # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. + # + # Creates a completion for the provided prompt and parameters. # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 3eee10b4..a4a9ff87 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -6,7 +6,9 @@ class Responses # @return [OpenAI::Resources::Responses::InputItems] attr_reader :input_items - # Creates a model response. Provide + # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. + # + # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or # [image](https://platform.openai.com/docs/guides/images) inputs to generate # [text](https://platform.openai.com/docs/guides/text) or @@ -57,7 +59,9 @@ def create(params) ) end - # Creates a model response. Provide + # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. + # + # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or # [image](https://platform.openai.com/docs/guides/images) inputs to generate # [text](https://platform.openai.com/docs/guides/text) or diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index f27429e0..d3c028a2 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -166,6 +166,8 @@ module OpenAI # Execute the request specified by `req`. This is the method that all resource # methods call into. + # + # @overload request(method, path, query: {}, headers: {}, body: nil, unwrap: nil, page: nil, stream: nil, model: OpenAI::Internal::Type::Unknown, options: {}) sig do params( method: Symbol, diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 047d7619..d08bf3a7 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -4,7 +4,10 @@ module OpenAI module Resources class Audio class Transcriptions - # Transcribes audio into the input language. + # See {OpenAI::Resources::Audio::Transcriptions#create_streaming} for streaming + # counterpart. + # + # Transcribes audio into the input language. sig do params( file: T.any(IO, StringIO), @@ -66,7 +69,10 @@ module OpenAI ) end - # Transcribes audio into the input language. + # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming + # counterpart. + # + # Transcribes audio into the input language. 
sig do params( file: T.any(IO, StringIO), diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 4a22f258..a4d03027 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -99,7 +99,9 @@ module OpenAI ) end - # Create a thread and run it in one request. + # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. + # + # Create a thread and run it in one request. sig do params( assistant_id: String, @@ -243,7 +245,10 @@ module OpenAI ) end - # Create a thread and run it in one request. + # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming + # counterpart. + # + # Create a thread and run it in one request. sig do params( assistant_id: String, diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 68622174..69460b3d 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -8,7 +8,10 @@ module OpenAI sig { returns(OpenAI::Resources::Beta::Threads::Runs::Steps) } attr_reader :steps - # Create a run. + # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming + # counterpart. + # + # Create a run. sig do params( thread_id: String, @@ -177,7 +180,10 @@ module OpenAI ) end - # Create a run. + # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming + # counterpart. + # + # Create a run. sig do params( thread_id: String, @@ -474,7 +480,10 @@ module OpenAI ) end - # When a run has the `status: "requires_action"` and `required_action.type` is + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for + # streaming counterpart. + # + # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single # request. @@ -505,7 +514,10 @@ module OpenAI ) end - # When a run has the `status: "requires_action"` and `required_action.type` is + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for + # non-streaming counterpart. + # + # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single # request. diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 7a170eb6..20a9a8b0 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -7,7 +7,9 @@ module OpenAI sig { returns(OpenAI::Resources::Chat::Completions::Messages) } attr_reader :messages - # **Starting a new project?** We recommend trying + # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. + # + # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). @@ -275,7 +277,9 @@ module OpenAI ) end - # **Starting a new project?** We recommend trying + # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. 
+ # + # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index d064199d..2054b2b3 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -3,7 +3,9 @@ module OpenAI module Resources class Completions - # Creates a completion for the provided prompt and parameters. + # See {OpenAI::Resources::Completions#create_streaming} for streaming counterpart. + # + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), @@ -146,7 +148,9 @@ module OpenAI ) end - # Creates a completion for the provided prompt and parameters. + # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. + # + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 1999f3c5..4ccb309d 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -6,7 +6,9 @@ module OpenAI sig { returns(OpenAI::Resources::Responses::InputItems) } attr_reader :input_items - # Creates a model response. Provide + # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. + # + # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or # [image](https://platform.openai.com/docs/guides/images) inputs to generate # [text](https://platform.openai.com/docs/guides/text) or @@ -184,7 +186,9 @@ module OpenAI ) end - # Creates a model response. Provide + # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. + # + # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or # [image](https://platform.openai.com/docs/guides/images) inputs to generate # [text](https://platform.openai.com/docs/guides/text) or diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index abdc7471..d66d0e62 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -88,24 +88,20 @@ module OpenAI send_retry_header: bool ) -> [Integer, top, Enumerable[String]] - def request: - ( - Symbol method, - String | ::Array[String] path, - ?query: ::Hash[String, (::Array[String] | String)?]?, - ?headers: ::Hash[String, (String - | Integer - | ::Array[(String | Integer)?])?]?, - ?body: top?, - ?unwrap: Symbol?, - ?page: Class?, - ?stream: Class?, - ?model: OpenAI::Internal::Type::Converter::input?, - ?options: OpenAI::request_opts? - ) -> top - | ( - OpenAI::Internal::Transport::BaseClient::request_components req - ) -> top + def request: ( + Symbol method, + String | ::Array[String] path, + ?query: ::Hash[String, (::Array[String] | String)?]?, + ?headers: ::Hash[String, (String + | Integer + | ::Array[(String | Integer)?])?]?, + ?body: top?, + ?unwrap: Symbol?, + ?page: Class?, + ?stream: Class?, + ?model: OpenAI::Internal::Type::Converter::input?, + ?options: OpenAI::request_opts? 
) -> top

    def inspect: -> String
  end

From 6612b9bd7822b150a24bd5ffb206f0e361dbda31 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 3 Apr 2025 22:37:28 +0000
Subject: [PATCH 093/295] fix: pre-release version string should match ruby,
 not semver conventions (#95)

---
 lib/openai/version.rb      | 2 +-
 rbi/lib/openai/version.rbi | 2 +-
 sig/openai/version.rbs     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/lib/openai/version.rb b/lib/openai/version.rb
index 148d7830..f0a95660 100644
--- a/lib/openai/version.rb
+++ b/lib/openai/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module OpenAI
-  VERSION = "0.1.0-alpha.3"
+  VERSION = "0.1.0.pre.alpha.3"
 end
diff --git a/rbi/lib/openai/version.rbi b/rbi/lib/openai/version.rbi
index 5886c95b..41e15730 100644
--- a/rbi/lib/openai/version.rbi
+++ b/rbi/lib/openai/version.rbi
@@ -1,5 +1,5 @@
 # typed: strong

 module OpenAI
-  VERSION = "0.1.0-alpha.3"
+  VERSION = "0.1.0.pre.alpha.3"
 end
diff --git a/sig/openai/version.rbs b/sig/openai/version.rbs
index d670f686..aa9ea54f 100644
--- a/sig/openai/version.rbs
+++ b/sig/openai/version.rbs
@@ -1,3 +1,3 @@
 module OpenAI
-  VERSION: "0.1.0-alpha.3"
+  VERSION: "0.1.0.pre.alpha.3"
 end

From 2d1a6c18b53284cb572e98de76b5a6463309b331 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 3 Apr 2025 23:40:05 +0000
Subject: [PATCH 094/295] feat: support solargraph generics (#96)

---
 lib/openai/internal/cursor_page.rb           |  6 ++++-
 lib/openai/internal/page.rb                  |  6 ++++-
 lib/openai/internal/stream.rb                |  4 +++-
 lib/openai/internal/transport/base_client.rb |  4 ++--
 .../transport/pooled_net_requester.rb        |  2 +-
 lib/openai/internal/type/array_of.rb         |  8 ++++---
 lib/openai/internal/type/base_page.rb        |  5 +++-
 lib/openai/internal/type/base_stream.rb      |  9 ++++---
 lib/openai/internal/type/hash_of.rb          |  4 +++-
 lib/openai/internal/util.rb                  | 24 +++++++++----------
 rbi/lib/openai/internal/cursor_page.rbi      |  4 ++--
 rbi/lib/openai/internal/page.rbi             |  4 ++--
 rbi/lib/openai/internal/stream.rbi           |  4 ++--
 rbi/lib/openai/internal/type/array_of.rbi    |  4 ++--
 sig/openai/internal/type/array_of.rbs        |  4 ++--
 15 files changed, 56 insertions(+), 36 deletions(-)

diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb
index 90625f49..a56921db 100644
--- a/lib/openai/internal/cursor_page.rb
+++ b/lib/openai/internal/cursor_page.rb
@@ -2,6 +2,8 @@

 module OpenAI
   module Internal
+    # @generic Elem
+    #
     # @example
     #   if cursor_page.has_next?
     #     cursor_page = cursor_page.next_page
@@ -14,7 +16,7 @@ module Internal
    class CursorPage
      include OpenAI::Internal::Type::BasePage

-      # @return [Array, nil]
+      # @return [Array<generic<Elem>>, nil]
      attr_accessor :data

      # @return [Boolean]
@@ -61,6 +63,8 @@ def next_page
      end

      # @param blk [Proc]
+      #
+      # @yieldparam [generic<Elem>]
      def auto_paging_each(&blk)
        unless block_given?
          raise ArgumentError.new("A block must be given to ##{__method__}")
diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb
index 3fd199ef..368ffd45 100644
--- a/lib/openai/internal/page.rb
+++ b/lib/openai/internal/page.rb
@@ -2,6 +2,8 @@

 module OpenAI
   module Internal
+    # @generic Elem
+    #
     # @example
     #   if page.has_next?
#     page = page.next_page
@@ -14,7 +16,7 @@ module Internal
    class Page
      include OpenAI::Internal::Type::BasePage

-      # @return [Array, nil]
+      # @return [Array<generic<Elem>>, nil]
      attr_accessor :data

      # @return [String]
@@ -55,6 +57,8 @@ def next_page
      end

      # @param blk [Proc]
+      #
+      # @yieldparam [generic<Elem>]
      def auto_paging_each(&blk)
        unless block_given?
          raise ArgumentError.new("A block must be given to ##{__method__}")
diff --git a/lib/openai/internal/stream.rb b/lib/openai/internal/stream.rb
index bef131b1..ad1f7a1d 100644
--- a/lib/openai/internal/stream.rb
+++ b/lib/openai/internal/stream.rb
@@ -2,6 +2,8 @@

 module OpenAI
   module Internal
+    # @generic Elem
+    #
     # @example
     #   stream.each do |event|
     #     puts(event)
@@ -11,7 +13,7 @@ class Stream

      # @api private
      #
-      # @return [Enumerable]
+      # @return [Enumerable<generic<Elem>>]
      private def iterator
        # rubocop:disable Metrics/BlockLength
        @iterator ||= OpenAI::Internal::Util.chain_fused(@stream) do |y|
diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb
index c23b9986..a46be452 100644
--- a/lib/openai/internal/transport/base_client.rb
+++ b/lib/openai/internal/transport/base_client.rb
@@ -131,7 +131,7 @@ def follow_redirect(request, status:, response_headers:)
      # @api private
      #
      # @param status [Integer, OpenAI::Errors::APIConnectionError]
-      # @param stream [Enumerable, nil]
+      # @param stream [Enumerable<String>, nil]
      def reap_connection!(status, stream:)
        case status
        in (..199) | (300..499)
@@ -328,7 +328,7 @@ def initialize(
      # @param send_retry_header [Boolean]
      #
      # @raise [OpenAI::Errors::APIError]
-      # @return [Array(Integer, Net::HTTPResponse, Enumerable)]
+      # @return [Array(Integer, Net::HTTPResponse, Enumerable<String>)]
      private def send_request(request, redirect_count:, retry_count:, send_retry_header:)
        url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout)
        input = {**request.except(:timeout), deadline: OpenAI::Internal::Util.monotonic_secs + timeout}
diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb
index c5ee96d9..74f76024 100644
--- a/lib/openai/internal/transport/pooled_net_requester.rb
+++ b/lib/openai/internal/transport/pooled_net_requester.rb
@@ -119,7 +119,7 @@ def build_request(request, &blk)
      #
      # @option request [Float] :deadline
      #
-      # @return [Array(Integer, Net::HTTPResponse, Enumerable)]
+      # @return [Array(Integer, Net::HTTPResponse, Enumerable<String>)]
      def execute(request)
        url, deadline = request.fetch_values(:url, :deadline)

diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb
index 5c67fc15..e483f418 100644
--- a/lib/openai/internal/type/array_of.rb
+++ b/lib/openai/internal/type/array_of.rb
@@ -7,6 +7,8 @@ module Type
      #
      # @abstract
      #
+      # @generic Elem
+      #
      # Array of items of a given type.
      class ArrayOf
        include OpenAI::Internal::Type::Converter
@@ -40,7 +42,7 @@ def ==(other)

        # @api private
        #
-        # @param value [Enumerable, Object]
+        # @param value [Array, Object]
        #
        # @param state [Hash{Symbol=>Object}] .
#
@@ -75,7 +77,7 @@ def coerce(value, state:)

        # @api private
        #
-        # @param value [Enumerable, Object]
+        # @param value [Array, Object]
        #
        # @return [Array, Object]
        def dump(value)
@@ -85,7 +87,7 @@ def dump(value)

        # @api private
        #
-        # @return [OpenAI::Internal::Type::Converter, Class]
+        # @return [generic<Elem>]
        protected def item_type = @item_type_fn.call

        # @api private
diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb
index a530d34b..d4c64531 100644
--- a/lib/openai/internal/type/base_page.rb
+++ b/lib/openai/internal/type/base_page.rb
@@ -3,6 +3,8 @@
 module OpenAI
   module Internal
     module Type
+      # @generic Elem
+      #
      # This module provides a base implementation for paginated responses in the SDK.
      module BasePage
        # rubocop:disable Lint/UnusedMethodArgument
@@ -16,10 +18,11 @@ def next_page = (raise NotImplementedError)

      # @param blk [Proc]
      #
+      # @yieldparam [generic<Elem>]
      # @return [void]
      def auto_paging_each(&blk) = (raise NotImplementedError)

-      # @return [Enumerable]
+      # @return [Enumerable<generic<Elem>>]
      def to_enum = super(:auto_paging_each)

      alias_method :enum_for, :to_enum
diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb
index 53bbcecb..c9dd5765 100644
--- a/lib/openai/internal/type/base_stream.rb
+++ b/lib/openai/internal/type/base_stream.rb
@@ -3,6 +3,8 @@
 module OpenAI
   module Internal
     module Type
+      # @generic Elem
+      #
      # This module provides a base implementation for streaming responses in the SDK.
      #
      # @see https://rubyapi.org/3.1/o/enumerable
      module BaseStream
        include Enumerable

        def close = OpenAI::Internal::Util.close_fused!(@iterator)

        # @api private
        #
-        # @return [Enumerable]
+        # @return [Enumerable<generic<Elem>>]
        private def iterator = (raise NotImplementedError)

        # @param blk [Proc]
        #
+        # @yieldparam [generic<Elem>]
        # @return [void]
        def each(&blk)
          unless block_given?
@@ -27,7 +30,7 @@ def each(&blk)
          @iterator.each(&blk)
        end

-        # @return [Enumerator]
+        # @return [Enumerator<generic<Elem>>]
        def to_enum = @iterator

        alias_method :enum_for, :to_enum
@@ -38,7 +41,7 @@ def to_enum = @iterator
        # @param url [URI::Generic]
        # @param status [Integer]
        # @param response [Net::HTTPResponse]
-        # @param stream [Enumerable]
+        # @param stream [Enumerable]
        def initialize(model:, url:, status:, response:, stream:)
          @model = model
          @url = url
diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb
index eb5b546e..2d69f003 100644
--- a/lib/openai/internal/type/hash_of.rb
+++ b/lib/openai/internal/type/hash_of.rb
@@ -7,6 +7,8 @@ module Type
      #
      # @abstract
      #
+      # @generic Elem
+      #
      # Hash of items of a given type.
class HashOf include OpenAI::Internal::Type::Converter @@ -111,7 +113,7 @@ def dump(value) # @api private # - # @return [OpenAI::Internal::Type::Converter, Class] + # @return [generic] protected def item_type = @item_type_fn.call # @api private diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index da915e57..ef6e02a3 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -419,7 +419,7 @@ def read(max_len = nil, out_string = nil) # @api private # - # @param stream [String, IO, StringIO, Enumerable] + # @param stream [String, IO, StringIO, Enumerable] # @param blk [Proc] # # @yieldparam [String] @@ -434,7 +434,7 @@ class << self # @param blk [Proc] # # @yieldparam [Enumerator::Yielder] - # @return [Enumerable] + # @return [Enumerable] def writable_enum(&blk) Enumerator.new do |y| y.define_singleton_method(:write) do @@ -490,7 +490,7 @@ class << self # # @param body [Object] # - # @return [Array(String, Enumerable)] + # @return [Array(String, Enumerable)] private def encode_multipart_streaming(body) boundary = SecureRandom.urlsafe_base64(60) @@ -543,7 +543,7 @@ def encode_content(headers, body) # @api private # # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param stream [Enumerable] + # @param stream [Enumerable] # @param suppress_error [Boolean] # # @raise [JSON::ParserError] @@ -580,11 +580,11 @@ class << self # # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html # - # @param enum [Enumerable] + # @param enum [Enumerable] # @param external [Boolean] # @param close [Proc] # - # @return [Enumerable] + # @return [Enumerable] def fused_enum(enum, external: false, &close) fused = false iter = Enumerator.new do |y| @@ -610,7 +610,7 @@ def fused_enum(enum, external: false, &close) # @api private # - # @param enum [Enumerable, nil] + # @param enum [Enumerable, nil] def close_fused!(enum) return unless enum.is_a?(Enumerator) @@ -621,11 +621,11 @@ def close_fused!(enum) # @api private # - # @param enum [Enumerable, nil] + # @param enum [Enumerable, nil] # @param blk [Proc] # # @yieldparam [Enumerator::Yielder] - # @return [Enumerable] + # @return [Enumerable] def chain_fused(enum, &blk) iter = Enumerator.new { blk.call(_1) } fused_enum(iter) { close_fused!(enum) } @@ -635,9 +635,9 @@ def chain_fused(enum, &blk) class << self # @api private # - # @param enum [Enumerable] + # @param enum [Enumerable] # - # @return [Enumerable] + # @return [Enumerable] def decode_lines(enum) re = /(\r\n|\r|\n)/ buffer = String.new.b @@ -671,7 +671,7 @@ def decode_lines(enum) # # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream # - # @param lines [Enumerable] + # @param lines [Enumerable] # # @return [Hash{Symbol=>Object}] def decode_sse(lines) diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/lib/openai/internal/cursor_page.rbi index 7d728a09..5e926938 100644 --- a/rbi/lib/openai/internal/cursor_page.rbi +++ b/rbi/lib/openai/internal/cursor_page.rbi @@ -3,10 +3,10 @@ module OpenAI module Internal class CursorPage - include OpenAI::Internal::Type::BasePage - Elem = type_member + include OpenAI::Internal::Type::BasePage + sig { returns(T.nilable(T::Array[Elem])) } attr_accessor :data diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/lib/openai/internal/page.rbi index bb47228b..bb3ac0c1 100644 --- a/rbi/lib/openai/internal/page.rbi +++ b/rbi/lib/openai/internal/page.rbi @@ -3,10 +3,10 @@ module OpenAI module Internal class Page - include OpenAI::Internal::Type::BasePage - Elem = type_member + include 
OpenAI::Internal::Type::BasePage + sig { returns(T.nilable(T::Array[Elem])) } attr_accessor :data diff --git a/rbi/lib/openai/internal/stream.rbi b/rbi/lib/openai/internal/stream.rbi index 14bab753..7b7eff02 100644 --- a/rbi/lib/openai/internal/stream.rbi +++ b/rbi/lib/openai/internal/stream.rbi @@ -3,11 +3,11 @@ module OpenAI module Internal class Stream - include OpenAI::Internal::Type::BaseStream - Message = type_member(:in) { {fixed: OpenAI::Internal::Util::ServerSentEvent} } Elem = type_member(:out) + include OpenAI::Internal::Type::BaseStream + # @api private sig { override.returns(T::Enumerable[Elem]) } private def iterator diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index d01b1212..d326636b 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -40,7 +40,7 @@ module OpenAI sig(:final) do override .params(value: T.any( - T::Enumerable[Elem], + T::Array[T.anything], T.anything ), state: OpenAI::Internal::Type::Converter::State) @@ -52,7 +52,7 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Enumerable[Elem], T.anything)) + .params(value: T.any(T::Array[T.anything], T.anything)) .returns(T.any(T::Array[T.anything], T.anything)) end def dump(value) diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 617458ed..6f344cc2 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -16,11 +16,11 @@ module OpenAI def ==: (top other) -> bool def coerce: ( - Enumerable[Elem] | top value, + ::Array[top] | top value, state: OpenAI::Internal::Type::Converter::state ) -> (::Array[top] | top) - def dump: (Enumerable[Elem] | top value) -> (::Array[top] | top) + def dump: (::Array[top] | top value) -> (::Array[top] | top) def item_type: -> Elem From 40cf4b0635432ba809786627ac392ac7ef606138 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 01:13:52 +0000 Subject: [PATCH 095/295] chore: do not use literals for version in type definitions (#97) --- README.md | 4 ++++ rbi/lib/openai/version.rbi | 2 +- sig/openai/version.rbs | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a915e37f..19cfde3e 100644 --- a/README.md +++ b/README.md @@ -12,10 +12,14 @@ The underlying REST API documentation can be found on [platform.openai.com](http To use this gem, install via Bundler by adding the following to your application's `Gemfile`: + + ```ruby gem "openai", "~> 0.1.0.pre.alpha.3" ``` + + To fetch an initial copy of the gem: ```sh diff --git a/rbi/lib/openai/version.rbi b/rbi/lib/openai/version.rbi index 41e15730..8f6874c7 100644 --- a/rbi/lib/openai/version.rbi +++ b/rbi/lib/openai/version.rbi @@ -1,5 +1,5 @@ # typed: strong module OpenAI - VERSION = "0.1.0.pre.alpha.3" + VERSION = T.let(T.unsafe(nil), String) end diff --git a/sig/openai/version.rbs b/sig/openai/version.rbs index aa9ea54f..9d3c6077 100644 --- a/sig/openai/version.rbs +++ b/sig/openai/version.rbs @@ -1,3 +1,3 @@ module OpenAI - VERSION: "0.1.0.pre.alpha.3" + VERSION: String end From 09f45d49813fa21edee3147a62b6cff87c5e8531 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 21:01:30 +0000 Subject: [PATCH 096/295] chore: simplify internal utils (#98) --- lib/openai/internal/util.rb | 43 +++++++------------------ 
 rbi/lib/openai/internal/cursor_page.rbi |  4 +--
 rbi/lib/openai/internal/page.rbi        |  4 +--
 rbi/lib/openai/internal/util.rbi        | 12 ++++---
 sig/openai/internal/util.rbs            | 10 +++---
 5 files changed, 28 insertions(+), 45 deletions(-)

diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb
index ef6e02a3..91e225b8 100644
--- a/lib/openai/internal/util.rb
+++ b/lib/openai/internal/util.rb
@@ -70,13 +70,11 @@ def primitive?(input)

      # @api private
      #
-      # @param input [Object]
+      # @param input [String, Boolean]
      #
      # @return [Boolean, Object]
      def coerce_boolean(input)
        case input.is_a?(String) ? input.downcase : input
-        in Numeric
-          input.nonzero?
        in "true"
          true
        in "false"
@@ -88,7 +86,7 @@ def coerce_boolean(input)

      # @api private
      #
-      # @param input [Object]
+      # @param input [String, Boolean]
      #
      # @raise [ArgumentError]
      # @return [Boolean, nil]
@@ -103,34 +101,20 @@ def coerce_boolean!(input)

      # @api private
      #
-      # @param input [Object]
+      # @param input [String, Integer]
      #
      # @return [Integer, Object]
      def coerce_integer(input)
-        case input
-        in true
-          1
-        in false
-          0
-        else
-          Integer(input, exception: false) || input
-        end
+        Integer(input, exception: false) || input
      end

      # @api private
      #
-      # @param input [Object]
+      # @param input [String, Integer, Float]
      #
      # @return [Float, Object]
      def coerce_float(input)
-        case input
-        in true
-          1.0
-        in false
-          0.0
-        else
-          Float(input, exception: false) || input
-        end
+        Float(input, exception: false) || input
      end

      # @api private
@@ -159,12 +143,7 @@ class << self
      private def deep_merge_lr(lhs, rhs, concat: false)
        case [lhs, rhs, concat]
        in [Hash, Hash, _]
-          rhs_cleaned = rhs.reject { _2 == OpenAI::Internal::OMIT }
-          lhs
-            .reject { |key, _| rhs[key] == OpenAI::Internal::OMIT }
-            .merge(rhs_cleaned) do |_, old_val, new_val|
-              deep_merge_lr(old_val, new_val, concat: concat)
-            end
+          lhs.merge(rhs) { deep_merge_lr(_2, _3, concat: concat) }
        in [Array, Array, true]
          lhs.concat(rhs)
        else
@@ -362,7 +341,7 @@ def normalized_headers(*headers)
          value =
            case val
            in Array
-              val.map { _1.to_s.strip }.join(", ")
+              val.filter_map { _1&.to_s&.strip }.join(", ")
            else
              val&.to_s&.strip
            end
@@ -469,7 +448,7 @@ class << self
          case val
          in IO
            y << "Content-Type: application/octet-stream\r\n\r\n"
-            IO.copy_stream(val, y)
+            IO.copy_stream(val.tap(&:rewind), y)
          in StringIO
            y << "Content-Type: application/octet-stream\r\n\r\n"
            y << val.string
@@ -533,6 +512,8 @@ def encode_content(headers, body)
          boundary, strio = encode_multipart_streaming(body)
          headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"}
          [headers, strio]
+        in [_, IO]
+          [headers, body.tap(&:rewind)]
        in [_, StringIO]
          [headers, body.string]
        else
@@ -673,7 +654,7 @@ def decode_lines(enum)
      #
      # @param lines [Enumerable<String>]
      #
-      # @return [Hash{Symbol=>Object}]
+      # @return [Enumerable<Hash{Symbol=>Object}>]
      def decode_sse(lines)
        # rubocop:disable Metrics/BlockLength
        chain_fused(lines) do |y|
diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/lib/openai/internal/cursor_page.rbi
index 5e926938..7d728a09 100644
--- a/rbi/lib/openai/internal/cursor_page.rbi
+++ b/rbi/lib/openai/internal/cursor_page.rbi
@@ -3,10 +3,10 @@
 module OpenAI
   module Internal
     class CursorPage
-      Elem = type_member
-
       include OpenAI::Internal::Type::BasePage

+      Elem = type_member
+
       sig { returns(T.nilable(T::Array[Elem])) }
       attr_accessor :data

diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/lib/openai/internal/page.rbi
index bb3ac0c1..bb47228b 100644
--- a/rbi/lib/openai/internal/page.rbi
+++ b/rbi/lib/openai/internal/page.rbi
@@ -3,10 +3,10 @@
module OpenAI module Internal class Page - Elem = type_member - include OpenAI::Internal::Type::BasePage + Elem = type_member + sig { returns(T.nilable(T::Array[Elem])) } attr_accessor :data diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index 7abcdc1a..b943b0fd 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -28,22 +28,22 @@ module OpenAI end # @api private - sig { params(input: T.anything).returns(T.any(T::Boolean, T.anything)) } + sig { params(input: T.any(String, T::Boolean)).returns(T.any(T::Boolean, T.anything)) } def coerce_boolean(input) end # @api private - sig { params(input: T.anything).returns(T.nilable(T::Boolean)) } + sig { params(input: T.any(String, T::Boolean)).returns(T.nilable(T::Boolean)) } def coerce_boolean!(input) end # @api private - sig { params(input: T.anything).returns(T.any(Integer, T.anything)) } + sig { params(input: T.any(String, Integer)).returns(T.any(Integer, T.anything)) } def coerce_integer(input) end # @api private - sig { params(input: T.anything).returns(T.any(Float, T.anything)) } + sig { params(input: T.any(String, Integer, Float)).returns(T.any(Float, T.anything)) } def coerce_float(input) end @@ -271,7 +271,9 @@ module OpenAI # @api private # # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream - sig { params(lines: T::Enumerable[String]).returns(OpenAI::Internal::Util::ServerSentEvent) } + sig do + params(lines: T::Enumerable[String]).returns(T::Enumerable[OpenAI::Internal::Util::ServerSentEvent]) + end def decode_sse(lines) end end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 07e02294..1c93f8d7 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -9,13 +9,13 @@ module OpenAI def self?.primitive?: (top input) -> bool - def self?.coerce_boolean: (top input) -> (bool | top) + def self?.coerce_boolean: (String | bool input) -> (bool | top) - def self?.coerce_boolean!: (top input) -> bool? + def self?.coerce_boolean!: (String | bool input) -> bool? - def self?.coerce_integer: (top input) -> (Integer | top) + def self?.coerce_integer: (String | Integer input) -> (Integer | top) - def self?.coerce_float: (top input) -> (Float | top) + def self?.coerce_float: (String | Integer | Float input) -> (Float | top) def self?.coerce_hash: (top input) -> (::Hash[top, top] | top) @@ -133,7 +133,7 @@ module OpenAI def self?.decode_sse: ( Enumerable[String] lines - ) -> OpenAI::Internal::Util::server_sent_event + ) -> Enumerable[OpenAI::Internal::Util::server_sent_event] end end end From 2022a84d1cff5ed41709d1fe1e5356958513578c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 21:09:34 +0000 Subject: [PATCH 097/295] chore: misc sdk polish (#99) --- .rubocop.yml | 3 +++ README.md | 4 ++-- lib/openai/internal/type/base_model.rb | 9 ++++++--- lib/openai/internal/type/converter.rb | 2 ++ lib/openai/internal/type/enum.rb | 2 ++ 5 files changed, 15 insertions(+), 5 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index d66784a3..f5de5e94 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -202,6 +202,9 @@ Style/MethodCallWithArgsParentheses: Exclude: - "**/*.gemspec" +Style/MultilineBlockChain: + Enabled: false + # Perfectly fine. 
Style/MultipleComparison: Enabled: false diff --git a/README.md b/README.md index 19cfde3e..76732571 100644 --- a/README.md +++ b/README.md @@ -177,7 +177,7 @@ Due to limitations with the Sorbet type system, where a method otherwise can tak Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience. ```ruby -model = OpenAI::Models::Chat::CompletionCreateParams.new( +params = OpenAI::Models::Chat::CompletionCreateParams.new( messages: [{ role: "user", content: "Say this is a test" @@ -185,7 +185,7 @@ model = OpenAI::Models::Chat::CompletionCreateParams.new( model: "gpt-4o" ) -openai.chat.completions.create(**model) +openai.chat.completions.create(**params) ``` ## Advanced diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 5dde350e..d7dade74 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -168,7 +168,9 @@ def optional(name_sym, type_info, spec = {}) # @param other [Object] # # @return [Boolean] - def ==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields + def ==(other) + other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields + end end # @param other [Object] @@ -350,7 +352,8 @@ def initialize(data = {}) in Hash => coerced @data = coerced else - raise ArgumentError.new("Expected a #{Hash} or #{OpenAI::Internal::Type::BaseModel}, got #{data.inspect}") + message = "Expected a #{Hash} or #{OpenAI::Internal::Type::BaseModel}, got #{data.inspect}" + raise ArgumentError.new(message) end end @@ -358,7 +361,7 @@ def initialize(data = {}) def inspect rows = self.class.known_fields.keys.map do "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" - rescue OpenAI::ConversionError + rescue OpenAI::Errors::ConversionError "#{_1}=#{@data.fetch(_1)}" end "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index ee4589cf..b879a4ae 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -209,7 +209,9 @@ def coerce( # # @return [Object] def dump(target, value) + # rubocop:disable Layout/LineLength target.is_a?(OpenAI::Internal::Type::Converter) ? 
target.dump(value) : OpenAI::Internal::Type::Unknown.dump(value) + # rubocop:enable Layout/LineLength end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index de4e7a7a..5ba8860c 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -62,7 +62,9 @@ def ===(other) = values.include?(other) # # @return [Boolean] def ==(other) + # rubocop:disable Layout/LineLength other.is_a?(Module) && other.singleton_class <= OpenAI::Internal::Type::Enum && other.values.to_set == values.to_set + # rubocop:enable Layout/LineLength end # @api private From 4d1a9caccb26c0cb963ea307fc8f77eb2c32dff2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 4 Apr 2025 21:16:31 +0000 Subject: [PATCH 098/295] chore: document LSP support in read me (#100) --- README.md | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 76732571..5c71b3e6 100644 --- a/README.md +++ b/README.md @@ -164,9 +164,23 @@ openai.chat.completions.create( ## LSP Support -### Sorbet +### Solargraph + +This library includes [Solargraph](https://solargraph.org) support for both auto completion and go to definition. + +```ruby +gem "solargraph", group: :development +``` + +After Solargraph is installed, **you must populate its index** either via the provided editor command, or by running the following in your terminal: + +```sh +bundle exec solargraph gems +``` -**This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality. +Otherwise Solargraph will not be able to provide type information or auto-completion for any non-indexed libraries. + +### Sorbet This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`. @@ -188,6 +202,12 @@ params = OpenAI::Models::Chat::CompletionCreateParams.new( openai.chat.completions.create(**params) ``` +Note: **This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality. + +### Ruby LSP + +The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information. 
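For readers skimming these README patches: because the params classes are plain data carriers, the Sorbet-friendly pattern above is interchangeable with a bare hash. A sketch, assuming `openai` is an `OpenAI::Client` configured as in the README examples:

```ruby
# Hash-based equivalent of the CompletionCreateParams example above.
openai.chat.completions.create(
  messages: [{role: "user", content: "Say this is a test"}],
  model: "gpt-4o"
)
```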
+ ## Advanced ### Making custom/undocumented requests From 54defb32cde5c6f777ca846bf76566857bad6e9c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 05:19:30 +0000 Subject: [PATCH 099/295] feat: support query, header, and body params that have identical names (#101) --- lib/openai/internal/type/base_model.rb | 9 ++++++--- lib/openai/resources/fine_tuning/jobs.rb | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index d7dade74..1f4f669f 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -258,6 +258,7 @@ def dump(value) return super end + is_param = singleton_class <= OpenAI::Internal::Type::RequestParameters::Converter acc = {} coerced.each do |key, val| @@ -266,19 +267,21 @@ def dump(value) in nil acc.store(name, super(val)) else - mode, api_name, type_fn = field.fetch_values(:mode, :api_name, :type_fn) + mode, type_fn = field.fetch_values(:mode, :type_fn) case mode in :coerce next else target = type_fn.call + api_name = is_param ? name : field.fetch(:api_name) acc.store(api_name, OpenAI::Internal::Type::Converter.dump(target, val)) end end end - known_fields.each_value do |field| - mode, api_name, const = field.fetch_values(:mode, :api_name, :const) + known_fields.each do |name, field| + mode, const = field.fetch_values(:mode, :const) + api_name = is_param ? name : field.fetch(:api_name) next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Internal::OMIT acc.store(api_name, const) end diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 8e28f8c7..8032932a 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -36,7 +36,7 @@ def create(params) @client.request( method: :post, path: "fine_tuning/jobs", - body: parsed, + body: parsed.transform_keys(method_: :method), model: OpenAI::Models::FineTuning::FineTuningJob, options: options ) From d015f63034ba1af3ae5a4be121947c129fa93e0e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 13:47:05 +0000 Subject: [PATCH 100/295] feat: allow all valid `JSON` types to be encoded (#102) --- lib/openai/internal/util.rb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 91e225b8..4b45e162 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -61,7 +61,7 @@ class << self # @return [Boolean] def primitive?(input) case input - in true | false | Integer | Float | Symbol | String + in true | false | Numeric | Symbol | String true else false @@ -504,7 +504,7 @@ class << self def encode_content(headers, body) content_type = headers["content-type"] case [content_type, body] - in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array] + in [%r{^application/(?:vnd\.api\+)?json}, _] unless body.nil? 
[headers, JSON.fast_generate(body)] in [%r{^application/(?:x-)?jsonl}, Enumerable] [headers, body.lazy.map { JSON.fast_generate(_1) }] @@ -516,6 +516,8 @@ def encode_content(headers, body) [headers, body.tap(&:rewind)] in [_, StringIO] [headers, body.string] + in [_, Symbol | Numeric] + [headers, body.to_s] else [headers, body] end From f6ba21f3829251e7abe34b94b21cbccf5d9f4af1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 13:55:02 +0000 Subject: [PATCH 101/295] chore: rename confusing `Type::BooleanModel` to `Type::Boolean` (#103) --- lib/openai/internal/type/boolean_model.rb | 4 +-- lib/openai/internal/type/converter.rb | 2 +- lib/openai/models/beta/assistant_deleted.rb | 2 +- .../models/beta/assistant_stream_event.rb | 2 +- .../beta/thread_create_and_run_params.rb | 2 +- lib/openai/models/beta/thread_deleted.rb | 2 +- lib/openai/models/beta/thread_stream_event.rb | 2 +- .../models/beta/threads/message_deleted.rb | 2 +- lib/openai/models/beta/threads/run.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../models/chat/chat_completion_deleted.rb | 2 +- .../chat/chat_completion_stream_options.rb | 2 +- .../models/chat/completion_create_params.rb | 6 ++-- lib/openai/models/comparison_filter.rb | 2 +- lib/openai/models/completion_create_params.rb | 2 +- lib/openai/models/file_deleted.rb | 2 +- lib/openai/models/function_definition.rb | 2 +- lib/openai/models/model_deleted.rb | 2 +- lib/openai/models/moderation.rb | 35 ++++++++----------- .../models/response_format_json_schema.rb | 2 +- lib/openai/models/responses/function_tool.rb | 2 +- lib/openai/models/responses/response.rb | 2 +- .../responses/response_create_params.rb | 4 +-- .../response_file_search_tool_call.rb | 2 +- ...response_format_text_json_schema_config.rb | 2 +- .../models/responses/response_item_list.rb | 2 +- lib/openai/models/vector_store_deleted.rb | 2 +- .../models/vector_store_search_params.rb | 2 +- .../models/vector_store_search_response.rb | 2 +- .../vector_stores/file_batch_create_params.rb | 2 +- .../vector_stores/file_create_params.rb | 2 +- .../vector_stores/file_update_params.rb | 2 +- .../models/vector_stores/vector_store_file.rb | 2 +- .../vector_store_file_deleted.rb | 2 +- .../openai/internal/type/boolean_model.rbi | 2 +- sig/openai/internal/type/boolean_model.rbs | 2 +- test/openai/internal/type/base_model_test.rb | 14 ++++---- test/openai/resources/beta/assistants_test.rb | 2 +- .../resources/beta/threads/messages_test.rb | 2 +- .../resources/beta/threads/runs_test.rb | 12 +++---- test/openai/resources/beta/threads_test.rb | 4 +-- .../openai/resources/chat/completions_test.rb | 2 +- test/openai/resources/files_test.rb | 2 +- test/openai/resources/models_test.rb | 2 +- test/openai/resources/responses_test.rb | 4 +-- .../resources/vector_stores/files_test.rb | 2 +- test/openai/resources/vector_stores_test.rb | 2 +- 47 files changed, 77 insertions(+), 84 deletions(-) diff --git a/lib/openai/internal/type/boolean_model.rb b/lib/openai/internal/type/boolean_model.rb index cf1dcf00..54dadb7f 100644 --- a/lib/openai/internal/type/boolean_model.rb +++ b/lib/openai/internal/type/boolean_model.rb @@ -8,7 +8,7 @@ module Type # @abstract # # Ruby has no Boolean class; this is something for models to refer to. 
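Two of the patches above lend themselves to quick sketches. First, the identically-named-params change (#101): a body field whose wire name collides with a reserved name is declared with a trailing underscore in Ruby and re-keyed at request time, which is why the fine-tuning resource now sends `parsed.transform_keys(method_: :method)`. An illustrative call (the model, file ID, and `method_` value shape are assumptions, not taken from the patch):

```ruby
# `method_:` is the Ruby-side name; transform_keys maps it back to the
# "method" key in the JSON body. Values below are placeholders.
openai.fine_tuning.jobs.create(
  model: "gpt-4o-mini",
  training_file: "file-abc123",
  method_: {type: :supervised} # assumed shape, for illustration only
)
```

Second, the broadened `encode_content` (#102) now serializes any non-nil body under a JSON content type, and stringifies bare symbols and numbers elsewhere. A sketch of the dispatch, per the hunk above:

```ruby
headers = {"content-type" => "application/json"}
OpenAI::Internal::Util.encode_content(headers, {a: 1}) # => [headers, "{\"a\":1}"]
OpenAI::Internal::Util.encode_content(headers, 42)     # => [headers, "42"]

OpenAI::Internal::Util.encode_content({"content-type" => "text/plain"}, :ok)
# => [{"content-type" => "text/plain"}, "ok"]
```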
- class BooleanModel + class Boolean extend OpenAI::Internal::Type::Converter # @param other [Object] @@ -19,7 +19,7 @@ def self.===(other) = other == true || other == false # @param other [Object] # # @return [Boolean] - def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::BooleanModel + def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::Boolean class << self # @api private diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index b879a4ae..ad45fc3f 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -64,7 +64,7 @@ def type_info(spec) in Hash type_info(spec.slice(:const, :enum, :union).first&.last) in true | false - -> { OpenAI::Internal::Type::BooleanModel } + -> { OpenAI::Internal::Type::Boolean } in OpenAI::Internal::Type::Converter | Class | Symbol -> { spec } in NilClass | Integer | Float diff --git a/lib/openai/models/beta/assistant_deleted.rb b/lib/openai/models/beta/assistant_deleted.rb index f4af5c72..61cba341 100644 --- a/lib/openai/models/beta/assistant_deleted.rb +++ b/lib/openai/models/beta/assistant_deleted.rb @@ -13,7 +13,7 @@ class AssistantDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 9bd4aec3..b46d6568 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -126,7 +126,7 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # Whether to enable input audio transcription. # # @return [Boolean, nil] - optional :enabled, OpenAI::Internal::Type::BooleanModel + optional :enabled, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 03e401db..38c89eea 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -72,7 +72,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/beta/thread_deleted.rb b/lib/openai/models/beta/thread_deleted.rb index 5a290344..6bd4b0db 100644 --- a/lib/openai/models/beta/thread_deleted.rb +++ b/lib/openai/models/beta/thread_deleted.rb @@ -13,7 +13,7 @@ class ThreadDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index 195b3b77..416358fd 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -20,7 +20,7 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Whether to enable input audio transcription. 
# # @return [Boolean, nil] - optional :enabled, OpenAI::Internal::Type::BooleanModel + optional :enabled, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/beta/threads/message_deleted.rb b/lib/openai/models/beta/threads/message_deleted.rb index da791175..07513949 100644 --- a/lib/openai/models/beta/threads/message_deleted.rb +++ b/lib/openai/models/beta/threads/message_deleted.rb @@ -14,7 +14,7 @@ class MessageDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 9377b199..3c22831f 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -118,7 +118,7 @@ class Run < OpenAI::Internal::Type::BaseModel # during tool use. # # @return [Boolean] - required :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel + required :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!attribute required_action # Details on the action required to continue the run. Will be `null` if no action diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 612590b0..bb191666 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -107,7 +107,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index a743476a..33c9c802 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -15,7 +15,7 @@ class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # Whether the chat completion was deleted. # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # The type of object being deleted. diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index e7b99c89..7b23b7a2 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -14,7 +14,7 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # chunk which contains the total token usage for the request. # # @return [Boolean, nil] - optional :include_usage, OpenAI::Internal::Type::BooleanModel + optional :include_usage, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 792691a9..e8aed051 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -104,7 +104,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `message`. 
# # @return [Boolean, nil] - optional :logprobs, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :logprobs, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute max_completion_tokens # An upper bound for the number of tokens that can be generated for a completion, @@ -168,7 +168,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # during tool use. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel + optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] @@ -261,7 +261,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # or [evals](https://platform.openai.com/docs/guides/evals) products. # # @return [Boolean, nil] - optional :store, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :store, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2a1f6511..1c2b3a50 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -79,7 +79,7 @@ module Value variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 63d127ca..c8920c94 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -49,7 +49,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Echo back the prompt in addition to the completion # # @return [Boolean, nil] - optional :echo, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :echo, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute frequency_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on their diff --git a/lib/openai/models/file_deleted.rb b/lib/openai/models/file_deleted.rb index a391a385..67ab7b27 100644 --- a/lib/openai/models/file_deleted.rb +++ b/lib/openai/models/file_deleted.rb @@ -12,7 +12,7 @@ class FileDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index c261b034..e024d931 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -45,7 +45,7 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # [function calling guide](docs/guides/function-calling). 
# # @return [Boolean, nil] - optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!parse # # @param name [String] diff --git a/lib/openai/models/model_deleted.rb b/lib/openai/models/model_deleted.rb index b35f6781..82476c3e 100644 --- a/lib/openai/models/model_deleted.rb +++ b/lib/openai/models/model_deleted.rb @@ -12,7 +12,7 @@ class ModelDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 88fd634b..4fd434ab 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -25,7 +25,7 @@ class Moderation < OpenAI::Internal::Type::BaseModel # Whether any of the below categories are flagged. # # @return [Boolean] - required :flagged, OpenAI::Internal::Type::BooleanModel + required :flagged, OpenAI::Internal::Type::Boolean # @!parse # # @param categories [OpenAI::Models::Moderation::Categories] @@ -44,16 +44,14 @@ class Categories < OpenAI::Internal::Type::BaseModel # target. # # @return [Boolean] - required :harassment, OpenAI::Internal::Type::BooleanModel + required :harassment, OpenAI::Internal::Type::Boolean # @!attribute harassment_threatening # Harassment content that also includes violence or serious harm towards any # target. # # @return [Boolean] - required :harassment_threatening, - OpenAI::Internal::Type::BooleanModel, - api_name: :"harassment/threatening" + required :harassment_threatening, OpenAI::Internal::Type::Boolean, api_name: :"harassment/threatening" # @!attribute hate # Content that expresses, incites, or promotes hate based on race, gender, @@ -62,7 +60,7 @@ class Categories < OpenAI::Internal::Type::BaseModel # harassment. # # @return [Boolean] - required :hate, OpenAI::Internal::Type::BooleanModel + required :hate, OpenAI::Internal::Type::Boolean # @!attribute hate_threatening # Hateful content that also includes violence or serious harm towards the targeted @@ -70,7 +68,7 @@ class Categories < OpenAI::Internal::Type::BaseModel # orientation, disability status, or caste. # # @return [Boolean] - required :hate_threatening, OpenAI::Internal::Type::BooleanModel, api_name: :"hate/threatening" + required :hate_threatening, OpenAI::Internal::Type::Boolean, api_name: :"hate/threatening" # @!attribute illicit # Content that includes instructions or advice that facilitate the planning or @@ -78,7 +76,7 @@ class Categories < OpenAI::Internal::Type::BaseModel # illicit acts. For example, "how to shoplift" would fit this category. # # @return [Boolean, nil] - required :illicit, OpenAI::Internal::Type::BooleanModel, nil?: true + required :illicit, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute illicit_violent # Content that includes instructions or advice that facilitate the planning or @@ -86,17 +84,14 @@ class Categories < OpenAI::Internal::Type::BaseModel # instruction on the procurement of any weapon. # # @return [Boolean, nil] - required :illicit_violent, - OpenAI::Internal::Type::BooleanModel, - api_name: :"illicit/violent", - nil?: true + required :illicit_violent, OpenAI::Internal::Type::Boolean, api_name: :"illicit/violent", nil?: true # @!attribute self_harm # Content that promotes, encourages, or depicts acts of self-harm, such as # suicide, cutting, and eating disorders. 
# # @return [Boolean] - required :self_harm, OpenAI::Internal::Type::BooleanModel, api_name: :"self-harm" + required :self_harm, OpenAI::Internal::Type::Boolean, api_name: :"self-harm" # @!attribute self_harm_instructions # Content that encourages performing acts of self-harm, such as suicide, cutting, @@ -104,16 +99,14 @@ class Categories < OpenAI::Internal::Type::BaseModel # acts. # # @return [Boolean] - required :self_harm_instructions, - OpenAI::Internal::Type::BooleanModel, - api_name: :"self-harm/instructions" + required :self_harm_instructions, OpenAI::Internal::Type::Boolean, api_name: :"self-harm/instructions" # @!attribute self_harm_intent # Content where the speaker expresses that they are engaging or intend to engage # in acts of self-harm, such as suicide, cutting, and eating disorders. # # @return [Boolean] - required :self_harm_intent, OpenAI::Internal::Type::BooleanModel, api_name: :"self-harm/intent" + required :self_harm_intent, OpenAI::Internal::Type::Boolean, api_name: :"self-harm/intent" # @!attribute sexual # Content meant to arouse sexual excitement, such as the description of sexual @@ -121,25 +114,25 @@ class Categories < OpenAI::Internal::Type::BaseModel # wellness). # # @return [Boolean] - required :sexual, OpenAI::Internal::Type::BooleanModel + required :sexual, OpenAI::Internal::Type::Boolean # @!attribute sexual_minors # Sexual content that includes an individual who is under 18 years old. # # @return [Boolean] - required :sexual_minors, OpenAI::Internal::Type::BooleanModel, api_name: :"sexual/minors" + required :sexual_minors, OpenAI::Internal::Type::Boolean, api_name: :"sexual/minors" # @!attribute violence # Content that depicts death, violence, or physical injury. # # @return [Boolean] - required :violence, OpenAI::Internal::Type::BooleanModel + required :violence, OpenAI::Internal::Type::Boolean # @!attribute violence_graphic # Content that depicts death, violence, or physical injury in graphic detail. # # @return [Boolean] - required :violence_graphic, OpenAI::Internal::Type::BooleanModel, api_name: :"violence/graphic" + required :violence_graphic, OpenAI::Internal::Type::Boolean, api_name: :"violence/graphic" # @!parse # # A list of the categories, and whether they are flagged or not. diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 71050de4..0bb1f00c 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -66,7 +66,7 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] - optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!parse # # Structured Outputs configuration options, including a JSON Schema. diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index ce5b34a5..596b365a 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -20,7 +20,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # Whether to enforce strict parameter validation. Default `true`. # # @return [Boolean] - required :strict, OpenAI::Internal::Type::BooleanModel + required :strict, OpenAI::Internal::Type::Boolean # @!attribute type # The type of the function tool. Always `function`. 
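The rename itself is mechanical, but the diffs above illustrate the field DSL well. A sketch with a made-up model class (not one from the SDK):

```ruby
# Declaring boolean fields with the renamed constant; `api_name:` maps a
# Ruby-friendly name onto the wire name, as in the Moderation diffs above.
class ExampleDeleted < OpenAI::Internal::Type::BaseModel
  required :deleted, OpenAI::Internal::Type::Boolean
  required :self_harm, OpenAI::Internal::Type::Boolean, api_name: :"self-harm"
  optional :strict, OpenAI::Internal::Type::Boolean, nil?: true
end

# Because Boolean.=== matches true and false, the constant also works as a
# pattern, which is how the updated tests assert response shapes:
{deleted: true} => {deleted: OpenAI::Internal::Type::Boolean}
```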
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index afcc7eec..844833f7 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -86,7 +86,7 @@ class Response < OpenAI::Internal::Type::BaseModel # Whether to allow the model to run tool calls in parallel. # # @return [Boolean] - required :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel + required :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 7015d62b..38550327 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -84,7 +84,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Whether to allow the model to run tool calls in parallel. # # @return [Boolean, nil] - optional :parallel_tool_calls, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute previous_response_id # The unique ID of the previous response to the model. Use this to create @@ -107,7 +107,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Whether to store the generated model response for later retrieval via API. # # @return [Boolean, nil] - optional :store, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :store, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index dec92ac0..c8afd7e5 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -143,7 +143,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index f0404403..0fac4e6d 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -43,7 +43,7 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] - optional :strict, OpenAI::Internal::Type::BooleanModel, nil?: true + optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!parse # # JSON Schema response format. Used to generate structured JSON responses. Learn diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 5c05dab2..d5f234c7 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -20,7 +20,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # Whether there are more items available. 
# # @return [Boolean] - required :has_more, OpenAI::Internal::Type::BooleanModel + required :has_more, OpenAI::Internal::Type::Boolean # @!attribute last_id # The ID of the last item in the list. diff --git a/lib/openai/models/vector_store_deleted.rb b/lib/openai/models/vector_store_deleted.rb index c3b6e2ab..b1624e84 100644 --- a/lib/openai/models/vector_store_deleted.rb +++ b/lib/openai/models/vector_store_deleted.rb @@ -12,7 +12,7 @@ class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index e967edbe..6325952c 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -49,7 +49,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # Whether to rewrite the natural language query for vector search. # # @return [Boolean, nil] - optional :rewrite_query, OpenAI::Internal::Type::BooleanModel + optional :rewrite_query, OpenAI::Internal::Type::Boolean # @!parse # # @return [Boolean] diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 785e82a6..61475c54 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -59,7 +59,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 4140d9b9..e6dd47c8 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -57,7 +57,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index f74f4ce2..37899d8f 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -57,7 +57,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 0cea5737..7f72b454 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -42,7 +42,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # @!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 12ed8106..7845ce47 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -176,7 +176,7 @@ module Attribute variant Float - variant OpenAI::Internal::Type::BooleanModel + variant OpenAI::Internal::Type::Boolean # 
@!parse # # @return [Array(String, Float, Boolean)] diff --git a/lib/openai/models/vector_stores/vector_store_file_deleted.rb b/lib/openai/models/vector_stores/vector_store_file_deleted.rb index 13f5c954..f7992170 100644 --- a/lib/openai/models/vector_stores/vector_store_file_deleted.rb +++ b/lib/openai/models/vector_stores/vector_store_file_deleted.rb @@ -13,7 +13,7 @@ class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel # @!attribute deleted # # @return [Boolean] - required :deleted, OpenAI::Internal::Type::BooleanModel + required :deleted, OpenAI::Internal::Type::Boolean # @!attribute object # diff --git a/rbi/lib/openai/internal/type/boolean_model.rbi b/rbi/lib/openai/internal/type/boolean_model.rbi index ab09cc33..713967f7 100644 --- a/rbi/lib/openai/internal/type/boolean_model.rbi +++ b/rbi/lib/openai/internal/type/boolean_model.rbi @@ -6,7 +6,7 @@ module OpenAI # @api private # # Ruby has no Boolean class; this is something for models to refer to. - class BooleanModel + class Boolean extend OpenAI::Internal::Type::Converter abstract! diff --git a/sig/openai/internal/type/boolean_model.rbs b/sig/openai/internal/type/boolean_model.rbs index 3bfa2a59..cfc97bb1 100644 --- a/sig/openai/internal/type/boolean_model.rbs +++ b/sig/openai/internal/type/boolean_model.rbs @@ -1,7 +1,7 @@ module OpenAI module Internal module Type - class BooleanModel + class Boolean extend OpenAI::Internal::Type::Converter def self.===: (top other) -> bool diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index 0b854098..a80e2c16 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -22,7 +22,7 @@ class B < OpenAI::Internal::Type::BaseModel def test_typing converters = [ OpenAI::Internal::Type::Unknown, - OpenAI::Internal::Type::BooleanModel, + OpenAI::Internal::Type::Boolean, A, H, E, @@ -42,8 +42,8 @@ def test_coerce [OpenAI::Internal::Type::Unknown, :a] => [{yes: 1}, :a], [NilClass, :a] => [{maybe: 1}, nil], [NilClass, nil] => [{yes: 1}, nil], - [OpenAI::Internal::Type::BooleanModel, true] => [{yes: 1}, true], - [OpenAI::Internal::Type::BooleanModel, "true"] => [{no: 1}, "true"], + [OpenAI::Internal::Type::Boolean, true] => [{yes: 1}, true], + [OpenAI::Internal::Type::Boolean, "true"] => [{no: 1}, "true"], [Integer, 1] => [{yes: 1}, 1], [Integer, 1.0] => [{maybe: 1}, 1], [Integer, "1"] => [{maybe: 1}, 1], @@ -85,8 +85,8 @@ def test_dump [String, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, [:b, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, [nil, B.new(a: "one", b: B.new(a: 1.0))] => {a: "one", b: {a: 1}}, - [OpenAI::Internal::Type::BooleanModel, true] => true, - [OpenAI::Internal::Type::BooleanModel, "true"] => "true", + [OpenAI::Internal::Type::Boolean, true] => true, + [OpenAI::Internal::Type::Boolean, "true"] => "true", [Integer, "1"] => "1", [Float, 1] => 1, [String, "one"] => "one", @@ -560,8 +560,8 @@ class M3 < M2 def test_equality cases = { [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::Unknown] => true, - [OpenAI::Internal::Type::BooleanModel, OpenAI::Internal::Type::BooleanModel] => true, - [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::BooleanModel] => false, + [OpenAI::Internal::Type::Boolean, OpenAI::Internal::Type::Boolean] => true, + [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::Boolean] => false, [E1, E2] => true, [E1, E3] => false, [M1, M2] => false, diff --git 
a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index 0e89f80e..e7c59205 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -124,7 +124,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end diff --git a/test/openai/resources/beta/threads/messages_test.rb b/test/openai/resources/beta/threads/messages_test.rb index 5dec984b..e9d4d46c 100644 --- a/test/openai/resources/beta/threads/messages_test.rb +++ b/test/openai/resources/beta/threads/messages_test.rb @@ -128,7 +128,7 @@ def test_delete_required_params assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 749bb638..2ada16a3 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -27,7 +27,7 @@ def test_create_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, @@ -67,7 +67,7 @@ def test_retrieve_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, @@ -107,7 +107,7 @@ def test_update_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, @@ -154,7 +154,7 @@ def test_list metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, @@ -194,7 +194,7 @@ def test_cancel_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, @@ -235,7 +235,7 @@ def test_submit_tool_outputs_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + 
parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index f702b5ce..da2960dc 100644 --- a/test/openai/resources/beta/threads_test.rb +++ b/test/openai/resources/beta/threads_test.rb @@ -67,7 +67,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end @@ -97,7 +97,7 @@ def test_create_and_run_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index 8eab790c..0ff3c51e 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -105,7 +105,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index 78bfa904..d18f31f4 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -86,7 +86,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end diff --git a/test/openai/resources/models_test.rb b/test/openai/resources/models_test.rb index 06dac749..291a9a89 100644 --- a/test/openai/resources/models_test.rb +++ b/test/openai/resources/models_test.rb @@ -54,7 +54,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: String } end diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 3b452659..dd6971e5 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -21,7 +21,7 @@ def test_create_required_params model: OpenAI::Models::ResponsesModel, object: Symbol, output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, temperature: Float | nil, tool_choice: OpenAI::Models::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]), @@ -56,7 +56,7 @@ def test_retrieve model: OpenAI::Models::ResponsesModel, object: Symbol, output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), - parallel_tool_calls: OpenAI::Internal::Type::BooleanModel, + parallel_tool_calls: OpenAI::Internal::Type::Boolean, temperature: Float | nil, tool_choice: OpenAI::Models::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]), diff --git 
a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb index 279fda94..b7573f30 100644 --- a/test/openai/resources/vector_stores/files_test.rb +++ b/test/openai/resources/vector_stores/files_test.rb @@ -113,7 +113,7 @@ def test_delete_required_params assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index 1a45528c..a68725ca 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -116,7 +116,7 @@ def test_delete assert_pattern do response => { id: String, - deleted: OpenAI::Internal::Type::BooleanModel, + deleted: OpenAI::Internal::Type::Boolean, object: Symbol } end From 1280e3281b6d5e60dc35b898614cf713b5d5e085 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 14:53:46 +0000 Subject: [PATCH 102/295] fix: converter should transform stringio into string where applicable (#104) --- lib/openai.rb | 2 +- lib/openai/internal/type/{boolean_model.rb => boolean.rb} | 0 lib/openai/internal/type/converter.rb | 3 +++ .../openai/internal/type/{boolean_model.rbi => boolean.rbi} | 0 sig/openai/internal/type/{boolean_model.rbs => boolean.rbs} | 0 test/openai/resources/audio/speech_test.rb | 2 ++ 6 files changed, 6 insertions(+), 1 deletion(-) rename lib/openai/internal/type/{boolean_model.rb => boolean.rb} (100%) rename rbi/lib/openai/internal/type/{boolean_model.rbi => boolean.rbi} (100%) rename sig/openai/internal/type/{boolean_model.rbs => boolean.rbs} (100%) diff --git a/lib/openai.rb b/lib/openai.rb index 9b3ac42c..775fe43b 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -39,7 +39,7 @@ require_relative "openai/internal/util" require_relative "openai/internal/type/converter" require_relative "openai/internal/type/unknown" -require_relative "openai/internal/type/boolean_model" +require_relative "openai/internal/type/boolean" require_relative "openai/internal/type/enum" require_relative "openai/internal/type/union" require_relative "openai/internal/type/array_of" diff --git a/lib/openai/internal/type/boolean_model.rb b/lib/openai/internal/type/boolean.rb similarity index 100% rename from lib/openai/internal/type/boolean_model.rb rename to lib/openai/internal/type/boolean.rb diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index ad45fc3f..394eccae 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -168,6 +168,9 @@ def coerce( in String | Symbol | Numeric exactness[value.is_a?(Numeric) ? 
:maybe : :yes] += 1 return value.to_s + in StringIO + exactness[:yes] += 1 + return value.string else if strictness == :strong message = "no implicit conversion of #{value.class} into #{target.inspect}" diff --git a/rbi/lib/openai/internal/type/boolean_model.rbi b/rbi/lib/openai/internal/type/boolean.rbi similarity index 100% rename from rbi/lib/openai/internal/type/boolean_model.rbi rename to rbi/lib/openai/internal/type/boolean.rbi diff --git a/sig/openai/internal/type/boolean_model.rbs b/sig/openai/internal/type/boolean.rbs similarity index 100% rename from sig/openai/internal/type/boolean_model.rbs rename to sig/openai/internal/type/boolean.rbs diff --git a/test/openai/resources/audio/speech_test.rb b/test/openai/resources/audio/speech_test.rb index e1588426..f50614b3 100644 --- a/test/openai/resources/audio/speech_test.rb +++ b/test/openai/resources/audio/speech_test.rb @@ -4,6 +4,8 @@ class OpenAI::Test::Resources::Audio::SpeechTest < OpenAI::Test::ResourceTest def test_create_required_params + skip("skipped: test server currently has no support for method content-type") + response = @openai.audio.speech.create(input: "input", model: :"tts-1", voice: :alloy) assert_pattern do From abad8d507e51a70dacd320e7d264f0ee680d2574 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 16:16:45 +0000 Subject: [PATCH 103/295] chore(internal): misc small improvements (#105) --- test/openai/internal/type/base_model_test.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index a80e2c16..27178ef8 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -154,11 +154,11 @@ module E4 def test_coerce cases = { - # rubocop:disable Style/BooleanSymbol + # rubocop:disable Lint/BooleanSymbol [E1, true] => [{yes: 1}, true], [E1, false] => [{no: 1}, false], [E1, :true] => [{no: 1}, :true], - # rubocop:enable Style/BooleanSymbol + # rubocop:enable Lint/BooleanSymbol [E2, 1] => [{yes: 1}, 1], [E2, 1.0] => [{yes: 1}, 1], From d50e6c01569695cad4d7d2bd01ce145db227c83e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 17:47:25 +0000 Subject: [PATCH 104/295] chore(internal): run rubocop linter in parallel (#106) --- .rubocop.yml | 6 +++--- Rakefile | 16 +++++++--------- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index f5de5e94..c4120399 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -39,7 +39,7 @@ Layout/LineLength: AllowedPatterns: - "^\\s*#.*$" - ^require(_relative)? - - "OpenAI::(Models|Resources)::" + - "OpenAI::(Models|Resources|Test)::" Max: 110 Layout/MultilineArrayLineBreaks: @@ -122,9 +122,9 @@ Metrics/PerceivedComplexity: Naming/BlockForwarding: Enabled: false +# Underscores are generally useful for disambiguation. 
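A note on the converter fix (#104) above: with the new `StringIO` branch, a `StringIO` coerced against a `String` target unwraps to its contents instead of failing conversion. A sketch (the call site is illustrative and the `state:` bookkeeping is elided):

```ruby
# Illustrative only: coercing a StringIO against a String target now yields
# the underlying string via value.string.
io = StringIO.new("hello world")
OpenAI::Internal::Type::Converter.coerce(String, io) # => "hello world"
```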
Naming/ClassAndModuleCamelCase: - Exclude: - - "**/*.rbi" + Enabled: false Naming/MethodParameterName: Enabled: false diff --git a/Rakefile b/Rakefile index 92963d6a..990cc38f 100644 --- a/Rakefile +++ b/Rakefile @@ -9,8 +9,6 @@ require "rubocop/rake_task" CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/]) -xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] - multitask(default: [:test]) multitask(:test) do @@ -22,17 +20,17 @@ multitask(:test) do ruby(*%w[-w -e], rb, verbose: false) { fail unless _1 } end -RuboCop::RakeTask.new(:rubocop) do |t| - t.options = %w[--fail-level E] - if ENV.key?("CI") - t.options += %w[--format github] - end +rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] +xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] + +multitask(:rubocop) do + lint = xargs + %w[rubocop --fail-level E] + (ENV.key?("CI") ? %w[--format github] : []) + sh("#{rubo_find.shelljoin} | #{lint.shelljoin}") end multitask(:ruboformat) do - find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] - sh("#{find.shelljoin} | #{fmt.shelljoin}") + sh("#{rubo_find.shelljoin} | #{fmt.shelljoin}") end multitask(:syntax_tree) do From e0225db51ea62b2f15a886975006f2879045e0bf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 18:14:19 +0000 Subject: [PATCH 105/295] chore(internal): rubocop rules (#107) --- .rubocop.yml | 5 +++-- lib/openai/internal/type/converter.rb | 2 -- lib/openai/internal/util.rb | 4 ---- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index c4120399..8a64a238 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -39,6 +39,8 @@ Layout/LineLength: AllowedPatterns: - "^\\s*#.*$" - ^require(_relative)? 
+ - "OpenAI::Internal::Type::BaseModel$" + - "^\\s*[A-Z0-9_]+ = :" - "OpenAI::(Models|Resources|Test)::" Max: 110 @@ -109,8 +111,7 @@ Metrics/MethodLength: Enabled: false Metrics/ModuleLength: - Exclude: - - "**/*.rbi" + Enabled: false Metrics/ParameterLists: Enabled: false diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 394eccae..1fbf548f 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -3,7 +3,6 @@ module OpenAI module Internal module Type - # rubocop:disable Metrics/ModuleLength # @api private module Converter # rubocop:disable Lint/UnusedMethodArgument @@ -218,7 +217,6 @@ def dump(target, value) end end end - # rubocop:enable Metrics/ModuleLength end end end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 4b45e162..7fa9d32d 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -2,8 +2,6 @@ module OpenAI module Internal - # rubocop:disable Metrics/ModuleLength - # @api private module Util # @api private @@ -694,7 +692,5 @@ def decode_sse(lines) end end end - - # rubocop:enable Metrics/ModuleLength end end From 3727aaea6825f13037cd5737d14f4d3721dc5208 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 23:26:07 +0000 Subject: [PATCH 106/295] chore: always fold up method bodies in sorbet type definitions (#108) --- rbi/lib/openai/client.rbi | 6 +- rbi/lib/openai/errors.rbi | 9 +- rbi/lib/openai/internal/cursor_page.rbi | 3 +- rbi/lib/openai/internal/page.rbi | 3 +- rbi/lib/openai/internal/stream.rbi | 3 +- .../openai/internal/transport/base_client.rbi | 38 +-- .../transport/pooled_net_requester.rbi | 18 +- rbi/lib/openai/internal/type/array_of.rbi | 24 +- rbi/lib/openai/internal/type/base_model.rbi | 77 ++---- rbi/lib/openai/internal/type/base_page.rbi | 15 +- rbi/lib/openai/internal/type/base_stream.rbi | 15 +- rbi/lib/openai/internal/type/boolean.rbi | 12 +- rbi/lib/openai/internal/type/converter.rbi | 16 +- rbi/lib/openai/internal/type/enum.rbi | 18 +- rbi/lib/openai/internal/type/hash_of.rbi | 24 +- .../internal/type/request_parameters.rbi | 3 +- rbi/lib/openai/internal/type/union.rbi | 30 +-- rbi/lib/openai/internal/type/unknown.rbi | 12 +- rbi/lib/openai/internal/util.rbi | 100 +++---- rbi/lib/openai/models/all_models.rbi | 3 +- .../models/audio/speech_create_params.rbi | 12 +- rbi/lib/openai/models/audio/speech_model.rbi | 3 +- rbi/lib/openai/models/audio/transcription.rbi | 12 +- .../audio/transcription_create_params.rbi | 13 +- .../audio/transcription_create_response.rbi | 3 +- .../models/audio/transcription_include.rbi | 3 +- .../models/audio/transcription_segment.rbi | 3 +- .../audio/transcription_stream_event.rbi | 3 +- .../audio/transcription_text_delta_event.rbi | 12 +- .../audio/transcription_text_done_event.rbi | 12 +- .../models/audio/transcription_verbose.rbi | 6 +- .../models/audio/transcription_word.rbi | 6 +- rbi/lib/openai/models/audio/translation.rbi | 6 +- .../audio/translation_create_params.rbi | 9 +- .../audio/translation_create_response.rbi | 3 +- .../models/audio/translation_verbose.rbi | 6 +- rbi/lib/openai/models/audio_model.rbi | 3 +- .../openai/models/audio_response_format.rbi | 3 +- .../auto_file_chunking_strategy_param.rbi | 6 +- rbi/lib/openai/models/batch.rbi | 16 +- rbi/lib/openai/models/batch_cancel_params.rbi | 6 +- rbi/lib/openai/models/batch_create_params.rbi | 12 +- rbi/lib/openai/models/batch_error.rbi | 6 +- 
From 3727aaea6825f13037cd5737d14f4d3721dc5208 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 5 Apr 2025 23:26:07 +0000
Subject: [PATCH 106/295] chore: always fold up method bodies in sorbet type
 definitions (#108)

---
 rbi/lib/openai/client.rbi | 6 +-
 rbi/lib/openai/errors.rbi | 9 +-
 rbi/lib/openai/internal/cursor_page.rbi | 3 +-
 rbi/lib/openai/internal/page.rbi | 3 +-
 rbi/lib/openai/internal/stream.rbi | 3 +-
 .../openai/internal/transport/base_client.rbi | 38 +--
 .../transport/pooled_net_requester.rbi | 18 +-
 rbi/lib/openai/internal/type/array_of.rbi | 24 +-
 rbi/lib/openai/internal/type/base_model.rbi | 77 ++----
 rbi/lib/openai/internal/type/base_page.rbi | 15 +-
 rbi/lib/openai/internal/type/base_stream.rbi | 15 +-
 rbi/lib/openai/internal/type/boolean.rbi | 12 +-
 rbi/lib/openai/internal/type/converter.rbi | 16 +-
 rbi/lib/openai/internal/type/enum.rbi | 18 +-
 rbi/lib/openai/internal/type/hash_of.rbi | 24 +-
 .../internal/type/request_parameters.rbi | 3 +-
 rbi/lib/openai/internal/type/union.rbi | 30 +--
 rbi/lib/openai/internal/type/unknown.rbi | 12 +-
 rbi/lib/openai/internal/util.rbi | 100 +++----
 rbi/lib/openai/models/all_models.rbi | 3 +-
 .../models/audio/speech_create_params.rbi | 12 +-
 rbi/lib/openai/models/audio/speech_model.rbi | 3 +-
 rbi/lib/openai/models/audio/transcription.rbi | 12 +-
 .../audio/transcription_create_params.rbi | 13 +-
 .../audio/transcription_create_response.rbi | 3 +-
 .../models/audio/transcription_include.rbi | 3 +-
 .../models/audio/transcription_segment.rbi | 3 +-
 .../audio/transcription_stream_event.rbi | 3 +-
 .../audio/transcription_text_delta_event.rbi | 12 +-
 .../audio/transcription_text_done_event.rbi | 12 +-
 .../models/audio/transcription_verbose.rbi | 6 +-
 .../models/audio/transcription_word.rbi | 6 +-
 rbi/lib/openai/models/audio/translation.rbi | 6 +-
 .../audio/translation_create_params.rbi | 9 +-
 .../audio/translation_create_response.rbi | 3 +-
 .../models/audio/translation_verbose.rbi | 6 +-
 rbi/lib/openai/models/audio_model.rbi | 3 +-
 .../openai/models/audio_response_format.rbi | 3 +-
 .../auto_file_chunking_strategy_param.rbi | 6 +-
 rbi/lib/openai/models/batch.rbi | 16 +-
 rbi/lib/openai/models/batch_cancel_params.rbi | 6 +-
 rbi/lib/openai/models/batch_create_params.rbi | 12 +-
 rbi/lib/openai/models/batch_error.rbi | 6 +-
 rbi/lib/openai/models/batch_list_params.rbi | 6 +-
 .../openai/models/batch_request_counts.rbi | 6 +-
 .../openai/models/batch_retrieve_params.rbi | 6 +-
 rbi/lib/openai/models/beta/assistant.rbi | 57 ++--
 .../models/beta/assistant_create_params.rbi | 115 ++++----
 .../models/beta/assistant_delete_params.rbi | 6 +-
 .../openai/models/beta/assistant_deleted.rbi | 6 +-
 .../models/beta/assistant_list_params.rbi | 9 +-
 .../beta/assistant_response_format_option.rbi | 3 +-
 .../models/beta/assistant_retrieve_params.rbi | 6 +-
 .../models/beta/assistant_stream_event.rbi | 151 ++++------
 rbi/lib/openai/models/beta/assistant_tool.rbi | 3 +-
 .../models/beta/assistant_tool_choice.rbi | 9 +-
 .../beta/assistant_tool_choice_function.rbi | 6 +-
 .../beta/assistant_tool_choice_option.rbi | 6 +-
 .../models/beta/assistant_update_params.rbi | 72 +++--
 .../models/beta/code_interpreter_tool.rbi | 6 +-
 .../openai/models/beta/file_search_tool.rbi | 21 +-
 rbi/lib/openai/models/beta/function_tool.rbi | 6 +-
 .../models/beta/message_stream_event.rbi | 33 +--
 .../models/beta/run_step_stream_event.rbi | 49 ++--
 .../openai/models/beta/run_stream_event.rbi | 63 ++---
 rbi/lib/openai/models/beta/thread.rbi | 24 +-
 .../beta/thread_create_and_run_params.rbi | 252 ++++++++----------
 .../models/beta/thread_create_params.rbi | 172 +++++-------
 .../models/beta/thread_delete_params.rbi | 6 +-
 rbi/lib/openai/models/beta/thread_deleted.rbi | 6 +-
 .../models/beta/thread_retrieve_params.rbi | 6 +-
 .../models/beta/thread_stream_event.rbi | 6 +-
 .../models/beta/thread_update_params.rbi | 24 +-
 .../openai/models/beta/threads/annotation.rbi | 3 +-
 .../models/beta/threads/annotation_delta.rbi | 3 +-
 .../beta/threads/file_citation_annotation.rbi | 12 +-
 .../file_citation_delta_annotation.rbi | 9 +-
 .../beta/threads/file_path_annotation.rbi | 12 +-
 .../threads/file_path_delta_annotation.rbi | 9 +-
 .../openai/models/beta/threads/image_file.rbi | 9 +-
 .../beta/threads/image_file_content_block.rbi | 6 +-
 .../models/beta/threads/image_file_delta.rbi | 9 +-
 .../beta/threads/image_file_delta_block.rbi | 6 +-
 .../openai/models/beta/threads/image_url.rbi | 9 +-
 .../beta/threads/image_url_content_block.rbi | 6 +-
 .../models/beta/threads/image_url_delta.rbi | 9 +-
 .../beta/threads/image_url_delta_block.rbi | 6 +-
 .../openai/models/beta/threads/message.rbi | 111 ++++----
 .../models/beta/threads/message_content.rbi | 3 +-
 .../beta/threads/message_content_delta.rbi | 3 +-
 .../threads/message_content_part_param.rbi | 3 +-
 .../beta/threads/message_create_params.rbi | 105 ++++----
 .../beta/threads/message_delete_params.rbi | 6 +-
 .../models/beta/threads/message_deleted.rbi | 6 +-
 .../models/beta/threads/message_delta.rbi | 61 ++---
 .../beta/threads/message_delta_event.rbi | 6 +-
 .../beta/threads/message_list_params.rbi | 9 +-
 .../beta/threads/message_retrieve_params.rbi | 6 +-
 .../beta/threads/message_update_params.rbi | 6 +-
 .../beta/threads/refusal_content_block.rbi | 6 +-
 .../beta/threads/refusal_delta_block.rbi | 6 +-
 .../required_action_function_tool_call.rbi | 12 +-
 rbi/lib/openai/models/beta/threads/run.rbi | 84 +++---
 .../models/beta/threads/run_cancel_params.rbi | 6 +-
 .../models/beta/threads/run_create_params.rbi | 164 +++++-------
 .../models/beta/threads/run_list_params.rbi | 9 +-
 .../beta/threads/run_retrieve_params.rbi | 6 +-
 .../openai/models/beta/threads/run_status.rbi | 3 +-
 .../run_submit_tool_outputs_params.rbi | 12 +-
 .../models/beta/threads/run_update_params.rbi | 6 +-
 .../threads/runs/code_interpreter_logs.rbi | 6 +-
 .../runs/code_interpreter_output_image.rbi | 12 +-
 .../runs/code_interpreter_tool_call.rbi | 59 ++--
 .../runs/code_interpreter_tool_call_delta.rbi | 51 ++--
 .../threads/runs/file_search_tool_call.rbi | 68 ++---
 .../runs/file_search_tool_call_delta.rbi | 6 +-
 .../beta/threads/runs/function_tool_call.rbi | 12 +-
 .../threads/runs/function_tool_call_delta.rbi | 12 +-
 .../runs/message_creation_step_details.rbi | 12 +-
 .../models/beta/threads/runs/run_step.rbi | 31 +--
 .../beta/threads/runs/run_step_delta.rbi | 9 +-
 .../threads/runs/run_step_delta_event.rbi | 6 +-
 .../runs/run_step_delta_message_delta.rbi | 12 +-
 .../beta/threads/runs/run_step_include.rbi | 3 +-
 .../beta/threads/runs/step_list_params.rbi | 6 +-
 .../threads/runs/step_retrieve_params.rbi | 6 +-
 .../models/beta/threads/runs/tool_call.rbi | 3 +-
 .../beta/threads/runs/tool_call_delta.rbi | 3 +-
 .../threads/runs/tool_call_delta_object.rbi | 50 ++--
 .../threads/runs/tool_calls_step_details.rbi | 38 ++-
 rbi/lib/openai/models/beta/threads/text.rbi | 32 ++-
 .../beta/threads/text_content_block.rbi | 6 +-
 .../beta/threads/text_content_block_param.rbi | 6 +-
 .../openai/models/beta/threads/text_delta.rbi | 42 ++-
 .../models/beta/threads/text_delta_block.rbi | 6 +-
 .../openai/models/chat/chat_completion.rbi | 25 +-
 ...hat_completion_assistant_message_param.rbi | 55 ++--
 .../models/chat/chat_completion_audio.rbi | 6 +-
 .../chat/chat_completion_audio_param.rbi | 12 +-
 .../models/chat/chat_completion_chunk.rbi | 55 ++--
 .../chat/chat_completion_content_part.rbi | 15 +-
 .../chat_completion_content_part_image.rbi | 15 +-
 ...at_completion_content_part_input_audio.rbi | 15 +-
 .../chat_completion_content_part_refusal.rbi | 6 +-
 .../chat_completion_content_part_text.rbi | 6 +-
 .../models/chat/chat_completion_deleted.rbi | 6 +-
 ...hat_completion_developer_message_param.rbi | 9 +-
 .../chat_completion_function_call_option.rbi | 6 +-
 ...chat_completion_function_message_param.rbi | 6 +-
 .../models/chat/chat_completion_message.rbi | 21 +-
 .../chat/chat_completion_message_param.rbi | 3 +-
 .../chat_completion_message_tool_call.rbi | 12 +-
 .../models/chat/chat_completion_modality.rbi | 3 +-
 .../chat_completion_named_tool_choice.rbi | 12 +-
 .../chat_completion_prediction_content.rbi | 9 +-
 .../models/chat/chat_completion_role.rbi | 3 +-
 .../chat/chat_completion_store_message.rbi | 6 +-
 .../chat/chat_completion_stream_options.rbi | 6 +-
 .../chat_completion_system_message_param.rbi | 9 +-
 .../chat/chat_completion_token_logprob.rbi | 12 +-
 .../models/chat/chat_completion_tool.rbi | 6 +-
 .../chat_completion_tool_choice_option.rbi | 6 +-
 .../chat_completion_tool_message_param.rbi | 9 +-
 .../chat_completion_user_message_param.rbi | 59 ++--
 .../models/chat/completion_create_params.rbi | 105 +++-----
 .../models/chat/completion_delete_params.rbi | 6 +-
 .../models/chat/completion_list_params.rbi | 9 +-
 .../chat/completion_retrieve_params.rbi | 6 +-
 .../models/chat/completion_update_params.rbi | 6 +-
 .../chat/completions/message_list_params.rbi | 9 +-
 rbi/lib/openai/models/chat_model.rbi | 3 +-
 rbi/lib/openai/models/comparison_filter.rbi | 12 +-
 rbi/lib/openai/models/completion.rbi | 3 +-
 rbi/lib/openai/models/completion_choice.rbi | 15 +-
 .../models/completion_create_params.rbi | 25 +-
 rbi/lib/openai/models/completion_usage.rbi | 16 +-
 rbi/lib/openai/models/compound_filter.rbi | 12 +-
 .../models/create_embedding_response.rbi | 12 +-
 rbi/lib/openai/models/embedding.rbi | 6 +-
 .../openai/models/embedding_create_params.rbi | 15 +-
 rbi/lib/openai/models/embedding_model.rbi | 3 +-
 rbi/lib/openai/models/error_object.rbi | 6 +-
 .../openai/models/file_chunking_strategy.rbi | 3 +-
 .../models/file_chunking_strategy_param.rbi | 3 +-
 rbi/lib/openai/models/file_content_params.rbi | 6 +-
 rbi/lib/openai/models/file_create_params.rbi | 6 +-
 rbi/lib/openai/models/file_delete_params.rbi | 6 +-
 rbi/lib/openai/models/file_deleted.rbi | 6 +-
 rbi/lib/openai/models/file_list_params.rbi | 9 +-
 rbi/lib/openai/models/file_object.rbi | 13 +-
 rbi/lib/openai/models/file_purpose.rbi | 3 +-
 .../openai/models/file_retrieve_params.rbi | 6 +-
 .../models/fine_tuning/fine_tuning_job.rbi | 85 ++----
 .../fine_tuning/fine_tuning_job_event.rbi | 9 +-
 .../fine_tuning_job_wandb_integration.rbi | 6 +-
 ...ne_tuning_job_wandb_integration_object.rbi | 6 +-
 .../models/fine_tuning/job_cancel_params.rbi | 6 +-
 .../models/fine_tuning/job_create_params.rbi | 95 +++----
 .../fine_tuning/job_list_events_params.rbi | 6 +-
 .../models/fine_tuning/job_list_params.rbi | 6 +-
 .../fine_tuning/job_retrieve_params.rbi | 6 +-
 .../jobs/checkpoint_list_params.rbi | 6 +-
 .../jobs/fine_tuning_job_checkpoint.rbi | 14 +-
 rbi/lib/openai/models/function_definition.rbi | 6 +-
 rbi/lib/openai/models/image.rbi | 6 +-
 .../models/image_create_variation_params.rbi | 12 +-
 rbi/lib/openai/models/image_edit_params.rbi | 16 +-
 .../openai/models/image_generate_params.rbi | 22 +-
 rbi/lib/openai/models/image_model.rbi | 3 +-
 rbi/lib/openai/models/images_response.rbi | 6 +-
 rbi/lib/openai/models/model.rbi | 6 +-
 rbi/lib/openai/models/model_delete_params.rbi | 6 +-
 rbi/lib/openai/models/model_deleted.rbi | 6 +-
 rbi/lib/openai/models/model_list_params.rbi | 6 +-
 .../openai/models/model_retrieve_params.rbi | 6 +-
 rbi/lib/openai/models/moderation.rbi | 66 ++---
 .../models/moderation_create_params.rbi | 22 +-
 .../models/moderation_create_response.rbi | 6 +-
 .../models/moderation_image_url_input.rbi | 16 +-
 rbi/lib/openai/models/moderation_model.rbi | 3 +-
 .../models/moderation_multi_modal_input.rbi | 3 +-
 .../openai/models/moderation_text_input.rbi | 6 +-
 .../other_file_chunking_strategy_object.rbi | 6 +-
 rbi/lib/openai/models/reasoning.rbi | 9 +-
 rbi/lib/openai/models/reasoning_effort.rbi | 3 +-
 .../models/response_format_json_object.rbi | 6 +-
 .../models/response_format_json_schema.rbi | 12 +-
 .../openai/models/response_format_text.rbi | 6 +-
 .../openai/models/responses/computer_tool.rbi | 9 +-
 .../models/responses/easy_input_message.rbi | 57 ++--
 .../models/responses/file_search_tool.rbi | 15 +-
 .../openai/models/responses/function_tool.rbi | 6 +-
 .../responses/input_item_list_params.rbi | 9 +-
 rbi/lib/openai/models/responses/response.rbi | 110 ++++----
 .../responses/response_audio_delta_event.rbi | 6 +-
 .../responses/response_audio_done_event.rbi | 6 +-
 .../response_audio_transcript_delta_event.rbi | 6 +-
 .../response_audio_transcript_done_event.rbi | 6 +-
 ...code_interpreter_call_code_delta_event.rbi | 6 +-
 ..._code_interpreter_call_code_done_event.rbi | 6 +-
 ..._code_interpreter_call_completed_event.rbi | 9 +-
 ...ode_interpreter_call_in_progress_event.rbi | 3 +-
 ...de_interpreter_call_interpreting_event.rbi | 3 +-
 .../response_code_interpreter_tool_call.rbi | 64 ++---
 .../responses/response_completed_event.rbi | 6 +-
 .../responses/response_computer_tool_call.rbi | 84 ++----
 ...esponse_computer_tool_call_output_item.rbi | 28 +-
 ...e_computer_tool_call_output_screenshot.rbi | 6 +-
 .../models/responses/response_content.rbi | 3 +-
 .../response_content_part_added_event.rbi | 6 +-
 .../response_content_part_done_event.rbi | 9 +-
 .../responses/response_create_params.rbi | 174 ++++------
 .../responses/response_created_event.rbi | 6 +-
 .../responses/response_delete_params.rbi | 6 +-
 .../models/responses/response_error.rbi | 9 +-
 .../models/responses/response_error_event.rbi | 6 +-
 .../responses/response_failed_event.rbi | 6 +-
 ...ponse_file_search_call_completed_event.rbi | 6 +-
 ...nse_file_search_call_in_progress_event.rbi | 6 +-
 ...ponse_file_search_call_searching_event.rbi | 6 +-
 .../response_file_search_tool_call.rbi | 18 +-
 .../responses/response_format_text_config.rbi | 3 +-
 ...esponse_format_text_json_schema_config.rbi | 6 +-
 ...se_function_call_arguments_delta_event.rbi | 6 +-
 ...nse_function_call_arguments_done_event.rbi | 6 +-
 .../responses/response_function_tool_call.rbi | 9 +-
 .../response_function_tool_call_item.rbi | 6 +-
 ...esponse_function_tool_call_output_item.rbi | 9 +-
 .../response_function_web_search.rbi | 9 +-
 .../responses/response_in_progress_event.rbi | 6 +-
 .../models/responses/response_includable.rbi | 3 +-
 .../responses/response_incomplete_event.rbi | 6 +-
 .../models/responses/response_input_audio.rbi | 9 +-
 .../responses/response_input_content.rbi | 3 +-
 .../models/responses/response_input_file.rbi | 6 +-
 .../models/responses/response_input_image.rbi | 9 +-
 .../models/responses/response_input_item.rbi | 93 +++----
 .../responses/response_input_message_item.rbi | 47 ++--
 .../models/responses/response_input_text.rbi | 6 +-
 .../openai/models/responses/response_item.rbi | 3 +-
 .../models/responses/response_item_list.rbi | 68 +++--
 .../responses/response_output_audio.rbi | 6 +-
 .../models/responses/response_output_item.rbi | 3 +-
 .../response_output_item_added_event.rbi | 6 +-
 .../response_output_item_done_event.rbi | 6 +-
 .../responses/response_output_message.rbi | 22 +-
 .../responses/response_output_refusal.rbi | 6 +-
 .../models/responses/response_output_text.rbi | 59 ++--
 .../responses/response_reasoning_item.rbi | 15 +-
 .../response_refusal_delta_event.rbi | 6 +-
 .../responses/response_refusal_done_event.rbi | 6 +-
 .../responses/response_retrieve_params.rbi | 6 +-
 .../models/responses/response_status.rbi | 3 +-
 .../responses/response_stream_event.rbi | 3 +-
 .../response_text_annotation_delta_event.rbi | 28 +-
 .../models/responses/response_text_config.rbi | 6 +-
 .../responses/response_text_delta_event.rbi | 3 +-
 .../responses/response_text_done_event.rbi | 6 +-
 .../models/responses/response_usage.rbi | 15 +-
 ...sponse_web_search_call_completed_event.rbi | 6 +-
 ...onse_web_search_call_in_progress_event.rbi | 6 +-
 ...sponse_web_search_call_searching_event.rbi | 6 +-
 rbi/lib/openai/models/responses/tool.rbi | 3 +-
 .../models/responses/tool_choice_function.rbi | 6 +-
 .../models/responses/tool_choice_options.rbi | 3 +-
 .../models/responses/tool_choice_types.rbi | 9 +-
 .../models/responses/web_search_tool.rbi | 18 +-
 rbi/lib/openai/models/responses_model.rbi | 3 +-
 .../models/static_file_chunking_strategy.rbi | 6 +-
 .../static_file_chunking_strategy_object.rbi | 6 +-
 ...ic_file_chunking_strategy_object_param.rbi | 6 +-
 rbi/lib/openai/models/upload.rbi | 6 +-
 .../openai/models/upload_cancel_params.rbi | 6 +-
 .../openai/models/upload_complete_params.rbi | 6 +-
 .../openai/models/upload_create_params.rbi | 6 +-
 .../models/uploads/part_create_params.rbi | 6 +-
 rbi/lib/openai/models/uploads/upload_part.rbi | 6 +-
 rbi/lib/openai/models/vector_store.rbi | 22 +-
 .../models/vector_store_create_params.rbi | 9 +-
 .../models/vector_store_delete_params.rbi | 6 +-
 .../openai/models/vector_store_deleted.rbi | 6 +-
 .../models/vector_store_list_params.rbi | 9 +-
 .../models/vector_store_retrieve_params.rbi | 6 +-
 .../models/vector_store_search_params.rbi | 18 +-
 .../models/vector_store_search_response.rbi | 18 +-
 .../models/vector_store_update_params.rbi | 12 +-
 .../file_batch_cancel_params.rbi | 6 +-
 .../file_batch_create_params.rbi | 9 +-
 .../file_batch_list_files_params.rbi | 9 +-
 .../file_batch_retrieve_params.rbi | 6 +-
 .../vector_stores/file_content_params.rbi | 6 +-
 .../vector_stores/file_content_response.rbi | 6 +-
 .../vector_stores/file_create_params.rbi | 9 +-
 .../vector_stores/file_delete_params.rbi | 6 +-
 .../models/vector_stores/file_list_params.rbi | 12 +-
 .../vector_stores/file_retrieve_params.rbi | 6 +-
 .../vector_stores/file_update_params.rbi | 9 +-
 .../vector_stores/vector_store_file.rbi | 22 +-
 .../vector_stores/vector_store_file_batch.rbi | 12 +-
 .../vector_store_file_deleted.rbi | 6 +-
 rbi/lib/openai/request_options.rbi | 6 +-
 rbi/lib/openai/resources/audio.rbi | 3 +-
 rbi/lib/openai/resources/audio/speech.rbi | 7 +-
 .../openai/resources/audio/transcriptions.rbi | 19 +-
 .../openai/resources/audio/translations.rbi | 7 +-
 rbi/lib/openai/resources/batches.rbi | 19 +-
 rbi/lib/openai/resources/beta.rbi | 3 +-
 rbi/lib/openai/resources/beta/assistants.rbi | 47 ++--
 rbi/lib/openai/resources/beta/threads.rbi | 103 ++++---
 .../resources/beta/threads/messages.rbi | 35 +--
 .../openai/resources/beta/threads/runs.rbi | 163 +++++------
 .../resources/beta/threads/runs/steps.rbi | 11 +-
 rbi/lib/openai/resources/chat.rbi | 3 +-
 rbi/lib/openai/resources/chat/completions.rbi | 63 ++---
 .../resources/chat/completions/messages.rbi | 7 +-
 rbi/lib/openai/resources/completions.rbi | 29 +-
 rbi/lib/openai/resources/embeddings.rbi | 7 +-
 rbi/lib/openai/resources/files.rbi | 23 +-
 rbi/lib/openai/resources/fine_tuning.rbi | 3 +-
 rbi/lib/openai/resources/fine_tuning/jobs.rbi | 23 +-
 .../fine_tuning/jobs/checkpoints.rbi | 7 +-
 rbi/lib/openai/resources/images.rbi | 15 +-
 rbi/lib/openai/resources/models.rbi | 14 +-
 rbi/lib/openai/resources/moderations.rbi | 17 +-
 rbi/lib/openai/resources/responses.rbi | 171 ++++++------
 .../resources/responses/input_items.rbi | 27 +-
 rbi/lib/openai/resources/uploads.rbi | 15 +-
 rbi/lib/openai/resources/uploads/parts.rbi | 7 +-
 rbi/lib/openai/resources/vector_stores.rbi | 27 +-
 .../resources/vector_stores/file_batches.rbi | 19 +-
 .../openai/resources/vector_stores/files.rbi | 27 +-
 371 files changed, 2556 insertions(+), 4170 deletions(-)
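Every hunk that follows applies the same mechanical rewrite; a minimal before/after sketch (signature taken from the `cursor_page.rbi` hunk below):

    # Before: an empty method body spans two lines.
    sig { returns(String) }
    def inspect
    end

    # After: the body folds onto the definition line.
    sig { returns(String) }
    def inspect; end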
diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi
index 518920af..8578bd99 100644
--- a/rbi/lib/openai/client.rbi
+++ b/rbi/lib/openai/client.rbi
@@ -63,8 +63,7 @@ module OpenAI
 
     # @api private
     sig { override.returns(T::Hash[String, String]) }
-    private def auth_headers
-    end
+    private def auth_headers; end
 
     # Creates and returns a new client for interacting with the API.
     sig do
@@ -94,7 +93,6 @@ module OpenAI
       timeout: DEFAULT_TIMEOUT_IN_SECONDS,
       initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY,
       max_retry_delay: DEFAULT_MAX_RETRY_DELAY
-    )
-    end
+    ); end
   end
 end
diff --git a/rbi/lib/openai/errors.rbi b/rbi/lib/openai/errors.rbi
index a0f3280e..51d71f19 100644
--- a/rbi/lib/openai/errors.rbi
+++ b/rbi/lib/openai/errors.rbi
@@ -41,8 +41,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: nil)
-      end
+      def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: nil); end
     end
 
     class APIConnectionError < OpenAI::Errors::APIError
@@ -107,8 +106,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.for(url:, status:, body:, request:, response:, message: nil)
-      end
+      def self.for(url:, status:, body:, request:, response:, message: nil); end
 
       sig { returns(Integer) }
       attr_accessor :status
@@ -134,8 +132,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(url:, status:, body:, request:, response:, message: nil)
-      end
+      def self.new(url:, status:, body:, request:, response:, message: nil); end
     end
 
     class BadRequestError < OpenAI::Errors::APIStatusError
diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/lib/openai/internal/cursor_page.rbi
index 7d728a09..80b18955 100644
--- a/rbi/lib/openai/internal/cursor_page.rbi
+++ b/rbi/lib/openai/internal/cursor_page.rbi
@@ -14,8 +14,7 @@ module OpenAI
       attr_accessor :has_more
 
       sig { returns(String) }
-      def inspect
-      end
+      def inspect; end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/lib/openai/internal/page.rbi
index bb47228b..14d7ba40 100644
--- a/rbi/lib/openai/internal/page.rbi
+++ b/rbi/lib/openai/internal/page.rbi
@@ -14,8 +14,7 @@ module OpenAI
       attr_accessor :object
 
       sig { returns(String) }
-      def inspect
-      end
+      def inspect; end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/stream.rbi b/rbi/lib/openai/internal/stream.rbi
index 7b7eff02..8c1f5f34 100644
--- a/rbi/lib/openai/internal/stream.rbi
+++ b/rbi/lib/openai/internal/stream.rbi
@@ -10,8 +10,7 @@ module OpenAI
 
       # @api private
       sig { override.returns(T::Enumerable[Elem]) }
-      private def iterator
-      end
+      private def iterator; end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi
index d3c028a2..b8b35744 100644
--- a/rbi/lib/openai/internal/transport/base_client.rbi
+++ b/rbi/lib/openai/internal/transport/base_client.rbi
@@ -55,8 +55,7 @@ module OpenAI
         class << self
           # @api private
           sig { params(req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape).void }
-          def validate!(req)
-          end
+          def validate!(req); end
 
           # @api private
           sig do
@@ -68,8 +67,7 @@ module OpenAI
               )
             ).returns(T::Boolean)
           end
-          def should_retry?(status, headers:)
-          end
+          def should_retry?(status, headers:); end
 
           # @api private
           sig do
@@ -80,8 +78,7 @@ module OpenAI
             )
               .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape)
           end
-          def follow_redirect(request, status:, response_headers:)
-          end
+          def follow_redirect(request, status:, response_headers:); end
 
           # @api private
           sig do
@@ -91,8 +88,7 @@ module OpenAI
             )
               .void
           end
-          def reap_connection!(status, stream:)
-          end
+          def reap_connection!(status, stream:); end
         end
 
         # @api private
@@ -121,18 +117,14 @@ module OpenAI
           max_retry_delay: 0.0,
           headers: {},
           idempotency_header: nil
-        )
-        end
-
+        ); end
         # @api private
         sig { overridable.returns(T::Hash[String, String]) }
-        private def auth_headers
-        end
+        private def auth_headers; end
 
         # @api private
         sig { returns(String) }
-        private def generate_idempotency_key
-        end
+        private def generate_idempotency_key; end
 
         # @api private
         sig do
@@ -143,13 +135,11 @@ module OpenAI
           )
             .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape)
         end
-        private def build_request(req, opts)
-        end
+        private def build_request(req, opts); end
 
         # @api private
         sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) }
-        private def retry_delay(headers, retry_count:)
-        end
+        private def retry_delay(headers, retry_count:); end
 
         # @api private
         sig do
@@ -161,8 +151,7 @@ module OpenAI
           )
             .returns([Integer, Net::HTTPResponse, T::Enumerable[String]])
         end
-        private def send_request(request, redirect_count:, retry_count:, send_retry_header:)
-        end
+        private def send_request(request, redirect_count:, retry_count:, send_retry_header:); end
 
         # Execute the request specified by `req`. This is the method that all resource
         # methods call into.
@@ -206,12 +195,9 @@ module OpenAI
           stream: nil,
           model: OpenAI::Internal::Type::Unknown,
           options: {}
-        )
-        end
-
+        ); end
         sig { returns(String) }
-        def inspect
-        end
+        def inspect; end
       end
     end
   end
diff --git a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi
index 1db6bc50..35b91830 100644
--- a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi
+++ b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi
@@ -23,13 +23,11 @@ module OpenAI
         class << self
           # @api private
           sig { params(url: URI::Generic).returns(Net::HTTP) }
-          def connect(url)
-          end
+          def connect(url); end
 
           # @api private
           sig { params(conn: Net::HTTP, deadline: Float).void }
-          def calibrate_socket_timeout(conn, deadline)
-          end
+          def calibrate_socket_timeout(conn, deadline); end
 
           # @api private
           sig do
@@ -39,27 +37,23 @@ module OpenAI
            )
               .returns(Net::HTTPGenericRequest)
           end
-          def build_request(request, &blk)
-          end
+          def build_request(request, &blk); end
         end
 
         # @api private
         sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void }
-        private def with_pool(url, deadline:, &blk)
-        end
+        private def with_pool(url, deadline:, &blk); end
 
         # @api private
         sig do
           params(request: OpenAI::Internal::Transport::PooledNetRequester::RequestShape)
             .returns([Integer, Net::HTTPResponse, T::Enumerable[String]])
         end
-        def execute(request)
-        end
+        def execute(request); end
 
         # @api private
         sig { params(size: Integer).returns(T.attached_class) }
-        def self.new(size: Etc.nprocessors)
-        end
+        def self.new(size: Etc.nprocessors); end
       end
     end
   end
diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi
index d326636b..98276260 100644
--- a/rbi/lib/openai/internal/type/array_of.rbi
+++ b/rbi/lib/openai/internal/type/array_of.rbi
@@ -25,16 +25,13 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.[](type_info, spec = {})
-      end
+      def self.[](type_info, spec = {}); end
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def ===(other)
-      end
+      def ===(other); end
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def ==(other)
-      end
+      def ==(other); end
 
       # @api private
       sig(:final) do
@@ -46,8 +43,7 @@ module OpenAI
           state: OpenAI::Internal::Type::Converter::State)
           .returns(T.any(T::Array[T.anything], T.anything))
       end
-      def coerce(value, state:)
-      end
+      def coerce(value, state:); end
 
       # @api private
      sig(:final) do
@@ -55,18 +51,15 @@ module OpenAI
          .params(value: T.any(T::Array[T.anything], T.anything))
           .returns(T.any(T::Array[T.anything], T.anything))
       end
-      def dump(value)
-      end
+      def dump(value); end
 
       # @api private
       sig(:final) { returns(Elem) }
-      protected def item_type
-      end
+      protected def item_type; end
 
       # @api private
       sig(:final) { returns(T::Boolean) }
-      protected def nilable?
-      end
+      protected def nilable?; end
 
       # @api private
       sig(:final) do
@@ -80,8 +73,7 @@ module OpenAI
         )
           .void
       end
-      def initialize(type_info, spec = {})
-      end
+      def initialize(type_info, spec = {}); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi
index f5cf4f9c..8b8c9d16 100644
--- a/rbi/lib/openai/internal/type/base_model.rbi
+++ b/rbi/lib/openai/internal/type/base_model.rbi
@@ -20,31 +20,29 @@ module OpenAI
         sig do
           returns(
             T::Hash[
-            Symbol,
-            T.all(
-              OpenAI::Internal::Type::BaseModel::KnownFieldShape,
-              {type_fn: T.proc.returns(OpenAI::Internal::Type::Converter::Input)}
-            )
+              Symbol,
+              T.all(
+                OpenAI::Internal::Type::BaseModel::KnownFieldShape,
+                {type_fn: T.proc.returns(OpenAI::Internal::Type::Converter::Input)}
+              )
             ]
           )
         end
-        def known_fields
-        end
+        def known_fields; end
 
         # @api private
         sig do
           returns(
             T::Hash[
-            Symbol,
-            T.all(
-              OpenAI::Internal::Type::BaseModel::KnownFieldShape,
-              {type: OpenAI::Internal::Type::Converter::Input}
-            )
+              Symbol,
+              T.all(
+                OpenAI::Internal::Type::BaseModel::KnownFieldShape,
+                {type: OpenAI::Internal::Type::Converter::Input}
+              )
             ]
           )
         end
-        def fields
-        end
+        def fields; end
 
         # @api private
         sig do
@@ -66,8 +64,7 @@ module OpenAI
           )
             .void
         end
-        private def add_field(name_sym, required:, type_info:, spec:)
-        end
+        private def add_field(name_sym, required:, type_info:, spec:); end
 
         # @api private
         sig do
@@ -82,8 +79,7 @@ module OpenAI
           )
             .void
         end
-        def required(name_sym, type_info, spec = {})
-        end
+        def required(name_sym, type_info, spec = {}); end
 
         # @api private
         sig do
@@ -98,32 +94,27 @@ module OpenAI
           )
             .void
         end
-        def optional(name_sym, type_info, spec = {})
-        end
+        def optional(name_sym, type_info, spec = {}); end
 
         # @api private
         #
         # `request_only` attributes not excluded from `.#coerce` when receiving responses
         # even if well behaved servers should not send them
         sig { params(blk: T.proc.void).void }
-        private def request_only(&blk)
-        end
+        private def request_only(&blk); end
 
         # @api private
         #
         # `response_only` attributes are omitted from `.#dump` when making requests
         sig { params(blk: T.proc.void).void }
-        private def response_only(&blk)
-        end
+        private def response_only(&blk); end
 
         sig { params(other: T.anything).returns(T::Boolean) }
-        def ==(other)
-        end
+        def ==(other); end
       end
 
       sig { params(other: T.anything).returns(T::Boolean) }
-      def ==(other)
-      end
+      def ==(other); end
 
       class << self
         # @api private
@@ -135,8 +126,7 @@ module OpenAI
           )
            .returns(T.any(T.attached_class, T.anything))
         end
-        def coerce(value, state:)
-        end
+        def coerce(value, state:); end
 
         # @api private
         sig do
@@ -144,8 +134,7 @@ module OpenAI
             .params(value: T.any(T.attached_class, T.anything))
             .returns(T.any(T::Hash[T.anything, T.anything], T.anything))
         end
-        def dump(value)
-        end
+        def dump(value); end
       end
 
       # Returns the raw value associated with the given key, if found. Otherwise, nil is
@@ -155,8 +144,7 @@ module OpenAI
       # undocumented features. This method does not parse response data into
       # higher-level types. Lookup by anything other than a Symbol is an ArgumentError.
       sig { params(key: Symbol).returns(T.nilable(T.anything)) }
-      def [](key)
-      end
+      def [](key); end
 
       # Returns a Hash of the data underlying this object. O(1)
       #
@@ -167,8 +155,7 @@ module OpenAI
       # This method is not recursive. The returned value is shared by the object, so it
       # should not be mutated.
       sig { overridable.returns(OpenAI::Internal::AnyHash) }
-      def to_h
-      end
+      def to_h; end
 
       # Returns a Hash of the data underlying this object. O(1)
       #
@@ -179,29 +166,23 @@ module OpenAI
       # This method is not recursive. The returned value is shared by the object, so it
       # should not be mutated.
       sig { overridable.returns(OpenAI::Internal::AnyHash) }
-      def to_hash
-      end
+      def to_hash; end
 
       sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::AnyHash) }
-      def deconstruct_keys(keys)
-      end
+      def deconstruct_keys(keys); end
 
       sig { params(a: T.anything).returns(String) }
-      def to_json(*a)
-      end
+      def to_json(*a); end
 
       sig { params(a: T.anything).returns(String) }
-      def to_yaml(*a)
-      end
+      def to_yaml(*a); end
 
       # Create a new instance of a model.
       sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) }
-      def self.new(data = {})
-      end
+      def self.new(data = {}); end
 
       sig { returns(String) }
-      def inspect
-      end
+      def inspect; end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/base_page.rbi b/rbi/lib/openai/internal/type/base_page.rbi
index b40068ca..3906abb9 100644
--- a/rbi/lib/openai/internal/type/base_page.rbi
+++ b/rbi/lib/openai/internal/type/base_page.rbi
@@ -8,20 +8,16 @@ module OpenAI
       Elem = type_member(:out)
 
       sig { overridable.returns(T::Boolean) }
-      def next_page?
-      end
+      def next_page?; end
 
       sig { overridable.returns(T.self_type) }
-      def next_page
-      end
+      def next_page; end
 
       sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void }
-      def auto_paging_each(&blk)
-      end
+      def auto_paging_each(&blk); end
 
      sig { returns(T::Enumerable[Elem]) }
-      def to_enum
-      end
+      def to_enum; end
 
       # @api private
       sig do
@@ -33,8 +29,7 @@ module OpenAI
         )
           .void
       end
-      def initialize(client:, req:, headers:, page_data:)
-      end
+      def initialize(client:, req:, headers:, page_data:); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi
index 1e0940f8..eb383d06 100644
--- a/rbi/lib/openai/internal/type/base_stream.rbi
+++ b/rbi/lib/openai/internal/type/base_stream.rbi
@@ -11,21 +11,17 @@ module OpenAI
       Elem = type_member(:out)
 
       sig { void }
-      def close
-      end
+      def close; end
 
       # @api private
       sig { overridable.returns(T::Enumerable[Elem]) }
-      private def iterator
-      end
+      private def iterator; end
 
      sig { params(blk: T.proc.params(arg0: Elem).void).void }
-      def each(&blk)
-      end
+      def each(&blk); end
 
       sig { returns(T::Enumerator[Elem]) }
-      def to_enum
-      end
+      def to_enum; end
 
       # @api private
       sig do
@@ -38,8 +34,7 @@ module OpenAI
         )
           .void
       end
-      def initialize(model:, url:, status:, response:, stream:)
-      end
+      def initialize(model:, url:, status:, response:, stream:); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/boolean.rbi b/rbi/lib/openai/internal/type/boolean.rbi
index 713967f7..4e002734 100644
--- a/rbi/lib/openai/internal/type/boolean.rbi
+++ b/rbi/lib/openai/internal/type/boolean.rbi
@@ -13,12 +13,10 @@ module OpenAI
       final!
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def self.===(other)
-      end
+      def self.===(other); end
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def self.==(other)
-      end
+      def self.==(other); end
 
       class << self
         # @api private
@@ -27,15 +25,13 @@ module OpenAI
            .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Internal::Type::Converter::State)
             .returns(T.any(T::Boolean, T.anything))
         end
-        def coerce(value, state:)
-        end
+        def coerce(value, state:); end
 
         # @api private
         sig(:final) do
           override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything))
         end
-        def dump(value)
-        end
+        def dump(value); end
       end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/lib/openai/internal/type/converter.rbi
index e529f225..99bef8c2 100644
--- a/rbi/lib/openai/internal/type/converter.rbi
+++ b/rbi/lib/openai/internal/type/converter.rbi
@@ -23,13 +23,11 @@ module OpenAI
             state: OpenAI::Internal::Type::Converter::State
           ).returns(T.anything)
         end
-        def coerce(value, state:)
-        end
+        def coerce(value, state:); end
 
         # @api private
         sig { overridable.params(value: T.anything).returns(T.anything) }
-        def dump(value)
-        end
+        def dump(value); end
 
         class << self
           # @api private
@@ -47,8 +45,7 @@ module OpenAI
            )
               .returns(T.proc.returns(T.anything))
           end
-          def self.type_info(spec)
-          end
+          def self.type_info(spec); end
 
           # @api private
           #
@@ -92,15 +89,12 @@ module OpenAI
             #
             # See implementation below for more details.
             state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
-          )
-          end
-
+          ); end
           # @api private
           sig do
             params(target: OpenAI::Internal::Type::Converter::Input, value: T.anything).returns(T.anything)
           end
-          def self.dump(target, value)
-          end
+          def self.dump(target, value); end
         end
       end
     end
diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi
index d64ea741..9fcc30ee 100644
--- a/rbi/lib/openai/internal/type/enum.rbi
+++ b/rbi/lib/openai/internal/type/enum.rbi
@@ -20,23 +20,19 @@ module OpenAI
 
       # All of the valid Symbol values for this enum.
       sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) }
-      def values
-      end
+      def values; end
 
       # @api private
       #
       # Guard against thread safety issues by instantiating `@values`.
      sig { void }
-      private def finalize!
-      end
+      private def finalize!; end
 
       sig { params(other: T.anything).returns(T::Boolean) }
-      def ===(other)
-      end
+      def ===(other); end
 
       sig { params(other: T.anything).returns(T::Boolean) }
-      def ==(other)
-      end
+      def ==(other); end
 
       # @api private
       #
@@ -47,13 +43,11 @@ module OpenAI
           .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Internal::Type::Converter::State)
           .returns(T.any(Symbol, T.anything))
       end
-      def coerce(value, state:)
-      end
+      def coerce(value, state:); end
 
       # @api private
       sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) }
-      def dump(value)
-      end
+      def dump(value); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi
index d63a83f4..cfa8b710 100644
--- a/rbi/lib/openai/internal/type/hash_of.rbi
+++ b/rbi/lib/openai/internal/type/hash_of.rbi
@@ -25,16 +25,13 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.[](type_info, spec = {})
-      end
+      def self.[](type_info, spec = {}); end
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def ===(other)
-      end
+      def ===(other); end
 
       sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def ==(other)
-      end
+      def ==(other); end
 
       # @api private
       sig(:final) do
@@ -45,8 +42,7 @@ module OpenAI
         )
           .returns(T.any(OpenAI::Internal::AnyHash, T.anything))
       end
-      def coerce(value, state:)
-      end
+      def coerce(value, state:); end
 
       # @api private
       sig(:final) do
@@ -54,18 +50,15 @@ module OpenAI
          .params(value: T.any(T::Hash[T.anything, T.anything], T.anything))
           .returns(T.any(OpenAI::Internal::AnyHash, T.anything))
       end
-      def dump(value)
-      end
+      def dump(value); end
 
       # @api private
       sig(:final) { returns(Elem) }
-      protected def item_type
-      end
+      protected def item_type; end
 
       # @api private
       sig(:final) { returns(T::Boolean) }
-      protected def nilable?
-      end
+      protected def nilable?; end
 
       # @api private
       sig(:final) do
@@ -79,8 +72,7 @@ module OpenAI
         )
           .void
       end
-      def initialize(type_info, spec = {})
-      end
+      def initialize(type_info, spec = {}); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/request_parameters.rbi b/rbi/lib/openai/internal/type/request_parameters.rbi
index 18a79320..2a1c8d7f 100644
--- a/rbi/lib/openai/internal/type/request_parameters.rbi
+++ b/rbi/lib/openai/internal/type/request_parameters.rbi
@@ -13,8 +13,7 @@ module OpenAI
       module Converter
         # @api private
         sig { params(params: T.anything).returns([T.anything, OpenAI::Internal::AnyHash]) }
-        def dump_request(params)
-        end
+        def dump_request(params); end
       end
     end
   end
diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi
index 3d1486f0..55bc31e8 100644
--- a/rbi/lib/openai/internal/type/union.rbi
+++ b/rbi/lib/openai/internal/type/union.rbi
@@ -13,23 +13,19 @@ module OpenAI
       sig do
         returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Internal::Type::Converter::Input)]])
       end
-      private def known_variants
-      end
+      private def known_variants; end
 
       # @api private
       sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) }
-      protected def derefed_variants
-      end
+      protected def derefed_variants; end
 
       # All of the specified variants for this union.
      sig { overridable.returns(T::Array[T.anything]) }
-      def variants
-      end
+      def variants; end
 
       # @api private
       sig { params(property: Symbol).void }
-      private def discriminator(property)
-      end
+      private def discriminator(property); end
 
       # @api private
       sig do
@@ -39,21 +35,17 @@ module OpenAI
         )
           .void
       end
-      private def variant(key, spec = nil)
-      end
+      private def variant(key, spec = nil); end
 
       # @api private
       sig { params(value: T.anything).returns(T.nilable(T.anything)) }
-      private def resolve_variant(value)
-      end
+      private def resolve_variant(value); end
 
      sig { params(other: T.anything).returns(T::Boolean) }
-      def ===(other)
-      end
+      def ===(other); end
 
      sig { params(other: T.anything).returns(T::Boolean) }
-      def ==(other)
-      end
+      def ==(other); end
 
       # @api private
       sig do
@@ -62,13 +54,11 @@ module OpenAI
           state: OpenAI::Internal::Type::Converter::State
         ).returns(T.anything)
       end
-      def coerce(value, state:)
-      end
+      def coerce(value, state:); end
 
       # @api private
       sig { override.params(value: T.anything).returns(T.anything) }
-      def dump(value)
-      end
+      def dump(value); end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/type/unknown.rbi b/rbi/lib/openai/internal/type/unknown.rbi
index 96c59fe6..55d4988d 100644
--- a/rbi/lib/openai/internal/type/unknown.rbi
+++ b/rbi/lib/openai/internal/type/unknown.rbi
@@ -13,12 +13,10 @@ module OpenAI
       final!
 
      sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def self.===(other)
-      end
+      def self.===(other); end
 
      sig(:final) { params(other: T.anything).returns(T::Boolean) }
-      def self.==(other)
-      end
+      def self.==(other); end
 
       class << self
         # @api private
@@ -28,13 +26,11 @@ module OpenAI
             state: OpenAI::Internal::Type::Converter::State
           ).returns(T.anything)
         end
-        def coerce(value, state:)
-        end
+        def coerce(value, state:); end
 
         # @api private
         sig(:final) { override.params(value: T.anything).returns(T.anything) }
-        def dump(value)
-        end
+        def dump(value); end
       end
     end
   end
 end
diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi
index b943b0fd..36689b2b 100644
--- a/rbi/lib/openai/internal/util.rbi
+++ b/rbi/lib/openai/internal/util.rbi
@@ -6,58 +6,48 @@ module OpenAI
     module Util
       # @api private
      sig { returns(Float) }
-      def self.monotonic_secs
-      end
+      def self.monotonic_secs; end
 
       class << self
         # @api private
        sig { returns(String) }
-        def arch
-        end
+        def arch; end
 
         # @api private
        sig { returns(String) }
-        def os
-        end
+        def os; end
       end
 
       class << self
         # @api private
        sig { params(input: T.anything).returns(T::Boolean) }
-        def primitive?(input)
-        end
+        def primitive?(input); end
 
         # @api private
        sig { params(input: T.any(String, T::Boolean)).returns(T.any(T::Boolean, T.anything)) }
-        def coerce_boolean(input)
-        end
+        def coerce_boolean(input); end
 
         # @api private
        sig { params(input: T.any(String, T::Boolean)).returns(T.nilable(T::Boolean)) }
-        def coerce_boolean!(input)
-        end
+        def coerce_boolean!(input); end
 
         # @api private
        sig { params(input: T.any(String, Integer)).returns(T.any(Integer, T.anything)) }
-        def coerce_integer(input)
-        end
+        def coerce_integer(input); end
 
         # @api private
        sig { params(input: T.any(String, Integer, Float)).returns(T.any(Float, T.anything)) }
-        def coerce_float(input)
-        end
+        def coerce_float(input); end
 
         # @api private
        sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) }
-        def coerce_hash(input)
-        end
+        def coerce_hash(input); end
       end
 
       class << self
         # @api private
        sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) }
-        private def deep_merge_lr(lhs, rhs, concat: false)
-        end
+        private def deep_merge_lr(lhs, rhs, concat: false); end
 
         # @api private
         #
        sig do
          params(values: T.anything, sentinel: T.nilable(T.anything), concat: T::Boolean).returns(T.anything)
        end
        def self.deep_merge(
          *values,
           # the value to return if no other value is present.
           sentinel: nil,
           # whether to merge sequences by concatenation.
           concat: false
-        )
-        end
-
+        ); end
         # @api private
         sig do
           params(
             data: T.any(T::Hash[T.anything, T.anything], T::Array[T.anything], T.anything),
             pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])),
             sentinel: T.nilable(T.anything),
             blk: T.nilable(T.proc.returns(T.anything))
           )
             .returns(T.nilable(T.anything))
         end
-        def dig(data, pick, sentinel = nil, &blk)
-        end
+        def dig(data, pick, sentinel = nil, &blk); end
       end
 
       class << self
         # @api private
        sig { params(uri: URI::Generic).returns(String) }
-        def uri_origin(uri)
-        end
+        def uri_origin(uri); end
 
         # @api private
        sig { params(path: T.any(String, T::Array[String])).returns(String) }
-        def interpolate_path(path)
-        end
+        def interpolate_path(path); end
       end
 
       class << self
         # @api private
        sig { params(query: T.nilable(String)).returns(T::Hash[String, T::Array[String]]) }
-        def decode_query(query)
-        end
+        def decode_query(query); end
 
         # @api private
         sig do
           params(query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]))
             .returns(T.nilable(String))
         end
-        def encode_query(query)
-        end
+        def encode_query(query); end
       end
 
       ParsedUriShape =
@@ -131,21 +114,18 @@ module OpenAI
       class << self
         # @api private
        sig { params(url: T.any(URI::Generic, String)).returns(OpenAI::Internal::Util::ParsedUriShape) }
-        def parse_uri(url)
-        end
+        def parse_uri(url); end
 
         # @api private
        sig { params(parsed: OpenAI::Internal::Util::ParsedUriShape).returns(URI::Generic) }
-        def unparse_uri(parsed)
-        end
+        def unparse_uri(parsed); end
 
         # @api private
         sig do
           params(lhs: OpenAI::Internal::Util::ParsedUriShape, rhs: OpenAI::Internal::Util::ParsedUriShape)
             .returns(URI::Generic)
         end
-        def join_parsed_uri(lhs, rhs)
-        end
+        def join_parsed_uri(lhs, rhs); end
       end
 
       class << self
@@ -157,8 +137,7 @@ module OpenAI
           )
             .returns(T::Hash[String, String])
         end
-        def normalized_headers(*headers)
-        end
+        def normalized_headers(*headers); end
       end
 
       # @api private
@@ -167,13 +146,11 @@ module OpenAI
       class ReadIOAdapter
         # @api private
        sig { params(max_len: T.nilable(Integer)).returns(String) }
-        private def read_enum(max_len)
-        end
+        private def read_enum(max_len); end
 
         # @api private
        sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) }
-        def read(max_len = nil, out_string = nil)
-        end
+        def read(max_len = nil, out_string = nil); end
 
         # @api private
         sig do
@@ -183,14 +160,12 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(stream, &blk)
-        end
+        def self.new(stream, &blk); end
       end
 
       class << self
        sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) }
-        def writable_enum(&blk)
-        end
+        def writable_enum(&blk); end
       end
 
       class << self
         # @api private
         sig do
@@ -198,18 +173,15 @@ module OpenAI
          params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void
         end
-        private def write_multipart_chunk(y, boundary:, key:, val:)
-        end
+        private def write_multipart_chunk(y, boundary:, key:, val:); end
 
         # @api private
        sig { params(body: T.anything).returns([String, T::Enumerable[String]]) }
-        private def encode_multipart_streaming(body)
-        end
+        private def encode_multipart_streaming(body); end
 
         # @api private
        sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) }
-        def encode_content(headers, body)
-        end
+        def encode_content(headers, body); end
 
         # @api private
         sig do
@@ -220,8 +192,7 @@ module OpenAI
           )
             .returns(T.anything)
         end
-        def decode_content(headers, stream:, suppress_error: false)
-        end
+        def decode_content(headers, stream:, suppress_error: false); end
       end
 
       class << self
@@ -232,13 +203,11 @@ module OpenAI
          params(enum: T::Enumerable[T.anything], external: T::Boolean, close: T.proc.void)
             .returns(T::Enumerable[T.anything])
         end
-        def fused_enum(enum, external: false, &close)
-        end
+        def fused_enum(enum, external: false, &close); end
 
         # @api private
        sig { params(enum: T.nilable(T::Enumerable[T.anything])).void }
-        def close_fused!(enum)
-        end
+        def close_fused!(enum); end
 
         # @api private
         sig do
@@ -248,8 +217,7 @@ module OpenAI
           )
             .returns(T::Enumerable[T.anything])
         end
-        def chain_fused(enum, &blk)
-        end
+        def chain_fused(enum, &blk); end
       end
 
       ServerSentEvent =
@@ -265,8 +233,7 @@ module OpenAI
       class << self
         # @api private
        sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) }
-        def decode_lines(enum)
-        end
+        def decode_lines(enum); end
 
         # @api private
         #
         # Assumes Content-Type is `text/event-stream`
         sig do
@@ -274,8 +241,7 @@ module OpenAI
          params(lines: T::Enumerable[String]).returns(T::Enumerable[OpenAI::Internal::Util::ServerSentEvent])
         end
-        def decode_sse(lines)
-        end
+        def decode_sse(lines); end
       end
     end
   end
 end
diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi
index edc0d8c5..aaded68e 100644
--- a/rbi/lib/openai/models/all_models.rbi
+++ b/rbi/lib/openai/models/all_models.rbi
@@ -6,8 +6,7 @@ module OpenAI
       extend OpenAI::Internal::Type::Union
 
      sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) }
-      def self.variants
-      end
+      def self.variants; end
 
       TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) }
       OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::TaggedSymbol) }
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi
index 08cd04a2..9a810d27 100644
--- a/rbi/lib/openai/models/audio/speech_create_params.rbi
+++ b/rbi/lib/openai/models/audio/speech_create_params.rbi
@@ -84,8 +84,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
 
       # One of the available [TTS models](https://platform.openai.com/docs/models#tts):
       # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`.
@@ -93,8 +92,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) }
-        def self.variants
-        end
+        def self.variants; end
       end
 
       # The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
@@ -105,8 +103,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) }
-        def self.variants
-        end
+        def self.variants; end
 
         TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) }
         OrSymbol =
@@ -142,8 +139,7 @@ module OpenAI
         PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi
index a64b1353..2f4f0bf8 100644
--- a/rbi/lib/openai/models/audio/speech_model.rbi
+++ b/rbi/lib/openai/models/audio/speech_model.rbi
@@ -14,8 +14,7 @@ module OpenAI
         GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi
index 5310fa78..97b95ef4 100644
--- a/rbi/lib/openai/models/audio/transcription.rbi
+++ b/rbi/lib/openai/models/audio/transcription.rbi
@@ -31,12 +31,10 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(text:, logprobs: nil)
-        end
+        def self.new(text:, logprobs: nil); end
 
        sig { override.returns({text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]}) }
-        def to_hash
-        end
+        def to_hash; end
 
         class Logprob < OpenAI::Internal::Type::BaseModel
           # The token in the transcription.
@@ -61,12 +59,10 @@ module OpenAI
           attr_writer :logprob
 
          sig { params(token: String, bytes: T::Array[Float], logprob: Float).returns(T.attached_class) }
-          def self.new(token: nil, bytes: nil, logprob: nil)
-          end
+          def self.new(token: nil, bytes: nil, logprob: nil); end
 
          sig { override.returns({token: String, bytes: T::Array[Float], logprob: Float}) }
-          def to_hash
-          end
+          def to_hash; end
         end
       end
     end
diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi
index 951cde7b..866e4c50 100644
--- a/rbi/lib/openai/models/audio/transcription_create_params.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi
@@ -112,9 +112,7 @@ module OpenAI
         temperature: nil,
         timestamp_granularities: nil,
         request_options: {}
-      )
-      end
-
+      ); end
       sig do
         override
           .returns(
@@ -131,8 +129,7 @@ module OpenAI
             }
           )
       end
-      def to_hash
-      end
+      def to_hash; end
 
       # ID of the model to use. The options are `gpt-4o-transcribe`,
       # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source
@@ -141,8 +138,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) }
-        def self.variants
-        end
+        def self.variants; end
       end
 
       module TimestampGranularity
@@ -167,8 +163,7 @@ module OpenAI
           override
             .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol])
         end
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi
index c2a3aad6..ba3131e3 100644
--- a/rbi/lib/openai/models/audio/transcription_create_response.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi
@@ -9,8 +9,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) }
-        def self.variants
-        end
+        def self.variants; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi
index e1de4abe..732e95a4 100644
--- a/rbi/lib/openai/models/audio/transcription_include.rbi
+++ b/rbi/lib/openai/models/audio/transcription_include.rbi
@@ -13,8 +13,7 @@ module OpenAI
         LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi
index b4b718a4..60e0d8c4 100644
--- a/rbi/lib/openai/models/audio/transcription_segment.rbi
+++ b/rbi/lib/openai/models/audio/transcription_segment.rbi
@@ -93,8 +93,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi
index 32426872..d09c413a 100644
--- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi
@@ -16,8 +16,7 @@ module OpenAI
             [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent]
           )
         end
-        def self.variants
-        end
+        def self.variants; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
index aeddfb85..c37fbdd0 100644
--- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
@@ -38,8 +38,7 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(delta:, logprobs: nil, type: :"transcript.text.delta")
-        end
+        def self.new(delta:, logprobs: nil, type: :"transcript.text.delta"); end
 
         sig do
           override
@@ -47,8 +46,7 @@ module OpenAI
             {delta: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]}
           )
         end
-        def to_hash
-        end
+        def to_hash; end
 
         class Logprob < OpenAI::Internal::Type::BaseModel
           # The token that was used to generate the log probability.
@@ -73,12 +71,10 @@ module OpenAI
           attr_writer :logprob
 
          sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) }
-          def self.new(token: nil, bytes: nil, logprob: nil)
-          end
+          def self.new(token: nil, bytes: nil, logprob: nil); end
 
          sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) }
-          def to_hash
-          end
+          def to_hash; end
         end
       end
     end
diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi
index c422dcf3..03d16168 100644
--- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi
@@ -39,8 +39,7 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(text:, logprobs: nil, type: :"transcript.text.done")
-        end
+        def self.new(text:, logprobs: nil, type: :"transcript.text.done"); end
 
         sig do
           override
@@ -48,8 +47,7 @@ module OpenAI
             {text: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]}
           )
         end
-        def to_hash
-        end
+        def to_hash; end
 
         class Logprob < OpenAI::Internal::Type::BaseModel
           # The token that was used to generate the log probability.
@@ -74,12 +72,10 @@ module OpenAI
           attr_writer :logprob
 
          sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) }
-          def self.new(token: nil, bytes: nil, logprob: nil)
-          end
+          def self.new(token: nil, bytes: nil, logprob: nil); end
 
          sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) }
-          def to_hash
-          end
+          def to_hash; end
         end
       end
     end
diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi
index cde99b0d..a8c181ae 100644
--- a/rbi/lib/openai/models/audio/transcription_verbose.rbi
+++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi
@@ -45,8 +45,7 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(duration:, language:, text:, segments: nil, words: nil)
-        end
+        def self.new(duration:, language:, text:, segments: nil, words: nil); end
 
         sig do
           override
@@ -60,8 +59,7 @@ module OpenAI
             }
           )
         end
-        def to_hash
-        end
+        def to_hash; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/lib/openai/models/audio/transcription_word.rbi
index 5907efa6..639e066d 100644
--- a/rbi/lib/openai/models/audio/transcription_word.rbi
+++ b/rbi/lib/openai/models/audio/transcription_word.rbi
@@ -17,12 +17,10 @@ module OpenAI
         attr_accessor :word
 
        sig { params(end_: Float, start: Float, word: String).returns(T.attached_class) }
-        def self.new(end_:, start:, word:)
-        end
+        def self.new(end_:, start:, word:); end
 
        sig { override.returns({end_: Float, start: Float, word: String}) }
-        def to_hash
-        end
+        def to_hash; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/translation.rbi b/rbi/lib/openai/models/audio/translation.rbi
index a2735384..96bbc6d2 100644
--- a/rbi/lib/openai/models/audio/translation.rbi
+++ b/rbi/lib/openai/models/audio/translation.rbi
@@ -8,12 +8,10 @@ module OpenAI
         attr_accessor :text
 
        sig { params(text: String).returns(T.attached_class) }
-        def self.new(text:)
-        end
+        def self.new(text:); end
 
        sig { override.returns({text: String}) }
-        def to_hash
-        end
+        def to_hash; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi
index aa1ac9b3..adc943d6 100644
--- a/rbi/lib/openai/models/audio/translation_create_params.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_params.rbi
@@ -73,8 +73,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
 
       # ID of the model to use. Only `whisper-1` (which is powered by our open source
       # Whisper V2 model) is currently available.
@@ -82,8 +81,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) }
-        def self.variants
-        end
+        def self.variants; end
       end
 
       # The format of the output, in one of these options: `json`, `text`, `srt`,
@@ -104,8 +102,7 @@ module OpenAI
         VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/lib/openai/models/audio/translation_create_response.rbi
index 8c96ec6e..57941765 100644
--- a/rbi/lib/openai/models/audio/translation_create_response.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_response.rbi
@@ -7,8 +7,7 @@ module OpenAI
         extend OpenAI::Internal::Type::Union
 
        sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) }
-        def self.variants
-        end
+        def self.variants; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi
index 90251977..77fb4815 100644
--- a/rbi/lib/openai/models/audio/translation_verbose.rbi
+++ b/rbi/lib/openai/models/audio/translation_verbose.rbi
@@ -35,8 +35,7 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(duration:, language:, text:, segments: nil)
-        end
+        def self.new(duration:, language:, text:, segments: nil); end
 
         sig do
           override
@@ -44,8 +43,7 @@ module OpenAI
             {duration: Float, language: String, text: String, segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment]}
           )
         end
-        def to_hash
-        end
+        def to_hash; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi
index 09edaa97..ab50d39a 100644
--- a/rbi/lib/openai/models/audio_model.rbi
+++ b/rbi/lib/openai/models/audio_model.rbi
@@ -13,8 +13,7 @@ module OpenAI
       GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::TaggedSymbol)
 
      sig { override.returns(T::Array[OpenAI::Models::AudioModel::TaggedSymbol]) }
-      def self.values
-      end
+      def self.values; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi
index ac5e8f58..c5d2582e 100644
--- a/rbi/lib/openai/models/audio_response_format.rbi
+++ b/rbi/lib/openai/models/audio_response_format.rbi
@@ -18,8 +18,7 @@ module OpenAI
       VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::TaggedSymbol)
 
      sig { override.returns(T::Array[OpenAI::Models::AudioResponseFormat::TaggedSymbol]) }
-      def self.values
-      end
+      def self.values; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
index 58bd0717..d1ba038a 100644
--- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
+++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
@@ -10,12 +10,10 @@ module OpenAI
       # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of
       # `800` and `chunk_overlap_tokens` of `400`.
      sig { params(type: Symbol).returns(T.attached_class) }
-      def self.new(type: :auto)
-      end
+      def self.new(type: :auto); end
 
      sig { override.returns({type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi
index b34e5a45..a8a659d1 100644
--- a/rbi/lib/openai/models/batch.rbi
+++ b/rbi/lib/openai/models/batch.rbi
@@ -168,9 +168,7 @@ module OpenAI
         output_file_id: nil,
         request_counts: nil,
         object: :batch
-      )
-      end
-
+      ); end
       sig do
         override
           .returns(
@@ -198,8 +196,7 @@ module OpenAI
             }
           )
       end
-      def to_hash
-      end
+      def to_hash; end
 
       # The current status of the batch.
       module Status
@@ -218,8 +215,7 @@ module OpenAI
         CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::Batch::Status::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
 
       class Errors < OpenAI::Internal::Type::BaseModel
@@ -240,12 +236,10 @@ module OpenAI
          params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)], object: String)
             .returns(T.attached_class)
         end
-        def self.new(data: nil, object: nil)
-        end
+        def self.new(data: nil, object: nil); end
 
        sig { override.returns({data: T::Array[OpenAI::Models::BatchError], object: String}) }
-        def to_hash
-        end
+        def to_hash; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/lib/openai/models/batch_cancel_params.rbi
index e5ef1114..137bcef9 100644
--- a/rbi/lib/openai/models/batch_cancel_params.rbi
+++ b/rbi/lib/openai/models/batch_cancel_params.rbi
@@ -14,12 +14,10 @@ module OpenAI
         )
       ).returns(T.attached_class)
       end
-      def self.new(request_options: {})
-      end
+      def self.new(request_options: {}); end
 
      sig { override.returns({request_options: OpenAI::RequestOptions}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi
index d27284f7..f33919b3 100644
--- a/rbi/lib/openai/models/batch_create_params.rbi
+++ b/rbi/lib/openai/models/batch_create_params.rbi
@@ -49,8 +49,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {})
-      end
+      def self.new(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}); end
 
       sig do
         override
@@ -64,8 +63,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
 
       # The time frame within which the batch should be processed. Currently only `24h`
       # is supported.
@@ -79,8 +77,7 @@ module OpenAI
         COMPLETION_WINDOW_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
 
       # The endpoint to be used for all requests in the batch. Currently
@@ -101,8 +98,7 @@ module OpenAI
         V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol)
 
        sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol]) }
-        def self.values
-        end
+        def self.values; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/lib/openai/models/batch_error.rbi
index 157787c8..0871460e 100644
--- a/rbi/lib/openai/models/batch_error.rbi
+++ b/rbi/lib/openai/models/batch_error.rbi
@@ -29,14 +29,12 @@ module OpenAI
        params(code: String, line: T.nilable(Integer), message: String, param: T.nilable(String))
           .returns(T.attached_class)
       end
-      def self.new(code: nil, line: nil, message: nil, param: nil)
-      end
+      def self.new(code: nil, line: nil, message: nil, param: nil); end
 
       sig do
         override.returns({code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)})
       end
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi
index 479dcdff..a78f54c3 100644
--- a/rbi/lib/openai/models/batch_list_params.rbi
+++ b/rbi/lib/openai/models/batch_list_params.rbi
@@ -32,12 +32,10 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(after: nil, limit: nil, request_options: {})
-      end
+      def self.new(after: nil, limit: nil, request_options: {}); end
 
      sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/lib/openai/models/batch_request_counts.rbi
index 6b688106..2c97e840 100644
--- a/rbi/lib/openai/models/batch_request_counts.rbi
+++ b/rbi/lib/openai/models/batch_request_counts.rbi
@@ -17,12 +17,10 @@ module OpenAI
     # The request counts for different statuses within the batch.
sig { params(completed: Integer, failed: Integer, total: Integer).returns(T.attached_class) } - def self.new(completed:, failed:, total:) - end + def self.new(completed:, failed:, total:); end sig { override.returns({completed: Integer, failed: Integer, total: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/lib/openai/models/batch_retrieve_params.rbi index 002b5af2..7a154d58 100644 --- a/rbi/lib/openai/models/batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/batch_retrieve_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index 3ea623d1..4dbc147a 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -52,11 +52,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) end @@ -136,12 +136,12 @@ module OpenAI model: String, name: T.nilable(String), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], response_format: T.nilable( T.any( @@ -173,9 +173,7 @@ module OpenAI tool_resources: nil, top_p: nil, object: :assistant - ) - end - + ); end sig do override .returns( @@ -189,11 +187,11 @@ module OpenAI name: T.nilable(String), object: Symbol, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], response_format: T.nilable( T.any( @@ -209,8 +207,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter)) } @@ -246,8 +243,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -258,8 +254,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -272,12 +267,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -292,12 +285,10 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def 
self.new(vector_store_ids: nil) - end + def self.new(vector_store_ids: nil); end sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index a75932ff..b225b423 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -108,11 +108,11 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) ) @@ -122,12 +122,12 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) .void @@ -162,12 +162,12 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) @@ -187,9 +187,7 @@ module OpenAI tools: nil, top_p: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -211,19 +209,18 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: OpenAI::RequestOptions } ) end - def to_hash - end + def to_hash; end # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -234,8 +231,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } - def self.variants - end + def self.variants; end end class ToolResources < OpenAI::Internal::Type::BaseModel @@ -278,8 +274,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -290,8 +285,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -304,12 +298,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -337,10 +329,10 @@ module OpenAI sig do params( vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -351,16 +343,15 @@ module OpenAI params( vector_store_ids: T::Array[String], vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .returns(T.attached_class) end - def self.new(vector_store_ids: nil, vector_stores: nil) - end + def self.new(vector_store_ids: nil, vector_stores: nil); end sig do override @@ -371,8 +362,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -431,8 +421,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil) - end + def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end sig do override @@ -447,8 +436,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -463,12 +451,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto) - end + def self.new(type: :auto); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end class Static < OpenAI::Internal::Type::BaseModel @@ -504,8 +490,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static) - end + def self.new(static:, type: :static); end sig do override @@ -516,8 +501,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. 
@@ -537,12 +521,10 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:) - end + def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash - end + def to_hash; end end end @@ -552,8 +534,7 @@ module OpenAI [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/lib/openai/models/beta/assistant_delete_params.rbi index b929b1f8..283d4cf9 100644 --- a/rbi/lib/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_delete_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_deleted.rbi b/rbi/lib/openai/models/beta/assistant_deleted.rbi index 08a9c254..676a92bf 100644 --- a/rbi/lib/openai/models/beta/assistant_deleted.rbi +++ b/rbi/lib/openai/models/beta/assistant_deleted.rbi @@ -14,12 +14,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"assistant.deleted") - end + def self.new(id:, deleted:, object: :"assistant.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 7da7715a..9bf1cab2 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -53,8 +53,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -68,8 +67,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
@@ -84,8 +82,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index 0b95306f..43f47c46 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -32,8 +32,7 @@ module OpenAI [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi index f118b741..195f1fe4 100644 --- a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 86e113fd..407895b1 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -56,12 +56,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, enabled: nil, event: :"thread.created") - end + def self.new(data:, enabled: nil, event: :"thread.created"); end sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } - def to_hash - end + def to_hash; end end class ThreadRunCreated < OpenAI::Internal::Type::BaseModel @@ -82,12 +80,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.created") - end + def self.new(data:, event: :"thread.run.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -108,12 +104,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.queued") - end + def self.new(data:, event: :"thread.run.queued"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -134,12 +128,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.in_progress") - end + def self.new(data:, event: :"thread.run.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -160,12 +152,10 @@ module OpenAI params(data: 
T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.requires_action") - end + def self.new(data:, event: :"thread.run.requires_action"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -186,12 +176,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.completed") - end + def self.new(data:, event: :"thread.run.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -212,12 +200,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.incomplete") - end + def self.new(data:, event: :"thread.run.incomplete"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -238,12 +224,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.failed") - end + def self.new(data:, event: :"thread.run.failed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -264,12 +248,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelling") - end + def self.new(data:, event: :"thread.run.cancelling"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -290,12 +272,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelled") - end + def self.new(data:, event: :"thread.run.cancelled"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -316,12 +296,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.expired") - end + def self.new(data:, event: :"thread.run.expired"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel @@ -345,12 +323,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.created") - end + def self.new(data:, event: :"thread.run.step.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def 
to_hash - end + def to_hash; end end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -374,12 +350,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.in_progress") - end + def self.new(data:, event: :"thread.run.step.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -388,7 +362,9 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void + end attr_writer :data sig { returns(Symbol) } @@ -404,12 +380,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.delta") - end + def self.new(data:, event: :"thread.run.step.delta"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -433,12 +407,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.completed") - end + def self.new(data:, event: :"thread.run.step.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -462,12 +434,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.failed") - end + def self.new(data:, event: :"thread.run.step.failed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -491,12 +461,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.cancelled") - end + def self.new(data:, event: :"thread.run.step.cancelled"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -520,12 +488,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.expired") - end + def self.new(data:, event: :"thread.run.step.expired"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel @@ -547,12 +513,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.created") - end + def self.new(data:, event: :"thread.message.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -574,12 +538,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end 
- def self.new(data:, event: :"thread.message.in_progress") - end + def self.new(data:, event: :"thread.message.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -604,12 +566,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.delta") - end + def self.new(data:, event: :"thread.message.delta"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -631,12 +591,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.completed") - end + def self.new(data:, event: :"thread.message.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -658,12 +616,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.incomplete") - end + def self.new(data:, event: :"thread.message.incomplete"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ErrorEvent < OpenAI::Internal::Type::BaseModel @@ -683,12 +639,10 @@ module OpenAI params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :error) - end + def self.new(data:, event: :error); end sig { override.returns({data: OpenAI::Models::ErrorObject, event: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -697,8 +651,7 @@ module OpenAI [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/lib/openai/models/beta/assistant_tool.rbi index e2eaccc9..b26fd5d8 100644 --- a/rbi/lib/openai/models/beta/assistant_tool.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool.rbi @@ -12,8 +12,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 79dcf2c8..71efdfec 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -23,8 +23,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, function: nil) - end + def self.new(type:, function: nil); end sig do override @@ -35,8 +34,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The type of the tool. If type is `function`, the function name must be set module Type @@ -52,8 +50,7 @@ module OpenAI FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi index bbf08c85..c7334df0 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi @@ -9,12 +9,10 @@ module OpenAI attr_accessor :name sig { params(name: String).returns(T.attached_class) } - def self.new(name:) - end + def self.new(name:); end sig { override.returns({name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 3a3a1f7c..88d3b4e5 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -29,8 +29,7 @@ module OpenAI REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol]) } - def self.values - end + def self.values; end end sig do @@ -39,8 +38,7 @@ module OpenAI [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index f14621d3..10c48190 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -111,11 +111,11 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) ) @@ -125,12 +125,12 @@ 
module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) .void @@ -165,12 +165,12 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) @@ -190,9 +190,7 @@ module OpenAI tools: nil, top_p: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -214,19 +212,18 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: OpenAI::RequestOptions } ) end - def to_hash - end + def to_hash; end # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -237,8 +234,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } - def self.variants - end + def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } OrSymbol = @@ -335,8 +331,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -347,8 +342,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # Overrides the list of @@ -362,12 +356,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -382,12 +374,10 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil) - end + def self.new(vector_store_ids: nil); end sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi index f629a549..cacd14eb 100644 --- a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi @@ -9,12 +9,10 @@ module OpenAI attr_accessor :type sig { 
params(type: Symbol).returns(T.attached_class) } - def self.new(type: :code_interpreter) - end + def self.new(type: :code_interpreter); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 7388dc69..6914e6bb 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -25,12 +25,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_search: nil, type: :file_search) - end + def self.new(file_search: nil, type: :file_search); end sig { override.returns({type: Symbol, file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch}) } - def to_hash - end + def to_hash; end class FileSearch < OpenAI::Internal::Type::BaseModel # The maximum number of results the file search tool should output. The default is @@ -72,8 +70,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(max_num_results: nil, ranking_options: nil) - end + def self.new(max_num_results: nil, ranking_options: nil); end sig do override @@ -81,8 +78,7 @@ module OpenAI {max_num_results: Integer, ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} ) end - def to_hash - end + def to_hash; end class RankingOptions < OpenAI::Internal::Type::BaseModel # The score threshold for the file search. All values must be a floating point @@ -111,8 +107,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(score_threshold:, ranker: nil) - end + def self.new(score_threshold:, ranker: nil); end sig do override @@ -123,8 +118,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
@@ -154,8 +148,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index c1b5305c..b375f86a 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -18,12 +18,10 @@ module OpenAI params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(function:, type: :function) - end + def self.new(function:, type: :function); end sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index fe25c2b0..c312e9f2 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -28,12 +28,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.created") - end + def self.new(data:, event: :"thread.message.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -55,12 +53,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.in_progress") - end + def self.new(data:, event: :"thread.message.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -85,12 +81,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.delta") - end + def self.new(data:, event: :"thread.message.delta"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -112,12 +106,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.completed") - end + def self.new(data:, event: :"thread.message.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -139,12 +131,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.incomplete") - end + def self.new(data:, event: :"thread.message.incomplete"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -153,8 +143,7 @@ module OpenAI [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, 
OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 0b610488..7aff0d4d 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -30,12 +30,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.created") - end + def self.new(data:, event: :"thread.run.step.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -59,12 +57,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.in_progress") - end + def self.new(data:, event: :"thread.run.step.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -73,7 +69,9 @@ module OpenAI sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void } + sig do + params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void + end attr_writer :data sig { returns(Symbol) } @@ -89,12 +87,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.delta") - end + def self.new(data:, event: :"thread.run.step.delta"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -118,12 +114,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.completed") - end + def self.new(data:, event: :"thread.run.step.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -147,12 +141,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.failed") - end + def self.new(data:, event: :"thread.run.step.failed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -176,12 +168,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.cancelled") - end + def self.new(data:, event: :"thread.run.step.cancelled"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -205,12 +195,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.expired") - end + def self.new(data:, event: 
:"thread.run.step.expired"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -219,8 +207,7 @@ module OpenAI [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index cedd2400..de5bcb0f 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -26,12 +26,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.created") - end + def self.new(data:, event: :"thread.run.created"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -52,12 +50,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.queued") - end + def self.new(data:, event: :"thread.run.queued"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -78,12 +74,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.in_progress") - end + def self.new(data:, event: :"thread.run.in_progress"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -104,12 +98,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.requires_action") - end + def self.new(data:, event: :"thread.run.requires_action"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -130,12 +122,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.completed") - end + def self.new(data:, event: :"thread.run.completed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -156,12 +146,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: 
:"thread.run.incomplete") - end + def self.new(data:, event: :"thread.run.incomplete"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -182,12 +170,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.failed") - end + def self.new(data:, event: :"thread.run.failed"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -208,12 +194,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelling") - end + def self.new(data:, event: :"thread.run.cancelling"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -234,12 +218,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelled") - end + def self.new(data:, event: :"thread.run.cancelled"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -260,12 +242,10 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.expired") - end + def self.new(data:, event: :"thread.run.expired"); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -274,8 +254,7 @@ module OpenAI [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index b6dd7fda..0e294e8a 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -52,8 +52,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, metadata:, tool_resources:, object: :thread) - end + def self.new(id:, created_at:, metadata:, tool_resources:, object: :thread); end sig do override @@ -67,8 +66,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter)) } @@ -104,8 +102,7 @@ module OpenAI ) .returns(T.attached_class) end - def 
self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -116,8 +113,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -130,12 +126,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -150,12 +144,10 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil) - end + def self.new(vector_store_ids: nil); end sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index c0aab016..f9240bfe 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -150,11 +150,11 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) ) @@ -214,12 +214,12 @@ module OpenAI tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -247,9 +247,7 @@ module OpenAI top_p: nil, truncation_strategy: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -280,11 +278,11 @@ module OpenAI tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -293,8 +291,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. 
If a value is provided here, it will override the @@ -304,8 +301,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } - def self.variants - end + def self.variants; end end class Thread < OpenAI::Internal::Type::BaseModel @@ -360,8 +356,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(messages: nil, metadata: nil, tool_resources: nil) - end + def self.new(messages: nil, metadata: nil, tool_resources: nil); end sig do override @@ -373,8 +368,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Message < OpenAI::Internal::Type::BaseModel # The text contents of the message. @@ -383,11 +377,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ) ) @@ -421,29 +415,28 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, + OpenAI::Internal::AnyHash + ) ] ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil) - end + def self.new(content:, role:, attachments: nil, metadata: nil); end sig do override @@ -452,11 +445,11 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, @@ -465,8 +458,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The text contents of the message. 
module Content @@ -478,17 +470,16 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ] ) end - def self.variants - end + def self.variants; end MessageContentPartParamArray = T.let( @@ -519,8 +510,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol]) end - def self.values - end + def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel @@ -536,10 +526,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) ] ) ) @@ -549,11 +539,11 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) ] ) .void @@ -564,17 +554,16 @@ module OpenAI params( file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) ] ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil) - end + def self.new(file_id: nil, tools: nil); end sig do override @@ -582,16 +571,15 @@ module OpenAI { file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) ] } ) end - def to_hash - end + def to_hash; end module Tool extend OpenAI::Internal::Type::Union @@ -602,12 +590,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search) - end + def self.new(type: :file_search); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -616,8 +602,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch] ) end - def self.variants - end + def self.variants; end end end end @@ -668,8 +653,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -680,8 +664,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of 
[file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -694,12 +677,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -729,10 +710,10 @@ module OpenAI sig do params( vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -743,16 +724,15 @@ module OpenAI params( vector_store_ids: T::Array[String], vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .returns(T.attached_class) end - def self.new(vector_store_ids: nil, vector_stores: nil) - end + def self.new(vector_store_ids: nil, vector_stores: nil); end sig do override @@ -763,8 +743,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -823,8 +802,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil) - end + def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end sig do override @@ -839,8 +817,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -855,12 +832,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto) - end + def self.new(type: :auto); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end class Static < OpenAI::Internal::Type::BaseModel @@ -896,8 +871,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static) - end + def self.new(static:, type: :static); end sig do override @@ -908,8 +882,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. 
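Illustratively, the chunking-strategy classes in this hunk compose as follows. This is a sketch only: the constant paths are read off the signatures above, the `require "openai"` entry point is inferred from the lib/openai layout, and the static values simply mirror the documented auto defaults (`max_chunk_size_tokens: 800`, `chunk_overlap_tokens: 400`).

    require "openai"  # entry point assumed from the lib/openai layout

    strategy = OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy

    # Hands-off default: the API chooses the chunking parameters.
    auto = strategy::Auto.new

    # Explicit equivalent of the documented defaults, via the nested Static payload.
    static = strategy::Static.new(
      static: strategy::Static::Static.new(
        chunk_overlap_tokens: 400,   # tokens shared between adjacent chunks
        max_chunk_size_tokens: 800   # upper bound on tokens per chunk
      )
    )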
@@ -929,14 +902,12 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:) - end + def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end sig do override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) end - def to_hash - end + def to_hash; end end end @@ -946,8 +917,7 @@ module OpenAI [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] ) end - def self.variants - end + def self.variants; end end end end @@ -1000,8 +970,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -1012,8 +981,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -1026,12 +994,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -1046,12 +1012,10 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil) - end + def self.new(vector_store_ids: nil); end sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end end @@ -1064,8 +1028,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] ) end - def self.variants - end + def self.variants; end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -1090,8 +1053,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, last_messages: nil) - end + def self.new(type:, last_messages: nil); end sig do override @@ -1102,8 +1064,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -1135,8 +1096,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 2f0b0063..285edbe9 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -53,8 +53,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) - end + def self.new(messages: nil, metadata: nil, tool_resources: nil, request_options: {}); end sig do override @@ -67,8 +66,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Message < OpenAI::Internal::Type::BaseModel # The text contents of the message. 
@@ -77,11 +75,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ) ) @@ -115,12 +113,12 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, @@ -131,8 +129,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil) - end + def self.new(content:, role:, attachments: nil, metadata: nil); end sig do override @@ -141,11 +138,11 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, @@ -154,8 +151,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The text contents of the message. 
module Content @@ -167,17 +163,16 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ] ) end - def self.variants - end + def self.variants; end MessageContentPartParamArray = T.let( @@ -203,8 +198,7 @@ module OpenAI ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel @@ -220,10 +214,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) ] ) ) @@ -233,11 +227,11 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) ] ) .void @@ -248,17 +242,16 @@ module OpenAI params( file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) ] ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil) - end + def self.new(file_id: nil, tools: nil); end sig do override @@ -266,16 +259,15 @@ module OpenAI { file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) ] } ) end - def to_hash - end + def to_hash; end module Tool extend OpenAI::Internal::Type::Union @@ -286,12 +278,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search) - end + def self.new(type: :file_search); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -300,8 +290,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] ) end - def self.variants - end + def self.variants; end end end end @@ -340,8 +329,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -352,8 +340,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of 
[file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -366,12 +353,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -399,10 +384,10 @@ module OpenAI sig do params( vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -413,16 +398,15 @@ module OpenAI params( vector_store_ids: T::Array[String], vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) ] ) .returns(T.attached_class) end - def self.new(vector_store_ids: nil, vector_stores: nil) - end + def self.new(vector_store_ids: nil, vector_stores: nil); end sig do override @@ -433,8 +417,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -493,8 +476,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil) - end + def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end sig do override @@ -509,8 +491,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -525,12 +506,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto) - end + def self.new(type: :auto); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end class Static < OpenAI::Internal::Type::BaseModel @@ -566,8 +545,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static) - end + def self.new(static:, type: :static); end sig do override @@ -578,8 +556,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. 
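The same `FileSearch` tool-resource shape recurs here under ThreadCreateParams; a hedged sketch of the two ways it can be populated, where only the keyword names come from the signatures above and every string ID is a placeholder: `vector_store_ids` points the thread at an existing vector store, while `vector_stores` asks the API to build one inline from uploaded files.

    require "openai"

    file_search = OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch

    # Attach a store that already exists ("vs_..." is a placeholder ID).
    existing = file_search.new(vector_store_ids: ["vs_123"])

    # Or create one inline from files ("file-..." are placeholder IDs).
    inline = file_search.new(
      vector_stores: [
        file_search::VectorStore.new(file_ids: ["file-abc", "file-def"])
      ]
    )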
@@ -599,12 +576,10 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:) - end + def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash - end + def to_hash; end end end @@ -614,8 +589,7 @@ module OpenAI [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/lib/openai/models/beta/thread_delete_params.rbi index be5184db..c9083423 100644 --- a/rbi/lib/openai/models/beta/thread_delete_params.rbi +++ b/rbi/lib/openai/models/beta/thread_delete_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/thread_deleted.rbi b/rbi/lib/openai/models/beta/thread_deleted.rbi index 68939994..9944d075 100644 --- a/rbi/lib/openai/models/beta/thread_deleted.rbi +++ b/rbi/lib/openai/models/beta/thread_deleted.rbi @@ -14,12 +14,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"thread.deleted") - end + def self.new(id:, deleted:, object: :"thread.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi index e42b6057..7658004e 100644 --- a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/thread_retrieve_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index 1f9f6531..e2268f47 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -33,12 +33,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, enabled: nil, event: :"thread.created") - end + def self.new(data:, enabled: nil, event: :"thread.created"); end sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index a1f2bf11..4ec25c94 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -39,8 +39,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(metadata: nil, tool_resources: nil, request_options: {}) - end + def self.new(metadata: nil, tool_resources: nil, request_options: {}); end sig do override @@ -52,8 +51,7 @@ module 
OpenAI } ) end - def to_hash - end + def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter)) } @@ -89,8 +87,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(code_interpreter: nil, file_search: nil) - end + def self.new(code_interpreter: nil, file_search: nil); end sig do override @@ -101,8 +98,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -115,12 +111,10 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil) - end + def self.new(file_ids: nil); end sig { override.returns({file_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end class FileSearch < OpenAI::Internal::Type::BaseModel @@ -135,12 +129,10 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil) - end + def self.new(vector_store_ids: nil); end sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 2579d047..0b4b796a 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -16,8 +16,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index a0c01cad..26a9fe9b 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -16,8 +16,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index b87f7920..e29ed266 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -43,8 +43,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(end_index:, file_citation:, start_index:, text:, type: :file_citation) - end + def self.new(end_index:, file_citation:, start_index:, text:, type: :file_citation); end sig do override @@ -58,8 +57,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FileCitation < OpenAI::Internal::Type::BaseModel # The ID of the specific File the citation is from. 
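The `Annotation` union above lists exactly two variants, so consuming code can dispatch by class. A minimal sketch, assuming `annotation` is one element of a text content block's annotations and that the accessors match the attribute names in the signatures here:

    case annotation
    when OpenAI::Models::Beta::Threads::FileCitationAnnotation
      # A quotation from an uploaded file, covering text[start_index...end_index].
      cited_file = annotation.file_citation.file_id
    when OpenAI::Models::Beta::Threads::FilePathAnnotation
      # A reference to a file the assistant generated for download.
      generated_file = annotation.file_path.file_id
    end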
@@ -67,12 +65,10 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:) - end + def self.new(file_id:); end sig { override.returns({file_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index c8a41dac..3a09db06 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -80,8 +80,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FileCitation < OpenAI::Internal::Type::BaseModel # The ID of the specific File the citation is from. @@ -99,12 +98,10 @@ module OpenAI attr_writer :quote sig { params(file_id: String, quote: String).returns(T.attached_class) } - def self.new(file_id: nil, quote: nil) - end + def self.new(file_id: nil, quote: nil); end sig { override.returns({file_id: String, quote: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index d3f54f98..93c25e4e 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -42,8 +42,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(end_index:, file_path:, start_index:, text:, type: :file_path) - end + def self.new(end_index:, file_path:, start_index:, text:, type: :file_path); end sig do override @@ -57,8 +56,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file that was generated. @@ -66,12 +64,10 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:) - end + def self.new(file_id:); end sig { override.returns({file_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index a47885b8..e8f4b3d2 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -72,8 +72,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FilePath < OpenAI::Internal::Type::BaseModel # The ID of the file that was generated. 
@@ -84,12 +83,10 @@ module OpenAI attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id: nil) - end + def self.new(file_id: nil); end sig { override.returns({file_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index f16895d6..25343dd0 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -23,12 +23,10 @@ module OpenAI params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) .returns(T.attached_class) end - def self.new(file_id:, detail: nil) - end + def self.new(file_id:, detail: nil); end sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } - def to_hash - end + def to_hash; end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -44,8 +42,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 133cd72c..b50f4624 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -24,12 +24,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_file:, type: :image_file) - end + def self.new(image_file:, type: :image_file); end sig { override.returns({image_file: OpenAI::Models::Beta::Threads::ImageFile, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index 6d41ec50..da45783e 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -26,15 +26,13 @@ module OpenAI params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) .returns(T.attached_class) end - def self.new(detail: nil, file_id: nil) - end + def self.new(detail: nil, file_id: nil); end sig do override .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) end - def to_hash - end + def to_hash; end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
@@ -50,8 +48,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 7cb2f9a0..4dfa0c31 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -29,14 +29,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, image_file: nil, type: :image_file) - end + def self.new(index:, image_file: nil, type: :image_file); end sig do override.returns({index: Integer, type: Symbol, image_file: OpenAI::Models::Beta::Threads::ImageFileDelta}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 7bfaa664..d0e2b929 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -22,12 +22,10 @@ module OpenAI params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) .returns(T.attached_class) end - def self.new(url:, detail: nil) - end + def self.new(url:, detail: nil); end sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) } - def to_hash - end + def to_hash; end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` @@ -43,8 +41,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi index 2dff246b..a960242b 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi @@ -20,12 +20,10 @@ module OpenAI params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(image_url:, type: :image_url) - end + def self.new(image_url:, type: :image_url); end sig { override.returns({image_url: OpenAI::Models::Beta::Threads::ImageURL, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 0fbc73d8..5efc1733 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -25,14 +25,12 @@ module OpenAI params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String) .returns(T.attached_class) end - def self.new(detail: nil, url: nil) - end + def self.new(detail: nil, url: nil); end sig do override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String}) end - def to_hash - end + def to_hash; end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
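Worth noting how the image types above compose: a content block wraps either an uploaded file reference or an external URL, and `detail` trades token cost against resolution (`:low` is cheaper, `:high` is opt-in, and `auto` is the documented default). A sketch with placeholder ID and URL:

    require "openai"

    threads = OpenAI::Models::Beta::Threads

    from_file = threads::ImageFileContentBlock.new(
      image_file: threads::ImageFile.new(file_id: "file-abc123", detail: :low)
    )

    from_url = threads::ImageURLContentBlock.new(
      image_url: threads::ImageURL.new(url: "https://example.com/diagram.png", detail: :high)
    )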
@@ -48,8 +46,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi index 75462fba..ed36acf3 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi @@ -28,12 +28,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, image_url: nil, type: :image_url) - end + def self.new(index:, image_url: nil, type: :image_url); end sig { override.returns({index: Integer, type: Symbol, image_url: OpenAI::Models::Beta::Threads::ImageURLDelta}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 6c7e5c86..29e9be2a 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -27,12 +27,12 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlock, + OpenAI::Models::Beta::Threads::RefusalContentBlock + ) ] ) end @@ -100,13 +100,13 @@ module OpenAI attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Internal::AnyHash)]), completed_at: T.nilable(Integer), content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlock, + OpenAI::Models::Beta::Threads::RefusalContentBlock + ) ], created_at: Integer, incomplete_at: T.nilable(Integer), @@ -135,9 +135,7 @@ module OpenAI status:, thread_id:, object: :"thread.message" - ) - end - + ); end sig do override .returns( @@ -147,12 +145,12 @@ module OpenAI attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment]), completed_at: T.nilable(Integer), content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlock, + OpenAI::Models::Beta::Threads::RefusalContentBlock + ) ], created_at: Integer, incomplete_at: T.nilable(Integer), @@ -166,8 +164,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Attachment < OpenAI::Internal::Type::BaseModel # The ID of the file to attach to the message. 
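The `Message#content` array above is a union of four block types, so readers typically dispatch by class, much as with annotations. A hedged sketch, where `message` and `handle_image` are hypothetical and the accessor names follow the signatures in this file:

    message.content.each do |block|
      case block
      when OpenAI::Models::Beta::Threads::TextContentBlock
        # Text plus any nested file-citation or file-path annotations.
      when OpenAI::Models::Beta::Threads::ImageFileContentBlock
        handle_image(block.image_file.file_id)
      when OpenAI::Models::Beta::Threads::ImageURLContentBlock
        handle_image(block.image_url.url)
      when OpenAI::Models::Beta::Threads::RefusalContentBlock
        warn("assistant refused: #{block.refusal}")
      end
    end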
@@ -182,10 +179,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) ] ) ) @@ -195,11 +192,11 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) ] ) .void @@ -210,17 +207,16 @@ module OpenAI params( file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) ] ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil) - end + def self.new(file_id: nil, tools: nil); end sig do override @@ -228,16 +224,15 @@ module OpenAI { file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) ] } ) end - def to_hash - end + def to_hash; end module Tool extend OpenAI::Internal::Type::Union @@ -248,12 +243,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search) - end + def self.new(type: :file_search); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -262,8 +255,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] ) end - def self.variants - end + def self.variants; end end end @@ -277,14 +269,12 @@ module OpenAI params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason:) - end + def self.new(reason:); end sig do override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) end - def to_hash - end + def to_hash; end # The reason the message is incomplete. 
module Reason @@ -309,8 +299,7 @@ module OpenAI sig do override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol]) end - def self.values - end + def self.values; end end end @@ -326,8 +315,7 @@ module OpenAI ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -344,8 +332,7 @@ module OpenAI COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index eae2a6d4..148ce53f 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index 7be07281..c1dba8a3 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index a6e0c71f..012559c9 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 5ed1214c..32a91cfd 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -14,11 +14,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ) ) @@ -52,12 +52,12 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - 
OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, @@ -69,8 +69,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil, request_options: {}) - end + def self.new(content:, role:, attachments: nil, metadata: nil, request_options: {}); end sig do override @@ -79,11 +78,11 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, @@ -93,8 +92,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The text contents of the message. module Content @@ -106,17 +104,16 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ] ) end - def self.variants - end + def self.variants; end MessageContentPartParamArray = T.let( @@ -142,8 +139,7 @@ module OpenAI ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel @@ -159,10 +155,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) ] ) ) @@ -172,11 +168,11 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) ] ) .void @@ -187,17 +183,16 @@ module OpenAI params( file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) ] ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil) - end + def self.new(file_id: nil, tools: nil); end sig do override @@ -205,16 +200,15 @@ module OpenAI { file_id: String, tools: 
T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) ] } ) end - def to_hash - end + def to_hash; end module Tool extend OpenAI::Internal::Type::Union @@ -225,12 +219,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search) - end + def self.new(type: :file_search); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -239,8 +231,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi index 0dfa827d..8f25de1b 100644 --- a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delete_params.rbi @@ -21,12 +21,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, request_options: {}) - end + def self.new(thread_id:, request_options: {}); end sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_deleted.rbi b/rbi/lib/openai/models/beta/threads/message_deleted.rbi index fd6cfd06..b5da3306 100644 --- a/rbi/lib/openai/models/beta/threads/message_deleted.rbi +++ b/rbi/lib/openai/models/beta/threads/message_deleted.rbi @@ -15,12 +15,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"thread.message.deleted") - end + def self.new(id:, deleted:, object: :"thread.message.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 5ae1fdd1..cce9e935 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -10,12 +10,12 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + ) ] ) ) @@ -25,13 +25,13 @@ module OpenAI sig do params( content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + 
) ] ) .void @@ -49,39 +49,37 @@ module OpenAI sig do params( content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + ) ], role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol ) .returns(T.attached_class) end - def self.new(content: nil, role: nil) - end + def self.new(content: nil, role: nil); end sig do override .returns( { content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Models::Beta::Threads::TextDeltaBlock, + OpenAI::Models::Beta::Threads::RefusalDeltaBlock, + OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + ) ], role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol } ) end - def to_hash - end + def to_hash; end # The entity that produced the message. One of `user` or `assistant`. module Role @@ -95,8 +93,7 @@ module OpenAI ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 3fd98304..359c70cd 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -30,12 +30,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, delta:, object: :"thread.message.delta") - end + def self.new(id:, delta:, object: :"thread.message.delta"); end sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::MessageDelta, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 2a25adec..66577b99 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -62,8 +62,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) - end + def self.new(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}); end sig do override @@ -78,8 +77,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
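A quick sketch of the list parameters above, since the cursor semantics are easy to misread: `after`/`before` take object IDs from a previous page, `limit` caps page size, and `order` takes the `asc`/`desc` symbols from the enum that follows. All values here are placeholders:

    require "openai"

    params = OpenAI::Models::Beta::Threads::MessageListParams.new(
      limit: 20,         # page size
      order: :desc,      # newest first by created_at
      after: "msg_123",  # cursor: start after this message ID
      run_id: "run_456"  # optional filter to one run's messages
    )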
@@ -94,8 +92,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi index 62cc19a2..1818fc76 100644 --- a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi @@ -21,12 +21,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, request_options: {}) - end + def self.new(thread_id:, request_options: {}); end sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 51074aac..90b4db86 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -28,8 +28,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, metadata: nil, request_options: {}) - end + def self.new(thread_id:, metadata: nil, request_options: {}); end sig do override @@ -41,8 +40,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi index 334ce796..591afe27 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi @@ -14,12 +14,10 @@ module OpenAI # The refusal content generated by the assistant. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(refusal:, type: :refusal) - end + def self.new(refusal:, type: :refusal); end sig { override.returns({refusal: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi index 2caef1c7..8c9e2d1f 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi @@ -21,12 +21,10 @@ module OpenAI # The refusal content that is part of a message. 
sig { params(index: Integer, refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(index:, refusal: nil, type: :refusal) - end + def self.new(index:, refusal: nil, type: :refusal); end sig { override.returns({index: Integer, type: Symbol, refusal: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 795f0789..6aa38a1e 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -38,8 +38,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function) - end + def self.new(id:, function:, type: :function); end sig do override @@ -47,8 +46,7 @@ module OpenAI {id: String, function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, type: Symbol} ) end - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The arguments that the model expects you to pass to the function. @@ -61,12 +59,10 @@ module OpenAI # The function definition. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:) - end + def self.new(arguments:, name:); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 47341501..7170660e 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -188,11 +188,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) end @@ -267,12 +267,12 @@ module OpenAI ) ), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::AnyHash)), usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::AnyHash)), @@ -310,9 +310,7 @@ module OpenAI temperature: nil, top_p: nil, object: :"thread.run" - ) - end - + ); end sig do override .returns( @@ -352,11 +350,11 @@ module OpenAI ) ), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy), usage: T.nilable(OpenAI::Models::Beta::Threads::Run::Usage), @@ -365,8 +363,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason why the run is incomplete. 
This will point to which specific token @@ -383,12 +380,10 @@ module OpenAI params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason: nil) - end + def self.new(reason: nil); end sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } - def to_hash - end + def to_hash; end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -406,8 +401,7 @@ module OpenAI T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -425,15 +419,13 @@ module OpenAI params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:) - end + def self.new(code:, message:); end sig do override .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) end - def to_hash - end + def to_hash; end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. module Code @@ -450,8 +442,7 @@ module OpenAI T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -481,8 +472,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(submit_tool_outputs:, type: :submit_tool_outputs) - end + def self.new(submit_tool_outputs:, type: :submit_tool_outputs); end sig do override @@ -490,8 +480,7 @@ module OpenAI {submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, type: Symbol} ) end - def to_hash - end + def to_hash; end class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # A list of the relevant tool calls. @@ -505,12 +494,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(tool_calls:) - end + def self.new(tool_calls:); end sig { override.returns({tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]}) } - def to_hash - end + def to_hash; end end end @@ -536,8 +523,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, last_messages: nil) - end + def self.new(type:, last_messages: nil); end sig do override @@ -548,8 +534,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. 
If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -568,8 +553,7 @@ module OpenAI T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -595,14 +579,12 @@ module OpenAI total_tokens: Integer ).returns(T.attached_class) end - def self.new(completion_tokens:, prompt_tokens:, total_tokens:) - end + def self.new(completion_tokens:, prompt_tokens:, total_tokens:); end sig do override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi index d82ac9df..829a2ec4 100644 --- a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi @@ -21,12 +21,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, request_options: {}) - end + def self.new(thread_id:, request_options: {}); end sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index a34ebb8a..58c097df 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -158,11 +158,11 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ) ) @@ -226,12 +226,12 @@ module OpenAI ), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -261,9 +261,7 @@ module OpenAI top_p: nil, truncation_strategy: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -296,11 +294,11 @@ module OpenAI ), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -309,8 +307,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class AdditionalMessage < OpenAI::Internal::Type::BaseModel # The text contents of the message. 
@@ -319,11 +316,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ) ) @@ -361,29 +358,28 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, + OpenAI::Internal::AnyHash + ) ] ), metadata: T.nilable(T::Hash[Symbol, String]) ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil) - end + def self.new(content:, role:, attachments: nil, metadata: nil); end sig do override @@ -392,11 +388,11 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, @@ -405,8 +401,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The text contents of the message. 
module Content @@ -418,17 +413,16 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ] ) end - def self.variants - end + def self.variants; end MessageContentPartParamArray = T.let( @@ -466,8 +460,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol]) end - def self.values - end + def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel @@ -483,10 +476,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) ] ) ) @@ -496,11 +489,11 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) ] ) .void @@ -511,17 +504,16 @@ module OpenAI params( file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) ] ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil) - end + def self.new(file_id: nil, tools: nil); end sig do override @@ -529,16 +521,15 @@ module OpenAI { file_id: String, tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) ] } ) end - def to_hash - end + def to_hash; end module Tool extend OpenAI::Internal::Type::Union @@ -549,12 +540,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search) - end + def self.new(type: :file_search); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -563,8 +552,7 @@ module OpenAI [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] ) end - def self.variants - end + def self.variants; end end end end @@ -577,8 +565,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } - def self.variants - end + def self.variants; end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -603,8 +590,7 @@ module OpenAI ) .returns(T.attached_class) end - def 
self.new(type:, last_messages: nil) - end + def self.new(type:, last_messages: nil); end sig do override @@ -615,8 +601,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -648,8 +633,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 17f72cc4..e266f66c 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -54,8 +54,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -69,8 +68,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -85,8 +83,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi index 60f2a769..aa6cb4b9 100644 --- a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi @@ -21,12 +21,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, request_options: {}) - end + def self.new(thread_id:, request_options: {}); end sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index e9d84d19..5de3efc2 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -24,8 +24,7 @@ module OpenAI EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 1c8321a5..2ae4e56d 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -23,8 +23,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, tool_outputs:, request_options: {}) - end + def self.new(thread_id:, tool_outputs:, request_options: {}); end sig do override @@ -36,8 +35,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class ToolOutput < OpenAI::Internal::Type::BaseModel # The output of the tool call to be submitted to continue the run. 
@@ -56,12 +54,10 @@ module OpenAI attr_writer :tool_call_id sig { params(output: String, tool_call_id: String).returns(T.attached_class) } - def self.new(output: nil, tool_call_id: nil) - end + def self.new(output: nil, tool_call_id: nil); end sig { override.returns({output: String, tool_call_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index a1a1a651..28bb34de 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -28,8 +28,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, metadata: nil, request_options: {}) - end + def self.new(thread_id:, metadata: nil, request_options: {}); end sig do override @@ -41,8 +40,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi index 20f4803a..a751e294 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -23,12 +23,10 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. sig { params(index: Integer, logs: String, type: Symbol).returns(T.attached_class) } - def self.new(index:, logs: nil, type: :logs) - end + def self.new(index:, logs: nil, type: :logs); end sig { override.returns({index: Integer, type: Symbol, logs: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index bff183df..72557474 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -33,8 +33,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, image: nil, type: :image) - end + def self.new(index:, image: nil, type: :image); end sig do override @@ -42,8 +41,7 @@ module OpenAI {index: Integer, type: Symbol, image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} ) end - def to_hash - end + def to_hash; end class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -55,12 +53,10 @@ module OpenAI attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id: nil) - end + def self.new(file_id: nil); end sig { override.returns({file_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index dcf372d2..82827731 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -42,8 +42,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, code_interpreter:, type: :code_interpreter) - end + def self.new(id:, code_interpreter:, type: :code_interpreter); end sig do override @@ -55,8 +54,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # The input to the 
Code Interpreter tool call. @@ -69,10 +67,10 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) ] ) end @@ -83,17 +81,16 @@ module OpenAI params( input: String, outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) ] ) .returns(T.attached_class) end - def self.new(input:, outputs:) - end + def self.new(input:, outputs:); end sig do override @@ -101,16 +98,15 @@ module OpenAI { input: String, outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) ] } ) end - def to_hash - end + def to_hash; end # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -127,12 +123,10 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. 
sig { params(logs: String, type: Symbol).returns(T.attached_class) } - def self.new(logs:, type: :logs) - end + def self.new(logs:, type: :logs); end sig { override.returns({logs: String, type: Symbol}) } - def to_hash - end + def to_hash; end end class Image < OpenAI::Internal::Type::BaseModel @@ -168,8 +162,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image:, type: :image) - end + def self.new(image:, type: :image); end sig do override @@ -180,8 +173,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -190,12 +182,10 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:) - end + def self.new(file_id:); end sig { override.returns({file_id: String}) } - def to_hash - end + def to_hash; end end end @@ -205,8 +195,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index a3b42808..0864612a 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -50,8 +50,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, code_interpreter: nil, type: :code_interpreter) - end + def self.new(index:, id: nil, code_interpreter: nil, type: :code_interpreter); end sig do override @@ -64,8 +63,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel # The input to the Code Interpreter tool call. 
@@ -82,10 +80,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) ] ) ) @@ -95,11 +93,11 @@ module OpenAI sig do params( outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) ] ) .void @@ -111,17 +109,16 @@ module OpenAI params( input: String, outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) ] ) .returns(T.attached_class) end - def self.new(input: nil, outputs: nil) - end + def self.new(input: nil, outputs: nil); end sig do override @@ -129,16 +126,15 @@ module OpenAI { input: String, outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + ) ] } ) end - def to_hash - end + def to_hash; end # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -150,8 +146,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index bdd404fb..871e455f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, file_search:, type: :file_search) - end + def self.new(id:, file_search:, type: :file_search); end sig do override @@ -44,8 +43,7 @@ module OpenAI {id: String, file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, type: Symbol} ) end - def to_hash - end + def to_hash; end class FileSearch < OpenAI::Internal::Type::BaseModel # The ranking options for the file search. 
@@ -70,10 +68,10 @@ module OpenAI sig do params( results: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -88,16 +86,15 @@ module OpenAI OpenAI::Internal::AnyHash ), results: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, + OpenAI::Internal::AnyHash + ) ] ) .returns(T.attached_class) end - def self.new(ranking_options: nil, results: nil) - end + def self.new(ranking_options: nil, results: nil); end sig do override @@ -108,8 +105,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. If not specified will use the `auto` @@ -134,8 +130,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranker:, score_threshold:) - end + def self.new(ranker:, score_threshold:); end sig do override @@ -146,8 +141,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` # ranker. @@ -182,8 +176,7 @@ module OpenAI T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol] ) end - def self.values - end + def self.values; end end end @@ -213,10 +206,10 @@ module OpenAI sig do params( content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -230,16 +223,15 @@ module OpenAI file_name: String, score: Float, content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Internal::AnyHash + ) ] ) .returns(T.attached_class) end - def self.new(file_id:, file_name:, score:, content: nil) - end + def self.new(file_id:, file_name:, score:, content: nil); end sig do override @@ -252,8 +244,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Content < OpenAI::Internal::Type::BaseModel # The text content of the file. @@ -288,8 +279,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(text: nil, type: nil) - end + def self.new(text: nil, type: nil); end sig do override @@ -300,8 +290,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The type of the content. 
module Type @@ -330,8 +319,7 @@ module OpenAI T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol] ) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 5474e165..2714a161 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -34,12 +34,10 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(file_search:, index:, id: nil, type: :file_search) - end + def self.new(file_search:, index:, id: nil, type: :file_search); end sig { override.returns({file_search: T.anything, index: Integer, type: Symbol, id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 2d1aa5a6..2454344b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function) - end + def self.new(id:, function:, type: :function); end sig do override @@ -44,8 +43,7 @@ module OpenAI {id: String, function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, type: Symbol} ) end - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The arguments passed to the function. @@ -66,12 +64,10 @@ module OpenAI sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end - def self.new(arguments:, name:, output:) - end + def self.new(arguments:, name:, output:); end sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 22b29632..f275cddb 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -43,8 +43,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, function: nil, type: :function) - end + def self.new(index:, id: nil, function: nil, type: :function); end sig do override @@ -57,8 +56,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The arguments passed to the function. 
@@ -85,12 +83,10 @@ module OpenAI sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end - def self.new(arguments: nil, name: nil, output: nil) - end + def self.new(arguments: nil, name: nil, output: nil); end sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index f279cb12..583a179d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(message_creation:, type: :message_creation) - end + def self.new(message_creation:, type: :message_creation); end sig do override @@ -47,8 +46,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class MessageCreation < OpenAI::Internal::Type::BaseModel # The ID of the message that was created by this run step. @@ -56,12 +54,10 @@ module OpenAI attr_accessor :message_id sig { params(message_id: String).returns(T.attached_class) } - def self.new(message_id:) - end + def self.new(message_id:); end sig { override.returns({message_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index af9b15a8..a5ddcb52 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -149,9 +149,7 @@ module OpenAI type:, usage:, object: :"thread.run.step" - ) - end - + ); end sig do override .returns( @@ -178,8 +176,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class LastError < OpenAI::Internal::Type::BaseModel # One of `server_error` or `rate_limit_exceeded`. @@ -196,8 +193,7 @@ module OpenAI params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:) - end + def self.new(code:, message:); end sig do override @@ -205,8 +201,7 @@ module OpenAI {code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String} ) end - def to_hash - end + def to_hash; end # One of `server_error` or `rate_limit_exceeded`. module Code @@ -223,8 +218,7 @@ module OpenAI T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -244,8 +238,7 @@ module OpenAI EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end # The details of the run step. @@ -258,8 +251,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] ) end - def self.variants - end + def self.variants; end end # The type of run step, which can be either `message_creation` or `tool_calls`. 
@@ -275,8 +267,7 @@ module OpenAI TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end class Usage < OpenAI::Internal::Type::BaseModel @@ -301,14 +292,12 @@ module OpenAI total_tokens: Integer ).returns(T.attached_class) end - def self.new(completion_tokens:, prompt_tokens:, total_tokens:) - end + def self.new(completion_tokens:, prompt_tokens:, total_tokens:); end sig do override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index a599ca89..561b30d0 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -42,8 +42,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(step_details: nil) - end + def self.new(step_details: nil); end sig do override @@ -56,8 +55,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The details of the run step. module StepDetails @@ -69,8 +67,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index fbaa33df..4fb76535 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -31,12 +31,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, delta:, object: :"thread.run.step.delta") - end + def self.new(id:, delta:, object: :"thread.run.step.delta"); end sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, object: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index d4dd14e5..2b151753 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(message_creation: nil, type: :message_creation) - end + def self.new(message_creation: nil, type: :message_creation); end sig do override @@ -47,8 +46,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class MessageCreation < OpenAI::Internal::Type::BaseModel # The ID of the message that was created by this run step. 
@@ -59,12 +57,10 @@ module OpenAI attr_writer :message_id sig { params(message_id: String).returns(T.attached_class) } - def self.new(message_id: nil) - end + def self.new(message_id: nil); end sig { override.returns({message_id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index bee1919e..2261f9ce 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -19,8 +19,7 @@ module OpenAI ) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol]) } - def self.values - end + def self.values; end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index e48d41bb..d9609bca 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -98,8 +98,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -114,8 +113,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index bd43d059..827fe79e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -37,8 +37,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, run_id:, include: nil, request_options: {}) - end + def self.new(thread_id:, run_id:, include: nil, request_options: {}); end sig do override @@ -51,8 +50,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi index 8bdd948f..bd900d9c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi index a1e18b64..336a93e9 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index d9051b8e..9b96f0e3 100644 --- 
a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -17,11 +17,11 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) ] ) ) @@ -31,12 +31,12 @@ module OpenAI sig do params( tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) ] ) .void @@ -47,19 +47,18 @@ module OpenAI sig do params( tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) ], type: Symbol ) .returns(T.attached_class) end - def self.new(tool_calls: nil, type: :tool_calls) - end + def self.new(tool_calls: nil, type: :tool_calls); end sig do override @@ -67,17 +66,16 @@ module OpenAI { type: Symbol, tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + ) ] } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index b85467c4..7eb65800 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -12,11 +12,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + ) ] ) end @@ -30,37 +30,35 @@ module OpenAI sig do params( tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) + T.any( + 
OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + ) ], type: Symbol ) .returns(T.attached_class) end - def self.new(tool_calls:, type: :tool_calls) - end + def self.new(tool_calls:, type: :tool_calls); end sig do override .returns( { tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) + T.any( + OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + ) ], type: Symbol } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 933e4630..0c5b65ab 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -8,10 +8,10 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Models::Beta::Threads::FilePathAnnotation + ) ] ) end @@ -24,35 +24,33 @@ module OpenAI sig do params( annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::FilePathAnnotation + ) ], value: String ) .returns(T.attached_class) end - def self.new(annotations:, value:) - end + def self.new(annotations:, value:); end sig do override .returns( { annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationAnnotation, + OpenAI::Models::Beta::Threads::FilePathAnnotation + ) ], value: String } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 3838ff04..60e48d2f 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -20,12 +20,10 @@ module OpenAI params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(text:, type: :text) - end + def self.new(text:, type: :text); end sig { override.returns({text: OpenAI::Models::Beta::Threads::Text, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi index 94f3431d..ecf224cb 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi @@ -15,12 +15,10 @@ module OpenAI # The text content that is part of a message. 
sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text) - end + def self.new(text:, type: :text); end sig { override.returns({text: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 8fda3e45..7776fa38 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -9,10 +9,10 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) ] ) ) @@ -22,11 +22,11 @@ module OpenAI sig do params( annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) ] ) .void @@ -43,35 +43,33 @@ module OpenAI sig do params( annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) ], value: String ) .returns(T.attached_class) end - def self.new(annotations: nil, value: nil) - end + def self.new(annotations: nil, value: nil); end sig do override .returns( { annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) + T.any( + OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + ) ], value: String } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index d3d54de5..d22fc8db 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -28,12 +28,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, text: nil, type: :text) - end + def self.new(index:, text: nil, type: :text); end sig { override.returns({index: Integer, type: Symbol, text: OpenAI::Models::Beta::Threads::TextDelta}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 829cff8c..c871533c 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -70,9 +70,7 @@ module OpenAI system_fingerprint: nil, usage: nil, object: :"chat.completion" - ) - end - + ); end sig do override .returns( @@ -88,8 +86,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Choice < OpenAI::Internal::Type::BaseModel # The reason the model stopped generating tokens. 
This will be `stop` if the model @@ -133,8 +130,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(finish_reason:, index:, logprobs:, message:) - end + def self.new(finish_reason:, index:, logprobs:, message:); end sig do override @@ -147,8 +143,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -172,8 +167,7 @@ module OpenAI T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol]) } - def self.values - end + def self.values; end end class Logprobs < OpenAI::Internal::Type::BaseModel @@ -193,8 +187,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, refusal:) - end + def self.new(content:, refusal:); end sig do override @@ -205,8 +198,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end @@ -222,8 +214,7 @@ module OpenAI DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index a6f8cec6..8da1d510 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -31,10 +31,10 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) ] ) ) @@ -91,11 +91,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) ] ) ), @@ -130,10 +130,10 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) ] ) ), @@ -144,8 +144,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Audio < OpenAI::Internal::Type::BaseModel # Unique identifier for a previous audio response from the model. @@ -155,12 +154,10 @@ module OpenAI # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). sig { params(id: String).returns(T.attached_class) } - def self.new(id:) - end + def self.new(id:); end sig { override.returns({id: String}) } - def to_hash - end + def to_hash; end end # The contents of the assistant message. 
Required unless `tool_calls` or @@ -179,8 +176,7 @@ module OpenAI [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] ) end - def self.variants - end + def self.variants; end end sig do @@ -189,16 +185,15 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartRefusal + ) ] ] ) end - def self.variants - end + def self.variants; end ArrayOfContentPartArray = T.let( @@ -222,12 +217,10 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:) - end + def self.new(arguments:, name:); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index fa10bf84..66867567 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -28,12 +28,10 @@ module OpenAI sig do params(id: String, data: String, expires_at: Integer, transcript: String).returns(T.attached_class) end - def self.new(id:, data:, expires_at:, transcript:) - end + def self.new(id:, data:, expires_at:, transcript:); end sig { override.returns({id: String, data: String, expires_at: Integer, transcript: String}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index 29b450f1..e1aa3707 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -24,8 +24,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(format_:, voice:) - end + def self.new(format_:, voice:); end sig do override @@ -36,8 +35,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. @@ -55,8 +53,7 @@ module OpenAI PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol]) } - def self.values - end + def self.values; end end # The voice the model uses to respond. 
Supported voices are `alloy`, `ash`, @@ -65,8 +62,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } - def self.variants - end + def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } OrSymbol = diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index d93d792a..f8eb2225 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -78,9 +78,7 @@ module OpenAI system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk" - ) - end - + ); end sig do override .returns( @@ -96,8 +94,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Choice < OpenAI::Internal::Type::BaseModel # A chat completion delta generated by streamed model responses. @@ -144,8 +141,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(delta:, finish_reason:, index:, logprobs: nil) - end + def self.new(delta:, finish_reason:, index:, logprobs: nil); end sig do override @@ -158,8 +154,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Delta < OpenAI::Internal::Type::BaseModel # The contents of the chunk message. @@ -212,8 +207,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) - end + def self.new(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil); end sig do override @@ -227,8 +221,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON @@ -251,12 +244,10 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments: nil, name: nil) - end + def self.new(arguments: nil, name: nil); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end # The role of the author of this message. @@ -277,8 +268,7 @@ module OpenAI TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end class ToolCall < OpenAI::Internal::Type::BaseModel @@ -325,8 +315,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, function: nil, type: nil) - end + def self.new(index:, id: nil, function: nil, type: nil); end sig do override @@ -339,8 +328,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON @@ -361,12 +349,10 @@ module OpenAI attr_writer :name sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments: nil, name: nil) - end + def self.new(arguments: nil, name: nil); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end # The type of the tool. Currently, only `function` is supported. 
@@ -391,8 +377,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol]) end - def self.values - end + def self.values; end end end end @@ -421,8 +406,7 @@ module OpenAI T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol]) } - def self.values - end + def self.values; end end class Logprobs < OpenAI::Internal::Type::BaseModel @@ -442,8 +426,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, refusal:) - end + def self.new(content:, refusal:); end sig do override @@ -454,8 +437,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end @@ -471,8 +453,7 @@ module OpenAI DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index f53e1e23..3f40e799 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -33,12 +33,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file:, type: :file) - end + def self.new(file:, type: :file); end sig { override.returns({file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, type: Symbol}) } - def to_hash - end + def to_hash; end class File < OpenAI::Internal::Type::BaseModel # The base64 encoded file data, used when passing the file to the model as a @@ -64,12 +62,10 @@ module OpenAI attr_writer :filename sig { params(file_data: String, file_id: String, filename: String).returns(T.attached_class) } - def self.new(file_data: nil, file_id: nil, filename: nil) - end + def self.new(file_data: nil, file_id: nil, filename: nil); end sig { override.returns({file_data: String, file_id: String, filename: String}) } - def to_hash - end + def to_hash; end end end @@ -79,8 +75,7 @@ module OpenAI [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] ) end - def self.variants - end + def self.variants; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 3bb8586b..8f1d2dba 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -27,14 +27,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_url:, type: :image_url) - end + def self.new(image_url:, type: :image_url); end sig do override.returns({image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, type: Symbol}) end - def to_hash - end + def to_hash; end class ImageURL < OpenAI::Internal::Type::BaseModel # Either a URL of the image or the base64 encoded image data. 
@@ -56,8 +54,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(url:, detail: nil) - end + def self.new(url:, detail: nil); end sig do override @@ -65,8 +62,7 @@ module OpenAI {url: String, detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol} ) end - def to_hash - end + def to_hash; end # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). @@ -92,8 +88,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 19fda6e5..d365d7c9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -27,8 +27,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input_audio:, type: :input_audio) - end + def self.new(input_audio:, type: :input_audio); end sig do override @@ -36,8 +35,7 @@ module OpenAI {input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, type: Symbol} ) end - def to_hash - end + def to_hash; end class InputAudio < OpenAI::Internal::Type::BaseModel # Base64 encoded audio data. @@ -55,8 +53,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, format_:) - end + def self.new(data:, format_:); end sig do override @@ -67,8 +64,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The format of the encoded audio data. Currently supports "wav" and "mp3". module Format @@ -96,8 +92,7 @@ module OpenAI T::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol] ) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 7431214c..6aedee59 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -13,12 +13,10 @@ module OpenAI attr_accessor :type sig { params(refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(refusal:, type: :refusal) - end + def self.new(refusal:, type: :refusal); end sig { override.returns({refusal: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index d9189ad1..e2fea7d5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -15,12 +15,10 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text) - end + def self.new(text:, type: :text); end sig { override.returns({text: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 617a2ea5..0c496584 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -17,12 +17,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"chat.completion.deleted") - end + def self.new(id:, deleted:, object: :"chat.completion.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index a026c7f1..d5eeea91 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -34,8 +34,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :developer) - end + def self.new(content:, name: nil, role: :developer); end sig do override @@ -47,16 +46,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The contents of the developer message. module Content extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants - end + def self.variants; end ChatCompletionContentPartTextArray = T.let( diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index 45e13985..d63a2e1c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -11,12 +11,10 @@ module OpenAI # Specifying a particular function via `{"name": "my_function"}` forces the model # to call that function. 
sig { params(name: String).returns(T.attached_class) } - def self.new(name:) - end + def self.new(name:); end sig { override.returns({name: String}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 41e66779..0b1e82e1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -17,12 +17,10 @@ module OpenAI attr_accessor :role sig { params(content: T.nilable(String), name: String, role: Symbol).returns(T.attached_class) } - def self.new(content:, name:, role: :function) - end + def self.new(content:, name:, role: :function); end sig { override.returns({content: T.nilable(String), name: String, role: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 922e88f6..6c785895 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -101,8 +101,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Annotation < OpenAI::Internal::Type::BaseModel # The type of the URL citation. Always `url_citation`. @@ -129,8 +128,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(url_citation:, type: :url_citation) - end + def self.new(url_citation:, type: :url_citation); end sig do override @@ -138,8 +136,7 @@ module OpenAI {type: Symbol, url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation} ) end - def to_hash - end + def to_hash; end class URLCitation < OpenAI::Internal::Type::BaseModel # The index of the last character of the URL citation in the message. @@ -167,12 +164,10 @@ module OpenAI url: String ).returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:) - end + def self.new(end_index:, start_index:, title:, url:); end sig { override.returns({end_index: Integer, start_index: Integer, title: String, url: String}) } - def to_hash - end + def to_hash; end end end @@ -191,12 +186,10 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. 
sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:) - end + def self.new(arguments:, name:); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 1a4a2cc0..6cf5ae54 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] ) end - def self.variants - end + def self.variants; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index 4104c3ca..b4bfcbdd 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -32,8 +32,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function) - end + def self.new(id:, function:, type: :function); end sig do override @@ -41,8 +40,7 @@ module OpenAI {id: String, function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, type: Symbol} ) end - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON @@ -58,12 +56,10 @@ module OpenAI # The function that the model called. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:) - end + def self.new(arguments:, name:); end sig { override.returns({arguments: String, name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 5e04a7e7..34114b90 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -14,8 +14,7 @@ module OpenAI AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol]) } - def self.values - end + def self.values; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 566f6c99..be71706f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -28,12 +28,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(function:, type: :function) - end + def self.new(function:, type: :function); end sig { override.returns({function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, type: Symbol}) } - def to_hash - end + def to_hash; end class Function < OpenAI::Internal::Type::BaseModel # The name of the function to call. 
@@ -41,12 +39,10 @@ module OpenAI attr_accessor :name sig { params(name: String).returns(T.attached_class) } - def self.new(name:) - end + def self.new(name:); end sig { override.returns({name: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index 674775a6..c6f72148 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -27,8 +27,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :content) - end + def self.new(content:, type: :content); end sig do override @@ -36,8 +35,7 @@ module OpenAI {content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), type: Symbol} ) end - def to_hash - end + def to_hash; end # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be @@ -46,8 +44,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants - end + def self.variants; end ChatCompletionContentPartTextArray = T.let( diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index e75ae72c..aa6fa947 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -18,8 +18,7 @@ module OpenAI FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol]) } - def self.values - end + def self.values; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi index 78838f7f..67a32515 100644 --- a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi @@ -10,12 +10,10 @@ module OpenAI # A chat completion message generated by the model. sig { params(id: String).returns(T.attached_class) } - def self.new(id:) - end + def self.new(id:); end sig { override.returns({id: String}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 26683cfa..51d36cd9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -19,12 +19,10 @@ module OpenAI # Options for streaming response. Only set this when you set `stream: true`. 
sig { params(include_usage: T::Boolean).returns(T.attached_class) } - def self.new(include_usage: nil) - end + def self.new(include_usage: nil); end sig { override.returns({include_usage: T::Boolean}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index bb766e74..44c63247 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -34,8 +34,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :system) - end + def self.new(content:, name: nil, role: :system); end sig do override @@ -47,16 +46,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The contents of the system message. module Content extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants - end + def self.variants; end ChatCompletionContentPartTextArray = T.let( diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 17cc3821..4763d686 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -36,8 +36,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(token:, bytes:, logprob:, top_logprobs:) - end + def self.new(token:, bytes:, logprob:, top_logprobs:); end sig do override @@ -50,8 +49,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class TopLogprob < OpenAI::Internal::Type::BaseModel # The token. 
@@ -78,12 +76,10 @@ module OpenAI logprob: Float ).returns(T.attached_class) end - def self.new(token:, bytes:, logprob:) - end + def self.new(token:, bytes:, logprob:); end sig { override.returns({token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index e35bbd32..e6559aed 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -18,12 +18,10 @@ module OpenAI params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(function:, type: :function) - end + def self.new(function:, type: :function); end sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index b739c519..a840bdf9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -30,8 +30,7 @@ module OpenAI REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol]) } - def self.values - end + def self.values; end end sig do @@ -40,8 +39,7 @@ module OpenAI [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] ) end - def self.variants - end + def self.variants; end end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 3c34a0a4..3e792e43 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -27,8 +27,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, tool_call_id:, role: :tool) - end + def self.new(content:, tool_call_id:, role: :tool); end sig do override @@ -40,16 +39,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The contents of the tool message. 
module Content extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants - end + def self.variants; end ChatCompletionContentPartTextArray = T.let( diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index f4a11939..312f7dfb 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -10,12 +10,12 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) ] ) ) @@ -41,13 +41,13 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) ] ), name: String, @@ -55,8 +55,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :user) - end + def self.new(content:, name: nil, role: :user); end sig do override @@ -65,12 +64,12 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) ] ), role: Symbol, @@ -78,8 +77,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The contents of the user message. 
module Content @@ -91,18 +89,17 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) + T.any( + OpenAI::Models::Chat::ChatCompletionContentPartText, + OpenAI::Models::Chat::ChatCompletionContentPartImage, + OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Models::Chat::ChatCompletionContentPart::File + ) ] ] ) end - def self.variants - end + def self.variants; end ChatCompletionContentPartArray = T.let( diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index cdbe6807..0afa33d1 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -16,14 +16,14 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) + T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) ] ) end @@ -378,15 +378,15 @@ module OpenAI sig do params( messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) + T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Internal::AnyHash, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), @@ -466,22 +466,20 @@ module OpenAI user: nil, web_search_options: nil, request_options: {} - ) - end - + ); end sig do override .returns( { messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) + T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + 
OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), @@ -526,8 +524,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -538,8 +535,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } - def self.variants - end + def self.variants; end end # Deprecated in favor of `tool_choice`. @@ -587,8 +583,7 @@ module OpenAI T::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol] ) end - def self.values - end + def self.values; end end sig do @@ -597,8 +592,7 @@ module OpenAI [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] ) end - def self.variants - end + def self.variants; end end class Function < OpenAI::Internal::Type::BaseModel @@ -632,14 +626,12 @@ module OpenAI params(name: String, description: String, parameters: T::Hash[Symbol, T.anything]) .returns(T.attached_class) end - def self.new(name:, description: nil, parameters: nil) - end + def self.new(name:, description: nil, parameters: nil); end sig do override.returns({name: String, description: String, parameters: T::Hash[Symbol, T.anything]}) end - def to_hash - end + def to_hash; end end module Modality @@ -653,8 +645,7 @@ module OpenAI AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol]) } - def self.values - end + def self.values; end end # An object specifying the format that the model must output. @@ -676,8 +667,7 @@ module OpenAI [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] ) end - def self.variants - end + def self.variants; end end # Specifies the latency tier to use for processing the request. This parameter is @@ -705,8 +695,7 @@ module OpenAI DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) } - def self.values - end + def self.values; end end # Up to 4 sequences where the API will stop generating further tokens. The @@ -715,8 +704,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end @@ -771,8 +759,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(search_context_size: nil, user_location: nil) - end + def self.new(search_context_size: nil, user_location: nil); end sig do override @@ -783,8 +770,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. 
@@ -824,8 +810,7 @@ module OpenAI T::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol] ) end - def self.values - end + def self.values; end end class UserLocation < OpenAI::Internal::Type::BaseModel @@ -859,8 +844,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(approximate:, type: :approximate) - end + def self.new(approximate:, type: :approximate); end sig do override @@ -871,8 +855,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Approximate < OpenAI::Internal::Type::BaseModel # Free text input for the city of the user, e.g. `San Francisco`. @@ -914,12 +897,10 @@ module OpenAI timezone: String ).returns(T.attached_class) end - def self.new(city: nil, country: nil, region: nil, timezone: nil) - end + def self.new(city: nil, country: nil, region: nil, timezone: nil); end sig { override.returns({city: String, country: String, region: String, timezone: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/lib/openai/models/chat/completion_delete_params.rbi index e92e61fa..42ccf3bf 100644 --- a/rbi/lib/openai/models/chat/completion_delete_params.rbi +++ b/rbi/lib/openai/models/chat/completion_delete_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index 1759bb11..c90d051b 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -53,8 +53,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}); end sig do override @@ -69,8 +68,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
@@ -85,8 +83,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi index 921f208a..3cb4b992 100644 --- a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi +++ b/rbi/lib/openai/models/chat/completion_retrieve_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index 7a044c5b..49a8c260 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -23,8 +23,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(metadata:, request_options: {}) - end + def self.new(metadata:, request_options: {}); end sig do override.returns( @@ -34,8 +33,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 4f3218d7..85aa9aba 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -39,8 +39,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -53,8 +52,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
@@ -70,8 +68,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 17dc0d14..f7512375 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -59,8 +59,7 @@ module OpenAI GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 1b06ab72..2129e61f 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -33,8 +33,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(key:, type:, value:) - end + def self.new(key:, type:, value:); end sig do override @@ -42,8 +41,7 @@ module OpenAI {key: String, type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean)} ) end - def to_hash - end + def to_hash; end # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -67,8 +65,7 @@ module OpenAI LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ComparisonFilter::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end # The value to compare against the attribute key; supports string, number, or @@ -77,8 +74,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index 98496acd..d24d8268 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -79,8 +79,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 6cd7e70d..9833bea3 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -34,8 +34,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(finish_reason:, index:, logprobs:, text:) - end + def self.new(finish_reason:, index:, logprobs:, text:); end sig do override @@ -48,8 +47,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The reason the model stopped generating tokens. 
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -67,8 +65,7 @@ module OpenAI CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol]) } - def self.values - end + def self.values; end end class Logprobs < OpenAI::Internal::Type::BaseModel @@ -105,8 +102,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil) - end + def self.new(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil); end sig do override @@ -119,8 +115,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 26802234..01701e36 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -159,14 +159,7 @@ module OpenAI sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -205,9 +198,7 @@ module OpenAI top_p: nil, user: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -240,8 +231,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -252,8 +242,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } - def self.variants - end + def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } OrSymbol = @@ -275,8 +264,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) @@ -295,8 +283,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index fb3951e4..e0fbddc4 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -56,9 +56,7 @@ module OpenAI total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil - ) - end - + ); end sig do override .returns( @@ -71,8 +69,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # When using Predicted Outputs, the number of tokens in the prediction that @@ -136,8 +133,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end class PromptTokensDetails < OpenAI::Internal::Type::BaseModel @@ -157,12 +153,10 @@ module OpenAI # 
Breakdown of tokens used in the prompt. sig { params(audio_tokens: Integer, cached_tokens: Integer).returns(T.attached_class) } - def self.new(audio_tokens: nil, cached_tokens: nil) - end + def self.new(audio_tokens: nil, cached_tokens: nil); end sig { override.returns({audio_tokens: Integer, cached_tokens: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 056c00fe..214f5113 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -20,8 +20,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(filters:, type:) - end + def self.new(filters:, type:); end sig do override @@ -32,8 +31,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. @@ -41,8 +39,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } - def self.variants - end + def self.variants; end end # Type of operation: `and` or `or`. @@ -56,8 +53,7 @@ module OpenAI OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::CompoundFilter::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index e76ac6c3..878d96ba 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -31,8 +31,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, model:, usage:, object: :list) - end + def self.new(data:, model:, usage:, object: :list); end sig do override @@ -45,8 +44,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Usage < OpenAI::Internal::Type::BaseModel # The number of tokens used by the prompt. @@ -59,12 +57,10 @@ module OpenAI # The usage information for the request. sig { params(prompt_tokens: Integer, total_tokens: Integer).returns(T.attached_class) } - def self.new(prompt_tokens:, total_tokens:) - end + def self.new(prompt_tokens:, total_tokens:); end sig { override.returns({prompt_tokens: Integer, total_tokens: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index 1f6e9020..0567898b 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -19,12 +19,10 @@ module OpenAI # Represents an embedding vector returned by embedding endpoint. 
sig { params(embedding: T::Array[Float], index: Integer, object: Symbol).returns(T.attached_class) } - def self.new(embedding:, index:, object: :embedding) - end + def self.new(embedding:, index:, object: :embedding); end sig { override.returns({embedding: T::Array[Float], index: Integer, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 5d8acf9b..6e763395 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -61,8 +61,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) - end + def self.new(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}); end sig do override @@ -77,8 +76,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. @@ -92,8 +90,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) @@ -115,8 +112,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } - def self.variants - end + def self.variants; end end # The format to return the embeddings in. Can be either `float` or @@ -132,8 +128,7 @@ module OpenAI BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index fb2fd9ed..962b8546 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -13,8 +13,7 @@ module OpenAI TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::EmbeddingModel::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/error_object.rbi b/rbi/lib/openai/models/error_object.rbi index dfea9b56..4dc767bd 100644 --- a/rbi/lib/openai/models/error_object.rbi +++ b/rbi/lib/openai/models/error_object.rbi @@ -19,14 +19,12 @@ module OpenAI params(code: T.nilable(String), message: String, param: T.nilable(String), type: String) .returns(T.attached_class) end - def self.new(code:, message:, param:, type:) - end + def self.new(code:, message:, param:, type:); end sig do override.returns({code: T.nilable(String), message: String, param: T.nilable(String), type: String}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/lib/openai/models/file_chunking_strategy.rbi index 8e896180..b05dd510 100644 --- a/rbi/lib/openai/models/file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy.rbi @@ -12,8 +12,7 @@ module OpenAI [OpenAI::Models::StaticFileChunkingStrategyObject, 
OpenAI::Models::OtherFileChunkingStrategyObject] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 9f69a517..346a7b5a 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -13,8 +13,7 @@ module OpenAI [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/lib/openai/models/file_content_params.rbi index f753c7d2..97fa8439 100644 --- a/rbi/lib/openai/models/file_content_params.rbi +++ b/rbi/lib/openai/models/file_content_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index c2a7e47f..fb50d427 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -25,8 +25,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file:, purpose:, request_options: {}) - end + def self.new(file:, purpose:, request_options: {}); end sig do override @@ -38,8 +37,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/lib/openai/models/file_delete_params.rbi index af5aa65f..2d359dc0 100644 --- a/rbi/lib/openai/models/file_delete_params.rbi +++ b/rbi/lib/openai/models/file_delete_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/file_deleted.rbi b/rbi/lib/openai/models/file_deleted.rbi index 6b362f3b..60dd5163 100644 --- a/rbi/lib/openai/models/file_deleted.rbi +++ b/rbi/lib/openai/models/file_deleted.rbi @@ -13,12 +13,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :file) - end + def self.new(id:, deleted:, object: :file); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 035e32d3..e89cec8f 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -49,8 +49,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}); end sig do override @@ -64,8 +63,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
@@ -79,8 +77,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FileListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index c7f26a14..34dd1542 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -74,9 +74,7 @@ module OpenAI expires_at: nil, status_details: nil, object: :file - ) - end - + ); end sig do override .returns( @@ -93,8 +91,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` @@ -114,8 +111,7 @@ module OpenAI VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FileObject::Purpose::TaggedSymbol]) } - def self.values - end + def self.values; end end # Deprecated. The current status of the file, which can be either `uploaded`, @@ -131,8 +127,7 @@ module OpenAI ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FileObject::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 5514076c..a1e5347b 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -20,8 +20,7 @@ module OpenAI EVALS = T.let(:evals, OpenAI::Models::FilePurpose::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FilePurpose::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi b/rbi/lib/openai/models/file_retrieve_params.rbi index 25676e12..90bf8337 100644 --- a/rbi/lib/openai/models/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/file_retrieve_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 810bf8fd..666480f0 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -164,9 +164,7 @@ module OpenAI metadata: nil, method_: nil, object: :"fine_tuning.job" - ) - end - + ); end sig do override .returns( @@ -193,8 +191,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Error < OpenAI::Internal::Type::BaseModel # A machine-readable error code. @@ -213,12 +210,10 @@ module OpenAI # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. 
sig { params(code: String, message: String, param: T.nilable(String)).returns(T.attached_class) } - def self.new(code:, message:, param:) - end + def self.new(code:, message:, param:); end sig { override.returns({code: String, message: String, param: T.nilable(String)}) } - def to_hash - end + def to_hash; end end class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -256,8 +251,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -269,8 +263,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -278,8 +271,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -288,8 +280,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -298,8 +289,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end @@ -321,8 +311,7 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end class Method < OpenAI::Internal::Type::BaseModel @@ -361,8 +350,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(dpo: nil, supervised: nil, type: nil) - end + def self.new(dpo: nil, supervised: nil, type: nil); end sig do override @@ -374,8 +362,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Dpo < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. @@ -397,15 +384,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil) - end + def self.new(hyperparameters: nil); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters}) end - def to_hash - end + def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model @@ -450,8 +435,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -464,8 +448,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -473,8 +456,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # The beta value for the DPO method. 
A higher beta value will increase the weight @@ -483,8 +465,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -493,8 +474,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -503,8 +483,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end end @@ -535,15 +514,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil) - end + def self.new(hyperparameters: nil); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters}) end - def to_hash - end + def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model @@ -579,8 +556,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -592,8 +568,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -601,8 +576,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -611,8 +585,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -621,8 +594,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end end @@ -639,8 +611,7 @@ module OpenAI DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index f9a16f3d..85a3774e 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -76,8 +76,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The log level of the event. module Level @@ -92,8 +91,7 @@ module OpenAI ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol]) } - def self.values - end + def self.values; end end # The type of event. 
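# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# Every enum module in this diff keeps its `values` class method after the
# single-line rewrite; it still returns the full array of tagged symbols.
# A sketch against the Level enum above, assuming the gem is loaded:
require "openai"

levels = OpenAI::Models::FineTuning::FineTuningJobEvent::Level.values
levels.include?(:error) # => true, per the ERROR constant in the hunk above
# ----------------------------------------------------------------------------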
@@ -108,8 +106,7 @@ module OpenAI METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index 561dd861..717d084a 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -36,8 +36,7 @@ module OpenAI params(project: String, entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String]) .returns(T.attached_class) end - def self.new(project:, entity: nil, name: nil, tags: nil) - end + def self.new(project:, entity: nil, name: nil, tags: nil); end sig do override @@ -48,8 +47,7 @@ module OpenAI tags: T::Array[String] }) end - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index bb2e26b6..a40e4f11 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -28,12 +28,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(wandb:, type: :wandb) - end + def self.new(wandb:, type: :wandb); end sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi index 2c2b8d46..9976752d 100644 --- a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 351f2408..8a30d533 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -62,7 +62,9 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) } attr_reader :method_ - sig { params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash)).void } + sig do + params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash)).void + end attr_writer :method_ # The seed controls the reproducibility of the job. Passing in the same seed and @@ -122,9 +124,7 @@ module OpenAI suffix: nil, validation_file: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -142,8 +142,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). 
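# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# Per the W&B integration signatures touched above, only `project:` is
# required on FineTuningJobWandbIntegration, and the wrapper object defaults
# `type:` to :wandb. A sketch with invented values, assuming the gem is loaded:
require "openai"

wandb = OpenAI::Models::FineTuning::FineTuningJobWandbIntegration.new(
  project: "my-project",  # required
  tags: ["sample-run"]    # optional
)
integration =
  OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject.new(wandb: wandb)
integration.to_hash # => {type: :wandb, wandb: {...}}
# ----------------------------------------------------------------------------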
@@ -151,8 +150,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } - def self.variants - end + def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } OrSymbol = @@ -199,8 +197,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -212,8 +209,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -221,8 +217,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -231,8 +226,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -241,8 +235,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end @@ -274,12 +267,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(wandb:, type: :wandb) - end + def self.new(wandb:, type: :wandb); end sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb}) } - def to_hash - end + def to_hash; end class Wandb < OpenAI::Internal::Type::BaseModel # The name of the project that the new run will be created under. @@ -319,8 +310,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(project:, entity: nil, name: nil, tags: nil) - end + def self.new(project:, entity: nil, name: nil, tags: nil); end sig do override @@ -331,8 +321,7 @@ module OpenAI tags: T::Array[String] }) end - def to_hash - end + def to_hash; end end end @@ -375,8 +364,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(dpo: nil, supervised: nil, type: nil) - end + def self.new(dpo: nil, supervised: nil, type: nil); end sig do override @@ -388,8 +376,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Dpo < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. @@ -417,15 +404,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil) - end + def self.new(hyperparameters: nil); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters}) end - def to_hash - end + def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model @@ -470,8 +455,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -484,8 +468,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
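# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# Each hyperparameter above is typed as a union of Symbol and a numeric
# type, so a caller may pass either a concrete number or a symbolic setting.
# The API documents :auto for the symbolic side; that constant is an
# assumption here and is not spelled out in these signatures:
require "openai"

hp = OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters.new(
  batch_size: :auto,             # Symbol arm of the [Symbol, Integer] union
  learning_rate_multiplier: 2.0, # Float arm
  n_epochs: 3                    # Integer arm
)
# ----------------------------------------------------------------------------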
@@ -493,8 +476,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -503,8 +485,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -513,8 +494,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -523,8 +503,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end end @@ -555,8 +534,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil) - end + def self.new(hyperparameters: nil); end sig do override @@ -564,8 +542,7 @@ module OpenAI {hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} ) end - def to_hash - end + def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model @@ -601,8 +578,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - end + def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end sig do override @@ -614,8 +590,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -623,8 +598,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -633,8 +607,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Float]) } - def self.variants - end + def self.variants; end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -643,8 +616,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([Symbol, Integer]) } - def self.variants - end + def self.variants; end end end end @@ -661,8 +633,7 @@ module OpenAI DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 9ad33668..4c6e5963 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -29,12 +29,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, request_options: {}); end sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 94aa1f34..7b8bea48 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, metadata: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, metadata: nil, request_options: {}); end sig do override @@ -49,8 +48,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi index ae440b7a..ca938301 100644 --- a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index 279608d3..fee17bda 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -30,12 +30,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, request_options: {}) - end + def self.new(after: nil, limit: nil, request_options: {}); end sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index cd3931c5..06fb98b6 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -63,9 +63,7 @@ module OpenAI metrics:, step_number:, object: :"fine_tuning.job.checkpoint" - ) - end - + ); end sig do override .returns( @@ -80,8 +78,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end 
class Metrics < OpenAI::Internal::Type::BaseModel sig { returns(T.nilable(Float)) } @@ -147,9 +144,7 @@ module OpenAI train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil - ) - end - + ); end sig do override .returns( @@ -164,8 +159,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index 6a4dc07a..cb9eeb56 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -46,8 +46,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, description: nil, parameters: nil, strict: nil) - end + def self.new(name:, description: nil, parameters: nil, strict: nil); end sig do override @@ -60,8 +59,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index 9f14f13d..9469de4c 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -28,12 +28,10 @@ module OpenAI # Represents the url or the content of an image generated by the OpenAI API. sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } - def self.new(b64_json: nil, revised_prompt: nil, url: nil) - end + def self.new(b64_json: nil, revised_prompt: nil, url: nil); end sig { override.returns({b64_json: String, revised_prompt: String, url: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index eb1c580e..9a1061c3 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -78,8 +78,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -87,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } - def self.variants - end + def self.variants; end end # The format in which the generated images are returned. Must be one of `url` or @@ -105,8 +103,7 @@ module OpenAI B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol]) } - def self.values - end + def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -123,8 +120,7 @@ module OpenAI SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 5c428b9a..58bf5ddd 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -78,9 +78,7 @@ module OpenAI size: nil, user: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -97,8 +95,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The model to use for image generation. Only `dall-e-2` is supported at this # time. 
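# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# The FunctionDefinition signatures a few hunks above require only `name:`;
# `parameters:` is a Symbol-keyed Hash holding a JSON-Schema-shaped payload.
# A sketch with invented values, assuming the gem is loaded:
require "openai"

fn = OpenAI::Models::FunctionDefinition.new(
  name: "get_weather",
  description: "Look up the current weather for a city",
  parameters: {
    type: "object",
    properties: {city: {type: "string"}},
    required: ["city"]
  },
  strict: true
)
# ----------------------------------------------------------------------------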
@@ -106,8 +103,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } - def self.variants - end + def self.variants; end end # The format in which the generated images are returned. Must be one of `url` or @@ -124,8 +120,7 @@ module OpenAI B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol]) } - def self.values - end + def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -141,8 +136,7 @@ module OpenAI SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 880ba871..b03fd35e 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -81,9 +81,7 @@ module OpenAI style: nil, user: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -100,16 +98,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The model to use for image generation. module Model extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } - def self.variants - end + def self.variants; end end # The quality of the image that will be generated. `hd` creates images with finer @@ -126,8 +122,7 @@ module OpenAI HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) } - def self.values - end + def self.values; end end # The format in which the generated images are returned. Must be one of `url` or @@ -144,8 +139,7 @@ module OpenAI B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol]) } - def self.values - end + def self.values; end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -165,8 +159,7 @@ module OpenAI SIZE_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) } - def self.values - end + def self.values; end end # The style of the generated images. Must be one of `vivid` or `natural`. 
Vivid @@ -184,8 +177,7 @@ module OpenAI NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 20027c2b..0203cf30 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -12,8 +12,7 @@ module OpenAI DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index 796ce49c..cb594826 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -13,12 +13,10 @@ module OpenAI params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)]) .returns(T.attached_class) end - def self.new(created:, data:) - end + def self.new(created:, data:); end sig { override.returns({created: Integer, data: T::Array[OpenAI::Models::Image]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/model.rbi b/rbi/lib/openai/models/model.rbi index ad55cfee..b7dfa10a 100644 --- a/rbi/lib/openai/models/model.rbi +++ b/rbi/lib/openai/models/model.rbi @@ -21,12 +21,10 @@ module OpenAI # Describes an OpenAI model offering that can be used with the API. sig { params(id: String, created: Integer, owned_by: String, object: Symbol).returns(T.attached_class) } - def self.new(id:, created:, owned_by:, object: :model) - end + def self.new(id:, created:, owned_by:, object: :model); end sig { override.returns({id: String, created: Integer, object: Symbol, owned_by: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/lib/openai/models/model_delete_params.rbi index 42095a36..9a19e392 100644 --- a/rbi/lib/openai/models/model_delete_params.rbi +++ b/rbi/lib/openai/models/model_delete_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/model_deleted.rbi b/rbi/lib/openai/models/model_deleted.rbi index 7044add0..e5b41780 100644 --- a/rbi/lib/openai/models/model_deleted.rbi +++ b/rbi/lib/openai/models/model_deleted.rbi @@ -13,12 +13,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: String).returns(T.attached_class) } - def self.new(id:, deleted:, object:) - end + def self.new(id:, deleted:, object:); end sig { override.returns({id: String, deleted: T::Boolean, object: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/lib/openai/models/model_list_params.rbi index 6570ac6f..6ecd8a67 100644 --- a/rbi/lib/openai/models/model_list_params.rbi +++ b/rbi/lib/openai/models/model_list_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - 
end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/lib/openai/models/model_retrieve_params.rbi index 273f1b0e..1b835b5d 100644 --- a/rbi/lib/openai/models/model_retrieve_params.rbi +++ b/rbi/lib/openai/models/model_retrieve_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 70df9dfa..0850f88c 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -42,8 +42,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(categories:, category_applied_input_types:, category_scores:, flagged:) - end + def self.new(categories:, category_applied_input_types:, category_scores:, flagged:); end sig do override @@ -56,8 +55,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Categories < OpenAI::Internal::Type::BaseModel # Content that expresses, incites, or promotes harassing language towards any @@ -162,9 +160,7 @@ module OpenAI sexual_minors:, violence:, violence_graphic: - ) - end - + ); end sig do override .returns( @@ -185,8 +181,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel @@ -283,9 +278,7 @@ module OpenAI sexual_minors:, violence:, violence_graphic: - ) - end - + ); end sig do override .returns( @@ -306,8 +299,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Harassment extend OpenAI::Internal::Type::Enum @@ -322,8 +314,7 @@ module OpenAI sig do override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) end - def self.values - end + def self.values; end end module HarassmentThreatening @@ -349,8 +340,7 @@ module OpenAI T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] ) end - def self.values - end + def self.values; end end module Hate @@ -364,8 +354,7 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } - def self.values - end + def self.values; end end module HateThreatening @@ -388,8 +377,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) end - def self.values - end + def self.values; end end module Illicit @@ -403,8 +391,7 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } - def self.values - end + def self.values; end end module IllicitViolent @@ -421,8 +408,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) end - def self.values - end + def self.values; end end module SelfHarm @@ -437,8 +423,7 @@ module OpenAI IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } - def self.values - 
end + def self.values; end end module SelfHarmInstruction @@ -466,8 +451,7 @@ module OpenAI T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] ) end - def self.values - end + def self.values; end end module SelfHarmIntent @@ -486,8 +470,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) end - def self.values - end + def self.values; end end module Sexual @@ -502,8 +485,7 @@ module OpenAI IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } - def self.values - end + def self.values; end end module SexualMinor @@ -520,8 +502,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) end - def self.values - end + def self.values; end end module Violence @@ -536,8 +517,7 @@ module OpenAI IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } - def self.values - end + def self.values; end end module ViolenceGraphic @@ -562,8 +542,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) end - def self.values - end + def self.values; end end end @@ -653,9 +632,7 @@ module OpenAI sexual_minors:, violence:, violence_graphic: - ) - end - + ); end sig do override .returns( @@ -676,8 +653,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 4fa9650a..2c12c369 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -35,11 +35,11 @@ module OpenAI String, T::Array[String], T::Array[ - T.any( - OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::AnyHash, - OpenAI::Models::ModerationTextInput - ) + T.any( + OpenAI::Models::ModerationImageURLInput, + OpenAI::Internal::AnyHash, + OpenAI::Models::ModerationTextInput + ) ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), @@ -47,8 +47,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, model: nil, request_options: {}) - end + def self.new(input:, model: nil, request_options: {}); end sig do override @@ -64,8 +63,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. 
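# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# The Input union spelled out in the next hunk accepts a plain String, an
# Array of Strings, or an Array of multi-modal input objects; `model:` may
# be a String or a ModerationModel symbol. A sketch, assuming the gem is
# loaded; the input strings are invented:
require "openai"

params = OpenAI::Models::ModerationCreateParams.new(
  input: ["some text to classify", "another input"],
  model: :"text-moderation-stable" # constant shown in the ModerationModel hunk below
)
# ----------------------------------------------------------------------------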
@@ -78,8 +76,7 @@ module OpenAI [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]] ) end - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) @@ -98,8 +95,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index fc12e123..34f57350 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -24,12 +24,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, model:, results:) - end + def self.new(id:, model:, results:); end sig { override.returns({id: String, model: String, results: T::Array[OpenAI::Models::Moderation]}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index 128f5062..fd241d15 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -7,7 +7,9 @@ module OpenAI sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash)).void } + sig do + params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash)).void + end attr_writer :image_url # Always `image_url`. @@ -22,12 +24,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_url:, type: :image_url) - end + def self.new(image_url:, type: :image_url); end sig { override.returns({image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, type: Symbol}) } - def to_hash - end + def to_hash; end class ImageURL < OpenAI::Internal::Type::BaseModel # Either a URL of the image or the base64 encoded image data. @@ -36,12 +36,10 @@ module OpenAI # Contains either an image URL or a data URL for a base64 encoded image. 
sig { params(url: String).returns(T.attached_class) } - def self.new(url:) - end + def self.new(url:); end sig { override.returns({url: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index 3b1d564a..7de0fc9c 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -15,8 +15,7 @@ module OpenAI TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ModerationModel::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/lib/openai/models/moderation_multi_modal_input.rbi index 247b0857..5f5a8f6d 100644 --- a/rbi/lib/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/lib/openai/models/moderation_multi_modal_input.rbi @@ -7,8 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/lib/openai/models/moderation_text_input.rbi index d117af19..cdb7c967 100644 --- a/rbi/lib/openai/models/moderation_text_input.rbi +++ b/rbi/lib/openai/models/moderation_text_input.rbi @@ -13,12 +13,10 @@ module OpenAI # An object describing text to classify. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text) - end + def self.new(text:, type: :text); end sig { override.returns({text: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index 24704550..10b6f90a 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -11,12 +11,10 @@ module OpenAI # because the file was indexed before the `chunking_strategy` concept was # introduced in the API. 
sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :other) - end + def self.new(type: :other); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 13f6e5ae..f16ebf77 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -31,8 +31,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(effort: nil, generate_summary: nil) - end + def self.new(effort: nil, generate_summary: nil); end sig do override @@ -43,8 +42,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # **computer_use_preview only** # @@ -62,8 +60,7 @@ module OpenAI DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index d46cfdf9..723cc106 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -19,8 +19,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ReasoningEffort::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index 3a1e8d74..707ef81c 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -11,12 +11,10 @@ module OpenAI # `json_schema` is recommended for models that support it. Note that the model # will not generate JSON without a system or user message instructing it to do so. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :json_object) - end + def self.new(type: :json_object); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index 8f5dbd5c..735f2c54 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -29,12 +29,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(json_schema:, type: :json_schema) - end + def self.new(json_schema:, type: :json_schema); end sig { override.returns({json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, type: Symbol}) } - def to_hash - end + def to_hash; end class JSONSchema < OpenAI::Internal::Type::BaseModel # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores @@ -76,8 +74,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, description: nil, schema: nil, strict: nil) - end + def self.new(name:, description: nil, schema: nil, strict: nil); end sig do override @@ -90,8 +87,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/lib/openai/models/response_format_text.rbi index 2cee5b0b..ec7b0d03 100644 --- a/rbi/lib/openai/models/response_format_text.rbi +++ b/rbi/lib/openai/models/response_format_text.rbi @@ -9,12 +9,10 @@ module OpenAI # Default response format. Used to generate text responses. 
sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :text) - end + def self.new(type: :text); end sig { override.returns({type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index b4329d44..88894312 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -31,8 +31,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(display_height:, display_width:, environment:, type: :computer_use_preview) - end + def self.new(display_height:, display_width:, environment:, type: :computer_use_preview); end sig do override @@ -45,8 +44,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The type of computer environment to control. module Environment @@ -62,8 +60,7 @@ module OpenAI BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 6b25a120..e1819f46 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -11,11 +11,11 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ) ) @@ -44,12 +44,12 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ), role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, @@ -57,8 +57,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil) - end + def self.new(content:, role:, type: nil); end sig do override @@ -67,11 +66,11 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ), role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, @@ -79,8 +78,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. 
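# --- Reviewer sketch (illustrative, not part of the patch) -----------------
# EasyInputMessage above takes `content:` as either a String or an Array of
# input parts, plus a Role symbol; its doc comment lists user, assistant,
# system, and developer. A sketch with invented values, assuming the gem is
# loaded:
require "openai"

msg = OpenAI::Models::Responses::EasyInputMessage.new(
  content: "Summarize the attached report.",
  role: :user,
  type: :message # optional; per the comment above, Type is always :message
)
# ----------------------------------------------------------------------------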
@@ -93,17 +91,16 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ] ) end - def self.variants - end + def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -121,8 +118,7 @@ module OpenAI DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end # The type of the message input. Always `message`. @@ -136,8 +132,7 @@ module OpenAI MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 7ca909fa..37314e5d 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -78,16 +78,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # A filter to apply based on file attributes. module Filters extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def self.variants - end + def self.variants; end end class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -115,8 +113,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranker: nil, score_threshold: nil) - end + def self.new(ranker: nil, score_threshold: nil); end sig do override @@ -124,8 +121,7 @@ module OpenAI {ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float} ) end - def to_hash - end + def to_hash; end # The ranker to use for the file search. 
module Ranker @@ -146,8 +142,7 @@ module OpenAI sig do override.returns(T::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index e329cc11..16ed2997 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -38,8 +38,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, parameters:, strict:, description: nil, type: :function) - end + def self.new(name:, parameters:, strict:, description: nil, type: :function); end sig do override @@ -53,8 +52,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index baf516d6..83fef973 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -58,8 +58,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -74,8 +73,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The order to return the input items in. Default is `asc`. # @@ -92,8 +90,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 4a3056e9..99dd9aae 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -75,14 +75,14 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) + T.any( + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseReasoningItem + ) ] ) end @@ -130,12 +130,12 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ] ) end @@ -227,15 +227,15 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), output: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - 
OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) + T.any( + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseReasoningItem + ) ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), @@ -246,13 +246,13 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), @@ -290,9 +290,7 @@ module OpenAI usage: nil, user: nil, object: :response - ) - end - + ); end sig do override .returns( @@ -306,14 +304,14 @@ module OpenAI model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), object: Symbol, output: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) + T.any( + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseReasoningItem + ) ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), @@ -323,12 +321,12 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), @@ -342,8 +340,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason why the response is incomplete. @@ -358,12 +355,10 @@ module OpenAI params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason: nil) - end + def self.new(reason: nil); end sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } - def to_hash - end + def to_hash; end # The reason why the response is incomplete. 
         module Reason
@@ -380,8 +375,7 @@ module OpenAI
             T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)

           sig { override.returns(T::Array[OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol]) }
-          def self.values
-          end
+          def self.values; end
         end
       end

@@ -397,8 +391,7 @@ module OpenAI
            [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
          )
        end
-       def self.variants
-       end
+       def self.variants; end
      end

      # The truncation strategy to use for the model response.
@@ -419,8 +412,7 @@ module OpenAI
        DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)

        sig { override.returns(T::Array[OpenAI::Models::Responses::Response::Truncation::TaggedSymbol]) }
-       def self.values
-       end
+       def self.values; end
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi
index 17fa0ac5..386c8365 100644
--- a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi
+++ b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi
@@ -14,12 +14,10 @@ module OpenAI
       # Emitted when there is a partial audio response.
       sig { params(delta: String, type: Symbol).returns(T.attached_class) }
-      def self.new(delta:, type: :"response.audio.delta")
-      end
+      def self.new(delta:, type: :"response.audio.delta"); end

       sig { override.returns({delta: String, type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_done_event.rbi
index b25bf971..40f5901f 100644
--- a/rbi/lib/openai/models/responses/response_audio_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_audio_done_event.rbi
@@ -10,12 +10,10 @@ module OpenAI
       # Emitted when the audio response is complete.
       sig { params(type: Symbol).returns(T.attached_class) }
-      def self.new(type: :"response.audio.done")
-      end
+      def self.new(type: :"response.audio.done"); end

       sig { override.returns({type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi
index 8c6021af..c121b3b9 100644
--- a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi
+++ b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi
@@ -14,12 +14,10 @@ module OpenAI
       # Emitted when there is a partial transcript of audio.
       sig { params(delta: String, type: Symbol).returns(T.attached_class) }
-      def self.new(delta:, type: :"response.audio.transcript.delta")
-      end
+      def self.new(delta:, type: :"response.audio.transcript.delta"); end

       sig { override.returns({delta: String, type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi
index 52a596c6..983f0e0a 100644
--- a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi
@@ -10,12 +10,10 @@ module OpenAI
       # Emitted when the full audio transcript is completed.
       sig { params(type: Symbol).returns(T.attached_class) }
-      def self.new(type: :"response.audio.transcript.done")
-      end
+      def self.new(type: :"response.audio.transcript.done"); end

       sig { override.returns({type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
index 4c8bd0e9..e19943db 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
@@ -18,12 +18,10 @@ module OpenAI
       # Emitted when a partial code snippet is added by the code interpreter.
       sig { params(delta: String, output_index: Integer, type: Symbol).returns(T.attached_class) }
-      def self.new(delta:, output_index:, type: :"response.code_interpreter_call.code.delta")
-      end
+      def self.new(delta:, output_index:, type: :"response.code_interpreter_call.code.delta"); end

       sig { override.returns({delta: String, output_index: Integer, type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
index d1ec38b7..5d6af463 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
@@ -18,12 +18,10 @@ module OpenAI
       # Emitted when code snippet output is finalized by the code interpreter.
       sig { params(code: String, output_index: Integer, type: Symbol).returns(T.attached_class) }
-      def self.new(code:, output_index:, type: :"response.code_interpreter_call.code.done")
-      end
+      def self.new(code:, output_index:, type: :"response.code_interpreter_call.code.done"); end

       sig { override.returns({code: String, output_index: Integer, type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
index 5d1c5965..28acb52b 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
@@ -33,7 +33,11 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed")
+      def self.new(
+        code_interpreter_call:,
+        output_index:,
+        type: :"response.code_interpreter_call.completed"
+      )
       end

       sig do
         override
           .returns(
             {
               code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall,
               output_index: Integer,
               type: Symbol
             }
           )
       end
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
index 02d423ad..c0a220d0 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
@@ -50,8 +50,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
index 47f2b1a2..e0fe0abb 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
@@ -50,8 +50,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
index a6157f98..4f1a0e10 100644
--- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
@@ -16,10 +16,10 @@ module OpenAI
       sig do
         returns(
           T::Array[
-          T.any(
-            OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
-            OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
-          )
+            T.any(
+              OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
+              OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
+            )
           ]
         )
       end
@@ -39,19 +39,18 @@ module OpenAI
           id: String,
           code: String,
           results: T::Array[
-          T.any(
-            OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
-            OpenAI::Internal::AnyHash,
-            OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
-          )
+            T.any(
+              OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
+              OpenAI::Internal::AnyHash,
+              OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
+            )
          ],
          status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol,
          type: Symbol
        )
          .returns(T.attached_class)
      end
-     def self.new(id:, code:, results:, status:, type: :code_interpreter_call)
-     end
+     def self.new(id:, code:, results:, status:, type: :code_interpreter_call); end

      sig do
        override
          .returns(
            {
              id: String,
              code: String,
              results: T::Array[
-             T.any(
-               OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
-               OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
-             )
+               T.any(
+                 OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs,
+                 OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
+               )
              ],
              status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol,
              type: Symbol
            }
          )
      end
-     def to_hash
-     end
+     def to_hash; end

      # The output of a code interpreter tool call that is text.
      module Result
        extend OpenAI::Internal::Type::Union

        class Logs < OpenAI::Internal::Type::BaseModel
@@ -88,12 +86,10 @@ module OpenAI
          # The output of a code interpreter tool call that is text.
          sig { params(logs: String, type: Symbol).returns(T.attached_class) }
-         def self.new(logs:, type: :logs)
-         end
+         def self.new(logs:, type: :logs); end

          sig { override.returns({logs: String, type: Symbol}) }
-         def to_hash
-         end
+         def to_hash; end
        end

        class Files < OpenAI::Internal::Type::BaseModel
@@ -108,17 +104,16 @@ module OpenAI
          sig do
            params(
              files: T::Array[
-             T.any(
-               OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File,
-               OpenAI::Internal::AnyHash
-             )
+               T.any(
+                 OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File,
+                 OpenAI::Internal::AnyHash
+               )
              ],
              type: Symbol
            )
              .returns(T.attached_class)
          end
-         def self.new(files:, type: :files)
-         end
+         def self.new(files:, type: :files); end

          sig do
            override
              .returns(
                {
                  files: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File],
                  type: Symbol
                }
              )
          end
-         def to_hash
-         end
+         def to_hash; end

          class File < OpenAI::Internal::Type::BaseModel
            # The ID of the file.
@@ -142,12 +136,10 @@ module OpenAI
            attr_accessor :mime_type

            sig { params(file_id: String, mime_type: String).returns(T.attached_class) }
-           def self.new(file_id:, mime_type:)
-           end
+           def self.new(file_id:, mime_type:); end

            sig { override.returns({file_id: String, mime_type: String}) }
-           def to_hash
-           end
+           def to_hash; end
          end
        end

@@ -157,8 +149,7 @@ module OpenAI
            [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files]
          )
        end
-       def self.variants
-       end
+       def self.variants; end
      end

      # The status of the code interpreter tool call.
@@ -181,8 +172,7 @@ module OpenAI
          override
            .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol])
        end
-       def self.values
-       end
+       def self.values; end
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi
index c7774329..26105306 100644
--- a/rbi/lib/openai/models/responses/response_completed_event.rbi
+++ b/rbi/lib/openai/models/responses/response_completed_event.rbi
@@ -20,12 +20,10 @@ module OpenAI
         params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol)
           .returns(T.attached_class)
       end
-      def self.new(response:, type: :"response.completed")
-      end
+      def self.new(response:, type: :"response.completed"); end

       sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
index 8ced5e39..ccc7cf95 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi
@@ -68,8 +68,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(id:, action:, call_id:, pending_safety_checks:, status:, type:)
-      end
+      def self.new(id:, action:, call_id:, pending_safety_checks:, status:, type:); end

       sig do
         override
@@ -94,8 +93,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end

       # A click action.
       module Action
@@ -130,8 +128,7 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(button:, x:, y_:, type: :click)
-        end
+        def self.new(button:, x:, y_:, type: :click); end

         sig do
           override
@@ -144,8 +141,7 @@ module OpenAI
             }
           )
         end
-        def to_hash
-        end
+        def to_hash; end

         # Indicates which mouse button was pressed during the click. One of `left`,
         # `right`, `wheel`, `back`, or `forward`.
@@ -180,8 +176,7 @@ module OpenAI
               T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol]
             )
          end
-         def self.values
-         end
+         def self.values; end
        end
      end

@@ -201,12 +196,10 @@ module OpenAI
        # A double click action.
        sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) }
-       def self.new(x:, y_:, type: :double_click)
-       end
+       def self.new(x:, y_:, type: :double_click); end

        sig { override.returns({type: Symbol, x: Integer, y_: Integer}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      class Drag < OpenAI::Internal::Type::BaseModel
@@ -235,8 +228,7 @@ module OpenAI
          )
            .returns(T.attached_class)
        end
-       def self.new(path:, type: :drag)
-       end
+       def self.new(path:, type: :drag); end

        sig do
          override
            .returns(
              {path: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], type: Symbol}
            )
        end
-       def to_hash
-       end
+       def to_hash; end

        class Path < OpenAI::Internal::Type::BaseModel
          # The x-coordinate.
@@ -258,12 +249,10 @@ module OpenAI
          # A series of x/y coordinate pairs in the drag path.
          sig { params(x: Integer, y_: Integer).returns(T.attached_class) }
-         def self.new(x:, y_:)
-         end
+         def self.new(x:, y_:); end

          sig { override.returns({x: Integer, y_: Integer}) }
-         def to_hash
-         end
+         def to_hash; end
        end
      end

@@ -280,12 +269,10 @@ module OpenAI
        # A collection of keypresses the model would like to perform.
        sig { params(keys: T::Array[String], type: Symbol).returns(T.attached_class) }
-       def self.new(keys:, type: :keypress)
-       end
+       def self.new(keys:, type: :keypress); end

        sig { override.returns({keys: T::Array[String], type: Symbol}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      class Move < OpenAI::Internal::Type::BaseModel
@@ -304,12 +291,10 @@ module OpenAI
        # A mouse move action.
        sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) }
-       def self.new(x:, y_:, type: :move)
-       end
+       def self.new(x:, y_:, type: :move); end

        sig { override.returns({type: Symbol, x: Integer, y_: Integer}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      class Screenshot < OpenAI::Internal::Type::BaseModel
@@ -320,12 +305,10 @@ module OpenAI
        # A screenshot action.
        sig { params(type: Symbol).returns(T.attached_class) }
-       def self.new(type: :screenshot)
-       end
+       def self.new(type: :screenshot); end

        sig { override.returns({type: Symbol}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      class Scroll < OpenAI::Internal::Type::BaseModel
@@ -355,14 +338,12 @@ module OpenAI
          params(scroll_x: Integer, scroll_y: Integer, x: Integer, y_: Integer, type: Symbol)
            .returns(T.attached_class)
        end
-       def self.new(scroll_x:, scroll_y:, x:, y_:, type: :scroll)
-       end
+       def self.new(scroll_x:, scroll_y:, x:, y_:, type: :scroll); end

        sig do
          override.returns({scroll_x: Integer, scroll_y: Integer, type: Symbol, x: Integer, y_: Integer})
        end
-       def to_hash
-       end
+       def to_hash; end
      end

      class Type < OpenAI::Internal::Type::BaseModel
@@ -377,12 +358,10 @@ module OpenAI
        # An action to type in text.
        sig { params(text: String, type: Symbol).returns(T.attached_class) }
-       def self.new(text:, type: :type)
-       end
+       def self.new(text:, type: :type); end

        sig { override.returns({text: String, type: Symbol}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      class Wait < OpenAI::Internal::Type::BaseModel
@@ -393,12 +372,10 @@ module OpenAI
        # A wait action.
        sig { params(type: Symbol).returns(T.attached_class) }
-       def self.new(type: :wait)
-       end
+       def self.new(type: :wait); end

        sig { override.returns({type: Symbol}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      sig do
@@ -407,8 +384,7 @@ module OpenAI
          [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait]
        )
      end
-     def self.variants
-     end
+     def self.variants; end
    end

    class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel
@@ -426,12 +402,10 @@ module OpenAI
      # A pending safety check for the computer call.
      sig { params(id: String, code: String, message: String).returns(T.attached_class) }
-     def self.new(id:, code:, message:)
-     end
+     def self.new(id:, code:, message:); end

      sig { override.returns({id: String, code: String, message: String}) }
-     def to_hash
-     end
+     def to_hash; end
    end

    # The status of the item. One of `in_progress`, `completed`, or `incomplete`.
@@ -451,8 +425,7 @@ module OpenAI
        T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol)

      sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol]) }
-     def self.values
-     end
+     def self.values; end
    end

    # The type of the computer call. Always `computer_call`.
@@ -467,8 +440,7 @@ module OpenAI
        T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol)

      sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol]) }
-     def self.values
-     end
+     def self.values; end
    end
  end
 end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
index fbfa11b5..cf267c21 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
@@ -42,10 +42,10 @@ module OpenAI
      sig do
        params(
          acknowledged_safety_checks: T::Array[
-        T.any(
-          OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck,
-          OpenAI::Internal::AnyHash
-        )
+          T.any(
+            OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck,
+            OpenAI::Internal::AnyHash
+          )
          ]
        )
          .void
      end
      attr_writer :acknowledged_safety_checks
@@ -66,10 +66,10 @@ module OpenAI
          call_id: String,
          output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash),
          acknowledged_safety_checks: T::Array[
-        T.any(
-          OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck,
-          OpenAI::Internal::AnyHash
-        )
+          T.any(
+            OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck,
+            OpenAI::Internal::AnyHash
+          )
          ],
          status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol,
          type: Symbol
        )
          .returns(T.attached_class)
      end
          }
        )
      end
-     def to_hash
-     end
+     def to_hash; end

      class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel
        # The ID of the pending safety check.
@@ -117,12 +116,10 @@ module OpenAI
        # A pending safety check for the computer call.
        sig { params(id: String, code: String, message: String).returns(T.attached_class) }
-       def self.new(id:, code:, message:)
-       end
+       def self.new(id:, code:, message:); end

        sig { override.returns({id: String, code: String, message: String}) }
-       def to_hash
-       end
+       def to_hash; end
      end

      # The status of the message input. One of `in_progress`, `completed`, or
@@ -146,8 +143,7 @@ module OpenAI
          override
            .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol])
        end
-       def self.values
-       end
+       def self.values; end
      end
    end
  end
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
index 11d1b8d6..5ece9b13 100644
--- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
+++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
@@ -25,12 +25,10 @@ module OpenAI
       # A computer screenshot image used with the computer use tool.
       sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) }
-      def self.new(file_id: nil, image_url: nil, type: :computer_screenshot)
-      end
+      def self.new(file_id: nil, image_url: nil, type: :computer_screenshot); end

       sig { override.returns({type: Symbol, file_id: String, image_url: String}) }
-      def to_hash
-      end
+      def to_hash; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/lib/openai/models/responses/response_content.rbi
index 5bb9113e..5fd0ccd8 100644
--- a/rbi/lib/openai/models/responses/response_content.rbi
+++ b/rbi/lib/openai/models/responses/response_content.rbi
@@ -13,8 +13,7 @@ module OpenAI
           [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
         )
       end
-      def self.variants
-      end
+      def self.variants; end
     end
   end
 end
diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
index 669ce321..5ec11d97 100644
--- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
+++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi
@@ -58,8 +58,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end

       # The content part that was added.
       module Part
@@ -69,8 +68,7 @@ module OpenAI
           override
             .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal])
         end
-        def self.variants
-        end
+        def self.variants; end
       end
     end
   end
diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
index 7d1b7312..35915ce9 100644
--- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi
@@ -43,8 +43,7 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done")
-      end
+      def self.new(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done"); end

       sig do
         override
@@ -58,8 +57,7 @@ module OpenAI
           }
         )
       end
-      def to_hash
-      end
+      def to_hash; end

       # The content part that is done.
module Part @@ -69,8 +67,7 @@ module OpenAI override .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 5fe13ae5..f45e18ea 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -21,19 +21,19 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ) ) @@ -172,12 +172,12 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ] ) ) @@ -187,13 +187,13 @@ module OpenAI sig do params( tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ] ) .void @@ -232,20 +232,20 @@ module OpenAI input: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - 
OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), @@ -266,13 +266,13 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), @@ -300,9 +300,7 @@ module OpenAI truncation: nil, user: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -310,19 +308,19 @@ module OpenAI input: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), @@ -342,12 +340,12 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + 
OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), @@ -356,8 +354,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Text, image, or file inputs to the model, used to generate a response. # @@ -377,25 +374,24 @@ module OpenAI [ String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ] ) end - def self.variants - end + def self.variants; end end # How the model should select which tool (or tools) to use when generating a @@ -410,8 +406,7 @@ module OpenAI [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] ) end - def self.variants - end + def self.variants; end end # The truncation strategy to use for the model response. 
@@ -433,8 +428,7 @@ module OpenAI DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index ee2e69fc..58605657 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -20,12 +20,10 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.created") - end + def self.new(response:, type: :"response.created"); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/lib/openai/models/responses/response_delete_params.rbi index ac0c636e..934c6bdc 100644 --- a/rbi/lib/openai/models/responses/response_delete_params.rbi +++ b/rbi/lib/openai/models/responses/response_delete_params.rbi @@ -15,12 +15,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index a6cb9d7e..843f8868 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -17,12 +17,10 @@ module OpenAI params(code: OpenAI::Models::Responses::ResponseError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:) - end + def self.new(code:, message:); end sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } - def to_hash - end + def to_hash; end # The error code for the response. 
module Code @@ -64,8 +62,7 @@ module OpenAI T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/lib/openai/models/responses/response_error_event.rbi index 55680bad..c81fa537 100644 --- a/rbi/lib/openai/models/responses/response_error_event.rbi +++ b/rbi/lib/openai/models/responses/response_error_event.rbi @@ -25,14 +25,12 @@ module OpenAI params(code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol) .returns(T.attached_class) end - def self.new(code:, message:, param:, type: :error) - end + def self.new(code:, message:, param:, type: :error); end sig do override.returns({code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index fdbaf6f0..7cafac40 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -20,12 +20,10 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.failed") - end + def self.new(response:, type: :"response.failed"); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi index 285df33d..289e0742 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a file search call is completed (results found). sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.file_search_call.completed") - end + def self.new(item_id:, output_index:, type: :"response.file_search_call.completed"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi index e6d95bd6..04d00647 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a file search call is initiated. 
sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.file_search_call.in_progress") - end + def self.new(item_id:, output_index:, type: :"response.file_search_call.in_progress"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi index a6f01d00..a6de609a 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a file search is currently searching. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.file_search_call.searching") - end + def self.new(item_id:, output_index:, type: :"response.file_search_call.searching"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 98e75a9c..7181116b 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -40,8 +40,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, queries:, status:, results: nil, type: :file_search_call) - end + def self.new(id:, queries:, status:, results: nil, type: :file_search_call); end sig do override @@ -55,8 +54,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The status of the file search tool call. 
One of `in_progress`, `searching`, # `incomplete` or `failed`, @@ -79,8 +77,7 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end class Result < OpenAI::Internal::Type::BaseModel @@ -130,8 +127,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) - end + def self.new(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil); end sig do override @@ -145,15 +141,13 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index f05a3373..3fe12068 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -25,8 +25,7 @@ module OpenAI [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 294d5b32..8a1756b0 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -47,8 +47,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, schema:, description: nil, strict: nil, type: :json_schema) - end + def self.new(name:, schema:, description: nil, strict: nil, type: :json_schema); end sig do override @@ -62,8 +61,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi index 8d5a0f74..11ac7ab7 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -29,12 +29,10 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") - end + def self.new(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta"); end sig { override.returns({delta: String, item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi index 49d5fc83..ebafa372 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -28,12 +28,10 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(arguments:, item_id:, output_index:, type: 
:"response.function_call_arguments.done") - end + def self.new(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done"); end sig { override.returns({arguments: String, item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 32854d1a..f0a465e7 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -49,8 +49,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) - end + def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call); end sig do override @@ -65,8 +64,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -85,8 +83,7 @@ module OpenAI T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi index 8a6699fb..79e881a7 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -12,12 +12,10 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. sig { params(id: String).returns(T.attached_class) } - def self.new(id:) - end + def self.new(id:); end sig { override.returns({id: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index f2d23ee3..398930d8 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -38,8 +38,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, call_id:, output:, status: nil, type: :function_call_output) - end + def self.new(id:, call_id:, output:, status: nil, type: :function_call_output); end sig do override @@ -53,8 +52,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
@@ -77,8 +75,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol]) end - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index b479dcf8..92543e70 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -27,8 +27,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, status:, type: :web_search_call) - end + def self.new(id:, status:, type: :web_search_call); end sig do override @@ -36,8 +35,7 @@ module OpenAI {id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol} ) end - def to_hash - end + def to_hash; end # The status of the web search tool call. module Status @@ -55,8 +53,7 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index 187ca04d..37252dc3 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -20,12 +20,10 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.in_progress") - end + def self.new(response:, type: :"response.in_progress"); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index de36828b..661b921f 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -29,8 +29,7 @@ module OpenAI ) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index 305b7c65..12fe7ff5 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -20,12 +20,10 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.incomplete") - end + def self.new(response:, type: :"response.incomplete"); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index cbcdd1d1..b4a5969e 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -25,8 +25,7 @@ module OpenAI ) 
.returns(T.attached_class) end - def self.new(data:, format_:, type: :input_audio) - end + def self.new(data:, format_:, type: :input_audio); end sig do override @@ -34,8 +33,7 @@ module OpenAI {data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol} ) end - def to_hash - end + def to_hash; end # The format of the audio data. Currently supported formats are `mp3` and `wav`. module Format @@ -49,8 +47,7 @@ module OpenAI WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/lib/openai/models/responses/response_input_content.rbi index e1a66fb0..e4f21010 100644 --- a/rbi/lib/openai/models/responses/response_input_content.rbi +++ b/rbi/lib/openai/models/responses/response_input_content.rbi @@ -13,8 +13,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/lib/openai/models/responses/response_input_file.rbi index 81323132..5d46ed32 100644 --- a/rbi/lib/openai/models/responses/response_input_file.rbi +++ b/rbi/lib/openai/models/responses/response_input_file.rbi @@ -33,12 +33,10 @@ module OpenAI sig do params(file_data: String, file_id: String, filename: String, type: Symbol).returns(T.attached_class) end - def self.new(file_data: nil, file_id: nil, filename: nil, type: :input_file) - end + def self.new(file_data: nil, file_id: nil, filename: nil, type: :input_file); end sig { override.returns({type: Symbol, file_data: String, file_id: String, filename: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index a15a6bc9..f6089f0f 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -33,8 +33,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(detail:, file_id: nil, image_url: nil, type: :input_image) - end + def self.new(detail:, file_id: nil, image_url: nil, type: :input_image); end sig do override @@ -47,8 +46,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. 
@@ -64,8 +62,7 @@ module OpenAI AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index c01e2b4f..6a70db99 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -17,11 +17,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ) end @@ -52,12 +52,12 @@ module OpenAI sig do params( content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ], role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, @@ -65,19 +65,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, status: nil, type: nil) - end + def self.new(content:, role:, status: nil, type: nil); end sig do override .returns( { content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ], role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, @@ -85,8 +84,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The role of the message input. One of `user`, `system`, or `developer`. module Role @@ -102,8 +100,7 @@ module OpenAI DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -124,8 +121,7 @@ module OpenAI T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end # The type of the message input. Always set to `message`. 
@@ -140,8 +136,7 @@ module OpenAI MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -187,10 +182,10 @@ module OpenAI sig do params( acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Internal::AnyHash + ) ] ) .void @@ -212,10 +207,10 @@ module OpenAI output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), id: String, acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Internal::AnyHash + ) ], status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, type: Symbol @@ -245,8 +240,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The ID of the pending safety check. @@ -263,12 +257,10 @@ module OpenAI # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } - def self.new(id:, code:, message:) - end + def self.new(id:, code:, message:); end sig { override.returns({id: String, code: String, message: String}) } - def to_hash - end + def to_hash; end end # The status of the message input. One of `in_progress`, `completed`, or @@ -301,8 +293,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol]) end - def self.values - end + def self.values; end end end @@ -346,8 +337,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output) - end + def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output); end sig do override @@ -361,8 +351,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -394,8 +383,7 @@ module OpenAI override .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol]) end - def self.values - end + def self.values; end end end @@ -410,12 +398,10 @@ module OpenAI # An internal identifier for an item to reference. 
sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :item_reference) - end + def self.new(id:, type: :item_reference); end sig { override.returns({id: String, type: Symbol}) } - def to_hash - end + def to_hash; end end sig do override .returns( [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 90c6a533..a8b4a591 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -13,11 +13,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ] ) end @@ -46,12 +46,12 @@ module OpenAI params( id: String, content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ], role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::OrSymbol, status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol, @@ -59,8 +59,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, content:, role:, status: nil, type: nil) - end + def self.new(id:, content:, role:, status: nil, type: nil); end sig do override .returns( { id: String, content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) + T.any( + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Responses::ResponseInputImage, + OpenAI::Models::Responses::ResponseInputFile + ) ], role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, @@ -80,8 +79,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The role of the message input. One of `user`, `system`, or `developer`. module Role @@ -96,8 +94,7 @@ module OpenAI DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol]) } - def self.values - end + def self.values; end end # The status of the item.
One of `in_progress`, `completed`, or `incomplete`. @@ -117,8 +114,7 @@ module OpenAI T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end # The type of the message input. Always set to `message`. @@ -132,8 +128,7 @@ module OpenAI MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/lib/openai/models/responses/response_input_text.rbi index 94dd8553..d3df7cc9 100644 --- a/rbi/lib/openai/models/responses/response_input_text.rbi +++ b/rbi/lib/openai/models/responses/response_input_text.rbi @@ -14,12 +14,10 @@ module OpenAI # A text input to the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :input_text) - end + def self.new(text:, type: :input_text); end sig { override.returns({text: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/lib/openai/models/responses/response_item.rbi index 3a0b1c92..21440ba2 100644 --- a/rbi/lib/openai/models/responses/response_item.rbi +++ b/rbi/lib/openai/models/responses/response_item.rbi @@ -13,8 +13,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index 5b5f1583..a041edfb 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -8,16 +8,16 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) + T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) ] ) end @@ -43,17 +43,17 @@ module OpenAI sig do params( data: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - 
OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) + T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) ], first_id: String, has_more: T::Boolean, @@ -62,24 +62,23 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, first_id:, has_more:, last_id:, object: :list) - end + def self.new(data:, first_id:, has_more:, last_id:, object: :list); end sig do override .returns( { data: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) + T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) ], first_id: String, has_more: T::Boolean, @@ -88,8 +87,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/lib/openai/models/responses/response_output_audio.rbi index 53e9b4c3..010d71b6 100644 --- a/rbi/lib/openai/models/responses/response_output_audio.rbi +++ b/rbi/lib/openai/models/responses/response_output_audio.rbi @@ -18,12 +18,10 @@ module OpenAI # An audio output from the model. 
sig { params(data: String, transcript: String, type: Symbol).returns(T.attached_class) } - def self.new(data:, transcript:, type: :output_audio) - end + def self.new(data:, transcript:, type: :output_audio); end sig { override.returns({data: String, transcript: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/lib/openai/models/responses/response_output_item.rbi index 5827fb3d..5409b70c 100644 --- a/rbi/lib/openai/models/responses/response_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_output_item.rbi @@ -13,8 +13,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi index 3e5daa2e..230e21d0 100644 --- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi @@ -44,8 +44,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(item:, output_index:, type: :"response.output_item.added") - end + def self.new(item:, output_index:, type: :"response.output_item.added"); end sig do override @@ -64,8 +63,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi index e7625f50..94747df6 100644 --- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi @@ -44,8 +44,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(item:, output_index:, type: :"response.output_item.done") - end + def self.new(item:, output_index:, type: :"response.output_item.done"); end sig do override @@ -64,8 +63,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index 0a39a8eb..ca050336 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -34,11 +34,11 @@ module OpenAI params( id: String, content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputRefusal - ) + T.any( + OpenAI::Models::Responses::ResponseOutputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseOutputRefusal + ) ], status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, @@ -46,8 +46,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, content:, status:, role: :assistant, type: :message) - end + def self.new(id:, content:, status:, role: :assistant, type: :message); end sig do override @@ -61,8 +60,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # A text output from the model. 
module Content @@ -72,8 +70,7 @@ module OpenAI override .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) end - def self.variants - end + def self.variants; end end # The status of the message input. One of `in_progress`, `completed`, or @@ -90,8 +87,7 @@ module OpenAI INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/lib/openai/models/responses/response_output_refusal.rbi index 9fff39df..ece03c37 100644 --- a/rbi/lib/openai/models/responses/response_output_refusal.rbi +++ b/rbi/lib/openai/models/responses/response_output_refusal.rbi @@ -14,12 +14,10 @@ module OpenAI # A refusal from the model. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(refusal:, type: :refusal) - end + def self.new(refusal:, type: :refusal); end sig { override.returns({refusal: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index e8092e12..4d499086 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -8,11 +8,11 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) + T.any( + OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + ) ] ) end @@ -30,39 +30,37 @@ module OpenAI sig do params( annotations: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) + T.any( + OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + ) ], text: String, type: Symbol ) .returns(T.attached_class) end - def self.new(annotations:, text:, type: :output_text) - end + def self.new(annotations:, text:, type: :output_text); end sig do override .returns( { annotations: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) + T.any( + OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + ) ], text: String, type: Symbol } ) end - def to_hash - end + def to_hash; end # A citation to a file. module Annotation @@ -83,12 +81,10 @@ module OpenAI # A citation to a file. 
sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_citation) - end + def self.new(file_id:, index:, type: :file_citation); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -117,8 +113,7 @@ module OpenAI params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:, type: :url_citation) - end + def self.new(end_index:, start_index:, title:, url:, type: :url_citation); end sig do override.returns( @@ -131,8 +126,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end class FilePath < OpenAI::Internal::Type::BaseModel @@ -150,12 +144,10 @@ module OpenAI # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_path) - end + def self.new(file_id:, index:, type: :file_path); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -164,8 +156,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 3359bd3e..1b8c7536 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, summary:, status: nil, type: :reasoning) - end + def self.new(id:, summary:, status: nil, type: :reasoning); end sig do override @@ -49,8 +48,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class Summary < OpenAI::Internal::Type::BaseModel # A short summary of the reasoning used by the model when generating the response. @@ -62,12 +60,10 @@ module OpenAI attr_accessor :type sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :summary_text) - end + def self.new(text:, type: :summary_text); end sig { override.returns({text: String, type: Symbol}) } - def to_hash - end + def to_hash; end end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
@@ -84,8 +80,7 @@ module OpenAI INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi index 1fffadb6..ec2c268f 100644 --- a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi @@ -29,8 +29,7 @@ module OpenAI params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) .returns(T.attached_class) end - def self.new(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") - end + def self.new(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta"); end sig do override @@ -42,8 +41,7 @@ module OpenAI type: Symbol }) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi index 6b9582e9..9f7db249 100644 --- a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi @@ -35,8 +35,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") - end + def self.new(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done"); end sig do override @@ -48,8 +47,7 @@ module OpenAI type: Symbol }) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index d1308648..9afae2e8 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -22,8 +22,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(include: nil, request_options: {}) - end + def self.new(include: nil, request_options: {}); end sig do override @@ -34,8 +33,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index c69a253e..8768bb79 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -18,8 +18,7 @@ module OpenAI INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseStatus::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index ced70ddc..2df75056 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -13,8 +13,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, 
OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index e448a3e2..16c54b75 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -60,9 +60,7 @@ module OpenAI item_id:, output_index:, type: :"response.output_text.annotation.added" - ) - end - + ); end sig do override .returns( @@ -80,8 +78,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # A citation to a file. module Annotation @@ -102,12 +99,10 @@ module OpenAI # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_citation) - end + def self.new(file_id:, index:, type: :file_citation); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -136,8 +131,7 @@ module OpenAI params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:, type: :url_citation) - end + def self.new(end_index:, start_index:, title:, url:, type: :url_citation); end sig do override.returns( @@ -150,8 +144,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end class FilePath < OpenAI::Internal::Type::BaseModel @@ -169,12 +162,10 @@ module OpenAI # A path to a file. 
sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_path) - end + def self.new(file_id:, index:, type: :file_path); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end sig do @@ -183,8 +174,7 @@ module OpenAI [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index b5d1a866..1e338162 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -59,8 +59,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(format_: nil) - end + def self.new(format_: nil); end sig do override @@ -74,8 +73,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_delta_event.rbi index 7bbc55cd..7880e8f9 100644 --- a/rbi/lib/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_delta_event.rbi @@ -42,8 +42,7 @@ module OpenAI type: Symbol }) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/lib/openai/models/responses/response_text_done_event.rbi index 743e9572..52fa9bef 100644 --- a/rbi/lib/openai/models/responses/response_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_done_event.rbi @@ -29,8 +29,7 @@ module OpenAI params(content_index: Integer, item_id: String, output_index: Integer, text: String, type: Symbol) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") - end + def self.new(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done"); end sig do override @@ -42,8 +41,7 @@ module OpenAI type: Symbol }) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 47ce8244..89b2102a 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -73,8 +73,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class InputTokensDetails < OpenAI::Internal::Type::BaseModel # The number of tokens that were retrieved from the cache. @@ -84,12 +83,10 @@ module OpenAI # A detailed breakdown of the input tokens. sig { params(cached_tokens: Integer).returns(T.attached_class) } - def self.new(cached_tokens:) - end + def self.new(cached_tokens:); end sig { override.returns({cached_tokens: Integer}) } - def to_hash - end + def to_hash; end end class OutputTokensDetails < OpenAI::Internal::Type::BaseModel @@ -99,12 +96,10 @@ module OpenAI # A detailed breakdown of the output tokens. 
sig { params(reasoning_tokens: Integer).returns(T.attached_class) } - def self.new(reasoning_tokens:) - end + def self.new(reasoning_tokens:); end sig { override.returns({reasoning_tokens: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi index 160537d1..fab93564 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a web search call is completed. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.completed") - end + def self.new(item_id:, output_index:, type: :"response.web_search_call.completed"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi index 428a1aa1..b98665f8 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a web search call is initiated. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.in_progress") - end + def self.new(item_id:, output_index:, type: :"response.web_search_call.in_progress"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi index 6edd9004..0ad8d67e 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi @@ -18,12 +18,10 @@ module OpenAI # Emitted when a web search call is executing. 
sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.searching") - end + def self.new(item_id:, output_index:, type: :"response.web_search_call.searching"); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index edc55ed0..9aab8bc4 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -15,8 +15,7 @@ module OpenAI [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] ) end - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/lib/openai/models/responses/tool_choice_function.rbi index d22afe4e..d0753332 100644 --- a/rbi/lib/openai/models/responses/tool_choice_function.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_function.rbi @@ -14,12 +14,10 @@ module OpenAI # Use this option to force the model to call a specific function. sig { params(name: String, type: Symbol).returns(T.attached_class) } - def self.new(name:, type: :function) - end + def self.new(name:, type: :function); end sig { override.returns({name: String, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index 9eeb2f91..319448bc 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -23,8 +23,7 @@ module OpenAI REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 9969a8c5..82a4138e 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -18,12 +18,10 @@ module OpenAI # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) } - def self.new(type:) - end + def self.new(type:); end sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) } - def to_hash - end + def to_hash; end # The type of hosted tool the model should use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools).
@@ -49,8 +47,7 @@ module OpenAI T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 5558e18f..63a05033 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -41,8 +41,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, search_context_size: nil, user_location: nil) - end + def self.new(type:, search_context_size: nil, user_location: nil); end sig do override @@ -54,8 +53,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The type of the web search tool. One of: # @@ -74,8 +72,7 @@ module OpenAI T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end # High level guidance for the amount of context window space to use for the @@ -93,8 +90,7 @@ module OpenAI HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol]) } - def self.values - end + def self.values; end end class UserLocation < OpenAI::Internal::Type::BaseModel @@ -136,14 +132,12 @@ module OpenAI params(city: String, country: String, region: String, timezone: String, type: Symbol) .returns(T.attached_class) end - def self.new(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) - end + def self.new(city: nil, country: nil, region: nil, timezone: nil, type: :approximate); end sig do override.returns({type: Symbol, city: String, country: String, region: String, timezone: String}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 841c3432..79e16c96 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -6,8 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } - def self.variants - end + def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ResponsesModel::TaggedSymbol) } diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index 8ac37465..1a672f16 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -15,12 +15,10 @@ module OpenAI attr_accessor :max_chunk_size_tokens sig { params(chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer).returns(T.attached_class) } - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:) - end + def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi 
b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index 4976ca0e..a5f3133a 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -17,12 +17,10 @@ module OpenAI params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(static:, type: :static) - end + def self.new(static:, type: :static); end sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index b3f1b26c..49f61a35 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -18,12 +18,10 @@ module OpenAI params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(static:, type: :static) - end + def self.new(static:, type: :static); end sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index e3342969..bc80b4c6 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -88,8 +88,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # The status of the Upload. module Status @@ -104,8 +103,7 @@ module OpenAI EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Upload::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/lib/openai/models/upload_cancel_params.rbi index f58f71bc..8dd03bf2 100644 --- a/rbi/lib/openai/models/upload_cancel_params.rbi +++ b/rbi/lib/openai/models/upload_cancel_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index 3478dd28..f6d148b3 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -26,14 +26,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(part_ids:, md5: nil, request_options: {}) - end + def self.new(part_ids:, md5: nil, request_options: {}); end sig do override.returns({part_ids: T::Array[String], md5: String, request_options: OpenAI::RequestOptions}) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index aacc2ab0..050879be 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -38,8 +38,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(bytes:, filename:, mime_type:, purpose:, request_options: {}) - end + def self.new(bytes:, filename:, mime_type:, purpose:, request_options: {}); end sig do 
override @@ -53,8 +52,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index 43f97062..f2ed872a 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -18,12 +18,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, request_options: {}) - end + def self.new(data:, request_options: {}); end sig { override.returns({data: T.any(IO, StringIO), request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index 5e5b8819..a6d037e0 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -24,12 +24,10 @@ module OpenAI sig do params(id: String, created_at: Integer, upload_id: String, object: Symbol).returns(T.attached_class) end - def self.new(id:, created_at:, upload_id:, object: :"upload.part") - end + def self.new(id:, created_at:, upload_id:, object: :"upload.part"); end sig { override.returns({id: String, created_at: Integer, object: Symbol, upload_id: String}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 0c48914f..9969477d 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -89,9 +89,7 @@ module OpenAI expires_after: nil, expires_at: nil, object: :vector_store - ) - end - + ); end sig do override .returns( @@ -110,8 +108,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FileCounts < OpenAI::Internal::Type::BaseModel # The number of files that were cancelled. @@ -144,8 +141,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(cancelled:, completed:, failed:, in_progress:, total:) - end + def self.new(cancelled:, completed:, failed:, in_progress:, total:); end sig do override @@ -157,8 +153,7 @@ module OpenAI total: Integer }) end - def to_hash - end + def to_hash; end end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -175,8 +170,7 @@ module OpenAI COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStore::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -191,12 +185,10 @@ module OpenAI # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at) - end + def self.new(days:, anchor: :last_active_at); end sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index 36bee1e6..a5ba26e4 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -110,8 +110,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: @@ -125,12 +124,10 @@ module OpenAI # The expiration policy for a vector store. 
sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at) - end + def self.new(days:, anchor: :last_active_at); end sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/lib/openai/models/vector_store_delete_params.rbi index 9f646bad..fa28a443 100644 --- a/rbi/lib/openai/models/vector_store_delete_params.rbi +++ b/rbi/lib/openai/models/vector_store_delete_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_store_deleted.rbi b/rbi/lib/openai/models/vector_store_deleted.rbi index 7143c3a2..8c4ccaf1 100644 --- a/rbi/lib/openai/models/vector_store_deleted.rbi +++ b/rbi/lib/openai/models/vector_store_deleted.rbi @@ -13,12 +13,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"vector_store.deleted") - end + def self.new(id:, deleted:, object: :"vector_store.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index bbe5e54f..240ac978 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -52,8 +52,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -67,8 +66,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
@@ -83,8 +81,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/lib/openai/models/vector_store_retrieve_params.rbi index a8ea5ee3..2f2b9fef 100644 --- a/rbi/lib/openai/models/vector_store_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_store_retrieve_params.rbi @@ -14,12 +14,10 @@ module OpenAI ) ).returns(T.attached_class) end - def self.new(request_options: {}) - end + def self.new(request_options: {}); end sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 7019fb58..523d2871 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -83,16 +83,14 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # A query string for a search module Query extend OpenAI::Internal::Type::Union sig { override.returns([String, T::Array[String]]) } - def self.variants - end + def self.variants; end StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) end @@ -102,8 +100,7 @@ module OpenAI extend OpenAI::Internal::Type::Union sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def self.variants - end + def self.variants; end end class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -127,8 +124,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranker: nil, score_threshold: nil) - end + def self.new(ranker: nil, score_threshold: nil); end sig do override @@ -136,8 +132,7 @@ module OpenAI {ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float} ) end - def to_hash - end + def to_hash; end module Ranker extend OpenAI::Internal::Type::Enum @@ -155,8 +150,7 @@ module OpenAI ) sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index a024e03c..06e2c923 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -37,8 +37,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(attributes:, content:, file_id:, filename:, score:) - end + def self.new(attributes:, content:, file_id:, filename:, score:); end sig do override @@ -52,15 +51,13 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end class Content < OpenAI::Internal::Type::BaseModel @@ -76,15 +73,13 @@ module OpenAI params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) .returns(T.attached_class) end - def self.new(text:, type:) - end + def self.new(text:, type:); end sig do override .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) end - def to_hash - end + def to_hash; 
end # The type of content. module Type @@ -97,8 +92,7 @@ module OpenAI TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 7f3e0224..81e71bed 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -40,8 +40,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(expires_after: nil, metadata: nil, name: nil, request_options: {}) - end + def self.new(expires_after: nil, metadata: nil, name: nil, request_options: {}); end sig do override @@ -54,8 +53,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: @@ -69,12 +67,10 @@ module OpenAI # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at) - end + def self.new(days:, anchor: :last_active_at); end sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi index 3528e39d..5d9bf50a 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -17,12 +17,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, request_options: {}) - end + def self.new(vector_store_id:, request_options: {}); end sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index 2d21b5fc..fd42f4c3 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -60,8 +60,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) - end + def self.new(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}); end sig do override @@ -77,15 +76,13 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 242a405d..93928ade 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -90,8 +90,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
module Filter @@ -111,8 +110,7 @@ module OpenAI T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol]) } - def self.values - end + def self.values; end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -129,8 +127,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi index 465c7bdb..c59d8363 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -17,12 +17,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, request_options: {}) - end + def self.new(vector_store_id:, request_options: {}); end sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/lib/openai/models/vector_stores/file_content_params.rbi index 98263089..e241565e 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_params.rbi @@ -17,12 +17,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, request_options: {}) - end + def self.new(vector_store_id:, request_options: {}); end sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/lib/openai/models/vector_stores/file_content_response.rbi index 9b48e97a..d7f8b8d3 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_response.rbi @@ -19,12 +19,10 @@ module OpenAI attr_writer :type sig { params(text: String, type: String).returns(T.attached_class) } - def self.new(text: nil, type: nil) - end + def self.new(text: nil, type: nil); end sig { override.returns({text: String, type: String}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index dc5f40b1..f63b7e8d 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -60,8 +60,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) - end + def self.new(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}); end sig do override @@ -77,15 +76,13 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi index 
940154a6..d5920802 100644 --- a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_delete_params.rbi @@ -17,12 +17,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, request_options: {}) - end + def self.new(vector_store_id:, request_options: {}); end sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index a13740f0..9f451eec 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -61,8 +61,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) - end + def self.new(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}); end sig do override @@ -77,8 +76,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter @@ -94,8 +92,7 @@ module OpenAI CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol]) } - def self.values - end + def self.values; end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -111,8 +108,7 @@ module OpenAI DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi index 553291b7..17c67d42 100644 --- a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi @@ -17,12 +17,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, request_options: {}) - end + def self.new(vector_store_id:, request_options: {}); end sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash - end + def to_hash; end end end end diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index eefd1019..9eb12ec9 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -26,8 +26,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, attributes:, request_options: {}) - end + def self.new(vector_store_id:, attributes:, request_options: {}); end sig do override @@ -39,15 +38,13 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 81d9bc52..e729c6ee 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ 
b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -106,9 +106,7 @@ module OpenAI attributes: nil, chunking_strategy: nil, object: :"vector_store.file" - ) - end - + ); end sig do override .returns( @@ -125,8 +123,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class LastError < OpenAI::Internal::Type::BaseModel # One of `server_error` or `rate_limit_exceeded`. @@ -143,8 +140,7 @@ module OpenAI params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:) - end + def self.new(code:, message:); end sig do override @@ -152,8 +148,7 @@ module OpenAI {code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, message: String} ) end - def to_hash - end + def to_hash; end # One of `server_error` or `rate_limit_exceeded`. module Code @@ -172,8 +167,7 @@ module OpenAI T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol]) } - def self.values - end + def self.values; end end end @@ -193,16 +187,14 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end module Attribute extend OpenAI::Internal::Type::Union sig { override.returns([String, Float, T::Boolean]) } - def self.variants - end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 6f9eb44c..0ee00156 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -75,8 +75,7 @@ module OpenAI } ) end - def to_hash - end + def to_hash; end class FileCounts < OpenAI::Internal::Type::BaseModel # The number of files that where cancelled. 
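# A reading sketch (hypothetical, assuming a configured `client`): the
# counts above can be inspected on a retrieved batch, e.g.
#
#   batch = client.vector_stores.file_batches.retrieve(
#     "vsfb_abc123",
#     vector_store_id: "vs_abc123"
#   )
#   batch.file_counts.completed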
@@ -109,8 +108,7 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(cancelled:, completed:, failed:, in_progress:, total:) - end + def self.new(cancelled:, completed:, failed:, in_progress:, total:); end sig do override @@ -122,8 +120,7 @@ module OpenAI total: Integer }) end - def to_hash - end + def to_hash; end end # The status of the vector store files batch, which can be either `in_progress`, @@ -142,8 +139,7 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol]) } - def self.values - end + def self.values; end end end end diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi index 702d956c..9642c9d5 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -14,12 +14,10 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"vector_store.file.deleted") - end + def self.new(id:, deleted:, object: :"vector_store.file.deleted"); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash - end + def to_hash; end end end diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index 017b80bf..3f0ab9b4 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -9,8 +9,7 @@ module OpenAI class RequestOptions < OpenAI::Internal::Type::BaseModel # @api private sig { params(opts: T.any(T.self_type, T::Hash[Symbol, T.anything])).void } - def self.validate!(opts) - end + def self.validate!(opts); end # Idempotency key to send with request and all associated retries. Will only be # sent for write requests. @@ -42,7 +41,6 @@ module OpenAI # Returns a new instance of RequestOptions. sig { params(values: OpenAI::Internal::AnyHash).returns(T.attached_class) } - def self.new(values = {}) - end + def self.new(values = {}); end end end diff --git a/rbi/lib/openai/resources/audio.rbi b/rbi/lib/openai/resources/audio.rbi index 1747c22c..53749c68 100644 --- a/rbi/lib/openai/resources/audio.rbi +++ b/rbi/lib/openai/resources/audio.rbi @@ -14,8 +14,7 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index 43967cf5..f51b153a 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -38,13 +38,10 @@ module OpenAI # the default. speed: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index d08bf3a7..9fe4f4a3 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -66,9 +66,7 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. 
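# A minimal non-streaming sketch (hypothetical, assuming a configured
# `client` and a local `speech.mp3`):
#
#   client.audio.transcriptions.create(
#     file: File.open("speech.mp3", "rb"),
#     model: :"whisper-1"
#   )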
stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming # counterpart. # @@ -88,10 +86,10 @@ module OpenAI ) .returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Audio::TranscriptionTextDeltaEvent, - OpenAI::Models::Audio::TranscriptionTextDoneEvent - ) + T.any( + OpenAI::Models::Audio::TranscriptionTextDeltaEvent, + OpenAI::Models::Audio::TranscriptionTextDoneEvent + ) ] ) end @@ -138,13 +136,10 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 448c96b3..4ee7bcc0 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -38,13 +38,10 @@ module OpenAI # automatically increase the temperature until certain thresholds are hit. temperature: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index f723b248..ec20cb07 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -41,9 +41,7 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ) - end - + ); end # Retrieves a batch. sig do params( @@ -56,9 +54,7 @@ module OpenAI # The ID of the batch to retrieve. batch_id, request_options: {} - ) - end - + ); end # List your organization's batches. sig do params( @@ -78,9 +74,7 @@ module OpenAI # 100, and the default is 20. limit: nil, request_options: {} - ) - end - + ); end # Cancels an in-progress batch. The batch will be in status `cancelling` for up to # 10 minutes, before changing to `cancelled`, where it will have partial results # (if any) available in the output file. @@ -95,13 +89,10 @@ module OpenAI # The ID of the batch to cancel. 
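# For example (hypothetical, assuming a configured `client`):
#
#   client.batches.cancel("batch_abc123")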
batch_id, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta.rbi b/rbi/lib/openai/resources/beta.rbi index c53baf2b..c0be4403 100644 --- a/rbi/lib/openai/resources/beta.rbi +++ b/rbi/lib/openai/resources/beta.rbi @@ -11,8 +11,7 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index bbbe925b..277c81d2 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -25,12 +25,12 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) @@ -106,9 +106,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ) - end - + ); end # Retrieves an assistant. sig do params( @@ -121,9 +119,7 @@ module OpenAI # The ID of the assistant to retrieve. assistant_id, request_options: {} - ) - end - + ); end # Modifies an assistant. sig do params( @@ -146,12 +142,12 @@ module OpenAI temperature: T.nilable(Float), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ], top_p: T.nilable(Float), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) @@ -229,9 +225,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ) - end - + ); end # Returns a list of assistants. sig do params( @@ -261,9 +255,7 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # Delete an assistant. sig do params( @@ -276,13 +268,10 @@ module OpenAI # The ID of the assistant to delete. assistant_id, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index a4d03027..07a8d939 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -37,9 +37,7 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ) - end - + ); end # Retrieves a thread. 
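# A usage sketch (hypothetical, assuming a configured `client`):
#
#   thread = client.beta.threads.retrieve("thread_abc123")
#   puts thread.id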
sig do params( @@ -52,9 +50,7 @@ module OpenAI # The ID of the thread to retrieve. thread_id, request_options: {} - ) - end - + ); end # Modifies a thread. sig do params( @@ -81,9 +77,7 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ) - end - + ); end # Delete a thread. sig do params( @@ -96,9 +90,7 @@ module OpenAI # The ID of the thread to delete. thread_id, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Create a thread and run it in one request. @@ -132,12 +124,12 @@ module OpenAI tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -242,9 +234,7 @@ module OpenAI # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming # counterpart. # @@ -279,12 +269,12 @@ module OpenAI tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -296,32 +286,32 @@ module OpenAI ) .returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) + T.any( + OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + ) ] ) end @@ -418,13 +408,10 @@ module OpenAI # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index 8585935d..ac987e13 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -12,12 +12,12 @@ module OpenAI content: T.any( String, T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) + T.any( + OpenAI::Models::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::Threads::ImageURLContentBlock, + OpenAI::Models::Beta::Threads::TextContentBlockParam + ) ] ), role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, @@ -52,9 +52,7 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ) - end - + ); end # Retrieve a message. sig do params( @@ -71,9 +69,7 @@ module OpenAI # to which this message belongs. thread_id:, request_options: {} - ) - end - + ); end # Modifies a message. sig do params( @@ -97,9 +93,7 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ) - end - + ); end # Returns a list of messages for a given thread. sig do params( @@ -136,9 +130,7 @@ module OpenAI # Filter messages by the run ID that generated them. 
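# For instance (hypothetical, assuming a configured `client`), listing only
# the messages produced by one run:
#
#   client.beta.threads.messages.list("thread_abc123", run_id: "run_abc123")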
run_id: nil, request_options: {} - ) - end - + ); end # Deletes a message. sig do params( @@ -154,13 +146,10 @@ module OpenAI # The ID of the thread to which this message belongs. thread_id:, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 69460b3d..09f55c13 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -47,12 +47,12 @@ module OpenAI ), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -177,9 +177,7 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming # counterpart. # @@ -219,12 +217,12 @@ module OpenAI ), tools: T.nilable( T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) + T.any( + OpenAI::Models::Beta::CodeInterpreterTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Beta::FileSearchTool, + OpenAI::Models::Beta::FunctionTool + ) ] ), top_p: T.nilable(Float), @@ -236,32 +234,32 @@ module OpenAI ) .returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) + T.any( + OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, + 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + ) ] ) end @@ -378,9 +376,7 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # Retrieves a run. sig do params( @@ -397,9 +393,7 @@ module OpenAI # that was run. thread_id:, request_options: {} - ) - end - + ); end # Modifies a run. sig do params( @@ -424,9 +418,7 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ) - end - + ); end # Returns a list of runs belonging to a thread. sig do params( @@ -459,9 +451,7 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # Cancels a run that is `in_progress`. sig do params( @@ -477,9 +467,7 @@ module OpenAI # The ID of the thread to which this run belongs. thread_id:, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # @@ -511,9 +499,7 @@ module OpenAI # non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for # non-streaming counterpart. 
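# A hypothetical submission, assuming a configured `client` and a run that
# is waiting on a single tool call:
#
#   client.beta.threads.runs.submit_tool_outputs(
#     "run_abc123",
#     thread_id: "thread_abc123",
#     tool_outputs: [{tool_call_id: "call_abc123", output: "70 degrees"}]
#   )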
# @@ -531,32 +517,32 @@ module OpenAI ) .returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) + T.any( + OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + ) ] ) end @@ -574,13 +560,10 @@ module OpenAI # non-streaming use cases, respectively. 
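# Sketch of consuming the streaming variant (hypothetical, assuming the
# same `client` and run as above); each yielded event is one member of the
# stream-event union above:
#
#   events = client.beta.threads.runs.submit_tool_outputs_stream_raw(
#     "run_abc123",
#     thread_id: "thread_abc123",
#     tool_outputs: [{tool_call_id: "call_abc123", output: "70 degrees"}]
#   )
#   events.each { |event| puts event.class }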
stream: true, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 81727780..86935b35 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -34,9 +34,7 @@ module OpenAI # for more information. include: nil, request_options: {} - ) - end - + ); end # Returns a list of run steps belonging to a run. sig do params( @@ -82,13 +80,10 @@ module OpenAI # ascending order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/chat.rbi b/rbi/lib/openai/resources/chat.rbi index 45c92d55..94e9f4ce 100644 --- a/rbi/lib/openai/resources/chat.rbi +++ b/rbi/lib/openai/resources/chat.rbi @@ -8,8 +8,7 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 20a9a8b0..34c8a36d 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -29,15 +29,15 @@ module OpenAI sig do params( messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) + T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Internal::AnyHash, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), @@ -274,9 +274,7 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. 
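# A minimal streaming sketch (hypothetical, assuming a configured `client`):
#
#   stream = client.chat.completions.stream_raw(
#     messages: [{role: :user, content: "Say hello"}],
#     model: :"gpt-4o"
#   )
#   stream.each { |chunk| print chunk.choices.first&.delta&.content.to_s }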
# # **Starting a new project?** We recommend trying @@ -299,15 +297,15 @@ module OpenAI sig do params( messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) + T.any( + OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Internal::AnyHash, + OpenAI::Models::Chat::ChatCompletionSystemMessageParam, + OpenAI::Models::Chat::ChatCompletionUserMessageParam, + OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Models::Chat::ChatCompletionToolMessageParam, + OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + ) ], model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), @@ -544,9 +542,7 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # Get a stored chat completion. Only Chat Completions that have been created with # the `store` parameter set to `true` will be returned. sig do @@ -560,9 +556,7 @@ module OpenAI # The ID of the chat completion to retrieve. completion_id, request_options: {} - ) - end - + ); end # Modify a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be modified. Currently, the only # supported modification is to update the `metadata` field. @@ -585,9 +579,7 @@ module OpenAI # a maximum length of 512 characters. metadata:, request_options: {} - ) - end - + ); end # List stored Chat Completions. Only Chat Completions that have been stored with # the `store` parameter set to `true` will be returned. sig do @@ -616,9 +608,7 @@ module OpenAI # `desc` for descending order. Defaults to `asc`. order: nil, request_options: {} - ) - end - + ); end # Delete a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be deleted. sig do @@ -632,13 +622,10 @@ module OpenAI # The ID of the chat completion to delete. completion_id, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index 440c076c..bf8cde9f 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -28,13 +28,10 @@ module OpenAI # for descending order. Defaults to `asc`. 
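# For example (hypothetical, assuming a configured `client` and a stored
# completion):
#
#   client.chat.completions.messages.list("chatcmpl_abc123", order: :asc)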
order: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index 2054b2b3..a91aba7f 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -9,14 +9,7 @@ module OpenAI sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -145,23 +138,14 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. # # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -290,13 +274,10 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index 182cc4d0..f6e4ac51 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -42,13 +42,10 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 322f5e32..56bf2af9 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -41,9 +41,7 @@ module OpenAI # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, request_options: {} - ) - end - + ); end # Returns information about a specific file. sig do params( @@ -56,9 +54,7 @@ module OpenAI # The ID of the file to use for this request. file_id, request_options: {} - ) - end - + ); end # Returns a list of files. sig do params( @@ -85,9 +81,7 @@ module OpenAI # Only return files with the given purpose. purpose: nil, request_options: {} - ) - end - + ); end # Delete a file. sig do params( @@ -100,9 +94,7 @@ module OpenAI # The ID of the file to use for this request. file_id, request_options: {} - ) - end - + ); end # Returns the contents of the specified file. sig do params( @@ -115,13 +107,10 @@ module OpenAI # The ID of the file to use for this request. 
file_id, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/fine_tuning.rbi b/rbi/lib/openai/resources/fine_tuning.rbi index d6d0b987..fba94c1f 100644 --- a/rbi/lib/openai/resources/fine_tuning.rbi +++ b/rbi/lib/openai/resources/fine_tuning.rbi @@ -8,8 +8,7 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 2370a56a..90b307a0 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -91,9 +91,7 @@ module OpenAI # for more details. validation_file: nil, request_options: {} - ) - end - + ); end # Get info about a fine-tuning job. # # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) @@ -108,9 +106,7 @@ module OpenAI # The ID of the fine-tuning job. fine_tuning_job_id, request_options: {} - ) - end - + ); end # List your organization's fine-tuning jobs sig do params( @@ -130,9 +126,7 @@ module OpenAI # Alternatively, set `metadata=null` to indicate no metadata. metadata: nil, request_options: {} - ) - end - + ); end # Immediately cancel a fine-tune job. sig do params( @@ -145,9 +139,7 @@ module OpenAI # The ID of the fine-tuning job to cancel. fine_tuning_job_id, request_options: {} - ) - end - + ); end # Get status updates for a fine-tuning job. sig do params( @@ -166,13 +158,10 @@ module OpenAI # Number of events to retrieve. limit: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi index d165ca24..dfc6cee6 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -23,13 +23,10 @@ module OpenAI # Number of checkpoints to retrieve. limit: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 515ec04c..7b0c9a45 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -38,9 +38,7 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ) - end - + ); end # Creates an edited or extended image given an original image and a prompt. sig do params( @@ -84,9 +82,7 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ) - end - + ); end # Creates an image given a prompt. sig do params( @@ -133,13 +129,10 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
user: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 8a21caa0..58a63411 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -16,17 +16,14 @@ module OpenAI # The ID of the model to use for this request model, request_options: {} - ) - end - + ); end # Lists the currently available models, and provides basic information about each # one such as the owner and availability. sig do params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))) .returns(OpenAI::Internal::Page[OpenAI::Models::Model]) end - def list(request_options: {}) - end + def list(request_options: {}); end # Delete a fine-tuned model. You must have the Owner role in your organization to # delete a model. @@ -41,13 +38,10 @@ module OpenAI # The model to delete model, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 8b4b2f36..9e47411a 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -11,11 +11,11 @@ module OpenAI String, T::Array[String], T::Array[ - T.any( - OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::AnyHash, - OpenAI::Models::ModerationTextInput - ) + T.any( + OpenAI::Models::ModerationImageURLInput, + OpenAI::Internal::AnyHash, + OpenAI::Models::ModerationTextInput + ) ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), @@ -33,13 +33,10 @@ module OpenAI # [here](https://platform.openai.com/docs/models#moderation). 
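# A classification sketch (hypothetical, assuming a configured `client`):
#
#   result = client.moderations.create(input: "text to classify")
#   result.results.first.flagged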
model: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 4ccb309d..8fb89354 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -24,20 +24,20 @@ module OpenAI input: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), @@ -58,13 +58,13 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), @@ -183,9 +183,7 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ) - end - + ); end # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. # # Creates a model response. 
Provide @@ -204,20 +202,20 @@ module OpenAI input: T.any( String, T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) + T.any( + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::ResponseInputItem::Message, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCall, + OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Models::Responses::ResponseReasoningItem, + OpenAI::Models::Responses::ResponseInputItem::ItemReference + ) ] ), model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), @@ -238,13 +236,13 @@ module OpenAI OpenAI::Models::Responses::ToolChoiceFunction ), tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) + T.any( + OpenAI::Models::Responses::FileSearchTool, + OpenAI::Internal::AnyHash, + OpenAI::Models::Responses::FunctionTool, + OpenAI::Models::Responses::ComputerTool, + OpenAI::Models::Responses::WebSearchTool + ) ], top_p: T.nilable(Float), truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), @@ -254,40 +252,40 @@ module OpenAI ) .returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Responses::ResponseAudioDeltaEvent, - OpenAI::Models::Responses::ResponseAudioDoneEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Models::Responses::ResponseCompletedEvent, - OpenAI::Models::Responses::ResponseContentPartAddedEvent, - OpenAI::Models::Responses::ResponseContentPartDoneEvent, - OpenAI::Models::Responses::ResponseCreatedEvent, - OpenAI::Models::Responses::ResponseErrorEvent, - OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Models::Responses::ResponseInProgressEvent, - 
OpenAI::Models::Responses::ResponseFailedEvent, - OpenAI::Models::Responses::ResponseIncompleteEvent, - OpenAI::Models::Responses::ResponseOutputItemAddedEvent, - OpenAI::Models::Responses::ResponseOutputItemDoneEvent, - OpenAI::Models::Responses::ResponseRefusalDeltaEvent, - OpenAI::Models::Responses::ResponseRefusalDoneEvent, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Models::Responses::ResponseTextDeltaEvent, - OpenAI::Models::Responses::ResponseTextDoneEvent, - OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent - ) + T.any( + OpenAI::Models::Responses::ResponseAudioDeltaEvent, + OpenAI::Models::Responses::ResponseAudioDoneEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Models::Responses::ResponseCompletedEvent, + OpenAI::Models::Responses::ResponseContentPartAddedEvent, + OpenAI::Models::Responses::ResponseContentPartDoneEvent, + OpenAI::Models::Responses::ResponseCreatedEvent, + OpenAI::Models::Responses::ResponseErrorEvent, + OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Models::Responses::ResponseInProgressEvent, + OpenAI::Models::Responses::ResponseFailedEvent, + OpenAI::Models::Responses::ResponseIncompleteEvent, + OpenAI::Models::Responses::ResponseOutputItemAddedEvent, + OpenAI::Models::Responses::ResponseOutputItemDoneEvent, + OpenAI::Models::Responses::ResponseRefusalDeltaEvent, + OpenAI::Models::Responses::ResponseRefusalDoneEvent, + OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Models::Responses::ResponseTextDeltaEvent, + OpenAI::Models::Responses::ResponseTextDoneEvent, + OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent + ) ] ) end @@ -400,9 +398,7 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ) - end - + ); end # Retrieves a model response with the given ID. sig do params( @@ -419,9 +415,7 @@ module OpenAI # Response creation above for more information. include: nil, request_options: {} - ) - end - + ); end # Deletes a model response with the given ID. sig do params( @@ -434,13 +428,10 @@ module OpenAI # The ID of the response to delete. 
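# For example (hypothetical, assuming a configured `client`):
#
#   client.responses.delete("resp_abc123")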
response_id, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 5c6dfbeb..26668496 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -17,16 +17,16 @@ module OpenAI ) .returns( OpenAI::Internal::CursorPage[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) + T.any( + OpenAI::Models::Responses::ResponseInputMessageItem, + OpenAI::Models::Responses::ResponseOutputMessage, + OpenAI::Models::Responses::ResponseFileSearchToolCall, + OpenAI::Models::Responses::ResponseComputerToolCall, + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Models::Responses::ResponseFunctionWebSearch, + OpenAI::Models::Responses::ResponseFunctionToolCallItem, + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + ) ] ) end @@ -49,13 +49,10 @@ module OpenAI # - `desc`: Return the input items in descending order. order: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index ed222226..bdc55f98 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -51,9 +51,7 @@ module OpenAI # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, request_options: {} - ) - end - + ); end # Cancels the Upload. No Parts may be added after an Upload is cancelled. sig do params( @@ -66,9 +64,7 @@ module OpenAI # The ID of the Upload. upload_id, request_options: {} - ) - end - + ); end # Completes the # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). # @@ -100,13 +96,10 @@ module OpenAI # matches what you expect. md5: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 7aec6363..47bb936f 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -29,13 +29,10 @@ module OpenAI # The chunk of bytes for this Part. data:, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index e64c7552..b0868a1f 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -45,9 +45,7 @@ module OpenAI # The name of the vector store. 
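# A creation sketch (hypothetical, assuming a configured `client`):
#
#   client.vector_stores.create(name: "Support FAQ")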
name: nil, request_options: {} - ) - end - + ); end # Retrieves a vector store. sig do params( @@ -60,9 +58,7 @@ module OpenAI # The ID of the vector store to retrieve. vector_store_id, request_options: {} - ) - end - + ); end # Modifies a vector store. sig do params( @@ -89,9 +85,7 @@ module OpenAI # The name of the vector store. name: nil, request_options: {} - ) - end - + ); end # Returns a list of vector stores. sig do params( @@ -121,9 +115,7 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # Delete a vector store. sig do params( @@ -136,9 +128,7 @@ module OpenAI # The ID of the vector store to delete. vector_store_id, request_options: {} - ) - end - + ); end # Search a vector store for relevant chunks based on a query and file attributes # filter. sig do @@ -168,13 +158,10 @@ module OpenAI # Whether to rewrite the natural language query for vector search. rewrite_query: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index 5bb347bf..ae4348ba 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -36,9 +36,7 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ) - end - + ); end # Retrieves a vector store file batch. sig do params( @@ -54,9 +52,7 @@ module OpenAI # The ID of the vector store that the file batch belongs to. vector_store_id:, request_options: {} - ) - end - + ); end # Cancel a vector store file batch. This attempts to cancel the processing of # files in this batch as soon as possible. sig do @@ -73,9 +69,7 @@ module OpenAI # The ID of the vector store that the file batch belongs to. vector_store_id:, request_options: {} - ) - end - + ); end # Returns a list of vector store files in a batch. sig do params( @@ -115,13 +109,10 @@ module OpenAI # ascending order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 145d752b..22550fd4 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -38,9 +38,7 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ) - end - + ); end # Retrieves a vector store file. sig do params( @@ -56,9 +54,7 @@ module OpenAI # The ID of the vector store that the file belongs to. vector_store_id:, request_options: {} - ) - end - + ); end # Update attributes on a vector store file. sig do params( @@ -81,9 +77,7 @@ module OpenAI # maximum length of 512 characters, booleans, or numbers. attributes:, request_options: {} - ) - end - + ); end # Returns a list of vector store files. sig do params( @@ -119,9 +113,7 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end # Delete a vector store file. This will remove the file from the vector store but # the file itself will not be deleted. 
To delete the file, use the # [delete file](https://platform.openai.com/docs/api-reference/files/delete) @@ -140,9 +132,7 @@ module OpenAI # The ID of the vector store that the file belongs to. vector_store_id:, request_options: {} - ) - end - + ); end # Retrieve the parsed contents of a vector store file. sig do params( @@ -158,13 +148,10 @@ module OpenAI # The ID of the vector store. vector_store_id:, request_options: {} - ) - end - + ); end # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:) - end + def self.new(client:); end end end end From 9448a5e1bc25481b28a06f65d3bcb2ad587a7283 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 5 Apr 2025 23:46:12 +0000 Subject: [PATCH 107/295] chore: update yard comment formatting (#109) --- .solargraph.yml | 1 + lib/openai/internal/transport/base_client.rb | 2 +- .../transport/pooled_net_requester.rb | 2 +- lib/openai/internal/type/base_model.rb | 22 +- lib/openai/internal/type/converter.rb | 40 +- lib/openai/internal/type/enum.rb | 16 +- lib/openai/internal/util.rb | 2 +- .../models/audio/speech_create_params.rb | 24 +- lib/openai/models/audio/transcription.rb | 6 +- .../audio/transcription_create_params.rb | 48 +- .../audio/transcription_create_response.rb | 2 +- .../models/audio/transcription_segment.rb | 6 +- .../audio/transcription_stream_event.rb | 6 +- .../audio/transcription_text_delta_event.rb | 10 +- .../audio/transcription_text_done_event.rb | 12 +- .../models/audio/transcription_verbose.rb | 2 +- .../models/audio/translation_create_params.rb | 24 +- lib/openai/models/audio_response_format.rb | 4 +- .../auto_file_chunking_strategy_param.rb | 2 +- lib/openai/models/batch.rb | 8 +- lib/openai/models/batch_create_params.rb | 36 +- lib/openai/models/batch_list_params.rb | 8 +- lib/openai/models/beta/assistant.rb | 92 ++-- .../models/beta/assistant_create_params.rb | 130 ++--- .../models/beta/assistant_list_params.rb | 18 +- .../beta/assistant_response_format_option.rb | 32 +- .../models/beta/assistant_stream_event.rb | 140 ++--- .../models/beta/assistant_tool_choice.rb | 2 +- .../beta/assistant_tool_choice_option.rb | 18 +- .../models/beta/assistant_update_params.rb | 104 ++-- lib/openai/models/beta/file_search_tool.rb | 34 +- .../models/beta/message_stream_event.rb | 34 +- .../models/beta/run_step_stream_event.rb | 34 +- lib/openai/models/beta/run_stream_event.rb | 42 +- lib/openai/models/beta/thread.rb | 32 +- .../beta/thread_create_and_run_params.rb | 230 ++++---- .../models/beta/thread_create_params.rb | 84 +-- lib/openai/models/beta/thread_stream_event.rb | 6 +- .../models/beta/thread_update_params.rb | 30 +- lib/openai/models/beta/threads/annotation.rb | 4 +- .../models/beta/threads/annotation_delta.rb | 4 +- .../beta/threads/file_citation_annotation.rb | 4 +- .../threads/file_citation_delta_annotation.rb | 4 +- .../beta/threads/file_path_annotation.rb | 2 +- .../threads/file_path_delta_annotation.rb | 2 +- lib/openai/models/beta/threads/image_file.rb | 8 +- .../beta/threads/image_file_content_block.rb | 2 +- .../models/beta/threads/image_file_delta.rb | 8 +- .../beta/threads/image_file_delta_block.rb | 2 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 6 +- lib/openai/models/beta/threads/message.rb | 24 +- .../models/beta/threads/message_content.rb | 2 +- .../beta/threads/message_content_delta.rb | 2 +- .../threads/message_content_part_param.rb | 2 +- 
.../beta/threads/message_create_params.rb | 24 +- .../beta/threads/message_delta_event.rb | 2 +- .../beta/threads/message_list_params.rb | 18 +- .../beta/threads/message_update_params.rb | 8 +- .../required_action_function_tool_call.rb | 8 +- lib/openai/models/beta/threads/run.rb | 118 ++-- .../models/beta/threads/run_create_params.rb | 168 +++--- .../models/beta/threads/run_list_params.rb | 18 +- lib/openai/models/beta/threads/run_status.rb | 4 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- .../models/beta/threads/run_update_params.rb | 8 +- .../runs/code_interpreter_output_image.rb | 2 +- .../runs/code_interpreter_tool_call.rb | 8 +- .../runs/code_interpreter_tool_call_delta.rb | 6 +- .../threads/runs/file_search_tool_call.rb | 12 +- .../runs/file_search_tool_call_delta.rb | 2 +- .../beta/threads/runs/function_tool_call.rb | 6 +- .../threads/runs/function_tool_call_delta.rb | 6 +- .../models/beta/threads/runs/run_step.rb | 30 +- .../beta/threads/runs/run_step_delta_event.rb | 2 +- .../beta/threads/runs/step_list_params.rb | 28 +- .../beta/threads/runs/step_retrieve_params.rb | 10 +- .../threads/runs/tool_call_delta_object.rb | 4 +- .../threads/runs/tool_calls_step_details.rb | 4 +- lib/openai/models/chat/chat_completion.rb | 28 +- ...chat_completion_assistant_message_param.rb | 22 +- .../models/chat/chat_completion_audio.rb | 8 +- .../chat/chat_completion_audio_param.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 60 +-- .../chat/chat_completion_content_part.rb | 6 +- .../chat_completion_content_part_image.rb | 4 +- .../chat/chat_completion_content_part_text.rb | 2 +- ...chat_completion_developer_message_param.rb | 6 +- .../chat_completion_function_call_option.rb | 2 +- .../models/chat/chat_completion_message.rb | 16 +- .../chat/chat_completion_message_param.rb | 4 +- .../chat/chat_completion_message_tool_call.rb | 6 +- .../chat/chat_completion_named_tool_choice.rb | 2 +- .../chat_completion_prediction_content.rb | 12 +- .../chat/chat_completion_stream_options.rb | 10 +- .../chat_completion_system_message_param.rb | 6 +- .../chat/chat_completion_token_logprob.rb | 24 +- .../chat_completion_tool_choice_option.rb | 18 +- .../chat_completion_user_message_param.rb | 4 +- .../models/chat/completion_create_params.rb | 304 +++++------ .../models/chat/completion_list_params.rb | 6 +- .../models/chat/completion_update_params.rb | 8 +- .../chat/completions/message_list_params.rb | 4 +- lib/openai/models/comparison_filter.rb | 30 +- lib/openai/models/completion.rb | 6 +- lib/openai/models/completion_choice.rb | 12 +- lib/openai/models/completion_create_params.rb | 128 ++--- lib/openai/models/completion_usage.rb | 8 +- lib/openai/models/compound_filter.rb | 4 +- lib/openai/models/embedding.rb | 4 +- lib/openai/models/embedding_create_params.rb | 54 +- .../models/file_chunking_strategy_param.rb | 2 +- lib/openai/models/file_create_params.rb | 6 +- lib/openai/models/file_list_params.rb | 12 +- lib/openai/models/file_object.rb | 14 +- lib/openai/models/file_purpose.rb | 6 +- .../models/fine_tuning/fine_tuning_job.rb | 82 +-- .../fine_tuning_job_wandb_integration.rb | 16 +- ...ine_tuning_job_wandb_integration_object.rb | 6 +- .../models/fine_tuning/job_create_params.rb | 130 ++--- .../models/fine_tuning/job_list_params.rb | 2 +- .../jobs/fine_tuning_job_checkpoint.rb | 2 +- lib/openai/models/function_definition.rb | 22 +- lib/openai/models/image.rb | 4 +- .../models/image_create_variation_params.rb | 24 +- lib/openai/models/image_edit_params.rb | 28 +- 
lib/openai/models/image_generate_params.rb | 44 +- lib/openai/models/moderation.rb | 34 +- lib/openai/models/moderation_create_params.rb | 16 +- .../other_file_chunking_strategy_object.rb | 4 +- lib/openai/models/reasoning.rb | 24 +- lib/openai/models/reasoning_effort.rb | 8 +- .../models/response_format_json_object.rb | 4 +- .../models/response_format_json_schema.rb | 18 +- lib/openai/models/responses/computer_tool.rb | 2 +- .../models/responses/easy_input_message.rb | 16 +- .../models/responses/file_search_tool.rb | 10 +- lib/openai/models/responses/function_tool.rb | 6 +- .../responses/input_item_list_params.rb | 12 +- lib/openai/models/responses/response.rb | 122 ++--- .../responses/response_computer_tool_call.rb | 46 +- ...response_computer_tool_call_output_item.rb | 6 +- ...se_computer_tool_call_output_screenshot.rb | 2 +- .../responses/response_create_params.rb | 144 ++--- .../response_file_search_tool_call.rb | 16 +- .../responses/response_format_text_config.rb | 16 +- ...response_format_text_json_schema_config.rb | 18 +- .../responses/response_function_tool_call.rb | 8 +- .../response_function_tool_call_item.rb | 4 +- ...response_function_tool_call_output_item.rb | 4 +- .../responses/response_function_web_search.rb | 4 +- .../models/responses/response_includable.rb | 12 +- .../models/responses/response_input_image.rb | 8 +- .../models/responses/response_input_item.rb | 30 +- .../responses/response_input_message_item.rb | 6 +- .../responses/response_output_message.rb | 4 +- .../responses/response_reasoning_item.rb | 6 +- .../responses/response_retrieve_params.rb | 2 +- .../models/responses/response_status.rb | 2 +- .../models/responses/response_text_config.rb | 22 +- lib/openai/models/responses/response_usage.rb | 4 +- lib/openai/models/responses/tool.rb | 4 +- .../models/responses/tool_choice_options.rb | 8 +- .../models/responses/tool_choice_types.rb | 22 +- .../models/responses/web_search_tool.rb | 20 +- .../models/static_file_chunking_strategy.rb | 4 +- lib/openai/models/upload.rb | 4 +- lib/openai/models/upload_complete_params.rb | 2 +- lib/openai/models/upload_create_params.rb | 8 +- lib/openai/models/vector_store.rb | 20 +- .../models/vector_store_create_params.rb | 16 +- lib/openai/models/vector_store_list_params.rb | 18 +- .../models/vector_store_search_params.rb | 2 +- .../models/vector_store_search_response.rb | 8 +- .../models/vector_store_update_params.rb | 10 +- .../vector_stores/file_batch_create_params.rb | 14 +- .../file_batch_list_files_params.rb | 18 +- .../vector_stores/file_create_params.rb | 14 +- .../models/vector_stores/file_list_params.rb | 18 +- .../vector_stores/file_update_params.rb | 8 +- .../models/vector_stores/vector_store_file.rb | 28 +- .../vector_stores/vector_store_file_batch.rb | 12 +- lib/openai/request_options.rb | 14 +- lib/openai/resources/audio/transcriptions.rb | 8 +- lib/openai/resources/batches.rb | 4 +- lib/openai/resources/beta/threads.rb | 6 +- lib/openai/resources/beta/threads/runs.rb | 28 +- lib/openai/resources/chat/completions.rb | 66 +-- .../resources/chat/completions/messages.rb | 2 +- lib/openai/resources/completions.rb | 4 +- lib/openai/resources/files.rb | 40 +- lib/openai/resources/fine_tuning/jobs.rb | 10 +- lib/openai/resources/models.rb | 6 +- lib/openai/resources/moderations.rb | 2 +- lib/openai/resources/responses.rb | 44 +- lib/openai/resources/uploads.rb | 48 +- lib/openai/resources/uploads/parts.rb | 16 +- lib/openai/resources/vector_stores.rb | 2 +- .../resources/vector_stores/file_batches.rb | 2 +- 
lib/openai/resources/vector_stores/files.rb | 10 +- rbi/lib/openai/internal.rbi | 2 +- .../openai/internal/transport/base_client.rbi | 2 +- .../transport/pooled_net_requester.rbi | 2 +- rbi/lib/openai/internal/type/base_model.rbi | 32 +- rbi/lib/openai/internal/type/converter.rbi | 40 +- rbi/lib/openai/internal/type/enum.rbi | 16 +- rbi/lib/openai/internal/util.rbi | 2 +- .../models/audio/speech_create_params.rbi | 24 +- rbi/lib/openai/models/audio/transcription.rbi | 6 +- .../audio/transcription_create_params.rbi | 48 +- .../audio/transcription_create_response.rbi | 2 +- .../models/audio/transcription_segment.rbi | 6 +- .../audio/transcription_stream_event.rbi | 6 +- .../audio/transcription_text_delta_event.rbi | 10 +- .../audio/transcription_text_done_event.rbi | 12 +- .../models/audio/transcription_verbose.rbi | 2 +- .../audio/translation_create_params.rbi | 24 +- .../openai/models/audio_response_format.rbi | 4 +- .../auto_file_chunking_strategy_param.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 8 +- rbi/lib/openai/models/batch_create_params.rbi | 36 +- rbi/lib/openai/models/batch_list_params.rbi | 8 +- rbi/lib/openai/models/beta/assistant.rbi | 86 +-- .../models/beta/assistant_create_params.rbi | 130 ++--- .../models/beta/assistant_list_params.rbi | 18 +- .../beta/assistant_response_format_option.rbi | 32 +- .../models/beta/assistant_stream_event.rbi | 140 ++--- .../models/beta/assistant_tool_choice.rbi | 2 +- .../beta/assistant_tool_choice_option.rbi | 18 +- .../models/beta/assistant_update_params.rbi | 104 ++-- .../openai/models/beta/file_search_tool.rbi | 34 +- .../models/beta/message_stream_event.rbi | 34 +- .../models/beta/run_step_stream_event.rbi | 34 +- .../openai/models/beta/run_stream_event.rbi | 42 +- rbi/lib/openai/models/beta/thread.rbi | 32 +- .../beta/thread_create_and_run_params.rbi | 230 ++++---- .../models/beta/thread_create_params.rbi | 84 +-- .../models/beta/thread_stream_event.rbi | 6 +- .../models/beta/thread_update_params.rbi | 30 +- .../openai/models/beta/threads/annotation.rbi | 4 +- .../models/beta/threads/annotation_delta.rbi | 4 +- .../beta/threads/file_citation_annotation.rbi | 4 +- .../file_citation_delta_annotation.rbi | 4 +- .../beta/threads/file_path_annotation.rbi | 2 +- .../threads/file_path_delta_annotation.rbi | 2 +- .../openai/models/beta/threads/image_file.rbi | 8 +- .../beta/threads/image_file_content_block.rbi | 2 +- .../models/beta/threads/image_file_delta.rbi | 8 +- .../beta/threads/image_file_delta_block.rbi | 2 +- .../openai/models/beta/threads/image_url.rbi | 6 +- .../models/beta/threads/image_url_delta.rbi | 6 +- .../openai/models/beta/threads/message.rbi | 24 +- .../models/beta/threads/message_content.rbi | 2 +- .../beta/threads/message_content_delta.rbi | 2 +- .../threads/message_content_part_param.rbi | 2 +- .../beta/threads/message_create_params.rbi | 24 +- .../beta/threads/message_delta_event.rbi | 2 +- .../beta/threads/message_list_params.rbi | 18 +- .../beta/threads/message_update_params.rbi | 8 +- .../required_action_function_tool_call.rbi | 8 +- rbi/lib/openai/models/beta/threads/run.rbi | 118 ++-- .../models/beta/threads/run_create_params.rbi | 168 +++--- .../models/beta/threads/run_list_params.rbi | 18 +- .../openai/models/beta/threads/run_status.rbi | 4 +- .../run_submit_tool_outputs_params.rbi | 2 +- .../models/beta/threads/run_update_params.rbi | 8 +- .../runs/code_interpreter_output_image.rbi | 2 +- .../runs/code_interpreter_tool_call.rbi | 8 +- .../runs/code_interpreter_tool_call_delta.rbi | 6 +- 
.../threads/runs/file_search_tool_call.rbi | 12 +- .../runs/file_search_tool_call_delta.rbi | 2 +- .../beta/threads/runs/function_tool_call.rbi | 6 +- .../threads/runs/function_tool_call_delta.rbi | 6 +- .../models/beta/threads/runs/run_step.rbi | 30 +- .../threads/runs/run_step_delta_event.rbi | 2 +- .../beta/threads/runs/step_list_params.rbi | 28 +- .../threads/runs/step_retrieve_params.rbi | 10 +- .../threads/runs/tool_call_delta_object.rbi | 4 +- .../threads/runs/tool_calls_step_details.rbi | 4 +- .../openai/models/chat/chat_completion.rbi | 28 +- ...hat_completion_assistant_message_param.rbi | 22 +- .../models/chat/chat_completion_audio.rbi | 8 +- .../chat/chat_completion_audio_param.rbi | 12 +- .../models/chat/chat_completion_chunk.rbi | 60 +-- .../chat/chat_completion_content_part.rbi | 6 +- .../chat_completion_content_part_image.rbi | 4 +- .../chat_completion_content_part_text.rbi | 2 +- ...hat_completion_developer_message_param.rbi | 6 +- .../chat_completion_function_call_option.rbi | 2 +- .../models/chat/chat_completion_message.rbi | 16 +- .../chat/chat_completion_message_param.rbi | 4 +- .../chat_completion_message_tool_call.rbi | 6 +- .../chat_completion_named_tool_choice.rbi | 2 +- .../chat_completion_prediction_content.rbi | 12 +- .../chat/chat_completion_stream_options.rbi | 10 +- .../chat_completion_system_message_param.rbi | 6 +- .../chat/chat_completion_token_logprob.rbi | 24 +- .../chat_completion_tool_choice_option.rbi | 18 +- .../chat_completion_user_message_param.rbi | 4 +- .../models/chat/completion_create_params.rbi | 300 +++++------ .../models/chat/completion_list_params.rbi | 6 +- .../models/chat/completion_update_params.rbi | 8 +- .../chat/completions/message_list_params.rbi | 4 +- rbi/lib/openai/models/comparison_filter.rbi | 30 +- rbi/lib/openai/models/completion.rbi | 6 +- rbi/lib/openai/models/completion_choice.rbi | 12 +- .../models/completion_create_params.rbi | 128 ++--- rbi/lib/openai/models/completion_usage.rbi | 8 +- rbi/lib/openai/models/compound_filter.rbi | 4 +- rbi/lib/openai/models/embedding.rbi | 4 +- .../openai/models/embedding_create_params.rbi | 54 +- .../models/file_chunking_strategy_param.rbi | 2 +- rbi/lib/openai/models/file_create_params.rbi | 6 +- rbi/lib/openai/models/file_list_params.rbi | 12 +- rbi/lib/openai/models/file_object.rbi | 14 +- rbi/lib/openai/models/file_purpose.rbi | 6 +- .../models/fine_tuning/fine_tuning_job.rbi | 82 +-- .../fine_tuning_job_wandb_integration.rbi | 16 +- ...ne_tuning_job_wandb_integration_object.rbi | 6 +- .../models/fine_tuning/job_create_params.rbi | 130 ++--- .../models/fine_tuning/job_list_params.rbi | 2 +- .../jobs/fine_tuning_job_checkpoint.rbi | 2 +- rbi/lib/openai/models/function_definition.rbi | 22 +- rbi/lib/openai/models/image.rbi | 4 +- .../models/image_create_variation_params.rbi | 24 +- rbi/lib/openai/models/image_edit_params.rbi | 28 +- .../openai/models/image_generate_params.rbi | 44 +- rbi/lib/openai/models/moderation.rbi | 34 +- .../models/moderation_create_params.rbi | 16 +- .../other_file_chunking_strategy_object.rbi | 4 +- rbi/lib/openai/models/reasoning.rbi | 24 +- rbi/lib/openai/models/reasoning_effort.rbi | 8 +- .../models/response_format_json_object.rbi | 4 +- .../models/response_format_json_schema.rbi | 18 +- .../openai/models/responses/computer_tool.rbi | 2 +- .../models/responses/easy_input_message.rbi | 16 +- .../models/responses/file_search_tool.rbi | 10 +- .../openai/models/responses/function_tool.rbi | 6 +- .../responses/input_item_list_params.rbi | 12 +- 
rbi/lib/openai/models/responses/response.rbi | 122 ++--- .../responses/response_computer_tool_call.rbi | 46 +- ...esponse_computer_tool_call_output_item.rbi | 6 +- ...e_computer_tool_call_output_screenshot.rbi | 2 +- .../responses/response_create_params.rbi | 144 ++--- .../response_file_search_tool_call.rbi | 16 +- .../responses/response_format_text_config.rbi | 16 +- ...esponse_format_text_json_schema_config.rbi | 18 +- .../responses/response_function_tool_call.rbi | 8 +- .../response_function_tool_call_item.rbi | 4 +- ...esponse_function_tool_call_output_item.rbi | 4 +- .../response_function_web_search.rbi | 4 +- .../models/responses/response_includable.rbi | 12 +- .../models/responses/response_input_image.rbi | 8 +- .../models/responses/response_input_item.rbi | 30 +- .../responses/response_input_message_item.rbi | 6 +- .../responses/response_output_message.rbi | 4 +- .../responses/response_reasoning_item.rbi | 6 +- .../responses/response_retrieve_params.rbi | 2 +- .../models/responses/response_status.rbi | 2 +- .../models/responses/response_text_config.rbi | 22 +- .../models/responses/response_usage.rbi | 4 +- rbi/lib/openai/models/responses/tool.rbi | 4 +- .../models/responses/tool_choice_options.rbi | 8 +- .../models/responses/tool_choice_types.rbi | 22 +- .../models/responses/web_search_tool.rbi | 20 +- .../models/static_file_chunking_strategy.rbi | 4 +- rbi/lib/openai/models/upload.rbi | 4 +- .../openai/models/upload_complete_params.rbi | 2 +- .../openai/models/upload_create_params.rbi | 8 +- rbi/lib/openai/models/vector_store.rbi | 20 +- .../models/vector_store_create_params.rbi | 16 +- .../models/vector_store_list_params.rbi | 18 +- .../models/vector_store_search_params.rbi | 2 +- .../models/vector_store_search_response.rbi | 8 +- .../models/vector_store_update_params.rbi | 10 +- .../file_batch_create_params.rbi | 14 +- .../file_batch_list_files_params.rbi | 18 +- .../vector_stores/file_create_params.rbi | 14 +- .../models/vector_stores/file_list_params.rbi | 18 +- .../vector_stores/file_update_params.rbi | 8 +- .../vector_stores/vector_store_file.rbi | 28 +- .../vector_stores/vector_store_file_batch.rbi | 12 +- rbi/lib/openai/request_options.rbi | 14 +- rbi/lib/openai/resources/audio/speech.rbi | 14 +- .../openai/resources/audio/transcriptions.rbi | 100 ++-- .../openai/resources/audio/translations.rbi | 20 +- rbi/lib/openai/resources/batches.rbi | 40 +- rbi/lib/openai/resources/beta/assistants.rbi | 172 +++--- rbi/lib/openai/resources/beta/threads.rbi | 252 ++++----- .../resources/beta/threads/messages.rbi | 46 +- .../openai/resources/beta/threads/runs.rbi | 336 ++++++------ .../resources/beta/threads/runs/steps.rbi | 40 +- rbi/lib/openai/resources/chat/completions.rbi | 510 +++++++++--------- .../resources/chat/completions/messages.rbi | 4 +- rbi/lib/openai/resources/completions.rbi | 228 ++++---- rbi/lib/openai/resources/embeddings.rbi | 30 +- rbi/lib/openai/resources/files.rbi | 48 +- rbi/lib/openai/resources/fine_tuning/jobs.rbi | 74 +-- rbi/lib/openai/resources/images.rbi | 62 +-- rbi/lib/openai/resources/models.rbi | 6 +- rbi/lib/openai/resources/moderations.rbi | 10 +- rbi/lib/openai/resources/responses.rbi | 286 +++++----- .../resources/responses/input_items.rbi | 8 +- rbi/lib/openai/resources/uploads.rbi | 58 +- rbi/lib/openai/resources/uploads/parts.rbi | 16 +- rbi/lib/openai/resources/vector_stores.rbi | 42 +- .../resources/vector_stores/file_batches.rbi | 34 +- .../openai/resources/vector_stores/files.rbi | 48 +- 408 files changed, 5449 insertions(+), 5448 
deletions(-) diff --git a/.solargraph.yml b/.solargraph.yml index 4f571833..18a89fcb 100644 --- a/.solargraph.yml +++ b/.solargraph.yml @@ -5,6 +5,7 @@ include: - 'Rakefile' - 'examples/**/*.rb' - 'lib/**/*.rb' + - 'test/openai/resource_namespaces.rb' - 'test/openai/test_helper.rb' exclude: - 'rbi/**/*' diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index a46be452..b2e526d5 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -393,7 +393,7 @@ def initialize( end # Execute the request specified by `req`. This is the method that all resource - # methods call into. + # methods call into. # # @overload request(method, path, query: {}, headers: {}, body: nil, unwrap: nil, page: nil, stream: nil, model: OpenAI::Internal::Type::Unknown, options: {}) # diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 74f76024..3ef69e08 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -6,7 +6,7 @@ module Transport # @api private class PooledNetRequester # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 KEEP_ALIVE_TIMEOUT = 30 class << self diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 1f4f669f..026af072 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -19,7 +19,7 @@ class << self # @api private # # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. + # defined on subclasses. # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] def known_fields @@ -143,7 +143,7 @@ def optional(name_sym, type_info, spec = {}) # @api private # # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them + # even if well behaved servers should not send them # # @param blk [Proc] private def request_only(&blk) @@ -291,11 +291,11 @@ def dump(value) end # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. + # returned. # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. # # @param key [Symbol] # @@ -310,12 +310,12 @@ def [](key) # Returns a Hash of the data underlying this object. O(1) # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. # - # This method is not recursive. 
The returned value is shared by the object, so it - # should not be mutated. + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. # # @return [Hash{Symbol=>Object}] def to_h = @data diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 1fbf548f..ef77d9eb 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -75,37 +75,37 @@ def type_info(spec) # # Based on `target`, transform `value` into `target`, to the extent possible: # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode # # @param target [OpenAI::Internal::Type::Converter, Class] # # @param value [Object] # # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: + # coercion strategy when we have to decide between multiple possible conversion + # targets: # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: # - # - `yes`: the value can be converted to the target type with minimum coercion. - # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. # - # See implementation below for more details. + # See implementation below for more details. 
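Aside: the strictness/exactness contract spelled out in the `coerce` documentation above can be illustrated with a small standalone sketch. The helper names below (`score_coerce`, `best_variant`) are hypothetical and not part of the SDK; the sketch also simplifies `true` strictness to "accept only exact matches".

    # A minimal sketch of exactness-based union coercion, assuming three
    # outcomes per attempt: :yes (no coercion needed), :maybe (safe
    # coercion possible), :no (cannot convert).
    def score_coerce(target, value)
      case target
      when :integer
        return [value, :yes] if value.is_a?(Integer)
        return [Integer(value, 10), :maybe] if value.is_a?(String) && value.match?(/\A-?\d+\z/)
      when :symbol
        return [value, :yes] if value.is_a?(Symbol)
        return [value.to_sym, :maybe] if value.is_a?(String)
      end
      [value, :no]
    end

    # Try every union variant, keep the best exactness, then apply the
    # strictness rule: `:strong` raises unless the match is exact, `true`
    # (simplified here) accepts only exact matches, `false` also accepts
    # reasonable coercions.
    def best_variant(variants, value, strictness: false)
      order = {yes: 0, maybe: 1, no: 2}
      target, converted, exactness =
        variants.map { |t| [t, *score_coerce(t, value)] }
                .min_by { |(_, _, e)| order[e] }
      case [strictness, exactness]
      in [true | :strong, :yes] then converted
      in [false, :yes | :maybe] then converted
      in [:strong, _] then raise ArgumentError, "no exact conversion of #{value.inspect} to #{target}"
      else value
      end
    end

    best_variant([:integer, :symbol], "42")                   # => 42 (safe coercion wins)
    best_variant([:integer, :symbol], "42", strictness: true) # => "42" (returned unaltered)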
# # @option state [Boolean, :strong] :strictness # diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 5ba8860c..2a005d61 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -6,15 +6,15 @@ module Type # @api private # # A value from among a specified list of options. OpenAPI enum values map to Ruby - # values in the SDK as follows: + # values in the SDK as follows: # - # 1. boolean => true | false - # 2. integer => Integer - # 3. float => Float - # 4. string => Symbol + # 1. boolean => true | false + # 2. integer => Integer + # 3. float => Float + # 4. string => Symbol # - # We can therefore convert string values to Symbols, but can't convert other - # values safely. + # We can therefore convert string values to Symbols, but can't convert other + # values safely. # # @example # # `chat_model` is a `OpenAI::Models::ChatModel` @@ -70,7 +70,7 @@ def ==(other) # @api private # # Unlike with primitives, `Enum` additionally validates that the value is a member - # of the enum. + # of the enum. # # @param value [String, Symbol, Object] # diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 7fa9d32d..fd5e4b81 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -152,7 +152,7 @@ class << self # @api private # # Recursively merge one hash with another. If the values at a given key are not - # both hashes, just take the new value. + # both hashes, just take the new value. # # @param values [Array] # diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 94b31a52..ba364d38 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -17,23 +17,23 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute model # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # # @return [String, Symbol, OpenAI::Models::Audio::SpeechModel] required :model, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Model } # @!attribute voice # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and - # `verse`. Previews of the voices are available in the - # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } # @!attribute [r] instructions # Control the voice of your generated audio with additional instructions. Does not - # work with `tts-1` or `tts-1-hd`. + # work with `tts-1` or `tts-1-hd`. # # @return [String, nil] optional :instructions, String @@ -44,7 +44,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] response_format # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, - # `wav`, and `pcm`. + # `wav`, and `pcm`. 
# # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat } @@ -55,7 +55,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. # # @return [Float, nil] optional :speed, Float @@ -78,7 +78,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. module Model extend OpenAI::Internal::Type::Union @@ -93,9 +93,9 @@ module Model end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and - # `verse`. Previews of the voices are available in the - # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). module Voice extend OpenAI::Internal::Type::Union @@ -145,7 +145,7 @@ module Voice end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, - # `wav`, and `pcm`. + # `wav`, and `pcm`. module ResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index abe567ce..ead34e3d 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -12,8 +12,8 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @!attribute [r] logprobs # The log probabilities of the tokens in the transcription. Only returned with the - # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added - # to the `include` array. + # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added + # to the `include` array. # # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] } @@ -24,7 +24,7 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a transcription response returned by model, based on the provided - # # input. + # # input. # # # # @param text [String] # # @param logprobs [Array] diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 0cd4367b..2dc55d74 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -13,25 +13,25 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # # @return [IO, StringIO] required :file, IO # @!attribute model # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). 
+ # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). # # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranscriptionCreateParams::Model } # @!attribute [r] include # Additional information to include in the transcription response. `logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. # # @return [Array, nil] optional :include, @@ -43,8 +43,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] language # The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. # # @return [String, nil] optional :language, String @@ -55,9 +55,9 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] prompt # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. # # @return [String, nil] optional :prompt, String @@ -68,8 +68,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] response_format # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. # # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } @@ -80,10 +80,10 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. # # @return [Float, nil] optional :temperature, Float @@ -94,10 +94,10 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] timestamp_granularities # The timestamp granularities to populate for this transcription. 
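Aside: a request combining the parameters documented in this file might look like the sketch below. The `OpenAI::Client.new(api_key:)` and `client.audio.transcriptions.create` call shapes are assumptions based on the resource layout in this patch series, not verbatim excerpts; the file name is a placeholder.

    # A minimal sketch, assuming a client configured with an API key.
    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    transcription = client.audio.transcriptions.create(
      file: File.open("meeting.mp3", "rb"),      # audio file object, not a file name
      model: "whisper-1",                        # or "gpt-4o-transcribe" / "gpt-4o-mini-transcribe"
      response_format: :verbose_json,            # required to use timestamp granularities
      timestamp_granularities: [:word, :segment] # word timestamps add latency; segments do not
    )
    puts transcription.text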
- # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. # # @return [Array, nil] optional :timestamp_granularities, @@ -136,8 +136,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). module Model extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 75cfc88b..69d68b0b 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Audio # Represents a transcription response returned by model, based on the provided - # input. + # input. # # @see OpenAI::Resources::Audio::Transcriptions#create # diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 02990b46..852a77d1 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -12,14 +12,14 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @!attribute avg_logprob # Average logprob of the segment. If the value is lower than -1, consider the - # logprobs failed. + # logprobs failed. # # @return [Float] required :avg_logprob, Float # @!attribute compression_ratio # Compression ratio of the segment. If the value is greater than 2.4, consider the - # compression failed. + # compression failed. # # @return [Float] required :compression_ratio, Float @@ -32,7 +32,7 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @!attribute no_speech_prob # Probability of no speech in the segment. If the value is higher than 1.0 and the - # `avg_logprob` is below -1, consider this segment silent. + # `avg_logprob` is below -1, consider this segment silent. # # @return [Float] required :no_speech_prob, Float diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 171cfd23..de3b63d5 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -4,9 +4,9 @@ module OpenAI module Models module Audio # Emitted when there is an additional text delta. This is also the first event - # emitted when the transcription starts. Only emitted when you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `Stream` parameter set to `true`. + # emitted when the transcription starts. 
Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. module TranscriptionStreamEvent extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 4a858738..731bf107 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -18,8 +18,8 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute [r] logprobs # The log probabilities of the delta. Only included if you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `include[]` parameter set to `logprobs`. + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. # # @return [Array, nil] optional :logprobs, @@ -31,9 +31,9 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Emitted when there is an additional text delta. This is also the first event - # # emitted when the transcription starts. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. + # # emitted when the transcription starts. Only emitted when you + # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # # with the `Stream` parameter set to `true`. # # # # @param delta [String] # # @param logprobs [Array] diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 9795995a..be1ee0fe 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -18,9 +18,9 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute [r] logprobs # The log probabilities of the individual tokens in the transcription. Only - # included if you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `include[]` parameter set to `logprobs`. + # included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. # # @return [Array, nil] optional :logprobs, @@ -32,9 +32,9 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Emitted when the transcription is complete. Contains the complete transcription - # # text. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. + # # text. Only emitted when you + # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # # with the `Stream` parameter set to `true`. 
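Aside: consuming the two streaming event types above could look like the sketch below. Only the event class names and their `delta`/`text` fields come from this patch; the `create_streaming` method name and client setup are hypothetical.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # Hypothetical streaming variant of the create call, yielding typed events.
    stream = client.audio.transcriptions.create_streaming(
      file: File.open("meeting.mp3", "rb"),
      model: "gpt-4o-mini-transcribe"
    )

    stream.each do |event|
      case event
      when OpenAI::Models::Audio::TranscriptionTextDeltaEvent
        print event.delta               # incremental text, starting with the first event
      when OpenAI::Models::Audio::TranscriptionTextDoneEvent
        puts "\nfinal: #{event.text}"   # the complete transcription, emitted once at the end
      end
    end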
# # # # @param text [String] # # @param logprobs [Array] diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 1dfb5931..80068f85 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -44,7 +44,7 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a verbose json transcription response returned by model, based on the - # # provided input. + # # provided input. # # # # @param duration [Float] # # @param language [String] diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 93833eb2..82b0af2f 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -11,23 +11,23 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The audio file object (not file name) translate, in one of these formats: flac, - # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # # @return [IO, StringIO] required :file, IO # @!attribute model # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # Whisper V2 model) is currently available. # # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranslationCreateParams::Model } # @!attribute [r] prompt # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should be in English. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should be in English. # # @return [String, nil] optional :prompt, String @@ -38,7 +38,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] response_format # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat } @@ -49,10 +49,10 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. # # @return [Float, nil] optional :temperature, Float @@ -74,7 +74,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # Whisper V2 model) is currently available. 
module Model extend OpenAI::Internal::Type::Union @@ -89,7 +89,7 @@ module Model end # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. module ResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 0904c657..2babfc83 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -3,8 +3,8 @@ module OpenAI module Models # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. module AudioResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/auto_file_chunking_strategy_param.rb b/lib/openai/models/auto_file_chunking_strategy_param.rb index 15d03f11..a33c9dcc 100644 --- a/lib/openai/models/auto_file_chunking_strategy_param.rb +++ b/lib/openai/models/auto_file_chunking_strategy_param.rb @@ -11,7 +11,7 @@ class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel # @!parse # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. + # # `800` and `chunk_overlap_tokens` of `400`. # # # # @param type [Symbol, :auto] # # diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index be9ede39..a490b124 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -146,11 +146,11 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 1549f66e..95548d81 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -10,16 +10,16 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute completion_window # The time frame within which the batch should be processed. Currently only `24h` - # is supported. + # is supported. # # @return [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] required :completion_window, enum: -> { OpenAI::Models::BatchCreateParams::CompletionWindow } # @!attribute endpoint # The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. 
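Aside: putting the batch-creation parameters documented above together might look like the sketch below. The `client.batches.create` call shape is an assumption, and the file ID is a hypothetical placeholder; the field values follow the comments in this file.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    batch = client.batches.create(
      completion_window: :"24h",            # only `24h` is currently supported
      endpoint: :"/v1/chat/completions",
      input_file_id: "file-abc123",         # hypothetical ID of an uploaded JSONL file with purpose `batch`
      metadata: {project: "nightly-eval"}   # up to 16 string key/value pairs
    )
    puts batch.id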
+ # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. # # @return [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] required :endpoint, enum: -> { OpenAI::Models::BatchCreateParams::Endpoint } @@ -27,24 +27,24 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute input_file_id # The ID of an uploaded file that contains requests for the new batch. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your input file must be formatted as a - # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), - # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 - # requests, and can be up to 200 MB in size. + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. # # @return [String] required :input_file_id, String # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -61,7 +61,7 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The time frame within which the batch should be processed. Currently only `24h` - # is supported. + # is supported. module CompletionWindow extend OpenAI::Internal::Type::Enum @@ -75,9 +75,9 @@ module CompletionWindow end # The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. module Endpoint extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 651111c7..675cc802 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -10,9 +10,9 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. 
For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -23,7 +23,7 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 727f24f2..bd04e326 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -25,28 +25,28 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @!attribute instructions # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. # # @return [String, nil] required :instructions, String, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. # # @return [String] required :model, String @@ -65,60 +65,60 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @!attribute tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. # # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. - # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. 
Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. - # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. # # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute tool_resources # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. # # @return [OpenAI::Models::Beta::Assistant::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::Assistant::ToolResources }, nil?: true # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. 
So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true @@ -183,9 +183,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. + # # specific to the type of tool. For example, the `code_interpreter` tool requires + # # a list of file IDs, while the `file_search` tool requires a list of vector store + # # IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] @@ -198,8 +198,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter`` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter`` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -220,9 +220,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 2b426970..3e3fd1d0 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -11,10 +11,10 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute model # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. # # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Models::Beta::AssistantCreateParams::Model } @@ -27,18 +27,18 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute instructions # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. 
# # @return [String, nil] optional :instructions, String, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -52,60 +52,60 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. 
+ # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. # # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute tool_resources # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources }, nil?: true # @!attribute [r] tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } @@ -116,10 +116,10 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true @@ -159,10 +159,10 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. 
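The `response_format` option documented above accepts `:auto`, a text, `json_object`, or `json_schema` object, or `nil`. A rough sketch of the two JSON variants follows; the plain-hash parameter shape is an assumption (the generated param models accept equivalent input):

```
# JSON mode: per the docs above, also instruct the model to emit JSON in a
# message, or it may stream whitespace until it hits the token limit.
assistant = client.beta.assistants.create(
  model: "gpt-4o",
  instructions: "Reply with a single JSON object only.",
  response_format: {type: "json_object"}
)

# Structured Outputs: the reply must match the supplied JSON schema.
assistant = client.beta.assistants.create(
  model: "gpt-4o",
  response_format: {
    type: "json_schema",
    json_schema: {
      name: "answer",
      schema: {type: "object", properties: {text: {type: "string"}}}
    }
  }
)
```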
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -198,9 +198,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. + # # specific to the type of tool. For example, the `code_interpreter` tool requires + # # a list of file IDs, while the `file_search` tool requires a list of vector store + # # IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] @@ -213,8 +213,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -235,9 +235,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -248,9 +248,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_stores # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this assistant. There can be a maximum of 1 - # vector store attached to the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this assistant. There can be a maximum of 1 + # vector store attached to the assistant. # # @return [Array, nil] optional :vector_stores, @@ -271,7 +271,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. 
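The `vector_stores` helper and the chunking strategies documented in this file combine roughly as below. A hedged sketch: the nested `static: {...}` shape follows the public REST API, and the file ID is illustrative. The values shown are the documented defaults, and `chunk_overlap_tokens` respects the rule that it must not exceed half of `max_chunk_size_tokens`.

```
# Create an assistant whose file_search tool gets a new vector store
# built from an uploaded file, with an explicit static chunking strategy.
assistant = client.beta.assistants.create(
  model: "gpt-4o",
  tools: [{type: "file_search"}],
  tool_resources: {
    file_search: {
      vector_stores: [{
        file_ids: ["file-abc123"],
        chunking_strategy: {
          type: "static",
          static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
        }
      }]
    }
  }
)
```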
# # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, @@ -283,8 +283,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -295,11 +295,11 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -314,7 +314,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. # # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy @@ -338,7 +338,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # @!parse # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. + # # `800` and `chunk_overlap_tokens` of `400`. # # # # @param type [Symbol, :auto] # # @@ -373,14 +373,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. # # @return [Integer] required :chunk_overlap_tokens, Integer # @!attribute max_chunk_size_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. # # @return [Integer] required :max_chunk_size_tokens, Integer diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index c01b9f64..d46562ae 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -11,9 +11,9 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. 
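The cursor semantics documented here are shared across the list endpoints: pass the last ID of one page as `after` to fetch the next. A minimal sketch; the `data` accessor on the returned page follows the REST response shape and is an assumption about the generated wrapper:

```
# Fetch two consecutive pages of assistants, newest first.
first_page = client.beta.assistants.list(limit: 20, order: :desc)
last_seen  = first_page.data.last.id
next_page  = client.beta.assistants.list(limit: 20, order: :desc, after: last_seen)
```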
+ # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -24,9 +24,9 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -37,7 +37,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -48,7 +48,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::AssistantListParams::Order } @@ -69,7 +69,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 2c797ac5..f1e5fc86 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -4,25 +4,25 @@ module OpenAI module Models module Beta # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. 
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. module AssistantResponseFormatOption extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index b46d6568..5663b14f 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -5,24 +5,24 @@ module Models module Beta # Represents an event emitted when streaming a Run. # - # Each event in a server-sent events stream has an `event` and `data` property: + # Each event in a server-sent events stream has an `event` and `data` property: # - # ``` - # event: thread.created - # data: {"id": "thread_123", "object": "thread", ...} - # ``` + # ``` + # event: thread.created + # data: {"id": "thread_123", "object": "thread", ...} + # ``` # - # We emit events whenever a new object is created, transitions to a new state, or - # is being streamed in parts (deltas). For example, we emit `thread.run.created` - # when a new run is created, `thread.run.completed` when a run completes, and so - # on. When an Assistant chooses to create a message during a run, we emit a - # `thread.message.created event`, a `thread.message.in_progress` event, many - # `thread.message.delta` events, and finally a `thread.message.completed` event. + # We emit events whenever a new object is created, transitions to a new state, or + # is being streamed in parts (deltas). For example, we emit `thread.run.created` + # when a new run is created, `thread.run.completed` when a run completes, and so + # on. When an Assistant chooses to create a message during a run, we emit a + # `thread.message.created event`, a `thread.message.in_progress` event, many + # `thread.message.delta` events, and finally a `thread.message.completed` event. # - # We may add additional events over time, so we recommend handling unknown events - # gracefully in your code. See the - # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) - # to learn how to integrate the Assistants API with streaming. + # We may add additional events over time, so we recommend handling unknown events + # gracefully in your code. See the + # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) + # to learn how to integrate the Assistants API with streaming. 
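Because each event variant below carries a typed `data` payload plus a literal `event` symbol, a consumer can pattern-match on the event name and fall through for anything unrecognized, as the comment above recommends. A hedged sketch, given a `stream` enumerator of these events (however obtained):

```
# Iterate a run's server-sent events; ignore unknown event types.
stream.each do |event|
  case event.event
  in :"thread.message.delta"
    # Partial message content arrives here (see MessageDeltaEvent).
  in :"thread.run.completed"
    puts "run finished"
  in :error
    warn event.data.message
  else
    # Event types added later are skipped rather than raising.
  end
end
```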
module AssistantStreamEvent extend OpenAI::Internal::Type::Union @@ -112,7 +112,7 @@ module AssistantStreamEvent class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a thread that contains - # [messages](https://platform.openai.com/docs/api-reference/messages). + # [messages](https://platform.openai.com/docs/api-reference/messages). # # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Models::Beta::Thread } @@ -134,8 +134,8 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. + # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # # created. # # # # @param data [OpenAI::Models::Beta::Thread] # # @param enabled [Boolean] @@ -149,7 +149,7 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -161,7 +161,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.created"] @@ -174,7 +174,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -186,7 +186,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. + # # moves to a `queued` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.queued"] @@ -199,7 +199,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -211,7 +211,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. + # # moves to an `in_progress` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.in_progress"] @@ -224,7 +224,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -236,7 +236,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. + # # moves to a `requires_action` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.requires_action"] @@ -249,7 +249,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -261,7 +261,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. + # # is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.completed"] @@ -274,7 +274,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -286,7 +286,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. + # # ends with status `incomplete`. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.incomplete"] @@ -299,7 +299,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -311,7 +311,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. + # # fails. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.failed"] @@ -324,7 +324,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -336,7 +336,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. 
+ # # moves to a `cancelling` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.cancelling"] @@ -349,7 +349,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -361,7 +361,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. + # # is cancelled. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.cancelled"] @@ -374,7 +374,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -386,7 +386,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. + # # expires. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.expired"] @@ -410,8 +410,8 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is created. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.created"] @@ -435,8 +435,8 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # moves to an `in_progress` state. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.in_progress"] @@ -449,7 +449,7 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a run step delta i.e. any changed fields on a run step during - # streaming. + # streaming. # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent } @@ -461,8 +461,8 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # are being streamed. 
# # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] # # @param event [Symbol, :"thread.run.step.delta"] @@ -486,8 +486,8 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.completed"] @@ -511,8 +511,8 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # fails. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.failed"] @@ -536,8 +536,8 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is cancelled. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.cancelled"] @@ -561,8 +561,8 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # expires. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.expired"] @@ -575,7 +575,7 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -587,8 +587,8 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # # created. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.created"] @@ -601,7 +601,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -613,8 +613,8 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. 
+ # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # # to an `in_progress` state. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.in_progress"] @@ -627,7 +627,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message delta i.e. any changed fields on a message during - # streaming. + # streaming. # # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] required :data, -> { OpenAI::Models::Beta::Threads::MessageDeltaEvent } @@ -639,8 +639,8 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. + # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # # being streamed. # # # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] # # @param event [Symbol, :"thread.message.delta"] @@ -653,7 +653,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -665,8 +665,8 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # # completed. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.completed"] @@ -679,7 +679,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -691,8 +691,8 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # # before it is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.incomplete"] @@ -715,8 +715,8 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when an - # # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. - # # This can happen due to an internal server error or a timeout. + # # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + # # This can happen due to an internal server error or a timeout. 
# # # # @param data [OpenAI::Models::ErrorObject] # # @param event [Symbol, :error] diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 0c1c8cb9..1dff1877 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -21,7 +21,7 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @!parse # # Specifies a tool the model should use. Use to force the model to call a specific - # # tool. + # # tool. # # # # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] # # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 5445f1a6..69293cbc 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -4,12 +4,12 @@ module OpenAI module Models module Beta # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. module AssistantToolChoiceOption extend OpenAI::Internal::Type::Union @@ -20,9 +20,9 @@ module AssistantToolChoiceOption variant -> { OpenAI::Models::Beta::AssistantToolChoice } # `none` means the model will not call any tools and instead generates a message. - # `auto` means the model can pick between generating a message or calling one or - # more tools. `required` means the model must call one or more tools before - # responding to the user. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools before + # responding to the user. module Auto extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 2fe97acb..299fb194 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -17,28 +17,28 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute instructions # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. # # @return [String, nil] optional :instructions, String, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. 
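Returning to the `AssistantToolChoiceOption` union a few hunks above: `none`, `auto`, and `required` are the enum values, and a particular tool can be forced with an object. A sketch of the three forms as run-creation arguments; the run method signature is an assumption:

```
# Let the model decide (default):
client.beta.threads.runs.create(thread_id, assistant_id: assistant.id, tool_choice: :auto)

# Require at least one tool call before responding:
client.beta.threads.runs.create(thread_id, assistant_id: assistant.id, tool_choice: :required)

# Force one particular function tool:
client.beta.threads.runs.create(
  thread_id,
  assistant_id: assistant.id,
  tool_choice: {type: "function", function: {name: "my_function"}}
)
```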
+ # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute [r] model # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. # # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } @@ -56,60 +56,60 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. 
Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. # # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute tool_resources # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources }, nil?: true # @!attribute [r] tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } @@ -120,10 +120,10 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true @@ -163,10 +163,10 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. 
You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -294,9 +294,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. + # # specific to the type of tool. For example, the `code_interpreter` tool requires + # # a list of file IDs, while the `file_search` tool requires a list of vector store + # # IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] @@ -309,9 +309,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # Overrides the list of - # [file](https://platform.openai.com/docs/api-reference/files) IDs made available - # to the `code_interpreter` tool. There can be a maximum of 20 files associated - # with the tool. + # [file](https://platform.openai.com/docs/api-reference/files) IDs made available + # to the `code_interpreter` tool. There can be a maximum of 20 files associated + # with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -332,9 +332,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # Overrides the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 2428a1a0..76343e2f 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -32,13 +32,13 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] max_num_results # The maximum number of results the file search tool should output. The default is - # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between - # 1 and 50 inclusive. + # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + # 1 and 50 inclusive. # - # Note that the file search tool may output fewer than `max_num_results` results. - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. 
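The file search limits being re-wrapped here are easier to see as a concrete tool definition. A minimal sketch, not part of the patch itself: the hash shape follows the `FileSearchTool` attributes modeled in this file, while the client constructor, the `assistants.update` call, and the assistant ID are assumptions for illustration.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# "asst_123" is a placeholder assistant ID.
client.beta.assistants.update(
  "asst_123",
  tools: [
    {
      type: :file_search,
      file_search: {
        max_num_results: 10,     # must fall between 1 and 50 inclusive
        ranking_options: {
          ranker: :auto,         # the default ranker
          score_threshold: 0.5   # a float between 0 and 1
        }
      }
    }
  ]
)

Even with `max_num_results: 10`, the tool may return fewer results, as the documentation above notes.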
+ # Note that the file search tool may output fewer than `max_num_results` results. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. # # @return [Integer, nil] optional :max_num_results, Integer @@ -49,11 +49,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] ranking_options # The ranking options for the file search. If not specified, the file search tool - # will use the `auto` ranker and a score_threshold of 0. + # will use the `auto` ranker and a score_threshold of 0. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions } @@ -76,14 +76,14 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute score_threshold # The score threshold for the file search. All values must be a floating point - # number between 0 and 1. + # number between 0 and 1. # # @return [Float] required :score_threshold, Float # @!attribute [r] ranker # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } @@ -94,11 +94,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!parse # # The ranking options for the file search. If not specified, the file search tool - # # will use the `auto` ranker and a score_threshold of 0. + # # will use the `auto` ranker and a score_threshold of 0. # # - # # See the - # # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # # for more information. + # # See the + # # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # # for more information. # # # # @param score_threshold [Float] # # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] @@ -108,7 +108,7 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. # # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker module Ranker diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 0657f158..09590507 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -4,8 +4,8 @@ module OpenAI module Models module Beta # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # created. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. 
module MessageStreamEvent extend OpenAI::Internal::Type::Union @@ -32,7 +32,7 @@ module MessageStreamEvent class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -44,8 +44,8 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # # created. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.created"] @@ -58,7 +58,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -70,8 +70,8 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # # to an `in_progress` state. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.in_progress"] @@ -84,7 +84,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message delta i.e. any changed fields on a message during - # streaming. + # streaming. # # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] required :data, -> { OpenAI::Models::Beta::Threads::MessageDeltaEvent } @@ -96,8 +96,8 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. + # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # # being streamed. # # # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] # # @param event [Symbol, :"thread.message.delta"] @@ -110,7 +110,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -122,8 +122,8 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # # completed. 
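Each variant of this union pairs an `event` symbol with a typed `data` payload, which makes Ruby pattern matching a natural consumer. A hedged sketch: `events` is assumed to be an Enumerable of deserialized `MessageStreamEvent` members, and `handle_delta` is a hypothetical helper; how the stream is opened lies outside this file.

events.each do |event|
  case event.event
  in :"thread.message.created"
    puts "message #{event.data.id} created"  # data is a Threads::Message
  in :"thread.message.delta"
    handle_delta(event.data)                 # data is a MessageDeltaEvent carrying changed fields
  in :"thread.message.completed" | :"thread.message.incomplete"
    puts "message #{event.data.id} finished"
  else
    # :"thread.message.in_progress" and any future variants
  end
end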
# # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.completed"] @@ -136,7 +136,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Models::Beta::Threads::Message } @@ -148,8 +148,8 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. + # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # # before it is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Message] # # @param event [Symbol, :"thread.message.incomplete"] diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 93f240f7..444add26 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -4,8 +4,8 @@ module OpenAI module Models module Beta # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is created. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. module RunStepStreamEvent extend OpenAI::Internal::Type::Union @@ -49,8 +49,8 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is created. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.created"] @@ -74,8 +74,8 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # moves to an `in_progress` state. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.in_progress"] @@ -88,7 +88,7 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a run step delta i.e. any changed fields on a run step during - # streaming. + # streaming. # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent } @@ -100,8 +100,8 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # are being streamed. 
# # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] # # @param event [Symbol, :"thread.run.step.delta"] @@ -125,8 +125,8 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.completed"] @@ -150,8 +150,8 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # fails. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.failed"] @@ -175,8 +175,8 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # is cancelled. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.cancelled"] @@ -200,8 +200,8 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. + # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # # expires. # # # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @param event [Symbol, :"thread.run.step.expired"] diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 14e5177f..90552346 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Beta # Occurs when a new - # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. module RunStreamEvent extend OpenAI::Internal::Type::Union @@ -44,7 +44,7 @@ module RunStreamEvent class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -56,7 +56,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.created"] @@ -69,7 +69,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). 
# # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -81,7 +81,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. + # # moves to a `queued` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.queued"] @@ -94,7 +94,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -106,7 +106,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. + # # moves to an `in_progress` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.in_progress"] @@ -119,7 +119,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -131,7 +131,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. + # # moves to a `requires_action` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.requires_action"] @@ -144,7 +144,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -156,7 +156,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. + # # is completed. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.completed"] @@ -169,7 +169,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -181,7 +181,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. + # # ends with status `incomplete`. 
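The run-level events defined in this file follow the same shape, so a consumer can treat them as a small state machine: react to `requires_action`, keep waiting on progress events, and stop at a terminal status. A sketch under the same assumptions as above (`stream` yields deserialized events; `submit_tool_outputs` is a hypothetical tool-call handler):

TERMINAL_RUN_EVENTS = %i[
  thread.run.completed thread.run.incomplete thread.run.failed
  thread.run.cancelled thread.run.expired
].freeze

final_run =
  stream.each do |event|
    case event.event
    in :"thread.run.requires_action"
      submit_tool_outputs(event.data)               # hypothetical helper
    in Symbol => name if TERMINAL_RUN_EVENTS.include?(name)
      break event.data                              # the final Threads::Run
    else
      # queued / in_progress / cancelling, plus the run-step events
    end
  end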
# # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.incomplete"] @@ -194,7 +194,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -206,7 +206,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. + # # fails. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.failed"] @@ -219,7 +219,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -231,7 +231,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. + # # moves to a `cancelling` status. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.cancelling"] @@ -244,7 +244,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -256,7 +256,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. + # # is cancelled. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.cancelled"] @@ -269,7 +269,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). # # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Models::Beta::Threads::Run } @@ -281,7 +281,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. + # # expires. # # # # @param data [OpenAI::Models::Beta::Threads::Run] # # @param event [Symbol, :"thread.run.expired"] diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index ab9384f2..5f08e2ff 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -19,11 +19,11 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. 
This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -36,16 +36,16 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. # # @return [OpenAI::Models::Beta::Thread::ToolResources, nil] required :tool_resources, -> { OpenAI::Models::Beta::Thread::ToolResources }, nil?: true # @!parse # # Represents a thread that contains - # # [messages](https://platform.openai.com/docs/api-reference/messages). + # # [messages](https://platform.openai.com/docs/api-reference/messages). # # # # @param id [String] # # @param created_at [Integer] @@ -79,9 +79,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. + # # thread. The resources are specific to the type of tool. For example, the + # # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # # tool requires a list of vector store IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] @@ -94,8 +94,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -116,9 +116,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. 
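Put together, the `Thread` resources documented above look like this when creating a thread. A sketch with placeholder IDs; `client.beta.threads.create` follows the resource layout this patch touches, but treat the exact call as an assumption.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

thread = client.beta.threads.create(
  tool_resources: {
    code_interpreter: {
      file_ids: ["file-abc123"]        # at most 20 files for this tool
    },
    file_search: {
      vector_store_ids: ["vs_abc123"]  # at most 1 vector store per thread
    }
  },
  metadata: { purpose: "demo" }        # up to 16 pairs; 64-char keys, 512-char values
)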
# # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 38c89eea..4612ebd8 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -13,63 +13,63 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!attribute assistant_id # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. # # @return [String] required :assistant_id, String # @!attribute instructions # Override the default system message of the assistant. This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. # # @return [String, nil] optional :instructions, String, nil?: true # @!attribute max_completion_tokens # The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. # # @return [Integer, nil] optional :max_completion_tokens, Integer, nil?: true # @!attribute max_prompt_tokens # The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. # # @return [Integer, nil] optional :max_prompt_tokens, Integer, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. 
If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. # # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Model }, nil?: true # @!attribute [r] parallel_tool_calls # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. # # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean @@ -80,40 +80,40 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. 
# # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute [r] thread # Options to create a new thread. If no thread is provided when running a request, - # an empty thread will be created. + # an empty thread will be created. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] optional :thread, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread } @@ -124,28 +124,28 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. # # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tool_resources # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. # # @return [Array, nil] optional :tools, @@ -154,17 +154,17 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. 
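These per-run knobs compose into a single `create_and_run` request. A hedged sketch: the parameter names mirror the attributes above, the method name comes from this params class, and the assistant ID is a placeholder. Note the guidance above about altering `temperature` or `top_p`, not both.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

run = client.beta.threads.create_and_run(
  assistant_id: "asst_123",
  temperature: 0.2,                     # leave top_p at its default
  tool_choice: { type: :file_search },  # force a specific tool
  max_prompt_tokens: 2_000              # exceed it and the run ends `incomplete`
)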
+ # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] optional :truncation_strategy, @@ -214,9 +214,9 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. module Model extend OpenAI::Internal::Type::Union @@ -233,7 +233,7 @@ module Model class Thread < OpenAI::Internal::Type::BaseModel # @!attribute [r] messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. + # start the thread with. # # @return [Array, nil] optional :messages, @@ -245,20 +245,20 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] optional :tool_resources, @@ -267,7 +267,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!parse # # Options to create a new thread. If no thread is provided when running a request, - # # an empty thread will be created. + # # an empty thread will be created. 
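The `thread` parameter just described inlines thread creation into the run, and `truncation_strategy` bounds the context the run starts from. A sketch along the same lines, with placeholder IDs and the method name assumed from this params class:

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

client.beta.threads.create_and_run(
  assistant_id: "asst_123",
  thread: {
    messages: [
      { role: :user, content: "Summarize the attached report." }
    ]
  },
  truncation_strategy: {
    type: :last_messages,   # `auto` instead drops middle messages to fit the window
    last_messages: 10       # keep only the 10 most recent messages
  }
)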
# # # # @param messages [Array] # # @param metadata [Hash{Symbol=>String}, nil] @@ -287,10 +287,10 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. # # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] required :role, enum: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role } @@ -305,11 +305,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -346,10 +346,10 @@ module Content # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role @@ -451,9 +451,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. + # # thread. The resources are specific to the type of tool. For example, the + # # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # # tool requires a list of vector store IDs. 
# # # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] @@ -466,8 +466,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -488,9 +488,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -501,9 +501,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_stores # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this thread. There can be a maximum of 1 vector - # store attached to the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. # # @return [Array, nil] optional :vector_stores, @@ -524,7 +524,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, @@ -536,8 +536,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -548,11 +548,11 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. 
Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -567,7 +567,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy @@ -591,7 +591,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # @!parse # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. + # # `800` and `chunk_overlap_tokens` of `400`. # # # # @param type [Symbol, :auto] # # @@ -626,14 +626,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. # # @return [Integer] required :chunk_overlap_tokens, Integer # @!attribute max_chunk_size_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. # # @return [Integer] required :max_chunk_size_tokens, Integer @@ -679,9 +679,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. + # # specific to the type of tool. For example, the `code_interpreter` tool requires + # # a list of file IDs, while the `file_search` tool requires a list of vector store + # # IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] @@ -694,8 +694,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -716,9 +716,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. 
There can be a maximum of 1 vector store attached to + # the assistant. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -753,23 +753,23 @@ module Tool class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. # # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] required :type, enum: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } # @!attribute last_messages # The number of most recent messages from the thread when constructing the context - # for the run. + # for the run. # # @return [Integer, nil] optional :last_messages, Integer, nil?: true # @!parse # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. + # # control the intial context window of the run. # # # # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] # # @param last_messages [Integer, nil] @@ -779,9 +779,9 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type module Type diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 6e46487b..08818856 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -11,7 +11,7 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. + # start the thread with. # # @return [Array, nil] optional :messages, @@ -23,20 +23,20 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. 
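The metadata limits restated throughout these models (at most 16 pairs, keys up to 64 characters, string values up to 512 characters) are mechanical enough to check client-side before a request. A small, purely illustrative guard:

def valid_metadata?(metadata)
  metadata.size <= 16 &&
    metadata.all? do |key, value|
      key.to_s.length <= 64 && value.is_a?(String) && value.length <= 512
    end
end

valid_metadata?(user_id: "u_42", plan: "pro")  # => true
valid_metadata?(note: "x" * 513)               # => false, value too long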
+ # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources }, nil?: true @@ -61,10 +61,10 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. # # @return [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] required :role, enum: -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Role } @@ -79,11 +79,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -120,10 +120,10 @@ module Content # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. 
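The two documented roles make it possible to seed a thread with a replayed exchange before a run continues it. A sketch using the message shape defined in this params class:

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

client.beta.threads.create(
  messages: [
    { role: :user,      content: "What's our refund window?" },
    { role: :assistant, content: "30 days from delivery." },  # replayed prior answer
    { role: :user,      content: "Does that apply to sale items?" }
  ]
)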
# # @see OpenAI::Models::Beta::ThreadCreateParams::Message#role module Role @@ -223,9 +223,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. + # # thread. The resources are specific to the type of tool. For example, the + # # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # # tool requires a list of vector store IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] @@ -238,8 +238,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -260,9 +260,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -273,9 +273,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_stores # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this thread. There can be a maximum of 1 vector - # store attached to the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. # # @return [Array, nil] optional :vector_stores, @@ -296,7 +296,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, @@ -308,8 +308,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. 
# # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -320,11 +320,11 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -339,7 +339,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. # # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy @@ -363,7 +363,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # @!parse # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. + # # `800` and `chunk_overlap_tokens` of `400`. # # # # @param type [Symbol, :auto] # # @@ -398,14 +398,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. # # @return [Integer] required :chunk_overlap_tokens, Integer # @!attribute max_chunk_size_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. # # @return [Integer] required :max_chunk_size_tokens, Integer diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index 416358fd..e67ecf5f 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -6,7 +6,7 @@ module Beta class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a thread that contains - # [messages](https://platform.openai.com/docs/api-reference/messages). + # [messages](https://platform.openai.com/docs/api-reference/messages). # # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Models::Beta::Thread } @@ -28,8 +28,8 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. + # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # # created. 
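`ThreadStreamEvent` pairs a `data` payload of `OpenAI::Models::Beta::Thread` with the `thread.created` event emitted at the start of a streamed create-and-run call. A sketch of dispatching on it with pattern matching, assuming `events` is an Enumerable of already-decoded stream events (how the stream is opened is outside this hunk):

    events.each do |event|
      case event
      in OpenAI::Models::Beta::ThreadStreamEvent => e
        # Fires once, when the thread backing the run has been created.
        puts "thread created: #{e.data.id}"
      else
        # Message deltas, run status changes, etc. arrive as other variants.
      end
    end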
# # # # @param data [OpenAI::Models::Beta::Thread] # # @param enabled [Boolean] diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 10ec2820..07938465 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -11,20 +11,20 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute tool_resources # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources }, nil?: true @@ -60,9 +60,9 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!parse # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. + # # thread. The resources are specific to the type of tool. For example, the + # # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # # tool requires a list of vector store IDs. # # # # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] # # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] @@ -75,8 +75,8 @@ class ToolResources < OpenAI::Internal::Type::BaseModel class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -97,9 +97,9 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute [r] vector_store_ids # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. 
+ # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. # # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 0086d3c1..10511e12 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -5,8 +5,8 @@ module Models module Beta module Threads # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. module Annotation extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index b5a749fd..726c91ad 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -5,8 +5,8 @@ module Models module Beta module Threads # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. module AnnotationDelta extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index 404d9340..cf4eab2d 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -34,8 +34,8 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @!parse # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. + # # File associated with the assistant or the message. Generated when the assistant + # # uses the "file_search" tool to search files. # # # # @param end_index [Integer] # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 5077a210..d1ac99c2 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -56,8 +56,8 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!parse # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. + # # File associated with the assistant or the message. Generated when the assistant + # # uses the "file_search" tool to search files. 
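The annotation unions reflowed above carry two variants: `file_citation` (produced by `file_search`) and `file_path` (produced by `code_interpreter`). A sketch that collects cited file IDs from a fetched message, assuming text content blocks expose `text.annotations` as modeled in these files:

    cited_ids = message.content.flat_map do |block|
      # Only text blocks carry annotations; image blocks are skipped.
      next [] unless block.respond_to?(:text)

      block.text.annotations.filter_map do |ann|
        # FilePathAnnotation responds to :file_path instead and is ignored here.
        ann.file_citation.file_id if ann.respond_to?(:file_citation)
      end
    end.uniq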
# # # # @param index [Integer] # # @param end_index [Integer] diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index 9bf12486..c1a51ef6 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -34,7 +34,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @!parse # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. + # # `code_interpreter` tool to generate a file. # # # # @param end_index [Integer] # # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index cb1c00a6..975bd449 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -56,7 +56,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!parse # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. + # # `code_interpreter` tool to generate a file. # # # # @param index [Integer] # # @param end_index [Integer] diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index bd2e1f25..631803f6 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -7,15 +7,15 @@ module Threads class ImageFile < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # in the message content. Set `purpose="vision"` when uploading the File if you - # need to later display the file content. + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. # # @return [String] required :file_id, String # @!attribute [r] detail # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFile::Detail } @@ -33,7 +33,7 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. # # @see OpenAI::Models::Beta::Threads::ImageFile#detail module Detail diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 60fc0073..4a5a487e 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -18,7 +18,7 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @!parse # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. + # # in the content of a message. 
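`ImageFile` blocks reference a File uploaded with `purpose="vision"` and take an optional `detail` level, where `low` spends fewer tokens. A sketch of sending one alongside text, assuming a `client.beta.threads.messages.create(thread_id, ...)` method shape (an assumption) and `upload` standing in for a previously uploaded File:

    client.beta.threads.messages.create(
      thread.id,
      role: :user,
      content: [
        # `detail: :low` trades resolution for fewer input tokens.
        {type: :image_file, image_file: {file_id: upload.id, detail: :low}},
        {type: :text, text: "What trend does this chart show?"}
      ]
    )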
# # # # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] # # @param type [Symbol, :image_file] diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 4d654fe4..219cfba9 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -7,7 +7,7 @@ module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] detail # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFileDelta::Detail } @@ -18,8 +18,8 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # in the message content. Set `purpose="vision"` when uploading the File if you - # need to later display the file content. + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. # # @return [String, nil] optional :file_id, String @@ -37,7 +37,7 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. # # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail module Detail diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 51118d76..9a5625ad 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -28,7 +28,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @!parse # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. + # # in the content of a message. # # # # @param index [Integer] # # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index c5db8e23..a76f8467 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -7,14 +7,14 @@ module Threads class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # The external URL of the image, must be a supported image types: jpeg, jpg, png, - # gif, webp. + # gif, webp. # # @return [String] required :url, String # @!attribute [r] detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. Default value is `auto` + # to high resolution using `high`. Default value is `auto` # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURL::Detail } @@ -32,7 +32,7 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. 
Default value is `auto` + # to high resolution using `high`. Default value is `auto` # # @see OpenAI::Models::Beta::Threads::ImageURL#detail module Detail diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 0c69ce94..4ae3e547 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -7,7 +7,7 @@ module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. + # to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURLDelta::Detail } @@ -18,7 +18,7 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!attribute [r] url # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, - # webp. + # webp. # # @return [String, nil] optional :url, String @@ -36,7 +36,7 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. + # to high resolution using `high`. # # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail module Detail diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 4987ff90..355140de 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -14,8 +14,8 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute assistant_id # If applicable, the ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) that - # authored this message. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) that + # authored this message. # # @return [String, nil] required :assistant_id, String, nil?: true @@ -61,11 +61,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -84,29 +84,29 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute run_id # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) - # associated with the creation of this message. Value is `null` when messages are - # created manually using the create message or create thread endpoints. + # associated with the creation of this message. Value is `null` when messages are + # created manually using the create message or create thread endpoints. 
# # @return [String, nil] required :run_id, String, nil?: true # @!attribute status # The status of the message, which can be either `in_progress`, `incomplete`, or - # `completed`. + # `completed`. # # @return [Symbol, OpenAI::Models::Beta::Threads::Message::Status] required :status, enum: -> { OpenAI::Models::Beta::Threads::Message::Status } # @!attribute thread_id # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that - # this message belongs to. + # this message belongs to. # # @return [String] required :thread_id, String # @!parse # # Represents a message within a - # # [thread](https://platform.openai.com/docs/api-reference/threads). + # # [thread](https://platform.openai.com/docs/api-reference/threads). # # # # @param id [String] # # @param assistant_id [String, nil] @@ -257,7 +257,7 @@ module Role end # The status of the message, which can be either `in_progress`, `incomplete`, or - # `completed`. + # `completed`. # # @see OpenAI::Models::Beta::Threads::Message#status module Status diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 793bbb1d..b6ed040c 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContent extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 0d6aafd8..a69eadda 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContentDelta extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 33049950..faab6ef1 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -5,7 +5,7 @@ module Models module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContentPartParam extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index dfabf003..ea41994f 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -19,10 +19,10 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. 
+ # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] required :role, enum: -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Role } @@ -37,11 +37,11 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -77,10 +77,10 @@ module Content # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. module Role extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 11ba7a44..0e313696 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -25,7 +25,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a message delta i.e. any changed fields on a message during - # # streaming. + # # streaming. # # # # @param id [String] # # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 9a7d62f5..b30bcf8a 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -12,9 +12,9 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. 
# # @return [String, nil] optional :after, String @@ -25,9 +25,9 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -38,7 +38,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -49,7 +49,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::MessageListParams::Order } @@ -81,7 +81,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index c83b973f..b02d01a6 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -17,11 +17,11 @@ class MessageUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index d79d1249..bd40aadc 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -7,9 +7,9 @@ module Threads class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the tool call. 
This ID must be referenced when you submit the tool
- #     outputs in using the
- #     [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs)
- #     endpoint.
+ #   outputs using the
+ #   [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs)
+ #   endpoint.
 #
 #   @return [String]
 required :id, String
@@ -22,7 +22,7 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel

 # @!attribute type
 #   The type of tool call the output is required for. For now, this is always
- #     `function`.
+ #   `function`.
 #
 #   @return [Symbol, :function]
 required :type, const: :function
diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb
index 3c22831f..c6436c0d 100644
--- a/lib/openai/models/beta/threads/run.rb
+++ b/lib/openai/models/beta/threads/run.rb
@@ -16,8 +16,8 @@ class Run < OpenAI::Internal::Type::BaseModel

 # @!attribute assistant_id
 #   The ID of the
- #     [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
- #     execution of this run.
+ #   [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
+ #   execution of this run.
 #
 #   @return [String]
 required :assistant_id, String
@@ -54,15 +54,15 @@ class Run < OpenAI::Internal::Type::BaseModel

 # @!attribute incomplete_details
 #   Details on why the run is incomplete. Will be `null` if the run is not
- #     incomplete.
+ #   incomplete.
 #
 #   @return [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil]
 required :incomplete_details, -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails }, nil?: true

 # @!attribute instructions
 #   The instructions that the
- #     [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
- #     this run.
+ #   [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
+ #   this run.
 #
 #   @return [String]
 required :instructions, String
@@ -75,33 +75,33 @@ class Run < OpenAI::Internal::Type::BaseModel

 # @!attribute max_completion_tokens
 #   The maximum number of completion tokens specified to have been used over the
- #     course of the run.
+ #   course of the run.
 #
 #   @return [Integer, nil]
 required :max_completion_tokens, Integer, nil?: true

 # @!attribute max_prompt_tokens
 #   The maximum number of prompt tokens specified to have been used over the course
- #     of the run.
+ #   of the run.
 #
 #   @return [Integer, nil]
 required :max_prompt_tokens, Integer, nil?: true

 # @!attribute metadata
 #   Set of 16 key-value pairs that can be attached to an object. This can be useful
- #     for storing additional information about the object in a structured format, and
- #     querying for objects via API or the dashboard.
+ #   for storing additional information about the object in a structured format, and
+ #   querying for objects via API or the dashboard.
 #
- #     Keys are strings with a maximum length of 64 characters. Values are strings with
- #     a maximum length of 512 characters.
+ #   Keys are strings with a maximum length of 64 characters. Values are strings with
+ #   a maximum length of 512 characters.
 #
 #   @return [Hash{Symbol=>String}, nil]
 required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true

 # @!attribute model
 #   The model that the
- #     [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
- #     this run.
# # @return [String] required :model, String @@ -114,40 +114,40 @@ class Run < OpenAI::Internal::Type::BaseModel # @!attribute parallel_tool_calls # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. # # @return [Boolean] required :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!attribute required_action # Details on the action required to continue the run. Will be `null` if no action - # is required. + # is required. # # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] required :required_action, -> { OpenAI::Models::Beta::Threads::Run::RequiredAction }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. 
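The `response_format` notes reflowed above stress that JSON mode must be paired with an explicit instruction to emit JSON. A sketch of starting a run that way, assuming a `client.beta.threads.runs.create(thread_id, ...)` method shape (an assumption, not established by this hunk):

    run = client.beta.threads.runs.create(
      thread.id,
      assistant_id: assistant.id,
      # Per the docs above: without an instruction to produce JSON, JSON mode
      # may stream whitespace until the token limit is reached.
      instructions: "Respond with a single JSON object.",
      response_format: {type: :json_object}
    )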
 #
 #   @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
 required :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true
@@ -160,49 +160,49 @@ class Run < OpenAI::Internal::Type::BaseModel

 # @!attribute status
 #   The status of the run, which can be either `queued`, `in_progress`,
- #     `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`,
- #     `incomplete`, or `expired`.
+ #   `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`,
+ #   `incomplete`, or `expired`.
 #
 #   @return [Symbol, OpenAI::Models::Beta::Threads::RunStatus]
 required :status, enum: -> { OpenAI::Models::Beta::Threads::RunStatus }

 # @!attribute thread_id
 #   The ID of the [thread](https://platform.openai.com/docs/api-reference/threads)
- #     that was executed on as a part of this run.
+ #   that was executed on as a part of this run.
 #
 #   @return [String]
 required :thread_id, String

 # @!attribute tool_choice
 #   Controls which (if any) tool is called by the model. `none` means the model will
- #     not call any tools and instead generates a message. `auto` is the default value
- #     and means the model can pick between generating a message or calling one or more
- #     tools. `required` means the model must call one or more tools before responding
- #     to the user. Specifying a particular tool like `{"type": "file_search"}` or
- #     `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- #     call that tool.
+ #   not call any tools and instead generates a message. `auto` is the default value
+ #   and means the model can pick between generating a message or calling one or more
+ #   tools. `required` means the model must call one or more tools before responding
+ #   to the user. Specifying a particular tool like `{"type": "file_search"}` or
+ #   `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ #   call that tool.
 #
 #   @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil]
 required :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true

 # @!attribute tools
 #   The list of tools that the
- #     [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
- #     this run.
+ #   [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
+ #   this run.
 #
 #   @return [Array]
 required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] }

 # @!attribute truncation_strategy
 #   Controls for how a thread will be truncated prior to the run. Use this to
- #     control the intial context window of the run.
+ #   control the initial context window of the run.
 #
 #   @return [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil]
 required :truncation_strategy, -> { OpenAI::Models::Beta::Threads::Run::TruncationStrategy }, nil?: true

 # @!attribute usage
 #   Usage statistics related to the run. This value will be `null` if the run is not
- #     in a terminal state (i.e. `in_progress`, `queued`, etc.).
+ #   in a terminal state (i.e. `in_progress`, `queued`, etc.).
 #
 #   @return [OpenAI::Models::Beta::Threads::Run::Usage, nil]
 required :usage, -> { OpenAI::Models::Beta::Threads::Run::Usage }, nil?: true
@@ -221,7 +221,7 @@ class Run < OpenAI::Internal::Type::BaseModel

 # @!parse
 #   # Represents an execution run on a
- #   #   [thread](https://platform.openai.com/docs/api-reference/threads).
+ #   # [thread](https://platform.openai.com/docs/api-reference/threads).
 #   #
 #   # @param id [String]
 #   # @param assistant_id [String]
@@ -290,7 +290,7 @@ class Run < OpenAI::Internal::Type::BaseModel
 class IncompleteDetails < OpenAI::Internal::Type::BaseModel
 # @!attribute [r] reason
 #   The reason why the run is incomplete. This will point to which specific token
- #     limit was reached over the course of the run.
+ #   limit was reached over the course of the run.
 #
 #   @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil]
 optional :reason, enum: -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason }
@@ -301,7 +301,7 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel

 # @!parse
 #   # Details on why the run is incomplete. Will be `null` if the run is not
- #   #   incomplete.
+ #   # incomplete.
 #   #
 #   # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason]
 #   #
@@ -310,7 +310,7 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel
 #   def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

 # The reason why the run is incomplete. This will point to which specific token
- #   limit was reached over the course of the run.
+ # limit was reached over the course of the run.
 #
 # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason
 module Reason
@@ -386,7 +386,7 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel

 # @!parse
 #   # Details on the action required to continue the run. Will be `null` if no action
- #   #   is required.
+ #   # is required.
 #   #
 #   # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs]
 #   # @param type [Symbol, :submit_tool_outputs]
@@ -419,23 +419,23 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel
 class TruncationStrategy < OpenAI::Internal::Type::BaseModel
 # @!attribute type
 #   The truncation strategy to use for the thread. The default is `auto`. If set to
- #     `last_messages`, the thread will be truncated to the n most recent messages in
- #     the thread. When set to `auto`, messages in the middle of the thread will be
- #     dropped to fit the context length of the model, `max_prompt_tokens`.
+ #   `last_messages`, the thread will be truncated to the n most recent messages in
+ #   the thread. When set to `auto`, messages in the middle of the thread will be
+ #   dropped to fit the context length of the model, `max_prompt_tokens`.
 #
 #   @return [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type]
 required :type, enum: -> { OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type }

 # @!attribute last_messages
 #   The number of most recent messages from the thread when constructing the context
- #     for the run.
+ #   for the run.
 #
 #   @return [Integer, nil]
 optional :last_messages, Integer, nil?: true

 # @!parse
 #   # Controls for how a thread will be truncated prior to the run. Use this to
- #   #   control the intial context window of the run.
+ #   # control the initial context window of the run.
 #   #
 #   # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type]
 #   # @param last_messages [Integer, nil]
@@ -445,9 +445,9 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
 #   def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void

 # The truncation strategy to use for the thread. The default is `auto`. If set to
- #   `last_messages`, the thread will be truncated to the n most recent messages in
- #   the thread.
When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. # # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type module Type @@ -486,7 +486,7 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!parse # # Usage statistics related to the run. This value will be `null` if the run is not - # # in a terminal state (i.e. `in_progress`, `queued`, etc.). + # # in a terminal state (i.e. `in_progress`, `queued`, etc.). # # # # @param completion_tokens [Integer] # # @param prompt_tokens [Integer] diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index bb191666..a058e78e 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -14,20 +14,20 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute assistant_id # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. # # @return [String] required :assistant_id, String # @!attribute [r] include # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. # # @return [Array, nil] optional :include, @@ -39,8 +39,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute additional_instructions # Appends additional instructions at the end of the instructions for the run. This - # is useful for modifying the behavior on a per-run basis without overriding other - # instructions. + # is useful for modifying the behavior on a per-run basis without overriding other + # instructions. # # @return [String, nil] optional :additional_instructions, String, nil?: true @@ -55,56 +55,56 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute instructions # Overrides the - # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) - # of the assistant. This is useful for modifying the behavior on a per-run basis. + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. # # @return [String, nil] optional :instructions, String, nil?: true # @!attribute max_completion_tokens # The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. 
If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. # # @return [Integer, nil] optional :max_completion_tokens, Integer, nil?: true # @!attribute max_prompt_tokens # The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. # # @return [Integer, nil] optional :max_prompt_tokens, Integer, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. # # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::Threads::RunCreateParams::Model }, nil?: true # @!attribute [r] parallel_tool_calls # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. # # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean @@ -116,62 +116,62 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. 
+ # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. # # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute tool_choice # Controls which (if any) tool is called by the model. 
`none` means the model will
-    #   not call any tools and instead generates a message. `auto` is the default value
-    #   and means the model can pick between generating a message or calling one or more
-    #   tools. `required` means the model must call one or more tools before responding
-    #   to the user. Specifying a particular tool like `{"type": "file_search"}` or
-    #   `{"type": "function", "function": {"name": "my_function"}}` forces the model to
-    #   call that tool.
+    #   not call any tools and instead generates a message. `auto` is the default value
+    #   and means the model can pick between generating a message or calling one or more
+    #   tools. `required` means the model must call one or more tools before responding
+    #   to the user. Specifying a particular tool like `{"type": "file_search"}` or
+    #   `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+    #   call that tool.
   #
   #   @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil]
   optional :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true

   # @!attribute tools
   #   Override the tools the assistant can use for this run. This is useful for
-    #   modifying the behavior on a per-run basis.
+    #   modifying the behavior on a per-run basis.
   #
   #   @return [Array, nil]
   optional :tools,
@@ -180,17 +180,17 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel

   # @!attribute top_p
   #   An alternative to sampling with temperature, called nucleus sampling, where the
-    #   model considers the results of the tokens with top_p probability mass. So 0.1
-    #   means only the tokens comprising the top 10% probability mass are considered.
+    #   model considers the results of the tokens with top_p probability mass. So 0.1
+    #   means only the tokens comprising the top 10% probability mass are considered.
   #
-    #   We generally recommend altering this or temperature but not both.
+    #   We generally recommend altering this or temperature but not both.
   #
   #   @return [Float, nil]
   optional :top_p, Float, nil?: true

   # @!attribute truncation_strategy
   #   Controls for how a thread will be truncated prior to the run. Use this to
-    #   control the intial context window of the run.
+    #   control the initial context window of the run.
   #
   #   @return [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil]
   optional :truncation_strategy,
@@ -254,10 +254,10 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel
   # @!attribute role
   #   The role of the entity that is creating the message. Allowed values include:
   #
-    #   - `user`: Indicates the message is sent by an actual user and should be used in
-    #     most cases to represent user-generated messages.
-    #   - `assistant`: Indicates the message is generated by the assistant. Use this
-    #     value to insert messages from the assistant into the conversation.
+    #   - `user`: Indicates the message is sent by an actual user and should be used in
+    #     most cases to represent user-generated messages.
+    #   - `assistant`: Indicates the message is generated by the assistant. Use this
+    #     value to insert messages from the assistant into the conversation.
   #
   #   @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role]
   required :role, enum: -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role }
@@ -272,11 +272,11 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel

   # @!attribute metadata
   #   Set of 16 key-value pairs that can be attached to an object. 
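[Editor's aside] To make the `tool_choice` and `truncation_strategy` hashes concrete, a minimal sketch — the hash shapes mirror the attribute docs above, while the IDs are placeholders and the `runs.create` call shape is an assumption about this SDK:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    run = client.beta.threads.runs.create(
      "thread_123",
      assistant_id: "asst_123",
      # Force the file_search tool rather than letting the model choose.
      tool_choice: {type: "file_search"},
      # Seed the context window from only the 10 most recent messages.
      truncation_strategy: {type: "last_messages", last_messages: 10}
    )
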
This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -313,10 +313,10 @@ module Content # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. # # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role @@ -395,9 +395,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. module Model extend OpenAI::Internal::Type::Union @@ -414,23 +414,23 @@ module Model class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. # # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] required :type, enum: -> { OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type } # @!attribute last_messages # The number of most recent messages from the thread when constructing the context - # for the run. + # for the run. # # @return [Integer, nil] optional :last_messages, Integer, nil?: true # @!parse # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. + # # control the intial context window of the run. 
# # # # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] # # @param last_messages [Integer, nil] @@ -440,9 +440,9 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. # # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type module Type diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index f2b9f759..3d4e377b 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -12,9 +12,9 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -25,9 +25,9 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -38,7 +38,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -49,7 +49,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::RunListParams::Order } @@ -70,7 +70,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
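[Editor's aside] The cursor fields above compose into a simple pagination loop. A sketch, assuming `runs.list` returns a page object with a `data` array — an assumption about this SDK's surface; a page class may also auto-paginate for you:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    cursor = nil
    loop do
      params = {limit: 100, order: :asc}
      params[:after] = cursor if cursor      # resume after the last ID seen
      page = client.beta.threads.runs.list("thread_123", **params)
      page.data.each { |run| puts "#{run.id}: #{run.status}" }
      break if page.data.empty?
      cursor = page.data.last.id
    end
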
module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index efb9f2b6..d17f7dc6 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -5,8 +5,8 @@ module Models module Beta module Threads # The status of the run, which can be either `queued`, `in_progress`, - # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, - # `incomplete`, or `expired`. + # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + # `incomplete`, or `expired`. module RunStatus extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 4e4733e6..2243cb1d 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -46,7 +46,7 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] tool_call_id # The ID of the tool call in the `required_action` object within the run object - # the output is being submitted for. + # the output is being submitted for. # # @return [String, nil] optional :tool_call_id, String diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 97c387fc..68b6536b 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -17,11 +17,11 @@ class RunUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index 1fbdc55b..ab77db4e 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -40,7 +40,7 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel class Image < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the - # image. + # image. # # @return [String, nil] optional :file_id, String diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 4daf4c57..60262e17 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -21,7 +21,7 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. 
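[Editor's aside] The `tool_call_id` field above is the hinge of the tool-output round trip. A hedged sketch of answering a `requires_action` run — the accessor chain and the `submit_tool_outputs` call shape are assumptions based on the API's JSON layout, not verified against this SDK version:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    # `run` is a previously retrieved run whose status is `requires_action`.
    calls = run.required_action.submit_tool_outputs.tool_calls

    outputs = calls.map do |call|
      {tool_call_id: call.id, output: "42"}   # placeholder tool result
    end

    client.beta.threads.runs.submit_tool_outputs(
      run.id,
      thread_id: "thread_123",                # placeholder
      tool_outputs: outputs
    )
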
This is always going to be `code_interpreter` for this - # type of tool call. + # type of tool call. # # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter @@ -47,8 +47,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute outputs # The outputs from the Code Interpreter tool call. Code Interpreter can output one - # or more items, including text (`logs`) or images (`image`). Each of these are - # represented by a different object type. + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. # # @return [Array] required :outputs, @@ -126,7 +126,7 @@ class Image < OpenAI::Internal::Type::BaseModel class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the - # image. + # image. # # @return [String] required :file_id, String diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 6573f533..a9cbef61 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -14,7 +14,7 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. This is always going to be `code_interpreter` for this - # type of tool call. + # type of tool call. # # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter @@ -66,8 +66,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute [r] outputs # The outputs from the Code Interpreter tool call. Code Interpreter can output one - # or more items, including text (`logs`) or images (`image`). Each of these are - # represented by a different object type. + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. # # @return [Array, nil] optional :outputs, diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index c8331a7b..3c6c423a 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -20,7 +20,7 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. This is always going to be `file_search` for this type of - # tool call. + # tool call. # # @return [Symbol, :file_search] required :type, const: :file_search @@ -72,7 +72,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] required :ranker, @@ -80,7 +80,7 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute score_threshold # The score threshold for the file search. All values must be a floating point - # number between 0 and 1. + # number between 0 and 1. # # @return [Float] required :score_threshold, Float @@ -96,7 +96,7 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The ranker to use for the file search. 
If not specified will use the `auto` - # ranker. + # ranker. # # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker @@ -128,14 +128,14 @@ class Result < OpenAI::Internal::Type::BaseModel # @!attribute score # The score of the result. All values must be a floating point number between 0 - # and 1. + # and 1. # # @return [Float] required :score, Float # @!attribute [r] content # The content of the result that was found. The content is only included if - # requested via the include query parameter. + # requested via the include query parameter. # # @return [Array, nil] optional :content, diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index eb5776d0..4fa2dc05 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -20,7 +20,7 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. This is always going to be `file_search` for this type of - # tool call. + # tool call. # # @return [Symbol, :file_search] required :type, const: :file_search diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index f0677ac7..cc1eb09a 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -20,7 +20,7 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. This is always going to be `function` for this type of - # tool call. + # tool call. # # @return [Symbol, :function] required :type, const: :function @@ -50,8 +50,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!attribute output # The output of the function. This will be `null` if the outputs have not been - # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) - # yet. + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. # # @return [String, nil] required :output, String, nil?: true diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index c4bc767e..6a164004 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -14,7 +14,7 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of tool call. This is always going to be `function` for this type of - # tool call. + # tool call. # # @return [Symbol, :function] required :type, const: :function @@ -73,8 +73,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!attribute output # The output of the function. This will be `null` if the outputs have not been - # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) - # yet. + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. 
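[Editor's aside] The `ranker` and `score_threshold` fields documented above can also be set on the request side when attaching the file_search tool. A hedged sketch — the tool hash shape follows the public API, and whether every field round-trips through this SDK version is not verified here:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    run = client.beta.threads.runs.create(
      "thread_123",
      assistant_id: "asst_123",
      tools: [
        {
          type: "file_search",
          file_search: {
            # Drop results scoring below 0.5; keep the default ranker.
            ranking_options: {ranker: "auto", score_threshold: 0.5}
          }
        }
      ]
    )
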
# # @return [String, nil] optional :output, String, nil?: true diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 9261d7c5..a4111bd4 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -15,8 +15,8 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute assistant_id # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) - # associated with the run step. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) + # associated with the run step. # # @return [String] required :assistant_id, String @@ -41,7 +41,7 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute expired_at # The Unix timestamp (in seconds) for when the run step expired. A step is - # considered expired if the parent run is expired. + # considered expired if the parent run is expired. # # @return [Integer, nil] required :expired_at, Integer, nil?: true @@ -54,18 +54,18 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute last_error # The last error associated with this run step. Will be `null` if there are no - # errors. + # errors. # # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] required :last_error, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::LastError }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -78,14 +78,14 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute run_id # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that - # this run step is a part of. + # this run step is a part of. # # @return [String] required :run_id, String # @!attribute status # The status of the run step, which can be either `in_progress`, `cancelled`, - # `failed`, `completed`, or `expired`. + # `failed`, `completed`, or `expired`. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] required :status, enum: -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Status } @@ -98,7 +98,7 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute thread_id # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # that was run. + # that was run. # # @return [String] required :thread_id, String @@ -111,7 +111,7 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics related to the run step. This value will be `null` while the - # run step's status is `in_progress`. + # run step's status is `in_progress`. 
# # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] required :usage, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Usage }, nil?: true @@ -176,7 +176,7 @@ class LastError < OpenAI::Internal::Type::BaseModel # @!parse # # The last error associated with this run step. Will be `null` if there are no - # # errors. + # # errors. # # # # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] # # @param message [String] @@ -203,7 +203,7 @@ module Code end # The status of the run step, which can be either `in_progress`, `cancelled`, - # `failed`, `completed`, or `expired`. + # `failed`, `completed`, or `expired`. # # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status module Status @@ -279,7 +279,7 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!parse # # Usage statistics related to the run step. This value will be `null` while the - # # run step's status is `in_progress`. + # # run step's status is `in_progress`. # # # # @param completion_tokens [Integer] # # @param prompt_tokens [Integer] diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index 027d0490..a71b9858 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -26,7 +26,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a run step delta i.e. any changed fields on a run step during - # # streaming. + # # streaming. # # # # @param id [String] # # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index a7c48d03..1b281161 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -18,9 +18,9 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -31,9 +31,9 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -44,12 +44,12 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] include # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. 
+ # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. # # @return [Array, nil] optional :include, @@ -61,7 +61,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -72,7 +72,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::Runs::StepListParams::Order } @@ -95,7 +95,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index efbf4aaf..a02c8ce0 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -23,12 +23,12 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] include # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. # # @return [Array, nil] optional :include, diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 7f61ee05..8c1394b6 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -14,8 +14,8 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # @!attribute [r] tool_calls # An array of tool calls the run step was involved in. These can be associated - # with one of three types of tools: `code_interpreter`, `file_search`, or - # `function`. + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. 
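[Editor's aside] The single supported `include` value above is easiest to see in a call. A sketch — the `steps.list` shape is an assumption about this SDK, while the include string is copied verbatim from the docs:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    steps = client.beta.threads.runs.steps.list(
      "run_123",                               # placeholder run ID
      thread_id: "thread_123",
      include: ["step_details.tool_calls[*].file_search.results[*].content"]
    )
    steps.data.each { |step| puts "#{step.id}: #{step.status}" }
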
# # @return [Array, nil] optional :tool_calls, diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index bd9aa901..1ac644f3 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -8,8 +8,8 @@ module Runs class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # An array of tool calls the run step was involved in. These can be associated - # with one of three types of tools: `code_interpreter`, `file_search`, or - # `function`. + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. # # @return [Array] required :tool_calls, diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 0f9f79a6..97ee34db 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -15,7 +15,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute choices # A list of chat completion choices. Can be more than one if `n` is greater - # than 1. + # than 1. # # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice] } @@ -47,8 +47,8 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute [r] system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. # # @return [String, nil] optional :system_fingerprint, String @@ -69,7 +69,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a chat completion response returned by model, based on the provided - # # input. + # # input. # # # # @param id [String] # # @param choices [Array] @@ -99,11 +99,11 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] required :finish_reason, enum: -> { OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason } @@ -137,11 +137,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason the model stopped generating tokens. 
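[Editor's aside] Since `finish_reason` drives most client-side handling, a small dispatch sketch over the enum documented above — `client` is set up as in the earlier sketches, and the symbol casing is an assumption about how this SDK surfaces enum values:

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{role: "user", content: "Say hi"}]
    )

    completion.choices.each do |choice|
      case choice.finish_reason
      in :stop then puts choice.message.content
      in :length then warn "output truncated; raise max_tokens or shorten input"
      in :tool_calls then warn "model requested a tool call"
      in :content_filter then warn "content was filtered"
      else warn "unhandled finish_reason: #{choice.finish_reason}"
      end
    end
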
This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. # # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason module FinishReason diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 5ac2d838..992b3818 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -12,14 +12,14 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute audio # Data about a previous audio response from the model. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # [Learn more](https://platform.openai.com/docs/guides/audio). # # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio }, nil?: true # @!attribute content # The contents of the assistant message. Required unless `tool_calls` or - # `function_call` is specified. + # `function_call` is specified. # # @return [String, Array, nil] optional :content, @@ -28,7 +28,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] optional :function_call, @@ -37,7 +37,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute [r] name # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. # # @return [String, nil] optional :name, String @@ -99,7 +99,7 @@ class Audio < OpenAI::Internal::Type::BaseModel # @!parse # # Data about a previous audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). + # # [Learn more](https://platform.openai.com/docs/guides/audio). # # # # @param id [String] # # @@ -109,7 +109,7 @@ class Audio < OpenAI::Internal::Type::BaseModel end # The contents of the assistant message. Required unless `tool_calls` or - # `function_call` is specified. + # `function_call` is specified. # # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content module Content @@ -122,7 +122,7 @@ module Content variant -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } # Learn about - # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
module ArrayOfContentPart extend OpenAI::Internal::Type::Union @@ -152,9 +152,9 @@ module ArrayOfContentPart class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. # # @return [String] required :arguments, String @@ -167,7 +167,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!parse # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. + # # that should be called, as generated by the model. # # # # @param arguments [String] # # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index e5c8bb97..238b8a6b 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -12,14 +12,14 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64 encoded audio bytes generated by the model, in the format specified in - # the request. + # the request. # # @return [String] required :data, String # @!attribute expires_at # The Unix timestamp (in seconds) for when this audio response will no longer be - # accessible on the server for use in multi-turn conversations. + # accessible on the server for use in multi-turn conversations. # # @return [Integer] required :expires_at, Integer @@ -32,8 +32,8 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!parse # # If the audio output modality is requested, this object contains data about the - # # audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). + # # audio response from the model. + # # [Learn more](https://platform.openai.com/docs/guides/audio). # # # # @param id [String] # # @param data [String] diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index b1ede0ce..07ca649d 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -6,22 +6,22 @@ module Chat class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # @!attribute format_ # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, - # or `pcm16`. + # or `pcm16`. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] required :format_, enum: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Format }, api_name: :format # @!attribute voice # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. # # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } # @!parse # # Parameters for audio output. Required when audio output is requested with - # # `modalities: ["audio"]`. 
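[Editor's aside] The repeated warning above — that generated `arguments` may not be valid JSON — deserves a defensive idiom. Plain Ruby, with no SDK assumptions beyond `arguments` being a string:

    require "json"

    # Returns a Hash of arguments, or nil if the model's output is unusable.
    def safe_function_arguments(raw)
      parsed = JSON.parse(raw)
      parsed.is_a?(Hash) ? parsed : nil   # a function schema expects an object
    rescue JSON::ParserError
      nil                                 # treat malformed output as no call
    end
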
- # # [Learn more](https://platform.openai.com/docs/guides/audio). + # # `modalities: ["audio"]`. + # # [Learn more](https://platform.openai.com/docs/guides/audio). # # # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] # # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] @@ -31,7 +31,7 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, - # or `pcm16`. + # or `pcm16`. # # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ module Format @@ -51,7 +51,7 @@ module Format end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. # # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 5c693f05..b9c033b3 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -12,8 +12,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @!attribute choices # A list of chat completion choices. Can contain more than one elements if `n` is - # greater than 1. Can also be empty for the last chunk if you set - # `stream_options: {"include_usage": true}`. + # greater than 1. Can also be empty for the last chunk if you set + # `stream_options: {"include_usage": true}`. # # @return [Array] required :choices, @@ -21,7 +21,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @!attribute created # The Unix timestamp (in seconds) of when the chat completion was created. Each - # chunk has the same timestamp. + # chunk has the same timestamp. # # @return [Integer] required :created, Integer @@ -46,8 +46,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @!attribute [r] system_fingerprint # This fingerprint represents the backend configuration that the model runs with. - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. # # @return [String, nil] optional :system_fingerprint, String @@ -58,20 +58,20 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @!attribute usage # An optional field that will only be present when you set - # `stream_options: {"include_usage": true}` in your request. When present, it - # contains a null value **except for the last chunk** which contains the token - # usage statistics for the entire request. + # `stream_options: {"include_usage": true}` in your request. When present, it + # contains a null value **except for the last chunk** which contains the token + # usage statistics for the entire request. # - # **NOTE:** If the stream is interrupted or cancelled, you may not receive the - # final usage chunk which contains the total token usage for the request. + # **NOTE:** If the stream is interrupted or cancelled, you may not receive the + # final usage chunk which contains the total token usage for the request. 
# # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true # @!parse # # Represents a streamed chunk of a chat completion response returned by the model, - # # based on the provided input. - # # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). + # # based on the provided input. + # # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). # # # # @param id [String] # # @param choices [Array] @@ -107,11 +107,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] required :finish_reason, @@ -150,7 +150,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!attribute [r] function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } @@ -204,9 +204,9 @@ class Delta < OpenAI::Internal::Type::BaseModel class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. # # @return [String, nil] optional :arguments, String @@ -227,7 +227,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!parse # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. + # # that should be called, as generated by the model. # # # # @param arguments [String] # # @param name [String] @@ -305,9 +305,9 @@ class ToolCall < OpenAI::Internal::Type::BaseModel class Function < OpenAI::Internal::Type::BaseModel # @!attribute [r] arguments # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. 
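[Editor's aside] The last-chunk-only `usage` behavior above suggests this consumption pattern. A sketch: the streaming entry point named here is an assumption about this SDK (substitute whatever your version exposes), and `client` is set up as in the earlier sketches:

    usage = nil
    stream = client.chat.completions.stream_raw(   # assumed method name
      model: "gpt-4o",
      messages: [{role: "user", content: "Count to three"}],
      stream_options: {include_usage: true}
    )

    stream.each do |chunk|
      chunk.choices.each { |c| print c.delta.content }  # content may be nil
      usage = chunk.usage if chunk.usage                # only the final chunk
    end
    puts "\ntotal tokens: #{usage&.total_tokens}"
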
Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. # # @return [String, nil] optional :arguments, String @@ -353,11 +353,11 @@ module Type end # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 30da9605..31635ff4 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # Learn about - # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ChatCompletionContentPart extend OpenAI::Internal::Type::Union @@ -36,7 +36,7 @@ class File < OpenAI::Internal::Type::BaseModel # @!parse # # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text - # # generation. + # # generation. # # # # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] # # @param type [Symbol, :file] @@ -49,7 +49,7 @@ class File < OpenAI::Internal::Type::BaseModel class File < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_data # The base64 encoded file data, used when passing the file to the model as a - # string. + # string. # # @return [String, nil] optional :file_data, String diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 04a9152b..a22c144e 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -35,7 +35,7 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute [r] detail # Specifies the detail level of the image. Learn more in the - # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail } @@ -53,7 +53,7 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Specifies the detail level of the image. Learn more in the - # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
+ # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail diff --git a/lib/openai/models/chat/chat_completion_content_part_text.rb b/lib/openai/models/chat/chat_completion_content_part_text.rb index e286368a..e1975859 100644 --- a/lib/openai/models/chat/chat_completion_content_part_text.rb +++ b/lib/openai/models/chat/chat_completion_content_part_text.rb @@ -18,7 +18,7 @@ class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # @!parse # # Learn about - # # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # # [text inputs](https://platform.openai.com/docs/guides/text-generation). # # # # @param text [String] # # @param type [Symbol, :text] diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 1fe2ecc1..ecd1e5bd 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -18,7 +18,7 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute [r] name # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. # # @return [String, nil] optional :name, String @@ -29,8 +29,8 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!parse # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, `developer` messages - # # replace the previous `system` messages. + # # messages sent by the user. With o1 models and newer, `developer` messages + # # replace the previous `system` messages. # # # # @param content [String, Array] # # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_function_call_option.rb b/lib/openai/models/chat/chat_completion_function_call_option.rb index 9434599e..3ae8526b 100644 --- a/lib/openai/models/chat/chat_completion_function_call_option.rb +++ b/lib/openai/models/chat/chat_completion_function_call_option.rb @@ -12,7 +12,7 @@ class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # @!parse # # Specifying a particular function via `{"name": "my_function"}` forces the model - # # to call that function. + # # to call that function. # # # # @param name [String] # # diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 9b1828e2..b36682ef 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -24,7 +24,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!attribute [r] annotations # Annotations for the message, when applicable, as when using the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # # @return [Array, nil] optional :annotations, @@ -36,15 +36,15 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!attribute audio # If the audio output modality is requested, this object contains data about the - # audio response from the model. 
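[Editor's aside] The `detail` knob above pairs with an image content part like so — a shape-only sketch whose URL is a placeholder:

    message = {
      role: "user",
      content: [
        {type: "text", text: "What is in this image?"},
        {
          type: "image_url",
          # "low" trades image fidelity for fewer input tokens.
          image_url: {url: "https://example.com/photo.png", detail: "low"}
        }
      ]
    }
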
- # [Learn more](https://platform.openai.com/docs/guides/audio). + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). # # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudio }, nil?: true # @!attribute [r] function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall } @@ -159,9 +159,9 @@ class URLCitation < OpenAI::Internal::Type::BaseModel class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. # # @return [String] required :arguments, String @@ -174,7 +174,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!parse # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. + # # that should be called, as generated by the model. # # # # @param arguments [String] # # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 6796ea10..ed72d515 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -4,8 +4,8 @@ module OpenAI module Models module Chat # Developer-provided instructions that the model should follow, regardless of - # messages sent by the user. With o1 models and newer, `developer` messages - # replace the previous `system` messages. + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. module ChatCompletionMessageParam extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index 508bc7ca..cca6cc4e 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -35,9 +35,9 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. 
# # @return [String] required :arguments, String diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index 9058c083..c887906f 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -17,7 +17,7 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @!parse # # Specifies a tool the model should use. Use to force the model to call a specific - # # function. + # # function. # # # # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] # # @param type [Symbol, :function] diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index dd3150fd..0cc7df62 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -6,22 +6,22 @@ module Chat class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content that should be matched when generating a model response. If - # generated tokens would match this content, the entire model response can be - # returned much more quickly. + # generated tokens would match this content, the entire model response can be + # returned much more quickly. # # @return [String, Array] required :content, union: -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content } # @!attribute type # The type of the predicted content you want to provide. This type is currently - # always `content`. + # always `content`. # # @return [Symbol, :content] required :type, const: :content # @!parse # # Static predicted output content, such as the content of a text file that is - # # being regenerated. + # # being regenerated. # # # # @param content [String, Array] # # @param type [Symbol, :content] @@ -31,8 +31,8 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The content that should be matched when generating a model response. If - # generated tokens would match this content, the entire model response can be - # returned much more quickly. + # generated tokens would match this content, the entire model response can be + # returned much more quickly. # # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content module Content diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index 7b23b7a2..cb29b9a6 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -6,12 +6,12 @@ module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. - # The `usage` field on this chunk shows the token usage statistics for the entire - # request, and the `choices` field will always be an empty array. + # The `usage` field on this chunk shows the token usage statistics for the entire + # request, and the `choices` field will always be an empty array. # - # All other chunks will also include a `usage` field, but with a null value. - # **NOTE:** If the stream is interrupted, you may not receive the final usage - # chunk which contains the total token usage for the request. 
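As a sketch of how `include_usage` surfaces in practice (the streaming entry point is assumed and not part of this diff; `stream` is taken to yield parsed chat chunk objects):

    usage = nil
    stream.each do |chunk|
      usage ||= chunk.usage    # nil on every chunk except the final one
      print chunk.choices.first&.delta&.content
    end
    puts "\ntotal tokens: #{usage&.total_tokens}"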
+ # All other chunks will also include a `usage` field, but with a null value. + # **NOTE:** If the stream is interrupted, you may not receive the final usage + # chunk which contains the total token usage for the request. # # @return [Boolean, nil] optional :include_usage, OpenAI::Internal::Type::Boolean diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index d1d9b2f7..44d1d207 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -18,7 +18,7 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute [r] name # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. # # @return [String, nil] optional :name, String @@ -29,8 +29,8 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!parse # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, use `developer` messages - # # for this purpose instead. + # # messages sent by the user. With o1 models and newer, use `developer` messages + # # for this purpose instead. # # # # @param content [String, Array] # # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index 8b6d0019..a9f0bc0d 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -12,25 +12,25 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # @!attribute bytes # A list of integers representing the UTF-8 bytes representation of the token. - # Useful in instances where characters are represented by multiple tokens and - # their byte representations must be combined to generate the correct text - # representation. Can be `null` if there is no bytes representation for the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. # # @return [Array, nil] required :bytes, OpenAI::Internal::Type::ArrayOf[Integer], nil?: true # @!attribute logprob # The log probability of this token, if it is within the top 20 most likely - # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very - # unlikely. + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. # # @return [Float] required :logprob, Float # @!attribute top_logprobs # List of the most likely tokens and their log probability, at this token - # position. In rare cases, there may be fewer than the number of requested - # `top_logprobs` returned. + # position. In rare cases, there may be fewer than the number of requested + # `top_logprobs` returned. # # @return [Array] required :top_logprobs, @@ -55,17 +55,17 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute bytes # A list of integers representing the UTF-8 bytes representation of the token. - # Useful in instances where characters are represented by multiple tokens and - # their byte representations must be combined to generate the correct text - # representation. 
Can be `null` if there is no bytes representation for the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. # # @return [Array, nil] required :bytes, OpenAI::Internal::Type::ArrayOf[Integer], nil?: true # @!attribute logprob # The log probability of this token, if it is within the top 20 most likely - # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very - # unlikely. + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. # # @return [Float] required :logprob, Float diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index bb04bbc8..c57aaf23 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -4,14 +4,14 @@ module OpenAI module Models module Chat # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tool and instead generates a message. `auto` means the model can - # pick between generating a message or calling one or more tools. `required` means - # the model must call one or more tools. Specifying a particular tool via - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. # - # `none` is the default when no tools are present. `auto` is the default if tools - # are present. + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union @@ -22,8 +22,8 @@ module ChatCompletionToolChoiceOption variant -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice } # `none` means the model will not call any tool and instead generates a message. - # `auto` means the model can pick between generating a message or calling one or - # more tools. `required` means the model must call one or more tools. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools. module Auto extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index ba2c2dbe..6cf8585a 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -18,7 +18,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute [r] name # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. 
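For example (a sketch; the participant names are arbitrary), `name` lets two participants share the `user` role while staying distinguishable to the model:

    messages = [
      {role: :user, name: "alice", content: "I vote for option A."},
      {role: :user, name: "bob", content: "Respectfully, option B."}
    ]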
# # @return [String, nil] optional :name, String @@ -29,7 +29,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!parse # # Messages sent by an end user, containing prompts or additional context - # # information. + # # information. # # # # @param content [String, Array] # # @param name [String] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index e8aed051..65406125 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -13,11 +13,11 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute messages # A list of messages comprising the conversation so far. Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). # # @return [Array] required :messages, @@ -25,26 +25,26 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. # # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Models::Chat::CompletionCreateParams::Model } # @!attribute audio # Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). # # @return [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudioParam }, nil?: true # @!attribute frequency_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. # # @return [Float, nil] optional :frequency_penalty, Float, nil?: true @@ -52,18 +52,18 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] function_call # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. 
# - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] optional :function_call, union: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall } @@ -75,7 +75,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] functions # Deprecated in favor of `tools`. # - # A list of functions the model may generate JSON inputs for. + # A list of functions the model may generate JSON inputs for. # # @return [Array, nil] optional :functions, @@ -88,66 +88,66 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute logit_bias # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. # # @return [Hash{Symbol=>Integer}, nil] optional :logit_bias, OpenAI::Internal::Type::HashOf[Integer], nil?: true # @!attribute logprobs # Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. + # returns the log probabilities of each output token returned in the `content` of + # `message`. # # @return [Boolean, nil] optional :logprobs, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute max_completion_tokens # An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). # # @return [Integer, nil] optional :max_completion_tokens, Integer, nil?: true # @!attribute max_tokens # The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. + # completion. 
This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). # # @return [Integer, nil] optional :max_tokens, Integer, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute modalities # Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: + # of generating text, which is the default: # - # `["text"]` + # `["text"]` # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). To request that - # this model generate both text and audio responses, you can use: + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: # - # `["text", "audio"]` + # `["text", "audio"]` # # @return [Array, nil] optional :modalities, @@ -156,16 +156,16 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute n # How many chat completion choices to generate for each input message. Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. # # @return [Integer, nil] optional :n, Integer, nil?: true # @!attribute [r] parallel_tool_calls # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. # # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean @@ -176,15 +176,15 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute prediction # Static predicted output content, such as the content of a text file that is - # being regenerated. + # being regenerated. # # @return [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] optional :prediction, -> { OpenAI::Models::Chat::ChatCompletionPredictionContent }, nil?: true # @!attribute presence_penalty # Number between -2.0 and 2.0. 
Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. # # @return [Float, nil] optional :presence_penalty, Float, nil?: true @@ -192,10 +192,10 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true @@ -203,14 +203,14 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] response_format # An object specifying the format that the model must output. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Models::Chat::CompletionCreateParams::ResponseFormat } @@ -221,44 +221,44 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute seed # This feature is in Beta. If specified, our system will make a best effort to - # sample deterministically, such that repeated requests with the same `seed` and - # parameters should return the same result. Determinism is not guaranteed, and you - # should refer to the `system_fingerprint` response parameter to monitor changes - # in the backend. + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. # # @return [Integer, nil] optional :seed, Integer, nil?: true # @!attribute service_tier # Specifies the latency tier to use for processing the request. 
This parameter is
-      # relevant for customers subscribed to the scale tier service:
+      # relevant for customers subscribed to the scale tier service:
       #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarentee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarentee.
-      # - When not set, the default behavior is 'auto'.
+      # - If set to 'auto', and the Project is Scale tier enabled, the system will
+      #   utilize scale tier credits until they are exhausted.
+      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+      #   be processed using the default service tier with a lower uptime SLA and no
+      #   latency guarantee.
+      # - If set to 'default', the request will be processed using the default service
+      #   tier with a lower uptime SLA and no latency guarantee.
+      # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When this parameter is set, the response body will include the `service_tier`
+      # utilized.
       #
       # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil]
       optional :service_tier, enum: -> { OpenAI::Models::Chat::CompletionCreateParams::ServiceTier }, nil?: true

       # @!attribute stop
       # Up to 4 sequences where the API will stop generating further tokens. The
-      # returned text will not contain the stop sequence.
+      # returned text will not contain the stop sequence.
       #
       # @return [String, Array, nil]
       optional :stop, union: -> { OpenAI::Models::Chat::CompletionCreateParams::Stop }, nil?: true

       # @!attribute store
       # Whether or not to store the output of this chat completion request for use in
-      # our [model distillation](https://platform.openai.com/docs/guides/distillation)
-      # or [evals](https://platform.openai.com/docs/guides/evals) products.
+      # our [model distillation](https://platform.openai.com/docs/guides/distillation)
+      # or [evals](https://platform.openai.com/docs/guides/evals) products.
       #
       # @return [Boolean, nil]
       optional :store, OpenAI::Internal::Type::Boolean, nil?: true
@@ -271,23 +271,23 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel

       # @!attribute temperature
       # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-      # make the output more random, while lower values like 0.2 will make it more
-      # focused and deterministic. We generally recommend altering this or `top_p` but
-      # not both.
+      # make the output more random, while lower values like 0.2 will make it more
+      # focused and deterministic. We generally recommend altering this or `top_p` but
+      # not both.
       #
       # @return [Float, nil]
       optional :temperature, Float, nil?: true

       # @!attribute [r] tool_choice
       # Controls which (if any) tool is called by the model. `none` means the model will
-      # not call any tool and instead generates a message.
`auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. # - # `none` is the default when no tools are present. `auto` is the default if tools - # are present. + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption } @@ -298,8 +298,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] tools # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } @@ -310,26 +310,26 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to - # return at each token position, each with an associated log probability. - # `logprobs` must be set to `true` if this parameter is used. + # return at each token position, each with an associated log probability. + # `logprobs` must be set to `true` if this parameter is used. # # @return [Integer, nil] optional :top_logprobs, Integer, nil?: true # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -340,8 +340,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] web_search_options # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
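A hedged sketch of a request using these options (the client construction and model choice are assumptions; only the keyword names appear in this diff):

    response = client.chat.completions.create(
      model: "gpt-4o-search-preview",    # assumed search-capable model
      messages: [{role: :user, content: "What changed in Ruby 3.4?"}],
      web_search_options: {
        search_context_size: :medium,    # low / medium (default) / high
        user_location: {
          type: :approximate,
          approximate: {country: "US", timezone: "America/Los_Angeles"}
        }
      }
    )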
# # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } @@ -423,10 +423,10 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. module Model extend OpenAI::Internal::Type::Union @@ -447,18 +447,18 @@ module Model # # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. # - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. module FunctionCall extend OpenAI::Internal::Type::Union @@ -469,8 +469,8 @@ module FunctionCall variant -> { OpenAI::Models::Chat::ChatCompletionFunctionCallOption } # `none` means the model will not call a function and instead generates a message. - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. module FunctionCallMode extend OpenAI::Internal::Type::Enum @@ -493,14 +493,14 @@ module FunctionCallMode class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain - # underscores and dashes, with a maximum length of 64. + # underscores and dashes, with a maximum length of 64. # # @return [String] required :name, String # @!attribute [r] description # A description of what the function does, used by the model to choose when and - # how to call the function. + # how to call the function. # # @return [String, nil] optional :description, String @@ -511,12 +511,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @!attribute [r] parameters # The parameters the functions accepts, described as a JSON Schema object. See the - # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, - # and the - # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - # documentation about the format. 
+      # [guide](https://platform.openai.com/docs/guides/function-calling) for examples,
+      # and the
+      # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
+      # documentation about the format.
       #
-      # Omitting `parameters` defines a function with an empty parameter list.
+      # Omitting `parameters` defines a function with an empty parameter list.
       #
       # @return [Hash{Symbol=>Object}, nil]
       optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
@@ -550,14 +550,14 @@ module Modality

       # An object specifying the format that the model must output.
       #
-      # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
-      # Outputs which ensures the model will match your supplied JSON schema. Learn more
-      # in the
-      # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+      # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+      # Outputs which ensures the model will match your supplied JSON schema. Learn more
+      # in the
+      # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
       #
-      # Setting to `{ "type": "json_object" }` enables the older JSON mode, which
-      # ensures the message the model generates is valid JSON. Using `json_schema` is
-      # preferred for models that support it.
+      # Setting to `{ "type": "json_object" }` enables the older JSON mode, which
+      # ensures the message the model generates is valid JSON. Using `json_schema` is
+      # preferred for models that support it.
      module ResponseFormat
        extend OpenAI::Internal::Type::Union
@@ -580,19 +580,19 @@ module ResponseFormat
      end

      # Specifies the latency tier to use for processing the request. This parameter is
-     # relevant for customers subscribed to the scale tier service:
-     #
-     # - If set to 'auto', and the Project is Scale tier enabled, the system will
-     #   utilize scale tier credits until they are exhausted.
-     # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-     #   be processed using the default service tier with a lower uptime SLA and no
-     #   latency guarentee.
-     # - If set to 'default', the request will be processed using the default service
-     #   tier with a lower uptime SLA and no latency guarentee.
-     # - When not set, the default behavior is 'auto'.
-     #
-     # When this parameter is set, the response body will include the `service_tier`
-     # utilized.
+     # relevant for customers subscribed to the scale tier service:
+     #
+     # - If set to 'auto', and the Project is Scale tier enabled, the system will
+     #   utilize scale tier credits until they are exhausted.
+     # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+     #   be processed using the default service tier with a lower uptime SLA and no
+     #   latency guarantee.
+     # - If set to 'default', the request will be processed using the default service
+     #   tier with a lower uptime SLA and no latency guarantee.
+     # - When not set, the default behavior is 'auto'.
+     #
+     # When this parameter is set, the response body will include the `service_tier`
+     # utilized.
      module ServiceTier
        extend OpenAI::Internal::Type::Enum
@@ -607,7 +607,7 @@ module ServiceTier
      end

      # Up to 4 sequences where the API will stop generating further tokens. The
-     # returned text will not contain the stop sequence.
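Since `Stop` is a union of a single string and an array of strings, both forms below should be accepted (a sketch; `client` and `messages` are assumed, and the sequences are arbitrary):

    client.chat.completions.create(model: "gpt-4o", messages: messages, stop: "\n\n")
    client.chat.completions.create(model: "gpt-4o", messages: messages, stop: ["END", "\n\n", "###"])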
module Stop extend OpenAI::Internal::Type::Union @@ -625,7 +625,7 @@ module Stop class WebSearchOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] search_context_size # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] optional :search_context_size, @@ -645,8 +645,8 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel # @!parse # # This tool searches the web for relevant results to use in a response. Learn more - # # about the - # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # # about the + # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # # # # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] # # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] @@ -656,7 +656,7 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize @@ -712,7 +712,7 @@ class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute [r] country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of - # the user, e.g. `US`. + # the user, e.g. `US`. # # @return [String, nil] optional :country, String @@ -733,7 +733,7 @@ class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute [r] timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the - # user, e.g. `America/Los_Angeles`. + # user, e.g. `America/Los_Angeles`. # # @return [String, nil] optional :timezone, String diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 5e2f39ec..dede958c 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -32,7 +32,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # A list of metadata keys to filter the Chat Completions by. Example: # - # `metadata[key1]=value1&metadata[key2]=value2` + # `metadata[key1]=value1&metadata[key2]=value2` # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -49,7 +49,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. + # `desc` for descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::CompletionListParams::Order } @@ -71,7 +71,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order for Chat Completions by timestamp. 
Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. + # `desc` for descending order. Defaults to `asc`. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 54a9a688..0a8504f8 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -11,11 +11,11 @@ class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 18970d11..eca322af 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -32,7 +32,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. + # for descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::Completions::MessageListParams::Order } @@ -52,7 +52,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. + # for descending order. Defaults to `asc`. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 1c2b3a50..13134827 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -12,26 +12,26 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # - # - `eq`: equals - # - `ne`: not equal - # - `gt`: greater than - # - `gte`: greater than or equal - # - `lt`: less than - # - `lte`: less than or equal + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal # # @return [Symbol, OpenAI::Models::ComparisonFilter::Type] required :type, enum: -> { OpenAI::Models::ComparisonFilter::Type } # @!attribute value # The value to compare against the attribute key; supports string, number, or - # boolean types. + # boolean types. 
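As a sketch, a comparison filter constraining an `author` attribute (the key and value are hypothetical; the field names follow the model above):

    filter = {
      key: "author",
      type: :eq,          # one of `eq`, `ne`, `gt`, `gte`, `lt`, `lte`
      value: "Jane Doe"   # String, Float, or Boolean per the union above
    }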
# # @return [String, Float, Boolean] required :value, union: -> { OpenAI::Models::ComparisonFilter::Value } # @!parse # # A filter used to compare a specified attribute key to a given value using a - # # defined comparison operation. + # # defined comparison operation. # # # # @param key [String] # # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] @@ -43,12 +43,12 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # - # - `eq`: equals - # - `ne`: not equal - # - `gt`: greater than - # - `gte`: greater than or equal - # - `lt`: less than - # - `lte`: less than or equal + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal # # @see OpenAI::Models::ComparisonFilter#type module Type @@ -69,7 +69,7 @@ module Type end # The value to compare against the attribute key; supports string, number, or - # boolean types. + # boolean types. # # @see OpenAI::Models::ComparisonFilter#value module Value diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 49f1d4c1..6062d60a 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -39,8 +39,8 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute [r] system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. # # @return [String, nil] optional :system_fingerprint, String @@ -61,7 +61,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!parse # # Represents a completion response from the API. Note: both the streamed and - # # non-streamed response objects share the same shape (unlike the chat endpoint). + # # non-streamed response objects share the same shape (unlike the chat endpoint). # # # # @param id [String] # # @param choices [Array] diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 1a2084f4..07081468 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -5,9 +5,9 @@ module Models class CompletionChoice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, or `content_filter` if - # content was omitted due to a flag from our content filters. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. # # @return [Symbol, OpenAI::Models::CompletionChoice::FinishReason] required :finish_reason, enum: -> { OpenAI::Models::CompletionChoice::FinishReason } @@ -38,9 +38,9 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The reason the model stopped generating tokens. 
This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, or `content_filter` if - # content was omitted due to a flag from our content filters. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. # # @see OpenAI::Models::CompletionChoice#finish_reason module FinishReason diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index c8920c94..a2931ac5 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -12,35 +12,35 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute model # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. # # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] required :model, union: -> { OpenAI::Models::CompletionCreateParams::Model } # @!attribute prompt # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. # # @return [String, Array, Array, Array>, nil] required :prompt, union: -> { OpenAI::Models::CompletionCreateParams::Prompt }, nil?: true # @!attribute best_of # Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. + # the highest log probability per token). Results cannot be streamed. # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. # # @return [Integer, nil] optional :best_of, Integer, nil?: true @@ -53,10 +53,10 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute frequency_penalty # Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their
-      # existing frequency in the text so far, decreasing the model's likelihood to
-      # repeat the same line verbatim.
+      # existing frequency in the text so far, decreasing the model's likelihood to
+      # repeat the same line verbatim.
       #
-      # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
+      # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
       #
       # @return [Float, nil]
       optional :frequency_penalty, Float, nil?: true
@@ -64,39 +64,39 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel

       # @!attribute logit_bias
       # Modify the likelihood of specified tokens appearing in the completion.
       #
-      # Accepts a JSON object that maps tokens (specified by their token ID in the GPT
-      # tokenizer) to an associated bias value from -100 to 100. You can use this
-      # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs.
-      # Mathematically, the bias is added to the logits generated by the model prior to
-      # sampling. The exact effect will vary per model, but values between -1 and 1
-      # should decrease or increase likelihood of selection; values like -100 or 100
-      # should result in a ban or exclusive selection of the relevant token.
+      # Accepts a JSON object that maps tokens (specified by their token ID in the GPT
+      # tokenizer) to an associated bias value from -100 to 100. You can use this
+      # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs.
+      # Mathematically, the bias is added to the logits generated by the model prior to
+      # sampling. The exact effect will vary per model, but values between -1 and 1
+      # should decrease or increase likelihood of selection; values like -100 or 100
+      # should result in a ban or exclusive selection of the relevant token.
       #
-      # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
-      # from being generated.
+      # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token
+      # from being generated.
       #
       # @return [Hash{Symbol=>Integer}, nil]
       optional :logit_bias, OpenAI::Internal::Type::HashOf[Integer], nil?: true

       # @!attribute logprobs
       # Include the log probabilities on the `logprobs` most likely output tokens, as
-      # well the chosen tokens. For example, if `logprobs` is 5, the API will return a
-      # list of the 5 most likely tokens. The API will always return the `logprob` of
-      # the sampled token, so there may be up to `logprobs+1` elements in the response.
+      # well as the chosen tokens. For example, if `logprobs` is 5, the API will return a
+      # list of the 5 most likely tokens. The API will always return the `logprob` of
+      # the sampled token, so there may be up to `logprobs+1` elements in the response.
       #
-      # The maximum value for `logprobs` is 5.
+      # The maximum value for `logprobs` is 5.
       #
       # @return [Integer, nil]
       optional :logprobs, Integer, nil?: true

       # @!attribute max_tokens
       # The maximum number of [tokens](/tokenizer) that can be generated in the
-      # completion.
       #
-      # The token count of your prompt plus `max_tokens` cannot exceed the model's
-      # context length.
+ # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. # # @return [Integer, nil] optional :max_tokens, Integer, nil?: true @@ -104,37 +104,37 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute n # How many completions to generate for each prompt. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. # # @return [Integer, nil] optional :n, Integer, nil?: true # @!attribute presence_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) # # @return [Float, nil] optional :presence_penalty, Float, nil?: true # @!attribute seed # If specified, our system will make a best effort to sample deterministically, - # such that repeated requests with the same `seed` and parameters should return - # the same result. + # such that repeated requests with the same `seed` and parameters should return + # the same result. # - # Determinism is not guaranteed, and you should refer to the `system_fingerprint` - # response parameter to monitor changes in the backend. + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. # # @return [Integer, nil] optional :seed, Integer, nil?: true # @!attribute stop # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. # # @return [String, Array, nil] optional :stop, union: -> { OpenAI::Models::CompletionCreateParams::Stop }, nil?: true @@ -148,35 +148,35 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute suffix # The suffix that comes after a completion of inserted text. # - # This parameter is only supported for `gpt-3.5-turbo-instruct`. + # This parameter is only supported for `gpt-3.5-turbo-instruct`. # # @return [String, nil] optional :suffix, String, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # - # We generally recommend altering this or `top_p` but not both. + # We generally recommend altering this or `top_p` but not both. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. 
So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -232,10 +232,10 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -261,11 +261,11 @@ module Model end # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. module Prompt extend OpenAI::Internal::Type::Union @@ -289,7 +289,7 @@ module Prompt end # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. module Stop extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index bdcff97d..0f098720 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -67,7 +67,7 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that - # appeared in the completion. + # appeared in the completion. # # @return [Integer, nil] optional :accepted_prediction_tokens, Integer @@ -98,9 +98,9 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute [r] rejected_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that did - # not appear in the completion. However, like reasoning tokens, these tokens are - # still counted in the total completion tokens for purposes of billing, output, - # and context window limits. + # not appear in the completion. 
However, like reasoning tokens, these tokens are + # still counted in the total completion tokens for purposes of billing, output, + # and context window limits. # # @return [Integer, nil] optional :rejected_prediction_tokens, Integer diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index fa9859e2..57314aa0 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -5,7 +5,7 @@ module Models class CompoundFilter < OpenAI::Internal::Type::BaseModel # @!attribute filters # Array of filters to combine. Items can be `ComparisonFilter` or - # `CompoundFilter`. + # `CompoundFilter`. # # @return [Array] required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::CompoundFilter::Filter] } @@ -27,7 +27,7 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # A filter used to compare a specified attribute key to a given value using a - # defined comparison operation. + # defined comparison operation. module Filter extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index d71f4752..75d1aebf 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -5,8 +5,8 @@ module Models class Embedding < OpenAI::Internal::Type::BaseModel # @!attribute embedding # The embedding vector, which is a list of floats. The length of vector depends on - # the model as listed in the - # [embedding guide](https://platform.openai.com/docs/guides/embeddings). + # the model as listed in the + # [embedding guide](https://platform.openai.com/docs/guides/embeddings). # # @return [Array] required :embedding, OpenAI::Internal::Type::ArrayOf[Float] diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 4705cd60..3f26541e 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -10,30 +10,30 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute input # Input text to embed, encoded as a string or array of tokens. To embed multiple - # inputs in a single request, pass an array of strings or array of token arrays. - # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 - # dimensions or less. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. # # @return [String, Array, Array, Array>] required :input, union: -> { OpenAI::Models::EmbeddingCreateParams::Input } # @!attribute model # ID of the model to use. 
You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. # # @return [String, Symbol, OpenAI::Models::EmbeddingModel] required :model, union: -> { OpenAI::Models::EmbeddingCreateParams::Model } # @!attribute [r] dimensions # The number of dimensions the resulting output embeddings should have. Only - # supported in `text-embedding-3` and later models. + # supported in `text-embedding-3` and later models. # # @return [Integer, nil] optional :dimensions, Integer @@ -44,7 +44,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] encoding_format # The format to return the embeddings in. Can be either `float` or - # [`base64`](https://pypi.org/project/pybase64/). + # [`base64`](https://pypi.org/project/pybase64/). # # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil] optional :encoding_format, enum: -> { OpenAI::Models::EmbeddingCreateParams::EncodingFormat } @@ -55,8 +55,8 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -78,13 +78,13 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Input text to embed, encoded as a string or array of tokens. To embed multiple - # inputs in a single request, pass an array of strings or array of token arrays. - # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 - # dimensions or less. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. module Input extend OpenAI::Internal::Type::Union @@ -112,10 +112,10 @@ module Input end # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. 
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -130,7 +130,7 @@ module Model end # The format to return the embeddings in. Can be either `float` or - # [`base64`](https://pypi.org/project/pybase64/). + # [`base64`](https://pypi.org/project/pybase64/). module EncodingFormat extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 6652fa42..8a671209 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -3,7 +3,7 @@ module OpenAI module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. module FileChunkingStrategyParam extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 43683821..a3bc12dd 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -16,9 +16,9 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute purpose # The intended purpose of the uploaded file. One of: - `assistants`: Used in the - # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for - # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: - # Flexible file type for any purpose - `evals`: Used for eval data sets + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets # # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index d5a7bce4..ccc569bf 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -10,9 +10,9 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -23,7 +23,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 10,000, and the default is 10,000. + # 10,000, and the default is 10,000. # # @return [Integer, nil] optional :limit, Integer @@ -34,7 +34,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
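The `after` cursor composes with `limit` into the usual pagination loop. A sketch, assuming a generated `client.files.list` resource method and a plain `data` array on the page (the SDK may additionally expose auto-paginating enumerators).

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY")) # assumed entry point

after = nil
loop do
  page = client.files.list(limit: 100, order: :desc, after: after)
  page.data.each { |file| puts "#{file.id}\t#{file.purpose}" }
  break if page.data.empty?
  after = page.data.last.id # e.g. after=obj_foo resumes just past obj_foo
end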
# # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::FileListParams::Order } @@ -65,7 +65,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index d25bcfa7..d694613d 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -36,15 +36,15 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @!attribute purpose # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. # # @return [Symbol, OpenAI::Models::FileObject::Purpose] required :purpose, enum: -> { OpenAI::Models::FileObject::Purpose } # @!attribute status # Deprecated. The current status of the file, which can be either `uploaded`, - # `processed`, or `error`. + # `processed`, or `error`. # # @return [Symbol, OpenAI::Models::FileObject::Status] required :status, enum: -> { OpenAI::Models::FileObject::Status } @@ -61,7 +61,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @!attribute [r] status_details # Deprecated. For details on why a fine-tuning training file failed validation, - # see the `error` field on `fine_tuning.job`. + # see the `error` field on `fine_tuning.job`. # # @return [String, nil] optional :status_details, String @@ -101,8 +101,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. # # @see OpenAI::Models::FileObject#purpose module Purpose @@ -126,7 +126,7 @@ module Purpose # @deprecated # # Deprecated. The current status of the file, which can be either `uploaded`, - # `processed`, or `error`. + # `processed`, or `error`. # # @see OpenAI::Models::FileObject#status module Status diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index 3b9a9976..c11caef0 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -3,9 +3,9 @@ module OpenAI module Models # The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the - # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for - # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: - # Flexible file type for any purpose - `evals`: Used for eval data sets + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets module FilePurpose extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 9b72f23f..b92146f5 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -19,28 +19,28 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute error # For fine-tuning jobs that have `failed`, this will contain more information on - # the cause of the failure. + # the cause of the failure. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] required :error, -> { OpenAI::Models::FineTuning::FineTuningJob::Error }, nil?: true # @!attribute fine_tuned_model # The name of the fine-tuned model that is being created. The value will be null - # if the fine-tuning job is still running. + # if the fine-tuning job is still running. # # @return [String, nil] required :fine_tuned_model, String, nil?: true # @!attribute finished_at # The Unix timestamp (in seconds) for when the fine-tuning job was finished. The - # value will be null if the fine-tuning job is still running. + # value will be null if the fine-tuning job is still running. # # @return [Integer, nil] required :finished_at, Integer, nil?: true # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. This value will only be - # returned when running `supervised` jobs. + # returned when running `supervised` jobs. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] required :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters } @@ -65,8 +65,8 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute result_files # The compiled results file ID(s) for the fine-tuning job. You can retrieve the - # results with the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # results with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). # # @return [Array] required :result_files, OpenAI::Internal::Type::ArrayOf[String] @@ -79,36 +79,36 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute status # The current status of the fine-tuning job, which can be either - # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] required :status, enum: -> { OpenAI::Models::FineTuning::FineTuningJob::Status } # @!attribute trained_tokens # The total number of billable tokens processed by this fine-tuning job. The value - # will be null if the fine-tuning job is still running. + # will be null if the fine-tuning job is still running. # # @return [Integer, nil] required :trained_tokens, Integer, nil?: true # @!attribute training_file # The file ID used for training. 
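`status` drives the job lifecycle described above, and `fine_tuned_model`/`finished_at` stay null until the job completes. A polling sketch, assuming a generated `client.fine_tuning.jobs.retrieve` resource method and a hypothetical job ID.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY")) # assumed entry point

job = client.fine_tuning.jobs.retrieve("ftjob-abc123")            # hypothetical ID
until %i[succeeded failed cancelled].include?(job.status)
  sleep 30
  job = client.fine_tuning.jobs.retrieve(job.id)
end
raise "fine-tune failed: #{job.error&.message}" if job.status == :failed
puts job.fine_tuned_model # remains null while the job is still running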
You can retrieve the training data with the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). # # @return [String] required :training_file, String # @!attribute validation_file # The file ID used for validation. You can retrieve the validation results with - # the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). # # @return [String, nil] required :validation_file, String, nil?: true # @!attribute estimated_finish # The Unix timestamp (in seconds) for when the fine-tuning job is estimated to - # finish. The value will be null if the fine-tuning job is not running. + # finish. The value will be null if the fine-tuning job is not running. # # @return [Integer, nil] optional :estimated_finish, Integer, nil?: true @@ -123,11 +123,11 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -144,7 +144,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!parse # # The `fine_tuning.job` object represents a fine-tuning job that has been created - # # through the API. + # # through the API. # # # # @param id [String] # # @param created_at [Integer] @@ -209,14 +209,14 @@ class Error < OpenAI::Internal::Type::BaseModel # @!attribute param # The parameter that was invalid, usually `training_file` or `validation_file`. - # This field will be null if the failure was not parameter-specific. + # This field will be null if the failure was not parameter-specific. # # @return [String, nil] required :param, String, nil?: true # @!parse # # For fine-tuning jobs that have `failed`, this will contain more information on - # # the cause of the failure. + # # the cause of the failure. # # # # @param code [String] # # @param message [String] @@ -231,7 +231,7 @@ class Error < OpenAI::Internal::Type::BaseModel class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::BatchSize } @@ -242,7 +242,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. 
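Every hyperparameter here is an `auto`-or-number union, so `:auto` and literal values mix freely. A sketch of the shape; per the signatures above, the model's `initialize` accepts a plain Hash.

hyperparameters = OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters.new(
  batch_size: :auto,              # let the API choose, or pass an Integer
  learning_rate_multiplier: 0.05, # smaller values guard against overfitting
  n_epochs: 3                     # full passes through the training dataset
)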
# # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -254,7 +254,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::NEpochs } @@ -265,7 +265,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!parse # # The hyperparameters used for the fine-tuning job. This value will only be - # # returned when running `supervised` jobs. + # # returned when running `supervised` jobs. # # # # @param batch_size [Symbol, :auto, Integer] # # @param learning_rate_multiplier [Symbol, :auto, Float] @@ -276,7 +276,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize @@ -292,7 +292,7 @@ module BatchSize end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -308,7 +308,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs @@ -325,7 +325,7 @@ module NEpochs end # The current status of the fine-tuning job, which can be either - # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. # # @see OpenAI::Models::FineTuning::FineTuningJob#status module Status @@ -413,7 +413,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, @@ -425,7 +425,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] beta # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. # # @return [Symbol, :auto, Float, nil] optional :beta, @@ -437,7 +437,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -449,7 +449,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. 
An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, @@ -472,7 +472,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size module BatchSize @@ -488,7 +488,7 @@ module BatchSize end # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta module Beta @@ -504,7 +504,7 @@ module Beta end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -520,7 +520,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs module NEpochs @@ -563,7 +563,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, @@ -575,7 +575,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -587,7 +587,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, @@ -609,7 +609,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size module BatchSize @@ -625,7 +625,7 @@ module BatchSize end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -641,7 +641,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. 
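DPO adds `beta` on top of the shared hyperparameters: higher values penalize drift from the reference model more strongly. A sketch of a `method` payload; the outer `type`/`dpo` field names are assumed from the class layout, since only the nested classes are visible in this hunk.

method = {
  type: "dpo",            # assumed discriminator field
  dpo: {
    hyperparameters: {
      beta: :auto,        # weight of the policy-vs-reference penalty
      batch_size: :auto,
      n_epochs: 2
    }
  }
}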
An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs module NEpochs diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 5b1dc8c8..603de792 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -12,23 +12,23 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!attribute entity # The entity to use for the run. This allows you to set the team or username of - # the WandB user that you would like associated with the run. If not set, the - # default entity for the registered WandB API key is used. + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. # # @return [String, nil] optional :entity, String, nil?: true # @!attribute name # A display name to set for the run. If not set, we will use the Job ID as the - # name. + # name. # # @return [String, nil] optional :name, String, nil?: true # @!attribute [r] tags # A list of tags to be attached to the newly created run. These tags are passed - # through directly to WandB. Some default tags are generated by OpenAI: - # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". # # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] @@ -39,9 +39,9 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!parse # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. + # # specifies the project that metrics will be sent to. Optionally, you can set an + # # explicit display name for your run, add tags to your run, and set a default + # # entity (team, username, etc) to be associated with your run. # # # # @param project [String] # # @param entity [String, nil] diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 05920c94..3554c944 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -12,9 +12,9 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # @!attribute wandb # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. 
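Putting the Weights and Biases pieces together, an integration entry pairs the `wandb` settings above with the `"wandb"` type constant defined further down in this patch.

integration = {
  type: "wandb",                       # currently the only supported integration type
  wandb: {
    project: "my-finetunes",           # required: the W&B project receiving metrics
    entity: "ml-team",                 # optional team/username; key's default if unset
    name: "gpt-4o-mini-support-bot",   # display name; the Job ID is used when omitted
    tags: ["experiment-7"]             # passed through; OpenAI adds openai/* defaults
  }
}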
# # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] required :wandb, -> { OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 89dcb87d..df3de345 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -11,7 +11,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute model # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). # # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] required :model, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Model } @@ -19,28 +19,28 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute training_file # The ID of an uploaded file that contains training data. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your dataset must be formatted as a JSONL file. Additionally, you must upload - # your file with the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. # - # The contents of the file should differ depending on if the model uses the - # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), - # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) - # format, or if the fine-tuning method uses the - # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) - # format. + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. # # @return [String] required :training_file, String # @!attribute [r] hyperparameters # The hyperparameters used for the fine-tuning job. This value is now deprecated - # in favor of `method`, and should be passed in under the `method` parameter. + # in favor of `method`, and should be passed in under the `method` parameter. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters } @@ -59,11 +59,11 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. 
+ # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -80,18 +80,18 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute seed # The seed controls the reproducibility of the job. Passing in the same seed and - # job parameters should produce the same results, but may differ in rare cases. If - # a seed is not specified, one will be generated for you. + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. # # @return [Integer, nil] optional :seed, Integer, nil?: true # @!attribute suffix # A string of up to 64 characters that will be added to your fine-tuned model - # name. + # name. # - # For example, a `suffix` of "custom-model-name" would produce a model name like - # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. # # @return [String, nil] optional :suffix, String, nil?: true @@ -99,16 +99,16 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute validation_file # The ID of an uploaded file that contains validation data. # - # If you provide this file, the data is used to generate validation metrics - # periodically during fine-tuning. These metrics can be viewed in the fine-tuning - # results file. The same data should not be present in both train and validation - # files. + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. # - # Your dataset must be formatted as a JSONL file. You must upload your file with - # the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. # # @return [String, nil] optional :validation_file, String, nil?: true @@ -144,7 +144,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). module Model extend OpenAI::Internal::Type::Union @@ -176,7 +176,7 @@ module Model class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. 
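A sketch tying the creation parameters together, assuming the usual `client.fine_tuning.jobs.create` resource method; the file IDs are hypothetical.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY")) # assumed entry point

job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini",
  training_file: "file-abc123",   # JSONL, uploaded with purpose "fine-tune"
  validation_file: "file-def456", # optional; must not overlap the training data
  suffix: "custom-model-name",    # -> ft:gpt-4o-mini:openai:custom-model-name:...
  seed: 42,                       # best-effort reproducibility
  metadata: { team: "support" }   # <= 16 pairs; 64-char keys, 512-char values
)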
# # @return [Symbol, :auto, Integer, nil] optional :batch_size, @@ -188,7 +188,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -200,7 +200,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::NEpochs } @@ -211,7 +211,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!parse # # The hyperparameters used for the fine-tuning job. This value is now deprecated - # # in favor of `method`, and should be passed in under the `method` parameter. + # # in favor of `method`, and should be passed in under the `method` parameter. # # # # @param batch_size [Symbol, :auto, Integer] # # @param learning_rate_multiplier [Symbol, :auto, Float] @@ -222,7 +222,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize @@ -238,7 +238,7 @@ module BatchSize end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -254,7 +254,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs @@ -273,16 +273,16 @@ module NEpochs class Integration < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of integration to enable. Currently, only "wandb" (Weights and Biases) - # is supported. + # is supported. # # @return [Symbol, :wandb] required :type, const: :wandb # @!attribute wandb # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] required :wandb, -> { OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } @@ -305,23 +305,23 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @!attribute entity # The entity to use for the run. This allows you to set the team or username of - # the WandB user that you would like associated with the run. 
If not set, the - # default entity for the registered WandB API key is used. + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. # # @return [String, nil] optional :entity, String, nil?: true # @!attribute name # A display name to set for the run. If not set, we will use the Job ID as the - # name. + # name. # # @return [String, nil] optional :name, String, nil?: true # @!attribute [r] tags # A list of tags to be attached to the newly created run. These tags are passed - # through directly to WandB. Some default tags are generated by OpenAI: - # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". # # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] @@ -332,9 +332,9 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @!parse # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. + # # specifies the project that metrics will be sent to. Optionally, you can set an + # # explicit display name for your run, add tags to your run, and set a default + # # entity (team, username, etc) to be associated with your run. # # # # @param project [String] # # @param entity [String, nil] @@ -415,7 +415,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, @@ -427,7 +427,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] beta # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. # # @return [Symbol, :auto, Float, nil] optional :beta, @@ -439,7 +439,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -451,7 +451,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, @@ -474,7 +474,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. 
# # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size module BatchSize @@ -490,7 +490,7 @@ module BatchSize end # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta module Beta @@ -506,7 +506,7 @@ module Beta end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -522,7 +522,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs module NEpochs @@ -565,7 +565,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] batch_size # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, @@ -577,7 +577,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, @@ -589,7 +589,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute [r] n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, @@ -611,7 +611,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size module BatchSize @@ -627,7 +627,7 @@ module BatchSize end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. # # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier @@ -643,7 +643,7 @@ module LearningRateMultiplier end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. 
# # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs module NEpochs diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index e0ffb31f..1ca2c3a3 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -31,7 +31,7 @@ class JobListParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. - # Alternatively, set `metadata=null` to indicate no metadata. + # Alternatively, set `metadata=null` to indicate no metadata. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 0af58cbe..e32a2926 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -50,7 +50,7 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @!parse # # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a - # # fine-tuning job that is ready to use. + # # fine-tuning job that is ready to use. # # # # @param id [String] # # @param created_at [Integer] diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index e024d931..7bd7f7d5 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -5,14 +5,14 @@ module Models class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain - # underscores and dashes, with a maximum length of 64. + # underscores and dashes, with a maximum length of 64. # # @return [String] required :name, String # @!attribute [r] description # A description of what the function does, used by the model to choose when and - # how to call the function. + # how to call the function. # # @return [String, nil] optional :description, String @@ -23,12 +23,12 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!attribute [r] parameters # The parameters the functions accepts, described as a JSON Schema object. See the - # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, - # and the - # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - # documentation about the format. + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. # - # Omitting `parameters` defines a function with an empty parameter list. + # Omitting `parameters` defines a function with an empty parameter list. # # @return [Hash{Symbol=>Object}, nil] optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] @@ -39,10 +39,10 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!attribute strict # Whether to enable strict schema adherence when generating the function call. If - # set to true, the model will follow the exact schema defined in the `parameters` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. 
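With `strict: true` the model must emit arguments matching the schema exactly, which in practice also means closing the object with `additionalProperties: false` (an API-side structured-outputs rule, not visible in this hunk). A sketch:

weather_fn = OpenAI::Models::FunctionDefinition.new(
  name: "get_weather",             # a-z, A-Z, 0-9, _ and -; max 64 chars
  description: "Look up the current weather for a city",
  strict: true,
  parameters: {                    # omitting this declares a zero-argument function
    type: "object",
    properties: { city: { type: "string" } },
    required: ["city"],
    additionalProperties: false    # required under strict mode (assumption noted above)
  }
)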
Learn - # more about Structured Outputs in the - # [function calling guide](docs/guides/function-calling). + # set to true, the model will follow the exact schema defined in the `parameters` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + # more about Structured Outputs in the + # [function calling guide](docs/guides/function-calling). # # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index e3842efd..dddf163d 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -5,7 +5,7 @@ module Models class Image < OpenAI::Internal::Type::BaseModel # @!attribute [r] b64_json # The base64-encoded JSON of the generated image, if `response_format` is - # `b64_json`. + # `b64_json`. # # @return [String, nil] optional :b64_json, String @@ -16,7 +16,7 @@ class Image < OpenAI::Internal::Type::BaseModel # @!attribute [r] revised_prompt # The prompt that was used to generate the image, if there was any revision to the - # prompt. + # prompt. # # @return [String, nil] optional :revised_prompt, String diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 06bd032f..a24a9d51 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -10,29 +10,29 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @!attribute image # The image to use as the basis for the variation(s). Must be a valid PNG file, - # less than 4MB, and square. + # less than 4MB, and square. # # @return [IO, StringIO] required :image, IO # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. # # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::Models::ImageCreateVariationParams::Model }, nil?: true # @!attribute n # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. # # @return [Integer, nil] optional :n, Integer, nil?: true # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. # # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] optional :response_format, @@ -41,15 +41,15 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. # # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageCreateVariationParams::Size }, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
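`b64_json` is only populated when the request asked for `response_format: :b64_json`; decoding it to disk is a one-liner. A sketch, where `response` stands in for any parsed images response (hypothetical here).

require "base64"

image = response.data.first # `response`: assumed result of an images call
File.binwrite("out.png", Base64.decode64(image.b64_json))
puts image.revised_prompt if image.revised_prompt # set when the prompt was rewritten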
# # @return [String, nil] optional :user, String @@ -72,7 +72,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. module Model extend OpenAI::Internal::Type::Union @@ -87,8 +87,8 @@ module Model end # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -103,7 +103,7 @@ module ResponseFormat end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. module Size extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 43a5120c..0a0c78c4 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -10,22 +10,22 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @!attribute image # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. + # is not provided, image must have transparency, which will be used as the mask. # # @return [IO, StringIO] required :image, IO # @!attribute prompt # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters. # # @return [String] required :prompt, String # @!attribute [r] mask # An additional image whose fully transparent areas (e.g. where alpha is zero) - # indicate where `image` should be edited. Must be a valid PNG file, less than - # 4MB, and have the same dimensions as `image`. + # indicate where `image` should be edited. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. # # @return [IO, StringIO, nil] optional :mask, IO @@ -36,7 +36,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. # # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::Models::ImageEditParams::Model }, nil?: true @@ -49,23 +49,23 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. # # @return [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::ImageEditParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. # # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageEditParams::Size }, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. 
+ # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -103,7 +103,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. module Model extend OpenAI::Internal::Type::Union @@ -118,8 +118,8 @@ module Model end # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -134,7 +134,7 @@ module ResponseFormat end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. module Size extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 30a2eaa3..bce729c4 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -10,7 +10,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @!attribute prompt # A text description of the desired image(s). The maximum length is 1000 - # characters for `dall-e-2` and 4000 characters for `dall-e-3`. + # characters for `dall-e-2` and 4000 characters for `dall-e-3`. # # @return [String] required :prompt, String @@ -23,15 +23,15 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @!attribute n # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. # # @return [Integer, nil] optional :n, Integer, nil?: true # @!attribute [r] quality # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality } @@ -42,33 +42,33 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::ImageGenerateParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageGenerateParams::Size }, nil?: true # @!attribute style # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. 
- # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::Models::ImageGenerateParams::Style }, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -120,8 +120,8 @@ module Model end # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. module Quality extend OpenAI::Internal::Type::Enum @@ -136,8 +136,8 @@ module Quality end # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -152,8 +152,8 @@ module ResponseFormat end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. module Size extend OpenAI::Internal::Type::Enum @@ -171,9 +171,9 @@ module Size end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. module Style extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 4fd434ab..6abb069c 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -41,77 +41,77 @@ class Moderation < OpenAI::Internal::Type::BaseModel class Categories < OpenAI::Internal::Type::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any - # target. + # target. # # @return [Boolean] required :harassment, OpenAI::Internal::Type::Boolean # @!attribute harassment_threatening # Harassment content that also includes violence or serious harm towards any - # target. + # target. 
# # @return [Boolean] required :harassment_threatening, OpenAI::Internal::Type::Boolean, api_name: :"harassment/threatening" # @!attribute hate # Content that expresses, incites, or promotes hate based on race, gender, - # ethnicity, religion, nationality, sexual orientation, disability status, or - # caste. Hateful content aimed at non-protected groups (e.g., chess players) is - # harassment. + # ethnicity, religion, nationality, sexual orientation, disability status, or + # caste. Hateful content aimed at non-protected groups (e.g., chess players) is + # harassment. # # @return [Boolean] required :hate, OpenAI::Internal::Type::Boolean # @!attribute hate_threatening # Hateful content that also includes violence or serious harm towards the targeted - # group based on race, gender, ethnicity, religion, nationality, sexual - # orientation, disability status, or caste. + # group based on race, gender, ethnicity, religion, nationality, sexual + # orientation, disability status, or caste. # # @return [Boolean] required :hate_threatening, OpenAI::Internal::Type::Boolean, api_name: :"hate/threatening" # @!attribute illicit # Content that includes instructions or advice that facilitate the planning or - # execution of wrongdoing, or that gives advice or instruction on how to commit - # illicit acts. For example, "how to shoplift" would fit this category. + # execution of wrongdoing, or that gives advice or instruction on how to commit + # illicit acts. For example, "how to shoplift" would fit this category. # # @return [Boolean, nil] required :illicit, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute illicit_violent # Content that includes instructions or advice that facilitate the planning or - # execution of wrongdoing that also includes violence, or that gives advice or - # instruction on the procurement of any weapon. + # execution of wrongdoing that also includes violence, or that gives advice or + # instruction on the procurement of any weapon. # # @return [Boolean, nil] required :illicit_violent, OpenAI::Internal::Type::Boolean, api_name: :"illicit/violent", nil?: true # @!attribute self_harm # Content that promotes, encourages, or depicts acts of self-harm, such as - # suicide, cutting, and eating disorders. + # suicide, cutting, and eating disorders. # # @return [Boolean] required :self_harm, OpenAI::Internal::Type::Boolean, api_name: :"self-harm" # @!attribute self_harm_instructions # Content that encourages performing acts of self-harm, such as suicide, cutting, - # and eating disorders, or that gives instructions or advice on how to commit such - # acts. + # and eating disorders, or that gives instructions or advice on how to commit such + # acts. # # @return [Boolean] required :self_harm_instructions, OpenAI::Internal::Type::Boolean, api_name: :"self-harm/instructions" # @!attribute self_harm_intent # Content where the speaker expresses that they are engaging or intend to engage - # in acts of self-harm, such as suicide, cutting, and eating disorders. + # in acts of self-harm, such as suicide, cutting, and eating disorders. # # @return [Boolean] required :self_harm_intent, OpenAI::Internal::Type::Boolean, api_name: :"self-harm/intent" # @!attribute sexual # Content meant to arouse sexual excitement, such as the description of sexual - # activity, or that promotes sexual services (excluding sex education and - # wellness). + # activity, or that promotes sexual services (excluding sex education and + # wellness). 
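A hedged sketch of reading these per-category flags after a moderation request, assuming the endpoint is exposed as `client.moderations.create` and returns a `results` array as in the REST API:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    moderation = client.moderations.create(
      input: "some user-generated text",
      model: "omni-moderation-latest"   # assumed model name; see ModerationModel
    )

    result = moderation.results.first
    # Slash-separated API keys such as `harassment/threatening` surface in Ruby
    # as underscored attributes via the `api_name:` mappings above.
    puts result.categories.harassment
    puts result.categories.harassment_threatening
    puts result.categories.self_harm_intent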
# # @return [Boolean] required :sexual, OpenAI::Internal::Type::Boolean diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index c5e3ff4a..41230634 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -10,16 +10,16 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute input # Input (or inputs) to classify. Can be a single string, an array of strings, or - # an array of multi-modal input objects similar to other models. + # an array of multi-modal input objects similar to other models. # # @return [String, Array, Array] required :input, union: -> { OpenAI::Models::ModerationCreateParams::Input } # @!attribute [r] model # The content moderation model you would like to use. Learn more in - # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and - # learn about available models - # [here](https://platform.openai.com/docs/models#moderation). + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). # # @return [String, Symbol, OpenAI::Models::ModerationModel, nil] optional :model, union: -> { OpenAI::Models::ModerationCreateParams::Model } @@ -38,7 +38,7 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Input (or inputs) to classify. Can be a single string, an array of strings, or - # an array of multi-modal input objects similar to other models. + # an array of multi-modal input objects similar to other models. module Input extend OpenAI::Internal::Type::Union @@ -62,9 +62,9 @@ module Input end # The content moderation model you would like to use. Learn more in - # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and - # learn about available models - # [here](https://platform.openai.com/docs/models#moderation). + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). module Model extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/other_file_chunking_strategy_object.rb b/lib/openai/models/other_file_chunking_strategy_object.rb index 9b28e285..acb54ef8 100644 --- a/lib/openai/models/other_file_chunking_strategy_object.rb +++ b/lib/openai/models/other_file_chunking_strategy_object.rb @@ -11,8 +11,8 @@ class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!parse # # This is returned when the chunking strategy is unknown. Typically, this is - # # because the file was indexed before the `chunking_strategy` concept was - # # introduced in the API. + # # because the file was indexed before the `chunking_strategy` concept was + # # introduced in the API. # # # # @param type [Symbol, :other] # # diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 0727d5c1..25128ce1 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -6,10 +6,10 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # @!attribute effort # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. 
Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true @@ -17,9 +17,9 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # @!attribute generate_summary # **computer_use_preview only** # - # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. # # @return [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] optional :generate_summary, enum: -> { OpenAI::Models::Reasoning::GenerateSummary }, nil?: true @@ -27,8 +27,8 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # @!parse # # **o-series models only** # # - # # Configuration options for - # # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # # Configuration options for + # # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # # # # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] @@ -39,9 +39,9 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # **computer_use_preview only** # - # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. # # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index 737f9d5f..a7bb035e 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -4,10 +4,10 @@ module OpenAI module Models # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. module ReasoningEffort extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/response_format_json_object.rb b/lib/openai/models/response_format_json_object.rb index bcd20439..611237a3 100644 --- a/lib/openai/models/response_format_json_object.rb +++ b/lib/openai/models/response_format_json_object.rb @@ -11,8 +11,8 @@ class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # @!parse # # JSON object response format. 
An older method of generating JSON responses. Using - # # `json_schema` is recommended for models that support it. Note that the model - # # will not generate JSON without a system or user message instructing it to do so. + # # `json_schema` is recommended for models that support it. Note that the model + # # will not generate JSON without a system or user message instructing it to do so. # # # # @param type [Symbol, :json_object] # # diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 0bb1f00c..56f2e3b1 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -17,8 +17,8 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!parse # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # # more about + # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # # # # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] # # @param type [Symbol, :json_schema] @@ -31,14 +31,14 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. + # and dashes, with a maximum length of 64. # # @return [String] required :name, String # @!attribute [r] description # A description of what the response format is for, used by the model to determine - # how to respond in the format. + # how to respond in the format. # # @return [String, nil] optional :description, String @@ -49,7 +49,7 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute [r] schema # The schema for the response format, described as a JSON Schema object. Learn how - # to build JSON schemas [here](https://json-schema.org/). + # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}, nil] optional :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] @@ -60,10 +60,10 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to - # true, the model will always follow the exact schema defined in the `schema` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. To - # learn more, read the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index dcbbba28..3af1eabe 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -30,7 +30,7 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @!parse # # A tool that controls a virtual computer. Learn more about the - # # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). 
+ # # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). # # # # @param display_height [Float] # # @param display_width [Float] diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index d7c78f66..68cbd033 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -6,14 +6,14 @@ module Responses class EasyInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # Text, image, or audio input to the model, used to generate a response. Can also - # contain previous assistant responses. + # contain previous assistant responses. # # @return [String, Array] required :content, union: -> { OpenAI::Models::Responses::EasyInputMessage::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. + # `developer`. # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] required :role, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Role } @@ -30,10 +30,10 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # @!parse # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. Messages with the - # # `assistant` role are presumed to have been generated by the model in previous - # # interactions. + # # hierarchy. Instructions given with the `developer` or `system` role take + # # precedence over instructions given with the `user` role. Messages with the + # # `assistant` role are presumed to have been generated by the model in previous + # # interactions. # # # # @param content [String, Array] # # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] @@ -44,7 +44,7 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Text, image, or audio input to the model, used to generate a response. Can also - # contain previous assistant responses. + # contain previous assistant responses. # # @see OpenAI::Models::Responses::EasyInputMessage#content module Content @@ -63,7 +63,7 @@ module Content end # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. + # `developer`. # # @see OpenAI::Models::Responses::EasyInputMessage#role module Role diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 62d497ad..8fd1295e 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -28,7 +28,7 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute [r] max_num_results # The maximum number of results to return. This number should be between 1 and 50 - # inclusive. + # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer @@ -49,8 +49,8 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!parse # # A tool that searches for relevant content from uploaded files. Learn more about - # # the - # # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # # the + # # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). 
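As a sketch of how such a tool is supplied at request time, assuming `client.responses.create` accepts plain-hash tool params alongside the typed models (both are hedged assumptions about this SDK's conventions):

    response = client.responses.create(
      model: "gpt-4o",
      input: "What does the handbook say about remote work?",
      tools: [
        {
          type: "file_search",
          vector_store_ids: ["vs_123"],              # hypothetical vector store ID
          max_num_results: 10,                       # 1..50 inclusive
          ranking_options: {score_threshold: 0.5}    # 0..1; higher returns fewer, more relevant results
        }
      ]
    )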
# # # # @param vector_store_ids [Array] # # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] @@ -93,8 +93,8 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute [r] score_threshold # The score threshold for the file search, a number between 0 and 1. Numbers - # closer to 1 will attempt to return only the most relevant results, but may - # return fewer results. + # closer to 1 will attempt to return only the most relevant results, but may + # return fewer results. # # @return [Float, nil] optional :score_threshold, Float diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 596b365a..8b2433dc 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -30,15 +30,15 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute description # A description of the function. Used by the model to determine whether or not to - # call the function. + # call the function. # # @return [String, nil] optional :description, String, nil?: true # @!parse # # Defines a function in your own code the model can choose to call. Learn more - # # about - # # [function calling](https://platform.openai.com/docs/guides/function-calling). + # # about + # # [function calling](https://platform.openai.com/docs/guides/function-calling). # # # # @param name [String] # # @param parameters [Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index a6a7a77f..7a4d136a 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -31,7 +31,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] include # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. # # @return [Array, nil] optional :include, @@ -43,7 +43,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -55,8 +55,8 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # The order to return the input items in. Default is `asc`. # - # - `asc`: Return the input items in ascending order. - # - `desc`: Return the input items in descending order. + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. # # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Responses::InputItemListParams::Order } @@ -79,8 +79,8 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # The order to return the input items in. Default is `asc`. # - # - `asc`: Return the input items in ascending order. - # - `desc`: Return the input items in descending order. + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. 
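A hedged usage sketch for these list params, assuming the endpoint is exposed as `client.responses.input_items.list` keyed by response ID, and that list results paginate as an enumerable cursor page:

    page = client.responses.input_items.list(
      "resp_123",    # hypothetical response ID
      order: :asc,   # or :desc
      limit: 20      # 1..100; default 20
    )

    page.each { |item| puts item.class }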
module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 844833f7..052c32f9 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -33,32 +33,32 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute instructions # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. # # @return [String, nil] required :instructions, String, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. # # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] required :model, union: -> { OpenAI::Models::ResponsesModel } @@ -72,11 +72,11 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute output # An array of content items generated by the model. # - # - The length and order of items in the `output` array is dependent on the - # model's response. - # - Rather than accessing the first item in the `output` array and assuming it's - # an `assistant` message with the content generated by the model, you might - # consider using the `output_text` property where supported in SDKs. + # - The length and order of items in the `output` array is dependent on the + # model's response. + # - Rather than accessing the first item in the `output` array and assuming it's + # an `assistant` message with the content generated by the model, you might + # consider using the `output_text` property where supported in SDKs. # # @return [Array] required :output, @@ -90,62 +90,62 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute temperature # What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. # # @return [Float, nil] required :temperature, Float, nil?: true # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] required :tool_choice, union: -> { OpenAI::Models::Responses::Response::ToolChoice } # @!attribute tools # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). # # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. # # @return [Float, nil] required :top_p, Float, nil?: true # @!attribute max_output_tokens # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). 
+ # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). # # @return [Integer, nil] optional :max_output_tokens, Integer, nil?: true # @!attribute previous_response_id # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). # # @return [String, nil] optional :previous_response_id, String, nil?: true @@ -153,15 +153,15 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute reasoning # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true # @!attribute [r] status # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, or `incomplete`. # # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseStatus } @@ -172,10 +172,10 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute [r] text # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } @@ -187,18 +187,18 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute truncation # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. # # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::Response::Truncation }, nil?: true # @!attribute [r] usage # Represents token usage details including input tokens, output tokens, a - # breakdown of output tokens, and the total tokens used. + # breakdown of output tokens, and the total tokens used. 
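For example, once a `Response` comes back, the usage breakdown documented above reads directly off the model (field names follow `ResponseUsage` as defined elsewhere in this series; `responses.create` is the assumed resource method):

    response = client.responses.create(model: "gpt-4o", input: "Say hello.")

    if (usage = response.usage)
      puts usage.input_tokens
      puts usage.output_tokens
      puts usage.total_tokens
    end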
# # @return [OpenAI::Models::Responses::ResponseUsage, nil] optional :usage, -> { OpenAI::Models::Responses::ResponseUsage } @@ -209,8 +209,8 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -312,8 +312,8 @@ module Reason end # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. # # @see OpenAI::Models::Responses::Response#tool_choice module ToolChoice @@ -343,11 +343,11 @@ module ToolChoice # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. # # @see OpenAI::Models::Responses::Response#truncation module Truncation diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 7eafa499..c8d6e68a 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -31,7 +31,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] required :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Status } @@ -44,8 +44,8 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!parse # # A tool call to a computer use tool. See the - # # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) - # # for more information. + # # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) + # # for more information. 
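Because `action` is a union over nine concrete types, Ruby pattern matching is a natural way to consume it; a minimal sketch over a response's output items (class names are taken verbatim from this patch):

    response.output.each do |item|
      next unless item.is_a?(OpenAI::Models::Responses::ResponseComputerToolCall)

      case item.action
      in OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click => click
        puts "click with #{click.button} button"
      in OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot
        puts "screenshot requested"
      else
        puts "unhandled action: #{item.action.class}"
      end
    end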
# # # # @param id [String] # # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] @@ -96,14 +96,14 @@ module Action class Click < OpenAI::Internal::Type::BaseModel # @!attribute button # Indicates which mouse button was pressed during the click. One of `left`, - # `right`, `wheel`, `back`, or `forward`. + # `right`, `wheel`, `back`, or `forward`. # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] required :button, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button } # @!attribute type # Specifies the event type. For a click action, this property is always set to - # `click`. + # `click`. # # @return [Symbol, :click] required :type, const: :click @@ -133,7 +133,7 @@ class Click < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Indicates which mouse button was pressed during the click. One of `left`, - # `right`, `wheel`, `back`, or `forward`. + # `right`, `wheel`, `back`, or `forward`. # # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button module Button @@ -156,7 +156,7 @@ module Button class DoubleClick < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a double click action, this property is always set - # to `double_click`. + # to `double_click`. # # @return [Symbol, :double_click] required :type, const: :double_click @@ -188,14 +188,14 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel class Drag < OpenAI::Internal::Type::BaseModel # @!attribute path # An array of coordinates representing the path of the drag action. Coordinates - # will appear as an array of objects, eg + # will appear as an array of objects, eg # - # ``` - # [ - # { x: 100, y: 200 }, - # { x: 200, y: 300 } - # ] - # ``` + # ``` + # [ + # { x: 100, y: 200 }, + # { x: 200, y: 300 } + # ] + # ``` # # @return [Array] required :path, @@ -203,7 +203,7 @@ class Drag < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a drag action, this property is always set to - # `drag`. + # `drag`. # # @return [Symbol, :drag] required :type, const: :drag @@ -246,14 +246,14 @@ class Path < OpenAI::Internal::Type::BaseModel class Keypress < OpenAI::Internal::Type::BaseModel # @!attribute keys # The combination of keys the model is requesting to be pressed. This is an array - # of strings, each representing a key. + # of strings, each representing a key. # # @return [Array] required :keys, OpenAI::Internal::Type::ArrayOf[String] # @!attribute type # Specifies the event type. For a keypress action, this property is always set to - # `keypress`. + # `keypress`. # # @return [Symbol, :keypress] required :type, const: :keypress @@ -272,7 +272,7 @@ class Keypress < OpenAI::Internal::Type::BaseModel class Move < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. 
For a move action, this property is always set to - # `move`. + # `move`. # # @return [Symbol, :move] required :type, const: :move @@ -304,7 +304,7 @@ class Move < OpenAI::Internal::Type::BaseModel class Screenshot < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a screenshot action, this property is always set - # to `screenshot`. + # to `screenshot`. # # @return [Symbol, :screenshot] required :type, const: :screenshot @@ -334,7 +334,7 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a scroll action, this property is always set to - # `scroll`. + # `scroll`. # # @return [Symbol, :scroll] required :type, const: :scroll @@ -374,7 +374,7 @@ class Type < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a type action, this property is always set to - # `type`. + # `type`. # # @return [Symbol, :type] required :type, const: :type @@ -393,7 +393,7 @@ class Type < OpenAI::Internal::Type::BaseModel class Wait < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a wait action, this property is always set to - # `wait`. + # `wait`. # # @return [Symbol, :wait] required :type, const: :wait @@ -445,7 +445,7 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseComputerToolCall#status module Status diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 0db1e5cc..2cc50149 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -30,7 +30,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute [r] acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the - # developer. + # developer. # # @return [Array, nil] optional :acknowledged_safety_checks, @@ -42,7 +42,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status } @@ -95,7 +95,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. 
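For completeness, a hedged sketch of the other direction: feeding a screenshot back after executing a pending computer action. The `computer_call_output` input shape mirrors these models, but the model name, `call_id`, and image data below are hypothetical:

    client.responses.create(
      model: "computer-use-preview",             # assumed computer-use model name
      previous_response_id: "resp_123",          # hypothetical
      input: [{
        type: "computer_call_output",
        call_id: "call_abc",                     # from the pending tool call
        output: {
          type: "computer_screenshot",           # the const required above
          image_url: "data:image/png;base64,..." # screenshot data elided
        }
      }]
    )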
# # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status module Status diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 1ac341fb..9cfc543a 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -6,7 +6,7 @@ module Responses class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel # @!attribute type # Specifies the event type. For a computer screenshot, this property is always set - # to `computer_screenshot`. + # to `computer_screenshot`. # # @return [Symbol, :computer_screenshot] required :type, const: :computer_screenshot diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 38550327..0e96b549 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -14,36 +14,36 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute input # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # # @return [String, Array] required :input, union: -> { OpenAI::Models::Responses::ResponseCreateParams::Input } # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. # # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute include # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. 
+ # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # # @return [Array, nil] optional :include, @@ -52,30 +52,30 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute instructions # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. # # @return [String, nil] optional :instructions, String, nil?: true # @!attribute max_output_tokens # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). # # @return [Integer, nil] optional :max_output_tokens, Integer, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -88,8 +88,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute previous_response_id # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). # # @return [String, nil] optional :previous_response_id, String, nil?: true @@ -97,8 +97,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute reasoning # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true @@ -111,19 +111,19 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. 
We generally recommend altering this or `top_p` but + # not both. # # @return [Float, nil] optional :temperature, Float, nil?: true # @!attribute [r] text # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } @@ -134,8 +134,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] tool_choice # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] optional :tool_choice, union: -> { OpenAI::Models::Responses::ResponseCreateParams::ToolChoice } @@ -146,19 +146,19 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] tools # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } @@ -169,10 +169,10 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. 
+ # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. # # @return [Float, nil] optional :top_p, Float, nil?: true @@ -180,19 +180,19 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute truncation # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. # # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute [r] user # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String @@ -249,13 +249,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) module Input extend OpenAI::Internal::Type::Union @@ -273,8 +273,8 @@ module Input end # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. module ToolChoice extend OpenAI::Internal::Type::Union @@ -302,11 +302,11 @@ module ToolChoice # The truncation strategy to use for the model response. 
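Reviewer note: the `temperature`, `top_p`, and `truncation` docs above map onto request parameters one-to-one. A minimal sketch, assuming a client wired up as in this SDK's README; the model name and prompt are placeholders.

    require "openai"

    client = OpenAI::Client.new # assumes OPENAI_API_KEY is set in the environment

    response = client.responses.create(
      model: "gpt-4o",
      input: "Summarize Hamlet in two sentences.",
      temperature: 0.2,  # lower = more focused; alter this *or* top_p, not both
      truncation: :auto  # drop middle input items instead of failing with a 400
    )
    puts response.id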
# - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. module Truncation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index c8afd7e5..7048c7a4 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -18,7 +18,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the file search tool call. One of `in_progress`, `searching`, - # `incomplete` or `failed`, + # `incomplete` or `failed`, # # @return [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] required :status, enum: -> { OpenAI::Models::Responses::ResponseFileSearchToolCall::Status } @@ -39,8 +39,8 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!parse # # The results of a file search tool call. See the - # # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) - # # for more information. + # # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) + # # for more information. # # # # @param id [String] # # @param queries [Array] @@ -53,7 +53,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the file search tool call. One of `in_progress`, `searching`, - # `incomplete` or `failed`, + # `incomplete` or `failed`, # # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status module Status @@ -75,10 +75,10 @@ module Status class Result < OpenAI::Internal::Type::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. 
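Reviewer note: the `attributes` constraints repeated throughout this patch (at most 16 pairs, 64-character keys, values that are strings of at most 512 characters, booleans, or numbers) are easy to violate silently. A self-contained client-side check, illustrative only:

    # Raises unless `attrs` satisfies the documented attribute constraints.
    def validate_attributes!(attrs)
      raise ArgumentError, "at most 16 pairs allowed" if attrs.size > 16

      attrs.each do |key, val|
        raise ArgumentError, "key #{key.inspect} over 64 chars" if key.to_s.length > 64

        case val
        when String
          raise ArgumentError, "value for #{key.inspect} over 512 chars" if val.length > 512
        when Numeric, true, false
          # booleans and numbers pass through unchanged
        else
          raise ArgumentError, "#{val.class} not allowed for #{key.inspect}"
        end
      end
      attrs
    end

    validate_attributes!("author" => "jane", "year" => 2024, "draft" => false)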
# # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index bdacd77c..bdb5e64f 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -5,17 +5,17 @@ module Models module Responses # An object specifying the format that the model must output. # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # The default format is `{ "type": "text" }` with no additional options. + # The default format is `{ "type": "text" }` with no additional options. # - # **Not recommended for gpt-4o and newer models:** + # **Not recommended for gpt-4o and newer models:** # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. module ResponseFormatTextConfig extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 0fac4e6d..3cbc68bd 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -6,14 +6,14 @@ module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. + # and dashes, with a maximum length of 64. # # @return [String] required :name, String # @!attribute schema # The schema for the response format, described as a JSON Schema object. Learn how - # to build JSON schemas [here](https://json-schema.org/). + # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}] required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] @@ -26,7 +26,7 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!attribute [r] description # A description of what the response format is for, used by the model to determine - # how to respond in the format. + # how to respond in the format. # # @return [String, nil] optional :description, String @@ -37,18 +37,18 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to - # true, the model will always follow the exact schema defined in the `schema` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. 
To - # learn more, read the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!parse # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # # more about + # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # # # # @param name [String] # # @param schema [Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 61ce02d2..5bf53133 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -40,7 +40,7 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCall::Status } @@ -51,8 +51,8 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!parse # # A tool call to run a function. See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. + # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # # for more information. # # # # @param arguments [String] # # @param call_id [String] @@ -66,7 +66,7 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status module Status diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index 8315cb64..17e2ceff 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -12,8 +12,8 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @!parse # # A tool call to run a function. See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. + # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # # for more information. 
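Reviewer note: to make the `schema`/`strict` interplay just above concrete, a hedged sketch of a Structured Outputs request. The format hash follows `ResponseFormatTextJSONSchemaConfig` (`name`, `schema`, `strict`); the client setup is assumed as in the earlier sketches.

    require "openai"

    client = OpenAI::Client.new

    response = client.responses.create(
      model: "gpt-4o",
      input: "Give two facts about Paris.",
      text: {
        format: {
          type: :json_schema,
          name: "city_facts", # a-z, A-Z, 0-9, underscores and dashes; max length 64
          strict: true,       # only a subset of JSON Schema is honored when true
          schema: {
            type: "object",
            properties: {
              city: { type: "string" },
              facts: { type: "array", items: { type: "string" } }
            },
            required: %w[city facts],
            additionalProperties: false
          }
        }
      }
    )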
# # # # @param id [String] # # diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 081ef864..2123bd97 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -30,7 +30,7 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } @@ -51,7 +51,7 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status module Status diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index c2cdfa12..e6d9a2e8 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -24,8 +24,8 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!parse # # The results of a web search tool call. See the - # # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for - # # more information. + # # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for + # # more information. # # # # @param id [String] # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 26bd124c..71a49423 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -4,13 +4,13 @@ module OpenAI module Models module Responses # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. 
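Reviewer note: a sketch of requesting the extra output data enumerated in the list above; the vector store ID is hypothetical and the client setup is assumed as before.

    require "openai"

    client = OpenAI::Client.new

    response = client.responses.create(
      model: "gpt-4o",
      input: "What does the onboarding doc say about laptops?",
      tools: [{ type: :file_search, vector_store_ids: ["vs_123"] }],
      include: ["file_search_call.results"] # populates results on the tool call item
    )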
module ResponseIncludable extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 883f8491..c508cf8a 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -6,7 +6,7 @@ module Responses class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!attribute detail # The detail level of the image to be sent to the model. One of `high`, `low`, or - # `auto`. Defaults to `auto`. + # `auto`. Defaults to `auto`. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] required :detail, enum: -> { OpenAI::Models::Responses::ResponseInputImage::Detail } @@ -25,14 +25,14 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!attribute image_url # The URL of the image to be sent to the model. A fully qualified URL or base64 - # encoded image in a data URL. + # encoded image in a data URL. # # @return [String, nil] optional :image_url, String, nil?: true # @!parse # # An image input to the model. Learn about - # # [image inputs](https://platform.openai.com/docs/guides/vision). + # # [image inputs](https://platform.openai.com/docs/guides/vision). # # # # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] # # @param file_id [String, nil] @@ -44,7 +44,7 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The detail level of the image to be sent to the model. One of `high`, `low`, or - # `auto`. Defaults to `auto`. + # `auto`. Defaults to `auto`. # # @see OpenAI::Models::Responses::ResponseInputImage#detail module Detail diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 62862e0e..db0e26d2 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -4,10 +4,10 @@ module OpenAI module Models module Responses # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. module ResponseInputItem extend OpenAI::Internal::Type::Union @@ -60,7 +60,7 @@ module ResponseInputItem class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of one or many input items to the model, containing different content - # types. + # types. # # @return [Array] required :content, @@ -74,7 +74,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. 
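Reviewer note: an input item exercising the `detail` levels documented above; a sketch where the data URL is a placeholder and the item shapes follow `ResponseInputImage` and the message input docs in this patch.

    require "openai"

    client = OpenAI::Client.new

    response = client.responses.create(
      model: "gpt-4o",
      input: [{
        role: :user,
        content: [
          { type: :input_text, text: "What is in this image?" },
          {
            type: :input_image,
            detail: :low, # `high`, `low`, or `auto` (the default)
            image_url: "data:image/png;base64,..." # or a fully qualified URL
          }
        ]
      }]
    )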
# # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Status } @@ -95,8 +95,8 @@ class Message < OpenAI::Internal::Type::BaseModel # @!parse # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. + # # hierarchy. Instructions given with the `developer` or `system` role take + # # precedence over instructions given with the `user` role. # # # # @param content [Array] # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] @@ -125,7 +125,7 @@ module Role end # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseInputItem::Message#status module Status @@ -189,7 +189,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the - # developer. + # developer. # # @return [Array, nil] optional :acknowledged_safety_checks, @@ -201,7 +201,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status } @@ -256,7 +256,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. # # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status module Status @@ -295,7 +295,7 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] id # The unique ID of the function tool call output. Populated when this item is - # returned via API. + # returned via API. # # @return [String, nil] optional :id, String @@ -306,7 +306,7 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status } @@ -329,7 +329,7 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. 
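Reviewer note: the `function_call_output` item above is what closes the loop on a function call. A hedged second-turn sketch; the tool definition and the `call_id`/`output` fields follow the Responses API shapes, and everything named here (`get_weather`, the IDs) is illustrative.

    require "openai"
    require "json"

    client = OpenAI::Client.new

    tools = [{
      type: :function,
      name: "get_weather", # hypothetical function
      parameters: { type: "object", properties: { city: { type: "string" } } }
    }]

    first = client.responses.create(model: "gpt-4o", input: "Weather in Oslo?", tools: tools)
    call = first.output.find { |item| item.type == :function_call }

    second = client.responses.create(
      model: "gpt-4o",
      previous_response_id: first.id, # carries conversation state forward
      input: [{
        type: :function_call_output,
        call_id: call.call_id,
        output: JSON.generate(temperature_c: 4) # function results travel as a string
      }]
    )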
# # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status module Status diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 3bb170a9..b270e74d 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -12,7 +12,7 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of one or many input items to the model, containing different content - # types. + # types. # # @return [Array] required :content, @@ -26,7 +26,7 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Status } @@ -74,7 +74,7 @@ module Role end # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseInputMessageItem#status module Status diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 272c8ab1..b46f43d3 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -25,7 +25,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] required :status, enum: -> { OpenAI::Models::Responses::ResponseOutputMessage::Status } @@ -67,7 +67,7 @@ module Content end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. # # @see OpenAI::Models::Responses::ResponseOutputMessage#status module Status diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index fa4d428c..3ae31538 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -25,7 +25,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!attribute [r] status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status } @@ -36,7 +36,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!parse # # A description of the chain of thought used by a reasoning model while generating - # # a response. + # # a response. # # # # @param id [String] # # @param summary [Array] @@ -70,7 +70,7 @@ class Summary < OpenAI::Internal::Type::BaseModel end # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. # # @see OpenAI::Models::Responses::ResponseReasoningItem#status module Status diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index eaef643f..1253ccfa 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -11,7 +11,7 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] include # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. # # @return [Array, nil] optional :include, diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index 15876663..da96c2e8 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, or `incomplete`. module ResponseStatus extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 65f31b36..7901b8fb 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -7,17 +7,17 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @!attribute [r] format_ # An object specifying the format that the model must output. # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # The default format is `{ "type": "text" }` with no additional options. + # The default format is `{ "type": "text" }` with no additional options. # - # **Not recommended for gpt-4o and newer models:** + # **Not recommended for gpt-4o and newer models:** # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Models::Responses::ResponseFormatTextConfig }, api_name: :format @@ -28,10 +28,10 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @!parse # # Configuration options for a text response from the model. Can be plain text or - # # structured JSON data. Learn more: + # # structured JSON data. 
Learn more: # # - # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # # # # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] # # diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 8d6bee6d..2a2ecd8f 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -36,7 +36,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!parse # # Represents token usage details including input tokens, output tokens, a - # # breakdown of output tokens, and the total tokens used. + # # breakdown of output tokens, and the total tokens used. # # # # @param input_tokens [Integer] # # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] @@ -52,7 +52,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. - # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). + # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). # # @return [Integer] required :cached_tokens, Integer diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 9c696f1f..e9be0652 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # A tool that searches for relevant content from uploaded files. Learn more about - # the - # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). module Tool extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 7b08e8c7..789817e8 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -5,12 +5,12 @@ module Models module Responses # Controls which (if any) tool is called by the model. # - # `none` means the model will not call any tool and instead generates a message. + # `none` means the model will not call any tool and instead generates a message. # - # `auto` means the model can pick between generating a message or calling one or - # more tools. + # `auto` means the model can pick between generating a message or calling one or + # more tools. # - # `required` means the model must call one or more tools. + # `required` means the model must call one or more tools. module ToolChoiceOptions extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index fe1d606d..d26c027f 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -6,20 +6,20 @@ module Responses class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of hosted tool the model should to use. 
Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). + # [built-in tools](https://platform.openai.com/docs/guides/tools). # - # Allowed values are: + # Allowed values are: # - # - `file_search` - # - `web_search_preview` - # - `computer_use_preview` + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Models::Responses::ToolChoiceTypes::Type } # @!parse # # Indicates that the model should use a built-in tool to generate a response. - # # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + # # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). # # # # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] # # @@ -28,13 +28,13 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # The type of hosted tool the model should to use. Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). + # [built-in tools](https://platform.openai.com/docs/guides/tools). # - # Allowed values are: + # Allowed values are: # - # - `file_search` - # - `web_search_preview` - # - `computer_use_preview` + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` # # @see OpenAI::Models::Responses::ToolChoiceTypes#type module Type diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 504bcab1..90f84d39 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -7,15 +7,15 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the web search tool. One of: # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # - `web_search_preview` + # - `web_search_preview_2025_03_11` # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Models::Responses::WebSearchTool::Type } # @!attribute [r] search_context_size # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Models::Responses::WebSearchTool::SearchContextSize } @@ -31,8 +31,8 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # @!parse # # This tool searches the web for relevant results to use in a response. Learn more - # # about the - # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + # # about the + # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # # # # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] # # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] @@ -44,8 +44,8 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. 
One of: # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # - `web_search_preview` + # - `web_search_preview_2025_03_11` # # @see OpenAI::Models::Responses::WebSearchTool#type module Type @@ -62,7 +62,7 @@ module Type end # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @see OpenAI::Models::Responses::WebSearchTool#search_context_size module SearchContextSize @@ -99,7 +99,7 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute [r] country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of - # the user, e.g. `US`. + # the user, e.g. `US`. # # @return [String, nil] optional :country, String @@ -120,7 +120,7 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute [r] timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the - # user, e.g. `America/Los_Angeles`. + # user, e.g. `America/Los_Angeles`. # # @return [String, nil] optional :timezone, String diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 9f466cc0..9bff61c3 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -6,14 +6,14 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. # # @return [Integer] required :chunk_overlap_tokens, Integer # @!attribute max_chunk_size_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. # # @return [Integer] required :max_chunk_size_tokens, Integer diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index d0da7a8f..9151584f 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -42,8 +42,8 @@ class Upload < OpenAI::Internal::Type::BaseModel # @!attribute purpose # The intended purpose of the file. - # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) - # for acceptable values. + # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + # for acceptable values. # # @return [String] required :purpose, String diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 378f2269..3b8eb1b5 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -16,7 +16,7 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] md5 # The optional md5 checksum for the file contents to verify if the bytes uploaded - # matches what you expect. + # matches what you expect. 
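Reviewer note: the `md5` parameter above is just the hex digest of the uploaded bytes, so it can be computed with the standard library before completing the upload; the completion call itself is a sketch with hypothetical file, upload, and part IDs.

    require "openai"
    require "digest"

    client = OpenAI::Client.new

    md5 = Digest::MD5.file("training_data.jsonl").hexdigest

    client.uploads.complete("upload_abc", part_ids: ["part_1", "part_2"], md5: md5)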
# # @return [String, nil] optional :md5, String diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 9b199b9d..aa01ef38 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -23,8 +23,8 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute mime_type # The MIME type of the file. # - # This must fall within the supported MIME types for your file purpose. See the - # supported MIME types for assistants and vision. + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. # # @return [String] required :mime_type, String @@ -32,8 +32,8 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute purpose # The intended purpose of the uploaded file. # - # See the - # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). # # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 4b214ef5..9abda127 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -29,11 +29,11 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -52,8 +52,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the vector store, which can be either `expired`, `in_progress`, or - # `completed`. A status of `completed` indicates that the vector store is ready - # for use. + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. # # @return [Symbol, OpenAI::Models::VectorStore::Status] required :status, enum: -> { OpenAI::Models::VectorStore::Status } @@ -82,7 +82,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!parse # # A vector store is a collection of processed files can be used by the - # # `file_search` tool. + # # `file_search` tool. # # # # @param id [String] # # @param created_at [Integer] @@ -160,8 +160,8 @@ class FileCounts < OpenAI::Internal::Type::BaseModel end # The status of the vector store, which can be either `expired`, `in_progress`, or - # `completed`. A status of `completed` indicates that the vector store is ready - # for use. + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. 
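Reviewer note: because `status` only reaches `completed` after processing, callers typically poll. A minimal sketch, assuming the resource names this SDK exposes and a hypothetical file ID.

    require "openai"

    client = OpenAI::Client.new

    store = client.vector_stores.create(name: "Support FAQ", file_ids: ["file_abc"])

    until store.status == :completed
      raise "vector store expired" if store.status == :expired

      sleep 1
      store = client.vector_stores.retrieve(store.id)
    end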
# # @see OpenAI::Models::VectorStore#status module Status @@ -182,7 +182,7 @@ module Status class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. # # @return [Symbol, :last_active_at] required :anchor, const: :last_active_at diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 42739728..8380c18b 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -10,7 +10,7 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } @@ -31,8 +31,8 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. # # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] @@ -43,11 +43,11 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -87,7 +87,7 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. # # @return [Symbol, :last_active_at] required :anchor, const: :last_active_at diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index eaea91f5..345b2830 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -10,9 +10,9 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -23,9 +23,9 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -36,7 +36,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -47,7 +47,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStoreListParams::Order } @@ -68,7 +68,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 6325952c..653db38a 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -26,7 +26,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] max_num_results # The maximum number of results to return. This number should be between 1 and 50 - # inclusive. + # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 61475c54..6b076e29 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -6,10 +6,10 @@ module Models class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. 
Values are strings with a maximum length of 512 + # characters, booleans, or numbers. # # @return [Hash{Symbol=>String, Float, Boolean}, nil] required :attributes, diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 46a09e25..145e2808 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -16,11 +16,11 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true @@ -44,7 +44,7 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. # # @return [Symbol, :last_active_at] required :anchor, const: :last_active_at diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index e6dd47c8..31d32b92 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -11,18 +11,18 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. # # @return [Array] required :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, @@ -31,7 +31,7 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. 
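Reviewer note: a static chunking strategy that respects the bounds documented earlier in this patch (`max_chunk_size_tokens` between 100 and 4096, overlap at most half of it); the batch call path and IDs are illustrative.

    require "openai"

    client = OpenAI::Client.new

    client.vector_stores.file_batches.create(
      "vs_123",
      file_ids: ["file_abc", "file_def"],
      chunking_strategy: {
        type: :static,
        static: {
          max_chunk_size_tokens: 1200, # default is 800
          chunk_overlap_tokens: 300    # must not exceed max_chunk_size_tokens / 2
        }
      }
    )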
# # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 991ac9cc..0f0bacb3 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -16,9 +16,9 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -29,9 +29,9 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -52,7 +52,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -63,7 +63,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Order } @@ -102,7 +102,7 @@ module Filter end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 37899d8f..6767c7dc 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -11,18 +11,18 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file_id # A [File](https://platform.openai.com/docs/api-reference/files) ID that the - # vector store should use. Useful for tools like `file_search` that can access - # files. + # vector store should use. Useful for tools like `file_search` that can access + # files. 
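# A hedged usage sketch of the file-attach params documented above -- the
# method path follows the vector_stores/files resource later in this series,
# and the IDs are hypothetical placeholders:
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

vs_file = client.vector_stores.files.create(
  "vs_example123",                # hypothetical vector store ID
  file_id: "file_example456",     # hypothetical File ID; `file_search` can then use it
  attributes: {author: "jane"}    # optional key-value metadata, per the docs above
)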
# # @return [String] required :file_id, String # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, @@ -31,7 +31,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 0bcfb9d3..88d3a55c 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -11,9 +11,9 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] after # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. # # @return [String, nil] optional :after, String @@ -24,9 +24,9 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] before # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. # # @return [String, nil] optional :before, String @@ -47,7 +47,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] limit # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer @@ -58,7 +58,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute [r] order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
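# Cursor pagination as documented above, in a hedged sketch reusing `client`
# from the earlier example (the vector store ID is hypothetical, and the
# `data`/`id` accessors are assumptions about the returned page object):
page = client.vector_stores.files.list("vs_example123", limit: 20, order: :desc)
more = client.vector_stores.files.list(
  "vs_example123",
  after: page.data.last.id  # resume right after the last object of this page
)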
# # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileListParams::Order } @@ -96,7 +96,7 @@ module Filter end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 7f72b454..2e5f4d52 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -16,10 +16,10 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. # # @return [Hash{Symbol=>String, Float, Boolean}, nil] required :attributes, diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 7845ce47..71ba4e7d 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -19,7 +19,7 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # @!attribute last_error # The last error associated with this vector store file. Will be `null` if there - # are no errors. + # are no errors. # # @return [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] required :last_error, -> { OpenAI::Models::VectorStores::VectorStoreFile::LastError }, nil?: true @@ -32,34 +32,34 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the vector store file, which can be either `in_progress`, - # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the - # vector store file is ready for use. + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. # # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] required :status, enum: -> { OpenAI::Models::VectorStores::VectorStoreFile::Status } # @!attribute usage_bytes # The total vector store usage in bytes. Note that this may be different from the - # original file size. + # original file size. # # @return [Integer] required :usage_bytes, Integer # @!attribute vector_store_id # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # that the [File](https://platform.openai.com/docs/api-reference/files) is - # attached to. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. # # @return [String] required :vector_store_id, String # @!attribute attributes # Set of 16 key-value pairs that can be attached to an object. 
This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, @@ -122,7 +122,7 @@ class LastError < OpenAI::Internal::Type::BaseModel # @!parse # # The last error associated with this vector store file. Will be `null` if there - # # are no errors. + # # are no errors. # # # # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] # # @param message [String] @@ -150,8 +150,8 @@ module Code end # The status of the vector store file, which can be either `in_progress`, - # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the - # vector store file is ready for use. + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. # # @see OpenAI::Models::VectorStores::VectorStoreFile#status module Status diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index ebcd7390..55e8644b 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -13,7 +13,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!attribute created_at # The Unix timestamp (in seconds) for when the vector store files batch was - # created. + # created. # # @return [Integer] required :created_at, Integer @@ -31,16 +31,16 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the vector store files batch, which can be either `in_progress`, - # `completed`, `cancelled` or `failed`. + # `completed`, `cancelled` or `failed`. # # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] required :status, enum: -> { OpenAI::Models::VectorStores::VectorStoreFileBatch::Status } # @!attribute vector_store_id # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # that the [File](https://platform.openai.com/docs/api-reference/files) is - # attached to. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. # # @return [String] required :vector_store_id, String @@ -104,7 +104,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel end # The status of the vector store files batch, which can be either `in_progress`, - # `completed`, `cancelled` or `failed`. + # `completed`, `cancelled` or `failed`. # # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status module Status diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index 5f374fb2..f4eaf933 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -2,10 +2,10 @@ module OpenAI # Specify HTTP behaviour to use for a specific request. These options supplement - # or override those provided at the client level. 
+ # or override those provided at the client level.
 #
- # When making a request, you can pass an actual {RequestOptions} instance, or
- # simply pass a Hash with symbol keys matching the attributes on this class.
+ # When making a request, you can pass an actual {RequestOptions} instance, or
+ # simply pass a Hash with symbol keys matching the attributes on this class.
 class RequestOptions < OpenAI::Internal::Type::BaseModel
 # @api private
 #
@@ -27,28 +27,28 @@ def self.validate!(opts)
 # @!attribute idempotency_key
 # Idempotency key to send with request and all associated retries. Will only be
- # sent for write requests.
+ # sent for write requests.
 #
 # @return [String, nil]
 optional :idempotency_key, String
 # @!attribute extra_query
 # Extra query params to send with the request. These are `.merge`’d into any
- # `query` given at the client level.
+ # `query` given at the client level.
 #
 # @return [Hash{String=>Array, String, nil}, nil]
 optional :extra_query, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::ArrayOf[String]]
 # @!attribute extra_headers
 # Extra headers to send with the request. These are `.merge`’d into any
- # `extra_headers` given at the client level.
+ # `extra_headers` given at the client level.
 #
 # @return [Hash{String=>String, nil}, nil]
 optional :extra_headers, OpenAI::Internal::Type::HashOf[String, nil?: true]
 # @!attribute extra_body
 # Extra data to send with the request. These are deep merged into any data
- # generated as part of the normal request.
+ # generated as part of the normal request.
 #
 # @return [Object, nil]
 optional :extra_body, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb
index 2a669ca4..647f80c8 100644
--- a/lib/openai/resources/audio/transcriptions.rb
+++ b/lib/openai/resources/audio/transcriptions.rb
@@ -5,9 +5,9 @@ module Resources
 class Audio
 class Transcriptions
 # See {OpenAI::Resources::Audio::Transcriptions#create_streaming} for streaming
- # counterpart.
+ # counterpart.
 #
- # Transcribes audio into the input language.
+ # Transcribes audio into the input language.
 #
 # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
 #
@@ -41,9 +41,9 @@ def create(params)
 end
 # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming
- # counterpart.
+ # counterpart.
 #
- # Transcribes audio into the input language.
+ # Transcribes audio into the input language.
 #
 # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
 #
diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb
index fdd3806a..84f5ad93 100644
--- a/lib/openai/resources/batches.rb
+++ b/lib/openai/resources/batches.rb
@@ -70,8 +70,8 @@ def list(params = {})
 end
 # Cancels an in-progress batch. The batch will be in status `cancelling` for up to
- # 10 minutes, before changing to `cancelled`, where it will have partial results
- # (if any) available in the output file.
+ # 10 minutes, before changing to `cancelled`, where it will have partial results
+ # (if any) available in the output file.
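# The per-request options above can be passed as a plain Hash with matching
# symbol keys -- a hedged sketch reusing `client` (the batch ID is a
# hypothetical placeholder):
client.batches.cancel(
  "batch_example789",
  request_options: {
    idempotency_key: "cancel-batch-1",        # only sent for write requests
    extra_headers: {"x-demo-header" => "1"},  # merged into client-level headers
    extra_query: {"demo" => ["1"]}            # merged into client-level query
  }
)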
# # @overload cancel(batch_id, request_options: {}) # diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 3eea6fe8..b72ad867 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -96,7 +96,7 @@ def delete(thread_id, params = {}) # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # - # Create a thread and run it in one request. + # Create a thread and run it in one request. # # @overload create_and_run(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -136,9 +136,9 @@ def create_and_run(params) end # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming - # counterpart. + # counterpart. # - # Create a thread and run it in one request. + # Create a thread and run it in one request. # # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index cd12bbcc..8ebefd9c 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -9,9 +9,9 @@ class Runs attr_reader :steps # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming - # counterpart. + # counterpart. # - # Create a run. + # Create a run. # # @overload create(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -56,9 +56,9 @@ def create(thread_id, params) end # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming - # counterpart. + # counterpart. # - # Create a run. + # Create a run. # # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # @@ -209,12 +209,12 @@ def cancel(run_id, params) end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for - # streaming counterpart. + # streaming counterpart. # - # When a run has the `status: "requires_action"` and `required_action.type` is - # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the - # tool calls once they're all completed. All outputs must be submitted in a single - # request. + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. 
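# The tool-output submission described in the paragraph above, as a hedged
# sketch reusing `client` (the run, thread, and tool call IDs are hypothetical
# placeholders, and the `tool_outputs` shape follows the documented params):
client.beta.threads.runs.submit_tool_outputs(
  "run_example123",
  thread_id: "thread_example456",
  tool_outputs: [
    {tool_call_id: "call_example789", output: "70F and sunny"}  # one entry per tool call
  ]
)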
# # @overload submit_tool_outputs(run_id, thread_id:, tool_outputs:, request_options: {}) # @@ -246,12 +246,12 @@ def submit_tool_outputs(run_id, params) end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for - # non-streaming counterpart. + # non-streaming counterpart. # - # When a run has the `status: "requires_action"` and `required_action.type` is - # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the - # tool calls once they're all completed. All outputs must be submitted in a single - # request. + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. # # @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 8d816acc..499c9f52 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -9,23 +9,23 @@ class Completions # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. # - # **Starting a new project?** We recommend trying - # [Responses](https://platform.openai.com/docs/api-reference/responses) to take - # advantage of the latest OpenAI platform features. Compare - # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). # - # --- + # --- # - # Creates a model response for the given chat conversation. Learn more in the - # [text generation](https://platform.openai.com/docs/guides/text-generation), - # [vision](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio) guides. + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. # - # Parameter support can differ depending on the model used to generate the - # response, particularly for newer reasoning models. Parameters that are only - # supported for reasoning models are noted below. For the current state of - # unsupported parameters in reasoning models, - # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
# # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # @@ -81,23 +81,23 @@ def create(params) # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. # - # **Starting a new project?** We recommend trying - # [Responses](https://platform.openai.com/docs/api-reference/responses) to take - # advantage of the latest OpenAI platform features. Compare - # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). # - # --- + # --- # - # Creates a model response for the given chat conversation. Learn more in the - # [text generation](https://platform.openai.com/docs/guides/text-generation), - # [vision](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio) guides. + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. # - # Parameter support can differ depending on the model used to generate the - # response, particularly for newer reasoning models. Parameters that are only - # supported for reasoning models are noted below. For the current state of - # unsupported parameters in reasoning models, - # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # @@ -155,7 +155,7 @@ def stream_raw(params) end # Get a stored chat completion. Only Chat Completions that have been created with - # the `store` parameter set to `true` will be returned. + # the `store` parameter set to `true` will be returned. 
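# Creating a stored completion and fetching it back, per the docs above -- a
# hedged sketch reusing `client` (model and message content are illustrative):
completion = client.chat.completions.create(
  messages: [{role: :user, content: "Say hello"}],
  model: "gpt-4o",
  store: true  # required for retrieve/update/list/delete to find it later
)
client.chat.completions.retrieve(completion.id)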
# # @overload retrieve(completion_id, request_options: {}) # @@ -175,8 +175,8 @@ def retrieve(completion_id, params = {}) end # Modify a stored chat completion. Only Chat Completions that have been created - # with the `store` parameter set to `true` can be modified. Currently, the only - # supported modification is to update the `metadata` field. + # with the `store` parameter set to `true` can be modified. Currently, the only + # supported modification is to update the `metadata` field. # # @overload update(completion_id, metadata:, request_options: {}) # @@ -199,7 +199,7 @@ def update(completion_id, params) end # List stored Chat Completions. Only Chat Completions that have been stored with - # the `store` parameter set to `true` will be returned. + # the `store` parameter set to `true` will be returned. # # @overload list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # @@ -226,7 +226,7 @@ def list(params = {}) end # Delete a stored chat completion. Only Chat Completions that have been created - # with the `store` parameter set to `true` can be deleted. + # with the `store` parameter set to `true` can be deleted. # # @overload delete(completion_id, request_options: {}) # diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index 489df23b..ab3a3e19 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -6,7 +6,7 @@ class Chat class Completions class Messages # Get the messages in a stored chat completion. Only Chat Completions that have - # been created with the `store` parameter set to `true` will be returned. + # been created with the `store` parameter set to `true` will be returned. # # @overload list(completion_id, after: nil, limit: nil, order: nil, request_options: {}) # diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 3440bd65..859c6b7f 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -5,7 +5,7 @@ module Resources class Completions # See {OpenAI::Resources::Completions#create_streaming} for streaming counterpart. # - # Creates a completion for the provided prompt and parameters. + # Creates a completion for the provided prompt and parameters. # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # @@ -48,7 +48,7 @@ def create(params) # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. # - # Creates a completion for the provided prompt and parameters. + # Creates a completion for the provided prompt and parameters. # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 2c1b995e..3dc1bc0e 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -4,26 +4,26 @@ module OpenAI module Resources class Files # Upload a file that can be used across various endpoints. 
Individual files can be - # up to 512 MB, and the size of all files uploaded by one organization can be up - # to 100 GB. - # - # The Assistants API supports files up to 2 million tokens and of specific file - # types. See the - # [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for - # details. - # - # The Fine-tuning API only supports `.jsonl` files. The input also has certain - # required formats for fine-tuning - # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or - # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) - # models. - # - # The Batch API only supports `.jsonl` files up to 200 MB in size. The input also - # has a specific required - # [format](https://platform.openai.com/docs/api-reference/batch/request-input). - # - # Please [contact us](https://help.openai.com/) if you need to increase these - # storage limits. + # up to 512 MB, and the size of all files uploaded by one organization can be up + # to 100 GB. + # + # The Assistants API supports files up to 2 million tokens and of specific file + # types. See the + # [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + # details. + # + # The Fine-tuning API only supports `.jsonl` files. The input also has certain + # required formats for fine-tuning + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # models. + # + # The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + # has a specific required + # [format](https://platform.openai.com/docs/api-reference/batch/request-input). + # + # Please [contact us](https://help.openai.com/) if you need to increase these + # storage limits. # # @overload create(file:, purpose:, request_options: {}) # diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 8032932a..da5b8f03 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -8,12 +8,12 @@ class Jobs attr_reader :checkpoints # Creates a fine-tuning job which begins the process of creating a new model from - # a given dataset. + # a given dataset. # - # Response includes details of the enqueued job including job status and the name - # of the fine-tuned models once complete. + # Response includes details of the enqueued job including job status and the name + # of the fine-tuned models once complete. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # @@ -44,7 +44,7 @@ def create(params) # Get info about a fine-tuning job. 
# - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) # # @overload retrieve(fine_tuning_job_id, request_options: {}) # diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index 3ab368d3..b77422d4 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -4,7 +4,7 @@ module OpenAI module Resources class Models # Retrieves a model instance, providing basic information about the model such as - # the owner and permissioning. + # the owner and permissioning. # # @overload retrieve(model, request_options: {}) # @@ -24,7 +24,7 @@ def retrieve(model, params = {}) end # Lists the currently available models, and provides basic information about each - # one such as the owner and availability. + # one such as the owner and availability. # # @overload list(request_options: {}) # @@ -44,7 +44,7 @@ def list(params = {}) end # Delete a fine-tuned model. You must have the Owner role in your organization to - # delete a model. + # delete a model. # # @overload delete(model, request_options: {}) # diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 802ccbd1..6df4561d 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -4,7 +4,7 @@ module OpenAI module Resources class Moderations # Classifies if text and/or image inputs are potentially harmful. Learn more in - # the [moderation guide](https://platform.openai.com/docs/guides/moderation). + # the [moderation guide](https://platform.openai.com/docs/guides/moderation). # # @overload create(input:, model: nil, request_options: {}) # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index a4a9ff87..e77901b4 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -8,17 +8,17 @@ class Responses # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. # - # Creates a model response. Provide - # [text](https://platform.openai.com/docs/guides/text) or - # [image](https://platform.openai.com/docs/guides/images) inputs to generate - # [text](https://platform.openai.com/docs/guides/text) or - # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have - # the model call your own - # [custom code](https://platform.openai.com/docs/guides/function-calling) or use - # built-in [tools](https://platform.openai.com/docs/guides/tools) like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use - # your own data as input for the model's response. + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. 
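# The Responses description above as a minimal hedged call, reusing `client`
# (model and input are illustrative):
response = client.responses.create(
  input: "Write a one-sentence bedtime story about a unicorn.",
  model: "gpt-4o"
)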
# # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # @@ -61,17 +61,17 @@ def create(params) # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. # - # Creates a model response. Provide - # [text](https://platform.openai.com/docs/guides/text) or - # [image](https://platform.openai.com/docs/guides/images) inputs to generate - # [text](https://platform.openai.com/docs/guides/text) or - # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have - # the model call your own - # [custom code](https://platform.openai.com/docs/guides/function-calling) or use - # built-in [tools](https://platform.openai.com/docs/guides/tools) like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use - # your own data as input for the model's response. + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. # # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index 1f4fc2bf..1c497d22 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -7,24 +7,24 @@ class Uploads attr_reader :parts # Creates an intermediate - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object - # that you can add - # [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. - # Currently, an Upload can accept at most 8 GB in total and expires after an hour - # after you create it. + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + # that you can add + # [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + # Currently, an Upload can accept at most 8 GB in total and expires after an hour + # after you create it. # - # Once you complete the Upload, we will create a - # [File](https://platform.openai.com/docs/api-reference/files/object) object that - # contains all the parts you uploaded. This File is usable in the rest of our - # platform as a regular File object. + # Once you complete the Upload, we will create a + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # contains all the parts you uploaded. This File is usable in the rest of our + # platform as a regular File object. 
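# The Upload -> Part -> complete lifecycle described here and in the Parts
# resource later in this series, as a hedged sketch reusing `client` (the
# path and MIME type are illustrative):
path = "train.jsonl"
upload = client.uploads.create(
  bytes: File.size(path),          # must match the bytes actually uploaded
  filename: File.basename(path),
  mime_type: "text/jsonl",
  purpose: :"fine-tune"
)
part = client.uploads.parts.create(upload.id, data: File.open(path))  # <= 64 MB per Part
done = client.uploads.complete(upload.id, part_ids: [part.id])        # ordered Part IDs
done.file  # the nested File object, usable like any other File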
# - # For certain `purpose` values, the correct `mime_type` must be specified. Please - # refer to documentation for the - # [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files). + # For certain `purpose` values, the correct `mime_type` must be specified. Please + # refer to documentation for the + # [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files). # - # For guidance on the proper filename extensions for each purpose, please follow - # the documentation on - # [creating a File](https://platform.openai.com/docs/api-reference/files/create). + # For guidance on the proper filename extensions for each purpose, please follow + # the documentation on + # [creating a File](https://platform.openai.com/docs/api-reference/files/create). # # @overload create(bytes:, filename:, mime_type:, purpose:, request_options: {}) # @@ -68,18 +68,18 @@ def cancel(upload_id, params = {}) end # Completes the - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). # - # Within the returned Upload object, there is a nested - # [File](https://platform.openai.com/docs/api-reference/files/object) object that - # is ready to use in the rest of the platform. + # Within the returned Upload object, there is a nested + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # is ready to use in the rest of the platform. # - # You can specify the order of the Parts by passing in an ordered list of the Part - # IDs. + # You can specify the order of the Parts by passing in an ordered list of the Part + # IDs. # - # The number of bytes uploaded upon completion must match the number of bytes - # initially specified when creating the Upload object. No Parts may be added after - # an Upload is completed. + # The number of bytes uploaded upon completion must match the number of bytes + # initially specified when creating the Upload object. No Parts may be added after + # an Upload is completed. # # @overload complete(upload_id, part_ids:, md5: nil, request_options: {}) # diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index 11eaeb11..343a20e3 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -5,16 +5,16 @@ module Resources class Uploads class Parts # Adds a - # [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. - # A Part represents a chunk of bytes from the file you are trying to upload. + # [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + # A Part represents a chunk of bytes from the file you are trying to upload. # - # Each Part can be at most 64 MB, and you can add Parts until you hit the Upload - # maximum of 8 GB. + # Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + # maximum of 8 GB. # - # It is possible to add multiple Parts in parallel. You can decide the intended - # order of the Parts when you - # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). + # It is possible to add multiple Parts in parallel. 
You can decide the intended + # order of the Parts when you + # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). # # @overload create(upload_id, data:, request_options: {}) # diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index 1fc69777..3547ae53 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -122,7 +122,7 @@ def delete(vector_store_id, params = {}) end # Search a vector store for relevant chunks based on a query and file attributes - # filter. + # filter. # # @overload search(vector_store_id, query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) # diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 5772d514..71268646 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -54,7 +54,7 @@ def retrieve(batch_id, params) end # Cancel a vector store file batch. This attempts to cancel the processing of - # files in this batch as soon as possible. + # files in this batch as soon as possible. # # @overload cancel(batch_id, vector_store_id:, request_options: {}) # diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 0c044e34..9f6900e7 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -5,8 +5,8 @@ module Resources class VectorStores class Files # Create a vector store file by attaching a - # [File](https://platform.openai.com/docs/api-reference/files) to a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + # [File](https://platform.openai.com/docs/api-reference/files) to a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). # # @overload create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # @@ -110,9 +110,9 @@ def list(vector_store_id, params = {}) end # Delete a vector store file. This will remove the file from the vector store but - # the file itself will not be deleted. To delete the file, use the - # [delete file](https://platform.openai.com/docs/api-reference/files/delete) - # endpoint. + # the file itself will not be deleted. To delete the file, use the + # [delete file](https://platform.openai.com/docs/api-reference/files/delete) + # endpoint. # # @overload delete(file_id, vector_store_id:, request_options: {}) # diff --git a/rbi/lib/openai/internal.rbi b/rbi/lib/openai/internal.rbi index 42138966..76548034 100644 --- a/rbi/lib/openai/internal.rbi +++ b/rbi/lib/openai/internal.rbi @@ -4,7 +4,7 @@ module OpenAI # @api private module Internal # Due to the current WIP status of Shapes support in Sorbet, types referencing - # this alias might be refined in the future. + # this alias might be refined in the future. AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } OMIT = T.let(T.anything, T.anything) diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index b8b35744..5653189b 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -154,7 +154,7 @@ module OpenAI private def send_request(request, redirect_count:, retry_count:, send_retry_header:); end # Execute the request specified by `req`. 
This is the method that all resource - # methods call into. + # methods call into. # # @overload request(method, path, query: {}, headers: {}, body: nil, unwrap: nil, page: nil, stream: nil, model: OpenAI::Internal::Type::Unknown, options: {}) sig do diff --git a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi index 35b91830..fb9572da 100644 --- a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi +++ b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi @@ -17,7 +17,7 @@ module OpenAI end # from the golang stdlib - # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 + # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 KEEP_ALIVE_TIMEOUT = 30 class << self diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 8b8c9d16..0b0d262a 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -16,7 +16,7 @@ module OpenAI # @api private # # Assumes superclass fields are totally defined before fields are accessed / - # defined on subclasses. + # defined on subclasses. sig do returns( T::Hash[ @@ -99,7 +99,7 @@ module OpenAI # @api private # # `request_only` attributes not excluded from `.#coerce` when receiving responses - # even if well behaved servers should not send them + # even if well behaved servers should not send them sig { params(blk: T.proc.void).void } private def request_only(&blk); end @@ -138,33 +138,33 @@ module OpenAI end # Returns the raw value associated with the given key, if found. Otherwise, nil is - # returned. + # returned. # - # It is valid to lookup keys that are not in the API spec, for example to access - # undocumented features. This method does not parse response data into - # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. + # It is valid to lookup keys that are not in the API spec, for example to access + # undocumented features. This method does not parse response data into + # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. sig { params(key: Symbol).returns(T.nilable(T.anything)) } def [](key); end # Returns a Hash of the data underlying this object. O(1) # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. + # Keys are Symbols and values are the raw values from the response. The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. sig { overridable.returns(OpenAI::Internal::AnyHash) } def to_h; end # Returns a Hash of the data underlying this object. O(1) # - # Keys are Symbols and values are the raw values from the response. The return - # value indicates which values were ever set on the object. i.e. there will be a - # key in this hash if they ever were, even if the set value was nil. + # Keys are Symbols and values are the raw values from the response. 
The return + # value indicates which values were ever set on the object. i.e. there will be a + # key in this hash if they ever were, even if the set value was nil. # - # This method is not recursive. The returned value is shared by the object, so it - # should not be mutated. + # This method is not recursive. The returned value is shared by the object, so it + # should not be mutated. sig { overridable.returns(OpenAI::Internal::AnyHash) } def to_hash; end diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/lib/openai/internal/type/converter.rbi index 99bef8c2..cec2e8a2 100644 --- a/rbi/lib/openai/internal/type/converter.rbi +++ b/rbi/lib/openai/internal/type/converter.rbi @@ -51,13 +51,13 @@ module OpenAI # # Based on `target`, transform `value` into `target`, to the extent possible: # - # 1. if the given `value` conforms to `target` already, return the given `value` - # 2. if it's possible and safe to convert the given `value` to `target`, then the - # converted value - # 3. otherwise, the given `value` unaltered + # 1. if the given `value` conforms to `target` already, return the given `value` + # 2. if it's possible and safe to convert the given `value` to `target`, then the + # converted value + # 3. otherwise, the given `value` unaltered # - # The coercion process is subject to improvement between minor release versions. - # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode + # The coercion process is subject to improvement between minor release versions. + # See https://docs.pydantic.dev/latest/concepts/unions/#smart-mode sig do params( target: OpenAI::Internal::Type::Converter::Input, @@ -70,24 +70,24 @@ module OpenAI target, value, # The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: + # coercion strategy when we have to decide between multiple possible conversion + # targets: # - # - `true`: the conversion must be exact, with minimum coercion. - # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. + # - `true`: the conversion must be exact, with minimum coercion. + # - `false`: the conversion can be approximate, with some coercion. + # - `:strong`: the conversion must be exact, with no coercion, and raise an error + # if not possible. # - # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For - # any given conversion attempt, the exactness will be updated based on how closely - # the value recursively matches the target type: + # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For + # any given conversion attempt, the exactness will be updated based on how closely + # the value recursively matches the target type: # - # - `yes`: the value can be converted to the target type with minimum coercion. - # - `maybe`: the value can be converted to the target type with some reasonable - # coercion. - # - `no`: the value cannot be converted to the target type. + # - `yes`: the value can be converted to the target type with minimum coercion. + # - `maybe`: the value can be converted to the target type with some reasonable + # coercion. + # - `no`: the value cannot be converted to the target type. # - # See implementation below for more details. + # See implementation below for more details. 
state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0}
 ); end
 # @api private
diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi
index 9fcc30ee..f74dc677 100644
--- a/rbi/lib/openai/internal/type/enum.rbi
+++ b/rbi/lib/openai/internal/type/enum.rbi
@@ -6,15 +6,15 @@ module OpenAI
 # @api private
 #
 # A value from among a specified list of options. OpenAPI enum values map to Ruby
- # values in the SDK as follows:
+ # values in the SDK as follows:
 #
- # 1. boolean => true | false
- # 2. integer => Integer
- # 3. float => Float
- # 4. string => Symbol
+ # 1. boolean => true | false
+ # 2. integer => Integer
+ # 3. float => Float
+ # 4. string => Symbol
 #
- # We can therefore convert string values to Symbols, but can't convert other
- # values safely.
+ # We can therefore convert string values to Symbols, but can't convert other
+ # values safely.
 module Enum
 include OpenAI::Internal::Type::Converter
@@ -37,7 +37,7 @@ module OpenAI
 # @api private
 #
 # Unlike with primitives, `Enum` additionally validates that the value is a member
- # of the enum.
+ # of the enum.
 sig do
 override
 .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Internal::Type::Converter::State)
diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi
index 36689b2b..60be3113 100644
--- a/rbi/lib/openai/internal/util.rbi
+++ b/rbi/lib/openai/internal/util.rbi
@@ -52,7 +52,7 @@ module OpenAI
 # @api private
 #
 # Recursively merge one hash with another. If the values at a given key are not
- # both hashes, just take the new value.
+ # both hashes, just take the new value.
 sig do
 params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean)
 .returns(T.anything)
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi
index 9a810d27..8dd902b4 100644
--- a/rbi/lib/openai/models/audio/speech_create_params.rbi
+++ b/rbi/lib/openai/models/audio/speech_create_params.rbi
@@ -12,19 +12,19 @@ module OpenAI
 attr_accessor :input
 # One of the available [TTS models](https://platform.openai.com/docs/models#tts):
- # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`.
+ # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`.
 sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) }
 attr_accessor :model
 # The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and
- # `verse`. Previews of the voices are available in the
- # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
+ # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and
+ # `verse`. Previews of the voices are available in the
+ # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 sig { returns(T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol)) }
 attr_accessor :voice
 # Control the voice of your generated audio with additional instructions. Does not
- # work with `tts-1` or `tts-1-hd`.
+ # work with `tts-1` or `tts-1-hd`.
 sig { returns(T.nilable(String)) }
 attr_reader :instructions
@@ -32,7 +32,7 @@ module OpenAI
 attr_writer :instructions
 # The format to return audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
- # `wav`, and `pcm`.
+ # `wav`, and `pcm`.
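# The speech params above in one hedged call, reusing `client` and assuming
# the resource is exposed as `client.audio.speech` (input and voice are
# illustrative; the response is binary audio, handled as your app requires):
audio = client.audio.speech.create(
  input: "Hello there!",
  model: :"gpt-4o-mini-tts",
  voice: :alloy,
  response_format: :mp3,  # or opus, aac, flac, wav, pcm
  speed: 1.0              # 0.25..4.0, default 1.0
)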
sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } attr_reader :response_format @@ -40,7 +40,7 @@ module OpenAI attr_writer :response_format # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. sig { returns(T.nilable(Float)) } attr_reader :speed @@ -87,7 +87,7 @@ module OpenAI def to_hash; end # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. module Model extend OpenAI::Internal::Type::Union @@ -96,9 +96,9 @@ module OpenAI end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and - # `verse`. Previews of the voices are available in the - # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). module Voice extend OpenAI::Internal::Type::Union @@ -123,7 +123,7 @@ module OpenAI end # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, - # `wav`, and `pcm`. + # `wav`, and `pcm`. module ResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi index 97b95ef4..ccea9d77 100644 --- a/rbi/lib/openai/models/audio/transcription.rbi +++ b/rbi/lib/openai/models/audio/transcription.rbi @@ -9,8 +9,8 @@ module OpenAI attr_accessor :text # The log probabilities of the tokens in the transcription. Only returned with the - # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added - # to the `include` array. + # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added + # to the `include` array. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::Transcription::Logprob])) } attr_reader :logprobs @@ -23,7 +23,7 @@ module OpenAI attr_writer :logprobs # Represents a transcription response returned by model, based on the provided - # input. + # input. sig do params( text: String, diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 866e4c50..5511185c 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -8,21 +8,21 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(IO, StringIO)) } attr_accessor :file # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } attr_accessor :model # Additional information to include in the transcription response. 
`logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } attr_reader :include @@ -30,8 +30,8 @@ module OpenAI attr_writer :include # The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. sig { returns(T.nilable(String)) } attr_reader :language @@ -39,9 +39,9 @@ module OpenAI attr_writer :language # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. sig { returns(T.nilable(String)) } attr_reader :prompt @@ -49,8 +49,8 @@ module OpenAI attr_writer :prompt # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } attr_reader :response_format @@ -58,10 +58,10 @@ module OpenAI attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } attr_reader :temperature @@ -69,10 +69,10 @@ module OpenAI attr_writer :temperature # The timestamp granularities to populate for this transcription. - # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. 
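A usage sketch tying the transcription parameters above together; the `client.audio.transcriptions.create` call path is assumed from the SDK's resource layout and may differ from the published API.

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumed call path; parameter names follow the sigs above.
transcription = client.audio.transcriptions.create(
  file: File.open("meeting.mp3", "rb"),        # the file object, not a file name
  model: "whisper-1",
  language: "en",                              # ISO-639-1 hint improves accuracy and latency
  response_format: :verbose_json,              # required for timestamp granularities
  timestamp_granularities: [:segment, :word]   # word timestamps add extra latency
)
puts transcription.text
```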
sig do returns( T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) @@ -132,8 +132,8 @@ module OpenAI def to_hash; end # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). module Model extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/lib/openai/models/audio/transcription_create_response.rbi index ba3131e3..c6ca8f08 100644 --- a/rbi/lib/openai/models/audio/transcription_create_response.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_response.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Audio # Represents a transcription response returned by model, based on the provided - # input. + # input. module TranscriptionCreateResponse extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi index 60e0d8c4..37306ed0 100644 --- a/rbi/lib/openai/models/audio/transcription_segment.rbi +++ b/rbi/lib/openai/models/audio/transcription_segment.rbi @@ -9,12 +9,12 @@ module OpenAI attr_accessor :id # Average logprob of the segment. If the value is lower than -1, consider the - # logprobs failed. + # logprobs failed. sig { returns(Float) } attr_accessor :avg_logprob # Compression ratio of the segment. If the value is greater than 2.4, consider the - # compression failed. + # compression failed. sig { returns(Float) } attr_accessor :compression_ratio @@ -23,7 +23,7 @@ module OpenAI attr_accessor :end_ # Probability of no speech in the segment. If the value is higher than 1.0 and the - # `avg_logprob` is below -1, consider this segment silent. + # `avg_logprob` is below -1, consider this segment silent. sig { returns(Float) } attr_accessor :no_speech_prob diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/lib/openai/models/audio/transcription_stream_event.rbi index d09c413a..4a3dc34e 100644 --- a/rbi/lib/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_stream_event.rbi @@ -4,9 +4,9 @@ module OpenAI module Models module Audio # Emitted when there is an additional text delta. This is also the first event - # emitted when the transcription starts. Only emitted when you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `Stream` parameter set to `true`. + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. module TranscriptionStreamEvent extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi index c37fbdd0..c0c5e553 100644 --- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi @@ -13,8 +13,8 @@ module OpenAI attr_accessor :type # The log probabilities of the delta. Only included if you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `include[]` parameter set to `logprobs`. 
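The segment-quality heuristics above translate directly into a predicate; the thresholds are taken verbatim from the `TranscriptionSegment` doc comments.

```ruby
# Returns true when a verbose-transcription segment looks unreliable,
# using the thresholds documented on TranscriptionSegment.
def suspect_segment?(segment)
  logprobs_failed    = segment.avg_logprob < -1
  compression_failed = segment.compression_ratio > 2.4
  probably_silent    = segment.no_speech_prob > 1.0 && segment.avg_logprob < -1
  logprobs_failed || compression_failed || probably_silent
end
```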
+ # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob])) } attr_reader :logprobs @@ -27,9 +27,9 @@ module OpenAI attr_writer :logprobs # Emitted when there is an additional text delta. This is also the first event - # emitted when the transcription starts. Only emitted when you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `Stream` parameter set to `true`. + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. sig do params( delta: String, diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index 03d16168..95c68dcc 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -13,9 +13,9 @@ module OpenAI attr_accessor :type # The log probabilities of the individual tokens in the transcription. Only - # included if you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `include[]` parameter set to `logprobs`. + # included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob])) } attr_reader :logprobs @@ -28,9 +28,9 @@ module OpenAI attr_writer :logprobs # Emitted when the transcription is complete. Contains the complete transcription - # text. Only emitted when you - # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # with the `Stream` parameter set to `true`. + # text. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. sig do params( text: String, diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index a8c181ae..76b0838c 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -34,7 +34,7 @@ module OpenAI attr_writer :words # Represents a verbose json transcription response returned by model, based on the - # provided input. + # provided input. sig do params( duration: Float, diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index adc943d6..7e113ff2 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -8,19 +8,19 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The audio file object (not file name) translate, in one of these formats: flac, - # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(IO, StringIO)) } attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. 
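A matching sketch for the translation endpoint; as with transcriptions, the `client.audio.translations.create` call path is an assumption.

```ruby
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumed call path; `whisper-1` is the only supported model per the docs above.
translation = client.audio.translations.create(
  file: File.open("entretien_fr.m4a", "rb"),
  model: "whisper-1"
)
puts translation.text # the endpoint translates the audio into English
```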
+ # Whisper V2 model) is currently available. sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } attr_accessor :model # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should be in English. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should be in English. sig { returns(T.nilable(String)) } attr_reader :prompt @@ -28,7 +28,7 @@ module OpenAI attr_writer :prompt # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } attr_reader :response_format @@ -36,10 +36,10 @@ module OpenAI attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. sig { returns(T.nilable(Float)) } attr_reader :temperature @@ -76,7 +76,7 @@ module OpenAI def to_hash; end # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # Whisper V2 model) is currently available. module Model extend OpenAI::Internal::Type::Union @@ -85,7 +85,7 @@ module OpenAI end # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. module ResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index c5d2582e..23a709ab 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -3,8 +3,8 @@ module OpenAI module Models # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. module AudioResponseFormat extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi index d1ba038a..d072297d 100644 --- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi @@ -8,7 +8,7 @@ module OpenAI attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # `800` and `chunk_overlap_tokens` of `400`. + # `800` and `chunk_overlap_tokens` of `400`. 
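For reference, the `auto` defaults described above are currently equivalent to this explicit static configuration (values come from the doc comments in this patch and are subject to change between releases):

```ruby
# Today's `auto` defaults expressed as an explicit static strategy.
chunking_strategy = {
  type: :static,
  static: {
    max_chunk_size_tokens: 800, # allowed range is 100..4096
    chunk_overlap_tokens: 400   # must not exceed half of max_chunk_size_tokens
  }
}
```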
sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto); end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index a8a659d1..7542c9b1 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -100,11 +100,11 @@ module OpenAI attr_writer :in_progress_at # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index f33919b3..6f907cae 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -7,35 +7,35 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The time frame within which the batch should be processed. Currently only `24h` - # is supported. + # is supported. sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } attr_accessor :completion_window # The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } attr_accessor :endpoint # The ID of an uploaded file that contains requests for the new batch. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your input file must be formatted as a - # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), - # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 - # requests, and can be up to 200 MB in size. + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. sig { returns(String) } attr_accessor :input_file_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. 
Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -66,7 +66,7 @@ module OpenAI def to_hash; end # The time frame within which the batch should be processed. Currently only `24h` - # is supported. + # is supported. module CompletionWindow extend OpenAI::Internal::Type::Enum @@ -81,9 +81,9 @@ module OpenAI end # The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. module Endpoint extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index a78f54c3..9ae31328 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -7,9 +7,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -17,7 +17,7 @@ module OpenAI attr_writer :after # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi index 4dbc147a..f8ef7368 100644 --- a/rbi/lib/openai/models/beta/assistant.rbi +++ b/rbi/lib/openai/models/beta/assistant.rbi @@ -17,24 +17,24 @@ module OpenAI attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. sig { returns(T.nilable(String)) } attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # ID of the model to use. 
You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(String) } attr_accessor :model @@ -47,8 +47,8 @@ module OpenAI attr_accessor :object # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T::Array[ @@ -63,25 +63,25 @@ module OpenAI attr_accessor :tools # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -97,15 +97,15 @@ module OpenAI attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. 
+ # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) } attr_reader :tool_resources @@ -118,10 +118,10 @@ module OpenAI attr_writer :tool_resources # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p @@ -233,9 +233,9 @@ module OpenAI attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), @@ -258,8 +258,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter`` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter`` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -275,9 +275,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index b225b423..b0c7ab98 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -8,10 +8,10 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # ID of the model to use. 
You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } attr_accessor :model @@ -20,16 +20,16 @@ module OpenAI attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. sig { returns(T.nilable(String)) } attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -39,33 +39,33 @@ module OpenAI # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. 
# - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -81,15 +81,15 @@ module OpenAI attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) } attr_reader :tool_resources @@ -102,8 +102,8 @@ module OpenAI attr_writer :tool_resources # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T.nilable( @@ -135,10 +135,10 @@ module OpenAI attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p @@ -223,10 +223,10 @@ module OpenAI def to_hash; end # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. 
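A sketch of the JSON-mode caveat above: when requesting `json_object`, the instructions (or a message) must explicitly ask for JSON. The `client.beta.assistants.create` call path is assumed.

```ruby
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

assistant = client.beta.assistants.create(
  model: "gpt-4o",
  # Without an explicit JSON instruction, JSON mode can emit whitespace
  # until the token limit is hit, per the warning above.
  instructions: "Answer with a single valid JSON object and nothing else.",
  response_format: {type: :json_object}
)
```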
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -261,9 +261,9 @@ module OpenAI attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: T.any( @@ -289,8 +289,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -306,9 +306,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids @@ -316,9 +316,9 @@ module OpenAI attr_writer :vector_store_ids # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this assistant. There can be a maximum of 1 - # vector store attached to the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this assistant. There can be a maximum of 1 + # vector store attached to the assistant. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) @@ -366,7 +366,7 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. sig do returns( T.nilable( @@ -392,8 +392,8 @@ module OpenAI attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -401,11 +401,11 @@ module OpenAI attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. 
Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -439,7 +439,7 @@ module OpenAI def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -449,7 +449,7 @@ module OpenAI attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # `800` and `chunk_overlap_tokens` of `400`. + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto); end @@ -506,12 +506,12 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } attr_accessor :max_chunk_size_tokens diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 9bf1cab2..52cd7565 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -8,9 +8,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -18,9 +18,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -28,7 +28,7 @@ module OpenAI attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -36,7 +36,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
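The cursor parameters above compose as follows; the list call path and the page object's shape are assumptions based on the SDK's resource layout.

```ruby
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

page = client.beta.assistants.list(limit: 20, order: :desc)
last_id = page.data.last.id

# `after` picks up where the previous page left off.
next_page = client.beta.assistants.list(limit: 20, order: :desc, after: last_id)
```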
sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } attr_reader :order @@ -70,7 +70,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi index 43f47c46..af1951eb 100644 --- a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_response_format_option.rbi @@ -4,25 +4,25 @@ module OpenAI module Models module Beta # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. module AssistantResponseFormatOption extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 407895b1..5cb77eb2 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -5,30 +5,30 @@ module OpenAI module Beta # Represents an event emitted when streaming a Run. 
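An illustrative `json_schema` payload for the Structured Outputs option described above; the schema itself is invented for the example.

```ruby
response_format = {
  type: :json_schema,
  json_schema: {
    name: "math_answer",
    strict: true, # request exact schema adherence
    schema: {
      type: "object",
      properties: {answer: {type: "number"}},
      required: ["answer"],
      additionalProperties: false
    }
  }
}
```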
# - # Each event in a server-sent events stream has an `event` and `data` property: + # Each event in a server-sent events stream has an `event` and `data` property: # - # ``` - # event: thread.created - # data: {"id": "thread_123", "object": "thread", ...} - # ``` + # ``` + # event: thread.created + # data: {"id": "thread_123", "object": "thread", ...} + # ``` # - # We emit events whenever a new object is created, transitions to a new state, or - # is being streamed in parts (deltas). For example, we emit `thread.run.created` - # when a new run is created, `thread.run.completed` when a run completes, and so - # on. When an Assistant chooses to create a message during a run, we emit a - # `thread.message.created event`, a `thread.message.in_progress` event, many - # `thread.message.delta` events, and finally a `thread.message.completed` event. + # We emit events whenever a new object is created, transitions to a new state, or + # is being streamed in parts (deltas). For example, we emit `thread.run.created` + # when a new run is created, `thread.run.completed` when a run completes, and so + # on. When an Assistant chooses to create a message during a run, we emit a + # `thread.message.created event`, a `thread.message.in_progress` event, many + # `thread.message.delta` events, and finally a `thread.message.completed` event. # - # We may add additional events over time, so we recommend handling unknown events - # gracefully in your code. See the - # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) - # to learn how to integrate the Assistants API with streaming. + # We may add additional events over time, so we recommend handling unknown events + # gracefully in your code. See the + # [Assistants API quickstart](https://platform.openai.com/docs/assistants/overview) + # to learn how to integrate the Assistants API with streaming. module AssistantStreamEvent extend OpenAI::Internal::Type::Union class ThreadCreated < OpenAI::Internal::Type::BaseModel # Represents a thread that contains - # [messages](https://platform.openai.com/docs/api-reference/messages). + # [messages](https://platform.openai.com/docs/api-reference/messages). sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data @@ -46,8 +46,8 @@ module OpenAI attr_writer :enabled # Occurs when a new - # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # created. + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. sig do params( data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), @@ -64,7 +64,7 @@ module OpenAI class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -75,7 +75,7 @@ module OpenAI attr_accessor :event # Occurs when a new - # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -88,7 +88,7 @@ module OpenAI class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). 
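Per the advice above to handle unknown events gracefully, a consumer might dispatch like this; the stream object and the symbolic event names are assumed to follow the `event`/`data` shape documented above.

```ruby
# `stream` is assumed to be an Enumerable of events, each exposing the
# `event` and `data` properties described above.
def consume(stream)
  stream.each do |event|
    case event.event
    when :"thread.run.created"
      puts "run #{event.data.id} created"
    when :"thread.message.completed"
      puts "message #{event.data.id} completed"
    else
      # New event types may be added over time; skip ones we don't know.
    end
  end
end
```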
+ # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -99,7 +99,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `queued` status. + # moves to a `queued` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -112,7 +112,7 @@ module OpenAI class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -123,7 +123,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to an `in_progress` status. + # moves to an `in_progress` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -136,7 +136,7 @@ module OpenAI class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -147,7 +147,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `requires_action` status. + # moves to a `requires_action` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -160,7 +160,7 @@ module OpenAI class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -171,7 +171,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # is completed. + # is completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -184,7 +184,7 @@ module OpenAI class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -195,7 +195,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # ends with status `incomplete`. + # ends with status `incomplete`. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -208,7 +208,7 @@ module OpenAI class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). 
sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -219,7 +219,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # fails. + # fails. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -232,7 +232,7 @@ module OpenAI class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -243,7 +243,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `cancelling` status. + # moves to a `cancelling` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -256,7 +256,7 @@ module OpenAI class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -267,7 +267,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # is cancelled. + # is cancelled. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -280,7 +280,7 @@ module OpenAI class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -291,7 +291,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # expires. + # expires. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -314,8 +314,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is created. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -341,8 +341,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # moves to an `in_progress` state. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -358,7 +358,7 @@ module OpenAI class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during - # streaming. + # streaming. 
sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data @@ -371,8 +371,8 @@ module OpenAI attr_accessor :event # Occurs when parts of a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # are being streamed. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), @@ -398,8 +398,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is completed. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -425,8 +425,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # fails. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -452,8 +452,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is cancelled. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -479,8 +479,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # expires. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -496,7 +496,7 @@ module OpenAI class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -507,8 +507,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # created. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -521,7 +521,7 @@ module OpenAI class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -532,8 +532,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # to an `in_progress` state. + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. 
sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -546,7 +546,7 @@ module OpenAI class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during - # streaming. + # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data @@ -557,8 +557,8 @@ module OpenAI attr_accessor :event # Occurs when parts of a - # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # being streamed. + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. sig do params( data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), @@ -574,7 +574,7 @@ module OpenAI class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -585,8 +585,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # completed. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -599,7 +599,7 @@ module OpenAI class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -610,8 +610,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # before it is completed. + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -633,8 +633,8 @@ module OpenAI attr_accessor :event # Occurs when an - # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. - # This can happen due to an internal server error or a timeout. + # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + # This can happen due to an internal server error or a timeout. sig do params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 71efdfec..4b2436cd 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -15,7 +15,7 @@ module OpenAI attr_writer :function # Specifies a tool the model should use. Use to force the model to call a specific - # tool. + # tool. 
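
          # An illustrative sketch, not taken from this patch: forcing the model to
          # call one specific tool via the constructor declared just below. The
          # function name "my_function" mirrors the example used in the comments of
          # this file; the hash is coerced per the sig's T.any(..., AnyHash).
          choice = OpenAI::Models::Beta::AssistantToolChoice.new(
            type: :function,
            function: {name: "my_function"}
          )
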
sig do params( type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index 88d3b4e5..ce3df8b3 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -4,19 +4,19 @@ module OpenAI module Models module Beta # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. module AssistantToolChoiceOption extend OpenAI::Internal::Type::Union # `none` means the model will not call any tools and instead generates a message. - # `auto` means the model can pick between generating a message or calling one or - # more tools. `required` means the model must call one or more tools before - # responding to the user. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools before + # responding to the user. module Auto extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 10c48190..eb334355 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -12,24 +12,24 @@ module OpenAI attr_accessor :description # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. sig { returns(T.nilable(String)) } attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. 
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } attr_reader :model @@ -42,33 +42,33 @@ module OpenAI # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. 
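
          # An illustrative sketch, not taken from this patch: opting an assistant
          # update into the JSON mode described above. Per the warning in that
          # comment, the instructions themselves must also ask for JSON; the hash
          # form of ResponseFormatJSONObject is an assumption about coercion.
          params = OpenAI::Models::Beta::AssistantUpdateParams.new(
            instructions: "Answer only with a single valid JSON object.",
            response_format: {type: "json_object"}
          )
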
sig do returns( T.nilable( @@ -84,15 +84,15 @@ module OpenAI attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } attr_accessor :temperature # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) } attr_reader :tool_resources @@ -105,8 +105,8 @@ module OpenAI attr_writer :tool_resources # A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # assistant. Tools can be of types `code_interpreter`, `file_search`, or - # `function`. + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. sig do returns( T.nilable( @@ -138,10 +138,10 @@ module OpenAI attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p @@ -226,10 +226,10 @@ module OpenAI def to_hash; end # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -318,9 +318,9 @@ module OpenAI attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: T.any( @@ -346,9 +346,9 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # Overrides the list of - # [file](https://platform.openai.com/docs/api-reference/files) IDs made available - # to the `code_interpreter` tool. There can be a maximum of 20 files associated - # with the tool. 
+ # [file](https://platform.openai.com/docs/api-reference/files) IDs made available + # to the `code_interpreter` tool. There can be a maximum of 20 files associated + # with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -364,9 +364,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # Overrides the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index 6914e6bb..c3df4c59 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -32,13 +32,13 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The maximum number of results the file search tool should output. The default is - # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between - # 1 and 50 inclusive. + # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + # 1 and 50 inclusive. # - # Note that the file search tool may output fewer than `max_num_results` results. - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # Note that the file search tool may output fewer than `max_num_results` results. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(Integer)) } attr_reader :max_num_results @@ -46,11 +46,11 @@ module OpenAI attr_writer :max_num_results # The ranking options for the file search. If not specified, the file search tool - # will use the `auto` ranker and a score_threshold of 0. + # will use the `auto` ranker and a score_threshold of 0. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions)) } attr_reader :ranking_options @@ -82,12 +82,12 @@ module OpenAI class RankingOptions < OpenAI::Internal::Type::BaseModel # The score threshold for the file search. All values must be a floating point - # number between 0 and 1. + # number between 0 and 1. sig { returns(Float) } attr_accessor :score_threshold # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } attr_reader :ranker @@ -95,11 +95,11 @@ module OpenAI attr_writer :ranker # The ranking options for the file search. If not specified, the file search tool - # will use the `auto` ranker and a score_threshold of 0. + # will use the `auto` ranker and a score_threshold of 0. 
# - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig do params( score_threshold: Float, @@ -121,7 +121,7 @@ module OpenAI def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. module Ranker extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index c312e9f2..3f070ad2 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -4,14 +4,14 @@ module OpenAI module Models module Beta # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # created. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. module MessageStreamEvent extend OpenAI::Internal::Type::Union class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -22,8 +22,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # created. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -36,7 +36,7 @@ module OpenAI class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -47,8 +47,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # to an `in_progress` state. + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -61,7 +61,7 @@ module OpenAI class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during - # streaming. + # streaming. sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } attr_reader :data @@ -72,8 +72,8 @@ module OpenAI attr_accessor :event # Occurs when parts of a - # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # being streamed. + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. sig do params( data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), @@ -89,7 +89,7 @@ module OpenAI class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -100,8 +100,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # completed. + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -114,7 +114,7 @@ module OpenAI class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Message) } attr_reader :data @@ -125,8 +125,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # before it is completed. + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index 7aff0d4d..c004aab8 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Beta # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is created. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. module RunStepStreamEvent extend OpenAI::Internal::Type::Union @@ -21,8 +21,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is created. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -48,8 +48,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # moves to an `in_progress` state. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -65,7 +65,7 @@ module OpenAI class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during - # streaming. + # streaming. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data @@ -78,8 +78,8 @@ module OpenAI attr_accessor :event # Occurs when parts of a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # are being streamed. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. 
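
          # An illustrative sketch, not taken from this patch: accumulating the
          # streamed run step deltas documented above. Each event's `data` is a
          # RunStepDeltaEvent carrying the step `id` plus only the changed fields;
          # `stream` is a hypothetical enumerable of stream events.
          deltas = Hash.new { |h, k| h[k] = [] }
          stream.each do |event|
            deltas[event.data.id] << event.data.delta if event.event == :"thread.run.step.delta"
          end
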
sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), @@ -105,8 +105,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is completed. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -132,8 +132,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # fails. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -159,8 +159,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # is cancelled. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), @@ -186,8 +186,8 @@ module OpenAI attr_accessor :event # Occurs when a - # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # expires. + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. sig do params( data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index de5bcb0f..29f03175 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -4,13 +4,13 @@ module OpenAI module Models module Beta # Occurs when a new - # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. module RunStreamEvent extend OpenAI::Internal::Type::Union class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -21,7 +21,7 @@ module OpenAI attr_accessor :event # Occurs when a new - # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -34,7 +34,7 @@ module OpenAI class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -45,7 +45,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `queued` status. + # moves to a `queued` status. 
sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -58,7 +58,7 @@ module OpenAI class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -69,7 +69,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to an `in_progress` status. + # moves to an `in_progress` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -82,7 +82,7 @@ module OpenAI class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -93,7 +93,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `requires_action` status. + # moves to a `requires_action` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -106,7 +106,7 @@ module OpenAI class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -117,7 +117,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # is completed. + # is completed. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -130,7 +130,7 @@ module OpenAI class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -141,7 +141,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # ends with status `incomplete`. + # ends with status `incomplete`. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -154,7 +154,7 @@ module OpenAI class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -165,7 +165,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # fails. + # fails. 
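
          # An illustrative sketch, not taken from this patch: answering the
          # `requires_action` status documented above by submitting tool outputs.
          # `required_action` and its `submit_tool_outputs.tool_calls` come from
          # the Assistants API run object; `client` and the exact method shape
          # shown here are assumptions, not taken from this patch.
          if event.event == :"thread.run.requires_action"
            run = event.data
            outputs = run.required_action.submit_tool_outputs.tool_calls.map do |call|
              {tool_call_id: call.id, output: "42"} # hypothetical result per call
            end
            client.beta.threads.runs.submit_tool_outputs(
              run.id,
              thread_id: run.thread_id,
              tool_outputs: outputs
            )
          end
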
sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -178,7 +178,7 @@ module OpenAI class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -189,7 +189,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # moves to a `cancelling` status. + # moves to a `cancelling` status. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -202,7 +202,7 @@ module OpenAI class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -213,7 +213,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # is cancelled. + # is cancelled. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) @@ -226,7 +226,7 @@ module OpenAI class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig { returns(OpenAI::Models::Beta::Threads::Run) } attr_reader :data @@ -237,7 +237,7 @@ module OpenAI attr_accessor :event # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # expires. + # expires. sig do params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 0e294e8a..0a393255 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -13,11 +13,11 @@ module OpenAI attr_accessor :created_at # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -26,9 +26,9 @@ module OpenAI attr_accessor :object # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. 
For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) } attr_reader :tool_resources @@ -41,7 +41,7 @@ module OpenAI attr_writer :tool_resources # Represents a thread that contains - # [messages](https://platform.openai.com/docs/api-reference/messages). + # [messages](https://platform.openai.com/docs/api-reference/messages). sig do params( id: String, @@ -92,9 +92,9 @@ module OpenAI attr_writer :file_search # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), @@ -117,8 +117,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -134,9 +134,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index f9240bfe..bc0633ec 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -8,51 +8,51 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. sig { returns(String) } attr_accessor :assistant_id # Override the default system message of the assistant. This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } attr_accessor :instructions # The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # run. 
The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } attr_accessor :max_completion_tokens # The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T.nilable(T::Boolean)) } attr_reader :parallel_tool_calls @@ -60,25 +60,25 @@ module OpenAI attr_writer :parallel_tool_calls # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -94,13 +94,13 @@ module OpenAI attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } attr_accessor :temperature # Options to create a new thread. If no thread is provided when running a request, - # an empty thread will be created. + # an empty thread will be created. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread)) } attr_reader :thread @@ -111,12 +111,12 @@ module OpenAI attr_writer :thread # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. sig do returns( T.nilable( @@ -130,9 +130,9 @@ module OpenAI attr_accessor :tool_choice # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. 
For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) } attr_reader :tool_resources @@ -145,7 +145,7 @@ module OpenAI attr_writer :tool_resources # Override the tools the assistant can use for this run. This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. sig do returns( T.nilable( @@ -162,15 +162,15 @@ module OpenAI attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) } attr_reader :truncation_strategy @@ -294,9 +294,9 @@ module OpenAI def to_hash; end # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. module Model extend OpenAI::Internal::Type::Union @@ -306,7 +306,7 @@ module OpenAI class Thread < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. + # start the thread with. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message])) } attr_reader :messages @@ -319,18 +319,18 @@ module OpenAI attr_writer :messages # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. 
For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) } attr_reader :tool_resources @@ -345,7 +345,7 @@ module OpenAI attr_writer :tool_resources # Options to create a new thread. If no thread is provided when running a request, - # an empty thread will be created. + # an empty thread will be created. sig do params( messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::AnyHash)], @@ -390,10 +390,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } attr_accessor :role @@ -402,11 +402,11 @@ module OpenAI attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -490,10 +490,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. module Role extend OpenAI::Internal::Type::Enum @@ -637,9 +637,9 @@ module OpenAI attr_writer :file_search # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. 
For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: T.any( @@ -668,8 +668,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -685,9 +685,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids @@ -695,9 +695,9 @@ module OpenAI attr_writer :vector_store_ids # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this thread. There can be a maximum of 1 vector - # store attached to the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. sig do returns( T.nilable( @@ -747,7 +747,7 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. sig do returns( T.nilable( @@ -773,8 +773,8 @@ module OpenAI attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -782,11 +782,11 @@ module OpenAI attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -820,7 +820,7 @@ module OpenAI def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -830,7 +830,7 @@ module OpenAI attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # `800` and `chunk_overlap_tokens` of `400`. + # `800` and `chunk_overlap_tokens` of `400`. 
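
                # An illustrative sketch, not taken from this patch: the `auto`
                # strategy just described, next to the `static` variant documented
                # below, which spells out the same 800/400 defaults explicitly and
                # must keep the overlap at or below half of `max_chunk_size_tokens`.
                auto_strategy   = {type: :auto}
                static_strategy = {
                  type: :static,
                  static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
                }
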
sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto); end @@ -887,12 +887,12 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } attr_accessor :max_chunk_size_tokens @@ -954,9 +954,9 @@ module OpenAI attr_writer :file_search # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. sig do params( code_interpreter: T.any( @@ -985,8 +985,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -1002,9 +1002,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this assistant. There can be a maximum of 1 vector store attached to - # the assistant. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids @@ -1033,19 +1033,19 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) } attr_accessor :type # The number of most recent messages from the thread when constructing the context - # for the run. + # for the run. sig { returns(T.nilable(Integer)) } attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. 
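
As a quick illustration of the two documented forms, a `truncation_strategy` value is either the default `auto` mode or a `last_messages` window. A sketch only, built directly from the fields above:

auto_truncation     = { type: :auto }                              # middle messages are dropped to fit the context
windowed_truncation = { type: :last_messages, last_messages: 10 }  # keep only the 10 most recent messages
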
sig do params( type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, @@ -1067,9 +1067,9 @@ module OpenAI def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. module Type extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 285edbe9..9d73e499 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -8,7 +8,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. + # start the thread with. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message])) } attr_reader :messages @@ -21,18 +21,18 @@ module OpenAI attr_writer :messages # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) } attr_reader :tool_resources @@ -88,10 +88,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. 
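
To make the role semantics above concrete, a hedged sketch of seeding a new thread with both roles. The client setup is assumed boilerplate, and the method path is inferred from this params class rather than quoted from elsewhere in the SDK:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) # assumed setup

thread = client.beta.threads.create(
  messages: [
    { role: :user, content: "Summarize my last invoice." },       # user-generated input
    { role: :assistant, content: "Sure, which invoice number?" }  # injected assistant turn
  ]
)
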
sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } attr_accessor :role @@ -100,11 +100,11 @@ module OpenAI attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -183,10 +183,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. module Role extend OpenAI::Internal::Type::Enum @@ -319,9 +319,9 @@ module OpenAI attr_writer :file_search # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), @@ -344,8 +344,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -361,9 +361,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids @@ -371,9 +371,9 @@ module OpenAI attr_writer :vector_store_ids # A helper to create a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # with file_ids and attach it to this thread. 
There can be a maximum of 1 vector - # store attached to the thread. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) @@ -421,7 +421,7 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. sig do returns( T.nilable( @@ -447,8 +447,8 @@ module OpenAI attr_writer :chunking_strategy # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to - # add to the vector store. There can be a maximum of 10000 files in a vector - # store. + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -456,11 +456,11 @@ module OpenAI attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -494,7 +494,7 @@ module OpenAI def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. + # strategy. module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -504,7 +504,7 @@ module OpenAI attr_accessor :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # `800` and `chunk_overlap_tokens` of `400`. + # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :auto); end @@ -561,12 +561,12 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } attr_accessor :max_chunk_size_tokens diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index e2268f47..f169b98c 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Represents a thread that contains - # [messages](https://platform.openai.com/docs/api-reference/messages). + # [messages](https://platform.openai.com/docs/api-reference/messages). 
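
A hedged sketch of reacting to the event this class models. The `stream` enumerator and the exact event name are assumptions based on the comment and signature above, not verbatim SDK output:

stream = [] # placeholder: substitute the enumerator returned by a streaming create-and-run helper
stream.each do |event|
  next unless event.event == :"thread.created"
  thread = event.data # an OpenAI::Models::Beta::Thread per the signature above
  puts "created thread #{thread.id}"
end
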
sig { returns(OpenAI::Models::Beta::Thread) } attr_reader :data @@ -23,8 +23,8 @@ module OpenAI attr_writer :enabled # Occurs when a new - # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # created. + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. sig do params( data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 4ec25c94..1eb3f670 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -8,18 +8,18 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) } attr_reader :tool_resources @@ -77,9 +77,9 @@ module OpenAI attr_writer :file_search # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. sig do params( code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), @@ -102,8 +102,8 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # available to the `code_interpreter` tool. There can be a maximum of 20 files - # associated with the tool. + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -119,9 +119,9 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel # The - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # attached to this thread. There can be a maximum of 1 vector store attached to - # the thread. 
+ # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. sig { returns(T.nilable(T::Array[String])) } attr_reader :vector_store_ids diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/lib/openai/models/beta/threads/annotation.rbi index 0b4b796a..6ed86777 100644 --- a/rbi/lib/openai/models/beta/threads/annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. module Annotation extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi index 26a9fe9b..deb39180 100644 --- a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/annotation_delta.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. module AnnotationDelta extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index e29ed266..a9b10bb9 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -31,8 +31,8 @@ module OpenAI attr_accessor :type # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. sig do params( end_index: Integer, diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 3a09db06..1d28545a 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -44,8 +44,8 @@ module OpenAI attr_writer :text # A citation within the message that points to a specific quote from a specific - # File associated with the assistant or the message. Generated when the assistant - # uses the "file_search" tool to search files. + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. 
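
To show where these annotation objects surface in practice, a hedged sketch that walks the annotations of a finished message. Accessor names follow the models in this file; `message` is assumed to be a retrieved OpenAI::Models::Beta::Threads::Message:

message.content.each do |block|
  next unless block.respond_to?(:text) # only text blocks carry annotations
  block.text.annotations.each do |ann|
    # FileCitationAnnotation pairs the quoted span with the source file ID.
    if ann.respond_to?(:file_citation)
      puts "#{ann.text} cites file #{ann.file_citation.file_id}"
    end
  end
end
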
sig do params( index: Integer, diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 93c25e4e..87953109 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -31,7 +31,7 @@ module OpenAI attr_accessor :type # A URL for the file that's generated when the assistant used the - # `code_interpreter` tool to generate a file. + # `code_interpreter` tool to generate a file. sig do params( end_index: Integer, diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index e8f4b3d2..4c2c8c36 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -44,7 +44,7 @@ module OpenAI attr_writer :text # A URL for the file that's generated when the assistant used the - # `code_interpreter` tool to generate a file. + # `code_interpreter` tool to generate a file. sig do params( index: Integer, diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 25343dd0..788ca5de 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -6,13 +6,13 @@ module OpenAI module Threads class ImageFile < OpenAI::Internal::Type::BaseModel # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # in the message content. Set `purpose="vision"` when uploading the File if you - # need to later display the file content. + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. sig { returns(String) } attr_accessor :file_id # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } attr_reader :detail @@ -29,7 +29,7 @@ module OpenAI def to_hash; end # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index b50f4624..90524b05 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -16,7 +16,7 @@ module OpenAI attr_accessor :type # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. 
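
A small illustrative sketch of the content part this block models, combined with a text part in a message-create payload. The file ID is a placeholder, and per the comment above the File must have been uploaded with purpose="vision" to be displayable later:

content = [
  { type: :image_file, image_file: { file_id: "file-xyz789", detail: :low } },
  { type: :text, text: "Describe what is in this image." }
]
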
sig do params( image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash), diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index da45783e..d0d866ed 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -6,7 +6,7 @@ module OpenAI module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } attr_reader :detail @@ -14,8 +14,8 @@ module OpenAI attr_writer :detail # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # in the message content. Set `purpose="vision"` when uploading the File if you - # need to later display the file content. + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -35,7 +35,7 @@ module OpenAI def to_hash; end # Specifies the detail level of the image if specified by the user. `low` uses - # fewer tokens, you can opt in to high resolution using `high`. + # fewer tokens, you can opt in to high resolution using `high`. module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 4dfa0c31..74be2ead 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -20,7 +20,7 @@ module OpenAI attr_writer :image_file # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. sig do params( index: Integer, diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index d0e2b929..27f59579 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -6,12 +6,12 @@ module OpenAI module Threads class ImageURL < OpenAI::Internal::Type::BaseModel # The external URL of the image, must be a supported image types: jpeg, jpg, png, - # gif, webp. + # gif, webp. sig { returns(String) } attr_accessor :url # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. Default value is `auto` + # to high resolution using `high`. Default value is `auto` sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } attr_reader :detail @@ -28,7 +28,7 @@ module OpenAI def to_hash; end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. Default value is `auto` + # to high resolution using `high`. 
Default value is `auto` module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 5efc1733..69c06976 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -6,7 +6,7 @@ module OpenAI module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. + # to high resolution using `high`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } attr_reader :detail @@ -14,7 +14,7 @@ module OpenAI attr_writer :detail # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, - # webp. + # webp. sig { returns(T.nilable(String)) } attr_reader :url @@ -33,7 +33,7 @@ module OpenAI def to_hash; end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # to high resolution using `high`. + # to high resolution using `high`. module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index 29e9be2a..ae809809 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -10,8 +10,8 @@ module OpenAI attr_accessor :id # If applicable, the ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) that - # authored this message. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) that + # authored this message. sig { returns(T.nilable(String)) } attr_accessor :assistant_id @@ -59,11 +59,11 @@ module OpenAI attr_writer :incomplete_details # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -76,23 +76,23 @@ module OpenAI attr_accessor :role # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) - # associated with the creation of this message. Value is `null` when messages are - # created manually using the create message or create thread endpoints. + # associated with the creation of this message. Value is `null` when messages are + # created manually using the create message or create thread endpoints. sig { returns(T.nilable(String)) } attr_accessor :run_id # The status of the message, which can be either `in_progress`, `incomplete`, or - # `completed`. + # `completed`. sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } attr_accessor :status # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that - # this message belongs to. + # this message belongs to. sig { returns(String) } attr_accessor :thread_id # Represents a message within a - # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # [thread](https://platform.openai.com/docs/api-reference/threads). sig do params( id: String, @@ -319,7 +319,7 @@ module OpenAI end # The status of the message, which can be either `in_progress`, `incomplete`, or - # `completed`. + # `completed`. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/lib/openai/models/beta/threads/message_content.rbi index 148ce53f..99a0c488 100644 --- a/rbi/lib/openai/models/beta/threads/message_content.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContent extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi index c1dba8a3..a6ad0c14 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_delta.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContentDelta extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi index 012559c9..fbe37283 100644 --- a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi @@ -5,7 +5,7 @@ module OpenAI module Beta module Threads # References an image [File](https://platform.openai.com/docs/api-reference/files) - # in the content of a message. + # in the content of a message. module MessageContentPartParam extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 32a91cfd..94ce2228 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -27,10 +27,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } attr_accessor :role @@ -39,11 +39,11 @@ module OpenAI attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. 
# - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -124,10 +124,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. module Role extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index 359c70cd..f5e5b29b 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -21,7 +21,7 @@ module OpenAI attr_accessor :object # Represents a message delta i.e. any changed fields on a message during - # streaming. + # streaming. sig do params( id: String, diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 66577b99..11ea50e8 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -9,9 +9,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -19,9 +19,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -29,7 +29,7 @@ module OpenAI attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -37,7 +37,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } attr_reader :order @@ -80,7 +80,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 90b4db86..09e67673 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -12,11 +12,11 @@ module OpenAI attr_accessor :thread_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 6aa38a1e..61ec2da8 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -6,9 +6,9 @@ module OpenAI module Threads class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call. This ID must be referenced when you submit the tool - # outputs in using the - # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) - # endpoint. + # outputs in using the + # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # endpoint. sig { returns(String) } attr_accessor :id @@ -25,7 +25,7 @@ module OpenAI attr_writer :function # The type of tool call the output is required for. For now, this is always - # `function`. + # `function`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index 7170660e..fd1569df 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -10,8 +10,8 @@ module OpenAI attr_accessor :id # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for - # execution of this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # execution of this run. sig { returns(String) } attr_accessor :assistant_id @@ -36,7 +36,7 @@ module OpenAI attr_accessor :failed_at # Details on why the run is incomplete. Will be `null` if the run is not - # incomplete. + # incomplete. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) } attr_reader :incomplete_details @@ -49,8 +49,8 @@ module OpenAI attr_writer :incomplete_details # The instructions that the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for - # this run. 
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. sig { returns(String) } attr_accessor :instructions @@ -67,27 +67,27 @@ module OpenAI attr_writer :last_error # The maximum number of completion tokens specified to have been used over the - # course of the run. + # course of the run. sig { returns(T.nilable(Integer)) } attr_accessor :max_completion_tokens # The maximum number of prompt tokens specified to have been used over the course - # of the run. + # of the run. sig { returns(T.nilable(Integer)) } attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # The model that the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for - # this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. sig { returns(String) } attr_accessor :model @@ -96,13 +96,13 @@ module OpenAI attr_accessor :object # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T::Boolean) } attr_accessor :parallel_tool_calls # Details on the action required to continue the run. Will be `null` if no action - # is required. + # is required. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) } attr_reader :required_action @@ -115,25 +115,25 @@ module OpenAI attr_writer :required_action # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. 
# - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -153,23 +153,23 @@ module OpenAI attr_accessor :started_at # The status of the run, which can be either `queued`, `in_progress`, - # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, - # `incomplete`, or `expired`. + # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + # `incomplete`, or `expired`. sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } attr_accessor :status # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # that was executed on as a part of this run. + # that was executed on as a part of this run. sig { returns(String) } attr_accessor :thread_id # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. sig do returns( T.nilable( @@ -183,8 +183,8 @@ module OpenAI attr_accessor :tool_choice # The list of tools that the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for - # this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. sig do returns( T::Array[ @@ -199,7 +199,7 @@ module OpenAI attr_accessor :tools # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) } attr_reader :truncation_strategy @@ -212,7 +212,7 @@ module OpenAI attr_writer :truncation_strategy # Usage statistics related to the run. 
This value will be `null` if the run is not - # in a terminal state (i.e. `in_progress`, `queued`, etc.). + # in a terminal state (i.e. `in_progress`, `queued`, etc.). sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } attr_reader :usage @@ -228,7 +228,7 @@ module OpenAI attr_accessor :top_p # Represents an execution run on a - # [thread](https://platform.openai.com/docs/api-reference/threads). + # [thread](https://platform.openai.com/docs/api-reference/threads). sig do params( id: String, @@ -367,7 +367,7 @@ module OpenAI class IncompleteDetails < OpenAI::Internal::Type::BaseModel # The reason why the run is incomplete. This will point to which specific token - # limit was reached over the course of the run. + # limit was reached over the course of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } attr_reader :reason @@ -375,7 +375,7 @@ module OpenAI attr_writer :reason # Details on why the run is incomplete. Will be `null` if the run is not - # incomplete. + # incomplete. sig do params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) @@ -386,7 +386,7 @@ module OpenAI def to_hash; end # The reason why the run is incomplete. This will point to which specific token - # limit was reached over the course of the run. + # limit was reached over the course of the run. module Reason extend OpenAI::Internal::Type::Enum @@ -464,7 +464,7 @@ module OpenAI attr_accessor :type # Details on the action required to continue the run. Will be `null` if no action - # is required. + # is required. sig do params( submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Internal::AnyHash), @@ -503,19 +503,19 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } attr_accessor :type # The number of most recent messages from the thread when constructing the context - # for the run. + # for the run. sig { returns(T.nilable(Integer)) } attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. sig do params( type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, @@ -537,9 +537,9 @@ module OpenAI def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. 
When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. module Type extend OpenAI::Internal::Type::Enum @@ -571,7 +571,7 @@ module OpenAI attr_accessor :total_tokens # Usage statistics related to the run. This value will be `null` if the run is not - # in a terminal state (i.e. `in_progress`, `queued`, etc.). + # in a terminal state (i.e. `in_progress`, `queued`, etc.). sig do params( completion_tokens: Integer, diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 58c097df..bfa2e42d 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -9,18 +9,18 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. sig { returns(String) } attr_accessor :assistant_id # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } attr_reader :include @@ -28,8 +28,8 @@ module OpenAI attr_writer :include # Appends additional instructions at the end of the instructions for the run. This - # is useful for modifying the behavior on a per-run basis without overriding other - # instructions. + # is useful for modifying the behavior on a per-run basis without overriding other + # instructions. sig { returns(T.nilable(String)) } attr_accessor :additional_instructions @@ -38,46 +38,46 @@ module OpenAI attr_accessor :additional_messages # Overrides the - # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) - # of the assistant. This is useful for modifying the behavior on a per-run basis. + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. sig { returns(T.nilable(String)) } attr_accessor :instructions # The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. 
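
Putting the two budget fields together, a hedged sketch of run parameters that cap token usage. The ID is a placeholder; when either cap is exceeded, the run ends with status `incomplete` and `incomplete_details.reason` names the limit that was hit:

run_params = {
  assistant_id: "asst_abc123",   # placeholder
  max_prompt_tokens: 2_000,      # cap on prompt tokens across all turns of the run
  max_completion_tokens: 1_000   # cap on completion tokens across all turns of the run
}
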
sig { returns(T.nilable(Integer)) } attr_accessor :max_completion_tokens # The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. sig { returns(T.nilable(Integer)) } attr_accessor :max_prompt_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T.nilable(T::Boolean)) } attr_reader :parallel_tool_calls @@ -86,33 +86,33 @@ module OpenAI # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. 
+ # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. sig do returns( T.nilable( @@ -128,18 +128,18 @@ module OpenAI attr_accessor :response_format # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. sig { returns(T.nilable(Float)) } attr_accessor :temperature # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. 
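The `tool_choice` values described above map onto request payloads like the following sketch (reusing the `client` from the earlier sketch; the function name is a placeholder):

    run = client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      # "none", "auto", and "required" are also accepted; naming a function
      # forces the model to call that exact tool.
      tool_choice: {type: "function", function: {name: "get_weather"}}
    )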
sig do returns( T.nilable( @@ -153,7 +153,7 @@ module OpenAI attr_accessor :tool_choice # Override the tools the assistant can use for this run. This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. sig do returns( T.nilable( @@ -170,15 +170,15 @@ module OpenAI attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or temperature but not both. + # We generally recommend altering this or temperature but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) } attr_reader :truncation_strategy @@ -329,10 +329,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } attr_accessor :role @@ -345,11 +345,11 @@ module OpenAI attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -433,10 +433,10 @@ module OpenAI # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. 
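The two roles above are most often used to seed a thread with prior turns. A hedged sketch using `additional_messages` (same assumed client and placeholder IDs):

    run = client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      additional_messages: [
        {role: "user", content: "What did we decide yesterday?"},
        # `assistant` splices a prior model turn into the conversation.
        {role: "assistant", content: "We agreed to ship on Friday."}
      ]
    )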
module Role extend OpenAI::Internal::Type::Enum @@ -558,9 +558,9 @@ module OpenAI end # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. module Model extend OpenAI::Internal::Type::Union @@ -570,19 +570,19 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } attr_accessor :type # The number of most recent messages from the thread when constructing the context - # for the run. + # for the run. sig { returns(T.nilable(Integer)) } attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the intial context window of the run. sig do params( type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, @@ -604,9 +604,9 @@ module OpenAI def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. module Type extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index e266f66c..0d5a35b2 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -9,9 +9,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -19,9 +19,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. 
For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -29,7 +29,7 @@ module OpenAI attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -37,7 +37,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } attr_reader :order @@ -71,7 +71,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index 5de3efc2..b9c1490b 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -5,8 +5,8 @@ module OpenAI module Beta module Threads # The status of the run, which can be either `queued`, `in_progress`, - # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, - # `incomplete`, or `expired`. + # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, + # `incomplete`, or `expired`. module RunStatus extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 2ae4e56d..e347bb7b 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -46,7 +46,7 @@ module OpenAI attr_writer :output # The ID of the tool call in the `required_action` object within the run object - # the output is being submitted for. + # the output is being submitted for. sig { returns(T.nilable(String)) } attr_reader :tool_call_id diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 28bb34de..9c4e4bb2 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -12,11 +12,11 @@ module OpenAI attr_accessor :thread_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
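The `after`/`before` cursor mechanics described above work as in this sketch, assuming the generated `client.beta.threads.runs.list` resource and a page object that exposes `data`:

    first_page = client.beta.threads.runs.list("thread_abc123", limit: 20, order: :desc)
    cursor = first_page.data.last.id
    # Pass the last object ID back as `after` to fetch the next page;
    # `before` walks the list in the opposite direction.
    next_page = client.beta.threads.runs.list(
      "thread_abc123",
      limit: 20,
      order: :desc,
      after: cursor
    )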
sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 72557474..3cc75c96 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -45,7 +45,7 @@ module OpenAI class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the - # image. + # image. sig { returns(T.nilable(String)) } attr_reader :file_id diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 82827731..898db2f2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -26,7 +26,7 @@ module OpenAI attr_writer :code_interpreter # The type of tool call. This is always going to be `code_interpreter` for this - # type of tool call. + # type of tool call. sig { returns(Symbol) } attr_accessor :type @@ -62,8 +62,8 @@ module OpenAI attr_accessor :input # The outputs from the Code Interpreter tool call. Code Interpreter can output one - # or more items, including text (`logs`) or images (`image`). Each of these are - # represented by a different object type. + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. sig do returns( T::Array[ @@ -177,7 +177,7 @@ module OpenAI class Image < OpenAI::Internal::Type::BaseModel # The [file](https://platform.openai.com/docs/api-reference/files) ID of the - # image. + # image. sig { returns(String) } attr_accessor :file_id diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 0864612a..06162584 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -11,7 +11,7 @@ module OpenAI attr_accessor :index # The type of tool call. This is always going to be `code_interpreter` for this - # type of tool call. + # type of tool call. sig { returns(Symbol) } attr_accessor :type @@ -74,8 +74,8 @@ module OpenAI attr_writer :input # The outputs from the Code Interpreter tool call. Code Interpreter can output one - # or more items, including text (`logs`) or images (`image`). Each of these are - # represented by a different object type. + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. sig do returns( T.nilable( diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 871e455f..b11ca61e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -23,7 +23,7 @@ module OpenAI attr_writer :file_search # The type of tool call. This is always going to be `file_search` for this type of - # tool call. + # tool call. sig { returns(Symbol) } attr_accessor :type @@ -109,7 +109,7 @@ module OpenAI class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. 
If not specified will use the `auto` - # ranker. + # ranker. sig do returns( OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol @@ -118,7 +118,7 @@ module OpenAI attr_accessor :ranker # The score threshold for the file search. All values must be a floating point - # number between 0 and 1. + # number between 0 and 1. sig { returns(Float) } attr_accessor :score_threshold @@ -144,7 +144,7 @@ module OpenAI def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` - # ranker. + # ranker. module Ranker extend OpenAI::Internal::Type::Enum @@ -190,12 +190,12 @@ module OpenAI attr_accessor :file_name # The score of the result. All values must be a floating point number between 0 - # and 1. + # and 1. sig { returns(Float) } attr_accessor :score # The content of the result that was found. The content is only included if - # requested via the include query parameter. + # requested via the include query parameter. sig do returns( T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 2714a161..80a2db89 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -15,7 +15,7 @@ module OpenAI attr_accessor :index # The type of tool call. This is always going to be `file_search` for this type of - # tool call. + # tool call. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 2454344b..12d00e7e 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -23,7 +23,7 @@ module OpenAI attr_writer :function # The type of tool call. This is always going to be `function` for this type of - # tool call. + # tool call. sig { returns(Symbol) } attr_accessor :type @@ -55,8 +55,8 @@ module OpenAI attr_accessor :name # The output of the function. This will be `null` if the outputs have not been - # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) - # yet. + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. sig { returns(T.nilable(String)) } attr_accessor :output diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index f275cddb..df5bcf34 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -11,7 +11,7 @@ module OpenAI attr_accessor :index # The type of tool call. This is always going to be `function` for this type of - # tool call. + # tool call. sig { returns(Symbol) } attr_accessor :type @@ -74,8 +74,8 @@ module OpenAI attr_writer :name # The output of the function. This will be `null` if the outputs have not been - # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) - # yet. + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. 
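Once the function call above has been executed client-side, its result goes back through the submit-tool-outputs endpoint. A sketch assuming a generated `submit_tool_outputs` method with the run ID positional and the thread ID as a keyword (that argument order is an assumption):

    client.beta.threads.runs.submit_tool_outputs(
      "run_abc123",
      thread_id: "thread_abc123",
      tool_outputs: [
        # Pair each output with the `tool_call_id` from the run's `required_action`.
        {tool_call_id: "call_abc123", output: "{\"temperature\": 21}"}
      ]
    )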
sig { returns(T.nilable(String)) } attr_accessor :output diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index a5ddcb52..7519ba3f 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -11,8 +11,8 @@ module OpenAI attr_accessor :id # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) - # associated with the run step. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) + # associated with the run step. sig { returns(String) } attr_accessor :assistant_id @@ -29,7 +29,7 @@ module OpenAI attr_accessor :created_at # The Unix timestamp (in seconds) for when the run step expired. A step is - # considered expired if the parent run is expired. + # considered expired if the parent run is expired. sig { returns(T.nilable(Integer)) } attr_accessor :expired_at @@ -38,7 +38,7 @@ module OpenAI attr_accessor :failed_at # The last error associated with this run step. Will be `null` if there are no - # errors. + # errors. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) } attr_reader :last_error @@ -51,11 +51,11 @@ module OpenAI attr_writer :last_error # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -64,12 +64,12 @@ module OpenAI attr_accessor :object # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that - # this run step is a part of. + # this run step is a part of. sig { returns(String) } attr_accessor :run_id # The status of the run step, which can be either `in_progress`, `cancelled`, - # `failed`, `completed`, or `expired`. + # `failed`, `completed`, or `expired`. sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } attr_accessor :status @@ -85,7 +85,7 @@ module OpenAI attr_accessor :step_details # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # that was run. + # that was run. sig { returns(String) } attr_accessor :thread_id @@ -94,7 +94,7 @@ module OpenAI attr_accessor :type # Usage statistics related to the run step. This value will be `null` while the - # run step's status is `in_progress`. + # run step's status is `in_progress`. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) } attr_reader :usage @@ -188,7 +188,7 @@ module OpenAI attr_accessor :message # The last error associated with this run step. Will be `null` if there are no - # errors. + # errors. sig do params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) @@ -223,7 +223,7 @@ module OpenAI end # The status of the run step, which can be either `in_progress`, `cancelled`, - # `failed`, `completed`, or `expired`. + # `failed`, `completed`, or `expired`. 
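A short sketch of inspecting the step statuses enumerated above, assuming a `steps` resource nested under runs (path and argument order are assumptions; IDs are placeholders):

    steps = client.beta.threads.runs.steps.list("run_abc123", thread_id: "thread_abc123")
    steps.data.each do |step|
      # `in_progress` and `cancelling` are transient; the others are terminal.
      puts "#{step.id}: #{step.status}"
    end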
module Status extend OpenAI::Internal::Type::Enum @@ -284,7 +284,7 @@ module OpenAI attr_accessor :total_tokens # Usage statistics related to the run step. This value will be `null` while the - # run step's status is `in_progress`. + # run step's status is `in_progress`. sig do params( completion_tokens: Integer, diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index 4fb76535..e1124389 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -22,7 +22,7 @@ module OpenAI attr_accessor :object # Represents a run step delta i.e. any changed fields on a run step during - # streaming. + # streaming. sig do params( id: String, diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index d9609bca..abba06e2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -13,9 +13,9 @@ module OpenAI attr_accessor :thread_id # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -23,9 +23,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -33,12 +33,12 @@ module OpenAI attr_writer :before # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } attr_reader :include @@ -46,7 +46,7 @@ module OpenAI attr_writer :include # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -54,7 +54,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } attr_reader :order @@ -101,7 +101,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index 827fe79e..c4b7c54d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -16,12 +16,12 @@ module OpenAI attr_accessor :run_id # A list of additional fields to include in the response. Currently the only - # supported value is `step_details.tool_calls[*].file_search.results[*].content` - # to fetch the file search result content. + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } attr_reader :include diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 9b96f0e3..796b549d 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -11,8 +11,8 @@ module OpenAI attr_accessor :type # An array of tool calls the run step was involved in. These can be associated - # with one of three types of tools: `code_interpreter`, `file_search`, or - # `function`. + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. sig do returns( T.nilable( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 7eb65800..34fdc65c 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -7,8 +7,8 @@ module OpenAI module Runs class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # An array of tool calls the run step was involved in. These can be associated - # with one of three types of tools: `code_interpreter`, `file_search`, or - # `function`. + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. sig do returns( T::Array[ diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index c871533c..e1e91dfc 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -9,7 +9,7 @@ module OpenAI attr_accessor :id # A list of chat completion choices. Can be more than one if `n` is greater - # than 1. + # than 1. 
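Requesting more than one choice, as the comment above allows, looks like this (reusing the assumed `client`; the model name is illustrative):

    completion = client.chat.completions.create(
      model: "gpt-4o",
      n: 2, # ask for two candidate completions
      messages: [{role: "user", content: "Give me a tagline for a bakery."}]
    )
    completion.choices.each { |choice| puts choice.message.content }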
sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) } attr_accessor :choices @@ -31,8 +31,8 @@ module OpenAI # This fingerprint represents the backend configuration that the model runs with. # - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. sig { returns(T.nilable(String)) } attr_reader :system_fingerprint @@ -47,7 +47,7 @@ module OpenAI attr_writer :usage # Represents a chat completion response returned by model, based on the provided - # input. + # input. sig do params( id: String, @@ -90,11 +90,11 @@ module OpenAI class Choice < OpenAI::Internal::Type::BaseModel # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } attr_accessor :finish_reason @@ -146,11 +146,11 @@ module OpenAI def to_hash; end # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. module FinishReason extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi index 8da1d510..436f9a60 100644 --- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -9,7 +9,7 @@ module OpenAI attr_accessor :role # Data about a previous audio response from the model. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) } attr_reader :audio @@ -24,7 +24,7 @@ module OpenAI attr_writer :audio # The contents of the assistant message. Required unless `tool_calls` or - # `function_call` is specified. + # `function_call` is specified. 
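A sketch of the assistant-message shape described above, replaying a prior model turn as conversation context:

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [
        {role: "user", content: "What's 2 + 2?"},
        {role: "assistant", content: "4"}, # earlier model output, echoed back
        {role: "user", content: "Double it."}
      ]
    )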
sig do returns( T.nilable( @@ -43,7 +43,7 @@ module OpenAI attr_accessor :content # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) } attr_reader :function_call @@ -58,7 +58,7 @@ module OpenAI attr_writer :function_call # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } attr_reader :name @@ -152,7 +152,7 @@ module OpenAI attr_accessor :id # Data about a previous audio response from the model. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { params(id: String).returns(T.attached_class) } def self.new(id:); end @@ -161,12 +161,12 @@ module OpenAI end # The contents of the assistant message. Required unless `tool_calls` or - # `function_call` is specified. + # `function_call` is specified. module Content extend OpenAI::Internal::Type::Union # Learn about - # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ArrayOfContentPart extend OpenAI::Internal::Type::Union @@ -204,9 +204,9 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(String) } attr_accessor :arguments @@ -215,7 +215,7 @@ module OpenAI attr_accessor :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:); end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi index 66867567..77f2a8a9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi @@ -9,12 +9,12 @@ module OpenAI attr_accessor :id # Base64 encoded audio bytes generated by the model, in the format specified in - # the request. + # the request. sig { returns(String) } attr_accessor :data # The Unix timestamp (in seconds) for when this audio response will no longer be - # accessible on the server for use in multi-turn conversations. + # accessible on the server for use in multi-turn conversations. sig { returns(Integer) } attr_accessor :expires_at @@ -23,8 +23,8 @@ module OpenAI attr_accessor :transcript # If the audio output modality is requested, this object contains data about the - # audio response from the model. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). 
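Requesting the audio modality and decoding the base64 payload described above, as a sketch (the model name is an assumption about an audio-capable model):

    require "base64"

    completion = client.chat.completions.create(
      model: "gpt-4o-audio-preview",
      modalities: ["text", "audio"],
      audio: {voice: "alloy", format: "wav"}, # required when "audio" is requested
      messages: [{role: "user", content: "Say hello."}]
    )
    audio = completion.choices.first.message.audio
    File.binwrite("hello.wav", Base64.decode64(audio.data)) # `data` is base64-encoded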
sig do params(id: String, data: String, expires_at: Integer, transcript: String).returns(T.attached_class) end diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index e1aa3707..0755aab8 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -5,18 +5,18 @@ module OpenAI module Chat class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, - # or `pcm16`. + # or `pcm16`. sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } attr_accessor :format_ # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. sig { returns(T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)) } attr_accessor :voice # Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig do params( format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, @@ -38,7 +38,7 @@ module OpenAI def to_hash; end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, - # or `pcm16`. + # or `pcm16`. module Format extend OpenAI::Internal::Type::Enum @@ -57,7 +57,7 @@ module OpenAI end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. module Voice extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index f8eb2225..a7aaf04d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -9,13 +9,13 @@ module OpenAI attr_accessor :id # A list of chat completion choices. Can contain more than one elements if `n` is - # greater than 1. Can also be empty for the last chunk if you set - # `stream_options: {"include_usage": true}`. + # greater than 1. Can also be empty for the last chunk if you set + # `stream_options: {"include_usage": true}`. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) } attr_accessor :choices # The Unix timestamp (in seconds) of when the chat completion was created. Each - # chunk has the same timestamp. + # chunk has the same timestamp. sig { returns(Integer) } attr_accessor :created @@ -32,8 +32,8 @@ module OpenAI attr_accessor :service_tier # This fingerprint represents the backend configuration that the model runs with. - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. sig { returns(T.nilable(String)) } attr_reader :system_fingerprint @@ -41,12 +41,12 @@ module OpenAI attr_writer :system_fingerprint # An optional field that will only be present when you set - # `stream_options: {"include_usage": true}` in your request. 
When present, it - # contains a null value **except for the last chunk** which contains the token - # usage statistics for the entire request. + # `stream_options: {"include_usage": true}` in your request. When present, it + # contains a null value **except for the last chunk** which contains the token + # usage statistics for the entire request. # - # **NOTE:** If the stream is interrupted or cancelled, you may not receive the - # final usage chunk which contains the total token usage for the request. + # **NOTE:** If the stream is interrupted or cancelled, you may not receive the + # final usage chunk which contains the total token usage for the request. sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } attr_reader :usage @@ -54,8 +54,8 @@ module OpenAI attr_writer :usage # Represents a streamed chunk of a chat completion response returned by the model, - # based on the provided input. - # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). + # based on the provided input. + # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). sig do params( id: String, @@ -108,11 +108,11 @@ module OpenAI attr_writer :delta # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } attr_accessor :finish_reason @@ -162,7 +162,7 @@ module OpenAI attr_accessor :content # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall)) } attr_reader :function_call @@ -225,9 +225,9 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(T.nilable(String)) } attr_reader :arguments @@ -242,7 +242,7 @@ module OpenAI attr_writer :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. 
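For readers migrating off the deprecated field above, the two shapes compare as follows (plain hashes; the function name and values are placeholders):

    legacy = {
      role: "assistant",
      function_call: {name: "get_weather", arguments: "{\"city\":\"Paris\"}"}
    }
    current = {
      role: "assistant",
      tool_calls: [
        # Same name/arguments, now wrapped with an ID and an explicit type,
        # which is what allows several calls in a single message.
        {id: "call_1", type: "function",
         function: {name: "get_weather", arguments: "{\"city\":\"Paris\"}"}}
      ]
    }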
sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments: nil, name: nil); end @@ -332,9 +332,9 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(T.nilable(String)) } attr_reader :arguments @@ -383,11 +383,11 @@ module OpenAI end # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, `content_filter` if - # content was omitted due to a flag from our content filters, `tool_calls` if the - # model called a tool, or `function_call` (deprecated) if the model called a - # function. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, `content_filter` if + # content was omitted due to a flag from our content filters, `tool_calls` if the + # model called a tool, or `function_call` (deprecated) if the model called a + # function. module FinishReason extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index 3f40e799..ba9fd403 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Chat # Learn about - # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ChatCompletionContentPart extend OpenAI::Internal::Type::Union @@ -25,7 +25,7 @@ module OpenAI attr_accessor :type # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text - # generation. + # generation. sig do params( file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::AnyHash), @@ -40,7 +40,7 @@ module OpenAI class File < OpenAI::Internal::Type::BaseModel # The base64 encoded file data, used when passing the file to the model as a - # string. + # string. sig { returns(T.nilable(String)) } attr_reader :file_data diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index 8f1d2dba..e7e3a922 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -40,7 +40,7 @@ module OpenAI attr_accessor :url # Specifies the detail level of the image. Learn more in the - # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
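The `detail` setting above rides along with an image content part, for example (the URL is a placeholder):

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{
        role: "user",
        content: [
          {type: "text", text: "What is in this image?"},
          # "low" trades fidelity for fewer input tokens; "high" does the reverse.
          {type: "image_url", image_url: {url: "https://example.com/cat.png", detail: "low"}}
        ]
      }]
    )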
sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } attr_reader :detail @@ -65,7 +65,7 @@ module OpenAI def to_hash; end # Specifies the detail level of the image. Learn more in the - # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index e2fea7d5..ee4dcfaf 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :type # Learn about - # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # [text inputs](https://platform.openai.com/docs/guides/text-generation). sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new(text:, type: :text); end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index d5eeea91..6e56c125 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :role # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } attr_reader :name @@ -21,8 +21,8 @@ module OpenAI attr_writer :name # Developer-provided instructions that the model should follow, regardless of - # messages sent by the user. With o1 models and newer, `developer` messages - # replace the previous `system` messages. + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. sig do params( content: T.any( diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index d63a2e1c..5dcea933 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -9,7 +9,7 @@ module OpenAI attr_accessor :name # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # to call that function. sig { params(name: String).returns(T.attached_class) } def self.new(name:); end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index 6c785895..f70217f5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -17,7 +17,7 @@ module OpenAI attr_accessor :role # Annotations for the message, when applicable, as when using the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation])) } attr_reader :annotations @@ -30,8 +30,8 @@ module OpenAI attr_writer :annotations # If the audio output modality is requested, this object contains data about the - # audio response from the model. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) } attr_reader :audio @@ -39,7 +39,7 @@ module OpenAI attr_writer :audio # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall)) } attr_reader :function_call @@ -173,9 +173,9 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. sig { returns(String) } attr_accessor :arguments @@ -184,7 +184,7 @@ module OpenAI attr_accessor :name # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # that should be called, as generated by the model. + # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } def self.new(arguments:, name:); end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi index 6cf5ae54..4c9296f5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_param.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Chat # Developer-provided instructions that the model should follow, regardless of - # messages sent by the user. With o1 models and newer, `developer` messages - # replace the previous `system` messages. + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. module ChatCompletionMessageParam extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index b4bfcbdd..85b1dc6e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -44,9 +44,9 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. 
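Because the model may emit invalid JSON or hallucinated parameters, as the comment above warns, arguments should be parsed defensively. A sketch, assuming a `completion` whose response included a tool call:

    require "json"

    tool_call = completion.choices.first.message.tool_calls.first
    begin
      args = JSON.parse(tool_call.function.arguments)
      # Validate against your own schema before dispatching the call.
      raise ArgumentError, "missing city" unless args.key?("city")
    rescue JSON::ParserError
      # Arguments are not guaranteed to be valid JSON; fail soft or re-prompt.
    end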
sig { returns(String) } attr_accessor :arguments diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index be71706f..041eea0a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -20,7 +20,7 @@ module OpenAI attr_accessor :type # Specifies a tool the model should use. Use to force the model to call a specific - # function. + # function. sig do params( function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::AnyHash), diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index c6f72148..a993e33a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -5,18 +5,18 @@ module OpenAI module Chat class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # The content that should be matched when generating a model response. If - # generated tokens would match this content, the entire model response can be - # returned much more quickly. + # generated tokens would match this content, the entire model response can be + # returned much more quickly. sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } attr_accessor :content # The type of the predicted content you want to provide. This type is currently - # always `content`. + # always `content`. sig { returns(Symbol) } attr_accessor :type # Static predicted output content, such as the content of a text file that is - # being regenerated. + # being regenerated. sig do params( content: T.any( @@ -38,8 +38,8 @@ module OpenAI def to_hash; end # The content that should be matched when generating a model response. If - # generated tokens would match this content, the entire model response can be - # returned much more quickly. + # generated tokens would match this content, the entire model response can be + # returned much more quickly. module Content extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 51d36cd9..771ab84e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -5,12 +5,12 @@ module OpenAI module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # If set, an additional chunk will be streamed before the `data: [DONE]` message. - # The `usage` field on this chunk shows the token usage statistics for the entire - # request, and the `choices` field will always be an empty array. + # The `usage` field on this chunk shows the token usage statistics for the entire + # request, and the `choices` field will always be an empty array. # - # All other chunks will also include a `usage` field, but with a null value. - # **NOTE:** If the stream is interrupted, you may not receive the final usage - # chunk which contains the total token usage for the request. + # All other chunks will also include a `usage` field, but with a null value. + # **NOTE:** If the stream is interrupted, you may not receive the final usage + # chunk which contains the total token usage for the request. 
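Putting `include_usage` together with a streaming request, as a sketch; the streaming entry point's exact name varies by SDK version, so `stream_raw` here is an assumption, taken to yield `ChatCompletionChunk` objects:

    stream = client.chat.completions.stream_raw(
      model: "gpt-4o",
      stream_options: {include_usage: true},
      messages: [{role: "user", content: "Count to five."}]
    )
    stream.each do |chunk|
      print chunk.choices.first&.delta&.content
      # Only the final chunk (with empty `choices`) carries a non-nil `usage`.
      puts "\ntotal tokens: #{chunk.usage.total_tokens}" if chunk.usage
    end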
sig { returns(T.nilable(T::Boolean)) } attr_reader :include_usage diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index 44c63247..b21a46af 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :role # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } attr_reader :name @@ -21,8 +21,8 @@ module OpenAI attr_writer :name # Developer-provided instructions that the model should follow, regardless of - # messages sent by the user. With o1 models and newer, use `developer` messages - # for this purpose instead. + # messages sent by the user. With o1 models and newer, use `developer` messages + # for this purpose instead. sig do params( content: T.any( diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 4763d686..0f4594a5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -9,21 +9,21 @@ module OpenAI attr_accessor :token # A list of integers representing the UTF-8 bytes representation of the token. - # Useful in instances where characters are represented by multiple tokens and - # their byte representations must be combined to generate the correct text - # representation. Can be `null` if there is no bytes representation for the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } attr_accessor :bytes # The log probability of this token, if it is within the top 20 most likely - # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very - # unlikely. + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. sig { returns(Float) } attr_accessor :logprob # List of the most likely tokens and their log probability, at this token - # position. In rare cases, there may be fewer than the number of requested - # `top_logprobs` returned. + # position. In rare cases, there may be fewer than the number of requested + # `top_logprobs` returned. sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) } attr_accessor :top_logprobs @@ -57,15 +57,15 @@ module OpenAI attr_accessor :token # A list of integers representing the UTF-8 bytes representation of the token. - # Useful in instances where characters are represented by multiple tokens and - # their byte representations must be combined to generate the correct text - # representation. Can be `null` if there is no bytes representation for the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. sig { returns(T.nilable(T::Array[Integer])) } attr_accessor :bytes # The log probability of this token, if it is within the top 20 most likely - # tokens. 
Otherwise, the value `-9999.0` is used to signify that the token is very - # unlikely. + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. sig { returns(Float) } attr_accessor :logprob diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index a840bdf9..b7979106 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -4,20 +4,20 @@ module OpenAI module Models module Chat # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tool and instead generates a message. `auto` means the model can - # pick between generating a message or calling one or more tools. `required` means - # the model must call one or more tools. Specifying a particular tool via - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. # - # `none` is the default when no tools are present. `auto` is the default if tools - # are present. + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union # `none` means the model will not call any tool and instead generates a message. - # `auto` means the model can pick between generating a message or calling one or - # more tools. `required` means the model must call one or more tools. + # `auto` means the model can pick between generating a message or calling one or + # more tools. `required` means the model must call one or more tools. module Auto extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 312f7dfb..24c806a5 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -27,7 +27,7 @@ module OpenAI attr_accessor :role # An optional name for the participant. Provides the model information to - # differentiate between participants of the same role. + # differentiate between participants of the same role. sig { returns(T.nilable(String)) } attr_reader :name @@ -35,7 +35,7 @@ module OpenAI attr_writer :name # Messages sent by an end user, containing prompts or additional context - # information. + # information. sig do params( content: T.any( diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 0afa33d1..c284fa11 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -8,11 +8,11 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A list of messages comprising the conversation so far. 
Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). sig do returns( T::Array[ @@ -30,16 +30,16 @@ module OpenAI attr_accessor :messages # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } attr_accessor :model # Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) } attr_reader :audio @@ -50,25 +50,25 @@ module OpenAI attr_writer :audio # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. sig { returns(T.nilable(Float)) } attr_accessor :frequency_penalty # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. # - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. sig do returns( T.nilable( @@ -95,7 +95,7 @@ module OpenAI # Deprecated in favor of `tools`. # - # A list of functions the model may generate JSON inputs for. + # A list of functions the model may generate JSON inputs for. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function])) } attr_reader :functions @@ -109,68 +109,68 @@ module OpenAI # Modify the likelihood of specified tokens appearing in the completion. 
# - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. sig { returns(T.nilable(T::Hash[Symbol, Integer])) } attr_accessor :logit_bias # Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. + # returns the log probabilities of each output token returned in the `content` of + # `message`. sig { returns(T.nilable(T::Boolean)) } attr_accessor :logprobs # An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } attr_accessor :max_completion_tokens # The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } attr_accessor :max_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: + # of generating text, which is the default: # - # `["text"]` + # `["text"]` # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). 
To request that - # this model generate both text and audio responses, you can use: + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: # - # `["text", "audio"]` + # `["text", "audio"]` sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } attr_accessor :modalities # How many chat completion choices to generate for each input message. Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. sig { returns(T.nilable(Integer)) } attr_accessor :n # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. sig { returns(T.nilable(T::Boolean)) } attr_reader :parallel_tool_calls @@ -178,7 +178,7 @@ module OpenAI attr_writer :parallel_tool_calls # Static predicted output content, such as the content of a text file that is - # being regenerated. + # being regenerated. sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) } attr_reader :prediction @@ -191,30 +191,30 @@ module OpenAI attr_writer :prediction # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. sig { returns(T.nilable(Float)) } attr_accessor :presence_penalty # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # An object specifying the format that the model must output. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. 
+ # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. sig do returns( T.nilable( @@ -242,38 +242,38 @@ module OpenAI attr_writer :response_format # This feature is in Beta. If specified, our system will make a best effort to - # sample deterministically, such that repeated requests with the same `seed` and - # parameters should return the same result. Determinism is not guaranteed, and you - # should refer to the `system_fingerprint` response parameter to monitor changes - # in the backend. + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. sig { returns(T.nilable(Integer)) } attr_accessor :seed # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # relevant for customers subscribed to the scale tier service: # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. - # - When not set, the default behavior is 'auto'. + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When this parameter is set, the response body will include the `service_tier` + # utilized. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } attr_accessor :service_tier # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } attr_accessor :stop # Whether or not to store the output of this chat completion request for use in - # our [model distillation](https://platform.openai.com/docs/guides/distillation) - # or [evals](https://platform.openai.com/docs/guides/evals) products. + # our [model distillation](https://platform.openai.com/docs/guides/distillation) + # or [evals](https://platform.openai.com/docs/guides/evals) products. sig { returns(T.nilable(T::Boolean)) } attr_accessor :store @@ -290,21 +290,21 @@ module OpenAI attr_writer :stream_options # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. 
We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. sig { returns(T.nilable(Float)) } attr_accessor :temperature # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tool and instead generates a message. `auto` means the model can - # pick between generating a message or calling one or more tools. `required` means - # the model must call one or more tools. Specifying a particular tool via - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tool and instead generates a message. `auto` means the model can + # pick between generating a message or calling one or more tools. `required` means + # the model must call one or more tools. Specifying a particular tool via + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. # - # `none` is the default when no tools are present. `auto` is the default if tools - # are present. + # `none` is the default when no tools are present. `auto` is the default if tools + # are present. sig do returns( T.nilable( @@ -330,8 +330,8 @@ module OpenAI attr_writer :tool_choice # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) } attr_reader :tools @@ -339,22 +339,22 @@ module OpenAI attr_writer :tools # An integer between 0 and 20 specifying the number of most likely tokens to - # return at each token position, each with an associated log probability. - # `logprobs` must be set to `true` if this parameter is used. + # return at each token position, each with an associated log probability. + # `logprobs` must be set to `true` if this parameter is used. sig { returns(T.nilable(Integer)) } attr_accessor :top_logprobs # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -362,8 +362,8 @@ module OpenAI attr_writer :user # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
+ # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions)) } attr_reader :web_search_options @@ -527,10 +527,10 @@ module OpenAI def to_hash; end # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. module Model extend OpenAI::Internal::Type::Union @@ -540,24 +540,24 @@ module OpenAI # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. # - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. module FunctionCall extend OpenAI::Internal::Type::Union # `none` means the model will not call a function and instead generates a message. - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. module FunctionCallMode extend OpenAI::Internal::Type::Enum @@ -597,12 +597,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain - # underscores and dashes, with a maximum length of 64. + # underscores and dashes, with a maximum length of 64. sig { returns(String) } attr_accessor :name # A description of what the function does, used by the model to choose when and - # how to call the function. + # how to call the function. sig { returns(T.nilable(String)) } attr_reader :description @@ -610,12 +610,12 @@ module OpenAI attr_writer :description # The parameters the functions accepts, described as a JSON Schema object. See the - # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, - # and the - # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - # documentation about the format. + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. # - # Omitting `parameters` defines a function with an empty parameter list. + # Omitting `parameters` defines a function with an empty parameter list. 
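Concretely, a function definition under the shape typed above is a name plus an optional description and a JSON Schema `parameters` object; leaving `parameters` out declares a zero-argument function. An illustrative definition (the weather lookup is hypothetical):

    get_weather = {
      name: "get_weather",
      description: "Look up the current weather for a city",
      parameters: {
        type: "object",
        properties: {
          city: {type: "string", description: "City name, e.g. Paris"}
        },
        required: ["city"] # JSON Schema, per the doc comment above
      }
    }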
sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } attr_reader :parameters @@ -650,14 +650,14 @@ module OpenAI # An object specifying the format that the model must output. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. module ResponseFormat extend OpenAI::Internal::Type::Union @@ -671,19 +671,19 @@ module OpenAI end # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # relevant for customers subscribed to the scale tier service: # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. - # - When not set, the default behavior is 'auto'. + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When this parameter is set, the response body will include the `service_tier` + # utilized. module ServiceTier extend OpenAI::Internal::Type::Enum @@ -699,7 +699,7 @@ module OpenAI end # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. module Stop extend OpenAI::Internal::Type::Union @@ -711,7 +711,7 @@ module OpenAI class WebSearchOptions < OpenAI::Internal::Type::BaseModel # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. 
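As a usage sketch, `search_context_size` is the only knob here besides `user_location`, and omitting it falls back to `medium`. The request shape below mirrors the params typed in this file; the surrounding client call is assumed:

    params = {
      model: "gpt-4o",
      messages: [{role: "user", content: "What changed in Ruby 3.4?"}],
      web_search_options: {
        search_context_size: :low # `medium` is the default when omitted
      }
    }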
sig do returns( T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) @@ -745,8 +745,8 @@ module OpenAI attr_writer :user_location # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, @@ -773,7 +773,7 @@ module OpenAI def to_hash; end # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -866,7 +866,7 @@ module OpenAI attr_writer :city # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of - # the user, e.g. `US`. + # the user, e.g. `US`. sig { returns(T.nilable(String)) } attr_reader :country @@ -881,7 +881,7 @@ module OpenAI attr_writer :region # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the - # user, e.g. `America/Los_Angeles`. + # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } attr_reader :timezone diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index c90d051b..eec03610 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -23,7 +23,7 @@ module OpenAI # A list of metadata keys to filter the Chat Completions by. Example: # - # `metadata[key1]=value1&metadata[key2]=value2` + # `metadata[key1]=value1&metadata[key2]=value2` sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -35,7 +35,7 @@ module OpenAI attr_writer :model # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. + # `desc` for descending order. Defaults to `asc`. sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } attr_reader :order @@ -71,7 +71,7 @@ module OpenAI def to_hash; end # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. + # `desc` for descending order. Defaults to `asc`. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index 49a8c260..b85b54da 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -8,11 +8,11 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. 
Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 85aa9aba..8a39c287 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -23,7 +23,7 @@ module OpenAI attr_writer :limit # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. + # for descending order. Defaults to `asc`. sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } attr_reader :order @@ -55,7 +55,7 @@ module OpenAI def to_hash; end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. + # for descending order. Defaults to `asc`. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 2129e61f..15d111b8 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -9,22 +9,22 @@ module OpenAI # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # - # - `eq`: equals - # - `ne`: not equal - # - `gt`: greater than - # - `gte`: greater than or equal - # - `lt`: less than - # - `lte`: less than or equal + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } attr_accessor :type # The value to compare against the attribute key; supports string, number, or - # boolean types. + # boolean types. sig { returns(T.any(String, Float, T::Boolean)) } attr_accessor :value # A filter used to compare a specified attribute key to a given value using a - # defined comparison operation. + # defined comparison operation. sig do params( key: String, @@ -45,12 +45,12 @@ module OpenAI # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # - # - `eq`: equals - # - `ne`: not equal - # - `gt`: greater than - # - `gte`: greater than or equal - # - `lt`: less than - # - `lte`: less than or equal + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal module Type extend OpenAI::Internal::Type::Enum @@ -69,7 +69,7 @@ module OpenAI end # The value to compare against the attribute key; supports string, number, or - # boolean types. + # boolean types. module Value extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index d24d8268..aa47ab99 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -25,8 +25,8 @@ module OpenAI # This fingerprint represents the backend configuration that the model runs with. # - # Can be used in conjunction with the `seed` request parameter to understand when - # backend changes have been made that might impact determinism. + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. 
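Since determinism with `seed` is best-effort, `system_fingerprint` is the signal to watch across requests. A sketch, assuming a configured `client` that exposes the generated completions resource:

    # Compare fingerprints across identical seeded requests; a change means
    # the backend configuration moved and outputs may legitimately differ.
    fingerprints = 2.times.map do
      client.completions.create(
        model: "gpt-3.5-turbo-instruct",
        prompt: "Say hi",
        seed: 42
      ).system_fingerprint
    end
    warn "backend configuration changed" if fingerprints.uniq.size > 1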
sig { returns(T.nilable(String)) } attr_reader :system_fingerprint @@ -41,7 +41,7 @@ module OpenAI attr_writer :usage # Represents a completion response from the API. Note: both the streamed and - # non-streamed response objects share the same shape (unlike the chat endpoint). + # non-streamed response objects share the same shape (unlike the chat endpoint). sig do params( id: String, diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 9833bea3..375563ef 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -4,9 +4,9 @@ module OpenAI module Models class CompletionChoice < OpenAI::Internal::Type::BaseModel # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, or `content_filter` if - # content was omitted due to a flag from our content filters. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } attr_accessor :finish_reason @@ -50,9 +50,9 @@ module OpenAI def to_hash; end # The reason the model stopped generating tokens. This will be `stop` if the model - # hit a natural stop point or a provided stop sequence, `length` if the maximum - # number of tokens specified in the request was reached, or `content_filter` if - # content was omitted due to a flag from our content filters. + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. module FinishReason extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index 01701e36..cdda2364 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -7,33 +7,33 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } attr_accessor :model # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. 
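The doc comment above describes four accepted `prompt` encodings; spelled out as plain Ruby values (the token IDs are illustrative):

    prompts = [
      "Write a haiku",                       # a single string
      ["Write a haiku", "Write a limerick"], # an array of strings
      [1212, 318, 257, 1332],                # one array of token IDs
      [[1212, 318], [257, 1332]]             # an array of token arrays
    ]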
sig do returns(T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]))) end attr_accessor :prompt # Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. + # the highest log probability per token). Results cannot be streamed. # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. sig { returns(T.nilable(Integer)) } attr_accessor :best_of @@ -42,74 +42,74 @@ module OpenAI attr_accessor :echo # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) sig { returns(T.nilable(Float)) } attr_accessor :frequency_penalty # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the GPT - # tokenizer) to an associated bias value from -100 to 100. You can use this - # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. - # Mathematically, the bias is added to the logits generated by the model prior to - # sampling. The exact effect will vary per model, but values between -1 and 1 - # should decrease or increase likelihood of selection; values like -100 or 100 - # should result in a ban or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. # - # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token - # from being generated. + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. sig { returns(T.nilable(T::Hash[Symbol, Integer])) } attr_accessor :logit_bias # Include the log probabilities on the `logprobs` most likely output tokens, as - # well the chosen tokens. For example, if `logprobs` is 5, the API will return a - # list of the 5 most likely tokens. 
The API will always return the `logprob` of - # the sampled token, so there may be up to `logprobs+1` elements in the response. + # well the chosen tokens. For example, if `logprobs` is 5, the API will return a + # list of the 5 most likely tokens. The API will always return the `logprob` of + # the sampled token, so there may be up to `logprobs+1` elements in the response. # - # The maximum value for `logprobs` is 5. + # The maximum value for `logprobs` is 5. sig { returns(T.nilable(Integer)) } attr_accessor :logprobs # The maximum number of [tokens](/tokenizer) that can be generated in the - # completion. + # completion. # - # The token count of your prompt plus `max_tokens` cannot exceed the model's - # context length. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. + # The token count of your prompt plus `max_tokens` cannot exceed the model's + # context length. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. sig { returns(T.nilable(Integer)) } attr_accessor :max_tokens # How many completions to generate for each prompt. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. sig { returns(T.nilable(Integer)) } attr_accessor :n # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) sig { returns(T.nilable(Float)) } attr_accessor :presence_penalty # If specified, our system will make a best effort to sample deterministically, - # such that repeated requests with the same `seed` and parameters should return - # the same result. + # such that repeated requests with the same `seed` and parameters should return + # the same result. # - # Determinism is not guaranteed, and you should refer to the `system_fingerprint` - # response parameter to monitor changes in the backend. + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. sig { returns(T.nilable(Integer)) } attr_accessor :seed # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } attr_accessor :stop @@ -127,29 +127,29 @@ module OpenAI # The suffix that comes after a completion of inserted text. # - # This parameter is only supported for `gpt-3.5-turbo-instruct`. + # This parameter is only supported for `gpt-3.5-turbo-instruct`. sig { returns(T.nilable(String)) } attr_accessor :suffix # What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # - # We generally recommend altering this or `top_p` but not both. + # We generally recommend altering this or `top_p` but not both. sig { returns(T.nilable(Float)) } attr_accessor :temperature # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -234,10 +234,10 @@ module OpenAI def to_hash; end # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -255,11 +255,11 @@ module OpenAI end # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. module Prompt extend OpenAI::Internal::Type::Union @@ -278,7 +278,7 @@ module OpenAI end # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. module Stop extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index e0fbddc4..2872756d 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -73,7 +73,7 @@ module OpenAI class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # When using Predicted Outputs, the number of tokens in the prediction that - # appeared in the completion. + # appeared in the completion. 
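Together with the `rejected_prediction_tokens` counter documented just below, this makes the prediction hit rate easy to derive. A sketch, assuming `usage` is a parsed `CompletionUsage`:

    # Rejected prediction tokens are still billed, so a low hit rate means
    # Predicted Outputs may be costing more than it saves.
    if (details = usage.completion_tokens_details)
      accepted = details.accepted_prediction_tokens.to_i # nil => 0
      rejected = details.rejected_prediction_tokens.to_i
      total = accepted + rejected
      puts "prediction hit rate: #{(100.0 * accepted / total).round(1)}%" if total.positive?
    end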
sig { returns(T.nilable(Integer)) } attr_reader :accepted_prediction_tokens @@ -95,9 +95,9 @@ module OpenAI attr_writer :reasoning_tokens # When using Predicted Outputs, the number of tokens in the prediction that did - # not appear in the completion. However, like reasoning tokens, these tokens are - # still counted in the total completion tokens for purposes of billing, output, - # and context window limits. + # not appear in the completion. However, like reasoning tokens, these tokens are + # still counted in the total completion tokens for purposes of billing, output, + # and context window limits. sig { returns(T.nilable(Integer)) } attr_reader :rejected_prediction_tokens diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 214f5113..5eee5410 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -4,7 +4,7 @@ module OpenAI module Models class CompoundFilter < OpenAI::Internal::Type::BaseModel # Array of filters to combine. Items can be `ComparisonFilter` or - # `CompoundFilter`. + # `CompoundFilter`. sig { returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) } attr_accessor :filters @@ -34,7 +34,7 @@ module OpenAI def to_hash; end # A filter used to compare a specified attribute key to a given value using a - # defined comparison operation. + # defined comparison operation. module Filter extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index 0567898b..ac1fc9ab 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -4,8 +4,8 @@ module OpenAI module Models class Embedding < OpenAI::Internal::Type::BaseModel # The embedding vector, which is a list of floats. The length of vector depends on - # the model as listed in the - # [embedding guide](https://platform.openai.com/docs/guides/embeddings). + # the model as listed in the + # [embedding guide](https://platform.openai.com/docs/guides/embeddings). sig { returns(T::Array[Float]) } attr_accessor :embedding diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 6e763395..0869c887 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -7,26 +7,26 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # Input text to embed, encoded as a string or array of tokens. To embed multiple - # inputs in a single request, pass an array of strings or array of token arrays. - # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 - # dimensions or less. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. 
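The `input` limits in the comment above (no empty strings, at most 2048 entries per array) are cheap to enforce client-side before spending tokens. A sketch:

    MAX_INPUTS = 2048 # max array length, per the doc comment above

    inputs = ["first passage", "second passage"]
    raise ArgumentError, "inputs must not contain empty strings" if inputs.any?(&:empty?)
    raise ArgumentError, "at most #{MAX_INPUTS} inputs per request" if inputs.size > MAX_INPUTS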
sig { returns(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) } attr_accessor :input # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } attr_accessor :model # The number of dimensions the resulting output embeddings should have. Only - # supported in `text-embedding-3` and later models. + # supported in `text-embedding-3` and later models. sig { returns(T.nilable(Integer)) } attr_reader :dimensions @@ -34,7 +34,7 @@ module OpenAI attr_writer :dimensions # The format to return the embeddings in. Can be either `float` or - # [`base64`](https://pypi.org/project/pybase64/). + # [`base64`](https://pypi.org/project/pybase64/). sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } attr_reader :encoding_format @@ -42,8 +42,8 @@ module OpenAI attr_writer :encoding_format # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -79,13 +79,13 @@ module OpenAI def to_hash; end # Input text to embed, encoded as a string or array of tokens. To embed multiple - # inputs in a single request, pass an array of strings or array of token arrays. - # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 - # dimensions or less. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. module Input extend OpenAI::Internal::Type::Union @@ -104,10 +104,10 @@ module OpenAI end # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. module Model extend OpenAI::Internal::Type::Union @@ -116,7 +116,7 @@ module OpenAI end # The format to return the embeddings in. 
Can be either `float` or - # [`base64`](https://pypi.org/project/pybase64/). + # [`base64`](https://pypi.org/project/pybase64/). module EncodingFormat extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/lib/openai/models/file_chunking_strategy_param.rbi index 346a7b5a..ca7b56c4 100644 --- a/rbi/lib/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/lib/openai/models/file_chunking_strategy_param.rbi @@ -3,7 +3,7 @@ module OpenAI module Models # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. module FileChunkingStrategyParam extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index fb50d427..c3d5af72 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -11,9 +11,9 @@ module OpenAI attr_accessor :file # The intended purpose of the uploaded file. One of: - `assistants`: Used in the - # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for - # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: - # Flexible file type for any purpose - `evals`: Used for eval data sets + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } attr_accessor :purpose diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index e89cec8f..daad5aea 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -7,9 +7,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -17,7 +17,7 @@ module OpenAI attr_writer :after # A limit on the number of objects to be returned. Limit can range between 1 and - # 10,000, and the default is 10,000. + # 10,000, and the default is 10,000. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -25,7 +25,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } attr_reader :order @@ -66,7 +66,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
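For orientation, here is a minimal usage sketch of the embedding and file parameters documented in the hunks above. It is illustrative only: this patch defines type signatures, not client methods, so the `OpenAI::Client` constructor and the `embeddings`/`files` resource calls below are assumptions about the generated client rather than anything this diff introduces.

require "openai"

# Assumed constructor; not part of this patch.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# EmbeddingCreateParams: `input` is a String, Array[String], or token array(s).
client.embeddings.create(
  model: "text-embedding-3-small",
  input: "The quick brown fox",
  dimensions: 256,          # only supported on `text-embedding-3` and later models
  encoding_format: :float   # or :base64
)

# FileCreateParams / FileListParams: upload a JSONL file, then page through files.
file = client.files.create(file: File.open("train.jsonl"), purpose: :"fine-tune")
client.files.list(limit: 100, order: :desc, after: file.id)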
module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 34dd1542..15e6c24d 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -24,13 +24,13 @@ module OpenAI attr_accessor :object # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } attr_accessor :purpose # Deprecated. The current status of the file, which can be either `uploaded`, - # `processed`, or `error`. + # `processed`, or `error`. sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } attr_accessor :status @@ -42,7 +42,7 @@ module OpenAI attr_writer :expires_at # Deprecated. For details on why a fine-tuning training file failed validation, - # see the `error` field on `fine_tuning.job`. + # see the `error` field on `fine_tuning.job`. sig { returns(T.nilable(String)) } attr_reader :status_details @@ -94,8 +94,8 @@ module OpenAI def to_hash; end # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. module Purpose extend OpenAI::Internal::Type::Enum @@ -115,7 +115,7 @@ module OpenAI end # Deprecated. The current status of the file, which can be either `uploaded`, - # `processed`, or `error`. + # `processed`, or `error`. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index a1e5347b..274edce7 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -3,9 +3,9 @@ module OpenAI module Models # The intended purpose of the uploaded file. One of: - `assistants`: Used in the - # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for - # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: - # Flexible file type for any purpose - `evals`: Used for eval data sets + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets module FilePurpose extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 666480f0..b12fca64 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :created_at # For fine-tuning jobs that have `failed`, this will contain more information on - # the cause of the failure. + # the cause of the failure. sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) } attr_reader :error @@ -26,17 +26,17 @@ module OpenAI attr_writer :error # The name of the fine-tuned model that is being created. The value will be null - # if the fine-tuning job is still running. + # if the fine-tuning job is still running. 
sig { returns(T.nilable(String)) } attr_accessor :fine_tuned_model # The Unix timestamp (in seconds) for when the fine-tuning job was finished. The - # value will be null if the fine-tuning job is still running. + # value will be null if the fine-tuning job is still running. sig { returns(T.nilable(Integer)) } attr_accessor :finished_at # The hyperparameters used for the fine-tuning job. This value will only be - # returned when running `supervised` jobs. + # returned when running `supervised` jobs. sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) } attr_reader :hyperparameters @@ -61,8 +61,8 @@ module OpenAI attr_accessor :organization_id # The compiled results file ID(s) for the fine-tuning job. You can retrieve the - # results with the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # results with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T::Array[String]) } attr_accessor :result_files @@ -71,28 +71,28 @@ module OpenAI attr_accessor :seed # The current status of the fine-tuning job, which can be either - # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } attr_accessor :status # The total number of billable tokens processed by this fine-tuning job. The value - # will be null if the fine-tuning job is still running. + # will be null if the fine-tuning job is still running. sig { returns(T.nilable(Integer)) } attr_accessor :trained_tokens # The file ID used for training. You can retrieve the training data with the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(String) } attr_accessor :training_file # The file ID used for validation. You can retrieve the validation results with - # the - # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). + # the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). sig { returns(T.nilable(String)) } attr_accessor :validation_file # The Unix timestamp (in seconds) for when the fine-tuning job is estimated to - # finish. The value will be null if the fine-tuning job is not running. + # finish. The value will be null if the fine-tuning job is not running. sig { returns(T.nilable(Integer)) } attr_accessor :estimated_finish @@ -101,11 +101,11 @@ module OpenAI attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -117,7 +117,7 @@ module OpenAI attr_writer :method_ # The `fine_tuning.job` object represents a fine-tuning job that has been created - # through the API. 
+ # through the API. sig do params( id: String, @@ -203,12 +203,12 @@ module OpenAI attr_accessor :message # The parameter that was invalid, usually `training_file` or `validation_file`. - # This field will be null if the failure was not parameter-specific. + # This field will be null if the failure was not parameter-specific. sig { returns(T.nilable(String)) } attr_accessor :param # For fine-tuning jobs that have `failed`, this will contain more information on - # the cause of the failure. + # the cause of the failure. sig { params(code: String, message: String, param: T.nilable(String)).returns(T.attached_class) } def self.new(code:, message:, param:); end @@ -218,7 +218,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -226,7 +226,7 @@ module OpenAI attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -234,7 +234,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -242,7 +242,7 @@ module OpenAI attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value will only be - # returned when running `supervised` jobs. + # returned when running `supervised` jobs. sig do params( batch_size: T.any(Symbol, Integer), @@ -266,7 +266,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -275,7 +275,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -284,7 +284,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. module NEpochs extend OpenAI::Internal::Type::Union @@ -294,7 +294,7 @@ module OpenAI end # The current status of the fine-tuning job, which can be either - # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. module Status extend OpenAI::Internal::Type::Enum @@ -394,7 +394,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -402,7 +402,7 @@ module OpenAI attr_writer :batch_size # The beta value for the DPO method. 
A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :beta @@ -410,7 +410,7 @@ module OpenAI attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -418,7 +418,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -451,7 +451,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -460,7 +460,7 @@ module OpenAI end # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. module Beta extend OpenAI::Internal::Type::Union @@ -469,7 +469,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -478,7 +478,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. module NEpochs extend OpenAI::Internal::Type::Union @@ -524,7 +524,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -532,7 +532,7 @@ module OpenAI attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -540,7 +540,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -571,7 +571,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -580,7 +580,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -589,7 +589,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. 
module NEpochs extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index 717d084a..ba0eda55 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -9,19 +9,19 @@ module OpenAI attr_accessor :project # The entity to use for the run. This allows you to set the team or username of - # the WandB user that you would like associated with the run. If not set, the - # default entity for the registered WandB API key is used. + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } attr_accessor :entity # A display name to set for the run. If not set, we will use the Job ID as the - # name. + # name. sig { returns(T.nilable(String)) } attr_accessor :name # A list of tags to be attached to the newly created run. These tags are passed - # through directly to WandB. Some default tags are generated by OpenAI: - # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } attr_reader :tags @@ -29,9 +29,9 @@ module OpenAI attr_writer :tags # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig do params(project: String, entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String]) .returns(T.attached_class) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index a40e4f11..5c771091 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -9,9 +9,9 @@ module OpenAI attr_accessor :type # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. 
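To make the Weights and Biases settings above concrete, an integration entry for a fine-tuning job might be shaped as follows. The field names come straight from the `Wandb` signatures in this file; the plain-hash form is an assumption about how the generated models accept input.

wandb_integration = {
  type: :wandb,
  wandb: {
    project: "my-wandb-project",  # required: the W&B project metrics are sent to
    entity: "my-team",            # optional team/username; defaults to the registered API key's entity
    name: nil,                    # optional display name; the Job ID is used when nil
    tags: ["openai/finetune"]     # passed through to W&B alongside OpenAI's default tags
  }
}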
sig { returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) } attr_reader :wandb diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 8a30d533..4acc7f38 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -8,32 +8,32 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } attr_accessor :model # The ID of an uploaded file that contains training data. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your dataset must be formatted as a JSONL file. Additionally, you must upload - # your file with the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. # - # The contents of the file should differ depending on if the model uses the - # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), - # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) - # format, or if the fine-tuning method uses the - # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) - # format. + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. sig { returns(String) } attr_accessor :training_file # The hyperparameters used for the fine-tuning job. This value is now deprecated - # in favor of `method`, and should be passed in under the `method` parameter. + # in favor of `method`, and should be passed in under the `method` parameter. sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters)) } attr_reader :hyperparameters @@ -50,11 +50,11 @@ module OpenAI attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -68,31 +68,31 @@ module OpenAI attr_writer :method_ # The seed controls the reproducibility of the job. Passing in the same seed and - # job parameters should produce the same results, but may differ in rare cases. If - # a seed is not specified, one will be generated for you. + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. sig { returns(T.nilable(Integer)) } attr_accessor :seed # A string of up to 64 characters that will be added to your fine-tuned model - # name. + # name. # - # For example, a `suffix` of "custom-model-name" would produce a model name like - # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. sig { returns(T.nilable(String)) } attr_accessor :suffix # The ID of an uploaded file that contains validation data. # - # If you provide this file, the data is used to generate validation metrics - # periodically during fine-tuning. These metrics can be viewed in the fine-tuning - # results file. The same data should not be present in both train and validation - # files. + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. # - # Your dataset must be formatted as a JSONL file. You must upload your file with - # the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. sig { returns(T.nilable(String)) } attr_accessor :validation_file @@ -145,7 +145,7 @@ module OpenAI def to_hash; end # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). module Model extend OpenAI::Internal::Type::Union @@ -164,7 +164,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -172,7 +172,7 @@ module OpenAI attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -180,7 +180,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -188,7 +188,7 @@ module OpenAI attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. 
This value is now deprecated - # in favor of `method`, and should be passed in under the `method` parameter. + # in favor of `method`, and should be passed in under the `method` parameter. sig do params( batch_size: T.any(Symbol, Integer), @@ -212,7 +212,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -221,7 +221,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -230,7 +230,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. module NEpochs extend OpenAI::Internal::Type::Union @@ -241,14 +241,14 @@ module OpenAI class Integration < OpenAI::Internal::Type::BaseModel # The type of integration to enable. Currently, only "wandb" (Weights and Biases) - # is supported. + # is supported. sig { returns(Symbol) } attr_accessor :type # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig { returns(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) } attr_reader :wandb @@ -278,19 +278,19 @@ module OpenAI attr_accessor :project # The entity to use for the run. This allows you to set the team or username of - # the WandB user that you would like associated with the run. If not set, the - # default entity for the registered WandB API key is used. + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. sig { returns(T.nilable(String)) } attr_accessor :entity # A display name to set for the run. If not set, we will use the Job ID as the - # name. + # name. sig { returns(T.nilable(String)) } attr_accessor :name # A list of tags to be attached to the newly created run. These tags are passed - # through directly to WandB. Some default tags are generated by OpenAI: - # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". sig { returns(T.nilable(T::Array[String])) } attr_reader :tags @@ -298,9 +298,9 @@ module OpenAI attr_writer :tags # The settings for your integration with Weights and Biases. This payload - # specifies the project that metrics will be sent to. Optionally, you can set an - # explicit display name for your run, add tags to your run, and set a default - # entity (team, username, etc) to be associated with your run. + # specifies the project that metrics will be sent to. 
Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. sig do params( project: String, @@ -414,7 +414,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -422,7 +422,7 @@ module OpenAI attr_writer :batch_size # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :beta @@ -430,7 +430,7 @@ module OpenAI attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -438,7 +438,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -471,7 +471,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -480,7 +480,7 @@ module OpenAI end # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. + # of the penalty between the policy and reference model. module Beta extend OpenAI::Internal::Type::Union @@ -489,7 +489,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -498,7 +498,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. module NEpochs extend OpenAI::Internal::Type::Union @@ -546,7 +546,7 @@ module OpenAI class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size @@ -554,7 +554,7 @@ module OpenAI attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier @@ -562,7 +562,7 @@ module OpenAI attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. 
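Putting the `JobCreateParams` pieces together, a job-creation call could look like the sketch below. The `client.fine_tuning.jobs.create` path and the exact `method_` hash shape are assumptions inferred from the signatures above (top-level `hyperparameters` is deprecated in favor of `method`); `file` and `wandb_integration` come from the earlier sketches.

job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini-2024-07-18",       # one of the supported fine-tuning models
  training_file: file.id,                # JSONL file uploaded with purpose `fine-tune`
  seed: 42,                              # reproducibility; generated for you if omitted
  suffix: "custom-model-name",           # up to 64 chars appended to the model name
  integrations: [wandb_integration],
  method_: {                             # `method_` per the attr_writer above
    type: :supervised,
    supervised: {
      hyperparameters: { batch_size: :auto, learning_rate_multiplier: :auto, n_epochs: 3 }
    }
  }
)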
sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs @@ -593,7 +593,7 @@ module OpenAI def to_hash; end # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. + # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union @@ -602,7 +602,7 @@ module OpenAI end # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. + # avoid overfitting. module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -611,7 +611,7 @@ module OpenAI end # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. + # through the training dataset. module NEpochs extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 7b8bea48..49bc0f41 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -22,7 +22,7 @@ module OpenAI attr_writer :limit # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. - # Alternatively, set `metadata=null` to indicate no metadata. + # Alternatively, set `metadata=null` to indicate no metadata. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index 06fb98b6..7c253eca 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -42,7 +42,7 @@ module OpenAI attr_accessor :step_number # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a - # fine-tuning job that is ready to use. + # fine-tuning job that is ready to use. sig do params( id: String, diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index cb9eeb56..0351f239 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -4,12 +4,12 @@ module OpenAI module Models class FunctionDefinition < OpenAI::Internal::Type::BaseModel # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain - # underscores and dashes, with a maximum length of 64. + # underscores and dashes, with a maximum length of 64. sig { returns(String) } attr_accessor :name # A description of what the function does, used by the model to choose when and - # how to call the function. + # how to call the function. sig { returns(T.nilable(String)) } attr_reader :description @@ -17,12 +17,12 @@ module OpenAI attr_writer :description # The parameters the functions accepts, described as a JSON Schema object. See the - # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, - # and the - # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - # documentation about the format. + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. # - # Omitting `parameters` defines a function with an empty parameter list. 
+ # Omitting `parameters` defines a function with an empty parameter list. sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } attr_reader :parameters @@ -30,10 +30,10 @@ module OpenAI attr_writer :parameters # Whether to enable strict schema adherence when generating the function call. If - # set to true, the model will follow the exact schema defined in the `parameters` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn - # more about Structured Outputs in the - # [function calling guide](docs/guides/function-calling). + # set to true, the model will follow the exact schema defined in the `parameters` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + # more about Structured Outputs in the + # [function calling guide](docs/guides/function-calling). sig { returns(T.nilable(T::Boolean)) } attr_accessor :strict diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index 9469de4c..3a2f3c67 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -4,7 +4,7 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel # The base64-encoded JSON of the generated image, if `response_format` is - # `b64_json`. + # `b64_json`. sig { returns(T.nilable(String)) } attr_reader :b64_json @@ -12,7 +12,7 @@ module OpenAI attr_writer :b64_json # The prompt that was used to generate the image, if there was any revision to the - # prompt. + # prompt. sig { returns(T.nilable(String)) } attr_reader :revised_prompt diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 9a1061c3..59f2bd84 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -7,34 +7,34 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The image to use as the basis for the variation(s). Must be a valid PNG file, - # less than 4MB, and square. + # less than 4MB, and square. sig { returns(T.any(IO, StringIO)) } attr_accessor :image # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. sig { returns(T.nilable(Integer)) } attr_accessor :n # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
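The `FunctionDefinition` fields above translate into a plain hash like the one below; the weather-lookup schema is an arbitrary example, not something defined in this patch.

function = {
  name: "get_weather",                       # a-z, A-Z, 0-9, underscores/dashes; max 64 chars
  description: "Look up the current weather for a city.",
  parameters: {                              # a JSON Schema object; omit for an empty parameter list
    type: "object",
    properties: { city: { type: "string" } },
    required: ["city"],
    additionalProperties: false
  },
  strict: true                               # follow the exact schema (JSON Schema subset)
}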
sig { returns(T.nilable(String)) } attr_reader :user @@ -81,7 +81,7 @@ module OpenAI def to_hash; end # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. module Model extend OpenAI::Internal::Type::Union @@ -90,8 +90,8 @@ module OpenAI end # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -107,7 +107,7 @@ module OpenAI end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. module Size extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 58bf5ddd..8d4b36be 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -7,18 +7,18 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. + # is not provided, image must have transparency, which will be used as the mask. sig { returns(T.any(IO, StringIO)) } attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters. sig { returns(String) } attr_accessor :prompt # An additional image whose fully transparent areas (e.g. where alpha is zero) - # indicate where `image` should be edited. Must be a valid PNG file, less than - # 4MB, and have the same dimensions as `image`. + # indicate where `image` should be edited. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. sig { returns(T.nilable(T.any(IO, StringIO))) } attr_reader :mask @@ -26,7 +26,7 @@ module OpenAI attr_writer :mask # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } attr_accessor :model @@ -35,19 +35,19 @@ module OpenAI attr_accessor :n # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -98,7 +98,7 @@ module OpenAI def to_hash; end # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. module Model extend OpenAI::Internal::Type::Union @@ -107,8 +107,8 @@ module OpenAI end # The format in which the generated images are returned. 
Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -124,7 +124,7 @@ module OpenAI end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. module Size extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index b03fd35e..c10c716c 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -7,7 +7,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A text description of the desired image(s). The maximum length is 1000 - # characters for `dall-e-2` and 4000 characters for `dall-e-3`. + # characters for `dall-e-2` and 4000 characters for `dall-e-3`. sig { returns(String) } attr_accessor :prompt @@ -16,13 +16,13 @@ module OpenAI attr_accessor :model # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. sig { returns(T.nilable(Integer)) } attr_accessor :n # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } attr_reader :quality @@ -30,27 +30,27 @@ module OpenAI attr_writer :quality # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } attr_accessor :size # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } attr_accessor :style # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
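A usage sketch for the image parameters documented above. As before, the `client.images.*` resource methods are assumed rather than defined in this patch; the parameter names and enum values mirror `ImageGenerateParams` and `ImageCreateVariationParams`.

# `dall-e-3` only: n=1, plus the `quality`, `style`, and larger `size` options.
client.images.generate(
  prompt: "A cute baby sea otter",
  model: "dall-e-3",
  n: 1,
  quality: :hd,
  size: :"1024x1792",
  style: :natural,
  response_format: :b64_json  # URLs are only valid for 60 minutes
)

# `dall-e-2` variation: a square PNG under 4MB.
client.images.create_variation(image: File.open("otter.png"), n: 2, size: :"512x512")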
sig { returns(T.nilable(String)) } attr_reader :user @@ -109,8 +109,8 @@ module OpenAI end # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. module Quality extend OpenAI::Internal::Type::Enum @@ -126,8 +126,8 @@ module OpenAI end # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -143,8 +143,8 @@ module OpenAI end # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. module Size extend OpenAI::Internal::Type::Enum @@ -163,9 +163,9 @@ module OpenAI end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. module Style extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 0850f88c..03bc7cb0 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -59,59 +59,59 @@ module OpenAI class Categories < OpenAI::Internal::Type::BaseModel # Content that expresses, incites, or promotes harassing language towards any - # target. + # target. sig { returns(T::Boolean) } attr_accessor :harassment # Harassment content that also includes violence or serious harm towards any - # target. + # target. sig { returns(T::Boolean) } attr_accessor :harassment_threatening # Content that expresses, incites, or promotes hate based on race, gender, - # ethnicity, religion, nationality, sexual orientation, disability status, or - # caste. Hateful content aimed at non-protected groups (e.g., chess players) is - # harassment. + # ethnicity, religion, nationality, sexual orientation, disability status, or + # caste. Hateful content aimed at non-protected groups (e.g., chess players) is + # harassment. sig { returns(T::Boolean) } attr_accessor :hate # Hateful content that also includes violence or serious harm towards the targeted - # group based on race, gender, ethnicity, religion, nationality, sexual - # orientation, disability status, or caste. + # group based on race, gender, ethnicity, religion, nationality, sexual + # orientation, disability status, or caste. sig { returns(T::Boolean) } attr_accessor :hate_threatening # Content that includes instructions or advice that facilitate the planning or - # execution of wrongdoing, or that gives advice or instruction on how to commit - # illicit acts. For example, "how to shoplift" would fit this category. 
+ # execution of wrongdoing, or that gives advice or instruction on how to commit + # illicit acts. For example, "how to shoplift" would fit this category. sig { returns(T.nilable(T::Boolean)) } attr_accessor :illicit # Content that includes instructions or advice that facilitate the planning or - # execution of wrongdoing that also includes violence, or that gives advice or - # instruction on the procurement of any weapon. + # execution of wrongdoing that also includes violence, or that gives advice or + # instruction on the procurement of any weapon. sig { returns(T.nilable(T::Boolean)) } attr_accessor :illicit_violent # Content that promotes, encourages, or depicts acts of self-harm, such as - # suicide, cutting, and eating disorders. + # suicide, cutting, and eating disorders. sig { returns(T::Boolean) } attr_accessor :self_harm # Content that encourages performing acts of self-harm, such as suicide, cutting, - # and eating disorders, or that gives instructions or advice on how to commit such - # acts. + # and eating disorders, or that gives instructions or advice on how to commit such + # acts. sig { returns(T::Boolean) } attr_accessor :self_harm_instructions # Content where the speaker expresses that they are engaging or intend to engage - # in acts of self-harm, such as suicide, cutting, and eating disorders. + # in acts of self-harm, such as suicide, cutting, and eating disorders. sig { returns(T::Boolean) } attr_accessor :self_harm_intent # Content meant to arouse sexual excitement, such as the description of sexual - # activity, or that promotes sexual services (excluding sex education and - # wellness). + # activity, or that promotes sexual services (excluding sex education and + # wellness). sig { returns(T::Boolean) } attr_accessor :sexual diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 2c12c369..1e4e920e 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -7,7 +7,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # Input (or inputs) to classify. Can be a single string, an array of strings, or - # an array of multi-modal input objects similar to other models. + # an array of multi-modal input objects similar to other models. sig do returns( T.any( @@ -20,9 +20,9 @@ module OpenAI attr_accessor :input # The content moderation model you would like to use. Learn more in - # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and - # learn about available models - # [here](https://platform.openai.com/docs/models#moderation). + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } attr_reader :model @@ -66,7 +66,7 @@ module OpenAI def to_hash; end # Input (or inputs) to classify. Can be a single string, an array of strings, or - # an array of multi-modal input objects similar to other models. + # an array of multi-modal input objects similar to other models. module Input extend OpenAI::Internal::Type::Union @@ -88,9 +88,9 @@ module OpenAI end # The content moderation model you would like to use. Learn more in - # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and - # learn about available models - # [here](https://platform.openai.com/docs/models#moderation). 
+ # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). module Model extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index 10b6f90a..0d6d06b0 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -8,8 +8,8 @@ module OpenAI attr_accessor :type # This is returned when the chunking strategy is unknown. Typically, this is - # because the file was indexed before the `chunking_strategy` concept was - # introduced in the API. + # because the file was indexed before the `chunking_strategy` concept was + # introduced in the API. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :other); end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index f16ebf77..ec1c7ac3 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -5,25 +5,25 @@ module OpenAI class Reasoning < OpenAI::Internal::Type::BaseModel # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :effort # **computer_use_preview only** # - # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } attr_accessor :generate_summary # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig do params( effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), @@ -46,9 +46,9 @@ module OpenAI # **computer_use_preview only** # - # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `concise` or + # `detailed`. 
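Tying together the moderation and reasoning signatures above: the call below assumes a `client.moderations.create` resource method and that its response exposes `results` whose entries carry the `Categories` model from this file; the `Reasoning` construction uses the `self.new` signature shown above.

result = client.moderations.create(
  input: "I want to kill them.",           # String, Array[String], or multi-modal inputs
  model: "omni-moderation-latest"
)
result.results.first.categories.harassment # => true/false, per the Categories sigs

# o-series reasoning options, per reasoning.rbi above.
reasoning = OpenAI::Models::Reasoning.new(effort: :low, generate_summary: :concise)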
module GenerateSummary extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 723cc106..1459f48b 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -4,10 +4,10 @@ module OpenAI module Models # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. module ReasoningEffort extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index 707ef81c..caf63a9c 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -8,8 +8,8 @@ module OpenAI attr_accessor :type # JSON object response format. An older method of generating JSON responses. Using - # `json_schema` is recommended for models that support it. Note that the model - # will not generate JSON without a system or user message instructing it to do so. + # `json_schema` is recommended for models that support it. Note that the model + # will not generate JSON without a system or user message instructing it to do so. sig { params(type: Symbol).returns(T.attached_class) } def self.new(type: :json_object); end diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index 735f2c54..26bae2de 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -20,8 +20,8 @@ module OpenAI attr_accessor :type # JSON Schema response format. Used to generate structured JSON responses. Learn - # more about - # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::AnyHash), @@ -36,12 +36,12 @@ module OpenAI class JSONSchema < OpenAI::Internal::Type::BaseModel # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. + # and dashes, with a maximum length of 64. sig { returns(String) } attr_accessor :name # A description of what the response format is for, used by the model to determine - # how to respond in the format. + # how to respond in the format. sig { returns(T.nilable(String)) } attr_reader :description @@ -49,7 +49,7 @@ module OpenAI attr_writer :description # The schema for the response format, described as a JSON Schema object. Learn how - # to build JSON schemas [here](https://json-schema.org/). + # to build JSON schemas [here](https://json-schema.org/). sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } attr_reader :schema @@ -57,10 +57,10 @@ module OpenAI attr_writer :schema # Whether to enable strict schema adherence when generating the output. 
If set to - # true, the model will always follow the exact schema defined in the `schema` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. To - # learn more, read the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } attr_accessor :strict diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 88894312..c20479e3 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -21,7 +21,7 @@ module OpenAI attr_accessor :type # A tool that controls a virtual computer. Learn more about the - # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). + # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do params( display_height: Float, diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index e1819f46..6f2f6316 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses class EasyInputMessage < OpenAI::Internal::Type::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also - # contain previous assistant responses. + # contain previous assistant responses. sig do returns( T.any( @@ -23,7 +23,7 @@ module OpenAI attr_accessor :content # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. + # `developer`. sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } attr_accessor :role @@ -35,10 +35,10 @@ module OpenAI attr_writer :type # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. sig do params( content: T.any( @@ -81,7 +81,7 @@ module OpenAI def to_hash; end # Text, image, or audio input to the model, used to generate a response. Can also - # contain previous assistant responses. + # contain previous assistant responses. module Content extend OpenAI::Internal::Type::Union @@ -104,7 +104,7 @@ module OpenAI end # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. + # `developer`. module Role extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 37314e5d..b16985a4 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -25,7 +25,7 @@ module OpenAI attr_writer :filters # The maximum number of results to return. 
This number should be between 1 and 50 - # inclusive. + # inclusive. sig { returns(T.nilable(Integer)) } attr_reader :max_num_results @@ -45,8 +45,8 @@ module OpenAI attr_writer :ranking_options # A tool that searches for relevant content from uploaded files. Learn more about - # the - # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). sig do params( vector_store_ids: T::Array[String], @@ -97,8 +97,8 @@ module OpenAI attr_writer :ranker # The score threshold for the file search, a number between 0 and 1. Numbers - # closer to 1 will attempt to return only the most relevant results, but may - # return fewer results. + # closer to 1 will attempt to return only the most relevant results, but may + # return fewer results. sig { returns(T.nilable(Float)) } attr_reader :score_threshold diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index 16ed2997..5d1008c2 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -21,13 +21,13 @@ module OpenAI attr_accessor :type # A description of the function. Used by the model to determine whether or not to - # call the function. + # call the function. sig { returns(T.nilable(String)) } attr_accessor :description # Defines a function in your own code the model can choose to call. Learn more - # about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do params( name: String, diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 83fef973..302e530f 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -22,7 +22,7 @@ module OpenAI attr_writer :before # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } attr_reader :include @@ -30,7 +30,7 @@ module OpenAI attr_writer :include # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -39,8 +39,8 @@ module OpenAI # The order to return the input items in. Default is `asc`. # - # - `asc`: Return the input items in ascending order. - # - `desc`: Return the input items in descending order. + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) } attr_reader :order @@ -77,8 +77,8 @@ module OpenAI # The order to return the input items in. Default is `asc`. # - # - `asc`: Return the input items in ascending order. - # - `desc`: Return the input items in descending order. + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. 
module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 99dd9aae..51372d8d 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -32,28 +32,28 @@ module OpenAI attr_writer :incomplete_details # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig do returns( T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) @@ -67,11 +67,11 @@ module OpenAI # An array of content items generated by the model. # - # - The length and order of items in the `output` array is dependent on the - # model's response. - # - Rather than accessing the first item in the `output` array and assuming it's - # an `assistant` message with the content generated by the model, you might - # consider using the `output_text` property where supported in SDKs. + # - The length and order of items in the `output` array is dependent on the + # model's response. + # - Rather than accessing the first item in the `output` array and assuming it's + # an `assistant` message with the content generated by the model, you might + # consider using the `output_text` property where supported in SDKs. sig do returns( T::Array[ @@ -93,15 +93,15 @@ module OpenAI attr_accessor :parallel_tool_calls # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. 
We generally recommend altering this or `top_p` but + # not both. sig { returns(T.nilable(Float)) } attr_accessor :temperature # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. sig do returns( T.any( @@ -114,19 +114,19 @@ module OpenAI attr_accessor :tool_choice # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do returns( T::Array[ @@ -142,29 +142,29 @@ module OpenAI attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } attr_accessor :max_output_tokens # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). 
sig { returns(T.nilable(String)) } attr_accessor :previous_response_id # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning @@ -172,7 +172,7 @@ module OpenAI attr_writer :reasoning # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, or `incomplete`. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } attr_reader :status @@ -180,10 +180,10 @@ module OpenAI attr_writer :status # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text @@ -192,16 +192,16 @@ module OpenAI # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } attr_accessor :truncation # Represents token usage details including input tokens, output tokens, a - # breakdown of output tokens, and the total tokens used. + # breakdown of output tokens, and the total tokens used. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } attr_reader :usage @@ -209,8 +209,8 @@ module OpenAI attr_writer :usage # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -380,8 +380,8 @@ module OpenAI end # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. module ToolChoice extend OpenAI::Internal::Type::Union @@ -396,11 +396,11 @@ module OpenAI # The truncation strategy to use for the model response. 
# - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. module Truncation extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index ccc7cf95..ba86d36e 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -35,7 +35,7 @@ module OpenAI attr_accessor :pending_safety_checks # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } attr_accessor :status @@ -44,8 +44,8 @@ module OpenAI attr_accessor :type # A tool call to a computer use tool. See the - # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) - # for more information. + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) + # for more information. sig do params( id: String, @@ -101,12 +101,12 @@ module OpenAI class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, - # `right`, `wheel`, `back`, or `forward`. + # `right`, `wheel`, `back`, or `forward`. sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } attr_accessor :button # Specifies the event type. For a click action, this property is always set to - # `click`. + # `click`. sig { returns(Symbol) } attr_accessor :type @@ -144,7 +144,7 @@ module OpenAI def to_hash; end # Indicates which mouse button was pressed during the click. One of `left`, - # `right`, `wheel`, `back`, or `forward`. + # `right`, `wheel`, `back`, or `forward`. module Button extend OpenAI::Internal::Type::Enum @@ -182,7 +182,7 @@ module OpenAI class DoubleClick < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a double click action, this property is always set - # to `double_click`. + # to `double_click`. sig { returns(Symbol) } attr_accessor :type @@ -204,19 +204,19 @@ module OpenAI class Drag < OpenAI::Internal::Type::BaseModel # An array of coordinates representing the path of the drag action. Coordinates - # will appear as an array of objects, eg + # will appear as an array of objects, eg # - # ``` - # [ - # { x: 100, y: 200 }, - # { x: 200, y: 300 } - # ] - # ``` + # ``` + # [ + # { x: 100, y: 200 }, + # { x: 200, y: 300 } + # ] + # ``` sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) } attr_accessor :path # Specifies the event type. For a drag action, this property is always set to - # `drag`. + # `drag`. 
sig { returns(Symbol) } attr_accessor :type @@ -258,12 +258,12 @@ module OpenAI class Keypress < OpenAI::Internal::Type::BaseModel # The combination of keys the model is requesting to be pressed. This is an array - # of strings, each representing a key. + # of strings, each representing a key. sig { returns(T::Array[String]) } attr_accessor :keys # Specifies the event type. For a keypress action, this property is always set to - # `keypress`. + # `keypress`. sig { returns(Symbol) } attr_accessor :type @@ -277,7 +277,7 @@ module OpenAI class Move < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a move action, this property is always set to - # `move`. + # `move`. sig { returns(Symbol) } attr_accessor :type @@ -299,7 +299,7 @@ module OpenAI class Screenshot < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a screenshot action, this property is always set - # to `screenshot`. + # to `screenshot`. sig { returns(Symbol) } attr_accessor :type @@ -321,7 +321,7 @@ module OpenAI attr_accessor :scroll_y # Specifies the event type. For a scroll action, this property is always set to - # `scroll`. + # `scroll`. sig { returns(Symbol) } attr_accessor :type @@ -352,7 +352,7 @@ module OpenAI attr_accessor :text # Specifies the event type. For a type action, this property is always set to - # `type`. + # `type`. sig { returns(Symbol) } attr_accessor :type @@ -366,7 +366,7 @@ module OpenAI class Wait < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a wait action, this property is always set to - # `wait`. + # `wait`. sig { returns(Symbol) } attr_accessor :type @@ -409,7 +409,7 @@ module OpenAI end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index cf267c21..64b2dc16 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -29,7 +29,7 @@ module OpenAI attr_accessor :type # The safety checks reported by the API that have been acknowledged by the - # developer. + # developer. sig do returns( T.nilable( @@ -53,7 +53,7 @@ module OpenAI attr_writer :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } attr_reader :status @@ -123,7 +123,7 @@ module OpenAI end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. 
module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index 5ece9b13..c95e0378 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel # Specifies the event type. For a computer screenshot, this property is always set - # to `computer_screenshot`. + # to `computer_screenshot`. sig { returns(Symbol) } attr_accessor :type diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index f45e18ea..4263b1b6 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -9,13 +9,13 @@ module OpenAI # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) sig do returns( T.any( @@ -41,45 +41,45 @@ module OpenAI attr_accessor :input # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) } attr_accessor :model # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. 
sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } attr_accessor :include # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. sig { returns(T.nilable(String)) } attr_accessor :instructions # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(Integer)) } attr_accessor :max_output_tokens # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -88,15 +88,15 @@ module OpenAI attr_accessor :parallel_tool_calls # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). sig { returns(T.nilable(String)) } attr_accessor :previous_response_id # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig { returns(T.nilable(OpenAI::Models::Reasoning)) } attr_reader :reasoning @@ -108,17 +108,17 @@ module OpenAI attr_accessor :store # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. sig { returns(T.nilable(Float)) } attr_accessor :temperature # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. 
Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } attr_reader :text @@ -126,8 +126,8 @@ module OpenAI attr_writer :text # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. sig do returns( T.nilable( @@ -155,19 +155,19 @@ module OpenAI attr_writer :tool_choice # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). sig do returns( T.nilable( @@ -201,26 +201,26 @@ module OpenAI attr_writer :tools # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. sig { returns(T.nilable(Float)) } attr_accessor :top_p # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. 
+ # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } attr_accessor :truncation # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -358,13 +358,13 @@ module OpenAI # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) module Input extend OpenAI::Internal::Type::Union @@ -395,8 +395,8 @@ module OpenAI end # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. module ToolChoice extend OpenAI::Internal::Type::Union @@ -411,11 +411,11 @@ module OpenAI # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. module Truncation extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 7181116b..10068e96 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :queries # The status of the file search tool call. 
One of `in_progress`, `searching`, - # `incomplete` or `failed`, + # `incomplete` or `failed`, sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } attr_accessor :status @@ -26,8 +26,8 @@ module OpenAI attr_accessor :results # The results of a file search tool call. See the - # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) - # for more information. + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) + # for more information. sig do params( id: String, @@ -57,7 +57,7 @@ module OpenAI def to_hash; end # The status of the file search tool call. One of `in_progress`, `searching`, - # `incomplete` or `failed`, + # `incomplete` or `failed`, module Status extend OpenAI::Internal::Type::Enum @@ -82,10 +82,10 @@ module OpenAI class Result < OpenAI::Internal::Type::BaseModel # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/lib/openai/models/responses/response_format_text_config.rbi index 3fe12068..ae61d108 100644 --- a/rbi/lib/openai/models/responses/response_format_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_config.rbi @@ -5,17 +5,17 @@ module OpenAI module Responses # An object specifying the format that the model must output. # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # The default format is `{ "type": "text" }` with no additional options. + # The default format is `{ "type": "text" }` with no additional options. # - # **Not recommended for gpt-4o and newer models:** + # **Not recommended for gpt-4o and newer models:** # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. 
module ResponseFormatTextConfig extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 8a1756b0..25a553e6 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -5,12 +5,12 @@ module OpenAI module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores - # and dashes, with a maximum length of 64. + # and dashes, with a maximum length of 64. sig { returns(String) } attr_accessor :name # The schema for the response format, described as a JSON Schema object. Learn how - # to build JSON schemas [here](https://json-schema.org/). + # to build JSON schemas [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :schema @@ -19,7 +19,7 @@ module OpenAI attr_accessor :type # A description of what the response format is for, used by the model to determine - # how to respond in the format. + # how to respond in the format. sig { returns(T.nilable(String)) } attr_reader :description @@ -27,16 +27,16 @@ module OpenAI attr_writer :description # Whether to enable strict schema adherence when generating the output. If set to - # true, the model will always follow the exact schema defined in the `schema` - # field. Only a subset of JSON Schema is supported when `strict` is `true`. To - # learn more, read the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). sig { returns(T.nilable(T::Boolean)) } attr_accessor :strict # JSON Schema response format. Used to generate structured JSON responses. Learn - # more about - # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( name: String, diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index f0a465e7..769270c2 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -28,7 +28,7 @@ module OpenAI attr_writer :id # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } attr_reader :status @@ -36,8 +36,8 @@ module OpenAI attr_writer :status # A tool call to run a function. See the - # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # for more information. + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. sig do params( arguments: String, @@ -67,7 +67,7 @@ module OpenAI def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
- # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi index 79e881a7..7ed35c37 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -9,8 +9,8 @@ module OpenAI attr_accessor :id # A tool call to run a function. See the - # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # for more information. + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. sig { params(id: String).returns(T.attached_class) } def self.new(id:); end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 398930d8..8f224c19 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -21,7 +21,7 @@ module OpenAI attr_accessor :type # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } attr_reader :status @@ -55,7 +55,7 @@ module OpenAI def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 92543e70..5bd85d43 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -17,8 +17,8 @@ module OpenAI attr_accessor :type # The results of a web search tool call. See the - # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for - # more information. + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for + # more information. sig do params( id: String, diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index 661b921f..dd8aebb9 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -4,13 +4,13 @@ module OpenAI module Models module Responses # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. 
module ResponseIncludable extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index f6089f0f..f6f84eac 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -5,7 +5,7 @@ module OpenAI module Responses class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or - # `auto`. Defaults to `auto`. + # `auto`. Defaults to `auto`. sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } attr_accessor :detail @@ -18,12 +18,12 @@ module OpenAI attr_accessor :file_id # The URL of the image to be sent to the model. A fully qualified URL or base64 - # encoded image in a data URL. + # encoded image in a data URL. sig { returns(T.nilable(String)) } attr_accessor :image_url # An image input to the model. Learn about - # [image inputs](https://platform.openai.com/docs/guides/vision). + # [image inputs](https://platform.openai.com/docs/guides/vision). sig do params( detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, @@ -49,7 +49,7 @@ module OpenAI def to_hash; end # The detail level of the image to be sent to the model. One of `high`, `low`, or - # `auto`. Defaults to `auto`. + # `auto`. Defaults to `auto`. module Detail extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 6a70db99..cbcc9069 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -4,16 +4,16 @@ module OpenAI module Models module Responses # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. module ResponseInputItem extend OpenAI::Internal::Type::Union class Message < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content - # types. + # types. sig do returns( T::Array[ @@ -32,7 +32,7 @@ module OpenAI attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } attr_reader :status @@ -47,8 +47,8 @@ module OpenAI attr_writer :type # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. sig do params( content: T::Array[ @@ -104,7 +104,7 @@ module OpenAI end # The status of item. One of `in_progress`, `completed`, or `incomplete`. 
- # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum @@ -169,7 +169,7 @@ module OpenAI attr_writer :id # The safety checks reported by the API that have been acknowledged by the - # developer. + # developer. sig do returns( T.nilable( @@ -193,7 +193,7 @@ module OpenAI attr_writer :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } attr_reader :status @@ -264,7 +264,7 @@ module OpenAI end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. module Status extend OpenAI::Internal::Type::Enum @@ -311,7 +311,7 @@ module OpenAI attr_accessor :type # The unique ID of the function tool call output. Populated when this item is - # returned via API. + # returned via API. sig { returns(T.nilable(String)) } attr_reader :id @@ -319,7 +319,7 @@ module OpenAI attr_writer :id # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } attr_reader :status @@ -354,7 +354,7 @@ module OpenAI def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index a8b4a591..1eeebaa4 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -9,7 +9,7 @@ module OpenAI attr_accessor :id # A list of one or many input items to the model, containing different content - # types. + # types. sig do returns( T::Array[ @@ -28,7 +28,7 @@ module OpenAI attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } attr_reader :status @@ -98,7 +98,7 @@ module OpenAI end # The status of item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index ca050336..d7a588ea 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -21,7 +21,7 @@ module OpenAI attr_accessor :role # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. 
sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } attr_accessor :status @@ -74,7 +74,7 @@ module OpenAI end # The status of the message input. One of `in_progress`, `completed`, or - # `incomplete`. Populated when input items are returned via API. + # `incomplete`. Populated when input items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 1b8c7536..9d308e35 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -17,7 +17,7 @@ module OpenAI attr_accessor :type # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } attr_reader :status @@ -25,7 +25,7 @@ module OpenAI attr_writer :status # A description of the chain of thought used by a reasoning model while generating - # a response. + # a response. sig do params( id: String, @@ -67,7 +67,7 @@ module OpenAI end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. - # Populated when items are returned via API. + # Populated when items are returned via API. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 9afae2e8..39ea7e1c 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -8,7 +8,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } attr_reader :include diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index 8768bb79..a887be4c 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, or `incomplete`. module ResponseStatus extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index 1e338162..68a82916 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -6,17 +6,17 @@ module OpenAI class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # An object specifying the format that the model must output. # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. 
Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # The default format is `{ "type": "text" }` with no additional options. + # The default format is `{ "type": "text" }` with no additional options. # - # **Not recommended for gpt-4o and newer models:** + # **Not recommended for gpt-4o and newer models:** # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. sig do returns( T.nilable( @@ -44,10 +44,10 @@ module OpenAI attr_writer :format_ # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig do params( format_: T.any( diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 89b2102a..852d91c4 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -41,7 +41,7 @@ module OpenAI attr_accessor :total_tokens # Represents token usage details including input tokens, output tokens, a - # breakdown of output tokens, and the total tokens used. + # breakdown of output tokens, and the total tokens used. sig do params( input_tokens: Integer, @@ -77,7 +77,7 @@ module OpenAI class InputTokensDetails < OpenAI::Internal::Type::BaseModel # The number of tokens that were retrieved from the cache. - # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). + # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). sig { returns(Integer) } attr_accessor :cached_tokens diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/lib/openai/models/responses/tool.rbi index 9aab8bc4..292e8c9d 100644 --- a/rbi/lib/openai/models/responses/tool.rbi +++ b/rbi/lib/openai/models/responses/tool.rbi @@ -4,8 +4,8 @@ module OpenAI module Models module Responses # A tool that searches for relevant content from uploaded files. Learn more about - # the - # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). module Tool extend OpenAI::Internal::Type::Union diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index 319448bc..ccc45284 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -5,12 +5,12 @@ module OpenAI module Responses # Controls which (if any) tool is called by the model. # - # `none` means the model will not call any tool and instead generates a message. + # `none` means the model will not call any tool and instead generates a message. 
# - # `auto` means the model can pick between generating a message or calling one or - # more tools. + # `auto` means the model can pick between generating a message or calling one or + # more tools. # - # `required` means the model must call one or more tools. + # `required` means the model must call one or more tools. module ToolChoiceOptions extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index 82a4138e..ece62dd0 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -5,18 +5,18 @@ module OpenAI module Responses class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # The type of hosted tool the model should to use. Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). + # [built-in tools](https://platform.openai.com/docs/guides/tools). # - # Allowed values are: + # Allowed values are: # - # - `file_search` - # - `web_search_preview` - # - `computer_use_preview` + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) } attr_accessor :type # Indicates that the model should use a built-in tool to generate a response. - # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) } def self.new(type:); end @@ -24,13 +24,13 @@ module OpenAI def to_hash; end # The type of hosted tool the model should to use. Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). + # [built-in tools](https://platform.openai.com/docs/guides/tools). # - # Allowed values are: + # Allowed values are: # - # - `file_search` - # - `web_search_preview` - # - `computer_use_preview` + # - `file_search` + # - `web_search_preview` + # - `computer_use_preview` module Type extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 63a05033..75ccebd6 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -6,13 +6,13 @@ module OpenAI class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. One of: # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # - `web_search_preview` + # - `web_search_preview_2025_03_11` sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } attr_accessor :type # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } attr_reader :search_context_size @@ -31,8 +31,8 @@ module OpenAI attr_writer :user_location # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). 
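A usage sketch for the web search tool described above, assuming a configured client; the model name, prompt, and location values are illustrative:

    require "openai"

    client = OpenAI::Client.new

    response = client.responses.create(
      model: "gpt-4o",
      input: "What was a positive news story from today?",
      tools: [
        {
          type: :web_search_preview,
          search_context_size: :low, # trade context for latency and cost
          user_location: {type: :approximate, country: "US", timezone: "America/Los_Angeles"}
        }
      ]
    )
    puts response.output.inspect
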
sig do params( type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, @@ -57,8 +57,8 @@ module OpenAI # The type of the web search tool. One of: # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # - `web_search_preview` + # - `web_search_preview_2025_03_11` module Type extend OpenAI::Internal::Type::Enum @@ -76,7 +76,7 @@ module OpenAI end # High level guidance for the amount of context window space to use for the - # search. One of `low`, `medium`, or `high`. `medium` is the default. + # search. One of `low`, `medium`, or `high`. `medium` is the default. module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -106,7 +106,7 @@ module OpenAI attr_writer :city # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of - # the user, e.g. `US`. + # the user, e.g. `US`. sig { returns(T.nilable(String)) } attr_reader :country @@ -121,7 +121,7 @@ module OpenAI attr_writer :region # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the - # user, e.g. `America/Los_Angeles`. + # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } attr_reader :timezone diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index 1a672f16..ce9bb8d5 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -5,12 +5,12 @@ module OpenAI class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # The number of tokens that overlap between chunks. The default value is `400`. # - # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. sig { returns(Integer) } attr_accessor :chunk_overlap_tokens # The maximum number of tokens in each chunk. The default value is `800`. The - # minimum value is `100` and the maximum value is `4096`. + # minimum value is `100` and the maximum value is `4096`. sig { returns(Integer) } attr_accessor :max_chunk_size_tokens diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index bc80b4c6..7a9a7404 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -28,8 +28,8 @@ module OpenAI attr_accessor :object # The intended purpose of the file. - # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) - # for acceptable values. + # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + # for acceptable values. sig { returns(String) } attr_accessor :purpose diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index f6d148b3..344ab351 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -11,7 +11,7 @@ module OpenAI attr_accessor :part_ids # The optional md5 checksum for the file contents to verify if the bytes uploaded - # matches what you expect. + # matches what you expect. sig { returns(T.nilable(String)) } attr_reader :md5 diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 050879be..74c1e762 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -16,15 +16,15 @@ module OpenAI # The MIME type of the file. 
# - # This must fall within the supported MIME types for your file purpose. See the - # supported MIME types for assistants and vision. + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. sig { returns(String) } attr_accessor :mime_type # The intended purpose of the uploaded file. # - # See the - # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } attr_accessor :purpose diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 9969477d..3426c9f4 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -22,11 +22,11 @@ module OpenAI attr_accessor :last_active_at # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -39,8 +39,8 @@ module OpenAI attr_accessor :object # The status of the vector store, which can be either `expired`, `in_progress`, or - # `completed`. A status of `completed` indicates that the vector store is ready - # for use. + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } attr_accessor :status @@ -60,7 +60,7 @@ module OpenAI attr_accessor :expires_at # A vector store is a collection of processed files can be used by the - # `file_search` tool. + # `file_search` tool. sig do params( id: String, @@ -157,8 +157,8 @@ module OpenAI end # The status of the vector store, which can be either `expired`, `in_progress`, or - # `completed`. A status of `completed` indicates that the vector store is ready - # for use. + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. module Status extend OpenAI::Internal::Type::Enum @@ -175,7 +175,7 @@ module OpenAI class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. sig { returns(Symbol) } attr_accessor :anchor diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index a5ba26e4..c6256c21 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -7,7 +7,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. 
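A sketch of creating a vector store with an explicit static chunking strategy, assuming a configured client and a hypothetical file ID. Per the constraints documented above, `max_chunk_size_tokens` must lie between 100 and 4096 and the overlap must not exceed half of it:

    require "openai"

    client = OpenAI::Client.new

    vector_store = client.vector_stores.create(
      name: "support-docs",
      file_ids: ["file-abc123"], # hypothetical file ID; chunking only applies when non-empty
      chunking_strategy: {
        type: :static,
        static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
      },
      metadata: {team: "support"}
    )
    puts vector_store.id
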
sig do returns( T.nilable( @@ -45,8 +45,8 @@ module OpenAI attr_writer :expires_after # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(T.nilable(T::Array[String])) } attr_reader :file_ids @@ -54,11 +54,11 @@ module OpenAI attr_writer :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -114,7 +114,7 @@ module OpenAI class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. sig { returns(Symbol) } attr_accessor :anchor diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 240ac978..e40b3b45 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -7,9 +7,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -17,9 +17,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -27,7 +27,7 @@ module OpenAI attr_writer :before # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -35,7 +35,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. 
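The cursor parameters documented above compose into the usual pagination loop; a minimal sketch, assuming a configured client:

    require "openai"

    client = OpenAI::Client.new

    # First page: newest first, 20 objects (the documented default limit).
    page = client.vector_stores.list(limit: 20, order: :desc)
    page.data.each { |vs| puts "#{vs.id} #{vs.name}" }

    # Next page: pass the last object ID of this page as the `after` cursor.
    client.vector_stores.list(after: page.data.last.id, limit: 20, order: :desc) unless page.data.empty?
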
sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } attr_reader :order @@ -69,7 +69,7 @@ module OpenAI def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 523d2871..e5456dff 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -23,7 +23,7 @@ module OpenAI attr_writer :filters # The maximum number of results to return. This number should be between 1 and 50 - # inclusive. + # inclusive. sig { returns(T.nilable(Integer)) } attr_reader :max_num_results diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 06e2c923..454d54a1 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -4,10 +4,10 @@ module OpenAI module Models class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index 81e71bed..b40311aa 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -19,11 +19,11 @@ module OpenAI attr_writer :expires_after # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata @@ -57,7 +57,7 @@ module OpenAI class ExpiresAfter < OpenAI::Internal::Type::BaseModel # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. + # `last_active_at`. 
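A sketch of attaching the expiration policy documented above to an existing store, assuming a configured client and a hypothetical store ID; the `days` field comes from the public API reference rather than this excerpt:

    require "openai"

    client = OpenAI::Client.new

    # Expire the store 7 days after it was last active; `last_active_at` is
    # the only supported anchor.
    client.vector_stores.update(
      "vs_abc123",
      expires_after: {anchor: :last_active_at, days: 7}
    )
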
sig { returns(Symbol) } attr_accessor :anchor diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index fd42f4c3..f64e4470 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -8,21 +8,21 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(T::Array[String]) } attr_accessor :file_ids # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 93928ade..8911fb0e 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -11,9 +11,9 @@ module OpenAI attr_accessor :vector_store_id # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -21,9 +21,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -38,7 +38,7 @@ module OpenAI attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. 
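A sketch combining the batch-create and list-files parameters documented above; the client, the IDs, and the exact positional/keyword split of the path parameters are assumptions:

    require "openai"

    client = OpenAI::Client.new

    batch = client.vector_stores.file_batches.create(
      "vs_abc123",                             # hypothetical vector store ID
      file_ids: ["file-abc123", "file-def456"] # hypothetical file IDs
    )

    files = client.vector_stores.file_batches.list_files(
      batch.id,
      vector_store_id: "vs_abc123",
      limit: 100 # the documented maximum
    )
    files.data.each { |f| puts f.id }
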
sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -46,7 +46,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } attr_reader :order @@ -114,7 +114,7 @@ module OpenAI end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index f63b7e8d..774e6c2e 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -8,21 +8,21 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A [File](https://platform.openai.com/docs/api-reference/files) ID that the - # vector store should use. Useful for tools like `file_search` that can access - # files. + # vector store should use. Useful for tools like `file_search` that can access + # files. sig { returns(String) } attr_accessor :file_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 9f451eec..10960b9c 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -8,9 +8,9 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. sig { returns(T.nilable(String)) } attr_reader :after @@ -18,9 +18,9 @@ module OpenAI attr_writer :after # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. 
For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. sig { returns(T.nilable(String)) } attr_reader :before @@ -35,7 +35,7 @@ module OpenAI attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. sig { returns(T.nilable(Integer)) } attr_reader :limit @@ -43,7 +43,7 @@ module OpenAI attr_writer :limit # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } attr_reader :order @@ -96,7 +96,7 @@ module OpenAI end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 9eb12ec9..41edb9ab 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -11,10 +11,10 @@ module OpenAI attr_accessor :vector_store_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index e729c6ee..6bf82fb0 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -13,7 +13,7 @@ module OpenAI attr_accessor :created_at # The last error associated with this vector store file. Will be `null` if there - # are no errors. + # are no errors. sig { returns(T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) } attr_reader :last_error @@ -30,28 +30,28 @@ module OpenAI attr_accessor :object # The status of the vector store file, which can be either `in_progress`, - # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the - # vector store file is ready for use. + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } attr_accessor :status # The total vector store usage in bytes. 
Note that this may be different from the - # original file size. + # original file size. sig { returns(Integer) } attr_accessor :usage_bytes # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # that the [File](https://platform.openai.com/docs/api-reference/files) is - # attached to. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. sig { returns(String) } attr_accessor :vector_store_id # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } attr_accessor :attributes @@ -135,7 +135,7 @@ module OpenAI attr_accessor :message # The last error associated with this vector store file. Will be `null` if there - # are no errors. + # are no errors. sig do params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) @@ -172,8 +172,8 @@ module OpenAI end # The status of the vector store file, which can be either `in_progress`, - # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the - # vector store file is ready for use. + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index 0ee00156..cef32c64 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -9,7 +9,7 @@ module OpenAI attr_accessor :id # The Unix timestamp (in seconds) for when the vector store files batch was - # created. + # created. sig { returns(Integer) } attr_accessor :created_at @@ -29,14 +29,14 @@ module OpenAI attr_accessor :object # The status of the vector store files batch, which can be either `in_progress`, - # `completed`, `cancelled` or `failed`. + # `completed`, `cancelled` or `failed`. sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } attr_accessor :status # The ID of the - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) - # that the [File](https://platform.openai.com/docs/api-reference/files) is - # attached to. + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. sig { returns(String) } attr_accessor :vector_store_id @@ -124,7 +124,7 @@ module OpenAI end # The status of the vector store files batch, which can be either `in_progress`, - # `completed`, `cancelled` or `failed`. + # `completed`, `cancelled` or `failed`. 
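Because `cancelled`, `completed`, and `failed` are terminal, the batch status documented above suits a simple polling loop; a sketch, assuming a configured client, hypothetical IDs, and this parameter layout:

    require "openai"

    client = OpenAI::Client.new

    batch = client.vector_stores.file_batches.retrieve(
      "vsfb_abc123",               # hypothetical batch ID
      vector_store_id: "vs_abc123" # hypothetical vector store ID
    )
    until %i[completed cancelled failed].include?(batch.status)
      sleep(1)
      batch = client.vector_stores.file_batches.retrieve(batch.id, vector_store_id: "vs_abc123")
    end
    puts batch.status
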
module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/request_options.rbi b/rbi/lib/openai/request_options.rbi index 3f0ab9b4..9c3df5a0 100644 --- a/rbi/lib/openai/request_options.rbi +++ b/rbi/lib/openai/request_options.rbi @@ -2,32 +2,32 @@ module OpenAI # Specify HTTP behaviour to use for a specific request. These options supplement - # or override those provided at the client level. + # or override those provided at the client level. # - # When making a request, you can pass an actual {RequestOptions} instance, or - # simply pass a Hash with symbol keys matching the attributes on this class. + # When making a request, you can pass an actual {RequestOptions} instance, or + # simply pass a Hash with symbol keys matching the attributes on this class. class RequestOptions < OpenAI::Internal::Type::BaseModel # @api private sig { params(opts: T.any(T.self_type, T::Hash[Symbol, T.anything])).void } def self.validate!(opts); end # Idempotency key to send with request and all associated retries. Will only be - # sent for write requests. + # sent for write requests. sig { returns(T.nilable(String)) } attr_accessor :idempotency_key # Extra query params to send with the request. These are `.merge`’d into any - # `query` given at the client level. + # `query` given at the client level. sig { returns(T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) } attr_accessor :extra_query # Extra headers to send with the request. These are `.merged`’d into any - # `extra_headers` given at the client level. + # `extra_headers` given at the client level. sig { returns(T.nilable(T::Hash[String, T.nilable(String)])) } attr_accessor :extra_headers # Extra data to send with the request. These are deep merged into any data - # generated as part of the normal request. + # generated as part of the normal request. sig { returns(T.nilable(T.anything)) } attr_accessor :extra_body diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/lib/openai/resources/audio/speech.rbi index f51b153a..2717898a 100644 --- a/rbi/lib/openai/resources/audio/speech.rbi +++ b/rbi/lib/openai/resources/audio/speech.rbi @@ -21,21 +21,21 @@ module OpenAI # The text to generate audio for. The maximum length is 4096 characters. input:, # One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. + # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. model:, # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and - # `verse`. Previews of the voices are available in the - # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). + # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and + # `verse`. Previews of the voices are available in the + # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). voice:, # Control the voice of your generated audio with additional instructions. Does not - # work with `tts-1` or `tts-1-hd`. + # work with `tts-1` or `tts-1-hd`. instructions: nil, # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, - # `wav`, and `pcm`. + # `wav`, and `pcm`. response_format: nil, # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. 
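Pulling the speech parameters documented above together, a sketch assuming a configured client; the return value is assumed here to be an IO-like object positioned at the start of the raw audio:

    require "openai"

    client = OpenAI::Client.new

    audio = client.audio.speech.create(
      model: "gpt-4o-mini-tts",
      voice: :alloy,
      input: "The quick brown fox jumped over the lazy dog.",
      instructions: "Speak slowly and calmly.", # not supported by tts-1 / tts-1-hd
      response_format: :mp3,
      speed: 1.0 # 0.25..4.0, default 1.0
    )
    File.binwrite("speech.mp3", audio.read)
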
speed: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 9fe4f4a3..824a08af 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -5,9 +5,9 @@ module OpenAI class Audio class Transcriptions # See {OpenAI::Resources::Audio::Transcriptions#create_streaming} for streaming - # counterpart. + # counterpart. # - # Transcribes audio into the input language. + # Transcribes audio into the input language. sig do params( file: T.any(IO, StringIO), @@ -25,52 +25,52 @@ module OpenAI end def create( # The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). model:, # Additional information to include in the transcription response. `logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. include: nil, # The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. language: nil, # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. prompt: nil, # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. response_format: nil, # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. temperature: nil, # The timestamp granularities to populate for this transcription. 
- # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. timestamp_granularities: nil, # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming - # counterpart. + # counterpart. # - # Transcribes audio into the input language. + # Transcribes audio into the input language. sig do params( file: T.any(IO, StringIO), @@ -95,45 +95,45 @@ module OpenAI end def create_streaming( # The audio file object (not file name) to transcribe, in one of these formats: - # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, # ID of the model to use. The options are `gpt-4o-transcribe`, - # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source - # Whisper V2 model). + # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source + # Whisper V2 model). model:, # Additional information to include in the transcription response. `logprobs` will - # return the log probabilities of the tokens in the response to understand the - # model's confidence in the transcription. `logprobs` only works with - # response_format set to `json` and only with the models `gpt-4o-transcribe` and - # `gpt-4o-mini-transcribe`. + # return the log probabilities of the tokens in the response to understand the + # model's confidence in the transcription. `logprobs` only works with + # response_format set to `json` and only with the models `gpt-4o-transcribe` and + # `gpt-4o-mini-transcribe`. include: nil, # The language of the input audio. Supplying the input language in - # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) - # format will improve accuracy and latency. + # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) + # format will improve accuracy and latency. language: nil, # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should match the audio language. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should match the audio language. prompt: nil, # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, - # the only supported format is `json`. + # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, + # the only supported format is `json`. response_format: nil, # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. 
If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. temperature: nil, # The timestamp granularities to populate for this transcription. - # `response_format` must be set `verbose_json` to use timestamp granularities. - # Either or both of these options are supported: `word`, or `segment`. Note: There - # is no additional latency for segment timestamps, but generating word timestamps - # incurs additional latency. + # `response_format` must be set `verbose_json` to use timestamp granularities. + # Either or both of these options are supported: `word`, or `segment`. Note: There + # is no additional latency for segment timestamps, but generating word timestamps + # incurs additional latency. timestamp_granularities: nil, # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index 4ee7bcc0..d39ad32c 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -18,24 +18,24 @@ module OpenAI end def create( # The audio file object (not file name) translate, in one of these formats: flac, - # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. + # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. file:, # ID of the model to use. Only `whisper-1` (which is powered by our open source - # Whisper V2 model) is currently available. + # Whisper V2 model) is currently available. model:, # An optional text to guide the model's style or continue a previous audio - # segment. The - # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) - # should be in English. + # segment. The + # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) + # should be in English. prompt: nil, # The format of the output, in one of these options: `json`, `text`, `srt`, - # `verbose_json`, or `vtt`. + # `verbose_json`, or `vtt`. response_format: nil, # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # output more random, while lower values like 0.2 will make it more focused and - # deterministic. If set to 0, the model will use - # [log probability](https://en.wikipedia.org/wiki/Log_probability) to - # automatically increase the temperature until certain thresholds are hit. + # output more random, while lower values like 0.2 will make it more focused and + # deterministic. If set to 0, the model will use + # [log probability](https://en.wikipedia.org/wiki/Log_probability) to + # automatically increase the temperature until certain thresholds are hit. 
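As a sketch of the translation endpoint documented above, assuming a configured client and a local audio file; any of the listed container formats works:

    require "openai"

    client = OpenAI::Client.new

    translation = client.audio.translations.create(
      file: File.open("meeting_notes_de.mp3", "rb"),
      model: "whisper-1", # currently the only supported model
      response_format: :json,
      temperature: 0 # lets the service raise temperature itself via log probability
    )
    puts translation.text
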
temperature: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/lib/openai/resources/batches.rbi index ec20cb07..390a8fa9 100644 --- a/rbi/lib/openai/resources/batches.rbi +++ b/rbi/lib/openai/resources/batches.rbi @@ -16,29 +16,29 @@ module OpenAI end def create( # The time frame within which the batch should be processed. Currently only `24h` - # is supported. + # is supported. completion_window:, # The endpoint to be used for all requests in the batch. Currently - # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` - # are supported. Note that `/v1/embeddings` batches are also restricted to a - # maximum of 50,000 embedding inputs across all requests in the batch. + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. endpoint:, # The ID of an uploaded file that contains requests for the new batch. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your input file must be formatted as a - # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), - # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 - # requests, and can be up to 200 MB in size. + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. input_file_id:, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, request_options: {} ); end @@ -66,18 +66,18 @@ module OpenAI end def list( # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, request_options: {} ); end # Cancels an in-progress batch. The batch will be in status `cancelling` for up to - # 10 minutes, before changing to `cancelled`, where it will have partial results - # (if any) available in the output file. + # 10 minutes, before changing to `cancelled`, where it will have partial results + # (if any) available in the output file. 
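The batch creation and cancellation flows documented above compose as follows; a sketch assuming a configured client and a hypothetical input file uploaded with purpose `batch`:

    require "openai"

    client = OpenAI::Client.new

    batch = client.batches.create(
      completion_window: :"24h",        # currently the only supported window
      endpoint: :"/v1/chat/completions",
      input_file_id: "file-abc123"      # hypothetical JSONL input file
    )

    # Cancellation can take up to 10 minutes: status moves through `cancelling`
    # to `cancelled`, with any partial results left in the output file.
    client.batches.cancel(batch.id)
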
sig do params( batch_id: String, diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/lib/openai/resources/beta/assistants.rbi index 277c81d2..580fbcba 100644 --- a/rbi/lib/openai/resources/beta/assistants.rbi +++ b/rbi/lib/openai/resources/beta/assistants.rbi @@ -39,71 +39,71 @@ module OpenAI end def create( # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, # The description of the assistant. The maximum length is 512 characters. description: nil, # The system instructions that the assistant uses. The maximum length is 256,000 - # characters. + # characters. instructions: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. 
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
+ # message the model generates is valid JSON.
#
- # **Important:** when using JSON mode, you **must** also instruct the model to
- # produce JSON yourself via a system or user message. Without this, the model may
- # generate an unending stream of whitespace until the generation reaches the token
- # limit, resulting in a long-running and seemingly "stuck" request. Also note that
- # the message content may be partially cut off if `finish_reason="length"`, which
- # indicates the generation exceeded `max_tokens` or the conversation exceeded the
- # max context length.
+ # **Important:** when using JSON mode, you **must** also instruct the model to
+ # produce JSON yourself via a system or user message. Without this, the model may
+ # generate an unending stream of whitespace until the generation reaches the token
+ # limit, resulting in a long-running and seemingly "stuck" request. Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
response_format: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
temperature: nil,
# A set of resources that are used by the assistant's tools. The resources are
- # specific to the type of tool. For example, the `code_interpreter` tool requires
- # a list of file IDs, while the `file_search` tool requires a list of vector store
- # IDs.
+ # specific to the type of tool. For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
# A list of tools enabled on the assistant. There can be a maximum of 128 tools per
- # assistant. Tools can be of types `code_interpreter`, `file_search`, or
- # `function`.
+ # assistant. Tools can be of types `code_interpreter`, `file_search`, or
+ # `function`.
tools: nil,
# An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
request_options: {}
); end
@@ -160,69 +160,69 @@ module OpenAI
# The description of the assistant. The maximum length is 512 characters.
description: nil,
# The system instructions that the assistant uses. The maximum length is 256,000
- # characters.
+ # characters.
instructions: nil,
# Set of 16 key-value pairs that can be attached to an object. This can be useful
- # for storing additional information about the object in a structured format, and
- # querying for objects via API or the dashboard.
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
#
- # Keys are strings with a maximum length of 64 characters.
Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. 
Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
response_format: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
temperature: nil,
# A set of resources that are used by the assistant's tools. The resources are
- # specific to the type of tool. For example, the `code_interpreter` tool requires
- # a list of file IDs, while the `file_search` tool requires a list of vector store
- # IDs.
+ # specific to the type of tool. For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
# A list of tools enabled on the assistant. There can be a maximum of 128 tools per
- # assistant. Tools can be of types `code_interpreter`, `file_search`, or
- # `function`.
+ # assistant. Tools can be of types `code_interpreter`, `file_search`, or
+ # `function`.
tools: nil,
# An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
request_options: {}
); end
@@ -239,20 +239,20 @@ module OpenAI
end
def list(
# A cursor for use in pagination. `after` is an object ID that defines your place
- # in the list. For instance, if you make a list request and receive 100 objects,
- # ending with obj_foo, your subsequent call can include after=obj_foo in order to
- # fetch the next page of the list.
+ # in the list. For instance, if you make a list request and receive 100 objects,
+ # ending with obj_foo, your subsequent call can include after=obj_foo in order to
+ # fetch the next page of the list.
after: nil,
# A cursor for use in pagination. `before` is an object ID that defines your place
- # in the list. For instance, if you make a list request and receive 100 objects,
- # starting with obj_foo, your subsequent call can include before=obj_foo in order
- # to fetch the previous page of the list.
+ # in the list. For instance, if you make a list request and receive 100 objects,
+ # starting with obj_foo, your subsequent call can include before=obj_foo in order
+ # to fetch the previous page of the list.
before: nil,
# A limit on the number of objects to be returned. Limit can range between 1 and
- # 100, and the default is 20.
+ # 100, and the default is 20.
limit: nil,
# Sort order by the `created_at` timestamp of the objects. `asc` for ascending
- # order and `desc` for descending order.
+ # order and `desc` for descending order.
order: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 07a8d939..316bbca4 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -22,19 +22,19 @@ module OpenAI end def create( # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # start the thread with. + # start the thread with. messages: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} ); end @@ -65,16 +65,16 @@ module OpenAI # The ID of the thread to modify. Only the `metadata` can be modified. thread_id, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # A set of resources that are made available to the assistant's tools in this - # thread. The resources are specific to the type of tool. For example, the - # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # tool requires a list of vector store IDs. + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} ); end @@ -93,7 +93,7 @@ module OpenAI ); end # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # - # Create a thread and run it in one request. + # Create a thread and run it in one request. sig do params( assistant_id: String, @@ -143,102 +143,102 @@ module OpenAI end def create_and_run( # The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, # Override the default system message of the assistant. 
This is useful for - # modifying the behavior on a per-run basis. + # modifying the behavior on a per-run basis. instructions: nil, # The maximum number of completion tokens that may be used over the course of the - # run. The run will make a best effort to use only the number of completion tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # completion tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_completion_tokens: nil, # The maximum number of prompt tokens that may be used over the course of the run. - # The run will make a best effort to use only the number of prompt tokens - # specified, across multiple turns of the run. If the run exceeds the number of - # prompt tokens specified, the run will end with status `incomplete`. See - # `incomplete_details` for more info. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_prompt_tokens: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. 
Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, # Options to create a new thread. If no thread is provided when running a request, - # an empty thread will be created. + # an empty thread will be created. thread: nil, # Controls which (if any) tool is called by the model. `none` means the model will - # not call any tools and instead generates a message. `auto` is the default value - # and means the model can pick between generating a message or calling one or more - # tools. `required` means the model must call one or more tools before responding - # to the user. Specifying a particular tool like `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. tool_choice: nil, # A set of resources that are used by the assistant's tools. The resources are - # specific to the type of tool. For example, the `code_interpreter` tool requires - # a list of file IDs, while the `file_search` tool requires a list of vector store - # IDs. + # specific to the type of tool. 
For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
# Override the tools the assistant can use for this run. This is useful for
- # modifying the behavior on a per-run basis.
+ # modifying the behavior on a per-run basis.
tools: nil,
# An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
# Controls for how a thread will be truncated prior to the run. Use this to
- # control the intial context window of the run.
+ # control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or
- # `#create_and_run` for streaming and non-streaming use cases, respectively.
+ # `#create_and_run` for streaming and non-streaming use cases, respectively.
stream: false,
request_options: {}
); end
# See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming
- # counterpart.
+ # counterpart.
#
- # Create a thread and run it in one request.
+ # Create a thread and run it in one request.
sig do
params(
assistant_id: String,
@@ -317,95 +317,95 @@ module OpenAI
end
def stream_raw(
# The ID of the
- # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
- # execute this run.
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
+ # execute this run.
assistant_id:,
# Override the default system message of the assistant. This is useful for
- # modifying the behavior on a per-run basis.
+ # modifying the behavior on a per-run basis.
instructions: nil,
# The maximum number of completion tokens that may be used over the course of the
- # run. The run will make a best effort to use only the number of completion tokens
- # specified, across multiple turns of the run. If the run exceeds the number of
- # completion tokens specified, the run will end with status `incomplete`. See
- # `incomplete_details` for more info.
+ # run. The run will make a best effort to use only the number of completion tokens
+ # specified, across multiple turns of the run. If the run exceeds the number of
+ # completion tokens specified, the run will end with status `incomplete`. See
+ # `incomplete_details` for more info.
max_completion_tokens: nil,
# The maximum number of prompt tokens that may be used over the course of the run.
- # The run will make a best effort to use only the number of prompt tokens
- # specified, across multiple turns of the run. If the run exceeds the number of
- # prompt tokens specified, the run will end with status `incomplete`. See
- # `incomplete_details` for more info.
+ # The run will make a best effort to use only the number of prompt tokens
+ # specified, across multiple turns of the run. If the run exceeds the number of
+ # prompt tokens specified, the run will end with status `incomplete`. See
+ # `incomplete_details` for more info.
max_prompt_tokens: nil,
# Set of 16 key-value pairs that can be attached to an object.
This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # be used to execute this run. If a value is provided here, it will override the - # model associated with the assistant. If not, the model associated with the - # assistant will be used. + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, # Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. 
Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
response_format: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
temperature: nil,
# Options to create a new thread. If no thread is provided when running a request,
- # an empty thread will be created.
+ # an empty thread will be created.
thread: nil,
# Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tools and instead generates a message. `auto` is the default value
- # and means the model can pick between generating a message or calling one or more
- # tools. `required` means the model must call one or more tools before responding
- # to the user. Specifying a particular tool like `{"type": "file_search"}` or
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
+ # not call any tools and instead generates a message. `auto` is the default value
+ # and means the model can pick between generating a message or calling one or more
+ # tools. `required` means the model must call one or more tools before responding
+ # to the user. Specifying a particular tool like `{"type": "file_search"}` or
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
tool_choice: nil,
# A set of resources that are used by the assistant's tools. The resources are
- # specific to the type of tool. For example, the `code_interpreter` tool requires
- # a list of file IDs, while the `file_search` tool requires a list of vector store
- # IDs.
+ # specific to the type of tool. For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
# Override the tools the assistant can use for this run. This is useful for
- # modifying the behavior on a per-run basis.
+ # modifying the behavior on a per-run basis.
tools: nil,
# An alternative to sampling with temperature, called nucleus sampling, where the
- # model considers the results of the tokens with top_p probability mass. So 0.1
- # means only the tokens comprising the top 10% probability mass are considered.
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
# Controls for how a thread will be truncated prior to the run. Use this to
- # control the intial context window of the run.
+ # control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#stream_raw` or
- # `#create_and_run` for streaming and non-streaming use cases, respectively.
+ # `#create_and_run` for streaming and non-streaming use cases, respectively.
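The `stream: false` / `stream: true` pair above maps onto the two public entry points named in these comments. A hedged sketch of both follows; the method names come from the signatures in this diff, while the client setup, IDs, and the enumerable shape of the streaming return value are assumptions.

```ruby
# Non-streaming: create a thread and run it in one request, blocking until done.
run = client.beta.threads.create_and_run(
  assistant_id: "asst_abc123",  # placeholder assistant ID
  thread: {messages: [{role: "user", content: "Summarize our last meeting."}]}
)

# Streaming counterpart: same parameters, but events arrive incrementally.
stream = client.beta.threads.stream_raw(
  assistant_id: "asst_abc123",
  thread: {messages: [{role: "user", content: "Summarize our last meeting."}]}
)
stream.each { |event| pp event }  # assumes the returned stream is enumerable
```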
stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/lib/openai/resources/beta/threads/messages.rbi index ac987e13..2d12cb7e 100644 --- a/rbi/lib/openai/resources/beta/threads/messages.rbi +++ b/rbi/lib/openai/resources/beta/threads/messages.rbi @@ -31,25 +31,25 @@ module OpenAI end def create( # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # to create a message for. + # to create a message for. thread_id, # The text contents of the message. content:, # The role of the entity that is creating the message. Allowed values include: # - # - `user`: Indicates the message is sent by an actual user and should be used in - # most cases to represent user-generated messages. - # - `assistant`: Indicates the message is generated by the assistant. Use this - # value to insert messages from the assistant into the conversation. + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. role:, # A list of files attached to the message, and the tools they should be added to. attachments: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, request_options: {} ); end @@ -66,7 +66,7 @@ module OpenAI # The ID of the message to retrieve. message_id, # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # to which this message belongs. + # to which this message belongs. thread_id:, request_options: {} ); end @@ -86,11 +86,11 @@ module OpenAI # Path param: The ID of the thread to which this message belongs. thread_id:, # Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, request_options: {} ); end @@ -109,23 +109,23 @@ module OpenAI end def list( # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) - # the messages belong to. + # the messages belong to. thread_id, # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. order: nil, # Filter messages by the run ID that generated them. run_id: nil, diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 09f55c13..e3a1216a 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -9,9 +9,9 @@ module OpenAI attr_reader :steps # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming - # counterpart. + # counterpart. # - # Create a run. + # Create a run. sig do params( thread_id: String, @@ -68,120 +68,120 @@ module OpenAI # Path param: The ID of the thread to run. thread_id, # Body param: The ID of the - # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to - # execute this run. + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, # Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, # Body param: Appends additional instructions at the end of the instructions for - # the run. This is useful for modifying the behavior on a per-run basis without - # overriding other instructions. + # the run. This is useful for modifying the behavior on a per-run basis without + # overriding other instructions. additional_instructions: nil, # Body param: Adds additional messages to the thread before creating the run. additional_messages: nil, # Body param: Overrides the - # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) - # of the assistant. This is useful for modifying the behavior on a per-run basis. + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. 
instructions: nil, # Body param: The maximum number of completion tokens that may be used over the - # course of the run. The run will make a best effort to use only the number of - # completion tokens specified, across multiple turns of the run. If the run - # exceeds the number of completion tokens specified, the run will end with status - # `incomplete`. See `incomplete_details` for more info. + # course of the run. The run will make a best effort to use only the number of + # completion tokens specified, across multiple turns of the run. If the run + # exceeds the number of completion tokens specified, the run will end with status + # `incomplete`. See `incomplete_details` for more info. max_completion_tokens: nil, # Body param: The maximum number of prompt tokens that may be used over the course - # of the run. The run will make a best effort to use only the number of prompt - # tokens specified, across multiple turns of the run. If the run exceeds the - # number of prompt tokens specified, the run will end with status `incomplete`. - # See `incomplete_details` for more info. + # of the run. The run will make a best effort to use only the number of prompt + # tokens specified, across multiple turns of the run. If the run exceeds the + # number of prompt tokens specified, the run will end with status `incomplete`. + # See `incomplete_details` for more info. max_prompt_tokens: nil, # Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Body param: The ID of the - # [Model](https://platform.openai.com/docs/api-reference/models) to be used to - # execute this run. If a value is provided here, it will override the model - # associated with the assistant. If not, the model associated with the assistant - # will be used. + # [Model](https://platform.openai.com/docs/api-reference/models) to be used to + # execute this run. If a value is provided here, it will override the model + # associated with the assistant. If not, the model associated with the assistant + # will be used. model: nil, # Body param: Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, # Body param: **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. 
Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # Body param: Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, # Body param: What sampling temperature to use, between 0 and 2. Higher values - # like 0.8 will make the output more random, while lower values like 0.2 will make - # it more focused and deterministic. + # like 0.8 will make the output more random, while lower values like 0.2 will make + # it more focused and deterministic. temperature: nil, # Body param: Controls which (if any) tool is called by the model. `none` means - # the model will not call any tools and instead generates a message. `auto` is the - # default value and means the model can pick between generating a message or - # calling one or more tools. `required` means the model must call one or more - # tools before responding to the user. Specifying a particular tool like - # `{"type": "file_search"}` or - # `{"type": "function", "function": {"name": "my_function"}}` forces the model to - # call that tool. + # the model will not call any tools and instead generates a message. 
`auto` is the
+ # default value and means the model can pick between generating a message or
+ # calling one or more tools. `required` means the model must call one or more
+ # tools before responding to the user. Specifying a particular tool like
+ # `{"type": "file_search"}` or
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
tool_choice: nil,
# Body param: Override the tools the assistant can use for this run. This is
- # useful for modifying the behavior on a per-run basis.
+ # useful for modifying the behavior on a per-run basis.
tools: nil,
# Body param: An alternative to sampling with temperature, called nucleus
- # sampling, where the model considers the results of the tokens with top_p
- # probability mass. So 0.1 means only the tokens comprising the top 10%
- # probability mass are considered.
+ # sampling, where the model considers the results of the tokens with top_p
+ # probability mass. So 0.1 means only the tokens comprising the top 10%
+ # probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
# Body param: Controls for how a thread will be truncated prior to the run. Use
- # this to control the intial context window of the run.
+ # this to control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
- # `#create` for streaming and non-streaming use cases, respectively.
+ # `#create` for streaming and non-streaming use cases, respectively.
stream: false,
request_options: {}
); end
# See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming
- # counterpart.
+ # counterpart.
#
- # Create a run.
+ # Create a run.
sig do
params(
thread_id: String,
@@ -267,113 +267,113 @@ module OpenAI
# Path param: The ID of the thread to run.
thread_id,
# Body param: The ID of the
- # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
- # execute this run.
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to
+ # execute this run.
assistant_id:,
# Query param: A list of additional fields to include in the response. Currently
- # the only supported value is
- # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file
- # search result content.
+ # the only supported value is
+ # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file
+ # search result content.
#
- # See the
- # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings)
- # for more information.
+ # See the
+ # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings)
+ # for more information.
include: nil,
# Body param: Appends additional instructions at the end of the instructions for
- # the run. This is useful for modifying the behavior on a per-run basis without
- # overriding other instructions.
+ # the run. This is useful for modifying the behavior on a per-run basis without
+ # overriding other instructions.
additional_instructions: nil,
# Body param: Adds additional messages to the thread before creating the run.
additional_messages: nil,
# Body param: Overrides the
- # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant)
- # of the assistant.
This is useful for modifying the behavior on a per-run basis. + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. instructions: nil, # Body param: The maximum number of completion tokens that may be used over the - # course of the run. The run will make a best effort to use only the number of - # completion tokens specified, across multiple turns of the run. If the run - # exceeds the number of completion tokens specified, the run will end with status - # `incomplete`. See `incomplete_details` for more info. + # course of the run. The run will make a best effort to use only the number of + # completion tokens specified, across multiple turns of the run. If the run + # exceeds the number of completion tokens specified, the run will end with status + # `incomplete`. See `incomplete_details` for more info. max_completion_tokens: nil, # Body param: The maximum number of prompt tokens that may be used over the course - # of the run. The run will make a best effort to use only the number of prompt - # tokens specified, across multiple turns of the run. If the run exceeds the - # number of prompt tokens specified, the run will end with status `incomplete`. - # See `incomplete_details` for more info. + # of the run. The run will make a best effort to use only the number of prompt + # tokens specified, across multiple turns of the run. If the run exceeds the + # number of prompt tokens specified, the run will end with status `incomplete`. + # See `incomplete_details` for more info. max_prompt_tokens: nil, # Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Body param: The ID of the - # [Model](https://platform.openai.com/docs/api-reference/models) to be used to - # execute this run. If a value is provided here, it will override the model - # associated with the assistant. If not, the model associated with the assistant - # will be used. + # [Model](https://platform.openai.com/docs/api-reference/models) to be used to + # execute this run. If a value is provided here, it will override the model + # associated with the assistant. If not, the model associated with the assistant + # will be used. model: nil, # Body param: Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, # Body param: **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. 
+ # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # Body param: Specifies the format that the model must output. Compatible with - # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), - # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), - # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the - # message the model generates is valid JSON. + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. # - # **Important:** when using JSON mode, you **must** also instruct the model to - # produce JSON yourself via a system or user message. Without this, the model may - # generate an unending stream of whitespace until the generation reaches the token - # limit, resulting in a long-running and seemingly "stuck" request. Also note that - # the message content may be partially cut off if `finish_reason="length"`, which - # indicates the generation exceeded `max_tokens` or the conversation exceeded the - # max context length. + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, # Body param: What sampling temperature to use, between 0 and 2. Higher values - # like 0.8 will make the output more random, while lower values like 0.2 will make - # it more focused and deterministic. + # like 0.8 will make the output more random, while lower values like 0.2 will make + # it more focused and deterministic. temperature: nil, # Body param: Controls which (if any) tool is called by the model. `none` means - # the model will not call any tools and instead generates a message. `auto` is the - # default value and means the model can pick between generating a message or - # calling one or more tools. `required` means the model must call one or more - # tools before responding to the user. 
Specifying a particular tool like
- # `{"type": "file_search"}` or
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
+ # the model will not call any tools and instead generates a message. `auto` is the
+ # default value and means the model can pick between generating a message or
+ # calling one or more tools. `required` means the model must call one or more
+ # tools before responding to the user. Specifying a particular tool like
+ # `{"type": "file_search"}` or
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
tool_choice: nil,
# Body param: Override the tools the assistant can use for this run. This is
- # useful for modifying the behavior on a per-run basis.
+ # useful for modifying the behavior on a per-run basis.
tools: nil,
# Body param: An alternative to sampling with temperature, called nucleus
- # sampling, where the model considers the results of the tokens with top_p
- # probability mass. So 0.1 means only the tokens comprising the top 10%
- # probability mass are considered.
+ # sampling, where the model considers the results of the tokens with top_p
+ # probability mass. So 0.1 means only the tokens comprising the top 10%
+ # probability mass are considered.
#
- # We generally recommend altering this or temperature but not both.
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
# Body param: Controls for how a thread will be truncated prior to the run. Use
- # this to control the intial context window of the run.
+ # this to control the initial context window of the run.
truncation_strategy: nil,
# There is no need to provide `stream:`. Instead, use `#create_stream_raw` or
- # `#create` for streaming and non-streaming use cases, respectively.
+ # `#create` for streaming and non-streaming use cases, respectively.
stream: true,
request_options: {}
); end
@@ -390,7 +390,7 @@ module OpenAI
# The ID of the run to retrieve.
run_id,
# The ID of the [thread](https://platform.openai.com/docs/api-reference/threads)
- # that was run.
+ # that was run.
thread_id:,
request_options: {}
); end
@@ -408,14 +408,14 @@ module OpenAI
# Path param: The ID of the run to modify.
run_id,
# Path param: The ID of the
- # [thread](https://platform.openai.com/docs/api-reference/threads) that was run.
+ # [thread](https://platform.openai.com/docs/api-reference/threads) that was run.
thread_id:,
# Body param: Set of 16 key-value pairs that can be attached to an object. This
- # can be useful for storing additional information about the object in a
- # structured format, and querying for objects via API or the dashboard.
+ # can be useful for storing additional information about the object in a
+ # structured format, and querying for objects via API or the dashboard.
#
- # Keys are strings with a maximum length of 64 characters. Values are strings with
- # a maximum length of 512 characters.
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
metadata: nil,
request_options: {}
); end
@@ -435,20 +435,20 @@ module OpenAI
# The ID of the thread the run belongs to.
thread_id,
# A cursor for use in pagination. `after` is an object ID that defines your place
- # in the list.
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. order: nil, request_options: {} ); end @@ -469,12 +469,12 @@ module OpenAI request_options: {} ); end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for - # streaming counterpart. + # streaming counterpart. # - # When a run has the `status: "requires_action"` and `required_action.type` is - # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the - # tool calls once they're all completed. All outputs must be submitted in a single - # request. + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. sig do params( run_id: String, @@ -489,24 +489,24 @@ module OpenAI # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the - # [thread](https://platform.openai.com/docs/api-reference/threads) to which this - # run belongs. + # [thread](https://platform.openai.com/docs/api-reference/threads) to which this + # run belongs. thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and - # non-streaming use cases, respectively. + # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and + # non-streaming use cases, respectively. stream: false, request_options: {} ); end # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for - # non-streaming counterpart. + # non-streaming counterpart. # - # When a run has the `status: "requires_action"` and `required_action.type` is - # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the - # tool calls once they're all completed. All outputs must be submitted in a single - # request. + # When a run has the `status: "requires_action"` and `required_action.type` is + # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the + # tool calls once they're all completed. All outputs must be submitted in a single + # request. sig do params( run_id: String, @@ -550,14 +550,14 @@ module OpenAI # Path param: The ID of the run that requires the tool output submission. 
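For reference, `#submit_tool_outputs` as documented above pairs each pending `tool_call_id` from the run's `required_action` with its output. A minimal sketch, assuming the gem's `OpenAI::Client` entry point; the client setup, IDs, and output value here are illustrative and not part of this patch:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) # assumed constructor

# Answer a run stuck in `status: "requires_action"`, one entry per pending tool call.
run = client.beta.threads.runs.submit_tool_outputs(
  "run_abc123",                 # positional run ID, per the signature above
  thread_id: "thread_abc123",
  tool_outputs: [
    {tool_call_id: "call_abc123", output: "57F"}
  ]
)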
run_id, # Path param: The ID of the - # [thread](https://platform.openai.com/docs/api-reference/threads) to which this - # run belongs. + # [thread](https://platform.openai.com/docs/api-reference/threads) to which this + # run belongs. thread_id:, # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and - # non-streaming use cases, respectively. + # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and + # non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi index 86935b35..949e7d14 100644 --- a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs/steps.rbi @@ -25,13 +25,13 @@ module OpenAI # Path param: The ID of the run to which the run step belongs. run_id:, # Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, request_options: {} ); end @@ -55,29 +55,29 @@ module OpenAI # Path param: The ID of the thread the run and run steps belong to. thread_id:, # Query param: A cursor for use in pagination. `after` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, ending with obj_foo, your subsequent call can include - # after=obj_foo in order to fetch the next page of the list. + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, ending with obj_foo, your subsequent call can include + # after=obj_foo in order to fetch the next page of the list. after: nil, # Query param: A cursor for use in pagination. `before` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, starting with obj_foo, your subsequent call can include - # before=obj_foo in order to fetch the previous page of the list. + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, starting with obj_foo, your subsequent call can include + # before=obj_foo in order to fetch the previous page of the list. before: nil, # Query param: A list of additional fields to include in the response. Currently - # the only supported value is - # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file - # search result content. + # the only supported value is + # `step_details.tool_calls[*].file_search.results[*].content` to fetch the file + # search result content. # - # See the - # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # for more information. 
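A sketch of pulling file search result content back with a run step via the `include:` value documented above, under the same assumed `client` as the earlier sketch; the argument order and IDs are assumptions:

step = client.beta.threads.runs.steps.retrieve(
  "step_abc123",
  thread_id: "thread_abc123",
  run_id: "run_abc123",
  include: ["step_details.tool_calls[*].file_search.results[*].content"]
)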
+ # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, # Query param: A limit on the number of objects to be returned. Limit can range - # between 1 and 100, and the default is 20. + # between 1 and 100, and the default is 20. limit: nil, # Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ascending order and `desc` for descending order. + # ascending order and `desc` for descending order. order: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 34c8a36d..672459b2 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -9,23 +9,23 @@ module OpenAI # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. # - # **Starting a new project?** We recommend trying - # [Responses](https://platform.openai.com/docs/api-reference/responses) to take - # advantage of the latest OpenAI platform features. Compare - # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). # - # --- + # --- # - # Creates a model response for the given chat conversation. Learn more in the - # [text generation](https://platform.openai.com/docs/guides/text-generation), - # [vision](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio) guides. + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. # - # Parameter support can differ depending on the model used to generate the - # response, particularly for newer reasoning models. Parameters that are only - # supported for reasoning models are noted below. For the current state of - # unsupported parameters in reasoning models, - # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). sig do params( messages: T::Array[ @@ -88,212 +88,212 @@ module OpenAI end def create( # A list of messages comprising the conversation so far. Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). 
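For orientation, a minimal non-streaming call to the `#create` documented here might look as follows; the response accessor chain is an assumption about the generated model classes, not part of this patch:

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [
    {role: "system", content: "You are a terse assistant."},
    {role: "user", content: "Name three Ruby web frameworks."}
  ]
)
puts completion.choices.first.message.content # assumed accessor chain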
+ # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). messages:, # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, # Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. frequency_penalty: nil, # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. # - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. function_call: nil, # Deprecated in favor of `tools`. # - # A list of functions the model may generate JSON inputs for. + # A list of functions the model may generate JSON inputs for. functions: nil, # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. 
The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. logit_bias: nil, # Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. + # returns the log probabilities of each output token returned in the `content` of + # `message`. logprobs: nil, # An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_completion_tokens: nil, # The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). max_tokens: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: + # of generating text, which is the default: # - # `["text"]` + # `["text"]` # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). To request that - # this model generate both text and audio responses, you can use: + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: # - # `["text", "audio"]` + # `["text", "audio"]` modalities: nil, # How many chat completion choices to generate for each input message. Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. n: nil, # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. 
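Combining the `modalities` and `audio` parameters above, a hypothetical audio-output request could read as below; the hash literals are assumed to coerce into the typed request params:

completion = client.chat.completions.create(
  model: "gpt-4o-audio-preview",
  modalities: ["text", "audio"],
  audio: {voice: "alloy", format: "wav"}, # shape follows the audio guide
  messages: [{role: "user", content: "Say hello."}]
)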
parallel_tool_calls: nil, # Static predicted output content, such as the content of a text file that is - # being regenerated. + # being regenerated. prediction: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. presence_penalty: nil, # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # An object specifying the format that the model must output. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. response_format: nil, # This feature is in Beta. If specified, our system will make a best effort to - # sample deterministically, such that repeated requests with the same `seed` and - # parameters should return the same result. Determinism is not guaranteed, and you - # should refer to the `system_fingerprint` response parameter to monitor changes - # in the backend. + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. seed: nil, # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: - # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. - # - When not set, the default behavior is 'auto'. - # - # When this parameter is set, the response body will include the `service_tier` - # utilized. 
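A sketch of Structured Outputs through the `response_format` parameter documented above, with an inline JSON schema; hash literals again stand in for the typed request models:

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: "user", content: "Extract: Jane, 34, Lisbon"}],
  response_format: {
    type: "json_schema",
    json_schema: {
      name: "person",
      strict: true,
      schema: {
        type: "object",
        properties: {
          name: {type: "string"},
          age: {type: "integer"},
          city: {type: "string"}
        },
        required: ["name", "age", "city"],
        additionalProperties: false
      }
    }
  }
)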
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
service_tier: nil,
# Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
+ # returned text will not contain the stop sequence.
stop: nil,
# Whether or not to store the output of this chat completion request for use in
- # our [model distillation](https://platform.openai.com/docs/guides/distillation)
- # or [evals](https://platform.openai.com/docs/guides/evals) products.
+ # our [model distillation](https://platform.openai.com/docs/guides/distillation)
+ # or [evals](https://platform.openai.com/docs/guides/evals) products.
store: nil,
# Options for streaming response. Only set this when you set `stream: true`.
stream_options: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic. We generally recommend altering this or `top_p` but
- # not both.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic. We generally recommend altering this or `top_p` but
+ # not both.
temperature: nil,
# Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tool and instead generates a message. `auto` means the model can
- # pick between generating a message or calling one or more tools. `required` means
- # the model must call one or more tools. Specifying a particular tool via
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # `none` is the default when no tools are present. `auto` is the default if tools
- # are present.
+ # not call any tool and instead generates a message. `auto` means the model can
+ # pick between generating a message or calling one or more tools. `required` means
+ # the model must call one or more tools. Specifying a particular tool via
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
+ #
+ # `none` is the default when no tools are present. `auto` is the default if tools
+ # are present.
tool_choice: nil,
# A list of tools the model may call. Currently, only functions are supported as a
- # tool. Use this to provide a list of functions the model may generate JSON inputs
- # for. A max of 128 functions are supported.
+ # tool. Use this to provide a list of functions the model may generate JSON inputs
+ # for. A max of 128 functions are supported.
tools: nil,
# An integer between 0 and 20 specifying the number of most likely tokens to
- # return at each token position, each with an associated log probability.
+ # `logprobs` must be set to `true` if this parameter is used. top_logprobs: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. # - # **Starting a new project?** We recommend trying - # [Responses](https://platform.openai.com/docs/api-reference/responses) to take - # advantage of the latest OpenAI platform features. Compare - # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). + # **Starting a new project?** We recommend trying + # [Responses](https://platform.openai.com/docs/api-reference/responses) to take + # advantage of the latest OpenAI platform features. Compare + # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions?api-mode=responses). # - # --- + # --- # - # Creates a model response for the given chat conversation. Learn more in the - # [text generation](https://platform.openai.com/docs/guides/text-generation), - # [vision](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio) guides. + # Creates a model response for the given chat conversation. Learn more in the + # [text generation](https://platform.openai.com/docs/guides/text-generation), + # [vision](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio) guides. # - # Parameter support can differ depending on the model used to generate the - # response, particularly for newer reasoning models. Parameters that are only - # supported for reasoning models are noted below. For the current state of - # unsupported parameters in reasoning models, - # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). + # Parameter support can differ depending on the model used to generate the + # response, particularly for newer reasoning models. Parameters that are only + # supported for reasoning models are noted below. 
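As noted above, `#stream_raw` is the streaming counterpart of `#create`. A sketch; the chunk accessors mirror the API's `chat.completion.chunk` shape and are assumptions about the generated classes:

stream = client.chat.completions.stream_raw(
  model: "gpt-4o",
  messages: [{role: "user", content: "Write a haiku about Ruby."}]
)
stream.each do |chunk|
  print chunk.choices.first&.delta&.content # content arrives as incremental deltas
end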
For the current state of + # unsupported parameters in reasoning models, + # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). sig do params( messages: T::Array[ @@ -356,195 +356,195 @@ module OpenAI end def stream_raw( # A list of messages comprising the conversation so far. Depending on the - # [model](https://platform.openai.com/docs/models) you use, different message - # types (modalities) are supported, like - # [text](https://platform.openai.com/docs/guides/text-generation), - # [images](https://platform.openai.com/docs/guides/vision), and - # [audio](https://platform.openai.com/docs/guides/audio). + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). messages:, # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, # Parameters for audio output. Required when audio output is requested with - # `modalities: ["audio"]`. - # [Learn more](https://platform.openai.com/docs/guides/audio). + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. frequency_penalty: nil, # Deprecated in favor of `tool_choice`. # - # Controls which (if any) function is called by the model. + # Controls which (if any) function is called by the model. # - # `none` means the model will not call a function and instead generates a message. + # `none` means the model will not call a function and instead generates a message. # - # `auto` means the model can pick between generating a message or calling a - # function. + # `auto` means the model can pick between generating a message or calling a + # function. # - # Specifying a particular function via `{"name": "my_function"}` forces the model - # to call that function. + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. # - # `none` is the default when no functions are present. `auto` is the default if - # functions are present. + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. function_call: nil, # Deprecated in favor of `tools`. # - # A list of functions the model may generate JSON inputs for. + # A list of functions the model may generate JSON inputs for. functions: nil, # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the - # tokenizer) to an associated bias value from -100 to 100. 
Mathematically, the - # bias is added to the logits generated by the model prior to sampling. The exact - # effect will vary per model, but values between -1 and 1 should decrease or - # increase likelihood of selection; values like -100 or 100 should result in a ban - # or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. logit_bias: nil, # Whether to return log probabilities of the output tokens or not. If true, - # returns the log probabilities of each output token returned in the `content` of - # `message`. + # returns the log probabilities of each output token returned in the `content` of + # `message`. logprobs: nil, # An upper bound for the number of tokens that can be generated for a completion, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_completion_tokens: nil, # The maximum number of [tokens](/tokenizer) that can be generated in the chat - # completion. This value can be used to control - # [costs](https://openai.com/api/pricing/) for text generated via API. + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. # - # This value is now deprecated in favor of `max_completion_tokens`, and is not - # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o1 series models](https://platform.openai.com/docs/guides/reasoning). max_tokens: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Output types that you would like the model to generate. Most models are capable - # of generating text, which is the default: + # of generating text, which is the default: # - # `["text"]` + # `["text"]` # - # The `gpt-4o-audio-preview` model can also be used to - # [generate audio](https://platform.openai.com/docs/guides/audio). To request that - # this model generate both text and audio responses, you can use: + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: # - # `["text", "audio"]` + # `["text", "audio"]` modalities: nil, # How many chat completion choices to generate for each input message. 
Note that - # you will be charged based on the number of generated tokens across all of the - # choices. Keep `n` as `1` to minimize costs. + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. n: nil, # Whether to enable - # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) - # during tool use. + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, # Static predicted output content, such as the content of a text file that is - # being regenerated. + # being regenerated. prediction: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. presence_penalty: nil, # **o-series models only** # - # Constrains effort on reasoning for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, # An object specifying the format that the model must output. # - # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured - # Outputs which ensures the model will match your supplied JSON schema. Learn more - # in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. response_format: nil, # This feature is in Beta. If specified, our system will make a best effort to - # sample deterministically, such that repeated requests with the same `seed` and - # parameters should return the same result. Determinism is not guaranteed, and you - # should refer to the `system_fingerprint` response parameter to monitor changes - # in the backend. + # sample deterministically, such that repeated requests with the same `seed` and + # parameters should return the same result. Determinism is not guaranteed, and you + # should refer to the `system_fingerprint` response parameter to monitor changes + # in the backend. seed: nil, # Specifies the latency tier to use for processing the request. 
This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarentee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarentee.
- # - When not set, the default behavior is 'auto'.
- #
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
service_tier: nil,
# Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
+ # returned text will not contain the stop sequence.
stop: nil,
# Whether or not to store the output of this chat completion request for use in
- # our [model distillation](https://platform.openai.com/docs/guides/distillation)
- # or [evals](https://platform.openai.com/docs/guides/evals) products.
+ # our [model distillation](https://platform.openai.com/docs/guides/distillation)
+ # or [evals](https://platform.openai.com/docs/guides/evals) products.
store: nil,
# Options for streaming response. Only set this when you set `stream: true`.
stream_options: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic. We generally recommend altering this or `top_p` but
- # not both.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic. We generally recommend altering this or `top_p` but
+ # not both.
temperature: nil,
# Controls which (if any) tool is called by the model. `none` means the model will
- # not call any tool and instead generates a message. `auto` means the model can
- # pick between generating a message or calling one or more tools. `required` means
- # the model must call one or more tools. Specifying a particular tool via
- # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
- # call that tool.
- #
- # `none` is the default when no tools are present. `auto` is the default if tools
- # are present.
+ # not call any tool and instead generates a message. `auto` means the model can
+ # pick between generating a message or calling one or more tools. `required` means
+ # the model must call one or more tools. Specifying a particular tool via
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
+ #
+ # `none` is the default when no tools are present. `auto` is the default if tools
+ # are present.
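Tying `tools` and `tool_choice` together, a hypothetical function-calling request that forces one specific tool; the `get_weather` function is invented for illustration:

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: "user", content: "What is the weather in Lisbon?"}],
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather",
        description: "Look up current weather for a city",
        parameters: {
          type: "object",
          properties: {city: {type: "string"}},
          required: ["city"]
        }
      }
    }
  ],
  tool_choice: {type: "function", function: {name: "get_weather"}}
)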
tool_choice: nil, # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to - # return at each token position, each with an associated log probability. - # `logprobs` must be set to `true` if this parameter is used. + # return at each token position, each with an associated log probability. + # `logprobs` must be set to `true` if this parameter is used. top_logprobs: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more - # about the - # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end # Get a stored chat completion. Only Chat Completions that have been created with - # the `store` parameter set to `true` will be returned. + # the `store` parameter set to `true` will be returned. sig do params( completion_id: String, @@ -558,8 +558,8 @@ module OpenAI request_options: {} ); end # Modify a stored chat completion. Only Chat Completions that have been created - # with the `store` parameter set to `true` can be modified. Currently, the only - # supported modification is to update the `metadata` field. + # with the `store` parameter set to `true` can be modified. Currently, the only + # supported modification is to update the `metadata` field. sig do params( completion_id: String, @@ -572,16 +572,16 @@ module OpenAI # The ID of the chat completion to update. completion_id, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. 
Values are strings with + # a maximum length of 512 characters. metadata:, request_options: {} ); end # List stored Chat Completions. Only Chat Completions that have been stored with - # the `store` parameter set to `true` will be returned. + # the `store` parameter set to `true` will be returned. sig do params( after: String, @@ -600,17 +600,17 @@ module OpenAI limit: nil, # A list of metadata keys to filter the Chat Completions by. Example: # - # `metadata[key1]=value1&metadata[key2]=value2` + # `metadata[key1]=value1&metadata[key2]=value2` metadata: nil, # The model used to generate the Chat Completions. model: nil, # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or - # `desc` for descending order. Defaults to `asc`. + # `desc` for descending order. Defaults to `asc`. order: nil, request_options: {} ); end # Delete a stored chat completion. Only Chat Completions that have been created - # with the `store` parameter set to `true` can be deleted. + # with the `store` parameter set to `true` can be deleted. sig do params( completion_id: String, diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/lib/openai/resources/chat/completions/messages.rbi index bf8cde9f..53d6538e 100644 --- a/rbi/lib/openai/resources/chat/completions/messages.rbi +++ b/rbi/lib/openai/resources/chat/completions/messages.rbi @@ -6,7 +6,7 @@ module OpenAI class Completions class Messages # Get the messages in a stored chat completion. Only Chat Completions that have - # been created with the `store` parameter set to `true` will be returned. + # been created with the `store` parameter set to `true` will be returned. sig do params( completion_id: String, @@ -25,7 +25,7 @@ module OpenAI # Number of messages to retrieve. limit: nil, # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` - # for descending order. Defaults to `asc`. + # for descending order. Defaults to `asc`. order: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi index a91aba7f..f57faf40 100644 --- a/rbi/lib/openai/resources/completions.rbi +++ b/rbi/lib/openai/resources/completions.rbi @@ -5,7 +5,7 @@ module OpenAI class Completions # See {OpenAI::Resources::Completions#create_streaming} for streaming counterpart. # - # Creates a completion for the provided prompt and parameters. + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), @@ -32,116 +32,116 @@ module OpenAI end def create( # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. 
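The retrieve/update/list/delete endpoints above only see completions created with `store: true`. A sketch of the round trip, under the same assumptions as the earlier examples:

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: "user", content: "Ping"}],
  store: true,
  metadata: {"purpose" => "demo"}
)

client.chat.completions.update(completion.id, metadata: {"purpose" => "demo-v2"})
page = client.chat.completions.list(model: "gpt-4o", limit: 10)
messages = client.chat.completions.messages.list(completion.id, limit: 5)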
+ # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. prompt:, # Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. + # the highest log probability per token). Results cannot be streamed. # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. best_of: nil, # Echo back the prompt in addition to the completion echo: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) frequency_penalty: nil, # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the GPT - # tokenizer) to an associated bias value from -100 to 100. You can use this - # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. - # Mathematically, the bias is added to the logits generated by the model prior to - # sampling. The exact effect will vary per model, but values between -1 and 1 - # should decrease or increase likelihood of selection; values like -100 or 100 - # should result in a ban or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. # - # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token - # from being generated. + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. logit_bias: nil, # Include the log probabilities on the `logprobs` most likely output tokens, as - # well the chosen tokens. For example, if `logprobs` is 5, the API will return a - # list of the 5 most likely tokens. 
The API will always return the `logprob` of
- # the sampled token, so there may be up to `logprobs+1` elements in the response.
+ # well as the chosen tokens. For example, if `logprobs` is 5, the API will return a
+ # list of the 5 most likely tokens. The API will always return the `logprob` of
+ # the sampled token, so there may be up to `logprobs+1` elements in the response.
#
- # The maximum value for `logprobs` is 5.
+ # The maximum value for `logprobs` is 5.
logprobs: nil,
# The maximum number of [tokens](/tokenizer) that can be generated in the
- # completion.
+ # completion.
#
- # The token count of your prompt plus `max_tokens` cannot exceed the model's
- # context length.
- # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
- # for counting tokens.
+ # The token count of your prompt plus `max_tokens` cannot exceed the model's
+ # context length.
+ # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
+ # for counting tokens.
max_tokens: nil,
# How many completions to generate for each prompt.
#
- # **Note:** Because this parameter generates many completions, it can quickly
- # consume your token quota. Use carefully and ensure that you have reasonable
- # settings for `max_tokens` and `stop`.
+ # **Note:** Because this parameter generates many completions, it can quickly
+ # consume your token quota. Use carefully and ensure that you have reasonable
+ # settings for `max_tokens` and `stop`.
n: nil,
# Number between -2.0 and 2.0. Positive values penalize new tokens based on
- # whether they appear in the text so far, increasing the model's likelihood to
- # talk about new topics.
+ # whether they appear in the text so far, increasing the model's likelihood to
+ # talk about new topics.
#
- # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
+ # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation)
presence_penalty: nil,
# If specified, our system will make a best effort to sample deterministically,
- # such that repeated requests with the same `seed` and parameters should return
- # the same result.
+ # such that repeated requests with the same `seed` and parameters should return
+ # the same result.
#
- # Determinism is not guaranteed, and you should refer to the `system_fingerprint`
- # response parameter to monitor changes in the backend.
+ # Determinism is not guaranteed, and you should refer to the `system_fingerprint`
+ # response parameter to monitor changes in the backend.
seed: nil,
# Up to 4 sequences where the API will stop generating further tokens. The
- # returned text will not contain the stop sequence.
+ # returned text will not contain the stop sequence.
stop: nil,
# Options for streaming response. Only set this when you set `stream: true`.
stream_options: nil,
# The suffix that comes after a completion of inserted text.
#
- # This parameter is only supported for `gpt-3.5-turbo-instruct`.
+ # This parameter is only supported for `gpt-3.5-turbo-instruct`.
suffix: nil,
# What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
- # make the output more random, while lower values like 0.2 will make it more
- # focused and deterministic.
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
#
- # We generally recommend altering this or `top_p` but not both.
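For the legacy endpoint, a minimal `#create` call using the `{"50256": -100}` bias mentioned above; client setup is as in the earlier sketches and the accessor chain is an assumption:

completion = client.completions.create(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a tagline for a Ruby HTTP client.",
  max_tokens: 32,
  logit_bias: {"50256" => -100}, # suppress the <|endoftext|> token
  temperature: 0.7
)
puts completion.choices.first.text # assumed accessor chain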
+ # We generally recommend altering this or `top_p` but not both. temperature: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. # - # Creates a completion for the provided prompt and parameters. + # Creates a completion for the provided prompt and parameters. sig do params( model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), @@ -168,110 +168,110 @@ module OpenAI end def create_streaming( # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, # The prompt(s) to generate completions for, encoded as a string, array of - # strings, array of tokens, or array of token arrays. + # strings, array of tokens, or array of token arrays. # - # Note that <|endoftext|> is the document separator that the model sees during - # training, so if a prompt is not specified the model will generate as if from the - # beginning of a new document. + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. prompt:, # Generates `best_of` completions server-side and returns the "best" (the one with - # the highest log probability per token). Results cannot be streamed. + # the highest log probability per token). Results cannot be streamed. # - # When used with `n`, `best_of` controls the number of candidate completions and - # `n` specifies how many to return – `best_of` must be greater than `n`. + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. 
+ # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. best_of: nil, # Echo back the prompt in addition to the completion echo: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # existing frequency in the text so far, decreasing the model's likelihood to - # repeat the same line verbatim. + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) frequency_penalty: nil, # Modify the likelihood of specified tokens appearing in the completion. # - # Accepts a JSON object that maps tokens (specified by their token ID in the GPT - # tokenizer) to an associated bias value from -100 to 100. You can use this - # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. - # Mathematically, the bias is added to the logits generated by the model prior to - # sampling. The exact effect will vary per model, but values between -1 and 1 - # should decrease or increase likelihood of selection; values like -100 or 100 - # should result in a ban or exclusive selection of the relevant token. + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. # - # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token - # from being generated. + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. logit_bias: nil, # Include the log probabilities on the `logprobs` most likely output tokens, as - # well the chosen tokens. For example, if `logprobs` is 5, the API will return a - # list of the 5 most likely tokens. The API will always return the `logprob` of - # the sampled token, so there may be up to `logprobs+1` elements in the response. + # well the chosen tokens. For example, if `logprobs` is 5, the API will return a + # list of the 5 most likely tokens. The API will always return the `logprob` of + # the sampled token, so there may be up to `logprobs+1` elements in the response. # - # The maximum value for `logprobs` is 5. + # The maximum value for `logprobs` is 5. logprobs: nil, # The maximum number of [tokens](/tokenizer) that can be generated in the - # completion. + # completion. # - # The token count of your prompt plus `max_tokens` cannot exceed the model's - # context length. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. + # The token count of your prompt plus `max_tokens` cannot exceed the model's + # context length. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. 
max_tokens: nil, # How many completions to generate for each prompt. # - # **Note:** Because this parameter generates many completions, it can quickly - # consume your token quota. Use carefully and ensure that you have reasonable - # settings for `max_tokens` and `stop`. + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. n: nil, # Number between -2.0 and 2.0. Positive values penalize new tokens based on - # whether they appear in the text so far, increasing the model's likelihood to - # talk about new topics. + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. # - # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) presence_penalty: nil, # If specified, our system will make a best effort to sample deterministically, - # such that repeated requests with the same `seed` and parameters should return - # the same result. + # such that repeated requests with the same `seed` and parameters should return + # the same result. # - # Determinism is not guaranteed, and you should refer to the `system_fingerprint` - # response parameter to monitor changes in the backend. + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. seed: nil, # Up to 4 sequences where the API will stop generating further tokens. The - # returned text will not contain the stop sequence. + # returned text will not contain the stop sequence. stop: nil, # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, # The suffix that comes after a completion of inserted text. # - # This parameter is only supported for `gpt-3.5-turbo-instruct`. + # This parameter is only supported for `gpt-3.5-turbo-instruct`. suffix: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. # - # We generally recommend altering this or `top_p` but not both. + # We generally recommend altering this or `top_p` but not both. temperature: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. 
Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/lib/openai/resources/embeddings.rbi index f6e4ac51..ae6d042f 100644 --- a/rbi/lib/openai/resources/embeddings.rbi +++ b/rbi/lib/openai/resources/embeddings.rbi @@ -17,29 +17,29 @@ module OpenAI end def create( # Input text to embed, encoded as a string or array of tokens. To embed multiple - # inputs in a single request, pass an array of strings or array of token arrays. - # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 - # dimensions or less. - # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # inputs in a single request, pass an array of strings or array of token arrays. + # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. input:, # ID of the model to use. You can use the - # [List models](https://platform.openai.com/docs/api-reference/models/list) API to - # see all of your available models, or see our - # [Model overview](https://platform.openai.com/docs/models) for descriptions of - # them. + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, # The number of dimensions the resulting output embeddings should have. Only - # supported in `text-embedding-3` and later models. + # supported in `text-embedding-3` and later models. dimensions: nil, # The format to return the embeddings in. Can be either `float` or - # [`base64`](https://pypi.org/project/pybase64/). + # [`base64`](https://pypi.org/project/pybase64/). encoding_format: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 56bf2af9..482ed085 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -4,26 +4,26 @@ module OpenAI module Resources class Files # Upload a file that can be used across various endpoints. Individual files can be - # up to 512 MB, and the size of all files uploaded by one organization can be up - # to 100 GB. + # up to 512 MB, and the size of all files uploaded by one organization can be up + # to 100 GB. # - # The Assistants API supports files up to 2 million tokens and of specific file - # types. See the - # [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for - # details. 
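A usage sketch for the embeddings `#create` hunk above, reusing the `client` from the earlier completions sketch; the model name and input values are illustrative only.

```ruby
# Embed a single string; `input` also accepts arrays of strings or token
# arrays, subject to the limits documented above.
res = client.embeddings.create(
  model: "text-embedding-3-small",
  input: "The quick brown fox",
  dimensions: 256,        # only honored by text-embedding-3 and later models
  encoding_format: :float # :float or :base64
)
puts res.data.first.embedding.length # => 256
```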
+ # The Assistants API supports files up to 2 million tokens and of specific file + # types. See the + # [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for + # details. # - # The Fine-tuning API only supports `.jsonl` files. The input also has certain - # required formats for fine-tuning - # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or - # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) - # models. + # The Fine-tuning API only supports `.jsonl` files. The input also has certain + # required formats for fine-tuning + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # models. # - # The Batch API only supports `.jsonl` files up to 200 MB in size. The input also - # has a specific required - # [format](https://platform.openai.com/docs/api-reference/batch/request-input). + # The Batch API only supports `.jsonl` files up to 200 MB in size. The input also + # has a specific required + # [format](https://platform.openai.com/docs/api-reference/batch/request-input). # - # Please [contact us](https://help.openai.com/) if you need to increase these - # storage limits. + # Please [contact us](https://help.openai.com/) if you need to increase these + # storage limits. sig do params( file: T.any(IO, StringIO), @@ -36,9 +36,9 @@ module OpenAI # The File object (not file name) to be uploaded. file:, # The intended purpose of the uploaded file. One of: - `assistants`: Used in the - # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for - # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: - # Flexible file type for any purpose - `evals`: Used for eval data sets + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, request_options: {} ); end @@ -68,15 +68,15 @@ module OpenAI end def list( # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 10,000, and the default is 10,000. + # 10,000, and the default is 10,000. limit: nil, # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. order: nil, # Only return files with the given purpose. purpose: nil, diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/lib/openai/resources/fine_tuning/jobs.rbi index 90b307a0..b39d1d85 100644 --- a/rbi/lib/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/lib/openai/resources/fine_tuning/jobs.rbi @@ -8,12 +8,12 @@ module OpenAI attr_reader :checkpoints # Creates a fine-tuning job which begins the process of creating a new model from - # a given dataset. + # a given dataset. 
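Sketch for the files hunk above: upload a JSONL file for fine-tuning, then page through uploaded files. The `files.create` method name and the `auto_paging_each` helper are assumptions about the generated resource; `client` is reused from the first sketch.

```ruby
# Upload the File object itself (not the file name), per the docs above.
file = client.files.create(
  file: File.open("training.jsonl"), # IO/StringIO, per the signature
  purpose: "fine-tune"               # fine-tuning requires .jsonl with this purpose
)
puts file.id

# List uploads using the documented cursor parameters.
page = client.files.list(limit: 20, order: :desc)
page.auto_paging_each { |f| puts "#{f.id} #{f.purpose}" } # helper name assumed
```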
# - # Response includes details of the enqueued job including job status and the name - # of the fine-tuned models once complete. + # Response includes details of the enqueued job including job status and the name + # of the fine-tuned models once complete. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), @@ -33,68 +33,68 @@ module OpenAI end def create( # The name of the model to fine-tune. You can select one of the - # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). model:, # The ID of an uploaded file that contains training data. # - # See [upload file](https://platform.openai.com/docs/api-reference/files/create) - # for how to upload a file. + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. # - # Your dataset must be formatted as a JSONL file. Additionally, you must upload - # your file with the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. # - # The contents of the file should differ depending on if the model uses the - # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), - # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) - # format, or if the fine-tuning method uses the - # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) - # format. + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. training_file:, # The hyperparameters used for the fine-tuning job. This value is now deprecated - # in favor of `method`, and should be passed in under the `method` parameter. + # in favor of `method`, and should be passed in under the `method` parameter. hyperparameters: nil, # A list of integrations to enable for your fine-tuning job. integrations: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The method used for fine-tuning. method_: nil, # The seed controls the reproducibility of the job. 
Passing in the same seed and - # job parameters should produce the same results, but may differ in rare cases. If - # a seed is not specified, one will be generated for you. + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. seed: nil, # A string of up to 64 characters that will be added to your fine-tuned model - # name. + # name. # - # For example, a `suffix` of "custom-model-name" would produce a model name like - # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. suffix: nil, # The ID of an uploaded file that contains validation data. # - # If you provide this file, the data is used to generate validation metrics - # periodically during fine-tuning. These metrics can be viewed in the fine-tuning - # results file. The same data should not be present in both train and validation - # files. + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. # - # Your dataset must be formatted as a JSONL file. You must upload your file with - # the purpose `fine-tune`. + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) - # for more details. + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. validation_file: nil, request_options: {} ); end # Get info about a fine-tuning job. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( fine_tuning_job_id: String, @@ -123,7 +123,7 @@ module OpenAI # Number of fine-tuning jobs to retrieve. limit: nil, # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. - # Alternatively, set `metadata=null` to indicate no metadata. + # Alternatively, set `metadata=null` to indicate no metadata. metadata: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 7b0c9a45..79a97ac3 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -18,24 +18,24 @@ module OpenAI end def create_variation( # The image to use as the basis for the variation(s). Must be a valid PNG file, - # less than 4MB, and square. + # less than 4MB, and square. image:, # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. model: nil, # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. n: nil, # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. size: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. 
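Tying the pieces together for the jobs#create hunk above: a hypothetical fine-tuning run that reuses `client` and the `file` uploaded in the files sketch; the base model ID is illustrative.

```ruby
# Kick off a fine-tuning job from a previously uploaded JSONL file.
job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini-2024-07-18", # must be a supported fine-tunable model
  training_file: file.id,          # file uploaded with purpose "fine-tune"
  suffix: "custom-model-name",     # yields ft:...:custom-model-name:... names
  seed: 1234                       # reproducible up to rare divergence
)
puts job.status

# Poll by ID; `retrieve` is documented in the same hunk.
puts client.fine_tuning.jobs.retrieve(job.id).status
```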
- # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end @@ -56,30 +56,30 @@ module OpenAI end def edit( # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. + # is not provided, image must have transparency, which will be used as the mask. image:, # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters. prompt:, # An additional image whose fully transparent areas (e.g. where alpha is zero) - # indicate where `image` should be edited. Must be a valid PNG file, less than - # 4MB, and have the same dimensions as `image`. + # indicate where `image` should be edited. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. mask: nil, # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # time. model: nil, # The number of images to generate. Must be between 1 and 10. n: nil, # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # `1024x1024`. size: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end @@ -100,33 +100,33 @@ module OpenAI end def generate( # A text description of the desired image(s). The maximum length is 1000 - # characters for `dall-e-2` and 4000 characters for `dall-e-3`. + # characters for `dall-e-2` and 4000 characters for `dall-e-3`. prompt:, # The model to use for image generation. model: nil, # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # `n=1` is supported. n: nil, # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # details and greater consistency across the image. This param is only supported + # for `dall-e-3`. quality: nil, # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or + # `1024x1792` for `dall-e-3` models. size: nil, # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. 
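A sketch of the `#edit` call documented above; the PNG paths are placeholders, and passing the enum values as symbols is an assumption about how the generated `OrSymbol` types coerce.

```ruby
# Edit a square PNG; the mask's transparent areas mark the editable region.
images = client.images.edit(
  image: File.open("square.png"),         # valid PNG, < 4 MB, square
  mask: File.open("mask.png"),            # same dimensions as `image`
  prompt: "Add a red balloon in the sky", # <= 1000 characters
  n: 1,
  size: :"1024x1024",
  response_format: :url                   # URLs expire after 60 minutes
)
puts images.data.first.url
```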
- # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # causes the model to lean towards generating hyper-real and dramatic images. + # Natural causes the model to produce more natural, less hyper-real looking + # images. This param is only supported for `dall-e-3`. style: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/lib/openai/resources/models.rbi index 58a63411..252a1dca 100644 --- a/rbi/lib/openai/resources/models.rbi +++ b/rbi/lib/openai/resources/models.rbi @@ -4,7 +4,7 @@ module OpenAI module Resources class Models # Retrieves a model instance, providing basic information about the model such as - # the owner and permissioning. + # the owner and permissioning. sig do params( model: String, @@ -18,7 +18,7 @@ module OpenAI request_options: {} ); end # Lists the currently available models, and provides basic information about each - # one such as the owner and availability. + # one such as the owner and availability. sig do params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))) .returns(OpenAI::Internal::Page[OpenAI::Models::Model]) @@ -26,7 +26,7 @@ module OpenAI def list(request_options: {}); end # Delete a fine-tuned model. You must have the Owner role in your organization to - # delete a model. + # delete a model. sig do params( model: String, diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/lib/openai/resources/moderations.rbi index 9e47411a..b068d8e7 100644 --- a/rbi/lib/openai/resources/moderations.rbi +++ b/rbi/lib/openai/resources/moderations.rbi @@ -4,7 +4,7 @@ module OpenAI module Resources class Moderations # Classifies if text and/or image inputs are potentially harmful. Learn more in - # the [moderation guide](https://platform.openai.com/docs/guides/moderation). + # the [moderation guide](https://platform.openai.com/docs/guides/moderation). sig do params( input: T.any( @@ -25,12 +25,12 @@ module OpenAI end def create( # Input (or inputs) to classify. Can be a single string, an array of strings, or - # an array of multi-modal input objects similar to other models. + # an array of multi-modal input objects similar to other models. input:, # The content moderation model you would like to use. Learn more in - # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and - # learn about available models - # [here](https://platform.openai.com/docs/models#moderation). + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). model: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index 8fb89354..a8108266 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -8,17 +8,17 @@ module OpenAI # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. # - # Creates a model response. 
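The moderations hunk above needs little setup; a minimal sketch, reusing `client`:

```ruby
# Classify a single string; `input` also accepts arrays and multi-modal parts.
mod = client.moderations.create(input: "I want to hurt someone.")
result = mod.results.first
puts result.flagged # => true/false
```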
Provide - # [text](https://platform.openai.com/docs/guides/text) or - # [image](https://platform.openai.com/docs/guides/images) inputs to generate - # [text](https://platform.openai.com/docs/guides/text) or - # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have - # the model call your own - # [custom code](https://platform.openai.com/docs/guides/function-calling) or use - # built-in [tools](https://platform.openai.com/docs/guides/tools) like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use - # your own data as input for the model's response. + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. sig do params( input: T.any( @@ -77,126 +77,126 @@ module OpenAI def create( # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) input:, # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. 
+ # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. include: nil, # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. instructions: nil, # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, # Whether to store the generated model response for later retrieval via API. store: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a - # response. 
See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. tool_choice: nil, # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. truncation: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. 
Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. # - # Creates a model response. Provide - # [text](https://platform.openai.com/docs/guides/text) or - # [image](https://platform.openai.com/docs/guides/images) inputs to generate - # [text](https://platform.openai.com/docs/guides/text) or - # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have - # the model call your own - # [custom code](https://platform.openai.com/docs/guides/function-calling) or use - # built-in [tools](https://platform.openai.com/docs/guides/tools) like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use - # your own data as input for the model's response. + # Creates a model response. Provide + # [text](https://platform.openai.com/docs/guides/text) or + # [image](https://platform.openai.com/docs/guides/images) inputs to generate + # [text](https://platform.openai.com/docs/guides/text) or + # [JSON](https://platform.openai.com/docs/guides/structured-outputs) outputs. Have + # the model call your own + # [custom code](https://platform.openai.com/docs/guides/function-calling) or use + # built-in [tools](https://platform.openai.com/docs/guides/tools) like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use + # your own data as input for the model's response. sig do params( input: T.any( @@ -292,110 +292,110 @@ module OpenAI def stream_raw( # Text, image, or file inputs to the model, used to generate a response. # - # Learn more: + # Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) input:, # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, # Specify additional output data to include in the model response. Currently - # supported values are: + # supported values are: # - # - `file_search_call.results`: Include the search results of the file search tool - # call. - # - `message.input_image.image_url`: Include image urls from the input message. 
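A sketch against the Responses `#create` signature above, including the multi-turn threading described under `previous_response_id`; `gpt-4o` is illustrative and `client` is reused from earlier sketches.

```ruby
# Non-streaming response; per the docs, do not pass `stream:` here.
response = client.responses.create(
  model: "gpt-4o",
  input: "Summarize nucleus sampling in two sentences.",
  instructions: "Answer like a terse technical editor.", # system/developer message
  temperature: 0.2, # tune this or top_p, not both
  store: true       # keep the response retrievable via the API
)
puts response.id

# Thread a follow-up turn onto the stored conversation state.
follow_up = client.responses.create(
  model: "gpt-4o",
  input: "Now give one concrete example.",
  previous_response_id: response.id
)
puts follow_up.id
```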
- # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. include: nil, # Inserts a system (or developer) message as the first item in the model's - # context. + # context. # - # When using along with `previous_response_id`, the instructions from a previous - # response will not be carried over to the next response. This makes it simple to - # swap out system (or developer) messages in new responses. + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. instructions: nil, # An upper bound for the number of tokens that can be generated for a response, - # including visible output tokens and - # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, # The unique ID of the previous response to the model. Use this to create - # multi-turn conversations. Learn more about - # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, # **o-series models only** # - # Configuration options for - # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, # Whether to store the generated model response for later retrieval via API. store: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will - # make the output more random, while lower values like 0.2 will make it more - # focused and deterministic. We generally recommend altering this or `top_p` but - # not both. + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature: nil, # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: + # structured JSON data. 
Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a - # response. See the `tools` parameter to see how to specify which tools the model - # can call. + # response. See the `tools` parameter to see how to specify which tools the model + # can call. tool_choice: nil, # An array of tools the model may call while generating a response. You can - # specify which tool to use by setting the `tool_choice` parameter. + # specify which tool to use by setting the `tool_choice` parameter. # - # The two categories of tools you can provide the model are: + # The two categories of tools you can provide the model are: # - # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's - # capabilities, like - # [web search](https://platform.openai.com/docs/guides/tools-web-search) or - # [file search](https://platform.openai.com/docs/guides/tools-file-search). - # Learn more about - # [built-in tools](https://platform.openai.com/docs/guides/tools). - # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about - # [function calling](https://platform.openai.com/docs/guides/function-calling). + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, # An alternative to sampling with temperature, called nucleus sampling, where the - # model considers the results of the tokens with top_p probability mass. So 0.1 - # means only the tokens comprising the top 10% probability mass are considered. + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. # - # We generally recommend altering this or `temperature` but not both. + # We generally recommend altering this or `temperature` but not both. top_p: nil, # The truncation strategy to use for the model response. # - # - `auto`: If the context of this response and previous ones exceeds the model's - # context window size, the model will truncate the response to fit the context - # window by dropping input items in the middle of the conversation. - # - `disabled` (default): If a model response will exceed the context window size - # for a model, the request will fail with a 400 error. + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. 
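For the streaming path, the docs above say to call `#stream_raw` rather than passing `stream:` yourself. Treating the return value as an enumerable of server-sent events is an assumption about the SDK's stream type:

```ruby
# Stream a response; each yielded event is assumed to mirror the API's
# streaming event shapes (deltas, completions, and so on).
stream = client.responses.stream_raw(
  model: "gpt-4o",
  input: "Stream a limerick about chunked uploads."
)
stream.each do |event|
  puts event.type
end
```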
truncation: nil, # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end @@ -412,7 +412,7 @@ module OpenAI # The ID of the response to retrieve. response_id, # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. include: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/lib/openai/resources/responses/input_items.rbi index 26668496..0c70b7b0 100644 --- a/rbi/lib/openai/resources/responses/input_items.rbi +++ b/rbi/lib/openai/resources/responses/input_items.rbi @@ -38,15 +38,15 @@ module OpenAI # An item ID to list items before, used in pagination. before: nil, # Additional fields to include in the response. See the `include` parameter for - # Response creation above for more information. + # Response creation above for more information. include: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, # The order to return the input items in. Default is `asc`. # - # - `asc`: Return the input items in ascending order. - # - `desc`: Return the input items in descending order. + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. order: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/lib/openai/resources/uploads.rbi index bdc55f98..c397fdbf 100644 --- a/rbi/lib/openai/resources/uploads.rbi +++ b/rbi/lib/openai/resources/uploads.rbi @@ -7,24 +7,24 @@ module OpenAI attr_reader :parts # Creates an intermediate - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object - # that you can add - # [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. - # Currently, an Upload can accept at most 8 GB in total and expires after an hour - # after you create it. + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object + # that you can add + # [Parts](https://platform.openai.com/docs/api-reference/uploads/part-object) to. + # Currently, an Upload can accept at most 8 GB in total and expires after an hour + # after you create it. # - # Once you complete the Upload, we will create a - # [File](https://platform.openai.com/docs/api-reference/files/object) object that - # contains all the parts you uploaded. This File is usable in the rest of our - # platform as a regular File object. + # Once you complete the Upload, we will create a + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # contains all the parts you uploaded. This File is usable in the rest of our + # platform as a regular File object. # - # For certain `purpose` values, the correct `mime_type` must be specified. 
Please - # refer to documentation for the - # [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files). + # For certain `purpose` values, the correct `mime_type` must be specified. Please + # refer to documentation for the + # [supported MIME types for your use case](https://platform.openai.com/docs/assistants/tools/file-search#supported-files). # - # For guidance on the proper filename extensions for each purpose, please follow - # the documentation on - # [creating a File](https://platform.openai.com/docs/api-reference/files/create). + # For guidance on the proper filename extensions for each purpose, please follow + # the documentation on + # [creating a File](https://platform.openai.com/docs/api-reference/files/create). sig do params( bytes: Integer, @@ -42,13 +42,13 @@ module OpenAI filename:, # The MIME type of the file. # - # This must fall within the supported MIME types for your file purpose. See the - # supported MIME types for assistants and vision. + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. mime_type:, # The intended purpose of the uploaded file. # - # See the - # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, request_options: {} ); end @@ -66,18 +66,18 @@ module OpenAI request_options: {} ); end # Completes the - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). # - # Within the returned Upload object, there is a nested - # [File](https://platform.openai.com/docs/api-reference/files/object) object that - # is ready to use in the rest of the platform. + # Within the returned Upload object, there is a nested + # [File](https://platform.openai.com/docs/api-reference/files/object) object that + # is ready to use in the rest of the platform. # - # You can specify the order of the Parts by passing in an ordered list of the Part - # IDs. + # You can specify the order of the Parts by passing in an ordered list of the Part + # IDs. # - # The number of bytes uploaded upon completion must match the number of bytes - # initially specified when creating the Upload object. No Parts may be added after - # an Upload is completed. + # The number of bytes uploaded upon completion must match the number of bytes + # initially specified when creating the Upload object. No Parts may be added after + # an Upload is completed. sig do params( upload_id: String, @@ -93,7 +93,7 @@ module OpenAI # The ordered list of Part IDs. part_ids:, # The optional md5 checksum for the file contents to verify if the bytes uploaded - # matches what you expect. + # matches what you expect. md5: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 47bb936f..9b073eea 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -5,16 +5,16 @@ module OpenAI class Uploads class Parts # Adds a - # [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an - # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. - # A Part represents a chunk of bytes from the file you are trying to upload. 
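End-to-end sketch of the Upload flow documented above: create the Upload, add Parts of at most 64 MB each until the declared byte count is reached, then complete with an ordered list of Part IDs. The `data:` parameter name on parts#create is an assumption, since its signature is only partially visible in this hunk; `client` is reused from earlier.

```ruby
require "stringio"

upload = client.uploads.create(
  bytes: File.size("corpus.jsonl"),
  filename: "corpus.jsonl",
  mime_type: "text/jsonl", # must be a supported MIME type for the purpose
  purpose: "fine-tune"
)

# One Part per <= 64 MB slice, up to the 8 GB Upload maximum.
part_ids = []
File.open("corpus.jsonl", "rb") do |f|
  while (chunk = f.read(64 * 1024 * 1024))
    part = client.uploads.parts.create(upload.id, data: StringIO.new(chunk)) # `data:` assumed
    part_ids << part.id
  end
end

# Total uploaded bytes must equal the `bytes` declared at creation time.
done = client.uploads.complete(upload.id, part_ids: part_ids)
puts done.status
```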
+ # [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an + # [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object. + # A Part represents a chunk of bytes from the file you are trying to upload. # - # Each Part can be at most 64 MB, and you can add Parts until you hit the Upload - # maximum of 8 GB. + # Each Part can be at most 64 MB, and you can add Parts until you hit the Upload + # maximum of 8 GB. # - # It is possible to add multiple Parts in parallel. You can decide the intended - # order of the Parts when you - # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). + # It is possible to add multiple Parts in parallel. You can decide the intended + # order of the Parts when you + # [complete the Upload](https://platform.openai.com/docs/api-reference/uploads/complete). sig do params( upload_id: String, diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/lib/openai/resources/vector_stores.rbi index b0868a1f..7658101e 100644 --- a/rbi/lib/openai/resources/vector_stores.rbi +++ b/rbi/lib/openai/resources/vector_stores.rbi @@ -27,20 +27,20 @@ module OpenAI end def create( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, # The expiration policy for a vector store. expires_after: nil, # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. file_ids: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The name of the vector store. name: nil, @@ -76,11 +76,11 @@ module OpenAI # The expiration policy for a vector store. expires_after: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, # The name of the vector store. name: nil, @@ -99,20 +99,20 @@ module OpenAI end def list( # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. 
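A sketch for the vector_stores#create hunk above, reusing `client` and the uploaded `file`; the `expires_after` hash shape is an assumption about how the expiration-policy model coerces.

```ruby
store = client.vector_stores.create(
  name: "support-docs",
  file_ids: [file.id],     # files the store should index for file_search
  expires_after: {anchor: :last_active_at, days: 7}, # shape assumed
  metadata: {team: "docs"} # up to 16 string key/value pairs
)
puts store.id
```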
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. order: nil, request_options: {} ); end @@ -130,7 +130,7 @@ module OpenAI request_options: {} ); end # Search a vector store for relevant chunks based on a query and file attributes - # filter. + # filter. sig do params( vector_store_id: String, @@ -151,7 +151,7 @@ module OpenAI # A filter to apply based on file attributes. filters: nil, # The maximum number of results to return. This number should be between 1 and 50 - # inclusive. + # inclusive. max_num_results: nil, # Ranking options for search. ranking_options: nil, diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/lib/openai/resources/vector_stores/file_batches.rbi index ae4348ba..8a6a5231 100644 --- a/rbi/lib/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/lib/openai/resources/vector_stores/file_batches.rbi @@ -23,17 +23,17 @@ module OpenAI # The ID of the vector store for which to create a File Batch. vector_store_id, # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # the vector store should use. Useful for tools like `file_search` that can access - # files. + # the vector store should use. Useful for tools like `file_search` that can access + # files. file_ids:, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. attributes: nil, # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. + # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} ); end @@ -54,7 +54,7 @@ module OpenAI request_options: {} ); end # Cancel a vector store file batch. This attempts to cancel the processing of - # files in this batch as soon as possible. + # files in this batch as soon as possible. sig do params( batch_id: String, @@ -90,23 +90,23 @@ module OpenAI # Path param: The ID of the vector store that the files belong to. 
vector_store_id:, # Query param: A cursor for use in pagination. `after` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, ending with obj_foo, your subsequent call can include - # after=obj_foo in order to fetch the next page of the list. + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, ending with obj_foo, your subsequent call can include + # after=obj_foo in order to fetch the next page of the list. after: nil, # Query param: A cursor for use in pagination. `before` is an object ID that - # defines your place in the list. For instance, if you make a list request and - # receive 100 objects, starting with obj_foo, your subsequent call can include - # before=obj_foo in order to fetch the previous page of the list. + # defines your place in the list. For instance, if you make a list request and + # receive 100 objects, starting with obj_foo, your subsequent call can include + # before=obj_foo in order to fetch the previous page of the list. before: nil, # Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, - # `cancelled`. + # `cancelled`. filter: nil, # Query param: A limit on the number of objects to be returned. Limit can range - # between 1 and 100, and the default is 20. + # between 1 and 100, and the default is 20. limit: nil, # Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ascending order and `desc` for descending order. + # ascending order and `desc` for descending order. order: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/lib/openai/resources/vector_stores/files.rbi index 22550fd4..eb8c2391 100644 --- a/rbi/lib/openai/resources/vector_stores/files.rbi +++ b/rbi/lib/openai/resources/vector_stores/files.rbi @@ -5,8 +5,8 @@ module OpenAI class VectorStores class Files # Create a vector store file by attaching a - # [File](https://platform.openai.com/docs/api-reference/files) to a - # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). + # [File](https://platform.openai.com/docs/api-reference/files) to a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object). sig do params( vector_store_id: String, @@ -25,17 +25,17 @@ module OpenAI # The ID of the vector store for which to create a File. vector_store_id, # A [File](https://platform.openai.com/docs/api-reference/files) ID that the - # vector store should use. Useful for tools like `file_search` that can access - # files. + # vector store should use. Useful for tools like `file_search` that can access + # files. file_id:, # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. Keys are strings with a maximum - # length of 64 characters. Values are strings with a maximum length of 512 - # characters, booleans, or numbers. + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. attributes: nil, # The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # strategy. Only applicable if `file_ids` is non-empty. 
+ # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} ); end @@ -71,10 +71,10 @@ module OpenAI # Path param: The ID of the vector store the file belongs to. vector_store_id:, # Body param: Set of 16 key-value pairs that can be attached to an object. This - # can be useful for storing additional information about the object in a - # structured format, and querying for objects via API or the dashboard. Keys are - # strings with a maximum length of 64 characters. Values are strings with a - # maximum length of 512 characters, booleans, or numbers. + # can be useful for storing additional information about the object in a + # structured format, and querying for objects via API or the dashboard. Keys are + # strings with a maximum length of 64 characters. Values are strings with a + # maximum length of 512 characters, booleans, or numbers. attributes:, request_options: {} ); end @@ -95,29 +95,29 @@ module OpenAI # The ID of the vector store that the files belong to. vector_store_id, # A cursor for use in pagination. `after` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # ending with obj_foo, your subsequent call can include after=obj_foo in order to - # fetch the next page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, # A cursor for use in pagination. `before` is an object ID that defines your place - # in the list. For instance, if you make a list request and receive 100 objects, - # starting with obj_foo, your subsequent call can include before=obj_foo in order - # to fetch the previous page of the list. + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. filter: nil, # A limit on the number of objects to be returned. Limit can range between 1 and - # 100, and the default is 20. + # 100, and the default is 20. limit: nil, # Sort order by the `created_at` timestamp of the objects. `asc` for ascending - # order and `desc` for descending order. + # order and `desc` for descending order. order: nil, request_options: {} ); end # Delete a vector store file. This will remove the file from the vector store but - # the file itself will not be deleted. To delete the file, use the - # [delete file](https://platform.openai.com/docs/api-reference/files/delete) - # endpoint. + # the file itself will not be deleted. To delete the file, use the + # [delete file](https://platform.openai.com/docs/api-reference/files/delete) + # endpoint. 
sig do
        params(
          file_id: String,

From de8a75fccab44e40fce434c2e5d676d7df05f080 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sun, 6 Apr 2025 13:31:00 +0000
Subject: [PATCH 108/295] chore(internal): more concise handling of parameter
 naming conflicts (#110)

---
 lib/openai/internal/type/base_model.rb   | 9 +++------
 lib/openai/resources/fine_tuning/jobs.rb | 2 +-
 2 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb
index 026af072..eab6ea8b 100644
--- a/lib/openai/internal/type/base_model.rb
+++ b/lib/openai/internal/type/base_model.rb
@@ -258,7 +258,6 @@ def dump(value)
           return super
         end
 
-        is_param = singleton_class <= OpenAI::Internal::Type::RequestParameters::Converter
         acc = {}
 
         coerced.each do |key, val|
@@ -267,21 +266,19 @@ def dump(value)
           in nil
             acc.store(name, super(val))
           else
-            mode, type_fn = field.fetch_values(:mode, :type_fn)
+            api_name, mode, type_fn = field.fetch_values(:api_name, :mode, :type_fn)
             case mode
             in :coerce
               next
             else
               target = type_fn.call
-              api_name = is_param ? name : field.fetch(:api_name)
               acc.store(api_name, OpenAI::Internal::Type::Converter.dump(target, val))
             end
           end
         end
 
-        known_fields.each do |name, field|
-          mode, const = field.fetch_values(:mode, :const)
-          api_name = is_param ? name : field.fetch(:api_name)
+        known_fields.each_value do |field|
+          api_name, mode, const = field.fetch_values(:api_name, :mode, :const)
           next if mode == :coerce || acc.key?(api_name) || const == OpenAI::Internal::OMIT
           acc.store(api_name, const)
         end
diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb
index da5b8f03..50cf5b51 100644
--- a/lib/openai/resources/fine_tuning/jobs.rb
+++ b/lib/openai/resources/fine_tuning/jobs.rb
@@ -36,7 +36,7 @@ def create(params)
       @client.request(
         method: :post,
         path: "fine_tuning/jobs",
-        body: parsed.transform_keys(method_: :method),
+        body: parsed,
         model: OpenAI::Models::FineTuning::FineTuningJob,
         options: options
       )

From 2900cac586d47fe040f8f8238261079abac33b33 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 06:08:28 +0000
Subject: [PATCH 109/295] chore: easier to read examples in README.md (#111)

---
 README.md | 45 ++++++++++++++++++++++++---------------------
 1 file changed, 24 insertions(+), 21 deletions(-)

diff --git a/README.md b/README.md
index 5c71b3e6..85e4c03d 100644
--- a/README.md
+++ b/README.md
@@ -36,13 +36,14 @@ openai = OpenAI::Client.new(
   api_key: "My API Key" # defaults to ENV["OPENAI_API_KEY"]
 )
 
-chat_completion = openai.chat.completions.create(
-  messages: [{
-    role: "user",
-    content: "Say this is a test"
-  }],
-  model: "gpt-4o"
-)
+chat_completion =
+  openai.chat.completions.create(
+    messages: [{
+      role: "user",
+      content: "Say this is a test"
+    }],
+    model: "gpt-4o"
+  )
 
 puts(chat_completion)
 ```
@@ -71,13 +72,14 @@ end
 We provide support for streaming responses using Server-Sent Events (SSE).
```ruby -stream = openai.chat.completions.stream_raw( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" -) +stream = + openai.chat.completions.stream_raw( + messages: [{ + role: "user", + content: "Say this is a test" + }], + model: "gpt-4o" + ) stream.each do |completion| puts(completion) @@ -191,13 +193,14 @@ Due to limitations with the Sorbet type system, where a method otherwise can tak Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience. ```ruby -params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" -) +params = + OpenAI::Models::Chat::CompletionCreateParams.new( + messages: [{ + role: "user", + content: "Say this is a test" + }], + model: "gpt-4o" + ) openai.chat.completions.create(**params) ``` From 2afce530dbf68f767e019f308d386194f257ced5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 17:55:47 +0000 Subject: [PATCH 110/295] chore: make client tests look prettier (#112) --- test/openai/client_test.rb | 45 +++++++++++++++++++++++--------------- 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index dd6ec3ea..0e142a74 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -156,7 +156,10 @@ def test_retry_count_header openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") end - retry_count_headers = requester.attempts.map { _1[:headers]["x-stainless-retry-count"] } + retry_count_headers = requester.attempts.map do + _1.fetch(:headers).fetch("x-stainless-retry-count") + end + assert_equal(%w[0 1 2], retry_count_headers) end @@ -173,7 +176,10 @@ def test_omit_retry_count_header ) end - retry_count_headers = requester.attempts.map { _1[:headers]["x-stainless-retry-count"] } + retry_count_headers = requester.attempts.map do + _1.fetch(:headers).fetch("x-stainless-retry-count", nil) + end + assert_equal([nil, nil, nil], retry_count_headers) end @@ -190,7 +196,10 @@ def test_overwrite_retry_count_header ) end - retry_count_headers = requester.attempts.map { _1[:headers]["x-stainless-retry-count"] } + retry_count_headers = requester.attempts.map do + _1.fetch(:headers).fetch("x-stainless-retry-count") + end + assert_equal(%w[42 42 42], retry_count_headers) end @@ -207,12 +216,12 @@ def test_client_redirect_307 ) end - assert_equal("/redirected", requester.attempts.last[:url].path) - assert_equal(requester.attempts.first[:method], requester.attempts.last[:method]) - assert_equal(requester.attempts.first[:body], requester.attempts.last[:body]) + assert_equal("/redirected", requester.attempts.last.fetch(:url).path) + assert_equal(requester.attempts.first.fetch(:method), requester.attempts.last.fetch(:method)) + assert_equal(requester.attempts.first.fetch(:body), requester.attempts.last.fetch(:body)) assert_equal( - requester.attempts.first[:headers]["content-type"], - requester.attempts.last[:headers]["content-type"] + requester.attempts.first.fetch(:headers)["content-type"], + requester.attempts.last.fetch(:headers)["content-type"] ) end @@ -229,10 +238,10 @@ def test_client_redirect_303 ) end - assert_equal("/redirected", requester.attempts.last[:url].path) - assert_equal(:get, requester.attempts.last[:method]) - assert_nil(requester.attempts.last[:body]) - 
assert_nil(requester.attempts.last[:headers]["Content-Type"])
+    assert_equal("/redirected", requester.attempts.last.fetch(:url).path)
+    assert_equal(:get, requester.attempts.last.fetch(:method))
+    assert_nil(requester.attempts.last.fetch(:body))
+    assert_nil(requester.attempts.last.fetch(:headers)["content-type"])
   end
 
   def test_client_redirect_auth_keep_same_origin
@@ -244,13 +253,13 @@ def test_client_redirect_auth_keep_same_origin
       openai.chat.completions.create(
         messages: [{content: "string", role: :developer}],
         model: :"o3-mini",
-        request_options: {extra_headers: {"Authorization" => "Bearer xyz"}}
+        request_options: {extra_headers: {"authorization" => "Bearer xyz"}}
       )
     end
 
     assert_equal(
-      requester.attempts.first[:headers]["authorization"],
-      requester.attempts.last[:headers]["authorization"]
+      requester.attempts.first.fetch(:headers)["authorization"],
+      requester.attempts.last.fetch(:headers)["authorization"]
     )
   end
 
   def test_client_redirect_auth_strip_cross_origin
@@ -263,11 +272,11 @@ def test_client_redirect_auth_strip_cross_origin
       openai.chat.completions.create(
         messages: [{content: "string", role: :developer}],
         model: :"o3-mini",
-        request_options: {extra_headers: {"Authorization" => "Bearer xyz"}}
+        request_options: {extra_headers: {"authorization" => "Bearer xyz"}}
       )
     end
 
-    assert_nil(requester.attempts.last[:headers]["Authorization"])
+    assert_nil(requester.attempts.last.fetch(:headers)["authorization"])
  end
 
   def test_default_headers
@@ -275,7 +284,7 @@ def test_default_headers
     requester = MockRequester.new(200, {}, {})
     openai.requester = requester
     openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini")
-    headers = requester.attempts.first[:headers]
+    headers = requester.attempts.first.fetch(:headers)
 
     refute_empty(headers["accept"])
     refute_empty(headers["content-type"])

From 3c3892f1aae548afde4173edc84e018d898c7e00 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 18:50:34 +0000
Subject: [PATCH 111/295] feat(api): Add evals API to SDK (#113)

Adding the evals API to the SDK.
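
As a rough usage sketch of the surface this adds (hand-written, not generated
code: the resource methods and field names below come from this patch, while
the model name, the item schema, and the `{{item.ticket}}` template variable
are illustrative assumptions):

```ruby
require "openai"

openai = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Define an eval: a custom data source schema plus a model-graded label check.
evaluation =
  openai.evals.create(
    name: "ticket-sentiment",
    data_source_config: {
      type: :custom,
      item_schema: {
        type: "object",
        properties: {ticket: {type: "string"}},
        required: ["ticket"]
      }
    },
    testing_criteria: [{
      type: :label_model,
      name: "sentiment grader",
      model: "gpt-4o",
      input: [
        {role: "system", content: "Label the sentiment of the support ticket."},
        {role: "user", content: "{{item.ticket}}"}
      ],
      labels: %w[positive neutral negative],
      passing_labels: %w[positive neutral]
    }]
  )

puts(evaluation.id)
```

Runs against the eval can then be started with `openai.evals.runs.create`,
which this patch adds alongside the models above.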
--- .stats.yml | 8 +- README.md | 2 +- lib/openai.rb | 43 + lib/openai/client.rb | 4 + lib/openai/models/all_models.rb | 28 +- .../beta/thread_create_and_run_params.rb | 2 +- lib/openai/models/beta/threads/run.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/models/chat/chat_completion.rb | 2 +- .../models/chat/completion_create_params.rb | 2 +- lib/openai/models/eval_create_params.rb | 483 ++++++++++ lib/openai/models/eval_create_response.rb | 142 +++ .../models/eval_custom_data_source_config.rb | 35 + lib/openai/models/eval_delete_params.rb | 19 + lib/openai/models/eval_delete_response.rb | 32 + lib/openai/models/eval_label_model_grader.rb | 252 ++++++ lib/openai/models/eval_list_params.rb | 95 ++ lib/openai/models/eval_list_response.rb | 142 +++ lib/openai/models/eval_retrieve_params.rb | 19 + lib/openai/models/eval_retrieve_response.rb | 142 +++ ...l_stored_completions_data_source_config.rb | 46 + lib/openai/models/eval_string_check_grader.rb | 69 ++ .../models/eval_text_similarity_grader.rb | 88 ++ lib/openai/models/eval_update_params.rb | 42 + lib/openai/models/eval_update_response.rb | 142 +++ ...create_eval_completions_run_data_source.rb | 573 ++++++++++++ .../create_eval_jsonl_run_data_source.rb | 115 +++ lib/openai/models/evals/eval_api_error.rb | 33 + lib/openai/models/evals/run_cancel_params.rb | 27 + .../models/evals/run_cancel_response.rb | 278 ++++++ lib/openai/models/evals/run_create_params.rb | 66 ++ .../models/evals/run_create_response.rb | 278 ++++++ lib/openai/models/evals/run_delete_params.rb | 27 + .../models/evals/run_delete_response.rb | 46 + lib/openai/models/evals/run_list_params.rb | 100 +++ lib/openai/models/evals/run_list_response.rb | 278 ++++++ .../models/evals/run_retrieve_params.rb | 27 + .../models/evals/run_retrieve_response.rb | 278 ++++++ .../evals/runs/output_item_list_params.rb | 105 +++ .../evals/runs/output_item_list_response.rb | 295 +++++++ .../evals/runs/output_item_retrieve_params.rb | 35 + .../runs/output_item_retrieve_response.rb | 295 +++++++ .../checkpoints/permission_create_params.rb | 30 + .../checkpoints/permission_create_response.rb | 49 ++ .../checkpoints/permission_delete_params.rb | 23 + .../checkpoints/permission_delete_response.rb | 39 + .../checkpoints/permission_retrieve_params.rb | 81 ++ .../permission_retrieve_response.rb | 88 ++ lib/openai/models/responses/response.rb | 6 +- .../responses/response_create_params.rb | 6 +- lib/openai/models/responses_model.rb | 28 +- lib/openai/resources/beta/threads.rb | 9 +- lib/openai/resources/beta/threads/runs.rb | 16 +- lib/openai/resources/chat/completions.rb | 9 +- lib/openai/resources/evals.rb | 133 +++ lib/openai/resources/evals/runs.rb | 145 +++ .../resources/evals/runs/output_items.rb | 79 ++ lib/openai/resources/fine_tuning.rb | 4 + .../resources/fine_tuning/checkpoints.rb | 20 + .../fine_tuning/checkpoints/permissions.rb | 94 ++ lib/openai/resources/responses.rb | 12 +- rbi/lib/openai/client.rbi | 3 + rbi/lib/openai/models/all_models.rbi | 33 +- rbi/lib/openai/models/eval_create_params.rbi | 688 +++++++++++++++ .../openai/models/eval_create_response.rbi | 148 ++++ .../models/eval_custom_data_source_config.rbi | 28 + rbi/lib/openai/models/eval_delete_params.rbi | 23 + .../openai/models/eval_delete_response.rbi | 22 + .../openai/models/eval_label_model_grader.rbi | 325 +++++++ rbi/lib/openai/models/eval_list_params.rbi | 96 ++ rbi/lib/openai/models/eval_list_response.rbi | 148 ++++ 
.../openai/models/eval_retrieve_params.rbi | 23 + .../openai/models/eval_retrieve_response.rbi | 148 ++++ ..._stored_completions_data_source_config.rbi | 50 ++ .../models/eval_string_check_grader.rbi | 72 ++ .../models/eval_text_similarity_grader.rbi | 98 +++ rbi/lib/openai/models/eval_update_params.rbi | 48 + .../openai/models/eval_update_response.rbi | 148 ++++ ...reate_eval_completions_run_data_source.rbi | 833 ++++++++++++++++++ .../create_eval_jsonl_run_data_source.rbi | 139 +++ .../openai/models/evals/eval_api_error.rbi | 26 + .../openai/models/evals/run_cancel_params.rbi | 24 + .../models/evals/run_cancel_response.rbi | 281 ++++++ .../openai/models/evals/run_create_params.rbi | 83 ++ .../models/evals/run_create_response.rbi | 281 ++++++ .../openai/models/evals/run_delete_params.rbi | 24 + .../models/evals/run_delete_response.rbi | 33 + .../openai/models/evals/run_list_params.rbi | 103 +++ .../openai/models/evals/run_list_response.rbi | 281 ++++++ .../models/evals/run_retrieve_params.rbi | 24 + .../models/evals/run_retrieve_response.rbi | 281 ++++++ .../evals/runs/output_item_list_params.rbi | 107 +++ .../evals/runs/output_item_list_response.rbi | 288 ++++++ .../runs/output_item_retrieve_params.rbi | 33 + .../runs/output_item_retrieve_response.rbi | 288 ++++++ .../checkpoints/permission_create_params.rbi | 30 + .../permission_create_response.rbi | 42 + .../checkpoints/permission_delete_params.rbi | 27 + .../permission_delete_response.rbi | 29 + .../permission_retrieve_params.rbi | 95 ++ .../permission_retrieve_response.rbi | 90 ++ rbi/lib/openai/models/responses/response.rbi | 18 +- .../responses/response_create_params.rbi | 22 +- rbi/lib/openai/models/responses_model.rbi | 36 +- rbi/lib/openai/resources/beta/threads.rbi | 13 +- .../openai/resources/beta/threads/runs.rbi | 16 +- rbi/lib/openai/resources/chat/completions.rbi | 13 +- rbi/lib/openai/resources/evals.rbi | 133 +++ rbi/lib/openai/resources/evals/runs.rbi | 122 +++ .../resources/evals/runs/output_items.rbi | 65 ++ rbi/lib/openai/resources/fine_tuning.rbi | 3 + .../resources/fine_tuning/checkpoints.rbi | 16 + .../fine_tuning/checkpoints/permissions.rbi | 78 ++ rbi/lib/openai/resources/responses.rbi | 24 +- sig/openai/client.rbs | 2 + sig/openai/models/all_models.rbs | 30 +- sig/openai/models/eval_create_params.rbs | 309 +++++++ sig/openai/models/eval_create_response.rbs | 67 ++ .../models/eval_custom_data_source_config.rbs | 16 + sig/openai/models/eval_delete_params.rbs | 14 + sig/openai/models/eval_delete_response.rbs | 18 + sig/openai/models/eval_label_model_grader.rbs | 184 ++++ sig/openai/models/eval_list_params.rbs | 69 ++ sig/openai/models/eval_list_response.rbs | 67 ++ sig/openai/models/eval_retrieve_params.rbs | 15 + sig/openai/models/eval_retrieve_response.rbs | 67 ++ ..._stored_completions_data_source_config.rbs | 26 + .../models/eval_string_check_grader.rbs | 47 + .../models/eval_text_similarity_grader.rbs | 71 ++ sig/openai/models/eval_update_params.rbs | 26 + sig/openai/models/eval_update_response.rbs | 67 ++ ...reate_eval_completions_run_data_source.rbs | 394 +++++++++ .../create_eval_jsonl_run_data_source.rbs | 83 ++ sig/openai/models/evals/eval_api_error.rbs | 19 + sig/openai/models/evals/run_cancel_params.rbs | 22 + .../models/evals/run_cancel_response.rbs | 158 ++++ sig/openai/models/evals/run_create_params.rbs | 45 + .../models/evals/run_create_response.rbs | 158 ++++ sig/openai/models/evals/run_delete_params.rbs | 22 + .../models/evals/run_delete_response.rbs | 30 + 
sig/openai/models/evals/run_list_params.rbs | 74 ++ sig/openai/models/evals/run_list_response.rbs | 158 ++++ .../models/evals/run_retrieve_params.rbs | 22 + .../models/evals/run_retrieve_response.rbs | 158 ++++ .../evals/runs/output_item_list_params.rbs | 77 ++ .../evals/runs/output_item_list_response.rbs | 164 ++++ .../runs/output_item_retrieve_params.rbs | 28 + .../runs/output_item_retrieve_response.rbs | 164 ++++ .../checkpoints/permission_create_params.rbs | 25 + .../permission_create_response.rbs | 34 + .../checkpoints/permission_delete_params.rbs | 19 + .../permission_delete_response.rbs | 26 + .../permission_retrieve_params.rbs | 60 ++ .../permission_retrieve_response.rbs | 65 ++ sig/openai/models/responses_model.rbs | 30 +- sig/openai/resources/beta/threads.rbs | 2 +- sig/openai/resources/beta/threads/runs.rbs | 4 +- sig/openai/resources/chat/completions.rbs | 2 +- sig/openai/resources/evals.rbs | 43 + sig/openai/resources/evals/runs.rbs | 46 + .../resources/evals/runs/output_items.rbs | 28 + sig/openai/resources/fine_tuning.rbs | 2 + .../resources/fine_tuning/checkpoints.rbs | 11 + .../fine_tuning/checkpoints/permissions.rbs | 31 + sig/openai/resources/responses.rbs | 2 +- test/openai/resource_namespaces.rb | 11 + .../resources/evals/runs/output_items_test.rb | 59 ++ test/openai/resources/evals/runs_test.rb | 140 +++ test/openai/resources/evals_test.rb | 269 ++++++ .../checkpoints/permissions_test.rb | 68 ++ .../resources/fine_tuning/checkpoints_test.rb | 6 + 172 files changed, 15380 insertions(+), 148 deletions(-) create mode 100644 lib/openai/models/eval_create_params.rb create mode 100644 lib/openai/models/eval_create_response.rb create mode 100644 lib/openai/models/eval_custom_data_source_config.rb create mode 100644 lib/openai/models/eval_delete_params.rb create mode 100644 lib/openai/models/eval_delete_response.rb create mode 100644 lib/openai/models/eval_label_model_grader.rb create mode 100644 lib/openai/models/eval_list_params.rb create mode 100644 lib/openai/models/eval_list_response.rb create mode 100644 lib/openai/models/eval_retrieve_params.rb create mode 100644 lib/openai/models/eval_retrieve_response.rb create mode 100644 lib/openai/models/eval_stored_completions_data_source_config.rb create mode 100644 lib/openai/models/eval_string_check_grader.rb create mode 100644 lib/openai/models/eval_text_similarity_grader.rb create mode 100644 lib/openai/models/eval_update_params.rb create mode 100644 lib/openai/models/eval_update_response.rb create mode 100644 lib/openai/models/evals/create_eval_completions_run_data_source.rb create mode 100644 lib/openai/models/evals/create_eval_jsonl_run_data_source.rb create mode 100644 lib/openai/models/evals/eval_api_error.rb create mode 100644 lib/openai/models/evals/run_cancel_params.rb create mode 100644 lib/openai/models/evals/run_cancel_response.rb create mode 100644 lib/openai/models/evals/run_create_params.rb create mode 100644 lib/openai/models/evals/run_create_response.rb create mode 100644 lib/openai/models/evals/run_delete_params.rb create mode 100644 lib/openai/models/evals/run_delete_response.rb create mode 100644 lib/openai/models/evals/run_list_params.rb create mode 100644 lib/openai/models/evals/run_list_response.rb create mode 100644 lib/openai/models/evals/run_retrieve_params.rb create mode 100644 lib/openai/models/evals/run_retrieve_response.rb create mode 100644 lib/openai/models/evals/runs/output_item_list_params.rb create mode 100644 lib/openai/models/evals/runs/output_item_list_response.rb create mode 100644 
lib/openai/models/evals/runs/output_item_retrieve_params.rb create mode 100644 lib/openai/models/evals/runs/output_item_retrieve_response.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb create mode 100644 lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb create mode 100644 lib/openai/resources/evals.rb create mode 100644 lib/openai/resources/evals/runs.rb create mode 100644 lib/openai/resources/evals/runs/output_items.rb create mode 100644 lib/openai/resources/fine_tuning/checkpoints.rb create mode 100644 lib/openai/resources/fine_tuning/checkpoints/permissions.rb create mode 100644 rbi/lib/openai/models/eval_create_params.rbi create mode 100644 rbi/lib/openai/models/eval_create_response.rbi create mode 100644 rbi/lib/openai/models/eval_custom_data_source_config.rbi create mode 100644 rbi/lib/openai/models/eval_delete_params.rbi create mode 100644 rbi/lib/openai/models/eval_delete_response.rbi create mode 100644 rbi/lib/openai/models/eval_label_model_grader.rbi create mode 100644 rbi/lib/openai/models/eval_list_params.rbi create mode 100644 rbi/lib/openai/models/eval_list_response.rbi create mode 100644 rbi/lib/openai/models/eval_retrieve_params.rbi create mode 100644 rbi/lib/openai/models/eval_retrieve_response.rbi create mode 100644 rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi create mode 100644 rbi/lib/openai/models/eval_string_check_grader.rbi create mode 100644 rbi/lib/openai/models/eval_text_similarity_grader.rbi create mode 100644 rbi/lib/openai/models/eval_update_params.rbi create mode 100644 rbi/lib/openai/models/eval_update_response.rbi create mode 100644 rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi create mode 100644 rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi create mode 100644 rbi/lib/openai/models/evals/eval_api_error.rbi create mode 100644 rbi/lib/openai/models/evals/run_cancel_params.rbi create mode 100644 rbi/lib/openai/models/evals/run_cancel_response.rbi create mode 100644 rbi/lib/openai/models/evals/run_create_params.rbi create mode 100644 rbi/lib/openai/models/evals/run_create_response.rbi create mode 100644 rbi/lib/openai/models/evals/run_delete_params.rbi create mode 100644 rbi/lib/openai/models/evals/run_delete_response.rbi create mode 100644 rbi/lib/openai/models/evals/run_list_params.rbi create mode 100644 rbi/lib/openai/models/evals/run_list_response.rbi create mode 100644 rbi/lib/openai/models/evals/run_retrieve_params.rbi create mode 100644 rbi/lib/openai/models/evals/run_retrieve_response.rbi create mode 100644 rbi/lib/openai/models/evals/runs/output_item_list_params.rbi create mode 100644 rbi/lib/openai/models/evals/runs/output_item_list_response.rbi create mode 100644 rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi create mode 100644 rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi create mode 100644 rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi create mode 100644 rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi create mode 100644 
rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi create mode 100644 rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi create mode 100644 rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi create mode 100644 rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi create mode 100644 rbi/lib/openai/resources/evals.rbi create mode 100644 rbi/lib/openai/resources/evals/runs.rbi create mode 100644 rbi/lib/openai/resources/evals/runs/output_items.rbi create mode 100644 rbi/lib/openai/resources/fine_tuning/checkpoints.rbi create mode 100644 rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi create mode 100644 sig/openai/models/eval_create_params.rbs create mode 100644 sig/openai/models/eval_create_response.rbs create mode 100644 sig/openai/models/eval_custom_data_source_config.rbs create mode 100644 sig/openai/models/eval_delete_params.rbs create mode 100644 sig/openai/models/eval_delete_response.rbs create mode 100644 sig/openai/models/eval_label_model_grader.rbs create mode 100644 sig/openai/models/eval_list_params.rbs create mode 100644 sig/openai/models/eval_list_response.rbs create mode 100644 sig/openai/models/eval_retrieve_params.rbs create mode 100644 sig/openai/models/eval_retrieve_response.rbs create mode 100644 sig/openai/models/eval_stored_completions_data_source_config.rbs create mode 100644 sig/openai/models/eval_string_check_grader.rbs create mode 100644 sig/openai/models/eval_text_similarity_grader.rbs create mode 100644 sig/openai/models/eval_update_params.rbs create mode 100644 sig/openai/models/eval_update_response.rbs create mode 100644 sig/openai/models/evals/create_eval_completions_run_data_source.rbs create mode 100644 sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs create mode 100644 sig/openai/models/evals/eval_api_error.rbs create mode 100644 sig/openai/models/evals/run_cancel_params.rbs create mode 100644 sig/openai/models/evals/run_cancel_response.rbs create mode 100644 sig/openai/models/evals/run_create_params.rbs create mode 100644 sig/openai/models/evals/run_create_response.rbs create mode 100644 sig/openai/models/evals/run_delete_params.rbs create mode 100644 sig/openai/models/evals/run_delete_response.rbs create mode 100644 sig/openai/models/evals/run_list_params.rbs create mode 100644 sig/openai/models/evals/run_list_response.rbs create mode 100644 sig/openai/models/evals/run_retrieve_params.rbs create mode 100644 sig/openai/models/evals/run_retrieve_response.rbs create mode 100644 sig/openai/models/evals/runs/output_item_list_params.rbs create mode 100644 sig/openai/models/evals/runs/output_item_list_response.rbs create mode 100644 sig/openai/models/evals/runs/output_item_retrieve_params.rbs create mode 100644 sig/openai/models/evals/runs/output_item_retrieve_response.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs create mode 100644 sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs create mode 100644 sig/openai/resources/evals.rbs create mode 100644 sig/openai/resources/evals/runs.rbs create mode 100644 
sig/openai/resources/evals/runs/output_items.rbs create mode 100644 sig/openai/resources/fine_tuning/checkpoints.rbs create mode 100644 sig/openai/resources/fine_tuning/checkpoints/permissions.rbs create mode 100644 test/openai/resources/evals/runs/output_items_test.rb create mode 100644 test/openai/resources/evals/runs_test.rb create mode 100644 test/openai/resources/evals_test.rb create mode 100644 test/openai/resources/fine_tuning/checkpoints/permissions_test.rb create mode 100644 test/openai/resources/fine_tuning/checkpoints_test.rb diff --git a/.stats.yml b/.stats.yml index e0e1a71e..43112911 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 80 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bce8217a697c729ac98046d4caf2c9e826b54c427fb0ab4f98e549a2e0ce31c.yml -openapi_spec_hash: 7996d2c34cc44fe2ce9ffe93c0ab774e -config_hash: bcd2cacdcb9fae9938f273cd167f613c +configured_endpoints: 95 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-472fe3036ea745365257fe870c0330917fb3153705c2826f49873cd631319b0a.yml +openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 +config_hash: ef19d36c307306f14f2e1cd5c834a151 diff --git a/README.md b/README.md index 85e4c03d..7a859626 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,7 @@ We provide support for streaming responses using Server Side Events (SSE). ```ruby stream = - openai.chat.completions.stream_raw( + openai.chat.completions.create_streaming( messages: [{ role: "user", content: "Say this is a test" diff --git a/lib/openai.rb b/lib/openai.rb index 775fe43b..537211f7 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -224,6 +224,38 @@ require_relative "openai/models/embedding_create_params" require_relative "openai/models/embedding_model" require_relative "openai/models/error_object" +require_relative "openai/models/eval_create_params" +require_relative "openai/models/eval_create_response" +require_relative "openai/models/eval_custom_data_source_config" +require_relative "openai/models/eval_delete_params" +require_relative "openai/models/eval_delete_response" +require_relative "openai/models/eval_label_model_grader" +require_relative "openai/models/eval_list_params" +require_relative "openai/models/eval_list_response" +require_relative "openai/models/eval_retrieve_params" +require_relative "openai/models/eval_retrieve_response" +require_relative "openai/models/evals/create_eval_completions_run_data_source" +require_relative "openai/models/evals/create_eval_jsonl_run_data_source" +require_relative "openai/models/evals/eval_api_error" +require_relative "openai/models/evals/run_cancel_params" +require_relative "openai/models/evals/run_cancel_response" +require_relative "openai/models/evals/run_create_params" +require_relative "openai/models/evals/run_create_response" +require_relative "openai/models/evals/run_delete_params" +require_relative "openai/models/evals/run_delete_response" +require_relative "openai/models/evals/run_list_params" +require_relative "openai/models/evals/run_list_response" +require_relative "openai/models/evals/run_retrieve_params" +require_relative "openai/models/evals/run_retrieve_response" +require_relative "openai/models/evals/runs/output_item_list_params" +require_relative "openai/models/evals/runs/output_item_list_response" +require_relative "openai/models/evals/runs/output_item_retrieve_params" +require_relative "openai/models/evals/runs/output_item_retrieve_response" +require_relative 
"openai/models/eval_stored_completions_data_source_config" +require_relative "openai/models/eval_string_check_grader" +require_relative "openai/models/eval_text_similarity_grader" +require_relative "openai/models/eval_update_params" +require_relative "openai/models/eval_update_response" require_relative "openai/models/file_chunking_strategy" require_relative "openai/models/file_chunking_strategy_param" require_relative "openai/models/file_content_params" @@ -234,6 +266,12 @@ require_relative "openai/models/file_object" require_relative "openai/models/file_purpose" require_relative "openai/models/file_retrieve_params" +require_relative "openai/models/fine_tuning/checkpoints/permission_create_params" +require_relative "openai/models/fine_tuning/checkpoints/permission_create_response" +require_relative "openai/models/fine_tuning/checkpoints/permission_delete_params" +require_relative "openai/models/fine_tuning/checkpoints/permission_delete_response" +require_relative "openai/models/fine_tuning/checkpoints/permission_retrieve_params" +require_relative "openai/models/fine_tuning/checkpoints/permission_retrieve_response" require_relative "openai/models/fine_tuning/fine_tuning_job" require_relative "openai/models/fine_tuning/fine_tuning_job_event" require_relative "openai/models/fine_tuning/fine_tuning_job_integration" @@ -400,8 +438,13 @@ require_relative "openai/resources/chat/completions/messages" require_relative "openai/resources/completions" require_relative "openai/resources/embeddings" +require_relative "openai/resources/evals" +require_relative "openai/resources/evals/runs" +require_relative "openai/resources/evals/runs/output_items" require_relative "openai/resources/files" require_relative "openai/resources/fine_tuning" +require_relative "openai/resources/fine_tuning/checkpoints" +require_relative "openai/resources/fine_tuning/checkpoints/permissions" require_relative "openai/resources/fine_tuning/jobs" require_relative "openai/resources/fine_tuning/jobs/checkpoints" require_relative "openai/resources/images" diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 5781780a..a8f3a0e2 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -66,6 +66,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::Responses] attr_reader :responses + # @return [OpenAI::Resources::Evals] + attr_reader :evals + # @api private # # @return [Hash{String=>String}] @@ -138,6 +141,7 @@ def initialize( @batches = OpenAI::Resources::Batches.new(client: self) @uploads = OpenAI::Resources::Uploads.new(client: self) @responses = OpenAI::Resources::Responses.new(client: self) + @evals = OpenAI::Resources::Evals.new(client: self) end end end diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 6ea63fb0..0abbe3bc 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -9,26 +9,26 @@ module AllModels variant enum: -> { OpenAI::Models::ChatModel } - variant const: -> { OpenAI::Models::AllModels::O1_PRO } + variant enum: -> { OpenAI::Models::AllModels::ResponsesOnlyModel } - variant const: -> { OpenAI::Models::AllModels::O1_PRO_2025_03_19 } + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum - variant const: -> { OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW } + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - variant const: -> { 
OpenAI::Models::AllModels::COMPUTER_USE_PREVIEW_2025_03_11 } + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] # def self.variants; end - - # @!group - - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - - # @!endgroup end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4612ebd8..f2349d0f 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -5,7 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#create_and_run # - # @see OpenAI::Resources::Beta::Threads#stream_raw + # @see OpenAI::Resources::Beta::Threads#create_and_run_streaming class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index c6436c0d..6b71de5b 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming class Run < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. 
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index a058e78e..3e2776ba 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 2243cb1d..a3e1b7d9 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 97ee34db..4ea12d1b 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#stream_raw + # @see OpenAI::Resources::Chat::Completions#create_streaming class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the chat completion. diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 65406125..4c378db0 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#stream_raw + # @see OpenAI::Resources::Chat::Completions#create_streaming class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb new file mode 100644 index 00000000..dc2ef4ac --- /dev/null +++ b/lib/openai/models/eval_create_params.rb @@ -0,0 +1,483 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#create + class EvalCreateParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute data_source_config + # The configuration for the data source used for the evaluation runs. + # + # @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + required :data_source_config, union: -> { OpenAI::Models::EvalCreateParams::DataSourceConfig } + + # @!attribute testing_criteria + # A list of graders for all eval runs in this group. 
+ # + # @return [Array] + required :testing_criteria, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion] } + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute [r] name + # The name of the evaluation. + # + # @return [String, nil] + optional :name, String + + # @!parse + # # @return [String] + # attr_writer :name + + # @!attribute [r] share_with_openai + # Indicates whether the evaluation is shared with OpenAI. + # + # @return [Boolean, nil] + optional :share_with_openai, OpenAI::Internal::Type::Boolean + + # @!parse + # # @return [Boolean] + # attr_writer :share_with_openai + + # @!parse + # # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + # # @param testing_criteria [Array] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param name [String] + # # @param share_with_openai [Boolean] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize( + # data_source_config:, + # testing_criteria:, + # metadata: nil, + # name: nil, + # share_with_openai: nil, + # request_options: {}, + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The configuration for the data source used for the evaluation runs. + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A CustomDataSourceConfig object that defines the schema for the data source used for the evaluation runs. + # This schema is used to define the shape of the data that will be: + # - Used to define your testing criteria and + # - What data is required when creating a run + variant :custom, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom } + + # A data source config which specifies the metadata property of your stored completions query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + variant :stored_completions, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions } + + class Custom < OpenAI::Internal::Type::BaseModel + # @!attribute item_schema + # The json schema for the run data source items. + # + # @return [Hash{Symbol=>Object}] + required :item_schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!attribute [r] include_sample_schema + # Whether to include the sample schema in the data source. + # + # @return [Boolean, nil] + optional :include_sample_schema, OpenAI::Internal::Type::Boolean + + # @!parse + # # @return [Boolean] + # attr_writer :include_sample_schema + + # @!parse + # # A CustomDataSourceConfig object that defines the schema for the data source used + # # for the evaluation runs. 
This schema is used to define the shape of the data + # # that will be: + # # + # # - Used to define your testing criteria and + # # - What data is required when creating a run + # # + # # @param item_schema [Hash{Symbol=>Object}] + # # @param include_sample_schema [Boolean] + # # @param type [Symbol, :custom] + # # + # def initialize(item_schema:, include_sample_schema: nil, type: :custom, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of data source. Always `stored_completions`. + # + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!parse + # # A data source config which specifies the metadata property of your stored + # # completions query. This is usually metadata like `usecase=chatbot` or + # # `prompt-version=v2`, etc. + # # + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param type [Symbol, :stored_completions] + # # + # def initialize(metadata: nil, type: :stored_completions, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @!parse + # # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] + # def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. + variant :label_model, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel } + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + + class LabelModel < OpenAI::Internal::Type::BaseModel + # @!attribute input + # + # @return [Array] + required :input, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input] } + + # @!attribute labels + # The labels to classify to each item in the evaluation. + # + # @return [Array] + required :labels, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute model + # The model to use for the evaluation. Must support structured outputs. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute passing_labels + # The labels that indicate a passing result. Must be a subset of labels. + # + # @return [Array] + required :passing_labels, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute type + # The object type, which is always `label_model`. 
+ # + # @return [Symbol, :label_model] + required :type, const: :label_model + + # @!parse + # # A LabelModelGrader object which uses a model to assign labels to each item in + # # the evaluation. + # # + # # @param input [Array] + # # @param labels [Array] + # # @param model [String] + # # @param name [String] + # # @param passing_labels [Array] + # # @param type [Symbol, :label_model] + # # + # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + module Input + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage } + + variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage } + + variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage } + + class SimpleInputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!parse + # # @param content [String] + # # @param role [String] + # # + # def initialize(content:, role:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class InputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + # @return [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] + required :content, + -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content } + + # @!attribute role + # The role of the message. One of `user`, `system`, or `developer`. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] + required :role, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role } + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] + required :type, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type } + + # @!parse + # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] + # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] + # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] + # # + # def initialize(content:, role:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `input_text`. 
+ # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] + required :type, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `input_text`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT = :input_text + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + SYSTEM = :system + DEVELOPER = :developer + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # The type of item, which is always `message`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + class OutputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + # @return [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] + required :content, + -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content } + + # @!attribute role + # The role of the message. Must be `assistant` for output. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] + required :role, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role } + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] + required :type, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type } + + # @!parse + # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] + # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] + # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] + # # + # def initialize(content:, role:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `output_text`. 
+ # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] + required :type, + enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `output_text`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT = :output_text + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The role of the message. Must be `assistant` for output. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#role + module Role + extend OpenAI::Internal::Type::Enum + + ASSISTANT = :assistant + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # The type of item, which is always `message`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # @!parse + # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)] + # def self.variants; end + end + end + + # @!parse + # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # def self.variants; end + end + end + end +end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb new file mode 100644 index 00000000..f5e1cb70 --- /dev/null +++ b/lib/openai/models/eval_create_response.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#create + class EvalCreateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) for when the eval was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source_config + # Configuration of data sources used in runs of the evaluation. + # + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
+ # + #   @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute name + #   The name of the evaluation. + # + #   @return [String] + required :name, String + + # @!attribute object + #   The object type. + # + #   @return [Symbol, :eval] + required :object, const: :eval + + # @!attribute share_with_openai + #   Indicates whether the evaluation is shared with OpenAI. + # + #   @return [Boolean] + required :share_with_openai, OpenAI::Internal::Type::Boolean + + # @!attribute testing_criteria + #   A list of testing criteria. + # + #   @return [Array] + required :testing_criteria, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } + + # @!parse + #   # An Eval object with a data source config and testing criteria. An Eval + #   # represents a task to be done for your LLM integration. Like: + #   # + #   # - Improve the quality of my chatbot + #   # - See how well my chatbot handles customer support + #   # - Check if o3-mini is better at my usecase than gpt-4o + #   # + #   # @param id [String] + #   # @param created_at [Integer] + #   # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + #   # @param metadata [Hash{Symbol=>String}, nil] + #   # @param name [String] + #   # @param share_with_openai [Boolean] + #   # @param testing_criteria [Array] + #   # @param object [Symbol, :eval] + #   # + #   def initialize( + #     id:, + #     created_at:, + #     data_source_config:, + #     metadata:, + #     name:, + #     share_with_openai:, + #     testing_criteria:, + #     object: :eval, + #     ** + #   ) + #     super + #   end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Configuration of data sources used in runs of the evaluation. + # + # @see OpenAI::Models::EvalCreateResponse#data_source_config + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A CustomDataSourceConfig which specifies the schema of your `item` and optionally `sample` namespaces. + # The response schema defines the shape of the data that will be: + # - Used to define your testing criteria and + # - What data is required when creating a run + variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + + # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + # The schema returned by this data source config is used to define what variables are available in your evals. + # `item` and `sample` are both defined when using this data source config. + variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + + # @!parse + #   # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + #   def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. + variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation.
+ variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + + # @!parse + # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # def self.variants; end + end + end + end +end diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb new file mode 100644 index 00000000..5be74c5c --- /dev/null +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!parse + # # A CustomDataSourceConfig which specifies the schema of your `item` and + # # optionally `sample` namespaces. The response schema defines the shape of the + # # data that will be: + # # + # # - Used to define your testing criteria and + # # - What data is required when creating a run + # # + # # @param schema [Hash{Symbol=>Object}] + # # @param type [Symbol, :custom] + # # + # def initialize(schema:, type: :custom, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_delete_params.rb b/lib/openai/models/eval_delete_params.rb new file mode 100644 index 00000000..de02e69c --- /dev/null +++ b/lib/openai/models/eval_delete_params.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#delete + class EvalDeleteParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!parse + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_delete_response.rb b/lib/openai/models/eval_delete_response.rb new file mode 100644 index 00000000..a60ec9d1 --- /dev/null +++ b/lib/openai/models/eval_delete_response.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#delete + class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel + # @!attribute deleted + # + # @return [Boolean] + required :deleted, OpenAI::Internal::Type::Boolean + + # @!attribute eval_id + # + # @return [String] + required :eval_id, String + + # @!attribute object + # + # @return [String] + required :object, String + + # @!parse + # # @param deleted [Boolean] + # # @param eval_id [String] + # # @param object [String] + # # + # def initialize(deleted:, eval_id:, object:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb new file mode 100644 
index 00000000..1455f6e4 --- /dev/null +++ b/lib/openai/models/eval_label_model_grader.rb @@ -0,0 +1,252 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel + # @!attribute input + # + # @return [Array] + required :input, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalLabelModelGrader::Input] } + + # @!attribute labels + # The labels to assign to each item in the evaluation. + # + # @return [Array] + required :labels, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute model + # The model to use for the evaluation. Must support structured outputs. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute passing_labels + # The labels that indicate a passing result. Must be a subset of labels. + # + # @return [Array] + required :passing_labels, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute type + # The object type, which is always `label_model`. + # + # @return [Symbol, :label_model] + required :type, const: :label_model + + # @!parse + # # A LabelModelGrader object which uses a model to assign labels to each item in + # # the evaluation. + # # + # # @param input [Array] + # # @param labels [Array] + # # @param model [String] + # # @param name [String] + # # @param passing_labels [Array] + # # @param type [Symbol, :label_model] + # # + # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # An item can either be an input message or an output message. + module Input + extend OpenAI::Internal::Type::Union + + discriminator :role + + variant :assistant, -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant } + + variant -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage } + + class InputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + # @return [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] + required :content, -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content } + + # @!attribute role + # The role of the message. One of `user`, `system`, or `developer`. + # + # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] + required :role, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role } + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] + required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type } + + # @!parse + # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] + # # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] + # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] + # # + # def initialize(content:, role:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `input_text`. 
+ # + # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] + required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `input_text`. + # + # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT = :input_text + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + # + # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + SYSTEM = :system + DEVELOPER = :developer + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # The type of item, which is always `message`. + # + # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + class Assistant < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + # @return [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] + required :content, -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content } + + # @!attribute role + # The role of the message. Must be `assistant` for output. + # + # @return [Symbol, :assistant] + required :role, const: :assistant + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] + required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type } + + # @!parse + # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] + # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] + # # @param role [Symbol, :assistant] + # # + # def initialize(content:, type:, role: :assistant, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `output_text`. + # + # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] + required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `output_text`. + # + # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT = :output_text + + finalize! 
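+ + # (Editor's note, inferred from the `discriminator :role` declaration above: a hash whose `role` is `assistant` coerces to the `Assistant` variant, while any other role falls through to the keyless `InputMessage` variant.)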
+ + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The type of item, which is always `message`. + # + # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # @!parse + # # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] + # def self.variants; end + end + end + end +end diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb new file mode 100644 index 00000000..cfee50cd --- /dev/null +++ b/lib/openai/models/eval_list_params.rb @@ -0,0 +1,95 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#list + class EvalListParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute [r] after + # Identifier for the last eval from the previous pagination request. + # + # @return [String, nil] + optional :after, String + + # @!parse + # # @return [String] + # attr_writer :after + + # @!attribute [r] limit + # Number of evals to retrieve. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!parse + # # @return [Integer] + # attr_writer :limit + + # @!attribute [r] order + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + # + # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil] + optional :order, enum: -> { OpenAI::Models::EvalListParams::Order } + + # @!parse + # # @return [Symbol, OpenAI::Models::EvalListParams::Order] + # attr_writer :order + + # @!attribute [r] order_by + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. + # + # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil] + optional :order_by, enum: -> { OpenAI::Models::EvalListParams::OrderBy } + + # @!parse + # # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # attr_writer :order_by + + # @!parse + # # @param after [String] + # # @param limit [Integer] + # # @param order [Symbol, OpenAI::Models::EvalListParams::Order] + # # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. + module OrderBy + extend OpenAI::Internal::Type::Enum + + CREATED_AT = :created_at + UPDATED_AT = :updated_at + + finalize! 
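+ + # A hedged usage sketch (editor's addition; assumes a configured `client`): + # + #   client.evals.list(limit: 20, order: :desc, order_by: :updated_at)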
+ + # @!parse + # # @return [Array] + # def self.values; end + end + end + end +end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb new file mode 100644 index 00000000..e91b7773 --- /dev/null +++ b/lib/openai/models/eval_list_response.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#list + class EvalListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) for when the eval was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source_config + # Configuration of data sources used in runs of the evaluation. + # + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig } + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute name + # The name of the evaluation. + # + # @return [String] + required :name, String + + # @!attribute object + # The object type. + # + # @return [Symbol, :eval] + required :object, const: :eval + + # @!attribute share_with_openai + # Indicates whether the evaluation is shared with OpenAI. + # + # @return [Boolean] + required :share_with_openai, OpenAI::Internal::Type::Boolean + + # @!attribute testing_criteria + # A list of testing criteria. + # + # @return [Array] + required :testing_criteria, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } + + # @!parse + # # An Eval object with a data source config and testing criteria. An Eval + # # represents a task to be done for your LLM integration. Like: + # # + # # - Improve the quality of my chatbot + # # - See how well my chatbot handles customer support + # # - Check if o3-mini is better at my usecase than gpt-4o + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param name [String] + # # @param share_with_openai [Boolean] + # # @param testing_criteria [Array] + # # @param object [Symbol, :eval] + # # + # def initialize( + # id:, + # created_at:, + # data_source_config:, + # metadata:, + # name:, + # share_with_openai:, + # testing_criteria:, + # object: :eval, + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Configuration of data sources used in runs of the evaluation. + # + # @see OpenAI::Models::EvalListResponse#data_source_config + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A CustomDataSourceConfig which specifies the schema of your `item` and optionally `sample` namespaces. 
+ # The response schema defines the shape of the data that will be: + # - Used to define your testing criteria and + # - What data is required when creating a run + variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + + # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + # The schema returned by this data source config is used to define what variables are available in your evals. + # `item` and `sample` are both defined when using this data source config. + variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + + # @!parse + #   # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + #   def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. + variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + + # @!parse + #   # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + #   def self.variants; end + end + end + end +end diff --git a/lib/openai/models/eval_retrieve_params.rb b/lib/openai/models/eval_retrieve_params.rb new file mode 100644 index 00000000..06e448ac --- /dev/null +++ b/lib/openai/models/eval_retrieve_params.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#retrieve + class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel + # @!parse + #   extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!parse + #   # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + #   # + #   def initialize(request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb new file mode 100644 index 00000000..8b12f5ed --- /dev/null +++ b/lib/openai/models/eval_retrieve_response.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#retrieve + class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + #   Unique identifier for the evaluation. + # + #   @return [String] + required :id, String + + # @!attribute created_at + #   The Unix timestamp (in seconds) for when the eval was created. + # + #   @return [Integer] + required :created_at, Integer + + # @!attribute data_source_config + #   Configuration of data sources used in runs of the evaluation.
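+ # + #   A hedged sketch (editor's addition) of consuming this union with Ruby pattern matching; `evaluation` stands in for a retrieved response: + # + #     case evaluation.data_source_config + #     in OpenAI::Models::EvalCustomDataSourceConfig => config + #       config.schema + #     in OpenAI::Models::EvalStoredCompletionsDataSourceConfig => config + #       config.metadata + #     end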
+ # + #   @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig } + + # @!attribute metadata + #   Set of 16 key-value pairs that can be attached to an object. This can be useful + #   for storing additional information about the object in a structured format, and + #   querying for objects via API or the dashboard. + # + #   Keys are strings with a maximum length of 64 characters. Values are strings with + #   a maximum length of 512 characters. + # + #   @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute name + #   The name of the evaluation. + # + #   @return [String] + required :name, String + + # @!attribute object + #   The object type. + # + #   @return [Symbol, :eval] + required :object, const: :eval + + # @!attribute share_with_openai + #   Indicates whether the evaluation is shared with OpenAI. + # + #   @return [Boolean] + required :share_with_openai, OpenAI::Internal::Type::Boolean + + # @!attribute testing_criteria + #   A list of testing criteria. + # + #   @return [Array] + required :testing_criteria, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } + + # @!parse + #   # An Eval object with a data source config and testing criteria. An Eval + #   # represents a task to be done for your LLM integration. Like: + #   # + #   # - Improve the quality of my chatbot + #   # - See how well my chatbot handles customer support + #   # - Check if o3-mini is better at my usecase than gpt-4o + #   # + #   # @param id [String] + #   # @param created_at [Integer] + #   # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + #   # @param metadata [Hash{Symbol=>String}, nil] + #   # @param name [String] + #   # @param share_with_openai [Boolean] + #   # @param testing_criteria [Array] + #   # @param object [Symbol, :eval] + #   # + #   def initialize( + #     id:, + #     created_at:, + #     data_source_config:, + #     metadata:, + #     name:, + #     share_with_openai:, + #     testing_criteria:, + #     object: :eval, + #     ** + #   ) + #     super + #   end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Configuration of data sources used in runs of the evaluation. + # + # @see OpenAI::Models::EvalRetrieveResponse#data_source_config + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A CustomDataSourceConfig which specifies the schema of your `item` and optionally `sample` namespaces. + # The response schema defines the shape of the data that will be: + # - Used to define your testing criteria and + # - What data is required when creating a run + variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + + # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + # The schema returned by this data source config is used to define what variables are available in your evals. + # `item` and `sample` are both defined when using this data source config.
+ variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + + # @!parse + #   # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + #   def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. + variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + + # @!parse + #   # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + #   def self.variants; end + end + end + end +end diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb new file mode 100644 index 00000000..54752125 --- /dev/null +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel + # @!attribute schema + #   The json schema for the run data source items. Learn how to build JSON schemas + #   [here](https://json-schema.org/). + # + #   @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + #   The type of data source. Always `stored_completions`. + # + #   @return [Symbol, :stored_completions] + required :type, const: :stored_completions + + # @!attribute metadata + #   Set of 16 key-value pairs that can be attached to an object. This can be useful + #   for storing additional information about the object in a structured format, and + #   querying for objects via API or the dashboard. + # + #   Keys are strings with a maximum length of 64 characters. Values are strings with + #   a maximum length of 512 characters. + # + #   @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!parse + #   # A StoredCompletionsDataSourceConfig which specifies the metadata property of + #   # your stored completions query. This is usually metadata like `usecase=chatbot` + #   # or `prompt-version=v2`, etc. The schema returned by this data source config is + #   # used to define what variables are available in your evals. `item` and `sample` + #   # are both defined when using this data source config.
+ #   # + #   # @param schema [Hash{Symbol=>Object}] + #   # @param metadata [Hash{Symbol=>String}, nil] + #   # @param type [Symbol, :stored_completions] + #   # + #   def initialize(schema:, metadata: nil, type: :stored_completions, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb new file mode 100644 index 00000000..57192594 --- /dev/null +++ b/lib/openai/models/eval_string_check_grader.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel + # @!attribute input + #   The input text. This may include template strings. + # + #   @return [String] + required :input, String + + # @!attribute name + #   The name of the grader. + # + #   @return [String] + required :name, String + + # @!attribute operation + #   The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + #   @return [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] + required :operation, enum: -> { OpenAI::Models::EvalStringCheckGrader::Operation } + + # @!attribute reference + #   The reference text. This may include template strings. + # + #   @return [String] + required :reference, String + + # @!attribute type + #   The object type, which is always `string_check`. + # + #   @return [Symbol, :string_check] + required :type, const: :string_check + + # @!parse + #   # A StringCheckGrader object that performs a string comparison between input and + #   # reference using a specified operation. + #   # + #   # @param input [String] + #   # @param name [String] + #   # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] + #   # @param reference [String] + #   # @param type [Symbol, :string_check] + #   # + #   def initialize(input:, name:, operation:, reference:, type: :string_check, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + # @see OpenAI::Models::EvalStringCheckGrader#operation + module Operation + extend OpenAI::Internal::Type::Enum + + EQ = :eq + NE = :ne + LIKE = :like + ILIKE = :ilike + + finalize! + + # @!parse + #   # @return [Array] + #   def self.values; end + end + end + end +end diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb new file mode 100644 index 00000000..4362ad72 --- /dev/null +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel + # @!attribute evaluation_metric + #   The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + #   `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # + #   @return [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] + required :evaluation_metric, enum: -> { OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric } + + # @!attribute input + #   The text being graded. + # + #   @return [String] + required :input, String + + # @!attribute pass_threshold + #   A float score; a value greater than or equal to this threshold indicates a + #   passing grade. + # + #   @return [Float] + required :pass_threshold, Float + + # @!attribute reference + #   The text being graded against.
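+ # + #   (Editor's sketch of a full grader payload, assuming `{{...}}` template strings apply here as they do for the string-check grader; the `item`/`sample` field names are hypothetical:) + # + #     { + #       type: :text_similarity, + #       name: "similarity", + #       evaluation_metric: :fuzzy_match, + #       input: "{{sample.output_text}}", + #       reference: "{{item.reference}}", + #       pass_threshold: 0.8 + #     }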
+ # + # @return [String] + required :reference, String + + # @!attribute type + # The type of grader. + # + # @return [Symbol, :text_similarity] + required :type, const: :text_similarity + + # @!attribute [r] name + # The name of the grader. + # + # @return [String, nil] + optional :name, String + + # @!parse + # # @return [String] + # attr_writer :name + + # @!parse + # # A TextSimilarityGrader object which grades text based on similarity metrics. + # # + # # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] + # # @param input [String] + # # @param pass_threshold [Float] + # # @param reference [String] + # # @param name [String] + # # @param type [Symbol, :text_similarity] + # # + # def initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # + # @see OpenAI::Models::EvalTextSimilarityGrader#evaluation_metric + module EvaluationMetric + extend OpenAI::Internal::Type::Enum + + FUZZY_MATCH = :fuzzy_match + BLEU = :bleu + GLEU = :gleu + METEOR = :meteor + ROUGE_1 = :rouge_1 + ROUGE_2 = :rouge_2 + ROUGE_3 = :rouge_3 + ROUGE_4 = :rouge_4 + ROUGE_5 = :rouge_5 + ROUGE_L = :rouge_l + COSINE = :cosine + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + end +end diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb new file mode 100644 index 00000000..babe7583 --- /dev/null +++ b/lib/openai/models/eval_update_params.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#update + class EvalUpdateParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute [r] name + # Rename the evaluation. + # + # @return [String, nil] + optional :name, String + + # @!parse + # # @return [String] + # attr_writer :name + + # @!parse + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param name [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(metadata: nil, name: nil, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end +end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb new file mode 100644 index 00000000..b80950ed --- /dev/null +++ b/lib/openai/models/eval_update_response.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Evals#update + class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation. 
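+ # + #   (Editor's sketch of the call that yields this response; the eval id is hypothetical and `client` is assumed to be a configured `OpenAI::Client`:) + # + #     client.evals.update( + #       "eval_abc123", + #       name: "Chatbot quality", + #       metadata: {prompt_version: "v3"} + #     )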
+ # + #   @return [String] + required :id, String + + # @!attribute created_at + #   The Unix timestamp (in seconds) for when the eval was created. + # + #   @return [Integer] + required :created_at, Integer + + # @!attribute data_source_config + #   Configuration of data sources used in runs of the evaluation. + # + #   @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } + + # @!attribute metadata + #   Set of 16 key-value pairs that can be attached to an object. This can be useful + #   for storing additional information about the object in a structured format, and + #   querying for objects via API or the dashboard. + # + #   Keys are strings with a maximum length of 64 characters. Values are strings with + #   a maximum length of 512 characters. + # + #   @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute name + #   The name of the evaluation. + # + #   @return [String] + required :name, String + + # @!attribute object + #   The object type. + # + #   @return [Symbol, :eval] + required :object, const: :eval + + # @!attribute share_with_openai + #   Indicates whether the evaluation is shared with OpenAI. + # + #   @return [Boolean] + required :share_with_openai, OpenAI::Internal::Type::Boolean + + # @!attribute testing_criteria + #   A list of testing criteria. + # + #   @return [Array] + required :testing_criteria, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } + + # @!parse + #   # An Eval object with a data source config and testing criteria. An Eval + #   # represents a task to be done for your LLM integration. Like: + #   # + #   # - Improve the quality of my chatbot + #   # - See how well my chatbot handles customer support + #   # - Check if o3-mini is better at my usecase than gpt-4o + #   # + #   # @param id [String] + #   # @param created_at [Integer] + #   # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + #   # @param metadata [Hash{Symbol=>String}, nil] + #   # @param name [String] + #   # @param share_with_openai [Boolean] + #   # @param testing_criteria [Array] + #   # @param object [Symbol, :eval] + #   # + #   def initialize( + #     id:, + #     created_at:, + #     data_source_config:, + #     metadata:, + #     name:, + #     share_with_openai:, + #     testing_criteria:, + #     object: :eval, + #     ** + #   ) + #     super + #   end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Configuration of data sources used in runs of the evaluation. + # + # @see OpenAI::Models::EvalUpdateResponse#data_source_config + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A CustomDataSourceConfig which specifies the schema of your `item` and optionally `sample` namespaces. + # The response schema defines the shape of the data that will be: + # - Used to define your testing criteria and + # - What data is required when creating a run + variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + + # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + # The schema returned by this data source config is used to define what variables are available in your evals.
+ # `item` and `sample` are both defined when using this data source config. + variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + + # @!parse + # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + # def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. + variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + + # @!parse + # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # def self.variants; end + end + end + end +end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb new file mode 100644 index 00000000..212bc40a --- /dev/null +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -0,0 +1,573 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + required :input_messages, + union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String] + required :model, String + + # @!attribute source + # A StoredCompletionsRunDataSource configuration describing a set of filters + # + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + required :source, union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source } + + # @!attribute type + # The type of run data source. Always `completions`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type } + + # @!attribute [r] sampling_params + # + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } + + # @!parse + # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] + # attr_writer :sampling_params + + # @!parse + # # A CompletionsRunDataSource object describing a model sampling configuration. 
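+ #   # + #   # A hedged sketch (editor's addition, not generated from the spec; the file + #   # id and the `id` key on the `file_id` source are assumptions): + #   # + #   #   { + #   #     type: :completions, + #   #     model: "o3-mini", + #   #     input_messages: { + #   #       type: :template, + #   #       template: [{role: "user", content: "{{item.question}}"}] + #   #     }, + #   #     source: {type: :file_id, id: "file-abc123"} + #   #   }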
+ #   # + #   # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + #   # @param model [String] + #   # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + #   # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + #   # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] + #   # + #   def initialize(input_messages:, model:, source:, type:, sampling_params: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + #   A list of chat messages forming the prompt or context. May include variable + #   references to the "item" namespace, i.e. {{item.name}}. + # + #   @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } + + # @!attribute type + #   The type of input messages. Always `template`. + # + #   @return [Symbol, :template] + required :type, const: :template + + # @!parse + #   # @param template [Array] + #   # @param type [Symbol, :template] + #   # + #   def initialize(template:, type: :template, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage } + + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage } + + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + #   The content of the message. + # + #   @return [String] + required :content, String + + # @!attribute role + #   The role of the message (e.g. "system", "assistant", "user"). + # + #   @return [String] + required :role, String + + # @!parse + #   # @param content [String] + #   # @param role [String] + #   # + #   def initialize(content:, role:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class InputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + #   @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] + required :content, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content } + + # @!attribute role + #   The role of the message. One of `user`, `system`, or `developer`.
+ # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] + required :role, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role } + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] + required :type, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type } + + # @!parse + # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] + # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] + # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] + # # + # def initialize(content:, role:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `input_text`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] + required :type, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `input_text`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT = :input_text + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + SYSTEM = :system + DEVELOPER = :developer + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # The type of item, which is always `message`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + finalize! 
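+ + # (Editor's illustration: in template form, an input item referencing the `item` namespace might look like `{role: "user", content: "Summarize: {{item.document}}"}`; the `document` field is hypothetical.)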
+ + # @!parse + # # @return [Array] + # def self.values; end + end + end + + class OutputMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] + required :content, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content } + + # @!attribute role + # The role of the message. Must be `assistant` for output. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] + required :role, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role } + + # @!attribute type + # The type of item, which is always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] + required :type, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type } + + # @!parse + # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] + # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] + # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] + # # + # def initialize(content:, role:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#content + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of content, which is always `output_text`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] + required :type, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type } + + # @!parse + # # @param text [String] + # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] + # # + # def initialize(text:, type:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The type of content, which is always `output_text`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content#type + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT = :output_text + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + + # The role of the message. Must be `assistant` for output. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#role + module Role + extend OpenAI::Internal::Type::Enum + + ASSISTANT = :assistant + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # The type of item, which is always `message`. 
+                #
+                # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#type
+                module Type
+                  extend OpenAI::Internal::Type::Enum
+
+                  MESSAGE = :message
+
+                  finalize!
+
+                  # @!parse
+                  #   # @return [Array<Symbol>]
+                  #   def self.values; end
+                end
+              end
+
+              # @!parse
+              #   # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)]
+              #   def self.variants; end
+            end
+          end
+
+          class ItemReference < OpenAI::Internal::Type::BaseModel
+            # @!attribute item_reference
+            #   A reference to a variable in the "item" namespace, i.e. "item.name".
+            #
+            #   @return [String]
+            required :item_reference, String
+
+            # @!attribute type
+            #   The type of input messages. Always `item_reference`.
+            #
+            #   @return [Symbol, :item_reference]
+            required :type, const: :item_reference
+
+            # @!parse
+            #   # @param item_reference [String]
+            #   # @param type [Symbol, :item_reference]
+            #   #
+            #   def initialize(item_reference:, type: :item_reference, **) = super
+
+            # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+          end
+
+          # @!parse
+          #   # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)]
+          #   def self.variants; end
+        end
+
+        # The data source for the run: inline file content, a file ID, or a
+        # StoredCompletionsRunDataSource configuration describing a set of filters.
+        #
+        # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#source
+        module Source
+          extend OpenAI::Internal::Type::Union
+
+          discriminator :type
+
+          variant :file_content,
+                  -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent }
+
+          variant :file_id, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID }
+
+          # A StoredCompletionsRunDataSource configuration describing a set of filters.
+          variant :stored_completions,
+                  -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions }
+
+          class FileContent < OpenAI::Internal::Type::BaseModel
+            # @!attribute content
+            #   The content of the jsonl file.
+            #
+            #   @return [Array<OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content>]
+            required :content,
+                     -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] }
+
+            # @!attribute type
+            #   The type of jsonl source. Always `file_content`.
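+            #
+            #   @example A `file_content` source, sketched from this class and its
+            #     `Content` rows (the item/sample payloads are invented):
+            #       {
+            #         type: :file_content,
+            #         content: [{item: {question: "2 + 2 = ?"}, sample: {answer: "4"}}]
+            #       }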
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!parse + # # @param content [Array] + # # @param type [Symbol, :file_content] + # # + # def initialize(content:, type: :file_content, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute [r] sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!parse + # # @return [Hash{Symbol=>Object}] + # attr_writer :sample + + # @!parse + # # @param item [Hash{Symbol=>Object}] + # # @param sample [Hash{Symbol=>Object}] + # # + # def initialize(item:, sample: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!parse + # # @param id [String] + # # @param type [Symbol, :file_id] + # # + # def initialize(id:, type: :file_id, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + # @!attribute created_after + # An optional Unix timestamp to filter items created after this time. + # + # @return [Integer, nil] + required :created_after, Integer, nil?: true + + # @!attribute created_before + # An optional Unix timestamp to filter items created before this time. + # + # @return [Integer, nil] + required :created_before, Integer, nil?: true + + # @!attribute limit + # An optional maximum number of items to return. + # + # @return [Integer, nil] + required :limit, Integer, nil?: true + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute model + # An optional model to filter by (e.g., 'gpt-4o'). + # + # @return [String, nil] + required :model, String, nil?: true + + # @!attribute type + # The type of source. Always `stored_completions`. 
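+            #
+            #   @example A stored-completions filter, sketched from this class (the
+            #     values are invented; every filter field here may also be `nil`):
+            #       {
+            #         type: :stored_completions,
+            #         model: "gpt-4o",
+            #         created_after: 1_700_000_000,
+            #         created_before: nil,
+            #         limit: 100,
+            #         metadata: nil
+            #       }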
+ # + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions + + # @!parse + # # A StoredCompletionsRunDataSource configuration describing a set of filters + # # + # # @param created_after [Integer, nil] + # # @param created_before [Integer, nil] + # # @param limit [Integer, nil] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param model [String, nil] + # # @param type [Symbol, :stored_completions] + # # + # def initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + # def self.variants; end + end + + # The type of run data source. Always `completions`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#type + module Type + extend OpenAI::Internal::Type::Enum + + COMPLETIONS = :completions + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute [r] max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!parse + # # @return [Integer] + # attr_writer :max_completion_tokens + + # @!attribute [r] seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!parse + # # @return [Integer] + # attr_writer :seed + + # @!attribute [r] temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!parse + # # @return [Float] + # attr_writer :temperature + + # @!attribute [r] top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!parse + # # @return [Float] + # attr_writer :top_p + + # @!parse + # # @param max_completion_tokens [Integer] + # # @param seed [Integer] + # # @param temperature [Float] + # # @param top_p [Float] + # # + # def initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb new file mode 100644 index 00000000..a3e22ebb --- /dev/null +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -0,0 +1,115 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel + # @!attribute source + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + required :source, union: -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source } + + # @!attribute type + # The type of data source. Always `jsonl`. 
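+        #
+        #   @example A complete JSONL data source, sketched from this file's classes
+        #     (the file ID is invented):
+        #       OpenAI::Models::Evals::CreateEvalJSONLRunDataSource.new(
+        #         source: {type: :file_id, id: "file-abc123"},
+        #         type: :jsonl
+        #       )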
+        #
+        #   @return [Symbol, :jsonl]
+        required :type, const: :jsonl
+
+        # @!parse
+        #   # A JsonlRunDataSource object that specifies a JSONL file matching the eval
+        #   #
+        #   # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID]
+        #   # @param type [Symbol, :jsonl]
+        #   #
+        #   def initialize(source:, type: :jsonl, **) = super
+
+        # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+
+        # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source
+        module Source
+          extend OpenAI::Internal::Type::Union
+
+          discriminator :type
+
+          variant :file_content, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent }
+
+          variant :file_id, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID }
+
+          class FileContent < OpenAI::Internal::Type::BaseModel
+            # @!attribute content
+            #   The content of the jsonl file.
+            #
+            #   @return [Array<OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content>]
+            required :content,
+                     -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] }
+
+            # @!attribute type
+            #   The type of jsonl source. Always `file_content`.
+            #
+            #   @return [Symbol, :file_content]
+            required :type, const: :file_content
+
+            # @!parse
+            #   # @param content [Array<OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content>]
+            #   # @param type [Symbol, :file_content]
+            #   #
+            #   def initialize(content:, type: :file_content, **) = super
+
+            # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+
+            class Content < OpenAI::Internal::Type::BaseModel
+              # @!attribute item
+              #
+              #   @return [Hash{Symbol=>Object}]
+              required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+              # @!attribute [r] sample
+              #
+              #   @return [Hash{Symbol=>Object}, nil]
+              optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+              # @!parse
+              #   # @return [Hash{Symbol=>Object}]
+              #   attr_writer :sample
+
+              # @!parse
+              #   # @param item [Hash{Symbol=>Object}]
+              #   # @param sample [Hash{Symbol=>Object}]
+              #   #
+              #   def initialize(item:, sample: nil, **) = super
+
+              # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+            end
+          end
+
+          class FileID < OpenAI::Internal::Type::BaseModel
+            # @!attribute id
+            #   The identifier of the file.
+            #
+            #   @return [String]
+            required :id, String
+
+            # @!attribute type
+            #   The type of jsonl source. Always `file_id`.
+            #
+            #   @return [Symbol, :file_id]
+            required :type, const: :file_id
+
+            # @!parse
+            #   # @param id [String]
+            #   # @param type [Symbol, :file_id]
+            #   #
+            #   def initialize(id:, type: :file_id, **) = super
+
+            # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+          end
+
+          # @!parse
+          #   # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)]
+          #   def self.variants; end
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/evals/eval_api_error.rb b/lib/openai/models/evals/eval_api_error.rb
new file mode 100644
index 00000000..11b56e24
--- /dev/null
+++ b/lib/openai/models/evals/eval_api_error.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module Evals
+      class EvalAPIError < OpenAI::Internal::Type::BaseModel
+        # @!attribute code
+        #   The error code.
+        #
+        #   @return [String]
+        required :code, String
+
+        # @!attribute message
+        #   The error message.
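+        #
+        #   For example (invented values, purely illustrative): a `code` of
+        #   "server_error" paired with a `message` like "The eval run failed.".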
+ # + # @return [String] + required :message, String + + # @!parse + # # An object representing an error response from the Eval API. + # # + # # @param code [String] + # # @param message [String] + # # + # def initialize(code:, message:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + + EvalAPIError = Evals::EvalAPIError + end +end diff --git a/lib/openai/models/evals/run_cancel_params.rb b/lib/openai/models/evals/run_cancel_params.rb new file mode 100644 index 00000000..b492403f --- /dev/null +++ b/lib/openai/models/evals/run_cancel_params.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#cancel + class RunCancelParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute eval_id + # + # @return [String] + required :eval_id, String + + # @!parse + # # @param eval_id [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(eval_id:, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb new file mode 100644 index 00000000..2121c8f8 --- /dev/null +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -0,0 +1,278 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#cancel + class RunCancelResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation run. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the evaluation run was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source + # Information about the run's data source. + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } + + # @!attribute error + # An object representing an error response from the Eval API. + # + # @return [OpenAI::Models::Evals::EvalAPIError] + required :error, -> { OpenAI::Models::Evals::EvalAPIError } + + # @!attribute eval_id + # The identifier of the associated evaluation. + # + # @return [String] + required :eval_id, String + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute model + # The model that is evaluated, if applicable. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the evaluation run. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of the object. Always "eval.run". 
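+        #
+        #   @example Cancelling a run — a sketch only: it assumes a configured
+        #     `client` whose `evals.runs.cancel` takes the run ID plus the `eval_id`
+        #     param defined in RunCancelParams above, and the IDs are invented:
+        #       response = client.evals.runs.cancel("run_123", eval_id: "eval_456")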
+ # + # @return [Symbol, :"eval.run"] + required :object, const: :"eval.run" + + # @!attribute per_model_usage + # Usage statistics for each model during the evaluation run. + # + # @return [Array] + required :per_model_usage, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage] } + + # @!attribute per_testing_criteria_results + # Results per testing criteria applied during the evaluation run. + # + # @return [Array] + required :per_testing_criteria_results, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult] } + + # @!attribute report_url + # The URL to the rendered evaluation run report on the UI dashboard. + # + # @return [String] + required :report_url, String + + # @!attribute result_counts + # Counters summarizing the outcomes of the evaluation run. + # + # @return [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] + required :result_counts, -> { OpenAI::Models::Evals::RunCancelResponse::ResultCounts } + + # @!attribute status + # The status of the evaluation run. + # + # @return [String] + required :status, String + + # @!parse + # # A schema representing an evaluation run. + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param eval_id [String] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param model [String] + # # @param name [String] + # # @param per_model_usage [Array] + # # @param per_testing_criteria_results [Array] + # # @param report_url [String] + # # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] + # # @param status [String] + # # @param object [Symbol, :"eval.run"] + # # + # def initialize( + # id:, + # created_at:, + # data_source:, + # error:, + # eval_id:, + # metadata:, + # model:, + # name:, + # per_model_usage:, + # per_testing_criteria_results:, + # report_url:, + # result_counts:, + # status:, + # object: :"eval.run", + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Information about the run's data source. + # + # @see OpenAI::Models::Evals::RunCancelResponse#data_source + module DataSource + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval + variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + + # A CompletionsRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. + # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. + # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute invocation_count + # The number of invocations. + # + # @return [Integer] + required :invocation_count, Integer + + # @!attribute model_name + # The name of the model. 
+ # + # @return [String] + required :model_name, String + + # @!attribute prompt_tokens + # The number of prompt tokens used. + # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # @param cached_tokens [Integer] + # # @param completion_tokens [Integer] + # # @param invocation_count [Integer] + # # @param model_name [String] + # # @param prompt_tokens [Integer] + # # @param total_tokens [Integer] + # # + # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # @!attribute failed + # Number of tests failed for this criteria. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of tests passed for this criteria. + # + # @return [Integer] + required :passed, Integer + + # @!attribute testing_criteria + # A description of the testing criteria. + # + # @return [String] + required :testing_criteria, String + + # @!parse + # # @param failed [Integer] + # # @param passed [Integer] + # # @param testing_criteria [String] + # # + # def initialize(failed:, passed:, testing_criteria:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::RunCancelResponse#result_counts + class ResultCounts < OpenAI::Internal::Type::BaseModel + # @!attribute errored + # Number of output items that resulted in an error. + # + # @return [Integer] + required :errored, Integer + + # @!attribute failed + # Number of output items that failed to pass the evaluation. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of output items that passed the evaluation. + # + # @return [Integer] + required :passed, Integer + + # @!attribute total + # Total number of executed output items. + # + # @return [Integer] + required :total, Integer + + # @!parse + # # Counters summarizing the outcomes of the evaluation run. + # # + # # @param errored [Integer] + # # @param failed [Integer] + # # @param passed [Integer] + # # @param total [Integer] + # # + # def initialize(errored:, failed:, passed:, total:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb new file mode 100644 index 00000000..c9ccea28 --- /dev/null +++ b/lib/openai/models/evals/run_create_params.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#create + class RunCreateParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute data_source + # Details about the run's data source. + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + required :data_source, union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource } + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute [r] name + # The name of the run. + # + # @return [String, nil] + optional :name, String + + # @!parse + # # @return [String] + # attr_writer :name + + # @!parse + # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param name [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(data_source:, metadata: nil, name: nil, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Details about the run's data source. + module DataSource + extend OpenAI::Internal::Type::Union + + # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval + variant -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + + # A CompletionsRunDataSource object describing a model sampling configuration. + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # def self.variants; end + end + end + end + end +end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb new file mode 100644 index 00000000..d890f4c5 --- /dev/null +++ b/lib/openai/models/evals/run_create_response.rb @@ -0,0 +1,278 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#create + class RunCreateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation run. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the evaluation run was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source + # Information about the run's data source. + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } + + # @!attribute error + # An object representing an error response from the Eval API. + # + # @return [OpenAI::Models::Evals::EvalAPIError] + required :error, -> { OpenAI::Models::Evals::EvalAPIError } + + # @!attribute eval_id + # The identifier of the associated evaluation. + # + # @return [String] + required :eval_id, String + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute model + # The model that is evaluated, if applicable. 
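+        #
+        #   @example Creating a run that would yield this response — a sketch only:
+        #     it assumes a configured `client` whose `evals.runs.create` takes the
+        #     eval ID plus the params in RunCreateParams, and the IDs are invented:
+        #       response = client.evals.runs.create(
+        #         "eval_456",
+        #         data_source: {type: :jsonl, source: {type: :file_id, id: "file-abc123"}}
+        #       )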
+ # + # @return [String] + required :model, String + + # @!attribute name + # The name of the evaluation run. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of the object. Always "eval.run". + # + # @return [Symbol, :"eval.run"] + required :object, const: :"eval.run" + + # @!attribute per_model_usage + # Usage statistics for each model during the evaluation run. + # + # @return [Array] + required :per_model_usage, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage] } + + # @!attribute per_testing_criteria_results + # Results per testing criteria applied during the evaluation run. + # + # @return [Array] + required :per_testing_criteria_results, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult] } + + # @!attribute report_url + # The URL to the rendered evaluation run report on the UI dashboard. + # + # @return [String] + required :report_url, String + + # @!attribute result_counts + # Counters summarizing the outcomes of the evaluation run. + # + # @return [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] + required :result_counts, -> { OpenAI::Models::Evals::RunCreateResponse::ResultCounts } + + # @!attribute status + # The status of the evaluation run. + # + # @return [String] + required :status, String + + # @!parse + # # A schema representing an evaluation run. + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param eval_id [String] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param model [String] + # # @param name [String] + # # @param per_model_usage [Array] + # # @param per_testing_criteria_results [Array] + # # @param report_url [String] + # # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] + # # @param status [String] + # # @param object [Symbol, :"eval.run"] + # # + # def initialize( + # id:, + # created_at:, + # data_source:, + # error:, + # eval_id:, + # metadata:, + # model:, + # name:, + # per_model_usage:, + # per_testing_criteria_results:, + # report_url:, + # result_counts:, + # status:, + # object: :"eval.run", + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Information about the run's data source. + # + # @see OpenAI::Models::Evals::RunCreateResponse#data_source + module DataSource + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval + variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + + # A CompletionsRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. + # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. 
+ # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute invocation_count + # The number of invocations. + # + # @return [Integer] + required :invocation_count, Integer + + # @!attribute model_name + # The name of the model. + # + # @return [String] + required :model_name, String + + # @!attribute prompt_tokens + # The number of prompt tokens used. + # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # @param cached_tokens [Integer] + # # @param completion_tokens [Integer] + # # @param invocation_count [Integer] + # # @param model_name [String] + # # @param prompt_tokens [Integer] + # # @param total_tokens [Integer] + # # + # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # @!attribute failed + # Number of tests failed for this criteria. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of tests passed for this criteria. + # + # @return [Integer] + required :passed, Integer + + # @!attribute testing_criteria + # A description of the testing criteria. + # + # @return [String] + required :testing_criteria, String + + # @!parse + # # @param failed [Integer] + # # @param passed [Integer] + # # @param testing_criteria [String] + # # + # def initialize(failed:, passed:, testing_criteria:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::RunCreateResponse#result_counts + class ResultCounts < OpenAI::Internal::Type::BaseModel + # @!attribute errored + # Number of output items that resulted in an error. + # + # @return [Integer] + required :errored, Integer + + # @!attribute failed + # Number of output items that failed to pass the evaluation. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of output items that passed the evaluation. + # + # @return [Integer] + required :passed, Integer + + # @!attribute total + # Total number of executed output items. + # + # @return [Integer] + required :total, Integer + + # @!parse + # # Counters summarizing the outcomes of the evaluation run. 
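+          #   #
+          #   # (By the field descriptions, `total` should equal `errored + failed +
+          #   # passed`; that is an inference from this schema, not a documented guarantee.)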
+ # # + # # @param errored [Integer] + # # @param failed [Integer] + # # @param passed [Integer] + # # @param total [Integer] + # # + # def initialize(errored:, failed:, passed:, total:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/evals/run_delete_params.rb b/lib/openai/models/evals/run_delete_params.rb new file mode 100644 index 00000000..71cdb827 --- /dev/null +++ b/lib/openai/models/evals/run_delete_params.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#delete + class RunDeleteParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute eval_id + # + # @return [String] + required :eval_id, String + + # @!parse + # # @param eval_id [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(eval_id:, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/evals/run_delete_response.rb b/lib/openai/models/evals/run_delete_response.rb new file mode 100644 index 00000000..68412255 --- /dev/null +++ b/lib/openai/models/evals/run_delete_response.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#delete + class RunDeleteResponse < OpenAI::Internal::Type::BaseModel + # @!attribute [r] deleted + # + # @return [Boolean, nil] + optional :deleted, OpenAI::Internal::Type::Boolean + + # @!parse + # # @return [Boolean] + # attr_writer :deleted + + # @!attribute [r] object + # + # @return [String, nil] + optional :object, String + + # @!parse + # # @return [String] + # attr_writer :object + + # @!attribute [r] run_id + # + # @return [String, nil] + optional :run_id, String + + # @!parse + # # @return [String] + # attr_writer :run_id + + # @!parse + # # @param deleted [Boolean] + # # @param object [String] + # # @param run_id [String] + # # + # def initialize(deleted: nil, object: nil, run_id: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb new file mode 100644 index 00000000..6dfe1fc2 --- /dev/null +++ b/lib/openai/models/evals/run_list_params.rb @@ -0,0 +1,100 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#list + class RunListParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute [r] after + # Identifier for the last run from the previous pagination request. + # + # @return [String, nil] + optional :after, String + + # @!parse + # # @return [String] + # attr_writer :after + + # @!attribute [r] limit + # Number of runs to retrieve. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!parse + # # @return [Integer] + # attr_writer :limit + + # @!attribute [r] order + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. 
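+        #
+        #   @example Listing runs — a sketch only: it assumes a configured `client`
+        #     whose `evals.runs.list` takes the eval ID plus these params, and the
+        #     eval ID is invented:
+        #       client.evals.runs.list("eval_456", order: :desc, status: :completed, limit: 20)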
+ # + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] + optional :order, enum: -> { OpenAI::Models::Evals::RunListParams::Order } + + # @!parse + # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order] + # attr_writer :order + + # @!attribute [r] status + # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | + # "canceled". + # + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] + optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } + + # @!parse + # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # attr_writer :status + + # @!parse + # # @param after [String] + # # @param limit [Integer] + # # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] + # # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. + module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | + # "canceled". + module Status + extend OpenAI::Internal::Type::Enum + + QUEUED = :queued + IN_PROGRESS = :in_progress + COMPLETED = :completed + CANCELED = :canceled + FAILED = :failed + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + end + end +end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb new file mode 100644 index 00000000..b31795e0 --- /dev/null +++ b/lib/openai/models/evals/run_list_response.rb @@ -0,0 +1,278 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#list + class RunListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation run. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the evaluation run was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source + # Information about the run's data source. + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } + + # @!attribute error + # An object representing an error response from the Eval API. + # + # @return [OpenAI::Models::Evals::EvalAPIError] + required :error, -> { OpenAI::Models::Evals::EvalAPIError } + + # @!attribute eval_id + # The identifier of the associated evaluation. + # + # @return [String] + required :eval_id, String + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
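+        #
+        #   For example (invented values): `{environment: "staging", team: "evals"}`.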
+ # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute model + # The model that is evaluated, if applicable. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the evaluation run. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of the object. Always "eval.run". + # + # @return [Symbol, :"eval.run"] + required :object, const: :"eval.run" + + # @!attribute per_model_usage + # Usage statistics for each model during the evaluation run. + # + # @return [Array] + required :per_model_usage, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::PerModelUsage] } + + # @!attribute per_testing_criteria_results + # Results per testing criteria applied during the evaluation run. + # + # @return [Array] + required :per_testing_criteria_results, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult] } + + # @!attribute report_url + # The URL to the rendered evaluation run report on the UI dashboard. + # + # @return [String] + required :report_url, String + + # @!attribute result_counts + # Counters summarizing the outcomes of the evaluation run. + # + # @return [OpenAI::Models::Evals::RunListResponse::ResultCounts] + required :result_counts, -> { OpenAI::Models::Evals::RunListResponse::ResultCounts } + + # @!attribute status + # The status of the evaluation run. + # + # @return [String] + required :status, String + + # @!parse + # # A schema representing an evaluation run. + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param eval_id [String] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param model [String] + # # @param name [String] + # # @param per_model_usage [Array] + # # @param per_testing_criteria_results [Array] + # # @param report_url [String] + # # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] + # # @param status [String] + # # @param object [Symbol, :"eval.run"] + # # + # def initialize( + # id:, + # created_at:, + # data_source:, + # error:, + # eval_id:, + # metadata:, + # model:, + # name:, + # per_model_usage:, + # per_testing_criteria_results:, + # report_url:, + # result_counts:, + # status:, + # object: :"eval.run", + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Information about the run's data source. + # + # @see OpenAI::Models::Evals::RunListResponse#data_source + module DataSource + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval + variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + + # A CompletionsRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. 
+ # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. + # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute invocation_count + # The number of invocations. + # + # @return [Integer] + required :invocation_count, Integer + + # @!attribute model_name + # The name of the model. + # + # @return [String] + required :model_name, String + + # @!attribute prompt_tokens + # The number of prompt tokens used. + # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # @param cached_tokens [Integer] + # # @param completion_tokens [Integer] + # # @param invocation_count [Integer] + # # @param model_name [String] + # # @param prompt_tokens [Integer] + # # @param total_tokens [Integer] + # # + # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # @!attribute failed + # Number of tests failed for this criteria. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of tests passed for this criteria. + # + # @return [Integer] + required :passed, Integer + + # @!attribute testing_criteria + # A description of the testing criteria. + # + # @return [String] + required :testing_criteria, String + + # @!parse + # # @param failed [Integer] + # # @param passed [Integer] + # # @param testing_criteria [String] + # # + # def initialize(failed:, passed:, testing_criteria:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::RunListResponse#result_counts + class ResultCounts < OpenAI::Internal::Type::BaseModel + # @!attribute errored + # Number of output items that resulted in an error. + # + # @return [Integer] + required :errored, Integer + + # @!attribute failed + # Number of output items that failed to pass the evaluation. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of output items that passed the evaluation. + # + # @return [Integer] + required :passed, Integer + + # @!attribute total + # Total number of executed output items. + # + # @return [Integer] + required :total, Integer + + # @!parse + # # Counters summarizing the outcomes of the evaluation run. 
+ # # + # # @param errored [Integer] + # # @param failed [Integer] + # # @param passed [Integer] + # # @param total [Integer] + # # + # def initialize(errored:, failed:, passed:, total:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/evals/run_retrieve_params.rb b/lib/openai/models/evals/run_retrieve_params.rb new file mode 100644 index 00000000..f17f16a7 --- /dev/null +++ b/lib/openai/models/evals/run_retrieve_params.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#retrieve + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute eval_id + # + # @return [String] + required :eval_id, String + + # @!parse + # # @param eval_id [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(eval_id:, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end +end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb new file mode 100644 index 00000000..219fbd4c --- /dev/null +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -0,0 +1,278 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + # @see OpenAI::Resources::Evals::Runs#retrieve + class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the evaluation run. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the evaluation run was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data_source + # Information about the run's data source. + # + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } + + # @!attribute error + # An object representing an error response from the Eval API. + # + # @return [OpenAI::Models::Evals::EvalAPIError] + required :error, -> { OpenAI::Models::Evals::EvalAPIError } + + # @!attribute eval_id + # The identifier of the associated evaluation. + # + # @return [String] + required :eval_id, String + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute model + # The model that is evaluated, if applicable. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the evaluation run. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of the object. Always "eval.run". + # + # @return [Symbol, :"eval.run"] + required :object, const: :"eval.run" + + # @!attribute per_model_usage + # Usage statistics for each model during the evaluation run. 
+ # + # @return [Array] + required :per_model_usage, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage] } + + # @!attribute per_testing_criteria_results + # Results per testing criteria applied during the evaluation run. + # + # @return [Array] + required :per_testing_criteria_results, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult] } + + # @!attribute report_url + # The URL to the rendered evaluation run report on the UI dashboard. + # + # @return [String] + required :report_url, String + + # @!attribute result_counts + # Counters summarizing the outcomes of the evaluation run. + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] + required :result_counts, -> { OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts } + + # @!attribute status + # The status of the evaluation run. + # + # @return [String] + required :status, String + + # @!parse + # # A schema representing an evaluation run. + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param eval_id [String] + # # @param metadata [Hash{Symbol=>String}, nil] + # # @param model [String] + # # @param name [String] + # # @param per_model_usage [Array] + # # @param per_testing_criteria_results [Array] + # # @param report_url [String] + # # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] + # # @param status [String] + # # @param object [Symbol, :"eval.run"] + # # + # def initialize( + # id:, + # created_at:, + # data_source:, + # error:, + # eval_id:, + # metadata:, + # model:, + # name:, + # per_model_usage:, + # per_testing_criteria_results:, + # report_url:, + # result_counts:, + # status:, + # object: :"eval.run", + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # Information about the run's data source. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse#data_source + module DataSource + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval + variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + + # A CompletionsRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + + # @!parse + # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. + # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. + # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute invocation_count + # The number of invocations. + # + # @return [Integer] + required :invocation_count, Integer + + # @!attribute model_name + # The name of the model. + # + # @return [String] + required :model_name, String + + # @!attribute prompt_tokens + # The number of prompt tokens used. 
+ # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # @param cached_tokens [Integer] + # # @param completion_tokens [Integer] + # # @param invocation_count [Integer] + # # @param model_name [String] + # # @param prompt_tokens [Integer] + # # @param total_tokens [Integer] + # # + # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # @!attribute failed + # Number of tests failed for this criteria. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of tests passed for this criteria. + # + # @return [Integer] + required :passed, Integer + + # @!attribute testing_criteria + # A description of the testing criteria. + # + # @return [String] + required :testing_criteria, String + + # @!parse + # # @param failed [Integer] + # # @param passed [Integer] + # # @param testing_criteria [String] + # # + # def initialize(failed:, passed:, testing_criteria:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse#result_counts + class ResultCounts < OpenAI::Internal::Type::BaseModel + # @!attribute errored + # Number of output items that resulted in an error. + # + # @return [Integer] + required :errored, Integer + + # @!attribute failed + # Number of output items that failed to pass the evaluation. + # + # @return [Integer] + required :failed, Integer + + # @!attribute passed + # Number of output items that passed the evaluation. + # + # @return [Integer] + required :passed, Integer + + # @!attribute total + # Total number of executed output items. + # + # @return [Integer] + required :total, Integer + + # @!parse + # # Counters summarizing the outcomes of the evaluation run. + # # + # # @param errored [Integer] + # # @param failed [Integer] + # # @param passed [Integer] + # # @param total [Integer] + # # + # def initialize(errored:, failed:, passed:, total:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb new file mode 100644 index 00000000..e4b6424f --- /dev/null +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -0,0 +1,105 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + module Runs + # @see OpenAI::Resources::Evals::Runs::OutputItems#list + class OutputItemListParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute eval_id + # + # @return [String] + required :eval_id, String + + # @!attribute [r] after + # Identifier for the last output item from the previous pagination request. + # + # @return [String, nil] + optional :after, String + + # @!parse + # # @return [String] + # attr_writer :after + + # @!attribute [r] limit + # Number of output items to retrieve. 
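+          #
+          #   @example Listing output items — a sketch only: it assumes a configured
+          #     `client` whose `evals.runs.output_items.list` takes the run ID plus
+          #     the params in this class, and the IDs are invented:
+          #       client.evals.runs.output_items.list("run_123", eval_id: "eval_456", limit: 10)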
+ #
+ # @return [Integer, nil]
+ optional :limit, Integer
+
+ # @!parse
+ # # @return [Integer]
+ # attr_writer :limit
+
+ # @!attribute [r] order
+ # Sort order for output items by timestamp. Use `asc` for ascending order or
+ # `desc` for descending order. Defaults to `asc`.
+ #
+ # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil]
+ optional :order, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Order }
+
+ # @!parse
+ # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order]
+ # attr_writer :order
+
+ # @!attribute [r] status
+ # Filter output items by status. Use `fail` to filter by failed output items or
+ # `pass` to filter by passed output items.
+ #
+ # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil]
+ optional :status, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Status }
+
+ # @!parse
+ # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status]
+ # attr_writer :status
+
+ # @!parse
+ # # @param eval_id [String]
+ # # @param after [String]
+ # # @param limit [Integer]
+ # # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order]
+ # # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status]
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+ # #
+ # def initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super

+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+
+ # Sort order for output items by timestamp. Use `asc` for ascending order or
+ # `desc` for descending order. Defaults to `asc`.
+ module Order
+ extend OpenAI::Internal::Type::Enum
+
+ ASC = :asc
+ DESC = :desc
+
+ finalize!
+
+ # @!parse
+ # # @return [Array<Symbol>]
+ # def self.values; end
+ end
+
+ # Filter output items by status. Use `fail` to filter by failed output items or
+ # `pass` to filter by passed output items.
+ module Status
+ extend OpenAI::Internal::Type::Enum
+
+ FAIL = :fail
+ PASS = :pass
+
+ finalize!
+
+ # @!parse
+ # # @return [Array<Symbol>]
+ # def self.values; end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb
new file mode 100644
index 00000000..1b9744e6
--- /dev/null
+++ b/lib/openai/models/evals/runs/output_item_list_response.rb
@@ -0,0 +1,295 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Evals
+ module Runs
+ # @see OpenAI::Resources::Evals::Runs::OutputItems#list
+ class OutputItemListResponse < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # Unique identifier for the evaluation run output item.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # Unix timestamp (in seconds) when the evaluation run was created.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute datasource_item
+ # Details of the input data source item.
+ #
+ # @return [Hash{Symbol=>Object}]
+ required :datasource_item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+ # @!attribute datasource_item_id
+ # The identifier for the data source item.
+ #
+ # @return [Integer]
+ required :datasource_item_id, Integer
+
+ # @!attribute eval_id
+ # The identifier of the evaluation group.
+ #
+ # @return [String]
+ required :eval_id, String
+
+ # @!attribute object
+ # The type of the object. Always "eval.run.output_item".
+ #
+ # @return [Symbol, :"eval.run.output_item"]
+ required :object, const: :"eval.run.output_item"
+
+ # @!attribute results
+ # A list of results from the evaluation run.
+ #
+ # @return [Array<Hash{Symbol=>Object}>]
+ required :results,
+ OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]]
+
+ # @!attribute run_id
+ # The identifier of the evaluation run associated with this output item.
+ #
+ # @return [String]
+ required :run_id, String
+
+ # @!attribute sample
+ # A sample containing the input and output of the evaluation run.
+ #
+ # @return [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample]
+ required :sample, -> { OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample }
+
+ # @!attribute status
+ # The status of the evaluation run.
+ #
+ # @return [String]
+ required :status, String
+
+ # @!parse
+ # # A schema representing an evaluation run output item.
+ # #
+ # # @param id [String]
+ # # @param created_at [Integer]
+ # # @param datasource_item [Hash{Symbol=>Object}]
+ # # @param datasource_item_id [Integer]
+ # # @param eval_id [String]
+ # # @param results [Array<Hash{Symbol=>Object}>]
+ # # @param run_id [String]
+ # # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample]
+ # # @param status [String]
+ # # @param object [Symbol, :"eval.run.output_item"]
+ # #
+ # def initialize(
+ # id:,
+ # created_at:,
+ # datasource_item:,
+ # datasource_item_id:,
+ # eval_id:,
+ # results:,
+ # run_id:,
+ # sample:,
+ # status:,
+ # object: :"eval.run.output_item",
+ # **
+ # )
+ # super
+ # end

+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+
+ # @see OpenAI::Models::Evals::Runs::OutputItemListResponse#sample
+ class Sample < OpenAI::Internal::Type::BaseModel
+ # @!attribute error
+ # An object representing an error response from the Eval API.
+ #
+ # @return [OpenAI::Models::Evals::EvalAPIError]
+ required :error, -> { OpenAI::Models::Evals::EvalAPIError }
+
+ # @!attribute finish_reason
+ # The reason why the sample generation was finished.
+ #
+ # @return [String]
+ required :finish_reason, String
+
+ # @!attribute input
+ # An array of input messages.
+ #
+ # @return [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input>]
+ required :input,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input] }
+
+ # @!attribute max_completion_tokens
+ # The maximum number of tokens allowed for completion.
+ #
+ # @return [Integer]
+ required :max_completion_tokens, Integer
+
+ # @!attribute model
+ # The model used for generating the sample.
+ #
+ # @return [String]
+ required :model, String
+
+ # @!attribute output
+ # An array of output messages.
+ #
+ # @return [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output>]
+ required :output,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output] }
+
+ # @!attribute seed
+ # The seed used for generating the sample.
+ #
+ # @return [Integer]
+ required :seed, Integer
+
+ # @!attribute temperature
+ # The sampling temperature used.
+ #
+ # @return [Float]
+ required :temperature, Float
+
+ # @!attribute top_p
+ # The top_p value used for sampling.
+ #
+ # @return [Float]
+ required :top_p, Float
+
+ # @!attribute usage
+ # Token usage details for the sample.
+ #
+ # @return [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage]
+ required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage }
+
+ # @!parse
+ # # A sample containing the input and output of the evaluation run.
+ # # + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param finish_reason [String] + # # @param input [Array] + # # @param max_completion_tokens [Integer] + # # @param model [String] + # # @param output [Array] + # # @param seed [Integer] + # # @param temperature [Float] + # # @param top_p [Float] + # # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage] + # # + # def initialize( + # error:, + # finish_reason:, + # input:, + # max_completion_tokens:, + # model:, + # output:, + # seed:, + # temperature:, + # top_p:, + # usage:, + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message sender (e.g., system, user, developer). + # + # @return [String] + required :role, String + + # @!parse + # # An input message. + # # + # # @param content [String] + # # @param role [String] + # # + # def initialize(content:, role:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class Output < OpenAI::Internal::Type::BaseModel + # @!attribute [r] content + # The content of the message. + # + # @return [String, nil] + optional :content, String + + # @!parse + # # @return [String] + # attr_writer :content + + # @!attribute [r] role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String, nil] + optional :role, String + + # @!parse + # # @return [String] + # attr_writer :role + + # @!parse + # # @param content [String] + # # @param role [String] + # # + # def initialize(content: nil, role: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. + # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. + # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute prompt_tokens + # The number of prompt tokens used. + # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # Token usage details for the sample. 
+ # #
+ # # @param cached_tokens [Integer]
+ # # @param completion_tokens [Integer]
+ # # @param prompt_tokens [Integer]
+ # # @param total_tokens [Integer]
+ # #
+ # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super

+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/evals/runs/output_item_retrieve_params.rb b/lib/openai/models/evals/runs/output_item_retrieve_params.rb
new file mode 100644
index 00000000..e6154bee
--- /dev/null
+++ b/lib/openai/models/evals/runs/output_item_retrieve_params.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Evals
+ module Runs
+ # @see OpenAI::Resources::Evals::Runs::OutputItems#retrieve
+ class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel
+ # @!parse
+ # extend OpenAI::Internal::Type::RequestParameters::Converter
+ include OpenAI::Internal::Type::RequestParameters
+
+ # @!attribute eval_id
+ #
+ # @return [String]
+ required :eval_id, String
+
+ # @!attribute run_id
+ #
+ # @return [String]
+ required :run_id, String
+
+ # @!parse
+ # # @param eval_id [String]
+ # # @param run_id [String]
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+ # #
+ # def initialize(eval_id:, run_id:, request_options: {}, **) = super

+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb
new file mode 100644
index 00000000..7c7798fb
--- /dev/null
+++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb
@@ -0,0 +1,295 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Evals
+ module Runs
+ # @see OpenAI::Resources::Evals::Runs::OutputItems#retrieve
+ class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # Unique identifier for the evaluation run output item.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # Unix timestamp (in seconds) when the evaluation run was created.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute datasource_item
+ # Details of the input data source item.
+ #
+ # @return [Hash{Symbol=>Object}]
+ required :datasource_item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+ # @!attribute datasource_item_id
+ # The identifier for the data source item.
+ #
+ # @return [Integer]
+ required :datasource_item_id, Integer
+
+ # @!attribute eval_id
+ # The identifier of the evaluation group.
+ #
+ # @return [String]
+ required :eval_id, String
+
+ # @!attribute object
+ # The type of the object. Always "eval.run.output_item".
+ #
+ # @return [Symbol, :"eval.run.output_item"]
+ required :object, const: :"eval.run.output_item"
+
+ # @!attribute results
+ # A list of results from the evaluation run.
+ #
+ # @return [Array<Hash{Symbol=>Object}>]
+ required :results,
+ OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]]
+
+ # @!attribute run_id
+ # The identifier of the evaluation run associated with this output item.
+ #
+ # @return [String]
+ required :run_id, String
+
+ # @!attribute sample
+ # A sample containing the input and output of the evaluation run.
+ #
+ # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample]
+ required :sample, -> { OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample }
+
+ # @!attribute status
+ # The status of the evaluation run.
+ #
+ # @return [String]
+ required :status, String
+
+ # @!parse
+ # # A schema representing an evaluation run output item.
+ # #
+ # # @param id [String]
+ # # @param created_at [Integer]
+ # # @param datasource_item [Hash{Symbol=>Object}]
+ # # @param datasource_item_id [Integer]
+ # # @param eval_id [String]
+ # # @param results [Array<Hash{Symbol=>Object}>]
+ # # @param run_id [String]
+ # # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample]
+ # # @param status [String]
+ # # @param object [Symbol, :"eval.run.output_item"]
+ # #
+ # def initialize(
+ # id:,
+ # created_at:,
+ # datasource_item:,
+ # datasource_item_id:,
+ # eval_id:,
+ # results:,
+ # run_id:,
+ # sample:,
+ # status:,
+ # object: :"eval.run.output_item",
+ # **
+ # )
+ # super
+ # end

+ # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+
+ # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse#sample
+ class Sample < OpenAI::Internal::Type::BaseModel
+ # @!attribute error
+ # An object representing an error response from the Eval API.
+ #
+ # @return [OpenAI::Models::Evals::EvalAPIError]
+ required :error, -> { OpenAI::Models::Evals::EvalAPIError }
+
+ # @!attribute finish_reason
+ # The reason why the sample generation was finished.
+ #
+ # @return [String]
+ required :finish_reason, String
+
+ # @!attribute input
+ # An array of input messages.
+ #
+ # @return [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input>]
+ required :input,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input] }
+
+ # @!attribute max_completion_tokens
+ # The maximum number of tokens allowed for completion.
+ #
+ # @return [Integer]
+ required :max_completion_tokens, Integer
+
+ # @!attribute model
+ # The model used for generating the sample.
+ #
+ # @return [String]
+ required :model, String
+
+ # @!attribute output
+ # An array of output messages.
+ #
+ # @return [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output>]
+ required :output,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output] }
+
+ # @!attribute seed
+ # The seed used for generating the sample.
+ #
+ # @return [Integer]
+ required :seed, Integer
+
+ # @!attribute temperature
+ # The sampling temperature used.
+ #
+ # @return [Float]
+ required :temperature, Float
+
+ # @!attribute top_p
+ # The top_p value used for sampling.
+ #
+ # @return [Float]
+ required :top_p, Float
+
+ # @!attribute usage
+ # Token usage details for the sample.
+ #
+ # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage]
+ required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage }
+
+ # @!parse
+ # # A sample containing the input and output of the evaluation run.
+ # # + # # @param error [OpenAI::Models::Evals::EvalAPIError] + # # @param finish_reason [String] + # # @param input [Array] + # # @param max_completion_tokens [Integer] + # # @param model [String] + # # @param output [Array] + # # @param seed [Integer] + # # @param temperature [Float] + # # @param top_p [Float] + # # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] + # # + # def initialize( + # error:, + # finish_reason:, + # input:, + # max_completion_tokens:, + # model:, + # output:, + # seed:, + # temperature:, + # top_p:, + # usage:, + # ** + # ) + # super + # end + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message sender (e.g., system, user, developer). + # + # @return [String] + required :role, String + + # @!parse + # # An input message. + # # + # # @param content [String] + # # @param role [String] + # # + # def initialize(content:, role:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + class Output < OpenAI::Internal::Type::BaseModel + # @!attribute [r] content + # The content of the message. + # + # @return [String, nil] + optional :content, String + + # @!parse + # # @return [String] + # attr_writer :content + + # @!attribute [r] role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String, nil] + optional :role, String + + # @!parse + # # @return [String] + # attr_writer :role + + # @!parse + # # @param content [String] + # # @param role [String] + # # + # def initialize(content: nil, role: nil, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + + # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute cached_tokens + # The number of tokens retrieved from cache. + # + # @return [Integer] + required :cached_tokens, Integer + + # @!attribute completion_tokens + # The number of completion tokens generated. + # + # @return [Integer] + required :completion_tokens, Integer + + # @!attribute prompt_tokens + # The number of prompt tokens used. + # + # @return [Integer] + required :prompt_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens used. + # + # @return [Integer] + required :total_tokens, Integer + + # @!parse + # # Token usage details for the sample. 
+ # # + # # @param cached_tokens [Integer] + # # @param completion_tokens [Integer] + # # @param prompt_tokens [Integer] + # # @param total_tokens [Integer] + # # + # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb new file mode 100644 index 00000000..b569ae0c --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#create + class PermissionCreateParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute project_ids + # The project identifiers to grant access to. + # + # @return [Array] + required :project_ids, OpenAI::Internal::Type::ArrayOf[String] + + # @!parse + # # @param project_ids [Array] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(project_ids:, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb new file mode 100644 index 00000000..89790c9a --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#create + class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The permission identifier, which can be referenced in the API endpoints. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) for when the permission was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The object type, which is always "checkpoint.permission". + # + # @return [Symbol, :"checkpoint.permission"] + required :object, const: :"checkpoint.permission" + + # @!attribute project_id + # The project identifier that the permission is for. + # + # @return [String] + required :project_id, String + + # @!parse + # # The `checkpoint.permission` object represents a permission for a fine-tuned + # # model checkpoint. 
+ # # + # # @param id [String] + # # @param created_at [Integer] + # # @param project_id [String] + # # @param object [Symbol, :"checkpoint.permission"] + # # + # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb new file mode 100644 index 00000000..0b049049 --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#delete + class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!parse + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb new file mode 100644 index 00000000..3a15c6da --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#delete + class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The ID of the fine-tuned model checkpoint permission that was deleted. + # + # @return [String] + required :id, String + + # @!attribute deleted + # Whether the fine-tuned model checkpoint permission was successfully deleted. + # + # @return [Boolean] + required :deleted, OpenAI::Internal::Type::Boolean + + # @!attribute object + # The object type, which is always "checkpoint.permission". + # + # @return [Symbol, :"checkpoint.permission"] + required :object, const: :"checkpoint.permission" + + # @!parse + # # @param id [String] + # # @param deleted [Boolean] + # # @param object [Symbol, :"checkpoint.permission"] + # # + # def initialize(id:, deleted:, object: :"checkpoint.permission", **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb new file mode 100644 index 00000000..e32c1188 --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve + class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel + # @!parse + # extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute [r] after + # Identifier for the last permission ID from the previous pagination request. 
+ # + # @return [String, nil] + optional :after, String + + # @!parse + # # @return [String] + # attr_writer :after + + # @!attribute [r] limit + # Number of permissions to retrieve. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!parse + # # @return [Integer] + # attr_writer :limit + + # @!attribute [r] order + # The order in which to retrieve permissions. + # + # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] + optional :order, enum: -> { OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order } + + # @!parse + # # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] + # attr_writer :order + + # @!attribute [r] project_id + # The ID of the project to get permissions for. + # + # @return [String, nil] + optional :project_id, String + + # @!parse + # # @return [String] + # attr_writer :project_id + + # @!parse + # # @param after [String] + # # @param limit [Integer] + # # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] + # # @param project_id [String] + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # # + # def initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + # The order in which to retrieve permissions. + module Order + extend OpenAI::Internal::Type::Enum + + ASCENDING = :ascending + DESCENDING = :descending + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb new file mode 100644 index 00000000..6f7cadc9 --- /dev/null +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Checkpoints + # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve + class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute data + # + # @return [Array] + required :data, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] } + + # @!attribute has_more + # + # @return [Boolean] + required :has_more, OpenAI::Internal::Type::Boolean + + # @!attribute object + # + # @return [Symbol, :list] + required :object, const: :list + + # @!attribute first_id + # + # @return [String, nil] + optional :first_id, String, nil?: true + + # @!attribute last_id + # + # @return [String, nil] + optional :last_id, String, nil?: true + + # @!parse + # # @param data [Array] + # # @param has_more [Boolean] + # # @param first_id [String, nil] + # # @param last_id [String, nil] + # # @param object [Symbol, :list] + # # + # def initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list, **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The permission identifier, which can be referenced in the API endpoints. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) for when the permission was created. 
+ # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The object type, which is always "checkpoint.permission". + # + # @return [Symbol, :"checkpoint.permission"] + required :object, const: :"checkpoint.permission" + + # @!attribute project_id + # The project identifier that the permission is for. + # + # @return [String] + required :project_id, String + + # @!parse + # # The `checkpoint.permission` object represents a permission for a fine-tuned + # # model checkpoint. + # # + # # @param id [String] + # # @param created_at [Integer] + # # @param project_id [String] + # # @param object [Symbol, :"checkpoint.permission"] + # # + # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super + + # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + end + end + end + end + end +end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 052c32f9..5280f3ed 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#stream_raw + # @see OpenAI::Resources::Responses#create_streaming class Response < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for this Response. @@ -60,7 +60,7 @@ class Response < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute object @@ -226,7 +226,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] # # @param instructions [String, nil] # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] # # @param output [Array] # # @param parallel_tool_calls [Boolean] # # @param temperature [Float, nil] diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 0e96b549..07d25b0a 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#stream_raw + # @see OpenAI::Resources::Responses#create_streaming class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter @@ -32,7 +32,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. 
# - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::Models::ResponsesModel } # @!attribute include @@ -203,7 +203,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] # # @param include [Array, nil] # # @param instructions [String, nil] # # @param max_output_tokens [Integer, nil] diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 154ea09f..588d1722 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -9,26 +9,26 @@ module ResponsesModel variant enum: -> { OpenAI::Models::ChatModel } - variant const: -> { OpenAI::Models::ResponsesModel::O1_PRO } + variant enum: -> { OpenAI::Models::ResponsesModel::ResponsesOnlyModel } - variant const: -> { OpenAI::Models::ResponsesModel::O1_PRO_2025_03_19 } + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum - variant const: -> { OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW } + O1_PRO = :"o1-pro" + O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW = :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - variant const: -> { OpenAI::Models::ResponsesModel::COMPUTER_USE_PREVIEW_2025_03_11 } + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol)] + # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] # def self.variants; end - - # @!group - - O1_PRO = :"o1-pro" - O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW = :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - - # @!endgroup end end end diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index b72ad867..2902111e 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -94,7 +94,8 @@ def delete(thread_id, params = {}) ) end - # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Beta::Threads#create_and_run_streaming} for streaming + # counterpart. # # Create a thread and run it in one request. # @@ -123,7 +124,7 @@ def delete(thread_id, params = {}) def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_and_run_streaming` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -140,7 +141,7 @@ def create_and_run(params) # # Create a thread and run it in one request. 
# - # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload create_and_run_streaming(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param assistant_id [String] # @param instructions [String, nil] @@ -162,7 +163,7 @@ def create_and_run(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams - def stream_raw(params) + def create_and_run_streaming(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create_and_run` for the non-streaming use case." diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 8ebefd9c..61d7e284 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -8,7 +8,7 @@ class Runs # @return [OpenAI::Resources::Beta::Threads::Runs::Steps] attr_reader :steps - # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming + # See {OpenAI::Resources::Beta::Threads::Runs#create_streaming} for streaming # counterpart. # # Create a run. @@ -41,7 +41,7 @@ class Runs def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_stream_raw` for the streaming use case." + message = "Please use `#create_streaming` for the streaming use case." raise ArgumentError.new(message) end query_params = [:include] @@ -60,7 +60,7 @@ def create(thread_id, params) # # Create a run. # - # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload create_streaming(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param thread_id [String] # @param assistant_id [String] @@ -85,7 +85,7 @@ def create(thread_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams - def create_stream_raw(thread_id, params) + def create_streaming(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." 
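As a quick orientation for reviewers, here is a hedged sketch of how the renamed method might be consumed; the client setup and the thread/assistant IDs are placeholders, not values from this patch:

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # `create_streaming` returns an OpenAI::Internal::Stream, which is
    # enumerable; each element is one server-sent event from the run.
    stream = client.beta.threads.runs.create_streaming(
      "thread_abc123",
      assistant_id: "asst_abc123"
    )
    stream.each do |event|
      # Dispatch on the event type to render partial output incrementally.
      puts event.class
    end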
@@ -208,7 +208,7 @@ def cancel(run_id, params) ) end - # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming} for # streaming counterpart. # # When a run has the `status: "requires_action"` and `required_action.type` is @@ -229,7 +229,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case." + message = "Please use `#submit_tool_outputs_streaming` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -253,7 +253,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload submit_tool_outputs_streaming(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -263,7 +263,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def submit_tool_outputs_stream_raw(run_id, params) + def submit_tool_outputs_streaming(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 499c9f52..c1d28d4a 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -7,7 +7,8 @@ class Completions # @return [OpenAI::Resources::Chat::Completions::Messages] attr_reader :messages - # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Chat::Completions#create_streaming} for streaming + # counterpart. # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take @@ -67,7 +68,7 @@ class Completions def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_streaming` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -99,7 +100,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
# - # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload create_streaming(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] # @param model [String, Symbol, OpenAI::Models::ChatModel] @@ -136,7 +137,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams - def stream_raw(params) + def create_streaming(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb new file mode 100644 index 00000000..070c5958 --- /dev/null +++ b/lib/openai/resources/evals.rb @@ -0,0 +1,133 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Evals + # @return [OpenAI::Resources::Evals::Runs] + attr_reader :runs + + # Create the structure of an evaluation that can be used to test a model's + # performance. An evaluation is a set of testing criteria and a datasource. After + # creating an evaluation, you can run it on different models and model parameters. + # We support several types of graders and datasources. For more information, see + # the [Evals guide](https://platform.openai.com/docs/guides/evals). + # + # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) + # + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + # @param testing_criteria [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::EvalCreateResponse] + # + # @see OpenAI::Models::EvalCreateParams + def create(params) + parsed, options = OpenAI::Models::EvalCreateParams.dump_request(params) + @client.request( + method: :post, + path: "evals", + body: parsed, + model: OpenAI::Models::EvalCreateResponse, + options: options + ) + end + + # Get an evaluation by ID. 
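+ #
+ # A minimal usage sketch (the `client` and the eval ID below are assumed
+ # for illustration, not taken from this change):
+ #
+ #   ev = client.evals.retrieve("eval_abc123")
+ #   puts ev.name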
+ # + # @overload retrieve(eval_id, request_options: {}) + # + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::EvalRetrieveResponse] + # + # @see OpenAI::Models::EvalRetrieveParams + def retrieve(eval_id, params = {}) + @client.request( + method: :get, + path: ["evals/%1$s", eval_id], + model: OpenAI::Models::EvalRetrieveResponse, + options: params[:request_options] + ) + end + + # Update certain properties of an evaluation. + # + # @overload update(eval_id, metadata: nil, name: nil, request_options: {}) + # + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::EvalUpdateResponse] + # + # @see OpenAI::Models::EvalUpdateParams + def update(eval_id, params = {}) + parsed, options = OpenAI::Models::EvalUpdateParams.dump_request(params) + @client.request( + method: :post, + path: ["evals/%1$s", eval_id], + body: parsed, + model: OpenAI::Models::EvalUpdateResponse, + options: options + ) + end + + # List evaluations for a project. + # + # @overload list(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) + # + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::EvalListParams + def list(params = {}) + parsed, options = OpenAI::Models::EvalListParams.dump_request(params) + @client.request( + method: :get, + path: "evals", + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::EvalListResponse, + options: options + ) + end + + # Delete an evaluation. + # + # @overload delete(eval_id, request_options: {}) + # + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::EvalDeleteResponse] + # + # @see OpenAI::Models::EvalDeleteParams + def delete(eval_id, params = {}) + @client.request( + method: :delete, + path: ["evals/%1$s", eval_id], + model: OpenAI::Models::EvalDeleteResponse, + options: params[:request_options] + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @runs = OpenAI::Resources::Evals::Runs.new(client: client) + end + end + end +end diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb new file mode 100644 index 00000000..ce05aca8 --- /dev/null +++ b/lib/openai/resources/evals/runs.rb @@ -0,0 +1,145 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Evals + class Runs + # @return [OpenAI::Resources::Evals::Runs::OutputItems] + attr_reader :output_items + + # Create a new evaluation run. This is the endpoint that will kick off grading. 
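+ #
+ # A hedged sketch of kicking off a run; the IDs and the exact `data_source`
+ # hash shape are illustrative only (see
+ # OpenAI::Models::Evals::CreateEvalJSONLRunDataSource for the real fields):
+ #
+ #   run = client.evals.runs.create(
+ #     "eval_abc123",
+ #     data_source: {type: :jsonl, source: {type: :file_id, id: "file-abc123"}},
+ #     name: "nightly-regression"
+ #   )
+ #   puts run.status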
+ # + # @overload create(eval_id, data_source:, metadata: nil, name: nil, request_options: {}) + # + # @param eval_id [String] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Evals::RunCreateResponse] + # + # @see OpenAI::Models::Evals::RunCreateParams + def create(eval_id, params) + parsed, options = OpenAI::Models::Evals::RunCreateParams.dump_request(params) + @client.request( + method: :post, + path: ["evals/%1$s/runs", eval_id], + body: parsed, + model: OpenAI::Models::Evals::RunCreateResponse, + options: options + ) + end + + # Get an evaluation run by ID. + # + # @overload retrieve(run_id, eval_id:, request_options: {}) + # + # @param run_id [String] + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse] + # + # @see OpenAI::Models::Evals::RunRetrieveParams + def retrieve(run_id, params) + parsed, options = OpenAI::Models::Evals::RunRetrieveParams.dump_request(params) + eval_id = + parsed.delete(:eval_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["evals/%1$s/runs/%2$s", eval_id, run_id], + model: OpenAI::Models::Evals::RunRetrieveResponse, + options: options + ) + end + + # Get a list of runs for an evaluation. + # + # @overload list(eval_id, after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # + # @param eval_id [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::Evals::RunListParams + def list(eval_id, params = {}) + parsed, options = OpenAI::Models::Evals::RunListParams.dump_request(params) + @client.request( + method: :get, + path: ["evals/%1$s/runs", eval_id], + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::Evals::RunListResponse, + options: options + ) + end + + # Delete an eval run. + # + # @overload delete(run_id, eval_id:, request_options: {}) + # + # @param run_id [String] + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Evals::RunDeleteResponse] + # + # @see OpenAI::Models::Evals::RunDeleteParams + def delete(run_id, params) + parsed, options = OpenAI::Models::Evals::RunDeleteParams.dump_request(params) + eval_id = + parsed.delete(:eval_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :delete, + path: ["evals/%1$s/runs/%2$s", eval_id, run_id], + model: OpenAI::Models::Evals::RunDeleteResponse, + options: options + ) + end + + # Cancel an ongoing evaluation run. 
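+ #
+ # For example (IDs are placeholders):
+ #
+ #   client.evals.runs.cancel("run_abc123", eval_id: "eval_abc123")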
+ # + # @overload cancel(run_id, eval_id:, request_options: {}) + # + # @param run_id [String] + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Evals::RunCancelResponse] + # + # @see OpenAI::Models::Evals::RunCancelParams + def cancel(run_id, params) + parsed, options = OpenAI::Models::Evals::RunCancelParams.dump_request(params) + eval_id = + parsed.delete(:eval_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :post, + path: ["evals/%1$s/runs/%2$s", eval_id, run_id], + model: OpenAI::Models::Evals::RunCancelResponse, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @output_items = OpenAI::Resources::Evals::Runs::OutputItems.new(client: client) + end + end + end + end +end diff --git a/lib/openai/resources/evals/runs/output_items.rb b/lib/openai/resources/evals/runs/output_items.rb new file mode 100644 index 00000000..eb6914ba --- /dev/null +++ b/lib/openai/resources/evals/runs/output_items.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Evals + class Runs + class OutputItems + # Get an evaluation run output item by ID. + # + # @overload retrieve(output_item_id, eval_id:, run_id:, request_options: {}) + # + # @param output_item_id [String] + # @param eval_id [String] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse] + # + # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveParams + def retrieve(output_item_id, params) + parsed, options = OpenAI::Models::Evals::Runs::OutputItemRetrieveParams.dump_request(params) + eval_id = + parsed.delete(:eval_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + run_id = + parsed.delete(:run_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["evals/%1$s/runs/%2$s/output_items/%3$s", eval_id, run_id, output_item_id], + model: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse, + options: options + ) + end + + # Get a list of output items for an evaluation run. 
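+ #
+ # A usage sketch (IDs are placeholders, and this assumes the returned
+ # CursorPage exposes `auto_paging_each`, as pages do elsewhere in this
+ # library):
+ #
+ #   page = client.evals.runs.output_items.list(
+ #     "run_abc123",
+ #     eval_id: "eval_abc123",
+ #     status: :fail
+ #   )
+ #   page.auto_paging_each { |item| puts item.id }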
+ # + # @overload list(run_id, eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # + # @param run_id [String] + # @param eval_id [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::Evals::Runs::OutputItemListParams + def list(run_id, params) + parsed, options = OpenAI::Models::Evals::Runs::OutputItemListParams.dump_request(params) + eval_id = + parsed.delete(:eval_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["evals/%1$s/runs/%2$s/output_items", eval_id, run_id], + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::Evals::Runs::OutputItemListResponse, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end + end +end diff --git a/lib/openai/resources/fine_tuning.rb b/lib/openai/resources/fine_tuning.rb index c2f263c6..051803a7 100644 --- a/lib/openai/resources/fine_tuning.rb +++ b/lib/openai/resources/fine_tuning.rb @@ -6,12 +6,16 @@ class FineTuning # @return [OpenAI::Resources::FineTuning::Jobs] attr_reader :jobs + # @return [OpenAI::Resources::FineTuning::Checkpoints] + attr_reader :checkpoints + # @api private # # @param client [OpenAI::Client] def initialize(client:) @client = client @jobs = OpenAI::Resources::FineTuning::Jobs.new(client: client) + @checkpoints = OpenAI::Resources::FineTuning::Checkpoints.new(client: client) end end end diff --git a/lib/openai/resources/fine_tuning/checkpoints.rb b/lib/openai/resources/fine_tuning/checkpoints.rb new file mode 100644 index 00000000..617521cc --- /dev/null +++ b/lib/openai/resources/fine_tuning/checkpoints.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class FineTuning + class Checkpoints + # @return [OpenAI::Resources::FineTuning::Checkpoints::Permissions] + attr_reader :permissions + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @permissions = OpenAI::Resources::FineTuning::Checkpoints::Permissions.new(client: client) + end + end + end + end +end diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb new file mode 100644 index 00000000..47a1d9b0 --- /dev/null +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -0,0 +1,94 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class FineTuning + class Checkpoints + class Permissions + # **NOTE:** Calling this endpoint requires an [admin API key](../admin-api-keys). + # + # This enables organization owners to share fine-tuned models with other projects + # in their organization. 
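+ #
+ # A sketch of granting access (the checkpoint and project IDs are
+ # placeholders):
+ #
+ #   client.fine_tuning.checkpoints.permissions.create(
+ #     "ft:gpt-4o-mini-2024-07-18:org::ckpt-step-100",
+ #     project_ids: ["proj_abc123"]
+ #   )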
+ # + # @overload create(fine_tuned_model_checkpoint, project_ids:, request_options: {}) + # + # @param fine_tuned_model_checkpoint [String] + # @param project_ids [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::Page] + # + # @see OpenAI::Models::FineTuning::Checkpoints::PermissionCreateParams + def create(fine_tuned_model_checkpoint, params) + parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionCreateParams.dump_request(params) + @client.request( + method: :post, + path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], + body: parsed, + page: OpenAI::Internal::Page, + model: OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse, + options: options + ) + end + + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). + # + # Organization owners can use this endpoint to view all permissions for a + # fine-tuned model checkpoint. + # + # @overload retrieve(fine_tuned_model_checkpoint, after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) + # + # @param fine_tuned_model_checkpoint [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] + # @param project_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] + # + # @see OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams + def retrieve(fine_tuned_model_checkpoint, params = {}) + parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams.dump_request(params) + @client.request( + method: :get, + path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], + query: parsed, + model: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse, + options: options + ) + end + + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). + # + # Organization owners can use this endpoint to delete a permission for a + # fine-tuned model checkpoint. + # + # @overload delete(fine_tuned_model_checkpoint, request_options: {}) + # + # @param fine_tuned_model_checkpoint [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse] + # + # @see OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams + def delete(fine_tuned_model_checkpoint, params = {}) + @client.request( + method: :delete, + path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], + model: OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse, + options: params[:request_options] + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end + end +end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index e77901b4..bb369eb8 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -6,7 +6,7 @@ class Responses # @return [OpenAI::Resources::Responses::InputItems] attr_reader :input_items - # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Responses#create_streaming} for streaming counterpart. # # Creates a model response. 
Provide # [text](https://platform.openai.com/docs/guides/text) or @@ -23,7 +23,7 @@ class Responses # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] # @param include [Array, nil] # @param instructions [String, nil] # @param max_output_tokens [Integer, nil] @@ -47,7 +47,7 @@ class Responses def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#create_streaming` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -73,10 +73,10 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create_streaming(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] # @param include [Array, nil] # @param instructions [String, nil] # @param max_output_tokens [Integer, nil] @@ -97,7 +97,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams - def stream_raw(params) + def create_streaming(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." 
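A minimal usage sketch for the two Responses entry points above, assuming a
configured client (`openai = OpenAI::Client.new`); the model name and input are
placeholders, and the stream is assumed to be enumerable per the
`OpenAI::Internal::Stream` return type:

    # Non-streaming: returns a single response object.
    response = openai.responses.create(input: "Say hello", model: "gpt-4o")

    # Streaming: returns an OpenAI::Internal::Stream of events; passing
    # `stream: true` to #create raises ArgumentError instead.
    openai.responses.create_streaming(input: "Say hello", model: "gpt-4o").each do |event|
      puts event
    end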
diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 8578bd99..3886f208 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -61,6 +61,9 @@ module OpenAI sig { returns(OpenAI::Resources::Responses) } attr_reader :responses + sig { returns(OpenAI::Resources::Evals) } + attr_reader :evals + # @api private sig { override.returns(T::Hash[String, String]) } private def auth_headers; end diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index aaded68e..271f8306 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -5,17 +5,32 @@ module OpenAI module AllModels extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::OrSymbol]) } - def self.variants; end + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::TaggedSymbol) } + O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + O1_PRO_2025_03_19 = + T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + COMPUTER_USE_PREVIEW = + T.let(:"computer-use-preview", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) - O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::TaggedSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::TaggedSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::AllModels::TaggedSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::TaggedSymbol) + sig { override.returns(T::Array[OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol]) } + def self.values; end + end + + sig do + override + .returns( + [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::OrSymbol] + ) + end + def self.variants; end end end end diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/lib/openai/models/eval_create_params.rbi new file mode 100644 index 00000000..c35b54fa --- /dev/null +++ b/rbi/lib/openai/models/eval_create_params.rbi @@ -0,0 +1,688 @@ +# typed: strong + +module OpenAI + module Models + class EvalCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # The configuration for the data source used for the evaluation runs. + sig do + returns( + T.any( + OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + ) + ) + end + attr_accessor :data_source_config + + # A list of graders for all eval runs in this group. 
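+      #
+      # For example, a hypothetical string-check criterion as a plain hash (field
+      # names follow OpenAI::Models::EvalStringCheckGrader below; the template
+      # strings are placeholders):
+      #
+      #   {
+      #     type: :string_check,
+      #     name: "exact match",
+      #     input: "{{sample.output_text}}",
+      #     operation: :eq,
+      #     reference: "{{item.expected}}"
+      #   }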
+ sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + ) + end + attr_accessor :testing_criteria + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the evaluation. + sig { returns(T.nilable(String)) } + attr_reader :name + + sig { params(name: String).void } + attr_writer :name + + # Indicates whether the evaluation is shared with OpenAI. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :share_with_openai + + sig { params(share_with_openai: T::Boolean).void } + attr_writer :share_with_openai + + sig do + params( + data_source_config: T.any( + OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + ), + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new( + data_source_config:, + testing_criteria:, + metadata: nil, + name: nil, + share_with_openai: nil, + request_options: {} + ); end + sig do + override + .returns( + { + data_source_config: T.any( + OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + ), + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # The configuration for the data source used for the evaluation runs. + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + class Custom < OpenAI::Internal::Type::BaseModel + # The json schema for the run data source items. + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item_schema + + # The type of data source. Always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # Whether to include the sample schema in the data source. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :include_sample_schema + + sig { params(include_sample_schema: T::Boolean).void } + attr_writer :include_sample_schema + + # A CustomDataSourceConfig object that defines the schema for the data source used + # for the evaluation runs. 
This schema is used to define the shape of the data + # that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + sig do + params(item_schema: T::Hash[Symbol, T.anything], include_sample_schema: T::Boolean, type: Symbol) + .returns(T.attached_class) + end + def self.new(item_schema:, include_sample_schema: nil, type: :custom); end + + sig do + override + .returns({ + item_schema: T::Hash[Symbol, T.anything], + type: Symbol, + include_sample_schema: T::Boolean + }) + end + def to_hash; end + end + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + # The type of data source. Always `stored_completions`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A data source config which specifies the metadata property of your stored + # completions query. This is usually metadata like `usecase=chatbot` or + # `prompt-version=v2`, etc. + sig { params(metadata: T.nilable(T::Hash[Symbol, String]), type: Symbol).returns(T.attached_class) } + def self.new(metadata: nil, type: :stored_completions); end + + sig { override.returns({type: Symbol, metadata: T.nilable(T::Hash[Symbol, String])}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + ) + end + def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + class LabelModel < OpenAI::Internal::Type::BaseModel + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + ) + ] + ) + end + attr_accessor :input + + # The labels to classify to each item in the evaluation. + sig { returns(T::Array[String]) } + attr_accessor :labels + + # The model to use for the evaluation. Must support structured outputs. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The labels that indicate a passing result. Must be a subset of labels. + sig { returns(T::Array[String]) } + attr_accessor :passing_labels + + # The object type, which is always `label_model`. + sig { returns(Symbol) } + attr_accessor :type + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. 
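+        #
+        # A hypothetical construction, sketched with placeholder labels and model
+        # (note `passing_labels` must be a subset of `labels`):
+        #
+        #   OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel.new(
+        #     input: [{content: "{{item.question}}", role: "user"}],
+        #     labels: ["correct", "incorrect"],
+        #     passing_labels: ["correct"],
+        #     model: "gpt-4o",
+        #     name: "accuracy grader"
+        #   )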
+ sig do + params( + input: T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + ) + ], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(input:, labels:, model:, name:, passing_labels:, type: :label_model); end + + sig do + override + .returns( + { + input: T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + ) + ], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + } + ) + end + def to_hash; end + + module Input + extend OpenAI::Internal::Type::Union + + class SimpleInputMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class InputMessage < OpenAI::Internal::Type::BaseModel + sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content) } + attr_reader :content + + sig do + params( + content: T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :content + + # The role of the message. One of `user`, `system`, or `developer`. + sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of item, which is always `message`. + sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + content: T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, + OpenAI::Internal::AnyHash + ), + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type:); end + + sig do + override + .returns( + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. + sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `input_text`. 
+ sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol + } + ) + end + def to_hash; end + + # The type of content, which is always `input_text`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol + ) + end + + INPUT_TEXT = + T.let( + :input_text, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + ) + end + + USER = + T.let( + :user, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of item, which is always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol + ) + end + + MESSAGE = + T.let( + :message, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + + class OutputMessage < OpenAI::Internal::Type::BaseModel + sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content) } + attr_reader :content + + sig do + params( + content: T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :content + + # The role of the message. Must be `assistant` for output. + sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of item, which is always `message`. + sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + content: T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, + OpenAI::Internal::AnyHash + ), + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type:); end + + sig do + override + .returns( + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. + sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `output_text`. + sig do + returns( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol + } + ) + end + def to_hash; end + + # The type of content, which is always `output_text`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol + ) + end + + OUTPUT_TEXT = + T.let( + :output_text, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + # The role of the message. Must be `assistant` for output. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol + ) + end + + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of item, which is always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol + ) + end + + MESSAGE = + T.let( + :message, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage] + ) + end + def self.variants; end + end + end + + sig do + override + .returns( + [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + ) + end + def self.variants; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_create_response.rbi b/rbi/lib/openai/models/eval_create_response.rbi new file mode 100644 index 00000000..c35f9e03 --- /dev/null +++ b/rbi/lib/openai/models/eval_create_response.rbi @@ -0,0 +1,148 @@ +# typed: strong + +module OpenAI + module Models + class EvalCreateResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the eval was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Configuration of data sources used in runs of the evaluation. 
+ sig do + returns( + T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + ) + end + attr_accessor :data_source_config + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the evaluation. + sig { returns(String) } + attr_accessor :name + + # The object type. + sig { returns(Symbol) } + attr_accessor :object + + # Indicates whether the evaluation is shared with OpenAI. + sig { returns(T::Boolean) } + attr_accessor :share_with_openai + + # A list of testing criteria. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + ) + end + attr_accessor :testing_criteria + + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + sig do + params( + id: String, + created_at: Integer, + data_source_config: T.any( + OpenAI::Models::EvalCustomDataSourceConfig, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source_config:, + metadata:, + name:, + share_with_openai:, + testing_criteria:, + object: :eval + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + } + ) + end + def to_hash; end + + # Configuration of data sources used in runs of the evaluation. + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + ) + end + def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. 
+ module TestingCriterion + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + ) + end + def self.variants; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_custom_data_source_config.rbi b/rbi/lib/openai/models/eval_custom_data_source_config.rbi new file mode 100644 index 00000000..c8d1dcea --- /dev/null +++ b/rbi/lib/openai/models/eval_custom_data_source_config.rbi @@ -0,0 +1,28 @@ +# typed: strong + +module OpenAI + module Models + class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # A CustomDataSourceConfig which specifies the schema of your `item` and + # optionally `sample` namespaces. The response schema defines the shape of the + # data that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + sig { params(schema: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } + def self.new(schema:, type: :custom); end + + sig { override.returns({schema: T::Hash[Symbol, T.anything], type: Symbol}) } + def to_hash; end + end + end +end diff --git a/rbi/lib/openai/models/eval_delete_params.rbi b/rbi/lib/openai/models/eval_delete_params.rbi new file mode 100644 index 00000000..14cbeaac --- /dev/null +++ b/rbi/lib/openai/models/eval_delete_params.rbi @@ -0,0 +1,23 @@ +# typed: strong + +module OpenAI + module Models + class EvalDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig do + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) + end + def self.new(request_options: {}); end + + sig { override.returns({request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end +end diff --git a/rbi/lib/openai/models/eval_delete_response.rbi b/rbi/lib/openai/models/eval_delete_response.rbi new file mode 100644 index 00000000..72e1edf4 --- /dev/null +++ b/rbi/lib/openai/models/eval_delete_response.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Models + class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel + sig { returns(T::Boolean) } + attr_accessor :deleted + + sig { returns(String) } + attr_accessor :eval_id + + sig { returns(String) } + attr_accessor :object + + sig { params(deleted: T::Boolean, eval_id: String, object: String).returns(T.attached_class) } + def self.new(deleted:, eval_id:, object:); end + + sig { override.returns({deleted: T::Boolean, eval_id: String, object: String}) } + def to_hash; end + end + end +end diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/lib/openai/models/eval_label_model_grader.rbi new file mode 100644 index 00000000..af492766 --- /dev/null +++ b/rbi/lib/openai/models/eval_label_model_grader.rbi @@ -0,0 +1,325 @@ +# typed: strong + +module OpenAI + module Models + class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader::Input::Assistant, + OpenAI::Models::EvalLabelModelGrader::Input::InputMessage + ) + ] + 
) + end + attr_accessor :input + + # The labels to assign to each item in the evaluation. + sig { returns(T::Array[String]) } + attr_accessor :labels + + # The model to use for the evaluation. Must support structured outputs. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The labels that indicate a passing result. Must be a subset of labels. + sig { returns(T::Array[String]) } + attr_accessor :passing_labels + + # The object type, which is always `label_model`. + sig { returns(Symbol) } + attr_accessor :type + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + sig do + params( + input: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader::Input::Assistant, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalLabelModelGrader::Input::InputMessage + ) + ], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(input:, labels:, model:, name:, passing_labels:, type: :label_model); end + + sig do + override + .returns( + { + input: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader::Input::Assistant, + OpenAI::Models::EvalLabelModelGrader::Input::InputMessage + ) + ], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + } + ) + end + def to_hash; end + + # An item can either be an input message or an output message. + module Input + extend OpenAI::Internal::Type::Union + + class InputMessage < OpenAI::Internal::Type::BaseModel + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content) } + attr_reader :content + + sig do + params( + content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :content + + # The role of the message. One of `user`, `system`, or `developer`. + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) } + attr_accessor :role + + # The type of item, which is always `message`. + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) } + attr_accessor :type + + sig do + params( + content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, OpenAI::Internal::AnyHash), + role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::OrSymbol, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type:); end + + sig do + override + .returns( + { + content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, + role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. + sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `input_text`. 
+ sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) } + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + {text: String, type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol} + ) + end + def to_hash; end + + # The type of content, which is always `input_text`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol + ) + end + + INPUT_TEXT = + T.let(:input_text, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) + + sig do + override + .returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol]) + end + def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) } + + USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) + DEVELOPER = + T.let(:developer, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol]) } + def self.values; end + end + + # The type of item, which is always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) } + + MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol]) } + def self.values; end + end + end + + class Assistant < OpenAI::Internal::Type::BaseModel + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content) } + attr_reader :content + + sig do + params( + content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :content + + # The role of the message. Must be `assistant` for output. + sig { returns(Symbol) } + attr_accessor :role + + # The type of item, which is always `message`. 
+ sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) } + attr_accessor :type + + sig do + params( + content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, OpenAI::Internal::AnyHash), + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::OrSymbol, + role: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type:, role: :assistant); end + + sig do + override + .returns( + { + content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, + role: Symbol, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. + sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `output_text`. + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) } + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + {text: String, type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol} + ) + end + def to_hash; end + + # The type of content, which is always `output_text`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) } + + OUTPUT_TEXT = + T.let(:output_text, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) + + sig do + override + .returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol]) + end + def self.values; end + end + end + + # The type of item, which is always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) } + + MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol]) } + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage] + ) + end + def self.variants; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_list_params.rbi b/rbi/lib/openai/models/eval_list_params.rbi new file mode 100644 index 00000000..bb813db2 --- /dev/null +++ b/rbi/lib/openai/models/eval_list_params.rbi @@ -0,0 +1,96 @@ +# typed: strong + +module OpenAI + module Models + class EvalListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # Identifier for the last eval from the previous pagination request. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # Number of evals to retrieve. 
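+      #
+      # (For instance, a hypothetical call capped at 20 evals, newest first,
+      # assuming the client exposes `evals.list`:
+      # `openai.evals.list(limit: 20, order: :desc)`.)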
+ sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + sig { returns(T.nilable(OpenAI::Models::EvalListParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::Models::EvalListParams::Order::OrSymbol).void } + attr_writer :order + + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. + sig { returns(T.nilable(OpenAI::Models::EvalListParams::OrderBy::OrSymbol)) } + attr_reader :order_by + + sig { params(order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol).void } + attr_writer :order_by + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::Order::OrSymbol, + order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}); end + + sig do + override + .returns( + { + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::Order::OrSymbol, + order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::Order::TaggedSymbol) } + + ASC = T.let(:asc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::EvalListParams::Order::TaggedSymbol]) } + def self.values; end + end + + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. + module OrderBy + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::OrderBy) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) } + + CREATED_AT = T.let(:created_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) + UPDATED_AT = T.let(:updated_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol]) } + def self.values; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_list_response.rbi b/rbi/lib/openai/models/eval_list_response.rbi new file mode 100644 index 00000000..7ea59a6c --- /dev/null +++ b/rbi/lib/openai/models/eval_list_response.rbi @@ -0,0 +1,148 @@ +# typed: strong + +module OpenAI + module Models + class EvalListResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the eval was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Configuration of data sources used in runs of the evaluation. 
+ sig do + returns( + T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + ) + end + attr_accessor :data_source_config + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the evaluation. + sig { returns(String) } + attr_accessor :name + + # The object type. + sig { returns(Symbol) } + attr_accessor :object + + # Indicates whether the evaluation is shared with OpenAI. + sig { returns(T::Boolean) } + attr_accessor :share_with_openai + + # A list of testing criteria. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + ) + end + attr_accessor :testing_criteria + + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + sig do + params( + id: String, + created_at: Integer, + data_source_config: T.any( + OpenAI::Models::EvalCustomDataSourceConfig, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source_config:, + metadata:, + name:, + share_with_openai:, + testing_criteria:, + object: :eval + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + } + ) + end + def to_hash; end + + # Configuration of data sources used in runs of the evaluation. + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + ) + end + def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. 
+ module TestingCriterion + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + ) + end + def self.variants; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_retrieve_params.rbi b/rbi/lib/openai/models/eval_retrieve_params.rbi new file mode 100644 index 00000000..33cfd174 --- /dev/null +++ b/rbi/lib/openai/models/eval_retrieve_params.rbi @@ -0,0 +1,23 @@ +# typed: strong + +module OpenAI + module Models + class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig do + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) + end + def self.new(request_options: {}); end + + sig { override.returns({request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end +end diff --git a/rbi/lib/openai/models/eval_retrieve_response.rbi b/rbi/lib/openai/models/eval_retrieve_response.rbi new file mode 100644 index 00000000..721a617e --- /dev/null +++ b/rbi/lib/openai/models/eval_retrieve_response.rbi @@ -0,0 +1,148 @@ +# typed: strong + +module OpenAI + module Models + class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the eval was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Configuration of data sources used in runs of the evaluation. + sig do + returns( + T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + ) + end + attr_accessor :data_source_config + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the evaluation. + sig { returns(String) } + attr_accessor :name + + # The object type. + sig { returns(Symbol) } + attr_accessor :object + + # Indicates whether the evaluation is shared with OpenAI. + sig { returns(T::Boolean) } + attr_accessor :share_with_openai + + # A list of testing criteria. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + ) + end + attr_accessor :testing_criteria + + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. 
Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + sig do + params( + id: String, + created_at: Integer, + data_source_config: T.any( + OpenAI::Models::EvalCustomDataSourceConfig, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source_config:, + metadata:, + name:, + share_with_openai:, + testing_criteria:, + object: :eval + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + share_with_openai: T::Boolean, + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + } + ) + end + def to_hash; end + + # Configuration of data sources used in runs of the evaluation. + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + ) + end + def self.variants; end + end + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + module TestingCriterion + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + ) + end + def self.variants; end + end + end + end +end diff --git a/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi new file mode 100644 index 00000000..cfc531e0 --- /dev/null +++ b/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi @@ -0,0 +1,50 @@ +# typed: strong + +module OpenAI + module Models + class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `stored_completions`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A StoredCompletionsDataSourceConfig which specifies the metadata property of + # your stored completions query. This is usually metadata like `usecase=chatbot` + # or `prompt-version=v2`, etc. 
The schema returned by this data source config is
+      # used to define what variables are available in your evals. `item` and `sample`
+      # are both defined when using this data source config.
+      sig do
+        params(
+          schema: T::Hash[Symbol, T.anything],
+          metadata: T.nilable(T::Hash[Symbol, String]),
+          type: Symbol
+        )
+          .returns(T.attached_class)
+      end
+      def self.new(schema:, metadata: nil, type: :stored_completions); end
+
+      sig do
+        override
+          .returns({
+                     schema: T::Hash[Symbol, T.anything],
+                     type: Symbol,
+                     metadata: T.nilable(T::Hash[Symbol, String])
+                   })
+      end
+      def to_hash; end
+    end
+  end
+end
diff --git a/rbi/lib/openai/models/eval_string_check_grader.rbi b/rbi/lib/openai/models/eval_string_check_grader.rbi
new file mode 100644
index 00000000..6a7e26c6
--- /dev/null
+++ b/rbi/lib/openai/models/eval_string_check_grader.rbi
@@ -0,0 +1,72 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel
+      # The input text. This may include template strings.
+      sig { returns(String) }
+      attr_accessor :input
+
+      # The name of the grader.
+      sig { returns(String) }
+      attr_accessor :name
+
+      # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`.
+      sig { returns(OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol) }
+      attr_accessor :operation
+
+      # The reference text. This may include template strings.
+      sig { returns(String) }
+      attr_accessor :reference
+
+      # The object type, which is always `string_check`.
+      sig { returns(Symbol) }
+      attr_accessor :type
+
+      # A StringCheckGrader object that performs a string comparison between input and
+      # reference using a specified operation.
+      sig do
+        params(
+          input: String,
+          name: String,
+          operation: OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol,
+          reference: String,
+          type: Symbol
+        )
+          .returns(T.attached_class)
+      end
+      def self.new(input:, name:, operation:, reference:, type: :string_check); end
+
+      sig do
+        override
+          .returns(
+            {
+              input: String,
+              name: String,
+              operation: OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol,
+              reference: String,
+              type: Symbol
+            }
+          )
+      end
+      def to_hash; end
+
+      # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`.
+      module Operation
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalStringCheckGrader::Operation) }
+        OrSymbol =
+          T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) }
+
+        EQ = T.let(:eq, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol)
+        NE = T.let(:ne, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol)
+        LIKE = T.let(:like, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol)
+        ILIKE = T.let(:ilike, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol)
+
+        sig { override.returns(T::Array[OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol]) }
+        def self.values; end
+      end
+    end
+  end
+end
diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/lib/openai/models/eval_text_similarity_grader.rbi
new file mode 100644
index 00000000..13ac3bcc
--- /dev/null
+++ b/rbi/lib/openai/models/eval_text_similarity_grader.rbi
@@ -0,0 +1,98 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel
+      # The evaluation metric to use. 
One of `cosine`, `fuzzy_match`, `bleu`, `gleu`,
+      # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`.
+      sig { returns(OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol) }
+      attr_accessor :evaluation_metric
+
+      # The text being graded.
+      sig { returns(String) }
+      attr_accessor :input
+
+      # A float score; a value greater than or equal to this indicates a passing grade.
+      sig { returns(Float) }
+      attr_accessor :pass_threshold
+
+      # The text being graded against.
+      sig { returns(String) }
+      attr_accessor :reference
+
+      # The type of grader.
+      sig { returns(Symbol) }
+      attr_accessor :type
+
+      # The name of the grader.
+      sig { returns(T.nilable(String)) }
+      attr_reader :name
+
+      sig { params(name: String).void }
+      attr_writer :name
+
+      # A TextSimilarityGrader object which grades text based on similarity metrics.
+      sig do
+        params(
+          evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol,
+          input: String,
+          pass_threshold: Float,
+          reference: String,
+          name: String,
+          type: Symbol
+        )
+          .returns(T.attached_class)
+      end
+      def self.new(
+        evaluation_metric:,
+        input:,
+        pass_threshold:,
+        reference:,
+        name: nil,
+        type: :text_similarity
+      )
+      end
+
+      sig do
+        override
+          .returns(
+            {
+              evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol,
+              input: String,
+              pass_threshold: Float,
+              reference: String,
+              type: Symbol,
+              name: String
+            }
+          )
+      end
+      def to_hash; end
+
+      # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`,
+      # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`.
+      module EvaluationMetric
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric) }
+        OrSymbol =
+          T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) }
+
+        FUZZY_MATCH =
+          T.let(:fuzzy_match, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        BLEU = T.let(:bleu, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        GLEU = T.let(:gleu, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        METEOR = T.let(:meteor, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_1 = T.let(:rouge_1, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_2 = T.let(:rouge_2, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_3 = T.let(:rouge_3, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_4 = T.let(:rouge_4, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_5 = T.let(:rouge_5, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        ROUGE_L = T.let(:rouge_l, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+        COSINE = T.let(:cosine, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol)
+
+        sig { override.returns(T::Array[OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol]) }
+        def self.values; end
+      end
+    end
+  end
+end
diff --git a/rbi/lib/openai/models/eval_update_params.rbi b/rbi/lib/openai/models/eval_update_params.rbi
new file mode 100644
index 00000000..86ef04e1
--- /dev/null
+++ b/rbi/lib/openai/models/eval_update_params.rbi
@@ -0,0 +1,48 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class EvalUpdateParams < 
OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # Rename the evaluation. + sig { returns(T.nilable(String)) } + attr_reader :name + + sig { params(name: String).void } + attr_writer :name + + sig do + params( + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(metadata: nil, name: nil, request_options: {}); end + + sig do + override + .returns( + { + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + end + end +end diff --git a/rbi/lib/openai/models/eval_update_response.rbi b/rbi/lib/openai/models/eval_update_response.rbi new file mode 100644 index 00000000..0025fbad --- /dev/null +++ b/rbi/lib/openai/models/eval_update_response.rbi @@ -0,0 +1,148 @@ +# typed: strong + +module OpenAI + module Models + class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the eval was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Configuration of data sources used in runs of the evaluation. + sig do + returns( + T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + ) + end + attr_accessor :data_source_config + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the evaluation. + sig { returns(String) } + attr_accessor :name + + # The object type. + sig { returns(Symbol) } + attr_accessor :object + + # Indicates whether the evaluation is shared with OpenAI. + sig { returns(T::Boolean) } + attr_accessor :share_with_openai + + # A list of testing criteria. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::EvalLabelModelGrader, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ] + ) + end + attr_accessor :testing_criteria + + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. 
Like:
+      #
+      #   - Improve the quality of my chatbot
+      #   - See how well my chatbot handles customer support
+      #   - Check if o3-mini is better at my use case than gpt-4o
+      sig do
+        params(
+          id: String,
+          created_at: Integer,
+          data_source_config: T.any(
+            OpenAI::Models::EvalCustomDataSourceConfig,
+            OpenAI::Internal::AnyHash,
+            OpenAI::Models::EvalStoredCompletionsDataSourceConfig
+          ),
+          metadata: T.nilable(T::Hash[Symbol, String]),
+          name: String,
+          share_with_openai: T::Boolean,
+          testing_criteria: T::Array[
+            T.any(
+              OpenAI::Models::EvalLabelModelGrader,
+              OpenAI::Internal::AnyHash,
+              OpenAI::Models::EvalStringCheckGrader,
+              OpenAI::Models::EvalTextSimilarityGrader
+            )
+          ],
+          object: Symbol
+        )
+          .returns(T.attached_class)
+      end
+      def self.new(
+        id:,
+        created_at:,
+        data_source_config:,
+        metadata:,
+        name:,
+        share_with_openai:,
+        testing_criteria:,
+        object: :eval
+      ); end
+      sig do
+        override
+          .returns(
+            {
+              id: String,
+              created_at: Integer,
+              data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig),
+              metadata: T.nilable(T::Hash[Symbol, String]),
+              name: String,
+              object: Symbol,
+              share_with_openai: T::Boolean,
+              testing_criteria: T::Array[
+                T.any(
+                  OpenAI::Models::EvalLabelModelGrader,
+                  OpenAI::Models::EvalStringCheckGrader,
+                  OpenAI::Models::EvalTextSimilarityGrader
+                )
+              ]
+            }
+          )
+      end
+      def to_hash; end
+
+      # Configuration of data sources used in runs of the evaluation.
+      module DataSourceConfig
+        extend OpenAI::Internal::Type::Union
+
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
+            )
+        end
+        def self.variants; end
+      end
+
+      # A LabelModelGrader object which uses a model to assign labels to each item in
+      # the evaluation.
+      module TestingCriterion
+        extend OpenAI::Internal::Type::Union
+
+        sig do
+          override
+            .returns(
+              [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader]
+            )
+        end
+        def self.variants; end
+      end
+    end
+  end
+end
diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi
new file mode 100644
index 00000000..b8977938
--- /dev/null
+++ b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi
@@ -0,0 +1,833 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Evals
+      class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel
+        sig do
+          returns(
+            T.any(
+              OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template,
+              OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference
+            )
+          )
+        end
+        attr_accessor :input_messages
+
+        # The name of the model to use for generating completions (e.g. "o3-mini").
+        sig { returns(String) }
+        attr_accessor :model
+
+        # A StoredCompletionsRunDataSource configuration describing a set of filters.
+        sig do
+          returns(
+            T.any(
+              OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent,
+              OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID,
+              OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions
+            )
+          )
+        end
+        attr_accessor :source
+
+        # The type of run data source. Always `completions`.
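+        # For example, `type: :completions`.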
+ sig { returns(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol) } + attr_accessor :type + + sig { returns(T.nilable(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams)) } + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A CompletionsRunDataSource object describing a model sampling configuration. + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ), + model: String, + source: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + ), + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + sampling_params: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .returns(T.attached_class) + end + def self.new(input_messages:, model:, source:, type:, sampling_params: nil); end + + sig do + override + .returns( + { + input_messages: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ), + model: String, + source: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + ), + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + } + ) + end + def to_hash; end + + module InputMessages + extend OpenAI::Internal::Type::Union + + class Template < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: T::Array[ + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(template:, type: :template); end + + sig do + override + .returns( + { + template: T::Array[ + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + ) + ], + type: Symbol + } + ) + end + def to_hash; end + + module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class InputMessage < OpenAI::Internal::Type::BaseModel + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content + ) + end + attr_reader :content + + sig do + params( + content: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :content + + # The role of the message. One of `user`, `system`, or `developer`. + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of item, which is always `message`. + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + content: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, + OpenAI::Internal::AnyHash + ), + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type:); end + + sig do + override + .returns( + { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. 
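+                  # For example, `"Answer the following question: {{item.question}}"`
+                  # (the `item.question` variable here is illustrative).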
+ sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `input_text`. + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol + } + ) + end + def to_hash; end + + # The type of content, which is always `input_text`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol + ) + end + + INPUT_TEXT = + T.let( + :input_text, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + # The role of the message. One of `user`, `system`, or `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + ) + end + + USER = + T.let( + :user, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + ] + ) + end + def self.values; end + end + + # The type of item, which is always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol + ) + end + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + class OutputMessage < OpenAI::Internal::Type::BaseModel + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content + ) + end + attr_reader :content + + sig do + params( + content: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :content + + # The role of the message. Must be `assistant` for output. + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of item, which is always `message`. + sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + content: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, + OpenAI::Internal::AnyHash + ), + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type:); end + + sig do + override + .returns( + { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + # The text content. + sig { returns(String) } + attr_accessor :text + + # The type of content, which is always `output_text`. 
+ sig do + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + params( + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(text:, type:); end + + sig do + override + .returns( + { + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol + } + ) + end + def to_hash; end + + # The type of content, which is always `output_text`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol + ) + end + + OUTPUT_TEXT = + T.let( + :output_text, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + # The role of the message. Must be `assistant` for output. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol + ) + end + + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol + ] + ) + end + def self.values; end + end + + # The type of item, which is always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type) + end + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol + ) + end + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage] + ) + end + def self.variants; end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + def self.new(item_reference:, type: :item_reference); end + + sig { override.returns({item_reference: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + ) + end + def self.variants; end + end + + # A StoredCompletionsRunDataSource configuration describing a set of filters + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content]) } + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
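+            # As a rough sketch, an inline source might be built as follows (the
+            # item and sample values are illustrative):
+            #
+            #   OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent.new(
+            #     content: [
+            #       OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content.new(
+            #         item: {question: "What is 2 + 2?"},
+            #         sample: {answer: "4"}
+            #       )
+            #     ]
+            #   )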
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + # An optional Unix timestamp to filter items created after this time. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # An optional Unix timestamp to filter items created before this time. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # An optional maximum number of items to return. + sig { returns(T.nilable(Integer)) } + attr_accessor :limit + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # An optional model to filter by (e.g., 'gpt-4o'). + sig { returns(T.nilable(String)) } + attr_accessor :model + + # The type of source. Always `stored_completions`. 
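+            # As a rough sketch, a filter might be built as follows (values are
+            # illustrative; every keyword below is required even when `nil`):
+            #
+            #   OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions.new(
+            #     created_after: nil,
+            #     created_before: nil,
+            #     limit: 100,
+            #     metadata: {usecase: "chatbot"},
+            #     model: "gpt-4o"
+            #   )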
+            sig { returns(Symbol) }
+            attr_accessor :type
+
+            # A StoredCompletionsRunDataSource configuration describing a set of filters.
+            sig do
+              params(
+                created_after: T.nilable(Integer),
+                created_before: T.nilable(Integer),
+                limit: T.nilable(Integer),
+                metadata: T.nilable(T::Hash[Symbol, String]),
+                model: T.nilable(String),
+                type: Symbol
+              )
+                .returns(T.attached_class)
+            end
+            def self.new(
+              created_after:,
+              created_before:,
+              limit:,
+              metadata:,
+              model:,
+              type: :stored_completions
+            )
+            end
+
+            sig do
+              override
+                .returns(
+                  {
+                    created_after: T.nilable(Integer),
+                    created_before: T.nilable(Integer),
+                    limit: T.nilable(Integer),
+                    metadata: T.nilable(T::Hash[Symbol, String]),
+                    model: T.nilable(String),
+                    type: Symbol
+                  }
+                )
+            end
+            def to_hash; end
+          end
+
+          sig do
+            override
+              .returns(
+                [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions]
+              )
+          end
+          def self.variants; end
+        end
+
+        # The type of run data source. Always `completions`.
+        module Type
+          extend OpenAI::Internal::Type::Enum
+
+          TaggedSymbol =
+            T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) }
+          OrSymbol =
+            T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) }
+
+          COMPLETIONS =
+            T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol)
+
+          sig { override.returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol]) }
+          def self.values; end
+        end
+
+        class SamplingParams < OpenAI::Internal::Type::BaseModel
+          # The maximum number of tokens in the generated output.
+          sig { returns(T.nilable(Integer)) }
+          attr_reader :max_completion_tokens
+
+          sig { params(max_completion_tokens: Integer).void }
+          attr_writer :max_completion_tokens
+
+          # A seed value to initialize the randomness during sampling.
+          sig { returns(T.nilable(Integer)) }
+          attr_reader :seed
+
+          sig { params(seed: Integer).void }
+          attr_writer :seed
+
+          # A higher temperature increases randomness in the outputs.
+          sig { returns(T.nilable(Float)) }
+          attr_reader :temperature
+
+          sig { params(temperature: Float).void }
+          attr_writer :temperature
+
+          # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
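+          # For example, `top_p: 0.1` restricts sampling to the top 10% of the
+          # probability mass.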
+          sig { returns(T.nilable(Float)) }
+          attr_reader :top_p
+
+          sig { params(top_p: Float).void }
+          attr_writer :top_p
+
+          sig do
+            params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float)
+              .returns(T.attached_class)
+          end
+          def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end
+
+          sig do
+            override.returns(
+              {
+                max_completion_tokens: Integer,
+                seed: Integer,
+                temperature: Float,
+                top_p: Float
+              }
+            )
+          end
+          def to_hash; end
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi
new file mode 100644
index 00000000..12b3a3b6
--- /dev/null
+++ b/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi
@@ -0,0 +1,139 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Evals
+      class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel
+        sig do
+          returns(
+            T.any(
+              OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
+              OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID
+            )
+          )
+        end
+        attr_accessor :source
+
+        # The type of data source. Always `jsonl`.
+        sig { returns(Symbol) }
+        attr_accessor :type
+
+        # A JsonlRunDataSource object that specifies a JSONL file that matches the
+        # eval
+        sig do
+          params(
+            source: T.any(
+              OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
+              OpenAI::Internal::AnyHash,
+              OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID
+            ),
+            type: Symbol
+          )
+            .returns(T.attached_class)
+        end
+        def self.new(source:, type: :jsonl); end
+
+        sig do
+          override
+            .returns(
+              {
+                source: T.any(
+                  OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
+                  OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID
+                ),
+                type: Symbol
+              }
+            )
+        end
+        def to_hash; end
+
+        module Source
+          extend OpenAI::Internal::Type::Union
+
+          class FileContent < OpenAI::Internal::Type::BaseModel
+            # The content of the jsonl file.
+            sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content]) }
+            attr_accessor :content
+
+            # The type of jsonl source. Always `file_content`.
+            sig { returns(Symbol) }
+            attr_accessor :type
+
+            sig do
+              params(
+                content: T::Array[
+                  T.any(
+                    OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content,
+                    OpenAI::Internal::AnyHash
+                  )
+                ],
+                type: Symbol
+              )
+                .returns(T.attached_class)
+            end
+            def self.new(content:, type: :file_content); end
+
+            sig do
+              override
+                .returns(
+                  {
+                    content: T::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content],
+                    type: Symbol
+                  }
+                )
+            end
+            def to_hash; end
+
+            class Content < OpenAI::Internal::Type::BaseModel
+              sig { returns(T::Hash[Symbol, T.anything]) }
+              attr_accessor :item
+
+              sig { returns(T.nilable(T::Hash[Symbol, T.anything])) }
+              attr_reader :sample
+
+              sig { params(sample: T::Hash[Symbol, T.anything]).void }
+              attr_writer :sample
+
+              sig do
+                params(
+                  item: T::Hash[Symbol, T.anything],
+                  sample: T::Hash[Symbol, T.anything]
+                ).returns(T.attached_class)
+              end
+              def self.new(item:, sample: nil); end
+
+              sig do
+                override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]})
+              end
+              def to_hash; end
+            end
+          end
+
+          class FileID < OpenAI::Internal::Type::BaseModel
+            # The identifier of the file.
+            sig { returns(String) }
+            attr_accessor :id
+
+            # The type of jsonl source. 
Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + ) + end + def self.variants; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/eval_api_error.rbi b/rbi/lib/openai/models/evals/eval_api_error.rbi new file mode 100644 index 00000000..cc6ac0e5 --- /dev/null +++ b/rbi/lib/openai/models/evals/eval_api_error.rbi @@ -0,0 +1,26 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class EvalAPIError < OpenAI::Internal::Type::BaseModel + # The error code. + sig { returns(String) } + attr_accessor :code + + # The error message. + sig { returns(String) } + attr_accessor :message + + # An object representing an error response from the Eval API. + sig { params(code: String, message: String).returns(T.attached_class) } + def self.new(code:, message:); end + + sig { override.returns({code: String, message: String}) } + def to_hash; end + end + end + + EvalAPIError = Evals::EvalAPIError + end +end diff --git a/rbi/lib/openai/models/evals/run_cancel_params.rbi b/rbi/lib/openai/models/evals/run_cancel_params.rbi new file mode 100644 index 00000000..4d5aae07 --- /dev/null +++ b/rbi/lib/openai/models/evals/run_cancel_params.rbi @@ -0,0 +1,24 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig { returns(String) } + attr_accessor :eval_id + + sig do + params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + .returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}); end + + sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_cancel_response.rbi b/rbi/lib/openai/models/evals/run_cancel_response.rbi new file mode 100644 index 00000000..bc2ac3bd --- /dev/null +++ b/rbi/lib/openai/models/evals/run_cancel_response.rbi @@ -0,0 +1,281 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunCancelResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Information about the run's data source. + sig do + returns( + T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ) + ) + end + attr_accessor :data_source + + # An object representing an error response from the Eval API. + sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The identifier of the associated evaluation. + sig { returns(String) } + attr_accessor :eval_id + + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The model that is evaluated, if applicable. + sig { returns(String) } + attr_accessor :model + + # The name of the evaluation run. + sig { returns(String) } + attr_accessor :name + + # The type of the object. Always "eval.run". + sig { returns(Symbol) } + attr_accessor :object + + # Usage statistics for each model during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage]) } + attr_accessor :per_model_usage + + # Results per testing criteria applied during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult]) } + attr_accessor :per_testing_criteria_results + + # The URL to the rendered evaluation run report on the UI dashboard. + sig { returns(String) } + attr_accessor :report_url + + # Counters summarizing the outcomes of the evaluation run. + sig { returns(OpenAI::Models::Evals::RunCancelResponse::ResultCounts) } + attr_reader :result_counts + + sig do + params( + result_counts: T.any(OpenAI::Models::Evals::RunCancelResponse::ResultCounts, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :result_counts + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run. + sig do + params( + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunCancelResponse::PerModelUsage, OpenAI::Internal::AnyHash)], + per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + report_url: String, + result_counts: T.any(OpenAI::Models::Evals::RunCancelResponse::ResultCounts, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source:, + error:, + eval_id:, + metadata:, + model:, + name:, + per_model_usage:, + per_testing_criteria_results:, + report_url:, + result_counts:, + status:, + object: :"eval.run" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: T::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], + per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + ) + end + def to_hash; end + + # Information about the run's data source. 
+ module DataSource + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + ) + end + def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. + sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of invocations. + sig { returns(Integer) } + attr_accessor :invocation_count + + # The name of the model. + sig { returns(String) } + attr_accessor :model_name + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new( + cached_tokens:, + completion_tokens:, + invocation_count:, + model_name:, + prompt_tokens:, + total_tokens: + ) + end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # Number of tests failed for this criteria. + sig { returns(Integer) } + attr_accessor :failed + + # Number of tests passed for this criteria. + sig { returns(Integer) } + attr_accessor :passed + + # A description of the testing criteria. + sig { returns(String) } + attr_accessor :testing_criteria + + sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + def self.new(failed:, passed:, testing_criteria:); end + + sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } + def to_hash; end + end + + class ResultCounts < OpenAI::Internal::Type::BaseModel + # Number of output items that resulted in an error. + sig { returns(Integer) } + attr_accessor :errored + + # Number of output items that failed to pass the evaluation. + sig { returns(Integer) } + attr_accessor :failed + + # Number of output items that passed the evaluation. + sig { returns(Integer) } + attr_accessor :passed + + # Total number of executed output items. + sig { returns(Integer) } + attr_accessor :total + + # Counters summarizing the outcomes of the evaluation run. + sig do + params( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ).returns(T.attached_class) + end + def self.new(errored:, failed:, passed:, total:); end + + sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_create_params.rbi b/rbi/lib/openai/models/evals/run_create_params.rbi new file mode 100644 index 00000000..0f0fd8b0 --- /dev/null +++ b/rbi/lib/openai/models/evals/run_create_params.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # Details about the run's data source. 
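+        # As a rough sketch, a run over an uploaded JSONL file might be created as
+        # follows (the file ID is illustrative):
+        #
+        #   OpenAI::Models::Evals::RunCreateParams.new(
+        #     data_source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource.new(
+        #       source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID.new(id: "file-abc123")
+        #     )
+        #   )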
+ sig do + returns( + T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ) + ) + end + attr_accessor :data_source + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The name of the run. + sig { returns(T.nilable(String)) } + attr_reader :name + + sig { params(name: String).void } + attr_writer :name + + sig do + params( + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(data_source:, metadata: nil, name: nil, request_options: {}); end + + sig do + override + .returns( + { + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # Details about the run's data source. + module DataSource + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + ) + end + def self.variants; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_create_response.rbi b/rbi/lib/openai/models/evals/run_create_response.rbi new file mode 100644 index 00000000..8999012b --- /dev/null +++ b/rbi/lib/openai/models/evals/run_create_response.rbi @@ -0,0 +1,281 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunCreateResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Information about the run's data source. + sig do + returns( + T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ) + ) + end + attr_accessor :data_source + + # An object representing an error response from the Eval API. + sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The identifier of the associated evaluation. + sig { returns(String) } + attr_accessor :eval_id + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The model that is evaluated, if applicable. 
+ sig { returns(String) } + attr_accessor :model + + # The name of the evaluation run. + sig { returns(String) } + attr_accessor :name + + # The type of the object. Always "eval.run". + sig { returns(Symbol) } + attr_accessor :object + + # Usage statistics for each model during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage]) } + attr_accessor :per_model_usage + + # Results per testing criteria applied during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult]) } + attr_accessor :per_testing_criteria_results + + # The URL to the rendered evaluation run report on the UI dashboard. + sig { returns(String) } + attr_accessor :report_url + + # Counters summarizing the outcomes of the evaluation run. + sig { returns(OpenAI::Models::Evals::RunCreateResponse::ResultCounts) } + attr_reader :result_counts + + sig do + params( + result_counts: T.any(OpenAI::Models::Evals::RunCreateResponse::ResultCounts, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :result_counts + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run. + sig do + params( + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunCreateResponse::PerModelUsage, OpenAI::Internal::AnyHash)], + per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + report_url: String, + result_counts: T.any(OpenAI::Models::Evals::RunCreateResponse::ResultCounts, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source:, + error:, + eval_id:, + metadata:, + model:, + name:, + per_model_usage:, + per_testing_criteria_results:, + report_url:, + result_counts:, + status:, + object: :"eval.run" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: T::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], + per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + ) + end + def to_hash; end + + # Information about the run's data source. + module DataSource + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + ) + end + def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. 
+ sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of invocations. + sig { returns(Integer) } + attr_accessor :invocation_count + + # The name of the model. + sig { returns(String) } + attr_accessor :model_name + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new( + cached_tokens:, + completion_tokens:, + invocation_count:, + model_name:, + prompt_tokens:, + total_tokens: + ) + end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # Number of tests failed for this criteria. + sig { returns(Integer) } + attr_accessor :failed + + # Number of tests passed for this criteria. + sig { returns(Integer) } + attr_accessor :passed + + # A description of the testing criteria. + sig { returns(String) } + attr_accessor :testing_criteria + + sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + def self.new(failed:, passed:, testing_criteria:); end + + sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } + def to_hash; end + end + + class ResultCounts < OpenAI::Internal::Type::BaseModel + # Number of output items that resulted in an error. + sig { returns(Integer) } + attr_accessor :errored + + # Number of output items that failed to pass the evaluation. + sig { returns(Integer) } + attr_accessor :failed + + # Number of output items that passed the evaluation. + sig { returns(Integer) } + attr_accessor :passed + + # Total number of executed output items. + sig { returns(Integer) } + attr_accessor :total + + # Counters summarizing the outcomes of the evaluation run. 
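+          # For example, a finished run might report `errored: 0, failed: 2,
+          # passed: 8, total: 10`.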
+ sig do + params( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ).returns(T.attached_class) + end + def self.new(errored:, failed:, passed:, total:); end + + sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_delete_params.rbi b/rbi/lib/openai/models/evals/run_delete_params.rbi new file mode 100644 index 00000000..6f4f786a --- /dev/null +++ b/rbi/lib/openai/models/evals/run_delete_params.rbi @@ -0,0 +1,24 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig { returns(String) } + attr_accessor :eval_id + + sig do + params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + .returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}); end + + sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_delete_response.rbi b/rbi/lib/openai/models/evals/run_delete_response.rbi new file mode 100644 index 00000000..2130f09b --- /dev/null +++ b/rbi/lib/openai/models/evals/run_delete_response.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunDeleteResponse < OpenAI::Internal::Type::BaseModel + sig { returns(T.nilable(T::Boolean)) } + attr_reader :deleted + + sig { params(deleted: T::Boolean).void } + attr_writer :deleted + + sig { returns(T.nilable(String)) } + attr_reader :object + + sig { params(object: String).void } + attr_writer :object + + sig { returns(T.nilable(String)) } + attr_reader :run_id + + sig { params(run_id: String).void } + attr_writer :run_id + + sig { params(deleted: T::Boolean, object: String, run_id: String).returns(T.attached_class) } + def self.new(deleted: nil, object: nil, run_id: nil); end + + sig { override.returns({deleted: T::Boolean, object: String, run_id: String}) } + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/lib/openai/models/evals/run_list_params.rbi new file mode 100644 index 00000000..cd16d49f --- /dev/null +++ b/rbi/lib/openai/models/evals/run_list_params.rbi @@ -0,0 +1,103 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # Identifier for the last run from the previous pagination request. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # Number of runs to retrieve. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. + sig { returns(T.nilable(OpenAI::Models::Evals::RunListParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol).void } + attr_writer :order + + # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | + # "canceled". 
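+        # For example, pass `status: :completed` to list only finished runs.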
+ sig { returns(T.nilable(OpenAI::Models::Evals::RunListParams::Status::OrSymbol)) } + attr_reader :status + + sig { params(status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol).void } + attr_writer :status + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(after: nil, limit: nil, order: nil, status: nil, request_options: {}); end + + sig do + override + .returns( + { + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) } + + ASC = T.let(:asc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol]) } + def self.values; end + end + + # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | + # "canceled". + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) } + + QUEUED = T.let(:queued, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) + CANCELED = T.let(:canceled, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol]) } + def self.values; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_list_response.rbi b/rbi/lib/openai/models/evals/run_list_response.rbi new file mode 100644 index 00000000..01e8f33b --- /dev/null +++ b/rbi/lib/openai/models/evals/run_list_response.rbi @@ -0,0 +1,281 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunListResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Information about the run's data source. + sig do + returns( + T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ) + ) + end + attr_accessor :data_source + + # An object representing an error response from the Eval API. 
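+ #
+ # (Sketch: after a run finishes with a `status` of "failed", this field is
+ # expected to carry the failure details; the exact fields are defined on
+ # `OpenAI::Models::Evals::EvalAPIError`, which lies outside this hunk.)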
+ sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The identifier of the associated evaluation. + sig { returns(String) } + attr_accessor :eval_id + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The model that is evaluated, if applicable. + sig { returns(String) } + attr_accessor :model + + # The name of the evaluation run. + sig { returns(String) } + attr_accessor :name + + # The type of the object. Always "eval.run". + sig { returns(Symbol) } + attr_accessor :object + + # Usage statistics for each model during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage]) } + attr_accessor :per_model_usage + + # Results per testing criteria applied during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult]) } + attr_accessor :per_testing_criteria_results + + # The URL to the rendered evaluation run report on the UI dashboard. + sig { returns(String) } + attr_accessor :report_url + + # Counters summarizing the outcomes of the evaluation run. + sig { returns(OpenAI::Models::Evals::RunListResponse::ResultCounts) } + attr_reader :result_counts + + sig do + params( + result_counts: T.any(OpenAI::Models::Evals::RunListResponse::ResultCounts, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :result_counts + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run. 
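+ #
+ # Sketch (`run` assumed to be a deserialized `RunListResponse`): instances are
+ # normally built by the SDK rather than by hand, and `to_hash` (below) gives a
+ # symbol-keyed Hash view:
+ #
+ #   run.to_hash.fetch(:report_url)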
+ sig do + params( + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunListResponse::PerModelUsage, OpenAI::Internal::AnyHash)], + per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + report_url: String, + result_counts: T.any(OpenAI::Models::Evals::RunListResponse::ResultCounts, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source:, + error:, + eval_id:, + metadata:, + model:, + name:, + per_model_usage:, + per_testing_criteria_results:, + report_url:, + result_counts:, + status:, + object: :"eval.run" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + ) + end + def to_hash; end + + # Information about the run's data source. + module DataSource + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + ) + end + def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. + sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of invocations. + sig { returns(Integer) } + attr_accessor :invocation_count + + # The name of the model. + sig { returns(String) } + attr_accessor :model_name + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new( + cached_tokens:, + completion_tokens:, + invocation_count:, + model_name:, + prompt_tokens:, + total_tokens: + ) + end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # Number of tests failed for this criteria. 
+ sig { returns(Integer) } + attr_accessor :failed + + # Number of tests passed for this criteria. + sig { returns(Integer) } + attr_accessor :passed + + # A description of the testing criteria. + sig { returns(String) } + attr_accessor :testing_criteria + + sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + def self.new(failed:, passed:, testing_criteria:); end + + sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } + def to_hash; end + end + + class ResultCounts < OpenAI::Internal::Type::BaseModel + # Number of output items that resulted in an error. + sig { returns(Integer) } + attr_accessor :errored + + # Number of output items that failed to pass the evaluation. + sig { returns(Integer) } + attr_accessor :failed + + # Number of output items that passed the evaluation. + sig { returns(Integer) } + attr_accessor :passed + + # Total number of executed output items. + sig { returns(Integer) } + attr_accessor :total + + # Counters summarizing the outcomes of the evaluation run. + sig do + params( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ).returns(T.attached_class) + end + def self.new(errored:, failed:, passed:, total:); end + + sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_retrieve_params.rbi b/rbi/lib/openai/models/evals/run_retrieve_params.rbi new file mode 100644 index 00000000..7dd9ed20 --- /dev/null +++ b/rbi/lib/openai/models/evals/run_retrieve_params.rbi @@ -0,0 +1,24 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig { returns(String) } + attr_accessor :eval_id + + sig do + params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + .returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}); end + + sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/run_retrieve_response.rbi b/rbi/lib/openai/models/evals/run_retrieve_response.rbi new file mode 100644 index 00000000..85db5714 --- /dev/null +++ b/rbi/lib/openai/models/evals/run_retrieve_response.rbi @@ -0,0 +1,281 @@ +# typed: strong + +module OpenAI + module Models + module Evals + class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Information about the run's data source. + sig do + returns( + T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ) + ) + end + attr_accessor :data_source + + # An object representing an error response from the Eval API. + sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The identifier of the associated evaluation. 
+ sig { returns(String) } + attr_accessor :eval_id + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # The model that is evaluated, if applicable. + sig { returns(String) } + attr_accessor :model + + # The name of the evaluation run. + sig { returns(String) } + attr_accessor :name + + # The type of the object. Always "eval.run". + sig { returns(Symbol) } + attr_accessor :object + + # Usage statistics for each model during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage]) } + attr_accessor :per_model_usage + + # Results per testing criteria applied during the evaluation run. + sig { returns(T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult]) } + attr_accessor :per_testing_criteria_results + + # The URL to the rendered evaluation run report on the UI dashboard. + sig { returns(String) } + attr_accessor :report_url + + # Counters summarizing the outcomes of the evaluation run. + sig { returns(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts) } + attr_reader :result_counts + + sig do + params( + result_counts: T.any(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :result_counts + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run. + sig do + params( + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage, OpenAI::Internal::AnyHash)], + per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + report_url: String, + result_counts: T.any(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + data_source:, + error:, + eval_id:, + metadata:, + model:, + name:, + per_model_usage:, + per_testing_criteria_results:, + report_url:, + result_counts:, + status:, + object: :"eval.run" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], + per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: 
OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + ) + end + def to_hash; end + + # Information about the run's data source. + module DataSource + extend OpenAI::Internal::Type::Union + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + ) + end + def self.variants; end + end + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. + sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of invocations. + sig { returns(Integer) } + attr_accessor :invocation_count + + # The name of the model. + sig { returns(String) } + attr_accessor :model_name + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new( + cached_tokens:, + completion_tokens:, + invocation_count:, + model_name:, + prompt_tokens:, + total_tokens: + ) + end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + # Number of tests failed for this criteria. + sig { returns(Integer) } + attr_accessor :failed + + # Number of tests passed for this criteria. + sig { returns(Integer) } + attr_accessor :passed + + # A description of the testing criteria. + sig { returns(String) } + attr_accessor :testing_criteria + + sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + def self.new(failed:, passed:, testing_criteria:); end + + sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } + def to_hash; end + end + + class ResultCounts < OpenAI::Internal::Type::BaseModel + # Number of output items that resulted in an error. + sig { returns(Integer) } + attr_accessor :errored + + # Number of output items that failed to pass the evaluation. + sig { returns(Integer) } + attr_accessor :failed + + # Number of output items that passed the evaluation. + sig { returns(Integer) } + attr_accessor :passed + + # Total number of executed output items. + sig { returns(Integer) } + attr_accessor :total + + # Counters summarizing the outcomes of the evaluation run. 
+ sig do + params( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ).returns(T.attached_class) + end + def self.new(errored:, failed:, passed:, total:); end + + sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi new file mode 100644 index 00000000..cd6a0ed0 --- /dev/null +++ b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi @@ -0,0 +1,107 @@ +# typed: strong + +module OpenAI + module Models + module Evals + module Runs + class OutputItemListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig { returns(String) } + attr_accessor :eval_id + + # Identifier for the last output item from the previous pagination request. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # Number of output items to retrieve. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order for output items by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. + sig { returns(T.nilable(OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol).void } + attr_writer :order + + # Filter output items by status. Use `failed` to filter by failed output items or + # `pass` to filter by passed output items. + sig { returns(T.nilable(OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol)) } + attr_reader :status + + sig { params(status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol).void } + attr_writer :status + + sig do + params( + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}); end + + sig do + override + .returns( + { + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # Sort order for output items by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. 
+ module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) } + + ASC = T.let(:asc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol]) } + def self.values; end + end + + # Filter output items by status. Use `failed` to filter by failed output items or + # `pass` to filter by passed output items. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) } + + FAIL = T.let(:fail, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) + PASS = T.let(:pass, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol]) } + def self.values; end + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi new file mode 100644 index 00000000..ecb65334 --- /dev/null +++ b/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi @@ -0,0 +1,288 @@ +# typed: strong + +module OpenAI + module Models + module Evals + module Runs + class OutputItemListResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run output item. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Details of the input data source item. + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :datasource_item + + # The identifier for the data source item. + sig { returns(Integer) } + attr_accessor :datasource_item_id + + # The identifier of the evaluation group. + sig { returns(String) } + attr_accessor :eval_id + + # The type of the object. Always "eval.run.output_item". + sig { returns(Symbol) } + attr_accessor :object + + # A list of results from the evaluation run. + sig { returns(T::Array[T::Hash[Symbol, T.anything]]) } + attr_accessor :results + + # The identifier of the evaluation run associated with this output item. + sig { returns(String) } + attr_accessor :run_id + + # A sample containing the input and output of the evaluation run. + sig { returns(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample) } + attr_reader :sample + + sig do + params( + sample: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :sample + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run output item. 
+ sig do + params( + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + datasource_item:, + datasource_item_id:, + eval_id:, + results:, + run_id:, + sample:, + status:, + object: :"eval.run.output_item" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + ) + end + def to_hash; end + + class Sample < OpenAI::Internal::Type::BaseModel + # An object representing an error response from the Eval API. + sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The reason why the sample generation was finished. + sig { returns(String) } + attr_accessor :finish_reason + + # An array of input messages. + sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input]) } + attr_accessor :input + + # The maximum number of tokens allowed for completion. + sig { returns(Integer) } + attr_accessor :max_completion_tokens + + # The model used for generating the sample. + sig { returns(String) } + attr_accessor :model + + # An array of output messages. + sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output]) } + attr_accessor :output + + # The seed used for generating the sample. + sig { returns(Integer) } + attr_accessor :seed + + # The sampling temperature used. + sig { returns(Float) } + attr_accessor :temperature + + # The top_p value used for sampling. + sig { returns(Float) } + attr_accessor :top_p + + # Token usage details for the sample. + sig { returns(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage) } + attr_reader :usage + + sig do + params( + usage: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :usage + + # A sample containing the input and output of the evaluation run. 
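+ #
+ # Sketch (`sample` assumed): pairing the last input message with the first
+ # output message gives a quick look at one exchange:
+ #
+ #   puts sample.input.last&.content
+ #   puts sample.output.first&.content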
+ sig do + params( + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + finish_reason: String, + input: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input, OpenAI::Internal::AnyHash)], + max_completion_tokens: Integer, + model: String, + output: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output, OpenAI::Internal::AnyHash)], + seed: Integer, + temperature: Float, + top_p: Float, + usage: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new( + error:, + finish_reason:, + input:, + max_completion_tokens:, + model:, + output:, + seed:, + temperature:, + top_p:, + usage: + ); end + sig do + override + .returns( + { + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message sender (e.g., system, user, developer). + sig { returns(String) } + attr_accessor :role + + # An input message. + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class Output < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(T.nilable(String)) } + attr_reader :content + + sig { params(content: String).void } + attr_writer :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(T.nilable(String)) } + attr_reader :role + + sig { params(role: String).void } + attr_writer :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content: nil, role: nil); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class Usage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. + sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + # Token usage details for the sample. 
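+ # As an assumption matching the usual accounting (not stated in this schema),
+ # `total_tokens` should equal `prompt_tokens + completion_tokens`, with
+ # `cached_tokens` counting the prompt tokens served from cache.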
+ sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:); end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi b/rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi new file mode 100644 index 00000000..6a3eb80e --- /dev/null +++ b/rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Evals + module Runs + class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig { returns(String) } + attr_accessor :eval_id + + sig { returns(String) } + attr_accessor :run_id + + sig do + params( + eval_id: String, + run_id: String, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(eval_id:, run_id:, request_options: {}); end + + sig { override.returns({eval_id: String, run_id: String, request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi b/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi new file mode 100644 index 00000000..b538f6cd --- /dev/null +++ b/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi @@ -0,0 +1,288 @@ +# typed: strong + +module OpenAI + module Models + module Evals + module Runs + class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel + # Unique identifier for the evaluation run output item. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the evaluation run was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Details of the input data source item. + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :datasource_item + + # The identifier for the data source item. + sig { returns(Integer) } + attr_accessor :datasource_item_id + + # The identifier of the evaluation group. + sig { returns(String) } + attr_accessor :eval_id + + # The type of the object. Always "eval.run.output_item". + sig { returns(Symbol) } + attr_accessor :object + + # A list of results from the evaluation run. + sig { returns(T::Array[T::Hash[Symbol, T.anything]]) } + attr_accessor :results + + # The identifier of the evaluation run associated with this output item. + sig { returns(String) } + attr_accessor :run_id + + # A sample containing the input and output of the evaluation run. + sig { returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample) } + attr_reader :sample + + sig do + params( + sample: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :sample + + # The status of the evaluation run. + sig { returns(String) } + attr_accessor :status + + # A schema representing an evaluation run output item. 
+ sig do + params( + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, OpenAI::Internal::AnyHash), + status: String, + object: Symbol + ) + .returns(T.attached_class) + end + def self.new( + id:, + created_at:, + datasource_item:, + datasource_item_id:, + eval_id:, + results:, + run_id:, + sample:, + status:, + object: :"eval.run.output_item" + ); end + sig do + override + .returns( + { + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + ) + end + def to_hash; end + + class Sample < OpenAI::Internal::Type::BaseModel + # An object representing an error response from the Eval API. + sig { returns(OpenAI::Models::Evals::EvalAPIError) } + attr_reader :error + + sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + attr_writer :error + + # The reason why the sample generation was finished. + sig { returns(String) } + attr_accessor :finish_reason + + # An array of input messages. + sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input]) } + attr_accessor :input + + # The maximum number of tokens allowed for completion. + sig { returns(Integer) } + attr_accessor :max_completion_tokens + + # The model used for generating the sample. + sig { returns(String) } + attr_accessor :model + + # An array of output messages. + sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output]) } + attr_accessor :output + + # The seed used for generating the sample. + sig { returns(Integer) } + attr_accessor :seed + + # The sampling temperature used. + sig { returns(Float) } + attr_accessor :temperature + + # The top_p value used for sampling. + sig { returns(Float) } + attr_accessor :top_p + + # Token usage details for the sample. + sig { returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage) } + attr_reader :usage + + sig do + params( + usage: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :usage + + # A sample containing the input and output of the evaluation run. 
+ sig do + params( + error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + finish_reason: String, + input: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input, OpenAI::Internal::AnyHash)], + max_completion_tokens: Integer, + model: String, + output: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output, OpenAI::Internal::AnyHash)], + seed: Integer, + temperature: Float, + top_p: Float, + usage: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new( + error:, + finish_reason:, + input:, + max_completion_tokens:, + model:, + output:, + seed:, + temperature:, + top_p:, + usage: + ); end + sig do + override + .returns( + { + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message sender (e.g., system, user, developer). + sig { returns(String) } + attr_accessor :role + + # An input message. + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class Output < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(T.nilable(String)) } + attr_reader :content + + sig { params(content: String).void } + attr_writer :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(T.nilable(String)) } + attr_reader :role + + sig { params(role: String).void } + attr_writer :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content: nil, role: nil); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class Usage < OpenAI::Internal::Type::BaseModel + # The number of tokens retrieved from cache. + sig { returns(Integer) } + attr_accessor :cached_tokens + + # The number of completion tokens generated. + sig { returns(Integer) } + attr_accessor :completion_tokens + + # The number of prompt tokens used. + sig { returns(Integer) } + attr_accessor :prompt_tokens + + # The total number of tokens used. + sig { returns(Integer) } + attr_accessor :total_tokens + + # Token usage details for the sample. 
+ sig do + params( + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:); end + + sig do + override + .returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + end + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi new file mode 100644 index 00000000..27d4ad09 --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi @@ -0,0 +1,30 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # The project identifiers to grant access to. + sig { returns(T::Array[String]) } + attr_accessor :project_ids + + sig do + params( + project_ids: T::Array[String], + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(project_ids:, request_options: {}); end + + sig { override.returns({project_ids: T::Array[String], request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi new file mode 100644 index 00000000..07d7d8b6 --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi @@ -0,0 +1,42 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel + # The permission identifier, which can be referenced in the API endpoints. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the permission was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The object type, which is always "checkpoint.permission". + sig { returns(Symbol) } + attr_accessor :object + + # The project identifier that the permission is for. + sig { returns(String) } + attr_accessor :project_id + + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. 
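+ #
+ # Sketch (the resource path and the checkpoint ID are assumptions):
+ #
+ #   client.fine_tuning.checkpoints.permissions.create(
+ #     "ft:gpt-4o-mini:org::abc123",
+ #     project_ids: ["proj_123"]
+ #   )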
+ sig do + params( + id: String, + created_at: Integer, + project_id: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new(id:, created_at:, project_id:, object: :"checkpoint.permission"); end + + sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi new file mode 100644 index 00000000..69429e05 --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + sig do + params( + request_options: T.any( + OpenAI::RequestOptions, + OpenAI::Internal::AnyHash + ) + ).returns(T.attached_class) + end + def self.new(request_options: {}); end + + sig { override.returns({request_options: OpenAI::RequestOptions}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi new file mode 100644 index 00000000..9a510f8c --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi @@ -0,0 +1,29 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel + # The ID of the fine-tuned model checkpoint permission that was deleted. + sig { returns(String) } + attr_accessor :id + + # Whether the fine-tuned model checkpoint permission was successfully deleted. + sig { returns(T::Boolean) } + attr_accessor :deleted + + # The object type, which is always "checkpoint.permission". + sig { returns(Symbol) } + attr_accessor :object + + sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } + def self.new(id:, deleted:, object: :"checkpoint.permission"); end + + sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi new file mode 100644 index 00000000..b3acb44f --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi @@ -0,0 +1,95 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # Identifier for the last permission ID from the previous pagination request. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # Number of permissions to retrieve. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # The order in which to retrieve permissions. 
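+ # Note that, per the enum below, the accepted values here are `:ascending`
+ # and `:descending`, not the `:asc`/`:desc` used by the eval run params.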
+ sig { returns(T.nilable(OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol).void } + attr_writer :order + + # The ID of the project to get permissions for. + sig { returns(T.nilable(String)) } + attr_reader :project_id + + sig { params(project_id: String).void } + attr_writer :project_id + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + project_id: String, + request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) + ) + .returns(T.attached_class) + end + def self.new(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}); end + + sig do + override + .returns( + { + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + project_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash; end + + # The order in which to retrieve permissions. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order) } + OrSymbol = + T.type_alias do + T.any( + Symbol, + String, + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol + ) + end + + ASCENDING = + T.let(:ascending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) + DESCENDING = + T.let(:descending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) + + sig do + override + .returns(T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol]) + end + def self.values; end + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi new file mode 100644 index 00000000..dc137a5f --- /dev/null +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Checkpoints + class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel + sig { returns(T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data]) } + attr_accessor :data + + sig { returns(T::Boolean) } + attr_accessor :has_more + + sig { returns(Symbol) } + attr_accessor :object + + sig { returns(T.nilable(String)) } + attr_accessor :first_id + + sig { returns(T.nilable(String)) } + attr_accessor :last_id + + sig do + params( + data: T::Array[ + T.any( + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data, + OpenAI::Internal::AnyHash + ) + ], + has_more: T::Boolean, + first_id: T.nilable(String), + last_id: T.nilable(String), + object: Symbol + ) + .returns(T.attached_class) + end + def self.new(data:, has_more:, first_id: nil, last_id: nil, object: :list); end + + sig do + override + .returns( + { + data: T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: T::Boolean, + object: Symbol, + first_id: T.nilable(String), + last_id: T.nilable(String) + } + ) + end + def to_hash; end + + class Data < OpenAI::Internal::Type::BaseModel + # The permission identifier, which can be referenced in the 
API endpoints. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the permission was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The object type, which is always "checkpoint.permission". + sig { returns(Symbol) } + attr_accessor :object + + # The project identifier that the permission is for. + sig { returns(String) } + attr_accessor :project_id + + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. + sig do + params( + id: String, + created_at: Integer, + project_id: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new(id:, created_at:, project_id:, object: :"checkpoint.permission"); end + + sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } + def to_hash; end + end + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 51372d8d..ced272d4 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -56,7 +56,11 @@ module OpenAI # available models. sig do returns( - T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol) + T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) ) end attr_accessor :model @@ -225,7 +229,11 @@ module OpenAI incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::AnyHash)), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), output: T::Array[ T.any( OpenAI::Models::Responses::ResponseOutputMessage, @@ -301,7 +309,11 @@ module OpenAI incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::TaggedSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::TaggedSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ), object: Symbol, output: T::Array[ T.any( diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 4263b1b6..f462e65e 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -45,7 +45,15 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol)) } + sig do + returns( + T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ) + ) + end attr_accessor :model # Specify additional output data to include in the model response. 
Currently @@ -248,7 +256,11 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), @@ -323,7 +335,11 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 79e16c96..1ae31d0c 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -5,17 +5,35 @@ module OpenAI module ResponsesModel extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol]) } - def self.variants; end + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel) } + OrSymbol = + T.type_alias { T.any(Symbol, String, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) } - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ResponsesModel::TaggedSymbol) } + O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + O1_PRO_2025_03_19 = + T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + COMPUTER_USE_PREVIEW = + T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + COMPUTER_USE_PREVIEW_2025_03_11 = + T.let( + :"computer-use-preview-2025-03-11", + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) - O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::TaggedSymbol) - O1_PRO_2025_03_19 = T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::TaggedSymbol) - COMPUTER_USE_PREVIEW = T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::TaggedSymbol) - COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::ResponsesModel::TaggedSymbol) + sig { override.returns(T::Array[OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol]) } + def self.values; end + end + + sig do + override + .returns( + [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol] + ) + end + def self.variants; end end end end diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 316bbca4..2139613a 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -91,7 +91,8 @@ module OpenAI thread_id, request_options: {} ); end - # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Beta::Threads#create_and_run_streaming} for streaming + # counterpart. # # Create a thread and run it in one request. 
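#
# Illustrative sketch of the streaming counterpart renamed by this patch
# (`client` is an assumed `OpenAI::Client` instance):
#
#   stream = client.beta.threads.create_and_run_streaming(assistant_id: "asst_123")
#   stream.each { |event| puts event.class }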
sig do @@ -230,8 +231,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or - # `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` + # or `#create_and_run` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -315,7 +316,7 @@ module OpenAI ] ) end - def stream_raw( + def create_and_run_streaming( # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. @@ -404,8 +405,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or - # `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` + # or `#create_and_run` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index e3a1216a..0b26ef36 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Resources::Beta::Threads::Runs::Steps) } attr_reader :steps - # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming + # See {OpenAI::Resources::Beta::Threads::Runs#create_streaming} for streaming # counterpart. # # Create a run. @@ -173,7 +173,7 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or + # There is no need to provide `stream:`. Instead, use `#create_streaming` or # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} @@ -263,7 +263,7 @@ module OpenAI ] ) end - def create_stream_raw( + def create_streaming( # Path param: The ID of the thread to run. thread_id, # Body param: The ID of the @@ -372,7 +372,7 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or + # There is no need to provide `stream:`. Instead, use `#create_streaming` or # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} @@ -468,7 +468,7 @@ module OpenAI thread_id:, request_options: {} ); end - # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming} for # streaming counterpart. # # When a run has the `status: "requires_action"` and `required_action.type` is @@ -495,7 +495,7 @@ module OpenAI # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. 
Instead, use - # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and + # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and # non-streaming use cases, respectively. stream: false, request_options: {} @@ -546,7 +546,7 @@ module OpenAI ] ) end - def submit_tool_outputs_stream_raw( + def submit_tool_outputs_streaming( # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the @@ -556,7 +556,7 @@ module OpenAI # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and + # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and # non-streaming use cases, respectively. stream: true, request_options: {} diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index 672459b2..d0623009 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -7,7 +7,8 @@ module OpenAI sig { returns(OpenAI::Resources::Chat::Completions::Messages) } attr_reader :messages - # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Chat::Completions#create_streaming} for streaming + # counterpart. # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take @@ -270,8 +271,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -354,7 +355,7 @@ module OpenAI ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end - def stream_raw( + def create_streaming( # A list of messages comprising the conversation so far. Depending on the # [model](https://platform.openai.com/docs/models) you use, different message # types (modalities) are supported, like @@ -538,8 +539,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/evals.rbi b/rbi/lib/openai/resources/evals.rbi new file mode 100644 index 00000000..f7fb72d2 --- /dev/null +++ b/rbi/lib/openai/resources/evals.rbi @@ -0,0 +1,133 @@ +# typed: strong + +module OpenAI + module Resources + class Evals + sig { returns(OpenAI::Resources::Evals::Runs) } + attr_reader :runs + + # Create the structure of an evaluation that can be used to test a model's + # performance. An evaluation is a set of testing criteria and a datasource. After + # creating an evaluation, you can run it on different models and model parameters. 
+ # We support several types of graders and datasources. For more information, see + # the [Evals guide](https://platform.openai.com/docs/guides/evals). + sig do + params( + data_source_config: T.any( + OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + ), + testing_criteria: T::Array[ + T.any( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalStringCheckGrader, + OpenAI::Models::EvalTextSimilarityGrader + ) + ], + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + share_with_openai: T::Boolean, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::EvalCreateResponse) + end + def create( + # The configuration for the data source used for the evaluation runs. + data_source_config:, + # A list of graders for all eval runs in this group. + testing_criteria:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the evaluation. + name: nil, + # Indicates whether the evaluation is shared with OpenAI. + share_with_openai: nil, + request_options: {} + ); end + # Get an evaluation by ID. + sig do + params( + eval_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::EvalRetrieveResponse) + end + def retrieve( + # The ID of the evaluation to retrieve. + eval_id, + request_options: {} + ); end + # Update certain properties of an evaluation. + sig do + params( + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::EvalUpdateResponse) + end + def update( + # The ID of the evaluation to update. + eval_id, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # Rename the evaluation. + name: nil, + request_options: {} + ); end + # List evaluations for a project. + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::Order::OrSymbol, + order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::EvalListResponse]) + end + def list( + # Identifier for the last eval from the previous pagination request. + after: nil, + # Number of evals to retrieve. + limit: nil, + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + order: nil, + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. + order_by: nil, + request_options: {} + ); end + # Delete an evaluation. 
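+      #
+      # For context, a hedged end-to-end sketch of the Evals surface added in
+      # this file, ending with deletion (the item schema, grader payload, and
+      # IDs are illustrative, not part of this patch):
+      #
+      #   ev = client.evals.create(
+      #     name: "demo",
+      #     data_source_config: {type: :custom, item_schema: {type: "object"}},
+      #     testing_criteria: [{
+      #       type: :string_check,
+      #       name: "exact match",
+      #       input: "{{item.answer}}",
+      #       operation: :eq,
+      #       reference: "{{item.expected}}"
+      #     }]
+      #   )
+      #   client.evals.retrieve(ev.id)
+      #   client.evals.delete(ev.id)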
+ sig do + params( + eval_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::EvalDeleteResponse) + end + def delete( + # The ID of the evaluation to delete. + eval_id, + request_options: {} + ); end + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:); end + end + end +end diff --git a/rbi/lib/openai/resources/evals/runs.rbi b/rbi/lib/openai/resources/evals/runs.rbi new file mode 100644 index 00000000..d49a538a --- /dev/null +++ b/rbi/lib/openai/resources/evals/runs.rbi @@ -0,0 +1,122 @@ +# typed: strong + +module OpenAI + module Resources + class Evals + class Runs + sig { returns(OpenAI::Resources::Evals::Runs::OutputItems) } + attr_reader :output_items + + # Create a new evaluation run. This is the endpoint that will kick off grading. + sig do + params( + eval_id: String, + data_source: T.any( + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::Evals::RunCreateResponse) + end + def create( + # The ID of the evaluation to create a run for. + eval_id, + # Details about the run's data source. + data_source:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the run. + name: nil, + request_options: {} + ); end + # Get an evaluation run by ID. + sig do + params( + run_id: String, + eval_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::Evals::RunRetrieveResponse) + end + def retrieve( + # The ID of the run to retrieve. + run_id, + # The ID of the evaluation to retrieve runs for. + eval_id:, + request_options: {} + ); end + # Get a list of runs for an evaluation. + sig do + params( + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::RunListResponse]) + end + def list( + # The ID of the evaluation to retrieve runs for. + eval_id, + # Identifier for the last run from the previous pagination request. + after: nil, + # Number of runs to retrieve. + limit: nil, + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. + order: nil, + # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | + # "canceled". + status: nil, + request_options: {} + ); end + # Delete an eval run. + sig do + params( + run_id: String, + eval_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::Evals::RunDeleteResponse) + end + def delete( + # The ID of the run to delete. + run_id, + # The ID of the evaluation to delete the run from. 
+ eval_id:, + request_options: {} + ); end + # Cancel an ongoing evaluation run. + sig do + params( + run_id: String, + eval_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::Evals::RunCancelResponse) + end + def cancel( + # The ID of the run to cancel. + run_id, + # The ID of the evaluation whose run you want to cancel. + eval_id:, + request_options: {} + ); end + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:); end + end + end + end +end diff --git a/rbi/lib/openai/resources/evals/runs/output_items.rbi b/rbi/lib/openai/resources/evals/runs/output_items.rbi new file mode 100644 index 00000000..16053fa0 --- /dev/null +++ b/rbi/lib/openai/resources/evals/runs/output_items.rbi @@ -0,0 +1,65 @@ +# typed: strong + +module OpenAI + module Resources + class Evals + class Runs + class OutputItems + # Get an evaluation run output item by ID. + sig do + params( + output_item_id: String, + eval_id: String, + run_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse) + end + def retrieve( + # The ID of the output item to retrieve. + output_item_id, + # The ID of the evaluation to retrieve runs for. + eval_id:, + # The ID of the run to retrieve. + run_id:, + request_options: {} + ); end + # Get a list of output items for an evaluation run. + sig do + params( + run_id: String, + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::Runs::OutputItemListResponse]) + end + def list( + # Path param: The ID of the run to retrieve output items for. + run_id, + # Path param: The ID of the evaluation to retrieve runs for. + eval_id:, + # Query param: Identifier for the last output item from the previous pagination + # request. + after: nil, + # Query param: Number of output items to retrieve. + limit: nil, + # Query param: Sort order for output items by timestamp. Use `asc` for ascending + # order or `desc` for descending order. Defaults to `asc`. + order: nil, + # Query param: Filter output items by status. Use `failed` to filter by failed + # output items or `pass` to filter by passed output items. 
+ status: nil, + request_options: {} + ); end + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:); end + end + end + end + end +end diff --git a/rbi/lib/openai/resources/fine_tuning.rbi b/rbi/lib/openai/resources/fine_tuning.rbi index fba94c1f..cf5c249f 100644 --- a/rbi/lib/openai/resources/fine_tuning.rbi +++ b/rbi/lib/openai/resources/fine_tuning.rbi @@ -6,6 +6,9 @@ module OpenAI sig { returns(OpenAI::Resources::FineTuning::Jobs) } attr_reader :jobs + sig { returns(OpenAI::Resources::FineTuning::Checkpoints) } + attr_reader :checkpoints + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:); end diff --git a/rbi/lib/openai/resources/fine_tuning/checkpoints.rbi b/rbi/lib/openai/resources/fine_tuning/checkpoints.rbi new file mode 100644 index 00000000..e9131b1d --- /dev/null +++ b/rbi/lib/openai/resources/fine_tuning/checkpoints.rbi @@ -0,0 +1,16 @@ +# typed: strong + +module OpenAI + module Resources + class FineTuning + class Checkpoints + sig { returns(OpenAI::Resources::FineTuning::Checkpoints::Permissions) } + attr_reader :permissions + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:); end + end + end + end +end diff --git a/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi new file mode 100644 index 00000000..0d9761de --- /dev/null +++ b/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -0,0 +1,78 @@ +# typed: strong + +module OpenAI + module Resources + class FineTuning + class Checkpoints + class Permissions + # **NOTE:** Calling this endpoint requires an [admin API key](../admin-api-keys). + # + # This enables organization owners to share fine-tuned models with other projects + # in their organization. + sig do + params( + fine_tuned_model_checkpoint: String, + project_ids: T::Array[String], + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Internal::Page[OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse]) + end + def create( + # The ID of the fine-tuned model checkpoint to create a permission for. + fine_tuned_model_checkpoint, + # The project identifiers to grant access to. + project_ids:, + request_options: {} + ); end + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). + # + # Organization owners can use this endpoint to view all permissions for a + # fine-tuned model checkpoint. + sig do + params( + fine_tuned_model_checkpoint: String, + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + project_id: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse) + end + def retrieve( + # The ID of the fine-tuned model checkpoint to get permissions for. + fine_tuned_model_checkpoint, + # Identifier for the last permission ID from the previous pagination request. + after: nil, + # Number of permissions to retrieve. + limit: nil, + # The order in which to retrieve permissions. + order: nil, + # The ID of the project to get permissions for. + project_id: nil, + request_options: {} + ); end + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). 
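+          #
+          # For context, a hedged sketch of revoking a previously granted
+          # permission (the checkpoint ID below is illustrative):
+          #
+          #   client.fine_tuning.checkpoints.permissions.delete("ft:gpt-4o-mini:org::ckpt-abc123")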
+ # + # Organization owners can use this endpoint to delete a permission for a + # fine-tuned model checkpoint. + sig do + params( + fine_tuned_model_checkpoint: String, + request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + ) + .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse) + end + def delete( + # The ID of the fine-tuned model checkpoint to delete a permission for. + fine_tuned_model_checkpoint, + request_options: {} + ); end + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:); end + end + end + end + end +end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index a8108266..b98140db 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -6,7 +6,7 @@ module OpenAI sig { returns(OpenAI::Resources::Responses::InputItems) } attr_reader :input_items - # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Responses#create_streaming} for streaming counterpart. # # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or @@ -40,7 +40,11 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), @@ -179,8 +183,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -218,7 +222,11 @@ module OpenAI ) ] ), - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::OrSymbol), + model: T.any( + String, + OpenAI::Models::ChatModel::OrSymbol, + OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), @@ -289,7 +297,7 @@ module OpenAI ] ) end - def stream_raw( + def create_streaming( # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -394,8 +402,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # `#create` for streaming and non-streaming use cases, respectively. 
stream: true, request_options: {} ); end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 7b991e2c..154568f7 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -42,6 +42,8 @@ module OpenAI attr_reader responses: OpenAI::Resources::Responses + attr_reader evals: OpenAI::Resources::Evals + private def auth_headers: -> ::Hash[String, String] def initialize: ( diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 836c7e7b..3d567669 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,23 +3,29 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | :"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11" + | OpenAI::Models::AllModels::responses_only_model module AllModels extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11")] + type responses_only_model = + :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum + + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + + def self?.values: -> ::Array[OpenAI::Models::AllModels::responses_only_model] + end + + def self?.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::AllModels::responses_only_model] end end end diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs new file mode 100644 index 00000000..81c23475 --- /dev/null +++ b/sig/openai/models/eval_create_params.rbs @@ -0,0 +1,309 @@ +module OpenAI + module Models + type eval_create_params = + { + data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + metadata: OpenAI::Models::metadata?, + name: String, + share_with_openai: bool + } + & OpenAI::Internal::Type::request_parameters + + class EvalCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor data_source_config: OpenAI::Models::EvalCreateParams::data_source_config + + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion] + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_reader name: String? + + def name=: (String) -> String + + attr_reader share_with_openai: bool? 
+ + def share_with_openai=: (bool) -> bool + + def initialize: ( + data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?share_with_openai: bool, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::eval_create_params + + type data_source_config = + OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom + | OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + type custom = + { + item_schema: ::Hash[Symbol, top], + type: :custom, + include_sample_schema: bool + } + + class Custom < OpenAI::Internal::Type::BaseModel + attr_accessor item_schema: ::Hash[Symbol, top] + + attr_accessor type: :custom + + attr_reader include_sample_schema: bool? + + def include_sample_schema=: (bool) -> bool + + def initialize: ( + item_schema: ::Hash[Symbol, top], + ?include_sample_schema: bool, + ?type: :custom + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::custom + end + + type stored_completions = + { type: :stored_completions, metadata: OpenAI::Models::metadata? } + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + attr_accessor type: :stored_completions + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + ?metadata: OpenAI::Models::metadata?, + ?type: :stored_completions + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::stored_completions + end + + def self?.variants: -> [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + end + + type testing_criterion = + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel + | OpenAI::Models::EvalStringCheckGrader + | OpenAI::Models::EvalTextSimilarityGrader + + module TestingCriterion + extend OpenAI::Internal::Type::Union + + type label_model = + { + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + + class LabelModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input] + + attr_accessor labels: ::Array[String] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor passing_labels: ::Array[String] + + attr_accessor type: :label_model + + def initialize: ( + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + ?type: :label_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::label_model + + type input = + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage + | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage + | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + + module Input + extend OpenAI::Internal::Type::Union + + type simple_input_message = { content: String, role: String } + + class SimpleInputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: 
String) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::simple_input_message + end + + type input_message = + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + } + + class InputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content + + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role + + attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + + def initialize: ( + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::input_message + + type content = + { + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::content + + type type_ = :input_text + + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT: :input_text + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_] + end + end + + type role = :user | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_] + end + end + + type output_message = + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ + } + + class OutputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content + + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role + + attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ + + def initialize: ( + content: 
OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::output_message + + type content = + { + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::content + + type type_ = :output_text + + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT: :output_text + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_] + end + end + + type role = :assistant + + module Role + extend OpenAI::Internal::Type::Enum + + ASSISTANT: :assistant + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_] + end + end + + def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage] + end + end + + def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + end + end + end +end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs new file mode 100644 index 00000000..91552ac6 --- /dev/null +++ b/sig/openai/models/eval_create_response.rbs @@ -0,0 +1,67 @@ +module OpenAI + module Models + type eval_create_response = + { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] + } + + class EvalCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config + + attr_accessor metadata: OpenAI::Models::metadata? 
+ + attr_accessor name: String + + attr_accessor object: :eval + + attr_accessor share_with_openai: bool + + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] + + def initialize: ( + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion], + ?object: :eval + ) -> void + + def to_hash: -> OpenAI::Models::eval_create_response + + type data_source_config = + OpenAI::Models::EvalCustomDataSourceConfig + | OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + end + + type testing_criterion = + OpenAI::Models::EvalLabelModelGrader + | OpenAI::Models::EvalStringCheckGrader + | OpenAI::Models::EvalTextSimilarityGrader + + module TestingCriterion + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + end + end + end +end diff --git a/sig/openai/models/eval_custom_data_source_config.rbs b/sig/openai/models/eval_custom_data_source_config.rbs new file mode 100644 index 00000000..d87e099d --- /dev/null +++ b/sig/openai/models/eval_custom_data_source_config.rbs @@ -0,0 +1,16 @@ +module OpenAI + module Models + type eval_custom_data_source_config = + { schema: ::Hash[Symbol, top], type: :custom } + + class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :custom + + def initialize: (schema: ::Hash[Symbol, top], ?type: :custom) -> void + + def to_hash: -> OpenAI::Models::eval_custom_data_source_config + end + end +end diff --git a/sig/openai/models/eval_delete_params.rbs b/sig/openai/models/eval_delete_params.rbs new file mode 100644 index 00000000..cb820afd --- /dev/null +++ b/sig/openai/models/eval_delete_params.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + type eval_delete_params = { } & OpenAI::Internal::Type::request_parameters + + class EvalDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> OpenAI::Models::eval_delete_params + end + end +end diff --git a/sig/openai/models/eval_delete_response.rbs b/sig/openai/models/eval_delete_response.rbs new file mode 100644 index 00000000..e06e798b --- /dev/null +++ b/sig/openai/models/eval_delete_response.rbs @@ -0,0 +1,18 @@ +module OpenAI + module Models + type eval_delete_response = + { deleted: bool, eval_id: String, object: String } + + class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel + attr_accessor deleted: bool + + attr_accessor eval_id: String + + attr_accessor object: String + + def initialize: (deleted: bool, eval_id: String, object: String) -> void + + def to_hash: -> OpenAI::Models::eval_delete_response + end + end +end diff --git a/sig/openai/models/eval_label_model_grader.rbs b/sig/openai/models/eval_label_model_grader.rbs new file mode 100644 index 00000000..af0c4a5f --- /dev/null +++ b/sig/openai/models/eval_label_model_grader.rbs @@ -0,0 +1,184 @@ +module OpenAI 
+ module Models + type eval_label_model_grader = + { + input: ::Array[OpenAI::Models::EvalLabelModelGrader::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + + class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalLabelModelGrader::input] + + attr_accessor labels: ::Array[String] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor passing_labels: ::Array[String] + + attr_accessor type: :label_model + + def initialize: ( + input: ::Array[OpenAI::Models::EvalLabelModelGrader::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + ?type: :label_model + ) -> void + + def to_hash: -> OpenAI::Models::eval_label_model_grader + + type input = + OpenAI::Models::EvalLabelModelGrader::Input::Assistant + | OpenAI::Models::EvalLabelModelGrader::Input::InputMessage + + module Input + extend OpenAI::Internal::Type::Union + + type input_message = + { + content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, + role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ + } + + class InputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content + + attr_accessor role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role + + attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ + + def initialize: ( + content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, + role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::input_message + + type content = + { + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::content + + type type_ = :input_text + + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT: :input_text + + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_] + end + end + + type role = :user | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_] + end + end + + type assistant = + { + content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, + role: :assistant, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_ + } + + class Assistant < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content + + attr_accessor role: 
:assistant + + attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_ + + def initialize: ( + content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_, + ?role: :assistant + ) -> void + + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::assistant + + type content = + { + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::Assistant::content + + type type_ = :output_text + + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT: :output_text + + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_] + end + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_] + end + end + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage] + end + end + end +end diff --git a/sig/openai/models/eval_list_params.rbs b/sig/openai/models/eval_list_params.rbs new file mode 100644 index 00000000..f5a4316b --- /dev/null +++ b/sig/openai/models/eval_list_params.rbs @@ -0,0 +1,69 @@ +module OpenAI + module Models + type eval_list_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::order, + order_by: OpenAI::Models::EvalListParams::order_by + } + & OpenAI::Internal::Type::request_parameters + + class EvalListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::EvalListParams::order? + + def order=: ( + OpenAI::Models::EvalListParams::order + ) -> OpenAI::Models::EvalListParams::order + + attr_reader order_by: OpenAI::Models::EvalListParams::order_by? 
+ + def order_by=: ( + OpenAI::Models::EvalListParams::order_by + ) -> OpenAI::Models::EvalListParams::order_by + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::EvalListParams::order, + ?order_by: OpenAI::Models::EvalListParams::order_by, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::eval_list_params + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::EvalListParams::order] + end + + type order_by = :created_at | :updated_at + + module OrderBy + extend OpenAI::Internal::Type::Enum + + CREATED_AT: :created_at + UPDATED_AT: :updated_at + + def self?.values: -> ::Array[OpenAI::Models::EvalListParams::order_by] + end + end + end +end diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs new file mode 100644 index 00000000..25f5a62c --- /dev/null +++ b/sig/openai/models/eval_list_response.rbs @@ -0,0 +1,67 @@ +module OpenAI + module Models + type eval_list_response = + { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalListResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] + } + + class EvalListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source_config: OpenAI::Models::EvalListResponse::data_source_config + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_accessor name: String + + attr_accessor object: :eval + + attr_accessor share_with_openai: bool + + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] + + def initialize: ( + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalListResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion], + ?object: :eval + ) -> void + + def to_hash: -> OpenAI::Models::eval_list_response + + type data_source_config = + OpenAI::Models::EvalCustomDataSourceConfig + | OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + end + + type testing_criterion = + OpenAI::Models::EvalLabelModelGrader + | OpenAI::Models::EvalStringCheckGrader + | OpenAI::Models::EvalTextSimilarityGrader + + module TestingCriterion + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + end + end + end +end diff --git a/sig/openai/models/eval_retrieve_params.rbs b/sig/openai/models/eval_retrieve_params.rbs new file mode 100644 index 00000000..9b2a4444 --- /dev/null +++ b/sig/openai/models/eval_retrieve_params.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + type eval_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters + + class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: 
OpenAI::request_opts) -> void + + def to_hash: -> OpenAI::Models::eval_retrieve_params + end + end +end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs new file mode 100644 index 00000000..39e82c42 --- /dev/null +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -0,0 +1,67 @@ +module OpenAI + module Models + type eval_retrieve_response = + { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] + } + + class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_accessor name: String + + attr_accessor object: :eval + + attr_accessor share_with_openai: bool + + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] + + def initialize: ( + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion], + ?object: :eval + ) -> void + + def to_hash: -> OpenAI::Models::eval_retrieve_response + + type data_source_config = + OpenAI::Models::EvalCustomDataSourceConfig + | OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + end + + type testing_criterion = + OpenAI::Models::EvalLabelModelGrader + | OpenAI::Models::EvalStringCheckGrader + | OpenAI::Models::EvalTextSimilarityGrader + + module TestingCriterion + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + end + end + end +end diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs new file mode 100644 index 00000000..b99a1a02 --- /dev/null +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + type eval_stored_completions_data_source_config = + { + schema: ::Hash[Symbol, top], + type: :stored_completions, + metadata: OpenAI::Models::metadata? + } + + class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :stored_completions + + attr_accessor metadata: OpenAI::Models::metadata? 
+ + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :stored_completions + ) -> void + + def to_hash: -> OpenAI::Models::eval_stored_completions_data_source_config + end + end +end diff --git a/sig/openai/models/eval_string_check_grader.rbs b/sig/openai/models/eval_string_check_grader.rbs new file mode 100644 index 00000000..c7d89a65 --- /dev/null +++ b/sig/openai/models/eval_string_check_grader.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + type eval_string_check_grader = + { + input: String, + name: String, + operation: OpenAI::Models::EvalStringCheckGrader::operation, + reference: String, + type: :string_check + } + + class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel + attr_accessor input: String + + attr_accessor name: String + + attr_accessor operation: OpenAI::Models::EvalStringCheckGrader::operation + + attr_accessor reference: String + + attr_accessor type: :string_check + + def initialize: ( + input: String, + name: String, + operation: OpenAI::Models::EvalStringCheckGrader::operation, + reference: String, + ?type: :string_check + ) -> void + + def to_hash: -> OpenAI::Models::eval_string_check_grader + + type operation = :eq | :ne | :like | :ilike + + module Operation + extend OpenAI::Internal::Type::Enum + + EQ: :eq + NE: :ne + LIKE: :like + ILIKE: :ilike + + def self?.values: -> ::Array[OpenAI::Models::EvalStringCheckGrader::operation] + end + end + end +end diff --git a/sig/openai/models/eval_text_similarity_grader.rbs b/sig/openai/models/eval_text_similarity_grader.rbs new file mode 100644 index 00000000..7b15ca88 --- /dev/null +++ b/sig/openai/models/eval_text_similarity_grader.rbs @@ -0,0 +1,71 @@ +module OpenAI + module Models + type eval_text_similarity_grader = + { + evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric, + input: String, + pass_threshold: Float, + reference: String, + type: :text_similarity, + name: String + } + + class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel + attr_accessor evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric + + attr_accessor input: String + + attr_accessor pass_threshold: Float + + attr_accessor reference: String + + attr_accessor type: :text_similarity + + attr_reader name: String? 
+ + def name=: (String) -> String + + def initialize: ( + evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric, + input: String, + pass_threshold: Float, + reference: String, + ?name: String, + ?type: :text_similarity + ) -> void + + def to_hash: -> OpenAI::Models::eval_text_similarity_grader + + type evaluation_metric = + :fuzzy_match + | :bleu + | :gleu + | :meteor + | :rouge_1 + | :rouge_2 + | :rouge_3 + | :rouge_4 + | :rouge_5 + | :rouge_l + | :cosine + + module EvaluationMetric + extend OpenAI::Internal::Type::Enum + + FUZZY_MATCH: :fuzzy_match + BLEU: :bleu + GLEU: :gleu + METEOR: :meteor + ROUGE_1: :rouge_1 + ROUGE_2: :rouge_2 + ROUGE_3: :rouge_3 + ROUGE_4: :rouge_4 + ROUGE_5: :rouge_5 + ROUGE_L: :rouge_l + COSINE: :cosine + + def self?.values: -> ::Array[OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric] + end + end + end +end diff --git a/sig/openai/models/eval_update_params.rbs b/sig/openai/models/eval_update_params.rbs new file mode 100644 index 00000000..aa2b67fa --- /dev/null +++ b/sig/openai/models/eval_update_params.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + type eval_update_params = + { metadata: OpenAI::Models::metadata?, name: String } + & OpenAI::Internal::Type::request_parameters + + class EvalUpdateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_reader name: String? + + def name=: (String) -> String + + def initialize: ( + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::eval_update_params + end + end +end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs new file mode 100644 index 00000000..6aad41bb --- /dev/null +++ b/sig/openai/models/eval_update_response.rbs @@ -0,0 +1,67 @@ +module OpenAI + module Models + type eval_update_response = + { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalUpdateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] + } + + class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source_config: OpenAI::Models::EvalUpdateResponse::data_source_config + + attr_accessor metadata: OpenAI::Models::metadata? 
+ + attr_accessor name: String + + attr_accessor object: :eval + + attr_accessor share_with_openai: bool + + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] + + def initialize: ( + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalUpdateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + share_with_openai: bool, + testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion], + ?object: :eval + ) -> void + + def to_hash: -> OpenAI::Models::eval_update_response + + type data_source_config = + OpenAI::Models::EvalCustomDataSourceConfig + | OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + module DataSourceConfig + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + end + + type testing_criterion = + OpenAI::Models::EvalLabelModelGrader + | OpenAI::Models::EvalStringCheckGrader + | OpenAI::Models::EvalTextSimilarityGrader + + module TestingCriterion + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + end + end + end +end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs new file mode 100644 index 00000000..de1f4c07 --- /dev/null +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -0,0 +1,394 @@ +module OpenAI + module Models + module Evals + type create_eval_completions_run_data_source = + { + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + model: String, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + } + + class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + + attr_accessor model: String + + attr_accessor source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source + + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_ + + attr_reader sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + + def initialize: ( + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + model: String, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + ?sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + ) -> void + + def to_hash: -> OpenAI::Models::Evals::create_eval_completions_run_data_source + + type input_messages = + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::template + + type template = + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::chat_message + end + + type input_message = + { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ + } + + class InputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content + + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role + + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ + + def initialize: ( + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role, + type: 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::input_message + + type content = + { + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::content + + type type_ = :input_text + + module Type + extend OpenAI::Internal::Type::Enum + + INPUT_TEXT: :input_text + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_] + end + end + + type role = :user | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_] + end + end + + type output_message = + { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + } + + class OutputMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content + + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role + + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + + def initialize: ( + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::output_message + + type content = + { + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ + } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ + + def initialize: ( + text: String, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::content + + type type_ = :output_text + + module Type + extend OpenAI::Internal::Type::Enum + + OUTPUT_TEXT: :output_text + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_] + end + end + + type role = :assistant + + module Role + extend OpenAI::Internal::Type::Enum + + ASSISTANT: :assistant + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + end + + type source = + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_id + end + + type stored_completions = + { + created_after: Integer?, + created_before: Integer?, + limit: Integer?, + metadata: OpenAI::Models::metadata?, + model: String?, + type: :stored_completions + } + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor limit: Integer? + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_accessor model: String? + + attr_accessor type: :stored_completions + + def initialize: ( + created_after: Integer?, + created_before: Integer?, + limit: Integer?, + metadata: OpenAI::Models::metadata?, + model: String?, + ?type: :stored_completions + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::stored_completions + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + end + + type type_ = :completions + + module Type + extend OpenAI::Internal::Type::Enum + + COMPLETIONS: :completions + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::sampling_params + end + end + end + end +end diff --git a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs new file mode 100644 index 00000000..46524f62 --- /dev/null +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -0,0 +1,83 @@ +module OpenAI + module Models + module Evals + type create_eval_jsonl_run_data_source = + { + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + type: :jsonl + } + + class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source + + attr_accessor type: :jsonl + + def initialize: ( + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + ?type: :jsonl + ) -> void + + def to_hash: -> OpenAI::Models::Evals::create_eval_jsonl_run_data_source + + type source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent + | OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::file_id + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + end + end + end + end +end diff --git a/sig/openai/models/evals/eval_api_error.rbs b/sig/openai/models/evals/eval_api_error.rbs new file mode 100644 index 00000000..eaf19bc5 --- /dev/null +++ b/sig/openai/models/evals/eval_api_error.rbs @@ -0,0 +1,19 @@ +module OpenAI + module Models + class EvalAPIError = Evals::EvalAPIError + + module Evals + type eval_api_error = { code: String, message: String } + + class EvalAPIError < OpenAI::Internal::Type::BaseModel + attr_accessor code: String + + attr_accessor message: String + + def initialize: (code: String, message: String) -> void + + def to_hash: -> OpenAI::Models::Evals::eval_api_error + end + end + end +end diff --git a/sig/openai/models/evals/run_cancel_params.rbs b/sig/openai/models/evals/run_cancel_params.rbs new file mode 100644 index 00000000..fd4d80f8 --- /dev/null +++ b/sig/openai/models/evals/run_cancel_params.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Models + module Evals + type run_cancel_params = + { eval_id: String } & OpenAI::Internal::Type::request_parameters + + class RunCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor eval_id: String + + def initialize: ( + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_cancel_params + end + end + end +end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs new file mode 100644 index 00000000..5a6e54d7 --- /dev/null +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -0,0 +1,158 @@ +module OpenAI + module Models + module Evals + type run_cancel_response = + { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + + class RunCancelResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source: OpenAI::Models::Evals::RunCancelResponse::data_source + + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor eval_id: String + + attr_accessor metadata: OpenAI::Models::metadata? 
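+
+        # A hedged usage sketch (assumes a configured OpenAI::Client; the ids
+        # are placeholders), showing the call that produces this response:
+        #
+        #   run = client.evals.runs.cancel("run-id", eval_id: "eval-id")
+        #   run.status # => e.g. "canceled"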
+ + attr_accessor model: String + + attr_accessor name: String + + attr_accessor object: :"eval.run" + + attr_accessor per_model_usage: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage] + + attr_accessor per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult] + + attr_accessor report_url: String + + attr_accessor result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + per_model_usage: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String, + ?object: :"eval.run" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_cancel_response + + type data_source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + + module DataSource + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + end + + type per_model_usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + attr_accessor completion_tokens: Integer + + attr_accessor invocation_count: Integer + + attr_accessor model_name: String + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::per_model_usage + end + + type per_testing_criteria_result = + { failed: Integer, passed: Integer, testing_criteria: String } + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor testing_criteria: String + + def initialize: ( + failed: Integer, + passed: Integer, + testing_criteria: String + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::per_testing_criteria_result + end + + type result_counts = + { errored: Integer, failed: Integer, passed: Integer, total: Integer } + + class ResultCounts < OpenAI::Internal::Type::BaseModel + attr_accessor errored: Integer + + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor total: Integer + + def initialize: ( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::result_counts + end + end + end + end +end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs new file mode 100644 index 00000000..326d6f6d --- /dev/null +++ b/sig/openai/models/evals/run_create_params.rbs @@ -0,0 +1,45 @@ +module OpenAI + module Models + module Evals + type 
run_create_params = + { + data_source: OpenAI::Models::Evals::RunCreateParams::data_source, + metadata: OpenAI::Models::metadata?, + name: String + } + & OpenAI::Internal::Type::request_parameters + + class RunCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor data_source: OpenAI::Models::Evals::RunCreateParams::data_source + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_reader name: String? + + def name=: (String) -> String + + def initialize: ( + data_source: OpenAI::Models::Evals::RunCreateParams::data_source, + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_create_params + + type data_source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + + module DataSource + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + end + end + end + end +end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs new file mode 100644 index 00000000..3840e50a --- /dev/null +++ b/sig/openai/models/evals/run_create_response.rbs @@ -0,0 +1,158 @@ +module OpenAI + module Models + module Evals + type run_create_response = + { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + + class RunCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source: OpenAI::Models::Evals::RunCreateResponse::data_source + + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor eval_id: String + + attr_accessor metadata: OpenAI::Models::metadata? 
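+
+        # A hedged usage sketch (assumes a configured OpenAI::Client; the ids
+        # are placeholders), creating the run that yields this response from a
+        # JSONL data source:
+        #
+        #   run = client.evals.runs.create(
+        #     "eval-id",
+        #     data_source: {type: :jsonl, source: {type: :file_id, id: "file-abc123"}},
+        #     name: "nightly-run"
+        #   )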
+ + attr_accessor model: String + + attr_accessor name: String + + attr_accessor object: :"eval.run" + + attr_accessor per_model_usage: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage] + + attr_accessor per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult] + + attr_accessor report_url: String + + attr_accessor result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + per_model_usage: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String, + ?object: :"eval.run" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_create_response + + type data_source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + + module DataSource + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + end + + type per_model_usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + attr_accessor completion_tokens: Integer + + attr_accessor invocation_count: Integer + + attr_accessor model_name: String + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::per_model_usage + end + + type per_testing_criteria_result = + { failed: Integer, passed: Integer, testing_criteria: String } + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor testing_criteria: String + + def initialize: ( + failed: Integer, + passed: Integer, + testing_criteria: String + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::per_testing_criteria_result + end + + type result_counts = + { errored: Integer, failed: Integer, passed: Integer, total: Integer } + + class ResultCounts < OpenAI::Internal::Type::BaseModel + attr_accessor errored: Integer + + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor total: Integer + + def initialize: ( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::result_counts + end + end + end + end +end diff --git a/sig/openai/models/evals/run_delete_params.rbs b/sig/openai/models/evals/run_delete_params.rbs new file mode 100644 index 00000000..db7ba2df --- /dev/null +++ b/sig/openai/models/evals/run_delete_params.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Models + module Evals + type 
run_delete_params = + { eval_id: String } & OpenAI::Internal::Type::request_parameters + + class RunDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor eval_id: String + + def initialize: ( + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_delete_params + end + end + end +end diff --git a/sig/openai/models/evals/run_delete_response.rbs b/sig/openai/models/evals/run_delete_response.rbs new file mode 100644 index 00000000..c3fe313f --- /dev/null +++ b/sig/openai/models/evals/run_delete_response.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Evals + type run_delete_response = + { deleted: bool, object: String, run_id: String } + + class RunDeleteResponse < OpenAI::Internal::Type::BaseModel + attr_reader deleted: bool? + + def deleted=: (bool) -> bool + + attr_reader object: String? + + def object=: (String) -> String + + attr_reader run_id: String? + + def run_id=: (String) -> String + + def initialize: ( + ?deleted: bool, + ?object: String, + ?run_id: String + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_delete_response + end + end + end +end diff --git a/sig/openai/models/evals/run_list_params.rbs b/sig/openai/models/evals/run_list_params.rbs new file mode 100644 index 00000000..7a65ceef --- /dev/null +++ b/sig/openai/models/evals/run_list_params.rbs @@ -0,0 +1,74 @@ +module OpenAI + module Models + module Evals + type run_list_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::order, + status: OpenAI::Models::Evals::RunListParams::status + } + & OpenAI::Internal::Type::request_parameters + + class RunListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::Evals::RunListParams::order? + + def order=: ( + OpenAI::Models::Evals::RunListParams::order + ) -> OpenAI::Models::Evals::RunListParams::order + + attr_reader status: OpenAI::Models::Evals::RunListParams::status? 
+ + def status=: ( + OpenAI::Models::Evals::RunListParams::status + ) -> OpenAI::Models::Evals::RunListParams::status + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Evals::RunListParams::order, + ?status: OpenAI::Models::Evals::RunListParams::status, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_list_params + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListParams::order] + end + + type status = :queued | :in_progress | :completed | :canceled | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + QUEUED: :queued + IN_PROGRESS: :in_progress + COMPLETED: :completed + CANCELED: :canceled + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListParams::status] + end + end + end + end +end diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs new file mode 100644 index 00000000..8a12f80e --- /dev/null +++ b/sig/openai/models/evals/run_list_response.rbs @@ -0,0 +1,158 @@ +module OpenAI + module Models + module Evals + type run_list_response = + { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunListResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + + class RunListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source: OpenAI::Models::Evals::RunListResponse::data_source + + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor eval_id: String + + attr_accessor metadata: OpenAI::Models::metadata? 
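+
+        # A hedged usage sketch (assumes a configured OpenAI::Client; the id is
+        # a placeholder, and auto_paging_each is assumed to be available on the
+        # returned cursor page), listing runs newest first:
+        #
+        #   page = client.evals.runs.list("eval-id", limit: 20, order: :desc)
+        #   page.auto_paging_each { |run| puts run.name }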
+ + attr_accessor model: String + + attr_accessor name: String + + attr_accessor object: :"eval.run" + + attr_accessor per_model_usage: ::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage] + + attr_accessor per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult] + + attr_accessor report_url: String + + attr_accessor result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunListResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + per_model_usage: ::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String, + ?object: :"eval.run" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_list_response + + type data_source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + + module DataSource + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + end + + type per_model_usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + attr_accessor completion_tokens: Integer + + attr_accessor invocation_count: Integer + + attr_accessor model_name: String + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::per_model_usage + end + + type per_testing_criteria_result = + { failed: Integer, passed: Integer, testing_criteria: String } + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor testing_criteria: String + + def initialize: ( + failed: Integer, + passed: Integer, + testing_criteria: String + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::per_testing_criteria_result + end + + type result_counts = + { errored: Integer, failed: Integer, passed: Integer, total: Integer } + + class ResultCounts < OpenAI::Internal::Type::BaseModel + attr_accessor errored: Integer + + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor total: Integer + + def initialize: ( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::result_counts + end + end + end + end +end diff --git a/sig/openai/models/evals/run_retrieve_params.rbs b/sig/openai/models/evals/run_retrieve_params.rbs new file mode 100644 index 00000000..50130d18 --- /dev/null +++ b/sig/openai/models/evals/run_retrieve_params.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Models + module Evals + type run_retrieve_params = + 
{ eval_id: String } & OpenAI::Internal::Type::request_parameters + + class RunRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor eval_id: String + + def initialize: ( + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_retrieve_params + end + end + end +end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs new file mode 100644 index 00000000..5a9a33ba --- /dev/null +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -0,0 +1,158 @@ +module OpenAI + module Models + module Evals + type run_retrieve_response = + { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + + class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source + + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor eval_id: String + + attr_accessor metadata: OpenAI::Models::metadata? + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor object: :"eval.run" + + attr_accessor per_model_usage: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage] + + attr_accessor per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult] + + attr_accessor report_url: String + + attr_accessor result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + per_model_usage: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String, + ?object: :"eval.run" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::run_retrieve_response + + type data_source = + OpenAI::Models::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + + module DataSource + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + end + + type per_model_usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + + class PerModelUsage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + 
attr_accessor completion_tokens: Integer + + attr_accessor invocation_count: Integer + + attr_accessor model_name: String + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::per_model_usage + end + + type per_testing_criteria_result = + { failed: Integer, passed: Integer, testing_criteria: String } + + class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor testing_criteria: String + + def initialize: ( + failed: Integer, + passed: Integer, + testing_criteria: String + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::per_testing_criteria_result + end + + type result_counts = + { errored: Integer, failed: Integer, passed: Integer, total: Integer } + + class ResultCounts < OpenAI::Internal::Type::BaseModel + attr_accessor errored: Integer + + attr_accessor failed: Integer + + attr_accessor passed: Integer + + attr_accessor total: Integer + + def initialize: ( + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::result_counts + end + end + end + end +end diff --git a/sig/openai/models/evals/runs/output_item_list_params.rbs b/sig/openai/models/evals/runs/output_item_list_params.rbs new file mode 100644 index 00000000..53ec2a12 --- /dev/null +++ b/sig/openai/models/evals/runs/output_item_list_params.rbs @@ -0,0 +1,77 @@ +module OpenAI + module Models + module Evals + module Runs + type output_item_list_params = + { + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::order, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::status + } + & OpenAI::Internal::Type::request_parameters + + class OutputItemListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor eval_id: String + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::Evals::Runs::OutputItemListParams::order? + + def order=: ( + OpenAI::Models::Evals::Runs::OutputItemListParams::order + ) -> OpenAI::Models::Evals::Runs::OutputItemListParams::order + + attr_reader status: OpenAI::Models::Evals::Runs::OutputItemListParams::status? 
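+
+            # A hedged usage sketch (assumes a configured OpenAI::Client; the
+            # ids are placeholders), filtering output items by this status:
+            #
+            #   items = client.evals.runs.output_items.list(
+            #     "run-id",
+            #     eval_id: "eval-id",
+            #     status: :fail
+            #   )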
+ + def status=: ( + OpenAI::Models::Evals::Runs::OutputItemListParams::status + ) -> OpenAI::Models::Evals::Runs::OutputItemListParams::status + + def initialize: ( + eval_id: String, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Evals::Runs::OutputItemListParams::order, + ?status: OpenAI::Models::Evals::Runs::OutputItemListParams::status, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::output_item_list_params + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::order] + end + + type status = :fail | :pass + + module Status + extend OpenAI::Internal::Type::Enum + + FAIL: :fail + PASS: :pass + + def self?.values: -> ::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::status] + end + end + end + end + end +end diff --git a/sig/openai/models/evals/runs/output_item_list_response.rbs b/sig/openai/models/evals/runs/output_item_list_response.rbs new file mode 100644 index 00000000..f33ed688 --- /dev/null +++ b/sig/openai/models/evals/runs/output_item_list_response.rbs @@ -0,0 +1,164 @@ +module OpenAI + module Models + module Evals + module Runs + type output_item_list_response = + { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + + class OutputItemListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor datasource_item: ::Hash[Symbol, top] + + attr_accessor datasource_item_id: Integer + + attr_accessor eval_id: String + + attr_accessor object: :"eval.run.output_item" + + attr_accessor results: ::Array[::Hash[Symbol, top]] + + attr_accessor run_id: String + + attr_accessor sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String, + ?object: :"eval.run.output_item" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::output_item_list_response + + type sample = + { + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + } + + class Sample < OpenAI::Internal::Type::BaseModel + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor finish_reason: String + + attr_accessor input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input] + + attr_accessor max_completion_tokens: Integer + + attr_accessor model: String + + attr_accessor output: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output] + + attr_accessor seed: Integer + + attr_accessor temperature: Float + + attr_accessor 
top_p: Float + + attr_accessor usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + + def initialize: ( + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::sample + + type input = { content: String, role: String } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::input + end + + type output = { content: String, role: String } + + class Output < OpenAI::Internal::Type::BaseModel + attr_reader content: String? + + def content=: (String) -> String + + attr_reader role: String? + + def role=: (String) -> String + + def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::output + end + + type usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + attr_accessor completion_tokens: Integer + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::usage + end + end + end + end + end + end +end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs new file mode 100644 index 00000000..1631b85e --- /dev/null +++ b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Models + module Evals + module Runs + type output_item_retrieve_params = + { eval_id: String, run_id: String } + & OpenAI::Internal::Type::request_parameters + + class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor eval_id: String + + attr_accessor run_id: String + + def initialize: ( + eval_id: String, + run_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::output_item_retrieve_params + end + end + end + end +end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs new file mode 100644 index 00000000..fc662a02 --- /dev/null +++ b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs @@ -0,0 +1,164 @@ +module OpenAI + module Models + module Evals + module Runs + type output_item_retrieve_response = + { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: 
OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + + class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor datasource_item: ::Hash[Symbol, top] + + attr_accessor datasource_item_id: Integer + + attr_accessor eval_id: String + + attr_accessor object: :"eval.run.output_item" + + attr_accessor results: ::Array[::Hash[Symbol, top]] + + attr_accessor run_id: String + + attr_accessor sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample + + attr_accessor status: String + + def initialize: ( + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String, + ?object: :"eval.run.output_item" + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::output_item_retrieve_response + + type sample = + { + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + } + + class Sample < OpenAI::Internal::Type::BaseModel + attr_accessor error: OpenAI::Models::Evals::EvalAPIError + + attr_accessor finish_reason: String + + attr_accessor input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input] + + attr_accessor max_completion_tokens: Integer + + attr_accessor model: String + + attr_accessor output: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output] + + attr_accessor seed: Integer + + attr_accessor temperature: Float + + attr_accessor top_p: Float + + attr_accessor usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + + def initialize: ( + error: OpenAI::Models::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::sample + + type input = { content: String, role: String } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::input + end + + type output = { content: String, role: String } + + class Output < OpenAI::Internal::Type::BaseModel + attr_reader content: String? + + def content=: (String) -> String + + attr_reader role: String? 
+ + def role=: (String) -> String + + def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::output + end + + type usage = + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor cached_tokens: Integer + + attr_accessor completion_tokens: Integer + + attr_accessor prompt_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::usage + end + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs new file mode 100644 index 00000000..2a715c70 --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs @@ -0,0 +1,25 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_create_params = + { project_ids: ::Array[String] } + & OpenAI::Internal::Type::request_parameters + + class PermissionCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor project_ids: ::Array[String] + + def initialize: ( + project_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_create_params + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs new file mode 100644 index 00000000..65e33f69 --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs @@ -0,0 +1,34 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_create_response = + { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } + + class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"checkpoint.permission" + + attr_accessor project_id: String + + def initialize: ( + id: String, + created_at: Integer, + project_id: String, + ?object: :"checkpoint.permission" + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_create_response + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs new file mode 100644 index 00000000..142fe252 --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs @@ -0,0 +1,19 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_delete_params = + { } & OpenAI::Internal::Type::request_parameters + + class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> 
OpenAI::Models::FineTuning::Checkpoints::permission_delete_params + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs new file mode 100644 index 00000000..f3920ae7 --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_delete_response = + { id: String, deleted: bool, object: :"checkpoint.permission" } + + class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor deleted: bool + + attr_accessor object: :"checkpoint.permission" + + def initialize: ( + id: String, + deleted: bool, + ?object: :"checkpoint.permission" + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_delete_response + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs new file mode 100644 index 00000000..fe2c9c7d --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs @@ -0,0 +1,60 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_retrieve_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, + project_id: String + } + & OpenAI::Internal::Type::request_parameters + + class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order? + + def order=: ( + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order + ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order + + attr_reader project_id: String? + + def project_id=: (String) -> String + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, + ?project_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_retrieve_params + + type order = :ascending | :descending + + module Order + extend OpenAI::Internal::Type::Enum + + ASCENDING: :ascending + DESCENDING: :descending + + def self?.values: -> ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order] + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs new file mode 100644 index 00000000..061528d5 --- /dev/null +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -0,0 +1,65 @@ +module OpenAI + module Models + module FineTuning + module Checkpoints + type permission_retrieve_response = + { + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + object: :list, + first_id: String?, + last_id: String? 
+ } + + class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] + + attr_accessor has_more: bool + + attr_accessor object: :list + + attr_accessor first_id: String? + + attr_accessor last_id: String? + + def initialize: ( + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + ?first_id: String?, + ?last_id: String?, + ?object: :list + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_retrieve_response + + type data = + { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"checkpoint.permission" + + attr_accessor project_id: String + + def initialize: ( + id: String, + created_at: Integer, + project_id: String, + ?object: :"checkpoint.permission" + ) -> void + + def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::data + end + end + end + end + end +end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 2cb153e7..bc8de438 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -3,23 +3,29 @@ module OpenAI type responses_model = String | OpenAI::Models::chat_model - | :"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11" + | OpenAI::Models::ResponsesModel::responses_only_model module ResponsesModel extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::chat_model, (:"o1-pro" - | :"o1-pro-2025-03-19" - | :"computer-use-preview" - | :"computer-use-preview-2025-03-11")] + type responses_only_model = + :"o1-pro" + | :"o1-pro-2025-03-19" + | :"computer-use-preview" + | :"computer-use-preview-2025-03-11" - O1_PRO: :"o1-pro" - O1_PRO_2025_03_19: :"o1-pro-2025-03-19" - COMPUTER_USE_PREVIEW: :"computer-use-preview" - COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + module ResponsesOnlyModel + extend OpenAI::Internal::Type::Enum + + O1_PRO: :"o1-pro" + O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + COMPUTER_USE_PREVIEW: :"computer-use-preview" + COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" + + def self?.values: -> ::Array[OpenAI::Models::ResponsesModel::responses_only_model] + end + + def self?.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::ResponsesModel::responses_only_model] end end end diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 50be0608..fe3d90cd 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -49,7 +49,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def stream_raw: ( + def create_and_run_streaming: ( assistant_id: String, ?instructions: String?, ?max_completion_tokens: Integer?, diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index df8265f2..bdc66eb1 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -27,7 +27,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def create_stream_raw: ( + def create_streaming: ( String thread_id, assistant_id: 
String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def submit_tool_outputs_stream_raw: ( + def submit_tool_outputs_streaming: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 0e8a88ef..e1f27903 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -38,7 +38,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion - def stream_raw: ( + def create_streaming: ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, diff --git a/sig/openai/resources/evals.rbs b/sig/openai/resources/evals.rbs new file mode 100644 index 00000000..84d16747 --- /dev/null +++ b/sig/openai/resources/evals.rbs @@ -0,0 +1,43 @@ +module OpenAI + module Resources + class Evals + attr_reader runs: OpenAI::Resources::Evals::Runs + + def create: ( + data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?share_with_openai: bool, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::EvalCreateResponse + + def retrieve: ( + String eval_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::EvalRetrieveResponse + + def update: ( + String eval_id, + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::EvalUpdateResponse + + def list: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::EvalListParams::order, + ?order_by: OpenAI::Models::EvalListParams::order_by, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::EvalListResponse] + + def delete: ( + String eval_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::EvalDeleteResponse + + def initialize: (client: OpenAI::Client) -> void + end + end +end diff --git a/sig/openai/resources/evals/runs.rbs b/sig/openai/resources/evals/runs.rbs new file mode 100644 index 00000000..6f8f8f0d --- /dev/null +++ b/sig/openai/resources/evals/runs.rbs @@ -0,0 +1,46 @@ +module OpenAI + module Resources + class Evals + class Runs + attr_reader output_items: OpenAI::Resources::Evals::Runs::OutputItems + + def create: ( + String eval_id, + data_source: OpenAI::Models::Evals::RunCreateParams::data_source, + ?metadata: OpenAI::Models::metadata?, + ?name: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Evals::RunCreateResponse + + def retrieve: ( + String run_id, + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Evals::RunRetrieveResponse + + def list: ( + String eval_id, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Evals::RunListParams::order, + ?status: OpenAI::Models::Evals::RunListParams::status, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Evals::RunListResponse] + + def delete: ( + String run_id, + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Evals::RunDeleteResponse + + def 
cancel: ( + String run_id, + eval_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Evals::RunCancelResponse + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/evals/runs/output_items.rbs b/sig/openai/resources/evals/runs/output_items.rbs new file mode 100644 index 00000000..a6124836 --- /dev/null +++ b/sig/openai/resources/evals/runs/output_items.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Resources + class Evals + class Runs + class OutputItems + def retrieve: ( + String output_item_id, + eval_id: String, + run_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse + + def list: ( + String run_id, + eval_id: String, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Evals::Runs::OutputItemListParams::order, + ?status: OpenAI::Models::Evals::Runs::OutputItemListParams::status, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Evals::Runs::OutputItemListResponse] + + def initialize: (client: OpenAI::Client) -> void + end + end + end + end +end diff --git a/sig/openai/resources/fine_tuning.rbs b/sig/openai/resources/fine_tuning.rbs index a78fe81e..db3f1574 100644 --- a/sig/openai/resources/fine_tuning.rbs +++ b/sig/openai/resources/fine_tuning.rbs @@ -3,6 +3,8 @@ module OpenAI class FineTuning attr_reader jobs: OpenAI::Resources::FineTuning::Jobs + attr_reader checkpoints: OpenAI::Resources::FineTuning::Checkpoints + def initialize: (client: OpenAI::Client) -> void end end diff --git a/sig/openai/resources/fine_tuning/checkpoints.rbs b/sig/openai/resources/fine_tuning/checkpoints.rbs new file mode 100644 index 00000000..59e5e893 --- /dev/null +++ b/sig/openai/resources/fine_tuning/checkpoints.rbs @@ -0,0 +1,11 @@ +module OpenAI + module Resources + class FineTuning + class Checkpoints + attr_reader permissions: OpenAI::Resources::FineTuning::Checkpoints::Permissions + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs new file mode 100644 index 00000000..0033c40d --- /dev/null +++ b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs @@ -0,0 +1,31 @@ +module OpenAI + module Resources + class FineTuning + class Checkpoints + class Permissions + def create: ( + String fine_tuned_model_checkpoint, + project_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::Page[OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse] + + def retrieve: ( + String fine_tuned_model_checkpoint, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, + ?project_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + + def delete: ( + String fine_tuned_model_checkpoint, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse + + def initialize: (client: OpenAI::Client) -> void + end + end + end + end +end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 2dfcf44d..028c6cc6 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -24,7 +24,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Responses::Response - 
def stream_raw: ( + def create_streaming: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, diff --git a/test/openai/resource_namespaces.rb b/test/openai/resource_namespaces.rb index 2da61a74..c76a68aa 100644 --- a/test/openai/resource_namespaces.rb +++ b/test/openai/resource_namespaces.rb @@ -18,10 +18,21 @@ module Completions end end + module Checkpoints + end + module Completions end + module Evals + module Runs + end + end + module FineTuning + module Checkpoints + end + module Jobs end end diff --git a/test/openai/resources/evals/runs/output_items_test.rb b/test/openai/resources/evals/runs/output_items_test.rb new file mode 100644 index 00000000..344b8e1c --- /dev/null +++ b/test/openai/resources/evals/runs/output_items_test.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::Evals::Runs::OutputItemsTest < OpenAI::Test::ResourceTest + def test_retrieve_required_params + response = + @openai.evals.runs.output_items.retrieve("output_item_id", eval_id: "eval_id", run_id: "run_id") + + assert_pattern do + response => OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + datasource_item: ^(OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]), + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]]), + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + end + end + + def test_list_required_params + response = @openai.evals.runs.output_items.list("run_id", eval_id: "eval_id") + + assert_pattern do + response => OpenAI::Internal::CursorPage + end + + row = response.to_enum.first + return if row.nil? 
+ + assert_pattern do + row => OpenAI::Models::Evals::Runs::OutputItemListResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + datasource_item: ^(OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]), + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]]), + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + end + end +end diff --git a/test/openai/resources/evals/runs_test.rb b/test/openai/resources/evals/runs_test.rb new file mode 100644 index 00000000..d55ad6a8 --- /dev/null +++ b/test/openai/resources/evals/runs_test.rb @@ -0,0 +1,140 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::Evals::RunsTest < OpenAI::Test::ResourceTest + def test_create_required_params + response = + @openai.evals.runs.create( + "eval_id", + data_source: {source: {content: [{item: {foo: "bar"}}], type: :file_content}, type: :jsonl} + ) + + assert_pattern do + response => OpenAI::Models::Evals::RunCreateResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCreateResponse::DataSource, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + model: String, + name: String, + object: Symbol, + per_model_usage: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage]), + per_testing_criteria_results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult]), + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + end + end + + def test_retrieve_required_params + response = @openai.evals.runs.retrieve("run_id", eval_id: "eval_id") + + assert_pattern do + response => OpenAI::Models::Evals::RunRetrieveResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + model: String, + name: String, + object: Symbol, + per_model_usage: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage]), + per_testing_criteria_results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult]), + report_url: String, + result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + end + end + + def test_list + response = @openai.evals.runs.list("eval_id") + + assert_pattern do + response => OpenAI::Internal::CursorPage + end + + row = response.to_enum.first + return if row.nil? 
+ + assert_pattern do + row => OpenAI::Models::Evals::RunListResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunListResponse::DataSource, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + model: String, + name: String, + object: Symbol, + per_model_usage: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::PerModelUsage]), + per_testing_criteria_results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult]), + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + end + end + + def test_delete_required_params + response = @openai.evals.runs.delete("run_id", eval_id: "eval_id") + + assert_pattern do + response => OpenAI::Models::Evals::RunDeleteResponse + end + + assert_pattern do + response => { + deleted: OpenAI::Internal::Type::Boolean | nil, + object: String | nil, + run_id: String | nil + } + end + end + + def test_cancel_required_params + response = @openai.evals.runs.cancel("run_id", eval_id: "eval_id") + + assert_pattern do + response => OpenAI::Models::Evals::RunCancelResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCancelResponse::DataSource, + error: OpenAI::Models::Evals::EvalAPIError, + eval_id: String, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + model: String, + name: String, + object: Symbol, + per_model_usage: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage]), + per_testing_criteria_results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult]), + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + end + end +end diff --git a/test/openai/resources/evals_test.rb b/test/openai/resources/evals_test.rb new file mode 100644 index 00000000..546438d4 --- /dev/null +++ b/test/openai/resources/evals_test.rb @@ -0,0 +1,269 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::Resources::EvalsTest < OpenAI::Test::ResourceTest + def test_create_required_params + response = + @openai.evals.create( + data_source_config: { + item_schema: { + "0": "bar", + "1": "bar", + "2": "bar", + "3": "bar", + "4": "bar", + "5": "bar", + "6": "bar", + "7": "bar", + "8": "bar", + "9": "bar", + "10": "bar", + "11": "bar", + "12": "bar", + "13": "bar", + "14": "bar", + "15": "bar", + "16": "bar", + "17": "bar", + "18": "bar", + "19": "bar", + "20": "bar", + "21": "bar", + "22": "bar", + "23": "bar", + "24": "bar", + "25": "bar", + "26": "bar", + "27": "bar", + "28": "bar", + "29": "bar", + "30": "bar", + "31": "bar", + "32": "bar", + "33": "bar", + "34": "bar", + "35": "bar", + "36": "bar", + "37": "bar", + "38": "bar", + "39": "bar", + "40": "bar", + "41": "bar", + "42": "bar", + "43": "bar", + "44": "bar", + "45": "bar", + "46": "bar", + "47": "bar", + "48": "bar", + "49": "bar", + "50": "bar", + "51": "bar", + "52": "bar", + "53": "bar", + "54": "bar", + "55": "bar", + "56": "bar", + "57": "bar", + "58": "bar", + "59": "bar", + "60": "bar", + "61": "bar", + "62": "bar", + "63": "bar", + "64": "bar", + "65": "bar", + "66": "bar", + "67": "bar", + "68": "bar", + "69": "bar", + "70": "bar", + "71": "bar", + "72": "bar", + "73": "bar", + 
"74": "bar", + "75": "bar", + "76": "bar", + "77": "bar", + "78": "bar", + "79": "bar", + "80": "bar", + "81": "bar", + "82": "bar", + "83": "bar", + "84": "bar", + "85": "bar", + "86": "bar", + "87": "bar", + "88": "bar", + "89": "bar", + "90": "bar", + "91": "bar", + "92": "bar", + "93": "bar", + "94": "bar", + "95": "bar", + "96": "bar", + "97": "bar", + "98": "bar", + "99": "bar", + "100": "bar", + "101": "bar", + "102": "bar", + "103": "bar", + "104": "bar", + "105": "bar", + "106": "bar", + "107": "bar", + "108": "bar", + "109": "bar", + "110": "bar", + "111": "bar", + "112": "bar", + "113": "bar", + "114": "bar", + "115": "bar", + "116": "bar", + "117": "bar", + "118": "bar", + "119": "bar", + "120": "bar", + "121": "bar", + "122": "bar", + "123": "bar", + "124": "bar", + "125": "bar", + "126": "bar", + "127": "bar", + "128": "bar", + "129": "bar", + "130": "bar", + "131": "bar", + "132": "bar", + "133": "bar", + "134": "bar", + "135": "bar", + "136": "bar", + "137": "bar", + "138": "bar", + "139": "bar" + }, + type: :custom + }, + testing_criteria: [ + { + input: [{content: "content", role: "role"}], + labels: ["string"], + model: "model", + name: "name", + passing_labels: ["string"], + type: :label_model + } + ] + ) + + assert_pattern do + response => OpenAI::Models::EvalCreateResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalCreateResponse::DataSourceConfig, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + name: String, + object: Symbol, + share_with_openai: OpenAI::Internal::Type::Boolean, + testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion]) + } + end + end + + def test_retrieve + response = @openai.evals.retrieve("eval_id") + + assert_pattern do + response => OpenAI::Models::EvalRetrieveResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalRetrieveResponse::DataSourceConfig, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + name: String, + object: Symbol, + share_with_openai: OpenAI::Internal::Type::Boolean, + testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion]) + } + end + end + + def test_update + response = @openai.evals.update("eval_id") + + assert_pattern do + response => OpenAI::Models::EvalUpdateResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalUpdateResponse::DataSourceConfig, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + name: String, + object: Symbol, + share_with_openai: OpenAI::Internal::Type::Boolean, + testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion]) + } + end + end + + def test_list + response = @openai.evals.list + + assert_pattern do + response => OpenAI::Internal::CursorPage + end + + row = response.to_enum.first + return if row.nil? 
+ + assert_pattern do + row => OpenAI::Models::EvalListResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalListResponse::DataSourceConfig, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + name: String, + object: Symbol, + share_with_openai: OpenAI::Internal::Type::Boolean, + testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion]) + } + end + end + + def test_delete + response = @openai.evals.delete("eval_id") + + assert_pattern do + response => OpenAI::Models::EvalDeleteResponse + end + + assert_pattern do + response => { + deleted: OpenAI::Internal::Type::Boolean, + eval_id: String, + object: String + } + end + end +end diff --git a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb new file mode 100644 index 00000000..8ee93e49 --- /dev/null +++ b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::FineTuning::Checkpoints::PermissionsTest < OpenAI::Test::ResourceTest + def test_create_required_params + response = + @openai.fine_tuning.checkpoints.permissions.create( + "ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd", + project_ids: ["string"] + ) + + assert_pattern do + response => OpenAI::Internal::Page + end + + row = response.to_enum.first + return if row.nil? + + assert_pattern do + row => OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + object: Symbol, + project_id: String + } + end + end + + def test_retrieve + response = @openai.fine_tuning.checkpoints.permissions.retrieve("ft-AF1WoRqd3aJAHsqc9NY7iL8F") + + assert_pattern do + response => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + end + + assert_pattern do + response => { + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data]), + has_more: OpenAI::Internal::Type::Boolean, + object: Symbol, + first_id: String | nil, + last_id: String | nil + } + end + end + + def test_delete + response = + @openai.fine_tuning.checkpoints.permissions.delete("ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd") + + assert_pattern do + response => OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse + end + + assert_pattern do + response => { + id: String, + deleted: OpenAI::Internal::Type::Boolean, + object: Symbol + } + end + end +end diff --git a/test/openai/resources/fine_tuning/checkpoints_test.rb b/test/openai/resources/fine_tuning/checkpoints_test.rb new file mode 100644 index 00000000..acbf41a8 --- /dev/null +++ b/test/openai/resources/fine_tuning/checkpoints_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::FineTuning::CheckpointsTest < OpenAI::Test::ResourceTest +end From 519c2006a5e4532aabb56b8b50951043e507642f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 19:45:26 +0000 Subject: [PATCH 112/295] chore(internal): fix examples (#114) --- .stats.yml | 4 +- test/openai/resources/evals_test.rb | 146 +--------------------------- 2 files changed, 3 insertions(+), 147 deletions(-) diff --git a/.stats.yml b/.stats.yml index 
43112911..50574f0d 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-472fe3036ea745365257fe870c0330917fb3153705c2826f49873cd631319b0a.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-32de3bc513663c5fac922c49be41c222b6ee8c0b841d8966bcdfa489d441daa3.yml openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 -config_hash: ef19d36c307306f14f2e1cd5c834a151 +config_hash: d6c61213488683418adb860a9ee1501b diff --git a/test/openai/resources/evals_test.rb b/test/openai/resources/evals_test.rb index 546438d4..8d1bdca2 100644 --- a/test/openai/resources/evals_test.rb +++ b/test/openai/resources/evals_test.rb @@ -6,151 +6,7 @@ class OpenAI::Test::Resources::EvalsTest < OpenAI::Test::ResourceTest def test_create_required_params response = @openai.evals.create( - data_source_config: { - item_schema: { - "0": "bar", - "1": "bar", - "2": "bar", - "3": "bar", - "4": "bar", - "5": "bar", - "6": "bar", - "7": "bar", - "8": "bar", - "9": "bar", - "10": "bar", - "11": "bar", - "12": "bar", - "13": "bar", - "14": "bar", - "15": "bar", - "16": "bar", - "17": "bar", - "18": "bar", - "19": "bar", - "20": "bar", - "21": "bar", - "22": "bar", - "23": "bar", - "24": "bar", - "25": "bar", - "26": "bar", - "27": "bar", - "28": "bar", - "29": "bar", - "30": "bar", - "31": "bar", - "32": "bar", - "33": "bar", - "34": "bar", - "35": "bar", - "36": "bar", - "37": "bar", - "38": "bar", - "39": "bar", - "40": "bar", - "41": "bar", - "42": "bar", - "43": "bar", - "44": "bar", - "45": "bar", - "46": "bar", - "47": "bar", - "48": "bar", - "49": "bar", - "50": "bar", - "51": "bar", - "52": "bar", - "53": "bar", - "54": "bar", - "55": "bar", - "56": "bar", - "57": "bar", - "58": "bar", - "59": "bar", - "60": "bar", - "61": "bar", - "62": "bar", - "63": "bar", - "64": "bar", - "65": "bar", - "66": "bar", - "67": "bar", - "68": "bar", - "69": "bar", - "70": "bar", - "71": "bar", - "72": "bar", - "73": "bar", - "74": "bar", - "75": "bar", - "76": "bar", - "77": "bar", - "78": "bar", - "79": "bar", - "80": "bar", - "81": "bar", - "82": "bar", - "83": "bar", - "84": "bar", - "85": "bar", - "86": "bar", - "87": "bar", - "88": "bar", - "89": "bar", - "90": "bar", - "91": "bar", - "92": "bar", - "93": "bar", - "94": "bar", - "95": "bar", - "96": "bar", - "97": "bar", - "98": "bar", - "99": "bar", - "100": "bar", - "101": "bar", - "102": "bar", - "103": "bar", - "104": "bar", - "105": "bar", - "106": "bar", - "107": "bar", - "108": "bar", - "109": "bar", - "110": "bar", - "111": "bar", - "112": "bar", - "113": "bar", - "114": "bar", - "115": "bar", - "116": "bar", - "117": "bar", - "118": "bar", - "119": "bar", - "120": "bar", - "121": "bar", - "122": "bar", - "123": "bar", - "124": "bar", - "125": "bar", - "126": "bar", - "127": "bar", - "128": "bar", - "129": "bar", - "130": "bar", - "131": "bar", - "132": "bar", - "133": "bar", - "134": "bar", - "135": "bar", - "136": "bar", - "137": "bar", - "138": "bar", - "139": "bar" - }, - type: :custom - }, + data_source_config: {item_schema: {foo: "bar"}, type: :custom}, testing_criteria: [ { input: [{content: "content", role: "role"}], From 6302054c5ce1d97e2743c484b629acdb54746863 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 19:47:16 +0000 Subject: [PATCH 113/295] chore(internal): skip broken test (#115) --- .stats.yml | 2 +- 
.../resources/fine_tuning/checkpoints/permissions_test.rb | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 50574f0d..71a56c4c 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-32de3bc513663c5fac922c49be41c222b6ee8c0b841d8966bcdfa489d441daa3.yml openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 -config_hash: d6c61213488683418adb860a9ee1501b +config_hash: 43dc8df20ffec9d1503f91866cb2b7d9 diff --git a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb index 8ee93e49..eb752c5f 100644 --- a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb +++ b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb @@ -50,6 +50,8 @@ def test_retrieve end def test_delete + skip("OpenAPI spec is slightly incorrect") + response = @openai.fine_tuning.checkpoints.permissions.delete("ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd") From b2111e79c50e8db1f1712557f3e8ceb944119d35 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 20:06:06 +0000 Subject: [PATCH 114/295] feat(api): manual updates (#116) --- .stats.yml | 4 ++-- README.md | 2 +- .../models/beta/thread_create_and_run_params.rb | 2 +- lib/openai/models/beta/threads/run.rb | 2 +- .../models/beta/threads/run_create_params.rb | 2 +- .../threads/run_submit_tool_outputs_params.rb | 2 +- lib/openai/models/chat/chat_completion.rb | 2 +- .../models/chat/completion_create_params.rb | 2 +- lib/openai/models/responses/response.rb | 2 +- .../models/responses/response_create_params.rb | 2 +- lib/openai/resources/beta/threads.rb | 9 ++++----- lib/openai/resources/beta/threads/runs.rb | 16 ++++++++-------- lib/openai/resources/chat/completions.rb | 9 ++++----- lib/openai/resources/responses.rb | 8 ++++---- rbi/lib/openai/resources/beta/threads.rbi | 13 ++++++------- rbi/lib/openai/resources/beta/threads/runs.rbi | 16 ++++++++-------- rbi/lib/openai/resources/chat/completions.rbi | 13 ++++++------- rbi/lib/openai/resources/responses.rbi | 12 ++++++------ sig/openai/resources/beta/threads.rbs | 2 +- sig/openai/resources/beta/threads/runs.rbs | 4 ++-- sig/openai/resources/chat/completions.rbs | 2 +- sig/openai/resources/responses.rbs | 2 +- 22 files changed, 62 insertions(+), 66 deletions(-) diff --git a/.stats.yml b/.stats.yml index 71a56c4c..037cba0b 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-32de3bc513663c5fac922c49be41c222b6ee8c0b841d8966bcdfa489d441daa3.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-44b20fa9d24544217fe6bb48852037537030a1ad29b202936425110744fe66fb.yml openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 -config_hash: 43dc8df20ffec9d1503f91866cb2b7d9 +config_hash: 69e3afd56ccb0f0f822a7a9dc130fc99 diff --git a/README.md b/README.md index 7a859626..85e4c03d 100644 --- a/README.md +++ b/README.md @@ -73,7 +73,7 @@ We provide support for streaming responses using Server Side Events (SSE). 
```ruby stream = - openai.chat.completions.create_streaming( + openai.chat.completions.stream_raw( messages: [{ role: "user", content: "Say this is a test" diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index f2349d0f..4612ebd8 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -5,7 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#create_and_run # - # @see OpenAI::Resources::Beta::Threads#create_and_run_streaming + # @see OpenAI::Resources::Beta::Threads#stream_raw class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 6b71de5b..c6436c0d 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class Run < OpenAI::Internal::Type::BaseModel # @!attribute id # The identifier, which can be referenced in API endpoints. diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 3e2776ba..a058e78e 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#create # - # @see OpenAI::Resources::Beta::Threads::Runs#create_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index a3e1b7d9..2243cb1d 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -6,7 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs # - # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming + # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 4ea12d1b..97ee34db 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#create_streaming + # @see OpenAI::Resources::Chat::Completions#stream_raw class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute id # A unique identifier for the chat completion. 
diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 4c378db0..65406125 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -5,7 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#create # - # @see OpenAI::Resources::Chat::Completions#create_streaming + # @see OpenAI::Resources::Chat::Completions#stream_raw class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 5280f3ed..3ad5a2be 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#create_streaming + # @see OpenAI::Resources::Responses#stream_raw class Response < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for this Response. diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 07d25b0a..85b349d0 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -5,7 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#create # - # @see OpenAI::Resources::Responses#create_streaming + # @see OpenAI::Resources::Responses#stream_raw class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @!parse # extend OpenAI::Internal::Type::RequestParameters::Converter diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 2902111e..b72ad867 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -94,8 +94,7 @@ def delete(thread_id, params = {}) ) end - # See {OpenAI::Resources::Beta::Threads#create_and_run_streaming} for streaming - # counterpart. + # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Create a thread and run it in one request. # @@ -124,7 +123,7 @@ def delete(thread_id, params = {}) def create_and_run(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_and_run_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -141,7 +140,7 @@ def create_and_run(params) # # Create a thread and run it in one request. 
# - # @overload create_and_run_streaming(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param assistant_id [String] # @param instructions [String, nil] @@ -163,7 +162,7 @@ def create_and_run(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams - def create_and_run_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create_and_run` for the non-streaming use case." diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 61d7e284..8ebefd9c 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -8,7 +8,7 @@ class Runs # @return [OpenAI::Resources::Beta::Threads::Runs::Steps] attr_reader :steps - # See {OpenAI::Resources::Beta::Threads::Runs#create_streaming} for streaming + # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming # counterpart. # # Create a run. @@ -41,7 +41,7 @@ class Runs def create(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#create_stream_raw` for the streaming use case." raise ArgumentError.new(message) end query_params = [:include] @@ -60,7 +60,7 @@ def create(thread_id, params) # # Create a run. # - # @overload create_streaming(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # # @param thread_id [String] # @param assistant_id [String] @@ -85,7 +85,7 @@ def create(thread_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams - def create_streaming(thread_id, params) + def create_stream_raw(thread_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." 
@@ -208,7 +208,7 @@ def cancel(run_id, params) ) end - # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming} for + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # # When a run has the `status: "requires_action"` and `required_action.type` is @@ -229,7 +229,7 @@ def cancel(run_id, params) def submit_tool_outputs(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] - message = "Please use `#submit_tool_outputs_streaming` for the streaming use case." + message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case." raise ArgumentError.new(message) end thread_id = @@ -253,7 +253,7 @@ def submit_tool_outputs(run_id, params) # tool calls once they're all completed. All outputs must be submitted in a single # request. # - # @overload submit_tool_outputs_streaming(run_id, thread_id:, tool_outputs:, request_options: {}) + # @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # # @param run_id [String] # @param thread_id [String] @@ -263,7 +263,7 @@ def submit_tool_outputs(run_id, params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams - def submit_tool_outputs_streaming(run_id, params) + def submit_tool_outputs_stream_raw(run_id, params) parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index c1d28d4a..499c9f52 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -7,8 +7,7 @@ class Completions # @return [OpenAI::Resources::Chat::Completions::Messages] attr_reader :messages - # See {OpenAI::Resources::Chat::Completions#create_streaming} for streaming - # counterpart. + # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take @@ -68,7 +67,7 @@ class Completions def create(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -100,7 +99,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
# - # @overload create_streaming(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] # @param model [String, Symbol, OpenAI::Models::ChatModel] @@ -137,7 +136,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index bb369eb8..63a4abef 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -6,7 +6,7 @@ class Responses # @return [OpenAI::Resources::Responses::InputItems] attr_reader :input_items - # See {OpenAI::Resources::Responses#create_streaming} for streaming counterpart. + # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. # # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or @@ -47,7 +47,7 @@ class Responses def create(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] - message = "Please use `#create_streaming` for the streaming use case." + message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -73,7 +73,7 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload create_streaming(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] @@ -97,7 +97,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams - def create_streaming(params) + def stream_raw(params) parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/lib/openai/resources/beta/threads.rbi index 2139613a..316bbca4 100644 --- a/rbi/lib/openai/resources/beta/threads.rbi +++ b/rbi/lib/openai/resources/beta/threads.rbi @@ -91,8 +91,7 @@ module OpenAI thread_id, request_options: {} ); end - # See {OpenAI::Resources::Beta::Threads#create_and_run_streaming} for streaming - # counterpart. + # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Create a thread and run it in one request. sig do @@ -231,8 +230,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` - # or `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -316,7 +315,7 @@ module OpenAI ] ) end - def create_and_run_streaming( + def stream_raw( # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. @@ -405,8 +404,8 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_and_run_streaming` - # or `#create_and_run` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or + # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/lib/openai/resources/beta/threads/runs.rbi index 0b26ef36..e3a1216a 100644 --- a/rbi/lib/openai/resources/beta/threads/runs.rbi +++ b/rbi/lib/openai/resources/beta/threads/runs.rbi @@ -8,7 +8,7 @@ module OpenAI sig { returns(OpenAI::Resources::Beta::Threads::Runs::Steps) } attr_reader :steps - # See {OpenAI::Resources::Beta::Threads::Runs#create_streaming} for streaming + # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming # counterpart. # # Create a run. 
@@ -173,7 +173,7 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} @@ -263,7 +263,7 @@ module OpenAI ] ) end - def create_streaming( + def create_stream_raw( # Path param: The ID of the thread to run. thread_id, # Body param: The ID of the @@ -372,7 +372,7 @@ module OpenAI # Body param: Controls for how a thread will be truncated prior to the run. Use # this to control the intial context window of the run. truncation_strategy: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or + # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} @@ -468,7 +468,7 @@ module OpenAI thread_id:, request_options: {} ); end - # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_streaming} for + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # # When a run has the `status: "requires_action"` and `required_action.type` is @@ -495,7 +495,7 @@ module OpenAI # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and + # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and # non-streaming use cases, respectively. stream: false, request_options: {} @@ -546,7 +546,7 @@ module OpenAI ] ) end - def submit_tool_outputs_streaming( + def submit_tool_outputs_stream_raw( # Path param: The ID of the run that requires the tool output submission. run_id, # Path param: The ID of the @@ -556,7 +556,7 @@ module OpenAI # Body param: A list of tools for which the outputs are being submitted. tool_outputs:, # There is no need to provide `stream:`. Instead, use - # `#submit_tool_outputs_streaming` or `#submit_tool_outputs` for streaming and + # `#submit_tool_outputs_stream_raw` or `#submit_tool_outputs` for streaming and # non-streaming use cases, respectively. stream: true, request_options: {} diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi index d0623009..672459b2 100644 --- a/rbi/lib/openai/resources/chat/completions.rbi +++ b/rbi/lib/openai/resources/chat/completions.rbi @@ -7,8 +7,7 @@ module OpenAI sig { returns(OpenAI::Resources::Chat::Completions::Messages) } attr_reader :messages - # See {OpenAI::Resources::Chat::Completions#create_streaming} for streaming - # counterpart. + # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take @@ -271,8 +270,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. 
Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -355,7 +354,7 @@ module OpenAI ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end - def create_streaming( + def stream_raw( # A list of messages comprising the conversation so far. Depending on the # [model](https://platform.openai.com/docs/models) you use, different message # types (modalities) are supported, like @@ -539,8 +538,8 @@ module OpenAI # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ); end diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index b98140db..36919e9b 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -6,7 +6,7 @@ module OpenAI sig { returns(OpenAI::Resources::Responses::InputItems) } attr_reader :input_items - # See {OpenAI::Resources::Responses#create_streaming} for streaming counterpart. + # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. # # Creates a model response. Provide # [text](https://platform.openai.com/docs/guides/text) or @@ -183,8 +183,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ); end @@ -297,7 +297,7 @@ module OpenAI ] ) end - def create_streaming( + def stream_raw( # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -402,8 +402,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#create_streaming` or - # `#create` for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` + # for streaming and non-streaming use cases, respectively. 
stream: true, request_options: {} ); end diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index fe3d90cd..50be0608 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -49,7 +49,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def create_and_run_streaming: ( + def stream_raw: ( assistant_id: String, ?instructions: String?, ?max_completion_tokens: Integer?, diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index bdc66eb1..df8265f2 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -27,7 +27,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def create_streaming: ( + def create_stream_raw: ( String thread_id, assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], @@ -84,7 +84,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Beta::Threads::Run - def submit_tool_outputs_streaming: ( + def submit_tool_outputs_stream_raw: ( String run_id, thread_id: String, tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index e1f27903..0e8a88ef 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -38,7 +38,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Chat::ChatCompletion - def create_streaming: ( + def stream_raw: ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 028c6cc6..2dfcf44d 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -24,7 +24,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Responses::Response - def create_streaming: ( + def stream_raw: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, From f74dd0c0b792930d2b5c598f805294fbed00f58d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 20:07:44 +0000 Subject: [PATCH 115/295] chore: attempt to clean up underlying transport when streams are GC'd (#117) --- lib/openai/internal/type/base_stream.rb | 17 +++++++++++++++++ rbi/lib/openai/internal/type/base_stream.rbi | 10 ++++++++++ sig/openai/internal/type/base_stream.rbs | 4 ++++ 3 files changed, 31 insertions(+) diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index c9dd5765..98a8c528 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -11,6 +11,21 @@ module Type module BaseStream include Enumerable + class << self + # Attempt to close the underlying transport when the stream itself is garbage + # collected. + # + # This should not be relied upon for resource clean up, as the garbage collector + # is not guaranteed to run. 
+ # + # @param stream [Enumerable] + # + # @return [Proc] + # + # @see https://rubyapi.org/3.1/o/objectspace#method-c-define_finalizer + def defer_closing(stream) = ->(_id) { OpenAI::Internal::Util.close_fused!(stream) } + end + # @return [void] def close = OpenAI::Internal::Util.close_fused!(@iterator) @@ -49,6 +64,8 @@ def initialize(model:, url:, status:, response:, stream:) @response = response @stream = stream @iterator = iterator + + ObjectSpace.define_finalizer(self, OpenAI::Internal::Type::BaseStream.defer_closing(@stream)) end end end diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi index eb383d06..81f3a71c 100644 --- a/rbi/lib/openai/internal/type/base_stream.rbi +++ b/rbi/lib/openai/internal/type/base_stream.rbi @@ -10,6 +10,16 @@ module OpenAI Message = type_member(:in) Elem = type_member(:out) + class << self + # Attempt to close the underlying transport when the stream itself is garbage + # collected. + # + # This should not be relied upon for resource cleanup, as the garbage collector + # is not guaranteed to run. + sig { params(stream: T::Enumerable[T.anything]).returns(T.proc.params(arg0: Integer).void) } + def defer_closing(stream); end + end + sig { void } def close; end diff --git a/sig/openai/internal/type/base_stream.rbs b/sig/openai/internal/type/base_stream.rbs index 902e7720..b526bcbc 100644 --- a/sig/openai/internal/type/base_stream.rbs +++ b/sig/openai/internal/type/base_stream.rbs @@ -4,6 +4,10 @@ module OpenAI module BaseStream[Message, Elem] include Enumerable[Elem] + def self.defer_closing: ( + Enumerable[top] stream + ) -> (^(Integer arg0) -> void) + def close: -> void private def iterator: -> Enumerable[Elem] From d90d3ae09f6adaec687201a6af4fe2a8b66fbf19 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 21:39:05 +0000 Subject: [PATCH 116/295] chore: add README docs for using solargraph when installing gem from git (#118) --- README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/README.md b/README.md index 85e4c03d..e18faa18 100644 --- a/README.md +++ b/README.md @@ -180,6 +180,13 @@ After Solargraph is installed, **you must populate its index** either via the pr bundle exec solargraph gems ``` +Note: if you installed the gem using a `git:` or `github:` URL, or vendored the gem using bundler, you will need to set up your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to the gem's `lib` directory. + +```yaml +include: + - 'vendor/bundle/ruby/*/gems/openai-*/lib/**/*.rb' +``` + Otherwise Solargraph will not be able to provide type information or auto-completion for any non-indexed libraries.
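A minimal, self-contained sketch (not part of the patches above; the class and file names are illustrative) of the `ObjectSpace.define_finalizer` pattern that the `BaseStream` change in PATCH 115 relies on. The key detail is that `defer_closing` is a class method, so the proc it returns closes over the underlying transport only; a proc built in `initialize` that captured `self` would keep the stream reachable and the finalizer could never run.

```ruby
class FusedStream
  include Enumerable

  # Built as a class method so the returned proc captures only `io`, never `self`.
  def self.defer_closing(io) = ->(_object_id) { io.close unless io.closed? }

  def initialize(io)
    @io = io
    ObjectSpace.define_finalizer(self, self.class.defer_closing(@io))
  end

  def each(&) = @io.each_line(&)
end

# Best-effort cleanup only: the GC is not guaranteed to run, so callers should
# still close streams they do not fully consume.
stream = FusedStream.new(File.open("events.jsonl")) # hypothetical input file
stream = nil
GC.start
```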
### Sorbet From 8049f5938ae5ba5d620440257b219e1844029c0a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 21:50:09 +0000 Subject: [PATCH 117/295] feat: use Pathname alongside raw IO handles for file uploads (#119) --- README.md | 17 +++ lib/openai.rb | 1 + .../transport/pooled_net_requester.rb | 11 +- lib/openai/internal/type/array_of.rb | 14 ++- lib/openai/internal/type/base_model.rb | 14 ++- lib/openai/internal/type/boolean.rb | 6 +- lib/openai/internal/type/converter.rb | 35 ++++-- lib/openai/internal/type/enum.rb | 6 +- lib/openai/internal/type/hash_of.rb | 8 +- lib/openai/internal/type/io_like.rb | 75 +++++++++++++ .../internal/type/request_parameters.rb | 13 ++- lib/openai/internal/type/union.rb | 10 +- lib/openai/internal/type/unknown.rb | 6 +- lib/openai/internal/util.rb | 101 ++++++++++++++---- .../audio/transcription_create_params.rb | 6 +- .../models/audio/translation_create_params.rb | 6 +- lib/openai/models/file_create_params.rb | 6 +- .../models/image_create_variation_params.rb | 6 +- lib/openai/models/image_edit_params.rb | 14 +-- .../models/uploads/part_create_params.rb | 6 +- lib/openai/resources/audio/transcriptions.rb | 4 +- lib/openai/resources/audio/translations.rb | 2 +- lib/openai/resources/files.rb | 2 +- lib/openai/resources/images.rb | 6 +- lib/openai/resources/uploads/parts.rb | 2 +- .../transport/pooled_net_requester.rbi | 2 +- rbi/lib/openai/internal/type/array_of.rbi | 16 +-- rbi/lib/openai/internal/type/base_model.rbi | 10 +- rbi/lib/openai/internal/type/boolean.rbi | 16 ++- rbi/lib/openai/internal/type/converter.rbi | 30 ++++-- rbi/lib/openai/internal/type/enum.rbi | 15 ++- rbi/lib/openai/internal/type/hash_of.rbi | 9 +- rbi/lib/openai/internal/type/io_like.rbi | 49 +++++++++ rbi/lib/openai/internal/type/union.rbi | 16 +-- rbi/lib/openai/internal/type/unknown.rbi | 16 +-- rbi/lib/openai/internal/util.rbi | 39 ++++++- .../audio/transcription_create_params.rbi | 6 +- .../audio/translation_create_params.rbi | 6 +- rbi/lib/openai/models/file_create_params.rbi | 6 +- .../models/image_create_variation_params.rbi | 6 +- rbi/lib/openai/models/image_edit_params.rbi | 14 +-- .../models/uploads/part_create_params.rbi | 6 +- .../openai/resources/audio/transcriptions.rbi | 4 +- .../openai/resources/audio/translations.rbi | 2 +- rbi/lib/openai/resources/files.rbi | 2 +- rbi/lib/openai/resources/images.rbi | 6 +- rbi/lib/openai/resources/uploads/parts.rbi | 2 +- .../transport/pooled_net_requester.rbs | 2 +- sig/openai/internal/type/array_of.rbs | 7 +- sig/openai/internal/type/base_model.rbs | 7 +- sig/openai/internal/type/boolean.rbs | 7 +- sig/openai/internal/type/converter.rbs | 16 ++- sig/openai/internal/type/enum.rbs | 7 +- sig/openai/internal/type/hash_of.rbs | 7 +- sig/openai/internal/type/io_like.rbs | 23 ++++ sig/openai/internal/type/union.rbs | 7 +- sig/openai/internal/type/unknown.rbs | 7 +- sig/openai/internal/util.rbs | 19 +++- .../audio/transcription_create_params.rbs | 6 +- .../audio/translation_create_params.rbs | 6 +- sig/openai/models/file_create_params.rbs | 6 +- .../models/image_create_variation_params.rbs | 6 +- sig/openai/models/image_edit_params.rbs | 14 +-- .../models/uploads/part_create_params.rbs | 7 +- sig/openai/resources/audio/transcriptions.rbs | 4 +- sig/openai/resources/audio/translations.rbs | 2 +- sig/openai/resources/files.rbs | 2 +- sig/openai/resources/images.rbs | 6 +- sig/openai/resources/uploads/parts.rbs | 2 +- 
test/openai/internal/type/base_model_test.rb | 32 +++++- test/openai/internal/util_test.rb | 18 ++-- .../resources/audio/transcriptions_test.rb | 3 +- .../resources/audio/translations_test.rb | 3 +- test/openai/resources/files_test.rb | 2 +- test/openai/resources/images_test.rb | 7 +- test/openai/resources/uploads/parts_test.rb | 2 +- 76 files changed, 665 insertions(+), 227 deletions(-) create mode 100644 lib/openai/internal/type/io_like.rb create mode 100644 rbi/lib/openai/internal/type/io_like.rbi create mode 100644 sig/openai/internal/type/io_like.rbs diff --git a/README.md b/README.md index e18faa18..8171b0e9 100644 --- a/README.md +++ b/README.md @@ -86,6 +86,23 @@ stream.each do |completion| end ``` +## File uploads + +Request parameters that correspond to file uploads can be passed as a `StringIO` or a [`Pathname`](https://rubyapi.org/3.1/o/pathname) instance. + +```ruby +require "pathname" + +# using `Pathname`, the file will be lazily read, without reading everything into memory +file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune") + +file = File.read("input.jsonl") +# using `StringIO`, useful if you already have the data in memory +file_object = openai.files.create(file: StringIO.new(file), purpose: "fine-tune") + +puts(file_object.id) +``` + ### Errors When the library is unable to connect to the API, or if the API returns a non-success status code (i.e., 4xx or 5xx response), a subclass of `OpenAI::Error` will be thrown: diff --git a/lib/openai.rb b/lib/openai.rb index 537211f7..66af9d43 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -40,6 +40,7 @@ require_relative "openai/internal/type/converter" require_relative "openai/internal/type/unknown" require_relative "openai/internal/type/boolean" +require_relative "openai/internal/type/io_like" require_relative "openai/internal/type/enum" require_relative "openai/internal/type/union" require_relative "openai/internal/type/array_of" diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 3ef69e08..b6303758 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -54,7 +54,7 @@ def calibrate_socket_timeout(conn, deadline) # @param blk [Proc] # # @yieldparam [String] - # @return [Net::HTTPGenericRequest] + # @return [Array(Net::HTTPGenericRequest, Proc)] def build_request(request, &blk) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) req = Net::HTTPGenericRequest.new( @@ -75,12 +75,12 @@ def build_request(request, &blk) in StringIO req["content-length"] ||= body.size.to_s unless req["transfer-encoding"] req.body_stream = OpenAI::Internal::Util::ReadIOAdapter.new(body, &blk) - in IO | Enumerator + in Pathname | IO | Enumerator req["transfer-encoding"] ||= "chunked" unless req["content-length"] req.body_stream = OpenAI::Internal::Util::ReadIOAdapter.new(body, &blk) end - req + [req, req.body_stream&.method(:close)] end end @@ -125,11 +125,12 @@ def execute(request) eof = false finished = false + closing = nil enum = Enumerator.new do |y| with_pool(url, deadline: deadline) do |conn| next if finished - req = self.class.build_request(request) do + req, closing = self.class.build_request(request) do self.class.calibrate_socket_timeout(conn, deadline) end @@ -165,7 +166,9 @@ def execute(request) rescue StopIteration nil end + ensure conn.finish if !eof && conn&.started?
+ closing&.call end [Integer(response.code), response, (response.body = body)] end diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index e483f418..89cec2be 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -79,10 +79,20 @@ def coerce(value, state:) # # @param value [Array, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # # @return [Array, Object] - def dump(value) + def dump(value, state:) target = item_type - value.is_a?(Array) ? value.map { OpenAI::Internal::Type::Converter.dump(target, _1) } : super + if value.is_a?(Array) + value.map do + OpenAI::Internal::Type::Converter.dump(target, _1, state: state) + end + else + super + end end # @api private diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index eab6ea8b..1a180ea8 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -252,8 +252,12 @@ def coerce(value, state:) # # @param value [OpenAI::Internal::Type::BaseModel, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # # @return [Hash{Object=>Object}, Object] - def dump(value) + def dump(value, state:) unless (coerced = OpenAI::Internal::Util.coerce_hash(value)).is_a?(Hash) return super end @@ -264,7 +268,7 @@ def dump(value) name = key.is_a?(String) ? key.to_sym : key case (field = known_fields[name]) in nil - acc.store(name, super(val)) + acc.store(name, super(val, state: state)) else api_name, mode, type_fn = field.fetch_values(:api_name, :mode, :type_fn) case mode @@ -272,7 +276,7 @@ def dump(value) next else target = type_fn.call - acc.store(api_name, OpenAI::Internal::Type::Converter.dump(target, val)) + acc.store(api_name, OpenAI::Internal::Type::Converter.dump(target, val, state: state)) end end end @@ -337,12 +341,12 @@ def deconstruct_keys(keys) # @param a [Object] # # @return [String] - def to_json(*a) = self.class.dump(self).to_json(*a) + def to_json(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_json(*a) # @param a [Object] # # @return [String] - def to_yaml(*a) = self.class.dump(self).to_yaml(*a) + def to_yaml(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_yaml(*a) # Create a new instance of a model. # diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index 54dadb7f..cf12a25a 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -45,8 +45,12 @@ def coerce(value, state:) # # # # @param value [Boolean, Object] # # + # # @param state [Hash{Symbol=>Object}] . + # # + # # @option state [Boolean] :can_retry + # # # # @return [Boolean, Object] - # def dump(value) = super + # def dump(value, state:) = super end end end diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index ef77d9eb..f7f8ed08 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -26,15 +26,24 @@ def coerce(value, state:) = (raise NotImplementedError) # # @param value [Object] # + # @param state [Hash{Symbol=>Object}] . 
+ # + # @option state [Boolean] :can_retry + # # @return [Object] - def dump(value) + def dump(value, state:) case value in Array - value.map { OpenAI::Internal::Type::Unknown.dump(_1) } + value.map { OpenAI::Internal::Type::Unknown.dump(_1, state: state) } in Hash - value.transform_values { OpenAI::Internal::Type::Unknown.dump(_1) } + value.transform_values { OpenAI::Internal::Type::Unknown.dump(_1, state: state) } in OpenAI::Internal::Type::BaseModel - value.class.dump(value) + value.class.dump(value, state: state) + in StringIO + value.string + in Pathname | IO + state[:can_retry] = false if value.is_a?(IO) + OpenAI::Internal::Util::SerializationAdapter.new(value) else value end @@ -182,7 +191,7 @@ def coerce( rescue ArgumentError, TypeError => e raise e if strictness == :strong end - in -> { _1 <= IO } if value.is_a?(String) + in -> { _1 <= StringIO } if value.is_a?(String) exactness[:yes] += 1 return StringIO.new(value.b) else @@ -207,13 +216,21 @@ def coerce( # @api private # # @param target [OpenAI::Internal::Type::Converter, Class] + # # @param value [Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # # @return [Object] - def dump(target, value) - # rubocop:disable Layout/LineLength - target.is_a?(OpenAI::Internal::Type::Converter) ? target.dump(value) : OpenAI::Internal::Type::Unknown.dump(value) - # rubocop:enable Layout/LineLength + def dump(target, value, state: {can_retry: true}) + case target + in OpenAI::Internal::Type::Converter + target.dump(value, state: state) + else + OpenAI::Internal::Type::Unknown.dump(value, state: state) + end end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 2a005d61..50aa9467 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -101,8 +101,12 @@ def coerce(value, state:) # # # # @param value [Symbol, Object] # # + # # @param state [Hash{Symbol=>Object}] . + # # + # # @option state [Boolean] :can_retry + # # # # @return [Symbol, Object] - # def dump(value) = super + # def dump(value, state:) = super end end end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 2d69f003..d88b05e6 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -99,12 +99,16 @@ def coerce(value, state:) # # @param value [Hash{Object=>Object}, Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # # @return [Hash{Symbol=>Object}, Object] - def dump(value) + def dump(value, state:) target = item_type if value.is_a?(Hash) value.transform_values do - OpenAI::Internal::Type::Converter.dump(target, _1) + OpenAI::Internal::Type::Converter.dump(target, _1, state: state) end else super diff --git a/lib/openai/internal/type/io_like.rb b/lib/openai/internal/type/io_like.rb new file mode 100644 index 00000000..5147b4e3 --- /dev/null +++ b/lib/openai/internal/type/io_like.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + module Type + # @api private + # + # @abstract + # + # Either `Pathname` or `StringIO`. 
+ class IOLike + extend OpenAI::Internal::Type::Converter + + # @param other [Object] + # + # @return [Boolean] + def self.===(other) + case other + in StringIO | Pathname | IO + true + else + false + end + end + + # @param other [Object] + # + # @return [Boolean] + def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::IOLike + + class << self + # @api private + # + # @param value [StringIO, String, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean, :strong] :strictness + # + # @option state [Hash{Symbol=>Object}] :exactness + # + # @option state [Integer] :branched + # + # @return [StringIO, Object] + def coerce(value, state:) + exactness = state.fetch(:exactness) + case value + in String + exactness[:yes] += 1 + StringIO.new(value) + in StringIO + exactness[:yes] += 1 + value + else + exactness[:no] += 1 + value + end + end + + # @!parse + # # @api private + # # + # # @param value [Pathname, StringIO, IO, String, Object] + # # + # # @param state [Hash{Symbol=>Object}] . + # # + # # @option state [Boolean] :can_retry + # # + # # @return [Pathname, StringIO, IO, String, Object] + # def dump(value, state:) = super + end + end + end + end +end diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb index 93ebb068..aaa7a939 100644 --- a/lib/openai/internal/type/request_parameters.rb +++ b/lib/openai/internal/type/request_parameters.rb @@ -26,9 +26,18 @@ module Converter # # @return [Array(Object, Hash{Symbol=>Object})] def dump_request(params) - case (dumped = dump(params)) + state = {can_retry: true} + case (dumped = dump(params, state: state)) in Hash - [dumped.except(:request_options), dumped[:request_options]] + options = OpenAI::Internal::Util.coerce_hash(dumped[:request_options]) + request_options = + case [options, state.fetch(:can_retry)] + in [Hash | nil, false] + {**options.to_h, max_retries: 0} + else + options + end + [dumped.except(:request_options), request_options] else [dumped, nil] end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 18c6cf0e..ac66dc5f 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -190,15 +190,19 @@ def coerce(value, state:) # # @param value [Object] # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # # @return [Object] - def dump(value) + def dump(value, state:) if (target = resolve_variant(value)) - return OpenAI::Internal::Type::Converter.dump(target, value) + return OpenAI::Internal::Type::Converter.dump(target, value, state: state) end known_variants.each do target = _2.call - return OpenAI::Internal::Type::Converter.dump(target, value) if target === value + return OpenAI::Internal::Type::Converter.dump(target, value, state: state) if target === value end super diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index 5d06c449..1c73e2f9 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -47,8 +47,12 @@ def coerce(value, state:) # # # # @param value [Object] # # + # # @param state [Hash{Symbol=>Object}] . 
+ # # + # # @option state [Boolean] :can_retry + # # # # @return [Object] - # def dump(value) = super + # def dump(value, state:) = super end # rubocop:enable Lint/UnusedMethodArgument diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index fd5e4b81..a7a8a095 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -122,7 +122,7 @@ def coerce_float(input) # @return [Hash{Object=>Object}, Object] def coerce_hash(input) case input - in NilClass | Array | Set | Enumerator + in NilClass | Array | Set | Enumerator | StringIO | IO input else input.respond_to?(:to_h) ? input.to_h : input @@ -348,10 +348,47 @@ def normalized_headers(*headers) end end + # @api private + class SerializationAdapter + # @return [Pathname, IO] + attr_reader :inner + + # @param a [Object] + # + # @return [String] + def to_json(*a) = (inner.is_a?(IO) ? inner.read : inner.read(binmode: true)).to_json(*a) + + # @param a [Object] + # + # @return [String] + def to_yaml(*a) = (inner.is_a?(IO) ? inner.read : inner.read(binmode: true)).to_yaml(*a) + + # @api private + # + # @param inner [Pathname, IO] + def initialize(inner) = @inner = inner + end + # @api private # # An adapter that satisfies the IO interface required by `::IO.copy_stream` class ReadIOAdapter + # @api private + # + # @return [Boolean, nil] + def close? = @closing + + # @api private + def close + case @stream + in Enumerator + OpenAI::Internal::Util.close_fused!(@stream) + in IO if close? + @stream.close + else + end + end + # @api private # # @param max_len [Integer, nil] @@ -396,12 +433,21 @@ def read(max_len = nil, out_string = nil) # @api private # - # @param stream [String, IO, StringIO, Enumerable] + # @param src [String, Pathname, StringIO, Enumerable] # @param blk [Proc] # # @yieldparam [String] - def initialize(stream, &blk) - @stream = stream.is_a?(String) ? StringIO.new(stream) : stream + def initialize(src, &blk) + @stream = + case src + in String + StringIO.new(src) + in Pathname + @closing = true + src.open(binmode: true) + else + src + end @buf = String.new.b @blk = blk end @@ -414,9 +460,10 @@ class << self # @return [Enumerable] def writable_enum(&blk) Enumerator.new do |y| + buf = String.new.b y.define_singleton_method(:write) do - self << _1.clone - _1.bytesize + self << buf.replace(_1) + buf.bytesize end blk.call(y) @@ -431,29 +478,39 @@ class << self # @param boundary [String] # @param key [Symbol, String] # @param val [Object] - private def write_multipart_chunk(y, boundary:, key:, val:) + # @param closing [Array] + private def write_multipart_chunk(y, boundary:, key:, val:, closing:) + val = val.inner if val.is_a?(OpenAI::Internal::Util::SerializationAdapter) + y << "--#{boundary}\r\n" y << "Content-Disposition: form-data" unless key.nil? 
name = ERB::Util.url_encode(key.to_s) y << "; name=\"#{name}\"" end - if val.is_a?(IO) + case val + in Pathname | IO filename = ERB::Util.url_encode(File.basename(val.to_path)) y << "; filename=\"#{filename}\"" + else end y << "\r\n" case val + in Pathname + y << "Content-Type: application/octet-stream\r\n\r\n" + io = val.open(binmode: true) + closing << io.method(:close) + IO.copy_stream(io, y) in IO y << "Content-Type: application/octet-stream\r\n\r\n" - IO.copy_stream(val.tap(&:rewind), y) + IO.copy_stream(val, y) in StringIO y << "Content-Type: application/octet-stream\r\n\r\n" y << val.string in String y << "Content-Type: application/octet-stream\r\n\r\n" y << val.to_s - in true | false | Integer | Float | Symbol + in _ if primitive?(val) y << "Content-Type: text/plain\r\n\r\n" y << val.to_s else @@ -471,6 +528,7 @@ class << self private def encode_multipart_streaming(body) boundary = SecureRandom.urlsafe_base64(60) + closing = [] strio = writable_enum do |y| case body in Hash @@ -478,19 +536,20 @@ class << self case val in Array if val.all? { primitive?(_1) } val.each do |v| - write_multipart_chunk(y, boundary: boundary, key: key, val: v) + write_multipart_chunk(y, boundary: boundary, key: key, val: v, closing: closing) end else - write_multipart_chunk(y, boundary: boundary, key: key, val: val) + write_multipart_chunk(y, boundary: boundary, key: key, val: val, closing: closing) end end else - write_multipart_chunk(y, boundary: boundary, key: nil, val: body) + write_multipart_chunk(y, boundary: boundary, key: nil, val: body, closing: closing) end y << "--#{boundary}--\r\n" end - [boundary, strio] + fused_io = fused_enum(strio) { closing.each(&:call) } + [boundary, fused_io] end # @api private @@ -501,21 +560,21 @@ class << self # @return [Object] def encode_content(headers, body) content_type = headers["content-type"] + body = body.inner if body.is_a?(OpenAI::Internal::Util::SerializationAdapter) + case [content_type, body] - in [%r{^application/(?:vnd\.api\+)?json}, _] unless body.nil? + in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array | -> { primitive?(_1) }] [headers, JSON.fast_generate(body)] - in [%r{^application/(?:x-)?jsonl}, Enumerable] + in [%r{^application/(?:x-)?jsonl}, Enumerable] unless body.is_a?(StringIO) || body.is_a?(IO) [headers, body.lazy.map { JSON.fast_generate(_1) }] - in [%r{^multipart/form-data}, Hash | IO | StringIO] + in [%r{^multipart/form-data}, Hash | Pathname | StringIO | IO] boundary, strio = encode_multipart_streaming(body) headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} [headers, strio] - in [_, IO] - [headers, body.tap(&:rewind)] - in [_, StringIO] - [headers, body.string] in [_, Symbol | Numeric] [headers, body.to_s] + in [_, StringIO] + [headers, body.string] else [headers, body] end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 2dc55d74..c883db5d 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -15,8 +15,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [IO, StringIO] - required :file, IO + # @return [Pathname, StringIO] + required :file, OpenAI::Internal::Type::IOLike # @!attribute model # ID of the model to use. 
The options are `gpt-4o-transcribe`, @@ -108,7 +108,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # attr_writer :timestamp_granularities # @!parse - # # @param file [IO, StringIO] + # # @param file [Pathname, StringIO] # # @param model [String, Symbol, OpenAI::Models::AudioModel] # # @param include [Array] # # @param language [String] diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 82b0af2f..51cc325c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -13,8 +13,8 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [IO, StringIO] - required :file, IO + # @return [Pathname, StringIO] + required :file, OpenAI::Internal::Type::IOLike # @!attribute model # ID of the model to use. Only `whisper-1` (which is powered by our open source @@ -62,7 +62,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # attr_writer :temperature # @!parse - # # @param file [IO, StringIO] + # # @param file [Pathname, StringIO] # # @param model [String, Symbol, OpenAI::Models::AudioModel] # # @param prompt [String] # # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index a3bc12dd..de4e8770 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -11,8 +11,8 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The File object (not file name) to be uploaded. # - # @return [IO, StringIO] - required :file, IO + # @return [Pathname, StringIO] + required :file, OpenAI::Internal::Type::IOLike # @!attribute purpose # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -24,7 +24,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel required :purpose, enum: -> { OpenAI::Models::FilePurpose } # @!parse - # # @param file [IO, StringIO] + # # @param file [Pathname, StringIO] # # @param purpose [Symbol, OpenAI::Models::FilePurpose] # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index a24a9d51..b51d4723 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -12,8 +12,8 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. # - # @return [IO, StringIO] - required :image, IO + # @return [Pathname, StringIO] + required :image, OpenAI::Internal::Type::IOLike # @!attribute model # The model to use for image generation. 
Only `dall-e-2` is supported at this @@ -59,7 +59,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # attr_writer :user # @!parse - # # @param image [IO, StringIO] + # # @param image [Pathname, StringIO] # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # # @param n [Integer, nil] # # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 0a0c78c4..c2487b6f 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -12,8 +12,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask # is not provided, image must have transparency, which will be used as the mask. # - # @return [IO, StringIO] - required :image, IO + # @return [Pathname, StringIO] + required :image, OpenAI::Internal::Type::IOLike # @!attribute prompt # A text description of the desired image(s). The maximum length is 1000 @@ -27,11 +27,11 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # indicate where `image` should be edited. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. # - # @return [IO, StringIO, nil] - optional :mask, IO + # @return [Pathname, StringIO, nil] + optional :mask, OpenAI::Internal::Type::IOLike # @!parse - # # @return [IO, StringIO] + # # @return [Pathname, StringIO] # attr_writer :mask # @!attribute model @@ -75,9 +75,9 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # attr_writer :user # @!parse - # # @param image [IO, StringIO] + # # @param image [Pathname, StringIO] # # @param prompt [String] - # # @param mask [IO, StringIO] + # # @param mask [Pathname, StringIO] # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # # @param n [Integer, nil] # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index ff3805ca..e6fd5ad7 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -12,11 +12,11 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data # The chunk of bytes for this Part. 
# - # @return [IO, StringIO] - required :data, IO + # @return [Pathname, StringIO] + required :data, OpenAI::Internal::Type::IOLike # @!parse - # # @param data [IO, StringIO] + # # @param data [Pathname, StringIO] # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # # # def initialize(data:, request_options: {}, **) = super diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 647f80c8..bdde1310 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -11,7 +11,7 @@ class Transcriptions # # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [IO, StringIO] + # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] # @param include [Array] # @param language [String] @@ -47,7 +47,7 @@ def create(params) # # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [IO, StringIO] + # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] # @param include [Array] # @param language [String] diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index c1540307..418e58af 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -8,7 +8,7 @@ class Translations # # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # - # @param file [IO, StringIO] + # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] # @param prompt [String] # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 3dc1bc0e..f107ee07 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -27,7 +27,7 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [IO, StringIO] + # @param file [Pathname, StringIO] # @param purpose [Symbol, OpenAI::Models::FilePurpose] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index ba4e4984..886a0efb 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -7,7 +7,7 @@ class Images # # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [IO, StringIO] + # @param image [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # @param n [Integer, nil] # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] @@ -34,9 +34,9 @@ def create_variation(params) # # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [IO, StringIO] + # @param image [Pathname, StringIO] # @param prompt [String] - # @param mask [IO, StringIO] + # @param mask [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # @param n [Integer, nil] # @param response_format [Symbol, 
OpenAI::Models::ImageEditParams::ResponseFormat, nil] diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index 343a20e3..12d21af6 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -19,7 +19,7 @@ class Parts # @overload create(upload_id, data:, request_options: {}) # # @param upload_id [String] - # @param data [IO, StringIO] + # @param data [Pathname, StringIO] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Uploads::UploadPart] diff --git a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi index fb9572da..c0a3d443 100644 --- a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi +++ b/rbi/lib/openai/internal/transport/pooled_net_requester.rbi @@ -35,7 +35,7 @@ module OpenAI request: OpenAI::Internal::Transport::PooledNetRequester::RequestShape, blk: T.proc.params(arg0: String).void ) - .returns(Net::HTTPGenericRequest) + .returns([Net::HTTPGenericRequest, T.proc.void]) end def build_request(request, &blk); end end diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index 98276260..ef39a4c7 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -36,11 +36,10 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any( - T::Array[T.anything], - T.anything - ), - state: OpenAI::Internal::Type::Converter::State) + .params( + value: T.any(T::Array[T.anything], T.anything), + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.any(T::Array[T.anything], T.anything)) end def coerce(value, state:); end @@ -48,10 +47,13 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Array[T.anything], T.anything)) + .params( + value: T.any(T::Array[T.anything], T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.any(T::Array[T.anything], T.anything)) end - def dump(value); end + def dump(value, state:); end # @api private sig(:final) { returns(Elem) } diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 0b0d262a..2d0c0bfc 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -122,7 +122,7 @@ module OpenAI override .params( value: T.any(OpenAI::Internal::Type::BaseModel, T::Hash[T.anything, T.anything], T.anything), - state: OpenAI::Internal::Type::Converter::State + state: OpenAI::Internal::Type::Converter::CoerceState ) .returns(T.any(T.attached_class, T.anything)) end @@ -131,10 +131,14 @@ module OpenAI # @api private sig do override - .params(value: T.any(T.attached_class, T.anything)) + .params(value: T.any( + T.attached_class, + T.anything + ), + state: OpenAI::Internal::Type::Converter::DumpState) .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) end - def dump(value); end + def dump(value, state:); end end # Returns the raw value associated with the given key, if found. 
Otherwise, nil is diff --git a/rbi/lib/openai/internal/type/boolean.rbi b/rbi/lib/openai/internal/type/boolean.rbi index 4e002734..74064c19 100644 --- a/rbi/lib/openai/internal/type/boolean.rbi +++ b/rbi/lib/openai/internal/type/boolean.rbi @@ -22,16 +22,26 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Boolean, T.anything), state: OpenAI::Internal::Type::Converter::State) + .params(value: T.any( + T::Boolean, + T.anything + ), + state: OpenAI::Internal::Type::Converter::CoerceState) .returns(T.any(T::Boolean, T.anything)) end def coerce(value, state:); end # @api private sig(:final) do - override.params(value: T.any(T::Boolean, T.anything)).returns(T.any(T::Boolean, T.anything)) + override + .params(value: T.any( + T::Boolean, + T.anything + ), + state: OpenAI::Internal::Type::Converter::DumpState) + .returns(T.any(T::Boolean, T.anything)) end - def dump(value); end + def dump(value, state:); end end end end diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/lib/openai/internal/type/converter.rbi index cec2e8a2..e03e1c2e 100644 --- a/rbi/lib/openai/internal/type/converter.rbi +++ b/rbi/lib/openai/internal/type/converter.rbi @@ -7,7 +7,7 @@ module OpenAI module Converter Input = T.type_alias { T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) } - State = + CoerceState = T.type_alias do { strictness: T.any(T::Boolean, Symbol), @@ -16,18 +16,23 @@ module OpenAI } end + DumpState = T.type_alias { {can_retry: T::Boolean} } + # @api private sig do - overridable.params( - value: T.anything, - state: OpenAI::Internal::Type::Converter::State - ).returns(T.anything) + overridable + .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .returns(T.anything) end def coerce(value, state:); end # @api private - sig { overridable.params(value: T.anything).returns(T.anything) } - def dump(value); end + sig do + overridable + .params(value: T.anything, state: OpenAI::Internal::Type::Converter::DumpState) + .returns(T.anything) + end + def dump(value, state:); end class << self # @api private @@ -62,7 +67,7 @@ module OpenAI params( target: OpenAI::Internal::Type::Converter::Input, value: T.anything, - state: OpenAI::Internal::Type::Converter::State + state: OpenAI::Internal::Type::Converter::CoerceState ) .returns(T.anything) end @@ -92,9 +97,14 @@ module OpenAI ); end # @api private sig do - params(target: OpenAI::Internal::Type::Converter::Input, value: T.anything).returns(T.anything) + params( + target: OpenAI::Internal::Type::Converter::Input, + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ) + .returns(T.anything) end - def self.dump(target, value); end + def self.dump(target, value, state: {can_retry: true}); end end end end diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index f74dc677..4fcc4b5c 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -40,14 +40,23 @@ module OpenAI # of the enum. 
sig do override - .params(value: T.any(String, Symbol, T.anything), state: OpenAI::Internal::Type::Converter::State) + .params(value: T.any( + String, + Symbol, + T.anything + ), + state: OpenAI::Internal::Type::Converter::CoerceState) .returns(T.any(Symbol, T.anything)) end def coerce(value, state:); end # @api private - sig { override.params(value: T.any(Symbol, T.anything)).returns(T.any(Symbol, T.anything)) } - def dump(value); end + sig do + override + .params(value: T.any(Symbol, T.anything), state: OpenAI::Internal::Type::Converter::DumpState) + .returns(T.any(Symbol, T.anything)) + end + def dump(value, state:); end end end end diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index cfa8b710..18914409 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -38,7 +38,7 @@ module OpenAI override .params( value: T.any(T::Hash[T.anything, T.anything], T.anything), - state: OpenAI::Internal::Type::Converter::State + state: OpenAI::Internal::Type::Converter::CoerceState ) .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end @@ -47,10 +47,13 @@ module OpenAI # @api private sig(:final) do override - .params(value: T.any(T::Hash[T.anything, T.anything], T.anything)) + .params( + value: T.any(T::Hash[T.anything, T.anything], T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end - def dump(value); end + def dump(value, state:); end # @api private sig(:final) { returns(Elem) } diff --git a/rbi/lib/openai/internal/type/io_like.rbi b/rbi/lib/openai/internal/type/io_like.rbi new file mode 100644 index 00000000..d6c86399 --- /dev/null +++ b/rbi/lib/openai/internal/type/io_like.rbi @@ -0,0 +1,49 @@ +# typed: strong + +module OpenAI + module Internal + module Type + # @api private + # + # Either `Pathname` or `StringIO`. + class IOLike + extend OpenAI::Internal::Type::Converter + + abstract! + final! 
+ + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.===(other); end + + sig(:final) { params(other: T.anything).returns(T::Boolean) } + def self.==(other); end + + class << self + # @api private + sig(:final) do + override + .params(value: T.any( + StringIO, + String, + T.anything + ), + state: OpenAI::Internal::Type::Converter::CoerceState) + .returns(T.any(StringIO, T.anything)) + end + def coerce(value, state:); end + + # @api private + sig(:final) do + override + .params( + value: T.any(Pathname, StringIO, IO, String, T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) + .returns(T.any(Pathname, StringIO, IO, String, T.anything)) + end + def dump(value, state:); end + end + end + end + end +end diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi index 55bc31e8..e2fb7582 100644 --- a/rbi/lib/openai/internal/type/union.rbi +++ b/rbi/lib/openai/internal/type/union.rbi @@ -49,16 +49,20 @@ module OpenAI # @api private sig do - override.params( - value: T.anything, - state: OpenAI::Internal::Type::Converter::State - ).returns(T.anything) + override + .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .returns(T.anything) end def coerce(value, state:); end # @api private - sig { override.params(value: T.anything).returns(T.anything) } - def dump(value); end + sig do + override.params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ).returns(T.anything) + end + def dump(value, state:); end end end end diff --git a/rbi/lib/openai/internal/type/unknown.rbi b/rbi/lib/openai/internal/type/unknown.rbi index 55d4988d..8865c3b1 100644 --- a/rbi/lib/openai/internal/type/unknown.rbi +++ b/rbi/lib/openai/internal/type/unknown.rbi @@ -21,16 +21,20 @@ module OpenAI class << self # @api private sig(:final) do - override.params( - value: T.anything, - state: OpenAI::Internal::Type::Converter::State - ).returns(T.anything) + override + .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .returns(T.anything) end def coerce(value, state:); end # @api private - sig(:final) { override.params(value: T.anything).returns(T.anything) } - def dump(value); end + sig(:final) do + override.params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ).returns(T.anything) + end + def dump(value, state:); end end end end diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index 60be3113..433fab3c 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -140,10 +140,34 @@ module OpenAI def normalized_headers(*headers); end end + # @api private + class SerializationAdapter + sig { returns(T.any(Pathname, IO)) } + attr_reader :inner + + sig { params(a: T.anything).returns(String) } + def to_json(*a); end + + sig { params(a: T.anything).returns(String) } + def to_yaml(*a); end + + # @api private + sig { params(inner: T.any(Pathname, IO)).returns(T.attached_class) } + def self.new(inner); end + end + # @api private # # An adapter that satisfies the IO interface required by `::IO.copy_stream` class ReadIOAdapter + # @api private + sig { returns(T.nilable(T::Boolean)) } + def close?; end + + # @api private + sig { void } + def close; end + # @api private sig { params(max_len: T.nilable(Integer)).returns(String) } private def read_enum(max_len); end @@ -155,12 +179,12 @@ module OpenAI # @api private sig do params( - stream: T.any(String, IO, StringIO, 
T::Enumerable[String]), + src: T.any(String, Pathname, StringIO, T::Enumerable[String]), blk: T.proc.params(arg0: String).void ) .returns(T.attached_class) end - def self.new(stream, &blk); end + def self.new(src, &blk); end end class << self @@ -171,9 +195,16 @@ module OpenAI class << self # @api private sig do - params(y: Enumerator::Yielder, boundary: String, key: T.any(Symbol, String), val: T.anything).void + params( + y: Enumerator::Yielder, + boundary: String, + key: T.any(Symbol, String), + val: T.anything, + closing: T::Array[T.proc.void] + ) + .void end - private def write_multipart_chunk(y, boundary:, key:, val:); end + private def write_multipart_chunk(y, boundary:, key:, val:, closing:); end # @api private sig { params(body: T.anything).returns([String, T::Enumerable[String]]) } diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index 5511185c..c8481144 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -9,7 +9,7 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :file # ID of the model to use. The options are `gpt-4o-transcribe`, @@ -90,7 +90,7 @@ module OpenAI sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, @@ -117,7 +117,7 @@ module OpenAI override .returns( { - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 7e113ff2..6d4b9e08 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -9,7 +9,7 @@ module OpenAI # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source @@ -48,7 +48,7 @@ module OpenAI sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, @@ -64,7 +64,7 @@ module OpenAI override .returns( { - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index c3d5af72..c4a5174b 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -7,7 +7,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The File object (not file name) to be uploaded. 
- sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :file # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -19,7 +19,7 @@ module OpenAI sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) @@ -31,7 +31,7 @@ module OpenAI override .returns( { - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 59f2bd84..580de965 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -8,7 +8,7 @@ module OpenAI # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. - sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :image # The model to use for image generation. Only `dall-e-2` is supported at this @@ -43,7 +43,7 @@ module OpenAI sig do params( - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), @@ -68,7 +68,7 @@ module OpenAI override .returns( { - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 8d4b36be..4e7a68bf 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -8,7 +8,7 @@ module OpenAI # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask # is not provided, image must have transparency, which will be used as the mask. - sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 @@ -19,10 +19,10 @@ module OpenAI # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. - sig { returns(T.nilable(T.any(IO, StringIO))) } + sig { returns(T.nilable(T.any(Pathname, StringIO))) } attr_reader :mask - sig { params(mask: T.any(IO, StringIO)).void } + sig { params(mask: T.any(Pathname, StringIO)).void } attr_writer :mask # The model to use for image generation. 
Only `dall-e-2` is supported at this @@ -56,9 +56,9 @@ module OpenAI sig do params( - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), prompt: String, - mask: T.any(IO, StringIO), + mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), @@ -83,9 +83,9 @@ module OpenAI override .returns( { - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), prompt: String, - mask: T.any(IO, StringIO), + mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index f2ed872a..3915001e 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -8,19 +8,19 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The chunk of bytes for this Part. - sig { returns(T.any(IO, StringIO)) } + sig { returns(T.any(Pathname, StringIO)) } attr_accessor :data sig do params( - data: T.any(IO, StringIO), + data: T.any(Pathname, StringIO), request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end def self.new(data:, request_options: {}); end - sig { override.returns({data: T.any(IO, StringIO), request_options: OpenAI::RequestOptions}) } + sig { override.returns({data: T.any(Pathname, StringIO), request_options: OpenAI::RequestOptions}) } def to_hash; end end end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index 824a08af..a411c389 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -10,7 +10,7 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, @@ -73,7 +73,7 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index d39ad32c..c11c0bde 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -7,7 +7,7 @@ module OpenAI # Translates audio into English. sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 482ed085..182dba52 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -26,7 +26,7 @@ module OpenAI # storage limits. 
sig do params( - file: T.any(IO, StringIO), + file: T.any(Pathname, StringIO), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 79a97ac3..04c126f2 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a variation of a given image. sig do params( - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), @@ -42,9 +42,9 @@ module OpenAI # Creates an edited or extended image given an original image and a prompt. sig do params( - image: T.any(IO, StringIO), + image: T.any(Pathname, StringIO), prompt: String, - mask: T.any(IO, StringIO), + mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 9b073eea..8258aeea 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -18,7 +18,7 @@ module OpenAI sig do params( upload_id: String, - data: T.any(IO, StringIO), + data: T.any(Pathname, StringIO), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Uploads::UploadPart) diff --git a/sig/openai/internal/transport/pooled_net_requester.rbs b/sig/openai/internal/transport/pooled_net_requester.rbs index d12703cb..87b79180 100644 --- a/sig/openai/internal/transport/pooled_net_requester.rbs +++ b/sig/openai/internal/transport/pooled_net_requester.rbs @@ -21,7 +21,7 @@ module OpenAI OpenAI::Internal::Transport::PooledNetRequester::request request ) { (String arg0) -> void - } -> top + } -> [top, (^-> void)] private def with_pool: ( URI::Generic url, diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 6f344cc2..70d49013 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -17,10 +17,13 @@ module OpenAI def coerce: ( ::Array[top] | top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> (::Array[top] | top) - def dump: (::Array[top] | top value) -> (::Array[top] | top) + def dump: ( + ::Array[top] | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (::Array[top] | top) def item_type: -> Elem diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 69aca270..7d3c77f7 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -53,10 +53,13 @@ module OpenAI def self.coerce: ( OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> (instance | top) - def self.dump: (instance | top value) -> (::Hash[top, top] | top) + def self.dump: ( + instance | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (::Hash[top, top] | top) def []: (Symbol key) -> top? 
diff --git a/sig/openai/internal/type/boolean.rbs b/sig/openai/internal/type/boolean.rbs index cfc97bb1..0a654ab4 100644 --- a/sig/openai/internal/type/boolean.rbs +++ b/sig/openai/internal/type/boolean.rbs @@ -10,10 +10,13 @@ module OpenAI def self.coerce: ( bool | top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> (bool | top) - def self.dump: (bool | top value) -> (bool | top) + def self.dump: ( + bool | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (bool | top) end end end diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs index 56921a18..6c0c1434 100644 --- a/sig/openai/internal/type/converter.rbs +++ b/sig/openai/internal/type/converter.rbs @@ -4,19 +4,24 @@ module OpenAI module Converter type input = OpenAI::Internal::Type::Converter | Class - type state = + type coerce_state = { strictness: bool | :strong, exactness: { yes: Integer, no: Integer, maybe: Integer }, branched: Integer } + type dump_state = { can_retry: bool } + def coerce: ( top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> top - def dump: (top value) -> top + def dump: ( + top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> top def self.type_info: ( { @@ -31,12 +36,13 @@ module OpenAI def self.coerce: ( OpenAI::Internal::Type::Converter::input target, top value, - ?state: OpenAI::Internal::Type::Converter::state + ?state: OpenAI::Internal::Type::Converter::coerce_state ) -> top def self.dump: ( OpenAI::Internal::Type::Converter::input target, - top value + top value, + ?state: OpenAI::Internal::Type::Converter::dump_state ) -> top end end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 0a35ac11..e001bcd3 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -14,10 +14,13 @@ module OpenAI def coerce: ( String | Symbol | top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> (Symbol | top) - def dump: (Symbol | top value) -> (Symbol | top) + def dump: ( + Symbol | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (Symbol | top) end end end diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index 79743eb5..18eb149f 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -17,10 +17,13 @@ module OpenAI def coerce: ( ::Hash[top, top] | top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> (::Hash[Symbol, top] | top) - def dump: (::Hash[top, top] | top value) -> (::Hash[Symbol, top] | top) + def dump: ( + ::Hash[top, top] | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (::Hash[Symbol, top] | top) def item_type: -> Elem diff --git a/sig/openai/internal/type/io_like.rbs b/sig/openai/internal/type/io_like.rbs new file mode 100644 index 00000000..8a5fb508 --- /dev/null +++ b/sig/openai/internal/type/io_like.rbs @@ -0,0 +1,23 @@ +module OpenAI + module Internal + module Type + class IOLike + extend OpenAI::Internal::Type::Converter + + def self.===: (top other) -> bool + + def self.==: (top other) -> bool + + def self.coerce: ( + StringIO | String | top value, + state: OpenAI::Internal::Type::Converter::coerce_state + ) -> (StringIO | top) + + def self.dump: ( + 
Pathname | StringIO | IO | String | top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> (Pathname | StringIO | IO | String | top) + end + end + end +end diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 9d749433..9f1063f3 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -32,10 +32,13 @@ module OpenAI def coerce: ( top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> top - def dump: (top value) -> top + def dump: ( + top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> top end end end diff --git a/sig/openai/internal/type/unknown.rbs b/sig/openai/internal/type/unknown.rbs index 571a5cd7..0f9142d2 100644 --- a/sig/openai/internal/type/unknown.rbs +++ b/sig/openai/internal/type/unknown.rbs @@ -10,10 +10,13 @@ module OpenAI def self.coerce: ( top value, - state: OpenAI::Internal::Type::Converter::state + state: OpenAI::Internal::Type::Converter::coerce_state ) -> top - def self.dump: (top value) -> top + def self.dump: ( + top value, + state: OpenAI::Internal::Type::Converter::dump_state + ) -> top end end end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 1c93f8d7..ede1ce30 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -73,13 +73,27 @@ module OpenAI | ::Array[(String | Integer)?])?] headers ) -> ::Hash[String, String] + class SerializationAdapter + attr_reader inner: Pathname | IO + + def to_json: (*top a) -> String + + def to_yaml: (*top a) -> String + + def initialize: (Pathname | IO inner) -> void + end + class ReadIOAdapter + def close?: -> bool? + + def close: -> void + private def read_enum: (Integer? max_len) -> String def read: (?Integer? max_len, ?String? out_string) -> String? 
def initialize: ( - String | IO | StringIO | Enumerable[String] stream + String | Pathname | StringIO | Enumerable[String] src ) { (String arg0) -> void } -> void @@ -93,7 +107,8 @@ module OpenAI Enumerator::Yielder y, boundary: String, key: Symbol | String, - val: top + val: top, + closing: ::Array[^-> void] ) -> void def self?.encode_multipart_streaming: ( diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index f0ae2daa..0ed3e9cd 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type transcription_create_params = { - file: (IO | StringIO), + file: (Pathname | StringIO), model: OpenAI::Models::Audio::TranscriptionCreateParams::model, include: ::Array[OpenAI::Models::Audio::transcription_include], language: String, @@ -18,7 +18,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: IO | StringIO + attr_accessor file: Pathname | StringIO attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model @@ -53,7 +53,7 @@ module OpenAI ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( - file: IO | StringIO, + file: Pathname | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 588ac2cf..2ed83e46 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type translation_create_params = { - file: (IO | StringIO), + file: (Pathname | StringIO), model: OpenAI::Models::Audio::TranslationCreateParams::model, prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: IO | StringIO + attr_accessor file: Pathname | StringIO attr_accessor model: OpenAI::Models::Audio::TranslationCreateParams::model @@ -34,7 +34,7 @@ module OpenAI def temperature=: (Float) -> Float def initialize: ( - file: IO | StringIO, + file: Pathname | StringIO, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 46d8450e..c57c5981 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -1,19 +1,19 @@ module OpenAI module Models type file_create_params = - { file: (IO | StringIO), purpose: OpenAI::Models::file_purpose } + { file: (Pathname | StringIO), purpose: OpenAI::Models::file_purpose } & OpenAI::Internal::Type::request_parameters class FileCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: IO | StringIO + attr_accessor file: Pathname | StringIO attr_accessor purpose: OpenAI::Models::file_purpose def initialize: ( - file: IO | StringIO, + file: 
Pathname | StringIO, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 7b3071d3..28803369 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type image_create_variation_params = { - image: (IO | StringIO), + image: (Pathname | StringIO), model: OpenAI::Models::ImageCreateVariationParams::model?, n: Integer?, response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor image: IO | StringIO + attr_accessor image: Pathname | StringIO attr_accessor model: OpenAI::Models::ImageCreateVariationParams::model? @@ -30,7 +30,7 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: IO | StringIO, + image: Pathname | StringIO, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 9cb884a7..c5367ca5 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -2,9 +2,9 @@ module OpenAI module Models type image_edit_params = { - image: (IO | StringIO), + image: (Pathname | StringIO), prompt: String, - mask: (IO | StringIO), + mask: (Pathname | StringIO), model: OpenAI::Models::ImageEditParams::model?, n: Integer?, response_format: OpenAI::Models::ImageEditParams::response_format?, @@ -17,13 +17,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor image: IO | StringIO + attr_accessor image: Pathname | StringIO attr_accessor prompt: String - attr_reader mask: (IO | StringIO)? + attr_reader mask: (Pathname | StringIO)? - def mask=: (IO | StringIO) -> (IO | StringIO) + def mask=: (Pathname | StringIO) -> (Pathname | StringIO) attr_accessor model: OpenAI::Models::ImageEditParams::model? 
@@ -38,9 +38,9 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: IO | StringIO, + image: Pathname | StringIO, prompt: String, - ?mask: IO | StringIO, + ?mask: Pathname | StringIO, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index d6014118..89c4c092 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -2,16 +2,17 @@ module OpenAI module Models module Uploads type part_create_params = - { data: (IO | StringIO) } & OpenAI::Internal::Type::request_parameters + { data: (Pathname | StringIO) } + & OpenAI::Internal::Type::request_parameters class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor data: IO | StringIO + attr_accessor data: Pathname | StringIO def initialize: ( - data: IO | StringIO, + data: Pathname | StringIO, ?request_options: OpenAI::request_opts ) -> void diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 6506a498..bf5e0c11 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Transcriptions def create: ( - file: IO | StringIO, + file: Pathname | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, @@ -15,7 +15,7 @@ module OpenAI ) -> OpenAI::Models::Audio::transcription_create_response def create_streaming: ( - file: IO | StringIO, + file: Pathname | StringIO, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index 25ea82e3..a5cdf743 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Translations def create: ( - file: IO | StringIO, + file: Pathname | StringIO, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index 16e295dd..f4c12155 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Files def create: ( - file: IO | StringIO, + file: Pathname | StringIO, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FileObject diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index b1d09b4d..e4831410 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Images def create_variation: ( - image: IO | StringIO, + image: Pathname | StringIO, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -12,9 +12,9 @@ module OpenAI ) -> OpenAI::Models::ImagesResponse def edit: ( - image: IO | 
StringIO, + image: Pathname | StringIO, prompt: String, - ?mask: IO | StringIO, + ?mask: Pathname | StringIO, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index e00684f2..7d3f7d89 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -4,7 +4,7 @@ module OpenAI class Parts def create: ( String upload_id, - data: IO | StringIO, + data: Pathname | StringIO, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Uploads::UploadPart diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index 27178ef8..89816cf8 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -92,7 +92,9 @@ def test_dump [String, "one"] => "one", [String, :one] => :one, [:a, :b] => :b, - [:a, "a"] => "a" + [:a, "a"] => "a", + [String, StringIO.new("one")] => "one", + [String, Pathname(__FILE__)] => OpenAI::Internal::Util::SerializationAdapter } cases.each do @@ -122,6 +124,34 @@ def test_coerce_errors end end end + + def test_dump_retry + types = [ + OpenAI::Internal::Type::Unknown, + OpenAI::Internal::Type::Boolean, + A, + H, + E, + U, + B + ] + Pathname(__FILE__).open do |fd| + cases = [ + fd, + [fd], + {a: fd}, + {a: {b: fd}} + ] + types.product(cases).each do |target, input| + state = {can_retry: true} + OpenAI::Internal::Type::Converter.dump(target, input, state: state) + + assert_pattern do + state => {can_retry: false} + end + end + end + end end class OpenAI::Test::EnumModelTest < Minitest::Test diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index 71ba50ed..50140677 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -160,11 +160,12 @@ def test_joining_queries class OpenAI::Test::UtilFormDataEncodingTest < Minitest::Test class FakeCGI < CGI def initialize(headers, io) + encoded = io.to_a @ctype = headers["content-type"] # rubocop:disable Lint/EmptyBlock - @io = OpenAI::Internal::Util::ReadIOAdapter.new(io) {} + @io = OpenAI::Internal::Util::ReadIOAdapter.new(encoded.to_enum) {} # rubocop:enable Lint/EmptyBlock - @c_len = io.to_a.join.bytesize.to_s + @c_len = encoded.join.bytesize.to_s super() end @@ -180,15 +181,17 @@ def env_table end def test_file_encode + file = Pathname(__FILE__) headers = {"content-type" => "multipart/form-data"} cases = { - StringIO.new("abc") => "abc" + StringIO.new("abc") => "abc", + file => /^class OpenAI/ } cases.each do |body, val| encoded = OpenAI::Internal::Util.encode_content(headers, body) cgi = FakeCGI.new(*encoded) assert_pattern do - cgi[""] => ^val + cgi[""].read => ^val end end end @@ -199,13 +202,16 @@ def test_hash_encode {a: 2, b: 3} => {"a" => "2", "b" => "3"}, {a: 2, b: nil} => {"a" => "2", "b" => "null"}, {a: 2, b: [1, 2, 3]} => {"a" => "2", "b" => "1"}, - {file: StringIO.new("a")} => {"file" => "a"} + {strio: StringIO.new("a")} => {"strio" => "a"}, + {pathname: Pathname(__FILE__)} => {"pathname" => -> { _1.read in /^class OpenAI/ }} } cases.each do |body, testcase| encoded = OpenAI::Internal::Util.encode_content(headers, body) cgi = FakeCGI.new(*encoded) testcase.each do |key, val| - assert_equal(val, cgi[key]) + assert_pattern do + cgi[key] => ^val + end end end end diff --git a/test/openai/resources/audio/transcriptions_test.rb 
b/test/openai/resources/audio/transcriptions_test.rb index a8a6d471..b074f1ba 100644 --- a/test/openai/resources/audio/transcriptions_test.rb +++ b/test/openai/resources/audio/transcriptions_test.rb @@ -4,8 +4,7 @@ class OpenAI::Test::Resources::Audio::TranscriptionsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = - @openai.audio.transcriptions.create(file: StringIO.new("some file contents"), model: :"whisper-1") + response = @openai.audio.transcriptions.create(file: Pathname(__FILE__), model: :"whisper-1") assert_pattern do response => OpenAI::Models::Audio::TranscriptionCreateResponse diff --git a/test/openai/resources/audio/translations_test.rb b/test/openai/resources/audio/translations_test.rb index d42b8267..c6a64ad3 100644 --- a/test/openai/resources/audio/translations_test.rb +++ b/test/openai/resources/audio/translations_test.rb @@ -4,8 +4,7 @@ class OpenAI::Test::Resources::Audio::TranslationsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = - @openai.audio.translations.create(file: StringIO.new("some file contents"), model: :"whisper-1") + response = @openai.audio.translations.create(file: Pathname(__FILE__), model: :"whisper-1") assert_pattern do response => OpenAI::Models::Audio::TranslationCreateResponse diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index d18f31f4..9fceee6a 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::FilesTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.files.create(file: StringIO.new("some file contents"), purpose: :assistants) + response = @openai.files.create(file: Pathname(__FILE__), purpose: :assistants) assert_pattern do response => OpenAI::Models::FileObject diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb index a9bb70a4..867dc545 100644 --- a/test/openai/resources/images_test.rb +++ b/test/openai/resources/images_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::ImagesTest < OpenAI::Test::ResourceTest def test_create_variation_required_params - response = @openai.images.create_variation(image: StringIO.new("some file contents")) + response = @openai.images.create_variation(image: Pathname(__FILE__)) assert_pattern do response => OpenAI::Models::ImagesResponse @@ -20,10 +20,7 @@ def test_create_variation_required_params def test_edit_required_params response = - @openai.images.edit( - image: StringIO.new("some file contents"), - prompt: "A cute baby sea otter wearing a beret" - ) + @openai.images.edit(image: Pathname(__FILE__), prompt: "A cute baby sea otter wearing a beret") assert_pattern do response => OpenAI::Models::ImagesResponse diff --git a/test/openai/resources/uploads/parts_test.rb b/test/openai/resources/uploads/parts_test.rb index f5c9e510..b10dcbb3 100644 --- a/test/openai/resources/uploads/parts_test.rb +++ b/test/openai/resources/uploads/parts_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Uploads::PartsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.uploads.parts.create("upload_abc123", data: StringIO.new("some file contents")) + response = @openai.uploads.parts.create("upload_abc123", data: Pathname(__FILE__)) assert_pattern do response => OpenAI::Models::Uploads::UploadPart From 980ab0cd1c42296b0295c628aa6ac76298cd8e1a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 22:30:31 +0000 Subject: [PATCH 118/295] fix: raise connection error for errors that result from HTTP transports (#120) --- lib/openai/internal/transport/base_client.rb | 2 +- lib/openai/internal/transport/pooled_net_requester.rb | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index b2e526d5..0489a1c8 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -380,7 +380,7 @@ def initialize( in (400..) | OpenAI::Errors::APIConnectionError self.class.reap_connection!(status, stream: stream) - delay = retry_delay(response, retry_count: retry_count) + delay = retry_delay(response || {}, retry_count: retry_count) sleep(delay) send_request( diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index b6303758..7bb8ef19 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -123,9 +123,12 @@ def build_request(request, &blk) def execute(request) url, deadline = request.fetch_values(:url, :deadline) + req = nil eof = false finished = false closing = nil + + # rubocop:disable Metrics/BlockLength enum = Enumerator.new do |y| with_pool(url, deadline: deadline) do |conn| next if finished @@ -155,8 +158,11 @@ def execute(request) end end rescue Timeout::Error - raise OpenAI::Errors::APITimeoutError + raise OpenAI::Errors::APITimeoutError.new(url: url, request: req) + rescue StandardError + raise OpenAI::Errors::APIConnectionError.new(url: url, request: req) end + # rubocop:enable Metrics/BlockLength conn, _, response = enum.next body = OpenAI::Internal::Util.fused_enum(enum, external: true) do From 8cd51a225972eb42e47d75ed6a20cd85020028ac Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Apr 2025 23:04:31 +0000 Subject: [PATCH 119/295] chore: loosen const and integer coercion rules (#121) --- lib/openai/internal/type/converter.rb | 24 +++++++++++++------- test/openai/internal/type/base_model_test.rb | 10 ++++---- 2 files changed, 21 insertions(+), 13 deletions(-) diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index f7f8ed08..f6c74199 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -149,9 +149,9 @@ def coerce( if value.is_a?(Integer) exactness[:yes] += 1 return value - elsif strictness == :strong + elsif strictness == :strong && Integer(value, exception: false) != value message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) + raise value.is_a?(Numeric) ? 
ArgumentError.new(message) : TypeError.new(message) else Kernel.then do return Integer(value).tap { exactness[:maybe] += 1 } @@ -197,12 +197,20 @@ def coerce( else end in Symbol - if (value.is_a?(Symbol) || value.is_a?(String)) && value.to_sym == target - exactness[:yes] += 1 - return target - elsif strictness == :strong - message = "cannot convert non-matching #{value.class} into #{target.inspect}" - raise ArgumentError.new(message) + case value + in Symbol | String + if value.to_sym == target + exactness[:yes] += 1 + return target + else + exactness[:maybe] += 1 + return value + end + else + if strictness == :strong + message = "cannot convert non-matching #{value.class} into #{target.inspect}" + raise ArgumentError.new(message) + end end else end diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index 89816cf8..ee8224df 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -111,7 +111,6 @@ def test_coerce_errors [Integer, "one"] => TypeError, [Float, "one"] => TypeError, [String, Time] => TypeError, - [:a, "one"] => ArgumentError, [Date, "one"] => ArgumentError, [Time, "one"] => ArgumentError } @@ -346,7 +345,7 @@ def test_coerce [M2, {a: "1990-09-19", c: nil}] => [{yes: 2, maybe: 2}, {a: "1990-09-19", c: nil}], [M3, {c: "c", d: "d"}] => [{yes: 3}, {c: :c, d: :d}], - [M3, {c: "d", d: "c"}] => [{yes: 1, no: 2}, {c: "d", d: "c"}], + [M3, {c: "d", d: "c"}] => [{yes: 1, maybe: 2}, {c: "d", d: "c"}], [M4, {c: 2}] => [{yes: 5}, {c: 2}], [M4, {a: "1", c: 2}] => [{yes: 4, maybe: 1}, {a: "1", c: 2}], @@ -404,7 +403,8 @@ def test_accessors cases = { M2.new({a: "1990-09-19", b: "1"}) => {a: Time.new(1990, 9, 19), b: TypeError}, M2.new(a: "one", b: "one") => {a: ArgumentError, b: TypeError}, - M2.new(a: nil, b: 2.0) => {a: TypeError, b: TypeError}, + M2.new(a: nil, b: 2.0) => {a: TypeError}, + M2.new(a: nil, b: 2.2) => {a: TypeError, b: ArgumentError}, M3.new => {d: :d}, M3.new(d: 1) => {d: ArgumentError}, @@ -520,8 +520,8 @@ def test_coerce [U0, :""] => [{no: 1}, 0, :""], [U1, "a"] => [{yes: 1}, 1, :a], - [U1, "2"] => [{maybe: 1}, 2, 2], - [U1, :b] => [{no: 1}, 2, :b], + [U1, "2"] => [{maybe: 1}, 2, "2"], + [U1, :b] => [{maybe: 1}, 2, :b], [U2, {type: :a}] => [{yes: 3}, 0, {t: :a}], [U2, {type: "b"}] => [{yes: 3}, 0, {type: :b}], From d50492dbd9b4ad92b6fd877908fea047a4a4fb25 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 04:19:05 +0000 Subject: [PATCH 120/295] chore: ensure readme.md is bumped when release please updates versions (#122) --- release-please-config.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/release-please-config.json b/release-please-config.json index 29361af6..f8c36cfe 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -60,5 +60,8 @@ } ], "release-type": "ruby", - "version-file": "lib/openai/version.rb" + "version-file": "lib/openai/version.rb", + "extra-files": [ + "README.md" + ] } \ No newline at end of file From 12ebc9cce7b52b94113ec678e211347f2cff6af0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 09:27:17 +0000 Subject: [PATCH 121/295] chore: workaround build errors --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 037cba0b..a58c4247 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 
@@ configured_endpoints: 95 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-44b20fa9d24544217fe6bb48852037537030a1ad29b202936425110744fe66fb.yml openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 -config_hash: 69e3afd56ccb0f0f822a7a9dc130fc99 +config_hash: 5ea32de61ff42fcf5e66cff8d9e247ea From aa8f14fa7ded71f18fef96139bc2eba2c93d9860 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 14:40:06 +0000 Subject: [PATCH 122/295] fix(internal): update release-please to use ruby strategy for README.md (#123) --- release-please-config.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/release-please-config.json b/release-please-config.json index f8c36cfe..847228c1 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -62,6 +62,9 @@ "release-type": "ruby", "version-file": "lib/openai/version.rb", "extra-files": [ - "README.md" + { + "type": "ruby-readme", + "path": "README.md" + } ] } \ No newline at end of file From 9ec68a36dbd2675bc7921dc2e2f140aa53a4394e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 16:32:33 +0000 Subject: [PATCH 123/295] chore(internal): expand CI branch coverage (#124) --- .github/workflows/ci.yml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6992080f..1a75c727 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,18 +1,18 @@ name: CI on: push: - branches: - - main - pull_request: - branches: - - main - - next + branches-ignore: + - 'generated' + - 'codegen/**' + - 'integrated/**' + - 'preview-head/**' + - 'preview-base/**' + - 'preview/**' jobs: lint: name: lint runs-on: ubuntu-latest - steps: - uses: actions/checkout@v4 @@ -29,7 +29,6 @@ jobs: test: name: test runs-on: ubuntu-latest - steps: - uses: actions/checkout@v4 - name: Set up Ruby From 2c8a77fb024ac1a49a358527c73d41b30cc2f7b7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 16:51:08 +0000 Subject: [PATCH 124/295] chore: fix readme typo (#125) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8171b0e9..00a3aefb 100644 --- a/README.md +++ b/README.md @@ -86,7 +86,7 @@ stream.each do |completion| end ``` -## File uploads +### File uploads Request parameters that correspond to file uploads can be passed as `StringIO`, or a [`Pathname`](https://rubyapi.org/3.1/o/pathname) instance. 
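The retitled "File uploads" section benefits from a concrete call showing both accepted forms. A minimal sketch, assuming the `openai` client constructed earlier in the README; `input.jsonl` is a placeholder path, and the `purpose: :assistants` argument matches the upload examples used in this series' tests:

```ruby
require "pathname"
require "stringio"

# A `Pathname` lets the SDK stream the file from disk as the request body ...
file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: :assistants)

# ... while a `StringIO` passes contents that are already in memory.
file_object = openai.files.create(file: StringIO.new("some file contents"), purpose: :assistants)

puts(file_object.id)
```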
From c9fd6c693273139fe8f2681f543ab559887b918e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 9 Apr 2025 21:09:32 +0000 Subject: [PATCH 125/295] chore(internal): reduce CI branch coverage --- .github/workflows/ci.yml | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1a75c727..b2c0c8f7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,19 +1,17 @@ name: CI on: push: - branches-ignore: - - 'generated' - - 'codegen/**' - - 'integrated/**' - - 'preview-head/**' - - 'preview-base/**' - - 'preview/**' + branches: + - main + pull_request: + branches: + - main + - next jobs: lint: name: lint runs-on: ubuntu-latest - steps: - uses: actions/checkout@v4 - name: Set up Ruby From 4e65200da1430c692caaeae3982badbe58f2286c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 11 Apr 2025 17:48:01 +0000 Subject: [PATCH 126/295] fix: inaccuracies in the README.md --- README.md | 56 +++++++++++++++++++++----------------------------- openai.gemspec | 2 +- 2 files changed, 24 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index 00a3aefb..60abfc28 100644 --- a/README.md +++ b/README.md @@ -20,12 +20,6 @@ gem "openai", "~> 0.1.0.pre.alpha.3" -To fetch an initial copy of the gem: - -```sh -bundle install -``` - ## Usage ```ruby @@ -36,14 +30,13 @@ openai = OpenAI::Client.new( api_key: "My API Key" # defaults to ENV["OPENAI_API_KEY"] ) -chat_completion = - openai.chat.completions.create( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" - ) +chat_completion = openai.chat.completions.create( + messages: [{ + role: "user", + content: "Say this is a test" + }], + model: "gpt-4o" +) puts(chat_completion) ``` @@ -72,14 +65,13 @@ end We provide support for streaming responses using Server Side Events (SSE). ```ruby -stream = - openai.chat.completions.stream_raw( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" - ) +stream = openai.chat.completions.stream_raw( + messages: [{ + role: "user", + content: "Say this is a test" + }], + model: "gpt-4o" +) stream.each do |completion| puts(completion) @@ -105,7 +97,7 @@ puts(file_object.id) ### Errors -When the library is unable to connect to the API, or if the API returns a non-success status code (i.e., 4xx or 5xx response), a subclass of `OpenAI::Error` will be thrown: +When the library is unable to connect to the API, or if the API returns a non-success status code (i.e., 4xx or 5xx response), a subclass of `OpenAI::Errors::APIError` will be thrown: ```ruby begin @@ -217,14 +209,13 @@ Due to limitations with the Sorbet type system, where a method otherwise can tak Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience. ```ruby -params = - OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" - ) +params = OpenAI::Models::Chat::CompletionCreateParams.new( + messages: [{ + role: "user", + content: "Say this is a test" + }], + model: "gpt-4o" +) openai.chat.completions.create(**params) ``` @@ -252,8 +243,7 @@ If you want to explicitly send an extra param, you can do so with the `extra_que To make requests to undocumented endpoints, you can make requests using `client.request`. 
Options on the client will be respected (such as retries) when making this request. ```ruby -response = - client.request( +response = client.request( method: :post, path: '/undocumented/endpoint', query: {"dog": "woof"}, diff --git a/openai.gemspec b/openai.gemspec index 60a53a67..1171bdbe 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -8,7 +8,7 @@ Gem::Specification.new do |s| s.summary = "Ruby library to access the OpenAI API" s.authors = ["OpenAI"] s.email = "support@openai.com" - s.files = Dir["lib/**/*.rb", "rbi/**/*.rbi", "sig/**/*.rbs", "manifest.yaml"] + s.files = Dir["lib/**/*.rb", "rbi/**/*.rbi", "sig/**/*.rbs", "manifest.yaml", "CHANGELOG.md", "SECURITY.md"] s.extra_rdoc_files = ["README.md"] s.required_ruby_version = ">= 3.0.0" s.add_dependency "connection_pool" From e5a3514eb83653980beda253966c8f3702417df8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 13 Apr 2025 06:38:36 +0000 Subject: [PATCH 127/295] chore: make internal types pretty print --- lib/openai/internal.rb | 6 ++- lib/openai/internal/cursor_page.rb | 13 +++++-- lib/openai/internal/page.rb | 13 +++++-- lib/openai/internal/transport/base_client.rb | 2 + lib/openai/internal/type/array_of.rb | 15 +++++++- lib/openai/internal/type/base_model.rb | 38 ++++++++++++++++--- lib/openai/internal/type/base_page.rb | 1 + lib/openai/internal/type/base_stream.rb | 9 +++++ lib/openai/internal/type/boolean.rb | 2 + lib/openai/internal/type/converter.rb | 24 ++++++++++++ lib/openai/internal/type/enum.rb | 13 +++++++ lib/openai/internal/type/hash_of.rb | 15 +++++++- lib/openai/internal/type/io_like.rb | 2 + lib/openai/internal/type/union.rb | 13 +++++++ lib/openai/internal/type/unknown.rb | 2 + rbi/lib/openai/internal.rbi | 2 +- rbi/lib/openai/internal/cursor_page.rbi | 1 + rbi/lib/openai/internal/page.rbi | 1 + .../openai/internal/transport/base_client.rbi | 1 + rbi/lib/openai/internal/type/array_of.rbi | 4 ++ rbi/lib/openai/internal/type/base_model.rbi | 7 ++++ rbi/lib/openai/internal/type/base_stream.rbi | 4 ++ rbi/lib/openai/internal/type/converter.rbi | 8 ++++ rbi/lib/openai/internal/type/enum.rbi | 4 ++ rbi/lib/openai/internal/type/hash_of.rbi | 4 ++ rbi/lib/openai/internal/type/union.rbi | 4 ++ sig/openai/internal.rbs | 2 +- sig/openai/internal/type/array_of.rbs | 2 + sig/openai/internal/type/base_model.rbs | 2 + sig/openai/internal/type/base_stream.rbs | 2 + sig/openai/internal/type/converter.rbs | 4 ++ sig/openai/internal/type/enum.rbs | 2 + sig/openai/internal/type/hash_of.rbs | 2 + sig/openai/internal/type/union.rbs | 2 + 34 files changed, 208 insertions(+), 18 deletions(-) diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb index 42050499..32bc2cf5 100644 --- a/lib/openai/internal.rb +++ b/lib/openai/internal.rb @@ -3,6 +3,10 @@ module OpenAI # @api private module Internal - OMIT = Object.new.freeze + OMIT = + Object.new.tap do + _1.define_singleton_method(:inspect) { "#<#{OpenAI::Internal}::OMIT>" } + end + .freeze end end diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb index a56921db..f6c96f58 100644 --- a/lib/openai/internal/cursor_page.rb +++ b/lib/openai/internal/cursor_page.rb @@ -30,11 +30,10 @@ class CursorPage # @param page_data [Hash{Symbol=>Object}] def initialize(client:, req:, headers:, page_data:) super - model = req.fetch(:model) case page_data in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) } + @data = data&.map { 
OpenAI::Internal::Type::Converter.coerce(@model, _1) } else end @@ -69,17 +68,23 @@ def auto_paging_each(&blk) unless block_given? raise ArgumentError.new("A block must be given to ##{__method__}") end + page = self loop do - page.data&.each { blk.call(_1) } + page.data&.each(&blk) + break unless page.next_page? page = page.next_page end end + # @api private + # # @return [String] def inspect - "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} has_more=#{has_more.inspect}>" + model = OpenAI::Internal::Type::Converter.inspect(@model, depth: 1) + + "#<#{self.class}[#{model}]:0x#{object_id.to_s(16)} has_more=#{has_more.inspect}>" end end end diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb index 368ffd45..9b77383d 100644 --- a/lib/openai/internal/page.rb +++ b/lib/openai/internal/page.rb @@ -30,11 +30,10 @@ class Page # @param page_data [Array] def initialize(client:, req:, headers:, page_data:) super - model = req.fetch(:model) case page_data in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(model, _1) } + @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } else end @@ -63,17 +62,23 @@ def auto_paging_each(&blk) unless block_given? raise ArgumentError.new("A block must be given to ##{__method__}") end + page = self loop do - page.data&.each { blk.call(_1) } + page.data&.each(&blk) + break unless page.next_page? page = page.next_page end end + # @api private + # # @return [String] def inspect - "#<#{self.class}:0x#{object_id.to_s(16)} data=#{data.inspect} object=#{object.inspect}>" + model = OpenAI::Internal::Type::Converter.inspect(@model, depth: 1) + + "#<#{self.class}[#{model}]:0x#{object_id.to_s(16)} object=#{object.inspect}>" end end end diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 0489a1c8..d0c72a8d 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -460,6 +460,8 @@ def request(req) end end + # @api private + # # @return [String] def inspect # rubocop:disable Layout/LineLength diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 89cec2be..a1147fa6 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -13,6 +13,8 @@ module Type class ArrayOf include OpenAI::Internal::Type::Converter + private_class_method :new + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] # # @param spec [Hash{Symbol=>Object}] . @@ -120,7 +122,18 @@ def dump(value, state:) # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] + @nilable = spec.fetch(:nil?, false) + end + + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + # rubocop:disable Layout/LineLength + "#{self.class}[#{[OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ), nilable? ? 'nil' : nil].compact.join(' | ')}]" + # rubocop:enable Layout/LineLength end end end diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 1a180ea8..efc555b7 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -63,7 +63,7 @@ def fields setter = "#{name_sym}=" api_name = info.fetch(:api_name, name_sym) - nilable = info[:nil?] 
+ nilable = info.fetch(:nil?, false) const = required && !nilable ? info.fetch(:const, OpenAI::Internal::OMIT) : OpenAI::Internal::OMIT [name_sym, setter].each { undef_method(_1) } if known_fields.key?(name_sym) @@ -361,14 +361,42 @@ def initialize(data = {}) end end + class << self + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + return super() if depth.positive? + + depth = depth.succ + deferred = fields.transform_values do |field| + type, required, nilable = field.fetch_values(:type, :required, :nilable) + -> do + [ + OpenAI::Internal::Type::Converter.inspect(type, depth: depth), + !required || nilable ? "nil" : nil + ].compact.join(" | ") + end + .tap { _1.define_singleton_method(:inspect) { call } } + end + + "#{name}[#{deferred.inspect}]" + end + end + + # @api private + # # @return [String] def inspect - rows = self.class.known_fields.keys.map do - "#{_1}=#{@data.key?(_1) ? public_send(_1) : ''}" + rows = @data.map do + "#{_1}=#{self.class.known_fields.key?(_1) ? public_send(_1).inspect : ''}" rescue OpenAI::Errors::ConversionError - "#{_1}=#{@data.fetch(_1)}" + "#{_1}=#{_2.inspect}" end - "#<#{self.class.name}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" + + "#<#{self.class}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" end end end diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb index d4c64531..48c98885 100644 --- a/lib/openai/internal/type/base_page.rb +++ b/lib/openai/internal/type/base_page.rb @@ -36,6 +36,7 @@ def to_enum = super(:auto_paging_each) def initialize(client:, req:, headers:, page_data:) @client = client @req = req + @model = req.fetch(:model) super() end diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index 98a8c528..ebcb564c 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -67,6 +67,15 @@ def initialize(model:, url:, status:, response:, stream:) ObjectSpace.define_finalizer(self, OpenAI::Internal::Type::BaseStream.defer_closing(@stream)) end + + # @api private + # + # @return [String] + def inspect + model = OpenAI::Internal::Type::Converter.inspect(@model, depth: 1) + + "#<#{self.class}[#{model}]:0x#{object_id.to_s(16)}>" + end end end end diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index cf12a25a..298979c8 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -11,6 +11,8 @@ module Type class Boolean extend OpenAI::Internal::Type::Converter + private_class_method :new + # @param other [Object] # # @return [Boolean] diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index f6c74199..1cd4385d 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -49,6 +49,15 @@ def dump(value, state:) end end + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + super() + end + # rubocop:enable Lint/UnusedMethodArgument class << self @@ -240,6 +249,21 @@ def dump(target, value, state: {can_retry: true}) OpenAI::Internal::Type::Unknown.dump(value, state: state) end end + + # @api private + # + # @param target [Object] + # @param depth [Integer] + # + # @return [String] + def inspect(target, depth:) + case target + in OpenAI::Internal::Type::Converter + target.inspect(depth: depth.succ) + else + target.inspect + end + end end end end diff --git a/lib/openai/internal/type/enum.rb 
b/lib/openai/internal/type/enum.rb index 50aa9467..52fd23c9 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -107,6 +107,19 @@ def coerce(value, state:) # # # # @return [Symbol, Object] # def dump(value, state:) = super + + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + # rubocop:disable Layout/LineLength + return super() if depth.positive? + + "#{name}[#{values.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) }.join(' | ')}]" + # rubocop:enable Layout/LineLength + end end end end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index d88b05e6..6dfb501e 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -13,6 +13,8 @@ module Type class HashOf include OpenAI::Internal::Type::Converter + private_class_method :new + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] # # @param spec [Hash{Symbol=>Object}] . @@ -140,7 +142,18 @@ def dump(value, state:) # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) - @nilable = spec[:nil?] + @nilable = spec.fetch(:nil?, false) + end + + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + # rubocop:disable Layout/LineLength + "#{self.class}[#{[OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ), nilable? ? 'nil' : nil].compact.join(' | ')}]" + # rubocop:enable Layout/LineLength end end end diff --git a/lib/openai/internal/type/io_like.rb b/lib/openai/internal/type/io_like.rb index 5147b4e3..43aba589 100644 --- a/lib/openai/internal/type/io_like.rb +++ b/lib/openai/internal/type/io_like.rb @@ -11,6 +11,8 @@ module Type class IOLike extend OpenAI::Internal::Type::Converter + private_class_method :new + # @param other [Object] # # @return [Boolean] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index ac66dc5f..68150bb4 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -210,6 +210,19 @@ def dump(value, state:) # rubocop:enable Style/CaseEquality # rubocop:enable Style/HashEachMethods + + # @api private + # + # @param depth [Integer] + # + # @return [String] + def inspect(depth: 0) + # rubocop:disable Layout/LineLength + return super() if depth.positive? + + "#{name}[#{variants.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) }.join(' | ')}]" + # rubocop:enable Layout/LineLength + end end end end diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index 1c73e2f9..36556ac1 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -13,6 +13,8 @@ class Unknown # rubocop:disable Lint/UnusedMethodArgument + private_class_method :new + # @param other [Object] # # @return [Boolean] diff --git a/rbi/lib/openai/internal.rbi b/rbi/lib/openai/internal.rbi index 76548034..05388159 100644 --- a/rbi/lib/openai/internal.rbi +++ b/rbi/lib/openai/internal.rbi @@ -7,6 +7,6 @@ module OpenAI # this alias might be refined in the future. 
AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } - OMIT = T.let(T.anything, T.anything) + OMIT = T.let(Object.new.freeze, T.anything) end end diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/lib/openai/internal/cursor_page.rbi index 80b18955..7bc68531 100644 --- a/rbi/lib/openai/internal/cursor_page.rbi +++ b/rbi/lib/openai/internal/cursor_page.rbi @@ -13,6 +13,7 @@ module OpenAI sig { returns(T::Boolean) } attr_accessor :has_more + # @api private sig { returns(String) } def inspect; end end diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/lib/openai/internal/page.rbi index 14d7ba40..eda6d887 100644 --- a/rbi/lib/openai/internal/page.rbi +++ b/rbi/lib/openai/internal/page.rbi @@ -13,6 +13,7 @@ module OpenAI sig { returns(String) } attr_accessor :object + # @api private sig { returns(String) } def inspect; end end diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index 5653189b..18a8ea43 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -196,6 +196,7 @@ module OpenAI model: OpenAI::Internal::Type::Unknown, options: {} ); end + # @api private sig { returns(String) } def inspect; end end diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index ef39a4c7..e6f84632 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -76,6 +76,10 @@ module OpenAI .void end def initialize(type_info, spec = {}); end + + # @api private + sig(:final) { params(depth: Integer).returns(String) } + def inspect(depth: 0); end end end end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 2d0c0bfc..23fbb5a9 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -185,6 +185,13 @@ module OpenAI sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } def self.new(data = {}); end + class << self + # @api private + sig { params(depth: Integer).returns(String) } + def inspect(depth: 0); end + end + + # @api private sig { returns(String) } def inspect; end end diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi index 81f3a71c..8dcf7413 100644 --- a/rbi/lib/openai/internal/type/base_stream.rbi +++ b/rbi/lib/openai/internal/type/base_stream.rbi @@ -45,6 +45,10 @@ module OpenAI .void end def initialize(model:, url:, status:, response:, stream:); end + + # @api private + sig { returns(String) } + def inspect; end end end end diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/lib/openai/internal/type/converter.rbi index e03e1c2e..47dace26 100644 --- a/rbi/lib/openai/internal/type/converter.rbi +++ b/rbi/lib/openai/internal/type/converter.rbi @@ -34,6 +34,10 @@ module OpenAI end def dump(value, state:); end + # @api private + sig { params(depth: Integer).returns(String) } + def inspect(depth: 0); end + class << self # @api private sig do @@ -105,6 +109,10 @@ module OpenAI .returns(T.anything) end def self.dump(target, value, state: {can_retry: true}); end + + # @api private + sig { params(target: T.anything, depth: Integer).returns(String) } + def self.inspect(target, depth:); end end end end diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index 4fcc4b5c..a85d1768 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ 
b/rbi/lib/openai/internal/type/enum.rbi @@ -57,6 +57,10 @@ module OpenAI .returns(T.any(Symbol, T.anything)) end def dump(value, state:); end + + # @api private + sig { params(depth: Integer).returns(String) } + def inspect(depth: 0); end end end end diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index 18914409..93b3b4fa 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -76,6 +76,10 @@ module OpenAI .void end def initialize(type_info, spec = {}); end + + # @api private + sig(:final) { params(depth: Integer).returns(String) } + def inspect(depth: 0); end end end end diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi index e2fb7582..2e47dfe1 100644 --- a/rbi/lib/openai/internal/type/union.rbi +++ b/rbi/lib/openai/internal/type/union.rbi @@ -63,6 +63,10 @@ module OpenAI ).returns(T.anything) end def dump(value, state:); end + + # @api private + sig { params(depth: Integer).returns(String) } + def inspect(depth: 0); end end end end diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs index 1da6dd75..cd18cd99 100644 --- a/sig/openai/internal.rbs +++ b/sig/openai/internal.rbs @@ -1,5 +1,5 @@ module OpenAI module Internal - OMIT: top + OMIT: Object end end diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 70d49013..0489e6e2 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -35,6 +35,8 @@ module OpenAI | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec ) -> void + + def inspect: (?depth: Integer) -> String end end end diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 7d3c77f7..f6bfeb6a 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -75,6 +75,8 @@ module OpenAI def initialize: (?::Hash[Symbol, top] | self data) -> void + def self.inspect: (?depth: Integer) -> String + def inspect: -> String end end diff --git a/sig/openai/internal/type/base_stream.rbs b/sig/openai/internal/type/base_stream.rbs index b526bcbc..d43b91c2 100644 --- a/sig/openai/internal/type/base_stream.rbs +++ b/sig/openai/internal/type/base_stream.rbs @@ -25,6 +25,8 @@ module OpenAI response: top, stream: Enumerable[Message] ) -> void + + def inspect: -> String end end end diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs index 6c0c1434..0e4c56aa 100644 --- a/sig/openai/internal/type/converter.rbs +++ b/sig/openai/internal/type/converter.rbs @@ -23,6 +23,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> top + def inspect: (?depth: Integer) -> String + def self.type_info: ( { const: (nil | bool | Integer | Float | Symbol)?, @@ -44,6 +46,8 @@ module OpenAI top value, ?state: OpenAI::Internal::Type::Converter::dump_state ) -> top + + def self.inspect: (top target, depth: Integer) -> String end end end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index e001bcd3..5f707303 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -21,6 +21,8 @@ module OpenAI Symbol | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (Symbol | top) + + def inspect: (?depth: Integer) -> String end end end diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index 18eb149f..e23bc0c3 
100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -35,6 +35,8 @@ module OpenAI | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec ) -> void + + def inspect: (?depth: Integer) -> String end end end diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 9f1063f3..8b58f419 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -39,6 +39,8 @@ module OpenAI top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> top + + def inspect: (?depth: Integer) -> String end end end From 6f0621c0ff765324fcfac9d896c84722ee7a519c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 13 Apr 2025 08:45:19 +0000 Subject: [PATCH 128/295] chore(internal): minor touch ups on sdk internals --- lib/openai/internal/cursor_page.rb | 39 +++++++++----------- lib/openai/internal/page.rb | 39 +++++++++----------- lib/openai/internal/transport/base_client.rb | 14 +++++-- lib/openai/internal/type/array_of.rb | 10 +++-- lib/openai/internal/type/enum.rb | 6 +-- lib/openai/internal/type/hash_of.rb | 10 +++-- lib/openai/internal/type/union.rb | 6 +-- 7 files changed, 63 insertions(+), 61 deletions(-) diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb index f6c96f58..eb661553 100644 --- a/lib/openai/internal/cursor_page.rb +++ b/lib/openai/internal/cursor_page.rb @@ -22,28 +22,6 @@ class CursorPage # @return [Boolean] attr_accessor :has_more - # @api private - # - # @param client [OpenAI::Internal::Transport::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Hash{Symbol=>Object}] - def initialize(client:, req:, headers:, page_data:) - super - - case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } - else - end - - case page_data - in {has_more: true | false | nil => has_more} - @has_more = has_more - else - end - end - # @return [Boolean] def next_page? has_more @@ -78,6 +56,23 @@ def auto_paging_each(&blk) end end + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Hash{Symbol=>Object}] + def initialize(client:, req:, headers:, page_data:) + super + + case page_data + in {data: Array | nil => data} + @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } + else + end + @has_more = page_data[:has_more] + end + # @api private # # @return [String] diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb index 9b77383d..7df85077 100644 --- a/lib/openai/internal/page.rb +++ b/lib/openai/internal/page.rb @@ -22,28 +22,6 @@ class Page # @return [String] attr_accessor :object - # @api private - # - # @param client [OpenAI::Internal::Transport::BaseClient] - # @param req [Hash{Symbol=>Object}] - # @param headers [Hash{String=>String}, Net::HTTPHeader] - # @param page_data [Array] - def initialize(client:, req:, headers:, page_data:) - super - - case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } - else - end - - case page_data - in {object: String => object} - @object = object - else - end - end - # @return [Boolean] def next_page? 
false @@ -72,6 +50,23 @@ def auto_paging_each(&blk) end end + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Array] + def initialize(client:, req:, headers:, page_data:) + super + + case page_data + in {data: Array | nil => data} + @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } + else + end + @object = page_data[:object] + end + # @api private # # @return [String] diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index d0c72a8d..24d1f21d 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -93,7 +93,11 @@ def follow_redirect(request, status:, response_headers:) URI.join(url, response_headers["location"]) rescue ArgumentError message = "Server responded with status #{status} but no valid location header." - raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new( + url: url, + response: response_headers, + message: message + ) end request = {**request, url: location} @@ -101,7 +105,11 @@ def follow_redirect(request, status:, response_headers:) case [url.scheme, location.scheme] in ["https", "http"] message = "Tried to redirect to a insecure URL" - raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new( + url: url, + response: response_headers, + message: message + ) else nil end @@ -350,7 +358,7 @@ def initialize( self.class.reap_connection!(status, stream: stream) message = "Failed to complete the request within #{self.class::MAX_REDIRECTS} redirects." - raise OpenAI::Errors::APIConnectionError.new(url: url, message: message) + raise OpenAI::Errors::APIConnectionError.new(url: url, response: response, message: message) in 300..399 self.class.reap_connection!(status, stream: stream) diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index a1147fa6..84109d94 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -15,6 +15,8 @@ class ArrayOf private_class_method :new + # @overload [](type_info, spec = {}) + # # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] # # @param spec [Hash{Symbol=>Object}] . @@ -26,7 +28,7 @@ class ArrayOf # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) + def self.[](...) = new(...) # @param other [Object] # @@ -131,9 +133,9 @@ def initialize(type_info, spec = {}) # # @return [String] def inspect(depth: 0) - # rubocop:disable Layout/LineLength - "#{self.class}[#{[OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ), nilable? ? 'nil' : nil].compact.join(' | ')}]" - # rubocop:enable Layout/LineLength + items = OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ) + + "#{self.class}[#{[items, nilable? ? 'nil' : nil].compact.join(' | ')}]" end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 52fd23c9..81aa58a2 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -114,11 +114,11 @@ def coerce(value, state:) # # @return [String] def inspect(depth: 0) - # rubocop:disable Layout/LineLength return super() if depth.positive? 
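The `ArrayOf` hunk above (and the matching `HashOf` one below) replaces the spelled-out parameter list with `...` forwarding plus an `@overload` tag, so the documented surface is unchanged while `new` stays private. Roughly, callers keep constructing converters like this (option names taken from the `spec` documentation above; illustrative only):

```ruby
# `new` is private_class_method, so `[]` is the construction surface.
Names = OpenAI::Internal::Type::ArrayOf[String]
Flags = OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Boolean]

# `spec` options such as :"nil?" ride along through the forwarded arguments.
MaybeTags = OpenAI::Internal::Type::ArrayOf[Symbol, nil?: true]
```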
- "#{name}[#{values.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) }.join(' | ')}]" - # rubocop:enable Layout/LineLength + members = values.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } + + "#{name}[#{members.join(' | ')}]" end end end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 6dfb501e..5a6d6304 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -15,6 +15,8 @@ class HashOf private_class_method :new + # @overload [](type_info, spec = {}) + # # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] # # @param spec [Hash{Symbol=>Object}] . @@ -26,7 +28,7 @@ class HashOf # @option spec [Proc] :union # # @option spec [Boolean] :"nil?" - def self.[](type_info, spec = {}) = new(type_info, spec) + def self.[](...) = new(...) # @param other [Object] # @@ -151,9 +153,9 @@ def initialize(type_info, spec = {}) # # @return [String] def inspect(depth: 0) - # rubocop:disable Layout/LineLength - "#{self.class}[#{[OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ), nilable? ? 'nil' : nil].compact.join(' | ')}]" - # rubocop:enable Layout/LineLength + items = OpenAI::Internal::Type::Converter.inspect(item_type, depth: depth.succ) + + "#{self.class}[#{[items, nilable? ? 'nil' : nil].compact.join(' | ')}]" end end end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 68150bb4..d4ed09a6 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -217,11 +217,11 @@ def dump(value, state:) # # @return [String] def inspect(depth: 0) - # rubocop:disable Layout/LineLength return super() if depth.positive? - "#{name}[#{variants.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) }.join(' | ')}]" - # rubocop:enable Layout/LineLength + members = variants.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } + + "#{name}[#{members.join(' | ')}]" end end end From 15588236961061fef679db5f03104102f4999d17 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 13:01:02 +0000 Subject: [PATCH 129/295] chore(internal): loosen internal type restrictions --- lib/openai/internal/type/enum.rb | 6 +-- lib/openai/internal/type/union.rb | 4 +- rbi/lib/openai/internal/type/array_of.rbi | 19 ++++--- rbi/lib/openai/internal/type/boolean.rbi | 9 ++-- rbi/lib/openai/internal/type/hash_of.rbi | 19 ++++--- rbi/lib/openai/internal/type/io_like.rbi | 9 ++-- rbi/lib/openai/internal/type/unknown.rbi | 9 ++-- test/openai/internal/type/base_model_test.rb | 54 +++++++++++++++++++- 8 files changed, 86 insertions(+), 43 deletions(-) diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 81aa58a2..f4885fb5 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -62,9 +62,9 @@ def ===(other) = values.include?(other) # # @return [Boolean] def ==(other) - # rubocop:disable Layout/LineLength - other.is_a?(Module) && other.singleton_class <= OpenAI::Internal::Type::Enum && other.values.to_set == values.to_set - # rubocop:enable Layout/LineLength + # rubocop:disable Style/CaseEquality + OpenAI::Internal::Type::Enum === other && other.values.to_set == values.to_set + # rubocop:enable Style/CaseEquality end # @api private diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 
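To make the refactored `inspect(depth:)` concrete: at depth 0 an enum or union expands its members, while anything nested is rendered at `depth.succ` and falls back to its plain name via the `return super() if depth.positive?` guard. A rough sketch of the output shape, with invented type names:

```ruby
# Invented types purely to show the inspect output shape.
module Color
  extend OpenAI::Internal::Type::Enum

  RED = :red
  BLUE = :blue
end

Palette = OpenAI::Internal::Type::ArrayOf[Color]

Color.inspect   # => "Color[:red | :blue]"                    (depth 0, expanded)
Palette.inspect # => "OpenAI::Internal::Type::ArrayOf[Color]" (Color at depth 1)
```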
d4ed09a6..70c9a115 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -125,9 +125,7 @@ def ===(other) # # @return [Boolean] def ==(other) - # rubocop:disable Layout/LineLength - other.is_a?(Module) && other.singleton_class <= OpenAI::Internal::Type::Union && other.derefed_variants == derefed_variants - # rubocop:enable Layout/LineLength + OpenAI::Internal::Type::Union === other && other.derefed_variants == derefed_variants end # @api private diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index e6f84632..8af5fd34 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -10,11 +10,10 @@ module OpenAI include OpenAI::Internal::Type::Converter abstract! - final! Elem = type_member(:out) - sig(:final) do + sig do params( type_info: T.any( OpenAI::Internal::AnyHash, @@ -27,14 +26,14 @@ module OpenAI end def self.[](type_info, spec = {}); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def ===(other); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end # @api private - sig(:final) do + sig do override .params( value: T.any(T::Array[T.anything], T.anything), @@ -45,7 +44,7 @@ module OpenAI def coerce(value, state:); end # @api private - sig(:final) do + sig do override .params( value: T.any(T::Array[T.anything], T.anything), @@ -56,15 +55,15 @@ module OpenAI def dump(value, state:); end # @api private - sig(:final) { returns(Elem) } + sig { returns(Elem) } protected def item_type; end # @api private - sig(:final) { returns(T::Boolean) } + sig { returns(T::Boolean) } protected def nilable?; end # @api private - sig(:final) do + sig do params( type_info: T.any( OpenAI::Internal::AnyHash, @@ -78,7 +77,7 @@ module OpenAI def initialize(type_info, spec = {}); end # @api private - sig(:final) { params(depth: Integer).returns(String) } + sig { params(depth: Integer).returns(String) } def inspect(depth: 0); end end end diff --git a/rbi/lib/openai/internal/type/boolean.rbi b/rbi/lib/openai/internal/type/boolean.rbi index 74064c19..1cecc37d 100644 --- a/rbi/lib/openai/internal/type/boolean.rbi +++ b/rbi/lib/openai/internal/type/boolean.rbi @@ -10,17 +10,16 @@ module OpenAI extend OpenAI::Internal::Type::Converter abstract! - final! - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.===(other); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.==(other); end class << self # @api private - sig(:final) do + sig do override .params(value: T.any( T::Boolean, @@ -32,7 +31,7 @@ module OpenAI def coerce(value, state:); end # @api private - sig(:final) do + sig do override .params(value: T.any( T::Boolean, diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index 93b3b4fa..4edc379f 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -10,11 +10,10 @@ module OpenAI include OpenAI::Internal::Type::Converter abstract! - final! 
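The substance of this patch is visible in the two `==` rewrites above: `other.is_a?(Module) && other.singleton_class <= ...` becomes a `===` check against the mixin, so equality no longer requires the other side to be a module at all. Combined with the set-wise value comparison, that admits instance-backed enums, which the new test fixtures further down exercise. A sketch mirroring those fixtures:

```ruby
# Mirrors the E0 fixture added to the tests below.
class DynamicEnum
  include OpenAI::Internal::Type::Enum

  def initialize(*values) = (@values = values)
end

DynamicEnum.new(:a, :b) == DynamicEnum.new(:b, :a) # => true  (order-insensitive)
DynamicEnum.new(:a, :b) == DynamicEnum.new(:b, :c) # => false (different member set)
```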
Elem = type_member(:out) - sig(:final) do + sig do params( type_info: T.any( OpenAI::Internal::AnyHash, @@ -27,14 +26,14 @@ module OpenAI end def self.[](type_info, spec = {}); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def ===(other); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end # @api private - sig(:final) do + sig do override .params( value: T.any(T::Hash[T.anything, T.anything], T.anything), @@ -45,7 +44,7 @@ module OpenAI def coerce(value, state:); end # @api private - sig(:final) do + sig do override .params( value: T.any(T::Hash[T.anything, T.anything], T.anything), @@ -56,15 +55,15 @@ module OpenAI def dump(value, state:); end # @api private - sig(:final) { returns(Elem) } + sig { returns(Elem) } protected def item_type; end # @api private - sig(:final) { returns(T::Boolean) } + sig { returns(T::Boolean) } protected def nilable?; end # @api private - sig(:final) do + sig do params( type_info: T.any( OpenAI::Internal::AnyHash, @@ -78,7 +77,7 @@ module OpenAI def initialize(type_info, spec = {}); end # @api private - sig(:final) { params(depth: Integer).returns(String) } + sig { params(depth: Integer).returns(String) } def inspect(depth: 0); end end end diff --git a/rbi/lib/openai/internal/type/io_like.rbi b/rbi/lib/openai/internal/type/io_like.rbi index d6c86399..321a5563 100644 --- a/rbi/lib/openai/internal/type/io_like.rbi +++ b/rbi/lib/openai/internal/type/io_like.rbi @@ -10,17 +10,16 @@ module OpenAI extend OpenAI::Internal::Type::Converter abstract! - final! - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.===(other); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.==(other); end class << self # @api private - sig(:final) do + sig do override .params(value: T.any( StringIO, @@ -33,7 +32,7 @@ module OpenAI def coerce(value, state:); end # @api private - sig(:final) do + sig do override .params( value: T.any(Pathname, StringIO, IO, String, T.anything), diff --git a/rbi/lib/openai/internal/type/unknown.rbi b/rbi/lib/openai/internal/type/unknown.rbi index 8865c3b1..0128954a 100644 --- a/rbi/lib/openai/internal/type/unknown.rbi +++ b/rbi/lib/openai/internal/type/unknown.rbi @@ -10,17 +10,16 @@ module OpenAI extend OpenAI::Internal::Type::Converter abstract! - final! 
- sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.===(other); end - sig(:final) { params(other: T.anything).returns(T::Boolean) } + sig { params(other: T.anything).returns(T::Boolean) } def self.==(other); end class << self # @api private - sig(:final) do + sig do override .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) .returns(T.anything) @@ -28,7 +27,7 @@ module OpenAI def coerce(value, state:); end # @api private - sig(:final) do + sig do override.params( value: T.anything, state: OpenAI::Internal::Type::Converter::DumpState diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index ee8224df..c3b0dbfa 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -154,6 +154,12 @@ def test_dump_retry end class OpenAI::Test::EnumModelTest < Minitest::Test + class E0 + include OpenAI::Internal::Type::Enum + + def initialize(*values) = (@values = values) + end + module E1 extend OpenAI::Internal::Type::Enum @@ -183,6 +189,10 @@ module E4 def test_coerce cases = { + [E0.new, "one"] => [{no: 1}, "one"], + [E0.new(:one), "one"] => [{yes: 1}, :one], + [E0.new(:two), "one"] => [{maybe: 1}, "one"], + # rubocop:disable Lint/BooleanSymbol [E1, true] => [{yes: 1}, true], [E1, false] => [{no: 1}, false], @@ -432,8 +442,10 @@ def test_accessors end class OpenAI::Test::UnionTest < Minitest::Test - module U0 - extend OpenAI::Internal::Type::Union + class U0 + include OpenAI::Internal::Type::Union + + def initialize(*variants) = variants.each { variant(_1) } end module U1 @@ -519,6 +531,11 @@ def test_coerce cases = { [U0, :""] => [{no: 1}, 0, :""], + [U0.new(Integer, Float), "one"] => [{no: 1}, 2, "one"], + [U0.new(Integer, Float), 1.0] => [{yes: 1}, 2, 1.0], + [U0.new({const: :a}), "a"] => [{yes: 1}, 1, :a], + [U0.new({const: :a}), "2"] => [{maybe: 1}, 1, "2"], + [U1, "a"] => [{yes: 1}, 1, :a], [U1, "2"] => [{maybe: 1}, 2, "2"], [U1, :b] => [{maybe: 1}, 2, :b], @@ -556,6 +573,12 @@ def test_coerce end class OpenAI::Test::BaseModelQoLTest < Minitest::Test + class E0 + include OpenAI::Internal::Type::Enum + + def initialize(*values) = (@values = values) + end + module E1 extend OpenAI::Internal::Type::Enum @@ -575,6 +598,26 @@ module E3 B = 3 end + class U0 + include OpenAI::Internal::Type::Union + + def initialize(*variants) = variants.each { variant(_1) } + end + + module U1 + extend OpenAI::Internal::Type::Union + + variant String + variant Integer + end + + module U2 + extend OpenAI::Internal::Type::Union + + variant String + variant Integer + end + class M1 < OpenAI::Internal::Type::BaseModel required :a, Integer end @@ -592,8 +635,15 @@ def test_equality [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::Unknown] => true, [OpenAI::Internal::Type::Boolean, OpenAI::Internal::Type::Boolean] => true, [OpenAI::Internal::Type::Unknown, OpenAI::Internal::Type::Boolean] => false, + [E0.new(:a, :b), E0.new(:a, :b)] => true, + [E0.new(:a, :b), E0.new(:b, :a)] => true, + [E0.new(:a, :b), E0.new(:b, :c)] => false, [E1, E2] => true, [E1, E3] => false, + [U0.new(String, Integer), U0.new(String, Integer)] => true, + [U0.new(String, Integer), U0.new(Integer, String)] => false, + [U0.new(String, Float), U0.new(String, Integer)] => false, + [U1, U2] => true, [M1, M2] => false, [M1, M3] => true } From 9b012b4c706f0489246290ed12478dbf9c04dc53 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" 
<142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 16:41:37 +0000 Subject: [PATCH 130/295] feat(api): adding gpt-4.1 family of model IDs --- .stats.yml | 4 +-- lib/openai/internal/type/enum.rb | 12 ++++---- .../models/beta/assistant_update_params.rb | 18 +++++++++++ lib/openai/models/chat_model.rb | 6 ++++ .../models/beta/assistant_update_params.rbi | 9 ++++++ rbi/lib/openai/models/chat_model.rbi | 6 ++++ .../models/beta/assistant_update_params.rbs | 20 ++++++++++++- sig/openai/models/chat_model.rbs | 14 ++++++++- test/openai/client_test.rb | 30 +++++++++---------- test/openai/resources/beta/assistants_test.rb | 2 +- .../openai/resources/chat/completions_test.rb | 2 +- 11 files changed, 96 insertions(+), 27 deletions(-) diff --git a/.stats.yml b/.stats.yml index a58c4247..3ea266b4 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-44b20fa9d24544217fe6bb48852037537030a1ad29b202936425110744fe66fb.yml -openapi_spec_hash: ea86343b5e9858a74e85da8ab2c532f6 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a555f81249cb084f463dcefa4aba069f9341fdaf3dd6ac27d7f237fc90e8f488.yml +openapi_spec_hash: 8e590296cd1a54b9508510b0c7a2c45a config_hash: 5ea32de61ff42fcf5e66cff8d9e247ea diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index f4885fb5..c3ae24b1 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -19,11 +19,11 @@ module Type # @example # # `chat_model` is a `OpenAI::Models::ChatModel` # case chat_model - # when OpenAI::Models::ChatModel::O3_MINI + # when OpenAI::Models::ChatModel::GPT_4_1 # # ... - # when OpenAI::Models::ChatModel::O3_MINI_2025_01_31 + # when OpenAI::Models::ChatModel::GPT_4_1_MINI # # ... - # when OpenAI::Models::ChatModel::O1 + # when OpenAI::Models::ChatModel::GPT_4_1_NANO # # ... # else # puts(chat_model) @@ -31,11 +31,11 @@ module Type # # @example # case chat_model - # in :"o3-mini" + # in :"gpt-4.1" # # ... - # in :"o3-mini-2025-01-31" + # in :"gpt-4.1-mini" # # ... - # in :o1 + # in :"gpt-4.1-nano" # # ... 
# else # puts(chat_model) diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 299fb194..e740ca15 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -172,6 +172,18 @@ module Model variant String + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI } variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } @@ -238,6 +250,12 @@ module Model # @!group + GPT_4_1 = :"gpt-4.1" + GPT_4_1_MINI = :"gpt-4.1-mini" + GPT_4_1_NANO = :"gpt-4.1-nano" + GPT_4_1_2025_04_14 = :"gpt-4.1-2025-04-14" + GPT_4_1_MINI_2025_04_14 = :"gpt-4.1-mini-2025-04-14" + GPT_4_1_NANO_2025_04_14 = :"gpt-4.1-nano-2025-04-14" O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index a3021397..eccf706d 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -5,6 +5,12 @@ module Models module ChatModel extend OpenAI::Internal::Type::Enum + GPT_4_1 = :"gpt-4.1" + GPT_4_1_MINI = :"gpt-4.1-mini" + GPT_4_1_NANO = :"gpt-4.1-nano" + GPT_4_1_2025_04_14 = :"gpt-4.1-2025-04-14" + GPT_4_1_MINI_2025_04_14 = :"gpt-4.1-mini-2025-04-14" + GPT_4_1_NANO_2025_04_14 = :"gpt-4.1-nano-2025-04-14" O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index eb334355..01cbc60a 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -240,6 +240,15 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1_2025_04_14 = + T.let(:"gpt-4.1-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1_MINI_2025_04_14 = + T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1_NANO_2025_04_14 = + T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index f7512375..ff8f0bc8 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ 
b/rbi/lib/openai/models/chat_model.rbi @@ -8,6 +8,12 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ChatModel::TaggedSymbol) } + GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1_2025_04_14 = T.let(:"gpt-4.1-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1_MINI_2025_04_14 = T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1_NANO_2025_04_14 = T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::TaggedSymbol) O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::TaggedSymbol) O1 = T.let(:o1, OpenAI::Models::ChatModel::TaggedSymbol) diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index de95efb8..1ce0f114 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -70,6 +70,12 @@ module OpenAI type model = String + | :"gpt-4.1" + | :"gpt-4.1-mini" + | :"gpt-4.1-nano" + | :"gpt-4.1-2025-04-14" + | :"gpt-4.1-mini-2025-04-14" + | :"gpt-4.1-nano-2025-04-14" | :"o3-mini" | :"o3-mini-2025-01-31" | :o1 @@ -104,7 +110,13 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, (:"o3-mini" + def self?.variants: -> [String, (:"gpt-4.1" + | :"gpt-4.1-mini" + | :"gpt-4.1-nano" + | :"gpt-4.1-2025-04-14" + | :"gpt-4.1-mini-2025-04-14" + | :"gpt-4.1-nano-2025-04-14" + | :"o3-mini" | :"o3-mini-2025-01-31" | :o1 | :"o1-2024-12-17" @@ -135,6 +147,12 @@ module OpenAI | :"gpt-3.5-turbo-0125" | :"gpt-3.5-turbo-16k-0613")] + GPT_4_1: :"gpt-4.1" + GPT_4_1_MINI: :"gpt-4.1-mini" + GPT_4_1_NANO: :"gpt-4.1-nano" + GPT_4_1_2025_04_14: :"gpt-4.1-2025-04-14" + GPT_4_1_MINI_2025_04_14: :"gpt-4.1-mini-2025-04-14" + GPT_4_1_NANO_2025_04_14: :"gpt-4.1-nano-2025-04-14" O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 0f571b30..31b7ef3b 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -1,7 +1,13 @@ module OpenAI module Models type chat_model = - :"o3-mini" + :"gpt-4.1" + | :"gpt-4.1-mini" + | :"gpt-4.1-nano" + | :"gpt-4.1-2025-04-14" + | :"gpt-4.1-mini-2025-04-14" + | :"gpt-4.1-nano-2025-04-14" + | :"o3-mini" | :"o3-mini-2025-01-31" | :o1 | :"o1-2024-12-17" @@ -48,6 +54,12 @@ module OpenAI module ChatModel extend OpenAI::Internal::Type::Enum + GPT_4_1: :"gpt-4.1" + GPT_4_1_MINI: :"gpt-4.1-mini" + GPT_4_1_NANO: :"gpt-4.1-nano" + GPT_4_1_2025_04_14: :"gpt-4.1-2025-04-14" + GPT_4_1_MINI_2025_04_14: :"gpt-4.1-mini-2025-04-14" + GPT_4_1_NANO_2025_04_14: :"gpt-4.1-nano-2025-04-14" O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 0e142a74..586ba30e 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -56,7 +56,7 @@ def test_client_default_request_default_retry_attempts openai.requester = requester assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + 
openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end assert_equal(3, requester.attempts.length) @@ -68,7 +68,7 @@ def test_client_given_request_default_retry_attempts openai.requester = requester assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end assert_equal(4, requester.attempts.length) @@ -82,7 +82,7 @@ def test_client_default_request_given_retry_attempts assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {max_retries: 3} ) end @@ -98,7 +98,7 @@ def test_client_given_request_given_retry_attempts assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {max_retries: 4} ) end @@ -112,7 +112,7 @@ def test_client_retry_after_seconds openai.requester = requester assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end assert_equal(2, requester.attempts.length) @@ -126,7 +126,7 @@ def test_client_retry_after_date assert_raises(OpenAI::Errors::InternalServerError) do Thread.current.thread_variable_set(:time_now, Time.now) - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") Thread.current.thread_variable_set(:time_now, nil) end @@ -140,7 +140,7 @@ def test_client_retry_after_ms openai.requester = requester assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end assert_equal(2, requester.attempts.length) @@ -153,7 +153,7 @@ def test_retry_count_header openai.requester = requester assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end retry_count_headers = requester.attempts.map do @@ -171,7 +171,7 @@ def test_omit_retry_count_header assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {extra_headers: {"x-stainless-retry-count" => nil}} ) end @@ -191,7 +191,7 @@ def test_overwrite_retry_count_header assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {extra_headers: {"x-stainless-retry-count" => "42"}} ) end @@ -211,7 +211,7 @@ def test_client_redirect_307 assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + 
model: :"gpt-4.1", request_options: {extra_headers: {}} ) end @@ -233,7 +233,7 @@ def test_client_redirect_303 assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {extra_headers: {}} ) end @@ -252,7 +252,7 @@ def test_client_redirect_auth_keep_same_origin assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {extra_headers: {"authorization" => "Bearer xyz"}} ) end @@ -271,7 +271,7 @@ def test_client_redirect_auth_strip_cross_origin assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"o3-mini", + model: :"gpt-4.1", request_options: {extra_headers: {"authorization" => "Bearer xyz"}} ) end @@ -283,7 +283,7 @@ def test_default_headers openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") requester = MockRequester.new(200, {}, {}) openai.requester = requester - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") headers = requester.attempts.first.fetch(:headers) refute_empty(headers["accept"]) diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index e7c59205..b76d7856 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Beta::AssistantsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.beta.assistants.create(model: :"o3-mini") + response = @openai.beta.assistants.create(model: :"gpt-4.1") assert_pattern do response => OpenAI::Models::Beta::Assistant diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index 0ff3c51e..f73285f4 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -5,7 +5,7 @@ class OpenAI::Test::Resources::Chat::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params response = - @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"o3-mini") + @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") assert_pattern do response => OpenAI::Models::Chat::ChatCompletion From 40709c53929bcdcef3925584d59d772d834603bc Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 17:29:00 +0000 Subject: [PATCH 131/295] chore(internal): mostly README touch ups --- README.md | 4 ++-- lib/openai/internal/type/enum.rb | 3 ++- lib/openai/internal/type/union.rb | 3 ++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 60abfc28..9ec14774 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ The OpenAI Ruby library provides convenient access to the OpenAI REST API from a ## Documentation -Documentation for released of this gem can be found [on RubyDoc](https://gemdocs.org/gems/openai). +Documentation for releases of this gem can be found [on RubyDoc](https://gemdocs.org/gems/openai). 
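One usage note before the remaining README context: the test updates above pass the new model ID inline, but the generated constant resolves to the same symbol, so both spellings are interchangeable (request shape borrowed from the tests):

```ruby
# Equivalent to passing model: :"gpt-4.1" directly, as the tests above do.
openai.chat.completions.create(
  messages: [{content: "string", role: :developer}],
  model: OpenAI::Models::ChatModel::GPT_4_1
)
```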
The underlying REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs). @@ -62,7 +62,7 @@ end ### Streaming -We provide support for streaming responses using Server Side Events (SSE). +We provide support for streaming responses using Server-Sent Events (SSE). ```ruby stream = openai.chat.completions.stream_raw( diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index c3ae24b1..8e987aa3 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -117,8 +117,9 @@ def inspect(depth: 0) return super() if depth.positive? members = values.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } + prefix = is_a?(Module) ? name : self.class.name - "#{name}[#{members.join(' | ')}]" + "#{prefix}[#{members.join(' | ')}]" end end end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 70c9a115..82db8138 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -218,8 +218,9 @@ def inspect(depth: 0) return super() if depth.positive? members = variants.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } + prefix = is_a?(Module) ? name : self.class.name - "#{name}[#{members.join(' | ')}]" + "#{prefix}[#{members.join(' | ')}]" end end end From aca157068b2a66f9240bbb2f85f00f8285b26633 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 17:42:43 +0000 Subject: [PATCH 132/295] docs: update documentation links to be more uniform --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9ec14774..f1c0d429 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ The OpenAI Ruby library provides convenient access to the OpenAI REST API from a Documentation for releases of this gem can be found [on RubyDoc](https://gemdocs.org/gems/openai). -The underlying REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs). +The REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs). ## Installation From 0278b1a3b67250da657da9bccfe30e1f7dd3c3ea Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Apr 2025 00:31:50 +0000 Subject: [PATCH 133/295] chore(internal): always run post-processing when formatting when syntax_tree --- .rubocop.yml | 1 + Rakefile | 9 ++++++++- lib/openai/internal/type/enum.rb | 4 +++- lib/openai/internal/type/union.rb | 4 +++- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 8a64a238..c88b94f1 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -244,6 +244,7 @@ Style/SafeNavigation: Style/SignalException: Exclude: - Rakefile + - "**/*.rake" # We use these sparingly, where we anticipate future branches for the # inner conditional. diff --git a/Rakefile b/Rakefile index 990cc38f..41d7c926 100644 --- a/Rakefile +++ b/Rakefile @@ -59,12 +59,19 @@ multitask(:syntax_tree) do # 2. at label `l1`, join previously annotated line with `class | module` information. 
pst = sed + [subst, "--"] + success = false + # transform class aliases to type aliases, which syntax tree has no trouble with sh("#{find.shelljoin} | #{pre.shelljoin}") # run syntax tree to format `*.rbs` files - sh("#{find.shelljoin} | #{fmt.shelljoin}") + sh("#{find.shelljoin} | #{fmt.shelljoin}") do + success = _1 + end # transform type aliases back to class aliases sh("#{find.shelljoin} | #{pst.shelljoin}") + + # always run post-processing to remove comment marker + fail unless success end multitask(format: [:ruboformat, :syntax_tree]) diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 8e987aa3..e850de7b 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -114,7 +114,9 @@ def coerce(value, state:) # # @return [String] def inspect(depth: 0) - return super() if depth.positive? + if depth.positive? + return is_a?(Module) ? super() : self.class.name + end members = values.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } prefix = is_a?(Module) ? name : self.class.name diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 82db8138..14a1a191 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -215,7 +215,9 @@ def dump(value, state:) # # @return [String] def inspect(depth: 0) - return super() if depth.positive? + if depth.positive? + return is_a?(Module) ? super() : self.class.name + end members = variants.map { OpenAI::Internal::Type::Converter.inspect(_1, depth: depth.succ) } prefix = is_a?(Module) ? name : self.class.name From decef658833b60501a7db2cb8cb9687493d2c3fa Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Apr 2025 17:23:11 +0000 Subject: [PATCH 134/295] chore(internal): protect SSE parsing pipeline from broken UTF-8 characters --- .../transport/pooled_net_requester.rb | 2 +- lib/openai/internal/util.rb | 45 +++++++++++++++---- rbi/lib/openai/internal/util.rbi | 15 +++++++ sig/openai/internal/util.rbs | 2 + test/openai/internal/util_test.rb | 22 ++++++++- 5 files changed, 75 insertions(+), 11 deletions(-) diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 7bb8ef19..a9ef117f 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -149,7 +149,7 @@ def execute(request) break if finished rsp.read_body do |bytes| - y << bytes + y << bytes.force_encoding(Encoding::BINARY) break if finished self.class.calibrate_socket_timeout(conn, deadline) diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index a7a8a095..d2eb9c19 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -448,7 +448,7 @@ def initialize(src, &blk) else src end - @buf = String.new.b + @buf = String.new @blk = blk end end @@ -460,7 +460,7 @@ class << self # @return [Enumerable] def writable_enum(&blk) Enumerator.new do |y| - buf = String.new.b + buf = String.new y.define_singleton_method(:write) do self << buf.replace(_1) buf.bytesize @@ -582,6 +582,27 @@ def encode_content(headers, body) # @api private # + # https://www.iana.org/assignments/character-sets/character-sets.xhtml + # + # @param content_type [String] + # @param text [String] + def force_charset!(content_type, text:) + charset = /charset=([^;\s]+)/.match(content_type)&.captures&.first + + return unless charset + + begin + encoding = 
Encoding.find(charset) + text.force_encoding(encoding) + rescue ArgumentError + nil + end + end + + # @api private + # + # Assumes each chunk in stream has `Encoding::BINARY`. + # # @param headers [Hash{String=>String}, Net::HTTPHeader] # @param stream [Enumerable] # @param suppress_error [Boolean] @@ -589,7 +610,7 @@ def encode_content(headers, body) # @raise [JSON::ParserError] # @return [Object] def decode_content(headers, stream:, suppress_error: false) - case headers["content-type"] + case (content_type = headers["content-type"]) in %r{^application/(?:vnd\.api\+)?json} json = stream.to_a.join begin @@ -606,11 +627,10 @@ def decode_content(headers, stream:, suppress_error: false) in %r{^text/event-stream} lines = decode_lines(stream) decode_sse(lines) - in %r{^text/} - stream.to_a.join else - # TODO: parsing other response types - StringIO.new(stream.to_a.join) + text = stream.to_a.join + force_charset!(content_type, text: text) + StringIO.new(text) end end end @@ -675,12 +695,17 @@ def chain_fused(enum, &blk) class << self # @api private # + # Assumes Strings have been forced into having `Encoding::BINARY`. + # + # This decoder is responsible for reassembling lines split across multiple + # fragments. + # # @param enum [Enumerable] # # @return [Enumerable] def decode_lines(enum) re = /(\r\n|\r|\n)/ - buffer = String.new.b + buffer = String.new cr_seen = nil chain_fused(enum) do |y| @@ -711,6 +736,8 @@ def decode_lines(enum) # # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream # + # Assumes that `lines` has been decoded with `#decode_lines`. + # # @param lines [Enumerable] # # @return [EnumerableObject}>] @@ -734,7 +761,7 @@ def decode_sse(lines) in "event" current.merge!(event: value) in "data" - (current[:data] ||= String.new.b) << (value << "\n") + (current[:data] ||= String.new) << (value << "\n") in "id" unless value.include?("\0") current.merge!(id: value) in "retry" if /^\d+$/ =~ value diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index 433fab3c..9b88505b 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -215,6 +215,14 @@ module OpenAI def encode_content(headers, body); end # @api private + # + # https://www.iana.org/assignments/character-sets/character-sets.xhtml + sig { params(content_type: String, text: String).void } + def force_charset!(content_type, text:); end + + # @api private + # + # Assumes each chunk in stream has `Encoding::BINARY`. sig do params( headers: T.any(T::Hash[String, String], Net::HTTPHeader), @@ -263,12 +271,19 @@ module OpenAI class << self # @api private + # + # Assumes Strings have been forced into having `Encoding::BINARY`. + # + # This decoder is responsible for reassembling lines split across multiple + # fragments. sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) } def decode_lines(enum); end # @api private # # https://html.spec.whatwg.org/multipage/server-sent-events.html#parsing-an-event-stream + # + # Assumes that `lines` has been decoded with `#decode_lines`. 
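The behavior of `force_charset!` above, restated with values from the test added later in this patch: known charsets are applied to the response text, and absent or unrecognized ones are silently ignored thanks to the `rescue ArgumentError`.

```ruby
text = String.new.force_encoding(Encoding::BINARY)

OpenAI::Internal::Util.force_charset!("application/json; charset=utf-8", text: text)
text.encoding # => #<Encoding:UTF-8>

# No charset parameter, or one Encoding.find does not know: no-op, no raise.
OpenAI::Internal::Util.force_charset!("application/json", text: text)
OpenAI::Internal::Util.force_charset!("text/plain; charset=bogus", text: text)
text.encoding # => still #<Encoding:UTF-8>
```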
sig do params(lines: T::Enumerable[String]).returns(T::Enumerable[OpenAI::Internal::Util::ServerSentEvent]) end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index ede1ce30..0f040af8 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -120,6 +120,8 @@ module OpenAI top body ) -> top + def self?.force_charset!: (String content_type, text: String) -> void + def self?.decode_content: ( ::Hash[String, String] headers, stream: Enumerable[String], diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index 50140677..e533fdd3 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -368,6 +368,24 @@ def test_close_fused_sse_chain end end +class OpenAI::Test::UtilContentDecodingTest < Minitest::Test + def test_charset + cases = { + "application/json" => Encoding::BINARY, + "application/json; charset=utf-8" => Encoding::UTF_8, + "charset=uTf-8 application/json; " => Encoding::UTF_8, + "charset=UTF-8; application/json; " => Encoding::UTF_8, + "charset=ISO-8859-1 ;application/json; " => Encoding::ISO_8859_1, + "charset=EUC-KR ;application/json; " => Encoding::EUC_KR + } + text = String.new.force_encoding(Encoding::BINARY) + cases.each do |content_type, encoding| + OpenAI::Internal::Util.force_charset!(content_type, text: text) + assert_equal(encoding, text.encoding) + end + end +end + class OpenAI::Test::UtilSseTest < Minitest::Test def test_decode_lines cases = { @@ -381,7 +399,9 @@ def test_decode_lines %W[\na b\n\n] => %W[\n ab\n \n], %W[\na b] => %W[\n ab], %W[\u1F62E\u200D\u1F4A8] => %W[\u1F62E\u200D\u1F4A8], - %W[\u1F62E \u200D \u1F4A8] => %W[\u1F62E\u200D\u1F4A8] + %W[\u1F62E \u200D \u1F4A8] => %W[\u1F62E\u200D\u1F4A8], + ["\xf0\x9f".b, "\xa5\xba".b] => ["\xf0\x9f\xa5\xba".b], + ["\xf0".b, "\x9f".b, "\xa5".b, "\xba".b] => ["\xf0\x9f\xa5\xba".b] } eols = %W[\n \r \r\n] cases.each do |enum, expected| From 3ae8a6a78bed9285cdd6f2723c8b1a7b83d70313 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 16:43:35 +0000 Subject: [PATCH 135/295] feat(api): add o3 and o4-mini model IDs --- .stats.yml | 6 +- lib/openai/models/chat/chat_completion.rb | 39 +++++++++++- .../chat/chat_completion_audio_param.rb | 5 +- .../models/chat/chat_completion_chunk.rb | 39 +++++++++++- .../models/chat/completion_create_params.rb | 19 ++++-- lib/openai/models/chat_model.rb | 4 ++ lib/openai/models/completion_create_params.rb | 4 ++ lib/openai/models/reasoning.rb | 45 +++++++++++--- lib/openai/models/responses/response.rb | 59 ++++++++++++++++++- .../responses/response_create_params.rb | 59 ++++++++++++++++++- lib/openai/resources/responses.rb | 6 +- .../openai/models/chat/chat_completion.rbi | 39 +++++++++++- .../chat/chat_completion_audio_param.rbi | 5 +- .../models/chat/chat_completion_chunk.rbi | 39 +++++++++++- .../models/chat/completion_create_params.rbi | 17 +++++- rbi/lib/openai/models/chat_model.rbi | 4 ++ .../models/completion_create_params.rbi | 4 ++ rbi/lib/openai/models/reasoning.rbi | 44 +++++++++++--- rbi/lib/openai/models/responses/response.rbi | 57 +++++++++++++++++- .../responses/response_create_params.rbi | 58 +++++++++++++++++- rbi/lib/openai/resources/chat/completions.rbi | 18 ++++-- rbi/lib/openai/resources/completions.rbi | 4 ++ rbi/lib/openai/resources/responses.rbi | 42 ++++++++++++- sig/openai/models/chat/chat_completion.rbs | 5 +- .../chat/chat_completion_audio_param.rbs | 3 +- 
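The binary test cases added in the SSE patch above are its point: with chunks forced to `Encoding::BINARY` at the requester, a multibyte character split across network reads reassembles cleanly instead of raising on an invalid byte sequence. In isolation (mirroring the test):

```ruby
# A 4-byte UTF-8 character delivered across two network reads.
chunks = ["\xf0\x9f".b, "\xa5\xba".b]

OpenAI::Internal::Util.decode_lines(chunks).to_a
# => ["\xf0\x9f\xa5\xba".b] -- one logical line with its bytes intact
```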
.../models/chat/chat_completion_chunk.rbs | 5 +- .../models/chat/completion_create_params.rbs | 3 +- sig/openai/models/chat_model.rbs | 8 +++ sig/openai/models/reasoning.rbs | 23 +++++++- sig/openai/models/responses/response.rbs | 16 +++++ .../responses/response_create_params.rbs | 16 +++++ sig/openai/resources/responses.rbs | 2 + test/openai/resources/responses_test.rb | 2 + 33 files changed, 636 insertions(+), 63 deletions(-) diff --git a/.stats.yml b/.stats.yml index 3ea266b4..c3609ee0 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a555f81249cb084f463dcefa4aba069f9341fdaf3dd6ac27d7f237fc90e8f488.yml -openapi_spec_hash: 8e590296cd1a54b9508510b0c7a2c45a -config_hash: 5ea32de61ff42fcf5e66cff8d9e247ea +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5633633cc38734869cf7d993f7b549bb8e4d10e0ec45381ec2cd91507cd8eb8f.yml +openapi_spec_hash: c855121b2b2324b99499c9244c21d24d +config_hash: d20837393b73efdb19cd08e04c1cc9a1 diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 97ee34db..1ea5a81f 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -39,7 +39,23 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel required :object, const: :"chat.completion" # @!attribute service_tier - # The service tier used for processing the request. + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletion::ServiceTier }, nil?: true @@ -190,14 +206,31 @@ class Logprobs < OpenAI::Internal::Type::BaseModel end end - # The service tier used for processing the request. + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. 
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. # # @see OpenAI::Models::Chat::ChatCompletion#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum - SCALE = :scale + AUTO = :auto DEFAULT = :default + FLEX = :flex finalize! diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 07ca649d..1e69e4cf 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -13,7 +13,7 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # @!attribute voice # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } @@ -38,6 +38,7 @@ module Format extend OpenAI::Internal::Type::Enum WAV = :wav + AAC = :aac MP3 = :mp3 FLAC = :flac OPUS = :opus @@ -51,7 +52,7 @@ module Format end # The voice the model uses to respond. Supported voices are `alloy`, `ash`, - # `ballad`, `coral`, `echo`, `sage`, and `shimmer`. + # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index b9c033b3..0a201e76 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -39,7 +39,23 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel required :object, const: :"chat.completion.chunk" # @!attribute service_tier - # The service tier used for processing the request. + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier }, nil?: true @@ -406,14 +422,31 @@ class Logprobs < OpenAI::Internal::Type::BaseModel end end - # The service tier used for processing the request. + # Specifies the latency tier to use for processing the request. 
This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarentee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. # # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum - SCALE = :scale + AUTO = :auto DEFAULT = :default + FLEX = :flex finalize! diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 65406125..bee73ddb 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -24,7 +24,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionMessageParam] } # @!attribute model - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare @@ -121,7 +121,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # This value is now deprecated in favor of `max_completion_tokens`, and is not # compatible with - # [o1 series models](https://platform.openai.com/docs/guides/reasoning). + # [o-series models](https://platform.openai.com/docs/guides/reasoning). # # @return [Integer, nil] optional :max_tokens, Integer, nil?: true @@ -240,6 +240,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # latency guarentee. # - If set to 'default', the request will be processed using the default service # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). # - When not set, the default behavior is 'auto'. # # When this parameter is set, the response body will include the `service_tier` @@ -249,6 +252,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :service_tier, enum: -> { OpenAI::Models::Chat::CompletionCreateParams::ServiceTier }, nil?: true # @!attribute stop + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. # @@ -422,7 +427,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - # Model ID used to generate the response, like `gpt-4o` or `o1`. 
OpenAI offers a + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare @@ -432,7 +437,7 @@ module Model variant String - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # offers a wide range of models with different capabilities, performance # characteristics, and price points. Refer to the [model guide](https://platform.openai.com/docs/models) # to browse and compare available models. @@ -589,6 +594,9 @@ module ResponseFormat # latency guarentee. # - If set to 'default', the request will be processed using the default service # tier with a lower uptime SLA and no latency guarentee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). # - When not set, the default behavior is 'auto'. # # When this parameter is set, the response body will include the `service_tier` @@ -598,6 +606,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default + FLEX = :flex finalize! @@ -606,6 +615,8 @@ module ServiceTier # def self.values; end end + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index eccf706d..1bcb4858 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -11,6 +11,10 @@ module ChatModel GPT_4_1_2025_04_14 = :"gpt-4.1-2025-04-14" GPT_4_1_MINI_2025_04_14 = :"gpt-4.1-mini-2025-04-14" GPT_4_1_NANO_2025_04_14 = :"gpt-4.1-nano-2025-04-14" + O4_MINI = :"o4-mini" + O4_MINI_2025_04_16 = :"o4-mini-2025-04-16" + O3 = :o3 + O3_2025_04_16 = :"o3-2025-04-16" O3_MINI = :"o3-mini" O3_MINI_2025_01_31 = :"o3-mini-2025-01-31" O1 = :o1 diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index a2931ac5..6463fc0e 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -133,6 +133,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :seed, Integer, nil?: true # @!attribute stop + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. # @@ -288,6 +290,8 @@ module Prompt ArrayOfToken2DArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]] end + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 25128ce1..a5fa2bc4 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -15,15 +15,23 @@ class Reasoning < OpenAI::Internal::Type::BaseModel optional :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute generate_summary - # **computer_use_preview only** + # **Deprecated:** use `summary` instead. 
# # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. # # @return [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] optional :generate_summary, enum: -> { OpenAI::Models::Reasoning::GenerateSummary }, nil?: true + # @!attribute summary + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. + # + # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] + optional :summary, enum: -> { OpenAI::Models::Reasoning::Summary }, nil?: true + # @!parse # # **o-series models only** # # @@ -32,21 +40,44 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # # # # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] + # # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] # # - # def initialize(effort: nil, generate_summary: nil, **) = super + # def initialize(effort: nil, generate_summary: nil, summary: nil, **) = super # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void - # **computer_use_preview only** + # @deprecated + # + # **Deprecated:** use `summary` instead. # # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. # # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary extend OpenAI::Internal::Type::Enum + AUTO = :auto + CONCISE = :concise + DETAILED = :detailed + + finalize! + + # @!parse + # # @return [Array] + # def self.values; end + end + + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. + # + # @see OpenAI::Models::Reasoning#summary + module Summary + extend OpenAI::Internal::Type::Enum + + AUTO = :auto CONCISE = :concise DETAILED = :detailed diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 3ad5a2be..0ed05b08 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -54,7 +54,7 @@ class Response < OpenAI::Internal::Type::BaseModel required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare @@ -159,6 +159,28 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true + # @!attribute service_tier + # Specifies the latency tier to use for processing the request. 
This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ #
+ # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil]
+ optional :service_tier, enum: -> { OpenAI::Models::Responses::Response::ServiceTier }, nil?: true
+
# @!attribute [r] status
# The status of the response generation. One of `completed`, `failed`,
# `in_progress`, or `incomplete`.
@@ -236,6 +258,7 @@ class Response < OpenAI::Internal::Type::BaseModel
# # @param max_output_tokens [Integer, nil]
# # @param previous_response_id [String, nil]
# # @param reasoning [OpenAI::Models::Reasoning, nil]
+ # # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil]
# # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus]
# # @param text [OpenAI::Models::Responses::ResponseTextConfig]
# # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil]
@@ -260,6 +283,7 @@ class Response < OpenAI::Internal::Type::BaseModel
# max_output_tokens: nil,
# previous_response_id: nil,
# reasoning: nil,
+ # service_tier: nil,
# status: nil,
# text: nil,
# truncation: nil,
@@ -341,6 +365,39 @@ module ToolChoice
# def self.variants; end
end

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ #
+ # @see OpenAI::Models::Responses::Response#service_tier
+ module ServiceTier
+ extend OpenAI::Internal::Type::Enum
+
+ AUTO = :auto
+ DEFAULT = :default
+ FLEX = :flex
+
+ finalize!
+
+ # @!parse
+ # # @return [Array]
+ # def self.values; end
+ end
+
# The truncation strategy to use for the model response. 
#
# - `auto`: If the context of this response and previous ones exceeds the model's
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb
index 85b349d0..1969ec3a 100644
--- a/lib/openai/models/responses/response_create_params.rb
+++ b/lib/openai/models/responses/response_create_params.rb
@@ -26,7 +26,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
required :input, union: -> { OpenAI::Models::Responses::ResponseCreateParams::Input }

# @!attribute model
- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -103,6 +103,30 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
# @return [OpenAI::Models::Reasoning, nil]
optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true

+ # @!attribute service_tier
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ #
+ # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
+ optional :service_tier,
+ enum: -> { OpenAI::Models::Responses::ResponseCreateParams::ServiceTier },
+ nil?: true
+
# @!attribute store
# Whether to store the generated model response for later retrieval via API.
#
@@ -211,6 +235,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
# # @param parallel_tool_calls [Boolean, nil]
# # @param previous_response_id [String, nil]
# # @param reasoning [OpenAI::Models::Reasoning, nil]
+ # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
# # @param store [Boolean, nil]
# # @param temperature [Float, nil]
# # @param text [OpenAI::Models::Responses::ResponseTextConfig]
@@ -231,6 +256,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
# parallel_tool_calls: nil,
# previous_response_id: nil,
# reasoning: nil,
+ # service_tier: nil,
# store: nil,
# temperature: nil,
# text: nil,
@@ -272,6 +298,37 @@ module Input
# def self.variants; end
end

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted. 
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
module ServiceTier
+ extend OpenAI::Internal::Type::Enum
+
+ AUTO = :auto
+ DEFAULT = :default
+ FLEX = :flex
+
+ finalize!
+
+ # @!parse
+ # # @return [Array]
+ # def self.values; end
+ end
+
# How the model should select which tool (or tools) to use when generating a
# response. See the `tools` parameter to see how to specify which tools the model
# can call.
diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb
index 63a4abef..05435651 100644
--- a/lib/openai/resources/responses.rb
+++ b/lib/openai/resources/responses.rb
@@ -20,7 +20,7 @@ class Responses
# [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
# your own data as input for the model's response.
#
- # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+ # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
#
# @param input [String, Array]
# @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel]
@@ -31,6 +31,7 @@
# @param parallel_tool_calls [Boolean, nil]
# @param previous_response_id [String, nil]
# @param reasoning [OpenAI::Models::Reasoning, nil]
+ # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
# @param store [Boolean, nil]
# @param temperature [Float, nil]
# @param text [OpenAI::Models::Responses::ResponseTextConfig]
@@ -73,7 +74,7 @@ def create(params)
# [file search](https://platform.openai.com/docs/guides/tools-file-search) to use
# your own data as input for the model's response. 
#
- # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
+ # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
#
# @param input [String, Array]
# @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel]
@@ -84,6 +85,7 @@ def create(params)
# @param parallel_tool_calls [Boolean, nil]
# @param previous_response_id [String, nil]
# @param reasoning [OpenAI::Models::Reasoning, nil]
+ # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
# @param store [Boolean, nil]
# @param temperature [Float, nil]
# @param text [OpenAI::Models::Responses::ResponseTextConfig]
diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi
index e1e91dfc..b6ba1367 100644
--- a/rbi/lib/openai/models/chat/chat_completion.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion.rbi
@@ -25,7 +25,23 @@ module OpenAI
sig { returns(Symbol) }
attr_accessor :object

- # The service tier used for processing the request.
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) }
attr_accessor :service_tier

@@ -202,7 +218,23 @@ module OpenAI
end
end

- # The service tier used for processing the request.
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing). 
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
module ServiceTier
extend OpenAI::Internal::Type::Enum

@@ -210,8 +242,9 @@ module OpenAI
OrSymbol =
T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) }

- SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
+ AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
+ FLEX = T.let(:flex, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)

sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) }
def self.values; end
diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
index 0755aab8..f7bff346 100644
--- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
@@ -10,7 +10,7 @@ module OpenAI
attr_accessor :format_

# The voice the model uses to respond. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `sage`, and `shimmer`.
+ # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`.
sig { returns(T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)) }
attr_accessor :voice

@@ -47,6 +47,7 @@ module OpenAI
T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) }

WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
+ AAC = T.let(:aac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol)
@@ -57,7 +58,7 @@ module OpenAI
end

# The voice the model uses to respond. Supported voices are `alloy`, `ash`,
- # `ballad`, `coral`, `echo`, `sage`, and `shimmer`.
+ # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`.
module Voice
extend OpenAI::Internal::Type::Union
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
index a7aaf04d..abee2167 100644
--- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
@@ -27,7 +27,23 @@ module OpenAI
sig { returns(Symbol) }
attr_accessor :object

- # The service tier used for processing the request.
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier. 
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) }
attr_accessor :service_tier

@@ -441,7 +457,23 @@ module OpenAI
end
end

- # The service tier used for processing the request.
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
module ServiceTier
extend OpenAI::Internal::Type::Enum

TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) }
OrSymbol =
T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) }

- SCALE = T.let(:scale, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)
+ AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)
DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)
+ FLEX = T.let(:flex, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)

sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) }
def self.values; end
diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi
index c284fa11..9e8b8bd5 100644
--- a/rbi/lib/openai/models/chat/completion_create_params.rbi
+++ b/rbi/lib/openai/models/chat/completion_create_params.rbi
@@ -29,7 +29,7 @@ module OpenAI
end
attr_accessor :messages

- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -136,7 +136,7 @@ module OpenAI
#
# This value is now deprecated in favor of `max_completion_tokens`, and is not
# compatible with
- # [o1 series models](https://platform.openai.com/docs/guides/reasoning).
+ # [o-series models](https://platform.openai.com/docs/guides/reasoning).
sig { returns(T.nilable(Integer)) }
attr_accessor :max_tokens

@@ -259,6 +259,9 @@ module OpenAI
# latency guarantee.
# - If set to 'default', the request will be processed using the default service
# tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing). 
# - When not set, the default behavior is 'auto'.
#
# When this parameter is set, the response body will include the `service_tier`
@@ -266,6 +269,8 @@ module OpenAI
sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) }
attr_accessor :service_tier

+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
sig { returns(T.nilable(T.any(String, T::Array[String]))) }
attr_accessor :stop
@@ -526,7 +531,7 @@ module OpenAI
end
def to_hash; end

- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -680,6 +685,9 @@ module OpenAI
# latency guarantee.
# - If set to 'default', the request will be processed using the default service
# tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
# - When not set, the default behavior is 'auto'.
#
# When this parameter is set, the response body will include the `service_tier`
@@ -693,11 +701,14 @@ module OpenAI

AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol)
DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol)
+ FLEX = T.let(:flex, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol)

sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) }
def self.values; end
end

+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence. 
module Stop diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index ff8f0bc8..147d44a6 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -14,6 +14,10 @@ module OpenAI GPT_4_1_2025_04_14 = T.let(:"gpt-4.1-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4_1_MINI_2025_04_14 = T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4_1_NANO_2025_04_14 = T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) + O4_MINI = T.let(:"o4-mini", OpenAI::Models::ChatModel::TaggedSymbol) + O4_MINI_2025_04_16 = T.let(:"o4-mini-2025-04-16", OpenAI::Models::ChatModel::TaggedSymbol) + O3 = T.let(:o3, OpenAI::Models::ChatModel::TaggedSymbol) + O3_2025_04_16 = T.let(:"o3-2025-04-16", OpenAI::Models::ChatModel::TaggedSymbol) O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::TaggedSymbol) O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::TaggedSymbol) O1 = T.let(:o1, OpenAI::Models::ChatModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index cdda2364..d3267415 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -108,6 +108,8 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :seed + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. sig { returns(T.nilable(T.any(String, T::Array[String]))) } @@ -277,6 +279,8 @@ module OpenAI ) end + # Not supported with latest reasoning models `o3` and `o4-mini`. + # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. module Stop diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index ec1c7ac3..b60585b1 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -12,14 +12,20 @@ module OpenAI sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } attr_accessor :effort - # **computer_use_preview only** + # **Deprecated:** use `summary` instead. # # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } attr_accessor :generate_summary + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. 
+ sig { returns(T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol)) } + attr_accessor :summary + # **o-series models only** # # Configuration options for @@ -27,28 +33,30 @@ module OpenAI sig do params( effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol), + summary: T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol) ) .returns(T.attached_class) end - def self.new(effort: nil, generate_summary: nil); end + def self.new(effort: nil, generate_summary: nil, summary: nil); end sig do override .returns( { effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol) + generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol), + summary: T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol) } ) end def to_hash; end - # **computer_use_preview only** + # **Deprecated:** use `summary` instead. # # A summary of the reasoning performed by the model. This can be useful for - # debugging and understanding the model's reasoning process. One of `concise` or - # `detailed`. + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. module GenerateSummary extend OpenAI::Internal::Type::Enum @@ -56,12 +64,30 @@ module OpenAI OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } + AUTO = T.let(:auto, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) } def self.values; end end + + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. + module Summary + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::Summary) } + OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::Summary::TaggedSymbol) } + + AUTO = T.let(:auto, OpenAI::Models::Reasoning::Summary::TaggedSymbol) + CONCISE = T.let(:concise, OpenAI::Models::Reasoning::Summary::TaggedSymbol) + DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::Summary::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Reasoning::Summary::TaggedSymbol]) } + def self.values; end + end end end end diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index ced272d4..833e1023 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -49,7 +49,7 @@ module OpenAI sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata - # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, # and price points. 
Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -175,6 +175,26 @@ module OpenAI
sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void }
attr_writer :reasoning

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ sig { returns(T.nilable(OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol)) }
+ attr_accessor :service_tier
+
# The status of the response generation. One of `completed`, `failed`,
# `in_progress`, or `incomplete`.
sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) }
@@ -266,6 +286,7 @@ module OpenAI
max_output_tokens: T.nilable(Integer),
previous_response_id: T.nilable(String),
reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)),
+ service_tier: T.nilable(OpenAI::Models::Responses::Response::ServiceTier::OrSymbol),
status: OpenAI::Models::Responses::ResponseStatus::OrSymbol,
text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash),
truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol),
@@ -292,6 +313,7 @@ module OpenAI
max_output_tokens: nil,
previous_response_id: nil,
reasoning: nil,
+ service_tier: nil,
status: nil,
text: nil,
truncation: nil,
@@ -344,6 +366,7 @@ module OpenAI
max_output_tokens: T.nilable(Integer),
previous_response_id: T.nilable(String),
reasoning: T.nilable(OpenAI::Models::Reasoning),
+ service_tier: T.nilable(OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol),
status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol,
text: OpenAI::Models::Responses::ResponseTextConfig,
truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol),
@@ -406,6 +429,38 @@ module OpenAI
def self.variants; end
end

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'. 
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
module ServiceTier
extend OpenAI::Internal::Type::Enum

TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::ServiceTier) }
OrSymbol =
T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) }

AUTO = T.let(:auto, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol)
DEFAULT = T.let(:default, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol)
FLEX = T.let(:flex, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol)

sig { override.returns(T::Array[OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol]) }
def self.values; end
end

# The truncation strategy to use for the model response.
#
# - `auto`: If the context of this response and previous ones exceeds the model's
diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi
index f462e65e..68125cc2 100644
--- a/rbi/lib/openai/models/responses/response_create_params.rbi
+++ b/rbi/lib/openai/models/responses/response_create_params.rbi
@@ -40,7 +40,7 @@ module OpenAI
end
attr_accessor :input

- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -111,6 +111,26 @@ module OpenAI
sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void }
attr_writer :reasoning

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol)) }
+ attr_accessor :service_tier
+
# Whether to store the generated model response for later retrieval via API. 
sig { returns(T.nilable(T::Boolean)) }
attr_accessor :store

@@ -268,6 +288,7 @@ module OpenAI
parallel_tool_calls: T.nilable(T::Boolean),
previous_response_id: T.nilable(String),
reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)),
+ service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol),
store: T.nilable(T::Boolean),
temperature: T.nilable(Float),
text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash),
@@ -303,6 +324,7 @@ module OpenAI
parallel_tool_calls: nil,
previous_response_id: nil,
reasoning: nil,
+ service_tier: nil,
store: nil,
temperature: nil,
text: nil,
@@ -347,6 +369,7 @@ module OpenAI
parallel_tool_calls: T.nilable(T::Boolean),
previous_response_id: T.nilable(String),
reasoning: T.nilable(OpenAI::Models::Reasoning),
+ service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol),
store: T.nilable(T::Boolean),
temperature: T.nilable(Float),
text: OpenAI::Models::Responses::ResponseTextConfig,
@@ -410,6 +433,39 @@ module OpenAI
def self.variants; end
end

+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ module ServiceTier
+ extend OpenAI::Internal::Type::Enum
+
+ TaggedSymbol =
+ T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier) }
+ OrSymbol =
+ T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) }
+
+ AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol)
+ DEFAULT = T.let(:default, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol)
+ FLEX = T.let(:flex, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol)
+
+ sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol]) }
+ def self.values; end
+ end
+
# How the model should select which tool (or tools) to use when generating a
# response. See the `tools` parameter to see how to specify which tools the model
# can call.
diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/lib/openai/resources/chat/completions.rbi
index 672459b2..6a7ba0a2 100644
--- a/rbi/lib/openai/resources/chat/completions.rbi
+++ b/rbi/lib/openai/resources/chat/completions.rbi
@@ -94,7 +94,7 @@ module OpenAI
# [images](https://platform.openai.com/docs/guides/vision), and
# [audio](https://platform.openai.com/docs/guides/audio).
messages:,
- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -150,7 +150,7 @@ module OpenAI
#
# This value is now deprecated in favor of `max_completion_tokens`, and is not
# compatible with
- # [o1 series models](https://platform.openai.com/docs/guides/reasoning).
+ # [o-series models](https://platform.openai.com/docs/guides/reasoning).
max_tokens: nil,
# Set of 16 key-value pairs that can be attached to an object. This can be useful
# for storing additional information about the object in a structured format, and
@@ -219,11 +219,16 @@ module OpenAI
# latency guarantee.
# - If set to 'default', the request will be processed using the default service
# tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
# - When not set, the default behavior is 'auto'.
#
# When this parameter is set, the response body will include the `service_tier`
# utilized.
service_tier: nil,
+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
stop: nil,
@@ -362,7 +367,7 @@ module OpenAI
# [images](https://platform.openai.com/docs/guides/vision), and
# [audio](https://platform.openai.com/docs/guides/audio).
messages:,
- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -418,7 +423,7 @@ module OpenAI
#
# This value is now deprecated in favor of `max_completion_tokens`, and is not
# compatible with
- # [o1 series models](https://platform.openai.com/docs/guides/reasoning).
+ # [o-series models](https://platform.openai.com/docs/guides/reasoning).
max_tokens: nil,
# Set of 16 key-value pairs that can be attached to an object. This can be useful
# for storing additional information about the object in a structured format, and
@@ -487,11 +492,16 @@ module OpenAI
# latency guarantee.
# - If set to 'default', the request will be processed using the default service
# tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
# - When not set, the default behavior is 'auto'.
#
# When this parameter is set, the response body will include the `service_tier`
# utilized.
service_tier: nil,
+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
stop: nil,
diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/lib/openai/resources/completions.rbi
index f57faf40..82e27795 100644
--- a/rbi/lib/openai/resources/completions.rbi
+++ b/rbi/lib/openai/resources/completions.rbi
@@ -109,6 +109,8 @@ module OpenAI
# Determinism is not guaranteed, and you should refer to the `system_fingerprint`
# response parameter to monitor changes in the backend. 
seed: nil,
+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
stop: nil,
@@ -245,6 +247,8 @@ module OpenAI
# Determinism is not guaranteed, and you should refer to the `system_fingerprint`
# response parameter to monitor changes in the backend.
seed: nil,
+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
stop: nil,
diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi
index 36919e9b..c890608c 100644
--- a/rbi/lib/openai/resources/responses.rbi
+++ b/rbi/lib/openai/resources/responses.rbi
@@ -52,6 +52,7 @@ module OpenAI
parallel_tool_calls: T.nilable(T::Boolean),
previous_response_id: T.nilable(String),
reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)),
+ service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol),
store: T.nilable(T::Boolean),
temperature: T.nilable(Float),
text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash),
@@ -89,7 +90,7 @@ module OpenAI
# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
input:,
- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -133,6 +134,24 @@ module OpenAI
# Configuration options for
# [reasoning models](https://platform.openai.com/docs/guides/reasoning).
reasoning: nil,
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ service_tier: nil,
# Whether to store the generated model response for later retrieval via API.
store: nil,
# What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will
@@ -234,6 +253,7 @@ module OpenAI
parallel_tool_calls: T.nilable(T::Boolean),
previous_response_id: T.nilable(String),
reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)),
+ service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol),
store: T.nilable(T::Boolean),
temperature: T.nilable(Float),
text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash),
@@ -308,7 +328,7 @@ module OpenAI
# - [Conversation state](https://platform.openai.com/docs/guides/conversation-state)
# - [Function calling](https://platform.openai.com/docs/guides/function-calling)
input:,
- # Model ID used to generate the response, like `gpt-4o` or `o1`. OpenAI offers a
+ # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a
# wide range of models with different capabilities, performance characteristics,
# and price points. Refer to the
# [model guide](https://platform.openai.com/docs/models) to browse and compare
@@ -352,6 +372,24 @@ module OpenAI
# Configuration options for
# [reasoning models](https://platform.openai.com/docs/guides/reasoning).
reasoning: nil,
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
+ service_tier: nil,
# Whether to store the generated model response for later retrieval via API.
store: nil,
# What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 54e0d554..8f3b42e4 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -112,13 +112,14 @@ module OpenAI end end - type service_tier = :scale | :default + type service_tier = :auto | :default | :flex module ServiceTier extend OpenAI::Internal::Type::Enum - SCALE: :scale + AUTO: :auto DEFAULT: :default + FLEX: :flex def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index 3fb9c17d..fb20281b 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -21,12 +21,13 @@ module OpenAI def to_hash: -> OpenAI::Models::Chat::chat_completion_audio_param - type format_ = :wav | :mp3 | :flac | :opus | :pcm16 + type format_ = :wav | :aac | :mp3 | :flac | :opus | :pcm16 module Format extend OpenAI::Internal::Type::Enum WAV: :wav + AAC: :aac MP3: :mp3 FLAC: :flac OPUS: :opus diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 7cba70ad..aabd304a 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -244,13 +244,14 @@ module OpenAI end end - type service_tier = :scale | :default + type service_tier = :auto | :default | :flex module ServiceTier extend OpenAI::Internal::Type::Enum - SCALE: :scale + AUTO: :auto DEFAULT: :default + FLEX: :flex def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index d10cf21d..02d22187 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -244,13 +244,14 @@ module OpenAI def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] end - type service_tier = :auto | :default + type service_tier = :auto | :default | :flex module ServiceTier extend OpenAI::Internal::Type::Enum AUTO: :auto DEFAULT: :default + FLEX: :flex def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::service_tier] end diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 31b7ef3b..8f80d09b 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -7,6 +7,10 @@ module OpenAI | :"gpt-4.1-2025-04-14" | :"gpt-4.1-mini-2025-04-14" | :"gpt-4.1-nano-2025-04-14" + | :"o4-mini" + | :"o4-mini-2025-04-16" + | :o3 + | :"o3-2025-04-16" | :"o3-mini" | :"o3-mini-2025-01-31" | :o1 @@ -60,6 +64,10 @@ module OpenAI GPT_4_1_2025_04_14: :"gpt-4.1-2025-04-14" GPT_4_1_MINI_2025_04_14: :"gpt-4.1-mini-2025-04-14" GPT_4_1_NANO_2025_04_14: :"gpt-4.1-nano-2025-04-14" + O4_MINI: :"o4-mini" + O4_MINI_2025_04_16: :"o4-mini-2025-04-16" + O3: :o3 + O3_2025_04_16: :"o3-2025-04-16" O3_MINI: :"o3-mini" O3_MINI_2025_01_31: :"o3-mini-2025-01-31" O1: :o1 diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index c299e779..30c6ad68 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -3,7 +3,8 @@ module OpenAI type reasoning = { effort: 
OpenAI::Models::reasoning_effort?, - generate_summary: OpenAI::Models::Reasoning::generate_summary? + generate_summary: OpenAI::Models::Reasoning::generate_summary?, + summary: OpenAI::Models::Reasoning::summary? } class Reasoning < OpenAI::Internal::Type::BaseModel @@ -11,23 +12,39 @@ module OpenAI attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary? + attr_accessor summary: OpenAI::Models::Reasoning::summary? + def initialize: ( ?effort: OpenAI::Models::reasoning_effort?, - ?generate_summary: OpenAI::Models::Reasoning::generate_summary? + ?generate_summary: OpenAI::Models::Reasoning::generate_summary?, + ?summary: OpenAI::Models::Reasoning::summary? ) -> void def to_hash: -> OpenAI::Models::reasoning - type generate_summary = :concise | :detailed + type generate_summary = :auto | :concise | :detailed module GenerateSummary extend OpenAI::Internal::Type::Enum + AUTO: :auto CONCISE: :concise DETAILED: :detailed def self?.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary] end + + type summary = :auto | :concise | :detailed + + module Summary + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + CONCISE: :concise + DETAILED: :detailed + + def self?.values: -> ::Array[OpenAI::Models::Reasoning::summary] + end end end end diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 86acfc4c..2957a8d3 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -20,6 +20,7 @@ module OpenAI max_output_tokens: Integer?, previous_response_id: String?, reasoning: OpenAI::Models::Reasoning?, + service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Models::Responses::ResponseTextConfig, truncation: OpenAI::Models::Responses::Response::truncation?, @@ -62,6 +63,8 @@ module OpenAI attr_accessor reasoning: OpenAI::Models::Reasoning? + attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier? + attr_reader status: OpenAI::Models::Responses::response_status? 
def status=: ( @@ -103,6 +106,7 @@ module OpenAI ?max_output_tokens: Integer?, ?previous_response_id: String?, ?reasoning: OpenAI::Models::Reasoning?, + ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, ?text: OpenAI::Models::Responses::ResponseTextConfig, ?truncation: OpenAI::Models::Responses::Response::truncation?, @@ -154,6 +158,18 @@ module OpenAI def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] end + type service_tier = :auto | :default | :flex + + module ServiceTier + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + DEFAULT: :default + FLEX: :flex + + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] + end + type truncation = :auto | :disabled module Truncation diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 5e04dd5a..1e84251f 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -12,6 +12,7 @@ module OpenAI parallel_tool_calls: bool?, previous_response_id: String?, reasoning: OpenAI::Models::Reasoning?, + service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, temperature: Float?, text: OpenAI::Models::Responses::ResponseTextConfig, @@ -45,6 +46,8 @@ module OpenAI attr_accessor reasoning: OpenAI::Models::Reasoning? + attr_accessor service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier? + attr_accessor store: bool? attr_accessor temperature: Float? @@ -85,6 +88,7 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?reasoning: OpenAI::Models::Reasoning?, + ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, ?text: OpenAI::Models::Responses::ResponseTextConfig, @@ -106,6 +110,18 @@ module OpenAI def self?.variants: -> [String, OpenAI::Models::Responses::response_input] end + type service_tier = :auto | :default | :flex + + module ServiceTier + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + DEFAULT: :default + FLEX: :flex + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::service_tier] + end + type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Models::Responses::ToolChoiceTypes diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 2dfcf44d..40041a27 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -13,6 +13,7 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?reasoning: OpenAI::Models::Reasoning?, + ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, ?text: OpenAI::Models::Responses::ResponseTextConfig, @@ -34,6 +35,7 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?reasoning: OpenAI::Models::Reasoning?, + ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, ?text: OpenAI::Models::Responses::ResponseTextConfig, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index dd6971e5..c5066e33 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -29,6 +29,7 @@ def 
test_create_required_params max_output_tokens: Integer | nil, previous_response_id: String | nil, reasoning: OpenAI::Models::Reasoning | nil, + service_tier: OpenAI::Models::Responses::Response::ServiceTier | nil, status: OpenAI::Models::Responses::ResponseStatus | nil, text: OpenAI::Models::Responses::ResponseTextConfig | nil, truncation: OpenAI::Models::Responses::Response::Truncation | nil, @@ -64,6 +65,7 @@ def test_retrieve max_output_tokens: Integer | nil, previous_response_id: String | nil, reasoning: OpenAI::Models::Reasoning | nil, + service_tier: OpenAI::Models::Responses::Response::ServiceTier | nil, status: OpenAI::Models::Responses::ResponseStatus | nil, text: OpenAI::Models::Responses::ResponseTextConfig | nil, truncation: OpenAI::Models::Responses::Response::Truncation | nil, From 7542538792400b2df2f32b44f3e5480484ccea37 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 18:58:44 +0000 Subject: [PATCH 136/295] feat(client): enable setting base URL from environment variable --- lib/openai/client.rb | 5 +++-- rbi/lib/openai/client.rbi | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/openai/client.rb b/lib/openai/client.rb index a8f3a0e2..15aa8d39 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -86,7 +86,8 @@ class Client < OpenAI::Internal::Transport::BaseClient # # @param project [String, nil] Defaults to `ENV["OPENAI_PROJECT_ID"]` # - # @param base_url [String, nil] Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` + # @param base_url [String, nil] Override the default base URL for the API, e.g., + # `"https://api.example.com/v2/"`. Defaults to `ENV["OPENAI_BASE_URL"]` # # @param max_retries [Integer] Max number of retries to attempt after a failed retryable request. # @@ -99,7 +100,7 @@ def initialize( api_key: ENV["OPENAI_API_KEY"], organization: ENV["OPENAI_ORG_ID"], project: ENV["OPENAI_PROJECT_ID"], - base_url: nil, + base_url: ENV["OPENAI_BASE_URL"], max_retries: DEFAULT_MAX_RETRIES, timeout: DEFAULT_TIMEOUT_IN_SECONDS, initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY, diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 3886f208..64aba6c1 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -89,8 +89,9 @@ module OpenAI organization: ENV["OPENAI_ORG_ID"], # Defaults to `ENV["OPENAI_PROJECT_ID"]` project: ENV["OPENAI_PROJECT_ID"], - # Override the default base URL for the API, e.g., `"https://api.example.com/v2/"` - base_url: nil, + # Override the default base URL for the API, e.g., + # `"https://api.example.com/v2/"`. Defaults to `ENV["OPENAI_BASE_URL"]` + base_url: ENV["OPENAI_BASE_URL"], # Max number of retries to attempt after a failed retryable request. 
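# For illustration, a hedged sketch of how the new env-var default composes
# with the other options (the URL below is a hypothetical local proxy, not a
# real endpoint):
#
#   ENV["OPENAI_BASE_URL"] = "http://localhost:4010"
#   client = OpenAI::Client.new(max_retries: 2) # base_url now falls back to the env var
#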
max_retries: DEFAULT_MAX_RETRIES, timeout: DEFAULT_TIMEOUT_IN_SECONDS, From ab708d4c6fecfc60333bcfdde965c798315bc4ef Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 19:28:08 +0000 Subject: [PATCH 137/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index aaf968a1..b56c3d0b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.3" + ".": "0.1.0-alpha.4" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 0bcf3d3b..d5303e6f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.1.0.pre.alpha.3) + openai (0.1.0.pre.alpha.4) connection_pool GEM diff --git a/README.md b/README.md index f1c0d429..954ea98a 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.1.0.pre.alpha.3" +gem "openai", "~> 0.1.0.pre.alpha.4" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index f0a95660..3790ca0e 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.alpha.3" + VERSION = "0.1.0.pre.alpha.4" end From 145b6b6387b8b025ef552522966de8e14f602656 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 11:47:06 +0000 Subject: [PATCH 138/295] fix(client): send correct HTTP path --- lib/openai/internal/transport/pooled_net_requester.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index a9ef117f..df4e1205 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -57,11 +57,15 @@ def calibrate_socket_timeout(conn, deadline) # @return [Array(Net::HTTPGenericRequest, Proc)] def build_request(request, &blk) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) + + # ensure we construct a URI class of the right scheme + url = URI(url.to_s) + req = Net::HTTPGenericRequest.new( method.to_s.upcase, !body.nil?, method != :head, - url.to_s + url ) headers.each { req[_1] = _2 } From a3f75097f91dfc86c0422b73b9b2f4aa693c312d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 17:00:54 +0000 Subject: [PATCH 139/295] chore(internal): contribute.md and contributor QoL improvements --- .gitignore | 6 +- .ruby-version | 1 + CONTRIBUTING.md | 132 ++++++++++++++++++++++++++++++++++++++++++ Rakefile | 65 ++++++++++++++++----- openai.gemspec | 17 ++++-- scripts/bootstrap | 4 +- scripts/format | 1 + scripts/lint | 2 + scripts/test | 2 +- sorbet/rbi/.gitignore | 2 + 10 files changed, 209 insertions(+), 23 deletions(-) create mode 100644 .ruby-version create mode 100644 CONTRIBUTING.md create mode 100644 sorbet/rbi/.gitignore diff --git a/.gitignore b/.gitignore index 8b1228a8..3d26ceed 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,10 @@ *.gem .idea/ +.ignore .prism.log .ruby-lsp/ .yardoc/ -Brewfile.lock.json 
bin/tapioca +Brewfile.lock.json doc/ -sorbet/* -!/sorbet/config +sorbet/tapioca/* diff --git a/.ruby-version b/.ruby-version new file mode 100644 index 00000000..fd2a0186 --- /dev/null +++ b/.ruby-version @@ -0,0 +1 @@ +3.1.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..d800af1c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,132 @@ +## Setting up the environment + +This repository contains a `.ruby-version` file, which should work with either [rbenv](https://github.com/rbenv/rbenv) or [asdf](https://github.com/asdf-vm/asdf) with the [ruby plugin](https://github.com/asdf-vm/asdf-ruby). + +Please follow the instructions for your preferred version manager to install the Ruby version specified in the `.ruby-version` file. + +To set up the repository, run: + +```bash +$ ./scripts/bootstrap +``` + +This will install all the required dependencies. + +## Modifying/Adding code + +Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never modify the contents of the `examples/` directory. + +## Adding and running examples + +All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. + +```ruby +#!/usr/bin/env ruby +# frozen_string_literal: true + +require_relative "../lib/openai" + +# ... +``` + +```bash +$ chmod +x './examples/<your-example>.rb' + +# run the example against your api +$ ruby './examples/<your-example>.rb' +``` + +## Using the repository from source + +If you’d like to use the repository from source, you can either install from git or reference a cloned repository: + +To install via git in your `Gemfile`: + +```ruby +gem "openai", git: "https://www.github.com/openai/openai-ruby" +``` + +Alternatively, reference a local copy of the repo: + +```bash +$ git clone -- 'https://www.github.com/openai/openai-ruby' '<path-to-repo>' +``` + +```ruby +gem "openai", path: "<path-to-repo>" +``` + +## Running commands + +Running `rake` by itself will show all runnable commands. + +```bash +$ bundle exec rake +``` + +## Running tests + +Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. + +```bash +$ npx prism mock path/to/your/openapi.yml +``` + +```bash +$ bundle exec rake test +``` + +## Linting and formatting + +This repository uses [rubocop](https://github.com/rubocop/rubocop) for linting and formatting of `*.rb` and `*.rbi` files. [syntax_tree](https://github.com/ruby-syntax-tree/syntax_tree) is used for formatting `*.rbs` files. + +There are two separate type checkers supported by this library: [sorbet](https://github.com/sorbet/sorbet) and [steep](https://github.com/soutaro/steep) are used for verifying `*.rbi` and `*.rbs` files respectively. + +To lint and typecheck: + +```bash +$ bundle exec rake lint +``` + +To format and fix all lint issues automatically: + +```bash +$ bundle exec rake format +``` + +## Editor Support + +### Solargraph + +This library includes [Solargraph](https://solargraph.org) support for both auto-completion and go to definition.
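+ +To enable it, one option is to add the gem to your `Gemfile`'s development group: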
+ +```ruby +gem "solargraph", group: :development +``` + +Note: if you had installed the gem locally using `git: "..."` or `path: "..."`, you must update your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to where the gem is located: + +```yaml +include: + - '<path-to-repo>/lib/**/*.rb' +``` + +### Sorbet + +[Sorbet](https://sorbet.org) should mostly work out of the box when editing this library directly. However, there are some caveats due to the colocation of `*.rb` and `*.rbi` files in the same project. These issues should not otherwise manifest when this library is used as a dependency. + +1. For go to definition usages, sorbet might get confused and may not always navigate to the correct location. + +2. For each generic type in `*.rbi` files, a spurious "Duplicate type member" error is present. + +### Ruby LSP + +The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information. + +## Documentation Preview + +To preview the documentation, run: + +```bash +$ bundle exec rake docs:preview [PORT=8808] +``` diff --git a/Rakefile b/Rakefile index 41d7c926..7a8155db 100644 --- a/Rakefile +++ b/Rakefile @@ -1,5 +1,6 @@ # frozen_string_literal: true +require "pathname" require "securerandom" require "shellwords" @@ -7,10 +8,23 @@ require "minitest/test_task" require "rake/clean" require "rubocop/rake_task" -CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/]) +tapioca = "sorbet/tapioca" +ignore_file = ".ignore" -multitask(default: [:test]) +CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) +CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca) + +multitask(:default) do + sh(*%w[rake --tasks]) +end + +desc("Preview docs; use `PORT=` to change the port") +multitask(:"docs:preview") do + sh(*%w[yard server --bind [::] --reload --quiet --port], ENV.fetch("PORT", "8808")) +end + +desc("Run test suites; use `TEST=path/to/test.rb` to run a specific test file") multitask(:test) do rb = FileList[ENV.fetch("TEST", "./test/**/*_test.rb")] @@ -23,17 +37,20 @@ end rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] -multitask(:rubocop) do +desc("Lint `*.rb(i)`") +multitask(:"lint:rubocop") do lint = xargs + %w[rubocop --fail-level E] + (ENV.key?("CI") ? %w[--format github] : []) sh("#{rubo_find.shelljoin} | #{lint.shelljoin}") end -multitask(:ruboformat) do +desc("Format `*.rb(i)`") +multitask(:"format:rubocop") do fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] sh("#{rubo_find.shelljoin} | #{fmt.shelljoin}") end -multitask(:syntax_tree) do +desc("Format `*.rbs`") +multitask(:"format:syntax_tree") do find = %w[find ./sig -type f -name *.rbs -print0] inplace = /darwin|bsd/ =~ RUBY_PLATFORM ?
%w[-i''] : %w[-i] uuid = SecureRandom.uuid @@ -74,27 +91,49 @@ multitask(:syntax_tree) do fail unless success end -multitask(format: [:ruboformat, :syntax_tree]) +desc("Format everything") +multitask(format: [:"format:rubocop", :"format:syntax_tree"]) -multitask(:steep) do +desc("Typecheck `*.rbs`") +multitask(:"typecheck:steep") do sh(*%w[steep check]) end -multitask(:sorbet) do +desc("Typecheck `*.rbi`") +multitask(:"typecheck:sorbet") do sh(*%w[srb typecheck]) end -file("sorbet/tapioca") do +file(tapioca) do sh(*%w[tapioca init]) end -multitask(typecheck: [:steep, :sorbet]) -multitask(lint: [:rubocop, :typecheck]) +desc("Typecheck everything") +multitask(typecheck: [:"typecheck:steep", :"typecheck:sorbet"]) + +desc("Lint everything") +multitask(lint: [:"lint:rubocop", :typecheck]) + +desc("Build yard docs") +multitask(:"build:docs") do + sh(*%w[yard]) +end + +desc("Build ruby gem") +multitask(:"build:gem") do + # optimizing for grepping through the gem bundle: many tools honour `.ignore` files, including VSCode + # + # both `rbi` and `sig` directories are navigable by their respective tool chains and therefore can be ignored by tools such as `rg` + Pathname(ignore_file).write(<<~GLOB) + rbi/* + sig/* + GLOB -multitask(:build) do sh(*%w[gem build -- openai.gemspec]) + rm_rf(ignore_file) end -multitask(release: [:build]) do +desc("Release ruby gem") +multitask(release: [:"build:gem"]) do sh(*%w[gem push], *FileList["openai-*.gem"]) end diff --git a/openai.gemspec b/openai.gemspec index 1171bdbe..64c1b6d9 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -8,12 +8,21 @@ Gem::Specification.new do |s| s.summary = "Ruby library to access the OpenAI API" s.authors = ["OpenAI"] s.email = "support@openai.com" - s.files = Dir["lib/**/*.rb", "rbi/**/*.rbi", "sig/**/*.rbs", "manifest.yaml", "CHANGELOG.md", "SECURITY.md"] - s.extra_rdoc_files = ["README.md"] - s.required_ruby_version = ">= 3.0.0" - s.add_dependency "connection_pool" s.homepage = "https://gemdocs.org/gems/openai" s.metadata["homepage_uri"] = s.homepage s.metadata["source_code_uri"] = "https://github.com/openai/openai-ruby" s.metadata["rubygems_mfa_required"] = false.to_s + s.required_ruby_version = ">= 3.0.0" + + s.files = Dir[ + "lib/**/*.rb", + "rbi/**/*.rbi", + "sig/**/*.rbs", + "manifest.yaml", + "SECURITY.md", + "CHANGELOG.md", + ".ignore" + ] + s.extra_rdoc_files = ["README.md"] + s.add_dependency "connection_pool" end diff --git a/scripts/bootstrap b/scripts/bootstrap index 88566757..cc31aa85 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -2,7 +2,7 @@ set -e -cd "$(dirname "$0")/.." +cd -- "$(dirname -- "$0")/.." if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ]; then brew bundle check >/dev/null 2>&1 || { @@ -13,4 +13,4 @@ fi echo "==> Installing Ruby dependencies…" -bundle install +exec -- bundle install "$@" diff --git a/scripts/format b/scripts/format index 67b400de..177d1e63 100755 --- a/scripts/format +++ b/scripts/format @@ -5,4 +5,5 @@ set -e cd -- "$(dirname -- "$0")/.." echo "==> Running formatters" + exec -- bundle exec rake format "$@" diff --git a/scripts/lint b/scripts/lint index 39581dc1..08b0dbeb 100755 --- a/scripts/lint +++ b/scripts/lint @@ -4,4 +4,6 @@ set -e cd -- "$(dirname -- "$0")/.." +echo "==> Running linters" + exec -- bundle exec rake lint "$@" diff --git a/scripts/test b/scripts/test index 2e1fe093..8e5d35cd 100755 --- a/scripts/test +++ b/scripts/test @@ -2,7 +2,7 @@ set -e -cd "$(dirname "$0")/.." +cd -- "$(dirname -- "$0")/.." 
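# ANSI colour escape sequences used for terminal output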
RED='\033[0;31m' GREEN='\033[0;32m' diff --git a/sorbet/rbi/.gitignore b/sorbet/rbi/.gitignore new file mode 100644 index 00000000..d6b7ef32 --- /dev/null +++ b/sorbet/rbi/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore From 1c73271f3fb5b9f1756c45b97bf04d53f3a44064 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 17:09:17 +0000 Subject: [PATCH 140/295] fix: always send idempotency header when specified as a request option --- lib/openai/internal/transport/base_client.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 24d1f21d..30b12835 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -253,7 +253,7 @@ def initialize( if @idempotency_header && !headers.key?(@idempotency_header) && - !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) + (!Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) || opts.key?(:idempotency_key)) headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } end From 1acb9701556f527945c79cc0b69cc6d1b10d7ff9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 20:47:21 +0000 Subject: [PATCH 141/295] chore: refine `#inspect` and `#to_s` for model classes --- lib/openai/internal/type/base_model.rb | 46 ++++++++++++++------- rbi/lib/openai/internal/type/base_model.rbi | 9 ++++ sig/openai/internal/type/base_model.rbs | 6 +++ 3 files changed, 46 insertions(+), 15 deletions(-) diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index efc555b7..239dbfd3 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -338,6 +338,27 @@ def deconstruct_keys(keys) .to_h end + class << self + # @param model [OpenAI::Internal::Type::BaseModel] + # + # @return [Hash{Symbol=>Object}] + def walk(model) + walk = ->(x) do + case x + in OpenAI::Internal::Type::BaseModel + walk.call(x.to_h) + in Hash + x.transform_values(&walk) + in Array + x.map(&walk) + else + x + end + end + walk.call(model) + end + end + # @param a [Object] # # @return [String] @@ -373,13 +394,11 @@ def inspect(depth: 0) depth = depth.succ deferred = fields.transform_values do |field| type, required, nilable = field.fetch_values(:type, :required, :nilable) - -> do - [ - OpenAI::Internal::Type::Converter.inspect(type, depth: depth), - !required || nilable ? "nil" : nil - ].compact.join(" | ") - end - .tap { _1.define_singleton_method(:inspect) { call } } + inspected = [ + OpenAI::Internal::Type::Converter.inspect(type, depth: depth), + !required || nilable ? "nil" : nil + ].compact.join(" | ") + -> { inspected }.tap { _1.define_singleton_method(:inspect) { call } } end "#{name}[#{deferred.inspect}]" @@ -389,15 +408,12 @@ def inspect(depth: 0) # @api private # # @return [String] - def inspect - rows = @data.map do - "#{_1}=#{self.class.known_fields.key?(_1) ? 
public_send(_1).inspect : ''}" - rescue OpenAI::Errors::ConversionError - "#{_1}=#{_2.inspect}" - end + def to_s = self.class.walk(@data).to_s - "#<#{self.class}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" - end + # @api private + # + # @return [String] + def inspect = "#<#{self.class}:0x#{object_id.to_s(16)} #{self}>" end end end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 23fbb5a9..32f6a62c 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -175,6 +175,11 @@ module OpenAI sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::AnyHash) } def deconstruct_keys(keys); end + class << self + sig { params(model: OpenAI::Internal::Type::BaseModel).returns(OpenAI::Internal::AnyHash) } + def walk(model); end + end + sig { params(a: T.anything).returns(String) } def to_json(*a); end @@ -191,6 +196,10 @@ module OpenAI def inspect(depth: 0); end end + # @api private + sig { returns(String) } + def to_s; end + # @api private sig { returns(String) } def inspect; end diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index f6bfeb6a..e3a7d42c 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -69,6 +69,10 @@ module OpenAI def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] + def self.walk: ( + OpenAI::Internal::Type::BaseModel model + ) -> ::Hash[Symbol, top] + def to_json: (*top a) -> String def to_yaml: (*top a) -> String @@ -77,6 +81,8 @@ module OpenAI def self.inspect: (?depth: Integer) -> String + def to_s: -> String + def inspect: -> String end end From 79fae8d05a89f877be56167920c7ad0e2c8e6331 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 09:42:03 +0000 Subject: [PATCH 142/295] chore: make sorbet enums easier to read --- rbi/lib/openai/models/all_models.rbi | 5 +- .../models/audio/speech_create_params.rbi | 10 ++- rbi/lib/openai/models/audio/speech_model.rbi | 2 +- .../audio/transcription_create_params.rbi | 11 +--- .../models/audio/transcription_include.rbi | 3 +- .../audio/translation_create_params.rbi | 5 +- rbi/lib/openai/models/audio_model.rbi | 2 +- .../openai/models/audio_response_format.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 2 +- rbi/lib/openai/models/batch_create_params.rbi | 6 +- .../models/beta/assistant_create_params.rbi | 2 +- .../models/beta/assistant_list_params.rbi | 3 +- .../models/beta/assistant_tool_choice.rbi | 3 +- .../beta/assistant_tool_choice_option.rbi | 5 +- .../models/beta/assistant_update_params.rbi | 5 +- .../openai/models/beta/file_search_tool.rbi | 9 +-- .../beta/thread_create_and_run_params.rbi | 14 +---- .../models/beta/thread_create_params.rbi | 3 +- .../openai/models/beta/threads/image_file.rbi | 3 +- .../models/beta/threads/image_file_delta.rbi | 3 +- .../openai/models/beta/threads/image_url.rbi | 3 +- .../models/beta/threads/image_url_delta.rbi | 3 +- .../openai/models/beta/threads/message.rbi | 9 +-- .../beta/threads/message_create_params.rbi | 3 +- .../models/beta/threads/message_delta.rbi | 3 +- .../beta/threads/message_list_params.rbi | 3 +- rbi/lib/openai/models/beta/threads/run.rbi | 9 +-- .../models/beta/threads/run_create_params.rbi | 20 +----- .../models/beta/threads/run_list_params.rbi | 3 +- .../openai/models/beta/threads/run_status.rbi | 2 +- .../threads/runs/file_search_tool_call.rbi | 18 +----- 
.../models/beta/threads/runs/run_step.rbi | 9 +-- .../beta/threads/runs/run_step_include.rbi | 3 +- .../beta/threads/runs/step_list_params.rbi | 3 +- .../openai/models/chat/chat_completion.rbi | 6 +- .../chat/chat_completion_audio_param.rbi | 8 +-- .../models/chat/chat_completion_chunk.rbi | 18 ++---- .../chat_completion_content_part_image.rbi | 9 +-- ...at_completion_content_part_input_audio.rbi | 9 +-- .../models/chat/chat_completion_modality.rbi | 3 +- .../models/chat/chat_completion_role.rbi | 2 +- .../chat_completion_tool_choice_option.rbi | 5 +- .../models/chat/completion_create_params.rbi | 28 ++------- .../models/chat/completion_list_params.rbi | 3 +- .../chat/completions/message_list_params.rbi | 3 +- rbi/lib/openai/models/chat_model.rbi | 2 +- rbi/lib/openai/models/comparison_filter.rbi | 2 +- rbi/lib/openai/models/completion_choice.rbi | 3 +- .../models/completion_create_params.rbi | 5 +- rbi/lib/openai/models/compound_filter.rbi | 2 +- .../openai/models/embedding_create_params.rbi | 5 +- rbi/lib/openai/models/embedding_model.rbi | 2 +- rbi/lib/openai/models/eval_create_params.rbi | 54 ++-------------- .../openai/models/eval_label_model_grader.rbi | 21 ++----- rbi/lib/openai/models/eval_list_params.rbi | 4 +- .../models/eval_string_check_grader.rbi | 3 +- .../models/eval_text_similarity_grader.rbi | 3 +- ...reate_eval_completions_run_data_source.rbi | 57 +++-------------- .../openai/models/evals/run_list_params.rbi | 6 +- .../evals/runs/output_item_list_params.rbi | 6 +- rbi/lib/openai/models/file_list_params.rbi | 2 +- rbi/lib/openai/models/file_object.rbi | 4 +- rbi/lib/openai/models/file_purpose.rbi | 2 +- .../permission_retrieve_params.rbi | 9 +-- .../models/fine_tuning/fine_tuning_job.rbi | 6 +- .../fine_tuning/fine_tuning_job_event.rbi | 6 +- .../models/fine_tuning/job_create_params.rbi | 8 +-- .../models/image_create_variation_params.rbi | 8 +-- rbi/lib/openai/models/image_edit_params.rbi | 7 +-- .../openai/models/image_generate_params.rbi | 14 ++--- rbi/lib/openai/models/image_model.rbi | 2 +- rbi/lib/openai/models/moderation.rbi | 63 ++++--------------- .../models/moderation_create_params.rbi | 2 +- rbi/lib/openai/models/moderation_model.rbi | 2 +- rbi/lib/openai/models/reasoning.rbi | 5 +- rbi/lib/openai/models/reasoning_effort.rbi | 2 +- .../openai/models/responses/computer_tool.rbi | 3 +- .../models/responses/easy_input_message.rbi | 6 +- .../models/responses/file_search_tool.rbi | 3 +- .../responses/input_item_list_params.rbi | 3 +- rbi/lib/openai/models/responses/response.rbi | 9 +-- .../response_code_interpreter_tool_call.rbi | 3 +- .../responses/response_computer_tool_call.rbi | 15 +---- ...esponse_computer_tool_call_output_item.rbi | 3 +- .../responses/response_create_params.rbi | 8 +-- .../models/responses/response_error.rbi | 3 +- .../response_file_search_tool_call.rbi | 3 +- .../responses/response_function_tool_call.rbi | 3 +- ...esponse_function_tool_call_output_item.rbi | 3 +- .../response_function_web_search.rbi | 3 +- .../models/responses/response_includable.rbi | 3 +- .../models/responses/response_input_audio.rbi | 3 +- .../models/responses/response_input_image.rbi | 3 +- .../models/responses/response_input_item.rbi | 27 ++------ .../responses/response_input_message_item.rbi | 9 +-- .../responses/response_output_message.rbi | 3 +- .../responses/response_reasoning_item.rbi | 3 +- .../models/responses/response_status.rbi | 3 +- .../models/responses/tool_choice_options.rbi | 3 +- .../models/responses/tool_choice_types.rbi | 3 +- 
.../models/responses/web_search_tool.rbi | 6 +- rbi/lib/openai/models/responses_model.rbi | 5 +- rbi/lib/openai/models/upload.rbi | 2 +- rbi/lib/openai/models/vector_store.rbi | 2 +- .../models/vector_store_list_params.rbi | 3 +- .../models/vector_store_search_params.rbi | 3 +- .../models/vector_store_search_response.rbi | 3 +- .../file_batch_list_files_params.rbi | 6 +- .../models/vector_stores/file_list_params.rbi | 6 +- .../vector_stores/vector_store_file.rbi | 6 +- .../vector_stores/vector_store_file_batch.rbi | 3 +- 111 files changed, 207 insertions(+), 566 deletions(-) diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 271f8306..82f906f3 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) O1_PRO_2025_03_19 = @@ -27,7 +26,7 @@ module OpenAI sig do override .returns( - [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::OrSymbol] + [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol] ) end def self.variants; end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 8dd902b4..68db9bed 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -91,7 +91,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } def self.variants; end end @@ -102,12 +102,11 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) @@ -128,8 +127,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 2f4f0bf8..e049d032 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ 
b/rbi/lib/openai/models/audio/speech_model.rbi @@ -7,7 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index c8481144..c6020c3f 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -137,7 +137,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } def self.variants; end end @@ -146,14 +146,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) SEGMENT = diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 732e95a4..bb5758c1 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -7,8 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 6d4b9e08..405309c1 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -80,7 +80,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } def self.variants; end end @@ -91,8 +91,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index ab50d39a..e82aa420 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend 
OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::TaggedSymbol) GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 23a709ab..b7fca47d 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -9,7 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 7542c9b1..8de778c2 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -203,7 +203,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Batch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 6f907cae..3a795500 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -71,8 +71,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETION_WINDOW_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) @@ -88,8 +87,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) V1_CHAT_COMPLETIONS = diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index b0c7ab98..f068ee9b 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -230,7 +230,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 
52cd7565..48df52cc 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -75,8 +75,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 4b2436cd..3ef8c526 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -41,8 +41,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) CODE_INTERPRETER = diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index ce3df8b3..51538b5e 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -21,8 +21,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) @@ -35,7 +34,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice] + [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Beta::AssistantToolChoice] ) end def self.variants; end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 01cbc60a..8e100d6d 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -233,12 +233,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) diff 
--git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index c3df4c59..ad127d97 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -127,14 +127,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index bc0633ec..091a1cab 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -300,7 +300,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -499,8 +499,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) ASSISTANT = @@ -1075,14 +1074,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 9d73e499..6d1ed043 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -191,8 +191,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 788ca5de..c6c13ccf 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -34,8 +34,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index d0d866ed..c677a276 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -40,8 +40,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 27f59579..b98e88e4 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -33,8 +33,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 69c06976..a9c81f3e 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -38,8 +38,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index ae809809..eef51a4c 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -282,8 +282,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) @@ -308,8 +307,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) @@ -324,8 +322,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 94ce2228..6c57ca1f 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -132,8 +132,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index cce9e935..6156e86b 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 11ea50e8..945d30d3 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -85,8 +85,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index fd1569df..f0708be8 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -392,8 +392,7 @@ module OpenAI TaggedSymbol = 
T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_COMPLETION_TOKENS = T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) @@ -432,8 +431,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = @@ -545,8 +543,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) LAST_MESSAGES = diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index bfa2e42d..3d4cc20a 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -442,14 +442,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) @@ -564,7 +557,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -612,14 +605,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 0d5a35b2..d38cd84b 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -76,8 +76,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) DESC = T.let(:desc, 
OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index b9c1490b..7eb4d991 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -11,7 +11,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index b11ca61e..77bfcf06 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -150,14 +150,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let( @@ -298,14 +291,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let( diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 7519ba3f..db09f851 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -209,8 +209,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) @@ -228,8 +227,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) @@ -259,8 +257,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, 
String) } MESSAGE_CREATION = T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 2261f9ce..1632e2a6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = T.let( diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index abba06e2..9f8536b3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -106,8 +106,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index b6ba1367..43d751bd 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -171,8 +171,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) @@ -239,8 +238,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index f7bff346..231ca7da 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -43,8 +43,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) AAC = T.let(:aac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) @@ -62,12 +61,11 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index abee2167..8c755f8f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -272,8 +272,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) @@ -377,14 +376,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) @@ -409,8 +401,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) @@ -478,8 +469,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index e7e3a922..e28956c2 100644 --- 
a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -71,14 +71,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index d365d7c9..9e55ccdf 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -72,14 +72,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 34114b90..0226b92f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -7,8 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index aa6fa947..9be9e3e1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -8,7 +8,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index b7979106..fbdb6fda 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -22,8 +22,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } 
- OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) @@ -36,7 +35,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] ) end def self.variants; end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 9e8b8bd5..6e3970fc 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -539,7 +539,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -568,14 +568,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) @@ -594,7 +587,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] ) end def self.variants; end @@ -643,8 +636,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) @@ -696,8 +688,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) @@ -790,14 +781,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol - ) - end + OrSymbol = 
T.type_alias { T.any(Symbol, String) } LOW = T.let( diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index eec03610..f83e4b89 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -76,8 +76,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 8a39c287..a661288c 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -61,8 +61,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 147d44a6..f4c20501 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ChatModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::ChatModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 15d111b8..19f90169 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -55,7 +55,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 375563ef..4731b8da 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -57,8 +57,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = 
T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index d3267415..a0956081 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -243,12 +243,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_3_5_TURBO_INSTRUCT = T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 5eee5410..4bb4c09c 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -47,7 +47,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 0869c887..b19f81a0 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -111,7 +111,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::EmbeddingModel::TaggedSymbol]) } def self.variants; end end @@ -121,8 +121,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 962b8546..bbaaae0a 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::TaggedSymbol) TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", 
OpenAI::Models::EmbeddingModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/lib/openai/models/eval_create_params.rbi index c35b54fa..dbec61df 100644 --- a/rbi/lib/openai/models/eval_create_params.rbi +++ b/rbi/lib/openai/models/eval_create_params.rbi @@ -382,14 +382,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let( @@ -415,14 +408,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( @@ -455,14 +441,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -577,14 +556,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let( @@ -610,14 +582,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANT = T.let( @@ -640,14 +605,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/lib/openai/models/eval_label_model_grader.rbi index af492766..40ab0320 100644 --- a/rbi/lib/openai/models/eval_label_model_grader.rbi +++ b/rbi/lib/openai/models/eval_label_model_grader.rbi @@ -154,14 +154,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let(:input_text, 
OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) @@ -180,8 +173,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) @@ -198,8 +190,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) @@ -282,8 +273,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let(:output_text, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) @@ -302,8 +292,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_list_params.rbi b/rbi/lib/openai/models/eval_list_params.rbi index bb813db2..e0e69e2a 100644 --- a/rbi/lib/openai/models/eval_list_params.rbi +++ b/rbi/lib/openai/models/eval_list_params.rbi @@ -68,7 +68,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) @@ -83,7 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::OrderBy) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } CREATED_AT = T.let(:created_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) UPDATED_AT = T.let(:updated_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_string_check_grader.rbi b/rbi/lib/openai/models/eval_string_check_grader.rbi index 6a7e26c6..a7c9af35 100644 --- a/rbi/lib/openai/models/eval_string_check_grader.rbi +++ b/rbi/lib/openai/models/eval_string_check_grader.rbi @@ -56,8 +56,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalStringCheckGrader::Operation) } - OrSymbol = 
- T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EQ = T.let(:eq, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) NE = T.let(:ne, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/lib/openai/models/eval_text_similarity_grader.rbi index 13ac3bcc..460cab97 100644 --- a/rbi/lib/openai/models/eval_text_similarity_grader.rbi +++ b/rbi/lib/openai/models/eval_text_similarity_grader.rbi @@ -74,8 +74,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FUZZY_MATCH = T.let(:fuzzy_match, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi index b8977938..83b73c77 100644 --- a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -270,14 +270,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let( @@ -305,14 +298,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( @@ -349,14 +335,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -477,14 +456,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let( @@ -512,14 +484,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANT = T.let( @@ -546,14 +511,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -770,8 +728,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETIONS = T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/lib/openai/models/evals/run_list_params.rbi index cd16d49f..5e68c583 100644 --- a/rbi/lib/openai/models/evals/run_list_params.rbi +++ b/rbi/lib/openai/models/evals/run_list_params.rbi @@ -69,8 +69,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) @@ -85,8 +84,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } QUEUED = T.let(:queued, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi index cd6a0ed0..1d9f802f 100644 --- a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi +++ b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi @@ -75,8 +75,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) @@ -91,8 +90,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FAIL = T.let(:fail, 
OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) PASS = T.let(:pass, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index daad5aea..69d9b1a3 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -71,7 +71,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 15e6c24d..82c4880a 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -100,7 +100,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) @@ -120,7 +120,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 274edce7..7d3a2d58 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -10,7 +10,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FilePurpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::TaggedSymbol) BATCH = T.let(:batch, OpenAI::Models::FilePurpose::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi index b3acb44f..938c8a10 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi @@ -68,14 +68,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASCENDING = T.let(:ascending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) diff --git 
a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b12fca64..6e25d748 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -299,8 +299,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VALIDATING_FILES = T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) @@ -604,8 +603,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 85a3774e..ea515e0f 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) @@ -99,8 +98,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 4acc7f38..13b9cfae 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -149,12 +149,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } BABBAGE_002 = T.let(:"babbage-002", 
OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) @@ -626,8 +625,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 580de965..44cd2758 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -85,7 +85,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -96,8 +96,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) @@ -112,8 +111,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 4e7a68bf..616662cf 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -102,7 +102,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -113,8 +113,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) @@ -129,7 +128,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, String, 
OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index c10c716c..d35fb1e2 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -104,7 +104,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -115,8 +115,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) @@ -132,8 +131,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) @@ -149,8 +147,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) @@ -170,8 +167,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 0203cf30..2dd50c7c 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 03bc7cb0..7c63a9f8 
100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -306,8 +306,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) @@ -322,14 +321,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) @@ -348,8 +340,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) @@ -362,14 +353,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) @@ -385,8 +369,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) @@ -399,8 +382,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) @@ -416,8 +398,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) @@ -431,14 +412,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - 
OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) @@ -459,8 +433,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) IMAGE = @@ -478,8 +451,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) @@ -493,8 +465,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) @@ -510,8 +481,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) @@ -525,14 +495,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) IMAGE = diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 1e4e920e..707f700c 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -94,7 +94,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ModerationModel::TaggedSymbol]) } def self.variants; end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index 7de0fc9c..36b6b843 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { 
T.all(Symbol, OpenAI::Models::ModerationModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ModerationModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) OMNI_MODERATION_2024_09_26 = diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index b60585b1..1223bf2d 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -61,8 +61,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) @@ -79,7 +78,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::Summary) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::Summary::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Reasoning::Summary::TaggedSymbol) CONCISE = T.let(:concise, OpenAI::Models::Reasoning::Summary::TaggedSymbol) diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 1459f48b..11b95ba9 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -12,7 +12,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ReasoningEffort::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOW = T.let(:low, OpenAI::Models::ReasoningEffort::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index c20479e3..50bedd4b 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -51,8 +51,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 6f2f6316..8efd6fd4 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -109,8 +109,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, 
OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) @@ -126,8 +125,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index b16985a4..6d579cea 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -129,8 +129,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 302e530f..9699e037 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 833e1023..5e5dacba 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -401,8 +401,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_OUTPUT_TOKENS = T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) @@ -450,8 +449,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) @@ -472,8 +470,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 4f1a0e10..9c9e05b1 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -158,8 +158,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index ba86d36e..d899a4bc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -150,14 +150,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) @@ -415,8 +408,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) @@ -433,8 +425,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPUTER_CALL = T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 64b2dc16..cbf376cc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -129,8 +129,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, 
OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 68125cc2..64664281 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -455,8 +455,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) @@ -475,7 +474,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] ) end def self.variants; end @@ -493,8 +492,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 843f8868..3beca9b7 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -27,8 +27,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 10068e96..edeca91a 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -63,8 +63,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 769270c2..5f89a8cf 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ 
b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -73,8 +73,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 8f224c19..df902df8 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -61,8 +61,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 5bd85d43..4c9bd2bd 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -43,8 +43,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index dd8aebb9..a6d5df86 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -15,8 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FILE_SEARCH_CALL_RESULTS = T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index b4a5969e..ff2d0075 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -40,8 +40,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) WAV = T.let(:wav, 
OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index f6f84eac..2cf0bfec 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -54,8 +54,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index cbcc9069..7187822f 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -92,8 +92,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) @@ -110,8 +109,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) @@ -130,8 +128,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) @@ -270,14 +267,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( @@ -360,14 +350,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 1eeebaa4..dbae6d9b 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi 
+++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) @@ -104,8 +103,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) @@ -122,8 +120,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index d7a588ea..e9c25f07 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -79,8 +79,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 9d308e35..ecd2a673 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -72,8 +72,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index a887be4c..7fb2ba80 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum 
TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index ccc45284..793acf3c 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -15,8 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index ece62dd0..1b8ed0b6 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -35,8 +35,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) WEB_SEARCH_PREVIEW = diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 75ccebd6..9b23c93c 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -63,8 +63,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) @@ -82,8 +81,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 1ae31d0c..24670d55 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel) } - OrSymbol = - T.type_alias 
{ T.any(Symbol, String, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) O1_PRO_2025_03_19 = @@ -30,7 +29,7 @@ module OpenAI sig do override .returns( - [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol] + [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol] ) end def self.variants; end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 7a9a7404..083dbd10 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -95,7 +95,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Upload::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 3426c9f4..dd704e90 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -163,7 +163,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStore::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index e40b3b45..802bf5c5 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -74,8 +74,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index e5456dff..47525fb1 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -139,8 +139,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 454d54a1..fdfceacf 100644 --- 
a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 8911fb0e..041e3c17 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -98,8 +98,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) @@ -120,8 +119,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 10960b9c..0ef2cb3a 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) @@ -101,8 +100,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 6bf82fb0..72356d37 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -156,8 +156,7 @@ module OpenAI TaggedSymbol = T.type_alias { 
T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) @@ -178,8 +177,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index cef32c64..4b1e6a60 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -129,8 +129,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) From d65b1aaaa9c8e269bacb7de62bdd167254a08c37 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:12:30 +0000 Subject: [PATCH 143/295] chore: use `@!method` instead of `@!parse` for virtual method type definitions --- .yardopts | 1 + lib/openai/internal/type/base_model.rb | 2 + lib/openai/internal/type/enum.rb | 7 +- lib/openai/models/all_models.rb | 12 +- .../models/audio/speech_create_params.rb | 37 +- lib/openai/models/audio/speech_model.rb | 7 +- lib/openai/models/audio/transcription.rb | 28 +- .../audio/transcription_create_params.rb | 49 +- .../audio/transcription_create_response.rb | 5 +- .../models/audio/transcription_include.rb | 7 +- .../models/audio/transcription_segment.rb | 40 +- .../audio/transcription_stream_event.rb | 5 +- .../audio/transcription_text_delta_event.rb | 34 +- .../audio/transcription_text_done_event.rb | 34 +- .../models/audio/transcription_verbose.rb | 22 +- lib/openai/models/audio/transcription_word.rb | 12 +- lib/openai/models/audio/translation.rb | 8 +- .../models/audio/translation_create_params.rb | 30 +- .../audio/translation_create_response.rb | 5 +- .../models/audio/translation_verbose.rb | 14 +- lib/openai/models/audio_model.rb | 7 +- lib/openai/models/audio_response_format.rb | 7 +- .../auto_file_chunking_strategy_param.rb | 14 +- lib/openai/models/batch.rb | 87 ++-- lib/openai/models/batch_cancel_params.rb | 8 +- lib/openai/models/batch_create_params.rb | 30 +- lib/openai/models/batch_error.rb | 14 +- lib/openai/models/batch_list_params.rb | 12 +- lib/openai/models/batch_request_counts.rb | 16 +- lib/openai/models/batch_retrieve_params.rb | 8 +- lib/openai/models/beta/assistant.rb | 89 ++-- .../models/beta/assistant_create_params.rb | 140 ++---- .../models/beta/assistant_delete_params.rb | 8 +- 
lib/openai/models/beta/assistant_deleted.rb | 12 +- .../models/beta/assistant_list_params.rb | 23 +- .../beta/assistant_response_format_option.rb | 5 +- .../models/beta/assistant_retrieve_params.rb | 8 +- .../models/beta/assistant_stream_event.rb | 419 +++++++----------- lib/openai/models/beta/assistant_tool.rb | 5 +- .../models/beta/assistant_tool_choice.rb | 23 +- .../beta/assistant_tool_choice_function.rb | 8 +- .../beta/assistant_tool_choice_option.rb | 12 +- .../models/beta/assistant_update_params.rb | 87 ++-- .../models/beta/code_interpreter_tool.rb | 8 +- lib/openai/models/beta/file_search_tool.rb | 55 +-- lib/openai/models/beta/function_tool.rb | 10 +- .../models/beta/message_stream_event.rb | 95 ++-- .../models/beta/run_step_stream_event.rb | 131 +++--- lib/openai/models/beta/run_stream_event.rb | 165 +++---- lib/openai/models/beta/thread.rb | 58 +-- .../beta/thread_create_and_run_params.rb | 283 ++++-------- .../models/beta/thread_create_params.rb | 152 +++---- .../models/beta/thread_delete_params.rb | 8 +- lib/openai/models/beta/thread_deleted.rb | 12 +- .../models/beta/thread_retrieve_params.rb | 8 +- lib/openai/models/beta/thread_stream_event.rb | 20 +- .../models/beta/thread_update_params.rb | 48 +- lib/openai/models/beta/threads/annotation.rb | 5 +- .../models/beta/threads/annotation_delta.rb | 5 +- .../beta/threads/file_citation_annotation.rb | 32 +- .../threads/file_citation_delta_annotation.rb | 36 +- .../beta/threads/file_path_annotation.rb | 30 +- .../threads/file_path_delta_annotation.rb | 32 +- lib/openai/models/beta/threads/image_file.rb | 17 +- .../beta/threads/image_file_content_block.rb | 16 +- .../models/beta/threads/image_file_delta.rb | 17 +- .../beta/threads/image_file_delta_block.rb | 18 +- lib/openai/models/beta/threads/image_url.rb | 17 +- .../beta/threads/image_url_content_block.rb | 14 +- .../models/beta/threads/image_url_delta.rb | 17 +- .../beta/threads/image_url_delta_block.rb | 16 +- lib/openai/models/beta/threads/message.rb | 114 ++--- .../models/beta/threads/message_content.rb | 5 +- .../beta/threads/message_content_delta.rb | 5 +- .../threads/message_content_part_param.rb | 5 +- .../beta/threads/message_create_params.rb | 51 +-- .../beta/threads/message_delete_params.rb | 10 +- .../models/beta/threads/message_deleted.rb | 12 +- .../models/beta/threads/message_delta.rb | 21 +- .../beta/threads/message_delta_event.rb | 18 +- .../beta/threads/message_list_params.rb | 25 +- .../beta/threads/message_retrieve_params.rb | 10 +- .../beta/threads/message_update_params.rb | 12 +- .../beta/threads/refusal_content_block.rb | 14 +- .../beta/threads/refusal_delta_block.rb | 16 +- .../required_action_function_tool_call.rb | 30 +- lib/openai/models/beta/threads/run.rb | 208 +++------ .../models/beta/threads/run_cancel_params.rb | 10 +- .../models/beta/threads/run_create_params.rb | 141 ++---- .../models/beta/threads/run_list_params.rb | 23 +- .../beta/threads/run_retrieve_params.rb | 10 +- lib/openai/models/beta/threads/run_status.rb | 7 +- .../threads/run_submit_tool_outputs_params.rb | 22 +- .../models/beta/threads/run_update_params.rb | 12 +- .../threads/runs/code_interpreter_logs.rb | 16 +- .../runs/code_interpreter_output_image.rb | 20 +- .../runs/code_interpreter_tool_call.rb | 67 +-- .../runs/code_interpreter_tool_call_delta.rb | 37 +- .../threads/runs/file_search_tool_call.rb | 82 ++-- .../runs/file_search_tool_call_delta.rb | 14 +- .../beta/threads/runs/function_tool_call.rb | 28 +- .../threads/runs/function_tool_call_delta.rb | 30 +- 
.../runs/message_creation_step_details.rb | 22 +- .../models/beta/threads/runs/run_step.rb | 122 ++--- .../beta/threads/runs/run_step_delta.rb | 17 +- .../beta/threads/runs/run_step_delta_event.rb | 18 +- .../runs/run_step_delta_message_delta.rb | 22 +- .../beta/threads/runs/run_step_include.rb | 7 +- .../beta/threads/runs/step_list_params.rb | 27 +- .../beta/threads/runs/step_retrieve_params.rb | 14 +- .../models/beta/threads/runs/tool_call.rb | 5 +- .../beta/threads/runs/tool_call_delta.rb | 5 +- .../threads/runs/tool_call_delta_object.rb | 14 +- .../threads/runs/tool_calls_step_details.rb | 14 +- lib/openai/models/beta/threads/text.rb | 10 +- .../models/beta/threads/text_content_block.rb | 14 +- .../beta/threads/text_content_block_param.rb | 14 +- lib/openai/models/beta/threads/text_delta.rb | 10 +- .../models/beta/threads/text_delta_block.rb | 16 +- lib/openai/models/chat/chat_completion.rb | 82 ++-- ...chat_completion_assistant_message_param.rb | 75 ++-- .../models/chat/chat_completion_audio.rb | 22 +- .../chat/chat_completion_audio_param.rb | 30 +- .../models/chat/chat_completion_chunk.rb | 158 +++---- .../chat/chat_completion_content_part.rb | 33 +- .../chat_completion_content_part_image.rb | 31 +- ...hat_completion_content_part_input_audio.rb | 31 +- .../chat_completion_content_part_refusal.rb | 10 +- .../chat/chat_completion_content_part_text.rb | 16 +- .../models/chat/chat_completion_deleted.rb | 12 +- ...chat_completion_developer_message_param.rb | 25 +- .../chat_completion_function_call_option.rb | 14 +- .../chat_completion_function_message_param.rb | 12 +- .../models/chat/chat_completion_message.rb | 83 ++-- .../chat/chat_completion_message_param.rb | 5 +- .../chat/chat_completion_message_tool_call.rb | 26 +- .../models/chat/chat_completion_modality.rb | 7 +- .../chat/chat_completion_named_tool_choice.rb | 24 +- .../chat_completion_prediction_content.rb | 21 +- .../models/chat/chat_completion_role.rb | 7 +- .../chat/chat_completion_store_message.rb | 12 +- .../chat/chat_completion_stream_options.rb | 12 +- .../chat_completion_system_message_param.rb | 25 +- .../chat/chat_completion_token_logprob.rb | 26 +- .../models/chat/chat_completion_tool.rb | 10 +- .../chat_completion_tool_choice_option.rb | 12 +- .../chat_completion_tool_message_param.rb | 17 +- .../chat_completion_user_message_param.rb | 23 +- .../models/chat/completion_create_params.rb | 213 +++------ .../models/chat/completion_delete_params.rb | 8 +- .../models/chat/completion_list_params.rb | 25 +- .../models/chat/completion_retrieve_params.rb | 8 +- .../models/chat/completion_update_params.rb | 10 +- .../chat/completions/message_list_params.rb | 21 +- lib/openai/models/chat_model.rb | 7 +- lib/openai/models/comparison_filter.rb | 30 +- lib/openai/models/completion.rb | 26 +- lib/openai/models/completion_choice.rb | 35 +- lib/openai/models/completion_create_params.rb | 79 ++-- lib/openai/models/completion_usage.rb | 69 +-- lib/openai/models/compound_filter.rb | 26 +- .../models/create_embedding_response.rb | 28 +- lib/openai/models/embedding.rb | 16 +- lib/openai/models/embedding_create_params.rb | 35 +- lib/openai/models/embedding_model.rb | 7 +- lib/openai/models/error_object.rb | 14 +- lib/openai/models/eval_create_params.rb | 207 +++------ lib/openai/models/eval_create_response.rb | 58 +-- .../models/eval_custom_data_source_config.rb | 24 +- lib/openai/models/eval_delete_params.rb | 8 +- lib/openai/models/eval_delete_response.rb | 12 +- lib/openai/models/eval_label_model_grader.rb | 108 ++--- 
lib/openai/models/eval_list_params.rb | 30 +- lib/openai/models/eval_list_response.rb | 58 +-- lib/openai/models/eval_retrieve_params.rb | 8 +- lib/openai/models/eval_retrieve_response.rb | 58 +-- ...l_stored_completions_data_source_config.rb | 24 +- lib/openai/models/eval_string_check_grader.rb | 29 +- .../models/eval_text_similarity_grader.rb | 29 +- lib/openai/models/eval_update_params.rb | 12 +- lib/openai/models/eval_update_response.rb | 58 +-- ...create_eval_completions_run_data_source.rb | 224 ++++------ .../create_eval_jsonl_run_data_source.rb | 51 +-- lib/openai/models/evals/eval_api_error.rb | 14 +- lib/openai/models/evals/run_cancel_params.rb | 10 +- .../models/evals/run_cancel_response.rb | 109 ++--- lib/openai/models/evals/run_create_params.rb | 19 +- .../models/evals/run_create_response.rb | 109 ++--- lib/openai/models/evals/run_delete_params.rb | 10 +- .../models/evals/run_delete_response.rb | 12 +- lib/openai/models/evals/run_list_params.rb | 30 +- lib/openai/models/evals/run_list_response.rb | 109 ++--- .../models/evals/run_retrieve_params.rb | 10 +- .../models/evals/run_retrieve_response.rb | 109 ++--- .../evals/runs/output_item_list_params.rb | 32 +- .../evals/runs/output_item_list_response.rb | 130 ++---- .../evals/runs/output_item_retrieve_params.rb | 12 +- .../runs/output_item_retrieve_response.rb | 130 ++---- lib/openai/models/file_chunking_strategy.rb | 5 +- .../models/file_chunking_strategy_param.rb | 5 +- lib/openai/models/file_content_params.rb | 8 +- lib/openai/models/file_create_params.rb | 12 +- lib/openai/models/file_delete_params.rb | 8 +- lib/openai/models/file_deleted.rb | 12 +- lib/openai/models/file_list_params.rb | 23 +- lib/openai/models/file_object.rb | 55 +-- lib/openai/models/file_purpose.rb | 7 +- lib/openai/models/file_retrieve_params.rb | 8 +- .../checkpoints/permission_create_params.rb | 10 +- .../checkpoints/permission_create_response.rb | 20 +- .../checkpoints/permission_delete_params.rb | 8 +- .../checkpoints/permission_delete_response.rb | 12 +- .../checkpoints/permission_retrieve_params.rb | 23 +- .../permission_retrieve_response.rb | 36 +- .../models/fine_tuning/fine_tuning_job.rb | 247 ++++------- .../fine_tuning/fine_tuning_job_event.rb | 38 +- .../fine_tuning_job_wandb_integration.rb | 24 +- ...ine_tuning_job_wandb_integration_object.rb | 10 +- .../models/fine_tuning/job_cancel_params.rb | 8 +- .../models/fine_tuning/job_create_params.rb | 228 ++++------ .../fine_tuning/job_list_events_params.rb | 12 +- .../models/fine_tuning/job_list_params.rb | 14 +- .../models/fine_tuning/job_retrieve_params.rb | 8 +- .../jobs/checkpoint_list_params.rb | 12 +- .../jobs/fine_tuning_job_checkpoint.rb | 72 +-- lib/openai/models/function_definition.rb | 14 +- lib/openai/models/image.rb | 16 +- .../models/image_create_variation_params.rb | 39 +- lib/openai/models/image_edit_params.rb | 56 +-- lib/openai/models/image_generate_params.rb | 70 +-- lib/openai/models/image_model.rb | 7 +- lib/openai/models/images_response.rb | 10 +- lib/openai/models/model.rb | 18 +- lib/openai/models/model_delete_params.rb | 8 +- lib/openai/models/model_deleted.rb | 12 +- lib/openai/models/model_list_params.rb | 8 +- lib/openai/models/model_retrieve_params.rb | 8 +- lib/openai/models/moderation.rb | 264 ++++------- lib/openai/models/moderation_create_params.rb | 22 +- .../models/moderation_create_response.rb | 16 +- .../models/moderation_image_url_input.rb | 26 +- lib/openai/models/moderation_model.rb | 7 +- .../models/moderation_multi_modal_input.rb | 5 +- 
lib/openai/models/moderation_text_input.rb | 14 +- .../other_file_chunking_strategy_object.rb | 16 +- lib/openai/models/reasoning.rb | 36 +- lib/openai/models/reasoning_effort.rb | 7 +- .../models/response_format_json_object.rb | 16 +- .../models/response_format_json_schema.rb | 36 +- lib/openai/models/response_format_text.rb | 12 +- lib/openai/models/responses/computer_tool.rb | 27 +- .../models/responses/easy_input_message.rb | 43 +- .../models/responses/file_search_tool.rb | 50 +-- lib/openai/models/responses/function_tool.rb | 24 +- .../responses/input_item_list_params.rb | 25 +- lib/openai/models/responses/response.rb | 117 ++--- .../responses/response_audio_delta_event.rb | 14 +- .../responses/response_audio_done_event.rb | 12 +- .../response_audio_transcript_delta_event.rb | 14 +- .../response_audio_transcript_done_event.rb | 12 +- ..._code_interpreter_call_code_delta_event.rb | 16 +- ...e_code_interpreter_call_code_done_event.rb | 16 +- ...e_code_interpreter_call_completed_event.rb | 16 +- ...code_interpreter_call_in_progress_event.rb | 16 +- ...ode_interpreter_call_interpreting_event.rb | 16 +- .../response_code_interpreter_tool_call.rb | 70 ++- .../responses/response_completed_event.rb | 14 +- .../responses/response_computer_tool_call.rb | 218 ++++----- ...response_computer_tool_call_output_item.rb | 41 +- ...se_computer_tool_call_output_screenshot.rb | 16 +- .../models/responses/response_content.rb | 5 +- .../response_content_part_added_event.rb | 25 +- .../response_content_part_done_event.rb | 25 +- .../responses/response_create_params.rb | 91 ++-- .../responses/response_created_event.rb | 14 +- .../responses/response_delete_params.rb | 8 +- lib/openai/models/responses/response_error.rb | 21 +- .../models/responses/response_error_event.rb | 18 +- .../models/responses/response_failed_event.rb | 14 +- ...sponse_file_search_call_completed_event.rb | 16 +- ...onse_file_search_call_in_progress_event.rb | 16 +- ...sponse_file_search_call_searching_event.rb | 16 +- .../response_file_search_tool_call.rb | 52 +-- .../responses/response_format_text_config.rb | 5 +- ...response_format_text_json_schema_config.rb | 24 +- ...nse_function_call_arguments_delta_event.rb | 18 +- ...onse_function_call_arguments_done_event.rb | 18 +- .../responses/response_function_tool_call.rb | 33 +- .../response_function_tool_call_item.rb | 16 +- ...response_function_tool_call_output_item.rb | 23 +- .../responses/response_function_web_search.rb | 27 +- .../responses/response_in_progress_event.rb | 14 +- .../models/responses/response_includable.rb | 7 +- .../responses/response_incomplete_event.rb | 14 +- .../models/responses/response_input_audio.rb | 23 +- .../responses/response_input_content.rb | 5 +- .../models/responses/response_input_file.rb | 18 +- .../models/responses/response_input_image.rb | 27 +- .../models/responses/response_input_item.rb | 134 ++---- .../responses/response_input_message_item.rb | 37 +- .../models/responses/response_input_text.rb | 14 +- lib/openai/models/responses/response_item.rb | 5 +- .../models/responses/response_item_list.rb | 20 +- .../models/responses/response_output_audio.rb | 16 +- .../models/responses/response_output_item.rb | 5 +- .../response_output_item_added_event.rb | 16 +- .../response_output_item_done_event.rb | 16 +- .../responses/response_output_message.rb | 32 +- .../responses/response_output_refusal.rb | 14 +- .../models/responses/response_output_text.rb | 73 ++- .../responses/response_reasoning_item.rb | 37 +- .../responses/response_refusal_delta_event.rb | 20 +- 
.../responses/response_refusal_done_event.rb | 20 +- .../responses/response_retrieve_params.rb | 10 +- .../models/responses/response_status.rb | 7 +- .../models/responses/response_stream_event.rb | 5 +- .../response_text_annotation_delta_event.rb | 89 ++-- .../models/responses/response_text_config.rb | 20 +- .../responses/response_text_delta_event.rb | 20 +- .../responses/response_text_done_event.rb | 20 +- lib/openai/models/responses/response_usage.rb | 46 +- ...esponse_web_search_call_completed_event.rb | 16 +- ...ponse_web_search_call_in_progress_event.rb | 16 +- ...esponse_web_search_call_searching_event.rb | 16 +- lib/openai/models/responses/tool.rb | 5 +- .../models/responses/tool_choice_function.rb | 14 +- .../models/responses/tool_choice_options.rb | 7 +- .../models/responses/tool_choice_types.rb | 21 +- .../models/responses/web_search_tool.rb | 50 +-- lib/openai/models/responses_model.rb | 12 +- .../models/static_file_chunking_strategy.rb | 10 +- .../static_file_chunking_strategy_object.rb | 10 +- ...tic_file_chunking_strategy_object_param.rb | 14 +- lib/openai/models/upload.rb | 35 +- lib/openai/models/upload_cancel_params.rb | 8 +- lib/openai/models/upload_complete_params.rb | 12 +- lib/openai/models/upload_create_params.rb | 16 +- .../models/uploads/part_create_params.rb | 10 +- lib/openai/models/uploads/upload_part.rb | 18 +- lib/openai/models/vector_store.rb | 86 ++-- .../models/vector_store_create_params.rb | 42 +- .../models/vector_store_delete_params.rb | 8 +- lib/openai/models/vector_store_deleted.rb | 12 +- lib/openai/models/vector_store_list_params.rb | 23 +- .../models/vector_store_retrieve_params.rb | 8 +- .../models/vector_store_search_params.rb | 59 +-- .../models/vector_store_search_response.rb | 38 +- .../models/vector_store_update_params.rb | 28 +- .../vector_stores/file_batch_cancel_params.rb | 10 +- .../vector_stores/file_batch_create_params.rb | 19 +- .../file_batch_list_files_params.rb | 34 +- .../file_batch_retrieve_params.rb | 10 +- .../vector_stores/file_content_params.rb | 10 +- .../vector_stores/file_content_response.rb | 10 +- .../vector_stores/file_create_params.rb | 19 +- .../vector_stores/file_delete_params.rb | 10 +- .../models/vector_stores/file_list_params.rb | 32 +- .../vector_stores/file_retrieve_params.rb | 10 +- .../vector_stores/file_update_params.rb | 17 +- .../models/vector_stores/vector_store_file.rb | 76 +--- .../vector_stores/vector_store_file_batch.rb | 45 +- .../vector_store_file_deleted.rb | 12 +- lib/openai/request_options.rb | 7 +- rbi/lib/openai/internal/type/base_model.rbi | 1 + rbi/lib/openai/internal/type/enum.rbi | 6 - sig/openai/internal/type/enum.rbs | 2 - test/openai/internal/type/base_model_test.rb | 2 + 367 files changed, 4185 insertions(+), 7968 deletions(-) diff --git a/.yardopts b/.yardopts index 004c697b..5757768a 100644 --- a/.yardopts +++ b/.yardopts @@ -1,3 +1,4 @@ +--type-name-tag generic:Generic --markup markdown --markup-provider redcarpet --exclude /rbi diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 239dbfd3..e0849cca 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -339,6 +339,8 @@ def deconstruct_keys(keys) end class << self + # @api private + # # @param model [OpenAI::Internal::Type::BaseModel] # # @return [Hash{Symbol=>Object}] diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index e850de7b..8f14e58b 100644 --- a/lib/openai/internal/type/enum.rb +++ 
b/lib/openai/internal/type/enum.rb @@ -46,12 +46,7 @@ module Enum # All of the valid Symbol values for this enum. # # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values + def values = constants.map { const_get(_1) } # @param other [Object] # diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 0abbe3bc..d79c7182 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -19,16 +19,12 @@ module ResponsesOnlyModel COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index ba364d38..7c59f76f 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -64,18 +64,14 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :speed - # @!parse - # # @param input [String] - # # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] - # # @param instructions [String] - # # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] - # # @param speed [Float] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + # @param input [String] + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] + # @param instructions [String] + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] + # @param speed [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. @@ -87,9 +83,8 @@ module Model # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. variant enum: -> { OpenAI::Models::Audio::SpeechModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, @@ -123,9 +118,8 @@ module Voice variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -156,11 +150,8 @@ module ResponseFormat WAV = :wav PCM = :pcm - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 504b7319..39245ea8 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -10,11 +10,8 @@ module SpeechModel TTS_1_HD = :"tts-1-hd" GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index ead34e3d..0e0cb142 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -22,16 +22,12 @@ class Transcription < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Represents a transcription response returned by model, based on the provided - # # input. - # # - # # @param text [String] - # # @param logprobs [Array] - # # - # def initialize(text:, logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, logprobs: nil) + # Represents a transcription response returned by model, based on the provided + # input. + # + # @param text [String] + # @param logprobs [Array] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -64,14 +60,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index c883db5d..259a21cb 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -107,33 +107,16 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :timestamp_granularities - # @!parse - # # @param file [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::AudioModel] - # # @param include [Array] - # # @param language [String] - # # @param prompt [String] - # # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] - # # @param temperature [Float] - # # @param timestamp_granularities [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # file:, - # model:, - # include: nil, - # language: nil, - # prompt: nil, - # response_format: nil, - # temperature: nil, - # timestamp_granularities: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method 
initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @param file [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param include [Array] + # @param language [String] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] + # @param temperature [Float] + # @param timestamp_granularities [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source @@ -146,9 +129,8 @@ module Model # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). variant enum: -> { OpenAI::Models::AudioModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end module TimestampGranularity @@ -157,11 +139,8 @@ module TimestampGranularity WORD = :word SEGMENT = :segment - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 69d68b0b..52850e6a 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -18,9 +18,8 @@ module TranscriptionCreateResponse # Represents a verbose json transcription response returned by model, based on the provided input. variant -> { OpenAI::Models::Audio::TranscriptionVerbose } - # @!parse - # # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] end end end diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index 5cde821b..2351452b 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -8,11 +8,8 @@ module TranscriptionInclude LOGPROBS = :logprobs - finalize! 
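
For context on the recurring `finalize!` removals in these enum hunks: after this
change an enum's `values` is recomputed from the module's constants on every call,
so the memoized `@values` ivar (and the `finalize!` guard that pre-populated it)
is no longer needed. A minimal, self-contained sketch of the resulting pattern,
with an illustrative module name not taken from the SDK:

    module TimestampGranularitySketch
      WORD = :word
      SEGMENT = :segment

      # Reflect over the constants defined above on each call; with no cached
      # state, there is no first-call race to guard against.
      def self.values = constants.map { const_get(_1) }
    end

    TimestampGranularitySketch.values # => [:word, :segment]
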
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 852a77d1..19923d2d 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -67,35 +67,17 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @return [Array] required :tokens, OpenAI::Internal::Type::ArrayOf[Integer] - # @!parse - # # @param id [Integer] - # # @param avg_logprob [Float] - # # @param compression_ratio [Float] - # # @param end_ [Float] - # # @param no_speech_prob [Float] - # # @param seek [Integer] - # # @param start [Float] - # # @param temperature [Float] - # # @param text [String] - # # @param tokens [Array] - # # - # def initialize( - # id:, - # avg_logprob:, - # compression_ratio:, - # end_:, - # no_speech_prob:, - # seek:, - # start:, - # temperature:, - # text:, - # tokens:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) + # @param id [Integer] + # @param avg_logprob [Float] + # @param compression_ratio [Float] + # @param end_ [Float] + # @param no_speech_prob [Float] + # @param seek [Integer] + # @param start [Float] + # @param temperature [Float] + # @param text [String] + # @param tokens [Array] end end end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index de3b63d5..d4a5f12e 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -18,9 +18,8 @@ module TranscriptionStreamEvent # Emitted when the transcription is complete. Contains the complete transcription text. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. variant :"transcript.text.done", -> { OpenAI::Models::Audio::TranscriptionTextDoneEvent } - # @!parse - # # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 731bf107..f7dff312 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -29,19 +29,15 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Emitted when there is an additional text delta. This is also the first event - # # emitted when the transcription starts. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. 
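
The pattern repeated throughout these hunks: a `@!parse` block wrapping a
commented-out `def ... = super` stub is replaced by YARD's `@!method` directive,
which documents a signature that never appears as a literal `def` in the source.
A minimal sketch with illustrative names (the `define_method` call stands in for
whatever `OpenAI::Internal::Type::BaseModel` generates at runtime):

    class SketchModel
      # @!method greet(name:)
      #   Builds a greeting for the given name.
      #   @param name [String]
      #   @return [String]
      define_method(:greet) { |name:| "hello, #{name}" }
    end

    SketchModel.new.greet(name: "world") # => "hello, world"
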
- # # - # # @param delta [String] - # # @param logprobs [Array] - # # @param type [Symbol, :"transcript.text.delta"] - # # - # def initialize(delta:, logprobs: nil, type: :"transcript.text.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") + # Emitted when there is an additional text delta. This is also the first event + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + # + # @param delta [String] + # @param logprobs [Array] + # @param type [Symbol, :"transcript.text.delta"] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -74,14 +70,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index be1ee0fe..3e0fb33c 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -30,19 +30,15 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Emitted when the transcription is complete. Contains the complete transcription - # # text. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. - # # - # # @param text [String] - # # @param logprobs [Array] - # # @param type [Symbol, :"transcript.text.done"] - # # - # def initialize(text:, logprobs: nil, type: :"transcript.text.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") + # Emitted when the transcription is complete. Contains the complete transcription + # text. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. 
+ # + # @param text [String] + # @param logprobs [Array] + # @param type [Symbol, :"transcript.text.done"] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -75,14 +71,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 80068f85..f0b3f7c3 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -42,19 +42,15 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :words - # @!parse - # # Represents a verbose json transcription response returned by model, based on the - # # provided input. - # # - # # @param duration [Float] - # # @param language [String] - # # @param text [String] - # # @param segments [Array] - # # @param words [Array] - # # - # def initialize(duration:, language:, text:, segments: nil, words: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(duration:, language:, text:, segments: nil, words: nil) + # Represents a verbose json transcription response returned by model, based on the + # provided input. + # + # @param duration [Float] + # @param language [String] + # @param text [String] + # @param segments [Array] + # @param words [Array] end end end diff --git a/lib/openai/models/audio/transcription_word.rb b/lib/openai/models/audio/transcription_word.rb index 5df30687..f7f973cd 100644 --- a/lib/openai/models/audio/transcription_word.rb +++ b/lib/openai/models/audio/transcription_word.rb @@ -22,14 +22,10 @@ class TranscriptionWord < OpenAI::Internal::Type::BaseModel # @return [String] required :word, String - # @!parse - # # @param end_ [Float] - # # @param start [Float] - # # @param word [String] - # # - # def initialize(end_:, start:, word:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_:, start:, word:) + # @param end_ [Float] + # @param start [Float] + # @param word [String] end end end diff --git a/lib/openai/models/audio/translation.rb b/lib/openai/models/audio/translation.rb index c9631757..635498f8 100644 --- a/lib/openai/models/audio/translation.rb +++ b/lib/openai/models/audio/translation.rb @@ -9,12 +9,8 @@ class Translation < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!parse - # # @param text [String] - # # - # def initialize(text:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:) + # @param text [String] end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 51cc325c..7589e685 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -61,17 +61,13 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :temperature - # @!parse - # # @param file 
[Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::AudioModel] - # # @param prompt [String] - # # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] - # # @param temperature [Float] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) + # @param file [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] + # @param temperature [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. @@ -83,9 +79,8 @@ module Model # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. variant enum: -> { OpenAI::Models::AudioModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # The format of the output, in one of these options: `json`, `text`, `srt`, @@ -99,11 +94,8 @@ module ResponseFormat VERBOSE_JSON = :verbose_json VTT = :vtt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index f50f6c86..49d7cc9e 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -11,9 +11,8 @@ module TranslationCreateResponse variant -> { OpenAI::Models::Audio::TranslationVerbose } - # @!parse - # # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] end end end diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index b8a970d0..5d802ffc 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -32,15 +32,11 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :segments - # @!parse - # # @param duration [Float] - # # @param language [String] - # # @param text [String] - # # @param segments [Array] - # # - # def initialize(duration:, language:, text:, segments: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(duration:, language:, text:, segments: nil) + # @param duration [Float] + # @param language [String] + # @param text [String] + # @param segments [Array] end end end diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 4373ee6b..8e0e194e 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -9,11 +9,8 @@ module AudioModel GPT_4O_TRANSCRIBE = 
:"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 2babfc83..5644ca89 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -14,11 +14,8 @@ module AudioResponseFormat VERBOSE_JSON = :verbose_json VTT = :vtt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/auto_file_chunking_strategy_param.rb b/lib/openai/models/auto_file_chunking_strategy_param.rb index a33c9dcc..9065ad1c 100644 --- a/lib/openai/models/auto_file_chunking_strategy_param.rb +++ b/lib/openai/models/auto_file_chunking_strategy_param.rb @@ -9,15 +9,11 @@ class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. + # + # @param type [Symbol, :auto] end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index a490b124..0eb7ef45 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -175,55 +175,27 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::BatchRequestCounts] # attr_writer :request_counts - # @!parse - # # @param id [String] - # # @param completion_window [String] - # # @param created_at [Integer] - # # @param endpoint [String] - # # @param input_file_id [String] - # # @param status [Symbol, OpenAI::Models::Batch::Status] - # # @param cancelled_at [Integer] - # # @param cancelling_at [Integer] - # # @param completed_at [Integer] - # # @param error_file_id [String] - # # @param errors [OpenAI::Models::Batch::Errors] - # # @param expired_at [Integer] - # # @param expires_at [Integer] - # # @param failed_at [Integer] - # # @param finalizing_at [Integer] - # # @param in_progress_at [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param output_file_id [String] - # # @param request_counts [OpenAI::Models::BatchRequestCounts] - # # @param object [Symbol, :batch] - # # - # def initialize( - # id:, - # completion_window:, - # created_at:, - # endpoint:, - # input_file_id:, - # status:, - # cancelled_at: nil, - # cancelling_at: nil, - # completed_at: nil, - # error_file_id: nil, - # errors: nil, - # expired_at: nil, - # expires_at: nil, - # failed_at: nil, - # finalizing_at: nil, - # in_progress_at: nil, - # metadata: nil, - # output_file_id: nil, - # request_counts: nil, - # object: :batch, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, 
output_file_id: nil, request_counts: nil, object: :batch) + # @param id [String] + # @param completion_window [String] + # @param created_at [Integer] + # @param endpoint [String] + # @param input_file_id [String] + # @param status [Symbol, OpenAI::Models::Batch::Status] + # @param cancelled_at [Integer] + # @param cancelling_at [Integer] + # @param completed_at [Integer] + # @param error_file_id [String] + # @param errors [OpenAI::Models::Batch::Errors] + # @param expired_at [Integer] + # @param expires_at [Integer] + # @param failed_at [Integer] + # @param finalizing_at [Integer] + # @param in_progress_at [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param output_file_id [String] + # @param request_counts [OpenAI::Models::BatchRequestCounts] + # @param object [Symbol, :batch] # The current status of the batch. # @@ -240,11 +212,8 @@ module Status CANCELLING = :cancelling CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Batch#errors @@ -268,13 +237,9 @@ class Errors < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :object - # @!parse - # # @param data [Array] - # # @param object [String] - # # - # def initialize(data: nil, object: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data: nil, object: nil) + # @param data [Array] + # @param object [String] end end end diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 4e1871e0..60cd89b8 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -8,12 +8,8 @@ class BatchCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 95548d81..ef913352 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -49,16 +49,12 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] - # # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] - # # @param input_file_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] + # @param input_file_id [String] + # @param metadata 
[Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The time frame within which the batch should be processed. Currently only `24h` # is supported. @@ -67,11 +63,8 @@ module CompletionWindow COMPLETION_WINDOW_24H = :"24h" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The endpoint to be used for all requests in the batch. Currently @@ -86,11 +79,8 @@ module Endpoint V1_EMBEDDINGS = :"/v1/embeddings" V1_COMPLETIONS = :"/v1/completions" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 3d31ed4a..513fcaf3 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -35,15 +35,11 @@ class BatchError < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :param, String, nil?: true - # @!parse - # # @param code [String] - # # @param line [Integer, nil] - # # @param message [String] - # # @param param [String, nil] - # # - # def initialize(code: nil, line: nil, message: nil, param: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code: nil, line: nil, message: nil, param: nil) + # @param code [String] + # @param line [Integer, nil] + # @param message [String] + # @param param [String, nil] end end end diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 675cc802..ab35f805 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -32,14 +32,10 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/batch_request_counts.rb b/lib/openai/models/batch_request_counts.rb index 92ab5316..dce46757 100644 --- a/lib/openai/models/batch_request_counts.rb +++ b/lib/openai/models/batch_request_counts.rb @@ -21,16 +21,12 @@ class BatchRequestCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # The request counts for different statuses within the batch. - # # - # # @param completed [Integer] - # # @param failed [Integer] - # # @param total [Integer] - # # - # def initialize(completed:, failed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completed:, failed:, total:) + # The request counts for different statuses within the batch. 
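
Assuming the generated initializers accept the keywords that the new
`@!method initialize(...)` tags document (which is the point of the tags), a
usage sketch for the model just above; the values are illustrative:

    require "openai"

    counts = OpenAI::Models::BatchRequestCounts.new(completed: 95, failed: 5, total: 100)
    counts.total - counts.failed # => 95
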
+ # + # @param completed [Integer] + # @param failed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index fac34345..6c9e459c 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -8,12 +8,8 @@ class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index bd04e326..6f861ccc 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -123,43 +123,22 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # Represents an `assistant` that can call the model and use tools. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String, nil] - # # @param tools [Array] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] - # # @param top_p [Float, nil] - # # @param object [Symbol, :assistant] - # # - # def initialize( - # id:, - # created_at:, - # description:, - # instructions:, - # metadata:, - # model:, - # name:, - # tools:, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # top_p: nil, - # object: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) + # Represents an `assistant` that can call the model and use tools. + # + # @param id [String] + # @param created_at [Integer] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String, nil] + # @param tools [Array] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] + # @param top_p [Float, nil] + # @param object [Symbol, :assistant] # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel @@ -181,18 +160,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. 
The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. + # + # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -208,12 +183,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search @@ -231,12 +202,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 3e3fd1d0..5c2c0fbe 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -124,39 +124,19 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # @param model [String, Symbol, OpenAI::Models::ChatModel] - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # description: nil, - # instructions: nil, - # metadata: nil, - # name: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method 
initialize(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -171,9 +151,8 @@ module Model # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class ToolResources < OpenAI::Internal::Type::BaseModel @@ -196,18 +175,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -223,12 +198,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search @@ -260,13 +231,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -304,14 +271,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -336,15 +299,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -360,13 +319,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -385,19 +340,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index aed0abc7..df76595d 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -9,12 +9,8 @@ class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant_deleted.rb b/lib/openai/models/beta/assistant_deleted.rb index 61cba341..9183aceb 100644 --- a/lib/openai/models/beta/assistant_deleted.rb +++ b/lib/openai/models/beta/assistant_deleted.rb @@ -20,14 +20,10 @@ class AssistantDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"assistant.deleted"] required :object, const: :"assistant.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"assistant.deleted"] - # # - # def initialize(id:, deleted:, object: :"assistant.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, 
deleted:, object: :"assistant.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"assistant.deleted"] end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index d46562ae..c35334fc 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -57,16 +57,12 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -76,11 +72,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index f1e5fc86..8c471773 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -42,9 +42,8 @@ module AssistantResponseFormatOption # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
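
A hedged sketch of how a caller might branch on this union's variants once a
value is resolved: `in <Class>` matches via `===`, and the `:auto` member is a
bare symbol, as the variant list documents. The local variable and branch
bodies are illustrative:

    require "openai"

    response_format = :auto # or an instance of one of the variant classes

    case response_format
    in :auto
      mode = "model default"
    in OpenAI::Models::ResponseFormatJSONSchema
      mode = "structured outputs"
    in OpenAI::Models::ResponseFormatText | OpenAI::Models::ResponseFormatJSONObject
      mode = "text or JSON mode"
    end
    mode # => "model default"
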
variant -> { OpenAI::Models::ResponseFormatJSONSchema } - # @!parse - # # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] end end end diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 1a86f690..6bb8b075 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -9,12 +9,8 @@ class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 5663b14f..e8f7fefa 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -132,18 +132,14 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :enabled - # @!parse - # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Thread] - # # @param enabled [Boolean] - # # @param event [Symbol, :"thread.created"] - # # - # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Thread] + # @param enabled [Boolean] + # @param event [Symbol, :"thread.created"] end class ThreadRunCreated < OpenAI::Internal::Type::BaseModel @@ -159,16 +155,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.created"] required :event, const: :"thread.run.created" - # @!parse - # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.created"] - # # - # def initialize(data:, event: :"thread.run.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.created") + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.created"] end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -184,16 +176,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.queued"] required :event, const: :"thread.run.queued" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.queued"] - # # - # def initialize(data:, event: :"thread.run.queued", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.queued") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.queued"] end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -209,16 +197,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.in_progress"] required :event, const: :"thread.run.in_progress" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.in_progress"] - # # - # def initialize(data:, event: :"thread.run.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.in_progress") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.in_progress"] end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -234,16 +218,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.requires_action"] required :event, const: :"thread.run.requires_action" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.requires_action"] - # # - # def initialize(data:, event: :"thread.run.requires_action", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.requires_action") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.requires_action"] end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -259,16 +239,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.completed"] required :event, const: :"thread.run.completed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.completed"] - # # - # def initialize(data:, event: :"thread.run.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.completed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.completed"] end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -284,16 +260,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.incomplete"] required :event, const: :"thread.run.incomplete" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.incomplete"] - # # - # def initialize(data:, event: :"thread.run.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.incomplete") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.incomplete"] end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -309,16 +281,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.failed"] required :event, const: :"thread.run.failed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.failed"] - # # - # def initialize(data:, event: :"thread.run.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.failed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.failed"] end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -334,16 +302,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelling"] required :event, const: :"thread.run.cancelling" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelling"] - # # - # def initialize(data:, event: :"thread.run.cancelling", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelling") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelling"] end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -359,16 +323,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelled"] required :event, const: :"thread.run.cancelled" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelled"] - # # - # def initialize(data:, event: :"thread.run.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelled") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelled"] end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -384,16 +344,12 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.expired"] required :event, const: :"thread.run.expired" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.expired"] - # # - # def initialize(data:, event: :"thread.run.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.expired") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.expired"] end class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel @@ -408,17 +364,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.created"] required :event, const: :"thread.run.step.created" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.created"] - # # - # def initialize(data:, event: :"thread.run.step.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.created") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.created"] end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -433,17 +385,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.in_progress"] required :event, const: :"thread.run.step.in_progress" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.in_progress"] - # # - # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.in_progress"] end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -459,17 +407,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :event, const: :"thread.run.step.delta" - # @!parse - # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - # # @param event [Symbol, :"thread.run.step.delta"] - # # - # def initialize(data:, event: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.delta") + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param event [Symbol, :"thread.run.step.delta"] end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -484,17 +428,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.completed"] required :event, const: :"thread.run.step.completed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.completed"] - # # - # def initialize(data:, event: :"thread.run.step.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.completed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.completed"] end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -509,17 +449,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.failed"] required :event, const: :"thread.run.step.failed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.failed"] - # # - # def initialize(data:, event: :"thread.run.step.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.failed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.failed"] end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -534,17 +470,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.cancelled"] required :event, const: :"thread.run.step.cancelled" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.cancelled"] - # # - # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.cancelled"] end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -559,17 +491,13 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.expired"] required :event, const: :"thread.run.step.expired" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.expired"] - # # - # def initialize(data:, event: :"thread.run.step.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.expired") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.expired"] end class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel @@ -585,17 +513,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.created"] required :event, const: :"thread.message.created" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.created"] - # # - # def initialize(data:, event: :"thread.message.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.created") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.created"] end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -611,17 +535,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.in_progress"] required :event, const: :"thread.message.in_progress" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.in_progress"] - # # - # def initialize(data:, event: :"thread.message.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.in_progress") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.in_progress"] end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -637,17 +557,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :event, const: :"thread.message.delta" - # @!parse - # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - # # @param event [Symbol, :"thread.message.delta"] - # # - # def initialize(data:, event: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.delta") + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param event [Symbol, :"thread.message.delta"] end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -663,17 +579,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.completed"] required :event, const: :"thread.message.completed" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.completed"] - # # - # def initialize(data:, event: :"thread.message.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.completed") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.completed"] end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -689,17 +601,13 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.incomplete"] required :event, const: :"thread.message.incomplete" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.incomplete"] - # # - # def initialize(data:, event: :"thread.message.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.incomplete") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.incomplete"] end class ErrorEvent < OpenAI::Internal::Type::BaseModel @@ -713,22 +621,17 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :error] required :event, const: :error - # @!parse - # # Occurs when an - # # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. - # # This can happen due to an internal server error or a timeout. - # # - # # @param data [OpenAI::Models::ErrorObject] - # # @param event [Symbol, :error] - # # - # def initialize(data:, event: :error, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :error) + # Occurs when an + # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + # This can happen due to an internal server error or a timeout. + # + # @param data [OpenAI::Models::ErrorObject] + # @param event [Symbol, :error] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 0921b4bb..0272eb94 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -14,9 +14,8 @@ module AssistantTool variant :function, -> { OpenAI::Models::Beta::FunctionTool } - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 1dff1877..21e29156 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -19,16 +19,12 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction] # attr_writer :function - # @!parse - # # Specifies a tool the model should use. Use to force the model to call a specific - # # tool. - # # - # # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] - # # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] - # # - # def initialize(type:, function: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, function: nil) + # Specifies a tool the model should use. Use to force the model to call a specific + # tool. + # + # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] + # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] # The type of the tool. If type is `function`, the function name must be set # @@ -40,11 +36,8 @@ module Type CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search - finalize! 
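
The patch applies the same treatment to enum modules: the runtime `finalize!` call is dropped and the members are documented with `@!method self.values`. Calling the documented method works as before; a sketch, with the return value shown for illustration:

    OpenAI::Models::Beta::AssistantToolChoice::Type.values
    # => [:function, :code_interpreter, :file_search]  (order illustrative)
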
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_function.rb b/lib/openai/models/beta/assistant_tool_choice_function.rb index 19ca8d48..87065a84 100644 --- a/lib/openai/models/beta/assistant_tool_choice_function.rb +++ b/lib/openai/models/beta/assistant_tool_choice_function.rb @@ -10,12 +10,8 @@ class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # @param name [String] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 69293cbc..f9d205e3 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -30,16 +30,12 @@ module Auto AUTO = :auto REQUIRED = :required - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index e740ca15..6cb2ed05 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -128,39 +128,19 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] - # # @param name [String, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # description: nil, - # instructions: nil, - # metadata: nil, - # model: nil, - # name: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(description: nil, instructions: nil, metadata: nil, model: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, 
OpenAI::Models::Beta::AssistantUpdateParams::Model] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -244,9 +224,8 @@ module Model variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -310,18 +289,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
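
The keyword list documented on `@!method initialize` above mirrors the arguments accepted by the corresponding resource call. A hedged usage sketch; the client constructor and IDs are assumptions, not taken from this patch:

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])  # assumed constructor
    client.beta.assistants.update(
      "asst_123",                       # hypothetical assistant ID
      name: "Math Tutor",
      temperature: 0.2,
      tools: [{type: :code_interpreter}]
    )
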
+ # + # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -338,12 +313,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search @@ -361,12 +332,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/code_interpreter_tool.rb b/lib/openai/models/beta/code_interpreter_tool.rb index 3f3013ad..ffea0d31 100644 --- a/lib/openai/models/beta/code_interpreter_tool.rb +++ b/lib/openai/models/beta/code_interpreter_tool.rb @@ -10,12 +10,8 @@ class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!parse - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :code_interpreter) + # @param type [Symbol, :code_interpreter] end end end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 76343e2f..b9baee3e 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -20,13 +20,9 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch] # attr_writer :file_search - # @!parse - # # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] - # # @param type [Symbol, :file_search] - # # - # def initialize(file_search: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_search: nil, type: :file_search) + # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] + # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel @@ -62,15 +58,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] # attr_writer :ranking_options - # @!parse - # # Overrides for the file search tool. - # # - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] - # # - # def initialize(max_num_results: nil, ranking_options: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(max_num_results: nil, ranking_options: nil) + # Overrides for the file search tool. 
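
Putting the fields documented here together, a complete file search tool override could look like the following sketch (the numeric values are illustrative):

    tool = {
      type: :file_search,
      file_search: {
        max_num_results: 10,
        ranking_options: {ranker: :auto, score_threshold: 0.5}
      }
    }
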
+ # + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -92,20 +84,16 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] # attr_writer :ranker - # @!parse - # # The ranking options for the file search. If not specified, the file search tool - # # will use the `auto` ranker and a score_threshold of 0. - # # - # # See the - # # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # # for more information. - # # - # # @param score_threshold [Float] - # # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] - # # - # def initialize(score_threshold:, ranker: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(score_threshold:, ranker: nil) + # The ranking options for the file search. If not specified, the file search tool + # will use the `auto` ranker and a score_threshold of 0. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + # + # @param score_threshold [Float] + # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] # The ranker to use for the file search. If not specified will use the `auto` # ranker. @@ -117,11 +105,8 @@ module Ranker AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index ed879754..bce8c29a 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -15,13 +15,9 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param function [OpenAI::Models::FunctionDefinition] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # @param function [OpenAI::Models::FunctionDefinition] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 09590507..74bb507a 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -42,17 +42,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.created"] required :event, const: :"thread.message.created" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.created"] - # # - # def initialize(data:, event: :"thread.message.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.created") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.created"] end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -68,17 +64,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.in_progress"] required :event, const: :"thread.message.in_progress" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.in_progress"] - # # - # def initialize(data:, event: :"thread.message.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.in_progress") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.in_progress"] end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -94,17 +86,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :event, const: :"thread.message.delta" - # @!parse - # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - # # @param event [Symbol, :"thread.message.delta"] - # # - # def initialize(data:, event: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.delta") + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param event [Symbol, :"thread.message.delta"] end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -120,17 +108,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.completed"] required :event, const: :"thread.message.completed" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.completed"] - # # - # def initialize(data:, event: :"thread.message.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.completed") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. 
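
Each variant pairs a fixed `event` constant with a typed `data` payload, so consumers can branch on the variant class. A sketch, assuming `event` is a message stream event already parsed by the SDK and the handlers are placeholders:

    case event
    in OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta
      handle_delta(event.data)     # a MessageDeltaEvent payload
    in OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted
      handle_message(event.data)   # a full Message payload
    else
      # other message lifecycle events
    end
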
+ # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.completed"] end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -146,22 +130,17 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.incomplete"] required :event, const: :"thread.message.incomplete" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.incomplete"] - # # - # def initialize(data:, event: :"thread.message.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.incomplete") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.incomplete"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 444add26..e312bf45 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -47,17 +47,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.created"] required :event, const: :"thread.run.step.created" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.created"] - # # - # def initialize(data:, event: :"thread.run.step.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.created") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.created"] end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -72,17 +68,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.in_progress"] required :event, const: :"thread.run.step.in_progress" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.in_progress"] - # # - # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.in_progress"] end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -98,17 +90,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :event, const: :"thread.run.step.delta" - # @!parse - # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - # # @param event [Symbol, :"thread.run.step.delta"] - # # - # def initialize(data:, event: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.delta") + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param event [Symbol, :"thread.run.step.delta"] end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -123,17 +111,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.completed"] required :event, const: :"thread.run.step.completed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.completed"] - # # - # def initialize(data:, event: :"thread.run.step.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.completed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.completed"] end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -148,17 +132,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.failed"] required :event, const: :"thread.run.step.failed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.failed"] - # # - # def initialize(data:, event: :"thread.run.step.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.failed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.failed"] end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -173,17 +153,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.cancelled"] required :event, const: :"thread.run.step.cancelled" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.cancelled"] - # # - # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.cancelled"] end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -198,22 +174,17 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.expired"] required :event, const: :"thread.run.step.expired" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.expired"] - # # - # def initialize(data:, event: :"thread.run.step.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.expired") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.expired"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 90552346..33a63272 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -54,16 +54,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.created"] required :event, const: :"thread.run.created" - # @!parse - # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.created"] - # # - # def initialize(data:, event: :"thread.run.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.created") + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.created"] end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -79,16 +75,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.queued"] required :event, const: :"thread.run.queued" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.queued"] - # # - # def initialize(data:, event: :"thread.run.queued", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.queued") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.queued"] end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -104,16 +96,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.in_progress"] required :event, const: :"thread.run.in_progress" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.in_progress"] - # # - # def initialize(data:, event: :"thread.run.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.in_progress") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.in_progress"] end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -129,16 +117,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.requires_action"] required :event, const: :"thread.run.requires_action" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.requires_action"] - # # - # def initialize(data:, event: :"thread.run.requires_action", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.requires_action") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.requires_action"] end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -154,16 +138,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.completed"] required :event, const: :"thread.run.completed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.completed"] - # # - # def initialize(data:, event: :"thread.run.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.completed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.completed"] end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -179,16 +159,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.incomplete"] required :event, const: :"thread.run.incomplete" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.incomplete"] - # # - # def initialize(data:, event: :"thread.run.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.incomplete") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.incomplete"] end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -204,16 +180,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.failed"] required :event, const: :"thread.run.failed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.failed"] - # # - # def initialize(data:, event: :"thread.run.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.failed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.failed"] end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -229,16 +201,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelling"] required :event, const: :"thread.run.cancelling" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelling"] - # # - # def initialize(data:, event: :"thread.run.cancelling", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelling") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelling"] end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -254,16 +222,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelled"] required :event, const: :"thread.run.cancelled" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelled"] - # # - # def initialize(data:, event: :"thread.run.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelled") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelled"] end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -279,21 +243,16 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.expired"] required :event, const: :"thread.run.expired" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. 
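
Because the `event` constant is part of each model, dispatching on the symbol works just as well as matching on the class. A sketch over an assumed `events` enumerable of parsed run events; `Run#last_error` is assumed from the Run model, not shown in this patch:

    events.each do |ev|
      case ev.event
      in :"thread.run.completed"
        puts "run finished"
      in :"thread.run.failed"
        warn ev.data.last_error&.message  # assumed Run attribute
      else
        # ignore the remaining lifecycle transitions
      end
    end
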
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.expired"] - # # - # def initialize(data:, event: :"thread.run.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.expired") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.expired"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] end end end diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 5f08e2ff..bdb79d97 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -43,19 +43,15 @@ class Thread < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::Thread::ToolResources, nil] required :tool_resources, -> { OpenAI::Models::Beta::Thread::ToolResources }, nil?: true - # @!parse - # # Represents a thread that contains - # # [messages](https://platform.openai.com/docs/api-reference/messages). - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] - # # @param object [Symbol, :thread] - # # - # def initialize(id:, created_at:, metadata:, tool_resources:, object: :thread, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread) + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). + # + # @param id [String] + # @param created_at [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] + # @param object [Symbol, :thread] # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel @@ -77,18 +73,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. 
The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + # + # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -104,12 +96,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search @@ -127,12 +115,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4612ebd8..c87d75f8 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -171,47 +171,23 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy }, nil?: true - # @!parse - # # @param assistant_id [String] - # # @param instructions [String, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # # @param parallel_tool_calls [Boolean] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - # # @param tools [Array, nil] - # # @param top_p [Float, nil] - # # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] - # # @param request_options [OpenAI::RequestOptions, 
Hash{Symbol=>Object}] - # # - # def initialize( - # assistant_id:, - # instructions: nil, - # max_completion_tokens: nil, - # max_prompt_tokens: nil, - # metadata: nil, - # model: nil, - # parallel_tool_calls: nil, - # response_format: nil, - # temperature: nil, - # thread: nil, - # tool_choice: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # truncation_strategy: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @param assistant_id [String] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the @@ -225,9 +201,8 @@ module Model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class Thread < OpenAI::Internal::Type::BaseModel @@ -265,17 +240,13 @@ class Thread < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources }, nil?: true - # @!parse - # # Options to create a new thread. If no thread is provided when running a request, - # # an empty thread will be created. - # # - # # @param messages [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] - # # - # def initialize(messages: nil, metadata: nil, tool_resources: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) + # Options to create a new thread. If no thread is provided when running a request, + # an empty thread will be created. 
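
    To make the long keyword signature documented earlier in this hunk concrete, here is a
    minimal sketch of a params hash with an inline `thread`; the assistant ID is hypothetical,
    and the `create_and_run` resource method is assumed to exist elsewhere in this SDK:

        params = {
          assistant_id: "asst_abc123",   # hypothetical ID
          thread: {
            messages: [{role: :user, content: "Summarize the attached report."}]
          },
          max_completion_tokens: 512
        }
        # client.beta.threads.create_and_run(**params)   # assumed resource method
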
+ # + # @param messages [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -314,15 +285,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -336,9 +303,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -358,11 +324,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -387,13 +350,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -412,17 +371,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] end end end @@ -449,18 +403,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
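
    A sketch of the `tool_resources` shape this documents; both IDs are placeholders:

        tool_resources = {
          code_interpreter: {file_ids: ["file-abc123"]},   # files for the code_interpreter tool
          file_search: {vector_store_ids: ["vs_abc123"]}   # vector stores for the file_search tool
        }
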
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -476,12 +426,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search @@ -513,13 +459,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -557,14 +499,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -589,15 +527,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
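
    Given the defaults quoted above, `auto` currently behaves like the explicit `static`
    strategy defined just below, with an 800-token chunk size and 400-token overlap;
    a sketch of both spellings:

        auto_strategy = {type: :auto}

        # Today's explicit equivalent of `auto`, per the documented defaults:
        static_strategy = {
          type: :static,
          static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
        }
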
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -613,13 +547,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -638,19 +568,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end @@ -677,18 +602,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
+ #
+ # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter]
+ # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch]

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter
class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -704,12 +625,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
# # @return [Array]
# attr_writer :file_ids

- # @!parse
- # # @param file_ids [Array]
- # #
- # def initialize(file_ids: nil, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(file_ids: nil)
+ # @param file_ids [Array]
end

# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search
@@ -727,12 +644,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
# # @return [Array]
# attr_writer :vector_store_ids

- # @!parse
- # # @param vector_store_ids [Array]
- # #
- # def initialize(vector_store_ids: nil, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(vector_store_ids: nil)
+ # @param vector_store_ids [Array]
end
end

@@ -745,9 +658,8 @@ module Tool

variant -> { OpenAI::Models::Beta::FunctionTool }

- # @!parse
- # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)]
- # def self.variants; end
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)]
end

class TruncationStrategy < OpenAI::Internal::Type::BaseModel
@@ -767,16 +679,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
# @return [Integer, nil]
optional :last_messages, Integer, nil?: true

- # @!parse
- # # Controls for how a thread will be truncated prior to the run. Use this to
- # # control the intial context window of the run.
- # #
- # # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type]
- # # @param last_messages [Integer, nil]
- # #
- # def initialize(type:, last_messages: nil, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(type:, last_messages: nil)
+ # Controls for how a thread will be truncated prior to the run. Use this to
+ # control the initial context window of the run.
+ #
+ # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type]
+ # @param last_messages [Integer, nil]

# The truncation strategy to use for the thread. The default is `auto`. If set to
# `last_messages`, the thread will be truncated to the n most recent messages in
@@ -790,11 +698,8 @@ module Type

AUTO = :auto
LAST_MESSAGES = :last_messages

- finalize!
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 08818856..85033056 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -41,15 +41,11 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources }, nil?: true - # @!parse - # # @param messages [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) + # @param messages [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -88,15 +84,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -110,9 +102,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -132,11 +123,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -161,13 +149,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -186,17 +170,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] end end end @@ -221,18 +200,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
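
    A sketch of the `Attachment` shape documented above; the file ID is hypothetical, and
    `OpenAI::Models::Beta::CodeInterpreterTool` is assumed to serialize as
    `{type: :code_interpreter}`:

        attachment = {
          file_id: "file-abc123",
          tools: [{type: :code_interpreter}, {type: :file_search}]   # the two union variants
        }
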
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -248,12 +223,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search @@ -285,13 +256,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -329,14 +296,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -361,15 +324,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
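
    A sketch of creating a vector store inline from the fields above; the file ID is a
    placeholder, and the default `auto` chunking is spelled out for clarity:

        file_search = {
          vector_stores: [
            {file_ids: ["file-abc123"], chunking_strategy: {type: :auto}}
          ]
        }
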
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -385,13 +344,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -410,19 +365,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index aa6b5341..2071367c 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -9,12 +9,8 @@ class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/thread_deleted.rb b/lib/openai/models/beta/thread_deleted.rb index 6bd4b0db..862e25fc 100644 --- a/lib/openai/models/beta/thread_deleted.rb +++ b/lib/openai/models/beta/thread_deleted.rb @@ -20,14 +20,10 @@ class ThreadDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.deleted"] required :object, const: :"thread.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"thread.deleted"] - # # - # def initialize(id:, deleted:, object: :"thread.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"thread.deleted") + # @param id [String] + # 
@param deleted [Boolean] + # @param object [Symbol, :"thread.deleted"] end end end diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index db2128ba..663f5488 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -9,12 +9,8 @@ class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index e67ecf5f..d7dee4f7 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -26,18 +26,14 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :enabled - # @!parse - # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Thread] - # # @param enabled [Boolean] - # # @param event [Symbol, :"thread.created"] - # # - # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. 
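
    A sketch of consuming this event, assuming `event` is an instance of the model above:

        if event.event == :"thread.created"
          thread = event.data                    # => OpenAI::Models::Beta::Thread
          puts "new thread: #{thread.id}"
        end
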
+ # + # @param data [OpenAI::Models::Beta::Thread] + # @param enabled [Boolean] + # @param event [Symbol, :"thread.created"] end end end diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 07938465..97527fbf 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -29,14 +29,10 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources }, nil?: true - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata: nil, tool_resources: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter @@ -58,18 +54,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
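
    A sketch of an update payload built from the fields above; the vector store ID is
    hypothetical:

        update = {
          metadata: {project: "quarterly-report"},
          tool_resources: {file_search: {vector_store_ids: ["vs_abc123"]}}
        }
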
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -85,12 +77,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search @@ -108,12 +96,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 10511e12..b5adaf6d 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -18,9 +18,8 @@ module Annotation # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 726c91ad..c6ed7bc1 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -18,9 +18,8 @@ module AnnotationDelta # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index cf4eab2d..77b9e19d 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -32,20 +32,16 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. 
- # # - # # @param end_index [Integer] - # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_citation] - # # - # def initialize(end_index:, file_citation:, start_index:, text:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, file_citation:, start_index:, text:, type: :file_citation) + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. + # + # @param end_index [Integer] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_citation] # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -55,12 +51,8 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index d1ac99c2..66b0623a 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -54,21 +54,17 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. - # # - # # @param index [Integer] - # # @param end_index [Integer] - # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_citation] - # # - # def initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation) + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. 
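
    A sketch of resolving such a citation, assuming `annotation` is an instance of the model
    above and `text_value` holds the raw message text:

        resolved = text_value.dup
        resolved[annotation.start_index...annotation.end_index] =
          "[#{annotation.file_citation.file_id}]"   # substitute the inline marker
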
+ # + # @param index [Integer] + # @param end_index [Integer] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_citation] # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -92,13 +88,9 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :quote - # @!parse - # # @param file_id [String] - # # @param quote [String] - # # - # def initialize(file_id: nil, quote: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, quote: nil) + # @param file_id [String] + # @param quote [String] end end end diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index c1a51ef6..91c2a6fd 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -32,19 +32,15 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. - # # - # # @param end_index [Integer] - # # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_path] - # # - # def initialize(end_index:, file_path:, start_index:, text:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, file_path:, start_index:, text:, type: :file_path) + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. + # + # @param end_index [Integer] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_path] # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -54,12 +50,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 975bd449..065e7eab 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -54,20 +54,16 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. 
- # # - # # @param index [Integer] - # # @param end_index [Integer] - # # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_path] - # # - # def initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path) + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. + # + # @param index [Integer] + # @param end_index [Integer] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_path] # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -81,12 +77,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 631803f6..61b60dc0 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -24,13 +24,9 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] # attr_writer :detail - # @!parse - # # @param file_id [String] - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] - # # - # def initialize(file_id:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, detail: nil) + # @param file_id [String] + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -43,11 +39,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 4a5a487e..2bcba265 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -16,16 +16,12 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_file] required :type, const: :image_file - # @!parse - # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. 
- # # - # # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] - # # @param type [Symbol, :image_file] - # # - # def initialize(image_file:, type: :image_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_file:, type: :image_file) + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. + # + # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] + # @param type [Symbol, :image_file] end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 219cfba9..117dd1c1 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -28,13 +28,9 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] - # # @param file_id [String] - # # - # def initialize(detail: nil, file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail: nil, file_id: nil) + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] + # @param file_id [String] # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -47,11 +43,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 9a5625ad..a4abc497 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -26,17 +26,13 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta] # attr_writer :image_file - # @!parse - # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. - # # - # # @param index [Integer] - # # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] - # # @param type [Symbol, :image_file] - # # - # def initialize(index:, image_file: nil, type: :image_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image_file: nil, type: :image_file) + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. 
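
    A sketch of an `image_file` content part as it would appear in message content; the
    file ID is hypothetical, and `detail: :low` trades resolution for fewer tokens:

        image_part = {
          type: :image_file,
          image_file: {file_id: "file-abc123", detail: :low}
        }
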
+ # + # @param index [Integer] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] + # @param type [Symbol, :image_file] end end end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index a76f8467..14266c31 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -23,13 +23,9 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] # attr_writer :detail - # @!parse - # # @param url [String] - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] - # # - # def initialize(url:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:, detail: nil) + # @param url [String] + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` @@ -42,11 +38,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index 326f1c1d..b44975eb 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -16,15 +16,11 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # References an image URL in the content of a message. - # # - # # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # References an image URL in the content of a message. + # + # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] + # @param type [Symbol, :image_url] end end end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 4ae3e547..3b2f4eab 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -27,13 +27,9 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :url - # @!parse - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] - # # @param url [String] - # # - # def initialize(detail: nil, url: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail: nil, url: nil) + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] + # @param url [String] # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. @@ -46,11 +42,8 @@ module Detail LOW = :low HIGH = :high - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index 950a87c1..4f7b9a82 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -26,16 +26,12 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta] # attr_writer :image_url - # @!parse - # # References an image URL in the content of a message. - # # - # # @param index [Integer] - # # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] - # # @param type [Symbol, :image_url] - # # - # def initialize(index:, image_url: nil, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image_url: nil, type: :image_url) + # References an image URL in the content of a message. + # + # @param index [Integer] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] + # @param type [Symbol, :image_url] end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 355140de..90cf7aba 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -104,46 +104,24 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # Represents a message within a - # # [thread](https://platform.openai.com/docs/api-reference/threads). - # # - # # @param id [String] - # # @param assistant_id [String, nil] - # # @param attachments [Array, nil] - # # @param completed_at [Integer, nil] - # # @param content [Array] - # # @param created_at [Integer] - # # @param incomplete_at [Integer, nil] - # # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] - # # @param run_id [String, nil] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] - # # @param thread_id [String] - # # @param object [Symbol, :"thread.message"] - # # - # def initialize( - # id:, - # assistant_id:, - # attachments:, - # completed_at:, - # content:, - # created_at:, - # incomplete_at:, - # incomplete_details:, - # metadata:, - # role:, - # run_id:, - # status:, - # thread_id:, - # object: :"thread.message", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message") + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
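
    A sketch of pulling the text blocks out of a message's mixed content; `message` is
    assumed to be an instance of the model above, and the `text.value` accessor on
    `TextContentBlock` is assumed from elsewhere in this SDK:

        texts = message.content.filter_map do |part|
          part.text.value if part.is_a?(OpenAI::Models::Beta::Threads::TextContentBlock)
        end
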
+ # + # @param id [String] + # @param assistant_id [String, nil] + # @param attachments [Array, nil] + # @param completed_at [Integer, nil] + # @param content [Array] + # @param created_at [Integer] + # @param incomplete_at [Integer, nil] + # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] + # @param run_id [String, nil] + # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] + # @param thread_id [String] + # @param object [Symbol, :"thread.message"] class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id @@ -167,13 +145,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -189,17 +163,12 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] end end @@ -211,14 +180,10 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] required :reason, enum: -> { OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason } - # @!parse - # # On an incomplete message, details about why the message is incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] - # # - # def initialize(reason:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason:) + # On an incomplete message, details about why the message is incomplete. + # + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] # The reason the message is incomplete. # @@ -232,11 +197,8 @@ module Reason RUN_EXPIRED = :run_expired RUN_FAILED = :run_failed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -249,11 +211,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -267,11 +226,8 @@ module Status INCOMPLETE = :incomplete COMPLETED = :completed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index b6ed040c..dbc60f62 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -23,9 +23,8 @@ module MessageContent # The refusal content generated by the assistant. variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalContentBlock } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index a69eadda..744d6ed2 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -23,9 +23,8 @@ module MessageContentDelta # References an image URL in the content of a message. variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDeltaBlock } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index faab6ef1..9310f8bb 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -20,9 +20,8 @@ module MessageContentPartParam # The text content that is part of a message. 
variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlockParam } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index ea41994f..081a69c8 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -46,16 +46,12 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil, request_options: {}) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The text contents of the message. module Content @@ -67,9 +63,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -87,11 +82,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -116,13 +108,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -141,17 +129,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] end end end diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 759a3108..3625e9b1 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -15,13 +15,9 @@ class MessageDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/message_deleted.rb b/lib/openai/models/beta/threads/message_deleted.rb index 07513949..42e56be5 100644 --- a/lib/openai/models/beta/threads/message_deleted.rb +++ b/lib/openai/models/beta/threads/message_deleted.rb @@ -21,14 +21,10 @@ class MessageDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.deleted"] required :object, const: :"thread.message.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"thread.message.deleted"] - # # - # def initialize(id:, deleted:, object: :"thread.message.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"thread.message.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"thread.message.deleted"] end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index b63f29bf..9458a9bc 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -26,15 +26,11 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel # # @return [Symbol, 
OpenAI::Models::Beta::Threads::MessageDelta::Role] # attr_writer :role - # @!parse - # # The delta containing the fields that have changed on the Message. - # # - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # The delta containing the fields that have changed on the Message. + # + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] # The entity that produced the message. One of `user` or `assistant`. # @@ -45,11 +41,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 0e313696..e935e3bd 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -23,17 +23,13 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :object, const: :"thread.message.delta" - # @!parse - # # Represents a message delta i.e. any changed fields on a message during - # # streaming. - # # - # # @param id [String] - # # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] - # # @param object [Symbol, :"thread.message.delta"] - # # - # def initialize(id:, delta:, object: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, delta:, object: :"thread.message.delta") + # Represents a message delta i.e. any changed fields on a message during + # streaming. + # + # @param id [String] + # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] + # @param object [Symbol, :"thread.message.delta"] end end end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index b30bcf8a..1fffd076 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -68,17 +68,13 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :run_id - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] - # # @param run_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -88,11 +84,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index e2bc129b..10e58171 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -15,13 +15,9 @@ class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index b02d01a6..568cc684 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -26,14 +26,10 @@ class MessageUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param thread_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, metadata: nil, request_options: {}) + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/refusal_content_block.rb b/lib/openai/models/beta/threads/refusal_content_block.rb index 58ab5010..fcdce182 100644 --- a/lib/openai/models/beta/threads/refusal_content_block.rb +++ b/lib/openai/models/beta/threads/refusal_content_block.rb @@ -16,15 +16,11 @@ class RefusalContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # The refusal content generated by the assistant. - # # - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # The refusal content generated by the assistant. + # + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb index e762e97d..dcc1bd21 100644 --- a/lib/openai/models/beta/threads/refusal_delta_block.rb +++ b/lib/openai/models/beta/threads/refusal_delta_block.rb @@ -26,16 +26,12 @@ class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :refusal - # @!parse - # # The refusal content that is part of a message. 
- # # - # # @param index [Integer] - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(index:, refusal: nil, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, refusal: nil, type: :refusal) + # The refusal content that is part of a message. + # + # @param index [Integer] + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index bd40aadc..7db514ed 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -27,16 +27,12 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Tool call objects - # # - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # Tool call objects + # + # @param id [String] + # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -52,15 +48,11 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # The function definition. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # The function definition. + # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index c6436c0d..fb23679b 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -219,72 +219,37 @@ class Run < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # Represents an execution run on a - # # [thread](https://platform.openai.com/docs/api-reference/threads). 
- # # - # # @param id [String] - # # @param assistant_id [String] - # # @param cancelled_at [Integer, nil] - # # @param completed_at [Integer, nil] - # # @param created_at [Integer] - # # @param expires_at [Integer, nil] - # # @param failed_at [Integer, nil] - # # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] - # # @param instructions [String] - # # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param parallel_tool_calls [Boolean] - # # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param started_at [Integer, nil] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] - # # @param thread_id [String] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tools [Array] - # # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] - # # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] - # # @param temperature [Float, nil] - # # @param top_p [Float, nil] - # # @param object [Symbol, :"thread.run"] - # # - # def initialize( - # id:, - # assistant_id:, - # cancelled_at:, - # completed_at:, - # created_at:, - # expires_at:, - # failed_at:, - # incomplete_details:, - # instructions:, - # last_error:, - # max_completion_tokens:, - # max_prompt_tokens:, - # metadata:, - # model:, - # parallel_tool_calls:, - # required_action:, - # response_format:, - # started_at:, - # status:, - # thread_id:, - # tool_choice:, - # tools:, - # truncation_strategy:, - # usage:, - # temperature: nil, - # top_p: nil, - # object: :"thread.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run") + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # + # @param id [String] + # @param assistant_id [String] + # @param cancelled_at [Integer, nil] + # @param completed_at [Integer, nil] + # @param created_at [Integer] + # @param expires_at [Integer, nil] + # @param failed_at [Integer, nil] + # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] + # @param instructions [String] + # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param parallel_tool_calls [Boolean] + # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param started_at [Integer, nil] + # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] + # @param thread_id [String] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tools [Array] + # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] + # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param object [Symbol, :"thread.run"] # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel @@ -299,15 +264,11 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] # attr_writer :reason - # @!parse - # # Details on why the run is incomplete. Will be `null` if the run is not - # # incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] - # # - # def initialize(reason: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason: nil) + # Details on why the run is incomplete. Will be `null` if the run is not + # incomplete. + # + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -319,11 +280,8 @@ module Reason MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -341,15 +299,11 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this run. Will be `null` if there are no errors. - # # - # # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this run. Will be `null` if there are no errors. + # + # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] + # @param message [String] # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. 
# @@ -361,11 +315,8 @@ module Code RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -384,16 +335,12 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :submit_tool_outputs] required :type, const: :submit_tool_outputs - # @!parse - # # Details on the action required to continue the run. Will be `null` if no action - # # is required. - # # - # # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] - # # @param type [Symbol, :submit_tool_outputs] - # # - # def initialize(submit_tool_outputs:, type: :submit_tool_outputs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(submit_tool_outputs:, type: :submit_tool_outputs) + # Details on the action required to continue the run. Will be `null` if no action + # is required. + # + # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] + # @param type [Symbol, :submit_tool_outputs] # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel @@ -404,14 +351,10 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } - # @!parse - # # Details on the tool outputs needed for this run to continue. - # # - # # @param tool_calls [Array] - # # - # def initialize(tool_calls:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls:) + # Details on the tool outputs needed for this run to continue. + # + # @param tool_calls [Array] end end @@ -433,16 +376,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :last_messages, Integer, nil?: true - # @!parse - # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. - # # - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] - # # @param last_messages [Integer, nil] - # # - # def initialize(type:, last_messages: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, last_messages: nil) + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] + # @param last_messages [Integer, nil] # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -456,11 +395,8 @@ module Type AUTO = :auto LAST_MESSAGES = :last_messages - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -484,17 +420,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Usage statistics related to the run. This value will be `null` if the run is not - # # in a terminal state (i.e. `in_progress`, `queued`, etc.). 
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:) + # Usage statistics related to the run. This value will be `null` if the run is not + # in a terminal state (i.e. `in_progress`, `queued`, etc.). + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 0e9b76a0..6067a1a4 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -15,13 +15,9 @@ class RunCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index a058e78e..8ae80de8 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -197,51 +197,25 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy }, nil?: true - # @!parse - # # @param assistant_id [String] - # # @param include [Array] - # # @param additional_instructions [String, nil] - # # @param additional_messages [Array, nil] - # # @param instructions [String, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # # @param parallel_tool_calls [Boolean] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tools [Array, nil] - # # @param top_p [Float, nil] - # # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # assistant_id:, - # include: nil, - # additional_instructions: nil, - # additional_messages: nil, - # instructions: nil, - # max_completion_tokens: nil, - # max_prompt_tokens: nil, - # metadata: nil, - # model: nil, - # parallel_tool_calls: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_choice: nil, - # tools: nil, - # top_p: nil, - # truncation_strategy: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | 
OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @param assistant_id [String] + # @param include [Array] + # @param additional_instructions [String, nil] + # @param additional_messages [Array, nil] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -281,15 +255,11 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -303,9 +273,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -325,11 +294,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -354,13 +320,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -379,17 +341,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] end end end @@ -406,9 +363,8 @@ module Model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -428,16 +384,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :last_messages, Integer, nil?: true - # @!parse - # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. - # # - # # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] - # # @param last_messages [Integer, nil] - # # - # def initialize(type:, last_messages: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, last_messages: nil) + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + # @param last_messages [Integer, nil] # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -451,11 +403,8 @@ module Type AUTO = :auto LAST_MESSAGES = :last_messages - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 3d4e377b..dd2e424a 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -58,16 +58,12 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -77,11 +73,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index ca7aaf1d..464d303b 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -15,13 +15,9 @@ class RunRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index d17f7dc6..cde93d93 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -20,11 +20,8 @@ module RunStatus INCOMPLETE = :incomplete EXPIRED = :expired - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 2243cb1d..16181aa3 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -24,14 +24,10 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel required :tool_outputs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } - # @!parse - # # @param thread_id [String] - # # @param tool_outputs [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, tool_outputs:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, tool_outputs:, request_options: {}) + # @param thread_id [String] + # @param tool_outputs [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] output @@ -55,13 +51,9 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :tool_call_id - # @!parse - # # @param output [String] - # # @param tool_call_id [String] - # # - # def initialize(output: nil, tool_call_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(output: nil, tool_call_id: nil) + # @param output [String] + # @param tool_call_id [String] end end end diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 68b6536b..ddfe60d0 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -26,14 +26,10 @@ class RunUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param thread_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, metadata: nil, request_options: {}) + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb index 9d329390..e1c7ac7f 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb @@ -28,16 +28,12 @@ class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :logs - # @!parse - # # Text output from the Code Interpreter tool call as part of a run step. 
- # # - # # @param index [Integer] - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(index:, logs: nil, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, logs: nil, type: :logs) + # Text output from the Code Interpreter tool call as part of a run step. + # + # @param index [Integer] + # @param logs [String] + # @param type [Symbol, :logs] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index ab77db4e..d46e7d33 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -27,14 +27,10 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] # attr_writer :image - # @!parse - # # @param index [Integer] - # # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] - # # @param type [Symbol, :image] - # # - # def initialize(index:, image: nil, type: :image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image: nil, type: :image) + # @param index [Integer] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] + # @param type [Symbol, :image] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel @@ -49,12 +45,8 @@ class Image < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 60262e17..8c7683ea 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -26,16 +26,12 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!parse - # # Details of the Code Interpreter tool call the run step was involved in. - # # - # # @param id [String] - # # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(id:, code_interpreter:, type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code_interpreter:, type: :code_interpreter) + # Details of the Code Interpreter tool call the run step was involved in. 
+ # + # @param id [String] + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] + # @param type [Symbol, :code_interpreter] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -54,15 +50,11 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel required :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } - # @!parse - # # The Code Interpreter tool call definition. - # # - # # @param input [String] - # # @param outputs [Array] - # # - # def initialize(input:, outputs:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, outputs:) + # The Code Interpreter tool call definition. + # + # @param input [String] + # @param outputs [Array] # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -90,15 +82,11 @@ class Logs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!parse - # # Text output from the Code Interpreter tool call as part of a run step. - # # - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(logs:, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(logs:, type: :logs) + # Text output from the Code Interpreter tool call as part of a run step. + # + # @param logs [String] + # @param type [Symbol, :logs] end class Image < OpenAI::Internal::Type::BaseModel @@ -114,13 +102,9 @@ class Image < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image] required :type, const: :image - # @!parse - # # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] - # # @param type [Symbol, :image] - # # - # def initialize(image:, type: :image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, type: :image) + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @param type [Symbol, :image] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::Internal::Type::BaseModel @@ -131,18 +115,13 @@ class Image < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index a9cbef61..3b041752 100644 --- 
a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -40,17 +40,13 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] # attr_writer :code_interpreter - # @!parse - # # Details of the Code Interpreter tool call the run step was involved in. - # # - # # @param index [Integer] - # # @param id [String] - # # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) + # Details of the Code Interpreter tool call the run step was involved in. + # + # @param index [Integer] + # @param id [String] + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] + # @param type [Symbol, :code_interpreter] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -77,15 +73,11 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :outputs - # @!parse - # # The Code Interpreter tool call definition. - # # - # # @param input [String] - # # @param outputs [Array] - # # - # def initialize(input: nil, outputs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input: nil, outputs: nil) + # The Code Interpreter tool call definition. + # + # @param input [String] + # @param outputs [Array] # Text output from the Code Interpreter tool call as part of a run step. 
module Output @@ -98,9 +90,8 @@ module Output variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 3c6c423a..9ab8ead6 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -25,14 +25,10 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param id [String] - # # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] - # # @param type [Symbol, :file_search] - # # - # def initialize(id:, file_search:, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, file_search:, type: :file_search) + # @param id [String] + # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] + # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel @@ -58,15 +54,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :results - # @!parse - # # For now, this is always going to be an empty object. - # # - # # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] - # # @param results [Array] - # # - # def initialize(ranking_options: nil, results: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranking_options: nil, results: nil) + # For now, this is always going to be an empty object. + # + # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] + # @param results [Array] # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -85,15 +77,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float] required :score_threshold, Float - # @!parse - # # The ranking options for the file search. - # # - # # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker:, score_threshold:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker:, score_threshold:) + # The ranking options for the file search. + # + # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] + # @param score_threshold [Float] # The ranker to use for the file search. If not specified will use the `auto` # ranker. @@ -105,11 +93,8 @@ module Ranker AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -145,17 +130,13 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :content - # @!parse - # # A result instance of the file search. - # # - # # @param file_id [String] - # # @param file_name [String] - # # @param score [Float] - # # @param content [Array] - # # - # def initialize(file_id:, file_name:, score:, content: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, file_name:, score:, content: nil) + # A result instance of the file search. + # + # @param file_id [String] + # @param file_name [String] + # @param score [Float] + # @param content [Array] class Content < OpenAI::Internal::Type::BaseModel # @!attribute [r] text @@ -179,13 +160,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] # attr_writer :type - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] - # # - # def initialize(text: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text: nil, type: nil) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] # The type of the content. # @@ -195,11 +172,8 @@ module Type TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index 4fa2dc05..c4e4e5e3 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -35,15 +35,11 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :id - # @!parse - # # @param file_search [Object] - # # @param index [Integer] - # # @param id [String] - # # @param type [Symbol, :file_search] - # # - # def initialize(file_search:, index:, id: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_search:, index:, id: nil, type: :file_search) + # @param file_search [Object] + # @param index [Integer] + # @param id [String] + # @param type [Symbol, :file_search] end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index cc1eb09a..eb5ccad9 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -25,14 +25,10 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # @param id [String] + # @param function 
[OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -56,16 +52,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :output, String, nil?: true - # @!parse - # # The definition of the function that was called. - # # - # # @param arguments [String] - # # @param name [String] - # # @param output [String, nil] - # # - # def initialize(arguments:, name:, output:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:, output:) + # The definition of the function that was called. + # + # @param arguments [String] + # @param name [String] + # @param output [String, nil] end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 6a164004..fe7116d5 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -39,15 +39,11 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] # attr_writer :function - # @!parse - # # @param index [Integer] - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] - # # @param type [Symbol, :function] - # # - # def initialize(index:, id: nil, function: nil, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, function: nil, type: :function) + # @param index [Integer] + # @param id [String] + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel @@ -79,16 +75,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :output, String, nil?: true - # @!parse - # # The definition of the function that was called. - # # - # # @param arguments [String] - # # @param name [String] - # # @param output [String, nil] - # # - # def initialize(arguments: nil, name: nil, output: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil, output: nil) + # The definition of the function that was called. + # + # @param arguments [String] + # @param name [String] + # @param output [String, nil] end end end diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index 12b7b685..575eb64f 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -18,15 +18,11 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message_creation] required :type, const: :message_creation - # @!parse - # # Details of the message creation by the run step. 
- # # - # # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] - # # @param type [Symbol, :message_creation] - # # - # def initialize(message_creation:, type: :message_creation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_creation:, type: :message_creation) + # Details of the message creation by the run step. + # + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @param type [Symbol, :message_creation] # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -36,12 +32,8 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel # @return [String] required :message_id, String - # @!parse - # # @param message_id [String] - # # - # def initialize(message_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_id:) + # @param message_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index a4111bd4..d4fe0c67 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -116,49 +116,25 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] required :usage, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Usage }, nil?: true - # @!parse - # # Represents a step in execution of a run. - # # - # # @param id [String] - # # @param assistant_id [String] - # # @param cancelled_at [Integer, nil] - # # @param completed_at [Integer, nil] - # # @param created_at [Integer] - # # @param expired_at [Integer, nil] - # # @param failed_at [Integer, nil] - # # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param run_id [String] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] - # # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] - # # @param thread_id [String] - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] - # # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] - # # @param object [Symbol, :"thread.run.step"] - # # - # def initialize( - # id:, - # assistant_id:, - # cancelled_at:, - # completed_at:, - # created_at:, - # expired_at:, - # failed_at:, - # last_error:, - # metadata:, - # run_id:, - # status:, - # step_details:, - # thread_id:, - # type:, - # usage:, - # object: :"thread.run.step", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") + # Represents a step in execution of a run. 
+ # + # @param id [String] + # @param assistant_id [String] + # @param cancelled_at [Integer, nil] + # @param completed_at [Integer, nil] + # @param created_at [Integer] + # @param expired_at [Integer, nil] + # @param failed_at [Integer, nil] + # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param run_id [String] + # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] + # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] + # @param thread_id [String] + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] + # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] + # @param object [Symbol, :"thread.run.step"] # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -174,16 +150,12 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this run step. Will be `null` if there are no - # # errors. - # # - # # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this run step. Will be `null` if there are no + # errors. + # + # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] + # @param message [String] # One of `server_error` or `rate_limit_exceeded`. # @@ -194,11 +166,8 @@ module Code SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -215,11 +184,8 @@ module Status COMPLETED = :completed EXPIRED = :expired - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The details of the run step. @@ -236,9 +202,8 @@ module StepDetails # Details of the tool call. variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] end # The type of run step, which can be either `message_creation` or `tool_calls`. @@ -250,11 +215,8 @@ module Type MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage @@ -277,17 +239,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Usage statistics related to the run step. This value will be `null` while the - # # run step's status is `in_progress`. 
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:) + # Usage statistics related to the run step. This value will be `null` while the + # run step's status is `in_progress`. + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 15e16864..10ae040b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -16,14 +16,10 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] # attr_writer :step_details - # @!parse - # # The delta containing the fields that have changed on the run step. - # # - # # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] - # # - # def initialize(step_details: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(step_details: nil) + # The delta containing the fields that have changed on the run step. + # + # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] # The details of the run step. # @@ -39,9 +35,8 @@ module StepDetails # Details of the tool call. variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index a71b9858..4d83e30e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -24,17 +24,13 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :object, const: :"thread.run.step.delta" - # @!parse - # # Represents a run step delta i.e. any changed fields on a run step during - # # streaming. - # # - # # @param id [String] - # # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] - # # @param object [Symbol, :"thread.run.step.delta"] - # # - # def initialize(id:, delta:, object: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, delta:, object: :"thread.run.step.delta") + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. 
+ # + # @param id [String] + # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] + # @param object [Symbol, :"thread.run.step.delta"] end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 178763ff..c57bba4e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -22,15 +22,11 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] # attr_writer :message_creation - # @!parse - # # Details of the message creation by the run step. - # # - # # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] - # # @param type [Symbol, :message_creation] - # # - # def initialize(message_creation: nil, type: :message_creation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_creation: nil, type: :message_creation) + # Details of the message creation by the run step. + # + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] + # @param type [Symbol, :message_creation] # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -44,12 +40,8 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :message_id - # @!parse - # # @param message_id [String] - # # - # def initialize(message_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_id: nil) + # @param message_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 4c7fe791..5253cc7e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -11,11 +11,8 @@ module RunStepInclude STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 1b281161..4f484a34 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -81,18 +81,14 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] # attr_writer :order - # @!parse - # # @param thread_id [String] - # # @param after [String] - # # @param before [String] - # # @param include [Array] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @param thread_id [String] + # @param after [String] + # @param before [String] + # @param include [Array] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -102,11 +98,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index a02c8ce0..3e6934b7 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -38,15 +38,11 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :include - # @!parse - # # @param thread_id [String] - # # @param run_id [String] - # # @param include [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, run_id:, include: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) + # @param thread_id [String] + # @param run_id [String] + # @param include [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index eaee8252..5e18fa3b 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -18,9 +18,8 @@ module ToolCall variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] - # def self.variants; end + # @!method self.variants + # @return 
[Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index a99db2d3..e5cd0aff 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -18,9 +18,8 @@ module ToolCallDelta variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 8c1394b6..ca02ab4c 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -25,15 +25,11 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # Details of the tool call. - # # - # # @param tool_calls [Array] - # # @param type [Symbol, :tool_calls] - # # - # def initialize(tool_calls: nil, type: :tool_calls, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls: nil, type: :tool_calls) + # Details of the tool call. + # + # @param tool_calls [Array] + # @param type [Symbol, :tool_calls] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 1ac644f3..08393ac6 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -21,15 +21,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, :tool_calls] required :type, const: :tool_calls - # @!parse - # # Details of the tool call. - # # - # # @param tool_calls [Array] - # # @param type [Symbol, :tool_calls] - # # - # def initialize(tool_calls:, type: :tool_calls, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls:, type: :tool_calls) + # Details of the tool call. 
+ # + # @param tool_calls [Array] + # @param type [Symbol, :tool_calls] end end end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index 35eb23bc..aa0282e1 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -17,13 +17,9 @@ class Text < OpenAI::Internal::Type::BaseModel # @return [String] required :value, String - # @!parse - # # @param annotations [Array] - # # @param value [String] - # # - # def initialize(annotations:, value:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations:, value:) + # @param annotations [Array] + # @param value [String] end end end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 0e90d460..7d8eb24b 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -16,15 +16,11 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # The text content that is part of a message. - # # - # # @param text [OpenAI::Models::Beta::Threads::Text] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # The text content that is part of a message. + # + # @param text [OpenAI::Models::Beta::Threads::Text] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/beta/threads/text_content_block_param.rb b/lib/openai/models/beta/threads/text_content_block_param.rb index 9cbf24b3..26880392 100644 --- a/lib/openai/models/beta/threads/text_content_block_param.rb +++ b/lib/openai/models/beta/threads/text_content_block_param.rb @@ -17,15 +17,11 @@ class TextContentBlockParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # The text content that is part of a message. - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # The text content that is part of a message. 
+ # + # @param text [String] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 978ecd2c..cbac0c09 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -25,13 +25,9 @@ class TextDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :value - # @!parse - # # @param annotations [Array] - # # @param value [String] - # # - # def initialize(annotations: nil, value: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations: nil, value: nil) + # @param annotations [Array] + # @param value [String] end end end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 7c1b9ecc..12701fae 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -26,16 +26,12 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::TextDelta] # attr_writer :text - # @!parse - # # The text content that is part of a message. - # # - # # @param index [Integer] - # # @param text [OpenAI::Models::Beta::Threads::TextDelta] - # # @param type [Symbol, :text] - # # - # def initialize(index:, text: nil, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, text: nil, type: :text) + # The text content that is part of a message. + # + # @param index [Integer] + # @param text [OpenAI::Models::Beta::Threads::TextDelta] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 1ea5a81f..8e808b6e 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -83,34 +83,18 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage] # attr_writer :usage - # @!parse - # # Represents a chat completion response returned by model, based on the provided - # # input. - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage] - # # @param object [Symbol, :"chat.completion"] - # # - # def initialize( - # id:, - # choices:, - # created:, - # model:, - # service_tier: nil, - # system_fingerprint: nil, - # usage: nil, - # object: :"chat.completion", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") + # Represents a chat completion response returned by model, based on the provided + # input. 
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage] + # @param object [Symbol, :"chat.completion"] class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason @@ -142,15 +126,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionMessage] required :message, -> { OpenAI::Models::Chat::ChatCompletionMessage } - # @!parse - # # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] - # # @param message [OpenAI::Models::Chat::ChatCompletionMessage] - # # - # def initialize(finish_reason:, index:, logprobs:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(finish_reason:, index:, logprobs:, message:) + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] + # @param index [Integer] + # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] + # @param message [OpenAI::Models::Chat::ChatCompletionMessage] # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -169,11 +149,8 @@ module FinishReason CONTENT_FILTER = :content_filter FUNCTION_CALL = :function_call - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs @@ -194,15 +171,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true - # @!parse - # # Log probability information for the choice. - # # - # # @param content [Array, nil] - # # @param refusal [Array, nil] - # # - # def initialize(content:, refusal:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:) + # Log probability information for the choice. + # + # @param content [Array, nil] + # @param refusal [Array, nil] end end @@ -232,11 +205,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 992b3818..bf6a5b2e 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -63,31 +63,16 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # Messages sent by the model in response to user messages. 
- # # - # # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] - # # @param content [String, Array, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] - # # @param name [String] - # # @param refusal [String, nil] - # # @param tool_calls [Array] - # # @param role [Symbol, :assistant] - # # - # def initialize( - # audio: nil, - # content: nil, - # function_call: nil, - # name: nil, - # refusal: nil, - # tool_calls: nil, - # role: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) + # Messages sent by the model in response to user messages. + # + # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] + # @param content [String, Array, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] + # @param name [String] + # @param refusal [String, nil] + # @param tool_calls [Array] + # @param role [Symbol, :assistant] # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel @@ -97,15 +82,11 @@ class Audio < OpenAI::Internal::Type::BaseModel # @return [String] required :id, String - # @!parse - # # Data about a previous audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # Data about a previous audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param id [String] end # The contents of the assistant message. Required unless `tool_calls` or @@ -133,14 +114,12 @@ module ArrayOfContentPart variant :refusal, -> { OpenAI::Models::Chat::ChatCompletionContentPartRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] end - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ArrayOfContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] @@ -165,16 +144,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. 
+ # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 238b8a6b..e56cae38 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -30,19 +30,15 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @return [String] required :transcript, String - # @!parse - # # If the audio output modality is requested, this object contains data about the - # # audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param id [String] - # # @param data [String] - # # @param expires_at [Integer] - # # @param transcript [String] - # # - # def initialize(id:, data:, expires_at:, transcript:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, data:, expires_at:, transcript:) + # If the audio output modality is requested, this object contains data about the + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param id [String] + # @param data [String] + # @param expires_at [Integer] + # @param transcript [String] end end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 1e69e4cf..3374567d 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -18,17 +18,13 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } - # @!parse - # # Parameters for audio output. Required when audio output is requested with - # # `modalities: ["audio"]`. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - # # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] - # # - # def initialize(format_:, voice:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(format_:, voice:) + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] + # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. @@ -44,11 +40,8 @@ module Format OPUS = :opus PCM16 = :pcm16 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The voice the model uses to respond. 
Supported voices are `alloy`, `ash`, @@ -82,9 +75,8 @@ module Voice variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 0a201e76..61a57392 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -84,35 +84,19 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true - # @!parse - # # Represents a streamed chunk of a chat completion response returned by the model, - # # based on the provided input. - # # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage, nil] - # # @param object [Symbol, :"chat.completion.chunk"] - # # - # def initialize( - # id:, - # choices:, - # created:, - # model:, - # service_tier: nil, - # system_fingerprint: nil, - # usage: nil, - # object: :"chat.completion.chunk", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") + # Represents a streamed chunk of a chat completion response returned by the model, + # based on the provided input. + # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). 
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage, nil] + # @param object [Symbol, :"chat.completion.chunk"] class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta @@ -146,15 +130,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] optional :logprobs, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true - # @!parse - # # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] - # # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] - # # - # def initialize(delta:, finish_reason:, index:, logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) + # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] + # @param index [Integer] + # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel @@ -201,18 +181,14 @@ class Delta < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # A chat completion delta generated by streamed model responses. - # # - # # @param content [String, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] - # # @param refusal [String, nil] - # # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] - # # @param tool_calls [Array] - # # - # def initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) + # A chat completion delta generated by streamed model responses. + # + # @param content [String, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] + # @param refusal [String, nil] + # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] + # @param tool_calls [Array] # @deprecated # @@ -241,16 +217,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments: nil, name: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. + # + # @param arguments [String] + # @param name [String] end # The role of the author of this message. 
@@ -265,11 +237,8 @@ module Role ASSISTANT = :assistant TOOL = :tool - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class ToolCall < OpenAI::Internal::Type::BaseModel @@ -307,15 +276,11 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] # attr_writer :type - # @!parse - # # @param index [Integer] - # # @param id [String] - # # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] - # # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] - # # - # def initialize(index:, id: nil, function: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, function: nil, type: nil) + # @param index [Integer] + # @param id [String] + # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] + # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -342,13 +307,9 @@ class Function < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments: nil, name: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil) + # @param arguments [String] + # @param name [String] end # The type of the tool. Currently, only `function` is supported. @@ -359,11 +320,8 @@ module Type FUNCTION = :function - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end @@ -385,11 +343,8 @@ module FinishReason CONTENT_FILTER = :content_filter FUNCTION_CALL = :function_call - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs @@ -410,15 +365,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true - # @!parse - # # Log probability information for the choice. - # # - # # @param content [Array, nil] - # # @param refusal [Array, nil] - # # - # def initialize(content:, refusal:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:) + # Log probability information for the choice. + # + # @param content [Array, nil] + # @param refusal [Array, nil] end end @@ -448,11 +399,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 31635ff4..af75a6c6 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -34,16 +34,12 @@ class File < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file] required :type, const: :file - # @!parse - # # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text - # # generation. - # # - # # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] - # # @param type [Symbol, :file] - # # - # def initialize(file:, type: :file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, type: :file) + # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text + # generation. + # + # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] + # @param type [Symbol, :file] # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel @@ -78,20 +74,15 @@ class File < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :filename - # @!parse - # # @param file_data [String] - # # @param file_id [String] - # # @param filename [String] - # # - # def initialize(file_data: nil, file_id: nil, filename: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_data: nil, file_id: nil, filename: nil) + # @param file_data [String] + # @param file_id [String] + # @param filename [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index a22c144e..023fa1d0 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -15,15 +15,11 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). - # # - # # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). 
+ # + # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] + # @param type [Symbol, :image_url] # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -44,13 +40,9 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] # attr_writer :detail - # @!parse - # # @param url [String] - # # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] - # # - # def initialize(url:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:, detail: nil) + # @param url [String] + # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). @@ -63,11 +55,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 46b3b077..9331e125 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -15,15 +15,11 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_audio] required :type, const: :input_audio - # @!parse - # # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). - # # - # # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] - # # @param type [Symbol, :input_audio] - # # - # def initialize(input_audio:, type: :input_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_audio:, type: :input_audio) + # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). + # + # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @param type [Symbol, :input_audio] # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel @@ -41,13 +37,9 @@ class InputAudio < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, api_name: :format - # @!parse - # # @param data [String] - # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] - # # - # def initialize(data:, format_:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, format_:) + # @param data [String] + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] # The format of the encoded audio data. Currently supports "wav" and "mp3". # @@ -58,11 +50,8 @@ module Format WAV = :wav MP3 = :mp3 - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_refusal.rb b/lib/openai/models/chat/chat_completion_content_part_refusal.rb index 20e3bc5d..9137d008 100644 --- a/lib/openai/models/chat/chat_completion_content_part_refusal.rb +++ b/lib/openai/models/chat/chat_completion_content_part_refusal.rb @@ -16,13 +16,9 @@ class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # @param refusal [String] + # @param type [Symbol, :refusal] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_text.rb b/lib/openai/models/chat/chat_completion_content_part_text.rb index e1975859..212467d9 100644 --- a/lib/openai/models/chat/chat_completion_content_part_text.rb +++ b/lib/openai/models/chat/chat_completion_content_part_text.rb @@ -16,16 +16,12 @@ class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # Learn about - # # [text inputs](https://platform.openai.com/docs/guides/text-generation). - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # + # @param text [String] + # @param type [Symbol, :text] end end diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index 33c9c802..a3125c86 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -23,14 +23,10 @@ class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"chat.completion.deleted"] required :object, const: :"chat.completion.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"chat.completion.deleted"] - # # - # def initialize(id:, deleted:, object: :"chat.completion.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"chat.completion.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"chat.completion.deleted"] end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index ecd1e5bd..1eb265ea 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -27,18 +27,14 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, `developer` messages - # # replace the previous `system` messages. 
- # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :developer] - # # - # def initialize(content:, name: nil, role: :developer, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :developer) + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :developer] # The contents of the developer message. # @@ -52,9 +48,8 @@ module Content # An array of content parts with a defined type. For developer messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_function_call_option.rb b/lib/openai/models/chat/chat_completion_function_call_option.rb index 3ae8526b..9c7d28fd 100644 --- a/lib/openai/models/chat/chat_completion_function_call_option.rb +++ b/lib/openai/models/chat/chat_completion_function_call_option.rb @@ -10,15 +10,11 @@ class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Specifying a particular function via `{"name": "my_function"}` forces the model - # # to call that function. - # # - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # @param name [String] end end diff --git a/lib/openai/models/chat/chat_completion_function_message_param.rb b/lib/openai/models/chat/chat_completion_function_message_param.rb index 7e00e030..4a8efb76 100644 --- a/lib/openai/models/chat/chat_completion_function_message_param.rb +++ b/lib/openai/models/chat/chat_completion_function_message_param.rb @@ -23,14 +23,10 @@ class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :role, const: :function - # @!parse - # # @param content [String, nil] - # # @param name [String] - # # @param role [Symbol, :function] - # # - # def initialize(content:, name:, role: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name:, role: :function) + # @param content [String, nil] + # @param name [String] + # @param role [Symbol, :function] end end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index b36682ef..63485db1 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -64,31 +64,16 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # A chat completion message generated by the model. 
- # # - # # @param content [String, nil] - # # @param refusal [String, nil] - # # @param annotations [Array] - # # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] - # # @param tool_calls [Array] - # # @param role [Symbol, :assistant] - # # - # def initialize( - # content:, - # refusal:, - # annotations: nil, - # audio: nil, - # function_call: nil, - # tool_calls: nil, - # role: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) + # A chat completion message generated by the model. + # + # @param content [String, nil] + # @param refusal [String, nil] + # @param annotations [Array] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] + # @param tool_calls [Array] + # @param role [Symbol, :assistant] class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -103,15 +88,11 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] required :url_citation, -> { OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation } - # @!parse - # # A URL citation when using web search. - # # - # # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] - # # @param type [Symbol, :url_citation] - # # - # def initialize(url_citation:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url_citation:, type: :url_citation) + # A URL citation when using web search. + # + # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] + # @param type [Symbol, :url_citation] # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel @@ -139,17 +120,13 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A URL citation when using web search. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # - # def initialize(end_index:, start_index:, title:, url:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:) + # A URL citation when using web search. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] end end @@ -172,16 +149,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. 
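Walking the annotation structure documented above is plain accessor chaining. A sketch, assuming `message` is a `ChatCompletionMessage` returned from a web-search request (annotations may be absent otherwise):

    Array(message.annotations).each do |annotation|
      citation = annotation.url_citation
      # start_index/end_index delimit the cited span within message.content
      puts "#{citation.title} <#{citation.url}> (#{citation.start_index}..#{citation.end_index})"
    end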
+ # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index ed72d515..16e5d625 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -32,9 +32,8 @@ module ChatCompletionMessageParam variant :function, -> { OpenAI::Models::Chat::ChatCompletionFunctionMessageParam } - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index cca6cc4e..adad4c9a 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -22,14 +22,10 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param id [String] - # # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # @param id [String] + # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -48,15 +44,11 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # The function that the model called. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # The function that the model called. + # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index 05e0e087..10d3ba8c 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -9,11 +9,8 @@ module ChatCompletionModality TEXT = :text AUDIO = :audio - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index c887906f..d6d7a955 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -15,16 +15,12 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Specifies a tool the model should use. Use to force the model to call a specific - # # function. - # # - # # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # Specifies a tool the model should use. Use to force the model to call a specific + # function. + # + # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel @@ -34,12 +30,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index 0cc7df62..b6c130fb 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -19,16 +19,12 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :content] required :type, const: :content - # @!parse - # # Static predicted output content, such as the content of a text file that is - # # being regenerated. - # # - # # @param content [String, Array] - # # @param type [Symbol, :content] - # # - # def initialize(content:, type: :content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :content) + # Static predicted output content, such as the content of a text file that is + # being regenerated. + # + # @param content [String, Array] + # @param type [Symbol, :content] # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be @@ -45,9 +41,8 @@ module Content # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. 
variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 87bd08a8..514fe22c 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -14,11 +14,8 @@ module ChatCompletionRole TOOL = :tool FUNCTION = :function - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/chat/chat_completion_store_message.rb b/lib/openai/models/chat/chat_completion_store_message.rb index 3ae2370c..119b5a36 100644 --- a/lib/openai/models/chat/chat_completion_store_message.rb +++ b/lib/openai/models/chat/chat_completion_store_message.rb @@ -10,14 +10,10 @@ class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # @return [String] required :id, String - # @!parse - # # A chat completion message generated by the model. - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # A chat completion message generated by the model. + # + # @param id [String] end end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index cb29b9a6..04006615 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -20,14 +20,10 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :include_usage - # @!parse - # # Options for streaming response. Only set this when you set `stream: true`. - # # - # # @param include_usage [Boolean] - # # - # def initialize(include_usage: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(include_usage: nil) + # Options for streaming response. Only set this when you set `stream: true`. + # + # @param include_usage [Boolean] end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 44d1d207..bb2c91b6 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -27,18 +27,14 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, use `developer` messages - # # for this purpose instead. - # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :system] - # # - # def initialize(content:, name: nil, role: :system, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :system) + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. 
With o1 models and newer, use `developer` messages + # for this purpose instead. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :system] # The contents of the system message. # @@ -52,9 +48,8 @@ module Content # An array of content parts with a defined type. For system messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index a9f0bc0d..1ab8dd0f 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -36,15 +36,11 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } - # @!parse - # # @param token [String] - # # @param bytes [Array, nil] - # # @param logprob [Float] - # # @param top_logprobs [Array] - # # - # def initialize(token:, bytes:, logprob:, top_logprobs:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token:, bytes:, logprob:, top_logprobs:) + # @param token [String] + # @param bytes [Array, nil] + # @param logprob [Float] + # @param top_logprobs [Array] class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -70,14 +66,10 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @return [Float] required :logprob, Float - # @!parse - # # @param token [String] - # # @param bytes [Array, nil] - # # @param logprob [Float] - # # - # def initialize(token:, bytes:, logprob:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token:, bytes:, logprob:) + # @param token [String] + # @param bytes [Array, nil] + # @param logprob [Float] end end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index 8737a603..1cef5fd5 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -15,13 +15,9 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param function [OpenAI::Models::FunctionDefinition] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # @param function [OpenAI::Models::FunctionDefinition] + # @param type [Symbol, :function] end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index c57aaf23..5fdd5796 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -31,16 +31,12 @@ module Auto AUTO = :auto REQUIRED = :required - finalize! 
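The logprob shape above nests per-token alternatives under `top_logprobs`. A sketch of flattening it, assuming the request set `logprobs: true` and that `choice.logprobs.content` carries the `ChatCompletionTokenLogprob` array (the field name on the chat choice is an assumption here, since that class is not part of this hunk):

    Array(choice.logprobs&.content).each do |tl|
      best_alt = tl.top_logprobs.max_by(&:logprob)
      printf("%-12s %8.4f (best alternative: %s)\n", tl.token, tl.logprob, best_alt&.token)
    end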
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 20c3a014..a0718af1 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -22,14 +22,10 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @return [String] required :tool_call_id, String - # @!parse - # # @param content [String, Array] - # # @param tool_call_id [String] - # # @param role [Symbol, :tool] - # # - # def initialize(content:, tool_call_id:, role: :tool, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, tool_call_id:, role: :tool) + # @param content [String, Array] + # @param tool_call_id [String] + # @param role [Symbol, :tool] # The contents of the tool message. # @@ -43,9 +39,8 @@ module Content # An array of content parts with a defined type. For tool messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 6cf8585a..34b81339 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -27,17 +27,13 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Messages sent by an end user, containing prompts or additional context - # # information. - # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :user] - # # - # def initialize(content:, name: nil, role: :user, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :user) + # Messages sent by an end user, containing prompts or additional context + # information. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :user] # The contents of the user message. # @@ -51,9 +47,8 @@ module Content # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. 
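Per the `ToolChoiceOption` union above, `tool_choice` is either one of the `Auto` symbols (`:none`, `:auto`, `:required`) or a named choice; a tool message then carries the function result back. A sketch with illustrative names and IDs:

    tool_choice = OpenAI::Models::Chat::ChatCompletionNamedToolChoice.new(
      function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function.new(name: "get_weather")
    )

    tool_msg = OpenAI::Models::Chat::ChatCompletionToolMessageParam.new(
      content: '{"temp_c": 21}',
      tool_call_id: "call_abc123" # copied from message.tool_calls; illustrative
    )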
variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index bee73ddb..fff48ea0 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -355,77 +355,38 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] # attr_writer :web_search_options - # @!parse - # # @param messages [Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] - # # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - # # @param frequency_penalty [Float, nil] - # # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # # @param functions [Array] - # # @param logit_bias [Hash{Symbol=>Integer}, nil] - # # @param logprobs [Boolean, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param modalities [Array, nil] - # # @param n [Integer, nil] - # # @param parallel_tool_calls [Boolean] - # # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - # # @param presence_penalty [Float, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # # @param seed [Integer, nil] - # # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - # # @param stop [String, Array, nil] - # # @param store [Boolean, nil] - # # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # # @param tools [Array] - # # @param top_logprobs [Integer, nil] - # # @param top_p [Float, nil] - # # @param user [String] - # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # messages:, - # model:, - # audio: nil, - # frequency_penalty: nil, - # function_call: nil, - # functions: nil, - # logit_bias: nil, - # logprobs: nil, - # max_completion_tokens: nil, - # max_tokens: nil, - # metadata: nil, - # modalities: nil, - # n: nil, - # parallel_tool_calls: nil, - # prediction: nil, - # presence_penalty: nil, - # reasoning_effort: nil, - # response_format: nil, - # seed: nil, - # service_tier: nil, - # stop: nil, - # store: nil, - # stream_options: nil, - # temperature: nil, - # tool_choice: nil, - # tools: nil, - # top_logprobs: nil, - # top_p: nil, - # user: nil, - # web_search_options: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages:, model:, 
audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @param messages [Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] + # @param frequency_penalty [Float, nil] + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + # @param functions [Array] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Boolean, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param modalities [Array, nil] + # @param n [Integer, nil] + # @param parallel_tool_calls [Boolean] + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] + # @param presence_penalty [Float, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + # @param seed [Integer, nil] + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] + # @param stop [String, Array, nil] + # @param store [Boolean, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + # @param tools [Array] + # @param top_logprobs [Integer, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -443,9 +404,8 @@ module Model # to browse and compare available models. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end # @deprecated @@ -482,16 +442,12 @@ module FunctionCallMode NONE = :none AUTO = :auto - finalize! 
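A minimal end-to-end sketch of this parameter surface, assuming a client configured from the environment and that plain hashes are coerced into the message param models:

    require "openai"

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{role: :user, content: "Say hello in one word."}],
      temperature: 0.2,
      max_completion_tokens: 16
    )
    puts completion.choices.first.message.content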
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] end # @deprecated @@ -530,14 +486,10 @@ class Function < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :parameters - # @!parse - # # @param name [String] - # # @param description [String] - # # @param parameters [Hash{Symbol=>Object}] - # # - # def initialize(name:, description: nil, parameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, description: nil, parameters: nil) + # @param name [String] + # @param description [String] + # @param parameters [Hash{Symbol=>Object}] end module Modality @@ -546,11 +498,8 @@ module Modality TEXT = :text AUDIO = :audio - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # An object specifying the format that the model must output. @@ -579,9 +528,8 @@ module ResponseFormat # to do so. variant -> { OpenAI::Models::ResponseFormatJSONObject } - # @!parse - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -608,11 +556,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Not supported with latest reasoning models `o3` and `o4-mini`. @@ -626,9 +571,8 @@ module Stop variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -654,17 +598,13 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, nil?: true - # @!parse - # # This tool searches the web for relevant results to use in a response. Learn more - # # about the - # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). - # # - # # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] - # # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] - # # - # def initialize(search_context_size: nil, user_location: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(search_context_size: nil, user_location: nil) + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
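Constructing the web-search options described above with the nested user-location classes; the field values are illustrative, and the result would be passed as `web_search_options:` to `chat.completions.create`:

    approximate = OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate.new(
      city: "Berlin", country: "DE", timezone: "Europe/Berlin"
    )
    web_search = OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions.new(
      search_context_size: :low,
      user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation.new(
        approximate: approximate
      )
    )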
+ # + # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] + # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. @@ -677,11 +617,8 @@ module SearchContextSize MEDIUM = :medium HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location @@ -699,15 +636,11 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :approximate] required :type, const: :approximate - # @!parse - # # Approximate location parameters for the search. - # # - # # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] - # # @param type [Symbol, :approximate] - # # - # def initialize(approximate:, type: :approximate, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(approximate:, type: :approximate) + # Approximate location parameters for the search. + # + # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] + # @param type [Symbol, :approximate] # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel @@ -753,17 +686,13 @@ class Approximate < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :timezone - # @!parse - # # Approximate location parameters for the search. - # # - # # @param city [String] - # # @param country [String] - # # @param region [String] - # # @param timezone [String] - # # - # def initialize(city: nil, country: nil, region: nil, timezone: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) + # Approximate location parameters for the search. 
+ # + # @param city [String] + # @param country [String] + # @param region [String] + # @param timezone [String] end end end diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index a7441968..c32fe53d 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -9,12 +9,8 @@ class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index dede958c..911b8a36 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -58,17 +58,13 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. @@ -78,11 +74,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index 29df8685..ccdba91c 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -9,12 +9,8 @@ class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 0a8504f8..954a4400 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -20,13 +20,9 @@ class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata:, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index eca322af..02329b59 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -41,15 +41,11 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. @@ -59,11 +55,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
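Listing stored chat completions with the pagination knobs above; a sketch assuming the returned cursor page exposes `auto_paging_each`, as pages do elsewhere in this SDK:

    page = client.chat.completions.list(limit: 20, order: :desc, model: "gpt-4o")
    page.auto_paging_each do |completion|
      puts "#{completion.id} created at #{Time.at(completion.created)}"
    end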
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 1bcb4858..e2cf7b8d 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -59,11 +59,8 @@ module ChatModel GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 13134827..81515805 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -29,17 +29,13 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # @return [String, Float, Boolean] required :value, union: -> { OpenAI::Models::ComparisonFilter::Value } - # @!parse - # # A filter used to compare a specified attribute key to a given value using a - # # defined comparison operation. - # # - # # @param key [String] - # # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] - # # @param value [String, Float, Boolean] - # # - # def initialize(key:, type:, value:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(key:, type:, value:) + # A filter used to compare a specified attribute key to a given value using a + # defined comparison operation. + # + # @param key [String] + # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] + # @param value [String, Float, Boolean] # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -61,11 +57,8 @@ module Type LT = :lt LTE = :lte - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The value to compare against the attribute key; supports string, number, or @@ -81,9 +74,8 @@ module Value variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 6062d60a..e31c3bbf 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -59,21 +59,17 @@ class Completion < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage] # attr_writer :usage - # @!parse - # # Represents a completion response from the API. Note: both the streamed and - # # non-streamed response objects share the same shape (unlike the chat endpoint). - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage] - # # @param object [Symbol, :text_completion] - # # - # def initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) + # Represents a completion response from the API. Note: both the streamed and + # non-streamed response objects share the same shape (unlike the chat endpoint). 
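Reading a legacy completion choice per the schema above; `finish_reason` is one of the documented enum symbols, so a case dispatch suffices (assumes `completion` came from the legacy completions endpoint):

    choice = completion.choices.first
    case choice.finish_reason
    when :length         then warn "truncated at max_tokens"
    when :content_filter then warn "content omitted by filter"
    end
    puts choice.text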
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage] + # @param object [Symbol, :text_completion] end end end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 07081468..096074a0 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -27,15 +27,11 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!parse - # # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] - # # @param text [String] - # # - # def initialize(finish_reason:, index:, logprobs:, text:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(finish_reason:, index:, logprobs:, text:) + # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] + # @param index [Integer] + # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] + # @param text [String] # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -50,11 +46,8 @@ module FinishReason LENGTH = :length CONTENT_FILTER = :content_filter - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::CompletionChoice#logprobs @@ -95,15 +88,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # # @return [ArrayFloat}>] # attr_writer :top_logprobs - # @!parse - # # @param text_offset [Array] - # # @param token_logprobs [Array] - # # @param tokens [Array] - # # @param top_logprobs [ArrayFloat}>] - # # - # def initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil) + # @param text_offset [Array] + # @param token_logprobs [Array] + # @param tokens [Array] + # @param top_logprobs [ArrayFloat}>] end end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 6463fc0e..c1e3656e 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -187,51 +187,25 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - # # @param prompt [String, Array, Array, Array>, nil] - # # @param best_of [Integer, nil] - # # @param echo [Boolean, nil] - # # @param frequency_penalty [Float, nil] - # # @param logit_bias [Hash{Symbol=>Integer}, nil] - # # @param logprobs [Integer, nil] - # # @param max_tokens [Integer, nil] - # # @param n [Integer, nil] - # # @param presence_penalty [Float, nil] - # # @param seed [Integer, nil] - # # @param stop [String, Array, nil] - # # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # # @param suffix [String, nil] - # # @param temperature [Float, nil] - # # @param top_p [Float, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, 
Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # prompt:, - # best_of: nil, - # echo: nil, - # frequency_penalty: nil, - # logit_bias: nil, - # logprobs: nil, - # max_tokens: nil, - # n: nil, - # presence_penalty: nil, - # seed: nil, - # stop: nil, - # stream_options: nil, - # suffix: nil, - # temperature: nil, - # top_p: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] + # @param prompt [String, Array, Array, Array>, nil] + # @param best_of [Integer, nil] + # @param echo [Boolean, nil] + # @param frequency_penalty [Float, nil] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Integer, nil] + # @param max_tokens [Integer, nil] + # @param n [Integer, nil] + # @param presence_penalty [Float, nil] + # @param seed [Integer, nil] + # @param stop [String, Array, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param suffix [String, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -249,9 +223,8 @@ module Model variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -279,9 +252,8 @@ module Prompt variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } - # @!parse - # # @return [Array(String, Array, Array, Array>)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array, Array>)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -301,9 +273,8 @@ module Stop variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 0f098720..d8e75136 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -41,27 +41,14 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails] # attr_writer :prompt_tokens_details - # @!parse - # # Usage statistics for the completion request. 
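Reading the usage breakdown documented above; the detail objects are optional, so safe navigation guards against their absence (`nil.to_i` collapses to 0):

    usage = completion.usage
    puts "prompt=#{usage.prompt_tokens} completion=#{usage.completion_tokens} total=#{usage.total_tokens}"
    puts "reasoning=#{usage.completion_tokens_details&.reasoning_tokens.to_i}"
    puts "cached=#{usage.prompt_tokens_details&.cached_tokens.to_i}"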
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] - # # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] - # # - # def initialize( - # completion_tokens:, - # prompt_tokens:, - # total_tokens:, - # completion_tokens_details: nil, - # prompt_tokens_details: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) + # Usage statistics for the completion request. + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] + # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] + # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel @@ -109,25 +96,13 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :rejected_prediction_tokens - # @!parse - # # Breakdown of tokens used in a completion. - # # - # # @param accepted_prediction_tokens [Integer] - # # @param audio_tokens [Integer] - # # @param reasoning_tokens [Integer] - # # @param rejected_prediction_tokens [Integer] - # # - # def initialize( - # accepted_prediction_tokens: nil, - # audio_tokens: nil, - # reasoning_tokens: nil, - # rejected_prediction_tokens: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) + # Breakdown of tokens used in a completion. + # + # @param accepted_prediction_tokens [Integer] + # @param audio_tokens [Integer] + # @param reasoning_tokens [Integer] + # @param rejected_prediction_tokens [Integer] end # @see OpenAI::Models::CompletionUsage#prompt_tokens_details @@ -152,15 +127,11 @@ class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :cached_tokens - # @!parse - # # Breakdown of tokens used in the prompt. - # # - # # @param audio_tokens [Integer] - # # @param cached_tokens [Integer] - # # - # def initialize(audio_tokens: nil, cached_tokens: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(audio_tokens: nil, cached_tokens: nil) + # Breakdown of tokens used in the prompt. + # + # @param audio_tokens [Integer] + # @param cached_tokens [Integer] end end end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 57314aa0..228c5572 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -16,15 +16,11 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::CompoundFilter::Type] required :type, enum: -> { OpenAI::Models::CompoundFilter::Type } - # @!parse - # # Combine multiple filters using `and` or `or`. 
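Combining filters per the classes above; `ComparisonFilter` is defined earlier in this patch with `key`, `type`, and `value`, and the values here are illustrative:

    in_stock = OpenAI::Models::ComparisonFilter.new(key: "in_stock", type: :eq, value: true)
    cheap    = OpenAI::Models::ComparisonFilter.new(key: "price", type: :lte, value: 20.0)
    filter   = OpenAI::Models::CompoundFilter.new(filters: [in_stock, cheap], type: :and)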
- # # - # # @param filters [Array] - # # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] - # # - # def initialize(filters:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(filters:, type:) + # Combine multiple filters using `and` or `or`. + # + # @param filters [Array] + # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. @@ -36,9 +32,8 @@ module Filter variant OpenAI::Internal::Type::Unknown - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, Object)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, Object)] end # Type of operation: `and` or `or`. @@ -50,11 +45,8 @@ module Type AND = :and OR = :or - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index bde0fa74..35e2aa45 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -28,15 +28,11 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::CreateEmbeddingResponse::Usage] required :usage, -> { OpenAI::Models::CreateEmbeddingResponse::Usage } - # @!parse - # # @param data [Array] - # # @param model [String] - # # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] - # # @param object [Symbol, :list] - # # - # def initialize(data:, model:, usage:, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, model:, usage:, object: :list) + # @param data [Array] + # @param model [String] + # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] + # @param object [Symbol, :list] # @see OpenAI::Models::CreateEmbeddingResponse#usage class Usage < OpenAI::Internal::Type::BaseModel @@ -52,15 +48,11 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # The usage information for the request. - # # - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(prompt_tokens:, total_tokens:) + # The usage information for the request. + # + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index 75d1aebf..e1e28ded 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -23,16 +23,12 @@ class Embedding < OpenAI::Internal::Type::BaseModel # @return [Symbol, :embedding] required :object, const: :embedding - # @!parse - # # Represents an embedding vector returned by embedding endpoint. - # # - # # @param embedding [Array] - # # @param index [Integer] - # # @param object [Symbol, :embedding] - # # - # def initialize(embedding:, index:, object: :embedding, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(embedding:, index:, object: :embedding) + # Represents an embedding vector returned by embedding endpoint. 
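A sketch of requesting and reading an embedding against the response shape above, assuming the same environment-configured client as earlier:

    response = client.embeddings.create(
      model: "text-embedding-3-small",
      input: "The quick brown fox",
      dimensions: 256 # optional; supported by the v3 embedding models
    )
    vector = response.data.first.embedding # Array of Floats
    puts "#{vector.length} dims, #{response.usage.total_tokens} tokens"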
+ # + # @param embedding [Array] + # @param index [Integer] + # @param object [Symbol, :embedding] end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 3f26541e..2586d07f 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -65,17 +65,13 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param input [String, Array, Array, Array>] - # # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] - # # @param dimensions [Integer] - # # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) + # @param input [String, Array, Array, Array>] + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] + # @param dimensions [Integer] + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. @@ -100,9 +96,8 @@ module Input # The array of arrays containing integers that will be turned into an embedding. variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray } - # @!parse - # # @return [Array(String, Array, Array, Array>)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array, Array>)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -124,9 +119,8 @@ module Model # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. variant enum: -> { OpenAI::Models::EmbeddingModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] end # The format to return the embeddings in. Can be either `float` or @@ -137,11 +131,8 @@ module EncodingFormat FLOAT = :float BASE64 = :base64 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 33ab9d7f..0692fbb1 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -9,11 +9,8 @@ module EmbeddingModel TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/error_object.rb b/lib/openai/models/error_object.rb index 37031a55..b33d574a 100644 --- a/lib/openai/models/error_object.rb +++ b/lib/openai/models/error_object.rb @@ -23,15 +23,11 @@ class ErrorObject < OpenAI::Internal::Type::BaseModel # @return [String] required :type, String - # @!parse - # # @param code [String, nil] - # # @param message [String] - # # @param param [String, nil] - # # @param type [String] - # # - # def initialize(code:, message:, param:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:, type:) + # @param code [String, nil] + # @param message [String] + # @param param [String, nil] + # @param type [String] end end end diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index dc2ef4ac..d66fae28 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -52,27 +52,13 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :share_with_openai - # @!parse - # # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] - # # @param testing_criteria [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # data_source_config:, - # testing_criteria:, - # metadata: nil, - # name: nil, - # share_with_openai: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + # @param testing_criteria [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The configuration for the data source used for the evaluation runs. module DataSourceConfig @@ -113,21 +99,17 @@ class Custom < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :include_sample_schema - # @!parse - # # A CustomDataSourceConfig object that defines the schema for the data source used - # # for the evaluation runs. This schema is used to define the shape of the data - # # that will be: - # # - # # - Used to define your testing criteria and - # # - What data is required when creating a run - # # - # # @param item_schema [Hash{Symbol=>Object}] - # # @param include_sample_schema [Boolean] - # # @param type [Symbol, :custom] - # # - # def initialize(item_schema:, include_sample_schema: nil, type: :custom, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) + # A CustomDataSourceConfig object that defines the schema for the data source used + # for the evaluation runs. 
This schema is used to define the shape of the data + # that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + # + # @param item_schema [Hash{Symbol=>Object}] + # @param include_sample_schema [Boolean] + # @param type [Symbol, :custom] end class StoredCompletions < OpenAI::Internal::Type::BaseModel @@ -148,22 +130,17 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # A data source config which specifies the metadata property of your stored - # # completions query. This is usually metadata like `usecase=chatbot` or - # # `prompt-version=v2`, etc. - # # - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param type [Symbol, :stored_completions] - # # - # def initialize(metadata: nil, type: :stored_completions, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, type: :stored_completions) + # A data source config which specifies the metadata property of your stored + # completions query. This is usually metadata like `usecase=chatbot` or + # `prompt-version=v2`, etc. + # + # @param metadata [Hash{Symbol=>String}, nil] + # @param type [Symbol, :stored_completions] end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -220,20 +197,16 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @return [Symbol, :label_model] required :type, const: :label_model - # @!parse - # # A LabelModelGrader object which uses a model to assign labels to each item in - # # the evaluation. - # # - # # @param input [Array] - # # @param labels [Array] - # # @param model [String] - # # @param name [String] - # # @param passing_labels [Array] - # # @param type [Symbol, :label_model] - # # - # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. 
+ # + # @param input [Array] + # @param labels [Array] + # @param model [String] + # @param name [String] + # @param passing_labels [Array] + # @param type [Symbol, :label_model] module Input extend OpenAI::Internal::Type::Union @@ -257,13 +230,9 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] end class InputMessage < OpenAI::Internal::Type::BaseModel @@ -287,14 +256,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -311,13 +276,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -327,11 +288,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -345,11 +303,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -360,11 +315,8 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -389,14 +341,10 @@ class OutputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -413,13 +361,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] # The type of content, which is always `output_text`. # @@ -429,11 +373,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -445,11 +386,8 @@ module Role ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -460,23 +398,18 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index f5e1cb70..12832a04 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -58,38 +58,22 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. 
# @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb index 5be74c5c..04e45803 100644 --- a/lib/openai/models/eval_custom_data_source_config.rb +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -16,20 +16,16 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Symbol, :custom] required :type, const: :custom - # @!parse - # # A CustomDataSourceConfig which specifies the schema of your `item` and - # # optionally `sample` namespaces. The response schema defines the shape of the - # # data that will be: - # # - # # - Used to define your testing criteria and - # # - What data is required when creating a run - # # - # # @param schema [Hash{Symbol=>Object}] - # # @param type [Symbol, :custom] - # # - # def initialize(schema:, type: :custom, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(schema:, type: :custom) + # A CustomDataSourceConfig which specifies the schema of your `item` and + # optionally `sample` namespaces. 
The response schema defines the shape of the + # data that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + # + # @param schema [Hash{Symbol=>Object}] + # @param type [Symbol, :custom] end end end diff --git a/lib/openai/models/eval_delete_params.rb b/lib/openai/models/eval_delete_params.rb index de02e69c..80e4d81d 100644 --- a/lib/openai/models/eval_delete_params.rb +++ b/lib/openai/models/eval_delete_params.rb @@ -8,12 +8,8 @@ class EvalDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_delete_response.rb b/lib/openai/models/eval_delete_response.rb index a60ec9d1..5495ca3d 100644 --- a/lib/openai/models/eval_delete_response.rb +++ b/lib/openai/models/eval_delete_response.rb @@ -19,14 +19,10 @@ class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :object, String - # @!parse - # # @param deleted [Boolean] - # # @param eval_id [String] - # # @param object [String] - # # - # def initialize(deleted:, eval_id:, object:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(deleted:, eval_id:, object:) + # @param deleted [Boolean] + # @param eval_id [String] + # @param object [String] end end end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb index 1455f6e4..2318e553 100644 --- a/lib/openai/models/eval_label_model_grader.rb +++ b/lib/openai/models/eval_label_model_grader.rb @@ -39,20 +39,16 @@ class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel # @return [Symbol, :label_model] required :type, const: :label_model - # @!parse - # # A LabelModelGrader object which uses a model to assign labels to each item in - # # the evaluation. - # # - # # @param input [Array] - # # @param labels [Array] - # # @param model [String] - # # @param name [String] - # # @param passing_labels [Array] - # # @param type [Symbol, :label_model] - # # - # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + # + # @param input [Array] + # @param labels [Array] + # @param model [String] + # @param name [String] + # @param passing_labels [Array] + # @param type [Symbol, :label_model] # An item can either be an input message or an output message. 
module Input @@ -82,14 +78,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -105,13 +97,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -121,11 +109,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -139,11 +124,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -154,11 +136,8 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -180,14 +159,10 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type } - # @!parse - # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] - # # @param role [Symbol, :assistant] - # # - # def initialize(content:, type:, role: :assistant, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type:, role: :assistant) + # @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] + # @param role [Symbol, :assistant] # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#content class Content < OpenAI::Internal::Type::BaseModel @@ -203,13 +178,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] # The type of content, which is always `output_text`. # @@ -219,11 +190,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -235,17 +203,13 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] end end end diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index cfee50cd..e0d2fd84 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -50,16 +50,12 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy] # attr_writer :order_by - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::EvalListParams::Order] - # # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. @@ -69,11 +65,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Evals can be ordered by creation time or last updated time. Use `created_at` for @@ -84,11 +77,8 @@ module OrderBy CREATED_AT = :created_at UPDATED_AT = :updated_at - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index e91b7773..0abeed04 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -58,38 +58,22 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. 
Like:
- # #
- # # - Improve the quality of my chatbot
- # # - See how well my chatbot handles customer support
- # # - Check if o3-mini is better at my usecase than gpt-4o
- # #
- # # @param id [String]
- # # @param created_at [Integer]
- # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
- # # @param metadata [Hash{Symbol=>String}, nil]
- # # @param name [String]
- # # @param share_with_openai [Boolean]
- # # @param testing_criteria [Array<OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader>]
- # # @param object [Symbol, :eval]
- # #
- # def initialize(
- # id:,
- # created_at:,
- # data_source_config:,
- # metadata:,
- # name:,
- # share_with_openai:,
- # testing_criteria:,
- # object: :eval,
- # **
- # )
- # super
- # end
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval)
+ # An Eval object with a data source config and testing criteria. An Eval
+ # represents a task to be done for your LLM integration. Like:
+ #
+ # - Improve the quality of my chatbot
+ # - See how well my chatbot handles customer support
+ # - Check if o3-mini is better at my usecase than gpt-4o
+ #
+ # @param id [String]
+ # @param created_at [Integer]
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
+ # @param metadata [Hash{Symbol=>String}, nil]
+ # @param name [String]
+ # @param share_with_openai [Boolean]
+ # @param testing_criteria [Array<OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader>]
+ # @param object [Symbol, :eval]

 # Configuration of data sources used in runs of the evaluation.
 #
@@ -111,9 +95,8 @@ module DataSourceConfig
 # `item` and `sample` are both defined when using this data source config.
 variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig }

- # @!parse
- # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)]
- # def self.variants; end
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)]
 end

 # A LabelModelGrader object which uses a model to assign labels to each item in
@@ -133,9 +116,8 @@ module TestingCriterion
 # A TextSimilarityGrader object which grades text based on similarity metrics.
variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_retrieve_params.rb b/lib/openai/models/eval_retrieve_params.rb index 06e448ac..e06ffbe4 100644 --- a/lib/openai/models/eval_retrieve_params.rb +++ b/lib/openai/models/eval_retrieve_params.rb @@ -8,12 +8,8 @@ class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 8b12f5ed..b1d14b18 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -58,38 +58,22 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. 
 #
@@ -111,9 +95,8 @@ module DataSourceConfig
 # `item` and `sample` are both defined when using this data source config.
 variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig }

- # @!parse
- # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)]
- # def self.variants; end
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)]
 end

 # A LabelModelGrader object which uses a model to assign labels to each item in
@@ -133,9 +116,8 @@ module TestingCriterion
 # A TextSimilarityGrader object which grades text based on similarity metrics.
 variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader }

- # @!parse
- # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)]
- # def self.variants; end
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)]
 end
 end
 end
diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb
index 54752125..53940b3e 100644
--- a/lib/openai/models/eval_stored_completions_data_source_config.rb
+++ b/lib/openai/models/eval_stored_completions_data_source_config.rb
@@ -27,20 +27,16 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel
 # @return [Hash{Symbol=>String}, nil]
 optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true

- # @!parse
- # # A StoredCompletionsDataSourceConfig which specifies the metadata property of
- # # your stored completions query. This is usually metadata like `usecase=chatbot`
- # # or `prompt-version=v2`, etc. The schema returned by this data source config is
- # # used to defined what variables are available in your evals. `item` and `sample`
- # # are both defined when using this data source config.
- # #
- # # @param schema [Hash{Symbol=>Object}]
- # # @param metadata [Hash{Symbol=>String}, nil]
- # # @param type [Symbol, :stored_completions]
- # #
- # def initialize(schema:, metadata: nil, type: :stored_completions, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(schema:, metadata: nil, type: :stored_completions)
+ # A StoredCompletionsDataSourceConfig which specifies the metadata property of
+ # your stored completions query. This is usually metadata like `usecase=chatbot`
+ # or `prompt-version=v2`, etc. The schema returned by this data source config is
+ # used to define what variables are available in your evals. `item` and `sample`
+ # are both defined when using this data source config.
+ #
+ # @param schema [Hash{Symbol=>Object}]
+ # @param metadata [Hash{Symbol=>String}, nil]
+ # @param type [Symbol, :stored_completions]
 end
 end
 end
diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb
index 57192594..421bb059 100644
--- a/lib/openai/models/eval_string_check_grader.rb
+++ b/lib/openai/models/eval_string_check_grader.rb
@@ -33,19 +33,15 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel
 # @return [Symbol, :string_check]
 required :type, const: :string_check

- # @!parse
- # # A StringCheckGrader object that performs a string comparison between input and
- # # reference using a specified operation.
- # # - # # @param input [String] - # # @param name [String] - # # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] - # # @param reference [String] - # # @param type [Symbol, :string_check] - # # - # def initialize(input:, name:, operation:, reference:, type: :string_check, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, name:, operation:, reference:, type: :string_check) + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. + # + # @param input [String] + # @param name [String] + # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] + # @param reference [String] + # @param type [Symbol, :string_check] # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # @@ -58,11 +54,8 @@ module Operation LIKE = :like ILIKE = :ilike - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 4362ad72..9ff351b4 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -44,19 +44,15 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # A TextSimilarityGrader object which grades text based on similarity metrics. - # # - # # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] - # # @param input [String] - # # @param pass_threshold [Float] - # # @param reference [String] - # # @param name [String] - # # @param type [Symbol, :text_similarity] - # # - # def initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) + # A TextSimilarityGrader object which grades text based on similarity metrics. + # + # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] + # @param input [String] + # @param pass_threshold [Float] + # @param reference [String] + # @param name [String] + # @param type [Symbol, :text_similarity] # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. @@ -77,11 +73,8 @@ module EvaluationMetric ROUGE_L = :rouge_l COSINE = :cosine - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index babe7583..8572bf39 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -29,14 +29,10 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, name: nil, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index b80950ed..08e01385 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -58,38 +58,22 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. # @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. 
variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 212bc40a..56b9b732 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -38,18 +38,14 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # attr_writer :sampling_params - # @!parse - # # A CompletionsRunDataSource object describing a model sampling configuration. - # # - # # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - # # @param model [String] - # # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] - # # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # # - # def initialize(input_messages:, model:, source:, type:, sampling_params: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_messages:, model:, source:, type:, sampling_params: nil) + # A CompletionsRunDataSource object describing a model sampling configuration. 
+ # + # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param model [String] + # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages @@ -78,13 +74,9 @@ class Template < OpenAI::Internal::Type::BaseModel # @return [Symbol, :template] required :type, const: :template - # @!parse - # # @param template [Array] - # # @param type [Symbol, :template] - # # - # def initialize(template:, type: :template, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] module Template extend OpenAI::Internal::Type::Union @@ -108,13 +100,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] end class InputMessage < OpenAI::Internal::Type::BaseModel @@ -138,14 +126,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -162,13 +146,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -178,11 +158,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -196,11 +173,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -211,11 +185,8 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -240,14 +211,10 @@ class OutputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] - # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -264,13 +231,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] # The type of content, which is always `output_text`. # @@ -280,11 +243,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -296,11 +256,8 @@ module Role ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -311,17 +268,13 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)] end end @@ -338,18 +291,13 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @return [Symbol, :item_reference] required :type, const: :item_reference - # @!parse - # # @param item_reference [String] - # # @param type [Symbol, :item_reference] - # # - # def initialize(item_reference:, type: :item_reference, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] end # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -383,13 +331,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_content] required :type, const: :file_content - # @!parse - # # @param content [Array] - # # @param type [Symbol, :file_content] - # # - # def initialize(content:, type: :file_content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -406,13 +350,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :sample - # @!parse - # # @param item [Hash{Symbol=>Object}] - # # @param sample [Hash{Symbol=>Object}] - # # - # def initialize(item:, sample: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] end end @@ -429,13 +369,9 @@ class FileID < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_id] required :type, const: :file_id - # @!parse - # # 
@param id [String] - # # @param type [Symbol, :file_id] - # # - # def initialize(id:, type: :file_id, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] end class StoredCompletions < OpenAI::Internal::Type::BaseModel @@ -480,24 +416,19 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Symbol, :stored_completions] required :type, const: :stored_completions - # @!parse - # # A StoredCompletionsRunDataSource configuration describing a set of filters - # # - # # @param created_after [Integer, nil] - # # @param created_before [Integer, nil] - # # @param limit [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, nil] - # # @param type [Symbol, :stored_completions] - # # - # def initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions) + # A StoredCompletionsRunDataSource configuration describing a set of filters + # + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param limit [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, nil] + # @param type [Symbol, :stored_completions] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] end # The type of run data source. Always `completions`. @@ -508,11 +439,8 @@ module Type COMPLETIONS = :completions - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params @@ -557,15 +485,11 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :top_p - # @!parse - # # @param max_completion_tokens [Integer] - # # @param seed [Integer] - # # @param temperature [Float] - # # @param top_p [Float] - # # - # def initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] end end end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index a3e22ebb..3b06b922 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -15,16 +15,12 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @return [Symbol, :jsonl] required :type, const: :jsonl - # @!parse - # # A JsonlRunDataSource object with that specifies a JSONL file that matches the - # # eval - # # - # # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] - # # @param type [Symbol, :jsonl] - # # - # def initialize(source:, type: :jsonl, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(source:, type: :jsonl) + # A JsonlRunDataSource object with that specifies a JSONL file that matches the + # eval + # + # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + # @param type [Symbol, :jsonl] # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source module Source @@ -50,13 +46,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_content] required :type, const: :file_content - # @!parse - # # @param content [Array] - # # @param type [Symbol, :file_content] - # # - # def initialize(content:, type: :file_content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -73,13 +65,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :sample - # @!parse - # # @param item [Hash{Symbol=>Object}] - # # @param sample [Hash{Symbol=>Object}] - # # - # def initialize(item:, sample: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] end end @@ -96,18 +84,13 @@ class FileID < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_id] required :type, const: :file_id - # @!parse - # # @param id [String] - # # @param type [Symbol, :file_id] - # # - # def initialize(id:, type: :file_id, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> 
void + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] end end end diff --git a/lib/openai/models/evals/eval_api_error.rb b/lib/openai/models/evals/eval_api_error.rb index 11b56e24..4d88b8dc 100644 --- a/lib/openai/models/evals/eval_api_error.rb +++ b/lib/openai/models/evals/eval_api_error.rb @@ -16,15 +16,11 @@ class EvalAPIError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # An object representing an error response from the Eval API. - # # - # # @param code [String] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # An object representing an error response from the Eval API. + # + # @param code [String] + # @param message [String] end end diff --git a/lib/openai/models/evals/run_cancel_params.rb b/lib/openai/models/evals/run_cancel_params.rb index b492403f..958dad3b 100644 --- a/lib/openai/models/evals/run_cancel_params.rb +++ b/lib/openai/models/evals/run_cancel_params.rb @@ -14,13 +14,9 @@ class RunCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 2121c8f8..19775cff 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -96,45 +96,23 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunCancelResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index c9ccea28..3e03a25c 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -36,15 +36,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(data_source:, metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Details about the run's data source. module DataSource @@ -56,9 +52,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index d890f4c5..25892cb6 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -96,45 +96,23 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunCreateResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_delete_params.rb b/lib/openai/models/evals/run_delete_params.rb index 71cdb827..887478b6 100644 --- a/lib/openai/models/evals/run_delete_params.rb +++ b/lib/openai/models/evals/run_delete_params.rb @@ -14,13 +14,9 @@ class RunDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_delete_response.rb b/lib/openai/models/evals/run_delete_response.rb index 68412255..bd1154dd 100644 --- a/lib/openai/models/evals/run_delete_response.rb +++ b/lib/openai/models/evals/run_delete_response.rb @@ -32,14 +32,10 @@ class RunDeleteResponse < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :run_id - # @!parse - # # @param deleted [Boolean] - # # @param object [String] - # # @param run_id [String] - # # - # def initialize(deleted: nil, object: nil, run_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(deleted: nil, object: nil, run_id: nil) + # @param deleted [Boolean] + # @param object [String] + # @param run_id [String] end end end diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index 6dfe1fc2..b5c500f7 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -51,16 +51,12 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status] # attr_writer :status - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. @@ -70,11 +66,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | @@ -88,11 +81,8 @@ module Status CANCELED = :canceled FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index b31795e0..cda98be8 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -96,45 +96,23 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunListResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_retrieve_params.rb b/lib/openai/models/evals/run_retrieve_params.rb index f17f16a7..648fa819 100644 --- a/lib/openai/models/evals/run_retrieve_params.rb +++ b/lib/openai/models/evals/run_retrieve_params.rb @@ -14,13 +14,9 @@ class RunRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 219fbd4c..dabdd0a5 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -96,45 +96,23 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunRetrieveResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index e4b6424f..dc3ba2dd 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -57,17 +57,13 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] # attr_writer :status - # @!parse - # # @param eval_id [String] - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # @param eval_id [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. @@ -77,11 +73,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Filter output items by status. Use `failed` to filter by failed output items or @@ -92,11 +85,8 @@ module Status FAIL = :fail PASS = :pass - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index 1b9744e6..fe4db7ad 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -67,37 +67,19 @@ class OutputItemListResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run output item. 
- # #
- # # @param id [String]
- # # @param created_at [Integer]
- # # @param datasource_item [Hash{Symbol=>Object}]
- # # @param datasource_item_id [Integer]
- # # @param eval_id [String]
- # # @param results [Array<Hash{Symbol=>Object}>]
- # # @param run_id [String]
- # # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample]
- # # @param status [String]
- # # @param object [Symbol, :"eval.run.output_item"]
- # #
- # def initialize(
- # id:,
- # created_at:,
- # datasource_item:,
- # datasource_item_id:,
- # eval_id:,
- # results:,
- # run_id:,
- # sample:,
- # status:,
- # object: :"eval.run.output_item",
- # **
- # )
- # super
- # end
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item")
+ # A schema representing an evaluation run output item.
+ #
+ # @param id [String]
+ # @param created_at [Integer]
+ # @param datasource_item [Hash{Symbol=>Object}]
+ # @param datasource_item_id [Integer]
+ # @param eval_id [String]
+ # @param results [Array<Hash{Symbol=>Object}>]
+ # @param run_id [String]
+ # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample]
+ # @param status [String]
+ # @param object [Symbol, :"eval.run.output_item"]

 # @see OpenAI::Models::Evals::Runs::OutputItemListResponse#sample
 class Sample < OpenAI::Internal::Type::BaseModel
@@ -163,37 +145,19 @@ class Sample < OpenAI::Internal::Type::BaseModel
 # @return [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage]
 required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage }

- # @!parse
- # # A sample containing the input and output of the evaluation run.
- # #
- # # @param error [OpenAI::Models::Evals::EvalAPIError]
- # # @param finish_reason [String]
- # # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input>]
- # # @param max_completion_tokens [Integer]
- # # @param model [String]
- # # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output>]
- # # @param seed [Integer]
- # # @param temperature [Float]
- # # @param top_p [Float]
- # # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage]
- # #
- # def initialize(
- # error:,
- # finish_reason:,
- # input:,
- # max_completion_tokens:,
- # model:,
- # output:,
- # seed:,
- # temperature:,
- # top_p:,
- # usage:,
- # **
- # )
- # super
- # end
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:)
+ # A sample containing the input and output of the evaluation run.
+ #
+ # @param error [OpenAI::Models::Evals::EvalAPIError]
+ # @param finish_reason [String]
+ # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input>]
+ # @param max_completion_tokens [Integer]
+ # @param model [String]
+ # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output>]
+ # @param seed [Integer]
+ # @param temperature [Float]
+ # @param top_p [Float]
+ # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage]

 class Input < OpenAI::Internal::Type::BaseModel
 # @!attribute content
 # The content of the message.
 #
 # @return [String]
 required :content, String

 # @!attribute role
 # The role of the message (e.g. "system", "assistant", "user").
 #
 # @return [String]
 required :role, String

- # @!parse
- # # An input message.
- # #
- # # @param content [String]
- # # @param role [String]
- # #
- # def initialize(content:, role:, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(content:, role:)
+ # An input message.
+ # + # @param content [String] + # @param role [String] end class Output < OpenAI::Internal::Type::BaseModel @@ -240,13 +200,9 @@ class Output < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :role - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # @param content [String] + # @param role [String] end # @see OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample#usage @@ -275,17 +231,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Token usage details for the sample. - # # - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:) + # Token usage details for the sample. + # + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/evals/runs/output_item_retrieve_params.rb b/lib/openai/models/evals/runs/output_item_retrieve_params.rb index e6154bee..599a0b19 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_params.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_params.rb @@ -20,14 +20,10 @@ class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :run_id, String - # @!parse - # # @param eval_id [String] - # # @param run_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, run_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, run_id:, request_options: {}) + # @param eval_id [String] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index 7c7798fb..bf311b56 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -67,37 +67,19 @@ class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run output item. 
- # #
- # # @param id [String]
- # # @param created_at [Integer]
- # # @param datasource_item [Hash{Symbol=>Object}]
- # # @param datasource_item_id [Integer]
- # # @param eval_id [String]
- # # @param results [Array<Hash{Symbol=>Object}>]
- # # @param run_id [String]
- # # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample]
- # # @param status [String]
- # # @param object [Symbol, :"eval.run.output_item"]
- # #
- # def initialize(
- # id:,
- # created_at:,
- # datasource_item:,
- # datasource_item_id:,
- # eval_id:,
- # results:,
- # run_id:,
- # sample:,
- # status:,
- # object: :"eval.run.output_item",
- # **
- # )
- # super
- # end
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item")
+ # A schema representing an evaluation run output item.
+ #
+ # @param id [String]
+ # @param created_at [Integer]
+ # @param datasource_item [Hash{Symbol=>Object}]
+ # @param datasource_item_id [Integer]
+ # @param eval_id [String]
+ # @param results [Array<Hash{Symbol=>Object}>]
+ # @param run_id [String]
+ # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample]
+ # @param status [String]
+ # @param object [Symbol, :"eval.run.output_item"]

 # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse#sample
 class Sample < OpenAI::Internal::Type::BaseModel
@@ -163,37 +145,19 @@ class Sample < OpenAI::Internal::Type::BaseModel
 # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage]
 required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage }

- # @!parse
- # # A sample containing the input and output of the evaluation run.
- # #
- # # @param error [OpenAI::Models::Evals::EvalAPIError]
- # # @param finish_reason [String]
- # # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input>]
- # # @param max_completion_tokens [Integer]
- # # @param model [String]
- # # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output>]
- # # @param seed [Integer]
- # # @param temperature [Float]
- # # @param top_p [Float]
- # # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage]
- # #
- # def initialize(
- # error:,
- # finish_reason:,
- # input:,
- # max_completion_tokens:,
- # model:,
- # output:,
- # seed:,
- # temperature:,
- # top_p:,
- # usage:,
- # **
- # )
- # super
- # end
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:)
+ # A sample containing the input and output of the evaluation run.
+ #
+ # @param error [OpenAI::Models::Evals::EvalAPIError]
+ # @param finish_reason [String]
+ # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input>]
+ # @param max_completion_tokens [Integer]
+ # @param model [String]
+ # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output>]
+ # @param seed [Integer]
+ # @param temperature [Float]
+ # @param top_p [Float]
+ # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage]

 class Input < OpenAI::Internal::Type::BaseModel
 # @!attribute content
 # The content of the message.
 #
 # @return [String]
 required :content, String

 # @!attribute role
 # The role of the message (e.g. "system", "assistant", "user").
 #
 # @return [String]
 required :role, String

- # @!parse
- # # An input message.
- # #
- # # @param content [String]
- # # @param role [String]
- # #
- # def initialize(content:, role:, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(content:, role:)
+ # An input message.
+ # + # @param content [String] + # @param role [String] end class Output < OpenAI::Internal::Type::BaseModel @@ -240,13 +200,9 @@ class Output < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :role - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # @param content [String] + # @param role [String] end # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample#usage @@ -275,17 +231,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Token usage details for the sample. - # # - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:) + # Token usage details for the sample. + # + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 0c37933e..a7354de0 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -13,9 +13,8 @@ module FileChunkingStrategy # This is returned when the chunking strategy is unknown. Typically, this is because the file was indexed before the `chunking_strategy` concept was introduced in the API. variant :other, -> { OpenAI::Models::OtherFileChunkingStrategyObject } - # @!parse - # # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 8a671209..7f5a2487 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -15,9 +15,8 @@ module FileChunkingStrategyParam # Customize your own chunking strategy by setting chunk size and chunk overlap. 
variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObjectParam } - # @!parse - # # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] end end end diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 96b979af..5557fffd 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -8,12 +8,8 @@ class FileContentParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index de4e8770..567ed995 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -23,14 +23,10 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } - # @!parse - # # @param file [Pathname, StringIO] - # # @param purpose [Symbol, OpenAI::Models::FilePurpose] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file:, purpose:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, purpose:, request_options: {}) + # @param file [Pathname, StringIO] + # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index fda911ad..177b99cb 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -8,12 +8,8 @@ class FileDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_deleted.rb b/lib/openai/models/file_deleted.rb index 67ab7b27..28517280 100644 --- a/lib/openai/models/file_deleted.rb +++ b/lib/openai/models/file_deleted.rb @@ -19,14 +19,10 @@ class FileDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file] required :object, const: :file - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :file] - # # - # def initialize(id:, deleted:, object: :file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :file) + # @param id [String] + # 
@param deleted [Boolean] + # @param object [Symbol, :file] end end end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index ccc569bf..5f1be612 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -53,16 +53,12 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :purpose - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::FileListParams::Order] - # # @param purpose [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::FileListParams::Order] + # @param purpose [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -72,11 +68,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index d694613d..fd9c63bb 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -70,35 +70,18 @@ class FileObject < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :status_details - # @!parse - # # The `File` object represents a document that has been uploaded to OpenAI. - # # - # # @param id [String] - # # @param bytes [Integer] - # # @param created_at [Integer] - # # @param filename [String] - # # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] - # # @param status [Symbol, OpenAI::Models::FileObject::Status] - # # @param expires_at [Integer] - # # @param status_details [String] - # # @param object [Symbol, :file] - # # - # def initialize( - # id:, - # bytes:, - # created_at:, - # filename:, - # purpose:, - # status:, - # expires_at: nil, - # status_details: nil, - # object: :file, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) + # The `File` object represents a document that has been uploaded to OpenAI. + # + # @param id [String] + # @param bytes [Integer] + # @param created_at [Integer] + # @param filename [String] + # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] + # @param status [Symbol, OpenAI::Models::FileObject::Status] + # @param expires_at [Integer] + # @param status_details [String] + # @param object [Symbol, :file] # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` @@ -116,11 +99,8 @@ module Purpose FINE_TUNE_RESULTS = :"fine-tune-results" VISION = :vision - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @deprecated @@ -136,11 +116,8 @@ module Status PROCESSED = :processed ERROR = :error - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index c11caef0..0f1ca442 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -16,11 +16,8 @@ module FilePurpose USER_DATA = :user_data EVALS = :evals - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index ec128c3d..6c8c1a70 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -8,12 +8,8 @@ class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb index b569ae0c..ef958285 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -16,13 +16,9 @@ class PermissionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array] required :project_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @param project_ids [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(project_ids:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project_ids:, request_options: {}) + # @param project_ids [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb index 89790c9a..ed9e10fe 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb @@ -30,18 +30,14 @@ class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :project_id, String - # @!parse - # # The `checkpoint.permission` object represents a permission for a fine-tuned - # # model checkpoint. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param project_id [String] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. 
+ # + # @param id [String] + # @param created_at [Integer] + # @param project_id [String] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb index 0b049049..7281cf70 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -10,12 +10,8 @@ class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb index 3a15c6da..1fce739b 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb @@ -24,14 +24,10 @@ class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"checkpoint.permission"] required :object, const: :"checkpoint.permission" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, deleted:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"checkpoint.permission") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index e32c1188..29fffbe9 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -50,16 +50,12 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :project_id - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # # @param project_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] + # @param project_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order in which to retrieve permissions. module Order @@ -68,11 +64,8 @@ module Order ASCENDING = :ascending DESCENDING = :descending - finalize! 
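For enum modules such as `Order` above, the change also drops the `finalize!` call; assuming the generated `values` helper still returns the declared symbols, caller-side usage is unchanged:

    OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order.values
    # => [:ascending, :descending]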
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb index 6f7cadc9..1de51fee 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -32,16 +32,12 @@ class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :last_id, String, nil?: true - # @!parse - # # @param data [Array] - # # @param has_more [Boolean] - # # @param first_id [String, nil] - # # @param last_id [String, nil] - # # @param object [Symbol, :list] - # # - # def initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list) + # @param data [Array] + # @param has_more [Boolean] + # @param first_id [String, nil] + # @param last_id [String, nil] + # @param object [Symbol, :list] class Data < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -68,18 +64,14 @@ class Data < OpenAI::Internal::Type::BaseModel # @return [String] required :project_id, String - # @!parse - # # The `checkpoint.permission` object represents a permission for a fine-tuned - # # model checkpoint. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param project_id [String] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. + # + # @param id [String] + # @param created_at [Integer] + # @param project_id [String] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index b92146f5..27a10624 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -142,56 +142,29 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method] # attr_writer :method_ - # @!parse - # # The `fine_tuning.job` object represents a fine-tuning job that has been created - # # through the API. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] - # # @param fine_tuned_model [String, nil] - # # @param finished_at [Integer, nil] - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] - # # @param model [String] - # # @param organization_id [String] - # # @param result_files [Array] - # # @param seed [Integer] - # # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] - # # @param trained_tokens [Integer, nil] - # # @param training_file [String] - # # @param validation_file [String, nil] - # # @param estimated_finish [Integer, nil] - # # @param integrations [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] - # # @param object [Symbol, :"fine_tuning.job"] - # # - # def initialize( - # id:, - # created_at:, - # error:, - # fine_tuned_model:, - # finished_at:, - # hyperparameters:, - # model:, - # organization_id:, - # result_files:, - # seed:, - # status:, - # trained_tokens:, - # training_file:, - # validation_file:, - # estimated_finish: nil, - # integrations: nil, - # metadata: nil, - # method_: nil, - # object: :"fine_tuning.job", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") + # The `fine_tuning.job` object represents a fine-tuning job that has been created + # through the API. + # + # @param id [String] + # @param created_at [Integer] + # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] + # @param fine_tuned_model [String, nil] + # @param finished_at [Integer, nil] + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] + # @param model [String] + # @param organization_id [String] + # @param result_files [Array] + # @param seed [Integer] + # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] + # @param trained_tokens [Integer, nil] + # @param training_file [String] + # @param validation_file [String, nil] + # @param estimated_finish [Integer, nil] + # @param integrations [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] + # @param object [Symbol, :"fine_tuning.job"] # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel @@ -214,17 +187,13 @@ class Error < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :param, String, nil?: true - # @!parse - # # For fine-tuning jobs that have `failed`, this will contain more information on - # # the cause of the failure. - # # - # # @param code [String] - # # @param message [String] - # # @param param [String, nil] - # # - # def initialize(code:, message:, param:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:) + # For fine-tuning jobs that have `failed`, this will contain more information on + # the cause of the failure. 
+ # + # @param code [String] + # @param message [String] + # @param param [String, nil] end # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters @@ -263,17 +232,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. This value will only be - # # returned when running `supervised` jobs. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. This value will only be + # returned when running `supervised` jobs. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -286,9 +251,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -302,9 +266,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -318,9 +281,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end @@ -338,11 +300,8 @@ module Status FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::FineTuning::FineTuningJob#method_ @@ -377,16 +336,12 @@ class Method < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] # attr_writer :type - # @!parse - # # The method used for fine-tuning. - # # - # # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] - # # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] - # # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] - # # - # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(dpo: nil, supervised: nil, type: nil) + # The method used for fine-tuning. 
+ # + # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] + # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -400,14 +355,10 @@ class Dpo < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the DPO fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the DPO fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -459,17 +410,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param beta [Symbol, :auto, Float] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param beta [Symbol, :auto, Float] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -482,9 +429,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # The beta value for the DPO method. A higher beta value will increase the weight @@ -498,9 +444,8 @@ module Beta variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -514,9 +459,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -530,9 +474,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -550,14 +493,10 @@ class Supervised < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the supervised fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the supervised fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -597,16 +536,12 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -619,9 +554,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -635,9 +569,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -651,9 +584,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -667,11 +599,8 @@ module Type SUPERVISED = :supervised DPO = :dpo - finalize! 
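Union helpers follow suit: the `def self.variants; end` stubs under `@!parse` become `@!method self.variants` directives. A rough introspection sketch, assuming `variants` reports the declared members:

    # Documented as Array(Symbol, :auto, Integer): the literal :auto or an Integer;
    # the exact runtime representation of each member is not shown in this patch.
    OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::BatchSize.variants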
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index a354bb5a..f647416e 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -55,20 +55,16 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] # attr_writer :type - # @!parse - # # Fine-tuning job event object - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] - # # @param message [String] - # # @param data [Object] - # # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] - # # @param object [Symbol, :"fine_tuning.job.event"] - # # - # def initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") + # Fine-tuning job event object + # + # @param id [String] + # @param created_at [Integer] + # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] + # @param message [String] + # @param data [Object] + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] + # @param object [Symbol, :"fine_tuning.job.event"] # The log level of the event. # @@ -80,11 +76,8 @@ module Level WARN = :warn ERROR = :error - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of event. @@ -96,11 +89,8 @@ module Type MESSAGE = :message METRICS = :metrics - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 603de792..b1a0ba4f 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -37,20 +37,16 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tags - # @!parse - # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. - # # - # # @param project [String] - # # @param entity [String, nil] - # # @param name [String, nil] - # # @param tags [Array] - # # - # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. 
+ # + # @param project [String] + # @param entity [String, nil] + # @param name [String, nil] + # @param tags [Array] end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 3554c944..8d9da11d 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -19,13 +19,9 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] required :wandb, -> { OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } - # @!parse - # # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] - # # @param type [Symbol, :wandb] - # # - # def initialize(wandb:, type: :wandb, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(wandb:, type: :wandb) + # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] + # @param type [Symbol, :wandb] end end diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 92eda537..129f8e75 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -9,12 +9,8 @@ class JobCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index df3de345..e27eb79c 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -113,35 +113,17 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :validation_file, String, nil?: true - # @!parse - # # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] - # # @param training_file [String] - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # # @param integrations [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] - # # @param seed [Integer, nil] - # # @param suffix [String, nil] - # # @param validation_file [String, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # training_file:, - # hyperparameters: nil, - # integrations: nil, - # metadata: nil, - # method_: nil, - # seed: nil, - # suffix: nil, - # validation_file: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] + # 
@param training_file [String] + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] + # @param integrations [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] + # @param seed [Integer, nil] + # @param suffix [String, nil] + # @param validation_file [String, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). @@ -158,9 +140,8 @@ module Model variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -209,17 +190,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. This value is now deprecated - # # in favor of `method`, and should be passed in under the `method` parameter. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -232,9 +209,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -248,9 +224,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -264,9 +239,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end @@ -287,13 +261,9 @@ class Integration < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] required :wandb, -> { OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } - # @!parse - # # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] - # # @param type [Symbol, :wandb] - # # - # def initialize(wandb:, type: :wandb, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(wandb:, type: :wandb) + # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] + # @param type [Symbol, :wandb] # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel @@ -330,20 +300,16 @@ class Wandb < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tags - # @!parse - # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. - # # - # # @param project [String] - # # @param entity [String, nil] - # # @param name [String, nil] - # # @param tags [Array] - # # - # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. + # + # @param project [String] + # @param entity [String, nil] + # @param name [String, nil] + # @param tags [Array] end end @@ -378,16 +344,12 @@ class Method < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] # attr_writer :type - # @!parse - # # The method used for fine-tuning. - # # - # # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] - # # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] - # # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] - # # - # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(dpo: nil, supervised: nil, type: nil) + # The method used for fine-tuning. 
+ # + # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] + # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] + # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -402,14 +364,10 @@ class Dpo < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the DPO fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the DPO fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -461,17 +419,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param beta [Symbol, :auto, Float] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param beta [Symbol, :auto, Float] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -484,9 +438,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # The beta value for the DPO method. A higher beta value will increase the weight @@ -500,9 +453,8 @@ module Beta variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -516,9 +468,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -532,9 +483,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -552,14 +502,10 @@ class Supervised < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the supervised fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the supervised fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -599,16 +545,12 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -621,9 +563,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -637,9 +578,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -653,9 +593,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -669,11 +608,8 @@ module Type SUPERVISED = :supervised DPO = :dpo - finalize! 
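The one-line `@!method initialize(...)` above now carries the full keyword signature for the request params. A minimal construction sketch, assuming nested hashes coerce to the documented types; the model name and file ID are illustrative:

    params = OpenAI::Models::FineTuning::JobCreateParams.new(
      model: "gpt-4o-mini",                # or a Model enum symbol
      training_file: "file-abc123",        # illustrative file ID
      hyperparameters: {n_epochs: :auto},  # :auto or an Integer, per NEpochs
      method_: {type: :supervised}
    )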
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index d69f7e6e..d4729ee6 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -29,14 +29,10 @@ class JobListEventsParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index 1ca2c3a3..e2ed96ce 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -36,15 +36,11 @@ class JobListParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, metadata: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index 3fa511ac..b1579373 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -9,12 +9,8 @@ class JobRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index fc91cb4e..f4f1bea1 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -30,14 +30,10 @@ class CheckpointListParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, 
request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index e32a2926..98dbd856 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -48,32 +48,17 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @return [Integer] required :step_number, Integer - # @!parse - # # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a - # # fine-tuning job that is ready to use. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param fine_tuned_model_checkpoint [String] - # # @param fine_tuning_job_id [String] - # # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] - # # @param step_number [Integer] - # # @param object [Symbol, :"fine_tuning.job.checkpoint"] - # # - # def initialize( - # id:, - # created_at:, - # fine_tuned_model_checkpoint:, - # fine_tuning_job_id:, - # metrics:, - # step_number:, - # object: :"fine_tuning.job.checkpoint", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, fine_tuned_model_checkpoint:, fine_tuning_job_id:, metrics:, step_number:, object: :"fine_tuning.job.checkpoint") + # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a + # fine-tuning job that is ready to use. + # + # @param id [String] + # @param created_at [Integer] + # @param fine_tuned_model_checkpoint [String] + # @param fine_tuning_job_id [String] + # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] + # @param step_number [Integer] + # @param object [Symbol, :"fine_tuning.job.checkpoint"] # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel @@ -140,31 +125,16 @@ class Metrics < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :valid_mean_token_accuracy - # @!parse - # # Metrics at the step number during the fine-tuning job. - # # - # # @param full_valid_loss [Float] - # # @param full_valid_mean_token_accuracy [Float] - # # @param step [Float] - # # @param train_loss [Float] - # # @param train_mean_token_accuracy [Float] - # # @param valid_loss [Float] - # # @param valid_mean_token_accuracy [Float] - # # - # def initialize( - # full_valid_loss: nil, - # full_valid_mean_token_accuracy: nil, - # step: nil, - # train_loss: nil, - # train_mean_token_accuracy: nil, - # valid_loss: nil, - # valid_mean_token_accuracy: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(full_valid_loss: nil, full_valid_mean_token_accuracy: nil, step: nil, train_loss: nil, train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil) + # Metrics at the step number during the fine-tuning job. 
+ # + # @param full_valid_loss [Float] + # @param full_valid_mean_token_accuracy [Float] + # @param step [Float] + # @param train_loss [Float] + # @param train_mean_token_accuracy [Float] + # @param valid_loss [Float] + # @param valid_mean_token_accuracy [Float] end end end diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 7bd7f7d5..a37c41bc 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -47,15 +47,11 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true - # @!parse - # # @param name [String] - # # @param description [String] - # # @param parameters [Hash{Symbol=>Object}] - # # @param strict [Boolean, nil] - # # - # def initialize(name:, description: nil, parameters: nil, strict: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, description: nil, parameters: nil, strict: nil) + # @param name [String] + # @param description [String] + # @param parameters [Hash{Symbol=>Object}] + # @param strict [Boolean, nil] end end end diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index dddf163d..7d54b273 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -35,16 +35,12 @@ class Image < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :url - # @!parse - # # Represents the url or the content of an image generated by the OpenAI API. - # # - # # @param b64_json [String] - # # @param revised_prompt [String] - # # @param url [String] - # # - # def initialize(b64_json: nil, revised_prompt: nil, url: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) + # Represents the url or the content of an image generated by the OpenAI API. 
+ # + # @param b64_json [String] + # @param revised_prompt [String] + # @param url [String] end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index b51d4723..d2e09d8f 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -58,18 +58,14 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param image [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @param image [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -81,9 +77,8 @@ module Model # The model to use for image generation. Only `dall-e-2` is supported at this time. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or @@ -95,11 +90,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -111,11 +103,8 @@ module Size SIZE_512X512 = :"512x512" SIZE_1024X1024 = :"1024x1024" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index c2487b6f..247e370b 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -74,33 +74,16 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param image [Pathname, StringIO] - # # @param prompt [String] - # # @param mask [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # image:, - # prompt:, - # mask: nil, - # model: nil, - # n: nil, - # response_format: nil, - # size: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @param image [Pathname, StringIO] + # @param prompt [String] + # @param mask [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -112,9 +95,8 @@ module Model # The model to use for image generation. Only `dall-e-2` is supported at this time. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or @@ -126,11 +108,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -142,11 +121,8 @@ module Size SIZE_512X512 = :"512x512" SIZE_1024X1024 = :"1024x1024" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index bce729c4..43b701b6 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -77,33 +77,16 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param prompt [String] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] - # # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] - # # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # prompt:, - # model: nil, - # n: nil, - # quality: nil, - # response_format: nil, - # size: nil, - # style: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @param prompt [String] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. module Model @@ -114,9 +97,8 @@ module Model # The model to use for image generation. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The quality of the image that will be generated. `hd` creates images with finer @@ -128,11 +110,8 @@ module Quality STANDARD = :standard HD = :hd - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The format in which the generated images are returned. Must be one of `url` or @@ -144,11 +123,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -163,11 +139,8 @@ module Size SIZE_1792X1024 = :"1792x1024" SIZE_1024X1792 = :"1024x1792" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -180,11 +153,8 @@ module Style VIVID = :vivid NATURAL = :natural - finalize! 
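The image params get the same condensed `@!method` signatures. A usage sketch with enum symbols drawn from the `Quality`, `Size`, and `Style` modules and from `ImageModel`; the prompt is illustrative:

    params = OpenAI::Models::ImageGenerateParams.new(
      prompt: "a watercolor painting of a fox",  # illustrative
      model: :"dall-e-3",
      quality: :hd,
      size: :"1024x1024",
      style: :natural
    )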
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index 83fd2f56..4b6ca64c 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -8,11 +8,8 @@ module ImageModel DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index 108b9120..8816ee07 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -14,13 +14,9 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel # @return [Array] required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] } - # @!parse - # # @param created [Integer] - # # @param data [Array] - # # - # def initialize(created:, data:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(created:, data:) + # @param created [Integer] + # @param data [Array] end end end diff --git a/lib/openai/models/model.rb b/lib/openai/models/model.rb index a3d362fb..e5893b7c 100644 --- a/lib/openai/models/model.rb +++ b/lib/openai/models/model.rb @@ -28,17 +28,13 @@ class Model < OpenAI::Internal::Type::BaseModel # @return [String] required :owned_by, String - # @!parse - # # Describes an OpenAI model offering that can be used with the API. - # # - # # @param id [String] - # # @param created [Integer] - # # @param owned_by [String] - # # @param object [Symbol, :model] - # # - # def initialize(id:, created:, owned_by:, object: :model, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created:, owned_by:, object: :model) + # Describes an OpenAI model offering that can be used with the API. 
+ # + # @param id [String] + # @param created [Integer] + # @param owned_by [String] + # @param object [Symbol, :model] end end end diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index 3f4036d0..f288614b 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -8,12 +8,8 @@ class ModelDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/model_deleted.rb b/lib/openai/models/model_deleted.rb index 82476c3e..612bc76c 100644 --- a/lib/openai/models/model_deleted.rb +++ b/lib/openai/models/model_deleted.rb @@ -19,14 +19,10 @@ class ModelDeleted < OpenAI::Internal::Type::BaseModel # @return [String] required :object, String - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [String] - # # - # def initialize(id:, deleted:, object:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object:) + # @param id [String] + # @param deleted [Boolean] + # @param object [String] end end end diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index c7c05067..52c1d783 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -8,12 +8,8 @@ class ModelListParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index b1384fc8..c2d43bc2 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -8,12 +8,8 @@ class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 6abb069c..834ced17 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -27,15 +27,11 @@ class Moderation < OpenAI::Internal::Type::BaseModel # @return [Boolean] required :flagged, OpenAI::Internal::Type::Boolean - # @!parse - # # @param categories [OpenAI::Models::Moderation::Categories] - # # @param 
category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] - # # @param category_scores [OpenAI::Models::Moderation::CategoryScores] - # # @param flagged [Boolean] - # # - # def initialize(categories:, category_applied_input_types:, category_scores:, flagged:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) + # @param categories [OpenAI::Models::Moderation::Categories] + # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] + # @param category_scores [OpenAI::Models::Moderation::CategoryScores] + # @param flagged [Boolean] # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel @@ -134,43 +130,22 @@ class Categories < OpenAI::Internal::Type::BaseModel # @return [Boolean] required :violence_graphic, OpenAI::Internal::Type::Boolean, api_name: :"violence/graphic" - # @!parse - # # A list of the categories, and whether they are flagged or not. - # # - # # @param harassment [Boolean] - # # @param harassment_threatening [Boolean] - # # @param hate [Boolean] - # # @param hate_threatening [Boolean] - # # @param illicit [Boolean, nil] - # # @param illicit_violent [Boolean, nil] - # # @param self_harm [Boolean] - # # @param self_harm_instructions [Boolean] - # # @param self_harm_intent [Boolean] - # # @param sexual [Boolean] - # # @param sexual_minors [Boolean] - # # @param violence [Boolean] - # # @param violence_graphic [Boolean] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories, and whether they are flagged or not. + # + # @param harassment [Boolean] + # @param harassment_threatening [Boolean] + # @param hate [Boolean] + # @param hate_threatening [Boolean] + # @param illicit [Boolean, nil] + # @param illicit_violent [Boolean, nil] + # @param self_harm [Boolean] + # @param self_harm_instructions [Boolean] + # @param self_harm_intent [Boolean] + # @param sexual [Boolean] + # @param sexual_minors [Boolean] + # @param violence [Boolean] + # @param violence_graphic [Boolean] end # @see OpenAI::Models::Moderation#category_applied_input_types @@ -274,54 +249,30 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] }, api_name: :"violence/graphic" - # @!parse - # # A list of the categories along with the input type(s) that the score applies to. 
- # # - # # @param harassment [Array] - # # @param harassment_threatening [Array] - # # @param hate [Array] - # # @param hate_threatening [Array] - # # @param illicit [Array] - # # @param illicit_violent [Array] - # # @param self_harm [Array] - # # @param self_harm_instructions [Array] - # # @param self_harm_intent [Array] - # # @param sexual [Array] - # # @param sexual_minors [Array] - # # @param violence [Array] - # # @param violence_graphic [Array] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories along with the input type(s) that the score applies to. + # + # @param harassment [Array] + # @param harassment_threatening [Array] + # @param hate [Array] + # @param hate_threatening [Array] + # @param illicit [Array] + # @param illicit_violent [Array] + # @param self_harm [Array] + # @param self_harm_instructions [Array] + # @param self_harm_intent [Array] + # @param sexual [Array] + # @param sexual_minors [Array] + # @param violence [Array] + # @param violence_graphic [Array] module Harassment extend OpenAI::Internal::Type::Enum TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module HarassmentThreatening @@ -329,11 +280,8 @@ module HarassmentThreatening TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Hate @@ -341,11 +289,8 @@ module Hate TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module HateThreatening @@ -353,11 +298,8 @@ module HateThreatening TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Illicit @@ -365,11 +307,8 @@ module Illicit TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module IllicitViolent @@ -377,11 +316,8 @@ module IllicitViolent TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarm @@ -390,11 +326,8 @@ module SelfHarm TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarmInstruction @@ -403,11 +336,8 @@ module SelfHarmInstruction TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarmIntent @@ -416,11 +346,8 @@ module SelfHarmIntent TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Sexual @@ -429,11 +356,8 @@ module Sexual TEXT = :text IMAGE = :image - finalize! 
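The `api_name:` keyword in the declarations above pairs a Ruby-friendly attribute name with the slash-containing key the moderation API uses on the wire (e.g. `violence_graphic` vs `"violence/graphic"`). A self-contained sketch of that renaming idea, not the SDK's actual serializer; the `hate` passthrough key is just an example:

    # Maps Ruby attribute names to wire keys; purely illustrative.
    API_NAMES = { violence_graphic: :"violence/graphic" }.freeze

    def to_wire(attrs)
      attrs.to_h { |key, value| [API_NAMES.fetch(key, key), value] }
    end

    to_wire(violence_graphic: true, hate: false)
    # => {:"violence/graphic"=>true, :hate=>false}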
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SexualMinor @@ -441,11 +365,8 @@ module SexualMinor TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Violence @@ -454,11 +375,8 @@ module Violence TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module ViolenceGraphic @@ -467,11 +385,8 @@ module ViolenceGraphic TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -555,43 +470,22 @@ class CategoryScores < OpenAI::Internal::Type::BaseModel # @return [Float] required :violence_graphic, Float, api_name: :"violence/graphic" - # @!parse - # # A list of the categories along with their scores as predicted by model. - # # - # # @param harassment [Float] - # # @param harassment_threatening [Float] - # # @param hate [Float] - # # @param hate_threatening [Float] - # # @param illicit [Float] - # # @param illicit_violent [Float] - # # @param self_harm [Float] - # # @param self_harm_instructions [Float] - # # @param self_harm_intent [Float] - # # @param sexual [Float] - # # @param sexual_minors [Float] - # # @param violence [Float] - # # @param violence_graphic [Float] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories along with their scores as predicted by model. 
+ # + # @param harassment [Float] + # @param harassment_threatening [Float] + # @param hate [Float] + # @param hate_threatening [Float] + # @param illicit [Float] + # @param illicit_violent [Float] + # @param self_harm [Float] + # @param self_harm_instructions [Float] + # @param self_harm_intent [Float] + # @param sexual [Float] + # @param sexual_minors [Float] + # @param violence [Float] + # @param violence_graphic [Float] end end end diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 41230634..6bc57485 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -28,14 +28,10 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String, Symbol, OpenAI::Models::ModerationModel] # attr_writer :model - # @!parse - # # @param input [String, Array, Array] - # # @param model [String, Symbol, OpenAI::Models::ModerationModel] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model: nil, request_options: {}) + # @param input [String, Array, Array] + # @param model [String, Symbol, OpenAI::Models::ModerationModel] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. @@ -51,9 +47,8 @@ module Input # An array of multi-modal inputs to the moderation model. variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray } - # @!parse - # # @return [Array(String, Array, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -75,9 +70,8 @@ module Model # available models [here](https://platform.openai.com/docs/models#moderation). variant enum: -> { OpenAI::Models::ModerationModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] end end end diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 922bca1e..45575319 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -22,16 +22,12 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation] } - # @!parse - # # Represents if a given text input is potentially harmful. - # # - # # @param id [String] - # # @param model [String] - # # @param results [Array] - # # - # def initialize(id:, model:, results:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, model:, results:) + # Represents if a given text input is potentially harmful. 
+ # + # @param id [String] + # @param model [String] + # @param results [Array] end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index 25b0835a..f9fbd274 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -15,15 +15,11 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # An object describing an image to classify. - # # - # # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # An object describing an image to classify. + # + # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] + # @param type [Symbol, :image_url] # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -33,14 +29,10 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # Contains either an image URL or a data URL for a base64 encoded image. - # # - # # @param url [String] - # # - # def initialize(url:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:) + # Contains either an image URL or a data URL for a base64 encoded image. + # + # @param url [String] end end end diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index fa606daa..02d78035 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -10,11 +10,8 @@ module ModerationModel TEXT_MODERATION_LATEST = :"text-moderation-latest" TEXT_MODERATION_STABLE = :"text-moderation-stable" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index ca2979e7..32f5923b 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -14,9 +14,8 @@ module ModerationMultiModalInput # An object describing text to classify. variant :text, -> { OpenAI::Models::ModerationTextInput } - # @!parse - # # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] end end end diff --git a/lib/openai/models/moderation_text_input.rb b/lib/openai/models/moderation_text_input.rb index ec5fe7cb..2feaf23d 100644 --- a/lib/openai/models/moderation_text_input.rb +++ b/lib/openai/models/moderation_text_input.rb @@ -15,15 +15,11 @@ class ModerationTextInput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # An object describing text to classify. - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # An object describing text to classify. 
+ # + # @param text [String] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/other_file_chunking_strategy_object.rb b/lib/openai/models/other_file_chunking_strategy_object.rb index acb54ef8..862ae35e 100644 --- a/lib/openai/models/other_file_chunking_strategy_object.rb +++ b/lib/openai/models/other_file_chunking_strategy_object.rb @@ -9,16 +9,12 @@ class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :other] required :type, const: :other - # @!parse - # # This is returned when the chunking strategy is unknown. Typically, this is - # # because the file was indexed before the `chunking_strategy` concept was - # # introduced in the API. - # # - # # @param type [Symbol, :other] - # # - # def initialize(type: :other, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :other) + # This is returned when the chunking strategy is unknown. Typically, this is + # because the file was indexed before the `chunking_strategy` concept was + # introduced in the API. + # + # @param type [Symbol, :other] end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index a5fa2bc4..5aa982ea 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -32,19 +32,15 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] optional :summary, enum: -> { OpenAI::Models::Reasoning::Summary }, nil?: true - # @!parse - # # **o-series models only** - # # - # # Configuration options for - # # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - # # - # # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] - # # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] - # # - # def initialize(effort: nil, generate_summary: nil, summary: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(effort: nil, generate_summary: nil, summary: nil) + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] + # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] # @deprecated # @@ -62,11 +58,8 @@ module GenerateSummary CONCISE = :concise DETAILED = :detailed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # A summary of the reasoning performed by the model. This can be useful for @@ -81,11 +74,8 @@ module Summary CONCISE = :concise DETAILED = :detailed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index a7bb035e..f9990508 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -15,11 +15,8 @@ module ReasoningEffort MEDIUM = :medium HIGH = :high - finalize! 
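Because the `@!method initialize` directives read as ordinary keyword signatures, callers construct these models with plain keyword arguments. A usage sketch based on the signatures documented above; it assumes the gem is loaded and is not verified against a live API:

    require "openai"

    # `effort` accepts the ReasoningEffort values documented above
    # (:low, :medium, :high).
    reasoning = OpenAI::Models::Reasoning.new(effort: :high)
    reasoning.effort # expected: :high, via the attribute reader declared above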
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/response_format_json_object.rb b/lib/openai/models/response_format_json_object.rb index 611237a3..52fc23ae 100644 --- a/lib/openai/models/response_format_json_object.rb +++ b/lib/openai/models/response_format_json_object.rb @@ -9,16 +9,12 @@ class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_object] required :type, const: :json_object - # @!parse - # # JSON object response format. An older method of generating JSON responses. Using - # # `json_schema` is recommended for models that support it. Note that the model - # # will not generate JSON without a system or user message instructing it to do so. - # # - # # @param type [Symbol, :json_object] - # # - # def initialize(type: :json_object, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :json_object) + # JSON object response format. An older method of generating JSON responses. Using + # `json_schema` is recommended for models that support it. Note that the model + # will not generate JSON without a system or user message instructing it to do so. + # + # @param type [Symbol, :json_object] end end end diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 56f2e3b1..0e9e47fb 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -15,17 +15,13 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_schema] required :type, const: :json_schema - # @!parse - # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). - # # - # # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] - # # @param type [Symbol, :json_schema] - # # - # def initialize(json_schema:, type: :json_schema, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(json_schema:, type: :json_schema) + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # + # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] + # @param type [Symbol, :json_schema] # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel @@ -68,17 +64,13 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true - # @!parse - # # Structured Outputs configuration options, including a JSON Schema. - # # - # # @param name [String] - # # @param description [String] - # # @param schema [Hash{Symbol=>Object}] - # # @param strict [Boolean, nil] - # # - # def initialize(name:, description: nil, schema: nil, strict: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, description: nil, schema: nil, strict: nil) + # Structured Outputs configuration options, including a JSON Schema. 
+ # + # @param name [String] + # @param description [String] + # @param schema [Hash{Symbol=>Object}] + # @param strict [Boolean, nil] end end end diff --git a/lib/openai/models/response_format_text.rb b/lib/openai/models/response_format_text.rb index 609679d6..8101bcca 100644 --- a/lib/openai/models/response_format_text.rb +++ b/lib/openai/models/response_format_text.rb @@ -9,14 +9,10 @@ class ResponseFormatText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # Default response format. Used to generate text responses. - # # - # # @param type [Symbol, :text] - # # - # def initialize(type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :text) + # Default response format. Used to generate text responses. + # + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 3af1eabe..41a7499d 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -28,18 +28,14 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_use_preview] required :type, const: :computer_use_preview - # @!parse - # # A tool that controls a virtual computer. Learn more about the - # # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). - # # - # # @param display_height [Float] - # # @param display_width [Float] - # # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] - # # @param type [Symbol, :computer_use_preview] - # # - # def initialize(display_height:, display_width:, environment:, type: :computer_use_preview, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(display_height:, display_width:, environment:, type: :computer_use_preview) + # A tool that controls a virtual computer. Learn more about the + # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). + # + # @param display_height [Float] + # @param display_width [Float] + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] + # @param type [Symbol, :computer_use_preview] # The type of computer environment to control. # @@ -52,11 +48,8 @@ module Environment UBUNTU = :ubuntu BROWSER = :browser - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 68cbd033..9814987c 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -28,20 +28,16 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] # attr_writer :type - # @!parse - # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. Messages with the - # # `assistant` role are presumed to have been generated by the model in previous - # # interactions. 
- # # - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] - # # - # def initialize(content:, role:, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] + # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. @@ -57,9 +53,8 @@ module Content # types. variant -> { OpenAI::Models::Responses::ResponseInputMessageContentList } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -74,11 +69,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the message input. Always `message`. @@ -89,11 +81,8 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 8fd1295e..8065a25a 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -47,20 +47,16 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions] # attr_writer :ranking_options - # @!parse - # # A tool that searches for relevant content from uploaded files. Learn more about - # # the - # # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - # # - # # @param vector_store_ids [Array] - # # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] - # # @param type [Symbol, :file_search] - # # - # def initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) + # A tool that searches for relevant content from uploaded files. Learn more about + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # + # @param vector_store_ids [Array] + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] + # @param type [Symbol, :file_search] # A filter to apply based on file attributes. 
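The same constructor convention applies to tool definitions. A hedged sketch using the `@!method initialize` signature documented for `FileSearchTool` above; the vector store id is a placeholder, and nothing here performs a request:

    require "openai"

    tool = OpenAI::Models::Responses::FileSearchTool.new(
      vector_store_ids: ["vs_example123"], # placeholder id
      max_num_results: 5
    )
    tool.type # expected: :file_search (the documented default)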
# @@ -74,9 +70,8 @@ module Filters # Combine multiple filters using `and` or `or`. variant -> { OpenAI::Models::CompoundFilter } - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end # @see OpenAI::Models::Responses::FileSearchTool#ranking_options @@ -103,15 +98,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :score_threshold - # @!parse - # # Ranking options for search. - # # - # # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker: nil, score_threshold: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker: nil, score_threshold: nil) + # Ranking options for search. + # + # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] + # @param score_threshold [Float] # The ranker to use for the file search. # @@ -122,11 +113,8 @@ module Ranker AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 8b2433dc..d0cf09ab 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -35,20 +35,16 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :description, String, nil?: true - # @!parse - # # Defines a function in your own code the model can choose to call. Learn more - # # about - # # [function calling](https://platform.openai.com/docs/guides/function-calling). - # # - # # @param name [String] - # # @param parameters [Hash{Symbol=>Object}] - # # @param strict [Boolean] - # # @param description [String, nil] - # # @param type [Symbol, :function] - # # - # def initialize(name:, parameters:, strict:, description: nil, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) + # Defines a function in your own code the model can choose to call. Learn more + # about + # [function calling](https://platform.openai.com/docs/guides/function-calling). 
+ # + # @param name [String] + # @param parameters [Hash{Symbol=>Object}] + # @param strict [Boolean] + # @param description [String, nil] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index 7a4d136a..eabfd424 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -65,17 +65,13 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param include [Array] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param include [Array] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order to return the input items in. Default is `asc`. # @@ -87,11 +83,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 0ed05b08..48793f7b 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -241,61 +241,30 @@ class Response < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param id [String] - # # @param created_at [Float] - # # @param error [OpenAI::Models::Responses::ResponseError, nil] - # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # # @param output [Array] - # # @param parallel_tool_calls [Boolean] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param max_output_tokens [Integer, nil] - # # @param previous_response_id [String, nil] - # # @param reasoning [OpenAI::Models::Reasoning, nil] - # # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] - # # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] - # # @param usage [OpenAI::Models::Responses::ResponseUsage] - # # @param user [String] - # # @param object [Symbol, :response] - # # - # def initialize( - # id:, - # created_at:, - # error:, - # incomplete_details:, - # instructions:, - # 
metadata:, - # model:, - # output:, - # parallel_tool_calls:, - # temperature:, - # tool_choice:, - # tools:, - # top_p:, - # max_output_tokens: nil, - # previous_response_id: nil, - # reasoning: nil, - # service_tier: nil, - # status: nil, - # text: nil, - # truncation: nil, - # usage: nil, - # user: nil, - # object: :response, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @param id [String] + # @param created_at [Float] + # @param error [OpenAI::Models::Responses::ResponseError, nil] + # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] + # @param output [Array] + # @param parallel_tool_calls [Boolean] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + # @param tools [Array] + # @param top_p [Float, nil] + # @param max_output_tokens [Integer, nil] + # @param previous_response_id [String, nil] + # @param reasoning [OpenAI::Models::Reasoning, nil] + # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] + # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] + # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] + # @param usage [OpenAI::Models::Responses::ResponseUsage] + # @param user [String] + # @param object [Symbol, :response] # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel @@ -309,14 +278,10 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] # attr_writer :reason - # @!parse - # # Details about why the response is incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] - # # - # def initialize(reason: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason: nil) + # Details about why the response is incomplete. + # + # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] # The reason why the response is incomplete. # @@ -327,11 +292,8 @@ module Reason MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -360,9 +322,8 @@ module ToolChoice # Use this option to force the model to call a specific function. 
variant -> { OpenAI::Models::Responses::ToolChoiceFunction } - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -391,11 +352,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The truncation strategy to use for the model response. @@ -413,11 +371,8 @@ module Truncation AUTO = :auto DISABLED = :disabled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index 72fd5781..45ede8ad 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -16,15 +16,11 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.delta"] required :type, const: :"response.audio.delta" - # @!parse - # # Emitted when there is a partial audio response. - # # - # # @param delta [String] - # # @param type [Symbol, :"response.audio.delta"] - # # - # def initialize(delta:, type: :"response.audio.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, type: :"response.audio.delta") + # Emitted when there is a partial audio response. + # + # @param delta [String] + # @param type [Symbol, :"response.audio.delta"] end end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index 6dd92e68..359f11b3 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -10,14 +10,10 @@ class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.done"] required :type, const: :"response.audio.done" - # @!parse - # # Emitted when the audio response is complete. - # # - # # @param type [Symbol, :"response.audio.done"] - # # - # def initialize(type: :"response.audio.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :"response.audio.done") + # Emitted when the audio response is complete. + # + # @param type [Symbol, :"response.audio.done"] end end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 062c5a76..48f70a9d 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -16,15 +16,11 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.transcript.delta"] required :type, const: :"response.audio.transcript.delta" - # @!parse - # # Emitted when there is a partial transcript of audio. 
- # # - # # @param delta [String] - # # @param type [Symbol, :"response.audio.transcript.delta"] - # # - # def initialize(delta:, type: :"response.audio.transcript.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, type: :"response.audio.transcript.delta") + # Emitted when there is a partial transcript of audio. + # + # @param delta [String] + # @param type [Symbol, :"response.audio.transcript.delta"] end end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 98399c6e..5abf997b 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -10,14 +10,10 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.transcript.done"] required :type, const: :"response.audio.transcript.done" - # @!parse - # # Emitted when the full audio transcript is completed. - # # - # # @param type [Symbol, :"response.audio.transcript.done"] - # # - # def initialize(type: :"response.audio.transcript.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :"response.audio.transcript.done") + # Emitted when the full audio transcript is completed. + # + # @param type [Symbol, :"response.audio.transcript.done"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 2fa390fe..934cb5f9 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call.code.delta"] required :type, const: :"response.code_interpreter_call.code.delta" - # @!parse - # # Emitted when a partial code snippet is added by the code interpreter. - # # - # # @param delta [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.code.delta"] - # # - # def initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") + # Emitted when a partial code snippet is added by the code interpreter. + # + # @param delta [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.code.delta"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index caa291b9..8b10fde8 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [Symbol, :"response.code_interpreter_call.code.done"] required :type, const: :"response.code_interpreter_call.code.done" - # @!parse - # # Emitted when code snippet output is finalized by the code interpreter. 
- # # - # # @param code [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.code.done"] - # # - # def initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") + # Emitted when code snippet output is finalized by the code interpreter. + # + # @param code [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.code.done"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 0e13a5e9..c0e507cd 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call.completed"] required :type, const: :"response.code_interpreter_call.completed" - # @!parse - # # Emitted when the code interpreter call is completed. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.completed"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") + # Emitted when the code interpreter call is completed. + # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.completed"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 66aa4fce..3277ceef 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @return [Symbol, :"response.code_interpreter_call.in_progress"] required :type, const: :"response.code_interpreter_call.in_progress" - # @!parse - # # Emitted when a code interpreter call is in progress. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.in_progress"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + # Emitted when a code interpreter call is in progress. 
+ # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 7a87f2c2..3a361629 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @return [Symbol, :"response.code_interpreter_call.interpreting"] required :type, const: :"response.code_interpreter_call.interpreting" - # @!parse - # # Emitted when the code interpreter is actively interpreting the code snippet. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.interpreting"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + # Emitted when the code interpreter is actively interpreting the code snippet. + # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.interpreting"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 9d7260d9..cbab454f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -35,18 +35,14 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter_call] required :type, const: :code_interpreter_call - # @!parse - # # A tool call to run code. - # # - # # @param id [String] - # # @param code [String] - # # @param results [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] - # # @param type [Symbol, :code_interpreter_call] - # # - # def initialize(id:, code:, results:, status:, type: :code_interpreter_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) + # A tool call to run code. + # + # @param id [String] + # @param code [String] + # @param results [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] + # @param type [Symbol, :code_interpreter_call] # The output of a code interpreter tool call that is text. module Result @@ -73,15 +69,11 @@ class Logs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!parse - # # The output of a code interpreter tool call that is text. 
- # # - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(logs:, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(logs:, type: :logs) + # The output of a code interpreter tool call that is text. + # + # @param logs [String] + # @param type [Symbol, :logs] end class Files < OpenAI::Internal::Type::BaseModel @@ -97,15 +89,11 @@ class Files < OpenAI::Internal::Type::BaseModel # @return [Symbol, :files] required :type, const: :files - # @!parse - # # The output of a code interpreter tool call that is a file. - # # - # # @param files [Array] - # # @param type [Symbol, :files] - # # - # def initialize(files:, type: :files, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(files:, type: :files) + # The output of a code interpreter tool call that is a file. + # + # @param files [Array] + # @param type [Symbol, :files] class File < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -120,19 +108,14 @@ class File < OpenAI::Internal::Type::BaseModel # @return [String] required :mime_type, String - # @!parse - # # @param file_id [String] - # # @param mime_type [String] - # # - # def initialize(file_id:, mime_type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, mime_type:) + # @param file_id [String] + # @param mime_type [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] end # The status of the code interpreter tool call. @@ -145,11 +128,8 @@ module Status INTERPRETING = :interpreting COMPLETED = :completed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 883a7de9..509663df 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -16,15 +16,11 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.completed"] required :type, const: :"response.completed" - # @!parse - # # Emitted when the model response is complete. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.completed"] - # # - # def initialize(response:, type: :"response.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.completed") + # Emitted when the model response is complete. 
+ # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.completed"] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index c8d6e68a..b84db1e5 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -42,21 +42,17 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] required :type, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Type } - # @!parse - # # A tool call to a computer use tool. See the - # # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) - # # for more information. - # # - # # @param id [String] - # # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - # # @param call_id [String] - # # @param pending_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] - # # - # def initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) + # A tool call to a computer use tool. See the + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) + # for more information. + # + # @param id [String] + # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] + # @param call_id [String] + # @param pending_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] # A click action. # @@ -120,17 +116,13 @@ class Click < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A click action. 
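A construction sketch for the computer tool call documented above, using the no-argument `Screenshot` action defined later in this file (IDs hypothetical):

    tool_call = OpenAI::Models::Responses::ResponseComputerToolCall.new(
      id: "cu_123",        # hypothetical
      action: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot.new,
      call_id: "call_456",
      pending_safety_checks: [],
      status: :completed,  # Status.values: [:in_progress, :completed, :incomplete]
      type: :computer_call
    )
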
- # # - # # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :click] - # # - # def initialize(button:, x:, y_:, type: :click, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(button:, x:, y_:, type: :click) + # A click action. + # + # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :click] # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -145,11 +137,8 @@ module Button BACK = :back FORWARD = :forward - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -173,16 +162,12 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A double click action. - # # - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :double_click] - # # - # def initialize(x:, y_:, type: :double_click, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:, type: :double_click) + # A double click action. + # + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :double_click] end class Drag < OpenAI::Internal::Type::BaseModel @@ -208,15 +193,11 @@ class Drag < OpenAI::Internal::Type::BaseModel # @return [Symbol, :drag] required :type, const: :drag - # @!parse - # # A drag action. - # # - # # @param path [Array] - # # @param type [Symbol, :drag] - # # - # def initialize(path:, type: :drag, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(path:, type: :drag) + # A drag action. + # + # @param path [Array] + # @param type [Symbol, :drag] class Path < OpenAI::Internal::Type::BaseModel # @!attribute x @@ -231,15 +212,11 @@ class Path < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A series of x/y coordinate pairs in the drag path. - # # - # # @param x [Integer] - # # @param y_ [Integer] - # # - # def initialize(x:, y_:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:) + # A series of x/y coordinate pairs in the drag path. + # + # @param x [Integer] + # @param y_ [Integer] end end @@ -258,15 +235,11 @@ class Keypress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :keypress] required :type, const: :keypress - # @!parse - # # A collection of keypresses the model would like to perform. - # # - # # @param keys [Array] - # # @param type [Symbol, :keypress] - # # - # def initialize(keys:, type: :keypress, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(keys:, type: :keypress) + # A collection of keypresses the model would like to perform. + # + # @param keys [Array] + # @param type [Symbol, :keypress] end class Move < OpenAI::Internal::Type::BaseModel @@ -289,16 +262,12 @@ class Move < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A mouse move action. 
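Two action sketches against the signatures above; note that `y_` is the Ruby-side name, serialized as `y` via `api_name: :y` (coordinates hypothetical):

    click = OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click.new(
      button: :left, # any of Button.values: :left, :right, :wheel, :back, :forward
      x: 120,
      y_: 45
    )

    drag = OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag.new(
      path: [
        OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path.new(x: 0, y_: 0),
        OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path.new(x: 80, y_: 40)
      ]
    )
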
- # # - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :move] - # # - # def initialize(x:, y_:, type: :move, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:, type: :move) + # A mouse move action. + # + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :move] end class Screenshot < OpenAI::Internal::Type::BaseModel @@ -309,14 +278,10 @@ class Screenshot < OpenAI::Internal::Type::BaseModel # @return [Symbol, :screenshot] required :type, const: :screenshot - # @!parse - # # A screenshot action. - # # - # # @param type [Symbol, :screenshot] - # # - # def initialize(type: :screenshot, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :screenshot) + # A screenshot action. + # + # @param type [Symbol, :screenshot] end class Scroll < OpenAI::Internal::Type::BaseModel @@ -351,18 +316,14 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A scroll action. - # # - # # @param scroll_x [Integer] - # # @param scroll_y [Integer] - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :scroll] - # # - # def initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) + # A scroll action. + # + # @param scroll_x [Integer] + # @param scroll_y [Integer] + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :scroll] end class Type < OpenAI::Internal::Type::BaseModel @@ -379,15 +340,11 @@ class Type < OpenAI::Internal::Type::BaseModel # @return [Symbol, :type] required :type, const: :type - # @!parse - # # An action to type in text. - # # - # # @param text [String] - # # @param type [Symbol, :type] - # # - # def initialize(text:, type: :type, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :type) + # An action to type in text. + # + # @param text [String] + # @param type [Symbol, :type] end class Wait < OpenAI::Internal::Type::BaseModel @@ -398,19 +355,14 @@ class Wait < OpenAI::Internal::Type::BaseModel # @return [Symbol, :wait] required :type, const: :wait - # @!parse - # # A wait action. - # # - # # @param type [Symbol, :wait] - # # - # def initialize(type: :wait, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :wait) + # A wait action. 
+ # + # @param type [Symbol, :wait] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel @@ -432,16 +384,12 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -455,11 +403,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the computer call. Always `computer_call`. @@ -470,11 +415,8 @@ module Type COMPUTER_CALL = :computer_call - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 2cc50149..2a10286a 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -51,17 +51,13 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] # attr_writer :status - # @!parse - # # @param id [String] - # # @param call_id [String] - # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # # @param acknowledged_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] - # # @param type [Symbol, :computer_call_output] - # # - # def initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + # @param id [String] + # @param call_id [String] + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + # @param acknowledged_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] + # @param type [Symbol, :computer_call_output] class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -82,16 +78,12 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the message input. One of `in_progress`, `completed`, or @@ -105,11 +97,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 9cfc543a..e65f4fc5 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -31,16 +31,12 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # # @return [String] # attr_writer :image_url - # @!parse - # # A computer screenshot image used with the computer use tool. 
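A sketch of the output item documented above, paired with the screenshot model whose fields close out this hunk (all values hypothetical; `file_id:` is the alternative to `image_url:`):

    screenshot = OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot.new(
      image_url: "data:image/png;base64,iVBORw0..." # hypothetical data URL
    )
    output_item = OpenAI::Models::Responses::ResponseComputerToolCallOutputItem.new(
      id: "cuo_123",       # hypothetical
      call_id: "call_456",
      output: screenshot
    )
    output_item.type # => :computer_call_output
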
- # # - # # @param file_id [String] - # # @param image_url [String] - # # @param type [Symbol, :computer_screenshot] - # # - # def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) + # A computer screenshot image used with the computer use tool. + # + # @param file_id [String] + # @param image_url [String] + # @param type [Symbol, :computer_screenshot] end end end diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 3c306ddb..ed9ff454 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -22,9 +22,8 @@ module ResponseContent # A refusal from the model. variant -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 2d78d1ff..8399e487 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -34,18 +34,14 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.content_part.added"] required :type, const: :"response.content_part.added" - # @!parse - # # Emitted when a new content part is added. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # # @param type [Symbol, :"response.content_part.added"] - # # - # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") + # Emitted when a new content part is added. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + # @param type [Symbol, :"response.content_part.added"] # The content part that was added. # @@ -61,9 +57,8 @@ module Part # A refusal from the model. 
variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index d28334f9..945ef949 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -34,18 +34,14 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.content_part.done"] required :type, const: :"response.content_part.done" - # @!parse - # # Emitted when a content part is done. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # # @param type [Symbol, :"response.content_part.done"] - # # - # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") + # Emitted when a content part is done. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + # @param type [Symbol, :"response.content_part.done"] # The content part that is done. # @@ -61,9 +57,8 @@ module Part # A refusal from the model. 
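Since `part` is a union of `ResponseOutputText` and `ResponseOutputRefusal`, consumers can branch with Ruby pattern matching. The `event` here is a hypothetical, already-parsed `ResponseContentPartDoneEvent`, and the readers on each part are assumed from the model names:

    case event.part
    in OpenAI::Models::Responses::ResponseOutputText => text
      puts text.text                            # assumes a `text` reader
    in OpenAI::Models::Responses::ResponseOutputRefusal => refusal
      warn "model refused: #{refusal.refusal}"  # assumes a `refusal` reader
    end
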
variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 1969ec3a..3eeb57fc 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -225,53 +225,26 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # # @param include [Array, nil] - # # @param instructions [String, nil] - # # @param max_output_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param parallel_tool_calls [Boolean, nil] - # # @param previous_response_id [String, nil] - # # @param reasoning [OpenAI::Models::Reasoning, nil] - # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] - # # @param store [Boolean, nil] - # # @param temperature [Float, nil] - # # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # input:, - # model:, - # include: nil, - # instructions: nil, - # max_output_tokens: nil, - # metadata: nil, - # parallel_tool_calls: nil, - # previous_response_id: nil, - # reasoning: nil, - # service_tier: nil, - # store: nil, - # temperature: nil, - # text: nil, - # tool_choice: nil, - # tools: nil, - # top_p: nil, - # truncation: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @param input [String, Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] + # @param include [Array, nil] + # @param instructions [String, nil] + # @param max_output_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param parallel_tool_calls [Boolean, nil] + # @param previous_response_id [String, nil] + # @param reasoning [OpenAI::Models::Reasoning, nil] + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] + # @param store [Boolean, nil] + # @param temperature [Float, nil] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, 
OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + # @param tools [Array] + # @param top_p [Float, nil] + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Text, image, or file inputs to the model, used to generate a response. # @@ -293,9 +266,8 @@ module Input # different content types. variant -> { OpenAI::Models::Responses::ResponseInput } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -322,11 +294,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # How the model should select which tool (or tools) to use when generating a @@ -352,9 +321,8 @@ module ToolChoice # Use this option to force the model to call a specific function. variant -> { OpenAI::Models::Responses::ToolChoiceFunction } - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # The truncation strategy to use for the model response. @@ -370,11 +338,8 @@ module Truncation AUTO = :auto DISABLED = :disabled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index 2eb916c1..ad5cf6d0 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -16,15 +16,11 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.created"] required :type, const: :"response.created" - # @!parse - # # An event that is emitted when a response is created. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.created"] - # # - # def initialize(response:, type: :"response.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.created") + # An event that is emitted when a response is created. 
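A sketch of the request-params model per the flattened `@!method initialize` signature above; only `input:` and `model:` are required, and the enum arguments come from the modules that follow it (model name hypothetical):

    params = OpenAI::Models::Responses::ResponseCreateParams.new(
      input: "Write a haiku about Ruby", # the String variant of Input
      model: "gpt-4o",                   # hypothetical model string
      service_tier: :flex,               # ServiceTier.values: [:auto, :default, :flex]
      truncation: :auto                  # Truncation.values: [:auto, :disabled]
    )
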
+ # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.created"] end end end diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index 516661d0..e3d6735f 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -9,12 +9,8 @@ class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index a42f876f..3727a834 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -16,15 +16,11 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # An error object returned when the model fails to generate a Response. - # # - # # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # An error object returned when the model fails to generate a Response. + # + # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] + # @param message [String] # The error code for the response. # @@ -51,11 +47,8 @@ module Code FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image IMAGE_FILE_NOT_FOUND = :image_file_not_found - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 5558c8c0..d3bfa55e 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -28,17 +28,13 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :error] required :type, const: :error - # @!parse - # # Emitted when an error occurs. - # # - # # @param code [String, nil] - # # @param message [String] - # # @param param [String, nil] - # # @param type [Symbol, :error] - # # - # def initialize(code:, message:, param:, type: :error, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:, type: :error) + # Emitted when an error occurs. 
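A sketch against the error model documented above (the message is hypothetical; `code:` must be one of `Code.values`):

    error = OpenAI::Models::Responses::ResponseError.new(
      code: :image_file_not_found,
      message: "The referenced image file could not be found."
    )
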
+ # + # @param code [String, nil] + # @param message [String] + # @param param [String, nil] + # @param type [Symbol, :error] end end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index aa99a457..cbed3cb7 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -16,15 +16,11 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.failed"] required :type, const: :"response.failed" - # @!parse - # # An event that is emitted when a response fails. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.failed"] - # # - # def initialize(response:, type: :"response.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.failed") + # An event that is emitted when a response fails. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.failed"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 458771a8..ffb7c68d 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.completed"] required :type, const: :"response.file_search_call.completed" - # @!parse - # # Emitted when a file search call is completed (results found). - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.completed"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") + # Emitted when a file search call is completed (results found). + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.completed"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 4671a7e6..72fafbc7 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.in_progress"] required :type, const: :"response.file_search_call.in_progress" - # @!parse - # # Emitted when a file search call is initiated. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.in_progress"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") + # Emitted when a file search call is initiated. 
+ # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 880e33e1..07c1186a 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.searching"] required :type, const: :"response.file_search_call.searching" - # @!parse - # # Emitted when a file search is currently searching. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.searching"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.searching", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") + # Emitted when a file search is currently searching. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.searching"] end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 7048c7a4..43a865fd 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -37,20 +37,16 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result] }, nil?: true - # @!parse - # # The results of a file search tool call. See the - # # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) - # # for more information. - # # - # # @param id [String] - # # @param queries [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] - # # @param results [Array, nil] - # # @param type [Symbol, :file_search_call] - # # - # def initialize(id:, queries:, status:, results: nil, type: :file_search_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) + # The results of a file search tool call. See the + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) + # for more information. + # + # @param id [String] + # @param queries [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] + # @param results [Array, nil] + # @param type [Symbol, :file_search_call] # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, @@ -65,11 +61,8 @@ module Status INCOMPLETE = :incomplete FAILED = :failed - finalize! 
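A sketch for the file search tool call documented above; `results:` defaults to `nil` and is nullable (ID and query hypothetical):

    search = OpenAI::Models::Responses::ResponseFileSearchToolCall.new(
      id: "fs_123",
      queries: ["quarterly revenue"],
      status: :searching # Status.values: [:in_progress, :searching, :incomplete, :failed]
    )
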
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Result < OpenAI::Internal::Type::BaseModel @@ -125,16 +118,12 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param file_id [String] - # # @param filename [String] - # # @param score [Float] - # # @param text [String] - # # - # def initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param file_id [String] + # @param filename [String] + # @param score [Float] + # @param text [String] module Attribute extend OpenAI::Internal::Type::Union @@ -145,9 +134,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index bdb5e64f..6b73450e 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -34,9 +34,8 @@ module ResponseFormatTextConfig # to do so. variant :json_object, -> { OpenAI::Models::ResponseFormatJSONObject } - # @!parse - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] end end end diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 3cbc68bd..c6271a6a 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -45,20 +45,16 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true - # @!parse - # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). - # # - # # @param name [String] - # # @param schema [Hash{Symbol=>Object}] - # # @param description [String] - # # @param strict [Boolean, nil] - # # @param type [Symbol, :json_schema] - # # - # def initialize(name:, schema:, description: nil, strict: nil, type: :json_schema, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # @param name [String] + # @param schema [Hash{Symbol=>Object}] + # @param description [String] + # @param strict [Boolean, nil] + # @param type [Symbol, :json_schema] end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 6005ba84..abd84e9c 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -28,17 +28,13 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @return [Symbol, :"response.function_call_arguments.delta"] required :type, const: :"response.function_call_arguments.delta" - # @!parse - # # Emitted when there is a partial function-call arguments delta. - # # - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.function_call_arguments.delta"] - # # - # def initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") + # Emitted when there is a partial function-call arguments delta. + # + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.function_call_arguments.delta"] end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index a9280f46..ba402026 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -27,17 +27,13 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.function_call_arguments.done"] required :type, const: :"response.function_call_arguments.done" - # @!parse - # # Emitted when function-call arguments are finalized. - # # - # # @param arguments [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.function_call_arguments.done"] - # # - # def initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done") + # Emitted when function-call arguments are finalized. + # + # @param arguments [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.function_call_arguments.done"] end end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 5bf53133..25210a43 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -49,21 +49,17 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] # attr_writer :status - # @!parse - # # A tool call to run a function. 
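A sketch of the JSON-schema text format from earlier in this hunk (schema contents hypothetical):

    fmt = OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig.new(
      name: "haiku",
      schema: {
        type: "object",
        properties: {lines: {type: "array", items: {type: "string"}}},
        required: ["lines"]
      },
      strict: true
    )
    fmt.type # => :json_schema
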
See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. - # # - # # @param arguments [String] - # # @param call_id [String] - # # @param name [String] - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] - # # @param type [Symbol, :function_call] - # # - # def initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. + # + # @param arguments [String] + # @param call_id [String] + # @param name [String] + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] + # @param type [Symbol, :function_call] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -76,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index 17e2ceff..2244965f 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -10,16 +10,12 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @return [String] required :id, String - # @!parse - # # A tool call to run a function. See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. 
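A sketch against the function tool call signature above (function name and arguments hypothetical; `arguments` is documented as a String, shown here carrying JSON):

    fn_call = OpenAI::Models::Responses::ResponseFunctionToolCall.new(
      arguments: '{"location":"Berlin"}',
      call_id: "call_789",
      name: "get_weather"
    )
    fn_call.type # => :function_call
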
+ # + # @param id [String] end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 2123bd97..6eb9b8a9 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -39,16 +39,12 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] # attr_writer :status - # @!parse - # # @param id [String] - # # @param call_id [String] - # # @param output [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] - # # @param type [Symbol, :function_call_output] - # # - # def initialize(id:, call_id:, output:, status: nil, type: :function_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) + # @param id [String] + # @param call_id [String] + # @param output [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] + # @param type [Symbol, :function_call_output] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -61,11 +57,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index e6d9a2e8..3529a019 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -22,18 +22,14 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :web_search_call] required :type, const: :web_search_call - # @!parse - # # The results of a web search tool call. See the - # # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for - # # more information. - # # - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] - # # @param type [Symbol, :web_search_call] - # # - # def initialize(id:, status:, type: :web_search_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, status:, type: :web_search_call) + # The results of a web search tool call. See the + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for + # more information. + # + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] + # @param type [Symbol, :web_search_call] # The status of the web search tool call. # @@ -46,11 +42,8 @@ module Status COMPLETED = :completed FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index a26c5659..0259d443 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -16,15 +16,11 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.in_progress"] required :type, const: :"response.in_progress" - # @!parse - # # Emitted when the response is in progress. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.in_progress"] - # # - # def initialize(response:, type: :"response.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.in_progress") + # Emitted when the response is in progress. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.in_progress"] end end end diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 71a49423..2f90f277 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -18,11 +18,8 @@ module ResponseIncludable MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index 08fb757a..5536418d 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -16,15 +16,11 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.incomplete"] required :type, const: :"response.incomplete" - # @!parse - # # An event that is emitted when a response finishes as incomplete. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.incomplete"] - # # - # def initialize(response:, type: :"response.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.incomplete") + # An event that is emitted when a response finishes as incomplete. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.incomplete"] end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 90dcdee6..f03f775a 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -22,16 +22,12 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_audio] required :type, const: :input_audio - # @!parse - # # An audio input to the model. 
- # # - # # @param data [String] - # # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] - # # @param type [Symbol, :input_audio] - # # - # def initialize(data:, format_:, type: :input_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, format_:, type: :input_audio) + # An audio input to the model. + # + # @param data [String] + # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] + # @param type [Symbol, :input_audio] # The format of the audio data. Currently supported formats are `mp3` and `wav`. # @@ -42,11 +38,8 @@ module Format MP3 = :mp3 WAV = :wav - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 901a5159..806d9c26 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -18,9 +18,8 @@ module ResponseInputContent # A file input to the model. variant :input_file, -> { OpenAI::Models::Responses::ResponseInputFile } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] end end end diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 0e2a3c73..4b186be4 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -40,17 +40,13 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :filename - # @!parse - # # A file input to the model. - # # - # # @param file_data [String] - # # @param file_id [String] - # # @param filename [String] - # # @param type [Symbol, :input_file] - # # - # def initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) + # A file input to the model. + # + # @param file_data [String] + # @param file_id [String] + # @param filename [String] + # @param type [Symbol, :input_file] end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index c508cf8a..6a07ce69 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -30,18 +30,14 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :image_url, String, nil?: true - # @!parse - # # An image input to the model. Learn about - # # [image inputs](https://platform.openai.com/docs/guides/vision). 
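An input-audio sketch per the signature above; the trailing underscore in `format_` presumably keeps the attribute from shadowing `Kernel#format` (the data string is hypothetical base64):

    audio = OpenAI::Models::Responses::ResponseInputAudio.new(
      data: "UklGRiQAAABXQVZF...", # hypothetical base64-encoded audio bytes
      format_: :wav               # Format.values: [:mp3, :wav]
    )
    audio.type # => :input_audio
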
- # # - # # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] - # # @param file_id [String, nil] - # # @param image_url [String, nil] - # # @param type [Symbol, :input_image] - # # - # def initialize(detail:, file_id: nil, image_url: nil, type: :input_image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) + # An image input to the model. Learn about + # [image inputs](https://platform.openai.com/docs/guides/vision). + # + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] + # @param file_id [String, nil] + # @param image_url [String, nil] + # @param type [Symbol, :input_image] # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. @@ -54,11 +50,8 @@ module Detail LOW = :low AUTO = :auto - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index db0e26d2..d70b58bc 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -93,19 +93,15 @@ class Message < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] # attr_writer :type - # @!parse - # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. - # # - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] - # # - # def initialize(content:, role:, status: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, status: nil, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. + # + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] # The role of the message input. One of `user`, `system`, or `developer`. # @@ -117,11 +113,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -135,11 +128,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the message input. Always set to `message`. @@ -150,11 +140,8 @@ module Type MESSAGE = :message - finalize! 
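A sketch combining the image input and message models documented in this hunk; `ResponseInputImage` is one of the `ResponseInputContent` variants accepted in `content:` (URL hypothetical):

    image = OpenAI::Models::Responses::ResponseInputImage.new(
      detail: :auto, # Detail.values: [:high, :low, :auto]
      image_url: "https://example.com/cat.png"
    )
    message = OpenAI::Models::Responses::ResponseInputItem::Message.new(
      content: [image],
      role: :user    # Role.values: [:user, :system, :developer]
    )
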
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -210,19 +197,15 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] # attr_writer :status - # @!parse - # # The output of a computer tool call. - # # - # # @param call_id [String] - # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # # @param id [String] - # # @param acknowledged_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] - # # @param type [Symbol, :computer_call_output] - # # - # def initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + # The output of a computer tool call. + # + # @param call_id [String] + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + # @param id [String] + # @param acknowledged_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] + # @param type [Symbol, :computer_call_output] class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -243,16 +226,12 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the message input. One of `in_progress`, `completed`, or @@ -266,11 +245,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -315,18 +291,14 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] # attr_writer :status - # @!parse - # # The output of a function tool call. - # # - # # @param call_id [String] - # # @param output [String] - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] - # # @param type [Symbol, :function_call_output] - # # - # def initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) + # The output of a function tool call. + # + # @param call_id [String] + # @param output [String] + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] + # @param type [Symbol, :function_call_output] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
# Populated when items are returned via API. @@ -339,11 +311,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -360,20 +329,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @return [Symbol, :item_reference] required :type, const: :item_reference - # @!parse - # # An internal identifier for an item to reference. - # # - # # @param id [String] - # # @param type [Symbol, :item_reference] - # # - # def initialize(id:, type: :item_reference, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, type: :item_reference) + # An internal identifier for an item to reference. + # + # @param id [String] + # @param type [Symbol, :item_reference] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index b270e74d..2c3f05d2 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -45,16 +45,12 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] # attr_writer :type - # @!parse - # # @param id [String] - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] - # # - # def initialize(id:, content:, role:, status: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, content:, role:, status: nil, type: nil) + # @param id [String] + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] # The role 
of the message input. One of `user`, `system`, or `developer`.
#
@@ -66,11 +62,8 @@ module Role
SYSTEM = :system
DEVELOPER = :developer

-        finalize!
-
- # @!parse
- # # @return [Array]
- # def self.values; end
+ # @!method self.values
+ # @return [Array]
end

# The status of the item. One of `in_progress`, `completed`, or `incomplete`.
@@ -84,11 +77,8 @@ module Status
COMPLETED = :completed
INCOMPLETE = :incomplete

-        finalize!
-
- # @!parse
- # # @return [Array]
- # def self.values; end
+ # @!method self.values
+ # @return [Array]
end

# The type of the message input. Always set to `message`.
@@ -99,11 +89,8 @@ module Type

MESSAGE = :message

-        finalize!
-
- # @!parse
- # # @return [Array]
- # def self.values; end
+ # @!method self.values
+ # @return [Array]
end
end
end
diff --git a/lib/openai/models/responses/response_input_text.rb b/lib/openai/models/responses/response_input_text.rb
index d8ed6f2c..ad65b3f2 100644
--- a/lib/openai/models/responses/response_input_text.rb
+++ b/lib/openai/models/responses/response_input_text.rb
@@ -16,15 +16,11 @@ class ResponseInputText < OpenAI::Internal::Type::BaseModel
# @return [Symbol, :input_text]
required :type, const: :input_text

- # @!parse
- # # A text input to the model.
- # #
- # # @param text [String]
- # # @param type [Symbol, :input_text]
- # #
- # def initialize(text:, type: :input_text, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(text:, type: :input_text)
+ # A text input to the model.
+ #
+ # @param text [String]
+ # @param type [Symbol, :input_text]
end
end
end
diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb
index 3fe0074e..42dcb8be 100644
--- a/lib/openai/models/responses/response_item.rb
+++ b/lib/openai/models/responses/response_item.rb
@@ -34,9 +34,8 @@ module ResponseItem
variant :function_call_output,
-> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem }

- # @!parse
- # # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)]
- # def self.variants; end
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)]
end
end
end
diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb
index d5f234c7..2b6c1e85 100644
--- a/lib/openai/models/responses/response_item_list.rb
+++ b/lib/openai/models/responses/response_item_list.rb
@@ -34,18 +34,14 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel
# @return [Symbol, :list]
required :object, const: :list

- # @!parse
- # # A list of Response items.
- # # - # # @param data [Array] - # # @param first_id [String] - # # @param has_more [Boolean] - # # @param last_id [String] - # # @param object [Symbol, :list] - # # - # def initialize(data:, first_id:, has_more:, last_id:, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) + # A list of Response items. + # + # @param data [Array] + # @param first_id [String] + # @param has_more [Boolean] + # @param last_id [String] + # @param object [Symbol, :list] end end diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index d8fb8c61..55675501 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -22,16 +22,12 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_audio] required :type, const: :output_audio - # @!parse - # # An audio output from the model. - # # - # # @param data [String] - # # @param transcript [String] - # # @param type [Symbol, :output_audio] - # # - # def initialize(data:, transcript:, type: :output_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, transcript:, type: :output_audio) + # An audio output from the model. + # + # @param data [String] + # @param transcript [String] + # @param type [Symbol, :output_audio] end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 5751bb93..83a8c4db 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -32,9 +32,8 @@ module ResponseOutputItem # a response. variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 6dfbd4d9..2893bec8 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -22,16 +22,12 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_item.added"] required :type, const: :"response.output_item.added" - # @!parse - # # Emitted when a new output item is added. 
- # # - # # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_item.added"] - # # - # def initialize(item:, output_index:, type: :"response.output_item.added", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, output_index:, type: :"response.output_item.added") + # Emitted when a new output item is added. + # + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_item.added"] end end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 904c8eae..a8ff9471 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -22,16 +22,12 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_item.done"] required :type, const: :"response.output_item.done" - # @!parse - # # Emitted when an output item is marked done. - # # - # # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_item.done"] - # # - # def initialize(item:, output_index:, type: :"response.output_item.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, output_index:, type: :"response.output_item.done") + # Emitted when an output item is marked done. + # + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_item.done"] end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index b46f43d3..107a4798 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -36,18 +36,14 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message] required :type, const: :message - # @!parse - # # An output message from the model. 
- # # - # # @param id [String] - # # @param content [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] - # # @param role [Symbol, :assistant] - # # @param type [Symbol, :message] - # # - # def initialize(id:, content:, status:, role: :assistant, type: :message, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, content:, status:, role: :assistant, type: :message) + # An output message from the model. + # + # @param id [String] + # @param content [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] + # @param role [Symbol, :assistant] + # @param type [Symbol, :message] # A text output from the model. module Content @@ -61,9 +57,8 @@ module Content # A refusal from the model. variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end # The status of the message input. One of `in_progress`, `completed`, or @@ -77,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb index 0e050ce2..0c2ccce2 100644 --- a/lib/openai/models/responses/response_output_refusal.rb +++ b/lib/openai/models/responses/response_output_refusal.rb @@ -16,15 +16,11 @@ class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # A refusal from the model. - # # - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # A refusal from the model. + # + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 9766a767..a857410f 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -23,16 +23,12 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_text] required :type, const: :output_text - # @!parse - # # A text output from the model. - # # - # # @param annotations [Array] - # # @param text [String] - # # @param type [Symbol, :output_text] - # # - # def initialize(annotations:, text:, type: :output_text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations:, text:, type: :output_text) + # A text output from the model. + # + # @param annotations [Array] + # @param text [String] + # @param type [Symbol, :output_text] # A citation to a file. module Annotation @@ -68,16 +64,12 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation to a file. 
- # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_citation] - # # - # def initialize(file_id:, index:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_citation) + # A citation to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_citation] end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -111,18 +103,14 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A citation for a web resource used to generate a model response. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # @param type [Symbol, :url_citation] - # # - # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # A citation for a web resource used to generate a model response. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] + # @param type [Symbol, :url_citation] end class FilePath < OpenAI::Internal::Type::BaseModel @@ -144,21 +132,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A path to a file. - # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_path] - # # - # def initialize(file_id:, index:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_path) + # A path to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_path] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 3ae31538..fb8d5db6 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -34,18 +34,14 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] # attr_writer :status - # @!parse - # # A description of the chain of thought used by a reasoning model while generating - # # a response. 
- # # - # # @param id [String] - # # @param summary [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] - # # @param type [Symbol, :reasoning] - # # - # def initialize(id:, summary:, status: nil, type: :reasoning, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, summary:, status: nil, type: :reasoning) + # A description of the chain of thought used by a reasoning model while generating + # a response. + # + # @param id [String] + # @param summary [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] + # @param type [Symbol, :reasoning] class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -60,13 +56,9 @@ class Summary < OpenAI::Internal::Type::BaseModel # @return [Symbol, :summary_text] required :type, const: :summary_text - # @!parse - # # @param text [String] - # # @param type [Symbol, :summary_text] - # # - # def initialize(text:, type: :summary_text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :summary_text) + # @param text [String] + # @param type [Symbol, :summary_text] end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -80,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index 35247a7d..ea97e622 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -34,18 +34,14 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.refusal.delta"] required :type, const: :"response.refusal.delta" - # @!parse - # # Emitted when there is a partial refusal text. - # # - # # @param content_index [Integer] - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.refusal.delta"] - # # - # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") + # Emitted when there is a partial refusal text. + # + # @param content_index [Integer] + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.refusal.delta"] end end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index 7f6cd16b..ee7b4b55 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -34,18 +34,14 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.refusal.done"] required :type, const: :"response.refusal.done" - # @!parse - # # Emitted when refusal text is finalized. 
- # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param refusal [String] - # # @param type [Symbol, :"response.refusal.done"] - # # - # def initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") + # Emitted when refusal text is finalized. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param refusal [String] + # @param type [Symbol, :"response.refusal.done"] end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 1253ccfa..1b64f738 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -21,13 +21,9 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :include - # @!parse - # # @param include [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(include: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(include: nil, request_options: {}) + # @param include [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index da96c2e8..eb628952 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -13,11 +13,8 @@ module ResponseStatus IN_PROGRESS = :in_progress INCOMPLETE = :incomplete - finalize! 
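
The `@!method initialize(...)` directives document the keyword interface of the generated initializers. A construction sketch for the refusal-done event documented above, assuming the generated initializer accepts the documented keywords (the ID and text are invented for illustration; `type` is omitted because the `const:` declaration supplies it):

    require "openai"

    event = OpenAI::Models::Responses::ResponseRefusalDoneEvent.new(
      content_index: 0,
      item_id: "msg_abc123",  # invented example ID
      output_index: 0,
      refusal: "I cannot help with that."
    )
    event.type # => :"response.refusal.done", from the const: default
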
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index db86c410..d6c7fee7 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -121,9 +121,8 @@ module ResponseStreamEvent variant :"response.web_search_call.searching", -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, 
OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index bc38658a..28c8abee 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -41,29 +41,15 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.annotation.added"] required :type, const: :"response.output_text.annotation.added" - # @!parse - # # Emitted when a text annotation is added. - # # - # # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - # # @param annotation_index [Integer] - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_text.annotation.added"] - # # - # def initialize( - # annotation:, - # annotation_index:, - # content_index:, - # item_id:, - # output_index:, - # type: :"response.output_text.annotation.added", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") + # Emitted when a text annotation is added. + # + # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + # @param annotation_index [Integer] + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_text.annotation.added"] # A citation to a file. # @@ -104,16 +90,12 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation to a file. 
- # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_citation] - # # - # def initialize(file_id:, index:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_citation) + # A citation to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_citation] end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -147,18 +129,14 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A citation for a web resource used to generate a model response. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # @param type [Symbol, :url_citation] - # # - # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # A citation for a web resource used to generate a model response. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] + # @param type [Symbol, :url_citation] end class FilePath < OpenAI::Internal::Type::BaseModel @@ -180,21 +158,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A path to a file. - # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_path] - # # - # def initialize(file_id:, index:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_path) + # A path to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_path] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 7901b8fb..cd0bf58f 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -26,18 +26,14 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] # attr_writer :format_ - # @!parse - # # Configuration options for a text response from the model. Can be plain text or - # # structured JSON data. 
Learn more: - # # - # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - # # - # # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] - # # - # def initialize(format_: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(format_: nil) + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 8b6c4b75..9f7744b8 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -34,18 +34,14 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.delta"] required :type, const: :"response.output_text.delta" - # @!parse - # # Emitted when there is an additional text delta. - # # - # # @param content_index [Integer] - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_text.delta"] - # # - # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") + # Emitted when there is an additional text delta. + # + # @param content_index [Integer] + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_text.delta"] end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 45a3267a..cea42efc 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -34,18 +34,14 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.done"] required :type, const: :"response.output_text.done" - # @!parse - # # Emitted when text content is finalized. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param text [String] - # # @param type [Symbol, :"response.output_text.done"] - # # - # def initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") + # Emitted when text content is finalized. 
+ # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param text [String] + # @param type [Symbol, :"response.output_text.done"] end end end diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 2a2ecd8f..f8a7799f 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -34,19 +34,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Represents token usage details including input tokens, output tokens, a - # # breakdown of output tokens, and the total tokens used. - # # - # # @param input_tokens [Integer] - # # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] - # # @param output_tokens [Integer] - # # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] - # # @param total_tokens [Integer] - # # - # def initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:) + # Represents token usage details including input tokens, output tokens, a + # breakdown of output tokens, and the total tokens used. + # + # @param input_tokens [Integer] + # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] + # @param output_tokens [Integer] + # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] + # @param total_tokens [Integer] # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel @@ -57,14 +53,10 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer] required :cached_tokens, Integer - # @!parse - # # A detailed breakdown of the input tokens. - # # - # # @param cached_tokens [Integer] - # # - # def initialize(cached_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:) + # A detailed breakdown of the input tokens. + # + # @param cached_tokens [Integer] end # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details @@ -75,14 +67,10 @@ class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer] required :reasoning_tokens, Integer - # @!parse - # # A detailed breakdown of the output tokens. - # # - # # @param reasoning_tokens [Integer] - # # - # def initialize(reasoning_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reasoning_tokens:) + # A detailed breakdown of the output tokens. 
+ # + # @param reasoning_tokens [Integer] end end end diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 70e0dc19..59cdab36 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.completed"] required :type, const: :"response.web_search_call.completed" - # @!parse - # # Emitted when a web search call is completed. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.completed"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") + # Emitted when a web search call is completed. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.completed"] end end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index fc8f006a..6820c819 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.in_progress"] required :type, const: :"response.web_search_call.in_progress" - # @!parse - # # Emitted when a web search call is initiated. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.in_progress"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") + # Emitted when a web search call is initiated. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index 39d6ae9f..efa04758 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.searching"] required :type, const: :"response.web_search_call.searching" - # @!parse - # # Emitted when a web search call is executing. 
- # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.searching"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.searching", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") + # Emitted when a web search call is executing. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.searching"] end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index e9be0652..5d053a5e 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -27,9 +27,8 @@ module Tool # Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). variant -> { OpenAI::Models::Responses::WebSearchTool } - # @!parse - # # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_function.rb b/lib/openai/models/responses/tool_choice_function.rb index ca4d89b6..47c1d3ef 100644 --- a/lib/openai/models/responses/tool_choice_function.rb +++ b/lib/openai/models/responses/tool_choice_function.rb @@ -16,15 +16,11 @@ class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Use this option to force the model to call a specific function. - # # - # # @param name [String] - # # @param type [Symbol, :function] - # # - # def initialize(name:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, type: :function) + # Use this option to force the model to call a specific function. + # + # @param name [String] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 789817e8..f43db682 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -18,11 +18,8 @@ module ToolChoiceOptions AUTO = :auto REQUIRED = :required - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index d26c027f..e51b376e 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -17,15 +17,11 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Models::Responses::ToolChoiceTypes::Type } - # @!parse - # # Indicates that the model should use a built-in tool to generate a response. - # # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). 
- # #
- # def initialize(type:, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(type:)
+ # Indicates that the model should use a built-in tool to generate a response.
+ # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools).
+ #
+ # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type]

# The type of hosted tool the model should use. Learn more about
# [built-in tools](https://platform.openai.com/docs/guides/tools).
@@ -45,11 +41,8 @@ module Type
COMPUTER_USE_PREVIEW = :computer_use_preview
WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11

-        finalize!
-
- # @!parse
- # # @return [Array]
- # def self.values; end
+ # @!method self.values
+ # @return [Array]
end
end
end
diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb
index 90f84d39..3ed57a22 100644
--- a/lib/openai/models/responses/web_search_tool.rb
+++ b/lib/openai/models/responses/web_search_tool.rb
@@ -29,18 +29,14 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel
# @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil]
optional :user_location, -> { OpenAI::Models::Responses::WebSearchTool::UserLocation }, nil?: true

- # @!parse
- # # This tool searches the web for relevant results to use in a response. Learn more
- # # about the
- # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search).
- # #
- # # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type]
- # # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize]
- # # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil]
- # #
- # def initialize(type:, search_context_size: nil, user_location: nil, **) = super
-
- # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+ # @!method initialize(type:, search_context_size: nil, user_location: nil)
+ # This tool searches the web for relevant results to use in a response. Learn more
+ # about the
+ # [web search tool](https://platform.openai.com/docs/guides/tools-web-search).
+ #
+ # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type]
+ # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize]
+ # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil]

# The type of the web search tool. One of:
#
@@ -54,11 +50,8 @@ module Type
WEB_SEARCH_PREVIEW = :web_search_preview
WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11

-        finalize!
-
- # @!parse
- # # @return [Array]
- # def self.values; end
+ # @!method self.values
+ # @return [Array]
end

# High level guidance for the amount of context window space to use for the
@@ -72,11 +65,8 @@ module SearchContextSize
MEDIUM = :medium
HIGH = :high

-        finalize!
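
A similar sketch for the web search tool documented above; the symbols come from the `Type` and `SearchContextSize` enums in these hunks, and `user_location` is left at its documented `nil` default (illustrative values, not defaults from this patch):

    require "openai"

    tool = OpenAI::Models::Responses::WebSearchTool.new(
      type: :web_search_preview,    # one of WebSearchTool::Type.values
      search_context_size: :medium  # one of WebSearchTool::SearchContextSize.values
    )
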
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Responses::WebSearchTool#user_location @@ -129,16 +119,12 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :timezone - # @!parse - # # @param city [String] - # # @param country [String] - # # @param region [String] - # # @param timezone [String] - # # @param type [Symbol, :approximate] - # # - # def initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) + # @param city [String] + # @param country [String] + # @param region [String] + # @param timezone [String] + # @param type [Symbol, :approximate] end end end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 588d1722..5984103a 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -19,16 +19,12 @@ module ResponsesOnlyModel COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 9bff61c3..c8dc5106 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -18,13 +18,9 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 1655679f..7da9cb50 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -14,13 +14,9 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param type [Symbol, :static] end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb 
index f64fff68..b0d5a5b6 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -14,15 +14,11 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # Customize your own chunking strategy by setting chunk size and chunk overlap. - # # - # # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # Customize your own chunking strategy by setting chunk size and chunk overlap. + # + # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param type [Symbol, :static] end end end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 9151584f..53ae0a23 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -60,22 +60,18 @@ class Upload < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FileObject, nil] optional :file, -> { OpenAI::Models::FileObject }, nil?: true - # @!parse - # # The Upload object can accept byte chunks in the form of Parts. - # # - # # @param id [String] - # # @param bytes [Integer] - # # @param created_at [Integer] - # # @param expires_at [Integer] - # # @param filename [String] - # # @param purpose [String] - # # @param status [Symbol, OpenAI::Models::Upload::Status] - # # @param file [OpenAI::Models::FileObject, nil] - # # @param object [Symbol, :upload] - # # - # def initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) + # The Upload object can accept byte chunks in the form of Parts. + # + # @param id [String] + # @param bytes [Integer] + # @param created_at [Integer] + # @param expires_at [Integer] + # @param filename [String] + # @param purpose [String] + # @param status [Symbol, OpenAI::Models::Upload::Status] + # @param file [OpenAI::Models::FileObject, nil] + # @param object [Symbol, :upload] # The status of the Upload. # @@ -88,11 +84,8 @@ module Status CANCELLED = :cancelled EXPIRED = :expired - finalize! 
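
As a rough lifecycle sketch for the Upload fields above (assuming a configured `client`; an Upload is created with a byte budget, fed Parts, then completed, as the params classes just below spell out):

    data = File.binread("training.jsonl")

    upload = client.uploads.create(
      bytes: data.bytesize,
      filename: "training.jsonl",
      mime_type: "application/jsonl",
      purpose: :"fine-tune"
    )
    part = client.uploads.parts.create(upload.id, data: StringIO.new(data))

    completed = client.uploads.complete(upload.id, part_ids: [part.id])
    completed.status # => :completed, or :cancelled / :expired on the unhappy paths
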
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 7c44f8c9..1b7164e8 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -8,12 +8,8 @@ class UploadCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 3b8eb1b5..77f01df1 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -25,14 +25,10 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :md5 - # @!parse - # # @param part_ids [Array] - # # @param md5 [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(part_ids:, md5: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(part_ids:, md5: nil, request_options: {}) + # @param part_ids [Array] + # @param md5 [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index aa01ef38..0c7d54d5 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -38,16 +38,12 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } - # @!parse - # # @param bytes [Integer] - # # @param filename [String] - # # @param mime_type [String] - # # @param purpose [Symbol, OpenAI::Models::FilePurpose] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) + # @param bytes [Integer] + # @param filename [String] + # @param mime_type [String] + # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index e6fd5ad7..1e11840f 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -15,13 +15,9 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @return [Pathname, StringIO] required :data, OpenAI::Internal::Type::IOLike - # @!parse - # # @param data [Pathname, StringIO] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(data:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> 
void + # @!method initialize(data:, request_options: {}) + # @param data [Pathname, StringIO] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/uploads/upload_part.rb b/lib/openai/models/uploads/upload_part.rb index 4f839461..fa189119 100644 --- a/lib/openai/models/uploads/upload_part.rb +++ b/lib/openai/models/uploads/upload_part.rb @@ -29,17 +29,13 @@ class UploadPart < OpenAI::Internal::Type::BaseModel # @return [String] required :upload_id, String - # @!parse - # # The upload Part represents a chunk of bytes we can add to an Upload object. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param upload_id [String] - # # @param object [Symbol, :"upload.part"] - # # - # def initialize(id:, created_at:, upload_id:, object: :"upload.part", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, upload_id:, object: :"upload.part") + # The upload Part represents a chunk of bytes we can add to an Upload object. + # + # @param id [String] + # @param created_at [Integer] + # @param upload_id [String] + # @param object [Symbol, :"upload.part"] end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 9abda127..5ad255fe 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -80,40 +80,21 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :expires_at, Integer, nil?: true - # @!parse - # # A vector store is a collection of processed files can be used by the - # # `file_search` tool. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param file_counts [OpenAI::Models::VectorStore::FileCounts] - # # @param last_active_at [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param status [Symbol, OpenAI::Models::VectorStore::Status] - # # @param usage_bytes [Integer] - # # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] - # # @param expires_at [Integer, nil] - # # @param object [Symbol, :vector_store] - # # - # def initialize( - # id:, - # created_at:, - # file_counts:, - # last_active_at:, - # metadata:, - # name:, - # status:, - # usage_bytes:, - # expires_after: nil, - # expires_at: nil, - # object: :vector_store, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) + # A vector store is a collection of processed files can be used by the + # `file_search` tool. 
+ # + # @param id [String] + # @param created_at [Integer] + # @param file_counts [OpenAI::Models::VectorStore::FileCounts] + # @param last_active_at [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param status [Symbol, OpenAI::Models::VectorStore::Status] + # @param usage_bytes [Integer] + # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] + # @param expires_at [Integer, nil] + # @param object [Symbol, :vector_store] # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -147,16 +128,12 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # @param cancelled [Integer] - # # @param completed [Integer] - # # @param failed [Integer] - # # @param in_progress [Integer] - # # @param total [Integer] - # # - # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) + # @param cancelled [Integer] + # @param completed [Integer] + # @param failed [Integer] + # @param in_progress [Integer] + # @param total [Integer] end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -171,11 +148,8 @@ module Status IN_PROGRESS = :in_progress COMPLETED = :completed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::VectorStore#expires_after @@ -193,15 +167,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 8380c18b..2899d54e 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -62,27 +62,13 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # chunking_strategy: nil, - # expires_after: nil, - # file_ids: nil, - # metadata: nil, - # name: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor @@ -98,15 +84,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
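
A sketch of supplying that policy at creation time (assuming a configured `client` and a previously uploaded `file`; the hash mirrors the `ExpiresAfter` shape documented above):

    store = client.vector_stores.create(
      name: "support-docs",
      file_ids: [file.id],
      expires_after: {anchor: :last_active_at, days: 7} # expire a week after last activity
    )
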
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index 5beedec7..e307e25d 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -8,12 +8,8 @@ class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_store_deleted.rb b/lib/openai/models/vector_store_deleted.rb index b1624e84..ecc812bc 100644 --- a/lib/openai/models/vector_store_deleted.rb +++ b/lib/openai/models/vector_store_deleted.rb @@ -19,14 +19,10 @@ class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"vector_store.deleted"] required :object, const: :"vector_store.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"vector_store.deleted"] - # # - # def initialize(id:, deleted:, object: :"vector_store.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"vector_store.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"vector_store.deleted"] end end end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 345b2830..610dd889 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -56,16 +56,12 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -75,11 +71,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index f3b3b098..aaf50986 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -8,12 +8,8 @@ class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 653db38a..9807ed6a 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -55,27 +55,13 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :rewrite_query - # @!parse - # # @param query [String, Array] - # # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] - # # @param rewrite_query [Boolean] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # query:, - # filters: nil, - # max_num_results: nil, - # ranking_options: nil, - # rewrite_query: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) + # @param query [String, Array] + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] + # @param rewrite_query [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # A query string for a search module Query @@ -85,9 +71,8 @@ module Query variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -102,9 +87,8 @@ module Filters # Combine multiple filters using `and` or `or`. variant -> { OpenAI::Models::CompoundFilter } - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -126,15 +110,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :score_threshold - # @!parse - # # Ranking options for search. 
- # # - # # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker: nil, score_threshold: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker: nil, score_threshold: nil) + # Ranking options for search. + # + # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] + # @param score_threshold [Float] # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker @@ -143,11 +123,8 @@ module Ranker AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 6b076e29..d06a0c95 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -41,16 +41,12 @@ class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # @return [Float] required :score, Float - # @!parse - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param content [Array] - # # @param file_id [String] - # # @param filename [String] - # # @param score [Float] - # # - # def initialize(attributes:, content:, file_id:, filename:, score:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(attributes:, content:, file_id:, filename:, score:) + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param content [Array] + # @param file_id [String] + # @param filename [String] + # @param score [Float] module Attribute extend OpenAI::Internal::Type::Union @@ -61,9 +57,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end class Content < OpenAI::Internal::Type::BaseModel @@ -79,13 +74,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] required :type, enum: -> { OpenAI::Models::VectorStoreSearchResponse::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] # The type of content. # @@ -95,11 +86,8 @@ module Type TEXT = :text - finalize! 
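
A sketch of a search exercising these options (assuming a configured `client`, an existing `store`, and the SDK's standard page object; `score_threshold` drops low-relevance chunks, and `score` / `filename` come from the response model shown above):

    results = client.vector_stores.search(
      store.id,
      query: "How do I rotate an API key?",
      max_num_results: 5,
      rewrite_query: true,
      ranking_options: {ranker: :auto, score_threshold: 0.5}
    )
    results.data.each { |hit| puts format("%.3f  %s", hit.score, hit.filename) }
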
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 145e2808..9bda9d94 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -31,15 +31,11 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!parse - # # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}) + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor @@ -55,15 +51,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index a64a4b9c..c79d293e 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -14,13 +14,9 @@ class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 31d32b92..f0517a4c 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -40,15 +40,11 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # attr_writer :chunking_strategy - # @!parse - # # @param file_ids [Array] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) + # @param file_ids [Array] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -59,9 +55,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 0f0bacb3..a99326c3 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -72,18 +72,14 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] # attr_writer :order - # @!parse - # # @param vector_store_id [String] - # # @param after [String] - # # @param before [String] - # # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] - # # @param 
request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + # @param vector_store_id [String] + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter @@ -94,11 +90,8 @@ module Filter FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -109,11 +102,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 55499481..48cf6115 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -14,13 +14,9 @@ class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index 26be8b94..e4f3deec 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -14,13 +14,9 @@ class FileContentParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index 01c19380..b4924ae1 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -25,13 +25,9 @@ class FileContentResponse < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :type - # @!parse - # # @param text [String] - 
# # @param type [String] - # # - # def initialize(text: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text: nil, type: nil) + # @param text [String] + # @param type [String] end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 6767c7dc..d8307a70 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -40,15 +40,11 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # attr_writer :chunking_strategy - # @!parse - # # @param file_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) + # @param file_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -59,9 +55,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index e4bee072..25a5fbcc 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -14,13 +14,9 @@ class FileDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 88d3a55c..0c80decc 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -67,17 +67,13 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] - # # @param 
request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter @@ -88,11 +84,8 @@ module Filter FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -103,11 +96,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 9d9c26a7..2b63ee84 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -14,13 +14,9 @@ class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 2e5f4d52..9cf5a31a 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -26,14 +26,10 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileUpdateParams::Attribute] }, nil?: true - # @!parse - # # @param vector_store_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, attributes:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, attributes:, request_options: {}) + # @param vector_store_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -44,9 +40,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb 
b/lib/openai/models/vector_stores/vector_store_file.rb index 71ba4e7d..ae51122a 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -76,35 +76,18 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] # attr_writer :chunking_strategy - # @!parse - # # A list of files attached to a vector store. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] - # # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] - # # @param usage_bytes [Integer] - # # @param vector_store_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - # # @param object [Symbol, :"vector_store.file"] - # # - # def initialize( - # id:, - # created_at:, - # last_error:, - # status:, - # usage_bytes:, - # vector_store_id:, - # attributes: nil, - # chunking_strategy: nil, - # object: :"vector_store.file", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") + # A list of files attached to a vector store. + # + # @param id [String] + # @param created_at [Integer] + # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] + # @param usage_bytes [Integer] + # @param vector_store_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + # @param object [Symbol, :"vector_store.file"] # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -120,16 +103,12 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this vector store file. Will be `null` if there - # # are no errors. - # # - # # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this vector store file. Will be `null` if there + # are no errors. + # + # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] + # @param message [String] # One of `server_error` or `rate_limit_exceeded`. # @@ -141,11 +120,8 @@ module Code UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -162,11 +138,8 @@ module Status CANCELLED = :cancelled FAILED = :failed - finalize! 
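
A sketch of surfacing the status machinery above while waiting for ingestion (assuming a configured `client` and a hypothetical file id):

    file = client.vector_stores.files.retrieve("file-abc123", vector_store_id: store.id)
    case file.status
    when :completed   then puts "indexed #{file.usage_bytes} bytes"
    when :failed      then warn "#{file.last_error.code}: #{file.last_error.message}"
    when :in_progress then sleep(1) # poll again
    end
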
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Attribute @@ -178,9 +151,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index 55e8644b..ee7fb5ea 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -45,19 +45,15 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # A batch of files attached to a vector store. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] - # # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] - # # @param vector_store_id [String] - # # @param object [Symbol, :"vector_store.files_batch"] - # # - # def initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") + # A batch of files attached to a vector store. + # + # @param id [String] + # @param created_at [Integer] + # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] + # @param vector_store_id [String] + # @param object [Symbol, :"vector_store.files_batch"] # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -91,16 +87,12 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # @param cancelled [Integer] - # # @param completed [Integer] - # # @param failed [Integer] - # # @param in_progress [Integer] - # # @param total [Integer] - # # - # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) + # @param cancelled [Integer] + # @param completed [Integer] + # @param failed [Integer] + # @param in_progress [Integer] + # @param total [Integer] end # The status of the vector store files batch, which can be either `in_progress`, @@ -115,11 +107,8 @@ module Status CANCELLED = :cancelled FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_deleted.rb b/lib/openai/models/vector_stores/vector_store_file_deleted.rb index f7992170..971629db 100644 --- a/lib/openai/models/vector_stores/vector_store_file_deleted.rb +++ b/lib/openai/models/vector_stores/vector_store_file_deleted.rb @@ -20,14 +20,10 @@ class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"vector_store.file.deleted"] required :object, const: :"vector_store.file.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"vector_store.file.deleted"] - # # - # def initialize(id:, deleted:, object: :"vector_store.file.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"vector_store.file.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"vector_store.file.deleted"] end end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index f4eaf933..4bd64d4b 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -66,10 +66,9 @@ def self.validate!(opts) optional :timeout, Float # @!parse - # # Returns a new instance of RequestOptions. + # # @!method initialize(values = {}) + # # Returns a new instance of RequestOptions. # # - # # @param values [Hash{Symbol=>Object}] - # # - # def initialize(values = {}) = super + # # @param values [Hash{Symbol=>Object}] end end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 32f6a62c..bc1959ae 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -176,6 +176,7 @@ module OpenAI def deconstruct_keys(keys); end class << self + # @api private sig { params(model: OpenAI::Internal::Type::BaseModel).returns(OpenAI::Internal::AnyHash) } def walk(model); end end diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index a85d1768..e1d0753c 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -22,12 +22,6 @@ module OpenAI sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } def values; end - # @api private - # - # Guard against thread safety issues by instantiating `@values`. 
- sig { void } - private def finalize!; end - sig { params(other: T.anything).returns(T::Boolean) } def ===(other); end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 5f707303..4de50b6d 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -6,8 +6,6 @@ module OpenAI def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] - private def self.finalize!: -> void - def ===: (top other) -> bool def ==: (top other) -> bool diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index c3b0dbfa..b03987cf 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -156,6 +156,7 @@ def test_dump_retry class OpenAI::Test::EnumModelTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) end @@ -575,6 +576,7 @@ def test_coerce class OpenAI::Test::BaseModelQoLTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) end From 867d18d376d6742a183478ea76753388c8b862a8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:43:01 +0000 Subject: [PATCH 144/295] chore: simplify yard annotations by removing most `@!parse` directives --- .../internal/type/request_parameters.rb | 3 +- .../models/audio/speech_create_params.rb | 21 +--- lib/openai/models/audio/transcription.rb | 24 +--- .../audio/transcription_create_params.rb | 39 ++---- .../audio/transcription_text_delta_event.rb | 24 +--- .../audio/transcription_text_done_event.rb | 24 +--- .../models/audio/transcription_verbose.rb | 12 +- .../models/audio/translation_create_params.rb | 21 +--- .../models/audio/translation_verbose.rb | 6 +- lib/openai/models/batch.rb | 84 +++---------- lib/openai/models/batch_cancel_params.rb | 3 +- lib/openai/models/batch_create_params.rb | 3 +- lib/openai/models/batch_error.rb | 12 +- lib/openai/models/batch_list_params.rb | 15 +-- lib/openai/models/batch_retrieve_params.rb | 3 +- lib/openai/models/beta/assistant.rb | 24 +--- .../models/beta/assistant_create_params.rb | 51 ++------ .../models/beta/assistant_delete_params.rb | 3 +- .../models/beta/assistant_list_params.rb | 27 +---- .../models/beta/assistant_retrieve_params.rb | 3 +- .../models/beta/assistant_stream_event.rb | 6 +- .../models/beta/assistant_tool_choice.rb | 6 +- .../models/beta/assistant_update_params.rb | 39 ++---- lib/openai/models/beta/file_search_tool.rb | 24 +--- lib/openai/models/beta/thread.rb | 24 +--- .../beta/thread_create_and_run_params.rb | 99 +++------------- .../models/beta/thread_create_params.rb | 63 ++-------- .../models/beta/thread_delete_params.rb | 3 +- .../models/beta/thread_retrieve_params.rb | 3 +- lib/openai/models/beta/thread_stream_event.rb | 6 +- .../models/beta/thread_update_params.rb | 27 +---- .../threads/file_citation_delta_annotation.rb | 36 +----- .../threads/file_path_delta_annotation.rb | 30 +---- lib/openai/models/beta/threads/image_file.rb | 6 +- .../models/beta/threads/image_file_delta.rb | 12 +- .../beta/threads/image_file_delta_block.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 12 +- .../beta/threads/image_url_delta_block.rb | 6 +- lib/openai/models/beta/threads/message.rb | 12 +- .../beta/threads/message_create_params.rb | 15 +-- 
.../beta/threads/message_delete_params.rb | 3 +- .../models/beta/threads/message_delta.rb | 12 +- .../beta/threads/message_list_params.rb | 33 +----- .../beta/threads/message_retrieve_params.rb | 3 +- .../beta/threads/message_update_params.rb | 3 +- .../beta/threads/refusal_delta_block.rb | 6 +- lib/openai/models/beta/threads/run.rb | 6 +- .../models/beta/threads/run_cancel_params.rb | 3 +- .../models/beta/threads/run_create_params.rb | 27 +---- .../models/beta/threads/run_list_params.rb | 27 +---- .../beta/threads/run_retrieve_params.rb | 3 +- .../threads/run_submit_tool_outputs_params.rb | 15 +-- .../models/beta/threads/run_update_params.rb | 3 +- .../threads/runs/code_interpreter_logs.rb | 6 +- .../runs/code_interpreter_output_image.rb | 12 +- .../runs/code_interpreter_tool_call_delta.rb | 24 +--- .../threads/runs/file_search_tool_call.rb | 30 +---- .../runs/file_search_tool_call_delta.rb | 6 +- .../threads/runs/function_tool_call_delta.rb | 24 +--- .../beta/threads/runs/run_step_delta.rb | 6 +- .../runs/run_step_delta_message_delta.rb | 12 +- .../beta/threads/runs/step_list_params.rb | 33 +----- .../beta/threads/runs/step_retrieve_params.rb | 9 +- .../threads/runs/tool_call_delta_object.rb | 6 +- lib/openai/models/beta/threads/text_delta.rb | 12 +- .../models/beta/threads/text_delta_block.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 12 +- ...chat_completion_assistant_message_param.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 66 ++--------- .../chat/chat_completion_content_part.rb | 18 +-- .../chat_completion_content_part_image.rb | 6 +- ...chat_completion_developer_message_param.rb | 6 +- .../models/chat/chat_completion_message.rb | 18 +-- .../chat/chat_completion_stream_options.rb | 6 +- .../chat_completion_system_message_param.rb | 6 +- .../chat_completion_user_message_param.rb | 6 +- .../models/chat/completion_create_params.rb | 93 +++------------ .../models/chat/completion_delete_params.rb | 3 +- .../models/chat/completion_list_params.rb | 27 +---- .../models/chat/completion_retrieve_params.rb | 3 +- .../models/chat/completion_update_params.rb | 3 +- .../chat/completions/message_list_params.rb | 21 +--- lib/openai/models/completion.rb | 12 +- lib/openai/models/completion_choice.rb | 24 +--- lib/openai/models/completion_create_params.rb | 9 +- lib/openai/models/completion_usage.rb | 48 ++------ lib/openai/models/embedding_create_params.rb | 21 +--- lib/openai/models/eval_create_params.rb | 21 +--- lib/openai/models/eval_delete_params.rb | 3 +- lib/openai/models/eval_list_params.rb | 27 +---- lib/openai/models/eval_retrieve_params.rb | 3 +- .../models/eval_text_similarity_grader.rb | 6 +- lib/openai/models/eval_update_params.rb | 9 +- ...create_eval_completions_run_data_source.rb | 36 +----- .../create_eval_jsonl_run_data_source.rb | 6 +- lib/openai/models/evals/run_cancel_params.rb | 3 +- lib/openai/models/evals/run_create_params.rb | 9 +- lib/openai/models/evals/run_delete_params.rb | 3 +- .../models/evals/run_delete_response.rb | 18 +-- lib/openai/models/evals/run_list_params.rb | 27 +---- .../models/evals/run_retrieve_params.rb | 3 +- .../evals/runs/output_item_list_params.rb | 27 +---- .../evals/runs/output_item_list_response.rb | 12 +- .../evals/runs/output_item_retrieve_params.rb | 3 +- .../runs/output_item_retrieve_response.rb | 12 +- lib/openai/models/file_content_params.rb | 3 +- lib/openai/models/file_create_params.rb | 3 +- lib/openai/models/file_delete_params.rb | 3 +- lib/openai/models/file_list_params.rb | 27 +---- lib/openai/models/file_object.rb | 
12 +- lib/openai/models/file_retrieve_params.rb | 3 +- .../checkpoints/permission_create_params.rb | 3 +- .../checkpoints/permission_delete_params.rb | 3 +- .../checkpoints/permission_retrieve_params.rb | 27 +---- .../models/fine_tuning/fine_tuning_job.rb | 96 +++------------ .../fine_tuning/fine_tuning_job_event.rb | 12 +- .../fine_tuning_job_wandb_integration.rb | 6 +- .../models/fine_tuning/job_cancel_params.rb | 3 +- .../models/fine_tuning/job_create_params.rb | 111 +++--------------- .../fine_tuning/job_list_events_params.rb | 15 +-- .../models/fine_tuning/job_list_params.rb | 15 +-- .../models/fine_tuning/job_retrieve_params.rb | 3 +- .../jobs/checkpoint_list_params.rb | 15 +-- .../jobs/fine_tuning_job_checkpoint.rb | 42 ++----- lib/openai/models/function_definition.rb | 12 +- lib/openai/models/image.rb | 18 +-- .../models/image_create_variation_params.rb | 9 +- lib/openai/models/image_edit_params.rb | 15 +-- lib/openai/models/image_generate_params.rb | 15 +-- lib/openai/models/model_delete_params.rb | 3 +- lib/openai/models/model_list_params.rb | 3 +- lib/openai/models/model_retrieve_params.rb | 3 +- lib/openai/models/moderation_create_params.rb | 9 +- .../models/response_format_json_schema.rb | 12 +- .../models/responses/easy_input_message.rb | 6 +- .../models/responses/file_search_tool.rb | 30 +---- .../responses/input_item_list_params.rb | 33 +----- lib/openai/models/responses/response.rb | 30 +---- ...response_computer_tool_call_output_item.rb | 12 +- ...se_computer_tool_call_output_screenshot.rb | 12 +- .../responses/response_create_params.rb | 27 +---- .../responses/response_delete_params.rb | 3 +- .../response_file_search_tool_call.rb | 24 +--- ...response_format_text_json_schema_config.rb | 6 +- .../responses/response_function_tool_call.rb | 12 +- ...response_function_tool_call_output_item.rb | 6 +- .../models/responses/response_input_file.rb | 18 +-- .../models/responses/response_input_item.rb | 42 ++----- .../responses/response_input_message_item.rb | 12 +- .../responses/response_reasoning_item.rb | 6 +- .../responses/response_retrieve_params.rb | 9 +- .../models/responses/response_text_config.rb | 6 +- .../models/responses/web_search_tool.rb | 30 +---- lib/openai/models/upload_cancel_params.rb | 3 +- lib/openai/models/upload_complete_params.rb | 9 +- lib/openai/models/upload_create_params.rb | 3 +- .../models/uploads/part_create_params.rb | 3 +- lib/openai/models/vector_store.rb | 6 +- .../models/vector_store_create_params.rb | 27 +---- .../models/vector_store_delete_params.rb | 3 +- lib/openai/models/vector_store_list_params.rb | 27 +---- .../models/vector_store_retrieve_params.rb | 3 +- .../models/vector_store_search_params.rb | 39 ++---- .../models/vector_store_update_params.rb | 3 +- .../vector_stores/file_batch_cancel_params.rb | 3 +- .../vector_stores/file_batch_create_params.rb | 9 +- .../file_batch_list_files_params.rb | 33 +----- .../file_batch_retrieve_params.rb | 3 +- .../vector_stores/file_content_params.rb | 3 +- .../vector_stores/file_content_response.rb | 12 +- .../vector_stores/file_create_params.rb | 9 +- .../vector_stores/file_delete_params.rb | 3 +- .../models/vector_stores/file_list_params.rb | 33 +----- .../vector_stores/file_retrieve_params.rb | 3 +- .../vector_stores/file_update_params.rb | 3 +- .../models/vector_stores/vector_store_file.rb | 6 +- 177 files changed, 530 insertions(+), 2362 deletions(-) diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb index aaa7a939..958e1051 100644 --- 
a/lib/openai/internal/type/request_parameters.rb +++ b/lib/openai/internal/type/request_parameters.rb @@ -12,9 +12,8 @@ module RequestParameters # @param mod [Module] def self.included(mod) - return unless mod <= OpenAI::Internal::Type::BaseModel + raise ArgumentError.new(mod) unless mod <= OpenAI::Internal::Type::BaseModel - mod.extend(OpenAI::Internal::Type::RequestParameters::Converter) mod.optional(:request_options, OpenAI::RequestOptions) end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 7c59f76f..114d8fa3 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -5,8 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Speech#create class SpeechCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -31,39 +30,27 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } - # @!attribute [r] instructions + # @!attribute instructions # Control the voice of your generated audio with additional instructions. Does not # work with `tts-1` or `tts-1-hd`. # # @return [String, nil] optional :instructions, String - # @!parse - # # @return [String] - # attr_writer :instructions - - # @!attribute [r] response_format + # @!attribute response_format # The format to return audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] - # attr_writer :response_format - - # @!attribute [r] speed + # @!attribute speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # the default. # # @return [Float, nil] optional :speed, Float - # @!parse - # # @return [Float] - # attr_writer :speed - # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) # @param input [String] # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 0e0cb142..96e65045 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -10,7 +10,7 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the tokens in the transcription. Only returned with the # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. @@ -18,10 +18,6 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(text:, logprobs: nil) # Represents a transcription response returned by the model, based on the provided # input.
@@ -30,36 +26,24 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @param logprobs [Array] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token in the transcription. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes of the token. # # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[Float] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 259a21cb..4377a2ab 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -7,8 +7,7 @@ module Audio # # @see OpenAI::Resources::Audio::Transcriptions#create_streaming class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file @@ -26,7 +25,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranscriptionCreateParams::Model } - # @!attribute [r] include + # @!attribute include # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. `logprobs` only works with @@ -37,11 +36,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] language + # @!attribute language # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) # format will improve accuracy and latency. @@ -49,11 +44,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :language, String - # @!parse - # # @return [String] - # attr_writer :language - - # @!attribute [r] prompt + # @!attribute prompt # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) @@ -62,11 +53,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :prompt, String - # @!parse - # # @return [String] - # attr_writer :prompt - - # @!attribute [r] response_format + # @!attribute response_format # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. 
@@ -74,11 +61,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::AudioResponseFormat] - # attr_writer :response_format - - # @!attribute [r] temperature + # @!attribute temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and # deterministic. If set to 0, the model will use @@ -88,11 +71,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - - # @!attribute [r] timestamp_granularities + # @!attribute timestamp_granularities # The timestamp granularities to populate for this transcription. # `response_format` must be set to `verbose_json` to use timestamp granularities. # Either or both of these options are supported: `word`, or `segment`. Note: There @@ -103,10 +82,6 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :timestamp_granularities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] } - # @!parse - # # @return [Array] - # attr_writer :timestamp_granularities - # @!method initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index f7dff312..7f9705d1 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -16,7 +16,7 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"transcript.text.delta"] required :type, const: :"transcript.text.delta" - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the delta. Only included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. @@ -25,10 +25,6 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you @@ -40,36 +36,24 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :"transcript.text.delta"] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token that was used to generate the log probability. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes that were used to generate the log probability.
# # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 3e0fb33c..be7eb322 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -16,7 +16,7 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"transcript.text.done"] required :type, const: :"transcript.text.done" - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the individual tokens in the transcription. Only # included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) @@ -26,10 +26,6 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you @@ -41,36 +37,24 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :"transcript.text.done"] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token that was used to generate the log probability. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes that were used to generate the log probability. # # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index f0b3f7c3..ae9e3c77 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -22,26 +22,18 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] segments + # @!attribute segments # Segments of the transcribed text and their corresponding details. # # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } - # @!parse - # # @return [Array] - # attr_writer :segments - - # @!attribute [r] words + # @!attribute words # Extracted words and their corresponding timestamps. 
# # @return [Array, nil] optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionWord] } - # @!parse - # # @return [Array] - # attr_writer :words - # @!method initialize(duration:, language:, text:, segments: nil, words: nil) # Represents a verbose json transcription response returned by model, based on the # provided input. diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 7589e685..ce70c85f 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -5,8 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Translations#create class TranslationCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file @@ -23,7 +22,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranslationCreateParams::Model } - # @!attribute [r] prompt + # @!attribute prompt # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) @@ -32,22 +31,14 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :prompt, String - # @!parse - # # @return [String] - # attr_writer :prompt - - # @!attribute [r] response_format + # @!attribute response_format # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] - # attr_writer :response_format - - # @!attribute [r] temperature + # @!attribute temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and # deterministic. If set to 0, the model will use @@ -57,10 +48,6 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - # @!method initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index 5d802ffc..c5c9c54c 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -22,16 +22,12 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] segments + # @!attribute segments # Segments of the translated text and their corresponding details. 
# # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } - # @!parse - # # @return [Array] - # attr_writer :segments - # @!method initialize(duration:, language:, text:, segments: nil) # @param duration [Float] # @param language [String] diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 0eb7ef45..d8a84818 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -45,105 +45,65 @@ class Batch < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Batch::Status] required :status, enum: -> { OpenAI::Models::Batch::Status } - # @!attribute [r] cancelled_at + # @!attribute cancelled_at # The Unix timestamp (in seconds) for when the batch was cancelled. # # @return [Integer, nil] optional :cancelled_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :cancelled_at - - # @!attribute [r] cancelling_at + # @!attribute cancelling_at # The Unix timestamp (in seconds) for when the batch started cancelling. # # @return [Integer, nil] optional :cancelling_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :cancelling_at - - # @!attribute [r] completed_at + # @!attribute completed_at # The Unix timestamp (in seconds) for when the batch was completed. # # @return [Integer, nil] optional :completed_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :completed_at - - # @!attribute [r] error_file_id + # @!attribute error_file_id # The ID of the file containing the outputs of requests with errors. # # @return [String, nil] optional :error_file_id, String - # @!parse - # # @return [String] - # attr_writer :error_file_id - - # @!attribute [r] errors + # @!attribute errors # # @return [OpenAI::Models::Batch::Errors, nil] optional :errors, -> { OpenAI::Models::Batch::Errors } - # @!parse - # # @return [OpenAI::Models::Batch::Errors] - # attr_writer :errors - - # @!attribute [r] expired_at + # @!attribute expired_at # The Unix timestamp (in seconds) for when the batch expired. # # @return [Integer, nil] optional :expired_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expired_at - - # @!attribute [r] expires_at + # @!attribute expires_at # The Unix timestamp (in seconds) for when the batch will expire. # # @return [Integer, nil] optional :expires_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expires_at - - # @!attribute [r] failed_at + # @!attribute failed_at # The Unix timestamp (in seconds) for when the batch failed. # # @return [Integer, nil] optional :failed_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :failed_at - - # @!attribute [r] finalizing_at + # @!attribute finalizing_at # The Unix timestamp (in seconds) for when the batch started finalizing. # # @return [Integer, nil] optional :finalizing_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :finalizing_at - - # @!attribute [r] in_progress_at + # @!attribute in_progress_at # The Unix timestamp (in seconds) for when the batch started processing. # # @return [Integer, nil] optional :in_progress_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :in_progress_at - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -155,26 +115,18 @@ class Batch < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] output_file_id + # @!attribute output_file_id # The ID of the file containing the outputs of successfully executed requests. # # @return [String, nil] optional :output_file_id, String - # @!parse - # # @return [String] - # attr_writer :output_file_id - - # @!attribute [r] request_counts + # @!attribute request_counts # The request counts for different statuses within the batch. # # @return [OpenAI::Models::BatchRequestCounts, nil] optional :request_counts, -> { OpenAI::Models::BatchRequestCounts } - # @!parse - # # @return [OpenAI::Models::BatchRequestCounts] - # attr_writer :request_counts - # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) # @param id [String] # @param completion_window [String] @@ -218,25 +170,17 @@ module Status # @see OpenAI::Models::Batch#errors class Errors < OpenAI::Internal::Type::BaseModel - # @!attribute [r] data + # @!attribute data # # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::BatchError] } - # @!parse - # # @return [Array] - # attr_writer :data - - # @!attribute [r] object + # @!attribute object # The object type, which is always `list`. # # @return [String, nil] optional :object, String - # @!parse - # # @return [String] - # attr_writer :object - # @!method initialize(data: nil, object: nil) # @param data [Array] # @param object [String] diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 60cd89b8..9068ce31 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#cancel class BatchCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index ef913352..9c5654db 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#create class BatchCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute completion_window diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 513fcaf3..9d629686 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -3,32 +3,24 @@ module OpenAI module Models class BatchError < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code + # @!attribute code # An error code identifying the error type. 
# # @return [String, nil] optional :code, String - # @!parse - # # @return [String] - # attr_writer :code - # @!attribute line # The line number of the input file where the error occurred, if applicable. # # @return [Integer, nil] optional :line, Integer, nil?: true - # @!attribute [r] message + # @!attribute message # A human-readable message providing more details about the error. # # @return [String, nil] optional :message, String - # @!parse - # # @return [String] - # attr_writer :message - # @!attribute param # The name of the parameter that caused the error, if applicable. # diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index ab35f805..388dc273 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#list class BatchListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,21 +16,13 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index 6c9e459c..a03157a4 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#retrieve class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 6f861ccc..a4069e36 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -142,24 +142,16 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, nil] optional :file_search, -> { 
OpenAI::Models::Beta::Assistant::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -171,7 +163,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -179,17 +171,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -198,10 +186,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 5c2c0fbe..089e0fe9 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#create class AssistantCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -102,7 +101,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources }, nil?: true - # @!attribute [r] tools + # @!attribute tools # A list of tools enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. @@ -110,10 +109,6 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass.
So 0.1 @@ -156,25 +151,17 @@ module Model end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -186,7 +173,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -194,17 +181,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -213,11 +196,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this assistant. There can be a maximum of 1 @@ -227,16 +206,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# @@ -244,11 +219,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -256,10 +227,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index df76595d..6200b148 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#delete class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index c35334fc..5d3b268b 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -5,11 +5,10 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#list class AssistantListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -18,11 +17,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -31,32 +26,20 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::AssistantListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 6bb8b075..852988c7 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#retrieve class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index e8f7fefa..eaa92aba 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -122,16 +122,12 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.created"] required :event, const: :"thread.created" - # @!attribute [r] enabled + # @!attribute enabled # Whether to enable input audio transcription. # # @return [Boolean, nil] optional :enabled, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :enabled - # @!method initialize(data:, enabled: nil, event: :"thread.created") # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 21e29156..43914c38 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -10,15 +10,11 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] required :type, enum: -> { OpenAI::Models::Beta::AssistantToolChoice::Type } - # @!attribute [r] function + # @!attribute function # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction, nil] optional :function, -> { OpenAI::Models::Beta::AssistantToolChoiceFunction } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction] - # attr_writer :function - # @!method initialize(type:, function: nil) # Specifies a tool the model should use. Use to force the model to call a specific # tool. 
diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 6cb2ed05..c0a93261 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#update class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute description @@ -33,7 +32,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] model + # @!attribute model # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -43,10 +42,6 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } - # @!parse - # # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] - # attr_writer :model - # @!attribute name # The name of the assistant. The maximum length is 256 characters. # @@ -106,7 +101,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources }, nil?: true - # @!attribute [r] tools + # @!attribute tools # A list of tools enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. @@ -114,10 +109,6 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -270,25 +261,17 @@ module Model end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool.
For example, the `code_interpreter` tool requires @@ -300,7 +283,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available # to the `code_interpreter` tool. There can be a maximum of 20 files associated @@ -309,17 +292,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -328,10 +307,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index b9baee3e..9dc13172 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -10,23 +10,19 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!attribute [r] file_search + # @!attribute file_search # Overrides for the file search tool. # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch] - # attr_writer :file_search - # @!method initialize(file_search: nil, type: :file_search) # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between # 1 and 50 inclusive. @@ -39,11 +35,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. 
# @@ -54,10 +46,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] - # attr_writer :ranking_options - # @!method initialize(max_num_results: nil, ranking_options: nil) # Overrides for the file search tool. # @@ -73,17 +61,13 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float] required :score_threshold, Float - # @!attribute [r] ranker + # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] - # attr_writer :ranker - # @!method initialize(score_threshold:, ranker: nil) # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index bdb79d97..757ea5d5 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -55,24 +55,16 @@ class Thread < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::Thread::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -84,7 +76,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. 
@@ -92,17 +84,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -111,10 +99,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index c87d75f8..131ba814 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -7,8 +7,7 @@ module Beta # # @see OpenAI::Resources::Beta::Threads#stream_raw class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id @@ -66,7 +65,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Model }, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -74,10 +73,6 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute response_format # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -111,17 +106,13 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] thread + # @!attribute thread # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] optional :thread, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # attr_writer :thread - # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value @@ -206,7 +197,7 @@ module Model end class Thread < OpenAI::Internal::Type::BaseModel - # @!attribute [r] messages + # @!attribute messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. 
# @@ -214,10 +205,6 @@ class Thread < OpenAI::Internal::Type::BaseModel optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message] } - # @!parse - # # @return [Array] - # attr_writer :messages - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -329,27 +316,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] @@ -383,26 +362,18 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -414,7 +385,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -422,17 +393,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -441,11 +408,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this thread. There can be a maximum of 1 vector @@ -455,16 +418,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # @@ -472,11 +431,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -484,10 +439,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -583,25 +534,17 @@ class Static < OpenAI::Internal::Type::BaseModel end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. 
For example, the `code_interpreter` tool requires @@ -613,7 +556,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -621,17 +564,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -640,10 +579,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 85033056..2d768eaf 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -5,11 +5,10 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#create class ThreadCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] messages + # @!attribute messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # @@ -17,10 +16,6 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message] } - # @!parse - # # @return [Array] - # attr_writer :messages - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -128,27 +123,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] @@ -181,25 +168,17 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -211,7 +190,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -219,17 +198,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -238,11 +213,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this thread. 
There can be a maximum of 1 vector @@ -252,16 +223,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # @@ -269,11 +236,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -281,10 +244,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index 2071367c..308e47db 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#delete class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index 663f5488..c27f0bf6 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#retrieve class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index d7dee4f7..bf5cc945 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -16,16 +16,12 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.created"] required :event, const: :"thread.created" - # @!attribute [r] enabled + # @!attribute enabled # Whether to enable input audio 
transcription. # # @return [Boolean, nil] optional :enabled, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :enabled - # @!method initialize(data:, enabled: nil, event: :"thread.created") # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 97527fbf..8d7d621e 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#update class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata @@ -35,25 +34,17 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -65,7 +56,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -73,17 +64,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -92,10 +79,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 66b0623a..3d825a51 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -17,43 +17,27 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!attribute [r] end_index + # @!attribute end_index # # @return [Integer, nil] optional :end_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :end_index - - # @!attribute [r] file_citation + # @!attribute file_citation # # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] optional :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] - # attr_writer :file_citation - - # @!attribute [r] start_index + # @!attribute start_index # # @return [Integer, nil] optional :start_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :start_index - - # @!attribute [r] text + # @!attribute text # The text in the message content that needs to be replaced. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation) # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant @@ -68,26 +52,18 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the specific File the citation is from. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] quote + # @!attribute quote # The specific quote in the file. 
# # @return [String, nil] optional :quote, String - # @!parse - # # @return [String] - # attr_writer :quote - # @!method initialize(file_id: nil, quote: nil) # @param file_id [String] # @param quote [String] diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 065e7eab..679015ba 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -17,43 +17,27 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!attribute [r] end_index + # @!attribute end_index # # @return [Integer, nil] optional :end_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :end_index - - # @!attribute [r] file_path + # @!attribute file_path # # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] optional :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] - # attr_writer :file_path - - # @!attribute [r] start_index + # @!attribute start_index # # @return [Integer, nil] optional :start_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :start_index - - # @!attribute [r] text + # @!attribute text # The text in the message content that needs to be replaced. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path) # A URL for the file that's generated when the assistant used the # `code_interpreter` tool to generate a file. @@ -67,16 +51,12 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file that was generated. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(file_id: nil) # @param file_id [String] end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 61b60dc0..df480221 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -13,17 +13,13 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFile::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] - # attr_writer :detail - # @!method initialize(file_id:, detail: nil) # @param file_id [String] # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 117dd1c1..25fc81e0 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -5,18 +5,14 @@ module Models module Beta module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFileDelta::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] - # attr_writer :detail - - # @!attribute [r] file_id + # @!attribute file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. @@ -24,10 +20,6 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(detail: nil, file_id: nil) # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] # @param file_id [String] diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index a4abc497..3befaf8c 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -17,15 +17,11 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_file] required :type, const: :image_file - # @!attribute [r] image_file + # @!attribute image_file # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta, nil] optional :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta] - # attr_writer :image_file - # @!method initialize(index:, image_file: nil, type: :image_file) # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 14266c31..1b88b1b2 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -12,17 +12,13 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURL::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] - # attr_writer :detail - # @!method initialize(url:, detail: nil) # @param url [String] # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 3b2f4eab..9ba548e3 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -5,28 +5,20 @@ module Models module Beta module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURLDelta::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] - # attr_writer :detail - - # @!attribute [r] url + # @!attribute url # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, # webp. # # @return [String, nil] optional :url, String - # @!parse - # # @return [String] - # attr_writer :url - # @!method initialize(detail: nil, url: nil) # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] # @param url [String] diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index 4f7b9a82..8b140bfb 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -17,15 +17,11 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!attribute [r] image_url + # @!attribute image_url # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta, nil] optional :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta] - # attr_writer :image_url - # @!method initialize(index:, image_url: nil, type: :image_url) # References an image URL in the content of a message. # diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 90cf7aba..ccff15ee 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -124,27 +124,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @param object [Symbol, :"thread.message"] class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 081a69c8..711686f8 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#create class MessageCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute content @@ -87,27 +86,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 3625e9b1..2e3c77d2 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#delete class MessageDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 9458a9bc..04f0a1fd 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -5,27 +5,19 @@ module Models module Beta module Threads class MessageDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message in array of text and/or images. # # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentDelta] } - # @!parse - # # @return [Array] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role, nil] optional :role, enum: -> { OpenAI::Models::Beta::Threads::MessageDelta::Role } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # The delta containing the fields that have changed on the Message. 
# diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1fffd076..2dbe8d80 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -6,11 +6,10 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#list class MessageListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -19,11 +18,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -32,42 +27,26 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::MessageListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] - # attr_writer :order - - # @!attribute [r] run_id + # @!attribute run_id # Filter messages by the run ID that generated them. 
# # @return [String, nil] optional :run_id, String - # @!parse - # # @return [String] - # attr_writer :run_id - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index 10e58171..4b724f65 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#retrieve class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index 568cc684..09909fdc 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#update class MessageUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb index dcc1bd21..9f1cc3a1 100644 --- a/lib/openai/models/beta/threads/refusal_delta_block.rb +++ b/lib/openai/models/beta/threads/refusal_delta_block.rb @@ -17,15 +17,11 @@ class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!attribute [r] refusal + # @!attribute refusal # # @return [String, nil] optional :refusal, String - # @!parse - # # @return [String] - # attr_writer :refusal - # @!method initialize(index:, refusal: nil, type: :refusal) # The refusal content that is part of a message. # diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index fb23679b..7638e17a 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -253,17 +253,13 @@ class Run < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] reason + # @!attribute reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] - # attr_writer :reason - # @!method initialize(reason: nil) # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. 
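Every hunk above applies the same two-part rewrite: the `[r]` (read-only) marker is dropped from `@!attribute` tags, and the `@!parse`-fenced `attr_writer` documentation blocks are deleted outright, because the `optional` DSL call is now expected to define both the reader and the writer itself. A minimal, self-contained sketch of that idea, assuming a simplified `optional` macro (`SketchBaseModel`, `SketchListParams`, and the `@data` ivar are illustrative stand-ins, not the SDK's real `OpenAI::Internal::Type::BaseModel` machinery):

    # Simplified stand-in for the model DSL: `optional` defines both accessors,
    # so YARD only needs a plain `@!attribute` rather than `@!attribute [r]`
    # plus a parsed `attr_writer`.
    class SketchBaseModel
      def self.optional(name, _type = nil, **_opts)
        define_method(name) { (@data ||= {})[name] }                    # reader
        define_method(:"#{name}=") { |val| (@data ||= {})[name] = val } # writer
      end
    end

    class SketchListParams < SketchBaseModel
      # @!attribute after
      #   @return [String, nil]
      optional :after, String
    end

    params = SketchListParams.new
    params.after = "obj_foo" # the writer exists with no attr_writer declaration
    params.after             # => "obj_foo"

Under that assumption, the deleted `# @return [...]` / `attr_writer` pairs were redundant documentation for methods the DSL already generates, which is why the hunks can remove them without changing runtime behavior.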
diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 6067a1a4..13baf1ce 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#cancel class RunCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 8ae80de8..6af10c28 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -8,8 +8,7 @@ module Threads # # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class RunCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id @@ -20,7 +19,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [String] required :assistant_id, String - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -33,10 +32,6 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!attribute additional_instructions # Appends additional instructions at the end of the instructions for the run. This # is useful for modifying the behavior on a per-run basis without overriding other @@ -101,7 +96,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::Threads::RunCreateParams::Model }, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -109,10 +104,6 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute reasoning_effort # **o-series models only** # @@ -299,27 +290,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index dd2e424a..85e39197 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -6,11 +6,10 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#list class RunListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -19,11 +18,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -32,32 +27,20 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::RunListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index 464d303b..307672fd 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#retrieve class RunRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 16181aa3..34faf0fa 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -8,8 +8,7 @@ module Threads # # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -30,27 +29,19 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolOutput < OpenAI::Internal::Type::BaseModel - # @!attribute [r] output + # @!attribute output # The output of the tool call to be submitted to continue the run. # # @return [String, nil] optional :output, String - # @!parse - # # @return [String] - # attr_writer :output - - # @!attribute [r] tool_call_id + # @!attribute tool_call_id # The ID of the tool call in the `required_action` object within the run object # the output is being submitted for. 
# # @return [String, nil] optional :tool_call_id, String - # @!parse - # # @return [String] - # attr_writer :tool_call_id - # @!method initialize(output: nil, tool_call_id: nil) # @param output [String] # @param tool_call_id [String] diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index ddfe60d0..2d418080 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#update class RunUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb index e1c7ac7f..ad3f6c66 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb @@ -18,16 +18,12 @@ class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!attribute [r] logs + # @!attribute logs # The text output from the Code Interpreter tool call. # # @return [String, nil] optional :logs, String - # @!parse - # # @return [String] - # attr_writer :logs - # @!method initialize(index:, logs: nil, type: :logs) # Text output from the Code Interpreter tool call as part of a run step. # diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index d46e7d33..7e0c8ae1 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -18,15 +18,11 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image] required :type, const: :image - # @!attribute [r] image + # @!attribute image # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] optional :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] - # attr_writer :image - # @!method initialize(index:, image: nil, type: :image) # @param index [Integer] # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] @@ -34,17 +30,13 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. 
# # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(file_id: nil) # @param file_id [String] end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 3b041752..84e66baf 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -19,27 +19,19 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!attribute [r] id + # @!attribute id # The ID of the tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # The Code Interpreter tool call definition. # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] - # attr_writer :code_interpreter - # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) # Details of the Code Interpreter tool call the run step was involved in. # @@ -50,17 +42,13 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] input + # @!attribute input # The input to the Code Interpreter tool call. # # @return [String, nil] optional :input, String - # @!parse - # # @return [String] - # attr_writer :input - - # @!attribute [r] outputs + # @!attribute outputs # The outputs from the Code Interpreter tool call. Code Interpreter can output one # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. @@ -69,10 +57,6 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } - # @!parse - # # @return [Array] - # attr_writer :outputs - # @!method initialize(input: nil, outputs: nil) # The Code Interpreter tool call definition. # diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 9ab8ead6..f42b150c 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -32,28 +32,20 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranking_options + # @!attribute ranking_options # The ranking options for the file search. 
# # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] - # attr_writer :ranking_options - - # @!attribute [r] results + # @!attribute results # The results of the file search. # # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } - # @!parse - # # @return [Array] - # attr_writer :results - # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. # @@ -118,7 +110,7 @@ class Result < OpenAI::Internal::Type::BaseModel # @return [Float] required :score, Float - # @!attribute [r] content + # @!attribute content # The content of the result that was found. The content is only included if # requested via the include query parameter. # @@ -126,10 +118,6 @@ class Result < OpenAI::Internal::Type::BaseModel optional :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } - # @!parse - # # @return [Array] - # attr_writer :content - # @!method initialize(file_id:, file_name:, score:, content: nil) # A result instance of the file search. # @@ -139,27 +127,19 @@ class Result < OpenAI::Internal::Type::BaseModel # @param content [Array] class Content < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text + # @!attribute text # The text content of the file. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - - # @!attribute [r] type + # @!attribute type # The type of the content. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, enum: -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] - # attr_writer :type - # @!method initialize(text: nil, type: nil) # @param text [String] # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index c4e4e5e3..31b50baf 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -25,16 +25,12 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!attribute [r] id + # @!attribute id # The ID of the tool call object. 
# # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - # @!method initialize(file_search:, index:, id: nil, type: :file_search) # @param file_search [Object] # @param index [Integer] diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index fe7116d5..c9af620f 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -19,26 +19,18 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!attribute [r] id + # @!attribute id # The ID of the tool call object. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] function + # @!attribute function # The definition of the function that was called. # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] optional :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] - # attr_writer :function - # @!method initialize(index:, id: nil, function: nil, type: :function) # @param index [Integer] # @param id [String] @@ -47,26 +39,18 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments passed to the function. # # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!attribute output # The output of the function. This will be `null` if the outputs have not been # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 10ae040b..ec46591a 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -6,16 +6,12 @@ module Beta module Threads module Runs class RunStepDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] step_details + # @!attribute step_details # The details of the run step. # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject, nil] optional :step_details, union: -> { OpenAI::Models::Beta::Threads::Runs::RunStepDelta::StepDetails } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] - # attr_writer :step_details - # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. 
# diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index c57bba4e..4335b875 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -12,16 +12,12 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message_creation] required :type, const: :message_creation - # @!attribute [r] message_creation + # @!attribute message_creation # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] optional :message_creation, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] - # attr_writer :message_creation - # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. # @@ -30,16 +26,12 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel - # @!attribute [r] message_id + # @!attribute message_id # The ID of the message that was created by this run step. # # @return [String, nil] optional :message_id, String - # @!parse - # # @return [String] - # attr_writer :message_id - # @!method initialize(message_id: nil) # @param message_id [String] end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 4f484a34..75b02311 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -7,8 +7,7 @@ module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#list class StepListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -16,7 +15,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -25,11 +24,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -38,11 +33,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. 
Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -55,32 +46,20 @@ class StepListParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::Runs::StepListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] - # attr_writer :order - # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # @param thread_id [String] # @param after [String] diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 3e6934b7..00db2d8a 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -7,8 +7,7 @@ module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve class StepRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -21,7 +20,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :run_id, String - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -34,10 +33,6 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) # @param thread_id [String] # @param run_id [String] diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index ca02ab4c..417a924a 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -12,7 +12,7 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :tool_calls] required :type, const: :tool_calls - # @!attribute [r] tool_calls + # @!attribute tool_calls # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
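# Illustrative sketch, not part of the patch: the cursor parameters above
# (`after`/`before`/`limit`/`order`, plus `include` for file-search result
# content) drive step listing. The positional `run_id` plus keyword
# `thread_id` calling convention is assumed; check the generated Steps#list
# signature in your SDK version.
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
steps = client.beta.threads.runs.steps.list(
  "run_abc123",
  thread_id: "thread_abc123",
  limit: 20,
  order: :desc,
  include: [:"step_details.tool_calls[*].file_search.results[*].content"]
)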
@@ -21,10 +21,6 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(tool_calls: nil, type: :tool_calls) # Details of the tool call. # diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index cbac0c09..2d767df4 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -5,26 +5,18 @@ module Models module Beta module Threads class TextDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] annotations + # @!attribute annotations # # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::AnnotationDelta] } - # @!parse - # # @return [Array] - # attr_writer :annotations - - # @!attribute [r] value + # @!attribute value # The data that makes up the text. # # @return [String, nil] optional :value, String - # @!parse - # # @return [String] - # attr_writer :value - # @!method initialize(annotations: nil, value: nil) # @param annotations [Array] # @param value [String] diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 12701fae..7191d790 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -17,15 +17,11 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!attribute [r] text + # @!attribute text # # @return [OpenAI::Models::Beta::Threads::TextDelta, nil] optional :text, -> { OpenAI::Models::Beta::Threads::TextDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::TextDelta] - # attr_writer :text - # @!method initialize(index:, text: nil, type: :text) # The text content that is part of a message. # diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 8e808b6e..2e58ff3d 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -60,7 +60,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletion::ServiceTier }, nil?: true - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when @@ -69,20 +69,12 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - - # @!attribute [r] usage + # @!attribute usage # Usage statistics for the completion request. # # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage } - # @!parse - # # @return [OpenAI::Models::CompletionUsage] - # attr_writer :usage - # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") # Represents a chat completion response returned by model, based on the provided # input. 
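# Illustrative sketch, not part of the patch: `system_fingerprint` and `usage`
# are optional readers on ChatCompletion and may be nil, so read them
# defensively. `completion` stands for a value returned by the chat
# completions endpoint.
def log_completion_meta(completion)
  puts "fingerprint: #{completion.system_fingerprint || "(none)"}"
  puts "total tokens: #{completion.usage&.total_tokens || "(not reported)"}"
end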
diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index bf6a5b2e..8cd2c5c2 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -35,34 +35,26 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, nil?: true - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!attribute refusal # The refusal message by the assistant. # # @return [String, nil] optional :refusal, String, nil?: true - # @!attribute [r] tool_calls + # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Messages sent by the model in response to user messages. # diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 61a57392..77585995 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -60,7 +60,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier }, nil?: true - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. @@ -68,10 +68,6 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - # @!attribute usage # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it @@ -144,43 +140,31 @@ class Delta < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :content, String, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] - # attr_writer :function_call - # @!attribute refusal # The refusal message generated by the model. # # @return [String, nil] optional :refusal, String, nil?: true - # @!attribute [r] role + # @!attribute role # The role of the author of this message. 
# # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] optional :role, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] - # attr_writer :role - - # @!attribute [r] tool_calls + # @!attribute tool_calls # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # A chat completion delta generated by streamed model responses. # @@ -194,7 +178,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -203,20 +187,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function to call. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(arguments: nil, name: nil) # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. @@ -247,35 +223,23 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @return [Integer] required :index, Integer - # @!attribute [r] id + # @!attribute id # The ID of the tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] function + # @!attribute function # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] optional :function, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] - # attr_writer :function - - # @!attribute [r] type + # @!attribute type # The type of the tool. Currently, only `function` is supported. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] optional :type, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] - # attr_writer :type - # @!method initialize(index:, id: nil, function: nil, type: nil) # @param index [Integer] # @param id [String] @@ -284,7 +248,7 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. 
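# Illustrative sketch, not part of the patch: as the docs above note, streamed
# tool-call `arguments` arrive in fragments and `index` identifies which call
# a fragment extends, so accumulate per index before parsing. `chunks` stands
# for an enumerable of ChatCompletionChunk values from a streaming request.
require "json"

def collect_tool_calls(chunks)
  calls = Hash.new { |h, k| h[k] = {id: nil, name: nil, arguments: +""} }
  chunks.each do |chunk|
    (chunk.choices.first&.delta&.tool_calls || []).each do |tc|
      entry = calls[tc.index]
      entry[:id] ||= tc.id
      entry[:name] ||= tc.function&.name
      entry[:arguments] << (tc.function&.arguments || "")
    end
  end
  calls # parse each entry's :arguments with JSON.parse once the stream ends
end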
Validate the @@ -293,20 +257,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function to call. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(arguments: nil, name: nil) # @param arguments [String] # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index af75a6c6..ecfb1d58 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -43,37 +43,25 @@ class File < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_data + # @!attribute file_data # The base64 encoded file data, used when passing the file to the model as a # string. # # @return [String, nil] optional :file_data, String - # @!parse - # # @return [String] - # attr_writer :file_data - - # @!attribute [r] file_id + # @!attribute file_id # The ID of an uploaded file to use as input. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file, used when passing the file to the model as a string. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - # @!method initialize(file_data: nil, file_id: nil, filename: nil) # @param file_data [String] # @param file_id [String] diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 023fa1d0..06c450d2 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -29,17 +29,13 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] - # attr_writer :detail - # @!method initialize(url:, detail: nil) # @param url [String] # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 1eb265ea..2c7e20e4 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :developer] required :role, const: :developer - # @!attribute [r] name + # @!attribute name # An optional name for the participant. 
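# Illustrative sketch, not part of the patch: per the image content-part hunk
# above, an image part pairs `url` with an optional `detail` hint (`:low`
# trades fidelity for fewer tokens). Plain hashes are used on the assumption
# that the SDK coerces them into the content-part models; constructing the
# model instances directly works as well.
messages = [
  {
    role: :user,
    content: [
      {type: :text, text: "What is in this image?"},
      {type: :image_url, image_url: {url: "https://example.com/cat.png", detail: :low}}
    ]
  }
]
# pass `messages` to chat.completions.create as usual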
Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :developer) # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 63485db1..3d874f33 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -22,7 +22,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :assistant] required :role, const: :assistant - # @!attribute [r] annotations + # @!attribute annotations # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # @@ -30,10 +30,6 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] } - # @!parse - # # @return [Array] - # attr_writer :annotations - # @!attribute audio # If the audio output modality is requested, this object contains data about the # audio response from the model. @@ -42,28 +38,20 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudio }, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] - # attr_writer :function_call - - # @!attribute [r] tool_calls + # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # A chat completion message generated by the model. # diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index 04006615..4bc8cef1 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] include_usage + # @!attribute include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. 
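# Illustrative sketch, not part of the patch: with `include_usage` set, the
# final pre-[DONE] chunk carries `usage` and an empty `choices` array, so a
# consumer can account for tokens without a second request. `stream_raw` is
# the streaming entry point referenced elsewhere in this patch
# (OpenAI::Resources::Chat::Completions#stream_raw); adjust to your version.
require "openai"

client = OpenAI::Client.new
client.chat.completions.stream_raw(
  model: :"gpt-4o-mini",
  messages: [{role: :user, content: "Say hi"}],
  stream_options: {include_usage: true}
).each do |chunk|
  print chunk.choices.first&.delta&.content
  puts "\n[#{chunk.usage.total_tokens} tokens]" if chunk.usage
end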
@@ -16,10 +16,6 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :include_usage, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :include_usage - # @!method initialize(include_usage: nil) # Options for streaming response. Only set this when you set `stream: true`. # diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index bb2c91b6..19d5d598 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :system] required :role, const: :system - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :system) # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 34b81339..7f72cdc3 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :user] required :role, const: :user - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :user) # Messages sent by an end user, containing prompts or additional context # information. diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index fff48ea0..999209e1 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -7,8 +7,7 @@ module Chat # # @see OpenAI::Resources::Chat::Completions#stream_raw class CompletionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute messages @@ -49,7 +48,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :frequency_penalty, Float, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated in favor of `tool_choice`. # # Controls which (if any) function is called by the model. 
@@ -68,11 +67,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] optional :function_call, union: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # attr_writer :function_call - - # @!attribute [r] functions + # @!attribute functions # Deprecated in favor of `tools`. # # A list of functions the model may generate JSON inputs for. @@ -81,10 +76,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :functions, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::CompletionCreateParams::Function] } - # @!parse - # # @return [Array] - # attr_writer :functions - # @!attribute logit_bias # Modify the likelihood of specified tokens appearing in the completion. # @@ -162,7 +153,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -170,10 +161,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute prediction # Static predicted output content, such as the content of a text file that is # being regenerated. @@ -200,7 +187,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true - # @!attribute [r] response_format + # @!attribute response_format # An object specifying the format that the model must output. # # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured @@ -215,10 +202,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Models::Chat::CompletionCreateParams::ResponseFormat } - # @!parse - # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # attr_writer :response_format - # @!attribute seed # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and @@ -283,7 +266,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] tool_choice + # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. 
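# Illustrative sketch, not part of the patch: per the `response_format` notes
# above, a `json_schema` format enables Structured Outputs. The schema and
# field names below are invented for illustration; the request-hash nesting
# is assumed to coerce into the generated models.
require "openai"

client = OpenAI::Client.new
completion = client.chat.completions.create(
  model: :"gpt-4o-2024-08-06",
  messages: [{role: :user, content: "Extract: Jane, 34, Lisbon"}],
  response_format: {
    type: :json_schema,
    json_schema: {
      name: "person",
      strict: true,
      schema: {
        type: "object",
        properties: {
          name: {type: "string"},
          age: {type: "integer"},
          city: {type: "string"}
        },
        required: %w[name age city],
        additionalProperties: false
      }
    }
  }
)
puts completion.choices.first.message.content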
`required` means @@ -297,11 +280,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # attr_writer :tool_choice - - # @!attribute [r] tools + # @!attribute tools # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. @@ -309,10 +288,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -331,7 +306,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -339,11 +314,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - - # @!attribute [r] web_search_options + # @!attribute web_search_options # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). @@ -351,10 +322,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } - # @!parse - # # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] - # attr_writer :web_search_options - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # @param messages [Array] # @param model [String, Symbol, OpenAI::Models::ChatModel] @@ -459,18 +426,14 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the function does, used by the model to choose when and # how to call the function. 
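# Illustrative sketch, not part of the patch: as documented above, `tools`
# accepts up to 128 function tools, and `tool_choice` can force one of them by
# name instead of leaving the pick to the model. The tool definition is a
# stand-in.
require "openai"

client = OpenAI::Client.new
completion = client.chat.completions.create(
  model: :"gpt-4o-mini",
  messages: [{role: :user, content: "Weather in Oslo?"}],
  tools: [
    {
      type: :function,
      function: {
        name: "get_weather",
        description: "Look up current weather for a city",
        parameters: {
          type: "object",
          properties: {city: {type: "string"}},
          required: ["city"]
        }
      }
    }
  ],
  tool_choice: {type: :function, function: {name: "get_weather"}}
)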
# # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] parameters + # @!attribute parameters # The parameters the functions accepts, described as a JSON Schema object. See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, # and the @@ -482,10 +445,6 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :parameters - # @!method initialize(name:, description: nil, parameters: nil) # @param name [String] # @param description [String] @@ -578,7 +537,7 @@ module Stop end class WebSearchOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] search_context_size + # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # @@ -586,10 +545,6 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel optional :search_context_size, enum: -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] - # attr_writer :search_context_size - # @!attribute user_location # Approximate location parameters for the search. # @@ -644,48 +599,32 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel - # @!attribute [r] city + # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. # # @return [String, nil] optional :city, String - # @!parse - # # @return [String] - # attr_writer :city - - # @!attribute [r] country + # @!attribute country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. # # @return [String, nil] optional :country, String - # @!parse - # # @return [String] - # attr_writer :country - - # @!attribute [r] region + # @!attribute region # Free text input for the region of the user, e.g. `California`. # # @return [String, nil] optional :region, String - # @!parse - # # @return [String] - # attr_writer :region - - # @!attribute [r] timezone + # @!attribute timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. # # @return [String, nil] optional :timezone, String - # @!parse - # # @return [String] - # attr_writer :timezone - # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) # Approximate location parameters for the search. 
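# Illustrative sketch, not part of the patch: `web_search_options` combines a
# `search_context_size` hint with an approximate user location built from the
# four optional fields above. The search-preview model name and the
# `type: :approximate` discriminator are assumptions; check the generated
# UserLocation model.
require "openai"

client = OpenAI::Client.new
completion = client.chat.completions.create(
  model: :"gpt-4o-search-preview",
  messages: [{role: :user, content: "Find a good bakery near me."}],
  web_search_options: {
    search_context_size: :low,
    user_location: {
      type: :approximate,
      approximate: {city: "San Francisco", country: "US", timezone: "America/Los_Angeles"}
    }
  }
)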
# diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index c32fe53d..819d9af4 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#delete class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 911b8a36..ab7f9060 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -5,30 +5,21 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#list class CompletionListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last chat completion from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of Chat Completions to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!attribute metadata # A list of metadata keys to filter the Chat Completions by. Example: # @@ -37,27 +28,19 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] model + # @!attribute model # The model used to generate the Chat Completions. # # @return [String, nil] optional :model, String - # @!parse - # # @return [String] - # attr_writer :model - - # @!attribute [r] order + # @!attribute order # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
# # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::CompletionListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index ccdba91c..50730021 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#retrieve class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 954a4400..b3a23cff 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#update class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 02329b59..19a29905 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -6,41 +6,28 @@ module Chat module Completions # @see OpenAI::Resources::Chat::Completions::Messages#list class MessageListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last message from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of messages to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
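# Illustrative sketch, not part of the patch: stored chat completions are
# listed with the `after`/`limit`/`order` cursor parameters above, optionally
# filtered by `model` or `metadata`. The `auto_paging_each` helper is assumed
# from the SDK's cursor-page support; plain `each` over pages works too.
require "openai"

client = OpenAI::Client.new
page = client.chat.completions.list(model: "gpt-4o-mini", limit: 50, order: :desc)
page.auto_paging_each do |completion|
  puts completion.id
end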
# # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::Completions::MessageListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index e31c3bbf..4980830d 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -36,7 +36,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text_completion] required :object, const: :text_completion - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when @@ -45,20 +45,12 @@ class Completion < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - - # @!attribute [r] usage + # @!attribute usage # Usage statistics for the completion request. # # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage } - # @!parse - # # @return [OpenAI::Models::CompletionUsage] - # attr_writer :usage - # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 096074a0..3fcd5752 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -52,42 +52,26 @@ module FinishReason # @see OpenAI::Models::CompletionChoice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text_offset + # @!attribute text_offset # # @return [Array, nil] optional :text_offset, OpenAI::Internal::Type::ArrayOf[Integer] - # @!parse - # # @return [Array] - # attr_writer :text_offset - - # @!attribute [r] token_logprobs + # @!attribute token_logprobs # # @return [Array, nil] optional :token_logprobs, OpenAI::Internal::Type::ArrayOf[Float] - # @!parse - # # @return [Array] - # attr_writer :token_logprobs - - # @!attribute [r] tokens + # @!attribute tokens # # @return [Array, nil] optional :tokens, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tokens - - # @!attribute [r] top_logprobs + # @!attribute top_logprobs # # @return [ArrayFloat}>, nil] optional :top_logprobs, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[Float]] - # @!parse - # # @return [ArrayFloat}>] - # attr_writer :top_logprobs - # @!method initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil) # @param text_offset [Array] # @param token_logprobs [Array] diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index c1e3656e..7084d9f5 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -6,8 +6,7 @@ module Models # # @see OpenAI::Resources::Completions#create_streaming class CompletionCreateParams < 
OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -175,7 +174,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -183,10 +182,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index d8e75136..e2b4092d 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -21,26 +21,18 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!attribute [r] completion_tokens_details + # @!attribute completion_tokens_details # Breakdown of tokens used in a completion. # # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails, nil] optional :completion_tokens_details, -> { OpenAI::Models::CompletionUsage::CompletionTokensDetails } - # @!parse - # # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails] - # attr_writer :completion_tokens_details - - # @!attribute [r] prompt_tokens_details + # @!attribute prompt_tokens_details # Breakdown of tokens used in the prompt. # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails, nil] optional :prompt_tokens_details, -> { OpenAI::Models::CompletionUsage::PromptTokensDetails } - # @!parse - # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails] - # attr_writer :prompt_tokens_details - # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) # Usage statistics for the completion request. # @@ -52,38 +44,26 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] accepted_prediction_tokens + # @!attribute accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. # # @return [Integer, nil] optional :accepted_prediction_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :accepted_prediction_tokens - - # @!attribute [r] audio_tokens + # @!attribute audio_tokens # Audio input tokens generated by the model. # # @return [Integer, nil] optional :audio_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :audio_tokens - - # @!attribute [r] reasoning_tokens + # @!attribute reasoning_tokens # Tokens generated by the model for reasoning. 
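# Illustrative sketch, not part of the patch: reasoning tokens (and, per the
# lines that follow, rejected prediction tokens) are counted inside
# `completion_tokens` for billing even though they never appear in the visible
# output, so the split below separates the two. `usage` stands for a
# CompletionUsage value; the "visible"/"hidden" naming is mine.
def completion_token_split(usage)
  details = usage.completion_tokens_details
  hidden = details ? (details.reasoning_tokens || 0) + (details.rejected_prediction_tokens || 0) : 0
  {visible: usage.completion_tokens - hidden, hidden_but_billed: hidden}
end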
# # @return [Integer, nil] optional :reasoning_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :reasoning_tokens - - # @!attribute [r] rejected_prediction_tokens + # @!attribute rejected_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that did # not appear in the completion. However, like reasoning tokens, these tokens are # still counted in the total completion tokens for purposes of billing, output, @@ -92,10 +72,6 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :rejected_prediction_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :rejected_prediction_tokens - # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) # Breakdown of tokens used in a completion. # @@ -107,26 +83,18 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] audio_tokens + # @!attribute audio_tokens # Audio input tokens present in the prompt. # # @return [Integer, nil] optional :audio_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :audio_tokens - - # @!attribute [r] cached_tokens + # @!attribute cached_tokens # Cached tokens present in the prompt. # # @return [Integer, nil] optional :cached_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :cached_tokens - # @!method initialize(audio_tokens: nil, cached_tokens: nil) # Breakdown of tokens used in the prompt. # diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 2586d07f..ec1dfd3d 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Embeddings#create class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -31,29 +30,21 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::EmbeddingModel] required :model, union: -> { OpenAI::Models::EmbeddingCreateParams::Model } - # @!attribute [r] dimensions + # @!attribute dimensions # The number of dimensions the resulting output embeddings should have. Only # supported in `text-embedding-3` and later models. # # @return [Integer, nil] optional :dimensions, Integer - # @!parse - # # @return [Integer] - # attr_writer :dimensions - - # @!attribute [r] encoding_format + # @!attribute encoding_format # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). # # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil] optional :encoding_format, enum: -> { OpenAI::Models::EmbeddingCreateParams::EncodingFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # attr_writer :encoding_format - - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
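# Illustrative sketch, not part of the patch: per the embedding params above,
# `dimensions` shortens the output vectors on `text-embedding-3` and later
# models, and `encoding_format` picks between plain floats and base64-packed
# floats.
require "openai"

client = OpenAI::Client.new
resp = client.embeddings.create(
  input: "The food was delicious and the waiter was friendly.",
  model: :"text-embedding-3-small",
  dimensions: 256,
  encoding_format: :float
)
vector = resp.data.first.embedding # 256 floats, given the request above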
@@ -61,10 +52,6 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) # @param input [String, Array, Array, Array>] # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index d66fae28..54d31b12 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#create class EvalCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data_source_config @@ -32,26 +31,18 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the evaluation. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - - # @!attribute [r] share_with_openai + # @!attribute share_with_openai # Indicates whether the evaluation is shared with OpenAI. # # @return [Boolean, nil] optional :share_with_openai, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :share_with_openai - # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] # @param testing_criteria [Array] @@ -89,16 +80,12 @@ class Custom < OpenAI::Internal::Type::BaseModel # @return [Symbol, :custom] required :type, const: :custom - # @!attribute [r] include_sample_schema + # @!attribute include_sample_schema # Whether to include the sample schema in the data source. # # @return [Boolean, nil] optional :include_sample_schema, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :include_sample_schema - # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) # A CustomDataSourceConfig object that defines the schema for the data source used # for the evaluation runs. 
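# Illustrative sketch, not part of the patch: an eval pairs a data source
# config with testing criteria; `include_sample_schema: true` opts the sample
# shape into the generated schema, per the Custom config hunk above. The
# `string_check` criterion shape is a stand-in; consult the grader models for
# the exact fields.
require "openai"

client = OpenAI::Client.new
client.evals.create(
  name: "ticket-triage",
  data_source_config: {
    type: :custom,
    item_schema: {
      type: "object",
      properties: {ticket: {type: "string"}, label: {type: "string"}},
      required: %w[ticket label]
    },
    include_sample_schema: true
  },
  testing_criteria: [
    {
      type: :string_check,
      name: "label matches",
      input: "{{ sample.output_text }}",
      reference: "{{ item.label }}",
      operation: :eq
    }
  ]
)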
This schema is used to define the shape of the data diff --git a/lib/openai/models/eval_delete_params.rb b/lib/openai/models/eval_delete_params.rb index 80e4d81d..7e1938d2 100644 --- a/lib/openai/models/eval_delete_params.rb +++ b/lib/openai/models/eval_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#delete class EvalDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index e0d2fd84..3888ef6c 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -4,52 +4,35 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#list class EvalListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last eval from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of evals to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. # # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::EvalListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::EvalListParams::Order] - # attr_writer :order - - # @!attribute [r] order_by + # @!attribute order_by # Evals can be ordered by creation time or last updated time. Use `created_at` for # creation time or `updated_at` for last updated time. 
# # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil] optional :order_by, enum: -> { OpenAI::Models::EvalListParams::OrderBy } - # @!parse - # # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy] - # attr_writer :order_by - # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/eval_retrieve_params.rb b/lib/openai/models/eval_retrieve_params.rb index e06ffbe4..2dcaa7bb 100644 --- a/lib/openai/models/eval_retrieve_params.rb +++ b/lib/openai/models/eval_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#retrieve class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 9ff351b4..8d037316 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -34,16 +34,12 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text_similarity] required :type, const: :text_similarity - # @!attribute [r] name + # @!attribute name # The name of the grader. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) # A TextSimilarityGrader object which grades text based on similarity metrics. # diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index 8572bf39..c8476dcf 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#update class EvalUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata @@ -19,16 +18,12 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # Rename the evaluation. 
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(metadata: nil, name: nil, request_options: {}) # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 56b9b732..ac306948 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -28,16 +28,12 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type } - # @!attribute [r] sampling_params + # @!attribute sampling_params # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } - # @!parse - # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # attr_writer :sampling_params - # @!method initialize(input_messages:, model:, source:, type:, sampling_params: nil) # A CompletionsRunDataSource object describing a model sampling configuration. # @@ -341,15 +337,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}] required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!attribute [r] sample + # @!attribute sample # # @return [Hash{Symbol=>Object}, nil] optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :sample - # @!method initialize(item:, sample: nil) # @param item [Hash{Symbol=>Object}] # @param sample [Hash{Symbol=>Object}] @@ -445,46 +437,30 @@ module Type # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute [r] max_completion_tokens + # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. # # @return [Integer, nil] optional :max_completion_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_completion_tokens - - # @!attribute [r] seed + # @!attribute seed # A seed value to initialize the randomness, during sampling. # # @return [Integer, nil] optional :seed, Integer - # @!parse - # # @return [Integer] - # attr_writer :seed - - # @!attribute [r] temperature + # @!attribute temperature # A higher temperature increases randomness in the outputs. # # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - - # @!attribute [r] top_p + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
# # @return [Float, nil] optional :top_p, Float - # @!parse - # # @return [Float] - # attr_writer :top_p - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) # @param max_completion_tokens [Integer] # @param seed [Integer] diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 3b06b922..2ddad8b0 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -56,15 +56,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}] required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!attribute [r] sample + # @!attribute sample # # @return [Hash{Symbol=>Object}, nil] optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :sample - # @!method initialize(item:, sample: nil) # @param item [Hash{Symbol=>Object}] # @param sample [Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/run_cancel_params.rb b/lib/openai/models/evals/run_cancel_params.rb index 958dad3b..6f84e423 100644 --- a/lib/openai/models/evals/run_cancel_params.rb +++ b/lib/openai/models/evals/run_cancel_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#cancel class RunCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 3e03a25c..61365782 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#create class RunCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data_source @@ -26,16 +25,12 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the run. 
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] # @param metadata [Hash{Symbol=>String}, nil] diff --git a/lib/openai/models/evals/run_delete_params.rb b/lib/openai/models/evals/run_delete_params.rb index 887478b6..0f8cc006 100644 --- a/lib/openai/models/evals/run_delete_params.rb +++ b/lib/openai/models/evals/run_delete_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#delete class RunDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/run_delete_response.rb b/lib/openai/models/evals/run_delete_response.rb index bd1154dd..eb8707ad 100644 --- a/lib/openai/models/evals/run_delete_response.rb +++ b/lib/openai/models/evals/run_delete_response.rb @@ -5,33 +5,21 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#delete class RunDeleteResponse < OpenAI::Internal::Type::BaseModel - # @!attribute [r] deleted + # @!attribute deleted # # @return [Boolean, nil] optional :deleted, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :deleted - - # @!attribute [r] object + # @!attribute object # # @return [String, nil] optional :object, String - # @!parse - # # @return [String] - # attr_writer :object - - # @!attribute [r] run_id + # @!attribute run_id # # @return [String, nil] optional :run_id, String - # @!parse - # # @return [String] - # attr_writer :run_id - # @!method initialize(deleted: nil, object: nil, run_id: nil) # @param deleted [Boolean] # @param object [String] diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index b5c500f7..a2b2afb4 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -5,52 +5,35 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#list class RunListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last run from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of runs to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Evals::RunListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # attr_writer :order - - # @!attribute [r] status + # @!attribute status # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | # "canceled". 
# # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status] - # attr_writer :status - # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/evals/run_retrieve_params.rb b/lib/openai/models/evals/run_retrieve_params.rb index 648fa819..0ca8c695 100644 --- a/lib/openai/models/evals/run_retrieve_params.rb +++ b/lib/openai/models/evals/run_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#retrieve class RunRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index dc3ba2dd..301d2acd 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -6,8 +6,7 @@ module Evals module Runs # @see OpenAI::Resources::Evals::Runs::OutputItems#list class OutputItemListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id @@ -15,48 +14,32 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!attribute [r] after + # @!attribute after # Identifier for the last output item from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of output items to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # attr_writer :order - - # @!attribute [r] status + # @!attribute status # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. 
# # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil] optional :status, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] - # attr_writer :status - # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) # @param eval_id [String] # @param after [String] diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index fe4db7ad..fc0d6e75 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -180,26 +180,18 @@ class Input < OpenAI::Internal::Type::BaseModel end class Output < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message. # # @return [String, nil] optional :content, String - # @!parse - # # @return [String] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The role of the message (e.g. "system", "assistant", "user"). # # @return [String, nil] optional :role, String - # @!parse - # # @return [String] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # @param content [String] # @param role [String] diff --git a/lib/openai/models/evals/runs/output_item_retrieve_params.rb b/lib/openai/models/evals/runs/output_item_retrieve_params.rb index 599a0b19..d85fa9e5 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_params.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_params.rb @@ -6,8 +6,7 @@ module Evals module Runs # @see OpenAI::Resources::Evals::Runs::OutputItems#retrieve class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index bf311b56..eec2ea1a 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -180,26 +180,18 @@ class Input < OpenAI::Internal::Type::BaseModel end class Output < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message. # # @return [String, nil] optional :content, String - # @!parse - # # @return [String] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The role of the message (e.g. "system", "assistant", "user"). 
# # @return [String, nil] optional :role, String - # @!parse - # # @return [String] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # @param content [String] # @param role [String] diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 5557fffd..22d607fb 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#content class FileContentParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 567ed995..28d5f936 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#create class FileCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index 177b99cb..3893e91d 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#delete class FileDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 5f1be612..76de1996 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::Files#list class FileListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,42 +16,26 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 10,000, and the default is 10,000. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::FileListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::FileListParams::Order] - # attr_writer :order - - # @!attribute [r] purpose + # @!attribute purpose # Only return files with the given purpose. # # @return [String, nil] optional :purpose, String - # @!parse - # # @return [String] - # attr_writer :purpose - # @!method initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index fd9c63bb..5031a63d 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -49,27 +49,19 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FileObject::Status] required :status, enum: -> { OpenAI::Models::FileObject::Status } - # @!attribute [r] expires_at + # @!attribute expires_at # The Unix timestamp (in seconds) for when the file will expire. # # @return [Integer, nil] optional :expires_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expires_at - - # @!attribute [r] status_details + # @!attribute status_details # Deprecated. For details on why a fine-tuning training file failed validation, # see the `error` field on `fine_tuning.job`. # # @return [String, nil] optional :status_details, String - # @!parse - # # @return [String] - # attr_writer :status_details - # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) # The `File` object represents a document that has been uploaded to OpenAI. # diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index 6c8c1a70..29c0bdc3 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#retrieve class FileRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb index ef958285..8a333418 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -6,8 +6,7 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#create class PermissionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute project_ids diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb index 7281cf70..339cc7ea 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -6,8 +6,7 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#delete 
class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index 29fffbe9..6c272af8 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -6,50 +6,33 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last permission ID from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of permissions to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # The order in which to retrieve permissions. # # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] optional :order, enum: -> { OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # attr_writer :order - - # @!attribute [r] project_id + # @!attribute project_id # The ID of the project to get permissions for. # # @return [String, nil] optional :project_id, String - # @!parse - # # @return [String] - # attr_writer :project_id - # @!method initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 27a10624..a5e3d2a9 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -132,16 +132,12 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] method_ + # @!attribute method_ # The method used for fine-tuning. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method, nil] optional :method_, -> { OpenAI::Models::FineTuning::FineTuningJob::Method }, api_name: :method - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method] - # attr_writer :method_ - # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. 
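Aside for reviewers, not part of the patch itself: the recurring edit in these hunks replaces the read-only `@!attribute [r]` declaration plus its `@!parse`-hidden `attr_writer` with a single read/write `@!attribute`, so each optional field is documented once as nilable. A minimal sketch of the resulting accessor behavior, assuming the `optional` macro generates both a reader and a writer (as it appears to elsewhere in this patch); the values are illustrative only:

    # Hypothetical values; `:auto` / Integer mirror the union documented in the
    # Hyperparameters hunk just below.
    hp = OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters.new(
      batch_size: :auto,
      n_epochs: 3
    )
    hp.n_epochs                  # => 3
    hp.learning_rate_multiplier  # => nil (optional readers stay nilable)
    hp.batch_size = 16           # writer exists without any @!parse hint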
@@ -198,18 +194,14 @@ class Error < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -217,21 +209,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. @@ -306,36 +290,24 @@ module Status # @see OpenAI::Models::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel - # @!attribute [r] dpo + # @!attribute dpo # Configuration for the DPO fine-tuning method. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, nil] optional :dpo, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] - # attr_writer :dpo - - # @!attribute [r] supervised + # @!attribute supervised # Configuration for the supervised fine-tuning method. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, nil] optional :supervised, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] - # attr_writer :supervised - - # @!attribute [r] type + # @!attribute type # The type of method. Is either `supervised` or `dpo`. # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] - # attr_writer :type - # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # @@ -345,16 +317,12 @@ class Method < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. 
# # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # @@ -362,7 +330,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -370,11 +338,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] beta + # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # @@ -382,11 +346,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :beta, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :beta - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -394,11 +354,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -406,10 +362,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # @@ -482,17 +434,13 @@ module NEpochs # @see OpenAI::Models::FineTuning::FineTuningJob::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. 
# @@ -500,7 +448,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -508,11 +456,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -520,11 +464,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -532,10 +472,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index f647416e..b5fb144c 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -35,26 +35,18 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"fine_tuning.job.event"] required :object, const: :"fine_tuning.job.event" - # @!attribute [r] data + # @!attribute data # The data associated with the event. # # @return [Object, nil] optional :data, OpenAI::Internal::Type::Unknown - # @!parse - # # @return [Object] - # attr_writer :data - - # @!attribute [r] type + # @!attribute type # The type of event. # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJobEvent::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] - # attr_writer :type - # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") # Fine-tuning job event object # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index b1a0ba4f..98c9eaca 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -25,7 +25,7 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!attribute [r] tags + # @!attribute tags # A list of tags to be attached to the newly created run. 
These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". @@ -33,10 +33,6 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tags - # @!method initialize(project:, entity: nil, name: nil, tags: nil) # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 129f8e75..4f4cf0b2 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#cancel class JobCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index e27eb79c..ada446ed 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#create class JobCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -38,17 +37,13 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [String] required :training_file, String - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # attr_writer :hyperparameters - # @!attribute integrations # A list of integrations to enable for your fine-tuning job. # @@ -68,16 +63,12 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] method_ + # @!attribute method_ # The method used for fine-tuning. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method, nil] optional :method_, -> { OpenAI::Models::FineTuning::JobCreateParams::Method }, api_name: :method - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method] - # attr_writer :method_ - # @!attribute seed # The seed controls the reproducibility of the job. Passing in the same seed and # job parameters should produce the same results, but may differ in rare cases. 
If @@ -155,7 +146,7 @@ module Model # @deprecated class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -163,11 +154,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -175,21 +162,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. @@ -288,7 +267,7 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!attribute [r] tags + # @!attribute tags # A list of tags to be attached to the newly created run. These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". @@ -296,10 +275,6 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tags - # @!method initialize(project:, entity: nil, name: nil, tags: nil) # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an @@ -314,36 +289,24 @@ class Wandb < OpenAI::Internal::Type::BaseModel end class Method < OpenAI::Internal::Type::BaseModel - # @!attribute [r] dpo + # @!attribute dpo # Configuration for the DPO fine-tuning method. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, nil] optional :dpo, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] - # attr_writer :dpo - - # @!attribute [r] supervised + # @!attribute supervised # Configuration for the supervised fine-tuning method. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, nil] optional :supervised, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] - # attr_writer :supervised - - # @!attribute [r] type + # @!attribute type # The type of method. Is either `supervised` or `dpo`. 
# # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] - # attr_writer :type - # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # @@ -353,17 +316,13 @@ class Method < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # @@ -371,7 +330,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -379,11 +338,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] beta + # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # @@ -391,11 +346,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :beta, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :beta - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -403,11 +354,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -415,10 +362,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. 
# @@ -491,17 +434,13 @@ module NEpochs # @see OpenAI::Models::FineTuning::JobCreateParams::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # @@ -509,7 +448,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -517,11 +456,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -529,11 +464,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -541,10 +472,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index d4729ee6..b745c87b 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -5,30 +5,21 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list_events class JobListEventsParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last event from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of events to retrieve. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index e2ed96ce..ddd836ef 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -5,30 +5,21 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list class JobListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last job from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of fine-tuning jobs to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!attribute metadata # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. # Alternatively, set `metadata=null` to indicate no metadata. diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index b1579373..9d8a5b18 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#retrieve class JobRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index f4f1bea1..afa0afa4 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -6,30 +6,21 @@ module FineTuning module Jobs # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list class CheckpointListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last checkpoint ID from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of checkpoints to retrieve. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 98dbd856..05325975 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -62,69 +62,41 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel - # @!attribute [r] full_valid_loss + # @!attribute full_valid_loss # # @return [Float, nil] optional :full_valid_loss, Float - # @!parse - # # @return [Float] - # attr_writer :full_valid_loss - - # @!attribute [r] full_valid_mean_token_accuracy + # @!attribute full_valid_mean_token_accuracy # # @return [Float, nil] optional :full_valid_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :full_valid_mean_token_accuracy - - # @!attribute [r] step + # @!attribute step # # @return [Float, nil] optional :step, Float - # @!parse - # # @return [Float] - # attr_writer :step - - # @!attribute [r] train_loss + # @!attribute train_loss # # @return [Float, nil] optional :train_loss, Float - # @!parse - # # @return [Float] - # attr_writer :train_loss - - # @!attribute [r] train_mean_token_accuracy + # @!attribute train_mean_token_accuracy # # @return [Float, nil] optional :train_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :train_mean_token_accuracy - - # @!attribute [r] valid_loss + # @!attribute valid_loss # # @return [Float, nil] optional :valid_loss, Float - # @!parse - # # @return [Float] - # attr_writer :valid_loss - - # @!attribute [r] valid_mean_token_accuracy + # @!attribute valid_mean_token_accuracy # # @return [Float, nil] optional :valid_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :valid_mean_token_accuracy - # @!method initialize(full_valid_loss: nil, full_valid_mean_token_accuracy: nil, step: nil, train_loss: nil, train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil) # Metrics at the step number during the fine-tuning job. # diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index a37c41bc..ac6820db 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -10,18 +10,14 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the function does, used by the model to choose when and # how to call the function. # # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] parameters + # @!attribute parameters # The parameters the function accepts, described as a JSON Schema object.
See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, # and the @@ -33,10 +29,6 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :parameters - # @!attribute strict # Whether to enable strict schema adherence when generating the function call. If # set to true, the model will follow the exact schema defined in the `parameters` diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index 7d54b273..d76cb186 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -3,38 +3,26 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel - # @!attribute [r] b64_json + # @!attribute b64_json # The base64-encoded JSON of the generated image, if `response_format` is # `b64_json`. # # @return [String, nil] optional :b64_json, String - # @!parse - # # @return [String] - # attr_writer :b64_json - - # @!attribute [r] revised_prompt + # @!attribute revised_prompt # The prompt that was used to generate the image, if there was any revision to the # prompt. # # @return [String, nil] optional :revised_prompt, String - # @!parse - # # @return [String] - # attr_writer :revised_prompt - - # @!attribute [r] url + # @!attribute url # The URL of the generated image, if `response_format` is `url` (default). # # @return [String, nil] optional :url, String - # @!parse - # # @return [String] - # attr_writer :url - # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) # Represents the url or the content of an image generated by the OpenAI API. # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index d2e09d8f..763a9b9a 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#create_variation class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute image @@ -46,7 +45,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageCreateVariationParams::Size }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
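For `FunctionDefinition`, only `name` is required; `description` and the JSON-schema `parameters` hash may be omitted entirely. A sketch (the weather function and its schema are invented for illustration):

    fn = OpenAI::Models::FunctionDefinition.new(
      name: "get_weather",
      description: "Look up the current weather for a city.",
      parameters: {
        type: "object",
        properties: {city: {type: "string"}},
        required: ["city"]
      }
    )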
@@ -54,10 +53,6 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # @param image [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 247e370b..19525b78 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#edit class ImageEditParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute image @@ -22,7 +21,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String] required :prompt, String - # @!attribute [r] mask + # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. @@ -30,10 +29,6 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Pathname, StringIO, nil] optional :mask, OpenAI::Internal::Type::IOLike - # @!parse - # # @return [Pathname, StringIO] - # attr_writer :mask - # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -62,7 +57,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageEditParams::Size }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -70,10 +65,6 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # @param image [Pathname, StringIO] # @param prompt [String] diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 43b701b6..9e5550f1 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#generate class ImageGenerateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute prompt @@ -28,7 +27,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true - # @!attribute [r] quality + # @!attribute quality # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. 
@@ -36,10 +35,6 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality } - # @!parse - # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality] - # attr_writer :quality - # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been @@ -65,7 +60,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::Models::ImageGenerateParams::Style }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -73,10 +68,6 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # @param prompt [String] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index f288614b..758a1682 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#delete class ModelDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index 52c1d783..77d83f84 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#list class ModelListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index c2d43bc2..deec29e7 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#retrieve class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 6bc57485..85c8aa74 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Moderations#create class 
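With `quality` and `user` now plain optional attributes on `ImageGenerateParams`, a generation call can simply leave them out; a hedged sketch (assuming a configured `OpenAI::Client` named `client`, and that the images response exposes a `data` array of `Image`):

    image = client.images.generate(
      prompt: "a watercolor lighthouse at dusk",
      model: :"dall-e-3",
      quality: :hd # optional; omitting it leaves the reader nil
    ).data&.first
    puts image&.url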
ModerationCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -15,7 +14,7 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Array, Array] required :input, union: -> { OpenAI::Models::ModerationCreateParams::Input } - # @!attribute [r] model + # @!attribute model # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models @@ -24,10 +23,6 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ModerationModel, nil] optional :model, union: -> { OpenAI::Models::ModerationCreateParams::Model } - # @!parse - # # @return [String, Symbol, OpenAI::Models::ModerationModel] - # attr_writer :model - # @!method initialize(input:, model: nil, request_options: {}) # @param input [String, Array, Array] # @param model [String, Symbol, OpenAI::Models::ModerationModel] diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 0e9e47fb..c6b0fdf9 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -32,28 +32,20 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the response format is for, used by the model to determine # how to respond in the format. # # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] schema + # @!attribute schema # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}, nil] optional :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :schema - # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 9814987c..3dc4202e 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -18,16 +18,12 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] required :role, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Role } - # @!attribute [r] type + # @!attribute type # The type of the message input. Always `message`. # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] - # attr_writer :type - # @!method initialize(content:, role:, type: nil) # A message input to the model with a role indicating instruction following # hierarchy. 
Instructions given with the `developer` or `system` role take diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 8065a25a..85d13196 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -16,37 +16,25 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @return [Array] required :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!attribute [r] filters + # @!attribute filters # A filter to apply based on file attributes. # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Models::Responses::FileSearchTool::Filters } - # @!parse - # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # attr_writer :filters - - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # Ranking options for search. # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions] - # attr_writer :ranking_options - # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) # A tool that searches for relevant content from uploaded files. Learn more about # the @@ -76,17 +64,13 @@ module Filters # @see OpenAI::Models::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranker + # @!attribute ranker # The ranker to use for the file search. # # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] - # attr_writer :ranker - - # @!attribute [r] score_threshold + # @!attribute score_threshold # The score threshold for the file search, a number between 0 and 1. Numbers # closer to 1 will attempt to return only the most relevant results, but may # return fewer results. @@ -94,10 +78,6 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :score_threshold, Float - # @!parse - # # @return [Float] - # attr_writer :score_threshold - # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. 
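`FileSearchTool` keeps only `vector_store_ids` required, so ranking can be tuned piecemeal; a sketch with an invented vector store ID:

    ranking = OpenAI::Models::Responses::FileSearchTool::RankingOptions.new(score_threshold: 0.5)
    tool = OpenAI::Models::Responses::FileSearchTool.new(
      vector_store_ids: ["vs_123"], # hypothetical ID
      max_num_results: 10,
      ranking_options: ranking
    )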
# diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index eabfd424..d34e8dd7 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -5,31 +5,22 @@ module Models module Responses # @see OpenAI::Resources::Responses::InputItems#list class InputItemListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # An item ID to list items after, used in pagination. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # An item ID to list items before, used in pagination. # # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] include + # @!attribute include # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # @@ -37,22 +28,14 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. @@ -61,10 +44,6 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Responses::InputItemListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 48793f7b..a51b6317 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -181,18 +181,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Responses::Response::ServiceTier }, nil?: true - # @!attribute [r] status + # @!attribute status # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. # # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseStatus } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseStatus] - # attr_writer :status - - # @!attribute [r] text + # @!attribute text # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: # @@ -202,10 +198,6 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseTextConfig] - # attr_writer :text - # @!attribute truncation # The truncation strategy to use for the model response. # @@ -218,18 +210,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::Response::Truncation }, nil?: true - # @!attribute [r] usage + # @!attribute usage # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # # @return [OpenAI::Models::Responses::ResponseUsage, nil] optional :usage, -> { OpenAI::Models::Responses::ResponseUsage } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseUsage] - # attr_writer :usage - - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -237,10 +225,6 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # @param id [String] # @param created_at [Float] @@ -268,16 +252,12 @@ class Response < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] reason + # @!attribute reason # The reason why the response is incomplete. # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Models::Responses::Response::IncompleteDetails::Reason } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] - # attr_writer :reason - # @!method initialize(reason: nil) # Details about why the response is incomplete. # diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 2a10286a..9bfc14e2 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -28,7 +28,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_call_output] required :type, const: :computer_call_output - # @!attribute [r] acknowledged_safety_checks + # @!attribute acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the # developer. 
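Because `status`, `usage`, and `incomplete_details.reason` are all optional on `Response`, callers should branch on nil; a sketch (the response ID is hypothetical, and a configured `client` is assumed):

    response = client.responses.retrieve("resp_123")
    case response.status
    in :completed
      puts response.usage unless response.usage.nil?
    in :incomplete
      puts response.incomplete_details&.reason
    else
      # nil or :in_progress / :failed -- nothing to report yet
    end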
# @@ -36,21 +36,13 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } - # @!parse - # # @return [Array] - # attr_writer :acknowledged_safety_checks - - # @!attribute [r] status + # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] - # attr_writer :status - # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # @param id [String] # @param call_id [String] diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index e65f4fc5..9dc1550c 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -11,26 +11,18 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # @return [Symbol, :computer_screenshot] required :type, const: :computer_screenshot - # @!attribute [r] file_id + # @!attribute file_id # The identifier of an uploaded file that contains the screenshot. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] image_url + # @!attribute image_url # The URL of the screenshot image. # # @return [String, nil] optional :image_url, String - # @!parse - # # @return [String] - # attr_writer :image_url - # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) # A computer screenshot image used with the computer use tool. # diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 3eeb57fc..f20f9cc0 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -7,8 +7,7 @@ module Responses # # @see OpenAI::Resources::Responses#stream_raw class ResponseCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -142,7 +141,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] text + # @!attribute text # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: # @@ -152,11 +151,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseTextConfig] - # attr_writer :text - - # @!attribute [r] tool_choice + # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. @@ -164,11 +159,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] optional :tool_choice, union: -> { OpenAI::Models::Responses::ResponseCreateParams::ToolChoice } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # attr_writer :tool_choice - - # @!attribute [r] tools + # @!attribute tools # An array of tools the model may call while generating a response. You can # specify which tool to use by setting the `tool_choice` parameter. # @@ -187,10 +178,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -213,7 +200,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::ResponseCreateParams::Truncation }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
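With `tool_choice` and `tools` as ordinary optional attributes, a creation call reads naturally; a hedged sketch reusing the `tool` built above (`:auto` is assumed to be a valid `ToolChoiceOptions` value, matching the REST API):

    response = client.responses.create(
      model: :"gpt-4o",
      input: "Summarize the uploaded report.",
      tools: [tool],
      tool_choice: :auto
    )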
@@ -221,10 +208,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # @param input [String, Array] # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index e3d6735f..96a2b404 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -5,8 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#delete class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 43a865fd..9fc12343 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -78,46 +78,30 @@ class Result < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::Attribute] }, nil?: true - # @!attribute [r] file_id + # @!attribute file_id # The unique ID of the file. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - - # @!attribute [r] score + # @!attribute score # The relevance score of the file - a value between 0 and 1. # # @return [Float, nil] optional :score, Float - # @!parse - # # @return [Float] - # attr_writer :score - - # @!attribute [r] text + # @!attribute text # The text that was retrieved from the file. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] # @param file_id [String] diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index c6271a6a..106627ff 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -24,17 +24,13 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_schema] required :type, const: :json_schema - # @!attribute [r] description + # @!attribute description # A description of what the response format is for, used by the model to determine # how to respond in the format. 
# # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 25210a43..7db552df 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -28,27 +28,19 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call] required :type, const: :function_call - # @!attribute [r] id + # @!attribute id # The unique ID of the function tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCall::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] - # attr_writer :status - # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 6eb9b8a9..6623a73c 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -28,17 +28,13 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call_output] required :type, const: :function_call_output - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] - # attr_writer :status - # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) # @param id [String] # @param call_id [String] diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 4b186be4..13aeefd9 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -10,36 +10,24 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_file] required :type, const: :input_file - # @!attribute [r] file_data + # @!attribute file_data # The content of the file to be sent to the model. # # @return [String, nil] optional :file_data, String - # @!parse - # # @return [String] - # attr_writer :file_data - - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to be sent to the model. 
# # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file to be sent to the model. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) # A file input to the model. # diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index d70b58bc..72801ab7 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -72,27 +72,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] required :role, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Role } - # @!attribute [r] status + # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] - # attr_writer :status - - # @!attribute [r] type + # @!attribute type # The type of the message input. Always set to `message`. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] - # attr_writer :type - # @!method initialize(content:, role:, status: nil, type: nil) # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -164,17 +156,13 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_call_output] required :type, const: :computer_call_output - # @!attribute [r] id + # @!attribute id # The ID of the computer tool call output. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] acknowledged_safety_checks + # @!attribute acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the # developer. # @@ -182,21 +170,13 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] } - # @!parse - # # @return [Array] - # attr_writer :acknowledged_safety_checks - - # @!attribute [r] status + # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
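`ResponseInputFile` defaults its `type` const to `:input_file`, so a file reference needs only the fields you actually have; a sketch with an invented file ID:

    file_item = OpenAI::Models::Responses::ResponseInputFile.new(file_id: "file-abc")
    file_item.filename # => nil -- optional readers stay nil when unset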
# # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] - # attr_writer :status - # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # The output of a computer tool call. # @@ -269,28 +249,20 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call_output] required :type, const: :function_call_output - # @!attribute [r] id + # @!attribute id # The unique ID of the function tool call output. Populated when this item is # returned via API. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] - # attr_writer :status - # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # The output of a function tool call. # diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 2c3f05d2..2b7b1ab1 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -24,27 +24,19 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] required :role, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Role } - # @!attribute [r] status + # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] - # attr_writer :status - - # @!attribute [r] type + # @!attribute type # The type of the message input. Always set to `message`. 
# # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] - # attr_writer :type - # @!method initialize(id:, content:, role:, status: nil, type: nil) # @param id [String] # @param content [Array] diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index fb8d5db6..bec6c280 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -23,17 +23,13 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :reasoning] required :type, const: :reasoning - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] - # attr_writer :status - # @!method initialize(id:, summary:, status: nil, type: :reasoning) # A description of the chain of thought used by a reasoning model while generating # a response. diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 1b64f738..bb881916 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -5,11 +5,10 @@ module Models module Responses # @see OpenAI::Resources::Responses#retrieve class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] include + # @!attribute include # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # @@ -17,10 +16,6 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!method initialize(include: nil, request_options: {}) # @param include [Array] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index cd0bf58f..05e2d3db 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseTextConfig < OpenAI::Internal::Type::BaseModel - # @!attribute [r] format_ + # @!attribute format_ # An object specifying the format that the model must output. 
# # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -22,10 +22,6 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Models::Responses::ResponseFormatTextConfig }, api_name: :format - # @!parse - # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] - # attr_writer :format_ - # @!method initialize(format_: nil) # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 3ed57a22..09522f82 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -13,17 +13,13 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Models::Responses::WebSearchTool::Type } - # @!attribute [r] search_context_size + # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Models::Responses::WebSearchTool::SearchContextSize } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] - # attr_writer :search_context_size - # @!attribute user_location # # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] @@ -77,48 +73,32 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :approximate] required :type, const: :approximate - # @!attribute [r] city + # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. # # @return [String, nil] optional :city, String - # @!parse - # # @return [String] - # attr_writer :city - - # @!attribute [r] country + # @!attribute country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. # # @return [String, nil] optional :country, String - # @!parse - # # @return [String] - # attr_writer :country - - # @!attribute [r] region + # @!attribute region # Free text input for the region of the user, e.g. `California`. # # @return [String, nil] optional :region, String - # @!parse - # # @return [String] - # attr_writer :region - - # @!attribute [r] timezone + # @!attribute timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. 
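Note the `api_name: :format` mapping: in Ruby the attribute is `format_` (avoiding a clash with `Kernel#format`), but it serializes under the wire key `format`. A sketch, assuming `ResponseFormatText.new` defaults its `type` const:

    text = OpenAI::Models::Responses::ResponseTextConfig.new(
      format_: OpenAI::Models::ResponseFormatText.new
    )
    # serialized as {"format" => {"type" => "text"}}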
# # @return [String, nil] optional :timezone, String - # @!parse - # # @return [String] - # attr_writer :timezone - # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) # @param city [String] # @param country [String] diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 1b7164e8..717800ca 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#cancel class UploadCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 77f01df1..b978c7a2 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#complete class UploadCompleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute part_ids @@ -14,17 +13,13 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # @return [Array] required :part_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!attribute [r] md5 + # @!attribute md5 # The optional md5 checksum for the file contents to verify if the bytes uploaded # matches what you expect. # # @return [String, nil] optional :md5, String - # @!parse - # # @return [String] - # attr_writer :md5 - # @!method initialize(part_ids:, md5: nil, request_options: {}) # @param part_ids [Array] # @param md5 [String] diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 0c7d54d5..afa6ec61 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#create class UploadCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute bytes diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 1e11840f..9294e4c0 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -5,8 +5,7 @@ module Models module Uploads # @see OpenAI::Resources::Uploads::Parts#create class PartCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 5ad255fe..3db76e67 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -64,16 +64,12 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Integer] required :usage_bytes, Integer - # @!attribute [r] expires_after + # @!attribute 
expires_after # The expiration policy for a vector store. # # @return [OpenAI::Models::VectorStore::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::Models::VectorStore::ExpiresAfter } - # @!parse - # # @return [OpenAI::Models::VectorStore::ExpiresAfter] - # attr_writer :expires_after - # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. # diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 2899d54e..30eb84d9 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -4,32 +4,23 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#create class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - - # @!attribute [r] expires_after + # @!attribute expires_after # The expiration policy for a vector store. # # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::Models::VectorStoreCreateParams::ExpiresAfter } - # @!parse - # # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] - # attr_writer :expires_after - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # the vector store should use. Useful for tools like `file_search` that can access # files. @@ -37,10 +28,6 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -52,16 +39,12 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the vector store. 
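A vector store can now be created with any subset of these optional attributes; a hedged sketch (the file ID is invented, and the `expires_after` hash shape mirrors `VectorStoreCreateParams::ExpiresAfter`):

    vector_store = client.vector_stores.create(
      name: "support-docs",
      file_ids: ["file-abc"],
      expires_after: {anchor: :last_active_at, days: 7}
    )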
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index e307e25d..11a788e6 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#delete class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 610dd889..caa16c07 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#list class VectorStoreListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,11 +16,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -30,32 +25,20 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStoreListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index aaf50986..004d1047 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#retrieve class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 9807ed6a..808daadb 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#search class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute query @@ -14,47 +13,31 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @return [String, Array] required :query, union: -> { OpenAI::Models::VectorStoreSearchParams::Query } - # @!attribute [r] filters + # @!attribute filters # A filter to apply based on file attributes. # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Models::VectorStoreSearchParams::Filters } - # @!parse - # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # attr_writer :filters - - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # Ranking options for search. # # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions } - # @!parse - # # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions] - # attr_writer :ranking_options - - # @!attribute [r] rewrite_query + # @!attribute rewrite_query # Whether to rewrite the natural language query for vector search. 
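Searching takes a required `query` plus optional tuning knobs; a sketch assuming the vector store ID is passed as the leading path argument:

    results = client.vector_stores.search(
      "vs_123", # hypothetical ID
      query: "refund policy",
      max_num_results: 5,
      rewrite_query: true
    )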
# # @return [Boolean, nil] optional :rewrite_query, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :rewrite_query - # @!method initialize(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) # @param query [String, Array] # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] @@ -92,24 +75,16 @@ module Filters end class RankingOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranker + # @!attribute ranker # # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] - # attr_writer :ranker - - # @!attribute [r] score_threshold + # @!attribute score_threshold # # @return [Float, nil] optional :score_threshold, Float - # @!parse - # # @return [Float] - # attr_writer :score_threshold - # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. # diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 9bda9d94..91eafa78 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#update class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute expires_after diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index c79d293e..c6f2f182 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#cancel class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index f0517a4c..9a873239 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#create class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file_ids @@ -29,17 +28,13 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileBatchCreateParams::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
# # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) # @param file_ids [Array] # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index a99326c3..543b49c7 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#list_files class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id @@ -14,7 +13,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -23,11 +22,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -36,42 +31,26 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] filter + # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter, nil] optional :filter, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] - # attr_writer :filter - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] - # attr_writer :order - # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # @param vector_store_id [String] # @param after [String] diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 48cf6115..3ec39a5b 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#retrieve class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index e4f3deec..0dbc1139 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content class FileContentParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index b4924ae1..094dcbfb 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -5,26 +5,18 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content class FileContentResponse < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text + # @!attribute text # The text content # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - - # @!attribute [r] type + # @!attribute type # The content type (currently only `"text"`) # # @return [String, nil] optional :type, String - # @!parse - # # @return [String] - # attr_writer :type - # @!method initialize(text: nil, type: nil) # @param text [String] # @param type [String] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index d8307a70..2d0fc3ea 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#create class FileCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file_id @@ -29,17 +28,13 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: 
OpenAI::Models::VectorStores::FileCreateParams::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # @param file_id [String] # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index 25a5fbcc..ef1c9179 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#delete class FileDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 0c80decc..551d5a17 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -5,11 +5,10 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#list class FileListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -18,11 +17,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -31,42 +26,26 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] filter + # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
# # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter, nil] optional :filter, enum: -> { OpenAI::Models::VectorStores::FileListParams::Filter } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] - # attr_writer :filter - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 2b63ee84..3a301cdb 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#retrieve class FileRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 9cf5a31a..f054fa7a 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#update class FileUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index ae51122a..4891ed5a 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -66,16 +66,12 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The strategy used to chunk the file. 
# # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - # attr_writer :chunking_strategy - # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") # A list of files attached to a vector store. # From 8d0ea95bfa699473231ee707c7d24cde2d38abef Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 17:29:54 +0000 Subject: [PATCH 145/295] chore: documentation improvements --- README.md | 153 +++++++++++++++++++++++++------------------------- Rakefile | 4 +- lib/openai.rb | 9 --- 3 files changed, 80 insertions(+), 86 deletions(-) diff --git a/README.md b/README.md index 954ea98a..4aa0b863 100644 --- a/README.md +++ b/README.md @@ -31,16 +31,26 @@ openai = OpenAI::Client.new( ) chat_completion = openai.chat.completions.create( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" + messages: [{role: :user, content: "Say this is a test"}], + model: :"gpt-4.1" ) puts(chat_completion) ``` +## Sorbet + +This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`. + +When using sorbet, it is recommended to use model classes as below. This provides stronger type checking and tooling integration. + +```ruby +openai.chat.completions.create( + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + model: :"gpt-4.1" +) +``` + ### Pagination List methods in the OpenAI API are paginated. @@ -66,11 +76,8 @@ We provide support for streaming responses using Server-Sent Events (SSE). ```ruby stream = openai.chat.completions.stream_raw( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" + messages: [{role: :user, content: "Say this is a test"}], + model: :"gpt-4.1" ) stream.each do |completion| @@ -86,11 +93,11 @@ Request parameters that correspond to file uploads can be passed as `StringIO`, require "pathname" # using `Pathname`, the file will be lazily read, without reading everything in to memory -file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune") +file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: :"fine-tune") file = File.read("input.jsonl") # using `StringIO`, useful if you already have the data in memory -file_object = openai.files.create(file: StringIO.new(file), purpose: "fine-tune") +file_object = openai.files.create(file: StringIO.new(file), purpose: :"fine-tune") puts(file_object.id) ``` @@ -101,7 +108,7 @@ When the library is unable to connect to the API, or if the API returns a non-su ```ruby begin - job = openai.fine_tuning.jobs.create(model: "gpt-4o", training_file: "file-abc123") + job = openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123") rescue OpenAI::Errors::APIError => e puts(e.status) # 400 end @@ -139,11 +146,8 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( - messages: [{ - role: "user", - content: "How can I get the name of the current day in JavaScript?" 
-  }],
-  model: "gpt-4o",
+  messages: [{role: :user, content: "How can I get the name of the current day in JavaScript?"}],
+  model: :"gpt-4.1",
   request_options: {max_retries: 5}
 )
 ```
@@ -164,76 +168,44 @@ openai = OpenAI::Client.new(
 
 # Or, configure per-request:
 openai.chat.completions.create(
-  messages: [{
-    role: "user",
-    content: "How can I list all files in a directory using Python?"
-  }],
-  model: "gpt-4o",
+  messages: [{role: :user, content: "How can I list all files in a directory using Python?"}],
+  model: :"gpt-4.1",
   request_options: {timeout: 5}
 )
 ```
 
-## LSP Support
-
-### Solargraph
-
-This library includes [Solargraph](https://solargraph.org) support for both auto completion and go to definition.
-
-```ruby
-gem "solargraph", group: :development
-```
-
-After Solargraph is installed, **you must populate its index** either via the provided editor command, or by running the following in your terminal:
-
-```sh
-bundle exec solargraph gems
-```
-
-Note: if you had installed the gem either using a `git:` or `github:` URL, or had vendored the gem using bundler, you will need to set up your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to the gem's `lib` directory.
+## Editor support
 
-```yaml
-include:
-  - 'vendor/bundle/ruby/*/gems/openai-*/lib/**/*.rb'
-```
+Some editor language services like [Solargraph](https://github.com/castwide/solargraph?tab=readme-ov-file#gem-support) or [Sorbet](https://sorbet.org/docs/rbi#the-hidden-definitions-rbi) require a manually triggered indexing step before functionalities like auto-completion and go to definition can operate.
 
-Otherwise Solargraph will not be able to provide type information or auto-completion for any non-indexed libraries.
+Please refer to their respective documentation for details. This library also includes a [short guide](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md#editor-support) on how to set up various editor services for internal development.
 
-### Sorbet
+## Advanced Concepts
 
-This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`.
+### Model DSL
 
-What this means is that while you can use Sorbet to type check your code statically, and benefit from the [Sorbet Language Server](https://sorbet.org/docs/lsp) in your editor, there is no runtime type checking and execution overhead from Sorbet itself.
+This library uses a Model DSL to represent request parameters and response shapes in `lib/openai/models`.
 
-Due to limitations with the Sorbet type system, where a method otherwise can take an instance of `OpenAI::BaseModel` class, you will need to use the `**` splat operator to pass the arguments:
+The model classes serve as anchor points for both toolchain readable documentation, and language service assisted navigation links. This information also allows the SDK's internals to perform translation between plain and rich data types; e.g., conversion between a `Time` instance and an ISO8601 `String`, and vice versa.
 
-Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience.
+In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can also be used.
For example, the following are interchangeable as arguments:
 
 ```ruby
+# This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services
 params = OpenAI::Models::Chat::CompletionCreateParams.new(
-  messages: [{
-    role: "user",
-    content: "Say this is a test"
-  }],
-  model: "gpt-4o"
+  messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")],
+  model: :"gpt-4.1"
 )
 
-openai.chat.completions.create(**params)
+# This also works
+params = {
+  messages: [{role: :user, content: "Say this is a test"}],
+  model: :"gpt-4.1"
+}
 ```
 
-Note: **This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality.
-
-### Ruby LSP
-
-The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information.
-
-## Advanced
-
 ### Making custom/undocumented requests
 
-This library is typed for convenient access to the documented API.
-
-If you need to access undocumented endpoints, params, or response properties, the library can still be used.
-
 #### Undocumented request params
 
 If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making requests as seen in examples above.
@@ -244,15 +216,15 @@ To make requests to undocumented endpoints, you can make requests using `client.
 
 ```ruby
 response = client.request(
-    method: :post,
-    path: '/undocumented/endpoint',
-    query: {"dog": "woof"},
-    headers: {"useful-header": "interesting-value"},
-    body: {"he": "llo"},
-  )
+  method: :post,
+  path: '/undocumented/endpoint',
+  query: {"dog": "woof"},
+  headers: {"useful-header": "interesting-value"},
+  body: {"he": "llo"},
+)
 ```
 
-### Concurrency & Connection Pooling
+### Concurrency & connection pooling
 
 The `OpenAI::Client` instances are thread-safe, and should be re-used across multiple threads. By default, each `Client` has its own HTTP connection pool, with a maximum number of connections equal to thread count.
 
@@ -262,6 +234,33 @@ Unless otherwise specified, other classes in the SDK do not have locks protectin
 
 Currently, `OpenAI::Client` instances are only fork-safe if there are no in-flight HTTP requests.
 
+### Sorbet
+
+#### Enums
+
+Sorbet's typed enums require sub-classing of the [`T::Enum` class](https://sorbet.org/docs/tenum) from the `sorbet-runtime` gem.
+
+Since this library does not depend on `sorbet-runtime`, it uses a [`T.all` intersection type](https://sorbet.org/docs/intersection-types) with a Ruby primitive type to construct a "tagged alias" instead.
+
+```ruby
+module OpenAI::Models::ChatModel
+  # This alias aids language service driven navigation.
+  TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) }
+end
+```
+
+#### Argument passing trick
+
+It is possible to pass a compatible model / parameter class to a method that expects keyword arguments by using the `**` splat operator.
+
+```ruby
+params = OpenAI::Models::Chat::CompletionCreateParams.new(
+  messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")],
+  model: :"gpt-4.1"
+)
+openai.chat.completions.create(**params)
+```
+
 ## Versioning
 
 This package follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions.
As the library is in initial development and has a major version of `0`, APIs may change at any time. @@ -271,3 +270,7 @@ This package considers improvements to the (non-runtime) `*.rbi` and `*.rbs` typ ## Requirements Ruby 3.1.0 or higher. + +## Contributing + +See [the contributing documentation](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md). diff --git a/Rakefile b/Rakefile index 7a8155db..dde46bf9 100644 --- a/Rakefile +++ b/Rakefile @@ -21,7 +21,7 @@ end desc("Preview docs; use `PORT=` to change the port") multitask(:"docs:preview") do - sh(*%w[yard server --bind [::] --reload --quiet --port], ENV.fetch("PORT", "8808")) + sh(*%w[yard server --reload --quiet --bind [::] --port], ENV.fetch("PORT", "8808")) end desc("Run test suites; use `TEST=path/to/test.rb` to run a specific test file") @@ -111,7 +111,7 @@ end desc("Typecheck everything") multitask(typecheck: [:"typecheck:steep", :"typecheck:sorbet"]) -desc("Lint everything") +desc("Lint and typecheck") multitask(lint: [:"lint:rubocop", :typecheck]) desc("Build yard docs") diff --git a/lib/openai.rb b/lib/openai.rb index 66af9d43..68a9c048 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -19,15 +19,6 @@ # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) - Warning.warn( - <<~WARN - \n - ⚠️ skipped loading of "openai" gem under `tapioca`. - - This message is normal and expected if you are running a `tapioca` command, and does not impact `.rbi` generation. - \n - WARN - ) return end From 7248298cfaea4d2247281ea2965888e953f58224 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 18:08:55 +0000 Subject: [PATCH 146/295] feat: implement `#hash` for data containers --- .../transport/pooled_net_requester.rb | 6 +---- lib/openai/internal/type/array_of.rb | 3 +++ lib/openai/internal/type/base_model.rb | 24 +++++++++---------- lib/openai/internal/type/enum.rb | 3 +++ lib/openai/internal/type/hash_of.rb | 3 +++ lib/openai/internal/type/union.rb | 5 +++- rbi/lib/openai/internal/type/array_of.rbi | 3 +++ rbi/lib/openai/internal/type/base_model.rbi | 6 +++++ rbi/lib/openai/internal/type/enum.rbi | 3 +++ rbi/lib/openai/internal/type/hash_of.rbi | 3 +++ rbi/lib/openai/internal/type/union.rbi | 3 +++ sig/openai/internal/type/array_of.rbs | 2 ++ sig/openai/internal/type/base_model.rbs | 4 ++++ sig/openai/internal/type/enum.rbs | 2 ++ sig/openai/internal/type/hash_of.rbs | 2 ++ sig/openai/internal/type/union.rbs | 2 ++ test/openai/internal/type/base_model_test.rb | 5 +++- 17 files changed, 60 insertions(+), 19 deletions(-) diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index df4e1205..67e58347 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -57,15 +57,11 @@ def calibrate_socket_timeout(conn, deadline) # @return [Array(Net::HTTPGenericRequest, Proc)] def build_request(request, &blk) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) - - # ensure we construct a URI class of the right scheme - url = URI(url.to_s) - req = Net::HTTPGenericRequest.new( method.to_s.upcase, !body.nil?, method != :head, - url + URI(url.to_s) # ensure we construct a URI class of the right scheme ) 
headers.each { req[_1] = _2 } diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 84109d94..d0d77538 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -44,6 +44,9 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @return [Integer] + def hash = [self.class, item_type].hash + # @api private # # @param value [Array, Object] diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index e0849cca..043039d4 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -4,14 +4,6 @@ module OpenAI module Internal module Type # @abstract - # - # @example - # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` - # comparison_filter => { - # key: key, - # type: type, - # value: value - # } class BaseModel extend OpenAI::Internal::Type::Converter @@ -93,11 +85,13 @@ def fields state: state ) end - rescue StandardError + rescue StandardError => e cls = self.class.name.split("::").last - # rubocop:disable Layout/LineLength - message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." - # rubocop:enable Layout/LineLength + message = [ + "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}.", + "To get the unparsed API response, use #{cls}[#{__method__.inspect}].", + "Cause: #{e.message}" + ].join(" ") raise OpenAI::Errors::ConversionError.new(message) end end @@ -171,6 +165,9 @@ def optional(name_sym, type_info, spec = {}) def ==(other) other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields end + + # @return [Integer] + def hash = fields.hash end # @param other [Object] @@ -178,6 +175,9 @@ def ==(other) # @return [Boolean] def ==(other) = self.class == other.class && @data == other.to_h + # @return [Integer] + def hash = [self.class, @data].hash + class << self # @api private # diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 8f14e58b..afdeb5f9 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -62,6 +62,9 @@ def ==(other) # rubocop:enable Style/CaseEquality end + # @return [Integer] + def hash = values.to_set.hash + # @api private # # Unlike with primitives, `Enum` additionally validates that the value is a member diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 5a6d6304..40c7a89a 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -59,6 +59,9 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @return [Integer] + def hash = [self.class, item_type].hash + # @api private # # @param value [Hash{Object=>Object}, Object] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 14a1a191..5f9048cf 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -43,7 +43,7 @@ module Union # # @return [Array] protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + known_variants.map { |key, variant_fn| [key, variant_fn.call] } end # All of the specified variants for this union. 
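The `#hash` definitions added throughout this patch uphold Ruby's `#==`/`#hash` contract: `Hash` and `Set` bucket members by `#hash` before confirming equality, so two structurally equal containers are treated as distinct keys unless both methods agree. Below is a minimal standalone sketch of that contract, using toy classes for illustration rather than the SDK's actual implementation:

```ruby
require "set"

# Toy container: two instances compare equal when their item types match,
# mirroring the shape of the `#==` and `#hash` definitions in this patch.
class ToyArrayOf
  attr_reader :item_type

  def initialize(item_type)
    @item_type = item_type
  end

  # Structural equality.
  def ==(other) = other.is_a?(ToyArrayOf) && other.item_type == item_type
  alias_method :eql?, :==

  # Without this override, Object#hash is identity-based, so Set and Hash
  # would treat equal instances as distinct members.
  def hash = [self.class, item_type].hash
end

a = ToyArrayOf.new(String)
b = ToyArrayOf.new(String)

a == b          # => true
Set[a, b].size  # => 1 (it would be 2 without the #hash override)

lookup = { a => :ok }
lookup.key?(b)  # => true
```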
@@ -128,6 +128,9 @@ def ==(other) OpenAI::Internal::Type::Union === other && other.derefed_variants == derefed_variants end + # @return [Integer] + def hash = variants.hash + # @api private # # @param value [Object] diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index 8af5fd34..e7a84e1a 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -32,6 +32,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index bc1959ae..cf8285ae 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -111,11 +111,17 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + + sig { returns(Integer) } + def hash; end end sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + class << self # @api private sig do diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index e1d0753c..8dcaa918 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -28,6 +28,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private # # Unlike with primitives, `Enum` additionally validates that the value is a member diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index 4edc379f..0faca34e 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -32,6 +32,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi index 2e47dfe1..13b7409c 100644 --- a/rbi/lib/openai/internal/type/union.rbi +++ b/rbi/lib/openai/internal/type/union.rbi @@ -47,6 +47,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 0489e6e2..80fcc2a2 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -15,6 +15,8 @@ module OpenAI def ==: (top other) -> bool + def hash: -> Integer + def coerce: ( ::Array[top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index e3a7d42c..177b22b8 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -49,8 +49,12 @@ module OpenAI def self.==: (top other) -> bool + def self.hash: -> Integer + def ==: (top other) -> bool + def hash: -> Integer + def self.coerce: ( OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 4de50b6d..897ae9eb 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -10,6 +10,8 @@ module 
OpenAI
       def ==: (top other) -> bool
 
+      def hash: -> Integer
+
       def coerce: (
         String | Symbol | top value,
         state: OpenAI::Internal::Type::Converter::coerce_state
diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs
index e23bc0c3..26f65397 100644
--- a/sig/openai/internal/type/hash_of.rbs
+++ b/sig/openai/internal/type/hash_of.rbs
@@ -15,6 +15,8 @@ module OpenAI
 
       def ==: (top other) -> bool
 
+      def hash: -> Integer
+
       def coerce: (
         ::Hash[top, top] | top value,
         state: OpenAI::Internal::Type::Converter::coerce_state
diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs
index 8b58f419..86c308a6 100644
--- a/sig/openai/internal/type/union.rbs
+++ b/sig/openai/internal/type/union.rbs
@@ -30,6 +30,8 @@ module OpenAI
 
       def ==: (top other) -> bool
 
+      def hash: -> Integer
+
       def coerce: (
         top value,
         state: OpenAI::Internal::Type::Converter::coerce_state
diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb
index b03987cf..a3267818 100644
--- a/test/openai/internal/type/base_model_test.rb
+++ b/test/openai/internal/type/base_model_test.rb
@@ -647,14 +647,17 @@ def test_equality
       [U0.new(String, Float), U0.new(String, Integer)] => false,
       [U1, U2] => true,
       [M1, M2] => false,
-      [M1, M3] => true
+      [M1, M3] => true,
+      [M1.new(a: 1), M1.new(a: 1)] => true
     }
 
     cases.each do
       if _2
        assert_equal(*_1)
+        assert_equal(*_1.map(&:hash))
      else
        refute_equal(*_1)
+        refute_equal(*_1.map(&:hash))
      end
    end
  end

From 3606642a0c9160946c88fb2a3adf2b98819123dc Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 18 Apr 2025 19:04:49 +0000
Subject: [PATCH 147/295] chore: update README with recommended editor plugins

---
 CONTRIBUTING.md | 21 +++++++--------------
 README.md       | 20 +++++++++-----------
 2 files changed, 16 insertions(+), 25 deletions(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d800af1c..6f93458e 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -96,20 +96,17 @@ $ bundle exec rake format
 
 ## Editor Support
 
-### Solargraph
+### Ruby LSP
 
-This library includes [Solargraph](https://solargraph.org) support for both auto-completion and go to definition.
+[Ruby LSP](https://github.com/Shopify/ruby-lsp) has quite good support for go to definition, but not auto-completion.
 
-```ruby
-gem "solargraph", group: :development
-```
+This can be installed alongside Solargraph.
+
+### Solargraph
 
-Note: if you had installed the gem locally using `git: "..."` or `path: "..."`, you must update your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to where the gem is located:
+[Solargraph](https://solargraph.org) has quite good support for auto-completion, but not go to definition.
 
-```yaml
-include:
-  - '/lib/**/*.rb'
-```
+This can be installed alongside Ruby LSP.
 
 ### Sorbet
 
@@ -119,10 +116,6 @@ include:
 
 2. For each generic type in `*.rbi` files, a spurious "Duplicate type member" error is present.
 
-### Ruby LSP
-
-The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information.
-
 ## Documentation Preview
 
 To preview the documentation, run:
diff --git a/README.md b/README.md
index 4aa0b863..120c18a4 100644
--- a/README.md
+++ b/README.md
@@ -174,19 +174,11 @@ openai.chat.completions.create(
 )
 ```
 
-## Editor support
-
-Some editor language services like [Solargraph](https://github.com/castwide/solargraph?tab=readme-ov-file#gem-support) or [Sorbet](https://sorbet.org/docs/rbi#the-hidden-definitions-rbi) require a manually triggered indexing step before functionalities like auto-completion and go to definition can operate.
-
-Please refer to their respective documentation for details. This library also includes a [short guide](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md#editor-support) on how to set up various editor services for internal development.
-
-## Advanced Concepts
-
-### Model DSL
+## Model DSL
 
-This library uses a Model DSL to represent request parameters and response shapes in `lib/openai/models`.
+This library uses a simple DSL to represent request parameters and response shapes in `lib/openai/models`.
 
-The model classes serve as anchor points for both toolchain readable documentation, and language service assisted navigation links. This information also allows the SDK's internals to perform translation between plain and rich data types; e.g., conversion between a `Time` instance and an ISO8601 `String`, and vice versa.
+With the right [editor plugins](https://shopify.github.io/ruby-lsp), you can ctrl-click on elements of the DSL to navigate around and explore the library.
 
 In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can also be used. For example, the following are interchangeable as arguments:
 
@@ -204,6 +196,12 @@ params = {
 }
 ```
 
+## Editor support
+
+A combination of [Shopify LSP](https://shopify.github.io/ruby-lsp) and [Solargraph](https://solargraph.org/) is recommended for non-[Sorbet](https://sorbet.org) users. The former is especially good at go to definition, while the latter has much better auto-completion support.
+
+## Advanced concepts
+
 ### Making custom/undocumented requests
 
 #### Undocumented request params

From fdde0dea5394e524003ace07a189c1f4678f2170 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 18 Apr 2025 20:28:08 +0000
Subject: [PATCH 148/295] chore(internal): version bump

---
 .release-please-manifest.json | 2 +-
 Gemfile.lock                  | 2 +-
 README.md                     | 2 +-
 lib/openai/version.rb         | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index b56c3d0b..e8285b71 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.1.0-alpha.4"
+  ".": "0.1.0-alpha.5"
 }
\ No newline at end of file
diff --git a/Gemfile.lock b/Gemfile.lock
index d5303e6f..0891755f 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -11,7 +11,7 @@ GIT
 
 PATH
   remote: .
specs: - openai (0.1.0.pre.alpha.4) + openai (0.1.0.pre.alpha.5) connection_pool GEM diff --git a/README.md b/README.md index 120c18a4..75305836 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.1.0.pre.alpha.4" +gem "openai", "~> 0.1.0.pre.alpha.5" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 3790ca0e..c92a3443 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.alpha.4" + VERSION = "0.1.0.pre.alpha.5" end From 5b1dadefe8142eea10a86e79210c39b134bfa617 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 19 Apr 2025 01:45:01 +0000 Subject: [PATCH 149/295] chore: explicitly mark apis public under `Internal` module --- .yardopts | 1 + lib/openai/client.rb | 8 ++++---- lib/openai/internal.rb | 1 - lib/openai/internal/type/array_of.rb | 6 ++++++ lib/openai/internal/type/base_model.rb | 18 +++++++++++++++++- lib/openai/internal/type/base_page.rb | 8 ++++++++ lib/openai/internal/type/base_stream.rb | 8 ++++++++ lib/openai/internal/type/boolean.rb | 4 ++++ lib/openai/internal/type/enum.rb | 6 ++++++ lib/openai/internal/type/hash_of.rb | 6 ++++++ lib/openai/internal/type/io_like.rb | 4 ++++ lib/openai/internal/type/union.rb | 6 ++++++ lib/openai/internal/type/unknown.rb | 4 ++++ .../chat_completion_assistant_message_param.rb | 2 ++ .../models/chat/chat_completion_chunk.rb | 2 ++ .../models/chat/chat_completion_message.rb | 2 ++ .../models/chat/completion_create_params.rb | 6 ++++++ lib/openai/models/file_object.rb | 4 ++++ .../models/fine_tuning/job_create_params.rb | 2 ++ lib/openai/models/reasoning.rb | 2 ++ rbi/lib/openai/client.rbi | 8 ++++---- rbi/lib/openai/internal.rbi | 1 - rbi/lib/openai/internal/type/base_model.rbi | 1 - rbi/lib/openai/internal/type/base_page.rbi | 2 ++ rbi/lib/openai/internal/type/base_stream.rbi | 2 ++ 25 files changed, 102 insertions(+), 12 deletions(-) diff --git a/.yardopts b/.yardopts index 5757768a..84c12f2a 100644 --- a/.yardopts +++ b/.yardopts @@ -1,4 +1,5 @@ --type-name-tag generic:Generic +--default-return void --markup markdown --markup-provider redcarpet --exclude /rbi diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 15aa8d39..c79c1a83 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -101,10 +101,10 @@ def initialize( organization: ENV["OPENAI_ORG_ID"], project: ENV["OPENAI_PROJECT_ID"], base_url: ENV["OPENAI_BASE_URL"], - max_retries: DEFAULT_MAX_RETRIES, - timeout: DEFAULT_TIMEOUT_IN_SECONDS, - initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY, - max_retry_delay: DEFAULT_MAX_RETRY_DELAY + max_retries: OpenAI::Client::DEFAULT_MAX_RETRIES, + timeout: OpenAI::Client::DEFAULT_TIMEOUT_IN_SECONDS, + initial_retry_delay: OpenAI::Client::DEFAULT_INITIAL_RETRY_DELAY, + max_retry_delay: OpenAI::Client::DEFAULT_MAX_RETRY_DELAY ) base_url ||= "https://api.openai.com/v1" diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb index 32bc2cf5..143e002b 100644 --- a/lib/openai/internal.rb +++ b/lib/openai/internal.rb @@ -1,7 +1,6 @@ # frozen_string_literal: true module OpenAI - # @api private module Internal OMIT = Object.new.tap do diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index d0d77538..dcfc2f95 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ 
-30,11 +30,15 @@ class ArrayOf # @option spec [Boolean] :"nil?" def self.[](...) = new(...) + # @api public + # # @param other [Object] # # @return [Boolean] def ===(other) = other.is_a?(Array) && other.all?(item_type) + # @api public + # # @param other [Object] # # @return [Boolean] @@ -44,6 +48,8 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @api public + # # @return [Integer] def hash = [self.class, item_type].hash diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 043039d4..b356a480 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -159,6 +159,8 @@ def optional(name_sym, type_info, spec = {}) @mode = nil end + # @api public + # # @param other [Object] # # @return [Boolean] @@ -166,15 +168,21 @@ def ==(other) other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields end + # @api public + # # @return [Integer] def hash = fields.hash end + # @api public + # # @param other [Object] # # @return [Boolean] def ==(other) = self.class == other.class && @data == other.to_h + # @api public + # # @return [Integer] def hash = [self.class, @data].hash @@ -291,6 +299,8 @@ def dump(value, state:) end end + # @api public + # # Returns the raw value associated with the given key, if found. Otherwise, nil is # returned. # @@ -309,6 +319,8 @@ def [](key) @data[key] end + # @api public + # # Returns a Hash of the data underlying this object. O(1) # # Keys are Symbols and values are the raw values from the response. The return @@ -361,11 +373,15 @@ def walk(model) end end + # @api public + # # @param a [Object] # # @return [String] def to_json(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_json(*a) + # @api public + # # @param a [Object] # # @return [String] @@ -407,7 +423,7 @@ def inspect(depth: 0) end end - # @api private + # @api public # # @return [String] def to_s = self.class.walk(@data).to_s diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb index 48c98885..e17895f2 100644 --- a/lib/openai/internal/type/base_page.rb +++ b/lib/openai/internal/type/base_page.rb @@ -3,19 +3,27 @@ module OpenAI module Internal module Type + # @api private + # # @generic Elem # # This module provides a base implementation for paginated responses in the SDK. module BasePage # rubocop:disable Lint/UnusedMethodArgument + # @api public + # # @return [Boolean] def next_page? = (raise NotImplementedError) + # @api public + # # @raise [OpenAI::Errors::APIError] # @return [OpenAI::Internal::Type::BasePage] def next_page = (raise NotImplementedError) + # @api public + # # @param blk [Proc] # # @yieldparam [generic] diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index ebcb564c..580ba8ab 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -3,6 +3,8 @@ module OpenAI module Internal module Type + # @api private + # # @generic Elem # # This module provides a base implementation for streaming responses in the SDK. 
@@ -26,6 +28,8 @@ class << self def defer_closing(stream) = ->(_id) { OpenAI::Internal::Util.close_fused!(stream) } end + # @api public + # # @return [void] def close = OpenAI::Internal::Util.close_fused!(@iterator) @@ -34,6 +38,8 @@ def close = OpenAI::Internal::Util.close_fused!(@iterator) # @return [Enumerable>] private def iterator = (raise NotImplementedError) + # @api public + # # @param blk [Proc] # # @yieldparam [generic] @@ -45,6 +51,8 @@ def each(&blk) @iterator.each(&blk) end + # @api public + # # @return [Enumerator>] def to_enum = @iterator diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index 298979c8..13c242e0 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -13,11 +13,15 @@ class Boolean private_class_method :new + # @api public + # # @param other [Object] # # @return [Boolean] def self.===(other) = other == true || other == false + # @api public + # # @param other [Object] # # @return [Boolean] diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index afdeb5f9..55df3e66 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -48,11 +48,15 @@ module Enum # @return [Array] def values = constants.map { const_get(_1) } + # @api public + # # @param other [Object] # # @return [Boolean] def ===(other) = values.include?(other) + # @api public + # # @param other [Object] # # @return [Boolean] @@ -62,6 +66,8 @@ def ==(other) # rubocop:enable Style/CaseEquality end + # @api public + # # @return [Integer] def hash = values.to_set.hash diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 40c7a89a..b586a220 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -30,6 +30,8 @@ class HashOf # @option spec [Boolean] :"nil?" def self.[](...) = new(...) 
+ # @api public + # # @param other [Object] # # @return [Boolean] @@ -50,6 +52,8 @@ def ===(other) end end + # @api public + # # @param other [Object] # # @return [Boolean] @@ -59,6 +63,8 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @api public + # # @return [Integer] def hash = [self.class, item_type].hash diff --git a/lib/openai/internal/type/io_like.rb b/lib/openai/internal/type/io_like.rb index 43aba589..568418be 100644 --- a/lib/openai/internal/type/io_like.rb +++ b/lib/openai/internal/type/io_like.rb @@ -13,6 +13,8 @@ class IOLike private_class_method :new + # @api public + # # @param other [Object] # # @return [Boolean] @@ -25,6 +27,8 @@ def self.===(other) end end + # @api public + # # @param other [Object] # # @return [Boolean] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 5f9048cf..3db41ef0 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -112,6 +112,8 @@ def variants = derefed_variants.map(&:last) # rubocop:disable Style/HashEachMethods # rubocop:disable Style/CaseEquality + # @api public + # # @param other [Object] # # @return [Boolean] @@ -121,6 +123,8 @@ def ===(other) end end + # @api public + # # @param other [Object] # # @return [Boolean] @@ -128,6 +132,8 @@ def ==(other) OpenAI::Internal::Type::Union === other && other.derefed_variants == derefed_variants end + # @api public + # # @return [Integer] def hash = variants.hash diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index 36556ac1..698303d5 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -15,11 +15,15 @@ class Unknown private_class_method :new + # @api public + # # @param other [Object] # # @return [Boolean] def self.===(other) = true + # @api public + # # @param other [Object] # # @return [Boolean] diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 8cd2c5c2..a139420e 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -27,6 +27,8 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel nil?: true # @!attribute function_call + # @deprecated + # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 77585995..05970b9f 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -141,6 +141,8 @@ class Delta < OpenAI::Internal::Type::BaseModel optional :content, String, nil?: true # @!attribute function_call + # @deprecated + # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 3d874f33..21ca6db0 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -39,6 +39,8 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudio }, nil?: true # @!attribute function_call + # @deprecated + # # Deprecated and replaced by `tool_calls`. 
The name and arguments of a function # that should be called, as generated by the model. # diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 999209e1..045de7a8 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -49,6 +49,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :frequency_penalty, Float, nil?: true # @!attribute function_call + # @deprecated + # # Deprecated in favor of `tool_choice`. # # Controls which (if any) function is called by the model. @@ -68,6 +70,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :function_call, union: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall } # @!attribute functions + # @deprecated + # # Deprecated in favor of `tools`. # # A list of functions the model may generate JSON inputs for. @@ -106,6 +110,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :max_completion_tokens, Integer, nil?: true # @!attribute max_tokens + # @deprecated + # # The maximum number of [tokens](/tokenizer) that can be generated in the chat # completion. This value can be used to control # [costs](https://openai.com/api/pricing/) for text generated via API. diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index 5031a63d..b6825ae1 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -43,6 +43,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel required :purpose, enum: -> { OpenAI::Models::FileObject::Purpose } # @!attribute status + # @deprecated + # # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. # @@ -56,6 +58,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel optional :expires_at, Integer # @!attribute status_details + # @deprecated + # # Deprecated. For details on why a fine-tuning training file failed validation, # see the `error` field on `fine_tuning.job`. # diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index ada446ed..2078e4bd 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -38,6 +38,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel required :training_file, String # @!attribute hyperparameters + # @deprecated + # # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 5aa982ea..3457ccea 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -15,6 +15,8 @@ class Reasoning < OpenAI::Internal::Type::BaseModel optional :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true # @!attribute generate_summary + # @deprecated + # # **Deprecated:** use `summary` instead. # # A summary of the reasoning performed by the model. This can be useful for diff --git a/rbi/lib/openai/client.rbi b/rbi/lib/openai/client.rbi index 64aba6c1..b70ce71a 100644 --- a/rbi/lib/openai/client.rbi +++ b/rbi/lib/openai/client.rbi @@ -93,10 +93,10 @@ module OpenAI # `"https://api.example.com/v2/"`. Defaults to `ENV["OPENAI_BASE_URL"]` base_url: ENV["OPENAI_BASE_URL"], # Max number of retries to attempt after a failed retryable request. 
-      max_retries: DEFAULT_MAX_RETRIES,
-      timeout: DEFAULT_TIMEOUT_IN_SECONDS,
-      initial_retry_delay: DEFAULT_INITIAL_RETRY_DELAY,
-      max_retry_delay: DEFAULT_MAX_RETRY_DELAY
+      max_retries: OpenAI::Client::DEFAULT_MAX_RETRIES,
+      timeout: OpenAI::Client::DEFAULT_TIMEOUT_IN_SECONDS,
+      initial_retry_delay: OpenAI::Client::DEFAULT_INITIAL_RETRY_DELAY,
+      max_retry_delay: OpenAI::Client::DEFAULT_MAX_RETRY_DELAY
     ); end
   end
 end
diff --git a/rbi/lib/openai/internal.rbi b/rbi/lib/openai/internal.rbi
index 05388159..8390947e 100644
--- a/rbi/lib/openai/internal.rbi
+++ b/rbi/lib/openai/internal.rbi
@@ -1,7 +1,6 @@
 # typed: strong
 module OpenAI
-  # @api private
   module Internal
     # Due to the current WIP status of Shapes support in Sorbet, types referencing
     # this alias might be refined in the future.
diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi
index cf8285ae..d97f840f 100644
--- a/rbi/lib/openai/internal/type/base_model.rbi
+++ b/rbi/lib/openai/internal/type/base_model.rbi
@@ -203,7 +203,6 @@ module OpenAI
         def inspect(depth: 0); end
       end
-      # @api private
       sig { returns(String) }
       def to_s; end
diff --git a/rbi/lib/openai/internal/type/base_page.rbi b/rbi/lib/openai/internal/type/base_page.rbi
index 3906abb9..6a3257fe 100644
--- a/rbi/lib/openai/internal/type/base_page.rbi
+++ b/rbi/lib/openai/internal/type/base_page.rbi
@@ -3,6 +3,8 @@ module OpenAI
   module Internal
     module Type
+      # @api private
+      #
       # This module provides a base implementation for paginated responses in the SDK.
       module BasePage
         Elem = type_member(:out)
diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/lib/openai/internal/type/base_stream.rbi
index 8dcf7413..ff3f1d51 100644
--- a/rbi/lib/openai/internal/type/base_stream.rbi
+++ b/rbi/lib/openai/internal/type/base_stream.rbi
@@ -3,6 +3,8 @@ module OpenAI
   module Internal
     module Type
+      # @api private
+      #
       # This module provides a base implementation for streaming responses in the SDK.
       module BaseStream
         include Enumerable

From 71b4e8bdc97503b01e7cda0821cc1f1b07d40a24 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 21 Apr 2025 15:54:15 +0000
Subject: [PATCH 150/295] chore(internal): minor type annotation improvements

---
 Steepfile                                    | 2 +-
 lib/openai/internal/cursor_page.rb           | 2 +-
 lib/openai/internal/page.rb                  | 2 +-
 lib/openai/internal/transport/base_client.rb | 8 ++++----
 lib/openai/internal/type/array_of.rb         | 2 ++
 lib/openai/internal/type/base_model.rb       | 2 +-
 lib/openai/internal/type/base_page.rb        | 2 +-
 lib/openai/internal/type/hash_of.rb          | 2 ++
 8 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/Steepfile b/Steepfile
index 6e5d0ac5..d7aebca1 100644
--- a/Steepfile
+++ b/Steepfile
@@ -2,7 +2,7 @@
 require "yaml"
-target :lib do
+target(:lib) do
   configure_code_diagnostics(Steep::Diagnostic::Ruby.strict)
   signature("sig")
diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb
index eb661553..3cb0654b 100644
--- a/lib/openai/internal/cursor_page.rb
+++ b/lib/openai/internal/cursor_page.rb
@@ -28,7 +28,7 @@ def next_page?
       end
       # @raise [OpenAI::HTTP::Error]
-      # @return [OpenAI::Internal::CursorPage]
+      # @return [self]
       def next_page
         unless next_page?
           message = "No more pages available. Please check #next_page? before calling ##{__method__}"
diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb
index 7df85077..f3792c4c 100644
--- a/lib/openai/internal/page.rb
+++ b/lib/openai/internal/page.rb
@@ -28,7 +28,7 @@ def next_page?
       end
       # @raise [OpenAI::HTTP::Error]
-      # @return [OpenAI::Internal::Page]
+      # @return [self]
       def next_page
         RuntimeError.new("No more pages available.")
       end
diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb
index 30b12835..f3fd4559 100644
--- a/lib/openai/internal/transport/base_client.rb
+++ b/lib/openai/internal/transport/base_client.rb
@@ -216,9 +216,9 @@ def initialize(
         #
         # @option req [Symbol, nil] :unwrap
         #
-        # @option req [Class, nil] :page
+        # @option req [Class, nil] :page
         #
-        # @option req [Class, nil] :stream
+        # @option req [Class, nil] :stream
         #
         # @option req [OpenAI::Internal::Type::Converter, Class, nil] :model
         #
@@ -417,9 +417,9 @@ def initialize(
       #
       # @param unwrap [Symbol, nil]
       #
-      # @param page [Class, nil]
+      # @param page [Class, nil]
       #
-      # @param stream [Class, nil]
+      # @param stream [Class, nil]
       #
       # @param model [OpenAI::Internal::Type::Converter, Class, nil]
       #
diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb
index dcfc2f95..5b1a4113 100644
--- a/lib/openai/internal/type/array_of.rb
+++ b/lib/openai/internal/type/array_of.rb
@@ -28,6 +28,8 @@ class ArrayOf
         # @option spec [Proc] :union
         #
         # @option spec [Boolean] :"nil?"
+        #
+        # @return [OpenAI::Internal::Type::ArrayOf]
         def self.[](...) = new(...)
         # @api public
diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb
index b356a480..af231837 100644
--- a/lib/openai/internal/type/base_model.rb
+++ b/lib/openai/internal/type/base_model.rb
@@ -389,7 +389,7 @@ def to_yaml(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_ya
         # Create a new instance of a model.
         #
-        # @param data [Hash{Symbol=>Object}, OpenAI::Internal::Type::BaseModel]
+        # @param data [Hash{Symbol=>Object}, self]
         def initialize(data = {})
           case OpenAI::Internal::Util.coerce_hash(data)
           in Hash => coerced
diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb
index e17895f2..3ebca02d 100644
--- a/lib/openai/internal/type/base_page.rb
+++ b/lib/openai/internal/type/base_page.rb
@@ -19,7 +19,7 @@ def next_page? = (raise NotImplementedError)
         # @api public
         #
         # @raise [OpenAI::Errors::APIError]
-        # @return [OpenAI::Internal::Type::BasePage]
+        # @return [self]
         def next_page = (raise NotImplementedError)
         # @api public
diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb
index b586a220..20280b5b 100644
--- a/lib/openai/internal/type/hash_of.rb
+++ b/lib/openai/internal/type/hash_of.rb
@@ -28,6 +28,8 @@ class HashOf
         # @option spec [Proc] :union
         #
         # @option spec [Boolean] :"nil?"
+        #
+        # @return [OpenAI::Internal::Type::HashOf]
         def self.[](...) = new(...)
         # @api public

From 3b7ed468c647ed3b71d7fec819228e1352e081f3 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 22 Apr 2025 20:13:34 +0000
Subject: [PATCH 151/295] chore(ci): add timeout thresholds for CI jobs

---
 .github/workflows/ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b2c0c8f7..4f7276bb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -10,6 +10,7 @@ on:
 jobs:
   lint:
+    timeout-minutes: 10
     name: lint
     runs-on: ubuntu-latest
     steps:
@@ -25,6 +26,7 @@ jobs:
       - name: Run lints
         run: ./scripts/lint
   test:
+    timeout-minutes: 10
     name: test
     runs-on: ubuntu-latest
     steps:

From bb12564912e0210858eb3140f1d3827c27b65c63 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 23 Apr 2025 16:30:52 +0000
Subject: [PATCH 152/295] feat(api): adding new image model support

---
 .stats.yml | 6 +-
 lib/openai.rb | 4 +
 .../beta/thread_create_and_run_params.rb | 15 +-
 lib/openai/models/eval_create_params.rb | 431 ++++++----
 lib/openai/models/eval_create_response.rb | 218 ++++-
 lib/openai/models/eval_label_model_grader.rb | 226 ++---
 lib/openai/models/eval_list_response.rb | 218 ++++-
 lib/openai/models/eval_retrieve_response.rb | 220 ++++-
 .../models/eval_text_similarity_grader.rb | 9 +-
 lib/openai/models/eval_update_response.rb | 218 ++++-
 ...create_eval_completions_run_data_source.rb | 492 +++++------
 .../models/evals/run_cancel_response.rb | 454 +++++++++-
 lib/openai/models/evals/run_create_params.rb | 473 ++++++++++-
 .../models/evals/run_create_response.rb | 454 +++++++++-
 lib/openai/models/evals/run_list_params.rb | 8 +-
 lib/openai/models/evals/run_list_response.rb | 450 +++++++++-
 .../models/evals/run_retrieve_response.rb | 456 +++++++++-
 .../checkpoints/permission_delete_params.rb | 8 +-
 lib/openai/models/image.rb | 14 +-
 .../models/image_create_variation_params.rb | 3 +-
 lib/openai/models/image_edit_params.rb | 89 +-
 lib/openai/models/image_generate_params.rb | 175 +++-
 lib/openai/models/image_model.rb | 1 +
 lib/openai/models/images_response.rb | 73 +-
 ...onse_reasoning_summary_part_added_event.rb | 69 ++
 ...ponse_reasoning_summary_part_done_event.rb | 69 ++
 ...onse_reasoning_summary_text_delta_event.rb | 48 ++
 ...ponse_reasoning_summary_text_done_event.rb | 48 ++
 .../models/responses/response_stream_event.rb | 18 +-
 lib/openai/resources/evals.rb | 7 +-
 lib/openai/resources/evals/runs.rb | 2 +-
 .../fine_tuning/checkpoints/permissions.rb | 18 +-
 lib/openai/resources/images.rb | 19 +-
 lib/openai/resources/responses.rb | 2 +-
 .../beta/thread_create_and_run_params.rbi | 12 -
 rbi/lib/openai/models/eval_create_params.rbi | 636 ++++++++------
 .../openai/models/eval_create_response.rbi | 315 ++++++-
 .../openai/models/eval_label_model_grader.rbi | 316 ++-----
 rbi/lib/openai/models/eval_list_response.rbi | 309 ++++++-
 .../openai/models/eval_retrieve_response.rbi | 326 +++++++-
 .../models/eval_text_similarity_grader.rbi | 9 +-
 .../openai/models/eval_update_response.rbi | 315 ++++++-
 ...reate_eval_completions_run_data_source.rbi | 786 +++++++-----------
 .../models/evals/run_cancel_response.rbi | 662 ++++++++++++++-
 .../openai/models/evals/run_create_params.rbi | 698 +++++++++++++++-
 .../models/evals/run_create_response.rbi | 662 ++++++++++++++-
 .../openai/models/evals/run_list_params.rbi | 8 +-
 .../openai/models/evals/run_list_response.rbi | 662 ++++++++++++++-
 .../models/evals/run_retrieve_response.rbi | 662 ++++++++++++++-
 .../checkpoints/permission_delete_params.rbi | 18 +-
 rbi/lib/openai/models/image.rbi | 14 +-
 .../models/image_create_variation_params.rbi | 3 +-
 rbi/lib/openai/models/image_edit_params.rbi | 88 +-
 .../openai/models/image_generate_params.rbi | 185 ++++-
 rbi/lib/openai/models/image_model.rbi | 1 +
 rbi/lib/openai/models/images_response.rbi | 100 ++-
 ...nse_reasoning_summary_part_added_event.rbi | 88 ++
 ...onse_reasoning_summary_part_done_event.rbi | 88 ++
 ...nse_reasoning_summary_text_delta_event.rbi | 55 ++
 ...onse_reasoning_summary_text_done_event.rbi | 55 ++
 .../responses/response_stream_event.rbi | 2 +-
 rbi/lib/openai/resources/evals.rbi | 9 +-
 rbi/lib/openai/resources/evals/runs.rbi | 7 +-
 .../fine_tuning/checkpoints/permissions.rbi | 5 +-
 rbi/lib/openai/resources/images.rbi | 105 ++-
 rbi/lib/openai/resources/responses.rbi | 4 +
 .../beta/thread_create_and_run_params.rbs | 17 +-
 sig/openai/models/eval_create_params.rbs | 281 ++++---
 sig/openai/models/eval_create_response.rbs | 161 +++-
 sig/openai/models/eval_label_model_grader.rbs | 169 ++--
 sig/openai/models/eval_list_response.rbs | 161 +++-
 sig/openai/models/eval_retrieve_response.rbs | 161 +++-
 .../models/eval_text_similarity_grader.rbs | 2 -
 sig/openai/models/eval_update_response.rbs | 161 +++-
 ...reate_eval_completions_run_data_source.rbs | 370 ++++-----
 .../models/evals/run_cancel_response.rbs | 341 +++++++-
 sig/openai/models/evals/run_create_params.rbs | 351 +++++++-
 .../models/evals/run_create_response.rbs | 341 +++++++-
 sig/openai/models/evals/run_list_response.rbs | 341 +++++++-
 .../models/evals/run_retrieve_response.rbs | 341 +++++++-
 .../checkpoints/permission_delete_params.rbs | 10 +-
 sig/openai/models/image_edit_params.rbs | 36 +-
 sig/openai/models/image_generate_params.rbs | 79 +-
 sig/openai/models/image_model.rbs | 3 +-
 sig/openai/models/images_response.rbs | 61 +-
 ...nse_reasoning_summary_part_added_event.rbs | 48 ++
 ...onse_reasoning_summary_part_done_event.rbs | 48 ++
 ...nse_reasoning_summary_text_delta_event.rbs | 36 +
 ...onse_reasoning_summary_text_done_event.rbs | 36 +
 .../responses/response_stream_event.rbs | 6 +-
 sig/openai/resources/beta/threads.rbs | 4 +-
 sig/openai/resources/evals.rbs | 1 -
 .../fine_tuning/checkpoints/permissions.rbs | 3 +-
 sig/openai/resources/images.rbs | 9 +-
 test/openai/resources/evals_test.rb | 4 -
 .../checkpoints/permissions_test.rb | 9 +-
 test/openai/resources/images_test.rb | 9 +-
 97 files changed, 13333 insertions(+), 2487 deletions(-)
 create mode 100644 lib/openai/models/responses/response_reasoning_summary_part_added_event.rb
 create mode 100644 lib/openai/models/responses/response_reasoning_summary_part_done_event.rb
 create mode 100644 lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb
 create mode 100644 lib/openai/models/responses/response_reasoning_summary_text_done_event.rb
 create mode 100644 rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi
 create mode 100644 rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi
 create mode 100644 rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi
 create mode 100644 rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi
 create mode 100644 sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs
 create mode 100644 sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs
 create mode 100644 sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs
 create mode 100644 sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs

diff --git a/.stats.yml b/.stats.yml
index c3609ee0..7738ef3d 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 95
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5633633cc38734869cf7d993f7b549bb8e4d10e0ec45381ec2cd91507cd8eb8f.yml
-openapi_spec_hash: c855121b2b2324b99499c9244c21d24d
-config_hash: d20837393b73efdb19cd08e04c1cc9a1
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-8b68ae6b807dca92e914da1dd9e835a20f69b075e79102a264367fd7fddddb33.yml
+openapi_spec_hash: b6ade5b1a6327339e6669e1134de2d03
+config_hash: b597cd9a31e9e5ec709e2eefb4c54122
diff --git a/lib/openai.rb b/lib/openai.rb
index 68a9c048..5bd82a3c 100644
--- a/lib/openai.rb
+++ b/lib/openai.rb
@@ -363,6 +363,10 @@
 require_relative "openai/models/responses/response_output_refusal"
 require_relative "openai/models/responses/response_output_text"
 require_relative "openai/models/responses/response_reasoning_item"
+require_relative "openai/models/responses/response_reasoning_summary_part_added_event"
+require_relative "openai/models/responses/response_reasoning_summary_part_done_event"
+require_relative "openai/models/responses/response_reasoning_summary_text_delta_event"
+require_relative "openai/models/responses/response_reasoning_summary_text_done_event"
 require_relative "openai/models/responses/response_refusal_delta_event"
 require_relative "openai/models/responses/response_refusal_done_event"
 require_relative "openai/models/responses/response_retrieve_params"
diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb
index 131ba814..979eb39f 100644
--- a/lib/openai/models/beta/thread_create_and_run_params.rb
+++ b/lib/openai/models/beta/thread_create_and_run_params.rb
@@ -140,7 +140,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel
         #
         #   @return [Array, nil]
         optional :tools,
-                 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Tool] },
+                 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] },
                  nil?: true
         # @!attribute top_p
@@ -584,19 +584,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
           end
         end
-        module Tool
-          extend OpenAI::Internal::Type::Union
-
-          variant -> { OpenAI::Models::Beta::CodeInterpreterTool }
-
-          variant -> { OpenAI::Models::Beta::FileSearchTool }
-
-          variant -> { OpenAI::Models::Beta::FunctionTool }
-
-          # @!method self.variants
-          #   @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)]
-        end
-
         class TruncationStrategy < OpenAI::Internal::Type::BaseModel
           # @!attribute type
           #   The truncation strategy to use for the thread. The default is `auto`. If set to
diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb
index 54d31b12..5f686026 100644
--- a/lib/openai/models/eval_create_params.rb
+++ b/lib/openai/models/eval_create_params.rb
@@ -10,13 +10,13 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
       # @!attribute data_source_config
       #   The configuration for the data source used for the evaluation runs.
       #
-      #   @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions]
+      #   @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs]
       required :data_source_config, union: -> { OpenAI::Models::EvalCreateParams::DataSourceConfig }
       # @!attribute testing_criteria
       #   A list of graders for all eval runs in this group.
       #
-      #   @return [Array]
+      #   @return [Array]
       required :testing_criteria,
                -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion] }
@@ -37,18 +37,11 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
       #   @return [String, nil]
       optional :name, String
-      # @!attribute share_with_openai
-      #   Indicates whether the evaluation is shared with OpenAI.
-      #
-      #   @return [Boolean, nil]
-      optional :share_with_openai, OpenAI::Internal::Type::Boolean
-
-      # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {})
-      #   @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions]
-      #   @param testing_criteria [Array]
+      # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {})
+      #   @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs]
+      #   @param testing_criteria [Array]
       #   @param metadata [Hash{Symbol=>String}, nil]
       #   @param name [String]
-      #   @param share_with_openai [Boolean]
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
       # The configuration for the data source used for the evaluation runs.
@@ -65,11 +58,11 @@ module DataSourceConfig
         # A data source config which specifies the metadata property of your stored completions query.
         # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc.
-        variant :stored_completions, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions }
+        variant :logs, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs }
         class Custom < OpenAI::Internal::Type::BaseModel
           # @!attribute item_schema
-          #   The json schema for the run data source items.
+          #   The json schema for each row in the data source.
           #
           #   @return [Hash{Symbol=>Object}]
           required :item_schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
@@ -81,7 +74,8 @@ class Custom < OpenAI::Internal::Type::BaseModel
           required :type, const: :custom
           # @!attribute include_sample_schema
-          #   Whether to include the sample schema in the data source.
+          #   Whether the eval should expect you to populate the sample namespace (ie, by
+          #   generating responses off of your data source)
           #
           #   @return [Boolean, nil]
           optional :include_sample_schema, OpenAI::Internal::Type::Boolean
@@ -99,35 +93,30 @@ class Custom < OpenAI::Internal::Type::BaseModel
           #   @param type [Symbol, :custom]
         end
-        class StoredCompletions < OpenAI::Internal::Type::BaseModel
+        class Logs < OpenAI::Internal::Type::BaseModel
           # @!attribute type
-          #   The type of data source. Always `stored_completions`.
+          #   The type of data source. Always `logs`.
           #
-          #   @return [Symbol, :stored_completions]
-          required :type, const: :stored_completions
+          #   @return [Symbol, :logs]
+          required :type, const: :logs
           # @!attribute metadata
-          #   Set of 16 key-value pairs that can be attached to an object. This can be useful
-          #   for storing additional information about the object in a structured format, and
-          #   querying for objects via API or the dashboard.
+          #   Metadata filters for the logs data source.
           #
-          #   Keys are strings with a maximum length of 64 characters. Values are strings with
-          #   a maximum length of 512 characters.
-          #
-          #   @return [Hash{Symbol=>String}, nil]
-          optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
+          #   @return [Hash{Symbol=>Object}, nil]
+          optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
-          # @!method initialize(metadata: nil, type: :stored_completions)
+          # @!method initialize(metadata: nil, type: :logs)
           #   A data source config which specifies the metadata property of your stored
           #   completions query. This is usually metadata like `usecase=chatbot` or
           #   `prompt-version=v2`, etc.
           #
-          #   @param metadata [Hash{Symbol=>String}, nil]
-          #   @param type [Symbol, :stored_completions]
+          #   @param metadata [Hash{Symbol=>Object}]
+          #   @param type [Symbol, :logs]
         end
         # @!method self.variants
-        #   @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)]
+        #   @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs)]
       end
       # A LabelModelGrader object which uses a model to assign labels to each item in
@@ -147,10 +136,18 @@ module TestingCriterion
         # A TextSimilarityGrader object which grades text based on similarity metrics.
         variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader }
+        # A PythonGrader object that runs a python script on the input.
+        variant :python, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::Python }
+
+        # A ScoreModelGrader object that uses a model to assign a score to the input.
+        variant :score_model, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel }
+
         class LabelModel < OpenAI::Internal::Type::BaseModel
           # @!attribute input
+          #   A list of chat messages forming the prompt or context. May include variable
+          #   references to the "item" namespace, ie {{item.name}}.
           #
-          #   @return [Array]
+          #   @return [Array]
           required :input,
                    -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input] }
@@ -188,21 +185,26 @@ class LabelModel < OpenAI::Internal::Type::BaseModel
          #   A LabelModelGrader object which uses a model to assign labels to each item in
          #   the evaluation.
          #
-         #   @param input [Array]
+         #   @param input [Array]
          #   @param labels [Array]
          #   @param model [String]
          #   @param name [String]
          #   @param passing_labels [Array]
          #   @param type [Symbol, :label_model]
+          # A chat message that makes up the prompt or context. May include variable
+          # references to the "item" namespace, ie {{item.name}}.
           module Input
             extend OpenAI::Internal::Type::Union
             variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage }
-            variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage }
-
-            variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage }
+            # A message input to the model with a role indicating instruction following
+            # hierarchy. Instructions given with the `developer` or `system` role take
+            # precedence over instructions given with the `user` role. Messages with the
+            # `assistant` role are presumed to have been generated by the model in previous
+            # interactions.
+            variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem }
             class SimpleInputMessage < OpenAI::Internal::Type::BaseModel
               # @!attribute content
@@ -222,71 +224,88 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel
               #   @param role [String]
             end
-            class InputMessage < OpenAI::Internal::Type::BaseModel
+            class EvalItem < OpenAI::Internal::Type::BaseModel
               # @!attribute content
+              #   Text inputs to the model - can contain template strings.
               #
-              #   @return [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content]
+              #   @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText]
               required :content,
-                       -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content }
+                       union: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content }
               # @!attribute role
-              #   The role of the message. One of `user`, `system`, or `developer`.
+              #   The role of the message input. One of `user`, `assistant`, `system`, or
+              #   `developer`.
               #
-              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role]
+              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role]
               required :role,
-                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role }
+                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role }
               # @!attribute type
-              #   The type of item, which is always `message`.
+              #   The type of the message input. Always `message`.
               #
-              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type]
-              required :type,
-                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type }
-
-              # @!method initialize(content:, role:, type:)
-              #   @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content]
-              #   @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role]
-              #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type]
-
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#content
-              class Content < OpenAI::Internal::Type::BaseModel
-                # @!attribute text
-                #   The text content.
-                #
-                #   @return [String]
-                required :text, String
-
-                # @!attribute type
-                #   The type of content, which is always `input_text`.
-                #
-                #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type]
-                required :type,
-                         enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type }
-
-                # @!method initialize(text:, type:)
-                #   @param text [String]
-                #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type]
-
-                # The type of content, which is always `input_text`.
-                #
-                # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content#type
-                module Type
-                  extend OpenAI::Internal::Type::Enum
-
-                  INPUT_TEXT = :input_text
+              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil]
+              optional :type,
+                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type }
+
+              # @!method initialize(content:, role:, type: nil)
+              #   A message input to the model with a role indicating instruction following
+              #   hierarchy. Instructions given with the `developer` or `system` role take
+              #   precedence over instructions given with the `user` role. Messages with the
+              #   `assistant` role are presumed to have been generated by the model in previous
+              #   interactions.
+              #
+              #   @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText]
+              #   @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role]
+              #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type]
-                # @!method self.values
-                #   @return [Array]
+              # Text inputs to the model - can contain template strings.
+              #
+              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content
+              module Content
+                extend OpenAI::Internal::Type::Union
+
+                # A text input to the model.
+                variant String
+
+                # A text input to the model.
+                variant -> { OpenAI::Models::Responses::ResponseInputText }
+
+                # A text output from the model.
+                variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText }
+
+                class OutputText < OpenAI::Internal::Type::BaseModel
+                  # @!attribute text
+                  #   The text output from the model.
+                  #
+                  #   @return [String]
+                  required :text, String
+
+                  # @!attribute type
+                  #   The type of the output text. Always `output_text`.
+                  #
+                  #   @return [Symbol, :output_text]
+                  required :type, const: :output_text
+
+                  # @!method initialize(text:, type: :output_text)
+                  #   A text output from the model.
+                  #
+                  #   @param text [String]
+                  #   @param type [Symbol, :output_text]
                 end
+
+                # @!method self.variants
+                #   @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)]
               end
-              # The role of the message. One of `user`, `system`, or `developer`.
+              # The role of the message input. One of `user`, `assistant`, `system`, or
+              # `developer`.
               #
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#role
+              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role
               module Role
                 extend OpenAI::Internal::Type::Enum
                 USER = :user
+                ASSISTANT = :assistant
                 SYSTEM = :system
                 DEVELOPER = :developer
@@ -294,9 +313,9 @@ module Role
                 #   @return [Array]
               end
-              # The type of item, which is always `message`.
+              # The type of the message input. Always `message`.
               #
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#type
+              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type
               module Type
                 extend OpenAI::Internal::Type::Enum
@@ -307,96 +326,210 @@ module Type
               end
             end
-            class OutputMessage < OpenAI::Internal::Type::BaseModel
-              # @!attribute content
-              #
-              #   @return [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content]
-              required :content,
-                       -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content }
+            # @!method self.variants
+            #   @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)]
+          end
+        end
-              # @!attribute role
-              #   The role of the message. Must be `assistant` for output.
-              #
-              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role]
-              required :role,
-                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role }
+        class Python < OpenAI::Internal::Type::BaseModel
+          # @!attribute name
+          #   The name of the grader.
+          #
+          #   @return [String]
+          required :name, String
-              # @!attribute type
-              #   The type of item, which is always `message`.
-              #
-              #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type]
-              required :type,
-                       enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type }
+          # @!attribute source
+          #   The source code of the python script.
+          #
+          #   @return [String]
+          required :source, String
+
+          # @!attribute type
+          #   The object type, which is always `python`.
+          #
+          #   @return [Symbol, :python]
+          required :type, const: :python
+
+          # @!attribute image_tag
+          #   The image tag to use for the python script.
+          #
+          #   @return [String, nil]
+          optional :image_tag, String
+
+          # @!attribute pass_threshold
+          #   The threshold for the score.
+          #
+          #   @return [Float, nil]
+          optional :pass_threshold, Float
+
+          # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python)
+          #   A PythonGrader object that runs a python script on the input.
+          #
+          #   @param name [String]
+          #   @param source [String]
+          #   @param image_tag [String]
+          #   @param pass_threshold [Float]
+          #   @param type [Symbol, :python]
+        end
+
+        class ScoreModel < OpenAI::Internal::Type::BaseModel
+          # @!attribute input
+          #   The input text. This may include template strings.
+          #
+          #   @return [Array]
+          required :input,
+                   -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input] }
+
+          # @!attribute model
+          #   The model to use for the evaluation.
+          #
+          #   @return [String]
+          required :model, String
+
+          # @!attribute name
+          #   The name of the grader.
+          #
+          #   @return [String]
+          required :name, String
-              # @!method initialize(content:, role:, type:)
-              #   @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content]
-              #   @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role]
-              #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type]
+          # @!attribute type
+          #   The object type, which is always `score_model`.
+          #
+          #   @return [Symbol, :score_model]
+          required :type, const: :score_model
+
+          # @!attribute pass_threshold
+          #   The threshold for the score.
+          #
+          #   @return [Float, nil]
+          optional :pass_threshold, Float
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#content
-              class Content < OpenAI::Internal::Type::BaseModel
+          # @!attribute range
+          #   The range of the score. Defaults to `[0, 1]`.
+          #
+          #   @return [Array, nil]
+          optional :range, OpenAI::Internal::Type::ArrayOf[Float]
+
+          # @!attribute sampling_params
+          #   The sampling parameters for the model.
+          #
+          #   @return [Object, nil]
+          optional :sampling_params, OpenAI::Internal::Type::Unknown
+
+          # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model)
+          #   A ScoreModelGrader object that uses a model to assign a score to the input.
+          #
+          #   @param input [Array]
+          #   @param model [String]
+          #   @param name [String]
+          #   @param pass_threshold [Float]
+          #   @param range [Array]
+          #   @param sampling_params [Object]
+          #   @param type [Symbol, :score_model]
+
+          class Input < OpenAI::Internal::Type::BaseModel
+            # @!attribute content
+            #   Text inputs to the model - can contain template strings.
+            #
+            #   @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            required :content,
+                     union: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content }
+
+            # @!attribute role
+            #   The role of the message input. One of `user`, `assistant`, `system`, or
+            #   `developer`.
+            #
+            #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role]
+            required :role, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role }
+
+            # @!attribute type
+            #   The type of the message input. Always `message`.
+            #
+            #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type, nil]
+            optional :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type }
+
+            # @!method initialize(content:, role:, type: nil)
+            #   A message input to the model with a role indicating instruction following
+            #   hierarchy. Instructions given with the `developer` or `system` role take
+            #   precedence over instructions given with the `user` role. Messages with the
+            #   `assistant` role are presumed to have been generated by the model in previous
+            #   interactions.
+            #
+            #   @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            #   @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role]
+            #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type]
+
+            # Text inputs to the model - can contain template strings.
+            #
+            # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#content
+            module Content
+              extend OpenAI::Internal::Type::Union
+
+              # A text input to the model.
+              variant String
+
+              # A text input to the model.
+              variant -> { OpenAI::Models::Responses::ResponseInputText }
+
+              # A text output from the model.
+              variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText }
+
+              class OutputText < OpenAI::Internal::Type::BaseModel
                 # @!attribute text
-                #   The text content.
+                #   The text output from the model.
                 #
                 #   @return [String]
                 required :text, String
                 # @!attribute type
-                #   The type of content, which is always `output_text`.
+                #   The type of the output text. Always `output_text`.
                 #
-                #   @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type]
-                required :type,
-                         enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type }
-
-                # @!method initialize(text:, type:)
-                #   @param text [String]
-                #   @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type]
+                #   @return [Symbol, :output_text]
+                required :type, const: :output_text
-                # The type of content, which is always `output_text`.
+                # @!method initialize(text:, type: :output_text)
+                #   A text output from the model.
                 #
-                # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content#type
-                module Type
-                  extend OpenAI::Internal::Type::Enum
-
-                  OUTPUT_TEXT = :output_text
-
-                  # @!method self.values
-                  #   @return [Array]
-                end
+                #   @param text [String]
+                #   @param type [Symbol, :output_text]
               end
-              # The role of the message. Must be `assistant` for output.
-              #
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#role
-              module Role
-                extend OpenAI::Internal::Type::Enum
+              # @!method self.variants
+              #   @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText)]
+            end
-                ASSISTANT = :assistant
+            # The role of the message input. One of `user`, `assistant`, `system`, or
+            # `developer`.
+            #
+            # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#role
+            module Role
+              extend OpenAI::Internal::Type::Enum
-                # @!method self.values
-                #   @return [Array]
-              end
+              USER = :user
+              ASSISTANT = :assistant
+              SYSTEM = :system
+              DEVELOPER = :developer
-              # The type of item, which is always `message`.
-              #
-              # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#type
-              module Type
-                extend OpenAI::Internal::Type::Enum
+              # @!method self.values
+              #   @return [Array]
+            end
-                MESSAGE = :message
+            # The type of the message input. Always `message`.
+            #
+            # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#type
+            module Type
+              extend OpenAI::Internal::Type::Enum
-                # @!method self.values
-                #   @return [Array]
-              end
-            end
+              MESSAGE = :message
-            # @!method self.variants
-            #   @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)]
+              # @!method self.values
+              #   @return [Array]
+            end
           end
         end
         # @!method self.variants
-        #   @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)]
+        #   @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel)]
       end
     end
   end
diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb
index 12832a04..65885fd7 100644
--- a/lib/openai/models/eval_create_response.rb
+++ b/lib/openai/models/eval_create_response.rb
@@ -45,20 +45,14 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel
       #   @return [Symbol, :eval]
       required :object, const: :eval
-      # @!attribute share_with_openai
-      #   Indicates whether the evaluation is shared with OpenAI.
-      #
-      #   @return [Boolean]
-      required :share_with_openai, OpenAI::Internal::Type::Boolean
-
       # @!attribute testing_criteria
       #   A list of testing criteria.
       #
-      #   @return [Array]
+      #   @return [Array]
       required :testing_criteria,
               -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] }
-      # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval)
+      # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval)
       #   An Eval object with a data source config and testing criteria. An Eval
       #   represents a task to be done for your LLM integration. Like:
       #
@@ -71,8 +65,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel
       #   @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
       #   @param metadata [Hash{Symbol=>String}, nil]
       #   @param name [String]
-      #   @param share_with_openai [Boolean]
-      #   @param testing_criteria [Array]
+      #   @param testing_criteria [Array]
       #   @param object [Symbol, :eval]
       # Configuration of data sources used in runs of the evaluation.
@@ -116,8 +109,211 @@ module TestingCriterion
         # A TextSimilarityGrader object which grades text based on similarity metrics.
         variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader }
+        # A PythonGrader object that runs a python script on the input.
+        variant :python, -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::Python }
+
+        # A ScoreModelGrader object that uses a model to assign a score to the input.
+        variant :score_model, -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel }
+
+        class Python < OpenAI::Internal::Type::BaseModel
+          # @!attribute name
+          #   The name of the grader.
+          #
+          #   @return [String]
+          required :name, String
+
+          # @!attribute source
+          #   The source code of the python script.
+          #
+          #   @return [String]
+          required :source, String
+
+          # @!attribute type
+          #   The object type, which is always `python`.
+          #
+          #   @return [Symbol, :python]
+          required :type, const: :python
+
+          # @!attribute image_tag
+          #   The image tag to use for the python script.
+          #
+          #   @return [String, nil]
+          optional :image_tag, String
+
+          # @!attribute pass_threshold
+          #   The threshold for the score.
+          #
+          #   @return [Float, nil]
+          optional :pass_threshold, Float
+
+          # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python)
+          #   A PythonGrader object that runs a python script on the input.
+          #
+          #   @param name [String]
+          #   @param source [String]
+          #   @param image_tag [String]
+          #   @param pass_threshold [Float]
+          #   @param type [Symbol, :python]
+        end
+
+        class ScoreModel < OpenAI::Internal::Type::BaseModel
+          # @!attribute input
+          #   The input text. This may include template strings.
+          #
+          #   @return [Array]
+          required :input,
+                   -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input] }
+
+          # @!attribute model
+          #   The model to use for the evaluation.
+          #
+          #   @return [String]
+          required :model, String
+
+          # @!attribute name
+          #   The name of the grader.
+          #
+          #   @return [String]
+          required :name, String
+
+          # @!attribute type
+          #   The object type, which is always `score_model`.
+          #
+          #   @return [Symbol, :score_model]
+          required :type, const: :score_model
+
+          # @!attribute pass_threshold
+          #   The threshold for the score.
+          #
+          #   @return [Float, nil]
+          optional :pass_threshold, Float
+
+          # @!attribute range
+          #   The range of the score. Defaults to `[0, 1]`.
+          #
+          #   @return [Array, nil]
+          optional :range, OpenAI::Internal::Type::ArrayOf[Float]
+
+          # @!attribute sampling_params
+          #   The sampling parameters for the model.
+          #
+          #   @return [Object, nil]
+          optional :sampling_params, OpenAI::Internal::Type::Unknown
+
+          # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model)
+          #   A ScoreModelGrader object that uses a model to assign a score to the input.
+          #
+          #   @param input [Array]
+          #   @param model [String]
+          #   @param name [String]
+          #   @param pass_threshold [Float]
+          #   @param range [Array]
+          #   @param sampling_params [Object]
+          #   @param type [Symbol, :score_model]
+
+          class Input < OpenAI::Internal::Type::BaseModel
+            # @!attribute content
+            #   Text inputs to the model - can contain template strings.
+            #
+            #   @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            required :content,
+                     union: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content }
+
+            # @!attribute role
+            #   The role of the message input. One of `user`, `assistant`, `system`, or
+            #   `developer`.
+            #
+            #   @return [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role]
+            required :role, enum: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role }
+
+            # @!attribute type
+            #   The type of the message input. Always `message`.
+            #
+            #   @return [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type, nil]
+            optional :type, enum: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type }
+
+            # @!method initialize(content:, role:, type: nil)
+            #   A message input to the model with a role indicating instruction following
+            #   hierarchy. Instructions given with the `developer` or `system` role take
+            #   precedence over instructions given with the `user` role. Messages with the
+            #   `assistant` role are presumed to have been generated by the model in previous
+            #   interactions.
+            #
+            #   @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            #   @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role]
+            #   @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type]
+
+            # Text inputs to the model - can contain template strings.
+            #
+            # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#content
+            module Content
+              extend OpenAI::Internal::Type::Union
+
+              # A text input to the model.
+              variant String
+
+              # A text input to the model.
+              variant -> { OpenAI::Models::Responses::ResponseInputText }
+
+              # A text output from the model.
+              variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText }
+
+              class OutputText < OpenAI::Internal::Type::BaseModel
+                # @!attribute text
+                #   The text output from the model.
+                #
+                #   @return [String]
+                required :text, String
+
+                # @!attribute type
+                #   The type of the output text. Always `output_text`.
+                #
+                #   @return [Symbol, :output_text]
+                required :type, const: :output_text
+
+                # @!method initialize(text:, type: :output_text)
+                #   A text output from the model.
+                #
+                #   @param text [String]
+                #   @param type [Symbol, :output_text]
+              end
+
+              # @!method self.variants
+              #   @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)]
+            end
+
+            # The role of the message input. One of `user`, `assistant`, `system`, or
+            # `developer`.
+            #
+            # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#role
+            module Role
+              extend OpenAI::Internal::Type::Enum
+
+              USER = :user
+              ASSISTANT = :assistant
+              SYSTEM = :system
+              DEVELOPER = :developer
+
+              # @!method self.values
+              #   @return [Array]
+            end
+
+            # The type of the message input. Always `message`.
+            #
+            # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#type
+            module Type
+              extend OpenAI::Internal::Type::Enum
+
+              MESSAGE = :message
+
+              # @!method self.values
+              #   @return [Array]
+            end
+          end
+        end
+
         # @!method self.variants
-        #   @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)]
+        #   @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel)]
       end
     end
   end
diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb
index 2318e553..2af7746f 100644
--- a/lib/openai/models/eval_label_model_grader.rb
+++ b/lib/openai/models/eval_label_model_grader.rb
@@ -5,9 +5,8 @@ module Models
     class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel
       # @!attribute input
       #
-      #   @return [Array]
-      required :input,
-               -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalLabelModelGrader::Input] }
+      #   @return [Array]
+      required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalLabelModelGrader::Input] }
       # @!attribute labels
       #   The labels to assign to each item in the evaluation.
@@ -43,173 +42,110 @@ class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel
       #   A LabelModelGrader object which uses a model to assign labels to each item in
       #   the evaluation.
       #
-      #   @param input [Array]
+      #   @param input [Array]
       #   @param labels [Array]
       #   @param model [String]
       #   @param name [String]
       #   @param passing_labels [Array]
       #   @param type [Symbol, :label_model]
-      # An item can either be an input message or an output message.
-      module Input
-        extend OpenAI::Internal::Type::Union
-
-        discriminator :role
-
-        variant :assistant, -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant }
-
-        variant -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage }
-
-        class InputMessage < OpenAI::Internal::Type::BaseModel
-          # @!attribute content
-          #
-          #   @return [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content]
-          required :content, -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content }
-
-          # @!attribute role
-          #   The role of the message. One of `user`, `system`, or `developer`.
-          #
-          #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role]
-          required :role, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role }
-
-          # @!attribute type
-          #   The type of item, which is always `message`.
-          #
-          #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type]
-          required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type }
-
-          # @!method initialize(content:, role:, type:)
-          #   @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content]
-          #   @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role]
-          #   @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type]
-
-          # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#content
-          class Content < OpenAI::Internal::Type::BaseModel
+      class Input < OpenAI::Internal::Type::BaseModel
+        # @!attribute content
+        #   Text inputs to the model - can contain template strings.
+        #
+        #   @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText]
+        required :content, union: -> { OpenAI::Models::EvalLabelModelGrader::Input::Content }
+
+        # @!attribute role
+        #   The role of the message input. One of `user`, `assistant`, `system`, or
+        #   `developer`.
+        #
+        #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role]
+        required :role, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Role }
+
+        # @!attribute type
+        #   The type of the message input. Always `message`.
+        #
+        #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type, nil]
+        optional :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Type }
+
+        # @!method initialize(content:, role:, type: nil)
+        #   A message input to the model with a role indicating instruction following
+        #   hierarchy. Instructions given with the `developer` or `system` role take
+        #   precedence over instructions given with the `user` role. Messages with the
+        #   `assistant` role are presumed to have been generated by the model in previous
+        #   interactions.
+        #
+        #   @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText]
+        #   @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role]
+        #   @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type]
+
+        # Text inputs to the model - can contain template strings.
+        #
+        # @see OpenAI::Models::EvalLabelModelGrader::Input#content
+        module Content
+          extend OpenAI::Internal::Type::Union
+
+          # A text input to the model.
+          variant String
+
+          # A text input to the model.
+          variant -> { OpenAI::Models::Responses::ResponseInputText }
+
+          # A text output from the model.
+          variant -> { OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText }
+
+          class OutputText < OpenAI::Internal::Type::BaseModel
             # @!attribute text
-            #   The text content.
+            #   The text output from the model.
             #
             #   @return [String]
             required :text, String
             # @!attribute type
-            #   The type of content, which is always `input_text`.
+            #   The type of the output text. Always `output_text`.
             #
-            #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type]
-            required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type }
-
-            # @!method initialize(text:, type:)
-            #   @param text [String]
-            #   @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type]
+            #   @return [Symbol, :output_text]
+            required :type, const: :output_text
-            # The type of content, which is always `input_text`.
+            # @!method initialize(text:, type: :output_text)
+            #   A text output from the model.
             #
-            # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content#type
-            module Type
-              extend OpenAI::Internal::Type::Enum
-
-              INPUT_TEXT = :input_text
-
-              # @!method self.values
-              #   @return [Array]
-            end
-          end
-
-          # The role of the message. One of `user`, `system`, or `developer`.
-          #
-          # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#role
-          module Role
-            extend OpenAI::Internal::Type::Enum
-
-            USER = :user
-            SYSTEM = :system
-            DEVELOPER = :developer
-
-            # @!method self.values
-            #   @return [Array]
+            #   @param text [String]
+            #   @param type [Symbol, :output_text]
           end
-          # The type of item, which is always `message`.
-          #
-          # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#type
-          module Type
-            extend OpenAI::Internal::Type::Enum
-
-            MESSAGE = :message
-
-            # @!method self.values
-            #   @return [Array]
-          end
+          # @!method self.variants
+          #   @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText)]
         end
-        class Assistant < OpenAI::Internal::Type::BaseModel
-          # @!attribute content
-          #
-          #   @return [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content]
-          required :content, -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content }
-
-          # @!attribute role
-          #   The role of the message. Must be `assistant` for output.
-          #
-          #   @return [Symbol, :assistant]
-          required :role, const: :assistant
-
-          # @!attribute type
-          #   The type of item, which is always `message`.
-          #
-          #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type]
-          required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type }
-
-          # @!method initialize(content:, type:, role: :assistant)
-          #   @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content]
-          #   @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type]
-          #   @param role [Symbol, :assistant]
-
-          # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#content
-          class Content < OpenAI::Internal::Type::BaseModel
-            # @!attribute text
-            #   The text content.
-            #
-            #   @return [String]
-            required :text, String
+        # The role of the message input. One of `user`, `assistant`, `system`, or
+        # `developer`.
+        #
+        # @see OpenAI::Models::EvalLabelModelGrader::Input#role
+        module Role
+          extend OpenAI::Internal::Type::Enum
-            # @!attribute type
-            #   The type of content, which is always `output_text`.
-            #
-            #   @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type]
-            required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type }
+          USER = :user
+          ASSISTANT = :assistant
+          SYSTEM = :system
+          DEVELOPER = :developer
-            # @!method initialize(text:, type:)
-            #   @param text [String]
-            #   @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type]
-
-            # The type of content, which is always `output_text`.
-            #
-            # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content#type
-            module Type
-              extend OpenAI::Internal::Type::Enum
-
-              OUTPUT_TEXT = :output_text
-
-              # @!method self.values
-              #   @return [Array]
-            end
-          end
+          # @!method self.values
+          #   @return [Array]
+        end
-          # The type of item, which is always `message`.
-          #
-          # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#type
-          module Type
-            extend OpenAI::Internal::Type::Enum
+        # The type of the message input. Always `message`.
+ # + # @see OpenAI::Models::EvalLabelModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum - MESSAGE = :message + MESSAGE = :message - # @!method self.values - # @return [Array] - end + # @!method self.values + # @return [Array] end - - # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] end end end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index 0abeed04..d80085f0 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -45,20 +45,14 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @return [Symbol, :eval] required :object, const: :eval - # @!attribute share_with_openai - # Indicates whether the evaluation is shared with OpenAI. - # - # @return [Boolean] - required :share_with_openai, OpenAI::Internal::Type::Boolean - # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } - # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. Like: # @@ -71,8 +65,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] - # @param share_with_openai [Boolean] - # @param testing_criteria [Array] + # @param testing_criteria [Array] # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. @@ -116,8 +109,211 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + # A PythonGrader object that runs a python script on the input. + variant :python, -> { OpenAI::Models::EvalListResponse::TestingCriterion::Python } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant :score_model, -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel } + + class Python < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute source + # The source code of the python script. + # + # @return [String] + required :source, String + + # @!attribute type + # The object type, which is always `python`. + # + # @return [Symbol, :python] + required :type, const: :python + + # @!attribute image_tag + # The image tag to use for the python script. + # + # @return [String, nil] + optional :image_tag, String + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # A PythonGrader object that runs a python script on the input. 
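+ #
+ # A minimal construction sketch (the grader name, script, and threshold below
+ # are hypothetical; omitted optional fields are simply left nil):
+ #
+ #   grader = OpenAI::Models::EvalListResponse::TestingCriterion::Python.new(
+ #     name: "length_check",
+ #     source: "def grade(sample, item):\n    return 1.0",
+ #     pass_threshold: 0.5
+ #   )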
+ # + # @param name [String] + # @param source [String] + # @param image_tag [String] + # @param pass_threshold [Float] + # @param type [Symbol, :python] + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input text. This may include template strings. + # + # @return [Array] + required :input, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input] } + + # @!attribute model + # The model to use for the evaluation. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute type + # The object type, which is always `score_model`. + # + # @return [Symbol, :score_model] + required :type, const: :score_model + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!attribute range + # The range of the score. Defaults to `[0, 1]`. + # + # @return [Array, nil] + optional :range, OpenAI::Internal::Type::ArrayOf[Float] + + # @!attribute sampling_params + # The sampling parameters for the model. + # + # @return [Object, nil] + optional :sampling_params, OpenAI::Internal::Type::Unknown + + # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # A ScoreModelGrader object that uses a model to assign a score to the input. + # + # @param input [Array] + # @param model [String] + # @param name [String] + # @param pass_threshold [Float] + # @param range [Array] + # @param sampling_params [Object] + # @param type [Symbol, :score_model] + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + required :content, + union: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] + required :role, enum: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type, nil] + optional :type, enum: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] + # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] + + # Text inputs to the model - can contain template strings. 
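+ #
+ # For instance, a complete `ScoreModel` criterion whose input uses such template
+ # strings might look like this (model, name, and threshold are hypothetical):
+ #
+ #   criterion = OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel.new(
+ #     name: "helpfulness",
+ #     model: "gpt-4o",
+ #     input: [
+ #       OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input.new(
+ #         role: :developer,
+ #         content: "Rate the answer to {{item.question}} from 0 to 1."
+ #       )
+ #     ],
+ #     pass_threshold: 0.7
+ #   )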
+ # + # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index b1d14b18..e90d4965 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -45,20 +45,14 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [Symbol, :eval] required :object, const: :eval - # @!attribute share_with_openai - # Indicates whether the evaluation is shared with OpenAI. - # - # @return [Boolean] - required :share_with_openai, OpenAI::Internal::Type::Boolean - # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } - # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. 
Like: # @@ -71,8 +65,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] - # @param share_with_openai [Boolean] - # @param testing_criteria [Array] + # @param testing_criteria [Array] # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. @@ -116,8 +109,213 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + # A PythonGrader object that runs a python script on the input. + variant :python, -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant :score_model, -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel } + + class Python < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute source + # The source code of the python script. + # + # @return [String] + required :source, String + + # @!attribute type + # The object type, which is always `python`. + # + # @return [Symbol, :python] + required :type, const: :python + + # @!attribute image_tag + # The image tag to use for the python script. + # + # @return [String, nil] + optional :image_tag, String + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # A PythonGrader object that runs a python script on the input. + # + # @param name [String] + # @param source [String] + # @param image_tag [String] + # @param pass_threshold [Float] + # @param type [Symbol, :python] + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input text. This may include template strings. + # + # @return [Array] + required :input, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input] } + + # @!attribute model + # The model to use for the evaluation. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute type + # The object type, which is always `score_model`. + # + # @return [Symbol, :score_model] + required :type, const: :score_model + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!attribute range + # The range of the score. Defaults to `[0, 1]`. + # + # @return [Array, nil] + optional :range, OpenAI::Internal::Type::ArrayOf[Float] + + # @!attribute sampling_params + # The sampling parameters for the model. + # + # @return [Object, nil] + optional :sampling_params, OpenAI::Internal::Type::Unknown + + # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # A ScoreModelGrader object that uses a model to assign a score to the input. 
+ # + # @param input [Array] + # @param model [String] + # @param name [String] + # @param pass_threshold [Float] + # @param range [Array] + # @param sampling_params [Object] + # @param type [Symbol, :score_model] + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + required :content, + union: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] + required :role, + enum: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type, nil] + optional :type, + enum: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] + # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 8d037316..2a8d70e0 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -4,8 +4,8 @@ module OpenAI module Models class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @!attribute evaluation_metric - # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, - # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # # @return [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] required :evaluation_metric, enum: -> { OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric } @@ -50,8 +50,8 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @param name [String] # @param type [Symbol, :text_similarity] - # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, - # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # # @see OpenAI::Models::EvalTextSimilarityGrader#evaluation_metric module EvaluationMetric @@ -67,7 +67,6 @@ module EvaluationMetric ROUGE_4 = :rouge_4 ROUGE_5 = :rouge_5 ROUGE_L = :rouge_l - COSINE = :cosine # @!method self.values # @return [Array] diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 08e01385..923a9e03 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -45,20 +45,14 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @return [Symbol, :eval] required :object, const: :eval - # @!attribute share_with_openai - # Indicates whether the evaluation is shared with OpenAI. - # - # @return [Boolean] - required :share_with_openai, OpenAI::Internal::Type::Boolean - # @!attribute testing_criteria # A list of testing criteria. 
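+ #
+ # With `cosine` removed from the similarity metrics above, an existing grader
+ # must migrate to one of the remaining values. A partial sketch (the grader
+ # name is hypothetical, and the grader's other attributes are omitted here):
+ #
+ #   grader = OpenAI::Models::EvalTextSimilarityGrader.new(
+ #     evaluation_metric: :fuzzy_match,   # previously :cosine
+ #     name: "similarity_to_reference"
+ #   )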
# - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } - # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. Like: # @@ -71,8 +65,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] - # @param share_with_openai [Boolean] - # @param testing_criteria [Array] + # @param testing_criteria [Array] # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. @@ -116,8 +109,211 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + # A PythonGrader object that runs a python script on the input. + variant :python, -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant :score_model, -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel } + + class Python < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute source + # The source code of the python script. + # + # @return [String] + required :source, String + + # @!attribute type + # The object type, which is always `python`. + # + # @return [Symbol, :python] + required :type, const: :python + + # @!attribute image_tag + # The image tag to use for the python script. + # + # @return [String, nil] + optional :image_tag, String + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # A PythonGrader object that runs a python script on the input. + # + # @param name [String] + # @param source [String] + # @param image_tag [String] + # @param pass_threshold [Float] + # @param type [Symbol, :python] + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input text. This may include template strings. + # + # @return [Array] + required :input, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input] } + + # @!attribute model + # The model to use for the evaluation. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute type + # The object type, which is always `score_model`. + # + # @return [Symbol, :score_model] + required :type, const: :score_model + + # @!attribute pass_threshold + # The threshold for the score. + # + # @return [Float, nil] + optional :pass_threshold, Float + + # @!attribute range + # The range of the score. Defaults to `[0, 1]`. 
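+ #
+ # The `pass_threshold` is read against this range: a grader scored on a 0-5
+ # scale would set `range: [0.0, 5.0]` and pick a threshold inside it, e.g.
+ # `pass_threshold: 3.5` (values hypothetical; the exact boundary semantics are
+ # defined by the API, not shown in this patch).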
+ # + # @return [Array, nil] + optional :range, OpenAI::Internal::Type::ArrayOf[Float] + + # @!attribute sampling_params + # The sampling parameters for the model. + # + # @return [Object, nil] + optional :sampling_params, OpenAI::Internal::Type::Unknown + + # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # A ScoreModelGrader object that uses a model to assign a score to the input. + # + # @param input [Array] + # @param model [String] + # @param name [String] + # @param pass_threshold [Float] + # @param range [Array] + # @param sampling_params [Object] + # @param type [Symbol, :score_model] + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + required :content, + union: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] + required :role, enum: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type, nil] + optional :type, enum: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] + # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. 
+ # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index ac306948..d4f61b7b 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -4,18 +4,6 @@ module OpenAI module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - # @!attribute input_messages - # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - required :input_messages, - union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String] - required :model, String - # @!attribute source # A StoredCompletionsRunDataSource configuration describing a set of filters # @@ -28,274 +16,33 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type } + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). 
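+ #
+ # Because `input_messages` and `model` are now optional, the smallest
+ # construction needs only a source and a type. A sketch, assuming the `Type`
+ # enum includes `:completions` and that `Source::FileID` takes the file's
+ # identifier as `id`, as its counterparts elsewhere in this patch do (the file
+ # ID itself is hypothetical):
+ #
+ #   ds = OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource.new(
+ #     type: :completions,
+ #     source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID.new(
+ #       id: "file-abc123"
+ #     )
+ #   )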
+ # + # @return [String, nil] + optional :model, String + # @!attribute sampling_params # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } - # @!method initialize(input_messages:, model:, source:, type:, sampling_params: nil) + # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - # @param model [String] # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param model [String] # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - # - # @return [Array] - required :template, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] - - module Template - extend OpenAI::Internal::Type::Union - - variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage } - - variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage } - - variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). 
- # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] - end - - class InputMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] - required :content, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content } - - # @!attribute role - # The role of the message. One of `user`, `system`, or `developer`. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] - required :role, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role } - - # @!attribute type - # The type of item, which is always `message`. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] - required :type, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type } - - # @!method initialize(content:, role:, type:) - # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] - # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] - - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#content - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text content. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of content, which is always `input_text`. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] - required :type, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type } - - # @!method initialize(text:, type:) - # @param text [String] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] - - # The type of content, which is always `input_text`. - # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content#type - module Type - extend OpenAI::Internal::Type::Enum - - INPUT_TEXT = :input_text - - # @!method self.values - # @return [Array] - end - end - - # The role of the message. One of `user`, `system`, or `developer`. - # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of item, which is always `message`. 
- # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - class OutputMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] - required :content, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content } - - # @!attribute role - # The role of the message. Must be `assistant` for output. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] - required :role, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role } - - # @!attribute type - # The type of item, which is always `message`. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] - required :type, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type } - - # @!method initialize(content:, role:, type:) - # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] - # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] - - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#content - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text content. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of content, which is always `output_text`. - # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] - required :type, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type } - - # @!method initialize(text:, type:) - # @param text [String] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] - - # The type of content, which is always `output_text`. - # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content#type - module Type - extend OpenAI::Internal::Type::Enum - - OUTPUT_TEXT = :output_text - - # @!method self.values - # @return [Array] - end - end - - # The role of the message. Must be `assistant` for output. - # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#role - module Role - extend OpenAI::Internal::Type::Enum - - ASSISTANT = :assistant - - # @!method self.values - # @return [Array] - end - - # The type of item, which is always `message`. 
- # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)] - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] - end - # A StoredCompletionsRunDataSource configuration describing a set of filters # # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#source @@ -367,23 +114,29 @@ class FileID < OpenAI::Internal::Type::BaseModel end class StoredCompletions < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of source. Always `stored_completions`. + # + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions + # @!attribute created_after # An optional Unix timestamp to filter items created after this time. # # @return [Integer, nil] - required :created_after, Integer, nil?: true + optional :created_after, Integer, nil?: true # @!attribute created_before # An optional Unix timestamp to filter items created before this time. # # @return [Integer, nil] - required :created_before, Integer, nil?: true + optional :created_before, Integer, nil?: true # @!attribute limit # An optional maximum number of items to return. # # @return [Integer, nil] - required :limit, Integer, nil?: true + optional :limit, Integer, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -394,21 +147,15 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # a maximum length of 512 characters. # # @return [Hash{Symbol=>String}, nil] - required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!attribute model # An optional model to filter by (e.g., 'gpt-4o'). # # @return [String, nil] - required :model, String, nil?: true - - # @!attribute type - # The type of source. Always `stored_completions`. 
- # - # @return [Symbol, :stored_completions] - required :type, const: :stored_completions + optional :model, String, nil?: true - # @!method initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions) + # @!method initialize(created_after: nil, created_before: nil, limit: nil, metadata: nil, model: nil, type: :stored_completions) # A StoredCompletionsRunDataSource configuration describing a set of filters # # @param created_after [Integer, nil] @@ -435,6 +182,191 @@ module Type # @return [Array] end + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant :message, -> { OpenAI::Models::Responses::EasyInputMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant :message, + -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message } + + class Message < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
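+ #
+ # Because every filter on the `StoredCompletions` source above is now optional,
+ # a source can name just the filters it needs; `type` defaults to
+ # `:stored_completions` (the model and limit values are hypothetical):
+ #
+ #   source = OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions.new(
+ #     model: "gpt-4o",
+ #     limit: 100
+ #   )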
+ # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + required :role, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. 
+ # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] + end + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 19775cff..2412cad7 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -20,7 +20,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error @@ -101,7 +101,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param id [String] # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] # @param error [OpenAI::Models::Evals::EvalAPIError] # @param eval_id [String] # @param metadata [Hash{Symbol=>String}, nil] @@ -128,8 +128,456 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + # A ResponsesRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions } + + class Completions < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. 
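+ #
+ # Returning to the `input_messages` union defined above: a `Template` carries
+ # inline messages, while an `ItemReference` points at a field of the data-source
+ # row instead. A sketch of the reference form (the referenced variable name is
+ # hypothetical):
+ #
+ #   ref = OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference.new(
+ #     item_reference: "item.input_messages"
+ #   )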
+ # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source } + + # @!attribute type + # The type of run data source. Always `completions`. + # + # @return [Symbol, :completions] + required :type, const: :completions + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] + # @param model [String] + # @param sampling_params [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams] + # @param type [Symbol, :completions] + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. 
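+ #
+ # A sketch of inline jsonl content (row values hypothetical): each row pairs an
+ # `item` with an optional `sample`, per the `Content` model defined just below,
+ # and `type` defaults to `:file_content`:
+ #
+ #   src = OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent.new(
+ #     content: [
+ #       OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content.new(
+ #         item: {question: "What is 2 + 2?"},
+ #         sample: {answer: "4"}
+ #       )
+ #     ]
+ #   )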
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute allow_parallel_tool_calls + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional search string for instructions. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. 
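+ #
+ # Taken together, these query parameters let a `Responses` source select a
+ # slice of previously stored responses. A sketch (the model name and timestamp
+ # are hypothetical; `type` defaults to `:responses`):
+ #
+ #   src = OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses.new(
+ #     model: "gpt-4o",
+ #     created_after: 1_730_000_000,
+ #     has_tool_calls: true
+ #   )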
+ # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # A EvalResponsesSource object describing a run data source configuration. + # + # @param allow_parallel_tool_calls [Boolean, nil] + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param has_tool_calls [Boolean, nil] + # @param instructions_search [String, nil] + # @param metadata [Object, nil] + # @param model [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param users [Array, nil] + # @param type [Symbol, :responses] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses)] + end + + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
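+ #
+ # For orientation, the enclosing Content union accepts three shapes; a
+ # hedged sketch with invented values:
+ #
+ #   "a plain template string"
+ #   {type: :input_text, text: "{{item.question}}"}
+ #   {type: :output_text, text: "the model answered 4"}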
+ variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ #
+ # @return [Symbol, :output_text]
+ required :type, const: :output_text
+
+ # @!method initialize(text:, type: :output_text)
+ # A text output from the model.
+ #
+ # @param text [String]
+ # @param type [Symbol, :output_text]
+ end
+
+ # @!method self.variants
+ # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)]
+ end
+
+ # The role of the message input. One of `user`, `assistant`, `system`, or
+ # `developer`.
+ #
+ # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role
+ module Role
+ extend OpenAI::Internal::Type::Enum
+
+ USER = :user
+ ASSISTANT = :assistant
+ SYSTEM = :system
+ DEVELOPER = :developer
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+
+ # The type of the message input. Always `message`.
+ #
+ # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ MESSAGE = :message
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)]
+ end
+ end
+
+ class ItemReference < OpenAI::Internal::Type::BaseModel
+ # @!attribute item_reference
+ # A reference to a variable in the "item" namespace, i.e. "item.name"
+ #
+ # @return [String]
+ required :item_reference, String
+
+ # @!attribute type
+ # The type of input messages. Always `item_reference`.
+ #
+ # @return [Symbol, :item_reference]
+ required :type, const: :item_reference
+
+ # @!method initialize(item_reference:, type: :item_reference)
+ # @param item_reference [String]
+ # @param type [Symbol, :item_reference]
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference)]
+ end
+
+ # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#sampling_params
+ class SamplingParams < OpenAI::Internal::Type::BaseModel
+ # @!attribute max_completion_tokens
+ # The maximum number of tokens in the generated output.
+ #
+ # @return [Integer, nil]
+ optional :max_completion_tokens, Integer
+
+ # @!attribute seed
+ # A seed value to initialize the randomness during sampling.
+ #
+ # @return [Integer, nil]
+ optional :seed, Integer
+
+ # @!attribute temperature
+ # A higher temperature increases randomness in the outputs.
+ #
+ # @return [Float, nil]
+ optional :temperature, Float
+
+ # @!attribute top_p
+ # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 61365782..6ee4266d 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,7 +11,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource } # @!attribute metadata @@ -32,7 +32,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -47,8 +47,475 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + # A ResponsesRunDataSource object describing a model sampling configuration. + variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + required :source, + union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source } + + # @!attribute type + # The type of run data source. Always `completions`. 
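+ #
+ # A hedged end-to-end sketch (the `client.evals.runs.create` call and all
+ # identifiers such as "eval_abc123" are illustrative assumptions, not part
+ # of this diff):
+ #
+ #   client = OpenAI::Client.new
+ #   run = client.evals.runs.create(
+ #     "eval_abc123",
+ #     data_source: {
+ #       type: :completions,
+ #       source: {type: :file_id, id: "file-abc123"},
+ #       model: "o3-mini"
+ #     }
+ #   )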
+ # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + required :type, + enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type } + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } + + # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + # @param input_messages [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + # @param model [String] + # @param sampling_params [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent } + + variant :file_id, + -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> do + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ] + end + + # @!attribute type + # The type of jsonl source. Always `file_content`. 
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute allow_parallel_tool_calls + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional search string for instructions. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. 
+ #
+ # @return [Float, nil]
+ optional :top_p, Float, nil?: true
+
+ # @!attribute users
+ # List of user identifiers. This is a query parameter used to select responses.
+ #
+ # @return [Array<String>, nil]
+ optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true
+
+ # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses)
+ # An EvalResponsesSource object describing a run data source configuration.
+ #
+ # @param allow_parallel_tool_calls [Boolean, nil]
+ # @param created_after [Integer, nil]
+ # @param created_before [Integer, nil]
+ # @param has_tool_calls [Boolean, nil]
+ # @param instructions_search [String, nil]
+ # @param metadata [Object, nil]
+ # @param model [String, nil]
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil]
+ # @param temperature [Float, nil]
+ # @param top_p [Float, nil]
+ # @param users [Array<String>, nil]
+ # @param type [Symbol, :responses]
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)]
+ end
+
+ # The type of run data source. Always `completions`.
+ #
+ # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ COMPLETIONS = :completions
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+
+ # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages
+ module InputMessages
+ extend OpenAI::Internal::Type::Union
+
+ discriminator :type
+
+ variant :template,
+ -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template }
+
+ variant :item_reference,
+ -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference }
+
+ class Template < OpenAI::Internal::Type::BaseModel
+ # @!attribute template
+ # A list of chat messages forming the prompt or context. May include variable
+ # references to the "item" namespace, i.e. {{item.name}}.
+ #
+ # @return [Array<OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem>]
+ required :template,
+ -> do
+ OpenAI::Internal::Type::ArrayOf[
+ union: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template
+ ]
+ end
+
+ # @!attribute type
+ # The type of input messages. Always `template`.
+ #
+ # @return [Symbol, :template]
+ required :type, const: :template
+
+ # @!method initialize(template:, type: :template)
+ # @param template [Array<OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem>]
+ # @param type [Symbol, :template]
+
+ # A message input to the model with a role indicating instruction following
+ # hierarchy. Instructions given with the `developer` or `system` role take
+ # precedence over instructions given with the `user` role. Messages with the
+ # `assistant` role are presumed to have been generated by the model in previous
+ # interactions.
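+ #
+ # A hedged sketch of a template input_messages payload (the hash form and
+ # all values are illustrative assumptions):
+ #
+ #   input_messages = {
+ #     type: :template,
+ #     template: [
+ #       {role: :system, content: "Answer concisely."},
+ #       {role: :user, content: "{{item.question}}"}
+ #     ]
+ #   }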
+ module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] + + # Text inputs to the model - can contain template strings. 
+ # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. 
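+ #
+ # A hedged sampling_params sketch (every value is an invented example):
+ #
+ #   {max_completion_tokens: 256, seed: 42, temperature: 0.7, top_p: 1.0}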
+ # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 25892cb6..22a5752b 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,7 +20,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error @@ -101,7 +101,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param id [String] # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] # @param error [OpenAI::Models::Evals::EvalAPIError] # @param eval_id [String] # @param metadata [Hash{Symbol=>String}, nil] @@ -128,8 +128,456 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + # A ResponsesRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions } + + class Completions < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source } + + # @!attribute type + # The type of run data source. 
Always `completions`. + # + # @return [Symbol, :completions] + required :type, const: :completions + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] + # @param model [String] + # @param sampling_params [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams] + # @param type [Symbol, :completions] + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. 
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute allow_parallel_tool_calls + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional search string for instructions. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. 
+ # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # A EvalResponsesSource object describing a run data source configuration. + # + # @param allow_parallel_tool_calls [Boolean, nil] + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param has_tool_calls [Boolean, nil] + # @param instructions_search [String, nil] + # @param metadata [Object, nil] + # @param model [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param users [Array, nil] + # @param type [Symbol, :responses] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses)] + end + + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
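+ #
+ # For orientation, the two variant shapes (values invented): a ChatMessage
+ # is a bare role/content pair with free-form strings, while an EvalItem
+ # constrains role to an enum and may carry type: :message.
+ #
+ #   {role: "user", content: "Hi"}                  # ChatMessage
+ #   {role: :user, content: "Hi", type: :message}   # EvalItem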
+ variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ #
+ # @return [Symbol, :output_text]
+ required :type, const: :output_text
+
+ # @!method initialize(text:, type: :output_text)
+ # A text output from the model.
+ #
+ # @param text [String]
+ # @param type [Symbol, :output_text]
+ end
+
+ # @!method self.variants
+ # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)]
+ end
+
+ # The role of the message input. One of `user`, `assistant`, `system`, or
+ # `developer`.
+ #
+ # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role
+ module Role
+ extend OpenAI::Internal::Type::Enum
+
+ USER = :user
+ ASSISTANT = :assistant
+ SYSTEM = :system
+ DEVELOPER = :developer
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+
+ # The type of the message input. Always `message`.
+ #
+ # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ MESSAGE = :message
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)]
+ end
+ end
+
+ class ItemReference < OpenAI::Internal::Type::BaseModel
+ # @!attribute item_reference
+ # A reference to a variable in the "item" namespace, i.e. "item.name"
+ #
+ # @return [String]
+ required :item_reference, String
+
+ # @!attribute type
+ # The type of input messages. Always `item_reference`.
+ #
+ # @return [Symbol, :item_reference]
+ required :type, const: :item_reference
+
+ # @!method initialize(item_reference:, type: :item_reference)
+ # @param item_reference [String]
+ # @param type [Symbol, :item_reference]
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference)]
+ end
+
+ # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#sampling_params
+ class SamplingParams < OpenAI::Internal::Type::BaseModel
+ # @!attribute max_completion_tokens
+ # The maximum number of tokens in the generated output.
+ #
+ # @return [Integer, nil]
+ optional :max_completion_tokens, Integer
+
+ # @!attribute seed
+ # A seed value to initialize the randomness during sampling.
+ #
+ # @return [Integer, nil]
+ optional :seed, Integer
+
+ # @!attribute temperature
+ # A higher temperature increases randomness in the outputs.
+ #
+ # @return [Float, nil]
+ optional :temperature, Float
+
+ # @!attribute top_p
+ # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index a2b2afb4..66cd4270 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -28,8 +28,8 @@ class RunListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Evals::RunListParams::Order } # @!attribute status - # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | - # "canceled". + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } @@ -53,8 +53,8 @@ module Order # @return [Array] end - # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | - # "canceled". + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index cda98be8..941d74e1 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,7 +20,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error @@ -101,7 +101,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param id [String] # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] # @param error [OpenAI::Models::Evals::EvalAPIError] # @param eval_id [String] # @param metadata [Hash{Symbol=>String}, nil] @@ -128,8 +128,452 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + # A ResponsesRunDataSource object describing a model sampling configuration. 
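+ #
+ # Runs carrying this data source come back from list calls; a hedged sketch
+ # of a status-filtered listing (the resource method and ID are assumptions):
+ #
+ #   client.evals.runs.list("eval_abc123", status: :completed, order: :desc)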
+ variant :completions, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions }
+
+ class Completions < OpenAI::Internal::Type::BaseModel
+ # @!attribute source
+ # An EvalResponsesSource object describing a run data source configuration.
+ #
+ # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses]
+ required :source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source }
+
+ # @!attribute type
+ # The type of run data source. Always `completions`.
+ #
+ # @return [Symbol, :completions]
+ required :type, const: :completions
+
+ # @!attribute input_messages
+ #
+ # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference, nil]
+ optional :input_messages,
+ union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages }
+
+ # @!attribute model
+ # The name of the model to use for generating completions (e.g. "o3-mini").
+ #
+ # @return [String, nil]
+ optional :model, String
+
+ # @!attribute sampling_params
+ #
+ # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, nil]
+ optional :sampling_params,
+ -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams }
+
+ # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions)
+ # A ResponsesRunDataSource object describing a model sampling configuration.
+ #
+ # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses]
+ # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference]
+ # @param model [String]
+ # @param sampling_params [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams]
+ # @param type [Symbol, :completions]
+
+ # An EvalResponsesSource object describing a run data source configuration.
+ #
+ # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#source
+ module Source
+ extend OpenAI::Internal::Type::Union
+
+ discriminator :type
+
+ variant :file_content,
+ -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent }
+
+ variant :file_id, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID }
+
+ # An EvalResponsesSource object describing a run data source configuration.
+ variant :responses,
+ -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses }
+
+ class FileContent < OpenAI::Internal::Type::BaseModel
+ # @!attribute content
+ # The content of the jsonl file.
+ #
+ # @return [Array<OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content>]
+ required :content,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] }
+
+ # @!attribute type
+ # The type of jsonl source. Always `file_content`.
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute allow_parallel_tool_calls + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional search string for instructions. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. 
+ # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # A EvalResponsesSource object describing a run data source configuration. + # + # @param allow_parallel_tool_calls [Boolean, nil] + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param has_tool_calls [Boolean, nil] + # @param instructions_search [String, nil] + # @param metadata [Object, nil] + # @param model [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param users [Array, nil] + # @param type [Symbol, :responses] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses)] + end + + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
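+                  # An illustrative template entry of this shape (values are hypothetical):
+                  #
+                  #   {role: :user, content: "Classify: {{item.text}}", type: :message}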
+ variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
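+                #   For example, `top_p: 0.1` restricts sampling to the tokens making up the
+                #   top 10% of the probability mass.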
+ # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index dabdd0a5..55c35341 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,7 +20,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error @@ -101,7 +101,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param id [String] # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] # @param error [OpenAI::Models::Evals::EvalAPIError] # @param eval_id [String] # @param metadata [Hash{Symbol=>String}, nil] @@ -128,8 +128,458 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + # A ResponsesRunDataSource object describing a model sampling configuration. + variant :completions, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions } + + class Completions < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] + required :source, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source } + + # @!attribute type + # The type of run data source. Always `completions`. 
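+                #
+                #   A complete data source of this shape might look like (values are
+                #   illustrative only):
+                #
+                #     {type: :completions, source: {type: :file_id, id: "file-123"}, model: "o3-mini"}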
+ # + # @return [Symbol, :completions] + required :type, const: :completions + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] + # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] + # @param model [String] + # @param sampling_params [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams] + # @param type [Symbol, :completions] + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent } + + variant :file_id, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. 
+ # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute allow_parallel_tool_calls + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional search string for instructions. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. 
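+                  #
+                  #   These filters can be combined; a hypothetical selection might be:
+                  #
+                  #     {type: :responses, model: "o3-mini", created_after: 1700000000, has_tool_calls: true}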
+ # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # A EvalResponsesSource object describing a run data source configuration. + # + # @param allow_parallel_tool_calls [Boolean, nil] + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param has_tool_calls [Boolean, nil] + # @param instructions_search [String, nil] + # @param metadata [Object, nil] + # @param model [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param users [Array, nil] + # @param type [Symbol, :responses] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses)] + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
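+                  # For instance, an assistant turn carrying prior model output (values are
+                  # hypothetical):
+                  #
+                  #   {role: :assistant, content: {text: "It is sunny.", type: :output_text}, type: :message}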
+ variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] + # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Models::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # A text output from the model. + # + # @param text [String] + # @param type [Symbol, :output_text] + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
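+                #
+                #   A full set of sampling params might look like (values are illustrative):
+                #
+                #     {max_completion_tokens: 256, seed: 42, temperature: 0.7, top_p: 1.0}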
+ # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + end + end + # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb index 339cc7ea..402eb4c2 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -9,7 +9,13 @@ class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!method initialize(request_options: {}) + # @!attribute fine_tuned_model_checkpoint + # + # @return [String] + required :fine_tuned_model_checkpoint, String + + # @!method initialize(fine_tuned_model_checkpoint:, request_options: {}) + # @param fine_tuned_model_checkpoint [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index d76cb186..52ace3e6 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -4,27 +4,29 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel # @!attribute b64_json - # The base64-encoded JSON of the generated image, if `response_format` is - # `b64_json`. + # The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, + # and only present if `response_format` is set to `b64_json` for `dall-e-2` and + # `dall-e-3`. # # @return [String, nil] optional :b64_json, String # @!attribute revised_prompt - # The prompt that was used to generate the image, if there was any revision to the - # prompt. + # For `dall-e-3` only, the revised prompt that was used to generate the image. # # @return [String, nil] optional :revised_prompt, String # @!attribute url - # The URL of the generated image, if `response_format` is `url` (default). + # When using `dall-e-2` or `dall-e-3`, the URL of the generated image if + # `response_format` is set to `url` (default value). Unsupported for + # `gpt-image-1`. # # @return [String, nil] optional :url, String # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) - # Represents the url or the content of an image generated by the OpenAI API. + # Represents the content or the URL of an image generated by the OpenAI API. 
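+  #
+  #   A minimal sketch for consuming an `image` instance (assumes the `base64`
+  #   stdlib; the output path is hypothetical):
+  #
+  #     if (b64 = image.b64_json)
+  #       File.binwrite("out.png", Base64.decode64(b64))
+  #     elsif image.url
+  #       puts image.url
+  #     end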
# # @param b64_json [String] # @param revised_prompt [String] diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 763a9b9a..4a665cb8 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -22,8 +22,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel optional :model, union: -> { OpenAI::Models::ImageCreateVariationParams::Model }, nil?: true # @!attribute n - # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # The number of images to generate. Must be between 1 and 10. # # @return [Integer, nil] optional :n, Integer, nil?: true diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 19525b78..7abf3372 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -8,30 +8,34 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel include OpenAI::Internal::Type::RequestParameters # @!attribute image - # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. # - # @return [Pathname, StringIO] - required :image, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, Array] + required :image, union: -> { OpenAI::Models::ImageEditParams::Image } # @!attribute prompt # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. # # @return [String] required :prompt, String # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) - # indicate where `image` should be edited. Must be a valid PNG file, less than + # indicate where `image` should be edited. If there are multiple images provided, + # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. # # @return [Pathname, StringIO, nil] optional :mask, OpenAI::Internal::Type::IOLike # @!attribute model - # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. # # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::Models::ImageEditParams::Model }, nil?: true @@ -42,17 +46,27 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true + # @!attribute quality + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. + # + # @return [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] + optional :quality, enum: -> { OpenAI::Models::ImageEditParams::Quality }, nil?: true + # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. 
URLs are only valid for 60 minutes after the image has been - # generated. + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. # # @return [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::ImageEditParams::ResponseFormat }, nil?: true # @!attribute size - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. # # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageEditParams::Size }, nil?: true @@ -65,34 +79,70 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) - # @param image [Pathname, StringIO] + # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @param image [Pathname, StringIO, Array] # @param prompt [String] # @param mask [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # @param n [Integer, nil] + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] # @param user [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. + module Image + extend OpenAI::Internal::Type::Union + + variant OpenAI::Internal::Type::IOLike + + variant -> { OpenAI::Models::ImageEditParams::Image::StringArray } + + # @!method self.variants + # @return [Array(StringIO, Array)] + + StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::IOLike] + end + + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. module Model extend OpenAI::Internal::Type::Union variant String - # The model to use for image generation. Only `dall-e-2` is supported at this time. + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` is used. variant enum: -> { OpenAI::Models::ImageModel } # @!method self.variants # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. 
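+      #
+      # A hypothetical edit request using these params (resource method name and
+      # inputs assumed):
+      #
+      #   client.images.edit(
+      #     image: Pathname("photo.png"),
+      #     prompt: "Add a red hat",
+      #     model: "gpt-image-1",
+      #     quality: :high
+      #   )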
+ module Quality + extend OpenAI::Internal::Type::Enum + + STANDARD = :standard + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -103,8 +153,9 @@ module ResponseFormat # @return [Array] end - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. module Size extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 9e5550f1..56986585 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -8,18 +8,40 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel include OpenAI::Internal::Type::RequestParameters # @!attribute prompt - # A text description of the desired image(s). The maximum length is 1000 - # characters for `dall-e-2` and 4000 characters for `dall-e-3`. + # A text description of the desired image(s). The maximum length is 32000 + # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters + # for `dall-e-3`. # # @return [String] required :prompt, String + # @!attribute background + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + # + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] + optional :background, enum: -> { OpenAI::Models::ImageGenerateParams::Background }, nil?: true + # @!attribute model - # The model to use for image generation. + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. # # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::Models::ImageGenerateParams::Model }, nil?: true + # @!attribute moderation + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). + # + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] + optional :moderation, enum: -> { OpenAI::Models::ImageGenerateParams::Moderation }, nil?: true + # @!attribute n # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # `n=1` is supported. @@ -27,35 +49,56 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true + # @!attribute output_compression + # The compression level (0-100%) for the generated images. 
This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + # + # @return [Integer, nil] + optional :output_compression, Integer, nil?: true + + # @!attribute output_format + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. + # + # @return [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] + optional :output_format, enum: -> { OpenAI::Models::ImageGenerateParams::OutputFormat }, nil?: true + # @!attribute quality - # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] - optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality } + optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality }, nil?: true # @!attribute response_format - # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::ImageGenerateParams::ResponseFormat }, nil?: true # @!attribute size - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageGenerateParams::Size }, nil?: true # @!attribute style - # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images. Natural causes the model to + # produce more natural, less hyper-real looking images. 
# # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::Models::ImageGenerateParams::Style }, nil?: true @@ -68,46 +111,104 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @!method initialize(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # @param prompt [String] + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] # @param n [Integer, nil] - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] + # @param output_compression [Integer, nil] + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] # @param user [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The model to use for image generation. + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. module Model extend OpenAI::Internal::Type::Union variant String - # The model to use for image generation. + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` is used. variant enum: -> { OpenAI::Models::ImageModel } # @!method self.variants # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end - # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). + module Moderation + extend OpenAI::Internal::Type::Enum + + LOW = :low + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. 
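+      #
+      # A hypothetical generation request combining the new `gpt-image-1` params
+      # (resource method name and inputs assumed):
+      #
+      #   client.images.generate(
+      #     prompt: "A line drawing of an otter",
+      #     model: "gpt-image-1",
+      #     background: :transparent,
+      #     output_format: :png,
+      #     quality: :high
+      #   )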
+ module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + JPEG = :jpeg + WEBP = :webp + + # @!method self.values + # @return [Array] + end + + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. module Quality extend OpenAI::Internal::Type::Enum STANDARD = :standard HD = :hd + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto # @!method self.values # @return [Array] end - # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -118,15 +219,19 @@ module ResponseFormat # @return [Array] end - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. module Size extend OpenAI::Internal::Type::Enum + AUTO = :auto + SIZE_1024X1024 = :"1024x1024" + SIZE_1536X1024 = :"1536x1024" + SIZE_1024X1536 = :"1024x1536" SIZE_256X256 = :"256x256" SIZE_512X512 = :"512x512" - SIZE_1024X1024 = :"1024x1024" SIZE_1792X1024 = :"1792x1024" SIZE_1024X1792 = :"1024x1792" @@ -134,10 +239,10 @@ module Size # @return [Array] end - # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images. Natural causes the model to + # produce more natural, less hyper-real looking images. module Style extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb index 4b6ca64c..0bd2690e 100644 --- a/lib/openai/models/image_model.rb +++ b/lib/openai/models/image_model.rb @@ -7,6 +7,7 @@ module ImageModel DALL_E_2 = :"dall-e-2" DALL_E_3 = :"dall-e-3" + GPT_IMAGE_1 = :"gpt-image-1" # @!method self.values # @return [Array] diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index 8816ee07..90c77f01 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -5,18 +5,85 @@ module Models # @see OpenAI::Resources::Images#create_variation class ImagesResponse < OpenAI::Internal::Type::BaseModel # @!attribute created + # The Unix timestamp (in seconds) of when the image was created. 
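+      #   For example, `Time.at(response.created)` converts it to a Time instance
+      #   (variable name illustrative).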
# # @return [Integer] required :created, Integer # @!attribute data + # The list of generated images. # - # @return [Array] - required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] } + # @return [Array, nil] + optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] } - # @!method initialize(created:, data:) + # @!attribute usage + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @return [OpenAI::Models::ImagesResponse::Usage, nil] + optional :usage, -> { OpenAI::Models::ImagesResponse::Usage } + + # @!method initialize(created:, data: nil, usage: nil) + # The response from the image generation endpoint. + # # @param created [Integer] # @param data [Array] + # @param usage [OpenAI::Models::ImagesResponse::Usage] + + # @see OpenAI::Models::ImagesResponse#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute input_tokens + # The number of tokens (images and text) in the input prompt. + # + # @return [Integer] + required :input_tokens, Integer + + # @!attribute input_tokens_details + # The input tokens detailed information for the image generation. + # + # @return [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::Models::ImagesResponse::Usage::InputTokensDetails } + + # @!attribute output_tokens + # The number of image tokens in the output image. + # + # @return [Integer] + required :output_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens (images and text) used for the image generation. + # + # @return [Integer] + required :total_tokens, Integer + + # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @param input_tokens [Integer] + # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] + # @param output_tokens [Integer] + # @param total_tokens [Integer] + + # @see OpenAI::Models::ImagesResponse::Usage#input_tokens_details + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + # @!attribute image_tokens + # The number of image tokens in the input prompt. + # + # @return [Integer] + required :image_tokens, Integer + + # @!attribute text_tokens + # The number of text tokens in the input prompt. + # + # @return [Integer] + required :text_tokens, Integer + + # @!method initialize(image_tokens:, text_tokens:) + # The input tokens detailed information for the image generation. + # + # @param image_tokens [Integer] + # @param text_tokens [Integer] + end + end end end end diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb new file mode 100644 index 00000000..a7f42ec3 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the item this summary part is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this summary part is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute part + # The summary part that was added. 
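+      #   For example (values illustrative):
+      #
+      #     {text: "The model weighed both options.", type: :summary_text}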
+ # + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] + required :part, -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part } + + # @!attribute summary_index + # The index of the summary part within the reasoning summary. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute type + # The type of the event. Always `response.reasoning_summary_part.added`. + # + # @return [Symbol, :"response.reasoning_summary_part.added"] + required :type, const: :"response.reasoning_summary_part.added" + + # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.added") + # Emitted when a new reasoning summary part is added. + # + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] + # @param summary_index [Integer] + # @param type [Symbol, :"response.reasoning_summary_part.added"] + + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent#part + class Part < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text of the summary part. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the summary part. Always `summary_text`. + # + # @return [Symbol, :summary_text] + required :type, const: :summary_text + + # @!method initialize(text:, type: :summary_text) + # The summary part that was added. + # + # @param text [String] + # @param type [Symbol, :summary_text] + end + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb new file mode 100644 index 00000000..c868638f --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -0,0 +1,69 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the item this summary part is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this summary part is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute part + # The completed summary part. + # + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] + required :part, -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part } + + # @!attribute summary_index + # The index of the summary part within the reasoning summary. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute type + # The type of the event. Always `response.reasoning_summary_part.done`. + # + # @return [Symbol, :"response.reasoning_summary_part.done"] + required :type, const: :"response.reasoning_summary_part.done" + + # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.done") + # Emitted when a reasoning summary part is completed. 
+ # + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] + # @param summary_index [Integer] + # @param type [Symbol, :"response.reasoning_summary_part.done"] + + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent#part + class Part < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text of the summary part. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the summary part. Always `summary_text`. + # + # @return [Symbol, :summary_text] + required :type, const: :summary_text + + # @!method initialize(text:, type: :summary_text) + # The completed summary part. + # + # @param text [String] + # @param type [Symbol, :summary_text] + end + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb new file mode 100644 index 00000000..d816e961 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The text delta that was added to the summary. + # + # @return [String] + required :delta, String + + # @!attribute item_id + # The ID of the item this summary text delta is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this summary text delta is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute summary_index + # The index of the summary part within the reasoning summary. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute type + # The type of the event. Always `response.reasoning_summary_text.delta`. + # + # @return [Symbol, :"response.reasoning_summary_text.delta"] + required :type, const: :"response.reasoning_summary_text.delta" + + # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary_text.delta") + # Emitted when a delta is added to a reasoning summary text. + # + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param summary_index [Integer] + # @param type [Symbol, :"response.reasoning_summary_text.delta"] + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb new file mode 100644 index 00000000..93e8cadc --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the item this summary text is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this summary text is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute summary_index + # The index of the summary part within the reasoning summary. 
+ # + # @return [Integer] + required :summary_index, Integer + + # @!attribute text + # The full text of the completed reasoning summary. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always `response.reasoning_summary_text.done`. + # + # @return [Symbol, :"response.reasoning_summary_text.done"] + required :type, const: :"response.reasoning_summary_text.done" + + # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary_text.done") + # Emitted when a reasoning summary text is completed. + # + # @param item_id [String] + # @param output_index [Integer] + # @param summary_index [Integer] + # @param text [String] + # @param type [Symbol, :"response.reasoning_summary_text.done"] + end + end + end +end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index d6c7fee7..95156d56 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -93,6 +93,22 @@ module ResponseStreamEvent # Emitted when an output item is marked done. variant :"response.output_item.done", -> { OpenAI::Models::Responses::ResponseOutputItemDoneEvent } + # Emitted when a new reasoning summary part is added. + variant :"response.reasoning_summary_part.added", + -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent } + + # Emitted when a reasoning summary part is completed. + variant :"response.reasoning_summary_part.done", + -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent } + + # Emitted when a delta is added to a reasoning summary text. + variant :"response.reasoning_summary_text.delta", + -> { OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent } + + # Emitted when a reasoning summary text is completed. + variant :"response.reasoning_summary_text.done", + -> { OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent } + # Emitted when there is a partial refusal text. 
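The four reasoning-summary events above are plain tagged variants, so a stream consumer can dispatch on `type`. A sketch under the assumption that `stream` is the `OpenAI::Internal::Stream` returned by `responses.stream_raw` (shown later in this patch):

    # Hypothetical consumer; the event types match the variants registered above.
    stream.each do |event|
      case event.type
      in :"response.reasoning_summary_text.delta"
        print event.delta                                  # incremental summary text
      in :"response.reasoning_summary_text.done"
        puts "\n-- summary #{event.summary_index} complete --"
      in :"response.reasoning_summary_part.added" | :"response.reasoning_summary_part.done"
        # event.part.text carries the summary_text payload
      else
        # all other ResponseStreamEvent variants
      end
    end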
variant :"response.refusal.delta", -> { OpenAI::Models::Responses::ResponseRefusalDeltaEvent } @@ -122,7 +138,7 @@ module ResponseStreamEvent -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, 
OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] end end end diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 070c5958..2996ff10 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -12,13 +12,12 @@ class Evals # We support several types of graders and datasources. For more information, see # the [Evals guide](https://platform.openai.com/docs/guides/evals). # - # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) + # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] - # @param testing_criteria [Array] + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] + # @param testing_criteria [Array] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] - # @param share_with_openai [Boolean] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::EvalCreateResponse] diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index ce05aca8..77980ec8 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -12,7 +12,7 @@ class Runs # @overload create(eval_id, data_source:, metadata: nil, name: nil, request_options: {}) # # @param eval_id [String] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index 47a1d9b0..38c9fe03 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -64,20 +64,30 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) # Organization owners can use this endpoint to delete a permission for a # fine-tuned model checkpoint. 
# - # @overload delete(fine_tuned_model_checkpoint, request_options: {}) + # @overload delete(permission_id, fine_tuned_model_checkpoint:, request_options: {}) # + # @param permission_id [String] # @param fine_tuned_model_checkpoint [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse] # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams - def delete(fine_tuned_model_checkpoint, params = {}) + def delete(permission_id, params) + parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams.dump_request(params) + fine_tuned_model_checkpoint = + parsed.delete(:fine_tuned_model_checkpoint) do + raise ArgumentError.new("missing required path argument #{_1}") + end @client.request( method: :delete, - path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], + path: [ + "fine_tuning/checkpoints/%1$s/permissions/%2$s", + fine_tuned_model_checkpoint, + permission_id + ], model: OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse, - options: params[:request_options] + options: options ) end diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 886a0efb..40bfb0e5 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -3,7 +3,7 @@ module OpenAI module Resources class Images - # Creates a variation of a given image. + # Creates a variation of a given image. This endpoint only supports `dall-e-2`. # # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # @@ -30,15 +30,17 @@ def create_variation(params) ) end - # Creates an edited or extended image given an original image and a prompt. + # Creates an edited or extended image given one or more source images and a + # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. # - # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO] + # @param image [Pathname, StringIO, Array] # @param prompt [String] # @param mask [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] # @param n [Integer, nil] + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] # @param user [String] @@ -60,13 +62,18 @@ def edit(params) end # Creates an image given a prompt. + # [Learn more](https://platform.openai.com/docs/guides/images). 
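With the reshaped delete above, the permission ID becomes the positional argument and the checkpoint moves into the params. Roughly (identifiers are placeholders; a configured `client` is assumed):

    # Illustrative call matching the new delete signature.
    client.fine_tuning.checkpoints.permissions.delete(
      "cp_perm_abc123",                                         # hypothetical permission_id
      fine_tuned_model_checkpoint: "ft:gpt-4o-mini:acme::ckpt"  # hypothetical checkpoint name
    )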
# - # @overload generate(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # # @param prompt [String] + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] # @param n [Integer, nil] - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] + # @param output_compression [Integer, nil] + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 05435651..70d045af 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -96,7 +96,7 @@ def create(params) # @param user [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index 091a1cab..ef54ba30 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -1018,18 +1018,6 @@ module OpenAI end end - module Tool - extend OpenAI::Internal::Type::Union - - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] - ) - end - def self.variants; end - end - class TruncationStrategy < OpenAI::Internal::Type::BaseModel # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/lib/openai/models/eval_create_params.rbi index dbec61df..32befe9a 100644 --- a/rbi/lib/openai/models/eval_create_params.rbi +++ b/rbi/lib/openai/models/eval_create_params.rbi @@ -11,7 +11,7 @@ module OpenAI returns( T.any( OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs ) ) end @@ -24,7 +24,9 @@ module OpenAI T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateParams::TestingCriterion::Python, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel ) ] ) @@ -47,61 +49,50 @@ module OpenAI sig { params(name: String).void } attr_writer :name - # Indicates whether the evaluation is shared with OpenAI. 
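A hedged end-to-end call against the widened generate overload above (client construction assumed; the prompt and values are placeholders):

    response = client.images.generate(
      prompt: "An isometric pixel-art workshop",
      model: :"gpt-image-1",
      n: 1,
      output_compression: 75,    # Integer, per the overload above
      output_format: :jpeg,
      quality: :medium,
      size: :"1024x1536"         # portrait; gpt-image-1 only
    )
    puts response.data&.length   # data is now optional on ImagesResponse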
- sig { returns(T.nilable(T::Boolean)) } - attr_reader :share_with_openai - - sig { params(share_with_openai: T::Boolean).void } - attr_writer :share_with_openai - sig do params( data_source_config: T.any( OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs ), testing_criteria: T::Array[ T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateParams::TestingCriterion::Python, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel ) ], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end - def self.new( - data_source_config:, - testing_criteria:, - metadata: nil, - name: nil, - share_with_openai: nil, - request_options: {} - ); end + def self.new(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}); end + sig do override .returns( { data_source_config: T.any( OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs ), testing_criteria: T::Array[ T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateParams::TestingCriterion::Python, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel ) ], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, request_options: OpenAI::RequestOptions } ) @@ -113,7 +104,7 @@ module OpenAI extend OpenAI::Internal::Type::Union class Custom < OpenAI::Internal::Type::BaseModel - # The json schema for the run data source items. + # The json schema for each row in the data source. sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item_schema @@ -121,7 +112,8 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - # Whether to include the sample schema in the data source. + # Whether the eval should expect you to populate the sample namespace (ie, by + # generating responses off of your data source) sig { returns(T.nilable(T::Boolean)) } attr_reader :include_sample_schema @@ -151,34 +143,32 @@ module OpenAI def to_hash; end end - class StoredCompletions < OpenAI::Internal::Type::BaseModel - # The type of data source. Always `stored_completions`. + class Logs < OpenAI::Internal::Type::BaseModel + # The type of data source. Always `logs`. sig { returns(Symbol) } attr_accessor :type - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - sig { returns(T.nilable(T::Hash[Symbol, String])) } - attr_accessor :metadata + # Metadata filters for the logs data source. 
+ sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :metadata + + sig { params(metadata: T::Hash[Symbol, T.anything]).void } + attr_writer :metadata # A data source config which specifies the metadata property of your stored # completions query. This is usually metadata like `usecase=chatbot` or # `prompt-version=v2`, etc. - sig { params(metadata: T.nilable(T::Hash[Symbol, String]), type: Symbol).returns(T.attached_class) } - def self.new(metadata: nil, type: :stored_completions); end + sig { params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } + def self.new(metadata: nil, type: :logs); end - sig { override.returns({type: Symbol, metadata: T.nilable(T::Hash[Symbol, String])}) } + sig { override.returns({type: Symbol, metadata: T::Hash[Symbol, T.anything]}) } def to_hash; end end sig do override .returns( - [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] ) end def self.variants; end @@ -190,13 +180,14 @@ module OpenAI extend OpenAI::Internal::Type::Union class LabelModel < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. sig do returns( T::Array[ T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ] ) @@ -231,8 +222,7 @@ module OpenAI T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ], labels: T::Array[String], @@ -252,8 +242,7 @@ module OpenAI input: T::Array[ T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ], labels: T::Array[String], @@ -266,6 +255,8 @@ module OpenAI end def to_hash; end + # A chat message that makes up the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. module Input extend OpenAI::Internal::Type::Union @@ -285,358 +276,455 @@ module OpenAI def to_hash; end end - class InputMessage < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content) } - attr_reader :content - + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. 
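Putting the reshaped config together: an eval over stored logs, graded by the python criterion introduced later in this patch, might be created like so (a sketch; the metadata filter, names, and grading script are placeholders, and the `grade(sample, item)` entrypoint is an assumption, not something this patch specifies):

    eval_obj = client.evals.create(
      name: "chatbot-regression",                # placeholder name
      data_source_config: {
        type: :logs,
        metadata: {usecase: "chatbot"}           # filter, per the Logs model above
      },
      testing_criteria: [
        {
          type: :python,                         # fields per the Python grader below
          name: "length-check",
          source: "def grade(sample, item):\n    return 1.0 if len(sample['output_text']) < 500 else 0.0",
          pass_threshold: 0.5
        }
      ]
    )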
sig do - params( - content: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, - OpenAI::Internal::AnyHash + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText ) ) - .void end - attr_writer :content + attr_accessor :content - # The role of the message. One of `user`, `system`, or `developer`. + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. sig do returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol + T.nilable(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol) ) end - attr_accessor :role + attr_reader :type - # The type of item, which is always `message`. sig do - returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + params( + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol ) + .void end - attr_accessor :type + attr_writer :type + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. sig do params( content: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, - OpenAI::Internal::AnyHash + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol ) .returns(T.attached_class) end - def self.new(content:, role:, type:); end + def self.new(content:, role:, type: nil); end sig do override .returns( { - content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::OrSymbol + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ), + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol } ) end def to_hash; end - class Content < OpenAI::Internal::Type::BaseModel - # The text content. 
- sig { returns(String) } - attr_accessor :text + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union - # The type of content, which is always `input_text`. - sig do - returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol - ) - end - attr_accessor :type + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text - sig do - params( - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol - ) - .returns(T.attached_class) + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end end - def self.new(text:, type:); end sig do override .returns( - { - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::OrSymbol - } + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] ) end - def to_hash; end - - # The type of content, which is always `input_text`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - INPUT_TEXT = - T.let( - :input_text, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol - ) - - sig do - override - .returns( - T::Array[ - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol - ] - ) - end - def self.values; end - end + def self.variants; end end - # The role of the message. One of `user`, `system`, or `developer`. + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
module Role extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role) } + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role) } OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( :user, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) sig do override .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol] + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol] ) end def self.values; end end - # The type of item, which is always `message`. + # The type of the message input. Always `message`. module Type extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type) } + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type) } OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( :message, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol ) sig do override .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol] + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol] ) end def self.values; end end end - class OutputMessage < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content) } - attr_reader :content - - sig do - params( - content: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, - OpenAI::Internal::AnyHash - ) + sig do + override + .returns( + [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem] ) - .void - end - attr_writer :content + end + def self.variants; end + end + end - # The role of the message. Must be `assistant` for output. - sig do - returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol - ) - end - attr_accessor :role + class Python < OpenAI::Internal::Type::BaseModel + # The name of the grader. + sig { returns(String) } + attr_accessor :name - # The type of item, which is always `message`. 
- sig do - returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol - ) - end - attr_accessor :type + # The source code of the python script. + sig { returns(String) } + attr_accessor :source - sig do - params( - content: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, - OpenAI::Internal::AnyHash - ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(content:, role:, type:); end + # The object type, which is always `python`. + sig { returns(Symbol) } + attr_accessor :type - sig do - override - .returns( - { - content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::OrSymbol - } - ) - end - def to_hash; end + # The image tag to use for the python script. + sig { returns(T.nilable(String)) } + attr_reader :image_tag - class Content < OpenAI::Internal::Type::BaseModel - # The text content. - sig { returns(String) } - attr_accessor :text + sig { params(image_tag: String).void } + attr_writer :image_tag - # The type of content, which is always `output_text`. - sig do - returns( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol - ) - end - attr_accessor :type + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold - sig do - params( - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(text:, type:); end + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold - sig do - override - .returns( - { - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::OrSymbol - } - ) - end - def to_hash; end + # A PythonGrader object that runs a python script on the input. + sig do + params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) + .returns(T.attached_class) + end + def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - # The type of content, which is always `output_text`. - module Type - extend OpenAI::Internal::Type::Enum + sig do + override.returns( + { + name: String, + source: String, + type: Symbol, + image_tag: String, + pass_threshold: Float + } + ) + end + def to_hash; end + end - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } + class ScoreModel < OpenAI::Internal::Type::BaseModel + # The input text. This may include template strings. + sig { returns(T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input]) } + attr_accessor :input - OUTPUT_TEXT = - T.let( - :output_text, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol - ) + # The model to use for the evaluation. 
+ sig { returns(String) } + attr_accessor :model - sig do - override - .returns( - T::Array[ - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol - ] - ) - end - def self.values; end - end - end + # The name of the grader. + sig { returns(String) } + attr_accessor :name - # The role of the message. Must be `assistant` for output. - module Role - extend OpenAI::Internal::Type::Enum + # The object type, which is always `score_model`. + sig { returns(Symbol) } + attr_accessor :type - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role) } - OrSymbol = T.type_alias { T.any(Symbol, String) } + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol - ) + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold - sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol] - ) - end - def self.values; end - end + # The range of the score. Defaults to `[0, 1]`. + sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range - # The type of item, which is always `message`. - module Type - extend OpenAI::Internal::Type::Enum + sig { params(range: T::Array[Float]).void } + attr_writer :range - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params - MESSAGE = - T.let( - :message, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol - ) + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params - sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol] - ) - end - def self.values; end - end + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[T.any(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + model: String, + name: String, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + input:, + model:, + name:, + pass_threshold: nil, + range: nil, + sampling_params: nil, + type: :score_model + ) + end + + sig do + override + .returns( + { + input: T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + ) end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. + sig { returns(T.nilable(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol)) } + attr_reader :type + + sig { params(type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end sig do override .returns( - [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage] + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + } ) end - def self.variants; end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let(:user, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let(:system, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let(:message, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] + ) + end + def self.values; end + end end end sig do override .returns( - [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel] ) end def self.variants; end diff --git a/rbi/lib/openai/models/eval_create_response.rbi b/rbi/lib/openai/models/eval_create_response.rbi index c35f9e03..c812a4d5 100644 --- a/rbi/lib/openai/models/eval_create_response.rbi +++ b/rbi/lib/openai/models/eval_create_response.rbi @@ -36,10 +36,6 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - # Indicates whether the evaluation is shared with OpenAI. - sig { returns(T::Boolean) } - attr_accessor :share_with_openai - # A list of testing criteria. 
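The score_model criterion above pairs a grading model with templated messages. An illustrative hash (the model name, texts, and `{{...}}` references are placeholders following the template-string convention documented above):

    # Sketch of a score_model testing criterion; message shape follows the Input model above.
    score_grader = {
      type: :score_model,
      name: "helpfulness",
      model: "gpt-4o-mini",                               # placeholder grading model
      input: [
        {role: :system, content: "Rate the answer from 0 to 1."},
        {role: :user, content: "{{sample.output_text}}"}  # assumed template reference
      ],
      range: [0.0, 1.0],
      pass_threshold: 0.7
    }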
sig do returns( @@ -47,7 +43,9 @@ module OpenAI T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel ) ] ) @@ -71,29 +69,23 @@ module OpenAI ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel ) ], object: Symbol ) .returns(T.attached_class) end - def self.new( - id:, - created_at:, - data_source_config:, - metadata:, - name:, - share_with_openai:, - testing_criteria:, - object: :eval - ); end + def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + end + sig do override .returns( @@ -104,12 +96,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel ) ] } @@ -135,10 +128,292 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + class Python < OpenAI::Internal::Type::BaseModel + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The source code of the python script. + sig { returns(String) } + attr_accessor :source + + # The object type, which is always `python`. + sig { returns(Symbol) } + attr_accessor :type + + # The image tag to use for the python script. + sig { returns(T.nilable(String)) } + attr_reader :image_tag + + sig { params(image_tag: String).void } + attr_writer :image_tag + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # A PythonGrader object that runs a python script on the input. + sig do + params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) + .returns(T.attached_class) + end + def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end + + sig do + override.returns( + { + name: String, + source: String, + type: Symbol, + image_tag: String, + pass_threshold: Float + } + ) + end + def to_hash; end + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # The input text. This may include template strings. + sig { returns(T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input]) } + attr_accessor :input + + # The model to use for the evaluation. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The object type, which is always `score_model`. + sig { returns(Symbol) } + attr_accessor :type + + # The threshold for the score. 
+ sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # The range of the score. Defaults to `[0, 1]`. + sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range + + sig { params(range: T::Array[Float]).void } + attr_writer :range + + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params + + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params + + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[T.any(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + model: String, + name: String, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + input:, + model:, + name:, + pass_threshold: nil, + range: nil, + sampling_params: nil, + type: :score_model + ) + end + + sig do + override + .returns( + { + input: T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + ) + end + attr_reader :type + + sig { params(type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let(:user, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + end + sig do override .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel] ) end def self.variants; end diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/lib/openai/models/eval_label_model_grader.rbi index 40ab0320..8e02688e 100644 --- a/rbi/lib/openai/models/eval_label_model_grader.rbi +++ b/rbi/lib/openai/models/eval_label_model_grader.rbi @@ -3,16 +3,7 @@ module OpenAI module Models class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - sig do - returns( - T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader::Input::Assistant, - OpenAI::Models::EvalLabelModelGrader::Input::InputMessage - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input]) } attr_accessor :input # The labels to assign to each item in the evaluation. @@ -39,13 +30,7 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader::Input::Assistant, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalLabelModelGrader::Input::InputMessage - ) - ], + input: T::Array[T.any(OpenAI::Models::EvalLabelModelGrader::Input, OpenAI::Internal::AnyHash)], labels: T::Array[String], model: String, name: String, @@ -60,12 +45,7 @@ module OpenAI override .returns( { - input: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader::Input::Assistant, - OpenAI::Models::EvalLabelModelGrader::Input::InputMessage - ) - ], + input: T::Array[OpenAI::Models::EvalLabelModelGrader::Input], labels: T::Array[String], model: String, name: String, @@ -76,238 +56,126 @@ module OpenAI end def to_hash; end - # An item can either be an input message or an output message. - module Input - extend OpenAI::Internal::Type::Union + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content - class InputMessage < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content) } - attr_reader :content + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) } + attr_accessor :role - sig do - params( - content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, OpenAI::Internal::AnyHash) - ) - .void - end - attr_writer :content + # The type of the message input. Always `message`. 
+ sig { returns(T.nilable(OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol)) } + attr_reader :type - # The role of the message. One of `user`, `system`, or `developer`. - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) } - attr_accessor :role + sig { params(type: OpenAI::Models::EvalLabelModelGrader::Input::Type::OrSymbol).void } + attr_writer :type - # The type of item, which is always `message`. - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) } - attr_accessor :type + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText + ), + role: OpenAI::Models::EvalLabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Models::EvalLabelModelGrader::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end - sig do - params( - content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, OpenAI::Internal::AnyHash), - role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::OrSymbol, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::OrSymbol + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText + ), + role: OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol, + type: OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol + } ) - .returns(T.attached_class) - end - def self.new(content:, role:, type:); end + end + def to_hash; end - sig do - override - .returns( - { - content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, - role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol - } - ) - end - def to_hash; end + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union - class Content < OpenAI::Internal::Type::BaseModel - # The text content. + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. sig { returns(String) } attr_accessor :text - # The type of content, which is always `input_text`. - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) } + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } attr_accessor :type - sig do - params( - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(text:, type:); end + # A text output from the model. 
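+            # For example (illustrative, assuming the runtime class mirrors
+            # this RBI): `OutputText.new(text: "Paris")`; per the signature
+            # below, `type` defaults to `:output_text` and can be omitted.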
+ sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end - sig do - override - .returns( - {text: String, type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol} - ) - end + sig { override.returns({text: String, type: Symbol}) } def to_hash; end - - # The type of content, which is always `input_text`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - INPUT_TEXT = - T.let(:input_text, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) - - sig do - override - .returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol]) - end - def self.values; end - end end - # The role of the message. One of `user`, `system`, or `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) - DEVELOPER = - T.let(:developer, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol]) } - def self.values; end - end - - # The type of item, which is always `message`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol]) } - def self.values; end - end - end - - class Assistant < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content) } - attr_reader :content - - sig do - params( - content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, OpenAI::Internal::AnyHash) - ) - .void - end - attr_writer :content - - # The role of the message. Must be `assistant` for output. - sig { returns(Symbol) } - attr_accessor :role - - # The type of item, which is always `message`. 
- sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) } - attr_accessor :type - - sig do - params( - content: T.any(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, OpenAI::Internal::AnyHash), - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::OrSymbol, - role: Symbol - ) - .returns(T.attached_class) - end - def self.new(content:, type:, role: :assistant); end - sig do override .returns( - { - content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, - role: Symbol, - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol - } + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] ) end - def to_hash; end - - class Content < OpenAI::Internal::Type::BaseModel - # The text content. - sig { returns(String) } - attr_accessor :text - - # The type of content, which is always `output_text`. - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) } - attr_accessor :type - - sig do - params( - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(text:, type:); end - - sig do - override - .returns( - {text: String, type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol} - ) - end - def to_hash; end - - # The type of content, which is always `output_text`. - module Type - extend OpenAI::Internal::Type::Enum + def self.variants; end + end - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum - OUTPUT_TEXT = - T.let(:output_text, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } - sig do - override - .returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol]) - end - def self.values; end - end - end + USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) - # The type of item, which is always `message`. - module Type - extend OpenAI::Internal::Type::Enum + sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol]) } + def self.values; end + end - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum - MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } - sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol]) } - def self.values; end - end - end + MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol) - sig do - override - .returns( - [OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage] - ) + sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol]) } + def self.values; end end - def self.variants; end end end end diff --git a/rbi/lib/openai/models/eval_list_response.rbi b/rbi/lib/openai/models/eval_list_response.rbi index 7ea59a6c..08a0827f 100644 --- a/rbi/lib/openai/models/eval_list_response.rbi +++ b/rbi/lib/openai/models/eval_list_response.rbi @@ -36,10 +36,6 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - # Indicates whether the evaluation is shared with OpenAI. - sig { returns(T::Boolean) } - attr_accessor :share_with_openai - # A list of testing criteria. sig do returns( @@ -47,7 +43,9 @@ module OpenAI T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::Python, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel ) ] ) @@ -71,29 +69,23 @@ module OpenAI ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::Python, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel ) ], object: Symbol ) .returns(T.attached_class) end - def self.new( - id:, - created_at:, - data_source_config:, - metadata:, - name:, - share_with_openai:, - testing_criteria:, - object: :eval - ); end + def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + end + sig do override .returns( @@ -104,12 +96,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::Python, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel ) ] } @@ -135,10 +128,286 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + class Python < OpenAI::Internal::Type::BaseModel + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The source code of the python script. + sig { returns(String) } + attr_accessor :source + + # The object type, which is always `python`. + sig { returns(Symbol) } + attr_accessor :type + + # The image tag to use for the python script. 
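+          # For instance (illustrative): `image_tag: "2025.05-v1"` would pin a
+          # specific execution image; the exact tag value here is an
+          # assumption, not part of the generated RBI.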
+ sig { returns(T.nilable(String)) } + attr_reader :image_tag + + sig { params(image_tag: String).void } + attr_writer :image_tag + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # A PythonGrader object that runs a python script on the input. + sig do + params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) + .returns(T.attached_class) + end + def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end + + sig do + override.returns( + { + name: String, + source: String, + type: Symbol, + image_tag: String, + pass_threshold: Float + } + ) + end + def to_hash; end + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # The input text. This may include template strings. + sig { returns(T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input]) } + attr_accessor :input + + # The model to use for the evaluation. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The object type, which is always `score_model`. + sig { returns(Symbol) } + attr_accessor :type + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # The range of the score. Defaults to `[0, 1]`. + sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range + + sig { params(range: T::Array[Float]).void } + attr_writer :range + + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params + + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params + + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[T.any(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + model: String, + name: String, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + input:, + model:, + name:, + pass_threshold: nil, + range: nil, + sampling_params: nil, + type: :score_model + ) + end + + sig do + override + .returns( + { + input: T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. 
+ sig do + returns( + T.nilable(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + ) + end + attr_reader :type + + sig { params(type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let(:user, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let(:system, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let(:message, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + end + sig do override .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel] ) end def self.variants; end diff --git a/rbi/lib/openai/models/eval_retrieve_response.rbi b/rbi/lib/openai/models/eval_retrieve_response.rbi index 721a617e..a84f25d8 100644 --- a/rbi/lib/openai/models/eval_retrieve_response.rbi +++ b/rbi/lib/openai/models/eval_retrieve_response.rbi @@ -36,10 +36,6 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - # Indicates whether the evaluation is shared with OpenAI. - sig { returns(T::Boolean) } - attr_accessor :share_with_openai - # A list of testing criteria. sig do returns( @@ -47,7 +43,9 @@ module OpenAI T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel ) ] ) @@ -71,29 +69,23 @@ module OpenAI ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel ) ], object: Symbol ) .returns(T.attached_class) end - def self.new( - id:, - created_at:, - data_source_config:, - metadata:, - name:, - share_with_openai:, - testing_criteria:, - object: :eval - ); end + def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + end + sig do override .returns( @@ -104,12 +96,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel ) ] } @@ -135,10 +128,303 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + class Python < OpenAI::Internal::Type::BaseModel + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The source code of the python script. + sig { returns(String) } + attr_accessor :source + + # The object type, which is always `python`. 
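+          # Since `type` defaults to `:python` in the constructor below, a
+          # grader can be built (illustratively, assuming the runtime class
+          # mirrors this RBI; the script is a placeholder) as:
+          #
+          #   OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python.new(
+          #     name: "exact_match",
+          #     source: "def grade(sample, item):\n    return 1.0"
+          #   )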
+ sig { returns(Symbol) } + attr_accessor :type + + # The image tag to use for the python script. + sig { returns(T.nilable(String)) } + attr_reader :image_tag + + sig { params(image_tag: String).void } + attr_writer :image_tag + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # A PythonGrader object that runs a python script on the input. + sig do + params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) + .returns(T.attached_class) + end + def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end + + sig do + override.returns( + { + name: String, + source: String, + type: Symbol, + image_tag: String, + pass_threshold: Float + } + ) + end + def to_hash; end + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # The input text. This may include template strings. + sig { returns(T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input]) } + attr_accessor :input + + # The model to use for the evaluation. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The object type, which is always `score_model`. + sig { returns(Symbol) } + attr_accessor :type + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # The range of the score. Defaults to `[0, 1]`. + sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range + + sig { params(range: T::Array[Float]).void } + attr_writer :range + + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params + + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params + + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[ + T.any( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input, + OpenAI::Internal::AnyHash + ) + ], + model: String, + name: String, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + input:, + model:, + name:, + pass_threshold: nil, + range: nil, + sampling_params: nil, + type: :score_model + ) + end + + sig do + override + .returns( + { + input: T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. 
+ sig do + returns( + T.nilable(OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + ) + end + attr_reader :type + + sig do + params(type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + end + sig do override .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel] ) end def self.variants; end diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/lib/openai/models/eval_text_similarity_grader.rbi index 460cab97..f3675e7a 100644 --- a/rbi/lib/openai/models/eval_text_similarity_grader.rbi +++ b/rbi/lib/openai/models/eval_text_similarity_grader.rbi @@ -3,8 +3,8 @@ module OpenAI module Models class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel - # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, - # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. sig { returns(OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol) } attr_accessor :evaluation_metric @@ -68,8 +68,8 @@ module OpenAI end def to_hash; end - # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, - # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. module EvaluationMetric extend OpenAI::Internal::Type::Enum @@ -87,7 +87,6 @@ module OpenAI ROUGE_4 = T.let(:rouge_4, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) ROUGE_5 = T.let(:rouge_5, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) ROUGE_L = T.let(:rouge_l, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - COSINE = T.let(:cosine, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol]) } def self.values; end diff --git a/rbi/lib/openai/models/eval_update_response.rbi b/rbi/lib/openai/models/eval_update_response.rbi index 0025fbad..53a41e65 100644 --- a/rbi/lib/openai/models/eval_update_response.rbi +++ b/rbi/lib/openai/models/eval_update_response.rbi @@ -36,10 +36,6 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - # Indicates whether the evaluation is shared with OpenAI. - sig { returns(T::Boolean) } - attr_accessor :share_with_openai - # A list of testing criteria. 
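+      # Illustrative (editorial) sketch of dispatching over the widened
+      # union, assuming the runtime classes mirror this RBI; the `handle_*`
+      # helpers are hypothetical:
+      #
+      #   eval.testing_criteria.each do |criterion|
+      #     case criterion
+      #     in OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python then handle_python(criterion)
+      #     in OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel then handle_score_model(criterion)
+      #     else handle_basic_grader(criterion)
+      #     end
+      #   end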
sig do returns( @@ -47,7 +43,9 @@ module OpenAI T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel ) ] ) @@ -71,29 +69,23 @@ module OpenAI ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel ) ], object: Symbol ) .returns(T.attached_class) end - def self.new( - id:, - created_at:, - data_source_config:, - metadata:, - name:, - share_with_openai:, - testing_criteria:, - object: :eval - ); end + def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + end + sig do override .returns( @@ -104,12 +96,13 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, - share_with_openai: T::Boolean, testing_criteria: T::Array[ T.any( OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel ) ] } @@ -135,10 +128,292 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + class Python < OpenAI::Internal::Type::BaseModel + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The source code of the python script. + sig { returns(String) } + attr_accessor :source + + # The object type, which is always `python`. + sig { returns(Symbol) } + attr_accessor :type + + # The image tag to use for the python script. + sig { returns(T.nilable(String)) } + attr_reader :image_tag + + sig { params(image_tag: String).void } + attr_writer :image_tag + + # The threshold for the score. + sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # A PythonGrader object that runs a python script on the input. + sig do + params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) + .returns(T.attached_class) + end + def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end + + sig do + override.returns( + { + name: String, + source: String, + type: Symbol, + image_tag: String, + pass_threshold: Float + } + ) + end + def to_hash; end + end + + class ScoreModel < OpenAI::Internal::Type::BaseModel + # The input text. This may include template strings. + sig { returns(T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input]) } + attr_accessor :input + + # The model to use for the evaluation. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The object type, which is always `score_model`. + sig { returns(Symbol) } + attr_accessor :type + + # The threshold for the score. 
+ sig { returns(T.nilable(Float)) } + attr_reader :pass_threshold + + sig { params(pass_threshold: Float).void } + attr_writer :pass_threshold + + # The range of the score. Defaults to `[0, 1]`. + sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range + + sig { params(range: T::Array[Float]).void } + attr_writer :range + + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params + + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params + + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[T.any(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + model: String, + name: String, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + input:, + model:, + name:, + pass_threshold: nil, + range: nil, + sampling_params: nil, + type: :score_model + ) + end + + sig do + override + .returns( + { + input: T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash; end + + class Input < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + ) + end + attr_reader :type + + sig { params(type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ), + role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let(:user, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] + ) + end + def self.values; end + end + end + end + sig do override .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel] ) end def self.variants; end diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi index 83b73c77..6e2ac631 100644 --- a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -4,20 +4,6 @@ module OpenAI module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - sig do - returns( - T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ) - ) - end - attr_accessor :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). - sig { returns(String) } - attr_accessor :model - # A StoredCompletionsRunDataSource configuration describing a set of filters sig do returns( @@ -34,6 +20,37 @@ module OpenAI sig { returns(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol) } attr_accessor :type + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + sig { returns(T.nilable(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams)) } attr_reader :sampling_params @@ -51,12 +68,6 @@ module OpenAI # A CompletionsRunDataSource object describing a model sampling configuration. 
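+        # With `input_messages` and `model` now optional, a minimal
+        # construction (illustrative, assuming the runtime class mirrors this
+        # RBI; the file ID is a placeholder) would be:
+        #
+        #   OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource.new(
+        #     source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID.new(id: "file-abc123"),
+        #     type: :completions
+        #   )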
sig do params( - input_messages: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ), - model: String, source: T.any( OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Internal::AnyHash, @@ -64,6 +75,12 @@ module OpenAI OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + input_messages: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ), + model: String, sampling_params: T.any( OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, OpenAI::Internal::AnyHash @@ -71,29 +88,200 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input_messages:, model:, source:, type:, sampling_params: nil); end + def self.new(source:, type:, input_messages: nil, model: nil, sampling_params: nil); end sig do override .returns( { - input_messages: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ), - model: String, source: T.any( OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + input_messages: T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ), + model: String, sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } ) end def to_hash; end + # A StoredCompletionsRunDataSource configuration describing a set of filters + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content]) } + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + class StoredCompletions < OpenAI::Internal::Type::BaseModel + # The type of source. Always `stored_completions`. + sig { returns(Symbol) } + attr_accessor :type + + # An optional Unix timestamp to filter items created after this time. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # An optional Unix timestamp to filter items created before this time. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # An optional maximum number of items to return. + sig { returns(T.nilable(Integer)) } + attr_accessor :limit + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # An optional model to filter by (e.g., 'gpt-4o'). 
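+          # Reading the signature below (illustrative note): `model: nil`, the
+          # default, applies no model filter, while a string such as "gpt-4o"
+          # restricts results to completions generated by that model.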
+ sig { returns(T.nilable(String)) } + attr_accessor :model + + # A StoredCompletionsRunDataSource configuration describing a set of filters + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + limit: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: T.nilable(String), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + created_after: nil, + created_before: nil, + limit: nil, + metadata: nil, + model: nil, + type: :stored_completions + ) + end + + sig do + override + .returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + limit: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: T.nilable(String) + } + ) + end + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + ) + end + def self.variants; end + end + + # The type of run data source. Always `completions`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + COMPLETIONS = + T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol]) } + def self.values; end + end + module InputMessages extend OpenAI::Internal::Type::Union @@ -104,9 +292,8 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ] ) @@ -121,10 +308,9 @@ module OpenAI params( template: T::Array[ T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Responses::EasyInputMessage, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ], type: Symbol @@ -139,9 +325,8 @@ module OpenAI { template: T::Array[ T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + OpenAI::Models::Responses::EasyInputMessage, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ], type: Symbol @@ -150,380 +335,184 @@ module OpenAI end def to_hash; end + # A message input to the model with a role indicating 
instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. module Template extend OpenAI::Internal::Type::Union - class ChatMessage < OpenAI::Internal::Type::BaseModel - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - - sig { override.returns({content: String, role: String}) } - def to_hash; end - end - - class InputMessage < OpenAI::Internal::Type::BaseModel + class Message < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. sig do returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) ) end - attr_reader :content + attr_accessor :content + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. sig do - params( - content: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, - OpenAI::Internal::AnyHash - ) + returns( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol ) - .void end - attr_writer :content + attr_accessor :role - # The role of the message. One of `user`, `system`, or `developer`. + # The type of the message input. Always `message`. sig do returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol + T.nilable( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ) ) end - attr_accessor :role + attr_reader :type - # The type of item, which is always `message`. sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + params( + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol ) + .void end - attr_accessor :type + attr_writer :type + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
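+ # An illustrative sketch (hypothetical values; the constructor mirrors the
+ # `sig` that follows):
+ #
+ #   Message.new(
+ #     content: "Grade this answer: {{item.answer}}",
+ #     role: :user
+ #   )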
sig do params( content: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, - OpenAI::Internal::AnyHash + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText ), - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol ) .returns(T.attached_class) end - def self.new(content:, role:, type:); end + def self.new(content:, role:, type: nil); end sig do override .returns( { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::OrSymbol + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ), + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol } ) end def to_hash; end - class Content < OpenAI::Internal::Type::BaseModel - # The text content. - sig { returns(String) } - attr_accessor :text + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union - # The type of content, which is always `input_text`. - sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol - ) - end - attr_accessor :type + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text - sig do - params( - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol - ) - .returns(T.attached_class) + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end end - def self.new(text:, type:); end sig do override .returns( - { - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::OrSymbol - } - ) - end - def to_hash; end - - # The type of content, which is always `input_text`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - INPUT_TEXT = - T.let( - :input_text, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] ) - - sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol - ] - ) - end - def self.values; end end + def self.variants; end end - # The role of the message. One of `user`, `system`, or `developer`. + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. module Role extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role) + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role) end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( :user, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) sig do override .returns( T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ] ) end def self.values; end end - # The type of item, which is always `message`. + # The type of the message input. Always `message`. 
module Type extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type) + T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type) end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( :message, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol ) sig do override .returns( T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol - ] - ) - end - def self.values; end - end - end - - class OutputMessage < OpenAI::Internal::Type::BaseModel - sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content - ) - end - attr_reader :content - - sig do - params( - content: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, - OpenAI::Internal::AnyHash - ) - ) - .void - end - attr_writer :content - - # The role of the message. Must be `assistant` for output. - sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol - ) - end - attr_accessor :role - - # The type of item, which is always `message`. - sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol - ) - end - attr_accessor :type - - sig do - params( - content: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, - OpenAI::Internal::AnyHash - ), - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(content:, role:, type:); end - - sig do - override - .returns( - { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::OrSymbol - } - ) - end - def to_hash; end - - class Content < OpenAI::Internal::Type::BaseModel - # The text content. - sig { returns(String) } - attr_accessor :text - - # The type of content, which is always `output_text`. 
- sig do - returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol - ) - end - attr_accessor :type - - sig do - params( - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol - ) - .returns(T.attached_class) - end - def self.new(text:, type:); end - - sig do - override - .returns( - { - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::OrSymbol - } - ) - end - def to_hash; end - - # The type of content, which is always `output_text`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - OUTPUT_TEXT = - T.let( - :output_text, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol - ) - - sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol - ] - ) - end - def self.values; end - end - end - - # The role of the message. Must be `assistant` for output. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol - ) - - sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol - ] - ) - end - def self.values; end - end - - # The type of item, which is always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol - ) - - sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol ] ) end @@ -534,7 +523,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage] + [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message] ) end def self.variants; end @@ -566,177 +555,6 @@ module OpenAI def self.variants; end end - # A StoredCompletionsRunDataSource configuration describing a set of filters - module Source - extend OpenAI::Internal::Type::Union - - class FileContent < OpenAI::Internal::Type::BaseModel - # The content of the jsonl file. - sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content]) } - attr_accessor :content - - # The type of jsonl source. Always `file_content`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], - type: Symbol - ) - .returns(T.attached_class) - end - def self.new(content:, type: :file_content); end - - sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - type: Symbol - } - ) - end - def to_hash; end - - class Content < OpenAI::Internal::Type::BaseModel - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil); end - - sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) - end - def to_hash; end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end - end - - class StoredCompletions < OpenAI::Internal::Type::BaseModel - # An optional Unix timestamp to filter items created after this time. 
- sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # An optional Unix timestamp to filter items created before this time. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # An optional maximum number of items to return. - sig { returns(T.nilable(Integer)) } - attr_accessor :limit - - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - sig { returns(T.nilable(T::Hash[Symbol, String])) } - attr_accessor :metadata - - # An optional model to filter by (e.g., 'gpt-4o'). - sig { returns(T.nilable(String)) } - attr_accessor :model - - # The type of source. Always `stored_completions`. - sig { returns(Symbol) } - attr_accessor :type - - # A StoredCompletionsRunDataSource configuration describing a set of filters - sig do - params( - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - limit: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(String), - type: Symbol - ) - .returns(T.attached_class) - end - def self.new( - created_after:, - created_before:, - limit:, - metadata:, - model:, - type: :stored_completions - ) - end - - sig do - override - .returns( - { - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - limit: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(String), - type: Symbol - } - ) - end - def to_hash; end - end - - sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - ) - end - def self.variants; end - end - - # The type of run data source. Always `completions`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - COMPLETIONS = - T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol]) } - def self.values; end - end - class SamplingParams < OpenAI::Internal::Type::BaseModel # The maximum number of tokens in the generated output. 
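+ # For instance (sketch only; hypothetical values, assuming the usual
+ # optional keyword arguments on `SamplingParams.new`):
+ #
+ #   SamplingParams.new(max_completion_tokens: 256, temperature: 0.2)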
sig { returns(T.nilable(Integer)) } diff --git a/rbi/lib/openai/models/evals/run_cancel_response.rbi b/rbi/lib/openai/models/evals/run_cancel_response.rbi index bc2ac3bd..35e93b1b 100644 --- a/rbi/lib/openai/models/evals/run_cancel_response.rbi +++ b/rbi/lib/openai/models/evals/run_cancel_response.rbi @@ -17,7 +17,8 @@ module OpenAI returns( T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions ) ) end @@ -91,7 +92,8 @@ module OpenAI data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions ), error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), eval_id: String, @@ -131,7 +133,8 @@ module OpenAI created_at: Integer, data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions ), error: OpenAI::Models::Evals::EvalAPIError, eval_id: String, @@ -153,10 +156,661 @@ module OpenAI module DataSource extend OpenAI::Internal::Type::Union + class Completions < OpenAI::Internal::Type::BaseModel + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `completions`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig { returns(T.nilable(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams)) } + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
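+ # A hedged construction sketch (values hypothetical; keyword arguments
+ # mirror the `sig` that follows):
+ #
+ #   Completions.new(
+ #     source: Source::FileID.new(id: "file-abc123"),
+ #     model: "o3-mini"
+ #   )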
+ sig do + params( + source: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + ), + input_messages: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + end + + sig do + override + .returns( + { + source: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + ), + type: Symbol, + input_messages: T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash; end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig do + returns( + T::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, + T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. 
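+ # For example (sketch, hypothetical ID): `FileID.new(id: "file-abc123")`;
+ # `type` defaults to `:file_id`.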
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ sig { params(id: String, type: Symbol).returns(T.attached_class) }
+ def self.new(id:, type: :file_id); end
+
+ sig { override.returns({id: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ class Responses < OpenAI::Internal::Type::BaseModel
+ # The type of run data source. Always `responses`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # Whether to allow parallel tool calls. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T::Boolean)) }
+ attr_accessor :allow_parallel_tool_calls
+
+ # Only include items created after this timestamp (inclusive). This is a query
+ # parameter used to select responses.
+ sig { returns(T.nilable(Integer)) }
+ attr_accessor :created_after
+
+ # Only include items created before this timestamp (inclusive). This is a query
+ # parameter used to select responses.
+ sig { returns(T.nilable(Integer)) }
+ attr_accessor :created_before
+
+ # Whether the response has tool calls. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T::Boolean)) }
+ attr_accessor :has_tool_calls
+
+ # Optional search string for instructions. This is a query parameter used to
+ # select responses.
+ sig { returns(T.nilable(String)) }
+ attr_accessor :instructions_search
+
+ # Metadata filter for the responses. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T.anything)) }
+ attr_accessor :metadata
+
+ # The name of the model to find responses for. This is a query parameter used to
+ # select responses.
+ sig { returns(T.nilable(String)) }
+ attr_accessor :model
+
+ # Optional reasoning effort parameter. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) }
+ attr_accessor :reasoning_effort
+
+ # Sampling temperature. This is a query parameter used to select responses.
+ sig { returns(T.nilable(Float)) }
+ attr_accessor :temperature
+
+ # Nucleus sampling parameter. This is a query parameter used to select responses.
+ sig { returns(T.nilable(Float)) }
+ attr_accessor :top_p
+
+ # List of user identifiers. This is a query parameter used to select responses.
+ sig { returns(T.nilable(T::Array[String])) }
+ attr_accessor :users
+
+ # An EvalResponsesSource object describing a run data source configuration.
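+ # For example, a sketch of selecting recent gpt-4o responses (hypothetical
+ # values; every filter below is optional per the `sig` that follows):
+ #
+ #   Responses.new(model: "gpt-4o", created_after: 1_700_000_000)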
+ sig do + params( + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + allow_parallel_tool_calls: nil, + created_after: nil, + created_before: nil, + has_tool_calls: nil, + instructions_search: nil, + metadata: nil, + model: nil, + reasoning_effort: nil, + temperature: nil, + top_p: nil, + users: nil, + type: :responses + ); end + sig do + override + .returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + ) + end + def self.variants; end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + class Template < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(template:, type: :template); end + + sig do + override + .returns( + { + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash; end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. 
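+ # For example (sketch): `OutputText.new(text: "The answer is 4.")`;
+ # `type` defaults to `:output_text`.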
+ sig { params(text: String, type: Symbol).returns(T.attached_class) }
+ def self.new(text:, type: :output_text); end
+
+ sig { override.returns({text: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ sig do
+ override
+ .returns(
+ [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText]
+ )
+ end
+ def self.variants; end
+ end
+
+ # The role of the message input. One of `user`, `assistant`, `system`, or
+ # `developer`.
+ module Role
+ extend OpenAI::Internal::Type::Enum
+
+ TaggedSymbol =
+ T.type_alias do
+ T.all(Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role)
+ end
+ OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+ USER =
+ T.let(
+ :user,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ ASSISTANT =
+ T.let(
+ :assistant,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ SYSTEM =
+ T.let(
+ :system,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ DEVELOPER =
+ T.let(
+ :developer,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+
+ sig do
+ override
+ .returns(
+ T::Array[
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ ]
+ )
+ end
+ def self.values; end
+ end
+
+ # The type of the message input. Always `message`.
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ TaggedSymbol =
+ T.type_alias do
+ T.all(Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type)
+ end
+ OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+ MESSAGE =
+ T.let(
+ :message,
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+ )
+
+ sig do
+ override
+ .returns(
+ T::Array[
+ OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+ ]
+ )
+ end
+ def self.values; end
+ end
+ end
+
+ sig do
+ override
+ .returns(
+ [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem]
+ )
+ end
+ def self.variants; end
+ end
+ end
+
+ class ItemReference < OpenAI::Internal::Type::BaseModel
+ # A reference to a variable in the "item" namespace, i.e. "item.name".
+ sig { returns(String) }
+ attr_accessor :item_reference
+
+ # The type of input messages. Always `item_reference`.
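+ # For example (sketch): `ItemReference.new(item_reference: "item.name")`;
+ # `type` defaults to `:item_reference`.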
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ sig { params(item_reference: String, type: Symbol).returns(T.attached_class) }
+ def self.new(item_reference:, type: :item_reference); end
+
+ sig { override.returns({item_reference: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ sig do
+ override
+ .returns(
+ [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference]
+ )
+ end
+ def self.variants; end
+ end
+
+ class SamplingParams < OpenAI::Internal::Type::BaseModel
+ # The maximum number of tokens in the generated output.
+ sig { returns(T.nilable(Integer)) }
+ attr_reader :max_completion_tokens
+
+ sig { params(max_completion_tokens: Integer).void }
+ attr_writer :max_completion_tokens
+
+ # A seed value to initialize the randomness during sampling.
+ sig { returns(T.nilable(Integer)) }
+ attr_reader :seed
+
+ sig { params(seed: Integer).void }
+ attr_writer :seed
+
+ # A higher temperature increases randomness in the outputs.
+ sig { returns(T.nilable(Float)) }
+ attr_reader :temperature
+
+ sig { params(temperature: Float).void }
+ attr_writer :temperature
+
+ # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ sig { returns(T.nilable(Float)) }
+ attr_reader :top_p
+
+ sig { params(top_p: Float).void }
+ attr_writer :top_p
+
+ sig do
+ params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float)
+ .returns(T.attached_class)
+ end
+ def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end
+
+ sig do
+ override.returns(
+ {
+ max_completion_tokens: Integer,
+ seed: Integer,
+ temperature: Float,
+ top_p: Float
+ }
+ )
+ end
+ def to_hash; end
+ end
+ end
+
 sig do
 override
 .returns(
- [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource]
+ [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions]
 )
 end
 def self.variants; end
diff --git a/rbi/lib/openai/models/evals/run_create_params.rbi b/rbi/lib/openai/models/evals/run_create_params.rbi
index 0f0fd8b0..c995c3ef 100644
--- a/rbi/lib/openai/models/evals/run_create_params.rbi
+++ b/rbi/lib/openai/models/evals/run_create_params.rbi
@@ -12,7 +12,8 @@ module OpenAI
 returns(
 T.any(
 OpenAI::Models::Evals::CreateEvalJSONLRunDataSource,
- OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource
+ OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource,
+ OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource
 )
 )
 end
@@ -39,7 +40,8 @@ module OpenAI
 data_source: T.any(
 OpenAI::Models::Evals::CreateEvalJSONLRunDataSource,
 OpenAI::Internal::AnyHash,
- OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource
+ OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource,
+ OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource
 ),
 metadata: T.nilable(T::Hash[Symbol, String]),
 name: String,
@@ -55,7 +57,8 @@ module OpenAI
 {
 data_source: T.any(
 OpenAI::Models::Evals::CreateEvalJSONLRunDataSource,
- OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource
+ OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource,
+ OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource
 ),
 metadata: T.nilable(T::Hash[Symbol, String]),
 name: String,
@@ -69,10 +72,697 @@ module
OpenAI module DataSource extend OpenAI::Internal::Type::Union + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `completions`. + sig do + returns( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
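+ # A hedged construction sketch (hypothetical values; `type` takes the
+ # `:completions` enum value declared in `Type` below):
+ #
+ #   CreateEvalResponsesRunDataSource.new(
+ #     source: Source::FileID.new(id: "file-abc123"),
+ #     type: :completions
+ #   )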
+ sig do + params( + source: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ), + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ), + model: String, + sampling_params: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .returns(T.attached_class) + end + def self.new(source:, type:, input_messages: nil, model: nil, sampling_params: nil); end + + sig do + override + .returns( + { + source: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ), + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ), + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + ) + end + def to_hash; end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, + T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + class Responses < OpenAI::Internal::Type::BaseModel + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :allow_parallel_tool_calls + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional search string for instructions. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. 
This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + allow_parallel_tool_calls: nil, + created_after: nil, + created_before: nil, + has_tool_calls: nil, + instructions_search: nil, + metadata: nil, + model: nil, + reasoning_effort: nil, + temperature: nil, + top_p: nil, + users: nil, + type: :responses + ); end + sig do + override + .returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + ) + end + def self.variants; end + end + + # The type of run data source. Always `completions`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + COMPLETIONS = + T.let( + :completions, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol] + ) + end + def self.values; end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + class Template < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
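+ # An illustrative sketch using the `ChatMessage` variant (values
+ # hypothetical):
+ #
+ #   Template.new(
+ #     template: [
+ #       ChatMessage.new(content: "You grade answers.", role: "system"),
+ #       ChatMessage.new(content: "Grade: {{item.answer}}", role: "user")
+ #     ]
+ #   )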
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(template:, type: :template); end + + sig do + override + .returns( + { + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash; end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
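+ # For example (sketch; `role` accepts the enum symbols declared below):
+ #
+ #   EvalItem.new(content: "You are a grader.", role: :system)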
+ sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
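
# Illustrative sketch: an EvalItem as typed above, with a plain template
# string for `content` and a role symbol from the enum defined just below.
# The values are hypothetical; `type:` may be omitted.
eval_item =
  OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem.new(
    content: "Grade the answer in {{item.answer}}.", # may reference the "item" namespace
    role: :user                                      # Role::OrSymbol also accepts strings
  )
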
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem] + ) + end + def self.variants; end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + def self.new(item_reference:, type: :item_reference); end + + sig { override.returns({item_reference: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + ) + end + def self.variants; end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + # The maximum number of tokens in the generated output. 
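
# Illustrative sketch: the two InputMessages variants closed out above, namely
# an inline template (plain hashes are accepted wherever the sigs union in
# OpenAI::Internal::AnyHash) or a reference into the "item" namespace. The
# reference string here is hypothetical.
input_messages = OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages
template_form  = input_messages::Template.new(template: [{content: "You grade answers.", role: "system"}])
reference_form = input_messages::ItemReference.new(item_reference: "item.history")
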
+ sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) + .returns(T.attached_class) + end + def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash; end + end + end + sig do override .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] ) end def self.variants; end diff --git a/rbi/lib/openai/models/evals/run_create_response.rbi b/rbi/lib/openai/models/evals/run_create_response.rbi index 8999012b..40364127 100644 --- a/rbi/lib/openai/models/evals/run_create_response.rbi +++ b/rbi/lib/openai/models/evals/run_create_response.rbi @@ -17,7 +17,8 @@ module OpenAI returns( T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions ) ) end @@ -91,7 +92,8 @@ module OpenAI data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions ), error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), eval_id: String, @@ -131,7 +133,8 @@ module OpenAI created_at: Integer, data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions ), error: OpenAI::Models::Evals::EvalAPIError, eval_id: String, @@ -153,10 +156,661 @@ module OpenAI module DataSource extend OpenAI::Internal::Type::Union + class Completions < OpenAI::Internal::Type::BaseModel + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `completions`. 
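
# Illustrative sketch: the full create-run shape for the params side defined
# in run_create_params.rbi above, assuming the runs resource takes the eval id
# positionally (as the RunCreateParams name suggests). All ids and values are
# hypothetical; plain hashes stand in for the model classes.
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
run = client.evals.runs.create(
  "eval_123",
  data_source: {
    type: :completions, # note: the responses-backed data source still reports `completions`
    source: {type: :responses, model: "gpt-4o-mini", created_after: 1_700_000_000},
    input_messages: {type: :template, template: [{content: "Grade {{item.answer}}.", role: "user"}]},
    model: "gpt-4o-mini",
    sampling_params: {max_completion_tokens: 256, seed: 42, temperature: 0.2, top_p: 1.0}
  }
)
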
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig { returns(T.nilable(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams)) } + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. + sig do + params( + source: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + ), + input_messages: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + end + + sig do + override + .returns( + { + source: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + ), + type: Symbol, + input_messages: T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash; end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig do + returns( + T::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
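
# Illustrative sketch: the two file-backed Source variants, for comparison
# with `responses`. The Content rows (`item`, optional `sample`) are typed
# just below; the file id and row data here are hypothetical.
inline_source   = {type: :file_content, content: [{item: {answer: "42"}, sample: {output: "forty-two"}}]}
uploaded_source = {type: :file_id, id: "file_abc"}
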
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, + T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + class Responses < OpenAI::Internal::Type::BaseModel + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :allow_parallel_tool_calls + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional search string for instructions. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. 
This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + allow_parallel_tool_calls: nil, + created_after: nil, + created_before: nil, + has_tool_calls: nil, + instructions_search: nil, + metadata: nil, + model: nil, + reasoning_effort: nil, + temperature: nil, + top_p: nil, + users: nil, + type: :responses + ); end + sig do + override + .returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + ) + end + def self.variants; end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + class Template < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(template:, type: :template); end + + sig do + override + .returns( + { + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash; end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. 
Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. 
Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + ) + end + def self.variants; end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + def self.new(item_reference:, type: :item_reference); end + + sig { override.returns({item_reference: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] + ) + end + def self.variants; end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) + .returns(T.attached_class) + end + def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash; end + end + end + sig do override .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] ) end def self.variants; end diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/lib/openai/models/evals/run_list_params.rbi index 5e68c583..c3cf6599 100644 --- a/rbi/lib/openai/models/evals/run_list_params.rbi +++ b/rbi/lib/openai/models/evals/run_list_params.rbi @@ -29,8 +29,8 @@ module OpenAI sig { params(order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol).void } attr_writer :order - # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | - # "canceled". + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. sig { returns(T.nilable(OpenAI::Models::Evals::RunListParams::Status::OrSymbol)) } attr_reader :status @@ -78,8 +78,8 @@ module OpenAI def self.values; end end - # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | - # "canceled". + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. 
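
# Illustrative sketch: filtering runs with the reworded status enum below.
# Reuses the hypothetical client and eval id from the earlier sketch and
# assumes the list method mirrors RunListParams (eval id positional, filters
# as keywords).
completed_runs = client.evals.runs.list("eval_123", status: :completed, order: :desc)
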
module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/evals/run_list_response.rbi b/rbi/lib/openai/models/evals/run_list_response.rbi index 01e8f33b..f264782f 100644 --- a/rbi/lib/openai/models/evals/run_list_response.rbi +++ b/rbi/lib/openai/models/evals/run_list_response.rbi @@ -17,7 +17,8 @@ module OpenAI returns( T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions ) ) end @@ -91,7 +92,8 @@ module OpenAI data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions ), error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), eval_id: String, @@ -131,7 +133,8 @@ module OpenAI created_at: Integer, data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions ), error: OpenAI::Models::Evals::EvalAPIError, eval_id: String, @@ -153,10 +156,661 @@ module OpenAI module DataSource extend OpenAI::Internal::Type::Union + class Completions < OpenAI::Internal::Type::BaseModel + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `completions`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig { returns(T.nilable(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams)) } + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + ), + input_messages: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + end + + sig do + override + .returns( + { + source: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + ), + type: Symbol, + input_messages: T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash; end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig do + returns( + T::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, + T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new(id:, type: :file_id); end + + sig { override.returns({id: String, type: Symbol}) } + def to_hash; end + end + + class Responses < OpenAI::Internal::Type::BaseModel + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :allow_parallel_tool_calls + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional search string for instructions. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. 
+ sig do + params( + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + allow_parallel_tool_calls: nil, + created_after: nil, + created_before: nil, + has_tool_calls: nil, + instructions_search: nil, + metadata: nil, + model: nil, + reasoning_effort: nil, + temperature: nil, + top_p: nil, + users: nil, + type: :responses + ); end + sig do + override + .returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] + ) + end + def self.variants; end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + class Template < OpenAI::Internal::Type::BaseModel + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(template:, type: :template); end + + sig do + override + .returns( + { + template: T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash; end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. 
+ sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :output_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + ) + end + def self.variants; end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values; end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override + .returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values; end + end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + ) + end + def self.variants; end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + def self.new(item_reference:, type: :item_reference); end + + sig { override.returns({item_reference: String, type: Symbol}) } + def to_hash; end + end + + sig do + override + .returns( + [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] + ) + end + def self.variants; end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) + .returns(T.attached_class) + end + def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash; end + end + end + sig do override .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] ) end def self.variants; end diff --git a/rbi/lib/openai/models/evals/run_retrieve_response.rbi b/rbi/lib/openai/models/evals/run_retrieve_response.rbi index 85db5714..405a83cf 100644 --- a/rbi/lib/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/lib/openai/models/evals/run_retrieve_response.rbi @@ -17,7 +17,8 @@ module OpenAI returns( T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions ) ) end @@ -91,7 +92,8 @@ module OpenAI data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions ), error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), eval_id: String, @@ -131,7 +133,8 @@ module OpenAI created_at: Integer, data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions ), error: OpenAI::Models::Evals::EvalAPIError, eval_id: String, @@ -153,10 
+156,661 @@ module OpenAI module DataSource extend OpenAI::Internal::Type::Union + class Completions < OpenAI::Internal::Type::BaseModel + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `completions`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ) + ) + .void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig { returns(T.nilable(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams)) } + attr_reader :sampling_params + + sig do + params( + sampling_params: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ) + ) + .void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + ), + input_messages: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ), + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + end + + sig do + override + .returns( + { + source: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + ), + type: Symbol, + input_messages: T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ), + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash; end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + class FileContent < OpenAI::Internal::Type::BaseModel + # The content of the jsonl file. + sig do + returns( + T::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + ], + type: Symbol + ) + .returns(T.attached_class) + end + def self.new(content:, type: :file_content); end + + sig do + override + .returns( + { + content: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], + type: Symbol + } + ) + end + def to_hash; end + + class Content < OpenAI::Internal::Type::BaseModel + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, + T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil); end + + sig do + override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + end + def to_hash; end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. 
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ sig { params(id: String, type: Symbol).returns(T.attached_class) }
+ def self.new(id:, type: :file_id); end
+
+ sig { override.returns({id: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ class Responses < OpenAI::Internal::Type::BaseModel
+ # The type of run data source. Always `responses`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # Whether to allow parallel tool calls. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T::Boolean)) }
+ attr_accessor :allow_parallel_tool_calls
+
+ # Only include items created after this timestamp (inclusive). This is a query
+ # parameter used to select responses.
+ sig { returns(T.nilable(Integer)) }
+ attr_accessor :created_after
+
+ # Only include items created before this timestamp (inclusive). This is a query
+ # parameter used to select responses.
+ sig { returns(T.nilable(Integer)) }
+ attr_accessor :created_before
+
+ # Whether the response has tool calls. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T::Boolean)) }
+ attr_accessor :has_tool_calls
+
+ # Optional search string for instructions. This is a query parameter used to
+ # select responses.
+ sig { returns(T.nilable(String)) }
+ attr_accessor :instructions_search
+
+ # Metadata filter for the responses. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(T.anything)) }
+ attr_accessor :metadata
+
+ # The name of the model to find responses for. This is a query parameter used to
+ # select responses.
+ sig { returns(T.nilable(String)) }
+ attr_accessor :model
+
+ # Optional reasoning effort parameter. This is a query parameter used to select
+ # responses.
+ sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) }
+ attr_accessor :reasoning_effort
+
+ # Sampling temperature. This is a query parameter used to select responses.
+ sig { returns(T.nilable(Float)) }
+ attr_accessor :temperature
+
+ # Nucleus sampling parameter. This is a query parameter used to select responses.
+ sig { returns(T.nilable(Float)) }
+ attr_accessor :top_p
+
+ # List of user identifiers. This is a query parameter used to select responses.
+ sig { returns(T.nilable(T::Array[String])) }
+ attr_accessor :users
+
+ # An EvalResponsesSource object describing a run data source configuration.
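+ # As a hedged sketch of the filter shape (values are made up), a hash such
+ # as
+ #
+ #   {type: :responses, model: "gpt-4o", created_after: 1_700_000_000}
+ #
+ # would select responses produced by that model after the given timestamp.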
+ sig do
+ params(
+ allow_parallel_tool_calls: T.nilable(T::Boolean),
+ created_after: T.nilable(Integer),
+ created_before: T.nilable(Integer),
+ has_tool_calls: T.nilable(T::Boolean),
+ instructions_search: T.nilable(String),
+ metadata: T.nilable(T.anything),
+ model: T.nilable(String),
+ reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol),
+ temperature: T.nilable(Float),
+ top_p: T.nilable(Float),
+ users: T.nilable(T::Array[String]),
+ type: Symbol
+ )
+ .returns(T.attached_class)
+ end
+ def self.new(
+ allow_parallel_tool_calls: nil,
+ created_after: nil,
+ created_before: nil,
+ has_tool_calls: nil,
+ instructions_search: nil,
+ metadata: nil,
+ model: nil,
+ reasoning_effort: nil,
+ temperature: nil,
+ top_p: nil,
+ users: nil,
+ type: :responses
+ ); end
+ sig do
+ override
+ .returns(
+ {
+ type: Symbol,
+ allow_parallel_tool_calls: T.nilable(T::Boolean),
+ created_after: T.nilable(Integer),
+ created_before: T.nilable(Integer),
+ has_tool_calls: T.nilable(T::Boolean),
+ instructions_search: T.nilable(String),
+ metadata: T.nilable(T.anything),
+ model: T.nilable(String),
+ reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol),
+ temperature: T.nilable(Float),
+ top_p: T.nilable(Float),
+ users: T.nilable(T::Array[String])
+ }
+ )
+ end
+ def to_hash; end
+ end
+
+ sig do
+ override
+ .returns(
+ [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses]
+ )
+ end
+ def self.variants; end
+ end
+
+ module InputMessages
+ extend OpenAI::Internal::Type::Union
+
+ class Template < OpenAI::Internal::Type::BaseModel
+ # A list of chat messages forming the prompt or context. May include variable
+ # references to the "item" namespace, e.g. {{item.name}}.
+ sig do
+ returns(
+ T::Array[
+ T.any(
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem
+ )
+ ]
+ )
+ end
+ attr_accessor :template
+
+ # The type of input messages. Always `template`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ sig do
+ params(
+ template: T::Array[
+ T.any(
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+ OpenAI::Internal::AnyHash,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem
+ )
+ ],
+ type: Symbol
+ )
+ .returns(T.attached_class)
+ end
+ def self.new(template:, type: :template); end
+
+ sig do
+ override
+ .returns(
+ {
+ template: T::Array[
+ T.any(
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem
+ )
+ ],
+ type: Symbol
+ }
+ )
+ end
+ def to_hash; end
+
+ # A message input to the model with a role indicating instruction following
+ # hierarchy. Instructions given with the `developer` or `system` role take
+ # precedence over instructions given with the `user` role. Messages with the
+ # `assistant` role are presumed to have been generated by the model in previous
+ # interactions.
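+ # For illustration (the wording is invented), a template entry that pulls
+ # from the "item" namespace could look like:
+ #
+ #   {role: "user", content: "Label the sentiment of: {{item.text}}"}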
+ module Template + extend OpenAI::Internal::Type::Union + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig { params(content: String, role: String).returns(T.attached_class) } + def self.new(content:, role:); end + + sig { override.returns({content: String, role: String}) } + def to_hash; end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Internal::AnyHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + .returns(T.attached_class) + end + def self.new(content:, role:, type: nil); end + + sig do + override + .returns( + { + content: T.any( + String, + OpenAI::Models::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash; end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + class OutputText < OpenAI::Internal::Type::BaseModel + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. 
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ # A text output from the model.
+ sig { params(text: String, type: Symbol).returns(T.attached_class) }
+ def self.new(text:, type: :output_text); end
+
+ sig { override.returns({text: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ sig do
+ override
+ .returns(
+ [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText]
+ )
+ end
+ def self.variants; end
+ end
+
+ # The role of the message input. One of `user`, `assistant`, `system`, or
+ # `developer`.
+ module Role
+ extend OpenAI::Internal::Type::Enum
+
+ TaggedSymbol =
+ T.type_alias do
+ T.all(Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role)
+ end
+ OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+ USER =
+ T.let(
+ :user,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ ASSISTANT =
+ T.let(
+ :assistant,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ SYSTEM =
+ T.let(
+ :system,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+ DEVELOPER =
+ T.let(
+ :developer,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ )
+
+ sig do
+ override
+ .returns(
+ T::Array[
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol
+ ]
+ )
+ end
+ def self.values; end
+ end
+
+ # The type of the message input. Always `message`.
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ TaggedSymbol =
+ T.type_alias do
+ T.all(Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type)
+ end
+ OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+ MESSAGE =
+ T.let(
+ :message,
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+ )
+
+ sig do
+ override
+ .returns(
+ T::Array[
+ OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+ ]
+ )
+ end
+ def self.values; end
+ end
+ end
+
+ sig do
+ override
+ .returns(
+ [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem]
+ )
+ end
+ def self.variants; end
+ end
+ end
+
+ class ItemReference < OpenAI::Internal::Type::BaseModel
+ # A reference to a variable in the "item" namespace. E.g., "item.name"
+ sig { returns(String) }
+ attr_accessor :item_reference
+
+ # The type of input messages. Always `item_reference`.
+ sig { returns(Symbol) }
+ attr_accessor :type
+
+ sig { params(item_reference: String, type: Symbol).returns(T.attached_class) }
+ def self.new(item_reference:, type: :item_reference); end
+
+ sig { override.returns({item_reference: String, type: Symbol}) }
+ def to_hash; end
+ end
+
+ sig do
+ override
+ .returns(
+ [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference]
+ )
+ end
+ def self.variants; end
+ end
+
+ class SamplingParams < OpenAI::Internal::Type::BaseModel
+ # The maximum number of tokens in the generated output.
+ sig { returns(T.nilable(Integer)) }
+ attr_reader :max_completion_tokens
+
+ sig { params(max_completion_tokens: Integer).void }
+ attr_writer :max_completion_tokens
+
+ # A seed value to initialize the randomness during sampling.
+ sig { returns(T.nilable(Integer)) }
+ attr_reader :seed
+
+ sig { params(seed: Integer).void }
+ attr_writer :seed
+
+ # A higher temperature increases randomness in the outputs.
+ sig { returns(T.nilable(Float)) }
+ attr_reader :temperature
+
+ sig { params(temperature: Float).void }
+ attr_writer :temperature
+
+ # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ sig { returns(T.nilable(Float)) }
+ attr_reader :top_p
+
+ sig { params(top_p: Float).void }
+ attr_writer :top_p
+
+ sig do
+ params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float)
+ .returns(T.attached_class)
+ end
+ def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end
+
+ sig do
+ override.returns(
+ {
+ max_completion_tokens: Integer,
+ seed: Integer,
+ temperature: Float,
+ top_p: Float
+ }
+ )
+ end
+ def to_hash; end
+ end
+ end
+
 sig do
 override
 .returns(
- [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource]
+ [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions]
 )
 end
 def self.variants; end
diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
index 69429e05..b762881f 100644
--- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
+++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
@@ -8,17 +8,21 @@ module OpenAI
 extend OpenAI::Internal::Type::RequestParameters::Converter
 include OpenAI::Internal::Type::RequestParameters
 
+ sig { returns(String) }
+ attr_accessor :fine_tuned_model_checkpoint
+
 sig do
 params(
- request_options: T.any(
- OpenAI::RequestOptions,
- OpenAI::Internal::AnyHash
- )
- ).returns(T.attached_class)
+ fine_tuned_model_checkpoint: String,
+ request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)
+ )
+ .returns(T.attached_class)
 end
- def self.new(request_options: {}); end
+ def self.new(fine_tuned_model_checkpoint:, request_options: {}); end
 
- sig { override.returns({request_options: OpenAI::RequestOptions}) }
+ sig do
+ override.returns({fine_tuned_model_checkpoint: String, request_options: OpenAI::RequestOptions})
+ end
 def to_hash; end
 end
 end
diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi
index 3a2f3c67..cf15fc93 100644
--- a/rbi/lib/openai/models/image.rbi
+++ b/rbi/lib/openai/models/image.rbi
@@ -3,30 +3,32 @@
module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel - # The base64-encoded JSON of the generated image, if `response_format` is - # `b64_json`. + # The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, + # and only present if `response_format` is set to `b64_json` for `dall-e-2` and + # `dall-e-3`. sig { returns(T.nilable(String)) } attr_reader :b64_json sig { params(b64_json: String).void } attr_writer :b64_json - # The prompt that was used to generate the image, if there was any revision to the - # prompt. + # For `dall-e-3` only, the revised prompt that was used to generate the image. sig { returns(T.nilable(String)) } attr_reader :revised_prompt sig { params(revised_prompt: String).void } attr_writer :revised_prompt - # The URL of the generated image, if `response_format` is `url` (default). + # When using `dall-e-2` or `dall-e-3`, the URL of the generated image if + # `response_format` is set to `url` (default value). Unsupported for + # `gpt-image-1`. sig { returns(T.nilable(String)) } attr_reader :url sig { params(url: String).void } attr_writer :url - # Represents the url or the content of an image generated by the OpenAI API. + # Represents the content or the URL of an image generated by the OpenAI API. sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } def self.new(b64_json: nil, revised_prompt: nil, url: nil); end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 44cd2758..c2d38787 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -16,8 +16,7 @@ module OpenAI sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } attr_accessor :model - # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # The number of images to generate. Must be between 1 and 10. sig { returns(T.nilable(Integer)) } attr_accessor :n diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 616662cf..922d5db6 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -6,18 +6,21 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. - sig { returns(T.any(Pathname, StringIO)) } + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. + sig { returns(T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)])) } attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. sig { returns(String) } attr_accessor :prompt # An additional image whose fully transparent areas (e.g. where alpha is zero) - # indicate where `image` should be edited. Must be a valid PNG file, less than + # indicate where `image` should be edited. If there are multiple images provided, + # the mask will be applied on the first image. 
Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. sig { returns(T.nilable(T.any(Pathname, StringIO))) } attr_reader :mask @@ -25,8 +28,9 @@ module OpenAI sig { params(mask: T.any(Pathname, StringIO)).void } attr_writer :mask - # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } attr_accessor :model @@ -34,14 +38,22 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :n + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol)) } + attr_accessor :quality + # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } attr_accessor :response_format - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } attr_accessor :size @@ -56,11 +68,12 @@ module OpenAI sig do params( - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), prompt: String, mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), + quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, @@ -74,6 +87,7 @@ module OpenAI mask: nil, model: nil, n: nil, + quality: nil, response_format: nil, size: nil, user: nil, @@ -83,11 +97,12 @@ module OpenAI override .returns( { - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), prompt: String, mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), + quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, @@ -97,8 +112,26 @@ module OpenAI end def to_hash; end - # The model to use for image generation. Only `dall-e-2` is supported at this - # time. + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. 
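+ # As a rough sketch (file names are hypothetical), both forms are accepted:
+ #
+ #   image: Pathname("scene.png")                          # dall-e-2 or gpt-image-1
+ #   image: [Pathname("scene.png"), Pathname("props.png")] # gpt-image-1 only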
+ module Image + extend OpenAI::Internal::Type::Union + + sig { override.returns([StringIO, T::Array[StringIO]]) } + def self.variants; end + + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::IOLike], + OpenAI::Internal::Type::Converter + ) + end + + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. module Model extend OpenAI::Internal::Type::Union @@ -106,9 +139,29 @@ module OpenAI def self.variants; end end + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. + module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Quality) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + STANDARD = T.let(:standard, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Quality::TaggedSymbol]) } + def self.values; end + end + # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -122,8 +175,9 @@ module OpenAI def self.values; end end - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024`. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. module Size extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index d35fb1e2..0ee05de0 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -6,45 +6,77 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # A text description of the desired image(s). The maximum length is 1000 - # characters for `dall-e-2` and 4000 characters for `dall-e-3`. + # A text description of the desired image(s). The maximum length is 32000 + # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters + # for `dall-e-3`. sig { returns(String) } attr_accessor :prompt - # The model to use for image generation. + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. 
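+ # A minimal sketch (assuming a constructed client; the prompt is
+ # illustrative):
+ #
+ #   client.images.generate(
+ #     prompt: "a sticker of a fox",
+ #     model: :"gpt-image-1",
+ #     background: :transparent,
+ #     output_format: :png
+ #   )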
+ sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol)) } + attr_accessor :background + + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } attr_accessor :model + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol)) } + attr_accessor :moderation + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # `n=1` is supported. sig { returns(T.nilable(Integer)) } attr_accessor :n - # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } - attr_reader :quality + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + sig { returns(T.nilable(Integer)) } + attr_accessor :output_compression + + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol)) } + attr_accessor :output_format - sig { params(quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol).void } - attr_writer :quality + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. + sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } + attr_accessor :quality - # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } attr_accessor :response_format - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } attr_accessor :size - # The style of the generated images. Must be one of `vivid` or `natural`. 
Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images. Natural causes the model to + # produce more natural, less hyper-real looking images. sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } attr_accessor :style @@ -60,9 +92,13 @@ module OpenAI sig do params( prompt: String, + background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), - quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + output_compression: T.nilable(Integer), + output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol), + quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), @@ -73,8 +109,12 @@ module OpenAI end def self.new( prompt:, + background: nil, model: nil, + moderation: nil, n: nil, + output_compression: nil, + output_format: nil, quality: nil, response_format: nil, size: nil, @@ -87,9 +127,13 @@ module OpenAI .returns( { prompt: String, + background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), - quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol, + output_compression: T.nilable(Integer), + output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol), + quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), @@ -100,7 +144,30 @@ module OpenAI end def to_hash; end - # The model to use for image generation. + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. 
+ module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Background) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = T.let(:transparent, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) + OPAQUE = T.let(:opaque, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol]) } + def self.values; end + end + + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. module Model extend OpenAI::Internal::Type::Union @@ -108,9 +175,44 @@ module OpenAI def self.variants; end end - # The quality of the image that will be generated. `hd` creates images with finer - # details and greater consistency across the image. This param is only supported - # for `dall-e-3`. + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). + module Moderation + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Moderation) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = T.let(:low, OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol]) } + def self.values; end + end + + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = T.let(:png, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) + JPEG = T.let(:jpeg, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) + WEBP = T.let(:webp, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol]) } + def self.values; end + end + + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. 
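+ #
+ # For instance (illustrative pairings): `quality: :high` with
+ # `model: :"gpt-image-1"`, or `quality: :hd` with `model: :"dall-e-3"`.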
module Quality extend OpenAI::Internal::Type::Enum @@ -119,14 +221,19 @@ module OpenAI STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + LOW = T.let(:low, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) } def self.values; end end - # The format in which the generated images are returned. Must be one of `url` or - # `b64_json`. URLs are only valid for 60 minutes after the image has been - # generated. + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. module ResponseFormat extend OpenAI::Internal::Type::Enum @@ -140,18 +247,22 @@ module OpenAI def self.values; end end - # The size of the generated images. Must be one of `256x256`, `512x512`, or - # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or - # `1024x1792` for `dall-e-3` models. + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. module Size extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, String) } + AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1536X1024 = T.let(:"1536x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1536 = T.let(:"1024x1536", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) @@ -159,10 +270,10 @@ module OpenAI def self.values; end end - # The style of the generated images. Must be one of `vivid` or `natural`. Vivid - # causes the model to lean towards generating hyper-real and dramatic images. - # Natural causes the model to produce more natural, less hyper-real looking - # images. This param is only supported for `dall-e-3`. + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images. Natural causes the model to + # produce more natural, less hyper-real looking images. 
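+ #
+ # For example (illustrative): `style: :natural` yields less hyper-real
+ # `dall-e-3` output.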
module Style extend OpenAI::Internal::Type::Enum diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 2dd50c7c..bf52f382 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -10,6 +10,7 @@ module OpenAI DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) + GPT_IMAGE_1 = T.let(:"gpt-image-1", OpenAI::Models::ImageModel::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) } def self.values; end diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index cb594826..be9cfe0e 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -3,20 +3,110 @@ module OpenAI module Models class ImagesResponse < OpenAI::Internal::Type::BaseModel + # The Unix timestamp (in seconds) of when the image was created. sig { returns(Integer) } attr_accessor :created - sig { returns(T::Array[OpenAI::Models::Image]) } - attr_accessor :data + # The list of generated images. + sig { returns(T.nilable(T::Array[OpenAI::Models::Image])) } + attr_reader :data + sig { params(data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)]).void } + attr_writer :data + + # For `gpt-image-1` only, the token usage information for the image generation. + sig { returns(T.nilable(OpenAI::Models::ImagesResponse::Usage)) } + attr_reader :usage + + sig { params(usage: T.any(OpenAI::Models::ImagesResponse::Usage, OpenAI::Internal::AnyHash)).void } + attr_writer :usage + + # The response from the image generation endpoint. sig do - params(created: Integer, data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)]) + params( + created: Integer, + data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)], + usage: T.any(OpenAI::Models::ImagesResponse::Usage, OpenAI::Internal::AnyHash) + ) .returns(T.attached_class) end - def self.new(created:, data:); end + def self.new(created:, data: nil, usage: nil); end - sig { override.returns({created: Integer, data: T::Array[OpenAI::Models::Image]}) } + sig do + override + .returns( + {created: Integer, data: T::Array[OpenAI::Models::Image], usage: OpenAI::Models::ImagesResponse::Usage} + ) + end def to_hash; end + + class Usage < OpenAI::Internal::Type::BaseModel + # The number of tokens (images and text) in the input prompt. + sig { returns(Integer) } + attr_accessor :input_tokens + + # The input tokens detailed information for the image generation. + sig { returns(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails) } + attr_reader :input_tokens_details + + sig do + params( + input_tokens_details: T.any(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :input_tokens_details + + # The number of image tokens in the output image. + sig { returns(Integer) } + attr_accessor :output_tokens + + # The total number of tokens (images and text) used for the image generation. + sig { returns(Integer) } + attr_accessor :total_tokens + + # For `gpt-image-1` only, the token usage information for the image generation. 
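+ # A hedged usage sketch (assuming a constructed client):
+ #
+ #   res = client.images.generate(prompt: "a red square", model: :"gpt-image-1")
+ #   res.usage&.total_tokens # Integer when usage is reported, else nil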
+ sig do + params( + input_tokens: Integer, + input_tokens_details: T.any(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, OpenAI::Internal::AnyHash), + output_tokens: Integer, + total_tokens: Integer + ) + .returns(T.attached_class) + end + def self.new(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:); end + + sig do + override + .returns( + { + input_tokens: Integer, + input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash; end + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + # The number of image tokens in the input prompt. + sig { returns(Integer) } + attr_accessor :image_tokens + + # The number of text tokens in the input prompt. + sig { returns(Integer) } + attr_accessor :text_tokens + + # The input tokens detailed information for the image generation. + sig { params(image_tokens: Integer, text_tokens: Integer).returns(T.attached_class) } + def self.new(image_tokens:, text_tokens:); end + + sig { override.returns({image_tokens: Integer, text_tokens: Integer}) } + def to_hash; end + end + end end end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi new file mode 100644 index 00000000..2a08cbdb --- /dev/null +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -0,0 +1,88 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel + # The ID of the item this summary part is associated with. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item this summary part is associated with. + sig { returns(Integer) } + attr_accessor :output_index + + # The summary part that was added. + sig { returns(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part) } + attr_reader :part + + sig do + params( + part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :part + + # The index of the summary part within the reasoning summary. + sig { returns(Integer) } + attr_accessor :summary_index + + # The type of the event. Always `response.reasoning_summary_part.added`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a new reasoning summary part is added. + sig do + params( + item_id: String, + output_index: Integer, + part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, OpenAI::Internal::AnyHash), + summary_index: Integer, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + item_id:, + output_index:, + part:, + summary_index:, + type: :"response.reasoning_summary_part.added" + ) + end + + sig do + override + .returns( + { + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash; end + + class Part < OpenAI::Internal::Type::BaseModel + # The text of the summary part. + sig { returns(String) } + attr_accessor :text + + # The type of the summary part. Always `summary_text`. + sig { returns(Symbol) } + attr_accessor :type + + # The summary part that was added. 
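+ # For example (the text is invented):
+ #
+ #   Part.new(text: "Weighing the two approaches...")
+ #
+ # `type` defaults to `:summary_text`.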
+ sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :summary_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi new file mode 100644 index 00000000..074c9319 --- /dev/null +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -0,0 +1,88 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel + # The ID of the item this summary part is associated with. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item this summary part is associated with. + sig { returns(Integer) } + attr_accessor :output_index + + # The completed summary part. + sig { returns(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part) } + attr_reader :part + + sig do + params( + part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, OpenAI::Internal::AnyHash) + ) + .void + end + attr_writer :part + + # The index of the summary part within the reasoning summary. + sig { returns(Integer) } + attr_accessor :summary_index + + # The type of the event. Always `response.reasoning_summary_part.done`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a reasoning summary part is completed. + sig do + params( + item_id: String, + output_index: Integer, + part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, OpenAI::Internal::AnyHash), + summary_index: Integer, + type: Symbol + ) + .returns(T.attached_class) + end + def self.new( + item_id:, + output_index:, + part:, + summary_index:, + type: :"response.reasoning_summary_part.done" + ) + end + + sig do + override + .returns( + { + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash; end + + class Part < OpenAI::Internal::Type::BaseModel + # The text of the summary part. + sig { returns(String) } + attr_accessor :text + + # The type of the summary part. Always `summary_text`. + sig { returns(Symbol) } + attr_accessor :type + + # The completed summary part. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :summary_text); end + + sig { override.returns({text: String, type: Symbol}) } + def to_hash; end + end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi new file mode 100644 index 00000000..e659c052 --- /dev/null +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -0,0 +1,55 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel + # The text delta that was added to the summary. + sig { returns(String) } + attr_accessor :delta + + # The ID of the item this summary text delta is associated with. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item this summary text delta is associated with. 
+ sig { returns(Integer) } + attr_accessor :output_index + + # The index of the summary part within the reasoning summary. + sig { returns(Integer) } + attr_accessor :summary_index + + # The type of the event. Always `response.reasoning_summary_text.delta`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a delta is added to a reasoning summary text. + sig do + params(delta: String, item_id: String, output_index: Integer, summary_index: Integer, type: Symbol) + .returns(T.attached_class) + end + def self.new( + delta:, + item_id:, + output_index:, + summary_index:, + type: :"response.reasoning_summary_text.delta" + ) + end + + sig do + override + .returns({ + delta: String, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: Symbol + }) + end + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi new file mode 100644 index 00000000..dea35ce6 --- /dev/null +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -0,0 +1,55 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel + # The ID of the item this summary text is associated with. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item this summary text is associated with. + sig { returns(Integer) } + attr_accessor :output_index + + # The index of the summary part within the reasoning summary. + sig { returns(Integer) } + attr_accessor :summary_index + + # The full text of the completed reasoning summary. + sig { returns(String) } + attr_accessor :text + + # The type of the event. Always `response.reasoning_summary_text.done`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a reasoning summary text is completed. 
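+ # A hedged sketch of consuming this event (assuming `stream` yields
+ # response stream events):
+ #
+ #   stream.each do |event|
+ #     case event
+ #     when OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent
+ #       puts event.text
+ #     end
+ #   end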
+ sig do + params(item_id: String, output_index: Integer, summary_index: Integer, text: String, type: Symbol) + .returns(T.attached_class) + end + def self.new( + item_id:, + output_index:, + summary_index:, + text:, + type: :"response.reasoning_summary_text.done" + ) + end + + sig do + override + .returns({ + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: Symbol + }) + end + def to_hash; end + end + end + end +end diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/lib/openai/models/responses/response_stream_event.rbi index 2df75056..6234c3c6 100644 --- a/rbi/lib/openai/models/responses/response_stream_event.rbi +++ b/rbi/lib/openai/models/responses/response_stream_event.rbi @@ -10,7 +10,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, 
OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] ) end def self.variants; end diff --git a/rbi/lib/openai/resources/evals.rbi b/rbi/lib/openai/resources/evals.rbi index f7fb72d2..ccf5a9f3 100644 --- a/rbi/lib/openai/resources/evals.rbi +++ b/rbi/lib/openai/resources/evals.rbi @@ -16,19 +16,20 @@ module OpenAI data_source_config: T.any( OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs ), testing_criteria: T::Array[ T.any( OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Internal::AnyHash, OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader + OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateParams::TestingCriterion::Python, + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel ) ], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - share_with_openai: T::Boolean, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::EvalCreateResponse) @@ -47,8 +48,6 @@ module OpenAI metadata: nil, # The name of the evaluation. name: nil, - # Indicates whether the evaluation is shared with OpenAI. - share_with_openai: nil, request_options: {} ); end # Get an evaluation by ID. diff --git a/rbi/lib/openai/resources/evals/runs.rbi b/rbi/lib/openai/resources/evals/runs.rbi index d49a538a..014d5734 100644 --- a/rbi/lib/openai/resources/evals/runs.rbi +++ b/rbi/lib/openai/resources/evals/runs.rbi @@ -14,7 +14,8 @@ module OpenAI data_source: T.any( OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -76,8 +77,8 @@ module OpenAI # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. order: nil, - # Filter runs by status. 
Use "queued" | "in_progress" | "failed" | "completed" | - # "canceled". + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. status: nil, request_options: {} ); end diff --git a/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi index 0d9761de..64fb5a6a 100644 --- a/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi +++ b/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -58,14 +58,17 @@ module OpenAI # fine-tuned model checkpoint. sig do params( + permission_id: String, fine_tuned_model_checkpoint: String, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse) end def delete( + # The ID of the fine-tuned model checkpoint permission to delete. + permission_id, # The ID of the fine-tuned model checkpoint to delete a permission for. - fine_tuned_model_checkpoint, + fine_tuned_model_checkpoint:, request_options: {} ); end # @api private diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 04c126f2..7e97700a 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -3,7 +3,7 @@ module OpenAI module Resources class Images - # Creates a variation of a given image. + # Creates a variation of a given image. This endpoint only supports `dall-e-2`. sig do params( image: T.any(Pathname, StringIO), @@ -23,8 +23,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. model: nil, - # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # `n=1` is supported. + # The number of images to generate. Must be between 1 and 10. n: nil, # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been @@ -39,14 +38,16 @@ module OpenAI user: nil, request_options: {} ); end - # Creates an edited or extended image given an original image and a prompt. + # Creates an edited or extended image given one or more source images and a + # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. sig do params( - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), prompt: String, mask: T.any(Pathname, StringIO), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), + quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, @@ -55,27 +56,37 @@ module OpenAI .returns(OpenAI::Models::ImagesResponse) end def edit( - # The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask - # is not provided, image must have transparency, which will be used as the mask. + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. image:, # A text description of the desired image(s). The maximum length is 1000 - # characters. + # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. 
         prompt:,
         # An additional image whose fully transparent areas (e.g. where alpha is zero)
-        # indicate where `image` should be edited. Must be a valid PNG file, less than
+        # indicate where `image` should be edited. If there are multiple images provided,
+        # the mask will be applied on the first image. Must be a valid PNG file, less than
         # 4MB, and have the same dimensions as `image`.
         mask: nil,
-        # The model to use for image generation. Only `dall-e-2` is supported at this
-        # time.
+        # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are
+        # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1`
+        # is used.
         model: nil,
         # The number of images to generate. Must be between 1 and 10.
         n: nil,
+        # The quality of the image that will be generated. `high`, `medium` and `low` are
+        # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality.
+        # Defaults to `auto`.
+        quality: nil,
         # The format in which the generated images are returned. Must be one of `url` or
         # `b64_json`. URLs are only valid for 60 minutes after the image has been
-        # generated.
+        # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1`
+        # will always return base64-encoded images.
         response_format: nil,
-        # The size of the generated images. Must be one of `256x256`, `512x512`, or
-        # `1024x1024`.
+        # The size of the generated images. Must be one of `1024x1024`, `1536x1024`
+        # (landscape), `1024x1536` (portrait), or `auto` (default value) for
+        # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`.
         size: nil,
         # A unique identifier representing your end-user, which can help OpenAI to monitor
         # and detect abuse.
@@ -84,12 +95,17 @@ module OpenAI
         request_options: {}
       ); end
       # Creates an image given a prompt.
+      # [Learn more](https://platform.openai.com/docs/guides/images).
       sig do
         params(
           prompt: String,
+          background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol),
           model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)),
+          moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol),
           n: T.nilable(Integer),
-          quality: OpenAI::Models::ImageGenerateParams::Quality::OrSymbol,
+          output_compression: T.nilable(Integer),
+          output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol),
+          quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol),
           response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol),
           size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol),
           style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol),
@@ -99,30 +115,57 @@
           .returns(OpenAI::Models::ImagesResponse)
       end
       def generate(
-        # A text description of the desired image(s). The maximum length is 1000
-        # characters for `dall-e-2` and 4000 characters for `dall-e-3`.
+        # A text description of the desired image(s). The maximum length is 32000
+        # characters for `gpt-image-1`, 1000 characters for `dall-e-2`, and 4000
+        # characters for `dall-e-3`.
         prompt:,
-        # The model to use for image generation.
+        # Allows you to set transparency for the background of the generated image(s).
+        # This parameter is only supported for `gpt-image-1`. Must be one of
+        # `transparent`, `opaque` or `auto` (default value). When `auto` is used, the
+        # model will automatically determine the best background for the image.
+        #
+        # If `transparent`, the output format needs to support transparency, so it should
+        # be set to either `png` (default value) or `webp`.
+        background: nil,
+        # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or
+        # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to
+        # `gpt-image-1` is used.
         model: nil,
+        # Control the content-moderation level for images generated by `gpt-image-1`. Must
+        # be either `low` for less restrictive filtering or `auto` (default value).
+        moderation: nil,
         # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
         # `n=1` is supported.
         n: nil,
-        # The quality of the image that will be generated. `hd` creates images with finer
-        # details and greater consistency across the image. This param is only supported
-        # for `dall-e-3`.
+        # The compression level (0-100%) for the generated images. This parameter is only
+        # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and
+        # defaults to 100.
+        output_compression: nil,
+        # The format in which the generated images are returned. This parameter is only
+        # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`.
+        output_format: nil,
+        # The quality of the image that will be generated.
+        #
+        # - `auto` (default value) will automatically select the best quality for the
+        #   given model.
+        # - `high`, `medium` and `low` are supported for `gpt-image-1`.
+        # - `hd` and `standard` are supported for `dall-e-3`.
+        # - `standard` is the only option for `dall-e-2`.
         quality: nil,
-        # The format in which the generated images are returned. Must be one of `url` or
-        # `b64_json`. URLs are only valid for 60 minutes after the image has been
-        # generated.
+        # The format in which generated images with `dall-e-2` and `dall-e-3` are
+        # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes
+        # after the image has been generated. This parameter isn't supported for
+        # `gpt-image-1`, which will always return base64-encoded images.
         response_format: nil,
-        # The size of the generated images. Must be one of `256x256`, `512x512`, or
-        # `1024x1024` for `dall-e-2`. Must be one of `1024x1024`, `1792x1024`, or
-        # `1024x1792` for `dall-e-3` models.
+        # The size of the generated images. Must be one of `1024x1024`, `1536x1024`
+        # (landscape), `1024x1536` (portrait), or `auto` (default value) for
+        # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and
+        # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`.
         size: nil,
-        # The style of the generated images. Must be one of `vivid` or `natural`. Vivid
-        # causes the model to lean towards generating hyper-real and dramatic images.
-        # Natural causes the model to produce more natural, less hyper-real looking
-        # images. This param is only supported for `dall-e-3`.
+        # The style of the generated images. This parameter is only supported for
+        # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean
+        # towards generating hyper-real and dramatic images. Natural causes the model to
+        # produce more natural, less hyper-real looking images.
         style: nil,
         # A unique identifier representing your end-user, which can help OpenAI to monitor
         # and detect abuse.
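Taken together, the `edit` and `generate` signatures above admit call sites like the following. This is a minimal usage sketch rather than part of the patch: the client setup, file paths, and prompt strings are placeholder assumptions, and only parameters that appear in the signatures above are exercised.

    require "openai"
    require "pathname"

    # Hypothetical client setup; reads OPENAI_API_KEY from the environment.
    client = OpenAI::Client.new

    # gpt-image-1 accepts an array of source images plus a quality hint; a mask,
    # if supplied, is applied to the first image.
    edited = client.images.edit(
      image: [Pathname("base.png"), Pathname("style_reference.png")],
      prompt: "Blend both subjects into a single watercolor scene",
      model: "gpt-image-1",
      quality: :high
    )

    # A transparent background needs an output format with alpha support (png or
    # webp), and gpt-image-1 always returns base64-encoded image data.
    generated = client.images.generate(
      prompt: "A line drawing of an otter",
      model: "gpt-image-1",
      background: :transparent,
      output_format: :webp,
      output_compression: 80
    )
    puts generated.data&.first&.b64_json&.bytesize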
diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/lib/openai/resources/responses.rbi index c890608c..b67ed0d1 100644 --- a/rbi/lib/openai/resources/responses.rbi +++ b/rbi/lib/openai/resources/responses.rbi @@ -305,6 +305,10 @@ module OpenAI OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, + OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, + OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, + OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, + OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index aeb818ab..67ca7b95 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -15,7 +15,7 @@ module OpenAI thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy? } @@ -55,7 +55,7 @@ module OpenAI attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources? - attr_accessor tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]? + attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool]? attr_accessor top_p: Float? 
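The `thread_create_and_run_params.rbs` hunks above swap the params-local `tool` union for the shared `OpenAI::Models::Beta::assistant_tool` alias, so `tools:` accepts the same tool shapes used elsewhere on the Beta Assistants surface. A minimal sketch of a call the widened signature admits; the client setup and the `asst_` ID are placeholder assumptions:

    require "openai"

    client = OpenAI::Client.new # hypothetical setup

    # tools: now takes the shared assistant tool types; plain hashes are coerced
    # into the corresponding models (CodeInterpreterTool, FileSearchTool).
    run = client.beta.threads.create_and_run(
      assistant_id: "asst_placeholder123",
      tools: [
        {type: :code_interpreter},
        {type: :file_search}
      ],
      thread: {
        messages: [{role: :user, content: "Summarize the attached report."}]
      }
    )
    puts run.status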
@@ -74,7 +74,7 @@ module OpenAI ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts @@ -417,17 +417,6 @@ module OpenAI end end - type tool = - OpenAI::Models::Beta::CodeInterpreterTool - | OpenAI::Models::Beta::FileSearchTool - | OpenAI::Models::Beta::FunctionTool - - module Tool - extend OpenAI::Internal::Type::Union - - def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] - end - type truncation_strategy = { type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 81c23475..09ba33d8 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -5,8 +5,7 @@ module OpenAI data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], metadata: OpenAI::Models::metadata?, - name: String, - share_with_openai: bool + name: String } & OpenAI::Internal::Type::request_parameters @@ -24,16 +23,11 @@ module OpenAI def name=: (String) -> String - attr_reader share_with_openai: bool? - - def share_with_openai=: (bool) -> bool - def initialize: ( data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, - ?share_with_openai: bool, ?request_options: OpenAI::request_opts ) -> void @@ -41,7 +35,7 @@ module OpenAI type data_source_config = OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom - | OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions + | OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs module DataSourceConfig extend OpenAI::Internal::Type::Union @@ -71,29 +65,29 @@ module OpenAI def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::custom end - type stored_completions = - { type: :stored_completions, metadata: OpenAI::Models::metadata? } + type logs = { type: :logs, metadata: ::Hash[Symbol, top] } - class StoredCompletions < OpenAI::Internal::Type::BaseModel - attr_accessor type: :stored_completions + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor type: :logs - attr_accessor metadata: OpenAI::Models::metadata? + attr_reader metadata: ::Hash[Symbol, top]? 
- def initialize: ( - ?metadata: OpenAI::Models::metadata?, - ?type: :stored_completions - ) -> void + def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void - def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::stored_completions + def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::logs end - def self?.variants: -> [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + def self?.variants: -> [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] end type testing_criterion = OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel | OpenAI::Models::EvalStringCheckGrader | OpenAI::Models::EvalTextSimilarityGrader + | OpenAI::Models::EvalCreateParams::TestingCriterion::Python + | OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union @@ -134,8 +128,7 @@ module OpenAI type input = OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage - | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage - | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage + | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem module Input extend OpenAI::Internal::Type::Union @@ -152,67 +145,66 @@ module OpenAI def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::simple_input_message end - type input_message = + type eval_item = { - content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ } - class InputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content - attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role - attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ def initialize: ( - content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_ + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::input_message + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::eval_item type content = - { - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ - } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ + module Content + extend OpenAI::Internal::Type::Union - def initialize: ( - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_ - ) -> void + type output_text = { text: String, type: :output_text } - def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::content + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String - type type_ = :input_text + attr_accessor type: :output_text - module Type - extend OpenAI::Internal::Type::Enum + def initialize: (text: String, ?type: :output_text) -> void - INPUT_TEXT: :input_text - - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::type_] + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::output_text end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] end - type role = :user | :system | :developer + type role = :user | :assistant | :system | :developer module Role extend OpenAI::Internal::Type::Enum USER: :user + ASSISTANT: :assistant SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::role] + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] end type type_ = :message @@ -222,87 +214,168 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::type_] + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] end end - type output_message = - { - content: 
OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ - } + def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem] + end + end - class OutputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content + type python = + { + name: String, + source: String, + type: :python, + image_tag: String, + pass_threshold: Float + } - attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role + class Python < OpenAI::Internal::Type::BaseModel + attr_accessor name: String - attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ + attr_accessor source: String - def initialize: ( - content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content, - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_ - ) -> void + attr_accessor type: :python - def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::output_message + attr_reader image_tag: String? - type content = - { - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ - } + def image_tag=: (String) -> String - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String + attr_reader pass_threshold: Float? - attr_accessor type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ + def pass_threshold=: (Float) -> Float - def initialize: ( - text: String, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_ - ) -> void + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?pass_threshold: Float, + ?type: :python + ) -> void - def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::content + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::python + end - type type_ = :output_text + type score_model = + { + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: :score_model, + pass_threshold: Float, + range: ::Array[Float], + sampling_params: top + } - module Type - extend OpenAI::Internal::Type::Enum + class ScoreModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input] - OUTPUT_TEXT: :output_text + attr_accessor model: String - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::type_] - end - end + attr_accessor name: String - type role = :assistant + attr_accessor type: :score_model - module Role - extend OpenAI::Internal::Type::Enum + attr_reader pass_threshold: Float? 
- ASSISTANT: :assistant + def pass_threshold=: (Float) -> Float - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::role] - end + attr_reader range: ::Array[Float]? - type type_ = :message + def range=: (::Array[Float]) -> ::Array[Float] - module Type - extend OpenAI::Internal::Type::Enum + attr_reader sampling_params: top? - MESSAGE: :message + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + ?pass_threshold: Float, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::score_model + + type input = + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content + + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role + + attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_? + + def type=: ( + OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ + ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ + + def initialize: ( + content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role, + ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ + ) -> void - def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::type_] + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::input + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::output_text end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] end - def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage] + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role] + end + + type type_ = :message + + module Type + extend 
OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_] + end end end - def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel] end end end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 91552ac6..8ff0e39f 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -8,7 +8,6 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :eval, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] } @@ -25,8 +24,6 @@ module OpenAI attr_accessor object: :eval - attr_accessor share_with_openai: bool - attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] def initialize: ( @@ -35,7 +32,6 @@ module OpenAI data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config, metadata: OpenAI::Models::metadata?, name: String, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion], ?object: :eval ) -> void @@ -56,11 +52,166 @@ module OpenAI OpenAI::Models::EvalLabelModelGrader | OpenAI::Models::EvalStringCheckGrader | OpenAI::Models::EvalTextSimilarityGrader + | OpenAI::Models::EvalCreateResponse::TestingCriterion::Python + | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + type python = + { + name: String, + source: String, + type: :python, + image_tag: String, + pass_threshold: Float + } + + class Python < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor source: String + + attr_accessor type: :python + + attr_reader image_tag: String? + + def image_tag=: (String) -> String + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?pass_threshold: Float, + ?type: :python + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::python + end + + type score_model = + { + input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: :score_model, + pass_threshold: Float, + range: ::Array[Float], + sampling_params: top + } + + class ScoreModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor type: :score_model + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + attr_reader range: ::Array[Float]? + + def range=: (::Array[Float]) -> ::Array[Float] + + attr_reader sampling_params: top? 
+ + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + ?pass_threshold: Float, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::score_model + + type input = + { + content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role, + type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content + + attr_accessor role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role + + attr_reader type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_? + + def type=: ( + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ + + def initialize: ( + content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role, + ?type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::input + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_] + end + end + end + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel] end end end diff --git a/sig/openai/models/eval_label_model_grader.rbs b/sig/openai/models/eval_label_model_grader.rbs index af0c4a5f..dab1b730 100644 --- a/sig/openai/models/eval_label_model_grader.rbs +++ b/sig/openai/models/eval_label_model_grader.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type eval_label_model_grader = { - 
input: ::Array[OpenAI::Models::EvalLabelModelGrader::input], + input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input], labels: ::Array[String], model: String, name: String, @@ -11,7 +11,7 @@ module OpenAI } class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Models::EvalLabelModelGrader::input] + attr_accessor input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input] attr_accessor labels: ::Array[String] @@ -24,7 +24,7 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::Models::EvalLabelModelGrader::input], + input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input], labels: ::Array[String], model: String, name: String, @@ -35,149 +35,76 @@ module OpenAI def to_hash: -> OpenAI::Models::eval_label_model_grader type input = - OpenAI::Models::EvalLabelModelGrader::Input::Assistant - | OpenAI::Models::EvalLabelModelGrader::Input::InputMessage + { + content: OpenAI::Models::EvalLabelModelGrader::Input::content, + role: OpenAI::Models::EvalLabelModelGrader::Input::role, + type: OpenAI::Models::EvalLabelModelGrader::Input::type_ + } - module Input - extend OpenAI::Internal::Type::Union + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::content - type input_message = - { - content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, - role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ - } + attr_accessor role: OpenAI::Models::EvalLabelModelGrader::Input::role - class InputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content + attr_reader type: OpenAI::Models::EvalLabelModelGrader::Input::type_? 
- attr_accessor role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role + def type=: ( + OpenAI::Models::EvalLabelModelGrader::Input::type_ + ) -> OpenAI::Models::EvalLabelModelGrader::Input::type_ - attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ + def initialize: ( + content: OpenAI::Models::EvalLabelModelGrader::Input::content, + role: OpenAI::Models::EvalLabelModelGrader::Input::role, + ?type: OpenAI::Models::EvalLabelModelGrader::Input::type_ + ) -> void - def initialize: ( - content: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content, - role: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_ - ) -> void + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::input - def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::input_message + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText - type content = - { - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ - } + module Content + extend OpenAI::Internal::Type::Union - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ - - def initialize: ( - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_ - ) -> void - - def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::content + type output_text = { text: String, type: :output_text } - type type_ = :input_text - - module Type - extend OpenAI::Internal::Type::Enum - - INPUT_TEXT: :input_text - - def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::type_] - end - end - - type role = :user | :system | :developer + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String - module Role - extend OpenAI::Internal::Type::Enum + attr_accessor type: :output_text - USER: :user - SYSTEM: :system - DEVELOPER: :developer + def initialize: (text: String, ?type: :output_text) -> void - def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::role] + def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::Content::output_text end - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::type_] - end + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] end - type assistant = - { - content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, - role: :assistant, - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_ - } - - class Assistant < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content + type role = :user | :assistant | :system | :developer - attr_accessor role: :assistant + module Role + extend OpenAI::Internal::Type::Enum - attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_ + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer - def initialize: ( - content: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content, - type: 
OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_, - ?role: :assistant - ) -> void - - def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::assistant - - type content = - { - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ - } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ - - def initialize: ( - text: String, - type: OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_ - ) -> void - - def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::Assistant::content - - type type_ = :output_text - - module Type - extend OpenAI::Internal::Type::Enum - - OUTPUT_TEXT: :output_text - - def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::type_] - end - end + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::role] + end - type type_ = :message + type type_ = :message - module Type - extend OpenAI::Internal::Type::Enum + module Type + extend OpenAI::Internal::Type::Enum - MESSAGE: :message + MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::Assistant::type_] - end + def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::type_] end - - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage] end end end diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index 25f5a62c..f7a1047b 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -8,7 +8,6 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :eval, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] } @@ -25,8 +24,6 @@ module OpenAI attr_accessor object: :eval - attr_accessor share_with_openai: bool - attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] def initialize: ( @@ -35,7 +32,6 @@ module OpenAI data_source_config: OpenAI::Models::EvalListResponse::data_source_config, metadata: OpenAI::Models::metadata?, name: String, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion], ?object: :eval ) -> void @@ -56,11 +52,166 @@ module OpenAI OpenAI::Models::EvalLabelModelGrader | OpenAI::Models::EvalStringCheckGrader | OpenAI::Models::EvalTextSimilarityGrader + | OpenAI::Models::EvalListResponse::TestingCriterion::Python + | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + type python = + { + name: String, + source: String, + type: :python, + image_tag: String, + pass_threshold: Float + } + + class Python < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor source: String + + attr_accessor type: :python + + attr_reader image_tag: String? + + def image_tag=: (String) -> String + + attr_reader pass_threshold: Float? 
+ + def pass_threshold=: (Float) -> Float + + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?pass_threshold: Float, + ?type: :python + ) -> void + + def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::python + end + + type score_model = + { + input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: :score_model, + pass_threshold: Float, + range: ::Array[Float], + sampling_params: top + } + + class ScoreModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor type: :score_model + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + attr_reader range: ::Array[Float]? + + def range=: (::Array[Float]) -> ::Array[Float] + + attr_reader sampling_params: top? + + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + ?pass_threshold: Float, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::score_model + + type input = + { + content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role, + type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content + + attr_accessor role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role + + attr_reader type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_? 
+ + def type=: ( + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ + + def initialize: ( + content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role, + ?type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::input + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_] + end + end + end + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel] end end end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index 39e82c42..b354045c 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -8,7 +8,6 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :eval, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] } @@ -25,8 +24,6 @@ module OpenAI attr_accessor object: :eval - attr_accessor share_with_openai: bool - attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] def initialize: ( @@ -35,7 +32,6 @@ module OpenAI data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config, metadata: OpenAI::Models::metadata?, name: String, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion], ?object: :eval ) -> void @@ -56,11 +52,166 @@ module OpenAI OpenAI::Models::EvalLabelModelGrader | OpenAI::Models::EvalStringCheckGrader | OpenAI::Models::EvalTextSimilarityGrader + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, 
OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + type python = + { + name: String, + source: String, + type: :python, + image_tag: String, + pass_threshold: Float + } + + class Python < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor source: String + + attr_accessor type: :python + + attr_reader image_tag: String? + + def image_tag=: (String) -> String + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?pass_threshold: Float, + ?type: :python + ) -> void + + def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::python + end + + type score_model = + { + input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: :score_model, + pass_threshold: Float, + range: ::Array[Float], + sampling_params: top + } + + class ScoreModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor type: :score_model + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + attr_reader range: ::Array[Float]? + + def range=: (::Array[Float]) -> ::Array[Float] + + attr_reader sampling_params: top? + + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + ?pass_threshold: Float, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::score_model + + type input = + { + content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role, + type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content + + attr_accessor role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role + + attr_reader type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_? 
+ + def type=: ( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ + + def initialize: ( + content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role, + ?type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::input + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_] + end + end + end + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel] end end end diff --git a/sig/openai/models/eval_text_similarity_grader.rbs b/sig/openai/models/eval_text_similarity_grader.rbs index 7b15ca88..98059580 100644 --- a/sig/openai/models/eval_text_similarity_grader.rbs +++ b/sig/openai/models/eval_text_similarity_grader.rbs @@ -47,7 +47,6 @@ module OpenAI | :rouge_4 | :rouge_5 | :rouge_l - | :cosine module EvaluationMetric extend OpenAI::Internal::Type::Enum @@ -62,7 +61,6 @@ module OpenAI ROUGE_4: :rouge_4 ROUGE_5: :rouge_5 ROUGE_L: :rouge_l - COSINE: :cosine def self?.values: -> ::Array[OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric] end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index 6aad41bb..2cc7f800 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -8,7 +8,6 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :eval, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] } @@ -25,8 +24,6 @@ module OpenAI attr_accessor object: :eval - attr_accessor share_with_openai: bool - attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] def initialize: ( @@ -35,7 +32,6 @@ module OpenAI data_source_config: 
OpenAI::Models::EvalUpdateResponse::data_source_config, metadata: OpenAI::Models::metadata?, name: String, - share_with_openai: bool, testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion], ?object: :eval ) -> void @@ -56,11 +52,166 @@ module OpenAI OpenAI::Models::EvalLabelModelGrader | OpenAI::Models::EvalStringCheckGrader | OpenAI::Models::EvalTextSimilarityGrader + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader] + type python = + { + name: String, + source: String, + type: :python, + image_tag: String, + pass_threshold: Float + } + + class Python < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor source: String + + attr_accessor type: :python + + attr_reader image_tag: String? + + def image_tag=: (String) -> String + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?pass_threshold: Float, + ?type: :python + ) -> void + + def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::python + end + + type score_model = + { + input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + type: :score_model, + pass_threshold: Float, + range: ::Array[Float], + sampling_params: top + } + + class ScoreModel < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor type: :score_model + + attr_reader pass_threshold: Float? + + def pass_threshold=: (Float) -> Float + + attr_reader range: ::Array[Float]? + + def range=: (::Array[Float]) -> ::Array[Float] + + attr_reader sampling_params: top? + + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], + model: String, + name: String, + ?pass_threshold: Float, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::score_model + + type input = + { + content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role, + type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content + + attr_accessor role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role + + attr_reader type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_? 
+ + def type=: ( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ + + def initialize: ( + content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content, + role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role, + ?type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ + ) -> void + + def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::input + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_] + end + end + end + + def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel] end end end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index de1f4c07..bbe22adb 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -3,22 +3,28 @@ module OpenAI module Evals type create_eval_completions_run_data_source = { - input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, - model: String, source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + model: String, sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages - - attr_accessor model: String - attr_accessor source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_ + attr_reader input_messages: 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + + attr_reader model: String? + + def model=: (String) -> String + attr_reader sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams? def sampling_params=: ( @@ -26,193 +32,212 @@ module OpenAI ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams def initialize: ( - input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, - model: String, source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + ?model: String, ?sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams ) -> void def to_hash: -> OpenAI::Models::Evals::create_eval_completions_run_data_source - type input_messages = - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + type source = + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions - module InputMessages + module Source extend OpenAI::Internal::Type::Union - type template = + type file_content = { - template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], - type: :template + content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content } - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] - attr_accessor type: :template + attr_accessor type: :file_content def initialize: ( - template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], - ?type: :template + content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::template + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_content - type template = - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - module Template - extend OpenAI::Internal::Type::Union + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] - type chat_message = { content: String, role: String } + attr_reader 
sample: ::Hash[Symbol, top]? - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - attr_accessor role: String + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void - def initialize: (content: String, role: String) -> void + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::content + end + end - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::chat_message - end + type file_id = { id: String, type: :file_id } - type input_message = - { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ - } + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String - class InputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content + attr_accessor type: :file_id - attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role + def initialize: (id: String, ?type: :file_id) -> void - attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_id + end - def initialize: ( - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_ - ) -> void + type stored_completions = + { + type: :stored_completions, + created_after: Integer?, + created_before: Integer?, + limit: Integer?, + metadata: OpenAI::Models::metadata?, + model: String? + } - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::input_message + class StoredCompletions < OpenAI::Internal::Type::BaseModel + attr_accessor type: :stored_completions - type content = - { - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ - } + attr_accessor created_after: Integer? - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String + attr_accessor created_before: Integer? - attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ + attr_accessor limit: Integer? - def initialize: ( - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_ - ) -> void + attr_accessor metadata: OpenAI::Models::metadata? - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::content + attr_accessor model: String? 
- type type_ = :input_text + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?limit: Integer?, + ?metadata: OpenAI::Models::metadata?, + ?model: String?, + ?type: :stored_completions + ) -> void - module Type - extend OpenAI::Internal::Type::Enum + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::stored_completions + end - INPUT_TEXT: :input_text + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + end - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::type_] - end - end + type type_ = :completions - type role = :user | :system | :developer + module Type + extend OpenAI::Internal::Type::Enum - module Role - extend OpenAI::Internal::Type::Enum + COMPLETIONS: :completions - USER: :user - SYSTEM: :system - DEVELOPER: :developer + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] + end - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::role] - end + type input_messages = + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - type type_ = :message + module InputMessages + extend OpenAI::Internal::Type::Union - module Type - extend OpenAI::Internal::Type::Enum + type template = + { + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + type: :template + } - MESSAGE: :message + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::type_] - end - end + attr_accessor type: :template - type output_message = + def initialize: ( + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::template + + type template = + OpenAI::Models::Responses::EasyInputMessage + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + + module Template + extend OpenAI::Internal::Type::Union + + type message = { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ } - class OutputMessage 
< OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content + class Message < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content - attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role - attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? + + def type=: ( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ def initialize: ( - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::output_message + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::message type content = - { - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ - } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor text: String + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ + module Content + extend OpenAI::Internal::Type::Union - def initialize: ( - text: String, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_ - ) -> void + type output_text = { text: String, type: :output_text } - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::content + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String - type type_ = :output_text + attr_accessor type: :output_text - module Type - extend OpenAI::Internal::Type::Enum + def initialize: (text: String, ?type: :output_text) -> void - OUTPUT_TEXT: :output_text - - def self?.values: -> 
::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::type_] + def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::output_text end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] end - type role = :assistant + type role = :user | :assistant | :system | :developer module Role extend OpenAI::Internal::Type::Enum + USER: :user ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::role] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] end type type_ = :message @@ -222,11 +247,11 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage] + def self?.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message] end end @@ -249,111 +274,6 @@ module OpenAI def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] end - type source = - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - ?type: :file_content - ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_content - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? 
- - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::content - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_id - end - - type stored_completions = - { - created_after: Integer?, - created_before: Integer?, - limit: Integer?, - metadata: OpenAI::Models::metadata?, - model: String?, - type: :stored_completions - } - - class StoredCompletions < OpenAI::Internal::Type::BaseModel - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor limit: Integer? - - attr_accessor metadata: OpenAI::Models::metadata? - - attr_accessor model: String? - - attr_accessor type: :stored_completions - - def initialize: ( - created_after: Integer?, - created_before: Integer?, - limit: Integer?, - metadata: OpenAI::Models::metadata?, - model: String?, - ?type: :stored_completions - ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::stored_completions - end - - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - end - - type type_ = :completions - - module Type - extend OpenAI::Internal::Type::Enum - - COMPLETIONS: :completions - - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] - end - type sampling_params = { max_completion_tokens: Integer, diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 5a6e54d7..d6a2ba57 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -70,11 +70,350 @@ module OpenAI type data_source = OpenAI::Models::Evals::CreateEvalJSONLRunDataSource | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions module DataSource extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + type completions = + { + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source, + type: :completions, + input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + } + + class Completions < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source + + attr_accessor type: :completions + + attr_reader input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages + + attr_reader model: String? 
+ + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source, + ?input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, + ?type: :completions + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::completions + + type source = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::file_id + end + + type responses = + { + type: :responses, + allow_parallel_tool_calls: bool?, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor allow_parallel_tool_calls: bool? + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor top_p: Float? 
+ + attr_accessor users: ::Array[String]? + + def initialize: ( + ?allow_parallel_tool_calls: bool?, + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::responses + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + end + + type input_messages = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::template + + type template = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::chat_message + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::eval_item + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? 
+ + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::sampling_params + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] end type per_model_usage = diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index 326d6f6d..f38000c8 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -33,11 +33,360 @@ module OpenAI type data_source = OpenAI::Models::Evals::CreateEvalJSONLRunDataSource | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + type create_eval_responses_run_data_source = + { + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source + + attr_accessor type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ + + attr_reader input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::create_eval_responses_run_data_source + + type source = + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::file_id + end + + type responses = + { + type: :responses, + allow_parallel_tool_calls: bool?, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor allow_parallel_tool_calls: bool? + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? 
+ + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?allow_parallel_tool_calls: bool?, + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::responses + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + end + + type type_ = :completions + + module Type + extend OpenAI::Internal::Type::Enum + + COMPLETIONS: :completions + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] + end + + type input_messages = + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::template + + type template = + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::chat_message + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: 
OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? + + def type=: ( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::eval_item + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem] + end + end + + type item_reference 
= + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::sampling_params + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] end end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 3840e50a..571e81b9 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -70,11 +70,350 @@ module OpenAI type data_source = OpenAI::Models::Evals::CreateEvalJSONLRunDataSource | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions module DataSource extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + type completions = + { + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source, + type: :completions, + input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + } + + class Completions < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source + + attr_accessor type: :completions + + attr_reader input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source, + ?input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, + ?type: :completions + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::completions + + type source = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::file_id + end + + type responses = + { + type: :responses, + allow_parallel_tool_calls: bool?, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor allow_parallel_tool_calls: bool? + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?allow_parallel_tool_calls: bool?, + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::responses + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + end + + type input_messages = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::template + + type template = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::chat_message + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::eval_item + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? 
+ + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::sampling_params + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] end type per_model_usage = diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 8a12f80e..60062163 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -70,11 +70,350 @@ module OpenAI type data_source = OpenAI::Models::Evals::CreateEvalJSONLRunDataSource | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions module DataSource extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + type completions = + { + source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source, + type: :completions, + input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + } + + class Completions < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source + + attr_accessor type: :completions + + attr_reader input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams? 
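
The widened data_source union above now includes the inline Completions response model alongside the shared JSONL and completions run data sources. A minimal, non-normative sketch of dispatching on the union (the response model is constructed directly here purely for illustration, with a placeholder file id):

    require "openai"

    # Hedged sketch: build the new variant by hand so the example is
    # self-contained; in practice this comes back from the evals runs API.
    data_source = OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions.new(
      source: {id: "file-abc123", type: :file_id}
    )

    case data_source
    in OpenAI::Models::Evals::CreateEvalJSONLRunDataSource
      puts "jsonl run data source"
    in OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource
      puts "completions run data source"
    in OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions => ds
      # ds.source is itself a union: FileContent | FileID | Responses.
      puts ds.source.inspect
    end

The list and retrieve responses below mirror the same Completions shape under their own namespaces, so the equivalent dispatch applies to each of them.
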
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source, + ?input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, + ?type: :completions + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::completions + + type source = + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::file_id + end + + type responses = + { + type: :responses, + allow_parallel_tool_calls: bool?, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor allow_parallel_tool_calls: bool? + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?allow_parallel_tool_calls: bool?, + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::responses + end + + def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] + end + + type input_messages = + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::template + + type template = + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::chat_message + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::eval_item + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? 
+ + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::sampling_params + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] end type per_model_usage = diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index 5a9a33ba..6714cb0c 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -70,11 +70,350 @@ module OpenAI type data_source = OpenAI::Models::Evals::CreateEvalJSONLRunDataSource | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions module DataSource extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + type completions = + { + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source, + type: :completions, + input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + } + + class Completions < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source + + attr_accessor type: :completions + + attr_reader input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source, + ?input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, + ?type: :completions + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::completions + + type source = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::file_content + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::content + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::file_id + end + + type responses = + { + type: :responses, + allow_parallel_tool_calls: bool?, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor allow_parallel_tool_calls: bool? + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?allow_parallel_tool_calls: bool?, + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::responses + end + + def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] + end + + type input_messages = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::template + + type template = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::chat_message + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::eval_item + + type content = + String + | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text + end + + def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::item_reference + end + + def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? 
+ + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::sampling_params + end + end + + def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] end type per_model_usage = diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs index 142fe252..d57abc16 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs @@ -3,13 +3,19 @@ module OpenAI module FineTuning module Checkpoints type permission_delete_params = - { } & OpenAI::Internal::Type::request_parameters + { fine_tuned_model_checkpoint: String } + & OpenAI::Internal::Type::request_parameters class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - def initialize: (?request_options: OpenAI::request_opts) -> void + attr_accessor fine_tuned_model_checkpoint: String + + def initialize: ( + fine_tuned_model_checkpoint: String, + ?request_options: OpenAI::request_opts + ) -> void def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_delete_params end diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index c5367ca5..30bf203d 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -2,11 +2,12 @@ module OpenAI module Models type image_edit_params = { - image: (Pathname | StringIO), + image: OpenAI::Models::ImageEditParams::image, prompt: String, mask: (Pathname | StringIO), model: OpenAI::Models::ImageEditParams::model?, n: Integer?, + quality: OpenAI::Models::ImageEditParams::quality?, response_format: OpenAI::Models::ImageEditParams::response_format?, size: OpenAI::Models::ImageEditParams::size?, user: String @@ -17,7 +18,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor image: Pathname | StringIO + attr_accessor image: OpenAI::Models::ImageEditParams::image attr_accessor prompt: String @@ -29,6 +30,8 @@ module OpenAI attr_accessor n: Integer? + attr_accessor quality: OpenAI::Models::ImageEditParams::quality? + attr_accessor response_format: OpenAI::Models::ImageEditParams::response_format? attr_accessor size: OpenAI::Models::ImageEditParams::size? 
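
With the permission_delete_params change above, deleting a fine-tuning checkpoint permission now names both the permission and its parent checkpoint. A hedged sketch that mirrors the values used by the updated test further down (both identifiers are placeholders):

    require "openai"

    openai = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

    # The permission id is positional; the owning checkpoint is a
    # required keyword argument.
    response = openai.fine_tuning.checkpoints.permissions.delete(
      "cp_zc4Q7MP6XxulcVzj4MZdwsAB",
      fine_tuned_model_checkpoint: "ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd"
    )
    # response is an
    # OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse.
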
@@ -38,11 +41,12 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: Pathname | StringIO, + image: OpenAI::Models::ImageEditParams::image, prompt: String, ?mask: Pathname | StringIO, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, + ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, ?user: String, @@ -51,6 +55,18 @@ module OpenAI def to_hash: -> OpenAI::Models::image_edit_params + type image = Pathname | StringIO | ::Array[Pathname | StringIO] + + module Image + extend OpenAI::Internal::Type::Union + + def self?.variants: -> [StringIO, ::Array[StringIO]] + + type string_array = ::Array[Pathname | StringIO] + + StringArray: string_array + end + type model = String | OpenAI::Models::image_model module Model @@ -59,6 +75,20 @@ module OpenAI def self?.variants: -> [String, OpenAI::Models::image_model] end + type quality = :standard | :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + STANDARD: :standard + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::quality] + end + type response_format = :url | :b64_json module ResponseFormat diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index 869a9475..c5efaac8 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -3,9 +3,13 @@ module OpenAI type image_generate_params = { prompt: String, + background: OpenAI::Models::ImageGenerateParams::background?, model: OpenAI::Models::ImageGenerateParams::model?, + moderation: OpenAI::Models::ImageGenerateParams::moderation?, n: Integer?, - quality: OpenAI::Models::ImageGenerateParams::quality, + output_compression: Integer?, + output_format: OpenAI::Models::ImageGenerateParams::output_format?, + quality: OpenAI::Models::ImageGenerateParams::quality?, response_format: OpenAI::Models::ImageGenerateParams::response_format?, size: OpenAI::Models::ImageGenerateParams::size?, style: OpenAI::Models::ImageGenerateParams::style?, @@ -19,15 +23,19 @@ module OpenAI attr_accessor prompt: String + attr_accessor background: OpenAI::Models::ImageGenerateParams::background? + attr_accessor model: OpenAI::Models::ImageGenerateParams::model? + attr_accessor moderation: OpenAI::Models::ImageGenerateParams::moderation? + attr_accessor n: Integer? - attr_reader quality: OpenAI::Models::ImageGenerateParams::quality? + attr_accessor output_compression: Integer? + + attr_accessor output_format: OpenAI::Models::ImageGenerateParams::output_format? - def quality=: ( - OpenAI::Models::ImageGenerateParams::quality - ) -> OpenAI::Models::ImageGenerateParams::quality + attr_accessor quality: OpenAI::Models::ImageGenerateParams::quality? attr_accessor response_format: OpenAI::Models::ImageGenerateParams::response_format? 
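
The image surface grows in two directions in these hunks: image_edit_params now accepts a single file or an array of files plus a quality hint, and image_generate_params (continued below) picks up the gpt-image-1 era options such as background, moderation, output_format, and the new sizes. A rough sketch; the client setup, file paths, prompts, and option combinations are illustrative placeholders:

    require "openai"
    require "pathname"

    openai = OpenAI::Client.new

    # Edit with several input images and the new quality parameter.
    edited = openai.images.edit(
      image: [Pathname.new("subject.png"), Pathname.new("backdrop.png")],
      prompt: "Blend the subject into the backdrop",
      quality: :high
    )

    # Generate with the newly added generation options.
    generated = openai.images.generate(
      prompt: "A watercolor lighthouse at dusk",
      model: :"gpt-image-1",
      background: :transparent,
      output_format: :png,
      size: :"1536x1024"
    )

    # usage became an optional reader on ImagesResponse, hence the
    # safe navigation.
    puts generated.usage&.total_tokens
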
@@ -41,9 +49,13 @@ module OpenAI def initialize: ( prompt: String, + ?background: OpenAI::Models::ImageGenerateParams::background?, ?model: OpenAI::Models::ImageGenerateParams::model?, + ?moderation: OpenAI::Models::ImageGenerateParams::moderation?, ?n: Integer?, - ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageGenerateParams::output_format?, + ?quality: OpenAI::Models::ImageGenerateParams::quality?, ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, ?size: OpenAI::Models::ImageGenerateParams::size?, ?style: OpenAI::Models::ImageGenerateParams::style?, @@ -53,6 +65,18 @@ module OpenAI def to_hash: -> OpenAI::Models::image_generate_params + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::background] + end + type model = String | OpenAI::Models::image_model module Model @@ -61,13 +85,40 @@ module OpenAI def self?.variants: -> [String, OpenAI::Models::image_model] end - type quality = :standard | :hd + type moderation = :low | :auto + + module Moderation + extend OpenAI::Internal::Type::Enum + + LOW: :low + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::moderation] + end + + type output_format = :png | :jpeg | :webp + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + JPEG: :jpeg + WEBP: :webp + + def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::output_format] + end + + type quality = :standard | :hd | :low | :medium | :high | :auto module Quality extend OpenAI::Internal::Type::Enum STANDARD: :standard HD: :hd + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto def self?.values: -> ::Array[OpenAI::Models::ImageGenerateParams::quality] end @@ -84,14 +135,24 @@ module OpenAI end type size = - :"256x256" | :"512x512" | :"1024x1024" | :"1792x1024" | :"1024x1792" + :auto + | :"1024x1024" + | :"1536x1024" + | :"1024x1536" + | :"256x256" + | :"512x512" + | :"1792x1024" + | :"1024x1792" module Size extend OpenAI::Internal::Type::Enum + AUTO: :auto + SIZE_1024X1024: :"1024x1024" + SIZE_1536X1024: :"1536x1024" + SIZE_1024X1536: :"1024x1536" SIZE_256X256: :"256x256" SIZE_512X512: :"512x512" - SIZE_1024X1024: :"1024x1024" SIZE_1792X1024: :"1792x1024" SIZE_1024X1792: :"1024x1792" diff --git a/sig/openai/models/image_model.rbs b/sig/openai/models/image_model.rbs index 31f6927a..e41f713d 100644 --- a/sig/openai/models/image_model.rbs +++ b/sig/openai/models/image_model.rbs @@ -1,12 +1,13 @@ module OpenAI module Models - type image_model = :"dall-e-2" | :"dall-e-3" + type image_model = :"dall-e-2" | :"dall-e-3" | :"gpt-image-1" module ImageModel extend OpenAI::Internal::Type::Enum DALL_E_2: :"dall-e-2" DALL_E_3: :"dall-e-3" + GPT_IMAGE_1: :"gpt-image-1" def self?.values: -> ::Array[OpenAI::Models::image_model] end diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index e5608584..5f23336d 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -1,19 +1,74 @@ module OpenAI module Models type images_response = - { created: Integer, data: ::Array[OpenAI::Models::Image] } + { + created: Integer, + data: ::Array[OpenAI::Models::Image], + usage: OpenAI::Models::ImagesResponse::Usage + } class ImagesResponse < OpenAI::Internal::Type::BaseModel attr_accessor 
created: Integer - attr_accessor data: ::Array[OpenAI::Models::Image] + attr_reader data: ::Array[OpenAI::Models::Image]? + + def data=: ( + ::Array[OpenAI::Models::Image] + ) -> ::Array[OpenAI::Models::Image] + + attr_reader usage: OpenAI::Models::ImagesResponse::Usage? + + def usage=: ( + OpenAI::Models::ImagesResponse::Usage + ) -> OpenAI::Models::ImagesResponse::Usage def initialize: ( created: Integer, - data: ::Array[OpenAI::Models::Image] + ?data: ::Array[OpenAI::Models::Image], + ?usage: OpenAI::Models::ImagesResponse::Usage ) -> void def to_hash: -> OpenAI::Models::images_response + + type usage = + { + input_tokens: Integer, + input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor input_tokens: Integer + + attr_accessor input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails + + attr_accessor output_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + input_tokens: Integer, + input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> OpenAI::Models::ImagesResponse::usage + + type input_tokens_details = + { image_tokens: Integer, text_tokens: Integer } + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + attr_accessor image_tokens: Integer + + attr_accessor text_tokens: Integer + + def initialize: (image_tokens: Integer, text_tokens: Integer) -> void + + def to_hash: -> OpenAI::Models::ImagesResponse::Usage::input_tokens_details + end + end end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs new file mode 100644 index 00000000..d970d8a3 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs @@ -0,0 +1,48 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_part_added_event = + { + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + summary_index: Integer, + type: :"response.reasoning_summary_part.added" + } + + class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part + + attr_accessor summary_index: Integer + + attr_accessor type: :"response.reasoning_summary_part.added" + + def initialize: ( + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + summary_index: Integer, + ?type: :"response.reasoning_summary_part.added" + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_part_added_event + + type part = { text: String, type: :summary_text } + + class Part < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :summary_text + + def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::part + end + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs new file 
mode 100644 index 00000000..88fe9e2a --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs @@ -0,0 +1,48 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_part_done_event = + { + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + summary_index: Integer, + type: :"response.reasoning_summary_part.done" + } + + class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part + + attr_accessor summary_index: Integer + + attr_accessor type: :"response.reasoning_summary_part.done" + + def initialize: ( + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + summary_index: Integer, + ?type: :"response.reasoning_summary_part.done" + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_part_done_event + + type part = { text: String, type: :summary_text } + + class Part < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :summary_text + + def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::part + end + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs new file mode 100644 index 00000000..d73f86b6 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_text_delta_event = + { + delta: String, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_text.delta" + } + + class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor delta: String + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor summary_index: Integer + + attr_accessor type: :"response.reasoning_summary_text.delta" + + def initialize: ( + delta: String, + item_id: String, + output_index: Integer, + summary_index: Integer, + ?type: :"response.reasoning_summary_text.delta" + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_text_delta_event + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs new file mode 100644 index 00000000..63b910b7 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_text_done_event = + { + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary_text.done" + } + + class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor summary_index: Integer + + attr_accessor text: String + + attr_accessor type: :"response.reasoning_summary_text.done" + + def initialize: ( + item_id: String, + 
output_index: Integer, + summary_index: Integer, + text: String, + ?type: :"response.reasoning_summary_text.done" + ) -> void + + def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_text_done_event + end + end + end +end diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 18708efb..e81d05de 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -26,6 +26,10 @@ module OpenAI | OpenAI::Models::Responses::ResponseIncompleteEvent | OpenAI::Models::Responses::ResponseOutputItemAddedEvent | OpenAI::Models::Responses::ResponseOutputItemDoneEvent + | OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent + | OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent + | OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent + | OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent | OpenAI::Models::Responses::ResponseRefusalDeltaEvent | OpenAI::Models::Responses::ResponseRefusalDoneEvent | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent @@ -38,7 +42,7 @@ module OpenAI module ResponseStreamEvent extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + def self?.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, 
OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] end end end diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 50be0608..2ef9c298 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -43,7 +43,7 @@ module OpenAI ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts @@ -62,7 +62,7 @@ module OpenAI ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::tool]?, + ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/evals.rbs b/sig/openai/resources/evals.rbs index 84d16747..dd3d6cc5 100644 --- a/sig/openai/resources/evals.rbs +++ b/sig/openai/resources/evals.rbs @@ -8,7 +8,6 @@ module OpenAI testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, - ?share_with_openai: bool, ?request_options: OpenAI::request_opts ) 
-> OpenAI::Models::EvalCreateResponse diff --git a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs index 0033c40d..f36dcbbb 100644 --- a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs +++ b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs @@ -19,7 +19,8 @@ module OpenAI ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse def delete: ( - String fine_tuned_model_checkpoint, + String permission_id, + fine_tuned_model_checkpoint: String, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index e4831410..ec6082ff 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -12,11 +12,12 @@ module OpenAI ) -> OpenAI::Models::ImagesResponse def edit: ( - image: Pathname | StringIO, + image: OpenAI::Models::ImageEditParams::image, prompt: String, ?mask: Pathname | StringIO, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, + ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, ?user: String, @@ -25,9 +26,13 @@ module OpenAI def generate: ( prompt: String, + ?background: OpenAI::Models::ImageGenerateParams::background?, ?model: OpenAI::Models::ImageGenerateParams::model?, + ?moderation: OpenAI::Models::ImageGenerateParams::moderation?, ?n: Integer?, - ?quality: OpenAI::Models::ImageGenerateParams::quality, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageGenerateParams::output_format?, + ?quality: OpenAI::Models::ImageGenerateParams::quality?, ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, ?size: OpenAI::Models::ImageGenerateParams::size?, ?style: OpenAI::Models::ImageGenerateParams::style?, diff --git a/test/openai/resources/evals_test.rb b/test/openai/resources/evals_test.rb index 8d1bdca2..d4cdc458 100644 --- a/test/openai/resources/evals_test.rb +++ b/test/openai/resources/evals_test.rb @@ -31,7 +31,6 @@ def test_create_required_params metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - share_with_openai: OpenAI::Internal::Type::Boolean, testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion]) } end @@ -52,7 +51,6 @@ def test_retrieve metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - share_with_openai: OpenAI::Internal::Type::Boolean, testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion]) } end @@ -73,7 +71,6 @@ def test_update metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - share_with_openai: OpenAI::Internal::Type::Boolean, testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion]) } end @@ -101,7 +98,6 @@ def test_list metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - share_with_openai: OpenAI::Internal::Type::Boolean, testing_criteria: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion]) } end diff --git a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb index eb752c5f..0ed0d4c1 
100644 --- a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb +++ b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb @@ -49,11 +49,12 @@ def test_retrieve end end - def test_delete - skip("OpenAPI spec is slightly incorrect") - + def test_delete_required_params response = - @openai.fine_tuning.checkpoints.permissions.delete("ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd") + @openai.fine_tuning.checkpoints.permissions.delete( + "cp_zc4Q7MP6XxulcVzj4MZdwsAB", + fine_tuned_model_checkpoint: "ft:gpt-4o-mini-2024-07-18:org:weather:B7R9VjQd" + ) assert_pattern do response => OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb index 867dc545..ccfee747 100644 --- a/test/openai/resources/images_test.rb +++ b/test/openai/resources/images_test.rb @@ -13,7 +13,8 @@ def test_create_variation_required_params assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, + usage: OpenAI::Models::ImagesResponse::Usage | nil } end end @@ -29,7 +30,8 @@ def test_edit_required_params assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, + usage: OpenAI::Models::ImagesResponse::Usage | nil } end end @@ -44,7 +46,8 @@ def test_generate_required_params assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, + usage: OpenAI::Models::ImagesResponse::Usage | nil } end end From 09cfc34350ed400bf37af87484b2eda00cd5da95 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 23 Apr 2025 17:56:20 +0000 Subject: [PATCH 153/295] chore(ci): run on more branches and use depot runners --- .github/workflows/ci.yml | 17 +++++++++-------- .github/workflows/publish-gem.yml | 2 +- .github/workflows/release-doctor.yml | 2 +- 3 files changed, 11 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4f7276bb..85872a3c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,18 +1,19 @@ name: CI on: push: - branches: - - main - pull_request: - branches: - - main - - next + branches-ignore: + - 'generated' + - 'codegen/**' + - 'integrated/**' + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: timeout-minutes: 10 name: lint - runs-on: ubuntu-latest + runs-on: depot-ubuntu-24.04 + steps: - uses: actions/checkout@v4 - name: Set up Ruby @@ -28,7 +29,7 @@ jobs: test: timeout-minutes: 10 name: test - runs-on: ubuntu-latest + runs-on: depot-ubuntu-24.04 steps: - uses: actions/checkout@v4 - name: Set up Ruby diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml index d6ba1c4a..5df4a2cd 100644 --- a/.github/workflows/publish-gem.yml +++ b/.github/workflows/publish-gem.yml @@ -11,7 +11,7 @@ on: jobs: publish: name: publish - runs-on: ubuntu-latest + runs-on: depot-ubuntu-24.04 steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 1659237f..87a98158 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -8,7 +8,7 @@ on: jobs: 
   release_doctor:
     name: release doctor
-    runs-on: ubuntu-latest
+    runs-on: depot-ubuntu-24.04
     environment: publish
     if: github.repository == 'openai/openai-ruby' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next')

From 41f4ff04afa58a7d00262ca1b91523c32968597c Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 23 Apr 2025 19:59:49 +0000
Subject: [PATCH 154/295] chore(ci): only use depot for staging repos

---
 .github/workflows/ci.yml             | 4 ++--
 .github/workflows/publish-gem.yml    | 2 +-
 .github/workflows/release-doctor.yml | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 85872a3c..1d8dfaab 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -12,7 +12,7 @@ jobs:
   lint:
     timeout-minutes: 10
     name: lint
-    runs-on: depot-ubuntu-24.04
+    runs-on: ${{ github.repository == 'stainless-sdks/openai-ruby' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}

     steps:
       - uses: actions/checkout@v4
@@ -29,7 +29,7 @@ jobs:
   test:
     timeout-minutes: 10
     name: test
-    runs-on: depot-ubuntu-24.04
+    runs-on: ${{ github.repository == 'stainless-sdks/openai-ruby' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }}
     steps:
       - uses: actions/checkout@v4
       - name: Set up Ruby
diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml
index 5df4a2cd..d6ba1c4a 100644
--- a/.github/workflows/publish-gem.yml
+++ b/.github/workflows/publish-gem.yml
@@ -11,7 +11,7 @@ jobs:
   publish:
     name: publish
-    runs-on: depot-ubuntu-24.04
+    runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4

diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml
index 87a98158..1659237f 100644
--- a/.github/workflows/release-doctor.yml
+++ b/.github/workflows/release-doctor.yml
@@ -8,7 +8,7 @@ on:
 jobs:
   release_doctor:
     name: release doctor
-    runs-on: depot-ubuntu-24.04
+    runs-on: ubuntu-latest
     environment: publish
     if: github.repository == 'openai/openai-ruby' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next')

From b6c966e601e0a4f8665a8d0e348d196794a12559 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 23 Apr 2025 22:09:17 +0000
Subject: [PATCH 155/295] chore: broadly detect json family of content-type
 headers

---
 lib/openai/internal/util.rb       | 13 +++++++++----
 rbi/lib/openai/internal/util.rbi  |  3 +++
 sig/openai/internal/util.rbs      |  3 +++
 test/openai/internal/util_test.rb | 16 ++++++++++++++++
 4 files changed, 31 insertions(+), 4 deletions(-)

diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb
index d2eb9c19..fa6aec64 100644
--- a/lib/openai/internal/util.rb
+++ b/lib/openai/internal/util.rb
@@ -471,6 +471,11 @@ def writable_enum(&blk)
         end
       end

+      # @type [Regexp]
+      JSON_CONTENT = %r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}
+      # @type [Regexp]
+      JSONL_CONTENT = %r{^application/(?:x-)?jsonl}
+
       class << self
         # @api private
         #
@@ -563,9 +568,9 @@ def encode_content(headers, body)
           body = body.inner if body.is_a?(OpenAI::Internal::Util::SerializationAdapter)

           case [content_type, body]
-          in [%r{^application/(?:vnd\.api\+)?json}, Hash | Array | -> { primitive?(_1) }]
+          in [OpenAI::Internal::Util::JSON_CONTENT, Hash | Array | -> { primitive?(_1) }]
             [headers,
JSON.fast_generate(body)]
-          in [%r{^application/(?:x-)?jsonl}, Enumerable] unless body.is_a?(StringIO) || body.is_a?(IO)
+          in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless body.is_a?(StringIO) || body.is_a?(IO)
             [headers, body.lazy.map { JSON.fast_generate(_1) }]
           in [%r{^multipart/form-data}, Hash | Pathname | StringIO | IO]
             boundary, strio = encode_multipart_streaming(body)
@@ -611,7 +616,7 @@ def force_charset!(content_type, text:)
         # @return [Object]
         def decode_content(headers, stream:, suppress_error: false)
           case (content_type = headers["content-type"])
-          in %r{^application/(?:vnd\.api\+)?json}
+          in OpenAI::Internal::Util::JSON_CONTENT
             json = stream.to_a.join
             begin
               JSON.parse(json, symbolize_names: true)
@@ -619,7 +624,7 @@
               raise e unless suppress_error
               json
             end
-          in %r{^application/(?:x-)?jsonl}
+          in OpenAI::Internal::Util::JSONL_CONTENT
            lines = decode_lines(stream)
            chain_fused(lines) do |y|
              lines.each { y << JSON.parse(_1, symbolize_names: true) }
diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi
index 9b88505b..f1ca8cb7 100644
--- a/rbi/lib/openai/internal/util.rbi
+++ b/rbi/lib/openai/internal/util.rbi
@@ -192,6 +192,9 @@ module OpenAI
       def writable_enum(&blk); end
     end

+    JSON_CONTENT = T.let(%r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}, Regexp)
+    JSONL_CONTENT = T.let(%r{^application/(?:x-)?jsonl}, Regexp)
+
     class << self
       # @api private
       sig do
diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs
index 0f040af8..6c038865 100644
--- a/sig/openai/internal/util.rbs
+++ b/sig/openai/internal/util.rbs
@@ -103,6 +103,9 @@ module OpenAI
       (Enumerator::Yielder y) -> void
     } -> Enumerable[String]

+    JSON_CONTENT: Regexp
+    JSONL_CONTENT: Regexp
+
     def self?.write_multipart_chunk: (
       Enumerator::Yielder y,
       boundary: String,
diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb
index e533fdd3..adae5642 100644
--- a/test/openai/internal/util_test.rb
+++ b/test/openai/internal/util_test.rb
@@ -157,6 +157,22 @@ def test_joining_queries
   end
 end

+class OpenAI::Test::RegexMatchTest < Minitest::Test
+  def test_json_content
+    cases = {
+      "application/json" => true,
+      "application/jsonl" => false,
+      "application/vnd.github.v3+json" => true,
+      "application/vnd.api+json" => true
+    }
+    cases.each do |header, verdict|
+      assert_pattern do
+        OpenAI::Internal::Util::JSON_CONTENT.match?(header) => ^verdict
+      end
+    end
+  end
+end
+
 class OpenAI::Test::UtilFormDataEncodingTest < Minitest::Test
   class FakeCGI < CGI
     def initialize(headers, io)

From 5b03d29cc441a7af423402f2d254405f7fa11e84 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 24 Apr 2025 05:15:45 +0000
Subject: [PATCH 156/295] feat: support webmock for testing

---
 Gemfile                                       |   1 +
 Gemfile.lock                                  |  13 +
 Rakefile                                      |   2 +-
 lib/openai/internal/transport/base_client.rb  |   2 +-
 .../transport/pooled_net_requester.rb         |   2 +-
 .../openai/internal/transport/base_client.rbi |   2 +-
 sig/openai/internal/transport/base_client.rbs |   2 +-
 test/openai/client_test.rb                    | 270 ++++++++++--------
 test/openai/test_helper.rb                    |   9 +-
 9 files changed, 181 insertions(+), 122 deletions(-)

diff --git a/Gemfile b/Gemfile
index e5ec01e9..0d76364b 100644
--- a/Gemfile
+++ b/Gemfile
@@ -23,6 +23,7 @@ group :development, :test do
   gem "minitest-hooks"
   gem "minitest-proveit"
   gem "minitest-rg"
+  gem "webmock"
 end

 group :development, :docs do
diff --git a/Gemfile.lock b/Gemfile.lock index
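Before moving on to the WebMock work, it is worth pinning down what the new content-type patterns accept. The `(?!l)` lookahead is the subtle part: without it, the JSON branch would also capture JSONL payloads, which the encoder streams line by line instead. A standalone sketch of the distinction, re-declaring the patterns locally so it runs without the SDK:

```ruby
# Re-declared locally for illustration; mirrors the constants added above.
JSON_CONTENT = %r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}
JSONL_CONTENT = %r{^application/(?:x-)?jsonl}

{
  "application/json" => true,
  "application/jsonl" => false,              # rejected by the (?!l) lookahead
  "application/vnd.api+json" => true,        # vendor-tree JSON
  "application/vnd.github.v3+json" => true,  # multi-segment vendor tree
  "application/x-jsonl" => false
}.each do |header, expect|
  raise "#{header} misclassified" unless JSON_CONTENT.match?(header) == expect
end

JSONL_CONTENT.match?("application/jsonl")   # => true
JSONL_CONTENT.match?("application/x-jsonl") # => true
```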
0891755f..6a6143f4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -29,6 +29,8 @@ GEM minitest (>= 5.1) securerandom (>= 0.3) tzinfo (~> 2.0, >= 2.0.5) + addressable (2.8.7) + public_suffix (>= 2.0.2, < 7.0) ast (2.4.3) async (2.23.1) console (~> 1.29) @@ -45,6 +47,9 @@ GEM fiber-annotation fiber-local (~> 1.1) json + crack (1.0.0) + bigdecimal + rexml csv (3.3.3) drb (2.2.1) erubi (1.13.1) @@ -54,6 +59,7 @@ GEM fiber-storage fiber-storage (1.0.0) fileutils (1.7.3) + hashdiff (1.1.2) i18n (1.14.7) concurrent-ruby (~> 1.0) io-event (1.10.0) @@ -82,6 +88,7 @@ GEM racc prettier_print (1.2.1) prism (1.4.0) + public_suffix (6.0.1) racc (1.8.1) rainbow (3.1.1) rake (13.2.1) @@ -96,6 +103,7 @@ GEM logger redcarpet (3.6.1) regexp_parser (2.10.0) + rexml (3.4.1) rubocop (1.75.1) json (~> 2.3) language_server-protocol (~> 3.17.0.2) @@ -165,6 +173,10 @@ GEM unicode-emoji (~> 4.0, >= 4.0.4) unicode-emoji (4.0.4) uri (1.0.3) + webmock (3.25.1) + addressable (>= 2.8.0) + crack (>= 0.3.2) + hashdiff (>= 0.4.0, < 2.0.0) webrick (1.9.1) yard (0.9.37) yard-sorbet (0.9.0) @@ -191,6 +203,7 @@ DEPENDENCIES syntax_tree syntax_tree-rbs! tapioca + webmock webrick yard diff --git a/Rakefile b/Rakefile index dde46bf9..09924f69 100644 --- a/Rakefile +++ b/Rakefile @@ -31,7 +31,7 @@ multitask(:test) do .map { "require_relative(#{_1.dump});" } .join - ruby(*%w[-w -e], rb, verbose: false) { fail unless _1 } + ruby(*%w[-e], rb, verbose: false) { fail unless _1 } end rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index f3fd4559..8866d7ad 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -153,7 +153,7 @@ def reap_connection!(status, stream:) # @api private # @return [OpenAI::Internal::Transport::PooledNetRequester] - attr_accessor :requester + attr_reader :requester # @api private # diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 67e58347..ce606177 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -176,7 +176,7 @@ def execute(request) conn.finish if !eof && conn&.started? 
closing&.call end - [Integer(response.code), response, (response.body = body)] + [Integer(response.code), response, body] end # @api private diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index 18a8ea43..73785f6c 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -93,7 +93,7 @@ module OpenAI # @api private sig { returns(OpenAI::Internal::Transport::PooledNetRequester) } - attr_accessor :requester + attr_reader :requester # @api private sig do diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index d66d0e62..acb379e9 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -53,7 +53,7 @@ module OpenAI ) -> void # @api private - attr_accessor requester: OpenAI::Internal::Transport::PooledNetRequester + attr_reader requester: OpenAI::Internal::Transport::PooledNetRequester def initialize: ( base_url: String, diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 586ba30e..b084b1b5 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -3,12 +3,33 @@ require_relative "test_helper" class OpenAITest < Minitest::Test + include WebMock::API + + class << self + def test_order = :random + + def run_one_method(...) = Minitest::Runnable.run_one_method(...) + end + + def before_all + super + WebMock.enable! + end + def setup + super Thread.current.thread_variable_set(:mock_sleep, []) end def teardown Thread.current.thread_variable_set(:mock_sleep, nil) + WebMock.reset! + super + end + + def after_all + WebMock.disable! + super end def test_raises_on_missing_non_nullable_opts @@ -18,66 +39,34 @@ def test_raises_on_missing_non_nullable_opts assert_match(/is required/, e.message) end - class MockRequester - # @return [Integer] - attr_reader :response_code - - # @return [Hash{String=>String}] - attr_reader :response_headers - - # @return [Object] - attr_reader :response_data - - # @return [ArrayObject}>] - attr_accessor :attempts - - # @param response_code [Integer] - # @param response_headers [Hash{String=>String}] - # @param response_data [Object] - def initialize(response_code, response_headers, response_data) - @response_code = response_code - @response_headers = response_headers - @response_data = JSON.fast_generate(response_data) - @attempts = [] - end - - # @param req [Hash{Symbol=>Object}] - def execute(req) - # Deep copy the request because it is mutated on each retry. 
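The hand-rolled `MockRequester` being deleted in this hunk intercepted requests inside the SDK; the rewritten tests stub at the HTTP layer instead. A minimal sketch of the new stub-and-assert cycle, using only documented WebMock and Minitest APIs (the endpoint, payload, and retry count mirror the tests but are otherwise illustrative):

```ruby
require "minitest/autorun"
require "webmock/minitest" # auto-enables WebMock around each test

class ExampleStubTest < Minitest::Test
  def test_retries_surface_as_repeated_requests
    # Every POST to this URL returns a 500, so the client retries;
    # WebMock records each attempt for later assertions.
    stub_request(:post, "http://localhost/chat/completions")
      .to_return(status: 500, headers: {"content-type" => "application/json"}, body: "{}")

    client = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key")

    assert_raises(OpenAI::Errors::InternalServerError) do
      client.chat.completions.create(
        messages: [{content: "hi", role: :user}],
        model: :"gpt-4.1"
      )
    end

    # One initial attempt plus the default two retries.
    assert_requested(:post, "http://localhost/chat/completions", times: 3)
  end
end
```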
- attempts.push(Marshal.load(Marshal.dump(req))) - headers = {"content-type" => "application/json", **response_headers} - [response_code, headers, response_data.grapheme_clusters] - end - end - def test_client_default_request_default_retry_attempts - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end - assert_equal(3, requester.attempts.length) + assert_requested(:any, /./, times: 3) end def test_client_given_request_default_retry_attempts - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key", max_retries: 3) - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 3) assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end - assert_equal(4, requester.attempts.length) + assert_requested(:any, /./, times: 4) end def test_client_default_request_given_retry_attempts - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( @@ -87,13 +76,13 @@ def test_client_default_request_given_retry_attempts ) end - assert_equal(4, requester.attempts.length) + assert_requested(:any, /./, times: 4) end def test_client_given_request_given_retry_attempts - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key", max_retries: 3) - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 3) assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( @@ -103,26 +92,34 @@ def test_client_given_request_given_retry_attempts ) end - assert_equal(5, requester.attempts.length) + assert_requested(:any, /./, times: 5) end def test_client_retry_after_seconds - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key", max_retries: 1) - requester = MockRequester.new(500, {"retry-after" => "1.3"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 500, + headers: {"retry-after" => "1.3"}, + body: {} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 1) assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end - assert_equal(2, requester.attempts.length) + 
assert_requested(:any, /./, times: 2) assert_equal(1.3, Thread.current.thread_variable_get(:mock_sleep).last) end def test_client_retry_after_date - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key", max_retries: 1) - requester = MockRequester.new(500, {"retry-after" => (Time.now + 10).httpdate}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 500, + headers: {"retry-after" => (Time.now + 10).httpdate}, + body: {} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 1) assert_raises(OpenAI::Errors::InternalServerError) do Thread.current.thread_variable_set(:time_now, Time.now) @@ -130,43 +127,45 @@ def test_client_retry_after_date Thread.current.thread_variable_set(:time_now, nil) end - assert_equal(2, requester.attempts.length) + assert_requested(:any, /./, times: 2) assert_in_delta(10, Thread.current.thread_variable_get(:mock_sleep).last, 1.0) end def test_client_retry_after_ms - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key", max_retries: 1) - requester = MockRequester.new(500, {"retry-after-ms" => "1300"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 500, + headers: {"retry-after-ms" => "1300"}, + body: {} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 1) assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end - assert_equal(2, requester.attempts.length) + assert_requested(:any, /./, times: 2) assert_equal(1.3, Thread.current.thread_variable_get(:mock_sleep).last) end def test_retry_count_header - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") end - retry_count_headers = requester.attempts.map do - _1.fetch(:headers).fetch("x-stainless-retry-count") + 3.times do + assert_requested(:any, /./, headers: {"x-stainless-retry-count" => _1}) end - - assert_equal(%w[0 1 2], retry_count_headers) end def test_omit_retry_count_header - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( @@ -176,17 +175,15 @@ def test_omit_retry_count_header ) end - retry_count_headers = requester.attempts.map do - _1.fetch(:headers).fetch("x-stainless-retry-count", nil) + assert_requested(:any, /./, times: 3) do + refute_includes(_1.headers.keys.map(&:downcase), "x-stainless-retry-count") end - - assert_equal([nil, nil, nil], retry_count_headers) end def test_overwrite_retry_count_header - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - 
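The retry tests above pin down the `Retry-After` semantics the client honors: a numeric seconds value ("1.3"), an HTTP-date, and the non-standard `retry-after-ms` variant. A hedged sketch of delay derivation consistent with those assertions; this is not the SDK's actual implementation, and the precedence of `retry-after-ms` over `retry-after` is an assumption:

```ruby
require "time"

# Sketch: derive a sleep duration (seconds) from retry headers, matching
# the values the tests above assert.
def retry_delay(headers, now: Time.now)
  if (ms = headers["retry-after-ms"])
    Float(ms) / 1000.0
  elsif (value = headers["retry-after"])
    # Numeric seconds ("1.3") or an HTTP-date; never sleep a negative amount.
    Float(value, exception: false) || [Time.httpdate(value) - now, 0].max
  end
end

retry_delay({"retry-after" => "1.3"})                    # => 1.3
retry_delay({"retry-after-ms" => "1300"})                # => 1.3
retry_delay({"retry-after" => (Time.now + 10).httpdate}) # => roughly 10.0
```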
requester = MockRequester.new(500, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 500, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( @@ -196,17 +193,21 @@ def test_overwrite_retry_count_header ) end - retry_count_headers = requester.attempts.map do - _1.fetch(:headers).fetch("x-stainless-retry-count") - end - - assert_equal(%w[42 42 42], retry_count_headers) + assert_requested(:any, /./, headers: {"x-stainless-retry-count" => "42"}, times: 3) end def test_client_redirect_307 - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(307, {"location" => "/redirected"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 307, + headers: {"location" => "/redirected"}, + body: {} + ) + stub_request(:any, "http://localhost/redirected").to_return( + status: 307, + headers: {"location" => "/redirected"} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( @@ -216,19 +217,30 @@ def test_client_redirect_307 ) end - assert_equal("/redirected", requester.attempts.last.fetch(:url).path) - assert_equal(requester.attempts.first.fetch(:method), requester.attempts.last.fetch(:method)) - assert_equal(requester.attempts.first.fetch(:body), requester.attempts.last.fetch(:body)) - assert_equal( - requester.attempts.first.fetch(:headers)["content-type"], - requester.attempts.last.fetch(:headers)["content-type"] - ) + recorded, = WebMock::RequestRegistry.instance.requested_signatures.hash.first + + assert_requested(:any, "http://localhost/redirected", times: OpenAI::Client::MAX_REDIRECTS) do + assert_equal(recorded.method, _1.method) + assert_equal(recorded.body, _1.body) + assert_equal( + recorded.headers.transform_keys(&:downcase).fetch("content-type"), + _1.headers.transform_keys(&:downcase).fetch("content-type") + ) + end end def test_client_redirect_303 - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(303, {"location" => "/redirected"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 303, + headers: {"location" => "/redirected"}, + body: {} + ) + stub_request(:get, "http://localhost/redirected").to_return( + status: 303, + headers: {"location" => "/redirected"} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( @@ -238,16 +250,25 @@ def test_client_redirect_303 ) end - assert_equal("/redirected", requester.attempts.last.fetch(:url).path) - assert_equal(:get, requester.attempts.last.fetch(:method)) - assert_nil(requester.attempts.last.fetch(:body)) - assert_nil(requester.attempts.last.fetch(:headers)["content-type"]) + assert_requested(:get, "http://localhost/redirected", times: OpenAI::Client::MAX_REDIRECTS) do + headers = _1.headers.keys.map(&:downcase) + refute_includes(headers, "content-type") + assert_nil(_1.body) + end end def test_client_redirect_auth_keep_same_origin - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = 
MockRequester.new(307, {"location" => "/redirected"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 307, + headers: {"location" => "/redirected"}, + body: {} + ) + stub_request(:any, "http://localhost/redirected").to_return( + status: 307, + headers: {"location" => "/redirected"} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( @@ -257,16 +278,28 @@ def test_client_redirect_auth_keep_same_origin ) end - assert_equal( - requester.attempts.first.fetch(:headers)["authorization"], - requester.attempts.last.fetch(:headers)["authorization"] - ) + recorded, = WebMock::RequestRegistry.instance.requested_signatures.hash.first + auth_header = recorded.headers.transform_keys(&:downcase).fetch("authorization") + + assert_equal("Bearer xyz", auth_header) + assert_requested(:any, "http://localhost/redirected", times: OpenAI::Client::MAX_REDIRECTS) do + auth_header = _1.headers.transform_keys(&:downcase).fetch("authorization") + assert_equal("Bearer xyz", auth_header) + end end def test_client_redirect_auth_strip_cross_origin - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(307, {"location" => "https://example.com/redirected"}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json( + status: 307, + headers: {"location" => "https://example.com/redirected"}, + body: {} + ) + stub_request(:any, "https://example.com/redirected").to_return( + status: 307, + headers: {"location" => "https://example.com/redirected"} + ) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( @@ -276,17 +309,22 @@ def test_client_redirect_auth_strip_cross_origin ) end - assert_nil(requester.attempts.last.fetch(:headers)["authorization"]) + assert_requested(:any, "https://example.com/redirected", times: OpenAI::Client::MAX_REDIRECTS) do + headers = _1.headers.keys.map(&:downcase) + refute_includes(headers, "authorization") + end end def test_default_headers - openai = OpenAI::Client.new(base_url: "http://localhost:4010", api_key: "My API Key") - requester = MockRequester.new(200, {}, {}) - openai.requester = requester + stub_request(:post, "http://localhost/chat/completions").to_return_json(status: 200, body: {}) + + openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") - headers = requester.attempts.first.fetch(:headers) - refute_empty(headers["accept"]) - refute_empty(headers["content-type"]) + assert_requested(:any, /./) do |req| + headers = req.headers.transform_keys(&:downcase).fetch_values("accept", "content-type") + headers.each { refute_empty(_1) } + end end end diff --git a/test/openai/test_helper.rb b/test/openai/test_helper.rb index d0a5de2b..98b833aa 100644 --- a/test/openai/test_helper.rb +++ b/test/openai/test_helper.rb @@ -15,6 +15,7 @@ require "minitest/hooks/test" require "minitest/proveit" require "minitest/rg" +require "webmock" require_relative "../../lib/openai" require_relative "resource_namespaces" @@ -44,8 +45,10 @@ def self.now = Thread.current.thread_variable_get(:time_now) || _now class OpenAI::Test::SingletonClient < OpenAI::Client include 
Singleton + TEST_API_BASE_URL = ENV.fetch("TEST_API_BASE_URL", "http://localhost:4010") + def initialize - super(base_url: ENV.fetch("TEST_API_BASE_URL", "http://localhost:4010"), api_key: "My API Key") + super(base_url: OpenAI::Test::SingletonClient::TEST_API_BASE_URL, api_key: "My API Key") end end @@ -72,3 +75,7 @@ def around_all = async? ? Sync { super } : super def around = async? ? Async { super }.wait : super end + +module WebMock + AssertionFailure.error_class = Minitest::Assertion +end From 9b2daf6fa0e2e7108e4d34904047a71f55e76a95 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 24 Apr 2025 07:10:26 +0000 Subject: [PATCH 157/295] chore: show truncated parameter docs in yard --- .../models/audio/speech_create_params.rb | 26 +- lib/openai/models/audio/transcription.rb | 17 +- .../audio/transcription_create_params.rb | 34 ++- .../models/audio/transcription_segment.rb | 35 ++- .../audio/transcription_text_delta_event.rb | 23 +- .../audio/transcription_text_done_event.rb | 23 +- .../models/audio/transcription_verbose.rb | 14 +- lib/openai/models/audio/transcription_word.rb | 8 +- .../models/audio/translation_create_params.rb | 23 +- .../models/audio/translation_verbose.rb | 11 +- .../auto_file_chunking_strategy_param.rb | 2 +- lib/openai/models/batch.rb | 61 +++-- lib/openai/models/batch_create_params.rb | 17 +- lib/openai/models/batch_error.rb | 11 +- lib/openai/models/batch_list_params.rb | 11 +- lib/openai/models/batch_request_counts.rb | 8 +- lib/openai/models/beta/assistant.rb | 61 +++-- .../models/beta/assistant_create_params.rb | 91 +++++-- .../models/beta/assistant_list_params.rb | 19 +- .../models/beta/assistant_stream_event.rb | 176 +++++++++++-- .../models/beta/assistant_tool_choice.rb | 3 +- .../beta/assistant_tool_choice_function.rb | 2 +- .../models/beta/assistant_update_params.rb | 57 ++++- .../models/beta/code_interpreter_tool.rb | 2 +- lib/openai/models/beta/file_search_tool.rb | 26 +- lib/openai/models/beta/function_tool.rb | 3 +- .../models/beta/message_stream_event.rb | 39 ++- .../models/beta/run_step_stream_event.rb | 49 +++- lib/openai/models/beta/run_stream_event.rb | 71 +++++- lib/openai/models/beta/thread.rb | 30 ++- .../beta/thread_create_and_run_params.rb | 168 ++++++++++--- .../models/beta/thread_create_params.rb | 83 +++++-- lib/openai/models/beta/thread_stream_event.rb | 10 +- .../models/beta/thread_update_params.rb | 24 +- .../beta/threads/file_citation_annotation.rb | 10 +- .../threads/file_citation_delta_annotation.rb | 16 +- .../beta/threads/file_path_annotation.rb | 10 +- .../threads/file_path_delta_annotation.rb | 13 +- lib/openai/models/beta/threads/image_file.rb | 10 +- .../beta/threads/image_file_content_block.rb | 3 +- .../models/beta/threads/image_file_delta.rb | 10 +- .../beta/threads/image_file_delta_block.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 10 +- .../beta/threads/image_url_content_block.rb | 3 +- .../models/beta/threads/image_url_delta.rb | 10 +- .../beta/threads/image_url_delta_block.rb | 6 +- lib/openai/models/beta/threads/message.rb | 57 +++-- .../beta/threads/message_create_params.rb | 22 +- .../models/beta/threads/message_delta.rb | 5 +- .../beta/threads/message_delta_event.rb | 8 +- .../beta/threads/message_list_params.rb | 22 +- .../beta/threads/message_update_params.rb | 7 +- .../beta/threads/refusal_content_block.rb | 3 +- .../beta/threads/refusal_delta_block.rb | 6 +- .../required_action_function_tool_call.rb | 19 +- 
lib/openai/models/beta/threads/run.rb | 133 +++++++--- .../models/beta/threads/run_create_params.rb | 100 ++++++-- .../models/beta/threads/run_list_params.rb | 19 +- .../threads/run_submit_tool_outputs_params.rb | 14 +- .../models/beta/threads/run_update_params.rb | 7 +- .../threads/runs/code_interpreter_logs.rb | 8 +- .../runs/code_interpreter_output_image.rb | 13 +- .../runs/code_interpreter_tool_call.rb | 37 ++- .../runs/code_interpreter_tool_call_delta.rb | 26 +- .../threads/runs/file_search_tool_call.rb | 50 +++- .../runs/file_search_tool_call_delta.rb | 15 +- .../beta/threads/runs/function_tool_call.rb | 25 +- .../threads/runs/function_tool_call_delta.rb | 28 ++- .../runs/message_creation_step_details.rb | 5 +- .../models/beta/threads/runs/run_step.rb | 70 ++++-- .../beta/threads/runs/run_step_delta.rb | 2 +- .../beta/threads/runs/run_step_delta_event.rb | 8 +- .../runs/run_step_delta_message_delta.rb | 5 +- .../beta/threads/runs/step_list_params.rb | 24 +- .../beta/threads/runs/step_retrieve_params.rb | 9 +- .../threads/runs/tool_call_delta_object.rb | 9 +- .../threads/runs/tool_calls_step_details.rb | 9 +- lib/openai/models/beta/threads/text.rb | 3 +- .../models/beta/threads/text_content_block.rb | 3 +- .../beta/threads/text_content_block_param.rb | 5 +- lib/openai/models/beta/threads/text_delta.rb | 3 +- .../models/beta/threads/text_delta_block.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 49 ++-- ...chat_completion_assistant_message_param.rb | 42 +++- .../models/chat/chat_completion_audio.rb | 14 +- .../chat/chat_completion_audio_param.rb | 8 +- .../models/chat/chat_completion_chunk.rb | 93 +++++-- .../chat/chat_completion_content_part.rb | 14 +- .../chat_completion_content_part_image.rb | 13 +- ...hat_completion_content_part_input_audio.rb | 12 +- .../chat_completion_content_part_refusal.rb | 5 +- .../chat/chat_completion_content_part_text.rb | 5 +- .../models/chat/chat_completion_deleted.rb | 8 +- ...chat_completion_developer_message_param.rb | 12 +- .../chat_completion_function_call_option.rb | 2 +- .../chat_completion_function_message_param.rb | 8 +- .../models/chat/chat_completion_message.rb | 49 ++-- .../chat/chat_completion_message_tool_call.rb | 18 +- .../chat/chat_completion_named_tool_choice.rb | 5 +- .../chat_completion_prediction_content.rb | 8 +- .../chat/chat_completion_store_message.rb | 2 +- .../chat/chat_completion_stream_options.rb | 5 +- .../chat_completion_system_message_param.rb | 12 +- .../chat/chat_completion_token_logprob.rb | 30 ++- .../models/chat/chat_completion_tool.rb | 3 +- .../chat_completion_tool_message_param.rb | 8 +- .../chat_completion_user_message_param.rb | 12 +- .../models/chat/completion_create_params.rb | 147 ++++++++--- .../models/chat/completion_list_params.rb | 19 +- .../models/chat/completion_update_params.rb | 6 +- .../chat/completions/message_list_params.rb | 13 +- lib/openai/models/comparison_filter.rb | 12 +- lib/openai/models/completion.rb | 24 +- lib/openai/models/completion_choice.rb | 9 +- lib/openai/models/completion_create_params.rb | 65 +++-- lib/openai/models/completion_usage.rb | 33 ++- lib/openai/models/compound_filter.rb | 9 +- .../models/create_embedding_response.rb | 16 +- lib/openai/models/embedding.rb | 12 +- lib/openai/models/embedding_create_params.rb | 23 +- lib/openai/models/eval_create_params.rb | 135 +++++++--- lib/openai/models/eval_create_response.rb | 78 ++++-- .../models/eval_custom_data_source_config.rb | 8 +- lib/openai/models/eval_label_model_grader.rb | 35 ++- 
lib/openai/models/eval_list_params.rb | 16 +- lib/openai/models/eval_list_response.rb | 78 ++++-- lib/openai/models/eval_retrieve_response.rb | 78 ++++-- ...l_stored_completions_data_source_config.rb | 11 +- lib/openai/models/eval_string_check_grader.rb | 14 +- .../models/eval_text_similarity_grader.rb | 21 +- lib/openai/models/eval_update_params.rb | 9 +- lib/openai/models/eval_update_response.rb | 78 ++++-- ...create_eval_completions_run_data_source.rb | 91 +++++-- .../create_eval_jsonl_run_data_source.rb | 13 +- lib/openai/models/evals/eval_api_error.rb | 5 +- .../models/evals/run_cancel_response.rb | 203 ++++++++++----- lib/openai/models/evals/run_create_params.rb | 135 +++++++--- .../models/evals/run_create_response.rb | 203 ++++++++++----- lib/openai/models/evals/run_list_params.rb | 17 +- lib/openai/models/evals/run_list_response.rb | 203 ++++++++++----- .../models/evals/run_retrieve_response.rb | 203 ++++++++++----- .../evals/runs/output_item_list_params.rb | 17 +- .../evals/runs/output_item_list_response.rb | 82 +++--- .../runs/output_item_retrieve_response.rb | 83 ++++--- lib/openai/models/file_create_params.rb | 10 +- lib/openai/models/file_list_params.rb | 18 +- lib/openai/models/file_object.rb | 32 ++- .../checkpoints/permission_create_params.rb | 3 +- .../checkpoints/permission_create_response.rb | 11 +- .../checkpoints/permission_delete_response.rb | 8 +- .../checkpoints/permission_retrieve_params.rb | 12 +- .../permission_retrieve_response.rb | 11 +- .../models/fine_tuning/fine_tuning_job.rb | 141 ++++++++--- .../fine_tuning/fine_tuning_job_event.rb | 20 +- .../fine_tuning_job_wandb_integration.rb | 17 +- ...ine_tuning_job_wandb_integration_object.rb | 10 +- .../models/fine_tuning/job_create_params.rb | 120 ++++++--- .../fine_tuning/job_list_events_params.rb | 6 +- .../models/fine_tuning/job_list_params.rb | 13 +- .../jobs/checkpoint_list_params.rb | 6 +- .../jobs/fine_tuning_job_checkpoint.rb | 20 +- lib/openai/models/function_definition.rb | 18 +- lib/openai/models/image.rb | 13 +- .../models/image_create_variation_params.rb | 26 +- lib/openai/models/image_edit_params.rb | 38 ++- lib/openai/models/image_generate_params.rb | 49 +++- lib/openai/models/images_response.rb | 28 ++- lib/openai/models/model.rb | 11 +- lib/openai/models/moderation.rb | 142 +++++++---- lib/openai/models/moderation_create_params.rb | 10 +- .../models/moderation_create_response.rb | 8 +- .../models/moderation_image_url_input.rb | 7 +- lib/openai/models/moderation_text_input.rb | 5 +- .../other_file_chunking_strategy_object.rb | 2 +- lib/openai/models/reasoning.rb | 11 +- .../models/response_format_json_object.rb | 2 +- .../models/response_format_json_schema.rb | 22 +- lib/openai/models/response_format_text.rb | 2 +- lib/openai/models/responses/computer_tool.rb | 14 +- .../models/responses/easy_input_message.rb | 11 +- .../models/responses/file_search_tool.rb | 25 +- lib/openai/models/responses/function_tool.rb | 17 +- .../responses/input_item_list_params.rb | 18 +- lib/openai/models/responses/response.rb | 78 ++++-- .../responses/response_audio_delta_event.rb | 8 +- .../responses/response_audio_done_event.rb | 5 +- .../response_audio_transcript_delta_event.rb | 8 +- .../response_audio_transcript_done_event.rb | 5 +- ..._code_interpreter_call_code_delta_event.rb | 12 +- ...e_code_interpreter_call_code_done_event.rb | 12 +- ...e_code_interpreter_call_completed_event.rb | 12 +- ...code_interpreter_call_in_progress_event.rb | 12 +- ...ode_interpreter_call_interpreting_event.rb | 12 +- 
.../response_code_interpreter_tool_call.rb | 42 +++- .../responses/response_completed_event.rb | 8 +- .../responses/response_computer_tool_call.rb | 135 +++++++--- ...response_computer_tool_call_output_item.rb | 29 ++- ...se_computer_tool_call_output_screenshot.rb | 12 +- .../response_content_part_added_event.rb | 17 +- .../response_content_part_done_event.rb | 17 +- .../responses/response_create_params.rb | 62 +++-- .../responses/response_created_event.rb | 8 +- lib/openai/models/responses/response_error.rb | 8 +- .../models/responses/response_error_event.rb | 14 +- .../models/responses/response_failed_event.rb | 8 +- ...sponse_file_search_call_completed_event.rb | 12 +- ...onse_file_search_call_in_progress_event.rb | 12 +- ...sponse_file_search_call_searching_event.rb | 12 +- .../response_file_search_tool_call.rb | 35 ++- ...response_format_text_json_schema_config.rb | 18 +- ...nse_function_call_arguments_delta_event.rb | 17 +- ...onse_function_call_arguments_done_event.rb | 9 +- .../responses/response_function_tool_call.rb | 20 +- .../response_function_tool_call_item.rb | 5 +- ...response_function_tool_call_output_item.rb | 18 +- .../responses/response_function_web_search.rb | 11 +- .../responses/response_in_progress_event.rb | 8 +- .../responses/response_incomplete_event.rb | 8 +- .../models/responses/response_input_audio.rb | 11 +- .../models/responses/response_input_file.rb | 14 +- .../models/responses/response_input_image.rb | 14 +- .../models/responses/response_input_item.rb | 69 ++++-- .../responses/response_input_message_item.rb | 17 +- .../models/responses/response_input_text.rb | 8 +- .../models/responses/response_item_list.rb | 14 +- .../models/responses/response_output_audio.rb | 11 +- .../response_output_item_added_event.rb | 11 +- .../response_output_item_done_event.rb | 11 +- .../responses/response_output_message.rb | 17 +- .../responses/response_output_refusal.rb | 8 +- .../models/responses/response_output_text.rb | 53 ++-- .../responses/response_reasoning_item.rb | 22 +- ...onse_reasoning_summary_part_added_event.rb | 23 +- ...ponse_reasoning_summary_part_done_event.rb | 23 +- ...onse_reasoning_summary_text_delta_event.rb | 18 +- ...ponse_reasoning_summary_text_done_event.rb | 18 +- .../responses/response_refusal_delta_event.rb | 17 +- .../responses/response_refusal_done_event.rb | 17 +- .../responses/response_retrieve_params.rb | 6 +- .../response_text_annotation_delta_event.rb | 62 +++-- .../models/responses/response_text_config.rb | 5 +- .../responses/response_text_delta_event.rb | 17 +- .../responses/response_text_done_event.rb | 17 +- lib/openai/models/responses/response_usage.rb | 21 +- ...esponse_web_search_call_completed_event.rb | 12 +- ...ponse_web_search_call_in_progress_event.rb | 12 +- ...esponse_web_search_call_searching_event.rb | 12 +- .../models/responses/tool_choice_function.rb | 5 +- .../models/responses/tool_choice_types.rb | 5 +- .../models/responses/web_search_tool.rb | 26 +- .../models/static_file_chunking_strategy.rb | 10 +- .../static_file_chunking_strategy_object.rb | 3 +- ...tic_file_chunking_strategy_object_param.rb | 3 +- lib/openai/models/upload.rb | 30 ++- lib/openai/models/upload_complete_params.rb | 10 +- lib/openai/models/upload_create_params.rb | 15 +- .../models/uploads/part_create_params.rb | 6 +- lib/openai/models/uploads/upload_part.rb | 11 +- lib/openai/models/vector_store.rb | 57 +++-- .../models/vector_store_create_params.rb | 29 ++- lib/openai/models/vector_store_list_params.rb | 19 +- .../models/vector_store_search_params.rb | 
19 +- .../models/vector_store_search_response.rb | 22 +- .../models/vector_store_update_params.rb | 21 +- .../vector_stores/file_batch_create_params.rb | 14 +- .../file_batch_list_files_params.rb | 24 +- .../vector_stores/file_content_response.rb | 5 +- .../vector_stores/file_create_params.rb | 14 +- .../models/vector_stores/file_list_params.rb | 23 +- .../vector_stores/file_update_params.rb | 7 +- .../models/vector_stores/vector_store_file.rb | 38 ++- .../vector_stores/vector_store_file_batch.rb | 35 ++- lib/openai/resources/audio/speech.rb | 26 +- lib/openai/resources/audio/transcriptions.rb | 68 +++-- lib/openai/resources/audio/translations.rb | 23 +- lib/openai/resources/batches.rb | 34 ++- lib/openai/resources/beta/assistants.rb | 114 ++++++--- lib/openai/resources/beta/threads.rb | 153 +++++++++--- lib/openai/resources/beta/threads/messages.rb | 74 ++++-- lib/openai/resources/beta/threads/runs.rb | 220 ++++++++++++---- .../resources/beta/threads/runs/steps.rb | 45 +++- lib/openai/resources/chat/completions.rb | 234 ++++++++++++------ .../resources/chat/completions/messages.rb | 16 +- lib/openai/resources/completions.rb | 130 +++++++--- lib/openai/resources/embeddings.rb | 23 +- lib/openai/resources/evals.rb | 49 +++- lib/openai/resources/evals/runs.rb | 53 ++-- .../resources/evals/runs/output_items.rb | 33 ++- lib/openai/resources/files.rb | 37 ++- .../fine_tuning/checkpoints/permissions.rb | 39 ++- lib/openai/resources/fine_tuning/jobs.rb | 69 ++++-- .../resources/fine_tuning/jobs/checkpoints.rb | 12 +- lib/openai/resources/images.rb | 113 +++++++-- lib/openai/resources/models.rb | 6 +- lib/openai/resources/moderations.rb | 10 +- lib/openai/resources/responses.rb | 136 +++++++--- lib/openai/resources/responses/input_items.rb | 21 +- lib/openai/resources/uploads.rb | 34 ++- lib/openai/resources/uploads/parts.rb | 9 +- lib/openai/resources/vector_stores.rb | 82 ++++-- .../resources/vector_stores/file_batches.rb | 58 +++-- lib/openai/resources/vector_stores/files.rb | 74 ++++-- .../models/audio/speech_create_params.rbi | 17 +- rbi/lib/openai/models/audio/transcription.rbi | 20 +- .../audio/transcription_create_params.rbi | 30 +++ .../models/audio/transcription_segment.rbi | 17 +- .../audio/transcription_text_delta_event.rbi | 22 +- .../audio/transcription_text_done_event.rbi | 23 +- .../models/audio/transcription_verbose.rbi | 14 +- .../models/audio/transcription_word.rbi | 10 +- .../audio/translation_create_params.rbi | 26 +- .../models/audio/translation_verbose.rbi | 12 +- .../auto_file_chunking_strategy_param.rbi | 6 +- rbi/lib/openai/models/batch.rbi | 30 ++- rbi/lib/openai/models/batch_create_params.rbi | 30 ++- rbi/lib/openai/models/batch_error.rbi | 12 +- rbi/lib/openai/models/batch_list_params.rbi | 13 +- .../openai/models/batch_request_counts.rbi | 10 +- rbi/lib/openai/models/beta/assistant.rbi | 70 +++++- .../models/beta/assistant_create_params.rbi | 120 ++++++++- .../models/beta/assistant_list_params.rbi | 21 +- .../models/beta/assistant_stream_event.rbi | 180 ++++++++++---- .../models/beta/assistant_tool_choice.rbi | 7 +- .../beta/assistant_tool_choice_function.rbi | 6 +- .../models/beta/assistant_update_params.rbi | 74 +++++- .../models/beta/code_interpreter_tool.rbi | 6 +- .../openai/models/beta/file_search_tool.rbi | 38 ++- rbi/lib/openai/models/beta/function_tool.rbi | 7 +- .../models/beta/message_stream_event.rbi | 40 ++- .../models/beta/run_step_stream_event.rbi | 50 ++-- .../openai/models/beta/run_stream_event.rbi | 80 ++++-- rbi/lib/openai/models/beta/thread.rbi | 
39 ++- .../beta/thread_create_and_run_params.rbi | 221 +++++++++++++++-- .../models/beta/thread_create_params.rbi | 120 +++++++-- .../models/beta/thread_stream_event.rbi | 10 +- .../models/beta/thread_update_params.rbi | 34 ++- .../beta/threads/file_citation_annotation.rbi | 17 +- .../file_citation_delta_annotation.rbi | 15 +- .../beta/threads/file_path_annotation.rbi | 17 +- .../threads/file_path_delta_annotation.rbi | 20 +- .../openai/models/beta/threads/image_file.rbi | 11 +- .../beta/threads/image_file_content_block.rbi | 7 +- .../models/beta/threads/image_file_delta.rbi | 11 +- .../beta/threads/image_file_delta_block.rbi | 9 +- .../openai/models/beta/threads/image_url.rbi | 10 +- .../beta/threads/image_url_content_block.rbi | 7 +- .../models/beta/threads/image_url_delta.rbi | 10 +- .../beta/threads/image_url_delta_block.rbi | 9 +- .../openai/models/beta/threads/message.rbi | 45 +++- .../beta/threads/message_create_params.rbi | 37 ++- .../models/beta/threads/message_delta.rbi | 8 +- .../beta/threads/message_delta_event.rbi | 10 +- .../beta/threads/message_list_params.rbi | 23 +- .../beta/threads/message_update_params.rbi | 13 +- .../beta/threads/refusal_content_block.rbi | 7 +- .../beta/threads/refusal_delta_block.rbi | 9 +- .../required_action_function_tool_call.rbi | 22 +- rbi/lib/openai/models/beta/threads/run.rbi | 127 +++++++++- .../models/beta/threads/run_create_params.rbi | 133 +++++++++- .../models/beta/threads/run_list_params.rbi | 21 +- .../run_submit_tool_outputs_params.rbi | 17 +- .../models/beta/threads/run_update_params.rbi | 13 +- .../threads/runs/code_interpreter_logs.rbi | 10 +- .../runs/code_interpreter_output_image.rbi | 16 +- .../runs/code_interpreter_tool_call.rbi | 43 +++- .../runs/code_interpreter_tool_call_delta.rbi | 23 +- .../threads/runs/file_search_tool_call.rbi | 51 +++- .../runs/file_search_tool_call_delta.rbi | 13 +- .../beta/threads/runs/function_tool_call.rbi | 23 +- .../threads/runs/function_tool_call_delta.rbi | 25 +- .../runs/message_creation_step_details.rbi | 13 +- .../models/beta/threads/runs/run_step.rbi | 47 +++- .../beta/threads/runs/run_step_delta.rbi | 6 +- .../threads/runs/run_step_delta_event.rbi | 10 +- .../runs/run_step_delta_message_delta.rbi | 13 +- .../beta/threads/runs/step_list_params.rbi | 23 +- .../threads/runs/step_retrieve_params.rbi | 15 +- .../threads/runs/tool_call_delta_object.rbi | 10 +- .../threads/runs/tool_calls_step_details.rbi | 10 +- rbi/lib/openai/models/beta/threads/text.rbi | 7 +- .../beta/threads/text_content_block.rbi | 7 +- .../beta/threads/text_content_block_param.rbi | 8 +- .../openai/models/beta/threads/text_delta.rbi | 7 +- .../models/beta/threads/text_delta_block.rbi | 9 +- .../openai/models/chat/chat_completion.rbi | 53 +++- ...hat_completion_assistant_message_param.rbi | 32 ++- .../models/chat/chat_completion_audio.rbi | 14 +- .../chat/chat_completion_audio_param.rbi | 10 +- .../models/chat/chat_completion_chunk.rbi | 106 +++++++- .../chat/chat_completion_content_part.rbi | 18 +- .../chat_completion_content_part_image.rbi | 16 +- ...at_completion_content_part_input_audio.rbi | 15 +- .../chat_completion_content_part_refusal.rbi | 8 +- .../chat_completion_content_part_text.rbi | 8 +- .../models/chat/chat_completion_deleted.rbi | 10 +- ...hat_completion_developer_message_param.rbi | 11 +- .../chat_completion_function_call_option.rbi | 6 +- ...chat_completion_function_message_param.rbi | 10 +- .../models/chat/chat_completion_message.rbi | 46 +++- .../chat_completion_message_tool_call.rbi | 21 +- 
.../chat_completion_named_tool_choice.rbi | 13 +- .../chat_completion_prediction_content.rbi | 11 +- .../chat/chat_completion_store_message.rbi | 6 +- .../chat/chat_completion_stream_options.rbi | 12 +- .../chat_completion_system_message_param.rbi | 11 +- .../chat/chat_completion_token_logprob.rbi | 34 ++- .../models/chat/chat_completion_tool.rbi | 7 +- .../chat_completion_tool_message_param.rbi | 10 +- .../chat_completion_user_message_param.rbi | 11 +- .../models/chat/completion_create_params.rbi | 207 +++++++++++++++- .../models/chat/completion_list_params.rbi | 18 +- .../models/chat/completion_update_params.rbi | 12 +- .../chat/completions/message_list_params.rbi | 12 +- rbi/lib/openai/models/comparison_filter.rbi | 18 +- rbi/lib/openai/models/completion.rbi | 14 +- rbi/lib/openai/models/completion_choice.rbi | 12 +- .../models/completion_create_params.rbi | 88 +++++++ rbi/lib/openai/models/completion_usage.rbi | 25 +- rbi/lib/openai/models/compound_filter.rbi | 9 +- .../models/create_embedding_response.rbi | 20 +- rbi/lib/openai/models/embedding.rbi | 12 +- .../openai/models/embedding_create_params.rbi | 30 ++- rbi/lib/openai/models/eval_create_params.rbi | 125 ++++++++-- .../openai/models/eval_create_response.rbi | 68 ++++- .../models/eval_custom_data_source_config.rbi | 9 +- .../openai/models/eval_label_model_grader.rbi | 34 ++- rbi/lib/openai/models/eval_list_params.rbi | 15 +- rbi/lib/openai/models/eval_list_response.rbi | 68 ++++- .../openai/models/eval_retrieve_response.rbi | 68 ++++- ..._stored_completions_data_source_config.rbi | 16 +- .../models/eval_string_check_grader.rbi | 14 +- .../models/eval_text_similarity_grader.rbi | 11 +- rbi/lib/openai/models/eval_update_params.rbi | 14 +- .../openai/models/eval_update_response.rbi | 68 ++++- ...reate_eval_completions_run_data_source.rbi | 91 +++++-- .../create_eval_jsonl_run_data_source.rbi | 23 +- .../openai/models/evals/eval_api_error.rbi | 8 +- .../models/evals/run_cancel_response.rbi | 156 ++++++++++-- .../openai/models/evals/run_create_params.rbi | 120 +++++++-- .../models/evals/run_create_response.rbi | 156 ++++++++++-- .../openai/models/evals/run_list_params.rbi | 15 +- .../openai/models/evals/run_list_response.rbi | 156 ++++++++++-- .../models/evals/run_retrieve_response.rbi | 156 ++++++++++-- .../evals/runs/output_item_list_params.rbi | 16 +- .../evals/runs/output_item_list_response.rbi | 48 +++- .../runs/output_item_retrieve_response.rbi | 48 +++- rbi/lib/openai/models/file_create_params.rbi | 12 +- rbi/lib/openai/models/file_list_params.rbi | 18 +- rbi/lib/openai/models/file_object.rbi | 13 + .../checkpoints/permission_create_params.rbi | 7 +- .../permission_create_response.rbi | 12 +- .../permission_delete_response.rbi | 10 +- .../permission_retrieve_params.rbi | 13 +- .../permission_retrieve_response.rbi | 12 +- .../models/fine_tuning/fine_tuning_job.rbi | 111 +++++++-- .../fine_tuning/fine_tuning_job_event.rbi | 11 +- .../fine_tuning_job_wandb_integration.rbi | 17 +- ...ne_tuning_job_wandb_integration_object.rbi | 11 +- .../models/fine_tuning/job_create_params.rbi | 142 +++++++++-- .../fine_tuning/job_list_events_params.rbi | 9 +- .../models/fine_tuning/job_list_params.rbi | 12 +- .../jobs/checkpoint_list_params.rbi | 9 +- .../jobs/fine_tuning_job_checkpoint.rbi | 7 + rbi/lib/openai/models/function_definition.rbi | 24 +- rbi/lib/openai/models/image.rbi | 14 +- .../models/image_create_variation_params.rbi | 17 +- rbi/lib/openai/models/image_edit_params.rbi | 27 ++ .../openai/models/image_generate_params.rbi | 44 ++++ 
rbi/lib/openai/models/images_response.rbi | 30 ++- rbi/lib/openai/models/model.rbi | 12 +- rbi/lib/openai/models/moderation.rbi | 68 ++++- .../models/moderation_create_params.rbi | 13 +- .../models/moderation_create_response.rbi | 10 +- .../models/moderation_image_url_input.rbi | 14 +- .../openai/models/moderation_text_input.rbi | 8 +- .../other_file_chunking_strategy_object.rbi | 6 +- rbi/lib/openai/models/reasoning.rbi | 21 +- .../models/response_format_json_object.rbi | 6 +- .../models/response_format_json_schema.rbi | 27 +- .../openai/models/response_format_text.rbi | 6 +- .../openai/models/responses/computer_tool.rbi | 12 +- .../models/responses/easy_input_message.rbi | 12 +- .../models/responses/file_search_tool.rbi | 20 +- .../openai/models/responses/function_tool.rbi | 15 +- .../responses/input_item_list_params.rbi | 20 +- rbi/lib/openai/models/responses/response.rbi | 108 +++++++- .../responses/response_audio_delta_event.rbi | 8 +- .../responses/response_audio_done_event.rbi | 6 +- .../response_audio_transcript_delta_event.rbi | 8 +- .../response_audio_transcript_done_event.rbi | 6 +- ...code_interpreter_call_code_delta_event.rbi | 10 +- ..._code_interpreter_call_code_done_event.rbi | 10 +- ..._code_interpreter_call_completed_event.rbi | 7 +- ...ode_interpreter_call_in_progress_event.rbi | 7 +- ...de_interpreter_call_interpreting_event.rbi | 7 +- .../response_code_interpreter_tool_call.rbi | 37 ++- .../responses/response_completed_event.rbi | 8 +- .../responses/response_computer_tool_call.rbi | 136 ++++++++-- ...esponse_computer_tool_call_output_item.rbi | 22 +- ...e_computer_tool_call_output_screenshot.rbi | 11 +- .../response_content_part_added_event.rbi | 15 +- .../response_content_part_done_event.rbi | 14 +- .../responses/response_create_params.rbi | 104 ++++++++ .../responses/response_created_event.rbi | 8 +- .../models/responses/response_error.rbi | 8 +- .../models/responses/response_error_event.rbi | 12 +- .../responses/response_failed_event.rbi | 8 +- ...ponse_file_search_call_completed_event.rbi | 10 +- ...nse_file_search_call_in_progress_event.rbi | 10 +- ...ponse_file_search_call_searching_event.rbi | 10 +- .../response_file_search_tool_call.rbi | 33 ++- ...esponse_format_text_json_schema_config.rbi | 21 +- ...se_function_call_arguments_delta_event.rbi | 12 +- ...nse_function_call_arguments_done_event.rbi | 11 +- .../responses/response_function_tool_call.rbi | 17 +- .../response_function_tool_call_item.rbi | 6 +- ...esponse_function_tool_call_output_item.rbi | 15 +- .../response_function_web_search.rbi | 10 +- .../responses/response_in_progress_event.rbi | 8 +- .../responses/response_incomplete_event.rbi | 8 +- .../models/responses/response_input_audio.rbi | 10 +- .../models/responses/response_input_file.rbi | 12 +- .../models/responses/response_input_image.rbi | 14 +- .../models/responses/response_input_item.rbi | 60 ++++- .../responses/response_input_message_item.rbi | 16 +- .../models/responses/response_input_text.rbi | 8 +- .../models/responses/response_item_list.rbi | 14 +- .../responses/response_output_audio.rbi | 10 +- .../response_output_item_added_event.rbi | 10 +- .../response_output_item_done_event.rbi | 10 +- .../responses/response_output_message.rbi | 15 +- .../responses/response_output_refusal.rbi | 8 +- .../models/responses/response_output_text.rbi | 44 +++- .../responses/response_reasoning_item.rbi | 21 +- ...nse_reasoning_summary_part_added_event.rbi | 17 +- ...onse_reasoning_summary_part_done_event.rbi | 17 +- 
...nse_reasoning_summary_text_delta_event.rbi | 9 +- ...onse_reasoning_summary_text_done_event.rbi | 9 +- .../response_refusal_delta_event.rbi | 14 +- .../responses/response_refusal_done_event.rbi | 14 +- .../responses/response_retrieve_params.rbi | 8 +- .../response_text_annotation_delta_event.rbi | 40 ++- .../models/responses/response_text_config.rbi | 18 +- .../responses/response_text_delta_event.rbi | 15 +- .../responses/response_text_done_event.rbi | 14 +- .../models/responses/response_usage.rbi | 22 +- ...sponse_web_search_call_completed_event.rbi | 10 +- ...onse_web_search_call_in_progress_event.rbi | 10 +- ...sponse_web_search_call_searching_event.rbi | 10 +- .../models/responses/tool_choice_function.rbi | 8 +- .../models/responses/tool_choice_types.rbi | 13 +- .../models/responses/web_search_tool.rbi | 29 ++- .../models/static_file_chunking_strategy.rbi | 11 +- .../static_file_chunking_strategy_object.rbi | 7 +- ...ic_file_chunking_strategy_object_param.rbi | 7 +- rbi/lib/openai/models/upload.rbi | 15 +- .../openai/models/upload_complete_params.rbi | 10 +- .../openai/models/upload_create_params.rbi | 19 +- .../models/uploads/part_create_params.rbi | 7 +- rbi/lib/openai/models/uploads/upload_part.rbi | 12 +- rbi/lib/openai/models/vector_store.rbi | 40 ++- .../models/vector_store_create_params.rbi | 26 +- .../models/vector_store_list_params.rbi | 21 +- .../models/vector_store_search_params.rbi | 10 +- .../models/vector_store_search_response.rbi | 26 +- .../models/vector_store_update_params.rbi | 25 +- .../file_batch_create_params.rbi | 18 +- .../file_batch_list_files_params.rbi | 17 +- .../vector_stores/file_content_response.rbi | 8 +- .../vector_stores/file_create_params.rbi | 18 +- .../models/vector_stores/file_list_params.rbi | 23 +- .../vector_stores/file_update_params.rbi | 12 +- .../vector_stores/vector_store_file.rbi | 28 ++- .../vector_stores/vector_store_file_batch.rbi | 28 ++- 574 files changed, 13164 insertions(+), 3670 deletions(-) diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 114d8fa3..5ab05354 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -52,12 +52,26 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel optional :speed, Float # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) - # @param input [String] - # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] - # @param instructions [String] - # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] - # @param speed [Float] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::SpeechCreateParams} for more details. + # + # @param input [String] The text to generate audio for. The maximum length is 4096 characters. + # + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # ... + # + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # ... + # + # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not + # ... 
+ #
+ # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to generate audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav
+ # ...
+ #
+ # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]

# One of the available [TTS models](https://platform.openai.com/docs/models#tts):
diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb
index 96e65045..7f74deeb 100644
--- a/lib/openai/models/audio/transcription.rb
+++ b/lib/openai/models/audio/transcription.rb
@@ -19,11 +19,16 @@ class Transcription < OpenAI::Internal::Type::BaseModel
optional :logprobs,
-> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] }

# @!method initialize(text:, logprobs: nil)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Audio::Transcription} for more details.
+ #
# Represents a transcription response returned by model, based on the provided
# input.
#
- # @param text [String]
- # @param logprobs [Array]
+ # @param text [String] The transcribed text.
+ #
+ # @param logprobs [Array] The log probabilities of the tokens in the transcription. Only returned with the
+ # ...

class Logprob < OpenAI::Internal::Type::BaseModel
# @!attribute token
@@ -45,9 +50,11 @@ class Logprob < OpenAI::Internal::Type::BaseModel
optional :logprob, Float

# @!method initialize(token: nil, bytes: nil, logprob: nil)
- # @param token [String]
- # @param bytes [Array]
- # @param logprob [Float]
+ # @param token [String] The token in the transcription.
+ #
+ # @param bytes [Array] The bytes of the token.
+ #
+ # @param logprob [Float] The log probability of the token.
end
end
end
diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb
index 4377a2ab..ccb71b14 100644
--- a/lib/openai/models/audio/transcription_create_params.rb
+++ b/lib/openai/models/audio/transcription_create_params.rb
@@ -83,14 +83,32 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel
-> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] }

# @!method initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
- # @param file [Pathname, StringIO]
- # @param model [String, Symbol, OpenAI::Models::AudioModel]
- # @param include [Array]
- # @param language [String]
- # @param prompt [String]
- # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat]
- # @param temperature [Float]
- # @param timestamp_granularities [Array]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details.
+ #
+ # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl
+ # ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
+ # ...
+ #
+ # @param include [Array] Additional information to include in the transcription response. ...
+ #
+ # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt
+ # ...
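As a usage sketch for the speech parameters documented in the hunk above: the keyword arguments mirror the `@!method initialize` signature on `SpeechCreateParams`, while the client setup and the `audio.speech.create` resource method are assumptions made here for illustration, not something this patch defines.

```ruby
require "openai"

# Hypothetical client setup; the resource method name below is an
# assumption, not confirmed by this patch.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

audio = client.audio.speech.create(
  input: "The quick brown fox jumped over the lazy dog.", # max 4096 characters
  model: "tts-1",        # one of the available TTS models
  voice: "alloy",        # supported voices include `alloy`, `ash`, ...
  response_format: :mp3, # the format to generate audio in
  speed: 1.0             # 0.25..4.0; 1.0 is the default
)

# Assuming the call returns the raw audio bytes:
File.binwrite("speech.mp3", audio)
```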
+ # + # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment + # ... + # + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # ... + # + # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # ... + # + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. The options are `gpt-4o-transcribe`, diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 19923d2d..26c61c14 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -68,16 +68,31 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel required :tokens, OpenAI::Internal::Type::ArrayOf[Integer] # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) - # @param id [Integer] - # @param avg_logprob [Float] - # @param compression_ratio [Float] - # @param end_ [Float] - # @param no_speech_prob [Float] - # @param seek [Integer] - # @param start [Float] - # @param temperature [Float] - # @param text [String] - # @param tokens [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::TranscriptionSegment} for more details. + # + # @param id [Integer] Unique identifier of the segment. + # + # @param avg_logprob [Float] Average logprob of the segment. If the value is lower than -1, consider the logp + # ... + # + # @param compression_ratio [Float] Compression ratio of the segment. If the value is greater than 2.4, consider the + # ... + # + # @param end_ [Float] End time of the segment in seconds. + # + # @param no_speech_prob [Float] Probability of no speech in the segment. If the value is higher than 1.0 and the + # ... + # + # @param seek [Integer] Seek offset of the segment. + # + # @param start [Float] Start time of the segment in seconds. + # + # @param temperature [Float] Temperature parameter used for generating the segment. + # + # @param text [String] Text content of the segment. + # + # @param tokens [Array] Array of token IDs for the text content. end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 7f9705d1..774f99c8 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -26,14 +26,20 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] } # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent} for more details. + # # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. 
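For the transcription parameters above, a minimal request sketch: `file` accepts a `Pathname` or `StringIO` per the `@param` docs, and the `audio.transcriptions.create` call shape is an assumption.

```ruby
require "pathname"

# Minimal sketch, assuming the same hypothetical client as above.
transcription = client.audio.transcriptions.create(
  file: Pathname("meeting.flac"),
  model: "whisper-1",
  language: "en",                            # ISO-639-1 hint improves accuracy
  response_format: :verbose_json,            # needed for segment/word detail
  timestamp_granularities: [:segment, :word]
)

puts transcription.text
```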
# - # @param delta [String] - # @param logprobs [Array] - # @param type [Symbol, :"transcript.text.delta"] + # @param delta [String] The text delta that was additionally transcribed. ... + # + # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription + # ... + # + # @param type [Symbol, :"transcript.text.delta"] The type of the event. Always `transcript.text.delta`. ... class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -55,9 +61,14 @@ class Logprob < OpenAI::Internal::Type::BaseModel optional :logprob, Float # @!method initialize(token: nil, bytes: nil, logprob: nil) - # @param token [String] - # @param bytes [Array] - # @param logprob [Float] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. + # + # @param token [String] The token that was used to generate the log probability. ... + # + # @param bytes [Array] The bytes that were used to generate the log probability. ... + # + # @param logprob [Float] The log probability of the token. ... end end end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index be7eb322..99dd1045 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -27,14 +27,20 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] } # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::TranscriptionTextDoneEvent} for more details. + # # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. # - # @param text [String] - # @param logprobs [Array] - # @param type [Symbol, :"transcript.text.done"] + # @param text [String] The text that was transcribed. ... + # + # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ + # ... + # + # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. ... class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -56,9 +62,14 @@ class Logprob < OpenAI::Internal::Type::BaseModel optional :logprob, Float # @!method initialize(token: nil, bytes: nil, logprob: nil) - # @param token [String] - # @param bytes [Array] - # @param logprob [Float] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob} for more details. + # + # @param token [String] The token that was used to generate the log probability. ... + # + # @param bytes [Array] The bytes that were used to generate the log probability. ... + # + # @param logprob [Float] The log probability of the token. ... 
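The two event classes above carry streamed transcription output. A dispatch sketch follows; the `stream: true` call shape is an assumption, while the event classes and their `delta`/`text` fields come from this patch.

```ruby
# Hypothetical streaming call; only the event classes are defined in this patch.
stream = client.audio.transcriptions.create(
  file: Pathname("meeting.flac"),
  model: "gpt-4o-transcribe",
  stream: true
)

stream.each do |event|
  case event
  when OpenAI::Models::Audio::TranscriptionTextDeltaEvent
    print event.delta             # incremental text as it is transcribed
  when OpenAI::Models::Audio::TranscriptionTextDoneEvent
    puts "\nfinal: #{event.text}" # the complete transcription
  end
end
```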
end
end
end
diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb
index ae9e3c77..361a380c 100644
--- a/lib/openai/models/audio/transcription_verbose.rb
+++ b/lib/openai/models/audio/transcription_verbose.rb
@@ -38,11 +38,15 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel
# Represents a verbose json transcription response returned by model, based on the
# provided input.
#
- # @param duration [Float]
- # @param language [String]
- # @param text [String]
- # @param segments [Array]
- # @param words [Array]
+ # @param duration [Float] The duration of the input audio.
+ #
+ # @param language [String] The language of the input audio.
+ #
+ # @param text [String] The transcribed text.
+ #
+ # @param segments [Array] Segments of the transcribed text and their corresponding details.
+ #
+ # @param words [Array] Extracted words and their corresponding timestamps.
end
end
end
diff --git a/lib/openai/models/audio/transcription_word.rb b/lib/openai/models/audio/transcription_word.rb
index f7f973cd..b9e5da59 100644
--- a/lib/openai/models/audio/transcription_word.rb
+++ b/lib/openai/models/audio/transcription_word.rb
@@ -23,9 +23,11 @@ class TranscriptionWord < OpenAI::Internal::Type::BaseModel
required :word, String

# @!method initialize(end_:, start:, word:)
- # @param end_ [Float]
- # @param start [Float]
- # @param word [String]
+ # @param end_ [Float] End time of the word in seconds.
+ #
+ # @param start [Float] Start time of the word in seconds.
+ #
+ # @param word [String] The text content of the word.
end
end
end
diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb
index ce70c85f..a933aaa9 100644
--- a/lib/openai/models/audio/translation_create_params.rb
+++ b/lib/openai/models/audio/translation_create_params.rb
@@ -49,11 +49,24 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel
optional :temperature, Float

# @!method initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {})
- # @param file [Pathname, StringIO]
- # @param model [String, Symbol, OpenAI::Models::AudioModel]
- # @param prompt [String]
- # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat]
- # @param temperature [Float]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Audio::TranslationCreateParams} for more details.
+ #
+ # @param file [Pathname, StringIO] The audio file object (not file name) to translate, in one of these formats: flac,
+ # ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh
+ # ...
+ #
+ # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
+ # ...
+ #
+ # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
+ # ...
+ #
+ # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]

# ID of the model to use.
Only `whisper-1` (which is powered by our open source diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index c5c9c54c..8da3a73b 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -29,10 +29,13 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } # @!method initialize(duration:, language:, text:, segments: nil) - # @param duration [Float] - # @param language [String] - # @param text [String] - # @param segments [Array] + # @param duration [Float] The duration of the input audio. + # + # @param language [String] The language of the output translation (always `english`). + # + # @param text [String] The translated text. + # + # @param segments [Array] Segments of the translated text and their corresponding details. end end end diff --git a/lib/openai/models/auto_file_chunking_strategy_param.rb b/lib/openai/models/auto_file_chunking_strategy_param.rb index 9065ad1c..0bb7685f 100644 --- a/lib/openai/models/auto_file_chunking_strategy_param.rb +++ b/lib/openai/models/auto_file_chunking_strategy_param.rb @@ -13,7 +13,7 @@ class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. # - # @param type [Symbol, :auto] + # @param type [Symbol, :auto] Always `auto`. end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index d8a84818..b2ceaa9b 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -128,26 +128,48 @@ class Batch < OpenAI::Internal::Type::BaseModel optional :request_counts, -> { OpenAI::Models::BatchRequestCounts } # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) + # Some parameter documentations has been truncated, see {OpenAI::Models::Batch} + # for more details. + # # @param id [String] - # @param completion_window [String] - # @param created_at [Integer] - # @param endpoint [String] - # @param input_file_id [String] - # @param status [Symbol, OpenAI::Models::Batch::Status] - # @param cancelled_at [Integer] - # @param cancelling_at [Integer] - # @param completed_at [Integer] - # @param error_file_id [String] + # + # @param completion_window [String] The time frame within which the batch should be processed. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the batch was created. + # + # @param endpoint [String] The OpenAI API endpoint used by the batch. + # + # @param input_file_id [String] The ID of the input file for the batch. + # + # @param status [Symbol, OpenAI::Models::Batch::Status] The current status of the batch. + # + # @param cancelled_at [Integer] The Unix timestamp (in seconds) for when the batch was cancelled. + # + # @param cancelling_at [Integer] The Unix timestamp (in seconds) for when the batch started cancelling. + # + # @param completed_at [Integer] The Unix timestamp (in seconds) for when the batch was completed. + # + # @param error_file_id [String] The ID of the file containing the outputs of requests with errors. 
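A short usage sketch for the translation parameters documented above (only `whisper-1` is supported per its model doc); the `audio.translations.create` call shape is an assumption, while the `start`/`end_`/`text` segment accessors come from `TranscriptionSegment` in this patch.

```ruby
# Minimal sketch, same hypothetical client as above.
translation = client.audio.translations.create(
  file: Pathname("interview_es.mp3"),
  model: "whisper-1",
  response_format: :verbose_json,
  temperature: 0.2
)

puts translation.text # output is always English
translation.segments&.each do |seg|
  puts "#{seg.start}..#{seg.end_}: #{seg.text}"
end
```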
+ # # @param errors [OpenAI::Models::Batch::Errors] - # @param expired_at [Integer] - # @param expires_at [Integer] - # @param failed_at [Integer] - # @param finalizing_at [Integer] - # @param in_progress_at [Integer] - # @param metadata [Hash{Symbol=>String}, nil] - # @param output_file_id [String] - # @param request_counts [OpenAI::Models::BatchRequestCounts] - # @param object [Symbol, :batch] + # + # @param expired_at [Integer] The Unix timestamp (in seconds) for when the batch expired. + # + # @param expires_at [Integer] The Unix timestamp (in seconds) for when the batch will expire. + # + # @param failed_at [Integer] The Unix timestamp (in seconds) for when the batch failed. + # + # @param finalizing_at [Integer] The Unix timestamp (in seconds) for when the batch started finalizing. + # + # @param in_progress_at [Integer] The Unix timestamp (in seconds) for when the batch started processing. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param output_file_id [String] The ID of the file containing the outputs of successfully executed requests. + # + # @param request_counts [OpenAI::Models::BatchRequestCounts] The request counts for different statuses within the batch. + # + # @param object [Symbol, :batch] The object type, which is always `batch`. # The current status of the batch. # @@ -183,7 +205,8 @@ class Errors < OpenAI::Internal::Type::BaseModel # @!method initialize(data: nil, object: nil) # @param data [Array] - # @param object [String] + # + # @param object [String] The object type, which is always `list`. end end end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 9c5654db..e72da75d 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -49,10 +49,19 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) - # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] - # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] - # @param input_file_id [String] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::BatchCreateParams} for more details. + # + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` + # ... + # + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # ... + # + # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The time frame within which the batch should be processed. 
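A sketch of creating a batch with the parameters documented above. The file upload and `batches.create` calls are assumptions, as are the exact enum values; the docs only confirm that `24h` is the sole completion window and that one endpoint is used for all requests in a batch.

```ruby
# `input_file_id` must reference an uploaded `.jsonl` file of requests;
# this upload call is illustrative only.
input_file = client.files.create(file: Pathname("requests.jsonl"), purpose: "batch")

batch = client.batches.create(
  completion_window: :"24h",          # only `24h` is currently supported
  endpoint: :"/v1/chat/completions",  # one endpoint per batch
  input_file_id: input_file.id,
  metadata: {project: "nightly-eval"} # up to 16 key-value pairs
)

puts "#{batch.id}: #{batch.status}"
```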
Currently only `24h` diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 9d629686..538e1183 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -28,10 +28,13 @@ class BatchError < OpenAI::Internal::Type::BaseModel optional :param, String, nil?: true # @!method initialize(code: nil, line: nil, message: nil, param: nil) - # @param code [String] - # @param line [Integer, nil] - # @param message [String] - # @param param [String, nil] + # @param code [String] An error code identifying the error type. + # + # @param line [Integer, nil] The line number of the input file where the error occurred, if applicable. + # + # @param message [String] A human-readable message providing more details about the error. + # + # @param param [String, nil] The name of the parameter that caused the error, if applicable. end end end diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 388dc273..f4b386d2 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -24,8 +24,15 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel optional :limit, Integer # @!method initialize(after: nil, limit: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::BatchListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/batch_request_counts.rb b/lib/openai/models/batch_request_counts.rb index dce46757..e8e7caf0 100644 --- a/lib/openai/models/batch_request_counts.rb +++ b/lib/openai/models/batch_request_counts.rb @@ -24,9 +24,11 @@ class BatchRequestCounts < OpenAI::Internal::Type::BaseModel # @!method initialize(completed:, failed:, total:) # The request counts for different statuses within the batch. # - # @param completed [Integer] - # @param failed [Integer] - # @param total [Integer] + # @param completed [Integer] Number of requests that have been completed successfully. + # + # @param failed [Integer] Number of requests that have failed. + # + # @param total [Integer] Total number of requests in the batch. end end end diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index a4069e36..1923fa0e 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -124,21 +124,43 @@ class Assistant < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Assistant} for more details. + # # Represents an `assistant` that can call the model and use tools. 
#
- # @param id [String]
- # @param created_at [Integer]
- # @param description [String, nil]
- # @param instructions [String, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param model [String]
- # @param name [String, nil]
- # @param tools [Array]
- # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
- # @param temperature [Float, nil]
- # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil]
- # @param top_p [Float, nil]
- # @param object [Symbol, :assistant]
+ # @param id [String] The identifier, which can be referenced in API endpoints.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) for when the assistant was created.
+ #
+ # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ...
+ #
+ # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param model [String] ID of the model to use. You can use the [List models](https://platform.openai.co
+ # ...
+ #
+ # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ...
+ #
+ # @param tools [Array] A list of tools enabled on the assistant. There can be a maximum of 128 tools per
+ # ...
+ #
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
+ # ...
+ #
+ # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
+ # ...
+ #
+ # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
+ # ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
+ # ...
+ #
+ # @param object [Symbol, :assistant] The object type, which is always `assistant`.

# @see OpenAI::Models::Beta::Assistant#tool_resources
class ToolResources < OpenAI::Internal::Type::BaseModel
@@ -172,7 +194,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]

# @!method initialize(file_ids: nil)
- # @param file_ids [Array]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter} for more
+ # details.
+ #
+ # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
+ # ...
end

# @see OpenAI::Models::Beta::Assistant::ToolResources#file_search
@@ -187,7 +214,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String]

# @!method initialize(vector_store_ids: nil)
- # @param vector_store_ids [Array]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::Assistant::ToolResources::FileSearch} for more details.
+ #
+ # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect
+ # ...
end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 089e0fe9..3870f034 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -120,17 +120,38 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) - # @param model [String, Symbol, OpenAI::Models::ChatModel] - # @param description [String, nil] - # @param instructions [String, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] - # @param tools [Array] - # @param top_p [Float, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantCreateParams} for more details. + # + # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... + # + # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ... + # + # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # ... + # + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the @@ -182,7 +203,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} + # for more details. + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # ... 
end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search @@ -207,8 +233,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) - # @param vector_store_ids [Array] - # @param vector_stores [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch} for + # more details. + # + # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ + # ... + # + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # ... class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy @@ -239,9 +272,17 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) - # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # @param file_ids [Array] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} + # for more details. + # + # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -270,7 +311,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. # - # @param type [Symbol, :auto] + # @param type [Symbol, :auto] Always `auto`. end class Static < OpenAI::Internal::Type::BaseModel @@ -288,7 +329,8 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # @param type [Symbol, :static] + # + # @param type [Symbol, :static] Always `static`. 
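A sketch of the nested vector-store helper documented above, using the `static` chunking strategy (its docs give defaults of `800`/`400`, with the overlap at most half the chunk size). The `beta.assistants.create` call shape and `uploaded_file` are assumptions; the nested hash keys mirror the param models in the hunks above.

```ruby
# Hypothetical call shape; the nested hashes mirror the param models above.
assistant = client.beta.assistants.create(
  model: "gpt-4o",
  name: "Docs helper",
  tools: [{type: :file_search}],
  tool_resources: {
    file_search: {
      vector_stores: [
        {
          file_ids: [uploaded_file.id], # files to add to the new vector store
          chunking_strategy: {
            type: :static,
            static: {
              max_chunk_size_tokens: 800, # default `800`
              chunk_overlap_tokens: 400   # default `400`, at most half the chunk size
            }
          }
        }
      ]
    }
  }
)
```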
# @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -308,8 +350,15 @@ class Static < OpenAI::Internal::Type::BaseModel required :max_chunk_size_tokens, Integer # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) - # @param chunk_overlap_tokens [Integer] - # @param max_chunk_size_tokens [Integer] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # for more details. + # + # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. + # ... + # + # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini + # ... end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 5d3b268b..a3c95fe0 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -41,10 +41,21 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Beta::AssistantListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) - # @param after [String] - # @param before [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index eaa92aba..b048ee92 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -129,12 +129,18 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel optional :enabled, OpenAI::Internal::Type::Boolean # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated} for more details. + # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Models::Beta::Thread] - # @param enabled [Boolean] + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # ... + # + # @param enabled [Boolean] Whether to enable input audio transcription. 
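The `after`/`before` cursors in `AssistantListParams` above support straightforward pagination; here is a sketch, assuming a `list` resource method and a page object with a `data` array (neither is defined by this patch).

```ruby
# Walk all assistants 20 at a time, newest first.
cursor = nil
loop do
  page = client.beta.assistants.list(limit: 20, order: :desc, after: cursor)
  page.data.each { |assistant| puts "#{assistant.id} #{assistant.name}" }
  break if page.data.size < 20 # a short page means we reached the end
  cursor = page.data.last.id   # resume after the last object seen
end
```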
+ # # @param event [Symbol, :"thread.created"] end @@ -152,10 +158,15 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.created" # @!method initialize(data:, event: :"thread.run.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. + # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.created"] end @@ -173,10 +184,15 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.queued" # @!method initialize(data:, event: :"thread.run.queued") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.queued"] end @@ -194,10 +210,16 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.in_progress" # @!method initialize(data:, event: :"thread.run.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.in_progress"] end @@ -215,10 +237,16 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.requires_action" # @!method initialize(data:, event: :"thread.run.requires_action") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.requires_action"] end @@ -236,10 +264,16 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.completed" # @!method initialize(data:, event: :"thread.run.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... 
+ # # @param event [Symbol, :"thread.run.completed"] end @@ -257,10 +291,16 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.incomplete" # @!method initialize(data:, event: :"thread.run.incomplete") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.incomplete"] end @@ -278,10 +318,15 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.failed" # @!method initialize(data:, event: :"thread.run.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.failed"] end @@ -299,10 +344,16 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.cancelling" # @!method initialize(data:, event: :"thread.run.cancelling") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.cancelling"] end @@ -320,10 +371,16 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.cancelled" # @!method initialize(data:, event: :"thread.run.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.cancelled"] end @@ -341,10 +398,15 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.expired" # @!method initialize(data:, event: :"thread.run.expired") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... 
+ # # @param event [Symbol, :"thread.run.expired"] end @@ -361,11 +423,16 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.created" # @!method initialize(data:, event: :"thread.run.step.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.created"] end @@ -382,11 +449,16 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.in_progress" # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -404,11 +476,17 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.delta" # @!method initialize(data:, event: :"thread.run.step.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more + # details. + # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami + # ... + # # @param event [Symbol, :"thread.run.step.delta"] end @@ -425,11 +503,16 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.completed" # @!method initialize(data:, event: :"thread.run.step.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.completed"] end @@ -446,11 +529,16 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.failed" # @!method initialize(data:, event: :"thread.run.step.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. 
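Given the run and run-step lifecycle events defined in this file, a dispatch sketch: the streaming entry point used here is an assumption, while the event classes and their `data` payloads come directly from the hunks above.

```ruby
# Hypothetical streaming helper; only the event types are defined in this patch.
client.beta.threads.runs.stream(thread_id: thread.id, assistant_id: assistant.id) do |event|
  case event
  when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated
    puts "step started: #{event.data.id}"
  when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed
    warn "run failed: #{event.data.id}"
  when OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted
    puts "run finished: #{event.data.id}"
  end
end
```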
# - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.failed"] end @@ -467,11 +555,16 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.cancelled" # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -488,11 +581,16 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.expired" # @!method initialize(data:, event: :"thread.run.step.expired") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.expired"] end @@ -510,11 +608,17 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.created" # @!method initialize(data:, event: :"thread.message.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.created"] end @@ -532,11 +636,17 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.in_progress" # @!method initialize(data:, event: :"thread.message.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.in_progress"] end @@ -554,11 +664,17 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.delta" # @!method initialize(data:, event: :"thread.message.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta} for more + # details. 
+ # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming + # ... + # # @param event [Symbol, :"thread.message.delta"] end @@ -576,11 +692,17 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.completed" # @!method initialize(data:, event: :"thread.message.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.completed"] end @@ -598,11 +720,17 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.incomplete" # @!method initialize(data:, event: :"thread.message.incomplete") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.incomplete"] end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 43914c38..43582de3 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -19,7 +19,8 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # Specifies a tool the model should use. Use to force the model to call a specific # tool. # - # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] + # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set + # # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] # The type of the tool. If type is `function`, the function name must be set diff --git a/lib/openai/models/beta/assistant_tool_choice_function.rb b/lib/openai/models/beta/assistant_tool_choice_function.rb index 87065a84..8440fb98 100644 --- a/lib/openai/models/beta/assistant_tool_choice_function.rb +++ b/lib/openai/models/beta/assistant_tool_choice_function.rb @@ -11,7 +11,7 @@ class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel required :name, String # @!method initialize(name:) - # @param name [String] + # @param name [String] The name of the function to call. 
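The tool-choice models documented below can force a particular function call. A sketch built from their `initialize` signatures follows; the function name and the run-create call are illustrative only.

```ruby
# Force the model to call one specific function ("lookup_order" is hypothetical).
tool_choice = OpenAI::Models::Beta::AssistantToolChoice.new(
  type: :function,
  function: OpenAI::Models::Beta::AssistantToolChoiceFunction.new(name: "lookup_order")
)

# Hypothetical call shape for attaching it to a run:
run = client.beta.threads.runs.create(
  thread_id: thread.id,
  assistant_id: assistant.id,
  tool_choice: tool_choice
)
```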
end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index c0a93261..d507c550 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -120,17 +120,38 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(description: nil, instructions: nil, metadata: nil, model: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) - # @param description [String, nil] - # @param instructions [String, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] - # @param name [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] - # @param tools [Array] - # @param top_p [Float, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantUpdateParams} for more details. + # + # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ... + # + # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... + # + # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # ... + # + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the @@ -293,7 +314,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} + # for more details. 
+ # + # @param file_ids [Array] Overrides the list of [file](https://platform.openai.com/docs/api-reference/file + # ... end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search @@ -308,7 +334,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(vector_store_ids: nil) - # @param vector_store_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch} for + # more details. + # + # @param vector_store_ids [Array] Overrides the [vector store](https://platform.openai.com/docs/api-reference/vect + # ... end end end diff --git a/lib/openai/models/beta/code_interpreter_tool.rb b/lib/openai/models/beta/code_interpreter_tool.rb index ffea0d31..ee84099e 100644 --- a/lib/openai/models/beta/code_interpreter_tool.rb +++ b/lib/openai/models/beta/code_interpreter_tool.rb @@ -11,7 +11,7 @@ class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel required :type, const: :code_interpreter # @!method initialize(type: :code_interpreter) - # @param type [Symbol, :code_interpreter] + # @param type [Symbol, :code_interpreter] The type of tool being defined: `code_interpreter` end end end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 9dc13172..2dbd7d7a 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -17,8 +17,9 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel optional :file_search, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch } # @!method initialize(file_search: nil, type: :file_search) - # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] - # @param type [Symbol, :file_search] + # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. + # + # @param type [Symbol, :file_search] The type of tool being defined: `file_search` # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel @@ -47,10 +48,16 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :ranking_options, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions } # @!method initialize(max_num_results: nil, ranking_options: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::FileSearchTool::FileSearch} for more details. + # # Overrides for the file search tool. # - # @param max_num_results [Integer] - # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] + # @param max_num_results [Integer] The maximum number of results the file search tool should output. The default is + # ... + # + # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool + # ... # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -69,6 +76,10 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel optional :ranker, enum: -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } # @!method initialize(score_threshold:, ranker: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} for more + # details. 
+ # # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. # @@ -76,8 +87,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @param score_threshold [Float] - # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] + # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num + # ... + # + # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank + # ... # The ranker to use for the file search. If not specified will use the `auto` # ranker. diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index bce8c29a..4a76a200 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -17,7 +17,8 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!method initialize(function:, type: :function) # @param function [OpenAI::Models::FunctionDefinition] - # @param type [Symbol, :function] + # + # @param type [Symbol, :function] The type of tool being defined: `function` end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 74bb507a..77a99a07 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -43,11 +43,17 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.created" # @!method initialize(data:, event: :"thread.message.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.created"] end @@ -65,11 +71,17 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.in_progress" # @!method initialize(data:, event: :"thread.message.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.in_progress"] end @@ -87,11 +99,16 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.delta" # @!method initialize(data:, event: :"thread.message.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta} for more details. 
+ # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming + # ... + # # @param event [Symbol, :"thread.message.delta"] end @@ -109,11 +126,17 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.completed" # @!method initialize(data:, event: :"thread.message.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.completed"] end @@ -131,11 +154,17 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.message.incomplete" # @!method initialize(data:, event: :"thread.message.incomplete") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more + # details. + # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # ... + # # @param event [Symbol, :"thread.message.incomplete"] end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index e312bf45..f876f01e 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -48,11 +48,16 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.created" # @!method initialize(data:, event: :"thread.run.step.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.created"] end @@ -69,11 +74,16 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.in_progress" # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... 
+ # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -91,11 +101,16 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.delta" # @!method initialize(data:, event: :"thread.run.step.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. + # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami + # ... + # # @param event [Symbol, :"thread.run.step.delta"] end @@ -112,11 +127,16 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.completed" # @!method initialize(data:, event: :"thread.run.step.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.completed"] end @@ -133,11 +153,16 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.failed" # @!method initialize(data:, event: :"thread.run.step.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.failed"] end @@ -154,11 +179,16 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.cancelled" # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -175,11 +205,16 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.step.expired" # @!method initialize(data:, event: :"thread.run.step.expired") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more + # details. + # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. 
# - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # # @param event [Symbol, :"thread.run.step.expired"] end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 33a63272..19bda475 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -55,10 +55,15 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.created" # @!method initialize(data:, event: :"thread.run.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated} for more details. + # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.created"] end @@ -76,10 +81,15 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.queued" # @!method initialize(data:, event: :"thread.run.queued") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.queued"] end @@ -97,10 +107,15 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.in_progress" # @!method initialize(data:, event: :"thread.run.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.in_progress"] end @@ -118,10 +133,16 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.requires_action" # @!method initialize(data:, event: :"thread.run.requires_action") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction} for more + # details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... 
+ # # @param event [Symbol, :"thread.run.requires_action"] end @@ -139,10 +160,15 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.completed" # @!method initialize(data:, event: :"thread.run.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.completed"] end @@ -160,10 +186,15 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.incomplete" # @!method initialize(data:, event: :"thread.run.incomplete") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.incomplete"] end @@ -181,10 +212,15 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.failed" # @!method initialize(data:, event: :"thread.run.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.failed"] end @@ -202,10 +238,15 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.cancelling" # @!method initialize(data:, event: :"thread.run.cancelling") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.cancelling"] end @@ -223,10 +264,15 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.cancelled" # @!method initialize(data:, event: :"thread.run.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... 
+ # # @param event [Symbol, :"thread.run.cancelled"] end @@ -244,10 +290,15 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel required :event, const: :"thread.run.expired" # @!method initialize(data:, event: :"thread.run.expired") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired} for more details. + # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Run] + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # ... + # # @param event [Symbol, :"thread.run.expired"] end diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 757ea5d5..cad2cd8d 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -44,14 +44,22 @@ class Thread < OpenAI::Internal::Type::BaseModel required :tool_resources, -> { OpenAI::Models::Beta::Thread::ToolResources }, nil?: true # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Thread} for more details. + # # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @param id [String] - # @param created_at [Integer] - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] - # @param object [Symbol, :thread] + # @param id [String] The identifier, which can be referenced in API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the thread was created. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... + # + # @param object [Symbol, :thread] The object type, which is always `thread`. # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel @@ -85,7 +93,11 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter} for more details. + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # ... end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search @@ -100,7 +112,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(vector_store_ids: nil) - # @param vector_store_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Thread::ToolResources::FileSearch} for more details. + # + # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ + # ... 
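The message, run-step, and run stream events documented above all pair a typed `event` symbol with a `data` model. A hedged sketch of dispatching on them follows; `stream` is assumed to be an Enumerable of these event objects, and the delta accessors are assumptions based on the docs above:

stream.each do |event|
  case event.event
  in :"thread.message.delta"
    # data is a MessageDeltaEvent; print any streamed text fragments
    event.data.delta.content&.each do |part|
      print part.text.value if part.respond_to?(:text) && part.text
    end
  in :"thread.run.completed"
    puts "\nrun #{event.data.id} finished"
  else
    # the remaining thread.message.*, thread.run.*, and thread.run.step.*
    # events carry Message, Run, or RunStep models in data, as documented above
  end
end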
end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 979eb39f..0cd74409 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -163,21 +163,51 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel nil?: true # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) - # @param assistant_id [String] - # @param instructions [String, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_prompt_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # @param parallel_tool_calls [Boolean] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - # @param tools [Array, nil] - # @param top_p [Float, nil] - # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams} for more details. + # + # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista + # ... + # + # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi + # ... + # + # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the + # ... + # + # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run. + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ... + # + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... 
+ # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # ... + # + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to @@ -228,12 +258,19 @@ class Thread < OpenAI::Internal::Type::BaseModel nil?: true # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread} for more details. + # # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # - # @param messages [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -273,10 +310,17 @@ class Message < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(content:, role:, attachments: nil, metadata: nil) - # @param content [String, Array] - # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] - # @param attachments [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message} for more + # details. + # + # @param content [String, Array] The text contents of the message. + # + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: ... + # + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... # The text contents of the message. # @@ -330,8 +374,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) - # @param file_id [String] - # @param tools [Array] + # @param file_id [String] The ID of the file to attach to the message. + # + # @param tools [Array] The tools to add this file to. 
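Taken together, the ThreadCreateAndRunParams documentation above describes a single nested request body. A minimal sketch of the corresponding call, with hypothetical IDs and message text:

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

run = client.beta.threads.create_and_run(
  assistant_id: "asst_abc123",  # hypothetical assistant ID
  thread: {
    messages: [
      {
        role: :user,
        content: "Summarize the attached report.",
        # attach a file and expose it to the file_search tool
        attachments: [{file_id: "file_abc123", tools: [{type: :file_search}]}]
      }
    ]
  },
  # keep only the 10 most recent messages when building the run's context
  truncation_strategy: {type: :last_messages, last_messages: 10}
)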
module Tool extend OpenAI::Internal::Type::Union @@ -351,7 +396,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel required :type, const: :file_search # @!method initialize(type: :file_search) - # @param type [Symbol, :file_search] + # @param type [Symbol, :file_search] The type of tool being defined: `file_search` end # @!method self.variants @@ -394,7 +439,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} + # for more details. + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # ... end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search @@ -419,8 +469,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) - # @param vector_store_ids [Array] - # @param vector_stores [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} + # for more details. + # + # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ + # ... + # + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # ... class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy @@ -451,9 +508,17 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) - # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # @param file_ids [Array] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore} + # for more details. + # + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -482,7 +547,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. # - # @param type [Symbol, :auto] + # @param type [Symbol, :auto] Always `auto`. 
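The `auto` chunking strategy above takes no arguments; the `static` variant, documented next, exposes the two token counts. A sketch of both shapes as they would sit inside `tool_resources` (the file ID and token values are illustrative, not required defaults):

auto_strategy   = {type: :auto}
static_strategy = {
  type: :static,
  static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
}

tool_resources = {
  file_search: {
    vector_stores: [
      # hypothetical file ID, chunked with the static strategy above
      {file_ids: ["file_abc123"], chunking_strategy: static_strategy}
    ]
  }
}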
end
class Static < OpenAI::Internal::Type::BaseModel
@@ -500,7 +565,8 @@ class Static < OpenAI::Internal::Type::BaseModel
# @!method initialize(static:, type: :static)
# @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static]
- # @param type [Symbol, :static]
+ #
+ # @param type [Symbol, :static] Always `static`.
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static
class Static < OpenAI::Internal::Type::BaseModel
@@ -520,8 +586,15 @@ class Static < OpenAI::Internal::Type::BaseModel
required :max_chunk_size_tokens, Integer
# @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:)
- # @param chunk_overlap_tokens [Integer]
- # @param max_chunk_size_tokens [Integer]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static}
+ # for more details.
+ #
+ # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`.
+ # ...
+ #
+ # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini
+ # ...
end
end
@@ -565,7 +638,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]
# @!method initialize(file_ids: nil)
- # @param file_ids [Array]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter}
+ # for more details.
+ #
+ # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
+ # ...
end
# @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search
@@ -580,7 +658,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String]
# @!method initialize(vector_store_ids: nil)
- # @param vector_store_ids [Array]
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for
+ # more details.
+ #
+ # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect
+ # ...
end
end
@@ -602,11 +685,18 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
optional :last_messages, Integer, nil?: true
# @!method initialize(type:, last_messages: nil)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more
+ # details.
+ #
# Controls for how a thread will be truncated prior to the run. Use this to
# control the initial context window of the run.
#
- # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type]
- # @param last_messages [Integer, nil]
+ # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
+ # ...
+ #
+ # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context
+ # ...
# The truncation strategy to use for the thread. The default is `auto`.
If set to # `last_messages`, the thread will be truncated to the n most recent messages in diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 2d768eaf..3cb1d240 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -37,9 +37,17 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources }, nil?: true # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) - # @param messages [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams} for more details. + # + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class Message < OpenAI::Internal::Type::BaseModel @@ -80,10 +88,16 @@ class Message < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(content:, role:, attachments: nil, metadata: nil) - # @param content [String, Array] - # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] - # @param attachments [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams::Message} for more details. + # + # @param content [String, Array] The text contents of the message. + # + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include: ... + # + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... # The text contents of the message. # @@ -137,8 +151,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) - # @param file_id [String] - # @param tools [Array] + # @param file_id [String] The ID of the file to attach to the message. + # + # @param tools [Array] The tools to add this file to. 
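ThreadCreateParams mirrors the thread shape documented above. A short hedged sketch of creating a thread seeded with messages and metadata (the metadata key is invented for illustration):

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

thread = client.beta.threads.create(
  messages: [
    {role: :user, content: "Hello!"},
    {role: :assistant, content: "Hi, how can I help?"}
  ],
  metadata: {user_id: "u_123"}  # up to 16 key-value pairs, per the docs above
)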
module Tool extend OpenAI::Internal::Type::Union @@ -158,7 +173,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel required :type, const: :file_search # @!method initialize(type: :file_search) - # @param type [Symbol, :file_search] + # @param type [Symbol, :file_search] The type of tool being defined: `file_search` end # @!method self.variants @@ -199,7 +214,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for + # more details. + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # ... end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search @@ -224,8 +244,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) - # @param vector_store_ids [Array] - # @param vector_stores [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch} for more + # details. + # + # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ + # ... + # + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # ... class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy @@ -256,9 +283,17 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) - # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # @param file_ids [Array] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} + # for more details. + # + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -287,7 +322,7 @@ class Auto < OpenAI::Internal::Type::BaseModel # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. # - # @param type [Symbol, :auto] + # @param type [Symbol, :auto] Always `auto`. 
end class Static < OpenAI::Internal::Type::BaseModel @@ -305,7 +340,8 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # @param type [Symbol, :static] + # + # @param type [Symbol, :static] Always `static`. # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -325,8 +361,15 @@ class Static < OpenAI::Internal::Type::BaseModel required :max_chunk_size_tokens, Integer # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) - # @param chunk_overlap_tokens [Integer] - # @param max_chunk_size_tokens [Integer] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # for more details. + # + # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. + # ... + # + # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini + # ... end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index bf5cc945..d3279538 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -23,12 +23,18 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel optional :enabled, OpenAI::Internal::Type::Boolean # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadStreamEvent} for more details. + # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Models::Beta::Thread] - # @param enabled [Boolean] + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # ... + # + # @param enabled [Boolean] Whether to enable input audio transcription. + # # @param event [Symbol, :"thread.created"] end end diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 8d7d621e..a96d0bc8 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -29,8 +29,14 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel optional :tool_resources, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources }, nil?: true # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadUpdateParams} for more details. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... 
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel @@ -65,7 +71,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(file_ids: nil) - # @param file_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for + # more details. + # + # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # ... end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search @@ -80,7 +91,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(vector_store_ids: nil) - # @param vector_store_ids [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more + # details. + # + # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ + # ... end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index 77b9e19d..4ccc33d1 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -38,10 +38,14 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # uses the "file_search" tool to search files. # # @param end_index [Integer] + # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] + # # @param start_index [Integer] - # @param text [String] - # @param type [Symbol, :file_citation] + # + # @param text [String] The text in the message content that needs to be replaced. + # + # @param type [Symbol, :file_citation] Always `file_citation`. # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -52,7 +56,7 @@ class FileCitation < OpenAI::Internal::Type::BaseModel required :file_id, String # @!method initialize(file_id:) - # @param file_id [String] + # @param file_id [String] The ID of the specific File the citation is from. end end end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 3d825a51..7a676989 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -43,12 +43,17 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # File associated with the assistant or the message. Generated when the assistant # uses the "file_search" tool to search files. # - # @param index [Integer] + # @param index [Integer] The index of the annotation in the text content part. + # # @param end_index [Integer] + # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # # @param start_index [Integer] - # @param text [String] - # @param type [Symbol, :file_citation] + # + # @param text [String] The text in the message content that needs to be replaced. + # + # @param type [Symbol, :file_citation] Always `file_citation`. 
# @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -65,8 +70,9 @@ class FileCitation < OpenAI::Internal::Type::BaseModel optional :quote, String # @!method initialize(file_id: nil, quote: nil) - # @param file_id [String] - # @param quote [String] + # @param file_id [String] The ID of the specific File the citation is from. + # + # @param quote [String] The specific quote in the file. end end end diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index 91c2a6fd..516a7249 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -37,10 +37,14 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool to generate a file. # # @param end_index [Integer] + # # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] + # # @param start_index [Integer] - # @param text [String] - # @param type [Symbol, :file_path] + # + # @param text [String] The text in the message content that needs to be replaced. + # + # @param type [Symbol, :file_path] Always `file_path`. # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -51,7 +55,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel required :file_id, String # @!method initialize(file_id:) - # @param file_id [String] + # @param file_id [String] The ID of the file that was generated. end end end diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 679015ba..b67e6401 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -42,12 +42,17 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # A URL for the file that's generated when the assistant used the # `code_interpreter` tool to generate a file. # - # @param index [Integer] + # @param index [Integer] The index of the annotation in the text content part. + # # @param end_index [Integer] + # # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # # @param start_index [Integer] - # @param text [String] - # @param type [Symbol, :file_path] + # + # @param text [String] The text in the message content that needs to be replaced. + # + # @param type [Symbol, :file_path] Always `file_path`. # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -58,7 +63,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel optional :file_id, String # @!method initialize(file_id: nil) - # @param file_id [String] + # @param file_id [String] The ID of the file that was generated. 
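The file_citation and file_path annotations documented above both carry the exact `text` span to replace in the message body. A hedged sketch of rendering them; `message` is assumed to be an OpenAI::Models::Beta::Threads::Message whose content includes text blocks:

message.content.each do |part|
  next unless part.respond_to?(:text)  # skip image content blocks

  rendered = part.text.value
  part.text.annotations.each do |ann|
    case ann.type
    in :file_citation
      rendered = rendered.sub(ann.text, " [cited: #{ann.file_citation.file_id}]")
    in :file_path
      rendered = rendered.sub(ann.text, " (generated: #{ann.file_path.file_id})")
    else
      # leave any other annotation types untouched
    end
  end
  puts rendered
end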
end end end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index df480221..f99387f6 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -21,8 +21,14 @@ class ImageFile < OpenAI::Internal::Type::BaseModel optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFile::Detail } # @!method initialize(file_id:, detail: nil) - # @param file_id [String] - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::ImageFile} for more details. + # + # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + # ... + # + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # ... # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 2bcba265..99aeca4e 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -21,7 +21,8 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # in the content of a message. # # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] - # @param type [Symbol, :image_file] + # + # @param type [Symbol, :image_file] Always `image_file`. end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 25fc81e0..69c0f595 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -21,8 +21,14 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel optional :file_id, String # @!method initialize(detail: nil, file_id: nil) - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] - # @param file_id [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::ImageFileDelta} for more details. + # + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # ... + # + # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + # ... # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 3befaf8c..8831e0c3 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -26,9 +26,11 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. # - # @param index [Integer] + # @param index [Integer] The index of the content part in the message. + # # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] - # @param type [Symbol, :image_file] + # + # @param type [Symbol, :image_file] Always `image_file`. 
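An image_file content block, as documented above, references an uploaded File by ID and accepts a detail level. A minimal sketch of sending one in a user message (thread and file IDs hypothetical):

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

message = client.beta.threads.messages.create(
  "thread_abc123",  # hypothetical thread ID
  role: :user,
  content: [
    {type: :image_file, image_file: {file_id: "file_abc123", detail: :high}},
    {type: :text, text: "What is in this image?"}
  ]
)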
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb
index 1b88b1b2..02ed1378 100644
--- a/lib/openai/models/beta/threads/image_url.rb
+++ b/lib/openai/models/beta/threads/image_url.rb
@@ -20,8 +20,14 @@ class ImageURL < OpenAI::Internal::Type::BaseModel
 optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURL::Detail }
 # @!method initialize(url:, detail: nil)
- # @param url [String]
- # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::ImageURL} for more details.
+ #
+ # @param url [String] The external URL of the image, must be a supported image type: jpeg, jpg, png,
+ # ...
+ #
+ # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # ...
 # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
 # to high resolution using `high`. Default value is `auto`
diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb
index b44975eb..f9b5edc9 100644
--- a/lib/openai/models/beta/threads/image_url_content_block.rb
+++ b/lib/openai/models/beta/threads/image_url_content_block.rb
@@ -20,7 +20,8 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel
 # References an image URL in the content of a message.
 #
 # @param image_url [OpenAI::Models::Beta::Threads::ImageURL]
- # @param type [Symbol, :image_url]
+ #
+ # @param type [Symbol, :image_url] The type of the content part.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb
index 9ba548e3..1f1d98ef 100644
--- a/lib/openai/models/beta/threads/image_url_delta.rb
+++ b/lib/openai/models/beta/threads/image_url_delta.rb
@@ -20,8 +20,14 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel
 optional :url, String
 # @!method initialize(detail: nil, url: nil)
- # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail]
- # @param url [String]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::ImageURLDelta} for more details.
+ #
+ # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # ...
+ #
+ # @param url [String] The URL of the image, must be a supported image type: jpeg, jpg, png, gif, webp
+ # ...
 # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
 # to high resolution using `high`.
diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb
index 8b140bfb..f2a01238 100644
--- a/lib/openai/models/beta/threads/image_url_delta_block.rb
+++ b/lib/openai/models/beta/threads/image_url_delta_block.rb
@@ -25,9 +25,11 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel
 # @!method initialize(index:, image_url: nil, type: :image_url)
 # References an image URL in the content of a message.
 #
- # @param index [Integer]
+ # @param index [Integer] The index of the content part in the message.
+ #
 # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta]
- # @param type [Symbol, :image_url]
+ #
+ # @param type [Symbol, :image_url] Always `image_url`.
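Likewise, a hypothetical `image_url` content part mirroring the fields documented above; the URL is a placeholder and must point at a supported image type (jpeg, jpg, png, gif, webp).

    image_url_part = {
      type: :image_url,
      image_url: {url: "https://example.com/diagram.png", detail: :auto}
    }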
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb
index ccff15ee..b98b3ca0 100644
--- a/lib/openai/models/beta/threads/message.rb
+++ b/lib/openai/models/beta/threads/message.rb
@@ -105,23 +105,43 @@ class Message < OpenAI::Internal::Type::BaseModel
 required :thread_id, String
 # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message")
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Message} for more details.
+ #
 # Represents a message within a
 # [thread](https://platform.openai.com/docs/api-reference/threads).
 #
- # @param id [String]
- # @param assistant_id [String, nil]
- # @param attachments [Array, nil]
- # @param completed_at [Integer, nil]
- # @param content [Array]
- # @param created_at [Integer]
- # @param incomplete_at [Integer, nil]
- # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role]
- # @param run_id [String, nil]
- # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status]
- # @param thread_id [String]
- # @param object [Symbol, :"thread.message"]
+ # @param id [String] The identifier, which can be referenced in API endpoints.
+ #
+ # @param assistant_id [String, nil] If applicable, the ID of the [assistant](https://platform.openai.com/docs/api-re
+ # ...
+ #
+ # @param attachments [Array, nil] A list of files attached to the message, and the tools they were added to.
+ #
+ # @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the message was completed.
+ #
+ # @param content [Array] The content of the message in array of text and/or images.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) for when the message was created.
+ #
+ # @param incomplete_at [Integer, nil] The Unix timestamp (in seconds) for when the message was marked as incomplete.
+ #
+ # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete.
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`.
+ #
+ # @param run_id [String, nil] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) associa
+ # ...
+ #
+ # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or `
+ # ...
+ #
+ # @param thread_id [String] The [thread](https://platform.openai.com/docs/api-reference/threads) ID that thi
+ # ...
+ #
+ # @param object [Symbol, :"thread.message"] The object type, which is always `thread.message`.
 class Attachment < OpenAI::Internal::Type::BaseModel
 # @!attribute file_id
@@ -138,8 +158,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] }
 # @!method initialize(file_id: nil, tools: nil)
- # @param file_id [String]
- # @param tools [Array]
+ # @param file_id [String] The ID of the file to attach to the message.
+ #
+ # @param tools [Array] The tools to add this file to.
 module Tool
 extend OpenAI::Internal::Type::Union
@@ -156,7 +177,7 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel
 required :type, const: :file_search
 # @!method initialize(type: :file_search)
- # @param type [Symbol, :file_search]
+ # @param type [Symbol, :file_search] The type of tool being defined: `file_search`
 end
 # @!method self.variants
@@ -175,7 +196,7 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel
 # @!method initialize(reason:)
 # On an incomplete message, details about why the message is incomplete.
 #
- # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason]
+ # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete.
 # The reason the message is incomplete.
 #
diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb
index 711686f8..ff6d233f 100644
--- a/lib/openai/models/beta/threads/message_create_params.rb
+++ b/lib/openai/models/beta/threads/message_create_params.rb
@@ -46,10 +46,17 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel
 optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
 # @!method initialize(content:, role:, attachments: nil, metadata: nil, request_options: {})
- # @param content [String, Array]
- # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role]
- # @param attachments [Array, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details.
+ #
+ # @param content [String, Array] The text contents of the message.
+ #
+ # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: ...
+ #
+ # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to.
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 # The text contents of the message.
@@ -100,8 +107,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] }
 # @!method initialize(file_id: nil, tools: nil)
- # @param file_id [String]
- # @param tools [Array]
+ # @param file_id [String] The ID of the file to attach to the message.
+ #
+ # @param tools [Array] The tools to add this file to.
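A sketch of creating a message with these parameters, assuming a configured `client` as in the earlier sketch; the positional `thread_id` and all IDs are illustrative assumptions.

    client.beta.threads.messages.create(
      "thread_abc123",
      role: :user,
      content: "Summarize the attached report.",
      attachments: [
        {file_id: "file-abc123", tools: [{type: :file_search}]}
      ],
      metadata: {source: "docs-example"}
    )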
 module Tool
 extend OpenAI::Internal::Type::Union
@@ -121,7 +129,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
 required :type, const: :file_search
 # @!method initialize(type: :file_search)
- # @param type [Symbol, :file_search]
+ # @param type [Symbol, :file_search] The type of tool being defined: `file_search`
 end
 # @!method self.variants
diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb
index 04f0a1fd..a845cecd 100644
--- a/lib/openai/models/beta/threads/message_delta.rb
+++ b/lib/openai/models/beta/threads/message_delta.rb
@@ -21,8 +21,9 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel
 # @!method initialize(content: nil, role: nil)
 # The delta containing the fields that have changed on the Message.
 #
- # @param content [Array]
- # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role]
+ # @param content [Array] The content of the message in array of text and/or images.
+ #
+ # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`.
 # The entity that produced the message. One of `user` or `assistant`.
 #
diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb
index e935e3bd..96c689fb 100644
--- a/lib/openai/models/beta/threads/message_delta_event.rb
+++ b/lib/openai/models/beta/threads/message_delta_event.rb
@@ -27,9 +27,11 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel
 # Represents a message delta i.e. any changed fields on a message during
 # streaming.
 #
- # @param id [String]
- # @param delta [OpenAI::Models::Beta::Threads::MessageDelta]
- # @param object [Symbol, :"thread.message.delta"]
+ # @param id [String] The identifier of the message, which can be referenced in API endpoints.
+ #
+ # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message.
+ #
+ # @param object [Symbol, :"thread.message.delta"] The object type, which is always `thread.message.delta`.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb
index 2dbe8d80..911b6f57 100644
--- a/lib/openai/models/beta/threads/message_list_params.rb
+++ b/lib/openai/models/beta/threads/message_list_params.rb
@@ -48,11 +48,23 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel
 optional :run_id, String
 # @!method initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {})
- # @param after [String]
- # @param before [String]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order]
- # @param run_id [String]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::MessageListParams} for more details.
+ #
+ # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
+ # ...
+ #
+ # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
+ # ...
+ #
+ # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
+ # ...
+ #
+ # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # ...
+ #
+ # @param run_id [String] Filter messages by the run ID that generated them. ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb
index 09909fdc..ab23d244 100644
--- a/lib/openai/models/beta/threads/message_update_params.rb
+++ b/lib/openai/models/beta/threads/message_update_params.rb
@@ -26,8 +26,13 @@ class MessageUpdateParams < OpenAI::Internal::Type::BaseModel
 optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
 # @!method initialize(thread_id:, metadata: nil, request_options: {})
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::MessageUpdateParams} for more details.
+ #
 # @param thread_id [String]
- # @param metadata [Hash{Symbol=>String}, nil]
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 end
 end
diff --git a/lib/openai/models/beta/threads/refusal_content_block.rb b/lib/openai/models/beta/threads/refusal_content_block.rb
index fcdce182..91eba521 100644
--- a/lib/openai/models/beta/threads/refusal_content_block.rb
+++ b/lib/openai/models/beta/threads/refusal_content_block.rb
@@ -20,7 +20,8 @@ class RefusalContentBlock < OpenAI::Internal::Type::BaseModel
 # The refusal content generated by the assistant.
 #
 # @param refusal [String]
- # @param type [Symbol, :refusal]
+ #
+ # @param type [Symbol, :refusal] Always `refusal`.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb
index 9f1cc3a1..cdb3d1ea 100644
--- a/lib/openai/models/beta/threads/refusal_delta_block.rb
+++ b/lib/openai/models/beta/threads/refusal_delta_block.rb
@@ -25,9 +25,11 @@ class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel
 # @!method initialize(index:, refusal: nil, type: :refusal)
 # The refusal content that is part of a message.
 #
- # @param index [Integer]
+ # @param index [Integer] The index of the refusal part in the message.
+ #
 # @param refusal [String]
- # @param type [Symbol, :refusal]
+ #
+ # @param type [Symbol, :refusal] Always `refusal`.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb
index 7db514ed..f7a4a2b0 100644
--- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb
+++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb
@@ -28,11 +28,19 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel
 required :type, const: :function
 # @!method initialize(id:, function:, type: :function)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall} for more
+ # details.
+ #
 # Tool call objects
 #
- # @param id [String]
- # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function]
- # @param type [Symbol, :function]
+ # @param id [String] The ID of the tool call. This ID must be referenced when you submit the tool out
+ # ...
+ #
+ # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition.
+ #
+ # @param type [Symbol, :function] The type of tool call the output is required for. For now, this is always `funct
+ # ...
 # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function
 class Function < OpenAI::Internal::Type::BaseModel
@@ -51,8 +59,9 @@ class Function < OpenAI::Internal::Type::BaseModel
 # @!method initialize(arguments:, name:)
 # The function definition.
 #
- # @param arguments [String]
- # @param name [String]
+ # @param arguments [String] The arguments that the model expects you to pass to the function.
+ #
+ # @param name [String] The name of the function.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb
index 7638e17a..51b736e9 100644
--- a/lib/openai/models/beta/threads/run.rb
+++ b/lib/openai/models/beta/threads/run.rb
@@ -220,36 +220,79 @@ class Run < OpenAI::Internal::Type::BaseModel
 optional :top_p, Float, nil?: true
 # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run")
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Run} for more details.
+ #
 # Represents an execution run on a
 # [thread](https://platform.openai.com/docs/api-reference/threads).
 #
- # @param id [String]
- # @param assistant_id [String]
- # @param cancelled_at [Integer, nil]
- # @param completed_at [Integer, nil]
- # @param created_at [Integer]
- # @param expires_at [Integer, nil]
- # @param failed_at [Integer, nil]
- # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil]
- # @param instructions [String]
- # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil]
- # @param max_completion_tokens [Integer, nil]
- # @param max_prompt_tokens [Integer, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param model [String]
- # @param parallel_tool_calls [Boolean]
- # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil]
- # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
- # @param started_at [Integer, nil]
- # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus]
- # @param thread_id [String]
- # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil]
- # @param tools [Array]
- # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil]
- # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil]
- # @param temperature [Float, nil]
- # @param top_p [Float, nil]
- # @param object [Symbol, :"thread.run"]
+ # @param id [String] The identifier, which can be referenced in API endpoints.
+ #
+ # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista
+ # ...
+ #
+ # @param cancelled_at [Integer, nil] The Unix timestamp (in seconds) for when the run was cancelled.
+ #
+ # @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the run was completed.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) for when the run was created.
+ #
+ # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the run will expire.
+ #
+ # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run failed.
+ #
+ # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet
+ # ...
+ #
+ # @param instructions [String] The instructions that the [assistant](https://platform.openai.com/docs/api-refer
+ # ...
+ #
+ # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors.
+ #
+ # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens specified to have been used over the cou
+ # ...
+ #
+ # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens specified to have been used over the course
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param model [String] The model that the [assistant](https://platform.openai.com/docs/api-reference/as
+ # ...
+ #
+ # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g
+ # ...
+ #
+ # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action
+ # ...
+ #
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
+ # ...
+ #
+ # @param started_at [Integer, nil] The Unix timestamp (in seconds) for when the run was started.
+ #
+ # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac
+ # ...
+ #
+ # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t
+ # ...
+ #
+ # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ...
+ #
+ # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe
+ # ...
+ #
+ # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
+ # ...
+ #
+ # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not
+ # ...
+ #
+ # @param temperature [Float, nil] The sampling temperature used for this run. If not set, defaults to 1.
+ #
+ # @param top_p [Float, nil] The nucleus sampling value used for this run. If not set, defaults to 1.
+ #
+ # @param object [Symbol, :"thread.run"] The object type, which is always `thread.run`.
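A sketch of reading the run fields documented above after retrieving a run, assuming the configured `client` from earlier; the `retrieve` signature and IDs are illustrative assumptions.

    run = client.beta.threads.runs.retrieve("run_abc123", thread_id: "thread_abc123")

    case run.status
    when :completed
      puts "used #{run.usage.total_tokens} tokens"
    when :requires_action
      calls = run.required_action.submit_tool_outputs.tool_calls
      puts "model requested #{calls.length} tool call(s)"
    when :incomplete
      puts "stopped early: #{run.incomplete_details&.reason}"
    end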
 # @see OpenAI::Models::Beta::Threads::Run#incomplete_details
 class IncompleteDetails < OpenAI::Internal::Type::BaseModel
@@ -261,10 +304,14 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel
 optional :reason, enum: -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason }
 # @!method initialize(reason: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Run::IncompleteDetails} for more details.
+ #
 # Details on why the run is incomplete. Will be `null` if the run is not
 # incomplete.
 #
- # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason]
+ # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li
+ # ...
 # The reason why the run is incomplete. This will point to which specific token
 # limit was reached over the course of the run.
@@ -298,8 +345,9 @@ class LastError < OpenAI::Internal::Type::BaseModel
 # @!method initialize(code:, message:)
 # The last error associated with this run. Will be `null` if there are no errors.
 #
- # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code]
- # @param message [String]
+ # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`.
+ #
+ # @param message [String] A human-readable description of the error.
 # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`.
 #
@@ -335,8 +383,9 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel
 # Details on the action required to continue the run. Will be `null` if no action
 # is required.
 #
- # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs]
- # @param type [Symbol, :submit_tool_outputs]
+ # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue.
+ #
+ # @param type [Symbol, :submit_tool_outputs] For now, this is always `submit_tool_outputs`.
 # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs
 class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel
@@ -350,7 +399,7 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel
 # @!method initialize(tool_calls:)
 # Details on the tool outputs needed for this run to continue.
 #
- # @param tool_calls [Array]
+ # @param tool_calls [Array] A list of the relevant tool calls.
 end
 end
@@ -373,11 +422,17 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
 optional :last_messages, Integer, nil?: true
 # @!method initialize(type:, last_messages: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Run::TruncationStrategy} for more details.
+ #
 # Controls for how a thread will be truncated prior to the run. Use this to
 # control the initial context window of the run.
 #
- # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type]
- # @param last_messages [Integer, nil]
+ # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
+ # ...
+ #
+ # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context
+ # ...
 # The truncation strategy to use for the thread. The default is `auto`. If set to
 # `last_messages`, the thread will be truncated to the n most recent messages in
@@ -420,9 +475,11 @@ class Usage < OpenAI::Internal::Type::BaseModel
 # Usage statistics related to the run. This value will be `null` if the run is not
 # in a terminal state (i.e. `in_progress`, `queued`, etc.).
 #
- # @param completion_tokens [Integer]
- # @param prompt_tokens [Integer]
- # @param total_tokens [Integer]
+ # @param completion_tokens [Integer] Number of completion tokens used over the course of the run.
+ #
+ # @param prompt_tokens [Integer] Number of prompt tokens used over the course of the run.
+ #
+ # @param total_tokens [Integer] Total number of tokens used (prompt + completion).
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb
index 6af10c28..d47e1177 100644
--- a/lib/openai/models/beta/threads/run_create_params.rb
+++ b/lib/openai/models/beta/threads/run_create_params.rb
@@ -189,23 +189,56 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel
 nil?: true
 # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
- # @param assistant_id [String]
- # @param include [Array]
- # @param additional_instructions [String, nil]
- # @param additional_messages [Array, nil]
- # @param instructions [String, nil]
- # @param max_completion_tokens [Integer, nil]
- # @param max_prompt_tokens [Integer, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param model [String, Symbol, OpenAI::Models::ChatModel, nil]
- # @param parallel_tool_calls [Boolean]
- # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil]
- # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
- # @param temperature [Float, nil]
- # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil]
- # @param tools [Array, nil]
- # @param top_p [Float, nil]
- # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunCreateParams} for more details.
+ #
+ # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista
+ # ...
+ #
+ # @param include [Array] A list of additional fields to include in the response. Currently the only suppo
+ # ...
+ #
+ # @param additional_instructions [String, nil] Appends additional instructions at the end of the instructions for the run. This
+ # ...
+ #
+ # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run.
+ #
+ # @param instructions [String, nil] Overrides the [instructions](https://platform.openai.com/docs/api-reference/assi
+ # ...
+ #
+ # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the
+ # ...
+ #
+ # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run.
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
+ # ...
+ #
+ # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g
+ # ...
+ #
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ...
+ #
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
+ # ...
+ #
+ # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
+ # ...
+ #
+ # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ...
+ #
+ # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify
+ # ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
+ # ...
+ #
+ # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 class AdditionalMessage < OpenAI::Internal::Type::BaseModel
@@ -247,10 +280,17 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel
 optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
 # @!method initialize(content:, role:, attachments: nil, metadata: nil)
- # @param content [String, Array]
- # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role]
- # @param attachments [Array, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage} for more
+ # details.
+ #
+ # @param content [String, Array] The text contents of the message.
+ #
+ # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: ...
+ #
+ # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to.
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
 # The text contents of the message.
 #
@@ -304,8 +344,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] }
 # @!method initialize(file_id: nil, tools: nil)
- # @param file_id [String]
- # @param tools [Array]
+ # @param file_id [String] The ID of the file to attach to the message.
+ #
+ # @param tools [Array] The tools to add this file to.
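A sketch of creating a run with a few of the parameters documented above, again assuming a configured `client`; IDs and values are placeholders.

    run = client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      additional_instructions: "Answer in one paragraph.",
      max_completion_tokens: 512,
      truncation_strategy: {type: :last_messages, last_messages: 10}
    )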
 module Tool
 extend OpenAI::Internal::Type::Union
@@ -325,7 +366,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
 required :type, const: :file_search
 # @!method initialize(type: :file_search)
- # @param type [Symbol, :file_search]
+ # @param type [Symbol, :file_search] The type of tool being defined: `file_search`
 end
 # @!method self.variants
@@ -368,11 +409,18 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
 optional :last_messages, Integer, nil?: true
 # @!method initialize(type:, last_messages: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy} for more
+ # details.
+ #
 # Controls for how a thread will be truncated prior to the run. Use this to
 # control the initial context window of the run.
 #
- # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type]
- # @param last_messages [Integer, nil]
+ # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
+ # ...
+ #
+ # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context
+ # ...
 # The truncation strategy to use for the thread. The default is `auto`. If set to
 # `last_messages`, the thread will be truncated to the n most recent messages in
diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb
index 85e39197..1ef700a7 100644
--- a/lib/openai/models/beta/threads/run_list_params.rb
+++ b/lib/openai/models/beta/threads/run_list_params.rb
@@ -42,10 +42,21 @@ class RunListParams < OpenAI::Internal::Type::BaseModel
 optional :order, enum: -> { OpenAI::Models::Beta::Threads::RunListParams::Order }
 # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {})
- # @param after [String]
- # @param before [String]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunListParams} for more details.
+ #
+ # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
+ # ...
+ #
+ # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
+ # ...
+ #
+ # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
+ # ...
+ #
+ # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb
index 34faf0fa..3ac75a5c 100644
--- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb
+++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb
@@ -25,7 +25,9 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel
 # @!method initialize(thread_id:, tool_outputs:, request_options: {})
 # @param thread_id [String]
- # @param tool_outputs [Array]
+ #
+ # @param tool_outputs [Array] A list of tools for which the outputs are being submitted.
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 class ToolOutput < OpenAI::Internal::Type::BaseModel
@@ -43,8 +45,14 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel
 optional :tool_call_id, String
 # @!method initialize(output: nil, tool_call_id: nil)
- # @param output [String]
- # @param tool_call_id [String]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more
+ # details.
+ #
+ # @param output [String] The output of the tool call to be submitted to continue the run.
+ #
+ # @param tool_call_id [String] The ID of the tool call in the `required_action` object within the run object th
+ # ...
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb
index 2d418080..6210136c 100644
--- a/lib/openai/models/beta/threads/run_update_params.rb
+++ b/lib/openai/models/beta/threads/run_update_params.rb
@@ -26,8 +26,13 @@ class RunUpdateParams < OpenAI::Internal::Type::BaseModel
 optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
 # @!method initialize(thread_id:, metadata: nil, request_options: {})
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::RunUpdateParams} for more details.
+ #
 # @param thread_id [String]
- # @param metadata [Hash{Symbol=>String}, nil]
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb
index ad3f6c66..2abe8916 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb
@@ -27,9 +27,11 @@ class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel
 # @!method initialize(index:, logs: nil, type: :logs)
 # Text output from the Code Interpreter tool call as part of a run step.
 #
- # @param index [Integer]
- # @param logs [String]
- # @param type [Symbol, :logs]
+ # @param index [Integer] The index of the output in the outputs array.
+ #
+ # @param logs [String] The text output from the Code Interpreter tool call.
+ #
+ # @param type [Symbol, :logs] Always `logs`.
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb
index 7e0c8ae1..b4458c9c 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb
@@ -24,9 +24,11 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel
 optional :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image }
 # @!method initialize(index:, image: nil, type: :image)
- # @param index [Integer]
+ # @param index [Integer] The index of the output in the outputs array.
+ #
 # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image]
- # @param type [Symbol, :image]
+ #
+ # @param type [Symbol, :image] Always `image`.
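A sketch of answering a `requires_action` run with these tool output parameters; the `submit_tool_outputs` signature and both IDs are illustrative assumptions.

    client.beta.threads.runs.submit_tool_outputs(
      "run_abc123",
      thread_id: "thread_abc123",
      tool_outputs: [
        {tool_call_id: "call_abc123", output: "{\"temperature\": 21}"}
      ]
    )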
 # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image
 class Image < OpenAI::Internal::Type::BaseModel
@@ -38,7 +40,12 @@ class Image < OpenAI::Internal::Type::BaseModel
 optional :file_id, String
 # @!method initialize(file_id: nil)
- # @param file_id [String]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for
+ # more details.
+ #
+ # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image
+ # ...
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
index 8c7683ea..88f00a99 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb
@@ -27,11 +27,17 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel
 required :type, const: :code_interpreter
 # @!method initialize(id:, code_interpreter:, type: :code_interpreter)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall} for more details.
+ #
 # Details of the Code Interpreter tool call the run step was involved in.
 #
- # @param id [String]
- # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter]
- # @param type [Symbol, :code_interpreter]
+ # @param id [String] The ID of the tool call.
+ #
+ # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition.
+ #
+ # @param type [Symbol, :code_interpreter] The type of tool call. This is always going to be `code_interpreter` for this ty
+ # ...
 # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter
 class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -51,10 +57,16 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] }
 # @!method initialize(input:, outputs:)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter}
+ # for more details.
+ #
 # The Code Interpreter tool call definition.
 #
- # @param input [String]
- # @param outputs [Array]
+ # @param input [String] The input to the Code Interpreter tool call.
+ #
+ # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one
+ # ...
 # Text output from the Code Interpreter tool call as part of a run step.
 module Output
@@ -85,8 +97,9 @@ class Logs < OpenAI::Internal::Type::BaseModel
 # @!method initialize(logs:, type: :logs)
 # Text output from the Code Interpreter tool call as part of a run step.
 #
- # @param logs [String]
- # @param type [Symbol, :logs]
+ # @param logs [String] The text output from the Code Interpreter tool call.
+ #
+ # @param type [Symbol, :logs] Always `logs`.
 end
 class Image < OpenAI::Internal::Type::BaseModel
@@ -104,7 +117,8 @@ class Image < OpenAI::Internal::Type::BaseModel
 # @!method initialize(image:, type: :image)
 # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image]
- # @param type [Symbol, :image]
+ #
+ # @param type [Symbol, :image] Always `image`.
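A sketch of walking the Code Interpreter outputs documented above; `tool_call` is assumed to be a `CodeInterpreterToolCall` taken from a run step's tool calls.

    tool_call.code_interpreter.outputs.each do |output|
      case output.type
      when :logs
        puts output.logs  # plain text output from the tool call
      when :image
        puts "image file: #{output.image.file_id}"  # fetchable via the Files API
      end
    end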
 # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image
 class Image < OpenAI::Internal::Type::BaseModel
@@ -116,7 +130,12 @@ class Image < OpenAI::Internal::Type::BaseModel
 required :file_id, String
 # @!method initialize(file_id:)
- # @param file_id [String]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image}
+ # for more details.
+ #
+ # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image
+ # ...
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
index 84e66baf..204d059b 100644
--- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
+++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb
@@ -33,12 +33,20 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter }
 # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more
+ # details.
+ #
 # Details of the Code Interpreter tool call the run step was involved in.
 #
- # @param index [Integer]
- # @param id [String]
- # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter]
- # @param type [Symbol, :code_interpreter]
+ # @param index [Integer] The index of the tool call in the tool calls array.
+ #
+ # @param id [String] The ID of the tool call.
+ #
+ # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition.
+ #
+ # @param type [Symbol, :code_interpreter] The type of tool call. This is always going to be `code_interpreter` for this ty
+ # ...
 # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter
 class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -58,10 +66,16 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] }
 # @!method initialize(input: nil, outputs: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter}
+ # for more details.
+ #
 # The Code Interpreter tool call definition.
 #
- # @param input [String]
- # @param outputs [Array]
+ # @param input [String] The input to the Code Interpreter tool call.
+ #
+ # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one
+ # ...
 # Text output from the Code Interpreter tool call as part of a run step.
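A sketch of accumulating streamed Code Interpreter input from the delta models above; the surrounding event loop (`events`) is assumed and simplified.

    input_buffer = +""

    # `events` is assumed to yield run step delta events during streaming.
    events.each do |event|
      details = event.delta.step_details
      next unless details.respond_to?(:tool_calls)

      Array(details.tool_calls).each do |tc|
        next unless tc.type == :code_interpreter

        fragment = tc.code_interpreter&.input
        input_buffer << fragment if fragment
      end
    end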
 module Output
diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb
index f42b150c..d6149f24 100644
--- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb
+++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb
@@ -26,9 +26,15 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel
 required :type, const: :file_search
 # @!method initialize(id:, file_search:, type: :file_search)
- # @param id [String]
- # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch]
- # @param type [Symbol, :file_search]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall} for more details.
+ #
+ # @param id [String] The ID of the tool call object.
+ #
+ # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object.
+ #
+ # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of
+ # ...
 # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search
 class FileSearch < OpenAI::Internal::Type::BaseModel
@@ -49,8 +55,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
 # @!method initialize(ranking_options: nil, results: nil)
 # For now, this is always going to be an empty object.
 #
- # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions]
- # @param results [Array]
+ # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search.
+ #
+ # @param results [Array] The results of the file search.
 # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options
 class RankingOptions < OpenAI::Internal::Type::BaseModel
@@ -70,10 +77,17 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel
 required :score_threshold, Float
 # @!method initialize(ranker:, score_threshold:)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions}
+ # for more details.
+ #
 # The ranking options for the file search.
 #
- # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker]
- # @param score_threshold [Float]
+ # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank
+ # ...
+ #
+ # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num
+ # ...
 # The ranker to use for the file search. If not specified will use the `auto`
 # ranker.
@@ -119,12 +133,21 @@ class Result < OpenAI::Internal::Type::BaseModel
 -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] }
 # @!method initialize(file_id:, file_name:, score:, content: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result}
+ # for more details.
+ #
 # A result instance of the file search.
 #
- # @param file_id [String]
- # @param file_name [String]
- # @param score [Float]
- # @param content [Array]
+ # @param file_id [String] The ID of the file that result was found in.
+ #
+ # @param file_name [String] The name of the file that result was found in.
+ #
+ # @param score [Float] The score of the result. All values must be a floating point number between 0 an
+ # ...
+ #
+ # @param content [Array] The content of the result that was found. The content is only included if reques
+ # ...
 class Content < OpenAI::Internal::Type::BaseModel
 # @!attribute text
@@ -141,8 +164,9 @@ class Content < OpenAI::Internal::Type::BaseModel
 enum: -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type }
 # @!method initialize(text: nil, type: nil)
- # @param text [String]
- # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type]
+ # @param text [String] The text content of the file.
+ #
+ # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content.
 # The type of the content.
 #
diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb
index 31b50baf..de1c0704 100644
--- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb
+++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb
@@ -32,10 +32,17 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel
 optional :id, String
 # @!method initialize(file_search:, index:, id: nil, type: :file_search)
- # @param file_search [Object]
- # @param index [Integer]
- # @param id [String]
- # @param type [Symbol, :file_search]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta} for more details.
+ #
+ # @param file_search [Object] For now, this is always going to be an empty object.
+ #
+ # @param index [Integer] The index of the tool call in the tool calls array.
+ #
+ # @param id [String] The ID of the tool call object.
+ #
+ # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of
+ # ...
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb
index eb5ccad9..bb0655e2 100644
--- a/lib/openai/models/beta/threads/runs/function_tool_call.rb
+++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb
@@ -26,9 +26,15 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel
 required :type, const: :function
 # @!method initialize(id:, function:, type: :function)
- # @param id [String]
- # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function]
- # @param type [Symbol, :function]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall} for more details.
+ #
+ # @param id [String] The ID of the tool call object.
+ #
+ # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called.
+ #
+ # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to
+ # ...
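A sketch of reading the tool call variants documented above from a completed run step; `step` is assumed to be a `RunStep` whose step details contain tool calls.

    step.step_details.tool_calls.each do |tc|
      case tc.type
      when :file_search
        Array(tc.file_search.results).each do |result|
          puts "#{result.file_name} (score #{result.score})"
        end
      when :function
        puts "#{tc.function.name}(#{tc.function.arguments}) => #{tc.function.output.inspect}"
      end
    end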
 # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function
 class Function < OpenAI::Internal::Type::BaseModel
@@ -53,11 +59,18 @@ class Function < OpenAI::Internal::Type::BaseModel
 required :output, String, nil?: true
 # @!method initialize(arguments:, name:, output:)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function} for more
+ # details.
+ #
 # The definition of the function that was called.
 #
- # @param arguments [String]
- # @param name [String]
- # @param output [String, nil]
+ # @param arguments [String] The arguments passed to the function.
+ #
+ # @param name [String] The name of the function.
+ #
+ # @param output [String, nil] The output of the function. This will be `null` if the outputs have not been [su
+ # ...
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb
index c9af620f..62f29656 100644
--- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb
+++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb
@@ -32,10 +32,17 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel
 optional :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function }
 # @!method initialize(index:, id: nil, function: nil, type: :function)
- # @param index [Integer]
- # @param id [String]
- # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function]
- # @param type [Symbol, :function]
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta} for more details.
+ #
+ # @param index [Integer] The index of the tool call in the tool calls array.
+ #
+ # @param id [String] The ID of the tool call object.
+ #
+ # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called.
+ #
+ # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to
+ # ...
 # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function
 class Function < OpenAI::Internal::Type::BaseModel
@@ -60,11 +67,18 @@ class Function < OpenAI::Internal::Type::BaseModel
 optional :output, String, nil?: true
 # @!method initialize(arguments: nil, name: nil, output: nil)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more
+ # details.
+ #
 # The definition of the function that was called.
 #
- # @param arguments [String]
- # @param name [String]
- # @param output [String, nil]
+ # @param arguments [String] The arguments passed to the function.
+ #
+ # @param name [String] The name of the function.
+ #
+ # @param output [String, nil] The output of the function. This will be `null` if the outputs have not been [su
+ # ...
 end
 end
 end
diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb
index 575eb64f..727d980b 100644
--- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb
+++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb
@@ -22,7 +22,8 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel
 # Details of the message creation by the run step.
# # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] - # @param type [Symbol, :message_creation] + # + # @param type [Symbol, :message_creation] Always `message_creation`. # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -33,7 +34,7 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel required :message_id, String # @!method initialize(message_id:) - # @param message_id [String] + # @param message_id [String] The ID of the message that was created by this run step. end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index d4fe0c67..c5076851 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -117,24 +117,49 @@ class RunStep < OpenAI::Internal::Type::BaseModel required :usage, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Usage }, nil?: true # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::RunStep} for more details. + # # Represents a step in execution of a run. # - # @param id [String] - # @param assistant_id [String] - # @param cancelled_at [Integer, nil] - # @param completed_at [Integer, nil] - # @param created_at [Integer] - # @param expired_at [Integer, nil] - # @param failed_at [Integer, nil] - # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param run_id [String] - # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] - # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] - # @param thread_id [String] - # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] - # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] - # @param object [Symbol, :"thread.run.step"] + # @param id [String] The identifier of the run step, which can be referenced in API endpoints. + # + # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista + # ... + # + # @param cancelled_at [Integer, nil] The Unix timestamp (in seconds) for when the run step was cancelled. + # + # @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the run step completed. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the run step was created. + # + # @param expired_at [Integer, nil] The Unix timestamp (in seconds) for when the run step expired. A step is conside + # ... + # + # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run step failed. + # + # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param run_id [String] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that th + # ... 
+ # + # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai + # ... + # + # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. + # + # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t + # ... + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. + # + # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru + # ... + # + # @param object [Symbol, :"thread.run.step"] The object type, which is always `thread.run.step`. # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -154,8 +179,9 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] - # @param message [String] + # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # + # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. # @@ -243,9 +269,11 @@ class Usage < OpenAI::Internal::Type::BaseModel # Usage statistics related to the run step. This value will be `null` while the # run step's status is `in_progress`. # - # @param completion_tokens [Integer] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param completion_tokens [Integer] Number of completion tokens used over the course of the run step. + # + # @param prompt_tokens [Integer] Number of prompt tokens used over the course of the run step. + # + # @param total_tokens [Integer] Total number of tokens used (prompt + completion). end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index ec46591a..2a53c523 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -15,7 +15,7 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. # - # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] + # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. # The details of the run step. # diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index 4d83e30e..f5c81ffe 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -28,9 +28,11 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. 
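The `status`, `last_error`, and `usage` fields documented above are enough to report a step's outcome; `usage` is documented as `null` while the step is still `in_progress`. A small sketch, assuming `step` is a retrieved `RunStep` and that enum values surface as symbols:

    case step.status
    in :completed
      # `usage` should be populated once the step is no longer in progress.
      puts "ok: #{step.usage&.total_tokens} tokens used"
    in :failed
      puts "failed: #{step.last_error&.code} - #{step.last_error&.message}"
    else
      puts "status: #{step.status}"
    end
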
# - # @param id [String] - # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] - # @param object [Symbol, :"thread.run.step.delta"] + # @param id [String] The identifier of the run step, which can be referenced in API endpoints. + # + # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. + # + # @param object [Symbol, :"thread.run.step.delta"] The object type, which is always `thread.run.step.delta`. end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 4335b875..85fdad6f 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -22,7 +22,8 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # Details of the message creation by the run step. # # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] - # @param type [Symbol, :message_creation] + # + # @param type [Symbol, :message_creation] Always `message_creation`. # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -33,7 +34,7 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel optional :message_id, String # @!method initialize(message_id: nil) - # @param message_id [String] + # @param message_id [String] The ID of the message that was created by this run step. end end end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 75b02311..2bcb9cb5 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -61,12 +61,26 @@ class StepListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Beta::Threads::Runs::StepListParams::Order } # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::StepListParams} for more details. + # # @param thread_id [String] - # @param after [String] - # @param before [String] - # @param include [Array] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 00db2d8a..4a2c095e 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -34,9 +34,16 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams} for more details. + # # @param thread_id [String] + # # @param run_id [String] - # @param include [Array] + # + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 417a924a..51c5d074 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -22,10 +22,15 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } # @!method initialize(tool_calls: nil, type: :tool_calls) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject} for more details. + # # Details of the tool call. # - # @param tool_calls [Array] - # @param type [Symbol, :tool_calls] + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # ... + # + # @param type [Symbol, :tool_calls] Always `tool_calls`. end end end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 08393ac6..3a8800d6 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -22,10 +22,15 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel required :type, const: :tool_calls # @!method initialize(tool_calls:, type: :tool_calls) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails} for more details. + # # Details of the tool call. # - # @param tool_calls [Array] - # @param type [Symbol, :tool_calls] + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # ... + # + # @param type [Symbol, :tool_calls] Always `tool_calls`. end end end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index aa0282e1..8c5eb9f0 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -19,7 +19,8 @@ class Text < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations:, value:) # @param annotations [Array] - # @param value [String] + # + # @param value [String] The data that makes up the text. 
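The cursor parameters above compose into the usual paging pattern. A hypothetical call, assuming the generated steps resource exposes `list` with the run ID positional and the remaining fields as keywords, and that the returned page supports `auto_paging_each`; neither shape is shown in this hunk:

    page = client.beta.threads.runs.steps.list(
      run_id,
      thread_id: thread_id,
      limit: 100,   # documented range is 1 to 100
      order: :asc,  # sort by `created_at`, ascending
      include: [:"step_details.tool_calls[*].file_search.results[*].content"]
    )
    page.auto_paging_each { |step| puts "#{step.id} #{step.status}" }
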
end end end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 7d8eb24b..9f61e404 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -20,7 +20,8 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # The text content that is part of a message. # # @param text [OpenAI::Models::Beta::Threads::Text] - # @param type [Symbol, :text] + # + # @param type [Symbol, :text] Always `text`. end end end diff --git a/lib/openai/models/beta/threads/text_content_block_param.rb b/lib/openai/models/beta/threads/text_content_block_param.rb index 26880392..89ff6435 100644 --- a/lib/openai/models/beta/threads/text_content_block_param.rb +++ b/lib/openai/models/beta/threads/text_content_block_param.rb @@ -20,8 +20,9 @@ class TextContentBlockParam < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :text) # The text content that is part of a message. # - # @param text [String] - # @param type [Symbol, :text] + # @param text [String] Text content to be sent to the model + # + # @param type [Symbol, :text] Always `text`. end end end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 2d767df4..20b88879 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -19,7 +19,8 @@ class TextDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations: nil, value: nil) # @param annotations [Array] - # @param value [String] + # + # @param value [String] The data that makes up the text. end end end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 7191d790..c0172733 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -25,9 +25,11 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(index:, text: nil, type: :text) # The text content that is part of a message. # - # @param index [Integer] + # @param index [Integer] The index of the content part in the message. + # # @param text [OpenAI::Models::Beta::Threads::TextDelta] - # @param type [Symbol, :text] + # + # @param type [Symbol, :text] Always `text`. end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 2e58ff3d..15613a18 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -76,17 +76,30 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Models::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletion} for more details. + # # Represents a chat completion response returned by model, based on the provided # input. # - # @param id [String] - # @param choices [Array] - # @param created [Integer] - # @param model [String] - # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] - # @param system_fingerprint [String] - # @param usage [OpenAI::Models::CompletionUsage] - # @param object [Symbol, :"chat.completion"] + # @param id [String] A unique identifier for the chat completion. 
+ # + # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 + # ... + # + # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. + # + # @param model [String] The model used for the chat completion. + # + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. + # ... + # + # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. + # + # @param object [Symbol, :"chat.completion"] The object type, which is always `chat.completion`. class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason @@ -119,10 +132,17 @@ class Choice < OpenAI::Internal::Type::BaseModel required :message, -> { OpenAI::Models::Chat::ChatCompletionMessage } # @!method initialize(finish_reason:, index:, logprobs:, message:) - # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] - # @param index [Integer] - # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] - # @param message [OpenAI::Models::Chat::ChatCompletionMessage] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletion::Choice} for more details. + # + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model + # ... + # + # @param index [Integer] The index of the choice in the list of choices. + # + # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. + # + # @param message [OpenAI::Models::Chat::ChatCompletionMessage] A chat completion message generated by the model. # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -166,8 +186,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] - # @param refusal [Array, nil] + # @param content [Array, nil] A list of message content tokens with log probability information. + # + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index a139420e..f5bc27e0 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -58,15 +58,27 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam} for more details. + # # Messages sent by the model in response to user messages. 
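Read together, the `ChatCompletion` fields above suggest the usual consumer loop. A sketch where `completion` is assumed to be the value returned by a create call:

    completion.choices.each do |choice|
      case choice.finish_reason
      in :stop | :length
        puts choice.message.content
      in :tool_calls
        choice.message.tool_calls&.each { |tc| puts "model requested #{tc.function.name}" }
      else
        puts "finished with #{choice.finish_reason}"
      end
    end
    puts "total tokens: #{completion.usage&.total_tokens}"
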
# - # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] - # @param content [String, Array, nil] - # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] - # @param name [String] - # @param refusal [String, nil] - # @param tool_calls [Array] - # @param role [Symbol, :assistant] + # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. ... + # + # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function + # ... + # + # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # ... + # + # @param name [String] An optional name for the participant. Provides the model information to differen + # ... + # + # @param refusal [String, nil] The refusal message by the assistant. + # + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # + # @param role [Symbol, :assistant] The role of the messages author, in this case `assistant`. # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel @@ -77,10 +89,14 @@ class Audio < OpenAI::Internal::Type::BaseModel required :id, String # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio} for more + # details. + # # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param id [String] + # @param id [String] Unique identifier for a previous audio response from the model. ... end # The contents of the assistant message. Required unless `tool_calls` or @@ -139,11 +155,17 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel required :name, String # @!method initialize(arguments:, name:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for + # more details. + # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @param arguments [String] - # @param name [String] + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # ... + # + # @param name [String] The name of the function to call. end end end diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index e56cae38..4ade12b4 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -31,14 +31,20 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel required :transcript, String # @!method initialize(id:, data:, expires_at:, transcript:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAudio} for more details. + # # If the audio output modality is requested, this object contains data about the # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param id [String] - # @param data [String] - # @param expires_at [Integer] - # @param transcript [String] + # @param id [String] Unique identifier for this audio response. 
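When an earlier turn produced audio, only the documented audio `id` is echoed back into history, and `content` may legitimately be `nil`. An illustrative fragment built from plain hashes (typed params are also accepted); `previous_audio_id` is assumed to come from a prior `completion.choices.first.message.audio.id`:

    messages = [
      {role: :user, content: "Tell me a joke, out loud."},
      {
        role: :assistant,
        content: nil,                    # allowed when audio or tool calls were produced
        audio: {id: previous_audio_id},  # replay only the audio response's ID
      },
      {role: :user, content: "Now explain why it's funny."},
    ]
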
+ # + # @param data [String] Base64 encoded audio bytes generated by the model, in the format ... + # + # @param expires_at [Integer] The Unix timestamp (in seconds) for when this audio response will ... + # + # @param transcript [String] Transcript of the audio generated by the model. end end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 3374567d..faff16ff 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -19,12 +19,16 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } # @!method initialize(format_:, voice:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAudioParam} for more details. + # # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, ... + # + # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are ... # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 05970b9f..e663c989 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -81,18 +81,32 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionChunk} for more details. + # # Represents a streamed chunk of a chat completion response returned by the model, # based on the provided input. # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). # - # @param id [String] - # @param choices [Array] - # @param created [Integer] - # @param model [String] - # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] - # @param system_fingerprint [String] - # @param usage [OpenAI::Models::CompletionUsage, nil] - # @param object [Symbol, :"chat.completion.chunk"] + # @param id [String] A unique identifier for the chat completion. Each chunk has the same ID. + # + # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is + # ... + # + # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. Each ch + # ... + # + # @param model [String] The model to generate the completion. + # + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... 
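A hypothetical request exercising the audio parameters above; note that `format_` is the Ruby-side name for the wire field `format` (the same `api_name` mapping used elsewhere in this patch), and the model name is illustrative:

    completion = client.chat.completions.create(
      model: "gpt-4o-audio-preview",           # illustrative model name
      modalities: [:text, :audio],
      audio: {format_: :mp3, voice: :alloy},   # output format and voice
      messages: [{role: :user, content: "Say hello in one sentence."}]
    )
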
+ # + # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. + # ... + # + # @param usage [OpenAI::Models::CompletionUsage, nil] An optional field that will only be present when you set ... + # + # @param object [Symbol, :"chat.completion.chunk"] The object type, which is always `chat.completion.chunk`. class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta @@ -127,10 +141,17 @@ class Choice < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) - # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] - # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] - # @param index [Integer] - # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice} for more details. + # + # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. + # + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model + # ... + # + # @param index [Integer] The index of the choice in the list of choices. + # + # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel @@ -168,12 +189,20 @@ class Delta < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta} for more details. + # # A chat completion delta generated by streamed model responses. # - # @param content [String, nil] - # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] - # @param refusal [String, nil] - # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] + # @param content [String, nil] The contents of the chunk message. + # + # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # ... + # + # @param refusal [String, nil] The refusal message generated by the model. + # + # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. + # # @param tool_calls [Array] # @deprecated @@ -196,11 +225,17 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(arguments: nil, name: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for + # more details. + # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. 
# - # @param arguments [String] - # @param name [String] + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # ... + # + # @param name [String] The name of the function to call. end # The role of the author of this message. @@ -244,9 +279,12 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(index:, id: nil, function: nil, type: nil) # @param index [Integer] - # @param id [String] + # + # @param id [String] The ID of the tool call. + # # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] - # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] + # + # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -266,8 +304,14 @@ class Function < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(arguments: nil, name: nil) - # @param arguments [String] - # @param name [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} + # for more details. + # + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # ... + # + # @param name [String] The name of the function to call. end # The type of the tool. Currently, only `function` is supported. @@ -326,8 +370,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] - # @param refusal [Array, nil] + # @param content [Array, nil] A list of message content tokens with log probability information. + # + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index ecfb1d58..b00878ec 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -39,7 +39,8 @@ class File < OpenAI::Internal::Type::BaseModel # generation. # # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] - # @param type [Symbol, :file] + # + # @param type [Symbol, :file] The type of the content part. Always `file`. # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel @@ -63,9 +64,14 @@ class File < OpenAI::Internal::Type::BaseModel optional :filename, String # @!method initialize(file_data: nil, file_id: nil, filename: nil) - # @param file_data [String] - # @param file_id [String] - # @param filename [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionContentPart::File::File} for more details. + # + # @param file_data [String] The base64 encoded file data, used when passing the file to the model ... + # + # @param file_id [String] The ID of an uploaded file to use as input. ... + # + # @param filename [String] The name of the file, used when passing the file to the model as a ... 
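Streamed chunks carry incremental `delta` fields rather than whole messages, so consumers fold them into a running buffer. A sketch of the accumulation step only; how `chunks` (an enumerable of `ChatCompletionChunk`) is obtained is out of scope here:

    content = +""
    chunks.each do |chunk|
      choice = chunk.choices.first or next
      content << (choice.delta.content || "")
      puts "finish: #{choice.finish_reason}" if choice.finish_reason
    end
    puts content
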
end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 06c450d2..69462ff7 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -19,7 +19,8 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). # # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] - # @param type [Symbol, :image_url] + # + # @param type [Symbol, :image_url] The type of the content part. # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -37,8 +38,14 @@ class ImageURL < OpenAI::Internal::Type::BaseModel optional :detail, enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail } # @!method initialize(url:, detail: nil) - # @param url [String] - # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL} for more + # details. + # + # @param url [String] Either a URL of the image or the base64 encoded image data. + # + # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: + # ... # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 9331e125..155990b0 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -19,7 +19,8 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). # # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] - # @param type [Symbol, :input_audio] + # + # @param type [Symbol, :input_audio] The type of the content part. Always `input_audio`. # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel @@ -38,8 +39,13 @@ class InputAudio < OpenAI::Internal::Type::BaseModel api_name: :format # @!method initialize(data:, format_:) - # @param data [String] - # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more + # details. + # + # @param data [String] Base64 encoded audio data. + # + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". ... # The format of the encoded audio data. Currently supports "wav" and "mp3". 
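The content-part shapes above combine into a single multimodal user message. An illustrative hash, with a placeholder URL and file path; `format_` again maps to the wire field `format`:

    require "base64"

    user_message = {
      role: :user,
      content: [
        {type: :text, text: "What is in this image, and what does the clip say?"},
        {type: :image_url, image_url: {url: "https://example.com/photo.jpg", detail: :low}},
        {
          type: :input_audio,
          input_audio: {data: Base64.strict_encode64(File.binread("clip.mp3")), format_: :mp3},
        },
      ],
    }
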
# diff --git a/lib/openai/models/chat/chat_completion_content_part_refusal.rb b/lib/openai/models/chat/chat_completion_content_part_refusal.rb index 9137d008..5f1e561e 100644 --- a/lib/openai/models/chat/chat_completion_content_part_refusal.rb +++ b/lib/openai/models/chat/chat_completion_content_part_refusal.rb @@ -17,8 +17,9 @@ class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel required :type, const: :refusal # @!method initialize(refusal:, type: :refusal) - # @param refusal [String] - # @param type [Symbol, :refusal] + # @param refusal [String] The refusal message generated by the model. + # + # @param type [Symbol, :refusal] The type of the content part. end end diff --git a/lib/openai/models/chat/chat_completion_content_part_text.rb b/lib/openai/models/chat/chat_completion_content_part_text.rb index 212467d9..3800b650 100644 --- a/lib/openai/models/chat/chat_completion_content_part_text.rb +++ b/lib/openai/models/chat/chat_completion_content_part_text.rb @@ -20,8 +20,9 @@ class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). # - # @param text [String] - # @param type [Symbol, :text] + # @param text [String] The text content. + # + # @param type [Symbol, :text] The type of the content part. end end diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index a3125c86..2cec245f 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -24,9 +24,11 @@ class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel required :object, const: :"chat.completion.deleted" # @!method initialize(id:, deleted:, object: :"chat.completion.deleted") - # @param id [String] - # @param deleted [Boolean] - # @param object [Symbol, :"chat.completion.deleted"] + # @param id [String] The ID of the chat completion that was deleted. + # + # @param deleted [Boolean] Whether the chat completion was deleted. + # + # @param object [Symbol, :"chat.completion.deleted"] The type of object being deleted. end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 2c7e20e4..df851c32 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -24,13 +24,19 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(content:, name: nil, role: :developer) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam} for more details. + # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. # - # @param content [String, Array] - # @param name [String] - # @param role [Symbol, :developer] + # @param content [String, Array] The contents of the developer message. + # + # @param name [String] An optional name for the participant. Provides the model information to differen + # ... + # + # @param role [Symbol, :developer] The role of the messages author, in this case `developer`. # The contents of the developer message. 
# diff --git a/lib/openai/models/chat/chat_completion_function_call_option.rb b/lib/openai/models/chat/chat_completion_function_call_option.rb index 9c7d28fd..89566e41 100644 --- a/lib/openai/models/chat/chat_completion_function_call_option.rb +++ b/lib/openai/models/chat/chat_completion_function_call_option.rb @@ -14,7 +14,7 @@ class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # Specifying a particular function via `{"name": "my_function"}` forces the model # to call that function. # - # @param name [String] + # @param name [String] The name of the function to call. end end diff --git a/lib/openai/models/chat/chat_completion_function_message_param.rb b/lib/openai/models/chat/chat_completion_function_message_param.rb index 4a8efb76..feb98749 100644 --- a/lib/openai/models/chat/chat_completion_function_message_param.rb +++ b/lib/openai/models/chat/chat_completion_function_message_param.rb @@ -24,9 +24,11 @@ class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel required :role, const: :function # @!method initialize(content:, name:, role: :function) - # @param content [String, nil] - # @param name [String] - # @param role [Symbol, :function] + # @param content [String, nil] The contents of the function message. + # + # @param name [String] The name of the function to call. + # + # @param role [Symbol, :function] The role of the messages author, in this case `function`. end end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 21ca6db0..8a75e905 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -55,15 +55,25 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionMessage} for more details. + # # A chat completion message generated by the model. # - # @param content [String, nil] - # @param refusal [String, nil] - # @param annotations [Array] - # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] - # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] - # @param tool_calls [Array] - # @param role [Symbol, :assistant] + # @param content [String, nil] The contents of the message. + # + # @param refusal [String, nil] The refusal message generated by the model. + # + # @param annotations [Array] Annotations for the message, when applicable, as when using the ... + # + # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data ... + # + # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # ... + # + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # + # @param role [Symbol, :assistant] The role of the author of this message. class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -81,8 +91,9 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @!method initialize(url_citation:, type: :url_citation) # A URL citation when using web search. 
# - # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] - # @param type [Symbol, :url_citation] + # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. + # + # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel @@ -113,10 +124,13 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @!method initialize(end_index:, start_index:, title:, url:) # A URL citation when using web search. # - # @param end_index [Integer] - # @param start_index [Integer] - # @param title [String] - # @param url [String] + # @param end_index [Integer] The index of the last character of the URL citation in the message. + # + # @param start_index [Integer] The index of the first character of the URL citation in the message. + # + # @param title [String] The title of the web resource. + # + # @param url [String] The URL of the web resource. end end @@ -140,11 +154,16 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel required :name, String # @!method initialize(arguments:, name:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall} for more details. + # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @param arguments [String] - # @param name [String] + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # ... + # + # @param name [String] The name of the function to call. end end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index adad4c9a..6ecdc5b5 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -23,9 +23,11 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(id:, function:, type: :function) - # @param id [String] - # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] - # @param type [Symbol, :function] + # @param id [String] The ID of the tool call. + # + # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. + # + # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -45,10 +47,16 @@ class Function < OpenAI::Internal::Type::BaseModel required :name, String # @!method initialize(arguments:, name:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function} for more + # details. + # # The function that the model called. # - # @param arguments [String] - # @param name [String] + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # ... + # + # @param name [String] The name of the function to call. 
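The citation indices above are character offsets into `message.content`, which makes footnote rendering a simple slice. A sketch, assuming `message` is a `ChatCompletionMessage` from a web-search response and reading `end_index` as the documented last character (hence the inclusive range):

    (message.annotations || []).each do |ann|
      next unless ann.type == :url_citation

      cite = ann.url_citation
      quoted = message.content[cite.start_index..cite.end_index]
      puts "#{quoted.inspect} -> #{cite.title} (#{cite.url})"
    end
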
end end end diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index d6d7a955..245af016 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -20,7 +20,8 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # function. # # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] - # @param type [Symbol, :function] + # + # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel @@ -31,7 +32,7 @@ class Function < OpenAI::Internal::Type::BaseModel required :name, String # @!method initialize(name:) - # @param name [String] + # @param name [String] The name of the function to call. end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index b6c130fb..3669e9d3 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -20,11 +20,15 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel required :type, const: :content # @!method initialize(content:, type: :content) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionPredictionContent} for more details. + # # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @param content [String, Array] - # @param type [Symbol, :content] + # @param content [String, Array] The content that should be matched when generating a model response. ... + # + # @param type [Symbol, :content] The type of the predicted content you want to provide. This type is ... # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be diff --git a/lib/openai/models/chat/chat_completion_store_message.rb b/lib/openai/models/chat/chat_completion_store_message.rb index 119b5a36..cd926898 100644 --- a/lib/openai/models/chat/chat_completion_store_message.rb +++ b/lib/openai/models/chat/chat_completion_store_message.rb @@ -13,7 +13,7 @@ class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # @!method initialize(id:) # A chat completion message generated by the model. # - # @param id [String] + # @param id [String] The identifier of the chat message. end end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index 4bc8cef1..de346443 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -17,9 +17,12 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel optional :include_usage, OpenAI::Internal::Type::Boolean # @!method initialize(include_usage: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionStreamOptions} for more details. + # # Options for streaming response. Only set this when you set `stream: true`. # - # @param include_usage [Boolean] + # @param include_usage [Boolean] If set, an additional chunk will be streamed before the `data: [DONE]` ... 
end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 19d5d598..99e35629 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -24,13 +24,19 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(content:, name: nil, role: :system) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionSystemMessageParam} for more details. + # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages # for this purpose instead. # - # @param content [String, Array] - # @param name [String] - # @param role [Symbol, :system] + # @param content [String, Array] The contents of the system message. + # + # @param name [String] An optional name for the participant. Provides the model information to differen + # ... + # + # @param role [Symbol, :system] The role of the messages author, in this case `system`. # The contents of the system message. # diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index 1ab8dd0f..2dfadc88 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -37,10 +37,19 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } # @!method initialize(token:, bytes:, logprob:, top_logprobs:) - # @param token [String] - # @param bytes [Array, nil] - # @param logprob [Float] - # @param top_logprobs [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob} for more details. + # + # @param token [String] The token. + # + # @param bytes [Array, nil] A list of integers representing the UTF-8 bytes representation of the token. Use + # ... + # + # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens + # ... + # + # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position + # ... class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -67,9 +76,16 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel required :logprob, Float # @!method initialize(token:, bytes:, logprob:) - # @param token [String] - # @param bytes [Array, nil] - # @param logprob [Float] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. + # + # @param token [String] The token. + # + # @param bytes [Array, nil] A list of integers representing the UTF-8 bytes representation of the token. Use + # ... + # + # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens + # ... 
end end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index 1cef5fd5..c06b28d0 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -17,7 +17,8 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @!method initialize(function:, type: :function) # @param function [OpenAI::Models::FunctionDefinition] - # @param type [Symbol, :function] + # + # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index a0718af1..e03505be 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -23,9 +23,11 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel required :tool_call_id, String # @!method initialize(content:, tool_call_id:, role: :tool) - # @param content [String, Array] - # @param tool_call_id [String] - # @param role [Symbol, :tool] + # @param content [String, Array] The contents of the tool message. + # + # @param tool_call_id [String] Tool call that this message is responding to. + # + # @param role [Symbol, :tool] The role of the messages author, in this case `tool`. # The contents of the tool message. # diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 7f72cdc3..3aa9532f 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -24,12 +24,18 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(content:, name: nil, role: :user) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionUserMessageParam} for more details. + # # Messages sent by an end user, containing prompts or additional context # information. # - # @param content [String, Array] - # @param name [String] - # @param role [Symbol, :user] + # @param content [String, Array] The contents of the user message. ... + # + # @param name [String] An optional name for the participant. Provides the model information to differen + # ... + # + # @param role [Symbol, :user] The role of the messages author, in this case `user`. # The contents of the user message. 
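A hedged round-trip using the tool-message shape above: run the model's requested function locally, then answer with a `tool` message keyed by the documented `tool_call_id`:

    require "json"

    tool_call = completion.choices.first.message.tool_calls.first
    result = {temperature_c: 21}                 # stand-in for a real local function call

    messages << completion.choices.first.message # echoing the typed model back is assumed to be accepted; a plain hash works too
    messages << {
      role: :tool,
      tool_call_id: tool_call.id,
      content: JSON.generate(result),
    }
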
# diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 045de7a8..185edac2 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -329,36 +329,76 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :web_search_options, -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) - # @param messages [Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel] - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - # @param frequency_penalty [Float, nil] - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # @param functions [Array] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Boolean, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param modalities [Array, nil] - # @param n [Integer, nil] - # @param parallel_tool_calls [Boolean] - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - # @param presence_penalty [Float, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # @param seed [Integer, nil] - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - # @param stop [String, Array, nil] - # @param store [Boolean, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # @param tools [Array] - # @param top_logprobs [Integer, nil] - # @param top_p [Float, nil] - # @param user [String] - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams} for more details. + # + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... 
+ # + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # + # @param functions [Array] Deprecated in favor of `tools`. ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # + # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param modalities [Array, nil] Output types that you would like the model to generate. ... + # + # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # + # @param seed [Integer, nil] This feature is in Beta. ... + # + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # ... + # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... 
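The `tools` and `tool_choice` parameters above replace the deprecated `functions` and `function_call`. A hedged sketch of declaring one function tool and forcing its use follows; the tool name, description, and schema are illustrative, not part of this patch.

# Sketch: a function tool plus a named tool_choice.
tools = [
  {
    type: :function,
    function: {
      name: "get_weather",
      description: "Look up current weather for a city.",
      parameters: {
        type: "object",
        properties: {city: {type: "string"}},
        required: ["city"]
      }
    }
  }
]

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: :user, content: "Weather in Paris?"}],
  tools: tools,
  # Force the model to call the declared function rather than answer freely.
  tool_choice: {type: :function, function: {name: "get_weather"}}
)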
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a @@ -452,9 +492,17 @@ class Function < OpenAI::Internal::Type::BaseModel optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!method initialize(name:, description: nil, parameters: nil) - # @param name [String] - # @param description [String] - # @param parameters [Hash{Symbol=>Object}] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams::Function} for more details. + # + # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc + # ... + # + # @param description [String] A description of what the function does, used by the model to choose when and ho + # ... + # + # @param parameters [Hash{Symbol=>Object}] The parameters the functions accepts, described as a JSON Schema object. See the + # ... end module Modality @@ -560,12 +608,17 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel nil?: true # @!method initialize(search_context_size: nil, user_location: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions} for more + # details. + # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] - # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] + # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the ... + # + # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. ... # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. @@ -598,10 +651,15 @@ class UserLocation < OpenAI::Internal::Type::BaseModel required :type, const: :approximate # @!method initialize(approximate:, type: :approximate) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} + # for more details. + # # Approximate location parameters for the search. # - # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] - # @param type [Symbol, :approximate] + # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. + # + # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. ... 
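Pulling the pieces above together, here is a minimal sketch of a chat completion request that exercises `web_search_options` with an approximate `user_location`. The field names follow the parameter docs above; the search-capable model name and the location values are assumptions for illustration.

# Sketch: chat completion with web search and an approximate user location.
completion = client.chat.completions.create(
  model: "gpt-4o-search-preview", # assumed search-capable model
  messages: [{role: :user, content: "Any transit disruptions near me today?"}],
  metadata: {usecase: "docs-example"},
  user: "user-1234",
  web_search_options: {
    search_context_size: :medium,
    user_location: {
      type: :approximate,
      approximate: {city: "San Francisco", country: "US", timezone: "America/Los_Angeles"}
    }
  }
)
puts completion.choices.first.message.content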
# @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel @@ -632,12 +690,19 @@ class Approximate < OpenAI::Internal::Type::BaseModel optional :timezone, String # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} + # for more details. + # # Approximate location parameters for the search. # - # @param city [String] - # @param country [String] - # @param region [String] - # @param timezone [String] + # @param city [String] Free text input for the city of the user, e.g. `San Francisco`. ... + # + # @param country [String] The two-letter ... + # + # @param region [String] Free text input for the region of the user, e.g. `California`. ... + # + # @param timezone [String] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) ... end end end diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index ab7f9060..ea9cb4c5 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -42,11 +42,20 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Chat::CompletionListParams::Order } # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionListParams} for more details. + # + # @param after [String] Identifier for the last chat completion from the previous pagination request. + # + # @param limit [Integer] Number of Chat Completions to retrieve. + # + # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: ... + # + # @param model [String] The model used to generate the Chat Completions. + # + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index b3a23cff..3c2e662d 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -20,7 +20,11 @@ class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(metadata:, request_options: {}) - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionUpdateParams} for more details. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 19a29905..775be46e 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -29,9 +29,16 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Chat::Completions::MessageListParams::Order } # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::Completions::MessageListParams} for more details. + # + # @param after [String] Identifier for the last message from the previous pagination request. + # + # @param limit [Integer] Number of messages to retrieve. + # + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 81515805..2969dedd 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -30,12 +30,18 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel required :value, union: -> { OpenAI::Models::ComparisonFilter::Value } # @!method initialize(key:, type:, value:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ComparisonFilter} for more details. + # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. # - # @param key [String] - # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] - # @param value [String, Float, Boolean] + # @param key [String] The key to compare against the value. + # + # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. ... + # + # @param value [String, Float, Boolean] The value to compare against the attribute key; supports string, number, or bool + # ... # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 4980830d..c493a5c5 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -52,16 +52,26 @@ class Completion < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Models::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Completion} for more details. + # # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). 
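Before the `Completion` fields documented just below, a quick sketch of the `ComparisonFilter` documented above. Constructing the model with keywords follows its `@!method initialize` signature; an equivalent plain hash works anywhere the SDK expects this type.

# Sketch: compare an attribute key against a value.
filter = OpenAI::Models::ComparisonFilter.new(
  key: "region",   # the attribute key to compare
  type: :eq,       # one of :eq, :ne, :gt, :gte, :lt, :lte
  value: "us-east" # string, number, or boolean
)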
# - # @param id [String] - # @param choices [Array] - # @param created [Integer] - # @param model [String] - # @param system_fingerprint [String] - # @param usage [OpenAI::Models::CompletionUsage] - # @param object [Symbol, :text_completion] + # @param id [String] A unique identifier for the completion. + # + # @param choices [Array] The list of completion choices the model generated for the input prompt. + # + # @param created [Integer] The Unix timestamp (in seconds) of when the completion was created. + # + # @param model [String] The model used for completion. + # + # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. + # ... + # + # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. + # + # @param object [Symbol, :text_completion] The object type, which is always "text_completion" end end end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 3fcd5752..5bca4663 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -28,9 +28,16 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel required :text, String # @!method initialize(finish_reason:, index:, logprobs:, text:) - # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompletionChoice} for more details. + # + # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model + # ... + # # @param index [Integer] + # # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] + # # @param text [String] # The reason the model stopped generating tokens. This will be `stop` if the model diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 7084d9f5..c6f47865 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -183,23 +183,54 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - # @param prompt [String, Array, Array, Array>, nil] - # @param best_of [Integer, nil] - # @param echo [Boolean, nil] - # @param frequency_penalty [Float, nil] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Integer, nil] - # @param max_tokens [Integer, nil] - # @param n [Integer, nil] - # @param presence_penalty [Float, nil] - # @param seed [Integer, nil] - # @param stop [String, Array, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param suffix [String, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompletionCreateParams} for more details. + # + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... 
+ # + # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings + # ... + # + # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with + # ... + # + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi + # ... + # + # @param n [Integer, nil] How many completions to generate for each prompt. ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe + # ... + # + # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index e2b4092d..defd2f03 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -36,11 +36,15 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) # Usage statistics for the completion request. # - # @param completion_tokens [Integer] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] - # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] - # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] + # @param completion_tokens [Integer] Number of tokens in the generated completion. + # + # @param prompt_tokens [Integer] Number of tokens in the prompt. + # + # @param total_tokens [Integer] Total number of tokens used in the request (prompt + completion). + # + # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. + # + # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. 
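To tie the legacy `CompletionCreateParams` and the `CompletionUsage` fields above together, here is a hedged sketch; the model name and prompt are illustrative, and only `model` and `prompt` are required.

# Sketch: legacy completions request, then inspect token usage.
completion = client.completions.create(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a haiku about patch files.",
  max_tokens: 64,
  temperature: 0.7,
  stop: ["\n\n"]
)

if (usage = completion.usage)
  # prompt + completion = total, per the CompletionUsage docs above.
  puts "prompt=#{usage.prompt_tokens} completion=#{usage.completion_tokens} total=#{usage.total_tokens}"
end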
# @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel @@ -73,12 +77,18 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel optional :rejected_prediction_tokens, Integer # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompletionUsage::CompletionTokensDetails} for more details. + # # Breakdown of tokens used in a completion. # - # @param accepted_prediction_tokens [Integer] - # @param audio_tokens [Integer] - # @param reasoning_tokens [Integer] - # @param rejected_prediction_tokens [Integer] + # @param accepted_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the ... + # + # @param audio_tokens [Integer] Audio input tokens generated by the model. + # + # @param reasoning_tokens [Integer] Tokens generated by the model for reasoning. + # + # @param rejected_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the ... end # @see OpenAI::Models::CompletionUsage#prompt_tokens_details @@ -98,8 +108,9 @@ class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(audio_tokens: nil, cached_tokens: nil) # Breakdown of tokens used in the prompt. # - # @param audio_tokens [Integer] - # @param cached_tokens [Integer] + # @param audio_tokens [Integer] Audio input tokens present in the prompt. + # + # @param cached_tokens [Integer] Cached tokens present in the prompt. end end end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 228c5572..9dc2a93d 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -17,10 +17,15 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::CompoundFilter::Type } # @!method initialize(filters:, type:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompoundFilter} for more details. + # # Combine multiple filters using `and` or `or`. # - # @param filters [Array] - # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] + # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter` + # ... + # + # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] Type of operation: `and` or `or`. # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index 35e2aa45..e1030791 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -29,10 +29,13 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel required :usage, -> { OpenAI::Models::CreateEmbeddingResponse::Usage } # @!method initialize(data:, model:, usage:, object: :list) - # @param data [Array] - # @param model [String] - # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] - # @param object [Symbol, :list] + # @param data [Array] The list of embeddings generated by the model. + # + # @param model [String] The name of the model used to generate the embedding. + # + # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] The usage information for the request. 
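The `CompoundFilter` documented above composes comparison filters with `and`/`or`. A small sketch, reusing the comparison-filter shape from earlier; hashes and model instances are interchangeable here.

# Sketch: require both conditions to hold.
eligible = OpenAI::Models::CompoundFilter.new(
  type: :and,
  filters: [
    {key: "region", type: :eq, value: "us-east"},
    {key: "score", type: :gte, value: 0.8}
  ]
)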
+ # + # @param object [Symbol, :list] The object type, which is always "list". # @see OpenAI::Models::CreateEmbeddingResponse#usage class Usage < OpenAI::Internal::Type::BaseModel @@ -51,8 +54,9 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!method initialize(prompt_tokens:, total_tokens:) # The usage information for the request. # - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param prompt_tokens [Integer] The number of tokens used by the prompt. + # + # @param total_tokens [Integer] The total number of tokens used by the request. end end end diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index e1e28ded..41792b3d 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -24,11 +24,17 @@ class Embedding < OpenAI::Internal::Type::BaseModel required :object, const: :embedding # @!method initialize(embedding:, index:, object: :embedding) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Embedding} for more details. + # # Represents an embedding vector returned by embedding endpoint. # - # @param embedding [Array] - # @param index [Integer] - # @param object [Symbol, :embedding] + # @param embedding [Array] The embedding vector, which is a list of floats. The length of vector depends on + # ... + # + # @param index [Integer] The index of the embedding in the list of embeddings. + # + # @param object [Symbol, :embedding] The object type, which is always "embedding". end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index ec1dfd3d..f3fe5c40 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -53,11 +53,24 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) - # @param input [String, Array, Array, Array>] - # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] - # @param dimensions [Integer] - # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EmbeddingCreateParams} for more details. + # + # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i + # ... + # + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... + # + # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo + # ... + # + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input text to embed, encoded as a string or array of tokens. 
To embed multiple diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 5f686026..8f14b19f 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -38,10 +38,17 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) - # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] - # @param testing_criteria [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams} for more details. + # + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. + # + # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The configuration for the data source used for the evaluation runs. @@ -81,6 +88,9 @@ class Custom < OpenAI::Internal::Type::BaseModel optional :include_sample_schema, OpenAI::Internal::Type::Boolean # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom} for more details. + # # A CustomDataSourceConfig object that defines the schema for the data source used # for the evaluation runs. This schema is used to define the shape of the data # that will be: @@ -88,9 +98,12 @@ class Custom < OpenAI::Internal::Type::BaseModel # - Used to define your testing criteria and # - What data is required when creating a run # - # @param item_schema [Hash{Symbol=>Object}] - # @param include_sample_schema [Boolean] - # @param type [Symbol, :custom] + # @param item_schema [Hash{Symbol=>Object}] The json schema for each row in the data source. + # + # @param include_sample_schema [Boolean] Whether the eval should expect you to populate the sample namespace (ie, by gene + # ... + # + # @param type [Symbol, :custom] The type of data source. Always `custom`. end class Logs < OpenAI::Internal::Type::BaseModel @@ -111,8 +124,9 @@ class Logs < OpenAI::Internal::Type::BaseModel # completions query. This is usually metadata like `usecase=chatbot` or # `prompt-version=v2`, etc. # - # @param metadata [Hash{Symbol=>Object}] - # @param type [Symbol, :logs] + # @param metadata [Hash{Symbol=>Object}] Metadata filters for the logs data source. + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. end # @!method self.variants @@ -182,15 +196,25 @@ class LabelModel < OpenAI::Internal::Type::BaseModel required :type, const: :label_model # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel} for more + # details. + # # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. 
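As a rough end-to-end sketch of `EvalCreateParams`, the call below wires a custom data source config to a label-model testing criterion (its fields are documented above and just below). The schema, labels, and `{{item.ticket}}` template are illustrative; the template syntax follows the "item" namespace note in the docs.

# Sketch: create an eval with a custom item schema and a label_model grader.
ev = client.evals.create(
  name: "support-bot-quality",
  data_source_config: {
    type: :custom,
    item_schema: {
      type: "object",
      properties: {ticket: {type: "string"}},
      required: ["ticket"]
    },
    include_sample_schema: true
  },
  testing_criteria: [
    {
      type: :label_model,
      name: "politeness",
      model: "gpt-4o",
      input: [{role: "user", content: "Is this reply polite? {{item.ticket}}"}],
      labels: %w[polite impolite],
      passing_labels: %w[polite]
    }
  ]
)
puts ev.id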
# - # @param input [Array] - # @param labels [Array] - # @param model [String] - # @param name [String] - # @param passing_labels [Array] - # @param type [Symbol, :label_model] + # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param labels [Array] The labels to classify to each item in the evaluation. + # + # @param model [String] The model to use for the evaluation. Must support structured outputs. + # + # @param name [String] The name of the grader. + # + # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. + # + # @param type [Symbol, :label_model] The object type, which is always `label_model`. # A chat message that makes up the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. @@ -220,8 +244,9 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -248,15 +273,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -287,10 +318,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... 
+ # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -365,11 +401,15 @@ class Python < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) # A PythonGrader object that runs a python script on the input. # - # @param name [String] - # @param source [String] - # @param image_tag [String] - # @param pass_threshold [Float] - # @param type [Symbol, :python] + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param type [Symbol, :python] The object type, which is always `python`. end class ScoreModel < OpenAI::Internal::Type::BaseModel @@ -419,13 +459,19 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] - # @param model [String] - # @param name [String] - # @param pass_threshold [Float] - # @param range [Array] - # @param sampling_params [Object] - # @param type [Symbol, :score_model] + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. + # + # @param type [Symbol, :score_model] The object type, which is always `score_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -449,15 +495,21 @@ class Input < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input} for more + # details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... 
+ # + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -488,10 +540,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 65885fd7..cd141022 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -53,6 +53,9 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateResponse} for more details. + # # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. Like: # @@ -60,13 +63,19 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # - See how well my chatbot handles customer support # - Check if o3-mini is better at my usecase than gpt-4o # - # @param id [String] - # @param created_at [Integer] - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] - # @param testing_criteria [Array] - # @param object [Symbol, :eval] + # @param id [String] Unique identifier for the evaluation. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. + # + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # + # @param testing_criteria [Array] A list of testing criteria. + # + # @param object [Symbol, :eval] The object type. # Configuration of data sources used in runs of the evaluation. # @@ -149,11 +158,15 @@ class Python < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) # A PythonGrader object that runs a python script on the input. # - # @param name [String] - # @param source [String] - # @param image_tag [String] - # @param pass_threshold [Float] - # @param type [Symbol, :python] + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param pass_threshold [Float] The threshold for the score. 
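For the `PythonGrader` fields documented above, a hedged sketch of one testing criterion as a hash. The body of the Python source, including the `grade(sample, item)` entry point, is an assumption for illustration; this patch only documents `name`, `source`, `image_tag`, and `pass_threshold`.

# Sketch: a python grader criterion.
python_grader = {
  type: :python,
  name: "exact-match",
  pass_threshold: 1.0,
  source: <<~PY
    # Assumed grading contract, not specified by this patch.
    def grade(sample, item):
        return 1.0 if sample["output_text"].strip() == item["expected"] else 0.0
  PY
}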
+ # + # @param type [Symbol, :python] The object type, which is always `python`. end class ScoreModel < OpenAI::Internal::Type::BaseModel @@ -203,13 +216,19 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] - # @param model [String] - # @param name [String] - # @param pass_threshold [Float] - # @param range [Array] - # @param sampling_params [Object] - # @param type [Symbol, :score_model] + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. + # + # @param type [Symbol, :score_model] The object type, which is always `score_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -233,15 +252,21 @@ class Input < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input} for + # more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -272,10 +297,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... 
end # @!method self.variants diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb index 04e45803..84577b43 100644 --- a/lib/openai/models/eval_custom_data_source_config.rb +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -17,6 +17,9 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel required :type, const: :custom # @!method initialize(schema:, type: :custom) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCustomDataSourceConfig} for more details. + # # A CustomDataSourceConfig which specifies the schema of your `item` and # optionally `sample` namespaces. The response schema defines the shape of the # data that will be: @@ -24,8 +27,9 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # - Used to define your testing criteria and # - What data is required when creating a run # - # @param schema [Hash{Symbol=>Object}] - # @param type [Symbol, :custom] + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. ... + # + # @param type [Symbol, :custom] The type of data source. Always `custom`. end end end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb index 2af7746f..32db2840 100644 --- a/lib/openai/models/eval_label_model_grader.rb +++ b/lib/openai/models/eval_label_model_grader.rb @@ -43,11 +43,16 @@ class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel # the evaluation. # # @param input [Array] - # @param labels [Array] - # @param model [String] - # @param name [String] - # @param passing_labels [Array] - # @param type [Symbol, :label_model] + # + # @param labels [Array] The labels to assign to each item in the evaluation. + # + # @param model [String] The model to use for the evaluation. Must support structured outputs. + # + # @param name [String] The name of the grader. + # + # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. + # + # @param type [Symbol, :label_model] The object type, which is always `label_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -70,15 +75,20 @@ class Input < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalLabelModelGrader::Input} for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... 
+ # + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -109,10 +119,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText} for more + # details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index 3888ef6c..7fa913a0 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -34,10 +34,18 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel optional :order_by, enum: -> { OpenAI::Models::EvalListParams::OrderBy } # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::EvalListParams::Order] - # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListParams} for more details. + # + # @param after [String] Identifier for the last eval from the previous pagination request. + # + # @param limit [Integer] Number of evals to retrieve. + # + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d + # ... + # + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index d80085f0..b7bf7898 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -53,6 +53,9 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListResponse} for more details. + # # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. Like: # @@ -60,13 +63,19 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # - See how well my chatbot handles customer support # - Check if o3-mini is better at my usecase than gpt-4o # - # @param id [String] - # @param created_at [Integer] - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] - # @param testing_criteria [Array] - # @param object [Symbol, :eval] + # @param id [String] Unique identifier for the evaluation. 
+ # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. + # + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # + # @param testing_criteria [Array] A list of testing criteria. + # + # @param object [Symbol, :eval] The object type. # Configuration of data sources used in runs of the evaluation. # @@ -149,11 +158,15 @@ class Python < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) # A PythonGrader object that runs a python script on the input. # - # @param name [String] - # @param source [String] - # @param image_tag [String] - # @param pass_threshold [Float] - # @param type [Symbol, :python] + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param type [Symbol, :python] The object type, which is always `python`. end class ScoreModel < OpenAI::Internal::Type::BaseModel @@ -203,13 +216,19 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] - # @param model [String] - # @param name [String] - # @param pass_threshold [Float] - # @param range [Array] - # @param sampling_params [Object] - # @param type [Symbol, :score_model] + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. + # + # @param type [Symbol, :score_model] The object type, which is always `score_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -233,15 +252,21 @@ class Input < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input} for more + # details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
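Finally, a sketch of the `ScoreModel` grader shape that recurs in these hunks. The input messages may reference the "item" namespace via template strings per the docs above; the `{{sample.output_text}}` reference assumes the sample namespace is populated, as the `include_sample_schema` docs suggest.

# Sketch: a score_model grader with a pass threshold and score range.
score_model = {
  type: :score_model,
  name: "relevance",
  model: "gpt-4o",
  pass_threshold: 0.7,
  range: [0, 1],
  input: [
    {role: :system, content: "Score the answer's relevance from 0 to 1."},
    {role: :user, content: "Question: {{item.question}}\nAnswer: {{sample.output_text}}"}
  ]
}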
# - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -272,10 +297,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index e90d4965..f6f13166 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -53,6 +53,9 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalRetrieveResponse} for more details. + # # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. Like: # @@ -60,13 +63,19 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # - See how well my chatbot handles customer support # - Check if o3-mini is better at my usecase than gpt-4o # - # @param id [String] - # @param created_at [Integer] - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] - # @param testing_criteria [Array] - # @param object [Symbol, :eval] + # @param id [String] Unique identifier for the evaluation. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. + # + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # + # @param testing_criteria [Array] A list of testing criteria. 
+ # + # @param object [Symbol, :eval] The object type. # Configuration of data sources used in runs of the evaluation. # @@ -149,11 +158,15 @@ class Python < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) # A PythonGrader object that runs a python script on the input. # - # @param name [String] - # @param source [String] - # @param image_tag [String] - # @param pass_threshold [Float] - # @param type [Symbol, :python] + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param type [Symbol, :python] The object type, which is always `python`. end class ScoreModel < OpenAI::Internal::Type::BaseModel @@ -203,13 +216,19 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] - # @param model [String] - # @param name [String] - # @param pass_threshold [Float] - # @param range [Array] - # @param sampling_params [Object] - # @param type [Symbol, :score_model] + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. + # + # @param type [Symbol, :score_model] The object type, which is always `score_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -235,15 +254,21 @@ class Input < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input} for + # more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... 
+ # + # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -274,10 +299,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index 53940b3e..0055f9bc 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -28,15 +28,20 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(schema:, metadata: nil, type: :stored_completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalStoredCompletionsDataSourceConfig} for more details. + # # A StoredCompletionsDataSourceConfig which specifies the metadata property of # your stored completions query. This is usually metadata like `usecase=chatbot` # or `prompt-version=v2`, etc. The schema returned by this data source config is # used to defined what variables are available in your evals. `item` and `sample` # are both defined when using this data source config. # - # @param schema [Hash{Symbol=>Object}] - # @param metadata [Hash{Symbol=>String}, nil] - # @param type [Symbol, :stored_completions] + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end end end diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb index 421bb059..8563a345 100644 --- a/lib/openai/models/eval_string_check_grader.rb +++ b/lib/openai/models/eval_string_check_grader.rb @@ -37,11 +37,15 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel # A StringCheckGrader object that performs a string comparison between input and # reference using a specified operation. # - # @param input [String] - # @param name [String] - # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] - # @param reference [String] - # @param type [Symbol, :string_check] + # @param input [String] The input text. This may include template strings. + # + # @param name [String] The name of the grader. + # + # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + # @param reference [String] The reference text. This may include template strings. + # + # @param type [Symbol, :string_check] The object type, which is always `string_check`. 
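The `@!method initialize(input:, name:, operation:, reference:, type: :string_check)` signature documented above maps one-to-one onto keyword arguments. A hedged construction sketch follows; the grader name and the `{{...}}` template strings are illustrative placeholders for the `item`/`sample` namespaces, not values taken from this patch:

    require "openai"

    # Exact-match criterion: compare the sampled model output against the
    # reference answer stored on each datasource item.
    grader = OpenAI::Models::EvalStringCheckGrader.new(
      name: "exact-match",
      input: "{{sample.output_text}}",  # text being graded (template string)
      reference: "{{item.expected}}",   # reference text (template string)
      operation: :eq                    # one of :eq, :ne, :like, :ilike
    )

The `type:` argument can be omitted since it defaults to the `:string_check` constant documented above.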
# The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 2a8d70e0..4118b58b 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -41,14 +41,23 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalTextSimilarityGrader} for more details. + # # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] - # @param input [String] - # @param pass_threshold [Float] - # @param reference [String] - # @param name [String] - # @param type [Symbol, :text_similarity] + # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r + # ... + # + # @param input [String] The text being graded. + # + # @param pass_threshold [Float] A float score where a value greater than or equal indicates a passing grade. + # + # @param reference [String] The text being graded against. + # + # @param name [String] The name of the grader. + # + # @param type [Symbol, :text_similarity] The type of grader. # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index c8476dcf..baad5c63 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -25,8 +25,13 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(metadata: nil, name: nil, request_options: {}) - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateParams} for more details. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] Rename the evaluation. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 923a9e03..2bc0b524 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -53,6 +53,9 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateResponse} for more details. + # # An Eval object with a data source config and testing criteria. An Eval # represents a task to be done for your LLM integration. 
Like: # @@ -60,13 +63,19 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # - See how well my chatbot handles customer support # - Check if o3-mini is better at my usecase than gpt-4o # - # @param id [String] - # @param created_at [Integer] - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] - # @param testing_criteria [Array] - # @param object [Symbol, :eval] + # @param id [String] Unique identifier for the evaluation. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. + # + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # + # @param testing_criteria [Array] A list of testing criteria. + # + # @param object [Symbol, :eval] The object type. # Configuration of data sources used in runs of the evaluation. # @@ -149,11 +158,15 @@ class Python < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) # A PythonGrader object that runs a python script on the input. # - # @param name [String] - # @param source [String] - # @param image_tag [String] - # @param pass_threshold [Float] - # @param type [Symbol, :python] + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param type [Symbol, :python] The object type, which is always `python`. end class ScoreModel < OpenAI::Internal::Type::BaseModel @@ -203,13 +216,19 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] - # @param model [String] - # @param name [String] - # @param pass_threshold [Float] - # @param range [Array] - # @param sampling_params [Object] - # @param type [Symbol, :score_model] + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param pass_threshold [Float] The threshold for the score. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. + # + # @param type [Symbol, :score_model] The object type, which is always `score_model`. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -233,15 +252,21 @@ class Input < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input} for + # more details. 
+ # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] - # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -272,10 +297,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index d4f61b7b..eaa0f6e4 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -35,12 +35,19 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource} for more details. + # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters ... 
+ # + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. + # # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -75,8 +82,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -109,8 +117,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end class StoredCompletions < OpenAI::Internal::Type::BaseModel @@ -156,14 +165,23 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel optional :model, String, nil?: true # @!method initialize(created_after: nil, created_before: nil, limit: nil, metadata: nil, model: nil, type: :stored_completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} + # for more details. + # # A StoredCompletionsRunDataSource configuration describing a set of filters # - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param limit [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, nil] - # @param type [Symbol, :stored_completions] + # @param created_after [Integer, nil] An optional Unix timestamp to filter items created after this time. + # + # @param created_before [Integer, nil] An optional Unix timestamp to filter items created before this time. + # + # @param limit [Integer, nil] An optional maximum number of items to return. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String, nil] An optional model to filter by (e.g., 'gpt-4o'). + # + # @param type [Symbol, :stored_completions] The type of source. Always `stored_completions`. end # @!method self.variants @@ -210,8 +228,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. 
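Reading the `Template` docs above together with the message shapes that follow, an `input_messages` value might look like the sketch below. It assumes plain hashes coerce into the corresponding models, as is conventional in this SDK, and the `{{item.question}}` variable reference is illustrative:

    input_messages = {
      type: :template,
      template: [
        {role: :system, content: "You are a terse grading assistant."},
        # Variable reference resolved against each datasource item:
        {role: :user, content: "{{item.question}}"}
      ]
    }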
# A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -262,15 +286,21 @@ class Message < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -301,10 +331,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -359,8 +394,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
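Alternatively, per the `ItemReference` docs just above, the prompt can be pulled from a field of each datasource item rather than templated inline; the field name in this sketch is hypothetical:

    input_messages = {type: :item_reference, item_reference: "item.input_messages"}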
end # @!method self.variants @@ -394,10 +430,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 2ddad8b0..ae1cedae 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -20,7 +20,8 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # eval # # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] - # @param type [Symbol, :jsonl] + # + # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`. # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source module Source @@ -47,8 +48,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -81,8 +83,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end # @!method self.variants diff --git a/lib/openai/models/evals/eval_api_error.rb b/lib/openai/models/evals/eval_api_error.rb index 4d88b8dc..87f11122 100644 --- a/lib/openai/models/evals/eval_api_error.rb +++ b/lib/openai/models/evals/eval_api_error.rb @@ -19,8 +19,9 @@ class EvalAPIError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # An object representing an error response from the Eval API. # - # @param code [String] - # @param message [String] + # @param code [String] The error code. + # + # @param message [String] The error message. end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 2412cad7..da5f330b 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -97,22 +97,38 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel required :status, String # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse} for more details. 
+ # # A schema representing an evaluation run. # - # @param id [String] - # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] - # @param error [OpenAI::Models::Evals::EvalAPIError] - # @param eval_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param name [String] - # @param per_model_usage [Array] - # @param per_testing_criteria_results [Array] - # @param report_url [String] - # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] - # @param status [String] - # @param object [Symbol, :"eval.run"] + # @param id [String] Unique identifier for the evaluation run. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] Information about the run's data source. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # + # @param eval_id [String] The identifier of the associated evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String] The model that is evaluated, if applicable. + # + # @param name [String] The name of the evaluation run. + # + # @param per_model_usage [Array] Usage statistics for each model during the evaluation run. + # + # @param per_testing_criteria_results [Array] Results per testing criteria applied during the evaluation run. + # + # @param report_url [String] The URL to the rendered evaluation run report on the UI dashboard. + # + # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] Counters summarizing the outcomes of the evaluation run. + # + # @param status [String] The status of the evaluation run. + # + # @param object [Symbol, :"eval.run"] The type of the object. Always "eval.run". # Information about the run's data source. # @@ -163,13 +179,21 @@ class Completions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams } # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions} for more + # details. + # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... 
+ # # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams] - # @param type [Symbol, :completions] + # + # @param type [Symbol, :completions] The type of run data source. Always `completions`. # A EvalResponsesSource object describing a run data source configuration. # @@ -203,8 +227,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -237,8 +262,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end class Responses < OpenAI::Internal::Type::BaseModel @@ -323,20 +349,43 @@ class Responses < OpenAI::Internal::Type::BaseModel optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses} + # for more details. + # # A EvalResponsesSource object describing a run data source configuration. # - # @param allow_parallel_tool_calls [Boolean, nil] - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param has_tool_calls [Boolean, nil] - # @param instructions_search [String, nil] - # @param metadata [Object, nil] - # @param model [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param users [Array, nil] - # @param type [Symbol, :responses] + # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r + # ... + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # ... + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # ... + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # ... + # + # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec + # ... + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # ... + # + # @param model [String, nil] The name of the model to find responses for. 
This is a query parameter used to s + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # ... + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. end # @!method self.variants @@ -375,8 +424,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -409,8 +464,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -437,15 +493,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. 
One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -476,10 +538,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -534,8 +601,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end # @!method self.variants @@ -569,10 +637,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end @@ -618,12 +689,17 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) - # @param cached_tokens [Integer] - # @param completion_tokens [Integer] - # @param invocation_count [Integer] - # @param model_name [String] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens retrieved from cache. + # + # @param completion_tokens [Integer] The number of completion tokens generated. + # + # @param invocation_count [Integer] The number of invocations. + # + # @param model_name [String] The name of the model. + # + # @param prompt_tokens [Integer] The number of prompt tokens used. + # + # @param total_tokens [Integer] The total number of tokens used. end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -646,9 +722,11 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel required :testing_criteria, String # @!method initialize(failed:, passed:, testing_criteria:) - # @param failed [Integer] - # @param passed [Integer] - # @param testing_criteria [String] + # @param failed [Integer] Number of tests failed for this criteria. + # + # @param passed [Integer] Number of tests passed for this criteria. 
+ # + # @param testing_criteria [String] A description of the testing criteria. end # @see OpenAI::Models::Evals::RunCancelResponse#result_counts @@ -680,10 +758,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @!method initialize(errored:, failed:, passed:, total:) # Counters summarizing the outcomes of the evaluation run. # - # @param errored [Integer] - # @param failed [Integer] - # @param passed [Integer] - # @param total [Integer] + # @param errored [Integer] Number of output items that resulted in an error. + # + # @param failed [Integer] Number of output items that failed to pass the evaluation. + # + # @param passed [Integer] Number of output items that passed the evaluation. + # + # @param total [Integer] Total number of executed output items. end end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 6ee4266d..73ceb87e 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -32,9 +32,15 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams} for more details. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the run. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Details about the run's data source. @@ -84,12 +90,20 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} + # for more details. + # # A ResponsesRunDataSource object describing a model sampling configuration. 
# - # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] - # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `completions`. + # # @param input_messages [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] # A EvalResponsesSource object describing a run data source configuration. @@ -129,8 +143,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -163,8 +178,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end class Responses < OpenAI::Internal::Type::BaseModel @@ -249,20 +265,43 @@ class Responses < OpenAI::Internal::Type::BaseModel optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} + # for more details. + # # A EvalResponsesSource object describing a run data source configuration. 
# - # @param allow_parallel_tool_calls [Boolean, nil] - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param has_tool_calls [Boolean, nil] - # @param instructions_search [String, nil] - # @param metadata [Object, nil] - # @param model [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param users [Array, nil] - # @param type [Symbol, :responses] + # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r + # ... + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # ... + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # ... + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # ... + # + # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec + # ... + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # ... + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # ... + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. end # @!method self.variants @@ -313,8 +352,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -347,8 +392,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). 
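Tying `RunCreateParams` together, here is a hedged end-to-end sketch of starting a run against an existing eval. It assumes the runs resource is exposed as `client.evals.runs` (consistent with this SDK's resource layout, though not shown in this patch), and both identifiers are placeholders; the JSONL data source shape follows the `CreateEvalJSONLRunDataSource` docs earlier in this patch:

    require "openai"

    client = OpenAI::Client.new # api_key defaults to ENV["OPENAI_API_KEY"]

    # Run the eval over a previously uploaded JSONL file
    # (a FileID source inside a jsonl run data source).
    run = client.evals.runs.create(
      "eval_abc123", # placeholder eval identifier
      data_source: {
        type: :jsonl,
        source: {type: :file_id, id: "file-abc123"} # placeholder file id
      },
      name: "nightly-regression"
    )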
end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -375,15 +421,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -414,10 +466,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -472,8 +529,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
end # @!method self.variants @@ -507,10 +565,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 22a5752b..2326f6f0 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -97,22 +97,38 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel required :status, String # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse} for more details. + # # A schema representing an evaluation run. # - # @param id [String] - # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] - # @param error [OpenAI::Models::Evals::EvalAPIError] - # @param eval_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param name [String] - # @param per_model_usage [Array] - # @param per_testing_criteria_results [Array] - # @param report_url [String] - # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] - # @param status [String] - # @param object [Symbol, :"eval.run"] + # @param id [String] Unique identifier for the evaluation run. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] Information about the run's data source. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # + # @param eval_id [String] The identifier of the associated evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String] The model that is evaluated, if applicable. + # + # @param name [String] The name of the evaluation run. + # + # @param per_model_usage [Array] Usage statistics for each model during the evaluation run. + # + # @param per_testing_criteria_results [Array] Results per testing criteria applied during the evaluation run. + # + # @param report_url [String] The URL to the rendered evaluation run report on the UI dashboard. + # + # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] Counters summarizing the outcomes of the evaluation run. 
+ # + # @param status [String] The status of the evaluation run. + # + # @param object [Symbol, :"eval.run"] The type of the object. Always "eval.run". # Information about the run's data source. # @@ -163,13 +179,21 @@ class Completions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams } # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions} for more + # details. + # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams] - # @param type [Symbol, :completions] + # + # @param type [Symbol, :completions] The type of run data source. Always `completions`. # A EvalResponsesSource object describing a run data source configuration. # @@ -203,8 +227,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -237,8 +262,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end class Responses < OpenAI::Internal::Type::BaseModel @@ -323,20 +349,43 @@ class Responses < OpenAI::Internal::Type::BaseModel optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses} + # for more details. + # # A EvalResponsesSource object describing a run data source configuration. 
# - # @param allow_parallel_tool_calls [Boolean, nil] - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param has_tool_calls [Boolean, nil] - # @param instructions_search [String, nil] - # @param metadata [Object, nil] - # @param model [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param users [Array, nil] - # @param type [Symbol, :responses] + # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r + # ... + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # ... + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # ... + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # ... + # + # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec + # ... + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # ... + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # ... + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. end # @!method self.variants @@ -375,8 +424,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -409,8 +464,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). 
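The query-parameter fields documented above select which stored responses feed the run. A hedged sketch of such a source; the model name, timestamp window, and search string are illustrative values only:

source = {
  type: :responses,
  model: "gpt-4o-mini",                      # hypothetical model name
  created_after: Time.now.to_i - 7 * 86_400, # only responses from the last week
  has_tool_calls: false,
  instructions_search: "summarize"
}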
end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -437,15 +493,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -476,10 +538,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -534,8 +601,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
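Combining the EvalItem and OutputText shapes documented above, an assistant message whose content is a prior model output could look like this (the text is illustrative):

eval_item = {
  type: :message,
  role: :assistant,
  content: {type: :output_text, text: "The capital of France is Paris."}
}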
end # @!method self.variants @@ -569,10 +637,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end @@ -618,12 +689,17 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) - # @param cached_tokens [Integer] - # @param completion_tokens [Integer] - # @param invocation_count [Integer] - # @param model_name [String] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens retrieved from cache. + # + # @param completion_tokens [Integer] The number of completion tokens generated. + # + # @param invocation_count [Integer] The number of invocations. + # + # @param model_name [String] The name of the model. + # + # @param prompt_tokens [Integer] The number of prompt tokens used. + # + # @param total_tokens [Integer] The total number of tokens used. end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -646,9 +722,11 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel required :testing_criteria, String # @!method initialize(failed:, passed:, testing_criteria:) - # @param failed [Integer] - # @param passed [Integer] - # @param testing_criteria [String] + # @param failed [Integer] Number of tests failed for this criteria. + # + # @param passed [Integer] Number of tests passed for this criteria. + # + # @param testing_criteria [String] A description of the testing criteria. end # @see OpenAI::Models::Evals::RunCreateResponse#result_counts @@ -680,10 +758,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @!method initialize(errored:, failed:, passed:, total:) # Counters summarizing the outcomes of the evaluation run. # - # @param errored [Integer] - # @param failed [Integer] - # @param passed [Integer] - # @param total [Integer] + # @param errored [Integer] Number of output items that resulted in an error. + # + # @param failed [Integer] Number of output items that failed to pass the evaluation. + # + # @param passed [Integer] Number of output items that passed the evaluation. + # + # @param total [Integer] Total number of executed output items. 
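The ResultCounts fields above roll up into a one-line summary. A sketch, assuming `run` is an eval run object already returned by the API:

counts = run.result_counts
rate = counts.total.zero? ? 0.0 : counts.passed.fdiv(counts.total)
puts format("passed %d/%d (%.1f%%), failed %d, errored %d",
            counts.passed, counts.total, rate * 100, counts.failed, counts.errored)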
end end end diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index 66cd4270..d828e118 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -35,10 +35,19 @@ class RunListParams < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListParams} for more details. + # + # @param after [String] Identifier for the last run from the previous pagination request. + # + # @param limit [Integer] Number of runs to retrieve. + # + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de + # ... + # + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 941d74e1..3e2e0deb 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -97,22 +97,38 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel required :status, String # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse} for more details. + # # A schema representing an evaluation run. # - # @param id [String] - # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] - # @param error [OpenAI::Models::Evals::EvalAPIError] - # @param eval_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param name [String] - # @param per_model_usage [Array] - # @param per_testing_criteria_results [Array] - # @param report_url [String] - # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] - # @param status [String] - # @param object [Symbol, :"eval.run"] + # @param id [String] Unique identifier for the evaluation run. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] Information about the run's data source. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # + # @param eval_id [String] The identifier of the associated evaluation. 
+ # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String] The model that is evaluated, if applicable. + # + # @param name [String] The name of the evaluation run. + # + # @param per_model_usage [Array] Usage statistics for each model during the evaluation run. + # + # @param per_testing_criteria_results [Array] Results per testing criteria applied during the evaluation run. + # + # @param report_url [String] The URL to the rendered evaluation run report on the UI dashboard. + # + # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] Counters summarizing the outcomes of the evaluation run. + # + # @param status [String] The status of the evaluation run. + # + # @param object [Symbol, :"eval.run"] The type of the object. Always "eval.run". # Information about the run's data source. # @@ -163,13 +179,21 @@ class Completions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams } # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions} for more + # details. + # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] + # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams] - # @param type [Symbol, :completions] + # + # @param type [Symbol, :completions] The type of run data source. Always `completions`. # A EvalResponsesSource object describing a run data source configuration. # @@ -203,8 +227,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -237,8 +262,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. 
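The two JSONL source variants just documented differ only in where the rows live. A minimal sketch; the file id and row contents are illustrative:

# Inline rows: each entry wraps its row in an `item` key, per the Content model above.
inline_source = {type: :file_content, content: [{item: {question: "2 + 2?", answer: "4"}}]}

# Alternatively, reference a previously uploaded JSONL file by id.
file_source = {type: :file_id, id: "file-abc123"}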
end class Responses < OpenAI::Internal::Type::BaseModel @@ -323,20 +349,43 @@ class Responses < OpenAI::Internal::Type::BaseModel optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses} + # for more details. + # # A EvalResponsesSource object describing a run data source configuration. # - # @param allow_parallel_tool_calls [Boolean, nil] - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param has_tool_calls [Boolean, nil] - # @param instructions_search [String, nil] - # @param metadata [Object, nil] - # @param model [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param users [Array, nil] - # @param type [Symbol, :responses] + # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r + # ... + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # ... + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # ... + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # ... + # + # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec + # ... + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # ... + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # ... + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. end # @!method self.variants @@ -371,8 +420,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. # A message input to the model with a role indicating instruction following # hierarchy. 
Instructions given with the `developer` or `system` role take @@ -405,8 +460,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -433,15 +489,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -472,10 +534,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -530,8 +597,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. 
Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end # @!method self.variants @@ -565,10 +633,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end @@ -614,12 +685,17 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) - # @param cached_tokens [Integer] - # @param completion_tokens [Integer] - # @param invocation_count [Integer] - # @param model_name [String] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens retrieved from cache. + # + # @param completion_tokens [Integer] The number of completion tokens generated. + # + # @param invocation_count [Integer] The number of invocations. + # + # @param model_name [String] The name of the model. + # + # @param prompt_tokens [Integer] The number of prompt tokens used. + # + # @param total_tokens [Integer] The total number of tokens used. end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -642,9 +718,11 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel required :testing_criteria, String # @!method initialize(failed:, passed:, testing_criteria:) - # @param failed [Integer] - # @param passed [Integer] - # @param testing_criteria [String] + # @param failed [Integer] Number of tests failed for this criteria. + # + # @param passed [Integer] Number of tests passed for this criteria. + # + # @param testing_criteria [String] A description of the testing criteria. end # @see OpenAI::Models::Evals::RunListResponse#result_counts @@ -676,10 +754,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @!method initialize(errored:, failed:, passed:, total:) # Counters summarizing the outcomes of the evaluation run. # - # @param errored [Integer] - # @param failed [Integer] - # @param passed [Integer] - # @param total [Integer] + # @param errored [Integer] Number of output items that resulted in an error. + # + # @param failed [Integer] Number of output items that failed to pass the evaluation. + # + # @param passed [Integer] Number of output items that passed the evaluation. + # + # @param total [Integer] Total number of executed output items. 
end end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 55c35341..1118a6ba 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -97,22 +97,38 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel required :status, String # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse} for more details. + # # A schema representing an evaluation run. # - # @param id [String] - # @param created_at [Integer] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] - # @param error [OpenAI::Models::Evals::EvalAPIError] - # @param eval_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param name [String] - # @param per_model_usage [Array] - # @param per_testing_criteria_results [Array] - # @param report_url [String] - # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] - # @param status [String] - # @param object [Symbol, :"eval.run"] + # @param id [String] Unique identifier for the evaluation run. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] Information about the run's data source. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # + # @param eval_id [String] The identifier of the associated evaluation. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String] The model that is evaluated, if applicable. + # + # @param name [String] The name of the evaluation run. + # + # @param per_model_usage [Array] Usage statistics for each model during the evaluation run. + # + # @param per_testing_criteria_results [Array] Results per testing criteria applied during the evaluation run. + # + # @param report_url [String] The URL to the rendered evaluation run report on the UI dashboard. + # + # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] Counters summarizing the outcomes of the evaluation run. + # + # @param status [String] The status of the evaluation run. + # + # @param object [Symbol, :"eval.run"] The type of the object. Always "eval.run". # Information about the run's data source. # @@ -164,13 +180,21 @@ class Completions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams } # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions} for more + # details. + # # A ResponsesRunDataSource object describing a model sampling configuration. 
# - # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] - # @param model [String] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # # @param sampling_params [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams] - # @param type [Symbol, :completions] + # + # @param type [Symbol, :completions] The type of run data source. Always `completions`. # A EvalResponsesSource object describing a run data source configuration. # @@ -205,8 +229,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] - # @param type [Symbol, :file_content] + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -239,8 +264,9 @@ class FileID < OpenAI::Internal::Type::BaseModel required :type, const: :file_id # @!method initialize(id:, type: :file_id) - # @param id [String] - # @param type [Symbol, :file_id] + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. end class Responses < OpenAI::Internal::Type::BaseModel @@ -325,20 +351,43 @@ class Responses < OpenAI::Internal::Type::BaseModel optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses} + # for more details. + # # A EvalResponsesSource object describing a run data source configuration. # - # @param allow_parallel_tool_calls [Boolean, nil] - # @param created_after [Integer, nil] - # @param created_before [Integer, nil] - # @param has_tool_calls [Boolean, nil] - # @param instructions_search [String, nil] - # @param metadata [Object, nil] - # @param model [String, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param users [Array, nil] - # @param type [Symbol, :responses] + # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r + # ... + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). 
This is a query par + # ... + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # ... + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # ... + # + # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec + # ... + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # ... + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # ... + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. end # @!method self.variants @@ -377,8 +426,14 @@ class Template < OpenAI::Internal::Type::BaseModel required :type, const: :template # @!method initialize(template:, type: :template) - # @param template [Array] - # @param type [Symbol, :template] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # ... + # + # @param type [Symbol, :template] The type of input messages. Always `template`. # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -411,8 +466,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel required :role, String # @!method initialize(content:, role:) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end class EvalItem < OpenAI::Internal::Type::BaseModel @@ -439,15 +495,21 @@ class EvalItem < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} + # for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
# - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # + # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... # Text inputs to the model - can contain template strings. # @@ -478,10 +540,15 @@ class OutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # # A text output from the model. # - # @param text [String] - # @param type [Symbol, :output_text] + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... end # @!method self.variants @@ -536,8 +603,9 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] - # @param type [Symbol, :item_reference] + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end # @!method self.variants @@ -571,10 +639,13 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
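The four SamplingParams fields above compose into a small hash. A sketch with illustrative values:

sampling_params = {
  max_completion_tokens: 256, # cap on generated output tokens
  seed: 42,                   # fix the seed for reproducible sampling
  temperature: 0.2,           # low randomness
  top_p: 1.0                  # consider the full token distribution
}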
end end @@ -620,12 +691,17 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) - # @param cached_tokens [Integer] - # @param completion_tokens [Integer] - # @param invocation_count [Integer] - # @param model_name [String] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens retrieved from cache. + # + # @param completion_tokens [Integer] The number of completion tokens generated. + # + # @param invocation_count [Integer] The number of invocations. + # + # @param model_name [String] The name of the model. + # + # @param prompt_tokens [Integer] The number of prompt tokens used. + # + # @param total_tokens [Integer] The total number of tokens used. end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -648,9 +724,11 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel required :testing_criteria, String # @!method initialize(failed:, passed:, testing_criteria:) - # @param failed [Integer] - # @param passed [Integer] - # @param testing_criteria [String] + # @param failed [Integer] Number of tests failed for this criteria. + # + # @param passed [Integer] Number of tests passed for this criteria. + # + # @param testing_criteria [String] A description of the testing criteria. end # @see OpenAI::Models::Evals::RunRetrieveResponse#result_counts @@ -682,10 +760,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @!method initialize(errored:, failed:, passed:, total:) # Counters summarizing the outcomes of the evaluation run. # - # @param errored [Integer] - # @param failed [Integer] - # @param passed [Integer] - # @param total [Integer] + # @param errored [Integer] Number of output items that resulted in an error. + # + # @param failed [Integer] Number of output items that failed to pass the evaluation. + # + # @param passed [Integer] Number of output items that passed the evaluation. + # + # @param total [Integer] Total number of executed output items. end end end diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index 301d2acd..6eadd7fb 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -41,11 +41,20 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Status } # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::Runs::OutputItemListParams} for more details. + # # @param eval_id [String] - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] + # + # @param after [String] Identifier for the last output item from the previous pagination request. + # + # @param limit [Integer] Number of output items to retrieve. + # + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc + # ... 
+ #
+ # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Filter output items by status. Use `failed` to filter by failed output ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
# Sort order for output items by timestamp. Use `asc` for ascending order or
diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb
index fc0d6e75..e5c369c9 100644
--- a/lib/openai/models/evals/runs/output_item_list_response.rb
+++ b/lib/openai/models/evals/runs/output_item_list_response.rb
@@ -70,16 +70,25 @@ class OutputItemListResponse < OpenAI::Internal::Type::BaseModel
# @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item")
# A schema representing an evaluation run output item.
#
- # @param id [String]
- # @param created_at [Integer]
- # @param datasource_item [Hash{Symbol=>Object}]
- # @param datasource_item_id [Integer]
- # @param eval_id [String]
- # @param results [Array<Hash{Symbol=>Object}>]
- # @param run_id [String]
- # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample]
- # @param status [String]
- # @param object [Symbol, :"eval.run.output_item"]
+ # @param id [String] Unique identifier for the evaluation run output item.
+ #
+ # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created.
+ #
+ # @param datasource_item [Hash{Symbol=>Object}] Details of the input data source item.
+ #
+ # @param datasource_item_id [Integer] The identifier for the data source item.
+ #
+ # @param eval_id [String] The identifier of the evaluation group.
+ #
+ # @param results [Array<Hash{Symbol=>Object}>] A list of results from the evaluation run.
+ #
+ # @param run_id [String] The identifier of the evaluation run associated with this output item.
+ #
+ # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample] A sample containing the input and output of the evaluation run.
+ #
+ # @param status [String] The status of the evaluation run.
+ #
+ # @param object [Symbol, :"eval.run.output_item"] The type of the object. Always "eval.run.output_item".
# @see OpenAI::Models::Evals::Runs::OutputItemListResponse#sample
class Sample < OpenAI::Internal::Type::BaseModel
@@ -146,18 +155,30 @@ class Sample < OpenAI::Internal::Type::BaseModel
required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage }
# @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample} for more details.
+ #
# A sample containing the input and output of the evaluation run.
#
- # @param error [OpenAI::Models::Evals::EvalAPIError]
- # @param finish_reason [String]
- # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input>]
- # @param max_completion_tokens [Integer]
- # @param model [String]
- # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output>]
- # @param seed [Integer]
- # @param temperature [Float]
- # @param top_p [Float]
- # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage]
+ # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ...
+ #
+ # @param finish_reason [String] The reason why the sample generation was finished.
+ #
+ # @param input [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input>] An array of input messages.
+ #
+ # @param max_completion_tokens [Integer] The maximum number of tokens allowed for completion.
+ #
+ # @param model [String] The model used for generating the sample.
+ #
+ # @param output [Array<OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output>] An array of output messages.
+ #
+ # @param seed [Integer] The seed used for generating the sample.
+ #
+ # @param temperature [Float] The sampling temperature used.
+ #
+ # @param top_p [Float] The top_p value used for sampling.
+ #
+ # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage] Token usage details for the sample.
class Input < OpenAI::Internal::Type::BaseModel
# @!attribute content
@@ -175,8 +196,9 @@ class Input < OpenAI::Internal::Type::BaseModel
# @!method initialize(content:, role:)
# An input message.
#
- # @param content [String]
- # @param role [String]
+ # @param content [String] The content of the message.
+ #
+ # @param role [String] The role of the message sender (e.g., system, user, developer).
end
class Output < OpenAI::Internal::Type::BaseModel
@@ -193,8 +215,9 @@ class Output < OpenAI::Internal::Type::BaseModel
optional :role, String
# @!method initialize(content: nil, role: nil)
- # @param content [String]
- # @param role [String]
+ # @param content [String] The content of the message.
+ #
+ # @param role [String] The role of the message (e.g. "system", "assistant", "user").
end
# @see OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample#usage
@@ -226,10 +249,13 @@ class Usage < OpenAI::Internal::Type::BaseModel
# @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:)
# Token usage details for the sample.
#
- # @param cached_tokens [Integer]
- # @param completion_tokens [Integer]
- # @param prompt_tokens [Integer]
- # @param total_tokens [Integer]
+ # @param cached_tokens [Integer] The number of tokens retrieved from cache.
+ #
+ # @param completion_tokens [Integer] The number of completion tokens generated.
+ #
+ # @param prompt_tokens [Integer] The number of prompt tokens used.
+ #
+ # @param total_tokens [Integer] The total number of tokens used.
end
end
end
diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb
index eec2ea1a..70b4c3ab 100644
--- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb
+++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb
@@ -70,16 +70,25 @@ class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel
# @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item")
# A schema representing an evaluation run output item.
#
- # @param id [String]
- # @param created_at [Integer]
- # @param datasource_item [Hash{Symbol=>Object}]
- # @param datasource_item_id [Integer]
- # @param eval_id [String]
- # @param results [Array<Hash{Symbol=>Object}>]
- # @param run_id [String]
- # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample]
- # @param status [String]
- # @param object [Symbol, :"eval.run.output_item"]
+ # @param id [String] Unique identifier for the evaluation run output item.
+ #
+ # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created.
+ #
+ # @param datasource_item [Hash{Symbol=>Object}] Details of the input data source item.
+ #
+ # @param datasource_item_id [Integer] The identifier for the data source item.
+ #
+ # @param eval_id [String] The identifier of the evaluation group.
+ # + # @param results [ArrayObject}>] A list of results from the evaluation run. + # + # @param run_id [String] The identifier of the evaluation run associated with this output item. + # + # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample] A sample containing the input and output of the evaluation run. + # + # @param status [String] The status of the evaluation run. + # + # @param object [Symbol, :"eval.run.output_item"] The type of the object. Always "eval.run.output_item". # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse#sample class Sample < OpenAI::Internal::Type::BaseModel @@ -146,18 +155,31 @@ class Sample < OpenAI::Internal::Type::BaseModel required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage } # @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample} for more + # details. + # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Models::Evals::EvalAPIError] - # @param finish_reason [String] - # @param input [Array] - # @param max_completion_tokens [Integer] - # @param model [String] - # @param output [Array] - # @param seed [Integer] - # @param temperature [Float] - # @param top_p [Float] - # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # + # @param finish_reason [String] The reason why the sample generation was finished. + # + # @param input [Array] An array of input messages. + # + # @param max_completion_tokens [Integer] The maximum number of tokens allowed for completion. + # + # @param model [String] The model used for generating the sample. + # + # @param output [Array] An array of output messages. + # + # @param seed [Integer] The seed used for generating the sample. + # + # @param temperature [Float] The sampling temperature used. + # + # @param top_p [Float] The top_p value used for sampling. + # + # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] Token usage details for the sample. class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -175,8 +197,9 @@ class Input < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:) # An input message. # - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message sender (e.g., system, user, developer). end class Output < OpenAI::Internal::Type::BaseModel @@ -193,8 +216,9 @@ class Output < OpenAI::Internal::Type::BaseModel optional :role, String # @!method initialize(content: nil, role: nil) - # @param content [String] - # @param role [String] + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample#usage @@ -226,10 +250,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:) # Token usage details for the sample. 
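+          # @example Hypothetical read of these fields (added for illustration; the
+          #   `client.evals.runs.output_items.retrieve` call shape is an assumption
+          #   based on this SDK's resource conventions, not part of the generated docs):
+          #
+          #     item = client.evals.runs.output_items.retrieve(
+          #       "outputitem_abc",
+          #       eval_id: "eval_abc",
+          #       run_id: "run_abc"
+          #     )
+          #     usage = item.sample.usage
+          #     usage.total_tokens # total tokens used, alongside prompt/completion/cached counts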
# - # @param cached_tokens [Integer] - # @param completion_tokens [Integer] - # @param prompt_tokens [Integer] - # @param total_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens retrieved from cache. + # + # @param completion_tokens [Integer] The number of completion tokens generated. + # + # @param prompt_tokens [Integer] The number of prompt tokens used. + # + # @param total_tokens [Integer] The total number of tokens used. end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 28d5f936..94e5f46f 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -23,8 +23,14 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel required :purpose, enum: -> { OpenAI::Models::FilePurpose } # @!method initialize(file:, purpose:, request_options: {}) - # @param file [Pathname, StringIO] - # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileCreateParams} for more details. + # + # @param file [Pathname, StringIO] The File object (not file name) to be uploaded. ... + # + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 76de1996..423236ed 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -37,10 +37,20 @@ class FileListParams < OpenAI::Internal::Type::BaseModel optional :purpose, String # @!method initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::FileListParams::Order] - # @param purpose [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # + # @param purpose [String] Only return files with the given purpose. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index b6825ae1..942e2361 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -67,17 +67,31 @@ class FileObject < OpenAI::Internal::Type::BaseModel optional :status_details, String # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileObject} for more details. + # # The `File` object represents a document that has been uploaded to OpenAI. 
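+      # @example Hypothetical upload sketch (added for illustration; the
+      #   `client.files.create` call is assumed from this SDK's resource
+      #   conventions, with the `file:` and `purpose:` parameters documented in
+      #   FileCreateParams above):
+      #
+      #     file_object = client.files.create(
+      #       file: Pathname.new("training_data.jsonl"),
+      #       purpose: :"fine-tune"
+      #     )
+      #     file_object.id       # e.g. "file-abc123"
+      #     file_object.filename # "training_data.jsonl"
+      #     file_object.bytes    # size of the file, in bytes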
# - # @param id [String] - # @param bytes [Integer] - # @param created_at [Integer] - # @param filename [String] - # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] - # @param status [Symbol, OpenAI::Models::FileObject::Status] - # @param expires_at [Integer] - # @param status_details [String] - # @param object [Symbol, :file] + # @param id [String] The file identifier, which can be referenced in the API endpoints. + # + # @param bytes [Integer] The size of the file, in bytes. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the file was created. + # + # @param filename [String] The name of the file. + # + # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants + # ... + # + # @param status [Symbol, OpenAI::Models::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro + # ... + # + # @param expires_at [Integer] The Unix timestamp (in seconds) for when the file will expire. + # + # @param status_details [String] Deprecated. For details on why a fine-tuning training file failed validation, se + # ... + # + # @param object [Symbol, :file] The object type, which is always `file`. # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb index 8a333418..448911d4 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -16,7 +16,8 @@ class PermissionCreateParams < OpenAI::Internal::Type::BaseModel required :project_ids, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(project_ids:, request_options: {}) - # @param project_ids [Array] + # @param project_ids [Array] The project identifiers to grant access to. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb index ed9e10fe..efe00e34 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb @@ -34,10 +34,13 @@ class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel # The `checkpoint.permission` object represents a permission for a fine-tuned # model checkpoint. # - # @param id [String] - # @param created_at [Integer] - # @param project_id [String] - # @param object [Symbol, :"checkpoint.permission"] + # @param id [String] The permission identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. + # + # @param project_id [String] The project identifier that the permission is for. + # + # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". 
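+          # @example Hypothetical sketch (added for illustration; the
+          #   `client.fine_tuning.checkpoints.permissions.create` call shape is an
+          #   assumption based on this SDK's resource conventions):
+          #
+          #     # grants the listed projects access to a fine-tuned model checkpoint
+          #     client.fine_tuning.checkpoints.permissions.create(
+          #       "ft:gpt-4o-mini-2024-07-18:org:custom:abc123",
+          #       project_ids: ["proj_abc123"]
+          #     )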
end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb index 1fce739b..483d4664 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb @@ -25,9 +25,11 @@ class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel required :object, const: :"checkpoint.permission" # @!method initialize(id:, deleted:, object: :"checkpoint.permission") - # @param id [String] - # @param deleted [Boolean] - # @param object [Symbol, :"checkpoint.permission"] + # @param id [String] The ID of the fine-tuned model checkpoint permission that was deleted. + # + # @param deleted [Boolean] Whether the fine-tuned model checkpoint permission was successfully deleted. + # + # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index 6c272af8..cddf06b9 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -34,10 +34,14 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel optional :project_id, String # @!method initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # @param project_id [String] + # @param after [String] Identifier for the last permission ID from the previous pagination request. + # + # @param limit [Integer] Number of permissions to retrieve. + # + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # + # @param project_id [String] The ID of the project to get permissions for. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order in which to retrieve permissions. diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb index 1de51fee..6ffbdf4d 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -68,10 +68,13 @@ class Data < OpenAI::Internal::Type::BaseModel # The `checkpoint.permission` object represents a permission for a fine-tuned # model checkpoint. # - # @param id [String] - # @param created_at [Integer] - # @param project_id [String] - # @param object [Symbol, :"checkpoint.permission"] + # @param id [String] The permission identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. + # + # @param project_id [String] The project identifier that the permission is for. + # + # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". 
end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index a5e3d2a9..36cc8d69 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -139,28 +139,59 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel optional :method_, -> { OpenAI::Models::FineTuning::FineTuningJob::Method }, api_name: :method # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJob} for more details. + # # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. # - # @param id [String] - # @param created_at [Integer] - # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] - # @param fine_tuned_model [String, nil] - # @param finished_at [Integer, nil] - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] - # @param model [String] - # @param organization_id [String] - # @param result_files [Array] - # @param seed [Integer] - # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] - # @param trained_tokens [Integer, nil] - # @param training_file [String] - # @param validation_file [String, nil] - # @param estimated_finish [Integer, nil] - # @param integrations [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] - # @param object [Symbol, :"fine_tuning.job"] + # @param id [String] The object identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. + # + # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t + # ... + # + # @param fine_tuned_model [String, nil] The name of the fine-tuned model that is being created. The value will be null i + # ... + # + # @param finished_at [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job was finished. The v + # ... + # + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return + # ... + # + # @param model [String] The base model that is being fine-tuned. + # + # @param organization_id [String] The organization that owns the fine-tuning job. + # + # @param result_files [Array] The compiled results file ID(s) for the fine-tuning job. You can retrieve the re + # ... + # + # @param seed [Integer] The seed used for the fine-tuning job. + # + # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files + # ... + # + # @param trained_tokens [Integer, nil] The total number of billable tokens processed by this fine-tuning job. The value + # ... + # + # @param training_file [String] The file ID used for training. You can retrieve the training data with the [File + # ... + # + # @param validation_file [String, nil] The file ID used for validation. 
You can retrieve the validation results with th + # ... + # + # @param estimated_finish [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job is estimated to fin + # ... + # + # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] The method used for fine-tuning. + # + # @param object [Symbol, :"fine_tuning.job"] The object type, which is always "fine_tuning.job". # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel @@ -184,12 +215,18 @@ class Error < OpenAI::Internal::Type::BaseModel required :param, String, nil?: true # @!method initialize(code:, message:, param:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJob::Error} for more details. + # # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. # - # @param code [String] - # @param message [String] - # @param param [String, nil] + # @param code [String] A machine-readable error code. + # + # @param message [String] A human-readable error message. + # + # @param param [String, nil] The parameter that was invalid, usually `training_file` or `validation_file`. Th + # ... end # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters @@ -217,12 +254,20 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters} for more details. + # # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # - # @param batch_size [Symbol, :auto, Integer] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -311,9 +356,11 @@ class Method < OpenAI::Internal::Type::BaseModel # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] - # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] - # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] + # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] Configuration for the DPO fine-tuning method. + # + # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] Configuration for the supervised fine-tuning method. + # + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] The type of method. 
Is either `supervised` or `dpo`. # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -326,7 +373,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -363,12 +410,23 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters} for + # more details. + # # The hyperparameters used for the fine-tuning job. # - # @param batch_size [Symbol, :auto, Integer] - # @param beta [Symbol, :auto, Float] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -444,7 +502,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -473,11 +531,20 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters} + # for more details. + # # The hyperparameters used for the fine-tuning job. # - # @param batch_size [Symbol, :auto, Integer] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. 
A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index b5fb144c..5d1a7f6b 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -50,13 +50,19 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") # Fine-tuning job event object # - # @param id [String] - # @param created_at [Integer] - # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] - # @param message [String] - # @param data [Object] - # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] - # @param object [Symbol, :"fine_tuning.job.event"] + # @param id [String] The object identifier. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. + # + # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] The log level of the event. + # + # @param message [String] The message of the event. + # + # @param data [Object] The data associated with the event. + # + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] The type of event. + # + # @param object [Symbol, :"fine_tuning.job.event"] The object type, which is always "fine_tuning.job.event". # The log level of the event. # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 98c9eaca..c32fef0e 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -34,15 +34,24 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel optional :tags, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegration} for more details. + # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @param project [String] - # @param entity [String, nil] - # @param name [String, nil] - # @param tags [Array] + # @param project [String] The name of the project that the new run will be created under. ... + # + # @param entity [String, nil] The entity to use for the run. This allows you to set the team or username of th + # ... + # + # @param name [String, nil] A display name to set for the run. If not set, we will use the Job ID as the nam + # ... + # + # @param tags [Array] A list of tags to be attached to the newly created run. These tags are passed th + # ... 
end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 8d9da11d..147302a1 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -20,8 +20,14 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel required :wandb, -> { OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } # @!method initialize(wandb:, type: :wandb) - # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] - # @param type [Symbol, :wandb] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject} for more + # details. + # + # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie + # ... + # + # @param type [Symbol, :wandb] The type of the integration being enabled for the fine-tuning job end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 2078e4bd..4d99beea 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -107,15 +107,29 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel optional :validation_file, String, nil?: true # @!method initialize(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) - # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] - # @param training_file [String] - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # @param integrations [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] - # @param seed [Integer, nil] - # @param suffix [String, nil] - # @param validation_file [String, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams} for more details. + # + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the ... + # + # @param training_file [String] The ID of an uploaded file that contains training data. ... + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. ... + # + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # + # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j + # ... + # + # @param suffix [String, nil] A string of up to 64 characters that will be added to your fine-tuned model name + # ... + # + # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The name of the model to fine-tune. 
You can select one of the @@ -172,12 +186,20 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters} for more details. + # # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # - # @param batch_size [Symbol, :auto, Integer] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -243,8 +265,14 @@ class Integration < OpenAI::Internal::Type::BaseModel required :wandb, -> { OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } # @!method initialize(wandb:, type: :wandb) - # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] - # @param type [Symbol, :wandb] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams::Integration} for more details. + # + # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie + # ... + # + # @param type [Symbol, :wandb] The type of integration to enable. Currently, only "wandb" (Weights and Biases) + # ... # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel @@ -278,15 +306,25 @@ class Wandb < OpenAI::Internal::Type::BaseModel optional :tags, OpenAI::Internal::Type::ArrayOf[String] # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb} for more + # details. + # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @param project [String] - # @param entity [String, nil] - # @param name [String, nil] - # @param tags [Array] + # @param project [String] The name of the project that the new run will be created under. ... + # + # @param entity [String, nil] The entity to use for the run. This allows you to set the team or username of th + # ... + # + # @param name [String, nil] A display name to set for the run. If not set, we will use the Job ID as the nam + # ... + # + # @param tags [Array] A list of tags to be attached to the newly created run. These tags are passed th + # ... 
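+            # @example Hypothetical sketch of enabling this integration when creating a
+            #   job (added for illustration; `client.fine_tuning.jobs.create` is assumed
+            #   from this SDK's resource conventions, and plain hashes are assumed to be
+            #   coerced into these models):
+            #
+            #     client.fine_tuning.jobs.create(
+            #       model: "gpt-4o-mini-2024-07-18",
+            #       training_file: "file-abc123",
+            #       integrations: [
+            #         {type: :wandb, wandb: {project: "my-wandb-project", tags: ["first-experiment"]}}
+            #       ]
+            #     )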
end end @@ -312,9 +350,11 @@ class Method < OpenAI::Internal::Type::BaseModel # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] - # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] - # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] + # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] Configuration for the DPO fine-tuning method. + # + # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] Configuration for the supervised fine-tuning method. + # + # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised` or `dpo`. # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -328,7 +368,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -365,12 +405,23 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters} for + # more details. + # # The hyperparameters used for the fine-tuning job. # - # @param batch_size [Symbol, :auto, Integer] - # @param beta [Symbol, :auto, Float] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -446,7 +497,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. 
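+              # @example Hypothetical sketch (added for illustration;
+              #   `client.fine_tuning.jobs.create` is assumed from this SDK's resource
+              #   conventions; note the API's `method` field is exposed as `method_` in Ruby):
+              #
+              #     client.fine_tuning.jobs.create(
+              #       model: "gpt-4o-mini-2024-07-18",
+              #       training_file: "file-abc123",
+              #       method_: {
+              #         type: :supervised,
+              #         supervised: {
+              #           hyperparameters: {batch_size: :auto, learning_rate_multiplier: :auto, n_epochs: 3}
+              #         }
+              #       }
+              #     )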
# @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -475,11 +526,20 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} + # for more details. + # # The hyperparameters used for the fine-tuning job. # - # @param batch_size [Symbol, :auto, Integer] - # @param learning_rate_multiplier [Symbol, :auto, Float] - # @param n_epochs [Symbol, :auto, Integer] + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # ... + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # ... + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index b745c87b..1e911e0f 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -21,8 +21,10 @@ class JobListEventsParams < OpenAI::Internal::Type::BaseModel optional :limit, Integer # @!method initialize(after: nil, limit: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] + # @param after [String] Identifier for the last event from the previous pagination request. + # + # @param limit [Integer] Number of events to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index ddd836ef..da7349d7 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -28,9 +28,16 @@ class JobListParams < OpenAI::Internal::Type::BaseModel optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true # @!method initialize(after: nil, limit: nil, metadata: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] - # @param metadata [Hash{Symbol=>String}, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::JobListParams} for more details. + # + # @param after [String] Identifier for the last job from the previous pagination request. + # + # @param limit [Integer] Number of fine-tuning jobs to retrieve. + # + # @param metadata [Hash{Symbol=>String}, nil] Optional metadata filter. To filter, use the syntax `metadata[k]=v`. Alternative + # ... 
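+        # @example Hypothetical sketch (added for illustration;
+        #   `client.fine_tuning.jobs.list` is assumed from this SDK's resource
+        #   conventions; the metadata hash is sent as `metadata[k]=v` query params):
+        #
+        #     page = client.fine_tuning.jobs.list(limit: 10, metadata: {key: "value"})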
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index afa0afa4..024df578 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -22,8 +22,10 @@ class CheckpointListParams < OpenAI::Internal::Type::BaseModel optional :limit, Integer # @!method initialize(after: nil, limit: nil, request_options: {}) - # @param after [String] - # @param limit [Integer] + # @param after [String] Identifier for the last checkpoint ID from the previous pagination request. + # + # @param limit [Integer] Number of checkpoints to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 05325975..230c5e4d 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -52,13 +52,19 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a # fine-tuning job that is ready to use. # - # @param id [String] - # @param created_at [Integer] - # @param fine_tuned_model_checkpoint [String] - # @param fine_tuning_job_id [String] - # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] - # @param step_number [Integer] - # @param object [Symbol, :"fine_tuning.job.checkpoint"] + # @param id [String] The checkpoint identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the checkpoint was created. + # + # @param fine_tuned_model_checkpoint [String] The name of the fine-tuned checkpoint model that is created. + # + # @param fine_tuning_job_id [String] The name of the fine-tuning job that this checkpoint was created from. + # + # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. + # + # @param step_number [Integer] The step number that the checkpoint was created at. + # + # @param object [Symbol, :"fine_tuning.job.checkpoint"] The object type, which is always "fine_tuning.job.checkpoint". # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index ac6820db..26ca81c0 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -40,10 +40,20 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!method initialize(name:, description: nil, parameters: nil, strict: nil) - # @param name [String] - # @param description [String] - # @param parameters [Hash{Symbol=>Object}] - # @param strict [Boolean, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FunctionDefinition} for more details. + # + # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc + # ... 
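+      # @example Hypothetical sketch of a complete function definition (added for
+      #   illustration; the keyword arguments follow the `@!method initialize`
+      #   signature documented here):
+      #
+      #     OpenAI::Models::FunctionDefinition.new(
+      #       name: "get_weather",
+      #       description: "Returns the current weather for a given city",
+      #       parameters: {
+      #         type: "object",
+      #         properties: {city: {type: "string"}},
+      #         required: ["city"]
+      #       },
+      #       strict: true
+      #     )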
+ # + # @param description [String] A description of what the function does, used by the model to choose when and ho + # ... + # + # @param parameters [Hash{Symbol=>Object}] The parameters the functions accepts, described as a JSON Schema object. See the + # ... + # + # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the function call. If + # ... end end end diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index 52ace3e6..b392c0c4 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -26,11 +26,18 @@ class Image < OpenAI::Internal::Type::BaseModel optional :url, String # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) + # Some parameter documentations has been truncated, see {OpenAI::Models::Image} + # for more details. + # # Represents the content or the URL of an image generated by the OpenAI API. # - # @param b64_json [String] - # @param revised_prompt [String] - # @param url [String] + # @param b64_json [String] The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, + # ... + # + # @param revised_prompt [String] For `dall-e-3` only, the revised prompt that was used to generate the image. + # + # @param url [String] When using `dall-e-2` or `dall-e-3`, the URL of the generated image if `response + # ... end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 4a665cb8..d3b45a32 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -53,12 +53,26 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) - # @param image [Pathname, StringIO] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param n [Integer, nil] - # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageCreateVariationParams} for more details. + # + # @param image [Pathname, StringIO] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. + # + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # ... + # + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. 
Only `dall-e-2` is supported at this diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 7abf3372..45591d12 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -80,15 +80,35 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) - # @param image [Pathname, StringIO, Array] - # @param prompt [String] - # @param mask [Pathname, StringIO] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param n [Integer, nil] - # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] - # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageEditParams} for more details. + # + # @param image [Pathname, StringIO, Array] The image(s) to edit. Must be a supported image file or an array of images. For + # ... + # + # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character + # ... + # + # @param mask [Pathname, StringIO] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. + # + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # ... + # + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # ... + # + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The image(s) to edit. Must be a supported image file or an array of images. 
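+      # @example Hypothetical sketch (added for illustration; `client.images.edit`
+      #   is assumed from this SDK's resource conventions, with parameters taken
+      #   from ImageEditParams above):
+      #
+      #     response = client.images.edit(
+      #       image: Pathname.new("lounge.png"),
+      #       mask: Pathname.new("mask.png"),
+      #       prompt: "A sunlit indoor lounge area with a pool containing a flamingo",
+      #       n: 1,
+      #       size: :"1024x1024"
+      #     )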
For diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 56986585..0c7fd7f8 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -112,18 +112,43 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) - # @param prompt [String] - # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] - # @param n [Integer, nil] - # @param output_compression [Integer, nil] - # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] - # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] - # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageGenerateParams} for more details. + # + # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte + # ... + # + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # ... + # + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # ... + # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only + # ... + # + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # ... + # + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. ... + # + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # ... + # + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # ... + # + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Allows to set transparency for the background of the generated image(s). 
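+      # @example Hypothetical sketch (added for illustration; `client.images.generate`
+      #   is assumed from this SDK's resource conventions, with parameters taken from
+      #   ImageGenerateParams above):
+      #
+      #     response = client.images.generate(
+      #       prompt: "A cute baby sea otter",
+      #       model: "gpt-image-1",
+      #       n: 1,
+      #       size: :"1024x1024"
+      #     )
+      #     response.data.first # an OpenAI::Models::Image; `b64_json` is the default for `gpt-image-1`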
This diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index 90c77f01..aa3ede9f 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -23,11 +23,17 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Models::ImagesResponse::Usage } # @!method initialize(created:, data: nil, usage: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImagesResponse} for more details. + # # The response from the image generation endpoint. # - # @param created [Integer] - # @param data [Array] - # @param usage [OpenAI::Models::ImagesResponse::Usage] + # @param created [Integer] The Unix timestamp (in seconds) of when the image was created. + # + # @param data [Array] The list of generated images. + # + # @param usage [OpenAI::Models::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # ... # @see OpenAI::Models::ImagesResponse#usage class Usage < OpenAI::Internal::Type::BaseModel @@ -58,10 +64,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) # For `gpt-image-1` only, the token usage information for the image generation. # - # @param input_tokens [Integer] - # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] - # @param output_tokens [Integer] - # @param total_tokens [Integer] + # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. + # + # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # + # @param output_tokens [Integer] The number of image tokens in the output image. + # + # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. # @see OpenAI::Models::ImagesResponse::Usage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel @@ -80,8 +89,9 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(image_tokens:, text_tokens:) # The input tokens detailed information for the image generation. # - # @param image_tokens [Integer] - # @param text_tokens [Integer] + # @param image_tokens [Integer] The number of image tokens in the input prompt. + # + # @param text_tokens [Integer] The number of text tokens in the input prompt. end end end diff --git a/lib/openai/models/model.rb b/lib/openai/models/model.rb index e5893b7c..06721bf5 100644 --- a/lib/openai/models/model.rb +++ b/lib/openai/models/model.rb @@ -31,10 +31,13 @@ class Model < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created:, owned_by:, object: :model) # Describes an OpenAI model offering that can be used with the API. # - # @param id [String] - # @param created [Integer] - # @param owned_by [String] - # @param object [Symbol, :model] + # @param id [String] The model identifier, which can be referenced in the API endpoints. + # + # @param created [Integer] The Unix timestamp (in seconds) when the model was created. + # + # @param owned_by [String] The organization that owns the model. + # + # @param object [Symbol, :model] The object type, which is always "model". 
end end end diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 834ced17..e53c66fe 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -28,10 +28,17 @@ class Moderation < OpenAI::Internal::Type::BaseModel required :flagged, OpenAI::Internal::Type::Boolean # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) - # @param categories [OpenAI::Models::Moderation::Categories] - # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] - # @param category_scores [OpenAI::Models::Moderation::CategoryScores] - # @param flagged [Boolean] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Moderation} for more details. + # + # @param categories [OpenAI::Models::Moderation::Categories] A list of the categories, and whether they are flagged or not. + # + # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. + # ... + # + # @param category_scores [OpenAI::Models::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. + # + # @param flagged [Boolean] Whether any of the below categories are flagged. # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel @@ -131,21 +138,46 @@ class Categories < OpenAI::Internal::Type::BaseModel required :violence_graphic, OpenAI::Internal::Type::Boolean, api_name: :"violence/graphic" # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Moderation::Categories} for more details. + # # A list of the categories, and whether they are flagged or not. # - # @param harassment [Boolean] - # @param harassment_threatening [Boolean] - # @param hate [Boolean] - # @param hate_threatening [Boolean] - # @param illicit [Boolean, nil] - # @param illicit_violent [Boolean, nil] - # @param self_harm [Boolean] - # @param self_harm_instructions [Boolean] - # @param self_harm_intent [Boolean] - # @param sexual [Boolean] - # @param sexual_minors [Boolean] - # @param violence [Boolean] - # @param violence_graphic [Boolean] + # @param harassment [Boolean] Content that expresses, incites, or promotes harassing language towards any targ + # ... + # + # @param harassment_threatening [Boolean] Harassment content that also includes violence or serious harm towards any targe + # ... + # + # @param hate [Boolean] Content that expresses, incites, or promotes hate based on race, gender, ethnici + # ... + # + # @param hate_threatening [Boolean] Hateful content that also includes violence or serious harm towards the targeted + # ... + # + # @param illicit [Boolean, nil] Content that includes instructions or advice that facilitate the planning or exe + # ... + # + # @param illicit_violent [Boolean, nil] Content that includes instructions or advice that facilitate the planning or exe + # ... + # + # @param self_harm [Boolean] Content that promotes, encourages, or depicts acts of self-harm, such as suicide + # ... + # + # @param self_harm_instructions [Boolean] Content that encourages performing acts of self-harm, such as suicide, cutting, + # ... 
+ # + # @param self_harm_intent [Boolean] Content where the speaker expresses that they are engaging or intend to engage i + # ... + # + # @param sexual [Boolean] Content meant to arouse sexual excitement, such as the description of sexual act + # ... + # + # @param sexual_minors [Boolean] Sexual content that includes an individual who is under 18 years old. + # + # @param violence [Boolean] Content that depicts death, violence, or physical injury. + # + # @param violence_graphic [Boolean] Content that depicts death, violence, or physical injury in graphic detail. end # @see OpenAI::Models::Moderation#category_applied_input_types @@ -252,19 +284,31 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # A list of the categories along with the input type(s) that the score applies to. # - # @param harassment [Array] - # @param harassment_threatening [Array] - # @param hate [Array] - # @param hate_threatening [Array] - # @param illicit [Array] - # @param illicit_violent [Array] - # @param self_harm [Array] - # @param self_harm_instructions [Array] - # @param self_harm_intent [Array] - # @param sexual [Array] - # @param sexual_minors [Array] - # @param violence [Array] - # @param violence_graphic [Array] + # @param harassment [Array] The applied input type(s) for the category 'harassment'. + # + # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. + # + # @param hate [Array] The applied input type(s) for the category 'hate'. + # + # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. + # + # @param illicit [Array] The applied input type(s) for the category 'illicit'. + # + # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. + # + # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. + # + # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. + # + # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. + # + # @param sexual [Array] The applied input type(s) for the category 'sexual'. + # + # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. + # + # @param violence [Array] The applied input type(s) for the category 'violence'. + # + # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. module Harassment extend OpenAI::Internal::Type::Enum @@ -473,19 +517,31 @@ class CategoryScores < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # A list of the categories along with their scores as predicted by model. 
# - # @param harassment [Float] - # @param harassment_threatening [Float] - # @param hate [Float] - # @param hate_threatening [Float] - # @param illicit [Float] - # @param illicit_violent [Float] - # @param self_harm [Float] - # @param self_harm_instructions [Float] - # @param self_harm_intent [Float] - # @param sexual [Float] - # @param sexual_minors [Float] - # @param violence [Float] - # @param violence_graphic [Float] + # @param harassment [Float] The score for the category 'harassment'. + # + # @param harassment_threatening [Float] The score for the category 'harassment/threatening'. + # + # @param hate [Float] The score for the category 'hate'. + # + # @param hate_threatening [Float] The score for the category 'hate/threatening'. + # + # @param illicit [Float] The score for the category 'illicit'. + # + # @param illicit_violent [Float] The score for the category 'illicit/violent'. + # + # @param self_harm [Float] The score for the category 'self-harm'. + # + # @param self_harm_instructions [Float] The score for the category 'self-harm/instructions'. + # + # @param self_harm_intent [Float] The score for the category 'self-harm/intent'. + # + # @param sexual [Float] The score for the category 'sexual'. + # + # @param sexual_minors [Float] The score for the category 'sexual/minors'. + # + # @param violence [Float] The score for the category 'violence'. + # + # @param violence_graphic [Float] The score for the category 'violence/graphic'. end end end diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 85c8aa74..b6231c78 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -24,8 +24,14 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel optional :model, union: -> { OpenAI::Models::ModerationCreateParams::Model } # @!method initialize(input:, model: nil, request_options: {}) - # @param input [String, Array, Array] - # @param model [String, Symbol, OpenAI::Models::ModerationModel] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ModerationCreateParams} for more details. + # + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or + # ... + # + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input (or inputs) to classify. Can be a single string, an array of strings, or diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 45575319..18ff1405 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -25,9 +25,11 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, model:, results:) # Represents if a given text input is potentially harmful. # - # @param id [String] - # @param model [String] - # @param results [Array] + # @param id [String] The unique identifier for the moderation request. + # + # @param model [String] The model used to generate the moderation results. + # + # @param results [Array] A list of moderation objects. 
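The moderation models above fit together in one short round trip. A sketch, reusing the `client` from the earlier snippet; the `moderations.create` entry point follows the usual SDK layout and the model name is illustrative.

    result = client.moderations.create(
      input: "some user-provided text",  # a single string; string arrays and multi-modal inputs also work
      model: "omni-moderation-latest"    # illustrative model name
    )

    moderation = result.results.first
    puts result.id                                           # unique identifier for the moderation request
    puts moderation.flagged                                  # whether any category was flagged
    puts moderation.categories.harassment                    # per-category boolean
    puts moderation.category_scores.harassment               # per-category score predicted by the model
    puts moderation.category_applied_input_types.harassment  # input type(s) the score applies to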
end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index f9fbd274..ac2342f8 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -18,8 +18,9 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # An object describing an image to classify. # - # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] - # @param type [Symbol, :image_url] + # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. + # + # @param type [Symbol, :image_url] Always `image_url`. # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -32,7 +33,7 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @!method initialize(url:) # Contains either an image URL or a data URL for a base64 encoded image. # - # @param url [String] + # @param url [String] Either a URL of the image or the base64 encoded image data. end end end diff --git a/lib/openai/models/moderation_text_input.rb b/lib/openai/models/moderation_text_input.rb index 2feaf23d..087178f8 100644 --- a/lib/openai/models/moderation_text_input.rb +++ b/lib/openai/models/moderation_text_input.rb @@ -18,8 +18,9 @@ class ModerationTextInput < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :text) # An object describing text to classify. # - # @param text [String] - # @param type [Symbol, :text] + # @param text [String] A string of text to classify. + # + # @param type [Symbol, :text] Always `text`. end end end diff --git a/lib/openai/models/other_file_chunking_strategy_object.rb b/lib/openai/models/other_file_chunking_strategy_object.rb index 862ae35e..72fc9de8 100644 --- a/lib/openai/models/other_file_chunking_strategy_object.rb +++ b/lib/openai/models/other_file_chunking_strategy_object.rb @@ -14,7 +14,7 @@ class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # because the file was indexed before the `chunking_strategy` concept was # introduced in the API. # - # @param type [Symbol, :other] + # @param type [Symbol, :other] Always `other`. end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 3457ccea..09690970 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -35,14 +35,19 @@ class Reasoning < OpenAI::Internal::Type::BaseModel optional :summary, enum: -> { OpenAI::Models::Reasoning::Summary }, nil?: true # @!method initialize(effort: nil, generate_summary: nil, summary: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Reasoning} for more details. + # # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] - # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. ... + # + # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be ... 
# @deprecated # diff --git a/lib/openai/models/response_format_json_object.rb b/lib/openai/models/response_format_json_object.rb index 52fc23ae..13a620f2 100644 --- a/lib/openai/models/response_format_json_object.rb +++ b/lib/openai/models/response_format_json_object.rb @@ -14,7 +14,7 @@ class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # `json_schema` is recommended for models that support it. Note that the model # will not generate JSON without a system or user message instructing it to do so. # - # @param type [Symbol, :json_object] + # @param type [Symbol, :json_object] The type of response format being defined. Always `json_object`. end end end diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index c6b0fdf9..3e78c9ff 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -16,12 +16,16 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel required :type, const: :json_schema # @!method initialize(json_schema:, type: :json_schema) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ResponseFormatJSONSchema} for more details. + # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] - # @param type [Symbol, :json_schema] + # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. ... + # + # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel @@ -57,12 +61,18 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!method initialize(name:, description: nil, schema: nil, strict: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ResponseFormatJSONSchema::JSONSchema} for more details. + # # Structured Outputs configuration options, including a JSON Schema. # - # @param name [String] - # @param description [String] - # @param schema [Hash{Symbol=>Object}] - # @param strict [Boolean, nil] + # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain ... + # + # @param description [String] A description of what the response format is for, used by the model to ... + # + # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. ... + # + # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. ... end end end diff --git a/lib/openai/models/response_format_text.rb b/lib/openai/models/response_format_text.rb index 8101bcca..a7ac56b0 100644 --- a/lib/openai/models/response_format_text.rb +++ b/lib/openai/models/response_format_text.rb @@ -12,7 +12,7 @@ class ResponseFormatText < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :text) # Default response format. Used to generate text responses. # - # @param type [Symbol, :text] + # @param type [Symbol, :text] The type of response format being defined. Always `text`. 
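To make the JSONSchema constraints concrete, here is a sketch of building a `json_schema` response format. Constructing the model directly is shown for clarity; passing an equivalent Hash wherever a response format is accepted is assumed to behave the same.

    weather_format = OpenAI::Models::ResponseFormatJSONSchema.new(
      json_schema: {
        name: "weather_report",  # a-z, A-Z, 0-9, underscores and dashes
        description: "A structured weather report, used by the model to pick this format.",
        schema: {                # the response schema, expressed as a JSON Schema object
          type: "object",
          properties: {
            city: {type: "string"},
            temp_c: {type: "number"}
          },
          required: %w[city temp_c],
          additionalProperties: false
        },
        strict: true             # enforce exact adherence to the schema when generating output
      }
    )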
end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 41a7499d..29644748 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -29,13 +29,19 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel required :type, const: :computer_use_preview # @!method initialize(display_height:, display_width:, environment:, type: :computer_use_preview) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ComputerTool} for more details. + # # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). # - # @param display_height [Float] - # @param display_width [Float] - # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] - # @param type [Symbol, :computer_use_preview] + # @param display_height [Float] The height of the computer display. ... + # + # @param display_width [Float] The width of the computer display. ... + # + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. ... + # + # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. ... # The type of computer environment to control. # diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 3dc4202e..172802bc 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -25,15 +25,20 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Type } # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::EasyInputMessage} for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, Array] - # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] - # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] + # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. ... + # + # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # + # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. ... # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. 
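A short sketch of the two models just documented. The `:browser` value is an assumption about the `ComputerTool::Environment` enum, whose members this excerpt does not list.

    computer_tool = OpenAI::Models::Responses::ComputerTool.new(
      display_width: 1280.0,  # width of the virtual display
      display_height: 800.0,  # height of the virtual display
      environment: :browser   # assumed member of ComputerTool::Environment
    )

    message = OpenAI::Models::Responses::EasyInputMessage.new(
      role: :user,            # `developer`/`system` instructions take precedence over `user`
      content: "Open the dashboard and take a screenshot."
    )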
diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 85d13196..bdd10f5f 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -36,15 +36,22 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel optional :ranking_options, -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions } # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::FileSearchTool} for more details. + # # A tool that searches for relevant content from uploaded files. Learn more about # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). # - # @param vector_store_ids [Array] - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # @param max_num_results [Integer] - # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] - # @param type [Symbol, :file_search] + # @param vector_store_ids [Array] The IDs of the vector stores to search. ... + # + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. + # + # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 ... + # + # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] Ranking options for search. + # + # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. ... # A filter to apply based on file attributes. # @@ -79,10 +86,14 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel optional :score_threshold, Float # @!method initialize(ranker: nil, score_threshold: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::FileSearchTool::RankingOptions} for more details. + # # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] - # @param score_threshold [Float] + # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. + # + # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. ... # The ranker to use for the file search. # diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index d0cf09ab..e74ed4b6 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -36,15 +36,22 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel optional :description, String, nil?: true # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::FunctionTool} for more details. + # # Defines a function in your own code the model can choose to call. Learn more # about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @param name [String] - # @param parameters [Hash{Symbol=>Object}] - # @param strict [Boolean] - # @param description [String, nil] - # @param type [Symbol, :function] + # @param name [String] The name of the function to call. ... 
+ # + # @param parameters [Hash{Symbol=>Object}] A JSON schema object describing the parameters of the function. ... + # + # @param strict [Boolean] Whether to enforce strict parameter validation. Default `true`. ... + # + # @param description [String, nil] A description of the function. Used by the model to determine whether ... + # + # @param type [Symbol, :function] The type of the function tool. Always `function`. ... end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index d34e8dd7..795ddb04 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -45,11 +45,19 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::Responses::InputItemListParams::Order } # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) - # @param after [String] - # @param before [String] - # @param include [Array] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::InputItemListParams} for more details. + # + # @param after [String] An item ID to list items after, used in pagination. ... + # + # @param before [String] An item ID to list items before, used in pagination. ... + # + # @param include [Array] Additional fields to include in the response. See the `include` ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between ... + # + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order to return the input items in. Default is `asc`. 
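The pagination parameters above map directly onto a list call. A sketch assuming the SDK exposes them under `responses.input_items.list`; the IDs are hypothetical placeholders.

    page = client.responses.input_items.list(
      "resp_123",       # hypothetical response ID
      limit: 20,        # can range between 1 and 100
      order: :asc,      # default is `asc`
      after: "msg_abc"  # an item ID to list items after (pagination cursor)
    )

    # Iteration helpers vary by SDK version; `auto_paging_each` is an assumption.
    page.auto_paging_each { |item| puts item.class }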
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index a51b6317..6adb6175 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -226,29 +226,59 @@ class Response < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) - # @param id [String] - # @param created_at [Float] - # @param error [OpenAI::Models::Responses::ResponseError, nil] - # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] - # @param instructions [String, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # @param output [Array] - # @param parallel_tool_calls [Boolean] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # @param tools [Array] - # @param top_p [Float, nil] - # @param max_output_tokens [Integer, nil] - # @param previous_response_id [String, nil] - # @param reasoning [OpenAI::Models::Reasoning, nil] - # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] - # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] - # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] - # @param usage [OpenAI::Models::Responses::ResponseUsage] - # @param user [String] - # @param object [Symbol, :response] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Response} for more details. + # + # @param id [String] Unique identifier for this Response. ... + # + # @param created_at [Float] Unix timestamp (in seconds) of when this Response was created. ... + # + # @param error [OpenAI::Models::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. ... + # + # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. ... + # + # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # + # @param output [Array] An array of content items generated by the model. ... + # + # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... 
+ # + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # + # @param tools [Array] An array of tools the model may call while generating a response. You ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # + # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in + # ... + # + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # + # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, ... + # + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # + # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. ... + # + # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # + # @param object [Symbol, :response] The object type of this resource - always set to `response`. ... # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel @@ -261,7 +291,7 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(reason: nil) # Details about why the response is incomplete. # - # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] + # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. # The reason why the response is incomplete. # diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index 45ede8ad..2a0394f8 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -17,10 +17,14 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.audio.delta" # @!method initialize(delta:, type: :"response.audio.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseAudioDeltaEvent} for more details. + # # Emitted when there is a partial audio response. # - # @param delta [String] - # @param type [Symbol, :"response.audio.delta"] + # @param delta [String] A chunk of Base64 encoded response audio bytes. ... + # + # @param type [Symbol, :"response.audio.delta"] The type of the event. Always `response.audio.delta`. ... 
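The Response model and the `response.audio.delta` event above are the heart of the streaming surface. A sketch of consuming events follows, assuming a `responses.stream` helper that yields the typed event models; the exact streaming entry point varies between SDK versions, so treat it as an assumption.

    require "base64"
    require "stringio"

    audio_io = StringIO.new  # stand-in sink for decoded audio bytes
    stream = client.responses.stream(model: "gpt-4o", input: "Read me a haiku.")

    stream.each do |event|
      case event
      when OpenAI::Models::Responses::ResponseAudioDeltaEvent
        # `delta` is a chunk of Base64 encoded response audio bytes
        audio_io.write(Base64.decode64(event.delta))
      end
    end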
end end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index 359f11b3..218e6146 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -11,9 +11,12 @@ class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.audio.done" # @!method initialize(type: :"response.audio.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseAudioDoneEvent} for more details. + # # Emitted when the audio response is complete. # - # @param type [Symbol, :"response.audio.done"] + # @param type [Symbol, :"response.audio.done"] The type of the event. Always `response.audio.done`. ... end end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 48f70a9d..31a92552 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -17,10 +17,14 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.audio.transcript.delta" # @!method initialize(delta:, type: :"response.audio.transcript.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent} for more details. + # # Emitted when there is a partial transcript of audio. # - # @param delta [String] - # @param type [Symbol, :"response.audio.transcript.delta"] + # @param delta [String] The partial transcript of the audio response. ... + # + # @param type [Symbol, :"response.audio.transcript.delta"] The type of the event. Always `response.audio.transcript.delta`. ... end end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 5abf997b..806bed6e 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -11,9 +11,12 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.audio.transcript.done" # @!method initialize(type: :"response.audio.transcript.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent} for more details. + # # Emitted when the full audio transcript is completed. # - # @param type [Symbol, :"response.audio.transcript.done"] + # @param type [Symbol, :"response.audio.transcript.done"] The type of the event. Always `response.audio.transcript.done`. ... 
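The transcript events pair up the same way: accumulate `delta` payloads until the matching `done` event arrives. Shown as a separate loop for readability; in practice these `when` branches live in the single event loop above.

    transcript = +""

    stream.each do |event|
      case event
      when OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent
        transcript << event.delta  # partial transcript of the audio response
      when OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent
        puts transcript            # the full transcript is complete at this point
      end
    end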
end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 934cb5f9..0f337be3 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -23,11 +23,17 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo required :type, const: :"response.code_interpreter_call.code.delta" # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more + # details. + # # Emitted when a partial code snippet is added by the code interpreter. # - # @param delta [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.code_interpreter_call.code.delta"] + # @param delta [String] The partial code snippet added by the code interpreter. ... + # + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # + # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. Always `response.code_interpreter_call.code.delta`. ... end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 8b10fde8..4d7ad38d 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -23,11 +23,17 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod required :type, const: :"response.code_interpreter_call.code.done" # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more + # details. + # # Emitted when code snippet output is finalized by the code interpreter. # - # @param code [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.code_interpreter_call.code.done"] + # @param code [String] The final code snippet output by the code interpreter. ... + # + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # + # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. ... end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index c0e507cd..f5cd8062 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -23,11 +23,17 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo required :type, const: :"response.code_interpreter_call.completed" # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more + # details. 
+ # # Emitted when the code interpreter call is completed. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # @param output_index [Integer] - # @param type [Symbol, :"response.code_interpreter_call.completed"] + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... + # + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # + # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. ... end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 3277ceef..2d6c8c32 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -23,11 +23,17 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM required :type, const: :"response.code_interpreter_call.in_progress" # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more + # details. + # # Emitted when a code interpreter call is in progress. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # @param output_index [Integer] - # @param type [Symbol, :"response.code_interpreter_call.in_progress"] + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... + # + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # + # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. ... end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 3a361629..63e41324 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -23,11 +23,17 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas required :type, const: :"response.code_interpreter_call.interpreting" # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for + # more details. + # # Emitted when the code interpreter is actively interpreting the code snippet. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # @param output_index [Integer] - # @param type [Symbol, :"response.code_interpreter_call.interpreting"] + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... + # + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... 
+ # + # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. ... end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index cbab454f..3ab4f34f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -36,13 +36,20 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel required :type, const: :code_interpreter_call # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details. + # # A tool call to run code. # - # @param id [String] - # @param code [String] - # @param results [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] - # @param type [Symbol, :code_interpreter_call] + # @param id [String] The unique ID of the code interpreter tool call. ... + # + # @param code [String] The code to run. ... + # + # @param results [Array] The results of the code interpreter tool call. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. ... + # + # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. ... # The output of a code interpreter tool call that is text. module Result @@ -70,10 +77,15 @@ class Logs < OpenAI::Internal::Type::BaseModel required :type, const: :logs # @!method initialize(logs:, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for + # more details. + # # The output of a code interpreter tool call that is text. # - # @param logs [String] - # @param type [Symbol, :logs] + # @param logs [String] The logs of the code interpreter tool call. ... + # + # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`. ... end class Files < OpenAI::Internal::Type::BaseModel @@ -90,10 +102,15 @@ class Files < OpenAI::Internal::Type::BaseModel required :type, const: :files # @!method initialize(files:, type: :files) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for + # more details. + # # The output of a code interpreter tool call that is a file. # # @param files [Array] - # @param type [Symbol, :files] + # + # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. ... class File < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -109,8 +126,13 @@ class File < OpenAI::Internal::Type::BaseModel required :mime_type, String # @!method initialize(file_id:, mime_type:) - # @param file_id [String] - # @param mime_type [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} + # for more details. + # + # @param file_id [String] The ID of the file. ... + # + # @param mime_type [String] The MIME type of the file. ... 
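The `Result` union above reads best with a small dispatch. A sketch of walking a finished code interpreter call, assuming `response` is an OpenAI::Models::Responses::Response whose `output` holds the typed output items.

    call = response.output.find do |item|
      item.is_a?(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall)
    end

    if call
      puts call.code    # the code that was run
      puts call.status  # status of the tool call

      call.results.each do |result|
        case result
        when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs
          puts result.logs  # text output of the run
        when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files
          result.files.each { |f| puts "#{f.file_id} (#{f.mime_type})" }
        end
      end
    end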
end end diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 509663df..e4995227 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -17,10 +17,14 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.completed" # @!method initialize(response:, type: :"response.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCompletedEvent} for more details. + # # Emitted when the model response is complete. # - # @param response [OpenAI::Models::Responses::Response] - # @param type [Symbol, :"response.completed"] + # @param response [OpenAI::Models::Responses::Response] Properties of the completed response. ... + # + # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`. ... end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index b84db1e5..5f2dedd1 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -43,16 +43,24 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Type } # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall} for more details. + # # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) # for more information. # - # @param id [String] - # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - # @param call_id [String] - # @param pending_safety_checks [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] - # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] + # @param id [String] The unique ID of the computer call. + # + # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] A click action. ... 
+ # + # @param call_id [String] An identifier used when responding to the tool call with output. ... + # + # @param pending_safety_checks [Array] The pending safety checks for the computer call. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. # A click action. # @@ -117,12 +125,20 @@ class Click < OpenAI::Internal::Type::BaseModel required :y_, Integer, api_name: :y # @!method initialize(button:, x:, y_:, type: :click) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click} for more + # details. + # # A click action. # - # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] - # @param x [Integer] - # @param y_ [Integer] - # @param type [Symbol, :click] + # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right + # ... + # + # @param x [Integer] The x-coordinate where the click occurred. ... + # + # @param y_ [Integer] The y-coordinate where the click occurred. ... + # + # @param type [Symbol, :click] Specifies the event type. For a click action, this property is ... # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -163,11 +179,17 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel required :y_, Integer, api_name: :y # @!method initialize(x:, y_:, type: :double_click) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick} for + # more details. + # # A double click action. # - # @param x [Integer] - # @param y_ [Integer] - # @param type [Symbol, :double_click] + # @param x [Integer] The x-coordinate where the double click occurred. ... + # + # @param y_ [Integer] The y-coordinate where the double click occurred. ... + # + # @param type [Symbol, :double_click] Specifies the event type. For a double click action, this property is ... end class Drag < OpenAI::Internal::Type::BaseModel @@ -194,10 +216,16 @@ class Drag < OpenAI::Internal::Type::BaseModel required :type, const: :drag # @!method initialize(path:, type: :drag) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag} for more + # details. + # # A drag action. # - # @param path [Array] - # @param type [Symbol, :drag] + # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi + # ... + # + # @param type [Symbol, :drag] Specifies the event type. For a drag action, this property is ... class Path < OpenAI::Internal::Type::BaseModel # @!attribute x @@ -213,10 +241,15 @@ class Path < OpenAI::Internal::Type::BaseModel required :y_, Integer, api_name: :y # @!method initialize(x:, y_:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path} for + # more details. + # # A series of x/y coordinate pairs in the drag path. # - # @param x [Integer] - # @param y_ [Integer] + # @param x [Integer] The x-coordinate. ... + # + # @param y_ [Integer] The y-coordinate. ... 
end end @@ -236,10 +269,15 @@ class Keypress < OpenAI::Internal::Type::BaseModel required :type, const: :keypress # @!method initialize(keys:, type: :keypress) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress} for more + # details. + # # A collection of keypresses the model would like to perform. # - # @param keys [Array] - # @param type [Symbol, :keypress] + # @param keys [Array] The combination of keys the model is requesting to be pressed. This is an ... + # + # @param type [Symbol, :keypress] Specifies the event type. For a keypress action, this property is ... end class Move < OpenAI::Internal::Type::BaseModel @@ -263,11 +301,17 @@ class Move < OpenAI::Internal::Type::BaseModel required :y_, Integer, api_name: :y # @!method initialize(x:, y_:, type: :move) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move} for more + # details. + # # A mouse move action. # - # @param x [Integer] - # @param y_ [Integer] - # @param type [Symbol, :move] + # @param x [Integer] The x-coordinate to move to. ... + # + # @param y_ [Integer] The y-coordinate to move to. ... + # + # @param type [Symbol, :move] Specifies the event type. For a move action, this property is ... end class Screenshot < OpenAI::Internal::Type::BaseModel @@ -279,9 +323,13 @@ class Screenshot < OpenAI::Internal::Type::BaseModel required :type, const: :screenshot # @!method initialize(type: :screenshot) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot} for + # more details. + # # A screenshot action. # - # @param type [Symbol, :screenshot] + # @param type [Symbol, :screenshot] Specifies the event type. For a screenshot action, this property is ... end class Scroll < OpenAI::Internal::Type::BaseModel @@ -317,13 +365,21 @@ class Scroll < OpenAI::Internal::Type::BaseModel required :y_, Integer, api_name: :y # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll} for more + # details. + # # A scroll action. # - # @param scroll_x [Integer] - # @param scroll_y [Integer] - # @param x [Integer] - # @param y_ [Integer] - # @param type [Symbol, :scroll] + # @param scroll_x [Integer] The horizontal scroll distance. ... + # + # @param scroll_y [Integer] The vertical scroll distance. ... + # + # @param x [Integer] The x-coordinate where the scroll occurred. ... + # + # @param y_ [Integer] The y-coordinate where the scroll occurred. ... + # + # @param type [Symbol, :scroll] Specifies the event type. For a scroll action, this property is ... end class Type < OpenAI::Internal::Type::BaseModel @@ -341,10 +397,15 @@ class Type < OpenAI::Internal::Type::BaseModel required :type, const: :type # @!method initialize(text:, type: :type) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type} for more + # details. + # # An action to type in text. # - # @param text [String] - # @param type [Symbol, :type] + # @param text [String] The text to type. ... + # + # @param type [Symbol, :type] Specifies the event type. For a type action, this property is ... 
end class Wait < OpenAI::Internal::Type::BaseModel @@ -356,9 +417,13 @@ class Wait < OpenAI::Internal::Type::BaseModel required :type, const: :wait # @!method initialize(type: :wait) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait} for more + # details. + # # A wait action. # - # @param type [Symbol, :wait] + # @param type [Symbol, :wait] Specifies the event type. For a wait action, this property is ... end # @!method self.variants @@ -387,9 +452,11 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code:, message:) # A pending safety check for the computer call. # - # @param id [String] - # @param code [String] - # @param message [String] + # @param id [String] The ID of the pending safety check. + # + # @param code [String] The type of the pending safety check. + # + # @param message [String] Details about the pending safety check. end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 9bfc14e2..1b404766 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -44,12 +44,21 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) - # @param id [String] - # @param call_id [String] - # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # @param acknowledged_safety_checks [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] - # @param type [Symbol, :computer_call_output] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputItem} for more + # details. + # + # @param id [String] The unique ID of the computer call tool output. ... + # + # @param call_id [String] The ID of the computer tool call that produced the output. ... + # + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. ... + # + # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. ... class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -73,9 +82,11 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code:, message:) # A pending safety check for the computer call. # - # @param id [String] - # @param code [String] - # @param message [String] + # @param id [String] The ID of the pending safety check. + # + # @param code [String] The type of the pending safety check. + # + # @param message [String] Details about the pending safety check. end # The status of the message input. 
One of `in_progress`, `completed`, or diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 9dc1550c..e42b2d78 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -24,11 +24,17 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod optional :image_url, String # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot} for more + # details. + # # A computer screenshot image used with the computer use tool. # - # @param file_id [String] - # @param image_url [String] - # @param type [Symbol, :computer_screenshot] + # @param file_id [String] The identifier of an uploaded file that contains the screenshot. + # + # @param image_url [String] The URL of the screenshot image. + # + # @param type [Symbol, :computer_screenshot] Specifies the event type. For a computer screenshot, this property is ... end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 8399e487..86328573 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -35,13 +35,20 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.content_part.added" # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseContentPartAddedEvent} for more details. + # # Emitted when a new content part is added. # - # @param content_index [Integer] - # @param item_id [String] - # @param output_index [Integer] - # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # @param type [Symbol, :"response.content_part.added"] + # @param content_index [Integer] The index of the content part that was added. ... + # + # @param item_id [String] The ID of the output item that the content part was added to. ... + # + # @param output_index [Integer] The index of the output item that the content part was added to. ... + # + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that was added. ... + # + # @param type [Symbol, :"response.content_part.added"] The type of the event. Always `response.content_part.added`. ... # The content part that was added. 
# diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 945ef949..2858e020 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -35,13 +35,20 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.content_part.done" # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseContentPartDoneEvent} for more details. + # # Emitted when a content part is done. # - # @param content_index [Integer] - # @param item_id [String] - # @param output_index [Integer] - # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # @param type [Symbol, :"response.content_part.done"] + # @param content_index [Integer] The index of the content part that is done. ... + # + # @param item_id [String] The ID of the output item that the content part was added to. ... + # + # @param output_index [Integer] The index of the output item that the content part was added to. ... + # + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that is done. ... + # + # @param type [Symbol, :"response.content_part.done"] The type of the event. Always `response.content_part.done`. ... # The content part that is done. # diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index f20f9cc0..6e51faa1 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -209,24 +209,50 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :user, String # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) - # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # @param include [Array, nil] - # @param instructions [String, nil] - # @param max_output_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param parallel_tool_calls [Boolean, nil] - # @param previous_response_id [String, nil] - # @param reasoning [OpenAI::Models::Reasoning, nil] - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] - # @param store [Boolean, nil] - # @param temperature [Float, nil] - # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # @param tools [Array] - # @param top_p [Float, nil] - # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] - # @param user [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCreateParams} for more details. 
+ # + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # + # @param include [Array, nil] Specify additional output data to include in the model response. Currently ... + # + # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # ... + # + # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ... + # + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # + # @param tools [Array] An array of tools the model may call while generating a response. You ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Text, image, or file inputs to the model, used to generate a response. diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index ad5cf6d0..0c8408d0 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -17,10 +17,14 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.created" # @!method initialize(response:, type: :"response.created") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCreatedEvent} for more details. + # # An event that is emitted when a response is created. # - # @param response [OpenAI::Models::Responses::Response] - # @param type [Symbol, :"response.created"] + # @param response [OpenAI::Models::Responses::Response] The response that was created. ... + # + # @param type [Symbol, :"response.created"] The type of the event. Always `response.created`. ... 
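The request-side model above enumerates a large number of optional knobs. A minimal sketch of how these parameters might be passed through, assuming the gem wires `OpenAI::Client` up with a `responses.create` resource method (the client plumbing is not part of this patch):

```ruby
require "openai"

# Hypothetical call site; only the parameter names documented above are assumed.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

response = client.responses.create(
  model: "gpt-4o",                        # Model ID used to generate the response
  input: "Summarize the plot of Hamlet.", # plain-text input
  max_output_tokens: 256,                 # upper bound on generated tokens
  temperature: 0.2                        # lower values give more deterministic output
)

puts response.id
```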
end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 3727a834..3e4e38bc 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -17,10 +17,14 @@ class ResponseError < OpenAI::Internal::Type::BaseModel required :message, String # @!method initialize(code:, message:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseError} for more details. + # # An error object returned when the model fails to generate a Response. # - # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] - # @param message [String] + # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] The error code for the response. ... + # + # @param message [String] A human-readable description of the error. ... # The error code for the response. # diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index d3bfa55e..c8ad2c34 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -29,12 +29,18 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel required :type, const: :error # @!method initialize(code:, message:, param:, type: :error) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseErrorEvent} for more details. + # # Emitted when an error occurs. # - # @param code [String, nil] - # @param message [String] - # @param param [String, nil] - # @param type [Symbol, :error] + # @param code [String, nil] The error code. ... + # + # @param message [String] The error message. ... + # + # @param param [String, nil] The error parameter. ... + # + # @param type [Symbol, :error] The type of the event. Always `error`. ... end end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index cbed3cb7..54ee8e5c 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -17,10 +17,14 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.failed" # @!method initialize(response:, type: :"response.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFailedEvent} for more details. + # # An event that is emitted when a response fails. # - # @param response [OpenAI::Models::Responses::Response] - # @param type [Symbol, :"response.failed"] + # @param response [OpenAI::Models::Responses::Response] The response that failed. ... + # + # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`. ... 
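`response.created` and `response.failed` both wrap the full `Response` object, so a consumer can dispatch on the event `type` alone. A sketch, where `stream` stands in for whatever enumerable of event models the SDK yields:

```ruby
stream.each do |event|
  case event.type
  in :"response.created"
    puts "response #{event.response.id} created"
  in :"response.failed"
    # Per the docs above, a failed Response carries a ResponseError;
    # it is assumed here to be exposed as `event.response.error`.
    warn "response failed: #{event.response.error&.message}"
  else
    nil # other event types are ignored in this sketch
  end
end
```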
end end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index ffb7c68d..1afd9153 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -23,11 +23,17 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.file_search_call.completed" # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent} for more + # details. + # # Emitted when a file search call is completed (results found). # - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.file_search_call.completed"] + # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # + # @param output_index [Integer] The index of the output item that the file search call is initiated. ... + # + # @param type [Symbol, :"response.file_search_call.completed"] The type of the event. Always `response.file_search_call.completed`. ... end end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 72fafbc7..74e08290 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -23,11 +23,17 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.file_search_call.in_progress" # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent} for more + # details. + # # Emitted when a file search call is initiated. # - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.file_search_call.in_progress"] + # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # + # @param output_index [Integer] The index of the output item that the file search call is initiated. ... + # + # @param type [Symbol, :"response.file_search_call.in_progress"] The type of the event. Always `response.file_search_call.in_progress`. ... end end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 07c1186a..118b323f 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -23,11 +23,17 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.file_search_call.searching" # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent} for more + # details. + # # Emitted when a file search is currently searching. 
# - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.file_search_call.searching"] + # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # + # @param output_index [Integer] The index of the output item that the file search call is searching. ... + # + # @param type [Symbol, :"response.file_search_call.searching"] The type of the event. Always `response.file_search_call.searching`. ... end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 9fc12343..71462ed2 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -38,15 +38,22 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel nil?: true # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFileSearchToolCall} for more details. + # # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) # for more information. # - # @param id [String] - # @param queries [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] - # @param results [Array, nil] - # @param type [Symbol, :file_search_call] + # @param id [String] The unique ID of the file search tool call. ... + # + # @param queries [Array] The queries used to search for files. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, ... + # + # @param results [Array, nil] The results of the file search tool call. ... + # + # @param type [Symbol, :file_search_call] The type of the file search tool call. Always `file_search_call`. ... # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, @@ -103,11 +110,19 @@ class Result < OpenAI::Internal::Type::BaseModel optional :text, String # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # @param file_id [String] - # @param filename [String] - # @param score [Float] - # @param text [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFileSearchToolCall::Result} for more + # details. + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param file_id [String] The unique ID of the file. ... + # + # @param filename [String] The name of the file. ... + # + # @param score [Float] The relevance score of the file - a value between 0 and 1. ... + # + # @param text [String] The text that was retrieved from the file. ... 
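Each `Result` above pairs the retrieved `text` with a relevance `score` between 0 and 1, which makes client-side ranking straightforward. A small sketch using only the fields documented here (`top_matches` is a hypothetical helper):

```ruby
# Return the `limit` most relevant results of a completed file search call.
def top_matches(file_search_call, limit: 3)
  (file_search_call.results || [])
    .sort_by { |r| -(r.score || 0.0) }                       # score: 0..1 relevance
    .first(limit)
    .map { |r| [r.filename, r.score, r.text&.slice(0, 80)] } # preview of each chunk
end
```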
module Attribute extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 106627ff..d2ac2772 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -42,15 +42,23 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel optional :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig} for more + # details. + # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param name [String] - # @param schema [Hash{Symbol=>Object}] - # @param description [String] - # @param strict [Boolean, nil] - # @param type [Symbol, :json_schema] + # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain ... + # + # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. ... + # + # @param description [String] A description of what the response format is for, used by the model to ... + # + # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. ... + # + # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index abd84e9c..57e63c2b 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -29,12 +29,21 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode required :type, const: :"response.function_call_arguments.delta" # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more + # details. + # # Emitted when there is a partial function-call arguments delta. # - # @param delta [String] - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.function_call_arguments.delta"] + # @param delta [String] The function-call arguments delta that is added. ... + # + # @param item_id [String] The ID of the output item that the function-call arguments delta is added to. + # ... + # + # @param output_index [Integer] The index of the output item that the function-call arguments delta is added to. + # ... + # + # @param type [Symbol, :"response.function_call_arguments.delta"] The type of the event. Always `response.function_call_arguments.delta`. ... 
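Because `delta` is a raw fragment of a JSON string, consumers typically buffer deltas per `item_id` and only parse once the `.done` event (documented next) supplies the finalized `arguments`. A sketch, with `stream` again a stand-in enumerable of event models:

```ruby
require "json"

buffers = Hash.new { |h, k| h[k] = +"" } # one string buffer per item_id

stream.each do |event|
  case event.type
  in :"response.function_call_arguments.delta"
    buffers[event.item_id] << event.delta # append the raw fragment
  in :"response.function_call_arguments.done"
    args = JSON.parse(event.arguments)    # finalized JSON string
    puts "call #{event.item_id}: #{args.inspect}"
  else
    nil
  end
end
```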
end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index ba402026..ded7ece3 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -30,9 +30,12 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done") # Emitted when function-call arguments are finalized. # - # @param arguments [String] - # @param item_id [String] - # @param output_index [Integer] + # @param arguments [String] The function-call arguments. + # + # @param item_id [String] The ID of the item. + # + # @param output_index [Integer] The index of the output item. + # # @param type [Symbol, :"response.function_call_arguments.done"] end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 7db552df..1aa5d417 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -42,16 +42,24 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCall::Status } # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionToolCall} for more details. + # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. # - # @param arguments [String] - # @param call_id [String] - # @param name [String] - # @param id [String] - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] - # @param type [Symbol, :function_call] + # @param arguments [String] A JSON string of the arguments to pass to the function. ... + # + # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # + # @param name [String] The name of the function to run. ... + # + # @param id [String] The unique ID of the function tool call. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :function_call] The type of the function tool call. Always `function_call`. ... # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index 2244965f..e304dda3 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -11,11 +11,14 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction required :id, String # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionToolCallItem} for more details. + # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. 
# - # @param id [String] + # @param id [String] The unique ID of the function tool call. ... end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 6623a73c..8cc6084a 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -36,11 +36,19 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) - # @param id [String] - # @param call_id [String] - # @param output [String] - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] - # @param type [Symbol, :function_call_output] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem} for more + # details. + # + # @param id [String] The unique ID of the function call tool output. ... + # + # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # + # @param output [String] A JSON string of the output of the function tool call. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. ... # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 3529a019..c08e4e32 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -23,13 +23,18 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel required :type, const: :web_search_call # @!method initialize(id:, status:, type: :web_search_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionWebSearch} for more details. + # # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for # more information. # - # @param id [String] - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] - # @param type [Symbol, :web_search_call] + # @param id [String] The unique ID of the web search tool call. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. ... + # + # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. ... # The status of the web search tool call. 
# diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index 0259d443..f095d74d 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -17,10 +17,14 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.in_progress" # @!method initialize(response:, type: :"response.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInProgressEvent} for more details. + # # Emitted when the response is in progress. # - # @param response [OpenAI::Models::Responses::Response] - # @param type [Symbol, :"response.in_progress"] + # @param response [OpenAI::Models::Responses::Response] The response that is in progress. ... + # + # @param type [Symbol, :"response.in_progress"] The type of the event. Always `response.in_progress`. ... end end end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index 5536418d..e23ef3a8 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -17,10 +17,14 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.incomplete" # @!method initialize(response:, type: :"response.incomplete") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseIncompleteEvent} for more details. + # # An event that is emitted when a response finishes as incomplete. # - # @param response [OpenAI::Models::Responses::Response] - # @param type [Symbol, :"response.incomplete"] + # @param response [OpenAI::Models::Responses::Response] The response that was incomplete. ... + # + # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`. ... end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index f03f775a..ea23bdcd 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -23,11 +23,16 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel required :type, const: :input_audio # @!method initialize(data:, format_:, type: :input_audio) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputAudio} for more details. + # # An audio input to the model. # - # @param data [String] - # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] - # @param type [Symbol, :input_audio] + # @param data [String] Base64-encoded audio data. ... + # + # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and ... + # + # @param type [Symbol, :input_audio] The type of the input item. Always `input_audio`. ... # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
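`ResponseInputAudio` takes base64-encoded bytes plus a format enum; note the Ruby-side keyword is `format_`, presumably to avoid shadowing `Kernel#format`. A sketch that builds one from a local file, assuming the generated models accept keyword construction as their `@!method initialize` signatures suggest:

```ruby
require "base64"

audio = OpenAI::Models::Responses::ResponseInputAudio.new(
  data: Base64.strict_encode64(File.binread("question.wav")), # base64-encoded audio
  format_: :wav                                               # `mp3` or `wav`
)
```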
# diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 13aeefd9..06938134 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -29,12 +29,18 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel optional :filename, String # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputFile} for more details. + # # A file input to the model. # - # @param file_data [String] - # @param file_id [String] - # @param filename [String] - # @param type [Symbol, :input_file] + # @param file_data [String] The content of the file to be sent to the model. ... + # + # @param file_id [String] The ID of the file to be sent to the model. ... + # + # @param filename [String] The name of the file to be sent to the model. ... + # + # @param type [Symbol, :input_file] The type of the input item. Always `input_file`. ... end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 6a07ce69..997851c2 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -31,13 +31,19 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel optional :image_url, String, nil?: true # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputImage} for more details. + # # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] - # @param file_id [String, nil] - # @param image_url [String, nil] - # @param type [Symbol, :input_image] + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, ... + # + # @param file_id [String, nil] The ID of the file to be sent to the model. ... + # + # @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or ... + # + # @param type [Symbol, :input_image] The type of the input item. Always `input_image`. ... # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 72801ab7..29e533f3 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -86,14 +86,20 @@ class Message < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Type } # @!method initialize(content:, role:, status: nil, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::Message} for more details. + # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. 
# - # @param content [Array] - # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] - # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] + # @param content [Array] A list of one or many input items to the model, containing different content ... + # + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. ... # The role of the message input. One of `user`, `system`, or `developer`. # @@ -178,14 +184,23 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status } # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput} for more + # details. + # # The output of a computer tool call. # - # @param call_id [String] - # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # @param id [String] - # @param acknowledged_safety_checks [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] - # @param type [Symbol, :computer_call_output] + # @param call_id [String] The ID of the computer tool call that produced the output. ... + # + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. ... + # + # @param id [String] The ID of the computer tool call output. ... + # + # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] The status of the message input. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. ... class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -209,9 +224,11 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code:, message:) # A pending safety check for the computer call. # - # @param id [String] - # @param code [String] - # @param message [String] + # @param id [String] The ID of the pending safety check. + # + # @param code [String] The type of the pending safety check. + # + # @param message [String] Details about the pending safety check. end # The status of the message input. 
One of `in_progress`, `completed`, or @@ -264,13 +281,21 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status } # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput} for more + # details. + # # The output of a function tool call. # - # @param call_id [String] - # @param output [String] - # @param id [String] - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] - # @param type [Symbol, :function_call_output] + # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # + # @param output [String] A JSON string of the output of the function tool call. ... + # + # @param id [String] The unique ID of the function tool call output. Populated when this item ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] The status of the item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. ... # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -302,10 +327,14 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(id:, type: :item_reference) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::ItemReference} for more details. + # # An internal identifier for an item to reference. # - # @param id [String] - # @param type [Symbol, :item_reference] + # @param id [String] The ID of the item to reference. ... + # + # @param type [Symbol, :item_reference] The type of item to reference. Always `item_reference`. ... end # @!method self.variants diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 2b7b1ab1..f188d81c 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -38,11 +38,18 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Type } # @!method initialize(id:, content:, role:, status: nil, type: nil) - # @param id [String] - # @param content [Array] - # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] - # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputMessageItem} for more details. + # + # @param id [String] The unique ID of the message input. ... + # + # @param content [Array] A list of one or many input items to the model, containing different content ... + # + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or ... 
+ # + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. ... # The role of the message input. One of `user`, `system`, or `developer`. # diff --git a/lib/openai/models/responses/response_input_text.rb b/lib/openai/models/responses/response_input_text.rb index ad65b3f2..c05e5a54 100644 --- a/lib/openai/models/responses/response_input_text.rb +++ b/lib/openai/models/responses/response_input_text.rb @@ -17,10 +17,14 @@ class ResponseInputText < OpenAI::Internal::Type::BaseModel required :type, const: :input_text # @!method initialize(text:, type: :input_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputText} for more details. + # # A text input to the model. # - # @param text [String] - # @param type [Symbol, :input_text] + # @param text [String] The text input to the model. ... + # + # @param type [Symbol, :input_text] The type of the input item. Always `input_text`. ... end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 2b6c1e85..06e55c53 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -37,11 +37,15 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) # A list of Response items. # - # @param data [Array] - # @param first_id [String] - # @param has_more [Boolean] - # @param last_id [String] - # @param object [Symbol, :list] + # @param data [Array] A list of items used to generate this response. + # + # @param first_id [String] The ID of the first item in the list. + # + # @param has_more [Boolean] Whether there are more items available. + # + # @param last_id [String] The ID of the last item in the list. + # + # @param object [Symbol, :list] The type of object returned, must be `list`. end end diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index 55675501..f66f4024 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -23,11 +23,16 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel required :type, const: :output_audio # @!method initialize(data:, transcript:, type: :output_audio) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputAudio} for more details. + # # An audio output from the model. # - # @param data [String] - # @param transcript [String] - # @param type [Symbol, :output_audio] + # @param data [String] Base64-encoded audio data from the model. ... + # + # @param transcript [String] The transcript of the audio data from the model. ... + # + # @param type [Symbol, :output_audio] The type of the output audio. Always `output_audio`. ... 
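`ResponseItemList` is a conventional cursor page: `has_more` signals whether to continue and `last_id` is the cursor to resume from. A pagination sketch, where `fetch_page` is a hypothetical stand-in for whichever call returns one page of items:

```ruby
items = []
after = nil

loop do
  page = fetch_page(after: after) # returns a ResponseItemList
  items.concat(page.data)
  break unless page.has_more      # no further pages
  after = page.last_id            # resume after the last item seen
end
```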
end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 2893bec8..3a42485d 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -23,11 +23,16 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.output_item.added" # @!method initialize(item:, output_index:, type: :"response.output_item.added") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItemAddedEvent} for more details. + # # Emitted when a new output item is added. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # @param output_index [Integer] - # @param type [Symbol, :"response.output_item.added"] + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] The output item that was added. ... + # + # @param output_index [Integer] The index of the output item that was added. ... + # + # @param type [Symbol, :"response.output_item.added"] The type of the event. Always `response.output_item.added`. ... end end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index a8ff9471..d3bfbfc3 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -23,11 +23,16 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.output_item.done" # @!method initialize(item:, output_index:, type: :"response.output_item.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItemDoneEvent} for more details. + # # Emitted when an output item is marked done. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # @param output_index [Integer] - # @param type [Symbol, :"response.output_item.done"] + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] The output item that was marked done. ... + # + # @param output_index [Integer] The index of the output item that was marked done. ... + # + # @param type [Symbol, :"response.output_item.done"] The type of the event. Always `response.output_item.done`. ... 
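`output_index` gives each output item a stable slot, so a consumer can keep the provisional item from `.added` and overwrite it with the finalized item from `.done`. A sketch (`stream` as before):

```ruby
items = {}

stream.each do |event|
  case event.type
  in :"response.output_item.added"
    items[event.output_index] = event.item # provisional item
  in :"response.output_item.done"
    items[event.output_index] = event.item # finalized item replaces it
  else
    nil
  end
end
```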
end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 107a4798..2406dc01 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -37,13 +37,20 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel required :type, const: :message # @!method initialize(id:, content:, status:, role: :assistant, type: :message) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputMessage} for more details. + # # An output message from the model. # - # @param id [String] - # @param content [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] - # @param role [Symbol, :assistant] - # @param type [Symbol, :message] + # @param id [String] The unique ID of the output message. ... + # + # @param content [Array] The content of the output message. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or ... + # + # @param role [Symbol, :assistant] The role of the output message. Always `assistant`. ... + # + # @param type [Symbol, :message] The type of the output message. Always `message`. ... # A text output from the model. module Content diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb index 0c2ccce2..82729b85 100644 --- a/lib/openai/models/responses/response_output_refusal.rb +++ b/lib/openai/models/responses/response_output_refusal.rb @@ -17,10 +17,14 @@ class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel required :type, const: :refusal # @!method initialize(refusal:, type: :refusal) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputRefusal} for more details. + # # A refusal from the model. # - # @param refusal [String] - # @param type [Symbol, :refusal] + # @param refusal [String] The refusal explanation from the model. ... + # + # @param type [Symbol, :refusal] The type of the refusal. Always `refusal`. ... end end end diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index a857410f..32465ac6 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -24,11 +24,16 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel required :type, const: :output_text # @!method initialize(annotations:, text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputText} for more details. + # # A text output from the model. # - # @param annotations [Array] - # @param text [String] - # @param type [Symbol, :output_text] + # @param annotations [Array] The annotations of the text output. ... + # + # @param text [String] The text output from the model. ... + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... # A citation to a file.
module Annotation @@ -65,11 +70,17 @@ class FileCitation < OpenAI::Internal::Type::BaseModel required :type, const: :file_citation # @!method initialize(file_id:, index:, type: :file_citation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation} for + # more details. + # # A citation to a file. # - # @param file_id [String] - # @param index [Integer] - # @param type [Symbol, :file_citation] + # @param file_id [String] The ID of the file. ... + # + # @param index [Integer] The index of the file in the list of files. ... + # + # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. ... end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -104,13 +115,21 @@ class URLCitation < OpenAI::Internal::Type::BaseModel required :url, String # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation} for + # more details. + # # A citation for a web resource used to generate a model response. # - # @param end_index [Integer] - # @param start_index [Integer] - # @param title [String] - # @param url [String] - # @param type [Symbol, :url_citation] + # @param end_index [Integer] The index of the last character of the URL citation in the message. ... + # + # @param start_index [Integer] The index of the first character of the URL citation in the message. ... + # + # @param title [String] The title of the web resource. ... + # + # @param url [String] The URL of the web resource. ... + # + # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. ... end class FilePath < OpenAI::Internal::Type::BaseModel @@ -133,11 +152,17 @@ class FilePath < OpenAI::Internal::Type::BaseModel required :type, const: :file_path # @!method initialize(file_id:, index:, type: :file_path) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath} for more + # details. + # # A path to a file. # - # @param file_id [String] - # @param index [Integer] - # @param type [Symbol, :file_path] + # @param file_id [String] The ID of the file. ... + # + # @param index [Integer] The index of the file in the list of files. ... + # + # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. ... end # @!method self.variants diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index bec6c280..a09ba35b 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -31,13 +31,19 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status } # @!method initialize(id:, summary:, status: nil, type: :reasoning) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningItem} for more details. + # # A description of the chain of thought used by a reasoning model while generating # a response. # - # @param id [String] - # @param summary [Array] - # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] - # @param type [Symbol, :reasoning] + # @param id [String] The unique identifier of the reasoning content. ... 
+ # + # @param summary [Array] Reasoning text contents. ... + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or ... + # + # @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. ... class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -53,8 +59,12 @@ class Summary < OpenAI::Internal::Type::BaseModel required :type, const: :summary_text # @!method initialize(text:, type: :summary_text) - # @param text [String] - # @param type [Symbol, :summary_text] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningItem::Summary} for more details. + # + # @param text [String] A short summary of the reasoning used by the model when generating ... + # + # @param type [Symbol, :summary_text] The type of the object. Always `summary_text`. ... end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb index a7f42ec3..74d66131 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -35,13 +35,21 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.reasoning_summary_part.added" # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.added") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent} for more + # details. + # # Emitted when a new reasoning summary part is added. # - # @param item_id [String] - # @param output_index [Integer] - # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] - # @param summary_index [Integer] - # @param type [Symbol, :"response.reasoning_summary_part.added"] + # @param item_id [String] The ID of the item this summary part is associated with. ... + # + # @param output_index [Integer] The index of the output item this summary part is associated with. ... + # + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. ... + # + # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # + # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. ... # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent#part class Part < OpenAI::Internal::Type::BaseModel @@ -60,8 +68,9 @@ class Part < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :summary_text) # The summary part that was added. # - # @param text [String] - # @param type [Symbol, :summary_text] + # @param text [String] The text of the summary part. + # + # @param type [Symbol, :summary_text] The type of the summary part. Always `summary_text`. 
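A reasoning summary can contain several parts, each addressed by `summary_index`, so completed parts can be reassembled in order regardless of arrival order. A sketch over the `.done` events (`stream` as before):

```ruby
parts = {}

stream.each do |event|
  next unless event.type == :"response.reasoning_summary_part.done"
  parts[event.summary_index] = event.part.text # Part#text per the model above
end

summary = parts.sort.map { |_index, text| text }.join("\n")
```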
end end end diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb index c868638f..10926c74 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -35,13 +35,21 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.reasoning_summary_part.done" # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent} for more + # details. + # # Emitted when a reasoning summary part is completed. # - # @param item_id [String] - # @param output_index [Integer] - # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] - # @param summary_index [Integer] - # @param type [Symbol, :"response.reasoning_summary_part.done"] + # @param item_id [String] The ID of the item this summary part is associated with. ... + # + # @param output_index [Integer] The index of the output item this summary part is associated with. ... + # + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. ... + # + # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # + # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. ... # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent#part class Part < OpenAI::Internal::Type::BaseModel @@ -60,8 +68,9 @@ class Part < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :summary_text) # The completed summary part. # - # @param text [String] - # @param type [Symbol, :summary_text] + # @param text [String] The text of the summary part. + # + # @param type [Symbol, :summary_text] The type of the summary part. Always `summary_text`. end end end diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb index d816e961..b56b3b80 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -35,13 +35,21 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.reasoning_summary_text.delta" # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary_text.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent} for more + # details. + # # Emitted when a delta is added to a reasoning summary text. # - # @param delta [String] - # @param item_id [String] - # @param output_index [Integer] - # @param summary_index [Integer] - # @param type [Symbol, :"response.reasoning_summary_text.delta"] + # @param delta [String] The text delta that was added to the summary. ... + # + # @param item_id [String] The ID of the item this summary text delta is associated with. ... + # + # @param output_index [Integer] The index of the output item this summary text delta is associated with. ... 
+ # + # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # + # @param type [Symbol, :"response.reasoning_summary_text.delta"] The type of the event. Always `response.reasoning_summary_text.delta`. ... end end end diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb index 93e8cadc..cd78cdba 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -35,13 +35,21 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.reasoning_summary_text.done" # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary_text.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent} for more + # details. + # # Emitted when a reasoning summary text is completed. # - # @param item_id [String] - # @param output_index [Integer] - # @param summary_index [Integer] - # @param text [String] - # @param type [Symbol, :"response.reasoning_summary_text.done"] + # @param item_id [String] The ID of the item this summary text is associated with. ... + # + # @param output_index [Integer] The index of the output item this summary text is associated with. ... + # + # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # + # @param text [String] The full text of the completed reasoning summary. ... + # + # @param type [Symbol, :"response.reasoning_summary_text.done"] The type of the event. Always `response.reasoning_summary_text.done`. ... end end end diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index ea97e622..fb8d6770 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -35,13 +35,20 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.refusal.delta" # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseRefusalDeltaEvent} for more details. + # # Emitted when there is a partial refusal text. # - # @param content_index [Integer] - # @param delta [String] - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.refusal.delta"] + # @param content_index [Integer] The index of the content part that the refusal text is added to. ... + # + # @param delta [String] The refusal text that is added. ... + # + # @param item_id [String] The ID of the output item that the refusal text is added to. ... + # + # @param output_index [Integer] The index of the output item that the refusal text is added to. ... + # + # @param type [Symbol, :"response.refusal.delta"] The type of the event. Always `response.refusal.delta`. ... 
end end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index ee7b4b55..a361e8b3 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -35,13 +35,20 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.refusal.done" # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseRefusalDoneEvent} for more details. + # # Emitted when refusal text is finalized. # - # @param content_index [Integer] - # @param item_id [String] - # @param output_index [Integer] - # @param refusal [String] - # @param type [Symbol, :"response.refusal.done"] + # @param content_index [Integer] The index of the content part that the refusal text is finalized. ... + # + # @param item_id [String] The ID of the output item that the refusal text is finalized. ... + # + # @param output_index [Integer] The index of the output item that the refusal text is finalized. ... + # + # @param refusal [String] The refusal text that is finalized. ... + # + # @param type [Symbol, :"response.refusal.done"] The type of the event. Always `response.refusal.done`. ... end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index bb881916..e4b242f8 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -17,7 +17,11 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } # @!method initialize(include: nil, request_options: {}) - # @param include [Array] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. + # + # @param include [Array] Additional fields to include in the response. See the `include` ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 28c8abee..063252da 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -42,14 +42,22 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.output_text.annotation.added" # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent} for more details. + # # Emitted when a text annotation is added. 
# - # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - # @param annotation_index [Integer] - # @param content_index [Integer] - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.output_text.annotation.added"] + # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. ... + # + # @param annotation_index [Integer] The index of the annotation that was added. ... + # + # @param content_index [Integer] The index of the content part that the text annotation was added to. ... + # + # @param item_id [String] The ID of the output item that the text annotation was added to. ... + # + # @param output_index [Integer] The index of the output item that the text annotation was added to. ... + # + # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. ... # A citation to a file. # @@ -91,11 +99,17 @@ class FileCitation < OpenAI::Internal::Type::BaseModel required :type, const: :file_citation # @!method initialize(file_id:, index:, type: :file_citation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation} + # for more details. + # # A citation to a file. # - # @param file_id [String] - # @param index [Integer] - # @param type [Symbol, :file_citation] + # @param file_id [String] The ID of the file. ... + # + # @param index [Integer] The index of the file in the list of files. ... + # + # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. ... end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -130,13 +144,21 @@ class URLCitation < OpenAI::Internal::Type::BaseModel required :url, String # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation} + # for more details. + # # A citation for a web resource used to generate a model response. # - # @param end_index [Integer] - # @param start_index [Integer] - # @param title [String] - # @param url [String] - # @param type [Symbol, :url_citation] + # @param end_index [Integer] The index of the last character of the URL citation in the message. ... + # + # @param start_index [Integer] The index of the first character of the URL citation in the message. ... + # + # @param title [String] The title of the web resource. ... + # + # @param url [String] The URL of the web resource. ... + # + # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. ... end class FilePath < OpenAI::Internal::Type::BaseModel @@ -159,11 +181,17 @@ class FilePath < OpenAI::Internal::Type::BaseModel required :type, const: :file_path # @!method initialize(file_id:, index:, type: :file_path) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} + # for more details. 
+ # # A path to a file. # - # @param file_id [String] - # @param index [Integer] - # @param type [Symbol, :file_path] + # @param file_id [String] The ID of the file. ... + # + # @param index [Integer] The index of the file in the list of files. ... + # + # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. ... end # @!method self.variants diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 05e2d3db..a67c6449 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -23,13 +23,16 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel optional :format_, union: -> { OpenAI::Models::Responses::ResponseFormatTextConfig }, api_name: :format # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextConfig} for more details. + # # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 9f7744b8..fce5e269 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -35,13 +35,20 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.output_text.delta" # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextDeltaEvent} for more details. + # # Emitted when there is an additional text delta. # - # @param content_index [Integer] - # @param delta [String] - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.output_text.delta"] + # @param content_index [Integer] The index of the content part that the text delta was added to. ... + # + # @param delta [String] The text delta that was added. ... + # + # @param item_id [String] The ID of the output item that the text delta was added to. ... + # + # @param output_index [Integer] The index of the output item that the text delta was added to. ... + # + # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. ... 
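Taken together, the text delta event above and its done counterpart documented just below support a simple accumulate-and-verify pattern. A minimal sketch, again assuming `events` is an Enumerable of parsed event models:

# Hypothetical sketch: accumulate text deltas, then cross-check the final text.
buffers = Hash.new { |h, k| h[k] = String.new }

events.each do |event|
  case event
  when OpenAI::Models::Responses::ResponseTextDeltaEvent
    # Deltas are keyed by output item and content part, per the fields above.
    buffers[[event.item_id, event.content_index]] << event.delta
  when OpenAI::Models::Responses::ResponseTextDoneEvent
    # The done event carries the full text, so the buffer can be verified.
    full = buffers[[event.item_id, event.content_index]]
    warn "delta/done mismatch" unless full == event.text
  end
end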
end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index cea42efc..15bd9abc 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -35,13 +35,20 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.output_text.done" # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextDoneEvent} for more details. + # # Emitted when text content is finalized. # - # @param content_index [Integer] - # @param item_id [String] - # @param output_index [Integer] - # @param text [String] - # @param type [Symbol, :"response.output_text.done"] + # @param content_index [Integer] The index of the content part that the text content is finalized. ... + # + # @param item_id [String] The ID of the output item that the text content is finalized. ... + # + # @param output_index [Integer] The index of the output item that the text content is finalized. ... + # + # @param text [String] The text content that is finalized. ... + # + # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. ... end end end diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index f8a7799f..93b1917a 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -38,11 +38,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # - # @param input_tokens [Integer] - # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] - # @param output_tokens [Integer] - # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] - # @param total_tokens [Integer] + # @param input_tokens [Integer] The number of input tokens. + # + # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. + # + # @param output_tokens [Integer] The number of output tokens. + # + # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. + # + # @param total_tokens [Integer] The total number of tokens used. # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel @@ -54,9 +58,12 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel required :cached_tokens, Integer # @!method initialize(cached_tokens:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseUsage::InputTokensDetails} for more details. + # # A detailed breakdown of the input tokens. # - # @param cached_tokens [Integer] + # @param cached_tokens [Integer] The number of tokens that were retrieved from the cache. ... end # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details @@ -70,7 +77,7 @@ class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(reasoning_tokens:) # A detailed breakdown of the output tokens. 
# - # @param reasoning_tokens [Integer] + # @param reasoning_tokens [Integer] The number of reasoning tokens. end end end diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 59cdab36..5a94a581 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -23,11 +23,17 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.web_search_call.completed" # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent} for more + # details. + # # Emitted when a web search call is completed. # - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.web_search_call.completed"] + # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # + # @param output_index [Integer] The index of the output item that the web search call is associated with. ... + # + # @param type [Symbol, :"response.web_search_call.completed"] The type of the event. Always `response.web_search_call.completed`. ... end end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index 6820c819..2c2c18c0 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -23,11 +23,17 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.web_search_call.in_progress" # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent} for more + # details. + # # Emitted when a web search call is initiated. # - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.web_search_call.in_progress"] + # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # + # @param output_index [Integer] The index of the output item that the web search call is associated with. ... + # + # @param type [Symbol, :"response.web_search_call.in_progress"] The type of the event. Always `response.web_search_call.in_progress`. ... end end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index efa04758..c23044cb 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -23,11 +23,17 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.web_search_call.searching" # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent} for more + # details. + # # Emitted when a web search call is executing. 
# - # @param item_id [String] - # @param output_index [Integer] - # @param type [Symbol, :"response.web_search_call.searching"] + # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # + # @param output_index [Integer] The index of the output item that the web search call is associated with. ... + # + # @param type [Symbol, :"response.web_search_call.searching"] The type of the event. Always `response.web_search_call.searching`. ... end end end diff --git a/lib/openai/models/responses/tool_choice_function.rb b/lib/openai/models/responses/tool_choice_function.rb index 47c1d3ef..67f00750 100644 --- a/lib/openai/models/responses/tool_choice_function.rb +++ b/lib/openai/models/responses/tool_choice_function.rb @@ -19,8 +19,9 @@ class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, type: :function) # Use this option to force the model to call a specific function. # - # @param name [String] - # @param type [Symbol, :function] + # @param name [String] The name of the function to call. + # + # @param type [Symbol, :function] For function calling, the type is always `function`. end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index e51b376e..6ecec732 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -18,10 +18,13 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Responses::ToolChoiceTypes::Type } # @!method initialize(type:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ToolChoiceTypes} for more details. + # # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). # - # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] + # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should use. Learn more about ... # The type of hosted tool the model should use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 09522f82..0885c4db 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -26,12 +26,17 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel optional :user_location, -> { OpenAI::Models::Responses::WebSearchTool::UserLocation }, nil?: true # @!method initialize(type:, search_context_size: nil, user_location: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::WebSearchTool} for more details. + # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # - # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] - # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] + # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of: ... + # + # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the ...
+ # # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] # The type of the web search tool. One of: @@ -100,11 +105,18 @@ class UserLocation < OpenAI::Internal::Type::BaseModel optional :timezone, String # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) - # @param city [String] - # @param country [String] - # @param region [String] - # @param timezone [String] - # @param type [Symbol, :approximate] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::WebSearchTool::UserLocation} for more details. + # + # @param city [String] Free text input for the city of the user, e.g. `San Francisco`. ... + # + # @param country [String] The two-letter ... + # + # @param region [String] Free text input for the region of the user, e.g. `California`. ... + # + # @param timezone [String] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) ... + # + # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. ... end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index c8dc5106..46682393 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -19,8 +19,14 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel required :max_chunk_size_tokens, Integer # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) - # @param chunk_overlap_tokens [Integer] - # @param max_chunk_size_tokens [Integer] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::StaticFileChunkingStrategy} for more details. + # + # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. + # ... + # + # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini + # ... end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 7da9cb50..68d0d88e 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -16,7 +16,8 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # @param type [Symbol, :static] + # + # @param type [Symbol, :static] Always `static`. end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb index b0d5a5b6..f37c6aa6 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -18,7 +18,8 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # Customize your own chunking strategy by setting chunk size and chunk overlap. # # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # @param type [Symbol, :static] + # + # @param type [Symbol, :static] Always `static`. 
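Because the `WebSearchTool` and `UserLocation` parameter docs just above are truncated, a concrete construction may help. A minimal sketch with hypothetical values: the `:web_search_preview` type symbol is an assumption (the `Type` enum members are elided above), while the location fields reuse the examples quoted in the docs.

# Hypothetical sketch; model names are the ones documented above.
tool = OpenAI::Models::Responses::WebSearchTool.new(
  type: :web_search_preview,   # assumed member of WebSearchTool::Type
  search_context_size: :medium,
  user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation.new(
    city: "San Francisco",           # free text, per the docs below
    country: "US",                   # two-letter country code
    region: "California",
    timezone: "America/Los_Angeles"  # IANA timezone
  )
)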
end end end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 53ae0a23..39d5043e 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -61,17 +61,29 @@ class Upload < OpenAI::Internal::Type::BaseModel optional :file, -> { OpenAI::Models::FileObject }, nil?: true # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) + # Some parameter documentations has been truncated, see {OpenAI::Models::Upload} + # for more details. + # # The Upload object can accept byte chunks in the form of Parts. # - # @param id [String] - # @param bytes [Integer] - # @param created_at [Integer] - # @param expires_at [Integer] - # @param filename [String] - # @param purpose [String] - # @param status [Symbol, OpenAI::Models::Upload::Status] - # @param file [OpenAI::Models::FileObject, nil] - # @param object [Symbol, :upload] + # @param id [String] The Upload unique identifier, which can be referenced in API endpoints. + # + # @param bytes [Integer] The intended number of bytes to be uploaded. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the Upload was created. + # + # @param expires_at [Integer] The Unix timestamp (in seconds) for when the Upload will expire. + # + # @param filename [String] The name of the file to be uploaded. + # + # @param purpose [String] The intended purpose of the file. [Please refer here](https://platform.openai.co + # ... + # + # @param status [Symbol, OpenAI::Models::Upload::Status] The status of the Upload. + # + # @param file [OpenAI::Models::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. + # + # @param object [Symbol, :upload] The object type, which is always "upload". # The status of the Upload. # diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index b978c7a2..3c898f46 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -21,8 +21,14 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel optional :md5, String # @!method initialize(part_ids:, md5: nil, request_options: {}) - # @param part_ids [Array] - # @param md5 [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::UploadCompleteParams} for more details. + # + # @param part_ids [Array] The ordered list of Part IDs. ... + # + # @param md5 [String] The optional md5 checksum for the file contents to verify if the bytes uploaded + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index afa6ec61..cafa9448 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -38,10 +38,17 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel required :purpose, enum: -> { OpenAI::Models::FilePurpose } # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) - # @param bytes [Integer] - # @param filename [String] - # @param mime_type [String] - # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::UploadCreateParams} for more details. + # + # @param bytes [Integer] The number of bytes in the file you are uploading. ... + # + # @param filename [String] The name of the file to upload. ... 
+ # + # @param mime_type [String] The MIME type of the file. ... + # + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 9294e4c0..d4e1d7bf 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -15,7 +15,11 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel required :data, OpenAI::Internal::Type::IOLike # @!method initialize(data:, request_options: {}) - # @param data [Pathname, StringIO] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Uploads::PartCreateParams} for more details. + # + # @param data [Pathname, StringIO] The chunk of bytes for this Part. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/models/uploads/upload_part.rb b/lib/openai/models/uploads/upload_part.rb index fa189119..f0b61875 100644 --- a/lib/openai/models/uploads/upload_part.rb +++ b/lib/openai/models/uploads/upload_part.rb @@ -32,10 +32,13 @@ class UploadPart < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, upload_id:, object: :"upload.part") # The upload Part represents a chunk of bytes we can add to an Upload object. # - # @param id [String] - # @param created_at [Integer] - # @param upload_id [String] - # @param object [Symbol, :"upload.part"] + # @param id [String] The upload Part unique identifier, which can be referenced in API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the Part was created. + # + # @param upload_id [String] The ID of the Upload object that this Part was added to. + # + # @param object [Symbol, :"upload.part"] The object type, which is always `upload.part`. end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 3db76e67..285ac28c 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -77,20 +77,34 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :expires_at, Integer, nil?: true # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStore} for more details. + # # A vector store is a collection of processed files that can be used by the # `file_search` tool. # - # @param id [String] - # @param created_at [Integer] + # @param id [String] The identifier, which can be referenced in API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store was created. + # # @param file_counts [OpenAI::Models::VectorStore::FileCounts] - # @param last_active_at [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] - # @param status [Symbol, OpenAI::Models::VectorStore::Status] - # @param usage_bytes [Integer] - # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] - # @param expires_at [Integer, nil] - # @param object [Symbol, :vector_store] + # + # @param last_active_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store was last active.
+ # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the vector store. + # + # @param status [Symbol, OpenAI::Models::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or + # ... + # + # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. + # + # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] The expiration policy for a vector store. + # + # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. + # + # @param object [Symbol, :vector_store] The object type, which is always `vector_store`. # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -125,11 +139,15 @@ class FileCounts < OpenAI::Internal::Type::BaseModel required :total, Integer # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) - # @param cancelled [Integer] - # @param completed [Integer] - # @param failed [Integer] - # @param in_progress [Integer] - # @param total [Integer] + # @param cancelled [Integer] The number of files that were cancelled. + # + # @param completed [Integer] The number of files that have been successfully processed. + # + # @param failed [Integer] The number of files that have failed to process. + # + # @param in_progress [Integer] The number of files that are currently being processed. + # + # @param total [Integer] The total number of files. end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -164,10 +182,15 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel required :days, Integer # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStore::ExpiresAfter} for more details. + # # The expiration policy for a vector store. # - # @param days [Integer] - # @param anchor [Symbol, :last_active_at] + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + # ... end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 30eb84d9..acbd89a2 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -46,11 +46,21 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel optional :name, String # @!method initialize(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] - # @param file_ids [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreCreateParams} for more details. + # + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... 
+ # + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # + # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the vector store. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -68,10 +78,15 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel required :days, Integer # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreCreateParams::ExpiresAfter} for more details. + # # The expiration policy for a vector store. # - # @param days [Integer] - # @param anchor [Symbol, :last_active_at] + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + # ... end end end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index caa16c07..9a7d787d 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -40,10 +40,21 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::VectorStoreListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) - # @param after [String] - # @param before [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 808daadb..5eaa270d 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -39,11 +39,20 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel optional :rewrite_query, OpenAI::Internal::Type::Boolean # @!method initialize(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) - # @param query [String, Array] - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # @param max_num_results [Integer] - # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] - # @param rewrite_query [Boolean] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreSearchParams} for more details. + # + # @param query [String, Array] A query string for a search + # + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. + # + # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 + # ... + # + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # + # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # A query string for a search diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index d06a0c95..75e2fdb5 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -42,11 +42,18 @@ class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel required :score, Float # @!method initialize(attributes:, content:, file_id:, filename:, score:) - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # @param content [Array] - # @param file_id [String] - # @param filename [String] - # @param score [Float] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreSearchResponse} for more details. + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param content [Array] Content chunks from the file. + # + # @param file_id [String] The ID of the vector store file. + # + # @param filename [String] The name of the vector store file. + # + # @param score [Float] The similarity score for the result. module Attribute extend OpenAI::Internal::Type::Union @@ -75,8 +82,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::VectorStoreSearchResponse::Content::Type } # @!method initialize(text:, type:) - # @param text [String] - # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] + # @param text [String] The text content returned from search. + # + # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] The type of content. # The type of content. 
# diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 91eafa78..b7d6bd13 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -31,9 +31,15 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel optional :name, String, nil?: true # @!method initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}) - # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String, nil] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreUpdateParams} for more details. + # + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String, nil] The name of the vector store. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -51,10 +57,15 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel required :days, Integer # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter} for more details. + # # The expiration policy for a vector store. # - # @param days [Integer] - # @param anchor [Symbol, :last_active_at] + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + # ... end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 9a873239..fba3d18a 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -36,9 +36,17 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) - # @param file_ids [Array] - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::FileBatchCreateParams} for more details. + # + # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # ... + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... 
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 543b49c7..fc97c2c0 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -52,12 +52,26 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Order } # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::FileBatchListFilesParams} for more details. + # # @param vector_store_id [String] - # @param after [String] - # @param before [String] - # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index 094dcbfb..8c9f595a 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -18,8 +18,9 @@ class FileContentResponse < OpenAI::Internal::Type::BaseModel optional :type, String # @!method initialize(text: nil, type: nil) - # @param text [String] - # @param type [String] + # @param text [String] The text content + # + # @param type [String] The content type (currently only `"text"`) end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 2d0fc3ea..d5e71148 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -36,9 +36,17 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) - # @param file_id [String] - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::FileCreateParams} for more details. 
+ # + # @param file_id [String] A [File](https://platform.openai.com/docs/api-reference/files) ID that the vecto + # ... + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 551d5a17..55dd58ce 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -47,11 +47,24 @@ class FileListParams < OpenAI::Internal::Type::BaseModel optional :order, enum: -> { OpenAI::Models::VectorStores::FileListParams::Order } # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) - # @param after [String] - # @param before [String] - # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::FileListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index f054fa7a..cb4600bd 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -26,8 +26,13 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel nil?: true # @!method initialize(vector_store_id:, attributes:, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::FileUpdateParams} for more details. + # # @param vector_store_id [String] - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 4891ed5a..66754ed9 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -73,17 +73,32 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategy } # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::VectorStoreFile} for more details. + # # A list of files attached to a vector store. # - # @param id [String] - # @param created_at [Integer] - # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] - # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] - # @param usage_bytes [Integer] - # @param vector_store_id [String] - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - # @param object [Symbol, :"vector_store.file"] + # @param id [String] The identifier, which can be referenced in API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store file was created. + # + # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a + # ... + # + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet + # ... + # + # @param usage_bytes [Integer] The total vector store usage in bytes. Note that this may be different from the + # ... + # + # @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect + # ... + # + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] The strategy used to chunk the file. + # + # @param object [Symbol, :"vector_store.file"] The object type, which is always `vector_store.file`. # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -103,8 +118,9 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this vector store file. Will be `null` if there # are no errors. # - # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] - # @param message [String] + # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # + # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. 
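Several of the vector-store parameter docs above reference attributes and chunking strategies, so a worked construction may help. A minimal sketch with hypothetical values: the file ID is made up, and the token counts are the defaults quoted in the `StaticFileChunkingStrategy` docs earlier in this patch.

# Hypothetical sketch; types are the ones documented in this patch.
chunking = OpenAI::Models::StaticFileChunkingStrategyObjectParam.new(
  static: OpenAI::Models::StaticFileChunkingStrategy.new(
    chunk_overlap_tokens: 400,  # default per the docs above
    max_chunk_size_tokens: 800  # default per the docs above
  )
)

params = OpenAI::Models::VectorStores::FileCreateParams.new(
  file_id: "file-abc123",                       # hypothetical file ID
  attributes: {author: "jane", draft: false},   # up to 16 key-value pairs
  chunking_strategy: chunking
)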
# diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index ee7fb5ea..86d56390 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -46,14 +46,25 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel required :vector_store_id, String # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStores::VectorStoreFileBatch} for more details. + # # A batch of files attached to a vector store. # - # @param id [String] - # @param created_at [Integer] + # @param id [String] The identifier, which can be referenced in API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store files batch was create + # ... + # # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] - # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] - # @param vector_store_id [String] - # @param object [Symbol, :"vector_store.files_batch"] + # + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, ` + # ... + # + # @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect + # ... + # + # @param object [Symbol, :"vector_store.files_batch"] The object type, which is always `vector_store.file_batch`. # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -88,11 +99,15 @@ class FileCounts < OpenAI::Internal::Type::BaseModel required :total, Integer # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) - # @param cancelled [Integer] - # @param completed [Integer] - # @param failed [Integer] - # @param in_progress [Integer] - # @param total [Integer] + # @param cancelled [Integer] The number of files that were cancelled. + # + # @param completed [Integer] The number of files that have been processed. + # + # @param failed [Integer] The number of files that have failed to process. + # + # @param in_progress [Integer] The number of files that are currently being processed. + # + # @param total [Integer] The total number of files. end # The status of the vector store files batch, which can be either `in_progress`, diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 6a31ca99..cd1044ab 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -4,16 +4,30 @@ module OpenAI module Resources class Audio class Speech + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Audio::SpeechCreateParams} for more details. + # # Generates audio from the input text.
#
 # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {})
 #
- # @param input [String]
- # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel]
- # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice]
- # @param instructions [String]
- # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat]
- # @param speed [Float]
+ # @param input [String] The text to generate audio for. The maximum length is 4096 characters.
+ #
+ # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts):
+ # ...
+ #
+ # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+ # ...
+ #
+ # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not
+ # ...
+ #
+ # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to return audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav
+ # ...
+ #
+ # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [StringIO]
diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb
index bdde1310..d7f5de3e 100644
--- a/lib/openai/resources/audio/transcriptions.rb
+++ b/lib/openai/resources/audio/transcriptions.rb
@@ -7,18 +7,36 @@ class Transcriptions
 # See {OpenAI::Resources::Audio::Transcriptions#create_streaming} for streaming
 # counterpart.
 #
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details.
+ #
 # Transcribes audio into the input language.
 #
 # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
 #
- # @param file [Pathname, StringIO]
- # @param model [String, Symbol, OpenAI::Models::AudioModel]
- # @param include [Array]
- # @param language [String]
- # @param prompt [String]
- # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat]
- # @param temperature [Float]
- # @param timestamp_granularities [Array]
+ # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl
+ # ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
+ # ...
+ #
+ # @param include [Array] Additional information to include in the transcription response. ...
+ #
+ # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt
+ # ...
+ #
+ # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
+ # ...
+ #
+ # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
+ # ...
+ #
+ # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
+ # ...
+ #
+ # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
@@ -43,18 +61,36 @@ def create(params)
 # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming
 # counterpart.
 #
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details.
+ #
 # Transcribes audio into the input language.
 #
 # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
 #
- # @param file [Pathname, StringIO]
- # @param model [String, Symbol, OpenAI::Models::AudioModel]
- # @param include [Array]
- # @param language [String]
- # @param prompt [String]
- # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat]
- # @param temperature [Float]
- # @param timestamp_granularities [Array]
+ # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl
+ # ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
+ # ...
+ #
+ # @param include [Array] Additional information to include in the transcription response. ...
+ #
+ # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt
+ # ...
+ #
+ # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
+ # ...
+ #
+ # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
+ # ...
+ #
+ # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
+ # ...
+ #
+ # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Internal::Stream]
diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb
index 418e58af..d52be7ea 100644
--- a/lib/openai/resources/audio/translations.rb
+++ b/lib/openai/resources/audio/translations.rb
@@ -4,15 +4,28 @@ module OpenAI
 module Resources
 class Audio
 class Translations
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Audio::TranslationCreateParams} for more details.
+ #
 # Translates audio into English.
 #
 # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {})
 #
- # @param file [Pathname, StringIO]
- # @param model [String, Symbol, OpenAI::Models::AudioModel]
- # @param prompt [String]
- # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat]
- # @param temperature [Float]
+ # @param file [Pathname, StringIO] The audio file object (not file name) to translate, in one of these formats: flac,
+ # ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh
+ # ...
+ #
+ # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
+ # ...
+ # + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # ... + # + # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 84f5ad93..d63aac23 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -3,14 +3,23 @@ module OpenAI module Resources class Batches + # Some parameter documentations has been truncated, see + # {OpenAI::Models::BatchCreateParams} for more details. + # # Creates and executes a batch from an uploaded file of requests # # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) # - # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] - # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] - # @param input_file_id [String] - # @param metadata [Hash{Symbol=>String}, nil] + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` + # ... + # + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # ... + # + # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] @@ -31,7 +40,8 @@ def create(params) # # @overload retrieve(batch_id, request_options: {}) # - # @param batch_id [String] + # @param batch_id [String] The ID of the batch to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] @@ -46,12 +56,19 @@ def retrieve(batch_id, params = {}) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::BatchListParams} for more details. + # # List your organization's batches. # # @overload list(after: nil, limit: nil, request_options: {}) # - # @param after [String] - # @param limit [Integer] + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -75,7 +92,8 @@ def list(params = {}) # # @overload cancel(batch_id, request_options: {}) # - # @param batch_id [String] + # @param batch_id [String] The ID of the batch to cancel. 
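The `cancel` parameter docs continue below; pulling the batch methods documented in this hunk together, a hypothetical round trip could look like this sketch (IDs and values invented; keyword names and enum literals are taken from the patch):

    openai = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    batch = openai.batches.create(
      completion_window: :"24h",           # only 24h is currently supported
      endpoint: :"/v1/chat/completions",
      input_file_id: "file-abc123"         # hypothetical uploaded JSONL file
    )
    batch = openai.batches.retrieve(batch.id)
    openai.batches.cancel(batch.id) if batch.status == :in_progress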
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Models::Batch]
diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb
index a7f952e7..303843a0 100644
--- a/lib/openai/resources/beta/assistants.rb
+++ b/lib/openai/resources/beta/assistants.rb
@@ -4,21 +4,42 @@ module OpenAI
 module Resources
 class Beta
 class Assistants
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::AssistantCreateParams} for more details.
+ #
 # Create an assistant with a model and instructions.
 #
 # @overload create(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {})
 #
- # @param model [String, Symbol, OpenAI::Models::ChatModel]
- # @param description [String, nil]
- # @param instructions [String, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param name [String, nil]
- # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil]
- # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
- # @param temperature [Float, nil]
- # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil]
- # @param tools [Array]
- # @param top_p [Float, nil]
+ # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co
+ # ...
+ #
+ # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ...
+ #
+ # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ...
+ #
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ...
+ #
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
+ # ...
+ #
+ # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
+ # ...
+ #
+ # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
+ # ...
+ #
+ # @param tools [Array] A list of tools enabled on the assistant. There can be a maximum of 128 tools per
+ # ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Models::Beta::Assistant]
@@ -39,7 +60,8 @@ def create(params)
 #
 # @overload retrieve(assistant_id, request_options: {})
 #
- # @param assistant_id [String]
+ # @param assistant_id [String] The ID of the assistant to retrieve.
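The `retrieve` docs continue below; as an illustration of the `create` and `retrieve` methods documented in this hunk, a hypothetical call might look like this (values invented; passing `tools` as a plain hash and relying on SDK coercion is an assumption):

    openai = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    assistant = openai.beta.assistants.create(
      model: :"gpt-4o",
      name: "Math Tutor",
      instructions: "Answer concisely and show your work.",
      tools: [{type: :code_interpreter}]
    )
    assistant = openai.beta.assistants.retrieve(assistant.id)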
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Models::Beta::Assistant]
@@ -54,22 +76,44 @@ def retrieve(assistant_id, params = {})
 )
 end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::AssistantUpdateParams} for more details.
+ #
 # Modifies an assistant.
 #
 # @overload update(assistant_id, description: nil, instructions: nil, metadata: nil, model: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {})
 #
- # @param assistant_id [String]
- # @param description [String, nil]
- # @param instructions [String, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model]
- # @param name [String, nil]
- # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil]
- # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil]
- # @param temperature [Float, nil]
- # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil]
- # @param tools [Array]
- # @param top_p [Float, nil]
+ # @param assistant_id [String] The ID of the assistant to modify.
+ #
+ # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ...
+ #
+ # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co
+ # ...
+ #
+ # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ...
+ #
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ...
+ #
+ # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
+ # ...
+ #
+ # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
+ # ...
+ #
+ # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
+ # ...
+ #
+ # @param tools [Array] A list of tools enabled on the assistant. There can be a maximum of 128 tools per
+ # ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
+ # ...
+ #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
 #
 # @return [OpenAI::Models::Beta::Assistant]
@@ -86,14 +130,25 @@ def update(assistant_id, params = {})
 )
 end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Beta::AssistantListParams} for more details.
+ #
 # Returns a list of assistants.
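The `list` parameter docs follow below; a sketch of paging through assistants, reusing the `openai` client from the sketches above (treating `auto_paging_each` as the cursor-pagination helper on the returned page is an assumption):

    page = openai.beta.assistants.list(limit: 20, order: :desc)
    page.auto_paging_each do |assistant|
      puts "#{assistant.id}: #{assistant.name}"
    end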
# # @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # - # @param after [String] - # @param before [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -115,7 +170,8 @@ def list(params = {}) # # @overload delete(assistant_id, request_options: {}) # - # @param assistant_id [String] + # @param assistant_id [String] The ID of the assistant to delete. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::AssistantDeleted] diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index b72ad867..6203cb42 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -10,13 +10,21 @@ class Threads # @return [OpenAI::Resources::Beta::Threads::Messages] attr_reader :messages + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateParams} for more details. + # # Create a thread. # # @overload create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # - # @param messages [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] @@ -37,7 +45,8 @@ def create(params = {}) # # @overload retrieve(thread_id, request_options: {}) # - # @param thread_id [String] + # @param thread_id [String] The ID of the thread to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] @@ -52,13 +61,20 @@ def retrieve(thread_id, params = {}) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadUpdateParams} for more details. + # # Modifies a thread. # # @overload update(thread_id, metadata: nil, tool_resources: nil, request_options: {}) # - # @param thread_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] + # @param thread_id [String] The ID of the thread to modify. Only the `metadata` can be modified. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
+ # + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Thread] @@ -79,7 +95,8 @@ def update(thread_id, params = {}) # # @overload delete(thread_id, request_options: {}) # - # @param thread_id [String] + # @param thread_id [String] The ID of the thread to delete. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::ThreadDeleted] @@ -96,25 +113,55 @@ def delete(thread_id, params = {}) # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams} for more details. + # # Create a thread and run it in one request. # # @overload create_and_run(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # - # @param assistant_id [String] - # @param instructions [String, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_prompt_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # @param parallel_tool_calls [Boolean] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - # @param tools [Array, nil] - # @param top_p [Float, nil] - # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista + # ... + # + # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi + # ... + # + # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the + # ... + # + # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run. + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # ... 
+ # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ... + # + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # ... + # + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -138,25 +185,55 @@ def create_and_run(params) # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming # counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams} for more details. + # # Create a thread and run it in one request. # # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # - # @param assistant_id [String] - # @param instructions [String, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_prompt_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # @param parallel_tool_calls [Boolean] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - # @param tools [Array, nil] - # @param top_p [Float, nil] - # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista + # ... + # + # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi + # ... + # + # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the + # ... + # + # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run. + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
+ # + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ... + # + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... + # + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # ... + # + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 81de13fc..d9191639 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -5,15 +5,24 @@ module Resources class Beta class Threads class Messages + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. + # # Create a message. # # @overload create(thread_id, content:, role:, attachments: nil, metadata: nil, request_options: {}) # - # @param thread_id [String] - # @param content [String, Array] - # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] - # @param attachments [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] + # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t + # ... + # + # @param content [String, Array] The text contents of the message. + # + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: ... + # + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] @@ -30,12 +39,18 @@ def create(thread_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::MessageRetrieveParams} for more details. 
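The `retrieve` docs continue below; a hypothetical thread-plus-message sketch built from the `create` overload documented in this hunk (content and role values invented, reusing the `openai` client from the earlier sketches):

    thread = openai.beta.threads.create
    message = openai.beta.threads.messages.create(
      thread.id,
      content: "What is 2 + 2?",
      role: :user
    )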
+ # # Retrieve a message. # # @overload retrieve(message_id, thread_id:, request_options: {}) # - # @param message_id [String] - # @param thread_id [String] + # @param message_id [String] The ID of the message to retrieve. + # + # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] @@ -55,13 +70,20 @@ def retrieve(message_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::MessageUpdateParams} for more details. + # # Modifies a message. # # @overload update(message_id, thread_id:, metadata: nil, request_options: {}) # - # @param message_id [String] - # @param thread_id [String] - # @param metadata [Hash{Symbol=>String}, nil] + # @param message_id [String] Path param: The ID of the message to modify. + # + # @param thread_id [String] Path param: The ID of the thread to which this message belongs. + # + # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Message] @@ -82,16 +104,30 @@ def update(message_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::MessageListParams} for more details. + # # Returns a list of messages for a given thread. # # @overload list(thread_id, after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) # - # @param thread_id [String] - # @param after [String] - # @param before [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] - # @param run_id [String] + # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t + # ... + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # + # @param run_id [String] Filter messages by the run ID that generated them. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -113,8 +149,10 @@ def list(thread_id, params = {}) # # @overload delete(message_id, thread_id:, request_options: {}) # - # @param message_id [String] - # @param thread_id [String] + # @param message_id [String] The ID of the message to delete. + # + # @param thread_id [String] The ID of the thread to which this message belongs. 
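The `delete` docs continue below; a hypothetical follow-up to the sketch above, using the `update` and `delete` overloads documented in this hunk (the metadata value is invented):

    openai.beta.threads.messages.update(
      message.id,
      thread_id: thread.id,
      metadata: {flagged: "true"}   # metadata values are strings per the docs above
    )
    openai.beta.threads.messages.delete(message.id, thread_id: thread.id)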
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::MessageDeleted] diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 8ebefd9c..1dfcaeb2 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -11,28 +11,63 @@ class Runs # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming # counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunCreateParams} for more details. + # # Create a run. # # @overload create(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # - # @param thread_id [String] - # @param assistant_id [String] - # @param include [Array] - # @param additional_instructions [String, nil] - # @param additional_messages [Array, nil] - # @param instructions [String, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_prompt_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # @param parallel_tool_calls [Boolean] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # @param tools [Array, nil] - # @param top_p [Float, nil] - # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @param thread_id [String] Path param: The ID of the thread to run. + # + # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer + # ... + # + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # ... + # + # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t + # ... + # + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # + # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re + # ... + # + # @param max_completion_tokens [Integer, nil] Body param: The maximum number of completion tokens that may be used over the co + # ... + # + # @param max_prompt_tokens [Integer, nil] Body param: The maximum number of prompt tokens that may be used over the course + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca + # ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # ... + # + # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena + # ... 
+ # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # ... + # + # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik + # ... + # + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. ... + # + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu + # ... + # + # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling + # ... + # + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -58,28 +93,63 @@ def create(thread_id, params) # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming # counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunCreateParams} for more details. + # # Create a run. # # @overload create_stream_raw(thread_id, assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # - # @param thread_id [String] - # @param assistant_id [String] - # @param include [Array] - # @param additional_instructions [String, nil] - # @param additional_messages [Array, nil] - # @param instructions [String, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_prompt_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # @param parallel_tool_calls [Boolean] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # @param tools [Array, nil] - # @param top_p [Float, nil] - # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @param thread_id [String] Path param: The ID of the thread to run. + # + # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer + # ... + # + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # ... + # + # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t + # ... 
+ # + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # + # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re + # ... + # + # @param max_completion_tokens [Integer, nil] Body param: The maximum number of completion tokens that may be used over the co + # ... + # + # @param max_prompt_tokens [Integer, nil] Body param: The maximum number of prompt tokens that may be used over the course + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca + # ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # ... + # + # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena + # ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** ... + # + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # ... + # + # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik + # ... + # + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. ... + # + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu + # ... + # + # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling + # ... + # + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] @@ -105,12 +175,18 @@ def create_stream_raw(thread_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunRetrieveParams} for more details. + # # Retrieves a run. # # @overload retrieve(run_id, thread_id:, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] + # @param run_id [String] The ID of the run to retrieve. + # + # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -130,13 +206,21 @@ def retrieve(run_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunUpdateParams} for more details. + # # Modifies a run. # # @overload update(run_id, thread_id:, metadata: nil, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] - # @param metadata [Hash{Symbol=>String}, nil] + # @param run_id [String] Path param: The ID of the run to modify. + # + # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc + # ... 
+ # + # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -157,15 +241,27 @@ def update(run_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunListParams} for more details. + # # Returns a list of runs belonging to a thread. # # @overload list(thread_id, after: nil, before: nil, limit: nil, order: nil, request_options: {}) # - # @param thread_id [String] - # @param after [String] - # @param before [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] + # @param thread_id [String] The ID of the thread the run belongs to. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -187,8 +283,10 @@ def list(thread_id, params = {}) # # @overload cancel(run_id, thread_id:, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] + # @param run_id [String] The ID of the run to cancel. + # + # @param thread_id [String] The ID of the thread to which this run belongs. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -211,6 +309,9 @@ def cancel(run_id, params) # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams} for more details. + # # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single @@ -218,9 +319,13 @@ def cancel(run_id, params) # # @overload submit_tool_outputs(run_id, thread_id:, tool_outputs:, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] - # @param tool_outputs [Array] + # @param run_id [String] Path param: The ID of the run that requires the tool output submission. + # + # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc + # ... + # + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Run] @@ -248,6 +353,9 @@ def submit_tool_outputs(run_id, params) # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for # non-streaming counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams} for more details. 
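The streaming variant's docs continue below; a hypothetical non-streaming tool-output flow assembled from the `create` and `submit_tool_outputs` overloads documented in this file (IDs, the status check, and output values are invented for illustration):

    run = openai.beta.threads.runs.create(thread.id, assistant_id: assistant.id)
    if run.status == :requires_action
      run = openai.beta.threads.runs.submit_tool_outputs(
        run.id,
        thread_id: thread.id,
        tool_outputs: [{tool_call_id: "call_abc123", output: "70 degrees"}]
      )
    end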
+ # # When a run has the `status: "requires_action"` and `required_action.type` is # `submit_tool_outputs`, this endpoint can be used to submit the outputs from the # tool calls once they're all completed. All outputs must be submitted in a single @@ -255,9 +363,13 @@ def submit_tool_outputs(run_id, params) # # @overload submit_tool_outputs_stream_raw(run_id, thread_id:, tool_outputs:, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] - # @param tool_outputs [Array] + # @param run_id [String] Path param: The ID of the run that requires the tool output submission. + # + # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc + # ... + # + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 65972e48..21bbd9a5 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -6,14 +6,22 @@ class Beta class Threads class Runs class Steps + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams} for more details. + # # Retrieves a run step. # # @overload retrieve(step_id, thread_id:, run_id:, include: nil, request_options: {}) # - # @param step_id [String] - # @param thread_id [String] - # @param run_id [String] - # @param include [Array] + # @param step_id [String] Path param: The ID of the run step to retrieve. + # + # @param thread_id [String] Path param: The ID of the thread to which the run and run step belongs. + # + # @param run_id [String] Path param: The ID of the run to which the run step belongs. + # + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] @@ -38,17 +46,32 @@ def retrieve(step_id, params) ) end + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Runs::StepListParams} for more details. + # # Returns a list of run steps belonging to a run. # # @overload list(run_id, thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # - # @param run_id [String] - # @param thread_id [String] - # @param after [String] - # @param before [String] - # @param include [Array] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] + # @param run_id [String] Path param: The ID of the run the run steps belong to. + # + # @param thread_id [String] Path param: The ID of the thread the run and run steps belong to. + # + # @param after [String] Query param: A cursor for use in pagination. `after` is an object ID that define + # ... + # + # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin + # ... + # + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # ... + # + # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be + # ... 
+ # + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 499c9f52..c8b2b3ee 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -9,6 +9,9 @@ class Completions # See {OpenAI::Resources::Chat::Completions#stream_raw} for streaming counterpart. # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams} for more details. + # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare @@ -29,36 +32,73 @@ class Completions # # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel] - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - # @param frequency_penalty [Float, nil] - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # @param functions [Array] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Boolean, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param modalities [Array, nil] - # @param n [Integer, nil] - # @param parallel_tool_calls [Boolean] - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - # @param presence_penalty [Float, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # @param seed [Integer, nil] - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - # @param stop [String, Array, nil] - # @param store [Boolean, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # @param tools [Array] - # @param top_logprobs [Integer, nil] - # @param top_p [Float, nil] - # @param user [String] - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... 
+ # + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # + # @param functions [Array] Deprecated in favor of `tools`. ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # + # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param modalities [Array, nil] Output types that you would like the model to generate. ... + # + # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # + # @param seed [Integer, nil] This feature is in Beta. ... + # + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # ... + # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... 
+ # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] @@ -81,6 +121,9 @@ def create(params) # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. # + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Chat::CompletionCreateParams} for more details. + # # **Starting a new project?** We recommend trying # [Responses](https://platform.openai.com/docs/api-reference/responses) to take # advantage of the latest OpenAI platform features. Compare # [Chat Completions with Responses](https://platform.openai.com/docs/guides/responses-vs-chat-completions). # # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel] - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - # @param frequency_penalty [Float, nil] - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # @param functions [Array] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Boolean, nil] - # @param max_completion_tokens [Integer, nil] - # @param max_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param modalities [Array, nil] - # @param n [Integer, nil] - # @param parallel_tool_calls [Boolean] - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - # @param presence_penalty [Float, nil] - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # @param seed [Integer, nil] - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - # @param stop [String, Array, nil] - # @param store [Boolean, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param temperature [Float, nil] - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # @param tools [Array] - # @param top_logprobs [Integer, nil] - # @param top_p [Float, nil] - # @param user [String] - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output.
Required when audio output is requested with ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # + # @param functions [Array] Deprecated in favor of `tools`. ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # + # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param modalities [Array, nil] Output types that you would like the model to generate. ... + # + # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y + # ... + # + # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g + # ... + # + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # + # @param seed [Integer, nil] This feature is in Beta. ... + # + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # ... + # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... 
+ # + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] @@ -159,7 +239,8 @@ def stream_raw(params) # # @overload retrieve(completion_id, request_options: {}) # - # @param completion_id [String] + # @param completion_id [String] The ID of the chat completion to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] @@ -174,14 +255,19 @@ def retrieve(completion_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Chat::CompletionUpdateParams} for more details. + # # Modify a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be modified. Currently, the only # supported modification is to update the `metadata` field. # # @overload update(completion_id, metadata:, request_options: {}) # - # @param completion_id [String] - # @param metadata [Hash{Symbol=>String}, nil] + # @param completion_id [String] The ID of the chat completion to update. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletion] @@ -198,16 +284,25 @@ def update(completion_id, params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Chat::CompletionListParams} for more details. + # # List stored Chat Completions. Only Chat Completions that have been stored with # the `store` parameter set to `true` will be returned. # # @overload list(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # - # @param after [String] - # @param limit [Integer] - # @param metadata [Hash{Symbol=>String}, nil] - # @param model [String] - # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] + # @param after [String] Identifier for the last chat completion from the previous pagination request. + # + # @param limit [Integer] Number of Chat Completions to retrieve. + # + # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: ... + # + # @param model [String] The model used to generate the Chat Completions. + # + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -230,7 +325,8 @@ def list(params = {}) # # @overload delete(completion_id, request_options: {}) # - # @param completion_id [String] + # @param completion_id [String] The ID of the chat completion to delete.
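Taken together, the create/retrieve/update/list/delete surface documented in this hunk composes naturally. A minimal usage sketch follows; the `OpenAI::Client.new(api_key:)` constructor and the `client.chat.completions` accessor chain are assumptions inferred from the resource layout, while the method names and parameters come from the docs above.

```ruby
require "openai"

# Assumed constructor; the accessor chain mirrors lib/openai/resources/chat/completions.rb.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# `create` returns an OpenAI::Models::Chat::ChatCompletion.
completion = client.chat.completions.create(
  messages: [{role: :user, content: "Say hello in one word."}],
  model: "gpt-4o",
  store: true # stored completions can be retrieved/updated/listed/deleted later
)

# Stored completions are addressable by ID.
fetched = client.chat.completions.retrieve(completion.id)
client.chat.completions.update(completion.id, metadata: {reviewed: "yes"})
```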
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Chat::ChatCompletionDeleted] diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index ab3a3e19..a1dd20ca 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -5,15 +5,23 @@ module Resources class Chat class Completions class Messages + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Chat::Completions::MessageListParams} for more details. + # # Get the messages in a stored chat completion. Only Chat Completions that have # been created with the `store` parameter set to `true` will be returned. # # @overload list(completion_id, after: nil, limit: nil, order: nil, request_options: {}) # - # @param completion_id [String] - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] + # @param completion_id [String] The ID of the chat completion to retrieve messages from. + # + # @param after [String] Identifier for the last message from the previous pagination request. + # + # @param limit [Integer] Number of messages to retrieve. + # + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 859c6b7f..7b563203 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -5,27 +5,58 @@ module Resources class Completions # See {OpenAI::Resources::Completions#create_streaming} for streaming counterpart. # + # Some parameter documentation has been truncated; see + # {OpenAI::Models::CompletionCreateParams} for more details. + # # Creates a completion for the provided prompt and parameters. # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - # @param prompt [String, Array, Array, Array>, nil] - # @param best_of [Integer, nil] - # @param echo [Boolean, nil] - # @param frequency_penalty [Float, nil] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Integer, nil] - # @param max_tokens [Integer, nil] - # @param n [Integer, nil] - # @param presence_penalty [Float, nil] - # @param seed [Integer, nil] - # @param stop [String, Array, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param suffix [String, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param user [String] + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... + # + # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings + # ...
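The Messages sub-resource documented just above pages through the turns of a stored completion. A short sketch, reusing the `client` from the earlier sketch; the completion ID is a placeholder, and the `#data` accessor on the returned `CursorPage` is an assumption.

```ruby
page = client.chat.completions.messages.list(
  "chatcmpl-abc123", # placeholder ID of a stored chat completion
  limit: 50,
  order: :asc
)
page.data.each { |message| puts message.content } # assumes the page exposes #data
```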
+ # + # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with + # ... + # + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi + # ... + # + # @param n [Integer, nil] How many completions to generate for each prompt. ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe + # ... + # + # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Completion] @@ -48,27 +79,58 @@ def create(params) # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. # + # Some parameter documentation has been truncated; see + # {OpenAI::Models::CompletionCreateParams} for more details. + # # Creates a completion for the provided prompt and parameters. # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - # @param prompt [String, Array, Array, Array>, nil] - # @param best_of [Integer, nil] - # @param echo [Boolean, nil] - # @param frequency_penalty [Float, nil] - # @param logit_bias [Hash{Symbol=>Integer}, nil] - # @param logprobs [Integer, nil] - # @param max_tokens [Integer, nil] - # @param n [Integer, nil] - # @param presence_penalty [Float, nil] - # @param seed [Integer, nil] - # @param stop [String, Array, nil] - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # @param suffix [String, nil] - # @param temperature [Float, nil] - # @param top_p [Float, nil] - # @param user [String] + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # ...
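For the legacy text-completions endpoint documented above, a minimal sketch (the model name and prompt are illustrative; `client` as in the earlier sketches):

```ruby
completion = client.completions.create(
  model: "gpt-3.5-turbo-instruct",
  prompt: "Write a one-line haiku about Ruby.",
  max_tokens: 64,
  temperature: 0.7
)
puts completion.choices.first.text
```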
+ # + # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings + # ... + # + # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with + # ... + # + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # ... + # + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # + # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we + # ... + # + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi + # ... + # + # @param n [Integer, nil] How many completions to generate for each prompt. ... + # + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe + # ... + # + # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su + # ... + # + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index f9a2281d..2e0577e6 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -3,15 +3,28 @@ module OpenAI module Resources class Embeddings + # Some parameter documentation has been truncated; see + # {OpenAI::Models::EmbeddingCreateParams} for more details. + # # Creates an embedding vector representing the input text. # # @overload create(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) # - # @param input [String, Array, Array, Array>] - # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] - # @param dimensions [Integer] - # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # @param user [String] + # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i + # ... + # + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # ... + # + # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo + # ... + # + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http + # ...
+ # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::CreateEmbeddingResponse] diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 2996ff10..9decea6d 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -6,6 +6,9 @@ class Evals # @return [OpenAI::Resources::Evals::Runs] attr_reader :runs + # Some parameter documentation has been truncated; see + # {OpenAI::Models::EvalCreateParams} for more details. + # # Create the structure of an evaluation that can be used to test a model's # performance. An evaluation is a set of testing criteria and a datasource. After # creating an evaluation, you can run it on different models and model parameters. @@ -14,10 +17,14 @@ class Evals # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] - # @param testing_criteria [Array] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. + # + # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the evaluation. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::EvalCreateResponse] @@ -38,7 +45,8 @@ def create(params) # # @overload retrieve(eval_id, request_options: {}) # - # @param eval_id [String] + # @param eval_id [String] The ID of the evaluation to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::EvalRetrieveResponse] @@ -53,13 +61,19 @@ def retrieve(eval_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::EvalUpdateParams} for more details. + # # Update certain properties of an evaluation. # # @overload update(eval_id, metadata: nil, name: nil, request_options: {}) # - # @param eval_id [String] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # @param eval_id [String] The ID of the evaluation to update. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] Rename the evaluation. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::EvalUpdateResponse] @@ -76,14 +90,22 @@ def update(eval_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::EvalListParams} for more details. + # # List evaluations for a project.
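To make the eval-creation parameters above concrete, here is a rough sketch. The custom data-source-config and string-check grader hashes follow the shapes of the public Evals API as best understood here; treat them as illustrative placeholders rather than a canonical schema.

```ruby
evaluation = client.evals.create(
  name: "regression-suite",
  data_source_config: {
    type: :custom, # one of the DataSourceConfig variants
    item_schema: {type: "object", properties: {answer: {type: "string"}, expected: {type: "string"}}},
    include_sample_schema: true
  },
  testing_criteria: [
    {
      type: :string_check, # assumed grader shape
      name: "exact match",
      input: "{{item.answer}}",
      reference: "{{item.expected}}",
      operation: :eq
    }
  ]
)
client.evals.update(evaluation.id, name: "regression-suite-v2")
```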
# # @overload list(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) # - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::EvalListParams::Order] - # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # @param after [String] Identifier for the last eval from the previous pagination request. + # + # @param limit [Integer] Number of evals to retrieve. + # + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d + # ... + # + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -105,7 +127,8 @@ def list(params = {}) # # @overload delete(eval_id, request_options: {}) # - # @param eval_id [String] + # @param eval_id [String] The ID of the evaluation to delete. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::EvalDeleteResponse] diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 77980ec8..0bd63a75 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -7,14 +7,21 @@ class Runs # @return [OpenAI::Resources::Evals::Runs::OutputItems] attr_reader :output_items + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Evals::RunCreateParams} for more details. + # # Create a new evaluation run. This is the endpoint that will kick off grading. # # @overload create(eval_id, data_source:, metadata: nil, name: nil, request_options: {}) # - # @param eval_id [String] - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] - # @param metadata [Hash{Symbol=>String}, nil] - # @param name [String] + # @param eval_id [String] The ID of the evaluation to create a run for. + # + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param name [String] The name of the run. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Evals::RunCreateResponse] @@ -35,8 +42,10 @@ def create(eval_id, params) # # @overload retrieve(run_id, eval_id:, request_options: {}) # - # @param run_id [String] - # @param eval_id [String] + # @param run_id [String] The ID of the run to retrieve. + # + # @param eval_id [String] The ID of the evaluation to retrieve runs for. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Evals::RunRetrieveResponse] @@ -56,15 +65,25 @@ def retrieve(run_id, params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Evals::RunListParams} for more details. + # # Get a list of runs for an evaluation.
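A run can then be started against the eval from the previous sketch and polled to completion. The JSONL data-source hash follows the `CreateEvalJSONLRunDataSource` union member named above as best understood here, and the terminal status set is inferred from the `status` filter documented for `list` below; both are assumptions.

```ruby
run = client.evals.runs.create(
  evaluation.id,
  name: "nightly",
  data_source: {type: :jsonl, source: {type: :file_id, id: "file-abc123"}} # placeholder file ID
)

# Poll until the run leaves the queued/in_progress states (assumed terminal set).
until %w[completed failed canceled].include?(run.status.to_s)
  sleep(5)
  run = client.evals.runs.retrieve(run.id, eval_id: evaluation.id)
end
```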
# # @overload list(eval_id, after: nil, limit: nil, order: nil, status: nil, request_options: {}) # - # @param eval_id [String] - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # @param eval_id [String] The ID of the evaluation to retrieve runs for. + # + # @param after [String] Identifier for the last run from the previous pagination request. + # + # @param limit [Integer] Number of runs to retrieve. + # + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de + # ... + # + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -86,8 +105,10 @@ def list(eval_id, params = {}) # # @overload delete(run_id, eval_id:, request_options: {}) # - # @param run_id [String] - # @param eval_id [String] + # @param run_id [String] The ID of the run to delete. + # + # @param eval_id [String] The ID of the evaluation to delete the run from. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Evals::RunDeleteResponse] @@ -111,8 +132,10 @@ def delete(run_id, params) # # @overload cancel(run_id, eval_id:, request_options: {}) # - # @param run_id [String] - # @param eval_id [String] + # @param run_id [String] The ID of the run to cancel. + # + # @param eval_id [String] The ID of the evaluation whose run you want to cancel. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Evals::RunCancelResponse] diff --git a/lib/openai/resources/evals/runs/output_items.rb b/lib/openai/resources/evals/runs/output_items.rb index eb6914ba..04624e2d 100644 --- a/lib/openai/resources/evals/runs/output_items.rb +++ b/lib/openai/resources/evals/runs/output_items.rb @@ -9,9 +9,12 @@ class OutputItems # # @overload retrieve(output_item_id, eval_id:, run_id:, request_options: {}) # - # @param output_item_id [String] - # @param eval_id [String] - # @param run_id [String] + # @param output_item_id [String] The ID of the output item to retrieve. + # + # @param eval_id [String] The ID of the evaluation to retrieve runs for. + # + # @param run_id [String] The ID of the run to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse] @@ -35,16 +38,28 @@ def retrieve(output_item_id, params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Evals::Runs::OutputItemListParams} for more details. + # # Get a list of output items for an evaluation run. # # @overload list(run_id, eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) # - # @param run_id [String] - # @param eval_id [String] - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] + # @param run_id [String] Path param: The ID of the run to retrieve output items for. + # + # @param eval_id [String] Path param: The ID of the evaluation to retrieve runs for.
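Listing runs and drilling into their output items composes the two resources documented in this hunk. A sketch, with the same caveat that `#data` on the returned cursor pages is an assumption:

```ruby
client.evals.runs.list(evaluation.id, status: :completed, order: :desc).data.each do |run|
  items = client.evals.runs.output_items.list(run.id, eval_id: evaluation.id, limit: 10)
  puts "#{run.id}: #{items.data.length} output items"
end
```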
+ # + # @param after [String] Query param: Identifier for the last output item from the previous pagination re + # ... + # + # @param limit [Integer] Query param: Number of output items to retrieve. + # + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o + # ... + # + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index f107ee07..726182c0 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -3,6 +3,9 @@ module OpenAI module Resources class Files + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FileCreateParams} for more details. + # # Upload a file that can be used across various endpoints. Individual files can be # up to 512 MB, and the size of all files uploaded by one organization can be up # to 100 GB. @@ -27,8 +30,11 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [Pathname, StringIO] - # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # @param file [Pathname, StringIO] The File object (not file name) to be uploaded. ... + # + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FileObject] @@ -50,7 +56,8 @@ def create(params) # # @overload retrieve(file_id, request_options: {}) # - # @param file_id [String] + # @param file_id [String] The ID of the file to use for this request. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FileObject] @@ -65,14 +72,24 @@ def retrieve(file_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FileListParams} for more details. + # # Returns a list of files. # # @overload list(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) # - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::FileListParams::Order] - # @param purpose [String] + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # ... + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # ... + # + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # ... + # + # @param purpose [String] Only return files with the given purpose. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -94,7 +111,8 @@ def list(params = {}) # # @overload delete(file_id, request_options: {}) # - # @param file_id [String] + # @param file_id [String] The ID of the file to use for this request.
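File uploads accept a `Pathname` or `StringIO`, per the `file` parameter documented above. A sketch follows; the JSONL path is a placeholder, and `:"fine-tune"` is one of the documented purposes.

```ruby
require "pathname"

file = client.files.create(
  file: Pathname("training.jsonl"), # sent as multipart form data
  purpose: :"fine-tune"
)
puts file.id
```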
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FileDeleted] @@ -113,7 +131,8 @@ def delete(file_id, params = {}) # # @overload content(file_id, request_options: {}) # - # @param file_id [String] + # @param file_id [String] The ID of the file to use for this request. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [StringIO] diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index 38c9fe03..1d609a0a 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -5,6 +5,10 @@ module Resources class FineTuning class Checkpoints class Permissions + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::Checkpoints::PermissionCreateParams} for more + # details. + # # **NOTE:** Calling this endpoint requires an [admin API key](../admin-api-keys). # # This enables organization owners to share fine-tuned models with other projects # # @overload create(fine_tuned_model_checkpoint, project_ids:, request_options: {}) # - # @param fine_tuned_model_checkpoint [String] - # @param project_ids [Array] + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to create a permission for. ... + # + # @param project_ids [Array] The project identifiers to grant access to. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Page] @@ -31,6 +37,10 @@ def create(fine_tuned_model_checkpoint, params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams} for more + # details. + # # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). # # Organization owners can use this endpoint to view all permissions for a # # @overload retrieve(fine_tuned_model_checkpoint, after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) # - # @param fine_tuned_model_checkpoint [String] - # @param after [String] - # @param limit [Integer] - # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # @param project_id [String] + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to get permissions for. ... + # + # @param after [String] Identifier for the last permission ID from the previous pagination request. + # + # @param limit [Integer] Number of permissions to retrieve. + # + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # + # @param project_id [String] The ID of the project to get permissions for. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] @@ -59,6 +74,10 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams} for more + # details. + # # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys).
# # Organization owners can use this endpoint to delete a permission for a # # @overload delete(permission_id, fine_tuned_model_checkpoint:, request_options: {}) # - # @param permission_id [String] - # @param fine_tuned_model_checkpoint [String] + # @param permission_id [String] The ID of the fine-tuned model checkpoint permission to delete. ... + # + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to delete a permission for. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse] diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index 50cf5b51..ba29a2ea 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -7,6 +7,9 @@ class Jobs # @return [OpenAI::Resources::FineTuning::Jobs::Checkpoints] attr_reader :checkpoints + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::JobCreateParams} for more details. + # # Creates a fine-tuning job which begins the process of creating a new model from # a given dataset. # @@ -17,15 +20,26 @@ class Jobs # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] - # @param training_file [String] - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # @param integrations [Array, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] - # @param seed [Integer, nil] - # @param suffix [String, nil] - # @param validation_file [String, nil] + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the ... + # + # @param training_file [String] The ID of an uploaded file that contains training data. ... + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. ... + # + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # + # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j + # ... + # + # @param suffix [String, nil] A string of up to 64 characters that will be added to your fine-tuned model name + # ... + # + # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::FineTuningJob] @@ -42,13 +56,17 @@ def create(params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::JobRetrieveParams} for more details. + # # Get info about a fine-tuning job.
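A sketch of starting a job with the parameters documented above; the model name is illustrative, and the training-file ID is taken from the upload sketch earlier.

```ruby
job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini-2024-07-18",   # illustrative base model
  training_file: file.id,            # from the files.create sketch above
  suffix: "support-bot",
  metadata: {team: "ml-platform"}
)
```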
# # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) # # @overload retrieve(fine_tuning_job_id, request_options: {}) # - # @param fine_tuning_job_id [String] + # @param fine_tuning_job_id [String] The ID of the fine-tuning job. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::FineTuningJob] @@ -63,13 +81,20 @@ def retrieve(fine_tuning_job_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::JobListParams} for more details. + # # List your organization's fine-tuning jobs # # @overload list(after: nil, limit: nil, metadata: nil, request_options: {}) # - # @param after [String] - # @param limit [Integer] - # @param metadata [Hash{Symbol=>String}, nil] + # @param after [String] Identifier for the last job from the previous pagination request. + # + # @param limit [Integer] Number of fine-tuning jobs to retrieve. + # + # @param metadata [Hash{Symbol=>String}, nil] Optional metadata filter. To filter, use the syntax `metadata[k]=v`. Alternative + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] @@ -87,11 +112,15 @@ def list(params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::JobCancelParams} for more details. + # # Immediately cancel a fine-tune job. # # @overload cancel(fine_tuning_job_id, request_options: {}) # - # @param fine_tuning_job_id [String] + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to cancel. ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FineTuning::FineTuningJob] @@ -106,13 +135,19 @@ def cancel(fine_tuning_job_id, params = {}) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::JobListEventsParams} for more details. + # # Get status updates for a fine-tuning job. # # @overload list_events(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) # - # @param fine_tuning_job_id [String] - # @param after [String] - # @param limit [Integer] + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get events for. ... + # + # @param after [String] Identifier for the last event from the previous pagination request. + # + # @param limit [Integer] Number of events to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index b8195545..2b5e1d8e 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -5,13 +5,19 @@ module Resources class FineTuning class Jobs class Checkpoints + # Some parameter documentation has been truncated; see + # {OpenAI::Models::FineTuning::Jobs::CheckpointListParams} for more details. + # # List checkpoints for a fine-tuning job. # # @overload list(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) # - # @param fine_tuning_job_id [String] - # @param after [String] - # @param limit [Integer] + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get checkpoints for. ... + # + # @param after [String] Identifier for the last checkpoint ID from the previous pagination request.
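Event and checkpoint listings come back as cursor pages driven by the `after:` parameter documented above. A sketch of manual cursor pagination (kept manual rather than assuming an auto-paging helper; `#data` on the page is again an assumption):

```ruby
events = []
page = client.fine_tuning.jobs.list_events(job.id, limit: 20)
loop do
  events.concat(page.data)          # assumes the page exposes #data
  break if page.data.size < 20      # short page signals the end
  page = client.fine_tuning.jobs.list_events(job.id, limit: 20, after: page.data.last.id)
end
events.each { |event| puts event.message }
```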
+ # + # @param limit [Integer] Number of checkpoints to retrieve. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::CursorPage] diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 40bfb0e5..e5f8f465 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -3,16 +3,30 @@ module OpenAI module Resources class Images + # Some parameter documentation has been truncated; see + # {OpenAI::Models::ImageCreateVariationParams} for more details. + # # Creates a variation of a given image. This endpoint only supports `dall-e-2`. # # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param n [Integer, nil] - # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] - # @param user [String] + # @param image [Pathname, StringIO] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. + # + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # ... + # + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::ImagesResponse] @@ -30,20 +44,40 @@ def create_variation(params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::ImageEditParams} for more details. + # # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. # # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, Array] - # @param prompt [String] - # @param mask [Pathname, StringIO] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param n [Integer, nil] - # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] - # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] - # @param user [String] + # @param image [Pathname, StringIO, Array] The image(s) to edit. Must be a supported image file or an array of images. For + # ... + # + # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character + # ... + # + # @param mask [Pathname, StringIO] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation.
Only `dall-e-2` and `gpt-image-1` are sup + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. + # + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # ... + # + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # ... + # + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::ImagesResponse] @@ -61,23 +95,48 @@ def edit(params) ) end + # Some parameter documentation has been truncated; see + # {OpenAI::Models::ImageGenerateParams} for more details. + # # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). # # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # - # @param prompt [String] - # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] - # @param n [Integer, nil] - # @param output_compression [Integer, nil] - # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] - # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] - # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] - # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] - # @param user [String] + # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte + # ... + # + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows setting transparency for the background of the generated image(s). ... + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # ... + # + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # ... + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # ... + # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only + # ... + # + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # ... + # + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. ... + # + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # ...
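A sketch of the image-edit call documented above; the file paths are placeholders, and `gpt-image-1` is one of the models the docs name as supported.

```ruby
result = client.images.edit(
  image: Pathname("lounge.png"),
  mask: Pathname("mask.png"), # fully transparent areas mark the editable region
  prompt: "A sunlit indoor lounge area with a pool containing a flamingo",
  model: "gpt-image-1",
  n: 1
)
image = result.data.first # carries `url` or `b64_json`, depending on the model
```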
+ # + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # ... + # + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # ... + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::ImagesResponse] diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index b77422d4..afe9d3e1 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -8,7 +8,8 @@ class Models # # @overload retrieve(model, request_options: {}) # - # @param model [String] + # @param model [String] The ID of the model to use for this request. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Model] @@ -48,7 +49,8 @@ def list(params = {}) # # @overload delete(model, request_options: {}) # - # @param model [String] + # @param model [String] The model to delete. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::ModelDeleted] diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 6df4561d..443d6b18 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -3,13 +3,19 @@ module OpenAI module Resources class Moderations + # Some parameter documentation has been truncated; see + # {OpenAI::Models::ModerationCreateParams} for more details. + # # Classifies if text and/or image inputs are potentially harmful. Learn more in # the [moderation guide](https://platform.openai.com/docs/guides/moderation). # # @overload create(input:, model: nil, request_options: {}) # - # @param input [String, Array, Array] - # @param model [String, Symbol, OpenAI::Models::ModerationModel] + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or + # ... + # + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in ... + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::ModerationCreateResponse] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 70d045af..223be4f7 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -8,6 +8,9 @@ class Responses # See {OpenAI::Resources::Responses#stream_raw} for streaming counterpart. # + # Some parameter documentation has been truncated; see + # {OpenAI::Models::Responses::ResponseCreateParams} for more details. + # # Creates a model response.
Provide # [text](https://platform.openai.com/docs/guides/text) or # [image](https://platform.openai.com/docs/guides/images) inputs to generate @@ -22,24 +25,47 @@ class Responses # # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # @param include [Array, nil] - # @param instructions [String, nil] - # @param max_output_tokens [Integer, nil] - # @param metadata [Hash{Symbol=>String}, nil] - # @param parallel_tool_calls [Boolean, nil] - # @param previous_response_id [String, nil] - # @param reasoning [OpenAI::Models::Reasoning, nil] - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] - # @param store [Boolean, nil] - # @param temperature [Float, nil] - # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # @param tools [Array] - # @param top_p [Float, nil] - # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] - # @param user [String] + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ... + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # + # @param include [Array, nil] Specify additional output data to include in the model response. Currently ... + # + # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # ... + # + # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in + # ... + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # + # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ... + # + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # ... + # + # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ... + # + # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m + # ... + # + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... 
+ #
+ # @param tools [Array] An array of tools the model may call while generating a response. You ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ...
+ #
+ # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ...
+ #
+ # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Responses::Response]
@@ -62,6 +88,9 @@ def create(params)
# See {OpenAI::Resources::Responses#create} for non-streaming counterpart.
#
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Responses::ResponseCreateParams} for more details.
+ #
# Creates a model response. Provide
# [text](https://platform.openai.com/docs/guides/text) or
# [image](https://platform.openai.com/docs/guides/images) inputs to generate
@@ -76,24 +105,47 @@ def create(params)
#
# @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {})
#
- # @param input [String, Array]
- # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel]
- # @param include [Array, nil]
- # @param instructions [String, nil]
- # @param max_output_tokens [Integer, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param parallel_tool_calls [Boolean, nil]
- # @param previous_response_id [String, nil]
- # @param reasoning [OpenAI::Models::Reasoning, nil]
- # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
- # @param store [Boolean, nil]
- # @param temperature [Float, nil]
- # @param text [OpenAI::Models::Responses::ResponseTextConfig]
- # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
- # @param tools [Array]
- # @param top_p [Float, nil]
- # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil]
- # @param user [String]
+ # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ...
+ #
+ # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ...
+ #
+ # @param include [Array, nil] Specify additional output data to include in the model response. Currently ...
+ #
+ # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context
+ # ...
+ #
+ # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ...
+ #
+ # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ...
+ #
+ # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ...
+ #
+ # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
+ # ...
+ #
+ # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ...
+ #
+ # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
+ # ...
+ #
+ # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ...
+ #
+ # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ...
+ #
+ # @param tools [Array] An array of tools the model may call while generating a response. You ...
+ #
+ # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ...
+ #
+ # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ...
+ #
+ # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::Stream]
@@ -117,12 +169,17 @@ def stream_raw(params)
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
+ #
# Retrieves a model response with the given ID.
#
# @overload retrieve(response_id, include: nil, request_options: {})
#
- # @param response_id [String]
- # @param include [Array]
+ # @param response_id [String] The ID of the response to retrieve.
+ #
+ # @param include [Array] Additional fields to include in the response. See the `include` ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Responses::Response]
@@ -143,7 +200,8 @@ def retrieve(response_id, params = {})
#
# @overload delete(response_id, request_options: {})
#
- # @param response_id [String]
+ # @param response_id [String] The ID of the response to delete.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [nil]
diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb
index 5e875778..a47baded 100644
--- a/lib/openai/resources/responses/input_items.rb
+++ b/lib/openai/resources/responses/input_items.rb
@@ -4,16 +4,25 @@ module OpenAI
module Resources
class Responses
class InputItems
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Responses::InputItemListParams} for more details.
+ #
# Returns a list of input items for a given response.
#
# @overload list(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {})
#
- # @param response_id [String]
- # @param after [String]
- # @param before [String]
- # @param include [Array]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order]
+ # @param response_id [String] The ID of the response to retrieve input items for.
+ #
+ # @param after [String] An item ID to list items after, used in pagination. ...
+ #
+ # @param before [String] An item ID to list items before, used in pagination. ...
+ #
+ # @param include [Array] Additional fields to include in the response. See the `include` ...
+ #
+ # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between ...
+ #
+ # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::CursorPage]
diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb
index 1c497d22..719532b1 100644
--- a/lib/openai/resources/uploads.rb
+++ b/lib/openai/resources/uploads.rb
@@ -6,6 +6,9 @@ class Uploads
# @return [OpenAI::Resources::Uploads::Parts]
attr_reader :parts
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::UploadCreateParams} for more details.
+ #
# Creates an intermediate
# [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object
# that you can add
@@ -28,10 +31,14 @@ class Uploads
#
# @overload create(bytes:, filename:, mime_type:, purpose:, request_options: {})
#
- # @param bytes [Integer]
- # @param filename [String]
- # @param mime_type [String]
- # @param purpose [Symbol, OpenAI::Models::FilePurpose]
+ # @param bytes [Integer] The number of bytes in the file you are uploading. ...
+ #
+ # @param filename [String] The name of the file to upload. ...
+ #
+ # @param mime_type [String] The MIME type of the file. ...
+ #
+ # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Upload]
@@ -48,11 +55,15 @@ def create(params)
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::UploadCancelParams} for more details.
+ #
# Cancels the Upload. No Parts may be added after an Upload is cancelled.
#
# @overload cancel(upload_id, request_options: {})
#
- # @param upload_id [String]
+ # @param upload_id [String] The ID of the Upload. ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Upload]
@@ -67,6 +78,9 @@ def cancel(upload_id, params = {})
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::UploadCompleteParams} for more details.
+ #
# Completes the
# [Upload](https://platform.openai.com/docs/api-reference/uploads/object).
#
@@ -83,9 +97,13 @@ def cancel(upload_id, params = {})
#
# @overload complete(upload_id, part_ids:, md5: nil, request_options: {})
#
- # @param upload_id [String]
- # @param part_ids [Array]
- # @param md5 [String]
+ # @param upload_id [String] The ID of the Upload. ...
+ #
+ # @param part_ids [Array] The ordered list of Part IDs. ...
+ #
+ # @param md5 [String] The optional md5 checksum for the file contents to verify if the bytes uploaded
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Upload]
diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb
index 12d21af6..b0e572a1 100644
--- a/lib/openai/resources/uploads/parts.rb
+++ b/lib/openai/resources/uploads/parts.rb
@@ -4,6 +4,9 @@ module OpenAI
module Resources
class Uploads
class Parts
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Uploads::PartCreateParams} for more details.
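Taken together, the Upload methods documented above (`create`, `parts.create`, `complete`) compose into a chunked-upload flow. A minimal sketch under stated assumptions: `client` is a configured `OpenAI::Client`, and the file name and purpose value are placeholders, not part of this patch:

    require "pathname"

    # Hypothetical setup; the constructor arguments are assumptions.
    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # 1. Declare the upload: total size, name, MIME type, and intended purpose.
    upload = client.uploads.create(
      bytes: File.size("training.jsonl"),
      filename: "training.jsonl",
      mime_type: "application/jsonl",
      purpose: :"fine-tune" # illustrative purpose value
    )

    # 2. Add the bytes as a single Part (a large file would be split into several).
    part = client.uploads.parts.create(upload.id, data: Pathname.new("training.jsonl"))

    # 3. Complete the Upload with the ordered list of Part IDs; `md5` stays optional.
    client.uploads.complete(upload.id, part_ids: [part.id])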
+ #
# Adds a
# [Part](https://platform.openai.com/docs/api-reference/uploads/part-object) to an
# [Upload](https://platform.openai.com/docs/api-reference/uploads/object) object.
@@ -18,8 +21,10 @@ class Parts
#
# @overload create(upload_id, data:, request_options: {})
#
- # @param upload_id [String]
- # @param data [Pathname, StringIO]
+ # @param upload_id [String] The ID of the Upload. ...
+ #
+ # @param data [Pathname, StringIO] The chunk of bytes for this Part. ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::Uploads::UploadPart]
diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb
index 3547ae53..cc75a01e 100644
--- a/lib/openai/resources/vector_stores.rb
+++ b/lib/openai/resources/vector_stores.rb
@@ -9,15 +9,25 @@ class VectorStores
# @return [OpenAI::Resources::VectorStores::FileBatches]
attr_reader :file_batches
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStoreCreateParams} for more details.
+ #
# Create a vector store.
#
# @overload create(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {})
#
- # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
- # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter]
- # @param file_ids [Array]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param name [String]
+ # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
+ # ...
+ #
+ # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store.
+ #
+ # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
+ # ...
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param name [String] The name of the vector store.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStore]
@@ -38,7 +48,8 @@ def create(params = {})
#
# @overload retrieve(vector_store_id, request_options: {})
#
- # @param vector_store_id [String]
+ # @param vector_store_id [String] The ID of the vector store to retrieve.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStore]
@@ -53,14 +64,21 @@ def retrieve(vector_store_id, params = {})
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStoreUpdateParams} for more details.
+ #
# Modifies a vector store.
#
# @overload update(vector_store_id, expires_after: nil, metadata: nil, name: nil, request_options: {})
#
- # @param vector_store_id [String]
- # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil]
- # @param metadata [Hash{Symbol=>String}, nil]
- # @param name [String, nil]
+ # @param vector_store_id [String] The ID of the vector store to modify.
+ #
+ # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store.
+ #
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param name [String, nil] The name of the vector store.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStore]
@@ -77,14 +95,25 @@ def update(vector_store_id, params = {})
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStoreListParams} for more details.
+ #
# Returns a list of vector stores.
#
# @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {})
#
- # @param after [String]
- # @param before [String]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order]
+ # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
+ # ...
+ #
+ # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
+ # ...
+ #
+ # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
+ # ...
+ #
+ # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::CursorPage]
@@ -106,7 +135,8 @@ def list(params = {})
#
# @overload delete(vector_store_id, request_options: {})
#
- # @param vector_store_id [String]
+ # @param vector_store_id [String] The ID of the vector store to delete.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStoreDeleted]
@@ -121,17 +151,27 @@ def delete(vector_store_id, params = {})
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStoreSearchParams} for more details.
+ #
# Search a vector store for relevant chunks based on a query and file attributes
# filter.
#
# @overload search(vector_store_id, query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {})
#
- # @param vector_store_id [String]
- # @param query [String, Array]
- # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]
- # @param max_num_results [Integer]
- # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions]
- # @param rewrite_query [Boolean]
+ # @param vector_store_id [String] The ID of the vector store to search.
+ #
+ # @param query [String, Array] A query string for a search.
+ #
+ # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes.
+ #
+ # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50
+ # ...
+ #
+ # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search.
+ #
+ # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search.
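The `search` overload above maps directly onto a resource call. A hedged sketch, reusing the hypothetical `client` from the upload example; the vector store ID and query are invented for illustration:

    # Search a vector store; returns an OpenAI::Internal::Page of results.
    page = client.vector_stores.search(
      "vs_123",
      query: "What is the return policy?",
      max_num_results: 10, # between 1 and 50 per the parameter docs above
      rewrite_query: true  # let the service rewrite the natural-language query
    )

    # `auto_paging_each` is assumed here as the SDK's pagination helper.
    page.auto_paging_each { |result| puts result.inspect }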
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::Page]
diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb
index 71268646..10b3e3dc 100644
--- a/lib/openai/resources/vector_stores/file_batches.rb
+++ b/lib/openai/resources/vector_stores/file_batches.rb
@@ -4,14 +4,23 @@ module OpenAI
module Resources
class VectorStores
class FileBatches
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStores::FileBatchCreateParams} for more details.
+ #
# Create a vector store file batch.
#
# @overload create(vector_store_id, file_ids:, attributes: nil, chunking_strategy: nil, request_options: {})
#
- # @param vector_store_id [String]
- # @param file_ids [Array]
- # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil]
- # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
+ # @param vector_store_id [String] The ID of the vector store for which to create a File Batch. ...
+ #
+ # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
+ # ...
+ #
+ # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFileBatch]
@@ -32,8 +41,10 @@ def create(vector_store_id, params)
#
# @overload retrieve(batch_id, vector_store_id:, request_options: {})
#
- # @param batch_id [String]
- # @param vector_store_id [String]
+ # @param batch_id [String] The ID of the file batch being retrieved.
+ #
+ # @param vector_store_id [String] The ID of the vector store that the file batch belongs to.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFileBatch]
@@ -58,8 +69,10 @@ def retrieve(batch_id, params)
#
# @overload cancel(batch_id, vector_store_id:, request_options: {})
#
- # @param batch_id [String]
- # @param vector_store_id [String]
+ # @param batch_id [String] The ID of the file batch to cancel.
+ #
+ # @param vector_store_id [String] The ID of the vector store that the file batch belongs to.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFileBatch]
@@ -79,17 +92,32 @@ def cancel(batch_id, params)
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStores::FileBatchListFilesParams} for more details.
+ #
# Returns a list of vector store files in a batch.
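The FileBatches overloads above chain naturally: create a batch of Files, then page through its contents with `list_files`. A sketch with the same hypothetical `client` and invented IDs:

    # Attach several existing Files to a vector store in one batch.
    batch = client.vector_stores.file_batches.create(
      "vs_123",
      file_ids: %w[file-abc file-def]
    )

    # Cursor pagination via the `after`/`before` params documented below;
    # `auto_paging_each` is assumed as the pagination helper.
    client.vector_stores.file_batches
          .list_files(batch.id, vector_store_id: "vs_123")
          .auto_paging_each { |file| puts "#{file.id}: #{file.status}" }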
#
# @overload list_files(batch_id, vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {})
#
- # @param batch_id [String]
- # @param vector_store_id [String]
- # @param after [String]
- # @param before [String]
- # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order]
+ # @param batch_id [String] Path param: The ID of the file batch that the files belong to.
+ #
+ # @param vector_store_id [String] Path param: The ID of the vector store that the files belong to.
+ #
+ # @param after [String] Query param: A cursor for use in pagination. `after` is an object ID that define
+ # ...
+ #
+ # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin
+ # ...
+ #
+ # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`,
+ # ...
+ #
+ # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be
+ # ...
+ #
+ # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::CursorPage]
diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb
index 9f6900e7..74870229 100644
--- a/lib/openai/resources/vector_stores/files.rb
+++ b/lib/openai/resources/vector_stores/files.rb
@@ -4,16 +4,25 @@ module OpenAI
module Resources
class VectorStores
class Files
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStores::FileCreateParams} for more details.
+ #
# Create a vector store file by attaching a
# [File](https://platform.openai.com/docs/api-reference/files) to a
# [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).
#
# @overload create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {})
#
- # @param vector_store_id [String]
- # @param file_id [String]
- # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil]
- # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam]
+ # @param vector_store_id [String] The ID of the vector store for which to create a File. ...
+ #
+ # @param file_id [String] A [File](https://platform.openai.com/docs/api-reference/files) ID that the vecto
+ # ...
+ #
+ # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+ #
+ # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFile]
@@ -34,8 +43,10 @@ def create(vector_store_id, params)
#
# @overload retrieve(file_id, vector_store_id:, request_options: {})
#
- # @param file_id [String]
- # @param vector_store_id [String]
+ # @param file_id [String] The ID of the file being retrieved.
+ #
+ # @param vector_store_id [String] The ID of the vector store that the file belongs to.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFile]
@@ -55,13 +66,20 @@ def retrieve(file_id, params)
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStores::FileUpdateParams} for more details.
+ #
# Update attributes on a vector store file.
#
# @overload update(file_id, vector_store_id:, attributes:, request_options: {})
#
- # @param file_id [String]
- # @param vector_store_id [String]
- # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil]
+ # @param file_id [String] Path param: The ID of the file to update attributes.
+ #
+ # @param vector_store_id [String] Path param: The ID of the vector store the file belongs to.
+ #
+ # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFile]
@@ -82,16 +100,30 @@ def update(file_id, params)
)
end
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::VectorStores::FileListParams} for more details.
+ #
# Returns a list of vector store files.
#
# @overload list(vector_store_id, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {})
#
- # @param vector_store_id [String]
- # @param after [String]
- # @param before [String]
- # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter]
- # @param limit [Integer]
- # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order]
+ # @param vector_store_id [String] The ID of the vector store that the files belong to.
+ #
+ # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
+ # ...
+ #
+ # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
+ # ...
+ #
+ # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
+ # ...
+ #
+ # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
+ # ...
+ #
+ # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # ...
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::CursorPage]
@@ -116,8 +148,10 @@ def list(vector_store_id, params = {})
#
# @overload delete(file_id, vector_store_id:, request_options: {})
#
- # @param file_id [String]
- # @param vector_store_id [String]
+ # @param file_id [String] The ID of the file to delete.
+ #
+ # @param vector_store_id [String] The ID of the vector store that the file belongs to.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted]
@@ -141,8 +175,10 @@ def delete(file_id, params)
#
# @overload content(file_id, vector_store_id:, request_options: {})
#
- # @param file_id [String]
- # @param vector_store_id [String]
+ # @param file_id [String] The ID of the file within the vector store.
+ #
+ # @param vector_store_id [String] The ID of the vector store.
+ #
# @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
#
# @return [OpenAI::Internal::Page]
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi
index 68db9bed..d27b0482 100644
--- a/rbi/lib/openai/models/audio/speech_create_params.rbi
+++ b/rbi/lib/openai/models/audio/speech_create_params.rbi
@@ -60,16 +60,27 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # The text to generate audio for. The maximum length is 4096 characters.
input:,
+ # One of the available [TTS models](https://platform.openai.com/docs/models#tts):
+ # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`.
model:,
+ # The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+ # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and
+ # `verse`. Previews of the voices are available in the
+ # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
voice:,
+ # Control the voice of your generated audio with additional instructions. Does not
+ # work with `tts-1` or `tts-1-hd`.
instructions: nil,
+ # The format of the generated audio. Supported formats are `mp3`, `opus`, `aac`, `flac`,
+ # `wav`, and `pcm`.
response_format: nil,
+ # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
+ # the default.
speed: nil,
request_options: {}
- )
- end
-
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/lib/openai/models/audio/transcription.rbi
index ccea9d77..fa6cd066 100644
--- a/rbi/lib/openai/models/audio/transcription.rbi
+++ b/rbi/lib/openai/models/audio/transcription.rbi
@@ -31,8 +31,14 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(text:, logprobs: nil); end
-
+ def self.new(
+ # The transcribed text.
+ text:,
+ # The log probabilities of the tokens in the transcription. Only returned with the
+ # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added
+ # to the `include` array.
+ logprobs: nil
+ ); end
sig { override.returns({text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]}) }
def to_hash; end
@@ -59,8 +65,14 @@ module OpenAI
attr_writer :logprob
sig { params(token: String, bytes: T::Array[Float], logprob: Float).returns(T.attached_class) }
- def self.new(token: nil, bytes: nil, logprob: nil); end
-
+ def self.new(
+ # The token in the transcription.
+ token: nil,
+ # The bytes of the token.
+ bytes: nil,
+ # The log probability of the token.
+ logprob: nil
+ ); end
sig { override.returns({token: String, bytes: T::Array[Float], logprob: Float}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi
index c6020c3f..f89a401f 100644
--- a/rbi/lib/openai/models/audio/transcription_create_params.rbi
+++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi
@@ -103,13 +103,43 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # The audio file object (not file name) to transcribe, in one of these formats:
+ # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
file:,
+ # ID of the model to use. The options are `gpt-4o-transcribe`,
+ # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source
+ # Whisper V2 model).
model:,
+ # Additional information to include in the transcription response.
`logprobs` will
+ # return the log probabilities of the tokens in the response to understand the
+ # model's confidence in the transcription. `logprobs` only works with
+ # response_format set to `json` and only with the models `gpt-4o-transcribe` and
+ # `gpt-4o-mini-transcribe`.
include: nil,
+ # The language of the input audio. Supplying the input language in
+ # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`)
+ # format will improve accuracy and latency.
language: nil,
+ # An optional text to guide the model's style or continue a previous audio
+ # segment. The
+ # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting)
+ # should match the audio language.
prompt: nil,
+ # The format of the output, in one of these options: `json`, `text`, `srt`,
+ # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`,
+ # the only supported format is `json`.
response_format: nil,
+ # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
+ # output more random, while lower values like 0.2 will make it more focused and
+ # deterministic. If set to 0, the model will use
+ # [log probability](https://en.wikipedia.org/wiki/Log_probability) to
+ # automatically increase the temperature until certain thresholds are hit.
temperature: nil,
+ # The timestamp granularities to populate for this transcription.
+ # `response_format` must be set to `verbose_json` to use timestamp granularities.
+ # Either or both of these options are supported: `word`, or `segment`. Note: There
+ # is no additional latency for segment timestamps, but generating word timestamps
+ # incurs additional latency.
timestamp_granularities: nil,
request_options: {}
); end
diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/lib/openai/models/audio/transcription_segment.rbi
index 37306ed0..e98fad18 100644
--- a/rbi/lib/openai/models/audio/transcription_segment.rbi
+++ b/rbi/lib/openai/models/audio/transcription_segment.rbi
@@ -63,19 +63,30 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # Unique identifier of the segment.
id:,
+ # Average logprob of the segment. If the value is lower than -1, consider the
+ # logprobs failed.
avg_logprob:,
+ # Compression ratio of the segment. If the value is greater than 2.4, consider the
+ # compression failed.
compression_ratio:,
+ # End time of the segment in seconds.
end_:,
+ # Probability of no speech in the segment. If the value is higher than 1.0 and the
+ # `avg_logprob` is below -1, consider this segment silent.
no_speech_prob:,
+ # Seek offset of the segment.
seek:,
+ # Start time of the segment in seconds.
start:,
+ # Temperature parameter used for generating the segment.
temperature:,
+ # Text content of the segment.
text:,
+ # Array of token IDs for the text content.
tokens:
- )
- end
-
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
index c0c5e553..25ace7da 100644
--- a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
+++ b/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
@@ -38,8 +38,16 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(delta:, logprobs: nil, type: :"transcript.text.delta"); end
-
+ def self.new(
+ # The text delta that was additionally transcribed.
+ delta:,
+ # The log probabilities of the delta.
Only included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + logprobs: nil, + # The type of the event. Always `transcript.text.delta`. + type: :"transcript.text.delta" + ); end sig do override .returns( @@ -71,8 +79,14 @@ module OpenAI attr_writer :logprob sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } - def self.new(token: nil, bytes: nil, logprob: nil); end - + def self.new( + # The token that was used to generate the log probability. + token: nil, + # The bytes that were used to generate the log probability. + bytes: nil, + # The log probability of the token. + logprob: nil + ); end sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } def to_hash; end end diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi index 95c68dcc..08687ed5 100644 --- a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/lib/openai/models/audio/transcription_text_done_event.rbi @@ -39,8 +39,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(text:, logprobs: nil, type: :"transcript.text.done"); end - + def self.new( + # The text that was transcribed. + text:, + # The log probabilities of the individual tokens in the transcription. Only + # included if you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `include[]` parameter set to `logprobs`. + logprobs: nil, + # The type of the event. Always `transcript.text.done`. + type: :"transcript.text.done" + ); end sig do override .returns( @@ -72,8 +81,14 @@ module OpenAI attr_writer :logprob sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } - def self.new(token: nil, bytes: nil, logprob: nil); end - + def self.new( + # The token that was used to generate the log probability. + token: nil, + # The bytes that were used to generate the log probability. + bytes: nil, + # The log probability of the token. + logprob: nil + ); end sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } def to_hash; end end diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/lib/openai/models/audio/transcription_verbose.rbi index 76b0838c..43af0a5c 100644 --- a/rbi/lib/openai/models/audio/transcription_verbose.rbi +++ b/rbi/lib/openai/models/audio/transcription_verbose.rbi @@ -45,8 +45,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(duration:, language:, text:, segments: nil, words: nil); end - + def self.new( + # The duration of the input audio. + duration:, + # The language of the input audio. + language:, + # The transcribed text. + text:, + # Segments of the transcribed text and their corresponding details. + segments: nil, + # Extracted words and their corresponding timestamps. 
+ words: nil
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/lib/openai/models/audio/transcription_word.rbi
index 639e066d..51122c61 100644
--- a/rbi/lib/openai/models/audio/transcription_word.rbi
+++ b/rbi/lib/openai/models/audio/transcription_word.rbi
@@ -17,8 +17,14 @@ module OpenAI
attr_accessor :word
sig { params(end_: Float, start: Float, word: String).returns(T.attached_class) }
- def self.new(end_:, start:, word:); end
-
+ def self.new(
+ # End time of the word in seconds.
+ end_:,
+ # Start time of the word in seconds.
+ start:,
+ # The text content of the word.
+ word:
+ ); end
sig { override.returns({end_: Float, start: Float, word: String}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi
index 405309c1..807e0bb2 100644
--- a/rbi/lib/openai/models/audio/translation_create_params.rbi
+++ b/rbi/lib/openai/models/audio/translation_create_params.rbi
@@ -57,9 +57,29 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {})
- end
-
+ def self.new(
+ # The audio file object (not file name) to translate, in one of these formats: flac,
+ # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
+ file:,
+ # ID of the model to use. Only `whisper-1` (which is powered by our open source
+ # Whisper V2 model) is currently available.
+ model:,
+ # An optional text to guide the model's style or continue a previous audio
+ # segment. The
+ # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting)
+ # should be in English.
+ prompt: nil,
+ # The format of the output, in one of these options: `json`, `text`, `srt`,
+ # `verbose_json`, or `vtt`.
+ response_format: nil,
+ # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
+ # output more random, while lower values like 0.2 will make it more focused and
+ # deterministic. If set to 0, the model will use
+ # [log probability](https://en.wikipedia.org/wiki/Log_probability) to
+ # automatically increase the temperature until certain thresholds are hit.
+ temperature: nil,
+ request_options: {}
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/lib/openai/models/audio/translation_verbose.rbi
index 77fb4815..9458e067 100644
--- a/rbi/lib/openai/models/audio/translation_verbose.rbi
+++ b/rbi/lib/openai/models/audio/translation_verbose.rbi
@@ -35,8 +35,16 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(duration:, language:, text:, segments: nil); end
-
+ def self.new(
+ # The duration of the input audio.
+ duration:,
+ # The language of the output translation (always `english`).
+ language:,
+ # The translated text.
+ text:,
+ # Segments of the translated text and their corresponding details.
+ segments: nil
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
index d072297d..7ea24386 100644
--- a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
+++ b/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
@@ -10,8 +10,10 @@ module OpenAI
# The default strategy. This strategy currently uses a `max_chunk_size_tokens` of
# `800` and `chunk_overlap_tokens` of `400`.
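For contrast with the `auto` default just described (800-token chunks with a 400-token overlap), a static strategy pins both values explicitly. A hedged sketch in the hash form the sigs accept (`OpenAI::Internal::AnyHash`); the IDs are placeholders, and the overlap respects the documented rule that it must not exceed half of `max_chunk_size_tokens`:

    client.vector_stores.files.create(
      "vs_123",
      file_id: "file-abc",
      chunking_strategy: {
        type: :static,
        static: {
          max_chunk_size_tokens: 1600, # allowed range is 100..4096
          chunk_overlap_tokens: 400    # <= half of max_chunk_size_tokens
        }
      }
    )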
sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto); end - + def self.new( + # Always `auto`. + type: :auto + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 8de778c2..bbb7b1cd 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -149,24 +149,47 @@ module OpenAI end def self.new( id:, + # The time frame within which the batch should be processed. completion_window:, + # The Unix timestamp (in seconds) for when the batch was created. created_at:, + # The OpenAI API endpoint used by the batch. endpoint:, + # The ID of the input file for the batch. input_file_id:, + # The current status of the batch. status:, + # The Unix timestamp (in seconds) for when the batch was cancelled. cancelled_at: nil, + # The Unix timestamp (in seconds) for when the batch started cancelling. cancelling_at: nil, + # The Unix timestamp (in seconds) for when the batch was completed. completed_at: nil, + # The ID of the file containing the outputs of requests with errors. error_file_id: nil, errors: nil, + # The Unix timestamp (in seconds) for when the batch expired. expired_at: nil, + # The Unix timestamp (in seconds) for when the batch will expire. expires_at: nil, + # The Unix timestamp (in seconds) for when the batch failed. failed_at: nil, + # The Unix timestamp (in seconds) for when the batch started finalizing. finalizing_at: nil, + # The Unix timestamp (in seconds) for when the batch started processing. in_progress_at: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The ID of the file containing the outputs of successfully executed requests. output_file_id: nil, + # The request counts for different statuses within the batch. request_counts: nil, + # The object type, which is always `batch`. object: :batch ); end sig do @@ -236,8 +259,11 @@ module OpenAI params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)], object: String) .returns(T.attached_class) end - def self.new(data: nil, object: nil); end - + def self.new( + data: nil, + # The object type, which is always `list`. + object: nil + ); end sig { override.returns({data: T::Array[OpenAI::Models::BatchError], object: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 3a795500..fec9c7ac 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -49,8 +49,34 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}); end - + def self.new( + # The time frame within which the batch should be processed. Currently only `24h` + # is supported. + completion_window:, + # The endpoint to be used for all requests in the batch. Currently + # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` + # are supported. Note that `/v1/embeddings` batches are also restricted to a + # maximum of 50,000 embedding inputs across all requests in the batch. 
+ endpoint:, + # The ID of an uploaded file that contains requests for the new batch. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your input file must be formatted as a + # [JSONL file](https://platform.openai.com/docs/api-reference/batch/request-input), + # and must be uploaded with the purpose `batch`. The file can contain up to 50,000 + # requests, and can be up to 200 MB in size. + input_file_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/lib/openai/models/batch_error.rbi index 0871460e..24b461c6 100644 --- a/rbi/lib/openai/models/batch_error.rbi +++ b/rbi/lib/openai/models/batch_error.rbi @@ -29,8 +29,16 @@ module OpenAI params(code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)) .returns(T.attached_class) end - def self.new(code: nil, line: nil, message: nil, param: nil); end - + def self.new( + # An error code identifying the error type. + code: nil, + # The line number of the input file where the error occurred, if applicable. + line: nil, + # A human-readable message providing more details about the error. + message: nil, + # The name of the parameter that caused the error, if applicable. + param: nil + ); end sig do override.returns({code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)}) end diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/lib/openai/models/batch_list_params.rbi index 9ae31328..397a8042 100644 --- a/rbi/lib/openai/models/batch_list_params.rbi +++ b/rbi/lib/openai/models/batch_list_params.rbi @@ -32,8 +32,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + request_options: {} + ); end sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } def to_hash; end end diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/lib/openai/models/batch_request_counts.rbi index 2c97e840..8f022136 100644 --- a/rbi/lib/openai/models/batch_request_counts.rbi +++ b/rbi/lib/openai/models/batch_request_counts.rbi @@ -17,8 +17,14 @@ module OpenAI # The request counts for different statuses within the batch. sig { params(completed: Integer, failed: Integer, total: Integer).returns(T.attached_class) } - def self.new(completed:, failed:, total:); end - + def self.new( + # Number of requests that have been completed successfully. + completed:, + # Number of requests that have failed. + failed:, + # Total number of requests in the batch. 
+ total:
+ ); end
sig { override.returns({completed: Integer, failed: Integer, total: Integer}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/lib/openai/models/beta/assistant.rbi
index f8ef7368..438ec36e 100644
--- a/rbi/lib/openai/models/beta/assistant.rbi
+++ b/rbi/lib/openai/models/beta/assistant.rbi
@@ -160,18 +160,71 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # The identifier, which can be referenced in API endpoints.
id:,
+ # The Unix timestamp (in seconds) for when the assistant was created.
created_at:,
+ # The description of the assistant. The maximum length is 512 characters.
description:,
+ # The system instructions that the assistant uses. The maximum length is 256,000
+ # characters.
instructions:,
+ # Set of 16 key-value pairs that can be attached to an object. This can be useful
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
+ #
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
metadata:,
+ # ID of the model to use. You can use the
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to
+ # see all of your available models, or see our
+ # [Model overview](https://platform.openai.com/docs/models) for descriptions of
+ # them.
model:,
+ # The name of the assistant. The maximum length is 256 characters.
name:,
+ # A list of tools enabled on the assistant. There can be a maximum of 128 tools per
+ # assistant. Tools can be of types `code_interpreter`, `file_search`, or
+ # `function`.
tools:,
+ # Specifies the format that the model must output. Compatible with
+ # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
+ # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
+ # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
+ #
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ # Outputs which ensures the model will match your supplied JSON schema. Learn more
+ # in the
+ # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+ #
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
+ # message the model generates is valid JSON.
+ #
+ # **Important:** when using JSON mode, you **must** also instruct the model to
+ # produce JSON yourself via a system or user message. Without this, the model may
+ # generate an unending stream of whitespace until the generation reaches the token
+ # limit, resulting in a long-running and seemingly "stuck" request. Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
response_format: nil,
+ # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
temperature: nil,
+ # A set of resources that are used by the assistant's tools. The resources are
+ # specific to the type of tool. For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
+ # An alternative to sampling with temperature, called nucleus sampling, where the
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
+ #
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
+ # The object type, which is always `assistant`.
object: :assistant
); end
sig do
override
.returns(
@@ -267,8 +320,12 @@ module OpenAI
attr_writer :file_ids
sig { params(file_ids: T::Array[String]).returns(T.attached_class) }
- def self.new(file_ids: nil); end
-
+ def self.new(
+ # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
+ # available to the `code_interpreter` tool. There can be a maximum of 20 files
+ # associated with the tool.
+ file_ids: nil
+ ); end
sig { override.returns({file_ids: T::Array[String]}) }
def to_hash; end
end
@@ -285,8 +342,13 @@ module OpenAI
attr_writer :vector_store_ids
sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) }
- def self.new(vector_store_ids: nil); end
-
+ def self.new(
+ # The ID of the
+ # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
+ # attached to this assistant. There can be a maximum of 1 vector store attached to
+ # the assistant.
+ vector_store_ids: nil
+ ); end
sig { override.returns({vector_store_ids: T::Array[String]}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi
index f068ee9b..d4ce6a96 100644
--- a/rbi/lib/openai/models/beta/assistant_create_params.rbi
+++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi
@@ -175,16 +175,72 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # ID of the model to use. You can use the
+ # [List models](https://platform.openai.com/docs/api-reference/models/list) API to
+ # see all of your available models, or see our
+ # [Model overview](https://platform.openai.com/docs/models) for descriptions of
+ # them.
model:,
+ # The description of the assistant. The maximum length is 512 characters.
description: nil,
+ # The system instructions that the assistant uses. The maximum length is 256,000
+ # characters.
instructions: nil,
+ # Set of 16 key-value pairs that can be attached to an object. This can be useful
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
+ #
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
metadata: nil,
+ # The name of the assistant. The maximum length is 256 characters.
name: nil,
+ # **o-series models only**
+ #
+ # Constrains effort on reasoning for
+ # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
+ # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can
+ # result in faster responses and fewer tokens used on reasoning in a response.
reasoning_effort: nil,
+ # Specifies the format that the model must output. Compatible with
+ # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
+ # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
+ # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
+ #
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ # Outputs which ensures the model will match your supplied JSON schema.
Learn more
+ # in the
+ # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+ #
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
+ # message the model generates is valid JSON.
+ #
+ # **Important:** when using JSON mode, you **must** also instruct the model to
+ # produce JSON yourself via a system or user message. Without this, the model may
+ # generate an unending stream of whitespace until the generation reaches the token
+ # limit, resulting in a long-running and seemingly "stuck" request. Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
response_format: nil,
+ # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
temperature: nil,
+ # A set of resources that are used by the assistant's tools. The resources are
+ # specific to the type of tool. For example, the `code_interpreter` tool requires
+ # a list of file IDs, while the `file_search` tool requires a list of vector store
+ # IDs.
tool_resources: nil,
+ # A list of tools enabled on the assistant. There can be a maximum of 128 tools per
+ # assistant. Tools can be of types `code_interpreter`, `file_search`, or
+ # `function`.
tools: nil,
+ # An alternative to sampling with temperature, called nucleus sampling, where the
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
+ #
+ # We generally recommend altering this or temperature but not both.
top_p: nil,
request_options: {}
); end
@@ -298,8 +354,12 @@ module OpenAI
attr_writer :file_ids
sig { params(file_ids: T::Array[String]).returns(T.attached_class) }
- def self.new(file_ids: nil); end
-
+ def self.new(
+ # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
+ # available to the `code_interpreter` tool. There can be a maximum of 20 files
+ # associated with the tool.
+ file_ids: nil
+ ); end
sig { override.returns({file_ids: T::Array[String]}) }
def to_hash; end
end
@@ -351,8 +411,18 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(vector_store_ids: nil, vector_stores: nil); end
-
+ def self.new(
+ # The
+ # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
+ # attached to this assistant. There can be a maximum of 1 vector store attached to
+ # the assistant.
+ vector_store_ids: nil,
+ # A helper to create a
+ # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
+ # with file_ids and attach it to this assistant. There can be a maximum of 1
+ # vector store attached to the assistant.
+ vector_stores: nil
+ ); end
sig do
override
.returns(
@@ -421,8 +491,22 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end
-
+ def self.new(
+ # The chunking strategy used to chunk the file(s). If not set, will use the `auto`
+ # strategy.
+ chunking_strategy: nil,
+ # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to
+ # add to the vector store. There can be a maximum of 10000 files in a vector
+ # store.
+ file_ids: nil,
+ # Set of 16 key-value pairs that can be attached to an object.
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -451,8 +535,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto); end - + def self.new( + # Always `auto`. + type: :auto + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -490,8 +576,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static); end - + def self.new( + static:, + # Always `static`. + type: :static + ); end sig do override .returns( @@ -521,8 +610,15 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end - + def self.new( + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + chunk_overlap_tokens:, + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. + max_chunk_size_tokens: + ); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 48df52cc..70d05fcf 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -53,8 +53,25 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/lib/openai/models/beta/assistant_stream_event.rbi index 5cb77eb2..3b46028a 100644 --- a/rbi/lib/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/lib/openai/models/beta/assistant_stream_event.rbi @@ -56,8 +56,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, enabled: nil, event: :"thread.created"); end - + def self.new( + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). + data:, + # Whether to enable input audio transcription. 
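+ #
+ # A minimal consumption sketch (hypothetical, not part of the generated
+ # docs; assumes `events` is an Enumerable of assistant stream events
+ # obtained from a streaming run):
+ #
+ #   events.each do |event|
+ #     case event
+ #     in OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated => e
+ #       puts "thread #{e.data.id} created"
+ #     else
+ #       # other event types are ignored in this sketch
+ #     end
+ #   end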
+ enabled: nil, + event: :"thread.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } def to_hash; end end @@ -80,8 +86,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.created"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -104,8 +114,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.queued"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.queued" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -128,8 +142,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.in_progress"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -152,8 +170,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.requires_action"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.requires_action" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -176,8 +198,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.completed"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -200,8 +226,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.incomplete"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.incomplete" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -224,8 +254,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.failed"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
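+ #
+ # A failed run reports its failure reason on `last_error`. A hypothetical
+ # handling sketch (assumes `event` is a ThreadRunFailed instance; not part
+ # of the generated docs):
+ #
+ #   run = event.data
+ #   warn "run #{run.id} failed: #{run.last_error&.message}"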
+ data:, + event: :"thread.run.failed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -248,8 +282,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelling"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.cancelling" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -272,8 +310,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelled"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.cancelled" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -296,8 +338,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.expired"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.expired" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -323,8 +369,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.created"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -350,8 +399,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.in_progress"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -380,8 +432,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.delta"); end - + def self.new( + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. + data:, + event: :"thread.run.step.delta" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } def to_hash; end end @@ -407,8 +463,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.completed"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -434,8 +493,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.failed"); end - + def self.new( + # Represents a step in execution of a run. 
+ data:, + event: :"thread.run.step.failed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -461,8 +523,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.cancelled"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.cancelled" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -488,8 +553,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.expired"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.expired" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -513,8 +581,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.created"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -538,8 +610,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.in_progress"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -566,8 +642,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.delta"); end - + def self.new( + # Represents a message delta i.e. any changed fields on a message during + # streaming. + data:, + event: :"thread.message.delta" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } def to_hash; end end @@ -591,8 +671,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.completed"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -616,8 +700,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.incomplete"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ data:, + event: :"thread.message.incomplete" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 3ef8c526..b8e37979 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -23,8 +23,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, function: nil); end - + def self.new( + # The type of the tool. If type is `function`, the function name must be set + type:, + function: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi index c7334df0..4a88f071 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi @@ -9,8 +9,10 @@ module OpenAI attr_accessor :name sig { params(name: String).returns(T.attached_class) } - def self.new(name:); end - + def self.new( + # The name of the function to call. + name: + ); end sig { override.returns({name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 8e100d6d..29f452a9 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -178,16 +178,72 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The description of the assistant. The maximum length is 512 characters. description: nil, + # The system instructions that the assistant uses. The maximum length is 256,000 + # characters. instructions: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model: nil, + # The name of the assistant. The maximum length is 256 characters. name: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. reasoning_effort: nil, + # Specifies the format that the model must output. Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # A list of tools enabled on the assistant. There can be a maximum of 128 tools per + # assistant. Tools can be of types `code_interpreter`, `file_search`, or + # `function`. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} ); end @@ -364,8 +420,13 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # Overrides the list of + # [file](https://platform.openai.com/docs/api-reference/files) IDs made available + # to the `code_interpreter` tool. There can be a maximum of 20 files associated + # with the tool. + file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -382,8 +443,13 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil); end - + def self.new( + # Overrides the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant.
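+ #
+ # A hypothetical update sketch (assumes a configured `OpenAI::Client` in
+ # `client` and existing IDs; not part of the generated docs):
+ #
+ #   client.beta.assistants.update(
+ #     "asst_123",
+ #     tool_resources: {file_search: {vector_store_ids: ["vs_123"]}}
+ #   )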
+ vector_store_ids: nil + ); end sig { override.returns({vector_store_ids: T::Array[String]}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi index cacd14eb..d1ac9cc7 100644 --- a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/lib/openai/models/beta/code_interpreter_tool.rbi @@ -9,8 +9,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :code_interpreter); end - + def self.new( + # The type of tool being defined: `code_interpreter` + type: :code_interpreter + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index ad127d97..e2c738a3 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -25,8 +25,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_search: nil, type: :file_search); end - + def self.new( + # Overrides for the file search tool. + file_search: nil, + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol, file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch}) } def to_hash; end @@ -70,8 +74,24 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(max_num_results: nil, ranking_options: nil); end - + def self.new( + # The maximum number of results the file search tool should output. The default is + # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between + # 1 and 50 inclusive. + # + # Note that the file search tool may output fewer than `max_num_results` results. + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + max_num_results: nil, + # The ranking options for the file search. If not specified, the file search tool + # will use the `auto` ranker and a score_threshold of 0. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + ranking_options: nil + ); end sig do override .returns( @@ -107,8 +127,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(score_threshold:, ranker: nil); end - + def self.new( + # The score threshold for the file search. All values must be a floating point + # number between 0 and 1. + score_threshold:, + # The ranker to use for the file search. If not specified, the `auto` + # ranker is used.
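+ #
+ # A hypothetical construction sketch (not part of the generated docs),
+ # dropping results that score below 0.5:
+ #
+ #   OpenAI::Models::Beta::FileSearchTool.new(
+ #     file_search: {
+ #       max_num_results: 10,
+ #       ranking_options: {score_threshold: 0.5}
+ #     }
+ #   )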
+ ranker: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/lib/openai/models/beta/function_tool.rbi index b375f86a..5a914d6f 100644 --- a/rbi/lib/openai/models/beta/function_tool.rbi +++ b/rbi/lib/openai/models/beta/function_tool.rbi @@ -18,8 +18,11 @@ module OpenAI params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(function:, type: :function); end - + def self.new( + function:, + # The type of tool being defined: `function` + type: :function + ); end sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/lib/openai/models/beta/message_stream_event.rbi index 3f070ad2..ad322e59 100644 --- a/rbi/lib/openai/models/beta/message_stream_event.rbi +++ b/rbi/lib/openai/models/beta/message_stream_event.rbi @@ -28,8 +28,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.created"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -53,8 +57,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.in_progress"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -81,8 +89,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.delta"); end - + def self.new( + # Represents a message delta i.e. any changed fields on a message during + # streaming. + data:, + event: :"thread.message.delta" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } def to_hash; end end @@ -106,8 +118,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.completed"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.message.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end @@ -131,8 +147,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.message.incomplete"); end - + def self.new( + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ data:, + event: :"thread.message.incomplete" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/lib/openai/models/beta/run_step_stream_event.rbi index c004aab8..80a4d141 100644 --- a/rbi/lib/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_step_stream_event.rbi @@ -30,8 +30,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.created"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -57,8 +60,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.in_progress"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -87,8 +93,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.delta"); end - + def self.new( + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. + data:, + event: :"thread.run.step.delta" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } def to_hash; end end @@ -114,8 +124,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.completed"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -141,8 +154,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.failed"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.failed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -168,8 +184,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.cancelled"); end - + def self.new( + # Represents a step in execution of a run. + data:, + event: :"thread.run.step.cancelled" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end @@ -195,8 +214,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.step.expired"); end - + def self.new( + # Represents a step in execution of a run. 
+ data:, + event: :"thread.run.step.expired" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/lib/openai/models/beta/run_stream_event.rbi index 29f03175..1f8fa007 100644 --- a/rbi/lib/openai/models/beta/run_stream_event.rbi +++ b/rbi/lib/openai/models/beta/run_stream_event.rbi @@ -26,8 +26,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.created"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -50,8 +54,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.queued"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.queued" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -74,8 +82,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.in_progress"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.in_progress" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -98,8 +110,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.requires_action"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.requires_action" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -122,8 +138,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.completed"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.completed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -146,8 +166,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.incomplete"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ data:, + event: :"thread.run.incomplete" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -170,8 +194,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.failed"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.failed" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -194,8 +222,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelling"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.cancelling" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -218,8 +250,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.cancelled"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.cancelled" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end @@ -242,8 +278,12 @@ module OpenAI params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) .returns(T.attached_class) end - def self.new(data:, event: :"thread.run.expired"); end - + def self.new( + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). + data:, + event: :"thread.run.expired" + ); end sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/lib/openai/models/beta/thread.rbi index 0a393255..46c58f55 100644 --- a/rbi/lib/openai/models/beta/thread.rbi +++ b/rbi/lib/openai/models/beta/thread.rbi @@ -52,8 +52,26 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, metadata:, tool_resources:, object: :thread); end - + def self.new( + # The identifier, which can be referenced in API endpoints. + id:, + # The Unix timestamp (in seconds) for when the thread was created. + created_at:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources:, + # The object type, which is always `thread`. 
+ object: :thread + ); end sig do override .returns( @@ -126,8 +144,12 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. + file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -144,8 +166,13 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil); end - + def self.new( + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. + vector_store_ids: nil + ); end sig { override.returns({vector_store_ids: T::Array[String]}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index ef54ba30..fed5543a 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -231,20 +231,93 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # Override the default system message of the assistant. This is useful for + # modifying the behavior on a per-run basis. instructions: nil, + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_completion_tokens: nil, + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_prompt_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # Specifies the format that the model must output. 
Compatible with + # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), + # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4), + # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the + # message the model generates is valid JSON. + # + # **Important:** when using JSON mode, you **must** also instruct the model to + # produce JSON yourself via a system or user message. Without this, the model may + # generate an unending stream of whitespace until the generation reaches the token + # limit, resulting in a long-running and seemingly "stuck" request. Also note that + # the message content may be partially cut off if `finish_reason="length"`, which + # indicates the generation exceeded `max_tokens` or the conversation exceeded the + # max context length. response_format: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. temperature: nil, + # Options to create a new thread. If no thread is provided when running a request, + # an empty thread will be created. thread: nil, + # Controls which (if any) tool is called by the model. `none` means the model will + # not call any tools and instead generates a message. `auto` is the default value + # and means the model can pick between generating a message or calling one or more + # tools. `required` means the model must call one or more tools before responding + # to the user. Specifying a particular tool like `{"type": "file_search"}` or + # `{"type": "function", "function": {"name": "my_function"}}` forces the model to + # call that tool. tool_choice: nil, + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. tool_resources: nil, + # Override the tools the assistant can use for this run. This is useful for + # modifying the behavior on a per-run basis. tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or temperature but not both. top_p: nil, + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. truncation_strategy: nil, request_options: {} ); end @@ -356,8 +429,23 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(messages: nil, metadata: nil, tool_resources: nil); end - + def self.new( + # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # start the thread with. + messages: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard.
+ # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources: nil + ); end sig do override .returns( @@ -436,8 +524,26 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil); end - + def self.new( + # The text contents of the message. + content:, + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. + role:, + # A list of files attached to the message, and the tools they should be added to. + attachments: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -562,8 +668,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil); end - + def self.new( + # The ID of the file to attach to the message. + file_id: nil, + # The tools to add this file to. + tools: nil + ); end sig do override .returns( @@ -589,8 +699,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search); end - + def self.new( + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -676,8 +788,12 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. + file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -731,8 +847,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_ids: nil, vector_stores: nil); end - + def self.new( + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. + vector_store_ids: nil, + # A helper to create a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. + vector_stores: nil + ); end sig do override .returns( @@ -801,8 +927,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end - + def self.new( + # The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` + # strategy. + chunking_strategy: nil, + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. + file_ids: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -831,8 +971,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto); end - + def self.new( + # Always `auto`. + type: :auto + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -870,8 +1012,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static); end - + def self.new( + static:, + # Always `static`. + type: :static + ); end sig do override .returns( @@ -901,8 +1046,15 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end - + def self.new( + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + chunk_overlap_tokens:, + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. + max_chunk_size_tokens: + ); end sig do override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) end @@ -993,8 +1145,12 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. + file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -1011,8 +1167,13 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil); end - + def self.new( + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this assistant. There can be a maximum of 1 vector store attached to + # the assistant. + vector_store_ids: nil + ); end sig { override.returns({vector_store_ids: T::Array[String]}) } def to_hash; end end @@ -1040,8 +1201,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, last_messages: nil); end - + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. 
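+ #
+ # A hypothetical sketch (assumes a configured `OpenAI::Client` in `client`;
+ # not part of the generated docs), keeping only the two most recent
+ # messages in the run's context:
+ #
+ #   client.beta.threads.create_and_run(
+ #     assistant_id: "asst_123",
+ #     truncation_strategy: {type: :last_messages, last_messages: 2}
+ #   )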
+ last_messages: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 6d1ed043..7a7ae81c 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -53,8 +53,24 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(messages: nil, metadata: nil, tool_resources: nil, request_options: {}); end - + def self.new( + # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # start the thread with. + messages: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources: nil, + request_options: {} + ); end sig do override .returns( @@ -129,8 +145,26 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil); end - + def self.new( + # The text contents of the message. + content:, + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. + role:, + # A list of files attached to the message, and the tools they should be added to. + attachments: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -250,8 +284,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil); end - + def self.new( + # The ID of the file to attach to the message. + file_id: nil, + # The tools to add this file to. + tools: nil + ); end sig do override .returns( @@ -277,8 +315,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search); end - + def self.new( + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -352,8 +392,12 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. 
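+ #
+ # A hypothetical sketch (assumes a configured `OpenAI::Client` in `client`
+ # and an uploaded file ID; not part of the generated docs):
+ #
+ #   client.beta.threads.create(
+ #     tool_resources: {code_interpreter: {file_ids: ["file_123"]}}
+ #   )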
+ file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -405,8 +449,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_ids: nil, vector_stores: nil); end - + def self.new( + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. + vector_store_ids: nil, + # A helper to create a + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # with file_ids and attach it to this thread. There can be a maximum of 1 vector + # store attached to the thread. + vector_stores: nil + ); end sig do override .returns( @@ -475,8 +529,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(chunking_strategy: nil, file_ids: nil, metadata: nil); end - + def self.new( + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. + chunking_strategy: nil, + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to + # add to the vector store. There can be a maximum of 10000 files in a vector + # store. + file_ids: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -505,8 +573,10 @@ module OpenAI # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of # `800` and `chunk_overlap_tokens` of `400`. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :auto); end - + def self.new( + # Always `auto`. + type: :auto + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -544,8 +614,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(static:, type: :static); end - + def self.new( + static:, + # Always `static`. + type: :static + ); end sig do override .returns( @@ -575,8 +648,15 @@ module OpenAI max_chunk_size_tokens: Integer ).returns(T.attached_class) end - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end - + def self.new( + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + chunk_overlap_tokens:, + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. + max_chunk_size_tokens: + ); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/lib/openai/models/beta/thread_stream_event.rbi index f169b98c..b8bd735a 100644 --- a/rbi/lib/openai/models/beta/thread_stream_event.rbi +++ b/rbi/lib/openai/models/beta/thread_stream_event.rbi @@ -33,8 +33,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, enabled: nil, event: :"thread.created"); end - + def self.new( + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). + data:, + # Whether to enable input audio transcription. 
+ enabled: nil, + event: :"thread.created" + ); end sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/lib/openai/models/beta/thread_update_params.rbi index 1eb3f670..9af4b827 100644 --- a/rbi/lib/openai/models/beta/thread_update_params.rbi +++ b/rbi/lib/openai/models/beta/thread_update_params.rbi @@ -39,8 +39,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(metadata: nil, tool_resources: nil, request_options: {}); end - + def self.new( + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + tool_resources: nil, + request_options: {} + ); end sig do override .returns( @@ -111,8 +124,12 @@ module OpenAI attr_writer :file_ids sig { params(file_ids: T::Array[String]).returns(T.attached_class) } - def self.new(file_ids: nil); end - + def self.new( + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made + # available to the `code_interpreter` tool. There can be a maximum of 20 files + # associated with the tool. + file_ids: nil + ); end sig { override.returns({file_ids: T::Array[String]}) } def to_hash; end end @@ -129,8 +146,13 @@ module OpenAI attr_writer :vector_store_ids sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } - def self.new(vector_store_ids: nil); end - + def self.new( + # The + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # attached to this thread. There can be a maximum of 1 vector store attached to + # the thread. + vector_store_ids: nil + ); end sig { override.returns({vector_store_ids: T::Array[String]}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi index a9b10bb9..8f129c30 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi @@ -43,8 +43,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(end_index:, file_citation:, start_index:, text:, type: :file_citation); end - + def self.new( + end_index:, + file_citation:, + start_index:, + # The text in the message content that needs to be replaced. + text:, + # Always `file_citation`. + type: :file_citation + ); end sig do override .returns( @@ -65,8 +72,10 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:); end - + def self.new( + # The ID of the specific File the citation is from. 
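+ #
+ # A hypothetical extraction sketch (assumes `message` is a retrieved
+ # `OpenAI::Models::Beta::Threads::Message`; not part of the generated docs):
+ #
+ #   cited_file_ids = message.content.flat_map do |part|
+ #     next [] unless part.respond_to?(:text)
+ #     part.text.annotations.filter_map do |a|
+ #       a.file_citation.file_id if a.respond_to?(:file_citation)
+ #     end
+ #   end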
+ file_id: + ); end sig { override.returns({file_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi index 1d28545a..5d81ac86 100644 --- a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -58,15 +58,16 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The index of the annotation in the text content part. index:, end_index: nil, file_citation: nil, start_index: nil, + # The text in the message content that needs to be replaced. text: nil, + # Always `file_citation`. type: :file_citation - ) - end - + ); end sig do override .returns( @@ -98,8 +99,12 @@ module OpenAI attr_writer :quote sig { params(file_id: String, quote: String).returns(T.attached_class) } - def self.new(file_id: nil, quote: nil); end - + def self.new( + # The ID of the specific File the citation is from. + file_id: nil, + # The specific quote in the file. + quote: nil + ); end sig { override.returns({file_id: String, quote: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi index 87953109..fc4afa16 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi @@ -42,8 +42,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(end_index:, file_path:, start_index:, text:, type: :file_path); end - + def self.new( + end_index:, + file_path:, + start_index:, + # The text in the message content that needs to be replaced. + text:, + # Always `file_path`. + type: :file_path + ); end sig do override .returns( @@ -64,8 +71,10 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:); end - + def self.new( + # The ID of the file that was generated. + file_id: + ); end sig { override.returns({file_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi index 4c2c8c36..e58879be 100644 --- a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -56,9 +56,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path) - end - + def self.new( + # The index of the annotation in the text content part. + index:, + end_index: nil, + file_path: nil, + start_index: nil, + # The text in the message content that needs to be replaced. + text: nil, + # Always `file_path`. + type: :file_path + ); end sig do override .returns( @@ -83,8 +91,10 @@ module OpenAI attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id: nil); end - + def self.new( + # The ID of the file that was generated. 
+ file_id: nil + ); end sig { override.returns({file_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index c6c13ccf..f7ded701 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -23,8 +23,15 @@ module OpenAI params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) .returns(T.attached_class) end - def self.new(file_id:, detail: nil); end - + def self.new( + # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. + file_id:, + # Specifies the detail level of the image if specified by the user. `low` uses + # fewer tokens, you can opt in to high resolution using `high`. + detail: nil + ); end sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } def to_hash; end diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi index 90524b05..1b87fd3f 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi @@ -24,8 +24,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_file:, type: :image_file); end - + def self.new( + image_file:, + # Always `image_file`. + type: :image_file + ); end sig { override.returns({image_file: OpenAI::Models::Beta::Threads::ImageFile, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index c677a276..628e1cef 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -26,8 +26,15 @@ module OpenAI params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) .returns(T.attached_class) end - def self.new(detail: nil, file_id: nil); end - + def self.new( + # Specifies the detail level of the image if specified by the user. `low` uses + # fewer tokens, you can opt in to high resolution using `high`. + detail: nil, + # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image + # in the message content. Set `purpose="vision"` when uploading the File if you + # need to later display the file content. + file_id: nil + ); end sig do override .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi index 74be2ead..c0ae3f1e 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi @@ -29,8 +29,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, image_file: nil, type: :image_file); end - + def self.new( + # The index of the content part in the message. + index:, + image_file: nil, + # Always `image_file`. 
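# Usage sketch (assuming a configured `client`; IDs are placeholders): an
# `image_file` content part built from the fields documented above. Per those
# docs, the file should be uploaded with `purpose="vision"` if its content must
# be displayable later.
#
#   client.beta.threads.messages.create(
#     "thread_abc123",
#     role: :user,
#     content: [
#       {type: :image_file, image_file: {file_id: "file-abc123", detail: :low}}
#     ]
#   )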
+ type: :image_file
+ ); end
 sig do
 override.returns({index: Integer, type: Symbol, image_file: OpenAI::Models::Beta::Threads::ImageFileDelta})
 end
diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi
index b98e88e4..6c364602 100644
--- a/rbi/lib/openai/models/beta/threads/image_url.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url.rbi
@@ -22,8 +22,14 @@ module OpenAI
 params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)
 .returns(T.attached_class)
 end
- def self.new(url:, detail: nil); end
-
+ def self.new(
+ # The external URL of the image, must be one of the supported image types: jpeg,
+ # jpg, png, gif, webp.
+ url:,
+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`. Default value is `auto`.
+ detail: nil
+ ); end
 sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) }
 def to_hash; end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
index a960242b..53b64254 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi
@@ -20,8 +20,11 @@ module OpenAI
 params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash), type: Symbol)
 .returns(T.attached_class)
 end
- def self.new(image_url:, type: :image_url); end
-
+ def self.new(
+ image_url:,
+ # The type of the content part.
+ type: :image_url
+ ); end
 sig { override.returns({image_url: OpenAI::Models::Beta::Threads::ImageURL, type: Symbol}) }
 def to_hash; end
 end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
index a9c81f3e..0de64fe7 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi
@@ -25,8 +25,14 @@ module OpenAI
 params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String)
 .returns(T.attached_class)
 end
- def self.new(detail: nil, url: nil); end
-
+ def self.new(
+ # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in
+ # to high resolution using `high`.
+ detail: nil,
+ # The URL of the image, must be one of the supported image types: jpeg, jpg, png,
+ # gif, webp.
+ url: nil
+ ); end
 sig do
 override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String})
 end
diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
index ed36acf3..e7eb2ab2 100644
--- a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
+++ b/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi
@@ -28,8 +28,13 @@ module OpenAI
 )
 .returns(T.attached_class)
 end
- def self.new(index:, image_url: nil, type: :image_url); end
-
+ def self.new(
+ # The index of the content part in the message.
+ index:,
+ image_url: nil,
+ # Always `image_url`.
+ type: :image_url + ); end sig { override.returns({index: Integer, type: Symbol, image_url: OpenAI::Models::Beta::Threads::ImageURLDelta}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index eef51a4c..3c4ecef7 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -121,19 +121,44 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The identifier, which can be referenced in API endpoints. id:, + # If applicable, the ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) that + # authored this message. assistant_id:, + # A list of files attached to the message, and the tools they were added to. attachments:, + # The Unix timestamp (in seconds) for when the message was completed. completed_at:, + # The content of the message in array of text and/or images. content:, + # The Unix timestamp (in seconds) for when the message was created. created_at:, + # The Unix timestamp (in seconds) for when the message was marked as incomplete. incomplete_at:, + # On an incomplete message, details about why the message is incomplete. incomplete_details:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The entity that produced the message. One of `user` or `assistant`. role:, + # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) + # associated with the creation of this message. Value is `null` when messages are + # created manually using the create message or create thread endpoints. run_id:, + # The status of the message, which can be either `in_progress`, `incomplete`, or + # `completed`. status:, + # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that + # this message belongs to. thread_id:, + # The object type, which is always `thread.message`. object: :"thread.message" ); end sig do @@ -216,8 +241,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil); end - + def self.new( + # The ID of the file to attach to the message. + file_id: nil, + # The tools to add this file to. + tools: nil + ); end sig do override .returns( @@ -243,8 +272,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search); end - + def self.new( + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -269,8 +300,10 @@ module OpenAI params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason:); end - + def self.new( + # The reason the message is incomplete. 
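# Usage sketch (configured `client`, placeholder IDs): attaching a file to a
# message and routing it to the `file_search` tool, per the attachment shape
# documented above.
#
#   client.beta.threads.messages.create(
#     "thread_abc123",
#     role: :user,
#     content: "Summarize the attached report.",
#     attachments: [{file_id: "file-abc123", tools: [{type: :file_search}]}]
#   )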
+ reason: + ); end sig do override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) end diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 6c57ca1f..326dc671 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -69,8 +69,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, attachments: nil, metadata: nil, request_options: {}); end - + def self.new( + # The text contents of the message. + content:, + # The role of the entity that is creating the message. Allowed values include: + # + # - `user`: Indicates the message is sent by an actual user and should be used in + # most cases to represent user-generated messages. + # - `assistant`: Indicates the message is generated by the assistant. Use this + # value to insert messages from the assistant into the conversation. + role:, + # A list of files attached to the message, and the tools they should be added to. + attachments: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ); end sig do override .returns( @@ -191,8 +210,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil); end - + def self.new( + # The ID of the file to attach to the message. + file_id: nil, + # The tools to add this file to. + tools: nil + ); end sig do override .returns( @@ -218,8 +241,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search); end - + def self.new( + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index 6156e86b..55f3b251 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -61,8 +61,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content: nil, role: nil); end - + def self.new( + # The content of the message in array of text and/or images. + content: nil, + # The entity that produced the message. One of `user` or `assistant`. + role: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi index f5e5b29b..4e6d3d17 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta_event.rbi @@ -30,8 +30,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, delta:, object: :"thread.message.delta"); end - + def self.new( + # The identifier of the message, which can be referenced in API endpoints. + id:, + # The delta containing the fields that have changed on the Message. + delta:, + # The object type, which is always `thread.message.delta`. 
+ object: :"thread.message.delta" + ); end sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::MessageDelta, object: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 945d30d3..50cf3829 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -62,8 +62,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + # Filter messages by the run ID that generated them. + run_id: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/lib/openai/models/beta/threads/message_update_params.rbi index 09e67673..33d47d32 100644 --- a/rbi/lib/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_update_params.rbi @@ -28,8 +28,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, metadata: nil, request_options: {}); end - + def self.new( + thread_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi index 591afe27..70e4dbb9 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi @@ -14,8 +14,11 @@ module OpenAI # The refusal content generated by the assistant. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(refusal:, type: :refusal); end - + def self.new( + refusal:, + # Always `refusal`. 
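# Usage sketch (configured `client`, placeholder IDs): the cursor parameters
# documented above compose as follows; passing the last seen object ID as
# `after` fetches the next page.
#
#   page = client.beta.threads.messages.list(
#     "thread_abc123",
#     limit: 20,      # 1..100, default 20
#     order: :desc,   # newest first
#     run_id: "run_abc123"
#   )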
+ type: :refusal + ); end sig { override.returns({refusal: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi index 8c9e2d1f..8b3878d9 100644 --- a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi @@ -21,8 +21,13 @@ module OpenAI # The refusal content that is part of a message. sig { params(index: Integer, refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(index:, refusal: nil, type: :refusal); end - + def self.new( + # The index of the refusal part in the message. + index:, + refusal: nil, + # Always `refusal`. + type: :refusal + ); end sig { override.returns({index: Integer, type: Symbol, refusal: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi index 61ec2da8..b1788c25 100644 --- a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -38,8 +38,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function); end - + def self.new( + # The ID of the tool call. This ID must be referenced when you submit the tool + # outputs in using the + # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # endpoint. + id:, + # The function definition. + function:, + # The type of tool call the output is required for. For now, this is always + # `function`. + type: :function + ); end sig do override .returns( @@ -59,8 +69,12 @@ module OpenAI # The function definition. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:); end - + def self.new( + # The arguments that the model expects you to pass to the function. + arguments:, + # The name of the function. + name: + ); end sig { override.returns({arguments: String, name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index f0708be8..102cf828 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -283,32 +283,108 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The identifier, which can be referenced in API endpoints. id:, + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # execution of this run. assistant_id:, + # The Unix timestamp (in seconds) for when the run was cancelled. cancelled_at:, + # The Unix timestamp (in seconds) for when the run was completed. completed_at:, + # The Unix timestamp (in seconds) for when the run was created. created_at:, + # The Unix timestamp (in seconds) for when the run will expire. expires_at:, + # The Unix timestamp (in seconds) for when the run failed. failed_at:, + # Details on why the run is incomplete. Will be `null` if the run is not + # incomplete. incomplete_details:, + # The instructions that the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for + # this run. instructions:, + # The last error associated with this run. Will be `null` if there are no errors. 
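# Usage sketch: when a run stops with `status: :requires_action`, the tool calls
# documented above can be read back and decoded. `run` is assumed to be a
# retrieved run object; `JSON` is from the Ruby standard library.
#
#   run.required_action.submit_tool_outputs.tool_calls.each do |tool_call|
#     args = JSON.parse(tool_call.function.arguments) # arguments arrive as a JSON string
#     puts "#{tool_call.id} -> #{tool_call.function.name}(#{args.inspect})"
#   end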
 last_error:,
+ # The maximum number of completion tokens specified to have been used over the
+ # course of the run.
 max_completion_tokens:,
+ # The maximum number of prompt tokens specified to have been used over the course
+ # of the run.
 max_prompt_tokens:,
+ # Set of 16 key-value pairs that can be attached to an object. This can be useful
+ # for storing additional information about the object in a structured format, and
+ # querying for objects via API or the dashboard.
+ #
+ # Keys are strings with a maximum length of 64 characters. Values are strings with
+ # a maximum length of 512 characters.
 metadata:,
+ # The model that the
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
+ # this run.
 model:,
+ # Whether to enable
+ # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling)
+ # during tool use.
 parallel_tool_calls:,
+ # Details on the action required to continue the run. Will be `null` if no action
+ # is required.
 required_action:,
+ # Specifies the format that the model must output. Compatible with
+ # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
+ # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
+ # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
+ #
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ # Outputs which ensures the model will match your supplied JSON schema. Learn more
+ # in the
+ # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+ #
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
+ # message the model generates is valid JSON.
+ #
+ # **Important:** when using JSON mode, you **must** also instruct the model to
+ # produce JSON yourself via a system or user message. Without this, the model may
+ # generate an unending stream of whitespace until the generation reaches the token
+ # limit, resulting in a long-running and seemingly "stuck" request. Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
 response_format:,
+ # The Unix timestamp (in seconds) for when the run was started.
 started_at:,
+ # The status of the run, which can be either `queued`, `in_progress`,
+ # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`,
+ # `incomplete`, or `expired`.
 status:,
+ # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads)
+ # that was executed on as a part of this run.
 thread_id:,
+ # Controls which (if any) tool is called by the model. `none` means the model will
+ # not call any tools and instead generates a message. `auto` is the default value
+ # and means the model can pick between generating a message or calling one or more
+ # tools. `required` means the model must call one or more tools before responding
+ # to the user. Specifying a particular tool like `{"type": "file_search"}` or
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
 tool_choice:,
+ # The list of tools that the
+ # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for
+ # this run.
 tools:,
+ # Controls for how a thread will be truncated prior to the run. Use this to
+ # control the initial context window of the run.
truncation_strategy:, + # Usage statistics related to the run. This value will be `null` if the run is not + # in a terminal state (i.e. `in_progress`, `queued`, etc.). usage:, + # The sampling temperature used for this run. If not set, defaults to 1. temperature: nil, + # The nucleus sampling value used for this run. If not set, defaults to 1. top_p: nil, + # The object type, which is always `thread.run`. object: :"thread.run" ); end sig do @@ -380,8 +456,11 @@ module OpenAI params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason: nil); end - + def self.new( + # The reason why the run is incomplete. This will point to which specific token + # limit was reached over the course of the run. + reason: nil + ); end sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash; end @@ -418,8 +497,12 @@ module OpenAI params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:); end - + def self.new( + # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + code:, + # A human-readable description of the error. + message: + ); end sig do override .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) @@ -470,8 +553,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(submit_tool_outputs:, type: :submit_tool_outputs); end - + def self.new( + # Details on the tool outputs needed for this run to continue. + submit_tool_outputs:, + # For now, this is always `submit_tool_outputs`. + type: :submit_tool_outputs + ); end sig do override .returns( @@ -492,8 +579,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(tool_calls:); end - + def self.new( + # A list of the relevant tool calls. + tool_calls: + ); end sig { override.returns({tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]}) } def to_hash; end end @@ -521,8 +610,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, last_messages: nil); end - + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ); end sig do override .returns( @@ -576,8 +673,14 @@ module OpenAI total_tokens: Integer ).returns(T.attached_class) end - def self.new(completion_tokens:, prompt_tokens:, total_tokens:); end - + def self.new( + # Number of completion tokens used over the course of the run. + completion_tokens:, + # Number of prompt tokens used over the course of the run. + prompt_tokens:, + # Total number of tokens used (prompt + completion). 
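# Usage sketch: reading the usage block documented above once a run reaches a
# terminal state (`run` is assumed to be a retrieved run object; `usage` is
# `nil` while the run is still `queued` or `in_progress`).
#
#   if run.usage
#     puts "prompt=#{run.usage.prompt_tokens} " \
#          "completion=#{run.usage.completion_tokens} " \
#          "total=#{run.usage.total_tokens}"
#   end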
+ total_tokens: + ); end sig do override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) end diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index 3d4cc20a..9bee75b3 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -243,22 +243,107 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to + # execute this run. assistant_id:, + # A list of additional fields to include in the response. Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, + # Appends additional instructions at the end of the instructions for the run. This + # is useful for modifying the behavior on a per-run basis without overriding other + # instructions. additional_instructions: nil, + # Adds additional messages to the thread before creating the run. additional_messages: nil, + # Overrides the + # [instructions](https://platform.openai.com/docs/api-reference/assistants/createAssistant) + # of the assistant. This is useful for modifying the behavior on a per-run basis. instructions: nil, + # The maximum number of completion tokens that may be used over the course of the + # run. The run will make a best effort to use only the number of completion tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # completion tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_completion_tokens: nil, + # The maximum number of prompt tokens that may be used over the course of the run. + # The run will make a best effort to use only the number of prompt tokens + # specified, across multiple turns of the run. If the run exceeds the number of + # prompt tokens specified, the run will end with status `incomplete`. See + # `incomplete_details` for more info. max_prompt_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # be used to execute this run. If a value is provided here, it will override the + # model associated with the assistant. If not, the model associated with the + # assistant will be used. model: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. parallel_tool_calls: nil, + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. 
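# Usage sketch (configured `client`, placeholder IDs): a run created with the
# per-run overrides documented above; per those docs, the run ends `incomplete`
# if the prompt token budget is exceeded.
#
#   client.beta.threads.runs.create(
#     "thread_abc123",
#     assistant_id: "asst_abc123",
#     additional_instructions: "Answer in one short paragraph.",
#     max_prompt_tokens: 2000,
#     truncation_strategy: {type: :last_messages, last_messages: 10}
#   )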
 reasoning_effort: nil,
+ # Specifies the format that the model must output. Compatible with
+ # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o),
+ # [GPT-4 Turbo](https://platform.openai.com/docs/models#gpt-4-turbo-and-gpt-4),
+ # and all GPT-3.5 Turbo models since `gpt-3.5-turbo-1106`.
+ #
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ # Outputs which ensures the model will match your supplied JSON schema. Learn more
+ # in the
+ # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+ #
+ # Setting to `{ "type": "json_object" }` enables JSON mode, which ensures the
+ # message the model generates is valid JSON.
+ #
+ # **Important:** when using JSON mode, you **must** also instruct the model to
+ # produce JSON yourself via a system or user message. Without this, the model may
+ # generate an unending stream of whitespace until the generation reaches the token
+ # limit, resulting in a long-running and seemingly "stuck" request. Also note that
+ # the message content may be partially cut off if `finish_reason="length"`, which
+ # indicates the generation exceeded `max_tokens` or the conversation exceeded the
+ # max context length.
 response_format: nil,
+ # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic.
 temperature: nil,
+ # Controls which (if any) tool is called by the model. `none` means the model will
+ # not call any tools and instead generates a message. `auto` is the default value
+ # and means the model can pick between generating a message or calling one or more
+ # tools. `required` means the model must call one or more tools before responding
+ # to the user. Specifying a particular tool like `{"type": "file_search"}` or
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
 tool_choice: nil,
+ # Override the tools the assistant can use for this run. This is useful for
+ # modifying the behavior on a per-run basis.
 tools: nil,
+ # An alternative to sampling with temperature, called nucleus sampling, where the
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
+ #
+ # We generally recommend altering this or temperature but not both.
 top_p: nil,
+ # Controls for how a thread will be truncated prior to the run. Use this to
+ # control the initial context window of the run.
 truncation_strategy: nil,
 request_options: {}
 ); end
@@ -379,8 +464,26 @@
 )
 .returns(T.attached_class)
 end
- def self.new(content:, role:, attachments: nil, metadata: nil); end
-
+ def self.new(
+ # The text contents of the message.
+ content:,
+ # The role of the entity that is creating the message. Allowed values include:
+ #
+ # - `user`: Indicates the message is sent by an actual user and should be used in
+ # most cases to represent user-generated messages.
+ # - `assistant`: Indicates the message is generated by the assistant. Use this
+ # value to insert messages from the assistant into the conversation.
+ role:,
+ # A list of files attached to the message, and the tools they should be added to.
+ attachments: nil,
+ # Set of 16 key-value pairs that can be attached to an object.
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil + ); end sig do override .returns( @@ -506,8 +609,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id: nil, tools: nil); end - + def self.new( + # The ID of the file to attach to the message. + file_id: nil, + # The tools to add this file to. + tools: nil + ); end sig do override .returns( @@ -533,8 +640,10 @@ module OpenAI attr_accessor :type sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :file_search); end - + def self.new( + # The type of tool being defined: `file_search` + type: :file_search + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -583,8 +692,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(type:, last_messages: nil); end - + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index d38cd84b..24f6d425 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -54,8 +54,25 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index e347bb7b..af244e16 100644 --- a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -23,8 +23,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, tool_outputs:, request_options: {}); end - + def self.new( + thread_id:, + # A list of tools for which the outputs are being submitted. 
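# Usage sketch (configured `client`, placeholder IDs): submitting the outputs
# for the tool calls surfaced in `required_action`, per the shape documented
# above, assuming the path-then-keyword argument convention used elsewhere in
# this SDK.
#
#   client.beta.threads.runs.submit_tool_outputs(
#     "run_abc123",
#     thread_id: "thread_abc123",
#     tool_outputs: [
#       {tool_call_id: "call_abc123", output: "{\"temperature\": 57}"}
#     ]
#   )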
+ tool_outputs:, + request_options: {} + ); end sig do override .returns( @@ -54,8 +58,13 @@ module OpenAI attr_writer :tool_call_id sig { params(output: String, tool_call_id: String).returns(T.attached_class) } - def self.new(output: nil, tool_call_id: nil); end - + def self.new( + # The output of the tool call to be submitted to continue the run. + output: nil, + # The ID of the tool call in the `required_action` object within the run object + # the output is being submitted for. + tool_call_id: nil + ); end sig { override.returns({output: String, tool_call_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/lib/openai/models/beta/threads/run_update_params.rbi index 9c4e4bb2..175d4516 100644 --- a/rbi/lib/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_update_params.rbi @@ -28,8 +28,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, metadata: nil, request_options: {}); end - + def self.new( + thread_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi index a751e294..f0b834d2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -23,8 +23,14 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. sig { params(index: Integer, logs: String, type: Symbol).returns(T.attached_class) } - def self.new(index:, logs: nil, type: :logs); end - + def self.new( + # The index of the output in the outputs array. + index:, + # The text output from the Code Interpreter tool call. + logs: nil, + # Always `logs`. + type: :logs + ); end sig { override.returns({index: Integer, type: Symbol, logs: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 3cc75c96..9cadda46 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -33,8 +33,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, image: nil, type: :image); end - + def self.new( + # The index of the output in the outputs array. + index:, + image: nil, + # Always `image`. + type: :image + ); end sig do override .returns( @@ -53,8 +58,11 @@ module OpenAI attr_writer :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id: nil); end - + def self.new( + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the + # image. 
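# Usage sketch: walking Code Interpreter outputs on a tool-calls run step, using
# the `logs`/`image` output types documented above (`step` is assumed to be a
# retrieved run step whose `step_details.type` is `:tool_calls`).
#
#   step.step_details.tool_calls.each do |tool_call|
#     next unless tool_call.type == :code_interpreter
#     tool_call.code_interpreter.outputs.each do |output|
#       case output.type
#       when :logs then puts output.logs
#       when :image then puts "generated image file: #{output.image.file_id}"
#       end
#     end
#   end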
+ file_id: nil + ); end sig { override.returns({file_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 898db2f2..7ea07c11 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -42,8 +42,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, code_interpreter:, type: :code_interpreter); end - + def self.new( + # The ID of the tool call. + id:, + # The Code Interpreter tool call definition. + code_interpreter:, + # The type of tool call. This is always going to be `code_interpreter` for this + # type of tool call. + type: :code_interpreter + ); end sig do override .returns( @@ -90,8 +97,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, outputs:); end - + def self.new( + # The input to the Code Interpreter tool call. + input:, + # The outputs from the Code Interpreter tool call. Code Interpreter can output one + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. + outputs: + ); end sig do override .returns( @@ -123,8 +136,12 @@ module OpenAI # Text output from the Code Interpreter tool call as part of a run step. sig { params(logs: String, type: Symbol).returns(T.attached_class) } - def self.new(logs:, type: :logs); end - + def self.new( + # The text output from the Code Interpreter tool call. + logs:, + # Always `logs`. + type: :logs + ); end sig { override.returns({logs: String, type: Symbol}) } def to_hash; end end @@ -162,8 +179,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image:, type: :image); end - + def self.new( + image:, + # Always `image`. + type: :image + ); end sig do override .returns( @@ -182,8 +202,11 @@ module OpenAI attr_accessor :file_id sig { params(file_id: String).returns(T.attached_class) } - def self.new(file_id:); end - + def self.new( + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the + # image. + file_id: + ); end sig { override.returns({file_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 06162584..b5314dd2 100644 --- a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -50,8 +50,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, code_interpreter: nil, type: :code_interpreter); end - + def self.new( + # The index of the tool call in the tool calls array. + index:, + # The ID of the tool call. + id: nil, + # The Code Interpreter tool call definition. + code_interpreter: nil, + # The type of tool call. This is always going to be `code_interpreter` for this + # type of tool call. + type: :code_interpreter + ); end sig do override .returns( @@ -118,8 +127,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input: nil, outputs: nil); end - + def self.new( + # The input to the Code Interpreter tool call. + input: nil, + # The outputs from the Code Interpreter tool call. Code Interpreter can output one + # or more items, including text (`logs`) or images (`image`). Each of these are + # represented by a different object type. 
+ outputs: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index 77bfcf06..5fab3990 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -35,8 +35,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, file_search:, type: :file_search); end - + def self.new( + # The ID of the tool call object. + id:, + # For now, this is always going to be an empty object. + file_search:, + # The type of tool call. This is always going to be `file_search` for this type of + # tool call. + type: :file_search + ); end sig do override .returns( @@ -94,8 +101,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranking_options: nil, results: nil); end - + def self.new( + # The ranking options for the file search. + ranking_options: nil, + # The results of the file search. + results: nil + ); end sig do override .returns( @@ -130,8 +141,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranker:, score_threshold:); end - + def self.new( + # The ranker to use for the file search. If not specified will use the `auto` + # ranker. + ranker:, + # The score threshold for the file search. All values must be a floating point + # number between 0 and 1. + score_threshold: + ); end sig do override .returns( @@ -224,8 +241,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id:, file_name:, score:, content: nil); end - + def self.new( + # The ID of the file that result was found in. + file_id:, + # The name of the file that result was found in. + file_name:, + # The score of the result. All values must be a floating point number between 0 + # and 1. + score:, + # The content of the result that was found. The content is only included if + # requested via the include query parameter. + content: nil + ); end sig do override .returns( @@ -272,8 +299,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(text: nil, type: nil); end - + def self.new( + # The text content of the file. + text: nil, + # The type of the content. + type: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 80a2db89..5d6789be 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -34,8 +34,17 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(file_search:, index:, id: nil, type: :file_search); end - + def self.new( + # For now, this is always going to be an empty object. + file_search:, + # The index of the tool call in the tool calls array. + index:, + # The ID of the tool call object. + id: nil, + # The type of tool call. This is always going to be `file_search` for this type of + # tool call. 
+ type: :file_search + ); end sig { override.returns({file_search: T.anything, index: Integer, type: Symbol, id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi index 12d00e7e..b3e20062 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi @@ -35,8 +35,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function); end - + def self.new( + # The ID of the tool call object. + id:, + # The definition of the function that was called. + function:, + # The type of tool call. This is always going to be `function` for this type of + # tool call. + type: :function + ); end sig do override .returns( @@ -64,8 +71,16 @@ module OpenAI sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end - def self.new(arguments:, name:, output:); end - + def self.new( + # The arguments passed to the function. + arguments:, + # The name of the function. + name:, + # The output of the function. This will be `null` if the outputs have not been + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. + output: + ); end sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi index df5bcf34..3a495934 100644 --- a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -43,8 +43,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, function: nil, type: :function); end - + def self.new( + # The index of the tool call in the tool calls array. + index:, + # The ID of the tool call object. + id: nil, + # The definition of the function that was called. + function: nil, + # The type of tool call. This is always going to be `function` for this type of + # tool call. + type: :function + ); end sig do override .returns( @@ -83,8 +92,16 @@ module OpenAI sig do params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) end - def self.new(arguments: nil, name: nil, output: nil); end - + def self.new( + # The arguments passed to the function. + arguments: nil, + # The name of the function. + name: nil, + # The output of the function. This will be `null` if the outputs have not been + # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) + # yet. + output: nil + ); end sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi index 583a179d..d5358c72 100644 --- a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -35,8 +35,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(message_creation:, type: :message_creation); end - + def self.new( + message_creation:, + # Always `message_creation`. 
+ type: :message_creation + ); end sig do override .returns( @@ -54,8 +57,10 @@ module OpenAI attr_accessor :message_id sig { params(message_id: String).returns(T.attached_class) } - def self.new(message_id:); end - + def self.new( + # The ID of the message that was created by this run step. + message_id: + ); end sig { override.returns({message_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index db09f851..0e70020b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -133,21 +133,50 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The identifier of the run step, which can be referenced in API endpoints. id:, + # The ID of the + # [assistant](https://platform.openai.com/docs/api-reference/assistants) + # associated with the run step. assistant_id:, + # The Unix timestamp (in seconds) for when the run step was cancelled. cancelled_at:, + # The Unix timestamp (in seconds) for when the run step completed. completed_at:, + # The Unix timestamp (in seconds) for when the run step was created. created_at:, + # The Unix timestamp (in seconds) for when the run step expired. A step is + # considered expired if the parent run is expired. expired_at:, + # The Unix timestamp (in seconds) for when the run step failed. failed_at:, + # The last error associated with this run step. Will be `null` if there are no + # errors. last_error:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that + # this run step is a part of. run_id:, + # The status of the run step, which can be either `in_progress`, `cancelled`, + # `failed`, `completed`, or `expired`. status:, + # The details of the run step. step_details:, + # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) + # that was run. thread_id:, + # The type of run step, which can be either `message_creation` or `tool_calls`. type:, + # Usage statistics related to the run step. This value will be `null` while the + # run step's status is `in_progress`. usage:, + # The object type, which is always `thread.run.step`. object: :"thread.run.step" ); end sig do @@ -193,8 +222,12 @@ module OpenAI params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:); end - + def self.new( + # One of `server_error` or `rate_limit_exceeded`. + code:, + # A human-readable description of the error. + message: + ); end sig do override .returns( @@ -289,8 +322,14 @@ module OpenAI total_tokens: Integer ).returns(T.attached_class) end - def self.new(completion_tokens:, prompt_tokens:, total_tokens:); end - + def self.new( + # Number of completion tokens used over the course of the run step. + completion_tokens:, + # Number of prompt tokens used over the course of the run step. + prompt_tokens:, + # Total number of tokens used (prompt + completion). 
+ total_tokens: + ); end sig do override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi index 561b30d0..d151496b 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi @@ -42,8 +42,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(step_details: nil); end - + def self.new( + # The details of the run step. + step_details: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi index e1124389..a9a700a4 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -31,8 +31,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, delta:, object: :"thread.run.step.delta"); end - + def self.new( + # The identifier of the run step, which can be referenced in API endpoints. + id:, + # The delta containing the fields that have changed on the run step. + delta:, + # The object type, which is always `thread.run.step.delta`. + object: :"thread.run.step.delta" + ); end sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, object: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index 2b151753..efe29b30 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -35,8 +35,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(message_creation: nil, type: :message_creation); end - + def self.new( + message_creation: nil, + # Always `message_creation`. + type: :message_creation + ); end sig do override .returns( @@ -57,8 +60,10 @@ module OpenAI attr_writer :message_id sig { params(message_id: String).returns(T.attached_class) } - def self.new(message_id: nil); end - + def self.new( + # The ID of the message that was created by this run step. + message_id: nil + ); end sig { override.returns({message_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index 9f8536b3..bda4f8f6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -75,15 +75,32 @@ module OpenAI end def self.new( thread_id:, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, + # A list of additional fields to include in the response. 
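# Usage sketch (configured `client`, placeholder IDs): listing run steps with
# the cursor parameters documented above, and opting in to file search result
# content via `include`.
#
#   client.beta.threads.runs.steps.list(
#     "run_abc123",
#     thread_id: "thread_abc123",
#     limit: 20,
#     order: :asc,
#     include: ["step_details.tool_calls[*].file_search.results[*].content"]
#   )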
Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. include: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. order: nil, request_options: {} - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi index c4b7c54d..a1c6e7c5 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -37,8 +37,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(thread_id:, run_id:, include: nil, request_options: {}); end - + def self.new( + thread_id:, + run_id:, + # A list of additional fields to include in the response. Currently the only + # supported value is `step_details.tool_calls[*].file_search.results[*].content` + # to fetch the file search result content. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + include: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 796b549d..ea795b43 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -58,8 +58,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(tool_calls: nil, type: :tool_calls); end - + def self.new( + # An array of tool calls the run step was involved in. These can be associated + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. + tool_calls: nil, + # Always `tool_calls`. + type: :tool_calls + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 34fdc65c..a5240f76 100644 --- a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -41,8 +41,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(tool_calls:, type: :tool_calls); end - + def self.new( + # An array of tool calls the run step was involved in. These can be associated + # with one of three types of tools: `code_interpreter`, `file_search`, or + # `function`. + tool_calls:, + # Always `tool_calls`. + type: :tool_calls + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/lib/openai/models/beta/threads/text.rbi index 0c5b65ab..64eb1b67 100644 --- a/rbi/lib/openai/models/beta/threads/text.rbi +++ b/rbi/lib/openai/models/beta/threads/text.rbi @@ -34,8 +34,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(annotations:, value:); end - + def self.new( + annotations:, + # The data that makes up the text. 
+ value: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/lib/openai/models/beta/threads/text_content_block.rbi index 60e48d2f..c1c5eb8c 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block.rbi @@ -20,8 +20,11 @@ module OpenAI params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(text:, type: :text); end - + def self.new( + text:, + # Always `text`. + type: :text + ); end sig { override.returns({text: OpenAI::Models::Beta::Threads::Text, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi index ecf224cb..20626187 100644 --- a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi @@ -15,8 +15,12 @@ module OpenAI # The text content that is part of a message. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text); end - + def self.new( + # Text content to be sent to the model + text:, + # Always `text`. + type: :text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/lib/openai/models/beta/threads/text_delta.rbi index 7776fa38..8d384acc 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta.rbi @@ -53,8 +53,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(annotations: nil, value: nil); end - + def self.new( + annotations: nil, + # The data that makes up the text. + value: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi index d22fc8db..e054d1d2 100644 --- a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/lib/openai/models/beta/threads/text_delta_block.rbi @@ -28,8 +28,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, text: nil, type: :text); end - + def self.new( + # The index of the content part in the message. + index:, + text: nil, + # Always `text`. + type: :text + ); end sig { override.returns({index: Integer, type: Symbol, text: OpenAI::Models::Beta::Threads::TextDelta}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index 43d751bd..dbf3c910 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -78,13 +78,41 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A unique identifier for the chat completion. id:, + # A list of chat completion choices. Can be more than one if `n` is greater + # than 1. choices:, + # The Unix timestamp (in seconds) of when the chat completion was created. created:, + # The model used for the chat completion. model:, + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. 
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
service_tier: nil,
+ # This fingerprint represents the backend configuration that the model runs with.
+ #
+ # Can be used in conjunction with the `seed` request parameter to understand when
+ # backend changes have been made that might impact determinism.
system_fingerprint: nil,
+ # Usage statistics for the completion request.
usage: nil,
+ # The object type, which is always `chat.completion`.
object: :"chat.completion"
); end
sig do
@@ -146,8 +174,21 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(finish_reason:, index:, logprobs:, message:); end
-
+ def self.new(
+ # The reason the model stopped generating tokens. This will be `stop` if the model
+ # hit a natural stop point or a provided stop sequence, `length` if the maximum
+ # number of tokens specified in the request was reached, `content_filter` if
+ # content was omitted due to a flag from our content filters, `tool_calls` if the
+ # model called a tool, or `function_call` (deprecated) if the model called a
+ # function.
+ finish_reason:,
+ # The index of the choice in the list of choices.
+ index:,
+ # Log probability information for the choice.
+ logprobs:,
+ # A chat completion message generated by the model.
+ message:
+ ); end
sig do
override
.returns(
@@ -202,8 +243,12 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(content:, refusal:); end
-
+ def self.new(
+ # A list of message content tokens with log probability information.
+ content:,
+ # A list of message refusal tokens with log probability information.
+ refusal:
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
index 436f9a60..a9321c4b 100644
--- a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi
@@ -110,16 +110,25 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # Data about a previous audio response from the model.
+ # [Learn more](https://platform.openai.com/docs/guides/audio).
audio: nil,
+ # The contents of the assistant message. Required unless `tool_calls` or
+ # `function_call` is specified.
content: nil,
+ # Deprecated and replaced by `tool_calls`. The name and arguments of a function
+ # that should be called, as generated by the model.
function_call: nil,
+ # An optional name for the participant. Provides the model information to
+ # differentiate between participants of the same role.
name: nil,
+ # The refusal message by the assistant.
refusal: nil,
+ # The tool calls generated by the model, such as function calls.
tool_calls: nil,
+ # The role of the messages author, in this case `assistant`.
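+ #
+ # A minimal construction sketch (values invented; the class is the one defined
+ # in this file, and `role` falls back to `:assistant` per the default below):
+ #
+ #   msg = OpenAI::Models::Chat::ChatCompletionAssistantMessageParam.new(
+ #     content: "The answer is 42.",
+ #     name: "math_helper"
+ #   )
+ #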
role: :assistant
- )
- end
-
+ ); end
sig do
override
.returns(
@@ -154,8 +163,10 @@ module OpenAI
# Data about a previous audio response from the model.
# [Learn more](https://platform.openai.com/docs/guides/audio).
sig { params(id: String).returns(T.attached_class) }
- def self.new(id:); end
-
+ def self.new(
+ # Unique identifier for a previous audio response from the model.
+ id:
+ ); end
sig { override.returns({id: String}) }
def to_hash; end
end
@@ -217,8 +228,15 @@ module OpenAI
# Deprecated and replaced by `tool_calls`. The name and arguments of a function
# that should be called, as generated by the model.
sig { params(arguments: String, name: String).returns(T.attached_class) }
- def self.new(arguments:, name:); end
-
+ def self.new(
+ # The arguments to call the function with, as generated by the model in JSON
+ # format. Note that the model does not always generate valid JSON, and may
+ # hallucinate parameters not defined by your function schema. Validate the
+ # arguments in your code before calling your function.
+ arguments:,
+ # The name of the function to call.
+ name:
+ ); end
sig { override.returns({arguments: String, name: String}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_audio.rbi
index 77f2a8a9..3a562bd2 100644
--- a/rbi/lib/openai/models/chat/chat_completion_audio.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_audio.rbi
@@ -28,8 +28,18 @@ module OpenAI
sig do
params(id: String, data: String, expires_at: Integer, transcript: String).returns(T.attached_class)
end
- def self.new(id:, data:, expires_at:, transcript:); end
-
+ def self.new(
+ # Unique identifier for this audio response.
+ id:,
+ # Base64 encoded audio bytes generated by the model, in the format specified in
+ # the request.
+ data:,
+ # The Unix timestamp (in seconds) for when this audio response will no longer be
+ # accessible on the server for use in multi-turn conversations.
+ expires_at:,
+ # Transcript of the audio generated by the model.
+ transcript:
+ ); end
sig { override.returns({id: String, data: String, expires_at: Integer, transcript: String}) }
def to_hash; end
end
diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
index 231ca7da..f8798fa0 100644
--- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi
@@ -24,8 +24,14 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(format_:, voice:); end
-
+ def self.new(
+ # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`,
+ # or `pcm16`.
+ format_:,
+ # The voice the model uses to respond. Supported voices are `alloy`, `ash`,
+ # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`.
+ voice:
+ ); end
sig do
override
.returns(
diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
index 8c755f8f..c4d28509 100644
--- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
+++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi
@@ -86,13 +86,48 @@ module OpenAI
.returns(T.attached_class)
end
def self.new(
+ # A unique identifier for the chat completion. Each chunk has the same ID.
id:,
+ # A list of chat completion choices. Can contain more than one element if `n` is
+ # greater than 1. Can also be empty for the last chunk if you set
+ # `stream_options: {"include_usage": true}`.
choices:,
+ # The Unix timestamp (in seconds) of when the chat completion was created. Each
+ # chunk has the same timestamp.
created:,
+ # The model used to generate the completion.
model:,
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
service_tier: nil,
+ # This fingerprint represents the backend configuration that the model runs with.
+ # Can be used in conjunction with the `seed` request parameter to understand when
+ # backend changes have been made that might impact determinism.
system_fingerprint: nil,
+ # An optional field that will only be present when you set
+ # `stream_options: {"include_usage": true}` in your request. When present, it
+ # contains a null value **except for the last chunk** which contains the token
+ # usage statistics for the entire request.
+ #
+ # **NOTE:** If the stream is interrupted or cancelled, you may not receive the
+ # final usage chunk which contains the total token usage for the request.
usage: nil,
+ # The object type, which is always `chat.completion.chunk`.
object: :"chat.completion.chunk"
); end
sig do
@@ -157,8 +192,21 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(delta:, finish_reason:, index:, logprobs: nil); end
-
+ def self.new(
+ # A chat completion delta generated by streamed model responses.
+ delta:,
+ # The reason the model stopped generating tokens. This will be `stop` if the model
+ # hit a natural stop point or a provided stop sequence, `length` if the maximum
+ # number of tokens specified in the request was reached, `content_filter` if
+ # content was omitted due to a flag from our content filters, `tool_calls` if the
+ # model called a tool, or `function_call` (deprecated) if the model called a
+ # function.
+ finish_reason:,
+ # The index of the choice in the list of choices.
+ index:,
+ # Log probability information for the choice.
+ logprobs: nil
+ ); end
sig do
override
.returns(
@@ -223,8 +271,18 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil); end
-
+ def self.new(
+ # The contents of the chunk message.
+ content: nil,
+ # Deprecated and replaced by `tool_calls`. The name and arguments of a function
+ # that should be called, as generated by the model.
+ function_call: nil,
+ # The refusal message generated by the model.
+ refusal: nil,
+ # The role of the author of this message.
+ role: nil,
+ tool_calls: nil
+ ); end
sig do
override
.returns(
@@ -260,8 +318,15 @@ module OpenAI
# Deprecated and replaced by `tool_calls`.
The name and arguments of a function # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments: nil, name: nil); end - + def self.new( + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + arguments: nil, + # The name of the function to call. + name: nil + ); end sig { override.returns({arguments: String, name: String}) } def to_hash; end end @@ -330,8 +395,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(index:, id: nil, function: nil, type: nil); end - + def self.new( + index:, + # The ID of the tool call. + id: nil, + function: nil, + # The type of the tool. Currently, only `function` is supported. + type: nil + ); end sig do override .returns( @@ -364,8 +435,15 @@ module OpenAI attr_writer :name sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments: nil, name: nil); end - + def self.new( + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + arguments: nil, + # The name of the function to call. + name: nil + ); end sig { override.returns({arguments: String, name: String}) } def to_hash; end end @@ -433,8 +511,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, refusal:); end - + def self.new( + # A list of message content tokens with log probability information. + content:, + # A list of message refusal tokens with log probability information. + refusal: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi index ba9fd403..d380d60e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part.rbi @@ -33,8 +33,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file:, type: :file); end - + def self.new( + file:, + # The type of the content part. Always `file`. + type: :file + ); end sig { override.returns({file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, type: Symbol}) } def to_hash; end @@ -62,8 +65,15 @@ module OpenAI attr_writer :filename sig { params(file_data: String, file_id: String, filename: String).returns(T.attached_class) } - def self.new(file_data: nil, file_id: nil, filename: nil); end - + def self.new( + # The base64 encoded file data, used when passing the file to the model as a + # string. + file_data: nil, + # The ID of an uploaded file to use as input. + file_id: nil, + # The name of the file, used when passing the file to the model as a string. 
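+ #
+ # A hedged sketch of building an inline file part (path and payload invented;
+ # `Base64` is from the Ruby standard library):
+ #
+ #   require "base64"
+ #
+ #   file_part = OpenAI::Models::Chat::ChatCompletionContentPart::File::File.new(
+ #     file_data: Base64.strict_encode64(File.binread("report.pdf")),
+ #     filename: "report.pdf"
+ #   )
+ #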
+ filename: nil + ); end sig { override.returns({file_data: String, file_id: String, filename: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index e28956c2..8d2a8ddd 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -27,8 +27,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_url:, type: :image_url); end - + def self.new( + image_url:, + # The type of the content part. + type: :image_url + ); end sig do override.returns({image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, type: Symbol}) end @@ -54,8 +57,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(url:, detail: nil); end - + def self.new( + # Either a URL of the image or the base64 encoded image data. + url:, + # Specifies the detail level of the image. Learn more in the + # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). + detail: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index 9e55ccdf..34e7e778 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -27,8 +27,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input_audio:, type: :input_audio); end - + def self.new( + input_audio:, + # The type of the content part. Always `input_audio`. + type: :input_audio + ); end sig do override .returns( @@ -53,8 +56,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, format_:); end - + def self.new( + # Base64 encoded audio data. + data:, + # The format of the encoded audio data. Currently supports "wav" and "mp3". + format_: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi index 6aedee59..8088fdc0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -13,8 +13,12 @@ module OpenAI attr_accessor :type sig { params(refusal: String, type: Symbol).returns(T.attached_class) } - def self.new(refusal:, type: :refusal); end - + def self.new( + # The refusal message generated by the model. + refusal:, + # The type of the content part. + type: :refusal + ); end sig { override.returns({refusal: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi index ee4dcfaf..8fecc89b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi @@ -15,8 +15,12 @@ module OpenAI # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text); end - + def self.new( + # The text content. + text:, + # The type of the content part. 
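+ #
+ # For example (text invented; `type` falls back to `:text` per the default
+ # below):
+ #
+ #   part = OpenAI::Models::Chat::ChatCompletionContentPartText.new(text: "Hello!")
+ #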
+ type: :text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi index 0c496584..e5083b94 100644 --- a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_deleted.rbi @@ -17,8 +17,14 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"chat.completion.deleted"); end - + def self.new( + # The ID of the chat completion that was deleted. + id:, + # Whether the chat completion was deleted. + deleted:, + # The type of object being deleted. + object: :"chat.completion.deleted" + ); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi index 6e56c125..ea4a6d05 100644 --- a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi @@ -34,8 +34,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :developer); end - + def self.new( + # The contents of the developer message. + content:, + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. + name: nil, + # The role of the messages author, in this case `developer`. + role: :developer + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi index 5dcea933..93220f0f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi @@ -11,8 +11,10 @@ module OpenAI # Specifying a particular function via `{"name": "my_function"}` forces the model # to call that function. sig { params(name: String).returns(T.attached_class) } - def self.new(name:); end - + def self.new( + # The name of the function to call. + name: + ); end sig { override.returns({name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi index 0b1e82e1..dd32f6f7 100644 --- a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi @@ -17,8 +17,14 @@ module OpenAI attr_accessor :role sig { params(content: T.nilable(String), name: String, role: Symbol).returns(T.attached_class) } - def self.new(content:, name:, role: :function); end - + def self.new( + # The contents of the function message. + content:, + # The name of the function to call. + name:, + # The role of the messages author, in this case `function`. 
+ role: :function + ); end sig { override.returns({content: T.nilable(String), name: String, role: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/lib/openai/models/chat/chat_completion_message.rbi index f70217f5..dcffce2a 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message.rbi @@ -77,16 +77,25 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The contents of the message. content:, + # The refusal message generated by the model. refusal:, + # Annotations for the message, when applicable, as when using the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). annotations: nil, + # If the audio output modality is requested, this object contains data about the + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. function_call: nil, + # The tool calls generated by the model, such as function calls. tool_calls: nil, + # The role of the author of this message. role: :assistant - ) - end - + ); end sig do override .returns( @@ -128,8 +137,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(url_citation:, type: :url_citation); end - + def self.new( + # A URL citation when using web search. + url_citation:, + # The type of the URL citation. Always `url_citation`. + type: :url_citation + ); end sig do override .returns( @@ -164,8 +177,16 @@ module OpenAI url: String ).returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:); end - + def self.new( + # The index of the last character of the URL citation in the message. + end_index:, + # The index of the first character of the URL citation in the message. + start_index:, + # The title of the web resource. + title:, + # The URL of the web resource. + url: + ); end sig { override.returns({end_index: Integer, start_index: Integer, title: String, url: String}) } def to_hash; end end @@ -186,8 +207,15 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:); end - + def self.new( + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + arguments:, + # The name of the function to call. + name: + ); end sig { override.returns({arguments: String, name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi index 85b1dc6e..77013f68 100644 --- a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi @@ -32,8 +32,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, function:, type: :function); end - + def self.new( + # The ID of the tool call. + id:, + # The function that the model called. + function:, + # The type of the tool. Currently, only `function` is supported. 
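+ #
+ # A sketch of decoding a tool call's JSON arguments (values invented; the
+ # nested `Function` class is the one defined below, and `JSON` is from the
+ # standard library):
+ #
+ #   require "json"
+ #
+ #   fn = OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function.new(
+ #     name: "get_weather",
+ #     arguments: "{\"city\":\"Paris\"}"
+ #   )
+ #   JSON.parse(fn.arguments) # => {"city"=>"Paris"}
+ #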
+ type: :function + ); end sig do override .returns( @@ -56,8 +62,15 @@ module OpenAI # The function that the model called. sig { params(arguments: String, name: String).returns(T.attached_class) } - def self.new(arguments:, name:); end - + def self.new( + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + arguments:, + # The name of the function to call. + name: + ); end sig { override.returns({arguments: String, name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi index 041eea0a..f7ef280e 100644 --- a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -28,8 +28,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(function:, type: :function); end - + def self.new( + function:, + # The type of the tool. Currently, only `function` is supported. + type: :function + ); end sig { override.returns({function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, type: Symbol}) } def to_hash; end @@ -39,8 +42,10 @@ module OpenAI attr_accessor :name sig { params(name: String).returns(T.attached_class) } - def self.new(name:); end - + def self.new( + # The name of the function to call. + name: + ); end sig { override.returns({name: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi index a993e33a..93fab93c 100644 --- a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi @@ -27,8 +27,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :content); end - + def self.new( + # The content that should be matched when generating a model response. If + # generated tokens would match this content, the entire model response can be + # returned much more quickly. + content:, + # The type of the predicted content you want to provide. This type is currently + # always `content`. + type: :content + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi index 67a32515..5af6dfc9 100644 --- a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_store_message.rbi @@ -10,8 +10,10 @@ module OpenAI # A chat completion message generated by the model. sig { params(id: String).returns(T.attached_class) } - def self.new(id:); end - + def self.new( + # The identifier of the chat message. + id: + ); end sig { override.returns({id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi index 771ab84e..08fdb738 100644 --- a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi @@ -19,8 +19,16 @@ module OpenAI # Options for streaming response. Only set this when you set `stream: true`. 
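#
# A minimal sketch (only the option object is shown; the surrounding streaming
# request is assumed and not part of this class):
#
#   opts = OpenAI::Models::Chat::ChatCompletionStreamOptions.new(include_usage: true)
#   opts.include_usage # => true
#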
sig { params(include_usage: T::Boolean).returns(T.attached_class) } - def self.new(include_usage: nil); end - + def self.new( + # If set, an additional chunk will be streamed before the `data: [DONE]` message. + # The `usage` field on this chunk shows the token usage statistics for the entire + # request, and the `choices` field will always be an empty array. + # + # All other chunks will also include a `usage` field, but with a null value. + # **NOTE:** If the stream is interrupted, you may not receive the final usage + # chunk which contains the total token usage for the request. + include_usage: nil + ); end sig { override.returns({include_usage: T::Boolean}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi index b21a46af..aeac775d 100644 --- a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi @@ -34,8 +34,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :system); end - + def self.new( + # The contents of the system message. + content:, + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. + name: nil, + # The role of the messages author, in this case `system`. + role: :system + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi index 0f4594a5..00a79a48 100644 --- a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi @@ -36,8 +36,23 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(token:, bytes:, logprob:, top_logprobs:); end - + def self.new( + # The token. + token:, + # A list of integers representing the UTF-8 bytes representation of the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. + bytes:, + # The log probability of this token, if it is within the top 20 most likely + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. + logprob:, + # List of the most likely tokens and their log probability, at this token + # position. In rare cases, there may be fewer than the number of requested + # `top_logprobs` returned. + top_logprobs: + ); end sig do override .returns( @@ -76,8 +91,19 @@ module OpenAI logprob: Float ).returns(T.attached_class) end - def self.new(token:, bytes:, logprob:); end - + def self.new( + # The token. + token:, + # A list of integers representing the UTF-8 bytes representation of the token. + # Useful in instances where characters are represented by multiple tokens and + # their byte representations must be combined to generate the correct text + # representation. Can be `null` if there is no bytes representation for the token. + bytes:, + # The log probability of this token, if it is within the top 20 most likely + # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very + # unlikely. 
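+ #
+ # Values are natural-log probabilities, so `Math.exp` recovers the probability
+ # (numbers invented):
+ #
+ #   Math.exp(-0.105)  # => ~0.90, i.e. roughly a 90% token probability
+ #   Math.exp(-9999.0) # => 0.0 for all practical purposes
+ #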
+ logprob: + ); end sig { override.returns({token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/lib/openai/models/chat/chat_completion_tool.rbi index e6559aed..0b898367 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool.rbi @@ -18,8 +18,11 @@ module OpenAI params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(function:, type: :function); end - + def self.new( + function:, + # The type of the tool. Currently, only `function` is supported. + type: :function + ); end sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi index 3e792e43..b0ed167b 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi @@ -27,8 +27,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, tool_call_id:, role: :tool); end - + def self.new( + # The contents of the tool message. + content:, + # Tool call that this message is responding to. + tool_call_id:, + # The role of the messages author, in this case `tool`. + role: :tool + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi index 24c806a5..197c73d0 100644 --- a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi @@ -55,8 +55,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, name: nil, role: :user); end - + def self.new( + # The contents of the user message. + content:, + # An optional name for the participant. Provides the model information to + # differentiate between participants of the same role. + name: nil, + # The role of the messages author, in this case `user`. + role: :user + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 6e3970fc..2f954588 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -440,35 +440,193 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A list of messages comprising the conversation so far. Depending on the + # [model](https://platform.openai.com/docs/models) you use, different message + # types (modalities) are supported, like + # [text](https://platform.openai.com/docs/guides/text-generation), + # [images](https://platform.openai.com/docs/guides/vision), and + # [audio](https://platform.openai.com/docs/guides/audio). messages:, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. 
+ # [Learn more](https://platform.openai.com/docs/guides/audio). audio: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. frequency_penalty: nil, + # Deprecated in favor of `tool_choice`. + # + # Controls which (if any) function is called by the model. + # + # `none` means the model will not call a function and instead generates a message. + # + # `auto` means the model can pick between generating a message or calling a + # function. + # + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # `none` is the default when no functions are present. `auto` is the default if + # functions are present. function_call: nil, + # Deprecated in favor of `tools`. + # + # A list of functions the model may generate JSON inputs for. functions: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the + # tokenizer) to an associated bias value from -100 to 100. Mathematically, the + # bias is added to the logits generated by the model prior to sampling. The exact + # effect will vary per model, but values between -1 and 1 should decrease or + # increase likelihood of selection; values like -100 or 100 should result in a ban + # or exclusive selection of the relevant token. logit_bias: nil, + # Whether to return log probabilities of the output tokens or not. If true, + # returns the log probabilities of each output token returned in the `content` of + # `message`. logprobs: nil, + # An upper bound for the number of tokens that can be generated for a completion, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_completion_tokens: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the chat + # completion. This value can be used to control + # [costs](https://openai.com/api/pricing/) for text generated via API. + # + # This value is now deprecated in favor of `max_completion_tokens`, and is not + # compatible with + # [o-series models](https://platform.openai.com/docs/guides/reasoning). max_tokens: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # Output types that you would like the model to generate. Most models are capable + # of generating text, which is the default: + # + # `["text"]` + # + # The `gpt-4o-audio-preview` model can also be used to + # [generate audio](https://platform.openai.com/docs/guides/audio). To request that + # this model generate both text and audio responses, you can use: + # + # `["text", "audio"]` modalities: nil, + # How many chat completion choices to generate for each input message. Note that + # you will be charged based on the number of generated tokens across all of the + # choices. Keep `n` as `1` to minimize costs. n: nil, + # Whether to enable + # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) + # during tool use. 
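+ #
+ # For example (a hypothetical sketch: `client` and `weather_tool` are
+ # placeholders, and the call shape is assumed from this SDK's generated
+ # interface):
+ #
+ #   client.chat.completions.create(
+ #     model: "gpt-4o",
+ #     messages: [{role: :user, content: "Weather in Paris and in Berlin?"}],
+ #     tools: [weather_tool],
+ #     parallel_tool_calls: false # at most one tool call per model turn
+ #   )
+ #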
parallel_tool_calls: nil,
+ # Static predicted output content, such as the content of a text file that is
+ # being regenerated.
prediction: nil,
+ # Number between -2.0 and 2.0. Positive values penalize new tokens based on
+ # whether they appear in the text so far, increasing the model's likelihood to
+ # talk about new topics.
presence_penalty: nil,
+ # **o-series models only**
+ #
+ # Constrains effort on reasoning for
+ # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently
+ # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can
+ # result in faster responses and fewer tokens used on reasoning in a response.
reasoning_effort: nil,
+ # An object specifying the format that the model must output.
+ #
+ # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured
+ # Outputs which ensures the model will match your supplied JSON schema. Learn more
+ # in the
+ # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs).
+ #
+ # Setting to `{ "type": "json_object" }` enables the older JSON mode, which
+ # ensures the message the model generates is valid JSON. Using `json_schema` is
+ # preferred for models that support it.
response_format: nil,
+ # This feature is in Beta. If specified, our system will make a best effort to
+ # sample deterministically, such that repeated requests with the same `seed` and
+ # parameters should return the same result. Determinism is not guaranteed, and you
+ # should refer to the `system_fingerprint` response parameter to monitor changes
+ # in the backend.
seed: nil,
+ # Specifies the latency tier to use for processing the request. This parameter is
+ # relevant for customers subscribed to the scale tier service:
+ #
+ # - If set to 'auto', and the Project is Scale tier enabled, the system will
+ # utilize scale tier credits until they are exhausted.
+ # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+ # be processed using the default service tier with a lower uptime SLA and no
+ # latency guarantee.
+ # - If set to 'default', the request will be processed using the default service
+ # tier with a lower uptime SLA and no latency guarantee.
+ # - If set to 'flex', the request will be processed with the Flex Processing
+ # service tier.
+ # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - When not set, the default behavior is 'auto'.
+ #
+ # When this parameter is set, the response body will include the `service_tier`
+ # utilized.
service_tier: nil,
+ # Not supported with latest reasoning models `o3` and `o4-mini`.
+ #
+ # Up to 4 sequences where the API will stop generating further tokens. The
+ # returned text will not contain the stop sequence.
stop: nil,
+ # Whether or not to store the output of this chat completion request for use in
+ # our [model distillation](https://platform.openai.com/docs/guides/distillation)
+ # or [evals](https://platform.openai.com/docs/guides/evals) products.
store: nil,
+ # Options for streaming response. Only set this when you set `stream: true`.
stream_options: nil,
+ # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+ # make the output more random, while lower values like 0.2 will make it more
+ # focused and deterministic. We generally recommend altering this or `top_p` but
+ # not both.
temperature: nil,
+ # Controls which (if any) tool is called by the model.
`none` means the model will
+ # not call any tool and instead generates a message. `auto` means the model can
+ # pick between generating a message or calling one or more tools. `required` means
+ # the model must call one or more tools. Specifying a particular tool via
+ # `{"type": "function", "function": {"name": "my_function"}}` forces the model to
+ # call that tool.
+ #
+ # `none` is the default when no tools are present. `auto` is the default if tools
+ # are present.
tool_choice: nil,
+ # A list of tools the model may call. Currently, only functions are supported as a
+ # tool. Use this to provide a list of functions the model may generate JSON inputs
+ # for. A max of 128 functions are supported.
tools: nil,
+ # An integer between 0 and 20 specifying the number of most likely tokens to
+ # return at each token position, each with an associated log probability.
+ # `logprobs` must be set to `true` if this parameter is used.
top_logprobs: nil,
+ # An alternative to sampling with temperature, called nucleus sampling, where the
+ # model considers the results of the tokens with top_p probability mass. So 0.1
+ # means only the tokens comprising the top 10% probability mass are considered.
+ #
+ # We generally recommend altering this or `temperature` but not both.
top_p: nil,
+ # A unique identifier representing your end-user, which can help OpenAI to monitor
+ # and detect abuse.
+ # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids).
user: nil,
+ # This tool searches the web for relevant results to use in a response. Learn more
+ # about the
+ # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat).
web_search_options: nil,
request_options: {}
); end
@@ -624,8 +782,22 @@ module OpenAI
params(name: String, description: String, parameters: T::Hash[Symbol, T.anything])
.returns(T.attached_class)
end
- def self.new(name:, description: nil, parameters: nil); end
-
+ def self.new(
+ # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
+ # underscores and dashes, with a maximum length of 64.
+ name:,
+ # A description of what the function does, used by the model to choose when and
+ # how to call the function.
+ description: nil,
+ # The parameters the function accepts, described as a JSON Schema object. See the
+ # [guide](https://platform.openai.com/docs/guides/function-calling) for examples,
+ # and the
+ # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
+ # documentation about the format.
+ #
+ # Omitting `parameters` defines a function with an empty parameter list.
+ parameters: nil
+ ); end
sig do
override.returns({name: String, description: String, parameters: T::Hash[Symbol, T.anything]})
end
@@ -761,8 +933,13 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(search_context_size: nil, user_location: nil); end
-
+ def self.new(
+ # High level guidance for the amount of context window space to use for the
+ # search. One of `low`, `medium`, or `high`. `medium` is the default.
+ search_context_size: nil,
+ # Approximate location parameters for the search.
+ user_location: nil
+ ); end
sig do
override
.returns(
@@ -839,8 +1016,12 @@ module OpenAI
)
.returns(T.attached_class)
end
- def self.new(approximate:, type: :approximate); end
-
+ def self.new(
+ # Approximate location parameters for the search.
+ approximate:,
+ # The type of location approximation. Always `approximate`.
+ type: :approximate + ); end sig do override .returns( @@ -892,8 +1073,18 @@ module OpenAI timezone: String ).returns(T.attached_class) end - def self.new(city: nil, country: nil, region: nil, timezone: nil); end - + def self.new( + # Free text input for the city of the user, e.g. `San Francisco`. + city: nil, + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. + country: nil, + # Free text input for the region of the user, e.g. `California`. + region: nil, + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. + timezone: nil + ); end sig { override.returns({city: String, country: String, region: String, timezone: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index f83e4b89..2574d2d6 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -53,8 +53,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}); end - + def self.new( + # Identifier for the last chat completion from the previous pagination request. + after: nil, + # Number of Chat Completions to retrieve. + limit: nil, + # A list of metadata keys to filter the Chat Completions by. Example: + # + # `metadata[key1]=value1&metadata[key2]=value2` + metadata: nil, + # The model used to generate the Chat Completions. + model: nil, + # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. + order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/lib/openai/models/chat/completion_update_params.rbi index b85b54da..48669c83 100644 --- a/rbi/lib/openai/models/chat/completion_update_params.rbi +++ b/rbi/lib/openai/models/chat/completion_update_params.rbi @@ -23,8 +23,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(metadata:, request_options: {}); end - + def self.new( + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + request_options: {} + ); end sig do override.returns( { diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index a661288c..aa957ac1 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -39,8 +39,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # Identifier for the last message from the previous pagination request. + after: nil, + # Number of messages to retrieve. + limit: nil, + # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` + # for descending order. Defaults to `asc`. 
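+ #
+ # A hypothetical paging sketch (identifiers invented; the call shape is
+ # assumed from this SDK's generated interface):
+ #
+ #   page = client.chat.completions.messages.list(
+ #     "chatcmpl-abc123",
+ #     limit: 50,
+ #     order: :asc
+ #   )
+ #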
+ order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 19f90169..43b5c434 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -33,8 +33,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(key:, type:, value:); end - + def self.new( + # The key to compare against the value. + key:, + # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. + # + # - `eq`: equals + # - `ne`: not equal + # - `gt`: greater than + # - `gte`: greater than or equal + # - `lt`: less than + # - `lte`: less than or equal + type:, + # The value to compare against the attribute key; supports string, number, or + # boolean types. + value: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/lib/openai/models/completion.rbi index aa47ab99..4cc82449 100644 --- a/rbi/lib/openai/models/completion.rbi +++ b/rbi/lib/openai/models/completion.rbi @@ -55,16 +55,24 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A unique identifier for the completion. id:, + # The list of completion choices the model generated for the input prompt. choices:, + # The Unix timestamp (in seconds) of when the completion was created. created:, + # The model used for completion. model:, + # This fingerprint represents the backend configuration that the model runs with. + # + # Can be used in conjunction with the `seed` request parameter to understand when + # backend changes have been made that might impact determinism. system_fingerprint: nil, + # Usage statistics for the completion request. usage: nil, + # The object type, which is always "text_completion" object: :text_completion - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 4731b8da..5eaba069 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -34,8 +34,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(finish_reason:, index:, logprobs:, text:); end - + def self.new( + # The reason the model stopped generating tokens. This will be `stop` if the model + # hit a natural stop point or a provided stop sequence, `length` if the maximum + # number of tokens specified in the request was reached, or `content_filter` if + # content was omitted due to a flag from our content filters. + finish_reason:, + index:, + logprobs:, + text: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index a0956081..058662fd 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -182,22 +182,110 @@ module OpenAI .returns(T.attached_class) end def self.new( + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. model:, + # The prompt(s) to generate completions for, encoded as a string, array of + # strings, array of tokens, or array of token arrays. 
+ # + # Note that <|endoftext|> is the document separator that the model sees during + # training, so if a prompt is not specified the model will generate as if from the + # beginning of a new document. prompt:, + # Generates `best_of` completions server-side and returns the "best" (the one with + # the highest log probability per token). Results cannot be streamed. + # + # When used with `n`, `best_of` controls the number of candidate completions and + # `n` specifies how many to return – `best_of` must be greater than `n`. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. best_of: nil, + # Echo back the prompt in addition to the completion echo: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on their + # existing frequency in the text so far, decreasing the model's likelihood to + # repeat the same line verbatim. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) frequency_penalty: nil, + # Modify the likelihood of specified tokens appearing in the completion. + # + # Accepts a JSON object that maps tokens (specified by their token ID in the GPT + # tokenizer) to an associated bias value from -100 to 100. You can use this + # [tokenizer tool](/tokenizer?view=bpe) to convert text to token IDs. + # Mathematically, the bias is added to the logits generated by the model prior to + # sampling. The exact effect will vary per model, but values between -1 and 1 + # should decrease or increase likelihood of selection; values like -100 or 100 + # should result in a ban or exclusive selection of the relevant token. + # + # As an example, you can pass `{"50256": -100}` to prevent the <|endoftext|> token + # from being generated. logit_bias: nil, + # Include the log probabilities on the `logprobs` most likely output tokens, as + # well the chosen tokens. For example, if `logprobs` is 5, the API will return a + # list of the 5 most likely tokens. The API will always return the `logprob` of + # the sampled token, so there may be up to `logprobs+1` elements in the response. + # + # The maximum value for `logprobs` is 5. logprobs: nil, + # The maximum number of [tokens](/tokenizer) that can be generated in the + # completion. + # + # The token count of your prompt plus `max_tokens` cannot exceed the model's + # context length. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. max_tokens: nil, + # How many completions to generate for each prompt. + # + # **Note:** Because this parameter generates many completions, it can quickly + # consume your token quota. Use carefully and ensure that you have reasonable + # settings for `max_tokens` and `stop`. n: nil, + # Number between -2.0 and 2.0. Positive values penalize new tokens based on + # whether they appear in the text so far, increasing the model's likelihood to + # talk about new topics. + # + # [See more information about frequency and presence penalties.](https://platform.openai.com/docs/guides/text-generation) presence_penalty: nil, + # If specified, our system will make a best effort to sample deterministically, + # such that repeated requests with the same `seed` and parameters should return + # the same result. 
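+ #
+ # Illustrative only: resending a request with `seed: 1234` and otherwise
+ # identical parameters should usually reproduce the same completion.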
+ # + # Determinism is not guaranteed, and you should refer to the `system_fingerprint` + # response parameter to monitor changes in the backend. seed: nil, + # Not supported with latest reasoning models `o3` and `o4-mini`. + # + # Up to 4 sequences where the API will stop generating further tokens. The + # returned text will not contain the stop sequence. stop: nil, + # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, + # The suffix that comes after a completion of inserted text. + # + # This parameter is only supported for `gpt-3.5-turbo-instruct`. suffix: nil, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. + # + # We generally recommend altering this or `top_p` but not both. temperature: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/lib/openai/models/completion_usage.rbi index 2872756d..48f31d88 100644 --- a/rbi/lib/openai/models/completion_usage.rbi +++ b/rbi/lib/openai/models/completion_usage.rbi @@ -51,10 +51,15 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Number of tokens in the generated completion. completion_tokens:, + # Number of tokens in the prompt. prompt_tokens:, + # Total number of tokens used in the request (prompt + completion). total_tokens:, + # Breakdown of tokens used in a completion. completion_tokens_details: nil, + # Breakdown of tokens used in the prompt. prompt_tokens_details: nil ); end sig do @@ -115,13 +120,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # When using Predicted Outputs, the number of tokens in the prediction that + # appeared in the completion. accepted_prediction_tokens: nil, + # Audio input tokens generated by the model. audio_tokens: nil, + # Tokens generated by the model for reasoning. reasoning_tokens: nil, + # When using Predicted Outputs, the number of tokens in the prediction that did + # not appear in the completion. However, like reasoning tokens, these tokens are + # still counted in the total completion tokens for purposes of billing, output, + # and context window limits. rejected_prediction_tokens: nil - ) - end - + ); end sig do override .returns( @@ -153,8 +164,12 @@ module OpenAI # Breakdown of tokens used in the prompt. sig { params(audio_tokens: Integer, cached_tokens: Integer).returns(T.attached_class) } - def self.new(audio_tokens: nil, cached_tokens: nil); end - + def self.new( + # Audio input tokens present in the prompt. + audio_tokens: nil, + # Cached tokens present in the prompt. 
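+ # (Illustrative note: this is a breakdown of the prompt tokens, not a
+ # count in addition to them.)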
+ cached_tokens: nil + ); end sig { override.returns({audio_tokens: Integer, cached_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 4bb4c09c..19966d7a 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -20,8 +20,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(filters:, type:); end - + def self.new( + # Array of filters to combine. Items can be `ComparisonFilter` or + # `CompoundFilter`. + filters:, + # Type of operation: `and` or `or`. + type: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/lib/openai/models/create_embedding_response.rbi index 878d96ba..4ebe2aee 100644 --- a/rbi/lib/openai/models/create_embedding_response.rbi +++ b/rbi/lib/openai/models/create_embedding_response.rbi @@ -31,8 +31,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, model:, usage:, object: :list); end - + def self.new( + # The list of embeddings generated by the model. + data:, + # The name of the model used to generate the embedding. + model:, + # The usage information for the request. + usage:, + # The object type, which is always "list". + object: :list + ); end sig do override .returns( @@ -57,8 +65,12 @@ module OpenAI # The usage information for the request. sig { params(prompt_tokens: Integer, total_tokens: Integer).returns(T.attached_class) } - def self.new(prompt_tokens:, total_tokens:); end - + def self.new( + # The number of tokens used by the prompt. + prompt_tokens:, + # The total number of tokens used by the request. + total_tokens: + ); end sig { override.returns({prompt_tokens: Integer, total_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/lib/openai/models/embedding.rbi index ac1fc9ab..9c78d508 100644 --- a/rbi/lib/openai/models/embedding.rbi +++ b/rbi/lib/openai/models/embedding.rbi @@ -19,8 +19,16 @@ module OpenAI # Represents an embedding vector returned by embedding endpoint. sig { params(embedding: T::Array[Float], index: Integer, object: Symbol).returns(T.attached_class) } - def self.new(embedding:, index:, object: :embedding); end - + def self.new( + # The embedding vector, which is a list of floats. The length of vector depends on + # the model as listed in the + # [embedding guide](https://platform.openai.com/docs/guides/embeddings). + embedding:, + # The index of the embedding in the list of embeddings. + index:, + # The object type, which is always "embedding". + object: :embedding + ); end sig { override.returns({embedding: T::Array[Float], index: Integer, object: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index b19f81a0..22c4e6b8 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -61,8 +61,34 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}); end - + def self.new( + # Input text to embed, encoded as a string or array of tokens. To embed multiple + # inputs in a single request, pass an array of strings or array of token arrays. 
+ # The input must not exceed the max input tokens for the model (8192 tokens for + # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # dimensions or less. + # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) + # for counting tokens. Some models may also impose a limit on total number of + # tokens summed across inputs. + input:, + # ID of the model to use. You can use the + # [List models](https://platform.openai.com/docs/api-reference/models/list) API to + # see all of your available models, or see our + # [Model overview](https://platform.openai.com/docs/models) for descriptions of + # them. + model:, + # The number of dimensions the resulting output embeddings should have. Only + # supported in `text-embedding-3` and later models. + dimensions: nil, + # The format to return the embeddings in. Can be either `float` or + # [`base64`](https://pypi.org/project/pybase64/). + encoding_format: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + user: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/lib/openai/models/eval_create_params.rbi index 32befe9a..af1d1784 100644 --- a/rbi/lib/openai/models/eval_create_params.rbi +++ b/rbi/lib/openai/models/eval_create_params.rbi @@ -72,8 +72,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}); end - + def self.new( + # The configuration for the data source used for the evaluation runs. + data_source_config:, + # A list of graders for all eval runs in this group. + testing_criteria:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the evaluation. + name: nil, + request_options: {} + ); end sig do override .returns( @@ -130,8 +144,15 @@ module OpenAI params(item_schema: T::Hash[Symbol, T.anything], include_sample_schema: T::Boolean, type: Symbol) .returns(T.attached_class) end - def self.new(item_schema:, include_sample_schema: nil, type: :custom); end - + def self.new( + # The json schema for each row in the data source. + item_schema:, + # Whether the eval should expect you to populate the sample namespace (ie, by + # generating responses off of your data source) + include_sample_schema: nil, + # The type of data source. Always `custom`. + type: :custom + ); end sig do override .returns({ @@ -159,8 +180,12 @@ module OpenAI # completions query. This is usually metadata like `usecase=chatbot` or # `prompt-version=v2`, etc. sig { params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } - def self.new(metadata: nil, type: :logs); end - + def self.new( + # Metadata filters for the logs data source. + metadata: nil, + # The type of data source. Always `logs`. 
+ type: :logs + ); end sig { override.returns({type: Symbol, metadata: T::Hash[Symbol, T.anything]}) } def to_hash; end end @@ -233,8 +258,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, labels:, model:, name:, passing_labels:, type: :label_model); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + input:, + # The labels to classify to each item in the evaluation. + labels:, + # The model to use for the evaluation. Must support structured outputs. + model:, + # The name of the grader. + name:, + # The labels that indicate a passing result. Must be a subset of labels. + passing_labels:, + # The object type, which is always `label_model`. + type: :label_model + ); end sig do override .returns( @@ -270,8 +308,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -328,8 +370,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -361,8 +410,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -481,8 +534,18 @@ module OpenAI params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) .returns(T.attached_class) end - def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - + def self.new( + # The name of the grader. + name:, + # The source code of the python script. + source:, + # The image tag to use for the python script. + image_tag: nil, + # The threshold for the score. + pass_threshold: nil, + # The object type, which is always `python`. + type: :python + ); end sig do override.returns( { @@ -549,16 +612,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The input text. This may include template strings. input:, + # The model to use for the evaluation. model:, + # The name of the grader. name:, + # The threshold for the score. pass_threshold: nil, + # The range of the score. Defaults to `[0, 1]`. range: nil, + # The sampling parameters for the model. sampling_params: nil, + # The object type, which is always `score_model`. type: :score_model - ) - end - + ); end sig do override .returns( @@ -618,8 +686,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
+ type: nil + ); end sig do override .returns( @@ -651,8 +726,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_create_response.rbi b/rbi/lib/openai/models/eval_create_response.rbi index c812a4d5..d0304e79 100644 --- a/rbi/lib/openai/models/eval_create_response.rbi +++ b/rbi/lib/openai/models/eval_create_response.rbi @@ -83,9 +83,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) - end - + def self.new( + # Unique identifier for the evaluation. + id:, + # The Unix timestamp (in seconds) for when the eval was created. + created_at:, + # Configuration of data sources used in runs of the evaluation. + data_source_config:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + # The name of the evaluation. + name:, + # A list of testing criteria. + testing_criteria:, + # The object type. + object: :eval + ); end sig do override .returns( @@ -160,8 +178,18 @@ module OpenAI params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) .returns(T.attached_class) end - def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - + def self.new( + # The name of the grader. + name:, + # The source code of the python script. + source:, + # The image tag to use for the python script. + image_tag: nil, + # The threshold for the score. + pass_threshold: nil, + # The object type, which is always `python`. + type: :python + ); end sig do override.returns( { @@ -228,16 +256,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The input text. This may include template strings. input:, + # The model to use for the evaluation. model:, + # The name of the grader. name:, + # The threshold for the score. pass_threshold: nil, + # The range of the score. Defaults to `[0, 1]`. range: nil, + # The sampling parameters for the model. sampling_params: nil, + # The object type, which is always `score_model`. type: :score_model - ) - end - + ); end sig do override .returns( @@ -301,8 +334,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -334,8 +374,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_custom_data_source_config.rbi b/rbi/lib/openai/models/eval_custom_data_source_config.rbi index c8d1dcea..2c76acf3 100644 --- a/rbi/lib/openai/models/eval_custom_data_source_config.rbi +++ b/rbi/lib/openai/models/eval_custom_data_source_config.rbi @@ -19,8 +19,13 @@ module OpenAI # - Used to define your testing criteria and # - What data is required when creating a run sig { params(schema: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } - def self.new(schema:, type: :custom); end - + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # The type of data source. Always `custom`. + type: :custom + ); end sig { override.returns({schema: T::Hash[Symbol, T.anything], type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/lib/openai/models/eval_label_model_grader.rbi index 8e02688e..1a12db15 100644 --- a/rbi/lib/openai/models/eval_label_model_grader.rbi +++ b/rbi/lib/openai/models/eval_label_model_grader.rbi @@ -39,8 +39,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, labels:, model:, name:, passing_labels:, type: :label_model); end - + def self.new( + input:, + # The labels to assign to each item in the evaluation. + labels:, + # The model to use for the evaluation. Must support structured outputs. + model:, + # The name of the grader. + name:, + # The labels that indicate a passing result. Must be a subset of labels. + passing_labels:, + # The object type, which is always `label_model`. + type: :label_model + ); end sig do override .returns( @@ -99,8 +110,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -132,8 +150,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_list_params.rbi b/rbi/lib/openai/models/eval_list_params.rbi index e0e69e2a..0ef48b7c 100644 --- a/rbi/lib/openai/models/eval_list_params.rbi +++ b/rbi/lib/openai/models/eval_list_params.rbi @@ -46,8 +46,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}); end - + def self.new( + # Identifier for the last eval from the previous pagination request. + after: nil, + # Number of evals to retrieve. + limit: nil, + # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for + # descending order. + order: nil, + # Evals can be ordered by creation time or last updated time. Use `created_at` for + # creation time or `updated_at` for last updated time. 
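+ #
+ # Illustrative only: `order_by: :updated_at` with `order: :desc` would list
+ # the most recently updated evals first.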
+ order_by: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/eval_list_response.rbi b/rbi/lib/openai/models/eval_list_response.rbi index 08a0827f..e1614678 100644 --- a/rbi/lib/openai/models/eval_list_response.rbi +++ b/rbi/lib/openai/models/eval_list_response.rbi @@ -83,9 +83,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) - end - + def self.new( + # Unique identifier for the evaluation. + id:, + # The Unix timestamp (in seconds) for when the eval was created. + created_at:, + # Configuration of data sources used in runs of the evaluation. + data_source_config:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + # The name of the evaluation. + name:, + # A list of testing criteria. + testing_criteria:, + # The object type. + object: :eval + ); end sig do override .returns( @@ -160,8 +178,18 @@ module OpenAI params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) .returns(T.attached_class) end - def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - + def self.new( + # The name of the grader. + name:, + # The source code of the python script. + source:, + # The image tag to use for the python script. + image_tag: nil, + # The threshold for the score. + pass_threshold: nil, + # The object type, which is always `python`. + type: :python + ); end sig do override.returns( { @@ -228,16 +256,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The input text. This may include template strings. input:, + # The model to use for the evaluation. model:, + # The name of the grader. name:, + # The threshold for the score. pass_threshold: nil, + # The range of the score. Defaults to `[0, 1]`. range: nil, + # The sampling parameters for the model. sampling_params: nil, + # The object type, which is always `score_model`. type: :score_model - ) - end - + ); end sig do override .returns( @@ -301,8 +334,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -334,8 +374,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_retrieve_response.rbi b/rbi/lib/openai/models/eval_retrieve_response.rbi index a84f25d8..8e47cfe9 100644 --- a/rbi/lib/openai/models/eval_retrieve_response.rbi +++ b/rbi/lib/openai/models/eval_retrieve_response.rbi @@ -83,9 +83,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) - end - + def self.new( + # Unique identifier for the evaluation. + id:, + # The Unix timestamp (in seconds) for when the eval was created. + created_at:, + # Configuration of data sources used in runs of the evaluation. + data_source_config:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + # The name of the evaluation. + name:, + # A list of testing criteria. + testing_criteria:, + # The object type. + object: :eval + ); end sig do override .returns( @@ -160,8 +178,18 @@ module OpenAI params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) .returns(T.attached_class) end - def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - + def self.new( + # The name of the grader. + name:, + # The source code of the python script. + source:, + # The image tag to use for the python script. + image_tag: nil, + # The threshold for the score. + pass_threshold: nil, + # The object type, which is always `python`. + type: :python + ); end sig do override.returns( { @@ -233,16 +261,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The input text. This may include template strings. input:, + # The model to use for the evaluation. model:, + # The name of the grader. name:, + # The threshold for the score. pass_threshold: nil, + # The range of the score. Defaults to `[0, 1]`. range: nil, + # The sampling parameters for the model. sampling_params: nil, + # The object type, which is always `score_model`. type: :score_model - ) - end - + ); end sig do override .returns( @@ -309,8 +342,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -342,8 +382,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi index cfc531e0..2c6fd596 100644 --- a/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi @@ -34,8 +34,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(schema:, metadata: nil, type: :stored_completions); end - + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `stored_completions`. + type: :stored_completions + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/eval_string_check_grader.rbi b/rbi/lib/openai/models/eval_string_check_grader.rbi index a7c9af35..cf0301de 100644 --- a/rbi/lib/openai/models/eval_string_check_grader.rbi +++ b/rbi/lib/openai/models/eval_string_check_grader.rbi @@ -35,8 +35,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, name:, operation:, reference:, type: :string_check); end - + def self.new( + # The input text. This may include template strings. + input:, + # The name of the grader. + name:, + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + operation:, + # The reference text. This may include template strings. + reference:, + # The object type, which is always `string_check`. + type: :string_check + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/lib/openai/models/eval_text_similarity_grader.rbi index f3675e7a..6b1c6e54 100644 --- a/rbi/lib/openai/models/eval_text_similarity_grader.rbi +++ b/rbi/lib/openai/models/eval_text_similarity_grader.rbi @@ -44,15 +44,20 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. evaluation_metric:, + # The text being graded. input:, + # A float score where a value greater than or equal indicates a passing grade. pass_threshold:, + # The text being graded against. reference:, + # The name of the grader. name: nil, + # The type of grader. type: :text_similarity - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/eval_update_params.rbi b/rbi/lib/openai/models/eval_update_params.rbi index 86ef04e1..857900a1 100644 --- a/rbi/lib/openai/models/eval_update_params.rbi +++ b/rbi/lib/openai/models/eval_update_params.rbi @@ -30,8 +30,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(metadata: nil, name: nil, request_options: {}); end - + def self.new( + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. 
Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # Rename the evaluation. + name: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/eval_update_response.rbi b/rbi/lib/openai/models/eval_update_response.rbi index 53a41e65..83a8e983 100644 --- a/rbi/lib/openai/models/eval_update_response.rbi +++ b/rbi/lib/openai/models/eval_update_response.rbi @@ -83,9 +83,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, created_at:, data_source_config:, metadata:, name:, testing_criteria:, object: :eval) - end - + def self.new( + # Unique identifier for the evaluation. + id:, + # The Unix timestamp (in seconds) for when the eval was created. + created_at:, + # Configuration of data sources used in runs of the evaluation. + data_source_config:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata:, + # The name of the evaluation. + name:, + # A list of testing criteria. + testing_criteria:, + # The object type. + object: :eval + ); end sig do override .returns( @@ -160,8 +178,18 @@ module OpenAI params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) .returns(T.attached_class) end - def self.new(name:, source:, image_tag: nil, pass_threshold: nil, type: :python); end - + def self.new( + # The name of the grader. + name:, + # The source code of the python script. + source:, + # The image tag to use for the python script. + image_tag: nil, + # The threshold for the score. + pass_threshold: nil, + # The object type, which is always `python`. + type: :python + ); end sig do override.returns( { @@ -228,16 +256,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The input text. This may include template strings. input:, + # The model to use for the evaluation. model:, + # The name of the grader. name:, + # The threshold for the score. pass_threshold: nil, + # The range of the score. Defaults to `[0, 1]`. range: nil, + # The sampling parameters for the model. sampling_params: nil, + # The object type, which is always `score_model`. type: :score_model - ) - end - + ); end sig do override .returns( @@ -301,8 +334,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -334,8 +374,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi index 6e2ac631..6ac91b72 100644 --- a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -88,8 +88,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, type:, input_messages: nil, model: nil, sampling_params: nil); end - + def self.new( + # A StoredCompletionsRunDataSource configuration describing a set of filters + source:, + # The type of run data source. Always `completions`. + type:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil + ); end sig do override .returns( @@ -136,8 +144,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -184,8 +196,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -233,15 +249,24 @@ module OpenAI .returns(T.attached_class) end def self.new( + # An optional Unix timestamp to filter items created after this time. created_after: nil, + # An optional Unix timestamp to filter items created before this time. created_before: nil, + # An optional maximum number of items to return. limit: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # An optional model to filter by (e.g., 'gpt-4o'). model: nil, + # The type of source. Always `stored_completions`. type: :stored_completions - ) - end - + ); end sig do override .returns( @@ -317,8 +342,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -401,8 +431,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -434,8 +471,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. 
+ text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -540,8 +581,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -588,8 +633,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { diff --git a/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi index 12b3a3b6..66df840b 100644 --- a/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -31,8 +31,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, type: :jsonl); end - + def self.new( + source:, + # The type of data source. Always `jsonl`. + type: :jsonl + ); end sig do override .returns( @@ -71,8 +74,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -119,8 +126,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/eval_api_error.rbi b/rbi/lib/openai/models/evals/eval_api_error.rbi index cc6ac0e5..20859d95 100644 --- a/rbi/lib/openai/models/evals/eval_api_error.rbi +++ b/rbi/lib/openai/models/evals/eval_api_error.rbi @@ -14,8 +14,12 @@ module OpenAI # An object representing an error response from the Eval API. sig { params(code: String, message: String).returns(T.attached_class) } - def self.new(code:, message:); end - + def self.new( + # The error code. + code:, + # The error message. + message: + ); end sig { override.returns({code: String, message: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/run_cancel_response.rbi b/rbi/lib/openai/models/evals/run_cancel_response.rbi index 35e93b1b..41ea134d 100644 --- a/rbi/lib/openai/models/evals/run_cancel_response.rbi +++ b/rbi/lib/openai/models/evals/run_cancel_response.rbi @@ -110,19 +110,38 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run. 
id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Information about the run's data source. data_source:, + # An object representing an error response from the Eval API. error:, + # The identifier of the associated evaluation. eval_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The model that is evaluated, if applicable. model:, + # The name of the evaluation run. name:, + # Usage statistics for each model during the evaluation run. per_model_usage:, + # Results per testing criteria applied during the evaluation run. per_testing_criteria_results:, + # The URL to the rendered evaluation run report on the UI dashboard. report_url:, + # Counters summarizing the outcomes of the evaluation run. result_counts:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run". object: :"eval.run" ); end sig do @@ -241,9 +260,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - end - + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `completions`. + type: :completions + ); end sig do override .returns( @@ -294,8 +320,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -343,8 +373,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -425,17 +459,37 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. allow_parallel_tool_calls: nil, + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. has_tool_calls: nil, + # Optional search string for instructions. This is a query parameter used to + # select responses. instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. reasoning_effort: nil, + # Sampling temperature. 
This is a query parameter used to select responses. temperature: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. users: nil, + # The type of run data source. Always `responses`. type: :responses ); end sig do @@ -504,8 +558,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -540,8 +599,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -604,8 +667,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -637,8 +707,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -743,8 +817,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -791,8 +869,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { @@ -853,15 +939,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The number of tokens retrieved from cache. cached_tokens:, + # The number of completion tokens generated. completion_tokens:, + # The number of invocations. invocation_count:, + # The name of the model. model_name:, + # The number of prompt tokens used. prompt_tokens:, + # The total number of tokens used. 
total_tokens: - ) - end - + ); end sig do override .returns( @@ -892,8 +982,14 @@ module OpenAI attr_accessor :testing_criteria sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } - def self.new(failed:, passed:, testing_criteria:); end - + def self.new( + # Number of tests failed for this criteria. + failed:, + # Number of tests passed for this criteria. + passed:, + # A description of the testing criteria. + testing_criteria: + ); end sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } def to_hash; end end @@ -924,8 +1020,16 @@ module OpenAI total: Integer ).returns(T.attached_class) end - def self.new(errored:, failed:, passed:, total:); end - + def self.new( + # Number of output items that resulted in an error. + errored:, + # Number of output items that failed to pass the evaluation. + failed:, + # Number of output items that passed the evaluation. + passed:, + # Total number of executed output items. + total: + ); end sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/run_create_params.rbi b/rbi/lib/openai/models/evals/run_create_params.rbi index c995c3ef..4a286695 100644 --- a/rbi/lib/openai/models/evals/run_create_params.rbi +++ b/rbi/lib/openai/models/evals/run_create_params.rbi @@ -49,8 +49,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data_source:, metadata: nil, name: nil, request_options: {}); end - + def self.new( + # Details about the run's data source. + data_source:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the run. + name: nil, + request_options: {} + ); end sig do override .returns( @@ -167,8 +179,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, type:, input_messages: nil, model: nil, sampling_params: nil); end - + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + # The type of run data source. Always `completions`. + type:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil + ); end sig do override .returns( @@ -221,8 +241,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -272,8 +296,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -354,17 +382,37 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. allow_parallel_tool_calls: nil, + # Only include items created after this timestamp (inclusive). 
This is a query + # parameter used to select responses. created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. has_tool_calls: nil, + # Optional search string for instructions. This is a query parameter used to + # select responses. instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. temperature: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. users: nil, + # The type of run data source. Always `responses`. type: :responses ); end sig do @@ -456,8 +504,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -492,8 +545,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -556,8 +613,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -589,8 +653,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -695,8 +763,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -743,8 +815,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { diff --git a/rbi/lib/openai/models/evals/run_create_response.rbi b/rbi/lib/openai/models/evals/run_create_response.rbi index 40364127..cebc26ac 100644 --- a/rbi/lib/openai/models/evals/run_create_response.rbi +++ b/rbi/lib/openai/models/evals/run_create_response.rbi @@ -110,19 +110,38 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run. id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Information about the run's data source. data_source:, + # An object representing an error response from the Eval API. error:, + # The identifier of the associated evaluation. eval_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The model that is evaluated, if applicable. model:, + # The name of the evaluation run. name:, + # Usage statistics for each model during the evaluation run. per_model_usage:, + # Results per testing criteria applied during the evaluation run. per_testing_criteria_results:, + # The URL to the rendered evaluation run report on the UI dashboard. report_url:, + # Counters summarizing the outcomes of the evaluation run. result_counts:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run". object: :"eval.run" ); end sig do @@ -241,9 +260,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - end - + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `completions`. + type: :completions + ); end sig do override .returns( @@ -294,8 +320,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -343,8 +373,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. 
+ type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -425,17 +459,37 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. allow_parallel_tool_calls: nil, + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. has_tool_calls: nil, + # Optional search string for instructions. This is a query parameter used to + # select responses. instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. temperature: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. users: nil, + # The type of run data source. Always `responses`. type: :responses ); end sig do @@ -504,8 +558,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -540,8 +599,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -604,8 +667,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -637,8 +707,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -743,8 +817,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -791,8 +869,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { @@ -853,15 +939,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The number of tokens retrieved from cache. cached_tokens:, + # The number of completion tokens generated. completion_tokens:, + # The number of invocations. invocation_count:, + # The name of the model. model_name:, + # The number of prompt tokens used. prompt_tokens:, + # The total number of tokens used. total_tokens: - ) - end - + ); end sig do override .returns( @@ -892,8 +982,14 @@ module OpenAI attr_accessor :testing_criteria sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } - def self.new(failed:, passed:, testing_criteria:); end - + def self.new( + # Number of tests failed for this criteria. + failed:, + # Number of tests passed for this criteria. + passed:, + # A description of the testing criteria. + testing_criteria: + ); end sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } def to_hash; end end @@ -924,8 +1020,16 @@ module OpenAI total: Integer ).returns(T.attached_class) end - def self.new(errored:, failed:, passed:, total:); end - + def self.new( + # Number of output items that resulted in an error. + errored:, + # Number of output items that failed to pass the evaluation. + failed:, + # Number of output items that passed the evaluation. + passed:, + # Total number of executed output items. + total: + ); end sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/lib/openai/models/evals/run_list_params.rbi index c3cf6599..aa4151ed 100644 --- a/rbi/lib/openai/models/evals/run_list_params.rbi +++ b/rbi/lib/openai/models/evals/run_list_params.rbi @@ -47,8 +47,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, status: nil, request_options: {}); end - + def self.new( + # Identifier for the last run from the previous pagination request. + after: nil, + # Number of runs to retrieve. + limit: nil, + # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for + # descending order. Defaults to `asc`. + order: nil, + # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # | `canceled`. + status: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/evals/run_list_response.rbi b/rbi/lib/openai/models/evals/run_list_response.rbi index f264782f..e00b1471 100644 --- a/rbi/lib/openai/models/evals/run_list_response.rbi +++ b/rbi/lib/openai/models/evals/run_list_response.rbi @@ -110,19 +110,38 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run. 
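# Pagination sketch for `Evals::RunListParams` above; `client.evals.runs.list`
# and the page shape are assumptions based on this SDK's generated cursor
# pagination.
page = client.evals.runs.list(
  "eval_abc123",         # hypothetical eval identifier
  after: "evalrun_xyz",  # last run ID from the previous page
  limit: 20,
  order: :desc,          # newest first (default is `asc`)
  status: :completed
)
page.data.each { |run| puts "#{run.id} #{run.status} #{run.report_url}" }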
id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Information about the run's data source. data_source:, + # An object representing an error response from the Eval API. error:, + # The identifier of the associated evaluation. eval_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The model that is evaluated, if applicable. model:, + # The name of the evaluation run. name:, + # Usage statistics for each model during the evaluation run. per_model_usage:, + # Results per testing criteria applied during the evaluation run. per_testing_criteria_results:, + # The URL to the rendered evaluation run report on the UI dashboard. report_url:, + # Counters summarizing the outcomes of the evaluation run. result_counts:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run". object: :"eval.run" ); end sig do @@ -241,9 +260,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - end - + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `completions`. + type: :completions + ); end sig do override .returns( @@ -294,8 +320,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ); end sig do override .returns( @@ -343,8 +373,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -425,17 +459,37 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. allow_parallel_tool_calls: nil, + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. has_tool_calls: nil, + # Optional search string for instructions. This is a query parameter used to + # select responses. instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. reasoning_effort: nil, + # Sampling temperature. 
This is a query parameter used to select responses. temperature: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. users: nil, + # The type of run data source. Always `responses`. type: :responses ); end sig do @@ -504,8 +558,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -540,8 +599,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -604,8 +667,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -637,8 +707,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -743,8 +817,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -791,8 +869,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { @@ -853,15 +939,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The number of tokens retrieved from cache. cached_tokens:, + # The number of completion tokens generated. completion_tokens:, + # The number of invocations. invocation_count:, + # The name of the model. model_name:, + # The number of prompt tokens used. prompt_tokens:, + # The total number of tokens used. 
total_tokens: - ) - end - + ); end sig do override .returns( @@ -892,8 +982,14 @@ module OpenAI attr_accessor :testing_criteria sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } - def self.new(failed:, passed:, testing_criteria:); end - + def self.new( + # Number of tests failed for this criteria. + failed:, + # Number of tests passed for this criteria. + passed:, + # A description of the testing criteria. + testing_criteria: + ); end sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } def to_hash; end end @@ -924,8 +1020,16 @@ module OpenAI total: Integer ).returns(T.attached_class) end - def self.new(errored:, failed:, passed:, total:); end - + def self.new( + # Number of output items that resulted in an error. + errored:, + # Number of output items that failed to pass the evaluation. + failed:, + # Number of output items that passed the evaluation. + passed:, + # Total number of executed output items. + total: + ); end sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/run_retrieve_response.rbi b/rbi/lib/openai/models/evals/run_retrieve_response.rbi index 405a83cf..70df1bc9 100644 --- a/rbi/lib/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/lib/openai/models/evals/run_retrieve_response.rbi @@ -110,19 +110,38 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run. id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Information about the run's data source. data_source:, + # An object representing an error response from the Eval API. error:, + # The identifier of the associated evaluation. eval_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The model that is evaluated, if applicable. model:, + # The name of the evaluation run. name:, + # Usage statistics for each model during the evaluation run. per_model_usage:, + # Results per testing criteria applied during the evaluation run. per_testing_criteria_results:, + # The URL to the rendered evaluation run report on the UI dashboard. report_url:, + # Counters summarizing the outcomes of the evaluation run. result_counts:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run". object: :"eval.run" ); end sig do @@ -241,9 +260,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - end - + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `completions`. + type: :completions + ); end sig do override .returns( @@ -294,8 +320,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, type: :file_content); end - + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ); end sig do override .returns( @@ -343,8 +373,12 @@ module OpenAI attr_accessor :type sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :file_id); end - + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end @@ -425,17 +459,37 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Whether to allow parallel tool calls. This is a query parameter used to select + # responses. allow_parallel_tool_calls: nil, + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. has_tool_calls: nil, + # Optional search string for instructions. This is a query parameter used to + # select responses. instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. temperature: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. users: nil, + # The type of run data source. Always `responses`. type: :responses ); end sig do @@ -504,8 +558,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(template:, type: :template); end - + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ); end sig do override .returns( @@ -540,8 +599,12 @@ module OpenAI attr_accessor :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -604,8 +667,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( @@ -637,8 +707,12 @@ module OpenAI # A text output from the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :output_text); end - + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -743,8 +817,12 @@ module OpenAI attr_accessor :type sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } - def self.new(item_reference:, type: :item_reference); end - + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ); end sig { override.returns({item_reference: String, type: Symbol}) } def to_hash; end end @@ -791,8 +869,16 @@ module OpenAI params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) .returns(T.attached_class) end - def self.new(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil); end - + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ); end sig do override.returns( { @@ -853,15 +939,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The number of tokens retrieved from cache. cached_tokens:, + # The number of completion tokens generated. completion_tokens:, + # The number of invocations. invocation_count:, + # The name of the model. model_name:, + # The number of prompt tokens used. prompt_tokens:, + # The total number of tokens used. total_tokens: - ) - end - + ); end sig do override .returns( @@ -892,8 +982,14 @@ module OpenAI attr_accessor :testing_criteria sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } - def self.new(failed:, passed:, testing_criteria:); end - + def self.new( + # Number of tests failed for this criteria. + failed:, + # Number of tests passed for this criteria. + passed:, + # A description of the testing criteria. + testing_criteria: + ); end sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } def to_hash; end end @@ -924,8 +1020,16 @@ module OpenAI total: Integer ).returns(T.attached_class) end - def self.new(errored:, failed:, passed:, total:); end - + def self.new( + # Number of output items that resulted in an error. + errored:, + # Number of output items that failed to pass the evaluation. + failed:, + # Number of output items that passed the evaluation. + passed:, + # Total number of executed output items. + total: + ); end sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi index 1d9f802f..e6931f3d 100644 --- a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi +++ b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi @@ -52,8 +52,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}); end - + def self.new( + eval_id:, + # Identifier for the last output item from the previous pagination request. + after: nil, + # Number of output items to retrieve. + limit: nil, + # Sort order for output items by timestamp. Use `asc` for ascending order or + # `desc` for descending order. Defaults to `asc`. 
+ order: nil, + # Filter output items by status. Use `failed` to filter by failed output items or + # `pass` to filter by passed output items. + status: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi index ecb65334..027b0dd3 100644 --- a/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi +++ b/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi @@ -70,15 +70,25 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run output item. id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Details of the input data source item. datasource_item:, + # The identifier for the data source item. datasource_item_id:, + # The identifier of the evaluation group. eval_id:, + # A list of results from the evaluation run. results:, + # The identifier of the evaluation run associated with this output item. run_id:, + # A sample containing the input and output of the evaluation run. sample:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run.output_item". object: :"eval.run.output_item" ); end sig do @@ -169,15 +179,25 @@ module OpenAI .returns(T.attached_class) end def self.new( + # An object representing an error response from the Eval API. error:, + # The reason why the sample generation was finished. finish_reason:, + # An array of input messages. input:, + # The maximum number of tokens allowed for completion. max_completion_tokens:, + # The model used for generating the sample. model:, + # An array of output messages. output:, + # The seed used for generating the sample. seed:, + # The sampling temperature used. temperature:, + # The top_p value used for sampling. top_p:, + # Token usage details for the sample. usage: ); end sig do @@ -210,8 +230,12 @@ module OpenAI # An input message. sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message sender (e.g., system, user, developer). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -232,8 +256,12 @@ module OpenAI attr_writer :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content: nil, role: nil); end - + def self.new( + # The content of the message. + content: nil, + # The role of the message (e.g. "system", "assistant", "user"). + role: nil + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -265,8 +293,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:); end - + def self.new( + # The number of tokens retrieved from cache. + cached_tokens:, + # The number of completion tokens generated. + completion_tokens:, + # The number of prompt tokens used. + prompt_tokens:, + # The total number of tokens used. 
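# Sketch tying the output-item shapes above together: list a run's failed
# output items and read per-sample token usage. The resource path and argument
# order are assumptions inferred from `OutputItemListParams`.
items = client.evals.runs.output_items.list(
  "evalrun_abc123",   # hypothetical run identifier
  eval_id: "eval_abc123",
  status: :failed,    # or :pass for passing items
  limit: 50
)
items.data.each do |item|
  usage = item.sample.usage
  puts "#{item.id}: #{item.status} (#{usage.total_tokens} tokens)"
end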
+ total_tokens: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi b/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi index b538f6cd..69bee6f1 100644 --- a/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi +++ b/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi @@ -70,15 +70,25 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for the evaluation run output item. id:, + # Unix timestamp (in seconds) when the evaluation run was created. created_at:, + # Details of the input data source item. datasource_item:, + # The identifier for the data source item. datasource_item_id:, + # The identifier of the evaluation group. eval_id:, + # A list of results from the evaluation run. results:, + # The identifier of the evaluation run associated with this output item. run_id:, + # A sample containing the input and output of the evaluation run. sample:, + # The status of the evaluation run. status:, + # The type of the object. Always "eval.run.output_item". object: :"eval.run.output_item" ); end sig do @@ -169,15 +179,25 @@ module OpenAI .returns(T.attached_class) end def self.new( + # An object representing an error response from the Eval API. error:, + # The reason why the sample generation was finished. finish_reason:, + # An array of input messages. input:, + # The maximum number of tokens allowed for completion. max_completion_tokens:, + # The model used for generating the sample. model:, + # An array of output messages. output:, + # The seed used for generating the sample. seed:, + # The sampling temperature used. temperature:, + # The top_p value used for sampling. top_p:, + # Token usage details for the sample. usage: ); end sig do @@ -210,8 +230,12 @@ module OpenAI # An input message. sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content:, role:); end - + def self.new( + # The content of the message. + content:, + # The role of the message sender (e.g., system, user, developer). + role: + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -232,8 +256,12 @@ module OpenAI attr_writer :role sig { params(content: String, role: String).returns(T.attached_class) } - def self.new(content: nil, role: nil); end - + def self.new( + # The content of the message. + content: nil, + # The role of the message (e.g. "system", "assistant", "user"). + role: nil + ); end sig { override.returns({content: String, role: String}) } def to_hash; end end @@ -265,8 +293,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:); end - + def self.new( + # The number of tokens retrieved from cache. + cached_tokens:, + # The number of completion tokens generated. + completion_tokens:, + # The number of prompt tokens used. + prompt_tokens:, + # The total number of tokens used. + total_tokens: + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index c4a5174b..2580e280 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -25,8 +25,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file:, purpose:, request_options: {}); end - + def self.new( + # The File object (not file name) to be uploaded. + file:, + # The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the + # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for + # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: + # Flexible file type for any purpose - `evals`: Used for eval data sets + purpose:, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index 69d9b1a3..b33289b5 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -49,8 +49,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 10,000, and the default is 10,000. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + # Only return files with the given purpose. + purpose: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 82c4880a..af7989d6 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -65,14 +65,27 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The file identifier, which can be referenced in the API endpoints. id:, + # The size of the file, in bytes. bytes:, + # The Unix timestamp (in seconds) for when the file was created. created_at:, + # The name of the file. filename:, + # The intended purpose of the file. Supported values are `assistants`, + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` + # and `vision`. purpose:, + # Deprecated. The current status of the file, which can be either `uploaded`, + # `processed`, or `error`. status:, + # The Unix timestamp (in seconds) for when the file will expire. expires_at: nil, + # Deprecated. For details on why a fine-tuning training file failed validation, + # see the `error` field on `fine_tuning.job`. status_details: nil, + # The object type, which is always `file`. object: :file ); end sig do diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi index 27d4ad09..b8e4f73d 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi @@ -19,8 +19,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(project_ids:, request_options: {}); end - + def self.new( + # The project identifiers to grant access to. 
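# Sketch for `FileCreateParams`/`FileListParams` above; method names are
# assumptions and the JSONL path is made up.
training = client.files.create(
  file: File.open("training.jsonl", "rb"),  # the File object itself, not a file name
  purpose: :"fine-tune"
)
# Cursor pagination: pass the last seen object ID as `after` for the next page.
client.files.list(purpose: "fine-tune", limit: 100, order: :asc, after: training.id)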
+ project_ids:, + request_options: {} + ); end sig { override.returns({project_ids: T::Array[String], request_options: OpenAI::RequestOptions}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi index 07d7d8b6..6dadadac 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi @@ -31,8 +31,16 @@ module OpenAI object: Symbol ).returns(T.attached_class) end - def self.new(id:, created_at:, project_id:, object: :"checkpoint.permission"); end - + def self.new( + # The permission identifier, which can be referenced in the API endpoints. + id:, + # The Unix timestamp (in seconds) for when the permission was created. + created_at:, + # The project identifier that the permission is for. + project_id:, + # The object type, which is always "checkpoint.permission". + object: :"checkpoint.permission" + ); end sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi index 9a510f8c..6bc5d73b 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi @@ -18,8 +18,14 @@ module OpenAI attr_accessor :object sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"checkpoint.permission"); end - + def self.new( + # The ID of the fine-tuned model checkpoint permission that was deleted. + id:, + # Whether the fine-tuned model checkpoint permission was successfully deleted. + deleted:, + # The object type, which is always "checkpoint.permission". + object: :"checkpoint.permission" + ); end sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi index 938c8a10..04da40a3 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi @@ -46,8 +46,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}); end - + def self.new( + # Identifier for the last permission ID from the previous pagination request. + after: nil, + # Number of permissions to retrieve. + limit: nil, + # The order in which to retrieve permissions. + order: nil, + # The ID of the project to get permissions for. 
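# Sketch for the checkpoint-permission endpoints above; the resource path and
# the positional checkpoint argument are assumptions inferred from the params
# and response classes.
client.fine_tuning.checkpoints.permissions.create(
  "ft:gpt-4o-mini:org:custom:abc123",  # hypothetical fine-tuned model checkpoint
  project_ids: %w[proj_abc proj_def]   # grants these projects access
)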
+ project_id: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi index dc137a5f..95aa5332 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi @@ -78,8 +78,16 @@ module OpenAI object: Symbol ).returns(T.attached_class) end - def self.new(id:, created_at:, project_id:, object: :"checkpoint.permission"); end - + def self.new( + # The permission identifier, which can be referenced in the API endpoints. + id:, + # The Unix timestamp (in seconds) for when the permission was created. + created_at:, + # The project identifier that the permission is for. + project_id:, + # The object type, which is always "checkpoint.permission". + object: :"checkpoint.permission" + ); end sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index 6e25d748..b9a0f929 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -145,24 +145,60 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The object identifier, which can be referenced in the API endpoints. id:, + # The Unix timestamp (in seconds) for when the fine-tuning job was created. created_at:, + # For fine-tuning jobs that have `failed`, this will contain more information on + # the cause of the failure. error:, + # The name of the fine-tuned model that is being created. The value will be null + # if the fine-tuning job is still running. fine_tuned_model:, + # The Unix timestamp (in seconds) for when the fine-tuning job was finished. The + # value will be null if the fine-tuning job is still running. finished_at:, + # The hyperparameters used for the fine-tuning job. This value will only be + # returned when running `supervised` jobs. hyperparameters:, + # The base model that is being fine-tuned. model:, + # The organization that owns the fine-tuning job. organization_id:, + # The compiled results file ID(s) for the fine-tuning job. You can retrieve the + # results with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). result_files:, + # The seed used for the fine-tuning job. seed:, + # The current status of the fine-tuning job, which can be either + # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. status:, + # The total number of billable tokens processed by this fine-tuning job. The value + # will be null if the fine-tuning job is still running. trained_tokens:, + # The file ID used for training. You can retrieve the training data with the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). training_file:, + # The file ID used for validation. You can retrieve the validation results with + # the + # [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents). validation_file:, + # The Unix timestamp (in seconds) for when the fine-tuning job is estimated to + # finish. The value will be null if the fine-tuning job is not running. estimated_finish: nil, + # A list of integrations to enable for this fine-tuning job. 
integrations: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The method used for fine-tuning. method_: nil, + # The object type, which is always "fine_tuning.job". object: :"fine_tuning.job" ); end sig do @@ -210,8 +246,15 @@ module OpenAI # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. sig { params(code: String, message: String, param: T.nilable(String)).returns(T.attached_class) } - def self.new(code:, message:, param:); end - + def self.new( + # A machine-readable error code. + code:, + # A human-readable error message. + message:, + # The parameter that was invalid, usually `training_file` or `validation_file`. + # This field will be null if the failure was not parameter-specific. + param: + ); end sig { override.returns({code: String, message: String, param: T.nilable(String)}) } def to_hash; end end @@ -251,8 +294,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( @@ -349,8 +401,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(dpo: nil, supervised: nil, type: nil); end - + def self.new( + # Configuration for the DPO fine-tuning method. + dpo: nil, + # Configuration for the supervised fine-tuning method. + supervised: nil, + # The type of method. Is either `supervised` or `dpo`. + type: nil + ); end sig do override .returns( @@ -383,8 +441,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil); end - + def self.new( + # The hyperparameters used for the fine-tuning job. + hyperparameters: nil + ); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters}) @@ -434,8 +494,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + beta: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( @@ -513,8 +585,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil); end - + def self.new( + # The hyperparameters used for the fine-tuning job. 
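# Sketch of the `method` payload documented above: choose `supervised` or `dpo`
# and nest hyperparameters under the matching key. Values are illustrative;
# per the sigs each hyperparameter also accepts a Symbol such as :auto.
dpo_method = {
  type: :dpo,
  dpo: {
    hyperparameters: {
      beta: 0.1,                      # weight of the policy/reference penalty
      batch_size: :auto,
      learning_rate_multiplier: 1.8,  # smaller values help avoid overfitting
      n_epochs: 3                     # full cycles through the training dataset
    }
  }
}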
+ hyperparameters: nil + ); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters}) @@ -555,8 +629,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index ea515e0f..bae007c6 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -52,16 +52,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The object identifier. id:, + # The Unix timestamp (in seconds) for when the fine-tuning job was created. created_at:, + # The log level of the event. level:, + # The message of the event. message:, + # The data associated with the event. data: nil, + # The type of event. type: nil, + # The object type, which is always "fine_tuning.job.event". object: :"fine_tuning.job.event" - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index ba0eda55..f177e1fc 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -36,8 +36,21 @@ module OpenAI params(project: String, entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String]) .returns(T.attached_class) end - def self.new(project:, entity: nil, name: nil, tags: nil); end - + def self.new( + # The name of the project that the new run will be created under. + project:, + # The entity to use for the run. This allows you to set the team or username of + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. + entity: nil, + # A display name to set for the run. If not set, we will use the Job ID as the + # name. + name: nil, + # A list of tags to be attached to the newly created run. These tags are passed + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + tags: nil + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index 5c771091..2b2e10d9 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -28,8 +28,15 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(wandb:, type: :wandb); end - + def self.new( + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. 
Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. + wandb:, + # The type of the integration being enabled for the fine-tuning job + type: :wandb + ); end sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 13b9cfae..af70cdc3 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -114,14 +114,63 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The name of the model to fine-tune. You can select one of the + # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). model:, + # The ID of an uploaded file that contains training data. + # + # See [upload file](https://platform.openai.com/docs/api-reference/files/create) + # for how to upload a file. + # + # Your dataset must be formatted as a JSONL file. Additionally, you must upload + # your file with the purpose `fine-tune`. + # + # The contents of the file should differ depending on if the model uses the + # [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input), + # [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input) + # format, or if the fine-tuning method uses the + # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) + # format. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. training_file:, + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. hyperparameters: nil, + # A list of integrations to enable for your fine-tuning job. integrations: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The method used for fine-tuning. method_: nil, + # The seed controls the reproducibility of the job. Passing in the same seed and + # job parameters should produce the same results, but may differ in rare cases. If + # a seed is not specified, one will be generated for you. seed: nil, + # A string of up to 64 characters that will be added to your fine-tuned model + # name. + # + # For example, a `suffix` of "custom-model-name" would produce a model name like + # `ft:gpt-4o-mini:openai:custom-model-name:7p4lURel`. suffix: nil, + # The ID of an uploaded file that contains validation data. + # + # If you provide this file, the data is used to generate validation metrics + # periodically during fine-tuning. These metrics can be viewed in the fine-tuning + # results file. The same data should not be present in both train and validation + # files. + # + # Your dataset must be formatted as a JSONL file. You must upload your file with + # the purpose `fine-tune`. + # + # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # for more details. 
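# End-to-end sketch for `JobCreateParams` above; `client.fine_tuning.jobs.create`
# is assumed to be the surface these params feed, and the file IDs are made up.
job = client.fine_tuning.jobs.create(
  model: "gpt-4o-mini",
  training_file: "file-abc123",    # JSONL uploaded with purpose `fine-tune`
  validation_file: "file-def456",  # held out for periodic validation metrics
  method_: dpo_method,             # from the sketch above; `method_` mirrors the sig's param name
  suffix: "custom-model-name",     # -> ft:gpt-4o-mini:openai:custom-model-name:...
  seed: 42,                        # same seed + params should reproduce results
  metadata: {"team" => "evals"}
)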
validation_file: nil, request_options: {} ); end @@ -196,8 +245,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( @@ -266,8 +324,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(wandb:, type: :wandb); end - + def self.new( + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. + wandb:, + # The type of integration to enable. Currently, only "wandb" (Weights and Biases) + # is supported. + type: :wandb + ); end sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb}) } def to_hash; end @@ -309,8 +375,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(project:, entity: nil, name: nil, tags: nil); end - + def self.new( + # The name of the project that the new run will be created under. + project:, + # The entity to use for the run. This allows you to set the team or username of + # the WandB user that you would like associated with the run. If not set, the + # default entity for the registered WandB API key is used. + entity: nil, + # A display name to set for the run. If not set, we will use the Job ID as the + # name. + name: nil, + # A list of tags to be attached to the newly created run. These tags are passed + # through directly to WandB. Some default tags are generated by OpenAI: + # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". + tags: nil + ); end sig do override .returns({ @@ -363,8 +442,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(dpo: nil, supervised: nil, type: nil); end - + def self.new( + # Configuration for the DPO fine-tuning method. + dpo: nil, + # Configuration for the supervised fine-tuning method. + supervised: nil, + # The type of method. Is either `supervised` or `dpo`. + type: nil + ); end sig do override .returns( @@ -403,8 +488,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil); end - + def self.new( + # The hyperparameters used for the fine-tuning job. + hyperparameters: nil + ); end sig do override .returns({hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters}) @@ -454,8 +541,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + beta: nil, + # Scaling factor for the learning rate. 
A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( @@ -533,8 +632,10 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(hyperparameters: nil); end - + def self.new( + # The hyperparameters used for the fine-tuning job. + hyperparameters: nil + ); end sig do override .returns( @@ -577,8 +678,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil); end - + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi index 4c6e5963..9bfe0d0a 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi @@ -29,8 +29,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, request_options: {}); end - + def self.new( + # Identifier for the last event from the previous pagination request. + after: nil, + # Number of events to retrieve. + limit: nil, + request_options: {} + ); end sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi index 49bc0f41..21062f5c 100644 --- a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_list_params.rbi @@ -35,8 +35,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, metadata: nil, request_options: {}); end - + def self.new( + # Identifier for the last job from the previous pagination request. + after: nil, + # Number of fine-tuning jobs to retrieve. + limit: nil, + # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. + # Alternatively, set `metadata=null` to indicate no metadata. + metadata: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index fee17bda..27afbe12 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -30,8 +30,13 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, limit: nil, request_options: {}); end - + def self.new( + # Identifier for the last checkpoint from the previous pagination request. + after: nil, + # Number of checkpoints to retrieve.
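+ # + # For example, assuming the nested resource path this gem generates (the job ID is hypothetical): + # + #   client.fine_tuning.jobs.checkpoints.list("ftjob-abc123", limit: 10)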
+ limit: nil, + request_options: {} + ); end sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } def to_hash; end end diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index 7c253eca..527bf6e5 100644 --- a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -56,12 +56,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The checkpoint identifier, which can be referenced in the API endpoints. id:, + # The Unix timestamp (in seconds) for when the checkpoint was created. created_at:, + # The name of the fine-tuned checkpoint model that is created. fine_tuned_model_checkpoint:, + # The name of the fine-tuning job that this checkpoint was created from. fine_tuning_job_id:, + # Metrics at the step number during the fine-tuning job. metrics:, + # The step number that the checkpoint was created at. step_number:, + # The object type, which is always "fine_tuning.job.checkpoint". object: :"fine_tuning.job.checkpoint" ); end sig do diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/lib/openai/models/function_definition.rbi index 0351f239..bed3cc2c 100644 --- a/rbi/lib/openai/models/function_definition.rbi +++ b/rbi/lib/openai/models/function_definition.rbi @@ -46,8 +46,28 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, description: nil, parameters: nil, strict: nil); end - + def self.new( + # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain + # underscores and dashes, with a maximum length of 64. + name:, + # A description of what the function does, used by the model to choose when and + # how to call the function. + description: nil, + # The parameters the function accepts, described as a JSON Schema object. See the + # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, + # and the + # [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for + # documentation about the format. + # + # Omitting `parameters` defines a function with an empty parameter list. + parameters: nil, + # Whether to enable strict schema adherence when generating the function call. If + # set to true, the model will follow the exact schema defined in the `parameters` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn + # more about Structured Outputs in the + # [function calling guide](docs/guides/function-calling). + strict: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/image.rbi b/rbi/lib/openai/models/image.rbi index cf15fc93..fa3f7278 100644 --- a/rbi/lib/openai/models/image.rbi +++ b/rbi/lib/openai/models/image.rbi @@ -30,8 +30,18 @@ module OpenAI # Represents the content or the URL of an image generated by the OpenAI API. sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } - def self.new(b64_json: nil, revised_prompt: nil, url: nil); end - + def self.new( + # The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, + # and only present if `response_format` is set to `b64_json` for `dall-e-2` and + # `dall-e-3`. + b64_json: nil, + # For `dall-e-3` only, the revised prompt that was used to generate the image.
+ revised_prompt: nil, + # When using `dall-e-2` or `dall-e-3`, the URL of the generated image if + # `response_format` is set to `url` (default value). Unsupported for + # `gpt-image-1`. + url: nil + ); end sig { override.returns({b64_json: String, revised_prompt: String, url: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index c2d38787..96899fe5 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -53,16 +53,27 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The image to use as the basis for the variation(s). Must be a valid PNG file, + # less than 4MB, and square. image:, + # The model to use for image generation. Only `dall-e-2` is supported at this + # time. model: nil, + # The number of images to generate. Must be between 1 and 10. n: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. response_format: nil, + # The size of the generated images. Must be one of `256x256`, `512x512`, or + # `1024x1024`. size: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 922d5db6..68d09cd7 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -82,14 +82,41 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The image(s) to edit. Must be a supported image file or an array of images. For + # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square + # `png` file less than 4MB. image:, + # A text description of the desired image(s). The maximum length is 1000 + # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. prompt:, + # An additional image whose fully transparent areas (e.g. where alpha is zero) + # indicate where `image` should be edited. If multiple images are provided, + # the mask will be applied to the first image. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. mask: nil, + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. model: nil, + # The number of images to generate. Must be between 1 and 10. n: nil, + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. quality: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. response_format: nil, + # The size of the generated images.
Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. size: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index 0ee05de0..3dd3c2e4 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -108,17 +108,61 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A text description of the desired image(s). The maximum length is 32000 + # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters + # for `dall-e-3`. prompt:, + # Allows you to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. background: nil, + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. model: nil, + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). moderation: nil, + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. n: nil, + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. output_compression: nil, + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. output_format: nil, + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. quality: nil, + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. response_format: nil, + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. size: nil, + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images.
Natural causes the model to + # produce more natural, less hyper-real looking images. style: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/lib/openai/models/images_response.rbi index be9cfe0e..cdfcd65f 100644 --- a/rbi/lib/openai/models/images_response.rbi +++ b/rbi/lib/openai/models/images_response.rbi @@ -30,8 +30,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(created:, data: nil, usage: nil); end - + def self.new( + # The Unix timestamp (in seconds) of when the image was created. + created:, + # The list of generated images. + data: nil, + # For `gpt-image-1` only, the token usage information for the image generation. + usage: nil + ); end sig do override .returns( @@ -75,8 +81,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:); end - + def self.new( + # The number of tokens (images and text) in the input prompt. + input_tokens:, + # The input tokens detailed information for the image generation. + input_tokens_details:, + # The number of image tokens in the output image. + output_tokens:, + # The total number of tokens (images and text) used for the image generation. + total_tokens: + ); end sig do override .returns( @@ -101,8 +115,12 @@ module OpenAI # The input tokens detailed information for the image generation. sig { params(image_tokens: Integer, text_tokens: Integer).returns(T.attached_class) } - def self.new(image_tokens:, text_tokens:); end - + def self.new( + # The number of image tokens in the input prompt. + image_tokens:, + # The number of text tokens in the input prompt. + text_tokens: + ); end sig { override.returns({image_tokens: Integer, text_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/model.rbi b/rbi/lib/openai/models/model.rbi index b7dfa10a..06ccb9b5 100644 --- a/rbi/lib/openai/models/model.rbi +++ b/rbi/lib/openai/models/model.rbi @@ -21,8 +21,16 @@ module OpenAI # Describes an OpenAI model offering that can be used with the API. sig { params(id: String, created: Integer, owned_by: String, object: Symbol).returns(T.attached_class) } - def self.new(id:, created:, owned_by:, object: :model); end - + def self.new( + # The model identifier, which can be referenced in the API endpoints. + id:, + # The Unix timestamp (in seconds) when the model was created. + created:, + # The organization that owns the model. + owned_by:, + # The object type, which is always "model". + object: :model + ); end sig { override.returns({id: String, created: Integer, object: Symbol, owned_by: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 7c63a9f8..8f8c32af 100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -42,8 +42,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(categories:, category_applied_input_types:, category_scores:, flagged:); end - + def self.new( + # A list of the categories, and whether they are flagged or not. + categories:, + # A list of the categories along with the input type(s) that the score applies to. + category_applied_input_types:, + # A list of the categories along with their scores as predicted by the model.
+ category_scores:, + # Whether any of the below categories are flagged. + flagged: + ); end sig do override .returns( @@ -147,18 +155,48 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Content that expresses, incites, or promotes harassing language towards any + # target. harassment:, + # Harassment content that also includes violence or serious harm towards any + # target. harassment_threatening:, + # Content that expresses, incites, or promotes hate based on race, gender, + # ethnicity, religion, nationality, sexual orientation, disability status, or + # caste. Hateful content aimed at non-protected groups (e.g., chess players) is + # harassment. hate:, + # Hateful content that also includes violence or serious harm towards the targeted + # group based on race, gender, ethnicity, religion, nationality, sexual + # orientation, disability status, or caste. hate_threatening:, + # Content that includes instructions or advice that facilitate the planning or + # execution of wrongdoing, or that gives advice or instruction on how to commit + # illicit acts. For example, "how to shoplift" would fit this category. illicit:, + # Content that includes instructions or advice that facilitate the planning or + # execution of wrongdoing that also includes violence, or that gives advice or + # instruction on the procurement of any weapon. illicit_violent:, + # Content that promotes, encourages, or depicts acts of self-harm, such as + # suicide, cutting, and eating disorders. self_harm:, + # Content that encourages performing acts of self-harm, such as suicide, cutting, + # and eating disorders, or that gives instructions or advice on how to commit such + # acts. self_harm_instructions:, + # Content where the speaker expresses that they are engaging or intend to engage + # in acts of self-harm, such as suicide, cutting, and eating disorders. self_harm_intent:, + # Content meant to arouse sexual excitement, such as the description of sexual + # activity, or that promotes sexual services (excluding sex education and + # wellness). sexual:, + # Sexual content that includes an individual who is under 18 years old. sexual_minors:, + # Content that depicts death, violence, or physical injury. violence:, + # Content that depicts death, violence, or physical injury in graphic detail. violence_graphic: ); end sig do @@ -265,18 +303,31 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The applied input type(s) for the category 'harassment'. harassment:, + # The applied input type(s) for the category 'harassment/threatening'. harassment_threatening:, + # The applied input type(s) for the category 'hate'. hate:, + # The applied input type(s) for the category 'hate/threatening'. hate_threatening:, + # The applied input type(s) for the category 'illicit'. illicit:, + # The applied input type(s) for the category 'illicit/violent'. illicit_violent:, + # The applied input type(s) for the category 'self-harm'. self_harm:, + # The applied input type(s) for the category 'self-harm/instructions'. self_harm_instructions:, + # The applied input type(s) for the category 'self-harm/intent'. self_harm_intent:, + # The applied input type(s) for the category 'sexual'. sexual:, + # The applied input type(s) for the category 'sexual/minors'. sexual_minors:, + # The applied input type(s) for the category 'violence'. violence:, + # The applied input type(s) for the category 'violence/graphic'. 
violence_graphic: ); end sig do @@ -582,18 +633,31 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The score for the category 'harassment'. harassment:, + # The score for the category 'harassment/threatening'. harassment_threatening:, + # The score for the category 'hate'. hate:, + # The score for the category 'hate/threatening'. hate_threatening:, + # The score for the category 'illicit'. illicit:, + # The score for the category 'illicit/violent'. illicit_violent:, + # The score for the category 'self-harm'. self_harm:, + # The score for the category 'self-harm/instructions'. self_harm_instructions:, + # The score for the category 'self-harm/intent'. self_harm_intent:, + # The score for the category 'sexual'. sexual:, + # The score for the category 'sexual/minors'. sexual_minors:, + # The score for the category 'violence'. violence:, + # The score for the category 'violence/graphic'. violence_graphic: ); end sig do diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 707f700c..527e104f 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -47,8 +47,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(input:, model: nil, request_options: {}); end - + def self.new( + # Input (or inputs) to classify. Can be a single string, an array of strings, or + # an array of multi-modal input objects similar to other models. + input:, + # The content moderation model you would like to use. Learn more in + # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and + # learn about available models + # [here](https://platform.openai.com/docs/models#moderation). + model: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/lib/openai/models/moderation_create_response.rbi index 34f57350..7e78a995 100644 --- a/rbi/lib/openai/models/moderation_create_response.rbi +++ b/rbi/lib/openai/models/moderation_create_response.rbi @@ -24,8 +24,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, model:, results:); end - + def self.new( + # The unique identifier for the moderation request. + id:, + # The model used to generate the moderation results. + model:, + # A list of moderation objects. + results: + ); end sig { override.returns({id: String, model: String, results: T::Array[OpenAI::Models::Moderation]}) } def to_hash; end end diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/lib/openai/models/moderation_image_url_input.rbi index fd241d15..091d5263 100644 --- a/rbi/lib/openai/models/moderation_image_url_input.rbi +++ b/rbi/lib/openai/models/moderation_image_url_input.rbi @@ -24,8 +24,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(image_url:, type: :image_url); end - + def self.new( + # Contains either an image URL or a data URL for a base64 encoded image. + image_url:, + # Always `image_url`. + type: :image_url + ); end sig { override.returns({image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, type: Symbol}) } def to_hash; end @@ -36,8 +40,10 @@ module OpenAI # Contains either an image URL or a data URL for a base64 encoded image. sig { params(url: String).returns(T.attached_class) } - def self.new(url:); end - + def self.new( + # Either a URL of the image or the base64 encoded image data. 
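+ # + # For example, either of the following (illustrative values): + # + #   "https://example.com/image.png" + #   "data:image/png;base64,iVBORw0KGgo..."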
+ url: + ); end sig { override.returns({url: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/lib/openai/models/moderation_text_input.rbi index cdb7c967..462e88ed 100644 --- a/rbi/lib/openai/models/moderation_text_input.rbi +++ b/rbi/lib/openai/models/moderation_text_input.rbi @@ -13,8 +13,12 @@ module OpenAI # An object describing text to classify. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :text); end - + def self.new( + # A string of text to classify. + text:, + # Always `text`. + type: :text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi index 0d6d06b0..5af14eda 100644 --- a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi @@ -11,8 +11,10 @@ module OpenAI # because the file was indexed before the `chunking_strategy` concept was # introduced in the API. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :other); end - + def self.new( + # Always `other`. + type: :other + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index 1223bf2d..a54b00f4 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -38,8 +38,25 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(effort: nil, generate_summary: nil, summary: nil); end - + def self.new( + # **o-series models only** + # + # Constrains effort on reasoning for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently + # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can + # result in faster responses and fewer tokens used on reasoning in a response. + effort: nil, + # **Deprecated:** use `summary` instead. + # + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. + generate_summary: nil, + # A summary of the reasoning performed by the model. This can be useful for + # debugging and understanding the model's reasoning process. One of `auto`, + # `concise`, or `detailed`. + summary: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/lib/openai/models/response_format_json_object.rbi index caf63a9c..cb28d04a 100644 --- a/rbi/lib/openai/models/response_format_json_object.rbi +++ b/rbi/lib/openai/models/response_format_json_object.rbi @@ -11,8 +11,10 @@ module OpenAI # `json_schema` is recommended for models that support it. Note that the model # will not generate JSON without a system or user message instructing it to do so. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :json_object); end - + def self.new( + # The type of response format being defined. Always `json_object`. 
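+ # + # A hedged usage sketch (the surrounding chat call shape is an assumption, not part of this change): + # + #   client.chat.completions.create( + #     model: "gpt-4o", + #     messages: [{role: "user", content: "Reply with a JSON object."}], + #     response_format: {type: :json_object} + #   )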
+ type: :json_object + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/lib/openai/models/response_format_json_schema.rbi index 26bae2de..db50b7cd 100644 --- a/rbi/lib/openai/models/response_format_json_schema.rbi +++ b/rbi/lib/openai/models/response_format_json_schema.rbi @@ -29,8 +29,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(json_schema:, type: :json_schema); end - + def self.new( + # Structured Outputs configuration options, including a JSON Schema. + json_schema:, + # The type of response format being defined. Always `json_schema`. + type: :json_schema + ); end sig { override.returns({json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, type: Symbol}) } def to_hash; end @@ -74,8 +78,23 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, description: nil, schema: nil, strict: nil); end - + def self.new( + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. + name:, + # A description of what the response format is for, used by the model to determine + # how to respond in the format. + description: nil, + # The schema for the response format, described as a JSON Schema object. Learn how + # to build JSON schemas [here](https://json-schema.org/). + schema: nil, + # Whether to enable strict schema adherence when generating the output. If set to + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + strict: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/lib/openai/models/response_format_text.rbi index ec7b0d03..f47d8756 100644 --- a/rbi/lib/openai/models/response_format_text.rbi +++ b/rbi/lib/openai/models/response_format_text.rbi @@ -9,8 +9,10 @@ module OpenAI # Default response format. Used to generate text responses. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :text); end - + def self.new( + # The type of response format being defined. Always `text`. + type: :text + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index 50bedd4b..0223a583 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -31,8 +31,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(display_height:, display_width:, environment:, type: :computer_use_preview); end - + def self.new( + # The height of the computer display. + display_height:, + # The width of the computer display. + display_width:, + # The type of computer environment to control. + environment:, + # The type of the computer use tool. Always `computer_use_preview`. 
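+ # + # For example, a tool declaration might look like the following (illustrative values): + # + #   { + #     type: :computer_use_preview, + #     display_width: 1024, + #     display_height: 768, + #     environment: :browser + #   }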
+ type: :computer_use_preview + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 8efd6fd4..698ad76f 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -57,8 +57,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, type: nil); end - + def self.new( + # Text, image, or audio input to the model, used to generate a response. Can also + # contain previous assistant responses. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index 6d579cea..12834382 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -58,14 +58,18 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The IDs of the vector stores to search. vector_store_ids:, + # A filter to apply based on file attributes. filters: nil, + # The maximum number of results to return. This number should be between 1 and 50 + # inclusive. max_num_results: nil, + # Ranking options for search. ranking_options: nil, + # The type of the file search tool. Always `file_search`. type: :file_search - ) - end - + ); end sig do override .returns( @@ -113,8 +117,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(ranker: nil, score_threshold: nil); end - + def self.new( + # The ranker to use for the file search. + ranker: nil, + # The score threshold for the file search, a number between 0 and 1. Numbers + # closer to 1 will attempt to return only the most relevant results, but may + # return fewer results. + score_threshold: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/lib/openai/models/responses/function_tool.rbi index 5d1008c2..5fa541bd 100644 --- a/rbi/lib/openai/models/responses/function_tool.rbi +++ b/rbi/lib/openai/models/responses/function_tool.rbi @@ -38,8 +38,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, parameters:, strict:, description: nil, type: :function); end - + def self.new( + # The name of the function to call. + name:, + # A JSON schema object describing the parameters of the function. + parameters:, + # Whether to enforce strict parameter validation. Default `true`. + strict:, + # A description of the function. Used by the model to determine whether or not to + # call the function. + description: nil, + # The type of the function tool. Always `function`. + type: :function + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 9699e037..3ea133cb 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -58,8 +58,24 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # An item ID to list items after, used in pagination. + after: nil, + # An item ID to list items before, used in pagination. 
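+ # + # For example, assuming the resource path this gem generates (the response ID is hypothetical): + # + #   client.responses.input_items.list("resp_abc123", limit: 20, order: :asc)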
+ before: nil, + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + include: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # The order to return the input items in. Default is `asc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 5e5dacba..189001b8 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -297,28 +297,130 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Unique identifier for this Response. id:, + # Unix timestamp (in seconds) of when this Response was created. created_at:, + # An error object returned when the model fails to generate a Response. error:, + # Details about why the response is incomplete. incomplete_details:, + # Inserts a system (or developer) message as the first item in the model's + # context. + # + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. instructions:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # An array of content items generated by the model. + # + # - The length and order of items in the `output` array is dependent on the + # model's response. + # - Rather than accessing the first item in the `output` array and assuming it's + # an `assistant` message with the content generated by the model, you might + # consider using the `output_text` property where supported in SDKs. output:, + # Whether to allow the model to run tool calls in parallel. parallel_tool_calls:, + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will + # make the output more random, while lower values like 0.2 will make it more + # focused and deterministic. We generally recommend altering this or `top_p` but + # not both. temperature:, + # How the model should select which tool (or tools) to use when generating a + # response. See the `tools` parameter to see how to specify which tools the model + # can call. tool_choice:, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. 
+ # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). tools:, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p:, + # An upper bound for the number of tokens that can be generated for a response, + # including visible output tokens and + # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, + # The unique ID of the previous response to the model. Use this to create + # multi-turn conversations. Learn more about + # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # Specifies the latency tier to use for processing the request. This parameter is + # relevant for customers subscribed to the scale tier service: + # + # - If set to 'auto', and the Project is Scale tier enabled, the system will + # utilize scale tier credits until they are exhausted. + # - If set to 'auto', and the Project is not Scale tier enabled, the request will + # be processed using the default service tier with a lower uptime SLA and no + # latency guarantee. + # - If set to 'default', the request will be processed using the default service + # tier with a lower uptime SLA and no latency guarantee. + # - If set to 'flex', the request will be processed with the Flex Processing + # service tier. + # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - When not set, the default behavior is 'auto'. + # + # When this parameter is set, the response body will include the `service_tier` + # utilized. service_tier: nil, + # The status of the response generation. One of `completed`, `failed`, + # `in_progress`, or `incomplete`. status: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error.
truncation: nil, + # Represents token usage details including input tokens, output tokens, a + # breakdown of output tokens, and the total tokens used. usage: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # The object type of this resource - always set to `response`. object: :response ); end sig do @@ -390,8 +492,10 @@ module OpenAI params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) .returns(T.attached_class) end - def self.new(reason: nil); end - + def self.new( + # The reason why the response is incomplete. + reason: nil + ); end sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } def to_hash; end diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi index 386c8365..b2e4f0e0 100644 --- a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_delta_event.rbi @@ -14,8 +14,12 @@ module OpenAI # Emitted when there is a partial audio response. sig { params(delta: String, type: Symbol).returns(T.attached_class) } - def self.new(delta:, type: :"response.audio.delta"); end - + def self.new( + # A chunk of Base64 encoded response audio bytes. + delta:, + # The type of the event. Always `response.audio.delta`. + type: :"response.audio.delta" + ); end sig { override.returns({delta: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_done_event.rbi index 40f5901f..9df7735c 100644 --- a/rbi/lib/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_done_event.rbi @@ -10,8 +10,10 @@ module OpenAI # Emitted when the audio response is complete. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :"response.audio.done"); end - + def self.new( + # The type of the event. Always `response.audio.done`. + type: :"response.audio.done" + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi index c121b3b9..7ea46461 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -14,8 +14,12 @@ module OpenAI # Emitted when there is a partial transcript of audio. sig { params(delta: String, type: Symbol).returns(T.attached_class) } - def self.new(delta:, type: :"response.audio.transcript.delta"); end - + def self.new( + # The partial transcript of the audio response. + delta:, + # The type of the event. Always `response.audio.transcript.delta`. 
+ type: :"response.audio.transcript.delta" + ); end sig { override.returns({delta: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi index 983f0e0a..ec7e9f09 100644 --- a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi @@ -10,8 +10,10 @@ module OpenAI # Emitted when the full audio transcript is completed. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :"response.audio.transcript.done"); end - + def self.new( + # The type of the event. Always `response.audio.transcript.done`. + type: :"response.audio.transcript.done" + ); end sig { override.returns({type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index e19943db..4c36e297 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a partial code snippet is added by the code interpreter. sig { params(delta: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(delta:, output_index:, type: :"response.code_interpreter_call.code.delta"); end - + def self.new( + # The partial code snippet added by the code interpreter. + delta:, + # The index of the output item that the code interpreter call is in progress. + output_index:, + # The type of the event. Always `response.code_interpreter_call.code.delta`. + type: :"response.code_interpreter_call.code.delta" + ); end sig { override.returns({delta: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 5d6af463..bc7dd462 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when code snippet output is finalized by the code interpreter. sig { params(code: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(code:, output_index:, type: :"response.code_interpreter_call.code.done"); end - + def self.new( + # The final code snippet output by the code interpreter. + code:, + # The index of the output item that the code interpreter call is in progress. + output_index:, + # The type of the event. Always `response.code_interpreter_call.code.done`. 
+ type: :"response.code_interpreter_call.code.done" + ); end sig { override.returns({code: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 28acb52b..05dc4800 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -34,12 +34,13 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A tool call to run code. code_interpreter_call:, + # The index of the output item that the code interpreter call is in progress. output_index:, + # The type of the event. Always `response.code_interpreter_call.completed`. type: :"response.code_interpreter_call.completed" - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index c0a220d0..2627f40b 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -34,12 +34,13 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A tool call to run code. code_interpreter_call:, + # The index of the output item that the code interpreter call is in progress. output_index:, + # The type of the event. Always `response.code_interpreter_call.in_progress`. type: :"response.code_interpreter_call.in_progress" - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index e0fe0abb..ee7e0050 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -34,12 +34,13 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A tool call to run code. code_interpreter_call:, + # The index of the output item that the code interpreter call is in progress. output_index:, + # The type of the event. Always `response.code_interpreter_call.interpreting`. type: :"response.code_interpreter_call.interpreting" - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 9c9e05b1..2568aca1 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -50,8 +50,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, code:, results:, status:, type: :code_interpreter_call); end - + def self.new( + # The unique ID of the code interpreter tool call. + id:, + # The code to run. + code:, + # The results of the code interpreter tool call. + results:, + # The status of the code interpreter tool call. + status:, + # The type of the code interpreter tool call. Always `code_interpreter_call`. + type: :code_interpreter_call + ); end sig do override .returns( @@ -86,8 +96,12 @@ module OpenAI # The output of a code interpreter tool call that is text. 
sig { params(logs: String, type: Symbol).returns(T.attached_class) } - def self.new(logs:, type: :logs); end - + def self.new( + # The logs of the code interpreter tool call. + logs:, + # The type of the code interpreter text output. Always `logs`. + type: :logs + ); end sig { override.returns({logs: String, type: Symbol}) } def to_hash; end end @@ -113,8 +127,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(files:, type: :files); end - + def self.new( + files:, + # The type of the code interpreter file output. Always `files`. + type: :files + ); end sig do override .returns( @@ -136,8 +153,12 @@ module OpenAI attr_accessor :mime_type sig { params(file_id: String, mime_type: String).returns(T.attached_class) } - def self.new(file_id:, mime_type:); end - + def self.new( + # The ID of the file. + file_id:, + # The MIME type of the file. + mime_type: + ); end sig { override.returns({file_id: String, mime_type: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/lib/openai/models/responses/response_completed_event.rbi index 26105306..bf715172 100644 --- a/rbi/lib/openai/models/responses/response_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_completed_event.rbi @@ -20,8 +20,12 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.completed"); end - + def self.new( + # Properties of the completed response. + response:, + # The type of the event. Always `response.completed`. + type: :"response.completed" + ); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index d899a4bc..37023dcb 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -68,8 +68,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, action:, call_id:, pending_safety_checks:, status:, type:); end - + def self.new( + # The unique ID of the computer call. + id:, + # A click action. + action:, + # An identifier used when responding to the tool call with output. + call_id:, + # The pending safety checks for the computer call. + pending_safety_checks:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status:, + # The type of the computer call. Always `computer_call`. + type: + ); end sig do override .returns( @@ -128,8 +141,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(button:, x:, y_:, type: :click); end - + def self.new( + # Indicates which mouse button was pressed during the click. One of `left`, + # `right`, `wheel`, `back`, or `forward`. + button:, + # The x-coordinate where the click occurred. + x:, + # The y-coordinate where the click occurred. + y_:, + # Specifies the event type. For a click action, this property is always set to + # `click`. + type: :click + ); end sig do override .returns( @@ -189,8 +212,15 @@ module OpenAI # A double click action. sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } - def self.new(x:, y_:, type: :double_click); end - + def self.new( + # The x-coordinate where the double click occurred. 
+ x:, + # The y-coordinate where the double click occurred. + y_:, + # Specifies the event type. For a double click action, this property is always set + # to `double_click`. + type: :double_click + ); end sig { override.returns({type: Symbol, x: Integer, y_: Integer}) } def to_hash; end end @@ -221,8 +251,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(path:, type: :drag); end - + def self.new( + # An array of coordinates representing the path of the drag action. Coordinates + # will appear as an array of objects, e.g. + # + # ``` + # [ + # { x: 100, y: 200 }, + # { x: 200, y: 300 } + # ] + # ``` + path:, + # Specifies the event type. For a drag action, this property is always set to + # `drag`. + type: :drag + ); end sig do override .returns( @@ -242,8 +285,12 @@ module OpenAI # A series of x/y coordinate pairs in the drag path. sig { params(x: Integer, y_: Integer).returns(T.attached_class) } - def self.new(x:, y_:); end - + def self.new( + # The x-coordinate. + x:, + # The y-coordinate. + y_: + ); end sig { override.returns({x: Integer, y_: Integer}) } def to_hash; end end @@ -262,8 +309,14 @@ module OpenAI # A collection of keypresses the model would like to perform. sig { params(keys: T::Array[String], type: Symbol).returns(T.attached_class) } - def self.new(keys:, type: :keypress); end - + def self.new( + # The combination of keys the model is requesting to be pressed. This is an array + # of strings, each representing a key. + keys:, + # Specifies the event type. For a keypress action, this property is always set to + # `keypress`. + type: :keypress + ); end sig { override.returns({keys: T::Array[String], type: Symbol}) } def to_hash; end end @@ -284,8 +337,15 @@ module OpenAI # A mouse move action. sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } - def self.new(x:, y_:, type: :move); end - + def self.new( + # The x-coordinate to move to. + x:, + # The y-coordinate to move to. + y_:, + # Specifies the event type. For a move action, this property is always set to + # `move`. + type: :move + ); end sig { override.returns({type: Symbol, x: Integer, y_: Integer}) } def to_hash; end end @@ -298,8 +358,11 @@ module OpenAI # A screenshot action. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :screenshot); end - + def self.new( + # Specifies the event type. For a screenshot action, this property is always set + # to `screenshot`. + type: :screenshot + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -331,8 +394,19 @@ module OpenAI params(scroll_x: Integer, scroll_y: Integer, x: Integer, y_: Integer, type: Symbol) .returns(T.attached_class) end - def self.new(scroll_x:, scroll_y:, x:, y_:, type: :scroll); end - + def self.new( + # The horizontal scroll distance. + scroll_x:, + # The vertical scroll distance. + scroll_y:, + # The x-coordinate where the scroll occurred. + x:, + # The y-coordinate where the scroll occurred. + y_:, + # Specifies the event type. For a scroll action, this property is always set to + # `scroll`. + type: :scroll + ); end sig do override.returns({scroll_x: Integer, scroll_y: Integer, type: Symbol, x: Integer, y_: Integer}) end def to_hash; end end @@ -351,8 +425,13 @@ module OpenAI # An action to type in text. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :type); end - + def self.new( + # The text to type. + text:, + # Specifies the event type. For a type action, this property is always set to + # `type`.
+ type: :type + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end @@ -365,8 +444,11 @@ module OpenAI # A wait action. sig { params(type: Symbol).returns(T.attached_class) } - def self.new(type: :wait); end - + def self.new( + # Specifies the event type. For a wait action, this property is always set to + # `wait`. + type: :wait + ); end sig { override.returns({type: Symbol}) } def to_hash; end end @@ -395,8 +477,14 @@ module OpenAI # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } - def self.new(id:, code:, message:); end - + def self.new( + # The ID of the pending safety check. + id:, + # The type of the pending safety check. + code:, + # Details about the pending safety check. + message: + ); end sig { override.returns({id: String, code: String, message: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index cbf376cc..c8e4b2bf 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -77,15 +77,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The unique ID of the computer call tool output. id:, + # The ID of the computer tool call that produced the output. call_id:, + # A computer screenshot image used with the computer use tool. output:, + # The safety checks reported by the API that have been acknowledged by the + # developer. acknowledged_safety_checks: nil, + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. status: nil, + # The type of the computer tool call output. Always `computer_call_output`. type: :computer_call_output - ) - end - + ); end sig do override .returns( @@ -116,8 +122,14 @@ module OpenAI # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } - def self.new(id:, code:, message:); end - + def self.new( + # The ID of the pending safety check. + id:, + # The type of the pending safety check. + code:, + # Details about the pending safety check. + message: + ); end sig { override.returns({id: String, code: String, message: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index c95e0378..b0d1e196 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -25,8 +25,15 @@ module OpenAI # A computer screenshot image used with the computer use tool. sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } - def self.new(file_id: nil, image_url: nil, type: :computer_screenshot); end - + def self.new( + # The identifier of an uploaded file that contains the screenshot. + file_id: nil, + # The URL of the screenshot image. + image_url: nil, + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. 
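Both screenshot fields above are optional and `type` defaults to `:computer_screenshot`, so either a file reference or a URL suffices on its own. A sketch, with the class name inferred from the file path:

```ruby
# Hypothetical usage; supply file_id or image_url, whichever you have.
screenshot = OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot.new(
  image_url: "https://example.com/screen.png" # illustrative URL
)
screenshot.to_hash # includes type: :computer_screenshot plus whichever fields were set
```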
+ type: :computer_screenshot + ); end sig { override.returns({type: Symbol, file_id: String, image_url: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi index 5ec11d97..46fa7693 100644 --- a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_added_event.rbi @@ -43,9 +43,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") - end - + def self.new( + # The index of the content part that was added. + content_index:, + # The ID of the output item that the content part was added to. + item_id:, + # The index of the output item that the content part was added to. + output_index:, + # The content part that was added. + part:, + # The type of the event. Always `response.content_part.added`. + type: :"response.content_part.added" + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi index 35915ce9..c1519e4f 100644 --- a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_content_part_done_event.rbi @@ -43,8 +43,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done"); end - + def self.new( + # The index of the content part that is done. + content_index:, + # The ID of the output item that the content part was added to. + item_id:, + # The index of the output item that the content part was added to. + output_index:, + # The content part that is done. + part:, + # The type of the event. Always `response.content_part.done`. + type: :"response.content_part.done" + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 64664281..3dd10986 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -315,23 +315,127 @@ module OpenAI .returns(T.attached_class) end def self.new( + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) input:, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. model:, + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. 
+          #   - `computer_call_output.output.image_url`: Include image urls from the computer
+          #     call output.
           include: nil,
+          # Inserts a system (or developer) message as the first item in the model's
+          # context.
+          #
+          # When using along with `previous_response_id`, the instructions from a previous
+          # response will not be carried over to the next response. This makes it simple to
+          # swap out system (or developer) messages in new responses.
           instructions: nil,
+          # An upper bound for the number of tokens that can be generated for a response,
+          # including visible output tokens and
+          # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning).
           max_output_tokens: nil,
+          # Set of 16 key-value pairs that can be attached to an object. This can be useful
+          # for storing additional information about the object in a structured format, and
+          # querying for objects via API or the dashboard.
+          #
+          # Keys are strings with a maximum length of 64 characters. Values are strings with
+          # a maximum length of 512 characters.
           metadata: nil,
+          # Whether to allow the model to run tool calls in parallel.
           parallel_tool_calls: nil,
+          # The unique ID of the previous response to the model. Use this to create
+          # multi-turn conversations. Learn more about
+          # [conversation state](https://platform.openai.com/docs/guides/conversation-state).
           previous_response_id: nil,
+          # **o-series models only**
+          #
+          # Configuration options for
+          # [reasoning models](https://platform.openai.com/docs/guides/reasoning).
           reasoning: nil,
+          # Specifies the latency tier to use for processing the request. This parameter is
+          # relevant for customers subscribed to the scale tier service:
+          #
+          # - If set to 'auto', and the Project is Scale tier enabled, the system will
+          #   utilize scale tier credits until they are exhausted.
+          # - If set to 'auto', and the Project is not Scale tier enabled, the request will
+          #   be processed using the default service tier with a lower uptime SLA and no
+          #   latency guarantee.
+          # - If set to 'default', the request will be processed using the default service
+          #   tier with a lower uptime SLA and no latency guarantee.
+          # - If set to 'flex', the request will be processed with the Flex Processing
+          #   service tier.
+          #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+          # - When not set, the default behavior is 'auto'.
+          #
+          # When this parameter is set, the response body will include the `service_tier`
+          # utilized.
           service_tier: nil,
+          # Whether to store the generated model response for later retrieval via API.
           store: nil,
+          # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
+          # make the output more random, while lower values like 0.2 will make it more
+          # focused and deterministic. We generally recommend altering this or `top_p` but
+          # not both.
           temperature: nil,
+          # Configuration options for a text response from the model. Can be plain text or
+          # structured JSON data. Learn more:
+          #
+          # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text)
+          # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs)
           text: nil,
+          # How the model should select which tool (or tools) to use when generating a
+          # response. See the `tools` parameter to see how to specify which tools the model
+          # can call.
           tool_choice: nil,
+          # An array of tools the model may call while generating a response. You can
+          # specify which tool to use by setting the `tool_choice` parameter.
+ # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, + # An alternative to sampling with temperature, called nucleus sampling, where the + # model considers the results of the tokens with top_p probability mass. So 0.1 + # means only the tokens comprising the top 10% probability mass are considered. + # + # We generally recommend altering this or `temperature` but not both. top_p: nil, + # The truncation strategy to use for the model response. + # + # - `auto`: If the context of this response and previous ones exceeds the model's + # context window size, the model will truncate the response to fit the context + # window by dropping input items in the middle of the conversation. + # - `disabled` (default): If a model response will exceed the context window size + # for a model, the request will fail with a 400 error. truncation: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} ); end diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/lib/openai/models/responses/response_created_event.rbi index 58605657..33664af0 100644 --- a/rbi/lib/openai/models/responses/response_created_event.rbi +++ b/rbi/lib/openai/models/responses/response_created_event.rbi @@ -20,8 +20,12 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.created"); end - + def self.new( + # The response that was created. + response:, + # The type of the event. Always `response.created`. + type: :"response.created" + ); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 3beca9b7..796293af 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -17,8 +17,12 @@ module OpenAI params(code: OpenAI::Models::Responses::ResponseError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:); end - + def self.new( + # The error code for the response. + code:, + # A human-readable description of the error. 
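Of all the `ResponseCreateParams` keywords documented above, only `input:` and `model:` are required; the rest default to `nil` (and `request_options` to `{}`). A minimal sketch, with the class name inferred from the file path and an illustrative model id:

```ruby
# Hypothetical usage; the model name and values are illustrative only.
params = OpenAI::Models::Responses::ResponseCreateParams.new(
  input: "Write a haiku about type signatures.",
  model: "gpt-4o",
  temperature: 0.2,       # 0..2; lower is more focused and deterministic
  max_output_tokens: 256  # upper bound, including reasoning tokens
)
```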
+ message: + ); end sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } def to_hash; end diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/lib/openai/models/responses/response_error_event.rbi index c81fa537..24a190a8 100644 --- a/rbi/lib/openai/models/responses/response_error_event.rbi +++ b/rbi/lib/openai/models/responses/response_error_event.rbi @@ -25,8 +25,16 @@ module OpenAI params(code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol) .returns(T.attached_class) end - def self.new(code:, message:, param:, type: :error); end - + def self.new( + # The error code. + code:, + # The error message. + message:, + # The error parameter. + param:, + # The type of the event. Always `error`. + type: :error + ); end sig do override.returns({code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol}) end diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/lib/openai/models/responses/response_failed_event.rbi index 7cafac40..ad3251e9 100644 --- a/rbi/lib/openai/models/responses/response_failed_event.rbi +++ b/rbi/lib/openai/models/responses/response_failed_event.rbi @@ -20,8 +20,12 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.failed"); end - + def self.new( + # The response that failed. + response:, + # The type of the event. Always `response.failed`. + type: :"response.failed" + ); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi index 289e0742..7ac06ad1 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a file search call is completed (results found). sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.file_search_call.completed"); end - + def self.new( + # The ID of the output item that the file search call is initiated. + item_id:, + # The index of the output item that the file search call is initiated. + output_index:, + # The type of the event. Always `response.file_search_call.completed`. + type: :"response.file_search_call.completed" + ); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi index 04d00647..99274ded 100644 --- a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a file search call is initiated. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.file_search_call.in_progress"); end - + def self.new( + # The ID of the output item that the file search call is initiated. 
+          item_id:,
+          # The index of the output item that the file search call is initiated.
+          output_index:,
+          # The type of the event. Always `response.file_search_call.in_progress`.
+          type: :"response.file_search_call.in_progress"
+        ); end
         sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) }
         def to_hash; end
       end
diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi
index a6de609a..605b332b 100644
--- a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi
+++ b/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi
@@ -18,8 +18,14 @@ module OpenAI
         # Emitted when a file search is currently searching.
         sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) }
-        def self.new(item_id:, output_index:, type: :"response.file_search_call.searching"); end
-
+        def self.new(
+          # The ID of the output item that the file search call is initiated.
+          item_id:,
+          # The index of the output item that the file search call is searching.
+          output_index:,
+          # The type of the event. Always `response.file_search_call.searching`.
+          type: :"response.file_search_call.searching"
+        ); end
         sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) }
         def to_hash; end
       end
diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
index edeca91a..94b16558 100644
--- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
+++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
@@ -40,8 +40,19 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(id:, queries:, status:, results: nil, type: :file_search_call); end
-
+        def self.new(
+          # The unique ID of the file search tool call.
+          id:,
+          # The queries used to search for files.
+          queries:,
+          # The status of the file search tool call. One of `in_progress`, `searching`,
+          # `incomplete`, or `failed`.
+          status:,
+          # The results of the file search tool call.
+          results: nil,
+          # The type of the file search tool call. Always `file_search_call`.
+          type: :file_search_call
+        ); end
         sig do
           override
             .returns(
@@ -126,8 +137,22 @@ module OpenAI
             )
               .returns(T.attached_class)
           end
-          def self.new(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil); end
-
+          def self.new(
+            # Set of 16 key-value pairs that can be attached to an object. This can be useful
+            # for storing additional information about the object in a structured format, and
+            # querying for objects via API or the dashboard. Keys are strings with a maximum
+            # length of 64 characters. Values are strings with a maximum length of 512
+            # characters, booleans, or numbers.
+            attributes: nil,
+            # The unique ID of the file.
+            file_id: nil,
+            # The name of the file.
+            filename: nil,
+            # The relevance score of the file - a value between 0 and 1.
+            score: nil,
+            # The text that was retrieved from the file.
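Per the signature above, a file search call needs `id:`, `queries:`, and `status:`, while `results:` stays `nil` until results exist. A sketch, assuming the status enum is accepted as a symbol:

```ruby
# Hypothetical usage; the id and query values are illustrative.
call = OpenAI::Models::Responses::ResponseFileSearchToolCall.new(
  id: "fs_123",
  queries: ["quarterly revenue"],
  status: :in_progress # one of :in_progress, :searching, :incomplete, :failed
)
```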
+ text: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi index 25a553e6..1b94b7c2 100644 --- a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi @@ -47,8 +47,25 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(name:, schema:, description: nil, strict: nil, type: :json_schema); end - + def self.new( + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores + # and dashes, with a maximum length of 64. + name:, + # The schema for the response format, described as a JSON Schema object. Learn how + # to build JSON schemas [here](https://json-schema.org/). + schema:, + # A description of what the response format is for, used by the model to determine + # how to respond in the format. + description: nil, + # Whether to enable strict schema adherence when generating the output. If set to + # true, the model will always follow the exact schema defined in the `schema` + # field. Only a subset of JSON Schema is supported when `strict` is `true`. To + # learn more, read the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + strict: nil, + # The type of response format being defined. Always `json_schema`. + type: :json_schema + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi index 11ac7ab7..801e2df6 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -29,8 +29,16 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta"); end - + def self.new( + # The function-call arguments delta that is added. + delta:, + # The ID of the output item that the function-call arguments delta is added to. + item_id:, + # The index of the output item that the function-call arguments delta is added to. + output_index:, + # The type of the event. Always `response.function_call_arguments.delta`. + type: :"response.function_call_arguments.delta" + ); end sig { override.returns({delta: String, item_id: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi index ebafa372..0886a45d 100644 --- a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -28,8 +28,15 @@ module OpenAI type: Symbol ).returns(T.attached_class) end - def self.new(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done"); end - + def self.new( + # The function-call arguments. + arguments:, + # The ID of the item. + item_id:, + # The index of the output item. 
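The `json_schema` format above couples a `name` (letters, digits, underscores, dashes; max 64 chars) with a JSON Schema hash, optionally under `strict` adherence. A sketch, with the class name inferred from the file path:

```ruby
# Hypothetical usage; the schema hash is illustrative.
format = OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig.new(
  name: "weather_report",
  schema: {
    "type" => "object",
    "properties" => {
      "city" => {"type" => "string"},
      "temp_c" => {"type" => "number"}
    },
    "required" => ["city", "temp_c"]
  },
  strict: true # always follow the schema exactly
)
```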
+ output_index:, + type: :"response.function_call_arguments.done" + ); end sig { override.returns({arguments: String, item_id: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 5f89a8cf..0862724c 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -49,8 +49,21 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call); end - + def self.new( + # A JSON string of the arguments to pass to the function. + arguments:, + # The unique ID of the function tool call generated by the model. + call_id:, + # The name of the function to run. + name:, + # The unique ID of the function tool call. + id: nil, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the function tool call. Always `function_call`. + type: :function_call + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi index 7ed35c37..cfa76b9c 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi @@ -12,8 +12,10 @@ module OpenAI # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. sig { params(id: String).returns(T.attached_class) } - def self.new(id:); end - + def self.new( + # The unique ID of the function tool call. + id: + ); end sig { override.returns({id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index df902df8..64da7c58 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -38,8 +38,19 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, call_id:, output:, status: nil, type: :function_call_output); end - + def self.new( + # The unique ID of the function call tool output. + id:, + # The unique ID of the function tool call generated by the model. + call_id:, + # A JSON string of the output of the function tool call. + output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the function tool call output. Always `function_call_output`. + type: :function_call_output + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 4c9bd2bd..1c892bf4 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -27,8 +27,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, status:, type: :web_search_call); end - + def self.new( + # The unique ID of the web search tool call. + id:, + # The status of the web search tool call. + status:, + # The type of the web search tool call. 
Always `web_search_call`. + type: :web_search_call + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_in_progress_event.rbi index 37252dc3..7ef933f7 100644 --- a/rbi/lib/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_in_progress_event.rbi @@ -20,8 +20,12 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.in_progress"); end - + def self.new( + # The response that is in progress. + response:, + # The type of the event. Always `response.in_progress`. + type: :"response.in_progress" + ); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/lib/openai/models/responses/response_incomplete_event.rbi index 12fe7ff5..079d5434 100644 --- a/rbi/lib/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/lib/openai/models/responses/response_incomplete_event.rbi @@ -20,8 +20,12 @@ module OpenAI params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(response:, type: :"response.incomplete"); end - + def self.new( + # The response that was incomplete. + response:, + # The type of the event. Always `response.incomplete`. + type: :"response.incomplete" + ); end sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index ff2d0075..9244a048 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -25,8 +25,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, format_:, type: :input_audio); end - + def self.new( + # Base64-encoded audio data. + data:, + # The format of the audio data. Currently supported formats are `mp3` and `wav`. + format_:, + # The type of the input item. Always `input_audio`. + type: :input_audio + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/lib/openai/models/responses/response_input_file.rbi index 5d46ed32..74a535c7 100644 --- a/rbi/lib/openai/models/responses/response_input_file.rbi +++ b/rbi/lib/openai/models/responses/response_input_file.rbi @@ -33,8 +33,16 @@ module OpenAI sig do params(file_data: String, file_id: String, filename: String, type: Symbol).returns(T.attached_class) end - def self.new(file_data: nil, file_id: nil, filename: nil, type: :input_file); end - + def self.new( + # The content of the file to be sent to the model. + file_data: nil, + # The ID of the file to be sent to the model. + file_id: nil, + # The name of the file to be sent to the model. + filename: nil, + # The type of the input item. Always `input_file`. 
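`ResponseInputAudio` above takes base64-encoded bytes plus a `format_` restricted to the documented `mp3`/`wav` values. A sketch, assuming the enum is accepted as a symbol:

```ruby
# Hypothetical usage; the audio file path is illustrative.
require "base64"

audio = OpenAI::Models::Responses::ResponseInputAudio.new(
  data: Base64.strict_encode64(File.binread("clip.mp3")), # base64-encoded audio bytes
  format_: :mp3                                           # or :wav
)
```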
+ type: :input_file + ); end sig { override.returns({type: Symbol, file_data: String, file_id: String, filename: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index 2cf0bfec..d058b10d 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -33,8 +33,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(detail:, file_id: nil, image_url: nil, type: :input_image); end - + def self.new( + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail:, + # The ID of the file to be sent to the model. + file_id: nil, + # The URL of the image to be sent to the model. A fully qualified URL or base64 + # encoded image in a data URL. + image_url: nil, + # The type of the input item. Always `input_image`. + type: :input_image + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index 7187822f..6595bbdb 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -65,8 +65,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content:, role:, status: nil, type: nil); end - + def self.new( + # A list of one or many input items to the model, containing different content + # types. + content:, + # The role of the message input. One of `user`, `system`, or `developer`. + role:, + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the message input. Always set to `message`. + type: nil + ); end sig do override .returns( @@ -215,15 +225,21 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the computer tool call that produced the output. call_id:, + # A computer screenshot image used with the computer use tool. output:, + # The ID of the computer tool call output. id: nil, + # The safety checks reported by the API that have been acknowledged by the + # developer. acknowledged_safety_checks: nil, + # The status of the message input. One of `in_progress`, `completed`, or + # `incomplete`. Populated when input items are returned via API. status: nil, + # The type of the computer tool call output. Always `computer_call_output`. type: :computer_call_output - ) - end - + ); end sig do override .returns( @@ -254,8 +270,14 @@ module OpenAI # A pending safety check for the computer call. sig { params(id: String, code: String, message: String).returns(T.attached_class) } - def self.new(id:, code:, message:); end - + def self.new( + # The ID of the pending safety check. + id:, + # The type of the pending safety check. + code:, + # Details about the pending safety check. + message: + ); end sig { override.returns({id: String, code: String, message: String}) } def to_hash; end end @@ -327,8 +349,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(call_id:, output:, id: nil, status: nil, type: :function_call_output); end - + def self.new( + # The unique ID of the function tool call generated by the model. + call_id:, + # A JSON string of the output of the function tool call. + output:, + # The unique ID of the function tool call output. Populated when this item is + # returned via API. + id: nil, + # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the function tool call output. Always `function_call_output`. + type: :function_call_output + ); end sig do override .returns( @@ -381,8 +415,12 @@ module OpenAI # An internal identifier for an item to reference. sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new(id:, type: :item_reference); end - + def self.new( + # The ID of the item to reference. + id:, + # The type of item to reference. Always `item_reference`. + type: :item_reference + ); end sig { override.returns({id: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index dbae6d9b..9c426966 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -59,8 +59,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, content:, role:, status: nil, type: nil); end - + def self.new( + # The unique ID of the message input. + id:, + # A list of one or many input items to the model, containing different content + # types. + content:, + # The role of the message input. One of `user`, `system`, or `developer`. + role:, + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the message input. Always set to `message`. + type: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/lib/openai/models/responses/response_input_text.rbi index d3df7cc9..6ecb1f55 100644 --- a/rbi/lib/openai/models/responses/response_input_text.rbi +++ b/rbi/lib/openai/models/responses/response_input_text.rbi @@ -14,8 +14,12 @@ module OpenAI # A text input to the model. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :input_text); end - + def self.new( + # The text input to the model. + text:, + # The type of the input item. Always `input_text`. + type: :input_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/lib/openai/models/responses/response_item_list.rbi index a041edfb..60f9ee2b 100644 --- a/rbi/lib/openai/models/responses/response_item_list.rbi +++ b/rbi/lib/openai/models/responses/response_item_list.rbi @@ -62,8 +62,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, first_id:, has_more:, last_id:, object: :list); end - + def self.new( + # A list of items used to generate this response. + data:, + # The ID of the first item in the list. + first_id:, + # Whether there are more items available. + has_more:, + # The ID of the last item in the list. + last_id:, + # The type of object returned, must be `list`. + object: :list + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/lib/openai/models/responses/response_output_audio.rbi index 010d71b6..ccca474a 100644 --- a/rbi/lib/openai/models/responses/response_output_audio.rbi +++ b/rbi/lib/openai/models/responses/response_output_audio.rbi @@ -18,8 +18,14 @@ module OpenAI # An audio output from the model. 
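`ResponseInputText` is the simplest of these shapes, and its `to_hash` override above pins the exact serialized form, which makes the contract easy to see end to end:

```ruby
# Matches the params and override sigs shown above.
text = OpenAI::Models::Responses::ResponseInputText.new(text: "Hello!")
text.to_hash # => {text: "Hello!", type: :input_text}
```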
         sig { params(data: String, transcript: String, type: Symbol).returns(T.attached_class) }
-        def self.new(data:, transcript:, type: :output_audio); end
-
+        def self.new(
+          # Base64-encoded audio data from the model.
+          data:,
+          # The transcript of the audio data from the model.
+          transcript:,
+          # The type of the output audio. Always `output_audio`.
+          type: :output_audio
+        ); end
         sig { override.returns({data: String, transcript: String, type: Symbol}) }
         def to_hash; end
       end
diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi
index 230e21d0..6318ea2f 100644
--- a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi
+++ b/rbi/lib/openai/models/responses/response_output_item_added_event.rbi
@@ -44,8 +44,14 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(item:, output_index:, type: :"response.output_item.added"); end
-
+        def self.new(
+          # The output item that was added.
+          item:,
+          # The index of the output item that was added.
+          output_index:,
+          # The type of the event. Always `response.output_item.added`.
+          type: :"response.output_item.added"
+        ); end
         sig do
           override
             .returns(
diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi
index 94747df6..2ec17e88 100644
--- a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi
+++ b/rbi/lib/openai/models/responses/response_output_item_done_event.rbi
@@ -44,8 +44,14 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(item:, output_index:, type: :"response.output_item.done"); end
-
+        def self.new(
+          # The output item that was marked done.
+          item:,
+          # The index of the output item that was marked done.
+          output_index:,
+          # The type of the event. Always `response.output_item.done`.
+          type: :"response.output_item.done"
+        ); end
         sig do
           override
             .returns(
diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi
index e9c25f07..67f1cfc9 100644
--- a/rbi/lib/openai/models/responses/response_output_message.rbi
+++ b/rbi/lib/openai/models/responses/response_output_message.rbi
@@ -46,8 +46,19 @@ module OpenAI
           )
             .returns(T.attached_class)
         end
-        def self.new(id:, content:, status:, role: :assistant, type: :message); end
-
+        def self.new(
+          # The unique ID of the output message.
+          id:,
+          # The content of the output message.
+          content:,
+          # The status of the message input. One of `in_progress`, `completed`, or
+          # `incomplete`. Populated when input items are returned via API.
+          status:,
+          # The role of the output message. Always `assistant`.
+          role: :assistant,
+          # The type of the output message. Always `message`.
+          type: :message
+        ); end
         sig do
           override
             .returns(
diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/lib/openai/models/responses/response_output_refusal.rbi
index ece03c37..16a800dd 100644
--- a/rbi/lib/openai/models/responses/response_output_refusal.rbi
+++ b/rbi/lib/openai/models/responses/response_output_refusal.rbi
@@ -14,8 +14,12 @@ module OpenAI
       # A refusal from the model.
         sig { params(refusal: String, type: Symbol).returns(T.attached_class) }
-        def self.new(refusal:, type: :refusal); end
-
+        def self.new(
+          # The refusal explanation from the model.
+          refusal:,
+          # The type of the refusal. Always `refusal`.
+ type: :refusal + ); end sig { override.returns({refusal: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/lib/openai/models/responses/response_output_text.rbi index 4d499086..e0267bd2 100644 --- a/rbi/lib/openai/models/responses/response_output_text.rbi +++ b/rbi/lib/openai/models/responses/response_output_text.rbi @@ -42,8 +42,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(annotations:, text:, type: :output_text); end - + def self.new( + # The annotations of the text output. + annotations:, + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ); end sig do override .returns( @@ -81,8 +87,14 @@ module OpenAI # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_citation); end - + def self.new( + # The ID of the file. + file_id:, + # The index of the file in the list of files. + index:, + # The type of the file citation. Always `file_citation`. + type: :file_citation + ); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } def to_hash; end end @@ -113,8 +125,18 @@ module OpenAI params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:, type: :url_citation); end - + def self.new( + # The index of the last character of the URL citation in the message. + end_index:, + # The index of the first character of the URL citation in the message. + start_index:, + # The title of the web resource. + title:, + # The URL of the web resource. + url:, + # The type of the URL citation. Always `url_citation`. + type: :url_citation + ); end sig do override.returns( { @@ -144,8 +166,14 @@ module OpenAI # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_path); end - + def self.new( + # The ID of the file. + file_id:, + # The index of the file in the list of files. + index:, + # The type of the file path. Always `file_path`. + type: :file_path + ); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index ecd2a673..7b83bf50 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -35,8 +35,17 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(id:, summary:, status: nil, type: :reasoning); end - + def self.new( + # The unique identifier of the reasoning content. + id:, + # Reasoning text contents. + summary:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status: nil, + # The type of the object. Always `reasoning`. + type: :reasoning + ); end sig do override .returns( @@ -60,8 +69,12 @@ module OpenAI attr_accessor :type sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :summary_text); end - + def self.new( + # A short summary of the reasoning used by the model when generating the response. + text:, + # The type of the object. Always `summary_text`. 
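A reasoning item above nests its text inside `summary` parts, each with `type` pinned to `:summary_text`. A sketch, assuming the nested constant is `ResponseReasoningItem::Summary`:

```ruby
# Hypothetical usage; the constant nesting and values are assumptions.
item = OpenAI::Models::Responses::ResponseReasoningItem.new(
  id: "rs_123",
  summary: [
    OpenAI::Models::Responses::ResponseReasoningItem::Summary.new(
      text: "Compared both options before answering."
    )
  ]
)
```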
+ type: :summary_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi index 2a08cbdb..22c28d0f 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -44,14 +44,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the item this summary part is associated with. item_id:, + # The index of the output item this summary part is associated with. output_index:, + # The summary part that was added. part:, + # The index of the summary part within the reasoning summary. summary_index:, + # The type of the event. Always `response.reasoning_summary_part.added`. type: :"response.reasoning_summary_part.added" - ) - end - + ); end sig do override .returns( @@ -77,8 +80,12 @@ module OpenAI # The summary part that was added. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :summary_text); end - + def self.new( + # The text of the summary part. + text:, + # The type of the summary part. Always `summary_text`. + type: :summary_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi index 074c9319..93ac8fbb 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -44,14 +44,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the item this summary part is associated with. item_id:, + # The index of the output item this summary part is associated with. output_index:, + # The completed summary part. part:, + # The index of the summary part within the reasoning summary. summary_index:, + # The type of the event. Always `response.reasoning_summary_part.done`. type: :"response.reasoning_summary_part.done" - ) - end - + ); end sig do override .returns( @@ -77,8 +80,12 @@ module OpenAI # The completed summary part. sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new(text:, type: :summary_text); end - + def self.new( + # The text of the summary part. + text:, + # The type of the summary part. Always `summary_text`. + type: :summary_text + ); end sig { override.returns({text: String, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi index e659c052..0b471aed 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -30,14 +30,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The text delta that was added to the summary. delta:, + # The ID of the item this summary text delta is associated with. item_id:, + # The index of the output item this summary text delta is associated with. output_index:, + # The index of the summary part within the reasoning summary. summary_index:, + # The type of the event. 
Always `response.reasoning_summary_text.delta`. type: :"response.reasoning_summary_text.delta" - ) - end - + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi index dea35ce6..6ff8c695 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -30,14 +30,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The ID of the item this summary text is associated with. item_id:, + # The index of the output item this summary text is associated with. output_index:, + # The index of the summary part within the reasoning summary. summary_index:, + # The full text of the completed reasoning summary. text:, + # The type of the event. Always `response.reasoning_summary_text.done`. type: :"response.reasoning_summary_text.done" - ) - end - + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi index ec2c268f..204d391f 100644 --- a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi @@ -29,8 +29,18 @@ module OpenAI params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) .returns(T.attached_class) end - def self.new(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta"); end - + def self.new( + # The index of the content part that the refusal text is added to. + content_index:, + # The refusal text that is added. + delta:, + # The ID of the output item that the refusal text is added to. + item_id:, + # The index of the output item that the refusal text is added to. + output_index:, + # The type of the event. Always `response.refusal.delta`. + type: :"response.refusal.delta" + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi index 9f7db249..5bc2d764 100644 --- a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_refusal_done_event.rbi @@ -35,8 +35,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done"); end - + def self.new( + # The index of the content part that the refusal text is finalized. + content_index:, + # The ID of the output item that the refusal text is finalized. + item_id:, + # The index of the output item that the refusal text is finalized. + output_index:, + # The refusal text that is finalized. + refusal:, + # The type of the event. Always `response.refusal.done`. + type: :"response.refusal.done" + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/lib/openai/models/responses/response_retrieve_params.rbi index 39ea7e1c..876e71b5 100644 --- a/rbi/lib/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/lib/openai/models/responses/response_retrieve_params.rbi @@ -22,8 +22,12 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(include: nil, request_options: {}); end - + def self.new( + # Additional fields to include in the response. 
See the `include` parameter for + # Response creation above for more information. + include: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi index 16c54b75..91bc1793 100644 --- a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi @@ -54,11 +54,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A citation to a file. annotation:, + # The index of the annotation that was added. annotation_index:, + # The index of the content part that the text annotation was added to. content_index:, + # The ID of the output item that the text annotation was added to. item_id:, + # The index of the output item that the text annotation was added to. output_index:, + # The type of the event. Always `response.output_text.annotation.added`. type: :"response.output_text.annotation.added" ); end sig do @@ -99,8 +105,14 @@ module OpenAI # A citation to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_citation); end - + def self.new( + # The ID of the file. + file_id:, + # The index of the file in the list of files. + index:, + # The type of the file citation. Always `file_citation`. + type: :file_citation + ); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } def to_hash; end end @@ -131,8 +143,18 @@ module OpenAI params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) .returns(T.attached_class) end - def self.new(end_index:, start_index:, title:, url:, type: :url_citation); end - + def self.new( + # The index of the last character of the URL citation in the message. + end_index:, + # The index of the first character of the URL citation in the message. + start_index:, + # The title of the web resource. + title:, + # The URL of the web resource. + url:, + # The type of the URL citation. Always `url_citation`. + type: :url_citation + ); end sig do override.returns( { @@ -162,8 +184,14 @@ module OpenAI # A path to a file. sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(file_id:, index:, type: :file_path); end - + def self.new( + # The ID of the file. + file_id:, + # The index of the file in the list of files. + index:, + # The type of the file path. Always `file_path`. + type: :file_path + ); end sig { override.returns({file_id: String, index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/lib/openai/models/responses/response_text_config.rbi index 68a82916..2f1c517e 100644 --- a/rbi/lib/openai/models/responses/response_text_config.rbi +++ b/rbi/lib/openai/models/responses/response_text_config.rbi @@ -59,8 +59,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(format_: nil); end - + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. 
+ # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/lib/openai/models/responses/response_text_delta_event.rbi index 7880e8f9..29ea7431 100644 --- a/rbi/lib/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_delta_event.rbi @@ -29,9 +29,18 @@ module OpenAI params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) .returns(T.attached_class) end - def self.new(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") - end - + def self.new( + # The index of the content part that the text delta was added to. + content_index:, + # The text delta that was added. + delta:, + # The ID of the output item that the text delta was added to. + item_id:, + # The index of the output item that the text delta was added to. + output_index:, + # The type of the event. Always `response.output_text.delta`. + type: :"response.output_text.delta" + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/lib/openai/models/responses/response_text_done_event.rbi index 52fa9bef..40459549 100644 --- a/rbi/lib/openai/models/responses/response_text_done_event.rbi +++ b/rbi/lib/openai/models/responses/response_text_done_event.rbi @@ -29,8 +29,18 @@ module OpenAI params(content_index: Integer, item_id: String, output_index: Integer, text: String, type: Symbol) .returns(T.attached_class) end - def self.new(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done"); end - + def self.new( + # The index of the content part that the text content is finalized. + content_index:, + # The ID of the output item that the text content is finalized. + item_id:, + # The index of the output item that the text content is finalized. + output_index:, + # The text content that is finalized. + text:, + # The type of the event. Always `response.output_text.done`. + type: :"response.output_text.done" + ); end sig do override .returns({ diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/lib/openai/models/responses/response_usage.rbi index 852d91c4..f435b178 100644 --- a/rbi/lib/openai/models/responses/response_usage.rbi +++ b/rbi/lib/openai/models/responses/response_usage.rbi @@ -53,14 +53,17 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The number of input tokens. input_tokens:, + # A detailed breakdown of the input tokens. input_tokens_details:, + # The number of output tokens. output_tokens:, + # A detailed breakdown of the output tokens. output_tokens_details:, + # The total number of tokens used. total_tokens: - ) - end - + ); end sig do override .returns( @@ -83,8 +86,11 @@ module OpenAI # A detailed breakdown of the input tokens. sig { params(cached_tokens: Integer).returns(T.attached_class) } - def self.new(cached_tokens:); end - + def self.new( + # The number of tokens that were retrieved from the cache. + # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). 
+ cached_tokens: + ); end sig { override.returns({cached_tokens: Integer}) } def to_hash; end end @@ -96,8 +102,10 @@ module OpenAI # A detailed breakdown of the output tokens. sig { params(reasoning_tokens: Integer).returns(T.attached_class) } - def self.new(reasoning_tokens:); end - + def self.new( + # The number of reasoning tokens. + reasoning_tokens: + ); end sig { override.returns({reasoning_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi index fab93564..1f479679 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a web search call is completed. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.completed"); end - + def self.new( + # Unique ID for the output item associated with the web search call. + item_id:, + # The index of the output item that the web search call is associated with. + output_index:, + # The type of the event. Always `response.web_search_call.completed`. + type: :"response.web_search_call.completed" + ); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi index b98665f8..f8c355d5 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a web search call is initiated. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.in_progress"); end - + def self.new( + # Unique ID for the output item associated with the web search call. + item_id:, + # The index of the output item that the web search call is associated with. + output_index:, + # The type of the event. Always `response.web_search_call.in_progress`. + type: :"response.web_search_call.in_progress" + ); end sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi index 0ad8d67e..0cffcfae 100644 --- a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi @@ -18,8 +18,14 @@ module OpenAI # Emitted when a web search call is executing. sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } - def self.new(item_id:, output_index:, type: :"response.web_search_call.searching"); end - + def self.new( + # Unique ID for the output item associated with the web search call. + item_id:, + # The index of the output item that the web search call is associated with. + output_index:, + # The type of the event. Always `response.web_search_call.searching`. 
+          type: :"response.web_search_call.searching"
+        ); end
         sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) }
         def to_hash; end
       end
diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/lib/openai/models/responses/tool_choice_function.rbi
index d0753332..98de2eb9 100644
--- a/rbi/lib/openai/models/responses/tool_choice_function.rbi
+++ b/rbi/lib/openai/models/responses/tool_choice_function.rbi
@@ -14,8 +14,12 @@ module OpenAI
 
         # Use this option to force the model to call a specific function.
         sig { params(name: String, type: Symbol).returns(T.attached_class) }
-        def self.new(name:, type: :function); end
-
+        def self.new(
+          # The name of the function to call.
+          name:,
+          # For function calling, the type is always `function`.
+          type: :function
+        ); end
         sig { override.returns({name: String, type: Symbol}) }
         def to_hash; end
       end
diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi
index 1b8ed0b6..943a8c1f 100644
--- a/rbi/lib/openai/models/responses/tool_choice_types.rbi
+++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi
@@ -18,8 +18,17 @@ module OpenAI
       # Indicates that the model should use a built-in tool to generate a response.
       # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools).
       sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) }
-      def self.new(type:); end
-
+      def self.new(
+        # The type of hosted tool the model should use. Learn more about
+        # [built-in tools](https://platform.openai.com/docs/guides/tools).
+        #
+        # Allowed values are:
+        #
+        # - `file_search`
+        # - `web_search_preview`
+        # - `computer_use_preview`
+        type:
+      ); end
       sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) }
       def to_hash; end
 
diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi
index 9b23c93c..fa6955fd 100644
--- a/rbi/lib/openai/models/responses/web_search_tool.rbi
+++ b/rbi/lib/openai/models/responses/web_search_tool.rbi
@@ -41,8 +41,17 @@ module OpenAI
         )
           .returns(T.attached_class)
       end
-      def self.new(type:, search_context_size: nil, user_location: nil); end
-
+      def self.new(
+        # The type of the web search tool. One of:
+        #
+        # - `web_search_preview`
+        # - `web_search_preview_2025_03_11`
+        type:,
+        # High level guidance for the amount of context window space to use for the
+        # search. One of `low`, `medium`, or `high`. `medium` is the default.
+        search_context_size: nil,
+        user_location: nil
+      ); end
       sig do
         override
           .returns(
@@ -130,8 +139,20 @@ module OpenAI
           params(city: String, country: String, region: String, timezone: String, type: Symbol)
             .returns(T.attached_class)
         end
-        def self.new(city: nil, country: nil, region: nil, timezone: nil, type: :approximate); end
-
+        def self.new(
+          # Free text input for the city of the user, e.g. `San Francisco`.
+          city: nil,
+          # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of
+          # the user, e.g. `US`.
+          country: nil,
+          # Free text input for the region of the user, e.g. `California`.
+          region: nil,
+          # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the
+          # user, e.g. `America/Los_Angeles`.
+          timezone: nil,
+          # The type of location approximation. Always `approximate`.
+ type: :approximate + ); end sig do override.returns({type: Symbol, city: String, country: String, region: String, timezone: String}) end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/lib/openai/models/static_file_chunking_strategy.rbi index ce9bb8d5..ae5b7298 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy.rbi @@ -15,8 +15,15 @@ module OpenAI attr_accessor :max_chunk_size_tokens sig { params(chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer).returns(T.attached_class) } - def self.new(chunk_overlap_tokens:, max_chunk_size_tokens:); end - + def self.new( + # The number of tokens that overlap between chunks. The default value is `400`. + # + # Note that the overlap must not exceed half of `max_chunk_size_tokens`. + chunk_overlap_tokens:, + # The maximum number of tokens in each chunk. The default value is `800`. The + # minimum value is `100` and the maximum value is `4096`. + max_chunk_size_tokens: + ); end sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi index a5f3133a..2a978597 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi @@ -17,8 +17,11 @@ module OpenAI params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(static:, type: :static); end - + def self.new( + static:, + # Always `static`. + type: :static + ); end sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi index 49f61a35..ba278994 100644 --- a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi @@ -18,8 +18,11 @@ module OpenAI params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) .returns(T.attached_class) end - def self.new(static:, type: :static); end - + def self.new( + static:, + # Always `static`. + type: :static + ); end sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } def to_hash; end end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 083dbd10..f0483771 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -60,18 +60,27 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The Upload unique identifier, which can be referenced in API endpoints. id:, + # The intended number of bytes to be uploaded. bytes:, + # The Unix timestamp (in seconds) for when the Upload was created. created_at:, + # The Unix timestamp (in seconds) for when the Upload will expire. expires_at:, + # The name of the file to be uploaded. filename:, + # The intended purpose of the file. + # [Please refer here](https://platform.openai.com/docs/api-reference/files/object#files/object-purpose) + # for acceptable values. purpose:, + # The status of the Upload. status:, + # The `File` object represents a document that has been uploaded to OpenAI. 
file: nil, + # The object type, which is always "upload". object: :upload - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/lib/openai/models/upload_complete_params.rbi index 344ab351..71c0c76b 100644 --- a/rbi/lib/openai/models/upload_complete_params.rbi +++ b/rbi/lib/openai/models/upload_complete_params.rbi @@ -26,8 +26,14 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(part_ids:, md5: nil, request_options: {}); end - + def self.new( + # The ordered list of Part IDs. + part_ids:, + # The optional md5 checksum for the file contents to verify if the bytes uploaded + # matches what you expect. + md5: nil, + request_options: {} + ); end sig do override.returns({part_ids: T::Array[String], md5: String, request_options: OpenAI::RequestOptions}) end diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/lib/openai/models/upload_create_params.rbi index 74c1e762..cd69d21c 100644 --- a/rbi/lib/openai/models/upload_create_params.rbi +++ b/rbi/lib/openai/models/upload_create_params.rbi @@ -38,8 +38,23 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(bytes:, filename:, mime_type:, purpose:, request_options: {}); end - + def self.new( + # The number of bytes in the file you are uploading. + bytes:, + # The name of the file to upload. + filename:, + # The MIME type of the file. + # + # This must fall within the supported MIME types for your file purpose. See the + # supported MIME types for assistants and vision. + mime_type:, + # The intended purpose of the uploaded file. + # + # See the + # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). + purpose:, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index 3915001e..1a62a478 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -18,8 +18,11 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(data:, request_options: {}); end - + def self.new( + # The chunk of bytes for this Part. + data:, + request_options: {} + ); end sig { override.returns({data: T.any(Pathname, StringIO), request_options: OpenAI::RequestOptions}) } def to_hash; end end diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/lib/openai/models/uploads/upload_part.rbi index a6d037e0..17a8849b 100644 --- a/rbi/lib/openai/models/uploads/upload_part.rbi +++ b/rbi/lib/openai/models/uploads/upload_part.rbi @@ -24,8 +24,16 @@ module OpenAI sig do params(id: String, created_at: Integer, upload_id: String, object: Symbol).returns(T.attached_class) end - def self.new(id:, created_at:, upload_id:, object: :"upload.part"); end - + def self.new( + # The upload Part unique identifier, which can be referenced in API endpoints. + id:, + # The Unix timestamp (in seconds) for when the Part was created. + created_at:, + # The ID of the Upload object that this Part was added to. + upload_id:, + # The object type, which is always `upload.part`. 
+ object: :"upload.part" + ); end sig { override.returns({id: String, created_at: Integer, object: Symbol, upload_id: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index dd704e90..eefaf648 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -78,16 +78,33 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The identifier, which can be referenced in API endpoints. id:, + # The Unix timestamp (in seconds) for when the vector store was created. created_at:, file_counts:, + # The Unix timestamp (in seconds) for when the vector store was last active. last_active_at:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata:, + # The name of the vector store. name:, + # The status of the vector store, which can be either `expired`, `in_progress`, or + # `completed`. A status of `completed` indicates that the vector store is ready + # for use. status:, + # The total number of bytes used by the files in the vector store. usage_bytes:, + # The expiration policy for a vector store. expires_after: nil, + # The Unix timestamp (in seconds) for when the vector store will expire. expires_at: nil, + # The object type, which is always `vector_store`. object: :vector_store ); end sig do @@ -141,8 +158,18 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(cancelled:, completed:, failed:, in_progress:, total:); end - + def self.new( + # The number of files that were cancelled. + cancelled:, + # The number of files that have been successfully processed. + completed:, + # The number of files that have failed to process. + failed:, + # The number of files that are currently being processed. + in_progress:, + # The total number of files. + total: + ); end sig do override .returns({ @@ -185,8 +212,13 @@ module OpenAI # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at); end - + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + anchor: :last_active_at + ); end sig { override.returns({anchor: Symbol, days: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/lib/openai/models/vector_store_create_params.rbi index c6256c21..ea4b49c5 100644 --- a/rbi/lib/openai/models/vector_store_create_params.rbi +++ b/rbi/lib/openai/models/vector_store_create_params.rbi @@ -85,15 +85,26 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, + # The expiration policy for a vector store. expires_after: nil, + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. file_ids: nil, + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. metadata: nil, + # The name of the vector store. name: nil, request_options: {} - ) - end - + ); end sig do override .returns( @@ -124,8 +135,13 @@ module OpenAI # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at); end - + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + anchor: :last_active_at + ); end sig { override.returns({anchor: Symbol, days: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index 802bf5c5..5a6218c2 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -52,8 +52,25 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index 47525fb1..e187c6b1 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -61,15 +61,19 @@ module OpenAI .returns(T.attached_class) end def self.new( + # A query string for a search query:, + # A filter to apply based on file attributes. filters: nil, + # The maximum number of results to return. This number should be between 1 and 50 + # inclusive. max_num_results: nil, + # Ranking options for search. ranking_options: nil, + # Whether to rewrite the natural language query for vector search. 
rewrite_query: nil, request_options: {} - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index fdfceacf..26ac1ca1 100644 --- a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -37,8 +37,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(attributes:, content:, file_id:, filename:, score:); end - + def self.new( + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes:, + # Content chunks from the file. + content:, + # The ID of the vector store file. + file_id:, + # The name of the vector store file. + filename:, + # The similarity score for the result. + score: + ); end sig do override .returns( @@ -73,8 +87,12 @@ module OpenAI params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) .returns(T.attached_class) end - def self.new(text:, type:); end - + def self.new( + # The text content returned from search. + text:, + # The type of content. + type: + ); end sig do override .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/lib/openai/models/vector_store_update_params.rbi index b40311aa..5955b4ed 100644 --- a/rbi/lib/openai/models/vector_store_update_params.rbi +++ b/rbi/lib/openai/models/vector_store_update_params.rbi @@ -40,8 +40,20 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(expires_after: nil, metadata: nil, name: nil, request_options: {}); end - + def self.new( + # The expiration policy for a vector store. + expires_after: nil, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The name of the vector store. + name: nil, + request_options: {} + ); end sig do override .returns( @@ -67,8 +79,13 @@ module OpenAI # The expiration policy for a vector store. sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new(days:, anchor: :last_active_at); end - + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
+ anchor: :last_active_at + ); end sig { override.returns({anchor: Symbol, days: Integer}) } def to_hash; end end diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi index f64e4470..9fbb2a61 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi @@ -60,8 +60,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}); end - + def self.new( + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that + # the vector store should use. Useful for tools like `file_search` that can access + # files. + file_ids:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes: nil, + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. + chunking_strategy: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 041e3c17..d5544112 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -67,15 +67,26 @@ module OpenAI end def self.new( vector_store_id:, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. before: nil, + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. filter: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
order: nil, request_options: {} - ) - end - + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/lib/openai/models/vector_stores/file_content_response.rbi index d7f8b8d3..658c26b7 100644 --- a/rbi/lib/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/lib/openai/models/vector_stores/file_content_response.rbi @@ -19,8 +19,12 @@ module OpenAI attr_writer :type sig { params(text: String, type: String).returns(T.attached_class) } - def self.new(text: nil, type: nil); end - + def self.new( + # The text content + text: nil, + # The content type (currently only `"text"`) + type: nil + ); end sig { override.returns({text: String, type: String}) } def to_hash; end end diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/lib/openai/models/vector_stores/file_create_params.rbi index 774e6c2e..17f31378 100644 --- a/rbi/lib/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_create_params.rbi @@ -60,8 +60,22 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}); end - + def self.new( + # A [File](https://platform.openai.com/docs/api-reference/files) ID that the + # vector store should use. Useful for tools like `file_search` that can access + # files. + file_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes: nil, + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # strategy. Only applicable if `file_ids` is non-empty. + chunking_strategy: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 0ef2cb3a..5e252bbb 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -61,8 +61,27 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}); end - + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A cursor for use in pagination. `before` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # starting with obj_foo, your subsequent call can include before=obj_foo in order + # to fetch the previous page of the list. + before: nil, + # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + filter: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
+ order: nil, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/lib/openai/models/vector_stores/file_update_params.rbi index 41edb9ab..1d18d667 100644 --- a/rbi/lib/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_update_params.rbi @@ -26,8 +26,16 @@ module OpenAI ) .returns(T.attached_class) end - def self.new(vector_store_id:, attributes:, request_options: {}); end - + def self.new( + vector_store_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. + attributes:, + request_options: {} + ); end sig do override .returns( diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 72356d37..168da5d5 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -97,14 +97,34 @@ module OpenAI .returns(T.attached_class) end def self.new( + # The identifier, which can be referenced in API endpoints. id:, + # The Unix timestamp (in seconds) for when the vector store file was created. created_at:, + # The last error associated with this vector store file. Will be `null` if there + # are no errors. last_error:, + # The status of the vector store file, which can be either `in_progress`, + # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the + # vector store file is ready for use. status:, + # The total vector store usage in bytes. Note that this may be different from the + # original file size. usage_bytes:, + # The ID of the + # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) + # that the [File](https://platform.openai.com/docs/api-reference/files) is + # attached to. vector_store_id:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters, booleans, or numbers. attributes: nil, + # The strategy used to chunk the file. chunking_strategy: nil, + # The object type, which is always `vector_store.file`. object: :"vector_store.file" ); end sig do @@ -140,8 +160,12 @@ module OpenAI params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) .returns(T.attached_class) end - def self.new(code:, message:); end - + def self.new( + # One of `server_error` or `rate_limit_exceeded`. + code:, + # A human-readable description of the error. 
+          message:
+        ); end
        sig do
          override
            .returns(
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
index 4b1e6a60..18461980 100644
--- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
+++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
@@ -53,15 +53,23 @@ module OpenAI
             .returns(T.attached_class)
         end
         def self.new(
+          # The identifier, which can be referenced in API endpoints.
           id:,
+          # The Unix timestamp (in seconds) for when the vector store files batch was
+          # created.
           created_at:,
           file_counts:,
+          # The status of the vector store files batch, which can be either `in_progress`,
+          # `completed`, `cancelled` or `failed`.
           status:,
+          # The ID of the
+          # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object)
+          # that the [File](https://platform.openai.com/docs/api-reference/files) is
+          # attached to.
           vector_store_id:,
+          # The object type, which is always `vector_store.file_batch`.
           object: :"vector_store.files_batch"
-        )
-        end
-
+        ); end
         sig do
           override
             .returns(
@@ -108,8 +116,18 @@ module OpenAI
             )
               .returns(T.attached_class)
           end
-          def self.new(cancelled:, completed:, failed:, in_progress:, total:); end
-
+          def self.new(
+            # The number of files that were cancelled.
+            cancelled:,
+            # The number of files that have been processed.
+            completed:,
+            # The number of files that have failed to process.
+            failed:,
+            # The number of files that are currently being processed.
+            in_progress:,
+            # The total number of files.
+            total:
+          ); end
           sig do
             override
               .returns({

From ff14db0340e977c153848e365fdddfefe6753dc6 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 24 Apr 2025 18:59:25 +0000
Subject: [PATCH 158/295] feat: support specifying content-type with FilePart
 class

---
 lib/openai.rb                                 |  3 +-
 lib/openai/file_part.rb                       | 55 +++++++++++
 lib/openai/internal/type/converter.rb         |  2 +-
 .../type/{io_like.rb => file_input.rb}        | 12 ++-
 lib/openai/internal/util.rb                   | 96 ++++++++++---------
 .../audio/transcription_create_params.rb      |  6 +-
 .../models/audio/translation_create_params.rb |  6 +-
 lib/openai/models/file_create_params.rb       |  6 +-
 .../models/image_create_variation_params.rb   |  6 +-
 lib/openai/models/image_edit_params.rb        | 14 +--
 .../models/uploads/part_create_params.rb      |  6 +-
 lib/openai/resources/audio/transcriptions.rb  |  4 +-
 lib/openai/resources/audio/translations.rb    |  2 +-
 lib/openai/resources/files.rb                 |  2 +-
 lib/openai/resources/images.rb                |  6 +-
 lib/openai/resources/uploads/parts.rb         |  2 +-
 rbi/lib/openai/file_part.rbi                  | 34 +++++++
 .../type/{io_like.rbi => file_input.rbi}      |  8 +-
 rbi/lib/openai/internal/util.rbi              | 28 +++---
 .../audio/transcription_create_params.rbi     |  6 +-
 .../audio/translation_create_params.rbi       |  6 +-
 rbi/lib/openai/models/file_create_params.rbi  |  6 +-
 .../models/image_create_variation_params.rbi  |  6 +-
 rbi/lib/openai/models/image_edit_params.rbi   | 38 ++++++--
 .../models/uploads/part_create_params.rbi     | 12 ++-
 .../openai/resources/audio/transcriptions.rbi |  4 +-
 .../openai/resources/audio/translations.rbi   |  2 +-
 rbi/lib/openai/resources/files.rbi            |  2 +-
 rbi/lib/openai/resources/images.rbi           | 12 ++-
 rbi/lib/openai/resources/uploads/parts.rbi    |  2 +-
 sig/openai/file_part.rbs                      | 21 ++++
 .../type/{io_like.rbs => file_input.rbs}      |  2 +-
 sig/openai/internal/util.rbs                  | 17 ++--
 .../audio/transcription_create_params.rbs     |  6 +-
 .../audio/translation_create_params.rbs       |  6 +-
 sig/openai/models/file_create_params.rbs      |  9 +-
 .../models/image_create_variation_params.rbs  |  6 +-
 sig/openai/models/image_edit_params.rbs       | 19 ++--
 .../models/uploads/part_create_params.rbs     |  6 +-
 sig/openai/resources/audio/transcriptions.rbs |  4 +-
 sig/openai/resources/audio/translations.rbs   |  2 +-
 sig/openai/resources/files.rbs                |  2 +-
 sig/openai/resources/images.rbs               |  4 +-
 sig/openai/resources/uploads/parts.rbs        |  2 +-
 test/openai/internal/type/base_model_test.rb  |  2 +-
 test/openai/internal/util_test.rb             | 14 ++-
 46 files changed, 342 insertions(+), 174 deletions(-)
 create mode 100644 lib/openai/file_part.rb
 rename lib/openai/internal/type/{io_like.rb => file_input.rb} (84%)
 create mode 100644 rbi/lib/openai/file_part.rbi
 rename rbi/lib/openai/internal/type/{io_like.rbi => file_input.rbi} (83%)
 create mode 100644 sig/openai/file_part.rbs
 rename sig/openai/internal/type/{io_like.rbs => file_input.rbs} (96%)

diff --git a/lib/openai.rb b/lib/openai.rb
index 5bd82a3c..0883b56f 100644
--- a/lib/openai.rb
+++ b/lib/openai.rb
@@ -31,7 +31,7 @@
 require_relative "openai/internal/type/converter"
 require_relative "openai/internal/type/unknown"
 require_relative "openai/internal/type/boolean"
-require_relative "openai/internal/type/io_like"
+require_relative "openai/internal/type/file_input"
 require_relative "openai/internal/type/enum"
 require_relative "openai/internal/type/union"
 require_relative "openai/internal/type/array_of"
@@ -42,6 +42,7 @@
 require_relative "openai/internal/type/request_parameters"
 require_relative "openai/internal"
 require_relative "openai/request_options"
+require_relative "openai/file_part"
 require_relative "openai/errors"
 require_relative "openai/internal/transport/base_client"
 require_relative "openai/internal/transport/pooled_net_requester"
diff --git a/lib/openai/file_part.rb b/lib/openai/file_part.rb
new file mode 100644
index 00000000..f17371ec
--- /dev/null
+++ b/lib/openai/file_part.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module OpenAI
+  class FilePart
+    # @return [Pathname, StringIO, IO, String]
+    attr_reader :content
+
+    # @return [String, nil]
+    attr_reader :content_type
+
+    # @return [String, nil]
+    attr_reader :filename
+
+    # @api private
+    #
+    # @return [String]
+    private def read
+      case content
+      in Pathname
+        content.read(binmode: true)
+      in StringIO
+        content.string
+      in IO
+        content.read
+      in String
+        content
+      end
+    end
+
+    # @param a [Object]
+    #
+    # @return [String]
+    def to_json(*a) = read.to_json(*a)
+
+    # @param a [Object]
+    #
+    # @return [String]
+    def to_yaml(*a) = read.to_yaml(*a)
+
+    # @param content [Pathname, StringIO, IO, String]
+    # @param filename [String, nil]
+    # @param content_type [String, nil]
+    def initialize(content, filename: nil, content_type: nil)
+      @content = content
+      @filename =
+        case content
+        in Pathname
+          filename.nil? ? content.basename.to_path : File.basename(filename)
+        else
+          filename.nil? ? nil : File.basename(filename)
+        end
+      @content_type = content_type
+    end
+  end
+end
diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb
index 1cd4385d..0b58d3b0 100644
--- a/lib/openai/internal/type/converter.rb
+++ b/lib/openai/internal/type/converter.rb
@@ -43,7 +43,7 @@ def dump(value, state:)
             value.string
           in Pathname | IO
             state[:can_retry] = false if value.is_a?(IO)
-            OpenAI::Internal::Util::SerializationAdapter.new(value)
+            OpenAI::FilePart.new(value)
           else
             value
           end
diff --git a/lib/openai/internal/type/io_like.rb b/lib/openai/internal/type/file_input.rb
similarity index 84%
rename from lib/openai/internal/type/io_like.rb
rename to lib/openai/internal/type/file_input.rb
index 568418be..892a1761 100644
--- a/lib/openai/internal/type/io_like.rb
+++ b/lib/openai/internal/type/file_input.rb
@@ -7,8 +7,12 @@ module Type
       #
       # @abstract
       #
-      # Either `Pathname` or `StringIO`.
-      class IOLike
+      # Either `Pathname` or `StringIO`, or `IO`, or
+      # `OpenAI::Internal::Type::FileInput`.
+      #
+      # Note: when `IO` is used, all retries are disabled, since many `IO` streams are
+      # not rewindable.
+      class FileInput
         extend OpenAI::Internal::Type::Converter
 
         private_class_method :new
@@ -20,7 +24,7 @@ class IOLike
         # @return [Boolean]
         def self.===(other)
           case other
-          in StringIO | Pathname | IO
+          in Pathname | StringIO | IO | String | OpenAI::FilePart
             true
           else
             false
@@ -32,7 +36,7 @@ def self.===(other)
         # @param other [Object]
         #
         # @return [Boolean]
-        def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::IOLike
+        def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::FileInput
 
         class << self
           # @api private
diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb
index fa6aec64..7b82110b 100644
--- a/lib/openai/internal/util.rb
+++ b/lib/openai/internal/util.rb
@@ -348,27 +348,6 @@ def normalized_headers(*headers)
         end
       end
 
-      # @api private
-      class SerializationAdapter
-        # @return [Pathname, IO]
-        attr_reader :inner
-
-        # @param a [Object]
-        #
-        # @return [String]
-        def to_json(*a) = (inner.is_a?(IO) ? inner.read : inner.read(binmode: true)).to_json(*a)
-
-        # @param a [Object]
-        #
-        # @return [String]
-        def to_yaml(*a) = (inner.is_a?(IO) ? inner.read : inner.read(binmode: true)).to_yaml(*a)
-
-        # @api private
-        #
-        # @param inner [Pathname, IO]
-        def initialize(inner) = @inner = inner
-      end
-
       # @api private
       #
       # An adapter that satisfies the IO interface required by `::IO.copy_stream`
@@ -480,42 +459,35 @@ class << self
         # @api private
         #
         # @param y [Enumerator::Yielder]
-        # @param boundary [String]
-        # @param key [Symbol, String]
         # @param val [Object]
         # @param closing [Array]
-        private def write_multipart_chunk(y, boundary:, key:, val:, closing:)
-          val = val.inner if val.is_a?(OpenAI::Internal::Util::SerializationAdapter)
+        # @param content_type [String, nil]
+        private def write_multipart_content(y, val:, closing:, content_type: nil)
+          content_type ||= "application/octet-stream"
 
-          y << "--#{boundary}\r\n"
-          y << "Content-Disposition: form-data"
-          unless key.nil?
- name = ERB::Util.url_encode(key.to_s) - y << "; name=\"#{name}\"" - end - case val - in Pathname | IO - filename = ERB::Util.url_encode(File.basename(val.to_path)) - y << "; filename=\"#{filename}\"" - else - end - y << "\r\n" case val + in OpenAI::FilePart + return write_multipart_content( + y, + val: val.content, + closing: closing, + content_type: val.content_type + ) in Pathname - y << "Content-Type: application/octet-stream\r\n\r\n" + y << "Content-Type: #{content_type}\r\n\r\n" io = val.open(binmode: true) closing << io.method(:close) IO.copy_stream(io, y) in IO - y << "Content-Type: application/octet-stream\r\n\r\n" + y << "Content-Type: #{content_type}\r\n\r\n" IO.copy_stream(val, y) in StringIO - y << "Content-Type: application/octet-stream\r\n\r\n" + y << "Content-Type: #{content_type}\r\n\r\n" y << val.string in String - y << "Content-Type: application/octet-stream\r\n\r\n" + y << "Content-Type: #{content_type}\r\n\r\n" y << val.to_s - in _ if primitive?(val) + in -> { primitive?(_1) } y << "Content-Type: text/plain\r\n\r\n" y << val.to_s else @@ -525,6 +497,36 @@ class << self y << "\r\n" end + # @api private + # + # @param y [Enumerator::Yielder] + # @param boundary [String] + # @param key [Symbol, String] + # @param val [Object] + # @param closing [Array] + private def write_multipart_chunk(y, boundary:, key:, val:, closing:) + y << "--#{boundary}\r\n" + y << "Content-Disposition: form-data" + + unless key.nil? + name = ERB::Util.url_encode(key.to_s) + y << "; name=\"#{name}\"" + end + + case val + in OpenAI::FilePart unless val.filename.nil? + filename = ERB::Util.url_encode(val.filename) + y << "; filename=\"#{filename}\"" + in Pathname | IO + filename = ERB::Util.url_encode(File.basename(val.to_path)) + y << "; filename=\"#{filename}\"" + else + end + y << "\r\n" + + write_multipart_content(y, val: val, closing: closing) + end + # @api private # # @param body [Object] @@ -565,14 +567,12 @@ class << self # @return [Object] def encode_content(headers, body) content_type = headers["content-type"] - body = body.inner if body.is_a?(OpenAI::Internal::Util::SerializationAdapter) - case [content_type, body] in [OpenAI::Internal::Util::JSON_CONTENT, Hash | Array | -> { primitive?(_1) }] [headers, JSON.fast_generate(body)] - in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless body.is_a?(StringIO) || body.is_a?(IO) + in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless body.is_a?(OpenAI::Internal::Type::FileInput) [headers, body.lazy.map { JSON.fast_generate(_1) }] - in [%r{^multipart/form-data}, Hash | Pathname | StringIO | IO] + in [%r{^multipart/form-data}, Hash | OpenAI::Internal::Type::FileInput] boundary, strio = encode_multipart_streaming(body) headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} [headers, strio] @@ -580,6 +580,8 @@ def encode_content(headers, body) [headers, body.to_s] in [_, StringIO] [headers, body.string] + in [_, OpenAI::FilePart] + [headers, body.content] else [headers, body] end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index ccb71b14..c9efed38 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -14,8 +14,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. 
# - # @return [Pathname, StringIO] - required :file, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart] + required :file, OpenAI::Internal::Type::FileInput # @!attribute model # ID of the model to use. The options are `gpt-4o-transcribe`, @@ -86,7 +86,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details. # - # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # ... # # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index a933aaa9..d5ab1b8d 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -12,8 +12,8 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [Pathname, StringIO] - required :file, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart] + required :file, OpenAI::Internal::Type::FileInput # @!attribute model # ID of the model to use. Only `whisper-1` (which is powered by our open source @@ -52,7 +52,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranslationCreateParams} for more details. # - # @param file [Pathname, StringIO] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # ... # # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 94e5f46f..d5062f4d 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -10,8 +10,8 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The File object (not file name) to be uploaded. # - # @return [Pathname, StringIO] - required :file, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart] + required :file, OpenAI::Internal::Type::FileInput # @!attribute purpose # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -26,7 +26,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::FileCreateParams} for more details. # - # @param file [Pathname, StringIO] The File object (not file name) to be uploaded. ... + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. ... # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # ... 
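
The parameter docs above all describe the same file-input union (`Pathname`, `StringIO`, `IO`, `OpenAI::FilePart`) without showing it in use. A minimal usage sketch follows; the client setup, the `training.jsonl` file, and the `application/jsonl` content type are illustrative assumptions, not part of this patch:

```ruby
require "openai"
require "pathname"

# Assumed setup: an API key in the environment. Illustrative only.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# A bare Pathname is sent as application/octet-stream, and the filename
# is inferred from the path itself.
client.files.create(file: Pathname.new("training.jsonl"), purpose: :"fine-tune")

# Wrapping the content in OpenAI::FilePart overrides the content type and
# filename written into the multipart form-data body.
part = OpenAI::FilePart.new(
  Pathname.new("training.jsonl"),
  filename: "dataset.jsonl",
  content_type: "application/jsonl"
)
client.files.create(file: part, purpose: :"fine-tune")
```

Passing a raw `IO` also works, but as noted in `FileInput`, it disables retries because the stream may not be rewindable.
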
diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index d3b45a32..6dc339d5 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -11,8 +11,8 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. # - # @return [Pathname, StringIO] - required :image, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart] + required :image, OpenAI::Internal::Type::FileInput # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this @@ -56,7 +56,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageCreateVariationParams} for more details. # - # @param image [Pathname, StringIO] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # ... # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 45591d12..33526447 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -13,7 +13,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square # `png` file less than 4MB. # - # @return [Pathname, StringIO, Array] + # @return [Pathname, StringIO, IO, OpenAI::FilePart, Array] required :image, union: -> { OpenAI::Models::ImageEditParams::Image } # @!attribute prompt @@ -29,8 +29,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. # - # @return [Pathname, StringIO, nil] - optional :mask, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart, nil] + optional :mask, OpenAI::Internal::Type::FileInput # @!attribute model # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are @@ -83,13 +83,13 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # - # @param image [Pathname, StringIO, Array] The image(s) to edit. Must be a supported image file or an array of images. For + # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. For # ... # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # ... # - # @param mask [Pathname, StringIO] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # ... # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. 
Only `dall-e-2` and `gpt-image-1` are sup @@ -118,14 +118,14 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel module Image extend OpenAI::Internal::Type::Union - variant OpenAI::Internal::Type::IOLike + variant OpenAI::Internal::Type::FileInput variant -> { OpenAI::Models::ImageEditParams::Image::StringArray } # @!method self.variants # @return [Array(StringIO, Array)] - StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::IOLike] + StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput] end # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index d4e1d7bf..df8520b6 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -11,14 +11,14 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data # The chunk of bytes for this Part. # - # @return [Pathname, StringIO] - required :data, OpenAI::Internal::Type::IOLike + # @return [Pathname, StringIO, IO, OpenAI::FilePart] + required :data, OpenAI::Internal::Type::FileInput # @!method initialize(data:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Uploads::PartCreateParams} for more details. # - # @param data [Pathname, StringIO] The chunk of bytes for this Part. ... + # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index d7f5de3e..a3fc4454 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -14,7 +14,7 @@ class Transcriptions # # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # ... # # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc @@ -68,7 +68,7 @@ def create(params) # # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # ... # # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. 
The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index d52be7ea..0a43a53a 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -11,7 +11,7 @@ class Translations # # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # - # @param file [Pathname, StringIO] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # ... # # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 726182c0..77ba2646 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -30,7 +30,7 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [Pathname, StringIO] The File object (not file name) to be uploaded. ... + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. ... # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # ... diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index e5f8f465..f34671b0 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -10,7 +10,7 @@ class Images # # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # ... # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time @@ -52,13 +52,13 @@ def create_variation(params) # # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, Array] The image(s) to edit. Must be a supported image file or an array of images. For + # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. For # ... # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # ... # - # @param mask [Pathname, StringIO] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # ... # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index b0e572a1..3d7e4770 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -23,7 +23,7 @@ class Parts # # @param upload_id [String] The ID of the Upload. ... 
# - # @param data [Pathname, StringIO] The chunk of bytes for this Part. ... + # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/rbi/lib/openai/file_part.rbi b/rbi/lib/openai/file_part.rbi new file mode 100644 index 00000000..20c06b85 --- /dev/null +++ b/rbi/lib/openai/file_part.rbi @@ -0,0 +1,34 @@ +# typed: strong + +module OpenAI + class FilePart + sig { returns(T.any(Pathname, StringIO, IO, String)) } + attr_reader :content + + sig { returns(T.nilable(String)) } + attr_reader :content_type + + sig { returns(T.nilable(String)) } + attr_reader :filename + + # @api private + sig { returns(String) } + private def read; end + + sig { params(a: T.anything).returns(String) } + def to_json(*a); end + + sig { params(a: T.anything).returns(String) } + def to_yaml(*a); end + + sig do + params( + content: T.any(Pathname, StringIO, IO, String), + filename: T.nilable(String), + content_type: T.nilable(String) + ) + .returns(T.attached_class) + end + def self.new(content, filename: nil, content_type: nil); end + end +end diff --git a/rbi/lib/openai/internal/type/io_like.rbi b/rbi/lib/openai/internal/type/file_input.rbi similarity index 83% rename from rbi/lib/openai/internal/type/io_like.rbi rename to rbi/lib/openai/internal/type/file_input.rbi index 321a5563..5271aa5e 100644 --- a/rbi/lib/openai/internal/type/io_like.rbi +++ b/rbi/lib/openai/internal/type/file_input.rbi @@ -5,8 +5,12 @@ module OpenAI module Type # @api private # - # Either `Pathname` or `StringIO`. - class IOLike + # Either `Pathname`, `StringIO`, `IO`, or + # `OpenAI::FilePart`. + # + # Note: when `IO` is used, all retries are disabled, since many `IO` streams are + # not rewindable. + class FileInput extend OpenAI::Internal::Type::Converter abstract! diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index f1ca8cb7..70758fde 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -140,22 +140,6 @@ module OpenAI def normalized_headers(*headers); end end - # @api private - class SerializationAdapter - sig { returns(T.any(Pathname, IO)) } - attr_reader :inner - - sig { params(a: T.anything).returns(String) } - def to_json(*a); end - - sig { params(a: T.anything).returns(String) } - def to_yaml(*a); end - - # @api private - sig { params(inner: T.any(Pathname, IO)).returns(T.attached_class) } - def self.new(inner); end - end - # @api private # # An adapter that satisfies the IO interface required by `::IO.copy_stream` @@ -196,6 +180,18 @@ module OpenAI JSONL_CONTENT = T.let(%r{^application/(?:x-)?jsonl}, Regexp) class << self + # @api private + sig do + params( + y: Enumerator::Yielder, + val: T.anything, + closing: T::Array[T.proc.void], + content_type: T.nilable(String) + ) + .void + end + private def write_multipart_content(y, val:, closing:, content_type: nil); end + # @api private sig do params( diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index f89a401f..57d34b34 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -9,7 +9,7 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
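A minimal sketch of the call-site shapes the widened `FileInput` union accepts (the client setup, file paths, and content type below are assumed for illustration, not taken from this patch):

```ruby
require "openai"

openai = OpenAI::Client.new # assumes OPENAI_API_KEY is set in the environment

# `Pathname`: the file is lazily streamed from disk, never fully buffered.
openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune")

# `StringIO`: useful when the bytes are already in memory.
openai.files.create(file: StringIO.new("{}\n"), purpose: "fine-tune")

# Raw `IO`: accepted, but retries are disabled since the stream may not be rewindable.
File.open("input.jsonl", "rb") do |io|
  openai.files.create(file: io, purpose: "fine-tune")
end

# `OpenAI::FilePart`: wraps any of the above to attach an explicit filename
# and content type to the multipart section.
part = OpenAI::FilePart.new(
  Pathname("input.jsonl"),
  filename: "input.jsonl",
  content_type: "application/jsonl"
)
openai.files.create(file: part, purpose: "fine-tune")
```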
- sig { returns(T.any(Pathname, StringIO)) } + sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :file # ID of the model to use. The options are `gpt-4o-transcribe`, @@ -90,7 +90,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, @@ -147,7 +147,7 @@ module OpenAI override .returns( { - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 807e0bb2..77933b04 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -9,7 +9,7 @@ module OpenAI # The audio file object (not file name) to translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(Pathname, StringIO)) } + sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source @@ -48,7 +48,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, @@ -84,7 +84,7 @@ module OpenAI override .returns( { - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/lib/openai/models/file_create_params.rbi index 2580e280..675c3c74 100644 --- a/rbi/lib/openai/models/file_create_params.rbi +++ b/rbi/lib/openai/models/file_create_params.rbi @@ -7,7 +7,7 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The File object (not file name) to be uploaded. - sig { returns(T.any(Pathname, StringIO)) } + sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :file # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -19,7 +19,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) end @@ -39,7 +39,7 @@ module OpenAI override .returns( { - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 96899fe5..361f3512 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -8,7 +8,7 @@ module OpenAI # The image to use as the basis for the variation(s).
Must be a valid PNG file, # less than 4MB, and square. - sig { returns(T.any(Pathname, StringIO)) } + sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :image # The model to use for image generation. Only `dall-e-2` is supported at this @@ -42,7 +42,7 @@ module OpenAI sig do params( - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), @@ -78,7 +78,7 @@ module OpenAI override .returns( { - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 68d09cd7..4ae05e29 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -10,7 +10,17 @@ module OpenAI # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square # `png` file less than 4MB. - sig { returns(T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)])) } + sig do + returns( + T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ) + ) + end attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 @@ -22,10 +32,10 @@ module OpenAI # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. - sig { returns(T.nilable(T.any(Pathname, StringIO))) } + sig { returns(T.nilable(T.any(Pathname, StringIO, IO, OpenAI::FilePart))) } attr_reader :mask - sig { params(mask: T.any(Pathname, StringIO)).void } + sig { params(mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart)).void } attr_writer :mask # The model to use for image generation. 
Only `dall-e-2` and `gpt-image-1` are @@ -68,9 +78,15 @@ module OpenAI sig do params( - image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), + image: T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ), prompt: String, - mask: T.any(Pathname, StringIO), + mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), @@ -124,9 +140,15 @@ module OpenAI override .returns( { - image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), + image: T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ), prompt: String, - mask: T.any(Pathname, StringIO), + mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), @@ -151,7 +173,7 @@ module OpenAI StringArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::IOLike], + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/lib/openai/models/uploads/part_create_params.rbi index 1a62a478..e8d07b06 100644 --- a/rbi/lib/openai/models/uploads/part_create_params.rbi +++ b/rbi/lib/openai/models/uploads/part_create_params.rbi @@ -8,12 +8,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters # The chunk of bytes for this Part. - sig { returns(T.any(Pathname, StringIO)) } + sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :data sig do params( - data: T.any(Pathname, StringIO), + data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) ) .returns(T.attached_class) @@ -23,7 +23,13 @@ module OpenAI data:, request_options: {} ); end - sig { override.returns({data: T.any(Pathname, StringIO), request_options: OpenAI::RequestOptions}) } + sig do + override + .returns({ + data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + request_options: OpenAI::RequestOptions + }) + end def to_hash; end end end diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/lib/openai/resources/audio/transcriptions.rbi index a411c389..45316428 100644 --- a/rbi/lib/openai/resources/audio/transcriptions.rbi +++ b/rbi/lib/openai/resources/audio/transcriptions.rbi @@ -10,7 +10,7 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, @@ -73,7 +73,7 @@ module OpenAI # Transcribes audio into the input language. 
sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], language: String, diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/lib/openai/resources/audio/translations.rbi index c11c0bde..b9130883 100644 --- a/rbi/lib/openai/resources/audio/translations.rbi +++ b/rbi/lib/openai/resources/audio/translations.rbi @@ -7,7 +7,7 @@ module OpenAI # Translates audio into English. sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/lib/openai/resources/files.rbi index 182dba52..aa872185 100644 --- a/rbi/lib/openai/resources/files.rbi +++ b/rbi/lib/openai/resources/files.rbi @@ -26,7 +26,7 @@ module OpenAI # storage limits. sig do params( - file: T.any(Pathname, StringIO), + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), purpose: OpenAI::Models::FilePurpose::OrSymbol, request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/lib/openai/resources/images.rbi index 7e97700a..f50d6486 100644 --- a/rbi/lib/openai/resources/images.rbi +++ b/rbi/lib/openai/resources/images.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a variation of a given image. This endpoint only supports `dall-e-2`. sig do params( - image: T.any(Pathname, StringIO), + image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), @@ -42,9 +42,15 @@ module OpenAI # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. sig do params( - image: T.any(Pathname, StringIO, T::Array[T.any(Pathname, StringIO)]), + image: T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ), prompt: String, - mask: T.any(Pathname, StringIO), + mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/lib/openai/resources/uploads/parts.rbi index 8258aeea..867a760a 100644 --- a/rbi/lib/openai/resources/uploads/parts.rbi +++ b/rbi/lib/openai/resources/uploads/parts.rbi @@ -18,7 +18,7 @@ module OpenAI sig do params( upload_id: String, - data: T.any(Pathname, StringIO), + data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) ) .returns(OpenAI::Models::Uploads::UploadPart) diff --git a/sig/openai/file_part.rbs b/sig/openai/file_part.rbs new file mode 100644 index 00000000..c517bee8 --- /dev/null +++ b/sig/openai/file_part.rbs @@ -0,0 +1,21 @@ +module OpenAI + class FilePart + attr_reader content: Pathname | StringIO | IO | String + + attr_reader content_type: String? + + attr_reader filename: String? 
+ + private def read: -> String + + def to_json: (*top a) -> String + + def to_yaml: (*top a) -> String + + def initialize: ( + Pathname | StringIO | IO | String content, + ?filename: String?, + ?content_type: String? + ) -> void + end +end diff --git a/sig/openai/internal/type/io_like.rbs b/sig/openai/internal/type/file_input.rbs similarity index 96% rename from sig/openai/internal/type/io_like.rbs rename to sig/openai/internal/type/file_input.rbs index 8a5fb508..862c2111 100644 --- a/sig/openai/internal/type/io_like.rbs +++ b/sig/openai/internal/type/file_input.rbs @@ -1,7 +1,7 @@ module OpenAI module Internal module Type - class IOLike + class FileInput extend OpenAI::Internal::Type::Converter def self.===: (top other) -> bool diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 6c038865..0e7d0388 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -73,16 +73,6 @@ module OpenAI | ::Array[(String | Integer)?])?] headers ) -> ::Hash[String, String] - class SerializationAdapter - attr_reader inner: Pathname | IO - - def to_json: (*top a) -> String - - def to_yaml: (*top a) -> String - - def initialize: (Pathname | IO inner) -> void - end - class ReadIOAdapter def close?: -> bool? @@ -106,6 +96,13 @@ module OpenAI JSON_CONTENT: Regexp JSONL_CONTENT: Regexp + def self?.write_multipart_content: ( + Enumerator::Yielder y, + val: top, + closing: ::Array[^-> void], + ?content_type: String? + ) -> void + def self?.write_multipart_chunk: ( Enumerator::Yielder y, boundary: String, diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 0ed3e9cd..e16d6287 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type transcription_create_params = { - file: (Pathname | StringIO), + file: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::Audio::TranscriptionCreateParams::model, include: ::Array[OpenAI::Models::Audio::transcription_include], language: String, @@ -18,7 +18,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO + attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model @@ -53,7 +53,7 @@ module OpenAI ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 2ed83e46..8fcd623b 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type translation_create_params = { - file: (Pathname | StringIO), + file: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::Audio::TranslationCreateParams::model, prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include 
OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO + attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart attr_accessor model: OpenAI::Models::Audio::TranslationCreateParams::model @@ -34,7 +34,7 @@ module OpenAI def temperature=: (Float) -> Float def initialize: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index c57c5981..33f381f0 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -1,19 +1,22 @@ module OpenAI module Models type file_create_params = - { file: (Pathname | StringIO), purpose: OpenAI::Models::file_purpose } + { + file: (Pathname | StringIO | IO | OpenAI::FilePart), + purpose: OpenAI::Models::file_purpose + } & OpenAI::Internal::Type::request_parameters class FileCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO + attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart attr_accessor purpose: OpenAI::Models::file_purpose def initialize: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 28803369..1d87d6c6 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type image_create_variation_params = { - image: (Pathname | StringIO), + image: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::ImageCreateVariationParams::model?, n: Integer?, response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor image: Pathname | StringIO + attr_accessor image: Pathname | StringIO | IO | OpenAI::FilePart attr_accessor model: OpenAI::Models::ImageCreateVariationParams::model? @@ -30,7 +30,7 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: Pathname | StringIO, + image: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 30bf203d..95e02a84 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -4,7 +4,7 @@ module OpenAI { image: OpenAI::Models::ImageEditParams::image, prompt: String, - mask: (Pathname | StringIO), + mask: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::ImageEditParams::model?, n: Integer?, quality: OpenAI::Models::ImageEditParams::quality?, @@ -22,9 +22,11 @@ module OpenAI attr_accessor prompt: String - attr_reader mask: (Pathname | StringIO)? + attr_reader mask: (Pathname | StringIO | IO | OpenAI::FilePart)? 
- def mask=: (Pathname | StringIO) -> (Pathname | StringIO) + def mask=: ( + Pathname | StringIO | IO | OpenAI::FilePart + ) -> (Pathname | StringIO | IO | OpenAI::FilePart) attr_accessor model: OpenAI::Models::ImageEditParams::model? @@ -43,7 +45,7 @@ module OpenAI def initialize: ( image: OpenAI::Models::ImageEditParams::image, prompt: String, - ?mask: Pathname | StringIO, + ?mask: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, @@ -55,14 +57,19 @@ module OpenAI def to_hash: -> OpenAI::Models::image_edit_params - type image = Pathname | StringIO | ::Array[Pathname | StringIO] + type image = + Pathname + | StringIO + | IO + | OpenAI::FilePart + | ::Array[Pathname | StringIO | IO | OpenAI::FilePart] module Image extend OpenAI::Internal::Type::Union def self?.variants: -> [StringIO, ::Array[StringIO]] - type string_array = ::Array[Pathname | StringIO] + type string_array = ::Array[Pathname | StringIO | IO | OpenAI::FilePart] StringArray: string_array end diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 89c4c092..f3ea9aa1 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -2,17 +2,17 @@ module OpenAI module Models module Uploads type part_create_params = - { data: (Pathname | StringIO) } + { data: (Pathname | StringIO | IO | OpenAI::FilePart) } & OpenAI::Internal::Type::request_parameters class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor data: Pathname | StringIO + attr_accessor data: Pathname | StringIO | IO | OpenAI::FilePart def initialize: ( - data: Pathname | StringIO, + data: Pathname | StringIO | IO | OpenAI::FilePart, ?request_options: OpenAI::request_opts ) -> void diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index bf5e0c11..50dccae8 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Transcriptions def create: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, @@ -15,7 +15,7 @@ module OpenAI ) -> OpenAI::Models::Audio::transcription_create_response def create_streaming: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index a5cdf743..a2cece60 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Translations def create: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index f4c12155..b6cefa86 100644 --- 
a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Files def create: ( - file: Pathname | StringIO, + file: Pathname | StringIO | IO | OpenAI::FilePart, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FileObject diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index ec6082ff..d1bddd09 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Images def create_variation: ( - image: Pathname | StringIO, + image: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -14,7 +14,7 @@ module OpenAI def edit: ( image: OpenAI::Models::ImageEditParams::image, prompt: String, - ?mask: Pathname | StringIO, + ?mask: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index 7d3f7d89..f296f1ef 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -4,7 +4,7 @@ module OpenAI class Parts def create: ( String upload_id, - data: Pathname | StringIO, + data: Pathname | StringIO | IO | OpenAI::FilePart, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Uploads::UploadPart diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index a3267818..3f6e7420 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -94,7 +94,7 @@ def test_dump [:a, :b] => :b, [:a, "a"] => "a", [String, StringIO.new("one")] => "one", - [String, Pathname(__FILE__)] => OpenAI::Internal::Util::SerializationAdapter + [String, Pathname(__FILE__)] => OpenAI::FilePart } cases.each do diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index adae5642..5d52ee06 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -200,8 +200,12 @@ def test_file_encode file = Pathname(__FILE__) headers = {"content-type" => "multipart/form-data"} cases = { + "abc" => "abc", StringIO.new("abc") => "abc", - file => /^class OpenAI/ + OpenAI::FilePart.new("abc") => "abc", + OpenAI::FilePart.new(StringIO.new("abc")) => "abc", + file => /^class OpenAI/, + OpenAI::FilePart.new(file) => /^class OpenAI/ } cases.each do |body, val| encoded = OpenAI::Internal::Util.encode_content(headers, body) @@ -219,7 +223,13 @@ def test_hash_encode {a: 2, b: nil} => {"a" => "2", "b" => "null"}, {a: 2, b: [1, 2, 3]} => {"a" => "2", "b" => "1"}, {strio: StringIO.new("a")} => {"strio" => "a"}, - {pathname: Pathname(__FILE__)} => {"pathname" => -> { _1.read in /^class OpenAI/ }} + {strio: OpenAI::FilePart.new("a")} => {"strio" => "a"}, + {pathname: Pathname(__FILE__)} => {"pathname" => -> { _1.read in /^class OpenAI/ }}, + {pathname: OpenAI::FilePart.new(Pathname(__FILE__))} => { + "pathname" => -> { + _1.read in /^class OpenAI/ + } + } } cases.each do |body, testcase| encoded = OpenAI::Internal::Util.encode_content(headers, body) From d44126b16de904a07d0812fab582df487bb86337 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 24 
Apr 2025 19:45:39 +0000 Subject: [PATCH 159/295] chore: consistently use string in examples, even for enums --- README.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 75305836..3d43fae8 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ openai = OpenAI::Client.new( ) chat_completion = openai.chat.completions.create( - messages: [{role: :user, content: "Say this is a test"}], + messages: [{role: "user", content: "Say this is a test"}], model: :"gpt-4.1" ) @@ -46,7 +46,7 @@ When using sorbet, it is recommended to use model classes as below. This provide ```ruby openai.chat.completions.create( - messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) ``` @@ -76,7 +76,7 @@ We provide support for streaming responses using Server-Sent Events (SSE). ```ruby stream = openai.chat.completions.stream_raw( - messages: [{role: :user, content: "Say this is a test"}], + messages: [{role: "user", content: "Say this is a test"}], model: :"gpt-4.1" ) @@ -93,11 +93,11 @@ Request parameters that correspond to file uploads can be passed as `StringIO`, require "pathname" # using `Pathname`, the file will be lazily read, without reading everything in to memory -file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: :"fine-tune") +file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune") file = File.read("input.jsonl") # using `StringIO`, useful if you already have the data in memory -file_object = openai.files.create(file: StringIO.new(file), purpose: :"fine-tune") +file_object = openai.files.create(file: StringIO.new(file), purpose: "fine-tune") puts(file_object.id) ``` @@ -146,7 +146,7 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( - messages: [{role: :user, content: "How can I get the name of the current day in JavaScript?"}], + messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}], model: :"gpt-4.1", request_options: {max_retries: 5} ) @@ -168,7 +168,7 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( - messages: [{role: :user, content: "How can I list all files in a directory using Python?"}], + messages: [{role: "user", content: "How can I list all files in a directory using Python?"}], model: :"gpt-4.1", request_options: {timeout: 5} ) @@ -185,13 +185,13 @@ In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can als ```ruby # This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) # This also works params = { - messages: [{role: :user, content: "Say this is a test"}], + messages: [{role: "user", content: "Say this is a test"}], model: :"gpt-4.1" } ``` @@ -253,7 +253,7 @@ It is possible to pass a compatible model / parameter class to a method that exp ```ruby params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: 
[OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) openai.chat.completions.create(**params) From ab9644113d92f57c35758cad7e24406274e6bd77 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 24 Apr 2025 23:03:34 +0000 Subject: [PATCH 160/295] chore(internal): improve response envelope unwrap functionality --- lib/openai/internal/transport/base_client.rb | 4 ++-- lib/openai/internal/util.rb | 23 ++++++++++--------- .../openai/internal/transport/base_client.rbi | 18 +++++++++++++-- rbi/lib/openai/internal/util.rbi | 12 +++++++--- sig/openai/internal/transport/base_client.rbs | 10 ++++++-- sig/openai/internal/util.rbs | 6 +++-- test/openai/internal/util_test.rb | 7 +++--- 7 files changed, 55 insertions(+), 25 deletions(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 8866d7ad..5d77a764 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -214,7 +214,7 @@ def initialize( # # @option req [Object, nil] :body # - # @option req [Symbol, nil] :unwrap + # @option req [Symbol, Integer, Array, Proc, nil] :unwrap # # @option req [Class, nil] :page # @@ -415,7 +415,7 @@ def initialize( # # @param body [Object, nil] # - # @param unwrap [Symbol, nil] + # @param unwrap [Symbol, Integer, Array, Proc, nil] # # @param page [Class, nil] # diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 7b82110b..c1359d17 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -175,18 +175,17 @@ def deep_merge(*values, sentinel: nil, concat: false) # @api private # # @param data [Hash{Symbol=>Object}, Array, Object] - # @param pick [Symbol, Integer, Array, nil] - # @param sentinel [Object, nil] + # @param pick [Symbol, Integer, Array, Proc, nil] # @param blk [Proc, nil] # # @return [Object, nil] - def dig(data, pick, sentinel = nil, &blk) - case [data, pick, blk] - in [_, nil, nil] + def dig(data, pick, &blk) + case [data, pick] + in [_, nil] data - in [Hash, Symbol, _] | [Array, Integer, _] - blk.nil? ? data.fetch(pick, sentinel) : data.fetch(pick, &blk) - in [Hash | Array, Array, _] + in [Hash, Symbol] | [Array, Integer] + data.fetch(pick) { blk&.call } + in [Hash | Array, Array] pick.reduce(data) do |acc, key| case acc in Hash if acc.key?(key) @@ -194,11 +193,13 @@ def dig(data, pick, sentinel = nil, &blk) in Array if key.is_a?(Integer) && key < acc.length acc[key] else - return blk.nil? ? sentinel : blk.call + return blk&.call end end - in _ - blk.nil? ? 
sentinel : blk.call + in [_, Proc] + pick.call(data) + else + blk&.call end end end diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/lib/openai/internal/transport/base_client.rbi index 73785f6c..fb87b782 100644 --- a/rbi/lib/openai/internal/transport/base_client.rbi +++ b/rbi/lib/openai/internal/transport/base_client.rbi @@ -24,7 +24,14 @@ module OpenAI )] ), body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), + unwrap: T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), stream: T.nilable( T::Class[OpenAI::Internal::Type::BaseStream[T.anything, @@ -173,7 +180,14 @@ module OpenAI )] ), body: T.nilable(T.anything), - unwrap: T.nilable(Symbol), + unwrap: T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), stream: T.nilable( T::Class[OpenAI::Internal::Type::BaseStream[T.anything, diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/lib/openai/internal/util.rbi index 70758fde..3fe2f560 100644 --- a/rbi/lib/openai/internal/util.rbi +++ b/rbi/lib/openai/internal/util.rbi @@ -68,13 +68,19 @@ module OpenAI sig do params( data: T.any(OpenAI::Internal::AnyHash, T::Array[T.anything], T.anything), - pick: T.nilable(T.any(Symbol, Integer, T::Array[T.any(Symbol, Integer)])), - sentinel: T.nilable(T.anything), + pick: T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), blk: T.nilable(T.proc.returns(T.anything)) ) .returns(T.nilable(T.anything)) end - def dig(data, pick, sentinel = nil, &blk); end + def dig(data, pick, &blk); end end class << self diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index acb379e9..3b3e9b44 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -11,7 +11,10 @@ module OpenAI | Integer | ::Array[(String | Integer)?])?]?, body: top?, - unwrap: Symbol?, + unwrap: (Symbol + | Integer + | ::Array[(Symbol | Integer)] + | (^(top arg0) -> top))?, page: Class?, stream: Class?, model: OpenAI::Internal::Type::Converter::input?, @@ -96,7 +99,10 @@ module OpenAI | Integer | ::Array[(String | Integer)?])?]?, ?body: top?, - ?unwrap: Symbol?, + ?unwrap: (Symbol + | Integer + | ::Array[(Symbol | Integer)] + | (^(top arg0) -> top))?, ?page: Class?, ?stream: Class?, ?model: OpenAI::Internal::Type::Converter::input?, diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 0e7d0388..14b3d577 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -29,8 +29,10 @@ module OpenAI def self?.dig: ( ::Hash[Symbol, top] | ::Array[top] | top data, - (Symbol | Integer | ::Array[(Symbol | Integer)])? pick, - ?top? sentinel + (Symbol + | Integer + | ::Array[(Symbol | Integer)] + | (^(top arg0) -> top))? pick ) { -> top? } -> top? 
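To make the widened `unwrap`/`dig` contract above concrete, a few illustrative calls (hypothetical data; `OpenAI::Internal::Util.dig` is an internal helper, and these examples only restate the semantics the diff implements):

```ruby
data = {object: "list", data: [{id: "file-1"}, {id: "file-2"}]}

# A Symbol or Integer pick indexes one level into a Hash or Array.
OpenAI::Internal::Util.dig(data, :object) # => "list"

# An Array pick walks the structure key by key.
OpenAI::Internal::Util.dig(data, [:data, 1, :id]) # => "file-2"

# A Proc pick receives the whole value and may unwrap it arbitrarily.
OpenAI::Internal::Util.dig(data, ->(d) { d.fetch(:data).length }) # => 2

# On a missing path the block supplies the fallback; without one, nil is returned.
OpenAI::Internal::Util.dig(data, [:data, 5, :id]) { :missing } # => :missing
```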
diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index 5d52ee06..565fe013 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -87,8 +87,9 @@ def test_dig OpenAI::Internal::Util.dig([], 1.0) => nil OpenAI::Internal::Util.dig(Object, 1) => nil - OpenAI::Internal::Util.dig([], 1.0, 2) => 2 OpenAI::Internal::Util.dig([], 1.0) { 2 } => 2 + OpenAI::Internal::Util.dig([], ->(_) { 2 }) => 2 + OpenAI::Internal::Util.dig([1], -> { _1 in [1] }) => true end end end @@ -165,9 +166,9 @@ def test_json_content "application/vnd.github.v3+json" => true, "application/vnd.api+json" => true } - cases.each do |header, _verdict| + cases.each do |header, verdict| assert_pattern do - OpenAI::Internal::Util::JSON_CONTENT.match?(header) => verdict + OpenAI::Internal::Util::JSON_CONTENT.match?(header) => ^verdict end end end From 61c419cdd7e51aff530e6cea5b8d272d06c8337d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 16:32:47 +0000 Subject: [PATCH 161/295] fix: ensure gem release is unaffected by renaming --- Rakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Rakefile b/Rakefile index 09924f69..2e5368f2 100644 --- a/Rakefile +++ b/Rakefile @@ -135,5 +135,5 @@ end desc("Release ruby gem") multitask(release: [:"build:gem"]) do - sh(*%w[gem push], *FileList["openai-*.gem"]) + sh(*%w[gem push], *FileList["*.gem"]) end From a18080991762f6a57b6e1e4ec3726e3d1d9b8f18 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 25 Apr 2025 21:06:42 +0000 Subject: [PATCH 162/295] fix: make a typo for `FilePart.content` --- lib/openai/file_part.rb | 10 ++++---- lib/openai/internal/type/converter.rb | 3 +++ lib/openai/internal/type/file_input.rb | 33 +++++++++++++++++--------- test/openai/file_part_test.rb | 12 ++++++++++ 4 files changed, 42 insertions(+), 16 deletions(-) create mode 100644 test/openai/file_part_test.rb diff --git a/lib/openai/file_part.rb b/lib/openai/file_part.rb index f17371ec..d2a3f6a5 100644 --- a/lib/openai/file_part.rb +++ b/lib/openai/file_part.rb @@ -15,15 +15,15 @@ class FilePart # # @return [String] private def read - case contents + case content in Pathname - contents.read(binmode: true) + content.read(binmode: true) in StringIO - contents.string + content.string in IO - contents.read + content.read in String - contents + content end end diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 0b58d3b0..0d498235 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -44,6 +44,9 @@ def dump(value, state:) in Pathname | IO state[:can_retry] = false if value.is_a?(IO) OpenAI::FilePart.new(value) + in OpenAI::FilePart + state[:can_retry] = false if value.content.is_a?(IO) + value else value end diff --git a/lib/openai/internal/type/file_input.rb b/lib/openai/internal/type/file_input.rb index 892a1761..3ed13ec6 100644 --- a/lib/openai/internal/type/file_input.rb +++ b/lib/openai/internal/type/file_input.rb @@ -67,17 +67,28 @@ def coerce(value, state:) end end - # @!parse - # # @api private - # # - # # @param value [Pathname, StringIO, IO, String, Object] - # # - # # @param state [Hash{Symbol=>Object}] . 
- # # - # # @option state [Boolean] :can_retry - # # - # # @return [Pathname, StringIO, IO, String, Object] - # def dump(value, state:) = super + # @api private + # + # @param value [Pathname, StringIO, IO, String, Object] + # + # @param state [Hash{Symbol=>Object}] . + # + # @option state [Boolean] :can_retry + # + # @return [Pathname, StringIO, IO, String, Object] + def dump(value, state:) + # rubocop:disable Lint/DuplicateBranch + case value + in IO + state[:can_retry] = false + in OpenAI::FilePart if value.content.is_a?(IO) + state[:can_retry] = false + else + end + # rubocop:enable Lint/DuplicateBranch + + value + end end end end diff --git a/test/openai/file_part_test.rb b/test/openai/file_part_test.rb new file mode 100644 index 00000000..4136b1b6 --- /dev/null +++ b/test/openai/file_part_test.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require_relative "test_helper" + +class OpenAI::Test::FilePartTest < Minitest::Test + def test_to_json + text = "gray" + filepart = OpenAI::FilePart.new(StringIO.new(text)) + + assert_equal(text.to_json, filepart.to_json) + end +end From 7792daf662b518cc3697ef022d95aa185cb5faea Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 26 Apr 2025 05:41:35 +0000 Subject: [PATCH 163/295] chore(internal): remove unnecessary `rbi/lib` folder --- rbi/{lib => }/openai/client.rbi | 0 rbi/{lib => }/openai/errors.rbi | 0 rbi/{lib => }/openai/file_part.rbi | 0 rbi/{lib => }/openai/internal.rbi | 0 rbi/{lib => }/openai/internal/cursor_page.rbi | 0 rbi/{lib => }/openai/internal/page.rbi | 0 rbi/{lib => }/openai/internal/stream.rbi | 0 rbi/{lib => }/openai/internal/transport/base_client.rbi | 0 rbi/{lib => }/openai/internal/transport/pooled_net_requester.rbi | 0 rbi/{lib => }/openai/internal/type/array_of.rbi | 0 rbi/{lib => }/openai/internal/type/base_model.rbi | 0 rbi/{lib => }/openai/internal/type/base_page.rbi | 0 rbi/{lib => }/openai/internal/type/base_stream.rbi | 0 rbi/{lib => }/openai/internal/type/boolean.rbi | 0 rbi/{lib => }/openai/internal/type/converter.rbi | 0 rbi/{lib => }/openai/internal/type/enum.rbi | 0 rbi/{lib => }/openai/internal/type/file_input.rbi | 0 rbi/{lib => }/openai/internal/type/hash_of.rbi | 0 rbi/{lib => }/openai/internal/type/request_parameters.rbi | 0 rbi/{lib => }/openai/internal/type/union.rbi | 0 rbi/{lib => }/openai/internal/type/unknown.rbi | 0 rbi/{lib => }/openai/internal/util.rbi | 0 rbi/{lib => }/openai/models/all_models.rbi | 0 rbi/{lib => }/openai/models/audio/speech_create_params.rbi | 0 rbi/{lib => }/openai/models/audio/speech_model.rbi | 0 rbi/{lib => }/openai/models/audio/transcription.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_create_params.rbi | 0 .../openai/models/audio/transcription_create_response.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_include.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_segment.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_stream_event.rbi | 0 .../openai/models/audio/transcription_text_delta_event.rbi | 0 .../openai/models/audio/transcription_text_done_event.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_verbose.rbi | 0 rbi/{lib => }/openai/models/audio/transcription_word.rbi | 0 rbi/{lib => }/openai/models/audio/translation.rbi | 0 rbi/{lib => }/openai/models/audio/translation_create_params.rbi | 0 rbi/{lib => }/openai/models/audio/translation_create_response.rbi | 0 rbi/{lib => }/openai/models/audio/translation_verbose.rbi | 0 rbi/{lib => 
}/openai/models/audio_model.rbi | 0 rbi/{lib => }/openai/models/audio_response_format.rbi | 0 rbi/{lib => }/openai/models/auto_file_chunking_strategy_param.rbi | 0 rbi/{lib => }/openai/models/batch.rbi | 0 rbi/{lib => }/openai/models/batch_cancel_params.rbi | 0 rbi/{lib => }/openai/models/batch_create_params.rbi | 0 rbi/{lib => }/openai/models/batch_error.rbi | 0 rbi/{lib => }/openai/models/batch_list_params.rbi | 0 rbi/{lib => }/openai/models/batch_request_counts.rbi | 0 rbi/{lib => }/openai/models/batch_retrieve_params.rbi | 0 rbi/{lib => }/openai/models/beta/assistant.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_create_params.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_delete_params.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_deleted.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_list_params.rbi | 0 .../openai/models/beta/assistant_response_format_option.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_retrieve_params.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_stream_event.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_tool.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_tool_choice.rbi | 0 .../openai/models/beta/assistant_tool_choice_function.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_tool_choice_option.rbi | 0 rbi/{lib => }/openai/models/beta/assistant_update_params.rbi | 0 rbi/{lib => }/openai/models/beta/code_interpreter_tool.rbi | 0 rbi/{lib => }/openai/models/beta/file_search_tool.rbi | 0 rbi/{lib => }/openai/models/beta/function_tool.rbi | 0 rbi/{lib => }/openai/models/beta/message_stream_event.rbi | 0 rbi/{lib => }/openai/models/beta/run_step_stream_event.rbi | 0 rbi/{lib => }/openai/models/beta/run_stream_event.rbi | 0 rbi/{lib => }/openai/models/beta/thread.rbi | 0 rbi/{lib => }/openai/models/beta/thread_create_and_run_params.rbi | 0 rbi/{lib => }/openai/models/beta/thread_create_params.rbi | 0 rbi/{lib => }/openai/models/beta/thread_delete_params.rbi | 0 rbi/{lib => }/openai/models/beta/thread_deleted.rbi | 0 rbi/{lib => }/openai/models/beta/thread_retrieve_params.rbi | 0 rbi/{lib => }/openai/models/beta/thread_stream_event.rbi | 0 rbi/{lib => }/openai/models/beta/thread_update_params.rbi | 0 rbi/{lib => }/openai/models/beta/threads/annotation.rbi | 0 rbi/{lib => }/openai/models/beta/threads/annotation_delta.rbi | 0 .../openai/models/beta/threads/file_citation_annotation.rbi | 0 .../openai/models/beta/threads/file_citation_delta_annotation.rbi | 0 rbi/{lib => }/openai/models/beta/threads/file_path_annotation.rbi | 0 .../openai/models/beta/threads/file_path_delta_annotation.rbi | 0 rbi/{lib => }/openai/models/beta/threads/image_file.rbi | 0 .../openai/models/beta/threads/image_file_content_block.rbi | 0 rbi/{lib => }/openai/models/beta/threads/image_file_delta.rbi | 0 .../openai/models/beta/threads/image_file_delta_block.rbi | 0 rbi/{lib => }/openai/models/beta/threads/image_url.rbi | 0 .../openai/models/beta/threads/image_url_content_block.rbi | 0 rbi/{lib => }/openai/models/beta/threads/image_url_delta.rbi | 0 .../openai/models/beta/threads/image_url_delta_block.rbi | 0 rbi/{lib => }/openai/models/beta/threads/message.rbi | 0 rbi/{lib => }/openai/models/beta/threads/message_content.rbi | 0 .../openai/models/beta/threads/message_content_delta.rbi | 0 .../openai/models/beta/threads/message_content_part_param.rbi | 0 .../openai/models/beta/threads/message_create_params.rbi | 0 .../openai/models/beta/threads/message_delete_params.rbi | 0 rbi/{lib => }/openai/models/beta/threads/message_deleted.rbi | 0 rbi/{lib => 
}/openai/models/beta/threads/message_delta.rbi | 0
rbi/{lib => }/openai/models/beta/threads/message_delta_event.rbi | 0
rbi/{lib => }/openai/models/beta/threads/message_list_params.rbi | 0
.../openai/models/beta/threads/message_retrieve_params.rbi | 0
.../openai/models/beta/threads/message_update_params.rbi | 0
.../openai/models/beta/threads/refusal_content_block.rbi | 0
rbi/{lib => }/openai/models/beta/threads/refusal_delta_block.rbi | 0
.../models/beta/threads/required_action_function_tool_call.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_cancel_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_create_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_list_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_status.rbi | 0
.../openai/models/beta/threads/run_submit_tool_outputs_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/run_update_params.rbi | 0
.../openai/models/beta/threads/runs/code_interpreter_logs.rbi | 0
.../models/beta/threads/runs/code_interpreter_output_image.rbi | 0
.../models/beta/threads/runs/code_interpreter_tool_call.rbi | 0
.../models/beta/threads/runs/code_interpreter_tool_call_delta.rbi | 0
.../openai/models/beta/threads/runs/file_search_tool_call.rbi | 0
.../models/beta/threads/runs/file_search_tool_call_delta.rbi | 0
.../openai/models/beta/threads/runs/function_tool_call.rbi | 0
.../openai/models/beta/threads/runs/function_tool_call_delta.rbi | 0
.../models/beta/threads/runs/message_creation_step_details.rbi | 0
rbi/{lib => }/openai/models/beta/threads/runs/run_step.rbi | 0
rbi/{lib => }/openai/models/beta/threads/runs/run_step_delta.rbi | 0
.../openai/models/beta/threads/runs/run_step_delta_event.rbi | 0
.../models/beta/threads/runs/run_step_delta_message_delta.rbi | 0
.../openai/models/beta/threads/runs/run_step_include.rbi | 0
.../openai/models/beta/threads/runs/step_list_params.rbi | 0
.../openai/models/beta/threads/runs/step_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/beta/threads/runs/tool_call.rbi | 0
rbi/{lib => }/openai/models/beta/threads/runs/tool_call_delta.rbi | 0
.../openai/models/beta/threads/runs/tool_call_delta_object.rbi | 0
.../openai/models/beta/threads/runs/tool_calls_step_details.rbi | 0
rbi/{lib => }/openai/models/beta/threads/text.rbi | 0
rbi/{lib => }/openai/models/beta/threads/text_content_block.rbi | 0
.../openai/models/beta/threads/text_content_block_param.rbi | 0
rbi/{lib => }/openai/models/beta/threads/text_delta.rbi | 0
rbi/{lib => }/openai/models/beta/threads/text_delta_block.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion.rbi | 0
.../models/chat/chat_completion_assistant_message_param.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_audio.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_audio_param.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_chunk.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_content_part.rbi | 0
.../openai/models/chat/chat_completion_content_part_image.rbi | 0
.../models/chat/chat_completion_content_part_input_audio.rbi | 0
.../openai/models/chat/chat_completion_content_part_refusal.rbi | 0
.../openai/models/chat/chat_completion_content_part_text.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_deleted.rbi | 0
.../models/chat/chat_completion_developer_message_param.rbi | 0
.../openai/models/chat/chat_completion_function_call_option.rbi | 0
.../openai/models/chat/chat_completion_function_message_param.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_message.rbi | 0
.../openai/models/chat/chat_completion_message_param.rbi | 0
.../openai/models/chat/chat_completion_message_tool_call.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_modality.rbi | 0
.../openai/models/chat/chat_completion_named_tool_choice.rbi | 0
.../openai/models/chat/chat_completion_prediction_content.rbi | 0
.../openai/models/chat/chat_completion_reasoning_effort.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_role.rbi | 0
.../openai/models/chat/chat_completion_store_message.rbi | 0
.../openai/models/chat/chat_completion_stream_options.rbi | 0
.../openai/models/chat/chat_completion_system_message_param.rbi | 0
.../openai/models/chat/chat_completion_token_logprob.rbi | 0
rbi/{lib => }/openai/models/chat/chat_completion_tool.rbi | 0
.../openai/models/chat/chat_completion_tool_choice_option.rbi | 0
.../openai/models/chat/chat_completion_tool_message_param.rbi | 0
.../openai/models/chat/chat_completion_user_message_param.rbi | 0
rbi/{lib => }/openai/models/chat/completion_create_params.rbi | 0
rbi/{lib => }/openai/models/chat/completion_delete_params.rbi | 0
rbi/{lib => }/openai/models/chat/completion_list_params.rbi | 0
rbi/{lib => }/openai/models/chat/completion_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/chat/completion_update_params.rbi | 0
.../openai/models/chat/completions/message_list_params.rbi | 0
rbi/{lib => }/openai/models/chat_model.rbi | 0
rbi/{lib => }/openai/models/comparison_filter.rbi | 0
rbi/{lib => }/openai/models/completion.rbi | 0
rbi/{lib => }/openai/models/completion_choice.rbi | 0
rbi/{lib => }/openai/models/completion_create_params.rbi | 0
rbi/{lib => }/openai/models/completion_usage.rbi | 0
rbi/{lib => }/openai/models/compound_filter.rbi | 0
rbi/{lib => }/openai/models/create_embedding_response.rbi | 0
rbi/{lib => }/openai/models/embedding.rbi | 0
rbi/{lib => }/openai/models/embedding_create_params.rbi | 0
rbi/{lib => }/openai/models/embedding_model.rbi | 0
rbi/{lib => }/openai/models/error_object.rbi | 0
rbi/{lib => }/openai/models/eval_create_params.rbi | 0
rbi/{lib => }/openai/models/eval_create_response.rbi | 0
rbi/{lib => }/openai/models/eval_custom_data_source_config.rbi | 0
rbi/{lib => }/openai/models/eval_delete_params.rbi | 0
rbi/{lib => }/openai/models/eval_delete_response.rbi | 0
rbi/{lib => }/openai/models/eval_label_model_grader.rbi | 0
rbi/{lib => }/openai/models/eval_list_params.rbi | 0
rbi/{lib => }/openai/models/eval_list_response.rbi | 0
rbi/{lib => }/openai/models/eval_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/eval_retrieve_response.rbi | 0
.../openai/models/eval_stored_completions_data_source_config.rbi | 0
rbi/{lib => }/openai/models/eval_string_check_grader.rbi | 0
rbi/{lib => }/openai/models/eval_text_similarity_grader.rbi | 0
rbi/{lib => }/openai/models/eval_update_params.rbi | 0
rbi/{lib => }/openai/models/eval_update_response.rbi | 0
.../models/evals/create_eval_completions_run_data_source.rbi | 0
.../openai/models/evals/create_eval_jsonl_run_data_source.rbi | 0
rbi/{lib => }/openai/models/evals/eval_api_error.rbi | 0
rbi/{lib => }/openai/models/evals/run_cancel_params.rbi | 0
rbi/{lib => }/openai/models/evals/run_cancel_response.rbi | 0
rbi/{lib => }/openai/models/evals/run_create_params.rbi | 0
rbi/{lib => }/openai/models/evals/run_create_response.rbi | 0
rbi/{lib => }/openai/models/evals/run_delete_params.rbi | 0
rbi/{lib => }/openai/models/evals/run_delete_response.rbi | 0
rbi/{lib => }/openai/models/evals/run_list_params.rbi | 0
rbi/{lib => }/openai/models/evals/run_list_response.rbi | 0
rbi/{lib => }/openai/models/evals/run_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/evals/run_retrieve_response.rbi | 0
.../openai/models/evals/runs/output_item_list_params.rbi | 0
.../openai/models/evals/runs/output_item_list_response.rbi | 0
.../openai/models/evals/runs/output_item_retrieve_params.rbi | 0
.../openai/models/evals/runs/output_item_retrieve_response.rbi | 0
rbi/{lib => }/openai/models/file_chunking_strategy.rbi | 0
rbi/{lib => }/openai/models/file_chunking_strategy_param.rbi | 0
rbi/{lib => }/openai/models/file_content_params.rbi | 0
rbi/{lib => }/openai/models/file_create_params.rbi | 0
rbi/{lib => }/openai/models/file_delete_params.rbi | 0
rbi/{lib => }/openai/models/file_deleted.rbi | 0
rbi/{lib => }/openai/models/file_list_params.rbi | 0
rbi/{lib => }/openai/models/file_object.rbi | 0
rbi/{lib => }/openai/models/file_purpose.rbi | 0
rbi/{lib => }/openai/models/file_retrieve_params.rbi | 0
.../models/fine_tuning/checkpoints/permission_create_params.rbi | 0
.../models/fine_tuning/checkpoints/permission_create_response.rbi | 0
.../models/fine_tuning/checkpoints/permission_delete_params.rbi | 0
.../models/fine_tuning/checkpoints/permission_delete_response.rbi | 0
.../models/fine_tuning/checkpoints/permission_retrieve_params.rbi | 0
.../fine_tuning/checkpoints/permission_retrieve_response.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job_event.rbi | 0
.../openai/models/fine_tuning/fine_tuning_job_integration.rbi | 0
.../models/fine_tuning/fine_tuning_job_wandb_integration.rbi | 0
.../fine_tuning/fine_tuning_job_wandb_integration_object.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/job_cancel_params.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/job_create_params.rbi | 0
.../openai/models/fine_tuning/job_list_events_params.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/job_list_params.rbi | 0
rbi/{lib => }/openai/models/fine_tuning/job_retrieve_params.rbi | 0
.../openai/models/fine_tuning/jobs/checkpoint_list_params.rbi | 0
.../openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi | 0
rbi/{lib => }/openai/models/function_definition.rbi | 0
rbi/{lib => }/openai/models/function_parameters.rbi | 0
rbi/{lib => }/openai/models/image.rbi | 0
rbi/{lib => }/openai/models/image_create_variation_params.rbi | 0
rbi/{lib => }/openai/models/image_edit_params.rbi | 0
rbi/{lib => }/openai/models/image_generate_params.rbi | 0
rbi/{lib => }/openai/models/image_model.rbi | 0
rbi/{lib => }/openai/models/images_response.rbi | 0
rbi/{lib => }/openai/models/metadata.rbi | 0
rbi/{lib => }/openai/models/model.rbi | 0
rbi/{lib => }/openai/models/model_delete_params.rbi | 0
rbi/{lib => }/openai/models/model_deleted.rbi | 0
rbi/{lib => }/openai/models/model_list_params.rbi | 0
rbi/{lib => }/openai/models/model_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/moderation.rbi | 0
rbi/{lib => }/openai/models/moderation_create_params.rbi | 0
rbi/{lib => }/openai/models/moderation_create_response.rbi | 0
rbi/{lib => }/openai/models/moderation_image_url_input.rbi | 0
rbi/{lib => }/openai/models/moderation_model.rbi | 0
rbi/{lib => }/openai/models/moderation_multi_modal_input.rbi | 0
rbi/{lib => }/openai/models/moderation_text_input.rbi | 0
.../openai/models/other_file_chunking_strategy_object.rbi | 0
rbi/{lib => }/openai/models/reasoning.rbi | 0
rbi/{lib => }/openai/models/reasoning_effort.rbi | 0
rbi/{lib => }/openai/models/response_format_json_object.rbi | 0
rbi/{lib => }/openai/models/response_format_json_schema.rbi | 0
rbi/{lib => }/openai/models/response_format_text.rbi | 0
rbi/{lib => }/openai/models/responses/computer_tool.rbi | 0
rbi/{lib => }/openai/models/responses/easy_input_message.rbi | 0
rbi/{lib => }/openai/models/responses/file_search_tool.rbi | 0
rbi/{lib => }/openai/models/responses/function_tool.rbi | 0
rbi/{lib => }/openai/models/responses/input_item_list_params.rbi | 0
rbi/{lib => }/openai/models/responses/response.rbi | 0
.../openai/models/responses/response_audio_delta_event.rbi | 0
.../openai/models/responses/response_audio_done_event.rbi | 0
.../models/responses/response_audio_transcript_delta_event.rbi | 0
.../models/responses/response_audio_transcript_done_event.rbi | 0
.../responses/response_code_interpreter_call_code_delta_event.rbi | 0
.../responses/response_code_interpreter_call_code_done_event.rbi | 0
.../responses/response_code_interpreter_call_completed_event.rbi | 0
.../response_code_interpreter_call_in_progress_event.rbi | 0
.../response_code_interpreter_call_interpreting_event.rbi | 0
.../models/responses/response_code_interpreter_tool_call.rbi | 0
.../openai/models/responses/response_completed_event.rbi | 0
.../openai/models/responses/response_computer_tool_call.rbi | 0
.../models/responses/response_computer_tool_call_output_item.rbi | 0
.../responses/response_computer_tool_call_output_screenshot.rbi | 0
rbi/{lib => }/openai/models/responses/response_content.rbi | 0
.../openai/models/responses/response_content_part_added_event.rbi | 0
.../openai/models/responses/response_content_part_done_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_create_params.rbi | 0
rbi/{lib => }/openai/models/responses/response_created_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_delete_params.rbi | 0
rbi/{lib => }/openai/models/responses/response_error.rbi | 0
rbi/{lib => }/openai/models/responses/response_error_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_failed_event.rbi | 0
.../responses/response_file_search_call_completed_event.rbi | 0
.../responses/response_file_search_call_in_progress_event.rbi | 0
.../responses/response_file_search_call_searching_event.rbi | 0
.../openai/models/responses/response_file_search_tool_call.rbi | 0
.../openai/models/responses/response_format_text_config.rbi | 0
.../models/responses/response_format_text_json_schema_config.rbi | 0
.../responses/response_function_call_arguments_delta_event.rbi | 0
.../responses/response_function_call_arguments_done_event.rbi | 0
.../openai/models/responses/response_function_tool_call.rbi | 0
.../openai/models/responses/response_function_tool_call_item.rbi | 0
.../models/responses/response_function_tool_call_output_item.rbi | 0
.../openai/models/responses/response_function_web_search.rbi | 0
.../openai/models/responses/response_in_progress_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_includable.rbi | 0
.../openai/models/responses/response_incomplete_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_input.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_audio.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_content.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_file.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_image.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_item.rbi | 0
.../models/responses/response_input_message_content_list.rbi | 0
.../openai/models/responses/response_input_message_item.rbi | 0
rbi/{lib => }/openai/models/responses/response_input_text.rbi | 0
rbi/{lib => }/openai/models/responses/response_item.rbi | 0
rbi/{lib => }/openai/models/responses/response_item_list.rbi | 0
rbi/{lib => }/openai/models/responses/response_output_audio.rbi | 0
rbi/{lib => }/openai/models/responses/response_output_item.rbi | 0
.../openai/models/responses/response_output_item_added_event.rbi | 0
.../openai/models/responses/response_output_item_done_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_output_message.rbi | 0
rbi/{lib => }/openai/models/responses/response_output_refusal.rbi | 0
rbi/{lib => }/openai/models/responses/response_output_text.rbi | 0
rbi/{lib => }/openai/models/responses/response_reasoning_item.rbi | 0
.../responses/response_reasoning_summary_part_added_event.rbi | 0
.../responses/response_reasoning_summary_part_done_event.rbi | 0
.../responses/response_reasoning_summary_text_delta_event.rbi | 0
.../responses/response_reasoning_summary_text_done_event.rbi | 0
.../openai/models/responses/response_refusal_delta_event.rbi | 0
.../openai/models/responses/response_refusal_done_event.rbi | 0
.../openai/models/responses/response_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/responses/response_status.rbi | 0
rbi/{lib => }/openai/models/responses/response_stream_event.rbi | 0
.../models/responses/response_text_annotation_delta_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_text_config.rbi | 0
.../openai/models/responses/response_text_delta_event.rbi | 0
.../openai/models/responses/response_text_done_event.rbi | 0
rbi/{lib => }/openai/models/responses/response_usage.rbi | 0
.../models/responses/response_web_search_call_completed_event.rbi | 0
.../responses/response_web_search_call_in_progress_event.rbi | 0
.../models/responses/response_web_search_call_searching_event.rbi | 0
rbi/{lib => }/openai/models/responses/tool.rbi | 0
rbi/{lib => }/openai/models/responses/tool_choice_function.rbi | 0
rbi/{lib => }/openai/models/responses/tool_choice_options.rbi | 0
rbi/{lib => }/openai/models/responses/tool_choice_types.rbi | 0
rbi/{lib => }/openai/models/responses/web_search_tool.rbi | 0
rbi/{lib => }/openai/models/responses_model.rbi | 0
rbi/{lib => }/openai/models/static_file_chunking_strategy.rbi | 0
.../openai/models/static_file_chunking_strategy_object.rbi | 0
.../openai/models/static_file_chunking_strategy_object_param.rbi | 0
rbi/{lib => }/openai/models/upload.rbi | 0
rbi/{lib => }/openai/models/upload_cancel_params.rbi | 0
rbi/{lib => }/openai/models/upload_complete_params.rbi | 0
rbi/{lib => }/openai/models/upload_create_params.rbi | 0
rbi/{lib => }/openai/models/uploads/part_create_params.rbi | 0
rbi/{lib => }/openai/models/uploads/upload_part.rbi | 0
rbi/{lib => }/openai/models/vector_store.rbi | 0
rbi/{lib => }/openai/models/vector_store_create_params.rbi | 0
rbi/{lib => }/openai/models/vector_store_delete_params.rbi | 0
rbi/{lib => }/openai/models/vector_store_deleted.rbi | 0
rbi/{lib => }/openai/models/vector_store_list_params.rbi | 0
rbi/{lib => }/openai/models/vector_store_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/vector_store_search_params.rbi | 0
rbi/{lib => }/openai/models/vector_store_search_response.rbi | 0
rbi/{lib => }/openai/models/vector_store_update_params.rbi | 0
.../openai/models/vector_stores/file_batch_cancel_params.rbi | 0
.../openai/models/vector_stores/file_batch_create_params.rbi | 0
.../openai/models/vector_stores/file_batch_list_files_params.rbi | 0
.../openai/models/vector_stores/file_batch_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/vector_stores/file_content_params.rbi | 0
.../openai/models/vector_stores/file_content_response.rbi | 0
rbi/{lib => }/openai/models/vector_stores/file_create_params.rbi | 0
rbi/{lib => }/openai/models/vector_stores/file_delete_params.rbi | 0
rbi/{lib => }/openai/models/vector_stores/file_list_params.rbi | 0
.../openai/models/vector_stores/file_retrieve_params.rbi | 0
rbi/{lib => }/openai/models/vector_stores/file_update_params.rbi | 0
rbi/{lib => }/openai/models/vector_stores/vector_store_file.rbi | 0
.../openai/models/vector_stores/vector_store_file_batch.rbi | 0
.../openai/models/vector_stores/vector_store_file_deleted.rbi | 0
rbi/{lib => }/openai/request_options.rbi | 0
rbi/{lib => }/openai/resources/audio.rbi | 0
rbi/{lib => }/openai/resources/audio/speech.rbi | 0
rbi/{lib => }/openai/resources/audio/transcriptions.rbi | 0
rbi/{lib => }/openai/resources/audio/translations.rbi | 0
rbi/{lib => }/openai/resources/batches.rbi | 0
rbi/{lib => }/openai/resources/beta.rbi | 0
rbi/{lib => }/openai/resources/beta/assistants.rbi | 0
rbi/{lib => }/openai/resources/beta/threads.rbi | 0
rbi/{lib => }/openai/resources/beta/threads/messages.rbi | 0
rbi/{lib => }/openai/resources/beta/threads/runs.rbi | 0
rbi/{lib => }/openai/resources/beta/threads/runs/steps.rbi | 0
rbi/{lib => }/openai/resources/chat.rbi | 0
rbi/{lib => }/openai/resources/chat/completions.rbi | 0
rbi/{lib => }/openai/resources/chat/completions/messages.rbi | 0
rbi/{lib => }/openai/resources/completions.rbi | 0
rbi/{lib => }/openai/resources/embeddings.rbi | 0
rbi/{lib => }/openai/resources/evals.rbi | 0
rbi/{lib => }/openai/resources/evals/runs.rbi | 0
rbi/{lib => }/openai/resources/evals/runs/output_items.rbi | 0
rbi/{lib => }/openai/resources/files.rbi | 0
rbi/{lib => }/openai/resources/fine_tuning.rbi | 0
rbi/{lib => }/openai/resources/fine_tuning/checkpoints.rbi | 0
.../openai/resources/fine_tuning/checkpoints/permissions.rbi | 0
rbi/{lib => }/openai/resources/fine_tuning/jobs.rbi | 0
rbi/{lib => }/openai/resources/fine_tuning/jobs/checkpoints.rbi | 0
rbi/{lib => }/openai/resources/images.rbi | 0
rbi/{lib => }/openai/resources/models.rbi | 0
rbi/{lib => }/openai/resources/moderations.rbi | 0
rbi/{lib => }/openai/resources/responses.rbi | 0
rbi/{lib => }/openai/resources/responses/input_items.rbi | 0
rbi/{lib => }/openai/resources/uploads.rbi | 0
rbi/{lib => }/openai/resources/uploads/parts.rbi | 0
rbi/{lib => }/openai/resources/vector_stores.rbi | 0
rbi/{lib => }/openai/resources/vector_stores/file_batches.rbi | 0
rbi/{lib => }/openai/resources/vector_stores/files.rbi | 0
rbi/{lib => }/openai/version.rbi | 0
428 files changed, 0 insertions(+), 0 deletions(-)
rename rbi/{lib => }/openai/client.rbi (100%)
rename rbi/{lib => }/openai/errors.rbi (100%)
rename rbi/{lib => }/openai/file_part.rbi (100%)
rename rbi/{lib => }/openai/internal.rbi (100%)
rename rbi/{lib => }/openai/internal/cursor_page.rbi (100%)
rename rbi/{lib => }/openai/internal/page.rbi (100%)
rename rbi/{lib => }/openai/internal/stream.rbi (100%)
rename rbi/{lib => }/openai/internal/transport/base_client.rbi (100%)
rename rbi/{lib => }/openai/internal/transport/pooled_net_requester.rbi (100%)
rename rbi/{lib => }/openai/internal/type/array_of.rbi (100%)
rename rbi/{lib => }/openai/internal/type/base_model.rbi (100%)
rename rbi/{lib => }/openai/internal/type/base_page.rbi (100%)
rename rbi/{lib => }/openai/internal/type/base_stream.rbi (100%)
rename rbi/{lib => }/openai/internal/type/boolean.rbi (100%)
rename rbi/{lib => }/openai/internal/type/converter.rbi (100%)
rename rbi/{lib => }/openai/internal/type/enum.rbi (100%)
rename rbi/{lib => }/openai/internal/type/file_input.rbi (100%)
rename rbi/{lib => }/openai/internal/type/hash_of.rbi (100%)
rename rbi/{lib => }/openai/internal/type/request_parameters.rbi (100%)
rename rbi/{lib => }/openai/internal/type/union.rbi (100%)
rename rbi/{lib => }/openai/internal/type/unknown.rbi (100%)
rename rbi/{lib => }/openai/internal/util.rbi (100%)
rename rbi/{lib => }/openai/models/all_models.rbi (100%)
rename rbi/{lib => }/openai/models/audio/speech_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/audio/speech_model.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_include.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_segment.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_text_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_text_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_verbose.rbi (100%)
rename rbi/{lib => }/openai/models/audio/transcription_word.rbi (100%)
rename rbi/{lib => }/openai/models/audio/translation.rbi (100%)
rename rbi/{lib => }/openai/models/audio/translation_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/audio/translation_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/audio/translation_verbose.rbi (100%)
rename rbi/{lib => }/openai/models/audio_model.rbi (100%)
rename rbi/{lib => }/openai/models/audio_response_format.rbi (100%)
rename rbi/{lib => }/openai/models/auto_file_chunking_strategy_param.rbi (100%)
rename rbi/{lib => }/openai/models/batch.rbi (100%)
rename rbi/{lib => }/openai/models/batch_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/batch_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/batch_error.rbi (100%)
rename rbi/{lib => }/openai/models/batch_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/batch_request_counts.rbi (100%)
rename rbi/{lib => }/openai/models/batch_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_response_format_option.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_tool.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_tool_choice.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_tool_choice_function.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_tool_choice_option.rbi (100%)
rename rbi/{lib => }/openai/models/beta/assistant_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/code_interpreter_tool.rbi (100%)
rename rbi/{lib => }/openai/models/beta/file_search_tool.rbi (100%)
rename rbi/{lib => }/openai/models/beta/function_tool.rbi (100%)
rename rbi/{lib => }/openai/models/beta/message_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/run_step_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/run_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_create_and_run_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/thread_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/annotation.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/annotation_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/file_citation_annotation.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/file_citation_delta_annotation.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/file_path_annotation.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/file_path_delta_annotation.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_file.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_file_content_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_file_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_file_delta_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_url.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_url_content_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_url_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/image_url_delta_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_content.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_content_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_content_part_param.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/message_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/refusal_content_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/refusal_delta_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/required_action_function_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_status.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_submit_tool_outputs_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/run_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/code_interpreter_logs.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/code_interpreter_output_image.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/file_search_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/function_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/function_tool_call_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/message_creation_step_details.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/run_step.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/run_step_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/run_step_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/run_step_include.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/step_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/step_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/tool_call_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/tool_call_delta_object.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/runs/tool_calls_step_details.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/text.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/text_content_block.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/text_content_block_param.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/text_delta.rbi (100%)
rename rbi/{lib => }/openai/models/beta/threads/text_delta_block.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_assistant_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_audio.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_audio_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_chunk.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_content_part.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_content_part_image.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_content_part_input_audio.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_content_part_refusal.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_content_part_text.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_developer_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_function_call_option.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_function_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_message.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_message_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_modality.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_named_tool_choice.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_prediction_content.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_reasoning_effort.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_role.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_store_message.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_stream_options.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_system_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_token_logprob.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_tool.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_tool_choice_option.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_tool_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/chat_completion_user_message_param.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completion_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completion_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completion_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completion_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completion_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat/completions/message_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/chat_model.rbi (100%)
rename rbi/{lib => }/openai/models/comparison_filter.rbi (100%)
rename rbi/{lib => }/openai/models/completion.rbi (100%)
rename rbi/{lib => }/openai/models/completion_choice.rbi (100%)
rename rbi/{lib => }/openai/models/completion_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/completion_usage.rbi (100%)
rename rbi/{lib => }/openai/models/compound_filter.rbi (100%)
rename rbi/{lib => }/openai/models/create_embedding_response.rbi (100%)
rename rbi/{lib => }/openai/models/embedding.rbi (100%)
rename rbi/{lib => }/openai/models/embedding_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/embedding_model.rbi (100%)
rename rbi/{lib => }/openai/models/error_object.rbi (100%)
rename rbi/{lib => }/openai/models/eval_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/eval_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/eval_custom_data_source_config.rbi (100%)
rename rbi/{lib => }/openai/models/eval_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/eval_delete_response.rbi (100%)
rename rbi/{lib => }/openai/models/eval_label_model_grader.rbi (100%)
rename rbi/{lib => }/openai/models/eval_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/eval_list_response.rbi (100%)
rename rbi/{lib => }/openai/models/eval_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/eval_retrieve_response.rbi (100%)
rename rbi/{lib => }/openai/models/eval_stored_completions_data_source_config.rbi (100%)
rename rbi/{lib => }/openai/models/eval_string_check_grader.rbi (100%)
rename rbi/{lib => }/openai/models/eval_text_similarity_grader.rbi (100%)
rename rbi/{lib => }/openai/models/eval_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/eval_update_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/create_eval_completions_run_data_source.rbi (100%)
rename rbi/{lib => }/openai/models/evals/create_eval_jsonl_run_data_source.rbi (100%)
rename rbi/{lib => }/openai/models/evals/eval_api_error.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_cancel_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_delete_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_list_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/run_retrieve_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/runs/output_item_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/runs/output_item_list_response.rbi (100%)
rename rbi/{lib => }/openai/models/evals/runs/output_item_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/evals/runs/output_item_retrieve_response.rbi (100%)
rename rbi/{lib => }/openai/models/file_chunking_strategy.rbi (100%)
rename rbi/{lib => }/openai/models/file_chunking_strategy_param.rbi (100%)
rename rbi/{lib => }/openai/models/file_content_params.rbi (100%)
rename rbi/{lib => }/openai/models/file_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/file_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/file_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/file_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/file_object.rbi (100%)
rename rbi/{lib => }/openai/models/file_purpose.rbi (100%)
rename rbi/{lib => }/openai/models/file_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job_event.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job_integration.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/job_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/job_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/job_list_events_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/job_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/job_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi (100%)
rename rbi/{lib => }/openai/models/function_definition.rbi (100%)
rename rbi/{lib => }/openai/models/function_parameters.rbi (100%)
rename rbi/{lib => }/openai/models/image.rbi (100%)
rename rbi/{lib => }/openai/models/image_create_variation_params.rbi (100%)
rename rbi/{lib => }/openai/models/image_edit_params.rbi (100%)
rename rbi/{lib => }/openai/models/image_generate_params.rbi (100%)
rename rbi/{lib => }/openai/models/image_model.rbi (100%)
rename rbi/{lib => }/openai/models/images_response.rbi (100%)
rename rbi/{lib => }/openai/models/metadata.rbi (100%)
rename rbi/{lib => }/openai/models/model.rbi (100%)
rename rbi/{lib => }/openai/models/model_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/model_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/model_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/model_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/moderation.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_create_response.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_image_url_input.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_model.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_multi_modal_input.rbi (100%)
rename rbi/{lib => }/openai/models/moderation_text_input.rbi (100%)
rename rbi/{lib => }/openai/models/other_file_chunking_strategy_object.rbi (100%)
rename rbi/{lib => }/openai/models/reasoning.rbi (100%)
rename rbi/{lib => }/openai/models/reasoning_effort.rbi (100%)
rename rbi/{lib => }/openai/models/response_format_json_object.rbi (100%)
rename rbi/{lib => }/openai/models/response_format_json_schema.rbi (100%)
rename rbi/{lib => }/openai/models/response_format_text.rbi (100%)
rename rbi/{lib => }/openai/models/responses/computer_tool.rbi (100%)
rename rbi/{lib => }/openai/models/responses/easy_input_message.rbi (100%)
rename rbi/{lib => }/openai/models/responses/file_search_tool.rbi (100%)
rename rbi/{lib => }/openai/models/responses/function_tool.rbi (100%)
rename rbi/{lib => }/openai/models/responses/input_item_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_audio_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_audio_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_audio_transcript_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_audio_transcript_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_call_code_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_call_completed_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_code_interpreter_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_completed_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_computer_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_computer_tool_call_output_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_computer_tool_call_output_screenshot.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_content.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_content_part_added_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_content_part_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_created_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_error.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_error_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_failed_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_file_search_call_completed_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_file_search_call_in_progress_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_file_search_call_searching_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_file_search_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_format_text_config.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_format_text_json_schema_config.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_call_arguments_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_call_arguments_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_tool_call.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_tool_call_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_tool_call_output_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_function_web_search.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_in_progress_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_includable.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_incomplete_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_audio.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_content.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_file.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_image.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_message_content_list.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_message_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_input_text.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_item_list.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_audio.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_item_added_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_item_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_message.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_refusal.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_output_text.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_reasoning_item.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_reasoning_summary_part_added_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_reasoning_summary_part_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_reasoning_summary_text_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_reasoning_summary_text_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_refusal_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_refusal_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_status.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_stream_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_text_annotation_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_text_config.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_text_delta_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_text_done_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_usage.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_web_search_call_completed_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_web_search_call_in_progress_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/response_web_search_call_searching_event.rbi (100%)
rename rbi/{lib => }/openai/models/responses/tool.rbi (100%)
rename rbi/{lib => }/openai/models/responses/tool_choice_function.rbi (100%)
rename rbi/{lib => }/openai/models/responses/tool_choice_options.rbi (100%)
rename rbi/{lib => }/openai/models/responses/tool_choice_types.rbi (100%)
rename rbi/{lib => }/openai/models/responses/web_search_tool.rbi (100%)
rename rbi/{lib => }/openai/models/responses_model.rbi (100%)
rename rbi/{lib => }/openai/models/static_file_chunking_strategy.rbi (100%)
rename rbi/{lib => }/openai/models/static_file_chunking_strategy_object.rbi (100%)
rename rbi/{lib => }/openai/models/static_file_chunking_strategy_object_param.rbi (100%)
rename rbi/{lib => }/openai/models/upload.rbi (100%)
rename rbi/{lib => }/openai/models/upload_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/upload_complete_params.rbi (100%)
rename rbi/{lib => }/openai/models/upload_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/uploads/part_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/uploads/upload_part.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_deleted.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_search_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_search_response.rbi (100%)
rename rbi/{lib => }/openai/models/vector_store_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_batch_cancel_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_batch_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_batch_list_files_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_batch_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_content_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_content_response.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_create_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_delete_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_list_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_retrieve_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/file_update_params.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/vector_store_file.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/vector_store_file_batch.rbi (100%)
rename rbi/{lib => }/openai/models/vector_stores/vector_store_file_deleted.rbi (100%)
rename rbi/{lib => }/openai/request_options.rbi (100%)
rename rbi/{lib => }/openai/resources/audio.rbi (100%)
rename rbi/{lib => }/openai/resources/audio/speech.rbi (100%)
rename rbi/{lib => }/openai/resources/audio/transcriptions.rbi (100%)
rename rbi/{lib => }/openai/resources/audio/translations.rbi (100%)
rename rbi/{lib => }/openai/resources/batches.rbi (100%)
rename rbi/{lib => }/openai/resources/beta.rbi (100%)
rename rbi/{lib => }/openai/resources/beta/assistants.rbi (100%)
rename rbi/{lib => }/openai/resources/beta/threads.rbi (100%)
rename rbi/{lib => }/openai/resources/beta/threads/messages.rbi (100%)
rename rbi/{lib => }/openai/resources/beta/threads/runs.rbi (100%)
rename rbi/{lib => }/openai/resources/beta/threads/runs/steps.rbi (100%)
rename rbi/{lib => }/openai/resources/chat.rbi (100%)
rename rbi/{lib => }/openai/resources/chat/completions.rbi (100%)
rename rbi/{lib => }/openai/resources/chat/completions/messages.rbi (100%)
rename rbi/{lib => }/openai/resources/completions.rbi (100%)
rename rbi/{lib => }/openai/resources/embeddings.rbi (100%)
rename rbi/{lib => }/openai/resources/evals.rbi (100%)
rename rbi/{lib => }/openai/resources/evals/runs.rbi (100%)
rename rbi/{lib => }/openai/resources/evals/runs/output_items.rbi (100%)
rename rbi/{lib => }/openai/resources/files.rbi (100%)
rename rbi/{lib => }/openai/resources/fine_tuning.rbi (100%)
rename rbi/{lib => }/openai/resources/fine_tuning/checkpoints.rbi (100%)
rename rbi/{lib => }/openai/resources/fine_tuning/checkpoints/permissions.rbi (100%)
rename rbi/{lib => }/openai/resources/fine_tuning/jobs.rbi (100%)
rename rbi/{lib => }/openai/resources/fine_tuning/jobs/checkpoints.rbi (100%)
rename rbi/{lib => }/openai/resources/images.rbi (100%)
rename rbi/{lib => }/openai/resources/models.rbi (100%)
rename rbi/{lib => }/openai/resources/moderations.rbi (100%)
rename rbi/{lib => }/openai/resources/responses.rbi (100%)
rename rbi/{lib => }/openai/resources/responses/input_items.rbi (100%)
rename rbi/{lib => }/openai/resources/uploads.rbi (100%)
rename rbi/{lib => }/openai/resources/uploads/parts.rbi (100%)
rename rbi/{lib => }/openai/resources/vector_stores.rbi (100%)
rename rbi/{lib => }/openai/resources/vector_stores/file_batches.rbi (100%)
rename rbi/{lib => }/openai/resources/vector_stores/files.rbi (100%)
rename rbi/{lib => }/openai/version.rbi (100%)
diff --git a/rbi/lib/openai/client.rbi b/rbi/openai/client.rbi
similarity index 100%
rename from rbi/lib/openai/client.rbi
rename to rbi/openai/client.rbi
diff --git a/rbi/lib/openai/errors.rbi b/rbi/openai/errors.rbi
similarity index 100%
rename from rbi/lib/openai/errors.rbi
rename to rbi/openai/errors.rbi
diff --git a/rbi/lib/openai/file_part.rbi b/rbi/openai/file_part.rbi
similarity index 100%
rename from rbi/lib/openai/file_part.rbi
rename to rbi/openai/file_part.rbi
diff --git a/rbi/lib/openai/internal.rbi b/rbi/openai/internal.rbi
similarity index 100%
rename from rbi/lib/openai/internal.rbi
rename to rbi/openai/internal.rbi
diff --git a/rbi/lib/openai/internal/cursor_page.rbi b/rbi/openai/internal/cursor_page.rbi
similarity index 100%
rename from rbi/lib/openai/internal/cursor_page.rbi
rename to rbi/openai/internal/cursor_page.rbi
diff --git a/rbi/lib/openai/internal/page.rbi b/rbi/openai/internal/page.rbi
similarity index 100%
rename from rbi/lib/openai/internal/page.rbi
rename to rbi/openai/internal/page.rbi
diff --git a/rbi/lib/openai/internal/stream.rbi b/rbi/openai/internal/stream.rbi
similarity index 100%
rename from rbi/lib/openai/internal/stream.rbi
rename to rbi/openai/internal/stream.rbi
diff --git a/rbi/lib/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi
similarity index 100%
rename from rbi/lib/openai/internal/transport/base_client.rbi
rename to rbi/openai/internal/transport/base_client.rbi
diff --git a/rbi/lib/openai/internal/transport/pooled_net_requester.rbi b/rbi/openai/internal/transport/pooled_net_requester.rbi
similarity index 100%
rename from rbi/lib/openai/internal/transport/pooled_net_requester.rbi
rename to rbi/openai/internal/transport/pooled_net_requester.rbi
diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/openai/internal/type/array_of.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/array_of.rbi
rename to rbi/openai/internal/type/array_of.rbi
diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/base_model.rbi
rename to rbi/openai/internal/type/base_model.rbi
diff --git a/rbi/lib/openai/internal/type/base_page.rbi b/rbi/openai/internal/type/base_page.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/base_page.rbi
rename to rbi/openai/internal/type/base_page.rbi
diff --git a/rbi/lib/openai/internal/type/base_stream.rbi b/rbi/openai/internal/type/base_stream.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/base_stream.rbi
rename to rbi/openai/internal/type/base_stream.rbi
diff --git a/rbi/lib/openai/internal/type/boolean.rbi b/rbi/openai/internal/type/boolean.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/boolean.rbi
rename to rbi/openai/internal/type/boolean.rbi
diff --git a/rbi/lib/openai/internal/type/converter.rbi b/rbi/openai/internal/type/converter.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/converter.rbi
rename to rbi/openai/internal/type/converter.rbi
diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/openai/internal/type/enum.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/enum.rbi
rename to rbi/openai/internal/type/enum.rbi
diff --git a/rbi/lib/openai/internal/type/file_input.rbi b/rbi/openai/internal/type/file_input.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/file_input.rbi
rename to rbi/openai/internal/type/file_input.rbi
diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/openai/internal/type/hash_of.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/hash_of.rbi
rename to rbi/openai/internal/type/hash_of.rbi
diff --git a/rbi/lib/openai/internal/type/request_parameters.rbi b/rbi/openai/internal/type/request_parameters.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/request_parameters.rbi
rename to rbi/openai/internal/type/request_parameters.rbi
diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/union.rbi
rename to rbi/openai/internal/type/union.rbi
diff --git a/rbi/lib/openai/internal/type/unknown.rbi b/rbi/openai/internal/type/unknown.rbi
similarity index 100%
rename from rbi/lib/openai/internal/type/unknown.rbi
rename to rbi/openai/internal/type/unknown.rbi
diff --git a/rbi/lib/openai/internal/util.rbi b/rbi/openai/internal/util.rbi
similarity index 100%
rename from rbi/lib/openai/internal/util.rbi
rename to rbi/openai/internal/util.rbi
diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/openai/models/all_models.rbi
similarity index 100%
rename from rbi/lib/openai/models/all_models.rbi
rename to rbi/openai/models/all_models.rbi
diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/speech_create_params.rbi
rename to rbi/openai/models/audio/speech_create_params.rbi
diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/openai/models/audio/speech_model.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/speech_model.rbi
rename to rbi/openai/models/audio/speech_model.rbi
diff --git a/rbi/lib/openai/models/audio/transcription.rbi b/rbi/openai/models/audio/transcription.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription.rbi
rename to rbi/openai/models/audio/transcription.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_create_params.rbi
rename to rbi/openai/models/audio/transcription_create_params.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_create_response.rbi b/rbi/openai/models/audio/transcription_create_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_create_response.rbi
rename to rbi/openai/models/audio/transcription_create_response.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/openai/models/audio/transcription_include.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_include.rbi
rename to rbi/openai/models/audio/transcription_include.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_segment.rbi b/rbi/openai/models/audio/transcription_segment.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_segment.rbi
rename to rbi/openai/models/audio/transcription_segment.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_stream_event.rbi b/rbi/openai/models/audio/transcription_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_stream_event.rbi
rename to rbi/openai/models/audio/transcription_stream_event.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_text_delta_event.rbi b/rbi/openai/models/audio/transcription_text_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_text_delta_event.rbi
rename to rbi/openai/models/audio/transcription_text_delta_event.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_text_done_event.rbi b/rbi/openai/models/audio/transcription_text_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_text_done_event.rbi
rename to rbi/openai/models/audio/transcription_text_done_event.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_verbose.rbi b/rbi/openai/models/audio/transcription_verbose.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_verbose.rbi
rename to rbi/openai/models/audio/transcription_verbose.rbi
diff --git a/rbi/lib/openai/models/audio/transcription_word.rbi b/rbi/openai/models/audio/transcription_word.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/transcription_word.rbi
rename to rbi/openai/models/audio/transcription_word.rbi
diff --git a/rbi/lib/openai/models/audio/translation.rbi b/rbi/openai/models/audio/translation.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/translation.rbi
rename to rbi/openai/models/audio/translation.rbi
diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/translation_create_params.rbi
rename to rbi/openai/models/audio/translation_create_params.rbi
diff --git a/rbi/lib/openai/models/audio/translation_create_response.rbi b/rbi/openai/models/audio/translation_create_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/translation_create_response.rbi
rename to rbi/openai/models/audio/translation_create_response.rbi
diff --git a/rbi/lib/openai/models/audio/translation_verbose.rbi b/rbi/openai/models/audio/translation_verbose.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio/translation_verbose.rbi
rename to rbi/openai/models/audio/translation_verbose.rbi
diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/openai/models/audio_model.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio_model.rbi
rename to rbi/openai/models/audio_model.rbi
diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/openai/models/audio_response_format.rbi
similarity index 100%
rename from rbi/lib/openai/models/audio_response_format.rbi
rename to rbi/openai/models/audio_response_format.rbi
diff --git a/rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/openai/models/auto_file_chunking_strategy_param.rbi
similarity index 100%
rename from rbi/lib/openai/models/auto_file_chunking_strategy_param.rbi
rename to rbi/openai/models/auto_file_chunking_strategy_param.rbi
diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/openai/models/batch.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch.rbi
rename to rbi/openai/models/batch.rbi
diff --git a/rbi/lib/openai/models/batch_cancel_params.rbi b/rbi/openai/models/batch_cancel_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_cancel_params.rbi
rename to rbi/openai/models/batch_cancel_params.rbi
diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/openai/models/batch_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_create_params.rbi
rename to rbi/openai/models/batch_create_params.rbi
diff --git a/rbi/lib/openai/models/batch_error.rbi b/rbi/openai/models/batch_error.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_error.rbi
rename to rbi/openai/models/batch_error.rbi
diff --git a/rbi/lib/openai/models/batch_list_params.rbi b/rbi/openai/models/batch_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_list_params.rbi
rename to rbi/openai/models/batch_list_params.rbi
diff --git a/rbi/lib/openai/models/batch_request_counts.rbi b/rbi/openai/models/batch_request_counts.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_request_counts.rbi
rename to rbi/openai/models/batch_request_counts.rbi
diff --git a/rbi/lib/openai/models/batch_retrieve_params.rbi b/rbi/openai/models/batch_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/batch_retrieve_params.rbi
rename to rbi/openai/models/batch_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/beta/assistant.rbi b/rbi/openai/models/beta/assistant.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant.rbi
rename to rbi/openai/models/beta/assistant.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_create_params.rbi
rename to rbi/openai/models/beta/assistant_create_params.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_delete_params.rbi b/rbi/openai/models/beta/assistant_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_delete_params.rbi
rename to rbi/openai/models/beta/assistant_delete_params.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_deleted.rbi b/rbi/openai/models/beta/assistant_deleted.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_deleted.rbi
rename to rbi/openai/models/beta/assistant_deleted.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/openai/models/beta/assistant_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_list_params.rbi
rename to rbi/openai/models/beta/assistant_list_params.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_response_format_option.rbi b/rbi/openai/models/beta/assistant_response_format_option.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_response_format_option.rbi
rename to rbi/openai/models/beta/assistant_response_format_option.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_retrieve_params.rbi b/rbi/openai/models/beta/assistant_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_retrieve_params.rbi
rename to rbi/openai/models/beta/assistant_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_stream_event.rbi b/rbi/openai/models/beta/assistant_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_stream_event.rbi
rename to rbi/openai/models/beta/assistant_stream_event.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_tool.rbi b/rbi/openai/models/beta/assistant_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_tool.rbi
rename to rbi/openai/models/beta/assistant_tool.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/openai/models/beta/assistant_tool_choice.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_tool_choice.rbi
rename to rbi/openai/models/beta/assistant_tool_choice.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/openai/models/beta/assistant_tool_choice_function.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_tool_choice_function.rbi
rename to rbi/openai/models/beta/assistant_tool_choice_function.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/openai/models/beta/assistant_tool_choice_option.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi
rename to rbi/openai/models/beta/assistant_tool_choice_option.rbi
diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/assistant_update_params.rbi
rename to rbi/openai/models/beta/assistant_update_params.rbi
diff --git a/rbi/lib/openai/models/beta/code_interpreter_tool.rbi b/rbi/openai/models/beta/code_interpreter_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/code_interpreter_tool.rbi
rename to rbi/openai/models/beta/code_interpreter_tool.rbi
diff --git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/openai/models/beta/file_search_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/file_search_tool.rbi
rename to rbi/openai/models/beta/file_search_tool.rbi
diff --git a/rbi/lib/openai/models/beta/function_tool.rbi b/rbi/openai/models/beta/function_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/function_tool.rbi
rename to rbi/openai/models/beta/function_tool.rbi
diff --git a/rbi/lib/openai/models/beta/message_stream_event.rbi b/rbi/openai/models/beta/message_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/message_stream_event.rbi
rename to rbi/openai/models/beta/message_stream_event.rbi
diff --git a/rbi/lib/openai/models/beta/run_step_stream_event.rbi b/rbi/openai/models/beta/run_step_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/run_step_stream_event.rbi
rename to rbi/openai/models/beta/run_step_stream_event.rbi
diff --git a/rbi/lib/openai/models/beta/run_stream_event.rbi b/rbi/openai/models/beta/run_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/run_stream_event.rbi
rename to rbi/openai/models/beta/run_stream_event.rbi
diff --git a/rbi/lib/openai/models/beta/thread.rbi b/rbi/openai/models/beta/thread.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread.rbi
rename to rbi/openai/models/beta/thread.rbi
diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread_create_and_run_params.rbi
rename to rbi/openai/models/beta/thread_create_and_run_params.rbi
diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/openai/models/beta/thread_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread_create_params.rbi
rename to rbi/openai/models/beta/thread_create_params.rbi
diff --git a/rbi/lib/openai/models/beta/thread_delete_params.rbi b/rbi/openai/models/beta/thread_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread_delete_params.rbi
rename to rbi/openai/models/beta/thread_delete_params.rbi
diff --git a/rbi/lib/openai/models/beta/thread_deleted.rbi b/rbi/openai/models/beta/thread_deleted.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread_deleted.rbi
rename to rbi/openai/models/beta/thread_deleted.rbi
diff --git a/rbi/lib/openai/models/beta/thread_retrieve_params.rbi b/rbi/openai/models/beta/thread_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/beta/thread_retrieve_params.rbi
rename to rbi/openai/models/beta/thread_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/beta/thread_stream_event.rbi b/rbi/openai/models/beta/thread_stream_event.rbi
similarity index 100%
rename
from rbi/lib/openai/models/beta/thread_stream_event.rbi rename to rbi/openai/models/beta/thread_stream_event.rbi diff --git a/rbi/lib/openai/models/beta/thread_update_params.rbi b/rbi/openai/models/beta/thread_update_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/thread_update_params.rbi rename to rbi/openai/models/beta/thread_update_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/annotation.rbi b/rbi/openai/models/beta/threads/annotation.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/annotation.rbi rename to rbi/openai/models/beta/threads/annotation.rbi diff --git a/rbi/lib/openai/models/beta/threads/annotation_delta.rbi b/rbi/openai/models/beta/threads/annotation_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/annotation_delta.rbi rename to rbi/openai/models/beta/threads/annotation_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_annotation.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/file_citation_annotation.rbi rename to rbi/openai/models/beta/threads/file_citation_annotation.rbi diff --git a/rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/file_citation_delta_annotation.rbi rename to rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi diff --git a/rbi/lib/openai/models/beta/threads/file_path_annotation.rbi b/rbi/openai/models/beta/threads/file_path_annotation.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/file_path_annotation.rbi rename to rbi/openai/models/beta/threads/file_path_annotation.rbi diff --git a/rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/file_path_delta_annotation.rbi rename to rbi/openai/models/beta/threads/file_path_delta_annotation.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/openai/models/beta/threads/image_file.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_file.rbi rename to rbi/openai/models/beta/threads/image_file.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_file_content_block.rbi b/rbi/openai/models/beta/threads/image_file_content_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_file_content_block.rbi rename to rbi/openai/models/beta/threads/image_file_content_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/openai/models/beta/threads/image_file_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_file_delta.rbi rename to rbi/openai/models/beta/threads/image_file_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/openai/models/beta/threads/image_file_delta_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_file_delta_block.rbi rename to rbi/openai/models/beta/threads/image_file_delta_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/openai/models/beta/threads/image_url.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_url.rbi rename to rbi/openai/models/beta/threads/image_url.rbi diff --git 
a/rbi/lib/openai/models/beta/threads/image_url_content_block.rbi b/rbi/openai/models/beta/threads/image_url_content_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_url_content_block.rbi rename to rbi/openai/models/beta/threads/image_url_content_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/openai/models/beta/threads/image_url_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_url_delta.rbi rename to rbi/openai/models/beta/threads/image_url_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/openai/models/beta/threads/image_url_delta_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/image_url_delta_block.rbi rename to rbi/openai/models/beta/threads/image_url_delta_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/openai/models/beta/threads/message.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message.rbi rename to rbi/openai/models/beta/threads/message.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_content.rbi b/rbi/openai/models/beta/threads/message_content.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_content.rbi rename to rbi/openai/models/beta/threads/message_content.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_content_delta.rbi b/rbi/openai/models/beta/threads/message_content_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_content_delta.rbi rename to rbi/openai/models/beta/threads/message_content_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_content_part_param.rbi b/rbi/openai/models/beta/threads/message_content_part_param.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_content_part_param.rbi rename to rbi/openai/models/beta/threads/message_content_part_param.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/openai/models/beta/threads/message_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_create_params.rbi rename to rbi/openai/models/beta/threads/message_create_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_delete_params.rbi b/rbi/openai/models/beta/threads/message_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_delete_params.rbi rename to rbi/openai/models/beta/threads/message_delete_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_deleted.rbi b/rbi/openai/models/beta/threads/message_deleted.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_deleted.rbi rename to rbi/openai/models/beta/threads/message_deleted.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/openai/models/beta/threads/message_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_delta.rbi rename to rbi/openai/models/beta/threads/message_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_delta_event.rbi b/rbi/openai/models/beta/threads/message_delta_event.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_delta_event.rbi rename to rbi/openai/models/beta/threads/message_delta_event.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/openai/models/beta/threads/message_list_params.rbi similarity index 100% rename from 
rbi/lib/openai/models/beta/threads/message_list_params.rbi rename to rbi/openai/models/beta/threads/message_list_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/openai/models/beta/threads/message_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_retrieve_params.rbi rename to rbi/openai/models/beta/threads/message_retrieve_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/message_update_params.rbi b/rbi/openai/models/beta/threads/message_update_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/message_update_params.rbi rename to rbi/openai/models/beta/threads/message_update_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/refusal_content_block.rbi b/rbi/openai/models/beta/threads/refusal_content_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/refusal_content_block.rbi rename to rbi/openai/models/beta/threads/refusal_content_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/openai/models/beta/threads/refusal_delta_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/refusal_delta_block.rbi rename to rbi/openai/models/beta/threads/refusal_delta_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/required_action_function_tool_call.rbi rename to rbi/openai/models/beta/threads/required_action_function_tool_call.rbi diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run.rbi rename to rbi/openai/models/beta/threads/run.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_cancel_params.rbi b/rbi/openai/models/beta/threads/run_cancel_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_cancel_params.rbi rename to rbi/openai/models/beta/threads/run_cancel_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_create_params.rbi rename to rbi/openai/models/beta/threads/run_create_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/openai/models/beta/threads/run_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_list_params.rbi rename to rbi/openai/models/beta/threads/run_list_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/openai/models/beta/threads/run_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_retrieve_params.rbi rename to rbi/openai/models/beta/threads/run_retrieve_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/openai/models/beta/threads/run_status.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_status.rbi rename to rbi/openai/models/beta/threads/run_status.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rbi rename to 
rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/run_update_params.rbi b/rbi/openai/models/beta/threads/run_update_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/run_update_params.rbi rename to rbi/openai/models/beta/threads/run_update_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/code_interpreter_logs.rbi rename to rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rbi rename to rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi rename to rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi rename to rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi rename to rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi rename to rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/function_tool_call.rbi rename to rbi/openai/models/beta/threads/runs/function_tool_call.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/function_tool_call_delta.rbi rename to rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/message_creation_step_details.rbi rename to rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/run_step.rbi rename to 
rbi/openai/models/beta/threads/runs/run_step.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/run_step_delta.rbi rename to rbi/openai/models/beta/threads/runs/run_step_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/run_step_delta_event.rbi rename to rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi rename to rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/openai/models/beta/threads/runs/run_step_include.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi rename to rbi/openai/models/beta/threads/runs/run_step_include.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/openai/models/beta/threads/runs/step_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi rename to rbi/openai/models/beta/threads/runs/step_list_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/step_retrieve_params.rbi rename to rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call.rbi b/rbi/openai/models/beta/threads/runs/tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/tool_call.rbi rename to rbi/openai/models/beta/threads/runs/tool_call.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/tool_call_delta.rbi rename to rbi/openai/models/beta/threads/runs/tool_call_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/tool_call_delta_object.rbi rename to rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi diff --git a/rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/runs/tool_calls_step_details.rbi rename to rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi diff --git a/rbi/lib/openai/models/beta/threads/text.rbi b/rbi/openai/models/beta/threads/text.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/text.rbi rename to rbi/openai/models/beta/threads/text.rbi diff --git a/rbi/lib/openai/models/beta/threads/text_content_block.rbi b/rbi/openai/models/beta/threads/text_content_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/text_content_block.rbi rename to 
rbi/openai/models/beta/threads/text_content_block.rbi diff --git a/rbi/lib/openai/models/beta/threads/text_content_block_param.rbi b/rbi/openai/models/beta/threads/text_content_block_param.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/text_content_block_param.rbi rename to rbi/openai/models/beta/threads/text_content_block_param.rbi diff --git a/rbi/lib/openai/models/beta/threads/text_delta.rbi b/rbi/openai/models/beta/threads/text_delta.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/text_delta.rbi rename to rbi/openai/models/beta/threads/text_delta.rbi diff --git a/rbi/lib/openai/models/beta/threads/text_delta_block.rbi b/rbi/openai/models/beta/threads/text_delta_block.rbi similarity index 100% rename from rbi/lib/openai/models/beta/threads/text_delta_block.rbi rename to rbi/openai/models/beta/threads/text_delta_block.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion.rbi rename to rbi/openai/models/chat/chat_completion.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_assistant_message_param.rbi rename to rbi/openai/models/chat/chat_completion_assistant_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_audio.rbi b/rbi/openai/models/chat/chat_completion_audio.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_audio.rbi rename to rbi/openai/models/chat/chat_completion_audio.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_audio_param.rbi rename to rbi/openai/models/chat/chat_completion_audio_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_chunk.rbi rename to rbi/openai/models/chat/chat_completion_chunk.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part.rbi b/rbi/openai/models/chat/chat_completion_content_part.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_content_part.rbi rename to rbi/openai/models/chat/chat_completion_content_part.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/openai/models/chat/chat_completion_content_part_image.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi rename to rbi/openai/models/chat/chat_completion_content_part_image.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi rename to rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_content_part_refusal.rbi rename to rbi/openai/models/chat/chat_completion_content_part_refusal.rbi diff --git 
a/rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/openai/models/chat/chat_completion_content_part_text.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_content_part_text.rbi rename to rbi/openai/models/chat/chat_completion_content_part_text.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_deleted.rbi b/rbi/openai/models/chat/chat_completion_deleted.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_deleted.rbi rename to rbi/openai/models/chat/chat_completion_deleted.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_developer_message_param.rbi rename to rbi/openai/models/chat/chat_completion_developer_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/openai/models/chat/chat_completion_function_call_option.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_function_call_option.rbi rename to rbi/openai/models/chat/chat_completion_function_call_option.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/openai/models/chat/chat_completion_function_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_function_message_param.rbi rename to rbi/openai/models/chat/chat_completion_function_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_message.rbi rename to rbi/openai/models/chat/chat_completion_message.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_message_param.rbi b/rbi/openai/models/chat/chat_completion_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_message_param.rbi rename to rbi/openai/models/chat/chat_completion_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_message_tool_call.rbi rename to rbi/openai/models/chat/chat_completion_message_tool_call.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/openai/models/chat/chat_completion_modality.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_modality.rbi rename to rbi/openai/models/chat/chat_completion_modality.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_named_tool_choice.rbi rename to rbi/openai/models/chat/chat_completion_named_tool_choice.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_prediction_content.rbi rename to rbi/openai/models/chat/chat_completion_prediction_content.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi b/rbi/openai/models/chat/chat_completion_reasoning_effort.rbi similarity index 100% rename from 
rbi/lib/openai/models/chat/chat_completion_reasoning_effort.rbi rename to rbi/openai/models/chat/chat_completion_reasoning_effort.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/openai/models/chat/chat_completion_role.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_role.rbi rename to rbi/openai/models/chat/chat_completion_role.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_store_message.rbi b/rbi/openai/models/chat/chat_completion_store_message.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_store_message.rbi rename to rbi/openai/models/chat/chat_completion_store_message.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_stream_options.rbi b/rbi/openai/models/chat/chat_completion_stream_options.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_stream_options.rbi rename to rbi/openai/models/chat/chat_completion_stream_options.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_system_message_param.rbi rename to rbi/openai/models/chat/chat_completion_system_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/openai/models/chat/chat_completion_token_logprob.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_token_logprob.rbi rename to rbi/openai/models/chat/chat_completion_token_logprob.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_tool.rbi b/rbi/openai/models/chat/chat_completion_tool.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_tool.rbi rename to rbi/openai/models/chat/chat_completion_tool.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi rename to rbi/openai/models/chat/chat_completion_tool_choice_option.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_tool_message_param.rbi rename to rbi/openai/models/chat/chat_completion_tool_message_param.rbi diff --git a/rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi similarity index 100% rename from rbi/lib/openai/models/chat/chat_completion_user_message_param.rbi rename to rbi/openai/models/chat/chat_completion_user_message_param.rbi diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/chat/completion_create_params.rbi rename to rbi/openai/models/chat/completion_create_params.rbi diff --git a/rbi/lib/openai/models/chat/completion_delete_params.rbi b/rbi/openai/models/chat/completion_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/chat/completion_delete_params.rbi rename to rbi/openai/models/chat/completion_delete_params.rbi diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/openai/models/chat/completion_list_params.rbi similarity index 100% rename from 
rbi/lib/openai/models/chat/completion_list_params.rbi rename to rbi/openai/models/chat/completion_list_params.rbi diff --git a/rbi/lib/openai/models/chat/completion_retrieve_params.rbi b/rbi/openai/models/chat/completion_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/chat/completion_retrieve_params.rbi rename to rbi/openai/models/chat/completion_retrieve_params.rbi diff --git a/rbi/lib/openai/models/chat/completion_update_params.rbi b/rbi/openai/models/chat/completion_update_params.rbi similarity index 100% rename from rbi/lib/openai/models/chat/completion_update_params.rbi rename to rbi/openai/models/chat/completion_update_params.rbi diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/openai/models/chat/completions/message_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/chat/completions/message_list_params.rbi rename to rbi/openai/models/chat/completions/message_list_params.rbi diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi similarity index 100% rename from rbi/lib/openai/models/chat_model.rbi rename to rbi/openai/models/chat_model.rbi diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/openai/models/comparison_filter.rbi similarity index 100% rename from rbi/lib/openai/models/comparison_filter.rbi rename to rbi/openai/models/comparison_filter.rbi diff --git a/rbi/lib/openai/models/completion.rbi b/rbi/openai/models/completion.rbi similarity index 100% rename from rbi/lib/openai/models/completion.rbi rename to rbi/openai/models/completion.rbi diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/openai/models/completion_choice.rbi similarity index 100% rename from rbi/lib/openai/models/completion_choice.rbi rename to rbi/openai/models/completion_choice.rbi diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/completion_create_params.rbi rename to rbi/openai/models/completion_create_params.rbi diff --git a/rbi/lib/openai/models/completion_usage.rbi b/rbi/openai/models/completion_usage.rbi similarity index 100% rename from rbi/lib/openai/models/completion_usage.rbi rename to rbi/openai/models/completion_usage.rbi diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/openai/models/compound_filter.rbi similarity index 100% rename from rbi/lib/openai/models/compound_filter.rbi rename to rbi/openai/models/compound_filter.rbi diff --git a/rbi/lib/openai/models/create_embedding_response.rbi b/rbi/openai/models/create_embedding_response.rbi similarity index 100% rename from rbi/lib/openai/models/create_embedding_response.rbi rename to rbi/openai/models/create_embedding_response.rbi diff --git a/rbi/lib/openai/models/embedding.rbi b/rbi/openai/models/embedding.rbi similarity index 100% rename from rbi/lib/openai/models/embedding.rbi rename to rbi/openai/models/embedding.rbi diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/embedding_create_params.rbi rename to rbi/openai/models/embedding_create_params.rbi diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/openai/models/embedding_model.rbi similarity index 100% rename from rbi/lib/openai/models/embedding_model.rbi rename to rbi/openai/models/embedding_model.rbi diff --git a/rbi/lib/openai/models/error_object.rbi b/rbi/openai/models/error_object.rbi similarity index 
100% rename from rbi/lib/openai/models/error_object.rbi rename to rbi/openai/models/error_object.rbi diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/eval_create_params.rbi rename to rbi/openai/models/eval_create_params.rbi diff --git a/rbi/lib/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi similarity index 100% rename from rbi/lib/openai/models/eval_create_response.rbi rename to rbi/openai/models/eval_create_response.rbi diff --git a/rbi/lib/openai/models/eval_custom_data_source_config.rbi b/rbi/openai/models/eval_custom_data_source_config.rbi similarity index 100% rename from rbi/lib/openai/models/eval_custom_data_source_config.rbi rename to rbi/openai/models/eval_custom_data_source_config.rbi diff --git a/rbi/lib/openai/models/eval_delete_params.rbi b/rbi/openai/models/eval_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/eval_delete_params.rbi rename to rbi/openai/models/eval_delete_params.rbi diff --git a/rbi/lib/openai/models/eval_delete_response.rbi b/rbi/openai/models/eval_delete_response.rbi similarity index 100% rename from rbi/lib/openai/models/eval_delete_response.rbi rename to rbi/openai/models/eval_delete_response.rbi diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/openai/models/eval_label_model_grader.rbi similarity index 100% rename from rbi/lib/openai/models/eval_label_model_grader.rbi rename to rbi/openai/models/eval_label_model_grader.rbi diff --git a/rbi/lib/openai/models/eval_list_params.rbi b/rbi/openai/models/eval_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/eval_list_params.rbi rename to rbi/openai/models/eval_list_params.rbi diff --git a/rbi/lib/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi similarity index 100% rename from rbi/lib/openai/models/eval_list_response.rbi rename to rbi/openai/models/eval_list_response.rbi diff --git a/rbi/lib/openai/models/eval_retrieve_params.rbi b/rbi/openai/models/eval_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/eval_retrieve_params.rbi rename to rbi/openai/models/eval_retrieve_params.rbi diff --git a/rbi/lib/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi similarity index 100% rename from rbi/lib/openai/models/eval_retrieve_response.rbi rename to rbi/openai/models/eval_retrieve_response.rbi diff --git a/rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi similarity index 100% rename from rbi/lib/openai/models/eval_stored_completions_data_source_config.rbi rename to rbi/openai/models/eval_stored_completions_data_source_config.rbi diff --git a/rbi/lib/openai/models/eval_string_check_grader.rbi b/rbi/openai/models/eval_string_check_grader.rbi similarity index 100% rename from rbi/lib/openai/models/eval_string_check_grader.rbi rename to rbi/openai/models/eval_string_check_grader.rbi diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/openai/models/eval_text_similarity_grader.rbi similarity index 100% rename from rbi/lib/openai/models/eval_text_similarity_grader.rbi rename to rbi/openai/models/eval_text_similarity_grader.rbi diff --git a/rbi/lib/openai/models/eval_update_params.rbi b/rbi/openai/models/eval_update_params.rbi similarity index 100% rename from rbi/lib/openai/models/eval_update_params.rbi rename to 
rbi/openai/models/eval_update_params.rbi diff --git a/rbi/lib/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi similarity index 100% rename from rbi/lib/openai/models/eval_update_response.rbi rename to rbi/openai/models/eval_update_response.rbi diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi similarity index 100% rename from rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi rename to rbi/openai/models/evals/create_eval_completions_run_data_source.rbi diff --git a/rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi similarity index 100% rename from rbi/lib/openai/models/evals/create_eval_jsonl_run_data_source.rbi rename to rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi diff --git a/rbi/lib/openai/models/evals/eval_api_error.rbi b/rbi/openai/models/evals/eval_api_error.rbi similarity index 100% rename from rbi/lib/openai/models/evals/eval_api_error.rbi rename to rbi/openai/models/evals/eval_api_error.rbi diff --git a/rbi/lib/openai/models/evals/run_cancel_params.rbi b/rbi/openai/models/evals/run_cancel_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_cancel_params.rbi rename to rbi/openai/models/evals/run_cancel_params.rbi diff --git a/rbi/lib/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_cancel_response.rbi rename to rbi/openai/models/evals/run_cancel_response.rbi diff --git a/rbi/lib/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_create_params.rbi rename to rbi/openai/models/evals/run_create_params.rbi diff --git a/rbi/lib/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_create_response.rbi rename to rbi/openai/models/evals/run_create_response.rbi diff --git a/rbi/lib/openai/models/evals/run_delete_params.rbi b/rbi/openai/models/evals/run_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_delete_params.rbi rename to rbi/openai/models/evals/run_delete_params.rbi diff --git a/rbi/lib/openai/models/evals/run_delete_response.rbi b/rbi/openai/models/evals/run_delete_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_delete_response.rbi rename to rbi/openai/models/evals/run_delete_response.rbi diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/openai/models/evals/run_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_list_params.rbi rename to rbi/openai/models/evals/run_list_params.rbi diff --git a/rbi/lib/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_list_response.rbi rename to rbi/openai/models/evals/run_list_response.rbi diff --git a/rbi/lib/openai/models/evals/run_retrieve_params.rbi b/rbi/openai/models/evals/run_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_retrieve_params.rbi rename to rbi/openai/models/evals/run_retrieve_params.rbi diff --git a/rbi/lib/openai/models/evals/run_retrieve_response.rbi 
b/rbi/openai/models/evals/run_retrieve_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/run_retrieve_response.rbi rename to rbi/openai/models/evals/run_retrieve_response.rbi diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi b/rbi/openai/models/evals/runs/output_item_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/runs/output_item_list_params.rbi rename to rbi/openai/models/evals/runs/output_item_list_params.rbi diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_response.rbi b/rbi/openai/models/evals/runs/output_item_list_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/runs/output_item_list_response.rbi rename to rbi/openai/models/evals/runs/output_item_list_response.rbi diff --git a/rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/evals/runs/output_item_retrieve_params.rbi rename to rbi/openai/models/evals/runs/output_item_retrieve_params.rbi diff --git a/rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi similarity index 100% rename from rbi/lib/openai/models/evals/runs/output_item_retrieve_response.rbi rename to rbi/openai/models/evals/runs/output_item_retrieve_response.rbi diff --git a/rbi/lib/openai/models/file_chunking_strategy.rbi b/rbi/openai/models/file_chunking_strategy.rbi similarity index 100% rename from rbi/lib/openai/models/file_chunking_strategy.rbi rename to rbi/openai/models/file_chunking_strategy.rbi diff --git a/rbi/lib/openai/models/file_chunking_strategy_param.rbi b/rbi/openai/models/file_chunking_strategy_param.rbi similarity index 100% rename from rbi/lib/openai/models/file_chunking_strategy_param.rbi rename to rbi/openai/models/file_chunking_strategy_param.rbi diff --git a/rbi/lib/openai/models/file_content_params.rbi b/rbi/openai/models/file_content_params.rbi similarity index 100% rename from rbi/lib/openai/models/file_content_params.rbi rename to rbi/openai/models/file_content_params.rbi diff --git a/rbi/lib/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/file_create_params.rbi rename to rbi/openai/models/file_create_params.rbi diff --git a/rbi/lib/openai/models/file_delete_params.rbi b/rbi/openai/models/file_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/file_delete_params.rbi rename to rbi/openai/models/file_delete_params.rbi diff --git a/rbi/lib/openai/models/file_deleted.rbi b/rbi/openai/models/file_deleted.rbi similarity index 100% rename from rbi/lib/openai/models/file_deleted.rbi rename to rbi/openai/models/file_deleted.rbi diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/openai/models/file_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/file_list_params.rbi rename to rbi/openai/models/file_list_params.rbi diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/openai/models/file_object.rbi similarity index 100% rename from rbi/lib/openai/models/file_object.rbi rename to rbi/openai/models/file_object.rbi diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/openai/models/file_purpose.rbi similarity index 100% rename from rbi/lib/openai/models/file_purpose.rbi rename to rbi/openai/models/file_purpose.rbi diff --git a/rbi/lib/openai/models/file_retrieve_params.rbi 
b/rbi/openai/models/file_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/file_retrieve_params.rbi rename to rbi/openai/models/file_retrieve_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi rename to rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi rename to rbi/openai/models/fine_tuning/fine_tuning_job.rbi diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi rename to rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/fine_tuning_job_integration.rbi rename to rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi rename to rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi diff --git 
a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi rename to rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi diff --git a/rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/openai/models/fine_tuning/job_cancel_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/job_cancel_params.rbi rename to rbi/openai/models/fine_tuning/job_cancel_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/job_create_params.rbi rename to rbi/openai/models/fine_tuning/job_create_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/openai/models/fine_tuning/job_list_events_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/job_list_events_params.rbi rename to rbi/openai/models/fine_tuning/job_list_events_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/job_list_params.rbi b/rbi/openai/models/fine_tuning/job_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/job_list_params.rbi rename to rbi/openai/models/fine_tuning/job_list_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/openai/models/fine_tuning/job_retrieve_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/job_retrieve_params.rbi rename to rbi/openai/models/fine_tuning/job_retrieve_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi rename to rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi diff --git a/rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi similarity index 100% rename from rbi/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi rename to rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi diff --git a/rbi/lib/openai/models/function_definition.rbi b/rbi/openai/models/function_definition.rbi similarity index 100% rename from rbi/lib/openai/models/function_definition.rbi rename to rbi/openai/models/function_definition.rbi diff --git a/rbi/lib/openai/models/function_parameters.rbi b/rbi/openai/models/function_parameters.rbi similarity index 100% rename from rbi/lib/openai/models/function_parameters.rbi rename to rbi/openai/models/function_parameters.rbi diff --git a/rbi/lib/openai/models/image.rbi b/rbi/openai/models/image.rbi similarity index 100% rename from rbi/lib/openai/models/image.rbi rename to rbi/openai/models/image.rbi diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi similarity index 100% rename from rbi/lib/openai/models/image_create_variation_params.rbi rename to rbi/openai/models/image_create_variation_params.rbi diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi similarity index 100% rename from rbi/lib/openai/models/image_edit_params.rbi rename to rbi/openai/models/image_edit_params.rbi diff 
--git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/image_generate_params.rbi
rename to rbi/openai/models/image_generate_params.rbi
diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/openai/models/image_model.rbi
similarity index 100%
rename from rbi/lib/openai/models/image_model.rbi
rename to rbi/openai/models/image_model.rbi
diff --git a/rbi/lib/openai/models/images_response.rbi b/rbi/openai/models/images_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/images_response.rbi
rename to rbi/openai/models/images_response.rbi
diff --git a/rbi/lib/openai/models/metadata.rbi b/rbi/openai/models/metadata.rbi
similarity index 100%
rename from rbi/lib/openai/models/metadata.rbi
rename to rbi/openai/models/metadata.rbi
diff --git a/rbi/lib/openai/models/model.rbi b/rbi/openai/models/model.rbi
similarity index 100%
rename from rbi/lib/openai/models/model.rbi
rename to rbi/openai/models/model.rbi
diff --git a/rbi/lib/openai/models/model_delete_params.rbi b/rbi/openai/models/model_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/model_delete_params.rbi
rename to rbi/openai/models/model_delete_params.rbi
diff --git a/rbi/lib/openai/models/model_deleted.rbi b/rbi/openai/models/model_deleted.rbi
similarity index 100%
rename from rbi/lib/openai/models/model_deleted.rbi
rename to rbi/openai/models/model_deleted.rbi
diff --git a/rbi/lib/openai/models/model_list_params.rbi b/rbi/openai/models/model_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/model_list_params.rbi
rename to rbi/openai/models/model_list_params.rbi
diff --git a/rbi/lib/openai/models/model_retrieve_params.rbi b/rbi/openai/models/model_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/model_retrieve_params.rbi
rename to rbi/openai/models/model_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/openai/models/moderation.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation.rbi
rename to rbi/openai/models/moderation.rbi
diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_create_params.rbi
rename to rbi/openai/models/moderation_create_params.rbi
diff --git a/rbi/lib/openai/models/moderation_create_response.rbi b/rbi/openai/models/moderation_create_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_create_response.rbi
rename to rbi/openai/models/moderation_create_response.rbi
diff --git a/rbi/lib/openai/models/moderation_image_url_input.rbi b/rbi/openai/models/moderation_image_url_input.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_image_url_input.rbi
rename to rbi/openai/models/moderation_image_url_input.rbi
diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/openai/models/moderation_model.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_model.rbi
rename to rbi/openai/models/moderation_model.rbi
diff --git a/rbi/lib/openai/models/moderation_multi_modal_input.rbi b/rbi/openai/models/moderation_multi_modal_input.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_multi_modal_input.rbi
rename to rbi/openai/models/moderation_multi_modal_input.rbi
diff --git a/rbi/lib/openai/models/moderation_text_input.rbi b/rbi/openai/models/moderation_text_input.rbi
similarity index 100%
rename from rbi/lib/openai/models/moderation_text_input.rbi
rename to rbi/openai/models/moderation_text_input.rbi
diff --git a/rbi/lib/openai/models/other_file_chunking_strategy_object.rbi b/rbi/openai/models/other_file_chunking_strategy_object.rbi
similarity index 100%
rename from rbi/lib/openai/models/other_file_chunking_strategy_object.rbi
rename to rbi/openai/models/other_file_chunking_strategy_object.rbi
diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/openai/models/reasoning.rbi
similarity index 100%
rename from rbi/lib/openai/models/reasoning.rbi
rename to rbi/openai/models/reasoning.rbi
diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/openai/models/reasoning_effort.rbi
similarity index 100%
rename from rbi/lib/openai/models/reasoning_effort.rbi
rename to rbi/openai/models/reasoning_effort.rbi
diff --git a/rbi/lib/openai/models/response_format_json_object.rbi b/rbi/openai/models/response_format_json_object.rbi
similarity index 100%
rename from rbi/lib/openai/models/response_format_json_object.rbi
rename to rbi/openai/models/response_format_json_object.rbi
diff --git a/rbi/lib/openai/models/response_format_json_schema.rbi b/rbi/openai/models/response_format_json_schema.rbi
similarity index 100%
rename from rbi/lib/openai/models/response_format_json_schema.rbi
rename to rbi/openai/models/response_format_json_schema.rbi
diff --git a/rbi/lib/openai/models/response_format_text.rbi b/rbi/openai/models/response_format_text.rbi
similarity index 100%
rename from rbi/lib/openai/models/response_format_text.rbi
rename to rbi/openai/models/response_format_text.rbi
diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/openai/models/responses/computer_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/computer_tool.rbi
rename to rbi/openai/models/responses/computer_tool.rbi
diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/openai/models/responses/easy_input_message.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/easy_input_message.rbi
rename to rbi/openai/models/responses/easy_input_message.rbi
diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/openai/models/responses/file_search_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/file_search_tool.rbi
rename to rbi/openai/models/responses/file_search_tool.rbi
diff --git a/rbi/lib/openai/models/responses/function_tool.rbi b/rbi/openai/models/responses/function_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/function_tool.rbi
rename to rbi/openai/models/responses/function_tool.rbi
diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/input_item_list_params.rbi
rename to rbi/openai/models/responses/input_item_list_params.rbi
diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response.rbi
rename to rbi/openai/models/responses/response.rbi
diff --git a/rbi/lib/openai/models/responses/response_audio_delta_event.rbi b/rbi/openai/models/responses/response_audio_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_audio_delta_event.rbi
rename to rbi/openai/models/responses/response_audio_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_audio_done_event.rbi b/rbi/openai/models/responses/response_audio_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_audio_done_event.rbi
rename to rbi/openai/models/responses/response_audio_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_audio_transcript_delta_event.rbi
rename to rbi/openai/models/responses/response_audio_transcript_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_audio_transcript_done_event.rbi
rename to rbi/openai/models/responses/response_audio_transcript_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
rename to rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
rename to rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_call_completed_event.rbi
rename to rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
rename to rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
rename to rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi
rename to rbi/openai/models/responses/response_code_interpreter_tool_call.rbi
diff --git a/rbi/lib/openai/models/responses/response_completed_event.rbi b/rbi/openai/models/responses/response_completed_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_completed_event.rbi
rename to rbi/openai/models/responses/response_completed_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/openai/models/responses/response_computer_tool_call.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_computer_tool_call.rbi
rename to rbi/openai/models/responses/response_computer_tool_call.rbi
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi
rename to rbi/openai/models/responses/response_computer_tool_call_output_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
rename to rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi
diff --git a/rbi/lib/openai/models/responses/response_content.rbi b/rbi/openai/models/responses/response_content.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_content.rbi
rename to rbi/openai/models/responses/response_content.rbi
diff --git a/rbi/lib/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_content_part_added_event.rbi
rename to rbi/openai/models/responses/response_content_part_added_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_content_part_done_event.rbi
rename to rbi/openai/models/responses/response_content_part_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_create_params.rbi
rename to rbi/openai/models/responses/response_create_params.rbi
diff --git a/rbi/lib/openai/models/responses/response_created_event.rbi b/rbi/openai/models/responses/response_created_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_created_event.rbi
rename to rbi/openai/models/responses/response_created_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_delete_params.rbi b/rbi/openai/models/responses/response_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_delete_params.rbi
rename to rbi/openai/models/responses/response_delete_params.rbi
diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/openai/models/responses/response_error.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_error.rbi
rename to rbi/openai/models/responses/response_error.rbi
diff --git a/rbi/lib/openai/models/responses/response_error_event.rbi b/rbi/openai/models/responses/response_error_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_error_event.rbi
rename to rbi/openai/models/responses/response_error_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_failed_event.rbi b/rbi/openai/models/responses/response_failed_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_failed_event.rbi
rename to rbi/openai/models/responses/response_failed_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_file_search_call_completed_event.rbi
rename to rbi/openai/models/responses/response_file_search_call_completed_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_file_search_call_in_progress_event.rbi
rename to rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_file_search_call_searching_event.rbi
rename to rbi/openai/models/responses/response_file_search_call_searching_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/openai/models/responses/response_file_search_tool_call.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_file_search_tool_call.rbi
rename to rbi/openai/models/responses/response_file_search_tool_call.rbi
diff --git a/rbi/lib/openai/models/responses/response_format_text_config.rbi b/rbi/openai/models/responses/response_format_text_config.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_format_text_config.rbi
rename to rbi/openai/models/responses/response_format_text_config.rbi
diff --git a/rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/openai/models/responses/response_format_text_json_schema_config.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_format_text_json_schema_config.rbi
rename to rbi/openai/models/responses/response_format_text_json_schema_config.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_call_arguments_delta_event.rbi
rename to rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_call_arguments_done_event.rbi
rename to rbi/openai/models/responses/response_function_call_arguments_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/openai/models/responses/response_function_tool_call.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_tool_call.rbi
rename to rbi/openai/models/responses/response_function_tool_call.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_item.rbi b/rbi/openai/models/responses/response_function_tool_call_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_tool_call_item.rbi
rename to rbi/openai/models/responses/response_function_tool_call_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/openai/models/responses/response_function_tool_call_output_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi
rename to rbi/openai/models/responses/response_function_tool_call_output_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_function_web_search.rbi
rename to rbi/openai/models/responses/response_function_web_search.rbi
diff --git a/rbi/lib/openai/models/responses/response_in_progress_event.rbi b/rbi/openai/models/responses/response_in_progress_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_in_progress_event.rbi
rename to rbi/openai/models/responses/response_in_progress_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_includable.rbi
rename to rbi/openai/models/responses/response_includable.rbi
diff --git a/rbi/lib/openai/models/responses/response_incomplete_event.rbi b/rbi/openai/models/responses/response_incomplete_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_incomplete_event.rbi
rename to rbi/openai/models/responses/response_incomplete_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_input.rbi b/rbi/openai/models/responses/response_input.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input.rbi
rename to rbi/openai/models/responses/response_input.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/openai/models/responses/response_input_audio.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_audio.rbi
rename to rbi/openai/models/responses/response_input_audio.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_content.rbi b/rbi/openai/models/responses/response_input_content.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_content.rbi
rename to rbi/openai/models/responses/response_input_content.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_file.rbi b/rbi/openai/models/responses/response_input_file.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_file.rbi
rename to rbi/openai/models/responses/response_input_file.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/openai/models/responses/response_input_image.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_image.rbi
rename to rbi/openai/models/responses/response_input_image.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_item.rbi
rename to rbi/openai/models/responses/response_input_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_message_content_list.rbi b/rbi/openai/models/responses/response_input_message_content_list.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_message_content_list.rbi
rename to rbi/openai/models/responses/response_input_message_content_list.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/openai/models/responses/response_input_message_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_message_item.rbi
rename to rbi/openai/models/responses/response_input_message_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_input_text.rbi b/rbi/openai/models/responses/response_input_text.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_input_text.rbi
rename to rbi/openai/models/responses/response_input_text.rbi
diff --git a/rbi/lib/openai/models/responses/response_item.rbi b/rbi/openai/models/responses/response_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_item.rbi
rename to rbi/openai/models/responses/response_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_item_list.rbi
rename to rbi/openai/models/responses/response_item_list.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_audio.rbi b/rbi/openai/models/responses/response_output_audio.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_audio.rbi
rename to rbi/openai/models/responses/response_output_audio.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_item.rbi b/rbi/openai/models/responses/response_output_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_item.rbi
rename to rbi/openai/models/responses/response_output_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_item_added_event.rbi
rename to rbi/openai/models/responses/response_output_item_added_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_item_done_event.rbi
rename to rbi/openai/models/responses/response_output_item_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/openai/models/responses/response_output_message.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_message.rbi
rename to rbi/openai/models/responses/response_output_message.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_refusal.rbi b/rbi/openai/models/responses/response_output_refusal.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_refusal.rbi
rename to rbi/openai/models/responses/response_output_refusal.rbi
diff --git a/rbi/lib/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_output_text.rbi
rename to rbi/openai/models/responses/response_output_text.rbi
diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/openai/models/responses/response_reasoning_item.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_reasoning_item.rbi
rename to rbi/openai/models/responses/response_reasoning_item.rbi
diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_reasoning_summary_part_added_event.rbi
rename to rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_reasoning_summary_part_done_event.rbi
rename to rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rbi
rename to rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_reasoning_summary_text_done_event.rbi
rename to rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_refusal_delta_event.rbi b/rbi/openai/models/responses/response_refusal_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_refusal_delta_event.rbi
rename to rbi/openai/models/responses/response_refusal_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_refusal_done_event.rbi b/rbi/openai/models/responses/response_refusal_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_refusal_done_event.rbi
rename to rbi/openai/models/responses/response_refusal_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_retrieve_params.rbi
rename to rbi/openai/models/responses/response_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/openai/models/responses/response_status.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_status.rbi
rename to rbi/openai/models/responses/response_status.rbi
diff --git a/rbi/lib/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_stream_event.rbi
rename to rbi/openai/models/responses/response_stream_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_text_annotation_delta_event.rbi
rename to rbi/openai/models/responses/response_text_annotation_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_text_config.rbi b/rbi/openai/models/responses/response_text_config.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_text_config.rbi
rename to rbi/openai/models/responses/response_text_config.rbi
diff --git a/rbi/lib/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_text_delta_event.rbi
rename to rbi/openai/models/responses/response_text_delta_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_text_done_event.rbi
rename to rbi/openai/models/responses/response_text_done_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_usage.rbi b/rbi/openai/models/responses/response_usage.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_usage.rbi
rename to rbi/openai/models/responses/response_usage.rbi
diff --git a/rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_web_search_call_completed_event.rbi
rename to rbi/openai/models/responses/response_web_search_call_completed_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_web_search_call_in_progress_event.rbi
rename to rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi
diff --git a/rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/response_web_search_call_searching_event.rbi
rename to rbi/openai/models/responses/response_web_search_call_searching_event.rbi
diff --git a/rbi/lib/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/tool.rbi
rename to rbi/openai/models/responses/tool.rbi
diff --git a/rbi/lib/openai/models/responses/tool_choice_function.rbi b/rbi/openai/models/responses/tool_choice_function.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/tool_choice_function.rbi
rename to rbi/openai/models/responses/tool_choice_function.rbi
diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/openai/models/responses/tool_choice_options.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/tool_choice_options.rbi
rename to rbi/openai/models/responses/tool_choice_options.rbi
diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/tool_choice_types.rbi
rename to rbi/openai/models/responses/tool_choice_types.rbi
diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/openai/models/responses/web_search_tool.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses/web_search_tool.rbi
rename to rbi/openai/models/responses/web_search_tool.rbi
diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/openai/models/responses_model.rbi
similarity index 100%
rename from rbi/lib/openai/models/responses_model.rbi
rename to rbi/openai/models/responses_model.rbi
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy.rbi b/rbi/openai/models/static_file_chunking_strategy.rbi
similarity index 100%
rename from rbi/lib/openai/models/static_file_chunking_strategy.rbi
rename to rbi/openai/models/static_file_chunking_strategy.rbi
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object.rbi b/rbi/openai/models/static_file_chunking_strategy_object.rbi
similarity index 100%
rename from rbi/lib/openai/models/static_file_chunking_strategy_object.rbi
rename to rbi/openai/models/static_file_chunking_strategy_object.rbi
diff --git a/rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/openai/models/static_file_chunking_strategy_object_param.rbi
similarity index 100%
rename from rbi/lib/openai/models/static_file_chunking_strategy_object_param.rbi
rename to rbi/openai/models/static_file_chunking_strategy_object_param.rbi
diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/openai/models/upload.rbi
similarity index 100%
rename from rbi/lib/openai/models/upload.rbi
rename to rbi/openai/models/upload.rbi
diff --git a/rbi/lib/openai/models/upload_cancel_params.rbi b/rbi/openai/models/upload_cancel_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/upload_cancel_params.rbi
rename to rbi/openai/models/upload_cancel_params.rbi
diff --git a/rbi/lib/openai/models/upload_complete_params.rbi b/rbi/openai/models/upload_complete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/upload_complete_params.rbi
rename to rbi/openai/models/upload_complete_params.rbi
diff --git a/rbi/lib/openai/models/upload_create_params.rbi b/rbi/openai/models/upload_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/upload_create_params.rbi
rename to rbi/openai/models/upload_create_params.rbi
diff --git a/rbi/lib/openai/models/uploads/part_create_params.rbi b/rbi/openai/models/uploads/part_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/uploads/part_create_params.rbi
rename to rbi/openai/models/uploads/part_create_params.rbi
diff --git a/rbi/lib/openai/models/uploads/upload_part.rbi b/rbi/openai/models/uploads/upload_part.rbi
similarity index 100%
rename from rbi/lib/openai/models/uploads/upload_part.rbi
rename to rbi/openai/models/uploads/upload_part.rbi
diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store.rbi
rename to rbi/openai/models/vector_store.rbi
diff --git a/rbi/lib/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_create_params.rbi
rename to rbi/openai/models/vector_store_create_params.rbi
diff --git a/rbi/lib/openai/models/vector_store_delete_params.rbi b/rbi/openai/models/vector_store_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_delete_params.rbi
rename to rbi/openai/models/vector_store_delete_params.rbi
diff --git a/rbi/lib/openai/models/vector_store_deleted.rbi b/rbi/openai/models/vector_store_deleted.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_deleted.rbi
rename to rbi/openai/models/vector_store_deleted.rbi
diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/openai/models/vector_store_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_list_params.rbi
rename to rbi/openai/models/vector_store_list_params.rbi
diff --git a/rbi/lib/openai/models/vector_store_retrieve_params.rbi b/rbi/openai/models/vector_store_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_retrieve_params.rbi
rename to rbi/openai/models/vector_store_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_search_params.rbi
rename to rbi/openai/models/vector_store_search_params.rbi
diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/openai/models/vector_store_search_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_search_response.rbi
rename to rbi/openai/models/vector_store_search_response.rbi
diff --git a/rbi/lib/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_store_update_params.rbi
rename to rbi/openai/models/vector_store_update_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_batch_cancel_params.rbi
rename to rbi/openai/models/vector_stores/file_batch_cancel_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/openai/models/vector_stores/file_batch_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_batch_create_params.rbi
rename to rbi/openai/models/vector_stores/file_batch_create_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi
rename to rbi/openai/models/vector_stores/file_batch_list_files_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_batch_retrieve_params.rbi
rename to rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_content_params.rbi b/rbi/openai/models/vector_stores/file_content_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_content_params.rbi
rename to rbi/openai/models/vector_stores/file_content_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_content_response.rbi b/rbi/openai/models/vector_stores/file_content_response.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_content_response.rbi
rename to rbi/openai/models/vector_stores/file_content_response.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_create_params.rbi b/rbi/openai/models/vector_stores/file_create_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_create_params.rbi
rename to rbi/openai/models/vector_stores/file_create_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_delete_params.rbi b/rbi/openai/models/vector_stores/file_delete_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_delete_params.rbi
rename to rbi/openai/models/vector_stores/file_delete_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/openai/models/vector_stores/file_list_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_list_params.rbi
rename to rbi/openai/models/vector_stores/file_list_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_retrieve_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_retrieve_params.rbi
rename to rbi/openai/models/vector_stores/file_retrieve_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/file_update_params.rbi b/rbi/openai/models/vector_stores/file_update_params.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/file_update_params.rbi
rename to rbi/openai/models/vector_stores/file_update_params.rbi
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/vector_store_file.rbi
rename to rbi/openai/models/vector_stores/vector_store_file.rbi
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi
rename to rbi/openai/models/vector_stores/vector_store_file_batch.rbi
diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi
similarity index 100%
rename from rbi/lib/openai/models/vector_stores/vector_store_file_deleted.rbi
rename to rbi/openai/models/vector_stores/vector_store_file_deleted.rbi
diff --git a/rbi/lib/openai/request_options.rbi b/rbi/openai/request_options.rbi
similarity index 100%
rename from rbi/lib/openai/request_options.rbi
rename to rbi/openai/request_options.rbi
diff --git a/rbi/lib/openai/resources/audio.rbi b/rbi/openai/resources/audio.rbi
similarity index 100%
rename from rbi/lib/openai/resources/audio.rbi
rename to rbi/openai/resources/audio.rbi
diff --git a/rbi/lib/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi
similarity index 100%
rename from rbi/lib/openai/resources/audio/speech.rbi
rename to rbi/openai/resources/audio/speech.rbi
diff --git a/rbi/lib/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi
similarity index 100%
rename from rbi/lib/openai/resources/audio/transcriptions.rbi
rename to rbi/openai/resources/audio/transcriptions.rbi
diff --git a/rbi/lib/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi
similarity index 100%
rename from rbi/lib/openai/resources/audio/translations.rbi
rename to rbi/openai/resources/audio/translations.rbi
diff --git a/rbi/lib/openai/resources/batches.rbi b/rbi/openai/resources/batches.rbi
similarity index 100%
rename from rbi/lib/openai/resources/batches.rbi
rename to rbi/openai/resources/batches.rbi
diff --git a/rbi/lib/openai/resources/beta.rbi b/rbi/openai/resources/beta.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta.rbi
rename to rbi/openai/resources/beta.rbi
diff --git a/rbi/lib/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta/assistants.rbi
rename to rbi/openai/resources/beta/assistants.rbi
diff --git a/rbi/lib/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta/threads.rbi
rename to rbi/openai/resources/beta/threads.rbi
diff --git a/rbi/lib/openai/resources/beta/threads/messages.rbi b/rbi/openai/resources/beta/threads/messages.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta/threads/messages.rbi
rename to rbi/openai/resources/beta/threads/messages.rbi
diff --git a/rbi/lib/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta/threads/runs.rbi
rename to rbi/openai/resources/beta/threads/runs.rbi
diff --git a/rbi/lib/openai/resources/beta/threads/runs/steps.rbi b/rbi/openai/resources/beta/threads/runs/steps.rbi
similarity index 100%
rename from rbi/lib/openai/resources/beta/threads/runs/steps.rbi
rename to rbi/openai/resources/beta/threads/runs/steps.rbi
diff --git a/rbi/lib/openai/resources/chat.rbi b/rbi/openai/resources/chat.rbi
similarity index 100%
rename from rbi/lib/openai/resources/chat.rbi
rename to rbi/openai/resources/chat.rbi
diff --git a/rbi/lib/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi
similarity index 100%
rename from rbi/lib/openai/resources/chat/completions.rbi
rename to rbi/openai/resources/chat/completions.rbi
diff --git a/rbi/lib/openai/resources/chat/completions/messages.rbi b/rbi/openai/resources/chat/completions/messages.rbi
similarity index 100%
rename from rbi/lib/openai/resources/chat/completions/messages.rbi
rename to rbi/openai/resources/chat/completions/messages.rbi
diff --git a/rbi/lib/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi
similarity index 100%
rename from rbi/lib/openai/resources/completions.rbi
rename to rbi/openai/resources/completions.rbi
diff --git a/rbi/lib/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi
similarity index 100%
rename from rbi/lib/openai/resources/embeddings.rbi
rename to rbi/openai/resources/embeddings.rbi
diff --git a/rbi/lib/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi
similarity index 100%
rename from rbi/lib/openai/resources/evals.rbi
rename to rbi/openai/resources/evals.rbi
diff --git a/rbi/lib/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi
similarity index 100%
rename from rbi/lib/openai/resources/evals/runs.rbi
rename to rbi/openai/resources/evals/runs.rbi
diff --git a/rbi/lib/openai/resources/evals/runs/output_items.rbi b/rbi/openai/resources/evals/runs/output_items.rbi
similarity index 100%
rename from rbi/lib/openai/resources/evals/runs/output_items.rbi
rename to rbi/openai/resources/evals/runs/output_items.rbi
diff --git a/rbi/lib/openai/resources/files.rbi b/rbi/openai/resources/files.rbi
similarity index 100%
rename from rbi/lib/openai/resources/files.rbi
rename to rbi/openai/resources/files.rbi
diff --git a/rbi/lib/openai/resources/fine_tuning.rbi b/rbi/openai/resources/fine_tuning.rbi
similarity index 100%
rename from rbi/lib/openai/resources/fine_tuning.rbi
rename to rbi/openai/resources/fine_tuning.rbi
diff --git a/rbi/lib/openai/resources/fine_tuning/checkpoints.rbi b/rbi/openai/resources/fine_tuning/checkpoints.rbi
similarity index 100%
rename from rbi/lib/openai/resources/fine_tuning/checkpoints.rbi
rename to rbi/openai/resources/fine_tuning/checkpoints.rbi
diff --git a/rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi
similarity index 100%
rename from rbi/lib/openai/resources/fine_tuning/checkpoints/permissions.rbi
rename to rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi
diff --git a/rbi/lib/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi
similarity index 100%
rename from rbi/lib/openai/resources/fine_tuning/jobs.rbi
rename to rbi/openai/resources/fine_tuning/jobs.rbi
diff --git a/rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi
similarity index 100%
rename from rbi/lib/openai/resources/fine_tuning/jobs/checkpoints.rbi
rename to rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi
diff --git a/rbi/lib/openai/resources/images.rbi b/rbi/openai/resources/images.rbi
similarity index 100%
rename from rbi/lib/openai/resources/images.rbi
rename to rbi/openai/resources/images.rbi
diff --git a/rbi/lib/openai/resources/models.rbi b/rbi/openai/resources/models.rbi
similarity index 100%
rename from rbi/lib/openai/resources/models.rbi
rename to rbi/openai/resources/models.rbi
diff --git a/rbi/lib/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi
similarity index 100%
rename from rbi/lib/openai/resources/moderations.rbi
rename to rbi/openai/resources/moderations.rbi
diff --git a/rbi/lib/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi
similarity index 100%
rename from rbi/lib/openai/resources/responses.rbi
rename to rbi/openai/resources/responses.rbi
diff --git a/rbi/lib/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi
similarity index 100%
rename from rbi/lib/openai/resources/responses/input_items.rbi
rename to rbi/openai/resources/responses/input_items.rbi
diff --git a/rbi/lib/openai/resources/uploads.rbi b/rbi/openai/resources/uploads.rbi
similarity index 100%
rename from rbi/lib/openai/resources/uploads.rbi
rename to rbi/openai/resources/uploads.rbi
diff --git a/rbi/lib/openai/resources/uploads/parts.rbi b/rbi/openai/resources/uploads/parts.rbi
similarity index 100%
rename from rbi/lib/openai/resources/uploads/parts.rbi
rename to rbi/openai/resources/uploads/parts.rbi
diff --git a/rbi/lib/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi
similarity index 100%
rename from rbi/lib/openai/resources/vector_stores.rbi
rename to rbi/openai/resources/vector_stores.rbi
diff --git a/rbi/lib/openai/resources/vector_stores/file_batches.rbi b/rbi/openai/resources/vector_stores/file_batches.rbi
similarity index 100%
rename from rbi/lib/openai/resources/vector_stores/file_batches.rbi
rename to rbi/openai/resources/vector_stores/file_batches.rbi
diff --git a/rbi/lib/openai/resources/vector_stores/files.rbi b/rbi/openai/resources/vector_stores/files.rbi
similarity index 100%
rename from rbi/lib/openai/resources/vector_stores/files.rbi
rename to rbi/openai/resources/vector_stores/files.rbi
diff --git a/rbi/lib/openai/version.rbi b/rbi/openai/version.rbi
similarity index 100%
rename from rbi/lib/openai/version.rbi
rename to rbi/openai/version.rbi

From 2689ab61e7ecda72bb060fe0eb1f642a1703c546 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 26 Apr 2025 06:27:23 +0000
Subject: [PATCH 164/295] chore: more accurate type annotations and aliases

---
 lib/openai/errors.rb                          | 36 +++++++++----------
 lib/openai/internal/type/boolean.rb           | 21 ++++++-----
 lib/openai/internal/type/enum.rb              | 21 ++++++-----
 lib/openai/internal/type/unknown.rb           | 21 ++++++-----
 .../beta/thread_create_and_run_params.rb      |  1 +
 .../models/beta/thread_create_params.rb       |  1 +
 .../beta/threads/message_create_params.rb     |  1 +
 .../models/beta/threads/run_create_params.rb  |  1 +
 ...chat_completion_assistant_message_param.rb |  1 +
 ...chat_completion_developer_message_param.rb |  1 +
 .../chat_completion_prediction_content.rb     |  1 +
 .../chat_completion_system_message_param.rb   |  1 +
 .../chat_completion_tool_message_param.rb     |  1 +
 .../chat_completion_user_message_param.rb     |  1 +
 .../models/chat/completion_create_params.rb   |  1 +
 lib/openai/models/completion_create_params.rb |  4 +++
 lib/openai/models/embedding_create_params.rb  |  3 ++
 lib/openai/models/image_edit_params.rb        |  1 +
 lib/openai/models/moderation_create_params.rb |  2 ++
 .../models/vector_store_search_params.rb      |  1 +
 lib/openai/request_options.rb                 |  9 +++--
 .../beta/thread_create_and_run_params.rbs     |  5 +--
 .../models/beta/thread_create_params.rbs      |  5 +--
 .../beta/threads/message_create_params.rbs    |  5 +--
 .../models/beta/threads/run_create_params.rbs |  5 +--
 ...hat_completion_assistant_message_param.rbs |  5 +--
 ...hat_completion_developer_message_param.rbs |  5 +--
 .../chat_completion_prediction_content.rbs    |  5 +--
 .../chat_completion_system_message_param.rbs  |  5 +--
 .../chat_completion_tool_message_param.rbs    |  5 +--
 .../chat_completion_user_message_param.rbs    |  5 +--
 .../models/chat/completion_create_params.rbs  |  4 +--
 .../models/completion_create_params.rbs       | 16 +++------
 sig/openai/models/embedding_create_params.rbs | 12 ++-----
 sig/openai/models/image_edit_params.rbs       |  4 +--
 .../models/moderation_create_params.rbs       |  9 ++---
 .../models/vector_store_search_params.rbs     |  4 +--
 test/openai/file_part_test.rb                 |  1 +
 38 files changed, 97 insertions(+), 133 deletions(-)

diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb
index 73e74f52..6f159c87 100644
--- a/lib/openai/errors.rb
+++ b/lib/openai/errors.rb
@@ -3,9 +3,9 @@
 module OpenAI
   module Errors
     class Error < StandardError
-      # @!parse
-      #   # @return [StandardError, nil]
-      #   attr_accessor :cause
+      # @!attribute cause
+      #
+      #   @return [StandardError, nil]
     end
 
     class ConversionError < OpenAI::Errors::Error
@@ -49,25 +49,25 @@ def initialize(url:, status: nil, body: nil, request: nil, response: nil, messag
     end
 
     class APIConnectionError < OpenAI::Errors::APIError
-      # @!parse
-      #   # @return [nil]
-      #   attr_accessor :status
+      # @!attribute status
+      #
+      #   @return [nil]
 
-      # @!parse
-      #   # @return [nil]
-      #   attr_accessor :body
+      # @!attribute body
+      #
+      #   @return [nil]
 
-      # @!parse
-      #   # @return [nil]
-      #   attr_accessor :code
+      # @!attribute code
+      +
+      #   @return [nil]
 
-      # @!parse
-      #   # @return [nil]
-      #   attr_accessor :param
+      # @!attribute param
+      #
+      #   @return [nil]
 
-      # @!parse
-      #   # @return [nil]
-      #   attr_accessor :type
+      # @!attribute type
+      #
+      #   @return [nil]
 
       # @api private
       #
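The errors.rb hunks above replace YARD `@!parse` blocks, which ask the documentation tool to parse commented-out Ruby, with first-class `@!attribute` declarations. A minimal sketch of the two styles on a hypothetical class (not gem API):

    # Before: YARD parses the Ruby nested inside the comment to find the accessor.
    class Before
      # @!parse
      #   # @return [StandardError, nil]
      #   attr_accessor :cause
    end

    # After: the attribute is declared to YARD directly; no phantom code is parsed.
    class After
      # @!attribute cause
      #
      #   @return [StandardError, nil]
    end

Both produce the same rendered documentation; the directive form avoids maintaining accessor stubs that never execute.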
diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb
index 13c242e0..3e852539 100644
--- a/lib/openai/internal/type/boolean.rb
+++ b/lib/openai/internal/type/boolean.rb
@@ -46,17 +46,16 @@ def coerce(value, state:)
         value
       end
 
-      # @!parse
-      #   # @api private
-      #   #
-      #   # @param value [Boolean, Object]
-      #   #
-      #   # @param state [Hash{Symbol=>Object}] .
-      #   #
-      #   #   @option state [Boolean] :can_retry
-      #   #
-      #   # @return [Boolean, Object]
-      #   def dump(value, state:) = super
+      # @!method dump(value, state:)
+      #   @api private
+      #
+      #   @param value [Boolean, Object]
+      #
+      #   @param state [Hash{Symbol=>Object}] .
+      #
+      #     @option state [Boolean] :can_retry
+      #
+      #   @return [Boolean, Object]
     end
   end
 end
diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb
index 55df3e66..5ceef679 100644
--- a/lib/openai/internal/type/enum.rb
+++ b/lib/openai/internal/type/enum.rb
@@ -100,17 +100,16 @@ def coerce(value, state:)
         end
       end
 
-      # @!parse
-      #   # @api private
-      #   #
-      #   # @param value [Symbol, Object]
-      #   #
-      #   # @param state [Hash{Symbol=>Object}] .
-      #   #
-      #   #   @option state [Boolean] :can_retry
-      #   #
-      #   # @return [Symbol, Object]
-      #   def dump(value, state:) = super
+      # @!method dump(value, state:)
+      #   @api private
+      #
+      #   @param value [Symbol, Object]
+      #
+      #   @param state [Hash{Symbol=>Object}] .
+      #
+      #     @option state [Boolean] :can_retry
+      #
+      #   @return [Symbol, Object]
 
       # @api private
       #
diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb
index 698303d5..a629570f 100644
--- a/lib/openai/internal/type/unknown.rb
+++ b/lib/openai/internal/type/unknown.rb
@@ -48,17 +48,16 @@ def coerce(value, state:)
         value
       end
 
-      # @!parse
-      #   # @api private
-      #   #
-      #   # @param value [Object]
-      #   #
-      #   # @param state [Hash{Symbol=>Object}] .
-      #   #
-      #   #   @option state [Boolean] :can_retry
-      #   #
-      #   # @return [Object]
-      #   def dump(value, state:) = super
+      # @!method dump(value, state:)
+      #   @api private
+      #
+      #   @param value [Object]
+      #
+      #   @param state [Hash{Symbol=>Object}] .
+      #
+      #     @option state [Boolean] :can_retry
+      #
+      #   @return [Object]
     end
 
     # rubocop:enable Lint/UnusedMethodArgument
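In boolean.rb, enum.rb, and unknown.rb the documented `dump` is inherited, and the old `@!parse` block carried a `def dump(value, state:) = super` stub purely for the docs. The `@!method` directive documents a method without declaring one. A toy sketch of the pattern (`Base` and `BooleanLike` are illustrative names, not the gem's real class layout):

    module Base
      def dump(value, state:) = value
    end

    class BooleanLike
      extend Base

      # @!method dump(value, state:)
      #   Documented here; the implementation is inherited from Base, so no
      #   `def ... = super` stub has to be parsed or linted.
    end

    p BooleanLike.dump(true, state: {}) # => true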
diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb
index 0cd74409..4199cc52 100644
--- a/lib/openai/models/beta/thread_create_and_run_params.rb
+++ b/lib/openai/models/beta/thread_create_and_run_params.rb
@@ -337,6 +337,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Beta::Threads::MessageContentPartParam>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           MessageContentPartParamArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }]
         end
diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb
index 3cb1d240..50ef5aca 100644
--- a/lib/openai/models/beta/thread_create_params.rb
+++ b/lib/openai/models/beta/thread_create_params.rb
@@ -114,6 +114,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Beta::Threads::MessageContentPartParam>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           MessageContentPartParamArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }]
         end
diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb
index ff6d233f..e6a5e155 100644
--- a/lib/openai/models/beta/threads/message_create_params.rb
+++ b/lib/openai/models/beta/threads/message_create_params.rb
@@ -72,6 +72,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Beta::Threads::MessageContentPartParam>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           MessageContentPartParamArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }]
         end
diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb
index d47e1177..8ced1f05 100644
--- a/lib/openai/models/beta/threads/run_create_params.rb
+++ b/lib/openai/models/beta/threads/run_create_params.rb
@@ -307,6 +307,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Beta::Threads::MessageContentPartParam>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           MessageContentPartParamArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }]
         end
diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb
index f5bc27e0..ab8b934b 100644
--- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb
@@ -131,6 +131,7 @@ module ArrayOfContentPart
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ArrayOfContentPartArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }]
         end
diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb
index df851c32..412741c9 100644
--- a/lib/openai/models/chat/chat_completion_developer_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb
@@ -53,6 +53,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionContentPartText>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ChatCompletionContentPartTextArray =
             OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }]
         end
diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb
index 3669e9d3..df3dbe30 100644
--- a/lib/openai/models/chat/chat_completion_prediction_content.rb
+++ b/lib/openai/models/chat/chat_completion_prediction_content.rb
@@ -48,6 +48,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionContentPartText>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ChatCompletionContentPartTextArray =
             OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }]
         end
diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb
index 99e35629..7ef7fbfd 100644
--- a/lib/openai/models/chat/chat_completion_system_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_system_message_param.rb
@@ -53,6 +53,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionContentPartText>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ChatCompletionContentPartTextArray =
             OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }]
         end
diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb
index e03505be..2ff0bf3b 100644
--- a/lib/openai/models/chat/chat_completion_tool_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb
@@ -44,6 +44,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionContentPartText>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ChatCompletionContentPartTextArray =
             OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }]
         end
diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb
index 3aa9532f..b412e5c9 100644
--- a/lib/openai/models/chat/chat_completion_user_message_param.rb
+++ b/lib/openai/models/chat/chat_completion_user_message_param.rb
@@ -52,6 +52,7 @@ module Content
           # @!method self.variants
           #   @return [Array(String, Array<OpenAI::Models::Chat::ChatCompletionContentPart>)]
 
+          # @type [OpenAI::Internal::Type::Converter]
           ChatCompletionContentPartArray =
             OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }]
         end
diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb
index 185edac2..ed12261d 100644
--- a/lib/openai/models/chat/completion_create_params.rb
+++ b/lib/openai/models/chat/completion_create_params.rb
@@ -587,6 +587,7 @@ module Stop
 
         # @!method self.variants
         #   @return [Array(String, Array<String>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
       end
diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb
index c6f47865..d397fba1 100644
--- a/lib/openai/models/completion_create_params.rb
+++ b/lib/openai/models/completion_create_params.rb
@@ -281,10 +281,13 @@ module Prompt
         # @!method self.variants
         #   @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
 
+        # @type [OpenAI::Internal::Type::Converter]
         IntegerArray = OpenAI::Internal::Type::ArrayOf[Integer]
 
+        # @type [OpenAI::Internal::Type::Converter]
         ArrayOfToken2DArray =
           OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]]
       end
@@ -302,6 +305,7 @@ module Stop
 
         # @!method self.variants
         #   @return [Array(String, Array<String>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
       end
     end
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb
index f3fe5c40..bff4d83f 100644
--- a/lib/openai/models/embedding_create_params.rb
+++ b/lib/openai/models/embedding_create_params.rb
@@ -99,10 +99,13 @@ module Input
         # @!method self.variants
         #   @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
 
+        # @type [OpenAI::Internal::Type::Converter]
         IntegerArray = OpenAI::Internal::Type::ArrayOf[Integer]
 
+        # @type [OpenAI::Internal::Type::Converter]
         ArrayOfToken2DArray =
           OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]]
       end
diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb
index 33526447..e42475a3 100644
--- a/lib/openai/models/image_edit_params.rb
+++ b/lib/openai/models/image_edit_params.rb
@@ -125,6 +125,7 @@ module Image
 
         # @!method self.variants
         #   @return [Array(StringIO, Array<StringIO>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput]
       end
diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb
index b6231c78..ae82c207 100644
--- a/lib/openai/models/moderation_create_params.rb
+++ b/lib/openai/models/moderation_create_params.rb
@@ -51,8 +51,10 @@ module Input
         # @!method self.variants
         #   @return [Array(String, Array<String>, Array<OpenAI::Models::ModerationMultiModalInput>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
 
+        # @type [OpenAI::Internal::Type::Converter]
         ModerationMultiModalInputArray =
           OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }]
       end
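The recurring `+ # @type [OpenAI::Internal::Type::Converter]` lines annotate what each alias constant actually holds at runtime: `ArrayOf[...]` returns a converter object, not a Ruby `Array`, and the `@type` tag records the converter interface for editors and type-aware tooling. A self-contained sketch with a toy converter standing in for `ArrayOf` (the gem's real internals differ):

    # Toy stand-in for OpenAI::Internal::Type::ArrayOf: the constant ends up
    # holding a coercer object, which is what the @type tag documents.
    class ToyArrayOf
      def initialize(type)
        @type = type
      end

      # Coerce every element of the input to the target type.
      def coerce(values)
        values.map { |v| @type == Integer ? Integer(v) : String(v) }
      end
    end

    # @type [ToyArrayOf]
    IntegerArray = ToyArrayOf.new(Integer)

    p IntegerArray.coerce(%w[1 2 3]) # => [1, 2, 3]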
diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb
index 5eaa270d..17447b6d 100644
--- a/lib/openai/models/vector_store_search_params.rb
+++ b/lib/openai/models/vector_store_search_params.rb
@@ -66,6 +66,7 @@ module Query
 
         # @!method self.variants
         #   @return [Array(String, Array<String>)]
 
+        # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
       end
diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb
index 4bd64d4b..d4a15822 100644
--- a/lib/openai/request_options.rb
+++ b/lib/openai/request_options.rb
@@ -65,10 +65,9 @@ def self.validate!(opts)
     # @return [Float, nil]
     optional :timeout, Float
 
-    # @!parse
-    #   # @!method initialize(values = {})
-    #   #   Returns a new instance of RequestOptions.
-    #   #
-    #   #   @param values [Hash{Symbol=>Object}]
+    # @!method initialize(values = {})
+    #   Returns a new instance of RequestOptions.
+    #
+    #   @param values [Hash{Symbol=>Object}]
   end
 end
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index 67ca7b95..5d78ca13 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -151,10 +151,7 @@ module OpenAI
 
           def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
 
-          type message_content_part_param_array =
-            ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
-
-          MessageContentPartParamArray: message_content_part_param_array
+          MessageContentPartParamArray: OpenAI::Internal::Type::Converter
         end
 
         type role = :user | :assistant
diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs
index 96d0d366..08c6bdd2 100644
--- a/sig/openai/models/beta/thread_create_params.rbs
+++ b/sig/openai/models/beta/thread_create_params.rbs
@@ -67,10 +67,7 @@ module OpenAI
 
           def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
 
-          type message_content_part_param_array =
-            ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
-
-          MessageContentPartParamArray: message_content_part_param_array
+          MessageContentPartParamArray: OpenAI::Internal::Type::Converter
         end
 
         type role = :user | :assistant
diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs
index 0b4397bf..fd5c2c49 100644
--- a/sig/openai/models/beta/threads/message_create_params.rbs
+++ b/sig/openai/models/beta/threads/message_create_params.rbs
@@ -42,10 +42,7 @@ module OpenAI
 
           def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
 
-          type message_content_part_param_array =
-            ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
-
-          MessageContentPartParamArray: message_content_part_param_array
+          MessageContentPartParamArray: OpenAI::Internal::Type::Converter
         end
 
         type role = :user | :assistant
diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs
index f5462f0c..da318adb 100644
--- a/sig/openai/models/beta/threads/run_create_params.rbs
+++ b/sig/openai/models/beta/threads/run_create_params.rbs
@@ -126,10 +126,7 @@ module OpenAI
 
           def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
 
-          type message_content_part_param_array =
-            ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
-
-          MessageContentPartParamArray: message_content_part_param_array
+          MessageContentPartParamArray: OpenAI::Internal::Type::Converter
         end
 
         type role = :user | :assistant
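The signature files make the matching correction. The old RBS aliased each constant to an array type (`StringArray: string_array`, i.e. `::Array[String]`), but the stored object is a converter, so a checker like Steep would accept array calls that fail at runtime. A plain-Ruby illustration of that mismatch (hypothetical `Converter` class):

    class Converter; end

    # The old signatures claimed this constant was ::Array[String]; it is not.
    StringArray = Converter.new

    begin
      StringArray.each { |s| puts s } # would type-check under the old alias...
    rescue NoMethodError => e
      puts e.message                  # ...but raises at runtime.
    end

Declaring the constants as `OpenAI::Internal::Type::Converter` keeps the static signatures aligned with the runtime values.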
b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index 4f92813c..6016de8d 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -76,10 +76,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] - type array_of_content_part_array = - ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] - - ArrayOfContentPartArray: array_of_content_part_array + ArrayOfContentPartArray: OpenAI::Internal::Type::Converter end type function_call = { arguments: String, name: String } diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index b0dfdf86..ed1e4afb 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -35,10 +35,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] - type chat_completion_content_part_text_array = - ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - - ChatCompletionContentPartTextArray: chat_completion_content_part_text_array + ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index 1f5c681a..ad9cdb21 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -29,10 +29,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] - type chat_completion_content_part_text_array = - ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - - ChatCompletionContentPartTextArray: chat_completion_content_part_text_array + ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index 12811868..81cdf351 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -35,10 +35,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] - type chat_completion_content_part_text_array = - ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - - ChatCompletionContentPartTextArray: chat_completion_content_part_text_array + ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 7808c0a9..5b737943 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -33,10 +33,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] - type chat_completion_content_part_text_array = - ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] - - ChatCompletionContentPartTextArray: chat_completion_content_part_text_array + ChatCompletionContentPartTextArray: 
OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index ea9b16d6..5f5c1a1c 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -35,10 +35,7 @@ module OpenAI def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]] - type chat_completion_content_part_array = - ::Array[OpenAI::Models::Chat::chat_completion_content_part] - - ChatCompletionContentPartArray: chat_completion_content_part_array + ChatCompletionContentPartArray: OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 02d22187..1eb9953b 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -263,9 +263,7 @@ module OpenAI def self?.variants: -> [String, ::Array[String]] - type string_array = ::Array[String] - - StringArray: string_array + StringArray: OpenAI::Internal::Type::Converter end type web_search_options = diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index d4b9afbe..184a3c57 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -108,17 +108,11 @@ module OpenAI def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] - type string_array = ::Array[String] + StringArray: OpenAI::Internal::Type::Converter - StringArray: string_array + IntegerArray: OpenAI::Internal::Type::Converter - type integer_array = ::Array[Integer] - - IntegerArray: integer_array - - type array_of_token2_d_array = ::Array[::Array[Integer]] - - ArrayOfToken2DArray: array_of_token2_d_array + ArrayOfToken2DArray: OpenAI::Internal::Type::Converter end type stop = (String | ::Array[String])? 
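The `StringArray`, `IntegerArray`, and `ArrayOfToken2DArray` constants in these signatures are runtime converters built with `OpenAI::Internal::Type::ArrayOf`, as the `lib/` side of this patch shows, so typing them as the opaque `OpenAI::Internal::Type::Converter` in RBS matches what they actually are. At the call site, these converters are what let a union-typed parameter such as `prompt` or `stop` accept any of its declared variants. A minimal Ruby sketch of that behavior; the client setup and model name are illustrative assumptions, not part of this patch:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# `prompt` is a union of String | ::Array[String] | ::Array[Integer] |
# ::Array[::Array[Integer]]; the matching converter (StringArray,
# IntegerArray, or ArrayOfToken2DArray) coerces whichever variant is given.
client.completions.create(model: "gpt-3.5-turbo-instruct", prompt: "Say hello")
client.completions.create(model: "gpt-3.5-turbo-instruct", prompt: ["Say hello", "Say goodbye"])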
@@ -128,9 +122,7 @@ module OpenAI def self?.variants: -> [String, ::Array[String]] - type string_array = ::Array[String] - - StringArray: string_array + StringArray: OpenAI::Internal::Type::Converter end end end diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index fc247341..16c3bd6d 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -51,17 +51,11 @@ module OpenAI def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]] - type string_array = ::Array[String] + StringArray: OpenAI::Internal::Type::Converter - StringArray: string_array + IntegerArray: OpenAI::Internal::Type::Converter - type integer_array = ::Array[Integer] - - IntegerArray: integer_array - - type array_of_token2_d_array = ::Array[::Array[Integer]] - - ArrayOfToken2DArray: array_of_token2_d_array + ArrayOfToken2DArray: OpenAI::Internal::Type::Converter end type model = String | OpenAI::Models::embedding_model diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 95e02a84..bf702f1e 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -69,9 +69,7 @@ module OpenAI def self?.variants: -> [StringIO, ::Array[StringIO]] - type string_array = ::Array[Pathname | StringIO | IO | OpenAI::FilePart] - - StringArray: string_array + StringArray: OpenAI::Internal::Type::Converter end type model = String | OpenAI::Models::image_model diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index e8c1e576..e6f8668e 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -37,14 +37,9 @@ module OpenAI def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] - type string_array = ::Array[String] + StringArray: OpenAI::Internal::Type::Converter - StringArray: string_array - - type moderation_multi_modal_input_array = - ::Array[OpenAI::Models::moderation_multi_modal_input] - - ModerationMultiModalInputArray: moderation_multi_modal_input_array + ModerationMultiModalInputArray: OpenAI::Internal::Type::Converter end type model = String | OpenAI::Models::moderation_model diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 823cd7bc..9d571eee 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -54,9 +54,7 @@ module OpenAI def self?.variants: -> [String, ::Array[String]] - type string_array = ::Array[String] - - StringArray: string_array + StringArray: OpenAI::Internal::Type::Converter end type filters = diff --git a/test/openai/file_part_test.rb b/test/openai/file_part_test.rb index 4136b1b6..96abf7cf 100644 --- a/test/openai/file_part_test.rb +++ b/test/openai/file_part_test.rb @@ -8,5 +8,6 @@ def test_to_json filepart = OpenAI::FilePart.new(StringIO.new(text)) assert_equal(text.to_json, filepart.to_json) + assert_equal(text.to_yaml, filepart.to_yaml) end end From 7377db894bc62a3cc7e0213c2fd612252531cb30 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 26 Apr 2025 13:23:36 +0000 Subject: [PATCH 165/295] chore(internal): annotate request options with type aliases in sorbet --- lib/openai/models/function_parameters.rb | 1 + 
lib/openai/models/metadata.rb | 1 + lib/openai/models/responses/response_input.rb | 1 + .../response_input_message_content_list.rb | 1 + rbi/openai/internal/transport/base_client.rbi | 4 +-- .../internal/type/request_parameters.rbi | 2 +- rbi/openai/request_options.rbi | 2 ++ rbi/openai/resources/audio/speech.rbi | 2 +- rbi/openai/resources/audio/transcriptions.rbi | 4 +-- rbi/openai/resources/audio/translations.rbi | 2 +- rbi/openai/resources/batches.rbi | 24 +++------------- rbi/openai/resources/beta/assistants.rbi | 17 ++++------- rbi/openai/resources/beta/threads.rbi | 21 ++++---------- .../resources/beta/threads/messages.rbi | 18 ++++-------- rbi/openai/resources/beta/threads/runs.rbi | 24 ++++++---------- .../resources/beta/threads/runs/steps.rbi | 4 +-- rbi/openai/resources/chat/completions.rbi | 18 ++++-------- .../resources/chat/completions/messages.rbi | 2 +- rbi/openai/resources/completions.rbi | 4 +-- rbi/openai/resources/embeddings.rbi | 2 +- rbi/openai/resources/evals.rbi | 20 ++++--------- rbi/openai/resources/evals/runs.rbi | 22 ++++----------- .../resources/evals/runs/output_items.rbi | 4 +-- rbi/openai/resources/files.rbi | 28 ++++--------------- .../fine_tuning/checkpoints/permissions.rbi | 6 ++-- rbi/openai/resources/fine_tuning/jobs.rbi | 16 ++++------- .../fine_tuning/jobs/checkpoints.rbi | 2 +- rbi/openai/resources/images.rbi | 6 ++-- rbi/openai/resources/models.rbi | 21 ++------------ rbi/openai/resources/moderations.rbi | 2 +- rbi/openai/resources/responses.rbi | 14 +++------- .../resources/responses/input_items.rbi | 2 +- rbi/openai/resources/uploads.rbi | 12 ++------ rbi/openai/resources/uploads/parts.rbi | 2 +- rbi/openai/resources/vector_stores.rbi | 21 ++++---------- .../resources/vector_stores/file_batches.rbi | 16 +++-------- rbi/openai/resources/vector_stores/files.rbi | 24 ++++------------ sig/openai/models/function_parameters.rbs | 2 +- sig/openai/models/metadata.rbs | 2 +- .../models/responses/response_input.rbs | 2 +- .../response_input_message_content_list.rbs | 2 +- 41 files changed, 115 insertions(+), 265 deletions(-) diff --git a/lib/openai/models/function_parameters.rb b/lib/openai/models/function_parameters.rb index c32b337a..efd72125 100644 --- a/lib/openai/models/function_parameters.rb +++ b/lib/openai/models/function_parameters.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @type [OpenAI::Internal::Type::Converter] FunctionParameters = OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] end end diff --git a/lib/openai/models/metadata.rb b/lib/openai/models/metadata.rb index c6a6935d..7930ffac 100644 --- a/lib/openai/models/metadata.rb +++ b/lib/openai/models/metadata.rb @@ -2,6 +2,7 @@ module OpenAI module Models + # @type [OpenAI::Internal::Type::Converter] Metadata = OpenAI::Internal::Type::HashOf[String] end end diff --git a/lib/openai/models/responses/response_input.rb b/lib/openai/models/responses/response_input.rb index 591bab05..aa4de811 100644 --- a/lib/openai/models/responses/response_input.rb +++ b/lib/openai/models/responses/response_input.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Responses + # @type [OpenAI::Internal::Type::Converter] ResponseInput = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputItem }] end diff --git a/lib/openai/models/responses/response_input_message_content_list.rb b/lib/openai/models/responses/response_input_message_content_list.rb index a1362365..888f3f1c 100644 --- a/lib/openai/models/responses/response_input_message_content_list.rb +++ 
b/lib/openai/models/responses/response_input_message_content_list.rb @@ -3,6 +3,7 @@ module OpenAI module Models module Responses + # @type [OpenAI::Internal::Type::Converter] ResponseInputMessageContentList = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] end diff --git a/rbi/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi index fb87b782..612daee4 100644 --- a/rbi/openai/internal/transport/base_client.rbi +++ b/rbi/openai/internal/transport/base_client.rbi @@ -38,7 +38,7 @@ module OpenAI OpenAI::Internal::Type::BaseModel]] ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + options: T.nilable(OpenAI::RequestOpts) } end @@ -194,7 +194,7 @@ module OpenAI OpenAI::Internal::Type::BaseModel]] ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + options: T.nilable(OpenAI::RequestOpts) ) .returns(T.anything) end diff --git a/rbi/openai/internal/type/request_parameters.rbi b/rbi/openai/internal/type/request_parameters.rbi index 2a1c8d7f..1d86b7ab 100644 --- a/rbi/openai/internal/type/request_parameters.rbi +++ b/rbi/openai/internal/type/request_parameters.rbi @@ -6,7 +6,7 @@ module OpenAI # @api private module RequestParameters # Options to specify HTTP behaviour for this request. - sig { returns(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) } + sig { returns(OpenAI::RequestOpts) } attr_accessor :request_options # @api private diff --git a/rbi/openai/request_options.rbi b/rbi/openai/request_options.rbi index 9c3df5a0..2dc1eb77 100644 --- a/rbi/openai/request_options.rbi +++ b/rbi/openai/request_options.rbi @@ -1,6 +1,8 @@ # typed: strong module OpenAI + RequestOpts = T.type_alias { T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) } + # Specify HTTP behaviour to use for a specific request. These options supplement # or override those provided at the client level. 
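The new `OpenAI::RequestOpts` alias makes explicit what the resource signatures in this patch now declare: `request_options:` accepts either an `OpenAI::RequestOptions` instance or a plain hash of the same fields. A minimal sketch of both spellings, assuming a configured `client`; `timeout` is the `Float` field declared on `RequestOptions` earlier in this series, and the model ID is illustrative:

# Hash form: validated and coerced against RequestOptions.
client.models.retrieve("gpt-4o", request_options: {timeout: 30.0})

# Object form: construct the options explicitly.
opts = OpenAI::RequestOptions.new(timeout: 30.0)
client.models.retrieve("gpt-4o", request_options: opts)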
# diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi index 2717898a..16739bc4 100644 --- a/rbi/openai/resources/audio/speech.rbi +++ b/rbi/openai/resources/audio/speech.rbi @@ -13,7 +13,7 @@ module OpenAI instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(StringIO) end diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index 45316428..b8fb2b38 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -19,7 +19,7 @@ module OpenAI temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)) end @@ -82,7 +82,7 @@ module OpenAI temperature: Float, timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi index b9130883..b9675bf8 100644 --- a/rbi/openai/resources/audio/translations.rbi +++ b/rbi/openai/resources/audio/translations.rbi @@ -12,7 +12,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)) end diff --git a/rbi/openai/resources/batches.rbi b/rbi/openai/resources/batches.rbi index 390a8fa9..f58e4e80 100644 --- a/rbi/openai/resources/batches.rbi +++ b/rbi/openai/resources/batches.rbi @@ -10,7 +10,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Batch) end @@ -43,13 +43,7 @@ module OpenAI request_options: {} ); end # Retrieves a batch. - sig do - params( - batch_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Batch) - end + sig { params(batch_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Batch) } def retrieve( # The ID of the batch to retrieve. batch_id, @@ -57,11 +51,7 @@ module OpenAI ); end # List your organization's batches. sig do - params( - after: String, - limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(after: String, limit: Integer, request_options: OpenAI::RequestOpts) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Batch]) end def list( @@ -78,13 +68,7 @@ module OpenAI # Cancels an in-progress batch. 
The batch will be in status `cancelling` for up to # 10 minutes, before changing to `cancelled`, where it will have partial results # (if any) available in the output file. - sig do - params( - batch_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Batch) - end + sig { params(batch_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Batch) } def cancel( # The ID of the batch to cancel. batch_id, diff --git a/rbi/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi index 580fbcba..f58ad07a 100644 --- a/rbi/openai/resources/beta/assistants.rbi +++ b/rbi/openai/resources/beta/assistants.rbi @@ -33,7 +33,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Assistant) end @@ -109,11 +109,7 @@ module OpenAI ); end # Retrieves an assistant. sig do - params( - assistant_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Beta::Assistant) + params(assistant_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Beta::Assistant) end def retrieve( # The ID of the assistant to retrieve. @@ -150,7 +146,7 @@ module OpenAI ) ], top_p: T.nilable(Float), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Assistant) end @@ -233,7 +229,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant]) end @@ -258,10 +254,7 @@ module OpenAI ); end # Delete an assistant. sig do - params( - assistant_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(assistant_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::AssistantDeleted) end def delete( diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index 316bbca4..aa3e43b7 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -16,7 +16,7 @@ module OpenAI messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)], metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Thread) end @@ -39,13 +39,7 @@ module OpenAI request_options: {} ); end # Retrieves a thread. - sig do - params( - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Beta::Thread) - end + sig { params(thread_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Beta::Thread) } def retrieve( # The ID of the thread to retrieve. 
thread_id, @@ -57,7 +51,7 @@ module OpenAI thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Thread) end @@ -80,10 +74,7 @@ module OpenAI ); end # Delete a thread. sig do - params( - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(thread_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::ThreadDeleted) end def delete( @@ -137,7 +128,7 @@ module OpenAI T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -282,7 +273,7 @@ module OpenAI T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/openai/resources/beta/threads/messages.rbi b/rbi/openai/resources/beta/threads/messages.rbi index 2d12cb7e..f17e3cfe 100644 --- a/rbi/openai/resources/beta/threads/messages.rbi +++ b/rbi/openai/resources/beta/threads/messages.rbi @@ -25,7 +25,7 @@ module OpenAI T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::AnyHash)] ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Message) end @@ -55,11 +55,7 @@ module OpenAI ); end # Retrieve a message. sig do - params( - message_id: String, - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(message_id: String, thread_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::Threads::Message) end def retrieve( @@ -76,7 +72,7 @@ module OpenAI message_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Message) end @@ -103,7 +99,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message]) end @@ -133,11 +129,7 @@ module OpenAI ); end # Deletes a message. 
sig do - params( - message_id: String, - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(message_id: String, thread_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::Threads::MessageDeleted) end def delete( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index e3a1216a..0148ff01 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -60,7 +60,7 @@ module OpenAI T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -230,7 +230,7 @@ module OpenAI T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) ), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::Stream[ @@ -379,11 +379,7 @@ module OpenAI ); end # Retrieves a run. sig do - params( - run_id: String, - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(run_id: String, thread_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::Threads::Run) end def retrieve( @@ -400,7 +396,7 @@ module OpenAI run_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -427,7 +423,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run]) end @@ -454,11 +450,7 @@ module OpenAI ); end # Cancels a run that is `in_progress`. 
sig do - params( - run_id: String, - thread_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(run_id: String, thread_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Beta::Threads::Run) end def cancel( @@ -481,7 +473,7 @@ module OpenAI thread_id: String, tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Run) end @@ -513,7 +505,7 @@ module OpenAI thread_id: String, tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::Stream[ diff --git a/rbi/openai/resources/beta/threads/runs/steps.rbi b/rbi/openai/resources/beta/threads/runs/steps.rbi index 949e7d14..4ffb638c 100644 --- a/rbi/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/openai/resources/beta/threads/runs/steps.rbi @@ -13,7 +13,7 @@ module OpenAI thread_id: String, run_id: String, include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) end @@ -45,7 +45,7 @@ module OpenAI include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], limit: Integer, order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 6a7ba0a2..9c73209e 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -82,7 +82,7 @@ module OpenAI user: String, web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -355,7 +355,7 @@ module OpenAI user: String, web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) end @@ -556,10 +556,7 @@ module OpenAI # Get a stored chat completion. Only Chat Completions that have been created with # the `store` parameter set to `true` will be returned. 
sig do - params( - completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(completion_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Chat::ChatCompletion) end def retrieve( @@ -574,7 +571,7 @@ module OpenAI params( completion_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Chat::ChatCompletion) end @@ -599,7 +596,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion]) end @@ -622,10 +619,7 @@ module OpenAI # Delete a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be deleted. sig do - params( - completion_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(completion_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Chat::ChatCompletionDeleted) end def delete( diff --git a/rbi/openai/resources/chat/completions/messages.rbi b/rbi/openai/resources/chat/completions/messages.rbi index 53d6538e..c0c82fe6 100644 --- a/rbi/openai/resources/chat/completions/messages.rbi +++ b/rbi/openai/resources/chat/completions/messages.rbi @@ -13,7 +13,7 @@ module OpenAI after: String, limit: Integer, order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end diff --git a/rbi/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi index 82e27795..b3576715 100644 --- a/rbi/openai/resources/completions.rbi +++ b/rbi/openai/resources/completions.rbi @@ -26,7 +26,7 @@ module OpenAI top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Completion) end @@ -164,7 +164,7 @@ module OpenAI top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion]) end diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index ae6d042f..26fa538b 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -11,7 +11,7 @@ module OpenAI dimensions: Integer, encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::CreateEmbeddingResponse) end diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index ccf5a9f3..0f330e56 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -30,7 +30,7 @@ module OpenAI ], metadata: T.nilable(T::Hash[Symbol, String]), 
name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::EvalCreateResponse) end @@ -52,11 +52,7 @@ module OpenAI ); end # Get an evaluation by ID. sig do - params( - eval_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::EvalRetrieveResponse) + params(eval_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::EvalRetrieveResponse) end def retrieve( # The ID of the evaluation to retrieve. @@ -69,7 +65,7 @@ module OpenAI eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::EvalUpdateResponse) end @@ -94,7 +90,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::EvalListParams::Order::OrSymbol, order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::EvalListResponse]) end @@ -112,13 +108,7 @@ module OpenAI request_options: {} ); end # Delete an evaluation. - sig do - params( - eval_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::EvalDeleteResponse) - end + sig { params(eval_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::EvalDeleteResponse) } def delete( # The ID of the evaluation to delete. eval_id, diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 014d5734..86dad155 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -19,7 +19,7 @@ module OpenAI ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Evals::RunCreateResponse) end @@ -41,11 +41,7 @@ module OpenAI ); end # Get an evaluation run by ID. sig do - params( - run_id: String, - eval_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Evals::RunRetrieveResponse) end def retrieve( @@ -63,7 +59,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::RunListResponse]) end @@ -84,11 +80,7 @@ module OpenAI ); end # Delete an eval run. sig do - params( - run_id: String, - eval_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Evals::RunDeleteResponse) end def delete( @@ -100,11 +92,7 @@ module OpenAI ); end # Cancel an ongoing evaluation run. 
sig do - params( - run_id: String, - eval_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::Evals::RunCancelResponse) end def cancel( diff --git a/rbi/openai/resources/evals/runs/output_items.rbi b/rbi/openai/resources/evals/runs/output_items.rbi index 16053fa0..85f0767a 100644 --- a/rbi/openai/resources/evals/runs/output_items.rbi +++ b/rbi/openai/resources/evals/runs/output_items.rbi @@ -11,7 +11,7 @@ module OpenAI output_item_id: String, eval_id: String, run_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse) end @@ -33,7 +33,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::Runs::OutputItemListResponse]) end diff --git a/rbi/openai/resources/files.rbi b/rbi/openai/resources/files.rbi index aa872185..4afa8467 100644 --- a/rbi/openai/resources/files.rbi +++ b/rbi/openai/resources/files.rbi @@ -28,7 +28,7 @@ module OpenAI params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::FileObject) end @@ -43,13 +43,7 @@ module OpenAI request_options: {} ); end # Returns information about a specific file. - sig do - params( - file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::FileObject) - end + sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::FileObject) } def retrieve( # The ID of the file to use for this request. file_id, @@ -62,7 +56,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::FileListParams::Order::OrSymbol, purpose: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FileObject]) end @@ -83,26 +77,14 @@ module OpenAI request_options: {} ); end # Delete a file. - sig do - params( - file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::FileDeleted) - end + sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::FileDeleted) } def delete( # The ID of the file to use for this request. file_id, request_options: {} ); end # Returns the contents of the specified file. - sig do - params( - file_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(StringIO) - end + sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(StringIO) } def content( # The ID of the file to use for this request. 
file_id, diff --git a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi index 64fb5a6a..1fa2933b 100644 --- a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi +++ b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -13,7 +13,7 @@ module OpenAI params( fine_tuned_model_checkpoint: String, project_ids: T::Array[String], - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::Page[OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse]) end @@ -35,7 +35,7 @@ module OpenAI limit: Integer, order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, project_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse) end @@ -60,7 +60,7 @@ module OpenAI params( permission_id: String, fine_tuned_model_checkpoint: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse) end diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index b39d1d85..c0e05100 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -27,7 +27,7 @@ module OpenAI seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::FineTuning::FineTuningJob) end @@ -96,10 +96,7 @@ module OpenAI # # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do - params( - fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(fine_tuning_job_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::FineTuning::FineTuningJob) end def retrieve( @@ -113,7 +110,7 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end @@ -129,10 +126,7 @@ module OpenAI ); end # Immediately cancel a fine-tune job. 
sig do - params( - fine_tuning_job_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(fine_tuning_job_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::FineTuning::FineTuningJob) end def cancel( @@ -146,7 +140,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end diff --git a/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi index dfc6cee6..bf3ced26 100644 --- a/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -11,7 +11,7 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index f50d6486..13ac0077 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -12,7 +12,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::ImagesResponse) end @@ -57,7 +57,7 @@ module OpenAI response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::ImagesResponse) end @@ -116,7 +116,7 @@ module OpenAI size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::ImagesResponse) end diff --git a/rbi/openai/resources/models.rbi b/rbi/openai/resources/models.rbi index 252a1dca..e403a7e3 100644 --- a/rbi/openai/resources/models.rbi +++ b/rbi/openai/resources/models.rbi @@ -5,13 +5,7 @@ module OpenAI class Models # Retrieves a model instance, providing basic information about the model such as # the owner and permissioning. - sig do - params( - model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Model) - end + sig { params(model: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Model) } def retrieve( # The ID of the model to use for this request model, @@ -19,21 +13,12 @@ module OpenAI ); end # Lists the currently available models, and provides basic information about each # one such as the owner and availability. 
- sig do - params(request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))) - .returns(OpenAI::Internal::Page[OpenAI::Models::Model]) - end + sig { params(request_options: OpenAI::RequestOpts).returns(OpenAI::Internal::Page[OpenAI::Models::Model]) } def list(request_options: {}); end # Delete a fine-tuned model. You must have the Owner role in your organization to # delete a model. - sig do - params( - model: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::ModelDeleted) - end + sig { params(model: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::ModelDeleted) } def delete( # The model to delete model, diff --git a/rbi/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi index b068d8e7..fe3696e7 100644 --- a/rbi/openai/resources/moderations.rbi +++ b/rbi/openai/resources/moderations.rbi @@ -19,7 +19,7 @@ module OpenAI ] ), model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index b67ed0d1..ba349b4b 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -75,7 +75,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Responses::Response) end @@ -276,7 +276,7 @@ module OpenAI truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), user: String, stream: T.noreturn, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::Stream[ @@ -454,7 +454,7 @@ module OpenAI params( response_id: String, include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Responses::Response) end @@ -467,13 +467,7 @@ module OpenAI request_options: {} ); end # Deletes a model response with the given ID. - sig do - params( - response_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .void - end + sig { params(response_id: String, request_options: OpenAI::RequestOpts).void } def delete( # The ID of the response to delete. 
response_id, diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index 0c70b7b0..b2d6a042 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -13,7 +13,7 @@ module OpenAI include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns( OpenAI::Internal::CursorPage[ diff --git a/rbi/openai/resources/uploads.rbi b/rbi/openai/resources/uploads.rbi index c397fdbf..ef17a778 100644 --- a/rbi/openai/resources/uploads.rbi +++ b/rbi/openai/resources/uploads.rbi @@ -31,7 +31,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Upload) end @@ -53,13 +53,7 @@ module OpenAI request_options: {} ); end # Cancels the Upload. No Parts may be added after an Upload is cancelled. - sig do - params( - upload_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::Upload) - end + sig { params(upload_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Upload) } def cancel( # The ID of the Upload. upload_id, @@ -83,7 +77,7 @@ module OpenAI upload_id: String, part_ids: T::Array[String], md5: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Upload) end diff --git a/rbi/openai/resources/uploads/parts.rbi b/rbi/openai/resources/uploads/parts.rbi index 867a760a..4534662a 100644 --- a/rbi/openai/resources/uploads/parts.rbi +++ b/rbi/openai/resources/uploads/parts.rbi @@ -19,7 +19,7 @@ module OpenAI params( upload_id: String, data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::Uploads::UploadPart) end diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 7658101e..6c6be823 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -21,7 +21,7 @@ module OpenAI file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::VectorStore) end @@ -47,13 +47,7 @@ module OpenAI request_options: {} ); end # Retrieves a vector store. - sig do - params( - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) - .returns(OpenAI::Models::VectorStore) - end + sig { params(vector_store_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::VectorStore) } def retrieve( # The ID of the vector store to retrieve. 
vector_store_id, @@ -66,7 +60,7 @@ module OpenAI expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::VectorStore) end @@ -93,7 +87,7 @@ module OpenAI before: String, limit: Integer, order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore]) end @@ -118,10 +112,7 @@ module OpenAI ); end # Delete a vector store. sig do - params( - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(vector_store_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::VectorStoreDeleted) end def delete( @@ -139,7 +130,7 @@ module OpenAI max_num_results: Integer, ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash), rewrite_query: T::Boolean, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse]) end diff --git a/rbi/openai/resources/vector_stores/file_batches.rbi b/rbi/openai/resources/vector_stores/file_batches.rbi index 8a6a5231..3ef38403 100644 --- a/rbi/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/openai/resources/vector_stores/file_batches.rbi @@ -15,7 +15,7 @@ module OpenAI OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end @@ -39,11 +39,7 @@ module OpenAI ); end # Retrieves a vector store file batch. sig do - params( - batch_id: String, - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(batch_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end def retrieve( @@ -56,11 +52,7 @@ module OpenAI # Cancel a vector store file batch. This attempts to cancel the processing of # files in this batch as soon as possible. 
sig do - params( - batch_id: String, - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(batch_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) end def cancel( @@ -80,7 +72,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end diff --git a/rbi/openai/resources/vector_stores/files.rbi b/rbi/openai/resources/vector_stores/files.rbi index eb8c2391..e70c0adf 100644 --- a/rbi/openai/resources/vector_stores/files.rbi +++ b/rbi/openai/resources/vector_stores/files.rbi @@ -17,7 +17,7 @@ module OpenAI OpenAI::Internal::AnyHash, OpenAI::Models::StaticFileChunkingStrategyObjectParam ), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -41,11 +41,7 @@ module OpenAI ); end # Retrieves a vector store file. sig do - params( - file_id: String, - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end def retrieve( @@ -61,7 +57,7 @@ module OpenAI file_id: String, vector_store_id: String, attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Models::VectorStores::VectorStoreFile) end @@ -87,7 +83,7 @@ module OpenAI filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) + request_options: OpenAI::RequestOpts ) .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end @@ -119,11 +115,7 @@ module OpenAI # [delete file](https://platform.openai.com/docs/api-reference/files/delete) # endpoint. sig do - params( - file_id: String, - vector_store_id: String, - request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - ) + params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted) end def delete( @@ -135,11 +127,7 @@ module OpenAI ); end # Retrieve the parsed contents of a vector store file. 
         sig do
-          params(
-            file_id: String,
-            vector_store_id: String,
-            request_options: T.nilable(T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash))
-          )
+          params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts)
             .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStores::FileContentResponse])
         end
         def content(
diff --git a/sig/openai/models/function_parameters.rbs b/sig/openai/models/function_parameters.rbs
index 6dcb5bc1..9370883a 100644
--- a/sig/openai/models/function_parameters.rbs
+++ b/sig/openai/models/function_parameters.rbs
@@ -2,6 +2,6 @@ module OpenAI
   module Models
     type function_parameters = ::Hash[Symbol, top]
 
-    FunctionParameters: function_parameters
+    FunctionParameters: OpenAI::Internal::Type::Converter
   end
 end
diff --git a/sig/openai/models/metadata.rbs b/sig/openai/models/metadata.rbs
index 304e17b7..74e531c1 100644
--- a/sig/openai/models/metadata.rbs
+++ b/sig/openai/models/metadata.rbs
@@ -2,6 +2,6 @@ module OpenAI
   module Models
     type metadata = ::Hash[Symbol, String]?
 
-    Metadata: metadata
+    Metadata: OpenAI::Internal::Type::Converter
   end
 end
diff --git a/sig/openai/models/responses/response_input.rbs b/sig/openai/models/responses/response_input.rbs
index 2ca95b4d..494efa0c 100644
--- a/sig/openai/models/responses/response_input.rbs
+++ b/sig/openai/models/responses/response_input.rbs
@@ -4,7 +4,7 @@ module OpenAI
       type response_input =
         ::Array[OpenAI::Models::Responses::response_input_item]
 
-      ResponseInput: response_input
+      ResponseInput: OpenAI::Internal::Type::Converter
     end
   end
 end
diff --git a/sig/openai/models/responses/response_input_message_content_list.rbs b/sig/openai/models/responses/response_input_message_content_list.rbs
index d269f8ed..38dd2e85 100644
--- a/sig/openai/models/responses/response_input_message_content_list.rbs
+++ b/sig/openai/models/responses/response_input_message_content_list.rbs
@@ -4,7 +4,7 @@ module OpenAI
       type response_input_message_content_list =
         ::Array[OpenAI::Models::Responses::response_input_content]
 
-      ResponseInputMessageContentList: response_input_message_content_list
+      ResponseInputMessageContentList: OpenAI::Internal::Type::Converter
     end
   end
 end

From 42b30c9850e2b509dd11f79bbeaa4ba2b731c6de Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 28 Apr 2025 14:14:25 +0000
Subject: [PATCH 166/295] chore: add generator safe directory

---
 CONTRIBUTING.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6f93458e..97d0df78 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -14,7 +14,7 @@ This will install all the required dependencies.
 
 ## Modifying/Adding code
 
-Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never modify the contents `examples/` directory.
+Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never modify the contents of the `lib/openai/helpers/` and `examples/` directories.
 
 ## Adding and running examples

From 083e9fbaded7ae4a5708444f1bb96774b0584bff Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 29 Apr 2025 15:14:57 +0000
Subject: [PATCH 167/295] chore: loosen rubocop rules that don't always make sense

---
 .rubocop.yml                                 | 11 +++++++++++
 Rakefile                                     |  8 +++++++-
 test/openai/internal/type/base_model_test.rb |  2 --
 3 files changed, 18 insertions(+), 3 deletions(-)

diff --git a/.rubocop.yml b/.rubocop.yml
index c88b94f1..315db278 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -64,11 +64,19 @@ Layout/MultilineMethodParameterLineBreaks:
 Layout/SpaceInsideHashLiteralBraces:
   EnforcedStyle: no_space
 
+Lint/BooleanSymbol:
+  Enabled: false
+
 # This option occasionally mangles identifier names
 Lint/DeprecatedConstants:
   Exclude:
     - "**/*.rbi"
 
+# We use pattern assertions in tests to ensure correctness.
+Lint/DuplicateMatchPattern:
+  Exclude:
+    - "test/**/*"
+
 # Fairly useful in tests for pattern assertions.
 Lint/EmptyInPattern:
   Exclude:
@@ -119,6 +127,9 @@ Metrics/ParameterLists:
 Metrics/PerceivedComplexity:
   Enabled: false
 
+Naming/AccessorMethodName:
+  Enabled: false
+
 # Need to preserve block identifier for documentation.
 Naming/BlockForwarding:
   Enabled: false
diff --git a/Rakefile b/Rakefile
index 2e5368f2..97b1805c 100644
--- a/Rakefile
+++ b/Rakefile
@@ -39,7 +39,13 @@ xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --]
 
 desc("Lint `*.rb(i)`")
 multitask(:"lint:rubocop") do
-  lint = xargs + %w[rubocop --fail-level E] + (ENV.key?("CI") ? %w[--format github] : [])
+  rubocop = %w[rubocop --fail-level E]
+  rubocop += %w[--format github] if ENV.key?("CI")
+
+  # some lines cannot be shortened
+  rubocop += %w[--except Lint/RedundantCopDisableDirective,Layout/LineLength]
+
+  lint = xargs + rubocop
   sh("#{rubo_find.shelljoin} | #{lint.shelljoin}")
 end
diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb
index 3f6e7420..f29a04c2 100644
--- a/test/openai/internal/type/base_model_test.rb
+++ b/test/openai/internal/type/base_model_test.rb
@@ -194,11 +194,9 @@ def test_coerce
       [E0.new(:one), "one"] => [{yes: 1}, :one],
       [E0.new(:two), "one"] => [{maybe: 1}, "one"],
 
-      # rubocop:disable Lint/BooleanSymbol
       [E1, true] => [{yes: 1}, true],
       [E1, false] => [{no: 1}, false],
       [E1, :true] => [{no: 1}, :true],
-      # rubocop:enable Lint/BooleanSymbol
 
       [E2, 1] => [{yes: 1}, 1],
       [E2, 1.0] => [{yes: 1}, 1],

From e09e0a96f8c0c188ae4e53227a9ba741745bf871 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 30 Apr 2025 15:46:53 +0000
Subject: [PATCH 168/295] docs(readme): fix typo

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 3d43fae8..0bba8725 100644
--- a/README.md
+++ b/README.md
@@ -114,7 +114,7 @@ rescue OpenAI::Errors::APIError => e
 end
 ```
 
-Error codes are as followed:
+Error codes are as follows:
 
 | Cause            | Error Type                 |
 | ---------------- | -------------------------- |

From 462d6e862cc36e2c72036fe7377e830f2313306a Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 30 Apr 2025 15:59:00 +0000
Subject: [PATCH 169/295] chore: migrate away from deprecated `JSON#fast_generate`

---
 README.md                               | 2 +-
 Rakefile                                | 2 +-
 lib/openai/internal/type/base_stream.rb | 4 ++--
 lib/openai/internal/util.rb             | 6 +++---
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/README.md
b/README.md index 0bba8725..8b7adaa4 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ end ### File uploads -Request parameters that correspond to file uploads can be passed as `StringIO`, or a [`Pathname`](https://rubyapi.org/3.1/o/pathname) instance. +Request parameters that correspond to file uploads can be passed as `StringIO`, or a [`Pathname`](https://rubyapi.org/3.2/o/pathname) instance. ```ruby require "pathname" diff --git a/Rakefile b/Rakefile index 97b1805c..df7a5b88 100644 --- a/Rakefile +++ b/Rakefile @@ -31,7 +31,7 @@ multitask(:test) do .map { "require_relative(#{_1.dump});" } .join - ruby(*%w[-e], rb, verbose: false) { fail unless _1 } + ruby(*%w[-w -e], rb, verbose: false) { fail unless _1 } end rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index 580ba8ab..f1b1c8ff 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -9,7 +9,7 @@ module Type # # This module provides a base implementation for streaming responses in the SDK. # - # @see https://rubyapi.org/3.1/o/enumerable + # @see https://rubyapi.org/3.2/o/enumerable module BaseStream include Enumerable @@ -24,7 +24,7 @@ class << self # # @return [Proc] # - # @see https://rubyapi.org/3.1/o/objectspace#method-c-define_finalizer + # @see https://rubyapi.org/3.2/o/objectspace#method-c-define_finalizer def defer_closing(stream) = ->(_id) { OpenAI::Internal::Util.close_fused!(stream) } end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index c1359d17..0ecc6aca 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -493,7 +493,7 @@ class << self y << val.to_s else y << "Content-Type: application/json\r\n\r\n" - y << JSON.fast_generate(val) + y << JSON.generate(val) end y << "\r\n" end @@ -570,9 +570,9 @@ def encode_content(headers, body) content_type = headers["content-type"] case [content_type, body] in [OpenAI::Internal::Util::JSON_CONTENT, Hash | Array | -> { primitive?(_1) }] - [headers, JSON.fast_generate(body)] + [headers, JSON.generate(body)] in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless body.is_a?(OpenAI::Internal::Type::FileInput) - [headers, body.lazy.map { JSON.fast_generate(_1) }] + [headers, body.lazy.map { JSON.generate(_1) }] in [%r{^multipart/form-data}, Hash | OpenAI::Internal::Type::FileInput] boundary, strio = encode_multipart_streaming(body) headers = {**headers, "content-type" => "#{content_type}; boundary=#{boundary}"} From db2ab600c7ae4e87974a7965e87908cb7eb90e3e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 15:30:30 +0000 Subject: [PATCH 170/295] chore: remove Gemfile.lock during bootstrap --- .github/workflows/ci.yml | 2 - .github/workflows/publish-gem.yml | 1 - .gitignore | 1 + .ruby-version | 2 +- Gemfile.lock | 47 ++++++++++---------- README.md | 4 +- Rakefile | 2 +- lib/openai/internal/transport/base_client.rb | 2 +- openai.gemspec | 2 +- rbi/openai/errors.rbi | 2 +- scripts/bootstrap | 1 + 11 files changed, 33 insertions(+), 33 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1d8dfaab..6026ce13 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,6 @@ jobs: uses: ruby/setup-ruby@v1 with: bundler-cache: false - ruby-version: '3.1' - run: |- bundle install @@ -36,7 +35,6 @@ jobs: uses: 
ruby/setup-ruby@v1 with: bundler-cache: false - ruby-version: '3.1' - run: |- bundle install diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml index d6ba1c4a..7a7a1d07 100644 --- a/.github/workflows/publish-gem.yml +++ b/.github/workflows/publish-gem.yml @@ -19,7 +19,6 @@ jobs: uses: ruby/setup-ruby@v1 with: bundler-cache: false - ruby-version: '3.1' - run: |- bundle install diff --git a/.gitignore b/.gitignore index 3d26ceed..edaa164e 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,5 @@ bin/tapioca Brewfile.lock.json doc/ +Gemfile.lock sorbet/tapioca/* diff --git a/.ruby-version b/.ruby-version index fd2a0186..944880fa 100644 --- a/.ruby-version +++ b/.ruby-version @@ -1 +1 @@ -3.1.0 +3.2.0 diff --git a/Gemfile.lock b/Gemfile.lock index 6a6143f4..e1fd6bb2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,6 +1,6 @@ GIT remote: https://github.com/stainless-api/syntax_tree-rbs.git - revision: 140eb3ba2ff4b959b345ac2a7927cd758a9f1284 + revision: c30b50219918be7cfe3ef803a00b59d1e77fcada branch: main specs: syntax_tree-rbs (1.0.0) @@ -17,7 +17,7 @@ PATH GEM remote: https://rubygems.org/ specs: - activesupport (7.2.2.1) + activesupport (8.0.2) base64 benchmark (>= 0.3) bigdecimal @@ -29,6 +29,7 @@ GEM minitest (>= 5.1) securerandom (>= 0.3) tzinfo (~> 2.0, >= 2.0.5) + uri (>= 0.13.1) addressable (2.8.7) public_suffix (>= 2.0.2, < 7.0) ast (2.4.3) @@ -42,7 +43,7 @@ GEM benchmark (0.4.0) bigdecimal (3.1.9) concurrent-ruby (1.3.5) - connection_pool (2.5.0) + connection_pool (2.5.3) console (1.30.2) fiber-annotation fiber-local (~> 1.1) @@ -50,20 +51,20 @@ GEM crack (1.0.0) bigdecimal rexml - csv (3.3.3) + csv (3.3.4) drb (2.2.1) erubi (1.13.1) - ffi (1.17.1) + ffi (1.17.2-x86_64-linux-gnu) fiber-annotation (0.2.0) fiber-local (1.1.0) fiber-storage - fiber-storage (1.0.0) + fiber-storage (1.0.1) fileutils (1.7.3) hashdiff (1.1.2) i18n (1.14.7) concurrent-ruby (~> 1.0) io-event (1.10.0) - json (2.10.2) + json (2.11.3) language_server-protocol (3.17.0.4) lint_roller (1.1.0) listen (3.9.0) @@ -82,20 +83,20 @@ GEM minitest (~> 5.0) mutex_m (0.3.0) netrc (0.11.0) - parallel (1.26.3) - parser (3.3.7.4) + parallel (1.27.0) + parser (3.3.8.0) ast (~> 2.4.1) racc prettier_print (1.2.1) prism (1.4.0) - public_suffix (6.0.1) + public_suffix (6.0.2) racc (1.8.1) rainbow (3.1.1) rake (13.2.1) rb-fsevent (0.11.2) rb-inotify (0.11.1) ffi (~> 1.0) - rbi (0.3.1) + rbi (0.3.2) prism (~> 1.0) rbs (>= 3.4.4) sorbet-runtime (>= 0.5.9204) @@ -104,7 +105,7 @@ GEM redcarpet (3.6.1) regexp_parser (2.10.0) rexml (3.4.1) - rubocop (1.75.1) + rubocop (1.75.4) json (~> 2.3) language_server-protocol (~> 3.17.0.2) lint_roller (~> 1.1.0) @@ -112,21 +113,21 @@ GEM parser (>= 3.3.0.2) rainbow (>= 2.2.2, < 4.0) regexp_parser (>= 2.9.3, < 3.0) - rubocop-ast (>= 1.43.0, < 2.0) + rubocop-ast (>= 1.44.0, < 2.0) ruby-progressbar (~> 1.7) unicode-display_width (>= 2.4.0, < 4.0) - rubocop-ast (1.43.0) + rubocop-ast (1.44.1) parser (>= 3.3.7.2) prism (~> 1.4) ruby-progressbar (1.13.0) securerandom (0.4.1) - sorbet (0.5.11966) - sorbet-static (= 0.5.11966) - sorbet-runtime (0.5.11966) - sorbet-static (0.5.11966-x86_64-linux) - sorbet-static-and-runtime (0.5.11966) - sorbet (= 0.5.11966) - sorbet-runtime (= 0.5.11966) + sorbet (0.5.12048) + sorbet-static (= 0.5.12048) + sorbet-runtime (0.5.12048) + sorbet-static (0.5.12048-x86_64-linux) + sorbet-static-and-runtime (0.5.12048) + sorbet (= 0.5.12048) + sorbet-runtime (= 0.5.12048) spoom (1.6.1) erubi (>= 1.10.0) prism (>= 0.28.0) @@ -150,7 +151,7 @@ GEM 
strscan (>= 1.0.0) terminal-table (>= 2, < 5) uri (>= 0.12.0) - strscan (3.1.2) + strscan (3.1.4) syntax_tree (6.2.0) prettier_print (>= 1.2.0) tapioca (0.16.11) @@ -208,4 +209,4 @@ DEPENDENCIES yard BUNDLED WITH - 2.3.3 + 2.4.1 diff --git a/README.md b/README.md index 8b7adaa4..b99f8d09 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # OpenAI Ruby API library -The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.1.0+ application. +The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.2.0+ application. ## Documentation @@ -267,7 +267,7 @@ This package considers improvements to the (non-runtime) `*.rbi` and `*.rbs` typ ## Requirements -Ruby 3.1.0 or higher. +Ruby 3.2.0 or higher. ## Contributing diff --git a/Rakefile b/Rakefile index df7a5b88..661da932 100644 --- a/Rakefile +++ b/Rakefile @@ -11,7 +11,7 @@ require "rubocop/rake_task" tapioca = "sorbet/tapioca" ignore_file = ".ignore" -CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) +CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/ Gemfile.lock], *FileList["*.gem"], ignore_file) CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 5d77a764..b3bb579b 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -261,7 +261,7 @@ def initialize( headers["x-stainless-retry-count"] = "0" end - timeout = opts.fetch(:timeout, @timeout).to_f.clamp((0..)) + timeout = opts.fetch(:timeout, @timeout).to_f.clamp(0..) unless headers.key?("x-stainless-timeout") || timeout.zero? headers["x-stainless-timeout"] = timeout.to_s end diff --git a/openai.gemspec b/openai.gemspec index 64c1b6d9..cc0a7426 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -12,7 +12,7 @@ Gem::Specification.new do |s| s.metadata["homepage_uri"] = s.homepage s.metadata["source_code_uri"] = "https://github.com/openai/openai-ruby" s.metadata["rubygems_mfa_required"] = false.to_s - s.required_ruby_version = ">= 3.0.0" + s.required_ruby_version = ">= 3.2.0" s.files = Dir[ "lib/**/*.rb", diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 51d71f19..1eb51da4 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -164,7 +164,7 @@ module OpenAI end class InternalServerError < OpenAI::Errors::APIStatusError - HTTP_STATUS = T.let((500..), T::Range[Integer]) + HTTP_STATUS = T.let(500.., T::Range[Integer]) end end end diff --git a/scripts/bootstrap b/scripts/bootstrap index cc31aa85..9bf05537 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -13,4 +13,5 @@ fi echo "==> Installing Ruby dependencies…" +rm -fr -v -- Gemfile.lock exec -- bundle install "$@" From 48bb41c2b08fae8c5987b42789a473ce8cd05161 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 15:52:31 +0000 Subject: [PATCH 171/295] chore: reorganize type aliases --- lib/openai.rb | 1 + lib/openai/models/file_content.rb | 7 +++++++ rbi/openai/models/beta/threads/runs/run_step.rbi | 4 ++-- rbi/openai/models/beta/threads/runs/run_step_delta.rbi | 4 ++-- .../models/beta/threads/runs/run_step_delta_event.rbi | 4 ++-- .../beta/threads/runs/run_step_delta_message_delta.rbi | 4 ++-- rbi/openai/models/beta/threads/runs/run_step_include.rbi | 4 ++-- rbi/openai/models/chat/chat_completion.rbi | 4 ++-- .../chat/chat_completion_assistant_message_param.rbi | 4 
++-- rbi/openai/models/chat/chat_completion_audio.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_audio_param.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_chunk.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_content_part.rbi | 4 ++-- .../models/chat/chat_completion_content_part_image.rbi | 4 ++-- .../chat/chat_completion_content_part_input_audio.rbi | 4 ++-- .../models/chat/chat_completion_content_part_refusal.rbi | 4 ++-- .../models/chat/chat_completion_content_part_text.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_deleted.rbi | 4 ++-- .../chat/chat_completion_developer_message_param.rbi | 4 ++-- .../models/chat/chat_completion_function_call_option.rbi | 4 ++-- .../models/chat/chat_completion_function_message_param.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_message.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_message_param.rbi | 4 ++-- .../models/chat/chat_completion_message_tool_call.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_modality.rbi | 4 ++-- .../models/chat/chat_completion_named_tool_choice.rbi | 4 ++-- .../models/chat/chat_completion_prediction_content.rbi | 4 ++-- .../models/chat/chat_completion_reasoning_effort.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_role.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_store_message.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_stream_options.rbi | 4 ++-- .../models/chat/chat_completion_system_message_param.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_token_logprob.rbi | 4 ++-- rbi/openai/models/chat/chat_completion_tool.rbi | 4 ++-- .../models/chat/chat_completion_tool_choice_option.rbi | 4 ++-- .../models/chat/chat_completion_tool_message_param.rbi | 4 ++-- .../models/chat/chat_completion_user_message_param.rbi | 4 ++-- rbi/openai/models/evals/eval_api_error.rbi | 4 ++-- rbi/openai/models/file_content.rbi | 7 +++++++ rbi/openai/models/fine_tuning/fine_tuning_job.rbi | 4 ++-- rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi | 4 ++-- .../models/fine_tuning/fine_tuning_job_integration.rbi | 4 ++-- .../fine_tuning/fine_tuning_job_wandb_integration.rbi | 4 ++-- .../fine_tuning_job_wandb_integration_object.rbi | 4 ++-- rbi/openai/models/responses/response_item_list.rbi | 4 ++-- rbi/openai/models/uploads/upload_part.rbi | 4 ++-- rbi/openai/models/vector_stores/vector_store_file.rbi | 4 ++-- .../models/vector_stores/vector_store_file_batch.rbi | 4 ++-- .../models/vector_stores/vector_store_file_deleted.rbi | 4 ++-- sig/openai/models/file_content.rbs | 5 +++++ 50 files changed, 112 insertions(+), 92 deletions(-) create mode 100644 lib/openai/models/file_content.rb create mode 100644 rbi/openai/models/file_content.rbi create mode 100644 sig/openai/models/file_content.rbs diff --git a/lib/openai.rb b/lib/openai.rb index 0883b56f..0aadd7ca 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -251,6 +251,7 @@ require_relative "openai/models/eval_update_response" require_relative "openai/models/file_chunking_strategy" require_relative "openai/models/file_chunking_strategy_param" +require_relative "openai/models/file_content" require_relative "openai/models/file_content_params" require_relative "openai/models/file_create_params" require_relative "openai/models/file_deleted" diff --git a/lib/openai/models/file_content.rb b/lib/openai/models/file_content.rb new file mode 100644 index 00000000..0ffd9f84 --- /dev/null +++ b/lib/openai/models/file_content.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +module OpenAI + module Models + FileContent = String + end +end diff --git 
a/rbi/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi index 0e70020b..86e8b940 100644 --- a/rbi/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta module Threads + RunStep = Runs::RunStep + module Runs class RunStep < OpenAI::Internal::Type::BaseModel # The identifier of the run step, which can be referenced in API endpoints. @@ -337,8 +339,6 @@ module OpenAI end end end - - RunStep = Runs::RunStep end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi index d151496b..e70a40af 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta module Threads + RunStepDelta = Runs::RunStepDelta + module Runs class RunStepDelta < OpenAI::Internal::Type::BaseModel # The details of the run step. @@ -73,8 +75,6 @@ module OpenAI end end end - - RunStepDelta = Runs::RunStepDelta end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi index a9a700a4..3828467d 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta module Threads + RunStepDeltaEvent = Runs::RunStepDeltaEvent + module Runs class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # The identifier of the run step, which can be referenced in API endpoints. @@ -43,8 +45,6 @@ module OpenAI def to_hash; end end end - - RunStepDeltaEvent = Runs::RunStepDeltaEvent end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index efe29b30..a05e1586 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta module Threads + RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta + module Runs class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # Always `message_creation`. @@ -69,8 +71,6 @@ module OpenAI end end end - - RunStepDeltaMessageDelta = Runs::RunStepDeltaMessageDelta end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/openai/models/beta/threads/runs/run_step_include.rbi index 1632e2a6..2b97eb05 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_include.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta module Threads + RunStepInclude = Runs::RunStepInclude + module Runs module RunStepInclude extend OpenAI::Internal::Type::Enum @@ -21,8 +23,6 @@ module OpenAI def self.values; end end end - - RunStepInclude = Runs::RunStepInclude end end end diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index dbf3c910..9986624b 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletion = Chat::ChatCompletion + module Chat class ChatCompletion < OpenAI::Internal::Type::BaseModel # A unique identifier for the chat completion. 
@@ -294,7 +296,5 @@ module OpenAI end end end - - ChatCompletion = Chat::ChatCompletion end end diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index a9321c4b..783cdfc2 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam + module Chat class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # The role of the messages author, in this case `assistant`. @@ -242,7 +244,5 @@ module OpenAI end end end - - ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_audio.rbi b/rbi/openai/models/chat/chat_completion_audio.rbi index 3a562bd2..ce531040 100644 --- a/rbi/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_audio.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionAudio = Chat::ChatCompletionAudio + module Chat class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # Unique identifier for this audio response. @@ -44,7 +46,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionAudio = Chat::ChatCompletionAudio end end diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index f8798fa0..2b135efe 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionAudioParam = Chat::ChatCompletionAudioParam + module Chat class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, @@ -87,7 +89,5 @@ module OpenAI end end end - - ChatCompletionAudioParam = Chat::ChatCompletionAudioParam end end diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index c4d28509..e5905846 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionChunk = Chat::ChatCompletionChunk + module Chat class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # A unique identifier for the chat completion. Each chunk has the same ID. @@ -562,7 +564,5 @@ module OpenAI end end end - - ChatCompletionChunk = Chat::ChatCompletionChunk end end diff --git a/rbi/openai/models/chat/chat_completion_content_part.rbi b/rbi/openai/models/chat/chat_completion_content_part.rbi index d380d60e..beb80345 100644 --- a/rbi/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionContentPart = Chat::ChatCompletionContentPart + module Chat # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
@@ -88,7 +90,5 @@ module OpenAI def self.variants; end end end - - ChatCompletionContentPart = Chat::ChatCompletionContentPart end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/openai/models/chat/chat_completion_content_part_image.rbi index 8d2a8ddd..83ae49c3 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_image.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage + module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) } @@ -94,7 +96,5 @@ module OpenAI end end end - - ChatCompletionContentPartImage = Chat::ChatCompletionContentPartImage end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi index 34e7e778..412e8c32 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio + module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) } @@ -97,7 +99,5 @@ module OpenAI end end end - - ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi index 8088fdc0..4dbbaad1 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal + module Chat class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel # The refusal message generated by the model. @@ -23,7 +25,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionContentPartRefusal = Chat::ChatCompletionContentPartRefusal end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/openai/models/chat/chat_completion_content_part_text.rbi index 8fecc89b..d9d89cc1 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_text.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionContentPartText = Chat::ChatCompletionContentPartText + module Chat class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # The text content. @@ -25,7 +27,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionContentPartText = Chat::ChatCompletionContentPartText end end diff --git a/rbi/openai/models/chat/chat_completion_deleted.rbi b/rbi/openai/models/chat/chat_completion_deleted.rbi index e5083b94..344fb3ef 100644 --- a/rbi/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/openai/models/chat/chat_completion_deleted.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionDeleted = Chat::ChatCompletionDeleted + module Chat class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # The ID of the chat completion that was deleted. 
@@ -29,7 +31,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionDeleted = Chat::ChatCompletionDeleted end end diff --git a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi index ea4a6d05..7e1c7e21 100644 --- a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam + module Chat class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the developer message. @@ -70,7 +72,5 @@ module OpenAI end end end - - ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/openai/models/chat/chat_completion_function_call_option.rbi index 93220f0f..56f38623 100644 --- a/rbi/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/openai/models/chat/chat_completion_function_call_option.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption + module Chat class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # The name of the function to call. @@ -19,7 +21,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionFunctionCallOption = Chat::ChatCompletionFunctionCallOption end end diff --git a/rbi/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/openai/models/chat/chat_completion_function_message_param.rbi index dd32f6f7..4bfeb555 100644 --- a/rbi/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_function_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam + module Chat class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the function message. @@ -29,7 +31,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi index dcffce2a..a1c9abf6 100644 --- a/rbi/openai/models/chat/chat_completion_message.rbi +++ b/rbi/openai/models/chat/chat_completion_message.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionMessage = Chat::ChatCompletionMessage + module Chat class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # The contents of the message. @@ -221,7 +223,5 @@ module OpenAI end end end - - ChatCompletionMessage = Chat::ChatCompletionMessage end end diff --git a/rbi/openai/models/chat/chat_completion_message_param.rbi b/rbi/openai/models/chat/chat_completion_message_param.rbi index 4c9296f5..8a340084 100644 --- a/rbi/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionMessageParam = Chat::ChatCompletionMessageParam + module Chat # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. 
With o1 models and newer, `developer` messages @@ -18,7 +20,5 @@ module OpenAI def self.variants; end end end - - ChatCompletionMessageParam = Chat::ChatCompletionMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi index 77013f68..85d78135 100644 --- a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall + module Chat class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # The ID of the tool call. @@ -76,7 +78,5 @@ module OpenAI end end end - - ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall end end diff --git a/rbi/openai/models/chat/chat_completion_modality.rbi b/rbi/openai/models/chat/chat_completion_modality.rbi index 0226b92f..de02e647 100644 --- a/rbi/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/openai/models/chat/chat_completion_modality.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionModality = Chat::ChatCompletionModality + module Chat module ChatCompletionModality extend OpenAI::Internal::Type::Enum @@ -16,7 +18,5 @@ module OpenAI def self.values; end end end - - ChatCompletionModality = Chat::ChatCompletionModality end end diff --git a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi index f7ef280e..4380546a 100644 --- a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice + module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) } @@ -51,7 +53,5 @@ module OpenAI end end end - - ChatCompletionNamedToolChoice = Chat::ChatCompletionNamedToolChoice end end diff --git a/rbi/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi index 93fab93c..2bc5a26a 100644 --- a/rbi/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/openai/models/chat/chat_completion_prediction_content.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent + module Chat class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # The content that should be matched when generating a model response. 
If @@ -61,7 +63,5 @@ module OpenAI end end end - - ChatCompletionPredictionContent = Chat::ChatCompletionPredictionContent end end diff --git a/rbi/openai/models/chat/chat_completion_reasoning_effort.rbi b/rbi/openai/models/chat/chat_completion_reasoning_effort.rbi index 479be67b..87be1185 100644 --- a/rbi/openai/models/chat/chat_completion_reasoning_effort.rbi +++ b/rbi/openai/models/chat/chat_completion_reasoning_effort.rbi @@ -2,10 +2,10 @@ module OpenAI module Models + ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort + module Chat ChatCompletionReasoningEffort = OpenAI::Models::ReasoningEffort end - - ChatCompletionReasoningEffort = Chat::ChatCompletionReasoningEffort end end diff --git a/rbi/openai/models/chat/chat_completion_role.rbi b/rbi/openai/models/chat/chat_completion_role.rbi index 9be9e3e1..46776b39 100644 --- a/rbi/openai/models/chat/chat_completion_role.rbi +++ b/rbi/openai/models/chat/chat_completion_role.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionRole = Chat::ChatCompletionRole + module Chat # The role of the author of a message module ChatCompletionRole @@ -21,7 +23,5 @@ module OpenAI def self.values; end end end - - ChatCompletionRole = Chat::ChatCompletionRole end end diff --git a/rbi/openai/models/chat/chat_completion_store_message.rbi b/rbi/openai/models/chat/chat_completion_store_message.rbi index 5af6dfc9..db4cc17e 100644 --- a/rbi/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/openai/models/chat/chat_completion_store_message.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage + module Chat class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # The identifier of the chat message. @@ -18,7 +20,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage end end diff --git a/rbi/openai/models/chat/chat_completion_stream_options.rbi b/rbi/openai/models/chat/chat_completion_stream_options.rbi index 08fdb738..8a28c8ab 100644 --- a/rbi/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/openai/models/chat/chat_completion_stream_options.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions + module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # If set, an additional chunk will be streamed before the `data: [DONE]` message. @@ -33,7 +35,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions end end diff --git a/rbi/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi index aeac775d..5578d9e7 100644 --- a/rbi/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_system_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam + module Chat class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the system message. 
@@ -70,7 +72,5 @@ module OpenAI end end end - - ChatCompletionSystemMessageParam = Chat::ChatCompletionSystemMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/openai/models/chat/chat_completion_token_logprob.rbi index 00a79a48..f75eb74c 100644 --- a/rbi/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/openai/models/chat/chat_completion_token_logprob.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob + module Chat class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # The token. @@ -109,7 +111,5 @@ module OpenAI end end end - - ChatCompletionTokenLogprob = Chat::ChatCompletionTokenLogprob end end diff --git a/rbi/openai/models/chat/chat_completion_tool.rbi b/rbi/openai/models/chat/chat_completion_tool.rbi index 0b898367..54e45a9a 100644 --- a/rbi/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/openai/models/chat/chat_completion_tool.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionTool = Chat::ChatCompletionTool + module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel sig { returns(OpenAI::Models::FunctionDefinition) } @@ -27,7 +29,5 @@ module OpenAI def to_hash; end end end - - ChatCompletionTool = Chat::ChatCompletionTool end end diff --git a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi index fbdb6fda..090a8a1d 100644 --- a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption + module Chat # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can @@ -41,7 +43,5 @@ module OpenAI def self.variants; end end end - - ChatCompletionToolChoiceOption = Chat::ChatCompletionToolChoiceOption end end diff --git a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi index b0ed167b..862e00bf 100644 --- a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam + module Chat class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the tool message. @@ -62,7 +64,5 @@ module OpenAI end end end - - ChatCompletionToolMessageParam = Chat::ChatCompletionToolMessageParam end end diff --git a/rbi/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi index 197c73d0..a9452e5c 100644 --- a/rbi/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_user_message_param.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam + module Chat class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the user message. 
@@ -116,7 +118,5 @@ module OpenAI end end end - - ChatCompletionUserMessageParam = Chat::ChatCompletionUserMessageParam end end diff --git a/rbi/openai/models/evals/eval_api_error.rbi b/rbi/openai/models/evals/eval_api_error.rbi index 20859d95..906200ec 100644 --- a/rbi/openai/models/evals/eval_api_error.rbi +++ b/rbi/openai/models/evals/eval_api_error.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + EvalAPIError = Evals::EvalAPIError + module Evals class EvalAPIError < OpenAI::Internal::Type::BaseModel # The error code. @@ -24,7 +26,5 @@ module OpenAI def to_hash; end end end - - EvalAPIError = Evals::EvalAPIError end end diff --git a/rbi/openai/models/file_content.rbi b/rbi/openai/models/file_content.rbi new file mode 100644 index 00000000..92b8b41a --- /dev/null +++ b/rbi/openai/models/file_content.rbi @@ -0,0 +1,7 @@ +# typed: strong + +module OpenAI + module Models + FileContent = String + end +end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index b9a0f929..3036f3a4 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + FineTuningJob = FineTuning::FineTuningJob + module FineTuning class FineTuningJob < OpenAI::Internal::Type::BaseModel # The object identifier, which can be referenced in the API endpoints. @@ -697,7 +699,5 @@ module OpenAI end end end - - FineTuningJob = FineTuning::FineTuningJob end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi index bae007c6..f034e524 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + FineTuningJobEvent = FineTuning::FineTuningJobEvent + module FineTuning class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # The object identifier. @@ -113,7 +115,5 @@ module OpenAI end end end - - FineTuningJobEvent = FineTuning::FineTuningJobEvent end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi index 8a6c1da1..9ae1ecd5 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi @@ -2,10 +2,10 @@ module OpenAI module Models + FineTuningJobIntegration = FineTuning::FineTuningJobIntegration + module FineTuning FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject end - - FineTuningJobIntegration = FineTuning::FineTuningJobIntegration end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index f177e1fc..fb541aa4 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration + module FineTuning class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # The name of the project that the new run will be created under. 
@@ -63,7 +65,5 @@ module OpenAI def to_hash; end end end - - FineTuningJobWandbIntegration = FineTuning::FineTuningJobWandbIntegration end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index 2b2e10d9..c17319c1 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject + module FineTuning class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # The type of the integration being enabled for the fine-tuning job @@ -41,7 +43,5 @@ module OpenAI def to_hash; end end end - - FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject end end diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index 60f9ee2b..d83e183d 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + ResponseItemList = Responses::ResponseItemList + module Responses class ResponseItemList < OpenAI::Internal::Type::BaseModel # A list of items used to generate this response. @@ -100,7 +102,5 @@ module OpenAI def to_hash; end end end - - ResponseItemList = Responses::ResponseItemList end end diff --git a/rbi/openai/models/uploads/upload_part.rbi b/rbi/openai/models/uploads/upload_part.rbi index 17a8849b..79bf9d95 100644 --- a/rbi/openai/models/uploads/upload_part.rbi +++ b/rbi/openai/models/uploads/upload_part.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + UploadPart = Uploads::UploadPart + module Uploads class UploadPart < OpenAI::Internal::Type::BaseModel # The upload Part unique identifier, which can be referenced in API endpoints. @@ -38,7 +40,5 @@ module OpenAI def to_hash; end end end - - UploadPart = Uploads::UploadPart end end diff --git a/rbi/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi index 168da5d5..79686b0f 100644 --- a/rbi/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + VectorStoreFile = VectorStores::VectorStoreFile + module VectorStores class VectorStoreFile < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. @@ -220,7 +222,5 @@ module OpenAI end end end - - VectorStoreFile = VectorStores::VectorStoreFile end end diff --git a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi index 18461980..8712684e 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + VectorStoreFileBatch = VectorStores::VectorStoreFileBatch + module VectorStores class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # The identifier, which can be referenced in API endpoints. 
@@ -160,7 +162,5 @@ module OpenAI end end end - - VectorStoreFileBatch = VectorStores::VectorStoreFileBatch end end diff --git a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi index 9642c9d5..769eeefd 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -2,6 +2,8 @@ module OpenAI module Models + VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted + module VectorStores class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel sig { returns(String) } @@ -20,7 +22,5 @@ module OpenAI def to_hash; end end end - - VectorStoreFileDeleted = VectorStores::VectorStoreFileDeleted end end diff --git a/sig/openai/models/file_content.rbs b/sig/openai/models/file_content.rbs new file mode 100644 index 00000000..947667c0 --- /dev/null +++ b/sig/openai/models/file_content.rbs @@ -0,0 +1,5 @@ +module OpenAI + module Models + class FileContent = String + end +end From eb73769d183f975d711ddf80a682203102165ef7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 18:38:45 +0000 Subject: [PATCH 172/295] chore: re-export top level models under library namespace --- lib/openai/models.rb | 193 ++++++++++++++++++++++++++++++++++++++++++ rbi/openai/models.rbi | 191 +++++++++++++++++++++++++++++++++++++++++ sig/openai/models.rbs | 189 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 573 insertions(+) create mode 100644 lib/openai/models.rb create mode 100644 rbi/openai/models.rbi create mode 100644 sig/openai/models.rbs diff --git a/lib/openai/models.rb b/lib/openai/models.rb new file mode 100644 index 00000000..2a0f2cef --- /dev/null +++ b/lib/openai/models.rb @@ -0,0 +1,193 @@ +# frozen_string_literal: true + +module OpenAI + AllModels = OpenAI::Models::AllModels + + Audio = OpenAI::Models::Audio + + AudioModel = OpenAI::Models::AudioModel + + AudioResponseFormat = OpenAI::Models::AudioResponseFormat + + AutoFileChunkingStrategyParam = OpenAI::Models::AutoFileChunkingStrategyParam + + Batch = OpenAI::Models::Batch + + BatchCancelParams = OpenAI::Models::BatchCancelParams + + BatchCreateParams = OpenAI::Models::BatchCreateParams + + BatchError = OpenAI::Models::BatchError + + BatchListParams = OpenAI::Models::BatchListParams + + BatchRequestCounts = OpenAI::Models::BatchRequestCounts + + BatchRetrieveParams = OpenAI::Models::BatchRetrieveParams + + Beta = OpenAI::Models::Beta + + Chat = OpenAI::Models::Chat + + ChatModel = OpenAI::Models::ChatModel + + ComparisonFilter = OpenAI::Models::ComparisonFilter + + Completion = OpenAI::Models::Completion + + CompletionChoice = OpenAI::Models::CompletionChoice + + CompletionCreateParams = OpenAI::Models::CompletionCreateParams + + CompletionUsage = OpenAI::Models::CompletionUsage + + CompoundFilter = OpenAI::Models::CompoundFilter + + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + + Embedding = OpenAI::Models::Embedding + + EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams + + EmbeddingModel = OpenAI::Models::EmbeddingModel + + ErrorObject = OpenAI::Models::ErrorObject + + EvalCreateParams = OpenAI::Models::EvalCreateParams + + EvalCustomDataSourceConfig = OpenAI::Models::EvalCustomDataSourceConfig + + EvalDeleteParams = OpenAI::Models::EvalDeleteParams + + EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader + + EvalListParams = OpenAI::Models::EvalListParams + + 
EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams + + Evals = OpenAI::Models::Evals + + EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader + + EvalTextSimilarityGrader = OpenAI::Models::EvalTextSimilarityGrader + + EvalUpdateParams = OpenAI::Models::EvalUpdateParams + + FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy + + FileChunkingStrategyParam = OpenAI::Models::FileChunkingStrategyParam + + FileContent = OpenAI::Models::FileContent + + FileContentParams = OpenAI::Models::FileContentParams + + FileCreateParams = OpenAI::Models::FileCreateParams + + FileDeleted = OpenAI::Models::FileDeleted + + FileDeleteParams = OpenAI::Models::FileDeleteParams + + FileListParams = OpenAI::Models::FileListParams + + FileObject = OpenAI::Models::FileObject + + FilePurpose = OpenAI::Models::FilePurpose + + FileRetrieveParams = OpenAI::Models::FileRetrieveParams + + FineTuning = OpenAI::Models::FineTuning + + FunctionDefinition = OpenAI::Models::FunctionDefinition + + # @type [OpenAI::Internal::Type::Converter] + FunctionParameters = OpenAI::Models::FunctionParameters + + Image = OpenAI::Models::Image + + ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + + ImageEditParams = OpenAI::Models::ImageEditParams + + ImageGenerateParams = OpenAI::Models::ImageGenerateParams + + ImageModel = OpenAI::Models::ImageModel + + ImagesResponse = OpenAI::Models::ImagesResponse + + # @type [OpenAI::Internal::Type::Converter] + Metadata = OpenAI::Models::Metadata + + Model = OpenAI::Models::Model + + ModelDeleted = OpenAI::Models::ModelDeleted + + ModelDeleteParams = OpenAI::Models::ModelDeleteParams + + ModelListParams = OpenAI::Models::ModelListParams + + ModelRetrieveParams = OpenAI::Models::ModelRetrieveParams + + Moderation = OpenAI::Models::Moderation + + ModerationCreateParams = OpenAI::Models::ModerationCreateParams + + ModerationImageURLInput = OpenAI::Models::ModerationImageURLInput + + ModerationModel = OpenAI::Models::ModerationModel + + ModerationMultiModalInput = OpenAI::Models::ModerationMultiModalInput + + ModerationTextInput = OpenAI::Models::ModerationTextInput + + OtherFileChunkingStrategyObject = OpenAI::Models::OtherFileChunkingStrategyObject + + Reasoning = OpenAI::Models::Reasoning + + ReasoningEffort = OpenAI::Models::ReasoningEffort + + ResponseFormatJSONObject = OpenAI::Models::ResponseFormatJSONObject + + ResponseFormatJSONSchema = OpenAI::Models::ResponseFormatJSONSchema + + ResponseFormatText = OpenAI::Models::ResponseFormatText + + Responses = OpenAI::Models::Responses + + ResponsesModel = OpenAI::Models::ResponsesModel + + StaticFileChunkingStrategy = OpenAI::Models::StaticFileChunkingStrategy + + StaticFileChunkingStrategyObject = OpenAI::Models::StaticFileChunkingStrategyObject + + StaticFileChunkingStrategyObjectParam = OpenAI::Models::StaticFileChunkingStrategyObjectParam + + Upload = OpenAI::Models::Upload + + UploadCancelParams = OpenAI::Models::UploadCancelParams + + UploadCompleteParams = OpenAI::Models::UploadCompleteParams + + UploadCreateParams = OpenAI::Models::UploadCreateParams + + Uploads = OpenAI::Models::Uploads + + VectorStore = OpenAI::Models::VectorStore + + VectorStoreCreateParams = OpenAI::Models::VectorStoreCreateParams + + VectorStoreDeleted = OpenAI::Models::VectorStoreDeleted + + VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + + VectorStoreListParams = OpenAI::Models::VectorStoreListParams + + 
VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams + + VectorStores = OpenAI::Models::VectorStores + + VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams + + VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams +end diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi new file mode 100644 index 00000000..b72b3e70 --- /dev/null +++ b/rbi/openai/models.rbi @@ -0,0 +1,191 @@ +# typed: strong + +module OpenAI + AllModels = OpenAI::Models::AllModels + + Audio = OpenAI::Models::Audio + + AudioModel = OpenAI::Models::AudioModel + + AudioResponseFormat = OpenAI::Models::AudioResponseFormat + + AutoFileChunkingStrategyParam = OpenAI::Models::AutoFileChunkingStrategyParam + + Batch = OpenAI::Models::Batch + + BatchCancelParams = OpenAI::Models::BatchCancelParams + + BatchCreateParams = OpenAI::Models::BatchCreateParams + + BatchError = OpenAI::Models::BatchError + + BatchListParams = OpenAI::Models::BatchListParams + + BatchRequestCounts = OpenAI::Models::BatchRequestCounts + + BatchRetrieveParams = OpenAI::Models::BatchRetrieveParams + + Beta = OpenAI::Models::Beta + + Chat = OpenAI::Models::Chat + + ChatModel = OpenAI::Models::ChatModel + + ComparisonFilter = OpenAI::Models::ComparisonFilter + + Completion = OpenAI::Models::Completion + + CompletionChoice = OpenAI::Models::CompletionChoice + + CompletionCreateParams = OpenAI::Models::CompletionCreateParams + + CompletionUsage = OpenAI::Models::CompletionUsage + + CompoundFilter = OpenAI::Models::CompoundFilter + + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + + Embedding = OpenAI::Models::Embedding + + EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams + + EmbeddingModel = OpenAI::Models::EmbeddingModel + + ErrorObject = OpenAI::Models::ErrorObject + + EvalCreateParams = OpenAI::Models::EvalCreateParams + + EvalCustomDataSourceConfig = OpenAI::Models::EvalCustomDataSourceConfig + + EvalDeleteParams = OpenAI::Models::EvalDeleteParams + + EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader + + EvalListParams = OpenAI::Models::EvalListParams + + EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams + + Evals = OpenAI::Models::Evals + + EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader + + EvalTextSimilarityGrader = OpenAI::Models::EvalTextSimilarityGrader + + EvalUpdateParams = OpenAI::Models::EvalUpdateParams + + FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy + + FileChunkingStrategyParam = OpenAI::Models::FileChunkingStrategyParam + + FileContent = OpenAI::Models::FileContent + + FileContentParams = OpenAI::Models::FileContentParams + + FileCreateParams = OpenAI::Models::FileCreateParams + + FileDeleted = OpenAI::Models::FileDeleted + + FileDeleteParams = OpenAI::Models::FileDeleteParams + + FileListParams = OpenAI::Models::FileListParams + + FileObject = OpenAI::Models::FileObject + + FilePurpose = OpenAI::Models::FilePurpose + + FileRetrieveParams = OpenAI::Models::FileRetrieveParams + + FineTuning = OpenAI::Models::FineTuning + + FunctionDefinition = OpenAI::Models::FunctionDefinition + + FunctionParameters = T.let(OpenAI::Models::FunctionParameters, OpenAI::Internal::Type::Converter) + + Image = OpenAI::Models::Image + + ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + + ImageEditParams = OpenAI::Models::ImageEditParams + + ImageGenerateParams = OpenAI::Models::ImageGenerateParams + + ImageModel = 
OpenAI::Models::ImageModel + + ImagesResponse = OpenAI::Models::ImagesResponse + + Metadata = T.let(OpenAI::Models::Metadata, OpenAI::Internal::Type::Converter) + + Model = OpenAI::Models::Model + + ModelDeleted = OpenAI::Models::ModelDeleted + + ModelDeleteParams = OpenAI::Models::ModelDeleteParams + + ModelListParams = OpenAI::Models::ModelListParams + + ModelRetrieveParams = OpenAI::Models::ModelRetrieveParams + + Moderation = OpenAI::Models::Moderation + + ModerationCreateParams = OpenAI::Models::ModerationCreateParams + + ModerationImageURLInput = OpenAI::Models::ModerationImageURLInput + + ModerationModel = OpenAI::Models::ModerationModel + + ModerationMultiModalInput = OpenAI::Models::ModerationMultiModalInput + + ModerationTextInput = OpenAI::Models::ModerationTextInput + + OtherFileChunkingStrategyObject = OpenAI::Models::OtherFileChunkingStrategyObject + + Reasoning = OpenAI::Models::Reasoning + + ReasoningEffort = OpenAI::Models::ReasoningEffort + + ResponseFormatJSONObject = OpenAI::Models::ResponseFormatJSONObject + + ResponseFormatJSONSchema = OpenAI::Models::ResponseFormatJSONSchema + + ResponseFormatText = OpenAI::Models::ResponseFormatText + + Responses = OpenAI::Models::Responses + + ResponsesModel = OpenAI::Models::ResponsesModel + + StaticFileChunkingStrategy = OpenAI::Models::StaticFileChunkingStrategy + + StaticFileChunkingStrategyObject = OpenAI::Models::StaticFileChunkingStrategyObject + + StaticFileChunkingStrategyObjectParam = OpenAI::Models::StaticFileChunkingStrategyObjectParam + + Upload = OpenAI::Models::Upload + + UploadCancelParams = OpenAI::Models::UploadCancelParams + + UploadCompleteParams = OpenAI::Models::UploadCompleteParams + + UploadCreateParams = OpenAI::Models::UploadCreateParams + + Uploads = OpenAI::Models::Uploads + + VectorStore = OpenAI::Models::VectorStore + + VectorStoreCreateParams = OpenAI::Models::VectorStoreCreateParams + + VectorStoreDeleted = OpenAI::Models::VectorStoreDeleted + + VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + + VectorStoreListParams = OpenAI::Models::VectorStoreListParams + + VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams + + VectorStores = OpenAI::Models::VectorStores + + VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams + + VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams +end diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs new file mode 100644 index 00000000..7802cabe --- /dev/null +++ b/sig/openai/models.rbs @@ -0,0 +1,189 @@ +module OpenAI + module AllModels = OpenAI::Models::AllModels + + module Audio = OpenAI::Models::Audio + + module AudioModel = OpenAI::Models::AudioModel + + module AudioResponseFormat = OpenAI::Models::AudioResponseFormat + + class AutoFileChunkingStrategyParam = OpenAI::Models::AutoFileChunkingStrategyParam + + class Batch = OpenAI::Models::Batch + + class BatchCancelParams = OpenAI::Models::BatchCancelParams + + class BatchCreateParams = OpenAI::Models::BatchCreateParams + + class BatchError = OpenAI::Models::BatchError + + class BatchListParams = OpenAI::Models::BatchListParams + + class BatchRequestCounts = OpenAI::Models::BatchRequestCounts + + class BatchRetrieveParams = OpenAI::Models::BatchRetrieveParams + + module Beta = OpenAI::Models::Beta + + module Chat = OpenAI::Models::Chat + + module ChatModel = OpenAI::Models::ChatModel + + class ComparisonFilter = OpenAI::Models::ComparisonFilter + + class Completion = OpenAI::Models::Completion + + class CompletionChoice = 
OpenAI::Models::CompletionChoice + + class CompletionCreateParams = OpenAI::Models::CompletionCreateParams + + class CompletionUsage = OpenAI::Models::CompletionUsage + + class CompoundFilter = OpenAI::Models::CompoundFilter + + class CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + + class Embedding = OpenAI::Models::Embedding + + class EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams + + module EmbeddingModel = OpenAI::Models::EmbeddingModel + + class ErrorObject = OpenAI::Models::ErrorObject + + class EvalCreateParams = OpenAI::Models::EvalCreateParams + + class EvalCustomDataSourceConfig = OpenAI::Models::EvalCustomDataSourceConfig + + class EvalDeleteParams = OpenAI::Models::EvalDeleteParams + + class EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader + + class EvalListParams = OpenAI::Models::EvalListParams + + class EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams + + module Evals = OpenAI::Models::Evals + + class EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig + + class EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader + + class EvalTextSimilarityGrader = OpenAI::Models::EvalTextSimilarityGrader + + class EvalUpdateParams = OpenAI::Models::EvalUpdateParams + + module FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy + + module FileChunkingStrategyParam = OpenAI::Models::FileChunkingStrategyParam + + class FileContent = OpenAI::Models::FileContent + + class FileContentParams = OpenAI::Models::FileContentParams + + class FileCreateParams = OpenAI::Models::FileCreateParams + + class FileDeleted = OpenAI::Models::FileDeleted + + class FileDeleteParams = OpenAI::Models::FileDeleteParams + + class FileListParams = OpenAI::Models::FileListParams + + class FileObject = OpenAI::Models::FileObject + + module FilePurpose = OpenAI::Models::FilePurpose + + class FileRetrieveParams = OpenAI::Models::FileRetrieveParams + + module FineTuning = OpenAI::Models::FineTuning + + class FunctionDefinition = OpenAI::Models::FunctionDefinition + + FunctionParameters: OpenAI::Internal::Type::Converter + + class Image = OpenAI::Models::Image + + class ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + + class ImageEditParams = OpenAI::Models::ImageEditParams + + class ImageGenerateParams = OpenAI::Models::ImageGenerateParams + + module ImageModel = OpenAI::Models::ImageModel + + class ImagesResponse = OpenAI::Models::ImagesResponse + + Metadata: OpenAI::Internal::Type::Converter + + class Model = OpenAI::Models::Model + + class ModelDeleted = OpenAI::Models::ModelDeleted + + class ModelDeleteParams = OpenAI::Models::ModelDeleteParams + + class ModelListParams = OpenAI::Models::ModelListParams + + class ModelRetrieveParams = OpenAI::Models::ModelRetrieveParams + + class Moderation = OpenAI::Models::Moderation + + class ModerationCreateParams = OpenAI::Models::ModerationCreateParams + + class ModerationImageURLInput = OpenAI::Models::ModerationImageURLInput + + module ModerationModel = OpenAI::Models::ModerationModel + + module ModerationMultiModalInput = OpenAI::Models::ModerationMultiModalInput + + class ModerationTextInput = OpenAI::Models::ModerationTextInput + + class OtherFileChunkingStrategyObject = OpenAI::Models::OtherFileChunkingStrategyObject + + class Reasoning = OpenAI::Models::Reasoning + + module ReasoningEffort = OpenAI::Models::ReasoningEffort + + class ResponseFormatJSONObject = OpenAI::Models::ResponseFormatJSONObject + + class ResponseFormatJSONSchema = 
OpenAI::Models::ResponseFormatJSONSchema + + class ResponseFormatText = OpenAI::Models::ResponseFormatText + + module Responses = OpenAI::Models::Responses + + module ResponsesModel = OpenAI::Models::ResponsesModel + + class StaticFileChunkingStrategy = OpenAI::Models::StaticFileChunkingStrategy + + class StaticFileChunkingStrategyObject = OpenAI::Models::StaticFileChunkingStrategyObject + + class StaticFileChunkingStrategyObjectParam = OpenAI::Models::StaticFileChunkingStrategyObjectParam + + class Upload = OpenAI::Models::Upload + + class UploadCancelParams = OpenAI::Models::UploadCancelParams + + class UploadCompleteParams = OpenAI::Models::UploadCompleteParams + + class UploadCreateParams = OpenAI::Models::UploadCreateParams + + module Uploads = OpenAI::Models::Uploads + + class VectorStore = OpenAI::Models::VectorStore + + class VectorStoreCreateParams = OpenAI::Models::VectorStoreCreateParams + + class VectorStoreDeleted = OpenAI::Models::VectorStoreDeleted + + class VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + + class VectorStoreListParams = OpenAI::Models::VectorStoreListParams + + class VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams + + module VectorStores = OpenAI::Models::VectorStores + + class VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams + + class VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams +end From 05e37e2c34136fec64dad835488363f1014c6dca Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 21:45:07 +0000 Subject: [PATCH 173/295] chore: always check if current page is empty in `next_page?` --- lib/openai/internal/cursor_page.rb | 4 ++-- lib/openai/internal/page.rb | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/openai/internal/cursor_page.rb b/lib/openai/internal/cursor_page.rb index 3cb0654b..5f68a217 100644 --- a/lib/openai/internal/cursor_page.rb +++ b/lib/openai/internal/cursor_page.rb @@ -66,8 +66,8 @@ def initialize(client:, req:, headers:, page_data:) super case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } + in {data: Array => data} + @data = data.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } else end @has_more = page_data[:has_more] diff --git a/lib/openai/internal/page.rb b/lib/openai/internal/page.rb index f3792c4c..36c350a4 100644 --- a/lib/openai/internal/page.rb +++ b/lib/openai/internal/page.rb @@ -60,8 +60,8 @@ def initialize(client:, req:, headers:, page_data:) super case page_data - in {data: Array | nil => data} - @data = data&.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } + in {data: Array => data} + @data = data.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } else end @object = page_data[:object] From 847bc0e822ca05e45b5f2721b57323ef4ed9e189 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 22:55:29 +0000 Subject: [PATCH 174/295] chore: validate request option coercion correctness --- lib/openai/internal/type/base_model.rb | 10 +--------- lib/openai/internal/type/request_parameters.rb | 10 ++-------- lib/openai/internal/util.rb | 16 ++++++++++++++++ lib/openai/resources/beta/assistants.rb | 10 +++++----- lib/openai/resources/beta/threads.rb | 12 ++++++------ lib/openai/resources/beta/threads/messages.rb | 10 +++++----- lib/openai/resources/beta/threads/runs.rb | 16 
++++++++-------- lib/openai/resources/beta/threads/runs/steps.rb | 4 ++-- lib/openai/resources/vector_stores.rb | 12 ++++++------ .../resources/vector_stores/file_batches.rb | 8 ++++---- lib/openai/resources/vector_stores/files.rb | 12 ++++++------ rbi/openai/internal/util.rbi | 4 ++++ sig/openai/internal/util.rbs | 2 ++ 13 files changed, 67 insertions(+), 59 deletions(-) diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index af231837..0035eb6c 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -390,15 +390,7 @@ def to_yaml(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_ya # Create a new instance of a model. # # @param data [Hash{Symbol=>Object}, self] - def initialize(data = {}) - case OpenAI::Internal::Util.coerce_hash(data) - in Hash => coerced - @data = coerced - else - message = "Expected a #{Hash} or #{OpenAI::Internal::Type::BaseModel}, got #{data.inspect}" - raise ArgumentError.new(message) - end - end + def initialize(data = {}) = (@data = OpenAI::Internal::Util.coerce_hash!(data).to_h) class << self # @api private diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb index 958e1051..0a362d38 100644 --- a/lib/openai/internal/type/request_parameters.rb +++ b/lib/openai/internal/type/request_parameters.rb @@ -28,14 +28,8 @@ def dump_request(params) state = {can_retry: true} case (dumped = dump(params, state: state)) in Hash - options = OpenAI::Internal::Util.coerce_hash(dumped[:request_options]) - request_options = - case [options, state.fetch(:can_retry)] - in [Hash | nil, false] - {**options.to_h, max_retries: 0} - else - options - end + options = OpenAI::Internal::Util.coerce_hash!(dumped[:request_options]).to_h + request_options = state.fetch(:can_retry) ? options : {**options, max_retries: 0} [dumped.except(:request_options), request_options] else [dumped, nil] diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 0ecc6aca..e6424124 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -128,6 +128,22 @@ def coerce_hash(input) input.respond_to?(:to_h) ? 
input.to_h : input
          end
        end
+
+        # @api private
+        #
+        # @param input [Object]
+        #
+        # @raise [ArgumentError]
+        # @return [Hash{Object=>Object}, nil]
+        def coerce_hash!(input)
+          case coerce_hash(input)
+          in Hash | nil => coerced
+            coerced
+          else
+            message = "Expected a #{Hash} or #{OpenAI::Internal::Type::BaseModel}, got #{input.inspect}"
+            raise ArgumentError.new(message)
+          end
+        end
       end
 
       class << self
diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb
index 303843a0..25b83738 100644
--- a/lib/openai/resources/beta/assistants.rb
+++ b/lib/openai/resources/beta/assistants.rb
@@ -52,7 +52,7 @@ def create(params)
         path: "assistants",
         body: parsed,
         model: OpenAI::Models::Beta::Assistant,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -72,7 +72,7 @@ def retrieve(assistant_id, params = {})
         method: :get,
         path: ["assistants/%1$s", assistant_id],
         model: OpenAI::Models::Beta::Assistant,
-        options: params[:request_options]
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
       )
     end
 
@@ -126,7 +126,7 @@ def update(assistant_id, params = {})
         path: ["assistants/%1$s", assistant_id],
         body: parsed,
         model: OpenAI::Models::Beta::Assistant,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -162,7 +162,7 @@ def list(params = {})
         query: parsed,
         page: OpenAI::Internal::CursorPage,
         model: OpenAI::Models::Beta::Assistant,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -182,7 +182,7 @@ def delete(assistant_id, params = {})
         method: :delete,
         path: ["assistants/%1$s", assistant_id],
         model: OpenAI::Models::Beta::AssistantDeleted,
-        options: params[:request_options]
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
       )
     end
 
diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb
index 6203cb42..754a760f 100644
--- a/lib/openai/resources/beta/threads.rb
+++ b/lib/openai/resources/beta/threads.rb
@@ -37,7 +37,7 @@ def create(params = {})
         path: "threads",
         body: parsed,
         model: OpenAI::Models::Beta::Thread,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -57,7 +57,7 @@ def retrieve(thread_id, params = {})
         method: :get,
         path: ["threads/%1$s", thread_id],
         model: OpenAI::Models::Beta::Thread,
-        options: params[:request_options]
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
       )
     end
 
@@ -87,7 +87,7 @@ def update(thread_id, params = {})
         path: ["threads/%1$s", thread_id],
         body: parsed,
         model: OpenAI::Models::Beta::Thread,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -107,7 +107,7 @@ def delete(thread_id, params = {})
         method: :delete,
         path: ["threads/%1$s", thread_id],
         model: OpenAI::Models::Beta::ThreadDeleted,
-        options: params[:request_options]
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
       )
     end
 
@@ -178,7 +178,7 @@ def create_and_run(params)
         path: "threads/runs",
         body: parsed,
         model: OpenAI::Models::Beta::Threads::Run,
-        options: options
+        options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
       )
     end
 
@@ -253,7 +253,7 @@ def stream_raw(params)
         body: parsed,
         stream: OpenAI::Internal::Stream,
         model: OpenAI::Models::Beta::AssistantStreamEvent,
-        options: options
+        options: {extra_headers: 
{"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index d9191639..d60ca5f6 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -35,7 +35,7 @@ def create(thread_id, params) path: ["threads/%1$s/messages", thread_id], body: parsed, model: OpenAI::Models::Beta::Threads::Message, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -66,7 +66,7 @@ def retrieve(message_id, params) method: :get, path: ["threads/%1$s/messages/%2$s", thread_id, message_id], model: OpenAI::Models::Beta::Threads::Message, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -100,7 +100,7 @@ def update(message_id, params) path: ["threads/%1$s/messages/%2$s", thread_id, message_id], body: parsed, model: OpenAI::Models::Beta::Threads::Message, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -141,7 +141,7 @@ def list(thread_id, params = {}) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Message, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -168,7 +168,7 @@ def delete(message_id, params) method: :delete, path: ["threads/%1$s/messages/%2$s", thread_id, message_id], model: OpenAI::Models::Beta::Threads::MessageDeleted, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 1dfcaeb2..3f1b6d0d 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -86,7 +86,7 @@ def create(thread_id, params) query: parsed.slice(*query_params), body: parsed.except(*query_params), model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -171,7 +171,7 @@ def create_stream_raw(thread_id, params) body: parsed.except(*query_params), stream: OpenAI::Internal::Stream, model: OpenAI::Models::Beta::AssistantStreamEvent, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -202,7 +202,7 @@ def retrieve(run_id, params) method: :get, path: ["threads/%1$s/runs/%2$s", thread_id, run_id], model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -237,7 +237,7 @@ def update(run_id, params) path: ["threads/%1$s/runs/%2$s", thread_id, run_id], body: parsed, model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -275,7 +275,7 @@ def list(thread_id, params = {}) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -302,7 +302,7 @@ def cancel(run_id, params) method: :post, path: ["threads/%1$s/runs/%2$s/cancel", thread_id, run_id], model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -346,7 +346,7 @@ def submit_tool_outputs(run_id, params) path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id], 
body: parsed, model: OpenAI::Models::Beta::Threads::Run, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -393,7 +393,7 @@ def submit_tool_outputs_stream_raw(run_id, params) body: parsed, stream: OpenAI::Internal::Stream, model: OpenAI::Models::Beta::AssistantStreamEvent, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 21bbd9a5..011a0874 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -42,7 +42,7 @@ def retrieve(step_id, params) path: ["threads/%1$s/runs/%2$s/steps/%3$s", thread_id, run_id, step_id], query: parsed, model: OpenAI::Models::Beta::Threads::Runs::RunStep, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -89,7 +89,7 @@ def list(run_id, params) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::Beta::Threads::Runs::RunStep, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index cc75a01e..c0eace72 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -40,7 +40,7 @@ def create(params = {}) path: "vector_stores", body: parsed, model: OpenAI::Models::VectorStore, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -60,7 +60,7 @@ def retrieve(vector_store_id, params = {}) method: :get, path: ["vector_stores/%1$s", vector_store_id], model: OpenAI::Models::VectorStore, - options: params[:request_options] + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h} ) end @@ -91,7 +91,7 @@ def update(vector_store_id, params = {}) path: ["vector_stores/%1$s", vector_store_id], body: parsed, model: OpenAI::Models::VectorStore, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -127,7 +127,7 @@ def list(params = {}) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStore, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -147,7 +147,7 @@ def delete(vector_store_id, params = {}) method: :delete, path: ["vector_stores/%1$s", vector_store_id], model: OpenAI::Models::VectorStoreDeleted, - options: params[:request_options] + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h} ) end @@ -185,7 +185,7 @@ def search(vector_store_id, params) body: parsed, page: OpenAI::Internal::Page, model: OpenAI::Models::VectorStoreSearchResponse, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index 10b3e3dc..a56053dd 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -33,7 +33,7 @@ def create(vector_store_id, params) path: ["vector_stores/%1$s/file_batches", vector_store_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFileBatch, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -60,7 +60,7 @@ def 
retrieve(batch_id, params) method: :get, path: ["vector_stores/%1$s/file_batches/%2$s", vector_store_id, batch_id], model: OpenAI::Models::VectorStores::VectorStoreFileBatch, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -88,7 +88,7 @@ def cancel(batch_id, params) method: :post, path: ["vector_stores/%1$s/file_batches/%2$s/cancel", vector_store_id, batch_id], model: OpenAI::Models::VectorStores::VectorStoreFileBatch, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -135,7 +135,7 @@ def list_files(batch_id, params) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 74870229..fa0b48c5 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -35,7 +35,7 @@ def create(vector_store_id, params) path: ["vector_stores/%1$s/files", vector_store_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFile, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -62,7 +62,7 @@ def retrieve(file_id, params) method: :get, path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], model: OpenAI::Models::VectorStores::VectorStoreFile, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -96,7 +96,7 @@ def update(file_id, params) path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], body: parsed, model: OpenAI::Models::VectorStores::VectorStoreFile, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -137,7 +137,7 @@ def list(vector_store_id, params = {}) query: parsed, page: OpenAI::Internal::CursorPage, model: OpenAI::Models::VectorStores::VectorStoreFile, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -167,7 +167,7 @@ def delete(file_id, params) method: :delete, path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], model: OpenAI::Models::VectorStores::VectorStoreFileDeleted, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -195,7 +195,7 @@ def content(file_id, params) path: ["vector_stores/%1$s/files/%2$s/content", vector_store_id, file_id], page: OpenAI::Internal::Page, model: OpenAI::Models::VectorStores::FileContentResponse, - options: options + options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index 3fe2f560..c86a9895 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -42,6 +42,10 @@ module OpenAI # @api private sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) } def coerce_hash(input); end + + # @api private + sig { params(input: T.anything).returns(T.nilable(T::Hash[T.anything, T.anything])) } + def coerce_hash!(input); end end class << self diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 14b3d577..725c664b 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -19,6 +19,8 @@ module OpenAI def self?.coerce_hash: (top input) -> (::Hash[top, top] | top) + def 
self?.coerce_hash!: (top input) -> ::Hash[top, top]? + def self?.deep_merge_lr: (top lhs, top rhs, ?concat: bool) -> top def self?.deep_merge: ( From 0464901a010d2973f7dcba1c6d7883798e2914bd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 06:25:06 +0000 Subject: [PATCH 175/295] chore(internal): codegen related update --- Gemfile.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index e1fd6bb2..d1e8f5ec 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -121,13 +121,13 @@ GEM prism (~> 1.4) ruby-progressbar (1.13.0) securerandom (0.4.1) - sorbet (0.5.12048) - sorbet-static (= 0.5.12048) - sorbet-runtime (0.5.12048) - sorbet-static (0.5.12048-x86_64-linux) - sorbet-static-and-runtime (0.5.12048) - sorbet (= 0.5.12048) - sorbet-runtime (= 0.5.12048) + sorbet (0.5.12053) + sorbet-static (= 0.5.12053) + sorbet-runtime (0.5.12053) + sorbet-static (0.5.12053-x86_64-linux) + sorbet-static-and-runtime (0.5.12053) + sorbet (= 0.5.12053) + sorbet-runtime (= 0.5.12053) spoom (1.6.1) erubi (>= 1.10.0) prism (>= 0.28.0) From 19e3a9c23839765d3515106d1059a5a430b62956 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 2 May 2025 19:12:14 +0000 Subject: [PATCH 176/295] feat(api): add image sizes, reasoning encryption --- .stats.yml | 6 +- .../models/audio/speech_create_params.rb | 2 +- lib/openai/models/image_edit_params.rb | 62 +++++++-- lib/openai/models/responses/computer_tool.rb | 22 ++-- .../models/responses/file_search_tool.rb | 18 +-- lib/openai/models/responses/function_tool.rb | 19 +-- .../responses/response_create_params.rb | 5 + .../models/responses/response_includable.rb | 6 + .../models/responses/response_input_file.rb | 8 +- .../models/responses/response_input_image.rb | 12 +- .../models/responses/response_input_item.rb | 82 +++++++----- .../models/responses/response_input_text.rb | 7 +- .../models/responses/response_output_item.rb | 4 +- .../responses/response_output_refusal.rb | 7 +- .../models/responses/response_output_text.rb | 33 ++--- .../responses/response_reasoning_item.rb | 15 ++- .../response_text_annotation_delta_event.rb | 26 ++-- lib/openai/models/responses/tool.rb | 16 +-- .../models/responses/web_search_tool.rb | 43 +++--- lib/openai/resources/images.rb | 7 +- .../models/audio/speech_create_params.rbi | 4 +- rbi/openai/models/image_edit_params.rbi | 77 +++++++++-- rbi/openai/models/responses/computer_tool.rbi | 15 ++- .../models/responses/file_search_tool.rbi | 22 ++-- rbi/openai/models/responses/function_tool.rbi | 12 +- .../responses/response_create_params.rbi | 10 ++ .../models/responses/response_includable.rbi | 7 + .../models/responses/response_input_file.rbi | 12 +- .../models/responses/response_input_image.rbi | 2 +- .../models/responses/response_input_item.rbi | 123 ++++++++++-------- .../responses/response_reasoning_item.rbi | 14 +- rbi/openai/models/responses/tool.rbi | 4 +- .../models/responses/web_search_tool.rbi | 60 ++++----- rbi/openai/resources/audio/speech.rbi | 2 +- rbi/openai/resources/images.rbi | 20 ++- rbi/openai/resources/responses.rbi | 10 ++ sig/openai/models/image_edit_params.rbs | 27 +++- sig/openai/models/responses/computer_tool.rbs | 17 +-- .../models/responses/file_search_tool.rbs | 10 +- sig/openai/models/responses/function_tool.rbs | 12 +- .../models/responses/response_includable.rbs | 2 + .../models/responses/response_input_file.rbs | 8 +- 
.../models/responses/response_input_image.rbs | 4 +- .../models/responses/response_input_item.rbs | 81 ++++++------ .../responses/response_reasoning_item.rbs | 4 + .../models/responses/web_search_tool.rbs | 32 ++--- sig/openai/resources/images.rbs | 1 + 47 files changed, 593 insertions(+), 399 deletions(-) diff --git a/.stats.yml b/.stats.yml index 7738ef3d..089abe5d 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-8b68ae6b807dca92e914da1dd9e835a20f69b075e79102a264367fd7fddddb33.yml -openapi_spec_hash: b6ade5b1a6327339e6669e1134de2d03 -config_hash: b597cd9a31e9e5ec709e2eefb4c54122 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-0ee6b36cf3cc278cef4199a6aec5f7d530a6c1f17a74830037e96d50ca1edc50.yml +openapi_spec_hash: e8ec5f46bc0655b34f292422d58a60f6 +config_hash: d9b6b6e6bc85744663e300eebc482067 diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 5ab05354..e17d7ec9 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -46,7 +46,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. Does not work with `gpt-4o-mini-tts`. # # @return [Float, nil] optional :speed, Float diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index e42475a3..d850e2a5 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -8,10 +8,13 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel include OpenAI::Internal::Type::RequestParameters # @!attribute image - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. # # @return [Pathname, StringIO, IO, OpenAI::FilePart, Array] required :image, union: -> { OpenAI::Models::ImageEditParams::Image } @@ -23,6 +26,18 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String] required :prompt, String + # @!attribute background + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + # + # @return [Symbol, OpenAI::Models::ImageEditParams::Background, nil] + optional :background, enum: -> { OpenAI::Models::ImageEditParams::Background }, nil?: true + # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. 
If there are multiple images provided, @@ -79,16 +94,17 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. For - # ... + # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. ... # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # ... # + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # ... # @@ -111,10 +127,13 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. module Image extend OpenAI::Internal::Type::Union @@ -129,6 +148,24 @@ module Image StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput] end + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. 
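The new `background` option is easiest to see in use. A minimal, hedged sketch follows — the client setup, file name, and prompt are illustrative assumptions, not part of this diff; per the docs above, a transparent background needs an output format with alpha support (`png`, the default, or `webp`):

    require "openai"
    require "pathname"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # Edit a hypothetical local image, asking gpt-image-1 to keep the
    # background transparent (only valid when the output is png or webp).
    image = client.images.edit(
      image: Pathname("sprite.png"),            # hypothetical input file
      prompt: "Remove the checkerboard backdrop",
      model: "gpt-image-1",
      background: :transparent
    )
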
@@ -183,6 +220,9 @@ module Size SIZE_256X256 = :"256x256" SIZE_512X512 = :"512x512" SIZE_1024X1024 = :"1024x1024" + SIZE_1536X1024 = :"1536x1024" + SIZE_1024X1536 = :"1024x1536" + AUTO = :auto # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 29644748..d65a0f31 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -7,14 +7,14 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @!attribute display_height # The height of the computer display. # - # @return [Float] - required :display_height, Float + # @return [Integer] + required :display_height, Integer # @!attribute display_width # The width of the computer display. # - # @return [Float] - required :display_width, Float + # @return [Integer] + required :display_width, Integer # @!attribute environment # The type of computer environment to control. @@ -29,19 +29,16 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel required :type, const: :computer_use_preview # @!method initialize(display_height:, display_width:, environment:, type: :computer_use_preview) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ComputerTool} for more details. - # # A tool that controls a virtual computer. Learn more about the # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). # - # @param display_height [Float] The height of the computer display. ... + # @param display_height [Integer] The height of the computer display. # - # @param display_width [Float] The width of the computer display. ... + # @param display_width [Integer] The width of the computer display. # - # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. ... + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. # - # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. ... + # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. # The type of computer environment to control. # @@ -49,8 +46,9 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel module Environment extend OpenAI::Internal::Type::Enum - MAC = :mac WINDOWS = :windows + MAC = :mac + LINUX = :linux UBUNTU = :ubuntu BROWSER = :browser diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index bdd10f5f..1019298c 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -17,10 +17,10 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel required :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] # @!attribute filters - # A filter to apply based on file attributes. + # A filter to apply. # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] - optional :filters, union: -> { OpenAI::Models::Responses::FileSearchTool::Filters } + optional :filters, union: -> { OpenAI::Models::Responses::FileSearchTool::Filters }, nil?: true # @!attribute max_num_results # The maximum number of results to return. 
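With `filters` now explicitly nilable, a file search tool can be configured with a filter object or with no filter at all. A hedged sketch of the hash shape the union accepts — the vector store ID, attribute key, and model name are placeholders:

    # A ComparisonFilter narrows results by file attributes; passing
    # `filters: nil` (now permitted) searches the store unfiltered.
    client.responses.create(
      model: "gpt-4o",
      input: "What does the handbook say about PTO?",
      tools: [
        {
          type: :file_search,
          vector_store_ids: ["vs_123"],                      # placeholder store ID
          filters: {type: :eq, key: "region", value: "us"},  # ComparisonFilter shape
          max_num_results: 10
        }
      ]
    )
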
This number should be between 1 and 50 @@ -43,17 +43,18 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # the # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). # - # @param vector_store_ids [Array] The IDs of the vector stores to search. ... + # @param vector_store_ids [Array] The IDs of the vector stores to search. # - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] A filter to apply. # - # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 ... + # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 + # ... # # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] Ranking options for search. # - # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. ... + # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. - # A filter to apply based on file attributes. + # A filter to apply. # # @see OpenAI::Models::Responses::FileSearchTool#filters module Filters @@ -93,7 +94,8 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. # - # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. ... + # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. Numbers close + # ... # The ranker to use for the file search. # diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index e74ed4b6..f37a388a 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -13,14 +13,14 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute parameters # A JSON schema object describing the parameters of the function. # - # @return [Hash{Symbol=>Object}] - required :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + # @return [Hash{Symbol=>Object}, nil] + required :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown], nil?: true # @!attribute strict # Whether to enforce strict parameter validation. Default `true`. # - # @return [Boolean] - required :strict, OpenAI::Internal::Type::Boolean + # @return [Boolean, nil] + required :strict, OpenAI::Internal::Type::Boolean, nil?: true # @!attribute type # The type of the function tool. Always `function`. @@ -43,15 +43,16 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @param name [String] The name of the function to call. ... + # @param name [String] The name of the function to call. # - # @param parameters [Hash{Symbol=>Object}] A JSON schema object describing the parameters of the function. ... + # @param parameters [Hash{Symbol=>Object}, nil] A JSON schema object describing the parameters of the function. # - # @param strict [Boolean] Whether to enforce strict parameter validation. Default `true`. ... + # @param strict [Boolean, nil] Whether to enforce strict parameter validation. Default `true`. 
# - # @param description [String, nil] A description of the function. Used by the model to determine whether ... + # @param description [String, nil] A description of the function. Used by the model to determine whether or not to + # ... # - # @param type [Symbol, :function] The type of the function tool. Always `function`. ... + # @param type [Symbol, :function] The type of the function tool. Always `function`. end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 6e51faa1..13a1bb6b 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -43,6 +43,11 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). # # @return [Array, nil] optional :include, diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 2f90f277..f56e4278 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -11,12 +11,18 @@ module Responses # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). module ResponseIncludable extend OpenAI::Internal::Type::Enum FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" + REASONING_ENCRYPTED_CONTENT = :"reasoning.encrypted_content" # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 06938134..9837a2e0 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -20,7 +20,7 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # The ID of the file to be sent to the model. # # @return [String, nil] - optional :file_id, String + optional :file_id, String, nil?: true # @!attribute filename # The name of the file to be sent to the model. @@ -36,11 +36,11 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # # @param file_data [String] The content of the file to be sent to the model. ... # - # @param file_id [String] The ID of the file to be sent to the model. ... + # @param file_id [String, nil] The ID of the file to be sent to the model. # - # @param filename [String] The name of the file to be sent to the model. ... 
+ # @param filename [String] The name of the file to be sent to the model. # - # @param type [Symbol, :input_file] The type of the input item. Always `input_file`. ... + # @param type [Symbol, :input_file] The type of the input item. Always `input_file`. end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 997851c2..643a2e3d 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -37,13 +37,15 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, ... + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or + # ... # - # @param file_id [String, nil] The ID of the file to be sent to the model. ... + # @param file_id [String, nil] The ID of the file to be sent to the model. # - # @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or ... + # @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or base64 en + # ... # - # @param type [Symbol, :input_image] The type of the input item. Always `input_image`. ... + # @param type [Symbol, :input_image] The type of the input item. Always `input_image`. # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. @@ -52,8 +54,8 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel module Detail extend OpenAI::Internal::Type::Enum - HIGH = :high LOW = :low + HIGH = :high AUTO = :auto # @!method self.values diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 29e533f3..95e8dbe5 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -51,7 +51,9 @@ module ResponseInputItem variant :function_call_output, -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput } # A description of the chain of thought used by a reasoning model while generating - # a response. + # a response. Be sure to include these items in your `input` to the Responses API + # for subsequent turns of a conversation if you are manually + # [managing context](https://platform.openai.com/docs/guides/conversation-state). variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } # An internal identifier for an item to reference. @@ -166,7 +168,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The ID of the computer tool call output. 
# # @return [String, nil] - optional :id, String + optional :id, String, nil?: true # @!attribute acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the @@ -174,14 +176,17 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :acknowledged_safety_checks, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] }, + nil?: true # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status } + optional :status, + enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status }, + nil?: true # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see @@ -190,17 +195,19 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # # The output of a computer tool call. # - # @param call_id [String] The ID of the computer tool call that produced the output. ... + # @param call_id [String] The ID of the computer tool call that produced the output. # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. ... # - # @param id [String] The ID of the computer tool call output. ... + # @param id [String, nil] The ID of the computer tool call output. # - # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the ... + # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop + # ... # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] The status of the message input. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple + # ... # - # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. ... + # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -212,23 +219,23 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute code # The type of the pending safety check. # - # @return [String] - required :code, String + # @return [String, nil] + optional :code, String, nil?: true # @!attribute message # Details about the pending safety check. # - # @return [String] - required :message, String + # @return [String, nil] + optional :message, String, nil?: true - # @!method initialize(id:, code:, message:) + # @!method initialize(id:, code: nil, message: nil) # A pending safety check for the computer call. # # @param id [String] The ID of the pending safety check. 
# - # @param code [String] The type of the pending safety check. + # @param code [String, nil] The type of the pending safety check. # - # @param message [String] Details about the pending safety check. + # @param message [String, nil] Details about the pending safety check. end # The status of the message input. One of `in_progress`, `completed`, or @@ -271,14 +278,16 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # returned via API. # # @return [String, nil] - optional :id, String + optional :id, String, nil?: true # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status } + optional :status, + enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status }, + nil?: true # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see @@ -287,15 +296,17 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # # The output of a function tool call. # - # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # @param call_id [String] The unique ID of the function tool call generated by the model. # - # @param output [String] A JSON string of the output of the function tool call. ... + # @param output [String] A JSON string of the output of the function tool call. # - # @param id [String] The unique ID of the function tool call output. Populated when this item ... + # @param id [String, nil] The unique ID of the function tool call output. Populated when this item is retu + # ... # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] The status of the item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu + # ... # - # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. ... + # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -323,18 +334,27 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of item to reference. Always `item_reference`. # - # @return [Symbol, :item_reference] - required :type, const: :item_reference + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] + optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type }, nil?: true - # @!method initialize(id:, type: :item_reference) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputItem::ItemReference} for more details. - # + # @!method initialize(id:, type: nil) # An internal identifier for an item to reference. # - # @param id [String] The ID of the item to reference. ... + # @param id [String] The ID of the item to reference. # - # @param type [Symbol, :item_reference] The type of item to reference. Always `item_reference`. ... 
+ # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference.

+        # The type of item to reference. Always `item_reference`.
+        #
+        # @see OpenAI::Models::Responses::ResponseInputItem::ItemReference#type
+        module Type
+          extend OpenAI::Internal::Type::Enum
+
+          ITEM_REFERENCE = :item_reference
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
       end

       # @!method self.variants
diff --git a/lib/openai/models/responses/response_input_text.rb b/lib/openai/models/responses/response_input_text.rb
index c05e5a54..9735f187 100644
--- a/lib/openai/models/responses/response_input_text.rb
+++ b/lib/openai/models/responses/response_input_text.rb
@@ -17,14 +17,11 @@ class ResponseInputText < OpenAI::Internal::Type::BaseModel
       required :type, const: :input_text

       # @!method initialize(text:, type: :input_text)
-      #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Models::Responses::ResponseInputText} for more details.
-      #
       #   A text input to the model.
       #
-      #   @param text [String] The text input to the model. ...
+      #   @param text [String] The text input to the model.
       #
-      #   @param type [Symbol, :input_text] The type of the input item. Always `input_text`. ...
+      #   @param type [Symbol, :input_text] The type of the input item. Always `input_text`.
     end
   end
 end
diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb
index 83a8c4db..91539b62 100644
--- a/lib/openai/models/responses/response_output_item.rb
+++ b/lib/openai/models/responses/response_output_item.rb
@@ -29,7 +29,9 @@ module ResponseOutputItem
       variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall }

       # A description of the chain of thought used by a reasoning model while generating
-      # a response.
+      # a response. Be sure to include these items in your `input` to the Responses API
+      # for subsequent turns of a conversation if you are manually
+      # [managing context](https://platform.openai.com/docs/guides/conversation-state).
       variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem }

       # @!method self.variants
diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb
index 82729b85..62316c99 100644
--- a/lib/openai/models/responses/response_output_refusal.rb
+++ b/lib/openai/models/responses/response_output_refusal.rb
@@ -17,14 +17,11 @@ class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel
       required :type, const: :refusal

       # @!method initialize(refusal:, type: :refusal)
-      #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Models::Responses::ResponseOutputRefusal} for more details.
-      #
       #   A refusal from the model.
       #
-      #   @param refusal [String] The refusal explanationfrom the model. ...
+      #   @param refusal [String] The refusal explanation from the model.
       #
-      #   @param type [Symbol, :refusal] The type of the refusal. Always `refusal`. ...
+      #   @param type [Symbol, :refusal] The type of the refusal. Always `refusal`.
end
   end
 end
diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb
index 32465ac6..aaeda218 100644
--- a/lib/openai/models/responses/response_output_text.rb
+++ b/lib/openai/models/responses/response_output_text.rb
@@ -24,16 +24,13 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel
       required :type, const: :output_text

       # @!method initialize(annotations:, text:, type: :output_text)
-      #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Models::Responses::ResponseOutputText} for more details.
-      #
       #   A text output from the model.
       #
-      #   @param annotations [Array<OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath>] The annotations of the text output. ...
+      #   @param annotations [Array<OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath>] The annotations of the text output.
       #
-      #   @param text [String] The text output from the model. ...
+      #   @param text [String] The text output from the model.
       #
-      #   @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ...
+      #   @param type [Symbol, :output_text] The type of the output text. Always `output_text`.

       # A citation to a file.
       module Annotation
@@ -70,17 +67,13 @@ class FileCitation < OpenAI::Internal::Type::BaseModel
           required :type, const: :file_citation

           # @!method initialize(file_id:, index:, type: :file_citation)
-          #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation} for
-          #   more details.
-          #
           #   A citation to a file.
           #
-          #   @param file_id [String] The ID of the file. ...
+          #   @param file_id [String] The ID of the file.
           #
-          #   @param index [Integer] The index of the file in the list of files. ...
+          #   @param index [Integer] The index of the file in the list of files.
           #
-          #   @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. ...
+          #   @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`.
         end

         class URLCitation < OpenAI::Internal::Type::BaseModel
@@ -115,21 +108,17 @@ class URLCitation < OpenAI::Internal::Type::BaseModel
           required :url, String

           # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation)
-          #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation} for
-          #   more details.
-          #
           #   A citation for a web resource used to generate a model response.
           #
-          #   @param end_index [Integer] The index of the last character of the URL citation in the message. ...
+          #   @param end_index [Integer] The index of the last character of the URL citation in the message.
           #
-          #   @param start_index [Integer] The index of the first character of the URL citation in the message. ...
+          #   @param start_index [Integer] The index of the first character of the URL citation in the message.
           #
-          #   @param title [String] The title of the web resource. ...
+          #   @param title [String] The title of the web resource.
           #
-          #   @param url [String] The URL of the web resource. ...
+          #   @param url [String] The URL of the web resource.
           #
-          #   @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. ...
+          #   @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`.
end

         class FilePath < OpenAI::Internal::Type::BaseModel
diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb
index a09ba35b..5b029d34 100644
--- a/lib/openai/models/responses/response_reasoning_item.rb
+++ b/lib/openai/models/responses/response_reasoning_item.rb
@@ -23,6 +23,13 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel
       #   @return [Symbol, :reasoning]
       required :type, const: :reasoning

+      # @!attribute encrypted_content
+      #   The encrypted content of the reasoning item - populated when a response is
+      #   generated with `reasoning.encrypted_content` in the `include` parameter.
+      #
+      #   @return [String, nil]
+      optional :encrypted_content, String, nil?: true
+
       # @!attribute status
       #   The status of the item. One of `in_progress`, `completed`, or `incomplete`.
       #   Populated when items are returned via API.
@@ -30,17 +37,21 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel
       #   @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil]
       optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status }

-      # @!method initialize(id:, summary:, status: nil, type: :reasoning)
+      # @!method initialize(id:, summary:, encrypted_content: nil, status: nil, type: :reasoning)
       #   Some parameter documentations has been truncated, see
       #   {OpenAI::Models::Responses::ResponseReasoningItem} for more details.
       #
       #   A description of the chain of thought used by a reasoning model while generating
-      #   a response.
+      #   a response. Be sure to include these items in your `input` to the Responses API
+      #   for subsequent turns of a conversation if you are manually
+      #   [managing context](https://platform.openai.com/docs/guides/conversation-state).
       #
       #   @param id [String] The unique identifier of the reasoning content. ...
       #
       #   @param summary [Array<OpenAI::Models::Responses::ResponseReasoningItem::Summary>] Reasoning text contents. ...
       #
+      #   @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is ...
+      #
       #   @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or ...
       #
       #   @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. ...
diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb
index 063252da..f0be62c1 100644
--- a/lib/openai/models/responses/response_text_annotation_delta_event.rb
+++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb
@@ -47,7 +47,7 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel
       #
       #   Emitted when a text annotation is added.
       #
-      #   @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. ...
+      #   @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file.
       #
       #   @param annotation_index [Integer] The index of the annotation that was added. ...
# @@ -99,17 +99,13 @@ class FileCitation < OpenAI::Internal::Type::BaseModel required :type, const: :file_citation # @!method initialize(file_id:, index:, type: :file_citation) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation} - # for more details. - # # A citation to a file. # - # @param file_id [String] The ID of the file. ... + # @param file_id [String] The ID of the file. # - # @param index [Integer] The index of the file in the list of files. ... + # @param index [Integer] The index of the file in the list of files. # - # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. ... + # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -144,21 +140,17 @@ class URLCitation < OpenAI::Internal::Type::BaseModel required :url, String # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation} - # for more details. - # # A citation for a web resource used to generate a model response. # - # @param end_index [Integer] The index of the last character of the URL citation in the message. ... + # @param end_index [Integer] The index of the last character of the URL citation in the message. # - # @param start_index [Integer] The index of the first character of the URL citation in the message. ... + # @param start_index [Integer] The index of the first character of the URL citation in the message. # - # @param title [String] The title of the web resource. ... + # @param title [String] The title of the web resource. # - # @param url [String] The URL of the web resource. ... + # @param url [String] The URL of the web resource. # - # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. ... + # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. end class FilePath < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 5d053a5e..5c45cef7 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -3,28 +3,22 @@ module OpenAI module Models module Responses - # A tool that searches for relevant content from uploaded files. Learn more about - # the - # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # A tool that can be used to generate a response. module Tool extend OpenAI::Internal::Type::Union discriminator :type - # A tool that searches for relevant content from uploaded files. - # Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). variant :file_search, -> { OpenAI::Models::Responses::FileSearchTool } - # Defines a function in your own code the model can choose to call. Learn more - # about [function calling](https://platform.openai.com/docs/guides/function-calling). + # Defines a function in your own code the model can choose to call. Learn more about [function calling](https://platform.openai.com/docs/guides/function-calling). 
variant :function, -> { OpenAI::Models::Responses::FunctionTool } - # A tool that controls a virtual computer. Learn more about the - # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). + # A tool that controls a virtual computer. Learn more about the [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). variant :computer_use_preview, -> { OpenAI::Models::Responses::ComputerTool } - # This tool searches the web for relevant results to use in a response. - # Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). variant -> { OpenAI::Models::Responses::WebSearchTool } # @!method self.variants diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 0885c4db..04073cbc 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -5,10 +5,8 @@ module Models module Responses class WebSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute type - # The type of the web search tool. One of: - # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # The type of the web search tool. One of `web_search_preview` or + # `web_search_preview_2025_03_11`. # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Models::Responses::WebSearchTool::Type } @@ -21,6 +19,7 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel optional :search_context_size, enum: -> { OpenAI::Models::Responses::WebSearchTool::SearchContextSize } # @!attribute user_location + # The user's location. # # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] optional :user_location, -> { OpenAI::Models::Responses::WebSearchTool::UserLocation }, nil?: true @@ -33,16 +32,16 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # - # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of: ... + # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev + # ... # - # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the ... + # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search + # ... # - # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] + # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] The user's location. - # The type of the web search tool. One of: - # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # The type of the web search tool. One of `web_search_preview` or + # `web_search_preview_2025_03_11`. # # @see OpenAI::Models::Responses::WebSearchTool#type module Type @@ -82,41 +81,45 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # Free text input for the city of the user, e.g. `San Francisco`. 
#
 #   @return [String, nil]
-        optional :city, String
+        optional :city, String, nil?: true

         # @!attribute country
         #   The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of
         #   the user, e.g. `US`.
         #
         #   @return [String, nil]
-        optional :country, String
+        optional :country, String, nil?: true

         # @!attribute region
         #   Free text input for the region of the user, e.g. `California`.
         #
         #   @return [String, nil]
-        optional :region, String
+        optional :region, String, nil?: true

         # @!attribute timezone
         #   The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the
         #   user, e.g. `America/Los_Angeles`.
         #
         #   @return [String, nil]
-        optional :timezone, String
+        optional :timezone, String, nil?: true

         # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate)
         #   Some parameter documentations has been truncated, see
         #   {OpenAI::Models::Responses::WebSearchTool::UserLocation} for more details.
         #
-        #   @param city [String] Free text input for the city of the user, e.g. `San Francisco`. ...
+        #   The user's location.
+        #
+        #   @param city [String, nil] Free text input for the city of the user, e.g. `San Francisco`.
         #
-        #   @param country [String] The two-letter ...
+        #   @param country [String, nil] The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of t
+        #   ...
         #
-        #   @param region [String] Free text input for the region of the user, e.g. `California`. ...
+        #   @param region [String, nil] Free text input for the region of the user, e.g. `California`.
         #
-        #   @param timezone [String] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) ...
+        #   @param timezone [String, nil] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the user
+        #   ...
         #
-        #   @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. ...
+        #   @param type [Symbol, :approximate] The type of location approximation. Always `approximate`.
       end
     end
   end
 end
diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb
index f34671b0..13484438 100644
--- a/lib/openai/resources/images.rb
+++ b/lib/openai/resources/images.rb
@@ -50,14 +50,15 @@ def create_variation(params)
     # Creates an edited or extended image given one or more source images and a
     # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`.
     #
-    # @overload edit(image:, prompt:, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
+    # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {})
     #
-    # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array<Pathname, StringIO, IO, OpenAI::FilePart>] The image(s) to edit. Must be a supported image file or an array of images. For
-    # ...
+    # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array<Pathname, StringIO, IO, OpenAI::FilePart>] The image(s) to edit. Must be a supported image file or an array of images. ...
     #
     # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character
     # ...
     #
+    # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). ...
+    #
     # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind
     # ...
# diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index d27b0482..5d48095e 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -40,7 +40,7 @@ module OpenAI attr_writer :response_format # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. Does not work with `gpt-4o-mini-tts`. sig { returns(T.nilable(Float)) } attr_reader :speed @@ -77,7 +77,7 @@ module OpenAI # `wav`, and `pcm`. response_format: nil, # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. Does not work with `gpt-4o-mini-tts`. speed: nil, request_options: {} ); end diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 4ae05e29..54150702 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -6,10 +6,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. sig do returns( T.any( @@ -28,6 +31,16 @@ module OpenAI sig { returns(String) } attr_accessor :prompt + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol)) } + attr_accessor :background + # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than @@ -86,6 +99,7 @@ module OpenAI T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] ), prompt: String, + background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), @@ -98,14 +112,25 @@ module OpenAI .returns(T.attached_class) end def self.new( - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. 
You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. image:, # A text description of the desired image(s). The maximum length is 1000 # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. prompt:, + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + background: nil, # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than @@ -148,6 +173,7 @@ module OpenAI T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] ), prompt: String, + background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), @@ -161,10 +187,13 @@ module OpenAI end def to_hash; end - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. module Image extend OpenAI::Internal::Type::Union @@ -178,6 +207,27 @@ module OpenAI ) end + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Background) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = T.let(:transparent, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) + OPAQUE = T.let(:opaque, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Background::TaggedSymbol]) } + def self.values; end + end + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. 
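To see the new image-edit options end to end, here is a minimal usage sketch. It is illustrative only: the client setup, file path, prompt, and model choice are placeholder assumptions rather than part of this change.

```ruby
require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Edit an image with a transparent background. `background: :transparent`
# is only honored by `gpt-image-1`, and the output format must support
# alpha, i.e. `png` (the default) or `webp`.
edited = client.images.edit(
  image: Pathname("product-shot.png"),   # placeholder file path
  prompt: "Isolate the product on a transparent background",
  model: "gpt-image-1",
  background: :transparent,              # new enum: :transparent | :opaque | :auto
  size: :"1536x1024"                     # one of the newly added sizes
)
```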
@@ -236,6 +286,9 @@ module OpenAI SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + SIZE_1536X1024 = T.let(:"1536x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + SIZE_1024X1536 = T.let(:"1024x1536", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) } def self.values; end diff --git a/rbi/openai/models/responses/computer_tool.rbi b/rbi/openai/models/responses/computer_tool.rbi index 0223a583..99b52d59 100644 --- a/rbi/openai/models/responses/computer_tool.rbi +++ b/rbi/openai/models/responses/computer_tool.rbi @@ -5,11 +5,11 @@ module OpenAI module Responses class ComputerTool < OpenAI::Internal::Type::BaseModel # The height of the computer display. - sig { returns(Float) } + sig { returns(Integer) } attr_accessor :display_height # The width of the computer display. - sig { returns(Float) } + sig { returns(Integer) } attr_accessor :display_width # The type of computer environment to control. @@ -24,8 +24,8 @@ module OpenAI # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). sig do params( - display_height: Float, - display_width: Float, + display_height: Integer, + display_width: Integer, environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, type: Symbol ) @@ -45,8 +45,8 @@ module OpenAI override .returns( { - display_height: Float, - display_width: Float, + display_height: Integer, + display_width: Integer, environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, type: Symbol } @@ -61,8 +61,9 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } OrSymbol = T.type_alias { T.any(Symbol, String) } - MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + LINUX = T.let(:linux, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) diff --git a/rbi/openai/models/responses/file_search_tool.rbi b/rbi/openai/models/responses/file_search_tool.rbi index 12834382..005e1270 100644 --- a/rbi/openai/models/responses/file_search_tool.rbi +++ b/rbi/openai/models/responses/file_search_tool.rbi @@ -12,17 +12,9 @@ module OpenAI sig { returns(T::Array[String]) } attr_accessor :vector_store_ids - # A filter to apply based on file attributes. + # A filter to apply. sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } - attr_reader :filters - - sig do - params( - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) - ) - .void - end - attr_writer :filters + attr_accessor :filters # The maximum number of results to return. This number should be between 1 and 50 # inclusive. 
@@ -50,7 +42,9 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter), + filters: T.nilable( + T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) + ), max_num_results: Integer, ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::AnyHash), type: Symbol @@ -60,7 +54,7 @@ module OpenAI def self.new( # The IDs of the vector stores to search. vector_store_ids:, - # A filter to apply based on file attributes. + # A filter to apply. filters: nil, # The maximum number of results to return. This number should be between 1 and 50 # inclusive. @@ -76,7 +70,7 @@ module OpenAI { type: Symbol, vector_store_ids: T::Array[String], - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), + filters: T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)), max_num_results: Integer, ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions } @@ -84,7 +78,7 @@ module OpenAI end def to_hash; end - # A filter to apply based on file attributes. + # A filter to apply. module Filters extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/responses/function_tool.rbi b/rbi/openai/models/responses/function_tool.rbi index 5fa541bd..294f57f1 100644 --- a/rbi/openai/models/responses/function_tool.rbi +++ b/rbi/openai/models/responses/function_tool.rbi @@ -9,11 +9,11 @@ module OpenAI attr_accessor :name # A JSON schema object describing the parameters of the function. - sig { returns(T::Hash[Symbol, T.anything]) } + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } attr_accessor :parameters # Whether to enforce strict parameter validation. Default `true`. - sig { returns(T::Boolean) } + sig { returns(T.nilable(T::Boolean)) } attr_accessor :strict # The type of the function tool. Always `function`. @@ -31,8 +31,8 @@ module OpenAI sig do params( name: String, - parameters: T::Hash[Symbol, T.anything], - strict: T::Boolean, + parameters: T.nilable(T::Hash[Symbol, T.anything]), + strict: T.nilable(T::Boolean), description: T.nilable(String), type: Symbol ) @@ -56,8 +56,8 @@ module OpenAI .returns( { name: String, - parameters: T::Hash[Symbol, T.anything], - strict: T::Boolean, + parameters: T.nilable(T::Hash[Symbol, T.anything]), + strict: T.nilable(T::Boolean), type: Symbol, description: T.nilable(String) } diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 3dd10986..753835a4 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -64,6 +64,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). 
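The new `reasoning.encrypted_content` includable pairs with stateless use of the Responses API. A minimal sketch of opting into it (client setup, model name, and prompt are placeholder assumptions, not part of this change):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Request encrypted reasoning content on a stateless call, so the reasoning
# items can be replayed in a later turn without server-side storage.
response = client.responses.create(
  model: "o3-mini",                           # placeholder model name
  input: "Check this proof for gaps.",
  store: false,                               # nothing retained server-side
  include: [:"reasoning.encrypted_content"]   # the includable added in this change
)

reasoning_items = response.output.select { |item| item.type == :reasoning }
```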
sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } attr_accessor :include @@ -339,6 +344,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). include: nil, # Inserts a system (or developer) message as the first item in the model's # context. diff --git a/rbi/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi index a6d5df86..cc0efd7c 100644 --- a/rbi/openai/models/responses/response_includable.rbi +++ b/rbi/openai/models/responses/response_includable.rbi @@ -11,6 +11,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -26,6 +31,8 @@ module OpenAI :"computer_call_output.output.image_url", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol ) + REASONING_ENCRYPTED_CONTENT = + T.let(:"reasoning.encrypted_content", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) } def self.values; end diff --git a/rbi/openai/models/responses/response_input_file.rbi b/rbi/openai/models/responses/response_input_file.rbi index 74a535c7..b0dbcd3a 100644 --- a/rbi/openai/models/responses/response_input_file.rbi +++ b/rbi/openai/models/responses/response_input_file.rbi @@ -17,10 +17,7 @@ module OpenAI # The ID of the file to be sent to the model. sig { returns(T.nilable(String)) } - attr_reader :file_id - - sig { params(file_id: String).void } - attr_writer :file_id + attr_accessor :file_id # The name of the file to be sent to the model. sig { returns(T.nilable(String)) } @@ -31,7 +28,8 @@ module OpenAI # A file input to the model. sig do - params(file_data: String, file_id: String, filename: String, type: Symbol).returns(T.attached_class) + params(file_data: String, file_id: T.nilable(String), filename: String, type: Symbol) + .returns(T.attached_class) end def self.new( # The content of the file to be sent to the model. @@ -43,7 +41,9 @@ module OpenAI # The type of the input item. Always `input_file`. 
type: :input_file ); end - sig { override.returns({type: Symbol, file_data: String, file_id: String, filename: String}) } + sig do + override.returns({type: Symbol, file_data: String, file_id: T.nilable(String), filename: String}) + end def to_hash; end end end diff --git a/rbi/openai/models/responses/response_input_image.rbi b/rbi/openai/models/responses/response_input_image.rbi index d058b10d..ecac54d3 100644 --- a/rbi/openai/models/responses/response_input_image.rbi +++ b/rbi/openai/models/responses/response_input_image.rbi @@ -66,8 +66,8 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } OrSymbol = T.type_alias { T.any(Symbol, String) } - HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) + HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) } diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index 6595bbdb..9c3dec78 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -170,10 +170,7 @@ module OpenAI # The ID of the computer tool call output. sig { returns(T.nilable(String)) } - attr_reader :id - - sig { params(id: String).void } - attr_writer :id + attr_accessor :id # The safety checks reported by the API that have been acknowledged by the # developer. @@ -184,42 +181,28 @@ module OpenAI ) ) end - attr_reader :acknowledged_safety_checks - - sig do - params( - acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) - ] - ) - .void - end - attr_writer :acknowledged_safety_checks + attr_accessor :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } - attr_reader :status - - sig { params(status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol).void } - attr_writer :status + attr_accessor :status # The output of a computer tool call. 
sig do params( call_id: String, output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), - id: String, - acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) - ], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol, + id: T.nilable(String), + acknowledged_safety_checks: T.nilable( + T::Array[ + T.any( + OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Internal::AnyHash + ) + ] + ), + status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol), type: Symbol ) .returns(T.attached_class) @@ -247,9 +230,11 @@ module OpenAI call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: Symbol, - id: String, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol + id: T.nilable(String), + acknowledged_safety_checks: T.nilable( + T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + ), + status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) } ) end @@ -261,24 +246,30 @@ module OpenAI attr_accessor :id # The type of the pending safety check. - sig { returns(String) } + sig { returns(T.nilable(String)) } attr_accessor :code # Details about the pending safety check. - sig { returns(String) } + sig { returns(T.nilable(String)) } attr_accessor :message # A pending safety check for the computer call. - sig { params(id: String, code: String, message: String).returns(T.attached_class) } + sig do + params( + id: String, + code: T.nilable(String), + message: T.nilable(String) + ).returns(T.attached_class) + end def self.new( # The ID of the pending safety check. id:, # The type of the pending safety check. - code:, + code: nil, # Details about the pending safety check. - message: + message: nil ); end - sig { override.returns({id: String, code: String, message: String}) } + sig { override.returns({id: String, code: T.nilable(String), message: T.nilable(String)}) } def to_hash; end end @@ -325,26 +316,20 @@ module OpenAI # The unique ID of the function tool call output. Populated when this item is # returned via API. sig { returns(T.nilable(String)) } - attr_reader :id - - sig { params(id: String).void } - attr_writer :id + attr_accessor :id # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } - attr_reader :status - - sig { params(status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol).void } - attr_writer :status + attr_accessor :status # The output of a function tool call. 
sig do params( call_id: String, output: String, - id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol, + id: T.nilable(String), + status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol), type: Symbol ) .returns(T.attached_class) @@ -370,8 +355,8 @@ module OpenAI call_id: String, output: String, type: Symbol, - id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + id: T.nilable(String), + status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) } ) end @@ -410,19 +395,47 @@ module OpenAI attr_accessor :id # The type of item to reference. Always `item_reference`. - sig { returns(Symbol) } + sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol)) } attr_accessor :type # An internal identifier for an item to reference. - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params( + id: String, + type: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol) + ) + .returns(T.attached_class) + end def self.new( # The ID of the item to reference. id:, # The type of item to reference. Always `item_reference`. - type: :item_reference + type: nil ); end - sig { override.returns({id: String, type: Symbol}) } + sig do + override + .returns( + {id: String, type: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol)} + ) + end def to_hash; end + + # The type of item to reference. Always `item_reference`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ITEM_REFERENCE = + T.let(:item_reference, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol) + + sig do + override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol]) + end + def self.values; end + end end sig do diff --git a/rbi/openai/models/responses/response_reasoning_item.rbi b/rbi/openai/models/responses/response_reasoning_item.rbi index 7b83bf50..6c5006fd 100644 --- a/rbi/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/openai/models/responses/response_reasoning_item.rbi @@ -16,6 +16,11 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # The encrypted content of the reasoning item - populated when a response is + # generated with `reasoning.encrypted_content` in the `include` parameter. + sig { returns(T.nilable(String)) } + attr_accessor :encrypted_content + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } @@ -25,11 +30,14 @@ module OpenAI attr_writer :status # A description of the chain of thought used by a reasoning model while generating - # a response. + # a response. Be sure to include these items in your `input` to the Responses API + # for subsequent turns of a conversation if you are manually + # [managing context](https://platform.openai.com/docs/guides/conversation-state). 
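To make the context-management note above concrete, a hedged sketch of a manual multi-turn loop follows. It reuses the placeholder client and model from the earlier sketches and assumes, as is conventional in this SDK, that previously returned output items (model instances or hashes) are accepted in `input`:

```ruby
# Carry the whole transcript, including reasoning items, into the next turn.
history = [{role: "user", content: "Draft a migration plan."}]

first = client.responses.create(
  model: "o3-mini",                           # placeholder model name
  input: history,
  store: false,
  include: [:"reasoning.encrypted_content"]
)

history.concat(first.output)                  # reasoning + message items from turn one
history << {role: "user", content: "Condense it to five steps."}

second = client.responses.create(
  model: "o3-mini",
  input: history,
  store: false,
  include: [:"reasoning.encrypted_content"]
)
```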
sig do params( id: String, summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Internal::AnyHash)], + encrypted_content: T.nilable(String), status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol ) @@ -40,6 +48,9 @@ module OpenAI id:, # Reasoning text contents. summary:, + # The encrypted content of the reasoning item - populated when a response is + # generated with `reasoning.encrypted_content` in the `include` parameter. + encrypted_content: nil, # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. status: nil, @@ -53,6 +64,7 @@ module OpenAI id: String, summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], type: Symbol, + encrypted_content: T.nilable(String), status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol } ) diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index 292e8c9d..dd20795d 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -3,9 +3,7 @@ module OpenAI module Models module Responses - # A tool that searches for relevant content from uploaded files. Learn more about - # the - # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # A tool that can be used to generate a response. module Tool extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/responses/web_search_tool.rbi b/rbi/openai/models/responses/web_search_tool.rbi index fa6955fd..032f4d13 100644 --- a/rbi/openai/models/responses/web_search_tool.rbi +++ b/rbi/openai/models/responses/web_search_tool.rbi @@ -4,10 +4,8 @@ module OpenAI module Models module Responses class WebSearchTool < OpenAI::Internal::Type::BaseModel - # The type of the web search tool. One of: - # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # The type of the web search tool. One of `web_search_preview` or + # `web_search_preview_2025_03_11`. sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } attr_accessor :type @@ -19,6 +17,7 @@ module OpenAI sig { params(search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol).void } attr_writer :search_context_size + # The user's location. sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)) } attr_reader :user_location @@ -42,14 +41,13 @@ module OpenAI .returns(T.attached_class) end def self.new( - # The type of the web search tool. One of: - # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # The type of the web search tool. One of `web_search_preview` or + # `web_search_preview_2025_03_11`. type:, # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. search_context_size: nil, + # The user's location. user_location: nil ); end sig do @@ -64,10 +62,8 @@ module OpenAI end def to_hash; end - # The type of the web search tool. One of: - # - # - `web_search_preview` - # - `web_search_preview_2025_03_11` + # The type of the web search tool. One of `web_search_preview` or + # `web_search_preview_2025_03_11`. module Type extend OpenAI::Internal::Type::Enum @@ -107,36 +103,31 @@ module OpenAI # Free text input for the city of the user, e.g. `San Francisco`. 
sig { returns(T.nilable(String)) } - attr_reader :city - - sig { params(city: String).void } - attr_writer :city + attr_accessor :city # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. sig { returns(T.nilable(String)) } - attr_reader :country - - sig { params(country: String).void } - attr_writer :country + attr_accessor :country # Free text input for the region of the user, e.g. `California`. sig { returns(T.nilable(String)) } - attr_reader :region - - sig { params(region: String).void } - attr_writer :region + attr_accessor :region # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. sig { returns(T.nilable(String)) } - attr_reader :timezone - - sig { params(timezone: String).void } - attr_writer :timezone + attr_accessor :timezone + # The user's location. sig do - params(city: String, country: String, region: String, timezone: String, type: Symbol) + params( + city: T.nilable(String), + country: T.nilable(String), + region: T.nilable(String), + timezone: T.nilable(String), + type: Symbol + ) .returns(T.attached_class) end def self.new( @@ -154,7 +145,16 @@ module OpenAI type: :approximate ); end sig do - override.returns({type: Symbol, city: String, country: String, region: String, timezone: String}) + override + .returns( + { + type: Symbol, + city: T.nilable(String), + country: T.nilable(String), + region: T.nilable(String), + timezone: T.nilable(String) + } + ) end def to_hash; end end diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi index 16739bc4..4a2ffa44 100644 --- a/rbi/openai/resources/audio/speech.rbi +++ b/rbi/openai/resources/audio/speech.rbi @@ -35,7 +35,7 @@ module OpenAI # `wav`, and `pcm`. response_format: nil, # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. + # the default. Does not work with `gpt-4o-mini-tts`. speed: nil, request_options: {} ); end diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index 13ac0077..5341ac2b 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -50,6 +50,7 @@ module OpenAI T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] ), prompt: String, + background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), n: T.nilable(Integer), @@ -62,14 +63,25 @@ module OpenAI .returns(OpenAI::Models::ImagesResponse) end def edit( - # The image(s) to edit. Must be a supported image file or an array of images. For - # `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. For `dall-e-2`, you can only provide one image, and it should be a square - # `png` file less than 4MB. + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 25MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. image:, # A text description of the desired image(s). The maximum length is 1000 # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. prompt:, + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. 
Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + background: nil, # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index ba349b4b..e0dec08f 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -104,6 +104,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). include: nil, # Inserts a system (or developer) message as the first item in the model's # context. @@ -346,6 +351,11 @@ module OpenAI # - `message.input_image.image_url`: Include image urls from the input message. # - `computer_call_output.output.image_url`: Include image urls from the computer # call output. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). include: nil, # Inserts a system (or developer) message as the first item in the model's # context. diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index bf702f1e..b8499e7e 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -4,6 +4,7 @@ module OpenAI { image: OpenAI::Models::ImageEditParams::image, prompt: String, + background: OpenAI::Models::ImageEditParams::background?, mask: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::ImageEditParams::model?, n: Integer?, @@ -22,6 +23,8 @@ module OpenAI attr_accessor prompt: String + attr_accessor background: OpenAI::Models::ImageEditParams::background? + attr_reader mask: (Pathname | StringIO | IO | OpenAI::FilePart)? 
def mask=: ( @@ -45,6 +48,7 @@ module OpenAI def initialize: ( image: OpenAI::Models::ImageEditParams::image, prompt: String, + ?background: OpenAI::Models::ImageEditParams::background?, ?mask: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, @@ -72,6 +76,18 @@ module OpenAI StringArray: OpenAI::Internal::Type::Converter end + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::background] + end + type model = String | OpenAI::Models::image_model module Model @@ -105,7 +121,13 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::response_format] end - type size = :"256x256" | :"512x512" | :"1024x1024" + type size = + :"256x256" + | :"512x512" + | :"1024x1024" + | :"1536x1024" + | :"1024x1536" + | :auto module Size extend OpenAI::Internal::Type::Enum @@ -113,6 +135,9 @@ module OpenAI SIZE_256X256: :"256x256" SIZE_512X512: :"512x512" SIZE_1024X1024: :"1024x1024" + SIZE_1536X1024: :"1536x1024" + SIZE_1024X1536: :"1024x1536" + AUTO: :auto def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::size] end diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index f30d8909..f362244f 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -3,37 +3,38 @@ module OpenAI module Responses type computer_tool = { - display_height: Float, - display_width: Float, + display_height: Integer, + display_width: Integer, environment: OpenAI::Models::Responses::ComputerTool::environment, type: :computer_use_preview } class ComputerTool < OpenAI::Internal::Type::BaseModel - attr_accessor display_height: Float + attr_accessor display_height: Integer - attr_accessor display_width: Float + attr_accessor display_width: Integer attr_accessor environment: OpenAI::Models::Responses::ComputerTool::environment attr_accessor type: :computer_use_preview def initialize: ( - display_height: Float, - display_width: Float, + display_height: Integer, + display_width: Integer, environment: OpenAI::Models::Responses::ComputerTool::environment, ?type: :computer_use_preview ) -> void def to_hash: -> OpenAI::Models::Responses::computer_tool - type environment = :mac | :windows | :ubuntu | :browser + type environment = :windows | :mac | :linux | :ubuntu | :browser module Environment extend OpenAI::Internal::Type::Enum - MAC: :mac WINDOWS: :windows + MAC: :mac + LINUX: :linux UBUNTU: :ubuntu BROWSER: :browser diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 6c174053..07747db5 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -5,7 +5,7 @@ module OpenAI { type: :file_search, vector_store_ids: ::Array[String], - filters: OpenAI::Models::Responses::FileSearchTool::filters, + filters: OpenAI::Models::Responses::FileSearchTool::filters?, max_num_results: Integer, ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions } @@ -15,11 +15,7 @@ module OpenAI attr_accessor vector_store_ids: ::Array[String] - attr_reader filters: OpenAI::Models::Responses::FileSearchTool::filters? 
- - def filters=: ( - OpenAI::Models::Responses::FileSearchTool::filters - ) -> OpenAI::Models::Responses::FileSearchTool::filters + attr_accessor filters: OpenAI::Models::Responses::FileSearchTool::filters? attr_reader max_num_results: Integer? @@ -33,7 +29,7 @@ module OpenAI def initialize: ( vector_store_ids: ::Array[String], - ?filters: OpenAI::Models::Responses::FileSearchTool::filters, + ?filters: OpenAI::Models::Responses::FileSearchTool::filters?, ?max_num_results: Integer, ?ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, ?type: :file_search diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index f69630ac..3121cb1d 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -4,8 +4,8 @@ module OpenAI type function_tool = { name: String, - parameters: ::Hash[Symbol, top], - strict: bool, + parameters: ::Hash[Symbol, top]?, + strict: bool?, type: :function, description: String? } @@ -13,9 +13,9 @@ module OpenAI class FunctionTool < OpenAI::Internal::Type::BaseModel attr_accessor name: String - attr_accessor parameters: ::Hash[Symbol, top] + attr_accessor parameters: ::Hash[Symbol, top]? - attr_accessor strict: bool + attr_accessor strict: bool? attr_accessor type: :function @@ -23,8 +23,8 @@ module OpenAI def initialize: ( name: String, - parameters: ::Hash[Symbol, top], - strict: bool, + parameters: ::Hash[Symbol, top]?, + strict: bool?, ?description: String?, ?type: :function ) -> void diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index 0634f434..4f37a1b1 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -5,6 +5,7 @@ module OpenAI :"file_search_call.results" | :"message.input_image.image_url" | :"computer_call_output.output.image_url" + | :"reasoning.encrypted_content" module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -12,6 +13,7 @@ module OpenAI FILE_SEARCH_CALL_RESULTS: :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" + REASONING_ENCRYPTED_CONTENT: :"reasoning.encrypted_content" def self?.values: -> ::Array[OpenAI::Models::Responses::response_includable] end diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index 75f0306c..20c40b3a 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -5,7 +5,7 @@ module OpenAI { type: :input_file, file_data: String, - file_id: String, + file_id: String?, filename: String } @@ -16,9 +16,7 @@ module OpenAI def file_data=: (String) -> String - attr_reader file_id: String? - - def file_id=: (String) -> String + attr_accessor file_id: String? attr_reader filename: String? 
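The `REASONING_ENCRYPTED_CONTENT` member added to `response_includable.rbs` above is what the stateless multi-turn flow documented in `responses.rbi` keys off of. A rough sketch of the intended round-trip, assuming prior output items can be replayed through `input` (the model name and prompts are hypothetical, not part of this patch):

    require "openai"

    openai = OpenAI::Client.new

    # Turn 1: nothing is stored server-side (store: false), so ask for the
    # encrypted reasoning payload alongside the regular output items.
    first = openai.responses.create(
      model: "o4-mini",
      input: "Work out 23 * 17 step by step, then give the product.",
      store: false,
      include: [:"reasoning.encrypted_content"]
    )

    # Turn 2: feed the prior output (including the reasoning item and its
    # encrypted_content) back in, so the model can resume statelessly.
    second = openai.responses.create(
      model: "o4-mini",
      input: first.output + [{role: :user, content: "Now halve it."}],
      store: false,
      include: [:"reasoning.encrypted_content"]
    )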
@@ -26,7 +24,7 @@ module OpenAI def initialize: ( ?file_data: String, - ?file_id: String, + ?file_id: String?, ?filename: String, ?type: :input_file ) -> void diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index f6e47720..f9a423cc 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -27,13 +27,13 @@ module OpenAI def to_hash: -> OpenAI::Models::Responses::response_input_image - type detail = :high | :low | :auto + type detail = :low | :high | :auto module Detail extend OpenAI::Internal::Type::Enum - HIGH: :high LOW: :low + HIGH: :high AUTO: :auto def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail] diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index 35ad1d08..f5de100d 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -91,9 +91,9 @@ module OpenAI call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, type: :computer_call_output, - id: String, - acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status + id: String?, + acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? } class ComputerCallOutput < OpenAI::Internal::Type::BaseModel @@ -103,44 +103,38 @@ module OpenAI attr_accessor type: :computer_call_output - attr_reader id: String? - - def id=: (String) -> String - - attr_reader acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]? + attr_accessor id: String? - def acknowledged_safety_checks=: ( - ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] - ) -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + attr_accessor acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]? - attr_reader status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? - - def status=: ( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status - ) -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? 
def initialize: ( call_id: String, output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - ?id: String, - ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck], - ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status, + ?id: String?, + ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, + ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?, ?type: :computer_call_output ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::computer_call_output type acknowledged_safety_check = - { id: String, code: String, message: String } + { id: String, code: String?, message: String? } class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor code: String + attr_accessor code: String? - attr_accessor message: String + attr_accessor message: String? - def initialize: (id: String, code: String, message: String) -> void + def initialize: ( + id: String, + ?code: String?, + ?message: String? + ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::acknowledged_safety_check end @@ -163,8 +157,8 @@ module OpenAI call_id: String, output: String, type: :function_call_output, - id: String, - status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status + id: String?, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? } class FunctionCallOutput < OpenAI::Internal::Type::BaseModel @@ -174,21 +168,15 @@ module OpenAI attr_accessor type: :function_call_output - attr_reader id: String? + attr_accessor id: String? - def id=: (String) -> String - - attr_reader status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? - - def status=: ( - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status - ) -> OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? def initialize: ( call_id: String, output: String, - ?id: String, - ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status, + ?id: String?, + ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?, ?type: :function_call_output ) -> void @@ -207,16 +195,33 @@ module OpenAI end end - type item_reference = { id: String, type: :item_reference } + type item_reference = + { + id: String, + type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? + } class ItemReference < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor type: :item_reference + attr_accessor type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? - def initialize: (id: String, ?type: :item_reference) -> void + def initialize: ( + id: String, + ?type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? 
+ ) -> void def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::item_reference + + type type_ = :item_reference + + module Type + extend OpenAI::Internal::Type::Enum + + ITEM_REFERENCE: :item_reference + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_] + end end def self?.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index e83fbe06..19c0882c 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -6,6 +6,7 @@ module OpenAI id: String, summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], type: :reasoning, + encrypted_content: String?, status: OpenAI::Models::Responses::ResponseReasoningItem::status } @@ -16,6 +17,8 @@ module OpenAI attr_accessor type: :reasoning + attr_accessor encrypted_content: String? + attr_reader status: OpenAI::Models::Responses::ResponseReasoningItem::status? def status=: ( @@ -25,6 +28,7 @@ module OpenAI def initialize: ( id: String, summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], + ?encrypted_content: String?, ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, ?type: :reasoning ) -> void diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index 92f25bd0..6b68aad5 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -53,36 +53,28 @@ module OpenAI type user_location = { type: :approximate, - city: String, - country: String, - region: String, - timezone: String + city: String?, + country: String?, + region: String?, + timezone: String? } class UserLocation < OpenAI::Internal::Type::BaseModel attr_accessor type: :approximate - attr_reader city: String? + attr_accessor city: String? - def city=: (String) -> String + attr_accessor country: String? - attr_reader country: String? + attr_accessor region: String? - def country=: (String) -> String - - attr_reader region: String? - - def region=: (String) -> String - - attr_reader timezone: String? - - def timezone=: (String) -> String + attr_accessor timezone: String? 
def initialize: ( - ?city: String, - ?country: String, - ?region: String, - ?timezone: String, + ?city: String?, + ?country: String?, + ?region: String?, + ?timezone: String?, ?type: :approximate ) -> void diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index d1bddd09..20b05ba5 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -14,6 +14,7 @@ module OpenAI def edit: ( image: OpenAI::Models::ImageEditParams::image, prompt: String, + ?background: OpenAI::Models::ImageEditParams::background?, ?mask: Pathname | StringIO | IO | OpenAI::FilePart, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, From ee0078007fedefaea5dfc0cc6ec83d0aab763627 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 08:27:47 +0000 Subject: [PATCH 177/295] chore(internal): codegen related update --- Gemfile.lock | 20 ++++++++++---------- lib/openai/internal/stream.rb | 8 ++++---- lib/openai/internal/transport/base_client.rb | 4 ++-- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index d1e8f5ec..868b76e9 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -33,7 +33,7 @@ GEM addressable (2.8.7) public_suffix (>= 2.0.2, < 7.0) ast (2.4.3) - async (2.23.1) + async (2.24.0) console (~> 1.29) fiber-annotation io-event (~> 1.9) @@ -105,7 +105,7 @@ GEM redcarpet (3.6.1) regexp_parser (2.10.0) rexml (3.4.1) - rubocop (1.75.4) + rubocop (1.75.5) json (~> 2.3) language_server-protocol (~> 3.17.0.2) lint_roller (~> 1.1.0) @@ -121,13 +121,13 @@ GEM prism (~> 1.4) ruby-progressbar (1.13.0) securerandom (0.4.1) - sorbet (0.5.12053) - sorbet-static (= 0.5.12053) - sorbet-runtime (0.5.12053) - sorbet-static (0.5.12053-x86_64-linux) - sorbet-static-and-runtime (0.5.12053) - sorbet (= 0.5.12053) - sorbet-runtime (= 0.5.12053) + sorbet (0.5.12060) + sorbet-static (= 0.5.12060) + sorbet-runtime (0.5.12060) + sorbet-static (0.5.12060-x86_64-linux) + sorbet-static-and-runtime (0.5.12060) + sorbet (= 0.5.12060) + sorbet-runtime (= 0.5.12060) spoom (1.6.1) erubi (>= 1.10.0) prism (>= 0.28.0) @@ -151,7 +151,7 @@ GEM strscan (>= 1.0.0) terminal-table (>= 2, < 5) uri (>= 0.12.0) - strscan (3.1.4) + strscan (3.1.5) syntax_tree (6.2.0) prettier_print (>= 1.2.0) tapioca (0.16.11) diff --git a/lib/openai/internal/stream.rb b/lib/openai/internal/stream.rb index ad1f7a1d..2d3b9ac3 100644 --- a/lib/openai/internal/stream.rb +++ b/lib/openai/internal/stream.rb @@ -23,17 +23,17 @@ class Stream next if consume case msg - in { data: String => data } if data.start_with?("[DONE]") + in {data: String => data} if data.start_with?("[DONE]") consume = true next - in { data: String => data } + in {data: String => data} case JSON.parse(data, symbolize_names: true) - in { error: error } + in {error: error} message = case error in String error - in { message: String => m } + in {message: String => m} m else "An error occurred during streaming" diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index b3bb579b..0d15ff3d 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -458,9 +458,9 @@ def request(req) decoded = OpenAI::Internal::Util.decode_content(response, stream: stream) case req - in { stream: Class => st } + in {stream: Class => st} st.new(model: model, url: url, status: status, response: response, stream: decoded) - in { page: Class => page } + in {page: 
Class => page}
            page.new(client: self, req: req, headers: response, page_data: decoded)
          else
            unwrapped = OpenAI::Internal::Util.dig(decoded, req[:unwrap])

From 4ce6070383358791d406f78f8ddf5fbd465e00d8 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 5 May 2025 15:30:25 +0000
Subject: [PATCH 178/295] docs: illustrate environmental defaults for auth
 variables

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b99f8d09..09cc7a5a 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ require "bundler/setup"
 require "openai"
 
 openai = OpenAI::Client.new(
-  api_key: "My API Key" # defaults to ENV["OPENAI_API_KEY"]
+  api_key: ENV["OPENAI_API_KEY"] # This is the default and can be omitted
 )
 
 chat_completion = openai.chat.completions.create(

From 43d607b0fbe6cf812dce6b05d321ab6b63ff3fc2 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 5 May 2025 22:18:45 +0000
Subject: [PATCH 179/295] chore(internal): codegen related update

---
 lib/openai/internal/util.rb       |  2 +-
 rbi/openai/internal/util.rbi      |  2 +-
 test/openai/internal/util_test.rb | 16 ++++++++++++++++
 3 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb
index e6424124..74239da5 100644
--- a/lib/openai/internal/util.rb
+++ b/lib/openai/internal/util.rb
@@ -470,7 +470,7 @@ def writable_enum(&blk)
       # @type [Regexp]
       JSON_CONTENT = %r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}
       # @type [Regexp]
-      JSONL_CONTENT = %r{^application/(?:x-)?jsonl}
+      JSONL_CONTENT = %r{^application/(?:x-(?:n|l)djson|(?:x-)?jsonl)}
 
      class << self
        # @api private
diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi
index c86a9895..594fae53 100644
--- a/rbi/openai/internal/util.rbi
+++ b/rbi/openai/internal/util.rbi
@@ -187,7 +187,7 @@ module OpenAI
     end
 
     JSON_CONTENT = T.let(%r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}, Regexp)
-    JSONL_CONTENT = T.let(%r{^application/(?:x-)?jsonl}, Regexp)
+    JSONL_CONTENT = T.let(%r{^application/(?:x-(?:n|l)djson|(?:x-)?jsonl)}, Regexp)
 
     class << self
       # @api private
diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb
index 565fe013..76f2f9f5 100644
--- a/test/openai/internal/util_test.rb
+++ b/test/openai/internal/util_test.rb
@@ -172,6 +172,22 @@ def test_json_content
       end
     end
   end
+
+  def test_jsonl_content
+    cases = {
+      "application/x-ndjson" => true,
+      "application/x-ldjson" => true,
+      "application/jsonl" => true,
+      "application/x-jsonl" => true,
+      "application/json" => false,
+      "application/vnd.api+json" => false
+    }
+    cases.each do |header, verdict|
+      assert_pattern do
+        OpenAI::Internal::Util::JSONL_CONTENT.match?(header) => ^verdict
+      end
+    end
+  end
 end
 
 class OpenAI::Test::UtilFormDataEncodingTest < Minitest::Test

From e1a0e0f8f0a06376427b742049fd02d8cded2f6f Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 6 May 2025 13:58:31 +0000
Subject: [PATCH 180/295] chore(internal): codegen related update

---
 Gemfile.lock | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/Gemfile.lock b/Gemfile.lock
index 868b76e9..065e1cd1 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -121,13 +121,13 @@ GEM
       prism (~> 1.4)
     ruby-progressbar (1.13.0)
     securerandom (0.4.1)
-    sorbet (0.5.12060)
-      sorbet-static (= 0.5.12060)
-    sorbet-runtime 
(0.5.12060) - sorbet-static (0.5.12060-x86_64-linux) - sorbet-static-and-runtime (0.5.12060) - sorbet (= 0.5.12060) - sorbet-runtime (= 0.5.12060) + sorbet (0.5.12067) + sorbet-static (= 0.5.12067) + sorbet-runtime (0.5.12067) + sorbet-static (0.5.12067-x86_64-linux) + sorbet-static-and-runtime (0.5.12067) + sorbet (= 0.5.12067) + sorbet-runtime (= 0.5.12067) spoom (1.6.1) erubi (>= 1.10.0) prism (>= 0.28.0) From b7ee198613b53a38e06dbfbf5ecb20f9806807d8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 6 May 2025 15:20:02 +0000 Subject: [PATCH 181/295] fix(internal): fix formatting script for macos --- Rakefile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Rakefile b/Rakefile index 661da932..e8317577 100644 --- a/Rakefile +++ b/Rakefile @@ -58,12 +58,13 @@ end desc("Format `*.rbs`") multitask(:"format:syntax_tree") do find = %w[find ./sig -type f -name *.rbs -print0] - inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? %w[-i''] : %w[-i] + inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? ["-i", ""] : %w[-i] uuid = SecureRandom.uuid # `syntax_tree` has trouble with `rbs`'s class & module aliases - sed = xargs + %w[sed -E] + inplace + %w[-e] + sed_bin = /darwin/ =~ RUBY_PLATFORM ? "/usr/bin/sed" : "sed" + sed = xargs + [sed_bin, "-E", *inplace, "-e"] # annotate unprocessable aliases with a unique comment pre = sed + ["s/(class|module) ([^ ]+) = (.+$)/# \\1 #{uuid}\\n\\2: \\3/", "--"] fmt = xargs + %w[stree write --plugin=rbs --] From 25713b598296b4c6861455fa1ae3417bf7e448ab Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 6 May 2025 16:38:34 +0000 Subject: [PATCH 182/295] chore: revert ignoring Gemfile.lock --- .gitignore | 1 - Gemfile.lock | 2 +- Rakefile | 2 +- scripts/bootstrap | 1 - 4 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index edaa164e..3d26ceed 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,4 @@ bin/tapioca Brewfile.lock.json doc/ -Gemfile.lock sorbet/tapioca/* diff --git a/Gemfile.lock b/Gemfile.lock index 065e1cd1..15c4f091 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,6 +1,6 @@ GIT remote: https://github.com/stainless-api/syntax_tree-rbs.git - revision: c30b50219918be7cfe3ef803a00b59d1e77fcada + revision: 140eb3ba2ff4b959b345ac2a7927cd758a9f1284 branch: main specs: syntax_tree-rbs (1.0.0) diff --git a/Rakefile b/Rakefile index e8317577..db2c83ed 100644 --- a/Rakefile +++ b/Rakefile @@ -11,7 +11,7 @@ require "rubocop/rake_task" tapioca = "sorbet/tapioca" ignore_file = ".ignore" -CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/ Gemfile.lock], *FileList["*.gem"], ignore_file) +CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca) diff --git a/scripts/bootstrap b/scripts/bootstrap index 9bf05537..cc31aa85 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -13,5 +13,4 @@ fi echo "==> Installing Ruby dependencies…" -rm -fr -v -- Gemfile.lock exec -- bundle install "$@" From b96e3eca0bb4f3740f2f057abab92b05a0615814 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 7 May 2025 14:10:17 +0000 Subject: [PATCH 183/295] fix(internal): ensure formatting always uses c.utf-8 locale --- .rubocop.yml | 10 + Gemfile.lock | 2 +- README.md | 10 +- Rakefile | 27 +- lib/openai.rb | 1 + lib/openai/file_part.rb | 4 +- 
lib/openai/internal/type/enum.rb | 8 +- .../internal/type/request_parameters.rb | 8 +- lib/openai/internal/type/union.rb | 8 +- lib/openai/internal/util.rb | 2 +- lib/openai/models/all_models.rb | 6 +- .../models/audio/speech_create_params.rb | 49 +- lib/openai/models/audio/transcription.rb | 9 +- .../audio/transcription_create_params.rb | 38 +- .../audio/transcription_create_response.rb | 6 +- .../models/audio/transcription_segment.rb | 5 +- .../audio/transcription_stream_event.rb | 6 +- .../audio/transcription_text_delta_event.rb | 21 +- .../audio/transcription_text_done_event.rb | 21 +- .../models/audio/transcription_verbose.rb | 12 +- .../models/audio/translation_create_params.rb | 21 +- .../audio/translation_create_response.rb | 6 +- .../models/audio/translation_verbose.rb | 6 +- lib/openai/models/batch.rb | 34 +- lib/openai/models/batch_create_params.rb | 18 +- lib/openai/models/batch_list_params.rb | 2 - lib/openai/models/beta/assistant.rb | 60 +- .../models/beta/assistant_create_params.rb | 124 +- .../models/beta/assistant_list_params.rb | 10 +- .../beta/assistant_response_format_option.rb | 8 +- .../models/beta/assistant_stream_event.rb | 290 +++-- lib/openai/models/beta/assistant_tool.rb | 8 +- .../models/beta/assistant_tool_choice.rb | 14 +- .../beta/assistant_tool_choice_option.rb | 6 +- .../models/beta/assistant_update_params.rb | 143 ++- lib/openai/models/beta/file_search_tool.rb | 33 +- lib/openai/models/beta/function_tool.rb | 6 +- .../models/beta/message_stream_event.rb | 65 +- .../models/beta/run_step_stream_event.rb | 83 +- lib/openai/models/beta/run_stream_event.rb | 114 +- lib/openai/models/beta/thread.rb | 37 +- .../beta/thread_create_and_run_params.rb | 255 ++-- .../models/beta/thread_create_params.rb | 144 +-- lib/openai/models/beta/thread_stream_event.rb | 9 +- .../models/beta/thread_update_params.rb | 35 +- lib/openai/models/beta/threads/annotation.rb | 6 +- .../models/beta/threads/annotation_delta.rb | 6 +- .../beta/threads/file_citation_annotation.rb | 8 +- .../threads/file_citation_delta_annotation.rb | 8 +- .../beta/threads/file_path_annotation.rb | 8 +- .../threads/file_path_delta_annotation.rb | 8 +- lib/openai/models/beta/threads/image_file.rb | 12 +- .../beta/threads/image_file_content_block.rb | 6 +- .../models/beta/threads/image_file_delta.rb | 12 +- .../beta/threads/image_file_delta_block.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 12 +- .../beta/threads/image_url_content_block.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 12 +- .../beta/threads/image_url_delta_block.rb | 6 +- lib/openai/models/beta/threads/message.rb | 70 +- .../models/beta/threads/message_content.rb | 10 +- .../beta/threads/message_content_delta.rb | 10 +- .../threads/message_content_part_param.rb | 8 +- .../beta/threads/message_create_params.rb | 44 +- .../models/beta/threads/message_delta.rb | 14 +- .../beta/threads/message_delta_event.rb | 6 +- .../beta/threads/message_list_params.rb | 12 +- .../beta/threads/message_update_params.rb | 2 +- .../required_action_function_tool_call.rb | 13 +- lib/openai/models/beta/threads/run.rb | 131 +-- .../models/beta/threads/run_create_params.rb | 139 ++- .../models/beta/threads/run_list_params.rb | 10 +- .../threads/run_submit_tool_outputs_params.rb | 11 +- .../models/beta/threads/run_update_params.rb | 2 +- .../runs/code_interpreter_output_image.rb | 13 +- .../runs/code_interpreter_tool_call.rb | 49 +- .../runs/code_interpreter_tool_call_delta.rb | 31 +- .../threads/runs/file_search_tool_call.rb | 65 +- 
.../runs/file_search_tool_call_delta.rb | 3 +- .../beta/threads/runs/function_tool_call.rb | 15 +- .../threads/runs/function_tool_call_delta.rb | 15 +- .../runs/message_creation_step_details.rb | 8 +- .../models/beta/threads/runs/run_step.rb | 65 +- .../beta/threads/runs/run_step_delta.rb | 14 +- .../beta/threads/runs/run_step_delta_event.rb | 6 +- .../runs/run_step_delta_message_delta.rb | 10 +- .../beta/threads/runs/step_list_params.rb | 17 +- .../beta/threads/runs/step_retrieve_params.rb | 7 +- .../models/beta/threads/runs/tool_call.rb | 8 +- .../beta/threads/runs/tool_call_delta.rb | 8 +- .../threads/runs/tool_call_delta_object.rb | 9 +- .../threads/runs/tool_calls_step_details.rb | 11 +- lib/openai/models/beta/threads/text.rb | 8 +- .../models/beta/threads/text_content_block.rb | 6 +- lib/openai/models/beta/threads/text_delta.rb | 6 +- .../models/beta/threads/text_delta_block.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 62 +- ...chat_completion_assistant_message_param.rb | 63 +- .../models/chat/chat_completion_audio.rb | 6 +- .../chat/chat_completion_audio_param.rb | 40 +- .../models/chat/chat_completion_chunk.rb | 121 +- .../chat/chat_completion_content_part.rb | 26 +- .../chat_completion_content_part_image.rb | 20 +- ...hat_completion_content_part_input_audio.rb | 18 +- ...chat_completion_developer_message_param.rb | 19 +- .../models/chat/chat_completion_message.rb | 42 +- .../chat/chat_completion_message_param.rb | 14 +- .../chat/chat_completion_message_tool_call.rb | 12 +- .../chat/chat_completion_named_tool_choice.rb | 8 +- .../chat_completion_prediction_content.rb | 20 +- .../chat/chat_completion_stream_options.rb | 4 +- .../chat_completion_system_message_param.rb | 19 +- .../chat/chat_completion_token_logprob.rb | 15 +- .../models/chat/chat_completion_tool.rb | 6 +- .../chat_completion_tool_choice_option.rb | 6 +- .../chat_completion_tool_message_param.rb | 16 +- .../chat_completion_user_message_param.rb | 17 +- .../models/chat/completion_create_params.rb | 183 ++- .../models/chat/completion_list_params.rb | 9 +- .../models/chat/completion_update_params.rb | 2 +- .../chat/completions/message_list_params.rb | 7 +- lib/openai/models/comparison_filter.rb | 17 +- lib/openai/models/completion.rb | 17 +- lib/openai/models/completion_choice.rb | 21 +- lib/openai/models/completion_create_params.rb | 51 +- lib/openai/models/completion_usage.rb | 22 +- lib/openai/models/compound_filter.rb | 23 +- .../models/create_embedding_response.rb | 14 +- lib/openai/models/embedding.rb | 5 +- lib/openai/models/embedding_create_params.rb | 29 +- lib/openai/models/eval_create_params.rb | 151 +-- lib/openai/models/eval_create_response.rb | 40 +- .../models/eval_custom_data_source_config.rb | 4 +- lib/openai/models/eval_label_model_grader.rb | 45 +- lib/openai/models/eval_list_params.rb | 13 +- lib/openai/models/eval_list_response.rb | 40 +- lib/openai/models/eval_retrieve_response.rb | 40 +- ...l_stored_completions_data_source_config.rb | 6 +- lib/openai/models/eval_string_check_grader.rb | 8 +- .../models/eval_text_similarity_grader.rb | 11 +- lib/openai/models/eval_update_params.rb | 2 +- lib/openai/models/eval_update_response.rb | 40 +- ...create_eval_completions_run_data_source.rb | 132 ++- .../create_eval_jsonl_run_data_source.rb | 22 +- .../models/evals/run_cancel_response.rb | 51 +- lib/openai/models/evals/run_create_params.rb | 175 +-- .../models/evals/run_create_response.rb | 51 +- lib/openai/models/evals/run_list_params.rb | 14 +- lib/openai/models/evals/run_list_response.rb | 51 +- 
.../models/evals/run_retrieve_response.rb | 51 +- .../evals/runs/output_item_list_params.rb | 13 +- .../evals/runs/output_item_list_response.rb | 6 +- .../runs/output_item_retrieve_response.rb | 6 +- lib/openai/models/file_chunking_strategy.rb | 6 +- .../models/file_chunking_strategy_param.rb | 6 +- lib/openai/models/file_create_params.rb | 9 +- lib/openai/models/file_list_params.rb | 9 +- lib/openai/models/file_object.rb | 23 +- .../checkpoints/permission_retrieve_params.rb | 6 +- .../models/fine_tuning/fine_tuning_job.rb | 173 ++- .../fine_tuning/fine_tuning_job_event.rb | 16 +- .../fine_tuning_job_wandb_integration.rb | 7 +- ...ine_tuning_job_wandb_integration_object.rb | 10 +- .../models/fine_tuning/job_create_params.rb | 179 ++- .../models/fine_tuning/job_list_params.rb | 1 - .../jobs/fine_tuning_job_checkpoint.rb | 8 +- lib/openai/models/function_definition.rb | 6 +- lib/openai/models/image.rb | 6 +- .../models/image_create_variation_params.rb | 29 +- lib/openai/models/image_edit_params.rb | 47 +- lib/openai/models/image_generate_params.rb | 62 +- lib/openai/models/images_response.rb | 27 +- lib/openai/models/moderation.rb | 145 +-- lib/openai/models/moderation_create_params.rb | 25 +- .../models/moderation_create_response.rb | 6 +- .../models/moderation_image_url_input.rb | 8 +- .../models/moderation_multi_modal_input.rb | 6 +- lib/openai/models/reasoning.rb | 26 +- .../models/response_format_json_schema.rb | 20 +- lib/openai/models/responses/computer_tool.rb | 8 +- .../models/responses/easy_input_message.rb | 30 +- .../models/responses/file_search_tool.rb | 36 +- lib/openai/models/responses/function_tool.rb | 3 +- .../responses/input_item_list_params.rb | 19 +- lib/openai/models/responses/response.rb | 118 +- .../responses/response_audio_delta_event.rb | 6 +- .../responses/response_audio_done_event.rb | 4 +- .../response_audio_transcript_delta_event.rb | 6 +- .../response_audio_transcript_done_event.rb | 4 +- ..._code_interpreter_call_code_delta_event.rb | 9 +- ...e_code_interpreter_call_code_done_event.rb | 9 +- ...e_code_interpreter_call_completed_event.rb | 13 +- ...code_interpreter_call_in_progress_event.rb | 12 +- ...ode_interpreter_call_interpreting_event.rb | 14 +- .../response_code_interpreter_tool_call.rb | 60 +- .../responses/response_completed_event.rb | 10 +- .../responses/response_computer_tool_call.rb | 152 ++- ...response_computer_tool_call_output_item.rb | 31 +- ...se_computer_tool_call_output_screenshot.rb | 5 +- .../models/responses/response_content.rb | 12 +- .../response_content_part_added_event.rb | 24 +- .../response_content_part_done_event.rb | 24 +- .../responses/response_create_params.rb | 83 +- .../responses/response_created_event.rb | 10 +- lib/openai/models/responses/response_error.rb | 12 +- .../models/responses/response_error_event.rb | 10 +- .../models/responses/response_failed_event.rb | 10 +- ...sponse_file_search_call_completed_event.rb | 9 +- ...onse_file_search_call_in_progress_event.rb | 9 +- ...sponse_file_search_call_searching_event.rb | 9 +- .../response_file_search_tool_call.rb | 41 +- .../responses/response_format_text_config.rb | 8 +- ...response_format_text_json_schema_config.rb | 11 +- ...nse_function_call_arguments_delta_event.rb | 9 +- .../responses/response_function_tool_call.rb | 20 +- .../response_function_tool_call_item.rb | 4 +- ...response_function_tool_call_output_item.rb | 19 +- .../responses/response_function_web_search.rb | 14 +- .../responses/response_in_progress_event.rb | 10 +- .../responses/response_incomplete_event.rb | 
10 +- lib/openai/models/responses/response_input.rb | 3 +- .../models/responses/response_input_audio.rb | 14 +- .../responses/response_input_content.rb | 8 +- .../models/responses/response_input_file.rb | 4 +- .../models/responses/response_input_image.rb | 12 +- .../models/responses/response_input_item.rb | 106 +- .../response_input_message_content_list.rb | 2 +- .../responses/response_input_message_item.rb | 36 +- lib/openai/models/responses/response_item.rb | 18 +- .../models/responses/response_item_list.rb | 6 +- .../models/responses/response_output_audio.rb | 8 +- .../models/responses/response_output_item.rb | 14 +- .../response_output_item_added_event.rb | 12 +- .../response_output_item_done_event.rb | 12 +- .../responses/response_output_message.rb | 30 +- .../models/responses/response_output_text.rb | 25 +- .../responses/response_reasoning_item.rb | 28 +- ...onse_reasoning_summary_part_added_event.rb | 19 +- ...ponse_reasoning_summary_part_done_event.rb | 19 +- ...onse_reasoning_summary_text_delta_event.rb | 13 +- ...ponse_reasoning_summary_text_done_event.rb | 13 +- .../responses/response_refusal_delta_event.rb | 12 +- .../responses/response_refusal_done_event.rb | 12 +- .../responses/response_retrieve_params.rb | 7 +- .../models/responses/response_stream_event.rb | 77 +- .../response_text_annotation_delta_event.rb | 42 +- .../models/responses/response_text_config.rb | 8 +- .../responses/response_text_delta_event.rb | 12 +- .../responses/response_text_done_event.rb | 12 +- lib/openai/models/responses/response_usage.rb | 20 +- ...esponse_web_search_call_completed_event.rb | 9 +- ...ponse_web_search_call_in_progress_event.rb | 9 +- ...esponse_web_search_call_searching_event.rb | 9 +- lib/openai/models/responses/tool.rb | 10 +- .../models/responses/tool_choice_types.rb | 10 +- .../models/responses/web_search_tool.rb | 32 +- lib/openai/models/responses_model.rb | 6 +- .../models/static_file_chunking_strategy.rb | 4 +- .../static_file_chunking_strategy_object.rb | 6 +- ...tic_file_chunking_strategy_object_param.rb | 6 +- lib/openai/models/upload.rb | 19 +- lib/openai/models/upload_complete_params.rb | 3 +- lib/openai/models/upload_create_params.rb | 12 +- .../models/uploads/part_create_params.rb | 2 +- lib/openai/models/vector_store.rb | 34 +- .../models/vector_store_create_params.rb | 19 +- lib/openai/models/vector_store_list_params.rb | 10 +- .../models/vector_store_search_params.rb | 31 +- .../models/vector_store_search_response.rb | 2 +- .../models/vector_store_update_params.rb | 11 +- .../vector_stores/file_batch_create_params.rb | 14 +- .../file_batch_list_files_params.rb | 17 +- .../vector_stores/file_create_params.rb | 14 +- .../models/vector_stores/file_list_params.rb | 17 +- .../vector_stores/file_update_params.rb | 6 +- .../models/vector_stores/vector_store_file.rb | 42 +- .../vector_stores/vector_store_file_batch.rb | 21 +- lib/openai/resources/audio/speech.rb | 13 +- lib/openai/resources/audio/transcriptions.rb | 40 +- lib/openai/resources/audio/translations.rb | 13 +- lib/openai/resources/batches.rb | 38 +- lib/openai/resources/beta/assistants.rb | 78 +- lib/openai/resources/beta/threads.rb | 101 +- lib/openai/resources/beta/threads/messages.rb | 50 +- lib/openai/resources/beta/threads/runs.rb | 123 +- .../resources/beta/threads/runs/steps.rb | 24 +- lib/openai/resources/chat/completions.rb | 153 ++- .../resources/chat/completions/messages.rb | 9 +- lib/openai/resources/completions.rb | 62 +- lib/openai/resources/embeddings.rb | 15 +- lib/openai/resources/evals.rb | 19 +- 
lib/openai/resources/evals/runs.rb | 20 +- .../resources/evals/runs/output_items.rb | 11 +- lib/openai/resources/files.rb | 30 +- .../fine_tuning/checkpoints/permissions.rb | 16 +- lib/openai/resources/fine_tuning/jobs.rb | 49 +- .../resources/fine_tuning/jobs/checkpoints.rb | 8 +- lib/openai/resources/images.rb | 74 +- lib/openai/resources/models.rb | 12 +- lib/openai/resources/moderations.rb | 7 +- lib/openai/resources/responses.rb | 86 +- lib/openai/resources/responses/input_items.rb | 16 +- lib/openai/resources/uploads.rb | 37 +- lib/openai/resources/uploads/parts.rb | 10 +- lib/openai/resources/vector_stores.rb | 51 +- .../resources/vector_stores/file_batches.rb | 41 +- lib/openai/resources/vector_stores/files.rb | 50 +- rbi/openai/client.rbi | 9 +- rbi/openai/errors.rbi | 51 +- rbi/openai/file_part.rbi | 15 +- rbi/openai/internal/cursor_page.rbi | 3 +- rbi/openai/internal/page.rbi | 3 +- rbi/openai/internal/stream.rbi | 6 +- rbi/openai/internal/transport/base_client.rbi | 234 ++-- .../transport/pooled_net_requester.rbi | 38 +- rbi/openai/internal/type/array_of.rbi | 58 +- rbi/openai/internal/type/base_model.rbi | 189 +-- rbi/openai/internal/type/base_page.rbi | 21 +- rbi/openai/internal/type/base_stream.rbi | 33 +- rbi/openai/internal/type/boolean.rbi | 30 +- rbi/openai/internal/type/converter.rbi | 92 +- rbi/openai/internal/type/enum.rbi | 42 +- rbi/openai/internal/type/file_input.rbi | 22 +- rbi/openai/internal/type/hash_of.rbi | 58 +- .../internal/type/request_parameters.rbi | 16 +- rbi/openai/internal/type/union.rbi | 78 +- rbi/openai/internal/type/unknown.rbi | 27 +- rbi/openai/internal/util.rbi | 301 +++-- rbi/openai/models.rbi | 15 +- rbi/openai/models/all_models.rbi | 47 +- .../models/audio/speech_create_params.rbi | 212 +++- rbi/openai/models/audio/speech_model.rbi | 17 +- rbi/openai/models/audio/transcription.rbi | 56 +- .../audio/transcription_create_params.rbi | 131 ++- .../audio/transcription_create_response.rbi | 19 +- .../models/audio/transcription_include.rbi | 15 +- .../models/audio/transcription_segment.rbi | 41 +- .../audio/transcription_stream_event.rbi | 18 +- .../audio/transcription_text_delta_event.rbi | 69 +- .../audio/transcription_text_done_event.rbi | 69 +- .../models/audio/transcription_verbose.rbi | 50 +- .../models/audio/transcription_word.rbi | 17 +- rbi/openai/models/audio/translation.rbi | 10 +- .../audio/translation_create_params.rbi | 117 +- .../audio/translation_create_response.rbi | 14 +- .../models/audio/translation_verbose.rbi | 35 +- rbi/openai/models/audio_model.rbi | 15 +- rbi/openai/models/audio_response_format.rbi | 20 +- .../auto_file_chunking_strategy_param.rbi | 11 +- rbi/openai/models/batch.rbi | 127 +- rbi/openai/models/batch_cancel_params.rbi | 19 +- rbi/openai/models/batch_create_params.rbi | 98 +- rbi/openai/models/batch_error.rbi | 26 +- rbi/openai/models/batch_list_params.rbi | 24 +- rbi/openai/models/batch_request_counts.rbi | 21 +- rbi/openai/models/batch_retrieve_params.rbi | 19 +- rbi/openai/models/beta/assistant.rbi | 204 ++-- .../models/beta/assistant_create_params.rbi | 462 +++++--- .../models/beta/assistant_delete_params.rbi | 19 +- rbi/openai/models/beta/assistant_deleted.rbi | 18 +- .../models/beta/assistant_list_params.rbi | 63 +- .../beta/assistant_response_format_option.rbi | 20 +- .../models/beta/assistant_retrieve_params.rbi | 19 +- .../models/beta/assistant_stream_event.rbi | 668 ++++++++--- rbi/openai/models/beta/assistant_tool.rbi | 17 +- .../models/beta/assistant_tool_choice.rbi | 70 +- 
.../beta/assistant_tool_choice_function.rbi | 11 +- .../beta/assistant_tool_choice_option.rbi | 52 +- .../models/beta/assistant_update_params.rbi | 489 +++++--- .../models/beta/code_interpreter_tool.rbi | 11 +- rbi/openai/models/beta/file_search_tool.rbi | 140 ++- rbi/openai/models/beta/function_tool.rbi | 25 +- .../models/beta/message_stream_event.rbi | 150 ++- .../models/beta/run_step_stream_event.rbi | 213 +++- rbi/openai/models/beta/run_stream_event.rbi | 284 +++-- rbi/openai/models/beta/thread.rbi | 126 +- .../beta/thread_create_and_run_params.rbi | 1033 ++++++++++------- .../models/beta/thread_create_params.rbi | 610 ++++++---- .../models/beta/thread_delete_params.rbi | 19 +- rbi/openai/models/beta/thread_deleted.rbi | 18 +- .../models/beta/thread_retrieve_params.rbi | 19 +- .../models/beta/thread_stream_event.rbi | 24 +- .../models/beta/thread_update_params.rbi | 136 ++- rbi/openai/models/beta/threads/annotation.rbi | 18 +- .../models/beta/threads/annotation_delta.rbi | 18 +- .../beta/threads/file_citation_annotation.rbi | 58 +- .../file_citation_delta_annotation.rbi | 68 +- .../beta/threads/file_path_annotation.rbi | 55 +- .../threads/file_path_delta_annotation.rbi | 64 +- rbi/openai/models/beta/threads/image_file.rbi | 69 +- .../beta/threads/image_file_content_block.rbi | 27 +- .../models/beta/threads/image_file_delta.rbi | 73 +- .../beta/threads/image_file_delta_block.rbi | 31 +- rbi/openai/models/beta/threads/image_url.rbi | 66 +- .../beta/threads/image_url_content_block.rbi | 28 +- .../models/beta/threads/image_url_delta.rbi | 72 +- .../beta/threads/image_url_delta_block.rbi | 31 +- rbi/openai/models/beta/threads/message.rbi | 350 ++++-- .../models/beta/threads/message_content.rbi | 20 +- .../beta/threads/message_content_delta.rbi | 20 +- .../threads/message_content_part_param.rbi | 19 +- .../beta/threads/message_create_params.rbi | 261 +++-- .../beta/threads/message_delete_params.rbi | 23 +- .../models/beta/threads/message_deleted.rbi | 21 +- .../models/beta/threads/message_delta.rbi | 124 +- .../beta/threads/message_delta_event.rbi | 31 +- .../beta/threads/message_list_params.rbi | 81 +- .../beta/threads/message_retrieve_params.rbi | 23 +- .../beta/threads/message_update_params.rbi | 30 +- .../beta/threads/refusal_content_block.rbi | 16 +- .../beta/threads/refusal_delta_block.rbi | 20 +- .../required_action_function_tool_call.rbi | 56 +- rbi/openai/models/beta/threads/run.rbi | 518 ++++++--- .../models/beta/threads/run_cancel_params.rbi | 23 +- .../models/beta/threads/run_create_params.rbi | 562 +++++---- .../models/beta/threads/run_list_params.rbi | 76 +- .../beta/threads/run_retrieve_params.rbi | 23 +- rbi/openai/models/beta/threads/run_status.rbi | 42 +- .../run_submit_tool_outputs_params.rbi | 64 +- .../models/beta/threads/run_update_params.rbi | 30 +- .../threads/runs/code_interpreter_logs.rbi | 20 +- .../runs/code_interpreter_output_image.rbi | 54 +- .../runs/code_interpreter_tool_call.rbi | 197 ++-- .../runs/code_interpreter_tool_call_delta.rbi | 140 ++- .../threads/runs/file_search_tool_call.rbi | 304 +++-- .../runs/file_search_tool_call_delta.rbi | 21 +- .../beta/threads/runs/function_tool_call.rbi | 60 +- .../threads/runs/function_tool_call_delta.rbi | 70 +- .../runs/message_creation_step_details.rbi | 60 +- .../models/beta/threads/runs/run_step.rbi | 297 +++-- .../beta/threads/runs/run_step_delta.rbi | 76 +- .../threads/runs/run_step_delta_event.rbi | 33 +- .../runs/run_step_delta_message_delta.rbi | 62 +- .../beta/threads/runs/run_step_include.rbi | 18 +- 
.../beta/threads/runs/step_list_params.rbi | 115 +- .../threads/runs/step_retrieve_params.rbi | 59 +- .../models/beta/threads/runs/tool_call.rbi | 19 +- .../beta/threads/runs/tool_call_delta.rbi | 19 +- .../threads/runs/tool_call_delta_object.rbi | 74 +- .../threads/runs/tool_calls_step_details.rbi | 55 +- rbi/openai/models/beta/threads/text.rbi | 49 +- .../beta/threads/text_content_block.rbi | 26 +- .../beta/threads/text_content_block_param.rbi | 12 +- rbi/openai/models/beta/threads/text_delta.rbi | 66 +- .../models/beta/threads/text_delta_block.rbi | 29 +- rbi/openai/models/chat/chat_completion.rbi | 258 ++-- ...hat_completion_assistant_message_param.rbi | 226 ++-- .../models/chat/chat_completion_audio.rbi | 27 +- .../chat/chat_completion_audio_param.rbi | 190 ++- .../models/chat/chat_completion_chunk.rbi | 544 ++++++--- .../chat/chat_completion_content_part.rbi | 75 +- .../chat_completion_content_part_image.rbi | 107 +- ...at_completion_content_part_input_audio.rbi | 104 +- .../chat_completion_content_part_refusal.rbi | 11 +- .../chat_completion_content_part_text.rbi | 11 +- .../models/chat/chat_completion_deleted.rbi | 19 +- ...hat_completion_developer_message_param.rbi | 72 +- .../chat_completion_function_call_option.rbi | 11 +- ...chat_completion_function_message_param.rbi | 26 +- .../models/chat/chat_completion_message.rbi | 165 ++- .../chat/chat_completion_message_param.rbi | 22 +- .../chat_completion_message_tool_call.rbi | 50 +- .../models/chat/chat_completion_modality.rbi | 17 +- .../chat_completion_named_tool_choice.rbi | 44 +- .../chat_completion_prediction_content.rbi | 64 +- .../models/chat/chat_completion_role.rbi | 27 +- .../chat/chat_completion_store_message.rbi | 11 +- .../chat/chat_completion_stream_options.rbi | 11 +- .../chat_completion_system_message_param.rbi | 69 +- .../chat/chat_completion_token_logprob.rbi | 61 +- .../models/chat/chat_completion_tool.rbi | 25 +- .../chat_completion_tool_choice_option.rbi | 52 +- .../chat_completion_tool_message_param.rbi | 69 +- .../chat_completion_user_message_param.rbi | 107 +- .../models/chat/completion_create_params.rbi | 705 +++++++---- .../models/chat/completion_delete_params.rbi | 19 +- .../models/chat/completion_list_params.rbi | 72 +- .../chat/completion_retrieve_params.rbi | 19 +- .../models/chat/completion_update_params.rbi | 14 +- .../chat/completions/message_list_params.rbi | 81 +- rbi/openai/models/chat_model.rbi | 154 ++- rbi/openai/models/comparison_filter.rbi | 60 +- rbi/openai/models/completion.rbi | 45 +- rbi/openai/models/completion_choice.rbi | 99 +- .../models/completion_create_params.rbi | 186 ++- rbi/openai/models/completion_usage.rbi | 104 +- rbi/openai/models/compound_filter.rbi | 57 +- .../models/create_embedding_response.rbi | 58 +- rbi/openai/models/embedding.rbi | 23 +- rbi/openai/models/embedding_create_params.rbi | 156 ++- rbi/openai/models/embedding_model.rbi | 16 +- rbi/openai/models/error_object.rbi | 25 +- rbi/openai/models/eval_create_params.rbi | 678 +++++++---- rbi/openai/models/eval_create_response.rbi | 351 ++++-- .../models/eval_custom_data_source_config.rbi | 19 +- rbi/openai/models/eval_delete_params.rbi | 19 +- rbi/openai/models/eval_delete_response.rbi | 20 +- rbi/openai/models/eval_label_model_grader.rbi | 190 ++- rbi/openai/models/eval_list_params.rbi | 79 +- rbi/openai/models/eval_list_response.rbi | 361 ++++-- rbi/openai/models/eval_retrieve_params.rbi | 19 +- rbi/openai/models/eval_retrieve_response.rbi | 350 ++++-- ..._stored_completions_data_source_config.rbi | 25 +- 
.../models/eval_string_check_grader.rbi | 59 +- .../models/eval_text_similarity_grader.rbi | 116 +- rbi/openai/models/eval_update_params.rbi | 29 +- rbi/openai/models/eval_update_response.rbi | 351 ++++-- ...reate_eval_completions_run_data_source.rbi | 556 +++++---- .../create_eval_jsonl_run_data_source.rbi | 139 ++- rbi/openai/models/evals/eval_api_error.rbi | 11 +- rbi/openai/models/evals/run_cancel_params.rbi | 20 +- .../models/evals/run_cancel_response.rbi | 743 +++++++----- rbi/openai/models/evals/run_create_params.rbi | 665 +++++++---- .../models/evals/run_create_response.rbi | 743 +++++++----- rbi/openai/models/evals/run_delete_params.rbi | 20 +- .../models/evals/run_delete_response.rbi | 22 +- rbi/openai/models/evals/run_list_params.rbi | 104 +- rbi/openai/models/evals/run_list_response.rbi | 741 +++++++----- .../models/evals/run_retrieve_params.rbi | 20 +- .../models/evals/run_retrieve_response.rbi | 747 +++++++----- .../evals/runs/output_item_list_params.rbi | 128 +- .../evals/runs/output_item_list_response.rbi | 214 ++-- .../runs/output_item_retrieve_params.rbi | 24 +- .../runs/output_item_retrieve_response.rbi | 216 ++-- rbi/openai/models/file_chunking_strategy.rbi | 14 +- .../models/file_chunking_strategy_param.rbi | 16 +- rbi/openai/models/file_content_params.rbi | 19 +- rbi/openai/models/file_create_params.rbi | 33 +- rbi/openai/models/file_delete_params.rbi | 19 +- rbi/openai/models/file_deleted.rbi | 18 +- rbi/openai/models/file_list_params.rbi | 55 +- rbi/openai/models/file_object.rbi | 94 +- rbi/openai/models/file_purpose.rbi | 19 +- rbi/openai/models/file_retrieve_params.rbi | 19 +- .../checkpoints/permission_create_params.rbi | 24 +- .../permission_create_response.rbi | 21 +- .../checkpoints/permission_delete_params.rbi | 21 +- .../permission_delete_response.rbi | 22 +- .../permission_retrieve_params.rbi | 80 +- .../permission_retrieve_response.rbi | 79 +- .../models/fine_tuning/fine_tuning_job.rbi | 598 +++++++--- .../fine_tuning/fine_tuning_job_event.rbi | 120 +- .../fine_tuning_job_integration.rbi | 3 +- .../fine_tuning_job_wandb_integration.rbi | 32 +- ...ne_tuning_job_wandb_integration_object.rbi | 33 +- .../models/fine_tuning/job_cancel_params.rbi | 19 +- .../models/fine_tuning/job_create_params.rbi | 626 +++++++--- .../fine_tuning/job_list_events_params.rbi | 24 +- .../models/fine_tuning/job_list_params.rbi | 31 +- .../fine_tuning/job_retrieve_params.rbi | 19 +- .../jobs/checkpoint_list_params.rbi | 25 +- .../jobs/fine_tuning_job_checkpoint.rbi | 86 +- rbi/openai/models/function_definition.rbi | 29 +- rbi/openai/models/image.rbi | 21 +- .../models/image_create_variation_params.rbi | 135 ++- rbi/openai/models/image_edit_params.rbi | 205 ++-- rbi/openai/models/image_generate_params.rbi | 286 +++-- rbi/openai/models/image_model.rbi | 13 +- rbi/openai/models/images_response.rbi | 96 +- rbi/openai/models/metadata.rbi | 6 +- rbi/openai/models/model.rbi | 24 +- rbi/openai/models/model_delete_params.rbi | 19 +- rbi/openai/models/model_deleted.rbi | 18 +- rbi/openai/models/model_list_params.rbi | 19 +- rbi/openai/models/model_retrieve_params.rbi | 19 +- rbi/openai/models/moderation.rbi | 737 +++++++++--- .../models/moderation_create_params.rbi | 122 +- .../models/moderation_create_response.rbi | 22 +- .../models/moderation_image_url_input.rbi | 37 +- rbi/openai/models/moderation_model.rbi | 21 +- .../models/moderation_multi_modal_input.rbi | 12 +- rbi/openai/models/moderation_text_input.rbi | 11 +- .../other_file_chunking_strategy_object.rbi | 11 +- 
rbi/openai/models/reasoning.rbi | 77 +- rbi/openai/models/reasoning_effort.rbi | 13 +- .../models/response_format_json_object.rbi | 11 +- .../models/response_format_json_schema.rbi | 59 +- rbi/openai/models/response_format_text.rbi | 11 +- rbi/openai/models/responses/computer_tool.rbi | 80 +- .../models/responses/easy_input_message.rbi | 167 ++- .../models/responses/file_search_tool.rbi | 143 ++- rbi/openai/models/responses/function_tool.rbi | 31 +- .../responses/input_item_list_params.rbi | 92 +- rbi/openai/models/responses/response.rbi | 400 ++++--- .../responses/response_audio_delta_event.rbi | 11 +- .../responses/response_audio_done_event.rbi | 11 +- .../response_audio_transcript_delta_event.rbi | 11 +- .../response_audio_transcript_done_event.rbi | 11 +- ...code_interpreter_call_code_delta_event.rbi | 21 +- ..._code_interpreter_call_code_done_event.rbi | 21 +- ..._code_interpreter_call_completed_event.rbi | 39 +- ...ode_interpreter_call_in_progress_event.rbi | 39 +- ...de_interpreter_call_interpreting_event.rbi | 39 +- .../response_code_interpreter_tool_call.rbi | 193 ++- .../responses/response_completed_event.rbi | 25 +- .../responses/response_computer_tool_call.rbi | 477 +++++--- ...esponse_computer_tool_call_output_item.rbi | 148 ++- ...e_computer_tool_call_output_screenshot.rbi | 19 +- .../models/responses/response_content.rbi | 21 +- .../response_content_part_added_event.rbi | 68 +- .../response_content_part_done_event.rbi | 68 +- .../responses/response_create_params.rbi | 447 ++++--- .../responses/response_created_event.rbi | 25 +- .../responses/response_delete_params.rbi | 19 +- .../models/responses/response_error.rbi | 136 ++- .../models/responses/response_error_event.rbi | 26 +- .../responses/response_failed_event.rbi | 25 +- ...ponse_file_search_call_completed_event.rbi | 21 +- ...nse_file_search_call_in_progress_event.rbi | 21 +- ...ponse_file_search_call_searching_event.rbi | 21 +- .../response_file_search_tool_call.rbi | 162 ++- .../responses/response_format_text_config.rbi | 19 +- ...esponse_format_text_json_schema_config.rbi | 31 +- ...se_function_call_arguments_delta_event.rbi | 20 +- ...nse_function_call_arguments_done_event.rbi | 20 +- .../responses/response_function_tool_call.rbi | 83 +- .../response_function_tool_call_item.rbi | 11 +- ...esponse_function_tool_call_output_item.rbi | 82 +- .../response_function_web_search.rbi | 74 +- .../responses/response_in_progress_event.rbi | 25 +- .../models/responses/response_includable.rbi | 29 +- .../responses/response_incomplete_event.rbi | 25 +- .../models/responses/response_input.rbi | 4 +- .../models/responses/response_input_audio.rbi | 55 +- .../responses/response_input_content.rbi | 19 +- .../models/responses/response_input_file.rbi | 26 +- .../models/responses/response_input_image.rbi | 67 +- .../models/responses/response_input_item.rbi | 500 +++++--- .../response_input_message_content_list.rbi | 4 +- .../responses/response_input_message_item.rbi | 192 ++- .../models/responses/response_input_text.rbi | 11 +- rbi/openai/models/responses/response_item.rbi | 22 +- .../models/responses/response_item_list.rbi | 90 +- .../responses/response_output_audio.rbi | 19 +- .../models/responses/response_output_item.rbi | 22 +- .../response_output_item_added_event.rbi | 70 +- .../response_output_item_done_event.rbi | 70 +- .../responses/response_output_message.rbi | 117 +- .../responses/response_output_refusal.rbi | 11 +- .../models/responses/response_output_text.rbi | 140 ++- .../responses/response_reasoning_item.rbi | 103 +- 
...nse_reasoning_summary_part_added_event.rbi | 59 +- ...onse_reasoning_summary_part_done_event.rbi | 59 +- ...nse_reasoning_summary_text_delta_event.rbi | 35 +- ...onse_reasoning_summary_text_done_event.rbi | 35 +- .../response_refusal_delta_event.rbi | 35 +- .../responses/response_refusal_done_event.rbi | 29 +- .../responses/response_retrieve_params.rbi | 42 +- .../models/responses/response_status.rbi | 23 +- .../responses/response_stream_event.rbi | 52 +- .../response_text_annotation_delta_event.rbi | 142 ++- .../models/responses/response_text_config.rbi | 63 +- .../responses/response_text_delta_event.rbi | 35 +- .../responses/response_text_done_event.rbi | 35 +- .../models/responses/response_usage.rbi | 79 +- ...sponse_web_search_call_completed_event.rbi | 21 +- ...onse_web_search_call_in_progress_event.rbi | 21 +- ...sponse_web_search_call_searching_event.rbi | 21 +- rbi/openai/models/responses/tool.rbi | 16 +- .../models/responses/tool_choice_function.rbi | 11 +- .../models/responses/tool_choice_options.rbi | 19 +- .../models/responses/tool_choice_types.rbi | 58 +- .../models/responses/web_search_tool.rbi | 153 ++- rbi/openai/models/responses_model.rbi | 49 +- .../models/static_file_chunking_strategy.rbi | 22 +- .../static_file_chunking_strategy_object.rbi | 25 +- ...ic_file_chunking_strategy_object_param.rbi | 25 +- rbi/openai/models/upload.rbi | 64 +- rbi/openai/models/upload_cancel_params.rbi | 19 +- rbi/openai/models/upload_complete_params.rbi | 22 +- rbi/openai/models/upload_create_params.rbi | 37 +- .../models/uploads/part_create_params.rbi | 25 +- rbi/openai/models/uploads/upload_part.rbi | 27 +- rbi/openai/models/vector_store.rbi | 118 +- .../models/vector_store_create_params.rbi | 87 +- .../models/vector_store_delete_params.rbi | 19 +- rbi/openai/models/vector_store_deleted.rbi | 18 +- .../models/vector_store_list_params.rbi | 55 +- .../models/vector_store_retrieve_params.rbi | 19 +- .../models/vector_store_search_params.rbi | 166 ++- .../models/vector_store_search_response.rbi | 118 +- .../models/vector_store_update_params.rbi | 54 +- .../file_batch_cancel_params.rbi | 19 +- .../file_batch_create_params.rbi | 84 +- .../file_batch_list_files_params.rbi | 144 ++- .../file_batch_retrieve_params.rbi | 19 +- .../vector_stores/file_content_params.rbi | 19 +- .../vector_stores/file_content_response.rbi | 11 +- .../vector_stores/file_create_params.rbi | 84 +- .../vector_stores/file_delete_params.rbi | 19 +- .../models/vector_stores/file_list_params.rbi | 131 ++- .../vector_stores/file_retrieve_params.rbi | 19 +- .../vector_stores/file_update_params.rbi | 50 +- .../vector_stores/vector_store_file.rbi | 224 +++- .../vector_stores/vector_store_file_batch.rbi | 124 +- .../vector_store_file_deleted.rbi | 18 +- rbi/openai/request_options.rbi | 18 +- rbi/openai/resources/audio.rbi | 3 +- rbi/openai/resources/audio/speech.rbi | 20 +- rbi/openai/resources/audio/transcriptions.rbi | 58 +- rbi/openai/resources/audio/translations.rbi | 17 +- rbi/openai/resources/batches.rbi | 50 +- rbi/openai/resources/beta.rbi | 3 +- rbi/openai/resources/beta/assistants.rbi | 141 ++- rbi/openai/resources/beta/threads.rbi | 257 ++-- .../resources/beta/threads/messages.rbi | 87 +- rbi/openai/resources/beta/threads/runs.rbi | 345 +++--- .../resources/beta/threads/runs/steps.rbi | 36 +- rbi/openai/resources/chat.rbi | 3 +- rbi/openai/resources/chat/completions.rbi | 233 ++-- .../resources/chat/completions/messages.rbi | 17 +- rbi/openai/resources/completions.rbi | 51 +- rbi/openai/resources/embeddings.rbi | 25 +- 
rbi/openai/resources/evals.rbi | 82 +- rbi/openai/resources/evals/runs.rbi | 70 +- .../resources/evals/runs/output_items.rbi | 28 +- rbi/openai/resources/files.rbi | 58 +- rbi/openai/resources/fine_tuning.rbi | 3 +- .../resources/fine_tuning/checkpoints.rbi | 3 +- .../fine_tuning/checkpoints/permissions.rbi | 35 +- rbi/openai/resources/fine_tuning/jobs.rbi | 70 +- .../fine_tuning/jobs/checkpoints.rbi | 14 +- rbi/openai/resources/images.rbi | 85 +- rbi/openai/resources/models.rbi | 34 +- rbi/openai/resources/moderations.rbi | 36 +- rbi/openai/resources/responses.rbi | 315 ++--- .../resources/responses/input_items.rbi | 40 +- rbi/openai/resources/uploads.rbi | 34 +- rbi/openai/resources/uploads/parts.rbi | 12 +- rbi/openai/resources/vector_stores.rbi | 86 +- .../resources/vector_stores/file_batches.rbi | 62 +- rbi/openai/resources/vector_stores/files.rbi | 87 +- .../internal/type/request_parameters.rbs | 4 +- sig/openai/models/all_models.rbs | 6 +- .../models/audio/speech_create_params.rbs | 16 +- sig/openai/models/audio/transcription.rbs | 14 +- .../audio/transcription_create_params.rbs | 16 +- .../audio/transcription_create_response.rbs | 5 +- .../models/audio/transcription_segment.rbs | 2 - .../audio/transcription_stream_event.rbs | 6 +- .../audio/transcription_text_delta_event.rbs | 14 +- .../audio/transcription_text_done_event.rbs | 14 +- .../models/audio/transcription_verbose.rbs | 22 +- .../models/audio/transcription_word.rbs | 2 - sig/openai/models/audio/translation.rbs | 2 - .../audio/translation_create_params.rbs | 4 +- .../audio/translation_create_response.rbs | 5 +- .../models/audio/translation_verbose.rbs | 12 +- .../auto_file_chunking_strategy_param.rbs | 2 - sig/openai/models/batch.rbs | 43 +- sig/openai/models/batch_cancel_params.rbs | 2 - sig/openai/models/batch_create_params.rbs | 2 - sig/openai/models/batch_error.rbs | 2 - sig/openai/models/batch_list_params.rbs | 2 - sig/openai/models/batch_request_counts.rbs | 2 - sig/openai/models/batch_retrieve_params.rbs | 2 - sig/openai/models/beta/assistant.rbs | 34 +- .../models/beta/assistant_create_params.rbs | 76 +- .../models/beta/assistant_delete_params.rbs | 2 - sig/openai/models/beta/assistant_deleted.rbs | 2 - .../models/beta/assistant_list_params.rbs | 2 - .../beta/assistant_response_format_option.rbs | 8 +- .../models/beta/assistant_retrieve_params.rbs | 2 - .../models/beta/assistant_stream_event.rbs | 272 ++--- sig/openai/models/beta/assistant_tool.rbs | 8 +- .../models/beta/assistant_tool_choice.rbs | 20 +- .../beta/assistant_tool_choice_function.rbs | 2 - .../beta/assistant_tool_choice_option.rbs | 8 +- .../models/beta/assistant_update_params.rbs | 71 +- .../models/beta/code_interpreter_tool.rbs | 2 - sig/openai/models/beta/file_search_tool.rbs | 38 +- sig/openai/models/beta/function_tool.rbs | 8 +- .../models/beta/message_stream_event.rbs | 52 +- .../models/beta/run_step_stream_event.rbs | 72 +- sig/openai/models/beta/run_stream_event.rbs | 129 +- sig/openai/models/beta/thread.rbs | 34 +- .../beta/thread_create_and_run_params.rbs | 190 ++- .../models/beta/thread_create_params.rbs | 128 +- .../models/beta/thread_delete_params.rbs | 2 - sig/openai/models/beta/thread_deleted.rbs | 2 - .../models/beta/thread_retrieve_params.rbs | 2 - .../models/beta/thread_stream_event.rbs | 12 +- .../models/beta/thread_update_params.rbs | 34 +- sig/openai/models/beta/threads/annotation.rbs | 6 +- .../models/beta/threads/annotation_delta.rbs | 6 +- .../beta/threads/file_citation_annotation.rbs | 10 +- 
.../file_citation_delta_annotation.rbs | 14 +- .../beta/threads/file_path_annotation.rbs | 10 +- .../threads/file_path_delta_annotation.rbs | 14 +- sig/openai/models/beta/threads/image_file.rbs | 17 +- .../beta/threads/image_file_content_block.rbs | 11 +- .../models/beta/threads/image_file_delta.rbs | 14 +- .../beta/threads/image_file_delta_block.rbs | 12 +- sig/openai/models/beta/threads/image_url.rbs | 17 +- .../beta/threads/image_url_content_block.rbs | 11 +- .../models/beta/threads/image_url_delta.rbs | 17 +- .../beta/threads/image_url_delta_block.rbs | 12 +- sig/openai/models/beta/threads/message.rbs | 60 +- .../models/beta/threads/message_content.rbs | 10 +- .../beta/threads/message_content_delta.rbs | 10 +- .../threads/message_content_part_param.rbs | 8 +- .../beta/threads/message_create_params.rbs | 30 +- .../beta/threads/message_delete_params.rbs | 2 - .../models/beta/threads/message_deleted.rbs | 2 - .../models/beta/threads/message_delta.rbs | 14 +- .../beta/threads/message_delta_event.rbs | 8 +- .../beta/threads/message_list_params.rbs | 2 - .../beta/threads/message_retrieve_params.rbs | 2 - .../beta/threads/message_update_params.rbs | 2 - .../beta/threads/refusal_content_block.rbs | 2 - .../beta/threads/refusal_delta_block.rbs | 2 - .../required_action_function_tool_call.rbs | 10 +- sig/openai/models/beta/threads/run.rbs | 86 +- .../models/beta/threads/run_cancel_params.rbs | 2 - .../models/beta/threads/run_create_params.rbs | 70 +- .../models/beta/threads/run_list_params.rbs | 2 - .../beta/threads/run_retrieve_params.rbs | 2 - .../run_submit_tool_outputs_params.rbs | 10 +- .../models/beta/threads/run_update_params.rbs | 2 - .../threads/runs/code_interpreter_logs.rbs | 2 - .../runs/code_interpreter_output_image.rbs | 14 +- .../runs/code_interpreter_tool_call.rbs | 34 +- .../runs/code_interpreter_tool_call_delta.rbs | 30 +- .../threads/runs/file_search_tool_call.rbs | 66 +- .../runs/file_search_tool_call_delta.rbs | 2 - .../beta/threads/runs/function_tool_call.rbs | 10 +- .../threads/runs/function_tool_call_delta.rbs | 14 +- .../runs/message_creation_step_details.rbs | 10 +- .../models/beta/threads/runs/run_step.rbs | 54 +- .../beta/threads/runs/run_step_delta.rbs | 18 +- .../threads/runs/run_step_delta_event.rbs | 8 +- .../runs/run_step_delta_message_delta.rbs | 14 +- .../beta/threads/runs/step_list_params.rbs | 2 - .../threads/runs/step_retrieve_params.rbs | 2 - .../models/beta/threads/runs/tool_call.rbs | 8 +- .../beta/threads/runs/tool_call_delta.rbs | 8 +- .../threads/runs/tool_call_delta_object.rbs | 2 - .../threads/runs/tool_calls_step_details.rbs | 2 - sig/openai/models/beta/threads/text.rbs | 2 - .../beta/threads/text_content_block.rbs | 8 +- .../beta/threads/text_content_block_param.rbs | 2 - sig/openai/models/beta/threads/text_delta.rbs | 2 - .../models/beta/threads/text_delta_block.rbs | 12 +- sig/openai/models/chat/chat_completion.rbs | 62 +- ...hat_completion_assistant_message_param.rbs | 44 +- .../models/chat/chat_completion_audio.rbs | 2 - .../chat/chat_completion_audio_param.rbs | 28 +- .../models/chat/chat_completion_chunk.rbs | 120 +- .../chat/chat_completion_content_part.rbs | 20 +- .../chat_completion_content_part_image.rbs | 22 +- ...at_completion_content_part_input_audio.rbs | 18 +- .../chat_completion_content_part_refusal.rbs | 2 - .../chat_completion_content_part_text.rbs | 2 - .../models/chat/chat_completion_deleted.rbs | 2 - ...hat_completion_developer_message_param.rbs | 12 +- .../chat_completion_function_call_option.rbs | 2 - 
...chat_completion_function_message_param.rbs | 2 - .../models/chat/chat_completion_message.rbs | 50 +- .../chat/chat_completion_message_param.rbs | 14 +- .../chat_completion_message_tool_call.rbs | 10 +- .../chat_completion_named_tool_choice.rbs | 10 +- .../chat_completion_prediction_content.rbs | 12 +- .../chat/chat_completion_store_message.rbs | 2 - .../chat/chat_completion_stream_options.rbs | 2 - .../chat_completion_system_message_param.rbs | 12 +- .../chat/chat_completion_token_logprob.rbs | 10 +- .../models/chat/chat_completion_tool.rbs | 8 +- .../chat_completion_tool_choice_option.rbs | 8 +- .../chat_completion_tool_message_param.rbs | 12 +- .../chat_completion_user_message_param.rbs | 10 +- .../models/chat/completion_create_params.rbs | 110 +- .../models/chat/completion_delete_params.rbs | 2 - .../models/chat/completion_list_params.rbs | 2 - .../chat/completion_retrieve_params.rbs | 2 - .../models/chat/completion_update_params.rbs | 2 - .../chat/completions/message_list_params.rbs | 2 - sig/openai/models/comparison_filter.rbs | 18 +- sig/openai/models/completion.rbs | 18 +- sig/openai/models/completion_choice.rbs | 18 +- .../models/completion_create_params.rbs | 16 +- sig/openai/models/completion_usage.rbs | 26 +- sig/openai/models/compound_filter.rbs | 20 +- .../models/create_embedding_response.rbs | 16 +- sig/openai/models/embedding.rbs | 2 - sig/openai/models/embedding_create_params.rbs | 6 +- sig/openai/models/error_object.rbs | 2 - sig/openai/models/eval_create_params.rbs | 128 +- sig/openai/models/eval_create_response.rbs | 28 +- .../models/eval_custom_data_source_config.rbs | 2 - sig/openai/models/eval_delete_params.rbs | 2 - sig/openai/models/eval_delete_response.rbs | 2 - sig/openai/models/eval_label_model_grader.rbs | 44 +- sig/openai/models/eval_list_params.rbs | 2 - sig/openai/models/eval_list_response.rbs | 28 +- sig/openai/models/eval_retrieve_params.rbs | 2 - sig/openai/models/eval_retrieve_response.rbs | 28 +- ..._stored_completions_data_source_config.rbs | 2 - .../models/eval_string_check_grader.rbs | 10 +- .../models/eval_text_similarity_grader.rbs | 10 +- sig/openai/models/eval_update_params.rbs | 2 - sig/openai/models/eval_update_response.rbs | 28 +- ...reate_eval_completions_run_data_source.rbs | 118 +- .../create_eval_jsonl_run_data_source.rbs | 26 +- sig/openai/models/evals/eval_api_error.rbs | 2 - sig/openai/models/evals/run_cancel_params.rbs | 2 - .../models/evals/run_cancel_response.rbs | 52 +- sig/openai/models/evals/run_create_params.rbs | 130 +-- .../models/evals/run_create_response.rbs | 52 +- sig/openai/models/evals/run_delete_params.rbs | 2 - .../models/evals/run_delete_response.rbs | 2 - sig/openai/models/evals/run_list_params.rbs | 2 - sig/openai/models/evals/run_list_response.rbs | 52 +- .../models/evals/run_retrieve_params.rbs | 2 - .../models/evals/run_retrieve_response.rbs | 52 +- .../evals/runs/output_item_list_params.rbs | 2 - .../evals/runs/output_item_list_response.rbs | 16 +- .../runs/output_item_retrieve_params.rbs | 2 - .../runs/output_item_retrieve_response.rbs | 16 +- sig/openai/models/file_chunking_strategy.rbs | 6 +- .../models/file_chunking_strategy_param.rbs | 6 +- sig/openai/models/file_content_params.rbs | 2 - sig/openai/models/file_create_params.rbs | 2 - sig/openai/models/file_delete_params.rbs | 2 - sig/openai/models/file_deleted.rbs | 2 - sig/openai/models/file_list_params.rbs | 2 - sig/openai/models/file_object.rbs | 18 +- sig/openai/models/file_retrieve_params.rbs | 2 - .../checkpoints/permission_create_params.rbs | 2 - 
.../permission_create_response.rbs | 2 - .../checkpoints/permission_delete_params.rbs | 2 - .../permission_delete_response.rbs | 2 - .../permission_retrieve_params.rbs | 2 - .../permission_retrieve_response.rbs | 4 - .../models/fine_tuning/fine_tuning_job.rbs | 224 ++-- .../fine_tuning/fine_tuning_job_event.rbs | 22 +- .../fine_tuning_job_wandb_integration.rbs | 2 - ...ne_tuning_job_wandb_integration_object.rbs | 8 +- .../models/fine_tuning/job_cancel_params.rbs | 2 - .../models/fine_tuning/job_create_params.rbs | 227 ++-- .../fine_tuning/job_list_events_params.rbs | 2 - .../models/fine_tuning/job_list_params.rbs | 2 - .../fine_tuning/job_retrieve_params.rbs | 2 - .../jobs/checkpoint_list_params.rbs | 2 - .../jobs/fine_tuning_job_checkpoint.rbs | 10 +- sig/openai/models/function_definition.rbs | 2 - sig/openai/models/image.rbs | 2 - .../models/image_create_variation_params.rbs | 4 +- sig/openai/models/image_edit_params.rbs | 6 +- sig/openai/models/image_generate_params.rbs | 4 +- sig/openai/models/images_response.rbs | 32 +- sig/openai/models/model.rbs | 2 - sig/openai/models/model_delete_params.rbs | 2 - sig/openai/models/model_deleted.rbs | 2 - sig/openai/models/model_list_params.rbs | 2 - sig/openai/models/model_retrieve_params.rbs | 2 - sig/openai/models/moderation.rbs | 130 +-- .../models/moderation_create_params.rbs | 6 +- .../models/moderation_create_response.rbs | 12 +- .../models/moderation_image_url_input.rbs | 13 +- .../models/moderation_multi_modal_input.rbs | 5 +- sig/openai/models/moderation_text_input.rbs | 2 - .../other_file_chunking_strategy_object.rbs | 2 - sig/openai/models/reasoning.rbs | 18 +- .../models/response_format_json_object.rbs | 2 - .../models/response_format_json_schema.rbs | 10 +- sig/openai/models/response_format_text.rbs | 2 - sig/openai/models/responses/computer_tool.rbs | 10 +- .../models/responses/easy_input_message.rbs | 30 +- .../models/responses/file_search_tool.rbs | 37 +- sig/openai/models/responses/function_tool.rbs | 2 - .../responses/input_item_list_params.rbs | 2 - sig/openai/models/responses/response.rbs | 84 +- .../responses/response_audio_delta_event.rbs | 2 - .../responses/response_audio_done_event.rbs | 2 - .../response_audio_transcript_delta_event.rbs | 2 - .../response_audio_transcript_done_event.rbs | 2 - ...code_interpreter_call_code_delta_event.rbs | 2 - ..._code_interpreter_call_code_done_event.rbs | 2 - ..._code_interpreter_call_completed_event.rbs | 8 +- ...ode_interpreter_call_in_progress_event.rbs | 8 +- ...de_interpreter_call_interpreting_event.rbs | 8 +- .../response_code_interpreter_tool_call.rbs | 34 +- .../responses/response_completed_event.rbs | 11 +- .../responses/response_computer_tool_call.rbs | 86 +- ...esponse_computer_tool_call_output_item.rbs | 32 +- ...e_computer_tool_call_output_screenshot.rbs | 2 - .../models/responses/response_content.rbs | 12 +- .../response_content_part_added_event.rbs | 14 +- .../response_content_part_done_event.rbs | 14 +- .../responses/response_create_params.rbs | 26 +- .../responses/response_created_event.rbs | 11 +- .../responses/response_delete_params.rbs | 2 - .../models/responses/response_error.rbs | 13 +- .../models/responses/response_error_event.rbs | 2 - .../responses/response_failed_event.rbs | 11 +- ...ponse_file_search_call_completed_event.rbs | 2 - ...nse_file_search_call_in_progress_event.rbs | 2 - ...ponse_file_search_call_searching_event.rbs | 2 - .../response_file_search_tool_call.rbs | 26 +- .../responses/response_format_text_config.rbs | 8 +- 
...esponse_format_text_json_schema_config.rbs | 2 - ...se_function_call_arguments_delta_event.rbs | 2 - ...nse_function_call_arguments_done_event.rbs | 2 - .../responses/response_function_tool_call.rbs | 14 +- .../response_function_tool_call_item.rbs | 2 - ...esponse_function_tool_call_output_item.rbs | 14 +- .../response_function_web_search.rbs | 10 +- .../responses/response_in_progress_event.rbs | 11 +- .../responses/response_incomplete_event.rbs | 11 +- .../models/responses/response_input_audio.rbs | 10 +- .../responses/response_input_content.rbs | 8 +- .../models/responses/response_input_file.rbs | 2 - .../models/responses/response_input_image.rbs | 10 +- .../models/responses/response_input_item.rbs | 102 +- .../responses/response_input_message_item.rbs | 34 +- .../models/responses/response_input_text.rbs | 2 - sig/openai/models/responses/response_item.rbs | 18 +- .../models/responses/response_item_list.rbs | 2 - .../responses/response_output_audio.rbs | 2 - .../models/responses/response_output_item.rbs | 14 +- .../response_output_item_added_event.rbs | 2 - .../response_output_item_done_event.rbs | 2 - .../responses/response_output_message.rbs | 22 +- .../responses/response_output_refusal.rbs | 2 - .../models/responses/response_output_text.rbs | 22 +- .../responses/response_reasoning_item.rbs | 22 +- ...nse_reasoning_summary_part_added_event.rbs | 10 +- ...onse_reasoning_summary_part_done_event.rbs | 10 +- ...nse_reasoning_summary_text_delta_event.rbs | 2 - ...onse_reasoning_summary_text_done_event.rbs | 2 - .../response_refusal_delta_event.rbs | 2 - .../responses/response_refusal_done_event.rbs | 2 - .../responses/response_retrieve_params.rbs | 2 - .../responses/response_stream_event.rbs | 74 +- .../response_text_annotation_delta_event.rbs | 22 +- .../models/responses/response_text_config.rbs | 2 - .../responses/response_text_delta_event.rbs | 2 - .../responses/response_text_done_event.rbs | 2 - .../models/responses/response_usage.rbs | 18 +- ...sponse_web_search_call_completed_event.rbs | 2 - ...onse_web_search_call_in_progress_event.rbs | 2 - ...sponse_web_search_call_searching_event.rbs | 2 - sig/openai/models/responses/tool.rbs | 10 +- .../models/responses/tool_choice_function.rbs | 2 - .../models/responses/tool_choice_types.rbs | 10 +- .../models/responses/web_search_tool.rbs | 30 +- sig/openai/models/responses_model.rbs | 6 +- .../models/static_file_chunking_strategy.rbs | 2 - .../static_file_chunking_strategy_object.rbs | 8 +- ...ic_file_chunking_strategy_object_param.rbs | 8 +- sig/openai/models/upload.rbs | 16 +- sig/openai/models/upload_cancel_params.rbs | 2 - sig/openai/models/upload_complete_params.rbs | 2 - sig/openai/models/upload_create_params.rbs | 2 - .../models/uploads/part_create_params.rbs | 2 - sig/openai/models/uploads/upload_part.rbs | 2 - sig/openai/models/vector_store.rbs | 30 +- .../models/vector_store_create_params.rbs | 14 +- .../models/vector_store_delete_params.rbs | 2 - sig/openai/models/vector_store_deleted.rbs | 2 - .../models/vector_store_list_params.rbs | 2 - .../models/vector_store_retrieve_params.rbs | 2 - .../models/vector_store_search_params.rbs | 33 +- .../models/vector_store_search_response.rbs | 6 +- .../models/vector_store_update_params.rbs | 10 +- .../file_batch_cancel_params.rbs | 2 - .../file_batch_create_params.rbs | 10 +- .../file_batch_list_files_params.rbs | 2 - .../file_batch_retrieve_params.rbs | 2 - .../vector_stores/file_content_params.rbs | 2 - .../vector_stores/file_content_response.rbs | 2 - 
.../vector_stores/file_create_params.rbs | 10 +- .../vector_stores/file_delete_params.rbs | 2 - .../models/vector_stores/file_list_params.rbs | 2 - .../vector_stores/file_retrieve_params.rbs | 2 - .../vector_stores/file_update_params.rbs | 10 +- .../vector_stores/vector_store_file.rbs | 34 +- .../vector_stores/vector_store_file_batch.rbs | 18 +- .../vector_store_file_deleted.rbs | 2 - sig/openai/request_options.rbs | 2 +- sig/openai/resources/audio/transcriptions.rbs | 4 +- sig/openai/resources/batches.rbs | 8 +- sig/openai/resources/beta/assistants.rbs | 14 +- sig/openai/resources/beta/threads.rbs | 28 +- .../resources/beta/threads/messages.rbs | 12 +- sig/openai/resources/beta/threads/runs.rbs | 24 +- .../resources/beta/threads/runs/steps.rbs | 4 +- sig/openai/resources/chat/completions.rbs | 40 +- .../resources/chat/completions/messages.rbs | 2 +- sig/openai/resources/completions.rbs | 8 +- sig/openai/resources/embeddings.rbs | 2 +- sig/openai/resources/evals.rbs | 2 +- sig/openai/resources/files.rbs | 8 +- sig/openai/resources/fine_tuning/jobs.rbs | 16 +- .../fine_tuning/jobs/checkpoints.rbs | 2 +- sig/openai/resources/images.rbs | 6 +- sig/openai/resources/models.rbs | 6 +- sig/openai/resources/responses.rbs | 12 +- sig/openai/resources/uploads.rbs | 6 +- sig/openai/resources/uploads/parts.rbs | 2 +- sig/openai/resources/vector_stores.rbs | 16 +- .../resources/vector_stores/file_batches.rbs | 10 +- sig/openai/resources/vector_stores/files.rbs | 14 +- .../resources/audio/transcriptions_test.rb | 4 +- .../resources/audio/translations_test.rb | 4 +- test/openai/resources/batches_test.rb | 32 +- test/openai/resources/beta/assistants_test.rb | 34 +- .../resources/beta/threads/messages_test.rb | 50 +- .../resources/beta/threads/runs/steps_test.rb | 24 +- .../resources/beta/threads/runs_test.rb | 120 +- test/openai/resources/beta/threads_test.rb | 34 +- .../chat/completions/messages_test.rb | 2 +- .../openai/resources/chat/completions_test.rb | 34 +- test/openai/resources/completions_test.rb | 6 +- test/openai/resources/embeddings_test.rb | 6 +- test/openai/resources/evals/runs_test.rb | 8 +- test/openai/resources/files_test.rb | 20 +- .../fine_tuning/jobs/checkpoints_test.rb | 4 +- .../openai/resources/fine_tuning/jobs_test.rb | 54 +- test/openai/resources/images_test.rb | 18 +- test/openai/resources/models_test.rb | 6 +- test/openai/resources/moderations_test.rb | 2 +- .../resources/responses/input_items_test.rb | 48 +- test/openai/resources/responses_test.rb | 52 +- test/openai/resources/uploads/parts_test.rb | 2 +- test/openai/resources/uploads_test.rb | 18 +- .../vector_stores/file_batches_test.rb | 28 +- .../resources/vector_stores/files_test.rb | 42 +- test/openai/resources/vector_stores_test.rb | 34 +- 1129 files changed, 33174 insertions(+), 21915 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 315db278..42345dee 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -63,6 +63,8 @@ Layout/MultilineMethodParameterLineBreaks: # Prefer compact hash literals. Layout/SpaceInsideHashLiteralBraces: EnforcedStyle: no_space + Exclude: + - "**/*.rbi" Lint/BooleanSymbol: Enabled: false @@ -90,6 +92,10 @@ Lint/MissingSuper: Exclude: - "**/*.rbi" +Lint/SymbolConversion: + Exclude: + - "**/*.rbi" + # Disabled for safety reasons, this option changes code semantics. Lint/UnusedMethodArgument: AutoCorrect: false @@ -244,6 +250,10 @@ Style/RedundantInitialize: Exclude: - "**/*.rbi" +Style/RedundantParentheses: + Exclude: + - "**/*.rbi" + # Prefer slashes for regex literals. 
Style/RegexpLiteral: EnforcedStyle: slashes diff --git a/Gemfile.lock b/Gemfile.lock index 15c4f091..065e1cd1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,6 +1,6 @@ GIT remote: https://github.com/stainless-api/syntax_tree-rbs.git - revision: 140eb3ba2ff4b959b345ac2a7927cd758a9f1284 + revision: c30b50219918be7cfe3ef803a00b59d1e77fcada branch: main specs: syntax_tree-rbs (1.0.0) diff --git a/README.md b/README.md index 09cc7a5a..4fc254e3 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ When using sorbet, it is recommended to use model classes as below. This provide ```ruby openai.chat.completions.create( - messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], + messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) ``` @@ -185,7 +185,7 @@ In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can als ```ruby # This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], + messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) @@ -241,9 +241,9 @@ Sorbet's typed enums require sub-classing of the [`T::Enum` class](https://sorbe Since this library does not depend on `sorbet-runtime`, it uses a [`T.all` intersection type](https://sorbet.org/docs/intersection-types) with a ruby primitive type to construct a "tagged alias" instead. ```ruby -module OpenAI::Models::ChatModel +module OpenAI::ChatModel # This alias aids language service driven navigation. 
- TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ChatModel) } end ``` @@ -253,7 +253,7 @@ It is possible to pass a compatible model / parameter class to a method that exp ```ruby params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], + messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], model: :"gpt-4.1" ) openai.chat.completions.create(**params) diff --git a/Rakefile b/Rakefile index db2c83ed..8361a8b9 100644 --- a/Rakefile +++ b/Rakefile @@ -34,11 +34,13 @@ multitask(:test) do ruby(*%w[-w -e], rb, verbose: false) { fail unless _1 } end -rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] +locale = {"LC_ALL" => "C.UTF-8"} desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do + find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] + rubocop = %w[rubocop --fail-level E] rubocop += %w[--format github] if ENV.key?("CI") @@ -46,17 +48,26 @@ multitask(:"lint:rubocop") do rubocop += %w[--except Lint/RedundantCopDisableDirective,Layout/LineLength] lint = xargs + rubocop - sh("#{rubo_find.shelljoin} | #{lint.shelljoin}") + sh("#{find.shelljoin} | #{lint.shelljoin}") end -desc("Format `*.rb(i)`") -multitask(:"format:rubocop") do +desc("Format `*.rb`") +multitask(:"format:rb") do + # while `syntax_tree` is much faster than `rubocop`, `rubocop` is the only formatter with full syntax support + find = %w[find ./lib ./test -type f -and -name *.rb -print0] fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] - sh("#{rubo_find.shelljoin} | #{fmt.shelljoin}") + sh("#{find.shelljoin} | #{fmt.shelljoin}") +end + +desc("Format `*.rbi`") +multitask(:"format:rbi") do + find = %w[find ./rbi -type f -and -name *.rbi -print0] + fmt = xargs + %w[stree write --] + sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") end desc("Format `*.rbs`") -multitask(:"format:syntax_tree") do +multitask(:"format:rbs") do find = %w[find ./sig -type f -name *.rbs -print0] inplace = /darwin|bsd/ =~ RUBY_PLATFORM ? 
["-i", ""] : %w[-i] uuid = SecureRandom.uuid @@ -88,7 +99,7 @@ multitask(:"format:syntax_tree") do # transform class aliases to type aliases, which syntax tree has no trouble with sh("#{find.shelljoin} | #{pre.shelljoin}") # run syntax tree to format `*.rbs` files - sh("#{find.shelljoin} | #{fmt.shelljoin}") do + sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") do success = _1 end # transform type aliases back to class aliases @@ -99,7 +110,7 @@ multitask(:"format:syntax_tree") do end desc("Format everything") -multitask(format: [:"format:rubocop", :"format:syntax_tree"]) +multitask(format: [:"format:rb", :"format:rbi", :"format:rbs"]) desc("Typecheck `*.rbs`") multitask(:"typecheck:steep") do diff --git a/lib/openai.rb b/lib/openai.rb index 0aadd7ca..c1d3f3a2 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -420,6 +420,7 @@ require_relative "openai/models/vector_store_search_params" require_relative "openai/models/vector_store_search_response" require_relative "openai/models/vector_store_update_params" +require_relative "openai/models" require_relative "openai/resources/audio" require_relative "openai/resources/audio/speech" require_relative "openai/resources/audio/transcriptions" diff --git a/lib/openai/file_part.rb b/lib/openai/file_part.rb index d2a3f6a5..f853ca4d 100644 --- a/lib/openai/file_part.rb +++ b/lib/openai/file_part.rb @@ -45,9 +45,9 @@ def initialize(content, filename: nil, content_type: nil) @filename = case content in Pathname - filename.nil? ? content.basename.to_path : File.basename(filename) + filename.nil? ? content.basename.to_path : ::File.basename(filename) else - filename.nil? ? nil : File.basename(filename) + filename.nil? ? nil : ::File.basename(filename) end @content_type = content_type end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 5ceef679..30c8e0c0 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -17,13 +17,13 @@ module Type # values safely. # # @example - # # `chat_model` is a `OpenAI::Models::ChatModel` + # # `chat_model` is a `OpenAI::ChatModel` # case chat_model - # when OpenAI::Models::ChatModel::GPT_4_1 + # when OpenAI::ChatModel::GPT_4_1 # # ... - # when OpenAI::Models::ChatModel::GPT_4_1_MINI + # when OpenAI::ChatModel::GPT_4_1_MINI # # ... - # when OpenAI::Models::ChatModel::GPT_4_1_NANO + # when OpenAI::ChatModel::GPT_4_1_NANO # # ... # else # puts(chat_model) diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb index 0a362d38..d2546558 100644 --- a/lib/openai/internal/type/request_parameters.rb +++ b/lib/openai/internal/type/request_parameters.rb @@ -5,10 +5,10 @@ module Internal module Type # @api private module RequestParameters - # @!parse - # # Options to specify HTTP behaviour for this request. - # # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # attr_accessor :request_options + # @!attribute request_options + # Options to specify HTTP behaviour for this request. 
+ # + # @return [OpenAI::RequestOptions, Hash{Symbol=>Object}] # @param mod [Module] def self.included(mod) diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 3db41ef0..e653de47 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -6,13 +6,13 @@ module Type # @api private # # @example - # # `chat_completion_content_part` is a `OpenAI::Models::Chat::ChatCompletionContentPart` + # # `chat_completion_content_part` is a `OpenAI::Chat::ChatCompletionContentPart` # case chat_completion_content_part - # when OpenAI::Models::Chat::ChatCompletionContentPartText + # when OpenAI::Chat::ChatCompletionContentPartText # puts(chat_completion_content_part.text) - # when OpenAI::Models::Chat::ChatCompletionContentPartImage + # when OpenAI::Chat::ChatCompletionContentPartImage # puts(chat_completion_content_part.image_url) - # when OpenAI::Models::Chat::ChatCompletionContentPartInputAudio + # when OpenAI::Chat::ChatCompletionContentPartInputAudio # puts(chat_completion_content_part.input_audio) # else # puts(chat_completion_content_part) diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 74239da5..32f9ae41 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -535,7 +535,7 @@ class << self filename = ERB::Util.url_encode(val.filename) y << "; filename=\"#{filename}\"" in Pathname | IO - filename = ERB::Util.url_encode(File.basename(val.to_path)) + filename = ERB::Util.url_encode(::File.basename(val.to_path)) y << "; filename=\"#{filename}\"" else end diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index d79c7182..dd458b9f 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -7,9 +7,9 @@ module AllModels variant String - variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } - variant enum: -> { OpenAI::Models::AllModels::ResponsesOnlyModel } + variant enum: -> { OpenAI::AllModels::ResponsesOnlyModel } module ResponsesOnlyModel extend OpenAI::Internal::Type::Enum @@ -24,7 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::AllModels::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index e17d7ec9..9a2372af 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -18,8 +18,8 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # - # @return [String, Symbol, OpenAI::Models::Audio::SpeechModel] - required :model, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Model } + # @return [String, Symbol, OpenAI::Audio::SpeechModel] + required :model, union: -> { OpenAI::Audio::SpeechCreateParams::Model } # @!attribute voice # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, @@ -27,8 +27,8 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). 
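To illustrate the unions here: after this rename, `model` (above) and `voice` (next hunk) each accept either a plain `String` or the corresponding generated constant. A minimal sketch, not part of the patch; the literal values are illustrative, and the constants are the ones the following hunk enumerates:

```ruby
require "openai"

# Either variant of each union should coerce, per the `variant` declarations.
params = OpenAI::Audio::SpeechCreateParams.new(
  input: "Say this is a test",                            # free-form text
  model: "gpt-4o-mini-tts",                               # String variant of the model union
  voice: OpenAI::Audio::SpeechCreateParams::Voice::ALLOY  # const variant of the voice union
)
```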
# - # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] - required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } + # @return [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] + required :voice, union: -> { OpenAI::Audio::SpeechCreateParams::Voice } # @!attribute instructions # Control the voice of your generated audio with additional instructions. Does not @@ -41,8 +41,8 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # - # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat } + # @return [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::Audio::SpeechCreateParams::ResponseFormat } # @!attribute speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is @@ -57,20 +57,15 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. # - # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): - # ... + # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): # - # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, - # ... + # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not - # ... # - # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav - # ... + # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -82,10 +77,10 @@ module Model variant String # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - variant enum: -> { OpenAI::Models::Audio::SpeechModel } + variant enum: -> { OpenAI::Audio::SpeechModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] + # @return [Array(String, Symbol, OpenAI::Audio::SpeechModel)] end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`,
@@ -97,27 +92,27 @@ module Voice
 
           variant String
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ALLOY }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ASH }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::BALLAD }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::CORAL }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ECHO }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::FABLE }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ONYX }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::NOVA }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SAGE }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SHIMMER }
 
-          variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE }
+          variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::VERSE }
 
           # @!method self.variants
           #   @return [Array(String, Symbol)]
diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb
index 7f74deeb..cd9c0b2d 100644
--- a/lib/openai/models/audio/transcription.rb
+++ b/lib/openai/models/audio/transcription.rb
@@ -15,20 +15,19 @@ class Transcription < OpenAI::Internal::Type::BaseModel
         #   models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added
         #   to the `include` array.
         #
-        #   @return [Array<OpenAI::Models::Audio::Transcription::Logprob>, nil]
-        optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] }
+        #   @return [Array<OpenAI::Audio::Transcription::Logprob>, nil]
+        optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::Transcription::Logprob] }
 
         # @!method initialize(text:, logprobs: nil)
         #   Some parameter documentations has been truncated, see
-        #   {OpenAI::Models::Audio::Transcription} for more details.
+        #   {OpenAI::Audio::Transcription} for more details.
         #
         #   Represents a transcription response returned by model, based on the provided
         #   input.
         #
         #   @param text [String] The transcribed text.
         #
-        #   @param logprobs [Array<OpenAI::Models::Audio::Transcription::Logprob>] The log probabilities of the tokens in the transcription. Only returned with the
-        #   ...
+        #   @param logprobs [Array<OpenAI::Audio::Transcription::Logprob>] The log probabilities of the tokens in the transcription.
Only returned with the class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index c9efed38..8fabd4b2 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -22,8 +22,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). # - # @return [String, Symbol, OpenAI::Models::AudioModel] - required :model, union: -> { OpenAI::Models::Audio::TranscriptionCreateParams::Model } + # @return [String, Symbol, OpenAI::AudioModel] + required :model, union: -> { OpenAI::Audio::TranscriptionCreateParams::Model } # @!attribute include # Additional information to include in the transcription response. `logprobs` will @@ -32,9 +32,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. # - # @return [Array, nil] - optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] } + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionInclude] } # @!attribute language # The language of the input audio. Supplying the input language in @@ -58,8 +57,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. # - # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } + # @return [Symbol, OpenAI::AudioResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::AudioResponseFormat } # @!attribute temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the @@ -78,36 +77,31 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. # - # @return [Array, nil] + # @return [Array, nil] optional :timestamp_granularities, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] + } # @!method initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details. # # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl - # ... # - # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc - # ... + # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param include [Array] Additional information to include in the transcription response. ... + # @param include [Array] Additional information to include in the transcription response. 
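As a sketch of the `file`, `model`, and `include` parameters declared above (the path is a placeholder, and it assumes plain symbols coerce through the `TranscriptionInclude` enum, matching the `ArrayOf[enum: ...]` declaration in this hunk):

```ruby
require "openai"
require "pathname"

params = OpenAI::Audio::TranscriptionCreateParams.new(
  file: Pathname("speech.mp3"),  # any documented shape: Pathname, StringIO, IO, OpenAI::FilePart
  model: "gpt-4o-transcribe",    # String variant; an OpenAI::AudioModel value also satisfies the union
  include: [:logprobs]           # coerced against the TranscriptionInclude enum
)
```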
# # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt - # ... # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment - # ... # - # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo - # ... + # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # ... # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format - # ... + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -120,10 +114,10 @@ module Model variant String # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). - variant enum: -> { OpenAI::Models::AudioModel } + variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # @return [Array(String, Symbol, OpenAI::AudioModel)] end module TimestampGranularity diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 52850e6a..ec74e17e 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -13,13 +13,13 @@ module TranscriptionCreateResponse extend OpenAI::Internal::Type::Union # Represents a transcription response returned by model, based on the provided input. - variant -> { OpenAI::Models::Audio::Transcription } + variant -> { OpenAI::Audio::Transcription } # Represents a verbose json transcription response returned by model, based on the provided input. - variant -> { OpenAI::Models::Audio::TranscriptionVerbose } + variant -> { OpenAI::Audio::TranscriptionVerbose } # @!method self.variants - # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] + # @return [Array(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose)] end end end diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 26c61c14..8b7b4416 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -69,20 +69,17 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Audio::TranscriptionSegment} for more details. + # {OpenAI::Audio::TranscriptionSegment} for more details. # # @param id [Integer] Unique identifier of the segment. # # @param avg_logprob [Float] Average logprob of the segment. If the value is lower than -1, consider the logp - # ... # # @param compression_ratio [Float] Compression ratio of the segment. If the value is greater than 2.4, consider the - # ... # # @param end_ [Float] End time of the segment in seconds. 
# # @param no_speech_prob [Float] Probability of no speech in the segment. If the value is higher than 1.0 and the - # ... # # @param seek [Integer] Seek offset of the segment. # diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index d4a5f12e..9d386b9b 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -13,13 +13,13 @@ module TranscriptionStreamEvent discriminator :type # Emitted when there is an additional text delta. This is also the first event emitted when the transcription starts. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. - variant :"transcript.text.delta", -> { OpenAI::Models::Audio::TranscriptionTextDeltaEvent } + variant :"transcript.text.delta", -> { OpenAI::Audio::TranscriptionTextDeltaEvent } # Emitted when the transcription is complete. Contains the complete transcription text. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. - variant :"transcript.text.done", -> { OpenAI::Models::Audio::TranscriptionTextDoneEvent } + variant :"transcript.text.done", -> { OpenAI::Audio::TranscriptionTextDoneEvent } # @!method self.variants - # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] + # @return [Array(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent)] end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 774f99c8..58db67b8 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -21,25 +21,24 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] } # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent} for more details. + # {OpenAI::Audio::TranscriptionTextDeltaEvent} for more details. # # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. # - # @param delta [String] The text delta that was additionally transcribed. ... + # @param delta [String] The text delta that was additionally transcribed. # - # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription - # ... + # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription # - # @param type [Symbol, :"transcript.text.delta"] The type of the event. Always `transcript.text.delta`. ... 
+ # @param type [Symbol, :"transcript.text.delta"] The type of the event. Always `transcript.text.delta`. class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -62,13 +61,13 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. + # {OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. # - # @param token [String] The token that was used to generate the log probability. ... + # @param token [String] The token that was used to generate the log probability. # - # @param bytes [Array] The bytes that were used to generate the log probability. ... + # @param bytes [Array] The bytes that were used to generate the log probability. # - # @param logprob [Float] The log probability of the token. ... + # @param logprob [Float] The log probability of the token. end end end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 99dd1045..2b2eb5b2 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -22,25 +22,24 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Audio::TranscriptionTextDoneEvent} for more details. + # {OpenAI::Audio::TranscriptionTextDoneEvent} for more details. # # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `Stream` parameter set to `true`. # - # @param text [String] The text that was transcribed. ... + # @param text [String] The text that was transcribed. # - # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ - # ... + # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ # - # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. ... + # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -63,13 +62,13 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob} for more details. + # {OpenAI::Audio::TranscriptionTextDoneEvent::Logprob} for more details. # - # @param token [String] The token that was used to generate the log probability. ... + # @param token [String] The token that was used to generate the log probability. 
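Following the `case`/`when` convention of the union `@example` blocks updated earlier in this patch, a consumer might dispatch the two stream events like this (a sketch; `event` stands for one yielded union member):

```ruby
# `event` is a member of the `OpenAI::Audio::TranscriptionStreamEvent` union.
case event
when OpenAI::Audio::TranscriptionTextDeltaEvent
  print(event.delta) # incremental transcript text
when OpenAI::Audio::TranscriptionTextDoneEvent
  puts(event.text)   # the complete transcription
else
  puts(event)
end
```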
# - # @param bytes [Array] The bytes that were used to generate the log probability. ... + # @param bytes [Array] The bytes that were used to generate the log probability. # - # @param logprob [Float] The log probability of the token. ... + # @param logprob [Float] The log probability of the token. end end end diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 361a380c..00cf9ea0 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -25,14 +25,14 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the transcribed text and their corresponding details. # - # @return [Array, nil] - optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } + # @return [Array, nil] + optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!attribute words # Extracted words and their corresponding timestamps. # - # @return [Array, nil] - optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionWord] } + # @return [Array, nil] + optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionWord] } # @!method initialize(duration:, language:, text:, segments: nil, words: nil) # Represents a verbose json transcription response returned by model, based on the @@ -44,9 +44,9 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The transcribed text. # - # @param segments [Array] Segments of the transcribed text and their corresponding details. + # @param segments [Array] Segments of the transcribed text and their corresponding details. # - # @param words [Array] Extracted words and their corresponding timestamps. + # @param words [Array] Extracted words and their corresponding timestamps. end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index d5ab1b8d..ed3107e2 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -19,8 +19,8 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. # - # @return [String, Symbol, OpenAI::Models::AudioModel] - required :model, union: -> { OpenAI::Models::Audio::TranslationCreateParams::Model } + # @return [String, Symbol, OpenAI::AudioModel] + required :model, union: -> { OpenAI::Audio::TranslationCreateParams::Model } # @!attribute prompt # An optional text to guide the model's style or continue a previous audio @@ -35,8 +35,8 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. # - # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat } + # @return [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::Audio::TranslationCreateParams::ResponseFormat } # @!attribute temperature # The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the @@ -53,19 +53,14 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Audio::TranslationCreateParams} for more details. # # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, - # ... # - # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh - # ... + # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment - # ... # - # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo - # ... + # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -77,10 +72,10 @@ module Model variant String # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. - variant enum: -> { OpenAI::Models::AudioModel } + variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] + # @return [Array(String, Symbol, OpenAI::AudioModel)] end # The format of the output, in one of these options: `json`, `text`, `srt`, diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 49d7cc9e..f24d4b2b 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -7,12 +7,12 @@ module Audio module TranslationCreateResponse extend OpenAI::Internal::Type::Union - variant -> { OpenAI::Models::Audio::Translation } + variant -> { OpenAI::Audio::Translation } - variant -> { OpenAI::Models::Audio::TranslationVerbose } + variant -> { OpenAI::Audio::TranslationVerbose } # @!method self.variants - # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] + # @return [Array(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)] end end end diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index 8da3a73b..a237803c 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -25,8 +25,8 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the translated text and their corresponding details. # - # @return [Array, nil] - optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } + # @return [Array, nil] + optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!method initialize(duration:, language:, text:, segments: nil) # @param duration [Float] The duration of the input audio. @@ -35,7 +35,7 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The translated text. 
# - # @param segments [Array] Segments of the translated text and their corresponding details. + # @param segments [Array] Segments of the translated text and their corresponding details. end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index b2ceaa9b..84f42355 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -42,8 +42,8 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute status # The current status of the batch. # - # @return [Symbol, OpenAI::Models::Batch::Status] - required :status, enum: -> { OpenAI::Models::Batch::Status } + # @return [Symbol, OpenAI::Batch::Status] + required :status, enum: -> { OpenAI::Batch::Status } # @!attribute cancelled_at # The Unix timestamp (in seconds) for when the batch was cancelled. @@ -71,8 +71,8 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute errors # - # @return [OpenAI::Models::Batch::Errors, nil] - optional :errors, -> { OpenAI::Models::Batch::Errors } + # @return [OpenAI::Batch::Errors, nil] + optional :errors, -> { OpenAI::Batch::Errors } # @!attribute expired_at # The Unix timestamp (in seconds) for when the batch expired. @@ -124,12 +124,12 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute request_counts # The request counts for different statuses within the batch. # - # @return [OpenAI::Models::BatchRequestCounts, nil] - optional :request_counts, -> { OpenAI::Models::BatchRequestCounts } + # @return [OpenAI::BatchRequestCounts, nil] + optional :request_counts, -> { OpenAI::BatchRequestCounts } # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) - # Some parameter documentations has been truncated, see {OpenAI::Models::Batch} - # for more details. + # Some parameter documentations has been truncated, see {OpenAI::Batch} for more + # details. # # @param id [String] # @@ -141,7 +141,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param input_file_id [String] The ID of the input file for the batch. # - # @param status [Symbol, OpenAI::Models::Batch::Status] The current status of the batch. + # @param status [Symbol, OpenAI::Batch::Status] The current status of the batch. # # @param cancelled_at [Integer] The Unix timestamp (in seconds) for when the batch was cancelled. # @@ -151,7 +151,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param error_file_id [String] The ID of the file containing the outputs of requests with errors. # - # @param errors [OpenAI::Models::Batch::Errors] + # @param errors [OpenAI::Batch::Errors] # # @param expired_at [Integer] The Unix timestamp (in seconds) for when the batch expired. # @@ -163,17 +163,17 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param in_progress_at [Integer] The Unix timestamp (in seconds) for when the batch started processing. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param output_file_id [String] The ID of the file containing the outputs of successfully executed requests. 
# - # @param request_counts [OpenAI::Models::BatchRequestCounts] The request counts for different statuses within the batch. + # @param request_counts [OpenAI::BatchRequestCounts] The request counts for different statuses within the batch. # # @param object [Symbol, :batch] The object type, which is always `batch`. # The current status of the batch. # - # @see OpenAI::Models::Batch#status + # @see OpenAI::Batch#status module Status extend OpenAI::Internal::Type::Enum @@ -190,12 +190,12 @@ module Status # @return [Array] end - # @see OpenAI::Models::Batch#errors + # @see OpenAI::Batch#errors class Errors < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [Array, nil] - optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::BatchError] } + # @return [Array, nil] + optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::BatchError] } # @!attribute object # The object type, which is always `list`. @@ -204,7 +204,7 @@ class Errors < OpenAI::Internal::Type::BaseModel optional :object, String # @!method initialize(data: nil, object: nil) - # @param data [Array] + # @param data [Array] # # @param object [String] The object type, which is always `list`. end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index e72da75d..ce21fc86 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -11,8 +11,8 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # The time frame within which the batch should be processed. Currently only `24h` # is supported. # - # @return [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] - required :completion_window, enum: -> { OpenAI::Models::BatchCreateParams::CompletionWindow } + # @return [Symbol, OpenAI::BatchCreateParams::CompletionWindow] + required :completion_window, enum: -> { OpenAI::BatchCreateParams::CompletionWindow } # @!attribute endpoint # The endpoint to be used for all requests in the batch. Currently @@ -20,8 +20,8 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. # - # @return [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] - required :endpoint, enum: -> { OpenAI::Models::BatchCreateParams::Endpoint } + # @return [Symbol, OpenAI::BatchCreateParams::Endpoint] + required :endpoint, enum: -> { OpenAI::BatchCreateParams::Endpoint } # @!attribute input_file_id # The ID of an uploaded file that contains requests for the new batch. @@ -52,15 +52,13 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::BatchCreateParams} for more details. # - # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` - # ... + # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` # - # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` - # ... + # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. 
Currently `/v1/responses` # - # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. ... + # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index f4b386d2..1a01f061 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -28,10 +28,8 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::BatchListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 1923fa0e..d6aa3ad5 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -68,8 +68,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array] - required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + # @return [Array] + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -93,8 +93,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -110,8 +110,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @return [OpenAI::Models::Beta::Assistant::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::Assistant::ToolResources }, nil?: true + # @return [OpenAI::Beta::Assistant::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::Assistant::ToolResources }, nil?: true # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -124,8 +124,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Assistant} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Beta::Assistant} + # for more details. # # Represents an `assistant` that can call the model and use tools. # @@ -133,46 +133,39 @@ class Assistant < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the assistant was created. # - # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ... + # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. # # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... # - # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ... + # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # ... + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe - # ... + # @param tool_resources [OpenAI::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # # @param object [Symbol, :assistant] The object type, which is always `assistant`. 
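# For orientation, a minimal usage sketch (an illustrative aside, assuming a
# configured `OpenAI::Client` named `client`, that the resource method
# `client.beta.assistants.retrieve` exists as in the published SDK, and that
# the `OpenAI::Models::Beta::*` paths stay available as aliases of the
# flattened `OpenAI::Beta::*` constants this patch switches to):
#
#   assistant = client.beta.assistants.retrieve("asst_123") # placeholder ID
#   assistant.is_a?(OpenAI::Beta::Assistant)                # new, flattened path
#   assistant.is_a?(OpenAI::Models::Beta::Assistant)        # original path, via alias
#   ids = assistant.tool_resources&.file_search&.vector_store_ids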
- # @see OpenAI::Models::Beta::Assistant#tool_resources + # @see OpenAI::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, -> { OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter } + # @return [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, nil] + optional :code_interpreter, -> { OpenAI::Beta::Assistant::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::Assistant::ToolResources::FileSearch } + # @return [OpenAI::Beta::Assistant::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::Assistant::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are @@ -180,10 +173,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::Assistant::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter + # @see OpenAI::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -195,14 +188,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Beta::Assistant::ToolResources::CodeInterpreter} for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search + # @see OpenAI::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -215,10 +206,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Assistant::ToolResources::FileSearch} for more details. + # {OpenAI::Beta::Assistant::ToolResources::FileSearch} for more details. # # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect - # ... end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 3870f034..f05d1764 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -15,8 +15,8 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
# - # @return [String, Symbol, OpenAI::Models::ChatModel] - required :model, union: -> { OpenAI::Models::Beta::AssistantCreateParams::Model } + # @return [String, Symbol, OpenAI::ChatModel] + required :model, union: -> { OpenAI::Beta::AssistantCreateParams::Model } # @!attribute description # The description of the assistant. The maximum length is 512 characters. @@ -56,8 +56,8 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -81,8 +81,8 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -98,16 +98,16 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources }, nil?: true + # @return [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::AssistantCreateParams::ToolResources }, nil?: true # @!attribute tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -123,34 +123,27 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::AssistantCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co # - # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ... + # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. 
# # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ... + # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe - # ... + # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # ... + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -165,23 +158,25 @@ module Model variant String # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. 
- variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel)] end class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, - -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter } + -> { + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter + } # @!attribute file_search # - # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch } + # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are @@ -189,10 +184,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter + # @see OpenAI::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -204,14 +199,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} - # for more details. + # {OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search + # @see OpenAI::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -228,29 +222,31 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this assistant. There can be a maximum of 1 # vector store attached to the assistant. 
# - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch} for - # more details. + # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch} for more + # details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ - # ... # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen - # ... + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } + union: -> { + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy + } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -273,21 +269,19 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} + # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} # for more details. # - # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -295,10 +289,14 @@ module ChunkingStrategy # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. variant :auto, - -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } + -> { + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto + } variant :static, - -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } + -> { + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -317,9 +315,11 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } + -> { + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + } # @!attribute type # Always `static`. @@ -328,11 +328,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -351,19 +351,17 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. - # ... # # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini - # ... 
end end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index a3c95fe0..3c936a2a 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -37,24 +37,20 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Beta::AssistantListParams::Order } + # @return [Symbol, OpenAI::Beta::AssistantListParams::Order, nil] + optional :order, enum: -> { OpenAI::Beta::AssistantListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::AssistantListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 8c471773..411077cb 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -30,20 +30,20 @@ module AssistantResponseFormatOption variant const: :auto # Default response format. Used to generate text responses. - variant -> { OpenAI::Models::ResponseFormatText } + variant -> { OpenAI::ResponseFormatText } # JSON object response format. An older method of generating JSON responses. # Using `json_schema` is recommended for models that support it. Note that the # model will not generate JSON without a system or user message instructing it # to do so. - variant -> { OpenAI::Models::ResponseFormatJSONObject } + variant -> { OpenAI::ResponseFormatJSONObject } # JSON Schema response format. Used to generate structured JSON responses. # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
- variant -> { OpenAI::Models::ResponseFormatJSONSchema } + variant -> { OpenAI::ResponseFormatJSONSchema } # @!method self.variants - # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] + # @return [Array(Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema)] end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index b048ee92..74345189 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -29,93 +29,102 @@ module AssistantStreamEvent discriminator :event # Occurs when a new [thread](https://platform.openai.com/docs/api-reference/threads/object) is created. - variant :"thread.created", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated } + variant :"thread.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadCreated } # Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - variant :"thread.run.created", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated } + variant :"thread.run.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `queued` status. - variant :"thread.run.queued", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued } + variant :"thread.run.queued", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to an `in_progress` status. - variant :"thread.run.in_progress", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress } + variant :"thread.run.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `requires_action` status. variant :"thread.run.requires_action", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction + } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is completed. - variant :"thread.run.completed", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted } + variant :"thread.run.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) ends with status `incomplete`. - variant :"thread.run.incomplete", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete } + variant :"thread.run.incomplete", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) fails. - variant :"thread.run.failed", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed } + variant :"thread.run.failed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `cancelling` status. 
- variant :"thread.run.cancelling", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling } + variant :"thread.run.cancelling", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is cancelled. - variant :"thread.run.cancelled", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled } + variant :"thread.run.cancelled", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) expires. - variant :"thread.run.expired", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired } + variant :"thread.run.expired", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created. - variant :"thread.run.step.created", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated } + variant :"thread.run.step.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state. variant :"thread.run.step.in_progress", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress + } # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed. - variant :"thread.run.step.delta", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta } + variant :"thread.run.step.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is completed. variant :"thread.run.step.completed", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted + } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) fails. - variant :"thread.run.step.failed", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed } + variant :"thread.run.step.failed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is cancelled. variant :"thread.run.step.cancelled", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled + } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires. - variant :"thread.run.step.expired", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired } + variant :"thread.run.step.expired", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created. - variant :"thread.message.created", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated } + variant :"thread.message.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state. 
variant :"thread.message.in_progress", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress + } # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed. - variant :"thread.message.delta", -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta } + variant :"thread.message.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is completed. - variant :"thread.message.completed", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted } + variant :"thread.message.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) ends before it is completed. variant :"thread.message.incomplete", - -> { OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete } + -> { + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete + } # Occurs when an [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. This can happen due to an internal server error or a timeout. - variant :error, -> { OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent } + variant :error, -> { OpenAI::Beta::AssistantStreamEvent::ErrorEvent } class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @return [OpenAI::Models::Beta::Thread] - required :data, -> { OpenAI::Models::Beta::Thread } + # @return [OpenAI::Beta::Thread] + required :data, -> { OpenAI::Beta::Thread } # @!attribute event # @@ -130,14 +139,13 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated} for more details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadCreated} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap - # ... + # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # @@ -149,8 +157,8 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -159,13 +167,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
# - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -175,8 +182,8 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -185,13 +192,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.queued"] end @@ -201,8 +207,8 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -211,14 +217,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.in_progress"] end @@ -228,8 +232,8 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -238,14 +242,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.requires_action") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more - # details. 
+ # {OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.requires_action"] end @@ -255,8 +257,8 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -265,14 +267,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.completed"] end @@ -282,8 +282,8 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -292,14 +292,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.incomplete"] end @@ -309,8 +307,8 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -319,13 +317,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.failed"] end @@ -335,8 +332,8 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -345,14 +342,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelling") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelling"] end @@ -362,8 +357,8 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -372,14 +367,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... 
+ # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelled"] end @@ -389,8 +382,8 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -399,13 +392,12 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.expired"] end @@ -414,8 +406,8 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -424,14 +416,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.created"] end @@ -440,8 +431,8 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -450,14 +441,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. 
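The run lifecycle events above all share one shape: a `data` payload of OpenAI::Beta::Threads::Run plus a fixed `event` symbol. A small helper can classify them by that discriminator; the constant and method below are an illustrative sketch, not part of this patch:

# Illustrative only: the terminal run events enumerated in the hunks above.
TERMINAL_RUN_EVENTS = %i[
  thread.run.completed
  thread.run.incomplete
  thread.run.failed
  thread.run.cancelled
  thread.run.expired
].freeze

# True when the event's discriminator symbol marks the end of a run's lifecycle.
def terminal_run_event?(event)
  TERMINAL_RUN_EVENTS.include?(event.event)
end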
# - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -467,8 +457,8 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent } + # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event # @@ -477,15 +467,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami - # ... + # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -494,8 +482,8 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -504,14 +492,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -520,8 +507,8 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -530,14 +517,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more - # details. 
+ # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.failed"] end @@ -546,8 +532,8 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -556,14 +542,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -572,8 +557,8 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -582,14 +567,13 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.expired"] end @@ -599,8 +583,8 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -609,15 +593,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated} for more - # details. 
+ # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.created"] end @@ -627,8 +609,8 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -637,15 +619,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.in_progress"] end @@ -655,8 +635,8 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. # - # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - required :data, -> { OpenAI::Models::Beta::Threads::MessageDeltaEvent } + # @return [OpenAI::Beta::Threads::MessageDeltaEvent] + required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent } # @!attribute event # @@ -665,15 +645,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta} for more details. # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming - # ... + # @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming # # @param event [Symbol, :"thread.message.delta"] end @@ -683,8 +661,8 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -693,15 +671,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.completed"] end @@ -711,8 +687,8 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -721,15 +697,13 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more - # details. + # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.incomplete"] end @@ -737,8 +711,8 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [OpenAI::Models::ErrorObject] - required :data, -> { OpenAI::Models::ErrorObject } + # @return [OpenAI::ErrorObject] + required :data, -> { OpenAI::ErrorObject } # @!attribute event # @@ -750,12 +724,12 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. 
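Taken together, AssistantStreamEvent is a union discriminated on `event` (the full variant list follows just below). A minimal consumption sketch, assuming `stream` is an Enumerable of already-parsed events; the helper names are placeholders, not SDK methods:

stream.each do |event|
  case event.event
  in :"thread.run.requires_action"
    submit_tool_outputs(event.data) # data is an OpenAI::Beta::Threads::Run
  in :"thread.message.delta"
    handle_delta(event.data)        # data is an OpenAI::Beta::Threads::MessageDeltaEvent
  in :error
    warn event.data.message         # data is an OpenAI::ErrorObject
  else
    # remaining lifecycle events (queued, in_progress, completed, ...)
  end
end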
# - # @param data [OpenAI::Models::ErrorObject] + # @param data [OpenAI::ErrorObject] # @param event [Symbol, :error] end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] + # @return [Array(OpenAI::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Beta::AssistantStreamEvent::ErrorEvent)] end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 0272eb94..111defb9 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -8,14 +8,14 @@ module AssistantTool discriminator :type - variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } - 
variant :file_search, -> { OpenAI::Models::Beta::FileSearchTool } + variant :file_search, -> { OpenAI::Beta::FileSearchTool } - variant :function, -> { OpenAI::Models::Beta::FunctionTool } + variant :function, -> { OpenAI::Beta::FunctionTool } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool)] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 43582de3..7f43fa4a 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -7,25 +7,25 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the tool. If type is `function`, the function name must be set # - # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] - required :type, enum: -> { OpenAI::Models::Beta::AssistantToolChoice::Type } + # @return [Symbol, OpenAI::Beta::AssistantToolChoice::Type] + required :type, enum: -> { OpenAI::Beta::AssistantToolChoice::Type } # @!attribute function # - # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction, nil] - optional :function, -> { OpenAI::Models::Beta::AssistantToolChoiceFunction } + # @return [OpenAI::Beta::AssistantToolChoiceFunction, nil] + optional :function, -> { OpenAI::Beta::AssistantToolChoiceFunction } # @!method initialize(type:, function: nil) # Specifies a tool the model should use. Use to force the model to call a specific # tool. # - # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set + # @param type [Symbol, OpenAI::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set # - # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] + # @param function [OpenAI::Beta::AssistantToolChoiceFunction] # The type of the tool. If type is `function`, the function name must be set # - # @see OpenAI::Models::Beta::AssistantToolChoice#type + # @see OpenAI::Beta::AssistantToolChoice#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index f9d205e3..5a87d00a 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -14,10 +14,10 @@ module AssistantToolChoiceOption extend OpenAI::Internal::Type::Union # `none` means the model will not call any tools and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools before responding to the user. - variant enum: -> { OpenAI::Models::Beta::AssistantToolChoiceOption::Auto } + variant enum: -> { OpenAI::Beta::AssistantToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific tool. - variant -> { OpenAI::Models::Beta::AssistantToolChoice } + variant -> { OpenAI::Beta::AssistantToolChoice } # `none` means the model will not call any tools and instead generates a message. 
# `auto` means the model can pick between generating a message or calling one or @@ -35,7 +35,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] + # @return [Array(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice)] end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index d507c550..3d9b9fbc 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -39,8 +39,8 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] - optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } + # @return [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model, nil] + optional :model, union: -> { OpenAI::Beta::AssistantUpdateParams::Model } # @!attribute name # The name of the assistant. The maximum length is 256 characters. @@ -56,8 +56,8 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -81,8 +81,8 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -98,16 +98,16 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources }, nil?: true + # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources }, nil?: true # @!attribute tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. 
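For the tool-choice union above, both arms can be passed in the same keyword argument: one of the Auto enum values (`:none`, `:auto`, `:required`) or a full AssistantToolChoice. A hedged request sketch; the `client.beta.threads.runs.create` call shape, `thread_id`, and `assistant_id` are assumed here, not defined in this hunk:

# Enum arm: let the model decide whether to call tools.
client.beta.threads.runs.create(thread_id, assistant_id: assistant_id, tool_choice: :auto)

# Object arm: force one specific function tool.
client.beta.threads.runs.create(
  thread_id,
  assistant_id: assistant_id,
  tool_choice: {type: :function, function: {name: "lookup_order"}}
)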
# - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -123,34 +123,27 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::AssistantUpdateParams} for more details. # - # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ... + # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. # # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # - # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ... + # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe - # ... + # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per - # ... + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -164,77 +157,77 @@ module Model variant String - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O1 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } + 
variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0314 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0613 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } - variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } + variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } # @!method self.variants # @return [Array(String, Symbol)] @@ -284,14 +277,16 @@ module Model class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, - -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter } + -> { + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter + } # @!attribute file_search # - # @return 
[OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch } + # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are @@ -299,10 +294,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # Overrides the list of @@ -315,14 +310,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} - # for more details. + # {OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] Overrides the list of [file](https://platform.openai.com/docs/api-reference/file - # ... end - # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search + # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # Overrides the @@ -335,11 +329,10 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch} for - # more details. + # {OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch} for more + # details. # # @param vector_store_ids [Array] Overrides the [vector store](https://platform.openai.com/docs/api-reference/vect - # ... end end end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 2dbd7d7a..c521e6a6 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -13,15 +13,15 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute file_search # Overrides for the file search tool. # - # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch } + # @return [OpenAI::Beta::FileSearchTool::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::FileSearchTool::FileSearch } # @!method initialize(file_search: nil, type: :file_search) - # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. + # @param file_search [OpenAI::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. 
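The file_search override documented here nests ranking options one level down (the RankingOptions fields are spelled out just below). A plain-hash sketch of the same shape; the values are examples only, and the SDK's typed classes would work equally well:

file_search_tool = {
  type: :file_search,
  file_search: {
    max_num_results: 10, # maximum number of results the tool should output
    ranking_options: {
      ranker: :auto,       # or a dated ranker enum value
      score_threshold: 0.5 # required; a floating point value, per the doc below
    }
  }
}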
# # @param type [Symbol, :file_search] The type of tool being defined: `file_search` - # @see OpenAI::Models::Beta::FileSearchTool#file_search + # @see OpenAI::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute max_num_results # The maximum number of results the file search tool should output. The default is @@ -44,22 +44,20 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] - optional :ranking_options, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions } + # @return [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions, nil] + optional :ranking_options, -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions } # @!method initialize(max_num_results: nil, ranking_options: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::FileSearchTool::FileSearch} for more details. + # {OpenAI::Beta::FileSearchTool::FileSearch} for more details. # # Overrides for the file search tool. # # @param max_num_results [Integer] The maximum number of results the file search tool should output. The default is - # ... # - # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool - # ... + # @param ranking_options [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool - # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options + # @see OpenAI::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute score_threshold # The score threshold for the file search. All values must be a floating point @@ -72,13 +70,12 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] - optional :ranker, enum: -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } + # @return [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] + optional :ranker, enum: -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } # @!method initialize(score_threshold:, ranker: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} for more - # details. + # {OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions} for more details. # # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. @@ -88,15 +85,13 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # for more information. # # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num - # ... # - # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank - # ... 
+ # @param ranker [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker + # @see OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index 4a76a200..512eb078 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -6,8 +6,8 @@ module Beta class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Models::FunctionDefinition] - required :function, -> { OpenAI::Models::FunctionDefinition } + # @return [OpenAI::FunctionDefinition] + required :function, -> { OpenAI::FunctionDefinition } # @!attribute type # The type of tool being defined: `function` @@ -16,7 +16,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(function:, type: :function) - # @param function [OpenAI::Models::FunctionDefinition] + # @param function [OpenAI::FunctionDefinition] # # @param type [Symbol, :function] The type of tool being defined: `function` end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 77a99a07..ce394898 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -12,30 +12,30 @@ module MessageStreamEvent discriminator :event # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is created. - variant :"thread.message.created", -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated } + variant :"thread.message.created", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state. variant :"thread.message.in_progress", - -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress } + -> { + OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress + } # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed. - variant :"thread.message.delta", -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta } + variant :"thread.message.delta", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) is completed. - variant :"thread.message.completed", - -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted } + variant :"thread.message.completed", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) ends before it is completed. - variant :"thread.message.incomplete", - -> { OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete } + variant :"thread.message.incomplete", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete } class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
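FunctionTool, shown earlier in this hunk, wraps an OpenAI::FunctionDefinition. An illustrative payload matching that shape; the function name and JSON schema are made up for the example:

function_tool = {
  type: :function,
  function: {
    name: "get_weather",
    description: "Look up current weather for a city",
    parameters: {
      type: "object",
      properties: {city: {type: "string"}},
      required: ["city"]
    }
  }
}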
# - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -44,15 +44,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated} for more - # details. + # {OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.created"] end @@ -62,8 +60,8 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -72,15 +70,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress} for more - # details. + # {OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.in_progress"] end @@ -90,8 +86,8 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. # - # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - required :data, -> { OpenAI::Models::Beta::Threads::MessageDeltaEvent } + # @return [OpenAI::Beta::Threads::MessageDeltaEvent] + required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent } # @!attribute event # @@ -100,14 +96,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta} for more details. + # {OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta} for more details. # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming - # ... + # @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. 
any changed fields on a message during streaming # # @param event [Symbol, :"thread.message.delta"] end @@ -117,8 +112,8 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -127,15 +122,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted} for more - # details. + # {OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... + # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.completed"] end @@ -145,8 +138,8 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Message] - required :data, -> { OpenAI::Models::Beta::Threads::Message } + # @return [OpenAI::Beta::Threads::Message] + required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event # @@ -155,21 +148,19 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more - # details. + # {OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. # - # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe - # ... 
+ # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.incomplete"] end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] + # @return [Array(OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete)] end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index f876f01e..40fef09f 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -12,35 +12,35 @@ module RunStepStreamEvent discriminator :event # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is created. - variant :"thread.run.step.created", -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated } + variant :"thread.run.step.created", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state. variant :"thread.run.step.in_progress", - -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress } + -> { + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress + } # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed. - variant :"thread.run.step.delta", -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta } + variant :"thread.run.step.delta", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is completed. - variant :"thread.run.step.completed", - -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted } + variant :"thread.run.step.completed", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) fails. - variant :"thread.run.step.failed", -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed } + variant :"thread.run.step.failed", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is cancelled. - variant :"thread.run.step.cancelled", - -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled } + variant :"thread.run.step.cancelled", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires. 
- variant :"thread.run.step.expired", -> { OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired } + variant :"thread.run.step.expired", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired } class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -49,14 +49,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.created"] end @@ -65,8 +64,8 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -75,14 +74,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -92,8 +90,8 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent } + # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event # @@ -102,14 +100,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. 
# - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami - # ... + # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -118,8 +115,8 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -128,14 +125,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -144,8 +140,8 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -154,14 +150,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.failed"] end @@ -170,8 +165,8 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -180,14 +175,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more details. 
# # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -196,8 +190,8 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] - required :data, -> { OpenAI::Models::Beta::Threads::Runs::RunStep } + # @return [OpenAI::Beta::Threads::Runs::RunStep] + required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event # @@ -206,20 +200,19 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more - # details. + # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. ... + # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.expired"] end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] + # @return [Array(OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired)] end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 19bda475..ca41f968 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -11,43 +11,42 @@ module RunStreamEvent discriminator :event # Occurs when a new [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - variant :"thread.run.created", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated } + variant :"thread.run.created", -> { OpenAI::Beta::RunStreamEvent::ThreadRunCreated } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `queued` status. - variant :"thread.run.queued", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued } + variant :"thread.run.queued", -> { OpenAI::Beta::RunStreamEvent::ThreadRunQueued } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to an `in_progress` status. 
- variant :"thread.run.in_progress", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress } + variant :"thread.run.in_progress", -> { OpenAI::Beta::RunStreamEvent::ThreadRunInProgress } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `requires_action` status. - variant :"thread.run.requires_action", - -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction } + variant :"thread.run.requires_action", -> { OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is completed. - variant :"thread.run.completed", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted } + variant :"thread.run.completed", -> { OpenAI::Beta::RunStreamEvent::ThreadRunCompleted } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) ends with status `incomplete`. - variant :"thread.run.incomplete", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete } + variant :"thread.run.incomplete", -> { OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) fails. - variant :"thread.run.failed", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed } + variant :"thread.run.failed", -> { OpenAI::Beta::RunStreamEvent::ThreadRunFailed } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `cancelling` status. - variant :"thread.run.cancelling", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling } + variant :"thread.run.cancelling", -> { OpenAI::Beta::RunStreamEvent::ThreadRunCancelling } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is cancelled. - variant :"thread.run.cancelled", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled } + variant :"thread.run.cancelled", -> { OpenAI::Beta::RunStreamEvent::ThreadRunCancelled } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) expires. - variant :"thread.run.expired", -> { OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired } + variant :"thread.run.expired", -> { OpenAI::Beta::RunStreamEvent::ThreadRunExpired } class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -56,13 +55,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... 
+ # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -72,8 +70,8 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -82,13 +80,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.queued"] end @@ -98,8 +95,8 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -108,13 +105,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunInProgress} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.in_progress"] end @@ -124,8 +120,8 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -134,14 +130,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.requires_action") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction} for more - # details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. 
# - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.requires_action"] end @@ -151,8 +145,8 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -161,13 +155,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunCompleted} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.completed"] end @@ -177,8 +170,8 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -187,13 +180,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.incomplete"] end @@ -203,8 +195,8 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -213,13 +205,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunFailed} for more details. 
# # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.failed"] end @@ -229,8 +220,8 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -239,13 +230,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelling") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunCancelling} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelling"] end @@ -255,8 +245,8 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -265,13 +255,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled} for more details. + # {OpenAI::Beta::RunStreamEvent::ThreadRunCancelled} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelled"] end @@ -281,8 +270,8 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Models::Beta::Threads::Run] - required :data, -> { OpenAI::Models::Beta::Threads::Run } + # @return [OpenAI::Beta::Threads::Run] + required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event # @@ -291,19 +280,18 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired} for more details. 
+ # {OpenAI::Beta::RunStreamEvent::ThreadRunExpired} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r - # ... + # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.expired"] end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] + # @return [Array(OpenAI::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Beta::RunStreamEvent::ThreadRunExpired)] end end end diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index cad2cd8d..11d37b69 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -40,12 +40,12 @@ class Thread < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Models::Beta::Thread::ToolResources, nil] - required :tool_resources, -> { OpenAI::Models::Beta::Thread::ToolResources }, nil?: true + # @return [OpenAI::Beta::Thread::ToolResources, nil] + required :tool_resources, -> { OpenAI::Beta::Thread::ToolResources }, nil?: true # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Thread} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Beta::Thread} for + # more details. # # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). @@ -54,24 +54,23 @@ class Thread < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the thread was created. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre - # ... + # @param tool_resources [OpenAI::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param object [Symbol, :thread] The object type, which is always `thread`. 
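As an illustrative aside (not part of this patch): all three stream-event unions touched here (message, run-step, and run events) dispatch on the `event` discriminator, so a consumer can pattern-match parsed events directly. The sketch below assumes a `stream` enumerable that yields the `RunStreamEvent` variants defined above; the event symbols and the `data` payload type come from those definitions, while `stream` and `submit_tool_outputs` are hypothetical names.

    # `stream` is assumed to yield parsed RunStreamEvent variant instances.
    stream.each do |event|
      case event.event
      in :"thread.run.created" | :"thread.run.queued" | :"thread.run.in_progress"
        # `data` is an OpenAI::Beta::Threads::Run, per the required attribute above
        puts "run #{event.data.id}: #{event.event}"
      in :"thread.run.requires_action"
        submit_tool_outputs(event.data) # hypothetical handler
      in :"thread.run.completed"
        puts "run finished"
      in :"thread.run.failed" | :"thread.run.cancelled" | :"thread.run.expired"
        warn "run ended without completing: #{event.event}"
      else
        nil # e.g. :"thread.run.cancelling", :"thread.run.incomplete"
      end
    end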
- # @see OpenAI::Models::Beta::Thread#tool_resources + # @see OpenAI::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, -> { OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter } + # @return [OpenAI::Beta::Thread::ToolResources::CodeInterpreter, nil] + optional :code_interpreter, -> { OpenAI::Beta::Thread::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::Thread::ToolResources::FileSearch } + # @return [OpenAI::Beta::Thread::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::Thread::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this @@ -79,10 +78,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::Thread::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter + # @see OpenAI::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -94,13 +93,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter} for more details. + # {OpenAI::Beta::Thread::ToolResources::CodeInterpreter} for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::Thread::ToolResources#file_search + # @see OpenAI::Beta::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -113,10 +111,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Thread::ToolResources::FileSearch} for more details. + # {OpenAI::Beta::Thread::ToolResources::FileSearch} for more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ - # ... end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4199cc52..5d4533ef 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -62,8 +62,8 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # model associated with the assistant. If not, the model associated with the # assistant will be used. 
# - # @return [String, Symbol, OpenAI::Models::ChatModel, nil] - optional :model, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Model }, nil?: true + # @return [String, Symbol, OpenAI::ChatModel, nil] + optional :model, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Model }, nil?: true # @!attribute parallel_tool_calls # Whether to enable @@ -95,8 +95,8 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -110,8 +110,8 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] - optional :thread, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread } + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread, nil] + optional :thread, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread } # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will @@ -122,8 +122,8 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - optional :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true + # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tool_resources # A set of resources that are used by the assistant's tools. The resources are @@ -131,16 +131,18 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources }, nil?: true + # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. 
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] }, + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] + }, nil?: true # @!attribute top_p @@ -157,9 +159,9 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] optional :truncation_strategy, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy }, + -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy }, nil?: true # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) @@ -167,46 +169,34 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Beta::ThreadCreateAndRunParams} for more details. # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista - # ... # # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi - # ... # # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the - # ... # # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run. - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # ... + # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ... + # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. 
If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe - # ... + # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify - # ... + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # - # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro - # ... + # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -220,10 +210,10 @@ module Model variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. - variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel)] end class Thread < OpenAI::Internal::Type::BaseModel @@ -231,9 +221,11 @@ class Thread < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # - # @return [Array, nil] + # @return [Array, nil] optional :messages, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] + } # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -252,32 +244,32 @@ class Thread < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] optional :tool_resources, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources }, + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources + }, nil?: true # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread} for more details. + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread} for more details. 
# # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # ... + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre - # ... + # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content } # @!attribute role # The role of the entity that is creating the message. Allowed values include: @@ -287,15 +279,17 @@ class Message < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] - required :role, enum: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role } + # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] + required :role, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] + }, nil?: true # @!attribute metadata @@ -311,20 +305,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message} for more - # details. + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: ... + # @param role [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. 
+ # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#content module Content extend OpenAI::Internal::Type::Union @@ -332,14 +325,16 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } + variant -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -349,7 +344,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -370,24 +365,28 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] + } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
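To make the attachment shape concrete, here is a hedged sketch (not part of the patch) of a thread message that attaches one file to both tool kinds; the file ID is a placeholder, and the tool hashes mirror the `Tool` union defined just below.

    # A plain hash matching the Message/Attachment models above; the typed
    # model classes are expected to coerce hashes like this on construction.
    message = {
      role: :user,
      content: "Please analyse the attached spreadsheet.",
      attachments: [
        {
          file_id: "file_abc123", # placeholder ID
          tools: [
            {type: :code_interpreter},
            {type: :file_search}
          ]
        }
      ]
    }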
module Tool extend OpenAI::Internal::Type::Union discriminator :type - variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -401,24 +400,26 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] end end end - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } + -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] optional :file_search, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this @@ -426,10 +427,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -441,14 +442,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} # for more details. 
# # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -465,29 +465,31 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. # - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} - # for more details. + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} for + # more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ - # ... # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen - # ... + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy } + union: -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy + } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -510,21 +512,19 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore} + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore} # for more details. # - # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... 
+ # @param chunking_strategy [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -532,10 +532,14 @@ module ChunkingStrategy # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. variant :auto, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto + } variant :static, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -554,9 +558,11 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + } # @!attribute type # Always `static`. @@ -565,11 +571,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. 
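As a hedged illustration (not part of the patch) of the chunking-strategy union above: a `vector_stores` helper entry that spells out the `static` strategy explicitly, using the values the comments above quote as the `auto` defaults (`max_chunk_size_tokens` of 800, `chunk_overlap_tokens` of 400).

    # Equivalent in effect to the `auto` strategy, per the defaults quoted above.
    vector_store = {
      file_ids: ["file_abc123"], # placeholder
      chunking_strategy: {
        type: :static,
        static: {
          max_chunk_size_tokens: 800,
          chunk_overlap_tokens: 400
        }
      }
    }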
@@ -588,19 +594,17 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. - # ... # # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini - # ... end end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end @@ -610,14 +614,16 @@ class Static < OpenAI::Internal::Type::BaseModel class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, - -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter } + -> { + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter + } # @!attribute file_search # - # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } + # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are @@ -625,10 +631,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
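For orientation while reading this hunk: `ToolResources` pairs each tool with the resource list it needs. A minimal sketch of the hash shape these models coerce (both IDs are placeholders):

    tool_resources = {
      code_interpreter: { file_ids: ["file-abc123"] },  # placeholder file ID
      file_search: { vector_store_ids: ["vs_abc123"] }  # placeholder vector store ID
    }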
# - # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter + # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -640,14 +646,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} - # for more details. + # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} for + # more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search + # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -660,11 +665,10 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for - # more details. + # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for more + # details. # # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect - # ... end end @@ -675,8 +679,8 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } + # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } # @!attribute last_messages # The number of most recent messages from the thread when constructing the context @@ -687,24 +691,21 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, last_messages: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more - # details. + # {OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more details. # # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # ... + # @param type [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. 
The default is `auto`. If set to # # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - # ... # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type + # @see OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 50ef5aca..c4d1c025 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -12,9 +12,8 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # - # @return [Array, nil] - optional :messages, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message] } + # @return [Array, nil] + optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message] } # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -33,20 +32,18 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources }, nil?: true + # @return [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::ThreadCreateParams::ToolResources }, nil?: true # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadCreateParams} for more details. # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to - # ... + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre - # ... + # @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -54,8 +51,8 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. 
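Putting the top-level attributes of this file together, a thread-create payload is a hash like the following minimal sketch (values are placeholders; `metadata` allows at most 16 key-value pairs):

    thread_params = {
      messages: [
        { role: :user, content: "Hello, thread!" } # role is :user or :assistant
      ],
      metadata: { purpose: "demo" } # placeholder; up to 16 key-value pairs
    }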
# - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Beta::ThreadCreateParams::Message::Content } # @!attribute role # The role of the entity that is creating the message. Allowed values include: @@ -65,15 +62,17 @@ class Message < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] - required :role, enum: -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Role } + # @return [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role] + required :role, enum: -> { OpenAI::Beta::ThreadCreateParams::Message::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message::Attachment] + }, nil?: true # @!attribute metadata @@ -89,19 +88,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateParams::Message} for more details. + # {OpenAI::Beta::ThreadCreateParams::Message} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include: ... + # @param role [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Models::Beta::ThreadCreateParams::Message#content + # @see OpenAI::Beta::ThreadCreateParams::Message#content module Content extend OpenAI::Internal::Type::Union @@ -109,14 +108,14 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). 
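So the `Content` union renamed here accepts either form. A sketch of both, with a placeholder file ID:

    content_as_string = "Describe the attached image."

    content_as_parts = [
      { type: :text, text: "Describe the attached image." },
      { type: :image_file, image_file: { file_id: "file-abc123" } } # placeholder ID
    ]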
- variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } + variant -> { OpenAI::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -126,7 +125,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Models::Beta::ThreadCreateParams::Message#role + # @see OpenAI::Beta::ThreadCreateParams::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -147,24 +146,28 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool] + } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union discriminator :type - variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch } + -> { + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -178,7 +181,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] end end end @@ -186,14 +189,13 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, - -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter } + # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] + optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch } + # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of 
resources that are made available to the assistant's tools in this @@ -201,10 +203,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter + # @see OpenAI::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -216,14 +218,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for - # more details. + # {OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search + # @see OpenAI::Beta::ThreadCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -240,29 +241,30 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. # - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch} for more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ - # ... # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen - # ... + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } + union: -> { + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy + } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -285,21 +287,19 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} - # for more details. + # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} for + # more details. # - # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -307,10 +307,14 @@ module ChunkingStrategy # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. 
variant :auto, - -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } + -> { + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto + } variant :static, - -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } + -> { + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -329,9 +333,11 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } + -> { + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + } # @!attribute type # Always `static`. @@ -340,11 +346,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -363,19 +369,17 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. - # ... # # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini - # ... 
end end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index d3279538..1dbc9873 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -8,8 +8,8 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @return [OpenAI::Models::Beta::Thread] - required :data, -> { OpenAI::Models::Beta::Thread } + # @return [OpenAI::Beta::Thread] + required :data, -> { OpenAI::Beta::Thread } # @!attribute event # @@ -24,14 +24,13 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadStreamEvent} for more details. + # {OpenAI::Beta::ThreadStreamEvent} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap - # ... + # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index a96d0bc8..742aeb19 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -25,31 +25,29 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] - optional :tool_resources, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources }, nil?: true + # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] + optional :tool_resources, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources }, nil?: true # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadUpdateParams} for more details. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre - # ... 
+ # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, - -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } + # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] + optional :code_interpreter, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] - optional :file_search, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch } + # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] + optional :file_search, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this @@ -57,10 +55,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -72,14 +70,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for - # more details. + # {OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made - # ... end - # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search + # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -92,11 +89,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ - # ... 
end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index b5adaf6d..c110cbbd 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -13,13 +13,13 @@ module Annotation discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. - variant :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationAnnotation } + variant :file_citation, -> { OpenAI::Beta::Threads::FileCitationAnnotation } # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. - variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation } + variant :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] + # @return [Array(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index c6ed7bc1..e5b290d5 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -13,13 +13,13 @@ module AnnotationDelta discriminator :type # A citation within the message that points to a specific quote from a specific File associated with the assistant or the message. Generated when the assistant uses the "file_search" tool to search files. - variant :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation } + variant :file_citation, -> { OpenAI::Beta::Threads::FileCitationDeltaAnnotation } # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. 
- variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation } + variant :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] + # @return [Array(OpenAI::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Beta::Threads::FilePathDeltaAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index 4ccc33d1..bb8a4050 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -12,8 +12,8 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] - required :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation } + # @return [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] + required :file_citation, -> { OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation } # @!attribute start_index # @@ -39,7 +39,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] + # @param file_citation [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] # # @param start_index [Integer] # @@ -47,7 +47,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. - # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation + # @see OpenAI::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 7a676989..4449922e 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -24,8 +24,8 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] - optional :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } + # @return [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] + optional :file_citation, -> { OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } # @!attribute start_index # @@ -47,7 +47,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # @param file_citation [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] # # @param start_index [Integer] # @@ -55,7 +55,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. 
- # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation + # @see OpenAI::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index 516a7249..90055353 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -12,8 +12,8 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] - required :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath } + # @return [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] + required :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation::FilePath } # @!attribute start_index # @@ -38,7 +38,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] + # @param file_path [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] # # @param start_index [Integer] # @@ -46,7 +46,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path + # @see OpenAI::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index b67e6401..659b9518 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -24,8 +24,8 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] - optional :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath } + # @return [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] + optional :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath } # @!attribute start_index # @@ -46,7 +46,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # @param file_path [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath] # # @param start_index [Integer] # @@ -54,7 +54,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path + # @see OpenAI::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. 
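Both annotation unions above discriminate on `type`. A minimal sketch of dispatching on a parsed annotation under the post-rename constants, assuming `annotation` came from a message's text content:

    case annotation
    when OpenAI::Beta::Threads::FileCitationAnnotation
      annotation.file_citation.file_id # File cited by a file_search result
    when OpenAI::Beta::Threads::FilePathAnnotation
      annotation.file_path.file_id     # File generated by code_interpreter
    end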
diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index f99387f6..b71b6a5a 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -17,23 +17,21 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] - optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFile::Detail } + # @return [Symbol, OpenAI::Beta::Threads::ImageFile::Detail, nil] + optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFile::Detail } # @!method initialize(file_id:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::ImageFile} for more details. + # {OpenAI::Beta::Threads::ImageFile} for more details. # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # ... # - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few - # ... + # @param detail [Symbol, OpenAI::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @see OpenAI::Models::Beta::Threads::ImageFile#detail + # @see OpenAI::Beta::Threads::ImageFile#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 99aeca4e..09da28f8 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -7,8 +7,8 @@ module Threads class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Models::Beta::Threads::ImageFile] - required :image_file, -> { OpenAI::Models::Beta::Threads::ImageFile } + # @return [OpenAI::Beta::Threads::ImageFile] + required :image_file, -> { OpenAI::Beta::Threads::ImageFile } # @!attribute type # Always `image_file`. @@ -20,7 +20,7 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. # - # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] + # @param image_file [OpenAI::Beta::Threads::ImageFile] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 69c0f595..886ed307 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -9,8 +9,8 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# - # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] - optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFileDelta::Detail } + # @return [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail, nil] + optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFileDelta::Detail } # @!attribute file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image @@ -22,18 +22,16 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::ImageFileDelta} for more details. + # {OpenAI::Beta::Threads::ImageFileDelta} for more details. # - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few - # ... + # @param detail [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image - # ... # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail + # @see OpenAI::Beta::Threads::ImageFileDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 8831e0c3..206e36dd 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -19,8 +19,8 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Models::Beta::Threads::ImageFileDelta, nil] - optional :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileDelta } + # @return [OpenAI::Beta::Threads::ImageFileDelta, nil] + optional :image_file, -> { OpenAI::Beta::Threads::ImageFileDelta } # @!method initialize(index:, image_file: nil, type: :image_file) # References an image [File](https://platform.openai.com/docs/api-reference/files) @@ -28,7 +28,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] + # @param image_file [OpenAI::Beta::Threads::ImageFileDelta] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 02ed1378..c932079d 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -16,23 +16,21 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` # - # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] - optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURL::Detail } + # @return [Symbol, OpenAI::Beta::Threads::ImageURL::Detail, nil] + optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::ImageURL} for more details. + # {OpenAI::Beta::Threads::ImageURL} for more details. # # @param url [String] The external URL of the image, must be a supported image types: jpeg, jpg, png, - # ... # - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # ... + # @param detail [Symbol, OpenAI::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # - # @see OpenAI::Models::Beta::Threads::ImageURL#detail + # @see OpenAI::Beta::Threads::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index f9b5edc9..52d35a06 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -7,8 +7,8 @@ module Threads class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Models::Beta::Threads::ImageURL] - required :image_url, -> { OpenAI::Models::Beta::Threads::ImageURL } + # @return [OpenAI::Beta::Threads::ImageURL] + required :image_url, -> { OpenAI::Beta::Threads::ImageURL } # @!attribute type # The type of the content part. @@ -19,7 +19,7 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # References an image URL in the content of a message. # - # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] + # @param image_url [OpenAI::Beta::Threads::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 1f1d98ef..c8b10e43 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -9,8 +9,8 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] - optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURLDelta::Detail } + # @return [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail, nil] + optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURLDelta::Detail } # @!attribute url # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, @@ -21,18 +21,16 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, url: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::ImageURLDelta} for more details. + # {OpenAI::Beta::Threads::ImageURLDelta} for more details. 
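A sketch of an `image_url` content part built from the attributes documented above (the URL is a placeholder; omitting `detail` leaves the documented default of `auto`):

    image_url_part = {
      type: :image_url,
      image_url: {
        url: "https://example.com/photo.png", # placeholder; jpeg, jpg, png, gif, or webp
        detail: :low                          # or :high
      }
    }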
# - # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in - # ... + # @param detail [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # # @param url [String] The URL of the image, must be a supported image types: jpeg, jpg, png, gif, webp - # ... # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # - # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail + # @see OpenAI::Beta::Threads::ImageURLDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index f2a01238..efe44526 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -19,15 +19,15 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Models::Beta::Threads::ImageURLDelta, nil] - optional :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDelta } + # @return [OpenAI::Beta::Threads::ImageURLDelta, nil] + optional :image_url, -> { OpenAI::Beta::Threads::ImageURLDelta } # @!method initialize(index:, image_url: nil, type: :image_url) # References an image URL in the content of a message. # # @param index [Integer] The index of the content part in the message. # - # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] + # @param image_url [OpenAI::Beta::Threads::ImageURLDelta] # # @param type [Symbol, :image_url] Always `image_url`. end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index b98b3ca0..19af32cc 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -23,9 +23,9 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute attachments # A list of files attached to the message, and the tools they were added to. # - # @return [Array, nil] + # @return [Array, nil] required :attachments, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment] }, nil?: true # @!attribute completed_at @@ -37,9 +37,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the message in array of text and/or images. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent] + } # @!attribute created_at # The Unix timestamp (in seconds) for when the message was created. @@ -56,8 +58,8 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute incomplete_details # On an incomplete message, details about why the message is incomplete. 
# - # @return [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] - required :incomplete_details, -> { OpenAI::Models::Beta::Threads::Message::IncompleteDetails }, nil?: true + # @return [OpenAI::Beta::Threads::Message::IncompleteDetails, nil] + required :incomplete_details, -> { OpenAI::Beta::Threads::Message::IncompleteDetails }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -79,8 +81,8 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Message::Role] - required :role, enum: -> { OpenAI::Models::Beta::Threads::Message::Role } + # @return [Symbol, OpenAI::Beta::Threads::Message::Role] + required :role, enum: -> { OpenAI::Beta::Threads::Message::Role } # @!attribute run_id # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) @@ -94,8 +96,8 @@ class Message < OpenAI::Internal::Type::BaseModel # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Message::Status] - required :status, enum: -> { OpenAI::Models::Beta::Threads::Message::Status } + # @return [Symbol, OpenAI::Beta::Threads::Message::Status] + required :status, enum: -> { OpenAI::Beta::Threads::Message::Status } # @!attribute thread_id # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that @@ -106,7 +108,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Message} for more details. + # {OpenAI::Beta::Threads::Message} for more details. # # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -114,32 +116,28 @@ class Message < OpenAI::Internal::Type::BaseModel # @param id [String] The identifier, which can be referenced in API endpoints. # # @param assistant_id [String, nil] If applicable, the ID of the [assistant](https://platform.openai.com/docs/api-re - # ... # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they were added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they were added to. # # @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the message was completed. # - # @param content [Array] The content of the message in array of text and/or images. + # @param content [Array] The content of the message in array of text and/or images. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the message was created. # # @param incomplete_at [Integer, nil] The Unix timestamp (in seconds) for when the message was marked as incomplete. # - # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete. + # @param incomplete_details [OpenAI::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. + # @param role [Symbol, OpenAI::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. # # @param run_id [String, nil] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) associa - # ... # - # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` - # ... + # @param status [Symbol, OpenAI::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` # # @param thread_id [String] The [thread](https://platform.openai.com/docs/api-reference/threads) ID that thi - # ... # # @param object [Symbol, :"thread.message"] The object type, which is always `thread.message`. @@ -153,21 +151,25 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] + } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union - variant -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant -> { OpenAI::Beta::CodeInterpreterTool } - variant -> { OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } + variant -> { + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + } class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -181,26 +183,26 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] end end - # @see OpenAI::Models::Beta::Threads::Message#incomplete_details + # @see OpenAI::Beta::Threads::Message#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason the message is incomplete. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] - required :reason, enum: -> { OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason } + # @return [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] + required :reason, enum: -> { OpenAI::Beta::Threads::Message::IncompleteDetails::Reason } # @!method initialize(reason:) # On an incomplete message, details about why the message is incomplete. # - # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. + # @param reason [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. 
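A sketch of reading the status fields documented above from a parsed `Message`, assuming `message` was returned by the API:

    if message.status == :incomplete && (details = message.incomplete_details)
      details.reason # why the message stopped; see the Reason enum below
    end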
# The reason the message is incomplete. # - # @see OpenAI::Models::Beta::Threads::Message::IncompleteDetails#reason + # @see OpenAI::Beta::Threads::Message::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -217,7 +219,7 @@ module Reason # The entity that produced the message. One of `user` or `assistant`. # - # @see OpenAI::Models::Beta::Threads::Message#role + # @see OpenAI::Beta::Threads::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -231,7 +233,7 @@ module Role # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. # - # @see OpenAI::Models::Beta::Threads::Message#status + # @see OpenAI::Beta::Threads::Message#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index dbc60f62..295d6858 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -12,19 +12,19 @@ module MessageContent discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. - variant :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileContentBlock } + variant :image_file, -> { OpenAI::Beta::Threads::ImageFileContentBlock } # References an image URL in the content of a message. - variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLContentBlock } + variant :image_url, -> { OpenAI::Beta::Threads::ImageURLContentBlock } # The text content that is part of a message. - variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlock } + variant :text, -> { OpenAI::Beta::Threads::TextContentBlock } # The refusal content generated by the assistant. - variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalContentBlock } + variant :refusal, -> { OpenAI::Beta::Threads::RefusalContentBlock } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] + # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 744d6ed2..a6b04dc3 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -12,19 +12,19 @@ module MessageContentDelta discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. - variant :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileDeltaBlock } + variant :image_file, -> { OpenAI::Beta::Threads::ImageFileDeltaBlock } # The text content that is part of a message. - variant :text, -> { OpenAI::Models::Beta::Threads::TextDeltaBlock } + variant :text, -> { OpenAI::Beta::Threads::TextDeltaBlock } # The refusal content that is part of a message. - variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalDeltaBlock } + variant :refusal, -> { OpenAI::Beta::Threads::RefusalDeltaBlock } # References an image URL in the content of a message. 
- variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDeltaBlock } + variant :image_url, -> { OpenAI::Beta::Threads::ImageURLDeltaBlock } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] + # @return [Array(OpenAI::Beta::Threads::ImageFileDeltaBlock, OpenAI::Beta::Threads::TextDeltaBlock, OpenAI::Beta::Threads::RefusalDeltaBlock, OpenAI::Beta::Threads::ImageURLDeltaBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 9310f8bb..93fd228a 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -12,16 +12,16 @@ module MessageContentPartParam discriminator :type # References an image [File](https://platform.openai.com/docs/api-reference/files) in the content of a message. - variant :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileContentBlock } + variant :image_file, -> { OpenAI::Beta::Threads::ImageFileContentBlock } # References an image URL in the content of a message. - variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLContentBlock } + variant :image_url, -> { OpenAI::Beta::Threads::ImageURLContentBlock } # The text content that is part of a message. - variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlockParam } + variant :text, -> { OpenAI::Beta::Threads::TextContentBlockParam } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] + # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam)] end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index e6a5e155..223feced 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -12,8 +12,8 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Beta::Threads::MessageCreateParams::Content } # @!attribute role # The role of the entity that is creating the message. Allowed values include: @@ -23,15 +23,17 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] - required :role, enum: -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Role } + # @return [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] + required :role, enum: -> { OpenAI::Beta::Threads::MessageCreateParams::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. 
# - # @return [Array, nil] + # @return [Array, nil] optional :attachments, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::MessageCreateParams::Attachment] + }, nil?: true # @!attribute metadata @@ -49,13 +51,13 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: ... + # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -67,14 +69,14 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } + variant -> { OpenAI::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -103,24 +105,28 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool] + } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union discriminator :type - variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch } + -> { + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -134,7 +140,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index a845cecd..831356c3 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -8,26 +8,26 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the message in array of text and/or images. # - # @return [Array, nil] + # @return [Array, nil] optional :content, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentDelta] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContentDelta] } # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role, nil] - optional :role, enum: -> { OpenAI::Models::Beta::Threads::MessageDelta::Role } + # @return [Symbol, OpenAI::Beta::Threads::MessageDelta::Role, nil] + optional :role, enum: -> { OpenAI::Beta::Threads::MessageDelta::Role } # @!method initialize(content: nil, role: nil) # The delta containing the fields that have changed on the Message. # - # @param content [Array] The content of the message in array of text and/or images. + # @param content [Array] The content of the message in array of text and/or images. # - # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. + # @param role [Symbol, OpenAI::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. # The entity that produced the message. One of `user` or `assistant`. # - # @see OpenAI::Models::Beta::Threads::MessageDelta#role + # @see OpenAI::Beta::Threads::MessageDelta#role module Role extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 96c689fb..510cd5cf 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -14,8 +14,8 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the Message. # - # @return [OpenAI::Models::Beta::Threads::MessageDelta] - required :delta, -> { OpenAI::Models::Beta::Threads::MessageDelta } + # @return [OpenAI::Beta::Threads::MessageDelta] + required :delta, -> { OpenAI::Beta::Threads::MessageDelta } # @!attribute object # The object type, which is always `thread.message.delta`. 
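# --- Editor's illustrative sketch; not part of the patch itself. ---
# One way the delta models above might be consumed, assuming an enumerable
# `stream` of events; the event source is an assumption, and only the
# MessageDeltaEvent / MessageContentDelta shapes come from these files.
text = +""
stream.each do |event|
  next unless event.is_a?(OpenAI::Beta::Threads::MessageDeltaEvent)
  Array(event.delta.content).each do |block|
    # Text deltas carry partial strings; image/refusal blocks are skipped here.
    text << block.text.value if block.type == :text && block.text&.value
  end
end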
@@ -29,7 +29,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the message, which can be referenced in API endpoints. # - # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. + # @param delta [OpenAI::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. # # @param object [Symbol, :"thread.message.delta"] The object type, which is always `thread.message.delta`. end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 911b6f57..6ffe6655 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -38,8 +38,8 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Beta::Threads::MessageListParams::Order } + # @return [Symbol, OpenAI::Beta::Threads::MessageListParams::Order, nil] + optional :order, enum: -> { OpenAI::Beta::Threads::MessageListParams::Order } # @!attribute run_id # Filter messages by the run ID that generated them. @@ -52,18 +52,14 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Beta::Threads::MessageListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # - # @param run_id [String] Filter messages by the run ID that generated them. ... + # @param run_id [String] Filter messages by the run ID that generated them. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index ab23d244..43bbab67 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -31,7 +31,7 @@ class MessageUpdateParams < OpenAI::Internal::Type::BaseModel # # @param thread_id [String] # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index f7a4a2b0..66fbe931 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -17,8 +17,8 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function definition. # - # @return [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] - required :function, -> { OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function } + # @return [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] + required :function, -> { OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function } # @!attribute type # The type of tool call the output is required for. For now, this is always @@ -29,20 +29,17 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall} for more - # details. + # {OpenAI::Beta::Threads::RequiredActionFunctionToolCall} for more details. # # Tool call objects # # @param id [String] The ID of the tool call. This ID must be referenced when you submit the tool out - # ... # - # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. + # @param function [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. # # @param type [Symbol, :function] The type of tool call the output is required for. For now, this is always `funct - # ... - # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function + # @see OpenAI::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments that the model expects you to pass to the function. diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 51b736e9..a4c6345e 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -56,8 +56,8 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. # - # @return [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] - required :incomplete_details, -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails }, nil?: true + # @return [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] + required :incomplete_details, -> { OpenAI::Beta::Threads::Run::IncompleteDetails }, nil?: true # @!attribute instructions # The instructions that the @@ -70,8 +70,8 @@ class Run < OpenAI::Internal::Type::BaseModel # @!attribute last_error # The last error associated with this run. Will be `null` if there are no errors. 
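# --- Editor's illustrative sketch; not part of the patch itself. ---
# Checking the nullable fields documented above once a `run` (assumed to be
# an OpenAI::Beta::Threads::Run returned by the API) reaches a terminal state.
case run.status
when :failed
  # `last_error` is nil when there is no error; `code` is one of the
  # LastError::Code values (`server_error`, `rate_limit_exceeded`, `invalid_prompt`).
  warn "run failed: #{run.last_error&.code} #{run.last_error&.message}"
when :incomplete
  warn "run incomplete: #{run.incomplete_details&.reason}"
end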
# - # @return [OpenAI::Models::Beta::Threads::Run::LastError, nil] - required :last_error, -> { OpenAI::Models::Beta::Threads::Run::LastError }, nil?: true + # @return [OpenAI::Beta::Threads::Run::LastError, nil] + required :last_error, -> { OpenAI::Beta::Threads::Run::LastError }, nil?: true # @!attribute max_completion_tokens # The maximum number of completion tokens specified to have been used over the @@ -124,8 +124,8 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] - required :required_action, -> { OpenAI::Models::Beta::Threads::Run::RequiredAction }, nil?: true + # @return [OpenAI::Beta::Threads::Run::RequiredAction, nil] + required :required_action, -> { OpenAI::Beta::Threads::Run::RequiredAction }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -149,8 +149,8 @@ class Run < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - required :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + required :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute started_at # The Unix timestamp (in seconds) for when the run was started. @@ -163,8 +163,8 @@ class Run < OpenAI::Internal::Type::BaseModel # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::RunStatus] - required :status, enum: -> { OpenAI::Models::Beta::Threads::RunStatus } + # @return [Symbol, OpenAI::Beta::Threads::RunStatus] + required :status, enum: -> { OpenAI::Beta::Threads::RunStatus } # @!attribute thread_id # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) @@ -182,30 +182,30 @@ class Run < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - required :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true + # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + required :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools # The list of tools that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. # - # @return [Array] - required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } + # @return [Array] + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
# - # @return [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] - required :truncation_strategy, -> { OpenAI::Models::Beta::Threads::Run::TruncationStrategy }, nil?: true + # @return [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] + required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true # @!attribute usage # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). # - # @return [OpenAI::Models::Beta::Threads::Run::Usage, nil] - required :usage, -> { OpenAI::Models::Beta::Threads::Run::Usage }, nil?: true + # @return [OpenAI::Beta::Threads::Run::Usage, nil] + required :usage, -> { OpenAI::Beta::Threads::Run::Usage }, nil?: true # @!attribute temperature # The sampling temperature used for this run. If not set, defaults to 1. @@ -221,7 +221,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Run} for more details. + # {OpenAI::Beta::Threads::Run} for more details. # # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -229,7 +229,6 @@ class Run < OpenAI::Internal::Type::BaseModel # @param id [String] The identifier, which can be referenced in API endpoints. # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista - # ... # # @param cancelled_at [Integer, nil] The Unix timestamp (in seconds) for when the run was cancelled. # @@ -241,52 +240,39 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run failed. # - # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet - # ... + # @param incomplete_details [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet # # @param instructions [String] The instructions that the [assistant](https://platform.openai.com/docs/api-refer - # ... # - # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. + # @param last_error [OpenAI::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. # # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens specified to have been used over the cou - # ... # # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens specified to have been used over the course - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # # @param model [String] The model that the [assistant](https://platform.openai.com/docs/api-reference/as - # ... # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action - # ... + # @param required_action [OpenAI::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param started_at [Integer, nil] The Unix timestamp (in seconds) for when the run was started. # - # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac - # ... + # @param status [Symbol, OpenAI::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe - # ... + # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe # - # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro - # ... + # @param truncation_strategy [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # - # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not - # ... + # @param usage [OpenAI::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not # # @param temperature [Float, nil] The sampling temperature used for this run. If not set, defaults to 1. # @@ -294,29 +280,28 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param object [Symbol, :"thread.run"] The object type, which is always `thread.run`. - # @see OpenAI::Models::Beta::Threads::Run#incomplete_details + # @see OpenAI::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. 
# - # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil] - optional :reason, enum: -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason } + # @return [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason, nil] + optional :reason, enum: -> { OpenAI::Beta::Threads::Run::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Run::IncompleteDetails} for more details. + # {OpenAI::Beta::Threads::Run::IncompleteDetails} for more details. # # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. # - # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li - # ... + # @param reason [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # - # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason + # @see OpenAI::Beta::Threads::Run::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -328,13 +313,13 @@ module Reason end end - # @see OpenAI::Models::Beta::Threads::Run#last_error + # @see OpenAI::Beta::Threads::Run#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] - required :code, enum: -> { OpenAI::Models::Beta::Threads::Run::LastError::Code } + # @return [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] + required :code, enum: -> { OpenAI::Beta::Threads::Run::LastError::Code } # @!attribute message # A human-readable description of the error. @@ -345,13 +330,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # The last error associated with this run. Will be `null` if there are no errors. # - # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + # @param code [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # # @param message [String] A human-readable description of the error. # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # - # @see OpenAI::Models::Beta::Threads::Run::LastError#code + # @see OpenAI::Beta::Threads::Run::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -364,14 +349,16 @@ module Code end end - # @see OpenAI::Models::Beta::Threads::Run#required_action + # @see OpenAI::Beta::Threads::Run#required_action class RequiredAction < OpenAI::Internal::Type::BaseModel # @!attribute submit_tool_outputs # Details on the tool outputs needed for this run to continue. # - # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] + # @return [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] required :submit_tool_outputs, - -> { OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs } + -> { + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs + } # @!attribute type # For now, this is always `submit_tool_outputs`. 
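# --- Editor's illustrative sketch; not part of the patch itself. ---
# Answering a `requires_action` run using the shapes above. The
# `handle_tool_call` helper and the exact `submit_tool_outputs` client
# signature are assumptions; the RequiredAction -> SubmitToolOutputs ->
# tool_calls traversal comes directly from these models.
if run.status == :requires_action && run.required_action&.type == :submit_tool_outputs
  outputs = run.required_action.submit_tool_outputs.tool_calls.map do |call|
    # Each `call` is a RequiredActionFunctionToolCall; `function.arguments`
    # is the JSON string of arguments the model wants passed to the function.
    {tool_call_id: call.id, output: handle_tool_call(call.function.name, call.function.arguments)}
  end
  client.beta.threads.runs.submit_tool_outputs(run.id, thread_id: run.thread_id, tool_outputs: outputs)
end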
@@ -383,27 +370,29 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. + # @param submit_tool_outputs [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. # # @param type [Symbol, :submit_tool_outputs] For now, this is always `submit_tool_outputs`. - # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs + # @see OpenAI::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # A list of the relevant tool calls. # - # @return [Array] + # @return [Array] required :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] + } # @!method initialize(tool_calls:) # Details on the tool outputs needed for this run to continue. # - # @param tool_calls [Array] A list of the relevant tool calls. + # @param tool_calls [Array] A list of the relevant tool calls. end end - # @see OpenAI::Models::Beta::Threads::Run#truncation_strategy + # @see OpenAI::Beta::Threads::Run#truncation_strategy class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to @@ -411,8 +400,8 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type } + # @return [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::Run::TruncationStrategy::Type } # @!attribute last_messages # The number of most recent messages from the thread when constructing the context @@ -423,23 +412,21 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, last_messages: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Run::TruncationStrategy} for more details. + # {OpenAI::Beta::Threads::Run::TruncationStrategy} for more details. # # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # ... + # @param type [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - # ... # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. 
When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type + # @see OpenAI::Beta::Threads::Run::TruncationStrategy#type module Type extend OpenAI::Internal::Type::Enum @@ -451,7 +438,7 @@ module Type end end - # @see OpenAI::Models::Beta::Threads::Run#usage + # @see OpenAI::Beta::Threads::Run#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run. diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 8ced1f05..773bbb7b 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -28,9 +28,9 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } # @!attribute additional_instructions # Appends additional instructions at the end of the instructions for the run. This @@ -43,9 +43,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute additional_messages # Adds additional messages to the thread before creating the run. # - # @return [Array, nil] + # @return [Array, nil] optional :additional_messages, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage] + }, nil?: true # @!attribute instructions @@ -93,8 +95,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # model associated with the assistant. If not, the model associated with the # assistant will be used. # - # @return [String, Symbol, OpenAI::Models::ChatModel, nil] - optional :model, union: -> { OpenAI::Models::Beta::Threads::RunCreateParams::Model }, nil?: true + # @return [String, Symbol, OpenAI::ChatModel, nil] + optional :model, union: -> { OpenAI::Beta::Threads::RunCreateParams::Model }, nil?: true # @!attribute parallel_tool_calls # Whether to enable @@ -112,8 +114,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format # Specifies the format that the model must output. Compatible with @@ -137,8 +139,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. 
# - # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - optional :response_format, union: -> { OpenAI::Models::Beta::AssistantResponseFormatOption }, nil?: true + # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -157,16 +159,18 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - optional :tool_choice, union: -> { OpenAI::Models::Beta::AssistantToolChoiceOption }, nil?: true + # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] }, + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] + }, nil?: true # @!attribute top_p @@ -183,9 +187,9 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @return [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] optional :truncation_strategy, - -> { OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy }, + -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy }, nil?: true # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) @@ -193,51 +197,38 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Beta::Threads::RunCreateParams} for more details. # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista - # ... # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo - # ... + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param additional_instructions [String, nil] Appends additional instructions at the end of the instructions for the run. This - # ... # - # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. # # @param instructions [String, nil] Overrides the [instructions](https://platform.openai.com/docs/api-reference/assi - # ... 
# # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the - # ... # # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run. - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to - # ... + # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: - # ... + # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify - # ... + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # - # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro - # ... + # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -245,9 +236,11 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, - union: -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content } + union: -> { + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content + } # @!attribute role # The role of the entity that is creating the message. 
Allowed values include: @@ -257,15 +250,17 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] - required :role, enum: -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role } + # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] + required :role, enum: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] + }, nil?: true # @!attribute metadata @@ -281,20 +276,19 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage} for more - # details. + # {OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: ... + # @param role [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#content + # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#content module Content extend OpenAI::Internal::Type::Union @@ -302,14 +296,16 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). 
- variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } + variant -> { + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] end # The role of the entity that is creating the message. Allowed values include: @@ -319,7 +315,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role + # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -340,24 +336,28 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] + } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union discriminator :type - variant :code_interpreter, -> { OpenAI::Models::Beta::CodeInterpreterTool } + variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch } + -> { + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -371,7 +371,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] end end end @@ -386,10 +386,10 @@ module Model variant String # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. - variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel)] end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -399,8 +399,8 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
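# --- Editor's illustrative sketch; not part of the patch itself. ---
# Creating a run with the truncation controls described above. The
# `truncation_strategy` shape is taken verbatim from this file; the
# `runs.create` call signature and the IDs are assumptions.
run = client.beta.threads.runs.create(
  "thread_abc",
  assistant_id: "asst_123",
  # Build the context window from only the 10 most recent thread messages.
  truncation_strategy: {type: :last_messages, last_messages: 10}
)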
# - # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type } + # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type } # @!attribute last_messages # The number of most recent messages from the thread when constructing the context @@ -411,24 +411,21 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, last_messages: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy} for more - # details. + # {OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy} for more details. # # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # ... + # @param type [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - # ... # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type + # @see OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 1ef700a7..0399613f 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -38,24 +38,20 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Beta::Threads::RunListParams::Order } + # @return [Symbol, OpenAI::Beta::Threads::RunListParams::Order, nil] + optional :order, enum: -> { OpenAI::Beta::Threads::RunListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... 
+ # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 3ac75a5c..85f8acff 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -19,14 +19,16 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!attribute tool_outputs # A list of tools for which the outputs are being submitted. # - # @return [Array] + # @return [Array] required :tool_outputs, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] + } # @!method initialize(thread_id:, tool_outputs:, request_options: {}) # @param thread_id [String] # - # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -46,13 +48,12 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # @!method initialize(output: nil, tool_call_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more + # {OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more # details. # # @param output [String] The output of the tool call to be submitted to continue the run. # # @param tool_call_id [String] The ID of the tool call in the `required_action` object within the run object th - # ... end end end diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 6210136c..0033640d 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -31,7 +31,7 @@ class RunUpdateParams < OpenAI::Internal::Type::BaseModel # # @param thread_id [String] # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index b4458c9c..8ef2e615 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -20,17 +20,17 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] - optional :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } + # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] + optional :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } # @!method initialize(index:, image: nil, type: :image) # @param index [Integer] The index of the output in the outputs array. # - # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] + # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image + # @see OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -41,11 +41,10 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for - # more details. + # {OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for more + # details. # # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image - # ... end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 88f00a99..4c7b9f6a 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -15,9 +15,11 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] + # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] required :code_interpreter, - -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter } + -> { + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter + } # @!attribute type # The type of tool call. This is always going to be `code_interpreter` for this @@ -28,18 +30,17 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code_interpreter:, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. + # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. # # Details of the Code Interpreter tool call the run step was involved in. # # @param id [String] The ID of the tool call. 
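Note that `metadata` is the only body parameter on the RunUpdateParams model shown earlier. A sketch of its use, reusing the `client` from the earlier sketch (the keyword shape of `runs.update` is an assumption; IDs are illustrative):

    # Tag an existing run with bookkeeping metadata.
    client.beta.threads.runs.update(
      "run_abc123",
      thread_id: "thread_abc123",
      metadata: {batch: "nightly-eval", attempt: "1"}
    )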
# - # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. This is always going to be `code_interpreter` for this ty - # ... - # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter + # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -52,21 +53,22 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array] + # @return [Array] required :outputs, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] + } # @!method initialize(input:, outputs:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} - # for more details. + # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} for more + # details. # # The Code Interpreter tool call definition. # # @param input [String] The input to the Code Interpreter tool call. # - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one - # ... + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -76,10 +78,14 @@ module Output # Text output from the Code Interpreter tool call as part of a run step. variant :logs, - -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs } + -> { + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs + } variant :image, - -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image } + -> { + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs @@ -105,9 +111,11 @@ class Logs < OpenAI::Internal::Type::BaseModel class Image < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] required :image, - -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image } + -> { + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image + } # @!attribute type # Always `image`. 
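The `Output` union above is discriminated on `type` (`:logs` or `:image`), so a parsed tool call can be routed without explicit class checks. A sketch, where `tool_call` is assumed to be an already-fetched `OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall`:

    tool_call.code_interpreter.outputs.each do |output|
      case output.type
      in :logs then puts output.logs           # text emitted by the sandbox
      in :image then puts output.image.file_id # file ID of the rendered image
      end
    end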
@@ -116,11 +124,11 @@ class Image < OpenAI::Internal::Type::BaseModel required :type, const: :image # @!method initialize(image:, type: :image) - # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image + # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -131,16 +139,15 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} + # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} # for more details. # # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image - # ... end end # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] + # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 204d059b..98ceaa6b 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -28,14 +28,13 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] + # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] optional :code_interpreter, - -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } + -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more - # details. + # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more details. # # Details of the Code Interpreter tool call the run step was involved in. # @@ -43,12 +42,11 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the tool call. # - # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. 
This is always going to be `code_interpreter` for this ty - # ... - # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter + # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -61,21 +59,22 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array, nil] + # @return [Array, nil] optional :outputs, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] + } # @!method initialize(input: nil, outputs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} - # for more details. + # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} for + # more details. # # The Code Interpreter tool call definition. # # @param input [String] The input to the Code Interpreter tool call. # - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one - # ... + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -84,12 +83,12 @@ module Output discriminator :type # Text output from the Code Interpreter tool call as part of a run step. - variant :logs, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs } + variant :logs, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterLogs } - variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage } + variant :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] + # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage)] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index d6149f24..b8d0d149 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -15,8 +15,8 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute file_search # For now, this is always going to be an empty object. # - # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] - required :file_search, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch } + # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] + required :file_search, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch } # @!attribute type # The type of tool call. 
This is always going to be `file_search` for this type of @@ -27,47 +27,50 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, file_search:, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall} for more details. + # {OpenAI::Beta::Threads::Runs::FileSearchToolCall} for more details. # # @param id [String] The ID of the tool call object. # - # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. + # @param file_search [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. # # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of - # ... - # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search + # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # The ranking options for the file search. # - # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] + # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] optional :ranking_options, - -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } + -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } # @!attribute results # The results of the file search. # - # @return [Array, nil] + # @return [Array, nil] optional :results, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + } # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. # - # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. + # @param ranking_options [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. # - # @param results [Array] The results of the file search. + # @param results [Array] The results of the file search. - # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options + # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] + # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] required :ranker, - enum: -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker } + enum: -> { + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker + } # @!attribute score_threshold # The score threshold for the file search. 
All values must be a floating point @@ -78,21 +81,19 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker:, score_threshold:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} + # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} # for more details. # # The ranking options for the file search. # - # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank - # ... + # @param ranker [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank # # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num - # ... # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker + # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum @@ -128,14 +129,16 @@ class Result < OpenAI::Internal::Type::BaseModel # The content of the result that was found. The content is only included if # requested via the include query parameter. # - # @return [Array, nil] + # @return [Array, nil] optional :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + } # @!method initialize(file_id:, file_name:, score:, content: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} - # for more details. + # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} for more + # details. # # A result instance of the file search. # @@ -144,10 +147,8 @@ class Result < OpenAI::Internal::Type::BaseModel # @param file_name [String] The name of the file that result was found in. # # @param score [Float] The score of the result. All values must be a floating point number between 0 an - # ... # - # @param content [Array] The content of the result that was found. The content is only included if reques - # ... + # @param content [Array] The content of the result that was found. The content is only included if reques class Content < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -159,18 +160,20 @@ class Content < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the content. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] + # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, - enum: -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type } + enum: -> { + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type + } # @!method initialize(text: nil, type: nil) # @param text [String] The text content of the file. 
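Reading the `Result` model above back out is straightforward. A sketch, where `tool_call` is assumed to be a parsed `OpenAI::Beta::Threads::Runs::FileSearchToolCall` (recall that `results` is only populated when requested via the `include` query parameter):

    (tool_call.file_search.results || []).each do |result|
      # Scores are floats between 0 and 1; higher means a closer match.
      puts format("%.3f  %s (%s)", result.score, result.file_name, result.file_id)
    end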
# - # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. + # @param type [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. # The type of the content. # - # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type + # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index de1c0704..a0896ee1 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -33,7 +33,7 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(file_search:, index:, id: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. + # {OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. # # @param file_search [Object] For now, this is always going to be an empty object. # @@ -42,7 +42,6 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @param id [String] The ID of the tool call object. # # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of - # ... end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index bb0655e2..ce1e3ad3 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -15,8 +15,8 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] - required :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function } + # @return [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] + required :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall::Function } # @!attribute type # The type of tool call. This is always going to be `function` for this type of @@ -27,16 +27,15 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall} for more details. + # {OpenAI::Beta::Threads::Runs::FunctionToolCall} for more details. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. + # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to - # ... - # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function + # @see OpenAI::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. 
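The `arguments` attribute above arrives as a single JSON-encoded string, so callers generally decode it before invoking the named function. A sketch, where `tool_call` is assumed to be a parsed `OpenAI::Beta::Threads::Runs::FunctionToolCall`:

    require "json"

    fn = tool_call.function
    args = JSON.parse(fn.arguments) # e.g. {"location" => "Paris"}
    puts "calling #{fn.name} with #{args.inspect}"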
@@ -60,8 +59,7 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:, output:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function} for more - # details. + # {OpenAI::Beta::Threads::Runs::FunctionToolCall::Function} for more details. # # The definition of the function that was called. # @@ -70,7 +68,6 @@ class Function < OpenAI::Internal::Type::BaseModel # @param name [String] The name of the function. # # @param output [String, nil] The output of the function. This will be `null` if the outputs have not been [su - # ... end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 62f29656..9dc353ce 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -28,23 +28,22 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] - optional :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function } + # @return [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] + optional :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function } # @!method initialize(index:, id: nil, function: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta} for more details. + # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta} for more details. # # @param index [Integer] The index of the tool call in the tool calls array. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. + # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to - # ... - # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function + # @see OpenAI::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. @@ -68,8 +67,7 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil, output: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more - # details. + # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more details. # # The definition of the function that was called. # @@ -78,7 +76,6 @@ class Function < OpenAI::Internal::Type::BaseModel # @param name [String] The name of the function. # # @param output [String, nil] The output of the function. This will be `null` if the outputs have not been [su - # ... 
end end end diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index 727d980b..f39e253b 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -8,9 +8,9 @@ module Runs class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] required :message_creation, - -> { OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation } + -> { OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation } # @!attribute type # Always `message_creation`. @@ -21,11 +21,11 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(message_creation:, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @param message_creation [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. - # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation + # @see OpenAI::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index c5076851..2dc26909 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -56,8 +56,8 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] - required :last_error, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::LastError }, nil?: true + # @return [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] + required :last_error, -> { OpenAI::Beta::Threads::Runs::RunStep::LastError }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -87,14 +87,14 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] - required :status, enum: -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Status } + # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] + required :status, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Status } # @!attribute step_details # The details of the run step. 
# - # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] - required :step_details, union: -> { OpenAI::Models::Beta::Threads::Runs::RunStep::StepDetails } + # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] + required :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStep::StepDetails } # @!attribute thread_id # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) @@ -106,26 +106,25 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of run step, which can be either `message_creation` or `tool_calls`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] - required :type, enum: -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Type } + # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] + required :type, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Type } # @!attribute usage # Usage statistics related to the run step. This value will be `null` while the # run step's status is `in_progress`. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] - required :usage, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Usage }, nil?: true + # @return [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] + required :usage, -> { OpenAI::Beta::Threads::Runs::RunStep::Usage }, nil?: true # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::RunStep} for more details. + # {OpenAI::Beta::Threads::Runs::RunStep} for more details. # # Represents a step in execution of a run. # # @param id [String] The identifier of the run step, which can be referenced in API endpoints. # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista - # ... # # @param cancelled_at [Integer, nil] The Unix timestamp (in seconds) for when the run step was cancelled. # @@ -134,40 +133,34 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @param created_at [Integer] The Unix timestamp (in seconds) for when the run step was created. # # @param expired_at [Integer, nil] The Unix timestamp (in seconds) for when the run step expired. A step is conside - # ... # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run step failed. # - # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err - # ... + # @param last_error [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param run_id [String] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that th - # ... # - # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai - # ... 
+ # @param status [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai # - # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. + # @param step_details [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t - # ... # - # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. + # @param type [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. # - # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru - # ... + # @param usage [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru # # @param object [Symbol, :"thread.run.step"] The object type, which is always `thread.run.step`. - # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error + # @see OpenAI::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] - required :code, enum: -> { OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code } + # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] + required :code, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::LastError::Code } # @!attribute message # A human-readable description of the error. @@ -179,13 +172,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # @param code [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. # # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. # - # @see OpenAI::Models::Beta::Threads::Runs::RunStep::LastError#code + # @see OpenAI::Beta::Threads::Runs::RunStep::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -200,7 +193,7 @@ module Code # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status + # @see OpenAI::Beta::Threads::Runs::RunStep#status module Status extend OpenAI::Internal::Type::Enum @@ -216,25 +209,25 @@ module Status # The details of the run step. # - # @see OpenAI::Models::Beta::Threads::Runs::RunStep#step_details + # @see OpenAI::Beta::Threads::Runs::RunStep#step_details module StepDetails extend OpenAI::Internal::Type::Union discriminator :type # Details of the message creation by the run step. 
- variant :message_creation, -> { OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails } + variant :message_creation, -> { OpenAI::Beta::Threads::Runs::MessageCreationStepDetails } # Details of the tool call. - variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails } + variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallsStepDetails } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] + # @return [Array(OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails)] end # The type of run step, which can be either `message_creation` or `tool_calls`. # - # @see OpenAI::Models::Beta::Threads::Runs::RunStep#type + # @see OpenAI::Beta::Threads::Runs::RunStep#type module Type extend OpenAI::Internal::Type::Enum @@ -245,7 +238,7 @@ module Type # @return [Array] end - # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage + # @see OpenAI::Beta::Threads::Runs::RunStep#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 2a53c523..4666af0b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -9,30 +9,30 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute step_details # The details of the run step. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject, nil] - optional :step_details, union: -> { OpenAI::Models::Beta::Threads::Runs::RunStepDelta::StepDetails } + # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject, nil] + optional :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails } # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. # - # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. + # @param step_details [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. # The details of the run step. # - # @see OpenAI::Models::Beta::Threads::Runs::RunStepDelta#step_details + # @see OpenAI::Beta::Threads::Runs::RunStepDelta#step_details module StepDetails extend OpenAI::Internal::Type::Union discriminator :type # Details of the message creation by the run step. - variant :message_creation, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta } + variant :message_creation, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta } # Details of the tool call. 
- variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject } + variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallDeltaObject } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] + # @return [Array(OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject)] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index f5c81ffe..19c633c5 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -15,8 +15,8 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the run step. # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] - required :delta, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDelta } + # @return [OpenAI::Beta::Threads::Runs::RunStepDelta] + required :delta, -> { OpenAI::Beta::Threads::Runs::RunStepDelta } # @!attribute object # The object type, which is always `thread.run.step.delta`. @@ -30,7 +30,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the run step, which can be referenced in API endpoints. # - # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. + # @param delta [OpenAI::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. # # @param object [Symbol, :"thread.run.step.delta"] The object type, which is always `thread.run.step.delta`. end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 85fdad6f..1c617d09 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -14,18 +14,20 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] + # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] optional :message_creation, - -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation } + -> { + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + } # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] + # @param message_creation [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. - # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation + # @see OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. 
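Both step-details unions above discriminate on `type`, so streamed deltas can be dispatched with pattern matching. A sketch, where `event` is assumed to be a parsed `OpenAI::Beta::Threads::Runs::RunStepDeltaEvent`:

    case event.delta.step_details
    in OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta => details
      puts "message step: #{details.message_creation&.message_id}"
    in OpenAI::Beta::Threads::Runs::ToolCallDeltaObject => details
      puts "tool-call step: #{details.tool_calls&.size || 0} delta(s)"
    in nil
      nil # `step_details` is optional on a delta
    end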
diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 2bcb9cb5..e5251302 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -42,9 +42,9 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and @@ -57,8 +57,8 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Beta::Threads::Runs::StepListParams::Order } + # @return [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order, nil] + optional :order, enum: -> { OpenAI::Beta::Threads::Runs::StepListParams::Order } # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -67,19 +67,14 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @param thread_id [String] # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo - # ... + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 4a2c095e..f6238fd8 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -29,9 +29,9 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
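The listing parameters above compose with the `include` flag. A sketch of paging through a run's steps, oldest first, reusing the `client` from the earlier sketch (the method shape and the enumerable page are assumptions; IDs are illustrative):

    steps = client.beta.threads.runs.steps.list(
      "run_abc123",
      thread_id: "thread_abc123",
      order: :asc,
      limit: 20,
      include: [:"step_details.tool_calls[*].file_search.results[*].content"]
    )
    steps.each { |step| puts "#{step.id}: #{step.status}" }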
# - # @return [Array, nil] + # @return [Array, nil] optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -41,8 +41,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param run_id [String] # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo - # ... + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 5e18fa3b..4140ec79 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -12,14 +12,14 @@ module ToolCall discriminator :type # Details of the Code Interpreter tool call the run step was involved in. - variant :code_interpreter, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall } + variant :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall } - variant :file_search, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall } + variant :file_search, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall } - variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall } + variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] + # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Beta::Threads::Runs::FunctionToolCall)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index e5cd0aff..ab51e0a3 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -12,14 +12,14 @@ module ToolCallDelta discriminator :type # Details of the Code Interpreter tool call the run step was involved in. 
- variant :code_interpreter, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta } + variant :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta } - variant :file_search, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta } + variant :file_search, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta } - variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta } + variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta } # @!method self.variants - # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] + # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Beta::Threads::Runs::FunctionToolCallDelta)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 51c5d074..668b9ec8 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -17,18 +17,17 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCallDelta] } # @!method initialize(tool_calls: nil, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject} for more details. + # {OpenAI::Beta::Threads::Runs::ToolCallDeltaObject} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit - # ... + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 3a8800d6..368ce9b5 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -11,9 +11,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. # - # @return [Array] + # @return [Array] required :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCall] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCall] + } # @!attribute type # Always `tool_calls`. @@ -23,12 +25,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(tool_calls:, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails} for more details. + # {OpenAI::Beta::Threads::Runs::ToolCallsStepDetails} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. 
These can be associated wit - # ... + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index 8c5eb9f0..180b3abb 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -7,9 +7,11 @@ module Threads class Text < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array] + # @return [Array] required :annotations, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Annotation] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Annotation] + } # @!attribute value # The data that makes up the text. @@ -18,7 +20,7 @@ class Text < OpenAI::Internal::Type::BaseModel required :value, String # @!method initialize(annotations:, value:) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 9f61e404..73b0bd54 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -7,8 +7,8 @@ module Threads class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Models::Beta::Threads::Text] - required :text, -> { OpenAI::Models::Beta::Threads::Text } + # @return [OpenAI::Beta::Threads::Text] + required :text, -> { OpenAI::Beta::Threads::Text } # @!attribute type # Always `text`. @@ -19,7 +19,7 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :text) # The text content that is part of a message. # - # @param text [OpenAI::Models::Beta::Threads::Text] + # @param text [OpenAI::Beta::Threads::Text] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 20b88879..63ad3975 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -7,9 +7,9 @@ module Threads class TextDelta < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::AnnotationDelta] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::AnnotationDelta] } # @!attribute value # The data that makes up the text. @@ -18,7 +18,7 @@ class TextDelta < OpenAI::Internal::Type::BaseModel optional :value, String # @!method initialize(annotations: nil, value: nil) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. 
end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index c0172733..126aefbd 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -19,15 +19,15 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Models::Beta::Threads::TextDelta, nil] - optional :text, -> { OpenAI::Models::Beta::Threads::TextDelta } + # @return [OpenAI::Beta::Threads::TextDelta, nil] + optional :text, -> { OpenAI::Beta::Threads::TextDelta } # @!method initialize(index:, text: nil, type: :text) # The text content that is part of a message. # # @param index [Integer] The index of the content part in the message. # - # @param text [OpenAI::Models::Beta::Threads::TextDelta] + # @param text [OpenAI::Beta::Threads::TextDelta] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 15613a18..8fb216ee 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -17,8 +17,8 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # A list of chat completion choices. Can be more than one if `n` is greater # than 1. # - # @return [Array] - required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice] } + # @return [Array] + required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice] } # @!attribute created # The Unix timestamp (in seconds) of when the chat completion was created. @@ -57,8 +57,8 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] - optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletion::ServiceTier }, nil?: true + # @return [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] + optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletion::ServiceTier }, nil?: true # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. @@ -72,32 +72,29 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. # - # @return [OpenAI::Models::CompletionUsage, nil] - optional :usage, -> { OpenAI::Models::CompletionUsage } + # @return [OpenAI::CompletionUsage, nil] + optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletion} for more details. + # {OpenAI::Chat::ChatCompletion} for more details. # # Represents a chat completion response returned by model, based on the provided # input. # # @param id [String] A unique identifier for the chat completion. # - # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 - # ... + # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. # # @param model [String] The model used for the chat completion. 
# - # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. - # ... # - # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. + # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. # # @param object [Symbol, :"chat.completion"] The object type, which is always `chat.completion`. @@ -110,8 +107,8 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] - required :finish_reason, enum: -> { OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason } + # @return [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] + required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason } # @!attribute index # The index of the choice in the list of choices. @@ -122,27 +119,26 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] - required :logprobs, -> { OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs }, nil?: true + # @return [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] + required :logprobs, -> { OpenAI::Chat::ChatCompletion::Choice::Logprobs }, nil?: true # @!attribute message # A chat completion message generated by the model. # - # @return [OpenAI::Models::Chat::ChatCompletionMessage] - required :message, -> { OpenAI::Models::Chat::ChatCompletionMessage } + # @return [OpenAI::Chat::ChatCompletionMessage] + required :message, -> { OpenAI::Chat::ChatCompletionMessage } # @!method initialize(finish_reason:, index:, logprobs:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletion::Choice} for more details. + # {OpenAI::Chat::ChatCompletion::Choice} for more details. # - # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model - # ... + # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. # - # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. # - # @param message [OpenAI::Models::Chat::ChatCompletionMessage] A chat completion message generated by the model. + # @param message [OpenAI::Chat::ChatCompletionMessage] A chat completion message generated by the model. # The reason the model stopped generating tokens. 
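To make the ChatCompletion / Choice shapes above concrete, a short usage sketch; the client construction and the `chat.completions.create` surface are assumptions about the SDK, not part of this diff:

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

completion = client.chat.completions.create(
  model: "gpt-4o", # assumed model name
  messages: [{role: :user, content: "Say hello"}]
)

choice = completion.choices.first   # OpenAI::Chat::ChatCompletion::Choice
puts choice.message.content         # OpenAI::Chat::ChatCompletionMessage
puts choice.finish_reason           # e.g. :stop, per the FinishReason enum
puts completion.usage&.total_tokens # usage is optional, hence the &.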
This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -151,7 +147,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason + # @see OpenAI::Chat::ChatCompletion::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -165,30 +161,30 @@ module FinishReason # @return [Array] end - # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs + # @see OpenAI::Chat::ChatCompletion::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -210,7 +206,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Models::Chat::ChatCompletion#service_tier + # @see OpenAI::Chat::ChatCompletion#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index ab8b934b..2f9fbe2b 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -14,16 +14,18 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] - optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio }, nil?: true + # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] + optional :audio, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio }, nil?: true # @!attribute content # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. 
# - # @return [String, Array, nil] + # @return [String, Array, nil] optional :content, - union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content }, + union: -> { + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content + }, nil?: true # @!attribute function_call @@ -32,9 +34,9 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] + # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] optional :function_call, - -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, + -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, nil?: true # @!attribute name @@ -53,34 +55,33 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] + } # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam} for more details. + # {OpenAI::Chat::ChatCompletionAssistantMessageParam} for more details. # # Messages sent by the model in response to user messages. # - # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. ... + # @param audio [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. # - # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function - # ... + # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function # - # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th - # ... + # @param function_call [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param name [String] An optional name for the participant. Provides the model information to differen - # ... # # @param refusal [String, nil] The refusal message by the assistant. # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the messages author, in this case `assistant`. - # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio + # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for a previous audio response from the model. 
@@ -90,19 +91,18 @@ class Audio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio} for more - # details. + # {OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio} for more details. # # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param id [String] Unique identifier for a previous audio response from the model. ... + # @param id [String] Unique identifier for a previous audio response from the model. end # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. # - # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content + # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -110,7 +110,7 @@ module Content variant String # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. - variant -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } + variant -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). @@ -120,25 +120,27 @@ module ArrayOfContentPart discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). - variant :text, -> { OpenAI::Models::Chat::ChatCompletionContentPartText } + variant :text, -> { OpenAI::Chat::ChatCompletionContentPartText } - variant :refusal, -> { OpenAI::Models::Chat::ChatCompletionContentPartRefusal } + variant :refusal, -> { OpenAI::Chat::ChatCompletionContentPartRefusal } # @!method self.variants - # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] + # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal)] end # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ArrayOfContentPartArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] + OpenAI::Internal::Type::ArrayOf[union: -> { + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart + }] end # @deprecated # - # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#function_call + # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -157,14 +159,13 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for - # more details. + # {OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for more + # details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # ... 
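The assistant-message param documented above allows `content` to be nil when `tool_calls` is present; a sketch of a prior assistant turn carried back into a request, assuming the SDK coerces plain hashes through these param models:

assistant_turn = {
  role: :assistant,
  content: nil, # permitted because tool_calls is supplied
  tool_calls: [
    {
      id: "call_abc123", # hypothetical tool-call ID
      type: :function,
      function: {name: "get_weather", arguments: "{\"city\":\"Paris\"}"}
    }
  ]
}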
# # @param name [String] The name of the function to call. end diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 4ade12b4..861309b5 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -32,7 +32,7 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, data:, expires_at:, transcript:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionAudio} for more details. + # {OpenAI::Chat::ChatCompletionAudio} for more details. # # If the audio output modality is requested, this object contains data about the # audio response from the model. @@ -40,9 +40,9 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # # @param id [String] Unique identifier for this audio response. # - # @param data [String] Base64 encoded audio bytes generated by the model, in the format ... + # @param data [String] Base64 encoded audio bytes generated by the model, in the format # - # @param expires_at [Integer] The Unix timestamp (in seconds) for when this audio response will ... + # @param expires_at [Integer] The Unix timestamp (in seconds) for when this audio response will # # @param transcript [String] Transcript of the audio generated by the model. end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index faff16ff..e9901c92 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -8,32 +8,32 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - required :format_, enum: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Format }, api_name: :format + # @return [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] + required :format_, enum: -> { OpenAI::Chat::ChatCompletionAudioParam::Format }, api_name: :format # @!attribute voice # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # - # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] - required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } + # @return [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] + required :voice, union: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice } # @!method initialize(format_:, voice:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionAudioParam} for more details. + # {OpenAI::Chat::ChatCompletionAudioParam} for more details. # # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, ... + # @param format_ [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, # - # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are ... 
+ # @param voice [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ + # @see OpenAI::Chat::ChatCompletionAudioParam#format_ module Format extend OpenAI::Internal::Type::Enum @@ -51,33 +51,33 @@ module Format # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # - # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice + # @see OpenAI::Chat::ChatCompletionAudioParam#voice module Voice extend OpenAI::Internal::Type::Union variant String - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ALLOY } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ASH } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::BALLAD } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::CORAL } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ECHO } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::FABLE } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ONYX } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::NOVA } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SAGE } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SHIMMER } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } + variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::VERSE } # @!method self.variants # @return [Array(String, Symbol)] diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index e663c989..8f94cd1e 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -15,9 +15,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # greater than 1. Can also be empty for the last chunk if you set # `stream_options: {"include_usage": true}`. # - # @return [Array] - required :choices, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice] } + # @return [Array] + required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice] } # @!attribute created # The Unix timestamp (in seconds) of when the chat completion was created. 
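Note that ChatCompletionAudioParam maps the reserved wire name `format` onto the `format_` attribute via `api_name: :format`. A sketch of requesting audio output; the model name and parameter plumbing are assumptions:

completion = client.chat.completions.create(
  model: "gpt-4o-audio-preview",        # assumed audio-capable model
  modalities: [:text, :audio],
  audio: {format: :mp3, voice: :alloy}, # wire name `format` maps to `format_`
  messages: [{role: :user, content: "Read this aloud"}]
)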
Each @@ -57,8 +56,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] - optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier }, nil?: true + # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] + optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletionChunk::ServiceTier }, nil?: true # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. @@ -77,12 +76,12 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # **NOTE:** If the stream is interrupted or cancelled, you may not receive the # final usage chunk which contains the total token usage for the request. # - # @return [OpenAI::Models::CompletionUsage, nil] - optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true + # @return [OpenAI::CompletionUsage, nil] + optional :usage, -> { OpenAI::CompletionUsage }, nil?: true # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionChunk} for more details. + # {OpenAI::Chat::ChatCompletionChunk} for more details. # # Represents a streamed chunk of a chat completion response returned by the model, # based on the provided input. @@ -90,21 +89,17 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # # @param id [String] A unique identifier for the chat completion. Each chunk has the same ID. # - # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is - # ... + # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. Each ch - # ... # # @param model [String] The model to generate the completion. # - # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. - # ... # - # @param usage [OpenAI::Models::CompletionUsage, nil] An optional field that will only be present when you set ... + # @param usage [OpenAI::CompletionUsage, nil] An optional field that will only be present when you set # # @param object [Symbol, :"chat.completion.chunk"] The object type, which is always `chat.completion.chunk`. @@ -112,8 +107,8 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta # A chat completion delta generated by streamed model responses. # - # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] - required :delta, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta } + # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] + required :delta, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta } # @!attribute finish_reason # The reason the model stopped generating tokens. 
This will be `stop` if the model @@ -123,9 +118,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] + # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] required :finish_reason, - enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason }, + enum: -> { + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason + }, nil?: true # @!attribute index @@ -137,23 +134,22 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] - optional :logprobs, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true + # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] + optional :logprobs, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionChunk::Choice} for more details. + # {OpenAI::Chat::ChatCompletionChunk::Choice} for more details. # - # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. + # @param delta [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. # - # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model - # ... + # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. # - # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta + # @see OpenAI::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the chunk message. @@ -167,8 +163,8 @@ class Delta < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] - optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } + # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] + optional :function_call, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } # @!attribute refusal # The refusal message generated by the model. @@ -179,35 +175,36 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the author of this message. 
# - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] - optional :role, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role } + # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] + optional :role, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role } # @!attribute tool_calls # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + } # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta} for more details. + # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta} for more details. # # A chat completion delta generated by streamed model responses. # # @param content [String, nil] The contents of the chunk message. # - # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th - # ... + # @param function_call [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param refusal [String, nil] The refusal message generated by the model. # - # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. + # @param role [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. # - # @param tool_calls [Array] + # @param tool_calls [Array] # @deprecated # - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call + # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -226,21 +223,20 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for - # more details. + # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for more + # details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # ... # # @param name [String] The name of the function to call. end # The role of the author of this message. 
# - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#role + # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#role module Role extend OpenAI::Internal::Type::Enum @@ -268,25 +264,25 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] - optional :function, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } + # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] + optional :function, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } # @!attribute type # The type of the tool. Currently, only `function` is supported. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] - optional :type, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } + # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] + optional :type, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } # @!method initialize(index:, id: nil, function: nil, type: nil) # @param index [Integer] # # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] + # @param function [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] # - # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. + # @param type [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function + # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -305,18 +301,17 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} - # for more details. + # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} for more + # details. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # ... # # @param name [String] The name of the function to call. end # The type of the tool. Currently, only `function` is supported. # - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type + # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type module Type extend OpenAI::Internal::Type::Enum @@ -335,7 +330,7 @@ module Type # model called a tool, or `function_call` (deprecated) if the model called a # function. 
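A sketch of accumulating the streamed Delta objects above into final text; the `stream_raw` method name is an assumption about the client surface, not something this diff establishes:

content = +""
client.chat.completions.stream_raw(
  model: "gpt-4o",
  messages: [{role: :user, content: "Tell me a story"}]
).each do |chunk|              # OpenAI::Chat::ChatCompletionChunk
  choice = chunk.choices.first # may be absent on the final usage chunk
  next if choice.nil?
  content << (choice.delta.content || "")
end
puts content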
# - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason + # @see OpenAI::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -349,30 +344,30 @@ module FinishReason # @return [Array] end - # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs + # @see OpenAI::Chat::ChatCompletionChunk::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -394,7 +389,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier + # @see OpenAI::Chat::ChatCompletionChunk#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index b00878ec..64a02cf0 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -11,22 +11,22 @@ module ChatCompletionContentPart discriminator :type # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). - variant :text, -> { OpenAI::Models::Chat::ChatCompletionContentPartText } + variant :text, -> { OpenAI::Chat::ChatCompletionContentPartText } # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). - variant :image_url, -> { OpenAI::Models::Chat::ChatCompletionContentPartImage } + variant :image_url, -> { OpenAI::Chat::ChatCompletionContentPartImage } # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). - variant :input_audio, -> { OpenAI::Models::Chat::ChatCompletionContentPartInputAudio } + variant :input_audio, -> { OpenAI::Chat::ChatCompletionContentPartInputAudio } # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text generation. 
- variant :file, -> { OpenAI::Models::Chat::ChatCompletionContentPart::File } + variant :file, -> { OpenAI::Chat::ChatCompletionContentPart::File } class File < OpenAI::Internal::Type::BaseModel # @!attribute file # - # @return [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] - required :file, -> { OpenAI::Models::Chat::ChatCompletionContentPart::File::File } + # @return [OpenAI::Chat::ChatCompletionContentPart::File::File] + required :file, -> { OpenAI::Chat::ChatCompletionContentPart::File::File } # @!attribute type # The type of the content part. Always `file`. @@ -38,11 +38,11 @@ class File < OpenAI::Internal::Type::BaseModel # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text # generation. # - # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] + # @param file [OpenAI::Chat::ChatCompletionContentPart::File::File] # # @param type [Symbol, :file] The type of the content part. Always `file`. - # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file + # @see OpenAI::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel # @!attribute file_data # The base64 encoded file data, used when passing the file to the model as a @@ -65,18 +65,18 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionContentPart::File::File} for more details. + # {OpenAI::Chat::ChatCompletionContentPart::File::File} for more details. # - # @param file_data [String] The base64 encoded file data, used when passing the file to the model ... + # @param file_data [String] The base64 encoded file data, used when passing the file to the model # - # @param file_id [String] The ID of an uploaded file to use as input. ... + # @param file_id [String] The ID of an uploaded file to use as input. # - # @param filename [String] The name of the file, used when passing the file to the model as a ... + # @param filename [String] The name of the file, used when passing the file to the model as a end end # @!method self.variants - # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] + # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File)] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 69462ff7..9288c5d4 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -6,8 +6,8 @@ module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] - required :image_url, -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL } + # @return [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] + required :image_url, -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL } # @!attribute type # The type of the content part. 
@@ -18,11 +18,11 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] + # @param image_url [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. - # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url + # @see OpenAI::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. @@ -34,23 +34,21 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] - optional :detail, enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail } + # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] + optional :detail, enum: -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL} for more - # details. + # {OpenAI::Chat::ChatCompletionContentPartImage::ImageURL} for more details. # # @param url [String] Either a URL of the image or the base64 encoded image data. # - # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: - # ... + # @param detail [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # - # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail + # @see OpenAI::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 155990b0..86a9a7d5 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -6,8 +6,8 @@ module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute input_audio # - # @return [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] - required :input_audio, -> { OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio } + # @return [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] + required :input_audio, -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio } # @!attribute type # The type of the content part. Always `input_audio`. 
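The content-part union above discriminates on `type`; a sketch of a multimodal user turn built from plain hashes, which are assumed to coerce through that discriminator:

user_turn = {
  role: :user,
  content: [
    {type: :text, text: "What is in this image?"}, # ChatCompletionContentPartText
    {
      type: :image_url,                            # ChatCompletionContentPartImage
      image_url: {url: "https://example.com/cat.png", detail: :low}
    }
  ]
}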
@@ -18,11 +18,11 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(input_audio:, type: :input_audio) # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). # - # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @param input_audio [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] # # @param type [Symbol, :input_audio] The type of the content part. Always `input_audio`. - # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio + # @see OpenAI::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64 encoded audio data. @@ -33,23 +33,23 @@ class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the encoded audio data. Currently supports "wav" and "mp3". # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] + # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] required :format_, - enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, + enum: -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, api_name: :format # @!method initialize(data:, format_:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more + # {OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more # details. # # @param data [String] Base64 encoded audio data. # - # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". ... + # @param format_ [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". # The format of the encoded audio data. Currently supports "wav" and "mp3". # - # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ + # @see OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 412741c9..31fafa47 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -7,8 +7,8 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the developer message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content } # @!attribute role # The role of the messages author, in this case `developer`. @@ -25,22 +25,21 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :developer) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam} for more details. + # {OpenAI::Chat::ChatCompletionDeveloperMessageParam} for more details. 
# # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. # - # @param content [String, Array] The contents of the developer message. + # @param content [String, Array] The contents of the developer message. # # @param name [String] An optional name for the participant. Provides the model information to differen - # ... # # @param role [Symbol, :developer] The role of the messages author, in this case `developer`. # The contents of the developer message. # - # @see OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam#content + # @see OpenAI::Chat::ChatCompletionDeveloperMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -48,14 +47,16 @@ module Content variant String # An array of content parts with a defined type. For developer messages, only type `text` is supported. - variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } + variant -> { + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = - OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 8a75e905..275a8339 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -26,17 +26,17 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessage::Annotation] } # @!attribute audio # If the audio output modality is requested, this object contains data about the # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] - optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudio }, nil?: true + # @return [OpenAI::Chat::ChatCompletionAudio, nil] + optional :audio, -> { OpenAI::Chat::ChatCompletionAudio }, nil?: true # @!attribute function_call # @deprecated @@ -44,19 +44,21 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] - optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall } + # @return [OpenAI::Chat::ChatCompletionMessage::FunctionCall, nil] + optional :function_call, -> { OpenAI::Chat::ChatCompletionMessage::FunctionCall } # @!attribute tool_calls # The tool calls generated by the model, such as function calls. 
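Per the developer-message docs above, `developer` supersedes `system` for o1 models and newer; a minimal sketch of both forms:

developer_turn = {role: :developer, content: "Answer tersely."}
system_turn    = {role: :system, content: "Answer tersely."} # pre-o1 equivalent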
# - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] + } # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionMessage} for more details. + # {OpenAI::Chat::ChatCompletionMessage} for more details. # # A chat completion message generated by the model. # @@ -64,14 +66,13 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @param refusal [String, nil] The refusal message generated by the model. # - # @param annotations [Array] Annotations for the message, when applicable, as when using the ... + # @param annotations [Array] Annotations for the message, when applicable, as when using the # - # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data ... + # @param audio [OpenAI::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data # - # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th - # ... + # @param function_call [OpenAI::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the author of this message. @@ -85,17 +86,17 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute url_citation # A URL citation when using web search. # - # @return [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] - required :url_citation, -> { OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation } + # @return [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] + required :url_citation, -> { OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation } # @!method initialize(url_citation:, type: :url_citation) # A URL citation when using web search. # - # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. + # @param url_citation [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. # # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. - # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation + # @see OpenAI::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. 
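Reading the web-search annotations documented above off a returned message; the citation field names follow the URLCitation attributes:

message = completion.choices.first.message
(message.annotations || []).each do |ann|
  cite = ann.url_citation # OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation
  puts "#{cite.title} <#{cite.url}> [#{cite.start_index}..#{cite.end_index}]"
end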
@@ -136,7 +137,7 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @deprecated # - # @see OpenAI::Models::Chat::ChatCompletionMessage#function_call + # @see OpenAI::Chat::ChatCompletionMessage#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -155,13 +156,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall} for more details. + # {OpenAI::Chat::ChatCompletionMessage::FunctionCall} for more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # ... # # @param name [String] The name of the function to call. end diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 16e5d625..6710bee4 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -14,26 +14,26 @@ module ChatCompletionMessageParam # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. - variant :developer, -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam } + variant :developer, -> { OpenAI::Chat::ChatCompletionDeveloperMessageParam } # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages # for this purpose instead. - variant :system, -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam } + variant :system, -> { OpenAI::Chat::ChatCompletionSystemMessageParam } # Messages sent by an end user, containing prompts or additional context # information. - variant :user, -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam } + variant :user, -> { OpenAI::Chat::ChatCompletionUserMessageParam } # Messages sent by the model in response to user messages. 
- variant :assistant, -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam } + variant :assistant, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam } - variant :tool, -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam } + variant :tool, -> { OpenAI::Chat::ChatCompletionToolMessageParam } - variant :function, -> { OpenAI::Models::Chat::ChatCompletionFunctionMessageParam } + variant :function, -> { OpenAI::Chat::ChatCompletionFunctionMessageParam } # @!method self.variants - # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] + # @return [Array(OpenAI::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Chat::ChatCompletionSystemMessageParam, OpenAI::Chat::ChatCompletionUserMessageParam, OpenAI::Chat::ChatCompletionAssistantMessageParam, OpenAI::Chat::ChatCompletionToolMessageParam, OpenAI::Chat::ChatCompletionFunctionMessageParam)] end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index 6ecdc5b5..c90216d9 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -13,8 +13,8 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function that the model called. # - # @return [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] - required :function, -> { OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function } + # @return [OpenAI::Chat::ChatCompletionMessageToolCall::Function] + required :function, -> { OpenAI::Chat::ChatCompletionMessageToolCall::Function } # @!attribute type # The type of the tool. Currently, only `function` is supported. @@ -25,11 +25,11 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. + # @param function [OpenAI::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function + # @see OpenAI::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -48,13 +48,11 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function} for more - # details. + # {OpenAI::Chat::ChatCompletionMessageToolCall::Function} for more details. # # The function that the model called. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # ... # # @param name [String] The name of the function to call. 
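Because `Function#arguments` is a JSON-encoded string generated by the model (and, per the doc text above, not guaranteed to be valid JSON or to match a schema), callers typically parse it defensively. A hedged sketch, reusing a `completion` from a tool-calling request:

    require "json"

    call = completion.choices.first.message.tool_calls&.first
    if call
      begin
        args = JSON.parse(call.function.arguments)
        puts "would call #{call.function.name} with #{args.inspect}"
      rescue JSON::ParserError
        warn "model produced unparseable arguments: #{call.function.arguments}"
      end
    end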
end diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index 245af016..afab8ee0 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -6,8 +6,8 @@ module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] - required :function, -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function } + # @return [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] + required :function, -> { OpenAI::Chat::ChatCompletionNamedToolChoice::Function } # @!attribute type # The type of the tool. Currently, only `function` is supported. @@ -19,11 +19,11 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # Specifies a tool the model should use. Use to force the model to call a specific # function. # - # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] + # @param function [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function + # @see OpenAI::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index df3dbe30..eeccd9df 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -9,8 +9,8 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Chat::ChatCompletionPredictionContent::Content } # @!attribute type # The type of the predicted content you want to provide. This type is currently @@ -21,20 +21,20 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, type: :content) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionPredictionContent} for more details. + # {OpenAI::Chat::ChatCompletionPredictionContent} for more details. # # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @param content [String, Array] The content that should be matched when generating a model response. ... + # @param content [String, Array] The content that should be matched when generating a model response. # - # @param type [Symbol, :content] The type of the predicted content you want to provide. This type is ... + # @param type [Symbol, :content] The type of the predicted content you want to provide. This type is # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. 
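Predicted output content is most useful when the bulk of the response is known ahead of time, such as regenerating a file with small edits. A minimal sketch, where `source` is a stand-in for the original file contents:

    source = File.read("foo.rb")  # hypothetical file being rewritten

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{role: :user, content: "Rename class Foo to Bar:\n#{source}"}],
      prediction: {type: :content, content: source}
    )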
# - # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content + # @see OpenAI::Chat::ChatCompletionPredictionContent#content module Content extend OpenAI::Internal::Type::Union @@ -43,14 +43,16 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. - variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } + variant -> { + OpenAI::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = - OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index de346443..ffbaa513 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -18,11 +18,11 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(include_usage: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionStreamOptions} for more details. + # {OpenAI::Chat::ChatCompletionStreamOptions} for more details. # # Options for streaming response. Only set this when you set `stream: true`. # - # @param include_usage [Boolean] If set, an additional chunk will be streamed before the `data: [DONE]` ... + # @param include_usage [Boolean] If set, an additional chunk will be streamed before the `data: [DONE]` end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 7ef7fbfd..43b7a5c4 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -7,8 +7,8 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the system message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Chat::ChatCompletionSystemMessageParam::Content } # @!attribute role # The role of the messages author, in this case `system`. @@ -25,22 +25,21 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :system) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionSystemMessageParam} for more details. + # {OpenAI::Chat::ChatCompletionSystemMessageParam} for more details. # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages # for this purpose instead. # - # @param content [String, Array] The contents of the system message. + # @param content [String, Array] The contents of the system message. # # @param name [String] An optional name for the participant. Provides the model information to differen - # ... 
# # @param role [Symbol, :system] The role of the messages author, in this case `system`. # The contents of the system message. # - # @see OpenAI::Models::Chat::ChatCompletionSystemMessageParam#content + # @see OpenAI::Chat::ChatCompletionSystemMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -48,14 +47,16 @@ module Content variant String # An array of content parts with a defined type. For system messages, only type `text` is supported. - variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } + variant -> { + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = - OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index 2dfadc88..c45fcee7 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -32,24 +32,21 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. # - # @return [Array] + # @return [Array] required :top_logprobs, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] } # @!method initialize(token:, bytes:, logprob:, top_logprobs:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionTokenLogprob} for more details. + # {OpenAI::Chat::ChatCompletionTokenLogprob} for more details. # # @param token [String] The token. # # @param bytes [Array, nil] A list of integers representing the UTF-8 bytes representation of the token. Use - # ... # # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens - # ... # - # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position - # ... + # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -77,15 +74,13 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token:, bytes:, logprob:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. + # {OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. # # @param token [String] The token. # # @param bytes [Array, nil] A list of integers representing the UTF-8 bytes representation of the token. Use - # ... # # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens - # ... 
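To see `ChatCompletionTokenLogprob` and `TopLogprob` in practice, request log probabilities on the create call and walk the per-token entries on the choice. A sketch; field names follow the model definitions above:

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{role: :user, content: "Pick a color."}],
      logprobs: true,
      top_logprobs: 2
    )

    completion.choices.first.logprobs.content.each do |entry|
      alts = entry.top_logprobs.map { "#{_1.token}=#{_1.logprob.round(2)}" }
      puts "#{entry.token.inspect} (alternatives: #{alts.join(', ')})"
    end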
end end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index c06b28d0..21a86040 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -6,8 +6,8 @@ module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Models::FunctionDefinition] - required :function, -> { OpenAI::Models::FunctionDefinition } + # @return [OpenAI::FunctionDefinition] + required :function, -> { OpenAI::FunctionDefinition } # @!attribute type # The type of the tool. Currently, only `function` is supported. @@ -16,7 +16,7 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(function:, type: :function) - # @param function [OpenAI::Models::FunctionDefinition] + # @param function [OpenAI::FunctionDefinition] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 5fdd5796..4b8a6fe3 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -16,10 +16,10 @@ module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. - variant enum: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto } + variant enum: -> { OpenAI::Chat::ChatCompletionToolChoiceOption::Auto } # Specifies a tool the model should use. Use to force the model to call a specific function. - variant -> { OpenAI::Models::Chat::ChatCompletionNamedToolChoice } + variant -> { OpenAI::Chat::ChatCompletionNamedToolChoice } # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or @@ -36,7 +36,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] + # @return [Array(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 2ff0bf3b..c88bb51c 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -7,8 +7,8 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the tool message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Chat::ChatCompletionToolMessageParam::Content } # @!attribute role # The role of the messages author, in this case `tool`. 
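A sketch tying `ChatCompletionTool` and `ChatCompletionToolChoiceOption` together: define one function tool, then either let the model decide (`:auto`) or force the named function. The weather schema is illustrative only.

    weather_tool = {
      type: :function,
      function: {
        name: "get_weather",
        description: "Look up current weather for a city.",
        parameters: {
          type: "object",
          properties: {city: {type: "string"}},
          required: ["city"]
        }
      }
    }

    messages = [{role: :user, content: "What's the weather in Paris?"}]
    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: messages,
      tools: [weather_tool],
      tool_choice: :auto  # or {type: :function, function: {name: "get_weather"}} to force it
    )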
@@ -23,7 +23,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel required :tool_call_id, String # @!method initialize(content:, tool_call_id:, role: :tool) - # @param content [String, Array] The contents of the tool message. + # @param content [String, Array] The contents of the tool message. # # @param tool_call_id [String] Tool call that this message is responding to. # @@ -31,7 +31,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the tool message. # - # @see OpenAI::Models::Chat::ChatCompletionToolMessageParam#content + # @see OpenAI::Chat::ChatCompletionToolMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -39,14 +39,16 @@ module Content variant String # An array of content parts with a defined type. For tool messages, only type `text` is supported. - variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } + variant -> { + OpenAI::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray + } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = - OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] + OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] end end end diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index b412e5c9..ffd7b68a 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -7,8 +7,8 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the user message. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content } # @!attribute role # The role of the messages author, in this case `user`. @@ -25,21 +25,20 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :user) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionUserMessageParam} for more details. + # {OpenAI::Chat::ChatCompletionUserMessageParam} for more details. # # Messages sent by an end user, containing prompts or additional context # information. # - # @param content [String, Array] The contents of the user message. ... + # @param content [String, Array] The contents of the user message. # # @param name [String] An optional name for the participant. Provides the model information to differen - # ... # # @param role [Symbol, :user] The role of the messages author, in this case `user`. # The contents of the user message. # - # @see OpenAI::Models::Chat::ChatCompletionUserMessageParam#content + # @see OpenAI::Chat::ChatCompletionUserMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -47,14 +46,14 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. 
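`ChatCompletionToolMessageParam` closes the tool-calling loop: after running the function, append the assistant turn and then a `role: :tool` message keyed by `tool_call_id`. A sketch continuing the previous one; it assumes the model actually emitted a tool call, echoing the response message back relies on it serializing to an assistant message param, and the weather payload is made up:

    call = completion.choices.first.message.tool_calls.first

    messages << completion.choices.first.message  # echo the assistant turn back
    messages << {
      role: :tool,
      tool_call_id: call.id,
      content: JSON.generate(temperature_c: 18, sky: "overcast")
    }

    followup = client.chat.completions.create(model: "gpt-4o", messages: messages)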
- variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } + variant -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartArray = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Chat::ChatCompletionContentPart }] end end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index ed12261d..3a3a3c23 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -18,9 +18,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [images](https://platform.openai.com/docs/guides/vision), and # [audio](https://platform.openai.com/docs/guides/audio). # - # @return [Array] + # @return [Array] required :messages, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionMessageParam] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionMessageParam] } # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a @@ -29,16 +29,16 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel] - required :model, union: -> { OpenAI::Models::Chat::CompletionCreateParams::Model } + # @return [String, Symbol, OpenAI::ChatModel] + required :model, union: -> { OpenAI::Chat::CompletionCreateParams::Model } # @!attribute audio # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudioParam }, nil?: true + # @return [OpenAI::Chat::ChatCompletionAudioParam, nil] + optional :audio, -> { OpenAI::Chat::ChatCompletionAudioParam }, nil?: true # @!attribute frequency_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on their @@ -66,8 +66,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no functions are present. `auto` is the default if # functions are present. # - # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] - optional :function_call, union: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall } + # @return [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption, nil] + optional :function_call, union: -> { OpenAI::Chat::CompletionCreateParams::FunctionCall } # @!attribute functions # @deprecated @@ -76,9 +76,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # A list of functions the model may generate JSON inputs for. 
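User message content can also be an array of typed parts, per the `ChatCompletionContentPartArray` variant above. A sketch mixing text with an image reference (the URL is a placeholder):

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{
        role: :user,
        content: [
          {type: :text, text: "What is in this image?"},
          {type: :image_url, image_url: {url: "https://example.com/cat.png"}}
        ]
      }]
    )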
# - # @return [Array, nil] + # @return [Array, nil] optional :functions, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::CompletionCreateParams::Function] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::CompletionCreateParams::Function] } # @!attribute logit_bias # Modify the likelihood of specified tokens appearing in the completion. @@ -146,9 +146,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # `["text", "audio"]` # - # @return [Array, nil] + # @return [Array, nil] optional :modalities, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Chat::CompletionCreateParams::Modality] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Chat::CompletionCreateParams::Modality] }, nil?: true # @!attribute n @@ -171,8 +171,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @return [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - optional :prediction, -> { OpenAI::Models::Chat::ChatCompletionPredictionContent }, nil?: true + # @return [OpenAI::Chat::ChatCompletionPredictionContent, nil] + optional :prediction, -> { OpenAI::Chat::ChatCompletionPredictionContent }, nil?: true # @!attribute presence_penalty # Number between -2.0 and 2.0. Positive values penalize new tokens based on @@ -190,8 +190,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format # An object specifying the format that the model must output. @@ -205,8 +205,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. # - # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] - optional :response_format, union: -> { OpenAI::Models::Chat::CompletionCreateParams::ResponseFormat } + # @return [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject, nil] + optional :response_format, union: -> { OpenAI::Chat::CompletionCreateParams::ResponseFormat } # @!attribute seed # This feature is in Beta. If specified, our system will make a best effort to @@ -237,8 +237,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - optional :service_tier, enum: -> { OpenAI::Models::Chat::CompletionCreateParams::ServiceTier }, nil?: true + # @return [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] + optional :service_tier, enum: -> { OpenAI::Chat::CompletionCreateParams::ServiceTier }, nil?: true # @!attribute stop # Not supported with latest reasoning models `o3` and `o4-mini`. @@ -247,7 +247,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # returned text will not contain the stop sequence. 
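The `response_format` attribute above accepts the JSON-schema response format for structured outputs. A hedged sketch passing the schema payload as a plain hash; the schema itself is illustrative:

    completion = client.chat.completions.create(
      model: "gpt-4o",
      messages: [{role: :user, content: "Extract fields from: 'Ada, 36, London'"}],
      response_format: {
        type: :json_schema,
        json_schema: {
          name: "person",
          strict: true,
          schema: {
            type: "object",
            properties: {name: {type: "string"}, age: {type: "integer"}, city: {type: "string"}},
            required: ["name", "age", "city"],
            additionalProperties: false
          }
        }
      }
    )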
# # @return [String, Array, nil] - optional :stop, union: -> { OpenAI::Models::Chat::CompletionCreateParams::Stop }, nil?: true + optional :stop, union: -> { OpenAI::Chat::CompletionCreateParams::Stop }, nil?: true # @!attribute store # Whether or not to store the output of this chat completion request for use in @@ -260,8 +260,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. # - # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - optional :stream_options, -> { OpenAI::Models::Chat::ChatCompletionStreamOptions }, nil?: true + # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will @@ -283,16 +283,16 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no tools are present. `auto` is the default if tools # are present. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] - optional :tool_choice, union: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption } + # @return [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice, nil] + optional :tool_choice, union: -> { OpenAI::Chat::ChatCompletionToolChoiceOption } # @!attribute tools # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. # - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to @@ -325,79 +325,72 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] - optional :web_search_options, -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } + # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions, nil] + optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # @param messages [Array] A list of messages comprising the conversation so far. 
Depending on the # - # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with ... + # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # - # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. ... + # @param functions [Array] Deprecated in favor of `tools`. # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # - # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, # # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, - # ... # - # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. ... + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y - # ... # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # - # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... 
+ # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. # - # @param seed [Integer, nil] This feature is in Beta. ... + # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a - # ... + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # - # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... + # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -415,10 +408,10 @@ module Model # offers a wide range of models with different capabilities, performance # characteristics, and price points. 
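Several of the parameters listed above only matter together; for instance `stream_options.include_usage` adds a final usage chunk when streaming. A sketch, assuming this SDK's raw streaming entry point is `chat.completions.stream_raw` (the method name is not shown in this diff):

    stream = client.chat.completions.stream_raw(
      model: "gpt-4o",
      messages: [{role: :user, content: "Write a haiku about Ruby."}],
      temperature: 0.7,
      stream_options: {include_usage: true}
    )

    stream.each do |chunk|
      print chunk.choices.first&.delta&.content
      puts "\n#{chunk.usage.total_tokens} tokens" if chunk.usage  # present only on the final chunk
    end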
Refer to the [model guide](https://platform.openai.com/docs/models) # to browse and compare available models. - variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel)] end # @deprecated @@ -441,10 +434,10 @@ module FunctionCall extend OpenAI::Internal::Type::Union # `none` means the model will not call a function and instead generates a message. `auto` means the model can pick between generating a message or calling a function. - variant enum: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } + variant enum: -> { OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode } # Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. - variant -> { OpenAI::Models::Chat::ChatCompletionFunctionCallOption } + variant -> { OpenAI::Chat::ChatCompletionFunctionCallOption } # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a @@ -460,7 +453,7 @@ module FunctionCallMode end # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] + # @return [Array(Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption)] end # @deprecated @@ -493,16 +486,13 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::CompletionCreateParams::Function} for more details. + # {OpenAI::Chat::CompletionCreateParams::Function} for more details. # # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc - # ... # # @param description [String] A description of what the function does, used by the model to choose when and ho - # ... # # @param parameters [Hash{Symbol=>Object}] The parameters the functions accepts, described as a JSON Schema object. See the - # ... end module Modality @@ -529,20 +519,20 @@ module ResponseFormat extend OpenAI::Internal::Type::Union # Default response format. Used to generate text responses. - variant -> { OpenAI::Models::ResponseFormatText } + variant -> { OpenAI::ResponseFormatText } # JSON Schema response format. Used to generate structured JSON responses. # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). - variant -> { OpenAI::Models::ResponseFormatJSONSchema } + variant -> { OpenAI::ResponseFormatJSONSchema } # JSON object response format. An older method of generating JSON responses. # Using `json_schema` is recommended for models that support it. Note that the # model will not generate JSON without a system or user message instructing it # to do so. - variant -> { OpenAI::Models::ResponseFormatJSONObject } + variant -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] + # @return [Array(OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject)] end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -582,7 +572,7 @@ module Stop variant String - variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::Chat::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] @@ -596,35 +586,34 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] + # @return [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] optional :search_context_size, - enum: -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } + enum: -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } # @!attribute user_location # Approximate location parameters for the search. # - # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] + # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] optional :user_location, - -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, + -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, nil?: true # @!method initialize(search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions} for more - # details. + # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions} for more details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the ... + # @param search_context_size [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the # - # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. ... + # @param user_location [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size + # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -636,14 +625,14 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location + # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute approximate # Approximate location parameters for the search. 
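`WebSearchOptions`, its nested `UserLocation`, and the `Approximate` model (continued below) compose as shown in this sketch; the model name and location values are illustrative only:

    completion = client.chat.completions.create(
      model: "gpt-4o-search-preview",  # placeholder; use a search-enabled model
      messages: [{role: :user, content: "What's a good cafe nearby?"}],
      web_search_options: {
        search_context_size: :low,
        user_location: {
          type: :approximate,
          approximate: {city: "San Francisco", country: "US", timezone: "America/Los_Angeles"}
        }
      }
    )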
# - # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] + # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] required :approximate, - -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate } + -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate } # @!attribute type # The type of location approximation. Always `approximate`. @@ -653,16 +642,16 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(approximate:, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} - # for more details. + # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} for more + # details. # # Approximate location parameters for the search. # - # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. + # @param approximate [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. # - # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. ... + # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. - # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate + # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. @@ -692,18 +681,18 @@ class Approximate < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} + # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} # for more details. # # Approximate location parameters for the search. # - # @param city [String] Free text input for the city of the user, e.g. `San Francisco`. ... + # @param city [String] Free text input for the city of the user, e.g. `San Francisco`. # - # @param country [String] The two-letter ... + # @param country [String] The two-letter # - # @param region [String] Free text input for the region of the user, e.g. `California`. ... + # @param region [String] Free text input for the region of the user, e.g. `California`. # - # @param timezone [String] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) ... + # @param timezone [String] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) end end end diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index ea9cb4c5..b1f9e734 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -38,8 +38,8 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
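`CompletionListParams` drives the stored-completions listing endpoint. A sketch, assuming the corresponding resource method is `chat.completions.list`, that pages expose `data`, and that only completions created with `store: true` are returned:

    page = client.chat.completions.list(limit: 10, order: :desc)
    page.data.each do |completion|
      puts "#{completion.id} #{Time.at(completion.created)}"
    end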
# - # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Chat::CompletionListParams::Order } + # @return [Symbol, OpenAI::Chat::CompletionListParams::Order, nil] + optional :order, enum: -> { OpenAI::Chat::CompletionListParams::Order } # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -49,12 +49,11 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of Chat Completions to retrieve. # - # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: ... + # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: # # @param model [String] The model used to generate the Chat Completions. # - # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` - # ... + # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 3c2e662d..c30b1a6b 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -23,7 +23,7 @@ class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionUpdateParams} for more details. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 775be46e..8292066a 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -25,8 +25,8 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Chat::Completions::MessageListParams::Order } + # @return [Symbol, OpenAI::Chat::Completions::MessageListParams::Order, nil] + optional :order, enum: -> { OpenAI::Chat::Completions::MessageListParams::Order } # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -36,8 +36,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo - # ... + # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. 
Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2969dedd..582e2f5a 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -19,29 +19,28 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @return [Symbol, OpenAI::Models::ComparisonFilter::Type] - required :type, enum: -> { OpenAI::Models::ComparisonFilter::Type } + # @return [Symbol, OpenAI::ComparisonFilter::Type] + required :type, enum: -> { OpenAI::ComparisonFilter::Type } # @!attribute value # The value to compare against the attribute key; supports string, number, or # boolean types. # # @return [String, Float, Boolean] - required :value, union: -> { OpenAI::Models::ComparisonFilter::Value } + required :value, union: -> { OpenAI::ComparisonFilter::Value } # @!method initialize(key:, type:, value:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::ComparisonFilter} for more details. + # Some parameter documentations has been truncated, see {OpenAI::ComparisonFilter} + # for more details. # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. # # @param key [String] The key to compare against the value. # - # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. ... + # @param type [Symbol, OpenAI::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # @param value [String, Float, Boolean] The value to compare against the attribute key; supports string, number, or bool - # ... # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -52,7 +51,7 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @see OpenAI::Models::ComparisonFilter#type + # @see OpenAI::ComparisonFilter#type module Type extend OpenAI::Internal::Type::Enum @@ -70,7 +69,7 @@ module Type # The value to compare against the attribute key; supports string, number, or # boolean types. # - # @see OpenAI::Models::ComparisonFilter#value + # @see OpenAI::ComparisonFilter#value module Value extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index c493a5c5..8d33e0b0 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -15,8 +15,8 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute choices # The list of completion choices the model generated for the input prompt. # - # @return [Array] - required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::CompletionChoice] } + # @return [Array] + required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::CompletionChoice] } # @!attribute created # The Unix timestamp (in seconds) of when the completion was created. @@ -48,28 +48,27 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. 
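`ComparisonFilter`, defined above, is typically embedded in a file-search `filters:` argument rather than used alone. A sketch of constructing one directly, with an illustrative attribute key; the `Type` and `Value` unions above coerce the symbol and the plain value:

    filter = OpenAI::ComparisonFilter.new(key: "region", type: :eq, value: "us-east")
    # pass `filter` wherever a filter object is accepted, e.g. vector-store file search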
# - # @return [OpenAI::Models::CompletionUsage, nil] - optional :usage, -> { OpenAI::Models::CompletionUsage } + # @return [OpenAI::CompletionUsage, nil] + optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Completion} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Completion} for + # more details. # # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). # # @param id [String] A unique identifier for the completion. # - # @param choices [Array] The list of completion choices the model generated for the input prompt. + # @param choices [Array] The list of completion choices the model generated for the input prompt. # # @param created [Integer] The Unix timestamp (in seconds) of when the completion was created. # # @param model [String] The model used for completion. # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. - # ... # - # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. + # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. # # @param object [Symbol, :text_completion] The object type, which is always "text_completion" end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 5bca4663..c2a91320 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -9,8 +9,8 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @return [Symbol, OpenAI::Models::CompletionChoice::FinishReason] - required :finish_reason, enum: -> { OpenAI::Models::CompletionChoice::FinishReason } + # @return [Symbol, OpenAI::CompletionChoice::FinishReason] + required :finish_reason, enum: -> { OpenAI::CompletionChoice::FinishReason } # @!attribute index # @@ -19,8 +19,8 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # - # @return [OpenAI::Models::CompletionChoice::Logprobs, nil] - required :logprobs, -> { OpenAI::Models::CompletionChoice::Logprobs }, nil?: true + # @return [OpenAI::CompletionChoice::Logprobs, nil] + required :logprobs, -> { OpenAI::CompletionChoice::Logprobs }, nil?: true # @!attribute text # @@ -28,15 +28,14 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel required :text, String # @!method initialize(finish_reason:, index:, logprobs:, text:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::CompletionChoice} for more details. + # Some parameter documentations has been truncated, see {OpenAI::CompletionChoice} + # for more details. # - # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model - # ... + # @param finish_reason [Symbol, OpenAI::CompletionChoice::FinishReason] The reason the model stopped generating tokens. 
This will be `stop` if the model # # @param index [Integer] # - # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] + # @param logprobs [OpenAI::CompletionChoice::Logprobs, nil] # # @param text [String] @@ -45,7 +44,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @see OpenAI::Models::CompletionChoice#finish_reason + # @see OpenAI::CompletionChoice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -57,7 +56,7 @@ module FinishReason # @return [Array] end - # @see OpenAI::Models::CompletionChoice#logprobs + # @see OpenAI::CompletionChoice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute text_offset # diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index d397fba1..075f9d95 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -16,8 +16,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - required :model, union: -> { OpenAI::Models::CompletionCreateParams::Model } + # @return [String, Symbol, OpenAI::CompletionCreateParams::Model] + required :model, union: -> { OpenAI::CompletionCreateParams::Model } # @!attribute prompt # The prompt(s) to generate completions for, encoded as a string, array of @@ -28,7 +28,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # beginning of a new document. # # @return [String, Array, Array, Array>, nil] - required :prompt, union: -> { OpenAI::Models::CompletionCreateParams::Prompt }, nil?: true + required :prompt, union: -> { OpenAI::CompletionCreateParams::Prompt }, nil?: true # @!attribute best_of # Generates `best_of` completions server-side and returns the "best" (the one with @@ -138,13 +138,13 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # returned text will not contain the stop sequence. # # @return [String, Array, nil] - optional :stop, union: -> { OpenAI::Models::CompletionCreateParams::Stop }, nil?: true + optional :stop, union: -> { OpenAI::CompletionCreateParams::Stop }, nil?: true # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. # - # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - optional :stream_options, -> { OpenAI::Models::Chat::ChatCompletionStreamOptions }, nil?: true + # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute suffix # The suffix that comes after a completion of inserted text. @@ -186,50 +186,39 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::CompletionCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. 
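[Editor's usage sketch: branching on the `FinishReason` enum documented above. The three symbols come from the hunk's own docstring; `completion` is an assumed, already-fetched `OpenAI::Completion`.]

    choice = completion.choices.first
    case choice.finish_reason
    in :stop then "model hit a natural stop point or a stop sequence"
    in :length then "max tokens reached; output may be truncated"
    in :content_filter then "content was omitted by the content filter"
    end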
You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings - # ... # # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with - # ... # - # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # ... # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we - # ... # # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi - # ... # - # @param n [Integer, nil] How many completions to generate for each prompt. ... + # @param n [Integer, nil] How many completions to generate for each prompt. # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe - # ... # # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su - # ... # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # - # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -243,11 +232,11 @@ module Model variant String - variant const: -> { OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } + variant const: -> { OpenAI::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } - variant const: -> { OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::CompletionCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::CompletionCreateParams::Model::BABBAGE_002 } # @!method self.variants # @return [Array(String, Symbol)] @@ -272,11 +261,11 @@ module Prompt variant String - variant -> { OpenAI::Models::CompletionCreateParams::Prompt::StringArray } + variant -> { OpenAI::CompletionCreateParams::Prompt::StringArray } - variant -> { OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray } + variant -> { OpenAI::CompletionCreateParams::Prompt::IntegerArray } - variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } + variant -> { OpenAI::CompletionCreateParams::Prompt::ArrayOfToken2DArray } # @!method self.variants # @return [Array(String, Array, Array, Array>)] @@ -300,7 +289,7 @@ module Stop variant String - variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index defd2f03..202f9218 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -24,14 +24,14 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens_details # Breakdown of tokens used in a completion. # - # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails, nil] - optional :completion_tokens_details, -> { OpenAI::Models::CompletionUsage::CompletionTokensDetails } + # @return [OpenAI::CompletionUsage::CompletionTokensDetails, nil] + optional :completion_tokens_details, -> { OpenAI::CompletionUsage::CompletionTokensDetails } # @!attribute prompt_tokens_details # Breakdown of tokens used in the prompt. # - # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails, nil] - optional :prompt_tokens_details, -> { OpenAI::Models::CompletionUsage::PromptTokensDetails } + # @return [OpenAI::CompletionUsage::PromptTokensDetails, nil] + optional :prompt_tokens_details, -> { OpenAI::CompletionUsage::PromptTokensDetails } # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) # Usage statistics for the completion request. @@ -42,11 +42,11 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # # @param total_tokens [Integer] Total number of tokens used in the request (prompt + completion). # - # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. + # @param completion_tokens_details [OpenAI::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. # - # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. + # @param prompt_tokens_details [OpenAI::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. 
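[Editor's usage sketch of the call these params feed into. The parameter names and union shapes are taken from the `CompletionCreateParams` hunk above; the client setup and `completions.create` resource method are assumed from the SDK's usual conventions, not from this patch.]

    # client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])  # assumed constructor
    completion = client.completions.create(
      model: "gpt-3.5-turbo-instruct",  # Model union: String or an enum constant
      prompt: "Say hello",              # Prompt union: String or token arrays
      max_tokens: 16,
      stop: ["\n"]                      # Stop union: String or Array of Strings
    )
    puts completion.choices.first.text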
- # @see OpenAI::Models::CompletionUsage#completion_tokens_details + # @see OpenAI::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that @@ -78,20 +78,20 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::CompletionUsage::CompletionTokensDetails} for more details. + # {OpenAI::CompletionUsage::CompletionTokensDetails} for more details. # # Breakdown of tokens used in a completion. # - # @param accepted_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the ... + # @param accepted_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the # # @param audio_tokens [Integer] Audio input tokens generated by the model. # # @param reasoning_tokens [Integer] Tokens generated by the model for reasoning. # - # @param rejected_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the ... + # @param rejected_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the end - # @see OpenAI::Models::CompletionUsage#prompt_tokens_details + # @see OpenAI::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute audio_tokens # Audio input tokens present in the prompt. diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 9dc2a93d..8bfaf4d2 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -7,25 +7,24 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. # - # @return [Array] - required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::CompoundFilter::Filter] } + # @return [Array] + required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::CompoundFilter::Filter] } # @!attribute type # Type of operation: `and` or `or`. # - # @return [Symbol, OpenAI::Models::CompoundFilter::Type] - required :type, enum: -> { OpenAI::Models::CompoundFilter::Type } + # @return [Symbol, OpenAI::CompoundFilter::Type] + required :type, enum: -> { OpenAI::CompoundFilter::Type } # @!method initialize(filters:, type:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::CompoundFilter} for more details. + # Some parameter documentations has been truncated, see {OpenAI::CompoundFilter} + # for more details. # # Combine multiple filters using `and` or `or`. # - # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter` - # ... + # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter` # - # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] Type of operation: `and` or `or`. + # @param type [Symbol, OpenAI::CompoundFilter::Type] Type of operation: `and` or `or`. # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. @@ -33,17 +32,17 @@ module Filter extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. 
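[Editor's note on the `CompletionUsage` hunk above: per its field docs, `total_tokens` is the sum of prompt and completion tokens, which the sketch below checks; `completion` is an assumed `OpenAI::Completion`.]

    usage = completion.usage
    usage.prompt_tokens + usage.completion_tokens == usage.total_tokens  # => true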
- variant -> { OpenAI::Models::ComparisonFilter } + variant -> { OpenAI::ComparisonFilter } variant OpenAI::Internal::Type::Unknown # @!method self.variants - # @return [Array(OpenAI::Models::ComparisonFilter, Object)] + # @return [Array(OpenAI::ComparisonFilter, Object)] end # Type of operation: `and` or `or`. # - # @see OpenAI::Models::CompoundFilter#type + # @see OpenAI::CompoundFilter#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index e1030791..ec420899 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -7,8 +7,8 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of embeddings generated by the model. # - # @return [Array] - required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Embedding] } + # @return [Array] + required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Embedding] } # @!attribute model # The name of the model used to generate the embedding. @@ -25,19 +25,19 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @!attribute usage # The usage information for the request. # - # @return [OpenAI::Models::CreateEmbeddingResponse::Usage] - required :usage, -> { OpenAI::Models::CreateEmbeddingResponse::Usage } + # @return [OpenAI::CreateEmbeddingResponse::Usage] + required :usage, -> { OpenAI::CreateEmbeddingResponse::Usage } # @!method initialize(data:, model:, usage:, object: :list) - # @param data [Array] The list of embeddings generated by the model. + # @param data [Array] The list of embeddings generated by the model. # # @param model [String] The name of the model used to generate the embedding. # - # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] The usage information for the request. + # @param usage [OpenAI::CreateEmbeddingResponse::Usage] The usage information for the request. # # @param object [Symbol, :list] The object type, which is always "list". - # @see OpenAI::Models::CreateEmbeddingResponse#usage + # @see OpenAI::CreateEmbeddingResponse#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute prompt_tokens # The number of tokens used by the prompt. diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index 41792b3d..6f8e6de3 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -24,13 +24,12 @@ class Embedding < OpenAI::Internal::Type::BaseModel required :object, const: :embedding # @!method initialize(embedding:, index:, object: :embedding) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Embedding} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Embedding} for + # more details. # # Represents an embedding vector returned by embedding endpoint. # # @param embedding [Array] The embedding vector, which is a list of floats. The length of vector depends on - # ... # # @param index [Integer] The index of the embedding in the list of embeddings. # diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index bff4d83f..48890276 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -18,7 +18,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # tokens summed across inputs. 
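[Editor's usage sketch for the `CompoundFilter` hunk above: combining comparison filters with the `Type` enum (`and`/`or`). Keywords come from the hunk's `@!method initialize(filters:, type:)`; the values are illustrative.]

    compound = OpenAI::CompoundFilter.new(
      type: :and,  # Type enum: `and` or `or`
      filters: [   # Filter union: ComparisonFilter, or another (unknown) filter
        OpenAI::ComparisonFilter.new(key: "genre", type: :eq, value: "fiction"),
        OpenAI::ComparisonFilter.new(key: "year", type: :gt, value: 2000)
      ]
    )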
# # @return [String, Array, Array, Array>] - required :input, union: -> { OpenAI::Models::EmbeddingCreateParams::Input } + required :input, union: -> { OpenAI::EmbeddingCreateParams::Input } # @!attribute model # ID of the model to use. You can use the @@ -27,8 +27,8 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Models::EmbeddingModel] - required :model, union: -> { OpenAI::Models::EmbeddingCreateParams::Model } + # @return [String, Symbol, OpenAI::EmbeddingModel] + required :model, union: -> { OpenAI::EmbeddingCreateParams::Model } # @!attribute dimensions # The number of dimensions the resulting output embeddings should have. Only @@ -41,8 +41,8 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). # - # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil] - optional :encoding_format, enum: -> { OpenAI::Models::EmbeddingCreateParams::EncodingFormat } + # @return [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat, nil] + optional :encoding_format, enum: -> { OpenAI::EmbeddingCreateParams::EncodingFormat } # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -57,19 +57,14 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::EmbeddingCreateParams} for more details. # # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i - # ... # - # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo - # ... # - # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http - # ... + # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -88,13 +83,13 @@ module Input variant String # The array of strings that will be turned into an embedding. - variant -> { OpenAI::Models::EmbeddingCreateParams::Input::StringArray } + variant -> { OpenAI::EmbeddingCreateParams::Input::StringArray } # The array of integers that will be turned into an embedding. - variant -> { OpenAI::Models::EmbeddingCreateParams::Input::IntegerArray } + variant -> { OpenAI::EmbeddingCreateParams::Input::IntegerArray } # The array of arrays containing integers that will be turned into an embedding. - variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray } + variant -> { OpenAI::EmbeddingCreateParams::Input::ArrayOfToken2DArray } # @!method self.variants # @return [Array(String, Array, Array, Array>)] @@ -120,10 +115,10 @@ module Model variant String # ID of the model to use. 
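[Editor's usage sketch of the embeddings call these params describe. Parameter names and unions come from the `EmbeddingCreateParams` hunk; the `embeddings.create` method and model ID are assumed from the SDK's usual conventions.]

    response = client.embeddings.create(
      input: ["first chunk", "second chunk"],  # Input union: String or token arrays
      model: "text-embedding-3-small",         # String or an EmbeddingModel enum value
      encoding_format: :float,                 # `float` or `base64`
      dimensions: 256                          # supported by text-embedding-3 models
    )
    vector = response.data.first.embedding     # Array of Floats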
You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. - variant enum: -> { OpenAI::Models::EmbeddingModel } + variant enum: -> { OpenAI::EmbeddingModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] + # @return [Array(String, Symbol, OpenAI::EmbeddingModel)] end # The format to return the embeddings in. Can be either `float` or diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 8f14b19f..91a2a00f 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -10,15 +10,15 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # The configuration for the data source used for the evaluation runs. # - # @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] - required :data_source_config, union: -> { OpenAI::Models::EvalCreateParams::DataSourceConfig } + # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] + required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig } # @!attribute testing_criteria # A list of graders for all eval runs in this group. # - # @return [Array] + # @return [Array] required :testing_criteria, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion] } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion] } # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -41,11 +41,11 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::EvalCreateParams} for more details. # - # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # @@ -61,11 +61,11 @@ module DataSourceConfig # This schema is used to define the shape of the data that will be: # - Used to define your testing criteria and # - What data is required when creating a run - variant :custom, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom } + variant :custom, -> { OpenAI::EvalCreateParams::DataSourceConfig::Custom } # A data source config which specifies the metadata property of your stored completions query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. 
- variant :logs, -> { OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs } + variant :logs, -> { OpenAI::EvalCreateParams::DataSourceConfig::Logs } class Custom < OpenAI::Internal::Type::BaseModel # @!attribute item_schema @@ -89,7 +89,7 @@ class Custom < OpenAI::Internal::Type::BaseModel # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom} for more details. + # {OpenAI::EvalCreateParams::DataSourceConfig::Custom} for more details. # # A CustomDataSourceConfig object that defines the schema for the data source used # for the evaluation runs. This schema is used to define the shape of the data @@ -101,7 +101,6 @@ class Custom < OpenAI::Internal::Type::BaseModel # @param item_schema [Hash{Symbol=>Object}] The json schema for each row in the data source. # # @param include_sample_schema [Boolean] Whether the eval should expect you to populate the sample namespace (ie, by gene - # ... # # @param type [Symbol, :custom] The type of data source. Always `custom`. end @@ -130,7 +129,7 @@ class Logs < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs)] + # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -142,28 +141,30 @@ module TestingCriterion # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel } + variant :label_model, -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::EvalStringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } # A PythonGrader object that runs a python script on the input. - variant :python, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::Python } + variant :python, -> { OpenAI::EvalCreateParams::TestingCriterion::Python } # A ScoreModelGrader object that uses a model to assign a score to the input. - variant :score_model, -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel } + variant :score_model, -> { OpenAI::EvalCreateParams::TestingCriterion::ScoreModel } class LabelModel < OpenAI::Internal::Type::BaseModel # @!attribute input # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :input, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] + } # @!attribute labels # The labels to classify to each item in the evaluation. 
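[Editor's usage sketch assembling the shapes in the `EvalCreateParams` hunks above: a `Custom` data source config plus a `LabelModel` testing criterion. Keyword names come from the hunks' `@!method initialize` signatures; the `evals.create` method and model ID are assumed.]

    client.evals.create(
      name: "sentiment-eval",
      data_source_config: {
        type: :custom,                    # the Custom variant
        item_schema: {
          type: "object",
          properties: {text: {type: "string"}}
        },
        include_sample_schema: true
      },
      testing_criteria: [
        {
          type: :label_model,             # the LabelModel variant
          name: "sentiment-grader",
          model: "gpt-4o-mini",
          input: [{role: "system", content: "Classify the sentiment of {{item.text}}"}],
          labels: %w[positive negative neutral],
          passing_labels: %w[positive]
        }
      ]
    )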
@@ -197,14 +198,12 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel} for more - # details. + # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel} for more details. # # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... + # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param labels [Array] The labels to classify to each item in the evaluation. # @@ -221,14 +220,14 @@ class LabelModel < OpenAI::Internal::Type::BaseModel module Input extend OpenAI::Internal::Type::Union - variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem } class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -253,29 +252,35 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] required :content, - union: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content } + union: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content + } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] required :role, - enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role } + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + } # @!attribute type # The type of the message input. Always `message`. 
# - # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] optional :type, - enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type } + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} - # for more details. + # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} for + # more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -283,15 +288,15 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content module Content extend OpenAI::Internal::Type::Union @@ -299,10 +304,12 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText } + variant -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -319,24 +326,24 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} + # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} # for more details. # # A text output from the model. # - # @param text [String] The text output from the model. ... 
+ # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role module Role extend OpenAI::Internal::Type::Enum @@ -351,7 +358,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type module Type extend OpenAI::Internal::Type::Enum @@ -363,7 +370,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] + # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] end end @@ -416,9 +423,11 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!attribute input # The input text. This may include template strings. # - # @return [Array] + # @return [Array] required :input, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input] + } # @!attribute model # The model to use for the evaluation. @@ -459,7 +468,7 @@ class ScoreModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. + # @param input [Array] The input text. This may include template strings. # # @param model [String] The model to use for the evaluation. # @@ -477,26 +486,28 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] required :content, - union: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content } + union: -> { + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content + } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. 
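[Editor's note on the `Content` union above: it accepts a bare template string, a `ResponseInputText`, or the `OutputText` defined in this hunk. The hash field names for `ResponseInputText` are assumed from its usual shape, not shown in this patch.]

    plain     = "Summarize {{item.document}}"                  # bare template String
    as_input  = {type: :input_text, text: "Summarize this."}   # ResponseInputText (assumed shape)
    as_output = {type: :output_text, text: "A prior answer."}  # OutputText per this hunk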
# - # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] - required :role, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role } + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] + required :role, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type } + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type, nil] + optional :type, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input} for more + # {OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input} for more # details. # # A message input to the model with a role indicating instruction following @@ -505,15 +516,15 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#content + # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#content module Content extend OpenAI::Internal::Type::Union @@ -521,10 +532,12 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. 
- variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText } + variant -> { + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -541,24 +554,24 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText} + # {OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText} # for more details. # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#role + # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#role module Role extend OpenAI::Internal::Type::Enum @@ -573,7 +586,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input#type + # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#type module Type extend OpenAI::Internal::Type::Enum @@ -586,7 +599,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index cd141022..db8e08f0 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -19,7 +19,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. 
# - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -89,16 +89,16 @@ module DataSourceConfig # The response schema defines the shape of the data that will be: # - Used to define your testing criteria and # - What data is required when creating a run - variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } # @!method self.variants - # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -110,13 +110,13 @@ module TestingCriterion # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + variant :label_model, -> { OpenAI::EvalLabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::EvalStringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } # A PythonGrader object that runs a python script on the input. variant :python, -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::Python } @@ -234,7 +234,7 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. 
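[Editor's usage sketch: `data_source_config` on a parsed eval response is a discriminated union, so a class pattern match distinguishes the two variants named in the hunk above; `an_eval` is an assumed response object.]

    case an_eval.data_source_config
    in OpenAI::EvalCustomDataSourceConfig => cfg
      cfg.schema           # the json schema for run data source items
    in OpenAI::EvalStoredCompletionsDataSourceConfig
      :stored_completions  # metadata-based config for stored completions queries
    end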
# - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] required :content, union: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content } @@ -262,11 +262,11 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -278,7 +278,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } @@ -303,13 +303,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -343,7 +343,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb index 84577b43..38c0e0ec 100644 --- a/lib/openai/models/eval_custom_data_source_config.rb +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -18,7 +18,7 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(schema:, type: :custom) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCustomDataSourceConfig} for more details. + # {OpenAI::EvalCustomDataSourceConfig} for more details. # # A CustomDataSourceConfig which specifies the schema of your `item` and # optionally `sample` namespaces. The response schema defines the shape of the @@ -27,7 +27,7 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # - Used to define your testing criteria and # - What data is required when creating a run # - # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. ... + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. # # @param type [Symbol, :custom] The type of data source. Always `custom`. end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb index 32db2840..271e8884 100644 --- a/lib/openai/models/eval_label_model_grader.rb +++ b/lib/openai/models/eval_label_model_grader.rb @@ -5,8 +5,8 @@ module Models class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalLabelModelGrader::Input] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalLabelModelGrader::Input] } # @!attribute labels # The labels to assign to each item in the evaluation. @@ -42,7 +42,7 @@ class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] + # @param input [Array] # # @param labels [Array] The labels to assign to each item in the evaluation. # @@ -58,25 +58,25 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] - required :content, union: -> { OpenAI::Models::EvalLabelModelGrader::Input::Content } + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText] + required :content, union: -> { OpenAI::EvalLabelModelGrader::Input::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. 
# - # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role] - required :role, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Role } + # @return [Symbol, OpenAI::EvalLabelModelGrader::Input::Role] + required :role, enum: -> { OpenAI::EvalLabelModelGrader::Input::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type, nil] - optional :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Type } + # @return [Symbol, OpenAI::EvalLabelModelGrader::Input::Type, nil] + optional :type, enum: -> { OpenAI::EvalLabelModelGrader::Input::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalLabelModelGrader::Input} for more details. + # {OpenAI::EvalLabelModelGrader::Input} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -84,15 +84,15 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::EvalLabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::EvalLabelModelGrader::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Models::EvalLabelModelGrader::Input#content + # @see OpenAI::EvalLabelModelGrader::Input#content module Content extend OpenAI::Internal::Type::Union @@ -100,10 +100,10 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText } + variant -> { OpenAI::EvalLabelModelGrader::Input::Content::OutputText } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -120,24 +120,23 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText} for more - # details. + # {OpenAI::EvalLabelModelGrader::Input::Content::OutputText} for more details. # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::EvalLabelModelGrader::Input#role + # @see OpenAI::EvalLabelModelGrader::Input#role module Role extend OpenAI::Internal::Type::Enum @@ -152,7 +151,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Models::EvalLabelModelGrader::Input#type + # @see OpenAI::EvalLabelModelGrader::Input#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index 7fa913a0..c772f07f 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -23,15 +23,15 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. # - # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::EvalListParams::Order } + # @return [Symbol, OpenAI::EvalListParams::Order, nil] + optional :order, enum: -> { OpenAI::EvalListParams::Order } # @!attribute order_by # Evals can be ordered by creation time or last updated time. Use `created_at` for # creation time or `updated_at` for last updated time. # - # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil] - optional :order_by, enum: -> { OpenAI::Models::EvalListParams::OrderBy } + # @return [Symbol, OpenAI::EvalListParams::OrderBy, nil] + optional :order_by, enum: -> { OpenAI::EvalListParams::OrderBy } # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -41,10 +41,9 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of evals to retrieve. # - # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d - # ... + # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d # - # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use ... + # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index b7bf7898..7ff9c586 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -19,7 +19,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. 
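[Editor's usage sketch for the `EvalListParams` hunk above: listing evals newest-first by creation time. The `order`/`order_by` values come from the hunk's enums; the `evals.list` method name is assumed from the SDK's usual conventions.]

    page = client.evals.list(limit: 20, order: :desc, order_by: :created_at)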
# - # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -89,16 +89,16 @@ module DataSourceConfig # The response schema defines the shape of the data that will be: # - Used to define your testing criteria and # - What data is required when creating a run - variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } # @!method self.variants - # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -110,13 +110,13 @@ module TestingCriterion # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + variant :label_model, -> { OpenAI::EvalLabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::EvalStringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. 
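Because `data_source_config` is a discriminated union over `:custom` and `:stored_completions`, consumers can branch on the concrete class. A sketch, assuming the shortened constants resolve as this rename intends and `ev` is a response from the list call above:

```ruby
# Sketch: branching on the DataSourceConfig union of a list response.
case ev.data_source_config
in OpenAI::EvalCustomDataSourceConfig => cfg
  puts cfg.schema.inspect   # schema used for testing criteria and run creation
in OpenAI::EvalStoredCompletionsDataSourceConfig => cfg
  puts cfg.metadata.inspect # e.g. {usecase: "chatbot"}
end
```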
- variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } # A PythonGrader object that runs a python script on the input. variant :python, -> { OpenAI::Models::EvalListResponse::TestingCriterion::Python } @@ -234,7 +234,7 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] required :content, union: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content } @@ -262,11 +262,11 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -278,7 +278,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } @@ -303,13 +303,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -343,7 +343,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index f6f13166..51f2d27c 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -19,7 +19,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -89,16 +89,16 @@ module DataSourceConfig # The response schema defines the shape of the data that will be: # - Used to define your testing criteria and # - What data is required when creating a run - variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. 
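The same `testing_criteria` union recurs on the retrieve response below; a dispatch sketch, noting that the Python and ScoreModel variants remain namespaced under `OpenAI::Models` per this diff:

```ruby
# Sketch: dispatching over the five TestingCriterion variants.
ev.testing_criteria.each do |criterion|
  case criterion
  in OpenAI::EvalLabelModelGrader then puts "label_model"
  in OpenAI::EvalStringCheckGrader then puts "string_check: #{criterion.operation}"
  in OpenAI::EvalTextSimilarityGrader then puts "text_similarity: #{criterion.evaluation_metric}"
  else puts "python or score_model grader" # response-local variants
  end
end
```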
- variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } # @!method self.variants - # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -110,13 +110,13 @@ module TestingCriterion # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + variant :label_model, -> { OpenAI::EvalLabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::EvalStringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } # A PythonGrader object that runs a python script on the input. variant :python, -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python } @@ -234,7 +234,7 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] required :content, union: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content } @@ -264,11 +264,11 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -280,7 +280,7 @@ module Content variant String # A text input to the model. 
- variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } @@ -305,13 +305,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -345,7 +345,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index 0055f9bc..6bb63bf6 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -29,7 +29,7 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(schema:, metadata: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalStoredCompletionsDataSourceConfig} for more details. + # {OpenAI::EvalStoredCompletionsDataSourceConfig} for more details. # # A StoredCompletionsDataSourceConfig which specifies the metadata property of # your stored completions query. This is usually metadata like `usecase=chatbot` @@ -37,9 +37,9 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # used to defined what variables are available in your evals. `item` and `sample` # are both defined when using this data source config. # - # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. ... + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. 
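The stored-completions config documented here is read-only response data; a sketch of inspecting it, assuming `ev` comes from a retrieve call:

```ruby
# Sketch: inspecting a stored-completions data source config on a
# retrieved eval; `ev` is assumed to come from client.evals.retrieve.
cfg = ev.data_source_config
if cfg.is_a?(OpenAI::EvalStoredCompletionsDataSourceConfig)
  puts cfg.type        # => :stored_completions
  puts cfg.schema.keys # JSON schema for run data source items
end
```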
end diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb index 8563a345..494f8199 100644 --- a/lib/openai/models/eval_string_check_grader.rb +++ b/lib/openai/models/eval_string_check_grader.rb @@ -18,8 +18,8 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel # @!attribute operation # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # - # @return [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] - required :operation, enum: -> { OpenAI::Models::EvalStringCheckGrader::Operation } + # @return [Symbol, OpenAI::EvalStringCheckGrader::Operation] + required :operation, enum: -> { OpenAI::EvalStringCheckGrader::Operation } # @!attribute reference # The reference text. This may include template strings. @@ -41,7 +41,7 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the grader. # - # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # @param operation [Symbol, OpenAI::EvalStringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # # @param reference [String] The reference text. This may include template strings. # @@ -49,7 +49,7 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # - # @see OpenAI::Models::EvalStringCheckGrader#operation + # @see OpenAI::EvalStringCheckGrader#operation module Operation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 4118b58b..765bb9c6 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -7,8 +7,8 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # - # @return [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] - required :evaluation_metric, enum: -> { OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric } + # @return [Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric] + required :evaluation_metric, enum: -> { OpenAI::EvalTextSimilarityGrader::EvaluationMetric } # @!attribute input # The text being graded. @@ -42,12 +42,11 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalTextSimilarityGrader} for more details. + # {OpenAI::EvalTextSimilarityGrader} for more details. # # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r - # ... + # @param evaluation_metric [Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r # # @param input [String] The text being graded. # @@ -62,7 +61,7 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. 
One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # - # @see OpenAI::Models::EvalTextSimilarityGrader#evaluation_metric + # @see OpenAI::EvalTextSimilarityGrader#evaluation_metric module EvaluationMetric extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index baad5c63..2b7d40b3 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -28,7 +28,7 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::EvalUpdateParams} for more details. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] Rename the evaluation. # diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 2bc0b524..12986ff3 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -19,7 +19,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -89,16 +89,16 @@ module DataSourceConfig # The response schema defines the shape of the data that will be: # - Used to define your testing criteria and # - What data is required when creating a run - variant :custom, -> { OpenAI::Models::EvalCustomDataSourceConfig } + variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. 
# This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } # @!method self.variants - # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -110,13 +110,13 @@ module TestingCriterion # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::Models::EvalLabelModelGrader } + variant :label_model, -> { OpenAI::EvalLabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::Models::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::EvalStringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } # A PythonGrader object that runs a python script on the input. variant :python, -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python } @@ -234,7 +234,7 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] required :content, union: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content } @@ -262,11 +262,11 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. 
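`EvalUpdateParams` above admits only `metadata` and `name`; a minimal update sketch, with a hypothetical eval identifier:

```ruby
# Sketch: renaming an eval and replacing its metadata; "eval_123" is a
# hypothetical id and the keyword shape follows EvalUpdateParams above.
updated = client.evals.update(
  "eval_123",
  name: "sentiment-grader-v2",
  metadata: {"prompt-version" => "v2"}
)
puts updated.name
```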
# Text inputs to the model - can contain template strings. # @@ -278,7 +278,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } @@ -303,13 +303,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -343,7 +343,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index eaa0f6e4..f31118d7 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -7,20 +7,22 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # A StoredCompletionsRunDataSource configuration describing a set of filters # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - required :source, union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source } + # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } # @!attribute type # The type of run data source. Always `completions`. 
# - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] - required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type } + # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] + required :type, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type } # @!attribute input_messages # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] + # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, - union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages } + union: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages + } # @!attribute model # The name of the model to use for generating completions (e.g. "o3-mini"). @@ -30,50 +32,50 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute sampling_params # - # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] - optional :sampling_params, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } + # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] + optional :sampling_params, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource} for more details. + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource} for more details. # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters ... + # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters # - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. + # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. # - # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param input_messages [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). 
# - # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] + # @param sampling_params [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # A StoredCompletionsRunDataSource configuration describing a set of filters # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#source + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#source module Source extend OpenAI::Internal::Type::Union discriminator :type - variant :file_content, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent } + variant :file_content, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID } + variant :file_id, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID } # A StoredCompletionsRunDataSource configuration describing a set of filters variant :stored_completions, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions } + -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions } class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the jsonl file. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + } # @!attribute type # The type of jsonl source. Always `file_content`. @@ -82,7 +84,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. + # @param content [Array] The content of the jsonl file. # # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. @@ -166,7 +168,7 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method initialize(created_after: nil, created_before: nil, limit: nil, metadata: nil, model: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} # for more details. # # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -177,7 +179,7 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # # @param limit [Integer, nil] An optional maximum number of items to return. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String, nil] An optional model to filter by (e.g., 'gpt-4o'). 
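All of the stored-completions filters above are optional; as a hash, a source selecting recent chatbot completions might look like this sketch (values illustrative):

```ruby
# Sketch: a stored_completions source for a completions run data source;
# every filter is optional and the values here are illustrative.
source = {
  type: :stored_completions,
  created_after: Time.now.to_i - 7 * 24 * 3600, # last seven days
  limit: 100,
  metadata: {usecase: "chatbot"},
  model: "gpt-4o"
}
```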
# @@ -185,12 +187,12 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] end # The type of run data source. Always `completions`. # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#type + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#type module Type extend OpenAI::Internal::Type::Enum @@ -200,26 +202,27 @@ module Type # @return [Array] end - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union discriminator :type - variant :template, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template } + variant :template, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template } variant :item_reference, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference } + -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference } class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :template, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] + } # @!attribute type # The type of input messages. Always `template`. @@ -229,11 +232,10 @@ class Template < OpenAI::Internal::Type::BaseModel # @!method initialize(template:, type: :template) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} - # for more details. + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} for + # more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -252,7 +254,7 @@ module Template # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, -> { OpenAI::Models::Responses::EasyInputMessage } + variant :message, -> { OpenAI::Responses::EasyInputMessage } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -260,34 +262,42 @@ module Template # `assistant` role are presumed to have been generated by the model in previous # interactions. 
variant :message, - -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message } + -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + } class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] required :content, - union: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content } + union: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content + } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] required :role, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role } + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] optional :type, - enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type } + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} # for more details. # # A message input to the model with a role indicating instruction following @@ -296,15 +306,15 @@ class Message < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... 
+ # @param role [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content module Content extend OpenAI::Internal::Type::Union @@ -312,10 +322,12 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText } + variant -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -332,24 +344,24 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} # for more details. # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -364,7 +376,7 @@ module Role # The type of the message input. Always `message`. 
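Putting the template pieces together, `input_messages` can be supplied as a template whose messages reference the `item` namespace; a sketch, where `{{item.question}}` is a hypothetical variable resolved from the run's data source items:

```ruby
# Sketch: a template-style input_messages value; the variable reference
# is hypothetical and resolves against each data source item.
input_messages = {
  type: :template,
  template: [
    {role: :developer, content: "Answer concisely."},
    {role: :user, content: "Question: {{item.question}}"}
  ]
}
```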
# - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type module Type extend OpenAI::Internal::Type::Enum @@ -376,7 +388,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] + # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] end end @@ -400,10 +412,10 @@ class ItemReference < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] + # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] end - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index ae1cedae..514f2494 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -6,8 +6,8 @@ module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] - required :source, union: -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source } + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source } # @!attribute type # The type of data source. Always `jsonl`. @@ -19,27 +19,29 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # A JsonlRunDataSource object with that specifies a JSONL file that matches the # eval # - # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] # # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`. 
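The JSONL run data source is the simplest of the three: a `:jsonl` wrapper around either inline file content or an uploaded file id. A sketch of both source shapes, with a hypothetical upload id:

```ruby
# Sketch: the two source shapes accepted by CreateEvalJSONLRunDataSource;
# "file-abc123" is a hypothetical upload id.
inline = {
  type: :jsonl,
  source: {type: :file_content, content: [{item: {question: "2+2?", answer: "4"}}]}
}
by_id = {
  type: :jsonl,
  source: {type: :file_id, id: "file-abc123"}
}
```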
- # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source + # @see OpenAI::Evals::CreateEvalJSONLRunDataSource#source module Source extend OpenAI::Internal::Type::Union discriminator :type - variant :file_content, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent } + variant :file_content, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID } + variant :file_id, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID } class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the jsonl file. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + } # @!attribute type # The type of jsonl source. Always `file_content`. @@ -48,7 +50,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. + # @param content [Array] The content of the jsonl file. # # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. @@ -89,7 +91,7 @@ class FileID < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index da5f330b..1a5d2402 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -20,14 +20,14 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id # The identifier of the associated evaluation. @@ -106,13 +106,13 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] Information about the run's data source. 
+ # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] Information about the run's data source. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String] The model that is evaluated, if applicable. # @@ -139,10 +139,10 @@ module DataSource discriminator :type # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval - variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + variant :jsonl, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource } # A CompletionsRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions } @@ -185,7 +185,7 @@ class Completions < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. # # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] # @@ -327,8 +327,8 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature # Sampling temperature. This is a query parameter used to select responses. @@ -356,28 +356,20 @@ class Responses < OpenAI::Internal::Type::BaseModel # A EvalResponsesSource object describing a run data source configuration. # # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # ... 
# # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # ... # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # ... # # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # ... # # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # ... # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # ... # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -429,7 +421,6 @@ class Template < OpenAI::Internal::Type::BaseModel # for more details. # # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -473,7 +464,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } @@ -503,11 +494,11 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... 
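`EvalItem#content` above is a union over three shapes: a plain `String`, a `ResponseInputText`, or the nested `OutputText`. A sketch of each as raw hashes; the `output_text` tag is stated in the docs above, while the `input_text` tag for `ResponseInputText` is an assumption based on the variant name:

    plain = { role: :user, content: "Summarize {{item.text}}" }

    as_input_text = {
      role: :developer,
      content: { type: :input_text, text: "Keep answers short." }
    }

    # Per the docs above, assistant-role messages are presumed model-generated.
    as_output_text = {
      role: :assistant,
      content: { type: :output_text, text: "Paris." }
    }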
+ # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -519,7 +510,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } @@ -544,13 +535,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -648,7 +639,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 73ceb87e..46e49a3d 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,8 +11,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] - required :data_source, union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource } + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] + required :data_source, union: -> { OpenAI::Evals::RunCreateParams::DataSource } # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -35,9 +35,9 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. 
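For context on how `RunCreateParams` gets used: a minimal sketch of creating an eval run over a JSONL file, assuming the usual generated resource layout (`client.evals.runs.create(eval_id, ...)`); the IDs are placeholders:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    run = client.evals.runs.create(
      "eval_abc123",
      data_source: {
        type: :jsonl,
        source: { type: :file_id, id: "file-abc123" }
      },
      name: "nightly-regression"
    )
    puts run.id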
# - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the run. # @@ -48,34 +48,40 @@ module DataSource extend OpenAI::Internal::Type::Union # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval - variant -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + variant -> { OpenAI::Evals::CreateEvalJSONLRunDataSource } # A CompletionsRunDataSource object describing a model sampling configuration. - variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # A EvalResponsesSource object describing a run data source configuration. # - # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] required :source, - union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source } + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source + } # @!attribute type # The type of run data source. Always `completions`. 
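Putting the attributes of `CreateEvalResponsesRunDataSource` together, a sketch of a complete responses-based `data_source` hash (model names are placeholders; `"o3-mini"` is the example the docs themselves use):

    data_source = {
      type: :completions, # the Type enum notes this is always `completions`
      source: {
        type: :responses,
        model: "gpt-4o-mini",
        created_after: 1_700_000_000
      },
      input_messages: {
        type: :template,
        template: [
          { role: :user, content: "Grade the answer: {{item.answer}}" }
        ]
      },
      model: "o3-mini",
      sampling_params: { temperature: 0.2 }
    }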
# - # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] required :type, - enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type } + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + } # @!attribute input_messages # - # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, - union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages } + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages + } # @!attribute model # The name of the model to use for generating completions (e.g. "o3-mini"). @@ -85,56 +91,62 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute sampling_params # - # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] optional :sampling_params, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} # for more details. # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. # - # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `completions`. + # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `completions`. 
# - # @param input_messages [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # - # @param sampling_params [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] + # @param sampling_params [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] # A EvalResponsesSource object describing a run data source configuration. # - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source module Source extend OpenAI::Internal::Type::Union discriminator :type variant :file_content, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + } variant :file_id, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + } # A EvalResponsesSource object describing a run data source configuration. variant :responses, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + } class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the jsonl file. # - # @return [Array] + # @return [Array] required :content, - -> do - OpenAI::Internal::Type::ArrayOf[ - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content - ] - end + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + } # @!attribute type # The type of jsonl source. Always `file_content`. @@ -143,7 +155,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. + # @param content [Array] The content of the jsonl file. # # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. @@ -243,8 +255,8 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature # Sampling temperature. This is a query parameter used to select responses. 
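A sketch of the `file_content` variant of the source union, assuming each `Content` entry wraps an `item` hash (the keys inside `item` are placeholders for whatever the eval's schema expects):

    source = {
      type: :file_content,
      content: [
        { item: { question: "2 + 2?", answer: "4" } },
        { item: { question: "Capital of France?", answer: "Paris" } }
      ]
    }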
@@ -266,34 +278,26 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} # for more details. # # A EvalResponsesSource object describing a run data source configuration. # # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # ... # # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # ... # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # ... # # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # ... # # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # ... # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # ... # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -305,12 +309,12 @@ class Responses < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] end # The type of run data source. Always `completions`. 
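The `Responses` source above is a pure filter object: every attribute narrows which stored Responses API interactions feed the run. A sketch combining a few of the filters; the `:low` value is an assumed member of the shared `OpenAI::ReasoningEffort` enum:

    source = {
      type: :responses,
      model: "o3-mini",
      has_tool_calls: false,
      instructions_search: "classify",
      reasoning_effort: :low,
      temperature: 1.0
    }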
# - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type module Type extend OpenAI::Internal::Type::Enum @@ -320,28 +324,32 @@ module Type # @return [Array] end - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union discriminator :type variant :template, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + } variant :item_reference, - -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference } + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + } class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :template, -> do OpenAI::Internal::Type::ArrayOf[ - union: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template + union: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template ] end @@ -353,11 +361,10 @@ class Template < OpenAI::Internal::Type::BaseModel # @!method initialize(template:, type: :template) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} # for more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -369,14 +376,18 @@ class Template < OpenAI::Internal::Type::BaseModel module Template extend OpenAI::Internal::Type::Union - variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage } + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
- variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem } + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + } class ChatMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -401,28 +412,34 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, - union: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content } + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content + } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] required :role, - enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role } + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] optional :type, - enum: -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type } + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} # for more details. # # A message input to the model with a role indicating instruction following @@ -431,15 +448,15 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. 
# - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content module Content extend OpenAI::Internal::Type::Union @@ -447,10 +464,12 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText } + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -467,24 +486,24 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} # for more details. # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
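The `Template::Template` union above accepts either a bare `ChatMessage` or a full `EvalItem`, so one template can mix both shapes. A sketch:

    template = [
      # ChatMessage shape: plain role/content strings.
      { role: "user", content: "Hello {{item.name}}" },
      # EvalItem shape: enum role plus structured content.
      { role: :developer, content: { type: :input_text, text: "Answer briefly." } }
    ]

    input_messages = { type: :template, template: template }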
end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role module Role extend OpenAI::Internal::Type::Enum @@ -499,7 +518,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type module Type extend OpenAI::Internal::Type::Enum @@ -511,7 +530,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] end end @@ -535,10 +554,10 @@ class ItemReference < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] end - # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. 
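Only `max_completion_tokens` is visible in this hunk; a sketch of a full `sampling_params` hash, with the remaining keys (`seed`, `temperature`, `top_p`) assumed from the usual generated `SamplingParams` model:

    sampling_params = {
      max_completion_tokens: 256,
      seed: 42,
      temperature: 0.2,
      top_p: 1.0
    }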
@@ -576,7 +595,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 2326f6f0..d8870c35 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,14 +20,14 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id # The identifier of the associated evaluation. @@ -106,13 +106,13 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] Information about the run's data source. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String] The model that is evaluated, if applicable. # @@ -139,10 +139,10 @@ module DataSource discriminator :type # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval - variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + variant :jsonl, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource } # A CompletionsRunDataSource object describing a model sampling configuration. 
- variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions } @@ -185,7 +185,7 @@ class Completions < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. # # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] # @@ -327,8 +327,8 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature # Sampling temperature. This is a query parameter used to select responses. @@ -356,28 +356,20 @@ class Responses < OpenAI::Internal::Type::BaseModel # A EvalResponsesSource object describing a run data source configuration. # # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # ... # # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # ... # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # ... # # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # ... # # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # ... # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # ... # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -429,7 +421,6 @@ class Template < OpenAI::Internal::Type::BaseModel # for more details. 
# # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -473,7 +464,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } @@ -503,11 +494,11 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -519,7 +510,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } @@ -544,13 +535,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -648,7 +639,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index d828e118..bfb9d83f 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -24,15 +24,15 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Evals::RunListParams::Order } + # @return [Symbol, OpenAI::Evals::RunListParams::Order, nil] + optional :order, enum: -> { OpenAI::Evals::RunListParams::Order } # @!attribute status # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # | `canceled`. # - # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] - optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } + # @return [Symbol, OpenAI::Evals::RunListParams::Status, nil] + optional :status, enum: -> { OpenAI::Evals::RunListParams::Status } # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -42,11 +42,9 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de - # ... + # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` - # ... + # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 3e2e0deb..6f7a7637 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,14 +20,14 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. 
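`RunListParams` above maps directly onto the list call's keyword arguments. A sketch, assuming the generated resource exposes `client.evals.runs.list(eval_id, ...)` and returns a cursor page with a `data` array:

    page = client.evals.runs.list(
      "eval_abc123",
      order: :desc,       # newest first
      status: :completed, # one of queued/in_progress/failed/completed/canceled
      limit: 20
    )
    page.data.each { |run| puts run.id }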
# - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id # The identifier of the associated evaluation. @@ -106,13 +106,13 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] Information about the run's data source. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String] The model that is evaluated, if applicable. # @@ -139,10 +139,10 @@ module DataSource discriminator :type # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval - variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + variant :jsonl, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource } # A CompletionsRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions } @@ -185,7 +185,7 @@ class Completions < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... 
+ # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. # # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] # @@ -327,8 +327,8 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature # Sampling temperature. This is a query parameter used to select responses. @@ -356,28 +356,20 @@ class Responses < OpenAI::Internal::Type::BaseModel # A EvalResponsesSource object describing a run data source configuration. # # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # ... # # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # ... # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # ... # # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # ... # # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # ... # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # ... # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -425,7 +417,6 @@ class Template < OpenAI::Internal::Type::BaseModel # for more details. # # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -469,7 +460,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } @@ -499,11 +490,11 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -515,7 +506,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } @@ -540,13 +531,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -644,7 +635,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 1118a6ba..84c210e8 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,14 +20,14 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id # The identifier of the associated evaluation. @@ -106,13 +106,13 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] Information about the run's data source. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String] The model that is evaluated, if applicable. # @@ -139,10 +139,10 @@ module DataSource discriminator :type # A JsonlRunDataSource object with that specifies a JSONL file that matches the eval - variant :jsonl, -> { OpenAI::Models::Evals::CreateEvalJSONLRunDataSource } + variant :jsonl, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource } # A CompletionsRunDataSource object describing a model sampling configuration. 
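Since `RunRetrieveResponse#data_source` resolves through the same discriminated union, callers can branch on the concrete variant class. A sketch, assuming `retrieve` takes the run ID positionally and the eval ID as a keyword:

    run = client.evals.runs.retrieve("run_abc123", eval_id: "eval_abc123")

    case run.data_source
    when OpenAI::Evals::CreateEvalJSONLRunDataSource
      puts "jsonl run"
    when OpenAI::Evals::CreateEvalCompletionsRunDataSource
      puts "completions run"
    else
      puts "responses-style run"
    end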
- variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } + variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. variant :completions, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions } @@ -186,7 +186,7 @@ class Completions < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. ... + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. # # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] # @@ -329,8 +329,8 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature # Sampling temperature. This is a query parameter used to select responses. @@ -358,28 +358,20 @@ class Responses < OpenAI::Internal::Type::BaseModel # A EvalResponsesSource object describing a run data source configuration. # # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # ... # # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # ... # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # ... # # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # ... # # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # ... # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # ... # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # ... # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -431,7 +423,6 @@ class Template < OpenAI::Internal::Type::BaseModel # for more details. 
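The `DataSource` union renamed in the hunks above is discriminated on `type`. A minimal sketch of consuming it with Ruby pattern matching, assuming `run` is a previously retrieved `OpenAI::Models::Evals::RunRetrieveResponse` (the retrieval call itself follows this SDK's usual resource conventions and is not shown):

    # Branch on the run's data source variant; class patterns use is_a? semantics.
    case run.data_source
    in OpenAI::Evals::CreateEvalJSONLRunDataSource
      puts "JSONL run data source"
    in OpenAI::Evals::CreateEvalCompletionsRunDataSource
      puts "completions run data source"
    in OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions => ds
      puts "responses run data source (source: #{ds.source.class})"
    end

The shortened `OpenAI::Evals::*` constants are exactly the aliases this patch introduces, so either spelling resolves to the same class.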
# # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # ... # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -475,7 +466,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } @@ -505,11 +496,11 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. ... + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # @@ -521,7 +512,7 @@ module Content variant String # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } @@ -546,13 +537,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel # # A text output from the model. # - # @param text [String] The text output from the model. ... + # @param text [String] The text output from the model. # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. ... + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
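The `Content` union documented above admits a bare `String`, a `ResponseInputText`, or the nested `OutputText` model. A small sketch of normalizing any of the three to plain text, assuming `item` is one `EvalItem` taken from a retrieved run's template:

    text =
      case item.content
      in String => s
        s
      in OpenAI::Responses::ResponseInputText => input
        input.text
      else
        # The remaining variant is the deeply nested OutputText model,
        # which also carries a `text` field per the attributes above.
        item.content.text
      end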
end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -650,7 +641,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index 6eadd7fb..0d0e6406 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -30,15 +30,15 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Order } + # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order, nil] + optional :order, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Order } # @!attribute status # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. # - # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil] - optional :status, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Status } + # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status, nil] + optional :status, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Status } # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -50,10 +50,9 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc - # ... + # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc # - # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Filter output items by status. Use `failed` to filter by failed output ... + # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Filter output items by status. 
Use `failed` to filter by failed output # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index e5c369c9..85505173 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -95,8 +95,8 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason # The reason why the sample generation was finished. @@ -160,7 +160,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. # diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index 70b4c3ab..5a75e4a0 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -95,8 +95,8 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Models::Evals::EvalAPIError] - required :error, -> { OpenAI::Models::Evals::EvalAPIError } + # @return [OpenAI::Evals::EvalAPIError] + required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason # The reason why the sample generation was finished. @@ -161,7 +161,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. ... + # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. # diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index a7354de0..5ee317be 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -8,13 +8,13 @@ module FileChunkingStrategy discriminator :type - variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObject } + variant :static, -> { OpenAI::StaticFileChunkingStrategyObject } # This is returned when the chunking strategy is unknown. Typically, this is because the file was indexed before the `chunking_strategy` concept was introduced in the API. 
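Since `FileChunkingStrategy` is a discriminated union over `:static` and `:other`, reading it back follows the same pattern-matching shape. A sketch, assuming `vector_store_file` was retrieved earlier and that the static variant nests its sizes under a `static` field (field names assumed from the API shape, not shown in this hunk):

    case vector_store_file.chunking_strategy
    in OpenAI::StaticFileChunkingStrategyObject => strategy
      # Assumed fields: max_chunk_size_tokens / chunk_overlap_tokens.
      puts "static: #{strategy.static.max_chunk_size_tokens} tokens per chunk"
    in OpenAI::OtherFileChunkingStrategyObject
      puts "file was indexed before chunking strategies were introduced"
    end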
- variant :other, -> { OpenAI::Models::OtherFileChunkingStrategyObject } + variant :other, -> { OpenAI::OtherFileChunkingStrategyObject } # @!method self.variants - # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] + # @return [Array(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject)] end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 7f5a2487..6a46bdfc 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -10,13 +10,13 @@ module FileChunkingStrategyParam discriminator :type # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. - variant :auto, -> { OpenAI::Models::AutoFileChunkingStrategyParam } + variant :auto, -> { OpenAI::AutoFileChunkingStrategyParam } # Customize your own chunking strategy by setting chunk size and chunk overlap. - variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObjectParam } + variant :static, -> { OpenAI::StaticFileChunkingStrategyObjectParam } # @!method self.variants - # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] + # @return [Array(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam)] end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index d5062f4d..c88d1052 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -19,17 +19,16 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets # - # @return [Symbol, OpenAI::Models::FilePurpose] - required :purpose, enum: -> { OpenAI::Models::FilePurpose } + # @return [Symbol, OpenAI::FilePurpose] + required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(file:, purpose:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::FileCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. ... + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A - # ... + # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 423236ed..82ca75f1 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -27,8 +27,8 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
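The `FileCreateParams` and `FileListParams` definitions above translate directly into resource calls. A minimal sketch, assuming `OpenAI::Client.new` reads `OPENAI_API_KEY` from the environment and that list pages are enumerable, as is conventional for this SDK:

    require "openai"
    require "pathname"

    client = OpenAI::Client.new

    # `file:` accepts a Pathname, StringIO, IO, or OpenAI::FilePart,
    # and `purpose:` takes a FilePurpose enum member, per the params above.
    uploaded = client.files.create(file: Pathname("data.jsonl"), purpose: :user_data)

    # `order:` is the FileListParams::Order enum (:asc or :desc).
    client.files.list(order: :desc, limit: 5).each do |f|
      puts "#{f.id} #{f.filename} (#{f.purpose})"
    end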
# - # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::FileListParams::Order } + # @return [Symbol, OpenAI::FileListParams::Order, nil] + optional :order, enum: -> { OpenAI::FileListParams::Order } # @!attribute purpose # Only return files with the given purpose. @@ -41,13 +41,10 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::FileListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. # diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index 942e2361..ec5c9839 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -39,8 +39,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @return [Symbol, OpenAI::Models::FileObject::Purpose] - required :purpose, enum: -> { OpenAI::Models::FileObject::Purpose } + # @return [Symbol, OpenAI::FileObject::Purpose] + required :purpose, enum: -> { OpenAI::FileObject::Purpose } # @!attribute status # @deprecated @@ -48,8 +48,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. # - # @return [Symbol, OpenAI::Models::FileObject::Status] - required :status, enum: -> { OpenAI::Models::FileObject::Status } + # @return [Symbol, OpenAI::FileObject::Status] + required :status, enum: -> { OpenAI::FileObject::Status } # @!attribute expires_at # The Unix timestamp (in seconds) for when the file will expire. @@ -67,8 +67,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel optional :status_details, String # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::FileObject} for more details. + # Some parameter documentations has been truncated, see {OpenAI::FileObject} for + # more details. # # The `File` object represents a document that has been uploaded to OpenAI. # @@ -80,16 +80,13 @@ class FileObject < OpenAI::Internal::Type::BaseModel # # @param filename [String] The name of the file. # - # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants - # ... + # @param purpose [Symbol, OpenAI::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants # - # @param status [Symbol, OpenAI::Models::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro - # ... + # @param status [Symbol, OpenAI::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro # # @param expires_at [Integer] The Unix timestamp (in seconds) for when the file will expire. 
# # @param status_details [String] Deprecated. For details on why a fine-tuning training file failed validation, se - # ... # # @param object [Symbol, :file] The object type, which is always `file`. @@ -97,7 +94,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @see OpenAI::Models::FileObject#purpose + # @see OpenAI::FileObject#purpose module Purpose extend OpenAI::Internal::Type::Enum @@ -118,7 +115,7 @@ module Purpose # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. # - # @see OpenAI::Models::FileObject#status + # @see OpenAI::FileObject#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index cddf06b9..9bdaffeb 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -24,8 +24,8 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # @!attribute order # The order in which to retrieve permissions. # - # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order } + # @return [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] + optional :order, enum: -> { OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order } # @!attribute project_id # The ID of the project to get permissions for. @@ -38,7 +38,7 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 36cc8d69..aafd3b38 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -21,8 +21,8 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] - required :error, -> { OpenAI::Models::FineTuning::FineTuningJob::Error }, nil?: true + # @return [OpenAI::FineTuning::FineTuningJob::Error, nil] + required :error, -> { OpenAI::FineTuning::FineTuningJob::Error }, nil?: true # @!attribute fine_tuned_model # The name of the fine-tuned model that is being created. The value will be null @@ -42,8 +42,8 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. 
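The `error` and `status` attributes above are what callers inspect after a job finishes. A sketch, assuming the jobs resource lives at `client.fine_tuning.jobs` per this SDK's conventions and that `job_id` is a known fine-tuning job ID; enum values deserialize as Symbols per the `@return` types shown:

    job = client.fine_tuning.jobs.retrieve(job_id)

    case job.status
    in :succeeded
      puts "fine-tuned model: #{job.fine_tuned_model}"
    in :failed
      # `error` is nullable; it carries code/message/param per the Error model above.
      warn "failed: #{job.error&.message} (param: #{job.error&.param})"
    else
      puts "job is still #{job.status}"
    end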
# - # @return [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] - required :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters } + # @return [OpenAI::FineTuning::FineTuningJob::Hyperparameters] + required :hyperparameters, -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters } # @!attribute model # The base model that is being fine-tuned. @@ -81,8 +81,8 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. # - # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] - required :status, enum: -> { OpenAI::Models::FineTuning::FineTuningJob::Status } + # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Status] + required :status, enum: -> { OpenAI::FineTuning::FineTuningJob::Status } # @!attribute trained_tokens # The total number of billable tokens processed by this fine-tuning job. The value @@ -116,9 +116,11 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute integrations # A list of integrations to enable for this fine-tuning job. # - # @return [Array, nil] + # @return [Array, nil] optional :integrations, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject] + }, nil?: true # @!attribute metadata @@ -135,12 +137,12 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Method, nil] - optional :method_, -> { OpenAI::Models::FineTuning::FineTuningJob::Method }, api_name: :method + # @return [OpenAI::FineTuning::FineTuningJob::Method, nil] + optional :method_, -> { OpenAI::FineTuning::FineTuningJob::Method }, api_name: :method # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJob} for more details. + # {OpenAI::FineTuning::FineTuningJob} for more details. # # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. @@ -149,51 +151,41 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. # - # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t - # ... + # @param error [OpenAI::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t # # @param fine_tuned_model [String, nil] The name of the fine-tuned model that is being created. The value will be null i - # ... # # @param finished_at [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job was finished. The v - # ... # - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return - # ... 
+ # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return # # @param model [String] The base model that is being fine-tuned. # # @param organization_id [String] The organization that owns the fine-tuning job. # # @param result_files [Array] The compiled results file ID(s) for the fine-tuning job. You can retrieve the re - # ... # # @param seed [Integer] The seed used for the fine-tuning job. # - # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files - # ... + # @param status [Symbol, OpenAI::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files # # @param trained_tokens [Integer, nil] The total number of billable tokens processed by this fine-tuning job. The value - # ... # # @param training_file [String] The file ID used for training. You can retrieve the training data with the [File - # ... # # @param validation_file [String, nil] The file ID used for validation. You can retrieve the validation results with th - # ... # # @param estimated_finish [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job is estimated to fin - # ... # - # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] The method used for fine-tuning. + # @param method_ [OpenAI::FineTuning::FineTuningJob::Method] The method used for fine-tuning. # # @param object [Symbol, :"fine_tuning.job"] The object type, which is always "fine_tuning.job". - # @see OpenAI::Models::FineTuning::FineTuningJob#error + # @see OpenAI::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel # @!attribute code # A machine-readable error code. @@ -216,7 +208,7 @@ class Error < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:, param:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJob::Error} for more details. + # {OpenAI::FineTuning::FineTuningJob::Error} for more details. # # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. @@ -226,17 +218,16 @@ class Error < OpenAI::Internal::Type::BaseModel # @param message [String] A human-readable error message. # # @param param [String, nil] The parameter that was invalid, usually `training_file` or `validation_file`. Th - # ... end - # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters + # @see OpenAI::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
# # @return [Symbol, :auto, Integer, nil] - optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::BatchSize } + optional :batch_size, union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -244,35 +235,32 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier } + union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier } # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::NEpochs } + optional :n_epochs, union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters} for more details. + # {OpenAI::FineTuning::FineTuningJob::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size + # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -287,7 +275,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -302,7 +290,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -318,7 +306,7 @@ module NEpochs # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. 
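Given the status enum just documented (`validating_files`, `queued`, `running`, `succeeded`, `failed`, `cancelled`), a simple polling loop only needs to know which values are terminal. A sketch, reusing the assumed `client` and `job` from the previous example:

    TERMINAL_STATUSES = %i[succeeded failed cancelled].freeze

    # Re-fetch the job until it reaches a terminal status.
    until TERMINAL_STATUSES.include?(job.status)
      sleep 30
      job = client.fine_tuning.jobs.retrieve(job.id)
    end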
# - # @see OpenAI::Models::FineTuning::FineTuningJob#status + # @see OpenAI::FineTuning::FineTuningJob#status module Status extend OpenAI::Internal::Type::Enum @@ -333,49 +321,49 @@ module Status # @return [Array] end - # @see OpenAI::Models::FineTuning::FineTuningJob#method_ + # @see OpenAI::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, nil] - optional :dpo, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo } + # @return [OpenAI::FineTuning::FineTuningJob::Method::Dpo, nil] + optional :dpo, -> { OpenAI::FineTuning::FineTuningJob::Method::Dpo } # @!attribute supervised # Configuration for the supervised fine-tuning method. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, nil] - optional :supervised, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised } + # @return [OpenAI::FineTuning::FineTuningJob::Method::Supervised, nil] + optional :supervised, -> { OpenAI::FineTuning::FineTuningJob::Method::Supervised } # @!attribute type # The type of method. Is either `supervised` or `dpo`. # - # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type, nil] - optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Type } + # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type, nil] + optional :type, enum: -> { OpenAI::FineTuning::FineTuningJob::Method::Type } # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] Configuration for the DPO fine-tuning method. + # @param dpo [OpenAI::FineTuning::FineTuningJob::Method::Dpo] Configuration for the DPO fine-tuning method. # - # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] Configuration for the supervised fine-tuning method. + # @param supervised [OpenAI::FineTuning::FineTuningJob::Method::Supervised] Configuration for the supervised fine-tuning method. # - # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised` or `dpo`. + # @param type [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised` or `dpo`. - # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo + # @see OpenAI::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, nil] - optional :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } + # @return [OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, nil] + optional :hyperparameters, -> { OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. 
- # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters + # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model @@ -383,7 +371,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :batch_size, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize + } # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight @@ -391,7 +381,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :beta, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta + } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -399,7 +391,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier + } # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle @@ -407,31 +401,29 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs + } # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters} for - # more details. + # {OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters} for more + # details. # # The hyperparameters used for the fine-tuning job. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size + # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -446,7 +438,7 @@ module BatchSize # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. 
# - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta + # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta module Beta extend OpenAI::Internal::Type::Union @@ -461,7 +453,7 @@ module Beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -476,7 +468,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -490,21 +482,23 @@ module NEpochs end end - # @see OpenAI::Models::FineTuning::FineTuningJob::Method#supervised + # @see OpenAI::FineTuning::FineTuningJob::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, nil] + # @return [OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, - -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters } + -> { + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + } # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters + # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model @@ -512,7 +506,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :batch_size, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize + } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -520,7 +516,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier + } # @!attribute n_epochs # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -528,28 +526,27 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, - union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs } + union: -> { + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs + } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters} - # for more details. + # {OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters} for + # more details. # # The hyperparameters used for the fine-tuning job. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size + # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -564,7 +561,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -579,7 +576,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -595,7 +592,7 @@ module NEpochs # The type of method. Is either `supervised` or `dpo`. # - # @see OpenAI::Models::FineTuning::FineTuningJob::Method#type + # @see OpenAI::FineTuning::FineTuningJob::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 5d1a7f6b..743df8cc 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -20,8 +20,8 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute level # The log level of the event. # - # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] - required :level, enum: -> { OpenAI::Models::FineTuning::FineTuningJobEvent::Level } + # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] + required :level, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Level } # @!attribute message # The message of the event. 
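The `FineTuningJobEvent` model above (id, created_at, level, message) is what the job event stream yields. A sketch of printing a job's event log; the `list_events` method name is assumed from this SDK's resource conventions:

    client.fine_tuning.jobs.list_events(job.id).each do |event|
      # `level` is the Level enum Symbol; `created_at` is a Unix timestamp.
      puts "#{Time.at(event.created_at)} [#{event.level}] #{event.message}"
    end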
@@ -44,8 +44,8 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of event. # - # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type, nil] - optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJobEvent::Type } + # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type, nil] + optional :type, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Type } # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") # Fine-tuning job event object @@ -54,19 +54,19 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. # - # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] The log level of the event. + # @param level [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] The log level of the event. # # @param message [String] The message of the event. # # @param data [Object] The data associated with the event. # - # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] The type of event. + # @param type [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type] The type of event. # # @param object [Symbol, :"fine_tuning.job.event"] The object type, which is always "fine_tuning.job.event". # The log level of the event. # - # @see OpenAI::Models::FineTuning::FineTuningJobEvent#level + # @see OpenAI::FineTuning::FineTuningJobEvent#level module Level extend OpenAI::Internal::Type::Enum @@ -80,7 +80,7 @@ module Level # The type of event. # - # @see OpenAI::Models::FineTuning::FineTuningJobEvent#type + # @see OpenAI::FineTuning::FineTuningJobEvent#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index c32fef0e..07b43842 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -35,23 +35,20 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegration} for more details. + # {OpenAI::FineTuning::FineTuningJobWandbIntegration} for more details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @param project [String] The name of the project that the new run will be created under. ... + # @param project [String] The name of the project that the new run will be created under. # # @param entity [String, nil] The entity to use for the run. This allows you to set the team or username of th - # ... # # @param name [String, nil] A display name to set for the run. If not set, we will use the Job ID as the nam - # ... # # @param tags [Array] A list of tags to be attached to the newly created run. These tags are passed th - # ... 
end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 147302a1..4b691bd6 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -16,16 +16,14 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] - required :wandb, -> { OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } + # @return [OpenAI::FineTuning::FineTuningJobWandbIntegration] + required :wandb, -> { OpenAI::FineTuning::FineTuningJobWandbIntegration } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject} for more - # details. + # {OpenAI::FineTuning::FineTuningJobWandbIntegrationObject} for more details. # - # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie - # ... + # @param wandb [OpenAI::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie # # @param type [Symbol, :wandb] The type of the integration being enabled for the fine-tuning job end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 4d99beea..c86b0f69 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -12,8 +12,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). # - # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] - required :model, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Model } + # @return [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] + required :model, union: -> { OpenAI::FineTuning::JobCreateParams::Model } # @!attribute training_file # The ID of an uploaded file that contains training data. @@ -43,15 +43,15 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] - optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters } + # @return [OpenAI::FineTuning::JobCreateParams::Hyperparameters, nil] + optional :hyperparameters, -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters } # @!attribute integrations # A list of integrations to enable for your fine-tuning job. 
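The `integrations` parameter documented below takes an array of integration objects, each wrapping the `Wandb` settings shown above. A sketch of creating a job with a Weights and Biases integration, assuming this SDK coerces plain hashes into the nested param models as stainless-generated SDKs conventionally do:

    job = client.fine_tuning.jobs.create(
      model: OpenAI::FineTuning::JobCreateParams::Model::GPT_4O_MINI,
      training_file: "file-abc123", # ID of a previously uploaded JSONL file (placeholder)
      integrations: [
        {type: :wandb, wandb: {project: "my-finetunes", tags: ["openai-ruby"]}}
      ]
    )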
# - # @return [Array, nil] + # @return [Array, nil] optional :integrations, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::JobCreateParams::Integration] }, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::JobCreateParams::Integration] }, nil?: true # @!attribute metadata @@ -68,8 +68,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Method, nil] - optional :method_, -> { OpenAI::Models::FineTuning::JobCreateParams::Method }, api_name: :method + # @return [OpenAI::FineTuning::JobCreateParams::Method, nil] + optional :method_, -> { OpenAI::FineTuning::JobCreateParams::Method }, api_name: :method # @!attribute seed # The seed controls the reproducibility of the job. Passing in the same seed and @@ -110,25 +110,23 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::FineTuning::JobCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the ... + # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # - # @param training_file [String] The ID of an uploaded file that contains training data. ... + # @param training_file [String] The ID of an uploaded file that contains training data. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. ... + # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j - # ... # # @param suffix [String, nil] A string of up to 64 characters that will be added to your fine-tuned model name - # ... # - # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. ... + # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -139,13 +137,13 @@ module Model variant String - variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::BABBAGE_002 } - variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_4O_MINI } # @!method self.variants # @return [Array(String, Symbol)] @@ -167,8 +165,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] - optional :batch_size, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::BatchSize } + optional :batch_size, union: -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -176,35 +173,32 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier } + union: -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier } # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::NEpochs } + optional :n_epochs, union: -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters} for more details. + # {OpenAI::FineTuning::JobCreateParams::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size + # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -219,7 +213,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
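Because the top-level `hyperparameters` value is deprecated in favor of `method`, new code should nest hyperparameters under the method config. A sketch; note the `method_:` keyword, which the definitions above map to the wire field `method` via `api_name`, and that `batch_size`/`n_epochs` accept either `:auto` or a number:

    client.fine_tuning.jobs.create(
      model: "gpt-4o-mini-2024-07-18",
      training_file: "file-abc123", # placeholder file ID
      method_: {
        type: :supervised,
        supervised: {hyperparameters: {n_epochs: 3, batch_size: :auto}}
      }
    )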
# - # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -234,7 +228,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -261,20 +255,18 @@ class Integration < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] - required :wandb, -> { OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } + # @return [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] + required :wandb, -> { OpenAI::FineTuning::JobCreateParams::Integration::Wandb } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::JobCreateParams::Integration} for more details. + # {OpenAI::FineTuning::JobCreateParams::Integration} for more details. # - # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie - # ... + # @param wandb [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie # # @param type [Symbol, :wandb] The type of integration to enable. Currently, only "wandb" (Weights and Biases) - # ... - # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb + # @see OpenAI::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel # @!attribute project # The name of the project that the new run will be created under. @@ -307,24 +299,20 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb} for more - # details. + # {OpenAI::FineTuning::JobCreateParams::Integration::Wandb} for more details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @param project [String] The name of the project that the new run will be created under. ... + # @param project [String] The name of the project that the new run will be created under. # # @param entity [String, nil] The entity to use for the run. This allows you to set the team or username of th - # ... # # @param name [String, nil] A display name to set for the run. If not set, we will use the Job ID as the nam - # ... # # @param tags [Array] A list of tags to be attached to the newly created run. These tags are passed th - # ... end end @@ -332,45 +320,47 @@ class Method < OpenAI::Internal::Type::BaseModel # @!attribute dpo # Configuration for the DPO fine-tuning method. 
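Tying the `Integration`/`Wandb` fields above and the `Method` fields below together, a hedged request fragment (project name and tags are placeholders; the DPO hyperparameters are detailed just below). Note the Ruby-side keyword is `method_`, mapped to the `method` API field via `api_name`.

    client.fine_tuning.jobs.create(
      model: "gpt-4o-mini",
      training_file: "file-abc123",  # placeholder
      integrations: [
        # `type:` defaults to :wandb, so only the payload is required here
        {wandb: {project: "my-ft-project", tags: ["openai/finetune"]}}
      ],
      method_: {type: :dpo, dpo: {hyperparameters: {beta: :auto}}}
    )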
# - # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, nil] - optional :dpo, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo } + # @return [OpenAI::FineTuning::JobCreateParams::Method::Dpo, nil] + optional :dpo, -> { OpenAI::FineTuning::JobCreateParams::Method::Dpo } # @!attribute supervised # Configuration for the supervised fine-tuning method. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, nil] - optional :supervised, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised } + # @return [OpenAI::FineTuning::JobCreateParams::Method::Supervised, nil] + optional :supervised, -> { OpenAI::FineTuning::JobCreateParams::Method::Supervised } # @!attribute type # The type of method. Is either `supervised` or `dpo`. # - # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type, nil] - optional :type, enum: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Type } + # @return [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type, nil] + optional :type, enum: -> { OpenAI::FineTuning::JobCreateParams::Method::Type } # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] Configuration for the DPO fine-tuning method. + # @param dpo [OpenAI::FineTuning::JobCreateParams::Method::Dpo] Configuration for the DPO fine-tuning method. # - # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] Configuration for the supervised fine-tuning method. + # @param supervised [OpenAI::FineTuning::JobCreateParams::Method::Supervised] Configuration for the supervised fine-tuning method. # - # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised` or `dpo`. + # @param type [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised` or `dpo`. - # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo + # @see OpenAI::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, nil] + # @return [OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, nil] optional :hyperparameters, - -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters } + -> { + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + } # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters + # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. 
A larger batch size means that model @@ -378,7 +368,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :batch_size, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize + } # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight @@ -386,7 +378,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :beta, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta + } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -394,7 +388,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier + } # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle @@ -402,31 +398,29 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs + } # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters} for - # more details. + # {OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters} for more + # details. # # The hyperparameters used for the fine-tuning job. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size + # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -441,7 +435,7 @@ module BatchSize # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta + # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta module Beta extend OpenAI::Internal::Type::Union @@ -456,7 +450,7 @@ module Beta # Scaling factor for the learning rate. 
A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -471,7 +465,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -485,21 +479,23 @@ module NEpochs end end - # @see OpenAI::Models::FineTuning::JobCreateParams::Method#supervised + # @see OpenAI::FineTuning::JobCreateParams::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, nil] + # @return [OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, - -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters } + -> { + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + } # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters + # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model @@ -507,7 +503,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :batch_size, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize + } # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -515,7 +513,9 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Float, nil] optional :learning_rate_multiplier, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier + } # @!attribute n_epochs # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -523,28 +523,27 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, - union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs } + union: -> { + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs + } # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} - # for more details. + # {OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} for + # more details. # # The hyperparameters used for the fine-tuning job. # # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # ... # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # ... # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - # ... # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size + # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -559,7 +558,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier + # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -574,7 +573,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs + # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -590,7 +589,7 @@ module NEpochs # The type of method. Is either `supervised` or `dpo`. # - # @see OpenAI::Models::FineTuning::JobCreateParams::Method#type + # @see OpenAI::FineTuning::JobCreateParams::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index da7349d7..30c6f937 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -36,7 +36,6 @@ class JobListParams < OpenAI::Internal::Type::BaseModel # @param limit [Integer] Number of fine-tuning jobs to retrieve. # # @param metadata [Hash{Symbol=>String}, nil] Optional metadata filter. To filter, use the syntax `metadata[k]=v`. Alternative - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 230c5e4d..f0a8ff33 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -33,8 +33,8 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @!attribute metrics # Metrics at the step number during the fine-tuning job. # - # @return [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] - required :metrics, -> { OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics } + # @return [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] + required :metrics, -> { OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics } # @!attribute object # The object type, which is always "fine_tuning.job.checkpoint". @@ -60,13 +60,13 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # # @param fine_tuning_job_id [String] The name of the fine-tuning job that this checkpoint was created from. # - # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. + # @param metrics [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. # # @param step_number [Integer] The step number that the checkpoint was created at. # # @param object [Symbol, :"fine_tuning.job.checkpoint"] The object type, which is always "fine_tuning.job.checkpoint". - # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics + # @see OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel # @!attribute full_valid_loss # diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 26ca81c0..10fe7935 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -41,19 +41,15 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::FunctionDefinition} for more details. + # {OpenAI::FunctionDefinition} for more details. # # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc - # ... # # @param description [String] A description of what the function does, used by the model to choose when and ho - # ... # # @param parameters [Hash{Symbol=>Object}] The parameters the functions accepts, described as a JSON Schema object. See the - # ... # # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the function call. If - # ... end end end diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index b392c0c4..e10a18f2 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -26,18 +26,16 @@ class Image < OpenAI::Internal::Type::BaseModel optional :url, String # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) - # Some parameter documentations has been truncated, see {OpenAI::Models::Image} - # for more details. + # Some parameter documentations has been truncated, see {OpenAI::Image} for more + # details. # # Represents the content or the URL of an image generated by the OpenAI API. 
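To make the `FunctionDefinition` fields above concrete, a sketch of building one directly; in practice an equivalent Hash is usually passed and coerced by the SDK, and the function name and schema here are purely illustrative.

    # `strict: true` opts into strict schema adherence for the function call.
    weather_fn = OpenAI::FunctionDefinition.new(
      name: "get_weather",  # illustrative
      description: "Look up the current weather for a city",
      parameters: {
        type: "object",
        properties: {city: {type: "string"}},
        required: ["city"]
      },
      strict: true
    )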
# # @param b64_json [String] The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, - # ... # # @param revised_prompt [String] For `dall-e-3` only, the revised prompt that was used to generate the image. # # @param url [String] When using `dall-e-2` or `dall-e-3`, the URL of the generated image if `response - # ... end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 6dc339d5..66ad7ea0 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -18,8 +18,8 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The model to use for image generation. Only `dall-e-2` is supported at this # time. # - # @return [String, Symbol, OpenAI::Models::ImageModel, nil] - optional :model, union: -> { OpenAI::Models::ImageCreateVariationParams::Model }, nil?: true + # @return [String, Symbol, OpenAI::ImageModel, nil] + optional :model, union: -> { OpenAI::ImageCreateVariationParams::Model }, nil?: true # @!attribute n # The number of images to generate. Must be between 1 and 10. @@ -32,17 +32,15 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. # - # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] - optional :response_format, - enum: -> { OpenAI::Models::ImageCreateVariationParams::ResponseFormat }, - nil?: true + # @return [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::ImageCreateVariationParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. # - # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] - optional :size, enum: -> { OpenAI::Models::ImageCreateVariationParams::Size }, nil?: true + # @return [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] + optional :size, enum: -> { OpenAI::ImageCreateVariationParams::Size }, nil?: true # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -57,21 +55,16 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::ImageCreateVariationParams} for more details. # # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le - # ... # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time - # ... + # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` - # ... + # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x - # ... 
+ # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -83,10 +76,10 @@ module Model variant String # The model to use for image generation. Only `dall-e-2` is supported at this time. - variant enum: -> { OpenAI::Models::ImageModel } + variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # @return [Array(String, Symbol, OpenAI::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index d850e2a5..c38c7821 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -17,7 +17,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # file less than 4MB. # # @return [Pathname, StringIO, IO, OpenAI::FilePart, Array] - required :image, union: -> { OpenAI::Models::ImageEditParams::Image } + required :image, union: -> { OpenAI::ImageEditParams::Image } # @!attribute prompt # A text description of the desired image(s). The maximum length is 1000 @@ -35,8 +35,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::Models::ImageEditParams::Background, nil] - optional :background, enum: -> { OpenAI::Models::ImageEditParams::Background }, nil?: true + # @return [Symbol, OpenAI::ImageEditParams::Background, nil] + optional :background, enum: -> { OpenAI::ImageEditParams::Background }, nil?: true # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) @@ -52,8 +52,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. # - # @return [String, Symbol, OpenAI::Models::ImageModel, nil] - optional :model, union: -> { OpenAI::Models::ImageEditParams::Model }, nil?: true + # @return [String, Symbol, OpenAI::ImageModel, nil] + optional :model, union: -> { OpenAI::ImageEditParams::Model }, nil?: true # @!attribute n # The number of images to generate. Must be between 1 and 10. @@ -66,8 +66,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. # - # @return [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] - optional :quality, enum: -> { OpenAI::Models::ImageEditParams::Quality }, nil?: true + # @return [Symbol, OpenAI::ImageEditParams::Quality, nil] + optional :quality, enum: -> { OpenAI::ImageEditParams::Quality }, nil?: true # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or @@ -75,16 +75,16 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` # will always return base64-encoded images. 
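A minimal sketch of the two image endpoints documented in this region, assuming a configured `client` and placeholder file paths; `image` accepts any of the documented file inputs (`Pathname`, `StringIO`, `IO`, `OpenAI::FilePart`).

    require "pathname"

    # Variations: `dall-e-2` only.
    client.images.create_variation(
      image: Pathname("otter.png"),  # placeholder path to a square PNG < 4MB
      n: 2,
      size: :"1024x1024"
    )

    # Edits: for `gpt-image-1`, `image` may also be an array of files.
    client.images.edit(
      image: Pathname("otter.png"),
      prompt: "Add a red wizard hat",
      model: :"gpt-image-1"
    )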
# - # @return [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::ImageEditParams::ResponseFormat }, nil?: true + # @return [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::ImageEditParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `1024x1024`, `1536x1024` # (landscape), `1024x1536` (portrait), or `auto` (default value) for # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. # - # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] - optional :size, enum: -> { OpenAI::Models::ImageEditParams::Size }, nil?: true + # @return [Symbol, OpenAI::ImageEditParams::Size, nil] + optional :size, enum: -> { OpenAI::ImageEditParams::Size }, nil?: true # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -98,32 +98,25 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. ... + # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character - # ... # - # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind - # ... # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup - # ... + # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are - # ... + # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` - # ... + # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands - # ... + # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -139,7 +132,7 @@ module Image variant OpenAI::Internal::Type::FileInput - variant -> { OpenAI::Models::ImageEditParams::Image::StringArray } + variant -> { OpenAI::ImageEditParams::Image::StringArray } # @!method self.variants # @return [Array(StringIO, Array)] @@ -175,10 +168,10 @@ module Model variant String # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` is used. - variant enum: -> { OpenAI::Models::ImageModel } + variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # @return [Array(String, Symbol, OpenAI::ImageModel)] end # The quality of the image that will be generated. `high`, `medium` and `low` are diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 0c7fd7f8..c46f1558 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -24,23 +24,23 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] - optional :background, enum: -> { OpenAI::Models::ImageGenerateParams::Background }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::Background, nil] + optional :background, enum: -> { OpenAI::ImageGenerateParams::Background }, nil?: true # @!attribute model # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. # - # @return [String, Symbol, OpenAI::Models::ImageModel, nil] - optional :model, union: -> { OpenAI::Models::ImageGenerateParams::Model }, nil?: true + # @return [String, Symbol, OpenAI::ImageModel, nil] + optional :model, union: -> { OpenAI::ImageGenerateParams::Model }, nil?: true # @!attribute moderation # Control the content-moderation level for images generated by `gpt-image-1`. Must # be either `low` for less restrictive filtering or `auto` (default value). # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] - optional :moderation, enum: -> { OpenAI::Models::ImageGenerateParams::Moderation }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] + optional :moderation, enum: -> { OpenAI::ImageGenerateParams::Moderation }, nil?: true # @!attribute n # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only @@ -61,8 +61,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] - optional :output_format, enum: -> { OpenAI::Models::ImageGenerateParams::OutputFormat }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] + optional :output_format, enum: -> { OpenAI::ImageGenerateParams::OutputFormat }, nil?: true # @!attribute quality # The quality of the image that will be generated. @@ -73,8 +73,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # - `hd` and `standard` are supported for `dall-e-3`. 
# - `standard` is the only option for `dall-e-2`. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] - optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::Quality, nil] + optional :quality, enum: -> { OpenAI::ImageGenerateParams::Quality }, nil?: true # @!attribute response_format # The format in which generated images with `dall-e-2` and `dall-e-3` are @@ -82,8 +82,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # after the image has been generated. This parameter isn't supported for # `gpt-image-1` which will always return base64-encoded images. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] - optional :response_format, enum: -> { OpenAI::Models::ImageGenerateParams::ResponseFormat }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] + optional :response_format, enum: -> { OpenAI::ImageGenerateParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `1024x1024`, `1536x1024` @@ -91,8 +91,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] - optional :size, enum: -> { OpenAI::Models::ImageGenerateParams::Size }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::Size, nil] + optional :size, enum: -> { OpenAI::ImageGenerateParams::Size }, nil?: true # @!attribute style # The style of the generated images. This parameter is only supported for @@ -100,8 +100,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # towards generating hyper-real and dramatic images. Natural causes the model to # produce more natural, less hyper-real looking images. # - # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] - optional :style, enum: -> { OpenAI::Models::ImageGenerateParams::Style }, nil?: true + # @return [Symbol, OpenAI::ImageGenerateParams::Style, nil] + optional :style, enum: -> { OpenAI::ImageGenerateParams::Style }, nil?: true # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -116,38 +116,28 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::ImageGenerateParams} for more details. # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte - # ... # - # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im - # ... + # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must - # ... 
+ # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # ... # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only - # ... # - # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su - # ... + # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. ... + # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned - # ... + # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands - # ... + # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- - # ... + # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -178,10 +168,10 @@ module Model variant String # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` is used. - variant enum: -> { OpenAI::Models::ImageModel } + variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] + # @return [Array(String, Symbol, OpenAI::ImageModel)] end # Control the content-moderation level for images generated by `gpt-image-1`. Must diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index aa3ede9f..df579e91 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -13,29 +13,28 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of generated images. # - # @return [Array, nil] - optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] } + # @return [Array, nil] + optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Image] } # @!attribute usage # For `gpt-image-1` only, the token usage information for the image generation. 
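A hedged sketch connecting the generation parameters above to the `ImagesResponse` fields documented below; the prompt is a placeholder, and both `data` and `usage` are optional on the response.

    resp = client.images.generate(
      prompt: "A watercolor otter",
      model: :"gpt-image-1",
      size: :"1024x1024",
      quality: :high  # high/medium/low are gpt-image-1 only
    )

    # gpt-image-1 returns base64-encoded images by default.
    puts resp.data&.first&.b64_json&.bytesize
    puts resp.usage&.total_tokens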
# - # @return [OpenAI::Models::ImagesResponse::Usage, nil] - optional :usage, -> { OpenAI::Models::ImagesResponse::Usage } + # @return [OpenAI::ImagesResponse::Usage, nil] + optional :usage, -> { OpenAI::ImagesResponse::Usage } # @!method initialize(created:, data: nil, usage: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::ImagesResponse} for more details. + # Some parameter documentations has been truncated, see {OpenAI::ImagesResponse} + # for more details. # # The response from the image generation endpoint. # # @param created [Integer] The Unix timestamp (in seconds) of when the image was created. # - # @param data [Array] The list of generated images. + # @param data [Array] The list of generated images. # - # @param usage [OpenAI::Models::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. - # ... + # @param usage [OpenAI::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. - # @see OpenAI::Models::ImagesResponse#usage + # @see OpenAI::ImagesResponse#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens # The number of tokens (images and text) in the input prompt. @@ -46,8 +45,8 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # The input tokens detailed information for the image generation. # - # @return [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] - required :input_tokens_details, -> { OpenAI::Models::ImagesResponse::Usage::InputTokensDetails } + # @return [OpenAI::ImagesResponse::Usage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::ImagesResponse::Usage::InputTokensDetails } # @!attribute output_tokens # The number of image tokens in the output image. @@ -66,13 +65,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. # - # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # @param input_tokens_details [OpenAI::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. # # @param output_tokens [Integer] The number of image tokens in the output image. # # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. - # @see OpenAI::Models::ImagesResponse::Usage#input_tokens_details + # @see OpenAI::ImagesResponse::Usage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute image_tokens # The number of image tokens in the input prompt. diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index e53c66fe..39fc2d41 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -6,20 +6,20 @@ class Moderation < OpenAI::Internal::Type::BaseModel # @!attribute categories # A list of the categories, and whether they are flagged or not. # - # @return [OpenAI::Models::Moderation::Categories] - required :categories, -> { OpenAI::Models::Moderation::Categories } + # @return [OpenAI::Moderation::Categories] + required :categories, -> { OpenAI::Moderation::Categories } # @!attribute category_applied_input_types # A list of the categories along with the input type(s) that the score applies to. 
# - # @return [OpenAI::Models::Moderation::CategoryAppliedInputTypes] - required :category_applied_input_types, -> { OpenAI::Models::Moderation::CategoryAppliedInputTypes } + # @return [OpenAI::Moderation::CategoryAppliedInputTypes] + required :category_applied_input_types, -> { OpenAI::Moderation::CategoryAppliedInputTypes } # @!attribute category_scores # A list of the categories along with their scores as predicted by model. # - # @return [OpenAI::Models::Moderation::CategoryScores] - required :category_scores, -> { OpenAI::Models::Moderation::CategoryScores } + # @return [OpenAI::Moderation::CategoryScores] + required :category_scores, -> { OpenAI::Moderation::CategoryScores } # @!attribute flagged # Whether any of the below categories are flagged. @@ -28,19 +28,18 @@ class Moderation < OpenAI::Internal::Type::BaseModel required :flagged, OpenAI::Internal::Type::Boolean # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Moderation} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Moderation} for + # more details. # - # @param categories [OpenAI::Models::Moderation::Categories] A list of the categories, and whether they are flagged or not. + # @param categories [OpenAI::Moderation::Categories] A list of the categories, and whether they are flagged or not. # - # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. - # ... + # @param category_applied_input_types [OpenAI::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. # - # @param category_scores [OpenAI::Models::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. + # @param category_scores [OpenAI::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. # # @param flagged [Boolean] Whether any of the below categories are flagged. - # @see OpenAI::Models::Moderation#categories + # @see OpenAI::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any @@ -139,39 +138,29 @@ class Categories < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Moderation::Categories} for more details. + # {OpenAI::Moderation::Categories} for more details. # # A list of the categories, and whether they are flagged or not. # # @param harassment [Boolean] Content that expresses, incites, or promotes harassing language towards any targ - # ... # # @param harassment_threatening [Boolean] Harassment content that also includes violence or serious harm towards any targe - # ... # # @param hate [Boolean] Content that expresses, incites, or promotes hate based on race, gender, ethnici - # ... # # @param hate_threatening [Boolean] Hateful content that also includes violence or serious harm towards the targeted - # ... # # @param illicit [Boolean, nil] Content that includes instructions or advice that facilitate the planning or exe - # ... 
# # @param illicit_violent [Boolean, nil] Content that includes instructions or advice that facilitate the planning or exe - # ... # # @param self_harm [Boolean] Content that promotes, encourages, or depicts acts of self-harm, such as suicide - # ... # # @param self_harm_instructions [Boolean] Content that encourages performing acts of self-harm, such as suicide, cutting, - # ... # # @param self_harm_intent [Boolean] Content where the speaker expresses that they are engaging or intend to engage i - # ... # # @param sexual [Boolean] Content meant to arouse sexual excitement, such as the description of sexual act - # ... # # @param sexual_minors [Boolean] Sexual content that includes an individual who is under 18 years old. # @@ -180,135 +169,161 @@ class Categories < OpenAI::Internal::Type::BaseModel # @param violence_graphic [Boolean] Content that depicts death, violence, or physical injury in graphic detail. end - # @see OpenAI::Models::Moderation#category_applied_input_types + # @see OpenAI::Moderation#category_applied_input_types class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The applied input type(s) for the category 'harassment'. # - # @return [Array] + # @return [Array] required :harassment, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Harassment] + } # @!attribute harassment_threatening # The applied input type(s) for the category 'harassment/threatening'. # - # @return [Array] + # @return [Array] required :harassment_threatening, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] + }, api_name: :"harassment/threatening" # @!attribute hate # The applied input type(s) for the category 'hate'. # - # @return [Array] + # @return [Array] required :hate, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Hate] + } # @!attribute hate_threatening # The applied input type(s) for the category 'hate/threatening'. # - # @return [Array] + # @return [Array] required :hate_threatening, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening] + }, api_name: :"hate/threatening" # @!attribute illicit # The applied input type(s) for the category 'illicit'. # - # @return [Array] + # @return [Array] required :illicit, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Illicit] + } # @!attribute illicit_violent # The applied input type(s) for the category 'illicit/violent'. 
# - # @return [Array] + # @return [Array] required :illicit_violent, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent] + }, api_name: :"illicit/violent" # @!attribute self_harm # The applied input type(s) for the category 'self-harm'. # - # @return [Array] + # @return [Array] required :self_harm, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm] + }, api_name: :"self-harm" # @!attribute self_harm_instructions # The applied input type(s) for the category 'self-harm/instructions'. # - # @return [Array] + # @return [Array] required :self_harm_instructions, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] + }, api_name: :"self-harm/instructions" # @!attribute self_harm_intent # The applied input type(s) for the category 'self-harm/intent'. # - # @return [Array] + # @return [Array] required :self_harm_intent, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] + }, api_name: :"self-harm/intent" # @!attribute sexual # The applied input type(s) for the category 'sexual'. # - # @return [Array] + # @return [Array] required :sexual, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Sexual] + } # @!attribute sexual_minors # The applied input type(s) for the category 'sexual/minors'. # - # @return [Array] + # @return [Array] required :sexual_minors, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor] + }, api_name: :"sexual/minors" # @!attribute violence # The applied input type(s) for the category 'violence'. # - # @return [Array] + # @return [Array] required :violence, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence] } + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Violence] + } # @!attribute violence_graphic # The applied input type(s) for the category 'violence/graphic'. # - # @return [Array] + # @return [Array] required :violence_graphic, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] }, + -> { + OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] + }, api_name: :"violence/graphic" # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # A list of the categories along with the input type(s) that the score applies to. 
# - # @param harassment [Array] The applied input type(s) for the category 'harassment'. + # @param harassment [Array] The applied input type(s) for the category 'harassment'. # - # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. + # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. # - # @param hate [Array] The applied input type(s) for the category 'hate'. + # @param hate [Array] The applied input type(s) for the category 'hate'. # - # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. + # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. # - # @param illicit [Array] The applied input type(s) for the category 'illicit'. + # @param illicit [Array] The applied input type(s) for the category 'illicit'. # - # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. + # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. # - # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. + # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. # - # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. + # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. # - # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. + # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. # - # @param sexual [Array] The applied input type(s) for the category 'sexual'. + # @param sexual [Array] The applied input type(s) for the category 'sexual'. # - # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. + # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. # - # @param violence [Array] The applied input type(s) for the category 'violence'. + # @param violence [Array] The applied input type(s) for the category 'violence'. # - # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. + # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. module Harassment extend OpenAI::Internal::Type::Enum @@ -434,7 +449,7 @@ module ViolenceGraphic end end - # @see OpenAI::Models::Moderation#category_scores + # @see OpenAI::Moderation#category_scores class CategoryScores < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The score for the category 'harassment'. diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index ae82c207..59e0cdd9 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -11,8 +11,8 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. # - # @return [String, Array, Array] - required :input, union: -> { OpenAI::Models::ModerationCreateParams::Input } + # @return [String, Array, Array] + required :input, union: -> { OpenAI::ModerationCreateParams::Input } # @!attribute model # The content moderation model you would like to use. 
      #   The content moderation model you would like to use. Learn more in
@@ -20,17 +20,16 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel
       #   learn about available models
       #   [here](https://platform.openai.com/docs/models#moderation).
       #
-      #   @return [String, Symbol, OpenAI::Models::ModerationModel, nil]
-      optional :model, union: -> { OpenAI::Models::ModerationCreateParams::Model }
+      #   @return [String, Symbol, OpenAI::ModerationModel, nil]
+      optional :model, union: -> { OpenAI::ModerationCreateParams::Model }
 
       # @!method initialize(input:, model: nil, request_options: {})
       #   Some parameter documentations has been truncated, see
       #   {OpenAI::Models::ModerationCreateParams} for more details.
       #
-      #   @param input [String, Array<String>, Array<OpenAI::Models::ModerationMultiModalInput>] Input (or inputs) to classify. Can be a single string, an array of strings, or
-      #   ...
+      #   @param input [String, Array<String>, Array<OpenAI::ModerationMultiModalInput>] Input (or inputs) to classify. Can be a single string, an array of strings, or
       #
-      #   @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in ...
+      #   @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. Learn more in
       #
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
@@ -43,20 +42,20 @@ module Input
         variant String
 
         # An array of strings to classify for moderation.
-        variant -> { OpenAI::Models::ModerationCreateParams::Input::StringArray }
+        variant -> { OpenAI::ModerationCreateParams::Input::StringArray }
 
         # An array of multi-modal inputs to the moderation model.
-        variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray }
+        variant -> { OpenAI::ModerationCreateParams::Input::ModerationMultiModalInputArray }
 
         # @!method self.variants
-        #   @return [Array(String, Array<String>, Array<OpenAI::Models::ModerationMultiModalInput>)]
+        #   @return [Array(String, Array<String>, Array<OpenAI::ModerationMultiModalInput>)]
 
         # @type [OpenAI::Internal::Type::Converter]
         StringArray = OpenAI::Internal::Type::ArrayOf[String]
 
         # @type [OpenAI::Internal::Type::Converter]
         ModerationMultiModalInputArray =
-          OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::ModerationMultiModalInput }]
+          OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::ModerationMultiModalInput }]
       end
 
       # The content moderation model you would like to use. Learn more in
@@ -71,10 +70,10 @@ module Model
         # The content moderation model you would like to use. Learn more in
         # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and learn about
         # available models [here](https://platform.openai.com/docs/models#moderation).
-        variant enum: -> { OpenAI::Models::ModerationModel }
+        variant enum: -> { OpenAI::ModerationModel }
 
         # @!method self.variants
-        #   @return [Array(String, Symbol, OpenAI::Models::ModerationModel)]
+        #   @return [Array(String, Symbol, OpenAI::ModerationModel)]
       end
     end
   end
 end
diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb
index 18ff1405..17c60d91 100644
--- a/lib/openai/models/moderation_create_response.rb
+++ b/lib/openai/models/moderation_create_response.rb
@@ -19,8 +19,8 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel
       # @!attribute results
       #   A list of moderation objects.
       #
-      #   @return [Array<OpenAI::Models::Moderation>]
-      required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation] }
+      #   @return [Array<OpenAI::Moderation>]
+      required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Moderation] }
 
       # @!method initialize(id:, model:, results:)
       #   Represents if a given text input is potentially harmful.
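The `Input` union above accepts three shapes through the same parameter. A rough usage sketch (the client setup, method name, and model identifier here are illustrative assumptions, not taken from this patch):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # String variant: a single piece of text.
    client.moderations.create(input: "text to classify")

    # StringArray variant: several texts classified in one call.
    client.moderations.create(input: ["first text", "second text"])

    # ModerationMultiModalInputArray variant: mixed text and image inputs.
    client.moderations.create(
      model: :"omni-moderation-latest",
      input: [
        {type: :text, text: "caption to check"},
        {type: :image_url, image_url: {url: "https://example.com/image.png"}}
      ]
    )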
@@ -29,7 +29,7 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # # @param model [String] The model used to generate the moderation results. # - # @param results [Array] A list of moderation objects. + # @param results [Array] A list of moderation objects. end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index ac2342f8..ed95c5b8 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -6,8 +6,8 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!attribute image_url # Contains either an image URL or a data URL for a base64 encoded image. # - # @return [OpenAI::Models::ModerationImageURLInput::ImageURL] - required :image_url, -> { OpenAI::Models::ModerationImageURLInput::ImageURL } + # @return [OpenAI::ModerationImageURLInput::ImageURL] + required :image_url, -> { OpenAI::ModerationImageURLInput::ImageURL } # @!attribute type # Always `image_url`. @@ -18,11 +18,11 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # An object describing an image to classify. # - # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. + # @param image_url [OpenAI::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. # # @param type [Symbol, :image_url] Always `image_url`. - # @see OpenAI::Models::ModerationImageURLInput#image_url + # @see OpenAI::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 32f5923b..5142ae94 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -9,13 +9,13 @@ module ModerationMultiModalInput discriminator :type # An object describing an image to classify. - variant :image_url, -> { OpenAI::Models::ModerationImageURLInput } + variant :image_url, -> { OpenAI::ModerationImageURLInput } # An object describing text to classify. - variant :text, -> { OpenAI::Models::ModerationTextInput } + variant :text, -> { OpenAI::ModerationTextInput } # @!method self.variants - # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + # @return [Array(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 09690970..9c418c18 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -11,8 +11,8 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] - optional :effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute generate_summary # @deprecated @@ -23,31 +23,31 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. 
One of `auto`, # `concise`, or `detailed`. # - # @return [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] - optional :generate_summary, enum: -> { OpenAI::Models::Reasoning::GenerateSummary }, nil?: true + # @return [Symbol, OpenAI::Reasoning::GenerateSummary, nil] + optional :generate_summary, enum: -> { OpenAI::Reasoning::GenerateSummary }, nil?: true # @!attribute summary # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] - optional :summary, enum: -> { OpenAI::Models::Reasoning::Summary }, nil?: true + # @return [Symbol, OpenAI::Reasoning::Summary, nil] + optional :summary, enum: -> { OpenAI::Reasoning::Summary }, nil?: true # @!method initialize(effort: nil, generate_summary: nil, summary: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Reasoning} for more details. + # Some parameter documentations has been truncated, see {OpenAI::Reasoning} for + # more details. # # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. ... + # @param generate_summary [Symbol, OpenAI::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. # - # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be ... + # @param summary [Symbol, OpenAI::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be # @deprecated # @@ -57,7 +57,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @see OpenAI::Models::Reasoning#generate_summary + # @see OpenAI::Reasoning#generate_summary module GenerateSummary extend OpenAI::Internal::Type::Enum @@ -73,7 +73,7 @@ module GenerateSummary # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @see OpenAI::Models::Reasoning#summary + # @see OpenAI::Reasoning#summary module Summary extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 3e78c9ff..294afc55 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -6,8 +6,8 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute json_schema # Structured Outputs configuration options, including a JSON Schema. # - # @return [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] - required :json_schema, -> { OpenAI::Models::ResponseFormatJSONSchema::JSONSchema } + # @return [OpenAI::ResponseFormatJSONSchema::JSONSchema] + required :json_schema, -> { OpenAI::ResponseFormatJSONSchema::JSONSchema } # @!attribute type # The type of response format being defined. Always `json_schema`. 
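To make the `JSONSchema` options above concrete, a minimal sketch of a payload with the `ResponseFormatJSONSchema` shape (the schema contents are invented for illustration; only `name` is required alongside `type: :json_schema`):

    response_format = {
      type: :json_schema,
      json_schema: {
        name: "weather_report",                      # required
        description: "A structured weather summary", # optional
        strict: true,                                # optional, may be nil
        schema: {                                    # optional JSON Schema object
          type: "object",
          properties: {
            city: {type: "string"},
            temperature_c: {type: "number"}
          },
          required: %w[city temperature_c],
          additionalProperties: false
        }
      }
    }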
@@ -17,17 +17,17 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(json_schema:, type: :json_schema) # Some parameter documentations has been truncated, see - # {OpenAI::Models::ResponseFormatJSONSchema} for more details. + # {OpenAI::ResponseFormatJSONSchema} for more details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. ... + # @param json_schema [OpenAI::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. # # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. - # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema + # @see OpenAI::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores @@ -62,17 +62,17 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, schema: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::ResponseFormatJSONSchema::JSONSchema} for more details. + # {OpenAI::ResponseFormatJSONSchema::JSONSchema} for more details. # # Structured Outputs configuration options, including a JSON Schema. # - # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain ... + # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain # - # @param description [String] A description of what the response format is for, used by the model to ... + # @param description [String] A description of what the response format is for, used by the model to # - # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. ... + # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. # - # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. ... + # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index d65a0f31..e6b14978 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -19,8 +19,8 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @!attribute environment # The type of computer environment to control. # - # @return [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] - required :environment, enum: -> { OpenAI::Models::Responses::ComputerTool::Environment } + # @return [Symbol, OpenAI::Responses::ComputerTool::Environment] + required :environment, enum: -> { OpenAI::Responses::ComputerTool::Environment } # @!attribute type # The type of the computer use tool. Always `computer_use_preview`. @@ -36,13 +36,13 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # # @param display_width [Integer] The width of the computer display. # - # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. 
+ # @param environment [Symbol, OpenAI::Responses::ComputerTool::Environment] The type of computer environment to control. # # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. # The type of computer environment to control. # - # @see OpenAI::Models::Responses::ComputerTool#environment + # @see OpenAI::Responses::ComputerTool#environment module Environment extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 172802bc..3cdb0887 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -8,25 +8,25 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. # - # @return [String, Array] - required :content, union: -> { OpenAI::Models::Responses::EasyInputMessage::Content } + # @return [String, Array] + required :content, union: -> { OpenAI::Responses::EasyInputMessage::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] - required :role, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Role } + # @return [Symbol, OpenAI::Responses::EasyInputMessage::Role] + required :role, enum: -> { OpenAI::Responses::EasyInputMessage::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type, nil] - optional :type, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Type } + # @return [Symbol, OpenAI::Responses::EasyInputMessage::Type, nil] + optional :type, enum: -> { OpenAI::Responses::EasyInputMessage::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::EasyInputMessage} for more details. + # {OpenAI::Responses::EasyInputMessage} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -34,16 +34,16 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. ... + # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. # - # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or ... + # @param role [Symbol, OpenAI::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. ... + # @param type [Symbol, OpenAI::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. 
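A hedged sketch of the two `Content` shapes an `EasyInputMessage` can carry (field names follow the models in this patch; the image URL and `detail` value are illustrative):

    # String variant: plain text content.
    simple = {role: :user, content: "What is in this image?", type: :message}

    # Content-list variant: a list of typed multi-modal parts.
    multi_modal = {
      role: :user,
      content: [
        {type: :input_text, text: "What is in this image?"},
        {type: :input_image, image_url: "https://example.com/cat.png", detail: :auto}
      ]
    }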
# - # @see OpenAI::Models::Responses::EasyInputMessage#content + # @see OpenAI::Responses::EasyInputMessage#content module Content extend OpenAI::Internal::Type::Union @@ -52,16 +52,16 @@ module Content # A list of one or many input items to the model, containing different content # types. - variant -> { OpenAI::Models::Responses::ResponseInputMessageContentList } + variant -> { OpenAI::Responses::ResponseInputMessageContentList } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::Responses::EasyInputMessage#role + # @see OpenAI::Responses::EasyInputMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -76,7 +76,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Models::Responses::EasyInputMessage#type + # @see OpenAI::Responses::EasyInputMessage#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 1019298c..515ef7a2 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -19,8 +19,8 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute filters # A filter to apply. # - # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] - optional :filters, union: -> { OpenAI::Models::Responses::FileSearchTool::Filters }, nil?: true + # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + optional :filters, union: -> { OpenAI::Responses::FileSearchTool::Filters }, nil?: true # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 @@ -32,12 +32,12 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. # - # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions, nil] - optional :ranking_options, -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions } + # @return [OpenAI::Responses::FileSearchTool::RankingOptions, nil] + optional :ranking_options, -> { OpenAI::Responses::FileSearchTool::RankingOptions } # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::FileSearchTool} for more details. + # {OpenAI::Responses::FileSearchTool} for more details. # # A tool that searches for relevant content from uploaded files. Learn more about # the @@ -45,38 +45,37 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @param vector_store_ids [Array] The IDs of the vector stores to search. # - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] A filter to apply. + # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] A filter to apply. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 - # ... # - # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Responses::FileSearchTool::RankingOptions] Ranking options for search. # # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. # A filter to apply. 
# - # @see OpenAI::Models::Responses::FileSearchTool#filters + # @see OpenAI::Responses::FileSearchTool#filters module Filters extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. - variant -> { OpenAI::Models::ComparisonFilter } + variant -> { OpenAI::ComparisonFilter } # Combine multiple filters using `and` or `or`. - variant -> { OpenAI::Models::CompoundFilter } + variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] + # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] end - # @see OpenAI::Models::Responses::FileSearchTool#ranking_options + # @see OpenAI::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. # - # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker, nil] - optional :ranker, enum: -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker } + # @return [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker, nil] + optional :ranker, enum: -> { OpenAI::Responses::FileSearchTool::RankingOptions::Ranker } # @!attribute score_threshold # The score threshold for the file search, a number between 0 and 1. Numbers @@ -88,18 +87,17 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::FileSearchTool::RankingOptions} for more details. + # {OpenAI::Responses::FileSearchTool::RankingOptions} for more details. # # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. + # @param ranker [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. # # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. Numbers close - # ... # The ranker to use for the file search. # - # @see OpenAI::Models::Responses::FileSearchTool::RankingOptions#ranker + # @see OpenAI::Responses::FileSearchTool::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index f37a388a..246cf91a 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -37,7 +37,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::FunctionTool} for more details. + # {OpenAI::Responses::FunctionTool} for more details. # # Defines a function in your own code the model can choose to call. Learn more # about @@ -50,7 +50,6 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @param strict [Boolean, nil] Whether to enforce strict parameter validation. Default `true`. # # @param description [String, nil] A description of the function. Used by the model to determine whether or not to - # ... # # @param type [Symbol, :function] The type of the function tool. Always `function`. 
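Pulling the attributes above together, a sketch of a complete `FunctionTool` definition (the weather function is invented for illustration):

    function_tool = {
      type: :function,
      name: "get_weather",                      # required
      strict: true,                             # required: enforce exact schema adherence
      description: "Look up current weather.",  # optional: helps the model pick the tool
      parameters: {                             # required: JSON Schema for the arguments
        type: "object",
        properties: {city: {type: "string"}},
        required: ["city"],
        additionalProperties: false
      }
    }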
end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index 795ddb04..fb2306d3 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -24,9 +24,8 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # - # @return [Array, nil] - optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and @@ -41,22 +40,22 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. # - # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::Responses::InputItemListParams::Order } + # @return [Symbol, OpenAI::Responses::InputItemListParams::Order, nil] + optional :order, enum: -> { OpenAI::Responses::InputItemListParams::Order } # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::InputItemListParams} for more details. # - # @param after [String] An item ID to list items after, used in pagination. ... + # @param after [String] An item ID to list items after, used in pagination. # - # @param before [String] An item ID to list items before, used in pagination. ... + # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` ... + # @param include [Array] Additional fields to include in the response. See the `include` # - # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between ... + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. ... + # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 6adb6175..4813a1af 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -22,14 +22,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute error # An error object returned when the model fails to generate a Response. # - # @return [OpenAI::Models::Responses::ResponseError, nil] - required :error, -> { OpenAI::Models::Responses::ResponseError }, nil?: true + # @return [OpenAI::Responses::ResponseError, nil] + required :error, -> { OpenAI::Responses::ResponseError }, nil?: true # @!attribute incomplete_details # Details about why the response is incomplete. 
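Assuming the SDK's generated `input_items` resource takes the list params above as keyword arguments (a sketch; the response ID and the `data` pagination accessor are assumptions), listing follows the usual cursor pattern:

    page = client.responses.input_items.list(
      "resp_123",
      limit: 20,   # 1 to 100, server default 20
      order: :asc  # or :desc
    )
    page.data.each { |item| puts item.inspect }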
# - # @return [OpenAI::Models::Responses::Response::IncompleteDetails, nil] - required :incomplete_details, -> { OpenAI::Models::Responses::Response::IncompleteDetails }, nil?: true + # @return [OpenAI::Responses::Response::IncompleteDetails, nil] + required :incomplete_details, -> { OpenAI::Responses::Response::IncompleteDetails }, nil?: true # @!attribute instructions # Inserts a system (or developer) message as the first item in the model's @@ -60,8 +60,8 @@ class Response < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - required :model, union: -> { OpenAI::Models::ResponsesModel } + # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + required :model, union: -> { OpenAI::ResponsesModel } # @!attribute object # The object type of this resource - always set to `response`. @@ -78,9 +78,8 @@ class Response < OpenAI::Internal::Type::BaseModel # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. # - # @return [Array] - required :output, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem] } + # @return [Array] + required :output, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls # Whether to allow the model to run tool calls in parallel. @@ -102,8 +101,8 @@ class Response < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - required :tool_choice, union: -> { OpenAI::Models::Responses::Response::ToolChoice } + # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] + required :tool_choice, union: -> { OpenAI::Responses::Response::ToolChoice } # @!attribute tools # An array of tools the model may call while generating a response. You can @@ -121,8 +120,8 @@ class Response < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array] - required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } + # @return [Array] + required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -156,8 +155,8 @@ class Response < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Models::Reasoning, nil] - optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true + # @return [OpenAI::Reasoning, nil] + optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier # Specifies the latency tier to use for processing the request. This parameter is @@ -178,15 +177,15 @@ class Response < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. 
# - # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] - optional :service_tier, enum: -> { OpenAI::Models::Responses::Response::ServiceTier }, nil?: true + # @return [Symbol, OpenAI::Responses::Response::ServiceTier, nil] + optional :service_tier, enum: -> { OpenAI::Responses::Response::ServiceTier }, nil?: true # @!attribute status # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseStatus } + # @return [Symbol, OpenAI::Responses::ResponseStatus, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseStatus } # @!attribute text # Configuration options for a text response from the model. Can be plain text or @@ -195,8 +194,8 @@ class Response < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] - optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } + # @return [OpenAI::Responses::ResponseTextConfig, nil] + optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute truncation # The truncation strategy to use for the model response. @@ -207,15 +206,15 @@ class Response < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] - optional :truncation, enum: -> { OpenAI::Models::Responses::Response::Truncation }, nil?: true + # @return [Symbol, OpenAI::Responses::Response::Truncation, nil] + optional :truncation, enum: -> { OpenAI::Responses::Response::Truncation }, nil?: true # @!attribute usage # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # - # @return [OpenAI::Models::Responses::ResponseUsage, nil] - optional :usage, -> { OpenAI::Models::Responses::ResponseUsage } + # @return [OpenAI::Responses::ResponseUsage, nil] + optional :usage, -> { OpenAI::Responses::ResponseUsage } # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -227,75 +226,70 @@ class Response < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::Response} for more details. + # {OpenAI::Responses::Response} for more details. # - # @param id [String] Unique identifier for this Response. ... + # @param id [String] Unique identifier for this Response. # - # @param created_at [Float] Unix timestamp (in seconds) of when this Response was created. ... + # @param created_at [Float] Unix timestamp (in seconds) of when this Response was created. # - # @param error [OpenAI::Models::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. ... 
+ # @param error [OpenAI::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. # - # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. ... + # @param incomplete_details [OpenAI::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param output [Array] An array of content items generated by the model. ... + # @param output [Array] An array of content items generated by the model. # - # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. ... + # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You ... + # @param tools [Array] An array of tools the model may call while generating a response. You # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in - # ... # - # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, ... 
+ # @param status [Symbol, OpenAI::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. ... + # @param truncation [Symbol, OpenAI::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. # - # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, ... + # @param usage [OpenAI::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # - # @param object [Symbol, :response] The object type of this resource - always set to `response`. ... + # @param object [Symbol, :response] The object type of this resource - always set to `response`. - # @see OpenAI::Models::Responses::Response#incomplete_details + # @see OpenAI::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the response is incomplete. # - # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason, nil] - optional :reason, enum: -> { OpenAI::Models::Responses::Response::IncompleteDetails::Reason } + # @return [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason, nil] + optional :reason, enum: -> { OpenAI::Responses::Response::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Details about why the response is incomplete. # - # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. + # @param reason [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. # The reason why the response is incomplete. # - # @see OpenAI::Models::Responses::Response::IncompleteDetails#reason + # @see OpenAI::Responses::Response::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -311,7 +305,7 @@ module Reason # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @see OpenAI::Models::Responses::Response#tool_choice + # @see OpenAI::Responses::Response#tool_choice module ToolChoice extend OpenAI::Internal::Type::Union @@ -323,17 +317,17 @@ module ToolChoice # more tools. # # `required` means the model must call one or more tools. - variant enum: -> { OpenAI::Models::Responses::ToolChoiceOptions } + variant enum: -> { OpenAI::Responses::ToolChoiceOptions } # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). - variant -> { OpenAI::Models::Responses::ToolChoiceTypes } + variant -> { OpenAI::Responses::ToolChoiceTypes } # Use this option to force the model to call a specific function. 
- variant -> { OpenAI::Models::Responses::ToolChoiceFunction } + variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] + # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -354,7 +348,7 @@ module ToolChoice # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Models::Responses::Response#service_tier + # @see OpenAI::Responses::Response#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum @@ -374,7 +368,7 @@ module ServiceTier # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @see OpenAI::Models::Responses::Response#truncation + # @see OpenAI::Responses::Response#truncation module Truncation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index 2a0394f8..f630ddd2 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -18,13 +18,13 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, type: :"response.audio.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseAudioDeltaEvent} for more details. + # {OpenAI::Responses::ResponseAudioDeltaEvent} for more details. # # Emitted when there is a partial audio response. # - # @param delta [String] A chunk of Base64 encoded response audio bytes. ... + # @param delta [String] A chunk of Base64 encoded response audio bytes. # - # @param type [Symbol, :"response.audio.delta"] The type of the event. Always `response.audio.delta`. ... + # @param type [Symbol, :"response.audio.delta"] The type of the event. Always `response.audio.delta`. end end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index 218e6146..d156cd64 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -12,11 +12,11 @@ class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :"response.audio.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseAudioDoneEvent} for more details. + # {OpenAI::Responses::ResponseAudioDoneEvent} for more details. # # Emitted when the audio response is complete. # - # @param type [Symbol, :"response.audio.done"] The type of the event. Always `response.audio.done`. ... + # @param type [Symbol, :"response.audio.done"] The type of the event. Always `response.audio.done`. 
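The delta/done event pairs above are meant for incremental consumption. A hedged sketch of dispatching on event classes while streaming, where `stream` comes from some streaming Responses call (that entry point is assumed, not part of this patch):

    require "base64"

    audio_chunks = []
    stream.each do |event|
      case event
      when OpenAI::Responses::ResponseAudioDeltaEvent
        audio_chunks << event.delta # a chunk of Base64-encoded audio bytes
      when OpenAI::Responses::ResponseAudioDoneEvent
        audio = Base64.decode64(audio_chunks.join) # complete once the done event arrives
      end
    end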
end end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 31a92552..1361afe0 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -18,13 +18,13 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, type: :"response.audio.transcript.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent} for more details. + # {OpenAI::Responses::ResponseAudioTranscriptDeltaEvent} for more details. # # Emitted when there is a partial transcript of audio. # - # @param delta [String] The partial transcript of the audio response. ... + # @param delta [String] The partial transcript of the audio response. # - # @param type [Symbol, :"response.audio.transcript.delta"] The type of the event. Always `response.audio.transcript.delta`. ... + # @param type [Symbol, :"response.audio.transcript.delta"] The type of the event. Always `response.audio.transcript.delta`. end end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 806bed6e..13d588f5 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -12,11 +12,11 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :"response.audio.transcript.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent} for more details. + # {OpenAI::Responses::ResponseAudioTranscriptDoneEvent} for more details. # # Emitted when the full audio transcript is completed. # - # @param type [Symbol, :"response.audio.transcript.done"] The type of the event. Always `response.audio.transcript.done`. ... + # @param type [Symbol, :"response.audio.transcript.done"] The type of the event. Always `response.audio.transcript.done`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 0f337be3..07cebc16 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -24,16 +24,15 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more - # details. + # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more details. # # Emitted when a partial code snippet is added by the code interpreter. # - # @param delta [String] The partial code snippet added by the code interpreter. ... + # @param delta [String] The partial code snippet added by the code interpreter. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. 
# - # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. Always `response.code_interpreter_call.code.delta`. ... + # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. Always `response.code_interpreter_call.code.delta`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 4d7ad38d..40845a15 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -24,16 +24,15 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more - # details. + # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more details. # # Emitted when code snippet output is finalized by the code interpreter. # - # @param code [String] The final code snippet output by the code interpreter. ... + # @param code [String] The final code snippet output by the code interpreter. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # - # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. ... + # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index f5cd8062..37bf5f84 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -7,8 +7,8 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @!attribute code_interpreter_call # A tool call to run code. # - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall } + # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index # The index of the output item that the code interpreter call is in progress. @@ -24,16 +24,15 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more - # details. + # {OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent} for more details. # # Emitted when the code interpreter call is completed. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... 
+ # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # - # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. ... + # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 2d6c8c32..96683a3d 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -7,8 +7,8 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @!attribute code_interpreter_call # A tool call to run code. # - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall } + # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index # The index of the output item that the code interpreter call is in progress. @@ -24,16 +24,16 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more + # {OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent} for more # details. # # Emitted when a code interpreter call is in progress. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... + # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # - # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. ... + # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 63e41324..1cf23747 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -7,8 +7,8 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @!attribute code_interpreter_call # A tool call to run code. 
# - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall } + # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index # The index of the output item that the code interpreter call is in progress. @@ -24,16 +24,16 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for - # more details. + # {OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent} for more + # details. # # Emitted when the code interpreter is actively interpreting the code snippet. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. ... + # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. ... + # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # - # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. ... + # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 3ab4f34f..16bac78f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -19,15 +19,17 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the code interpreter tool call. # - # @return [Array] + # @return [Array] required :results, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result] + } # @!attribute status # The status of the code interpreter tool call. # - # @return [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] - required :status, enum: -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status } + # @return [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Status } # @!attribute type # The type of the code interpreter tool call. Always `code_interpreter_call`. @@ -37,19 +39,19 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details. + # {OpenAI::Responses::ResponseCodeInterpreterToolCall} for more details. # # A tool call to run code. 
# - # @param id [String] The unique ID of the code interpreter tool call. ... + # @param id [String] The unique ID of the code interpreter tool call. # - # @param code [String] The code to run. ... + # @param code [String] The code to run. # - # @param results [Array] The results of the code interpreter tool call. ... + # @param results [Array] The results of the code interpreter tool call. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. ... + # @param status [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. # - # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. ... + # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. # The output of a code interpreter tool call that is text. module Result @@ -58,10 +60,10 @@ module Result discriminator :type # The output of a code interpreter tool call that is text. - variant :logs, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs } + variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs } # The output of a code interpreter tool call that is a file. - variant :files, -> { OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files } + variant :files, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs @@ -78,22 +80,24 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method initialize(logs:, type: :logs) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for - # more details. + # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for more + # details. # # The output of a code interpreter tool call that is text. # - # @param logs [String] The logs of the code interpreter tool call. ... + # @param logs [String] The logs of the code interpreter tool call. # - # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`. ... + # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`. end class Files < OpenAI::Internal::Type::BaseModel # @!attribute files # - # @return [Array] + # @return [Array] required :files, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] + } # @!attribute type # The type of the code interpreter file output. Always `files`. @@ -103,14 +107,14 @@ class Files < OpenAI::Internal::Type::BaseModel # @!method initialize(files:, type: :files) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for - # more details. + # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files} for more + # details. # # The output of a code interpreter tool call that is a file. # - # @param files [Array] + # @param files [Array] # - # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. ... + # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. 
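The `Result` union above discriminates on its `type` tag (`:logs` or `:files`). The following is a consumption sketch, not part of this patch: it assumes `call` is an already-parsed `OpenAI::Responses::ResponseCodeInterpreterToolCall` and that attribute readers mirror the `required` declarations shown here (`status`, `code`, `results`, `logs`, `files`, `file_id`, `mime_type`).

```ruby
# Hedged sketch: print what a completed code interpreter call produced.
# The :completed status symbol is inferred from the event names in this
# patch; the Status enum's members are elided in the hunks shown.
def dump_code_interpreter_call(call)
  return unless call.status == :completed

  puts "ran:\n#{call.code}"
  call.results.each do |result|
    case result.type
    in :logs
      puts result.logs
    in :files
      result.files.each { |file| puts "wrote #{file.file_id} (#{file.mime_type})" }
    end
  end
end
```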
class File < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -127,22 +131,22 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, mime_type:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} - # for more details. + # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} for + # more details. # - # @param file_id [String] The ID of the file. ... + # @param file_id [String] The ID of the file. # - # @param mime_type [String] The MIME type of the file. ... + # @param mime_type [String] The MIME type of the file. end end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] + # @return [Array(OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files)] end # The status of the code interpreter tool call. # - # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status + # @see OpenAI::Responses::ResponseCodeInterpreterToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index e4995227..5bbb6426 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -7,8 +7,8 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # Properties of the completed response. # - # @return [OpenAI::Models::Responses::Response] - required :response, -> { OpenAI::Models::Responses::Response } + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } # @!attribute type # The type of the event. Always `response.completed`. @@ -18,13 +18,13 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, type: :"response.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCompletedEvent} for more details. + # {OpenAI::Responses::ResponseCompletedEvent} for more details. # # Emitted when the model response is complete. # - # @param response [OpenAI::Models::Responses::Response] Properties of the completed response. ... + # @param response [OpenAI::Responses::Response] Properties of the completed response. # - # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`. ... + # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`. end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 5f2dedd1..b18746b0 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -13,8 +13,8 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute action # A click action. 
# - # @return [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - required :action, union: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action } + # @return [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] + required :action, union: -> { OpenAI::Responses::ResponseComputerToolCall::Action } # @!attribute call_id # An identifier used when responding to the tool call with output. @@ -25,26 +25,28 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute pending_safety_checks # The pending safety checks for the computer call. # - # @return [Array] + # @return [Array] required :pending_safety_checks, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] + } # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] - required :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Status } + # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Status } # @!attribute type # The type of the computer call. Always `computer_call`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] - required :type, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Type } + # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] + required :type, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Type } # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall} for more details. + # {OpenAI::Responses::ResponseComputerToolCall} for more details. # # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) @@ -52,58 +54,58 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the computer call. 
# - # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] A click action. ... + # @param action [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] A click action. # - # @param call_id [String] An identifier used when responding to the tool call with output. ... + # @param call_id [String] An identifier used when responding to the tool call with output. # - # @param pending_safety_checks [Array] The pending safety checks for the computer call. ... + # @param pending_safety_checks [Array] The pending safety checks for the computer call. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. + # @param type [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. # A click action. # - # @see OpenAI::Models::Responses::ResponseComputerToolCall#action + # @see OpenAI::Responses::ResponseComputerToolCall#action module Action extend OpenAI::Internal::Type::Union discriminator :type # A click action. - variant :click, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click } + variant :click, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Click } # A double click action. - variant :double_click, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick } + variant :double_click, -> { OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick } # A drag action. - variant :drag, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag } + variant :drag, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Drag } # A collection of keypresses the model would like to perform. - variant :keypress, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress } + variant :keypress, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Keypress } # A mouse move action. - variant :move, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move } + variant :move, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Move } # A screenshot action. 
- variant :screenshot, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot } + variant :screenshot, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot } # A scroll action. - variant :scroll, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll } + variant :scroll, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Scroll } # An action to type in text. - variant :type, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type } + variant :type, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Type } # A wait action. - variant :wait, -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait } + variant :wait, -> { OpenAI::Responses::ResponseComputerToolCall::Action::Wait } class Click < OpenAI::Internal::Type::BaseModel # @!attribute button # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] - required :button, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button } + # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] + required :button, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button } # @!attribute type # Specifies the event type. For a click action, this property is always set to @@ -126,24 +128,22 @@ class Click < OpenAI::Internal::Type::BaseModel # @!method initialize(button:, x:, y_:, type: :click) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Click} for more details. # # A click action. # - # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right - # ... + # @param button [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right # - # @param x [Integer] The x-coordinate where the click occurred. ... + # @param x [Integer] The x-coordinate where the click occurred. # - # @param y_ [Integer] The y-coordinate where the click occurred. ... + # @param y_ [Integer] The y-coordinate where the click occurred. # - # @param type [Symbol, :click] Specifies the event type. For a click action, this property is ... + # @param type [Symbol, :click] Specifies the event type. For a click action, this property is # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # - # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button + # @see OpenAI::Responses::ResponseComputerToolCall::Action::Click#button module Button extend OpenAI::Internal::Type::Enum @@ -180,16 +180,16 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :double_click) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick} for - # more details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick} for more + # details. # # A double click action. # - # @param x [Integer] The x-coordinate where the double click occurred. ... 
+ # @param x [Integer] The x-coordinate where the double click occurred. # - # @param y_ [Integer] The y-coordinate where the double click occurred. ... + # @param y_ [Integer] The y-coordinate where the double click occurred. # - # @param type [Symbol, :double_click] Specifies the event type. For a double click action, this property is ... + # @param type [Symbol, :double_click] Specifies the event type. For a double click action, this property is end class Drag < OpenAI::Internal::Type::BaseModel @@ -204,9 +204,11 @@ class Drag < OpenAI::Internal::Type::BaseModel # ] # ``` # - # @return [Array] + # @return [Array] required :path, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path] } + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] + } # @!attribute type # Specifies the event type. For a drag action, this property is always set to @@ -217,15 +219,13 @@ class Drag < OpenAI::Internal::Type::BaseModel # @!method initialize(path:, type: :drag) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag} for more details. # # A drag action. # - # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi - # ... + # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi # - # @param type [Symbol, :drag] Specifies the event type. For a drag action, this property is ... + # @param type [Symbol, :drag] Specifies the event type. For a drag action, this property is class Path < OpenAI::Internal::Type::BaseModel # @!attribute x @@ -242,14 +242,14 @@ class Path < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path} for - # more details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path} for more + # details. # # A series of x/y coordinate pairs in the drag path. # - # @param x [Integer] The x-coordinate. ... + # @param x [Integer] The x-coordinate. # - # @param y_ [Integer] The y-coordinate. ... + # @param y_ [Integer] The y-coordinate. end end @@ -270,14 +270,14 @@ class Keypress < OpenAI::Internal::Type::BaseModel # @!method initialize(keys:, type: :keypress) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress} for more + # {OpenAI::Responses::ResponseComputerToolCall::Action::Keypress} for more # details. # # A collection of keypresses the model would like to perform. # - # @param keys [Array] The combination of keys the model is requesting to be pressed. This is an ... + # @param keys [Array] The combination of keys the model is requesting to be pressed. This is an # - # @param type [Symbol, :keypress] Specifies the event type. For a keypress action, this property is ... + # @param type [Symbol, :keypress] Specifies the event type. For a keypress action, this property is end class Move < OpenAI::Internal::Type::BaseModel @@ -302,16 +302,15 @@ class Move < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :move) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move} for more - # details. 
+ # {OpenAI::Responses::ResponseComputerToolCall::Action::Move} for more details. # # A mouse move action. # - # @param x [Integer] The x-coordinate to move to. ... + # @param x [Integer] The x-coordinate to move to. # - # @param y_ [Integer] The y-coordinate to move to. ... + # @param y_ [Integer] The y-coordinate to move to. # - # @param type [Symbol, :move] Specifies the event type. For a move action, this property is ... + # @param type [Symbol, :move] Specifies the event type. For a move action, this property is end class Screenshot < OpenAI::Internal::Type::BaseModel @@ -324,12 +323,12 @@ class Screenshot < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot} for - # more details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot} for more + # details. # # A screenshot action. # - # @param type [Symbol, :screenshot] Specifies the event type. For a screenshot action, this property is ... + # @param type [Symbol, :screenshot] Specifies the event type. For a screenshot action, this property is end class Scroll < OpenAI::Internal::Type::BaseModel @@ -366,20 +365,19 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Scroll} for more details. # # A scroll action. # - # @param scroll_x [Integer] The horizontal scroll distance. ... + # @param scroll_x [Integer] The horizontal scroll distance. # - # @param scroll_y [Integer] The vertical scroll distance. ... + # @param scroll_y [Integer] The vertical scroll distance. # - # @param x [Integer] The x-coordinate where the scroll occurred. ... + # @param x [Integer] The x-coordinate where the scroll occurred. # - # @param y_ [Integer] The y-coordinate where the scroll occurred. ... + # @param y_ [Integer] The y-coordinate where the scroll occurred. # - # @param type [Symbol, :scroll] Specifies the event type. For a scroll action, this property is ... + # @param type [Symbol, :scroll] Specifies the event type. For a scroll action, this property is end class Type < OpenAI::Internal::Type::BaseModel @@ -398,14 +396,13 @@ class Type < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :type) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Type} for more details. # # An action to type in text. # - # @param text [String] The text to type. ... + # @param text [String] The text to type. # - # @param type [Symbol, :type] Specifies the event type. For a type action, this property is ... + # @param type [Symbol, :type] Specifies the event type. For a type action, this property is end class Wait < OpenAI::Internal::Type::BaseModel @@ -418,16 +415,15 @@ class Wait < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :wait) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCall::Action::Wait} for more details. # # A wait action. # - # @param type [Symbol, :wait] Specifies the event type. 
For a wait action, this property is ... + # @param type [Symbol, :wait] Specifies the event type. For a wait action, this property is end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] + # @return [Array(OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait)] end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel @@ -462,7 +458,7 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseComputerToolCall#status + # @see OpenAI::Responses::ResponseComputerToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -476,7 +472,7 @@ module Status # The type of the computer call. Always `computer_call`. # - # @see OpenAI::Models::Responses::ResponseComputerToolCall#type + # @see OpenAI::Responses::ResponseComputerToolCall#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 1b404766..f18b9a16 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -19,8 +19,8 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - required :output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot } + # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type # The type of the computer tool call output. Always `computer_call_output`. @@ -32,33 +32,34 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. 
#
- # @return [Array<OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck>, nil]
+ # @return [Array<OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck>, nil]
optional :acknowledged_safety_checks,
- -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] }
+ -> {
+ OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]
+ }
# @!attribute status
# The status of the message input. One of `in_progress`, `completed`, or
# `incomplete`. Populated when input items are returned via API.
#
- # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil]
- optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status }
+ # @return [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status, nil]
+ optional :status, enum: -> { OpenAI::Responses::ResponseComputerToolCallOutputItem::Status }
# @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output)
# Some parameter documentations has been truncated, see
- # {OpenAI::Models::Responses::ResponseComputerToolCallOutputItem} for more
- # details.
+ # {OpenAI::Responses::ResponseComputerToolCallOutputItem} for more details.
#
- # @param id [String] The unique ID of the computer call tool output. ...
+ # @param id [String] The unique ID of the computer call tool output.
#
- # @param call_id [String] The ID of the computer tool call that produced the output. ...
+ # @param call_id [String] The ID of the computer tool call that produced the output.
#
- # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. ...
+ # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool.
#
- # @param acknowledged_safety_checks [Array<OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck>] The safety checks reported by the API that have been acknowledged by the ...
+ # @param acknowledged_safety_checks [Array<OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck>] The safety checks reported by the API that have been acknowledged by the
#
- # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or ...
+ # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or
#
- # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. ...
+ # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`.
class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel
# @!attribute id
@@ -92,7 +93,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel
# The status of the message input. One of `in_progress`, `completed`, or
# `incomplete`. Populated when input items are returned via API.
# - # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status + # @see OpenAI::Responses::ResponseComputerToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index e42b2d78..962d8ed7 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -25,8 +25,7 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot} for more - # details. + # {OpenAI::Responses::ResponseComputerToolCallOutputScreenshot} for more details. # # A computer screenshot image used with the computer use tool. # @@ -34,7 +33,7 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # # @param image_url [String] The URL of the screenshot image. # - # @param type [Symbol, :computer_screenshot] Specifies the event type. For a computer screenshot, this property is ... + # @param type [Symbol, :computer_screenshot] Specifies the event type. For a computer screenshot, this property is end end end diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index ed9ff454..95e1afdd 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -8,22 +8,22 @@ module ResponseContent extend OpenAI::Internal::Type::Union # A text input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputText } + variant -> { OpenAI::Responses::ResponseInputText } # An image input to the model. Learn about [image inputs](https://platform.openai.com/docs/guides/vision). - variant -> { OpenAI::Models::Responses::ResponseInputImage } + variant -> { OpenAI::Responses::ResponseInputImage } # A file input to the model. - variant -> { OpenAI::Models::Responses::ResponseInputFile } + variant -> { OpenAI::Responses::ResponseInputFile } # A text output from the model. - variant -> { OpenAI::Models::Responses::ResponseOutputText } + variant -> { OpenAI::Responses::ResponseOutputText } # A refusal from the model. 
- variant -> { OpenAI::Models::Responses::ResponseOutputRefusal } + variant -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile, OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 86328573..1bb0cddf 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -25,8 +25,8 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that was added. # - # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - required :part, union: -> { OpenAI::Models::Responses::ResponseContentPartAddedEvent::Part } + # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + required :part, union: -> { OpenAI::Responses::ResponseContentPartAddedEvent::Part } # @!attribute type # The type of the event. Always `response.content_part.added`. @@ -36,36 +36,36 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseContentPartAddedEvent} for more details. + # {OpenAI::Responses::ResponseContentPartAddedEvent} for more details. # # Emitted when a new content part is added. # - # @param content_index [Integer] The index of the content part that was added. ... + # @param content_index [Integer] The index of the content part that was added. # - # @param item_id [String] The ID of the output item that the content part was added to. ... + # @param item_id [String] The ID of the output item that the content part was added to. # - # @param output_index [Integer] The index of the output item that the content part was added to. ... + # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that was added. ... + # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that was added. # - # @param type [Symbol, :"response.content_part.added"] The type of the event. Always `response.content_part.added`. ... + # @param type [Symbol, :"response.content_part.added"] The type of the event. Always `response.content_part.added`. # The content part that was added. # - # @see OpenAI::Models::Responses::ResponseContentPartAddedEvent#part + # @see OpenAI::Responses::ResponseContentPartAddedEvent#part module Part extend OpenAI::Internal::Type::Union discriminator :type # A text output from the model. - variant :output_text, -> { OpenAI::Models::Responses::ResponseOutputText } + variant :output_text, -> { OpenAI::Responses::ResponseOutputText } # A refusal from the model. 
- variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } + variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 2858e020..56b1e274 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -25,8 +25,8 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that is done. # - # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - required :part, union: -> { OpenAI::Models::Responses::ResponseContentPartDoneEvent::Part } + # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + required :part, union: -> { OpenAI::Responses::ResponseContentPartDoneEvent::Part } # @!attribute type # The type of the event. Always `response.content_part.done`. @@ -36,36 +36,36 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseContentPartDoneEvent} for more details. + # {OpenAI::Responses::ResponseContentPartDoneEvent} for more details. # # Emitted when a content part is done. # - # @param content_index [Integer] The index of the content part that is done. ... + # @param content_index [Integer] The index of the content part that is done. # - # @param item_id [String] The ID of the output item that the content part was added to. ... + # @param item_id [String] The ID of the output item that the content part was added to. # - # @param output_index [Integer] The index of the output item that the content part was added to. ... + # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that is done. ... + # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that is done. # - # @param type [Symbol, :"response.content_part.done"] The type of the event. Always `response.content_part.done`. ... + # @param type [Symbol, :"response.content_part.done"] The type of the event. Always `response.content_part.done`. # The content part that is done. # - # @see OpenAI::Models::Responses::ResponseContentPartDoneEvent#part + # @see OpenAI::Responses::ResponseContentPartDoneEvent#part module Part extend OpenAI::Internal::Type::Union discriminator :type # A text output from the model. - variant :output_text, -> { OpenAI::Models::Responses::ResponseOutputText } + variant :output_text, -> { OpenAI::Responses::ResponseOutputText } # A refusal from the model. 
- variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } + variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 13a1bb6b..4a38c09e 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -21,8 +21,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @return [String, Array] - required :input, union: -> { OpenAI::Models::Responses::ResponseCreateParams::Input } + # @return [String, Array] + required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } # @!attribute model # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a @@ -31,8 +31,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - required :model, union: -> { OpenAI::Models::ResponsesModel } + # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + required :model, union: -> { OpenAI::ResponsesModel } # @!attribute include # Specify additional output data to include in the model response. Currently @@ -49,9 +49,9 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). # - # @return [Array, nil] + # @return [Array, nil] optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] }, + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }, nil?: true # @!attribute instructions @@ -104,8 +104,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Models::Reasoning, nil] - optional :reasoning, -> { OpenAI::Models::Reasoning }, nil?: true + # @return [OpenAI::Reasoning, nil] + optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier # Specifies the latency tier to use for processing the request. This parameter is @@ -126,10 +126,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] - optional :service_tier, - enum: -> { OpenAI::Models::Responses::ResponseCreateParams::ServiceTier }, - nil?: true + # @return [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] + optional :service_tier, enum: -> { OpenAI::Responses::ResponseCreateParams::ServiceTier }, nil?: true # @!attribute store # Whether to store the generated model response for later retrieval via API. 
@@ -153,16 +151,16 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] - optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } + # @return [OpenAI::Responses::ResponseTextConfig, nil] + optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] - optional :tool_choice, union: -> { OpenAI::Models::Responses::ResponseCreateParams::ToolChoice } + # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, nil] + optional :tool_choice, union: -> { OpenAI::Responses::ResponseCreateParams::ToolChoice } # @!attribute tools # An array of tools the model may call while generating a response. You can @@ -180,8 +178,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the @@ -202,8 +200,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] - optional :truncation, enum: -> { OpenAI::Models::Responses::ResponseCreateParams::Truncation }, nil?: true + # @return [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] + optional :truncation, enum: -> { OpenAI::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -217,46 +215,41 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ... + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently ... + # @param include [Array, nil] Specify additional output data to include in the model response. 
Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context - # ... # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ... + # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # - # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ... + # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You ... + # @param tools [Array] An array of tools the model may call while generating a response. You # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ... + # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -278,10 +271,10 @@ module Input # A list of one or many input items to the model, containing # different content types. - variant -> { OpenAI::Models::Responses::ResponseInput } + variant -> { OpenAI::Responses::ResponseInput } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -326,17 +319,17 @@ module ToolChoice # more tools. # # `required` means the model must call one or more tools. - variant enum: -> { OpenAI::Models::Responses::ToolChoiceOptions } + variant enum: -> { OpenAI::Responses::ToolChoiceOptions } # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). - variant -> { OpenAI::Models::Responses::ToolChoiceTypes } + variant -> { OpenAI::Responses::ToolChoiceTypes } # Use this option to force the model to call a specific function. - variant -> { OpenAI::Models::Responses::ToolChoiceFunction } + variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] + # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] end # The truncation strategy to use for the model response. diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index 0c8408d0..ec044aac 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -7,8 +7,8 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was created. # - # @return [OpenAI::Models::Responses::Response] - required :response, -> { OpenAI::Models::Responses::Response } + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } # @!attribute type # The type of the event. Always `response.created`. @@ -18,13 +18,13 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, type: :"response.created") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCreatedEvent} for more details. + # {OpenAI::Responses::ResponseCreatedEvent} for more details. # # An event that is emitted when a response is created. # - # @param response [OpenAI::Models::Responses::Response] The response that was created. ... + # @param response [OpenAI::Responses::Response] The response that was created. # - # @param type [Symbol, :"response.created"] The type of the event. Always `response.created`. ... + # @param type [Symbol, :"response.created"] The type of the event. Always `response.created`. end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 3e4e38bc..45c2dda9 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -7,8 +7,8 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!attribute code # The error code for the response. 
# - # @return [Symbol, OpenAI::Models::Responses::ResponseError::Code] - required :code, enum: -> { OpenAI::Models::Responses::ResponseError::Code } + # @return [Symbol, OpenAI::Responses::ResponseError::Code] + required :code, enum: -> { OpenAI::Responses::ResponseError::Code } # @!attribute message # A human-readable description of the error. @@ -18,17 +18,17 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseError} for more details. + # {OpenAI::Responses::ResponseError} for more details. # # An error object returned when the model fails to generate a Response. # - # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] The error code for the response. ... + # @param code [Symbol, OpenAI::Responses::ResponseError::Code] The error code for the response. # - # @param message [String] A human-readable description of the error. ... + # @param message [String] A human-readable description of the error. # The error code for the response. # - # @see OpenAI::Models::Responses::ResponseError#code + # @see OpenAI::Responses::ResponseError#code module Code extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index c8ad2c34..651a400d 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -30,17 +30,17 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:, param:, type: :error) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseErrorEvent} for more details. + # {OpenAI::Responses::ResponseErrorEvent} for more details. # # Emitted when an error occurs. # - # @param code [String, nil] The error code. ... + # @param code [String, nil] The error code. # - # @param message [String] The error message. ... + # @param message [String] The error message. # - # @param param [String, nil] The error parameter. ... + # @param param [String, nil] The error parameter. # - # @param type [Symbol, :error] The type of the event. Always `error`. ... + # @param type [Symbol, :error] The type of the event. Always `error`. end end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index 54ee8e5c..f3454dd5 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -7,8 +7,8 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that failed. # - # @return [OpenAI::Models::Responses::Response] - required :response, -> { OpenAI::Models::Responses::Response } + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } # @!attribute type # The type of the event. Always `response.failed`. @@ -18,13 +18,13 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, type: :"response.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFailedEvent} for more details. + # {OpenAI::Responses::ResponseFailedEvent} for more details. # # An event that is emitted when a response fails. # - # @param response [OpenAI::Models::Responses::Response] The response that failed. ... 
+ # @param response [OpenAI::Responses::Response] The response that failed. # - # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`. ... + # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`. end end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 1afd9153..5c616920 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -24,16 +24,15 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent} for more - # details. + # {OpenAI::Responses::ResponseFileSearchCallCompletedEvent} for more details. # # Emitted when a file search call is completed (results found). # - # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # @param item_id [String] The ID of the output item that the file search call is initiated. # - # @param output_index [Integer] The index of the output item that the file search call is initiated. ... + # @param output_index [Integer] The index of the output item that the file search call is initiated. # - # @param type [Symbol, :"response.file_search_call.completed"] The type of the event. Always `response.file_search_call.completed`. ... + # @param type [Symbol, :"response.file_search_call.completed"] The type of the event. Always `response.file_search_call.completed`. end end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 74e08290..15ff92de 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -24,16 +24,15 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent} for more - # details. + # {OpenAI::Responses::ResponseFileSearchCallInProgressEvent} for more details. # # Emitted when a file search call is initiated. # - # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # @param item_id [String] The ID of the output item that the file search call is initiated. # - # @param output_index [Integer] The index of the output item that the file search call is initiated. ... + # @param output_index [Integer] The index of the output item that the file search call is initiated. # - # @param type [Symbol, :"response.file_search_call.in_progress"] The type of the event. Always `response.file_search_call.in_progress`. ... + # @param type [Symbol, :"response.file_search_call.in_progress"] The type of the event. Always `response.file_search_call.in_progress`. 
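Taken together, the hunks above shorten every doc reference and lazy type lambda from OpenAI::Models::Responses::* to OpenAI::Responses::* and drop the stray "..." truncation markers from the @param docs. As a quick orientation for the shortened constants, here is a minimal construction sketch against the initializer documented for ResponseErrorEvent above (a sketch only: the require line and the field values are assumptions, not part of this patch):

    require "openai"  # assumed gem entry point

    # `type` defaults to :error per the @!method signature above.
    event = OpenAI::Responses::ResponseErrorEvent.new(
      code: "rate_limit_exceeded",                 # String or nil per the docs
      message: "You exceeded your current quota.", # invented message text
      param: nil
    )
    event.type # => :error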
end end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 118b323f..58540d2a 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -24,16 +24,15 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent} for more - # details. + # {OpenAI::Responses::ResponseFileSearchCallSearchingEvent} for more details. # # Emitted when a file search is currently searching. # - # @param item_id [String] The ID of the output item that the file search call is initiated. ... + # @param item_id [String] The ID of the output item that the file search call is initiated. # - # @param output_index [Integer] The index of the output item that the file search call is searching. ... + # @param output_index [Integer] The index of the output item that the file search call is searching. # - # @param type [Symbol, :"response.file_search_call.searching"] The type of the event. Always `response.file_search_call.searching`. ... + # @param type [Symbol, :"response.file_search_call.searching"] The type of the event. Always `response.file_search_call.searching`. end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 71462ed2..cc1b88ec 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -20,8 +20,8 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @return [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] - required :status, enum: -> { OpenAI::Models::Responses::ResponseFileSearchToolCall::Status } + # @return [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseFileSearchToolCall::Status } # @!attribute type # The type of the file search tool call. Always `file_search_call`. @@ -32,33 +32,35 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the file search tool call. # - # @return [Array, nil] + # @return [Array, nil] optional :results, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result] + }, nil?: true # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFileSearchToolCall} for more details. + # {OpenAI::Responses::ResponseFileSearchToolCall} for more details. # # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) # for more information. # - # @param id [String] The unique ID of the file search tool call. ... + # @param id [String] The unique ID of the file search tool call. 
# - # @param queries [Array] The queries used to search for files. ... + # @param queries [Array] The queries used to search for files. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, ... + # @param status [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, # - # @param results [Array, nil] The results of the file search tool call. ... + # @param results [Array, nil] The results of the file search tool call. # - # @param type [Symbol, :file_search_call] The type of the file search tool call. Always `file_search_call`. ... + # @param type [Symbol, :file_search_call] The type of the file search tool call. Always `file_search_call`. # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status + # @see OpenAI::Responses::ResponseFileSearchToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -82,7 +84,9 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::Attribute] }, + -> { + OpenAI::Internal::Type::HashOf[union: OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute] + }, nil?: true # @!attribute file_id @@ -111,18 +115,17 @@ class Result < OpenAI::Internal::Type::BaseModel # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFileSearchToolCall::Result} for more - # details. + # {OpenAI::Responses::ResponseFileSearchToolCall::Result} for more details. # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param file_id [String] The unique ID of the file. ... + # @param file_id [String] The unique ID of the file. # - # @param filename [String] The name of the file. ... + # @param filename [String] The name of the file. # - # @param score [Float] The relevance score of the file - a value between 0 and 1. ... + # @param score [Float] The relevance score of the file - a value between 0 and 1. # - # @param text [String] The text that was retrieved from the file. ... + # @param text [String] The text that was retrieved from the file. module Attribute extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 6b73450e..81d0ad54 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -22,20 +22,20 @@ module ResponseFormatTextConfig discriminator :type # Default response format. Used to generate text responses. - variant :text, -> { OpenAI::Models::ResponseFormatText } + variant :text, -> { OpenAI::ResponseFormatText } # JSON Schema response format. Used to generate structured JSON responses. # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
- variant :json_schema, -> { OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig } + variant :json_schema, -> { OpenAI::Responses::ResponseFormatTextJSONSchemaConfig } # JSON object response format. An older method of generating JSON responses. # Using `json_schema` is recommended for models that support it. Note that the # model will not generate JSON without a system or user message instructing it # to do so. - variant :json_object, -> { OpenAI::Models::ResponseFormatJSONObject } + variant :json_object, -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] + # @return [Array(OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject)] end end end diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index d2ac2772..c4e33d24 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -43,20 +43,19 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig} for more - # details. + # {OpenAI::Responses::ResponseFormatTextJSONSchemaConfig} for more details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain ... + # @param name [String] The name of the response format. Must be a-z, A-Z, 0-9, or contain # - # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. ... + # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. # - # @param description [String] A description of what the response format is for, used by the model to ... + # @param description [String] A description of what the response format is for, used by the model to # - # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. ... + # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. # # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 57e63c2b..05a59f54 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -30,20 +30,17 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more - # details. 
+ # {OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more details. # # Emitted when there is a partial function-call arguments delta. # - # @param delta [String] The function-call arguments delta that is added. ... + # @param delta [String] The function-call arguments delta that is added. # # @param item_id [String] The ID of the output item that the function-call arguments delta is added to. - # ... # # @param output_index [Integer] The index of the output item that the function-call arguments delta is added to. - # ... # - # @param type [Symbol, :"response.function_call_arguments.delta"] The type of the event. Always `response.function_call_arguments.delta`. ... + # @param type [Symbol, :"response.function_call_arguments.delta"] The type of the event. Always `response.function_call_arguments.delta`. end end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 1aa5d417..e972d9e8 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -38,33 +38,33 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCall::Status } + # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCall::Status } # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFunctionToolCall} for more details. + # {OpenAI::Responses::ResponseFunctionToolCall} for more details. # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. # - # @param arguments [String] A JSON string of the arguments to pass to the function. ... + # @param arguments [String] A JSON string of the arguments to pass to the function. # - # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # @param call_id [String] The unique ID of the function tool call generated by the model. # - # @param name [String] The name of the function to run. ... + # @param name [String] The name of the function to run. # - # @param id [String] The unique ID of the function tool call. ... + # @param id [String] The unique ID of the function tool call. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, :function_call] The type of the function tool call. Always `function_call`. ... + # @param type [Symbol, :function_call] The type of the function tool call. Always `function_call`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
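Because a ResponseFunctionToolCall carries its arguments as one JSON string, while streaming delivers them piecewise through response.function_call_arguments.delta events, a consumer typically buffers deltas per item_id and parses once the stream ends. A hedged sketch of that pattern (`events` stands in for whatever enumerator the streaming client yields; it is not named in this patch):

    require "json"
    require "openai"  # assumed gem entry point

    # Buffer partial function-call arguments, keyed by the output item ID.
    buffers = Hash.new { |h, k| h[k] = +"" }

    events.each do |event|
      case event
      in OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent
        buffers[event.item_id] << event.delta
      else
        # other stream events are ignored in this sketch
      end
    end

    # Each buffered string is a complete JSON arguments document by now.
    parsed = buffers.transform_values { |json| JSON.parse(json) }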
# - # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status + # @see OpenAI::Responses::ResponseFunctionToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index e304dda3..e113338f 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -12,13 +12,13 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFunctionToolCallItem} for more details. + # {OpenAI::Responses::ResponseFunctionToolCallItem} for more details. # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) # for more information. # - # @param id [String] The unique ID of the function tool call. ... + # @param id [String] The unique ID of the function tool call. end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 8cc6084a..53e8afd3 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -32,28 +32,27 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } + # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem} for more - # details. + # {OpenAI::Responses::ResponseFunctionToolCallOutputItem} for more details. # - # @param id [String] The unique ID of the function call tool output. ... + # @param id [String] The unique ID of the function call tool output. # - # @param call_id [String] The unique ID of the function tool call generated by the model. ... + # @param call_id [String] The unique ID of the function tool call generated by the model. # - # @param output [String] A JSON string of the output of the function tool call. ... + # @param output [String] A JSON string of the output of the function tool call. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. ... + # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
# Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status + # @see OpenAI::Responses::ResponseFunctionToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index c08e4e32..f69ad27a 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -13,8 +13,8 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the web search tool call. # - # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] - required :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionWebSearch::Status } + # @return [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] + required :status, enum: -> { OpenAI::Responses::ResponseFunctionWebSearch::Status } # @!attribute type # The type of the web search tool call. Always `web_search_call`. @@ -24,21 +24,21 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, status:, type: :web_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseFunctionWebSearch} for more details. + # {OpenAI::Responses::ResponseFunctionWebSearch} for more details. # # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for # more information. # - # @param id [String] The unique ID of the web search tool call. ... + # @param id [String] The unique ID of the web search tool call. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. ... + # @param status [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. # - # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. ... + # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. # The status of the web search tool call. # - # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status + # @see OpenAI::Responses::ResponseFunctionWebSearch#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index f095d74d..788e9ea6 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -7,8 +7,8 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that is in progress. # - # @return [OpenAI::Models::Responses::Response] - required :response, -> { OpenAI::Models::Responses::Response } + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } # @!attribute type # The type of the event. Always `response.in_progress`. @@ -18,13 +18,13 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, type: :"response.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInProgressEvent} for more details. + # {OpenAI::Responses::ResponseInProgressEvent} for more details. 
# # Emitted when the response is in progress. # - # @param response [OpenAI::Models::Responses::Response] The response that is in progress. ... + # @param response [OpenAI::Responses::Response] The response that is in progress. # - # @param type [Symbol, :"response.in_progress"] The type of the event. Always `response.in_progress`. ... + # @param type [Symbol, :"response.in_progress"] The type of the event. Always `response.in_progress`. end end end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index e23ef3a8..d33b32fc 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -7,8 +7,8 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was incomplete. # - # @return [OpenAI::Models::Responses::Response] - required :response, -> { OpenAI::Models::Responses::Response } + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } # @!attribute type # The type of the event. Always `response.incomplete`. @@ -18,13 +18,13 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, type: :"response.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseIncompleteEvent} for more details. + # {OpenAI::Responses::ResponseIncompleteEvent} for more details. # # An event that is emitted when a response finishes as incomplete. # - # @param response [OpenAI::Models::Responses::Response] The response that was incomplete. ... + # @param response [OpenAI::Responses::Response] The response that was incomplete. # - # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`. ... + # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`. end end end diff --git a/lib/openai/models/responses/response_input.rb b/lib/openai/models/responses/response_input.rb index aa4de811..e23a7b8d 100644 --- a/lib/openai/models/responses/response_input.rb +++ b/lib/openai/models/responses/response_input.rb @@ -4,8 +4,7 @@ module OpenAI module Models module Responses # @type [OpenAI::Internal::Type::Converter] - ResponseInput = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputItem }] + ResponseInput = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputItem }] end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index ea23bdcd..d403a4ca 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -13,8 +13,8 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the audio data. Currently supported formats are `mp3` and `wav`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] - required :format_, enum: -> { OpenAI::Models::Responses::ResponseInputAudio::Format }, api_name: :format + # @return [Symbol, OpenAI::Responses::ResponseInputAudio::Format] + required :format_, enum: -> { OpenAI::Responses::ResponseInputAudio::Format }, api_name: :format # @!attribute type # The type of the input item. Always `input_audio`. 
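The api_name: :format mapping above is what lets the Ruby attribute be called format_ (sidestepping the Kernel#format collision) while the serialized payload keeps the API's format key. A small usage sketch, assuming the enum members are plain symbols and using a placeholder file path:

    require "base64"
    require "openai"  # assumed gem entry point

    audio = OpenAI::Responses::ResponseInputAudio.new(
      data: Base64.strict_encode64(File.binread("clip.mp3")), # placeholder path
      format_: :mp3                                           # serialized under the "format" key
    )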
@@ -24,19 +24,19 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, format_:, type: :input_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputAudio} for more details. + # {OpenAI::Responses::ResponseInputAudio} for more details. # # An audio input to the model. # - # @param data [String] Base64-encoded audio data. ... + # @param data [String] Base64-encoded audio data. # - # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and ... + # @param format_ [Symbol, OpenAI::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and # - # @param type [Symbol, :input_audio] The type of the input item. Always `input_audio`. ... + # @param type [Symbol, :input_audio] The type of the input item. Always `input_audio`. # The format of the audio data. Currently supported formats are `mp3` and `wav`. # - # @see OpenAI::Models::Responses::ResponseInputAudio#format_ + # @see OpenAI::Responses::ResponseInputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 806d9c26..7e349985 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -10,16 +10,16 @@ module ResponseInputContent discriminator :type # A text input to the model. - variant :input_text, -> { OpenAI::Models::Responses::ResponseInputText } + variant :input_text, -> { OpenAI::Responses::ResponseInputText } # An image input to the model. Learn about [image inputs](https://platform.openai.com/docs/guides/vision). - variant :input_image, -> { OpenAI::Models::Responses::ResponseInputImage } + variant :input_image, -> { OpenAI::Responses::ResponseInputImage } # A file input to the model. - variant :input_file, -> { OpenAI::Models::Responses::ResponseInputFile } + variant :input_file, -> { OpenAI::Responses::ResponseInputFile } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] + # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile)] end end end diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 9837a2e0..3c992986 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -30,11 +30,11 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputFile} for more details. + # {OpenAI::Responses::ResponseInputFile} for more details. # # A file input to the model. # - # @param file_data [String] The content of the file to be sent to the model. ... + # @param file_data [String] The content of the file to be sent to the model. # # @param file_id [String, nil] The ID of the file to be sent to the model. 
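Since file_data, file_id, and filename are all optional on ResponseInputFile, a file input can either inline the bytes or point at a previously uploaded file. A hedged sketch referencing an upload by ID (the ID is a placeholder):

    require "openai"  # assumed gem entry point

    # `type` defaults to :input_file per the signature above.
    file_part = OpenAI::Responses::ResponseInputFile.new(file_id: "file-abc123")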
# diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 643a2e3d..242f378e 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -8,8 +8,8 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] - required :detail, enum: -> { OpenAI::Models::Responses::ResponseInputImage::Detail } + # @return [Symbol, OpenAI::Responses::ResponseInputImage::Detail] + required :detail, enum: -> { OpenAI::Responses::ResponseInputImage::Detail } # @!attribute type # The type of the input item. Always `input_image`. @@ -32,25 +32,23 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputImage} for more details. + # {OpenAI::Responses::ResponseInputImage} for more details. # # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or - # ... + # @param detail [Symbol, OpenAI::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or # # @param file_id [String, nil] The ID of the file to be sent to the model. # # @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or base64 en - # ... # # @param type [Symbol, :input_image] The type of the input item. Always `input_image`. # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # - # @see OpenAI::Models::Responses::ResponseInputImage#detail + # @see OpenAI::Responses::ResponseInputImage#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 95e8dbe5..8ce7e375 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -18,94 +18,96 @@ module ResponseInputItem # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, -> { OpenAI::Models::Responses::EasyInputMessage } + variant :message, -> { OpenAI::Responses::EasyInputMessage } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. - variant :message, -> { OpenAI::Models::Responses::ResponseInputItem::Message } + variant :message, -> { OpenAI::Responses::ResponseInputItem::Message } # An output message from the model. - variant :message, -> { OpenAI::Models::Responses::ResponseOutputMessage } + variant :message, -> { OpenAI::Responses::ResponseOutputMessage } # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. 
- variant :file_search_call, -> { OpenAI::Models::Responses::ResponseFileSearchToolCall } + variant :file_search_call, -> { OpenAI::Responses::ResponseFileSearchToolCall } # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. - variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall } + variant :computer_call, -> { OpenAI::Responses::ResponseComputerToolCall } # The output of a computer tool call. - variant :computer_call_output, -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput } + variant :computer_call_output, -> { OpenAI::Responses::ResponseInputItem::ComputerCallOutput } # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. - variant :web_search_call, -> { OpenAI::Models::Responses::ResponseFunctionWebSearch } + variant :web_search_call, -> { OpenAI::Responses::ResponseFunctionWebSearch } # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. - variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCall } + variant :function_call, -> { OpenAI::Responses::ResponseFunctionToolCall } # The output of a function tool call. - variant :function_call_output, -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput } + variant :function_call_output, -> { OpenAI::Responses::ResponseInputItem::FunctionCallOutput } # A description of the chain of thought used by a reasoning model while generating # a response. Be sure to include these items in your `input` to the Responses API # for subsequent turns of a conversation if you are manually # [managing context](https://platform.openai.com/docs/guides/conversation-state). - variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } + variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } # An internal identifier for an item to reference. - variant :item_reference, -> { OpenAI::Models::Responses::ResponseInputItem::ItemReference } + variant :item_reference, -> { OpenAI::Responses::ResponseInputItem::ItemReference } class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of one or many input items to the model, containing different content # types. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] + } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] - required :role, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Role } + # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] + required :role, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Status } + # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type, nil] - optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Type } + # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type, nil] + optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Type } # @!method initialize(content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputItem::Message} for more details. + # {OpenAI::Responses::ResponseInputItem::Message} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. # - # @param content [Array] A list of one or many input items to the model, containing different content ... + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. ... + # @param role [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. ... + # @param type [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Models::Responses::ResponseInputItem::Message#role + # @see OpenAI::Responses::ResponseInputItem::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -120,7 +122,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseInputItem::Message#status + # @see OpenAI::Responses::ResponseInputItem::Message#status module Status extend OpenAI::Internal::Type::Enum @@ -134,7 +136,7 @@ module Status # The type of the message input. Always set to `message`. # - # @see OpenAI::Models::Responses::ResponseInputItem::Message#type + # @see OpenAI::Responses::ResponseInputItem::Message#type module Type extend OpenAI::Internal::Type::Enum @@ -155,8 +157,8 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. 
# - # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - required :output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot } + # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type # The type of the computer tool call output. Always `computer_call_output`. @@ -174,38 +176,37 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. # - # @return [Array, nil] + # @return [Array, nil] optional :acknowledged_safety_checks, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] }, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + }, nil?: true # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] + # @return [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, - enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status }, + enum: -> { OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput} for more - # details. + # {OpenAI::Responses::ResponseInputItem::ComputerCallOutput} for more details. # # The output of a computer tool call. # # @param call_id [String] The ID of the computer tool call that produced the output. # - # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. ... + # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. # # @param id [String, nil] The ID of the computer tool call output. # - # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop - # ... + # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple - # ... + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple # # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. @@ -241,7 +242,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
# - # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status + # @see OpenAI::Responses::ResponseInputItem::ComputerCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -284,15 +285,14 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] + # @return [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, - enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status }, + enum: -> { OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput} for more - # details. + # {OpenAI::Responses::ResponseInputItem::FunctionCallOutput} for more details. # # The output of a function tool call. # @@ -301,17 +301,15 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @param output [String] A JSON string of the output of the function tool call. # # @param id [String, nil] The unique ID of the function tool call output. Populated when this item is retu - # ... # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu - # ... + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu # # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status + # @see OpenAI::Responses::ResponseInputItem::FunctionCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -334,19 +332,19 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of item to reference. Always `item_reference`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] - optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type }, nil?: true + # @return [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] + optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::ItemReference::Type }, nil?: true # @!method initialize(id:, type: nil) # An internal identifier for an item to reference. # # @param id [String] The ID of the item to reference. # - # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. + # @param type [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. # The type of item to reference. Always `item_reference`. 
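FunctionCallOutput (and its computer-use sibling above) closes the tool-use loop: the follow-up request echoes the model-generated call_id together with the tool's result. A sketch of building that follow-up item (the ID and the output JSON are invented):

    require "openai"  # assumed gem entry point

    result = OpenAI::Responses::ResponseInputItem::FunctionCallOutput.new(
      call_id: "call_abc123",   # must echo the call_id the model produced
      output: '{"temp_c": 21}'  # tool result, serialized as a JSON string
    )
    result.type # => :function_call_output (defaulted)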
# - # @see OpenAI::Models::Responses::ResponseInputItem::ItemReference#type + # @see OpenAI::Responses::ResponseInputItem::ItemReference#type module Type extend OpenAI::Internal::Type::Enum @@ -358,7 +356,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] + # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_input_message_content_list.rb b/lib/openai/models/responses/response_input_message_content_list.rb index 888f3f1c..517253ab 100644 --- a/lib/openai/models/responses/response_input_message_content_list.rb +++ b/lib/openai/models/responses/response_input_message_content_list.rb @@ -5,7 +5,7 @@ module Models module Responses # @type [OpenAI::Internal::Type::Converter] ResponseInputMessageContentList = - OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Responses::ResponseInputContent }] + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputContent }] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index f188d81c..6bad6d0f 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -14,46 +14,48 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] + } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] - required :role, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Role } + # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] + required :role, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Status } + # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] - optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Type } + # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type, nil] + optional :type, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Type } # @!method initialize(id:, content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseInputMessageItem} for more details. + # {OpenAI::Responses::ResponseInputMessageItem} for more details. # - # @param id [String] The unique ID of the message input. ... + # @param id [String] The unique ID of the message input. # - # @param content [Array] A list of one or many input items to the model, containing different content ... + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. ... + # @param role [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. ... + # @param type [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Models::Responses::ResponseInputMessageItem#role + # @see OpenAI::Responses::ResponseInputMessageItem#role module Role extend OpenAI::Internal::Type::Enum @@ -68,7 +70,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseInputMessageItem#status + # @see OpenAI::Responses::ResponseInputMessageItem#status module Status extend OpenAI::Internal::Type::Enum @@ -82,7 +84,7 @@ module Status # The type of the message input. Always set to `message`. 
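ResponseInputMessageItem is the read-side mirror of the writable ResponseInputItem::Message shown earlier, with the same content/role/status/type shape plus a server-assigned id. For contrast, a write-side sketch (assuming ResponseInputText accepts a text: keyword and that roles are plain symbols; neither detail is shown in this patch):

    require "openai"  # assumed gem entry point

    msg = OpenAI::Responses::ResponseInputItem::Message.new(
      role: :user,
      content: [
        OpenAI::Responses::ResponseInputText.new(text: "Summarize the attached file.") # assumed signature
      ]
    )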
# - # @see OpenAI::Models::Responses::ResponseInputMessageItem#type + # @see OpenAI::Responses::ResponseInputMessageItem#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index 42dcb8be..e0bd4301 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -9,33 +9,33 @@ module ResponseItem discriminator :type - variant :message, -> { OpenAI::Models::Responses::ResponseInputMessageItem } + variant :message, -> { OpenAI::Responses::ResponseInputMessageItem } # An output message from the model. - variant :message, -> { OpenAI::Models::Responses::ResponseOutputMessage } + variant :message, -> { OpenAI::Responses::ResponseOutputMessage } # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. - variant :file_search_call, -> { OpenAI::Models::Responses::ResponseFileSearchToolCall } + variant :file_search_call, -> { OpenAI::Responses::ResponseFileSearchToolCall } # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. - variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall } + variant :computer_call, -> { OpenAI::Responses::ResponseComputerToolCall } - variant :computer_call_output, -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem } + variant :computer_call_output, -> { OpenAI::Responses::ResponseComputerToolCallOutputItem } # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. - variant :web_search_call, -> { OpenAI::Models::Responses::ResponseFunctionWebSearch } + variant :web_search_call, -> { OpenAI::Responses::ResponseFunctionWebSearch } # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. 
- variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCallItem } + variant :function_call, -> { OpenAI::Responses::ResponseFunctionToolCallItem } - variant :function_call_output, -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem } + variant :function_call_output, -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] + # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem)] end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 06e55c53..6d7d8e30 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -7,8 +7,8 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!attribute data # A list of items used to generate this response. # - # @return [Array] - required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseItem] } + # @return [Array] + required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseItem] } # @!attribute first_id # The ID of the first item in the list. @@ -37,7 +37,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) # A list of Response items. # - # @param data [Array] A list of items used to generate this response. + # @param data [Array] A list of items used to generate this response. # # @param first_id [String] The ID of the first item in the list. # diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index f66f4024..8183cdda 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -24,15 +24,15 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, transcript:, type: :output_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseOutputAudio} for more details. + # {OpenAI::Responses::ResponseOutputAudio} for more details. # # An audio output from the model. # - # @param data [String] Base64-encoded audio data from the model. ... + # @param data [String] Base64-encoded audio data from the model. # - # @param transcript [String] The transcript of the audio data from the model. ... + # @param transcript [String] The transcript of the audio data from the model. # - # @param type [Symbol, :output_audio] The type of the output audio. Always `output_audio`. ... + # @param type [Symbol, :output_audio] The type of the output audio. Always `output_audio`. 
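ResponseItemList above carries classic cursor fields (first_id, last_id, has_more), and each element of data is routed through the ResponseItem union by its :type discriminator. A hedged pagination sketch (`list_items`, `handle_message`, and `handle_call` are hypothetical application hooks, not part of this patch):

    require "openai"  # assumed gem entry point

    cursor = nil
    loop do
      page = list_items(after: cursor) # hypothetical fetch returning a ResponseItemList
      page.data.each do |item|
        case item
        in OpenAI::Responses::ResponseOutputMessage then handle_message(item)
        in OpenAI::Responses::ResponseFunctionToolCallItem then handle_call(item)
        else
          # remaining variants are dispatched the same way via :type
        end
      end
      break unless page.has_more
      cursor = page.last_id
    end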
end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 91539b62..8e653d3a 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -10,32 +10,32 @@ module ResponseOutputItem discriminator :type # An output message from the model. - variant :message, -> { OpenAI::Models::Responses::ResponseOutputMessage } + variant :message, -> { OpenAI::Responses::ResponseOutputMessage } # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. - variant :file_search_call, -> { OpenAI::Models::Responses::ResponseFileSearchToolCall } + variant :file_search_call, -> { OpenAI::Responses::ResponseFileSearchToolCall } # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. - variant :function_call, -> { OpenAI::Models::Responses::ResponseFunctionToolCall } + variant :function_call, -> { OpenAI::Responses::ResponseFunctionToolCall } # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. - variant :web_search_call, -> { OpenAI::Models::Responses::ResponseFunctionWebSearch } + variant :web_search_call, -> { OpenAI::Responses::ResponseFunctionWebSearch } # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. - variant :computer_call, -> { OpenAI::Models::Responses::ResponseComputerToolCall } + variant :computer_call, -> { OpenAI::Responses::ResponseComputerToolCall } # A description of the chain of thought used by a reasoning model while generating # a response. Be sure to include these items in your `input` to the Responses API # for subsequent turns of a conversation if you are manually # [managing context](https://platform.openai.com/docs/guides/conversation-state). - variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } + variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] + # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 3a42485d..94383e4f 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -7,8 +7,8 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. 
# - # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - required :item, union: -> { OpenAI::Models::Responses::ResponseOutputItem } + # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index # The index of the output item that was added. @@ -24,15 +24,15 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item:, output_index:, type: :"response.output_item.added") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseOutputItemAddedEvent} for more details. + # {OpenAI::Responses::ResponseOutputItemAddedEvent} for more details. # # Emitted when a new output item is added. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] The output item that was added. ... + # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was added. # - # @param output_index [Integer] The index of the output item that was added. ... + # @param output_index [Integer] The index of the output item that was added. # - # @param type [Symbol, :"response.output_item.added"] The type of the event. Always `response.output_item.added`. ... + # @param type [Symbol, :"response.output_item.added"] The type of the event. Always `response.output_item.added`. end end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index d3bfbfc3..54d941ad 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -7,8 +7,8 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. 
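
Since `response.output_item.added` above and its `response.output_item.done` counterpart just below carry the same `item` and `output_index` fields, a consumer can treat them as one reducer over an output array. A hedged sketch with plain hashes standing in for the typed events:

    outputs = []

    apply_output_item_event = lambda do |event|
      case event[:type]
      in "response.output_item.added" | "response.output_item.done"
        # `done` overwrites the partial item inserted by `added`.
        outputs[event[:output_index]] = event[:item]
      else
        nil # not an output-item event
      end
    end

    apply_output_item_event.call(
      {type: "response.output_item.added", output_index: 0, item: {id: "msg_1"}}
    )
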
# - # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - required :item, union: -> { OpenAI::Models::Responses::ResponseOutputItem } + # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index # The index of the output item that was marked done. @@ -24,15 +24,15 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item:, output_index:, type: :"response.output_item.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseOutputItemDoneEvent} for more details. + # {OpenAI::Responses::ResponseOutputItemDoneEvent} for more details. # # Emitted when an output item is marked done. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] The output item that was marked done. ... + # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was marked done. # - # @param output_index [Integer] The index of the output item that was marked done. ... + # @param output_index [Integer] The index of the output item that was marked done. # - # @param type [Symbol, :"response.output_item.done"] The type of the event. Always `response.output_item.done`. ... + # @param type [Symbol, :"response.output_item.done"] The type of the event. Always `response.output_item.done`. end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 2406dc01..2e7eee71 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -13,9 +13,11 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the output message. # - # @return [Array] + # @return [Array] required :content, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content] + } # @!attribute role # The role of the output message. Always `assistant`. @@ -27,8 +29,8 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
# - # @return [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] - required :status, enum: -> { OpenAI::Models::Responses::ResponseOutputMessage::Status } + # @return [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] + required :status, enum: -> { OpenAI::Responses::ResponseOutputMessage::Status } # @!attribute type # The type of the output message. Always `message`. @@ -38,19 +40,19 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, content:, status:, role: :assistant, type: :message) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseOutputMessage} for more details. + # {OpenAI::Responses::ResponseOutputMessage} for more details. # # An output message from the model. # - # @param id [String] The unique ID of the output message. ... + # @param id [String] The unique ID of the output message. # - # @param content [Array] The content of the output message. ... + # @param content [Array] The content of the output message. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or # - # @param role [Symbol, :assistant] The role of the output message. Always `assistant`. ... + # @param role [Symbol, :assistant] The role of the output message. Always `assistant`. # - # @param type [Symbol, :message] The type of the output message. Always `message`. ... + # @param type [Symbol, :message] The type of the output message. Always `message`. # A text output from the model. module Content @@ -59,19 +61,19 @@ module Content discriminator :type # A text output from the model. - variant :output_text, -> { OpenAI::Models::Responses::ResponseOutputText } + variant :output_text, -> { OpenAI::Responses::ResponseOutputText } # A refusal from the model. - variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } + variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] end # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @see OpenAI::Models::Responses::ResponseOutputMessage#status + # @see OpenAI::Responses::ResponseOutputMessage#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index aaeda218..6234918b 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -7,9 +7,11 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!attribute annotations # The annotations of the text output. # - # @return [Array] + # @return [Array] required :annotations, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputText::Annotation] } + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] + } # @!attribute text # The text output from the model. 
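
Given the `Content` union above (an output message part is either `output_text` or `refusal`), extracting displayable text reduces to a pattern match on the part type. A sketch under that assumption, with plain hashes standing in for the typed parts:

    def display_text(content_parts)
      content_parts.filter_map do |part|
        case part
        in {type: "output_text", text:} then text
        in {type: "refusal", refusal:} then "[refusal: #{refusal}]"
        end
      end.join
    end

    display_text([
      {type: "output_text", text: "Hello"},
      {type: "refusal", refusal: "cannot help with that"}
    ])
    # => "Hello[refusal: cannot help with that]"
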
@@ -26,7 +28,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations:, text:, type: :output_text) # A text output from the model. # - # @param annotations [Array] The annotations of the text output. + # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. # @@ -39,13 +41,13 @@ module Annotation discriminator :type # A citation to a file. - variant :file_citation, -> { OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation } + variant :file_citation, -> { OpenAI::Responses::ResponseOutputText::Annotation::FileCitation } # A citation for a web resource used to generate a model response. - variant :url_citation, -> { OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation } + variant :url_citation, -> { OpenAI::Responses::ResponseOutputText::Annotation::URLCitation } # A path to a file. - variant :file_path, -> { OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath } + variant :file_path, -> { OpenAI::Responses::ResponseOutputText::Annotation::FilePath } class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -142,20 +144,19 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, index:, type: :file_path) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath} for more - # details. + # {OpenAI::Responses::ResponseOutputText::Annotation::FilePath} for more details. # # A path to a file. # - # @param file_id [String] The ID of the file. ... + # @param file_id [String] The ID of the file. # - # @param index [Integer] The index of the file in the list of files. ... + # @param index [Integer] The index of the file in the list of files. # - # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. ... + # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] + # @return [Array(OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 5b029d34..47d20960 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -13,9 +13,9 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!attribute summary # Reasoning text contents. # - # @return [Array] + # @return [Array] required :summary, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseReasoningItem::Summary] } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Summary] } # @!attribute type # The type of the object. Always `reasoning`. @@ -34,27 +34,27 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
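
The `Annotation` union above distinguishes its three shapes by `type`, so a consumer can branch the same way. In this sketch the hashes are stand-ins and the attribute names (`file_id`, `index`, `url`, `title`) are taken from the documented models, but treat the exact key set as an assumption:

    def describe_annotation(ann)
      case ann
      in {type: "file_citation", file_id:, index:}
        "file #{file_id} cited at offset #{index}"
      in {type: "url_citation", url:, title:}
        "web source #{title.inspect} at #{url}"
      in {type: "file_path", file_id:, index:}
        "generated file #{file_id} (index #{index})"
      end
    end

    describe_annotation({type: "file_path", file_id: "file-abc", index: 0})
    # => "generated file file-abc (index 0)"
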
# - # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] - optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status } + # @return [Symbol, OpenAI::Responses::ResponseReasoningItem::Status, nil] + optional :status, enum: -> { OpenAI::Responses::ResponseReasoningItem::Status } # @!method initialize(id:, summary:, encrypted_content: nil, status: nil, type: :reasoning) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningItem} for more details. + # {OpenAI::Responses::ResponseReasoningItem} for more details. # # A description of the chain of thought used by a reasoning model while generating # a response. Be sure to include these items in your `input` to the Responses API # for subsequent turns of a conversation if you are manually # [managing context](https://platform.openai.com/docs/guides/conversation-state). # - # @param id [String] The unique identifier of the reasoning content. ... + # @param id [String] The unique identifier of the reasoning content. # - # @param summary [Array] Reasoning text contents. ... + # @param summary [Array] Reasoning text contents. # - # @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is ... + # @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is # - # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or ... + # @param status [Symbol, OpenAI::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. ... + # @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -71,17 +71,17 @@ class Summary < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :summary_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningItem::Summary} for more details. + # {OpenAI::Responses::ResponseReasoningItem::Summary} for more details. # - # @param text [String] A short summary of the reasoning used by the model when generating ... + # @param text [String] A short summary of the reasoning used by the model when generating # - # @param type [Symbol, :summary_text] The type of the object. Always `summary_text`. ... + # @param type [Symbol, :summary_text] The type of the object. Always `summary_text`. end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Models::Responses::ResponseReasoningItem#status + # @see OpenAI::Responses::ResponseReasoningItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb index 74d66131..4701beaa 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -19,8 +19,8 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The summary part that was added. 
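
As the reasoning-item docs above note, these items must be echoed back in `input` on later turns when conversation state is managed manually. A minimal sketch of that carry-forward, using plain hashes as stand-ins for the typed items:

    previous_output = [
      {type: "reasoning", id: "rs_1", summary: [], encrypted_content: nil},
      {type: "message", id: "msg_1", content: []}
    ]

    # Keep the reasoning items, then append the next user turn.
    next_input =
      previous_output.select { |item| item[:type] == "reasoning" } +
      [{role: "user", content: "And after that?"}]
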
# - # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] - required :part, -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part } + # @return [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] + required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part } # @!attribute summary_index # The index of the summary part within the reasoning summary. @@ -36,22 +36,21 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent} for more - # details. + # {OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent} for more details. # # Emitted when a new reasoning summary part is added. # - # @param item_id [String] The ID of the item this summary part is associated with. ... + # @param item_id [String] The ID of the item this summary part is associated with. # - # @param output_index [Integer] The index of the output item this summary part is associated with. ... + # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. ... + # @param part [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. # - # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # @param summary_index [Integer] The index of the summary part within the reasoning summary. # - # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. ... + # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. - # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent#part + # @see OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb index 10926c74..8b67b1ea 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -19,8 +19,8 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The completed summary part. # - # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] - required :part, -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part } + # @return [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] + required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part } # @!attribute summary_index # The index of the summary part within the reasoning summary. 
@@ -36,22 +36,21 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent} for more - # details. + # {OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent} for more details. # # Emitted when a reasoning summary part is completed. # - # @param item_id [String] The ID of the item this summary part is associated with. ... + # @param item_id [String] The ID of the item this summary part is associated with. # - # @param output_index [Integer] The index of the output item this summary part is associated with. ... + # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. ... + # @param part [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. # - # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # @param summary_index [Integer] The index of the summary part within the reasoning summary. # - # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. ... + # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. - # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent#part + # @see OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb index b56b3b80..2aaefaf3 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -36,20 +36,19 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent} for more - # details. + # {OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent} for more details. # # Emitted when a delta is added to a reasoning summary text. # - # @param delta [String] The text delta that was added to the summary. ... + # @param delta [String] The text delta that was added to the summary. # - # @param item_id [String] The ID of the item this summary text delta is associated with. ... + # @param item_id [String] The ID of the item this summary text delta is associated with. # - # @param output_index [Integer] The index of the output item this summary text delta is associated with. ... + # @param output_index [Integer] The index of the output item this summary text delta is associated with. # - # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # @param summary_index [Integer] The index of the summary part within the reasoning summary. 
# - # @param type [Symbol, :"response.reasoning_summary_text.delta"] The type of the event. Always `response.reasoning_summary_text.delta`. ... + # @param type [Symbol, :"response.reasoning_summary_text.delta"] The type of the event. Always `response.reasoning_summary_text.delta`. end end end diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb index cd78cdba..5359b84a 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -36,20 +36,19 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent} for more - # details. + # {OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent} for more details. # # Emitted when a reasoning summary text is completed. # - # @param item_id [String] The ID of the item this summary text is associated with. ... + # @param item_id [String] The ID of the item this summary text is associated with. # - # @param output_index [Integer] The index of the output item this summary text is associated with. ... + # @param output_index [Integer] The index of the output item this summary text is associated with. # - # @param summary_index [Integer] The index of the summary part within the reasoning summary. ... + # @param summary_index [Integer] The index of the summary part within the reasoning summary. # - # @param text [String] The full text of the completed reasoning summary. ... + # @param text [String] The full text of the completed reasoning summary. # - # @param type [Symbol, :"response.reasoning_summary_text.done"] The type of the event. Always `response.reasoning_summary_text.done`. ... + # @param type [Symbol, :"response.reasoning_summary_text.done"] The type of the event. Always `response.reasoning_summary_text.done`. end end end diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index fb8d6770..aef8a0d5 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -36,19 +36,19 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseRefusalDeltaEvent} for more details. + # {OpenAI::Responses::ResponseRefusalDeltaEvent} for more details. # # Emitted when there is a partial refusal text. # - # @param content_index [Integer] The index of the content part that the refusal text is added to. ... + # @param content_index [Integer] The index of the content part that the refusal text is added to. # - # @param delta [String] The refusal text that is added. ... + # @param delta [String] The refusal text that is added. # - # @param item_id [String] The ID of the output item that the refusal text is added to. ... + # @param item_id [String] The ID of the output item that the refusal text is added to. # - # @param output_index [Integer] The index of the output item that the refusal text is added to. ... 
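
The reasoning-summary text events above pair naturally: deltas append per (`item_id`, `summary_index`), and the `done` event carries the full text, which can simply replace whatever was accumulated. A sketch of that reassembly (hashes as stand-ins for the typed events):

    summaries = Hash.new { |h, k| h[k] = +"" }

    apply_summary_event = lambda do |event|
      key = [event[:item_id], event[:summary_index]]
      case event[:type]
      in "response.reasoning_summary_text.delta"
        summaries[key] << event[:delta]
      in "response.reasoning_summary_text.done"
        summaries[key] = event[:text] # authoritative final text
      else
        nil # not a summary-text event
      end
    end
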
+ # @param output_index [Integer] The index of the output item that the refusal text is added to. # - # @param type [Symbol, :"response.refusal.delta"] The type of the event. Always `response.refusal.delta`. ... + # @param type [Symbol, :"response.refusal.delta"] The type of the event. Always `response.refusal.delta`. end end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index a361e8b3..912b1796 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -36,19 +36,19 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseRefusalDoneEvent} for more details. + # {OpenAI::Responses::ResponseRefusalDoneEvent} for more details. # # Emitted when refusal text is finalized. # - # @param content_index [Integer] The index of the content part that the refusal text is finalized. ... + # @param content_index [Integer] The index of the content part that the refusal text is finalized. # - # @param item_id [String] The ID of the output item that the refusal text is finalized. ... + # @param item_id [String] The ID of the output item that the refusal text is finalized. # - # @param output_index [Integer] The index of the output item that the refusal text is finalized. ... + # @param output_index [Integer] The index of the output item that the refusal text is finalized. # - # @param refusal [String] The refusal text that is finalized. ... + # @param refusal [String] The refusal text that is finalized. # - # @param type [Symbol, :"response.refusal.done"] The type of the event. Always `response.refusal.done`. ... + # @param type [Symbol, :"response.refusal.done"] The type of the event. Always `response.refusal.done`. end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index e4b242f8..07638190 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -12,15 +12,14 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # - # @return [Array, nil] - optional :include, - -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } # @!method initialize(include: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # - # @param include [Array] Additional fields to include in the response. See the `include` ... + # @param include [Array] Additional fields to include in the response. 
See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 95156d56..48456245 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -10,135 +10,136 @@ module ResponseStreamEvent discriminator :type # Emitted when there is a partial audio response. - variant :"response.audio.delta", -> { OpenAI::Models::Responses::ResponseAudioDeltaEvent } + variant :"response.audio.delta", -> { OpenAI::Responses::ResponseAudioDeltaEvent } # Emitted when the audio response is complete. - variant :"response.audio.done", -> { OpenAI::Models::Responses::ResponseAudioDoneEvent } + variant :"response.audio.done", -> { OpenAI::Responses::ResponseAudioDoneEvent } # Emitted when there is a partial transcript of audio. variant :"response.audio.transcript.delta", - -> { OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent } + -> { + OpenAI::Responses::ResponseAudioTranscriptDeltaEvent + } # Emitted when the full audio transcript is completed. - variant :"response.audio.transcript.done", - -> { OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent } + variant :"response.audio.transcript.done", -> { OpenAI::Responses::ResponseAudioTranscriptDoneEvent } # Emitted when a partial code snippet is added by the code interpreter. variant :"response.code_interpreter_call.code.delta", - -> { OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent } + -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent } # Emitted when code snippet output is finalized by the code interpreter. variant :"response.code_interpreter_call.code.done", - -> { OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent } + -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent } # Emitted when the code interpreter call is completed. variant :"response.code_interpreter_call.completed", - -> { OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent } + -> { OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent } # Emitted when a code interpreter call is in progress. variant :"response.code_interpreter_call.in_progress", - -> { OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent } + -> { OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent } # Emitted when the code interpreter is actively interpreting the code snippet. variant :"response.code_interpreter_call.interpreting", - -> { OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent } + -> { OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent } # Emitted when the model response is complete. - variant :"response.completed", -> { OpenAI::Models::Responses::ResponseCompletedEvent } + variant :"response.completed", -> { OpenAI::Responses::ResponseCompletedEvent } # Emitted when a new content part is added. - variant :"response.content_part.added", -> { OpenAI::Models::Responses::ResponseContentPartAddedEvent } + variant :"response.content_part.added", -> { OpenAI::Responses::ResponseContentPartAddedEvent } # Emitted when a content part is done. - variant :"response.content_part.done", -> { OpenAI::Models::Responses::ResponseContentPartDoneEvent } + variant :"response.content_part.done", -> { OpenAI::Responses::ResponseContentPartDoneEvent } # An event that is emitted when a response is created. 
- variant :"response.created", -> { OpenAI::Models::Responses::ResponseCreatedEvent } + variant :"response.created", -> { OpenAI::Responses::ResponseCreatedEvent } # Emitted when an error occurs. - variant :error, -> { OpenAI::Models::Responses::ResponseErrorEvent } + variant :error, -> { OpenAI::Responses::ResponseErrorEvent } # Emitted when a file search call is completed (results found). variant :"response.file_search_call.completed", - -> { OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent } + -> { OpenAI::Responses::ResponseFileSearchCallCompletedEvent } # Emitted when a file search call is initiated. variant :"response.file_search_call.in_progress", - -> { OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent } + -> { OpenAI::Responses::ResponseFileSearchCallInProgressEvent } # Emitted when a file search is currently searching. variant :"response.file_search_call.searching", - -> { OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent } + -> { OpenAI::Responses::ResponseFileSearchCallSearchingEvent } # Emitted when there is a partial function-call arguments delta. variant :"response.function_call_arguments.delta", - -> { OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent } + -> { OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent } # Emitted when function-call arguments are finalized. variant :"response.function_call_arguments.done", - -> { OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent } + -> { OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent } # Emitted when the response is in progress. - variant :"response.in_progress", -> { OpenAI::Models::Responses::ResponseInProgressEvent } + variant :"response.in_progress", -> { OpenAI::Responses::ResponseInProgressEvent } # An event that is emitted when a response fails. - variant :"response.failed", -> { OpenAI::Models::Responses::ResponseFailedEvent } + variant :"response.failed", -> { OpenAI::Responses::ResponseFailedEvent } # An event that is emitted when a response finishes as incomplete. - variant :"response.incomplete", -> { OpenAI::Models::Responses::ResponseIncompleteEvent } + variant :"response.incomplete", -> { OpenAI::Responses::ResponseIncompleteEvent } # Emitted when a new output item is added. - variant :"response.output_item.added", -> { OpenAI::Models::Responses::ResponseOutputItemAddedEvent } + variant :"response.output_item.added", -> { OpenAI::Responses::ResponseOutputItemAddedEvent } # Emitted when an output item is marked done. - variant :"response.output_item.done", -> { OpenAI::Models::Responses::ResponseOutputItemDoneEvent } + variant :"response.output_item.done", -> { OpenAI::Responses::ResponseOutputItemDoneEvent } # Emitted when a new reasoning summary part is added. variant :"response.reasoning_summary_part.added", - -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent } + -> { OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent } # Emitted when a reasoning summary part is completed. variant :"response.reasoning_summary_part.done", - -> { OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent } + -> { OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent } # Emitted when a delta is added to a reasoning summary text. variant :"response.reasoning_summary_text.delta", - -> { OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent } + -> { OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent } # Emitted when a reasoning summary text is completed. 
variant :"response.reasoning_summary_text.done", - -> { OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent } + -> { OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent } # Emitted when there is a partial refusal text. - variant :"response.refusal.delta", -> { OpenAI::Models::Responses::ResponseRefusalDeltaEvent } + variant :"response.refusal.delta", -> { OpenAI::Responses::ResponseRefusalDeltaEvent } # Emitted when refusal text is finalized. - variant :"response.refusal.done", -> { OpenAI::Models::Responses::ResponseRefusalDoneEvent } + variant :"response.refusal.done", -> { OpenAI::Responses::ResponseRefusalDoneEvent } # Emitted when a text annotation is added. variant :"response.output_text.annotation.added", - -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent } + -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent } # Emitted when there is an additional text delta. - variant :"response.output_text.delta", -> { OpenAI::Models::Responses::ResponseTextDeltaEvent } + variant :"response.output_text.delta", -> { OpenAI::Responses::ResponseTextDeltaEvent } # Emitted when text content is finalized. - variant :"response.output_text.done", -> { OpenAI::Models::Responses::ResponseTextDoneEvent } + variant :"response.output_text.done", -> { OpenAI::Responses::ResponseTextDoneEvent } # Emitted when a web search call is completed. variant :"response.web_search_call.completed", - -> { OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent } + -> { OpenAI::Responses::ResponseWebSearchCallCompletedEvent } # Emitted when a web search call is initiated. variant :"response.web_search_call.in_progress", - -> { OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent } + -> { OpenAI::Responses::ResponseWebSearchCallInProgressEvent } # Emitted when a web search call is executing. 
variant :"response.web_search_call.searching", - -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent } + -> { OpenAI::Responses::ResponseWebSearchCallSearchingEvent } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] + # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, 
OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index f0be62c1..8ffc14a6 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -7,9 +7,8 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute annotation # A citation to a file. # - # @return [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - required :annotation, - union: -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation } + # @return [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + required :annotation, union: -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation } # @!attribute annotation_index # The index of the annotation that was added. @@ -43,25 +42,25 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent} for more details. + # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent} for more details. # # Emitted when a text annotation is added. # - # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. + # @param annotation [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. # - # @param annotation_index [Integer] The index of the annotation that was added. ... + # @param annotation_index [Integer] The index of the annotation that was added. # - # @param content_index [Integer] The index of the content part that the text annotation was added to. ... + # @param content_index [Integer] The index of the content part that the text annotation was added to. 
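
With the `ResponseStreamEvent` union enumerated above, a typical text-oriented consumer only needs a handful of its discriminator values plus a catch-all. A hedged dispatch sketch (event hashes are stand-ins for the typed models):

    def handle_stream_event(event)
      case event[:type]
      in "response.output_text.delta" then print(event[:delta])
      in "response.refusal.delta"     then warn("refusal: #{event[:delta]}")
      in "response.completed"         then puts
      in "response.failed" | "error"  then raise "response stream failed"
      else
        # the union has 30+ variants; everything else is ignored here
      end
    end
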
# - # @param item_id [String] The ID of the output item that the text annotation was added to. ... + # @param item_id [String] The ID of the output item that the text annotation was added to. # - # @param output_index [Integer] The index of the output item that the text annotation was added to. ... + # @param output_index [Integer] The index of the output item that the text annotation was added to. # - # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. ... + # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. # A citation to a file. # - # @see OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent#annotation + # @see OpenAI::Responses::ResponseTextAnnotationDeltaEvent#annotation module Annotation extend OpenAI::Internal::Type::Union @@ -69,15 +68,16 @@ module Annotation # A citation to a file. variant :file_citation, - -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation } + -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation } # A citation for a web resource used to generate a model response. variant :url_citation, - -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation } + -> { + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation + } # A path to a file. - variant :file_path, - -> { OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath } + variant :file_path, -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath } class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -174,20 +174,20 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, index:, type: :file_path) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} - # for more details. + # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} for + # more details. # # A path to a file. # - # @param file_id [String] The ID of the file. ... + # @param file_id [String] The ID of the file. # - # @param index [Integer] The index of the file in the list of files. ... + # @param index [Integer] The index of the file in the list of files. # - # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. ... + # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. 
end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] + # @return [Array(OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index a67c6449..2d22f544 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -19,12 +19,12 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. # - # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] - optional :format_, union: -> { OpenAI::Models::Responses::ResponseFormatTextConfig }, api_name: :format + # @return [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject, nil] + optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format # @!method initialize(format_: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextConfig} for more details. + # {OpenAI::Responses::ResponseTextConfig} for more details. # # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: @@ -32,7 +32,7 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # @param format_ [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index fce5e269..48c639ef 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -36,19 +36,19 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextDeltaEvent} for more details. + # {OpenAI::Responses::ResponseTextDeltaEvent} for more details. # # Emitted when there is an additional text delta. # - # @param content_index [Integer] The index of the content part that the text delta was added to. ... + # @param content_index [Integer] The index of the content part that the text delta was added to. # - # @param delta [String] The text delta that was added. ... 
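
One detail worth noting in `ResponseTextConfig` above: the attribute is `format_` in Ruby but serializes as `format` (via `api_name: :format`). A sketch of the JSON-schema branch of that union, where the nested key names are my assumption about the schema-config shape rather than something confirmed by this diff:

    text_config = {
      format: {
        type: "json_schema",
        name: "weather_report",
        schema: {
          type: "object",
          properties: {temp_c: {type: "number"}},
          required: ["temp_c"]
        }
      }
    }
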
+ # @param delta [String] The text delta that was added. # - # @param item_id [String] The ID of the output item that the text delta was added to. ... + # @param item_id [String] The ID of the output item that the text delta was added to. # - # @param output_index [Integer] The index of the output item that the text delta was added to. ... + # @param output_index [Integer] The index of the output item that the text delta was added to. # - # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. ... + # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 15bd9abc..7b8921d4 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -36,19 +36,19 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextDoneEvent} for more details. + # {OpenAI::Responses::ResponseTextDoneEvent} for more details. # # Emitted when text content is finalized. # - # @param content_index [Integer] The index of the content part that the text content is finalized. ... + # @param content_index [Integer] The index of the content part that the text content is finalized. # - # @param item_id [String] The ID of the output item that the text content is finalized. ... + # @param item_id [String] The ID of the output item that the text content is finalized. # - # @param output_index [Integer] The index of the output item that the text content is finalized. ... + # @param output_index [Integer] The index of the output item that the text content is finalized. # - # @param text [String] The text content that is finalized. ... + # @param text [String] The text content that is finalized. # - # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. ... + # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. end end end diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 93b1917a..09831893 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -13,8 +13,8 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # A detailed breakdown of the input tokens. # - # @return [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] - required :input_tokens_details, -> { OpenAI::Models::Responses::ResponseUsage::InputTokensDetails } + # @return [OpenAI::Responses::ResponseUsage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::Responses::ResponseUsage::InputTokensDetails } # @!attribute output_tokens # The number of output tokens. @@ -25,8 +25,8 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute output_tokens_details # A detailed breakdown of the output tokens. 
# - # @return [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] - required :output_tokens_details, -> { OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails } + # @return [OpenAI::Responses::ResponseUsage::OutputTokensDetails] + required :output_tokens_details, -> { OpenAI::Responses::ResponseUsage::OutputTokensDetails } # @!attribute total_tokens # The total number of tokens used. @@ -40,15 +40,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of input tokens. # - # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. + # @param input_tokens_details [OpenAI::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. # # @param output_tokens [Integer] The number of output tokens. # - # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. + # @param output_tokens_details [OpenAI::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. # # @param total_tokens [Integer] The total number of tokens used. - # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details + # @see OpenAI::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. @@ -59,14 +59,14 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(cached_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseUsage::InputTokensDetails} for more details. + # {OpenAI::Responses::ResponseUsage::InputTokensDetails} for more details. # # A detailed breakdown of the input tokens. # - # @param cached_tokens [Integer] The number of tokens that were retrieved from the cache. ... + # @param cached_tokens [Integer] The number of tokens that were retrieved from the cache. end - # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details + # @see OpenAI::Responses::ResponseUsage#output_tokens_details class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_tokens # The number of reasoning tokens. diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 5a94a581..4a8461ac 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -24,16 +24,15 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent} for more - # details. + # {OpenAI::Responses::ResponseWebSearchCallCompletedEvent} for more details. # # Emitted when a web search call is completed. # - # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # @param item_id [String] Unique ID for the output item associated with the web search call. # - # @param output_index [Integer] The index of the output item that the web search call is associated with. ... 
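Because `ResponseUsage` nests cached and reasoning token counts under the two details models above, derived metrics are simple subtractions. A small sketch, assuming `usage` is a parsed `OpenAI::Responses::ResponseUsage`:

    uncached_input = usage.input_tokens - usage.input_tokens_details.cached_tokens
    reasoning      = usage.output_tokens_details.reasoning_tokens
    puts "input: #{usage.input_tokens} (#{uncached_input} uncached), " \
         "output: #{usage.output_tokens} (#{reasoning} reasoning), " \
         "total: #{usage.total_tokens}"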
+ # @param output_index [Integer] The index of the output item that the web search call is associated with. # - # @param type [Symbol, :"response.web_search_call.completed"] The type of the event. Always `response.web_search_call.completed`. ... + # @param type [Symbol, :"response.web_search_call.completed"] The type of the event. Always `response.web_search_call.completed`. end end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index 2c2c18c0..a930db0f 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -24,16 +24,15 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent} for more - # details. + # {OpenAI::Responses::ResponseWebSearchCallInProgressEvent} for more details. # # Emitted when a web search call is initiated. # - # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # @param item_id [String] Unique ID for the output item associated with the web search call. # - # @param output_index [Integer] The index of the output item that the web search call is associated with. ... + # @param output_index [Integer] The index of the output item that the web search call is associated with. # - # @param type [Symbol, :"response.web_search_call.in_progress"] The type of the event. Always `response.web_search_call.in_progress`. ... + # @param type [Symbol, :"response.web_search_call.in_progress"] The type of the event. Always `response.web_search_call.in_progress`. end end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index c23044cb..31a1e8c1 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -24,16 +24,15 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent} for more - # details. + # {OpenAI::Responses::ResponseWebSearchCallSearchingEvent} for more details. # # Emitted when a web search call is executing. # - # @param item_id [String] Unique ID for the output item associated with the web search call. ... + # @param item_id [String] Unique ID for the output item associated with the web search call. # - # @param output_index [Integer] The index of the output item that the web search call is associated with. ... + # @param output_index [Integer] The index of the output item that the web search call is associated with. # - # @param type [Symbol, :"response.web_search_call.searching"] The type of the event. Always `response.web_search_call.searching`. ... + # @param type [Symbol, :"response.web_search_call.searching"] The type of the event. Always `response.web_search_call.searching`. 
end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 5c45cef7..97939459 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -10,19 +10,19 @@ module Tool discriminator :type # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - variant :file_search, -> { OpenAI::Models::Responses::FileSearchTool } + variant :file_search, -> { OpenAI::Responses::FileSearchTool } # Defines a function in your own code the model can choose to call. Learn more about [function calling](https://platform.openai.com/docs/guides/function-calling). - variant :function, -> { OpenAI::Models::Responses::FunctionTool } + variant :function, -> { OpenAI::Responses::FunctionTool } # A tool that controls a virtual computer. Learn more about the [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). - variant :computer_use_preview, -> { OpenAI::Models::Responses::ComputerTool } + variant :computer_use_preview, -> { OpenAI::Responses::ComputerTool } # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). - variant -> { OpenAI::Models::Responses::WebSearchTool } + variant -> { OpenAI::Responses::WebSearchTool } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] + # @return [Array(OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 6ecec732..504daa0a 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -14,17 +14,17 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `web_search_preview` # - `computer_use_preview` # - # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] - required :type, enum: -> { OpenAI::Models::Responses::ToolChoiceTypes::Type } + # @return [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] + required :type, enum: -> { OpenAI::Responses::ToolChoiceTypes::Type } # @!method initialize(type:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ToolChoiceTypes} for more details. + # {OpenAI::Responses::ToolChoiceTypes} for more details. # # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). # - # @param type [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should use. Learn more about ... + # @param type [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should use. Learn more about # The type of hosted tool the model should use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools).
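The `Tool` union above is discriminated on `type`, so a request-side `tools` array is just a list of hashes keyed by that field. A sketch with illustrative values (the function schema and vector store ID are hypothetical, not part of this diff):

    tools = [
      {type: "file_search", vector_store_ids: ["vs_123"]},  # FileSearchTool
      {
        type: "function",                                   # FunctionTool
        name: "get_weather",
        parameters: {type: "object", properties: {city: {type: "string"}}}
      },
      {type: "computer_use_preview"},                       # ComputerTool
      {type: "web_search_preview"}                          # WebSearchTool
    ]

    # ToolChoiceTypes: force one of the hosted tools by its `type`.
    tool_choice = {type: "web_search_preview"}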
@@ -35,7 +35,7 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `web_search_preview` # - `computer_use_preview` # - # @see OpenAI::Models::Responses::ToolChoiceTypes#type + # @see OpenAI::Responses::ToolChoiceTypes#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 04073cbc..37dd39fe 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -8,42 +8,40 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. # - # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] - required :type, enum: -> { OpenAI::Models::Responses::WebSearchTool::Type } + # @return [Symbol, OpenAI::Responses::WebSearchTool::Type] + required :type, enum: -> { OpenAI::Responses::WebSearchTool::Type } # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] - optional :search_context_size, enum: -> { OpenAI::Models::Responses::WebSearchTool::SearchContextSize } + # @return [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize, nil] + optional :search_context_size, enum: -> { OpenAI::Responses::WebSearchTool::SearchContextSize } # @!attribute user_location # The user's location. # - # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] - optional :user_location, -> { OpenAI::Models::Responses::WebSearchTool::UserLocation }, nil?: true + # @return [OpenAI::Responses::WebSearchTool::UserLocation, nil] + optional :user_location, -> { OpenAI::Responses::WebSearchTool::UserLocation }, nil?: true # @!method initialize(type:, search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::WebSearchTool} for more details. + # {OpenAI::Responses::WebSearchTool} for more details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # - # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev - # ... + # @param type [Symbol, OpenAI::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev # - # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search - # ... + # @param search_context_size [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search # - # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] The user's location. + # @param user_location [OpenAI::Responses::WebSearchTool::UserLocation, nil] The user's location. # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. 
# - # @see OpenAI::Models::Responses::WebSearchTool#type + # @see OpenAI::Responses::WebSearchTool#type module Type extend OpenAI::Internal::Type::Enum @@ -57,7 +55,7 @@ module Type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @see OpenAI::Models::Responses::WebSearchTool#search_context_size + # @see OpenAI::Responses::WebSearchTool#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -69,7 +67,7 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Models::Responses::WebSearchTool#user_location + # @see OpenAI::Responses::WebSearchTool#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of location approximation. Always `approximate`. @@ -105,19 +103,17 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::WebSearchTool::UserLocation} for more details. + # {OpenAI::Responses::WebSearchTool::UserLocation} for more details. # # The user's location. # # @param city [String, nil] Free text input for the city of the user, e.g. `San Francisco`. # # @param country [String, nil] The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of t - # ... # # @param region [String, nil] Free text input for the region of the user, e.g. `California`. # # @param timezone [String, nil] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the user - # ... # # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 5984103a..5634269f 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -7,9 +7,9 @@ module ResponsesModel variant String - variant enum: -> { OpenAI::Models::ChatModel } + variant enum: -> { OpenAI::ChatModel } - variant enum: -> { OpenAI::Models::ResponsesModel::ResponsesOnlyModel } + variant enum: -> { OpenAI::ResponsesModel::ResponsesOnlyModel } module ResponsesOnlyModel extend OpenAI::Internal::Type::Enum @@ -24,7 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] + # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 46682393..30388ebb 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -20,13 +20,11 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Models::StaticFileChunkingStrategy} for more details. + # {OpenAI::StaticFileChunkingStrategy} for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. - # ... # # @param max_chunk_size_tokens [Integer] The maximum number of tokens in each chunk. The default value is `800`. The mini - # ... 
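Putting the `WebSearchTool` attributes and enums above together, a fully specified tool looks like the following sketch (the location values are illustrative):

    web_search_tool = {
      type: "web_search_preview",        # or "web_search_preview_2025_03_11"
      search_context_size: "medium",     # "low" | "medium" | "high"; "medium" is the default
      user_location: {
        type: "approximate",             # always "approximate"
        city: "San Francisco",
        region: "California",
        country: "US",                   # two-letter ISO 3166-1 country code
        timezone: "America/Los_Angeles"  # IANA timezone
      }
    }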
end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 68d0d88e..2169aa9f 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -5,8 +5,8 @@ module Models class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Models::StaticFileChunkingStrategy] - required :static, -> { OpenAI::Models::StaticFileChunkingStrategy } + # @return [OpenAI::StaticFileChunkingStrategy] + required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type # Always `static`. @@ -15,7 +15,7 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param static [OpenAI::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb index f37c6aa6..304bacb5 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -5,8 +5,8 @@ module Models class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Models::StaticFileChunkingStrategy] - required :static, -> { OpenAI::Models::StaticFileChunkingStrategy } + # @return [OpenAI::StaticFileChunkingStrategy] + required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type # Always `static`. @@ -17,7 +17,7 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # Customize your own chunking strategy by setting chunk size and chunk overlap. # - # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param static [OpenAI::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 39d5043e..8fe51192 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -51,18 +51,18 @@ class Upload < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the Upload. # - # @return [Symbol, OpenAI::Models::Upload::Status] - required :status, enum: -> { OpenAI::Models::Upload::Status } + # @return [Symbol, OpenAI::Upload::Status] + required :status, enum: -> { OpenAI::Upload::Status } # @!attribute file # The `File` object represents a document that has been uploaded to OpenAI. # - # @return [OpenAI::Models::FileObject, nil] - optional :file, -> { OpenAI::Models::FileObject }, nil?: true + # @return [OpenAI::FileObject, nil] + optional :file, -> { OpenAI::FileObject }, nil?: true # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) - # Some parameter documentations has been truncated, see {OpenAI::Models::Upload} - # for more details. + # Some parameter documentations has been truncated, see {OpenAI::Upload} for more + # details. # # The Upload object can accept byte chunks in the form of Parts. # @@ -77,17 +77,16 @@ class Upload < OpenAI::Internal::Type::BaseModel # @param filename [String] The name of the file to be uploaded. 
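For reference, `StaticFileChunkingStrategyObjectParam` wraps the strategy above under a `static` key together with the `type` constant. A sketch using the documented defaults (800-token chunks with a 400-token overlap):

    chunking_strategy = {
      type: "static",
      static: {
        max_chunk_size_tokens: 800,   # documented default
        chunk_overlap_tokens: 400     # documented default
      }
    }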
# # @param purpose [String] The intended purpose of the file. [Please refer here](https://platform.openai.co - # ... # - # @param status [Symbol, OpenAI::Models::Upload::Status] The status of the Upload. + # @param status [Symbol, OpenAI::Upload::Status] The status of the Upload. # - # @param file [OpenAI::Models::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. + # @param file [OpenAI::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. # # @param object [Symbol, :upload] The object type, which is always "upload". # The status of the Upload. # - # @see OpenAI::Models::Upload#status + # @see OpenAI::Upload#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 3c898f46..2d67440d 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -24,10 +24,9 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::UploadCompleteParams} for more details. # - # @param part_ids [Array] The ordered list of Part IDs. ... + # @param part_ids [Array] The ordered list of Part IDs. # # @param md5 [String] The optional md5 checksum for the file contents to verify if the bytes uploaded - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index cafa9448..13caaed5 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -34,20 +34,20 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). # - # @return [Symbol, OpenAI::Models::FilePurpose] - required :purpose, enum: -> { OpenAI::Models::FilePurpose } + # @return [Symbol, OpenAI::FilePurpose] + required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::UploadCreateParams} for more details. # - # @param bytes [Integer] The number of bytes in the file you are uploading. ... + # @param bytes [Integer] The number of bytes in the file you are uploading. # - # @param filename [String] The name of the file to upload. ... + # @param filename [String] The name of the file to upload. # - # @param mime_type [String] The MIME type of the file. ... + # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. ... + # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index df8520b6..1ed39993 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -18,7 +18,7 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Uploads::PartCreateParams} for more details. 
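The three params classes above describe an upload lifecycle: create an Upload, send Parts, then complete with the ordered part IDs. A sketch of that flow; the `client` resource calls are assumed from the SDK's layout and are not part of this diff:

    require "pathname"

    upload = client.uploads.create(
      bytes: File.size("training.jsonl"),
      filename: "training.jsonl",
      mime_type: "application/jsonl",
      purpose: :"fine-tune"              # a FilePurpose value
    )

    # PartCreateParams accepts Pathname, StringIO, IO, or OpenAI::FilePart.
    part = client.uploads.parts.create(upload.id, data: Pathname("training.jsonl"))

    # md5 is optional; it verifies the assembled bytes.
    client.uploads.complete(upload.id, part_ids: [part.id])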
+ # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 285ac28c..48c1a984 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -18,8 +18,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute file_counts # - # @return [OpenAI::Models::VectorStore::FileCounts] - required :file_counts, -> { OpenAI::Models::VectorStore::FileCounts } + # @return [OpenAI::VectorStore::FileCounts] + required :file_counts, -> { OpenAI::VectorStore::FileCounts } # @!attribute last_active_at # The Unix timestamp (in seconds) for when the vector store was last active. @@ -55,8 +55,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. # - # @return [Symbol, OpenAI::Models::VectorStore::Status] - required :status, enum: -> { OpenAI::Models::VectorStore::Status } + # @return [Symbol, OpenAI::VectorStore::Status] + required :status, enum: -> { OpenAI::VectorStore::Status } # @!attribute usage_bytes # The total number of bytes used by the files in the vector store. @@ -67,8 +67,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::Models::VectorStore::ExpiresAfter, nil] - optional :expires_after, -> { OpenAI::Models::VectorStore::ExpiresAfter } + # @return [OpenAI::VectorStore::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStore::ExpiresAfter } # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. # @@ -77,8 +77,8 @@ optional :expires_at, Integer, nil?: true # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::VectorStore} for more details. + # Some parameter documentations has been truncated, see {OpenAI::VectorStore} for + # more details. # # A vector store is a collection of processed files that can be used by the # `file_search` tool. # # @param id [String] The identifier, which can be referenced in API endpoints. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store was created. # - # @param file_counts [OpenAI::Models::VectorStore::FileCounts] + # @param file_counts [OpenAI::VectorStore::FileCounts] # # @param last_active_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store was last active. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the vector store. # - # @param status [Symbol, OpenAI::Models::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or - # ...
+ # @param status [Symbol, OpenAI::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or # # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. # - # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStore::ExpiresAfter] The expiration policy for a vector store. # # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. # # @param object [Symbol, :vector_store] The object type, which is always `vector_store`. - # @see OpenAI::Models::VectorStore#file_counts + # @see OpenAI::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that were cancelled. @@ -154,7 +153,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. # - # @see OpenAI::Models::VectorStore#status + # @see OpenAI::VectorStore#status module Status extend OpenAI::Internal::Type::Enum @@ -166,7 +165,7 @@ module Status # @return [Array] end - # @see OpenAI::Models::VectorStore#expires_after + # @see OpenAI::VectorStore#expires_after class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: @@ -183,14 +182,13 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::Models::VectorStore::ExpiresAfter} for more details. + # {OpenAI::VectorStore::ExpiresAfter} for more details. # # The expiration policy for a vector store. # # @param days [Integer] The number of days after the anchor time that the vector store will expire. # # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` - # ... end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index acbd89a2..a4babcf4 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -11,14 +11,14 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] - optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } + # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!attribute expires_after # The expiration policy for a vector store. 
# - # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, nil] - optional :expires_after, -> { OpenAI::Models::VectorStoreCreateParams::ExpiresAfter } + # @return [OpenAI::VectorStoreCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreCreateParams::ExpiresAfter } # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that @@ -49,15 +49,13 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreCreateParams} for more details. # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the vector store. # @@ -79,14 +77,13 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::Models::VectorStoreCreateParams::ExpiresAfter} for more details. + # {OpenAI::VectorStoreCreateParams::ExpiresAfter} for more details. # # The expiration policy for a vector store. # # @param days [Integer] The number of days after the anchor time that the vector store will expire. # # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` - # ... end end end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 9a7d787d..40712974 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -36,24 +36,20 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::VectorStoreListParams::Order } + # @return [Symbol, OpenAI::VectorStoreListParams::Order, nil] + optional :order, enum: -> { OpenAI::VectorStoreListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreListParams} for more details. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... 
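A request built from `VectorStoreCreateParams` above might look like this sketch (the IDs and metadata values are illustrative; `type: "auto"` stands in for `AutoFileChunkingStrategyParam`):

    create_params = {
      name: "support-docs",
      file_ids: ["file_abc123"],
      chunking_strategy: {type: "auto"},
      expires_after: {anchor: "last_active_at", days: 30},
      metadata: {team: "support"}          # up to 16 key-value pairs
    }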
# # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 17447b6d..32bf6133 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -11,13 +11,13 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # A query string for a search # # @return [String, Array] - required :query, union: -> { OpenAI::Models::VectorStoreSearchParams::Query } + required :query, union: -> { OpenAI::VectorStoreSearchParams::Query } # @!attribute filters # A filter to apply based on file attributes. # - # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] - optional :filters, union: -> { OpenAI::Models::VectorStoreSearchParams::Filters } + # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + optional :filters, union: -> { OpenAI::VectorStoreSearchParams::Filters } # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 @@ -29,8 +29,8 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. # - # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions, nil] - optional :ranking_options, -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions } + # @return [OpenAI::VectorStoreSearchParams::RankingOptions, nil] + optional :ranking_options, -> { OpenAI::VectorStoreSearchParams::RankingOptions } # @!attribute rewrite_query # Whether to rewrite the natural language query for vector search. @@ -44,12 +44,11 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 - # ... # - # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. # @@ -61,7 +60,7 @@ module Query variant String - variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } + variant -> { OpenAI::VectorStoreSearchParams::Query::StringArray } # @!method self.variants # @return [Array(String, Array)] @@ -75,20 +74,20 @@ module Filters extend OpenAI::Internal::Type::Union # A filter used to compare a specified attribute key to a given value using a defined comparison operation. - variant -> { OpenAI::Models::ComparisonFilter } + variant -> { OpenAI::ComparisonFilter } # Combine multiple filters using `and` or `or`. 
- variant -> { OpenAI::Models::CompoundFilter } + variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] + # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] end class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # - # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] - optional :ranker, enum: -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker } + # @return [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker, nil] + optional :ranker, enum: -> { OpenAI::VectorStoreSearchParams::RankingOptions::Ranker } # @!attribute score_threshold # @@ -98,10 +97,10 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] + # @param ranker [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker] # @param score_threshold [Float] - # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker + # @see OpenAI::VectorStoreSearchParams::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 75e2fdb5..5b623829 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -45,7 +45,7 @@ class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreSearchResponse} for more details. # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param content [Array] Content chunks from the file. # diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index b7d6bd13..9fab30cf 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -10,8 +10,8 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] - optional :expires_after, -> { OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter }, nil?: true + # @return [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreUpdateParams::ExpiresAfter }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -34,9 +34,9 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreUpdateParams} for more details. # - # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... 
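Combining the pieces of `VectorStoreSearchParams` above, a search request with a compound filter could be sketched as follows (attribute keys and values are illustrative, not part of this diff):

    search_params = {
      query: "how do refunds work?",
      filters: {
        type: "and",                                 # CompoundFilter
        filters: [
          {type: "eq", key: "lang", value: "en"},    # ComparisonFilter
          {type: "gte", key: "year", value: 2024}
        ]
      },
      max_num_results: 10,                           # between 1 and 50
      ranking_options: {score_threshold: 0.5},
      rewrite_query: true
    }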
+ # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String, nil] The name of the vector store. # @@ -58,14 +58,13 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter} for more details. + # {OpenAI::VectorStoreUpdateParams::ExpiresAfter} for more details. # # The expiration policy for a vector store. # # @param days [Integer] The number of days after the anchor time that the vector store will expire. # # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` - # ... end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index fba3d18a..5fd03105 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -25,27 +25,27 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileBatchCreateParams::Attribute] }, + -> { + OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::FileBatchCreateParams::Attribute] + }, nil?: true # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] - optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } + # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStores::FileBatchCreateParams} for more details. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # ... # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index fc97c2c0..a6e6e635 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -34,8 +34,8 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # - # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter, nil] - optional :filter, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter } + # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter, nil] + optional :filter, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Filter } # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and @@ -48,8 +48,8 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] - optional :order, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Order } + # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order, nil] + optional :order, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Order } # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -58,19 +58,14 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @param vector_store_id [String] # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # - # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - # ... + # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index d5e71148..8cc4cee4 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -25,27 +25,27 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>String, Float, Boolean}, nil] optional :attributes, - -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileCreateParams::Attribute] }, + -> { + OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::FileCreateParams::Attribute] + }, nil?: true # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] - optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } + # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStores::FileCreateParams} for more details. # # @param file_id [String] A [File](https://platform.openai.com/docs/api-reference/files) ID that the vecto - # ... # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 55dd58ce..dcebcd20 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -29,8 +29,8 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # - # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter, nil] - optional :filter, enum: -> { OpenAI::Models::VectorStores::FileListParams::Filter } + # @return [Symbol, OpenAI::VectorStores::FileListParams::Filter, nil] + optional :filter, enum: -> { OpenAI::VectorStores::FileListParams::Filter } # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and @@ -43,27 +43,22 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
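The cursor parameters on `FileListParams` above compose into the usual pagination loop: feed the last ID of one page into `after` for the next. A sketch; the `list_page` helper and the page's `data`/`has_more` accessors are hypothetical stand-ins for the SDK's list call:

    params = {limit: 100, order: "desc", filter: "completed"}
    cursor = nil

    loop do
      page = list_page(params.merge(after: cursor).compact)  # hypothetical fetch
      page.data.each { |file| puts file.id }
      break unless page.has_more
      cursor = page.data.last.id
    end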
       #
-      # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil]
-      optional :order, enum: -> { OpenAI::Models::VectorStores::FileListParams::Order }
+      # @return [Symbol, OpenAI::VectorStores::FileListParams::Order, nil]
+      optional :order, enum: -> { OpenAI::VectorStores::FileListParams::Order }
 
       # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {})
       #   Some parameter documentations has been truncated, see
       #   {OpenAI::Models::VectorStores::FileListParams} for more details.
       #
       #   @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
-      #   ...
       #
       #   @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
-      #   ...
       #
-      #   @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
-      #   ...
+      #   @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`.
       #
       #   @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
-      #   ...
       #
-      #   @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
-      #   ...
+      #   @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
       #
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb
index cb4600bd..be3d5d7b 100644
--- a/lib/openai/models/vector_stores/file_update_params.rb
+++ b/lib/openai/models/vector_stores/file_update_params.rb
@@ -22,7 +22,9 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel
       #
       #   @return [Hash{Symbol=>String, Float, Boolean}, nil]
       required :attributes,
-               -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileUpdateParams::Attribute] },
+               -> {
+                 OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::FileUpdateParams::Attribute]
+               },
                nil?: true
 
       # @!method initialize(vector_store_id:, attributes:, request_options: {})
@@ -31,7 +33,7 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel
       #
       #   @param vector_store_id [String]
       #
-      #   @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      #   @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
       #
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb
index 66754ed9..eab6df4b 100644
--- a/lib/openai/models/vector_stores/vector_store_file.rb
+++ b/lib/openai/models/vector_stores/vector_store_file.rb
@@ -21,8 +21,8 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel
       #   The last error associated with this vector store file. Will be `null` if there
       #   are no errors.
       #
-      #   @return [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil]
-      required :last_error, -> { OpenAI::Models::VectorStores::VectorStoreFile::LastError }, nil?: true
+      #   @return [OpenAI::VectorStores::VectorStoreFile::LastError, nil]
+      required :last_error, -> { OpenAI::VectorStores::VectorStoreFile::LastError }, nil?: true
 
       # @!attribute object
       #   The object type, which is always `vector_store.file`.
@@ -35,8 +35,8 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel
       #   `completed`, `cancelled`, or `failed`. The status `completed` indicates that the
       #   vector store file is ready for use.
       #
-      #   @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status]
-      required :status, enum: -> { OpenAI::Models::VectorStores::VectorStoreFile::Status }
+      #   @return [Symbol, OpenAI::VectorStores::VectorStoreFile::Status]
+      required :status, enum: -> { OpenAI::VectorStores::VectorStoreFile::Status }
 
       # @!attribute usage_bytes
       #   The total vector store usage in bytes. Note that this may be different from the
@@ -63,18 +63,20 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel
       #
       #   @return [Hash{Symbol=>String, Float, Boolean}, nil]
       optional :attributes,
-               -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute] },
+               -> {
+                 OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]
+               },
                nil?: true
 
       # @!attribute chunking_strategy
       #   The strategy used to chunk the file.
       #
-      #   @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject, nil]
-      optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategy }
+      #   @return [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject, nil]
+      optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategy }
 
       # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Models::VectorStores::VectorStoreFile} for more details.
+      #   {OpenAI::VectorStores::VectorStoreFile} for more details.
       #
       #   A list of files attached to a vector store.
       #
@@ -82,31 +84,27 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel
       #
       #   @param id [String] The identifier, which can be referenced in API endpoints.
       #
       #   @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store file was created.
       #
-      #   @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a
-      #   ...
+      #   @param last_error [OpenAI::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a
       #
-      #   @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet
-      #   ...
+      #   @param status [Symbol, OpenAI::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet
       #
       #   @param usage_bytes [Integer] The total vector store usage in bytes. Note that this may be different from the
-      #   ...
       #
       #   @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect
-      #   ...
       #
-      #   @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      #   @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
       #
-      #   @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] The strategy used to chunk the file.
+      #   @param chunking_strategy [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject] The strategy used to chunk the file.
       #
       #   @param object [Symbol, :"vector_store.file"] The object type, which is always `vector_store.file`.
 
-      # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error
+      # @see OpenAI::VectorStores::VectorStoreFile#last_error
       class LastError < OpenAI::Internal::Type::BaseModel
         # @!attribute code
         #   One of `server_error` or `rate_limit_exceeded`.
         #
-        #   @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code]
-        required :code, enum: -> { OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code }
+        #   @return [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code]
+        required :code, enum: -> { OpenAI::VectorStores::VectorStoreFile::LastError::Code }
 
         # @!attribute message
         #   A human-readable description of the error.
@@ -118,13 +116,13 @@ class LastError < OpenAI::Internal::Type::BaseModel
         #   The last error associated with this vector store file. Will be `null` if there
         #   are no errors.
         #
-        #   @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`.
+        #   @param code [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`.
         #
         #   @param message [String] A human-readable description of the error.
 
         # One of `server_error` or `rate_limit_exceeded`.
         #
-        # @see OpenAI::Models::VectorStores::VectorStoreFile::LastError#code
+        # @see OpenAI::VectorStores::VectorStoreFile::LastError#code
         module Code
           extend OpenAI::Internal::Type::Enum
@@ -141,7 +139,7 @@ module Code
       #   `completed`, `cancelled`, or `failed`. The status `completed` indicates that the
       #   vector store file is ready for use.
       #
-      # @see OpenAI::Models::VectorStores::VectorStoreFile#status
+      # @see OpenAI::VectorStores::VectorStoreFile#status
       module Status
         extend OpenAI::Internal::Type::Enum
diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb
index 86d56390..b66e2b14 100644
--- a/lib/openai/models/vector_stores/vector_store_file_batch.rb
+++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb
@@ -20,8 +20,8 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel
 
       # @!attribute file_counts
       #
-      #   @return [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts]
-      required :file_counts, -> { OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts }
+      #   @return [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts]
+      required :file_counts, -> { OpenAI::VectorStores::VectorStoreFileBatch::FileCounts }
 
       # @!attribute object
       #   The object type, which is always `vector_store.file_batch`.
@@ -33,8 +33,8 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel
       #   The status of the vector store files batch, which can be either `in_progress`,
       #   `completed`, `cancelled` or `failed`.
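# For illustration only: a minimal sketch of how a caller might exercise the
# attribute union the models above describe. It assumes a configured
# `OpenAI::Client`; the resource path and identifiers below are assumptions
# based on the SDK surface shown in this patch, not canonical usage.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"]) # assumed constructor
file = client.vector_stores.files.update(
  "file-abc123",                                            # placeholder file ID
  vector_store_id: "vs-abc123",                             # placeholder store ID
  attributes: {category: "report", year: 2024, internal: true} # String/Float/Boolean union values
)
warn file.last_error&.message if file.status == :failed      # LastError is nil unless the file failed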
       #
-      #   @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status]
-      required :status, enum: -> { OpenAI::Models::VectorStores::VectorStoreFileBatch::Status }
+      #   @return [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status]
+      required :status, enum: -> { OpenAI::VectorStores::VectorStoreFileBatch::Status }
 
       # @!attribute vector_store_id
       #   The ID of the
@@ -47,26 +47,23 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel
 
       # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Models::VectorStores::VectorStoreFileBatch} for more details.
+      #   {OpenAI::VectorStores::VectorStoreFileBatch} for more details.
       #
       #   A batch of files attached to a vector store.
       #
       #   @param id [String] The identifier, which can be referenced in API endpoints.
       #
       #   @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store files batch was create
-      #   ...
       #
-      #   @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts]
+      #   @param file_counts [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts]
       #
-      #   @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, `
-      #   ...
+      #   @param status [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, `
       #
       #   @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect
-      #   ...
       #
       #   @param object [Symbol, :"vector_store.files_batch"] The object type, which is always `vector_store.file_batch`.
 
-      # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts
+      # @see OpenAI::VectorStores::VectorStoreFileBatch#file_counts
       class FileCounts < OpenAI::Internal::Type::BaseModel
         # @!attribute cancelled
         #   The number of files that where cancelled.
@@ -113,7 +110,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel
       #   The status of the vector store files batch, which can be either `in_progress`,
       #   `completed`, `cancelled` or `failed`.
       #
-      # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status
+      # @see OpenAI::VectorStores::VectorStoreFileBatch#status
       module Status
         extend OpenAI::Internal::Type::Enum
diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb
index cd1044ab..befc2a60 100644
--- a/lib/openai/resources/audio/speech.rb
+++ b/lib/openai/resources/audio/speech.rb
@@ -13,20 +13,15 @@ class Speech
      #
      # @param input [String] The text to generate audio for. The maximum length is 4096 characters.
      #
-      # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts):
-      # ...
+      # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts):
      #
-      # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
-      # ...
+      # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
      #
      # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not
-      # ...
      #
-      # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav
-      # ...
+      # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav
      #
      # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
@@ -34,7 +29,7 @@ class Speech
      #
      # @see OpenAI::Models::Audio::SpeechCreateParams
      def create(params)
-        parsed, options = OpenAI::Models::Audio::SpeechCreateParams.dump_request(params)
+        parsed, options = OpenAI::Audio::SpeechCreateParams.dump_request(params)
        @client.request(
          method: :post,
          path: "audio/speech",
diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb
index a3fc4454..df39fc22 100644
--- a/lib/openai/resources/audio/transcriptions.rb
+++ b/lib/openai/resources/audio/transcriptions.rb
@@ -15,35 +15,28 @@ class Transcriptions
      # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
      #
      # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl
-      # ...
      #
-      # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
-      # ...
+      # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
      #
-      # @param include [Array] Additional information to include in the transcription response. ...
+      # @param include [Array] Additional information to include in the transcription response.
      #
      # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt
-      # ...
      #
      # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
-      # ...
      #
-      # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
-      # ...
+      # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
      #
      # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-      # ...
      #
-      # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
-      # ...
+      # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]
+      # @return [OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose]
      #
      # @see OpenAI::Models::Audio::TranscriptionCreateParams
      def create(params)
-        parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params)
+        parsed, options = OpenAI::Audio::TranscriptionCreateParams.dump_request(params)
        if parsed[:stream]
          message = "Please use `#create_streaming` for the streaming use case."
          raise ArgumentError.new(message)
@@ -69,35 +62,28 @@ def create(params)
      # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {})
      #
      # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl
-      # ...
      #
-      # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
-      # ...
+      # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc
      #
-      # @param include [Array] Additional information to include in the transcription response. ...
+      # @param include [Array] Additional information to include in the transcription response.
      #
      # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt
-      # ...
      #
      # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
-      # ...
      #
-      # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
-      # ...
+      # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
      #
      # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-      # ...
      #
-      # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
-      # ...
+      # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::Stream]
+      # @return [OpenAI::Internal::Stream]
      #
      # @see OpenAI::Models::Audio::TranscriptionCreateParams
      def create_streaming(params)
-        parsed, options = OpenAI::Models::Audio::TranscriptionCreateParams.dump_request(params)
+        parsed, options = OpenAI::Audio::TranscriptionCreateParams.dump_request(params)
        unless parsed.fetch(:stream, true)
          message = "Please use `#create` for the non-streaming use case."
          raise ArgumentError.new(message)
        end
@@ -109,7 +95,7 @@ def create_streaming(params)
        @client.request(
          method: :post,
          path: "audio/transcriptions",
          headers: {"content-type" => "multipart/form-data", "accept" => "text/event-stream"},
          body: parsed,
          stream: OpenAI::Internal::Stream,
-          model: OpenAI::Models::Audio::TranscriptionStreamEvent,
+          model: OpenAI::Audio::TranscriptionStreamEvent,
          options: options
        )
      end
diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb
index 0a43a53a..f43551ab 100644
--- a/lib/openai/resources/audio/translations.rb
+++ b/lib/openai/resources/audio/translations.rb
@@ -12,27 +12,22 @@ class Translations
      # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {})
      #
      # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac,
-      # ...
      #
-      # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh
-      # ...
+      # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh
      #
      # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment
-      # ...
      #
-      # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
-      # ...
+      # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo
      #
      # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]
+      # @return [OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose]
      #
      # @see OpenAI::Models::Audio::TranslationCreateParams
      def create(params)
-        parsed, options = OpenAI::Models::Audio::TranslationCreateParams.dump_request(params)
+        parsed, options = OpenAI::Audio::TranslationCreateParams.dump_request(params)
        @client.request(
          method: :post,
          path: "audio/translations",
diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb
index d63aac23..718d0a81 100644
--- a/lib/openai/resources/batches.rb
+++ b/lib/openai/resources/batches.rb
@@ -10,30 +10,22 @@ class Batches
      #
      # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {})
      #
-      # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h`
-      # ...
+      # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h`
      #
-      # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses`
-      # ...
+      # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses`
      #
-      # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. ...
+      # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch.
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Batch]
+      # @return [OpenAI::Batch]
      #
      # @see OpenAI::Models::BatchCreateParams
      def create(params)
-        parsed, options = OpenAI::Models::BatchCreateParams.dump_request(params)
-        @client.request(
-          method: :post,
-          path: "batches",
-          body: parsed,
-          model: OpenAI::Models::Batch,
-          options: options
-        )
+        parsed, options = OpenAI::BatchCreateParams.dump_request(params)
+        @client.request(method: :post, path: "batches", body: parsed, model: OpenAI::Batch, options: options)
      end
 
      # Retrieves a batch.
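# For illustration only: a minimal sketch of the `#create` / `#create_streaming`
# split the transcriptions resource above enforces, assuming a configured
# `OpenAI::Client` (the file path and model name are placeholders):
transcription = client.audio.transcriptions.create(
  file: Pathname("speech.mp3"),     # uploaded as multipart/form-data
  model: "gpt-4o-transcribe"
) # passing `stream: true` here raises ArgumentError, per the guard above
client.audio.transcriptions.create_streaming(
  file: Pathname("speech.mp3"),
  model: "gpt-4o-transcribe"
).each { |event| print event }      # SSE events decoded via OpenAI::Internal::Stream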
@@ -44,14 +36,14 @@ def create(params)
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Batch]
+      # @return [OpenAI::Batch]
      #
      # @see OpenAI::Models::BatchRetrieveParams
      def retrieve(batch_id, params = {})
        @client.request(
          method: :get,
          path: ["batches/%1$s", batch_id],
-          model: OpenAI::Models::Batch,
+          model: OpenAI::Batch,
          options: params[:request_options]
        )
      end
@@ -64,24 +56,22 @@ def retrieve(batch_id, params = {})
      # @overload list(after: nil, limit: nil, request_options: {})
      #
      # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
-      # ...
      #
      # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::CursorPage]
+      # @return [OpenAI::Internal::CursorPage]
      #
      # @see OpenAI::Models::BatchListParams
      def list(params = {})
-        parsed, options = OpenAI::Models::BatchListParams.dump_request(params)
+        parsed, options = OpenAI::BatchListParams.dump_request(params)
        @client.request(
          method: :get,
          path: "batches",
          query: parsed,
          page: OpenAI::Internal::CursorPage,
-          model: OpenAI::Models::Batch,
+          model: OpenAI::Batch,
          options: options
        )
      end
@@ -96,14 +86,14 @@ def list(params = {})
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Batch]
+      # @return [OpenAI::Batch]
      #
      # @see OpenAI::Models::BatchCancelParams
      def cancel(batch_id, params = {})
        @client.request(
          method: :post,
          path: ["batches/%1$s/cancel", batch_id],
-          model: OpenAI::Models::Batch,
+          model: OpenAI::Batch,
          options: params[:request_options]
        )
      end
diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb
index 25b83738..ca192073 100644
--- a/lib/openai/resources/beta/assistants.rb
+++ b/lib/openai/resources/beta/assistants.rb
@@ -11,47 +11,40 @@ class Assistants
      #
      # @overload create(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {})
      #
-      # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co
-      # ...
+      # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co
      #
-      # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ...
+      # @param description [String, nil] The description of the assistant. The maximum length is 512 characters.
      #
      # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c
-      # ...
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ...
+      # @param name [String, nil] The name of the assistant. The maximum length is 256 characters.
      #
-      # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ...
+      # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only**
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
      #
      # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
-      # ...
      #
-      # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
-      # ...
+      # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
      #
-      # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per
-      # ...
+      # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per
      #
      # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Assistant]
+      # @return [OpenAI::Beta::Assistant]
      #
      # @see OpenAI::Models::Beta::AssistantCreateParams
      def create(params)
-        parsed, options = OpenAI::Models::Beta::AssistantCreateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::AssistantCreateParams.dump_request(params)
        @client.request(
          method: :post,
          path: "assistants",
          body: parsed,
-          model: OpenAI::Models::Beta::Assistant,
+          model: OpenAI::Beta::Assistant,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -64,14 +57,14 @@ def create(params)
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Assistant]
+      # @return [OpenAI::Beta::Assistant]
      #
      # @see OpenAI::Models::Beta::AssistantRetrieveParams
      def retrieve(assistant_id, params = {})
        @client.request(
          method: :get,
          path: ["assistants/%1$s", assistant_id],
-          model: OpenAI::Models::Beta::Assistant,
+          model: OpenAI::Beta::Assistant,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
        )
      end
@@ -85,47 +78,40 @@ def retrieve(assistant_id, params = {})
      #
      # @param assistant_id [String] The ID of the assistant to modify.
      #
-      # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. ...
+      # @param description [String, nil] The description of the assistant. The maximum length is 512 characters.
      #
      # @param instructions [String, nil] The system instructions that the assistant uses. The maximum length is 256,000 c
-      # ...
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co
-      # ...
+      # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co
      #
-      # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. ...
+      # @param name [String, nil] The name of the assistant. The maximum length is 256 characters.
      #
-      # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ...
+      # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only**
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
      #
      # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
-      # ...
      #
-      # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
-      # ...
+      # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
      #
-      # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per
-      # ...
+      # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per
      #
      # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Assistant]
+      # @return [OpenAI::Beta::Assistant]
      #
      # @see OpenAI::Models::Beta::AssistantUpdateParams
      def update(assistant_id, params = {})
-        parsed, options = OpenAI::Models::Beta::AssistantUpdateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::AssistantUpdateParams.dump_request(params)
        @client.request(
          method: :post,
          path: ["assistants/%1$s", assistant_id],
          body: parsed,
-          model: OpenAI::Models::Beta::Assistant,
+          model: OpenAI::Beta::Assistant,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -138,30 +124,26 @@ def update(assistant_id, params = {})
      # @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {})
      #
      # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
-      # ...
      #
      # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
-      # ...
      #
      # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
-      # ...
      #
-      # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
-      # ...
+      # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::CursorPage]
+      # @return [OpenAI::Internal::CursorPage]
      #
      # @see OpenAI::Models::Beta::AssistantListParams
      def list(params = {})
-        parsed, options = OpenAI::Models::Beta::AssistantListParams.dump_request(params)
+        parsed, options = OpenAI::Beta::AssistantListParams.dump_request(params)
        @client.request(
          method: :get,
          path: "assistants",
          query: parsed,
          page: OpenAI::Internal::CursorPage,
-          model: OpenAI::Models::Beta::Assistant,
+          model: OpenAI::Beta::Assistant,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -174,14 +156,14 @@ def list(params = {})
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::AssistantDeleted]
+      # @return [OpenAI::Beta::AssistantDeleted]
      #
      # @see OpenAI::Models::Beta::AssistantDeleteParams
      def delete(assistant_id, params = {})
        @client.request(
          method: :delete,
          path: ["assistants/%1$s", assistant_id],
-          model: OpenAI::Models::Beta::AssistantDeleted,
+          model: OpenAI::Beta::AssistantDeleted,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
        )
      end
diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb
index 754a760f..37b1d488 100644
--- a/lib/openai/resources/beta/threads.rb
+++ b/lib/openai/resources/beta/threads.rb
@@ -17,26 +17,24 @@ class Threads
      #
      # @overload create(messages: nil, metadata: nil, tool_resources: nil, request_options: {})
      #
-      # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
-      # ...
+      # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
-      # ...
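# For illustration only: a minimal sketch of the assistants lifecycle the
# resource above exposes. Every call routes through `@client.request` with the
# `OpenAI-Beta: assistants=v2` header injected via `extra_headers`, as shown in
# the hunks above (client setup and identifiers are placeholders):
assistant = client.beta.assistants.create(model: "gpt-4o", name: "Math Tutor")
client.beta.assistants.update(assistant.id, instructions: "Answer tersely.")
client.beta.assistants.list(limit: 20, order: :desc).each { |a| puts a.id }
client.beta.assistants.delete(assistant.id) # returns an AssistantDeleted record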
+      # @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Thread]
+      # @return [OpenAI::Beta::Thread]
      #
      # @see OpenAI::Models::Beta::ThreadCreateParams
      def create(params = {})
-        parsed, options = OpenAI::Models::Beta::ThreadCreateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::ThreadCreateParams.dump_request(params)
        @client.request(
          method: :post,
          path: "threads",
          body: parsed,
-          model: OpenAI::Models::Beta::Thread,
+          model: OpenAI::Beta::Thread,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -49,14 +47,14 @@ def create(params = {})
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Thread]
+      # @return [OpenAI::Beta::Thread]
      #
      # @see OpenAI::Models::Beta::ThreadRetrieveParams
      def retrieve(thread_id, params = {})
        @client.request(
          method: :get,
          path: ["threads/%1$s", thread_id],
-          model: OpenAI::Models::Beta::Thread,
+          model: OpenAI::Beta::Thread,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
        )
      end
@@ -70,23 +68,22 @@ def retrieve(thread_id, params = {})
      #
      # @param thread_id [String] The ID of the thread to modify. Only the `metadata` can be modified.
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
-      # ...
+      # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Thread]
+      # @return [OpenAI::Beta::Thread]
      #
      # @see OpenAI::Models::Beta::ThreadUpdateParams
      def update(thread_id, params = {})
-        parsed, options = OpenAI::Models::Beta::ThreadUpdateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::ThreadUpdateParams.dump_request(params)
        @client.request(
          method: :post,
          path: ["threads/%1$s", thread_id],
          body: parsed,
-          model: OpenAI::Models::Beta::Thread,
+          model: OpenAI::Beta::Thread,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -99,14 +96,14 @@ def update(thread_id, params = {})
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::ThreadDeleted]
+      # @return [OpenAI::Beta::ThreadDeleted]
      #
      # @see OpenAI::Models::Beta::ThreadDeleteParams
      def delete(thread_id, params = {})
        @client.request(
          method: :delete,
          path: ["threads/%1$s", thread_id],
-          model: OpenAI::Models::Beta::ThreadDeleted,
+          model: OpenAI::Beta::ThreadDeleted,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h}
        )
      end
@@ -121,54 +118,42 @@ def delete(thread_id, params = {})
      # @overload create_and_run(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
      #
      # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista
-      # ...
      #
      # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi
-      # ...
      #
      # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the
-      # ...
      #
      # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run.
-      # ...
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
-      # ...
+      # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
      #
      # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g
-      # ...
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
      #
      # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
-      # ...
      #
-      # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ...
+      # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a
      #
-      # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ...
+      # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model.
      #
-      # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
-      # ...
+      # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
      #
-      # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify
-      # ...
+      # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify
      #
      # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
-      # ...
      #
-      # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
-      # ...
+      # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Run]
+      # @return [OpenAI::Beta::Threads::Run]
      #
      # @see OpenAI::Models::Beta::ThreadCreateAndRunParams
      def create_and_run(params)
-        parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params)
+        parsed, options = OpenAI::Beta::ThreadCreateAndRunParams.dump_request(params)
        if parsed[:stream]
          message = "Please use `#stream_raw` for the streaming use case."
          raise ArgumentError.new(message)
@@ -177,7 +162,7 @@ def create_and_run(params)
          method: :post,
          path: "threads/runs",
          body: parsed,
-          model: OpenAI::Models::Beta::Threads::Run,
+          model: OpenAI::Beta::Threads::Run,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -193,54 +178,42 @@ def create_and_run(params)
      # @overload stream_raw(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {})
      #
      # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista
-      # ...
      #
      # @param instructions [String, nil] Override the default system message of the assistant. This is useful for modifyi
-      # ...
      #
      # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens that may be used over the course of the
-      # ...
      #
      # @param max_prompt_tokens [Integer, nil] The maximum number of prompt tokens that may be used over the course of the run.
-      # ...
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
-      # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
-      # ...
+      # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to
      #
      # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g
-      # ...
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https:
      #
      # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m
-      # ...
      #
-      # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a ...
+      # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a
      #
-      # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. ...
+      # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model.
      #
-      # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
-      # ...
+      # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe
      #
-      # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify
-      # ...
+      # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify
      #
      # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the
-      # ...
      #
-      # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
-      # ...
+      # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::Stream]
+      # @return [OpenAI::Internal::Stream]
      #
      # @see OpenAI::Models::Beta::ThreadCreateAndRunParams
      def stream_raw(params)
-        parsed, options = OpenAI::Models::Beta::ThreadCreateAndRunParams.dump_request(params)
+        parsed, options = OpenAI::Beta::ThreadCreateAndRunParams.dump_request(params)
        unless parsed.fetch(:stream, true)
          message = "Please use `#create_and_run` for the non-streaming use case."
          raise ArgumentError.new(message)
@@ -252,7 +225,7 @@ def stream_raw(params)
          headers: {"accept" => "text/event-stream"},
          body: parsed,
          stream: OpenAI::Internal::Stream,
-          model: OpenAI::Models::Beta::AssistantStreamEvent,
+          model: OpenAI::Beta::AssistantStreamEvent,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb
index d60ca5f6..503f7228 100644
--- a/lib/openai/resources/beta/threads/messages.rb
+++ b/lib/openai/resources/beta/threads/messages.rb
@@ -13,28 +13,27 @@ class Messages
      # @overload create(thread_id, content:, role:, attachments: nil, metadata: nil, request_options: {})
      #
      # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t
-      # ...
      #
-      # @param content [String, Array] The text contents of the message.
+      # @param content [String, Array] The text contents of the message.
      #
-      # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: ...
+      # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include:
      #
-      # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to.
+      # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to.
      #
-      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ...
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Message]
+      # @return [OpenAI::Beta::Threads::Message]
      #
      # @see OpenAI::Models::Beta::Threads::MessageCreateParams
      def create(thread_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::MessageCreateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::MessageCreateParams.dump_request(params)
        @client.request(
          method: :post,
          path: ["threads/%1$s/messages", thread_id],
          body: parsed,
-          model: OpenAI::Models::Beta::Threads::Message,
+          model: OpenAI::Beta::Threads::Message,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -49,15 +48,14 @@ def create(thread_id, params)
      # @param message_id [String] The ID of the message to retrieve.
      #
      # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t
-      # ...
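# For illustration only: a minimal sketch of the streaming split enforced by the
# guards above. `#create_and_run` rejects `stream: true`, while `#stream_raw`
# rejects `stream: false` (client setup and IDs are placeholders):
run = client.beta.threads.create_and_run(assistant_id: "asst_123")
client.beta.threads.stream_raw(assistant_id: "asst_123").each do |event|
  puts event.class # events are decoded as assistant stream event variants
end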
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Message]
+      # @return [OpenAI::Beta::Threads::Message]
      #
      # @see OpenAI::Models::Beta::Threads::MessageRetrieveParams
      def retrieve(message_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::MessageRetrieveParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::MessageRetrieveParams.dump_request(params)
        thread_id = parsed.delete(:thread_id) do
          raise ArgumentError.new("missing required path argument #{_1}")
        end
        @client.request(
          method: :get,
          path: ["threads/%1$s/messages/%2$s", thread_id, message_id],
-          model: OpenAI::Models::Beta::Threads::Message,
+          model: OpenAI::Beta::Threads::Message,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -82,15 +80,14 @@ def retrieve(message_id, params)
      # @param message_id [String] Path param: The ID of the message to modify.
      #
      # @param thread_id [String] Path param: The ID of the thread to which this message belongs.
      #
      # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Message]
+      # @return [OpenAI::Beta::Threads::Message]
      #
      # @see OpenAI::Models::Beta::Threads::MessageUpdateParams
      def update(message_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::MessageUpdateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::MessageUpdateParams.dump_request(params)
        thread_id = parsed.delete(:thread_id) do
          raise ArgumentError.new("missing required path argument #{_1}")
        end
        @client.request(
          method: :post,
          path: ["threads/%1$s/messages/%2$s", thread_id, message_id],
          body: parsed,
-          model: OpenAI::Models::Beta::Threads::Message,
+          model: OpenAI::Beta::Threads::Message,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -112,35 +109,30 @@ def update(message_id, params)
      # @overload list(thread_id, after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {})
      #
      # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t
-      # ...
      #
      # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
-      # ...
      #
      # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
-      # ...
      #
      # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
-      # ...
      #
-      # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
-      # ...
+      # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
      #
-      # @param run_id [String] Filter messages by the run ID that generated them. ...
+      # @param run_id [String] Filter messages by the run ID that generated them.
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::CursorPage]
+      # @return [OpenAI::Internal::CursorPage]
      #
      # @see OpenAI::Models::Beta::Threads::MessageListParams
      def list(thread_id, params = {})
-        parsed, options = OpenAI::Models::Beta::Threads::MessageListParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::MessageListParams.dump_request(params)
        @client.request(
          method: :get,
          path: ["threads/%1$s/messages", thread_id],
          query: parsed,
          page: OpenAI::Internal::CursorPage,
-          model: OpenAI::Models::Beta::Threads::Message,
+          model: OpenAI::Beta::Threads::Message,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -155,11 +147,11 @@ def list(thread_id, params = {})
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::MessageDeleted]
+      # @return [OpenAI::Beta::Threads::MessageDeleted]
      #
      # @see OpenAI::Models::Beta::Threads::MessageDeleteParams
      def delete(message_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::MessageDeleteParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::MessageDeleteParams.dump_request(params)
        thread_id = parsed.delete(:thread_id) do
          raise ArgumentError.new("missing required path argument #{_1}")
        end
        @client.request(
          method: :delete,
          path: ["threads/%1$s/messages/%2$s", thread_id, message_id],
-          model: OpenAI::Models::Beta::Threads::MessageDeleted,
+          model: OpenAI::Beta::Threads::MessageDeleted,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb
index 3f1b6d0d..cbf1b293 100644
--- a/lib/openai/resources/beta/threads/runs.rb
+++ b/lib/openai/resources/beta/threads/runs.rb
@@ -21,60 +21,46 @@ class Runs
      # @param thread_id [String] Path param: The ID of the thread to run.
      #
      # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer
-      # ...
      #
-      # @param include [Array] Query param: A list of additional fields to include in the response. Currently t
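# For illustration only: a minimal sketch of cursor pagination over the
# `CursorPage` wrapper that `#list` above returns. `auto_paging_each` follows
# the SDK's page interface; treat the exact helper name as an assumption:
page = client.beta.threads.messages.list("thread_123", limit: 50) # placeholder thread ID
page.auto_paging_each do |message|
  puts message.id # later pages are fetched lazily using `after` cursors
end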
-      # ...
+      # @param include [Array] Query param: A list of additional fields to include in the response. Currently t
      #
      # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t
-      # ...
      #
-      # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run.
+      # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run.
      #
      # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re
-      # ...
      #
      # @param max_completion_tokens [Integer, nil] Body param: The maximum number of completion tokens that may be used over the co
-      # ...
      #
      # @param max_prompt_tokens [Integer, nil] Body param: The maximum number of prompt tokens that may be used over the course
-      # ...
      #
      # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca
-      # ...
      #
-      # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference
-      # ...
+      # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference
      #
      # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena
-      # ...
      #
-      # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** ...
+      # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only**
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP
      #
      # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik
-      # ...
      #
-      # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. ...
+      # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model.
      #
-      # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu
-      # ...
+      # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu
      #
      # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling
-      # ...
      #
-      # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th
-      # ...
+      # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Run]
+      # @return [OpenAI::Beta::Threads::Run]
      #
      # @see OpenAI::Models::Beta::Threads::RunCreateParams
      def create(thread_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::RunCreateParams.dump_request(params)
        if parsed[:stream]
          message = "Please use `#create_stream_raw` for the streaming use case."
          raise ArgumentError.new(message)
@@ -85,7 +71,7 @@ def create(thread_id, params)
          path: ["threads/%1$s/runs", thread_id],
          query: parsed.slice(*query_params),
          body: parsed.except(*query_params),
-          model: OpenAI::Models::Beta::Threads::Run,
+          model: OpenAI::Beta::Threads::Run,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -103,60 +89,46 @@ def create(thread_id, params)
      # @param thread_id [String] Path param: The ID of the thread to run.
      #
      # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer
-      # ...
      #
-      # @param include [Array] Query param: A list of additional fields to include in the response. Currently t
-      # ...
+      # @param include [Array] Query param: A list of additional fields to include in the response. Currently t
      #
      # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t
-      # ...
      #
-      # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run.
+      # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run.
      #
      # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re
-      # ...
      #
      # @param max_completion_tokens [Integer, nil] Body param: The maximum number of completion tokens that may be used over the co
-      # ...
      #
      # @param max_prompt_tokens [Integer, nil] Body param: The maximum number of prompt tokens that may be used over the course
-      # ...
      #
      # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca
-      # ...
      #
-      # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference
-      # ...
+      # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference
      #
      # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena
-      # ...
      #
-      # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** ...
+      # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only**
      #
-      # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP
-      # ...
+      # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP
      #
      # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik
-      # ...
      #
-      # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. ...
+      # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model.
      #
-      # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu
-      # ...
+      # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu
      #
      # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling
-      # ...
      #
-      # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th
-      # ...
+      # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Internal::Stream]
+      # @return [OpenAI::Internal::Stream]
      #
      # @see OpenAI::Models::Beta::Threads::RunCreateParams
      def create_stream_raw(thread_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::RunCreateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::RunCreateParams.dump_request(params)
        unless parsed.fetch(:stream, true)
          message = "Please use `#create` for the non-streaming use case."
          raise ArgumentError.new(message)
        end
@@ -170,7 +142,7 @@ def create_stream_raw(thread_id, params)
          headers: {"accept" => "text/event-stream"},
          body: parsed.except(*query_params),
          stream: OpenAI::Internal::Stream,
-          model: OpenAI::Models::Beta::AssistantStreamEvent,
+          model: OpenAI::Beta::AssistantStreamEvent,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -185,15 +157,14 @@ def create_stream_raw(thread_id, params)
      # @param run_id [String] The ID of the run to retrieve.
      #
      # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Run]
+      # @return [OpenAI::Beta::Threads::Run]
      #
      # @see OpenAI::Models::Beta::Threads::RunRetrieveParams
      def retrieve(run_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::RunRetrieveParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::RunRetrieveParams.dump_request(params)
        thread_id = parsed.delete(:thread_id) do
          raise ArgumentError.new("missing required path argument #{_1}")
        end
@@ -201,7 +172,7 @@ def retrieve(run_id, params)
        @client.request(
          method: :get,
          path: ["threads/%1$s/runs/%2$s", thread_id, run_id],
-          model: OpenAI::Models::Beta::Threads::Run,
+          model: OpenAI::Beta::Threads::Run,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -216,18 +187,16 @@ def retrieve(run_id, params)
      # @param run_id [String] Path param: The ID of the run to modify.
      #
      # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc
-      # ...
      #
      # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca
-      # ...
      #
      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
      #
-      # @return [OpenAI::Models::Beta::Threads::Run]
+      # @return [OpenAI::Beta::Threads::Run]
      #
      # @see OpenAI::Models::Beta::Threads::RunUpdateParams
      def update(run_id, params)
-        parsed, options = OpenAI::Models::Beta::Threads::RunUpdateParams.dump_request(params)
+        parsed, options = OpenAI::Beta::Threads::RunUpdateParams.dump_request(params)
        thread_id = parsed.delete(:thread_id) do
          raise ArgumentError.new("missing required path argument #{_1}")
        end
@@ -236,7 +205,7 @@ def update(run_id, params)
          method: :post,
          path: ["threads/%1$s/runs/%2$s", thread_id, run_id],
          body: parsed,
-          model: OpenAI::Models::Beta::Threads::Run,
+          model: OpenAI::Beta::Threads::Run,
          options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options}
        )
      end
@@ -251,30 +220,26 @@ def update(run_id, params)
      # @param thread_id [String] The ID of the thread the run belongs to.
      #
      # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place
-      # ...
      #
      # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place
-      # ...
# # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::RunListParams def list(thread_id, params = {}) - parsed, options = OpenAI::Models::Beta::Threads::RunListParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::RunListParams.dump_request(params) @client.request( method: :get, path: ["threads/%1$s/runs", thread_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::Beta::Threads::Run, + model: OpenAI::Beta::Threads::Run, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -289,11 +254,11 @@ def list(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Beta::Threads::Run] + # @return [OpenAI::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunCancelParams def cancel(run_id, params) - parsed, options = OpenAI::Models::Beta::Threads::RunCancelParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::RunCancelParams.dump_request(params) thread_id = parsed.delete(:thread_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -301,7 +266,7 @@ def cancel(run_id, params) @client.request( method: :post, path: ["threads/%1$s/runs/%2$s/cancel", thread_id, run_id], - model: OpenAI::Models::Beta::Threads::Run, + model: OpenAI::Beta::Threads::Run, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -322,17 +287,16 @@ def cancel(run_id, params) # @param run_id [String] Path param: The ID of the run that requires the tool output submission. # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc - # ... # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Beta::Threads::Run] + # @return [OpenAI::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs(run_id, params) - parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) if parsed[:stream] message = "Please use `#submit_tool_outputs_stream_raw` for the streaming use case." 
raise ArgumentError.new(message) @@ -345,7 +309,7 @@ def submit_tool_outputs(run_id, params) method: :post, path: ["threads/%1$s/runs/%2$s/submit_tool_outputs", thread_id, run_id], body: parsed, - model: OpenAI::Models::Beta::Threads::Run, + model: OpenAI::Beta::Threads::Run, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -366,17 +330,16 @@ def submit_tool_outputs(run_id, params) # @param run_id [String] Path param: The ID of the run that requires the tool output submission. # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc - # ... # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs_stream_raw(run_id, params) - parsed, options = OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::RunSubmitToolOutputsParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#submit_tool_outputs` for the non-streaming use case." raise ArgumentError.new(message) @@ -392,7 +355,7 @@ def submit_tool_outputs_stream_raw(run_id, params) headers: {"accept" => "text/event-stream"}, body: parsed, stream: OpenAI::Internal::Stream, - model: OpenAI::Models::Beta::AssistantStreamEvent, + model: OpenAI::Beta::AssistantStreamEvent, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 011a0874..eaa27d6e 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -19,16 +19,15 @@ class Steps # # @param run_id [String] Path param: The ID of the run to which the run step belongs. # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t - # ... + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Beta::Threads::Runs::RunStep] # # @see OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams def retrieve(step_id, params) - parsed, options = OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::Runs::StepRetrieveParams.dump_request(params) thread_id = parsed.delete(:thread_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -41,7 +40,7 @@ def retrieve(step_id, params) method: :get, path: ["threads/%1$s/runs/%2$s/steps/%3$s", thread_id, run_id, step_id], query: parsed, - model: OpenAI::Models::Beta::Threads::Runs::RunStep, + model: OpenAI::Beta::Threads::Runs::RunStep, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -58,27 +57,22 @@ def retrieve(step_id, params) # @param thread_id [String] Path param: The ID of the thread the run and run steps belong to. # # @param after [String] Query param: A cursor for use in pagination. 
`after` is an object ID that define - # ... # # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin - # ... # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t - # ... + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be - # ... # - # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ... + # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::Runs::StepListParams def list(run_id, params) - parsed, options = OpenAI::Models::Beta::Threads::Runs::StepListParams.dump_request(params) + parsed, options = OpenAI::Beta::Threads::Runs::StepListParams.dump_request(params) thread_id = parsed.delete(:thread_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -88,7 +82,7 @@ def list(run_id, params) path: ["threads/%1$s/runs/%2$s/steps", thread_id, run_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::Beta::Threads::Runs::RunStep, + model: OpenAI::Beta::Threads::Runs::RunStep, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index c8b2b3ee..b44f7afc 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -32,80 +32,73 @@ class Completions # # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with ... + # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # - # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. ... + # @param functions [Array] Deprecated in favor of `tools`. # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # - # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, # # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, - # ... # - # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. ... + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y - # ... # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # - # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. # - # @param seed [Integer, nil] This feature is in Beta. ... + # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... 
+ # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a - # ... + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # - # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... + # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Chat::ChatCompletion] + # @return [OpenAI::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionCreateParams def create(params) - parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) + parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params) if parsed[:stream] message = "Please use `#stream_raw` for the streaming use case." 
raise ArgumentError.new(message) @@ -114,7 +107,7 @@ def create(params) method: :post, path: "chat/completions", body: parsed, - model: OpenAI::Models::Chat::ChatCompletion, + model: OpenAI::Chat::ChatCompletion, options: options ) end @@ -144,80 +137,73 @@ def create(params) # # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the ... + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with ... + # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # - # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. ... + # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. ... + # @param functions [Array] Deprecated in favor of `tools`. # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # - # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, ... + # @param logprobs [Boolean, nil] Whether to return log probabilities of the output tokens or not. If true, # # @param max_completion_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a completion, - # ... # - # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the ... + # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. ... 
+ # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y - # ... # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g - # ... # - # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is ... + # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # - # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on ... + # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** ... + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. ... + # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. # - # @param seed [Integer, nil] This feature is in Beta. ... + # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for ... + # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. ... + # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a - # ... + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # - # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to ... 
+ # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # - # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. ... + # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams def stream_raw(params) - parsed, options = OpenAI::Models::Chat::CompletionCreateParams.dump_request(params) + parsed, options = OpenAI::Chat::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." raise ArgumentError.new(message) @@ -229,7 +215,7 @@ def stream_raw(params) headers: {"accept" => "text/event-stream"}, body: parsed, stream: OpenAI::Internal::Stream, - model: OpenAI::Models::Chat::ChatCompletionChunk, + model: OpenAI::Chat::ChatCompletionChunk, options: options ) end @@ -243,14 +229,14 @@ def stream_raw(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Chat::ChatCompletion] + # @return [OpenAI::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionRetrieveParams def retrieve(completion_id, params = {}) @client.request( method: :get, path: ["chat/completions/%1$s", completion_id], - model: OpenAI::Models::Chat::ChatCompletion, + model: OpenAI::Chat::ChatCompletion, options: params[:request_options] ) end @@ -266,20 +252,20 @@ def retrieve(completion_id, params = {}) # # @param completion_id [String] The ID of the chat completion to update. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Chat::ChatCompletion] + # @return [OpenAI::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionUpdateParams def update(completion_id, params) - parsed, options = OpenAI::Models::Chat::CompletionUpdateParams.dump_request(params) + parsed, options = OpenAI::Chat::CompletionUpdateParams.dump_request(params) @client.request( method: :post, path: ["chat/completions/%1$s", completion_id], body: parsed, - model: OpenAI::Models::Chat::ChatCompletion, + model: OpenAI::Chat::ChatCompletion, options: options ) end @@ -296,26 +282,25 @@ def update(completion_id, params) # # @param limit [Integer] Number of Chat Completions to retrieve. # - # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: ... + # @param metadata [Hash{Symbol=>String}, nil] A list of metadata keys to filter the Chat Completions by. Example: # # @param model [String] The model used to generate the Chat Completions. 
# - # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` - # ... + # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::CompletionListParams def list(params = {}) - parsed, options = OpenAI::Models::Chat::CompletionListParams.dump_request(params) + parsed, options = OpenAI::Chat::CompletionListParams.dump_request(params) @client.request( method: :get, path: "chat/completions", query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::Chat::ChatCompletion, + model: OpenAI::Chat::ChatCompletion, options: options ) end @@ -329,14 +314,14 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Chat::ChatCompletionDeleted] + # @return [OpenAI::Chat::ChatCompletionDeleted] # # @see OpenAI::Models::Chat::CompletionDeleteParams def delete(completion_id, params = {}) @client.request( method: :delete, path: ["chat/completions/%1$s", completion_id], - model: OpenAI::Models::Chat::ChatCompletionDeleted, + model: OpenAI::Chat::ChatCompletionDeleted, options: params[:request_options] ) end diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index a1dd20ca..36a3e3c8 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -19,22 +19,21 @@ class Messages # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo - # ... + # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::Completions::MessageListParams def list(completion_id, params = {}) - parsed, options = OpenAI::Models::Chat::Completions::MessageListParams.dump_request(params) + parsed, options = OpenAI::Chat::Completions::MessageListParams.dump_request(params) @client.request( method: :get, path: ["chat/completions/%1$s/messages", completion_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::Chat::ChatCompletionStoreMessage, + model: OpenAI::Chat::ChatCompletionStoreMessage, options: options ) end diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index 7b563203..f65e1891 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -12,58 +12,47 @@ class Completions # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. 
You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings - # ... # # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with - # ... # - # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # ... # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we - # ... # # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi - # ... # - # @param n [Integer, nil] How many completions to generate for each prompt. ... + # @param n [Integer, nil] How many completions to generate for each prompt. # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe - # ... # # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su - # ... # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # - # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Completion] + # @return [OpenAI::Completion] # # @see OpenAI::Models::CompletionCreateParams def create(params) - parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) + parsed, options = OpenAI::CompletionCreateParams.dump_request(params) if parsed[:stream] message = "Please use `#create_streaming` for the streaming use case." 
raise ArgumentError.new(message) @@ -72,7 +61,7 @@ def create(params) method: :post, path: "completions", body: parsed, - model: OpenAI::Models::Completion, + model: OpenAI::Completion, options: options ) end @@ -86,58 +75,47 @@ def create(params) # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings - # ... # # @param best_of [Integer, nil] Generates `best_of` completions server-side and returns the "best" (the one with - # ... # - # @param echo [Boolean, nil] Echo back the prompt in addition to the completion ... + # @param echo [Boolean, nil] Echo back the prompt in addition to the completion # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on their - # ... # - # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. ... + # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # # @param logprobs [Integer, nil] Include the log probabilities on the `logprobs` most likely output tokens, as we - # ... # # @param max_tokens [Integer, nil] The maximum number of [tokens](/tokenizer) that can be generated in the completi - # ... # - # @param n [Integer, nil] How many completions to generate for each prompt. ... + # @param n [Integer, nil] How many completions to generate for each prompt. # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on whethe - # ... # # @param seed [Integer, nil] If specified, our system will make a best effort to sample deterministically, su - # ... # - # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. ... + # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. ... + # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # - # @param suffix [String, nil] The suffix that comes after a completion of inserted text. ... + # @param suffix [String, nil] The suffix that comes after a completion of inserted text. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the - # ... # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::CompletionCreateParams def create_streaming(params) - parsed, options = OpenAI::Models::CompletionCreateParams.dump_request(params) + parsed, options = OpenAI::CompletionCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." raise ArgumentError.new(message) @@ -149,7 +127,7 @@ def create_streaming(params) headers: {"accept" => "text/event-stream"}, body: parsed, stream: OpenAI::Internal::Stream, - model: OpenAI::Models::Completion, + model: OpenAI::Completion, options: options ) end diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index 2e0577e6..8d709c8b 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -11,32 +11,27 @@ class Embeddings # @overload create(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) # # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i - # ... # - # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co - # ... + # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo - # ... # - # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http - # ... + # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::CreateEmbeddingResponse] + # @return [OpenAI::CreateEmbeddingResponse] # # @see OpenAI::Models::EmbeddingCreateParams def create(params) - parsed, options = OpenAI::Models::EmbeddingCreateParams.dump_request(params) + parsed, options = OpenAI::EmbeddingCreateParams.dump_request(params) @client.request( method: :post, path: "embeddings", body: parsed, - model: OpenAI::Models::CreateEmbeddingResponse, + model: OpenAI::CreateEmbeddingResponse, options: options ) end diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 9decea6d..23f0de0b 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -17,11 +17,11 @@ class Evals # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. 
+ # @param testing_criteria [Array] A list of graders for all eval runs in this group. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # @@ -31,7 +31,7 @@ class Evals # # @see OpenAI::Models::EvalCreateParams def create(params) - parsed, options = OpenAI::Models::EvalCreateParams.dump_request(params) + parsed, options = OpenAI::EvalCreateParams.dump_request(params) @client.request( method: :post, path: "evals", @@ -70,7 +70,7 @@ def retrieve(eval_id, params = {}) # # @param eval_id [String] The ID of the evaluation to update. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] Rename the evaluation. # @@ -80,7 +80,7 @@ def retrieve(eval_id, params = {}) # # @see OpenAI::Models::EvalUpdateParams def update(eval_id, params = {}) - parsed, options = OpenAI::Models::EvalUpdateParams.dump_request(params) + parsed, options = OpenAI::EvalUpdateParams.dump_request(params) @client.request( method: :post, path: ["evals/%1$s", eval_id], @@ -101,10 +101,9 @@ def update(eval_id, params = {}) # # @param limit [Integer] Number of evals to retrieve. # - # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d - # ... + # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d # - # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use ... + # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -112,7 +111,7 @@ def update(eval_id, params = {}) # # @see OpenAI::Models::EvalListParams def list(params = {}) - parsed, options = OpenAI::Models::EvalListParams.dump_request(params) + parsed, options = OpenAI::EvalListParams.dump_request(params) @client.request( method: :get, path: "evals", diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 0bd63a75..590951f6 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -16,9 +16,9 @@ class Runs # # @param eval_id [String] The ID of the evaluation to create a run for. # - # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the run. 
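[Editor's usage sketch, not part of the diff: the hunk above documents `Evals::Runs#create(eval_id, params)` taking `data_source:`, `metadata:`, and `name:` body params. A minimal call might look like the following; the `client.evals.runs` accessor chain, the IDs, and the JSONL data-source hash shape are illustrative assumptions, not confirmed by this patch.]

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Create a run for an existing eval; eval_id is the path param documented above.
run = client.evals.runs.create(
  "eval_abc123",                                 # hypothetical eval ID
  name: "nightly-regression",
  data_source: {
    type: :jsonl,                                # assumed discriminator for CreateEvalJSONLRunDataSource
    source: {type: :file_id, id: "file-abc123"}  # hypothetical uploaded data file
  }
)
puts run.id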
# @@ -28,7 +28,7 @@ class Runs # # @see OpenAI::Models::Evals::RunCreateParams def create(eval_id, params) - parsed, options = OpenAI::Models::Evals::RunCreateParams.dump_request(params) + parsed, options = OpenAI::Evals::RunCreateParams.dump_request(params) @client.request( method: :post, path: ["evals/%1$s/runs", eval_id], @@ -52,7 +52,7 @@ def create(eval_id, params) # # @see OpenAI::Models::Evals::RunRetrieveParams def retrieve(run_id, params) - parsed, options = OpenAI::Models::Evals::RunRetrieveParams.dump_request(params) + parsed, options = OpenAI::Evals::RunRetrieveParams.dump_request(params) eval_id = parsed.delete(:eval_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -78,11 +78,9 @@ def retrieve(run_id, params) # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de - # ... + # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` - # ... + # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -90,7 +88,7 @@ def retrieve(run_id, params) # # @see OpenAI::Models::Evals::RunListParams def list(eval_id, params = {}) - parsed, options = OpenAI::Models::Evals::RunListParams.dump_request(params) + parsed, options = OpenAI::Evals::RunListParams.dump_request(params) @client.request( method: :get, path: ["evals/%1$s/runs", eval_id], @@ -115,7 +113,7 @@ def list(eval_id, params = {}) # # @see OpenAI::Models::Evals::RunDeleteParams def delete(run_id, params) - parsed, options = OpenAI::Models::Evals::RunDeleteParams.dump_request(params) + parsed, options = OpenAI::Evals::RunDeleteParams.dump_request(params) eval_id = parsed.delete(:eval_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -142,7 +140,7 @@ def delete(run_id, params) # # @see OpenAI::Models::Evals::RunCancelParams def cancel(run_id, params) - parsed, options = OpenAI::Models::Evals::RunCancelParams.dump_request(params) + parsed, options = OpenAI::Evals::RunCancelParams.dump_request(params) eval_id = parsed.delete(:eval_id) do raise ArgumentError.new("missing required path argument #{_1}") diff --git a/lib/openai/resources/evals/runs/output_items.rb b/lib/openai/resources/evals/runs/output_items.rb index 04624e2d..0d80996a 100644 --- a/lib/openai/resources/evals/runs/output_items.rb +++ b/lib/openai/resources/evals/runs/output_items.rb @@ -21,7 +21,7 @@ class OutputItems # # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveParams def retrieve(output_item_id, params) - parsed, options = OpenAI::Models::Evals::Runs::OutputItemRetrieveParams.dump_request(params) + parsed, options = OpenAI::Evals::Runs::OutputItemRetrieveParams.dump_request(params) eval_id = parsed.delete(:eval_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -50,15 +50,12 @@ def retrieve(output_item_id, params) # @param eval_id [String] Path param: The ID of the evaluation to retrieve runs for. # # @param after [String] Query param: Identifier for the last output item from the previous pagination re - # ... 
# # @param limit [Integer] Query param: Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o - # ... + # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o # - # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out - # ... + # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -66,7 +63,7 @@ def retrieve(output_item_id, params) # # @see OpenAI::Models::Evals::Runs::OutputItemListParams def list(run_id, params) - parsed, options = OpenAI::Models::Evals::Runs::OutputItemListParams.dump_request(params) + parsed, options = OpenAI::Evals::Runs::OutputItemListParams.dump_request(params) eval_id = parsed.delete(:eval_id) do raise ArgumentError.new("missing required path argument #{_1}") diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 77ba2646..a7cff25c 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -30,24 +30,23 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. ... + # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A - # ... + # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FileObject] + # @return [OpenAI::FileObject] # # @see OpenAI::Models::FileCreateParams def create(params) - parsed, options = OpenAI::Models::FileCreateParams.dump_request(params) + parsed, options = OpenAI::FileCreateParams.dump_request(params) @client.request( method: :post, path: "files", headers: {"content-type" => "multipart/form-data"}, body: parsed, - model: OpenAI::Models::FileObject, + model: OpenAI::FileObject, options: options ) end @@ -60,14 +59,14 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FileObject] + # @return [OpenAI::FileObject] # # @see OpenAI::Models::FileRetrieveParams def retrieve(file_id, params = {}) @client.request( method: :get, path: ["files/%1$s", file_id], - model: OpenAI::Models::FileObject, + model: OpenAI::FileObject, options: params[:request_options] ) end @@ -80,29 +79,26 @@ def retrieve(file_id, params = {}) # @overload list(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... 
+ # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FileListParams def list(params = {}) - parsed, options = OpenAI::Models::FileListParams.dump_request(params) + parsed, options = OpenAI::FileListParams.dump_request(params) @client.request( method: :get, path: "files", query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::FileObject, + model: OpenAI::FileObject, options: options ) end @@ -115,14 +111,14 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FileDeleted] + # @return [OpenAI::FileDeleted] # # @see OpenAI::Models::FileDeleteParams def delete(file_id, params = {}) @client.request( method: :delete, path: ["files/%1$s", file_id], - model: OpenAI::Models::FileDeleted, + model: OpenAI::FileDeleted, options: params[:request_options] ) end diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index 1d609a0a..e11f3d60 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -16,7 +16,7 @@ class Permissions # # @overload create(fine_tuned_model_checkpoint, project_ids:, request_options: {}) # - # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to create a permission for. ... + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to create a permission for. # # @param project_ids [Array] The project identifiers to grant access to. # @@ -26,7 +26,7 @@ class Permissions # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionCreateParams def create(fine_tuned_model_checkpoint, params) - parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionCreateParams.dump_request(params) + parsed, options = OpenAI::FineTuning::Checkpoints::PermissionCreateParams.dump_request(params) @client.request( method: :post, path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], @@ -48,13 +48,13 @@ def create(fine_tuned_model_checkpoint, params) # # @overload retrieve(fine_tuned_model_checkpoint, after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) # - # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to get permissions for. ... + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to get permissions for. # # @param after [String] Identifier for the last permission ID from the previous pagination request. # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. 
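[Editor's usage sketch, not part of the diff: `FineTuning::Checkpoints::Permissions#retrieve` above takes the cursor-style `after:`, `limit:`, `order:`, and `project_id:` params. A sketch of paging through permissions, assuming the conventional `client.fine_tuning.checkpoints.permissions` accessor chain; the checkpoint ID, project ID, and the `:ascending` enum value are assumptions.]

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Fetch up to 10 permissions for one checkpoint, oldest first.
permissions = client.fine_tuning.checkpoints.permissions.retrieve(
  "ft:gpt-4o-mini:acme::ckpt-abc123",  # hypothetical checkpoint ID
  limit: 10,
  order: :ascending,                   # assumed value of PermissionRetrieveParams::Order
  project_id: "proj_abc123"            # hypothetical project ID
)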
# @@ -64,7 +64,7 @@ def create(fine_tuned_model_checkpoint, params) # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams def retrieve(fine_tuned_model_checkpoint, params = {}) - parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams.dump_request(params) + parsed, options = OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams.dump_request(params) @client.request( method: :get, path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], @@ -85,9 +85,9 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) # # @overload delete(permission_id, fine_tuned_model_checkpoint:, request_options: {}) # - # @param permission_id [String] The ID of the fine-tuned model checkpoint permission to delete. ... + # @param permission_id [String] The ID of the fine-tuned model checkpoint permission to delete. # - # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to delete a permission for. ... + # @param fine_tuned_model_checkpoint [String] The ID of the fine-tuned model checkpoint to delete a permission for. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -95,7 +95,7 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams def delete(permission_id, params) - parsed, options = OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteParams.dump_request(params) + parsed, options = OpenAI::FineTuning::Checkpoints::PermissionDeleteParams.dump_request(params) fine_tuned_model_checkpoint = parsed.delete(:fine_tuned_model_checkpoint) do raise ArgumentError.new("missing required path argument #{_1}") diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index ba29a2ea..4a7bdfb7 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -20,38 +20,36 @@ class Jobs # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the ... + # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # - # @param training_file [String] The ID of an uploaded file that contains training data. ... + # @param training_file [String] The ID of an uploaded file that contains training data. # - # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. ... + # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. 
+ # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j - # ... # # @param suffix [String, nil] A string of up to 64 characters that will be added to your fine-tuned model name - # ... # - # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. ... + # @param validation_file [String, nil] The ID of an uploaded file that contains validation data. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FineTuning::FineTuningJob] + # @return [OpenAI::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCreateParams def create(params) - parsed, options = OpenAI::Models::FineTuning::JobCreateParams.dump_request(params) + parsed, options = OpenAI::FineTuning::JobCreateParams.dump_request(params) @client.request( method: :post, path: "fine_tuning/jobs", body: parsed, - model: OpenAI::Models::FineTuning::FineTuningJob, + model: OpenAI::FineTuning::FineTuningJob, options: options ) end @@ -65,18 +63,18 @@ def create(params) # # @overload retrieve(fine_tuning_job_id, request_options: {}) # - # @param fine_tuning_job_id [String] The ID of the fine-tuning job. ... + # @param fine_tuning_job_id [String] The ID of the fine-tuning job. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FineTuning::FineTuningJob] + # @return [OpenAI::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobRetrieveParams def retrieve(fine_tuning_job_id, params = {}) @client.request( method: :get, path: ["fine_tuning/jobs/%1$s", fine_tuning_job_id], - model: OpenAI::Models::FineTuning::FineTuningJob, + model: OpenAI::FineTuning::FineTuningJob, options: params[:request_options] ) end @@ -93,21 +91,20 @@ def retrieve(fine_tuning_job_id, params = {}) # @param limit [Integer] Number of fine-tuning jobs to retrieve. # # @param metadata [Hash{Symbol=>String}, nil] Optional metadata filter. To filter, use the syntax `metadata[k]=v`. Alternative - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListParams def list(params = {}) - parsed, options = OpenAI::Models::FineTuning::JobListParams.dump_request(params) + parsed, options = OpenAI::FineTuning::JobListParams.dump_request(params) @client.request( method: :get, path: "fine_tuning/jobs", query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::FineTuning::FineTuningJob, + model: OpenAI::FineTuning::FineTuningJob, options: options ) end @@ -119,18 +116,18 @@ def list(params = {}) # # @overload cancel(fine_tuning_job_id, request_options: {}) # - # @param fine_tuning_job_id [String] The ID of the fine-tuning job to cancel. ... + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to cancel. 
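A sketch of the fine-tuning job lifecycle documented above, using only the parameters shown in these hunks; the model name and training-file ID are placeholders:

    # Create a job from an uploaded training file, then poll its status.
    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini",
      training_file: "file-abc123"
    )
    job = client.fine_tuning.jobs.retrieve(job.id)
    puts job.status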
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FineTuning::FineTuningJob] + # @return [OpenAI::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCancelParams def cancel(fine_tuning_job_id, params = {}) @client.request( method: :post, path: ["fine_tuning/jobs/%1$s/cancel", fine_tuning_job_id], - model: OpenAI::Models::FineTuning::FineTuningJob, + model: OpenAI::FineTuning::FineTuningJob, options: params[:request_options] ) end @@ -142,7 +139,7 @@ def cancel(fine_tuning_job_id, params = {}) # # @overload list_events(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) # - # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get events for. ... + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get events for. # # @param after [String] Identifier for the last event from the previous pagination request. # @@ -150,17 +147,17 @@ def cancel(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListEventsParams def list_events(fine_tuning_job_id, params = {}) - parsed, options = OpenAI::Models::FineTuning::JobListEventsParams.dump_request(params) + parsed, options = OpenAI::FineTuning::JobListEventsParams.dump_request(params) @client.request( method: :get, path: ["fine_tuning/jobs/%1$s/events", fine_tuning_job_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::FineTuning::FineTuningJobEvent, + model: OpenAI::FineTuning::FineTuningJobEvent, options: options ) end diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index 2b5e1d8e..b588de3d 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -12,7 +12,7 @@ class Checkpoints # # @overload list(fine_tuning_job_id, after: nil, limit: nil, request_options: {}) # - # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get checkpoints for. ... + # @param fine_tuning_job_id [String] The ID of the fine-tuning job to get checkpoints for. # # @param after [String] Identifier for the last checkpoint ID from the previous pagination request. 
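And a matching sketch for the checkpoint listing above; the job ID is a placeholder, and `data` is assumed to carry the current page of checkpoints:

    # Inspect the checkpoints produced by a fine-tuning job.
    page = client.fine_tuning.jobs.checkpoints.list("ftjob-abc123", limit: 5)
    page.data.each { |c| puts "#{c.step_number}: #{c.fine_tuned_model_checkpoint}" }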
# @@ -20,17 +20,17 @@ class Checkpoints # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::Jobs::CheckpointListParams def list(fine_tuning_job_id, params = {}) - parsed, options = OpenAI::Models::FineTuning::Jobs::CheckpointListParams.dump_request(params) + parsed, options = OpenAI::FineTuning::Jobs::CheckpointListParams.dump_request(params) @client.request( method: :get, path: ["fine_tuning/jobs/%1$s/checkpoints", fine_tuning_job_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint, + model: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint, options: options ) end diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 13484438..b04d709d 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -11,35 +11,30 @@ class Images # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le - # ... # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time - # ... + # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` - # ... + # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x - # ... + # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::ImagesResponse] + # @return [OpenAI::ImagesResponse] # # @see OpenAI::Models::ImageCreateVariationParams def create_variation(params) - parsed, options = OpenAI::Models::ImageCreateVariationParams.dump_request(params) + parsed, options = OpenAI::ImageCreateVariationParams.dump_request(params) @client.request( method: :post, path: "images/variations", headers: {"content-type" => "multipart/form-data"}, body: parsed, - model: OpenAI::Models::ImagesResponse, + model: OpenAI::ImagesResponse, options: options ) end @@ -52,46 +47,39 @@ def create_variation(params) # # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. ... + # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. 
Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character - # ... # - # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind - # ... # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup - # ... + # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are - # ... + # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` - # ... + # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands - # ... + # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::ImagesResponse] + # @return [OpenAI::ImagesResponse] # # @see OpenAI::Models::ImageEditParams def edit(params) - parsed, options = OpenAI::Models::ImageEditParams.dump_request(params) + parsed, options = OpenAI::ImageEditParams.dump_request(params) @client.request( method: :post, path: "images/edits", headers: {"content-type" => "multipart/form-data"}, body: parsed, - model: OpenAI::Models::ImagesResponse, + model: OpenAI::ImagesResponse, options: options ) end @@ -105,51 +93,41 @@ def edit(params) # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte - # ... # - # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). ... + # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im - # ... 
+ # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must - # ... + # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only - # ... # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only - # ... # - # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su - # ... + # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # - # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. ... + # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned - # ... + # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands - # ... + # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- - # ... + # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
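A sketch of the image-generation call documented above; the prompt, model, and size are illustrative, and `gpt-image-1` is expected to return base64 payloads rather than URLs:

    # Generate a single image and report how the payload came back.
    response = client.images.generate(
      prompt: "A watercolor lighthouse at dusk",
      model: "gpt-image-1",
      size: "1024x1024"
    )
    image = response.data.first
    puts image.b64_json ? "received base64 payload" : image.url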
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::ImagesResponse] + # @return [OpenAI::ImagesResponse] # # @see OpenAI::Models::ImageGenerateParams def generate(params) - parsed, options = OpenAI::Models::ImageGenerateParams.dump_request(params) + parsed, options = OpenAI::ImageGenerateParams.dump_request(params) @client.request( method: :post, path: "images/generations", body: parsed, - model: OpenAI::Models::ImagesResponse, + model: OpenAI::ImagesResponse, options: options ) end diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index afe9d3e1..1dd26c31 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -12,14 +12,14 @@ class Models # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Model] + # @return [OpenAI::Model] # # @see OpenAI::Models::ModelRetrieveParams def retrieve(model, params = {}) @client.request( method: :get, path: ["models/%1$s", model], - model: OpenAI::Models::Model, + model: OpenAI::Model, options: params[:request_options] ) end @@ -31,7 +31,7 @@ def retrieve(model, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::ModelListParams def list(params = {}) @@ -39,7 +39,7 @@ def list(params = {}) method: :get, path: "models", page: OpenAI::Internal::Page, - model: OpenAI::Models::Model, + model: OpenAI::Model, options: params[:request_options] ) end @@ -53,14 +53,14 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::ModelDeleted] + # @return [OpenAI::ModelDeleted] # # @see OpenAI::Models::ModelDeleteParams def delete(model, params = {}) @client.request( method: :delete, path: ["models/%1$s", model], - model: OpenAI::Models::ModelDeleted, + model: OpenAI::ModelDeleted, options: params[:request_options] ) end diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 443d6b18..568e03d3 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -11,10 +11,9 @@ class Moderations # # @overload create(input:, model: nil, request_options: {}) # - # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or - # ... + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or # - # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in ... + # @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. 
Learn more in # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -22,7 +21,7 @@ class Moderations # # @see OpenAI::Models::ModerationCreateParams def create(params) - parsed, options = OpenAI::Models::ModerationCreateParams.dump_request(params) + parsed, options = OpenAI::ModerationCreateParams.dump_request(params) @client.request( method: :post, path: "moderations", diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 223be4f7..f3c18eea 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -25,54 +25,49 @@ class Responses # # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ... + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently ... + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context - # ... # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ... + # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # - # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ... + # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will m - # ... # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You ... + # @param tools [Array] An array of tools the model may call while generating a response. You # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ... + # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Responses::Response] + # @return [OpenAI::Responses::Response] # # @see OpenAI::Models::Responses::ResponseCreateParams def create(params) - parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) + parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] message = "Please use `#stream_raw` for the streaming use case." raise ArgumentError.new(message) @@ -81,7 +76,7 @@ def create(params) method: :post, path: "responses", body: parsed, - model: OpenAI::Models::Responses::Response, + model: OpenAI::Responses::Response, options: options ) end @@ -105,54 +100,49 @@ def create(params) # # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. ... + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI ... + # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently ... + # @param include [Array, nil] Specify additional output data to include in the model response. 
Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context - # ... # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. ... + # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # - # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to ... + # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** ... + # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is - # ... + # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via ... + # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m - # ... # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain ... + # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating ... + # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You ... + # @param tools [Array] An array of tools the model may call while generating a response. You # - # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, ... + # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. ... + # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor - # ... 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) - parsed, options = OpenAI::Models::Responses::ResponseCreateParams.dump_request(params) + parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." raise ArgumentError.new(message) @@ -164,7 +154,7 @@ def stream_raw(params) headers: {"accept" => "text/event-stream"}, body: parsed, stream: OpenAI::Internal::Stream, - model: OpenAI::Models::Responses::ResponseStreamEvent, + model: OpenAI::Responses::ResponseStreamEvent, options: options ) end @@ -178,20 +168,20 @@ def stream_raw(params) # # @param response_id [String] The ID of the response to retrieve. # - # @param include [Array] Additional fields to include in the response. See the `include` ... + # @param include [Array] Additional fields to include in the response. See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Responses::Response] + # @return [OpenAI::Responses::Response] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) - parsed, options = OpenAI::Models::Responses::ResponseRetrieveParams.dump_request(params) + parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params) @client.request( method: :get, path: ["responses/%1$s", response_id], query: parsed, - model: OpenAI::Models::Responses::Response, + model: OpenAI::Responses::Response, options: options ) end diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index a47baded..3cec0416 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -13,29 +13,29 @@ class InputItems # # @param response_id [String] The ID of the response to retrieve input items for. # - # @param after [String] An item ID to list items after, used in pagination. ... + # @param after [String] An item ID to list items after, used in pagination. # - # @param before [String] An item ID to list items before, used in pagination. ... + # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` ... + # @param include [Array] Additional fields to include in the response. See the `include` # - # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between ... + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. ... + # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. 
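A sketch for the input-items listing above; the response ID is a placeholder:

    # Walk the inputs that produced a stored response, oldest first.
    items = client.responses.input_items.list("resp_abc123", limit: 20, order: :asc)
    items.data.each { |item| puts item.type }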
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) - parsed, options = OpenAI::Models::Responses::InputItemListParams.dump_request(params) + parsed, options = OpenAI::Responses::InputItemListParams.dump_request(params) @client.request( method: :get, path: ["responses/%1$s/input_items", response_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::Responses::ResponseItem, + model: OpenAI::Responses::ResponseItem, options: options ) end diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index 719532b1..363b2e57 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -31,28 +31,22 @@ class Uploads # # @overload create(bytes:, filename:, mime_type:, purpose:, request_options: {}) # - # @param bytes [Integer] The number of bytes in the file you are uploading. ... + # @param bytes [Integer] The number of bytes in the file you are uploading. # - # @param filename [String] The name of the file to upload. ... + # @param filename [String] The name of the file to upload. # - # @param mime_type [String] The MIME type of the file. ... + # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. ... + # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Upload] + # @return [OpenAI::Upload] # # @see OpenAI::Models::UploadCreateParams def create(params) - parsed, options = OpenAI::Models::UploadCreateParams.dump_request(params) - @client.request( - method: :post, - path: "uploads", - body: parsed, - model: OpenAI::Models::Upload, - options: options - ) + parsed, options = OpenAI::UploadCreateParams.dump_request(params) + @client.request(method: :post, path: "uploads", body: parsed, model: OpenAI::Upload, options: options) end # Some parameter documentations has been truncated, see @@ -62,18 +56,18 @@ def create(params) # # @overload cancel(upload_id, request_options: {}) # - # @param upload_id [String] The ID of the Upload. ... + # @param upload_id [String] The ID of the Upload. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Upload] + # @return [OpenAI::Upload] # # @see OpenAI::Models::UploadCancelParams def cancel(upload_id, params = {}) @client.request( method: :post, path: ["uploads/%1$s/cancel", upload_id], - model: OpenAI::Models::Upload, + model: OpenAI::Upload, options: params[:request_options] ) end @@ -97,25 +91,24 @@ def cancel(upload_id, params = {}) # # @overload complete(upload_id, part_ids:, md5: nil, request_options: {}) # - # @param upload_id [String] The ID of the Upload. ... + # @param upload_id [String] The ID of the Upload. # - # @param part_ids [Array] The ordered list of Part IDs. ... + # @param part_ids [Array] The ordered list of Part IDs. # # @param md5 [String] The optional md5 checksum for the file contents to verify if the bytes uploaded - # ... 
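The three-step upload flow above (`create`, `parts.create`, `complete`), end to end; the file name, MIME type, and single-part chunking are illustrative:

    require "pathname"

    # Stage an upload, send its bytes as one part, then finalize it.
    upload = client.uploads.create(
      bytes: File.size("training.jsonl"),
      filename: "training.jsonl",
      mime_type: "text/jsonl",
      purpose: :"fine-tune"
    )
    part = client.uploads.parts.create(upload.id, data: Pathname("training.jsonl"))
    client.uploads.complete(upload.id, part_ids: [part.id])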
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Upload] + # @return [OpenAI::Upload] # # @see OpenAI::Models::UploadCompleteParams def complete(upload_id, params) - parsed, options = OpenAI::Models::UploadCompleteParams.dump_request(params) + parsed, options = OpenAI::UploadCompleteParams.dump_request(params) @client.request( method: :post, path: ["uploads/%1$s/complete", upload_id], body: parsed, - model: OpenAI::Models::Upload, + model: OpenAI::Upload, options: options ) end diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index 3d7e4770..7ad2e042 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -21,23 +21,23 @@ class Parts # # @overload create(upload_id, data:, request_options: {}) # - # @param upload_id [String] The ID of the Upload. ... + # @param upload_id [String] The ID of the Upload. # - # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. ... + # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::Uploads::UploadPart] + # @return [OpenAI::Uploads::UploadPart] # # @see OpenAI::Models::Uploads::PartCreateParams def create(upload_id, params) - parsed, options = OpenAI::Models::Uploads::PartCreateParams.dump_request(params) + parsed, options = OpenAI::Uploads::PartCreateParams.dump_request(params) @client.request( method: :post, path: ["uploads/%1$s/parts", upload_id], headers: {"content-type" => "multipart/form-data"}, body: parsed, - model: OpenAI::Models::Uploads::UploadPart, + model: OpenAI::Uploads::UploadPart, options: options ) end diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index c0eace72..c1d3c184 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -16,30 +16,28 @@ class VectorStores # # @overload create(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # ... # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the vector store. 
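A sketch of the vector-store creation documented here, paired with the files sub-resource changed later in this patch; the attached file ID is a placeholder:

    # Create a store and attach a previously uploaded file to it.
    store = client.vector_stores.create(name: "support-docs")
    client.vector_stores.files.create(store.id, file_id: "file-abc123")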
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStore] + # @return [OpenAI::VectorStore] # # @see OpenAI::Models::VectorStoreCreateParams def create(params = {}) - parsed, options = OpenAI::Models::VectorStoreCreateParams.dump_request(params) + parsed, options = OpenAI::VectorStoreCreateParams.dump_request(params) @client.request( method: :post, path: "vector_stores", body: parsed, - model: OpenAI::Models::VectorStore, + model: OpenAI::VectorStore, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -52,14 +50,14 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStore] + # @return [OpenAI::VectorStore] # # @see OpenAI::Models::VectorStoreRetrieveParams def retrieve(vector_store_id, params = {}) @client.request( method: :get, path: ["vector_stores/%1$s", vector_store_id], - model: OpenAI::Models::VectorStore, + model: OpenAI::VectorStore, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h} ) end @@ -73,24 +71,24 @@ def retrieve(vector_store_id, params = {}) # # @param vector_store_id [String] The ID of the vector store to modify. # - # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String, nil] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStore] + # @return [OpenAI::VectorStore] # # @see OpenAI::Models::VectorStoreUpdateParams def update(vector_store_id, params = {}) - parsed, options = OpenAI::Models::VectorStoreUpdateParams.dump_request(params) + parsed, options = OpenAI::VectorStoreUpdateParams.dump_request(params) @client.request( method: :post, path: ["vector_stores/%1$s", vector_store_id], body: parsed, - model: OpenAI::Models::VectorStore, + model: OpenAI::VectorStore, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -103,30 +101,26 @@ def update(vector_store_id, params = {}) # @overload list(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStoreListParams def list(params = {}) - parsed, options = OpenAI::Models::VectorStoreListParams.dump_request(params) + parsed, options = OpenAI::VectorStoreListParams.dump_request(params) @client.request( method: :get, path: "vector_stores", query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::VectorStore, + model: OpenAI::VectorStore, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -139,14 +133,14 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStoreDeleted] + # @return [OpenAI::VectorStoreDeleted] # # @see OpenAI::Models::VectorStoreDeleteParams def delete(vector_store_id, params = {}) @client.request( method: :delete, path: ["vector_stores/%1$s", vector_store_id], - model: OpenAI::Models::VectorStoreDeleted, + model: OpenAI::VectorStoreDeleted, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **params[:request_options].to_h} ) end @@ -163,12 +157,11 @@ def delete(vector_store_id, params = {}) # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 - # ... # - # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. # @@ -178,7 +171,7 @@ def delete(vector_store_id, params = {}) # # @see OpenAI::Models::VectorStoreSearchParams def search(vector_store_id, params) - parsed, options = OpenAI::Models::VectorStoreSearchParams.dump_request(params) + parsed, options = OpenAI::VectorStoreSearchParams.dump_request(params) @client.request( method: :post, path: ["vector_stores/%1$s/search", vector_store_id], diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index a56053dd..b8b4133c 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -11,28 +11,26 @@ class FileBatches # # @overload create(vector_store_id, file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) # - # @param vector_store_id [String] The ID of the vector store for which to create a File Batch. ... + # @param vector_store_id [String] The ID of the vector store for which to create a File Batch. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that - # ... # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCreateParams def create(vector_store_id, params) - parsed, options = OpenAI::Models::VectorStores::FileBatchCreateParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileBatchCreateParams.dump_request(params) @client.request( method: :post, path: ["vector_stores/%1$s/file_batches", vector_store_id], body: parsed, - model: OpenAI::Models::VectorStores::VectorStoreFileBatch, + model: OpenAI::VectorStores::VectorStoreFileBatch, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -47,11 +45,11 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchRetrieveParams def retrieve(batch_id, params) - parsed, options = OpenAI::Models::VectorStores::FileBatchRetrieveParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileBatchRetrieveParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -59,7 +57,7 @@ def retrieve(batch_id, params) @client.request( method: :get, path: ["vector_stores/%1$s/file_batches/%2$s", vector_store_id, batch_id], - model: OpenAI::Models::VectorStores::VectorStoreFileBatch, + model: OpenAI::VectorStores::VectorStoreFileBatch, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -75,11 +73,11 @@ def retrieve(batch_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCancelParams def cancel(batch_id, params) - parsed, options = OpenAI::Models::VectorStores::FileBatchCancelParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileBatchCancelParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -87,7 +85,7 @@ def cancel(batch_id, params) @client.request( method: :post, path: ["vector_stores/%1$s/file_batches/%2$s/cancel", vector_store_id, batch_id], - model: OpenAI::Models::VectorStores::VectorStoreFileBatch, + model: OpenAI::VectorStores::VectorStoreFileBatch, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -104,27 +102,22 @@ def cancel(batch_id, params) # @param vector_store_id [String] Path param: The ID of the vector store that the files belong to. # # @param after [String] Query param: A cursor for use in pagination. `after` is an object ID that define - # ... # # @param before [String] Query param: A cursor for use in pagination. 
`before` is an object ID that defin - # ... # - # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, - # ... + # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be - # ... # - # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for - # ... + # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileBatchListFilesParams def list_files(batch_id, params) - parsed, options = OpenAI::Models::VectorStores::FileBatchListFilesParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileBatchListFilesParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -134,7 +127,7 @@ def list_files(batch_id, params) path: ["vector_stores/%1$s/file_batches/%2$s/files", vector_store_id, batch_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::VectorStores::VectorStoreFile, + model: OpenAI::VectorStores::VectorStoreFile, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index fa0b48c5..79c76c82 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -13,28 +13,26 @@ class Files # # @overload create(vector_store_id, file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # - # @param vector_store_id [String] The ID of the vector store for which to create a File. ... + # @param vector_store_id [String] The ID of the vector store for which to create a File. # # @param file_id [String] A [File](https://platform.openai.com/docs/api-reference/files) ID that the vecto - # ... # - # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be ... + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` - # ... + # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # @return [OpenAI::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileCreateParams def create(vector_store_id, params) - parsed, options = OpenAI::Models::VectorStores::FileCreateParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileCreateParams.dump_request(params) @client.request( method: :post, path: ["vector_stores/%1$s/files", vector_store_id], body: parsed, - model: OpenAI::Models::VectorStores::VectorStoreFile, + model: OpenAI::VectorStores::VectorStoreFile, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -49,11 +47,11 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # @return [OpenAI::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileRetrieveParams def retrieve(file_id, params) - parsed, options = OpenAI::Models::VectorStores::FileRetrieveParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileRetrieveParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -61,7 +59,7 @@ def retrieve(file_id, params) @client.request( method: :get, path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], - model: OpenAI::Models::VectorStores::VectorStoreFile, + model: OpenAI::VectorStores::VectorStoreFile, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -78,15 +76,14 @@ def retrieve(file_id, params) # @param vector_store_id [String] Path param: The ID of the vector store the file belongs to. # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca - # ... # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFile] + # @return [OpenAI::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileUpdateParams def update(file_id, params) - parsed, options = OpenAI::Models::VectorStores::FileUpdateParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileUpdateParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -95,7 +92,7 @@ def update(file_id, params) method: :post, path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], body: parsed, - model: OpenAI::Models::VectorStores::VectorStoreFile, + model: OpenAI::VectorStores::VectorStoreFile, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -110,33 +107,28 @@ def update(file_id, params) # @param vector_store_id [String] The ID of the vector store that the files belong to. # # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place - # ... # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place - # ... # - # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - # ... + # @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. 
One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 - # ... # - # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord - # ... + # @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileListParams def list(vector_store_id, params = {}) - parsed, options = OpenAI::Models::VectorStores::FileListParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileListParams.dump_request(params) @client.request( method: :get, path: ["vector_stores/%1$s/files", vector_store_id], query: parsed, page: OpenAI::Internal::CursorPage, - model: OpenAI::Models::VectorStores::VectorStoreFile, + model: OpenAI::VectorStores::VectorStoreFile, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -154,11 +146,11 @@ def list(vector_store_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] + # @return [OpenAI::VectorStores::VectorStoreFileDeleted] # # @see OpenAI::Models::VectorStores::FileDeleteParams def delete(file_id, params) - parsed, options = OpenAI::Models::VectorStores::FileDeleteParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileDeleteParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") @@ -166,7 +158,7 @@ def delete(file_id, params) @client.request( method: :delete, path: ["vector_stores/%1$s/files/%2$s", vector_store_id, file_id], - model: OpenAI::Models::VectorStores::VectorStoreFileDeleted, + model: OpenAI::VectorStores::VectorStoreFileDeleted, options: {extra_headers: {"OpenAI-Beta" => "assistants=v2"}, **options} ) end @@ -185,7 +177,7 @@ def delete(file_id, params) # # @see OpenAI::Models::VectorStores::FileContentParams def content(file_id, params) - parsed, options = OpenAI::Models::VectorStores::FileContentParams.dump_request(params) + parsed, options = OpenAI::VectorStores::FileContentParams.dump_request(params) vector_store_id = parsed.delete(:vector_store_id) do raise ArgumentError.new("missing required path argument #{_1}") diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index b70ce71a..7ac76a8c 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -66,7 +66,8 @@ module OpenAI # @api private sig { override.returns(T::Hash[String, String]) } - private def auth_headers; end + private def auth_headers + end # Creates and returns a new client for interacting with the API. 
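For context on the reshaped `Client.new` signature in this hunk, a minimal construction sketch may help. The `api_key:` keyword name and the literal values are assumptions (only the comment "Defaults to `ENV[\"OPENAI_API_KEY\"]`" and the timeout/retry keywords are visible in the diff itself):

```ruby
# Minimal sketch, assuming the `api_key:` keyword; defaults mirror the
# constants named in the signature above.
require "openai"

client = OpenAI::Client.new(
  api_key: ENV["OPENAI_API_KEY"], # the documented default source
  max_retries: 2,                 # arbitrary illustrative override
  timeout: OpenAI::Client::DEFAULT_TIMEOUT_IN_SECONDS
)
```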
sig do @@ -79,8 +80,7 @@ module OpenAI timeout: Float, initial_retry_delay: Float, max_retry_delay: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Defaults to `ENV["OPENAI_API_KEY"]` @@ -97,6 +97,7 @@ module OpenAI timeout: OpenAI::Client::DEFAULT_TIMEOUT_IN_SECONDS, initial_retry_delay: OpenAI::Client::DEFAULT_INITIAL_RETRY_DELAY, max_retry_delay: OpenAI::Client::DEFAULT_MAX_RETRY_DELAY - ); end + ) + end end end diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 1eb51da4..4ade8ef1 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -38,10 +38,17 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new( + url:, + status: nil, + body: nil, + request: nil, + response: nil, + message: nil + ) end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: nil); end end class APIConnectionError < OpenAI::Errors::APIError @@ -69,10 +76,16 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Connection error.") + def self.new( + url:, + status: nil, + body: nil, + request: nil, + response: nil, + message: "Connection error." + ) end end @@ -86,10 +99,16 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end - def self.new(url:, status: nil, body: nil, request: nil, response: nil, message: "Request timed out.") + def self.new( + url:, + status: nil, + body: nil, + request: nil, + response: nil, + message: "Request timed out." 
+ ) end end @@ -103,10 +122,10 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.for(url:, status:, body:, request:, response:, message: nil) end - def self.for(url:, status:, body:, request:, response:, message: nil); end sig { returns(Integer) } attr_accessor :status @@ -129,10 +148,10 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new(url:, status:, body:, request:, response:, message: nil) end - def self.new(url:, status:, body:, request:, response:, message: nil); end end class BadRequestError < OpenAI::Errors::APIStatusError @@ -164,7 +183,7 @@ module OpenAI end class InternalServerError < OpenAI::Errors::APIStatusError - HTTP_STATUS = T.let(500.., T::Range[Integer]) + HTTP_STATUS = T.let((500..), T::Range[Integer]) end end end diff --git a/rbi/openai/file_part.rbi b/rbi/openai/file_part.rbi index 20c06b85..5df7359c 100644 --- a/rbi/openai/file_part.rbi +++ b/rbi/openai/file_part.rbi @@ -13,22 +13,25 @@ module OpenAI # @api private sig { returns(String) } - private def read; end + private def read + end sig { params(a: T.anything).returns(String) } - def to_json(*a); end + def to_json(*a) + end sig { params(a: T.anything).returns(String) } - def to_yaml(*a); end + def to_yaml(*a) + end sig do params( content: T.any(Pathname, StringIO, IO, String), filename: T.nilable(String), content_type: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new(content, filename: nil, content_type: nil) end - def self.new(content, filename: nil, content_type: nil); end end end diff --git a/rbi/openai/internal/cursor_page.rbi b/rbi/openai/internal/cursor_page.rbi index 7bc68531..4eb4d3b1 100644 --- a/rbi/openai/internal/cursor_page.rbi +++ b/rbi/openai/internal/cursor_page.rbi @@ -15,7 +15,8 @@ module OpenAI # @api private sig { returns(String) } - def inspect; end + def inspect + end end end end diff --git a/rbi/openai/internal/page.rbi b/rbi/openai/internal/page.rbi index eda6d887..6f9f6551 100644 --- a/rbi/openai/internal/page.rbi +++ b/rbi/openai/internal/page.rbi @@ -15,7 +15,8 @@ module OpenAI # @api private sig { returns(String) } - def inspect; end + def inspect + end end end end diff --git a/rbi/openai/internal/stream.rbi b/rbi/openai/internal/stream.rbi index 8c1f5f34..ffcb1934 100644 --- a/rbi/openai/internal/stream.rbi +++ b/rbi/openai/internal/stream.rbi @@ -3,14 +3,16 @@ module OpenAI module Internal class Stream - Message = type_member(:in) { {fixed: OpenAI::Internal::Util::ServerSentEvent} } + Message = + type_member(:in) { { fixed: OpenAI::Internal::Util::ServerSentEvent } } Elem = type_member(:out) include OpenAI::Internal::Type::BaseStream # @api private sig { override.returns(T::Enumerable[Elem]) } - private def iterator; end + private def iterator + end end end end diff --git a/rbi/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi index 612daee4..29580549 100644 --- a/rbi/openai/internal/transport/base_client.rbi +++ b/rbi/openai/internal/transport/base_client.rbi @@ -12,33 +12,52 @@ module OpenAI { method: Symbol, path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - 
), + query: + T.nilable( + T::Hash[String, T.nilable(T.any(T::Array[String], String))] + ), + headers: + T.nilable( + T::Hash[ + String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + ) + ] + ), body: T.nilable(T.anything), - unwrap: T.nilable( - T.any( - Symbol, - Integer, - T::Array[T.any(Symbol, Integer)], - T.proc.params(arg0: T.anything).returns(T.anything) - ) - ), - page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), - stream: T.nilable( - T::Class[OpenAI::Internal::Type::BaseStream[T.anything, - OpenAI::Internal::Type::BaseModel]] - ), + unwrap: + T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), + page: + T.nilable( + T::Class[ + OpenAI::Internal::Type::BasePage[ + OpenAI::Internal::Type::BaseModel + ] + ] + ), + stream: + T.nilable( + T::Class[ + OpenAI::Internal::Type::BaseStream[ + T.anything, + OpenAI::Internal::Type::BaseModel + ] + ] + ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(OpenAI::RequestOpts) + options: T.nilable(OpenAI::RequestOptions::OrHash) } end @@ -61,41 +80,48 @@ module OpenAI class << self # @api private - sig { params(req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape).void } - def validate!(req); end + sig do + params( + req: + OpenAI::Internal::Transport::BaseClient::RequestComponentsShape + ).void + end + def validate!(req) + end # @api private sig do params( status: Integer, - headers: T.any( - T::Hash[String, String], - Net::HTTPHeader - ) + headers: T.any(T::Hash[String, String], Net::HTTPHeader) ).returns(T::Boolean) end - def should_retry?(status, headers:); end + def should_retry?(status, headers:) + end # @api private sig do params( - request: OpenAI::Internal::Transport::BaseClient::RequestInputShape, + request: + OpenAI::Internal::Transport::BaseClient::RequestInputShape, status: Integer, response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) + ).returns( + OpenAI::Internal::Transport::BaseClient::RequestInputShape ) - .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) end - def follow_redirect(request, status:, response_headers:); end + def follow_redirect(request, status:, response_headers:) + end # @api private sig do params( status: T.any(Integer, OpenAI::Errors::APIConnectionError), stream: T.nilable(T::Enumerable[String]) - ) - .void + ).void + end + def reap_connection!(status, stream:) end - def reap_connection!(status, stream:); end end # @api private @@ -110,11 +136,19 @@ module OpenAI max_retries: Integer, initial_retry_delay: Float, max_retry_delay: Float, - headers: T::Hash[String, - T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))], + headers: + T::Hash[ + String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + ) + ], idempotency_header: T.nilable(String) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( base_url:, @@ -124,29 +158,41 @@ module OpenAI max_retry_delay: 0.0, headers: {}, idempotency_header: nil - ); end + ) + end + # @api private sig { overridable.returns(T::Hash[String, String]) } - private def auth_headers; end + private def auth_headers + end # @api private sig { returns(String) } - private def generate_idempotency_key; end + private def generate_idempotency_key + end # @api private sig do overridable .params( - req: 
OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + req: + OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, opts: OpenAI::Internal::AnyHash ) .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) end - private def build_request(req, opts); end + private def build_request(req, opts) + end # @api private - sig { params(headers: T::Hash[String, String], retry_count: Integer).returns(Float) } - private def retry_delay(headers, retry_count:); end + sig do + params( + headers: T::Hash[String, String], + retry_count: Integer + ).returns(Float) + end + private def retry_delay(headers, retry_count:) + end # @api private sig do @@ -155,10 +201,15 @@ module OpenAI redirect_count: Integer, retry_count: Integer, send_retry_header: T::Boolean - ) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + ).returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + private def send_request( + request, + redirect_count:, + retry_count:, + send_retry_header: + ) end - private def send_request(request, redirect_count:, retry_count:, send_retry_header:); end # Execute the request specified by `req`. This is the method that all resource # methods call into. @@ -168,35 +219,53 @@ module OpenAI params( method: Symbol, path: T.any(String, T::Array[String]), - query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))]), - headers: T.nilable( - T::Hash[String, - T.nilable( - T.any( - String, - Integer, - T::Array[T.nilable(T.any(String, Integer))] - ) - )] - ), + query: + T.nilable( + T::Hash[String, T.nilable(T.any(T::Array[String], String))] + ), + headers: + T.nilable( + T::Hash[ + String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + ) + ] + ), body: T.nilable(T.anything), - unwrap: T.nilable( - T.any( - Symbol, - Integer, - T::Array[T.any(Symbol, Integer)], - T.proc.params(arg0: T.anything).returns(T.anything) - ) - ), - page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), - stream: T.nilable( - T::Class[OpenAI::Internal::Type::BaseStream[T.anything, - OpenAI::Internal::Type::BaseModel]] - ), + unwrap: + T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), + page: + T.nilable( + T::Class[ + OpenAI::Internal::Type::BasePage[ + OpenAI::Internal::Type::BaseModel + ] + ] + ), + stream: + T.nilable( + T::Class[ + OpenAI::Internal::Type::BaseStream[ + T.anything, + OpenAI::Internal::Type::BaseModel + ] + ] + ), model: T.nilable(OpenAI::Internal::Type::Converter::Input), - options: T.nilable(OpenAI::RequestOpts) - ) - .returns(T.anything) + options: T.nilable(OpenAI::RequestOptions::OrHash) + ).returns(T.anything) end def request( method, @@ -209,10 +278,13 @@ module OpenAI stream: nil, model: OpenAI::Internal::Type::Unknown, options: {} - ); end + ) + end + # @api private sig { returns(String) } - def inspect; end + def inspect + end end end end diff --git a/rbi/openai/internal/transport/pooled_net_requester.rbi b/rbi/openai/internal/transport/pooled_net_requester.rbi index c0a3d443..373c1c3b 100644 --- a/rbi/openai/internal/transport/pooled_net_requester.rbi +++ b/rbi/openai/internal/transport/pooled_net_requester.rbi @@ -23,37 +23,51 @@ module OpenAI class << self # @api private sig { params(url: URI::Generic).returns(Net::HTTP) } - def connect(url); end + def connect(url) + end # @api private sig { params(conn: Net::HTTP, deadline: Float).void } - def 
calibrate_socket_timeout(conn, deadline); end + def calibrate_socket_timeout(conn, deadline) + end # @api private sig do params( - request: OpenAI::Internal::Transport::PooledNetRequester::RequestShape, + request: + OpenAI::Internal::Transport::PooledNetRequester::RequestShape, blk: T.proc.params(arg0: String).void - ) - .returns([Net::HTTPGenericRequest, T.proc.void]) + ).returns([Net::HTTPGenericRequest, T.proc.void]) + end + def build_request(request, &blk) end - def build_request(request, &blk); end end # @api private - sig { params(url: URI::Generic, deadline: Float, blk: T.proc.params(arg0: Net::HTTP).void).void } - private def with_pool(url, deadline:, &blk); end + sig do + params( + url: URI::Generic, + deadline: Float, + blk: T.proc.params(arg0: Net::HTTP).void + ).void + end + private def with_pool(url, deadline:, &blk) + end # @api private sig do - params(request: OpenAI::Internal::Transport::PooledNetRequester::RequestShape) - .returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + params( + request: + OpenAI::Internal::Transport::PooledNetRequester::RequestShape + ).returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) + end + def execute(request) end - def execute(request); end # @api private sig { params(size: Integer).returns(T.attached_class) } - def self.new(size: Etc.nprocessors); end + def self.new(size: Etc.nprocessors) + end end end end diff --git a/rbi/openai/internal/type/array_of.rbi b/rbi/openai/internal/type/array_of.rbi index e7a84e1a..9cc138b7 100644 --- a/rbi/openai/internal/type/array_of.rbi +++ b/rbi/openai/internal/type/array_of.rbi @@ -15,25 +15,29 @@ module OpenAI sig do params( - type_info: T.any( - OpenAI::Internal::AnyHash, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.[](type_info, spec = {}) end - def self.[](type_info, spec = {}); end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other); end + def ===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end # @api private sig do @@ -44,7 +48,8 @@ module OpenAI ) .returns(T.any(T::Array[T.anything], T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do @@ -55,33 +60,38 @@ module OpenAI ) .returns(T.any(T::Array[T.anything], T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end # @api private sig { returns(Elem) } - protected def item_type; end + protected def item_type + end # @api private sig { returns(T::Boolean) } - protected def nilable?; end + protected def nilable? 
+ end # @api private sig do params( - type_info: T.any( - OpenAI::Internal::AnyHash, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .void + ).void + end + def initialize(type_info, spec = {}) end - def initialize(type_info, spec = {}); end # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end end end end diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi index d97f840f..f0bcae46 100644 --- a/rbi/openai/internal/type/base_model.rbi +++ b/rbi/openai/internal/type/base_model.rbi @@ -8,9 +8,16 @@ module OpenAI abstract! - KnownFieldShape = T.type_alias do - {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} - end + KnownFieldShape = + T.type_alias do + { + mode: T.nilable(Symbol), + required: T::Boolean, + nilable: T::Boolean + } + end + + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } class << self # @api private @@ -23,12 +30,16 @@ module OpenAI Symbol, T.all( OpenAI::Internal::Type::BaseModel::KnownFieldShape, - {type_fn: T.proc.returns(OpenAI::Internal::Type::Converter::Input)} + { + type_fn: + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + } ) ] ) end - def known_fields; end + def known_fields + end # @api private sig do @@ -37,114 +48,139 @@ module OpenAI Symbol, T.all( OpenAI::Internal::Type::BaseModel::KnownFieldShape, - {type: OpenAI::Internal::Type::Converter::Input} + { type: OpenAI::Internal::Type::Converter::Input } ) ] ) end - def fields; end + def fields + end # @api private sig do params( name_sym: Symbol, required: T::Boolean, - type_info: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), - api_name: Symbol, - nil?: T::Boolean - }, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + { + const: + T.nilable( + T.any(NilClass, T::Boolean, Integer, Float, Symbol) + ), + enum: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ), + union: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ), + api_name: Symbol, + nil?: T::Boolean + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .void + ).void + end + private def add_field(name_sym, required:, type_info:, spec:) end - private def add_field(name_sym, required:, type_info:, spec:); end # @api private sig do params( name_sym: Symbol, - type_info: T.any( - OpenAI::Internal::AnyHash, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .void + ).void + end + def required(name_sym, type_info, spec = {}) end - def required(name_sym, type_info, spec = {}); end # @api private sig do params( name_sym: Symbol, - type_info: T.any( - OpenAI::Internal::AnyHash, - 
T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .void + ).void + end + def optional(name_sym, type_info, spec = {}) end - def optional(name_sym, type_info, spec = {}); end # @api private # # `request_only` attributes not excluded from `.#coerce` when receiving responses # even if well behaved servers should not send them sig { params(blk: T.proc.void).void } - private def request_only(&blk); end + private def request_only(&blk) + end # @api private # # `response_only` attributes are omitted from `.#dump` when making requests sig { params(blk: T.proc.void).void } - private def response_only(&blk); end + private def response_only(&blk) + end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end class << self # @api private sig do override .params( - value: T.any(OpenAI::Internal::Type::BaseModel, T::Hash[T.anything, T.anything], T.anything), + value: + T.any( + OpenAI::Internal::Type::BaseModel, + T::Hash[T.anything, T.anything], + T.anything + ), state: OpenAI::Internal::Type::Converter::CoerceState ) .returns(T.any(T.attached_class, T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do override - .params(value: T.any( - T.attached_class, - T.anything - ), - state: OpenAI::Internal::Type::Converter::DumpState) + .params( + value: T.any(T.attached_class, T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.any(T::Hash[T.anything, T.anything], T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end end # Returns the raw value associated with the given key, if found. Otherwise, nil is @@ -154,7 +190,8 @@ module OpenAI # undocumented features. This method does not parse response data into # higher-level types. Lookup by anything other than a Symbol is an ArgumentError. sig { params(key: Symbol).returns(T.nilable(T.anything)) } - def [](key); end + def [](key) + end # Returns a Hash of the data underlying this object. O(1) # @@ -165,7 +202,8 @@ module OpenAI # This method is not recursive. The returned value is shared by the object, so it # should not be mutated. sig { overridable.returns(OpenAI::Internal::AnyHash) } - def to_h; end + def to_h + end # Returns a Hash of the data underlying this object. O(1) # @@ -176,39 +214,60 @@ module OpenAI # This method is not recursive. The returned value is shared by the object, so it # should not be mutated. 
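The `#[]` / `#to_h` contract documented above can be made concrete with a short sketch. Constructing a generated model directly from a Hash is for illustration only; these objects normally come back from API calls:

```ruby
# Hedged sketch of raw-value access on a parsed BaseModel.
file = OpenAI::VectorStores::VectorStoreFile.new(id: "file_123", status: :completed)

file.to_h     # O(1); returns the shared underlying Hash (do not mutate)
file[:status] # => :completed — raw lookup by Symbol; nil when absent
# file["status"] would raise ArgumentError: lookup must use a Symbol
```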
sig { overridable.returns(OpenAI::Internal::AnyHash) } - def to_hash; end + def to_hash + end - sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::AnyHash) } - def deconstruct_keys(keys); end + sig do + params(keys: T.nilable(T::Array[Symbol])).returns( + OpenAI::Internal::AnyHash + ) + end + def deconstruct_keys(keys) + end class << self # @api private - sig { params(model: OpenAI::Internal::Type::BaseModel).returns(OpenAI::Internal::AnyHash) } - def walk(model); end + sig do + params(model: OpenAI::Internal::Type::BaseModel).returns( + OpenAI::Internal::AnyHash + ) + end + def walk(model) + end end sig { params(a: T.anything).returns(String) } - def to_json(*a); end + def to_json(*a) + end sig { params(a: T.anything).returns(String) } - def to_yaml(*a); end + def to_yaml(*a) + end # Create a new instance of a model. - sig { params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns(T.attached_class) } - def self.new(data = {}); end + sig do + params(data: T.any(T::Hash[Symbol, T.anything], T.self_type)).returns( + T.attached_class + ) + end + def self.new(data = {}) + end class << self # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end end sig { returns(String) } - def to_s; end + def to_s + end # @api private sig { returns(String) } - def inspect; end + def inspect + end end end end diff --git a/rbi/openai/internal/type/base_page.rbi b/rbi/openai/internal/type/base_page.rbi index 6a3257fe..1df53287 100644 --- a/rbi/openai/internal/type/base_page.rbi +++ b/rbi/openai/internal/type/base_page.rbi @@ -10,28 +10,33 @@ module OpenAI Elem = type_member(:out) sig { overridable.returns(T::Boolean) } - def next_page?; end + def next_page? + end sig { overridable.returns(T.self_type) } - def next_page; end + def next_page + end sig { overridable.params(blk: T.proc.params(arg0: Elem).void).void } - def auto_paging_each(&blk); end + def auto_paging_each(&blk) + end sig { returns(T::Enumerable[Elem]) } - def to_enum; end + def to_enum + end # @api private sig do params( client: OpenAI::Internal::Transport::BaseClient, - req: OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + req: + OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, headers: T.any(T::Hash[String, String], Net::HTTPHeader), page_data: T.anything - ) - .void + ).void + end + def initialize(client:, req:, headers:, page_data:) end - def initialize(client:, req:, headers:, page_data:); end end end end diff --git a/rbi/openai/internal/type/base_stream.rbi b/rbi/openai/internal/type/base_stream.rbi index ff3f1d51..82b62c1a 100644 --- a/rbi/openai/internal/type/base_stream.rbi +++ b/rbi/openai/internal/type/base_stream.rbi @@ -18,39 +18,50 @@ module OpenAI # # This should not be relied upon for resource clean up, as the garbage collector # is not guaranteed to run. 
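Since `BasePage` above defines the `next_page?` / `next_page` / `auto_paging_each` surface, here is how a cursor page is typically consumed. The `client.vector_stores.files.list` call shape is taken from the resource diff earlier in this patch; the IDs are placeholders and `client` reuses the construction sketch above:

```ruby
# Hedged pagination sketch over OpenAI::Internal::CursorPage.
page = client.vector_stores.files.list("vs_abc123", limit: 20)

page.auto_paging_each do |file|
  puts file.id # later pages are fetched lazily as iteration proceeds
end

page = page.next_page if page.next_page? # or step through pages manually
```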
- sig { params(stream: T::Enumerable[T.anything]).returns(T.proc.params(arg0: Integer).void) } - def defer_closing(stream); end + sig do + params(stream: T::Enumerable[T.anything]).returns( + T.proc.params(arg0: Integer).void + ) + end + def defer_closing(stream) + end end sig { void } - def close; end + def close + end # @api private sig { overridable.returns(T::Enumerable[Elem]) } - private def iterator; end + private def iterator + end sig { params(blk: T.proc.params(arg0: Elem).void).void } - def each(&blk); end + def each(&blk) + end sig { returns(T::Enumerator[Elem]) } - def to_enum; end + def to_enum + end # @api private sig do params( - model: T.any(T::Class[T.anything], OpenAI::Internal::Type::Converter), + model: + T.any(T::Class[T.anything], OpenAI::Internal::Type::Converter), url: URI::Generic, status: Integer, response: Net::HTTPResponse, stream: T::Enumerable[Message] - ) - .void + ).void + end + def initialize(model:, url:, status:, response:, stream:) end - def initialize(model:, url:, status:, response:, stream:); end # @api private sig { returns(String) } - def inspect; end + def inspect + end end end end diff --git a/rbi/openai/internal/type/boolean.rbi b/rbi/openai/internal/type/boolean.rbi index 1cecc37d..b7cc1e3d 100644 --- a/rbi/openai/internal/type/boolean.rbi +++ b/rbi/openai/internal/type/boolean.rbi @@ -12,35 +12,37 @@ module OpenAI abstract! sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other); end + def self.===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other); end + def self.==(other) + end class << self # @api private sig do override - .params(value: T.any( - T::Boolean, - T.anything - ), - state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.any(T::Boolean, T.anything), + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.any(T::Boolean, T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do override - .params(value: T.any( - T::Boolean, - T.anything - ), - state: OpenAI::Internal::Type::Converter::DumpState) + .params( + value: T.any(T::Boolean, T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.any(T::Boolean, T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end end end end diff --git a/rbi/openai/internal/type/converter.rbi b/rbi/openai/internal/type/converter.rbi index 47dace26..eb354592 100644 --- a/rbi/openai/internal/type/converter.rbi +++ b/rbi/openai/internal/type/converter.rbi @@ -5,56 +5,82 @@ module OpenAI module Type # @api private module Converter - Input = T.type_alias { T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) } + Input = + T.type_alias do + T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) + end CoerceState = T.type_alias do { strictness: T.any(T::Boolean, Symbol), - exactness: {yes: Integer, no: Integer, maybe: Integer}, + exactness: { + yes: Integer, + no: Integer, + maybe: Integer + }, branched: Integer } end - DumpState = T.type_alias { {can_retry: T::Boolean} } + DumpState = T.type_alias { { can_retry: T::Boolean } } # @api private sig do overridable - .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.anything) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do overridable - .params(value: T.anything, state: 
OpenAI::Internal::Type::Converter::DumpState) + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.anything) end - def dump(value, state:); end + def dump(value, state:) + end # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end class << self # @api private sig do params( - spec: T.any( - { - const: T.nilable(T.any(NilClass, T::Boolean, Integer, Float, Symbol)), - enum: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)), - union: T.nilable(T.proc.returns(OpenAI::Internal::Type::Converter::Input)) - }, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ) - ) - .returns(T.proc.returns(T.anything)) + spec: + T.any( + { + const: + T.nilable( + T.any(NilClass, T::Boolean, Integer, Float, Symbol) + ), + enum: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ), + union: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ) + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ) + ).returns(T.proc.returns(T.anything)) + end + def self.type_info(spec) end - def self.type_info(spec); end # @api private # @@ -72,8 +98,7 @@ module OpenAI target: OpenAI::Internal::Type::Converter::Input, value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState - ) - .returns(T.anything) + ).returns(T.anything) end def self.coerce( target, @@ -97,22 +122,33 @@ module OpenAI # - `no`: the value cannot be converted to the target type. # # See implementation below for more details. - state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - ); end + state: { + strictness: true, + exactness: { + yes: 0, + no: 0, + maybe: 0 + }, + branched: 0 + } + ) + end + # @api private sig do params( target: OpenAI::Internal::Type::Converter::Input, value: T.anything, state: OpenAI::Internal::Type::Converter::DumpState - ) - .returns(T.anything) + ).returns(T.anything) + end + def self.dump(target, value, state: { can_retry: true }) end - def self.dump(target, value, state: {can_retry: true}); end # @api private sig { params(target: T.anything, depth: Integer).returns(String) } - def self.inspect(target, depth:); end + def self.inspect(target, depth:) + end end end end diff --git a/rbi/openai/internal/type/enum.rbi b/rbi/openai/internal/type/enum.rbi index 8dcaa918..09de405a 100644 --- a/rbi/openai/internal/type/enum.rbi +++ b/rbi/openai/internal/type/enum.rbi @@ -19,17 +19,25 @@ module OpenAI include OpenAI::Internal::Type::Converter # All of the valid Symbol values for this enum. - sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } - def values; end + sig do + overridable.returns( + T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)] + ) + end + def values + end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other); end + def ===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end # @api private # @@ -37,27 +45,31 @@ module OpenAI # of the enum. 
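The coercion contract being documented here — a value matching a known member converts to that member's Symbol, anything else passes through and is only recorded in the `exactness` tally — can be sketched as follows. The state literal mirrors the `CoerceState` shape defined earlier, and the exact tally bookkeeping is an assumption:

```ruby
# Hedged sketch of enum coercion; the enum module is one generated
# elsewhere in this patch.
state = { strictness: true, exactness: { yes: 0, no: 0, maybe: 0 }, branched: 0 }

OpenAI::VectorStores::FileListParams::Order.coerce("asc", state: state)
# => :asc       (a known member: String converted to Symbol)
OpenAI::VectorStores::FileListParams::Order.coerce("sideways", state: state)
# => "sideways" (unknown: returned untouched; only the exactness tally records it)
```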
sig do override - .params(value: T.any( - String, - Symbol, - T.anything - ), - state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.any(String, Symbol, T.anything), + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.any(Symbol, T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do override - .params(value: T.any(Symbol, T.anything), state: OpenAI::Internal::Type::Converter::DumpState) + .params( + value: T.any(Symbol, T.anything), + state: OpenAI::Internal::Type::Converter::DumpState + ) .returns(T.any(Symbol, T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end end end end diff --git a/rbi/openai/internal/type/file_input.rbi b/rbi/openai/internal/type/file_input.rbi index 5271aa5e..19e6c2e1 100644 --- a/rbi/openai/internal/type/file_input.rbi +++ b/rbi/openai/internal/type/file_input.rbi @@ -16,24 +16,25 @@ module OpenAI abstract! sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other); end + def self.===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other); end + def self.==(other) + end class << self # @api private sig do override - .params(value: T.any( - StringIO, - String, - T.anything - ), - state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.any(StringIO, String, T.anything), + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.any(StringIO, T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do @@ -44,7 +45,8 @@ module OpenAI ) .returns(T.any(Pathname, StringIO, IO, String, T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end end end end diff --git a/rbi/openai/internal/type/hash_of.rbi b/rbi/openai/internal/type/hash_of.rbi index 0faca34e..25123f07 100644 --- a/rbi/openai/internal/type/hash_of.rbi +++ b/rbi/openai/internal/type/hash_of.rbi @@ -15,25 +15,29 @@ module OpenAI sig do params( - type_info: T.any( - OpenAI::Internal::AnyHash, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.[](type_info, spec = {}) end - def self.[](type_info, spec = {}); end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other); end + def ===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end # @api private sig do @@ -44,7 +48,8 @@ module OpenAI ) .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do @@ -55,33 +60,38 @@ module OpenAI ) .returns(T.any(OpenAI::Internal::AnyHash, T.anything)) end - def dump(value, state:); end + def dump(value, state:) + end # @api private sig { returns(Elem) } - protected def item_type; end + protected def item_type + end # @api private sig { returns(T::Boolean) } - protected def nilable?; end + protected def nilable? 
+ end # @api private sig do params( - type_info: T.any( - OpenAI::Internal::AnyHash, - T.proc.returns(OpenAI::Internal::Type::Converter::Input), - OpenAI::Internal::Type::Converter::Input - ), + type_info: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), spec: OpenAI::Internal::AnyHash - ) - .void + ).void + end + def initialize(type_info, spec = {}) end - def initialize(type_info, spec = {}); end # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end end end end diff --git a/rbi/openai/internal/type/request_parameters.rbi b/rbi/openai/internal/type/request_parameters.rbi index 1d86b7ab..45ab8328 100644 --- a/rbi/openai/internal/type/request_parameters.rbi +++ b/rbi/openai/internal/type/request_parameters.rbi @@ -6,14 +6,22 @@ module OpenAI # @api private module RequestParameters # Options to specify HTTP behaviour for this request. - sig { returns(OpenAI::RequestOpts) } - attr_accessor :request_options + sig { returns(OpenAI::RequestOptions) } + attr_reader :request_options + + sig { params(request_options: OpenAI::RequestOptions::OrHash).void } + attr_writer :request_options # @api private module Converter # @api private - sig { params(params: T.anything).returns([T.anything, OpenAI::Internal::AnyHash]) } - def dump_request(params); end + sig do + params(params: T.anything).returns( + [T.anything, OpenAI::Internal::AnyHash] + ) + end + def dump_request(params) + end end end end diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi index 13b7409c..7d3148a9 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -11,65 +11,99 @@ module OpenAI # # All of the specified variant info for this union. sig do - returns(T::Array[[T.nilable(Symbol), T.proc.returns(OpenAI::Internal::Type::Converter::Input)]]) + returns( + T::Array[ + [ + T.nilable(Symbol), + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ] + ] + ) + end + private def known_variants end - private def known_variants; end # @api private sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } - protected def derefed_variants; end + protected def derefed_variants + end # All of the specified variants for this union. 
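As an illustration of the private `discriminator` / `variant` DSL whose signatures appear above, a hedged sketch of how a generated union module is assembled. The module name and variant mapping are illustrative, not taken from this patch, though the variant classes are ones it references:

```ruby
# Hedged sketch of the union DSL; names are illustrative.
module ChunkingStrategyParam
  extend OpenAI::Internal::Type::Union

  discriminator :type # route coercion on the `type` property

  variant :auto,   -> { OpenAI::AutoFileChunkingStrategyParam }
  variant :static, -> { OpenAI::StaticFileChunkingStrategyObjectParam }
end
```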
sig { overridable.returns(T::Array[T.anything]) } - def variants; end + def variants + end # @api private sig { params(property: Symbol).void } - private def discriminator(property); end + private def discriminator(property) + end # @api private sig do params( - key: T.any(Symbol, OpenAI::Internal::AnyHash, T.proc.returns(T.anything), T.anything), - spec: T.any(OpenAI::Internal::AnyHash, T.proc.returns(T.anything), T.anything) - ) - .void + key: + T.any( + Symbol, + OpenAI::Internal::AnyHash, + T.proc.returns(T.anything), + T.anything + ), + spec: + T.any( + OpenAI::Internal::AnyHash, + T.proc.returns(T.anything), + T.anything + ) + ).void + end + private def variant(key, spec = nil) end - private def variant(key, spec = nil); end # @api private sig { params(value: T.anything).returns(T.nilable(T.anything)) } - private def resolve_variant(value); end + private def resolve_variant(value) + end sig { params(other: T.anything).returns(T::Boolean) } - def ===(other); end + def ===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def ==(other); end + def ==(other) + end sig { returns(Integer) } - def hash; end + def hash + end # @api private sig do override - .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.anything) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do - override.params( - value: T.anything, - state: OpenAI::Internal::Type::Converter::DumpState - ).returns(T.anything) + override + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ) + .returns(T.anything) + end + def dump(value, state:) end - def dump(value, state:); end # @api private sig { params(depth: Integer).returns(String) } - def inspect(depth: 0); end + def inspect(depth: 0) + end end end end diff --git a/rbi/openai/internal/type/unknown.rbi b/rbi/openai/internal/type/unknown.rbi index 0128954a..48a18c80 100644 --- a/rbi/openai/internal/type/unknown.rbi +++ b/rbi/openai/internal/type/unknown.rbi @@ -12,28 +12,37 @@ module OpenAI abstract! 
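`Unknown` is the catch-all converter — it is also the `model:` default on `BaseClient#request` earlier in this patch — so a two-line behavioral sketch may help; treat the exact return values as assumptions:

```ruby
# Hedged sketch: Unknown matches anything and passes values through.
OpenAI::Internal::Type::Unknown === { "any" => ["shape"] } # => true

state = { strictness: true, exactness: { yes: 0, no: 0, maybe: 0 }, branched: 0 }
OpenAI::Internal::Type::Unknown.coerce([1, 2, 3], state: state) # => [1, 2, 3]
```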
sig { params(other: T.anything).returns(T::Boolean) } - def self.===(other); end + def self.===(other) + end sig { params(other: T.anything).returns(T::Boolean) } - def self.==(other); end + def self.==(other) + end class << self # @api private sig do override - .params(value: T.anything, state: OpenAI::Internal::Type::Converter::CoerceState) + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::CoerceState + ) .returns(T.anything) end - def coerce(value, state:); end + def coerce(value, state:) + end # @api private sig do - override.params( - value: T.anything, - state: OpenAI::Internal::Type::Converter::DumpState - ).returns(T.anything) + override + .params( + value: T.anything, + state: OpenAI::Internal::Type::Converter::DumpState + ) + .returns(T.anything) + end + def dump(value, state:) end - def dump(value, state:); end end end end diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index 594fae53..54ec6f18 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -6,60 +6,102 @@ module OpenAI module Util # @api private sig { returns(Float) } - def self.monotonic_secs; end + def self.monotonic_secs + end class << self # @api private sig { returns(String) } - def arch; end + def arch + end # @api private sig { returns(String) } - def os; end + def os + end end class << self # @api private sig { params(input: T.anything).returns(T::Boolean) } - def primitive?(input); end + def primitive?(input) + end # @api private - sig { params(input: T.any(String, T::Boolean)).returns(T.any(T::Boolean, T.anything)) } - def coerce_boolean(input); end + sig do + params(input: T.any(String, T::Boolean)).returns( + T.any(T::Boolean, T.anything) + ) + end + def coerce_boolean(input) + end # @api private - sig { params(input: T.any(String, T::Boolean)).returns(T.nilable(T::Boolean)) } - def coerce_boolean!(input); end + sig do + params(input: T.any(String, T::Boolean)).returns( + T.nilable(T::Boolean) + ) + end + def coerce_boolean!(input) + end # @api private - sig { params(input: T.any(String, Integer)).returns(T.any(Integer, T.anything)) } - def coerce_integer(input); end + sig do + params(input: T.any(String, Integer)).returns( + T.any(Integer, T.anything) + ) + end + def coerce_integer(input) + end # @api private - sig { params(input: T.any(String, Integer, Float)).returns(T.any(Float, T.anything)) } - def coerce_float(input); end + sig do + params(input: T.any(String, Integer, Float)).returns( + T.any(Float, T.anything) + ) + end + def coerce_float(input) + end # @api private - sig { params(input: T.anything).returns(T.any(T::Hash[T.anything, T.anything], T.anything)) } - def coerce_hash(input); end + sig do + params(input: T.anything).returns( + T.any(T::Hash[T.anything, T.anything], T.anything) + ) + end + def coerce_hash(input) + end # @api private - sig { params(input: T.anything).returns(T.nilable(T::Hash[T.anything, T.anything])) } - def coerce_hash!(input); end + sig do + params(input: T.anything).returns( + T.nilable(T::Hash[T.anything, T.anything]) + ) + end + def coerce_hash!(input) + end end class << self # @api private - sig { params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns(T.anything) } - private def deep_merge_lr(lhs, rhs, concat: false); end + sig do + params(lhs: T.anything, rhs: T.anything, concat: T::Boolean).returns( + T.anything + ) + end + private def deep_merge_lr(lhs, rhs, concat: false) + end # @api private # # Recursively merge one hash with another. 
If the values at a given key are not # both hashes, just take the new value. sig do - params(values: T::Array[T.anything], sentinel: T.nilable(T.anything), concat: T::Boolean) - .returns(T.anything) + params( + values: T::Array[T.anything], + sentinel: T.nilable(T.anything), + concat: T::Boolean + ).returns(T.anything) end def deep_merge( *values, @@ -67,47 +109,67 @@ module OpenAI sentinel: nil, # whether to merge sequences by concatenation. concat: false - ); end + ) + end + # @api private sig do params( - data: T.any(OpenAI::Internal::AnyHash, T::Array[T.anything], T.anything), - pick: T.nilable( + data: T.any( - Symbol, - Integer, - T::Array[T.any(Symbol, Integer)], - T.proc.params(arg0: T.anything).returns(T.anything) - ) - ), + OpenAI::Internal::AnyHash, + T::Array[T.anything], + T.anything + ), + pick: + T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), blk: T.nilable(T.proc.returns(T.anything)) - ) - .returns(T.nilable(T.anything)) + ).returns(T.nilable(T.anything)) + end + def dig(data, pick, &blk) end - def dig(data, pick, &blk); end end class << self # @api private sig { params(uri: URI::Generic).returns(String) } - def uri_origin(uri); end + def uri_origin(uri) + end # @api private sig { params(path: T.any(String, T::Array[String])).returns(String) } - def interpolate_path(path); end + def interpolate_path(path) + end end class << self # @api private - sig { params(query: T.nilable(String)).returns(T::Hash[String, T::Array[String]]) } - def decode_query(query); end + sig do + params(query: T.nilable(String)).returns( + T::Hash[String, T::Array[String]] + ) + end + def decode_query(query) + end # @api private sig do - params(query: T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) - .returns(T.nilable(String)) + params( + query: + T.nilable( + T::Hash[String, T.nilable(T.any(T::Array[String], String))] + ) + ).returns(T.nilable(String)) + end + def encode_query(query) end - def encode_query(query); end end ParsedUriShape = @@ -123,31 +185,53 @@ module OpenAI class << self # @api private - sig { params(url: T.any(URI::Generic, String)).returns(OpenAI::Internal::Util::ParsedUriShape) } - def parse_uri(url); end + sig do + params(url: T.any(URI::Generic, String)).returns( + OpenAI::Internal::Util::ParsedUriShape + ) + end + def parse_uri(url) + end # @api private - sig { params(parsed: OpenAI::Internal::Util::ParsedUriShape).returns(URI::Generic) } - def unparse_uri(parsed); end + sig do + params(parsed: OpenAI::Internal::Util::ParsedUriShape).returns( + URI::Generic + ) + end + def unparse_uri(parsed) + end # @api private sig do - params(lhs: OpenAI::Internal::Util::ParsedUriShape, rhs: OpenAI::Internal::Util::ParsedUriShape) - .returns(URI::Generic) + params( + lhs: OpenAI::Internal::Util::ParsedUriShape, + rhs: OpenAI::Internal::Util::ParsedUriShape + ).returns(URI::Generic) + end + def join_parsed_uri(lhs, rhs) end - def join_parsed_uri(lhs, rhs); end end class << self # @api private sig do params( - headers: T::Hash[String, - T.nilable(T.any(String, Integer, T::Array[T.nilable(T.any(String, Integer))]))] - ) - .returns(T::Hash[String, String]) + headers: + T::Hash[ + String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + ) + ] + ).returns(T::Hash[String, String]) + end + def normalized_headers(*headers) end - def normalized_headers(*headers); end end # @api private @@ -156,38 +240,54 @@ module OpenAI class ReadIOAdapter 
# @api private sig { returns(T.nilable(T::Boolean)) } - def close?; end + def close? + end # @api private sig { void } - def close; end + def close + end # @api private sig { params(max_len: T.nilable(Integer)).returns(String) } - private def read_enum(max_len); end + private def read_enum(max_len) + end # @api private - sig { params(max_len: T.nilable(Integer), out_string: T.nilable(String)).returns(T.nilable(String)) } - def read(max_len = nil, out_string = nil); end + sig do + params( + max_len: T.nilable(Integer), + out_string: T.nilable(String) + ).returns(T.nilable(String)) + end + def read(max_len = nil, out_string = nil) + end # @api private sig do params( src: T.any(String, Pathname, StringIO, T::Enumerable[String]), blk: T.proc.params(arg0: String).void - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new(src, &blk) end - def self.new(src, &blk); end end class << self - sig { params(blk: T.proc.params(y: Enumerator::Yielder).void).returns(T::Enumerable[String]) } - def writable_enum(&blk); end + sig do + params(blk: T.proc.params(y: Enumerator::Yielder).void).returns( + T::Enumerable[String] + ) + end + def writable_enum(&blk) + end end - JSON_CONTENT = T.let(%r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}, Regexp) - JSONL_CONTENT = T.let(%r{^application/(:?x-(?:n|l)djson)|(:?(?:x-)?jsonl)}, Regexp) + JSON_CONTENT = + T.let(%r{^application/(?:vnd(?:\.[^.]+)*\+)?json(?!l)}, Regexp) + JSONL_CONTENT = + T.let(%r{^application/(:?x-(?:n|l)djson)|(:?(?:x-)?jsonl)}, Regexp) class << self # @api private @@ -197,10 +297,15 @@ module OpenAI val: T.anything, closing: T::Array[T.proc.void], content_type: T.nilable(String) - ) - .void + ).void + end + private def write_multipart_content( + y, + val:, + closing:, + content_type: nil + ) end - private def write_multipart_content(y, val:, closing:, content_type: nil); end # @api private sig do @@ -210,24 +315,33 @@ module OpenAI key: T.any(Symbol, String), val: T.anything, closing: T::Array[T.proc.void] - ) - .void + ).void + end + private def write_multipart_chunk(y, boundary:, key:, val:, closing:) end - private def write_multipart_chunk(y, boundary:, key:, val:, closing:); end # @api private - sig { params(body: T.anything).returns([String, T::Enumerable[String]]) } - private def encode_multipart_streaming(body); end + sig do + params(body: T.anything).returns([String, T::Enumerable[String]]) + end + private def encode_multipart_streaming(body) + end # @api private - sig { params(headers: T::Hash[String, String], body: T.anything).returns(T.anything) } - def encode_content(headers, body); end + sig do + params(headers: T::Hash[String, String], body: T.anything).returns( + T.anything + ) + end + def encode_content(headers, body) + end # @api private # # https://www.iana.org/assignments/character-sets/character-sets.xhtml sig { params(content_type: String, text: String).void } - def force_charset!(content_type, text:); end + def force_charset!(content_type, text:) + end # @api private # @@ -237,10 +351,10 @@ module OpenAI headers: T.any(T::Hash[String, String], Net::HTTPHeader), stream: T::Enumerable[String], suppress_error: T::Boolean - ) - .returns(T.anything) + ).returns(T.anything) + end + def decode_content(headers, stream:, suppress_error: false) end - def decode_content(headers, stream:, suppress_error: false); end end class << self @@ -248,24 +362,29 @@ module OpenAI # # https://doc.rust-lang.org/std/iter/trait.FusedIterator.html sig do - params(enum: T::Enumerable[T.anything], external: T::Boolean, 
close: T.proc.void) - .returns(T::Enumerable[T.anything]) + params( + enum: T::Enumerable[T.anything], + external: T::Boolean, + close: T.proc.void + ).returns(T::Enumerable[T.anything]) + end + def fused_enum(enum, external: false, &close) end - def fused_enum(enum, external: false, &close); end # @api private sig { params(enum: T.nilable(T::Enumerable[T.anything])).void } - def close_fused!(enum); end + def close_fused!(enum) + end # @api private sig do params( enum: T.nilable(T::Enumerable[T.anything]), blk: T.proc.params(arg0: Enumerator::Yielder).void - ) - .returns(T::Enumerable[T.anything]) + ).returns(T::Enumerable[T.anything]) + end + def chain_fused(enum, &blk) end - def chain_fused(enum, &blk); end end ServerSentEvent = @@ -285,8 +404,11 @@ module OpenAI # # This decoder is responsible for reassembling lines split across multiple # fragments. - sig { params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) } - def decode_lines(enum); end + sig do + params(enum: T::Enumerable[String]).returns(T::Enumerable[String]) + end + def decode_lines(enum) + end # @api private # @@ -294,9 +416,12 @@ module OpenAI # # Assumes that `lines` has been decoded with `#decode_lines`. sig do - params(lines: T::Enumerable[String]).returns(T::Enumerable[OpenAI::Internal::Util::ServerSentEvent]) + params(lines: T::Enumerable[String]).returns( + T::Enumerable[OpenAI::Internal::Util::ServerSentEvent] + ) + end + def decode_sse(lines) end - def decode_sse(lines); end end end end diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index b72b3e70..c3eaf325 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -67,7 +67,8 @@ module OpenAI Evals = OpenAI::Models::Evals - EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig + EvalStoredCompletionsDataSourceConfig = + OpenAI::Models::EvalStoredCompletionsDataSourceConfig EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader @@ -101,7 +102,8 @@ module OpenAI FunctionDefinition = OpenAI::Models::FunctionDefinition - FunctionParameters = T.let(OpenAI::Models::FunctionParameters, OpenAI::Internal::Type::Converter) + FunctionParameters = + T.let(OpenAI::Models::FunctionParameters, OpenAI::Internal::Type::Converter) Image = OpenAI::Models::Image @@ -139,7 +141,8 @@ module OpenAI ModerationTextInput = OpenAI::Models::ModerationTextInput - OtherFileChunkingStrategyObject = OpenAI::Models::OtherFileChunkingStrategyObject + OtherFileChunkingStrategyObject = + OpenAI::Models::OtherFileChunkingStrategyObject Reasoning = OpenAI::Models::Reasoning @@ -157,9 +160,11 @@ module OpenAI StaticFileChunkingStrategy = OpenAI::Models::StaticFileChunkingStrategy - StaticFileChunkingStrategyObject = OpenAI::Models::StaticFileChunkingStrategyObject + StaticFileChunkingStrategyObject = + OpenAI::Models::StaticFileChunkingStrategyObject - StaticFileChunkingStrategyObjectParam = OpenAI::Models::StaticFileChunkingStrategyObjectParam + StaticFileChunkingStrategyObjectParam = + OpenAI::Models::StaticFileChunkingStrategyObjectParam Upload = OpenAI::Models::Upload diff --git a/rbi/openai/models/all_models.rbi b/rbi/openai/models/all_models.rbi index 82f906f3..66c0e6ae 100644 --- a/rbi/openai/models/all_models.rbi +++ b/rbi/openai/models/all_models.rbi @@ -5,31 +5,52 @@ module OpenAI module AllModels extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::ChatModel::TaggedSymbol, + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) + end + module ResponsesOnlyModel extend 
OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::AllModels::ResponsesOnlyModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + O1_PRO = + T.let(:"o1-pro", OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol) O1_PRO_2025_03_19 = - T.let(:"o1-pro-2025-03-19", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + T.let( + :"o1-pro-2025-03-19", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = - T.let(:"computer-use-preview", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + T.let( + :"computer-use-preview", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW_2025_03_11 = - T.let(:"computer-use-preview-2025-03-11", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) + T.let( + :"computer-use-preview-2025-03-11", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol] + ) + end + def self.values + end end - sig do - override - .returns( - [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol] - ) + sig { override.returns(T::Array[OpenAI::AllModels::Variants]) } + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 5d48095e..254db622 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -7,20 +7,26 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text to generate audio for. The maximum length is 4096 characters. sig { returns(String) } attr_accessor :input # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::Audio::SpeechModel::OrSymbol)) } attr_accessor :model # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig { returns(T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol)) } + sig do + returns( + T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol) + ) + end attr_accessor :voice # Control the voice of your generated audio with additional instructions. Does not @@ -33,10 +39,21 @@ module OpenAI # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. 
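To ground the accessors above, a hedged construction sketch for `SpeechCreateParams` under the shortened `OpenAI::Audio::*` namespace this patch introduces. The keyword names follow the `self.new` signature later in this hunk; the literal values are illustrative:

```ruby
# Hedged sketch; values are illustrative.
params = OpenAI::Audio::SpeechCreateParams.new(
  input: "The quick brown fox jumped over the lazy dog.",
  model: :"gpt-4o-mini-tts",
  voice: :alloy,
  response_format: :mp3, # mp3 / opus / aac / flac / wav / pcm, per the doc above
  speed: 1.0
)
```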
- sig { returns(T.nilable(OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol + ) + ) + end attr_reader :response_format - sig { params(response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol).void } + sig do + params( + response_format: + OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol + ).void + end attr_writer :response_format # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is @@ -50,14 +67,15 @@ module OpenAI sig do params( input: String, - model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol), + model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), + voice: + T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, - response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, + response_format: + OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The text to generate audio for. The maximum length is 4096 characters. @@ -80,30 +98,47 @@ module OpenAI # the default. Does not work with `gpt-4o-mini-tts`. speed: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - input: String, - model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol), - voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol), - instructions: String, - response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, - speed: Float, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + input: String, + model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), + voice: + T.any( + String, + OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol + ), + instructions: String, + response_format: + OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, + speed: Float, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any(String, OpenAI::Audio::SpeechModel::TaggedSymbol) + end + + sig do + override.returns( + T::Array[OpenAI::Audio::SpeechCreateParams::Model::Variants] + ) + end + def self.variants + end end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, @@ -113,23 +148,68 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + end - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } + sig do + override.returns( + T::Array[OpenAI::Audio::SpeechCreateParams::Voice::Variants] + ) + end + def self.variants + end + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Audio::SpeechCreateParams::Voice) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - BALLAD = T.let(:ballad, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - CORAL = T.let(:coral, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - ECHO = T.let(:echo, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - FABLE = T.let(:fable, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - ONYX = T.let(:onyx, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - NOVA = T.let(:nova, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - SAGE = T.let(:sage, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - SHIMMER = T.let(:shimmer, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) - VERSE = T.let(:verse, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) + ALLOY = + T.let( + :alloy, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + ASH = + T.let(:ash, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) + BALLAD = + T.let( + :ballad, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + CORAL = + T.let( + :coral, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + ECHO = + T.let(:echo, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) + FABLE = + T.let( + :fable, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + ONYX = + T.let(:onyx, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) + NOVA = + T.let(:nova, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) + SAGE = + T.let(:sage, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) + SHIMMER = + T.let( + :shimmer, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) + VERSE = + T.let( + :verse, + OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol + ) end # The format to audio in. 
Supported formats are `mp3`, `opus`, `aac`, `flac`, @@ -137,18 +217,52 @@ module OpenAI module ResponseFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - AAC = T.let(:aac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - FLAC = T.let(:flac, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - WAV = T.let(:wav, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) - PCM = T.let(:pcm, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) + MP3 = + T.let( + :mp3, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) + OPUS = + T.let( + :opus, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) + AAC = + T.let( + :aac, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) + FLAC = + T.let( + :flac, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) + WAV = + T.let( + :wav, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) + PCM = + T.let( + :pcm, + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/audio/speech_model.rbi b/rbi/openai/models/audio/speech_model.rbi index e049d032..882f990f 100644 --- a/rbi/openai/models/audio/speech_model.rbi +++ b/rbi/openai/models/audio/speech_model.rbi @@ -6,15 +6,20 @@ module OpenAI module SpeechModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Audio::SpeechModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) - TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) - GPT_4O_MINI_TTS = T.let(:"gpt-4o-mini-tts", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) + TTS_1 = T.let(:"tts-1", OpenAI::Audio::SpeechModel::TaggedSymbol) + TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Audio::SpeechModel::TaggedSymbol) + GPT_4O_MINI_TTS = + T.let(:"gpt-4o-mini-tts", OpenAI::Audio::SpeechModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::Audio::SpeechModel::TaggedSymbol]) + end + def self.values + end end end end diff --git a/rbi/openai/models/audio/transcription.rbi b/rbi/openai/models/audio/transcription.rbi index fa6cd066..b7bca615 100644 --- a/rbi/openai/models/audio/transcription.rbi +++ b/rbi/openai/models/audio/transcription.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class Transcription < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The transcribed text. 
sig { returns(String) } attr_accessor :text @@ -11,14 +13,15 @@ module OpenAI # The log probabilities of the tokens in the transcription. Only returned with the # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::Transcription::Logprob])) } + sig do + returns(T.nilable(T::Array[OpenAI::Audio::Transcription::Logprob])) + end attr_reader :logprobs sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::AnyHash)] - ) - .void + logprobs: T::Array[OpenAI::Audio::Transcription::Logprob::OrHash] + ).void end attr_writer :logprobs @@ -27,9 +30,8 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::Transcription::Logprob, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + logprobs: T::Array[OpenAI::Audio::Transcription::Logprob::OrHash] + ).returns(T.attached_class) end def self.new( # The transcribed text. @@ -38,11 +40,24 @@ module OpenAI # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. logprobs: nil - ); end - sig { override.returns({text: String, logprobs: T::Array[OpenAI::Models::Audio::Transcription::Logprob]}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + text: String, + logprobs: T::Array[OpenAI::Audio::Transcription::Logprob] + } + ) + end + def to_hash + end class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The token in the transcription. sig { returns(T.nilable(String)) } attr_reader :token @@ -64,7 +79,13 @@ module OpenAI sig { params(logprob: Float).void } attr_writer :logprob - sig { params(token: String, bytes: T::Array[Float], logprob: Float).returns(T.attached_class) } + sig do + params( + token: String, + bytes: T::Array[Float], + logprob: Float + ).returns(T.attached_class) + end def self.new( # The token in the transcription. token: nil, @@ -72,9 +93,16 @@ module OpenAI bytes: nil, # The log probability of the token. logprob: nil - ); end - sig { override.returns({token: String, bytes: T::Array[Float], logprob: Float}) } - def to_hash; end + ) + end + + sig do + override.returns( + { token: String, bytes: T::Array[Float], logprob: Float } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index 57d34b34..ba0c7098 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } @@ -15,7 +17,7 @@ module OpenAI # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } attr_accessor :model # Additional information to include in the transcription response. 
`logprobs` will @@ -23,10 +25,18 @@ module OpenAI # model's confidence in the transcription. `logprobs` only works with # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol]) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol]).void } + sig do + params( + include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol] + ).void + end attr_writer :include # The language of the input audio. Supplying the input language in @@ -51,10 +61,12 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. - sig { returns(T.nilable(OpenAI::Models::AudioResponseFormat::OrSymbol)) } + sig { returns(T.nilable(OpenAI::AudioResponseFormat::OrSymbol)) } attr_reader :response_format - sig { params(response_format: OpenAI::Models::AudioResponseFormat::OrSymbol).void } + sig do + params(response_format: OpenAI::AudioResponseFormat::OrSymbol).void + end attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the @@ -75,32 +87,40 @@ module OpenAI # incurs additional latency. sig do returns( - T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol]) + T.nilable( + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ] + ) ) end attr_reader :timestamp_granularities sig do params( - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol] - ) - .void + timestamp_granularities: + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ] + ).void end attr_writer :timestamp_granularities sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, OpenAI::AudioModel::OrSymbol), + include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: OpenAI::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + timestamp_granularities: + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The audio file object (not file name) to transcribe, in one of these formats: @@ -142,24 +162,29 @@ module OpenAI # incurs additional latency. 
timestamp_granularities: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], - language: String, - prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, - temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + model: T.any(String, OpenAI::AudioModel::OrSymbol), + include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], + language: String, + prompt: String, + response_format: OpenAI::AudioResponseFormat::OrSymbol, + temperature: Float, + timestamp_granularities: + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source @@ -167,26 +192,52 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::Model::Variants + ] + ) + end + def self.variants + end end module TimestampGranularity extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } + T.type_alias do + T.all( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) + WORD = + T.let( + :word, + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol + ) SEGMENT = - T.let(:segment, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) + T.let( + :segment, + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/audio/transcription_create_response.rbi b/rbi/openai/models/audio/transcription_create_response.rbi index c6ca8f08..a96f3c65 100644 --- a/rbi/openai/models/audio/transcription_create_response.rbi +++ b/rbi/openai/models/audio/transcription_create_response.rbi @@ -8,8 +8,23 @@ module OpenAI module TranscriptionCreateResponse extend OpenAI::Internal::Type::Union - sig { override.returns([OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + OpenAI::Audio::Transcription, + OpenAI::Audio::TranscriptionVerbose + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Audio::TranscriptionCreateResponse::Variants + ] + ) + end + def 
self.variants + end end end end diff --git a/rbi/openai/models/audio/transcription_include.rbi b/rbi/openai/models/audio/transcription_include.rbi index bb5758c1..a9400086 100644 --- a/rbi/openai/models/audio/transcription_include.rbi +++ b/rbi/openai/models/audio/transcription_include.rbi @@ -6,13 +6,20 @@ module OpenAI module TranscriptionInclude extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Audio::TranscriptionInclude) } OrSymbol = T.type_alias { T.any(Symbol, String) } - LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) + LOGPROBS = + T.let(:logprobs, OpenAI::Audio::TranscriptionInclude::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Audio::TranscriptionInclude::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/audio/transcription_segment.rbi b/rbi/openai/models/audio/transcription_segment.rbi index e98fad18..bac3f1b7 100644 --- a/rbi/openai/models/audio/transcription_segment.rbi +++ b/rbi/openai/models/audio/transcription_segment.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranscriptionSegment < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier of the segment. sig { returns(Integer) } attr_accessor :id @@ -59,8 +61,7 @@ module OpenAI temperature: Float, text: String, tokens: T::Array[Integer] - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier of the segment. @@ -86,25 +87,27 @@ module OpenAI text:, # Array of token IDs for the text content. 
tokens: - ); end + ) + end + sig do - override - .returns( - { - id: Integer, - avg_logprob: Float, - compression_ratio: Float, - end_: Float, - no_speech_prob: Float, - seek: Integer, - start: Float, - temperature: Float, - text: String, - tokens: T::Array[Integer] - } - ) + override.returns( + { + id: Integer, + avg_logprob: Float, + compression_ratio: Float, + end_: Float, + no_speech_prob: Float, + seek: Integer, + start: Float, + temperature: Float, + text: String, + tokens: T::Array[Integer] + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/audio/transcription_stream_event.rbi b/rbi/openai/models/audio/transcription_stream_event.rbi index 4a3dc34e..7c5989b6 100644 --- a/rbi/openai/models/audio/transcription_stream_event.rbi +++ b/rbi/openai/models/audio/transcription_stream_event.rbi @@ -10,13 +10,21 @@ module OpenAI module TranscriptionStreamEvent extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] + Variants = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDeltaEvent, + OpenAI::Audio::TranscriptionTextDoneEvent ) + end + + sig do + override.returns( + T::Array[OpenAI::Audio::TranscriptionStreamEvent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/audio/transcription_text_delta_event.rbi b/rbi/openai/models/audio/transcription_text_delta_event.rbi index 25ace7da..15165550 100644 --- a/rbi/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/openai/models/audio/transcription_text_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text delta that was additionally transcribed. sig { returns(String) } attr_accessor :delta @@ -15,14 +17,22 @@ module OpenAI # The log probabilities of the delta. Only included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] + ) + ) + end attr_reader :logprobs sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::AnyHash)] - ) - .void + logprobs: + T::Array[ + OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob::OrHash + ] + ).void end attr_writer :logprobs @@ -33,10 +43,12 @@ module OpenAI sig do params( delta: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob, OpenAI::Internal::AnyHash)], + logprobs: + T::Array[ + OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The text delta that was additionally transcribed. @@ -47,16 +59,26 @@ module OpenAI logprobs: nil, # The type of the event. Always `transcript.text.delta`. 
type: :"transcript.text.delta" - ); end + ) + end + sig do - override - .returns( - {delta: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]} - ) + override.returns( + { + delta: String, + type: Symbol, + logprobs: + T::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] + } + ) + end + def to_hash end - def to_hash; end class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } attr_reader :token @@ -78,7 +100,13 @@ module OpenAI sig { params(logprob: Float).void } attr_writer :logprob - sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } + sig do + params( + token: String, + bytes: T::Array[T.anything], + logprob: Float + ).returns(T.attached_class) + end def self.new( # The token that was used to generate the log probability. token: nil, @@ -86,9 +114,16 @@ module OpenAI bytes: nil, # The log probability of the token. logprob: nil - ); end - sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } - def to_hash; end + ) + end + + sig do + override.returns( + { token: String, bytes: T::Array[T.anything], logprob: Float } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/audio/transcription_text_done_event.rbi b/rbi/openai/models/audio/transcription_text_done_event.rbi index 08687ed5..993b519e 100644 --- a/rbi/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/openai/models/audio/transcription_text_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text that was transcribed. sig { returns(String) } attr_accessor :text @@ -16,14 +18,22 @@ module OpenAI # included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + ) + ) + end attr_reader :logprobs sig do params( - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::AnyHash)] - ) - .void + logprobs: + T::Array[ + OpenAI::Audio::TranscriptionTextDoneEvent::Logprob::OrHash + ] + ).void end attr_writer :logprobs @@ -34,10 +44,12 @@ module OpenAI sig do params( text: String, - logprobs: T::Array[T.any(OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob, OpenAI::Internal::AnyHash)], + logprobs: + T::Array[ + OpenAI::Audio::TranscriptionTextDoneEvent::Logprob::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The text that was transcribed. @@ -49,16 +61,26 @@ module OpenAI logprobs: nil, # The type of the event. Always `transcript.text.done`. 
type: :"transcript.text.done" - ); end + ) + end + sig do - override - .returns( - {text: String, type: Symbol, logprobs: T::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]} - ) + override.returns( + { + text: String, + type: Symbol, + logprobs: + T::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + } + ) + end + def to_hash end - def to_hash; end class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } attr_reader :token @@ -80,7 +102,13 @@ module OpenAI sig { params(logprob: Float).void } attr_writer :logprob - sig { params(token: String, bytes: T::Array[T.anything], logprob: Float).returns(T.attached_class) } + sig do + params( + token: String, + bytes: T::Array[T.anything], + logprob: Float + ).returns(T.attached_class) + end def self.new( # The token that was used to generate the log probability. token: nil, @@ -88,9 +116,16 @@ module OpenAI bytes: nil, # The log probability of the token. logprob: nil - ); end - sig { override.returns({token: String, bytes: T::Array[T.anything], logprob: Float}) } - def to_hash; end + ) + end + + sig do + override.returns( + { token: String, bytes: T::Array[T.anything], logprob: Float } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/audio/transcription_verbose.rbi b/rbi/openai/models/audio/transcription_verbose.rbi index 43af0a5c..6fc105f7 100644 --- a/rbi/openai/models/audio/transcription_verbose.rbi +++ b/rbi/openai/models/audio/transcription_verbose.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The duration of the input audio. sig { returns(Float) } attr_accessor :duration @@ -17,20 +19,25 @@ module OpenAI attr_accessor :text # Segments of the transcribed text and their corresponding details. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } + sig do + returns(T.nilable(T::Array[OpenAI::Audio::TranscriptionSegment])) + end attr_reader :segments sig do - params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)]) - .void + params( + segments: T::Array[OpenAI::Audio::TranscriptionSegment::OrHash] + ).void end attr_writer :segments # Extracted words and their corresponding timestamps. - sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionWord])) } + sig { returns(T.nilable(T::Array[OpenAI::Audio::TranscriptionWord])) } attr_reader :words - sig { params(words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::AnyHash)]).void } + sig do + params(words: T::Array[OpenAI::Audio::TranscriptionWord::OrHash]).void + end attr_writer :words # Represents a verbose json transcription response returned by model, based on the @@ -40,10 +47,9 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)], - words: T::Array[T.any(OpenAI::Models::Audio::TranscriptionWord, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + segments: T::Array[OpenAI::Audio::TranscriptionSegment::OrHash], + words: T::Array[OpenAI::Audio::TranscriptionWord::OrHash] + ).returns(T.attached_class) end def self.new( # The duration of the input audio. 
@@ -56,20 +62,22 @@ module OpenAI segments: nil, # Extracted words and their corresponding timestamps. words: nil - ); end + ) + end + sig do - override - .returns( - { - duration: Float, - language: String, - text: String, - segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment], - words: T::Array[OpenAI::Models::Audio::TranscriptionWord] - } - ) + override.returns( + { + duration: Float, + language: String, + text: String, + segments: T::Array[OpenAI::Audio::TranscriptionSegment], + words: T::Array[OpenAI::Audio::TranscriptionWord] + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/audio/transcription_word.rbi b/rbi/openai/models/audio/transcription_word.rbi index 51122c61..141e388e 100644 --- a/rbi/openai/models/audio/transcription_word.rbi +++ b/rbi/openai/models/audio/transcription_word.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranscriptionWord < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # End time of the word in seconds. sig { returns(Float) } attr_accessor :end_ @@ -16,7 +18,11 @@ module OpenAI sig { returns(String) } attr_accessor :word - sig { params(end_: Float, start: Float, word: String).returns(T.attached_class) } + sig do + params(end_: Float, start: Float, word: String).returns( + T.attached_class + ) + end def self.new( # End time of the word in seconds. end_:, @@ -24,9 +30,12 @@ module OpenAI start:, # The text content of the word. word: - ); end - sig { override.returns({end_: Float, start: Float, word: String}) } - def to_hash; end + ) + end + + sig { override.returns({ end_: Float, start: Float, word: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/audio/translation.rbi b/rbi/openai/models/audio/translation.rbi index 96bbc6d2..253e5428 100644 --- a/rbi/openai/models/audio/translation.rbi +++ b/rbi/openai/models/audio/translation.rbi @@ -4,14 +4,18 @@ module OpenAI module Models module Audio class Translation < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :text sig { params(text: String).returns(T.attached_class) } - def self.new(text:); end + def self.new(text:) + end - sig { override.returns({text: String}) } - def to_hash; end + sig { override.returns({ text: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi index 77933b04..0762f210 100644 --- a/rbi/openai/models/audio/translation_create_params.rbi +++ b/rbi/openai/models/audio/translation_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } @@ -14,7 +16,7 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
- sig { returns(T.any(String, OpenAI::Models::AudioModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } attr_accessor :model # An optional text to guide the model's style or continue a previous audio @@ -29,10 +31,21 @@ module OpenAI # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. - sig { returns(T.nilable(OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol + ) + ) + end attr_reader :response_format - sig { params(response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol).void } + sig do + params( + response_format: + OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol + ).void + end attr_writer :response_format # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the @@ -49,13 +62,13 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), + model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, - response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, + response_format: + OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The audio file object (not file name) translate, in one of these formats: flac, @@ -79,29 +92,40 @@ module OpenAI # automatically increase the temperature until certain thresholds are hit. temperature: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - prompt: String, - response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, - temperature: Float, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + model: T.any(String, OpenAI::AudioModel::OrSymbol), + prompt: String, + response_format: + OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, + temperature: Float, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::Audio::TranslationCreateParams::Model::Variants] + ) + end + def self.variants + end end # The format of the output, in one of these options: `json`, `text`, `srt`, @@ -110,18 +134,49 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } + T.type_alias do + T.all( + Symbol, + OpenAI::Audio::TranslationCreateParams::ResponseFormat + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) - TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) - SRT = T.let(:srt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + JSON = + T.let( + :json, + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ) + TEXT = + T.let( + :text, + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ) + SRT = + T.let( + :srt, + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ) VERBOSE_JSON = - T.let(:verbose_json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) - VTT = T.let(:vtt, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) + T.let( + :verbose_json, + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ) + VTT = + T.let( + :vtt, + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/audio/translation_create_response.rbi b/rbi/openai/models/audio/translation_create_response.rbi index 57941765..528526c0 100644 --- a/rbi/openai/models/audio/translation_create_response.rbi +++ b/rbi/openai/models/audio/translation_create_response.rbi @@ -6,8 +6,18 @@ module OpenAI module TranslationCreateResponse extend OpenAI::Internal::Type::Union - sig { override.returns([OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose]) } - def self.variants; end + Variants = + T.type_alias do + T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose) + end + + sig do + override.returns( + T::Array[OpenAI::Models::Audio::TranslationCreateResponse::Variants] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/audio/translation_verbose.rbi b/rbi/openai/models/audio/translation_verbose.rbi index 9458e067..ddc7e192 100644 --- a/rbi/openai/models/audio/translation_verbose.rbi +++ b/rbi/openai/models/audio/translation_verbose.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Audio class TranslationVerbose < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The duration of the input audio. sig { returns(Float) } attr_accessor :duration @@ -17,12 +19,15 @@ module OpenAI attr_accessor :text # Segments of the translated text and their corresponding details. 
- sig { returns(T.nilable(T::Array[OpenAI::Models::Audio::TranscriptionSegment])) } + sig do + returns(T.nilable(T::Array[OpenAI::Audio::TranscriptionSegment])) + end attr_reader :segments sig do - params(segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)]) - .void + params( + segments: T::Array[OpenAI::Audio::TranscriptionSegment::OrHash] + ).void end attr_writer :segments @@ -31,9 +36,8 @@ module OpenAI duration: Float, language: String, text: String, - segments: T::Array[T.any(OpenAI::Models::Audio::TranscriptionSegment, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + segments: T::Array[OpenAI::Audio::TranscriptionSegment::OrHash] + ).returns(T.attached_class) end def self.new( # The duration of the input audio. @@ -44,14 +48,21 @@ module OpenAI text:, # Segments of the translated text and their corresponding details. segments: nil - ); end + ) + end + sig do - override - .returns( - {duration: Float, language: String, text: String, segments: T::Array[OpenAI::Models::Audio::TranscriptionSegment]} - ) + override.returns( + { + duration: Float, + language: String, + text: String, + segments: T::Array[OpenAI::Audio::TranscriptionSegment] + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/audio_model.rbi b/rbi/openai/models/audio_model.rbi index e82aa420..3f22719a 100644 --- a/rbi/openai/models/audio_model.rbi +++ b/rbi/openai/models/audio_model.rbi @@ -5,15 +5,18 @@ module OpenAI module AudioModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::AudioModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::TaggedSymbol) - GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) - GPT_4O_MINI_TRANSCRIBE = T.let(:"gpt-4o-mini-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) + WHISPER_1 = T.let(:"whisper-1", OpenAI::AudioModel::TaggedSymbol) + GPT_4O_TRANSCRIBE = + T.let(:"gpt-4o-transcribe", OpenAI::AudioModel::TaggedSymbol) + GPT_4O_MINI_TRANSCRIBE = + T.let(:"gpt-4o-mini-transcribe", OpenAI::AudioModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::AudioModel::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::AudioModel::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/audio_response_format.rbi b/rbi/openai/models/audio_response_format.rbi index b7fca47d..4afcf558 100644 --- a/rbi/openai/models/audio_response_format.rbi +++ b/rbi/openai/models/audio_response_format.rbi @@ -8,17 +8,21 @@ module OpenAI module AudioResponseFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::AudioResponseFormat) } OrSymbol = T.type_alias { T.any(Symbol, String) } - JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) - TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::TaggedSymbol) - SRT = T.let(:srt, OpenAI::Models::AudioResponseFormat::TaggedSymbol) - VERBOSE_JSON = T.let(:verbose_json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) - VTT = T.let(:vtt, OpenAI::Models::AudioResponseFormat::TaggedSymbol) + JSON = T.let(:json, OpenAI::AudioResponseFormat::TaggedSymbol) + TEXT = T.let(:text, OpenAI::AudioResponseFormat::TaggedSymbol) + SRT = T.let(:srt, 
OpenAI::AudioResponseFormat::TaggedSymbol) + VERBOSE_JSON = + T.let(:verbose_json, OpenAI::AudioResponseFormat::TaggedSymbol) + VTT = T.let(:vtt, OpenAI::AudioResponseFormat::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::AudioResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::AudioResponseFormat::TaggedSymbol]) + end + def self.values + end end end end diff --git a/rbi/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/openai/models/auto_file_chunking_strategy_param.rbi index 7ea24386..658b0cc9 100644 --- a/rbi/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/openai/models/auto_file_chunking_strategy_param.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -13,9 +15,12 @@ module OpenAI def self.new( # Always `auto`. type: :auto - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/batch.rbi b/rbi/openai/models/batch.rbi index bbb7b1cd..dcc6308c 100644 --- a/rbi/openai/models/batch.rbi +++ b/rbi/openai/models/batch.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Batch < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -27,7 +29,7 @@ module OpenAI attr_accessor :object # The current status of the batch. - sig { returns(OpenAI::Models::Batch::Status::TaggedSymbol) } + sig { returns(OpenAI::Batch::Status::TaggedSymbol) } attr_accessor :status # The Unix timestamp (in seconds) for when the batch was cancelled. @@ -58,10 +60,10 @@ module OpenAI sig { params(error_file_id: String).void } attr_writer :error_file_id - sig { returns(T.nilable(OpenAI::Models::Batch::Errors)) } + sig { returns(T.nilable(OpenAI::Batch::Errors)) } attr_reader :errors - sig { params(errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::AnyHash)).void } + sig { params(errors: OpenAI::Batch::Errors::OrHash).void } attr_writer :errors # The Unix timestamp (in seconds) for when the batch expired. @@ -116,10 +118,10 @@ module OpenAI attr_writer :output_file_id # The request counts for different statuses within the batch. 
- sig { returns(T.nilable(OpenAI::Models::BatchRequestCounts)) } + sig { returns(T.nilable(OpenAI::BatchRequestCounts)) } attr_reader :request_counts - sig { params(request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::AnyHash)).void } + sig { params(request_counts: OpenAI::BatchRequestCounts::OrHash).void } attr_writer :request_counts sig do @@ -129,12 +131,12 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Models::Batch::Status::OrSymbol, + status: OpenAI::Batch::Status::OrSymbol, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, error_file_id: String, - errors: T.any(OpenAI::Models::Batch::Errors, OpenAI::Internal::AnyHash), + errors: OpenAI::Batch::Errors::OrHash, expired_at: Integer, expires_at: Integer, failed_at: Integer, @@ -142,10 +144,9 @@ module OpenAI in_progress_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), output_file_id: String, - request_counts: T.any(OpenAI::Models::BatchRequestCounts, OpenAI::Internal::AnyHash), + request_counts: OpenAI::BatchRequestCounts::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( id:, @@ -191,61 +192,66 @@ module OpenAI request_counts: nil, # The object type, which is always `batch`. object: :batch - ); end + ) + end + sig do - override - .returns( - { - id: String, - completion_window: String, - created_at: Integer, - endpoint: String, - input_file_id: String, - object: Symbol, - status: OpenAI::Models::Batch::Status::TaggedSymbol, - cancelled_at: Integer, - cancelling_at: Integer, - completed_at: Integer, - error_file_id: String, - errors: OpenAI::Models::Batch::Errors, - expired_at: Integer, - expires_at: Integer, - failed_at: Integer, - finalizing_at: Integer, - in_progress_at: Integer, - metadata: T.nilable(T::Hash[Symbol, String]), - output_file_id: String, - request_counts: OpenAI::Models::BatchRequestCounts - } - ) + override.returns( + { + id: String, + completion_window: String, + created_at: Integer, + endpoint: String, + input_file_id: String, + object: Symbol, + status: OpenAI::Batch::Status::TaggedSymbol, + cancelled_at: Integer, + cancelling_at: Integer, + completed_at: Integer, + error_file_id: String, + errors: OpenAI::Batch::Errors, + expired_at: Integer, + expires_at: Integer, + failed_at: Integer, + finalizing_at: Integer, + in_progress_at: Integer, + metadata: T.nilable(T::Hash[Symbol, String]), + output_file_id: String, + request_counts: OpenAI::BatchRequestCounts + } + ) + end + def to_hash end - def to_hash; end # The current status of the batch. 
module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Batch::Status) } OrSymbol = T.type_alias { T.any(Symbol, String) } - VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Batch::Status::TaggedSymbol) - FINALIZING = T.let(:finalizing, OpenAI::Models::Batch::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Batch::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Batch::Status::TaggedSymbol) - CANCELLING = T.let(:cancelling, OpenAI::Models::Batch::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Batch::Status::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::Batch::Status::TaggedSymbol]) } - def self.values; end + VALIDATING = T.let(:validating, OpenAI::Batch::Status::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Batch::Status::TaggedSymbol) + IN_PROGRESS = T.let(:in_progress, OpenAI::Batch::Status::TaggedSymbol) + FINALIZING = T.let(:finalizing, OpenAI::Batch::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Batch::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Batch::Status::TaggedSymbol) + CANCELLING = T.let(:cancelling, OpenAI::Batch::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Batch::Status::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::Batch::Status::TaggedSymbol]) } + def self.values + end end class Errors < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(T::Array[OpenAI::Models::BatchError])) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(T.nilable(T::Array[OpenAI::BatchError])) } attr_reader :data - sig { params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)]).void } + sig { params(data: T::Array[OpenAI::BatchError::OrHash]).void } attr_writer :data # The object type, which is always `list`. @@ -256,16 +262,25 @@ module OpenAI attr_writer :object sig do - params(data: T::Array[T.any(OpenAI::Models::BatchError, OpenAI::Internal::AnyHash)], object: String) - .returns(T.attached_class) + params( + data: T::Array[OpenAI::BatchError::OrHash], + object: String + ).returns(T.attached_class) end def self.new( data: nil, # The object type, which is always `list`. 
object: nil - ); end - sig { override.returns({data: T::Array[OpenAI::Models::BatchError], object: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: T::Array[OpenAI::BatchError], object: String } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/batch_cancel_params.rbi b/rbi/openai/models/batch_cancel_params.rbi index 137bcef9..37f0df79 100644 --- a/rbi/openai/models/batch_cancel_params.rbi +++ b/rbi/openai/models/batch_cancel_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/batch_create_params.rbi b/rbi/openai/models/batch_create_params.rbi index fec9c7ac..31e62492 100644 --- a/rbi/openai/models/batch_create_params.rbi +++ b/rbi/openai/models/batch_create_params.rbi @@ -6,16 +6,18 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The time frame within which the batch should be processed. Currently only `24h` # is supported. - sig { returns(OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol) } + sig { returns(OpenAI::BatchCreateParams::CompletionWindow::OrSymbol) } attr_accessor :completion_window # The endpoint to be used for all requests in the batch. Currently # `/v1/responses`, `/v1/chat/completions`, `/v1/embeddings`, and `/v1/completions` # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. - sig { returns(OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol) } + sig { returns(OpenAI::BatchCreateParams::Endpoint::OrSymbol) } attr_accessor :endpoint # The ID of an uploaded file that contains requests for the new batch. @@ -41,13 +43,13 @@ module OpenAI sig do params( - completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, - endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, + completion_window: + OpenAI::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The time frame within which the batch should be processed. Currently only `24h` @@ -76,33 +78,48 @@ module OpenAI # a maximum length of 512 characters. 
metadata: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol, - endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol, - input_file_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + completion_window: + OpenAI::BatchCreateParams::CompletionWindow::OrSymbol, + endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol, + input_file_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The time frame within which the batch should be processed. Currently only `24h` # is supported. module CompletionWindow extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::BatchCreateParams::CompletionWindow) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - COMPLETION_WINDOW_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) + COMPLETION_WINDOW_24H = + T.let( + :"24h", + OpenAI::BatchCreateParams::CompletionWindow::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::BatchCreateParams::CompletionWindow::TaggedSymbol] + ) + end + def self.values + end end # The endpoint to be used for all requests in the batch. Currently @@ -112,17 +129,38 @@ module OpenAI module Endpoint extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::BatchCreateParams::Endpoint) } OrSymbol = T.type_alias { T.any(Symbol, String) } - V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) + V1_RESPONSES = + T.let( + :"/v1/responses", + OpenAI::BatchCreateParams::Endpoint::TaggedSymbol + ) V1_CHAT_COMPLETIONS = - T.let(:"/v1/chat/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) - V1_EMBEDDINGS = T.let(:"/v1/embeddings", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) - V1_COMPLETIONS = T.let(:"/v1/completions", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) + T.let( + :"/v1/chat/completions", + OpenAI::BatchCreateParams::Endpoint::TaggedSymbol + ) + V1_EMBEDDINGS = + T.let( + :"/v1/embeddings", + OpenAI::BatchCreateParams::Endpoint::TaggedSymbol + ) + V1_COMPLETIONS = + T.let( + :"/v1/completions", + OpenAI::BatchCreateParams::Endpoint::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::BatchCreateParams::Endpoint::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/batch_error.rbi b/rbi/openai/models/batch_error.rbi index 24b461c6..77e0e754 100644 --- a/rbi/openai/models/batch_error.rbi +++ b/rbi/openai/models/batch_error.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class BatchError < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An error code identifying the error type. 
sig { returns(T.nilable(String)) } attr_reader :code @@ -26,8 +28,12 @@ module OpenAI attr_accessor :param sig do - params(code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)) - .returns(T.attached_class) + params( + code: String, + line: T.nilable(Integer), + message: String, + param: T.nilable(String) + ).returns(T.attached_class) end def self.new( # An error code identifying the error type. @@ -38,11 +44,21 @@ module OpenAI message: nil, # The name of the parameter that caused the error, if applicable. param: nil - ); end + ) + end + sig do - override.returns({code: String, line: T.nilable(Integer), message: String, param: T.nilable(String)}) + override.returns( + { + code: String, + line: T.nilable(Integer), + message: String, + param: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/batch_list_params.rbi b/rbi/openai/models/batch_list_params.rbi index 397a8042..1843894d 100644 --- a/rbi/openai/models/batch_list_params.rbi +++ b/rbi/openai/models/batch_list_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -28,9 +30,8 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -42,9 +43,20 @@ module OpenAI # 100, and the default is 20. limit: nil, request_options: {} - ); end - sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/batch_request_counts.rbi b/rbi/openai/models/batch_request_counts.rbi index 8f022136..6835bf20 100644 --- a/rbi/openai/models/batch_request_counts.rbi +++ b/rbi/openai/models/batch_request_counts.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class BatchRequestCounts < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of requests that have been completed successfully. sig { returns(Integer) } attr_accessor :completed @@ -16,7 +18,11 @@ module OpenAI attr_accessor :total # The request counts for different statuses within the batch. - sig { params(completed: Integer, failed: Integer, total: Integer).returns(T.attached_class) } + sig do + params(completed: Integer, failed: Integer, total: Integer).returns( + T.attached_class + ) + end def self.new( # Number of requests that have been completed successfully. completed:, @@ -24,9 +30,16 @@ module OpenAI failed:, # Total number of requests in the batch. 
total: - ); end - sig { override.returns({completed: Integer, failed: Integer, total: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { completed: Integer, failed: Integer, total: Integer } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/batch_retrieve_params.rbi b/rbi/openai/models/batch_retrieve_params.rbi index 7a154d58..b7289ba9 100644 --- a/rbi/openai/models/batch_retrieve_params.rbi +++ b/rbi/openai/models/batch_retrieve_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant.rbi b/rbi/openai/models/beta/assistant.rbi index 438ec36e..9baa786f 100644 --- a/rbi/openai/models/beta/assistant.rbi +++ b/rbi/openai/models/beta/assistant.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class Assistant < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -53,9 +55,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -87,9 +89,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -106,14 +108,14 @@ module OpenAI # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources)) } + sig { returns(T.nilable(OpenAI::Beta::Assistant::ToolResources)) } attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable(OpenAI::Beta::Assistant::ToolResources::OrHash) + ).void end attr_writer :tool_resources @@ -135,29 +137,29 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: T.nilable(String), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Assistant::ToolResources, OpenAI::Internal::AnyHash)), + tool_resources: + T.nilable(OpenAI::Beta::Assistant::ToolResources::OrHash), top_p: T.nilable(Float), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -226,62 +228,77 @@ module OpenAI top_p: nil, # The object type, which is always `assistant`. 
object: :assistant - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - description: T.nilable(String), - instructions: T.nilable(String), - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - name: T.nilable(String), - object: Symbol, - tools: T::Array[ + override.returns( + { + id: String, + created_at: Integer, + description: T.nilable(String), + instructions: T.nilable(String), + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: T.nilable(String), + object: Symbol, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ], - response_format: T.nilable( + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::Assistant::ToolResources), - top_p: T.nilable(Float) - } - ) + temperature: T.nilable(Float), + tool_resources: T.nilable(OpenAI::Beta::Assistant::ToolResources), + top_p: T.nilable(Float) + } + ) + end + def to_hash end - def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter)) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T.nilable(OpenAI::Beta::Assistant::ToolResources::CodeInterpreter) + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter: + OpenAI::Beta::Assistant::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch)) } + sig do + returns( + T.nilable(OpenAI::Beta::Assistant::ToolResources::FileSearch) + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::Assistant::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -291,25 +308,31 @@ module OpenAI # IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), - file_search: T.any(OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::Assistant::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::Assistant::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter`` tool. There can be a maximum of 20 files # associated with the tool. @@ -325,12 +348,18 @@ module OpenAI # available to the `code_interpreter`` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -341,16 +370,23 @@ module OpenAI sig { params(vector_store_ids: T::Array[String]).void } attr_writer :vector_store_ids - sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(vector_store_ids: T::Array[String]).returns( + T.attached_class + ) + end def self.new( # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. vector_store_ids: nil - ); end - sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ vector_store_ids: T::Array[String] }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi index d4ce6a96..eaf13a70 100644 --- a/rbi/openai/models/beta/assistant_create_params.rbi +++ b/rbi/openai/models/beta/assistant_create_params.rbi @@ -7,12 +7,14 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. 
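# ---------------------------------------------------------------------------
# Example (editor's sketch, not part of the generated diff): the new `OrHash`
# aliases introduced throughout these files mean that wherever a model
# instance is accepted, a plain hash of the same shape typechecks too. Both
# forms below follow directly from the `Assistant::ToolResources` sigs above;
# "file-abc123" is a hypothetical file ID.
#
#   # Fully constructed models...
#   resources = OpenAI::Beta::Assistant::ToolResources.new(
#     code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter.new(
#       file_ids: ["file-abc123"]
#     )
#   )
#
#   # ...or the equivalent `OrHash` form, with nested plain hashes:
#   resources = OpenAI::Beta::Assistant::ToolResources.new(
#     code_interpreter: {file_ids: ["file-abc123"]}
#   )
# ---------------------------------------------------------------------------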
- sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } attr_accessor :model # The description of the assistant. The maximum length is 512 characters. @@ -43,7 +45,7 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with @@ -71,9 +73,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -90,14 +92,18 @@ module OpenAI # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources)) } + sig do + returns(T.nilable(OpenAI::Beta::AssistantCreateParams::ToolResources)) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable( + OpenAI::Beta::AssistantCreateParams::ToolResources::OrHash + ) + ).void end attr_writer :tool_resources @@ -109,9 +115,9 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -121,16 +127,15 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ).void end attr_writer :tools @@ -144,35 +149,37 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, OpenAI::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - 
OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], + tool_resources: + T.nilable( + OpenAI::Beta::AssistantCreateParams::ToolResources::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # ID of the model to use. You can use the @@ -243,40 +250,45 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - description: T.nilable(String), - instructions: T.nilable(String), - metadata: T.nilable(T::Hash[Symbol, String]), - name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( + override.returns( + { + model: T.any(String, OpenAI::ChatModel::OrSymbol), + description: T.nilable(String), + instructions: T.nilable(String), + metadata: T.nilable(T::Hash[Symbol, String]), + name: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources), - tools: T::Array[ + temperature: T.nilable(Float), + tool_resources: + T.nilable(OpenAI::Beta::AssistantCreateParams::ToolResources), + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ], - top_p: T.nilable(Float), - request_options: OpenAI::RequestOptions - } - ) + top_p: T.nilable(Float), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -286,33 +298,53 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::Beta::AssistantCreateParams::Model::Variants] + ) + end + def self.variants + end end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter)) } - attr_reader :code_interpreter + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } sig do - params( - code_interpreter: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash + returns( + T.nilable( + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter ) ) - .void + end + attr_reader :code_interpreter + + sig do + params( + code_interpreter: + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -322,28 +354,32 @@ module OpenAI # IDs. sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash - ), - file_search: T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -359,12 +395,18 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. 
file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -381,35 +423,33 @@ module OpenAI # vector store attached to the assistant. sig do returns( - T.nilable(T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]) + T.nilable( + T::Array[ + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore + ] + ) ) end attr_reader :vector_stores sig do params( - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + vector_stores: + T::Array[ + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::OrHash + ] + ).void end attr_writer :vector_stores sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .returns(T.attached_class) + vector_stores: + T::Array[ + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::OrHash + ] + ).returns(T.attached_class) end def self.new( # The @@ -422,27 +462,35 @@ module OpenAI # with file_ids and attach it to this assistant. There can be a maximum of 1 # vector store attached to the assistant. vector_stores: nil - ); end + ) + end + sig do - override - .returns( - { - vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] - } - ) + override.returns( + { + vector_store_ids: T::Array[String], + vector_stores: + T::Array[ + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore + ] + } + ) + end + def to_hash end - def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
sig do returns( T.nilable( T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) ) @@ -451,13 +499,12 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ) + ).void end attr_writer :chunking_strategy @@ -481,15 +528,14 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ), + chunking_strategy: + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -506,28 +552,44 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil - ); end + ) + end + sig do - override - .returns( - { - chunking_strategy: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + override.returns( + { + chunking_strategy: + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), - file_ids: T::Array[String], - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + file_ids: T::Array[String], + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. module ChunkingStrategy extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + class Auto < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Always `auto`. 
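# ---------------------------------------------------------------------------
# Example (editor's sketch, not part of the generated diff): the nested
# vector-store shape typed above, written in its `OrHash` form. All field
# names follow the `FileSearch::VectorStore` and `ChunkingStrategy` sigs in
# this file; "file-abc123" is a hypothetical file ID, and the token counts
# are the documented defaults (overlap 400, max chunk size 800).
#
#   tool_resources = {
#     file_search: {
#       vector_stores: [{
#         file_ids: ["file-abc123"],
#         chunking_strategy: {
#           type: :static,   # or simply {type: :auto} for the default strategy
#           static: {chunk_overlap_tokens: 400, max_chunk_size_tokens: 800}
#         }
#       }]
#     }
#   }
# ---------------------------------------------------------------------------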
sig { returns(Symbol) } attr_accessor :type @@ -538,27 +600,32 @@ module OpenAI def self.new( # Always `auto`. type: :auto - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig do returns( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end attr_reader :static sig do params( - static: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ) - ) - .void + static: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash + ).void end attr_writer :static @@ -568,31 +635,36 @@ module OpenAI sig do params( - static: T.any( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ), + static: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( static:, # Always `static`. type: :static - ); end + ) + end + sig do - override - .returns( - { - static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - type: Symbol - } - ) + override.returns( + { + static: + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. @@ -618,19 +690,31 @@ module OpenAI # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. 
max_chunk_size_tokens: - ); end - sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } + ) + end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) + override.returns( + T::Array[ + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/assistant_delete_params.rbi b/rbi/openai/models/beta/assistant_delete_params.rbi index 283d4cf9..48fee74b 100644 --- a/rbi/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/openai/models/beta/assistant_delete_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant_deleted.rbi b/rbi/openai/models/beta/assistant_deleted.rbi index 676a92bf..7ebe179e 100644 --- a/rbi/openai/models/beta/assistant_deleted.rbi +++ b/rbi/openai/models/beta/assistant_deleted.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class AssistantDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -13,11 +15,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"assistant.deleted"); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"assistant.deleted") + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant_list_params.rbi b/rbi/openai/models/beta/assistant_list_params.rbi index 70d05fcf..b9644242 100644 --- a/rbi/openai/models/beta/assistant_list_params.rbi +++ b/rbi/openai/models/beta/assistant_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -37,10 +39,14 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::Beta::AssistantListParams::Order::OrSymbol)) + end attr_reader :order - sig { params(order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol).void } + sig do + params(order: OpenAI::Beta::AssistantListParams::Order::OrSymbol).void + end attr_writer :order sig do @@ -48,10 +54,9 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Beta::AssistantListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -71,34 +76,46 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + limit: Integer, + order: OpenAI::Beta::AssistantListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
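# ---------------------------------------------------------------------------
# Example (editor's sketch, not part of the generated diff): cursor pagination
# with `AssistantListParams`. `client.beta.assistants.list` is assumed from
# the rest of the SDK; `after`, `limit`, and `order` are the fields typed
# above, and "asst_abc123" is a hypothetical object ID.
#
#   page = client.beta.assistants.list(
#     limit: 20,             # between 1 and 100, default 20
#     order: :desc,          # sort by `created_at`
#     after: "asst_abc123"   # resume after this object ID
#   )
# ---------------------------------------------------------------------------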
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::AssistantListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) + ASC = + T.let(:asc, OpenAI::Beta::AssistantListParams::Order::TaggedSymbol) + DESC = + T.let(:desc, OpenAI::Beta::AssistantListParams::Order::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::AssistantListParams::Order::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/assistant_response_format_option.rbi b/rbi/openai/models/beta/assistant_response_format_option.rbi index af1951eb..744863b5 100644 --- a/rbi/openai/models/beta/assistant_response_format_option.rbi +++ b/rbi/openai/models/beta/assistant_response_format_option.rbi @@ -26,13 +26,23 @@ module OpenAI module AssistantResponseFormatOption extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [Symbol, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] + Variants = + T.type_alias do + T.any( + Symbol, + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::AssistantResponseFormatOption::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/assistant_retrieve_params.rbi b/rbi/openai/models/beta/assistant_retrieve_params.rbi index 195f1fe4..2ca3eb4e 100644 --- a/rbi/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/openai/models/beta/assistant_retrieve_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant_stream_event.rbi b/rbi/openai/models/beta/assistant_stream_event.rbi index 3b46028a..b69c801c 100644 --- a/rbi/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/openai/models/beta/assistant_stream_event.rbi @@ -26,13 +26,46 @@ module OpenAI module AssistantStreamEvent extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + 
OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Beta::AssistantStreamEvent::ErrorEvent + ) + end + class ThreadCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). - sig { returns(OpenAI::Models::Beta::Thread) } + sig { returns(OpenAI::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Thread::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -50,11 +83,10 @@ module OpenAI # created. sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Thread::OrHash, enabled: T::Boolean, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a thread that contains @@ -63,18 +95,28 @@ module OpenAI # Whether to enable input audio transcription. enabled: nil, event: :"thread.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Thread, event: Symbol, enabled: T::Boolean } + ) + end + def to_hash + end end class ThreadRunCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -83,26 +125,38 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.run.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -111,26 +165,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.queued" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -139,26 +205,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -167,26 +245,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.requires_action" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -195,26 +285,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -223,26 +325,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.run.incomplete" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -251,26 +365,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.failed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -279,26 +405,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.cancelling" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -307,26 +445,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.cancelled" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -335,25 +485,39 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.expired" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -364,26 +528,37 @@ module OpenAI # is created. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. 
data:, event: :"thread.run.step.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -394,28 +569,39 @@ module OpenAI # moves to an `in_progress` state. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a run step delta i.e. any changed fields on a run step during # streaming. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void + params( + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent::OrHash + ).void end attr_writer :data @@ -427,27 +613,41 @@ module OpenAI # are being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a run step delta i.e. any changed fields on a run step during # streaming. data:, event: :"thread.run.step.delta" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: Symbol + } + ) + end + def to_hash + end end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -458,26 +658,37 @@ module OpenAI # is completed. 
sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -488,26 +699,37 @@ module OpenAI # fails. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.failed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -518,26 +740,37 @@ module OpenAI # is cancelled. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.cancelled" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. 
- sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -548,27 +781,36 @@ module OpenAI # expires. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.expired" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -578,26 +820,38 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -607,26 +861,40 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.message.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message delta i.e. any changed fields on a message during # streaming. - sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } + sig { returns(OpenAI::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::MessageDeltaEvent::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -637,28 +905,37 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::MessageDeltaEvent::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a message delta i.e. any changed fields on a message during # streaming. data:, event: :"thread.message.delta" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::MessageDeltaEvent, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -668,26 +945,38 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -697,24 +986,36 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.incomplete" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ErrorEvent < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::ErrorObject) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::ErrorObject) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::ErrorObject::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -724,22 +1025,25 @@ module OpenAI # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. sig do - params(data: T.any(OpenAI::Models::ErrorObject, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params(data: OpenAI::ErrorObject::OrHash, event: Symbol).returns( + T.attached_class + ) + end + def self.new(data:, event: :error) end - def self.new(data:, event: :error); end - sig { override.returns({data: OpenAI::Models::ErrorObject, event: Symbol}) } - def to_hash; end + sig { override.returns({ data: OpenAI::ErrorObject, event: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent] - ) + override.returns( + T::Array[OpenAI::Beta::AssistantStreamEvent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/assistant_tool.rbi b/rbi/openai/models/beta/assistant_tool.rbi index b26fd5d8..84570448 100644 --- a/rbi/openai/models/beta/assistant_tool.rbi +++ b/rbi/openai/models/beta/assistant_tool.rbi @@ -6,13 +6,20 @@ module OpenAI module AssistantTool extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) + end + + sig do + override.returns(T::Array[OpenAI::Beta::AssistantTool::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/assistant_tool_choice.rbi b/rbi/openai/models/beta/assistant_tool_choice.rbi index b8e37979..a018f2b0 100644 --- a/rbi/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/openai/models/beta/assistant_tool_choice.rbi @@ -4,55 +4,81 @@ module OpenAI module Models module Beta class AssistantToolChoice < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the tool. If type is `function`, the function name must be set - sig { returns(OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol) } + sig { returns(OpenAI::Beta::AssistantToolChoice::Type::OrSymbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantToolChoiceFunction)) } + sig { returns(T.nilable(OpenAI::Beta::AssistantToolChoiceFunction)) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::AnyHash)).void } + sig do + params( + function: OpenAI::Beta::AssistantToolChoiceFunction::OrHash + ).void + end attr_writer :function # Specifies a tool the model should use. Use to force the model to call a specific # tool. sig do params( - type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: T.any(OpenAI::Models::Beta::AssistantToolChoiceFunction, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + type: OpenAI::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Beta::AssistantToolChoiceFunction::OrHash + ).returns(T.attached_class) end def self.new( # The type of the tool. If type is `function`, the function name must be set type:, function: nil - ); end + ) + end + sig do - override - .returns( - { - type: OpenAI::Models::Beta::AssistantToolChoice::Type::OrSymbol, - function: OpenAI::Models::Beta::AssistantToolChoiceFunction - } - ) + override.returns( + { + type: OpenAI::Beta::AssistantToolChoice::Type::OrSymbol, + function: OpenAI::Beta::AssistantToolChoiceFunction + } + ) + end + def to_hash end - def to_hash; end # The type of the tool. 
If type is `function`, the function name must be set module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::AssistantToolChoice::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) + FUNCTION = + T.let( + :function, + OpenAI::Beta::AssistantToolChoice::Type::TaggedSymbol + ) CODE_INTERPRETER = - T.let(:code_interpreter, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) - FILE_SEARCH = T.let(:file_search, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) + T.let( + :code_interpreter, + OpenAI::Beta::AssistantToolChoice::Type::TaggedSymbol + ) + FILE_SEARCH = + T.let( + :file_search, + OpenAI::Beta::AssistantToolChoice::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::AssistantToolChoice::Type::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/openai/models/beta/assistant_tool_choice_function.rbi index 4a88f071..3f18016b 100644 --- a/rbi/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/openai/models/beta/assistant_tool_choice_function.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to call. sig { returns(String) } attr_accessor :name @@ -12,9 +14,12 @@ module OpenAI def self.new( # The name of the function to call. name: - ); end - sig { override.returns({name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/openai/models/beta/assistant_tool_choice_option.rbi index 51538b5e..823ebeb1 100644 --- a/rbi/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/openai/models/beta/assistant_tool_choice_option.rbi @@ -13,6 +13,14 @@ module OpenAI module AssistantToolChoiceOption extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Beta::AssistantToolChoice + ) + end + # `none` means the model will not call any tools and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. 
`required` means the model must call one or more tools before @@ -20,24 +28,46 @@ module OpenAI module Auto extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) + NONE = + T.let( + :none, + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol + ) + REQUIRED = + T.let( + :required, + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol + ] + ) + end + def self.values + end end sig do - override - .returns( - [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Beta::AssistantToolChoice] - ) + override.returns( + T::Array[OpenAI::Beta::AssistantToolChoiceOption::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi index 29f452a9..27dbf4a1 100644 --- a/rbi/openai/models/beta/assistant_update_params.rbi +++ b/rbi/openai/models/beta/assistant_update_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The description of the assistant. The maximum length is 512 characters. sig { returns(T.nilable(String)) } attr_accessor :description @@ -30,10 +32,27 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.nilable(T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol))) } + sig do + returns( + T.nilable( + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ) + ) + ) + end attr_reader :model - sig { params(model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol)).void } + sig do + params( + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ) + ).void + end attr_writer :model # The name of the assistant. The maximum length is 256 characters. @@ -46,7 +65,7 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. 
Compatible with @@ -74,9 +93,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -93,14 +112,18 @@ module OpenAI # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources)) } + sig do + returns(T.nilable(OpenAI::Beta::AssistantUpdateParams::ToolResources)) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable( + OpenAI::Beta::AssistantUpdateParams::ToolResources::OrHash + ) + ).void end attr_writer :tool_resources @@ -112,9 +135,9 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -124,16 +147,15 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ).void end attr_writer :tools @@ -150,32 +172,38 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), - name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( + model: T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), + name: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], + tool_resources: + T.nilable( + OpenAI::Beta::AssistantUpdateParams::ToolResources::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], top_p: T.nilable(Float), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The description of the assistant. The maximum length is 512 characters. 
@@ -246,40 +274,49 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - description: T.nilable(String), - instructions: T.nilable(String), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), - name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( + override.returns( + { + description: T.nilable(String), + instructions: T.nilable(String), + metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), + name: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - temperature: T.nilable(Float), - tool_resources: T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources), - tools: T::Array[ + temperature: T.nilable(Float), + tool_resources: + T.nilable(OpenAI::Beta::AssistantUpdateParams::ToolResources), + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ], - top_p: T.nilable(Float), - request_options: OpenAI::RequestOptions - } - ) + top_p: T.nilable(Float), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -289,95 +326,242 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::AssistantUpdateParams::Model::Variants] + ) + end + def self.variants + end - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::AssistantUpdateParams::Model) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + GPT_4_1 = + T.let( + :"gpt-4.1", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_1_MINI = + T.let( + :"gpt-4.1-mini", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_1_NANO = + T.let( + :"gpt-4.1-nano", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_1_2025_04_14 = - T.let(:"gpt-4.1-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4.1-2025-04-14", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_1_MINI_2025_04_14 = - T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4.1-mini-2025-04-14", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_1_NANO_2025_04_14 = - T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - O3_MINI = T.let(:"o3-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4.1-nano-2025-04-14", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + O3_MINI = + T.let( + :"o3-mini", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) O3_MINI_2025_01_31 = - T.let(:"o3-mini-2025-01-31", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - O1 = T.let(:o1, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4O = T.let(:"gpt-4o", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"o3-mini-2025-01-31", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + O1 = + T.let(:o1, OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol) + O1_2024_12_17 = + T.let( + :"o1-2024-12-17", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4O = + T.let( + :"gpt-4o", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4O_2024_11_20 = - T.let(:"gpt-4o-2024-11-20", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4o-2024-11-20", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4O_2024_08_06 = - T.let(:"gpt-4o-2024-08-06", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4o-2024-08-06", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4O_2024_05_13 = - 
T.let(:"gpt-4o-2024-05-13", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4o-2024-05-13", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4O_MINI = + T.let( + :"gpt-4o-mini", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4O_MINI_2024_07_18 = - T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4o-mini-2024-07-18", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_5_PREVIEW = - T.let(:"gpt-4.5-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4.5-preview", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_5_PREVIEW_2025_02_27 = - T.let(:"gpt-4.5-preview-2025-02-27", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4.5-preview-2025-02-27", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_TURBO = + T.let( + :"gpt-4-turbo", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_TURBO_2024_04_09 = - T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-turbo-2024-04-09", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_0125_PREVIEW = - T.let(:"gpt-4-0125-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-0125-preview", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_TURBO_PREVIEW = - T.let(:"gpt-4-turbo-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-turbo-preview", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_1106_PREVIEW = - T.let(:"gpt-4-1106-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-1106-preview", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_VISION_PREVIEW = - T.let(:"gpt-4-vision-preview", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4 = T.let(:"gpt-4", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-vision-preview", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4 = + T.let( + :"gpt-4", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_0314 = + T.let( + :"gpt-4-0314", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_0613 = + T.let( + :"gpt-4-0613", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_4_32K = + T.let( + :"gpt-4-32k", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_32K_0314 = - T.let(:"gpt-4-32k-0314", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-32k-0314", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_32K_0613 = - T.let(:"gpt-4-32k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", 
OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-4-32k-0613", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_3_5_TURBO = + T.let( + :"gpt-3.5-turbo", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_3_5_TURBO_16K = - T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-16k", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_3_5_TURBO_0613 = - T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-0613", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_3_5_TURBO_1106 = - T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-1106", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_3_5_TURBO_0125 = - T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-0125", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_3_5_TURBO_16K_0613 = - T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-16k-0613", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter)) } - attr_reader :code_interpreter + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } sig do - params( - code_interpreter: T.any( - OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash + returns( + T.nilable( + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter ) ) - .void + end + attr_reader :code_interpreter + + sig do + params( + code_interpreter: + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -387,28 +571,32 @@ module OpenAI # IDs. 
sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash - ), - file_search: T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available # to the `code_interpreter` tool. There can be a maximum of 20 files associated @@ -426,12 +614,18 @@ module OpenAI # to the `code_interpreter` tool. There can be a maximum of 20 files associated # with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -442,16 +636,23 @@ module OpenAI sig { params(vector_store_ids: T::Array[String]).void } attr_writer :vector_store_ids - sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(vector_store_ids: T::Array[String]).returns( + T.attached_class + ) + end def self.new( # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. 
vector_store_ids: nil - ); end - sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ vector_store_ids: T::Array[String] }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/code_interpreter_tool.rbi b/rbi/openai/models/beta/code_interpreter_tool.rbi index d1ac9cc7..f9f7b8bb 100644 --- a/rbi/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/openai/models/beta/code_interpreter_tool.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of tool being defined: `code_interpreter` sig { returns(Symbol) } attr_accessor :type @@ -12,9 +14,12 @@ module OpenAI def self.new( # The type of tool being defined: `code_interpreter` type: :code_interpreter - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/file_search_tool.rbi b/rbi/openai/models/beta/file_search_tool.rbi index e2c738a3..d2442c3f 100644 --- a/rbi/openai/models/beta/file_search_tool.rbi +++ b/rbi/openai/models/beta/file_search_tool.rbi @@ -4,37 +4,52 @@ module OpenAI module Models module Beta class FileSearchTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type # Overrides for the file search tool. - sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch)) } + sig { returns(T.nilable(OpenAI::Beta::FileSearchTool::FileSearch)) } attr_reader :file_search sig do - params(file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::AnyHash)) - .void + params( + file_search: OpenAI::Beta::FileSearchTool::FileSearch::OrHash + ).void end attr_writer :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch, OpenAI::Internal::AnyHash), + file_search: OpenAI::Beta::FileSearchTool::FileSearch::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Overrides for the file search tool. file_search: nil, # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol, file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + type: Symbol, + file_search: OpenAI::Beta::FileSearchTool::FileSearch + } + ) + end + def to_hash + end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between # 1 and 50 inclusive. @@ -55,14 +70,20 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. 
- sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions)) } + sig do + returns( + T.nilable( + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions + ) + ) + end attr_reader :ranking_options sig do params( - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::AnyHash) - ) - .void + ranking_options: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::OrHash + ).void end attr_writer :ranking_options @@ -70,9 +91,9 @@ module OpenAI sig do params( max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + ranking_options: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::OrHash + ).returns(T.attached_class) end def self.new( # The maximum number of results the file search tool should output. The default is @@ -91,16 +112,25 @@ module OpenAI # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. ranking_options: nil - ); end + ) + end + sig do - override - .returns( - {max_num_results: Integer, ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} - ) + override.returns( + { + max_num_results: Integer, + ranking_options: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions + } + ) + end + def to_hash end - def to_hash; end class RankingOptions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The score threshold for the file search. All values must be a floating point # number between 0 and 1. sig { returns(Float) } @@ -108,10 +138,21 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. - sig { returns(T.nilable(OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ) + ) + end attr_reader :ranker - sig { params(ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol).void } + sig do + params( + ranker: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ).void + end attr_writer :ranker # The ranking options for the file search. If not specified, the file search tool @@ -123,9 +164,9 @@ module OpenAI sig do params( score_threshold: Float, - ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol - ) - .returns(T.attached_class) + ranker: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + ).returns(T.attached_class) end def self.new( # The score threshold for the file search. All values must be a floating point @@ -134,17 +175,20 @@ module OpenAI # The ranker to use for the file search. If not specified will use the `auto` # ranker. ranker: nil - ); end + ) + end + sig do - override - .returns( - { - score_threshold: Float, - ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol - } - ) + override.returns( + { + score_threshold: Float, + ranker: + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
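[Editor's note, not part of the patch] Because the `FileSearch` and `RankingOptions` writers above now take OrHash aliases, the whole tool can be built from nested plain hashes. An illustrative sketch under that assumption (values invented; the printed shape mirrors the `to_hash` signatures defined in this diff):

tool = OpenAI::Beta::FileSearchTool.new(
  file_search: {
    max_num_results: 10,    # default is 20 for gpt-4* models, 5 for gpt-3.5-turbo
    ranking_options: {
      score_threshold: 0.5, # floating point number between 0 and 1
      ranker: :auto         # or :default_2024_08_21
    }
  }
)
tool.to_hash
# roughly {type: :file_search, file_search: {max_num_results: 10, ranking_options: {...}}}
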
@@ -152,22 +196,34 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = - T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) + T.let( + :auto, + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol + ) DEFAULT_2024_08_21 = T.let( :default_2024_08_21, - OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol ) sig do - override - .returns(T::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/beta/function_tool.rbi b/rbi/openai/models/beta/function_tool.rbi index 5a914d6f..ad129493 100644 --- a/rbi/openai/models/beta/function_tool.rbi +++ b/rbi/openai/models/beta/function_tool.rbi @@ -4,10 +4,12 @@ module OpenAI module Models module Beta class FunctionTool < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::FunctionDefinition) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::FunctionDefinition) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash)).void } + sig { params(function: OpenAI::FunctionDefinition::OrHash).void } attr_writer :function # The type of tool being defined: `function` @@ -15,16 +17,25 @@ module OpenAI attr_accessor :type sig do - params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + function: OpenAI::FunctionDefinition::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( function:, # The type of tool being defined: `function` type: :function - ); end - sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { function: OpenAI::FunctionDefinition, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/message_stream_event.rbi b/rbi/openai/models/beta/message_stream_event.rbi index ad322e59..562d10c2 100644 --- a/rbi/openai/models/beta/message_stream_event.rbi +++ b/rbi/openai/models/beta/message_stream_event.rbi @@ -9,13 +9,27 @@ module OpenAI module MessageStreamEvent extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, + OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, + OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete + ) + end + class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -25,26 +39,38 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -54,26 +80,40 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message delta i.e. any changed fields on a message during # streaming. - sig { returns(OpenAI::Models::Beta::Threads::MessageDeltaEvent) } + sig { returns(OpenAI::Beta::Threads::MessageDeltaEvent) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::MessageDeltaEvent::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -84,28 +124,37 @@ module OpenAI # being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::MessageDeltaEvent, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::MessageDeltaEvent::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a message delta i.e. any changed fields on a message during # streaming. 
data:, event: :"thread.message.delta" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::MessageDeltaEvent, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::MessageDeltaEvent, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -115,26 +164,38 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.message.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Message) } + sig { returns(OpenAI::Beta::Threads::Message) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Message::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -144,26 +205,33 @@ module OpenAI # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Message, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Message::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.message.incomplete" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Message, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Message, event: Symbol } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete] - ) + override.returns(T::Array[OpenAI::Beta::MessageStreamEvent::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/run_step_stream_event.rbi b/rbi/openai/models/beta/run_step_stream_event.rbi index 80a4d141..7c052f14 100644 --- a/rbi/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/openai/models/beta/run_step_stream_event.rbi @@ -9,12 +9,30 @@ module OpenAI module RunStepStreamEvent extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired + ) + end + class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -25,26 +43,37 @@ module OpenAI # is created. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -55,28 +84,39 @@ module OpenAI # moves to an `in_progress` state. 
sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a run step delta i.e. any changed fields on a run step during # streaming. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStepDeltaEvent) } attr_reader :data sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash)).void + params( + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent::OrHash + ).void end attr_writer :data @@ -88,27 +128,41 @@ module OpenAI # are being streamed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a run step delta i.e. any changed fields on a run step during # streaming. data:, event: :"thread.run.step.delta" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: Symbol + } + ) + end + def to_hash + end end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -119,26 +173,37 @@ module OpenAI # is completed. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. 
- sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -149,26 +214,37 @@ module OpenAI # fails. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.failed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -179,26 +255,37 @@ module OpenAI # is cancelled. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. data:, event: :"thread.run.step.cancelled" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a step in execution of a run. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash)).void } + sig do + params(data: OpenAI::Beta::Threads::Runs::RunStep::OrHash).void + end attr_writer :data sig { returns(Symbol) } @@ -209,27 +296,31 @@ module OpenAI # expires. sig do params( - data: T.any(OpenAI::Models::Beta::Threads::Runs::RunStep, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Threads::Runs::RunStep::OrHash, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a step in execution of a run. 
data:, event: :"thread.run.step.expired" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Runs::RunStep, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Runs::RunStep, event: Symbol } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired] - ) + override.returns(T::Array[OpenAI::Beta::RunStepStreamEvent::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/run_stream_event.rbi b/rbi/openai/models/beta/run_stream_event.rbi index 1f8fa007..77e86149 100644 --- a/rbi/openai/models/beta/run_stream_event.rbi +++ b/rbi/openai/models/beta/run_stream_event.rbi @@ -8,13 +8,32 @@ module OpenAI module RunStreamEvent extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCreated, + OpenAI::Beta::RunStreamEvent::ThreadRunQueued, + OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, + OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, + OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::RunStreamEvent::ThreadRunFailed, + OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, + OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, + OpenAI::Beta::RunStreamEvent::ThreadRunExpired + ) + end + class ThreadRunCreated < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -23,26 +42,38 @@ module OpenAI # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -51,26 +82,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.queued" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -79,26 +122,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.in_progress" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -107,26 +162,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.run.requires_action" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -135,26 +202,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.completed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -163,26 +242,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.incomplete" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
- sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -191,26 +282,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.failed" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -219,26 +322,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.cancelling" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -247,26 +362,38 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
data:, event: :"thread.run.cancelled" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). - sig { returns(OpenAI::Models::Beta::Threads::Run) } + sig { returns(OpenAI::Beta::Threads::Run) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Threads::Run::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -275,26 +402,33 @@ module OpenAI # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. sig do - params(data: T.any(OpenAI::Models::Beta::Threads::Run, OpenAI::Internal::AnyHash), event: Symbol) - .returns(T.attached_class) + params( + data: OpenAI::Beta::Threads::Run::OrHash, + event: Symbol + ).returns(T.attached_class) end def self.new( # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). data:, event: :"thread.run.expired" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Threads::Run, event: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Threads::Run, event: Symbol } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired] - ) + override.returns(T::Array[OpenAI::Beta::RunStreamEvent::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/thread.rbi b/rbi/openai/models/beta/thread.rbi index 46c58f55..8f84f6d1 100644 --- a/rbi/openai/models/beta/thread.rbi +++ b/rbi/openai/models/beta/thread.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class Thread < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -29,14 +31,14 @@ module OpenAI # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources)) } + sig { returns(T.nilable(OpenAI::Beta::Thread::ToolResources)) } attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable(OpenAI::Beta::Thread::ToolResources::OrHash) + ).void end attr_writer :tool_resources @@ -47,10 +49,10 @@ module OpenAI id: String, created_at: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::Thread::ToolResources, OpenAI::Internal::AnyHash)), + tool_resources: + T.nilable(OpenAI::Beta::Thread::ToolResources::OrHash), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -71,41 +73,52 @@ module OpenAI tool_resources:, # The object type, which is always `thread`. object: :thread - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - metadata: T.nilable(T::Hash[Symbol, String]), - object: Symbol, - tool_resources: T.nilable(OpenAI::Models::Beta::Thread::ToolResources) - } - ) + override.returns( + { + id: String, + created_at: Integer, + metadata: T.nilable(T::Hash[Symbol, String]), + object: Symbol, + tool_resources: T.nilable(OpenAI::Beta::Thread::ToolResources) + } + ) + end + def to_hash end - def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter)) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T.nilable(OpenAI::Beta::Thread::ToolResources::CodeInterpreter) + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter: + OpenAI::Beta::Thread::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::Thread::ToolResources::FileSearch)) } + sig do + returns(T.nilable(OpenAI::Beta::Thread::ToolResources::FileSearch)) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::Thread::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -115,25 +128,31 @@ module OpenAI # tool requires a list of vector store IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), - file_search: T.any(OpenAI::Models::Beta::Thread::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::Thread::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::Thread::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::Thread::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Thread::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -149,12 +168,18 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -165,16 +190,23 @@ module OpenAI sig { params(vector_store_ids: T::Array[String]).void } attr_writer :vector_store_ids - sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(vector_store_ids: T::Array[String]).returns( + T.attached_class + ) + end def self.new( # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. vector_store_ids: nil - ); end - sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ vector_store_ids: T::Array[String] }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index fed5543a..e65fa5a3 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. @@ -47,7 +49,7 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. 
- sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable @@ -84,9 +86,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -101,12 +103,15 @@ module OpenAI # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread)) } + sig do + returns(T.nilable(OpenAI::Beta::ThreadCreateAndRunParams::Thread)) + end attr_reader :thread sig do - params(thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash)) - .void + params( + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread::OrHash + ).void end attr_writer :thread @@ -121,8 +126,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice ) ) ) @@ -133,14 +138,20 @@ module OpenAI # specific to the type of tool. For example, the `code_interpreter` tool requires # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources)) } + sig do + returns( + T.nilable(OpenAI::Beta::ThreadCreateAndRunParams::ToolResources) + ) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::OrHash + ) + ).void end attr_writer :tool_resources @@ -151,9 +162,9 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -171,16 +182,22 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ) - ) - .void + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ) + ).void end attr_writer :truncation_strategy @@ -191,44 +208,47 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T.nilable( - T::Array[ + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread::OrHash, + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::OrHash + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The ID of the @@ -320,51 +340,62 @@ module OpenAI # control the intial context window of the run. 
truncation_strategy: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - assistant_id: String, - instructions: T.nilable(String), - max_completion_tokens: T.nilable(Integer), - max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), - parallel_tool_calls: T::Boolean, - response_format: T.nilable( + override.returns( + { + assistant_id: String, + instructions: T.nilable(String), + max_completion_tokens: T.nilable(Integer), + max_prompt_tokens: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + parallel_tool_calls: T::Boolean, + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - temperature: T.nilable(Float), - thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, - tool_choice: T.nilable( + temperature: T.nilable(Float), + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread, + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice ) ), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources), - tools: T.nilable( + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources + ), + tools: + T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ), - top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy), - request_options: OpenAI::RequestOptions - } - ) + top_p: T.nilable(Float), + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the @@ -373,21 +404,42 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants] + ) + end + def self.variants + end end class Thread < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. 
- sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message + ] + ) + ) + end attr_reader :messages sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::AnyHash)] - ) - .void + messages: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::OrHash + ] + ).void end attr_writer :messages @@ -404,16 +456,22 @@ module OpenAI # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources + ) + ) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Internal::AnyHash) - ) - ) - .void + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::OrHash + ) + ).void end attr_writer :tool_resources @@ -421,13 +479,16 @@ module OpenAI # an empty thread will be created. sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message, OpenAI::Internal::AnyHash)], + messages: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::OrHash + ], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, OpenAI::Internal::AnyHash) - ) - ) - .returns(T.attached_class) + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::OrHash + ) + ).returns(T.attached_class) end def self.new( # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to @@ -445,20 +506,31 @@ module OpenAI # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. tool_resources: nil - ); end + ) + end + sig do - override - .returns( - { - messages: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message], - metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources) - } - ) + override.returns( + { + messages: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message + ], + metadata: T.nilable(T::Hash[Symbol, String]), + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources + ) + } + ) + end + def to_hash end - def to_hash; end class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text contents of the message. sig do returns( @@ -466,9 +538,9 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ) @@ -482,11 +554,23 @@ module OpenAI # most cases to represent user-generated messages. 
# - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol) } + sig do + returns( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol + ) + end attr_accessor :role # A list of files attached to the message, and the tools they should be added to. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment + ] + ) + ) + end attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -500,29 +584,27 @@ module OpenAI sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ), - role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, - attachments: T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, - OpenAI::Internal::AnyHash - ) - ] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlockParam::OrHash + ) + ] + ), + role: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The text contents of the message. @@ -543,53 +625,71 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ), - role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]), - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + role: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The text contents of the message. 
module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) ] ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end MessageContentPartParamArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Beta::Threads::MessageContentPartParam + ], OpenAI::Internal::Type::Converter ) end @@ -604,21 +704,40 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol + ) ASSISTANT = - T.let(:assistant, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) + T.let( + :assistant, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -632,8 +751,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] ) @@ -643,57 +762,71 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch::OrHash + ) + ] + ).void end attr_writer :tools sig do params( file_id: String, - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - ] - ) - .returns(T.attached_class) + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The ID of the file to attach to the message. file_id: nil, # The tools to add this file to. tools: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - tools: T::Array[ + override.returns( + { + file_id: String, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end module Tool extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) + end + class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -702,48 +835,62 @@ module OpenAI def self.new( # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch] - ) + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter)) } - attr_reader :code_interpreter + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } sig do - params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash + returns( + T.nilable( + 
OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter ) ) - .void + end + attr_reader :code_interpreter + + sig do + params( + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Internal::AnyHash - ) - ) - .void + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -753,31 +900,32 @@ module OpenAI # tool requires a list of vector store IDs. sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash - ), - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -787,18 +935,26 @@ module OpenAI sig { params(file_ids: T::Array[String]).void } attr_writer :file_ids - sig { params(file_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(file_ids: T::Array[String]).returns(T.attached_class) + end def self.new( # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -816,7 +972,9 @@ module OpenAI sig do returns( T.nilable( - T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore + ] ) ) end @@ -824,28 +982,22 @@ module OpenAI sig do params( - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::OrHash + ] + ).void end attr_writer :vector_stores sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .returns(T.attached_class) + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::OrHash + ] + ).returns(T.attached_class) end def self.new( # The @@ -858,27 +1010,35 @@ module OpenAI # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. vector_stores: nil - ); end + ) + end + sig do - override - .returns( - { - vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] - } - ) + override.returns( + { + vector_store_ids: T::Array[String], + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore + ] + } + ) + end + def to_hash end - def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
sig do returns( T.nilable( T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) ) @@ -887,13 +1047,12 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ) + ).void end attr_writer :chunking_strategy @@ -917,15 +1076,14 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ), + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -942,28 +1100,44 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil - ); end + ) + end + sig do - override - .returns( - { - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + override.returns( + { + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), - file_ids: T::Array[String], - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + file_ids: T::Array[String], + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
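              # As an illustrative sketch, either variant can be passed as a plain hash
              # under the `OrHash` aliases introduced in this patch (the values below are
              # placeholders that merely satisfy the documented bounds, e.g. an overlap
              # of at most half of `max_chunk_size_tokens`):
              #
              #   auto_strategy = {type: :auto}
              #
              #   static_strategy = {
              #     type: :static,
              #     static: {chunk_overlap_tokens: 400, max_chunk_size_tokens: 800}
              #   }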
module ChunkingStrategy extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + class Auto < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -974,27 +1148,32 @@ module OpenAI def self.new( # Always `auto`. type: :auto - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig do returns( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end attr_reader :static sig do params( - static: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ) - ) - .void + static: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash + ).void end attr_writer :static @@ -1004,31 +1183,36 @@ module OpenAI sig do params( - static: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ), + static: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( static:, # Always `static`. type: :static - ); end + ) + end + sig do - override - .returns( - { - static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - type: Symbol - } - ) + override.returns( + { + static: + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. @@ -1054,21 +1238,31 @@ module OpenAI # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. 
max_chunk_size_tokens: - ); end + ) + end + sig do - override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) + override.returns( + { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end end sig do - override - .returns( - [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end @@ -1076,31 +1270,40 @@ module OpenAI end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter)) } - attr_reader :code_interpreter + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } sig do - params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter ) ) - .void + end + attr_reader :code_interpreter + + sig do + params( + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, - OpenAI::Internal::AnyHash - ) - ) - .void + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -1110,31 +1313,32 @@ module OpenAI # IDs. 
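          # For example, a minimal sketch with placeholder IDs, passing both
          # sub-resources as plain hashes:
          #
          #   tool_resources = {
          #     code_interpreter: {file_ids: ["file-abc123"]},
          #     file_search: {vector_store_ids: ["vs_abc123"]}
          #   }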
sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - OpenAI::Internal::AnyHash - ), - file_search: T.any( - OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -1150,12 +1354,18 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -1166,25 +1376,39 @@ module OpenAI sig { params(vector_store_ids: T::Array[String]).void } attr_writer :vector_store_ids - sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(vector_store_ids: T::Array[String]).returns( + T.attached_class + ) + end def self.new( # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to # the assistant. vector_store_ids: nil - ); end - sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ vector_store_ids: T::Array[String] }) } + def to_hash + end end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
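        # For example, a sketch using the constructor defined below to keep only
        # the five most recent messages:
        #
        #   OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy.new(
        #     type: :last_messages,
        #     last_messages: 5
        #   )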
- sig { returns(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol) }
+ sig do
+ returns(
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol
+ )
+ end
attr_accessor :type

# The number of most recent messages from the thread when constructing the context
@@ -1196,10 +1420,10 @@
# control the initial context window of the run.
sig do
params(
- type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
+ type:
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
last_messages: T.nilable(Integer)
- )
- .returns(T.attached_class)
+ ).returns(T.attached_class)
end
def self.new(
# The truncation strategy to use for the thread. The default is `auto`. If set to
@@ -1210,17 +1434,20 @@
# The number of most recent messages from the thread when constructing the context
# for the run.
last_messages: nil
- ); end
+ )
+ end
+
sig do
- override
- .returns(
- {
- type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
- last_messages: T.nilable(Integer)
- }
- )
+ override.returns(
+ {
+ type:
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol,
+ last_messages: T.nilable(Integer)
+ }
+ )
+ end
+ def to_hash
end
- def to_hash; end

# The truncation strategy to use for the thread. The default is `auto`. If set to
# `last_messages`, the thread will be truncated to the n most recent messages in
# the thread. When set to `auto`, messages in the middle of the thread will be
# dropped to fit the context length of the model, `max_prompt_tokens`.
module Type
extend OpenAI::Internal::Type::Enum

TaggedSymbol =
- T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) }
+ T.type_alias do
+ T.all(
+ Symbol,
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type
+ )
+ end
OrSymbol = T.type_alias { T.any(Symbol, String) }

AUTO =
- T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol)
+ T.let(
+ :auto,
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
+ )
LAST_MESSAGES =
T.let(
:last_messages,
- OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
)

sig do
- override
- .returns(T::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol])
+ override.returns(
+ T::Array[
+ OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol
+ ]
+ )
+ end
+ def self.values
end
- def self.values; end
end
end
end
diff --git a/rbi/openai/models/beta/thread_create_params.rbi b/rbi/openai/models/beta/thread_create_params.rbi
index 7a7ae81c..fa4f781f 100644
--- a/rbi/openai/models/beta/thread_create_params.rbi
+++ b/rbi/openai/models/beta/thread_create_params.rbi
@@ -7,16 +7,22 @@ module OpenAI
extend OpenAI::Internal::Type::RequestParameters::Converter
include OpenAI::Internal::Type::RequestParameters

+ OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
# A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
# start the thread with.
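      # For example, each element may be a model instance or a plain hash
      # (a sketch; the strings are placeholders):
      #
      #   messages = [
      #     {role: :user, content: "Hello, what is AI?"},
      #     OpenAI::Beta::ThreadCreateParams::Message.new(
      #       role: :user,
      #       content: "How does AI work? Explain it in simple terms."
      #     )
      #   ]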
- sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Beta::ThreadCreateParams::Message]) + ) + end attr_reader :messages sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)] - ) - .void + messages: + T::Array[OpenAI::Beta::ThreadCreateParams::Message::OrHash] + ).void end attr_writer :messages @@ -33,25 +39,30 @@ module OpenAI # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources)) } + sig do + returns(T.nilable(OpenAI::Beta::ThreadCreateParams::ToolResources)) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable(OpenAI::Beta::ThreadCreateParams::ToolResources::OrHash) + ).void end attr_writer :tool_resources sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)], + messages: + T::Array[OpenAI::Beta::ThreadCreateParams::Message::OrHash], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateParams::ToolResources::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to @@ -70,21 +81,27 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - messages: T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], - metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + messages: T::Array[OpenAI::Beta::ThreadCreateParams::Message], + metadata: T.nilable(T::Hash[Symbol, String]), + tool_resources: + T.nilable(OpenAI::Beta::ThreadCreateParams::ToolResources), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text contents of the message. sig do returns( @@ -92,9 +109,9 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ) @@ -108,11 +125,19 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
- sig { returns(OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol) } + sig do + returns(OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol) + end attr_accessor :role # A list of files attached to the message, and the tools they should be added to. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment] + ) + ) + end attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -126,24 +151,26 @@ module OpenAI sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ), - role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, - attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlockParam::OrHash + ) + ] + ), + role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateParams::Message::Attachment::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The text contents of the message. @@ -164,53 +191,70 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ), - role: OpenAI::Models::Beta::ThreadCreateParams::Message::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]), - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateParams::Message::Attachment + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The text contents of the message. 
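        # As a sketch, either form is accepted; the block hashes below assume the
        # shapes of the corresponding content block models, and the file ID is a
        # placeholder:
        #
        #   content = "What is in this image?"
        #
        #   content = [
        #     {type: :text, text: "What is in this image?"},
        #     {type: :image_file, image_file: {file_id: "file-abc123"}}
        #   ]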
module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) ] ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end MessageContentPartParamArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Beta::Threads::MessageContentPartParam + ], OpenAI::Internal::Type::Converter ) end @@ -224,17 +268,38 @@ module OpenAI module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Beta::ThreadCreateParams::Message::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Beta::ThreadCreateParams::Message::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateParams::Message::Role::TaggedSymbol + ] + ) + end + def self.values + end end class Attachment < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -248,8 +313,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] ) @@ -259,57 +324,69 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch::OrHash + ) + ] + ).void end attr_writer :tools sig do params( file_id: String, - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - ] - ) - .returns(T.attached_class) + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The ID of the file to attach to the message. file_id: nil, # The tools to add this file to. 
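            # A sketch of a complete attachment hash; the `code_interpreter` tool
            # shape is assumed by analogy with the `file_search` variant defined
            # below:
            #
            #   {
            #     file_id: "file-abc123",
            #     tools: [{type: :code_interpreter}, {type: :file_search}]
            #   }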
tools: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - tools: T::Array[ + override.returns( + { + file_id: String, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end module Tool extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) + end + class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -318,42 +395,62 @@ module OpenAI def self.new( # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] - ) + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter)) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter + ) + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter: + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -363,25 +460,32 @@ module OpenAI # tool requires a list of vector store IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), - file_search: T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -397,12 +501,18 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -419,35 +529,33 @@ module OpenAI # store attached to the thread. sig do returns( - T.nilable(T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]) + T.nilable( + T::Array[ + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore + ] + ) ) end attr_reader :vector_stores sig do params( - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::OrHash + ] + ).void end attr_writer :vector_stores sig do params( vector_store_ids: T::Array[String], - vector_stores: T::Array[ - T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, - OpenAI::Internal::AnyHash - ) - ] - ) - .returns(T.attached_class) + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::OrHash + ] + ).returns(T.attached_class) end def self.new( # The @@ -460,27 +568,35 @@ module OpenAI # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. 
vector_stores: nil - ); end + ) + end + sig do - override - .returns( - { - vector_store_ids: T::Array[String], - vector_stores: T::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] - } - ) + override.returns( + { + vector_store_ids: T::Array[String], + vector_stores: + T::Array[ + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore + ] + } + ) + end + def to_hash end - def to_hash; end class VectorStore < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. sig do returns( T.nilable( T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ) ) ) @@ -489,13 +605,12 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ) + ).void end attr_writer :chunking_strategy @@ -519,15 +634,14 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ), + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto::OrHash, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::OrHash + ), file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -544,28 +658,44 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
metadata: nil - ); end + ) + end + sig do - override - .returns( - { - chunking_strategy: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + override.returns( + { + chunking_strategy: + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static ), - file_ids: T::Array[String], - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + file_ids: T::Array[String], + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. module ChunkingStrategy extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + class Auto < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Always `auto`. sig { returns(Symbol) } attr_accessor :type @@ -576,27 +706,32 @@ module OpenAI def self.new( # Always `auto`. type: :auto - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig do returns( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static ) end attr_reader :static sig do params( - static: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ) - ) - .void + static: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash + ).void end attr_writer :static @@ -606,31 +741,36 @@ module OpenAI sig do params( - static: T.any( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - OpenAI::Internal::AnyHash - ), + static: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( static:, # Always `static`. type: :static - ); end + ) + end + sig do - override - .returns( - { - static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, - type: Symbol - } - ) + override.returns( + { + static: + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Static < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The number of tokens that overlap between chunks. The default value is `400`. 
# # Note that the overlap must not exceed half of `max_chunk_size_tokens`. @@ -656,19 +796,31 @@ module OpenAI # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. max_chunk_size_tokens: - ); end - sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } + ) + end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - ) + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/thread_delete_params.rbi b/rbi/openai/models/beta/thread_delete_params.rbi index c9083423..6b2c04ba 100644 --- a/rbi/openai/models/beta/thread_delete_params.rbi +++ b/rbi/openai/models/beta/thread_delete_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/thread_deleted.rbi b/rbi/openai/models/beta/thread_deleted.rbi index 9944d075..c648d9ac 100644 --- a/rbi/openai/models/beta/thread_deleted.rbi +++ b/rbi/openai/models/beta/thread_deleted.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Beta class ThreadDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -13,11 +15,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"thread.deleted"); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"thread.deleted") + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/thread_retrieve_params.rbi b/rbi/openai/models/beta/thread_retrieve_params.rbi index 7658004e..64fd5cd2 100644 --- a/rbi/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/openai/models/beta/thread_retrieve_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - 
request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/thread_stream_event.rbi b/rbi/openai/models/beta/thread_stream_event.rbi index b8bd735a..cd6d25a0 100644 --- a/rbi/openai/models/beta/thread_stream_event.rbi +++ b/rbi/openai/models/beta/thread_stream_event.rbi @@ -4,12 +4,14 @@ module OpenAI module Models module Beta class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). - sig { returns(OpenAI::Models::Beta::Thread) } + sig { returns(OpenAI::Beta::Thread) } attr_reader :data - sig { params(data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash)).void } + sig { params(data: OpenAI::Beta::Thread::OrHash).void } attr_writer :data sig { returns(Symbol) } @@ -27,11 +29,10 @@ module OpenAI # created. sig do params( - data: T.any(OpenAI::Models::Beta::Thread, OpenAI::Internal::AnyHash), + data: OpenAI::Beta::Thread::OrHash, enabled: T::Boolean, event: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Represents a thread that contains @@ -40,9 +41,16 @@ module OpenAI # Whether to enable input audio transcription. enabled: nil, event: :"thread.created" - ); end - sig { override.returns({data: OpenAI::Models::Beta::Thread, event: Symbol, enabled: T::Boolean}) } - def to_hash; end + ) + end + + sig do + override.returns( + { data: OpenAI::Beta::Thread, event: Symbol, enabled: T::Boolean } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/thread_update_params.rbi b/rbi/openai/models/beta/thread_update_params.rbi index 9af4b827..4244ec93 100644 --- a/rbi/openai/models/beta/thread_update_params.rbi +++ b/rbi/openai/models/beta/thread_update_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. @@ -20,24 +22,28 @@ module OpenAI # thread. The resources are specific to the type of tool. For example, the # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
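      # For example, a sketch of a full update call; the resource method name
      # `client.beta.threads.update` is assumed from this SDK's layout, and the
      # IDs are placeholders:
      #
      #   client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
      #   client.beta.threads.update(
      #     "thread_abc123",
      #     metadata: {user: "user_abc123"},
      #     tool_resources: {file_search: {vector_store_ids: ["vs_abc123"]}}
      #   )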
- sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources)) } + sig do + returns(T.nilable(OpenAI::Beta::ThreadUpdateParams::ToolResources)) + end attr_reader :tool_resources sig do params( - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)) - ) - .void + tool_resources: + T.nilable(OpenAI::Beta::ThreadUpdateParams::ToolResources::OrHash) + ).void end attr_writer :tool_resources sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + tool_resources: + T.nilable( + OpenAI::Beta::ThreadUpdateParams::ToolResources::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -53,39 +59,57 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + metadata: T.nilable(T::Hash[Symbol, String]), + tool_resources: + T.nilable(OpenAI::Beta::ThreadUpdateParams::ToolResources), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class ToolResources < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter)) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter + ) + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter: + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter - sig { returns(T.nilable(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch + ) + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch::OrHash + ).void end attr_writer :file_search @@ -95,25 +119,32 @@ module OpenAI # tool requires a list of vector store IDs. 
sig do params( - code_interpreter: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, OpenAI::Internal::AnyHash), - file_search: T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + code_interpreter: + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter::OrHash, + file_search: + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch::OrHash + ).returns(T.attached_class) + end + def self.new(code_interpreter: nil, file_search: nil) end - def self.new(code_interpreter: nil, file_search: nil); end sig do - override - .returns( - { - code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch - } - ) + override.returns( + { + code_interpreter: + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + file_search: + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -129,12 +160,18 @@ module OpenAI # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. file_ids: nil - ); end - sig { override.returns({file_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ file_ids: T::Array[String] }) } + def to_hash + end end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -145,16 +182,23 @@ module OpenAI sig { params(vector_store_ids: T::Array[String]).void } attr_writer :vector_store_ids - sig { params(vector_store_ids: T::Array[String]).returns(T.attached_class) } + sig do + params(vector_store_ids: T::Array[String]).returns( + T.attached_class + ) + end def self.new( # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to # the thread. 
vector_store_ids: nil - ); end - sig { override.returns({vector_store_ids: T::Array[String]}) } - def to_hash; end + ) + end + + sig { override.returns({ vector_store_ids: T::Array[String] }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/annotation.rbi b/rbi/openai/models/beta/threads/annotation.rbi index 6ed86777..ee7c4ae0 100644 --- a/rbi/openai/models/beta/threads/annotation.rbi +++ b/rbi/openai/models/beta/threads/annotation.rbi @@ -10,13 +10,21 @@ module OpenAI module Annotation extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationAnnotation, + OpenAI::Beta::Threads::FilePathAnnotation ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::Annotation::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/annotation_delta.rbi b/rbi/openai/models/beta/threads/annotation_delta.rbi index deb39180..6bf8aeaa 100644 --- a/rbi/openai/models/beta/threads/annotation_delta.rbi +++ b/rbi/openai/models/beta/threads/annotation_delta.rbi @@ -10,13 +10,21 @@ module OpenAI module AnnotationDelta extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Beta::Threads::FilePathDeltaAnnotation ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::AnnotationDelta::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_annotation.rbi index 8f129c30..2fcaeee0 100644 --- a/rbi/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_citation_annotation.rbi @@ -5,17 +5,22 @@ module OpenAI module Beta module Threads class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(Integer) } attr_accessor :end_index - sig { returns(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation) } + sig do + returns(OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation) + end attr_reader :file_citation sig do params( - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Internal::AnyHash) - ) - .void + file_citation: + OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation::OrHash + ).void end attr_writer :file_citation @@ -36,12 +41,12 @@ module OpenAI sig do params( end_index: Integer, - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, OpenAI::Internal::AnyHash), + file_citation: + OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation::OrHash, start_index: Integer, text: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( end_index:, @@ -51,22 +56,28 @@ module OpenAI text:, # Always `file_citation`. 
type: :file_citation - ); end + ) + end + sig do - override - .returns( - { - end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, - start_index: Integer, - text: String, - type: Symbol - } - ) + override.returns( + { + end_index: Integer, + file_citation: + OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, + start_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class FileCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the specific File the citation is from. sig { returns(String) } attr_accessor :file_id @@ -75,9 +86,12 @@ module OpenAI def self.new( # The ID of the specific File the citation is from. file_id: - ); end - sig { override.returns({file_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi index 5d81ac86..33d0dab1 100644 --- a/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the annotation in the text content part. sig { returns(Integer) } attr_accessor :index @@ -19,14 +22,20 @@ module OpenAI sig { params(end_index: Integer).void } attr_writer :end_index - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation + ) + ) + end attr_reader :file_citation sig do params( - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Internal::AnyHash) - ) - .void + file_citation: + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation::OrHash + ).void end attr_writer :file_citation @@ -50,12 +59,12 @@ module OpenAI params( index: Integer, end_index: Integer, - file_citation: T.any(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, OpenAI::Internal::AnyHash), + file_citation: + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation::OrHash, start_index: Integer, text: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the annotation in the text content part. @@ -67,23 +76,29 @@ module OpenAI text: nil, # Always `file_citation`. type: :file_citation - ); end + ) + end + sig do - override - .returns( - { - index: Integer, - type: Symbol, - end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, - start_index: Integer, - text: String - } - ) + override.returns( + { + index: Integer, + type: Symbol, + end_index: Integer, + file_citation: + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + start_index: Integer, + text: String + } + ) + end + def to_hash end - def to_hash; end class FileCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the specific File the citation is from. 
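          # As a sketch of consuming the annotation union reworked earlier in this
          # patch (assumes a `text` content object exposing an `annotations` array):
          #
          #   text.annotations.each do |annotation|
          #     case annotation
          #     in OpenAI::Beta::Threads::FileCitationAnnotation
          #       puts annotation.file_citation.file_id
          #     in OpenAI::Beta::Threads::FilePathAnnotation
          #       puts annotation.file_path.file_id
          #     end
          #   end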
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -98,15 +113,20 @@ module OpenAI sig { params(quote: String).void } attr_writer :quote - sig { params(file_id: String, quote: String).returns(T.attached_class) } + sig do + params(file_id: String, quote: String).returns(T.attached_class) + end def self.new( # The ID of the specific File the citation is from. file_id: nil, # The specific quote in the file. quote: nil - ); end - sig { override.returns({file_id: String, quote: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String, quote: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/file_path_annotation.rbi b/rbi/openai/models/beta/threads/file_path_annotation.rbi index fc4afa16..1a6776cb 100644 --- a/rbi/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_path_annotation.rbi @@ -5,17 +5,20 @@ module OpenAI module Beta module Threads class FilePathAnnotation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(Integer) } attr_accessor :end_index - sig { returns(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath) } + sig { returns(OpenAI::Beta::Threads::FilePathAnnotation::FilePath) } attr_reader :file_path sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::AnyHash) - ) - .void + file_path: + OpenAI::Beta::Threads::FilePathAnnotation::FilePath::OrHash + ).void end attr_writer :file_path @@ -35,12 +38,12 @@ module OpenAI sig do params( end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, OpenAI::Internal::AnyHash), + file_path: + OpenAI::Beta::Threads::FilePathAnnotation::FilePath::OrHash, start_index: Integer, text: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( end_index:, @@ -50,22 +53,27 @@ module OpenAI text:, # Always `file_path`. type: :file_path - ); end + ) + end + sig do - override - .returns( - { - end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, - start_index: Integer, - text: String, - type: Symbol - } - ) + override.returns( + { + end_index: Integer, + file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath, + start_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class FilePath < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file that was generated. sig { returns(String) } attr_accessor :file_id @@ -74,9 +82,12 @@ module OpenAI def self.new( # The ID of the file that was generated. file_id: - ); end - sig { override.returns({file_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi index e58879be..d8fa4d69 100644 --- a/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the annotation in the text content part. 
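(As in the other models above, `to_hash` returns the symbol-keyed shape its sig declares. A small sketch with invented values:

citation =
  OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation.new(
    file_id: "file_abc123",
    quote: "relevant passage"
  )

# Matches the declared shape { file_id: String, quote: String }:
citation.to_hash
# => { file_id: "file_abc123", quote: "relevant passage" }
)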
sig { returns(Integer) } attr_accessor :index @@ -19,14 +22,20 @@ module OpenAI sig { params(end_index: Integer).void } attr_writer :end_index - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath + ) + ) + end attr_reader :file_path sig do params( - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::AnyHash) - ) - .void + file_path: + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath::OrHash + ).void end attr_writer :file_path @@ -49,12 +58,12 @@ module OpenAI params( index: Integer, end_index: Integer, - file_path: T.any(OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, OpenAI::Internal::AnyHash), + file_path: + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath::OrHash, start_index: Integer, text: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the annotation in the text content part. @@ -66,23 +75,29 @@ module OpenAI text: nil, # Always `file_path`. type: :file_path - ); end + ) + end + sig do - override - .returns( - { - index: Integer, - type: Symbol, - end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, - start_index: Integer, - text: String - } - ) + override.returns( + { + index: Integer, + type: Symbol, + end_index: Integer, + file_path: + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, + start_index: Integer, + text: String + } + ) + end + def to_hash end - def to_hash; end class FilePath < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file that was generated. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -94,9 +109,12 @@ module OpenAI def self.new( # The ID of the file that was generated. file_id: nil - ); end - sig { override.returns({file_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/image_file.rbi b/rbi/openai/models/beta/threads/image_file.rbi index f7ded701..78802b93 100644 --- a/rbi/openai/models/beta/threads/image_file.rbi +++ b/rbi/openai/models/beta/threads/image_file.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class ImageFile < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. @@ -13,15 +16,25 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::ImageFile::Detail::OrSymbol) + ) + end attr_reader :detail - sig { params(detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol).void } + sig do + params( + detail: OpenAI::Beta::Threads::ImageFile::Detail::OrSymbol + ).void + end attr_writer :detail sig do - params(file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol) - .returns(T.attached_class) + params( + file_id: String, + detail: OpenAI::Beta::Threads::ImageFile::Detail::OrSymbol + ).returns(T.attached_class) end def self.new( # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image @@ -31,24 +44,54 @@ module OpenAI # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. detail: nil - ); end - sig { override.returns({file_id: String, detail: OpenAI::Models::Beta::Threads::ImageFile::Detail::OrSymbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + file_id: String, + detail: OpenAI::Beta::Threads::ImageFile::Detail::OrSymbol + } + ) + end + def to_hash + end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. module Detail extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::ImageFile::Detail) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::ImageFile::Detail::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Beta::Threads::ImageFile::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Beta::Threads::ImageFile::Detail::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::ImageFile::Detail::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/image_file_content_block.rbi b/rbi/openai/models/beta/threads/image_file_content_block.rbi index 1b87fd3f..8ff6beaa 100644 --- a/rbi/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/openai/models/beta/threads/image_file_content_block.rbi @@ -5,10 +5,15 @@ module OpenAI module Beta module Threads class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Beta::Threads::ImageFile) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Beta::Threads::ImageFile) } attr_reader :image_file - sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash)).void } + sig do + params(image_file: OpenAI::Beta::Threads::ImageFile::OrHash).void + end attr_writer :image_file # Always `image_file`. @@ -19,18 +24,24 @@ module OpenAI # in the content of a message. 
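(A sketch of how the `OrSymbol` / `TaggedSymbol` split in the enum above plays out for callers, with an invented file ID: input positions are typed with `OrSymbol = T.any(Symbol, String)`, while readers and `values` return the tagged members.

# Input side: either spelling type-checks when constructing the model.
OpenAI::Beta::Threads::ImageFile.new(file_id: "file_abc123", detail: :low)
OpenAI::Beta::Threads::ImageFile.new(file_id: "file_abc123", detail: "high")

# Output side: the declared members are Detail::TaggedSymbol constants.
OpenAI::Beta::Threads::ImageFile::Detail.values
# => [:auto, :low, :high]
)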
sig do params( - image_file: T.any(OpenAI::Models::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash), + image_file: OpenAI::Beta::Threads::ImageFile::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( image_file:, # Always `image_file`. type: :image_file - ); end - sig { override.returns({image_file: OpenAI::Models::Beta::Threads::ImageFile, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { image_file: OpenAI::Beta::Threads::ImageFile, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/image_file_delta.rbi b/rbi/openai/models/beta/threads/image_file_delta.rbi index 628e1cef..33be5605 100644 --- a/rbi/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/openai/models/beta/threads/image_file_delta.rbi @@ -5,12 +5,25 @@ module OpenAI module Beta module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol + ) + ) + end attr_reader :detail - sig { params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol).void } + sig do + params( + detail: OpenAI::Beta::Threads::ImageFileDelta::Detail::OrSymbol + ).void + end attr_writer :detail # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image @@ -23,8 +36,10 @@ module OpenAI attr_writer :file_id sig do - params(detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::OrSymbol, file_id: String) - .returns(T.attached_class) + params( + detail: OpenAI::Beta::Threads::ImageFileDelta::Detail::OrSymbol, + file_id: String + ).returns(T.attached_class) end def self.new( # Specifies the detail level of the image if specified by the user. `low` uses @@ -34,27 +49,57 @@ module OpenAI # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. file_id: nil - ); end + ) + end + sig do - override - .returns({detail: OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, file_id: String}) + override.returns( + { + detail: + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol, + file_id: String + } + ) + end + def to_hash end - def to_hash; end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
module Detail extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/openai/models/beta/threads/image_file_delta_block.rbi index c0ae3f1e..ea1914cc 100644 --- a/rbi/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/openai/models/beta/threads/image_file_delta_block.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part in the message. sig { returns(Integer) } attr_accessor :index @@ -13,10 +16,14 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageFileDelta)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::ImageFileDelta)) } attr_reader :image_file - sig { params(image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::AnyHash)).void } + sig do + params( + image_file: OpenAI::Beta::Threads::ImageFileDelta::OrHash + ).void + end attr_writer :image_file # References an image [File](https://platform.openai.com/docs/api-reference/files) @@ -24,10 +31,9 @@ module OpenAI sig do params( index: Integer, - image_file: T.any(OpenAI::Models::Beta::Threads::ImageFileDelta, OpenAI::Internal::AnyHash), + image_file: OpenAI::Beta::Threads::ImageFileDelta::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part in the message. @@ -35,11 +41,20 @@ module OpenAI image_file: nil, # Always `image_file`. 
type: :image_file - ); end + ) + end + sig do - override.returns({index: Integer, type: Symbol, image_file: OpenAI::Models::Beta::Threads::ImageFileDelta}) + override.returns( + { + index: Integer, + type: Symbol, + image_file: OpenAI::Beta::Threads::ImageFileDelta + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/image_url.rbi b/rbi/openai/models/beta/threads/image_url.rbi index 6c364602..04b0f75d 100644 --- a/rbi/openai/models/beta/threads/image_url.rbi +++ b/rbi/openai/models/beta/threads/image_url.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class ImageURL < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The external URL of the image, must be a supported image types: jpeg, jpg, png, # gif, webp. sig { returns(String) } @@ -12,15 +15,25 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::ImageURL::Detail::OrSymbol) + ) + end attr_reader :detail - sig { params(detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol).void } + sig do + params( + detail: OpenAI::Beta::Threads::ImageURL::Detail::OrSymbol + ).void + end attr_writer :detail sig do - params(url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol) - .returns(T.attached_class) + params( + url: String, + detail: OpenAI::Beta::Threads::ImageURL::Detail::OrSymbol + ).returns(T.attached_class) end def self.new( # The external URL of the image, must be a supported image types: jpeg, jpg, png, @@ -29,24 +42,51 @@ module OpenAI # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` detail: nil - ); end - sig { override.returns({url: String, detail: OpenAI::Models::Beta::Threads::ImageURL::Detail::OrSymbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + url: String, + detail: OpenAI::Beta::Threads::ImageURL::Detail::OrSymbol + } + ) + end + def to_hash + end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` module Detail extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::ImageURL::Detail) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::ImageURL::Detail::TaggedSymbol + ) + LOW = + T.let(:low, OpenAI::Beta::Threads::ImageURL::Detail::TaggedSymbol) + HIGH = + T.let( + :high, + OpenAI::Beta::Threads::ImageURL::Detail::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::ImageURL::Detail::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/image_url_content_block.rbi b/rbi/openai/models/beta/threads/image_url_content_block.rbi index 53b64254..b2e47c68 100644 --- a/rbi/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/openai/models/beta/threads/image_url_content_block.rbi @@ -5,10 +5,15 @@ module OpenAI module Beta module Threads class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Beta::Threads::ImageURL) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Beta::Threads::ImageURL) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash)).void } + sig do + params(image_url: OpenAI::Beta::Threads::ImageURL::OrHash).void + end attr_writer :image_url # The type of the content part. @@ -17,16 +22,25 @@ module OpenAI # References an image URL in the content of a message. sig do - params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + image_url: OpenAI::Beta::Threads::ImageURL::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( image_url:, # The type of the content part. type: :image_url - ); end - sig { override.returns({image_url: OpenAI::Models::Beta::Threads::ImageURL, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { image_url: OpenAI::Beta::Threads::ImageURL, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/image_url_delta.rbi b/rbi/openai/models/beta/threads/image_url_delta.rbi index 0de64fe7..a742d53f 100644 --- a/rbi/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/openai/models/beta/threads/image_url_delta.rbi @@ -5,12 +5,25 @@ module OpenAI module Beta module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol + ) + ) + end attr_reader :detail - sig { params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol).void } + sig do + params( + detail: OpenAI::Beta::Threads::ImageURLDelta::Detail::OrSymbol + ).void + end attr_writer :detail # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, @@ -22,8 +35,10 @@ module OpenAI attr_writer :url sig do - params(detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::OrSymbol, url: String) - .returns(T.attached_class) + params( + detail: OpenAI::Beta::Threads::ImageURLDelta::Detail::OrSymbol, + url: String + ).returns(T.attached_class) end def self.new( # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in @@ -32,26 +47,57 @@ module OpenAI # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, # webp. url: nil - ); end + ) + end + sig do - override.returns({detail: OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, url: String}) + override.returns( + { + detail: + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol, + url: String + } + ) + end + def to_hash end - def to_hash; end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. module Detail extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/openai/models/beta/threads/image_url_delta_block.rbi index e7eb2ab2..e2cc3d25 100644 --- a/rbi/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/openai/models/beta/threads/image_url_delta_block.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part in the message. 
sig { returns(Integer) } attr_accessor :index @@ -13,20 +16,21 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::ImageURLDelta)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::ImageURLDelta)) } attr_reader :image_url - sig { params(image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::AnyHash)).void } + sig do + params(image_url: OpenAI::Beta::Threads::ImageURLDelta::OrHash).void + end attr_writer :image_url # References an image URL in the content of a message. sig do params( index: Integer, - image_url: T.any(OpenAI::Models::Beta::Threads::ImageURLDelta, OpenAI::Internal::AnyHash), + image_url: OpenAI::Beta::Threads::ImageURLDelta::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part in the message. @@ -34,9 +38,20 @@ module OpenAI image_url: nil, # Always `image_url`. type: :image_url - ); end - sig { override.returns({index: Integer, type: Symbol, image_url: OpenAI::Models::Beta::Threads::ImageURLDelta}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + index: Integer, + type: Symbol, + image_url: OpenAI::Beta::Threads::ImageURLDelta + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/message.rbi b/rbi/openai/models/beta/threads/message.rbi index 3c4ecef7..02bd3c97 100644 --- a/rbi/openai/models/beta/threads/message.rbi +++ b/rbi/openai/models/beta/threads/message.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -16,7 +19,11 @@ module OpenAI attr_accessor :assistant_id # A list of files attached to the message, and the tools they were added to. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Beta::Threads::Message::Attachment]) + ) + end attr_accessor :attachments # The Unix timestamp (in seconds) for when the message was completed. @@ -28,10 +35,10 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlock, + OpenAI::Beta::Threads::RefusalContentBlock ) ] ) @@ -47,14 +54,20 @@ module OpenAI attr_accessor :incomplete_at # On an incomplete message, details about why the message is incomplete. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::Message::IncompleteDetails) + ) + end attr_reader :incomplete_details sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::AnyHash)) - ) - .void + incomplete_details: + T.nilable( + OpenAI::Beta::Threads::Message::IncompleteDetails::OrHash + ) + ).void end attr_writer :incomplete_details @@ -72,7 +85,7 @@ module OpenAI attr_accessor :object # The entity that produced the message. One of `user` or `assistant`. 
- sig { returns(OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + sig { returns(OpenAI::Beta::Threads::Message::Role::TaggedSymbol) } attr_accessor :role # The ID of the [run](https://platform.openai.com/docs/api-reference/runs) @@ -83,7 +96,7 @@ module OpenAI # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. - sig { returns(OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + sig { returns(OpenAI::Beta::Threads::Message::Status::TaggedSymbol) } attr_accessor :status # The [thread](https://platform.openai.com/docs/api-reference/threads) ID that @@ -97,28 +110,33 @@ module OpenAI params( id: String, assistant_id: T.nilable(String), - attachments: T.nilable(T::Array[T.any(OpenAI::Models::Beta::Threads::Message::Attachment, OpenAI::Internal::AnyHash)]), + attachments: + T.nilable( + T::Array[OpenAI::Beta::Threads::Message::Attachment::OrHash] + ), completed_at: T.nilable(Integer), - content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock - ) - ], + content: + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlock::OrHash, + OpenAI::Beta::Threads::RefusalContentBlock::OrHash + ) + ], created_at: Integer, incomplete_at: T.nilable(Integer), - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Message::IncompleteDetails, OpenAI::Internal::AnyHash)), + incomplete_details: + T.nilable( + OpenAI::Beta::Threads::Message::IncompleteDetails::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), - role: OpenAI::Models::Beta::Threads::Message::Role::OrSymbol, + role: OpenAI::Beta::Threads::Message::Role::OrSymbol, run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::OrSymbol, + status: OpenAI::Beta::Threads::Message::Status::OrSymbol, thread_id: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -160,38 +178,48 @@ module OpenAI thread_id:, # The object type, which is always `thread.message`. 
object: :"thread.message" - ); end + ) + end + sig do - override - .returns( - { - id: String, - assistant_id: T.nilable(String), - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::Message::Attachment]), - completed_at: T.nilable(Integer), - content: T::Array[ + override.returns( + { + id: String, + assistant_id: T.nilable(String), + attachments: + T.nilable( + T::Array[OpenAI::Beta::Threads::Message::Attachment] + ), + completed_at: T.nilable(Integer), + content: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlock, - OpenAI::Models::Beta::Threads::RefusalContentBlock + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlock, + OpenAI::Beta::Threads::RefusalContentBlock ) ], - created_at: Integer, - incomplete_at: T.nilable(Integer), - incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Message::IncompleteDetails), - metadata: T.nilable(T::Hash[Symbol, String]), - object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol, - run_id: T.nilable(String), - status: OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol, - thread_id: String - } - ) + created_at: Integer, + incomplete_at: T.nilable(Integer), + incomplete_details: + T.nilable(OpenAI::Beta::Threads::Message::IncompleteDetails), + metadata: T.nilable(T::Hash[Symbol, String]), + object: Symbol, + role: OpenAI::Beta::Threads::Message::Role::TaggedSymbol, + run_id: T.nilable(String), + status: OpenAI::Beta::Threads::Message::Status::TaggedSymbol, + thread_id: String + } + ) + end + def to_hash end - def to_hash; end class Attachment < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } attr_reader :file_id @@ -205,8 +233,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] ) @@ -216,57 +244,69 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly::OrHash + ) + ] + ).void end attr_writer :tools sig do params( file_id: String, - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - ] - ) - .returns(T.attached_class) + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The ID of the file to attach to the message. file_id: nil, # The tools to add this file to. 
tools: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - tools: T::Array[ + override.returns( + { + file_id: String, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end module Tool extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) + end + class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -275,63 +315,110 @@ module OpenAI def self.new( # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::Message::Attachment::Tool::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class IncompleteDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The reason the message is incomplete. - sig { returns(OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + sig do + returns( + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) + end attr_accessor :reason # On an incomplete message, details about why the message is incomplete. sig do - params(reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol) - .returns(T.attached_class) + params( + reason: + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::OrSymbol + ).returns(T.attached_class) end def self.new( # The reason the message is incomplete. reason: - ); end + ) + end + sig do - override.returns({reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol}) + override.returns( + { + reason: + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The reason the message is incomplete. 
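(For the attachment tool union above, each element of `tools` may be supplied as a model or, via `OrHash`, as a bare hash; the file-search variant carries nothing but its `type` tag. A sketch with an invented file ID; `CodeInterpreterTool.new` is assumed to default its `type` to `:code_interpreter` as the SDK's other type-only tools do:

OpenAI::Beta::Threads::Message::Attachment.new(
  file_id: "file_abc123",
  tools: [
    OpenAI::Beta::CodeInterpreterTool.new,
    { type: :file_search } # AssistantToolsFileSearchTypeOnly as a plain hash
  ]
)
)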
module Reason extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :content_filter, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) MAX_TOKENS = - T.let(:max_tokens, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :max_tokens, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) RUN_CANCELLED = - T.let(:run_cancelled, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :run_cancelled, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) RUN_EXPIRED = - T.let(:run_expired, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :run_expired, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) RUN_FAILED = - T.let(:run_failed, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :run_failed, + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ) sig do - override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end @@ -339,14 +426,27 @@ module OpenAI module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Message::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) + USER = + T.let(:user, OpenAI::Beta::Threads::Message::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Beta::Threads::Message::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::Message::Role::TaggedSymbol] + ) + end + def self.values + end end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -354,15 +454,35 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Message::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Beta::Threads::Message::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Beta::Threads::Message::Status::TaggedSymbol + ) + COMPLETED = + 
T.let( + :completed, + OpenAI::Beta::Threads::Message::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::Message::Status::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/message_content.rbi b/rbi/openai/models/beta/threads/message_content.rbi index 99a0c488..5b159a12 100644 --- a/rbi/openai/models/beta/threads/message_content.rbi +++ b/rbi/openai/models/beta/threads/message_content.rbi @@ -9,13 +9,23 @@ module OpenAI module MessageContent extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlock, + OpenAI::Beta::Threads::RefusalContentBlock ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::MessageContent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/message_content_delta.rbi b/rbi/openai/models/beta/threads/message_content_delta.rbi index a6ad0c14..4ed6646b 100644 --- a/rbi/openai/models/beta/threads/message_content_delta.rbi +++ b/rbi/openai/models/beta/threads/message_content_delta.rbi @@ -9,13 +9,23 @@ module OpenAI module MessageContentDelta extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Beta::Threads::TextDeltaBlock, + OpenAI::Beta::Threads::RefusalDeltaBlock, + OpenAI::Beta::Threads::ImageURLDeltaBlock ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::MessageContentDelta::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/message_content_part_param.rbi b/rbi/openai/models/beta/threads/message_content_part_param.rbi index fbe37283..38daf411 100644 --- a/rbi/openai/models/beta/threads/message_content_part_param.rbi +++ b/rbi/openai/models/beta/threads/message_content_part_param.rbi @@ -9,13 +9,22 @@ module OpenAI module MessageContentPartParam extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::MessageContentPartParam::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/message_create_params.rbi b/rbi/openai/models/beta/threads/message_create_params.rbi index 326dc671..60fb47c0 100644 --- a/rbi/openai/models/beta/threads/message_create_params.rbi +++ 
b/rbi/openai/models/beta/threads/message_create_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text contents of the message. sig do returns( @@ -15,9 +18,9 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ) @@ -31,11 +34,19 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. - sig { returns(OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol) } + sig do + returns(OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol) + end attr_accessor :role # A list of files attached to the message, and the tools they should be added to. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment] + ) + ) + end attr_accessor :attachments # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -49,25 +60,27 @@ module OpenAI sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlockParam::OrHash + ) + ] + ), + role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Attachment::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The text contents of the message. @@ -89,54 +102,72 @@ module OpenAI # a maximum length of 512 characters. 
metadata: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]), - metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOptions - } - ) + role: + OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Attachment + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The text contents of the message. module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) ] ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end MessageContentPartParamArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Beta::Threads::MessageContentPartParam + ], OpenAI::Internal::Type::Converter ) end @@ -150,17 +181,38 @@ module OpenAI module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Beta::Threads::MessageCreateParams::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Beta::Threads::MessageCreateParams::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Role::TaggedSymbol + ] + ) + end + def self.values + end end class Attachment < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file to attach to the message. 
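(Stepping back to the `Content` union above: the text contents may be a bare string or an array of content-part models/hashes, both covered by `Content::Variants`. A sketch with invented IDs; the `TextContentBlockParam` constructor shape is assumed from its sibling models:

# String form:
params = { role: :user, content: "Hello!" }

# Array-of-parts form, mixing hash and model inputs:
params = {
  role: :user,
  content: [
    { type: :image_file, image_file: { file_id: "file_abc123" } },
    OpenAI::Beta::Threads::TextContentBlockParam.new(text: "What is in this image?")
  ]
}
)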
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -174,8 +226,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] ) @@ -185,57 +237,69 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch::OrHash + ) + ] + ).void end attr_writer :tools sig do params( file_id: String, - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - ] - ) - .returns(T.attached_class) + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The ID of the file to attach to the message. file_id: nil, # The tools to add this file to. tools: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - tools: T::Array[ + override.returns( + { + file_id: String, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end module Tool extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) + end + class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -244,18 +308,23 @@ module OpenAI def self.new( # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/message_delete_params.rbi b/rbi/openai/models/beta/threads/message_delete_params.rbi index 8f25de1b..0811af43 100644 --- a/rbi/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/openai/models/beta/threads/message_delete_params.rbi @@ -8,23 +8,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id sig do params( thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - 
OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(thread_id:, request_options: {}) end - def self.new(thread_id:, request_options: {}); end - sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { thread_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/message_deleted.rbi b/rbi/openai/models/beta/threads/message_deleted.rbi index b5da3306..73805106 100644 --- a/rbi/openai/models/beta/threads/message_deleted.rbi +++ b/rbi/openai/models/beta/threads/message_deleted.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class MessageDeleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -14,11 +17,21 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"thread.message.deleted"); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"thread.message.deleted") + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns( + { id: String, deleted: T::Boolean, object: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/message_delta.rbi b/rbi/openai/models/beta/threads/message_delta.rbi index 55f3b251..7278f6d3 100644 --- a/rbi/openai/models/beta/threads/message_delta.rbi +++ b/rbi/openai/models/beta/threads/message_delta.rbi @@ -5,16 +5,19 @@ module OpenAI module Beta module Threads class MessageDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message in array of text and/or images. sig do returns( T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + OpenAI::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Beta::Threads::TextDeltaBlock, + OpenAI::Beta::Threads::RefusalDeltaBlock, + OpenAI::Beta::Threads::ImageURLDeltaBlock ) ] ) @@ -24,79 +27,106 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) - ] - ) - .void + content: + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileDeltaBlock::OrHash, + OpenAI::Beta::Threads::TextDeltaBlock::OrHash, + OpenAI::Beta::Threads::RefusalDeltaBlock::OrHash, + OpenAI::Beta::Threads::ImageURLDeltaBlock::OrHash + ) + ] + ).void end attr_writer :content # The entity that produced the message. One of `user` or `assistant`. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol) + ) + end attr_reader :role - sig { params(role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol).void } + sig do + params( + role: OpenAI::Beta::Threads::MessageDelta::Role::OrSymbol + ).void + end attr_writer :role # The delta containing the fields that have changed on the Message. sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock - ) - ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::OrSymbol - ) - .returns(T.attached_class) + content: + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileDeltaBlock::OrHash, + OpenAI::Beta::Threads::TextDeltaBlock::OrHash, + OpenAI::Beta::Threads::RefusalDeltaBlock::OrHash, + OpenAI::Beta::Threads::ImageURLDeltaBlock::OrHash + ) + ], + role: OpenAI::Beta::Threads::MessageDelta::Role::OrSymbol + ).returns(T.attached_class) end def self.new( # The content of the message in array of text and/or images. content: nil, # The entity that produced the message. One of `user` or `assistant`. role: nil - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[ + override.returns( + { + content: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Models::Beta::Threads::TextDeltaBlock, - OpenAI::Models::Beta::Threads::RefusalDeltaBlock, - OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + OpenAI::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Beta::Threads::TextDeltaBlock, + OpenAI::Beta::Threads::RefusalDeltaBlock, + OpenAI::Beta::Threads::ImageURLDeltaBlock ) ], - role: OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol - } - ) + role: OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The entity that produced the message. One of `user` or `assistant`. 
module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::MessageDelta::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/message_delta_event.rbi b/rbi/openai/models/beta/threads/message_delta_event.rbi index 4e6d3d17..4a93d6d2 100644 --- a/rbi/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/openai/models/beta/threads/message_delta_event.rbi @@ -5,15 +5,20 @@ module OpenAI module Beta module Threads class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the message, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id # The delta containing the fields that have changed on the Message. - sig { returns(OpenAI::Models::Beta::Threads::MessageDelta) } + sig { returns(OpenAI::Beta::Threads::MessageDelta) } attr_reader :delta - sig { params(delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::AnyHash)).void } + sig do + params(delta: OpenAI::Beta::Threads::MessageDelta::OrHash).void + end attr_writer :delta # The object type, which is always `thread.message.delta`. @@ -25,10 +30,9 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::MessageDelta, OpenAI::Internal::AnyHash), + delta: OpenAI::Beta::Threads::MessageDelta::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier of the message, which can be referenced in API endpoints. @@ -37,9 +41,20 @@ module OpenAI delta:, # The object type, which is always `thread.message.delta`. object: :"thread.message.delta" - ); end - sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::MessageDelta, object: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + delta: OpenAI::Beta::Threads::MessageDelta, + object: Symbol + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/message_list_params.rbi b/rbi/openai/models/beta/threads/message_list_params.rbi index 50cf3829..158f377d 100644 --- a/rbi/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/openai/models/beta/threads/message_list_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -38,10 +41,20 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::MessageListParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::Beta::Threads::MessageListParams::Order::OrSymbol + ).void + end attr_writer :order # Filter messages by the run ID that generated them. @@ -56,11 +69,10 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, + order: OpenAI::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -82,35 +94,56 @@ module OpenAI # Filter messages by the run ID that generated them. run_id: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, - run_id: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + limit: Integer, + order: + OpenAI::Beta::Threads::MessageListParams::Order::OrSymbol, + run_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
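          # Illustrative only (assumes `client`, a configured `OpenAI::Client`;
          # the resource method's exact shape is not defined in this file):
          # since `Order::OrSymbol` admits bare Symbols and Strings, callers can
          # pass plain values:
          #
          #   page = client.beta.threads.messages.list(
          #     "thread_abc123",
          #     limit: 20,
          #     order: :desc
          #   )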
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::MessageListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Beta::Threads::MessageListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Beta::Threads::MessageListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::MessageListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/openai/models/beta/threads/message_retrieve_params.rbi index 1818fc76..4a7da6f0 100644 --- a/rbi/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/message_retrieve_params.rbi @@ -8,23 +8,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id sig do params( thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(thread_id:, request_options: {}) end - def self.new(thread_id:, request_options: {}); end - sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { thread_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/message_update_params.rbi b/rbi/openai/models/beta/threads/message_update_params.rbi index 33d47d32..ecbd63ee 100644 --- a/rbi/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/openai/models/beta/threads/message_update_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id @@ -24,9 +27,8 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( thread_id:, @@ -38,18 +40,20 @@ module OpenAI # a maximum length of 512 characters. 
metadata: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - thread_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + thread_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/refusal_content_block.rbi b/rbi/openai/models/beta/threads/refusal_content_block.rbi index 70e4dbb9..96ddb8b1 100644 --- a/rbi/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/openai/models/beta/threads/refusal_content_block.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class RefusalContentBlock < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :refusal @@ -13,14 +16,19 @@ module OpenAI attr_accessor :type # The refusal content generated by the assistant. - sig { params(refusal: String, type: Symbol).returns(T.attached_class) } + sig do + params(refusal: String, type: Symbol).returns(T.attached_class) + end def self.new( refusal:, # Always `refusal`. type: :refusal - ); end - sig { override.returns({refusal: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ refusal: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/openai/models/beta/threads/refusal_delta_block.rbi index 8b3878d9..c6854430 100644 --- a/rbi/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/openai/models/beta/threads/refusal_delta_block.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the refusal part in the message. sig { returns(Integer) } attr_accessor :index @@ -20,16 +23,25 @@ module OpenAI attr_writer :refusal # The refusal content that is part of a message. - sig { params(index: Integer, refusal: String, type: Symbol).returns(T.attached_class) } + sig do + params(index: Integer, refusal: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The index of the refusal part in the message. index:, refusal: nil, # Always `refusal`. type: :refusal - ); end - sig { override.returns({index: Integer, type: Symbol, refusal: String}) } - def to_hash; end + ) + end + + sig do + override.returns({ index: Integer, type: Symbol, refusal: String }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi index b1788c25..c2625cd6 100644 --- a/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the tool call. This ID must be referenced when you submit the tool # outputs in using the # [Submit tool outputs to run](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) @@ -13,14 +16,18 @@ module OpenAI attr_accessor :id # The function definition. 
- sig { returns(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function) } + sig do + returns( + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function + ) + end attr_reader :function sig do params( - function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Internal::AnyHash) - ) - .void + function: + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function::OrHash + ).void end attr_writer :function @@ -33,10 +40,10 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, OpenAI::Internal::AnyHash), + function: + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the tool call. This ID must be referenced when you submit the tool @@ -49,16 +56,26 @@ module OpenAI # The type of tool call the output is required for. For now, this is always # `function`. type: :function - ); end + ) + end + sig do - override - .returns( - {id: String, function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, type: Symbol} - ) + override.returns( + { + id: String, + function: + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments that the model expects you to pass to the function. sig { returns(String) } attr_accessor :arguments @@ -68,15 +85,20 @@ module OpenAI attr_accessor :name # The function definition. - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns(T.attached_class) + end def self.new( # The arguments that the model expects you to pass to the function. arguments:, # The name of the function. name: - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index 102cf828..c769a3f6 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class Run < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -37,14 +40,16 @@ module OpenAI # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails)) } + sig do + returns(T.nilable(OpenAI::Beta::Threads::Run::IncompleteDetails)) + end attr_reader :incomplete_details sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::AnyHash)) - ) - .void + incomplete_details: + T.nilable(OpenAI::Beta::Threads::Run::IncompleteDetails::OrHash) + ).void end attr_writer :incomplete_details @@ -55,14 +60,14 @@ module OpenAI attr_accessor :instructions # The last error associated with this run. Will be `null` if there are no errors. 
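          # A minimal sketch (assuming `run` is an already-fetched
          # `OpenAI::Beta::Threads::Run`; nothing below is defined by this
          # file): because the field is nilable, reads are typically guarded:
          #
          #   if (err = run.last_error)
          #     warn("run failed: #{err.code}: #{err.message}")
          #   end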
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::LastError)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::Run::LastError)) } attr_reader :last_error sig do params( - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::AnyHash)) - ) - .void + last_error: + T.nilable(OpenAI::Beta::Threads::Run::LastError::OrHash) + ).void end attr_writer :last_error @@ -103,14 +108,14 @@ module OpenAI # Details on the action required to continue the run. Will be `null` if no action # is required. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::Run::RequiredAction)) } attr_reader :required_action sig do params( - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::AnyHash)) - ) - .void + required_action: + T.nilable(OpenAI::Beta::Threads::Run::RequiredAction::OrHash) + ).void end attr_writer :required_action @@ -139,9 +144,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -155,7 +160,7 @@ module OpenAI # The status of the run, which can be either `queued`, `in_progress`, # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. - sig { returns(OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + sig { returns(OpenAI::Beta::Threads::RunStatus::TaggedSymbol) } attr_accessor :status # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) @@ -174,8 +179,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Beta::AssistantToolChoice ) ) ) @@ -189,9 +194,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -200,23 +205,31 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy)) } + sig do + returns(T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy)) + end attr_reader :truncation_strategy sig do params( - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::AnyHash)) - ) - .void + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ) + ).void end attr_writer :truncation_strategy # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::Usage)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::Run::Usage)) } attr_reader :usage - sig { params(usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::AnyHash))).void } + sig do + params( + usage: T.nilable(OpenAI::Beta::Threads::Run::Usage::OrHash) + ).void + end attr_writer :usage # The sampling temperature used for this run. 
If not set, defaults to 1. @@ -238,49 +251,56 @@ module OpenAI created_at: Integer, expires_at: T.nilable(Integer), failed_at: T.nilable(Integer), - incomplete_details: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::IncompleteDetails, OpenAI::Internal::AnyHash)), + incomplete_details: + T.nilable( + OpenAI::Beta::Threads::Run::IncompleteDetails::OrHash + ), instructions: String, - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::LastError, OpenAI::Internal::AnyHash)), + last_error: + T.nilable(OpenAI::Beta::Threads::Run::LastError::OrHash), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: String, parallel_tool_calls: T::Boolean, - required_action: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction, OpenAI::Internal::AnyHash)), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + required_action: + T.nilable(OpenAI::Beta::Threads::Run::RequiredAction::OrHash), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::OrSymbol, + status: OpenAI::Beta::Threads::RunStatus::OrSymbol, thread_id: String, - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], - truncation_strategy: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::TruncationStrategy, OpenAI::Internal::AnyHash)), - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Run::Usage, OpenAI::Internal::AnyHash)), + tool_choice: + T.nilable( + T.any( + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash + ) + ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ), + usage: T.nilable(OpenAI::Beta::Threads::Run::Usage::OrHash), temperature: T.nilable(Float), top_p: T.nilable(Float), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -386,83 +406,117 @@ module OpenAI top_p: nil, # The object type, which is always `thread.run`. 
object: :"thread.run" - ); end + ) + end + sig do - override - .returns( - { - id: String, - assistant_id: String, - cancelled_at: T.nilable(Integer), - completed_at: T.nilable(Integer), - created_at: Integer, - expires_at: T.nilable(Integer), - failed_at: T.nilable(Integer), - incomplete_details: T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails), - instructions: String, - last_error: T.nilable(OpenAI::Models::Beta::Threads::Run::LastError), - max_completion_tokens: T.nilable(Integer), - max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - object: Symbol, - parallel_tool_calls: T::Boolean, - required_action: T.nilable(OpenAI::Models::Beta::Threads::Run::RequiredAction), - response_format: T.nilable( + override.returns( + { + id: String, + assistant_id: String, + cancelled_at: T.nilable(Integer), + completed_at: T.nilable(Integer), + created_at: Integer, + expires_at: T.nilable(Integer), + failed_at: T.nilable(Integer), + incomplete_details: + T.nilable(OpenAI::Beta::Threads::Run::IncompleteDetails), + instructions: String, + last_error: T.nilable(OpenAI::Beta::Threads::Run::LastError), + max_completion_tokens: T.nilable(Integer), + max_prompt_tokens: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + object: Symbol, + parallel_tool_calls: T::Boolean, + required_action: + T.nilable(OpenAI::Beta::Threads::Run::RequiredAction), + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - started_at: T.nilable(Integer), - status: OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol, - thread_id: String, - tool_choice: T.nilable( + started_at: T.nilable(Integer), + status: OpenAI::Beta::Threads::RunStatus::TaggedSymbol, + thread_id: String, + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Beta::AssistantToolChoice ) ), - tools: T::Array[ + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ], - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::Run::TruncationStrategy), - usage: T.nilable(OpenAI::Models::Beta::Threads::Run::Usage), - temperature: T.nilable(Float), - top_p: T.nilable(Float) - } - ) + truncation_strategy: + T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy), + usage: T.nilable(OpenAI::Beta::Threads::Run::Usage), + temperature: T.nilable(Float), + top_p: T.nilable(Float) + } + ) + end + def to_hash end - def to_hash; end class IncompleteDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol + ) + ) + end attr_reader :reason - sig { params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol).void } + sig do + params( + reason: + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol + ).void + end attr_writer :reason # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. sig do - params(reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol) - .returns(T.attached_class) + params( + reason: + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::OrSymbol + ).returns(T.attached_class) end def self.new( # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. reason: nil - ); end - sig { override.returns({reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + reason: + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol + } + ) + end + def to_hash + end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -470,22 +524,45 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_COMPLETION_TOKENS = - T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :max_completion_tokens, + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol + ) MAX_PROMPT_TOKENS = - T.let(:max_prompt_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :max_prompt_tokens, + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol + ] + ) + end + def self.values + end end end class LastError < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. - sig { returns(OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + sig do + returns(OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol) + end attr_accessor :code # A human-readable description of the error. @@ -494,49 +571,86 @@ module OpenAI # The last error associated with this run. Will be `null` if there are no errors. sig do - params(code: OpenAI::Models::Beta::Threads::Run::LastError::Code::OrSymbol, message: String) - .returns(T.attached_class) + params( + code: OpenAI::Beta::Threads::Run::LastError::Code::OrSymbol, + message: String + ).returns(T.attached_class) end def self.new( # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. code:, # A human-readable description of the error. 
message: - ); end + ) + end + sig do - override - .returns({code: OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol, message: String}) + override.returns( + { + code: + OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol, + message: String + } + ) + end + def to_hash end - def to_hash; end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. module Code extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Run::LastError::Code) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + SERVER_ERROR = + T.let( + :server_error, + OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol + ) RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + T.let( + :rate_limit_exceeded, + OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol + ) INVALID_PROMPT = - T.let(:invalid_prompt, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) + T.let( + :invalid_prompt, + OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Run::LastError::Code::TaggedSymbol + ] + ) + end + def self.values + end end end class RequiredAction < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Details on the tool outputs needed for this run to continue. - sig { returns(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs) } + sig do + returns( + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs + ) + end attr_reader :submit_tool_outputs sig do params( - submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Internal::AnyHash) - ) - .void + submit_tool_outputs: + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs::OrHash + ).void end attr_writer :submit_tool_outputs @@ -548,52 +662,88 @@ module OpenAI # is required. sig do params( - submit_tool_outputs: T.any(OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, OpenAI::Internal::AnyHash), + submit_tool_outputs: + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Details on the tool outputs needed for this run to continue. submit_tool_outputs:, # For now, this is always `submit_tool_outputs`. type: :submit_tool_outputs - ); end + ) + end + sig do - override - .returns( - {submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, type: Symbol} - ) + override.returns( + { + submit_tool_outputs: + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of the relevant tool calls. 
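          # Sketch under stated assumptions (`client` and the `my_dispatch`
          # helper are hypothetical, and the submit method's exact signature is
          # not defined in this file); each output pairs a `tool_call_id` with
          # the result of running the named function:
          #
          #   calls = run.required_action.submit_tool_outputs.tool_calls
          #   outputs = calls.map do |call|
          #     { tool_call_id: call.id, output: my_dispatch(call.function.name, call.function.arguments) }
          #   end
          #   client.beta.threads.runs.submit_tool_outputs(run.id, thread_id: run.thread_id, tool_outputs: outputs)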
- sig { returns(T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]) } + sig do + returns( + T::Array[ + OpenAI::Beta::Threads::RequiredActionFunctionToolCall + ] + ) + end attr_accessor :tool_calls # Details on the tool outputs needed for this run to continue. sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + tool_calls: + T::Array[ + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::OrHash + ] + ).returns(T.attached_class) end def self.new( # A list of the relevant tool calls. tool_calls: - ); end - sig { override.returns({tool_calls: T::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall]}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + tool_calls: + T::Array[ + OpenAI::Beta::Threads::RequiredActionFunctionToolCall + ] + } + ) + end + def to_hash + end end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + sig do + returns( + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + end attr_accessor :type # The number of most recent messages from the thread when constructing the context @@ -605,10 +755,10 @@ module OpenAI # control the intial context window of the run. sig do params( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, last_messages: T.nilable(Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The truncation strategy to use for the thread. The default is `auto`. If set to @@ -619,17 +769,20 @@ module OpenAI # The number of most recent messages from the thread when constructing the context # for the run. last_messages: nil - ); end + ) + end + sig do - override - .returns( - { - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, - last_messages: T.nilable(Integer) - } - ) + override.returns( + { + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash end - def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. 
If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -639,19 +792,41 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) LAST_MESSAGES = - T.let(:last_messages, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) + T.let( + :last_messages, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end end end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of completion tokens used over the course of the run. sig { returns(Integer) } attr_accessor :completion_tokens @@ -680,11 +855,20 @@ module OpenAI prompt_tokens:, # Total number of tokens used (prompt + completion). total_tokens: - ); end + ) + end + sig do - override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) + override.returns( + { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/run_cancel_params.rbi b/rbi/openai/models/beta/threads/run_cancel_params.rbi index 829a2ec4..1101750b 100644 --- a/rbi/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/openai/models/beta/threads/run_cancel_params.rbi @@ -8,23 +8,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id sig do params( thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(thread_id:, request_options: {}) end - def self.new(thread_id:, request_options: {}); end - sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { thread_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 9bee75b3..85823188 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to # execute this run. 
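          # A short, hedged sketch of what these params enable (assumes a
          # configured `OpenAI::Client` in `client`): because parameters are
          # now typed against the `OrHash` aliases, a plain hash type-checks
          # wherever a model instance is accepted:
          #
          #   run = client.beta.threads.runs.create(
          #     "thread_abc123",
          #     assistant_id: "asst_abc123",
          #     truncation_strategy: { type: :last_messages, last_messages: 10 }
          #   )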
@@ -21,10 +24,21 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol] + ) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + sig do + params( + include: + T::Array[OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol] + ).void + end attr_writer :include # Appends additional instructions at the end of the instructions for the run. This @@ -34,7 +48,15 @@ module OpenAI attr_accessor :additional_instructions # Adds additional messages to the thread before creating the run. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage + ] + ) + ) + end attr_accessor :additional_messages # Overrides the @@ -72,7 +94,7 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable @@ -90,7 +112,7 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Specifies the format that the model must output. Compatible with @@ -118,9 +140,9 @@ module OpenAI T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ) ) @@ -144,8 +166,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice ) ) ) @@ -159,9 +181,9 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ) @@ -179,68 +201,78 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ) - ) - .void + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ) + ).void end attr_writer :truncation_strategy sig do params( assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: + T::Array[OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] - ), + additional_messages: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::OrHash + ] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tools: T.nilable( - T::Array[ + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The ID of the @@ -346,55 +378,74 @@ module OpenAI # control the intial context window of the run. 
truncation_strategy: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - additional_instructions: T.nilable(String), - additional_messages: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]), - instructions: T.nilable(String), - max_completion_tokens: T.nilable(Integer), - max_prompt_tokens: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), - parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( + override.returns( + { + assistant_id: String, + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], + additional_instructions: T.nilable(String), + additional_messages: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage + ] + ), + instructions: T.nilable(String), + max_completion_tokens: T.nilable(Integer), + max_prompt_tokens: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + parallel_tool_calls: T::Boolean, + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( T.any( Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema ) ), - temperature: T.nilable(Float), - tool_choice: T.nilable( + temperature: T.nilable(Float), + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice ) ), - tools: T.nilable( + tools: + T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::FileSearchTool, + OpenAI::Beta::FunctionTool ) ] ), - top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy), - request_options: OpenAI::RequestOptions - } - ) + top_p: T.nilable(Float), + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class AdditionalMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text contents of the message. sig do returns( @@ -402,9 +453,9 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ) @@ -418,13 +469,21 @@ module OpenAI # most cases to represent user-generated messages. # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
- sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol) } + sig do + returns( + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol + ) + end attr_accessor :role # A list of files attached to the message, and the tools they should be added to. sig do returns( - T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]) + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment + ] + ) ) end attr_accessor :attachments @@ -440,29 +499,27 @@ module OpenAI sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ), - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, - attachments: T.nilable( - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, - OpenAI::Internal::AnyHash - ) - ] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlockParam::OrHash + ) + ] + ), + role: + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The text contents of the message. @@ -483,53 +540,71 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam ) ] ), - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, - attachments: T.nilable(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]), - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) + role: + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end # The text contents of the message. 
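            # For illustration (hash literals only; the values are made up):
            # the union accepts either a bare String or an array of
            # content-block hashes via the `OrHash` aliases:
            #
            #   { role: :user, content: "Summarize the thread so far." }
            #   { role: :user, content: [{ type: :text, text: "And cite sources." }] }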
module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) ] ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end MessageContentPartParamArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentPartParam], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Beta::Threads::MessageContentPartParam + ], OpenAI::Internal::Type::Converter ) end @@ -544,22 +619,40 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = - T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) + T.let( + :user, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol + ) ASSISTANT = - T.let(:assistant, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) + T.let( + :assistant, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end class Attachment < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } attr_reader :file_id @@ -573,8 +666,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] ) @@ -584,57 +677,71 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch::OrHash + ) + ] + ).void end attr_writer :tools sig do params( file_id: String, - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - ] - ) - .returns(T.attached_class) + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The ID of the file to attach to the message. file_id: nil, # The tools to add this file to. tools: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - tools: T::Array[ + override.returns( + { + file_id: String, + tools: + T::Array[ T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end module Tool extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) + end + class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The type of tool being defined: `file_search` sig { returns(Symbol) } attr_accessor :type @@ -643,18 +750,23 @@ module OpenAI def self.new( # The type of tool being defined: `file_search` type: :file_search - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end @@ -666,16 +778,33 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::Model::Variants + ] + ) + end + def self.variants + end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel 
+ OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol) } + sig do + returns( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol + ) + end attr_accessor :type # The number of most recent messages from the thread when constructing the context @@ -687,10 +816,10 @@ module OpenAI # control the intial context window of the run. sig do params( - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, last_messages: T.nilable(Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The truncation strategy to use for the thread. The default is `auto`. If set to @@ -701,17 +830,20 @@ module OpenAI # The number of most recent messages from the thread when constructing the context # for the run. last_messages: nil - ); end + ) + end + sig do - override - .returns( - { - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) + override.returns( + { + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash end - def to_hash; end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -721,22 +853,34 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = - T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) + T.let( + :auto, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) LAST_MESSAGES = T.let( :last_messages, - OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol ) sig do - override - .returns(T::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/beta/threads/run_list_params.rbi b/rbi/openai/models/beta/threads/run_list_params.rbi index 24f6d425..a8904daf 100644 --- a/rbi/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/openai/models/beta/threads/run_list_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -38,10 +41,18 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::RunListParams::Order::OrSymbol) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::Beta::Threads::RunListParams::Order::OrSymbol + ).void + end attr_writer :order sig do @@ -49,10 +60,9 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Beta::Threads::RunListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -72,34 +82,54 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + limit: Integer, + order: OpenAI::Beta::Threads::RunListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::RunListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Beta::Threads::RunListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Beta::Threads::RunListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/openai/models/beta/threads/run_retrieve_params.rbi index aa6cb4b9..de4d56da 100644 --- a/rbi/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/run_retrieve_params.rbi @@ -8,23 +8,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id sig do params( thread_id: String, - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(thread_id:, request_options: {}) end - def self.new(thread_id:, request_options: {}); end - sig { override.returns({thread_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { thread_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/run_status.rbi b/rbi/openai/models/beta/threads/run_status.rbi index 7eb4d991..ed3ecdb2 100644 --- a/rbi/openai/models/beta/threads/run_status.rbi +++ b/rbi/openai/models/beta/threads/run_status.rbi @@ -10,21 +10,39 @@ module OpenAI module RunStatus extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Beta::Threads::RunStatus) } OrSymbol = T.type_alias { T.any(Symbol, String) } - QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - REQUIRES_ACTION = T.let(:requires_action, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - CANCELLING = T.let(:cancelling, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) + QUEUED = + T.let(:queued, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + IN_PROGRESS = + T.let(:in_progress, 
OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + REQUIRES_ACTION = + T.let( + :requires_action, + OpenAI::Beta::Threads::RunStatus::TaggedSymbol + ) + CANCELLING = + T.let(:cancelling, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + CANCELLED = + T.let(:cancelled, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + FAILED = + T.let(:failed, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) + EXPIRED = + T.let(:expired, OpenAI::Beta::Threads::RunStatus::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::RunStatus::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index af244e16..9be1a4b3 100644 --- a/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -8,40 +8,59 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id # A list of tools for which the outputs are being submitted. - sig { returns(T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput]) } + sig do + returns( + T::Array[ + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput + ] + ) + end attr_accessor :tool_outputs sig do params( thread_id: String, - tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + tool_outputs: + T::Array[ + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput::OrHash + ], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( thread_id:, # A list of tools for which the outputs are being submitted. tool_outputs:, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - thread_id: String, - tool_outputs: T::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + thread_id: String, + tool_outputs: + T::Array[ + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput + ], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class ToolOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The output of the tool call to be submitted to continue the run. sig { returns(T.nilable(String)) } attr_reader :output @@ -57,16 +76,23 @@ module OpenAI sig { params(tool_call_id: String).void } attr_writer :tool_call_id - sig { params(output: String, tool_call_id: String).returns(T.attached_class) } + sig do + params(output: String, tool_call_id: String).returns( + T.attached_class + ) + end def self.new( # The output of the tool call to be submitted to continue the run. 
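            # Illustrative sketch, not from this patch: with `tool_outputs`
            # typed as `T::Array[ToolOutput::OrHash]`, plain hashes are accepted
            # in place of built models. Hypothetical call, assuming the
            # conventional `beta.threads.runs` resource and made-up IDs:
            #
            #   client.beta.threads.runs.submit_tool_outputs(
            #     "run_abc123",
            #     thread_id: "thread_abc123",
            #     tool_outputs: [
            #       { tool_call_id: "call_abc123", output: "70F and sunny" }
            #     ]
            #   )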
output: nil, # The ID of the tool call in the `required_action` object within the run object # the output is being submitted for. tool_call_id: nil - ); end - sig { override.returns({output: String, tool_call_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ output: String, tool_call_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/run_update_params.rbi b/rbi/openai/models/beta/threads/run_update_params.rbi index 175d4516..c99a7cf2 100644 --- a/rbi/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/openai/models/beta/threads/run_update_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id @@ -24,9 +27,8 @@ module OpenAI params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( thread_id:, @@ -38,18 +40,20 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - thread_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + thread_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi index f0b834d2..a48e0434 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the output in the outputs array. sig { returns(Integer) } attr_accessor :index @@ -22,7 +25,11 @@ module OpenAI attr_writer :logs # Text output from the Code Interpreter tool call as part of a run step. - sig { params(index: Integer, logs: String, type: Symbol).returns(T.attached_class) } + sig do + params(index: Integer, logs: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The index of the output in the outputs array. index:, @@ -30,9 +37,14 @@ module OpenAI logs: nil, # Always `logs`. 
type: :logs - ); end - sig { override.returns({index: Integer, type: Symbol, logs: String}) } - def to_hash; end + ) + end + + sig do + override.returns({ index: Integer, type: Symbol, logs: String }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 9cadda46..67c7e444 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the output in the outputs array. sig { returns(Integer) } attr_accessor :index @@ -14,24 +17,30 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + ) + ) + end attr_reader :image sig do params( - image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Internal::AnyHash) - ) - .void + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image::OrHash + ).void end attr_writer :image sig do params( index: Integer, - image: T.any(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, OpenAI::Internal::AnyHash), + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the output in the outputs array. @@ -39,16 +48,26 @@ module OpenAI image: nil, # Always `image`. type: :image - ); end + ) + end + sig do - override - .returns( - {index: Integer, type: Symbol, image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} - ) + override.returns( + { + index: Integer, + type: Symbol, + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + } + ) + end + def to_hash end - def to_hash; end class Image < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. sig { returns(T.nilable(String)) } @@ -62,9 +81,12 @@ module OpenAI # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. file_id: nil - ); end - sig { override.returns({file_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 7ea07c11..a1829caf 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -6,22 +6,26 @@ module OpenAI module Threads module Runs class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the tool call. sig { returns(String) } attr_accessor :id # The Code Interpreter tool call definition. 
- sig { returns(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter) } + sig do + returns( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Internal::AnyHash - ) - ) - .void + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter @@ -34,13 +38,10 @@ module OpenAI sig do params( id: String, - code_interpreter: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - OpenAI::Internal::AnyHash - ), + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the tool call. @@ -50,20 +51,26 @@ module OpenAI # The type of tool call. This is always going to be `code_interpreter` for this # type of tool call. type: :code_interpreter - ); end + ) + end + sig do - override - .returns( - { - id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, - type: Symbol - } - ) + override.returns( + { + id: String, + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input to the Code Interpreter tool call. sig { returns(String) } attr_accessor :input @@ -75,8 +82,8 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] ) @@ -87,15 +94,14 @@ module OpenAI sig do params( input: String, - outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - ] - ) - .returns(T.attached_class) + outputs: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs::OrHash, + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The input to the Code Interpreter tool call. @@ -104,28 +110,44 @@ module OpenAI # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. 
outputs: - ); end + ) + end + sig do - override - .returns( - { - input: String, - outputs: T::Array[ + override.returns( + { + input: String, + outputs: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Text output from the Code Interpreter tool call as part of a run step. module Output extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) + end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the Code Interpreter tool call. sig { returns(String) } attr_accessor :logs @@ -135,33 +157,40 @@ module OpenAI attr_accessor :type # Text output from the Code Interpreter tool call as part of a run step. - sig { params(logs: String, type: Symbol).returns(T.attached_class) } + sig do + params(logs: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the Code Interpreter tool call. logs:, # Always `logs`. type: :logs - ); end - sig { override.returns({logs: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ logs: String, type: Symbol }) } + def to_hash + end end class Image < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig do returns( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image ) end attr_reader :image sig do params( - image: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Internal::AnyHash - ) - ) - .void + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image::OrHash + ).void end attr_writer :image @@ -171,31 +200,36 @@ module OpenAI sig do params( - image: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - OpenAI::Internal::AnyHash - ), + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( image:, # Always `image`. type: :image - ); end + ) + end + sig do - override - .returns( - { - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, - type: Symbol - } - ) + override.returns( + { + image: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Image < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. 
sig { returns(String) } @@ -206,19 +240,24 @@ module OpenAI # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. file_id: - ); end - sig { override.returns({file_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String }) } + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index b5314dd2..b1bb2cf1 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the tool call in the tool calls array. sig { returns(Integer) } attr_accessor :index @@ -23,17 +26,20 @@ module OpenAI attr_writer :id # The Code Interpreter tool call definition. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + ) + ) + end attr_reader :code_interpreter sig do params( - code_interpreter: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Internal::AnyHash - ) - ) - .void + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::OrHash + ).void end attr_writer :code_interpreter @@ -42,13 +48,10 @@ module OpenAI params( index: Integer, id: String, - code_interpreter: T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, - OpenAI::Internal::AnyHash - ), + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the tool call in the tool calls array. @@ -60,21 +63,27 @@ module OpenAI # The type of tool call. This is always going to be `code_interpreter` for this # type of tool call. type: :code_interpreter - ); end + ) + end + sig do - override - .returns( - { - index: Integer, - type: Symbol, - id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter - } - ) + override.returns( + { + index: Integer, + type: Symbol, + id: String, + code_interpreter: + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + } + ) + end + def to_hash end - def to_hash; end class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input to the Code Interpreter tool call. 
sig { returns(T.nilable(String)) } attr_reader :input @@ -90,8 +99,8 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] ) @@ -101,15 +110,14 @@ module OpenAI sig do params( - outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - ] - ) - .void + outputs: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs::OrHash, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::OrHash + ) + ] + ).void end attr_writer :outputs @@ -117,15 +125,14 @@ module OpenAI sig do params( input: String, - outputs: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - ] - ) - .returns(T.attached_class) + outputs: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs::OrHash, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::OrHash + ) + ] + ).returns(T.attached_class) end def self.new( # The input to the Code Interpreter tool call. @@ -134,34 +141,47 @@ module OpenAI # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. outputs: nil - ); end + ) + end + sig do - override - .returns( - { - input: String, - outputs: T::Array[ + override.returns( + { + input: String, + outputs: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Text output from the Code Interpreter tool call as part of a run step. module Output extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi index 5fab3990..6ca0cd80 100644 --- a/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -6,19 +6,26 @@ module OpenAI module Threads module Runs class FileSearchToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the tool call object. sig { returns(String) } attr_accessor :id # For now, this is always going to be an empty object. 
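          # Illustrative sketch, not from this patch: the recurring alias
          #
          #   OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
          #
          # is what lets the rewritten sigs accept either a constructed model or
          # an untyped hash. Assuming the `FileSearch` fields defined below:
          #
          #   OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch.new(
          #     ranking_options: { ranker: :auto, score_threshold: 0.5 }
          #   )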
- sig { returns(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch) } + sig do + returns( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch + ) + end attr_reader :file_search sig do params( - file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Internal::AnyHash) - ) - .void + file_search: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::OrHash + ).void end attr_writer :file_search @@ -30,10 +37,10 @@ module OpenAI sig do params( id: String, - file_search: T.any(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, OpenAI::Internal::AnyHash), + file_search: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the tool call object. @@ -43,87 +50,109 @@ module OpenAI # The type of tool call. This is always going to be `file_search` for this type of # tool call. type: :file_search - ); end + ) + end + sig do - override - .returns( - {id: String, file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, type: Symbol} - ) + override.returns( + { + id: String, + file_search: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class FileSearch < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ranking options for the file search. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions + ) + ) + end attr_reader :ranking_options sig do params( - ranking_options: T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Internal::AnyHash - ) - ) - .void + ranking_options: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::OrHash + ).void end attr_writer :ranking_options # The results of the file search. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result + ] + ) + ) + end attr_reader :results sig do params( - results: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + results: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::OrHash + ] + ).void end attr_writer :results # For now, this is always going to be an empty object. sig do params( - ranking_options: T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - OpenAI::Internal::AnyHash - ), - results: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, - OpenAI::Internal::AnyHash - ) - ] - ) - .returns(T.attached_class) + ranking_options: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::OrHash, + results: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::OrHash + ] + ).returns(T.attached_class) end def self.new( # The ranking options for the file search. ranking_options: nil, # The results of the file search. 
results: nil - ); end + ) + end + sig do - override - .returns( - { - ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - results: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] - } - ) + override.returns( + { + ranking_options: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + results: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result + ] + } + ) + end + def to_hash end - def to_hash; end class RankingOptions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ranker to use for the file search. If not specified will use the `auto` # ranker. sig do returns( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol ) end attr_accessor :ranker @@ -136,10 +165,10 @@ module OpenAI # The ranking options for the file search. sig do params( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::OrSymbol, + ranker: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::OrSymbol, score_threshold: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ranker to use for the file search. If not specified will use the `auto` @@ -148,17 +177,20 @@ module OpenAI # The score threshold for the file search. All values must be a floating point # number between 0 and 1. score_threshold: - ); end + ) + end + sig do - override - .returns( - { - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, - score_threshold: Float - } - ) + override.returns( + { + ranker: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol, + score_threshold: Float + } + ) + end + def to_hash end - def to_hash; end # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
@@ -166,31 +198,41 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let( :auto, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol ) DEFAULT_2024_08_21 = T.let( :default_2024_08_21, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end class Result < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file that result was found in. sig { returns(String) } attr_accessor :file_id @@ -208,21 +250,22 @@ module OpenAI # requested via the include query parameter. sig do returns( - T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]) + T.nilable( + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content + ] + ) ) end attr_reader :content sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + content: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::OrHash + ] + ).void end attr_writer :content @@ -232,14 +275,11 @@ module OpenAI file_id: String, file_name: String, score: Float, - content: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, - OpenAI::Internal::AnyHash - ) - ] - ) - .returns(T.attached_class) + content: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::OrHash + ] + ).returns(T.attached_class) end def self.new( # The ID of the file that result was found in. @@ -252,21 +292,31 @@ module OpenAI # The content of the result that was found. The content is only included if # requested via the include query parameter. content: nil - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - file_name: String, - score: Float, - content: T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] - } - ) + override.returns( + { + file_id: String, + file_name: String, + score: Float, + content: + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content + ] + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text content of the file. 
sig { returns(T.nilable(String)) } attr_reader :text @@ -278,7 +328,7 @@ module OpenAI sig do returns( T.nilable( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol ) ) end @@ -286,57 +336,67 @@ module OpenAI sig do params( - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol - ) - .void + type: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol + ).void end attr_writer :type sig do params( text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol - ) - .returns(T.attached_class) + type: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # The text content of the file. text: nil, # The type of the content. type: nil - ); end + ) + end + sig do - override - .returns( - { - text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - } - ) + override.returns( + { + text: String, + type: + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The type of the content. module Type extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let( :text, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 5d6789be..62cd8cb2 100644 --- a/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # For now, this is always going to be an empty object. sig { returns(T.anything) } attr_accessor :file_search @@ -44,9 +47,21 @@ module OpenAI # The type of tool call. This is always going to be `file_search` for this type of # tool call. 
type: :file_search - ); end - sig { override.returns({file_search: T.anything, index: Integer, type: Symbol, id: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + file_search: T.anything, + index: Integer, + type: Symbol, + id: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call.rbi index b3e20062..90fa9db4 100644 --- a/rbi/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/function_tool_call.rbi @@ -6,19 +6,24 @@ module OpenAI module Threads module Runs class FunctionToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the tool call object. sig { returns(String) } attr_accessor :id # The definition of the function that was called. - sig { returns(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function) } + sig do + returns(OpenAI::Beta::Threads::Runs::FunctionToolCall::Function) + end attr_reader :function sig do params( - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::AnyHash) - ) - .void + function: + OpenAI::Beta::Threads::Runs::FunctionToolCall::Function::OrHash + ).void end attr_writer :function @@ -30,10 +35,10 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, OpenAI::Internal::AnyHash), + function: + OpenAI::Beta::Threads::Runs::FunctionToolCall::Function::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the tool call object. @@ -43,16 +48,26 @@ module OpenAI # The type of tool call. This is always going to be `function` for this type of # tool call. type: :function - ); end + ) + end + sig do - override - .returns( - {id: String, function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, type: Symbol} - ) + override.returns( + { + id: String, + function: + OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments passed to the function. sig { returns(String) } attr_accessor :arguments @@ -69,7 +84,11 @@ module OpenAI # The definition of the function that was called. sig do - params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) + params( + arguments: String, + name: String, + output: T.nilable(String) + ).returns(T.attached_class) end def self.new( # The arguments passed to the function. @@ -80,9 +99,16 @@ module OpenAI # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) # yet. 
output: - ); end - sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } - def to_hash; end + ) + end + + sig do + override.returns( + { arguments: String, name: String, output: T.nilable(String) } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 3a495934..8b3a5afa 100644 --- a/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the tool call in the tool calls array. sig { returns(Integer) } attr_accessor :index @@ -23,14 +26,20 @@ module OpenAI attr_writer :id # The definition of the function that was called. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function + ) + ) + end attr_reader :function sig do params( - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Internal::AnyHash) - ) - .void + function: + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function::OrHash + ).void end attr_writer :function @@ -38,10 +47,10 @@ module OpenAI params( index: Integer, id: String, - function: T.any(OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, OpenAI::Internal::AnyHash), + function: + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the tool call in the tool calls array. @@ -53,21 +62,27 @@ module OpenAI # The type of tool call. This is always going to be `function` for this type of # tool call. type: :function - ); end + ) + end + sig do - override - .returns( - { - index: Integer, - type: Symbol, - id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function - } - ) + override.returns( + { + index: Integer, + type: Symbol, + id: String, + function: + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function + } + ) + end + def to_hash end - def to_hash; end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments passed to the function. sig { returns(T.nilable(String)) } attr_reader :arguments @@ -90,7 +105,11 @@ module OpenAI # The definition of the function that was called. sig do - params(arguments: String, name: String, output: T.nilable(String)).returns(T.attached_class) + params( + arguments: String, + name: String, + output: T.nilable(String) + ).returns(T.attached_class) end def self.new( # The arguments passed to the function. @@ -101,9 +120,16 @@ module OpenAI # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) # yet. 
output: nil - ); end - sig { override.returns({arguments: String, name: String, output: T.nilable(String)}) } - def to_hash; end + ) + end + + sig do + override.returns( + { arguments: String, name: String, output: T.nilable(String) } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi index d5358c72..946fb84a 100644 --- a/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -6,17 +6,21 @@ module OpenAI module Threads module Runs class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation + ) + end attr_reader :message_creation sig do params( - message_creation: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Internal::AnyHash - ) - ) - .void + message_creation: + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation::OrHash + ).void end attr_writer :message_creation @@ -27,31 +31,34 @@ module OpenAI # Details of the message creation by the run step. sig do params( - message_creation: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - OpenAI::Internal::AnyHash - ), + message_creation: + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( message_creation:, # Always `message_creation`. type: :message_creation - ); end + ) + end + sig do - override - .returns( - { - message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, - type: Symbol - } - ) + override.returns( + { + message_creation: + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class MessageCreation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the message that was created by this run step. sig { returns(String) } attr_accessor :message_id @@ -60,9 +67,12 @@ module OpenAI def self.new( # The ID of the message that was created by this run step. message_id: - ); end - sig { override.returns({message_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ message_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi index 86e8b940..9555206e 100644 --- a/rbi/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step.rbi @@ -8,6 +8,9 @@ module OpenAI module Runs class RunStep < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -41,14 +44,20 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError)) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::Runs::RunStep::LastError) + ) + end attr_reader :last_error sig do params( - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::AnyHash)) - ) - .void + last_error: + T.nilable( + OpenAI::Beta::Threads::Runs::RunStep::LastError::OrHash + ) + ).void end attr_writer :last_error @@ -72,15 +81,19 @@ module OpenAI # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + sig do + returns( + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) + end attr_accessor :status # The details of the run step. sig do returns( T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails ) ) end @@ -92,19 +105,23 @@ module OpenAI attr_accessor :thread_id # The type of run step, which can be either `message_creation` or `tool_calls`. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + sig do + returns(OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + end attr_accessor :type # Usage statistics related to the run step. This value will be `null` while the # run step's status is `in_progress`. - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage)) } + sig do + returns(T.nilable(OpenAI::Beta::Threads::Runs::RunStep::Usage)) + end attr_reader :usage sig do params( - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::AnyHash)) - ) - .void + usage: + T.nilable(OpenAI::Beta::Threads::Runs::RunStep::Usage::OrHash) + ).void end attr_writer :usage @@ -118,21 +135,26 @@ module OpenAI created_at: Integer, expired_at: T.nilable(Integer), failed_at: T.nilable(Integer), - last_error: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, OpenAI::Internal::AnyHash)), + last_error: + T.nilable( + OpenAI::Beta::Threads::Runs::RunStep::LastError::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::OrSymbol, - step_details: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails - ), + status: OpenAI::Beta::Threads::Runs::RunStep::Status::OrSymbol, + step_details: + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::OrHash, + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails::OrHash + ), thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::OrSymbol, - usage: T.nilable(T.any(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, OpenAI::Internal::AnyHash)), + type: OpenAI::Beta::Threads::Runs::RunStep::Type::OrSymbol, + usage: + T.nilable( + OpenAI::Beta::Threads::Runs::RunStep::Usage::OrHash + ), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier of the run step, which can be referenced in API endpoints. @@ -180,38 +202,51 @@ module OpenAI usage:, # The object type, which is always `thread.run.step`. 
object: :"thread.run.step" - ); end + ) + end + sig do - override - .returns( - { - id: String, - assistant_id: String, - cancelled_at: T.nilable(Integer), - completed_at: T.nilable(Integer), - created_at: Integer, - expired_at: T.nilable(Integer), - failed_at: T.nilable(Integer), - last_error: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError), - metadata: T.nilable(T::Hash[Symbol, String]), - object: Symbol, - run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, - step_details: T.any( - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails + override.returns( + { + id: String, + assistant_id: String, + cancelled_at: T.nilable(Integer), + completed_at: T.nilable(Integer), + created_at: Integer, + expired_at: T.nilable(Integer), + failed_at: T.nilable(Integer), + last_error: + T.nilable(OpenAI::Beta::Threads::Runs::RunStep::LastError), + metadata: T.nilable(T::Hash[Symbol, String]), + object: Symbol, + run_id: String, + status: + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, + step_details: + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails ), - thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, - usage: T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStep::Usage) - } - ) + thread_id: String, + type: + OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, + usage: T.nilable(OpenAI::Beta::Threads::Runs::RunStep::Usage) + } + ) + end + def to_hash end - def to_hash; end class LastError < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # One of `server_error` or `rate_limit_exceeded`. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + sig do + returns( + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol + ) + end attr_accessor :code # A human-readable description of the error. @@ -221,38 +256,65 @@ module OpenAI # The last error associated with this run step. Will be `null` if there are no # errors. sig do - params(code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, message: String) - .returns(T.attached_class) + params( + code: + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::OrSymbol, + message: String + ).returns(T.attached_class) end def self.new( # One of `server_error` or `rate_limit_exceeded`. code:, # A human-readable description of the error. message: - ); end + ) + end + sig do - override - .returns( - {code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, message: String} - ) + override.returns( + { + code: + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol, + message: String + } + ) + end + def to_hash end - def to_hash; end # One of `server_error` or `rate_limit_exceeded`. 
module Code extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = - T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + T.let( + :server_error, + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol + ) RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) + T.let( + :rate_limit_exceeded, + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol + ] + ) + end + def self.values + end end end @@ -261,48 +323,108 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) + CANCELLED = + T.let( + :cancelled, + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) + EXPIRED = + T.let( + :expired, + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol + ] + ) + end + def self.values + end end # The details of the run step. module StepDetails extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The type of run step, which can be either `message_creation` or `tool_calls`. 
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE_CREATION = - T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) - TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) + T.let( + :message_creation, + OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol + ) + TOOL_CALLS = + T.let( + :tool_calls, + OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol + ] + ) + end + def self.values + end end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of completion tokens used over the course of the run step. sig { returns(Integer) } attr_accessor :completion_tokens @@ -331,11 +453,20 @@ module OpenAI prompt_tokens:, # Total number of tokens used (prompt + completion). total_tokens: - ); end + ) + end + sig do - override.returns({completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer}) + override.returns( + { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi index e70a40af..2f6d25e5 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi @@ -8,13 +8,16 @@ module OpenAI module Runs class RunStepDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The details of the run step. sig do returns( T.nilable( T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject ) ) ) @@ -23,55 +26,66 @@ module OpenAI sig do params( - step_details: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - ) - ) - .void + step_details: + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::OrHash, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject::OrHash + ) + ).void end attr_writer :step_details # The delta containing the fields that have changed on the run step. sig do params( - step_details: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject - ) - ) - .returns(T.attached_class) + step_details: + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::OrHash, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject::OrHash + ) + ).returns(T.attached_class) end def self.new( # The details of the run step. 
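Per the `Usage` signatures just above, `self.new` takes the three token counts as required keywords and `to_hash` returns the same shape; a small sketch with illustrative values:

    usage = OpenAI::Beta::Threads::Runs::RunStep::Usage.new(
      completion_tokens: 12,
      prompt_tokens: 34,
      total_tokens: 46
    )
    usage.to_hash
    # => {completion_tokens: 12, prompt_tokens: 34, total_tokens: 46}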
step_details: nil - ); end + ) + end + sig do - override - .returns( - { - step_details: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject + override.returns( + { + step_details: + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject ) - } - ) + } + ) + end + def to_hash end - def to_hash; end # The details of the run step. module StepDetails extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi index 3828467d..75d170fe 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -8,15 +8,22 @@ module OpenAI module Runs class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id # The delta containing the fields that have changed on the run step. - sig { returns(OpenAI::Models::Beta::Threads::Runs::RunStepDelta) } + sig { returns(OpenAI::Beta::Threads::Runs::RunStepDelta) } attr_reader :delta - sig { params(delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::AnyHash)).void } + sig do + params( + delta: OpenAI::Beta::Threads::Runs::RunStepDelta::OrHash + ).void + end attr_writer :delta # The object type, which is always `thread.run.step.delta`. @@ -28,10 +35,9 @@ module OpenAI sig do params( id: String, - delta: T.any(OpenAI::Models::Beta::Threads::Runs::RunStepDelta, OpenAI::Internal::AnyHash), + delta: OpenAI::Beta::Threads::Runs::RunStepDelta::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier of the run step, which can be referenced in API endpoints. @@ -40,9 +46,20 @@ module OpenAI delta:, # The object type, which is always `thread.run.step.delta`. object: :"thread.run.step.delta" - ); end - sig { override.returns({id: String, delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, object: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + delta: OpenAI::Beta::Threads::Runs::RunStepDelta, + object: Symbol + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index a05e1586..caad103a 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -8,52 +8,61 @@ module OpenAI module Runs class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Always `message_creation`. 
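Each model now opens with an `OrHash` alias (`T.any(T.self_type, OpenAI::Internal::AnyHash)`), so constructor and writer positions accept either a model instance or a bare hash. A hedged sketch against `RunStepDeltaEvent`, whose `delta:` parameter is typed `RunStepDelta::OrHash` (the id is a placeholder, and `object:` defaults to `:"thread.run.step.delta"`):

    event = OpenAI::Beta::Threads::Runs::RunStepDeltaEvent.new(
      id: "step_123", # placeholder identifier
      delta: {}       # a bare Hash satisfies RunStepDelta::OrHash
    )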
sig { returns(Symbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + ) + ) + end attr_reader :message_creation sig do params( - message_creation: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Internal::AnyHash - ) - ) - .void + message_creation: + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation::OrHash + ).void end attr_writer :message_creation # Details of the message creation by the run step. sig do params( - message_creation: T.any( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, - OpenAI::Internal::AnyHash - ), + message_creation: + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( message_creation: nil, # Always `message_creation`. type: :message_creation - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation - } - ) + override.returns( + { + type: Symbol, + message_creation: + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + } + ) + end + def to_hash end - def to_hash; end class MessageCreation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the message that was created by this run step. sig { returns(T.nilable(String)) } attr_reader :message_id @@ -65,9 +74,12 @@ module OpenAI def self.new( # The ID of the message that was created by this run step. 
message_id: nil - ); end - sig { override.returns({message_id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ message_id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/openai/models/beta/threads/runs/run_step_include.rbi index 2b97eb05..1b0fdb68 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_include.rbi @@ -10,17 +10,27 @@ module OpenAI module RunStepInclude extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Beta::Threads::Runs::RunStepInclude) + end OrSymbol = T.type_alias { T.any(Symbol, String) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = T.let( :"step_details.tool_calls[*].file_search.results[*].content", - OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol + OpenAI::Beta::Threads::Runs::RunStepInclude::TaggedSymbol ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/openai/models/beta/threads/runs/step_list_params.rbi index bda4f8f6..013b8ef3 100644 --- a/rbi/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/openai/models/beta/threads/runs/step_list_params.rbi @@ -9,6 +9,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id @@ -39,10 +42,25 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ] + ) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + sig do + params( + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ] + ).void + end attr_writer :include # A limit on the number of objects to be returned. Limit can range between 1 and @@ -55,10 +73,21 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- sig { returns(T.nilable(OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::Runs::StepListParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol).void } + sig do + params( + order: + OpenAI::Beta::Threads::Runs::StepListParams::Order::OrSymbol + ).void + end attr_writer :order sig do @@ -66,12 +95,15 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: + OpenAI::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( thread_id:, @@ -100,36 +132,63 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - thread_id: String, - after: String, - before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + thread_id: String, + after: String, + before: String, + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], + limit: Integer, + order: + OpenAI::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
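Given the `StepListParams` constructor above, only `thread_id:` is required, and `order:` is typed as an `OrSymbol`, so a bare Symbol is accepted. A sketch with a placeholder identifier:

    params = OpenAI::Beta::Threads::Runs::StepListParams.new(
      thread_id: "thread_abc123", # placeholder thread identifier
      limit: 20,
      order: :desc                # OrSymbol positions accept bare Symbols
    )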
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Runs::StepListParams::Order + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi index a1c6e7c5..bb2bc31f 100644 --- a/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -9,6 +9,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :thread_id @@ -22,20 +25,37 @@ module OpenAI # See the # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ] + ) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol]).void } + sig do + params( + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ] + ).void + end attr_writer :include sig do params( thread_id: String, run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( thread_id:, @@ -49,19 +69,24 @@ module OpenAI # for more information. 
include: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - thread_id: String, - run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + thread_id: String, + run_id: String, + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/runs/tool_call.rbi b/rbi/openai/models/beta/threads/runs/tool_call.rbi index bd900d9c..a202bb18 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call.rbi @@ -9,13 +9,22 @@ module OpenAI module ToolCall extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Beta::Threads::Runs::FunctionToolCall ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::Runs::ToolCall::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/runs/tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta.rbi index 336a93e9..f76f8eaa 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call_delta.rbi @@ -9,13 +9,22 @@ module OpenAI module ToolCallDelta extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] + Variants = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta ) + end + + sig do + override.returns( + T::Array[OpenAI::Beta::Threads::Runs::ToolCallDelta::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi index ea795b43..09699ee5 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Always `tool_calls`. 
sig { returns(Symbol) } attr_accessor :type @@ -18,9 +21,9 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta ) ] ) @@ -30,33 +33,31 @@ module OpenAI sig do params( - tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) - ] - ) - .void + tool_calls: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::OrHash, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta::OrHash, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::OrHash + ) + ] + ).void end attr_writer :tool_calls # Details of the tool call. sig do params( - tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta - ) - ], + tool_calls: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::OrHash, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta::OrHash, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # An array of tool calls the run step was involved in. These can be associated @@ -65,23 +66,26 @@ module OpenAI tool_calls: nil, # Always `tool_calls`. type: :tool_calls - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - tool_calls: T::Array[ + override.returns( + { + type: Symbol, + tool_calls: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi index a5240f76..b34c48e7 100644 --- a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -6,6 +6,9 @@ module OpenAI module Threads module Runs class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
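`ToolCallDeltaObject.new` defaults `type:` to `:tool_calls` and leaves `tool_calls:` optional, mirroring the streamed deltas, and the tool-call unions expose their three members through `variants`. A brief sketch (the exact `to_hash` output for unset optional fields is an assumption):

    delta_obj = OpenAI::Beta::Threads::Runs::ToolCallDeltaObject.new
    delta_obj.to_hash # => {type: :tool_calls}, with tool_calls omitted when unset

    OpenAI::Beta::Threads::Runs::ToolCallDelta.variants
    # => [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta,
    #     OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta,
    #     OpenAI::Beta::Threads::Runs::FunctionToolCallDelta]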
@@ -13,9 +16,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Beta::Threads::Runs::FunctionToolCall ) ] ) @@ -29,17 +32,16 @@ module OpenAI # Details of the tool call. sig do params( - tool_calls: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall - ) - ], + tool_calls: + T::Array[ + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::OrHash, + OpenAI::Beta::Threads::Runs::FileSearchToolCall::OrHash, + OpenAI::Beta::Threads::Runs::FunctionToolCall::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # An array of tool calls the run step was involved in. These can be associated @@ -48,23 +50,26 @@ module OpenAI tool_calls:, # Always `tool_calls`. type: :tool_calls - ); end + ) + end + sig do - override - .returns( - { - tool_calls: T::Array[ + override.returns( + { + tool_calls: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Beta::Threads::Runs::FunctionToolCall ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/text.rbi b/rbi/openai/models/beta/threads/text.rbi index 64eb1b67..9827491d 100644 --- a/rbi/openai/models/beta/threads/text.rbi +++ b/rbi/openai/models/beta/threads/text.rbi @@ -5,12 +5,15 @@ module OpenAI module Beta module Threads class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do returns( T::Array[ T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation + OpenAI::Beta::Threads::FileCitationAnnotation, + OpenAI::Beta::Threads::FilePathAnnotation ) ] ) @@ -23,37 +26,39 @@ module OpenAI sig do params( - annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathAnnotation - ) - ], + annotations: + T::Array[ + T.any( + OpenAI::Beta::Threads::FileCitationAnnotation::OrHash, + OpenAI::Beta::Threads::FilePathAnnotation::OrHash + ) + ], value: String - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( annotations:, # The data that makes up the text. 
value: - ); end + ) + end + sig do - override - .returns( - { - annotations: T::Array[ + override.returns( + { + annotations: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::FileCitationAnnotation, - OpenAI::Models::Beta::Threads::FilePathAnnotation + OpenAI::Beta::Threads::FileCitationAnnotation, + OpenAI::Beta::Threads::FilePathAnnotation ) ], - value: String - } - ) + value: String + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/text_content_block.rbi b/rbi/openai/models/beta/threads/text_content_block.rbi index c1c5eb8c..7474675b 100644 --- a/rbi/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/openai/models/beta/threads/text_content_block.rbi @@ -5,10 +5,13 @@ module OpenAI module Beta module Threads class TextContentBlock < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Beta::Threads::Text) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Beta::Threads::Text) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash)).void } + sig { params(text: OpenAI::Beta::Threads::Text::OrHash).void } attr_writer :text # Always `text`. @@ -17,16 +20,25 @@ module OpenAI # The text content that is part of a message. sig do - params(text: T.any(OpenAI::Models::Beta::Threads::Text, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + text: OpenAI::Beta::Threads::Text::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( text:, # Always `text`. type: :text - ); end - sig { override.returns({text: OpenAI::Models::Beta::Threads::Text, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { text: OpenAI::Beta::Threads::Text, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/text_content_block_param.rbi b/rbi/openai/models/beta/threads/text_content_block_param.rbi index 20626187..d96847a2 100644 --- a/rbi/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/openai/models/beta/threads/text_content_block_param.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class TextContentBlockParam < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text content to be sent to the model sig { returns(String) } attr_accessor :text @@ -20,9 +23,12 @@ module OpenAI text:, # Always `text`. 
type: :text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/beta/threads/text_delta.rbi b/rbi/openai/models/beta/threads/text_delta.rbi index 8d384acc..cd39960e 100644 --- a/rbi/openai/models/beta/threads/text_delta.rbi +++ b/rbi/openai/models/beta/threads/text_delta.rbi @@ -5,13 +5,16 @@ module OpenAI module Beta module Threads class TextDelta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do returns( T.nilable( T::Array[ T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + OpenAI::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Beta::Threads::FilePathDeltaAnnotation ) ] ) @@ -21,15 +24,14 @@ module OpenAI sig do params( - annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) - ] - ) - .void + annotations: + T::Array[ + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::OrHash, + OpenAI::Beta::Threads::FilePathDeltaAnnotation::OrHash + ) + ] + ).void end attr_writer :annotations @@ -42,37 +44,39 @@ module OpenAI sig do params( - annotations: T::Array[ - T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation - ) - ], + annotations: + T::Array[ + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::OrHash, + OpenAI::Beta::Threads::FilePathDeltaAnnotation::OrHash + ) + ], value: String - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( annotations: nil, # The data that makes up the text. value: nil - ); end + ) + end + sig do - override - .returns( - { - annotations: T::Array[ + override.returns( + { + annotations: + T::Array[ T.any( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + OpenAI::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Beta::Threads::FilePathDeltaAnnotation ) ], - value: String - } - ) + value: String + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/beta/threads/text_delta_block.rbi b/rbi/openai/models/beta/threads/text_delta_block.rbi index e054d1d2..7de27ada 100644 --- a/rbi/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/openai/models/beta/threads/text_delta_block.rbi @@ -5,6 +5,9 @@ module OpenAI module Beta module Threads class TextDeltaBlock < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part in the message. sig { returns(Integer) } attr_accessor :index @@ -13,20 +16,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { returns(T.nilable(OpenAI::Models::Beta::Threads::TextDelta)) } + sig { returns(T.nilable(OpenAI::Beta::Threads::TextDelta)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::AnyHash)).void } + sig { params(text: OpenAI::Beta::Threads::TextDelta::OrHash).void } attr_writer :text # The text content that is part of a message. 
sig do params( index: Integer, - text: T.any(OpenAI::Models::Beta::Threads::TextDelta, OpenAI::Internal::AnyHash), + text: OpenAI::Beta::Threads::TextDelta::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part in the message. @@ -34,9 +36,20 @@ module OpenAI text: nil, # Always `text`. type: :text - ); end - sig { override.returns({index: Integer, type: Symbol, text: OpenAI::Models::Beta::Threads::TextDelta}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + index: Integer, + type: Symbol, + text: OpenAI::Beta::Threads::TextDelta + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index 9986624b..1e90aecc 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -6,13 +6,15 @@ module OpenAI module Chat class ChatCompletion < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A unique identifier for the chat completion. sig { returns(String) } attr_accessor :id # A list of chat completion choices. Can be more than one if `n` is greater # than 1. - sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice]) } + sig { returns(T::Array[OpenAI::Chat::ChatCompletion::Choice]) } attr_accessor :choices # The Unix timestamp (in seconds) of when the chat completion was created. @@ -44,7 +46,11 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol)) } + sig do + returns( + T.nilable(OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol) + ) + end attr_accessor :service_tier # This fingerprint represents the backend configuration that the model runs with. @@ -58,10 +64,10 @@ module OpenAI attr_writer :system_fingerprint # Usage statistics for the completion request. - sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } + sig { returns(T.nilable(OpenAI::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)).void } + sig { params(usage: OpenAI::CompletionUsage::OrHash).void } attr_writer :usage # Represents a chat completion response returned by model, based on the provided @@ -69,15 +75,15 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletion::Choice, OpenAI::Internal::AnyHash)], + choices: T::Array[OpenAI::Chat::ChatCompletion::Choice::OrHash], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::OrSymbol), + service_tier: + T.nilable(OpenAI::Chat::ChatCompletion::ServiceTier::OrSymbol), system_fingerprint: String, - usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash), + usage: OpenAI::CompletionUsage::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A unique identifier for the chat completion. @@ -116,32 +122,44 @@ module OpenAI usage: nil, # The object type, which is always `chat.completion`. 
object: :"chat.completion" - ); end + ) + end + sig do - override - .returns( - { - id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletion::Choice], - created: Integer, - model: String, - object: Symbol, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol), - system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage - } - ) + override.returns( + { + id: String, + choices: T::Array[OpenAI::Chat::ChatCompletion::Choice], + created: Integer, + model: String, + object: Symbol, + service_tier: + T.nilable( + OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol + ), + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } + ) + end + def to_hash end - def to_hash; end class Choice < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, `content_filter` if # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + sig do + returns( + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) + end attr_accessor :finish_reason # The index of the choice in the list of choices. @@ -149,32 +167,41 @@ module OpenAI attr_accessor :index # Log probability information for the choice. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs)) } + sig do + returns(T.nilable(OpenAI::Chat::ChatCompletion::Choice::Logprobs)) + end attr_reader :logprobs sig do params( - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::AnyHash)) - ) - .void + logprobs: + T.nilable( + OpenAI::Chat::ChatCompletion::Choice::Logprobs::OrHash + ) + ).void end attr_writer :logprobs # A chat completion message generated by the model. - sig { returns(OpenAI::Models::Chat::ChatCompletionMessage) } + sig { returns(OpenAI::Chat::ChatCompletionMessage) } attr_reader :message - sig { params(message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::AnyHash)).void } + sig do + params(message: OpenAI::Chat::ChatCompletionMessage::OrHash).void + end attr_writer :message sig do params( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, + finish_reason: + OpenAI::Chat::ChatCompletion::Choice::FinishReason::OrSymbol, index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, OpenAI::Internal::AnyHash)), - message: T.any(OpenAI::Models::Chat::ChatCompletionMessage, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + logprobs: + T.nilable( + OpenAI::Chat::ChatCompletion::Choice::Logprobs::OrHash + ), + message: OpenAI::Chat::ChatCompletionMessage::OrHash + ).returns(T.attached_class) end def self.new( # The reason the model stopped generating tokens. This will be `stop` if the model @@ -190,19 +217,23 @@ module OpenAI logprobs:, # A chat completion message generated by the model. 
message: - ); end + ) + end + sig do - override - .returns( - { - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, - index: Integer, - logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs), - message: OpenAI::Models::Chat::ChatCompletionMessage - } - ) + override.returns( + { + finish_reason: + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol, + index: Integer, + logprobs: + T.nilable(OpenAI::Chat::ChatCompletion::Choice::Logprobs), + message: OpenAI::Chat::ChatCompletionMessage + } + ) + end + def to_hash end - def to_hash; end # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -213,54 +244,109 @@ module OpenAI module FinishReason extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletion::Choice::FinishReason + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) - TOOL_CALLS = T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + STOP = + T.let( + :stop, + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) + LENGTH = + T.let( + :length, + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) + TOOL_CALLS = + T.let( + :tool_calls, + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + T.let( + :content_filter, + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) FUNCTION_CALL = - T.let(:function_call, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) + T.let( + :function_call, + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol + ] + ) + end + def self.values + end end class Logprobs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of message content tokens with log probability information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionTokenLogprob]) + ) + end attr_accessor :content # A list of message refusal tokens with log probability information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionTokenLogprob]) + ) + end attr_accessor :refusal # Log probability information for the choice. 
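With the `FinishReason` members defined as above, downstream code can branch on a choice's stop condition with an ordinary `case`, since the tagged symbols are plain Symbols at runtime. A sketch assuming `choice` is a `ChatCompletion::Choice`; the handlers are hypothetical:

    case choice.finish_reason
    when :stop
      handle_text(choice.message)  # hypothetical: natural stop or stop sequence
    when :tool_calls
      handle_tools(choice.message) # hypothetical: the model called a tool
    else
      # :length, :content_filter, or the deprecated :function_call
    end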
sig do params( - content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]), - refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]) - ) - .returns(T.attached_class) + content: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::OrHash] + ), + refusal: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::OrHash] + ) + ).returns(T.attached_class) end def self.new( # A list of message content tokens with log probability information. content:, # A list of message refusal tokens with log probability information. refusal: - ); end + ) + end + sig do - override - .returns( - { - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) - } - ) + override.returns( + { + content: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob] + ), + refusal: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob] + ) + } + ) + end + def to_hash end - def to_hash; end end end @@ -284,15 +370,35 @@ module OpenAI module ServiceTier extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletion::ServiceTier) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) - FLEX = T.let(:flex, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol + ) + DEFAULT = + T.let( + :default, + OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol + ) + FLEX = + T.let( + :flex, + OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index 783cdfc2..b56149a1 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -2,26 +2,33 @@ module OpenAI module Models - ChatCompletionAssistantMessageParam = Chat::ChatCompletionAssistantMessageParam + ChatCompletionAssistantMessageParam = + Chat::ChatCompletionAssistantMessageParam module Chat class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The role of the messages author, in this case `assistant`. sig { returns(Symbol) } attr_accessor :role # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio)) } + sig do + returns( + T.nilable(OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio) + ) + end attr_reader :audio sig do params( - audio: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::AnyHash) - ) - ) - .void + audio: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio::OrHash + ) + ).void end attr_writer :audio @@ -34,8 +41,8 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal ) ] ) @@ -46,16 +53,22 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall + ) + ) + end attr_reader :function_call sig do params( - function_call: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Internal::AnyHash) - ) - ) - .void + function_call: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall::OrHash + ) + ).void end attr_writer :function_call @@ -72,44 +85,50 @@ module OpenAI attr_accessor :refusal # The tool calls generated by the model, such as function calls. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionMessageToolCall]) + ) + end attr_reader :tool_calls sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)] - ) - .void + tool_calls: + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash] + ).void end attr_writer :tool_calls # Messages sent by the model in response to user messages. sig do params( - audio: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, OpenAI::Internal::AnyHash) - ), - content: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ) - ), - function_call: T.nilable( - T.any(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, OpenAI::Internal::AnyHash) - ), + audio: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio::OrHash + ), + content: + T.nilable( + T.any( + String, + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText::OrHash, + OpenAI::Chat::ChatCompletionContentPartRefusal::OrHash + ) + ] + ) + ), + function_call: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall::OrHash + ), name: String, refusal: T.nilable(String), - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)], + tool_calls: + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash], role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Data about a previous audio response from the model. @@ -130,34 +149,46 @@ module OpenAI tool_calls: nil, # The role of the messages author, in this case `assistant`. 
role: :assistant - ); end + ) + end + sig do - override - .returns( - { - role: Symbol, - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio), - content: T.nilable( + override.returns( + { + role: Symbol, + audio: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio + ), + content: + T.nilable( T.any( String, T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal ) ] ) ), - function_call: T.nilable(OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall), - name: String, - refusal: T.nilable(String), - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] - } - ) + function_call: + T.nilable( + OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall + ), + name: String, + refusal: T.nilable(String), + tool_calls: T::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + ) + end + def to_hash end - def to_hash; end class Audio < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for a previous audio response from the model. sig { returns(String) } attr_accessor :id @@ -168,9 +199,12 @@ module OpenAI def self.new( # Unique identifier for a previous audio response from the model. id: - ); end - sig { override.returns({id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end end # The contents of the assistant message. Required unless `tool_calls` or @@ -178,44 +212,67 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal + ) + ] + ) + end + # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). module ArrayOfContentPart extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart::Variants + ] + ) + end + def self.variants end - def self.variants; end end sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartRefusal - ) - ] - ] - ) + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end ArrayOfContentPartArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart], + OpenAI::Internal::Type::ArrayOf[ + union: + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart + ], OpenAI::Internal::Type::Converter ) end class FunctionCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments to call the function with, as generated by the model in JSON # format. 
Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -229,7 +286,9 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns(T.attached_class) + end def self.new( # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -238,9 +297,12 @@ module OpenAI arguments:, # The name of the function to call. name: - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_audio.rbi b/rbi/openai/models/chat/chat_completion_audio.rbi index ce531040..34c22c71 100644 --- a/rbi/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_audio.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for this audio response. sig { returns(String) } attr_accessor :id @@ -28,7 +30,12 @@ module OpenAI # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). sig do - params(id: String, data: String, expires_at: Integer, transcript: String).returns(T.attached_class) + params( + id: String, + data: String, + expires_at: Integer, + transcript: String + ).returns(T.attached_class) end def self.new( # Unique identifier for this audio response. @@ -41,9 +48,21 @@ module OpenAI expires_at:, # Transcript of the audio generated by the model. transcript: - ); end - sig { override.returns({id: String, data: String, expires_at: Integer, transcript: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + data: String, + expires_at: Integer, + transcript: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index 2b135efe..03887e62 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -6,14 +6,25 @@ module OpenAI module Chat class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. - sig { returns(OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol) } + sig do + returns(OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol) + end attr_accessor :format_ # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. - sig { returns(T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol)) } + sig do + returns( + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + ) + end attr_accessor :voice # Parameters for audio output. 
Required when audio output is requested with @@ -21,10 +32,13 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/audio). sig do params( - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - ) - .returns(T.attached_class) + format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + ).returns(T.attached_class) end def self.new( # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, @@ -33,35 +47,75 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. voice: - ); end + ) + end + sig do - override - .returns( - { - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: T.any(String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol) - } - ) + override.returns( + { + format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, + voice: + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + } + ) + end + def to_hash end - def to_hash; end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. module Format extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - AAC = T.let(:aac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - MP3 = T.let(:mp3, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - FLAC = T.let(:flac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - OPUS = T.let(:opus, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) - PCM16 = T.let(:pcm16, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) + WAV = + T.let( + :wav, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) + AAC = + T.let( + :aac, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) + MP3 = + T.let( + :mp3, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) + FLAC = + T.let( + :flac, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) + OPUS = + T.let( + :opus, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) + PCM16 = + T.let( + :pcm16, + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionAudioParam::Format::TaggedSymbol + ] + ) + end + def self.values + end end # The voice the model uses to respond. 
Supported voices are `alloy`, `ash`, @@ -69,23 +123,83 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + end + + sig do + override.returns( + T::Array[OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants] + ) + end + def self.variants + end - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - BALLAD = T.let(:ballad, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - CORAL = T.let(:coral, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - ECHO = T.let(:echo, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - FABLE = T.let(:fable, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - ONYX = T.let(:onyx, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - NOVA = T.let(:nova, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - SAGE = T.let(:sage, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - SHIMMER = T.let(:shimmer, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) - VERSE = T.let(:verse, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) + ALLOY = + T.let( + :alloy, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + ASH = + T.let( + :ash, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + BALLAD = + T.let( + :ballad, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + CORAL = + T.let( + :coral, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + ECHO = + T.let( + :echo, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + FABLE = + T.let( + :fable, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + ONYX = + T.let( + :onyx, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + NOVA = + T.let( + :nova, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + SAGE = + T.let( + :sage, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + SHIMMER = + T.let( + :shimmer, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) + VERSE = + T.let( + :verse, + OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol + ) end end end diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index e5905846..c495ed49 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A unique identifier for the chat completion. Each chunk has the same ID. sig { returns(String) } attr_accessor :id @@ -13,7 +15,7 @@ module OpenAI # A list of chat completion choices. Can contain more than one elements if `n` is # greater than 1. 
Can also be empty for the last chunk if you set # `stream_options: {"include_usage": true}`. - sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice]) } + sig { returns(T::Array[OpenAI::Chat::ChatCompletionChunk::Choice]) } attr_accessor :choices # The Unix timestamp (in seconds) of when the chat completion was created. Each @@ -46,7 +48,13 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ) + ) + end attr_accessor :service_tier # This fingerprint represents the backend configuration that the model runs with. @@ -65,10 +73,10 @@ module OpenAI # # **NOTE:** If the stream is interrupted or cancelled, you may not receive the # final usage chunk which contains the total token usage for the request. - sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } + sig { returns(T.nilable(OpenAI::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash))).void } + sig { params(usage: T.nilable(OpenAI::CompletionUsage::OrHash)).void } attr_writer :usage # Represents a streamed chunk of a chat completion response returned by the model, @@ -77,15 +85,18 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice, OpenAI::Internal::AnyHash)], + choices: + T::Array[OpenAI::Chat::ChatCompletionChunk::Choice::OrHash], created: Integer, model: String, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::OrSymbol), + service_tier: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::ServiceTier::OrSymbol + ), system_fingerprint: String, - usage: T.nilable(T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)), + usage: T.nilable(OpenAI::CompletionUsage::OrHash), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A unique identifier for the chat completion. Each chunk has the same ID. @@ -131,32 +142,41 @@ module OpenAI usage: nil, # The object type, which is always `chat.completion.chunk`. object: :"chat.completion.chunk" - ); end + ) + end + sig do - override - .returns( - { - id: String, - choices: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], - created: Integer, - model: String, - object: Symbol, - service_tier: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol), - system_fingerprint: String, - usage: T.nilable(OpenAI::Models::CompletionUsage) - } - ) + override.returns( + { + id: String, + choices: T::Array[OpenAI::Chat::ChatCompletionChunk::Choice], + created: Integer, + model: String, + object: Symbol, + service_tier: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ), + system_fingerprint: String, + usage: T.nilable(OpenAI::CompletionUsage) + } + ) + end + def to_hash end - def to_hash; end class Choice < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A chat completion delta generated by streamed model responses. 
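The pattern running through this whole patch is visible above: each model class gains an `OrHash` alias, so anywhere a typed object is accepted, a plain hash is too. A minimal sketch of what that means for callers, using the `ChatCompletionAudioParam` signatures shown earlier; the client wiring and `create` call are assumed from the rest of the SDK, not from this patch, and the model name is illustrative:

```ruby
require "openai" # assumed gem entry point; client setup is not shown in this patch

client = OpenAI::Client.new # assumed to read OPENAI_API_KEY from the environment

# Typed form, matching the `self.new(format_:, voice:)` signature above;
# a plain hash like { format_: :mp3, voice: :alloy } would be accepted too,
# since the parameter position is typed against the `OrHash` alias.
audio = OpenAI::Chat::ChatCompletionAudioParam.new(format_: :mp3, voice: :alloy)

response = client.chat.completions.create(
  model: "gpt-4o-audio-preview", # illustrative model name
  modalities: [:text, :audio],   # illustrative; not defined in this hunk
  audio: audio,
  messages: [{ role: :user, content: "Say hello." }]
)
```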
- sig { returns(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta) } + sig { returns(OpenAI::Chat::ChatCompletionChunk::Choice::Delta) } attr_reader :delta sig do - params(delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::AnyHash)) - .void + params( + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::OrHash + ).void end attr_writer :delta @@ -166,7 +186,13 @@ module OpenAI # content was omitted due to a flag from our content filters, `tool_calls` if the # model called a tool, or `function_call` (deprecated) if the model called a # function. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) + ) + end attr_accessor :finish_reason # The index of the choice in the list of choices. @@ -174,25 +200,36 @@ module OpenAI attr_accessor :index # Log probability information for the choice. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs)) } + sig do + returns( + T.nilable(OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs) + ) + end attr_reader :logprobs sig do params( - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::AnyHash)) - ) - .void + logprobs: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs::OrHash + ) + ).void end attr_writer :logprobs sig do params( - delta: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, OpenAI::Internal::AnyHash), - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol), + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::OrHash, + finish_reason: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::OrSymbol + ), index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, OpenAI::Internal::AnyHash)) - ) - .returns(T.attached_class) + logprobs: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs::OrHash + ) + ).returns(T.attached_class) end def self.new( # A chat completion delta generated by streamed model responses. @@ -208,35 +245,50 @@ module OpenAI index:, # Log probability information for the choice. logprobs: nil - ); end + ) + end + sig do - override - .returns( - { - delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol), - index: Integer, - logprobs: T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs) - } - ) + override.returns( + { + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ), + index: Integer, + logprobs: + T.nilable(OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs) + } + ) + end + def to_hash end - def to_hash; end class Delta < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the chunk message. sig { returns(T.nilable(String)) } attr_accessor :content # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall + ) + ) + end attr_reader :function_call sig do params( - function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Internal::AnyHash) - ) - .void + function_call: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall::OrHash + ).void end attr_writer :function_call @@ -245,20 +297,41 @@ module OpenAI attr_accessor :refusal # The role of the author of this message. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) + ) + end attr_reader :role - sig { params(role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol).void } + sig do + params( + role: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol + ).void + end attr_writer :role - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall])) } + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall + ] + ) + ) + end attr_reader :tool_calls sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::AnyHash)] - ) - .void + tool_calls: + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::OrHash + ] + ).void end attr_writer :tool_calls @@ -266,12 +339,16 @@ module OpenAI sig do params( content: T.nilable(String), - function_call: T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, OpenAI::Internal::AnyHash), + function_call: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall::OrHash, refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol, - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + role: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::OrSymbol, + tool_calls: + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::OrHash + ] + ).returns(T.attached_class) end def self.new( # The contents of the chunk message. @@ -284,22 +361,32 @@ module OpenAI # The role of the author of this message. 
role: nil, tool_calls: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.nilable(String), - function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, - refusal: T.nilable(String), - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] - } - ) + override.returns( + { + content: T.nilable(String), + function_call: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + refusal: T.nilable(String), + role: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol, + tool_calls: + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall + ] + } + ) + end + def to_hash end - def to_hash; end class FunctionCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -319,7 +406,11 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns( + T.attached_class + ) + end def self.new( # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -328,9 +419,12 @@ module OpenAI arguments: nil, # The name of the function to call. name: nil - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end # The role of the author of this message. 
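The `Delta` fields above are fragments: a streamed completion spells out `content` a few characters per chunk, and `role` typically arrives only on the first chunk. A hedged sketch of accumulating them, assuming `stream` is an enumerable of `ChatCompletionChunk` objects obtained elsewhere (the streaming entry point is not part of this patch):

```ruby
# `stream` is assumed to be an Enumerable of OpenAI::Chat::ChatCompletionChunk.
content = +""
role = nil

stream.each do |chunk|
  choice = chunk.choices.first
  next if choice.nil? # the final usage chunk can have an empty `choices` array

  delta = choice.delta
  role ||= delta.role            # usually present only on the first chunk
  content << delta.content.to_s  # `content` is nilable, per the sig above

  break if choice.finish_reason  # non-nil only on the last chunk for this choice
end

puts "#{role}: #{content}"
```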
@@ -338,22 +432,55 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } DEVELOPER = - T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + T.let( + :developer, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) + USER = + T.let( + :user, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) ASSISTANT = - T.let(:assistant, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) - TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) + T.let( + :assistant, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) + TOOL = + T.let( + :tool, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol + ] + ) + end + def self.values + end end class ToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(Integer) } attr_accessor :index @@ -364,38 +491,50 @@ module OpenAI sig { params(id: String).void } attr_writer :id - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function + ) + ) + end attr_reader :function sig do params( - function: T.any( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - OpenAI::Internal::AnyHash - ) - ) - .void + function: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function::OrHash + ).void end attr_writer :function # The type of the tool. Currently, only `function` is supported. 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol + ).void + end attr_writer :type sig do params( index: Integer, id: String, - function: T.any( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - OpenAI::Internal::AnyHash - ), - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol - ) - .returns(T.attached_class) + function: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function::OrHash, + type: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::OrSymbol + ).returns(T.attached_class) end def self.new( index:, @@ -404,21 +543,28 @@ module OpenAI function: nil, # The type of the tool. Currently, only `function` is supported. type: nil - ); end + ) + end + sig do - override - .returns( - { - index: Integer, - id: String, - function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol - } - ) + override.returns( + { + index: Integer, + id: String, + function: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + type: + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -436,7 +582,11 @@ module OpenAI sig { params(name: String).void } attr_writer :name - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns( + T.attached_class + ) + end def self.new( # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -445,9 +595,12 @@ module OpenAI arguments: nil, # The name of the function to call. name: nil - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end # The type of the tool. Currently, only `function` is supported. 
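Streamed tool calls are likewise fragmentary: each `ToolCall` delta carries an `index`, and the `id`, `function.name`, and `function.arguments` pieces must be merged across chunks at that index before anything can be dispatched. A sketch under the same assumption that `stream` yields `ChatCompletionChunk` objects:

```ruby
# Accumulate tool-call fragments keyed by their `index` field.
calls = Hash.new { |h, i| h[i] = { id: nil, name: +"", arguments: +"" } }

stream.each do |chunk|
  chunk.choices.each do |choice|
    (choice.delta.tool_calls || []).each do |tc|
      acc = calls[tc.index]
      acc[:id] ||= tc.id
      if (fn = tc.function)
        acc[:name] << fn.name.to_s           # may arrive whole or in pieces
        acc[:arguments] << fn.arguments.to_s # arguments stream as raw JSON text
      end
    end
  end
end

calls.each_value { |c| puts "#{c[:name]}(#{c[:arguments]})" }
```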
@@ -455,17 +608,29 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } FUNCTION = - T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) + T.let( + :function, + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end @@ -480,55 +645,108 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + STOP = + T.let( + :stop, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) + LENGTH = + T.let( + :length, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) TOOL_CALLS = - T.let(:tool_calls, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + T.let( + :tool_calls, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + T.let( + :content_filter, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) FUNCTION_CALL = - T.let(:function_call, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) + T.let( + :function_call, + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol + ] + ) + end + def self.values + end end class Logprobs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of message content tokens with log probability information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionTokenLogprob]) + ) + end attr_accessor :content # A list of message refusal tokens with log probability information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionTokenLogprob]) + ) + end attr_accessor :refusal # Log probability information for the choice. 
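`Logprobs` on a streamed choice mirrors the non-streaming shape: nilable arrays of `ChatCompletionTokenLogprob` for content and refusal tokens. A small sketch of reading them, where `chunk` is again assumed to be a `ChatCompletionChunk`:

```ruby
chunk.choices.each do |choice|
  tokens = choice.logprobs&.content || [] # nil unless logprobs were requested
  tokens.each do |tl|
    # ChatCompletionTokenLogprob is assumed to expose `token` and `logprob`,
    # as in the non-streaming API; those fields are not defined in this hunk.
    puts format("%-12s %.4f", tl.token, tl.logprob)
  end
end
```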
sig do params( - content: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]), - refusal: T.nilable(T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob, OpenAI::Internal::AnyHash)]) - ) - .returns(T.attached_class) + content: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::OrHash] + ), + refusal: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::OrHash] + ) + ).returns(T.attached_class) end def self.new( # A list of message content tokens with log probability information. content:, # A list of message refusal tokens with log probability information. refusal: - ); end + ) + end + sig do - override - .returns( - { - content: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]), - refusal: T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]) - } - ) + override.returns( + { + content: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob] + ), + refusal: + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob] + ) + } + ) + end + def to_hash end - def to_hash; end end end @@ -552,15 +770,37 @@ module OpenAI module ServiceTier extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) - FLEX = T.let(:flex, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ) + DEFAULT = + T.let( + :default, + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ) + FLEX = + T.let( + :flex, + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat/chat_completion_content_part.rbi b/rbi/openai/models/chat/chat_completion_content_part.rbi index beb80345..dbe8d522 100644 --- a/rbi/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part.rbi @@ -10,15 +10,27 @@ module OpenAI module ChatCompletionContentPart extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File + ) + end + class File < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Chat::ChatCompletionContentPart::File::File) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Chat::ChatCompletionContentPart::File::File) } attr_reader :file sig do params( - file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::AnyHash) - ) - .void + file: OpenAI::Chat::ChatCompletionContentPart::File::File::OrHash + ).void end attr_writer :file @@ -30,20 +42,32 @@ module OpenAI # generation. 
sig do params( - file: T.any(OpenAI::Models::Chat::ChatCompletionContentPart::File::File, OpenAI::Internal::AnyHash), + file: OpenAI::Chat::ChatCompletionContentPart::File::File::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( file:, # The type of the content part. Always `file`. type: :file - ); end - sig { override.returns({file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + file: OpenAI::Chat::ChatCompletionContentPart::File::File, + type: Symbol + } + ) + end + def to_hash + end class File < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The base64 encoded file data, used when passing the file to the model as a # string. sig { returns(T.nilable(String)) } @@ -66,7 +90,13 @@ module OpenAI sig { params(filename: String).void } attr_writer :filename - sig { params(file_data: String, file_id: String, filename: String).returns(T.attached_class) } + sig do + params( + file_data: String, + file_id: String, + filename: String + ).returns(T.attached_class) + end def self.new( # The base64 encoded file data, used when passing the file to the model as a # string. @@ -75,19 +105,26 @@ module OpenAI file_id: nil, # The name of the file, used when passing the file to the model as a string. filename: nil - ); end - sig { override.returns({file_data: String, file_id: String, filename: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { file_data: String, file_id: String, filename: String } + ) + end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] - ) + override.returns( + T::Array[OpenAI::Chat::ChatCompletionContentPart::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/openai/models/chat/chat_completion_content_part_image.rbi index 83ae49c3..10b1a355 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_image.rbi @@ -6,14 +6,16 @@ module OpenAI module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Chat::ChatCompletionContentPartImage::ImageURL) } attr_reader :image_url sig do params( - image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::AnyHash) - ) - .void + image_url: + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::OrHash + ).void end attr_writer :image_url @@ -24,40 +26,62 @@ module OpenAI # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). sig do params( - image_url: T.any(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, OpenAI::Internal::AnyHash), + image_url: + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( image_url:, # The type of the content part. 
type: :image_url - ); end + ) + end + sig do - override.returns({image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, type: Symbol}) + override.returns( + { + image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class ImageURL < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Either a URL of the image or the base64 encoded image data. sig { returns(String) } attr_accessor :url # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + ) + ) + end attr_reader :detail - sig { params(detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol).void } + sig do + params( + detail: + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + ).void + end attr_writer :detail sig do params( url: String, - detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol - ) - .returns(T.attached_class) + detail: + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + ).returns(T.attached_class) end def self.new( # Either a URL of the image or the base64 encoded image data. @@ -65,14 +89,20 @@ module OpenAI # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). detail: nil - ); end + ) + end + sig do - override - .returns( - {url: String, detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol} - ) + override.returns( + { + url: String, + detail: + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
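The content-part union above (`text`, `image_url`, `input_audio`, and the new `file` part) composes into the `content` array of a user message. A hedged sketch mixing the variants as plain hashes, with illustrative URLs and IDs:

```ruby
user_message = {
  role: :user,
  content: [
    { type: :text, text: "What does this chart show? Summarize the attached file too." },
    {
      type: :image_url,
      image_url: { url: "https://example.com/chart.png", detail: :low } # detail per the enum above
    },
    {
      type: :file,
      file: { file_id: "file-abc123" } # or { file_data: "<base64>", filename: "report.pdf" }
    }
  ]
}
```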
@@ -80,18 +110,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi index 412e8c32..f1f1270c 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -2,18 +2,23 @@ module OpenAI module Models - ChatCompletionContentPartInputAudio = Chat::ChatCompletionContentPartInputAudio + ChatCompletionContentPartInputAudio = + Chat::ChatCompletionContentPartInputAudio module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns(OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio) + end attr_reader :input_audio sig do params( - input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Internal::AnyHash) - ) - .void + input_audio: + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::OrHash + ).void end attr_writer :input_audio @@ -24,77 +29,106 @@ module OpenAI # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). sig do params( - input_audio: T.any(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, OpenAI::Internal::AnyHash), + input_audio: + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( input_audio:, # The type of the content part. Always `input_audio`. type: :input_audio - ); end + ) + end + sig do - override - .returns( - {input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, type: Symbol} - ) + override.returns( + { + input_audio: + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class InputAudio < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Base64 encoded audio data. 
sig { returns(String) } attr_accessor :data # The format of the encoded audio data. Currently supports "wav" and "mp3". - sig { returns(OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol) } + sig do + returns( + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + ) + end attr_accessor :format_ sig do params( data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol - ) - .returns(T.attached_class) + format_: + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + ).returns(T.attached_class) end def self.new( # Base64 encoded audio data. data:, # The format of the encoded audio data. Currently supports "wav" and "mp3". format_: - ); end + ) + end + sig do - override - .returns( - { - data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol - } - ) + override.returns( + { + data: String, + format_: + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # The format of the encoded audio data. Currently supports "wav" and "mp3". module Format extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } WAV = - T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) + T.let( + :wav, + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol + ) MP3 = - T.let(:mp3, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) + T.let( + :mp3, + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi index 4dbbaad1..73724c93 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The refusal message generated by the model. sig { returns(String) } attr_accessor :refusal @@ -20,9 +22,12 @@ module OpenAI refusal:, # The type of the content part. 
type: :refusal - ); end - sig { override.returns({refusal: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ refusal: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/openai/models/chat/chat_completion_content_part_text.rbi index d9d89cc1..70aea8ec 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_text.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text content. sig { returns(String) } attr_accessor :text @@ -22,9 +24,12 @@ module OpenAI text:, # The type of the content part. type: :text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_deleted.rbi b/rbi/openai/models/chat/chat_completion_deleted.rbi index 344fb3ef..f63fdc1d 100644 --- a/rbi/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/openai/models/chat/chat_completion_deleted.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the chat completion that was deleted. sig { returns(String) } attr_accessor :id @@ -18,7 +20,11 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the chat completion that was deleted. id:, @@ -26,9 +32,14 @@ module OpenAI deleted:, # The type of object being deleted. object: :"chat.completion.deleted" - ); end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi index 7e1c7e21..956aaabe 100644 --- a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi @@ -2,12 +2,19 @@ module OpenAI module Models - ChatCompletionDeveloperMessageParam = Chat::ChatCompletionDeveloperMessageParam + ChatCompletionDeveloperMessageParam = + Chat::ChatCompletionDeveloperMessageParam module Chat class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the developer message. - sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } + sig do + returns( + T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + ) + end attr_accessor :content # The role of the messages author, in this case `developer`. @@ -27,14 +34,14 @@ module OpenAI # replace the previous `system` messages. 
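Per the docs above, `developer` messages supersede `system` messages for o1 and newer models: the same instruction-giving role under a new name, with `content` accepted either as a string or as an array of text parts. A short sketch of both shapes as plain hashes:

```ruby
# For o1 and newer models (per the comment above), use the developer role:
developer_message = { role: :developer, content: "Answer in formal English." }

# `content` may also be an array of text parts, per the Content union:
developer_message_parts = {
  role: :developer,
  content: [{ type: :text, text: "Answer in formal English." }]
}
```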
sig do params( - content: T.any( - String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] + ), name: String, role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The contents of the developer message. @@ -44,29 +51,52 @@ module OpenAI name: nil, # The role of the messages author, in this case `developer`. role: :developer - ); end + ) + end + sig do - override - .returns( - { - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), - role: Symbol, - name: String - } - ) + override.returns( + { + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ), + role: Symbol, + name: String + } + ) + end + def to_hash end - def to_hash; end # The contents of the developer message. module Content extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants + ] + ) + end + def self.variants + end ChatCompletionContentPartTextArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Chat::ChatCompletionContentPartText + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/openai/models/chat/chat_completion_function_call_option.rbi index 56f38623..51a20194 100644 --- a/rbi/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/openai/models/chat/chat_completion_function_call_option.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to call. sig { returns(String) } attr_accessor :name @@ -16,9 +18,12 @@ module OpenAI def self.new( # The name of the function to call. name: - ); end - sig { override.returns({name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/openai/models/chat/chat_completion_function_message_param.rbi index 4bfeb555..d5029286 100644 --- a/rbi/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_function_message_param.rbi @@ -2,10 +2,13 @@ module OpenAI module Models - ChatCompletionFunctionMessageParam = Chat::ChatCompletionFunctionMessageParam + ChatCompletionFunctionMessageParam = + Chat::ChatCompletionFunctionMessageParam module Chat class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the function message. 
sig { returns(T.nilable(String)) } attr_accessor :content @@ -18,7 +21,13 @@ module OpenAI sig { returns(Symbol) } attr_accessor :role - sig { params(content: T.nilable(String), name: String, role: Symbol).returns(T.attached_class) } + sig do + params( + content: T.nilable(String), + name: String, + role: Symbol + ).returns(T.attached_class) + end def self.new( # The contents of the function message. content:, @@ -26,9 +35,16 @@ module OpenAI name:, # The role of the messages author, in this case `function`. role: :function - ); end - sig { override.returns({content: T.nilable(String), name: String, role: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { content: T.nilable(String), name: String, role: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi index a1c9abf6..a7166480 100644 --- a/rbi/openai/models/chat/chat_completion_message.rbi +++ b/rbi/openai/models/chat/chat_completion_message.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the message. sig { returns(T.nilable(String)) } attr_accessor :content @@ -20,48 +22,62 @@ module OpenAI # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionMessage::Annotation]) + ) + end attr_reader :annotations sig do params( - annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::AnyHash)] - ) - .void + annotations: + T::Array[OpenAI::Chat::ChatCompletionMessage::Annotation::OrHash] + ).void end attr_writer :annotations # If the audio output modality is requested, this object contains data about the # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudio)) } + sig { returns(T.nilable(OpenAI::Chat::ChatCompletionAudio)) } attr_reader :audio - sig { params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::AnyHash))).void } + sig do + params( + audio: T.nilable(OpenAI::Chat::ChatCompletionAudio::OrHash) + ).void + end attr_writer :audio # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall)) } + sig do + returns(T.nilable(OpenAI::Chat::ChatCompletionMessage::FunctionCall)) + end attr_reader :function_call sig do params( - function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::AnyHash) - ) - .void + function_call: + OpenAI::Chat::ChatCompletionMessage::FunctionCall::OrHash + ).void end attr_writer :function_call # The tool calls generated by the model, such as function calls. 
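On a finished, non-streamed `ChatCompletionMessage`, `tool_calls` arrive fully formed rather than as deltas. A sketch of dispatching them and feeding results back; `handle_tool` is a hypothetical stand-in, and the `tool` role with `tool_call_id` comes from the wider API rather than this hunk:

```ruby
require "json"

# `message` is assumed to be an OpenAI::Chat::ChatCompletionMessage.
tool_messages = (message.tool_calls || []).map do |call|
  args = JSON.parse(call.function.arguments) # may raise on malformed JSON
  result = handle_tool(call.function.name, args) # hypothetical dispatcher

  # A `tool` role message ties the result back to the call via its id.
  { role: :tool, tool_call_id: call.id, content: result.to_s }
end
```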
- sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionMessageToolCall]) + ) + end attr_reader :tool_calls sig do params( - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)] - ) - .void + tool_calls: + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash] + ).void end attr_writer :tool_calls @@ -70,13 +86,15 @@ module OpenAI params( content: T.nilable(String), refusal: T.nilable(String), - annotations: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation, OpenAI::Internal::AnyHash)], - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudio, OpenAI::Internal::AnyHash)), - function_call: T.any(OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, OpenAI::Internal::AnyHash), - tool_calls: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall, OpenAI::Internal::AnyHash)], + annotations: + T::Array[OpenAI::Chat::ChatCompletionMessage::Annotation::OrHash], + audio: T.nilable(OpenAI::Chat::ChatCompletionAudio::OrHash), + function_call: + OpenAI::Chat::ChatCompletionMessage::FunctionCall::OrHash, + tool_calls: + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash], role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The contents of the message. @@ -97,63 +115,82 @@ module OpenAI tool_calls: nil, # The role of the author of this message. role: :assistant - ); end + ) + end + sig do - override - .returns( - { - content: T.nilable(String), - refusal: T.nilable(String), - role: Symbol, - annotations: T::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudio), - function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, - tool_calls: T::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] - } - ) + override.returns( + { + content: T.nilable(String), + refusal: T.nilable(String), + role: Symbol, + annotations: + T::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], + audio: T.nilable(OpenAI::Chat::ChatCompletionAudio), + function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, + tool_calls: T::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + ) + end + def to_hash end - def to_hash; end class Annotation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } attr_accessor :type # A URL citation when using web search. - sig { returns(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation) } + sig do + returns( + OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation + ) + end attr_reader :url_citation sig do params( - url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Internal::AnyHash) - ) - .void + url_citation: + OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation::OrHash + ).void end attr_writer :url_citation # A URL citation when using web search. sig do params( - url_citation: T.any(OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, OpenAI::Internal::AnyHash), + url_citation: + OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A URL citation when using web search. 
url_citation:, # The type of the URL citation. Always `url_citation`. type: :url_citation - ); end + ) + end + sig do - override - .returns( - {type: Symbol, url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation} - ) + override.returns( + { + type: Symbol, + url_citation: + OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation + } + ) + end + def to_hash end - def to_hash; end class URLCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the last character of the URL citation in the message. sig { returns(Integer) } attr_accessor :end_index @@ -188,13 +225,28 @@ module OpenAI title:, # The URL of the web resource. url: - ); end - sig { override.returns({end_index: Integer, start_index: Integer, title: String, url: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + end_index: Integer, + start_index: Integer, + title: String, + url: String + } + ) + end + def to_hash + end end end class FunctionCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -208,7 +260,9 @@ module OpenAI # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns(T.attached_class) + end def self.new( # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -217,9 +271,12 @@ module OpenAI arguments:, # The name of the function to call. 
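The comments above warn repeatedly that `arguments` is model-generated JSON that "may hallucinate parameters not defined by your function schema" and should be validated before use. A defensive sketch of doing exactly that; the helper name and `allowed_keys` contract are illustrative:

```ruby
require "json"

def parse_function_arguments(raw, allowed_keys:)
  parsed = JSON.parse(raw)
  raise ArgumentError, "expected a JSON object" unless parsed.is_a?(Hash)

  unknown = parsed.keys - allowed_keys
  raise ArgumentError, "unexpected keys: #{unknown.join(", ")}" unless unknown.empty?

  parsed
rescue JSON::ParserError => e
  raise ArgumentError, "model produced invalid JSON: #{e.message}"
end

# e.g. parse_function_arguments(call.function.arguments, allowed_keys: %w[city unit])
```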
name: - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_message_param.rbi b/rbi/openai/models/chat/chat_completion_message_param.rbi index 8a340084..ae3e1358 100644 --- a/rbi/openai/models/chat/chat_completion_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_message_param.rbi @@ -11,13 +11,25 @@ module OpenAI module ChatCompletionMessageParam extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Chat::ChatCompletionSystemMessageParam, + OpenAI::Chat::ChatCompletionUserMessageParam, + OpenAI::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Chat::ChatCompletionToolMessageParam, + OpenAI::Chat::ChatCompletionFunctionMessageParam ) + end + + sig do + override.returns( + T::Array[OpenAI::Chat::ChatCompletionMessageParam::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi index 85d78135..98f7b380 100644 --- a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi @@ -6,19 +6,21 @@ module OpenAI module Chat class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the tool call. sig { returns(String) } attr_accessor :id # The function that the model called. - sig { returns(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function) } + sig { returns(OpenAI::Chat::ChatCompletionMessageToolCall::Function) } attr_reader :function sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::AnyHash) - ) - .void + function: + OpenAI::Chat::ChatCompletionMessageToolCall::Function::OrHash + ).void end attr_writer :function @@ -29,10 +31,10 @@ module OpenAI sig do params( id: String, - function: T.any(OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, OpenAI::Internal::AnyHash), + function: + OpenAI::Chat::ChatCompletionMessageToolCall::Function::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the tool call. @@ -41,16 +43,25 @@ module OpenAI function:, # The type of the tool. Currently, only `function` is supported. type: :function - ); end + ) + end + sig do - override - .returns( - {id: String, function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, type: Symbol} - ) + override.returns( + { + id: String, + function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The arguments to call the function with, as generated by the model in JSON # format. 
Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -63,7 +74,9 @@ module OpenAI attr_accessor :name # The function that the model called. - sig { params(arguments: String, name: String).returns(T.attached_class) } + sig do + params(arguments: String, name: String).returns(T.attached_class) + end def self.new( # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -72,9 +85,12 @@ module OpenAI arguments:, # The name of the function to call. name: - ); end - sig { override.returns({arguments: String, name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_modality.rbi b/rbi/openai/models/chat/chat_completion_modality.rbi index de02e647..e65aca82 100644 --- a/rbi/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/openai/models/chat/chat_completion_modality.rbi @@ -8,14 +8,21 @@ module OpenAI module ChatCompletionModality extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Chat::ChatCompletionModality) } OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) - AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) + TEXT = T.let(:text, OpenAI::Chat::ChatCompletionModality::TaggedSymbol) + AUDIO = + T.let(:audio, OpenAI::Chat::ChatCompletionModality::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Chat::ChatCompletionModality::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi index 4380546a..42d4f7b5 100644 --- a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -6,14 +6,16 @@ module OpenAI module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::Chat::ChatCompletionNamedToolChoice::Function) } attr_reader :function sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::AnyHash) - ) - .void + function: + OpenAI::Chat::ChatCompletionNamedToolChoice::Function::OrHash + ).void end attr_writer :function @@ -25,20 +27,33 @@ module OpenAI # function. sig do params( - function: T.any(OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, OpenAI::Internal::AnyHash), + function: + OpenAI::Chat::ChatCompletionNamedToolChoice::Function::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( function:, # The type of the tool. Currently, only `function` is supported. 
type: :function - ); end - sig { override.returns({function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function, + type: Symbol + } + ) + end + def to_hash + end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to call. sig { returns(String) } attr_accessor :name @@ -47,9 +62,12 @@ module OpenAI def self.new( # The name of the function to call. name: - ); end - sig { override.returns({name: String}) } - def to_hash; end + ) + end + + sig { override.returns({ name: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi index 2bc5a26a..fa1a0fba 100644 --- a/rbi/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/openai/models/chat/chat_completion_prediction_content.rbi @@ -6,10 +6,16 @@ module OpenAI module Chat class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be # returned much more quickly. - sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } + sig do + returns( + T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + ) + end attr_accessor :content # The type of the predicted content you want to provide. This type is currently @@ -21,13 +27,13 @@ module OpenAI # being regenerated. sig do params( - content: T.any( - String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content that should be matched when generating a model response. If @@ -37,14 +43,23 @@ module OpenAI # The type of the predicted content you want to provide. This type is currently # always `content`. type: :content - ); end + ) + end + sig do - override - .returns( - {content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), type: Symbol} - ) + override.returns( + { + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ), + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The content that should be matched when generating a model response. 
If # generated tokens would match this content, the entire model response can be @@ -52,12 +67,29 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants + ] + ) + end + def self.variants + end ChatCompletionContentPartTextArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Chat::ChatCompletionContentPartText + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/chat/chat_completion_role.rbi b/rbi/openai/models/chat/chat_completion_role.rbi index 46776b39..63f5fc3d 100644 --- a/rbi/openai/models/chat/chat_completion_role.rbi +++ b/rbi/openai/models/chat/chat_completion_role.rbi @@ -9,18 +9,27 @@ module OpenAI module ChatCompletionRole extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Chat::ChatCompletionRole) } OrSymbol = T.type_alias { T.any(Symbol, String) } - DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - USER = T.let(:user, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - TOOL = T.let(:tool, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) - FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) + DEVELOPER = + T.let(:developer, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) + USER = T.let(:user, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) + ASSISTANT = + T.let(:assistant, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) + TOOL = T.let(:tool, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) + FUNCTION = + T.let(:function, OpenAI::Chat::ChatCompletionRole::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Chat::ChatCompletionRole::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat/chat_completion_store_message.rbi b/rbi/openai/models/chat/chat_completion_store_message.rbi index db4cc17e..67c86275 100644 --- a/rbi/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/openai/models/chat/chat_completion_store_message.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the chat message. sig { returns(String) } attr_accessor :id @@ -15,9 +17,12 @@ module OpenAI def self.new( # The identifier of the chat message. 
id: - ); end - sig { override.returns({id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_stream_options.rbi b/rbi/openai/models/chat/chat_completion_stream_options.rbi index 8a28c8ab..4ac025c0 100644 --- a/rbi/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/openai/models/chat/chat_completion_stream_options.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. @@ -30,9 +32,12 @@ module OpenAI # **NOTE:** If the stream is interrupted, you may not receive the final usage # chunk which contains the total token usage for the request. include_usage: nil - ); end - sig { override.returns({include_usage: T::Boolean}) } - def to_hash; end + ) + end + + sig { override.returns({ include_usage: T::Boolean }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi index 5578d9e7..099a437a 100644 --- a/rbi/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_system_message_param.rbi @@ -6,8 +6,14 @@ module OpenAI module Chat class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the system message. - sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } + sig do + returns( + T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + ) + end attr_accessor :content # The role of the messages author, in this case `system`. @@ -27,14 +33,14 @@ module OpenAI # for this purpose instead. sig do params( - content: T.any( - String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] + ), name: String, role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The contents of the system message. @@ -44,29 +50,52 @@ module OpenAI name: nil, # The role of the messages author, in this case `system`. role: :system - ); end + ) + end + sig do - override - .returns( - { - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), - role: Symbol, - name: String - } - ) + override.returns( + { + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ), + role: Symbol, + name: String + } + ) + end + def to_hash end - def to_hash; end # The contents of the system message. 
module Content extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants + ] + ) + end + def self.variants + end ChatCompletionContentPartTextArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Chat::ChatCompletionContentPartText + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/openai/models/chat/chat_completion_token_logprob.rbi index f75eb74c..e38eaadc 100644 --- a/rbi/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/openai/models/chat/chat_completion_token_logprob.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The token. sig { returns(String) } attr_accessor :token @@ -26,7 +28,11 @@ module OpenAI # List of the most likely tokens and their log probability, at this token # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. - sig { returns(T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob]) } + sig do + returns( + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] + ) + end attr_accessor :top_logprobs sig do @@ -34,9 +40,11 @@ module OpenAI token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float, - top_logprobs: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + top_logprobs: + T::Array[ + OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob::OrHash + ] + ).returns(T.attached_class) end def self.new( # The token. @@ -54,21 +62,27 @@ module OpenAI # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. top_logprobs: - ); end + ) + end + sig do - override - .returns( - { - token: String, - bytes: T.nilable(T::Array[Integer]), - logprob: Float, - top_logprobs: T::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] - } - ) + override.returns( + { + token: String, + bytes: T.nilable(T::Array[Integer]), + logprob: Float, + top_logprobs: + T::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] + } + ) + end + def to_hash end - def to_hash; end class TopLogprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The token. sig { returns(String) } attr_accessor :token @@ -105,9 +119,20 @@ module OpenAI # tokens. Otherwise, the value `-9999.0` is used to signify that the token is very # unlikely. 
logprob: - ); end - sig { override.returns({token: String, bytes: T.nilable(T::Array[Integer]), logprob: Float}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + token: String, + bytes: T.nilable(T::Array[Integer]), + logprob: Float + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_tool.rbi b/rbi/openai/models/chat/chat_completion_tool.rbi index 54e45a9a..3ea290d0 100644 --- a/rbi/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/openai/models/chat/chat_completion_tool.rbi @@ -6,10 +6,12 @@ module OpenAI module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::FunctionDefinition) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::FunctionDefinition) } attr_reader :function - sig { params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash)).void } + sig { params(function: OpenAI::FunctionDefinition::OrHash).void } attr_writer :function # The type of the tool. Currently, only `function` is supported. @@ -17,16 +19,25 @@ module OpenAI attr_accessor :type sig do - params(function: T.any(OpenAI::Models::FunctionDefinition, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + function: OpenAI::FunctionDefinition::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( function:, # The type of the tool. Currently, only `function` is supported. type: :function - ); end - sig { override.returns({function: OpenAI::Models::FunctionDefinition, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { function: OpenAI::FunctionDefinition, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi index 090a8a1d..5363a308 100644 --- a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -17,30 +17,60 @@ module OpenAI module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice + ) + end + # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. 
module Auto extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) - REQUIRED = T.let(:required, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) + NONE = + T.let( + :none, + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol + ) + REQUIRED = + T.let( + :required, + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol + ] + ) + end + def self.values + end end sig do - override - .returns( - [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - ) + override.returns( + T::Array[OpenAI::Chat::ChatCompletionToolChoiceOption::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi index 862e00bf..67ab6bc6 100644 --- a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi @@ -6,8 +6,14 @@ module OpenAI module Chat class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the tool message. - sig { returns(T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText])) } + sig do + returns( + T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + ) + end attr_accessor :content # The role of the messages author, in this case `tool`. @@ -20,14 +26,14 @@ module OpenAI sig do params( - content: T.any( - String, - T::Array[T.any(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] + ), tool_call_id: String, role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The contents of the tool message. @@ -36,29 +42,52 @@ module OpenAI tool_call_id:, # The role of the messages author, in this case `tool`. role: :tool - ); end + ) + end + sig do - override - .returns( - { - content: T.any(String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]), - role: Symbol, - tool_call_id: String - } - ) + override.returns( + { + content: + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ), + role: Symbol, + tool_call_id: String + } + ) + end + def to_hash end - def to_hash; end # The contents of the tool message. 
module Content extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[OpenAI::Chat::ChatCompletionContentPartText] + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants + ] + ) + end + def self.variants + end ChatCompletionContentPartTextArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionContentPartText], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Chat::ChatCompletionContentPartText + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi index a9452e5c..2afec258 100644 --- a/rbi/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_user_message_param.rbi @@ -6,6 +6,8 @@ module OpenAI module Chat class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The contents of the user message. sig do returns( @@ -13,10 +15,10 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File ) ] ) @@ -40,22 +42,21 @@ module OpenAI # information. sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText::OrHash, + OpenAI::Chat::ChatCompletionContentPartImage::OrHash, + OpenAI::Chat::ChatCompletionContentPartInputAudio::OrHash, + OpenAI::Chat::ChatCompletionContentPart::File::OrHash + ) + ] + ), name: String, role: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The contents of the user message. @@ -65,54 +66,66 @@ module OpenAI name: nil, # The role of the messages author, in this case `user`. role: :user - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File ) ] ), - role: Symbol, - name: String - } - ) + role: Symbol, + name: String + } + ) + end + def to_hash end - def to_hash; end # The contents of the user message. 
module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionContentPartText, - OpenAI::Models::Chat::ChatCompletionContentPartImage, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Models::Chat::ChatCompletionContentPart::File - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File + ) ] ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end ChatCompletionContentPartArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Chat::ChatCompletionContentPart], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Chat::ChatCompletionContentPart + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 2f954588..9e67e729 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of messages comprising the conversation so far. Depending on the # [model](https://platform.openai.com/docs/models) you use, different message # types (modalities) are supported, like @@ -17,12 +19,12 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + OpenAI::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Chat::ChatCompletionSystemMessageParam, + OpenAI::Chat::ChatCompletionUserMessageParam, + OpenAI::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Chat::ChatCompletionToolMessageParam, + OpenAI::Chat::ChatCompletionFunctionMessageParam ) ] ) @@ -34,18 +36,19 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, OpenAI::Models::ChatModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } attr_accessor :model # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). 
- sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam)) } + sig { returns(T.nilable(OpenAI::Chat::ChatCompletionAudioParam)) } attr_reader :audio sig do - params(audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash))) - .void + params( + audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash) + ).void end attr_writer :audio @@ -73,8 +76,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption ) ) ) @@ -83,27 +86,30 @@ module OpenAI sig do params( - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::AnyHash - ) - ) - .void + function_call: + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption::OrHash + ) + ).void end attr_writer :function_call # Deprecated in favor of `tools`. # # A list of functions the model may generate JSON inputs for. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::CompletionCreateParams::Function]) + ) + end attr_reader :functions sig do params( - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)] - ) - .void + functions: + T::Array[OpenAI::Chat::CompletionCreateParams::Function::OrHash] + ).void end attr_writer :functions @@ -159,7 +165,13 @@ module OpenAI # this model generate both text and audio responses, you can use: # # `["text", "audio"]` - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Chat::CompletionCreateParams::Modality::OrSymbol] + ) + ) + end attr_accessor :modalities # How many chat completion choices to generate for each input message. Note that @@ -179,14 +191,16 @@ module OpenAI # Static predicted output content, such as the content of a text file that is # being regenerated. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent)) } + sig do + returns(T.nilable(OpenAI::Chat::ChatCompletionPredictionContent)) + end attr_reader :prediction sig do params( - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)) - ) - .void + prediction: + T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash) + ).void end attr_writer :prediction @@ -202,7 +216,7 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # An object specifying the format that the model must output. 
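The hunks above widen every model-typed position in `CompletionCreateParams` to also accept its `OrHash` form. A minimal sketch of what that buys callers, assuming a configured client; the `gpt-4o` model name is a placeholder, and the call shape follows the SDK's documented `chat.completions.create` surface:

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# A plain Hash is accepted wherever a ChatCompletionUserMessageParam::OrHash
# is expected, so no wrapper object is required:
hash_message = {role: "user", content: "Say hello in one word."}

# The typed constructor produces the same request payload; `role` defaults
# to :user in the generated model:
typed_message = OpenAI::Chat::ChatCompletionUserMessageParam.new(
  content: "Say hello in one word."
)

completion = client.chat.completions.create(
  model: "gpt-4o",          # any member of CompletionCreateParams::Model
  messages: [hash_message]  # [typed_message] type-checks identically
)
puts completion.choices.first.message.content

Both spellings serialize to the same JSON body; `OrHash` only changes what the Sorbet signatures admit, not the wire format.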
@@ -219,9 +233,9 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject ) ) ) @@ -230,14 +244,13 @@ module OpenAI sig do params( - response_format: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ) - ) - .void + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void end attr_writer :response_format @@ -266,7 +279,13 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol + ) + ) + end attr_accessor :service_tier # Not supported with latest reasoning models `o3` and `o4-mini`. @@ -283,14 +302,14 @@ module OpenAI attr_accessor :store # Options for streaming response. Only set this when you set `stream: true`. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) } + sig { returns(T.nilable(OpenAI::Chat::ChatCompletionStreamOptions)) } attr_reader :stream_options sig do params( - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)) - ) - .void + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash) + ).void end attr_writer :stream_options @@ -314,8 +333,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice ) ) ) @@ -324,23 +343,24 @@ module OpenAI sig do params( - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::AnyHash - ) - ) - .void + tool_choice: + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + ) + ).void end attr_writer :tool_choice # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. - sig { returns(T.nilable(T::Array[OpenAI::Models::Chat::ChatCompletionTool])) } + sig { returns(T.nilable(T::Array[OpenAI::Chat::ChatCompletionTool])) } attr_reader :tools - sig { params(tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)]).void } + sig do + params(tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash]).void + end attr_writer :tools # An integer between 0 and 20 specifying the number of most likely tokens to @@ -369,75 +389,90 @@ module OpenAI # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
- sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions)) } + sig do + returns( + T.nilable(OpenAI::Chat::CompletionCreateParams::WebSearchOptions) + ) + end attr_reader :web_search_options sig do params( - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash) - ) - .void + web_search_options: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash + ).void end attr_writer :web_search_options sig do params( - messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), + messages: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam::OrHash, + OpenAI::Chat::ChatCompletionSystemMessageParam::OrHash, + OpenAI::Chat::ChatCompletionUserMessageParam::OrHash, + OpenAI::Chat::ChatCompletionAssistantMessageParam::OrHash, + OpenAI::Chat::ChatCompletionToolMessageParam::OrHash, + OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash + ) + ], + model: T.any(String, OpenAI::ChatModel::OrSymbol), + audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::AnyHash - ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], + function_call: + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption::OrHash + ), + functions: + T::Array[OpenAI::Chat::CompletionCreateParams::Function::OrHash], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: + T.nilable( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Modality::OrSymbol + ] + ), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)), + prediction: + T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: + T.nilable( + 
OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol + ), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::AnyHash - ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], + tool_choice: + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + ), + tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + web_search_options: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A list of messages comprising the conversation so far. Depending on the @@ -629,65 +664,83 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). web_search_options: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - messages: T::Array[ + override.returns( + { + messages: + T::Array[ T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + OpenAI::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Chat::ChatCompletionSystemMessageParam, + OpenAI::Chat::ChatCompletionUserMessageParam, + OpenAI::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Chat::ChatCompletionToolMessageParam, + OpenAI::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(OpenAI::Models::Chat::ChatCompletionAudioParam), - frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption + model: T.any(String, OpenAI::ChatModel::OrSymbol), + audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam), + frequency_penalty: T.nilable(Float), + function_call: + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption ), - functions: T::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], - logit_bias: T.nilable(T::Hash[Symbol, Integer]), - logprobs: T.nilable(T::Boolean), - max_completion_tokens: T.nilable(Integer), - max_tokens: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), - n: T.nilable(Integer), - parallel_tool_calls: T::Boolean, - prediction: 
T.nilable(OpenAI::Models::Chat::ChatCompletionPredictionContent), - presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject + functions: + T::Array[OpenAI::Chat::CompletionCreateParams::Function], + logit_bias: T.nilable(T::Hash[Symbol, Integer]), + logprobs: T.nilable(T::Boolean), + max_completion_tokens: T.nilable(Integer), + max_tokens: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + modalities: + T.nilable( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Modality::OrSymbol + ] ), - seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), - stop: T.nilable(T.any(String, T::Array[String])), - store: T.nilable(T::Boolean), - stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), - temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice + n: T.nilable(Integer), + parallel_tool_calls: T::Boolean, + prediction: + T.nilable(OpenAI::Chat::ChatCompletionPredictionContent), + presence_penalty: T.nilable(Float), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject ), - tools: T::Array[OpenAI::Models::Chat::ChatCompletionTool], - top_logprobs: T.nilable(Integer), - top_p: T.nilable(Float), - user: String, - web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, - request_options: OpenAI::RequestOptions - } - ) + seed: T.nilable(Integer), + service_tier: + T.nilable( + OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol + ), + stop: T.nilable(T.any(String, T::Array[String])), + store: T.nilable(T::Boolean), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), + temperature: T.nilable(Float), + tool_choice: + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice + ), + tools: T::Array[OpenAI::Chat::ChatCompletionTool], + top_logprobs: T.nilable(Integer), + top_p: T.nilable(Float), + user: String, + web_search_options: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -697,8 +750,16 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::Chat::CompletionCreateParams::Model::Variants] + ) + end + def self.variants + end end # Deprecated in favor of `tool_choice`. 
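The `Model` union above is the template for every union in this patch: the old fixed-arity tuple in `variants` (`[String, OpenAI::ChatModel::TaggedSymbol]`) becomes a reusable `Variants` type alias plus `T::Array[...]` in the return type. Since `Variants` lives in the RBI layer, downstream code refers to it from signatures checked by `srb tc`; the `ModelNormalizer` class below is illustrative, not part of the SDK:

# -- model_normalizer.rbi (sketch) --
class ModelNormalizer
  sig do
    params(model: OpenAI::Chat::CompletionCreateParams::Model::Variants)
      .returns(String)
  end
  def self.normalize(model); end
end

# -- model_normalizer.rb --
class ModelNormalizer
  # Members of the union are a free-form model string or a ChatModel
  # tagged symbol; both normalize to the string sent on the wire.
  def self.normalize(model)
    model.is_a?(Symbol) ? model.to_s : model
  end
end

ModelNormalizer.normalize("gpt-4o") # => "gpt-4o"

Naming the union once also means a future variant only touches the alias, rather than every `override.returns` tuple that previously restated the members.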
@@ -718,6 +779,14 @@ module OpenAI module FunctionCall extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption + ) + end + # `none` means the model will not call a function and instead generates a message. # `auto` means the model can pick between generating a message or calling a # function. @@ -725,33 +794,51 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = - T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) + T.let( + :none, + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol + ) AUTO = - T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) + T.let( + :auto, + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end sig do - override - .returns( - [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - ) + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::FunctionCall::Variants + ] + ) + end + def self.variants end - def self.variants; end end class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } @@ -779,8 +866,11 @@ module OpenAI attr_writer :parameters sig do - params(name: String, description: String, parameters: T::Hash[Symbol, T.anything]) - .returns(T.attached_class) + params( + name: String, + description: String, + parameters: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) end def self.new( # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain @@ -797,24 +887,51 @@ module OpenAI # # Omitting `parameters` defines a function with an empty parameter list. 
parameters: nil - ); end + ) + end + sig do - override.returns({name: String, description: String, parameters: T::Hash[Symbol, T.anything]}) + override.returns( + { + name: String, + description: String, + parameters: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end module Modality extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::CompletionCreateParams::Modality) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) - AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Chat::CompletionCreateParams::Modality::TaggedSymbol + ) + AUDIO = + T.let( + :audio, + OpenAI::Chat::CompletionCreateParams::Modality::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Modality::TaggedSymbol + ] + ) + end + def self.values + end end # An object specifying the format that the model must output. @@ -830,13 +947,24 @@ module OpenAI module ResponseFormat extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + Variants = + T.type_alias do + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::ResponseFormat::Variants + ] + ) + end + def self.variants end - def self.variants; end end # Specifies the latency tier to use for processing the request. This parameter is @@ -859,15 +987,37 @@ module OpenAI module ServiceTier extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) - FLEX = T.let(:flex, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol + ) + DEFAULT = + T.let( + :default, + OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol + ) + FLEX = + T.let( + :flex, + OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol + ] + ) + end + def self.values + end end # Not supported with latest reasoning models `o3` and `o4-mini`. 
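Every enum in this patch follows the split shown above for `Modality`, `ResponseFormat`, and `ServiceTier`: `TaggedSymbol` for values the SDK produces, `OrSymbol` (`T.any(Symbol, String)`) for values it accepts, and `values` enumerating the declared constants. A short sketch of the calling side; the runtime behavior of `values` and the `OpenAI::Chat` constant aliases are assumptions inferred from the RBI declarations above:

require "openai"

# Enumerate the tagged symbols declared for the enum:
OpenAI::Chat::CompletionCreateParams::ServiceTier.values
# => [:auto, :default, :flex]

# OrSymbol admits both spellings on input, so either form below is
# accepted where a ServiceTier::OrSymbol is expected:
from_config = "flex"                                                  # String
from_code   = OpenAI::Chat::CompletionCreateParams::ServiceTier::FLEX # => :flex

Keeping `OrSymbol` loose on input while tagging returned values lets the SDK accept forward-compatible strings from the API without weakening what it guarantees about its own output.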
@@ -877,44 +1027,63 @@ module OpenAI module Stop extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[String]]) } - def self.variants; end + Variants = T.type_alias { T.nilable(T.any(String, T::Array[String])) } - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + sig do + override.returns( + T::Array[OpenAI::Chat::CompletionCreateParams::Stop::Variants] + ) + end + def self.variants + end + + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) end class WebSearchOptions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. sig do returns( - T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol) + T.nilable( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol + ) ) end attr_reader :search_context_size sig do params( - search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol - ) - .void + search_context_size: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol + ).void end attr_writer :search_context_size # Approximate location parameters for the search. - sig { returns(T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation)) } + sig do + returns( + T.nilable( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation + ) + ) + end attr_reader :user_location sig do params( - user_location: T.nilable( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, - OpenAI::Internal::AnyHash + user_location: + T.nilable( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::OrHash ) - ) - ) - .void + ).void end attr_writer :user_location @@ -923,15 +1092,13 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). sig do params( - search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, - user_location: T.nilable( - T.any( - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, - OpenAI::Internal::AnyHash + search_context_size: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, + user_location: + T.nilable( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::OrHash ) - ) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # High level guidance for the amount of context window space to use for the @@ -939,17 +1106,23 @@ module OpenAI search_context_size: nil, # Approximate location parameters for the search. 
user_location: nil - ); end + ) + end + sig do - override - .returns( - { - search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, - user_location: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation) - } - ) + override.returns( + { + search_context_size: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::OrSymbol, + user_location: + T.nilable( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation + ) + } + ) + end + def to_hash end - def to_hash; end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. @@ -957,47 +1130,58 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } LOW = T.let( :low, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol ) MEDIUM = T.let( :medium, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol ) HIGH = T.let( :high, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end class UserLocation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Approximate location parameters for the search. - sig { returns(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate) } + sig do + returns( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate + ) + end attr_reader :approximate sig do params( - approximate: T.any( - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - OpenAI::Internal::AnyHash - ) - ) - .void + approximate: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate::OrHash + ).void end attr_writer :approximate @@ -1008,32 +1192,35 @@ module OpenAI # Approximate location parameters for the search. sig do params( - approximate: T.any( - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - OpenAI::Internal::AnyHash - ), + approximate: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Approximate location parameters for the search. approximate:, # The type of location approximation. Always `approximate`. 
type: :approximate - ); end + ) + end + sig do - override - .returns( - { - approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, - type: Symbol - } - ) + override.returns( + { + approximate: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Approximate < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } attr_reader :city @@ -1084,9 +1271,21 @@ module OpenAI # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. timezone: nil - ); end - sig { override.returns({city: String, country: String, region: String, timezone: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + city: String, + country: String, + region: String, + timezone: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/completion_delete_params.rbi b/rbi/openai/models/chat/completion_delete_params.rbi index 42ccf3bf..b960e8e9 100644 --- a/rbi/openai/models/chat/completion_delete_params.rbi +++ b/rbi/openai/models/chat/completion_delete_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/completion_list_params.rbi b/rbi/openai/models/chat/completion_list_params.rbi index 2574d2d6..cf9b4c89 100644 --- a/rbi/openai/models/chat/completion_list_params.rbi +++ b/rbi/openai/models/chat/completion_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last chat completion from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -36,10 +38,18 @@ module OpenAI # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
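# Sketch, not part of the generated RBI: calling the list endpoint these
# params describe. `OpenAI::Client` and `client.chat.completions.list` are
# assumptions about the wider SDK surface; only the parameter names and the
# `Order::OrSymbol` type come from this file. Since `OrSymbol` is
# `T.any(Symbol, String)`, both `:desc` and "desc" type-check.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))
page = client.chat.completions.list(limit: 10, order: :desc)
page.each { |completion| puts completion.id } # enumeration assumed from the SDK's pagination helpers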
- sig { returns(T.nilable(OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Chat::CompletionListParams::Order::OrSymbol) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::Chat::CompletionListParams::Order::OrSymbol + ).void + end attr_writer :order sig do @@ -48,10 +58,9 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Chat::CompletionListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last chat completion from the previous pagination request. @@ -68,35 +77,50 @@ module OpenAI # `desc` for descending order. Defaults to `asc`. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + order: OpenAI::Chat::CompletionListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::CompletionListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) + ASC = + T.let(:asc, OpenAI::Chat::CompletionListParams::Order::TaggedSymbol) + DESC = + T.let( + :desc, + OpenAI::Chat::CompletionListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Chat::CompletionListParams::Order::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat/completion_retrieve_params.rbi b/rbi/openai/models/chat/completion_retrieve_params.rbi index 3cb4b992..94561ed0 100644 --- a/rbi/openai/models/chat/completion_retrieve_params.rbi +++ b/rbi/openai/models/chat/completion_retrieve_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { 
override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/chat/completion_update_params.rbi b/rbi/openai/models/chat/completion_update_params.rbi index 48669c83..0055be95 100644 --- a/rbi/openai/models/chat/completion_update_params.rbi +++ b/rbi/openai/models/chat/completion_update_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. @@ -19,9 +21,8 @@ module OpenAI sig do params( metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -32,7 +33,9 @@ module OpenAI # a maximum length of 512 characters. metadata:, request_options: {} - ); end + ) + end + sig do override.returns( { @@ -41,7 +44,8 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end end diff --git a/rbi/openai/models/chat/completions/message_list_params.rbi b/rbi/openai/models/chat/completions/message_list_params.rbi index aa957ac1..f61f0565 100644 --- a/rbi/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/openai/models/chat/completions/message_list_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last message from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -24,20 +27,31 @@ module OpenAI # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. - sig { returns(T.nilable(OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Chat::Completions::MessageListParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol).void } + sig do + params( + order: + OpenAI::Chat::Completions::MessageListParams::Order::OrSymbol + ).void + end attr_writer :order sig do params( after: String, limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: + OpenAI::Chat::Completions::MessageListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last message from the previous pagination request. @@ -48,19 +62,22 @@ module OpenAI # for descending order. Defaults to `asc`. 
order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: + OpenAI::Chat::Completions::MessageListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. @@ -68,14 +85,34 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::Completions::MessageListParams::Order + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Chat::Completions::MessageListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Chat::Completions::MessageListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Chat::Completions::MessageListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi index f4c20501..106901df 100644 --- a/rbi/openai/models/chat_model.rbi +++ b/rbi/openai/models/chat_model.rbi @@ -5,71 +5,113 @@ module OpenAI module ChatModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ChatModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1_2025_04_14 = T.let(:"gpt-4.1-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1_MINI_2025_04_14 = T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1_NANO_2025_04_14 = T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::Models::ChatModel::TaggedSymbol) - O4_MINI = T.let(:"o4-mini", OpenAI::Models::ChatModel::TaggedSymbol) - O4_MINI_2025_04_16 = T.let(:"o4-mini-2025-04-16", OpenAI::Models::ChatModel::TaggedSymbol) - O3 = T.let(:o3, OpenAI::Models::ChatModel::TaggedSymbol) - O3_2025_04_16 = T.let(:"o3-2025-04-16", OpenAI::Models::ChatModel::TaggedSymbol) - O3_MINI = T.let(:"o3-mini", OpenAI::Models::ChatModel::TaggedSymbol) - O3_MINI_2025_01_31 = T.let(:"o3-mini-2025-01-31", OpenAI::Models::ChatModel::TaggedSymbol) - O1 = T.let(:o1, OpenAI::Models::ChatModel::TaggedSymbol) - O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) - O1_PREVIEW = T.let(:"o1-preview", OpenAI::Models::ChatModel::TaggedSymbol) - O1_PREVIEW_2024_09_12 = T.let(:"o1-preview-2024-09-12", OpenAI::Models::ChatModel::TaggedSymbol) - O1_MINI = T.let(:"o1-mini", OpenAI::Models::ChatModel::TaggedSymbol) - O1_MINI_2024_09_12 = T.let(:"o1-mini-2024-09-12", 
OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O = T.let(:"gpt-4o", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_2024_11_20 = T.let(:"gpt-4o-2024-11-20", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_2024_08_06 = T.let(:"gpt-4o-2024-08-06", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_2024_05_13 = T.let(:"gpt-4o-2024-05-13", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_AUDIO_PREVIEW = T.let(:"gpt-4o-audio-preview", OpenAI::Models::ChatModel::TaggedSymbol) + GPT_4_1 = T.let(:"gpt-4.1", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1_2025_04_14 = + T.let(:"gpt-4.1-2025-04-14", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1_MINI_2025_04_14 = + T.let(:"gpt-4.1-mini-2025-04-14", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1_NANO_2025_04_14 = + T.let(:"gpt-4.1-nano-2025-04-14", OpenAI::ChatModel::TaggedSymbol) + O4_MINI = T.let(:"o4-mini", OpenAI::ChatModel::TaggedSymbol) + O4_MINI_2025_04_16 = + T.let(:"o4-mini-2025-04-16", OpenAI::ChatModel::TaggedSymbol) + O3 = T.let(:o3, OpenAI::ChatModel::TaggedSymbol) + O3_2025_04_16 = T.let(:"o3-2025-04-16", OpenAI::ChatModel::TaggedSymbol) + O3_MINI = T.let(:"o3-mini", OpenAI::ChatModel::TaggedSymbol) + O3_MINI_2025_01_31 = + T.let(:"o3-mini-2025-01-31", OpenAI::ChatModel::TaggedSymbol) + O1 = T.let(:o1, OpenAI::ChatModel::TaggedSymbol) + O1_2024_12_17 = T.let(:"o1-2024-12-17", OpenAI::ChatModel::TaggedSymbol) + O1_PREVIEW = T.let(:"o1-preview", OpenAI::ChatModel::TaggedSymbol) + O1_PREVIEW_2024_09_12 = + T.let(:"o1-preview-2024-09-12", OpenAI::ChatModel::TaggedSymbol) + O1_MINI = T.let(:"o1-mini", OpenAI::ChatModel::TaggedSymbol) + O1_MINI_2024_09_12 = + T.let(:"o1-mini-2024-09-12", OpenAI::ChatModel::TaggedSymbol) + GPT_4O = T.let(:"gpt-4o", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_2024_11_20 = + T.let(:"gpt-4o-2024-11-20", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_2024_08_06 = + T.let(:"gpt-4o-2024-08-06", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_2024_05_13 = + T.let(:"gpt-4o-2024-05-13", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_AUDIO_PREVIEW = + T.let(:"gpt-4o-audio-preview", OpenAI::ChatModel::TaggedSymbol) GPT_4O_AUDIO_PREVIEW_2024_10_01 = - T.let(:"gpt-4o-audio-preview-2024-10-01", OpenAI::Models::ChatModel::TaggedSymbol) + T.let( + :"gpt-4o-audio-preview-2024-10-01", + OpenAI::ChatModel::TaggedSymbol + ) GPT_4O_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-audio-preview-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", OpenAI::Models::ChatModel::TaggedSymbol) + T.let( + :"gpt-4o-audio-preview-2024-12-17", + OpenAI::ChatModel::TaggedSymbol + ) + GPT_4O_MINI_AUDIO_PREVIEW = + T.let(:"gpt-4o-mini-audio-preview", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = - T.let(:"gpt-4o-mini-audio-preview-2024-12-17", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_SEARCH_PREVIEW = T.let(:"gpt-4o-search-preview", OpenAI::Models::ChatModel::TaggedSymbol) + T.let( + :"gpt-4o-mini-audio-preview-2024-12-17", + OpenAI::ChatModel::TaggedSymbol + ) + GPT_4O_SEARCH_PREVIEW = + T.let(:"gpt-4o-search-preview", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI_SEARCH_PREVIEW = - T.let(:"gpt-4o-mini-search-preview", OpenAI::Models::ChatModel::TaggedSymbol) + T.let(:"gpt-4o-mini-search-preview", OpenAI::ChatModel::TaggedSymbol) GPT_4O_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-search-preview-2025-03-11", 
OpenAI::Models::ChatModel::TaggedSymbol) + T.let( + :"gpt-4o-search-preview-2025-03-11", + OpenAI::ChatModel::TaggedSymbol + ) GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = - T.let(:"gpt-4o-mini-search-preview-2025-03-11", OpenAI::Models::ChatModel::TaggedSymbol) - CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_TURBO_2024_04_09 = T.let(:"gpt-4-turbo-2024-04-09", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_0125_PREVIEW = T.let(:"gpt-4-0125-preview", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_TURBO_PREVIEW = T.let(:"gpt-4-turbo-preview", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_1106_PREVIEW = T.let(:"gpt-4-1106-preview", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_VISION_PREVIEW = T.let(:"gpt-4-vision-preview", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4 = T.let(:"gpt-4", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_16K = T.let(:"gpt-3.5-turbo-16k", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_0301 = T.let(:"gpt-3.5-turbo-0301", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_0613 = T.let(:"gpt-3.5-turbo-0613", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_1106 = T.let(:"gpt-3.5-turbo-1106", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_0125 = T.let(:"gpt-3.5-turbo-0125", OpenAI::Models::ChatModel::TaggedSymbol) - GPT_3_5_TURBO_16K_0613 = T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::Models::ChatModel::TaggedSymbol) + T.let( + :"gpt-4o-mini-search-preview-2025-03-11", + OpenAI::ChatModel::TaggedSymbol + ) + CHATGPT_4O_LATEST = + T.let(:"chatgpt-4o-latest", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::ChatModel::TaggedSymbol) + GPT_4O_MINI_2024_07_18 = + T.let(:"gpt-4o-mini-2024-07-18", OpenAI::ChatModel::TaggedSymbol) + GPT_4_TURBO = T.let(:"gpt-4-turbo", OpenAI::ChatModel::TaggedSymbol) + GPT_4_TURBO_2024_04_09 = + T.let(:"gpt-4-turbo-2024-04-09", OpenAI::ChatModel::TaggedSymbol) + GPT_4_0125_PREVIEW = + T.let(:"gpt-4-0125-preview", OpenAI::ChatModel::TaggedSymbol) + GPT_4_TURBO_PREVIEW = + T.let(:"gpt-4-turbo-preview", OpenAI::ChatModel::TaggedSymbol) + GPT_4_1106_PREVIEW = + T.let(:"gpt-4-1106-preview", OpenAI::ChatModel::TaggedSymbol) + GPT_4_VISION_PREVIEW = + T.let(:"gpt-4-vision-preview", OpenAI::ChatModel::TaggedSymbol) + GPT_4 = T.let(:"gpt-4", OpenAI::ChatModel::TaggedSymbol) + GPT_4_0314 = T.let(:"gpt-4-0314", OpenAI::ChatModel::TaggedSymbol) + GPT_4_0613 = T.let(:"gpt-4-0613", OpenAI::ChatModel::TaggedSymbol) + GPT_4_32K = T.let(:"gpt-4-32k", OpenAI::ChatModel::TaggedSymbol) + GPT_4_32K_0314 = T.let(:"gpt-4-32k-0314", OpenAI::ChatModel::TaggedSymbol) + GPT_4_32K_0613 = T.let(:"gpt-4-32k-0613", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::ChatModel::TaggedSymbol) 
+ GPT_3_5_TURBO_16K = + T.let(:"gpt-3.5-turbo-16k", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0301 = + T.let(:"gpt-3.5-turbo-0301", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0613 = + T.let(:"gpt-3.5-turbo-0613", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_1106 = + T.let(:"gpt-3.5-turbo-1106", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_0125 = + T.let(:"gpt-3.5-turbo-0125", OpenAI::ChatModel::TaggedSymbol) + GPT_3_5_TURBO_16K_0613 = + T.let(:"gpt-3.5-turbo-16k-0613", OpenAI::ChatModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ChatModel::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::ChatModel::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/comparison_filter.rbi b/rbi/openai/models/comparison_filter.rbi index 43b5c434..3030e3c5 100644 --- a/rbi/openai/models/comparison_filter.rbi +++ b/rbi/openai/models/comparison_filter.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ComparisonFilter < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The key to compare against the value. sig { returns(String) } attr_accessor :key @@ -15,7 +17,7 @@ module OpenAI # - `gte`: greater than or equal # - `lt`: less than # - `lte`: less than or equal - sig { returns(OpenAI::Models::ComparisonFilter::Type::OrSymbol) } + sig { returns(OpenAI::ComparisonFilter::Type::OrSymbol) } attr_accessor :type # The value to compare against the attribute key; supports string, number, or @@ -28,10 +30,9 @@ module OpenAI sig do params( key: String, - type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, + type: OpenAI::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The key to compare against the value. @@ -48,14 +49,20 @@ module OpenAI # The value to compare against the attribute key; supports string, number, or # boolean types. value: - ); end + ) + end + sig do - override - .returns( - {key: String, type: OpenAI::Models::ComparisonFilter::Type::OrSymbol, value: T.any(String, Float, T::Boolean)} - ) + override.returns( + { + key: String, + type: OpenAI::ComparisonFilter::Type::OrSymbol, + value: T.any(String, Float, T::Boolean) + } + ) + end + def to_hash end - def to_hash; end # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. 
# @@ -68,18 +75,24 @@ module OpenAI module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ComparisonFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, String) } - EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - GT = T.let(:gt, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - GTE = T.let(:gte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - LT = T.let(:lt, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) - LTE = T.let(:lte, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) + EQ = T.let(:eq, OpenAI::ComparisonFilter::Type::TaggedSymbol) + NE = T.let(:ne, OpenAI::ComparisonFilter::Type::TaggedSymbol) + GT = T.let(:gt, OpenAI::ComparisonFilter::Type::TaggedSymbol) + GTE = T.let(:gte, OpenAI::ComparisonFilter::Type::TaggedSymbol) + LT = T.let(:lt, OpenAI::ComparisonFilter::Type::TaggedSymbol) + LTE = T.let(:lte, OpenAI::ComparisonFilter::Type::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ComparisonFilter::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ComparisonFilter::Type::TaggedSymbol] + ) + end + def self.values + end end # The value to compare against the attribute key; supports string, number, or @@ -87,8 +100,13 @@ module OpenAI module Value extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns(T::Array[OpenAI::ComparisonFilter::Value::Variants]) + end + def self.variants + end end end end diff --git a/rbi/openai/models/completion.rbi b/rbi/openai/models/completion.rbi index 4cc82449..9eab71d9 100644 --- a/rbi/openai/models/completion.rbi +++ b/rbi/openai/models/completion.rbi @@ -3,12 +3,14 @@ module OpenAI module Models class Completion < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A unique identifier for the completion. sig { returns(String) } attr_accessor :id # The list of completion choices the model generated for the input prompt. - sig { returns(T::Array[OpenAI::Models::CompletionChoice]) } + sig { returns(T::Array[OpenAI::CompletionChoice]) } attr_accessor :choices # The Unix timestamp (in seconds) of when the completion was created. @@ -34,10 +36,10 @@ module OpenAI attr_writer :system_fingerprint # Usage statistics for the completion request. - sig { returns(T.nilable(OpenAI::Models::CompletionUsage)) } + sig { returns(T.nilable(OpenAI::CompletionUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash)).void } + sig { params(usage: OpenAI::CompletionUsage::OrHash).void } attr_writer :usage # Represents a completion response from the API. 
Note: both the streamed and @@ -45,14 +47,13 @@ module OpenAI sig do params( id: String, - choices: T::Array[T.any(OpenAI::Models::CompletionChoice, OpenAI::Internal::AnyHash)], + choices: T::Array[OpenAI::CompletionChoice::OrHash], created: Integer, model: String, system_fingerprint: String, - usage: T.any(OpenAI::Models::CompletionUsage, OpenAI::Internal::AnyHash), + usage: OpenAI::CompletionUsage::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A unique identifier for the completion. @@ -72,22 +73,24 @@ module OpenAI usage: nil, # The object type, which is always "text_completion" object: :text_completion - ); end + ) + end + sig do - override - .returns( - { - id: String, - choices: T::Array[OpenAI::Models::CompletionChoice], - created: Integer, - model: String, - object: Symbol, - system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage - } - ) + override.returns( + { + id: String, + choices: T::Array[OpenAI::CompletionChoice], + created: Integer, + model: String, + object: Symbol, + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/completion_choice.rbi b/rbi/openai/models/completion_choice.rbi index 5eaba069..17b28983 100644 --- a/rbi/openai/models/completion_choice.rbi +++ b/rbi/openai/models/completion_choice.rbi @@ -3,22 +3,25 @@ module OpenAI module Models class CompletionChoice < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. - sig { returns(OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + sig { returns(OpenAI::CompletionChoice::FinishReason::TaggedSymbol) } attr_accessor :finish_reason sig { returns(Integer) } attr_accessor :index - sig { returns(T.nilable(OpenAI::Models::CompletionChoice::Logprobs)) } + sig { returns(T.nilable(OpenAI::CompletionChoice::Logprobs)) } attr_reader :logprobs sig do - params(logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::AnyHash))) - .void + params( + logprobs: T.nilable(OpenAI::CompletionChoice::Logprobs::OrHash) + ).void end attr_writer :logprobs @@ -27,12 +30,11 @@ module OpenAI sig do params( - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::OrSymbol, + finish_reason: OpenAI::CompletionChoice::FinishReason::OrSymbol, index: Integer, - logprobs: T.nilable(T.any(OpenAI::Models::CompletionChoice::Logprobs, OpenAI::Internal::AnyHash)), + logprobs: T.nilable(OpenAI::CompletionChoice::Logprobs::OrHash), text: String - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The reason the model stopped generating tokens. 
This will be `stop` if the model @@ -43,19 +45,21 @@ module OpenAI index:, logprobs:, text: - ); end + ) + end + sig do - override - .returns( - { - finish_reason: OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol, - index: Integer, - logprobs: T.nilable(OpenAI::Models::CompletionChoice::Logprobs), - text: String - } - ) + override.returns( + { + finish_reason: OpenAI::CompletionChoice::FinishReason::TaggedSymbol, + index: Integer, + logprobs: T.nilable(OpenAI::CompletionChoice::Logprobs), + text: String + } + ) + end + def to_hash end - def to_hash; end # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -64,18 +68,32 @@ module OpenAI module FinishReason extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::CompletionChoice::FinishReason) } OrSymbol = T.type_alias { T.any(Symbol, String) } - STOP = T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) - CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) + STOP = + T.let(:stop, OpenAI::CompletionChoice::FinishReason::TaggedSymbol) + LENGTH = + T.let(:length, OpenAI::CompletionChoice::FinishReason::TaggedSymbol) + CONTENT_FILTER = + T.let( + :content_filter, + OpenAI::CompletionChoice::FinishReason::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::CompletionChoice::FinishReason::TaggedSymbol] + ) + end + def self.values + end end class Logprobs < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T.nilable(T::Array[Integer])) } attr_reader :text_offset @@ -106,23 +124,28 @@ module OpenAI token_logprobs: T::Array[Float], tokens: T::Array[String], top_logprobs: T::Array[T::Hash[Symbol, Float]] - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new( + text_offset: nil, + token_logprobs: nil, + tokens: nil, + top_logprobs: nil + ) end - def self.new(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil); end sig do - override - .returns( - { - text_offset: T::Array[Integer], - token_logprobs: T::Array[Float], - tokens: T::Array[String], - top_logprobs: T::Array[T::Hash[Symbol, Float]] - } - ) + override.returns( + { + text_offset: T::Array[Integer], + token_logprobs: T::Array[Float], + tokens: T::Array[String], + top_logprobs: T::Array[T::Hash[Symbol, Float]] + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi index 058662fd..efe5336e 100644 --- a/rbi/openai/models/completion_create_params.rbi +++ b/rbi/openai/models/completion_create_params.rbi @@ -6,12 +6,16 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol)) } + sig do + returns(T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol)) + end attr_accessor :model # The prompt(s) to generate completions for, encoded as a string, array of @@ -21,7 +25,16 @@ module OpenAI # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. sig do - returns(T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]))) + returns( + T.nilable( + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + ) + ) end attr_accessor :prompt @@ -116,14 +129,14 @@ module OpenAI attr_accessor :stop # Options for streaming response. Only set this when you set `stream: true`. - sig { returns(T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions)) } + sig { returns(T.nilable(OpenAI::Chat::ChatCompletionStreamOptions)) } attr_reader :stream_options sig do params( - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)) - ) - .void + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash) + ).void end attr_writer :stream_options @@ -160,8 +173,16 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), + prompt: + T.nilable( + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + ), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -172,14 +193,14 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # ID of the model to use. You can use the @@ -288,13 +309,16 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable( + override.returns( + { + model: + T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), + prompt: + T.nilable( T.any( String, T::Array[String], @@ -302,26 +326,28 @@ module OpenAI T::Array[T::Array[Integer]] ) ), - best_of: T.nilable(Integer), - echo: T.nilable(T::Boolean), - frequency_penalty: T.nilable(Float), - logit_bias: T.nilable(T::Hash[Symbol, Integer]), - logprobs: T.nilable(Integer), - max_tokens: T.nilable(Integer), - n: T.nilable(Integer), - presence_penalty: T.nilable(Float), - seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(OpenAI::Models::Chat::ChatCompletionStreamOptions), - suffix: T.nilable(String), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - user: String, - request_options: OpenAI::RequestOptions - } - ) + best_of: T.nilable(Integer), + echo: T.nilable(T::Boolean), + frequency_penalty: T.nilable(Float), + logit_bias: T.nilable(T::Hash[Symbol, Integer]), + logprobs: T.nilable(Integer), + max_tokens: T.nilable(Integer), + n: T.nilable(Integer), + presence_penalty: T.nilable(Float), + seed: T.nilable(Integer), + stop: T.nilable(T.any(String, T::Array[String])), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), + suffix: T.nilable(String), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -331,16 +357,38 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any(String, OpenAI::CompletionCreateParams::Model::TaggedSymbol) + end - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } + sig do + override.returns( + T::Array[OpenAI::CompletionCreateParams::Model::Variants] + ) + end + def self.variants + end + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::CompletionCreateParams::Model) } OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_3_5_TURBO_INSTRUCT = - T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) + T.let( + :"gpt-3.5-turbo-instruct", + OpenAI::CompletionCreateParams::Model::TaggedSymbol + ) + DAVINCI_002 = + T.let( + :"davinci-002", + OpenAI::CompletionCreateParams::Model::TaggedSymbol + ) + BABBAGE_002 = + T.let( + :"babbage-002", + OpenAI::CompletionCreateParams::Model::TaggedSymbol + ) end # The prompt(s) to generate completions for, encoded as a string, array of @@ -352,16 +400,41 @@ module OpenAI module Prompt extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + end + + sig do + override.returns( + 
T::Array[OpenAI::CompletionCreateParams::Prompt::Variants] + ) + end + def self.variants + end - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) - IntegerArray = T.let(OpenAI::Internal::Type::ArrayOf[Integer], OpenAI::Internal::Type::Converter) + IntegerArray = + T.let( + OpenAI::Internal::Type::ArrayOf[Integer], + OpenAI::Internal::Type::Converter + ) ArrayOfToken2DArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::ArrayOf[Integer] + ], OpenAI::Internal::Type::Converter ) end @@ -373,10 +446,21 @@ module OpenAI module Stop extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[String]]) } - def self.variants; end + Variants = T.type_alias { T.nilable(T.any(String, T::Array[String])) } + + sig do + override.returns( + T::Array[OpenAI::CompletionCreateParams::Stop::Variants] + ) + end + def self.variants + end - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) end end end diff --git a/rbi/openai/models/completion_usage.rbi b/rbi/openai/models/completion_usage.rbi index 48f31d88..4e3dbbf3 100644 --- a/rbi/openai/models/completion_usage.rbi +++ b/rbi/openai/models/completion_usage.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class CompletionUsage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of tokens in the generated completion. sig { returns(Integer) } attr_accessor :completion_tokens @@ -16,26 +18,28 @@ module OpenAI attr_accessor :total_tokens # Breakdown of tokens used in a completion. - sig { returns(T.nilable(OpenAI::Models::CompletionUsage::CompletionTokensDetails)) } + sig do + returns(T.nilable(OpenAI::CompletionUsage::CompletionTokensDetails)) + end attr_reader :completion_tokens_details sig do params( - completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::AnyHash) - ) - .void + completion_tokens_details: + OpenAI::CompletionUsage::CompletionTokensDetails::OrHash + ).void end attr_writer :completion_tokens_details # Breakdown of tokens used in the prompt. 
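# Sketch, not part of the generated RBI: reading the nested usage breakdown
# typed in this class. The accessors match the signatures in this file; the
# `client.completions.create` call is an assumption about the wider SDK.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))
completion = client.completions.create(
  model: :"gpt-3.5-turbo-instruct",
  prompt: "Say hello"
)

if (usage = completion.usage)
  puts "prompt=#{usage.prompt_tokens} completion=#{usage.completion_tokens} total=#{usage.total_tokens}"
  # Both detail readers are optional, hence the safe navigation below.
  puts "cached prompt tokens: #{usage.prompt_tokens_details&.cached_tokens}"
end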
- sig { returns(T.nilable(OpenAI::Models::CompletionUsage::PromptTokensDetails)) } + sig { returns(T.nilable(OpenAI::CompletionUsage::PromptTokensDetails)) } attr_reader :prompt_tokens_details sig do params( - prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::AnyHash) - ) - .void + prompt_tokens_details: + OpenAI::CompletionUsage::PromptTokensDetails::OrHash + ).void end attr_writer :prompt_tokens_details @@ -45,10 +49,11 @@ module OpenAI completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer, - completion_tokens_details: T.any(OpenAI::Models::CompletionUsage::CompletionTokensDetails, OpenAI::Internal::AnyHash), - prompt_tokens_details: T.any(OpenAI::Models::CompletionUsage::PromptTokensDetails, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + completion_tokens_details: + OpenAI::CompletionUsage::CompletionTokensDetails::OrHash, + prompt_tokens_details: + OpenAI::CompletionUsage::PromptTokensDetails::OrHash + ).returns(T.attached_class) end def self.new( # Number of tokens in the generated completion. @@ -61,22 +66,27 @@ module OpenAI completion_tokens_details: nil, # Breakdown of tokens used in the prompt. prompt_tokens_details: nil - ); end + ) + end + sig do - override - .returns( - { - completion_tokens: Integer, - prompt_tokens: Integer, - total_tokens: Integer, - completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails, - prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails - } - ) + override.returns( + { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer, + completion_tokens_details: + OpenAI::CompletionUsage::CompletionTokensDetails, + prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails + } + ) + end + def to_hash end - def to_hash; end class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. sig { returns(T.nilable(Integer)) } @@ -116,8 +126,7 @@ module OpenAI audio_tokens: Integer, reasoning_tokens: Integer, rejected_prediction_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # When using Predicted Outputs, the number of tokens in the prediction that @@ -132,22 +141,26 @@ module OpenAI # still counted in the total completion tokens for purposes of billing, output, # and context window limits. rejected_prediction_tokens: nil - ); end + ) + end + sig do - override - .returns( - { - accepted_prediction_tokens: Integer, - audio_tokens: Integer, - reasoning_tokens: Integer, - rejected_prediction_tokens: Integer - } - ) + override.returns( + { + accepted_prediction_tokens: Integer, + audio_tokens: Integer, + reasoning_tokens: Integer, + rejected_prediction_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end class PromptTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Audio input tokens present in the prompt. sig { returns(T.nilable(Integer)) } attr_reader :audio_tokens @@ -163,15 +176,24 @@ module OpenAI attr_writer :cached_tokens # Breakdown of tokens used in the prompt. 
- sig { params(audio_tokens: Integer, cached_tokens: Integer).returns(T.attached_class) } + sig do + params(audio_tokens: Integer, cached_tokens: Integer).returns( + T.attached_class + ) + end def self.new( # Audio input tokens present in the prompt. audio_tokens: nil, # Cached tokens present in the prompt. cached_tokens: nil - ); end - sig { override.returns({audio_tokens: Integer, cached_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns({ audio_tokens: Integer, cached_tokens: Integer }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/compound_filter.rbi b/rbi/openai/models/compound_filter.rbi index 19966d7a..b8972c6d 100644 --- a/rbi/openai/models/compound_filter.rbi +++ b/rbi/openai/models/compound_filter.rbi @@ -3,22 +3,24 @@ module OpenAI module Models class CompoundFilter < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. - sig { returns(T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)]) } + sig { returns(T::Array[T.any(OpenAI::ComparisonFilter, T.anything)]) } attr_accessor :filters # Type of operation: `and` or `or`. - sig { returns(OpenAI::Models::CompoundFilter::Type::OrSymbol) } + sig { returns(OpenAI::CompoundFilter::Type::OrSymbol) } attr_accessor :type # Combine multiple filters using `and` or `or`. sig do params( - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, T.anything)], - type: OpenAI::Models::CompoundFilter::Type::OrSymbol - ) - .returns(T.attached_class) + filters: + T::Array[T.any(OpenAI::ComparisonFilter::OrHash, T.anything)], + type: OpenAI::CompoundFilter::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Array of filters to combine. Items can be `ComparisonFilter` or @@ -26,39 +28,50 @@ module OpenAI filters:, # Type of operation: `and` or `or`. type: - ); end + ) + end + sig do - override - .returns( - { - filters: T::Array[T.any(OpenAI::Models::ComparisonFilter, T.anything)], - type: OpenAI::Models::CompoundFilter::Type::OrSymbol - } - ) + override.returns( + { + filters: T::Array[T.any(OpenAI::ComparisonFilter, T.anything)], + type: OpenAI::CompoundFilter::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. module Filter extend OpenAI::Internal::Type::Union - sig { override.returns([OpenAI::Models::ComparisonFilter, T.anything]) } - def self.variants; end + Variants = T.type_alias { T.any(OpenAI::ComparisonFilter, T.anything) } + + sig do + override.returns(T::Array[OpenAI::CompoundFilter::Filter::Variants]) + end + def self.variants + end end # Type of operation: `and` or `or`. 
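# Sketch, not part of the generated RBI: composing the filter models in this
# file. `filters` accepts `ComparisonFilter` instances or, via the
# `OrHash`/`T.anything` arms, plain hashes, so both spellings below should
# type-check. How an endpoint consumes the filter is outside this hunk.

and_filter = OpenAI::CompoundFilter.new(
  type: :and,
  filters: [
    OpenAI::ComparisonFilter.new(key: "status", type: :eq, value: "active"),
    { key: "score", type: :gte, value: 0.5 } # hash form of a ComparisonFilter
  ]
)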
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::CompoundFilter::Type) } OrSymbol = T.type_alias { T.any(Symbol, String) } - AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) - OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) + AND = T.let(:and, OpenAI::CompoundFilter::Type::TaggedSymbol) + OR = T.let(:or, OpenAI::CompoundFilter::Type::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::CompoundFilter::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::CompoundFilter::Type::TaggedSymbol]) + end + def self.values + end end end end diff --git a/rbi/openai/models/create_embedding_response.rbi b/rbi/openai/models/create_embedding_response.rbi index 4ebe2aee..cf00c902 100644 --- a/rbi/openai/models/create_embedding_response.rbi +++ b/rbi/openai/models/create_embedding_response.rbi @@ -3,8 +3,10 @@ module OpenAI module Models class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The list of embeddings generated by the model. - sig { returns(T::Array[OpenAI::Models::Embedding]) } + sig { returns(T::Array[OpenAI::Embedding]) } attr_accessor :data # The name of the model used to generate the embedding. @@ -16,20 +18,19 @@ module OpenAI attr_accessor :object # The usage information for the request. - sig { returns(OpenAI::Models::CreateEmbeddingResponse::Usage) } + sig { returns(OpenAI::CreateEmbeddingResponse::Usage) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::AnyHash)).void } + sig { params(usage: OpenAI::CreateEmbeddingResponse::Usage::OrHash).void } attr_writer :usage sig do params( - data: T::Array[T.any(OpenAI::Models::Embedding, OpenAI::Internal::AnyHash)], + data: T::Array[OpenAI::Embedding::OrHash], model: String, - usage: T.any(OpenAI::Models::CreateEmbeddingResponse::Usage, OpenAI::Internal::AnyHash), + usage: OpenAI::CreateEmbeddingResponse::Usage::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The list of embeddings generated by the model. @@ -40,21 +41,25 @@ module OpenAI usage:, # The object type, which is always "list". object: :list - ); end + ) + end + sig do - override - .returns( - { - data: T::Array[OpenAI::Models::Embedding], - model: String, - object: Symbol, - usage: OpenAI::Models::CreateEmbeddingResponse::Usage - } - ) + override.returns( + { + data: T::Array[OpenAI::Embedding], + model: String, + object: Symbol, + usage: OpenAI::CreateEmbeddingResponse::Usage + } + ) + end + def to_hash end - def to_hash; end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens used by the prompt. sig { returns(Integer) } attr_accessor :prompt_tokens @@ -64,15 +69,24 @@ module OpenAI attr_accessor :total_tokens # The usage information for the request. - sig { params(prompt_tokens: Integer, total_tokens: Integer).returns(T.attached_class) } + sig do + params(prompt_tokens: Integer, total_tokens: Integer).returns( + T.attached_class + ) + end def self.new( # The number of tokens used by the prompt. prompt_tokens:, # The total number of tokens used by the request. 
total_tokens: - ); end - sig { override.returns({prompt_tokens: Integer, total_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns({ prompt_tokens: Integer, total_tokens: Integer }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/embedding.rbi b/rbi/openai/models/embedding.rbi index 9c78d508..aa3abb7e 100644 --- a/rbi/openai/models/embedding.rbi +++ b/rbi/openai/models/embedding.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Embedding < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The embedding vector, which is a list of floats. The length of vector depends on # the model as listed in the # [embedding guide](https://platform.openai.com/docs/guides/embeddings). @@ -18,7 +20,13 @@ module OpenAI attr_accessor :object # Represents an embedding vector returned by embedding endpoint. - sig { params(embedding: T::Array[Float], index: Integer, object: Symbol).returns(T.attached_class) } + sig do + params( + embedding: T::Array[Float], + index: Integer, + object: Symbol + ).returns(T.attached_class) + end def self.new( # The embedding vector, which is a list of floats. The length of vector depends on # the model as listed in the @@ -28,9 +36,16 @@ module OpenAI index:, # The object type, which is always "embedding". object: :embedding - ); end - sig { override.returns({embedding: T::Array[Float], index: Integer, object: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { embedding: T::Array[Float], index: Integer, object: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index 22c4e6b8..a4452287 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for @@ -14,7 +16,16 @@ module OpenAI # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) # for counting tokens. Some models may also impose a limit on total number of # tokens summed across inputs. - sig { returns(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])) } + sig do + returns( + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + ) + end attr_accessor :input # ID of the model to use. You can use the @@ -22,7 +33,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol)) } + sig { returns(T.any(String, OpenAI::EmbeddingModel::OrSymbol)) } attr_accessor :model # The number of dimensions the resulting output embeddings should have. Only @@ -35,10 +46,19 @@ module OpenAI # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). 
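# Sketch, not part of the generated RBI: requesting base64-encoded embeddings
# via the enum declared below. `client.embeddings.create` is an assumption
# about the wider SDK; the parameter names and `EncodingFormat::OrSymbol`
# come from this file.

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))
response = client.embeddings.create(
  model: :"text-embedding-3-small",
  input: "hello world",
  encoding_format: :base64 # the string "base64" also satisfies OrSymbol
)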
- sig { returns(T.nilable(OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol) + ) + end attr_reader :encoding_format - sig { params(encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol).void } + sig do + params( + encoding_format: + OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol + ).void + end attr_writer :encoding_format # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -52,14 +72,20 @@ module OpenAI sig do params( - input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), + input: + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ), + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, + encoding_format: + OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Input text to embed, encoded as a string or array of tokens. To embed multiple @@ -88,21 +114,30 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), - dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, - user: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + input: + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ), + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), + dimensions: Integer, + encoding_format: + OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. 
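# Sketch, not part of the generated RBI: the `Input` union reworked in the
# next hunk admits four shapes. The values below are illustrative; the shapes
# match the declared variants, and the `StringArray`, `IntegerArray`, and
# `ArrayOfToken2DArray` converters let the SDK validate each array form.

single_string = "The food was delicious."     # String
string_batch  = ["first text", "second text"] # T::Array[String]
token_ids     = [1212, 318, 257, 1332]        # T::Array[Integer]
token_batches = [[1212, 318], [257, 1332]]    # T::Array[T::Array[Integer]]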
@@ -115,16 +150,41 @@ module OpenAI module Input extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + end + + sig do + override.returns( + T::Array[OpenAI::EmbeddingCreateParams::Input::Variants] + ) + end + def self.variants + end - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) - IntegerArray = T.let(OpenAI::Internal::Type::ArrayOf[Integer], OpenAI::Internal::Type::Converter) + IntegerArray = + T.let( + OpenAI::Internal::Type::ArrayOf[Integer], + OpenAI::Internal::Type::Converter + ) ArrayOfToken2DArray = T.let( - OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::ArrayOf[Integer]], + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::ArrayOf[Integer] + ], OpenAI::Internal::Type::Converter ) end @@ -137,8 +197,16 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::EmbeddingModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::EmbeddingModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::EmbeddingCreateParams::Model::Variants] + ) + end + def self.variants + end end # The format to return the embeddings in. Can be either `float` or @@ -146,14 +214,32 @@ module OpenAI module EncodingFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) - BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) + FLOAT = + T.let( + :float, + OpenAI::EmbeddingCreateParams::EncodingFormat::TaggedSymbol + ) + BASE64 = + T.let( + :base64, + OpenAI::EmbeddingCreateParams::EncodingFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::EmbeddingCreateParams::EncodingFormat::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/embedding_model.rbi b/rbi/openai/models/embedding_model.rbi index bbaaae0a..42bc7b6f 100644 --- a/rbi/openai/models/embedding_model.rbi +++ b/rbi/openai/models/embedding_model.rbi @@ -5,15 +5,19 @@ module OpenAI module EmbeddingModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EmbeddingModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::TaggedSymbol) - TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", OpenAI::Models::EmbeddingModel::TaggedSymbol) - TEXT_EMBEDDING_3_LARGE = T.let(:"text-embedding-3-large", OpenAI::Models::EmbeddingModel::TaggedSymbol) + TEXT_EMBEDDING_ADA_002 = + T.let(:"text-embedding-ada-002", OpenAI::EmbeddingModel::TaggedSymbol) + TEXT_EMBEDDING_3_SMALL = + 
T.let(:"text-embedding-3-small", OpenAI::EmbeddingModel::TaggedSymbol) + TEXT_EMBEDDING_3_LARGE = + T.let(:"text-embedding-3-large", OpenAI::EmbeddingModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::EmbeddingModel::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::EmbeddingModel::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/error_object.rbi b/rbi/openai/models/error_object.rbi index 4dc767bd..5f923ecc 100644 --- a/rbi/openai/models/error_object.rbi +++ b/rbi/openai/models/error_object.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ErrorObject < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T.nilable(String)) } attr_accessor :code @@ -16,15 +18,28 @@ module OpenAI attr_accessor :type sig do - params(code: T.nilable(String), message: String, param: T.nilable(String), type: String) - .returns(T.attached_class) + params( + code: T.nilable(String), + message: String, + param: T.nilable(String), + type: String + ).returns(T.attached_class) + end + def self.new(code:, message:, param:, type:) end - def self.new(code:, message:, param:, type:); end sig do - override.returns({code: T.nilable(String), message: String, param: T.nilable(String), type: String}) + override.returns( + { + code: T.nilable(String), + message: String, + param: T.nilable(String), + type: String + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index af1d1784..77dd005f 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -6,12 +6,14 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The configuration for the data source used for the evaluation runs. 
sig do returns( T.any( - OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs + OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs ) ) end @@ -22,11 +24,11 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateParams::TestingCriterion::Python, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel + OpenAI::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::EvalCreateParams::TestingCriterion::Python, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel ) ] ) @@ -51,26 +53,25 @@ module OpenAI sig do params( - data_source_config: T.any( - OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs - ), - testing_criteria: T::Array[ + data_source_config: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateParams::TestingCriterion::Python, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel - ) - ], + OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, + OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash + ), + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::Python::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::OrHash + ) + ], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The configuration for the data source used for the evaluation runs. @@ -87,37 +88,52 @@ module OpenAI # The name of the evaluation. 
name: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - data_source_config: T.any( - OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs + override.returns( + { + data_source_config: + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs ), - testing_criteria: T::Array[ + testing_criteria: + T::Array[ T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateParams::TestingCriterion::Python, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel + OpenAI::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::EvalCreateParams::TestingCriterion::Python, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel ) ], - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - request_options: OpenAI::RequestOptions - } - ) + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The configuration for the data source used for the evaluation runs. module DataSourceConfig extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs + ) + end + class Custom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The json schema for each row in the data source. sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item_schema @@ -141,8 +157,11 @@ module OpenAI # - Used to define your testing criteria and # - What data is required when creating a run sig do - params(item_schema: T::Hash[Symbol, T.anything], include_sample_schema: T::Boolean, type: Symbol) - .returns(T.attached_class) + params( + item_schema: T::Hash[Symbol, T.anything], + include_sample_schema: T::Boolean, + type: Symbol + ).returns(T.attached_class) end def self.new( # The json schema for each row in the data source. @@ -152,19 +171,26 @@ module OpenAI include_sample_schema: nil, # The type of data source. Always `custom`. type: :custom - ); end + ) + end + sig do - override - .returns({ - item_schema: T::Hash[Symbol, T.anything], - type: Symbol, - include_sample_schema: T::Boolean - }) + override.returns( + { + item_schema: T::Hash[Symbol, T.anything], + type: Symbol, + include_sample_schema: T::Boolean + } + ) + end + def to_hash end - def to_hash; end end class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of data source. Always `logs`. sig { returns(Symbol) } attr_accessor :type @@ -179,24 +205,35 @@ module OpenAI # A data source config which specifies the metadata property of your stored # completions query. This is usually metadata like `usecase=chatbot` or # `prompt-version=v2`, etc. - sig { params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } + sig do + params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns( + T.attached_class + ) + end def self.new( # Metadata filters for the logs data source. metadata: nil, # The type of data source. Always `logs`. 
type: :logs - ); end - sig { override.returns({type: Symbol, metadata: T::Hash[Symbol, T.anything]}) } - def to_hash; end + ) + end + + sig do + override.returns( + { type: Symbol, metadata: T::Hash[Symbol, T.anything] } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs] - ) + override.returns( + T::Array[OpenAI::EvalCreateParams::DataSourceConfig::Variants] + ) + end + def self.variants end - def self.variants; end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -204,15 +241,29 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::EvalCreateParams::TestingCriterion::Python, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel + ) + end + class LabelModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. sig do returns( T::Array[ T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ] ) @@ -243,20 +294,19 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[ - T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem - ) - ], + input: + T::Array[ + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::OrHash + ) + ], labels: T::Array[String], model: String, name: String, passing_labels: T::Array[String], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -272,33 +322,47 @@ module OpenAI passing_labels:, # The object type, which is always `label_model`. type: :label_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[ + override.returns( + { + input: + T::Array[ T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ], - labels: T::Array[String], - model: String, - name: String, - passing_labels: T::Array[String], - type: Symbol - } - ) + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A chat message that makes up the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
module Input extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + ) + end + class SimpleInputMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message. sig { returns(String) } attr_accessor :content @@ -307,25 +371,33 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns(T.attached_class) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText ) ) end @@ -333,22 +405,28 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol) } + sig do + returns( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. sig do returns( - T.nilable(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol) + T.nilable( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ) ) end attr_reader :type sig do params( - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -359,16 +437,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. 
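
The `OrHash` aliases introduced in these hunks mean that any parameter typed `Foo::OrHash` accepts either a constructed model or a plain hash of the same shape. A minimal sketch of both call styles, assuming `BaseModel` constructors coerce hashes the way the signatures suggest (the literal values are illustrative):

    # A constructed model instance...
    msg = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage.new(
      content: "You are a helpful assistant.",
      role: "system"
    )

    # ...and an equivalent bare hash are both valid `input` elements,
    # since the sig takes `T.any(SimpleInputMessage::OrHash, EvalItem::OrHash)`:
    grader = OpenAI::EvalCreateParams::TestingCriterion::LabelModel.new(
      input: [msg, {content: "{{item.question}}", role: "user"}],
      labels: %w[correct incorrect],
      passing_labels: %w[correct],
      model: "gpt-4o-mini",  # illustrative model name
      name: "correctness"
    )
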
@@ -378,28 +457,47 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - } - ) + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -409,24 +507,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -435,37 +540,44 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( :user, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) ASSISTANT = T.let( :assistant, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -473,36 +585,48 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( :message, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class Python < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the grader. sig { returns(String) } attr_accessor :name @@ -531,8 +655,13 @@ module OpenAI # A PythonGrader object that runs a python script on the input. 
sig do - params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) - .returns(T.attached_class) + params( + name: String, + source: String, + image_tag: String, + pass_threshold: Float, + type: Symbol + ).returns(T.attached_class) end def self.new( # The name of the grader. @@ -545,7 +674,9 @@ module OpenAI pass_threshold: nil, # The object type, which is always `python`. type: :python - ); end + ) + end + sig do override.returns( { @@ -557,12 +688,22 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class ScoreModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input]) } + sig do + returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input + ] + ) + end attr_accessor :input # The model to use for the evaluation. @@ -601,15 +742,17 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[T.any(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::OrHash + ], model: String, name: String, pass_threshold: Float, range: T::Array[Float], sampling_params: T.anything, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -626,31 +769,39 @@ module OpenAI sampling_params: nil, # The object type, which is always `score_model`. type: :score_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) + override.returns( + { + input: + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input + ], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText ) ) end @@ -658,14 +809,29 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol) } + sig do + returns( + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. 
- sig { returns(T.nilable(OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -675,16 +841,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -694,28 +861,45 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText ), - role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - } - ) + role: + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -725,24 +909,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. 
type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -751,31 +942,44 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = - T.let(:user, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :user, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) ASSISTANT = T.let( :assistant, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol ) SYSTEM = - T.let(:system, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :system, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) DEVELOPER = T.let( :developer, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. 
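
Each union module now pairs `self.variants` with a `Variants` type alias, so downstream code can match exhaustively over the runtime types a union may yield. A sketch of a hypothetical helper over `ScoreModel::Input::Content`, using only the variant classes named in the signatures (the `text` accessor on `ResponseInputText` is assumed from its usual shape):

    # Hypothetical helper: normalize any Content variant to plain text.
    # Content::Variants = T.any(String, ResponseInputText, OutputText)
    def content_text(content)
      case content
      when String
        content
      when OpenAI::Responses::ResponseInputText
        content.text
      when OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText
        content.text
      else
        raise ArgumentError, "unexpected content variant: #{content.class}"
      end
    end
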
@@ -783,30 +987,40 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = - T.let(:message, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.let( + :message, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end sig do - override - .returns( - [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel] - ) + override.returns( + T::Array[OpenAI::EvalCreateParams::TestingCriterion::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index d0304e79..97b8fa5b 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation. sig { returns(String) } attr_accessor :id @@ -14,7 +16,10 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. 
sig do returns( - T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ) ) end attr_accessor :data_source_config @@ -41,9 +46,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel ) @@ -62,26 +67,25 @@ module OpenAI params( id: String, created_at: Integer, - data_source_config: T.any( - OpenAI::Models::EvalCustomDataSourceConfig, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStoredCompletionsDataSourceConfig - ), + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - testing_criteria: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel - ) - ], + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalLabelModelGrader::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::Python::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::OrHash + ) + ], object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation. @@ -103,42 +107,59 @@ module OpenAI testing_criteria:, # The object type. object: :eval - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - object: Symbol, - testing_criteria: T::Array[ + override.returns( + { + id: String, + created_at: Integer, + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + testing_criteria: + T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Configuration of data sources used in runs of the evaluation. 
module DataSourceConfig extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + Variants = + T.type_alias do + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Variants + ] + ) + end + def self.variants end - def self.variants; end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -146,7 +167,21 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel + ) + end + class Python < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the grader. sig { returns(String) } attr_accessor :name @@ -175,8 +210,13 @@ module OpenAI # A PythonGrader object that runs a python script on the input. sig do - params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) - .returns(T.attached_class) + params( + name: String, + source: String, + image_tag: String, + pass_threshold: Float, + type: Symbol + ).returns(T.attached_class) end def self.new( # The name of the grader. @@ -189,7 +229,9 @@ module OpenAI pass_threshold: nil, # The object type, which is always `python`. type: :python - ); end + ) + end + sig do override.returns( { @@ -201,12 +243,22 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class ScoreModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input + ] + ) + end attr_accessor :input # The model to use for the evaluation. @@ -245,15 +297,17 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[T.any(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::OrHash + ], model: String, name: String, pass_threshold: Float, range: T::Array[Float], sampling_params: T.anything, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -270,30 +324,38 @@ module OpenAI sampling_params: nil, # The object type, which is always `score_model`. 
type: :score_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) + override.returns( + { + input: + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input + ], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ) ) @@ -302,18 +364,29 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + sig do + returns( + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. sig do returns( - T.nilable(OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.nilable( + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) ) end attr_reader :type - sig { params(type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -323,16 +396,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash + ), + role: + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -342,28 +416,45 @@ module OpenAI role:, # The type of the message input. Always `message`. 
type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ), - role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -373,24 +464,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -399,11 +497,19 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = - T.let(:user, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :user, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) ASSISTANT = T.let( :assistant, @@ -421,12 +527,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. 
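
The enums converted in these hunks all follow one shape: input positions take the permissive `OrSymbol` (`T.any(Symbol, String)`), while the constants and `self.values` carry the narrow `TaggedSymbol`. A short sketch against the `Role` enum just shown (declaration order for `values` is assumed):

    role = OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role

    role::USER    # => :user, typed Role::TaggedSymbol
    role.values   # => [:user, :assistant, :system, :developer]

    # Anywhere a sig takes `Role::OrSymbol`, a bare literal works:
    #   role: :developer    or    role: "developer"
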
@@ -434,7 +542,12 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = @@ -444,23 +557,27 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end sig do - override - .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalCreateResponse::TestingCriterion::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/eval_custom_data_source_config.rbi b/rbi/openai/models/eval_custom_data_source_config.rbi index 2c76acf3..6725b324 100644 --- a/rbi/openai/models/eval_custom_data_source_config.rbi +++ b/rbi/openai/models/eval_custom_data_source_config.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The json schema for the run data source items. Learn how to build JSON schemas # [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } @@ -18,16 +20,25 @@ module OpenAI # # - Used to define your testing criteria and # - What data is required when creating a run - sig { params(schema: T::Hash[Symbol, T.anything], type: Symbol).returns(T.attached_class) } + sig do + params(schema: T::Hash[Symbol, T.anything], type: Symbol).returns( + T.attached_class + ) + end def self.new( # The json schema for the run data source items. Learn how to build JSON schemas # [here](https://json-schema.org/). schema:, # The type of data source. Always `custom`. 
type: :custom - ); end - sig { override.returns({schema: T::Hash[Symbol, T.anything], type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ schema: T::Hash[Symbol, T.anything], type: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/eval_delete_params.rbi b/rbi/openai/models/eval_delete_params.rbi index 14cbeaac..e5ead349 100644 --- a/rbi/openai/models/eval_delete_params.rbi +++ b/rbi/openai/models/eval_delete_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/eval_delete_response.rbi b/rbi/openai/models/eval_delete_response.rbi index 72e1edf4..5ed757c3 100644 --- a/rbi/openai/models/eval_delete_response.rbi +++ b/rbi/openai/models/eval_delete_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T::Boolean) } attr_accessor :deleted @@ -12,11 +14,21 @@ module OpenAI sig { returns(String) } attr_accessor :object - sig { params(deleted: T::Boolean, eval_id: String, object: String).returns(T.attached_class) } - def self.new(deleted:, eval_id:, object:); end + sig do + params(deleted: T::Boolean, eval_id: String, object: String).returns( + T.attached_class + ) + end + def self.new(deleted:, eval_id:, object:) + end - sig { override.returns({deleted: T::Boolean, eval_id: String, object: String}) } - def to_hash; end + sig do + override.returns( + { deleted: T::Boolean, eval_id: String, object: String } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/eval_label_model_grader.rbi b/rbi/openai/models/eval_label_model_grader.rbi index 1a12db15..df283285 100644 --- a/rbi/openai/models/eval_label_model_grader.rbi +++ b/rbi/openai/models/eval_label_model_grader.rbi @@ -3,7 +3,9 @@ module OpenAI module Models class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - sig { returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input]) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(T::Array[OpenAI::EvalLabelModelGrader::Input]) } attr_accessor :input # The labels to assign to each item in the evaluation. @@ -30,14 +32,13 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[T.any(OpenAI::Models::EvalLabelModelGrader::Input, OpenAI::Internal::AnyHash)], + input: T::Array[OpenAI::EvalLabelModelGrader::Input::OrHash], labels: T::Array[String], model: String, name: String, passing_labels: T::Array[String], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( input:, @@ -51,30 +52,34 @@ module OpenAI passing_labels:, # The object type, which is always `label_model`. 
type: :label_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalLabelModelGrader::Input], - labels: T::Array[String], - model: String, - name: String, - passing_labels: T::Array[String], - type: Symbol - } - ) + override.returns( + { + input: T::Array[OpenAI::EvalLabelModelGrader::Input], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalLabelModelGrader::Input::Content::OutputText ) ) end @@ -82,14 +87,20 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) } + sig { returns(OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol) } attr_accessor :role # The type of the message input. Always `message`. - sig { returns(T.nilable(OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol)) } + sig do + returns( + T.nilable(OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::EvalLabelModelGrader::Input::Type::OrSymbol).void } + sig do + params(type: OpenAI::EvalLabelModelGrader::Input::Type::OrSymbol).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -99,16 +110,15 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText - ), - role: OpenAI::Models::EvalLabelModelGrader::Input::Role::OrSymbol, - type: OpenAI::Models::EvalLabelModelGrader::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalLabelModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::EvalLabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::EvalLabelModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -118,28 +128,43 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::EvalLabelModelGrader::Input::Content::OutputText ), - role: OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol, - type: OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol - } - ) + role: OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol, + type: OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. 
module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalLabelModelGrader::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -155,18 +180,21 @@ module OpenAI text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText] - ) + override.returns( + T::Array[OpenAI::EvalLabelModelGrader::Input::Content::Variants] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -174,29 +202,65 @@ module OpenAI module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::EvalLabelModelGrader::Input::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol] + ) + end + def self.values + end end # The type of the message input. Always `message`. 
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::EvalLabelModelGrader::Input::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol) + MESSAGE = + T.let( + :message, + OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::EvalLabelModelGrader::Input::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/eval_list_params.rbi b/rbi/openai/models/eval_list_params.rbi index 0ef48b7c..5b1a38f4 100644 --- a/rbi/openai/models/eval_list_params.rbi +++ b/rbi/openai/models/eval_list_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last eval from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -22,29 +24,28 @@ module OpenAI # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. - sig { returns(T.nilable(OpenAI::Models::EvalListParams::Order::OrSymbol)) } + sig { returns(T.nilable(OpenAI::EvalListParams::Order::OrSymbol)) } attr_reader :order - sig { params(order: OpenAI::Models::EvalListParams::Order::OrSymbol).void } + sig { params(order: OpenAI::EvalListParams::Order::OrSymbol).void } attr_writer :order # Evals can be ordered by creation time or last updated time. Use `created_at` for # creation time or `updated_at` for last updated time. - sig { returns(T.nilable(OpenAI::Models::EvalListParams::OrderBy::OrSymbol)) } + sig { returns(T.nilable(OpenAI::EvalListParams::OrderBy::OrSymbol)) } attr_reader :order_by - sig { params(order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol).void } + sig { params(order_by: OpenAI::EvalListParams::OrderBy::OrSymbol).void } attr_writer :order_by sig do params( after: String, limit: Integer, - order: OpenAI::Models::EvalListParams::Order::OrSymbol, - order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::EvalListParams::Order::OrSymbol, + order_by: OpenAI::EvalListParams::OrderBy::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last eval from the previous pagination request. @@ -58,34 +59,42 @@ module OpenAI # creation time or `updated_at` for last updated time. order_by: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::EvalListParams::Order::OrSymbol, - order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::EvalListParams::Order::OrSymbol, + order_by: OpenAI::EvalListParams::OrderBy::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order for evals by timestamp. 
Use `asc` for ascending order or `desc` for # descending order. module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::Order) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::EvalListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) + ASC = T.let(:asc, OpenAI::EvalListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::EvalListParams::Order::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::EvalListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::EvalListParams::Order::TaggedSymbol] + ) + end + def self.values + end end # Evals can be ordered by creation time or last updated time. Use `created_at` for @@ -93,14 +102,22 @@ module OpenAI module OrderBy extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::OrderBy) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::EvalListParams::OrderBy) } OrSymbol = T.type_alias { T.any(Symbol, String) } - CREATED_AT = T.let(:created_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) - UPDATED_AT = T.let(:updated_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) + CREATED_AT = + T.let(:created_at, OpenAI::EvalListParams::OrderBy::TaggedSymbol) + UPDATED_AT = + T.let(:updated_at, OpenAI::EvalListParams::OrderBy::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::EvalListParams::OrderBy::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index e1614678..b59f14c9 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalListResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation. sig { returns(String) } attr_accessor :id @@ -14,7 +16,10 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. 
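# --- Usage sketch for the EvalListParams completed above (annotation, not
# generated code). Plain Symbols satisfy the Order/OrderBy OrSymbol aliases.
params = OpenAI::EvalListParams.new(
  limit: 20,
  order: :desc,          # :asc or :desc
  order_by: :created_at  # :created_at or :updated_at
)
params.to_hash # => a Hash matching the `to_hash` shape declared above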
sig do returns( - T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ) ) end attr_accessor :data_source_config @@ -41,9 +46,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel ) @@ -62,26 +67,25 @@ module OpenAI params( id: String, created_at: Integer, - data_source_config: T.any( - OpenAI::Models::EvalCustomDataSourceConfig, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStoredCompletionsDataSourceConfig - ), + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - testing_criteria: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::Python, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel - ) - ], + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalLabelModelGrader::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::Python::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::OrHash + ) + ], object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation. @@ -103,42 +107,59 @@ module OpenAI testing_criteria:, # The object type. object: :eval - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - object: Symbol, - testing_criteria: T::Array[ + override.returns( + { + id: String, + created_at: Integer, + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + testing_criteria: + T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Configuration of data sources used in runs of the evaluation. 
module DataSourceConfig extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + Variants = + T.type_alias do + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::EvalListResponse::DataSourceConfig::Variants + ] + ) + end + def self.variants end - def self.variants; end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -146,7 +167,21 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::Python, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel + ) + end + class Python < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the grader. sig { returns(String) } attr_accessor :name @@ -175,8 +210,13 @@ module OpenAI # A PythonGrader object that runs a python script on the input. sig do - params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) - .returns(T.attached_class) + params( + name: String, + source: String, + image_tag: String, + pass_threshold: Float, + type: Symbol + ).returns(T.attached_class) end def self.new( # The name of the grader. @@ -189,7 +229,9 @@ module OpenAI pass_threshold: nil, # The object type, which is always `python`. type: :python - ); end + ) + end + sig do override.returns( { @@ -201,12 +243,22 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class ScoreModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input + ] + ) + end attr_accessor :input # The model to use for the evaluation. @@ -245,15 +297,17 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[T.any(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::OrHash + ], model: String, name: String, pass_threshold: Float, range: T::Array[Float], sampling_params: T.anything, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -270,30 +324,38 @@ module OpenAI sampling_params: nil, # The object type, which is always `score_model`. 
type: :score_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) + override.returns( + { + input: + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input + ], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ) ) @@ -302,18 +364,29 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + sig do + returns( + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. sig do returns( - T.nilable(OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.nilable( + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) ) end attr_reader :type - sig { params(type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -323,16 +396,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash + ), + role: + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -342,28 +416,45 @@ module OpenAI role:, # The type of the message input. Always `message`. 
type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ), - role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -373,24 +464,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -399,18 +497,29 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = - T.let(:user, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :user, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) ASSISTANT = T.let( :assistant, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol ) SYSTEM = - T.let(:system, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :system, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) DEVELOPER = T.let( :developer, @@ -418,12 +527,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -431,30 +542,42 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = - T.let(:message, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.let( + :message, + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end sig do - override - .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalListResponse::TestingCriterion::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/eval_retrieve_params.rbi b/rbi/openai/models/eval_retrieve_params.rbi index 33cfd174..c75e1ad6 100644 --- a/rbi/openai/models/eval_retrieve_params.rbi +++ b/rbi/openai/models/eval_retrieve_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def 
self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index 8e47cfe9..b6216729 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation. sig { returns(String) } attr_accessor :id @@ -14,7 +16,10 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do returns( - T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ) ) end attr_accessor :data_source_config @@ -41,9 +46,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel ) @@ -62,26 +67,25 @@ module OpenAI params( id: String, created_at: Integer, - data_source_config: T.any( - OpenAI::Models::EvalCustomDataSourceConfig, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStoredCompletionsDataSourceConfig - ), + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - testing_criteria: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel - ) - ], + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalLabelModelGrader::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::OrHash + ) + ], object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation. @@ -103,42 +107,59 @@ module OpenAI testing_criteria:, # The object type. 
object: :eval - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - object: Symbol, - testing_criteria: T::Array[ + override.returns( + { + id: String, + created_at: Integer, + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + testing_criteria: + T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Configuration of data sources used in runs of the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + Variants = + T.type_alias do + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Variants + ] + ) + end + def self.variants end - def self.variants; end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -146,7 +167,21 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel + ) + end + class Python < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the grader. sig { returns(String) } attr_accessor :name @@ -175,8 +210,13 @@ module OpenAI # A PythonGrader object that runs a python script on the input. sig do - params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) - .returns(T.attached_class) + params( + name: String, + source: String, + image_tag: String, + pass_threshold: Float, + type: Symbol + ).returns(T.attached_class) end def self.new( # The name of the grader. @@ -189,7 +229,9 @@ module OpenAI pass_threshold: nil, # The object type, which is always `python`. type: :python - ); end + ) + end + sig do override.returns( { @@ -201,12 +243,22 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class ScoreModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input + ] + ) + end attr_accessor :input # The model to use for the evaluation. @@ -245,20 +297,17 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. 
sig do params( - input: T::Array[ - T.any( - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input, - OpenAI::Internal::AnyHash - ) - ], + input: + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::OrHash + ], model: String, name: String, pass_threshold: Float, range: T::Array[Float], sampling_params: T.anything, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -275,30 +324,38 @@ module OpenAI sampling_params: nil, # The object type, which is always `score_model`. type: :score_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) + override.returns( + { + input: + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input + ], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ) ) @@ -307,20 +364,28 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + sig do + returns( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. sig do returns( - T.nilable(OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.nilable( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) ) end attr_reader :type sig do - params(type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol) - .void + params( + type: + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).void end attr_writer :type @@ -331,16 +396,17 @@ module OpenAI # interactions. 
sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash + ), + role: + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -350,28 +416,45 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ), - role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -381,24 +464,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -407,7 +497,12 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = @@ -432,12 +527,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -445,7 +542,12 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = @@ -455,23 +557,27 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end sig do - override - .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi index 2c6fd596..99f736f1 100644 --- a/rbi/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/openai/models/eval_stored_completions_data_source_config.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The json schema for the run data source items. Learn how to build JSON schemas # [here](https://json-schema.org/). sig { returns(T::Hash[Symbol, T.anything]) } @@ -31,8 +33,7 @@ module OpenAI schema: T::Hash[Symbol, T.anything], metadata: T.nilable(T::Hash[Symbol, String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The json schema for the run data source items. Learn how to build JSON schemas @@ -47,16 +48,20 @@ module OpenAI metadata: nil, # The type of data source. Always `stored_completions`. 
type: :stored_completions - ); end + ) + end + sig do - override - .returns({ - schema: T::Hash[Symbol, T.anything], - type: Symbol, - metadata: T.nilable(T::Hash[Symbol, String]) - }) + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/eval_string_check_grader.rbi b/rbi/openai/models/eval_string_check_grader.rbi index cf0301de..b22c1497 100644 --- a/rbi/openai/models/eval_string_check_grader.rbi +++ b/rbi/openai/models/eval_string_check_grader.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. sig { returns(String) } attr_accessor :input @@ -12,7 +14,7 @@ module OpenAI attr_accessor :name # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. - sig { returns(OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol) } + sig { returns(OpenAI::EvalStringCheckGrader::Operation::OrSymbol) } attr_accessor :operation # The reference text. This may include template strings. @@ -29,11 +31,10 @@ module OpenAI params( input: String, name: String, - operation: OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol, + operation: OpenAI::EvalStringCheckGrader::Operation::OrSymbol, reference: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -46,35 +47,47 @@ module OpenAI reference:, # The object type, which is always `string_check`. type: :string_check - ); end + ) + end + sig do - override - .returns( - { - input: String, - name: String, - operation: OpenAI::Models::EvalStringCheckGrader::Operation::OrSymbol, - reference: String, - type: Symbol - } - ) + override.returns( + { + input: String, + name: String, + operation: OpenAI::EvalStringCheckGrader::Operation::OrSymbol, + reference: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. 
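# --- Usage sketch (annotation, not generated code). The constructor above
# takes the operation as a plain Symbol; the "{{...}}" template strings are
# hypothetical placeholders.
grader = OpenAI::EvalStringCheckGrader.new(
  input: "{{sample.output_text}}",
  name: "exact_match",
  operation: :eq, # :eq, :ne, :like, or :ilike, per the Operation enum below
  reference: "{{item.expected}}"
) # `type:` defaults to :string_check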
module Operation extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalStringCheckGrader::Operation) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::EvalStringCheckGrader::Operation) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - EQ = T.let(:eq, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) - NE = T.let(:ne, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) - LIKE = T.let(:like, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) - ILIKE = T.let(:ilike, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) + EQ = T.let(:eq, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) + NE = T.let(:ne, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) + LIKE = + T.let(:like, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) + ILIKE = + T.let(:ilike, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/eval_text_similarity_grader.rbi b/rbi/openai/models/eval_text_similarity_grader.rbi index 6b1c6e54..fad82523 100644 --- a/rbi/openai/models/eval_text_similarity_grader.rbi +++ b/rbi/openai/models/eval_text_similarity_grader.rbi @@ -3,9 +3,13 @@ module OpenAI module Models class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. - sig { returns(OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol) } + sig do + returns(OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol) + end attr_accessor :evaluation_metric # The text being graded. @@ -34,14 +38,14 @@ module OpenAI # A TextSimilarityGrader object which grades text based on similarity metrics. sig do params( - evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, + evaluation_metric: + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, input: String, pass_threshold: Float, reference: String, name: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, @@ -57,44 +61,96 @@ module OpenAI name: nil, # The type of grader. type: :text_similarity - ); end + ) + end + sig do - override - .returns( - { - evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, - input: String, - pass_threshold: Float, - reference: String, - type: Symbol, - name: String - } - ) + override.returns( + { + evaluation_metric: + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, + input: String, + pass_threshold: Float, + reference: String, + type: Symbol, + name: String + } + ) + end + def to_hash end - def to_hash; end # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. 
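# --- Usage sketch (annotation, not generated code). The metric Symbols come
# from the EvaluationMetric enum that follows; the template strings are
# hypothetical placeholders.
grader = OpenAI::EvalTextSimilarityGrader.new(
  evaluation_metric: :fuzzy_match,
  input: "{{sample.output_text}}",
  pass_threshold: 0.8,
  reference: "{{item.reference}}",
  name: "similarity_check" # optional; `type:` defaults to :text_similarity
)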
module EvaluationMetric extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric) + end OrSymbol = T.type_alias { T.any(Symbol, String) } FUZZY_MATCH = - T.let(:fuzzy_match, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - BLEU = T.let(:bleu, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - GLEU = T.let(:gleu, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - METEOR = T.let(:meteor, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_1 = T.let(:rouge_1, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_2 = T.let(:rouge_2, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_3 = T.let(:rouge_3, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_4 = T.let(:rouge_4, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_5 = T.let(:rouge_5, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) - ROUGE_L = T.let(:rouge_l, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) + T.let( + :fuzzy_match, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + BLEU = + T.let( + :bleu, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + GLEU = + T.let( + :gleu, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + METEOR = + T.let( + :meteor, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_1 = + T.let( + :rouge_1, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_2 = + T.let( + :rouge_2, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_3 = + T.let( + :rouge_3, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_4 = + T.let( + :rouge_4, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_5 = + T.let( + :rouge_5, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_L = + T.let( + :rouge_l, + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/eval_update_params.rbi b/rbi/openai/models/eval_update_params.rbi index 857900a1..a5bbc2a7 100644 --- a/rbi/openai/models/eval_update_params.rbi +++ b/rbi/openai/models/eval_update_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. 
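# --- Usage sketch (annotation, not generated code) for the update params
# whose full constructor appears in the next hunk. Metadata keys are Symbols
# and values Strings, per T::Hash[Symbol, String].
update = OpenAI::EvalUpdateParams.new(
  name: "regression-suite-v2",      # rename the evaluation
  metadata: { owner: "evals-team" } # up to 16 key-value pairs
)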
@@ -26,9 +28,8 @@ module OpenAI params( metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -41,18 +42,20 @@ module OpenAI # Rename the evaluation. name: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index 83a8e983..b3c85236 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation. sig { returns(String) } attr_accessor :id @@ -14,7 +16,10 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do returns( - T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig) + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ) ) end attr_accessor :data_source_config @@ -41,9 +46,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel ) @@ -62,26 +67,25 @@ module OpenAI params( id: String, created_at: Integer, - data_source_config: T.any( - OpenAI::Models::EvalCustomDataSourceConfig, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStoredCompletionsDataSourceConfig - ), + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - testing_criteria: T::Array[ - T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel - ) - ], + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalLabelModelGrader::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::OrHash + ) + ], object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation. @@ -103,42 +107,59 @@ module OpenAI testing_criteria:, # The object type. 
object: :eval - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source_config: T.any(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - object: Symbol, - testing_criteria: T::Array[ + override.returns( + { + id: String, + created_at: Integer, + data_source_config: + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + testing_criteria: + T::Array[ T.any( - OpenAI::Models::EvalLabelModelGrader, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel ) ] - } - ) + } + ) + end + def to_hash end - def to_hash; end # Configuration of data sources used in runs of the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + Variants = + T.type_alias do + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Variants + ] + ) + end + def self.variants end - def self.variants; end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -146,7 +167,21 @@ module OpenAI module TestingCriterion extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::EvalLabelModelGrader, + OpenAI::EvalStringCheckGrader, + OpenAI::EvalTextSimilarityGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel + ) + end + class Python < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the grader. sig { returns(String) } attr_accessor :name @@ -175,8 +210,13 @@ module OpenAI # A PythonGrader object that runs a python script on the input. sig do - params(name: String, source: String, image_tag: String, pass_threshold: Float, type: Symbol) - .returns(T.attached_class) + params( + name: String, + source: String, + image_tag: String, + pass_threshold: Float, + type: Symbol + ).returns(T.attached_class) end def self.new( # The name of the grader. @@ -189,7 +229,9 @@ module OpenAI pass_threshold: nil, # The object type, which is always `python`. type: :python - ); end + ) + end + sig do override.returns( { @@ -201,12 +243,22 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class ScoreModel < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input + ] + ) + end attr_accessor :input # The model to use for the evaluation. @@ -245,15 +297,17 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. 
sig do params( - input: T::Array[T.any(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::OrHash + ], model: String, name: String, pass_threshold: Float, range: T::Array[Float], sampling_params: T.anything, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The input text. This may include template strings. @@ -270,30 +324,38 @@ module OpenAI sampling_params: nil, # The object type, which is always `score_model`. type: :score_model - ); end + ) + end + sig do - override - .returns( - { - input: T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) + override.returns( + { + input: + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input + ], + model: String, + name: String, + type: Symbol, + pass_threshold: Float, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ) ) @@ -302,18 +364,29 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) } + sig do + returns( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) + end attr_accessor :role # The type of the message input. Always `message`. sig do returns( - T.nilable(OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol) + T.nilable( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ) ) end attr_reader :type - sig { params(type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -323,16 +396,17 @@ module OpenAI # interactions. 
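# --- Shape illustration only (annotation, not generated code): response
# models are normally built by the SDK, but the constructor declared just
# below shows the message-input shape.
msg = OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input.new(
  content: "Rate the response from 1 to 10: {{sample.output_text}}",
  role: :user # :user, :assistant, :system, or :developer
) # `type:` may be omitted; when present it is always :message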
sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash + ), + role: + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, + type: + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -342,28 +416,45 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText ), - role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, + type: + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -373,24 +464,31 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -399,11 +497,19 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = - T.let(:user, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol) + T.let( + :user, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ) ASSISTANT = T.let( :assistant, @@ -421,12 +527,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -434,7 +542,12 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = @@ -444,23 +557,27 @@ module OpenAI ) sig do - override - .returns( - T::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end sig do - override - .returns( - [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel] - ) + override.returns( + T::Array[ + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 6ac91b72..c3f2299e 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -4,28 +4,34 @@ module OpenAI module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A StoredCompletionsRunDataSource configuration describing a set of filters sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) ) end attr_accessor :source # The type of run data source. Always `completions`. 
- sig { returns(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol) } + sig do + returns( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol + ) + end attr_accessor :type sig do returns( T.nilable( T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference ) ) ) @@ -34,13 +40,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -51,42 +56,43 @@ module OpenAI sig { params(model: String).void } attr_writer :model - sig { returns(T.nilable(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams)) } + sig do + returns( + T.nilable( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + ) + ) + end attr_reader :sampling_params sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A CompletionsRunDataSource object describing a model sampling configuration. sig do params( - source: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions - ), - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, - input_messages: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions::OrHash + ), + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + sampling_params: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::OrHash + ).returns(T.attached_class) end def self.new( # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -97,35 +103,59 @@ module OpenAI # The name of the model to use for generating completions (e.g. "o3-mini"). 
model: nil, sampling_params: nil - ); end + ) + end + sig do - override - .returns( - { - source: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + override.returns( + { + source: + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, - input_messages: T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A StoredCompletionsRunDataSource configuration describing a set of filters module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. - sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content]) } + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ] + ) + end attr_accessor :content # The type of jsonl source. Always `file_content`. @@ -134,34 +164,39 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. 
type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -177,16 +212,26 @@ module OpenAI sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -201,12 +246,18 @@ module OpenAI id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class StoredCompletions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type @@ -245,8 +296,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), model: T.nilable(String), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # An optional Unix timestamp to filter items created after this time. @@ -266,30 +316,34 @@ module OpenAI model: nil, # The type of source. Always `stored_completions`. type: :stored_completions - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - limit: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(String) - } - ) + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + limit: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + model: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The type of run data source. Always `completions`. 
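The `OrHash` aliases threaded through these signatures let every `params` position accept either a typed model instance or a plain hash. A minimal caller-side sketch, assuming only the constant paths and required keywords visible in the hunks above (the literal values are invented for illustration):

    # Hypothetical usage; `source:` is satisfied by a plain hash via
    # FileID::OrHash, and `type:` matches the Type::OrSymbol parameter.
    data_source = OpenAI::Evals::CreateEvalCompletionsRunDataSource.new(
      source: { id: "file-abc123", type: :file_id }, # invented file id
      type: :completions
    )
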
@@ -297,28 +351,54 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETIONS = - T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) + T.let( + :completions, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol + ] + ) + end + def self.values + end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. sig do returns( T::Array[ T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ] ) @@ -331,16 +411,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - ) - ], + template: + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -348,22 +427,25 @@ module OpenAI template:, # The type of input messages. Always `template`. type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. 
Instructions given with the `developer` or `system` role take @@ -373,14 +455,25 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) + end + class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText ) ) end @@ -390,7 +483,7 @@ module OpenAI # `developer`. sig do returns( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol ) end attr_accessor :role @@ -399,7 +492,7 @@ module OpenAI sig do returns( T.nilable( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol ) ) end @@ -407,9 +500,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - ) - .void + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).void end attr_writer :type @@ -420,16 +513,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ), - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash + ), + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -439,28 +533,47 @@ module OpenAI role:, # The type of the message input. Always `message`. 
type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText ), - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - } - ) + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -470,24 +583,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -497,40 +619,43 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role) + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( :user, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) ASSISTANT = T.let( :assistant, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -539,39 +664,47 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type) + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( :message, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. 
Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -580,27 +713,39 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ item_reference: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -630,8 +775,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -642,7 +791,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil - ); end + ) + end + sig do override.returns( { @@ -653,7 +804,8 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index 66df840b..e9715934 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ) ) end @@ -22,40 +24,59 @@ module OpenAI # eval sig do params( - source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID - ), + source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID::OrHash + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( source:, # The type of data source. Always `jsonl`. 
type: :jsonl - ); end + ) + end + sig do - override - .returns( - { - source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID + override.returns( + { + source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ), - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. - sig { returns(T::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content]) } + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ] + ) + end attr_accessor :content # The type of jsonl source. Always `file_content`. @@ -64,34 +85,39 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -107,16 +133,26 @@ module OpenAI sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -131,18 +167,23 @@ module OpenAI id:, # The type of jsonl source. Always `file_id`. 
type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] - ) + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/evals/eval_api_error.rbi b/rbi/openai/models/evals/eval_api_error.rbi index 906200ec..153211d5 100644 --- a/rbi/openai/models/evals/eval_api_error.rbi +++ b/rbi/openai/models/evals/eval_api_error.rbi @@ -6,6 +6,8 @@ module OpenAI module Evals class EvalAPIError < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The error code. sig { returns(String) } attr_accessor :code @@ -21,9 +23,12 @@ module OpenAI code:, # The error message. message: - ); end - sig { override.returns({code: String, message: String}) } - def to_hash; end + ) + end + + sig { override.returns({ code: String, message: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_cancel_params.rbi b/rbi/openai/models/evals/run_cancel_params.rbi index 4d5aae07..0dd60f39 100644 --- a/rbi/openai/models/evals/run_cancel_params.rbi +++ b/rbi/openai/models/evals/run_cancel_params.rbi @@ -7,17 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :eval_id sig do - params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - .returns(T.attached_class) + params( + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}) end - def self.new(eval_id:, request_options: {}); end - sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { eval_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 41ea134d..923b2437 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Evals class RunCancelResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run. sig { returns(String) } attr_accessor :id @@ -16,8 +18,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions ) ) @@ -25,10 +27,10 @@ module OpenAI attr_accessor :data_source # An object representing an error response from the Eval API. 
- sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The identifier of the associated evaluation. @@ -57,11 +59,21 @@ module OpenAI attr_accessor :object # Usage statistics for each model during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage]) } + sig do + returns( + T::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage] + ) + end attr_accessor :per_model_usage # Results per testing criteria applied during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult + ] + ) + end attr_accessor :per_testing_criteria_results # The URL to the rendered evaluation run report on the UI dashboard. @@ -74,9 +86,9 @@ module OpenAI sig do params( - result_counts: T.any(OpenAI::Models::Evals::RunCancelResponse::ResultCounts, OpenAI::Internal::AnyHash) - ) - .void + result_counts: + OpenAI::Models::Evals::RunCancelResponse::ResultCounts::OrHash + ).void end attr_writer :result_counts @@ -89,25 +101,31 @@ module OpenAI params( id: String, created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions - ), - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::OrHash + ), + error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: String, - per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunCancelResponse::PerModelUsage, OpenAI::Internal::AnyHash)], - per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::PerModelUsage::OrHash + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult::OrHash + ], report_url: String, - result_counts: T.any(OpenAI::Models::Evals::RunCancelResponse::ResultCounts, OpenAI::Internal::AnyHash), + result_counts: + OpenAI::Models::Evals::RunCancelResponse::ResultCounts::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run. @@ -143,39 +161,61 @@ module OpenAI status:, # The type of the object. Always "eval.run". 
object: :"eval.run" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + override.returns( + { + id: String, + created_at: Integer, + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions ), - error: OpenAI::Models::Evals::EvalAPIError, - eval_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - name: String, - object: Symbol, - per_model_usage: T::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], - per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], - report_url: String, - result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, - status: String - } - ) + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::PerModelUsage + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult + ], + report_url: String, + result_counts: + OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + ) + end + def to_hash end - def to_hash; end # Information about the run's data source. module DataSource extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + ) + end + class Completions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A EvalResponsesSource object describing a run data source configuration. sig do returns( @@ -206,13 +246,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -223,42 +262,42 @@ module OpenAI sig { params(model: String).void } attr_writer :model - sig { returns(T.nilable(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams)) } + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + ) + ) + end attr_reader :sampling_params sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A ResponsesRunDataSource object describing a model sampling configuration. 
sig do params( - source: T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - ), - input_messages: T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ), + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A EvalResponsesSource object describing a run data source configuration. @@ -269,37 +308,56 @@ module OpenAI sampling_params: nil, # The type of run data source. Always `completions`. type: :completions - ); end + ) + end + sig do - override - .returns( - { - source: T.any( + override.returns( + { + source: + T.any( OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses ), - type: Symbol, - input_messages: T.any( + type: Symbol, + input_messages: + T.any( OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A EvalResponsesSource object describing a run data source configuration. module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. 
sig do returns( - T::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content + ] ) end attr_accessor :content @@ -310,34 +368,41 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -350,20 +415,29 @@ module OpenAI sig do params( item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, - T.anything] + sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -372,18 +446,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end def self.new( # The identifier of the file. id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of run data source. Always `responses`. sig { returns(Symbol) } attr_accessor :type @@ -425,7 +507,9 @@ module OpenAI # Optional reasoning effort parameter. This is a query parameter used to select # responses. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end attr_accessor :reasoning_effort # Sampling temperature. This is a query parameter used to select responses. 
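Alongside `OrHash`, each union module now also defines a `Variants` type alias (see the `Source` hunk above), so `self.variants` can return `T::Array[...::Variants]` rather than a fixed-length tuple type. A hypothetical downstream signature, assuming only that alias (the method name is invented):

    # Sketch of a sig that names the union once via the new alias.
    sig do
      params(
        source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Variants
      ).void
    end
    def inspect_source(source)
    end
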
@@ -450,13 +534,13 @@ module OpenAI instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), temperature: T.nilable(Float), top_p: T.nilable(Float), users: T.nilable(T::Array[String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Whether to allow parallel tool calls. This is a query parameter used to select @@ -491,42 +575,58 @@ module OpenAI users: nil, # The type of run data source. Always `responses`. type: :responses - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) + override.returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
sig do @@ -547,16 +647,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -564,22 +663,25 @@ module OpenAI template:, # The type of input messages. Always `template`. type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -589,7 +691,20 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The content of the message. sig { returns(String) } attr_accessor :content @@ -598,24 +713,36 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ) ) @@ -643,9 +770,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -656,16 +783,17 @@ module OpenAI # interactions. 
sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -675,28 +803,47 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ), - role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -706,24 +853,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. 
type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -733,7 +889,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -759,14 +918,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -775,7 +934,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -786,28 +948,33 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -816,27 +983,41 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. 
Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -866,8 +1047,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -878,7 +1063,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil - ); end + ) + end + sig do override.returns( { @@ -889,20 +1076,26 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Variants + ] + ) + end + def self.variants end - def self.variants; end end class PerModelUsage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -935,8 +1128,7 @@ module OpenAI model_name: String, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -951,24 +1143,29 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - invocation_count: Integer, - model_name: String, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of tests failed for this criteria. 
sig { returns(Integer) } attr_accessor :failed @@ -981,7 +1178,13 @@ module OpenAI sig { returns(String) } attr_accessor :testing_criteria - sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + sig do + params( + failed: Integer, + passed: Integer, + testing_criteria: String + ).returns(T.attached_class) + end def self.new( # Number of tests failed for this criteria. failed:, @@ -989,12 +1192,22 @@ module OpenAI passed:, # A description of the testing criteria. testing_criteria: - ); end - sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { failed: Integer, passed: Integer, testing_criteria: String } + ) + end + def to_hash + end end class ResultCounts < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of output items that resulted in an error. sig { returns(Integer) } attr_accessor :errored @@ -1029,9 +1242,21 @@ module OpenAI passed:, # Total number of executed output items. total: - ); end - sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 4a286695..b1f07b5d 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -7,13 +7,15 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Details about the run's data source. sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) ) end @@ -37,17 +39,16 @@ module OpenAI sig do params( - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource - ), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Details about the run's data source. @@ -62,36 +63,51 @@ module OpenAI # The name of the run. 
name: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + override.returns( + { + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - request_options: OpenAI::RequestOptions - } - ) + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Details about the run's data source. module DataSource extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + ) + end + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A EvalResponsesSource object describing a run data source configuration. sig do returns( T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses ) ) end @@ -100,7 +116,7 @@ module OpenAI # The type of run data source. Always `completions`. 
sig do returns( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol ) end attr_accessor :type @@ -109,8 +125,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference ) ) ) @@ -119,13 +135,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -139,7 +154,7 @@ module OpenAI sig do returns( T.nilable( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams ) ) end @@ -147,37 +162,32 @@ module OpenAI sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A ResponsesRunDataSource object describing a model sampling configuration. 
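+          # A minimal illustrative construction (the file ID is an assumption):
+          #
+          #   CreateEvalResponsesRunDataSource.new(
+          #     source: {type: :file_id, id: "file-abc123"},
+          #     type: :completions
+          #   )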
sig do params( - source: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ), - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses::OrHash + ), + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).returns(T.attached_class) end def self.new( # A EvalResponsesSource object describing a run data source configuration. @@ -188,38 +198,56 @@ module OpenAI # The name of the model to use for generating completions (e.g. "o3-mini"). 
model: nil, sampling_params: nil - ); end + ) + end + sig do - override - .returns( - { - source: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + override.returns( + { + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses ), - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A EvalResponsesSource object describing a run data source configuration. module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. sig do returns( T::Array[ - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content ] ) end @@ -231,36 +259,41 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. 
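+                # (Illustrative) the same source supplied as a plain hash; the
+                # item payload is an assumption:
+                #
+                #   {type: :file_content, content: [{item: {question: "2 + 2?"}}]}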
type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[ - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + override.returns( + { + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -273,20 +306,29 @@ module OpenAI sig do params( item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, - T.anything] + sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -295,18 +337,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end def self.new( # The identifier of the file. id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of run data source. Always `responses`. sig { returns(Symbol) } attr_accessor :type @@ -348,7 +398,7 @@ module OpenAI # Optional reasoning effort parameter. This is a query parameter used to select # responses. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort # Sampling temperature. This is a query parameter used to select responses. @@ -373,13 +423,13 @@ module OpenAI instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), temperature: T.nilable(Float), top_p: T.nilable(Float), users: T.nilable(T::Array[String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Whether to allow parallel tool calls. This is a query parameter used to select @@ -414,36 +464,41 @@ module OpenAI users: nil, # The type of run data source. Always `responses`. 
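+                # (Illustrative) a responses source filtered by model and creation
+                # time; the concrete values are assumptions:
+                #
+                #   {type: :responses, model: "gpt-4o", created_after: 1_700_000_000}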
type: :responses - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) + override.returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The type of run data source. Always `completions`. @@ -451,36 +506,54 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETIONS = T.let( :completions, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol ) sig do - override - .returns( - T::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
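+            # For example (illustrative; `{{item.question}}` is an assumed
+            # variable, only the "item" namespace itself is defined here):
+            #
+            #   template: [
+            #     {role: "system", content: "Grade answers about {{item.name}}."},
+            #     {role: "user", content: "{{item.question}}"}
+            #   ]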
sig do returns( T::Array[ T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem ) ] ) @@ -493,16 +566,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - ], + template: + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -510,22 +582,25 @@ module OpenAI template:, # The type of input messages. Always `template`. type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -535,7 +610,20 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + end + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The content of the message. sig { returns(String) } attr_accessor :content @@ -544,25 +632,37 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). 
role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText ) ) end @@ -572,7 +672,7 @@ module OpenAI # `developer`. sig do returns( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol ) end attr_accessor :role @@ -581,7 +681,7 @@ module OpenAI sig do returns( T.nilable( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol ) ) end @@ -589,9 +689,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -602,16 +702,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -621,28 +722,47 @@ module OpenAI role:, # The type of the message input. Always `message`. 
type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText ), - role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - } - ) + role: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -652,24 +772,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -679,40 +808,43 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role) + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( :user, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) ASSISTANT = T.let( :assistant, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. 
@@ -721,39 +853,47 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type) + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( :message, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -762,27 +902,41 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. 
sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -812,8 +966,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -824,7 +982,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil - ); end + ) + end + sig do override.returns( { @@ -835,17 +995,18 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] - ) + override.returns( + T::Array[OpenAI::Evals::RunCreateParams::DataSource::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index cebc26ac..6c3c64f6 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Evals class RunCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run. sig { returns(String) } attr_accessor :id @@ -16,8 +18,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions ) ) @@ -25,10 +27,10 @@ module OpenAI attr_accessor :data_source # An object representing an error response from the Eval API. - sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The identifier of the associated evaluation. @@ -57,11 +59,21 @@ module OpenAI attr_accessor :object # Usage statistics for each model during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage]) } + sig do + returns( + T::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage] + ) + end attr_accessor :per_model_usage # Results per testing criteria applied during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult + ] + ) + end attr_accessor :per_testing_criteria_results # The URL to the rendered evaluation run report on the UI dashboard. 
@@ -74,9 +86,9 @@ module OpenAI sig do params( - result_counts: T.any(OpenAI::Models::Evals::RunCreateResponse::ResultCounts, OpenAI::Internal::AnyHash) - ) - .void + result_counts: + OpenAI::Models::Evals::RunCreateResponse::ResultCounts::OrHash + ).void end attr_writer :result_counts @@ -89,25 +101,31 @@ module OpenAI params( id: String, created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions - ), - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::OrHash + ), + error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: String, - per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunCreateResponse::PerModelUsage, OpenAI::Internal::AnyHash)], - per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::PerModelUsage::OrHash + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult::OrHash + ], report_url: String, - result_counts: T.any(OpenAI::Models::Evals::RunCreateResponse::ResultCounts, OpenAI::Internal::AnyHash), + result_counts: + OpenAI::Models::Evals::RunCreateResponse::ResultCounts::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run. @@ -143,39 +161,61 @@ module OpenAI status:, # The type of the object. Always "eval.run". object: :"eval.run" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + override.returns( + { + id: String, + created_at: Integer, + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions ), - error: OpenAI::Models::Evals::EvalAPIError, - eval_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - name: String, - object: Symbol, - per_model_usage: T::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], - per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], - report_url: String, - result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, - status: String - } - ) + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::PerModelUsage + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult + ], + report_url: String, + result_counts: + OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + ) + end + def to_hash end - def to_hash; end # Information about the run's data source. 
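+        # (Illustrative; `run` stands for a RunCreateResponse instance, and the
+        # handler methods are assumptions.) The union can be matched with Ruby
+        # pattern matching:
+        #
+        #   case run.data_source
+        #   in OpenAI::Evals::CreateEvalJSONLRunDataSource then handle_jsonl
+        #   in OpenAI::Evals::CreateEvalCompletionsRunDataSource then handle_completions
+        #   in OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions then handle_responses
+        #   end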
module DataSource extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + ) + end + class Completions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A EvalResponsesSource object describing a run data source configuration. sig do returns( @@ -206,13 +246,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -223,42 +262,42 @@ module OpenAI sig { params(model: String).void } attr_writer :model - sig { returns(T.nilable(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams)) } + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + ) + ) + end attr_reader :sampling_params sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A ResponsesRunDataSource object describing a model sampling configuration. sig do params( - source: T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - ), - input_messages: T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ), + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A EvalResponsesSource object describing a run data source configuration. @@ -269,37 +308,56 @@ module OpenAI sampling_params: nil, # The type of run data source. Always `completions`. 
type: :completions - ); end + ) + end + sig do - override - .returns( - { - source: T.any( + override.returns( + { + source: + T.any( OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses ), - type: Symbol, - input_messages: T.any( + type: Symbol, + input_messages: + T.any( OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A EvalResponsesSource object describing a run data source configuration. module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. sig do returns( - T::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content + ] ) end attr_accessor :content @@ -310,34 +368,41 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. 
type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -350,20 +415,29 @@ module OpenAI sig do params( item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, - T.anything] + sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -372,18 +446,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end def self.new( # The identifier of the file. id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of run data source. Always `responses`. sig { returns(Symbol) } attr_accessor :type @@ -425,7 +507,9 @@ module OpenAI # Optional reasoning effort parameter. This is a query parameter used to select # responses. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end attr_accessor :reasoning_effort # Sampling temperature. This is a query parameter used to select responses. @@ -450,13 +534,13 @@ module OpenAI instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), temperature: T.nilable(Float), top_p: T.nilable(Float), users: T.nilable(T::Array[String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Whether to allow parallel tool calls. This is a query parameter used to select @@ -491,42 +575,58 @@ module OpenAI users: nil, # The type of run data source. Always `responses`. 
type: :responses - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) + override.returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. sig do @@ -547,16 +647,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -564,22 +663,25 @@ module OpenAI template:, # The type of input messages. Always `template`. 
type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -589,7 +691,20 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The content of the message. sig { returns(String) } attr_accessor :content @@ -598,24 +713,36 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ) ) @@ -643,9 +770,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -656,16 +783,17 @@ module OpenAI # interactions. 
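+                # A minimal illustrative construction (the instruction text is an
+                # assumption):
+                #
+                #   EvalItem.new(
+                #     role: :system,
+                #     content: "Answer in one short sentence."
+                #   )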
sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -675,28 +803,47 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ), - role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -706,24 +853,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. 
type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -733,7 +889,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -759,14 +918,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -775,7 +934,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -786,28 +948,33 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -816,27 +983,41 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. 
Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -866,8 +1047,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -878,7 +1063,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil - ); end + ) + end + sig do override.returns( { @@ -889,20 +1076,26 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Variants + ] + ) + end + def self.variants end - def self.variants; end end class PerModelUsage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -935,8 +1128,7 @@ module OpenAI model_name: String, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -951,24 +1143,29 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - invocation_count: Integer, - model_name: String, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of tests failed for this criteria. 
sig { returns(Integer) } attr_accessor :failed @@ -981,7 +1178,13 @@ module OpenAI sig { returns(String) } attr_accessor :testing_criteria - sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + sig do + params( + failed: Integer, + passed: Integer, + testing_criteria: String + ).returns(T.attached_class) + end def self.new( # Number of tests failed for this criteria. failed:, @@ -989,12 +1192,22 @@ module OpenAI passed:, # A description of the testing criteria. testing_criteria: - ); end - sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { failed: Integer, passed: Integer, testing_criteria: String } + ) + end + def to_hash + end end class ResultCounts < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of output items that resulted in an error. sig { returns(Integer) } attr_accessor :errored @@ -1029,9 +1242,21 @@ module OpenAI passed:, # Total number of executed output items. total: - ); end - sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_delete_params.rbi b/rbi/openai/models/evals/run_delete_params.rbi index 6f4f786a..3fa9908b 100644 --- a/rbi/openai/models/evals/run_delete_params.rbi +++ b/rbi/openai/models/evals/run_delete_params.rbi @@ -7,17 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :eval_id sig do - params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - .returns(T.attached_class) + params( + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}) end - def self.new(eval_id:, request_options: {}); end - sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { eval_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_delete_response.rbi b/rbi/openai/models/evals/run_delete_response.rbi index 2130f09b..b7556eda 100644 --- a/rbi/openai/models/evals/run_delete_response.rbi +++ b/rbi/openai/models/evals/run_delete_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Evals class RunDeleteResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T.nilable(T::Boolean)) } attr_reader :deleted @@ -22,11 +24,21 @@ module OpenAI sig { params(run_id: String).void } attr_writer :run_id - sig { params(deleted: T::Boolean, object: String, run_id: String).returns(T.attached_class) } - def self.new(deleted: nil, object: nil, run_id: nil); end - - sig { override.returns({deleted: T::Boolean, object: String, run_id: String}) } - def to_hash; end + sig do + params(deleted: T::Boolean, object: String, run_id: String).returns( + T.attached_class + ) + end + def self.new(deleted: nil, object: nil, run_id: nil) + end + + sig do + 
override.returns( + { deleted: T::Boolean, object: String, run_id: String } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_list_params.rbi b/rbi/openai/models/evals/run_list_params.rbi index aa4151ed..a442bc5d 100644 --- a/rbi/openai/models/evals/run_list_params.rbi +++ b/rbi/openai/models/evals/run_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last run from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -23,29 +25,36 @@ module OpenAI # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. - sig { returns(T.nilable(OpenAI::Models::Evals::RunListParams::Order::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::Evals::RunListParams::Order::OrSymbol)) + end attr_reader :order - sig { params(order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol).void } + sig do + params(order: OpenAI::Evals::RunListParams::Order::OrSymbol).void + end attr_writer :order # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # | `canceled`. - sig { returns(T.nilable(OpenAI::Models::Evals::RunListParams::Status::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::Evals::RunListParams::Status::OrSymbol)) + end attr_reader :status - sig { params(status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol).void } + sig do + params(status: OpenAI::Evals::RunListParams::Status::OrSymbol).void + end attr_writer :status sig do params( after: String, limit: Integer, - order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Evals::RunListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last run from the previous pagination request. @@ -59,34 +68,42 @@ module OpenAI # | `canceled`. status: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Evals::RunListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. 
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Order) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Evals::RunListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) + ASC = T.let(:asc, OpenAI::Evals::RunListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::Evals::RunListParams::Order::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Evals::RunListParams::Order::TaggedSymbol] + ) + end + def self.values + end end # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` @@ -94,17 +111,34 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Status) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Evals::RunListParams::Status) } OrSymbol = T.type_alias { T.any(Symbol, String) } - QUEUED = T.let(:queued, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) - CANCELED = T.let(:canceled, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol]) } - def self.values; end + QUEUED = + T.let(:queued, OpenAI::Evals::RunListParams::Status::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Evals::RunListParams::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Evals::RunListParams::Status::TaggedSymbol + ) + CANCELED = + T.let(:canceled, OpenAI::Evals::RunListParams::Status::TaggedSymbol) + FAILED = + T.let(:failed, OpenAI::Evals::RunListParams::Status::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::Evals::RunListParams::Status::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index e00b1471..8bce072b 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Evals class RunListResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run. sig { returns(String) } attr_accessor :id @@ -16,8 +18,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions ) ) @@ -25,10 +27,10 @@ module OpenAI attr_accessor :data_source # An object representing an error response from the Eval API. 
- sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The identifier of the associated evaluation. @@ -57,11 +59,21 @@ module OpenAI attr_accessor :object # Usage statistics for each model during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage]) } + sig do + returns( + T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage] + ) + end attr_accessor :per_model_usage # Results per testing criteria applied during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult + ] + ) + end attr_accessor :per_testing_criteria_results # The URL to the rendered evaluation run report on the UI dashboard. @@ -74,9 +86,9 @@ module OpenAI sig do params( - result_counts: T.any(OpenAI::Models::Evals::RunListResponse::ResultCounts, OpenAI::Internal::AnyHash) - ) - .void + result_counts: + OpenAI::Models::Evals::RunListResponse::ResultCounts::OrHash + ).void end attr_writer :result_counts @@ -89,25 +101,31 @@ module OpenAI params( id: String, created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions - ), - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::OrHash + ), + error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: String, - per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunListResponse::PerModelUsage, OpenAI::Internal::AnyHash)], - per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunListResponse::PerModelUsage::OrHash + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult::OrHash + ], report_url: String, - result_counts: T.any(OpenAI::Models::Evals::RunListResponse::ResultCounts, OpenAI::Internal::AnyHash), + result_counts: + OpenAI::Models::Evals::RunListResponse::ResultCounts::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run. @@ -143,39 +161,59 @@ module OpenAI status:, # The type of the object. Always "eval.run". 
object: :"eval.run" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + override.returns( + { + id: String, + created_at: Integer, + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions ), - error: OpenAI::Models::Evals::EvalAPIError, - eval_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - name: String, - object: Symbol, - per_model_usage: T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], - per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], - report_url: String, - result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, - status: String - } - ) + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: + T::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult + ], + report_url: String, + result_counts: + OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + ) + end + def to_hash end - def to_hash; end # Information about the run's data source. module DataSource extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions + ) + end + class Completions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A EvalResponsesSource object describing a run data source configuration. sig do returns( @@ -206,13 +244,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -223,42 +260,42 @@ module OpenAI sig { params(model: String).void } attr_writer :model - sig { returns(T.nilable(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams)) } + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + ) + ) + end attr_reader :sampling_params sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A ResponsesRunDataSource object describing a model sampling configuration. 
sig do params( - source: T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - ), - input_messages: T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ), + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A EvalResponsesSource object describing a run data source configuration. @@ -269,37 +306,56 @@ module OpenAI sampling_params: nil, # The type of run data source. Always `completions`. type: :completions - ); end + ) + end + sig do - override - .returns( - { - source: T.any( + override.returns( + { + source: + T.any( OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses ), - type: Symbol, - input_messages: T.any( + type: Symbol, + input_messages: + T.any( OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A EvalResponsesSource object describing a run data source configuration. module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. 
sig do returns( - T::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content + ] ) end attr_accessor :content @@ -310,34 +366,41 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -350,20 +413,29 @@ module OpenAI sig do params( item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, - T.anything] + sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -372,18 +444,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end def self.new( # The identifier of the file. id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of run data source. Always `responses`. sig { returns(Symbol) } attr_accessor :type @@ -425,7 +505,9 @@ module OpenAI # Optional reasoning effort parameter. This is a query parameter used to select # responses. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end attr_accessor :reasoning_effort # Sampling temperature. This is a query parameter used to select responses. 
@@ -450,13 +532,13 @@ module OpenAI instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), temperature: T.nilable(Float), top_p: T.nilable(Float), users: T.nilable(T::Array[String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Whether to allow parallel tool calls. This is a query parameter used to select @@ -491,42 +573,58 @@ module OpenAI users: nil, # The type of run data source. Always `responses`. type: :responses - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) + override.returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. sig do @@ -547,16 +645,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. 
May include variable @@ -564,22 +661,25 @@ module OpenAI template:, # The type of input messages. Always `template`. type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -589,7 +689,20 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The content of the message. sig { returns(String) } attr_accessor :content @@ -598,24 +711,36 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Text inputs to the model - can contain template strings. sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ) ) @@ -643,9 +768,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -656,16 +781,17 @@ module OpenAI # interactions. 
sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -675,28 +801,47 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ), - role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. sig { returns(String) } attr_accessor :text @@ -706,24 +851,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. 
type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -733,7 +887,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -759,14 +916,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. @@ -775,7 +932,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -786,28 +946,33 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -816,27 +981,41 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. 
Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -866,8 +1045,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -878,7 +1061,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil - ); end + ) + end + sig do override.returns( { @@ -889,20 +1074,26 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Variants + ] + ) + end + def self.variants end - def self.variants; end end class PerModelUsage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -935,8 +1126,7 @@ module OpenAI model_name: String, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -951,24 +1141,29 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - invocation_count: Integer, - model_name: String, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of tests failed for this criteria. 
sig { returns(Integer) } attr_accessor :failed @@ -981,7 +1176,13 @@ module OpenAI sig { returns(String) } attr_accessor :testing_criteria - sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + sig do + params( + failed: Integer, + passed: Integer, + testing_criteria: String + ).returns(T.attached_class) + end def self.new( # Number of tests failed for this criteria. failed:, @@ -989,12 +1190,22 @@ module OpenAI passed:, # A description of the testing criteria. testing_criteria: - ); end - sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { failed: Integer, passed: Integer, testing_criteria: String } + ) + end + def to_hash + end end class ResultCounts < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of output items that resulted in an error. sig { returns(Integer) } attr_accessor :errored @@ -1029,9 +1240,21 @@ module OpenAI passed:, # Total number of executed output items. total: - ); end - sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_retrieve_params.rbi b/rbi/openai/models/evals/run_retrieve_params.rbi index 7dd9ed20..ad837ffe 100644 --- a/rbi/openai/models/evals/run_retrieve_params.rbi +++ b/rbi/openai/models/evals/run_retrieve_params.rbi @@ -7,17 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :eval_id sig do - params(eval_id: String, request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash)) - .returns(T.attached_class) + params( + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(eval_id:, request_options: {}) end - def self.new(eval_id:, request_options: {}); end - sig { override.returns({eval_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { eval_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 70df1bc9..d1770d9c 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Evals class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run. sig { returns(String) } attr_accessor :id @@ -16,8 +18,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions ) ) @@ -25,10 +27,10 @@ module OpenAI attr_accessor :data_source # An object representing an error response from the Eval API. 
- sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The identifier of the associated evaluation. @@ -57,11 +59,21 @@ module OpenAI attr_accessor :object # Usage statistics for each model during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage]) } + sig do + returns( + T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage] + ) + end attr_accessor :per_model_usage # Results per testing criteria applied during the evaluation run. - sig { returns(T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult + ] + ) + end attr_accessor :per_testing_criteria_results # The URL to the rendered evaluation run report on the UI dashboard. @@ -69,14 +81,16 @@ module OpenAI attr_accessor :report_url # Counters summarizing the outcomes of the evaluation run. - sig { returns(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts) } + sig do + returns(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts) + end attr_reader :result_counts sig do params( - result_counts: T.any(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, OpenAI::Internal::AnyHash) - ) - .void + result_counts: + OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts::OrHash + ).void end attr_writer :result_counts @@ -89,25 +103,31 @@ module OpenAI params( id: String, created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions - ), - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::OrHash + ), + error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), model: String, name: String, - per_model_usage: T::Array[T.any(OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage, OpenAI::Internal::AnyHash)], - per_testing_criteria_results: T::Array[T.any(OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult, OpenAI::Internal::AnyHash)], + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage::OrHash + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult::OrHash + ], report_url: String, - result_counts: T.any(OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, OpenAI::Internal::AnyHash), + result_counts: + OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run. @@ -143,39 +163,61 @@ module OpenAI status:, # The type of the object. Always "eval.run". 
object: :"eval.run" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, + override.returns( + { + id: String, + created_at: Integer, + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions ), - error: OpenAI::Models::Evals::EvalAPIError, - eval_id: String, - metadata: T.nilable(T::Hash[Symbol, String]), - model: String, - name: String, - object: Symbol, - per_model_usage: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], - per_testing_criteria_results: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], - report_url: String, - result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, - status: String - } - ) + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: T.nilable(T::Hash[Symbol, String]), + model: String, + name: String, + object: Symbol, + per_model_usage: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage + ], + per_testing_criteria_results: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult + ], + report_url: String, + result_counts: + OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + ) + end + def to_hash end - def to_hash; end # Information about the run's data source. module DataSource extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + ) + end + class Completions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A EvalResponsesSource object describing a run data source configuration. sig do returns( @@ -206,13 +248,12 @@ module OpenAI sig do params( - input_messages: T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - .void + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ) + ).void end attr_writer :input_messages @@ -223,42 +264,42 @@ module OpenAI sig { params(model: String).void } attr_writer :model - sig { returns(T.nilable(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams)) } + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + ) + ) + end attr_reader :sampling_params sig do params( - sampling_params: T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - ) - .void + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams::OrHash + ).void end attr_writer :sampling_params # A ResponsesRunDataSource object describing a model sampling configuration. 
sig do params( - source: T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - ), - input_messages: T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ), + source: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference::OrHash + ), model: String, - sampling_params: T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ), + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A EvalResponsesSource object describing a run data source configuration. @@ -269,37 +310,56 @@ module OpenAI sampling_params: nil, # The type of run data source. Always `completions`. type: :completions - ); end + ) + end + sig do - override - .returns( - { - source: T.any( + override.returns( + { + source: + T.any( OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses ), - type: Symbol, - input_messages: T.any( + type: Symbol, + input_messages: + T.any( OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference ), - model: String, - sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - } - ) + model: String, + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams + } + ) + end + def to_hash end - def to_hash; end # A EvalResponsesSource object describing a run data source configuration. module Source extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + ) + end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the jsonl file. 
sig do returns( - T::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content + ] ) end attr_accessor :content @@ -310,34 +370,41 @@ module OpenAI sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - ], + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The content of the jsonl file. content:, # The type of jsonl source. Always `file_content`. type: :file_content - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], - type: Symbol - } - ) + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -350,20 +417,29 @@ module OpenAI sig do params( item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, - T.anything] + sample: T::Hash[Symbol, T.anything] ).returns(T.attached_class) end - def self.new(item:, sample: nil); end + def self.new(item:, sample: nil) + end sig do - override.returns({item: T::Hash[Symbol, T.anything], sample: T::Hash[Symbol, T.anything]}) + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash end - def to_hash; end end end class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier of the file. sig { returns(String) } attr_accessor :id @@ -372,18 +448,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(id: String, type: Symbol).returns(T.attached_class) } + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end def self.new( # The identifier of the file. id:, # The type of jsonl source. Always `file_id`. type: :file_id - ); end - sig { override.returns({id: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end end class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of run data source. Always `responses`. sig { returns(Symbol) } attr_accessor :type @@ -425,7 +509,9 @@ module OpenAI # Optional reasoning effort parameter. This is a query parameter used to select # responses. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol)) } + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end attr_accessor :reasoning_effort # Sampling temperature. This is a query parameter used to select responses. 
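# --- Usage sketch (editorial addition, not part of the patch) ---
# The point of the `OrHash` aliases introduced in these hunks: wherever a
# signature names `SomeModel::OrHash`, either a constructed model or a plain
# hash with the same keys typechecks. Both calls below are a minimal sketch
# built only from the signatures shown here; the string values are hypothetical.
source = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID.new(
  id: "file-abc123" # `type:` defaults to :file_id per the signature above
)
data_source = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions.new(
  source: { id: "file-abc123", type: :file_id }, # plain hash, accepted via Source::FileID::OrHash
  model: "gpt-4o-mini"                           # hypothetical model name
)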
@@ -450,13 +536,13 @@ module OpenAI instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), temperature: T.nilable(Float), top_p: T.nilable(Float), users: T.nilable(T::Array[String]), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Whether to allow parallel tool calls. This is a query parameter used to select @@ -491,42 +577,58 @@ module OpenAI users: nil, # The type of run data source. Always `responses`. type: :responses - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) + override.returns( + { + type: Symbol, + allow_parallel_tool_calls: T.nilable(T::Boolean), + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash end - def to_hash; end end sig do - override - .returns( - [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Variants + ] + ) + end + def self.variants end - def self.variants; end end module InputMessages extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
sig do @@ -547,16 +649,15 @@ module OpenAI sig do params( - template: T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash + ) + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of chat messages forming the prompt or context. May include variable @@ -564,22 +665,25 @@ module OpenAI template:, # The type of input messages. Always `template`. type: :template - ); end + ) + end + sig do - override - .returns( - { - template: T::Array[ + override.returns( + { + template: + T::Array[ T.any( OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem ) ], - type: Symbol - } - ) + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -589,7 +693,20 @@ module OpenAI module Template extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The content of the message. sig { returns(String) } attr_accessor :content @@ -598,24 +715,36 @@ module OpenAI sig { returns(String) } attr_accessor :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end def self.new( # The content of the message. content:, # The role of the message (e.g. "system", "assistant", "user"). role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # Text inputs to the model - can contain template strings. 
sig do returns( T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ) ) @@ -643,9 +772,9 @@ module OpenAI sig do params( - type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .void + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void end attr_writer :type @@ -656,16 +785,17 @@ module OpenAI # interactions. sig do params( - content: T.any( - String, - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text inputs to the model - can contain template strings. @@ -675,28 +805,47 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, - OpenAI::Models::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText ), - role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # Text inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(T.self_type, OpenAI::Internal::AnyHash) + end + # The text output from the model. 
sig { returns(String) } attr_accessor :text @@ -706,24 +855,33 @@ module OpenAI attr_accessor :type # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The text output from the model. text:, # The type of the output text. Always `output_text`. type: :output_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -733,7 +891,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role) + T.all( + Symbol, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -759,14 +920,14 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end # The type of the message input. Always `message`. 
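# --- Usage sketch (editorial addition, not part of the patch) ---
# `Role::OrSymbol` above is T.any(Symbol, String), so bare symbols typecheck,
# and `content` may be a plain String, a ResponseInputText hash, or an
# OutputText hash per the Content union. A hypothetical template item:
eval_item = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem.new(
  content: "Grade this answer: {{item.answer}}", # template string, per the docs above
  role: :user # any Role::OrSymbol value; `type:` may be omitted
)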
@@ -775,7 +936,10 @@ module OpenAI TaggedSymbol = T.type_alias do - T.all(Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type) + T.all( + Symbol, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type + ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -786,28 +950,33 @@ module OpenAI ) sig do - override - .returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants end - def self.variants; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -816,27 +985,41 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - sig { params(item_reference: String, type: Symbol).returns(T.attached_class) } + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A reference to a variable in the "item" namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference - ); end - sig { override.returns({item_reference: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Variants + ] + ) + end + def self.variants end - def self.variants; end end class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } attr_reader :max_completion_tokens @@ -866,8 +1049,12 @@ module OpenAI attr_writer :top_p sig do - params(max_completion_tokens: Integer, seed: Integer, temperature: Float, top_p: Float) - .returns(T.attached_class) + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. @@ -878,7 +1065,9 @@ module OpenAI temperature: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
top_p: nil - ); end + ) + end + sig do override.returns( { @@ -889,20 +1078,26 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] - ) + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Variants + ] + ) + end + def self.variants end - def self.variants; end end class PerModelUsage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -935,8 +1130,7 @@ module OpenAI model_name: String, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -951,24 +1145,29 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - invocation_count: Integer, - model_name: String, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of tests failed for this criteria. sig { returns(Integer) } attr_accessor :failed @@ -981,7 +1180,13 @@ module OpenAI sig { returns(String) } attr_accessor :testing_criteria - sig { params(failed: Integer, passed: Integer, testing_criteria: String).returns(T.attached_class) } + sig do + params( + failed: Integer, + passed: Integer, + testing_criteria: String + ).returns(T.attached_class) + end def self.new( # Number of tests failed for this criteria. failed:, @@ -989,12 +1194,22 @@ module OpenAI passed:, # A description of the testing criteria. testing_criteria: - ); end - sig { override.returns({failed: Integer, passed: Integer, testing_criteria: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { failed: Integer, passed: Integer, testing_criteria: String } + ) + end + def to_hash + end end class ResultCounts < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of output items that resulted in an error. sig { returns(Integer) } attr_accessor :errored @@ -1029,9 +1244,21 @@ module OpenAI passed:, # Total number of executed output items. 
total: - ); end - sig { override.returns({errored: Integer, failed: Integer, passed: Integer, total: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/runs/output_item_list_params.rbi b/rbi/openai/models/evals/runs/output_item_list_params.rbi index e6931f3d..52a091de 100644 --- a/rbi/openai/models/evals/runs/output_item_list_params.rbi +++ b/rbi/openai/models/evals/runs/output_item_list_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :eval_id @@ -27,18 +30,39 @@ module OpenAI # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. - sig { returns(T.nilable(OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Evals::Runs::OutputItemListParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::Evals::Runs::OutputItemListParams::Order::OrSymbol + ).void + end attr_writer :order # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. - sig { returns(T.nilable(OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Evals::Runs::OutputItemListParams::Status::OrSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Evals::Runs::OutputItemListParams::Status::OrSymbol + ).void + end attr_writer :status sig do @@ -46,11 +70,11 @@ module OpenAI eval_id: String, after: String, limit: Integer, - order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: + OpenAI::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( eval_id:, @@ -65,35 +89,57 @@ module OpenAI # `pass` to filter by passed output items. status: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - eval_id: String, - after: String, - limit: Integer, - order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + eval_id: String, + after: String, + limit: Integer, + order: + OpenAI::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: + OpenAI::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Evals::Runs::OutputItemListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Evals::Runs::OutputItemListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Evals::Runs::OutputItemListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end # Filter output items by status. Use `failed` to filter by failed output items or @@ -101,14 +147,32 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - FAIL = T.let(:fail, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) - PASS = T.let(:pass, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) + FAIL = + T.let( + :fail, + OpenAI::Evals::Runs::OutputItemListParams::Status::TaggedSymbol + ) + PASS = + T.let( + :pass, + OpenAI::Evals::Runs::OutputItemListParams::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Evals::Runs::OutputItemListParams::Status::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/evals/runs/output_item_list_response.rbi b/rbi/openai/models/evals/runs/output_item_list_response.rbi index 027b0dd3..6f5cd696 100644 --- a/rbi/openai/models/evals/runs/output_item_list_response.rbi +++ b/rbi/openai/models/evals/runs/output_item_list_response.rbi @@ -5,6 +5,9 @@ module OpenAI module Evals module Runs class OutputItemListResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run output item. sig { returns(String) } attr_accessor :id @@ -38,14 +41,16 @@ module OpenAI attr_accessor :run_id # A sample containing the input and output of the evaluation run. 
- sig { returns(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample) } + sig do + returns(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample) + end attr_reader :sample sig do params( - sample: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, OpenAI::Internal::AnyHash) - ) - .void + sample: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::OrHash + ).void end attr_writer :sample @@ -63,11 +68,11 @@ module OpenAI eval_id: String, results: T::Array[T::Hash[Symbol, T.anything]], run_id: String, - sample: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, OpenAI::Internal::AnyHash), + sample: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run output item. @@ -90,32 +95,38 @@ module OpenAI status:, # The type of the object. Always "eval.run.output_item". object: :"eval.run.output_item" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - datasource_item: T::Hash[Symbol, T.anything], - datasource_item_id: Integer, - eval_id: String, - object: Symbol, - results: T::Array[T::Hash[Symbol, T.anything]], - run_id: String, - sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, - status: String - } - ) + override.returns( + { + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + ) + end + def to_hash end - def to_hash; end class Sample < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An object representing an error response from the Eval API. - sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The reason why the sample generation was finished. @@ -123,7 +134,13 @@ module OpenAI attr_accessor :finish_reason # An array of input messages. - sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input + ] + ) + end attr_accessor :input # The maximum number of tokens allowed for completion. @@ -135,7 +152,13 @@ module OpenAI attr_accessor :model # An array of output messages. - sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output + ] + ) + end attr_accessor :output # The seed used for generating the sample. @@ -151,32 +174,42 @@ module OpenAI attr_accessor :top_p # Token usage details for the sample. 
- sig { returns(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage) } + sig do + returns( + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + ) + end attr_reader :usage sig do params( - usage: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage, OpenAI::Internal::AnyHash) - ) - .void + usage: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage::OrHash + ).void end attr_writer :usage # A sample containing the input and output of the evaluation run. sig do params( - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + error: OpenAI::Evals::EvalAPIError::OrHash, finish_reason: String, - input: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input::OrHash + ], max_completion_tokens: Integer, model: String, - output: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output, OpenAI::Internal::AnyHash)], + output: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output::OrHash + ], seed: Integer, temperature: Float, top_p: Float, - usage: T.any(OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + usage: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage::OrHash + ).returns(T.attached_class) end def self.new( # An object representing an error response from the Eval API. @@ -199,27 +232,39 @@ module OpenAI top_p:, # Token usage details for the sample. usage: - ); end + ) + end + sig do - override - .returns( - { - error: OpenAI::Models::Evals::EvalAPIError, - finish_reason: String, - input: T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], - max_completion_tokens: Integer, - model: String, - output: T::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], - seed: Integer, - temperature: Float, - top_p: Float, - usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage - } - ) + override.returns( + { + error: OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input + ], + max_completion_tokens: Integer, + model: String, + output: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output + ], + seed: Integer, + temperature: Float, + top_p: Float, + usage: + OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message. sig { returns(String) } attr_accessor :content @@ -229,18 +274,26 @@ module OpenAI attr_accessor :role # An input message. - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns(T.attached_class) + end def self.new( # The content of the message. content:, # The role of the message sender (e.g., system, user, developer). 
role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class Output < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message. sig { returns(T.nilable(String)) } attr_reader :content @@ -255,18 +308,26 @@ module OpenAI sig { params(role: String).void } attr_writer :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns(T.attached_class) + end def self.new( # The content of the message. content: nil, # The role of the message (e.g. "system", "assistant", "user"). role: nil - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -290,8 +351,7 @@ module OpenAI completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -302,19 +362,21 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi index 6a3eb80e..77adecd0 100644 --- a/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi +++ b/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :eval_id @@ -18,14 +21,23 @@ module OpenAI params( eval_id: String, run_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(eval_id:, run_id:, request_options: {}) end - def self.new(eval_id:, run_id:, request_options: {}); end - sig { override.returns({eval_id: String, run_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { + eval_id: String, + run_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi index 69bee6f1..2e7053a5 100644 --- a/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi +++ b/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi @@ -5,6 +5,9 @@ module OpenAI module Evals module Runs class OutputItemRetrieveResponse < 
OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for the evaluation run output item. sig { returns(String) } attr_accessor :id @@ -38,14 +41,18 @@ module OpenAI attr_accessor :run_id # A sample containing the input and output of the evaluation run. - sig { returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample) } + sig do + returns( + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample + ) + end attr_reader :sample sig do params( - sample: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, OpenAI::Internal::AnyHash) - ) - .void + sample: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::OrHash + ).void end attr_writer :sample @@ -63,11 +70,11 @@ module OpenAI eval_id: String, results: T::Array[T::Hash[Symbol, T.anything]], run_id: String, - sample: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, OpenAI::Internal::AnyHash), + sample: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::OrHash, status: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for the evaluation run output item. @@ -90,32 +97,38 @@ module OpenAI status:, # The type of the object. Always "eval.run.output_item". object: :"eval.run.output_item" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - datasource_item: T::Hash[Symbol, T.anything], - datasource_item_id: Integer, - eval_id: String, - object: Symbol, - results: T::Array[T::Hash[Symbol, T.anything]], - run_id: String, - sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, - status: String - } - ) + override.returns( + { + id: String, + created_at: Integer, + datasource_item: T::Hash[Symbol, T.anything], + datasource_item_id: Integer, + eval_id: String, + object: Symbol, + results: T::Array[T::Hash[Symbol, T.anything]], + run_id: String, + sample: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + ) + end + def to_hash end - def to_hash; end class Sample < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An object representing an error response from the Eval API. - sig { returns(OpenAI::Models::Evals::EvalAPIError) } + sig { returns(OpenAI::Evals::EvalAPIError) } attr_reader :error - sig { params(error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash)).void } + sig { params(error: OpenAI::Evals::EvalAPIError::OrHash).void } attr_writer :error # The reason why the sample generation was finished. @@ -123,7 +136,13 @@ module OpenAI attr_accessor :finish_reason # An array of input messages. - sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input + ] + ) + end attr_accessor :input # The maximum number of tokens allowed for completion. @@ -135,7 +154,13 @@ module OpenAI attr_accessor :model # An array of output messages. - sig { returns(T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output]) } + sig do + returns( + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output + ] + ) + end attr_accessor :output # The seed used for generating the sample. 
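# --- Usage sketch (editorial addition, not part of the patch) ---
# Every nested parameter of Sample now accepts its `OrHash` form, so a whole
# sample can be assembled from plain hashes. The `error` keys below are
# assumptions (EvalAPIError's fields are not shown in this patch); the other
# keys mirror the Input, Output, and Usage signatures in the hunks that follow.
sample = OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample.new(
  error: { code: "", message: "" },                # assumed EvalAPIError keys
  finish_reason: "stop",
  input: [{ content: "2 + 2 = ?", role: "user" }], # Sample::Input::OrHash
  max_completion_tokens: 16,
  model: "gpt-4o-mini",                            # hypothetical model name
  output: [{ content: "4", role: "assistant" }],   # Sample::Output::OrHash
  seed: 42,
  temperature: 1.0,
  top_p: 1.0,
  usage: { cached_tokens: 0, completion_tokens: 1, prompt_tokens: 8, total_tokens: 9 }
)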
@@ -151,32 +176,42 @@ module OpenAI attr_accessor :top_p # Token usage details for the sample. - sig { returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage) } + sig do + returns( + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + ) + end attr_reader :usage sig do params( - usage: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage, OpenAI::Internal::AnyHash) - ) - .void + usage: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage::OrHash + ).void end attr_writer :usage # A sample containing the input and output of the evaluation run. sig do params( - error: T.any(OpenAI::Models::Evals::EvalAPIError, OpenAI::Internal::AnyHash), + error: OpenAI::Evals::EvalAPIError::OrHash, finish_reason: String, - input: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input, OpenAI::Internal::AnyHash)], + input: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input::OrHash + ], max_completion_tokens: Integer, model: String, - output: T::Array[T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output, OpenAI::Internal::AnyHash)], + output: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output::OrHash + ], seed: Integer, temperature: Float, top_p: Float, - usage: T.any(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + usage: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage::OrHash + ).returns(T.attached_class) end def self.new( # An object representing an error response from the Eval API. @@ -199,27 +234,39 @@ module OpenAI top_p:, # Token usage details for the sample. usage: - ); end + ) + end + sig do - override - .returns( - { - error: OpenAI::Models::Evals::EvalAPIError, - finish_reason: String, - input: T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], - max_completion_tokens: Integer, - model: String, - output: T::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], - seed: Integer, - temperature: Float, - top_p: Float, - usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage - } - ) + override.returns( + { + error: OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input + ], + max_completion_tokens: Integer, + model: String, + output: + T::Array[ + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output + ], + seed: Integer, + temperature: Float, + top_p: Float, + usage: + OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + } + ) + end + def to_hash end - def to_hash; end class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message. sig { returns(String) } attr_accessor :content @@ -229,18 +276,26 @@ module OpenAI attr_accessor :role # An input message. - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns(T.attached_class) + end def self.new( # The content of the message. content:, # The role of the message sender (e.g., system, user, developer). 
role: - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class Output < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The content of the message. sig { returns(T.nilable(String)) } attr_reader :content @@ -255,18 +310,26 @@ module OpenAI sig { params(role: String).void } attr_writer :role - sig { params(content: String, role: String).returns(T.attached_class) } + sig do + params(content: String, role: String).returns(T.attached_class) + end def self.new( # The content of the message. content: nil, # The role of the message (e.g. "system", "assistant", "user"). role: nil - ); end - sig { override.returns({content: String, role: String}) } - def to_hash; end + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens retrieved from cache. sig { returns(Integer) } attr_accessor :cached_tokens @@ -290,8 +353,7 @@ module OpenAI completion_tokens: Integer, prompt_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens retrieved from cache. @@ -302,19 +364,21 @@ module OpenAI prompt_tokens:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - cached_tokens: Integer, - completion_tokens: Integer, - prompt_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/file_chunking_strategy.rbi b/rbi/openai/models/file_chunking_strategy.rbi index b05dd510..74e488c7 100644 --- a/rbi/openai/models/file_chunking_strategy.rbi +++ b/rbi/openai/models/file_chunking_strategy.rbi @@ -6,13 +6,17 @@ module OpenAI module FileChunkingStrategy extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + Variants = + T.type_alias do + T.any( + OpenAI::StaticFileChunkingStrategyObject, + OpenAI::OtherFileChunkingStrategyObject ) + end + + sig { override.returns(T::Array[OpenAI::FileChunkingStrategy::Variants]) } + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/file_chunking_strategy_param.rbi b/rbi/openai/models/file_chunking_strategy_param.rbi index ca7b56c4..d8588bdf 100644 --- a/rbi/openai/models/file_chunking_strategy_param.rbi +++ b/rbi/openai/models/file_chunking_strategy_param.rbi @@ -7,13 +7,19 @@ module OpenAI module FileChunkingStrategyParam extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + Variants = + T.type_alias do + T.any( + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ) + end + + sig do + override.returns(T::Array[OpenAI::FileChunkingStrategyParam::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/file_content_params.rbi b/rbi/openai/models/file_content_params.rbi index 97fa8439..4790429d 100644 
--- a/rbi/openai/models/file_content_params.rbi +++ b/rbi/openai/models/file_content_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi index 675c3c74..db0074ca 100644 --- a/rbi/openai/models/file_create_params.rbi +++ b/rbi/openai/models/file_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The File object (not file name) to be uploaded. sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :file @@ -14,16 +16,15 @@ module OpenAI # Assistants API - `batch`: Used in the Batch API - `fine-tune`: Used for # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets - sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { returns(OpenAI::FilePurpose::OrSymbol) } attr_accessor :purpose sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The File object (not file name) to be uploaded. 
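# --- Usage sketch (editorial addition, not part of the patch) ---
# Per FileCreateParams above, `file` accepts a Pathname, StringIO, IO, or
# OpenAI::FilePart, and `purpose` is a FilePurpose::OrSymbol, so bare symbols
# work. Assumes an initialized `client = OpenAI::Client.new` whose
# `files.create` method takes these params:
require "pathname"

client.files.create(
  file: Pathname.new("training.jsonl"), # hypothetical local file; an IO or StringIO also works
  purpose: :"fine-tune"                 # any FilePurpose::OrSymbol value
)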
@@ -34,18 +35,20 @@ module OpenAI # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/file_delete_params.rbi b/rbi/openai/models/file_delete_params.rbi index 2d359dc0..32e8c812 100644 --- a/rbi/openai/models/file_delete_params.rbi +++ b/rbi/openai/models/file_delete_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/file_deleted.rbi b/rbi/openai/models/file_deleted.rbi index 60dd5163..afff0670 100644 --- a/rbi/openai/models/file_deleted.rbi +++ b/rbi/openai/models/file_deleted.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class FileDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -12,11 +14,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :file); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :file) + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/file_list_params.rbi b/rbi/openai/models/file_list_params.rbi index b33289b5..03836c50 100644 --- a/rbi/openai/models/file_list_params.rbi +++ b/rbi/openai/models/file_list_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -26,10 +28,10 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
- sig { returns(T.nilable(OpenAI::Models::FileListParams::Order::OrSymbol)) } + sig { returns(T.nilable(OpenAI::FileListParams::Order::OrSymbol)) } attr_reader :order - sig { params(order: OpenAI::Models::FileListParams::Order::OrSymbol).void } + sig { params(order: OpenAI::FileListParams::Order::OrSymbol).void } attr_writer :order # Only return files with the given purpose. @@ -43,11 +45,10 @@ module OpenAI params( after: String, limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, + order: OpenAI::FileListParams::Order::OrSymbol, purpose: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -64,34 +65,42 @@ module OpenAI # Only return files with the given purpose. purpose: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, - purpose: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::FileListParams::Order::OrSymbol, + purpose: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::FileListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) + ASC = T.let(:asc, OpenAI::FileListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::FileListParams::Order::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::FileListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::FileListParams::Order::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/file_object.rbi b/rbi/openai/models/file_object.rbi index af7989d6..829cee09 100644 --- a/rbi/openai/models/file_object.rbi +++ b/rbi/openai/models/file_object.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class FileObject < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The file identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -26,12 +28,12 @@ module OpenAI # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. - sig { returns(OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + sig { returns(OpenAI::FileObject::Purpose::TaggedSymbol) } attr_accessor :purpose # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. - sig { returns(OpenAI::Models::FileObject::Status::TaggedSymbol) } + sig { returns(OpenAI::FileObject::Status::TaggedSymbol) } attr_accessor :status # The Unix timestamp (in seconds) for when the file will expire. 
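# --- Usage sketch (editorial addition, not part of the patch) ---
# FileListParams' `order` enum admits either the tagged constant or a bare
# symbol through its OrSymbol alias. Assumes an initialized `client` whose
# `files.list` method takes these params and returns an Enumerable page:
page = client.files.list(
  limit: 20,
  order: OpenAI::FileListParams::Order::DESC, # equivalently just :desc
  purpose: "assistants"
)
page.each do |file|
  # `file.status` is a FileObject::Status::TaggedSymbol, e.g. :processed
  puts [file.id, file.filename, file.status].join("\t")
end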
@@ -56,13 +58,12 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::Models::FileObject::Purpose::OrSymbol, - status: OpenAI::Models::FileObject::Status::OrSymbol, + purpose: OpenAI::FileObject::Purpose::OrSymbol, + status: OpenAI::FileObject::Status::OrSymbol, expires_at: Integer, status_details: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The file identifier, which can be referenced in the API endpoints. @@ -87,24 +88,26 @@ module OpenAI status_details: nil, # The object type, which is always `file`. object: :file - ); end + ) + end + sig do - override - .returns( - { - id: String, - bytes: Integer, - created_at: Integer, - filename: String, - object: Symbol, - purpose: OpenAI::Models::FileObject::Purpose::TaggedSymbol, - status: OpenAI::Models::FileObject::Status::TaggedSymbol, - expires_at: Integer, - status_details: String - } - ) + override.returns( + { + id: String, + bytes: Integer, + created_at: Integer, + filename: String, + object: Symbol, + purpose: OpenAI::FileObject::Purpose::TaggedSymbol, + status: OpenAI::FileObject::Status::TaggedSymbol, + expires_at: Integer, + status_details: String + } + ) + end + def to_hash end - def to_hash; end # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` @@ -112,19 +115,28 @@ module OpenAI module Purpose extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::FileObject::Purpose) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - BATCH = T.let(:batch, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - BATCH_OUTPUT = T.let(:batch_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FileObject::Purpose::TaggedSymbol) - FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::Models::FileObject::Purpose::TaggedSymbol) - VISION = T.let(:vision, OpenAI::Models::FileObject::Purpose::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::FileObject::Purpose::TaggedSymbol]) } - def self.values; end + ASSISTANTS = + T.let(:assistants, OpenAI::FileObject::Purpose::TaggedSymbol) + ASSISTANTS_OUTPUT = + T.let(:assistants_output, OpenAI::FileObject::Purpose::TaggedSymbol) + BATCH = T.let(:batch, OpenAI::FileObject::Purpose::TaggedSymbol) + BATCH_OUTPUT = + T.let(:batch_output, OpenAI::FileObject::Purpose::TaggedSymbol) + FINE_TUNE = + T.let(:"fine-tune", OpenAI::FileObject::Purpose::TaggedSymbol) + FINE_TUNE_RESULTS = + T.let(:"fine-tune-results", OpenAI::FileObject::Purpose::TaggedSymbol) + VISION = T.let(:vision, OpenAI::FileObject::Purpose::TaggedSymbol) + + sig do + override.returns(T::Array[OpenAI::FileObject::Purpose::TaggedSymbol]) + end + def self.values + end end # Deprecated. 
The current status of the file, which can be either `uploaded`, @@ -132,15 +144,19 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::FileObject::Status) } OrSymbol = T.type_alias { T.any(Symbol, String) } - UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) - PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) - ERROR = T.let(:error, OpenAI::Models::FileObject::Status::TaggedSymbol) + UPLOADED = T.let(:uploaded, OpenAI::FileObject::Status::TaggedSymbol) + PROCESSED = T.let(:processed, OpenAI::FileObject::Status::TaggedSymbol) + ERROR = T.let(:error, OpenAI::FileObject::Status::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::FileObject::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::FileObject::Status::TaggedSymbol]) + end + def self.values + end end end end diff --git a/rbi/openai/models/file_purpose.rbi b/rbi/openai/models/file_purpose.rbi index 7d3a2d58..6b9af2cd 100644 --- a/rbi/openai/models/file_purpose.rbi +++ b/rbi/openai/models/file_purpose.rbi @@ -9,18 +9,19 @@ module OpenAI module FilePurpose extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::FilePurpose) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::TaggedSymbol) - BATCH = T.let(:batch, OpenAI::Models::FilePurpose::TaggedSymbol) - FINE_TUNE = T.let(:"fine-tune", OpenAI::Models::FilePurpose::TaggedSymbol) - VISION = T.let(:vision, OpenAI::Models::FilePurpose::TaggedSymbol) - USER_DATA = T.let(:user_data, OpenAI::Models::FilePurpose::TaggedSymbol) - EVALS = T.let(:evals, OpenAI::Models::FilePurpose::TaggedSymbol) + ASSISTANTS = T.let(:assistants, OpenAI::FilePurpose::TaggedSymbol) + BATCH = T.let(:batch, OpenAI::FilePurpose::TaggedSymbol) + FINE_TUNE = T.let(:"fine-tune", OpenAI::FilePurpose::TaggedSymbol) + VISION = T.let(:vision, OpenAI::FilePurpose::TaggedSymbol) + USER_DATA = T.let(:user_data, OpenAI::FilePurpose::TaggedSymbol) + EVALS = T.let(:evals, OpenAI::FilePurpose::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::FilePurpose::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::FilePurpose::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/file_retrieve_params.rbi b/rbi/openai/models/file_retrieve_params.rbi index 90bf8337..ed8a974f 100644 --- a/rbi/openai/models/file_retrieve_params.rbi +++ b/rbi/openai/models/file_retrieve_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git 
a/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi index b8e4f73d..bb260bea 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The project identifiers to grant access to. sig { returns(T::Array[String]) } attr_accessor :project_ids @@ -15,17 +18,26 @@ module OpenAI sig do params( project_ids: T::Array[String], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The project identifiers to grant access to. project_ids:, request_options: {} - ); end - sig { override.returns({project_ids: T::Array[String], request_options: OpenAI::RequestOptions}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + project_ids: T::Array[String], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi index 6dadadac..c17f2d89 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi @@ -5,6 +5,9 @@ module OpenAI module FineTuning module Checkpoints class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The permission identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -40,9 +43,21 @@ module OpenAI project_id:, # The object type, which is always "checkpoint.permission". 
object: :"checkpoint.permission" - ); end - sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + object: Symbol, + project_id: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi index b762881f..0be64531 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi @@ -8,22 +8,31 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :fine_tuned_model_checkpoint sig do params( fine_tuned_model_checkpoint: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(fine_tuned_model_checkpoint:, request_options: {}) end - def self.new(fine_tuned_model_checkpoint:, request_options: {}); end sig do - override.returns({fine_tuned_model_checkpoint: String, request_options: OpenAI::RequestOptions}) + override.returns( + { + fine_tuned_model_checkpoint: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi index 6bc5d73b..f84b6fea 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi @@ -5,6 +5,9 @@ module OpenAI module FineTuning module Checkpoints class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the fine-tuned model checkpoint permission that was deleted. sig { returns(String) } attr_accessor :id @@ -17,7 +20,11 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the fine-tuned model checkpoint permission that was deleted. id:, @@ -25,9 +32,16 @@ module OpenAI deleted:, # The object type, which is always "checkpoint.permission". 
object: :"checkpoint.permission" - ); end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { id: String, deleted: T::Boolean, object: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi index 04da40a3..84601eba 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last permission ID from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -23,10 +26,21 @@ module OpenAI attr_writer :limit # The order in which to retrieve permissions. - sig { returns(T.nilable(OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol).void } + sig do + params( + order: + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol + ).void + end attr_writer :order # The ID of the project to get permissions for. @@ -40,11 +54,11 @@ module OpenAI params( after: String, limit: Integer, - order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + order: + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, project_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last permission ID from the previous pagination request. @@ -56,39 +70,57 @@ module OpenAI # The ID of the project to get permissions for. project_id: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, - project_id: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + order: + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + project_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The order in which to retrieve permissions. 
module Order extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order) } + T.type_alias do + T.all( + Symbol, + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } ASCENDING = - T.let(:ascending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) + T.let( + :ascending, + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol + ) DESCENDING = - T.let(:descending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) + T.let( + :descending, + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi index 95aa5332..3c65d481 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi @@ -5,7 +5,16 @@ module OpenAI module FineTuning module Checkpoints class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel - sig { returns(T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data]) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data + ] + ) + end attr_accessor :data sig { returns(T::Boolean) } @@ -22,36 +31,46 @@ module OpenAI sig do params( - data: T::Array[ - T.any( - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data, - OpenAI::Internal::AnyHash - ) - ], + data: + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data::OrHash + ], has_more: T::Boolean, first_id: T.nilable(String), last_id: T.nilable(String), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new( + data:, + has_more:, + first_id: nil, + last_id: nil, + object: :list + ) end - def self.new(data:, has_more:, first_id: nil, last_id: nil, object: :list); end sig do - override - .returns( - { - data: T::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], - has_more: T::Boolean, - object: Symbol, - first_id: T.nilable(String), - last_id: T.nilable(String) - } - ) + override.returns( + { + data: + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data + ], + has_more: T::Boolean, + object: Symbol, + first_id: T.nilable(String), + last_id: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The permission identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -87,9 +106,21 @@ module OpenAI project_id:, # The object type, which is always "checkpoint.permission". 
object: :"checkpoint.permission" - ); end - sig { override.returns({id: String, created_at: Integer, object: Symbol, project_id: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + object: Symbol, + project_id: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index 3036f3a4..ccd100af 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -6,6 +6,8 @@ module OpenAI module FineTuning class FineTuningJob < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The object identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -16,14 +18,13 @@ module OpenAI # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error)) } + sig { returns(T.nilable(OpenAI::FineTuning::FineTuningJob::Error)) } attr_reader :error sig do params( - error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::AnyHash)) - ) - .void + error: T.nilable(OpenAI::FineTuning::FineTuningJob::Error::OrHash) + ).void end attr_writer :error @@ -39,14 +40,14 @@ module OpenAI # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. - sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters) } + sig { returns(OpenAI::FineTuning::FineTuningJob::Hyperparameters) } attr_reader :hyperparameters sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::AnyHash) - ) - .void + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters @@ -74,7 +75,7 @@ module OpenAI # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. - sig { returns(OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + sig { returns(OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol) } attr_accessor :status # The total number of billable tokens processed by this fine-tuning job. The value @@ -99,7 +100,13 @@ module OpenAI attr_accessor :estimated_finish # A list of integrations to enable for this fine-tuning job. - sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject] + ) + ) + end attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -112,10 +119,14 @@ module OpenAI attr_accessor :metadata # The method used for fine-tuning. 
- sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method)) } + sig { returns(T.nilable(OpenAI::FineTuning::FineTuningJob::Method)) } attr_reader :method_ - sig { params(method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::AnyHash)).void } + sig do + params( + method_: OpenAI::FineTuning::FineTuningJob::Method::OrHash + ).void + end attr_writer :method_ # The `fine_tuning.job` object represents a fine-tuning job that has been created @@ -124,27 +135,30 @@ module OpenAI params( id: String, created_at: Integer, - error: T.nilable(T.any(OpenAI::Models::FineTuning::FineTuningJob::Error, OpenAI::Internal::AnyHash)), + error: T.nilable(OpenAI::FineTuning::FineTuningJob::Error::OrHash), fine_tuned_model: T.nilable(String), finished_at: T.nilable(Integer), - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, OpenAI::Internal::AnyHash), + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::OrHash, model: String, organization_id: String, result_files: T::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::OrSymbol, + status: OpenAI::FineTuning::FineTuningJob::Status::OrSymbol, trained_tokens: T.nilable(Integer), training_file: String, validation_file: T.nilable(String), estimated_finish: T.nilable(Integer), - integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject, OpenAI::Internal::AnyHash)] - ), + integrations: + T.nilable( + T::Array[ + OpenAI::FineTuning::FineTuningJobWandbIntegrationObject::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method, OpenAI::Internal::AnyHash), + method_: OpenAI::FineTuning::FineTuningJob::Method::OrHash, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The object identifier, which can be referenced in the API endpoints. @@ -202,36 +216,47 @@ module OpenAI method_: nil, # The object type, which is always "fine_tuning.job". 
object: :"fine_tuning.job" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - error: T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Error), - fine_tuned_model: T.nilable(String), - finished_at: T.nilable(Integer), - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, - model: String, - object: Symbol, - organization_id: String, - result_files: T::Array[String], - seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol, - trained_tokens: T.nilable(Integer), - training_file: String, - validation_file: T.nilable(String), - estimated_finish: T.nilable(Integer), - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]), - metadata: T.nilable(T::Hash[Symbol, String]), - method_: OpenAI::Models::FineTuning::FineTuningJob::Method - } - ) + override.returns( + { + id: String, + created_at: Integer, + error: T.nilable(OpenAI::FineTuning::FineTuningJob::Error), + fine_tuned_model: T.nilable(String), + finished_at: T.nilable(Integer), + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Hyperparameters, + model: String, + object: Symbol, + organization_id: String, + result_files: T::Array[String], + seed: Integer, + status: OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol, + trained_tokens: T.nilable(Integer), + training_file: String, + validation_file: T.nilable(String), + estimated_finish: T.nilable(Integer), + integrations: + T.nilable( + T::Array[ + OpenAI::FineTuning::FineTuningJobWandbIntegrationObject + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + method_: OpenAI::FineTuning::FineTuningJob::Method + } + ) + end + def to_hash end - def to_hash; end class Error < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A machine-readable error code. sig { returns(String) } attr_accessor :code @@ -247,7 +272,13 @@ module OpenAI # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. - sig { params(code: String, message: String, param: T.nilable(String)).returns(T.attached_class) } + sig do + params( + code: String, + message: String, + param: T.nilable(String) + ).returns(T.attached_class) + end def self.new( # A machine-readable error code. code:, @@ -256,12 +287,22 @@ module OpenAI # The parameter that was invalid, usually `training_file` or `validation_file`. # This field will be null if the failure was not parameter-specific. param: - ); end - sig { override.returns({code: String, message: String, param: T.nilable(String)}) } - def to_hash; end + ) + end + + sig do + override.returns( + { code: String, message: String, param: T.nilable(String) } + ) + end + def to_hash + end end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -293,8 +334,7 @@ module OpenAI batch_size: T.any(Symbol, Integer), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -306,26 +346,37 @@ module OpenAI # The number of epochs to train the model for. 
An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -333,8 +384,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -342,8 +402,17 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end @@ -352,56 +421,110 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::FineTuningJob::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } VALIDATING_FILES = - T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - QUEUED = T.let(:queued, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - RUNNING = T.let(:running, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - SUCCEEDED = T.let(:succeeded, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol]) } - def self.values; end + T.let( + :validating_files, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + QUEUED = + T.let( + :queued, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + RUNNING = + T.let( + :running, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + SUCCEEDED = + T.let( + :succeeded, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + CANCELLED = + T.let( + :cancelled, + OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol + ) + + sig do + override.returns( + 
T::Array[OpenAI::FineTuning::FineTuningJob::Status::TaggedSymbol] + ) + end + def self.values + end end class Method < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Configuration for the DPO fine-tuning method. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo)) } + sig do + returns(T.nilable(OpenAI::FineTuning::FineTuningJob::Method::Dpo)) + end attr_reader :dpo - sig { params(dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::AnyHash)).void } + sig do + params( + dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo::OrHash + ).void + end attr_writer :dpo # Configuration for the supervised fine-tuning method. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised)) } + sig do + returns( + T.nilable(OpenAI::FineTuning::FineTuningJob::Method::Supervised) + ) + end attr_reader :supervised sig do params( - supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::AnyHash) - ) - .void + supervised: + OpenAI::FineTuning::FineTuningJob::Method::Supervised::OrHash + ).void end attr_writer :supervised # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol).void } + sig do + params( + type: OpenAI::FineTuning::FineTuningJob::Method::Type::OrSymbol + ).void + end attr_writer :type # The method used for fine-tuning. sig do params( - dpo: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, OpenAI::Internal::AnyHash), - supervised: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, OpenAI::Internal::AnyHash), - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::OrSymbol - ) - .returns(T.attached_class) + dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo::OrHash, + supervised: + OpenAI::FineTuning::FineTuningJob::Method::Supervised::OrHash, + type: OpenAI::FineTuning::FineTuningJob::Method::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Configuration for the DPO fine-tuning method. @@ -410,50 +533,73 @@ module OpenAI supervised: nil, # The type of method. Is either `supervised` or `dpo`. type: nil - ); end + ) + end + sig do - override - .returns( - { - dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, - supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol - } - ) + override.returns( + { + dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, + supervised: + OpenAI::FineTuning::FineTuningJob::Method::Supervised, + type: + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end class Dpo < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The hyperparameters used for the fine-tuning job. 
- sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + ) + ) + end attr_reader :hyperparameters sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Internal::AnyHash) - ) - .void + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters # Configuration for the DPO fine-tuning method. sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::OrHash + ).returns(T.attached_class) end def self.new( # The hyperparameters used for the fine-tuning job. hyperparameters: nil - ); end + ) + end + sig do - override - .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters}) + override.returns( + { + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + } + ) + end + def to_hash end - def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -475,7 +621,9 @@ module OpenAI sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params(learning_rate_multiplier: T.any(Symbol, Float)).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle @@ -493,8 +641,7 @@ module OpenAI beta: T.any(Symbol, Float), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -509,27 +656,38 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # The beta value for the DPO method. 
A higher beta value will increase the weight @@ -537,8 +695,17 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -546,8 +713,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -555,49 +731,71 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end end class Supervised < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The hyperparameters used for the fine-tuning job. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + ) + ) + end attr_reader :hyperparameters sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .void + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters # Configuration for the supervised fine-tuning method. sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::OrHash + ).returns(T.attached_class) end def self.new( # The hyperparameters used for the fine-tuning job. hyperparameters: nil - ); end + ) + end + sig do - override - .returns({hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters}) + override.returns( + { + hyperparameters: + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + } + ) + end + def to_hash end - def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
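Alongside the reformat, this patch gives every union a `Variants` type alias and retypes `variants` to return an array of it. A minimal sorbet-runtime sketch against the DPO `BatchSize` alias defined above; the `:auto` symbol is an assumed member of the Symbol arm:

    # T.let checks a value against the alias at runtime.
    T.let(:auto, OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize::Variants)
    T.let(16, OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize::Variants)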
sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -611,7 +809,9 @@ module OpenAI sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params(learning_rate_multiplier: T.any(Symbol, Float)).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle @@ -628,8 +828,7 @@ module OpenAI batch_size: T.any(Symbol, Integer), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -641,26 +840,37 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -668,8 +878,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -677,8 +896,17 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end end @@ -687,14 +915,32 @@ module OpenAI module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) - DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) + SUPERVISED = + T.let( + :supervised, + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ) + DPO = + T.let( + :dpo, + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi index f034e524..4cee296b 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -6,6 +6,8 @@ module OpenAI module FineTuning class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The object identifier. sig { returns(String) } attr_accessor :id @@ -15,7 +17,9 @@ module OpenAI attr_accessor :created_at # The log level of the event. - sig { returns(OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + sig do + returns(OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + end attr_accessor :level # The message of the event. @@ -34,10 +38,20 @@ module OpenAI attr_writer :data # The type of event. - sig { returns(T.nilable(OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol).void } + sig do + params( + type: OpenAI::FineTuning::FineTuningJobEvent::Type::OrSymbol + ).void + end attr_writer :type # Fine-tuning job event object @@ -45,13 +59,12 @@ module OpenAI params( id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::OrSymbol, + level: OpenAI::FineTuning::FineTuningJobEvent::Level::OrSymbol, message: String, data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::OrSymbol, + type: OpenAI::FineTuning::FineTuningJobEvent::Type::OrSymbol, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The object identifier. @@ -68,50 +81,93 @@ module OpenAI type: nil, # The object type, which is always "fine_tuning.job.event". 
object: :"fine_tuning.job.event" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, - message: String, - object: Symbol, - data: T.anything, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol - } - ) + override.returns( + { + id: String, + created_at: Integer, + level: + OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol, + message: String, + object: Symbol, + data: T.anything, + type: OpenAI::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The log level of the event. module Level extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) - ERROR = T.let(:error, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) + INFO = + T.let( + :info, + OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol + ) + WARN = + T.let( + :warn, + OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol + ) + ERROR = + T.let( + :error, + OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJobEvent::Level::TaggedSymbol + ] + ) + end + def self.values + end end # The type of event. 
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) - METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) + MESSAGE = + T.let( + :message, + OpenAI::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + ) + METRICS = + T.let( + :metrics, + OpenAI::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::FineTuningJobEvent::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi index 9ae1ecd5..a2e8e306 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_integration.rbi @@ -5,7 +5,8 @@ module OpenAI FineTuningJobIntegration = FineTuning::FineTuningJobIntegration module FineTuning - FineTuningJobIntegration = OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject + FineTuningJobIntegration = + OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi index fb541aa4..c5b719f8 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi @@ -6,6 +6,8 @@ module OpenAI module FineTuning class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the project that the new run will be created under. sig { returns(String) } attr_accessor :project @@ -35,8 +37,12 @@ module OpenAI # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. sig do - params(project: String, entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String]) - .returns(T.attached_class) + params( + project: String, + entity: T.nilable(String), + name: T.nilable(String), + tags: T::Array[String] + ).returns(T.attached_class) end def self.new( # The name of the project that the new run will be created under. @@ -52,17 +58,21 @@ module OpenAI # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". 
tags: nil - ); end + ) + end + sig do - override - .returns({ - project: String, - entity: T.nilable(String), - name: T.nilable(String), - tags: T::Array[String] - }) + override.returns( + { + project: String, + entity: T.nilable(String), + name: T.nilable(String), + tags: T::Array[String] + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi index c17319c1..934a2242 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi @@ -2,10 +2,13 @@ module OpenAI module Models - FineTuningJobWandbIntegrationObject = FineTuning::FineTuningJobWandbIntegrationObject + FineTuningJobWandbIntegrationObject = + FineTuning::FineTuningJobWandbIntegrationObject module FineTuning class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the integration being enabled for the fine-tuning job sig { returns(Symbol) } attr_accessor :type @@ -14,21 +17,21 @@ module OpenAI # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. - sig { returns(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration) } + sig { returns(OpenAI::FineTuning::FineTuningJobWandbIntegration) } attr_reader :wandb sig do - params(wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::AnyHash)) - .void + params( + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration::OrHash + ).void end attr_writer :wandb sig do params( - wandb: T.any(OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, OpenAI::Internal::AnyHash), + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The settings for your integration with Weights and Biases. 
This payload @@ -38,9 +41,19 @@ module OpenAI wandb:, # The type of the integration being enabled for the fine-tuning job type: :wandb - ); end - sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + type: Symbol, + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/openai/models/fine_tuning/job_cancel_params.rbi index 9976752d..92f99a73 100644 --- a/rbi/openai/models/fine_tuning/job_cancel_params.rbi +++ b/rbi/openai/models/fine_tuning/job_cancel_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi index af70cdc3..eaa8cb24 100644 --- a/rbi/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/openai/models/fine_tuning/job_create_params.rbi @@ -7,9 +7,15 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig { returns(T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol)) } + sig do + returns( + T.any(String, OpenAI::FineTuning::JobCreateParams::Model::OrSymbol) + ) + end attr_accessor :model # The ID of an uploaded file that contains training data. @@ -34,19 +40,29 @@ module OpenAI # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters)) } + sig do + returns( + T.nilable(OpenAI::FineTuning::JobCreateParams::Hyperparameters) + ) + end attr_reader :hyperparameters sig do params( - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash) - ) - .void + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters # A list of integrations to enable for your fine-tuning job. - sig { returns(T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::FineTuning::JobCreateParams::Integration] + ) + ) + end attr_accessor :integrations # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -59,11 +75,13 @@ module OpenAI attr_accessor :metadata # The method used for fine-tuning. 
- sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method)) } + sig { returns(T.nilable(OpenAI::FineTuning::JobCreateParams::Method)) } attr_reader :method_ sig do - params(method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash)).void + params( + method_: OpenAI::FineTuning::JobCreateParams::Method::OrHash + ).void end attr_writer :method_ @@ -98,20 +116,27 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), training_file: String, - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash), - integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::AnyHash)] - ), + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, + integrations: + T.nilable( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Integration::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash), + method_: OpenAI::FineTuning::JobCreateParams::Method::OrHash, seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The name of the model to fine-tune. You can select one of the @@ -173,44 +198,89 @@ module OpenAI # for more details. validation_file: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), - training_file: String, - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - integrations: T.nilable(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]), - metadata: T.nilable(T::Hash[Symbol, String]), - method_: OpenAI::Models::FineTuning::JobCreateParams::Method, - seed: T.nilable(Integer), - suffix: T.nilable(String), - validation_file: T.nilable(String), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), + training_file: String, + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Hyperparameters, + integrations: + T.nilable( + T::Array[OpenAI::FineTuning::JobCreateParams::Integration] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + method_: OpenAI::FineTuning::JobCreateParams::Method, + seed: T.nilable(Integer), + suffix: T.nilable(String), + validation_file: T.nilable(String), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). 
module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias do + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol + ) + end - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } + sig do + override.returns( + T::Array[OpenAI::FineTuning::JobCreateParams::Model::Variants] + ) + end + def self.variants + end + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::JobCreateParams::Model) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - BABBAGE_002 = T.let(:"babbage-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) - DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) - GPT_3_5_TURBO = T.let(:"gpt-3.5-turbo", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) - GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) + BABBAGE_002 = + T.let( + :"babbage-002", + OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol + ) + DAVINCI_002 = + T.let( + :"davinci-002", + OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol + ) + GPT_3_5_TURBO = + T.let( + :"gpt-3.5-turbo", + OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol + ) + GPT_4O_MINI = + T.let( + :"gpt-4o-mini", + OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol + ) end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -242,8 +312,7 @@ module OpenAI batch_size: T.any(Symbol, Integer), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -255,26 +324,37 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. 
A smaller learning rate may be useful to @@ -282,8 +362,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -291,12 +380,24 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end class Integration < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of integration to enable. Currently, only "wandb" (Weights and Biases) # is supported. sig { returns(Symbol) } @@ -306,23 +407,25 @@ module OpenAI # specifies the project that metrics will be sent to. Optionally, you can set an # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. - sig { returns(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb) } + sig do + returns(OpenAI::FineTuning::JobCreateParams::Integration::Wandb) + end attr_reader :wandb sig do params( - wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::AnyHash) - ) - .void + wandb: + OpenAI::FineTuning::JobCreateParams::Integration::Wandb::OrHash + ).void end attr_writer :wandb sig do params( - wandb: T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, OpenAI::Internal::AnyHash), + wandb: + OpenAI::FineTuning::JobCreateParams::Integration::Wandb::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The settings for your integration with Weights and Biases. This payload @@ -333,11 +436,24 @@ module OpenAI # The type of integration to enable. Currently, only "wandb" (Weights and Biases) # is supported. type: :wandb - ); end - sig { override.returns({type: Symbol, wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + type: Symbol, + wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb + } + ) + end + def to_hash + end class Wandb < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the project that the new run will be created under. sig { returns(String) } attr_accessor :project @@ -372,8 +488,7 @@ module OpenAI entity: T.nilable(String), name: T.nilable(String), tags: T::Array[String] - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The name of the project that the new run will be created under. @@ -389,58 +504,82 @@ module OpenAI # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". 
tags: nil - ); end + ) + end + sig do - override - .returns({ - project: String, - entity: T.nilable(String), - name: T.nilable(String), - tags: T::Array[String] - }) + override.returns( + { + project: String, + entity: T.nilable(String), + name: T.nilable(String), + tags: T::Array[String] + } + ) + end + def to_hash end - def to_hash; end end end class Method < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Configuration for the DPO fine-tuning method. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo)) } + sig do + returns(T.nilable(OpenAI::FineTuning::JobCreateParams::Method::Dpo)) + end attr_reader :dpo sig do - params(dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::AnyHash)) - .void + params( + dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo::OrHash + ).void end attr_writer :dpo # Configuration for the supervised fine-tuning method. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised)) } + sig do + returns( + T.nilable(OpenAI::FineTuning::JobCreateParams::Method::Supervised) + ) + end attr_reader :supervised sig do params( - supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::AnyHash) - ) - .void + supervised: + OpenAI::FineTuning::JobCreateParams::Method::Supervised::OrHash + ).void end attr_writer :supervised # The type of method. Is either `supervised` or `dpo`. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol).void } + sig do + params( + type: OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + ).void + end attr_writer :type # The method used for fine-tuning. sig do params( - dpo: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, OpenAI::Internal::AnyHash), - supervised: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, OpenAI::Internal::AnyHash), - type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol - ) - .returns(T.attached_class) + dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo::OrHash, + supervised: + OpenAI::FineTuning::JobCreateParams::Method::Supervised::OrHash, + type: OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Configuration for the DPO fine-tuning method. @@ -449,56 +588,73 @@ module OpenAI supervised: nil, # The type of method. Is either `supervised` or `dpo`. type: nil - ); end + ) + end + sig do - override - .returns( - { - dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, - supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::Models::FineTuning::JobCreateParams::Method::Type::OrSymbol - } - ) + override.returns( + { + dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, + supervised: + OpenAI::FineTuning::JobCreateParams::Method::Supervised, + type: + OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end class Dpo < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The hyperparameters used for the fine-tuning job. 
- sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + ) + ) + end attr_reader :hyperparameters sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .void + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters # Configuration for the DPO fine-tuning method. sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::OrHash + ).returns(T.attached_class) end def self.new( # The hyperparameters used for the fine-tuning job. hyperparameters: nil - ); end + ) + end + sig do - override - .returns({hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters}) + override.returns( + { + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + } + ) + end + def to_hash end - def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -520,7 +676,9 @@ module OpenAI sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params(learning_rate_multiplier: T.any(Symbol, Float)).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle @@ -538,8 +696,7 @@ module OpenAI beta: T.any(Symbol, Float), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -554,27 +711,38 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # The beta value for the DPO method. 
A higher beta value will increase the weight @@ -582,8 +750,17 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -591,8 +768,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -600,51 +786,71 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end end class Supervised < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The hyperparameters used for the fine-tuning job. - sig { returns(T.nilable(OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters)) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + ) + ) + end attr_reader :hyperparameters sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .void + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::OrHash + ).void end attr_writer :hyperparameters # Configuration for the supervised fine-tuning method. sig do params( - hyperparameters: T.any( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, - OpenAI::Internal::AnyHash - ) - ) - .returns(T.attached_class) + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::OrHash + ).returns(T.attached_class) end def self.new( # The hyperparameters used for the fine-tuning job. hyperparameters: nil - ); end + ) + end + sig do - override - .returns( - {hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} - ) + override.returns( + { + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + } + ) + end + def to_hash end - def to_hash; end class Hyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
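# --- The `T.any(Symbol, Integer)` / `T.any(Symbol, Float)` hyperparameter
# --- unions above model the API's "auto"-or-number fields; a small sketch
# --- (treating `:auto` as the symbol form is an assumption from the API docs):
#
#   dpo_hp = OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters.new(
#     batch_size: :auto, # Symbol variant: let the service choose
#     beta: 0.1,         # Float variant: explicit DPO beta
#     n_epochs: 3        # Integer variant
#   )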
sig { returns(T.nilable(T.any(Symbol, Integer))) } @@ -658,7 +864,9 @@ module OpenAI sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params(learning_rate_multiplier: T.any(Symbol, Float)).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle @@ -675,8 +883,7 @@ module OpenAI batch_size: T.any(Symbol, Integer), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Number of examples in each batch. A larger batch size means that model @@ -688,26 +895,37 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. n_epochs: nil - ); end + ) + end + sig do - override - .returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) + override.returns( + { + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash end - def to_hash; end # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. module BatchSize extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -715,8 +933,17 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Float]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -724,8 +951,17 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - sig { override.returns([Symbol, Integer]) } - def self.variants; end + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end end end end @@ -734,14 +970,32 @@ module OpenAI module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) - DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) + SUPERVISED = + T.let( + :supervised, + OpenAI::FineTuning::JobCreateParams::Method::Type::TaggedSymbol + ) + DPO = + T.let( + :dpo, + OpenAI::FineTuning::JobCreateParams::Method::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Method::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/openai/models/fine_tuning/job_list_events_params.rbi index 9bfe0d0a..60c32908 100644 --- a/rbi/openai/models/fine_tuning/job_list_events_params.rbi +++ b/rbi/openai/models/fine_tuning/job_list_events_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last event from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -25,9 +27,8 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last event from the previous pagination request. @@ -35,9 +36,20 @@ module OpenAI # Number of events to retrieve. limit: nil, request_options: {} - ); end - sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/job_list_params.rbi b/rbi/openai/models/fine_tuning/job_list_params.rbi index 21062f5c..03b6a56d 100644 --- a/rbi/openai/models/fine_tuning/job_list_params.rbi +++ b/rbi/openai/models/fine_tuning/job_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last job from the previous pagination request. 
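# --- Hedged sketch for the list params above and the event-list params just
# --- shown; `list` and `list_events` are assumed from the SDK's generated job
# --- service, with `client` as in the first sketch:
#
#   jobs   = client.fine_tuning.jobs.list(limit: 20, metadata: {run: "v2"})
#   events = client.fine_tuning.jobs.list_events("ftjob-abc123", limit: 10)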
sig { returns(T.nilable(String)) } attr_reader :after @@ -31,9 +33,8 @@ module OpenAI after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last job from the previous pagination request. @@ -44,19 +45,21 @@ module OpenAI # Alternatively, set `metadata=null` to indicate no metadata. metadata: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - limit: Integer, - metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + limit: Integer, + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/openai/models/fine_tuning/job_retrieve_params.rbi index ca938301..2d5d4a41 100644 --- a/rbi/openai/models/fine_tuning/job_retrieve_params.rbi +++ b/rbi/openai/models/fine_tuning/job_retrieve_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi index 27afbe12..29a595b3 100644 --- a/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi +++ b/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi @@ -8,6 +8,9 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Identifier for the last checkpoint ID from the previous pagination request. sig { returns(T.nilable(String)) } attr_reader :after @@ -26,9 +29,8 @@ module OpenAI params( after: String, limit: Integer, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Identifier for the last checkpoint ID from the previous pagination request. @@ -36,9 +38,20 @@ module OpenAI # Number of checkpoints to retrieve. 
limit: nil, request_options: {} - ); end - sig { override.returns({after: String, limit: Integer, request_options: OpenAI::RequestOptions}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi index 527bf6e5..80b8eb71 100644 --- a/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi +++ b/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi @@ -5,6 +5,9 @@ module OpenAI module FineTuning module Jobs class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The checkpoint identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -22,14 +25,16 @@ module OpenAI attr_accessor :fine_tuning_job_id # Metrics at the step number during the fine-tuning job. - sig { returns(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) } + sig do + returns(OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics) + end attr_reader :metrics sig do params( - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::AnyHash) - ) - .void + metrics: + OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics::OrHash + ).void end attr_writer :metrics @@ -49,11 +54,11 @@ module OpenAI created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: T.any(OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, OpenAI::Internal::AnyHash), + metrics: + OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics::OrHash, step_number: Integer, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The checkpoint identifier, which can be referenced in the API endpoints. @@ -70,24 +75,30 @@ module OpenAI step_number:, # The object type, which is always "fine_tuning.job.checkpoint". 
object: :"fine_tuning.job.checkpoint" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - fine_tuned_model_checkpoint: String, - fine_tuning_job_id: String, - metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, - object: Symbol, - step_number: Integer - } - ) + override.returns( + { + id: String, + created_at: Integer, + fine_tuned_model_checkpoint: String, + fine_tuning_job_id: String, + metrics: + OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + object: Symbol, + step_number: Integer + } + ) + end + def to_hash end - def to_hash; end class Metrics < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(T.nilable(Float)) } attr_reader :full_valid_loss @@ -140,8 +151,7 @@ module OpenAI train_mean_token_accuracy: Float, valid_loss: Float, valid_mean_token_accuracy: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( full_valid_loss: nil, @@ -151,22 +161,24 @@ module OpenAI train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil - ); end + ) + end + sig do - override - .returns( - { - full_valid_loss: Float, - full_valid_mean_token_accuracy: Float, - step: Float, - train_loss: Float, - train_mean_token_accuracy: Float, - valid_loss: Float, - valid_mean_token_accuracy: Float - } - ) + override.returns( + { + full_valid_loss: Float, + full_valid_mean_token_accuracy: Float, + step: Float, + train_loss: Float, + train_mean_token_accuracy: Float, + valid_loss: Float, + valid_mean_token_accuracy: Float + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/function_definition.rbi b/rbi/openai/models/function_definition.rbi index bed3cc2c..66649eae 100644 --- a/rbi/openai/models/function_definition.rbi +++ b/rbi/openai/models/function_definition.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class FunctionDefinition < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. sig { returns(String) } @@ -43,8 +45,7 @@ module OpenAI description: String, parameters: T::Hash[Symbol, T.anything], strict: T.nilable(T::Boolean) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain @@ -67,19 +68,21 @@ module OpenAI # more about Structured Outputs in the # [function calling guide](docs/guides/function-calling). strict: nil - ); end + ) + end + sig do - override - .returns( - { - name: String, - description: String, - parameters: T::Hash[Symbol, T.anything], - strict: T.nilable(T::Boolean) - } - ) + override.returns( + { + name: String, + description: String, + parameters: T::Hash[Symbol, T.anything], + strict: T.nilable(T::Boolean) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/image.rbi b/rbi/openai/models/image.rbi index fa3f7278..867722c0 100644 --- a/rbi/openai/models/image.rbi +++ b/rbi/openai/models/image.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The base64-encoded JSON of the generated image. 
Default value for `gpt-image-1`, # and only present if `response_format` is set to `b64_json` for `dall-e-2` and # `dall-e-3`. @@ -29,7 +31,11 @@ module OpenAI attr_writer :url # Represents the content or the URL of an image generated by the OpenAI API. - sig { params(b64_json: String, revised_prompt: String, url: String).returns(T.attached_class) } + sig do + params(b64_json: String, revised_prompt: String, url: String).returns( + T.attached_class + ) + end def self.new( # The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, # and only present if `response_format` is set to `b64_json` for `dall-e-2` and @@ -41,9 +47,16 @@ module OpenAI # `response_format` is set to `url` (default value). Unsupported for # `gpt-image-1`. url: nil - ); end - sig { override.returns({b64_json: String, revised_prompt: String, url: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { b64_json: String, revised_prompt: String, url: String } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi index 361f3512..9c31879a 100644 --- a/rbi/openai/models/image_create_variation_params.rbi +++ b/rbi/openai/models/image_create_variation_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } @@ -13,7 +15,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. @@ -23,12 +25,20 @@ module OpenAI # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. - sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::ImageCreateVariationParams::ResponseFormat::OrSymbol + ) + ) + end attr_accessor :response_format # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
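# --- Hedged sketch tying the Image model to the variation params above;
# --- requesting `b64_json` so the nilable `b64_json` attribute is populated
# --- (`images.create_variation` is assumed from the SDK's README):
#
#   require "base64"
#   require "pathname"
#   res = client.images.create_variation(
#     image: Pathname("input.png"),   # matches the Pathname | StringIO | IO | FilePart union
#     n: 1,
#     size: :"512x512",
#     response_format: :b64_json
#   )
#   img = res.data.first
#   File.binwrite("variation.png", Base64.decode64(img.b64_json)) if img.b64_json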
- sig { returns(T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::ImageCreateVariationParams::Size::OrSymbol)) + end attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -43,14 +53,16 @@ module OpenAI sig do params( image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), + response_format: + T.nilable( + OpenAI::ImageCreateVariationParams::ResponseFormat::OrSymbol + ), + size: T.nilable(OpenAI::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The image to use as the basis for the variation(s). Must be a valid PNG file, @@ -73,30 +85,43 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), - n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), - user: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + n: T.nilable(Integer), + response_format: + T.nilable( + OpenAI::ImageCreateVariationParams::ResponseFormat::OrSymbol + ), + size: T.nilable(OpenAI::ImageCreateVariationParams::Size::OrSymbol), + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The model to use for image generation. Only `dall-e-2` is supported at this # time. module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::ImageCreateVariationParams::Model::Variants] + ) + end + def self.variants + end end # The format in which the generated images are returned. 
Must be one of `url` or @@ -105,14 +130,32 @@ module OpenAI module ResponseFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) + URL = + T.let( + :url, + OpenAI::ImageCreateVariationParams::ResponseFormat::TaggedSymbol + ) + B64_JSON = + T.let( + :b64_json, + OpenAI::ImageCreateVariationParams::ResponseFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::ImageCreateVariationParams::ResponseFormat::TaggedSymbol + ] + ) + end + def self.values + end end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -120,15 +163,35 @@ module OpenAI module Size extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageCreateVariationParams::Size) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) - SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) - SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) + SIZE_256X256 = + T.let( + :"256x256", + OpenAI::ImageCreateVariationParams::Size::TaggedSymbol + ) + SIZE_512X512 = + T.let( + :"512x512", + OpenAI::ImageCreateVariationParams::Size::TaggedSymbol + ) + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::ImageCreateVariationParams::Size::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageCreateVariationParams::Size::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 54150702..6fd55690 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than @@ -38,14 +40,16 @@ module OpenAI # # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. - sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageEditParams::Background::OrSymbol)) } attr_accessor :background # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. 
Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. - sig { returns(T.nilable(T.any(Pathname, StringIO, IO, OpenAI::FilePart))) } + sig do + returns(T.nilable(T.any(Pathname, StringIO, IO, OpenAI::FilePart))) + end attr_reader :mask sig { params(mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart)).void } @@ -54,7 +58,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. @@ -64,20 +68,22 @@ module OpenAI # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. - sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol)) } attr_accessor :quality # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` # will always return base64-encoded images. - sig { returns(T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol)) + end attr_accessor :response_format # The size of the generated images. Must be one of `1024x1024`, `1536x1024` # (landscape), `1024x1536` (portrait), or `auto` (default value) for # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. - sig { returns(T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageEditParams::Size::OrSymbol)) } attr_accessor :size # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -91,25 +97,26 @@ module OpenAI sig do params( - image: T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: + T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ), prompt: String, - background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), + background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), + quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageEditParams::Size::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The image(s) to edit. 
Must be a supported image file or an array of images. @@ -160,32 +167,37 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - image: T.any( + override.returns( + { + image: + T.any( Pathname, StringIO, IO, OpenAI::FilePart, T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] ), - prompt: String, - background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), - n: T.nilable(Integer), - quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), - user: String, - request_options: OpenAI::RequestOptions - } - ) + prompt: String, + background: + T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), + mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + n: T.nilable(Integer), + quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageEditParams::Size::OrSymbol), + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The image(s) to edit. Must be a supported image file or an array of images. # @@ -197,8 +209,13 @@ module OpenAI module Image extend OpenAI::Internal::Type::Union - sig { override.returns([StringIO, T::Array[StringIO]]) } - def self.variants; end + Variants = T.type_alias { T.any(StringIO, T::Array[StringIO]) } + + sig do + override.returns(T::Array[OpenAI::ImageEditParams::Image::Variants]) + end + def self.variants + end StringArray = T.let( @@ -217,15 +234,23 @@ module OpenAI module Background extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Background) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditParams::Background) } OrSymbol = T.type_alias { T.any(Symbol, String) } - TRANSPARENT = T.let(:transparent, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) - OPAQUE = T.let(:opaque, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Background::TaggedSymbol) + TRANSPARENT = + T.let(:transparent, OpenAI::ImageEditParams::Background::TaggedSymbol) + OPAQUE = + T.let(:opaque, OpenAI::ImageEditParams::Background::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::ImageEditParams::Background::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Background::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::Background::TaggedSymbol] + ) + end + def self.values + end end # The model to use for image generation. 
Only `dall-e-2` and `gpt-image-1` are @@ -234,8 +259,14 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + + sig do + override.returns(T::Array[OpenAI::ImageEditParams::Model::Variants]) + end + def self.variants + end end # The quality of the image that will be generated. `high`, `medium` and `low` are @@ -244,17 +275,24 @@ module OpenAI module Quality extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Quality) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditParams::Quality) } OrSymbol = T.type_alias { T.any(Symbol, String) } - STANDARD = T.let(:standard, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Quality::TaggedSymbol) + STANDARD = + T.let(:standard, OpenAI::ImageEditParams::Quality::TaggedSymbol) + LOW = T.let(:low, OpenAI::ImageEditParams::Quality::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::ImageEditParams::Quality::TaggedSymbol) + HIGH = T.let(:high, OpenAI::ImageEditParams::Quality::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::ImageEditParams::Quality::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Quality::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::Quality::TaggedSymbol] + ) + end + def self.values + end end # The format in which the generated images are returned. Must be one of `url` or @@ -264,14 +302,26 @@ module OpenAI module ResponseFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditParams::ResponseFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) + URL = T.let(:url, OpenAI::ImageEditParams::ResponseFormat::TaggedSymbol) + B64_JSON = + T.let( + :b64_json, + OpenAI::ImageEditParams::ResponseFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::ResponseFormat::TaggedSymbol] + ) + end + def self.values + end end # The size of the generated images. 
Must be one of `1024x1024`, `1536x1024` @@ -280,18 +330,29 @@ module OpenAI module Size extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, String) } - SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - SIZE_1536X1024 = T.let(:"1536x1024", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - SIZE_1024X1536 = T.let(:"1024x1536", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageEditParams::Size::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::ImageEditParams::Size::TaggedSymbol]) } - def self.values; end + SIZE_256X256 = + T.let(:"256x256", OpenAI::ImageEditParams::Size::TaggedSymbol) + SIZE_512X512 = + T.let(:"512x512", OpenAI::ImageEditParams::Size::TaggedSymbol) + SIZE_1024X1024 = + T.let(:"1024x1024", OpenAI::ImageEditParams::Size::TaggedSymbol) + SIZE_1536X1024 = + T.let(:"1536x1024", OpenAI::ImageEditParams::Size::TaggedSymbol) + SIZE_1024X1536 = + T.let(:"1024x1536", OpenAI::ImageEditParams::Size::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::ImageEditParams::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::Size::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index 3dd3c2e4..72894e72 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A text description of the desired image(s). The maximum length is 32000 # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters # for `dall-e-3`. @@ -19,18 +21,22 @@ module OpenAI # # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol)) + end attr_accessor :background # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. - sig { returns(T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol))) } + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # Control the content-moderation level for images generated by `gpt-image-1`. Must # be either `low` for less restrictive filtering or `auto` (default value). - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol)) + end attr_accessor :moderation # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only @@ -46,7 +52,9 @@ module OpenAI # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. 
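# --- Hedged sketch for the edit params above (`images.edit` per the SDK's
# --- README); `quality: :high` requires `gpt-image-1`, per the comments in the
# --- signatures:
#
#   require "pathname"
#   edited = client.images.edit(
#     image: Pathname("photo.png"),
#     mask: Pathname("mask.png"),
#     prompt: "Add a red knitted hat",
#     model: :"gpt-image-1",
#     size: :"1024x1024",
#     quality: :high
#   )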
Must be one of `png`, `jpeg`, or `webp`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol)) } + sig do + returns(T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol)) + end attr_accessor :output_format # The quality of the image that will be generated. @@ -56,28 +64,32 @@ module OpenAI # - `high`, `medium` and `low` are supported for `gpt-image-1`. # - `hd` and `standard` are supported for `dall-e-3`. # - `standard` is the only option for `dall-e-2`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol)) } attr_accessor :quality # The format in which generated images with `dall-e-2` and `dall-e-3` are # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes # after the image has been generated. This parameter isn't supported for # `gpt-image-1` which will always return base64-encoded images. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol) + ) + end attr_accessor :response_format # The size of the generated images. Must be one of `1024x1024`, `1536x1024` # (landscape), `1024x1536` (portrait), or `auto` (default value) for # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. - sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol)) } attr_accessor :size # The style of the generated images. This parameter is only supported for # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean # towards generating hyper-real and dramatic images. Natural causes the model to # produce more natural, less hyper-real looking images. 
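# --- The enum modules in this patch keep their runtime `values` helpers, and
# --- the unions now pair `variants` with a `Variants` type alias; a small
# --- introspection sketch grounded on the definitions above:
#
#   OpenAI::ImageEditParams::Background.values
#   # => the tagged symbols :transparent, :opaque, :auto
#   OpenAI::ImageEditParams::Model.variants
#   # => runtime variants of the String-or-ImageModel union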
- sig { returns(T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol)) } attr_accessor :style # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -92,20 +104,23 @@ module OpenAI sig do params( prompt: String, - background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), - moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol), + background: + T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + moderation: + T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), output_compression: T.nilable(Integer), - output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol), - quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), + output_format: + T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A text description of the desired image(s). The maximum length is 32000 @@ -165,28 +180,34 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - prompt: String, - background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), - moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol), - n: T.nilable(Integer), - output_compression: T.nilable(Integer), - output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol), - quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), - user: String, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + prompt: String, + background: + T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + moderation: + T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), + n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol), + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Allows to set transparency for the background of the generated image(s). This # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, @@ -198,15 +219,29 @@ module OpenAI module Background extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Background) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenerateParams::Background) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TRANSPARENT = T.let(:transparent, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) - OPAQUE = T.let(:opaque, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Background::TaggedSymbol]) } - def self.values; end + TRANSPARENT = + T.let( + :transparent, + OpenAI::ImageGenerateParams::Background::TaggedSymbol + ) + OPAQUE = + T.let(:opaque, OpenAI::ImageGenerateParams::Background::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::ImageGenerateParams::Background::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Background::TaggedSymbol] + ) + end + def self.values + end end # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or @@ -215,8 +250,16 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Model::Variants] + ) + end + def self.variants + end end # Control the content-moderation level for images generated by `gpt-image-1`. 
Must @@ -224,14 +267,23 @@ module OpenAI module Moderation extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Moderation) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenerateParams::Moderation) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - LOW = T.let(:low, OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol) + LOW = T.let(:low, OpenAI::ImageGenerateParams::Moderation::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::ImageGenerateParams::Moderation::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Moderation::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Moderation::TaggedSymbol] + ) + end + def self.values + end end # The format in which the generated images are returned. This parameter is only @@ -239,15 +291,26 @@ module OpenAI module OutputFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenerateParams::OutputFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - PNG = T.let(:png, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) - JPEG = T.let(:jpeg, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) - WEBP = T.let(:webp, OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol) + PNG = + T.let(:png, OpenAI::ImageGenerateParams::OutputFormat::TaggedSymbol) + JPEG = + T.let(:jpeg, OpenAI::ImageGenerateParams::OutputFormat::TaggedSymbol) + WEBP = + T.let(:webp, OpenAI::ImageGenerateParams::OutputFormat::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::OutputFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::OutputFormat::TaggedSymbol] + ) + end + def self.values + end end # The quality of the image that will be generated. 
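[Reviewer note, annotation only and not part of the patch: the hunks in this file flatten every `OpenAI::Models::ImageGenerateParams::*` reference to `OpenAI::ImageGenerateParams::*` and split the long one-line `sig` blocks into multi-line form; the declared types are unchanged. Below is a minimal call-site sketch of what these sigs accept, assuming the top-level `OpenAI::ImageGenerateParams` constant resolves as the rewritten references imply; the symbol values come from the enum constants declared in these hunks.]

    # Hypothetical usage sketch, not from this patch. Each enum parameter is
    # typed as `OrSymbol` (`T.any(Symbol, String)`), so plain symbols type-check.
    params = OpenAI::ImageGenerateParams.new(
      prompt: "A cubist painting of a lighthouse at dusk", # required String
      model: :"gpt-image-1",   # String or OpenAI::ImageModel::OrSymbol
      output_format: :png,     # OutputFormat: :png, :jpeg, or :webp
      quality: :high,          # Quality: :high, :medium, or :low for gpt-image-1
      size: :"1024x1024"       # Size: any of the SIZE_* constant values
    )
    params.to_hash # plain symbolized hash, per the `override.returns({...})` sig above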
@@ -260,18 +323,26 @@ module OpenAI module Quality extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageGenerateParams::Quality) } OrSymbol = T.type_alias { T.any(Symbol, String) } - STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - LOW = T.let(:low, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol]) } - def self.values; end + STANDARD = + T.let(:standard, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + HD = T.let(:hd, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + LOW = T.let(:low, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + MEDIUM = + T.let(:medium, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + HIGH = T.let(:high, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::ImageGenerateParams::Quality::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Quality::TaggedSymbol] + ) + end + def self.values + end end # The format in which generated images with `dall-e-2` and `dall-e-3` are @@ -281,14 +352,27 @@ module OpenAI module ResponseFormat extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenerateParams::ResponseFormat) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) - B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) + URL = + T.let(:url, OpenAI::ImageGenerateParams::ResponseFormat::TaggedSymbol) + B64_JSON = + T.let( + :b64_json, + OpenAI::ImageGenerateParams::ResponseFormat::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::ResponseFormat::TaggedSymbol] + ) + end + def self.values + end end # The size of the generated images. 
Must be one of `1024x1024`, `1536x1024` @@ -298,20 +382,33 @@ module OpenAI module Size extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageGenerateParams::Size) } OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1024X1024 = T.let(:"1024x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1536X1024 = T.let(:"1536x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1024X1536 = T.let(:"1024x1536", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1792X1024 = T.let(:"1792x1024", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - SIZE_1024X1792 = T.let(:"1024x1792", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol]) } - def self.values; end + AUTO = T.let(:auto, OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1024 = + T.let(:"1024x1024", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1536X1024 = + T.let(:"1536x1024", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1536 = + T.let(:"1024x1536", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_256X256 = + T.let(:"256x256", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_512X512 = + T.let(:"512x512", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1792X1024 = + T.let(:"1792x1024", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + SIZE_1024X1792 = + T.let(:"1024x1792", OpenAI::ImageGenerateParams::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Size::TaggedSymbol] + ) + end + def self.values + end end # The style of the generated images. 
This parameter is only supported for @@ -321,14 +418,21 @@ module OpenAI module Style extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageGenerateParams::Style) } OrSymbol = T.type_alias { T.any(Symbol, String) } - VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) - NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) + VIVID = T.let(:vivid, OpenAI::ImageGenerateParams::Style::TaggedSymbol) + NATURAL = + T.let(:natural, OpenAI::ImageGenerateParams::Style::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ImageGenerateParams::Style::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/image_model.rbi b/rbi/openai/models/image_model.rbi index bf52f382..dabb10a7 100644 --- a/rbi/openai/models/image_model.rbi +++ b/rbi/openai/models/image_model.rbi @@ -5,15 +5,16 @@ module OpenAI module ImageModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ImageModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) - DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) - GPT_IMAGE_1 = T.let(:"gpt-image-1", OpenAI::Models::ImageModel::TaggedSymbol) + DALL_E_2 = T.let(:"dall-e-2", OpenAI::ImageModel::TaggedSymbol) + DALL_E_3 = T.let(:"dall-e-3", OpenAI::ImageModel::TaggedSymbol) + GPT_IMAGE_1 = T.let(:"gpt-image-1", OpenAI::ImageModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ImageModel::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::ImageModel::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/images_response.rbi b/rbi/openai/models/images_response.rbi index cdfcd65f..86766493 100644 --- a/rbi/openai/models/images_response.rbi +++ b/rbi/openai/models/images_response.rbi @@ -3,32 +3,33 @@ module OpenAI module Models class ImagesResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The Unix timestamp (in seconds) of when the image was created. sig { returns(Integer) } attr_accessor :created # The list of generated images. - sig { returns(T.nilable(T::Array[OpenAI::Models::Image])) } + sig { returns(T.nilable(T::Array[OpenAI::Image])) } attr_reader :data - sig { params(data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)]).void } + sig { params(data: T::Array[OpenAI::Image::OrHash]).void } attr_writer :data # For `gpt-image-1` only, the token usage information for the image generation. - sig { returns(T.nilable(OpenAI::Models::ImagesResponse::Usage)) } + sig { returns(T.nilable(OpenAI::ImagesResponse::Usage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::ImagesResponse::Usage, OpenAI::Internal::AnyHash)).void } + sig { params(usage: OpenAI::ImagesResponse::Usage::OrHash).void } attr_writer :usage # The response from the image generation endpoint. 
sig do params( created: Integer, - data: T::Array[T.any(OpenAI::Models::Image, OpenAI::Internal::AnyHash)], - usage: T.any(OpenAI::Models::ImagesResponse::Usage, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + data: T::Array[OpenAI::Image::OrHash], + usage: OpenAI::ImagesResponse::Usage::OrHash + ).returns(T.attached_class) end def self.new( # The Unix timestamp (in seconds) of when the image was created. @@ -37,29 +38,37 @@ module OpenAI data: nil, # For `gpt-image-1` only, the token usage information for the image generation. usage: nil - ); end + ) + end + sig do - override - .returns( - {created: Integer, data: T::Array[OpenAI::Models::Image], usage: OpenAI::Models::ImagesResponse::Usage} - ) + override.returns( + { + created: Integer, + data: T::Array[OpenAI::Image], + usage: OpenAI::ImagesResponse::Usage + } + ) + end + def to_hash end - def to_hash; end class Usage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens (images and text) in the input prompt. sig { returns(Integer) } attr_accessor :input_tokens # The input tokens detailed information for the image generation. - sig { returns(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails) } + sig { returns(OpenAI::ImagesResponse::Usage::InputTokensDetails) } attr_reader :input_tokens_details sig do params( - input_tokens_details: T.any(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, OpenAI::Internal::AnyHash) - ) - .void + input_tokens_details: + OpenAI::ImagesResponse::Usage::InputTokensDetails::OrHash + ).void end attr_writer :input_tokens_details @@ -75,11 +84,11 @@ module OpenAI sig do params( input_tokens: Integer, - input_tokens_details: T.any(OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, OpenAI::Internal::AnyHash), + input_tokens_details: + OpenAI::ImagesResponse::Usage::InputTokensDetails::OrHash, output_tokens: Integer, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of tokens (images and text) in the input prompt. @@ -90,21 +99,27 @@ module OpenAI output_tokens:, # The total number of tokens (images and text) used for the image generation. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - input_tokens: Integer, - input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, - output_tokens: Integer, - total_tokens: Integer - } - ) + override.returns( + { + input_tokens: Integer, + input_tokens_details: + OpenAI::ImagesResponse::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end class InputTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of image tokens in the input prompt. sig { returns(Integer) } attr_accessor :image_tokens @@ -114,15 +129,24 @@ module OpenAI attr_accessor :text_tokens # The input tokens detailed information for the image generation. - sig { params(image_tokens: Integer, text_tokens: Integer).returns(T.attached_class) } + sig do + params(image_tokens: Integer, text_tokens: Integer).returns( + T.attached_class + ) + end def self.new( # The number of image tokens in the input prompt. image_tokens:, # The number of text tokens in the input prompt. 
text_tokens: - ); end - sig { override.returns({image_tokens: Integer, text_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns({ image_tokens: Integer, text_tokens: Integer }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/metadata.rbi b/rbi/openai/models/metadata.rbi index 04102855..82208718 100644 --- a/rbi/openai/models/metadata.rbi +++ b/rbi/openai/models/metadata.rbi @@ -2,6 +2,10 @@ module OpenAI module Models - Metadata = T.let(OpenAI::Internal::Type::HashOf[String], OpenAI::Internal::Type::Converter) + Metadata = + T.let( + OpenAI::Internal::Type::HashOf[String], + OpenAI::Internal::Type::Converter + ) end end diff --git a/rbi/openai/models/model.rbi b/rbi/openai/models/model.rbi index 06ccb9b5..3e2886a8 100644 --- a/rbi/openai/models/model.rbi +++ b/rbi/openai/models/model.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Model < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The model identifier, which can be referenced in the API endpoints. sig { returns(String) } attr_accessor :id @@ -20,7 +22,14 @@ module OpenAI attr_accessor :owned_by # Describes an OpenAI model offering that can be used with the API. - sig { params(id: String, created: Integer, owned_by: String, object: Symbol).returns(T.attached_class) } + sig do + params( + id: String, + created: Integer, + owned_by: String, + object: Symbol + ).returns(T.attached_class) + end def self.new( # The model identifier, which can be referenced in the API endpoints. id:, @@ -30,9 +39,16 @@ module OpenAI owned_by:, # The object type, which is always "model". object: :model - ); end - sig { override.returns({id: String, created: Integer, object: Symbol, owned_by: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { id: String, created: Integer, object: Symbol, owned_by: String } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/model_delete_params.rbi b/rbi/openai/models/model_delete_params.rbi index 9a19e392..34aa523d 100644 --- a/rbi/openai/models/model_delete_params.rbi +++ b/rbi/openai/models/model_delete_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/model_deleted.rbi b/rbi/openai/models/model_deleted.rbi index e5b41780..61571ba3 100644 --- a/rbi/openai/models/model_deleted.rbi +++ b/rbi/openai/models/model_deleted.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ModelDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -12,11 +14,19 @@ module OpenAI sig { returns(String) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: String).returns(T.attached_class) } - def self.new(id:, deleted:, object:); end + sig do + params(id: 
String, deleted: T::Boolean, object: String).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object:) + end - sig { override.returns({id: String, deleted: T::Boolean, object: String}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: String }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/model_list_params.rbi b/rbi/openai/models/model_list_params.rbi index 6ecd8a67..6b5c0602 100644 --- a/rbi/openai/models/model_list_params.rbi +++ b/rbi/openai/models/model_list_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/model_retrieve_params.rbi b/rbi/openai/models/model_retrieve_params.rbi index 1b835b5d..69b847c8 100644 --- a/rbi/openai/models/model_retrieve_params.rbi +++ b/rbi/openai/models/model_retrieve_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/moderation.rbi b/rbi/openai/models/moderation.rbi index 8f8c32af..d069ca8a 100644 --- a/rbi/openai/models/moderation.rbi +++ b/rbi/openai/models/moderation.rbi @@ -3,30 +3,34 @@ module OpenAI module Models class Moderation < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of the categories, and whether they are flagged or not. - sig { returns(OpenAI::Models::Moderation::Categories) } + sig { returns(OpenAI::Moderation::Categories) } attr_reader :categories - sig { params(categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::AnyHash)).void } + sig { params(categories: OpenAI::Moderation::Categories::OrHash).void } attr_writer :categories # A list of the categories along with the input type(s) that the score applies to. 
- sig { returns(OpenAI::Models::Moderation::CategoryAppliedInputTypes) } + sig { returns(OpenAI::Moderation::CategoryAppliedInputTypes) } attr_reader :category_applied_input_types sig do params( - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::AnyHash) - ) - .void + category_applied_input_types: + OpenAI::Moderation::CategoryAppliedInputTypes::OrHash + ).void end attr_writer :category_applied_input_types # A list of the categories along with their scores as predicted by model. - sig { returns(OpenAI::Models::Moderation::CategoryScores) } + sig { returns(OpenAI::Moderation::CategoryScores) } attr_reader :category_scores - sig { params(category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::AnyHash)).void } + sig do + params(category_scores: OpenAI::Moderation::CategoryScores::OrHash).void + end attr_writer :category_scores # Whether any of the below categories are flagged. @@ -35,12 +39,12 @@ module OpenAI sig do params( - categories: T.any(OpenAI::Models::Moderation::Categories, OpenAI::Internal::AnyHash), - category_applied_input_types: T.any(OpenAI::Models::Moderation::CategoryAppliedInputTypes, OpenAI::Internal::AnyHash), - category_scores: T.any(OpenAI::Models::Moderation::CategoryScores, OpenAI::Internal::AnyHash), + categories: OpenAI::Moderation::Categories::OrHash, + category_applied_input_types: + OpenAI::Moderation::CategoryAppliedInputTypes::OrHash, + category_scores: OpenAI::Moderation::CategoryScores::OrHash, flagged: T::Boolean - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of the categories, and whether they are flagged or not. @@ -51,21 +55,26 @@ module OpenAI category_scores:, # Whether any of the below categories are flagged. flagged: - ); end + ) + end + sig do - override - .returns( - { - categories: OpenAI::Models::Moderation::Categories, - category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes, - category_scores: OpenAI::Models::Moderation::CategoryScores, - flagged: T::Boolean - } - ) + override.returns( + { + categories: OpenAI::Moderation::Categories, + category_applied_input_types: + OpenAI::Moderation::CategoryAppliedInputTypes, + category_scores: OpenAI::Moderation::CategoryScores, + flagged: T::Boolean + } + ) + end + def to_hash end - def to_hash; end class Categories < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Content that expresses, incites, or promotes harassing language towards any # target. sig { returns(T::Boolean) } @@ -151,8 +160,7 @@ module OpenAI sexual_minors: T::Boolean, violence: T::Boolean, violence_graphic: T::Boolean - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Content that expresses, incites, or promotes harassing language towards any @@ -198,109 +206,221 @@ module OpenAI violence:, # Content that depicts death, violence, or physical injury in graphic detail. 
violence_graphic: - ); end + ) + end + sig do - override - .returns( - { - harassment: T::Boolean, - harassment_threatening: T::Boolean, - hate: T::Boolean, - hate_threatening: T::Boolean, - illicit: T.nilable(T::Boolean), - illicit_violent: T.nilable(T::Boolean), - self_harm: T::Boolean, - self_harm_instructions: T::Boolean, - self_harm_intent: T::Boolean, - sexual: T::Boolean, - sexual_minors: T::Boolean, - violence: T::Boolean, - violence_graphic: T::Boolean - } - ) - end - def to_hash; end + override.returns( + { + harassment: T::Boolean, + harassment_threatening: T::Boolean, + hate: T::Boolean, + hate_threatening: T::Boolean, + illicit: T.nilable(T::Boolean), + illicit_violent: T.nilable(T::Boolean), + self_harm: T::Boolean, + self_harm_instructions: T::Boolean, + self_harm_intent: T::Boolean, + sexual: T::Boolean, + sexual_minors: T::Boolean, + violence: T::Boolean, + violence_graphic: T::Boolean + } + ) + end + def to_hash + end end class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The applied input type(s) for the category 'harassment'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol + ] + ) + end attr_accessor :harassment # The applied input type(s) for the category 'harassment/threatening'. sig do returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol + ] ) end attr_accessor :harassment_threatening # The applied input type(s) for the category 'hate'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol + ] + ) + end attr_accessor :hate # The applied input type(s) for the category 'hate/threatening'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol + ] + ) + end attr_accessor :hate_threatening # The applied input type(s) for the category 'illicit'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol + ] + ) + end attr_accessor :illicit # The applied input type(s) for the category 'illicit/violent'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol + ] + ) + end attr_accessor :illicit_violent # The applied input type(s) for the category 'self-harm'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol + ] + ) + end attr_accessor :self_harm # The applied input type(s) for the category 'self-harm/instructions'. 
sig do returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ] ) end attr_accessor :self_harm_instructions # The applied input type(s) for the category 'self-harm/intent'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol + ] + ) + end attr_accessor :self_harm_intent # The applied input type(s) for the category 'sexual'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol + ] + ) + end attr_accessor :sexual # The applied input type(s) for the category 'sexual/minors'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol + ] + ) + end attr_accessor :sexual_minors # The applied input type(s) for the category 'violence'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol + ] + ) + end attr_accessor :violence # The applied input type(s) for the category 'violence/graphic'. - sig { returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) } + sig do + returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ] + ) + end attr_accessor :violence_graphic # A list of the categories along with the input type(s) that the score applies to. 
sig do params( - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::OrSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::OrSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::OrSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::OrSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::OrSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::OrSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::OrSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::OrSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::OrSymbol], - sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::OrSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::OrSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::OrSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::OrSymbol] - ) - .returns(T.attached_class) + harassment: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment::OrSymbol + ], + harassment_threatening: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::OrSymbol + ], + hate: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Hate::OrSymbol + ], + hate_threatening: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening::OrSymbol + ], + illicit: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Illicit::OrSymbol + ], + illicit_violent: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent::OrSymbol + ], + self_harm: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::OrSymbol + ], + self_harm_instructions: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::OrSymbol + ], + self_harm_intent: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::OrSymbol + ], + sexual: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::OrSymbol + ], + sexual_minors: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor::OrSymbol + ], + violence: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::OrSymbol + ], + violence_graphic: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::OrSymbol + ] + ).returns(T.attached_class) end def self.new( # The applied input type(s) for the category 'harassment'. @@ -329,238 +449,478 @@ module OpenAI violence:, # The applied input type(s) for the category 'violence/graphic'. 
violence_graphic: - ); end + ) + end + sig do - override - .returns( - { - harassment: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol], - harassment_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol], - hate: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol], - hate_threatening: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol], - illicit: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol], - illicit_violent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol], - self_harm: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol], - self_harm_instructions: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol], - self_harm_intent: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol], - sexual: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol], - sexual_minors: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol], - violence: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol], - violence_graphic: T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol] - } - ) - end - def to_hash; end + override.returns( + { + harassment: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol + ], + harassment_threatening: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol + ], + hate: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol + ], + hate_threatening: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol + ], + illicit: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol + ], + illicit_violent: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol + ], + self_harm: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol + ], + self_harm_instructions: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ], + self_harm_intent: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol + ], + sexual: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol + ], + sexual_minors: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol + ], + violence: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol + ], + violence_graphic: + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ] + } + ) + end + def to_hash + end module Harassment extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol + ) sig do - 
override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module HarassmentThreatening extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = - T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module Hate extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } + T.type_alias do + T.all(Symbol, OpenAI::Moderation::CategoryAppliedInputTypes::Hate) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol + ] + ) + end + def self.values + end end module HateThreatening extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module Illicit extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::Illicit + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + 
OpenAI::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol + ] + ) + end + def self.values + end end module IllicitViolent extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module SelfHarm extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol + ) + IMAGE = + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol + ] + ) + end + def self.values + end end module SelfHarmInstruction extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = - T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ) IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module SelfHarmIntent extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + TEXT = + T.let( 
+ :text, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol + ) IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module Sexual extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol + ) + IMAGE = + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol + ] + ) + end + def self.values + end end module SexualMinor extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end module Violence extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::Violence + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) - IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol + ) + IMAGE = + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol + ] + ) + end + def self.values + end end module ViolenceGraphic extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { 
T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } + T.type_alias do + T.all( + Symbol, + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ) IMAGE = - T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) + T.let( + :image, + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end class CategoryScores < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The score for the category 'harassment'. sig { returns(Float) } attr_accessor :harassment @@ -629,8 +989,7 @@ module OpenAI sexual_minors: Float, violence: Float, violence_graphic: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The score for the category 'harassment'. @@ -659,28 +1018,30 @@ module OpenAI violence:, # The score for the category 'violence/graphic'. violence_graphic: - ); end + ) + end + sig do - override - .returns( - { - harassment: Float, - harassment_threatening: Float, - hate: Float, - hate_threatening: Float, - illicit: Float, - illicit_violent: Float, - self_harm: Float, - self_harm_instructions: Float, - self_harm_intent: Float, - sexual: Float, - sexual_minors: Float, - violence: Float, - violence_graphic: Float - } - ) - end - def to_hash; end + override.returns( + { + harassment: Float, + harassment_threatening: Float, + hate: Float, + hate_threatening: Float, + illicit: Float, + illicit_violent: Float, + self_harm: Float, + self_harm_instructions: Float, + self_harm_intent: Float, + sexual: Float, + sexual_minors: Float, + violence: Float, + violence_graphic: Float + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi index 527e104f..7644e955 100644 --- a/rbi/openai/models/moderation_create_params.rbi +++ b/rbi/openai/models/moderation_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. sig do @@ -13,7 +15,12 @@ module OpenAI T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + T::Array[ + T.any( + OpenAI::ModerationImageURLInput, + OpenAI::ModerationTextInput + ) + ] ) ) end @@ -23,29 +30,32 @@ module OpenAI # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models # [here](https://platform.openai.com/docs/models#moderation). 
- sig { returns(T.nilable(T.any(String, OpenAI::Models::ModerationModel::OrSymbol))) } + sig do + returns(T.nilable(T.any(String, OpenAI::ModerationModel::OrSymbol))) + end attr_reader :model - sig { params(model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol)).void } + sig do + params(model: T.any(String, OpenAI::ModerationModel::OrSymbol)).void + end attr_writer :model sig do params( - input: T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::AnyHash, - OpenAI::Models::ModerationTextInput - ) - ] - ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + input: + T.any( + String, + T::Array[String], + T::Array[ + T.any( + OpenAI::ModerationImageURLInput::OrHash, + OpenAI::ModerationTextInput::OrHash + ) + ] + ), + model: T.any(String, OpenAI::ModerationModel::OrSymbol), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Input (or inputs) to classify. Can be a single string, an array of strings, or @@ -57,41 +67,69 @@ module OpenAI # [here](https://platform.openai.com/docs/models#moderation). model: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - input: T.any( + override.returns( + { + input: + T.any( String, T::Array[String], - T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] + T::Array[ + T.any( + OpenAI::ModerationImageURLInput, + OpenAI::ModerationTextInput + ) + ] ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: OpenAI::RequestOptions - } - ) + model: T.any(String, OpenAI::ModerationModel::OrSymbol), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. 
module Input extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [String, T::Array[String], T::Array[T.any(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)]] + Variants = + T.type_alias do + T.any( + String, + T::Array[String], + T::Array[ + T.any( + OpenAI::ModerationImageURLInput, + OpenAI::ModerationTextInput + ) + ] ) + end + + sig do + override.returns( + T::Array[OpenAI::ModerationCreateParams::Input::Variants] + ) + end + def self.variants end - def self.variants; end - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) ModerationMultiModalInputArray = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::ModerationMultiModalInput], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::ModerationMultiModalInput + ], OpenAI::Internal::Type::Converter ) end @@ -103,8 +141,16 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ModerationModel::TaggedSymbol]) } - def self.variants; end + Variants = + T.type_alias { T.any(String, OpenAI::ModerationModel::TaggedSymbol) } + + sig do + override.returns( + T::Array[OpenAI::ModerationCreateParams::Model::Variants] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/moderation_create_response.rbi b/rbi/openai/models/moderation_create_response.rbi index 7e78a995..0da4c5b3 100644 --- a/rbi/openai/models/moderation_create_response.rbi +++ b/rbi/openai/models/moderation_create_response.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique identifier for the moderation request. sig { returns(String) } attr_accessor :id @@ -12,7 +14,7 @@ module OpenAI attr_accessor :model # A list of moderation objects. - sig { returns(T::Array[OpenAI::Models::Moderation]) } + sig { returns(T::Array[OpenAI::Moderation]) } attr_accessor :results # Represents if a given text input is potentially harmful. @@ -20,9 +22,8 @@ module OpenAI params( id: String, model: String, - results: T::Array[T.any(OpenAI::Models::Moderation, OpenAI::Internal::AnyHash)] - ) - .returns(T.attached_class) + results: T::Array[OpenAI::Moderation::OrHash] + ).returns(T.attached_class) end def self.new( # The unique identifier for the moderation request. @@ -31,9 +32,16 @@ module OpenAI model:, # A list of moderation objects. results: - ); end - sig { override.returns({id: String, model: String, results: T::Array[OpenAI::Models::Moderation]}) } - def to_hash; end + ) + end + + sig do + override.returns( + { id: String, model: String, results: T::Array[OpenAI::Moderation] } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/moderation_image_url_input.rbi b/rbi/openai/models/moderation_image_url_input.rbi index 091d5263..df96e516 100644 --- a/rbi/openai/models/moderation_image_url_input.rbi +++ b/rbi/openai/models/moderation_image_url_input.rbi @@ -3,12 +3,16 @@ module OpenAI module Models class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Contains either an image URL or a data URL for a base64 encoded image. 
- sig { returns(OpenAI::Models::ModerationImageURLInput::ImageURL) } + sig { returns(OpenAI::ModerationImageURLInput::ImageURL) } attr_reader :image_url sig do - params(image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash)).void + params( + image_url: OpenAI::ModerationImageURLInput::ImageURL::OrHash + ).void end attr_writer :image_url @@ -19,21 +23,29 @@ module OpenAI # An object describing an image to classify. sig do params( - image_url: T.any(OpenAI::Models::ModerationImageURLInput::ImageURL, OpenAI::Internal::AnyHash), + image_url: OpenAI::ModerationImageURLInput::ImageURL::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Contains either an image URL or a data URL for a base64 encoded image. image_url:, # Always `image_url`. type: :image_url - ); end - sig { override.returns({image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { image_url: OpenAI::ModerationImageURLInput::ImageURL, type: Symbol } + ) + end + def to_hash + end class ImageURL < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Either a URL of the image or the base64 encoded image data. sig { returns(String) } attr_accessor :url @@ -43,9 +55,12 @@ module OpenAI def self.new( # Either a URL of the image or the base64 encoded image data. url: - ); end - sig { override.returns({url: String}) } - def to_hash; end + ) + end + + sig { override.returns({ url: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/moderation_model.rbi b/rbi/openai/models/moderation_model.rbi index 36b6b843..2e122275 100644 --- a/rbi/openai/models/moderation_model.rbi +++ b/rbi/openai/models/moderation_model.rbi @@ -5,17 +5,24 @@ module OpenAI module ModerationModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ModerationModel) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ModerationModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } - OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) + OMNI_MODERATION_LATEST = + T.let(:"omni-moderation-latest", OpenAI::ModerationModel::TaggedSymbol) OMNI_MODERATION_2024_09_26 = - T.let(:"omni-moderation-2024-09-26", OpenAI::Models::ModerationModel::TaggedSymbol) - TEXT_MODERATION_LATEST = T.let(:"text-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) - TEXT_MODERATION_STABLE = T.let(:"text-moderation-stable", OpenAI::Models::ModerationModel::TaggedSymbol) + T.let( + :"omni-moderation-2024-09-26", + OpenAI::ModerationModel::TaggedSymbol + ) + TEXT_MODERATION_LATEST = + T.let(:"text-moderation-latest", OpenAI::ModerationModel::TaggedSymbol) + TEXT_MODERATION_STABLE = + T.let(:"text-moderation-stable", OpenAI::ModerationModel::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ModerationModel::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::ModerationModel::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/moderation_multi_modal_input.rbi b/rbi/openai/models/moderation_multi_modal_input.rbi index 5f5a8f6d..9c3ad7a6 100644 --- a/rbi/openai/models/moderation_multi_modal_input.rbi +++ b/rbi/openai/models/moderation_multi_modal_input.rbi @@ -6,8 +6,16 @@ module OpenAI module ModerationMultiModalInput extend OpenAI::Internal::Type::Union - 
sig { override.returns([OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput]) } - def self.variants; end + Variants = + T.type_alias do + T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput) + end + + sig do + override.returns(T::Array[OpenAI::ModerationMultiModalInput::Variants]) + end + def self.variants + end end end end diff --git a/rbi/openai/models/moderation_text_input.rbi b/rbi/openai/models/moderation_text_input.rbi index 462e88ed..98b5925e 100644 --- a/rbi/openai/models/moderation_text_input.rbi +++ b/rbi/openai/models/moderation_text_input.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ModerationTextInput < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A string of text to classify. sig { returns(String) } attr_accessor :text @@ -18,9 +20,12 @@ module OpenAI text:, # Always `text`. type: :text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/other_file_chunking_strategy_object.rbi b/rbi/openai/models/other_file_chunking_strategy_object.rbi index 5af14eda..d7ae19bd 100644 --- a/rbi/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/openai/models/other_file_chunking_strategy_object.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Always `other`. sig { returns(Symbol) } attr_accessor :type @@ -14,9 +16,12 @@ module OpenAI def self.new( # Always `other`. type: :other - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/reasoning.rbi b/rbi/openai/models/reasoning.rbi index a54b00f4..c11df8b1 100644 --- a/rbi/openai/models/reasoning.rbi +++ b/rbi/openai/models/reasoning.rbi @@ -3,13 +3,15 @@ module OpenAI module Models class Reasoning < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # **o-series models only** # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. - sig { returns(T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol)) } + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :effort # **Deprecated:** use `summary` instead. @@ -17,13 +19,13 @@ module OpenAI # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. - sig { returns(T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol)) } + sig { returns(T.nilable(OpenAI::Reasoning::GenerateSummary::OrSymbol)) } attr_accessor :generate_summary # A summary of the reasoning performed by the model. This can be useful for # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. 
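+      # A construction sketch (illustrative; assumes the `OpenAI::Reasoning`
+      # alias referenced in these sigs, with values from the enums below):
+      #
+      #   OpenAI::Reasoning.new(effort: :medium, summary: :auto)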
- sig { returns(T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol)) } + sig { returns(T.nilable(OpenAI::Reasoning::Summary::OrSymbol)) } attr_accessor :summary # **o-series models only** @@ -32,11 +34,11 @@ module OpenAI # [reasoning models](https://platform.openai.com/docs/guides/reasoning). sig do params( - effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol), - summary: T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol) - ) - .returns(T.attached_class) + effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + generate_summary: + T.nilable(OpenAI::Reasoning::GenerateSummary::OrSymbol), + summary: T.nilable(OpenAI::Reasoning::Summary::OrSymbol) + ).returns(T.attached_class) end def self.new( # **o-series models only** @@ -56,18 +58,21 @@ module OpenAI # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. summary: nil - ); end + ) + end + sig do - override - .returns( - { - effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - generate_summary: T.nilable(OpenAI::Models::Reasoning::GenerateSummary::OrSymbol), - summary: T.nilable(OpenAI::Models::Reasoning::Summary::OrSymbol) - } - ) + override.returns( + { + effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + generate_summary: + T.nilable(OpenAI::Reasoning::GenerateSummary::OrSymbol), + summary: T.nilable(OpenAI::Reasoning::Summary::OrSymbol) + } + ) + end + def to_hash end - def to_hash; end # **Deprecated:** use `summary` instead. # @@ -77,15 +82,23 @@ module OpenAI module GenerateSummary extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Reasoning::GenerateSummary) } OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) - CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) - DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Reasoning::GenerateSummary::TaggedSymbol) + CONCISE = + T.let(:concise, OpenAI::Reasoning::GenerateSummary::TaggedSymbol) + DETAILED = + T.let(:detailed, OpenAI::Reasoning::GenerateSummary::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Reasoning::GenerateSummary::TaggedSymbol] + ) + end + def self.values + end end # A summary of the reasoning performed by the model. 
This can be useful for @@ -94,15 +107,19 @@ module OpenAI module Summary extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::Summary) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Reasoning::Summary) } OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Reasoning::Summary::TaggedSymbol) - CONCISE = T.let(:concise, OpenAI::Models::Reasoning::Summary::TaggedSymbol) - DETAILED = T.let(:detailed, OpenAI::Models::Reasoning::Summary::TaggedSymbol) + AUTO = T.let(:auto, OpenAI::Reasoning::Summary::TaggedSymbol) + CONCISE = T.let(:concise, OpenAI::Reasoning::Summary::TaggedSymbol) + DETAILED = T.let(:detailed, OpenAI::Reasoning::Summary::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Reasoning::Summary::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::Reasoning::Summary::TaggedSymbol]) + end + def self.values + end end end end diff --git a/rbi/openai/models/reasoning_effort.rbi b/rbi/openai/models/reasoning_effort.rbi index 11b95ba9..30ff7a5f 100644 --- a/rbi/openai/models/reasoning_effort.rbi +++ b/rbi/openai/models/reasoning_effort.rbi @@ -11,15 +11,16 @@ module OpenAI module ReasoningEffort extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ReasoningEffort) } OrSymbol = T.type_alias { T.any(Symbol, String) } - LOW = T.let(:low, OpenAI::Models::ReasoningEffort::TaggedSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::ReasoningEffort::TaggedSymbol) + LOW = T.let(:low, OpenAI::ReasoningEffort::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::ReasoningEffort::TaggedSymbol) + HIGH = T.let(:high, OpenAI::ReasoningEffort::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::ReasoningEffort::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::ReasoningEffort::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/response_format_json_object.rbi b/rbi/openai/models/response_format_json_object.rbi index cb28d04a..a02e14bc 100644 --- a/rbi/openai/models/response_format_json_object.rbi +++ b/rbi/openai/models/response_format_json_object.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of response format being defined. Always `json_object`. sig { returns(Symbol) } attr_accessor :type @@ -14,9 +16,12 @@ module OpenAI def self.new( # The type of response format being defined. Always `json_object`. type: :json_object - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/response_format_json_schema.rbi b/rbi/openai/models/response_format_json_schema.rbi index db50b7cd..dbaac8e0 100644 --- a/rbi/openai/models/response_format_json_schema.rbi +++ b/rbi/openai/models/response_format_json_schema.rbi @@ -3,15 +3,16 @@ module OpenAI module Models class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Structured Outputs configuration options, including a JSON Schema. 
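+      # A hash form sketch for this field (illustrative; see `JSONSchema`
+      # below for the typed fields):
+      #
+      #   {name: "my_schema", schema: {type: "object"}, strict: true}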
- sig { returns(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema) } + sig { returns(OpenAI::ResponseFormatJSONSchema::JSONSchema) } attr_reader :json_schema sig do params( - json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::AnyHash) - ) - .void + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash + ).void end attr_writer :json_schema @@ -24,21 +25,32 @@ module OpenAI # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). sig do params( - json_schema: T.any(OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, OpenAI::Internal::AnyHash), + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Structured Outputs configuration options, including a JSON Schema. json_schema:, # The type of response format being defined. Always `json_schema`. type: :json_schema - ); end - sig { override.returns({json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema, + type: Symbol + } + ) + end + def to_hash + end class JSONSchema < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. sig { returns(String) } @@ -75,8 +87,7 @@ module OpenAI description: String, schema: T::Hash[Symbol, T.anything], strict: T.nilable(T::Boolean) - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores @@ -94,19 +105,21 @@ module OpenAI # learn more, read the # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). strict: nil - ); end + ) + end + sig do - override - .returns( - { - name: String, - description: String, - schema: T::Hash[Symbol, T.anything], - strict: T.nilable(T::Boolean) - } - ) + override.returns( + { + name: String, + description: String, + schema: T::Hash[Symbol, T.anything], + strict: T.nilable(T::Boolean) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/response_format_text.rbi b/rbi/openai/models/response_format_text.rbi index f47d8756..a20756e2 100644 --- a/rbi/openai/models/response_format_text.rbi +++ b/rbi/openai/models/response_format_text.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class ResponseFormatText < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of response format being defined. Always `text`. sig { returns(Symbol) } attr_accessor :type @@ -12,9 +14,12 @@ module OpenAI def self.new( # The type of response format being defined. Always `text`. 
type: :text - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/computer_tool.rbi b/rbi/openai/models/responses/computer_tool.rbi index 99b52d59..9cfcfe6a 100644 --- a/rbi/openai/models/responses/computer_tool.rbi +++ b/rbi/openai/models/responses/computer_tool.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ComputerTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The height of the computer display. sig { returns(Integer) } attr_accessor :display_height @@ -13,7 +15,7 @@ module OpenAI attr_accessor :display_width # The type of computer environment to control. - sig { returns(OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol) } + sig { returns(OpenAI::Responses::ComputerTool::Environment::OrSymbol) } attr_accessor :environment # The type of the computer use tool. Always `computer_use_preview`. @@ -26,10 +28,9 @@ module OpenAI params( display_height: Integer, display_width: Integer, - environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, + environment: OpenAI::Responses::ComputerTool::Environment::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The height of the computer display. @@ -40,35 +41,68 @@ module OpenAI environment:, # The type of the computer use tool. Always `computer_use_preview`. type: :computer_use_preview - ); end + ) + end + sig do - override - .returns( - { - display_height: Integer, - display_width: Integer, - environment: OpenAI::Models::Responses::ComputerTool::Environment::OrSymbol, - type: Symbol - } - ) + override.returns( + { + display_height: Integer, + display_width: Integer, + environment: + OpenAI::Responses::ComputerTool::Environment::OrSymbol, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The type of computer environment to control. 
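+        # A construction sketch (illustrative; the environments are
+        # enumerated below):
+        #
+        #   OpenAI::Responses::ComputerTool.new(
+        #     display_width: 1024,
+        #     display_height: 768,
+        #     environment: :browser
+        #   )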
module Environment extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ComputerTool::Environment) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) - MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) - LINUX = T.let(:linux, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) - UBUNTU = T.let(:ubuntu, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) - BROWSER = T.let(:browser, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) + WINDOWS = + T.let( + :windows, + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ) + MAC = + T.let( + :mac, + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ) + LINUX = + T.let( + :linux, + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ) + UBUNTU = + T.let( + :ubuntu, + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ) + BROWSER = + T.let( + :browser, + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ComputerTool::Environment::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/easy_input_message.rbi b/rbi/openai/models/responses/easy_input_message.rbi index 698ad76f..304fcceb 100644 --- a/rbi/openai/models/responses/easy_input_message.rbi +++ b/rbi/openai/models/responses/easy_input_message.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class EasyInputMessage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. sig do @@ -12,9 +14,9 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ] ) @@ -24,14 +26,20 @@ module OpenAI # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. - sig { returns(OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol) } + sig { returns(OpenAI::Responses::EasyInputMessage::Role::OrSymbol) } attr_accessor :role # The type of the message input. Always `message`. - sig { returns(T.nilable(OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Responses::EasyInputMessage::Type::OrSymbol) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol).void } + sig do + params(type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -41,21 +49,20 @@ module OpenAI # interactions. 
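+        # A construction sketch (illustrative; `content` may also be an array
+        # of the input parts typed below):
+        #
+        #   OpenAI::Responses::EasyInputMessage.new(
+        #     role: :user,
+        #     content: "Write a haiku about Ruby."
+        #   )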
sig do params( - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] - ), - role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, - type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Responses::ResponseInputImage::OrHash, + OpenAI::Responses::ResponseInputFile::OrHash + ) + ] + ), + role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # Text, image, or audio input to the model, used to generate a response. Can also @@ -66,49 +73,57 @@ module OpenAI role:, # The type of the message input. Always `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T.any( + override.returns( + { + content: + T.any( String, T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ] ), - role: OpenAI::Models::Responses::EasyInputMessage::Role::OrSymbol, - type: OpenAI::Models::Responses::EasyInputMessage::Type::OrSymbol - } - ) + role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, + type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. module Content extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) ] ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::EasyInputMessage::Content::Variants] + ) + end + def self.variants end - def self.variants; end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -116,29 +131,65 @@ module OpenAI module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::EasyInputMessage::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Responses::EasyInputMessage::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Responses::EasyInputMessage::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Responses::EasyInputMessage::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Responses::EasyInputMessage::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::EasyInputMessage::Role::TaggedSymbol] + ) + end + def self.values + end end # The type of the message input. Always `message`. module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::EasyInputMessage::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) + MESSAGE = + T.let( + :message, + OpenAI::Responses::EasyInputMessage::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::EasyInputMessage::Type::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/file_search_tool.rbi b/rbi/openai/models/responses/file_search_tool.rbi index 005e1270..286697f9 100644 --- a/rbi/openai/models/responses/file_search_tool.rbi +++ b/rbi/openai/models/responses/file_search_tool.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class FileSearchTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the file search tool. Always `file_search`. sig { returns(Symbol) } attr_accessor :type @@ -13,7 +15,11 @@ module OpenAI attr_accessor :vector_store_ids # A filter to apply. - sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } + sig do + returns( + T.nilable(T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)) + ) + end attr_accessor :filters # The maximum number of results to return. This number should be between 1 and 50 @@ -25,14 +31,16 @@ module OpenAI attr_writer :max_num_results # Ranking options for search. 
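+        # A construction sketch (illustrative; the hash forms follow the sigs
+        # in this file):
+        #
+        #   OpenAI::Responses::FileSearchTool.new(
+        #     vector_store_ids: ["vs_123"],
+        #     max_num_results: 10,
+        #     ranking_options: {ranker: :auto, score_threshold: 0.5}
+        #   )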
- sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions)) } + sig do + returns(T.nilable(OpenAI::Responses::FileSearchTool::RankingOptions)) + end attr_reader :ranking_options sig do params( - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::AnyHash) - ) - .void + ranking_options: + OpenAI::Responses::FileSearchTool::RankingOptions::OrHash + ).void end attr_writer :ranking_options @@ -42,14 +50,18 @@ module OpenAI sig do params( vector_store_ids: T::Array[String], - filters: T.nilable( - T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) - ), + filters: + T.nilable( + T.any( + OpenAI::ComparisonFilter::OrHash, + OpenAI::CompoundFilter::OrHash + ) + ), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::Responses::FileSearchTool::RankingOptions, OpenAI::Internal::AnyHash), + ranking_options: + OpenAI::Responses::FileSearchTool::RankingOptions::OrHash, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The IDs of the vector stores to search. @@ -63,35 +75,64 @@ module OpenAI ranking_options: nil, # The type of the file search tool. Always `file_search`. type: :file_search - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - vector_store_ids: T::Array[String], - filters: T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)), - max_num_results: Integer, - ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions - } - ) + override.returns( + { + type: Symbol, + vector_store_ids: T::Array[String], + filters: + T.nilable( + T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) + ), + max_num_results: Integer, + ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions + } + ) + end + def to_hash end - def to_hash; end # A filter to apply. module Filters extend OpenAI::Internal::Type::Union - sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def self.variants; end + Variants = + T.type_alias do + T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::FileSearchTool::Filters::Variants] + ) + end + def self.variants + end end class RankingOptions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ranker to use for the file search. - sig { returns(T.nilable(OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol + ) + ) + end attr_reader :ranker - sig { params(ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol).void } + sig do + params( + ranker: + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol + ).void + end attr_writer :ranker # The score threshold for the file search, a number between 0 and 1. Numbers @@ -106,10 +147,10 @@ module OpenAI # Ranking options for search. sig do params( - ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, + ranker: + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ranker to use for the file search. 
@@ -118,34 +159,54 @@ module OpenAI # closer to 1 will attempt to return only the most relevant results, but may # return fewer results. score_threshold: nil - ); end + ) + end + sig do - override - .returns( - {ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, score_threshold: Float} - ) + override.returns( + { + ranker: + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + } + ) + end + def to_hash end - def to_hash; end # The ranker to use for the file search. module Ranker extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol + ) DEFAULT_2024_11_15 = T.let( :"default-2024-11-15", - OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol ) sig do - override.returns(T::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/responses/function_tool.rbi b/rbi/openai/models/responses/function_tool.rbi index 294f57f1..c2658fc5 100644 --- a/rbi/openai/models/responses/function_tool.rbi +++ b/rbi/openai/models/responses/function_tool.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class FunctionTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to call. sig { returns(String) } attr_accessor :name @@ -35,8 +37,7 @@ module OpenAI strict: T.nilable(T::Boolean), description: T.nilable(String), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The name of the function to call. @@ -50,20 +51,22 @@ module OpenAI description: nil, # The type of the function tool. Always `function`. type: :function - ); end + ) + end + sig do - override - .returns( - { - name: String, - parameters: T.nilable(T::Hash[Symbol, T.anything]), - strict: T.nilable(T::Boolean), - type: Symbol, - description: T.nilable(String) - } - ) + override.returns( + { + name: String, + parameters: T.nilable(T::Hash[Symbol, T.anything]), + strict: T.nilable(T::Boolean), + type: Symbol, + description: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi index 3ea133cb..8fe8462a 100644 --- a/rbi/openai/models/responses/input_item_list_params.rbi +++ b/rbi/openai/models/responses/input_item_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An item ID to list items after, used in pagination. 
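+        # A call sketch using these params (illustrative only; assumes a
+        # configured `OpenAI::Client` named `client` and an existing response
+        # id):
+        #
+        #   client.responses.input_items.list("resp_123", limit: 20, order: :asc)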
sig { returns(T.nilable(String)) } attr_reader :after @@ -23,10 +25,18 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]).void } + sig do + params( + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ).void + end attr_writer :include # A limit on the number of objects to be returned. Limit can range between 1 and @@ -41,22 +51,29 @@ module OpenAI # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. - sig { returns(T.nilable(OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::Responses::InputItemListParams::Order::OrSymbol) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::Responses::InputItemListParams::Order::OrSymbol + ).void + end attr_writer :order sig do params( after: String, before: String, - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::Responses::InputItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # An item ID to list items after, used in pagination. @@ -75,21 +92,24 @@ module OpenAI # - `desc`: Return the input items in descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + include: + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + limit: Integer, + order: OpenAI::Responses::InputItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # The order to return the input items in. Default is `asc`. 
# @@ -98,14 +118,32 @@ module OpenAI module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::InputItemListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::Responses::InputItemListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Responses::InputItemListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::InputItemListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 189001b8..3f45fc15 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class Response < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique identifier for this Response. sig { returns(String) } attr_accessor :id @@ -13,21 +15,27 @@ module OpenAI attr_accessor :created_at # An error object returned when the model fails to generate a Response. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseError)) } + sig { returns(T.nilable(OpenAI::Responses::ResponseError)) } attr_reader :error - sig { params(error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::AnyHash))).void } + sig do + params( + error: T.nilable(OpenAI::Responses::ResponseError::OrHash) + ).void + end attr_writer :error # Details about why the response is incomplete. 
- sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails)) } + sig do + returns(T.nilable(OpenAI::Responses::Response::IncompleteDetails)) + end attr_reader :incomplete_details sig do params( - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::AnyHash)) - ) - .void + incomplete_details: + T.nilable(OpenAI::Responses::Response::IncompleteDetails::OrHash) + ).void end attr_writer :incomplete_details @@ -58,8 +66,8 @@ module OpenAI returns( T.any( String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + OpenAI::ChatModel::TaggedSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol ) ) end @@ -80,12 +88,12 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ) ] ) @@ -109,9 +117,9 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction ) ) end @@ -135,10 +143,10 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool ) ] ) @@ -169,10 +177,10 @@ module OpenAI # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - sig { returns(T.nilable(OpenAI::Models::Reasoning)) } + sig { returns(T.nilable(OpenAI::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void } + sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning # Specifies the latency tier to use for processing the request. This parameter is @@ -192,15 +200,21 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol)) } + sig do + returns( + T.nilable(OpenAI::Responses::Response::ServiceTier::TaggedSymbol) + ) + end attr_accessor :service_tier # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. 
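+          # A retrieval sketch (illustrative only; assumes a configured
+          # `OpenAI::Client` named `client`):
+          #
+          #   response = client.responses.retrieve("resp_123")
+          #   response.status # => e.g. :completed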
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseStatus::TaggedSymbol)) } + sig do + returns(T.nilable(OpenAI::Responses::ResponseStatus::TaggedSymbol)) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseStatus::OrSymbol).void } + sig { params(status: OpenAI::Responses::ResponseStatus::OrSymbol).void } attr_writer :status # Configuration options for a text response from the model. Can be plain text or @@ -208,10 +222,10 @@ module OpenAI # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } + sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash)).void } + sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } attr_writer :text # The truncation strategy to use for the model response. @@ -221,15 +235,19 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. - sig { returns(T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol)) } + sig do + returns( + T.nilable(OpenAI::Responses::Response::Truncation::TaggedSymbol) + ) + end attr_accessor :truncation # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseUsage)) } + sig { returns(T.nilable(OpenAI::Responses::ResponseUsage)) } attr_reader :usage - sig { params(usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::AnyHash)).void } + sig { params(usage: OpenAI::Responses::ResponseUsage::OrHash).void } attr_writer :usage # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -245,56 +263,59 @@ module OpenAI params( id: String, created_at: Float, - error: T.nilable(T.any(OpenAI::Models::Responses::ResponseError, OpenAI::Internal::AnyHash)), - incomplete_details: T.nilable(T.any(OpenAI::Models::Responses::Response::IncompleteDetails, OpenAI::Internal::AnyHash)), + error: T.nilable(OpenAI::Responses::ResponseError::OrHash), + incomplete_details: + T.nilable(OpenAI::Responses::Response::IncompleteDetails::OrHash), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( - String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), - output: T::Array[ + model: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ) - ], + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), + output: + T::Array[ + T.any( + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, 
+ OpenAI::Responses::ResponseReasoningItem::OrHash + ) + ], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ), - tools: T::Array[ + tool_choice: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ], + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes::OrHash, + OpenAI::Responses::ToolChoiceFunction::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), - service_tier: T.nilable(OpenAI::Models::Responses::Response::ServiceTier::OrSymbol), - status: OpenAI::Models::Responses::ResponseStatus::OrSymbol, - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::OrSymbol), - usage: T.any(OpenAI::Models::Responses::ResponseUsage, OpenAI::Internal::AnyHash), + reasoning: T.nilable(OpenAI::Reasoning::OrHash), + service_tier: + T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), + status: OpenAI::Responses::ResponseStatus::OrSymbol, + text: OpenAI::Responses::ResponseTextConfig::OrHash, + truncation: + T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), + usage: OpenAI::Responses::ResponseUsage::OrHash, user: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Unique identifier for this Response. @@ -422,98 +443,155 @@ module OpenAI user: nil, # The object type of this resource - always set to `response`. 
object: :response - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Float, - error: T.nilable(OpenAI::Models::Responses::ResponseError), - incomplete_details: T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails), - instructions: T.nilable(String), - metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any( + override.returns( + { + id: String, + created_at: Float, + error: T.nilable(OpenAI::Responses::ResponseError), + incomplete_details: + T.nilable(OpenAI::Responses::Response::IncompleteDetails), + instructions: T.nilable(String), + metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( String, - OpenAI::Models::ChatModel::TaggedSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + OpenAI::ChatModel::TaggedSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol ), - object: Symbol, - output: T::Array[ + object: Symbol, + output: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ) ], - parallel_tool_calls: T::Boolean, - temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction + parallel_tool_calls: T::Boolean, + temperature: T.nilable(Float), + tool_choice: + T.any( + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction ), - tools: T::Array[ + tools: + T::Array[ T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool ) ], - top_p: T.nilable(Float), - max_output_tokens: T.nilable(Integer), - previous_response_id: T.nilable(String), - reasoning: T.nilable(OpenAI::Models::Reasoning), - service_tier: T.nilable(OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol), - status: OpenAI::Models::Responses::ResponseStatus::TaggedSymbol, - text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: T.nilable(OpenAI::Models::Responses::Response::Truncation::TaggedSymbol), - usage: OpenAI::Models::Responses::ResponseUsage, - user: String - } - ) + top_p: T.nilable(Float), + max_output_tokens: T.nilable(Integer), + previous_response_id: T.nilable(String), + reasoning: T.nilable(OpenAI::Reasoning), + service_tier: + T.nilable( + OpenAI::Responses::Response::ServiceTier::TaggedSymbol + ), + status: OpenAI::Responses::ResponseStatus::TaggedSymbol, + text: OpenAI::Responses::ResponseTextConfig, + truncation: + T.nilable( + OpenAI::Responses::Response::Truncation::TaggedSymbol + ), + usage: OpenAI::Responses::ResponseUsage, + user: String + } + ) + end + def to_hash end - def to_hash; end class IncompleteDetails < OpenAI::Internal::Type::BaseModel + 
OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The reason why the response is incomplete. - sig { returns(T.nilable(OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::Response::IncompleteDetails::Reason::TaggedSymbol + ) + ) + end attr_reader :reason - sig { params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol).void } + sig do + params( + reason: + OpenAI::Responses::Response::IncompleteDetails::Reason::OrSymbol + ).void + end attr_writer :reason # Details about why the response is incomplete. sig do - params(reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::OrSymbol) - .returns(T.attached_class) + params( + reason: + OpenAI::Responses::Response::IncompleteDetails::Reason::OrSymbol + ).returns(T.attached_class) end def self.new( # The reason why the response is incomplete. reason: nil - ); end - sig { override.returns({reason: OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + reason: + OpenAI::Responses::Response::IncompleteDetails::Reason::TaggedSymbol + } + ) + end + def to_hash + end # The reason why the response is incomplete. module Reason extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Response::IncompleteDetails::Reason + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_OUTPUT_TOKENS = - T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :max_output_tokens, + OpenAI::Responses::Response::IncompleteDetails::Reason::TaggedSymbol + ) CONTENT_FILTER = - T.let(:content_filter, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) + T.let( + :content_filter, + OpenAI::Responses::Response::IncompleteDetails::Reason::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::Response::IncompleteDetails::Reason::TaggedSymbol + ] + ) + end + def self.values + end end end @@ -523,13 +601,22 @@ module OpenAI module ToolChoice extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::Response::ToolChoice::Variants] + ) + end + def self.variants end - def self.variants; end end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -552,15 +639,29 @@ module OpenAI module ServiceTier extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::ServiceTier) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Response::ServiceTier) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) - FLEX = T.let(:flex, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::Responses::Response::ServiceTier::TaggedSymbol) + DEFAULT = + T.let( + :default, + OpenAI::Responses::Response::ServiceTier::TaggedSymbol + ) + FLEX = + T.let(:flex, OpenAI::Responses::Response::ServiceTier::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::Response::ServiceTier::TaggedSymbol] + ) + end + def self.values + end end # The truncation strategy to use for the model response. @@ -573,14 +674,27 @@ module OpenAI module Truncation extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Response::Truncation) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) - DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::Responses::Response::Truncation::TaggedSymbol) + DISABLED = + T.let( + :disabled, + OpenAI::Responses::Response::Truncation::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::Response::Truncation::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::Response::Truncation::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_audio_delta_event.rbi b/rbi/openai/models/responses/response_audio_delta_event.rbi index b2e4f0e0..0ed8d2e1 100644 --- a/rbi/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A chunk of Base64 encoded response audio bytes. sig { returns(String) } attr_accessor :delta @@ -19,9 +21,12 @@ module OpenAI delta:, # The type of the event. Always `response.audio.delta`. 
type: :"response.audio.delta" - ); end - sig { override.returns({delta: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ delta: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_audio_done_event.rbi b/rbi/openai/models/responses/response_audio_done_event.rbi index 9df7735c..3d74ccf4 100644 --- a/rbi/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } attr_accessor :type @@ -13,9 +15,12 @@ module OpenAI def self.new( # The type of the event. Always `response.audio.done`. type: :"response.audio.done" - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi index 7ea46461..bb70f631 100644 --- a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The partial transcript of the audio response. sig { returns(String) } attr_accessor :delta @@ -19,9 +21,12 @@ module OpenAI delta:, # The type of the event. Always `response.audio.transcript.delta`. type: :"response.audio.transcript.delta" - ); end - sig { override.returns({delta: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ delta: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi index ec7e9f09..df2381ba 100644 --- a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the event. Always `response.audio.transcript.done`. sig { returns(Symbol) } attr_accessor :type @@ -13,9 +15,12 @@ module OpenAI def self.new( # The type of the event. Always `response.audio.transcript.done`. 
type: :"response.audio.transcript.done" - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 4c36e297..dd5c1396 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The partial code snippet added by the code interpreter. sig { returns(String) } attr_accessor :delta @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a partial code snippet is added by the code interpreter. - sig { params(delta: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(delta: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The partial code snippet added by the code interpreter. delta:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.code_interpreter_call.code.delta`. type: :"response.code_interpreter_call.code.delta" - ); end - sig { override.returns({delta: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { delta: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index bc7dd462..9cd3b09c 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The final code snippet output by the code interpreter. sig { returns(String) } attr_accessor :code @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when code snippet output is finalized by the code interpreter. - sig { params(code: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(code: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The final code snippet output by the code interpreter. code:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.code_interpreter_call.code.done`. 
type: :"response.code_interpreter_call.code.done" - ); end - sig { override.returns({code: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { code: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 05dc4800..e2f8c573 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -4,15 +4,17 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A tool call to run code. - sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } + sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash + ).void end attr_writer :code_interpreter_call @@ -27,11 +29,11 @@ module OpenAI # Emitted when the code interpreter call is completed. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A tool call to run code. @@ -40,18 +42,21 @@ module OpenAI output_index:, # The type of the event. Always `response.code_interpreter_call.completed`. type: :"response.code_interpreter_call.completed" - ); end + ) + end + sig do - override - .returns( - { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - type: Symbol - } - ) + override.returns( + { + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 2627f40b..983c9007 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -4,15 +4,17 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A tool call to run code. 
- sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } + sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash + ).void end attr_writer :code_interpreter_call @@ -27,11 +29,11 @@ module OpenAI # Emitted when a code interpreter call is in progress. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A tool call to run code. @@ -40,18 +42,21 @@ module OpenAI output_index:, # The type of the event. Always `response.code_interpreter_call.in_progress`. type: :"response.code_interpreter_call.in_progress" - ); end + ) + end + sig do - override - .returns( - { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - type: Symbol - } - ) + override.returns( + { + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index ee7e0050..031a295b 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -4,15 +4,17 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A tool call to run code. - sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall) } + sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } attr_reader :code_interpreter_call sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash) - ) - .void + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash + ).void end attr_writer :code_interpreter_call @@ -27,11 +29,11 @@ module OpenAI # Emitted when the code interpreter is actively interpreting the code snippet. sig do params( - code_interpreter_call: T.any(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Internal::AnyHash), + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A tool call to run code. @@ -40,18 +42,21 @@ module OpenAI output_index:, # The type of the event. Always `response.code_interpreter_call.interpreting`. 
type: :"response.code_interpreter_call.interpreting" - ); end + ) + end + sig do - override - .returns( - { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, - output_index: Integer, - type: Symbol - } - ) + override.returns( + { + code_interpreter_call: + OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index 2568aca1..d964daa1 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the code interpreter tool call. sig { returns(String) } attr_accessor :id @@ -17,8 +19,8 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ] ) @@ -26,7 +28,11 @@ module OpenAI attr_accessor :results # The status of the code interpreter tool call. - sig { returns(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + sig do + returns( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) + end attr_accessor :status # The type of the code interpreter tool call. Always `code_interpreter_call`. @@ -38,17 +44,17 @@ module OpenAI params( id: String, code: String, - results: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, + results: + T::Array[ + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::OrHash + ) + ], + status: + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the code interpreter tool call. @@ -61,31 +67,46 @@ module OpenAI status:, # The type of the code interpreter tool call. Always `code_interpreter_call`. 
type: :code_interpreter_call - ); end + ) + end + sig do - override - .returns( - { - id: String, - code: String, - results: T::Array[ + override.returns( + { + id: String, + code: String, + results: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files ) ], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, - type: Symbol - } - ) + status: + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The output of a code interpreter tool call that is text. module Result extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) + end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The logs of the code interpreter tool call. sig { returns(String) } attr_accessor :logs @@ -101,13 +122,25 @@ module OpenAI logs:, # The type of the code interpreter text output. Always `logs`. type: :logs - ); end - sig { override.returns({logs: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ logs: String, type: Symbol }) } + def to_hash + end end class Files < OpenAI::Internal::Type::BaseModel - sig { returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File]) } + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File + ] + ) + end attr_accessor :files # The type of the code interpreter file output. Always `files`. @@ -117,33 +150,38 @@ module OpenAI # The output of a code interpreter tool call that is a file. sig do params( - files: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, - OpenAI::Internal::AnyHash - ) - ], + files: + T::Array[ + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( files:, # The type of the code interpreter file output. Always `files`. type: :files - ); end + ) + end + sig do - override - .returns( - { - files: T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - type: Symbol - } - ) + override.returns( + { + files: + T::Array[ + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class File < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -152,25 +190,34 @@ module OpenAI sig { returns(String) } attr_accessor :mime_type - sig { params(file_id: String, mime_type: String).returns(T.attached_class) } + sig do + params(file_id: String, mime_type: String).returns( + T.attached_class + ) + end def self.new( # The ID of the file. file_id:, # The MIME type of the file. 
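# Illustrative sketch, not part of the diff: `results` on a code
# interpreter tool call is a union of text logs and file outputs, per
# the `Result::Variants` alias above. Walking it could look like this,
# assuming the runtime classes mirror these RBI signatures; `call` is a
# hypothetical `OpenAI::Responses::ResponseCodeInterpreterToolCall`:
call.results.each do |result|
  case result
  in OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs => logs
    puts logs.logs # the interpreter's textual output
  in OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files => files
    files.files.each { |f| puts "#{f.file_id} (#{f.mime_type})" }
  end
end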
mime_type: - ); end - sig { override.returns({file_id: String, mime_type: String}) } - def to_hash; end + ) + end + + sig { override.returns({ file_id: String, mime_type: String }) } + def to_hash + end end end sig do - override - .returns( - [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] - ) + override.returns( + T::Array[ + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The status of the code interpreter tool call. @@ -178,21 +225,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) INTERPRETING = - T.let(:interpreting, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + T.let( + :interpreting, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/responses/response_completed_event.rbi b/rbi/openai/models/responses/response_completed_event.rbi index bf715172..3c962886 100644 --- a/rbi/openai/models/responses/response_completed_event.rbi +++ b/rbi/openai/models/responses/response_completed_event.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Responses class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Properties of the completed response. - sig { returns(OpenAI::Models::Responses::Response) } + sig { returns(OpenAI::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } + sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response # The type of the event. Always `response.completed`. @@ -17,17 +19,26 @@ module OpenAI # Emitted when the model response is complete. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( # Properties of the completed response. response:, # The type of the event. Always `response.completed`. 
type: :"response.completed" - ); end - sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_computer_tool_call.rbi b/rbi/openai/models/responses/response_computer_tool_call.rbi index 37023dcb..bd660209 100644 --- a/rbi/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the computer call. sig { returns(String) } attr_accessor :id @@ -12,15 +14,15 @@ module OpenAI sig do returns( T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait + OpenAI::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Responses::ResponseComputerToolCall::Action::Wait ) ) end @@ -31,16 +33,26 @@ module OpenAI attr_accessor :call_id # The pending safety checks for the computer call. - sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]) } + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck + ] + ) + end attr_accessor :pending_safety_checks # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol) } + sig do + returns(OpenAI::Responses::ResponseComputerToolCall::Status::OrSymbol) + end attr_accessor :status # The type of the computer call. Always `computer_call`. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol) } + sig do + returns(OpenAI::Responses::ResponseComputerToolCall::Type::OrSymbol) + end attr_accessor :type # A tool call to a computer use tool. 
See the @@ -49,24 +61,27 @@ module OpenAI sig do params( id: String, - action: T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait - ), + action: + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Click::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Drag::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Move::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Type::OrHash, + OpenAI::Responses::ResponseComputerToolCall::Action::Wait::OrHash + ), call_id: String, - pending_safety_checks: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck, OpenAI::Internal::AnyHash)], - status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol - ) - .returns(T.attached_class) + pending_safety_checks: + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck::OrHash + ], + status: + OpenAI::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Responses::ResponseComputerToolCall::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # The unique ID of the computer call. @@ -82,40 +97,69 @@ module OpenAI status:, # The type of the computer call. Always `computer_call`. 
type: - ); end + ) + end + sig do - override - .returns( - { - id: String, - action: T.any( - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait + override.returns( + { + id: String, + action: + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Responses::ResponseComputerToolCall::Action::Wait ), - call_id: String, - pending_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseComputerToolCall::Type::OrSymbol - } - ) + call_id: String, + pending_safety_checks: + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck + ], + status: + OpenAI::Responses::ResponseComputerToolCall::Status::OrSymbol, + type: OpenAI::Responses::ResponseComputerToolCall::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # A click action. module Action extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Responses::ResponseComputerToolCall::Action::Wait + ) + end + class Click < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol) } + sig do + returns( + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol + ) + end attr_accessor :button # Specifies the event type. For a click action, this property is always set to @@ -134,12 +178,12 @@ module OpenAI # A click action. 
sig do params( - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + button: + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, x: Integer, y_: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Indicates which mouse button was pressed during the click. One of `left`, @@ -152,19 +196,22 @@ module OpenAI # Specifies the event type. For a click action, this property is always set to # `click`. type: :click - ); end + ) + end + sig do - override - .returns( - { - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, - type: Symbol, - x: Integer, - y_: Integer - } - ) + override.returns( + { + button: + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::OrSymbol, + type: Symbol, + x: Integer, + y_: Integer + } + ) + end + def to_hash end - def to_hash; end # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -172,31 +219,56 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } LEFT = - T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + T.let( + :left, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) RIGHT = - T.let(:right, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + T.let( + :right, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) WHEEL = - T.let(:wheel, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + T.let( + :wheel, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) BACK = - T.let(:back, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + T.let( + :back, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) FORWARD = - T.let(:forward, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) + T.let( + :forward, + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ) sig do - override - .returns( - T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol] - ) + override.returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end class DoubleClick < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the event type. For a double click action, this property is always set # to `double_click`. sig { returns(Symbol) } @@ -211,7 +283,11 @@ module OpenAI attr_accessor :y_ # A double click action. - sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(x: Integer, y_: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The x-coordinate where the double click occurred. x:, @@ -220,12 +296,18 @@ module OpenAI # Specifies the event type. 
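# Illustrative sketch, not part of the diff: every model in this patch
# now declares an `OrHash` alias (`T.any(T.self_type, AnyHash)`), so
# Sorbet accepts either a model instance or a plain hash wherever an
# `OrHash` parameter appears. Constructing a click action directly,
# assuming the runtime classes mirror these RBI signatures (note the
# `y_` keyword the generator uses in place of `y`):
click = OpenAI::Responses::ResponseComputerToolCall::Action::Click.new(
  button: :left, # one of the Button enum symbols
  x: 100,
  y_: 200
)
click.to_hash # => a hash shaped like { button: :left, type: :click, x: 100, y_: 200 }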
For a double click action, this property is always set # to `double_click`. type: :double_click - ); end - sig { override.returns({type: Symbol, x: Integer, y_: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol, x: Integer, y_: Integer }) } + def to_hash + end end class Drag < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An array of coordinates representing the path of the drag action. Coordinates # will appear as an array of objects, eg # @@ -235,7 +317,13 @@ module OpenAI # { x: 200, y: 300 } # ] # ``` - sig { returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path]) } + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path + ] + ) + end attr_accessor :path # Specifies the event type. For a drag action, this property is always set to @@ -246,10 +334,12 @@ module OpenAI # A drag action. sig do params( - path: T::Array[T.any(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path, OpenAI::Internal::AnyHash)], + path: + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path::OrHash + ], type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # An array of coordinates representing the path of the drag action. Coordinates @@ -265,16 +355,27 @@ module OpenAI # Specifies the event type. For a drag action, this property is always set to # `drag`. type: :drag - ); end + ) + end + sig do - override - .returns( - {path: T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], type: Symbol} - ) + override.returns( + { + path: + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path + ], + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Path < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The x-coordinate. sig { returns(Integer) } attr_accessor :x @@ -290,13 +391,19 @@ module OpenAI x:, # The y-coordinate. y_: - ); end - sig { override.returns({x: Integer, y_: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ x: Integer, y_: Integer }) } + def to_hash + end end end class Keypress < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. sig { returns(T::Array[String]) } @@ -308,7 +415,11 @@ module OpenAI attr_accessor :type # A collection of keypresses the model would like to perform. - sig { params(keys: T::Array[String], type: Symbol).returns(T.attached_class) } + sig do + params(keys: T::Array[String], type: Symbol).returns( + T.attached_class + ) + end def self.new( # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. @@ -316,12 +427,18 @@ module OpenAI # Specifies the event type. For a keypress action, this property is always set to # `keypress`. type: :keypress - ); end - sig { override.returns({keys: T::Array[String], type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ keys: T::Array[String], type: Symbol }) } + def to_hash + end end class Move < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the event type. 
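# Illustrative sketch, not part of the diff: the drag action's `path` is
# an array of coordinate models matching the `[{ x: ..., y: ... }, ...]`
# shape shown in the doc comment above, and the `Path::OrHash` parameter
# type means plain hashes are accepted there. Assuming the runtime
# classes mirror these RBI signatures:
drag = OpenAI::Responses::ResponseComputerToolCall::Action::Drag.new(
  path: [
    { x: 100, y_: 200 }, # OrHash: a plain hash in place of a Path model
    { x: 200, y_: 300 }
  ]
)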
For a move action, this property is always set to # `move`. sig { returns(Symbol) } @@ -336,7 +453,11 @@ module OpenAI attr_accessor :y_ # A mouse move action. - sig { params(x: Integer, y_: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(x: Integer, y_: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The x-coordinate to move to. x:, @@ -345,12 +466,18 @@ module OpenAI # Specifies the event type. For a move action, this property is always set to # `move`. type: :move - ); end - sig { override.returns({type: Symbol, x: Integer, y_: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol, x: Integer, y_: Integer }) } + def to_hash + end end class Screenshot < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. sig { returns(Symbol) } @@ -362,12 +489,18 @@ module OpenAI # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. type: :screenshot - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end class Scroll < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The horizontal scroll distance. sig { returns(Integer) } attr_accessor :scroll_x @@ -391,8 +524,13 @@ module OpenAI # A scroll action. sig do - params(scroll_x: Integer, scroll_y: Integer, x: Integer, y_: Integer, type: Symbol) - .returns(T.attached_class) + params( + scroll_x: Integer, + scroll_y: Integer, + x: Integer, + y_: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The horizontal scroll distance. @@ -406,14 +544,28 @@ module OpenAI # Specifies the event type. For a scroll action, this property is always set to # `scroll`. type: :scroll - ); end + ) + end + sig do - override.returns({scroll_x: Integer, scroll_y: Integer, type: Symbol, x: Integer, y_: Integer}) + override.returns( + { + scroll_x: Integer, + scroll_y: Integer, + type: Symbol, + x: Integer, + y_: Integer + } + ) + end + def to_hash end - def to_hash; end end class Type < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text to type. sig { returns(String) } attr_accessor :text @@ -431,12 +583,18 @@ module OpenAI # Specifies the event type. For a type action, this property is always set to # `type`. type: :type - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end class Wait < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the event type. For a wait action, this property is always set to # `wait`. sig { returns(Symbol) } @@ -448,21 +606,29 @@ module OpenAI # Specifies the event type. For a wait action, this property is always set to # `wait`. 
type: :wait - ); end - sig { override.returns({type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - ) + override.returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Action::Variants + ] + ) + end + def self.variants end - def self.variants; end end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -476,7 +642,11 @@ module OpenAI attr_accessor :message # A pending safety check for the computer call. - sig { params(id: String, code: String, message: String).returns(T.attached_class) } + sig do + params(id: String, code: String, message: String).returns( + T.attached_class + ) + end def self.new( # The ID of the pending safety check. id:, @@ -484,9 +654,14 @@ module OpenAI code:, # Details about the pending safety check. message: - ); end - sig { override.returns({id: String, code: String, message: String}) } - def to_hash; end + ) + end + + sig do + override.returns({ id: String, code: String, message: String }) + end + def to_hash + end end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -495,31 +670,63 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseComputerToolCall::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseComputerToolCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseComputerToolCall::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseComputerToolCall::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Status::TaggedSymbol + ] + ) + end + def self.values + end end # The type of the computer call. Always `computer_call`. 
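# Illustrative sketch, not part of the diff: computer tool calls surface
# pending safety checks that a caller is expected to review before
# acknowledging them on a follow-up request. Inspecting them could look
# like this, assuming the runtime classes mirror these RBI signatures;
# `tool_call` is a hypothetical
# `OpenAI::Responses::ResponseComputerToolCall`:
tool_call.pending_safety_checks.each do |check|
  warn "safety check #{check.id} (#{check.code}): #{check.message}"
end
# Enum-typed fields are plain symbols at runtime, so direct comparison works:
completed = tool_call.status == :completed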
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseComputerToolCall::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } COMPUTER_CALL = - T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) + T.let( + :computer_call, + OpenAI::Responses::ResponseComputerToolCall::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCall::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi index c8e4b2bf..f5b233b2 100644 --- a/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the computer call tool output. sig { returns(String) } attr_accessor :id @@ -13,14 +15,16 @@ module OpenAI attr_accessor :call_id # A computer screenshot image used with the computer use tool. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) } + sig do + returns(OpenAI::Responses::ResponseComputerToolCallOutputScreenshot) + end attr_reader :output sig do params( - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash) - ) - .void + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot::OrHash + ).void end attr_writer :output @@ -33,7 +37,9 @@ module OpenAI sig do returns( T.nilable( - T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + T::Array[ + OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck + ] ) ) end @@ -41,40 +47,47 @@ module OpenAI sig do params( - acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) - ] - ) - .void + acknowledged_safety_checks: + T::Array[ + OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck::OrHash + ] + ).void end attr_writer :acknowledged_safety_checks # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol + ).void + end attr_writer :status sig do params( id: String, call_id: String, - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), - acknowledged_safety_checks: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) - ], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot::OrHash, + acknowledged_safety_checks: + T::Array[ + OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck::OrHash + ], + status: + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the computer call tool output. @@ -91,23 +104,33 @@ module OpenAI status: nil, # The type of the computer tool call output. Always `computer_call_output`. type: :computer_call_output - ); end + ) + end + sig do - override - .returns( - { - id: String, - call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - type: Symbol, - acknowledged_safety_checks: T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol - } - ) + override.returns( + { + id: String, + call_id: String, + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: Symbol, + acknowledged_safety_checks: + T::Array[ + OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck + ], + status: + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -121,7 +144,11 @@ module OpenAI attr_accessor :message # A pending safety check for the computer call. - sig { params(id: String, code: String, message: String).returns(T.attached_class) } + sig do + params(id: String, code: String, message: String).returns( + T.attached_class + ) + end def self.new( # The ID of the pending safety check. id:, @@ -129,9 +156,14 @@ module OpenAI code:, # Details about the pending safety check. message: - ); end - sig { override.returns({id: String, code: String, message: String}) } - def to_hash; end + ) + end + + sig do + override.returns({ id: String, code: String, message: String }) + end + def to_hash + end end # The status of the message input. 
One of `in_progress`, `completed`, or @@ -140,21 +172,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index b0d1e196..7961326d 100644 --- a/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. sig { returns(Symbol) } @@ -24,7 +26,11 @@ module OpenAI attr_writer :image_url # A computer screenshot image used with the computer use tool. - sig { params(file_id: String, image_url: String, type: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, image_url: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The identifier of an uploaded file that contains the screenshot. file_id: nil, @@ -33,9 +39,14 @@ module OpenAI # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. 
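# Illustrative sketch, not part of the diff: replying to a computer call
# means sending back a `computer_call_output` item whose `output` is the
# screenshot model defined above. Assuming the runtime classes mirror
# these RBI signatures; the IDs and data URL are placeholders:
screenshot = OpenAI::Responses::ResponseComputerToolCallOutputScreenshot.new(
  image_url: "data:image/png;base64,..." # placeholder image payload
)
output_item = OpenAI::Responses::ResponseComputerToolCallOutputItem.new(
  id: "cu_out_123",    # placeholder ID
  call_id: "call_123", # placeholder ID; matches the originating computer call
  output: screenshot   # an `OrHash` parameter, so a plain hash would also do
)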
type: :computer_screenshot - ); end - sig { override.returns({type: Symbol, file_id: String, image_url: String}) } - def to_hash; end + ) + end + + sig do + override.returns({ type: Symbol, file_id: String, image_url: String }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_content.rbi b/rbi/openai/models/responses/response_content.rbi index 5fd0ccd8..126e783d 100644 --- a/rbi/openai/models/responses/response_content.rbi +++ b/rbi/openai/models/responses/response_content.rbi @@ -7,13 +7,24 @@ module OpenAI module ResponseContent extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile, + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseContent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi index 46fa7693..341887c8 100644 --- a/rbi/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/openai/models/responses/response_content_part_added_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that was added. sig { returns(Integer) } attr_accessor :content_index @@ -19,7 +21,10 @@ module OpenAI # The content part that was added. sig do returns( - T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) ) end attr_accessor :part @@ -34,14 +39,13 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any( - OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputRefusal - ), + part: + T.any( + OpenAI::Responses::ResponseOutputText::OrHash, + OpenAI::Responses::ResponseOutputRefusal::OrHash + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part that was added. @@ -54,30 +58,48 @@ module OpenAI part:, # The type of the event. Always `response.content_part.added`. type: :"response.content_part.added" - ); end + ) + end + sig do - override - .returns( - { - content_index: Integer, - item_id: String, - output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), - type: Symbol - } - ) + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + part: + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ), + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The content part that was added. 
module Part extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + end + sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi index c1519e4f..e004eed3 100644 --- a/rbi/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/openai/models/responses/response_content_part_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that is done. sig { returns(Integer) } attr_accessor :content_index @@ -19,7 +21,10 @@ module OpenAI # The content part that is done. sig do returns( - T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal) + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) ) end attr_accessor :part @@ -34,14 +39,13 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: T.any( - OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputRefusal - ), + part: + T.any( + OpenAI::Responses::ResponseOutputText::OrHash, + OpenAI::Responses::ResponseOutputRefusal::OrHash + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part that is done. @@ -54,30 +58,48 @@ module OpenAI part:, # The type of the event. Always `response.content_part.done`. type: :"response.content_part.done" - ); end + ) + end + sig do - override - .returns( - { - content_index: Integer, - item_id: String, - output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal), - type: Symbol - } - ) + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + part: + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ), + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The content part that is done. 
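# Illustrative sketch, not part of the diff: the content-part events
# above carry a `Part` union of output text and refusals. Branching on
# it could look like this, assuming the runtime classes mirror these RBI
# signatures; `event` is a hypothetical
# `OpenAI::Responses::ResponseContentPartAddedEvent`, and the `text` /
# `refusal` readers are assumptions based on the API shape:
case event.part
in OpenAI::Responses::ResponseOutputText => part
  print part.text
in OpenAI::Responses::ResponseOutputRefusal => part
  warn "model refused: #{part.refusal}"
end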
module Part extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + end + sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 753835a4..9f9c2fc9 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Text, image, or file inputs to the model, used to generate a response. # # Learn more: @@ -22,17 +24,17 @@ module OpenAI String, T::Array[ T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference ) ] ) @@ -49,8 +51,8 @@ module OpenAI returns( T.any( String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ) ) end @@ -69,7 +71,11 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end attr_accessor :include # Inserts a system (or developer) message as the first item in the model's @@ -110,10 +116,10 @@ module OpenAI # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). 
- sig { returns(T.nilable(OpenAI::Models::Reasoning)) } + sig { returns(T.nilable(OpenAI::Reasoning)) } attr_reader :reasoning - sig { params(reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash))).void } + sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning # Specifies the latency tier to use for processing the request. This parameter is @@ -133,7 +139,13 @@ module OpenAI # # When this parameter is set, the response body will include the `service_tier` # utilized. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol + ) + ) + end attr_accessor :service_tier # Whether to store the generated model response for later retrieval via API. @@ -152,10 +164,10 @@ module OpenAI # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseTextConfig)) } + sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash)).void } + sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } attr_writer :text # How the model should select which tool (or tools) to use when generating a @@ -165,9 +177,9 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction ) ) ) @@ -176,14 +188,13 @@ module OpenAI sig do params( - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ) - ) - .void + tool_choice: + T.any( + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes::OrHash, + OpenAI::Responses::ToolChoiceFunction::OrHash + ) + ).void end attr_writer :tool_choice @@ -206,10 +217,10 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool ) ] ) @@ -219,17 +230,16 @@ module OpenAI sig do params( - tools: T::Array[ - T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ] - ) - .void + tools: + T::Array[ + T.any( + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void end attr_writer :tools @@ -248,7 +258,13 @@ module OpenAI # window by dropping input items in the middle of the conversation. # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol + ) + ) + end attr_accessor :truncation # A unique identifier representing your end-user, which can help OpenAI to monitor @@ -262,62 +278,71 @@ module OpenAI sig do params( - input: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - ), - model: T.any( - String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + input: + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ] + ), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), + include: + T.nilable( + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), - service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol), + reasoning: T.nilable(OpenAI::Reasoning::OrHash), + service_tier: + T.nilable( + OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol + ), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ), - tools: T::Array[ + text: OpenAI::Responses::ResponseTextConfig::OrHash, + tool_choice: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ], + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes::OrHash, + 
OpenAI::Responses::ToolChoiceFunction::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol + ), user: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Text, image, or file inputs to the model, used to generate a response. @@ -448,66 +473,81 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - input: T.any( + override.returns( + { + input: + T.any( String, T::Array[ T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference ) ] ), - model: T.any( + model: + T.any( String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), + include: + T.nilable( + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] ), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), - instructions: T.nilable(String), - max_output_tokens: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - parallel_tool_calls: T.nilable(T::Boolean), - previous_response_id: T.nilable(String), - reasoning: T.nilable(OpenAI::Models::Reasoning), - service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol), - store: T.nilable(T::Boolean), - temperature: T.nilable(Float), - text: OpenAI::Models::Responses::ResponseTextConfig, - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Models::Responses::ToolChoiceFunction + instructions: T.nilable(String), + max_output_tokens: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + parallel_tool_calls: T.nilable(T::Boolean), + previous_response_id: T.nilable(String), + reasoning: 
T.nilable(OpenAI::Reasoning), + service_tier: + T.nilable( + OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol ), - tools: T::Array[ + store: T.nilable(T::Boolean), + temperature: T.nilable(Float), + text: OpenAI::Responses::ResponseTextConfig, + tool_choice: + T.any( + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction + ), + tools: + T::Array[ T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool ) ], - top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), - user: String, - request_options: OpenAI::RequestOptions - } - ) + top_p: T.nilable(Float), + truncation: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol + ), + user: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Text, image, or file inputs to the model, used to generate a response. # @@ -521,30 +561,35 @@ module OpenAI module Input extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [ - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] + Variants = + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference + ) ] ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseCreateParams::Input::Variants] + ) + end + def self.variants end - def self.variants; end end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -568,15 +613,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseCreateParams::ServiceTier + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) - DEFAULT = T.let(:default, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) - FLEX = T.let(:flex, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ) + DEFAULT = + T.let( + :default, + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ) + FLEX = + T.let( + :flex, + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ] + ) + end + def self.values + end end # How the model should select which tool (or tools) to use when generating a @@ -585,13 +654,24 @@ module OpenAI module ToolChoice extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::ToolChoice::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The truncation strategy to use for the model response. @@ -605,14 +685,31 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseCreateParams::Truncation) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) - DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::Responses::ResponseCreateParams::Truncation::TaggedSymbol + ) + DISABLED = + T.let( + :disabled, + OpenAI::Responses::ResponseCreateParams::Truncation::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::Truncation::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_created_event.rbi b/rbi/openai/models/responses/response_created_event.rbi index 33664af0..49ef465e 100644 --- a/rbi/openai/models/responses/response_created_event.rbi +++ b/rbi/openai/models/responses/response_created_event.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Responses class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The response that was created. 
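
        # A short sketch (variable names assumed, not taken from this diff) of
        # the reader/writer asymmetry used for model-typed fields such as the
        # `response` field below: the reader is typed as the parsed model,
        # while the writer also admits the model's `OrHash` form, with
        # coercion expected to happen inside
        # `OpenAI::Internal::Type::BaseModel`.
        #
        #   event.response = response_hash # writer: Responses::Response::OrHash
        #   event.response.id              # reader: a parsed Responses::Response
        #
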
- sig { returns(OpenAI::Models::Responses::Response) } + sig { returns(OpenAI::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } + sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response # The type of the event. Always `response.created`. @@ -17,17 +19,26 @@ module OpenAI # An event that is emitted when a response is created. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( # The response that was created. response:, # The type of the event. Always `response.created`. type: :"response.created" - ); end - sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_delete_params.rbi b/rbi/openai/models/responses/response_delete_params.rbi index 934c6bdc..b5adf324 100644 --- a/rbi/openai/models/responses/response_delete_params.rbi +++ b/rbi/openai/models/responses/response_delete_params.rbi @@ -7,18 +7,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_error.rbi b/rbi/openai/models/responses/response_error.rbi index 796293af..1f52c7f1 100644 --- a/rbi/openai/models/responses/response_error.rbi +++ b/rbi/openai/models/responses/response_error.rbi @@ -4,8 +4,10 @@ module OpenAI module Models module Responses class ResponseError < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The error code for the response. - sig { returns(OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + sig { returns(OpenAI::Responses::ResponseError::Code::TaggedSymbol) } attr_accessor :code # A human-readable description of the error. @@ -14,58 +16,138 @@ module OpenAI # An error object returned when the model fails to generate a Response. sig do - params(code: OpenAI::Models::Responses::ResponseError::Code::OrSymbol, message: String) - .returns(T.attached_class) + params( + code: OpenAI::Responses::ResponseError::Code::OrSymbol, + message: String + ).returns(T.attached_class) end def self.new( # The error code for the response. code:, # A human-readable description of the error. 
message: - ); end - sig { override.returns({code: OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol, message: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + code: OpenAI::Responses::ResponseError::Code::TaggedSymbol, + message: String + } + ) + end + def to_hash + end # The error code for the response. module Code extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseError::Code) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + SERVER_ERROR = + T.let( + :server_error, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) RATE_LIMIT_EXCEEDED = - T.let(:rate_limit_exceeded, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_PROMPT = T.let(:invalid_prompt, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :rate_limit_exceeded, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) + INVALID_PROMPT = + T.let( + :invalid_prompt, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) VECTOR_STORE_TIMEOUT = - T.let(:vector_store_timeout, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - INVALID_IMAGE = T.let(:invalid_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :vector_store_timeout, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) + INVALID_IMAGE = + T.let( + :invalid_image, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) INVALID_IMAGE_FORMAT = - T.let(:invalid_image_format, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :invalid_image_format, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) INVALID_BASE64_IMAGE = - T.let(:invalid_base64_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :invalid_base64_image, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) INVALID_IMAGE_URL = - T.let(:invalid_image_url, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_TOO_LARGE = T.let(:image_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - IMAGE_TOO_SMALL = T.let(:image_too_small, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :invalid_image_url, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) + IMAGE_TOO_LARGE = + T.let( + :image_too_large, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) + IMAGE_TOO_SMALL = + T.let( + :image_too_small, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) IMAGE_PARSE_ERROR = - T.let(:image_parse_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :image_parse_error, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) IMAGE_CONTENT_POLICY_VIOLATION = - T.let(:image_content_policy_violation, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :image_content_policy_violation, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) INVALID_IMAGE_MODE = - T.let(:invalid_image_mode, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :invalid_image_mode, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) IMAGE_FILE_TOO_LARGE = - T.let(:image_file_too_large, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :image_file_too_large, + 
OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) UNSUPPORTED_IMAGE_MEDIA_TYPE = - T.let(:unsupported_image_media_type, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) - EMPTY_IMAGE_FILE = T.let(:empty_image_file, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :unsupported_image_media_type, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) + EMPTY_IMAGE_FILE = + T.let( + :empty_image_file, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) FAILED_TO_DOWNLOAD_IMAGE = - T.let(:failed_to_download_image, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :failed_to_download_image, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) IMAGE_FILE_NOT_FOUND = - T.let(:image_file_not_found, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) + T.let( + :image_file_not_found, + OpenAI::Responses::ResponseError::Code::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseError::Code::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_error_event.rbi b/rbi/openai/models/responses/response_error_event.rbi index 24a190a8..34c0a475 100644 --- a/rbi/openai/models/responses/response_error_event.rbi +++ b/rbi/openai/models/responses/response_error_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The error code. sig { returns(T.nilable(String)) } attr_accessor :code @@ -22,8 +24,12 @@ module OpenAI # Emitted when an error occurs. sig do - params(code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol) - .returns(T.attached_class) + params( + code: T.nilable(String), + message: String, + param: T.nilable(String), + type: Symbol + ).returns(T.attached_class) end def self.new( # The error code. @@ -34,11 +40,21 @@ module OpenAI param:, # The type of the event. Always `error`. type: :error - ); end + ) + end + sig do - override.returns({code: T.nilable(String), message: String, param: T.nilable(String), type: Symbol}) + override.returns( + { + code: T.nilable(String), + message: String, + param: T.nilable(String), + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_failed_event.rbi b/rbi/openai/models/responses/response_failed_event.rbi index ad3251e9..f4a8afd0 100644 --- a/rbi/openai/models/responses/response_failed_event.rbi +++ b/rbi/openai/models/responses/response_failed_event.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Responses class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The response that failed. - sig { returns(OpenAI::Models::Responses::Response) } + sig { returns(OpenAI::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } + sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response # The type of the event. Always `response.failed`. @@ -17,17 +19,26 @@ module OpenAI # An event that is emitted when a response fails. 
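
        # What the `OrHash` aliases buy at construction time, sketched against
        # the constructor below under the assumption that `response_hash`
        # carries the full `Responses::Response` shape (elided here): anywhere
        # a model is expected as input, a plain hash of the same shape may be
        # given instead.
        #
        #   event = OpenAI::Responses::ResponseFailedEvent.new(
        #     response: response_hash # or an OpenAI::Responses::Response
        #   )
        #
        # Before this patch the same union was restated inline at every call
        # site as `T.any(Model, OpenAI::Internal::AnyHash)`; the per-model
        # alias spells it once.
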
sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( # The response that failed. response:, # The type of the event. Always `response.failed`. type: :"response.failed" - ); end - sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi index 7ac06ad1..e8ba8841 100644 --- a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the output item that the file search call is initiated. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a file search call is completed (results found). - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the output item that the file search call is initiated. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.file_search_call.completed`. type: :"response.file_search_call.completed" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi index 99274ded..9c90a725 100644 --- a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the output item that the file search call is initiated. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a file search call is initiated. - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the output item that the file search call is initiated. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.file_search_call.in_progress`. 
type: :"response.file_search_call.in_progress" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi index 605b332b..9e44847c 100644 --- a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the output item that the file search call is initiated. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a file search is currently searching. - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the output item that the file search call is initiated. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.file_search_call.searching`. type: :"response.file_search_call.searching" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_file_search_tool_call.rbi b/rbi/openai/models/responses/response_file_search_tool_call.rbi index 94b16558..9a7ca93a 100644 --- a/rbi/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/openai/models/responses/response_file_search_tool_call.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the file search tool call. sig { returns(String) } attr_accessor :id @@ -14,7 +16,11 @@ module OpenAI # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, - sig { returns(OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol) } + sig do + returns( + OpenAI::Responses::ResponseFileSearchToolCall::Status::OrSymbol + ) + end attr_accessor :status # The type of the file search tool call. Always `file_search_call`. @@ -22,7 +28,13 @@ module OpenAI attr_accessor :type # The results of the file search tool call. - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result])) } + sig do + returns( + T.nilable( + T::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result] + ) + ) + end attr_accessor :results # The results of a file search tool call. 
See the @@ -32,13 +44,16 @@ module OpenAI params( id: String, queries: T::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, - results: T.nilable( - T::Array[T.any(OpenAI::Models::Responses::ResponseFileSearchToolCall::Result, OpenAI::Internal::AnyHash)] - ), + status: + OpenAI::Responses::ResponseFileSearchToolCall::Status::OrSymbol, + results: + T.nilable( + T::Array[ + OpenAI::Responses::ResponseFileSearchToolCall::Result::OrHash + ] + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the file search tool call. @@ -52,20 +67,28 @@ module OpenAI results: nil, # The type of the file search tool call. Always `file_search_call`. type: :file_search_call - ); end + ) + end + sig do - override - .returns( - { - id: String, - queries: T::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::OrSymbol, - type: Symbol, - results: T.nilable(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) - } - ) + override.returns( + { + id: String, + queries: T::Array[String], + status: + OpenAI::Responses::ResponseFileSearchToolCall::Status::OrSymbol, + type: Symbol, + results: + T.nilable( + T::Array[ + OpenAI::Responses::ResponseFileSearchToolCall::Result + ] + ) + } + ) + end + def to_hash end - def to_hash; end # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, @@ -73,30 +96,65 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ) SEARCHING = - T.let(:searching, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + T.let( + :searching, + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol + ] + ) + end + def self.values + end end class Result < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. 
Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns( + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]) + ) + end attr_accessor :attributes # The unique ID of the file. @@ -129,13 +187,13 @@ module OpenAI sig do params( - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), file_id: String, filename: String, score: Float, text: String - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -152,26 +210,38 @@ module OpenAI score: nil, # The text that was retrieved from the file. text: nil - ); end + ) + end + sig do - override - .returns( - { - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - file_id: String, - filename: String, - score: Float, - text: String - } - ) + override.returns( + { + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + file_id: String, + filename: String, + score: Float, + text: String + } + ) + end + def to_hash end - def to_hash; end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/responses/response_format_text_config.rbi b/rbi/openai/models/responses/response_format_text_config.rbi index ae61d108..2c1186ac 100644 --- a/rbi/openai/models/responses/response_format_text_config.rbi +++ b/rbi/openai/models/responses/response_format_text_config.rbi @@ -19,13 +19,22 @@ module OpenAI module ResponseFormatTextConfig extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] + Variants = + T.type_alias do + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseFormatTextConfig::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/openai/models/responses/response_format_text_json_schema_config.rbi index 1b94b7c2..0ec62279 100644 --- a/rbi/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/openai/models/responses/response_format_text_json_schema_config.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. sig { returns(String) } @@ -44,8 +46,7 @@ module OpenAI description: String, strict: T.nilable(T::Boolean), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores @@ -65,20 +66,22 @@ module OpenAI strict: nil, # The type of response format being defined. Always `json_schema`. type: :json_schema - ); end + ) + end + sig do - override - .returns( - { - name: String, - schema: T::Hash[Symbol, T.anything], - type: Symbol, - description: String, - strict: T.nilable(T::Boolean) - } - ) + override.returns( + { + name: String, + schema: T::Hash[Symbol, T.anything], + type: Symbol, + description: String, + strict: T.nilable(T::Boolean) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi index 801e2df6..2932676c 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The function-call arguments delta that is added. sig { returns(String) } attr_accessor :delta @@ -38,9 +40,21 @@ module OpenAI output_index:, # The type of the event. Always `response.function_call_arguments.delta`. type: :"response.function_call_arguments.delta" - ); end - sig { override.returns({delta: String, item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + delta: String, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi index 0886a45d..76dec426 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The function-call arguments. sig { returns(String) } attr_accessor :arguments @@ -36,9 +38,21 @@ module OpenAI # The index of the output item. output_index:, type: :"response.function_call_arguments.done" - ); end - sig { override.returns({arguments: String, item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + arguments: String, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_function_tool_call.rbi b/rbi/openai/models/responses/response_function_tool_call.rbi index 0862724c..ed3d200e 100644 --- a/rbi/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/openai/models/responses/response_function_tool_call.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A JSON string of the arguments to pass to the function. sig { returns(String) } attr_accessor :arguments @@ -29,10 +31,21 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
# Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFunctionToolCall::Status::OrSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Responses::ResponseFunctionToolCall::Status::OrSymbol + ).void + end attr_writer :status # A tool call to run a function. See the @@ -44,10 +57,10 @@ module OpenAI call_id: String, name: String, id: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol, + status: + OpenAI::Responses::ResponseFunctionToolCall::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A JSON string of the arguments to pass to the function. @@ -63,21 +76,24 @@ module OpenAI status: nil, # The type of the function tool call. Always `function_call`. type: :function_call - ); end + ) + end + sig do - override - .returns( - { - arguments: String, - call_id: String, - name: String, - type: Symbol, - id: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCall::Status::OrSymbol - } - ) + override.returns( + { + arguments: String, + call_id: String, + name: String, + type: Symbol, + id: String, + status: + OpenAI::Responses::ResponseFunctionToolCall::Status::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -85,17 +101,36 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseFunctionToolCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseFunctionToolCall::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseFunctionToolCall::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseFunctionToolCall::Status::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_function_tool_call_item.rbi b/rbi/openai/models/responses/response_function_tool_call_item.rbi index cfa76b9c..75156efc 100644 --- a/rbi/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/openai/models/responses/response_function_tool_call_item.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the function tool call. 
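
        # An illustrative construction (argument values are examples, not
        # taken from this diff) of the parent class reworked above, showing
        # the `OrSymbol` input convention on
        # `ResponseFunctionToolCall#status`: bare Ruby symbols and strings
        # type-check on input, so callers are not forced through the enum
        # constants.
        #
        #   call = OpenAI::Responses::ResponseFunctionToolCall.new(
        #     arguments: '{"location":"Berlin"}',
        #     call_id: "call_123",
        #     name: "get_weather",
        #     status: :in_progress
        #   )
        #   call.to_hash[:name] # => "get_weather"
        #
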
sig { returns(String) } attr_accessor :id @@ -15,9 +17,12 @@ module OpenAI def self.new( # The unique ID of the function tool call. id: - ); end - sig { override.returns({id: String}) } - def to_hash; end + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/openai/models/responses/response_function_tool_call_output_item.rbi index 64da7c58..f050d6bc 100644 --- a/rbi/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/openai/models/responses/response_function_tool_call_output_item.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the function call tool output. sig { returns(String) } attr_accessor :id @@ -22,10 +24,21 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol + ).void + end attr_writer :status sig do @@ -33,10 +46,10 @@ module OpenAI id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol, + status: + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the function call tool output. @@ -50,20 +63,23 @@ module OpenAI status: nil, # The type of the function tool call output. Always `function_call_output`. type: :function_call_output - ); end + ) + end + sig do - override - .returns( - { - id: String, - call_id: String, - output: String, - type: Symbol, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol - } - ) + override.returns( + { + id: String, + call_id: String, + output: String, + type: Symbol, + status: + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
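
        # The enum convention these files share, summarized ahead of the
        # `Status` enum in the following hunk (`item` is an assumed,
        # already-parsed output item): `OrSymbol` (`T.any(Symbol, String)`) is
        # the permissive input type, `TaggedSymbol` the branded output type,
        # and each constant is `T.let` against the tagged type. At runtime the
        # tagged symbols are ordinary symbols, so they compare with `==` as
        # usual.
        #
        #   status_enum = OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status
        #   status_enum.values # expected: [:in_progress, :completed, :incomplete]
        #   item.status == status_enum::COMPLETED
        #
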
@@ -71,21 +87,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end end diff --git a/rbi/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi index 1c892bf4..2e535daa 100644 --- a/rbi/openai/models/responses/response_function_web_search.rbi +++ b/rbi/openai/models/responses/response_function_web_search.rbi @@ -4,12 +4,18 @@ module OpenAI module Models module Responses class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the web search tool call. sig { returns(String) } attr_accessor :id # The status of the web search tool call. - sig { returns(OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol) } + sig do + returns( + OpenAI::Responses::ResponseFunctionWebSearch::Status::OrSymbol + ) + end attr_accessor :status # The type of the web search tool call. Always `web_search_call`. @@ -22,10 +28,10 @@ module OpenAI sig do params( id: String, - status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, + status: + OpenAI::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the web search tool call. @@ -34,31 +40,65 @@ module OpenAI status:, # The type of the web search tool call. Always `web_search_call`. type: :web_search_call - ); end + ) + end + sig do - override - .returns( - {id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol} - ) + override.returns( + { + id: String, + status: + OpenAI::Responses::ResponseFunctionWebSearch::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The status of the web search tool call. 
module Status extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseFunctionWebSearch::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) - SEARCHING = T.let(:searching, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol + ) + SEARCHING = + T.let( + :searching, + OpenAI::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_in_progress_event.rbi b/rbi/openai/models/responses/response_in_progress_event.rbi index 7ef933f7..933f623b 100644 --- a/rbi/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_in_progress_event.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Responses class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The response that is in progress. - sig { returns(OpenAI::Models::Responses::Response) } + sig { returns(OpenAI::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } + sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response # The type of the event. Always `response.in_progress`. @@ -17,17 +19,26 @@ module OpenAI # Emitted when the response is in progress. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( # The response that is in progress. response:, # The type of the event. Always `response.in_progress`. 
type: :"response.in_progress" - ); end - sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi index cc0efd7c..b99bd61a 100644 --- a/rbi/openai/models/responses/response_includable.rbi +++ b/rbi/openai/models/responses/response_includable.rbi @@ -19,23 +19,38 @@ module OpenAI module ResponseIncludable extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Responses::ResponseIncludable) } OrSymbol = T.type_alias { T.any(Symbol, String) } FILE_SEARCH_CALL_RESULTS = - T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) + T.let( + :"file_search_call.results", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) MESSAGE_INPUT_IMAGE_IMAGE_URL = - T.let(:"message.input_image.image_url", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) + T.let( + :"message.input_image.image_url", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = T.let( :"computer_call_output.output.image_url", - OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol + OpenAI::Responses::ResponseIncludable::TaggedSymbol ) REASONING_ENCRYPTED_CONTENT = - T.let(:"reasoning.encrypted_content", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) + T.let( + :"reasoning.encrypted_content", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseIncludable::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_incomplete_event.rbi b/rbi/openai/models/responses/response_incomplete_event.rbi index 079d5434..047ad561 100644 --- a/rbi/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/openai/models/responses/response_incomplete_event.rbi @@ -4,11 +4,13 @@ module OpenAI module Models module Responses class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The response that was incomplete. - sig { returns(OpenAI::Models::Responses::Response) } + sig { returns(OpenAI::Responses::Response) } attr_reader :response - sig { params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash)).void } + sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response # The type of the event. Always `response.incomplete`. @@ -17,17 +19,26 @@ module OpenAI # An event that is emitted when a response finishes as incomplete. sig do - params(response: T.any(OpenAI::Models::Responses::Response, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( # The response that was incomplete. response:, # The type of the event. Always `response.incomplete`. 
type: :"response.incomplete" - ); end - sig { override.returns({response: OpenAI::Models::Responses::Response, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_input.rbi b/rbi/openai/models/responses/response_input.rbi index 77a9217a..48025693 100644 --- a/rbi/openai/models/responses/response_input.rbi +++ b/rbi/openai/models/responses/response_input.rbi @@ -5,7 +5,9 @@ module OpenAI module Responses ResponseInput = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputItem], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Responses::ResponseInputItem + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/responses/response_input_audio.rbi b/rbi/openai/models/responses/response_input_audio.rbi index 9244a048..0cacbacd 100644 --- a/rbi/openai/models/responses/response_input_audio.rbi +++ b/rbi/openai/models/responses/response_input_audio.rbi @@ -4,12 +4,14 @@ module OpenAI module Models module Responses class ResponseInputAudio < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Base64-encoded audio data. sig { returns(String) } attr_accessor :data # The format of the audio data. Currently supported formats are `mp3` and `wav`. - sig { returns(OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol) } + sig { returns(OpenAI::Responses::ResponseInputAudio::Format::OrSymbol) } attr_accessor :format_ # The type of the input item. Always `input_audio`. @@ -20,10 +22,9 @@ module OpenAI sig do params( data: String, - format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, + format_: OpenAI::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Base64-encoded audio data. @@ -32,27 +33,51 @@ module OpenAI format_:, # The type of the input item. Always `input_audio`. type: :input_audio - ); end + ) + end + sig do - override - .returns( - {data: String, format_: OpenAI::Models::Responses::ResponseInputAudio::Format::OrSymbol, type: Symbol} - ) + override.returns( + { + data: String, + format_: OpenAI::Responses::ResponseInputAudio::Format::OrSymbol, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
module Format extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseInputAudio::Format) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) - WAV = T.let(:wav, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) + MP3 = + T.let( + :mp3, + OpenAI::Responses::ResponseInputAudio::Format::TaggedSymbol + ) + WAV = + T.let( + :wav, + OpenAI::Responses::ResponseInputAudio::Format::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputAudio::Format::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_input_content.rbi b/rbi/openai/models/responses/response_input_content.rbi index e4f21010..f18545cc 100644 --- a/rbi/openai/models/responses/response_input_content.rbi +++ b/rbi/openai/models/responses/response_input_content.rbi @@ -7,13 +7,22 @@ module OpenAI module ResponseInputContent extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseInputContent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_input_file.rbi b/rbi/openai/models/responses/response_input_file.rbi index b0dbcd3a..3275a394 100644 --- a/rbi/openai/models/responses/response_input_file.rbi +++ b/rbi/openai/models/responses/response_input_file.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseInputFile < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the input item. Always `input_file`. sig { returns(Symbol) } attr_accessor :type @@ -28,8 +30,12 @@ module OpenAI # A file input to the model. sig do - params(file_data: String, file_id: T.nilable(String), filename: String, type: Symbol) - .returns(T.attached_class) + params( + file_data: String, + file_id: T.nilable(String), + filename: String, + type: Symbol + ).returns(T.attached_class) end def self.new( # The content of the file to be sent to the model. @@ -40,11 +46,21 @@ module OpenAI filename: nil, # The type of the input item. Always `input_file`. 
type: :input_file - ); end + ) + end + sig do - override.returns({type: Symbol, file_data: String, file_id: T.nilable(String), filename: String}) + override.returns( + { + type: Symbol, + file_data: String, + file_id: T.nilable(String), + filename: String + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_input_image.rbi b/rbi/openai/models/responses/response_input_image.rbi index ecac54d3..182aa6d4 100644 --- a/rbi/openai/models/responses/response_input_image.rbi +++ b/rbi/openai/models/responses/response_input_image.rbi @@ -4,9 +4,11 @@ module OpenAI module Models module Responses class ResponseInputImage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. - sig { returns(OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol) } + sig { returns(OpenAI::Responses::ResponseInputImage::Detail::OrSymbol) } attr_accessor :detail # The type of the input item. Always `input_image`. @@ -26,12 +28,11 @@ module OpenAI # [image inputs](https://platform.openai.com/docs/guides/vision). sig do params( - detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, + detail: OpenAI::Responses::ResponseInputImage::Detail::OrSymbol, file_id: T.nilable(String), image_url: T.nilable(String), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The detail level of the image to be sent to the model. One of `high`, `low`, or @@ -44,34 +45,58 @@ module OpenAI image_url: nil, # The type of the input item. Always `input_image`. type: :input_image - ); end + ) + end + sig do - override - .returns( - { - detail: OpenAI::Models::Responses::ResponseInputImage::Detail::OrSymbol, - type: Symbol, - file_id: T.nilable(String), - image_url: T.nilable(String) - } - ) + override.returns( + { + detail: OpenAI::Responses::ResponseInputImage::Detail::OrSymbol, + type: Symbol, + file_id: T.nilable(String), + image_url: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. 
module Detail extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseInputImage::Detail) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) - AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) + LOW = + T.let( + :low, + OpenAI::Responses::ResponseInputImage::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::ResponseInputImage::Detail::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::ResponseInputImage::Detail::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputImage::Detail::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index 9c3dec78..3d0c8ba2 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -11,16 +11,36 @@ module OpenAI module ResponseInputItem extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + end + class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of one or many input items to the model, containing different content # types. sig do returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ] ) @@ -28,22 +48,48 @@ module OpenAI attr_accessor :content # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol) } + sig do + returns( + OpenAI::Responses::ResponseInputItem::Message::Role::OrSymbol + ) + end attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::Message::Status::OrSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Responses::ResponseInputItem::Message::Status::OrSymbol + ).void + end attr_writer :status # The type of the message input. Always set to `message`. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::Message::Type::OrSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol).void } + sig do + params( + type: + OpenAI::Responses::ResponseInputItem::Message::Type::OrSymbol + ).void + end attr_writer :type # A message input to the model with a role indicating instruction following @@ -51,19 +97,21 @@ module OpenAI # precedence over instructions given with the `user` role. sig do params( - content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ], - role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, - status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T::Array[ + T.any( + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Responses::ResponseInputImage::OrHash, + OpenAI::Responses::ResponseInputFile::OrHash + ) + ], + role: + OpenAI::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: + OpenAI::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: + OpenAI::Responses::ResponseInputItem::Message::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # A list of one or many input items to the model, containing different content @@ -76,40 +124,70 @@ module OpenAI status: nil, # The type of the message input. Always set to `message`. type: nil - ); end + ) + end + sig do - override - .returns( - { - content: T::Array[ + override.returns( + { + content: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputItem::Message::Role::OrSymbol, - status: OpenAI::Models::Responses::ResponseInputItem::Message::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseInputItem::Message::Type::OrSymbol - } - ) + role: + OpenAI::Responses::ResponseInputItem::Message::Role::OrSymbol, + status: + OpenAI::Responses::ResponseInputItem::Message::Status::OrSymbol, + type: + OpenAI::Responses::ResponseInputItem::Message::Type::OrSymbol + } + ) + end + def to_hash end - def to_hash; end # The role of the message input. One of `user`, `system`, or `developer`. 
module Role extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::Message::Role + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Responses::ResponseInputItem::Message::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Responses::ResponseInputItem::Message::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Responses::ResponseInputItem::Message::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::Message::Role::TaggedSymbol + ] + ) + end + def self.values + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -118,18 +196,39 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::Message::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::Message::Status::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::Message::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::Message::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::Message::Status::TaggedSymbol + ] + ) + end + def self.values + end end # The type of the message input. Always set to `message`. 
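          # At runtime the `TaggedSymbol` constants declared above are plain Ruby
          # Symbols (`T.let(:user, ...)`), while `OrSymbol` (`T.any(Symbol, String)`)
          # is the looser type callers may pass in. A small sketch, assuming `values`
          # reflects declaration order:
          #
          #   role = OpenAI::Responses::ResponseInputItem::Message::Role::DEVELOPER
          #   role == :developer  # => true
          #   OpenAI::Responses::ResponseInputItem::Message::Status.values
          #   # => [:in_progress, :completed, :incomplete]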
@@ -137,30 +236,51 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::Message::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) + MESSAGE = + T.let( + :message, + OpenAI::Responses::ResponseInputItem::Message::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::Message::Type::TaggedSymbol + ] + ) + end + def self.values + end end end class ComputerCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the computer tool call that produced the output. sig { returns(String) } attr_accessor :call_id # A computer screenshot image used with the computer use tool. - sig { returns(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot) } + sig do + returns(OpenAI::Responses::ResponseComputerToolCallOutputScreenshot) + end attr_reader :output sig do params( - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash) - ) - .void + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot::OrHash + ).void end attr_writer :output @@ -177,7 +297,9 @@ module OpenAI sig do returns( T.nilable( - T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + T::Array[ + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck + ] ) ) end @@ -185,27 +307,34 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol + ) + ) + end attr_accessor :status # The output of a computer tool call. sig do params( call_id: String, - output: T.any(OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, OpenAI::Internal::AnyHash), + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot::OrHash, id: T.nilable(String), - acknowledged_safety_checks: T.nilable( - T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, - OpenAI::Internal::AnyHash - ) - ] - ), - status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol), + acknowledged_safety_checks: + T.nilable( + T::Array[ + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck::OrHash + ] + ), + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the computer tool call that produced the output. @@ -222,25 +351,37 @@ module OpenAI status: nil, # The type of the computer tool call output. Always `computer_call_output`. 
type: :computer_call_output - ); end + ) + end + sig do - override - .returns( - { - call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - type: Symbol, - id: T.nilable(String), - acknowledged_safety_checks: T.nilable( - T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] + override.returns( + { + call_id: String, + output: + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: Symbol, + id: T.nilable(String), + acknowledged_safety_checks: + T.nilable( + T::Array[ + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck + ] ), - status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol) - } - ) + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::OrSymbol + ) + } + ) + end + def to_hash end - def to_hash; end class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the pending safety check. sig { returns(String) } attr_accessor :id @@ -268,9 +409,20 @@ module OpenAI code: nil, # Details about the pending safety check. message: nil - ); end - sig { override.returns({id: String, code: T.nilable(String), message: T.nilable(String)}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + code: T.nilable(String), + message: T.nilable(String) + } + ) + end + def to_hash + end end # The status of the message input. One of `in_progress`, `completed`, or @@ -279,28 +431,46 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( :in_progress, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end class FunctionCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the function tool call generated by the model. sig { returns(String) } attr_accessor :call_id @@ -320,7 +490,13 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
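          # This is the item a caller typically appends to the next request's input
          # after running a function call locally. A sketch with hypothetical IDs and
          # payload (`id` and `status` default to nil per the signature below):
          #
          #   require "json"
          #
          #   result = OpenAI::Responses::ResponseInputItem::FunctionCallOutput.new(
          #     call_id: "call_abc123",
          #     output: JSON.generate({temperature: 21})
          #   )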
- sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + ) + ) + end attr_accessor :status # The output of a function tool call. @@ -329,10 +505,12 @@ module OpenAI call_id: String, output: String, id: T.nilable(String), - status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol), + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + ), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the function tool call generated by the model. @@ -347,20 +525,25 @@ module OpenAI status: nil, # The type of the function tool call output. Always `function_call_output`. type: :function_call_output - ); end + ) + end + sig do - override - .returns( - { - call_id: String, - output: String, - type: Symbol, - id: T.nilable(String), - status: T.nilable(OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol) - } - ) + override.returns( + { + call_id: String, + output: String, + type: Symbol, + id: T.nilable(String), + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::OrSymbol + ) + } + ) + end + def to_hash end - def to_hash; end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -368,83 +551,130 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( :in_progress, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol ) COMPLETED = - T.let(:completed, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + ) sig do - override - .returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the item to reference. sig { returns(String) } attr_accessor :id # The type of item to reference. Always `item_reference`. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::ItemReference::Type::OrSymbol + ) + ) + end attr_accessor :type # An internal identifier for an item to reference. 
sig do params( id: String, - type: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol) - ) - .returns(T.attached_class) + type: + T.nilable( + OpenAI::Responses::ResponseInputItem::ItemReference::Type::OrSymbol + ) + ).returns(T.attached_class) end def self.new( # The ID of the item to reference. id:, # The type of item to reference. Always `item_reference`. type: nil - ); end + ) + end + sig do - override - .returns( - {id: String, type: T.nilable(OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::OrSymbol)} - ) + override.returns( + { + id: String, + type: + T.nilable( + OpenAI::Responses::ResponseInputItem::ItemReference::Type::OrSymbol + ) + } + ) + end + def to_hash end - def to_hash; end # The type of item to reference. Always `item_reference`. module Type extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type) } + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::ItemReference::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } ITEM_REFERENCE = - T.let(:item_reference, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol) + T.let( + :item_reference, + OpenAI::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol + ) sig do - override.returns(T::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::ItemReference::Type::TaggedSymbol + ] + ) + end + def self.values end - def self.values; end end end sig do - override - .returns( - [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference] - ) + override.returns( + T::Array[OpenAI::Responses::ResponseInputItem::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_input_message_content_list.rbi b/rbi/openai/models/responses/response_input_message_content_list.rbi index f25328f7..15396775 100644 --- a/rbi/openai/models/responses/response_input_message_content_list.rbi +++ b/rbi/openai/models/responses/response_input_message_content_list.rbi @@ -5,7 +5,9 @@ module OpenAI module Responses ResponseInputMessageContentList = T.let( - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent], + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Responses::ResponseInputContent + ], OpenAI::Internal::Type::Converter ) end diff --git a/rbi/openai/models/responses/response_input_message_item.rbi b/rbi/openai/models/responses/response_input_message_item.rbi index 9c426966..25f40889 100644 --- a/rbi/openai/models/responses/response_input_message_item.rbi +++ b/rbi/openai/models/responses/response_input_message_item.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { 
T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the message input. sig { returns(String) } attr_accessor :id @@ -14,9 +16,9 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ] ) @@ -24,40 +26,65 @@ module OpenAI attr_accessor :content # The role of the message input. One of `user`, `system`, or `developer`. - sig { returns(OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + sig do + returns( + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol + ) + end attr_accessor :role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol).void } + sig do + params( + status: + OpenAI::Responses::ResponseInputMessageItem::Status::OrSymbol + ).void + end attr_writer :status # The type of the message input. Always set to `message`. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputMessageItem::Type::TaggedSymbol + ) + ) + end attr_reader :type - sig { params(type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol).void } + sig do + params( + type: OpenAI::Responses::ResponseInputMessageItem::Type::OrSymbol + ).void + end attr_writer :type sig do params( id: String, - content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile - ) - ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::OrSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::OrSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::OrSymbol - ) - .returns(T.attached_class) + content: + T::Array[ + T.any( + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Responses::ResponseInputImage::OrHash, + OpenAI::Responses::ResponseInputFile::OrHash + ) + ], + role: OpenAI::Responses::ResponseInputMessageItem::Role::OrSymbol, + status: + OpenAI::Responses::ResponseInputMessageItem::Status::OrSymbol, + type: OpenAI::Responses::ResponseInputMessageItem::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # The unique ID of the message input. @@ -72,40 +99,68 @@ module OpenAI status: nil, # The type of the message input. Always set to `message`. 
type: nil - ); end + ) + end + sig do - override - .returns( - { - id: String, - content: T::Array[ + override.returns( + { + id: String, + content: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputText, - OpenAI::Models::Responses::ResponseInputImage, - OpenAI::Models::Responses::ResponseInputFile + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile ) ], - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol, - type: OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol - } - ) + role: + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol, + status: + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol, + type: + OpenAI::Responses::ResponseInputMessageItem::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The role of the message input. One of `user`, `system`, or `developer`. module Role extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseInputMessageItem::Role) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) + USER = + T.let( + :user, + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol + ] + ) + end + def self.values + end end # The status of item. One of `in_progress`, `completed`, or `incomplete`. 
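        # Note that this file, like the rest of the rewrite, refers to the model as
        # `OpenAI::Responses::ResponseInputMessageItem` rather than
        # `OpenAI::Models::Responses::ResponseInputMessageItem`; both spellings are
        # expected to resolve to the same class, assuming the gem aliases
        # `OpenAI::Responses` to `OpenAI::Models::Responses`:
        #
        #   OpenAI::Responses::ResponseInputMessageItem
        #     .equal?(OpenAI::Models::Responses::ResponseInputMessageItem)
        #   # => true (assumed)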
@@ -114,30 +169,63 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseInputMessageItem::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol + ) INCOMPLETE = - T.let(:incomplete, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) + T.let( + :incomplete, + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputMessageItem::Status::TaggedSymbol + ] + ) + end + def self.values + end end # The type of the message input. Always set to `message`. module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseInputMessageItem::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) + MESSAGE = + T.let( + :message, + OpenAI::Responses::ResponseInputMessageItem::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputMessageItem::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_input_text.rbi b/rbi/openai/models/responses/response_input_text.rbi index 6ecb1f55..566e9627 100644 --- a/rbi/openai/models/responses/response_input_text.rbi +++ b/rbi/openai/models/responses/response_input_text.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseInputText < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text input to the model. sig { returns(String) } attr_accessor :text @@ -19,9 +21,12 @@ module OpenAI text:, # The type of the input item. Always `input_text`. 
type: :input_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_item.rbi b/rbi/openai/models/responses/response_item.rbi index 21440ba2..021afb6a 100644 --- a/rbi/openai/models/responses/response_item.rbi +++ b/rbi/openai/models/responses/response_item.rbi @@ -7,13 +7,25 @@ module OpenAI module ResponseItem extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem ) + end + + sig do + override.returns(T::Array[OpenAI::Responses::ResponseItem::Variants]) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index d83e183d..ef2ba10b 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -6,19 +6,21 @@ module OpenAI module Responses class ResponseItemList < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of items used to generate this response. sig do returns( T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem ) ] ) @@ -44,25 +46,24 @@ module OpenAI # A list of Response items. 
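        # The fields below make the list a cursor page: `data` holds the items,
        # `last_id` is the cursor, and `has_more` signals another page. A paging
        # sketch around a hypothetical `fetch_page(after:)` helper that returns a
        # `ResponseItemList`:
        #
        #   page = fetch_page(after: nil)
        #   items = page.data.dup
        #   while page.has_more
        #     page = fetch_page(after: page.last_id)
        #     items.concat(page.data)
        #   end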
sig do params( - data: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) - ], + data: + T::Array[ + T.any( + OpenAI::Responses::ResponseInputMessageItem::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCallOutputItem::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCallItem::OrHash, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash + ) + ], first_id: String, has_more: T::Boolean, last_id: String, object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A list of items used to generate this response. @@ -75,31 +76,34 @@ module OpenAI last_id:, # The type of object returned, must be `list`. object: :list - ); end + ) + end + sig do - override - .returns( - { - data: T::Array[ + override.returns( + { + data: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem ) ], - first_id: String, - has_more: T::Boolean, - last_id: String, - object: Symbol - } - ) + first_id: String, + has_more: T::Boolean, + last_id: String, + object: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_output_audio.rbi b/rbi/openai/models/responses/response_output_audio.rbi index ccca474a..bfcafd48 100644 --- a/rbi/openai/models/responses/response_output_audio.rbi +++ b/rbi/openai/models/responses/response_output_audio.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Base64-encoded audio data from the model. sig { returns(String) } attr_accessor :data @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # An audio output from the model. - sig { params(data: String, transcript: String, type: Symbol).returns(T.attached_class) } + sig do + params(data: String, transcript: String, type: Symbol).returns( + T.attached_class + ) + end def self.new( # Base64-encoded audio data from the model. 
data:, @@ -25,9 +31,14 @@ module OpenAI transcript:, # The type of the output audio. Always `output_audio`. type: :output_audio - ); end - sig { override.returns({data: String, transcript: String, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns({ data: String, transcript: String, type: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_output_item.rbi b/rbi/openai/models/responses/response_output_item.rbi index 5409b70c..01dd560a 100644 --- a/rbi/openai/models/responses/response_output_item.rbi +++ b/rbi/openai/models/responses/response_output_item.rbi @@ -7,13 +7,25 @@ module OpenAI module ResponseOutputItem extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseOutputItem::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index 6318ea2f..7f0f565c 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -4,16 +4,18 @@ module OpenAI module Models module Responses class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The output item that was added. sig do returns( T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ) ) end @@ -30,19 +32,18 @@ module OpenAI # Emitted when a new output item is added. 
sig do params( - item: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ), + item: + T.any( + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash + ), output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The output item that was added. @@ -51,25 +52,28 @@ module OpenAI output_index:, # The type of the event. Always `response.output_item.added`. type: :"response.output_item.added" - ); end + ) + end + sig do - override - .returns( - { - item: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + override.returns( + { + item: + T.any( + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ), - output_index: Integer, - type: Symbol - } - ) + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index 2ec17e88..94e4db56 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -4,16 +4,18 @@ module OpenAI module Models module Responses class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The output item that was marked done. sig do returns( T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ) ) end @@ -30,19 +32,18 @@ module OpenAI # Emitted when an output item is marked done. 
sig do params( - item: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem - ), + item: + T.any( + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash + ), output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The output item that was marked done. @@ -51,25 +52,28 @@ module OpenAI output_index:, # The type of the event. Always `response.output_item.done`. type: :"response.output_item.done" - ); end + ) + end + sig do - override - .returns( - { - item: T.any( - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseReasoningItem + override.returns( + { + item: + T.any( + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem ), - output_index: Integer, - type: Symbol - } - ) + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_output_message.rbi b/rbi/openai/models/responses/response_output_message.rbi index 67f1cfc9..e807c5bc 100644 --- a/rbi/openai/models/responses/response_output_message.rbi +++ b/rbi/openai/models/responses/response_output_message.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique ID of the output message. sig { returns(String) } attr_accessor :id @@ -11,7 +13,12 @@ module OpenAI # The content of the output message. sig do returns( - T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] + T::Array[ + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + ] ) end attr_accessor :content @@ -22,7 +29,9 @@ module OpenAI # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. - sig { returns(OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol) } + sig do + returns(OpenAI::Responses::ResponseOutputMessage::Status::OrSymbol) + end attr_accessor :status # The type of the output message. Always `message`. 
@@ -33,18 +42,17 @@ module OpenAI sig do params( id: String, - content: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputRefusal - ) - ], - status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, + content: + T::Array[ + T.any( + OpenAI::Responses::ResponseOutputText::OrHash, + OpenAI::Responses::ResponseOutputRefusal::OrHash + ) + ], + status: OpenAI::Responses::ResponseOutputMessage::Status::OrSymbol, role: Symbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique ID of the output message. @@ -58,30 +66,51 @@ module OpenAI role: :assistant, # The type of the output message. Always `message`. type: :message - ); end + ) + end + sig do - override - .returns( - { - id: String, - content: T::Array[T.any(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)], - role: Symbol, - status: OpenAI::Models::Responses::ResponseOutputMessage::Status::OrSymbol, - type: Symbol - } - ) + override.returns( + { + id: String, + content: + T::Array[ + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + ], + role: Symbol, + status: + OpenAI::Responses::ResponseOutputMessage::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A text output from the model. module Content extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + end + sig do - override - .returns([OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]) + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputMessage::Content::Variants + ] + ) + end + def self.variants end - def self.variants; end end # The status of the message input. 
One of `in_progress`, `completed`, or
@@ -89,15 +118,37 @@ module OpenAI
         module Status
           extend OpenAI::Internal::Type::Enum
 
-          TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) }
+          TaggedSymbol =
+            T.type_alias do
+              T.all(Symbol, OpenAI::Responses::ResponseOutputMessage::Status)
+            end
           OrSymbol = T.type_alias { T.any(Symbol, String) }
 
-          IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol)
-          COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol)
-          INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol)
+          IN_PROGRESS =
+            T.let(
+              :in_progress,
+              OpenAI::Responses::ResponseOutputMessage::Status::TaggedSymbol
+            )
+          COMPLETED =
+            T.let(
+              :completed,
+              OpenAI::Responses::ResponseOutputMessage::Status::TaggedSymbol
+            )
+          INCOMPLETE =
+            T.let(
+              :incomplete,
+              OpenAI::Responses::ResponseOutputMessage::Status::TaggedSymbol
+            )
 
-          sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol]) }
-          def self.values; end
+          sig do
+            override.returns(
+              T::Array[
+                OpenAI::Responses::ResponseOutputMessage::Status::TaggedSymbol
+              ]
+            )
+          end
+          def self.values
+          end
         end
       end
     end
diff --git a/rbi/openai/models/responses/response_output_refusal.rbi b/rbi/openai/models/responses/response_output_refusal.rbi
index 16a800dd..0d0d1a26 100644
--- a/rbi/openai/models/responses/response_output_refusal.rbi
+++ b/rbi/openai/models/responses/response_output_refusal.rbi
@@ -4,6 +4,8 @@ module OpenAI
   module Models
     module Responses
       class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel
+        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
         # The refusal explanation from the model.
         sig { returns(String) }
         attr_accessor :refusal
@@ -19,9 +21,12 @@ module OpenAI
           refusal:,
           # The type of the refusal. Always `refusal`.
           type: :refusal
-        ); end
-        sig { override.returns({refusal: String, type: Symbol}) }
-        def to_hash; end
+        )
+        end
+
+        sig { override.returns({ refusal: String, type: Symbol }) }
+        def to_hash
+        end
       end
     end
   end
diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi
index e0267bd2..cea49a6e 100644
--- a/rbi/openai/models/responses/response_output_text.rbi
+++ b/rbi/openai/models/responses/response_output_text.rbi
@@ -4,14 +4,16 @@ module OpenAI
   module Models
     module Responses
       class ResponseOutputText < OpenAI::Internal::Type::BaseModel
+        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
         # The annotations of the text output.
         sig do
           returns(
             T::Array[
               T.any(
-                OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation,
-                OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation,
-                OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath
+                OpenAI::Responses::ResponseOutputText::Annotation::FileCitation,
+                OpenAI::Responses::ResponseOutputText::Annotation::URLCitation,
+                OpenAI::Responses::ResponseOutputText::Annotation::FilePath
               )
             ]
           )
@@ -29,18 +31,17 @@ module OpenAI
 
         # A text output from the model.
sig do params( - annotations: T::Array[ - T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath - ) - ], + annotations: + T::Array[ + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::FileCitation::OrHash, + OpenAI::Responses::ResponseOutputText::Annotation::URLCitation::OrHash, + OpenAI::Responses::ResponseOutputText::Annotation::FilePath::OrHash + ) + ], text: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The annotations of the text output. @@ -49,30 +50,45 @@ module OpenAI text:, # The type of the output text. Always `output_text`. type: :output_text - ); end + ) + end + sig do - override - .returns( - { - annotations: T::Array[ + override.returns( + { + annotations: + T::Array[ T.any( - OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath + OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::FilePath ) ], - text: String, - type: Symbol - } - ) + text: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A citation to a file. module Annotation extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::FilePath + ) + end + class FileCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -86,7 +102,11 @@ module OpenAI attr_accessor :type # A citation to a file. - sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the file. file_id:, @@ -94,12 +114,22 @@ module OpenAI index:, # The type of the file citation. Always `file_citation`. type: :file_citation - ); end - sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { file_id: String, index: Integer, type: Symbol } + ) + end + def to_hash + end end class URLCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the last character of the URL citation in the message. sig { returns(Integer) } attr_accessor :end_index @@ -122,8 +152,13 @@ module OpenAI # A citation for a web resource used to generate a model response. sig do - params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) - .returns(T.attached_class) + params( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + type: Symbol + ).returns(T.attached_class) end def self.new( # The index of the last character of the URL citation in the message. @@ -136,7 +171,9 @@ module OpenAI url:, # The type of the URL citation. Always `url_citation`. 
type: :url_citation - ); end + ) + end + sig do override.returns( { @@ -148,10 +185,14 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class FilePath < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -165,7 +206,11 @@ module OpenAI attr_accessor :type # A path to a file. - sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the file. file_id:, @@ -173,18 +218,27 @@ module OpenAI index:, # The type of the file path. Always `file_path`. type: :file_path - ); end - sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { file_id: String, index: Integer, type: Symbol } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath] - ) + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputText::Annotation::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_reasoning_item.rbi b/rbi/openai/models/responses/response_reasoning_item.rbi index 6c5006fd..d54abb0d 100644 --- a/rbi/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/openai/models/responses/response_reasoning_item.rbi @@ -4,12 +4,16 @@ module OpenAI module Models module Responses class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The unique identifier of the reasoning content. sig { returns(String) } attr_accessor :id # Reasoning text contents. - sig { returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]) } + sig do + returns(T::Array[OpenAI::Responses::ResponseReasoningItem::Summary]) + end attr_accessor :summary # The type of the object. Always `reasoning`. @@ -23,10 +27,20 @@ module OpenAI # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. - sig { returns(T.nilable(OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol + ) + ) + end attr_reader :status - sig { params(status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol).void } + sig do + params( + status: OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol + ).void + end attr_writer :status # A description of the chain of thought used by a reasoning model while generating @@ -36,12 +50,14 @@ module OpenAI sig do params( id: String, - summary: T::Array[T.any(OpenAI::Models::Responses::ResponseReasoningItem::Summary, OpenAI::Internal::AnyHash)], + summary: + T::Array[ + OpenAI::Responses::ResponseReasoningItem::Summary::OrHash + ], encrypted_content: T.nilable(String), - status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol, + status: OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The unique identifier of the reasoning content. 
@@ -56,22 +72,28 @@ module OpenAI status: nil, # The type of the object. Always `reasoning`. type: :reasoning - ); end + ) + end + sig do - override - .returns( - { - id: String, - summary: T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary], - type: Symbol, - encrypted_content: T.nilable(String), - status: OpenAI::Models::Responses::ResponseReasoningItem::Status::OrSymbol - } - ) + override.returns( + { + id: String, + summary: + T::Array[OpenAI::Responses::ResponseReasoningItem::Summary], + type: Symbol, + encrypted_content: T.nilable(String), + status: OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol + } + ) + end + def to_hash end - def to_hash; end class Summary < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A short summary of the reasoning used by the model when generating the response. sig { returns(String) } attr_accessor :text @@ -86,9 +108,12 @@ module OpenAI text:, # The type of the object. Always `summary_text`. type: :summary_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -96,15 +121,37 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseReasoningItem::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseReasoningItem::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseReasoningItem::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseReasoningItem::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseReasoningItem::Status::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi index 22c28d0f..260bed98 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the item this summary part is associated with. sig { returns(String) } attr_accessor :item_id @@ -13,14 +15,18 @@ module OpenAI attr_accessor :output_index # The summary part that was added. 
- sig { returns(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part) } + sig do + returns( + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part + ) + end attr_reader :part sig do params( - part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, OpenAI::Internal::AnyHash) - ) - .void + part: + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part::OrHash + ).void end attr_writer :part @@ -37,11 +43,11 @@ module OpenAI params( item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, OpenAI::Internal::AnyHash), + part: + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part::OrHash, summary_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the item this summary part is associated with. @@ -54,22 +60,28 @@ module OpenAI summary_index:, # The type of the event. Always `response.reasoning_summary_part.added`. type: :"response.reasoning_summary_part.added" - ); end + ) + end + sig do - override - .returns( - { - item_id: String, - output_index: Integer, - part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part, - summary_index: Integer, - type: Symbol - } - ) + override.returns( + { + item_id: String, + output_index: Integer, + part: + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Part < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text of the summary part. sig { returns(String) } attr_accessor :text @@ -85,9 +97,12 @@ module OpenAI text:, # The type of the summary part. Always `summary_text`. type: :summary_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi index 93ac8fbb..036aaa0d 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the item this summary part is associated with. sig { returns(String) } attr_accessor :item_id @@ -13,14 +15,18 @@ module OpenAI attr_accessor :output_index # The completed summary part. 
- sig { returns(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part) } + sig do + returns( + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part + ) + end attr_reader :part sig do params( - part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, OpenAI::Internal::AnyHash) - ) - .void + part: + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part::OrHash + ).void end attr_writer :part @@ -37,11 +43,11 @@ module OpenAI params( item_id: String, output_index: Integer, - part: T.any(OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, OpenAI::Internal::AnyHash), + part: + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part::OrHash, summary_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The ID of the item this summary part is associated with. @@ -54,22 +60,28 @@ module OpenAI summary_index:, # The type of the event. Always `response.reasoning_summary_part.done`. type: :"response.reasoning_summary_part.done" - ); end + ) + end + sig do - override - .returns( - { - item_id: String, - output_index: Integer, - part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part, - summary_index: Integer, - type: Symbol - } - ) + override.returns( + { + item_id: String, + output_index: Integer, + part: + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end class Part < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text of the summary part. sig { returns(String) } attr_accessor :text @@ -85,9 +97,12 @@ module OpenAI text:, # The type of the summary part. Always `summary_text`. type: :summary_text - ); end - sig { override.returns({text: String, type: Symbol}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi index 0b471aed..6fb92b4d 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text delta that was added to the summary. sig { returns(String) } attr_accessor :delta @@ -26,8 +28,13 @@ module OpenAI # Emitted when a delta is added to a reasoning summary text. sig do - params(delta: String, item_id: String, output_index: Integer, summary_index: Integer, type: Symbol) - .returns(T.attached_class) + params( + delta: String, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The text delta that was added to the summary. @@ -40,18 +47,22 @@ module OpenAI summary_index:, # The type of the event. Always `response.reasoning_summary_text.delta`. 
type: :"response.reasoning_summary_text.delta" - ); end + ) + end + sig do - override - .returns({ - delta: String, - item_id: String, - output_index: Integer, - summary_index: Integer, - type: Symbol - }) + override.returns( + { + delta: String, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi index 6ff8c695..a9f0d59c 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the item this summary text is associated with. sig { returns(String) } attr_accessor :item_id @@ -26,8 +28,13 @@ module OpenAI # Emitted when a reasoning summary text is completed. sig do - params(item_id: String, output_index: Integer, summary_index: Integer, text: String, type: Symbol) - .returns(T.attached_class) + params( + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the item this summary text is associated with. @@ -40,18 +47,22 @@ module OpenAI text:, # The type of the event. Always `response.reasoning_summary_text.done`. type: :"response.reasoning_summary_text.done" - ); end + ) + end + sig do - override - .returns({ - item_id: String, - output_index: Integer, - summary_index: Integer, - text: String, - type: Symbol - }) + override.returns( + { + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_refusal_delta_event.rbi b/rbi/openai/models/responses/response_refusal_delta_event.rbi index 204d391f..a89025e3 100644 --- a/rbi/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/openai/models/responses/response_refusal_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that the refusal text is added to. sig { returns(Integer) } attr_accessor :content_index @@ -26,8 +28,13 @@ module OpenAI # Emitted when there is a partial refusal text. sig do - params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) - .returns(T.attached_class) + params( + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The index of the content part that the refusal text is added to. @@ -40,18 +47,22 @@ module OpenAI output_index:, # The type of the event. Always `response.refusal.delta`. 
type: :"response.refusal.delta" - ); end + ) + end + sig do - override - .returns({ - content_index: Integer, - delta: String, - item_id: String, - output_index: Integer, - type: Symbol - }) + override.returns( + { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_refusal_done_event.rbi b/rbi/openai/models/responses/response_refusal_done_event.rbi index 5bc2d764..0100485a 100644 --- a/rbi/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/openai/models/responses/response_refusal_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that the refusal text is finalized. sig { returns(Integer) } attr_accessor :content_index @@ -32,8 +34,7 @@ module OpenAI output_index: Integer, refusal: String, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The index of the content part that the refusal text is finalized. @@ -46,18 +47,22 @@ module OpenAI refusal:, # The type of the event. Always `response.refusal.done`. type: :"response.refusal.done" - ); end + ) + end + sig do - override - .returns({ - content_index: Integer, - item_id: String, - output_index: Integer, - refusal: String, - type: Symbol - }) + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + refusal: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi index 876e71b5..95522e58 100644 --- a/rbi/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/openai/models/responses/response_retrieve_params.rbi @@ -7,37 +7,49 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. - sig { returns(T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end attr_reader :include - sig { params(include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]).void } + sig do + params( + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ).void + end attr_writer :include sig do params( - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. 
include: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + include: + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_status.rbi b/rbi/openai/models/responses/response_status.rbi index 7fb2ba80..ed8a5a5d 100644 --- a/rbi/openai/models/responses/response_status.rbi +++ b/rbi/openai/models/responses/response_status.rbi @@ -8,16 +8,25 @@ module OpenAI module ResponseStatus extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Responses::ResponseStatus) } OrSymbol = T.type_alias { T.any(Symbol, String) } - COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) - INCOMPLETE = T.let(:incomplete, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) + COMPLETED = + T.let(:completed, OpenAI::Responses::ResponseStatus::TaggedSymbol) + FAILED = T.let(:failed, OpenAI::Responses::ResponseStatus::TaggedSymbol) + IN_PROGRESS = + T.let(:in_progress, OpenAI::Responses::ResponseStatus::TaggedSymbol) + INCOMPLETE = + T.let(:incomplete, OpenAI::Responses::ResponseStatus::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Responses::ResponseStatus::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseStatus::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index 6234c3c6..de4fe138 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -7,13 +7,55 @@ module OpenAI module ResponseStreamEvent extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, 
OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioDeltaEvent, + OpenAI::Responses::ResponseAudioDoneEvent, + OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Responses::ResponseCompletedEvent, + OpenAI::Responses::ResponseContentPartAddedEvent, + OpenAI::Responses::ResponseContentPartDoneEvent, + OpenAI::Responses::ResponseCreatedEvent, + OpenAI::Responses::ResponseErrorEvent, + OpenAI::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Responses::ResponseInProgressEvent, + OpenAI::Responses::ResponseFailedEvent, + OpenAI::Responses::ResponseIncompleteEvent, + OpenAI::Responses::ResponseOutputItemAddedEvent, + OpenAI::Responses::ResponseOutputItemDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, + OpenAI::Responses::ResponseRefusalDeltaEvent, + OpenAI::Responses::ResponseRefusalDoneEvent, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Responses::ResponseTextDeltaEvent, + OpenAI::Responses::ResponseTextDoneEvent, + OpenAI::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Responses::ResponseWebSearchCallSearchingEvent ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponseStreamEvent::Variants] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi index 91bc1793..c235f425 100644 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi @@ -4,13 +4,15 @@ module OpenAI module Models module Responses class ResponseTextAnnotationDeltaEvent < 
OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A citation to a file. sig do returns( T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath ) ) end @@ -39,19 +41,18 @@ module OpenAI # Emitted when a text annotation is added. sig do params( - annotation: T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ), + annotation: + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation::OrHash, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation::OrHash, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath::OrHash + ), annotation_index: Integer, content_index: Integer, item_id: String, output_index: Integer, type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # A citation to a file. @@ -66,31 +67,46 @@ module OpenAI output_index:, # The type of the event. Always `response.output_text.annotation.added`. type: :"response.output_text.annotation.added" - ); end + ) + end + sig do - override - .returns( - { - annotation: T.any( - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + override.returns( + { + annotation: + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath ), - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - type: Symbol - } - ) + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end # A citation to a file. module Annotation extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + ) + end + class FileCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -104,7 +120,11 @@ module OpenAI attr_accessor :type # A citation to a file. - sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the file. 
file_id:, @@ -112,12 +132,22 @@ module OpenAI index:, # The type of the file citation. Always `file_citation`. type: :file_citation - ); end - sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { file_id: String, index: Integer, type: Symbol } + ) + end + def to_hash + end end class URLCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the last character of the URL citation in the message. sig { returns(Integer) } attr_accessor :end_index @@ -140,8 +170,13 @@ module OpenAI # A citation for a web resource used to generate a model response. sig do - params(end_index: Integer, start_index: Integer, title: String, url: String, type: Symbol) - .returns(T.attached_class) + params( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + type: Symbol + ).returns(T.attached_class) end def self.new( # The index of the last character of the URL citation in the message. @@ -154,7 +189,9 @@ module OpenAI url:, # The type of the URL citation. Always `url_citation`. type: :url_citation - ); end + ) + end + sig do override.returns( { @@ -166,10 +203,14 @@ module OpenAI } ) end - def to_hash; end + def to_hash + end end class FilePath < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ID of the file. sig { returns(String) } attr_accessor :file_id @@ -183,7 +224,11 @@ module OpenAI attr_accessor :type # A path to a file. - sig { params(file_id: String, index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(file_id: String, index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The ID of the file. file_id:, @@ -191,18 +236,27 @@ module OpenAI index:, # The type of the file path. Always `file_path`. type: :file_path - ); end - sig { override.returns({file_id: String, index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { file_id: String, index: Integer, type: Symbol } + ) + end + def to_hash + end end sig do - override - .returns( - [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - ) + override.returns( + T::Array[ + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants + ] + ) + end + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/response_text_config.rbi b/rbi/openai/models/responses/response_text_config.rbi index 2f1c517e..ddd0d65c 100644 --- a/rbi/openai/models/responses/response_text_config.rbi +++ b/rbi/openai/models/responses/response_text_config.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseTextConfig < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # An object specifying the format that the model must output. 
# # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -21,9 +23,9 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject ) ) ) @@ -32,14 +34,13 @@ module OpenAI sig do params( - format_: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject - ) - ) - .void + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void end attr_writer :format_ @@ -50,14 +51,13 @@ module OpenAI # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) sig do params( - format_: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject - ) - ) - .returns(T.attached_class) + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) end def self.new( # An object specifying the format that the model must output. @@ -74,20 +74,23 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. format_: nil - ); end + ) + end + sig do - override - .returns( - { - format_: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::Models::ResponseFormatJSONObject + override.returns( + { + format_: + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject ) - } - ) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi index 29ea7431..5b4d13ac 100644 --- a/rbi/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_delta_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that the text delta was added to. sig { returns(Integer) } attr_accessor :content_index @@ -26,8 +28,13 @@ module OpenAI # Emitted when there is an additional text delta. sig do - params(content_index: Integer, delta: String, item_id: String, output_index: Integer, type: Symbol) - .returns(T.attached_class) + params( + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The index of the content part that the text delta was added to. @@ -40,18 +47,22 @@ module OpenAI output_index:, # The type of the event. Always `response.output_text.delta`. 
type: :"response.output_text.delta" - ); end + ) + end + sig do - override - .returns({ - content_index: Integer, - delta: String, - item_id: String, - output_index: Integer, - type: Symbol - }) + override.returns( + { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi index 40459549..1b05e949 100644 --- a/rbi/openai/models/responses/response_text_done_event.rbi +++ b/rbi/openai/models/responses/response_text_done_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The index of the content part that the text content is finalized. sig { returns(Integer) } attr_accessor :content_index @@ -26,8 +28,13 @@ module OpenAI # Emitted when text content is finalized. sig do - params(content_index: Integer, item_id: String, output_index: Integer, text: String, type: Symbol) - .returns(T.attached_class) + params( + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) end def self.new( # The index of the content part that the text content is finalized. @@ -40,18 +47,22 @@ module OpenAI text:, # The type of the event. Always `response.output_text.done`. type: :"response.output_text.done" - ); end + ) + end + sig do - override - .returns({ - content_index: Integer, - item_id: String, - output_index: Integer, - text: String, - type: Symbol - }) + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses/response_usage.rbi b/rbi/openai/models/responses/response_usage.rbi index f435b178..e9e0c3bc 100644 --- a/rbi/openai/models/responses/response_usage.rbi +++ b/rbi/openai/models/responses/response_usage.rbi @@ -4,19 +4,21 @@ module OpenAI module Models module Responses class ResponseUsage < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of input tokens. sig { returns(Integer) } attr_accessor :input_tokens # A detailed breakdown of the input tokens. - sig { returns(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails) } + sig { returns(OpenAI::Responses::ResponseUsage::InputTokensDetails) } attr_reader :input_tokens_details sig do params( - input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::AnyHash) - ) - .void + input_tokens_details: + OpenAI::Responses::ResponseUsage::InputTokensDetails::OrHash + ).void end attr_writer :input_tokens_details @@ -25,14 +27,14 @@ module OpenAI attr_accessor :output_tokens # A detailed breakdown of the output tokens. 
- sig { returns(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails) } + sig { returns(OpenAI::Responses::ResponseUsage::OutputTokensDetails) } attr_reader :output_tokens_details sig do params( - output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::AnyHash) - ) - .void + output_tokens_details: + OpenAI::Responses::ResponseUsage::OutputTokensDetails::OrHash + ).void end attr_writer :output_tokens_details @@ -45,12 +47,13 @@ module OpenAI sig do params( input_tokens: Integer, - input_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, OpenAI::Internal::AnyHash), + input_tokens_details: + OpenAI::Responses::ResponseUsage::InputTokensDetails::OrHash, output_tokens: Integer, - output_tokens_details: T.any(OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, OpenAI::Internal::AnyHash), + output_tokens_details: + OpenAI::Responses::ResponseUsage::OutputTokensDetails::OrHash, total_tokens: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of input tokens. @@ -63,22 +66,29 @@ module OpenAI output_tokens_details:, # The total number of tokens used. total_tokens: - ); end + ) + end + sig do - override - .returns( - { - input_tokens: Integer, - input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails, - output_tokens: Integer, - output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails, - total_tokens: Integer - } - ) + override.returns( + { + input_tokens: Integer, + input_tokens_details: + OpenAI::Responses::ResponseUsage::InputTokensDetails, + output_tokens: Integer, + output_tokens_details: + OpenAI::Responses::ResponseUsage::OutputTokensDetails, + total_tokens: Integer + } + ) + end + def to_hash end - def to_hash; end class InputTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). sig { returns(Integer) } @@ -90,12 +100,18 @@ module OpenAI # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). cached_tokens: - ); end - sig { override.returns({cached_tokens: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ cached_tokens: Integer }) } + def to_hash + end end class OutputTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of reasoning tokens. sig { returns(Integer) } attr_accessor :reasoning_tokens @@ -105,9 +121,12 @@ module OpenAI def self.new( # The number of reasoning tokens. 
reasoning_tokens: - ); end - sig { override.returns({reasoning_tokens: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ reasoning_tokens: Integer }) } + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi index 1f479679..10e66da7 100644 --- a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique ID for the output item associated with the web search call. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a web search call is completed. - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # Unique ID for the output item associated with the web search call. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.web_search_call.completed`. type: :"response.web_search_call.completed" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi index f8c355d5..22a00bbf 100644 --- a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique ID for the output item associated with the web search call. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a web search call is initiated. - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # Unique ID for the output item associated with the web search call. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.web_search_call.in_progress`. 
type: :"response.web_search_call.in_progress" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi index 0cffcfae..1d1b8d70 100644 --- a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Unique ID for the output item associated with the web search call. sig { returns(String) } attr_accessor :item_id @@ -17,7 +19,11 @@ module OpenAI attr_accessor :type # Emitted when a web search call is executing. - sig { params(item_id: String, output_index: Integer, type: Symbol).returns(T.attached_class) } + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # Unique ID for the output item associated with the web search call. item_id:, @@ -25,9 +31,16 @@ module OpenAI output_index:, # The type of the event. Always `response.web_search_call.searching`. type: :"response.web_search_call.searching" - ); end - sig { override.returns({item_id: String, output_index: Integer, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index dd20795d..f9eb832e 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -7,13 +7,19 @@ module OpenAI module Tool extend OpenAI::Internal::Type::Union - sig do - override - .returns( - [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool] + Variants = + T.type_alias do + T.any( + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool ) + end + + sig { override.returns(T::Array[OpenAI::Responses::Tool::Variants]) } + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/responses/tool_choice_function.rbi b/rbi/openai/models/responses/tool_choice_function.rbi index 98de2eb9..44ea5976 100644 --- a/rbi/openai/models/responses/tool_choice_function.rbi +++ b/rbi/openai/models/responses/tool_choice_function.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module Responses class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The name of the function to call. sig { returns(String) } attr_accessor :name @@ -19,9 +21,12 @@ module OpenAI name:, # For function calling, the type is always `function`. 
           type: :function
-        ); end
-        sig { override.returns({name: String, type: Symbol}) }
-        def to_hash; end
+        )
+        end
+
+        sig { override.returns({ name: String, type: Symbol }) }
+        def to_hash
+        end
       end
     end
   end
 end
diff --git a/rbi/openai/models/responses/tool_choice_options.rbi b/rbi/openai/models/responses/tool_choice_options.rbi
index 793acf3c..01f24950 100644
--- a/rbi/openai/models/responses/tool_choice_options.rbi
+++ b/rbi/openai/models/responses/tool_choice_options.rbi
@@ -14,15 +14,22 @@ module OpenAI
       module ToolChoiceOptions
         extend OpenAI::Internal::Type::Enum
 
-        TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) }
+        TaggedSymbol =
+          T.type_alias { T.all(Symbol, OpenAI::Responses::ToolChoiceOptions) }
         OrSymbol = T.type_alias { T.any(Symbol, String) }
 
-        NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol)
-        AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol)
-        REQUIRED = T.let(:required, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol)
+        NONE = T.let(:none, OpenAI::Responses::ToolChoiceOptions::TaggedSymbol)
+        AUTO = T.let(:auto, OpenAI::Responses::ToolChoiceOptions::TaggedSymbol)
+        REQUIRED =
+          T.let(:required, OpenAI::Responses::ToolChoiceOptions::TaggedSymbol)
 
-        sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol]) }
-        def self.values; end
+        sig do
+          override.returns(
+            T::Array[OpenAI::Responses::ToolChoiceOptions::TaggedSymbol]
+          )
+        end
+        def self.values
+        end
       end
     end
   end
 end
diff --git a/rbi/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi
index 943a8c1f..1013d8f9 100644
--- a/rbi/openai/models/responses/tool_choice_types.rbi
+++ b/rbi/openai/models/responses/tool_choice_types.rbi
@@ -4,6 +4,8 @@ module OpenAI
   module Models
     module Responses
       class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel
+        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
         # The type of hosted tool the model should use. Learn more about
         # [built-in tools](https://platform.openai.com/docs/guides/tools).
         #
@@ -12,12 +14,16 @@ module OpenAI
         # - `file_search`
         # - `web_search_preview`
         # - `computer_use_preview`
-        sig { returns(OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol) }
+        sig { returns(OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol) }
         attr_accessor :type
 
         # Indicates that the model should use a built-in tool to generate a response.
         # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools).
-        sig { params(type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol).returns(T.attached_class) }
+        sig do
+          params(
+            type: OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol
+          ).returns(T.attached_class)
+        end
         def self.new(
           # The type of hosted tool the model should use. Learn more about
           # [built-in tools](https://platform.openai.com/docs/guides/tools).
           #
           # Allowed values are:
           #
           # - `file_search`
           # - `web_search_preview`
           # - `computer_use_preview`
           type:
-        ); end
-        sig { override.returns({type: OpenAI::Models::Responses::ToolChoiceTypes::Type::OrSymbol}) }
-        def to_hash; end
+        )
+        end
+
+        sig do
+          override.returns(
+            { type: OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol }
+          )
+        end
+        def to_hash
+        end
 
         # The type of hosted tool the model should use. Learn more about
         # [built-in tools](https://platform.openai.com/docs/guides/tools).
@@ -43,19 +56,40 @@ module OpenAI module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ToolChoiceTypes::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) + FILE_SEARCH = + T.let( + :file_search, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) WEB_SEARCH_PREVIEW = - T.let(:web_search_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) + T.let( + :web_search_preview, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) COMPUTER_USE_PREVIEW = - T.let(:computer_use_preview, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) + T.let( + :computer_use_preview, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) + T.let( + :web_search_preview_2025_03_11, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/responses/web_search_tool.rbi b/rbi/openai/models/responses/web_search_tool.rbi index 032f4d13..7c952749 100644 --- a/rbi/openai/models/responses/web_search_tool.rbi +++ b/rbi/openai/models/responses/web_search_tool.rbi @@ -4,28 +4,43 @@ module OpenAI module Models module Responses class WebSearchTool < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. - sig { returns(OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol) } + sig { returns(OpenAI::Responses::WebSearchTool::Type::OrSymbol) } attr_accessor :type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. - sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::Responses::WebSearchTool::SearchContextSize::OrSymbol + ) + ) + end attr_reader :search_context_size - sig { params(search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol).void } + sig do + params( + search_context_size: + OpenAI::Responses::WebSearchTool::SearchContextSize::OrSymbol + ).void + end attr_writer :search_context_size # The user's location. - sig { returns(T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation)) } + sig do + returns(T.nilable(OpenAI::Responses::WebSearchTool::UserLocation)) + end attr_reader :user_location sig do params( - user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::AnyHash)) - ) - .void + user_location: + T.nilable(OpenAI::Responses::WebSearchTool::UserLocation::OrHash) + ).void end attr_writer :user_location @@ -34,11 +49,12 @@ module OpenAI # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). 
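         #
         # A minimal construction sketch (illustrative only; the values are
         # placeholders, and the plain hash stands in for a
         # `UserLocation::OrHash` argument):
         #
         #   tool = OpenAI::Responses::WebSearchTool.new(
         #     type: :web_search_preview,
         #     search_context_size: :medium,
         #     user_location: {type: :approximate, city: "San Francisco"}
         #   )
         #   tool.to_hash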
sig do params( - type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, - search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, - user_location: T.nilable(T.any(OpenAI::Models::Responses::WebSearchTool::UserLocation, OpenAI::Internal::AnyHash)) - ) - .returns(T.attached_class) + type: OpenAI::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: + OpenAI::Responses::WebSearchTool::SearchContextSize::OrSymbol, + user_location: + T.nilable(OpenAI::Responses::WebSearchTool::UserLocation::OrHash) + ).returns(T.attached_class) end def self.new( # The type of the web search tool. One of `web_search_preview` or @@ -49,34 +65,52 @@ module OpenAI search_context_size: nil, # The user's location. user_location: nil - ); end + ) + end + sig do - override - .returns( - { - type: OpenAI::Models::Responses::WebSearchTool::Type::OrSymbol, - search_context_size: OpenAI::Models::Responses::WebSearchTool::SearchContextSize::OrSymbol, - user_location: T.nilable(OpenAI::Models::Responses::WebSearchTool::UserLocation) - } - ) + override.returns( + { + type: OpenAI::Responses::WebSearchTool::Type::OrSymbol, + search_context_size: + OpenAI::Responses::WebSearchTool::SearchContextSize::OrSymbol, + user_location: + T.nilable(OpenAI::Responses::WebSearchTool::UserLocation) + } + ) + end + def to_hash end - def to_hash; end # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::WebSearchTool::Type) + end OrSymbol = T.type_alias { T.any(Symbol, String) } WEB_SEARCH_PREVIEW = - T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) + T.let( + :web_search_preview, + OpenAI::Responses::WebSearchTool::Type::TaggedSymbol + ) WEB_SEARCH_PREVIEW_2025_03_11 = - T.let(:web_search_preview_2025_03_11, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) + T.let( + :web_search_preview_2025_03_11, + OpenAI::Responses::WebSearchTool::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::Responses::WebSearchTool::Type::TaggedSymbol] + ) + end + def self.values + end end # High level guidance for the amount of context window space to use for the @@ -85,18 +119,42 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } + T.type_alias do + T.all(Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) - MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) - HIGH = T.let(:high, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) + LOW = + T.let( + :low, + OpenAI::Responses::WebSearchTool::SearchContextSize::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::WebSearchTool::SearchContextSize::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::WebSearchTool::SearchContextSize::TaggedSymbol + ) - sig { 
override.returns(T::Array[OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Responses::WebSearchTool::SearchContextSize::TaggedSymbol + ] + ) + end + def self.values + end end class UserLocation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The type of location approximation. Always `approximate`. sig { returns(Symbol) } attr_accessor :type @@ -127,8 +185,7 @@ module OpenAI region: T.nilable(String), timezone: T.nilable(String), type: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Free text input for the city of the user, e.g. `San Francisco`. @@ -143,20 +200,22 @@ module OpenAI timezone: nil, # The type of location approximation. Always `approximate`. type: :approximate - ); end + ) + end + sig do - override - .returns( - { - type: Symbol, - city: T.nilable(String), - country: T.nilable(String), - region: T.nilable(String), - timezone: T.nilable(String) - } - ) + override.returns( + { + type: Symbol, + city: T.nilable(String), + country: T.nilable(String), + region: T.nilable(String), + timezone: T.nilable(String) + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/responses_model.rbi b/rbi/openai/models/responses_model.rbi index 24670d55..5fc0cd97 100644 --- a/rbi/openai/models/responses_model.rbi +++ b/rbi/openai/models/responses_model.rbi @@ -5,34 +5,57 @@ module OpenAI module ResponsesModel extend OpenAI::Internal::Type::Union + Variants = + T.type_alias do + T.any( + String, + OpenAI::ChatModel::TaggedSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + end + module ResponsesOnlyModel extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + O1_PRO = + T.let( + :"o1-pro", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) O1_PRO_2025_03_19 = - T.let(:"o1-pro-2025-03-19", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + T.let( + :"o1-pro-2025-03-19", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = - T.let(:"computer-use-preview", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) + T.let( + :"computer-use-preview", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW_2025_03_11 = T.let( :"computer-use-preview-2025-03-11", - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol ) - sig { override.returns(T::Array[OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol] + ) + end + def self.values + end end - sig do - override - .returns( - [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol] - ) + sig { override.returns(T::Array[OpenAI::ResponsesModel::Variants]) } + def self.variants end - def self.variants; end end end end diff --git a/rbi/openai/models/static_file_chunking_strategy.rbi 
b/rbi/openai/models/static_file_chunking_strategy.rbi index ae5b7298..2bfb9abc 100644 --- a/rbi/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/openai/models/static_file_chunking_strategy.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of tokens that overlap between chunks. The default value is `400`. # # Note that the overlap must not exceed half of `max_chunk_size_tokens`. @@ -14,7 +16,12 @@ module OpenAI sig { returns(Integer) } attr_accessor :max_chunk_size_tokens - sig { params(chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer).returns(T.attached_class) } + sig do + params( + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + ).returns(T.attached_class) + end def self.new( # The number of tokens that overlap between chunks. The default value is `400`. # @@ -23,9 +30,16 @@ module OpenAI # The maximum number of tokens in each chunk. The default value is `800`. The # minimum value is `100` and the maximum value is `4096`. max_chunk_size_tokens: - ); end - sig { override.returns({chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer}) } - def to_hash; end + ) + end + + sig do + override.returns( + { chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/static_file_chunking_strategy_object.rbi b/rbi/openai/models/static_file_chunking_strategy_object.rbi index 2a978597..3e1ca623 100644 --- a/rbi/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/openai/models/static_file_chunking_strategy_object.rbi @@ -3,10 +3,12 @@ module OpenAI module Models class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::StaticFileChunkingStrategy) } attr_reader :static - sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash)).void } + sig { params(static: OpenAI::StaticFileChunkingStrategy::OrHash).void } attr_writer :static # Always `static`. @@ -14,16 +16,25 @@ module OpenAI attr_accessor :type sig do - params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + static: OpenAI::StaticFileChunkingStrategy::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( static:, # Always `static`. 
type: :static - ); end - sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { static: OpenAI::StaticFileChunkingStrategy, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/openai/models/static_file_chunking_strategy_object_param.rbi index ba278994..ede12a4b 100644 --- a/rbi/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/openai/models/static_file_chunking_strategy_object_param.rbi @@ -3,10 +3,12 @@ module OpenAI module Models class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel - sig { returns(OpenAI::Models::StaticFileChunkingStrategy) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(OpenAI::StaticFileChunkingStrategy) } attr_reader :static - sig { params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash)).void } + sig { params(static: OpenAI::StaticFileChunkingStrategy::OrHash).void } attr_writer :static # Always `static`. @@ -15,16 +17,25 @@ module OpenAI # Customize your own chunking strategy by setting chunk size and chunk overlap. sig do - params(static: T.any(OpenAI::Models::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash), type: Symbol) - .returns(T.attached_class) + params( + static: OpenAI::StaticFileChunkingStrategy::OrHash, + type: Symbol + ).returns(T.attached_class) end def self.new( static:, # Always `static`. type: :static - ); end - sig { override.returns({static: OpenAI::Models::StaticFileChunkingStrategy, type: Symbol}) } - def to_hash; end + ) + end + + sig do + override.returns( + { static: OpenAI::StaticFileChunkingStrategy, type: Symbol } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/upload.rbi b/rbi/openai/models/upload.rbi index f0483771..95c1b624 100644 --- a/rbi/openai/models/upload.rbi +++ b/rbi/openai/models/upload.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class Upload < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The Upload unique identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -34,14 +36,14 @@ module OpenAI attr_accessor :purpose # The status of the Upload. - sig { returns(OpenAI::Models::Upload::Status::TaggedSymbol) } + sig { returns(OpenAI::Upload::Status::TaggedSymbol) } attr_accessor :status # The `File` object represents a document that has been uploaded to OpenAI. - sig { returns(T.nilable(OpenAI::Models::FileObject)) } + sig { returns(T.nilable(OpenAI::FileObject)) } attr_reader :file - sig { params(file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::AnyHash))).void } + sig { params(file: T.nilable(OpenAI::FileObject::OrHash)).void } attr_writer :file # The Upload object can accept byte chunks in the form of Parts. @@ -53,11 +55,10 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Models::Upload::Status::OrSymbol, - file: T.nilable(T.any(OpenAI::Models::FileObject, OpenAI::Internal::AnyHash)), + status: OpenAI::Upload::Status::OrSymbol, + file: T.nilable(OpenAI::FileObject::OrHash), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The Upload unique identifier, which can be referenced in API endpoints. 
@@ -80,39 +81,42 @@ module OpenAI file: nil, # The object type, which is always "upload". object: :upload - ); end + ) + end + sig do - override - .returns( - { - id: String, - bytes: Integer, - created_at: Integer, - expires_at: Integer, - filename: String, - object: Symbol, - purpose: String, - status: OpenAI::Models::Upload::Status::TaggedSymbol, - file: T.nilable(OpenAI::Models::FileObject) - } - ) + override.returns( + { + id: String, + bytes: Integer, + created_at: Integer, + expires_at: Integer, + filename: String, + object: Symbol, + purpose: String, + status: OpenAI::Upload::Status::TaggedSymbol, + file: T.nilable(OpenAI::FileObject) + } + ) + end + def to_hash end - def to_hash; end # The status of the Upload. module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Upload::Status) } OrSymbol = T.type_alias { T.any(Symbol, String) } - PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::Upload::Status::TaggedSymbol) - EXPIRED = T.let(:expired, OpenAI::Models::Upload::Status::TaggedSymbol) + PENDING = T.let(:pending, OpenAI::Upload::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::Upload::Status::TaggedSymbol) + CANCELLED = T.let(:cancelled, OpenAI::Upload::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::Upload::Status::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::Upload::Status::TaggedSymbol]) } - def self.values; end + sig { override.returns(T::Array[OpenAI::Upload::Status::TaggedSymbol]) } + def self.values + end end end end diff --git a/rbi/openai/models/upload_cancel_params.rbi b/rbi/openai/models/upload_cancel_params.rbi index 8dd03bf2..283948ca 100644 --- a/rbi/openai/models/upload_cancel_params.rbi +++ b/rbi/openai/models/upload_cancel_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/upload_complete_params.rbi b/rbi/openai/models/upload_complete_params.rbi index 71c0c76b..a8770632 100644 --- a/rbi/openai/models/upload_complete_params.rbi +++ b/rbi/openai/models/upload_complete_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The ordered list of Part IDs. 
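       #
       # A complete-request sketch (the part IDs are placeholders; `md5` is an
       # optional integrity check):
       #
       #   OpenAI::Models::UploadCompleteParams.new(
       #     part_ids: %w[part_abc part_def]
       #   ).to_hash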
sig { returns(T::Array[String]) } attr_accessor :part_ids @@ -22,9 +24,8 @@ module OpenAI params( part_ids: T::Array[String], md5: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The ordered list of Part IDs. @@ -33,11 +34,20 @@ module OpenAI # matches what you expect. md5: nil, request_options: {} - ); end + ) + end + sig do - override.returns({part_ids: T::Array[String], md5: String, request_options: OpenAI::RequestOptions}) + override.returns( + { + part_ids: T::Array[String], + md5: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/upload_create_params.rbi b/rbi/openai/models/upload_create_params.rbi index cd69d21c..43ab3774 100644 --- a/rbi/openai/models/upload_create_params.rbi +++ b/rbi/openai/models/upload_create_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of bytes in the file you are uploading. sig { returns(Integer) } attr_accessor :bytes @@ -25,7 +27,7 @@ module OpenAI # # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). - sig { returns(OpenAI::Models::FilePurpose::OrSymbol) } + sig { returns(OpenAI::FilePurpose::OrSymbol) } attr_accessor :purpose sig do @@ -33,10 +35,9 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The number of bytes in the file you are uploading. @@ -54,20 +55,22 @@ module OpenAI # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - bytes: Integer, - filename: String, - mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + bytes: Integer, + filename: String, + mime_type: String, + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/uploads/part_create_params.rbi b/rbi/openai/models/uploads/part_create_params.rbi index e8d07b06..9a8822d5 100644 --- a/rbi/openai/models/uploads/part_create_params.rbi +++ b/rbi/openai/models/uploads/part_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The chunk of bytes for this Part. 
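         #
         # Any of the accepted readable types may be supplied; for instance, a
         # sketch with a placeholder file path:
         #
         #   OpenAI::Models::Uploads::PartCreateParams.new(
         #     data: Pathname("chunk_0.bin")
         #   )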
sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } attr_accessor :data @@ -14,23 +16,26 @@ module OpenAI sig do params( data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The chunk of bytes for this Part. data:, request_options: {} - ); end + ) + end + sig do - override - .returns({ - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - request_options: OpenAI::RequestOptions - }) + override.returns( + { + data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end end end end diff --git a/rbi/openai/models/uploads/upload_part.rbi b/rbi/openai/models/uploads/upload_part.rbi index 79bf9d95..a9f0b817 100644 --- a/rbi/openai/models/uploads/upload_part.rbi +++ b/rbi/openai/models/uploads/upload_part.rbi @@ -6,6 +6,8 @@ module OpenAI module Uploads class UploadPart < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The upload Part unique identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -24,7 +26,12 @@ module OpenAI # The upload Part represents a chunk of bytes we can add to an Upload object. sig do - params(id: String, created_at: Integer, upload_id: String, object: Symbol).returns(T.attached_class) + params( + id: String, + created_at: Integer, + upload_id: String, + object: Symbol + ).returns(T.attached_class) end def self.new( # The upload Part unique identifier, which can be referenced in API endpoints. @@ -35,9 +42,21 @@ module OpenAI upload_id:, # The object type, which is always `upload.part`. object: :"upload.part" - ); end - sig { override.returns({id: String, created_at: Integer, object: Symbol, upload_id: String}) } - def to_hash; end + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + object: Symbol, + upload_id: String + } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi index eefaf648..a4140933 100644 --- a/rbi/openai/models/vector_store.rbi +++ b/rbi/openai/models/vector_store.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class VectorStore < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -11,10 +13,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :created_at - sig { returns(OpenAI::Models::VectorStore::FileCounts) } + sig { returns(OpenAI::VectorStore::FileCounts) } attr_reader :file_counts - sig { params(file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::AnyHash)).void } + sig { params(file_counts: OpenAI::VectorStore::FileCounts::OrHash).void } attr_writer :file_counts # The Unix timestamp (in seconds) for when the vector store was last active. @@ -41,7 +43,7 @@ module OpenAI # The status of the vector store, which can be either `expired`, `in_progress`, or # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
- sig { returns(OpenAI::Models::VectorStore::Status::TaggedSymbol) } + sig { returns(OpenAI::VectorStore::Status::TaggedSymbol) } attr_accessor :status # The total number of bytes used by the files in the vector store. @@ -49,10 +51,12 @@ module OpenAI attr_accessor :usage_bytes # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::Models::VectorStore::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStore::ExpiresAfter)) } attr_reader :expires_after - sig { params(expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash)).void } + sig do + params(expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash).void + end attr_writer :expires_after # The Unix timestamp (in seconds) for when the vector store will expire. @@ -65,17 +69,16 @@ module OpenAI params( id: String, created_at: Integer, - file_counts: T.any(OpenAI::Models::VectorStore::FileCounts, OpenAI::Internal::AnyHash), + file_counts: OpenAI::VectorStore::FileCounts::OrHash, last_active_at: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - status: OpenAI::Models::VectorStore::Status::OrSymbol, + status: OpenAI::VectorStore::Status::OrSymbol, usage_bytes: Integer, - expires_after: T.any(OpenAI::Models::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash), + expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash, expires_at: T.nilable(Integer), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -106,28 +109,32 @@ module OpenAI expires_at: nil, # The object type, which is always `vector_store`. object: :vector_store - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, - last_active_at: T.nilable(Integer), - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - object: Symbol, - status: OpenAI::Models::VectorStore::Status::TaggedSymbol, - usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter, - expires_at: T.nilable(Integer) - } - ) + override.returns( + { + id: String, + created_at: Integer, + file_counts: OpenAI::VectorStore::FileCounts, + last_active_at: T.nilable(Integer), + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + object: Symbol, + status: OpenAI::VectorStore::Status::TaggedSymbol, + usage_bytes: Integer, + expires_after: OpenAI::VectorStore::ExpiresAfter, + expires_at: T.nilable(Integer) + } + ) + end + def to_hash end - def to_hash; end class FileCounts < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The number of files that were cancelled. sig { returns(Integer) } attr_accessor :cancelled @@ -155,8 +162,7 @@ module OpenAI failed: Integer, in_progress: Integer, total: Integer - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The number of files that were cancelled. @@ -169,18 +175,22 @@ module OpenAI in_progress:, # The total number of files. 
total: - ); end + ) + end + sig do - override - .returns({ - cancelled: Integer, - completed: Integer, - failed: Integer, - in_progress: Integer, - total: Integer - }) + override.returns( + { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } + ) + end + def to_hash end - def to_hash; end end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -189,18 +199,25 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::VectorStore::Status) } OrSymbol = T.type_alias { T.any(Symbol, String) } - EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStore::Status::TaggedSymbol) + EXPIRED = T.let(:expired, OpenAI::VectorStore::Status::TaggedSymbol) + IN_PROGRESS = + T.let(:in_progress, OpenAI::VectorStore::Status::TaggedSymbol) + COMPLETED = T.let(:completed, OpenAI::VectorStore::Status::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::VectorStore::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns(T::Array[OpenAI::VectorStore::Status::TaggedSymbol]) + end + def self.values + end end class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } @@ -218,9 +235,12 @@ module OpenAI # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. anchor: :last_active_at - ); end - sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi index ea4b49c5..38d38a30 100644 --- a/rbi/openai/models/vector_store_create_params.rbi +++ b/rbi/openai/models/vector_store_create_params.rbi @@ -6,14 +6,16 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. sig do returns( T.nilable( T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ) ) ) @@ -22,25 +24,23 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ) + ).void end attr_writer :chunking_strategy # The expiration policy for a vector store. 
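       #
       # A create-params sketch (placeholder values; plain hashes stand in for
       # the `OrHash` model arguments):
       #
       #   OpenAI::Models::VectorStoreCreateParams.new(
       #     name: "support-docs",
       #     file_ids: ["file_abc"],
       #     chunking_strategy: {
       #       type: :static,
       #       static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
       #     },
       #     expires_after: {anchor: :last_active_at, days: 7}
       #   )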
- sig { returns(T.nilable(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreCreateParams::ExpiresAfter)) } attr_reader :expires_after sig do params( - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash) - ) - .void + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash + ).void end attr_writer :expires_after @@ -71,18 +71,17 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -104,26 +103,31 @@ module OpenAI # The name of the vector store. name: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + override.returns( + { + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ), - expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, - file_ids: T::Array[String], - metadata: T.nilable(T::Hash[Symbol, String]), - name: String, - request_options: OpenAI::RequestOptions - } - ) + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + file_ids: T::Array[String], + metadata: T.nilable(T::Hash[Symbol, String]), + name: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } @@ -141,9 +145,12 @@ module OpenAI # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
anchor: :last_active_at - ); end - sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store_delete_params.rbi b/rbi/openai/models/vector_store_delete_params.rbi index fa28a443..bb86910b 100644 --- a/rbi/openai/models/vector_store_delete_params.rbi +++ b/rbi/openai/models/vector_store_delete_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store_deleted.rbi b/rbi/openai/models/vector_store_deleted.rbi index 8c4ccaf1..3236938a 100644 --- a/rbi/openai/models/vector_store_deleted.rbi +++ b/rbi/openai/models/vector_store_deleted.rbi @@ -3,6 +3,8 @@ module OpenAI module Models class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -12,11 +14,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"vector_store.deleted"); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"vector_store.deleted") + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store_list_params.rbi b/rbi/openai/models/vector_store_list_params.rbi index 5a6218c2..13a9b81d 100644 --- a/rbi/openai/models/vector_store_list_params.rbi +++ b/rbi/openai/models/vector_store_list_params.rbi @@ -6,6 +6,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -36,10 +38,10 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
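       #
       # For instance, to page forward twenty stores at a time, newest first
       # (the cursor ID is a placeholder):
       #
       #   OpenAI::Models::VectorStoreListParams.new(
       #     after: "vs_abc123",
       #     limit: 20,
       #     order: :desc
       #   )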
- sig { returns(T.nilable(OpenAI::Models::VectorStoreListParams::Order::OrSymbol)) } + sig { returns(T.nilable(OpenAI::VectorStoreListParams::Order::OrSymbol)) } attr_reader :order - sig { params(order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol).void } + sig { params(order: OpenAI::VectorStoreListParams::Order::OrSymbol).void } attr_writer :order sig do @@ -47,10 +49,9 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::VectorStoreListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -70,34 +71,42 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + limit: Integer, + order: OpenAI::VectorStoreListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::VectorStoreListParams::Order) } OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) + ASC = T.let(:asc, OpenAI::VectorStoreListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::VectorStoreListParams::Order::TaggedSymbol) - sig { override.returns(T::Array[OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[OpenAI::VectorStoreListParams::Order::TaggedSymbol] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_store_retrieve_params.rbi b/rbi/openai/models/vector_store_retrieve_params.rbi index 2f2b9fef..c96ace2f 100644 --- a/rbi/openai/models/vector_store_retrieve_params.rbi +++ b/rbi/openai/models/vector_store_retrieve_params.rbi @@ -6,18 +6,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig do - params( - request_options: T.any( - OpenAI::RequestOptions, - OpenAI::Internal::AnyHash - ) - ).returns(T.attached_class) + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) end - def self.new(request_options: {}); end - sig { override.returns({request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi index e187c6b1..97e4d691 100644 --- 
a/rbi/openai/models/vector_store_search_params.rbi +++ b/rbi/openai/models/vector_store_search_params.rbi @@ -6,19 +6,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A query string for a search sig { returns(T.any(String, T::Array[String])) } attr_accessor :query # A filter to apply based on file attributes. - sig { returns(T.nilable(T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter))) } + sig do + returns( + T.nilable(T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)) + ) + end attr_reader :filters sig do params( - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter) - ) - .void + filters: + T.any( + OpenAI::ComparisonFilter::OrHash, + OpenAI::CompoundFilter::OrHash + ) + ).void end attr_writer :filters @@ -31,14 +40,16 @@ module OpenAI attr_writer :max_num_results # Ranking options for search. - sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions)) } + sig do + returns(T.nilable(OpenAI::VectorStoreSearchParams::RankingOptions)) + end attr_reader :ranking_options sig do params( - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash) - ) - .void + ranking_options: + OpenAI::VectorStoreSearchParams::RankingOptions::OrHash + ).void end attr_writer :ranking_options @@ -52,13 +63,17 @@ module OpenAI sig do params( query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter), + filters: + T.any( + OpenAI::ComparisonFilter::OrHash, + OpenAI::CompoundFilter::OrHash + ), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash), + ranking_options: + OpenAI::VectorStoreSearchParams::RankingOptions::OrHash, rewrite_query: T::Boolean, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A query string for a search @@ -73,45 +88,81 @@ module OpenAI # Whether to rewrite the natural language query for vector search. 
rewrite_query: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter), - max_num_results: Integer, - ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, - rewrite_query: T::Boolean, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + query: T.any(String, T::Array[String]), + filters: T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter), + max_num_results: Integer, + ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, + rewrite_query: T::Boolean, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # A query string for a search module Query extend OpenAI::Internal::Type::Union - sig { override.returns([String, T::Array[String]]) } - def self.variants; end + Variants = T.type_alias { T.any(String, T::Array[String]) } - StringArray = T.let(OpenAI::Internal::Type::ArrayOf[String], OpenAI::Internal::Type::Converter) + sig do + override.returns( + T::Array[OpenAI::VectorStoreSearchParams::Query::Variants] + ) + end + def self.variants + end + + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) end # A filter to apply based on file attributes. module Filters extend OpenAI::Internal::Type::Union - sig { override.returns([OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter]) } - def self.variants; end + Variants = + T.type_alias do + T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) + end + + sig do + override.returns( + T::Array[OpenAI::VectorStoreSearchParams::Filters::Variants] + ) + end + def self.variants + end end class RankingOptions < OpenAI::Internal::Type::BaseModel - sig { returns(T.nilable(OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol)) } + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + T.nilable( + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol + ) + ) + end attr_reader :ranker - sig { params(ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol).void } + sig do + params( + ranker: + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol + ).void + end attr_writer :ranker sig { returns(T.nilable(Float)) } @@ -123,37 +174,58 @@ module OpenAI # Ranking options for search. 
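         #
         # For example (a sketch; the threshold value is a placeholder and both
         # fields may be omitted):
         #
         #   OpenAI::Models::VectorStoreSearchParams::RankingOptions.new(
         #     ranker: :auto,
         #     score_threshold: 0.5
         #   )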
sig do params( - ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, + ranker: + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) + end + def self.new(ranker: nil, score_threshold: nil) end - def self.new(ranker: nil, score_threshold: nil); end sig do - override - .returns( - {ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, score_threshold: Float} - ) + override.returns( + { + ranker: + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::OrSymbol, + score_threshold: Float + } + ) + end + def to_hash end - def to_hash; end module Ranker extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } + T.type_alias do + T.all( + Symbol, + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) + AUTO = + T.let( + :auto, + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol + ) DEFAULT_2024_11_15 = T.let( :"default-2024-11-15", - OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol ) - sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_store_search_response.rbi b/rbi/openai/models/vector_store_search_response.rbi index 26ac1ca1..bca7112b 100644 --- a/rbi/openai/models/vector_store_search_response.rbi +++ b/rbi/openai/models/vector_store_search_response.rbi @@ -3,16 +3,22 @@ module OpenAI module Models class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + end attr_accessor :attributes # Content chunks from the file. - sig { returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) } + sig do + returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content]) + end attr_accessor :content # The ID of the vector store file. 
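       #
       # A result-shape sketch (`result` and every value here are hypothetical):
       #
       #   result.filename             # => "report.pdf"
       #   result.score                # => 0.87
       #   result.content.map(&:text)  # => ["..."]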
@@ -29,13 +35,16 @@ module OpenAI sig do params( - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - content: T::Array[T.any(OpenAI::Models::VectorStoreSearchResponse::Content, OpenAI::Internal::AnyHash)], + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + content: + T::Array[ + OpenAI::Models::VectorStoreSearchResponse::Content::OrHash + ], file_id: String, filename: String, score: Float - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -52,64 +61,111 @@ module OpenAI filename:, # The similarity score for the result. score: - ); end + ) + end + sig do - override - .returns( - { - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - content: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], - file_id: String, - filename: String, - score: Float - } - ) + override.returns( + { + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + content: + T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], + file_id: String, + filename: String, + score: Float + } + ) + end + def to_hash end - def to_hash; end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ) + end + def self.variants + end end class Content < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text content returned from search. sig { returns(String) } attr_accessor :text # The type of content. - sig { returns(OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + sig do + returns( + OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol + ) + end attr_accessor :type sig do - params(text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol) - .returns(T.attached_class) + params( + text: String, + type: + OpenAI::Models::VectorStoreSearchResponse::Content::Type::OrSymbol + ).returns(T.attached_class) end def self.new( # The text content returned from search. text:, # The type of content. type: - ); end + ) + end + sig do - override - .returns({text: String, type: OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol}) + override.returns( + { + text: String, + type: + OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol + } + ) + end + def to_hash end - def to_hash; end # The type of content. 
module Type extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Content::Type + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) + TEXT = + T.let( + :text, + OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi index 5955b4ed..1bc41e32 100644 --- a/rbi/openai/models/vector_store_update_params.rbi +++ b/rbi/openai/models/vector_store_update_params.rbi @@ -6,15 +6,17 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter)) } attr_reader :expires_after sig do params( - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)) - ) - .void + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash) + ).void end attr_writer :expires_after @@ -33,12 +35,12 @@ module OpenAI sig do params( - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # The expiration policy for a vector store. @@ -53,21 +55,26 @@ module OpenAI # The name of the vector store. name: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - expires_after: T.nilable(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter), - metadata: T.nilable(T::Hash[Symbol, String]), - name: T.nilable(String), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter), + metadata: T.nilable(T::Hash[Symbol, String]), + name: T.nilable(String), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. sig { returns(Symbol) } @@ -85,9 +92,12 @@ module OpenAI # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
anchor: :last_active_at - ); end - sig { override.returns({anchor: Symbol, days: Integer}) } - def to_hash; end + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi index 5d9bf50a..018b178f 100644 --- a/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -7,20 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(vector_store_id:, request_options: {}) end - def self.new(vector_store_id:, request_options: {}); end - sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { vector_store_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/openai/models/vector_stores/file_batch_create_params.rbi index 9fbb2a61..99ae5367 100644 --- a/rbi/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # the vector store should use. Useful for tools like `file_search` that can access # files. @@ -18,7 +20,9 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + end attr_accessor :attributes # The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` @@ -27,8 +31,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ) ) ) @@ -37,28 +41,27 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ) + ).void end attr_writer :chunking_strategy sig do params( file_ids: T::Array[String], - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that @@ -75,28 +78,41 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - file_ids: T::Array[String], - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + override.returns( + { + file_ids: T::Array[String], + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ), - request_options: OpenAI::RequestOptions - } - ) + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi index d5544112..92c9e94d 100644 --- a/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id @@ -31,10 +33,21 @@ module OpenAI attr_writer :before # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
- sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::VectorStores::FileBatchListFilesParams::Filter::OrSymbol + ) + ) + end attr_reader :filter - sig { params(filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol).void } + sig do + params( + filter: + OpenAI::VectorStores::FileBatchListFilesParams::Filter::OrSymbol + ).void + end attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and @@ -47,10 +60,21 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol)) } + sig do + returns( + T.nilable( + OpenAI::VectorStores::FileBatchListFilesParams::Order::OrSymbol + ) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol).void } + sig do + params( + order: + OpenAI::VectorStores::FileBatchListFilesParams::Order::OrSymbol + ).void + end attr_writer :order sig do @@ -58,12 +82,13 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + filter: + OpenAI::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: + OpenAI::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( vector_store_id:, @@ -86,41 +111,70 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - vector_store_id: String, - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, - limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + vector_store_id: String, + after: String, + before: String, + filter: + OpenAI::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + limit: Integer, + order: + OpenAI::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
module Filter extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } + T.type_alias do + T.all( + Symbol, + OpenAI::VectorStores::FileBatchListFilesParams::Filter + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) + T.let( + :in_progress, + OpenAI::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol + ) COMPLETED = - T.let(:completed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) + T.let( + :completed, + OpenAI::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol + ) CANCELLED = - T.let(:cancelled, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) + T.let( + :cancelled, + OpenAI::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol + ] + ) + end + def self.values + end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -129,14 +183,34 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } + T.type_alias do + T.all( + Symbol, + OpenAI::VectorStores::FileBatchListFilesParams::Order + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi index c59d8363..9269b441 100644 --- a/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -7,20 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(vector_store_id:, request_options: {}) end - def self.new(vector_store_id:, request_options: {}); end - sig { override.returns({vector_store_id: String, 
request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { vector_store_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_content_params.rbi b/rbi/openai/models/vector_stores/file_content_params.rbi index e241565e..808e189c 100644 --- a/rbi/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/openai/models/vector_stores/file_content_params.rbi @@ -7,20 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(vector_store_id:, request_options: {}) end - def self.new(vector_store_id:, request_options: {}); end - sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { vector_store_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_content_response.rbi b/rbi/openai/models/vector_stores/file_content_response.rbi index 658c26b7..c0c3ab04 100644 --- a/rbi/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/openai/models/vector_stores/file_content_response.rbi @@ -4,6 +4,8 @@ module OpenAI module Models module VectorStores class FileContentResponse < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The text content sig { returns(T.nilable(String)) } attr_reader :text @@ -24,9 +26,12 @@ module OpenAI text: nil, # The content type (currently only `"text"`) type: nil - ); end - sig { override.returns({text: String, type: String}) } - def to_hash; end + ) + end + + sig { override.returns({ text: String, type: String }) } + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_create_params.rbi b/rbi/openai/models/vector_stores/file_create_params.rbi index 17f31378..2a56c040 100644 --- a/rbi/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_create_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A [File](https://platform.openai.com/docs/api-reference/files) ID that the # vector store should use. Useful for tools like `file_search` that can access # files. @@ -18,7 +20,9 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + end attr_accessor :attributes # The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` @@ -27,8 +31,8 @@ module OpenAI returns( T.nilable( T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ) ) ) @@ -37,28 +41,27 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ) + ).void end attr_writer :chunking_strategy sig do params( file_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A [File](https://platform.openai.com/docs/api-reference/files) ID that the @@ -75,28 +78,41 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - file_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Models::StaticFileChunkingStrategyObjectParam + override.returns( + { + file_id: String, + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::StaticFileChunkingStrategyObjectParam ), - request_options: OpenAI::RequestOptions - } - ) + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/vector_stores/file_delete_params.rbi b/rbi/openai/models/vector_stores/file_delete_params.rbi index d5920802..c93454bd 100644 --- a/rbi/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/openai/models/vector_stores/file_delete_params.rbi @@ -7,20 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(vector_store_id:, request_options: {}) end - def self.new(vector_store_id:, request_options: {}); end - sig { 
override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { vector_store_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_list_params.rbi b/rbi/openai/models/vector_stores/file_list_params.rbi index 5e252bbb..f3697d8c 100644 --- a/rbi/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/openai/models/vector_stores/file_list_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -28,10 +30,18 @@ module OpenAI attr_writer :before # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::VectorStores::FileListParams::Filter::OrSymbol) + ) + end attr_reader :filter - sig { params(filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol).void } + sig do + params( + filter: OpenAI::VectorStores::FileListParams::Filter::OrSymbol + ).void + end attr_writer :filter # A limit on the number of objects to be returned. Limit can range between 1 and @@ -44,22 +54,29 @@ module OpenAI # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. - sig { returns(T.nilable(OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol)) } + sig do + returns( + T.nilable(OpenAI::VectorStores::FileListParams::Order::OrSymbol) + ) + end attr_reader :order - sig { params(order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol).void } + sig do + params( + order: OpenAI::VectorStores::FileListParams::Order::OrSymbol + ).void + end attr_writer :order sig do params( after: String, before: String, - filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, + filter: OpenAI::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + order: OpenAI::VectorStores::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -81,36 +98,64 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - after: String, - before: String, - filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, - limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + after: String, + before: String, + filter: OpenAI::VectorStores::FileListParams::Filter::OrSymbol, + limit: Integer, + order: OpenAI::VectorStores::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end # Filter by file status. 
One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::VectorStores::FileListParams::Filter) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::VectorStores::FileListParams::Filter::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::VectorStores::FileListParams::Filter::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::VectorStores::FileListParams::Filter::TaggedSymbol + ) + CANCELLED = + T.let( + :cancelled, + OpenAI::VectorStores::FileListParams::Filter::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileListParams::Filter::TaggedSymbol + ] + ) + end + def self.values + end end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -118,14 +163,32 @@ module OpenAI module Order extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::VectorStores::FileListParams::Order) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) - DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) + ASC = + T.let( + :asc, + OpenAI::VectorStores::FileListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::VectorStores::FileListParams::Order::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_retrieve_params.rbi index 17c67d42..5904b6a0 100644 --- a/rbi/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/openai/models/vector_stores/file_retrieve_params.rbi @@ -7,20 +7,27 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id sig do params( vector_store_id: String, - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(vector_store_id:, request_options: {}) end - def self.new(vector_store_id:, request_options: {}); end - sig { override.returns({vector_store_id: String, request_options: OpenAI::RequestOptions}) } - def to_hash; end + sig do + override.returns( + { 
vector_store_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end end end end diff --git a/rbi/openai/models/vector_stores/file_update_params.rbi b/rbi/openai/models/vector_stores/file_update_params.rbi index 1d18d667..2a92b688 100644 --- a/rbi/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/openai/models/vector_stores/file_update_params.rbi @@ -7,6 +7,8 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :vector_store_id @@ -15,16 +17,18 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + end attr_accessor :attributes sig do params( vector_store_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) - ) - .returns(T.attached_class) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) end def self.new( vector_store_id:, @@ -35,24 +39,36 @@ module OpenAI # characters, booleans, or numbers. attributes:, request_options: {} - ); end + ) + end + sig do - override - .returns( - { - vector_store_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: OpenAI::RequestOptions - } - ) + override.returns( + { + vector_store_id: String, + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash end - def to_hash; end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi index 79686b0f..e75ae730 100644 --- a/rbi/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file.rbi @@ -6,6 +6,8 @@ module OpenAI module VectorStores class VectorStoreFile < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The identifier, which can be referenced in API endpoints. sig { returns(String) } attr_accessor :id @@ -16,14 +18,18 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. 
- sig { returns(T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError)) } + sig do + returns(T.nilable(OpenAI::VectorStores::VectorStoreFile::LastError)) + end attr_reader :last_error sig do params( - last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::AnyHash)) - ) - .void + last_error: + T.nilable( + OpenAI::VectorStores::VectorStoreFile::LastError::OrHash + ) + ).void end attr_writer :last_error @@ -34,7 +40,9 @@ module OpenAI # The status of the vector store file, which can be either `in_progress`, # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. - sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + sig do + returns(OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol) + end attr_accessor :status # The total vector store usage in bytes. Note that this may be different from the @@ -54,14 +62,19 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig { returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) } + sig do + returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + end attr_accessor :attributes # The strategy used to chunk the file. sig do returns( T.nilable( - T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) + T.any( + OpenAI::StaticFileChunkingStrategyObject, + OpenAI::OtherFileChunkingStrategyObject + ) ) ) end @@ -69,13 +82,12 @@ module OpenAI sig do params( - chunking_strategy: T.any( - OpenAI::Models::StaticFileChunkingStrategyObject, - OpenAI::Internal::AnyHash, - OpenAI::Models::OtherFileChunkingStrategyObject - ) - ) - .void + chunking_strategy: + T.any( + OpenAI::StaticFileChunkingStrategyObject::OrHash, + OpenAI::OtherFileChunkingStrategyObject::OrHash + ) + ).void end attr_writer :chunking_strategy @@ -84,19 +96,22 @@ module OpenAI params( id: String, created_at: Integer, - last_error: T.nilable(T.any(OpenAI::Models::VectorStores::VectorStoreFile::LastError, OpenAI::Internal::AnyHash)), - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::OrSymbol, + last_error: + T.nilable( + OpenAI::VectorStores::VectorStoreFile::LastError::OrHash + ), + status: OpenAI::VectorStores::VectorStoreFile::Status::OrSymbol, usage_bytes: Integer, vector_store_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::StaticFileChunkingStrategyObject, - OpenAI::Internal::AnyHash, - OpenAI::Models::OtherFileChunkingStrategyObject - ), + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::StaticFileChunkingStrategyObject::OrHash, + OpenAI::OtherFileChunkingStrategyObject::OrHash + ), object: Symbol - ) - .returns(T.attached_class) + ).returns(T.attached_class) end def self.new( # The identifier, which can be referenced in API endpoints. @@ -128,28 +143,44 @@ module OpenAI chunking_strategy: nil, # The object type, which is always `vector_store.file`. 
object: :"vector_store.file" - ); end + ) + end + sig do - override - .returns( - { - id: String, - created_at: Integer, - last_error: T.nilable(OpenAI::Models::VectorStores::VectorStoreFile::LastError), - object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol, - usage_bytes: Integer, - vector_store_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject) - } - ) + override.returns( + { + id: String, + created_at: Integer, + last_error: + T.nilable(OpenAI::VectorStores::VectorStoreFile::LastError), + object: Symbol, + status: + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol, + usage_bytes: Integer, + vector_store_id: String, + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::StaticFileChunkingStrategyObject, + OpenAI::OtherFileChunkingStrategyObject + ) + } + ) + end + def to_hash end - def to_hash; end class LastError < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # One of `server_error` or `rate_limit_exceeded`. - sig { returns(OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + sig do + returns( + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol + ) + end attr_accessor :code # A human-readable description of the error. @@ -159,40 +190,70 @@ module OpenAI # The last error associated with this vector store file. Will be `null` if there # are no errors. sig do - params(code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, message: String) - .returns(T.attached_class) + params( + code: + OpenAI::VectorStores::VectorStoreFile::LastError::Code::OrSymbol, + message: String + ).returns(T.attached_class) end def self.new( # One of `server_error` or `rate_limit_exceeded`. code:, # A human-readable description of the error. message: - ); end + ) + end + sig do - override - .returns( - {code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, message: String} - ) + override.returns( + { + code: + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol, + message: String + } + ) + end + def to_hash end - def to_hash; end # One of `server_error` or `rate_limit_exceeded`. 
module Code extend OpenAI::Internal::Type::Enum TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } + T.type_alias do + T.all( + Symbol, + OpenAI::VectorStores::VectorStoreFile::LastError::Code + ) + end OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = - T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + T.let( + :server_error, + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol + ) UNSUPPORTED_FILE = - T.let(:unsupported_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + T.let( + :unsupported_file, + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol + ) INVALID_FILE = - T.let(:invalid_file, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) + T.let( + :invalid_file, + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol + ] + ) + end + def self.values + end end end @@ -202,23 +263,58 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::VectorStores::VectorStoreFile::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } - IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol + ) + CANCELLED = + T.let( + :cancelled, + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::VectorStoreFile::Status::TaggedSymbol + ] + ) + end + def self.values + end end module Attribute extend OpenAI::Internal::Type::Union - sig { override.returns([String, Float, T::Boolean]) } - def self.variants; end + Variants = T.type_alias { T.any(String, Float, T::Boolean) } + + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] + ) + end + def self.variants + end end end end diff --git a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi index 8712684e..1dc7f034 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi @@ -6,6 +6,8 @@ module OpenAI module VectorStores class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + # The 
identifier, which can be referenced in API endpoints.
          sig { returns(String) }
          attr_accessor :id
 
@@ -15,14 +17,14 @@ module OpenAI
          sig { returns(Integer) }
          attr_accessor :created_at
 
-        sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts) }
+        sig { returns(OpenAI::VectorStores::VectorStoreFileBatch::FileCounts) }
          attr_reader :file_counts
 
          sig do
            params(
-            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::AnyHash)
-          )
-            .void
+            file_counts:
+              OpenAI::VectorStores::VectorStoreFileBatch::FileCounts::OrHash
+          ).void
          end
          attr_writer :file_counts
 
@@ -32,7 +34,11 @@ module OpenAI
 
          # The status of the vector store files batch, which can be either `in_progress`,
          # `completed`, `cancelled` or `failed`.
-        sig { returns(OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) }
+        sig do
+          returns(
+            OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol
+          )
+        end
          attr_accessor :status
 
          # The ID of the
@@ -47,12 +53,13 @@ module OpenAI
            params(
              id: String,
              created_at: Integer,
-            file_counts: T.any(OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, OpenAI::Internal::AnyHash),
-            status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::OrSymbol,
+            file_counts:
+              OpenAI::VectorStores::VectorStoreFileBatch::FileCounts::OrHash,
+            status:
+              OpenAI::VectorStores::VectorStoreFileBatch::Status::OrSymbol,
              vector_store_id: String,
              object: Symbol
-          )
-            .returns(T.attached_class)
+          ).returns(T.attached_class)
          end
          def self.new(
            # The identifier, which can be referenced in API endpoints.
@@ -71,23 +78,30 @@ module OpenAI
            vector_store_id:,
            # The object type, which is always `vector_store.file_batch`.
            object: :"vector_store.files_batch"
-        ); end
+        )
+        end
+
          sig do
-          override
-            .returns(
-              {
-                id: String,
-                created_at: Integer,
-                file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
-                object: Symbol,
-                status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol,
-                vector_store_id: String
-              }
-            )
+          override.returns(
+            {
+              id: String,
+              created_at: Integer,
+              file_counts:
+                OpenAI::VectorStores::VectorStoreFileBatch::FileCounts,
+              object: Symbol,
+              status:
+                OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol,
+              vector_store_id: String
+            }
+          )
+        end
+        def to_hash
          end
-        def to_hash; end
 
          class FileCounts < OpenAI::Internal::Type::BaseModel
+          OrHash =
+            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
            # The number of files that were cancelled.
            sig { returns(Integer) }
            attr_accessor :cancelled
@@ -115,8 +129,7 @@ module OpenAI
              failed: Integer,
              in_progress: Integer,
              total: Integer
-          )
-            .returns(T.attached_class)
+          ).returns(T.attached_class)
          end
          def self.new(
            # The number of files that were cancelled.
            cancelled:,
            # The number of files that have been processed.
            completed:,
            # The number of files that have failed to process.
            failed:,
            # The number of files currently being processed.
            in_progress:,
            # The total number of files.
            
total: - ); end + ) + end + sig do - override - .returns({ - cancelled: Integer, - completed: Integer, - failed: Integer, - in_progress: Integer, - total: Integer - }) + override.returns( + { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } + ) + end + def to_hash end - def to_hash; end end # The status of the vector store files batch, which can be either `in_progress`, @@ -148,17 +165,42 @@ module OpenAI module Status extend OpenAI::Internal::Type::Enum - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status) + end OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = - T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - CANCELLED = T.let(:cancelled, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) - FAILED = T.let(:failed, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) + T.let( + :in_progress, + OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol + ) + CANCELLED = + T.let( + :cancelled, + OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol + ) - sig { override.returns(T::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol]) } - def self.values; end + sig do + override.returns( + T::Array[ + OpenAI::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol + ] + ) + end + def self.values + end end end end diff --git a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi index 769eeefd..3317327e 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -6,6 +6,8 @@ module OpenAI module VectorStores class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + sig { returns(String) } attr_accessor :id @@ -15,11 +17,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - sig { params(id: String, deleted: T::Boolean, object: Symbol).returns(T.attached_class) } - def self.new(id:, deleted:, object: :"vector_store.file.deleted"); end + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"vector_store.file.deleted") + end - sig { override.returns({id: String, deleted: T::Boolean, object: Symbol}) } - def to_hash; end + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end end end end diff --git a/rbi/openai/request_options.rbi b/rbi/openai/request_options.rbi index 2dc1eb77..459f3a88 100644 --- a/rbi/openai/request_options.rbi +++ b/rbi/openai/request_options.rbi @@ -1,17 +1,18 @@ # typed: strong module OpenAI - RequestOpts = T.type_alias { T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) } - # Specify HTTP behaviour to use for a specific request. These options supplement # or override those provided at the client level. 
#
  # When making a request, you can pass an actual {RequestOptions} instance, or
  # simply pass a Hash with symbol keys matching the attributes on this class.
  class RequestOptions < OpenAI::Internal::Type::BaseModel
+    OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
     # @api private
-    sig { params(opts: T.any(T.self_type, T::Hash[Symbol, T.anything])).void }
-    def self.validate!(opts); end
+    sig { params(opts: OpenAI::RequestOptions::OrHash).void }
+    def self.validate!(opts)
+    end
 
     # Idempotency key to send with request and all associated retries. Will only be
     # sent for write requests.
@@ -20,7 +21,11 @@ module OpenAI
 
     # Extra query params to send with the request. These are `.merge`'d into any
     # `query` given at the client level.
-    sig { returns(T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])) }
+    sig do
+      returns(
+        T.nilable(T::Hash[String, T.nilable(T.any(T::Array[String], String))])
+      )
+    end
     attr_accessor :extra_query
 
     # Extra headers to send with the request. These are `.merge`'d into any
@@ -43,6 +48,7 @@ module OpenAI
 
     # Returns a new instance of RequestOptions.
     sig { params(values: OpenAI::Internal::AnyHash).returns(T.attached_class) }
-    def self.new(values = {}); end
+    def self.new(values = {})
+    end
   end
 end
diff --git a/rbi/openai/resources/audio.rbi b/rbi/openai/resources/audio.rbi
index 53749c68..1747c22c 100644
--- a/rbi/openai/resources/audio.rbi
+++ b/rbi/openai/resources/audio.rbi
@@ -14,7 +14,8 @@ module OpenAI
 
       # @api private
       sig { params(client: OpenAI::Client).returns(T.attached_class) }
-      def self.new(client:); end
+      def self.new(client:)
+      end
     end
   end
 end
diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi
index 4a2ffa44..2c8d03a3 100644
--- a/rbi/openai/resources/audio/speech.rbi
+++ b/rbi/openai/resources/audio/speech.rbi
@@ -8,14 +8,15 @@ module OpenAI
         sig do
           params(
             input: String,
-            model: T.any(String, OpenAI::Models::Audio::SpeechModel::OrSymbol),
-            voice: T.any(String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol),
+            model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol),
+            voice:
+              T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol),
             instructions: String,
-            response_format: OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::OrSymbol,
+            response_format:
+              OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol,
             speed: Float,
-            request_options: OpenAI::RequestOpts
-          )
-            .returns(StringIO)
+            request_options: OpenAI::RequestOptions::OrHash
+          ).returns(StringIO)
         end
         def create(
           # The text to generate audio for. The maximum length is 4096 characters.
@@ -38,10 +39,13 @@ module OpenAI
           # the default. Does not work with `gpt-4o-mini-tts`.
          
speed: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index b8fb2b38..80543b1d 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -11,17 +11,24 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, OpenAI::AudioModel::OrSymbol), + include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: OpenAI::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ], stream: T.noreturn, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + T.any( + OpenAI::Audio::Transcription, + OpenAI::Audio::TranscriptionVerbose + ) ) - .returns(T.any(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)) end def create( # The audio file object (not file name) to transcribe, in one of these formats: @@ -66,7 +73,9 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Audio::Transcriptions#create} for non-streaming # counterpart. # @@ -74,24 +83,26 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::Models::AudioModel::OrSymbol), - include: T::Array[OpenAI::Models::Audio::TranscriptionInclude::OrSymbol], + model: T.any(String, OpenAI::AudioModel::OrSymbol), + include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, - response_format: OpenAI::Models::AudioResponseFormat::OrSymbol, + response_format: OpenAI::AudioResponseFormat::OrSymbol, temperature: Float, - timestamp_granularities: T::Array[OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol], + timestamp_granularities: + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity::OrSymbol + ], stream: T.noreturn, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + T.any( + OpenAI::Audio::TranscriptionTextDeltaEvent, + OpenAI::Audio::TranscriptionTextDoneEvent + ) + ] ) - .returns( - OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Audio::TranscriptionTextDeltaEvent, - OpenAI::Models::Audio::TranscriptionTextDoneEvent - ) - ] - ) end def create_streaming( # The audio file object (not file name) to transcribe, in one of these formats: @@ -136,10 +147,13 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. 
stream: true,
          request_options: {}
-        ); end
+        )
+        end
+
        # @api private
        sig { params(client: OpenAI::Client).returns(T.attached_class) }
-        def self.new(client:); end
+        def self.new(client:)
+        end
      end
    end
  end
end
diff --git a/rbi/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi
index b9675bf8..b2427856 100644
--- a/rbi/openai/resources/audio/translations.rbi
+++ b/rbi/openai/resources/audio/translations.rbi
@@ -8,13 +8,15 @@ module OpenAI
        sig do
          params(
            file: T.any(Pathname, StringIO, IO, OpenAI::FilePart),
-            model: T.any(String, OpenAI::Models::AudioModel::OrSymbol),
+            model: T.any(String, OpenAI::AudioModel::OrSymbol),
            prompt: String,
-            response_format: OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::OrSymbol,
+            response_format:
+              OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol,
            temperature: Float,
-            request_options: OpenAI::RequestOpts
+            request_options: OpenAI::RequestOptions::OrHash
+          ).returns(
+            T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)
          )
-            .returns(T.any(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose))
        end
        def create(
          # The audio file object (not file name) to translate, in one of these formats: flac,
@@ -38,10 +40,13 @@ module OpenAI
          # automatically increase the temperature until certain thresholds are hit.
          temperature: nil,
          request_options: {}
-        ); end
+        )
+        end
+
        # @api private
        sig { params(client: OpenAI::Client).returns(T.attached_class) }
-        def self.new(client:); end
+        def self.new(client:)
+        end
      end
    end
  end
end
diff --git a/rbi/openai/resources/batches.rbi b/rbi/openai/resources/batches.rbi
index f58e4e80..64125755 100644
--- a/rbi/openai/resources/batches.rbi
+++ b/rbi/openai/resources/batches.rbi
@@ -6,13 +6,13 @@ module OpenAI
      # Creates and executes a batch from an uploaded file of requests.
      sig do
        params(
-          completion_window: OpenAI::Models::BatchCreateParams::CompletionWindow::OrSymbol,
-          endpoint: OpenAI::Models::BatchCreateParams::Endpoint::OrSymbol,
+          completion_window:
+            OpenAI::BatchCreateParams::CompletionWindow::OrSymbol,
+          endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol,
          input_file_id: String,
          metadata: T.nilable(T::Hash[Symbol, String]),
-          request_options: OpenAI::RequestOpts
-        )
-          .returns(OpenAI::Models::Batch)
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Batch)
      end
      def create(
        # The time frame within which the batch should be processed. Currently only `24h`
@@ -41,18 +41,30 @@ module OpenAI
        # a maximum length of 512 characters.
        metadata: nil,
        request_options: {}
-      ); end
+      )
+      end
+
      # Retrieves a batch.
-      sig { params(batch_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Batch) }
+      sig do
+        params(
+          batch_id: String,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Batch)
+      end
      def retrieve(
        # The ID of the batch to retrieve.
        batch_id,
        request_options: {}
-      ); end
+      )
+      end
+
      # List your organization's batches.
      sig do
-        params(after: String, limit: Integer, request_options: OpenAI::RequestOpts)
-          .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Batch])
+        params(
+          after: String,
+          limit: Integer,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(OpenAI::Internal::CursorPage[OpenAI::Batch])
      end
      def list(
        # A cursor for use in pagination. `after` is an object ID that defines your place
@@ -64,19 +76,29 @@ module OpenAI
        # 100, and the default is 20.
        limit: nil,
        request_options: {}
-      ); end
+      )
+      end
+
      # Cancels an in-progress batch. 
The batch will be in status `cancelling` for up to # 10 minutes, before changing to `cancelled`, where it will have partial results # (if any) available in the output file. - sig { params(batch_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Batch) } + sig do + params( + batch_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Batch) + end def cancel( # The ID of the batch to cancel. batch_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta.rbi b/rbi/openai/resources/beta.rbi index c0be4403..c53baf2b 100644 --- a/rbi/openai/resources/beta.rbi +++ b/rbi/openai/resources/beta.rbi @@ -11,7 +11,8 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi index f58ad07a..d58ecbb1 100644 --- a/rbi/openai/resources/beta/assistants.rbi +++ b/rbi/openai/resources/beta/assistants.rbi @@ -7,35 +7,37 @@ module OpenAI # Create an assistant with a model and instructions. sig do params( - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), + model: T.any(String, OpenAI::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantCreateParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], + tool_resources: + T.nilable( + OpenAI::Beta::AssistantCreateParams::ToolResources::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], top_p: T.nilable(Float), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Assistant) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Assistant) end def create( # ID of the model to use. You can use the @@ -106,16 +108,23 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ); end + ) + end + # Retrieves an assistant. sig do - params(assistant_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Beta::Assistant) + params( + assistant_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Assistant) end def retrieve( # The ID of the assistant to retrieve. assistant_id, request_options: {} - ); end + ) + end + # Modifies an assistant. 
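A minimal usage sketch for the batch endpoints whose signatures appear above, assuming a client built from an environment API key and a previously uploaded JSONL request file; the file ID is hypothetical, the enum symbols follow the `OrSymbol` types in the sigs, and the `:in_progress` status symbol is an assumption about how the `Batch` model surfaces its status enum:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # Create a batch over a previously uploaded JSONL request file.
    batch = client.batches.create(
      completion_window: :"24h",          # only `24h` is currently supported
      endpoint: :"/v1/chat/completions",
      input_file_id: "file-abc123"        # hypothetical file ID
    )

    # Poll once, then cancel if it is still running.
    batch = client.batches.retrieve(batch.id)
    client.batches.cancel(batch.id) if batch.status == :in_progress
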
sig do params( @@ -123,32 +132,38 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.any(String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol), - name: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( + model: T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), + name: T.nilable(String), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T::Array[ - T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool - ) - ], + tool_resources: + T.nilable( + OpenAI::Beta::AssistantUpdateParams::ToolResources::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ], top_p: T.nilable(Float), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Assistant) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Assistant) end def update( # The ID of the assistant to modify. @@ -221,17 +236,18 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, request_options: {} - ); end + ) + end + # Returns a list of assistants. sig do params( after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::AssistantListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant]) + order: OpenAI::Beta::AssistantListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::CursorPage[OpenAI::Beta::Assistant]) end def list( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -251,20 +267,27 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + # Delete an assistant. sig do - params(assistant_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::AssistantDeleted) + params( + assistant_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::AssistantDeleted) end def delete( # The ID of the assistant to delete. assistant_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index aa3e43b7..9279a612 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -13,12 +13,15 @@ module OpenAI # Create a thread. 
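A sketch of the assistant create, list, and delete methods above, assuming the `client.beta.assistants` accessor implied by the resource path and that a plain hash coerces through the `CodeInterpreterTool::OrHash` union:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    assistant = client.beta.assistants.create(
      model: :"gpt-4o",                      # any ChatModel symbol or model string
      name: "Data analyst",
      instructions: "Answer questions about uploaded CSV files.",
      tools: [{type: :code_interpreter}]     # hash form of CodeInterpreterTool
    )

    client.beta.assistants.list(order: :desc, limit: 10).each { |a| puts a.id }
    client.beta.assistants.delete(assistant.id)
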
sig do params( - messages: T::Array[T.any(OpenAI::Models::Beta::ThreadCreateParams::Message, OpenAI::Internal::AnyHash)], + messages: + T::Array[OpenAI::Beta::ThreadCreateParams::Message::OrHash], metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Thread) + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateParams::ToolResources::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Thread) end def create( # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to @@ -37,23 +40,34 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ); end + ) + end + # Retrieves a thread. - sig { params(thread_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Beta::Thread) } + sig do + params( + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Thread) + end def retrieve( # The ID of the thread to retrieve. thread_id, request_options: {} - ); end + ) + end + # Modifies a thread. sig do params( thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, OpenAI::Internal::AnyHash)), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Thread) + tool_resources: + T.nilable( + OpenAI::Beta::ThreadUpdateParams::ToolResources::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Thread) end def update( # The ID of the thread to modify. Only the `metadata` can be modified. @@ -71,17 +85,23 @@ module OpenAI # tool requires a list of vector store IDs. tool_resources: nil, request_options: {} - ); end + ) + end + # Delete a thread. sig do - params(thread_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::ThreadDeleted) + params( + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::ThreadDeleted) end def delete( # The ID of the thread to delete. thread_id, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Create a thread and run it in one request. 
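The thread lifecycle above is plain CRUD; a minimal sketch, assuming message hashes coerce through the `OrHash` params and noting that metadata values must be strings per `T::Hash[Symbol, String]`:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    thread = client.beta.threads.create(
      messages: [{role: :user, content: "Summarize the attached report."}],
      metadata: {project: "quarterly-review"}   # hypothetical metadata
    )

    # Only metadata and tool resources can change after creation.
    thread = client.beta.threads.update(thread.id, metadata: {project: "q2-review"})
    client.beta.threads.delete(thread.id)
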
@@ -92,45 +112,48 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T.nilable( - T::Array[ + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread::OrHash, + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::OrHash + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Run) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def create_and_run( # The ID of the @@ -225,7 +248,9 @@ module OpenAI # `#create_and_run` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming # counterpart. 
# @@ -237,74 +262,77 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - thread: T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, OpenAI::Internal::AnyHash), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tool_resources: T.nilable(T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, OpenAI::Internal::AnyHash)), - tools: T.nilable( - T::Array[ + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread::OrHash, + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tool_resources: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::OrHash + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + 
OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Beta::AssistantStreamEvent::ErrorEvent + ) + ] ) - .returns( - OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) - ] - ) end def stream_raw( # The ID of the @@ -399,10 +427,13 @@ module OpenAI # `#create_and_run` for streaming and non-streaming use cases, respectively. 
stream: true, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta/threads/messages.rbi b/rbi/openai/resources/beta/threads/messages.rbi index f17e3cfe..593ceae0 100644 --- a/rbi/openai/resources/beta/threads/messages.rbi +++ b/rbi/openai/resources/beta/threads/messages.rbi @@ -9,25 +9,27 @@ module OpenAI sig do params( thread_id: String, - content: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Beta::Threads::ImageFileContentBlock, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::Threads::ImageURLContentBlock, - OpenAI::Models::Beta::Threads::TextContentBlockParam - ) - ] - ), - role: OpenAI::Models::Beta::Threads::MessageCreateParams::Role::OrSymbol, - attachments: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment, OpenAI::Internal::AnyHash)] - ), + content: + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, + OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, + OpenAI::Beta::Threads::TextContentBlockParam::OrHash + ) + ] + ), + role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, + attachments: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::MessageCreateParams::Attachment::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Message) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Message) end def create( # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) @@ -52,11 +54,16 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ); end + ) + end + # Retrieve a message. sig do - params(message_id: String, thread_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::Threads::Message) + params( + message_id: String, + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Message) end def retrieve( # The ID of the message to retrieve. @@ -65,16 +72,17 @@ module OpenAI # to which this message belongs. thread_id:, request_options: {} - ); end + ) + end + # Modifies a message. sig do params( message_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Message) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Message) end def update( # Path param: The ID of the message to modify. @@ -89,7 +97,9 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ); end + ) + end + # Returns a list of messages for a given thread. 
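Note the parameter split in the messages resource above: the thread ID is the positional path param on `create`, but a keyword on `retrieve` and `update`, exactly as the sigs read. A sketch with hypothetical IDs:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    thread_id = "thread_abc123"   # hypothetical thread ID
    msg = client.beta.threads.messages.create(
      thread_id,
      role: :user,
      content: "What were Q2 revenues?"
    )

    msg = client.beta.threads.messages.retrieve(msg.id, thread_id: thread_id)
    client.beta.threads.messages.update(
      msg.id,
      thread_id: thread_id,
      metadata: {reviewed: "true"}   # metadata values must be strings
    )
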
sig do params( @@ -97,11 +107,12 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::MessageListParams::Order::OrSymbol, + order: OpenAI::Beta::Threads::MessageListParams::Order::OrSymbol, run_id: String, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::Beta::Threads::Message] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message]) end def list( # The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) @@ -126,11 +137,16 @@ module OpenAI # Filter messages by the run ID that generated them. run_id: nil, request_options: {} - ); end + ) + end + # Deletes a message. sig do - params(message_id: String, thread_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::Threads::MessageDeleted) + params( + message_id: String, + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::MessageDeleted) end def delete( # The ID of the message to delete. @@ -138,10 +154,13 @@ module OpenAI # The ID of the thread to which this message belongs. thread_id:, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index 0148ff01..a1d803bf 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -16,53 +16,57 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: + T::Array[OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] - ), + additional_messages: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::OrHash + ] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tools: T.nilable( - T::Array[ + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + 
OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Run) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def create( # Path param: The ID of the thread to run. @@ -177,7 +181,9 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming # counterpart. # @@ -186,82 +192,86 @@ module OpenAI params( thread_id: String, assistant_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: + T::Array[OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol], additional_instructions: T.nilable(String), - additional_messages: T.nilable( - T::Array[T.any(OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage, OpenAI::Internal::AnyHash)] - ), + additional_messages: + T.nilable( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::OrHash + ] + ), instructions: T.nilable(String), max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::Models::ChatModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.nilable( - T.any( - Symbol, - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONObject, - OpenAI::Models::ResponseFormatJSONSchema - ) - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.nilable( + T.any( + Symbol, + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash + ) + ), temperature: T.nilable(Float), - tool_choice: T.nilable( - T.any( - OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Beta::AssistantToolChoice, - OpenAI::Internal::AnyHash - ) - ), - tools: T.nilable( - T::Array[ + tool_choice: + T.nilable( T.any( - OpenAI::Models::Beta::CodeInterpreterTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Beta::FileSearchTool, - OpenAI::Models::Beta::FunctionTool + OpenAI::Beta::AssistantToolChoiceOption::Auto::OrSymbol, + OpenAI::Beta::AssistantToolChoice::OrHash ) - ] - ), + ), + tools: + T.nilable( + T::Array[ + T.any( + OpenAI::Beta::CodeInterpreterTool::OrHash, + OpenAI::Beta::FileSearchTool::OrHash, + OpenAI::Beta::FunctionTool::OrHash + ) + ] + ), top_p: T.nilable(Float), - truncation_strategy: T.nilable( - T.any(OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, OpenAI::Internal::AnyHash) - ), + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, - 
request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Beta::AssistantStreamEvent::ErrorEvent + ) + ] ) - .returns( - OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) - ] - ) end def create_stream_raw( # Path param: The ID of the thread to run. @@ -376,11 +386,16 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ); end + ) + end + # Retrieves a run. 
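A sketch of the non-streaming `create` from the first run sig above, polled via `retrieve` until the run settles; the terminal status symbols are an assumption inferred from the event names in the streaming return type:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    thread_id = "thread_abc123"   # hypothetical thread ID
    run = client.beta.threads.runs.create(thread_id, assistant_id: "asst_abc123")

    # Poll until the run reaches a settled state.
    until [:completed, :failed, :cancelled, :expired, :requires_action].include?(run.status)
      sleep(1)
      run = client.beta.threads.runs.retrieve(run.id, thread_id: thread_id)
    end
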
sig do - params(run_id: String, thread_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::Threads::Run) + params( + run_id: String, + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def retrieve( # The ID of the run to retrieve. @@ -389,16 +404,17 @@ module OpenAI # that was run. thread_id:, request_options: {} - ); end + ) + end + # Modifies a run. sig do params( run_id: String, thread_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Run) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def update( # Path param: The ID of the run to modify. @@ -414,7 +430,9 @@ module OpenAI # a maximum length of 512 characters. metadata: nil, request_options: {} - ); end + ) + end + # Returns a list of runs belonging to a thread. sig do params( @@ -422,10 +440,9 @@ module OpenAI after: String, before: String, limit: Integer, - order: OpenAI::Models::Beta::Threads::RunListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run]) + order: OpenAI::Beta::Threads::RunListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::CursorPage[OpenAI::Beta::Threads::Run]) end def list( # The ID of the thread the run belongs to. @@ -447,11 +464,16 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + # Cancels a run that is `in_progress`. sig do - params(run_id: String, thread_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Beta::Threads::Run) + params( + run_id: String, + thread_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def cancel( # The ID of the run to cancel. @@ -459,7 +481,9 @@ module OpenAI # The ID of the thread to which this run belongs. thread_id:, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # @@ -471,11 +495,13 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], + tool_outputs: + T::Array[ + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput::OrHash + ], stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Run) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Run) end def submit_tool_outputs( # Path param: The ID of the run that requires the tool output submission. @@ -491,7 +517,9 @@ module OpenAI # non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for # non-streaming counterpart. 
# @@ -503,40 +531,42 @@ module OpenAI params( run_id: String, thread_id: String, - tool_outputs: T::Array[T.any(OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, OpenAI::Internal::AnyHash)], + tool_outputs: + T::Array[ + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput::OrHash + ], stream: T.noreturn, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Beta::AssistantStreamEvent::ErrorEvent + ) + ] ) - .returns( - OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent - ) - ] 
- ) end def submit_tool_outputs_stream_raw( # Path param: The ID of the run that requires the tool output submission. @@ -552,10 +582,13 @@ module OpenAI # non-streaming use cases, respectively. stream: true, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/beta/threads/runs/steps.rbi b/rbi/openai/resources/beta/threads/runs/steps.rbi index 4ffb638c..a70232d6 100644 --- a/rbi/openai/resources/beta/threads/runs/steps.rbi +++ b/rbi/openai/resources/beta/threads/runs/steps.rbi @@ -12,10 +12,12 @@ module OpenAI step_id: String, thread_id: String, run_id: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Beta::Threads::Runs::RunStep) + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Beta::Threads::Runs::RunStep) end def retrieve( # Path param: The ID of the run step to retrieve. @@ -34,7 +36,9 @@ module OpenAI # for more information. include: nil, request_options: {} - ); end + ) + end + # Returns a list of run steps belonging to a run. sig do params( @@ -42,12 +46,19 @@ module OpenAI thread_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Beta::Threads::Runs::RunStepInclude::OrSymbol], + include: + T::Array[ + OpenAI::Beta::Threads::Runs::RunStepInclude::OrSymbol + ], limit: Integer, - order: OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts + order: + OpenAI::Beta::Threads::Runs::StepListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::Beta::Threads::Runs::RunStep + ] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep]) end def list( # Path param: The ID of the run the run steps belong to. @@ -80,10 +91,13 @@ module OpenAI # ascending order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/chat.rbi b/rbi/openai/resources/chat.rbi index 94e9f4ce..45c92d55 100644 --- a/rbi/openai/resources/chat.rbi +++ b/rbi/openai/resources/chat.rbi @@ -8,7 +8,8 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 9c73209e..46d3f6ba 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -28,63 +28,74 @@ module OpenAI # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
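The nested `Runs::Steps` resource above is read-only, exposing only `retrieve` and `list`. A listing sketch with hypothetical IDs, using an `order` symbol from `StepListParams`:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    steps = client.beta.threads.runs.steps.list(
      "run_abc123",                 # hypothetical run ID (positional path param)
      thread_id: "thread_abc123",   # hypothetical thread ID
      order: :desc
    )
    steps.each { |step| puts step.id }
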
sig do params( - messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), + messages: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam::OrHash, + OpenAI::Chat::ChatCompletionSystemMessageParam::OrHash, + OpenAI::Chat::ChatCompletionUserMessageParam::OrHash, + OpenAI::Chat::ChatCompletionAssistantMessageParam::OrHash, + OpenAI::Chat::ChatCompletionToolMessageParam::OrHash, + OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash + ) + ], + model: T.any(String, OpenAI::ChatModel::OrSymbol), + audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::AnyHash - ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], + function_call: + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption::OrHash + ), + functions: + T::Array[OpenAI::Chat::CompletionCreateParams::Function::OrHash], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: + T.nilable( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Modality::OrSymbol + ] + ), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, - prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)), + prediction: + T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: + T.nilable( + OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol + ), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - 
OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::AnyHash - ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], + tool_choice: + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + ), + tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), + web_search_options: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Chat::ChatCompletion) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Chat::ChatCompletion) end def create( # A list of messages comprising the conversation so far. Depending on the @@ -279,7 +290,9 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Chat::Completions#create} for non-streaming counterpart. # # **Starting a new project?** We recommend trying @@ -301,63 +314,74 @@ module OpenAI # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). sig do params( - messages: T::Array[ - T.any( - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::Chat::ChatCompletionSystemMessageParam, - OpenAI::Models::Chat::ChatCompletionUserMessageParam, - OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Models::Chat::ChatCompletionToolMessageParam, - OpenAI::Models::Chat::ChatCompletionFunctionMessageParam - ) - ], - model: T.any(String, OpenAI::Models::ChatModel::OrSymbol), - audio: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionAudioParam, OpenAI::Internal::AnyHash)), + messages: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam::OrHash, + OpenAI::Chat::ChatCompletionSystemMessageParam::OrHash, + OpenAI::Chat::ChatCompletionUserMessageParam::OrHash, + OpenAI::Chat::ChatCompletionAssistantMessageParam::OrHash, + OpenAI::Chat::ChatCompletionToolMessageParam::OrHash, + OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash + ) + ], + model: T.any(String, OpenAI::ChatModel::OrSymbol), + audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), - function_call: T.any( - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, - OpenAI::Models::Chat::ChatCompletionFunctionCallOption, - OpenAI::Internal::AnyHash - ), - functions: T::Array[T.any(OpenAI::Models::Chat::CompletionCreateParams::Function, OpenAI::Internal::AnyHash)], + function_call: + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption::OrHash + ), + functions: + T::Array[OpenAI::Chat::CompletionCreateParams::Function::OrHash], logit_bias: T.nilable(T::Hash[Symbol, Integer]), logprobs: T.nilable(T::Boolean), max_completion_tokens: T.nilable(Integer), max_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - modalities: T.nilable(T::Array[OpenAI::Models::Chat::CompletionCreateParams::Modality::OrSymbol]), + modalities: + T.nilable( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Modality::OrSymbol + ] + ), n: T.nilable(Integer), parallel_tool_calls: T::Boolean, 
- prediction: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionPredictionContent, OpenAI::Internal::AnyHash)), + prediction: + T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), - reasoning_effort: T.nilable(OpenAI::Models::ReasoningEffort::OrSymbol), - response_format: T.any( - OpenAI::Models::ResponseFormatText, - OpenAI::Internal::AnyHash, - OpenAI::Models::ResponseFormatJSONSchema, - OpenAI::Models::ResponseFormatJSONObject - ), + reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), seed: T.nilable(Integer), - service_tier: T.nilable(OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::OrSymbol), + service_tier: + T.nilable( + OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol + ), stop: T.nilable(T.any(String, T::Array[String])), store: T.nilable(T::Boolean), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - tool_choice: T.any( - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Models::Chat::ChatCompletionNamedToolChoice, - OpenAI::Internal::AnyHash - ), - tools: T::Array[T.any(OpenAI::Models::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash)], + tool_choice: + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + ), + tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, - web_search_options: T.any(OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, OpenAI::Internal::AnyHash), + web_search_options: + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk]) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::Stream[OpenAI::Chat::ChatCompletionChunk]) end def stream_raw( # A list of messages comprising the conversation so far. Depending on the @@ -552,18 +576,24 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ); end + ) + end + # Get a stored chat completion. Only Chat Completions that have been created with # the `store` parameter set to `true` will be returned. sig do - params(completion_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Chat::ChatCompletion) + params( + completion_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Chat::ChatCompletion) end def retrieve( # The ID of the chat completion to retrieve. completion_id, request_options: {} - ); end + ) + end + # Modify a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be modified. Currently, the only # supported modification is to update the `metadata` field. 
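Both chat completion entry points take the same parameters; only the return type differs, `ChatCompletion` for `create` versus a `Stream` of `ChatCompletionChunk` for `stream_raw`. A sketch of each, assuming message hashes coerce through the `OrHash` union and that chunk deltas expose `content` as in the Chat Completions API:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    completion = client.chat.completions.create(
      model: :"gpt-4o",
      messages: [{role: :user, content: "Say hello in three languages."}]
    )
    puts completion.choices.first.message.content

    # Streaming variant: iterate chunks as they arrive.
    stream = client.chat.completions.stream_raw(
      model: :"gpt-4o",
      messages: [{role: :user, content: "Count to five."}]
    )
    stream.each { |chunk| print chunk.choices.first&.delta&.content }
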
@@ -571,9 +601,8 @@ module OpenAI params( completion_id: String, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Chat::ChatCompletion) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Chat::ChatCompletion) end def update( # The ID of the chat completion to update. @@ -586,7 +615,9 @@ module OpenAI # a maximum length of 512 characters. metadata:, request_options: {} - ); end + ) + end + # List stored Chat Completions. Only Chat Completions that have been stored with # the `store` parameter set to `true` will be returned. sig do @@ -595,10 +626,9 @@ module OpenAI limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), model: String, - order: OpenAI::Models::Chat::CompletionListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion]) + order: OpenAI::Chat::CompletionListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::CursorPage[OpenAI::Chat::ChatCompletion]) end def list( # Identifier for the last chat completion from the previous pagination request. @@ -615,21 +645,28 @@ module OpenAI # `desc` for descending order. Defaults to `asc`. order: nil, request_options: {} - ); end + ) + end + # Delete a stored chat completion. Only Chat Completions that have been created # with the `store` parameter set to `true` can be deleted. sig do - params(completion_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Chat::ChatCompletionDeleted) + params( + completion_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Chat::ChatCompletionDeleted) end def delete( # The ID of the chat completion to delete. completion_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/chat/completions/messages.rbi b/rbi/openai/resources/chat/completions/messages.rbi index c0c82fe6..69a906e7 100644 --- a/rbi/openai/resources/chat/completions/messages.rbi +++ b/rbi/openai/resources/chat/completions/messages.rbi @@ -12,10 +12,14 @@ module OpenAI completion_id: String, after: String, limit: Integer, - order: OpenAI::Models::Chat::Completions::MessageListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts + order: + OpenAI::Chat::Completions::MessageListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::Chat::ChatCompletionStoreMessage + ] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage]) end def list( # The ID of the chat completion to retrieve messages from. @@ -28,10 +32,13 @@ module OpenAI # for descending order. Defaults to `asc`. order: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi index b3576715..685399d1 100644 --- a/rbi/openai/resources/completions.rbi +++ b/rbi/openai/resources/completions.rbi @@ -8,8 +8,16 @@ module OpenAI # Creates a completion for the provided prompt and parameters. 
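The stored-completion methods above only operate on completions created with `store: true`; `list` pages with a cursor, and the nested messages resource drills into a single stored completion. A sketch, assuming stored messages expose a `content` accessor:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    client.chat.completions.list(model: "gpt-4o", limit: 20, order: :desc).each do |c|
      first = client.chat.completions.messages.list(c.id, limit: 1).first
      puts "#{c.id}: #{first&.content}"
    end
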
sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), + prompt: + T.nilable( + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + ), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -20,15 +28,15 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Completion) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Completion) end def create( # ID of the model to use. You can use the @@ -140,14 +148,24 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Completions#create} for non-streaming counterpart. # # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol), - prompt: T.nilable(T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]])), + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), + prompt: + T.nilable( + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ) + ), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -158,15 +176,15 @@ module OpenAI presence_penalty: T.nilable(Float), seed: T.nilable(Integer), stop: T.nilable(T.any(String, T::Array[String])), - stream_options: T.nilable(T.any(OpenAI::Models::Chat::ChatCompletionStreamOptions, OpenAI::Internal::AnyHash)), + stream_options: + T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), temperature: T.nilable(Float), top_p: T.nilable(Float), user: String, stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::Stream[OpenAI::Models::Completion]) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::Stream[OpenAI::Completion]) end def create_streaming( # ID of the model to use. You can use the @@ -278,10 +296,13 @@ module OpenAI # `#create` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index 26fa538b..aec35a9c 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -6,14 +6,20 @@ module OpenAI # Creates an embedding vector representing the input text. 
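For the legacy completions resource, `create_streaming` returns a `Stream[Completion]` whose chunks carry incremental text; a sketch using only parameters visible in the sig, with the `choices[].text` accessor an assumption taken from the Completions API shape:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    stream = client.completions.create_streaming(
      model: :"gpt-3.5-turbo-instruct",
      prompt: "Write a haiku about type signatures.",
      temperature: 0.0
    )
    stream.each { |chunk| print chunk.choices.first&.text }
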
sig do params( - input: T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]), - model: T.any(String, OpenAI::Models::EmbeddingModel::OrSymbol), + input: + T.any( + String, + T::Array[String], + T::Array[Integer], + T::Array[T::Array[Integer]] + ), + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, - encoding_format: OpenAI::Models::EmbeddingCreateParams::EncodingFormat::OrSymbol, + encoding_format: + OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, user: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::CreateEmbeddingResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::CreateEmbeddingResponse) end def create( # Input text to embed, encoded as a string or array of tokens. To embed multiple @@ -42,10 +48,13 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index 0f330e56..a55cb86d 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -13,26 +13,25 @@ module OpenAI # the [Evals guide](https://platform.openai.com/docs/guides/evals). sig do params( - data_source_config: T.any( - OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs - ), - testing_criteria: T::Array[ + data_source_config: T.any( - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Internal::AnyHash, - OpenAI::Models::EvalStringCheckGrader, - OpenAI::Models::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateParams::TestingCriterion::Python, - OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel - ) - ], + OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, + OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash + ), + testing_criteria: + T::Array[ + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::OrHash, + OpenAI::EvalStringCheckGrader::OrHash, + OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::Python::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::OrHash + ) + ], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::EvalCreateResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::EvalCreateResponse) end def create( # The configuration for the data source used for the evaluation runs. @@ -49,25 +48,31 @@ module OpenAI # The name of the evaluation. name: nil, request_options: {} - ); end + ) + end + # Get an evaluation by ID. sig do - params(eval_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::EvalRetrieveResponse) + params( + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::EvalRetrieveResponse) end def retrieve( # The ID of the evaluation to retrieve. eval_id, request_options: {} - ); end + ) + end + # Update certain properties of an evaluation. 
sig do params( eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::EvalUpdateResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::EvalUpdateResponse) end def update( # The ID of the evaluation to update. @@ -82,17 +87,20 @@ module OpenAI # Rename the evaluation. name: nil, request_options: {} - ); end + ) + end + # List evaluations for a project. sig do params( after: String, limit: Integer, - order: OpenAI::Models::EvalListParams::Order::OrSymbol, - order_by: OpenAI::Models::EvalListParams::OrderBy::OrSymbol, - request_options: OpenAI::RequestOpts + order: OpenAI::EvalListParams::Order::OrSymbol, + order_by: OpenAI::EvalListParams::OrderBy::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::Models::EvalListResponse] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::EvalListResponse]) end def list( # Identifier for the last eval from the previous pagination request. @@ -106,17 +114,27 @@ module OpenAI # creation time or `updated_at` for last updated time. order_by: nil, request_options: {} - ); end + ) + end + # Delete an evaluation. - sig { params(eval_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::EvalDeleteResponse) } + sig do + params( + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::EvalDeleteResponse) + end def delete( # The ID of the evaluation to delete. eval_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 86dad155..183f65ed 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -11,17 +11,16 @@ module OpenAI sig do params( eval_id: String, - data_source: T.any( - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Internal::AnyHash, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource - ), + data_source: + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash + ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Evals::RunCreateResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Evals::RunCreateResponse) end def create( # The ID of the evaluation to create a run for. @@ -38,11 +37,16 @@ module OpenAI # The name of the run. name: nil, request_options: {} - ); end + ) + end + # Get an evaluation run by ID. sig do - params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Evals::RunRetrieveResponse) + params( + run_id: String, + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Evals::RunRetrieveResponse) end def retrieve( # The ID of the run to retrieve. @@ -50,18 +54,21 @@ module OpenAI # The ID of the evaluation to retrieve runs for. eval_id:, request_options: {} - ); end + ) + end + # Get a list of runs for an evaluation. 
sig do params( eval_id: String, after: String, limit: Integer, - order: OpenAI::Models::Evals::RunListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::RunListParams::Status::OrSymbol, - request_options: OpenAI::RequestOpts + order: OpenAI::Evals::RunListParams::Order::OrSymbol, + status: OpenAI::Evals::RunListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::Models::Evals::RunListResponse] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::RunListResponse]) end def list( # The ID of the evaluation to retrieve runs for. @@ -77,11 +84,16 @@ module OpenAI # | `canceled`. status: nil, request_options: {} - ); end + ) + end + # Delete an eval run. sig do - params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Evals::RunDeleteResponse) + params( + run_id: String, + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Evals::RunDeleteResponse) end def delete( # The ID of the run to delete. @@ -89,11 +101,16 @@ module OpenAI # The ID of the evaluation to delete the run from. eval_id:, request_options: {} - ); end + ) + end + # Cancel an ongoing evaluation run. sig do - params(run_id: String, eval_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::Evals::RunCancelResponse) + params( + run_id: String, + eval_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Evals::RunCancelResponse) end def cancel( # The ID of the run to cancel. @@ -101,10 +118,13 @@ module OpenAI # The ID of the evaluation whose run you want to cancel. eval_id:, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/evals/runs/output_items.rbi b/rbi/openai/resources/evals/runs/output_items.rbi index 85f0767a..8e8f764b 100644 --- a/rbi/openai/resources/evals/runs/output_items.rbi +++ b/rbi/openai/resources/evals/runs/output_items.rbi @@ -11,9 +11,8 @@ module OpenAI output_item_id: String, eval_id: String, run_id: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse) end def retrieve( # The ID of the output item to retrieve. @@ -23,7 +22,9 @@ module OpenAI # The ID of the run to retrieve. run_id:, request_options: {} - ); end + ) + end + # Get a list of output items for an evaluation run. sig do params( @@ -31,11 +32,15 @@ module OpenAI eval_id: String, after: String, limit: Integer, - order: OpenAI::Models::Evals::Runs::OutputItemListParams::Order::OrSymbol, - status: OpenAI::Models::Evals::Runs::OutputItemListParams::Status::OrSymbol, - request_options: OpenAI::RequestOpts + order: OpenAI::Evals::Runs::OutputItemListParams::Order::OrSymbol, + status: + OpenAI::Evals::Runs::OutputItemListParams::Status::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::Models::Evals::Runs::OutputItemListResponse + ] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::Evals::Runs::OutputItemListResponse]) end def list( # Path param: The ID of the run to retrieve output items for. @@ -54,10 +59,13 @@ module OpenAI # output items or `pass` to filter by passed output items. 
status: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/files.rbi b/rbi/openai/resources/files.rbi index 4afa8467..022613a9 100644 --- a/rbi/openai/resources/files.rbi +++ b/rbi/openai/resources/files.rbi @@ -27,10 +27,9 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::FileObject) + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FileObject) end def create( # The File object (not file name) to be uploaded. @@ -41,24 +40,32 @@ module OpenAI # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, request_options: {} - ); end + ) + end + # Returns information about a specific file. - sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::FileObject) } + sig do + params( + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FileObject) + end def retrieve( # The ID of the file to use for this request. file_id, request_options: {} - ); end + ) + end + # Returns a list of files. sig do params( after: String, limit: Integer, - order: OpenAI::Models::FileListParams::Order::OrSymbol, + order: OpenAI::FileListParams::Order::OrSymbol, purpose: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FileObject]) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::CursorPage[OpenAI::FileObject]) end def list( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -75,24 +82,41 @@ module OpenAI # Only return files with the given purpose. purpose: nil, request_options: {} - ); end + ) + end + # Delete a file. - sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::FileDeleted) } + sig do + params( + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FileDeleted) + end def delete( # The ID of the file to use for this request. file_id, request_options: {} - ); end + ) + end + # Returns the contents of the specified file. - sig { params(file_id: String, request_options: OpenAI::RequestOpts).returns(StringIO) } + sig do + params( + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(StringIO) + end def content( # The ID of the file to use for this request. 
file_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/fine_tuning.rbi b/rbi/openai/resources/fine_tuning.rbi index cf5c249f..e790ba5e 100644 --- a/rbi/openai/resources/fine_tuning.rbi +++ b/rbi/openai/resources/fine_tuning.rbi @@ -11,7 +11,8 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/fine_tuning/checkpoints.rbi b/rbi/openai/resources/fine_tuning/checkpoints.rbi index e9131b1d..ca0f882d 100644 --- a/rbi/openai/resources/fine_tuning/checkpoints.rbi +++ b/rbi/openai/resources/fine_tuning/checkpoints.rbi @@ -9,7 +9,8 @@ module OpenAI # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi index 1fa2933b..be76789c 100644 --- a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi +++ b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -13,9 +13,12 @@ module OpenAI params( fine_tuned_model_checkpoint: String, project_ids: T::Array[String], - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Page[ + OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse + ] ) - .returns(OpenAI::Internal::Page[OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse]) end def create( # The ID of the fine-tuned model checkpoint to create a permission for. @@ -23,7 +26,9 @@ module OpenAI # The project identifiers to grant access to. project_ids:, request_options: {} - ); end + ) + end + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). # # Organization owners can use this endpoint to view all permissions for a @@ -33,11 +38,13 @@ module OpenAI fine_tuned_model_checkpoint: String, after: String, limit: Integer, - order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, + order: + OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order::OrSymbol, project_id: String, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse ) - .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse) end def retrieve( # The ID of the fine-tuned model checkpoint to get permissions for. @@ -51,7 +58,9 @@ module OpenAI # The ID of the project to get permissions for. project_id: nil, request_options: {} - ); end + ) + end + # **NOTE:** This endpoint requires an [admin API key](../admin-api-keys). # # Organization owners can use this endpoint to delete a permission for a @@ -60,9 +69,10 @@ module OpenAI params( permission_id: String, fine_tuned_model_checkpoint: String, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse ) - .returns(OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse) end def delete( # The ID of the fine-tuned model checkpoint permission to delete. 
@@ -70,10 +80,13 @@ module OpenAI # The ID of the fine-tuned model checkpoint to delete a permission for. fine_tuned_model_checkpoint:, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index c0e05100..06098504 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -16,20 +16,27 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: T.any(String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol), + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), training_file: String, - hyperparameters: T.any(OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, OpenAI::Internal::AnyHash), - integrations: T.nilable( - T::Array[T.any(OpenAI::Models::FineTuning::JobCreateParams::Integration, OpenAI::Internal::AnyHash)] - ), + hyperparameters: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, + integrations: + T.nilable( + T::Array[ + OpenAI::FineTuning::JobCreateParams::Integration::OrHash + ] + ), metadata: T.nilable(T::Hash[Symbol, String]), - method_: T.any(OpenAI::Models::FineTuning::JobCreateParams::Method, OpenAI::Internal::AnyHash), + method_: OpenAI::FineTuning::JobCreateParams::Method::OrHash, seed: T.nilable(Integer), suffix: T.nilable(String), validation_file: T.nilable(String), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::FineTuning::FineTuningJob) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FineTuning::FineTuningJob) end def create( # The name of the model to fine-tune. You can select one of the @@ -91,28 +98,35 @@ module OpenAI # for more details. validation_file: nil, request_options: {} - ); end + ) + end + # Get info about a fine-tuning job. # # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do - params(fine_tuning_job_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::FineTuning::FineTuningJob) + params( + fine_tuning_job_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FineTuning::FineTuningJob) end def retrieve( # The ID of the fine-tuning job. fine_tuning_job_id, request_options: {} - ); end + ) + end + # List your organization's fine-tuning jobs sig do params( after: String, limit: Integer, metadata: T.nilable(T::Hash[Symbol, String]), - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::FineTuning::FineTuningJob] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob]) end def list( # Identifier for the last job from the previous pagination request. @@ -123,26 +137,33 @@ module OpenAI # Alternatively, set `metadata=null` to indicate no metadata. metadata: nil, request_options: {} - ); end + ) + end + # Immediately cancel a fine-tune job. sig do - params(fine_tuning_job_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::FineTuning::FineTuningJob) + params( + fine_tuning_job_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FineTuning::FineTuningJob) end def cancel( # The ID of the fine-tuning job to cancel. 
fine_tuning_job_id, request_options: {} - ); end + ) + end + # Get status updates for a fine-tuning job. sig do params( fine_tuning_job_id: String, after: String, limit: Integer, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::FineTuning::FineTuningJobEvent] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent]) end def list_events( # The ID of the fine-tuning job to get events for. @@ -152,10 +173,13 @@ module OpenAI # Number of events to retrieve. limit: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi b/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi index bf3ced26..0c037db9 100644 --- a/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi +++ b/rbi/openai/resources/fine_tuning/jobs/checkpoints.rbi @@ -11,9 +11,12 @@ module OpenAI fine_tuning_job_id: String, after: String, limit: Integer, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint + ] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint]) end def list( # The ID of the fine-tuning job to get checkpoints for. @@ -23,10 +26,13 @@ module OpenAI # Number of checkpoints to retrieve. limit: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index 5341ac2b..bc10faa9 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -7,14 +7,16 @@ module OpenAI sig do params( image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), - response_format: T.nilable(OpenAI::Models::ImageCreateVariationParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageCreateVariationParams::Size::OrSymbol), + response_format: + T.nilable( + OpenAI::ImageCreateVariationParams::ResponseFormat::OrSymbol + ), + size: T.nilable(OpenAI::ImageCreateVariationParams::Size::OrSymbol), user: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::ImagesResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::ImagesResponse) end def create_variation( # The image to use as the basis for the variation(s). Must be a valid PNG file, @@ -37,30 +39,33 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. 
sig do params( - image: T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: + T.any( + Pathname, + StringIO, + IO, + OpenAI::FilePart, + T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] + ), prompt: String, - background: T.nilable(OpenAI::Models::ImageEditParams::Background::OrSymbol), + background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), - quality: T.nilable(OpenAI::Models::ImageEditParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageEditParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageEditParams::Size::OrSymbol), + quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageEditParams::Size::OrSymbol), user: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::ImagesResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::ImagesResponse) end def edit( # The image(s) to edit. Must be a supported image file or an array of images. @@ -111,26 +116,31 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} - ); end + ) + end + # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). sig do params( prompt: String, - background: T.nilable(OpenAI::Models::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::Models::ImageModel::OrSymbol)), - moderation: T.nilable(OpenAI::Models::ImageGenerateParams::Moderation::OrSymbol), + background: + T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + moderation: + T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), output_compression: T.nilable(Integer), - output_format: T.nilable(OpenAI::Models::ImageGenerateParams::OutputFormat::OrSymbol), - quality: T.nilable(OpenAI::Models::ImageGenerateParams::Quality::OrSymbol), - response_format: T.nilable(OpenAI::Models::ImageGenerateParams::ResponseFormat::OrSymbol), - size: T.nilable(OpenAI::Models::ImageGenerateParams::Size::OrSymbol), - style: T.nilable(OpenAI::Models::ImageGenerateParams::Style::OrSymbol), + output_format: + T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol), user: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::ImagesResponse) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::ImagesResponse) end def generate( # A text description of the desired image(s). The maximum length is 32000 @@ -190,10 +200,13 @@ module OpenAI # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
user: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/models.rbi b/rbi/openai/resources/models.rbi index e403a7e3..bfb67109 100644 --- a/rbi/openai/resources/models.rbi +++ b/rbi/openai/resources/models.rbi @@ -5,28 +5,48 @@ module OpenAI class Models # Retrieves a model instance, providing basic information about the model such as # the owner and permissioning. - sig { params(model: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Model) } + sig do + params( + model: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Model) + end def retrieve( # The ID of the model to use for this request model, request_options: {} - ); end + ) + end + # Lists the currently available models, and provides basic information about each # one such as the owner and availability. - sig { params(request_options: OpenAI::RequestOpts).returns(OpenAI::Internal::Page[OpenAI::Models::Model]) } - def list(request_options: {}); end + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + OpenAI::Internal::Page[OpenAI::Model] + ) + end + def list(request_options: {}) + end # Delete a fine-tuned model. You must have the Owner role in your organization to # delete a model. - sig { params(model: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::ModelDeleted) } + sig do + params( + model: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::ModelDeleted) + end def delete( # The model to delete model, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi index fe3696e7..06d15b68 100644 --- a/rbi/openai/resources/moderations.rbi +++ b/rbi/openai/resources/moderations.rbi @@ -7,21 +7,20 @@ module OpenAI # the [moderation guide](https://platform.openai.com/docs/guides/moderation). sig do params( - input: T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::Models::ModerationImageURLInput, - OpenAI::Internal::AnyHash, - OpenAI::Models::ModerationTextInput - ) - ] - ), - model: T.any(String, OpenAI::Models::ModerationModel::OrSymbol), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::ModerationCreateResponse) + input: + T.any( + String, + T::Array[String], + T::Array[ + T.any( + OpenAI::ModerationImageURLInput::OrHash, + OpenAI::ModerationTextInput::OrHash + ) + ] + ), + model: T.any(String, OpenAI::ModerationModel::OrSymbol), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::ModerationCreateResponse) end def create( # Input (or inputs) to classify. Can be a single string, an array of strings, or @@ -33,10 +32,13 @@ module OpenAI # [here](https://platform.openai.com/docs/models#moderation). 
model: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index e0dec08f..2c880f53 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -21,63 +21,72 @@ module OpenAI # your own data as input for the model's response. sig do params( - input: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - ), - model: T.any( - String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + input: + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ] + ), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), + include: + T.nilable( + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), - service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol), + reasoning: T.nilable(OpenAI::Reasoning::OrHash), + service_tier: + T.nilable( + OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol + ), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ), - tools: T::Array[ + text: OpenAI::Responses::ResponseTextConfig::OrHash, + tool_choice: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ], + 
OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes::OrHash, + OpenAI::Responses::ToolChoiceFunction::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol + ), user: String, stream: T.noreturn, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Responses::Response) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Responses::Response) end def create( # Text, image, or file inputs to the model, used to generate a response. @@ -211,7 +220,9 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: false, request_options: {} - ); end + ) + end + # See {OpenAI::Resources::Responses#create} for non-streaming counterpart. # # Creates a model response. Provide @@ -227,104 +238,113 @@ module OpenAI # your own data as input for the model's response. sig do params( - input: T.any( - String, - T::Array[ - T.any( - OpenAI::Models::Responses::EasyInputMessage, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ResponseInputItem::Message, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCall, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Models::Responses::ResponseReasoningItem, - OpenAI::Models::Responses::ResponseInputItem::ItemReference - ) - ] - ), - model: T.any( - String, - OpenAI::Models::ChatModel::OrSymbol, - OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), - include: T.nilable(T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol]), + input: + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ] + ), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), + include: + T.nilable( + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ), instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), - reasoning: T.nilable(T.any(OpenAI::Models::Reasoning, OpenAI::Internal::AnyHash)), - service_tier: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::OrSymbol), + reasoning: T.nilable(OpenAI::Reasoning::OrHash), + service_tier: + T.nilable( 
+ OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol + ), store: T.nilable(T::Boolean), temperature: T.nilable(Float), - text: T.any(OpenAI::Models::Responses::ResponseTextConfig, OpenAI::Internal::AnyHash), - tool_choice: T.any( - OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, - OpenAI::Models::Responses::ToolChoiceTypes, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::ToolChoiceFunction - ), - tools: T::Array[ + text: OpenAI::Responses::ResponseTextConfig::OrHash, + tool_choice: T.any( - OpenAI::Models::Responses::FileSearchTool, - OpenAI::Internal::AnyHash, - OpenAI::Models::Responses::FunctionTool, - OpenAI::Models::Responses::ComputerTool, - OpenAI::Models::Responses::WebSearchTool - ) - ], + OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceTypes::OrHash, + OpenAI::Responses::ToolChoiceFunction::OrHash + ), + tools: + T::Array[ + T.any( + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: T.nilable(Float), - truncation: T.nilable(OpenAI::Models::Responses::ResponseCreateParams::Truncation::OrSymbol), + truncation: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol + ), user: String, stream: T.noreturn, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + T.any( + OpenAI::Responses::ResponseAudioDeltaEvent, + OpenAI::Responses::ResponseAudioDoneEvent, + OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Responses::ResponseCompletedEvent, + OpenAI::Responses::ResponseContentPartAddedEvent, + OpenAI::Responses::ResponseContentPartDoneEvent, + OpenAI::Responses::ResponseCreatedEvent, + OpenAI::Responses::ResponseErrorEvent, + OpenAI::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Responses::ResponseInProgressEvent, + OpenAI::Responses::ResponseFailedEvent, + OpenAI::Responses::ResponseIncompleteEvent, + OpenAI::Responses::ResponseOutputItemAddedEvent, + OpenAI::Responses::ResponseOutputItemDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, + OpenAI::Responses::ResponseRefusalDeltaEvent, + OpenAI::Responses::ResponseRefusalDoneEvent, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Responses::ResponseTextDeltaEvent, + OpenAI::Responses::ResponseTextDoneEvent, + OpenAI::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Responses::ResponseWebSearchCallSearchingEvent + ) + ] ) - .returns( - OpenAI::Internal::Stream[ - T.any( - OpenAI::Models::Responses::ResponseAudioDeltaEvent, - 
OpenAI::Models::Responses::ResponseAudioDoneEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Models::Responses::ResponseCompletedEvent, - OpenAI::Models::Responses::ResponseContentPartAddedEvent, - OpenAI::Models::Responses::ResponseContentPartDoneEvent, - OpenAI::Models::Responses::ResponseCreatedEvent, - OpenAI::Models::Responses::ResponseErrorEvent, - OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Models::Responses::ResponseInProgressEvent, - OpenAI::Models::Responses::ResponseFailedEvent, - OpenAI::Models::Responses::ResponseIncompleteEvent, - OpenAI::Models::Responses::ResponseOutputItemAddedEvent, - OpenAI::Models::Responses::ResponseOutputItemDoneEvent, - OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, - OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, - OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, - OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, - OpenAI::Models::Responses::ResponseRefusalDeltaEvent, - OpenAI::Models::Responses::ResponseRefusalDoneEvent, - OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Models::Responses::ResponseTextDeltaEvent, - OpenAI::Models::Responses::ResponseTextDoneEvent, - OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent - ) - ] - ) end def stream_raw( # Text, image, or file inputs to the model, used to generate a response. @@ -458,15 +478,16 @@ module OpenAI # for streaming and non-streaming use cases, respectively. stream: true, request_options: {} - ); end + ) + end + # Retrieves a model response with the given ID. sig do params( response_id: String, - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Responses::Response) + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Responses::Response) end def retrieve( # The ID of the response to retrieve. @@ -475,17 +496,27 @@ module OpenAI # Response creation above for more information. include: nil, request_options: {} - ); end + ) + end + # Deletes a model response with the given ID. - sig { params(response_id: String, request_options: OpenAI::RequestOpts).void } + sig do + params( + response_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end def delete( # The ID of the response to delete. 
response_id, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index b2d6a042..feef44a4 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -10,25 +10,24 @@ module OpenAI response_id: String, after: String, before: String, - include: T::Array[OpenAI::Models::Responses::ResponseIncludable::OrSymbol], + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], limit: Integer, - order: OpenAI::Models::Responses::InputItemListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts + order: OpenAI::Responses::InputItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + T.any( + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem + ) + ] ) - .returns( - OpenAI::Internal::CursorPage[ - T.any( - OpenAI::Models::Responses::ResponseInputMessageItem, - OpenAI::Models::Responses::ResponseOutputMessage, - OpenAI::Models::Responses::ResponseFileSearchToolCall, - OpenAI::Models::Responses::ResponseComputerToolCall, - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Models::Responses::ResponseFunctionWebSearch, - OpenAI::Models::Responses::ResponseFunctionToolCallItem, - OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem - ) - ] - ) end def list( # The ID of the response to retrieve input items for. @@ -49,10 +48,13 @@ module OpenAI # - `desc`: Return the input items in descending order. order: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/uploads.rbi b/rbi/openai/resources/uploads.rbi index ef17a778..eef3396f 100644 --- a/rbi/openai/resources/uploads.rbi +++ b/rbi/openai/resources/uploads.rbi @@ -30,10 +30,9 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::FilePurpose::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Upload) + purpose: OpenAI::FilePurpose::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Upload) end def create( # The number of bytes in the file you are uploading. @@ -51,14 +50,23 @@ module OpenAI # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, request_options: {} - ); end + ) + end + # Cancels the Upload. No Parts may be added after an Upload is cancelled. - sig { params(upload_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::Upload) } + sig do + params( + upload_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Upload) + end def cancel( # The ID of the Upload. 
upload_id, request_options: {} - ); end + ) + end + # Completes the # [Upload](https://platform.openai.com/docs/api-reference/uploads/object). # @@ -77,9 +85,8 @@ module OpenAI upload_id: String, part_ids: T::Array[String], md5: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Upload) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Upload) end def complete( # The ID of the Upload. @@ -90,10 +97,13 @@ module OpenAI # matches what you expect. md5: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/uploads/parts.rbi b/rbi/openai/resources/uploads/parts.rbi index 4534662a..ae17f0fc 100644 --- a/rbi/openai/resources/uploads/parts.rbi +++ b/rbi/openai/resources/uploads/parts.rbi @@ -19,9 +19,8 @@ module OpenAI params( upload_id: String, data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::Uploads::UploadPart) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Uploads::UploadPart) end def create( # The ID of the Upload. @@ -29,10 +28,13 @@ module OpenAI # The chunk of bytes for this Part. data:, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 6c6be823..7be8933d 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -12,18 +12,17 @@ module OpenAI # Create a vector store. sig do params( - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - expires_after: T.any(OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, OpenAI::Internal::AnyHash), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::VectorStore) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStore) end def create( # The chunking strategy used to chunk the file(s). If not set, will use the `auto` @@ -45,24 +44,33 @@ module OpenAI # The name of the vector store. name: nil, request_options: {} - ); end + ) + end + # Retrieves a vector store. - sig { params(vector_store_id: String, request_options: OpenAI::RequestOpts).returns(OpenAI::Models::VectorStore) } + sig do + params( + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStore) + end def retrieve( # The ID of the vector store to retrieve. vector_store_id, request_options: {} - ); end + ) + end + # Modifies a vector store. 
sig do params( vector_store_id: String, - expires_after: T.nilable(T.any(OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, OpenAI::Internal::AnyHash)), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::VectorStore) + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStore) end def update( # The ID of the vector store to modify. @@ -79,17 +87,18 @@ module OpenAI # The name of the vector store. name: nil, request_options: {} - ); end + ) + end + # Returns a list of vector stores. sig do params( after: String, before: String, limit: Integer, - order: OpenAI::Models::VectorStoreListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore]) + order: OpenAI::VectorStoreListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Internal::CursorPage[OpenAI::VectorStore]) end def list( # A cursor for use in pagination. `after` is an object ID that defines your place @@ -109,30 +118,42 @@ module OpenAI # order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + # Delete a vector store. sig do - params(vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::VectorStoreDeleted) + params( + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStoreDeleted) end def delete( # The ID of the vector store to delete. vector_store_id, request_options: {} - ); end + ) + end + # Search a vector store for relevant chunks based on a query and file attributes # filter. sig do params( vector_store_id: String, query: T.any(String, T::Array[String]), - filters: T.any(OpenAI::Models::ComparisonFilter, OpenAI::Internal::AnyHash, OpenAI::Models::CompoundFilter), + filters: + T.any( + OpenAI::ComparisonFilter::OrHash, + OpenAI::CompoundFilter::OrHash + ), max_num_results: Integer, - ranking_options: T.any(OpenAI::Models::VectorStoreSearchParams::RankingOptions, OpenAI::Internal::AnyHash), + ranking_options: + OpenAI::VectorStoreSearchParams::RankingOptions::OrHash, rewrite_query: T::Boolean, - request_options: OpenAI::RequestOpts + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse] ) - .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse]) end def search( # The ID of the vector store to search. @@ -149,10 +170,13 @@ module OpenAI # Whether to rewrite the natural language query for vector search. 
rewrite_query: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/vector_stores/file_batches.rbi b/rbi/openai/resources/vector_stores/file_batches.rbi index 3ef38403..43c47915 100644 --- a/rbi/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/openai/resources/vector_stores/file_batches.rbi @@ -9,15 +9,15 @@ module OpenAI params( vector_store_id: String, file_ids: T::Array[String], - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFileBatch) end def create( # The ID of the vector store for which to create a File Batch. @@ -36,11 +36,16 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ); end + ) + end + # Retrieves a vector store file batch. sig do - params(batch_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) + params( + batch_id: String, + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFileBatch) end def retrieve( # The ID of the file batch being retrieved. @@ -48,12 +53,17 @@ module OpenAI # The ID of the vector store that the file batch belongs to. vector_store_id:, request_options: {} - ); end + ) + end + # Cancel a vector store file batch. This attempts to cancel the processing of # files in this batch as soon as possible. sig do - params(batch_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::VectorStores::VectorStoreFileBatch) + params( + batch_id: String, + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFileBatch) end def cancel( # The ID of the file batch to cancel. @@ -61,7 +71,9 @@ module OpenAI # The ID of the vector store that the file batch belongs to. vector_store_id:, request_options: {} - ); end + ) + end + # Returns a list of vector store files in a batch. sig do params( @@ -69,12 +81,15 @@ module OpenAI vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, + filter: + OpenAI::VectorStores::FileBatchListFilesParams::Filter::OrSymbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts + order: + OpenAI::VectorStores::FileBatchListFilesParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::VectorStores::VectorStoreFile] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list_files( # Path param: The ID of the file batch that the files belong to. 
@@ -101,10 +116,13 @@ module OpenAI # ascending order and `desc` for descending order. order: nil, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/rbi/openai/resources/vector_stores/files.rbi b/rbi/openai/resources/vector_stores/files.rbi index e70c0adf..8d760b4a 100644 --- a/rbi/openai/resources/vector_stores/files.rbi +++ b/rbi/openai/resources/vector_stores/files.rbi @@ -11,15 +11,15 @@ module OpenAI params( vector_store_id: String, file_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: T.any( - OpenAI::Models::AutoFileChunkingStrategyParam, - OpenAI::Internal::AnyHash, - OpenAI::Models::StaticFileChunkingStrategyObjectParam - ), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::VectorStores::VectorStoreFile) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + chunking_strategy: + T.any( + OpenAI::AutoFileChunkingStrategyParam::OrHash, + OpenAI::StaticFileChunkingStrategyObjectParam::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFile) end def create( # The ID of the vector store for which to create a File. @@ -38,11 +38,16 @@ module OpenAI # strategy. Only applicable if `file_ids` is non-empty. chunking_strategy: nil, request_options: {} - ); end + ) + end + # Retrieves a vector store file. sig do - params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::VectorStores::VectorStoreFile) + params( + file_id: String, + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFile) end def retrieve( # The ID of the file being retrieved. @@ -50,16 +55,18 @@ module OpenAI # The ID of the vector store that the file belongs to. vector_store_id:, request_options: {} - ); end + ) + end + # Update attributes on a vector store file. sig do params( file_id: String, vector_store_id: String, - attributes: T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - request_options: OpenAI::RequestOpts - ) - .returns(OpenAI::Models::VectorStores::VectorStoreFile) + attributes: + T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFile) end def update( # Path param: The ID of the file to update attributes. @@ -73,19 +80,22 @@ module OpenAI # maximum length of 512 characters, booleans, or numbers. attributes:, request_options: {} - ); end + ) + end + # Returns a list of vector store files. sig do params( vector_store_id: String, after: String, before: String, - filter: OpenAI::Models::VectorStores::FileListParams::Filter::OrSymbol, + filter: OpenAI::VectorStores::FileListParams::Filter::OrSymbol, limit: Integer, - order: OpenAI::Models::VectorStores::FileListParams::Order::OrSymbol, - request_options: OpenAI::RequestOpts + order: OpenAI::VectorStores::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::VectorStores::VectorStoreFile] ) - .returns(OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile]) end def list( # The ID of the vector store that the files belong to. @@ -109,14 +119,19 @@ module OpenAI # order and `desc` for descending order. 
order: nil, request_options: {} - ); end + ) + end + # Delete a vector store file. This will remove the file from the vector store but # the file itself will not be deleted. To delete the file, use the # [delete file](https://platform.openai.com/docs/api-reference/files/delete) # endpoint. sig do - params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Models::VectorStores::VectorStoreFileDeleted) + params( + file_id: String, + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::VectorStores::VectorStoreFileDeleted) end def delete( # The ID of the file to delete. @@ -124,11 +139,20 @@ module OpenAI # The ID of the vector store that the file belongs to. vector_store_id:, request_options: {} - ); end + ) + end + # Retrieve the parsed contents of a vector store file. sig do - params(file_id: String, vector_store_id: String, request_options: OpenAI::RequestOpts) - .returns(OpenAI::Internal::Page[OpenAI::Models::VectorStores::FileContentResponse]) + params( + file_id: String, + vector_store_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Page[ + OpenAI::Models::VectorStores::FileContentResponse + ] + ) end def content( # The ID of the file within the vector store. @@ -136,10 +160,13 @@ module OpenAI # The ID of the vector store. vector_store_id:, request_options: {} - ); end + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } - def self.new(client:); end + def self.new(client:) + end end end end diff --git a/sig/openai/internal/type/request_parameters.rbs b/sig/openai/internal/type/request_parameters.rbs index da025b59..6519af95 100644 --- a/sig/openai/internal/type/request_parameters.rbs +++ b/sig/openai/internal/type/request_parameters.rbs @@ -4,7 +4,9 @@ module OpenAI type request_parameters = { request_options: OpenAI::request_opts } module RequestParameters - attr_accessor request_options: OpenAI::request_opts + attr_reader request_options: OpenAI::request_opts + + def request_options=: (OpenAI::request_opts) -> OpenAI::request_opts module Converter def dump_request: (top params) -> [top, ::Hash[Symbol, top]] diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 3d567669..90320393 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,7 +3,7 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | OpenAI::Models::AllModels::responses_only_model + | OpenAI::AllModels::responses_only_model module AllModels extend OpenAI::Internal::Type::Union @@ -22,10 +22,10 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::Models::AllModels::responses_only_model] + def self?.values: -> ::Array[OpenAI::AllModels::responses_only_model] end - def self?.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::AllModels::responses_only_model] + def self?.variants: -> ::Array[OpenAI::Models::all_models] end end end diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 97aff0df..2faac5ef 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -46,14 +46,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Audio::speech_create_params - type model = 
String | OpenAI::Models::Audio::speech_model module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::Audio::speech_model] + def self?.variants: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::model] end type voice = @@ -73,17 +71,7 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, (:alloy - | :ash - | :ballad - | :coral - | :echo - | :fable - | :onyx - | :nova - | :sage - | :shimmer - | :verse)] + def self?.variants: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::voice] ALLOY: :alloy ASH: :ash diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index 12d41fc2..3e522926 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -4,25 +4,23 @@ module OpenAI type transcription = { text: String, - logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob] + logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] } class Transcription < OpenAI::Internal::Type::BaseModel attr_accessor text: String - attr_reader logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob]? + attr_reader logprobs: ::Array[OpenAI::Audio::Transcription::Logprob]? def logprobs=: ( - ::Array[OpenAI::Models::Audio::Transcription::Logprob] - ) -> ::Array[OpenAI::Models::Audio::Transcription::Logprob] + ::Array[OpenAI::Audio::Transcription::Logprob] + ) -> ::Array[OpenAI::Audio::Transcription::Logprob] def initialize: ( text: String, - ?logprobs: ::Array[OpenAI::Models::Audio::Transcription::Logprob] + ?logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] ) -> void - def to_hash: -> OpenAI::Models::Audio::transcription - type logprob = { token: String, bytes: ::Array[Float], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -43,8 +41,6 @@ module OpenAI ?bytes: ::Array[Float], ?logprob: Float ) -> void - - def to_hash: -> OpenAI::Models::Audio::Transcription::logprob end end end diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index e16d6287..fda04c86 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -10,7 +10,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::audio_response_format, temperature: Float, - timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] + timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] } & OpenAI::Internal::Type::request_parameters @@ -46,11 +46,11 @@ module OpenAI def temperature=: (Float) -> Float - attr_reader timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]? + attr_reader timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity]? 
def timestamp_granularities=: ( - ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] + ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + ) -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( file: Pathname | StringIO | IO | OpenAI::FilePart, @@ -60,18 +60,16 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Audio::transcription_create_params - type model = String | OpenAI::Models::audio_model module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::audio_model] + def self?.variants: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::model] end type timestamp_granularity = :word | :segment @@ -82,7 +80,7 @@ module OpenAI WORD: :word SEGMENT: :segment - def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] + def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] end end end diff --git a/sig/openai/models/audio/transcription_create_response.rbs b/sig/openai/models/audio/transcription_create_response.rbs index f4931f11..5e18958f 100644 --- a/sig/openai/models/audio/transcription_create_response.rbs +++ b/sig/openai/models/audio/transcription_create_response.rbs @@ -2,13 +2,12 @@ module OpenAI module Models module Audio type transcription_create_response = - OpenAI::Models::Audio::Transcription - | OpenAI::Models::Audio::TranscriptionVerbose + OpenAI::Audio::Transcription | OpenAI::Audio::TranscriptionVerbose module TranscriptionCreateResponse extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] + def self?.variants: -> ::Array[OpenAI::Models::Audio::transcription_create_response] end end end diff --git a/sig/openai/models/audio/transcription_segment.rbs b/sig/openai/models/audio/transcription_segment.rbs index b0748151..9c38a1be 100644 --- a/sig/openai/models/audio/transcription_segment.rbs +++ b/sig/openai/models/audio/transcription_segment.rbs @@ -48,8 +48,6 @@ module OpenAI text: String, tokens: ::Array[Integer] ) -> void - - def to_hash: -> OpenAI::Models::Audio::transcription_segment end end end diff --git a/sig/openai/models/audio/transcription_stream_event.rbs b/sig/openai/models/audio/transcription_stream_event.rbs index 158d4540..f6c55919 100644 --- a/sig/openai/models/audio/transcription_stream_event.rbs +++ b/sig/openai/models/audio/transcription_stream_event.rbs @@ -2,13 +2,13 @@ module OpenAI module Models module Audio type transcription_stream_event = - OpenAI::Models::Audio::TranscriptionTextDeltaEvent - | OpenAI::Models::Audio::TranscriptionTextDoneEvent + OpenAI::Audio::TranscriptionTextDeltaEvent + | OpenAI::Audio::TranscriptionTextDoneEvent module TranscriptionStreamEvent extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent] + def self?.variants: -> 
::Array[OpenAI::Models::Audio::transcription_stream_event] end end end diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs index c34d5fcd..09c419d7 100644 --- a/sig/openai/models/audio/transcription_text_delta_event.rbs +++ b/sig/openai/models/audio/transcription_text_delta_event.rbs @@ -5,7 +5,7 @@ module OpenAI { delta: String, type: :"transcript.text.delta", - logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] } class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel @@ -13,20 +13,18 @@ module OpenAI attr_accessor type: :"transcript.text.delta" - attr_reader logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob]? + attr_reader logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob]? def logprobs=: ( - ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] + ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] + ) -> ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] def initialize: ( delta: String, - ?logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob], + ?logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob], ?type: :"transcript.text.delta" ) -> void - def to_hash: -> OpenAI::Models::Audio::transcription_text_delta_event - type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -47,8 +45,6 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void - - def to_hash: -> OpenAI::Models::Audio::TranscriptionTextDeltaEvent::logprob end end end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs index f6216cee..c2fb0bc1 100644 --- a/sig/openai/models/audio/transcription_text_done_event.rbs +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -5,7 +5,7 @@ module OpenAI { text: String, type: :"transcript.text.done", - logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel @@ -13,20 +13,18 @@ module OpenAI attr_accessor type: :"transcript.text.done" - attr_reader logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob]? + attr_reader logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob]? 
def logprobs=: ( - ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] + ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + ) -> ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] def initialize: ( text: String, - ?logprobs: ::Array[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob], + ?logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob], ?type: :"transcript.text.done" ) -> void - def to_hash: -> OpenAI::Models::Audio::transcription_text_done_event - type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -47,8 +45,6 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void - - def to_hash: -> OpenAI::Models::Audio::TranscriptionTextDoneEvent::logprob end end end diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs index 34e8fe7a..6e40e651 100644 --- a/sig/openai/models/audio/transcription_verbose.rbs +++ b/sig/openai/models/audio/transcription_verbose.rbs @@ -6,8 +6,8 @@ module OpenAI duration: Float, language: String, text: String, - segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment], - words: ::Array[OpenAI::Models::Audio::TranscriptionWord] + segments: ::Array[OpenAI::Audio::TranscriptionSegment], + words: ::Array[OpenAI::Audio::TranscriptionWord] } class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel @@ -17,27 +17,25 @@ module OpenAI attr_accessor text: String - attr_reader segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]? + attr_reader segments: ::Array[OpenAI::Audio::TranscriptionSegment]? def segments=: ( - ::Array[OpenAI::Models::Audio::TranscriptionSegment] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionSegment] + ::Array[OpenAI::Audio::TranscriptionSegment] + ) -> ::Array[OpenAI::Audio::TranscriptionSegment] - attr_reader words: ::Array[OpenAI::Models::Audio::TranscriptionWord]? + attr_reader words: ::Array[OpenAI::Audio::TranscriptionWord]? 
def words=: ( - ::Array[OpenAI::Models::Audio::TranscriptionWord] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionWord] + ::Array[OpenAI::Audio::TranscriptionWord] + ) -> ::Array[OpenAI::Audio::TranscriptionWord] def initialize: ( duration: Float, language: String, text: String, - ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment], - ?words: ::Array[OpenAI::Models::Audio::TranscriptionWord] + ?segments: ::Array[OpenAI::Audio::TranscriptionSegment], + ?words: ::Array[OpenAI::Audio::TranscriptionWord] ) -> void - - def to_hash: -> OpenAI::Models::Audio::transcription_verbose end end end diff --git a/sig/openai/models/audio/transcription_word.rbs b/sig/openai/models/audio/transcription_word.rbs index e9332bbd..eb48b12d 100644 --- a/sig/openai/models/audio/transcription_word.rbs +++ b/sig/openai/models/audio/transcription_word.rbs @@ -11,8 +11,6 @@ module OpenAI attr_accessor word: String def initialize: (end_: Float, start: Float, word: String) -> void - - def to_hash: -> OpenAI::Models::Audio::transcription_word end end end diff --git a/sig/openai/models/audio/translation.rbs b/sig/openai/models/audio/translation.rbs index 43eeef91..cc45a2b2 100644 --- a/sig/openai/models/audio/translation.rbs +++ b/sig/openai/models/audio/translation.rbs @@ -7,8 +7,6 @@ module OpenAI attr_accessor text: String def initialize: (text: String) -> void - - def to_hash: -> OpenAI::Models::Audio::translation end end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 8fcd623b..5a9c7d5f 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -42,14 +42,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Audio::translation_create_params - type model = String | OpenAI::Models::audio_model module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::audio_model] + def self?.variants: -> ::Array[OpenAI::Models::Audio::TranslationCreateParams::model] end type response_format = :json | :text | :srt | :verbose_json | :vtt diff --git a/sig/openai/models/audio/translation_create_response.rbs b/sig/openai/models/audio/translation_create_response.rbs index 785dfce2..a792f349 100644 --- a/sig/openai/models/audio/translation_create_response.rbs +++ b/sig/openai/models/audio/translation_create_response.rbs @@ -2,13 +2,12 @@ module OpenAI module Models module Audio type translation_create_response = - OpenAI::Models::Audio::Translation - | OpenAI::Models::Audio::TranslationVerbose + OpenAI::Audio::Translation | OpenAI::Audio::TranslationVerbose module TranslationCreateResponse extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] + def self?.variants: -> ::Array[OpenAI::Models::Audio::translation_create_response] end end end diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs index 3f69b2ca..20796402 100644 --- a/sig/openai/models/audio/translation_verbose.rbs +++ b/sig/openai/models/audio/translation_verbose.rbs @@ -6,7 +6,7 @@ module OpenAI duration: Float, language: String, text: String, - segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment] + segments: ::Array[OpenAI::Audio::TranscriptionSegment] } class TranslationVerbose < OpenAI::Internal::Type::BaseModel @@ -16,20 +16,18 @@ module OpenAI attr_accessor text: 
String - attr_reader segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment]? + attr_reader segments: ::Array[OpenAI::Audio::TranscriptionSegment]? def segments=: ( - ::Array[OpenAI::Models::Audio::TranscriptionSegment] - ) -> ::Array[OpenAI::Models::Audio::TranscriptionSegment] + ::Array[OpenAI::Audio::TranscriptionSegment] + ) -> ::Array[OpenAI::Audio::TranscriptionSegment] def initialize: ( duration: Float, language: String, text: String, - ?segments: ::Array[OpenAI::Models::Audio::TranscriptionSegment] + ?segments: ::Array[OpenAI::Audio::TranscriptionSegment] ) -> void - - def to_hash: -> OpenAI::Models::Audio::translation_verbose end end end diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs index a5285e7b..81e99fa2 100644 --- a/sig/openai/models/auto_file_chunking_strategy_param.rbs +++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs @@ -6,8 +6,6 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void - - def to_hash: -> OpenAI::Models::auto_file_chunking_strategy_param end end end diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs index 75a90985..702f7dff 100644 --- a/sig/openai/models/batch.rbs +++ b/sig/openai/models/batch.rbs @@ -8,12 +8,12 @@ module OpenAI endpoint: String, input_file_id: String, object: :batch, - status: OpenAI::Models::Batch::status, + status: OpenAI::Batch::status, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, error_file_id: String, - errors: OpenAI::Models::Batch::Errors, + errors: OpenAI::Batch::Errors, expired_at: Integer, expires_at: Integer, failed_at: Integer, @@ -21,7 +21,7 @@ module OpenAI in_progress_at: Integer, metadata: OpenAI::Models::metadata?, output_file_id: String, - request_counts: OpenAI::Models::BatchRequestCounts + request_counts: OpenAI::BatchRequestCounts } class Batch < OpenAI::Internal::Type::BaseModel @@ -37,7 +37,7 @@ module OpenAI attr_accessor object: :batch - attr_accessor status: OpenAI::Models::Batch::status + attr_accessor status: OpenAI::Batch::status attr_reader cancelled_at: Integer? @@ -55,11 +55,9 @@ module OpenAI def error_file_id=: (String) -> String - attr_reader errors: OpenAI::Models::Batch::Errors? + attr_reader errors: OpenAI::Batch::Errors? - def errors=: ( - OpenAI::Models::Batch::Errors - ) -> OpenAI::Models::Batch::Errors + def errors=: (OpenAI::Batch::Errors) -> OpenAI::Batch::Errors attr_reader expired_at: Integer? @@ -87,11 +85,11 @@ module OpenAI def output_file_id=: (String) -> String - attr_reader request_counts: OpenAI::Models::BatchRequestCounts? + attr_reader request_counts: OpenAI::BatchRequestCounts? 
def request_counts=: ( - OpenAI::Models::BatchRequestCounts - ) -> OpenAI::Models::BatchRequestCounts + OpenAI::BatchRequestCounts + ) -> OpenAI::BatchRequestCounts def initialize: ( id: String, @@ -99,12 +97,12 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Models::Batch::status, + status: OpenAI::Batch::status, ?cancelled_at: Integer, ?cancelling_at: Integer, ?completed_at: Integer, ?error_file_id: String, - ?errors: OpenAI::Models::Batch::Errors, + ?errors: OpenAI::Batch::Errors, ?expired_at: Integer, ?expires_at: Integer, ?failed_at: Integer, @@ -112,12 +110,10 @@ module OpenAI ?in_progress_at: Integer, ?metadata: OpenAI::Models::metadata?, ?output_file_id: String, - ?request_counts: OpenAI::Models::BatchRequestCounts, + ?request_counts: OpenAI::BatchRequestCounts, ?object: :batch ) -> void - def to_hash: -> OpenAI::Models::batch - type status = :validating | :failed @@ -140,29 +136,24 @@ module OpenAI CANCELLING: :cancelling CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::Models::Batch::status] + def self?.values: -> ::Array[OpenAI::Batch::status] end - type errors = - { data: ::Array[OpenAI::Models::BatchError], object: String } + type errors = { data: ::Array[OpenAI::BatchError], object: String } class Errors < OpenAI::Internal::Type::BaseModel - attr_reader data: ::Array[OpenAI::Models::BatchError]? + attr_reader data: ::Array[OpenAI::BatchError]? - def data=: ( - ::Array[OpenAI::Models::BatchError] - ) -> ::Array[OpenAI::Models::BatchError] + def data=: (::Array[OpenAI::BatchError]) -> ::Array[OpenAI::BatchError] attr_reader object: String? def object=: (String) -> String def initialize: ( - ?data: ::Array[OpenAI::Models::BatchError], + ?data: ::Array[OpenAI::BatchError], ?object: String ) -> void - - def to_hash: -> OpenAI::Models::Batch::errors end end end diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs index c8b34f9f..9b655647 100644 --- a/sig/openai/models/batch_cancel_params.rbs +++ b/sig/openai/models/batch_cancel_params.rbs @@ -7,8 +7,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::batch_cancel_params end end end diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index cacafcf2..b5519cea 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -29,8 +29,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::batch_create_params - type completion_window = :"24h" module CompletionWindow diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs index 75e828eb..07c43d31 100644 --- a/sig/openai/models/batch_error.rbs +++ b/sig/openai/models/batch_error.rbs @@ -22,8 +22,6 @@ module OpenAI ?message: String, ?param: String? 
) -> void - - def to_hash: -> OpenAI::Models::batch_error end end end diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index 9cb84ad1..9f1b2961 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -21,8 +21,6 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::batch_list_params end end end diff --git a/sig/openai/models/batch_request_counts.rbs b/sig/openai/models/batch_request_counts.rbs index 56860e0e..38d29256 100644 --- a/sig/openai/models/batch_request_counts.rbs +++ b/sig/openai/models/batch_request_counts.rbs @@ -15,8 +15,6 @@ module OpenAI failed: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::batch_request_counts end end end diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index 681750c7..b1deb5c3 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::batch_retrieve_params end end end diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs index 557bcacf..867ab8e5 100644 --- a/sig/openai/models/beta/assistant.rbs +++ b/sig/openai/models/beta/assistant.rbs @@ -14,7 +14,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::assistant_tool], response_format: OpenAI::Models::Beta::assistant_response_format_option?, temperature: Float?, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?, + tool_resources: OpenAI::Beta::Assistant::ToolResources?, top_p: Float? } @@ -41,7 +41,7 @@ module OpenAI attr_accessor temperature: Float? - attr_accessor tool_resources: OpenAI::Models::Beta::Assistant::ToolResources? + attr_accessor tool_resources: OpenAI::Beta::Assistant::ToolResources? attr_accessor top_p: Float? @@ -56,39 +56,35 @@ module OpenAI tools: ::Array[OpenAI::Models::Beta::assistant_tool], ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?tool_resources: OpenAI::Models::Beta::Assistant::ToolResources?, + ?tool_resources: OpenAI::Beta::Assistant::ToolResources?, ?top_p: Float?, ?object: :assistant ) -> void - def to_hash: -> OpenAI::Models::Beta::assistant - type tool_resources = { - code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch + code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch } class ToolResources < OpenAI::Internal::Type::BaseModel - attr_reader code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter? + attr_reader code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter? def code_interpreter=: ( - OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter - ) -> OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter + OpenAI::Beta::Assistant::ToolResources::CodeInterpreter + ) -> OpenAI::Beta::Assistant::ToolResources::CodeInterpreter - attr_reader file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch? + attr_reader file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch? 
def file_search=: ( - OpenAI::Models::Beta::Assistant::ToolResources::FileSearch - ) -> OpenAI::Models::Beta::Assistant::ToolResources::FileSearch + OpenAI::Beta::Assistant::ToolResources::FileSearch + ) -> OpenAI::Beta::Assistant::ToolResources::FileSearch def initialize: ( - ?code_interpreter: OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::Assistant::ToolResources::FileSearch + ?code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch ) -> void - def to_hash: -> OpenAI::Models::Beta::Assistant::tool_resources - type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -97,8 +93,6 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::Assistant::ToolResources::code_interpreter end type file_search = { vector_store_ids: ::Array[String] } @@ -109,8 +103,6 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::Assistant::ToolResources::file_search end end end diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index 83167a35..574c22a2 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI reasoning_effort: OpenAI::Models::reasoning_effort?, response_format: OpenAI::Models::Beta::assistant_response_format_option?, temperature: Float?, - tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, + tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], top_p: Float? } @@ -37,7 +37,7 @@ module OpenAI attr_accessor temperature: Float? - attr_accessor tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources? + attr_accessor tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources? attr_reader tools: ::Array[OpenAI::Models::Beta::assistant_tool]? 
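The RBS churn above is a namespace flattening: types previously written `OpenAI::Models::Beta::...` are now referenced as `OpenAI::Beta::...`. A hedged sketch of what that implies at runtime follows; the constant aliasing and the hash-to-model coercion are assumptions inferred from these signatures, not guarantees made by this patch.

    require "openai"

    # Assumed aliasing: the short constant resolves to the same class as the
    # canonical one under OpenAI::Models.
    OpenAI::Beta::Assistant.equal?(OpenAI::Models::Beta::Assistant) # => true (assumed)

    # Nested hashes are assumed to coerce into the typed models named in the
    # signatures above when constructing a BaseModel subclass.
    resources = OpenAI::Beta::Assistant::ToolResources.new(
      code_interpreter: {file_ids: ["file-abc"]},  # hypothetical file ID
      file_search: {vector_store_ids: ["vs_123"]}  # hypothetical store ID
    )
    resources.file_search.vector_store_ids # => ["vs_123"] (assumed coercion)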
@@ -56,48 +56,44 @@ module OpenAI ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], ?top_p: Float?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::assistant_create_params - type model = String | OpenAI::Models::chat_model module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::chat_model] + def self?.variants: -> ::Array[OpenAI::Models::Beta::AssistantCreateParams::model] end type tool_resources = { - code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch + code_interpreter: OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch } class ToolResources < OpenAI::Internal::Type::BaseModel - attr_reader code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter? + attr_reader code_interpreter: OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter? def code_interpreter=: ( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter - ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter + ) -> OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter - attr_reader file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch? + attr_reader file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch? 
def file_search=: ( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch - ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch + ) -> OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch def initialize: ( - ?code_interpreter: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch ) -> void - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::tool_resources - type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -106,14 +102,12 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::code_interpreter end type file_search = { vector_store_ids: ::Array[String], - vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } class FileSearch < OpenAI::Internal::Type::BaseModel @@ -121,32 +115,30 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] - attr_reader vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]? + attr_reader vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore]? def vector_stores=: ( - ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] - ) -> ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + ) -> ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] def initialize: ( ?vector_store_ids: ::Array[String], - ?vector_stores: ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + ?vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] ) -> void - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::file_search - type vector_store = { - chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? 
def chunking_strategy=: ( - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -155,16 +147,14 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::vector_store - type chunking_strategy = - OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto - | OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto + | OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -175,28 +165,24 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto end type static = { - static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, type: :static } class Static < OpenAI::Internal::Type::BaseModel - attr_accessor static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + attr_accessor static: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static attr_accessor type: :static def initialize: ( - static: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, ?type: :static ) -> void - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static - type static = { chunk_overlap_tokens: Integer, @@ -212,12 +198,10 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static end end - def self?.variants: -> [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + def self?.variants: -> ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git 
a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index d6a42d1d..e429cf3e 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -9,8 +9,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::Beta::assistant_delete_params end end end diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs index 8c913e60..5ea06673 100644 --- a/sig/openai/models/beta/assistant_deleted.rbs +++ b/sig/openai/models/beta/assistant_deleted.rbs @@ -16,8 +16,6 @@ module OpenAI deleted: bool, ?object: :"assistant.deleted" ) -> void - - def to_hash: -> OpenAI::Models::Beta::assistant_deleted end end end diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index c0481b7d..252e6b46 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -40,8 +40,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::assistant_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/assistant_response_format_option.rbs b/sig/openai/models/beta/assistant_response_format_option.rbs index e5da5be7..7ae082e6 100644 --- a/sig/openai/models/beta/assistant_response_format_option.rbs +++ b/sig/openai/models/beta/assistant_response_format_option.rbs @@ -3,14 +3,14 @@ module OpenAI module Beta type assistant_response_format_option = :auto - | OpenAI::Models::ResponseFormatText - | OpenAI::Models::ResponseFormatJSONObject - | OpenAI::Models::ResponseFormatJSONSchema + | OpenAI::ResponseFormatText + | OpenAI::ResponseFormatJSONObject + | OpenAI::ResponseFormatJSONSchema module AssistantResponseFormatOption extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema] + def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_response_format_option] end end end diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index 53274daa..a1fec037 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -9,8 +9,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::Beta::assistant_retrieve_params end end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index 915e637d..e0797c37 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -2,43 +2,43 @@ module OpenAI module Models module Beta type assistant_stream_event = - OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted - | 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted - | OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete - | OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent + OpenAI::Beta::AssistantStreamEvent::ThreadCreated + | OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated + | OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued + | OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress + | OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction + | OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted + | OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete + | OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed + | OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling + | OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled + | OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled + | OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired + | OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated + | OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress + | OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta + | OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted + | OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete + | OpenAI::Beta::AssistantStreamEvent::ErrorEvent module AssistantStreamEvent extend OpenAI::Internal::Type::Union type thread_created = { - data: OpenAI::Models::Beta::Thread, + data: OpenAI::Beta::Thread, event: :"thread.created", enabled: bool } class ThreadCreated < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Thread + attr_accessor data: OpenAI::Beta::Thread attr_accessor event: :"thread.created" @@ -47,448 +47,370 @@ module OpenAI def enabled=: (bool) -> bool def initialize: ( - data: OpenAI::Models::Beta::Thread, + data: OpenAI::Beta::Thread, ?enabled: bool, ?event: :"thread.created" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_created end type thread_run_created = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.created" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.created" } class ThreadRunCreated < 
OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.created" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.created" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_created end type thread_run_queued = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.queued" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.queued" } class ThreadRunQueued < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.queued" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.queued" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_queued end type thread_run_in_progress = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.in_progress" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.in_progress" } class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.in_progress" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.in_progress" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_in_progress end type thread_run_requires_action = { - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, event: :"thread.run.requires_action" } class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.requires_action" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.requires_action" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_requires_action end type thread_run_completed = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.completed" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.completed" } class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.completed" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.completed" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_completed end type thread_run_incomplete = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.incomplete" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.incomplete" } class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.incomplete" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.incomplete" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_incomplete end type thread_run_failed = - { 
- data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.failed" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.failed" } class ThreadRunFailed < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.failed" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.failed" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_failed end type thread_run_cancelling = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelling" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.cancelling" } class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.cancelling" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelling" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelling end type thread_run_cancelled = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.cancelled" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.cancelled" } class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.cancelled" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelled" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_cancelled end type thread_run_expired = - { - data: OpenAI::Models::Beta::Threads::Run, - event: :"thread.run.expired" - } + { data: OpenAI::Beta::Threads::Run, event: :"thread.run.expired" } class ThreadRunExpired < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Run + attr_accessor data: OpenAI::Beta::Threads::Run attr_accessor event: :"thread.run.expired" def initialize: ( - data: OpenAI::Models::Beta::Threads::Run, + data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.expired" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_expired end type thread_run_step_created = { - data: OpenAI::Models::Beta::Threads::Runs::RunStep, + data: OpenAI::Beta::Threads::Runs::RunStep, event: :"thread.run.step.created" } class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep + attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep attr_accessor event: :"thread.run.step.created" def initialize: ( - data: OpenAI::Models::Beta::Threads::Runs::RunStep, + data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.created" ) -> void - - def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_created end type thread_run_step_in_progress = { - data: OpenAI::Models::Beta::Threads::Runs::RunStep, + data: OpenAI::Beta::Threads::Runs::RunStep, event: :"thread.run.step.in_progress" } class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep + attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep attr_accessor event: :"thread.run.step.in_progress" def initialize: ( - data: 
OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             ?event: :"thread.run.step.in_progress"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_in_progress
         end

         type thread_run_step_delta =
           {
-            data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+            data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent,
             event: :"thread.run.step.delta"
           }

         class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent
+          attr_accessor data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent

           attr_accessor event: :"thread.run.step.delta"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+            data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent,
             ?event: :"thread.run.step.delta"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_delta
         end

         type thread_run_step_completed =
           {
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             event: :"thread.run.step.completed"
           }

         class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+          attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

           attr_accessor event: :"thread.run.step.completed"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             ?event: :"thread.run.step.completed"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_completed
         end

         type thread_run_step_failed =
           {
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             event: :"thread.run.step.failed"
           }

         class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+          attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

           attr_accessor event: :"thread.run.step.failed"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             ?event: :"thread.run.step.failed"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_failed
         end

         type thread_run_step_cancelled =
           {
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             event: :"thread.run.step.cancelled"
           }

         class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+          attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

           attr_accessor event: :"thread.run.step.cancelled"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             ?event: :"thread.run.step.cancelled"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_cancelled
         end

         type thread_run_step_expired =
           {
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             event: :"thread.run.step.expired"
           }

         class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+          attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

           attr_accessor event: :"thread.run.step.expired"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+            data: OpenAI::Beta::Threads::Runs::RunStep,
             ?event: :"thread.run.step.expired"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_run_step_expired
         end

         type thread_message_created =
           {
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             event: :"thread.message.created"
           }

         class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Message
+          attr_accessor data: OpenAI::Beta::Threads::Message

           attr_accessor event: :"thread.message.created"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             ?event: :"thread.message.created"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_created
         end

         type thread_message_in_progress =
           {
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             event: :"thread.message.in_progress"
           }

         class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Message
+          attr_accessor data: OpenAI::Beta::Threads::Message

           attr_accessor event: :"thread.message.in_progress"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             ?event: :"thread.message.in_progress"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_in_progress
         end

         type thread_message_delta =
           {
-            data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+            data: OpenAI::Beta::Threads::MessageDeltaEvent,
             event: :"thread.message.delta"
           }

         class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::MessageDeltaEvent
+          attr_accessor data: OpenAI::Beta::Threads::MessageDeltaEvent

           attr_accessor event: :"thread.message.delta"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+            data: OpenAI::Beta::Threads::MessageDeltaEvent,
             ?event: :"thread.message.delta"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_delta
         end

         type thread_message_completed =
           {
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             event: :"thread.message.completed"
           }

         class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Message
+          attr_accessor data: OpenAI::Beta::Threads::Message

           attr_accessor event: :"thread.message.completed"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             ?event: :"thread.message.completed"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_completed
         end

         type thread_message_incomplete =
           {
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             event: :"thread.message.incomplete"
           }

         class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::Beta::Threads::Message
+          attr_accessor data: OpenAI::Beta::Threads::Message

           attr_accessor event: :"thread.message.incomplete"

           def initialize: (
-            data: OpenAI::Models::Beta::Threads::Message,
+            data: OpenAI::Beta::Threads::Message,
             ?event: :"thread.message.incomplete"
           ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::thread_message_incomplete
         end

-        type error_event = { data: OpenAI::Models::ErrorObject, event: :error }
+        type error_event = { data: OpenAI::ErrorObject, event: :error }

         class ErrorEvent < OpenAI::Internal::Type::BaseModel
-          attr_accessor data: OpenAI::Models::ErrorObject
+          attr_accessor data: OpenAI::ErrorObject

           attr_accessor event: :error

-          def initialize: (
-            data: OpenAI::Models::ErrorObject,
-            ?event: :error
-          ) -> void
-
-          def to_hash: -> OpenAI::Models::Beta::AssistantStreamEvent::error_event
+          def initialize: (data: OpenAI::ErrorObject, ?event: :error) -> void
         end

-        def self?.variants: -> [OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent]
+        def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_stream_event]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool.rbs b/sig/openai/models/beta/assistant_tool.rbs
index abdfdcff..6a65bb27 100644
--- a/sig/openai/models/beta/assistant_tool.rbs
+++ b/sig/openai/models/beta/assistant_tool.rbs
@@ -2,14 +2,14 @@ module OpenAI
   module Models
     module Beta
       type assistant_tool =
-        OpenAI::Models::Beta::CodeInterpreterTool
-        | OpenAI::Models::Beta::FileSearchTool
-        | OpenAI::Models::Beta::FunctionTool
+        OpenAI::Beta::CodeInterpreterTool
+        | OpenAI::Beta::FileSearchTool
+        | OpenAI::Beta::FunctionTool

       module AssistantTool
         extend OpenAI::Internal::Type::Union

-        def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool]
+        def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_tool]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs
index 92b87de5..526b290d 100644
--- a/sig/openai/models/beta/assistant_tool_choice.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice.rbs
@@ -3,26 +3,24 @@ module OpenAI
     module Beta
       type assistant_tool_choice =
         {
-          type: OpenAI::Models::Beta::AssistantToolChoice::type_,
-          function: OpenAI::Models::Beta::AssistantToolChoiceFunction
+          type: OpenAI::Beta::AssistantToolChoice::type_,
+          function: OpenAI::Beta::AssistantToolChoiceFunction
         }

       class AssistantToolChoice < OpenAI::Internal::Type::BaseModel
-        attr_accessor type: OpenAI::Models::Beta::AssistantToolChoice::type_
+        attr_accessor type: OpenAI::Beta::AssistantToolChoice::type_

-        attr_reader function: OpenAI::Models::Beta::AssistantToolChoiceFunction?
+        attr_reader function: OpenAI::Beta::AssistantToolChoiceFunction?

         def function=: (
-          OpenAI::Models::Beta::AssistantToolChoiceFunction
-        ) -> OpenAI::Models::Beta::AssistantToolChoiceFunction
+          OpenAI::Beta::AssistantToolChoiceFunction
+        ) -> OpenAI::Beta::AssistantToolChoiceFunction

         def initialize: (
-          type: OpenAI::Models::Beta::AssistantToolChoice::type_,
-          ?function: OpenAI::Models::Beta::AssistantToolChoiceFunction
+          type: OpenAI::Beta::AssistantToolChoice::type_,
+          ?function: OpenAI::Beta::AssistantToolChoiceFunction
         ) -> void

-        def to_hash: -> OpenAI::Models::Beta::assistant_tool_choice
-
         type type_ = :function | :code_interpreter | :file_search

         module Type
@@ -32,7 +30,7 @@ module OpenAI
           CODE_INTERPRETER: :code_interpreter
           FILE_SEARCH: :file_search

-          def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoice::type_]
+          def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoice::type_]
         end
       end
     end
diff --git a/sig/openai/models/beta/assistant_tool_choice_function.rbs b/sig/openai/models/beta/assistant_tool_choice_function.rbs
index bd41ba31..36f0983c 100644
--- a/sig/openai/models/beta/assistant_tool_choice_function.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_function.rbs
@@ -7,8 +7,6 @@ module OpenAI
         attr_accessor name: String

         def initialize: (name: String) -> void
-
-        def to_hash: -> OpenAI::Models::Beta::assistant_tool_choice_function
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs
index 54a6f6c5..ddd66365 100644
--- a/sig/openai/models/beta/assistant_tool_choice_option.rbs
+++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs
@@ -2,8 +2,8 @@ module OpenAI
   module Models
     module Beta
       type assistant_tool_choice_option =
-        OpenAI::Models::Beta::AssistantToolChoiceOption::auto
-        | OpenAI::Models::Beta::AssistantToolChoice
+        OpenAI::Beta::AssistantToolChoiceOption::auto
+        | OpenAI::Beta::AssistantToolChoice

       module AssistantToolChoiceOption
         extend OpenAI::Internal::Type::Union
@@ -17,10 +17,10 @@ module OpenAI
           AUTO: :auto
           REQUIRED: :required

-          def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto]
+          def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoiceOption::auto]
         end

-        def self?.variants: -> [OpenAI::Models::Beta::AssistantToolChoiceOption::auto, OpenAI::Models::Beta::AssistantToolChoice]
+        def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_tool_choice_option]
       end
     end
   end
diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs
index 1ce0f114..de493080 100644
--- a/sig/openai/models/beta/assistant_update_params.rbs
+++ b/sig/openai/models/beta/assistant_update_params.rbs
@@ -11,7 +11,7 @@ module OpenAI
         reasoning_effort: OpenAI::Models::reasoning_effort?,
         response_format: OpenAI::Models::Beta::assistant_response_format_option?,
         temperature: Float?,
-        tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
+        tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?,
         tools: ::Array[OpenAI::Models::Beta::assistant_tool],
         top_p: Float?
       }
@@ -41,7 +41,7 @@ module OpenAI
       attr_accessor temperature: Float?

-      attr_accessor tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?
+      attr_accessor tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?

       attr_reader tools: ::Array[OpenAI::Models::Beta::assistant_tool]?
@@ -60,14 +60,12 @@ module OpenAI
        ?reasoning_effort: OpenAI::Models::reasoning_effort?,
        ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
        ?temperature: Float?,
-       ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?,
+       ?tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?,
        ?tools: ::Array[OpenAI::Models::Beta::assistant_tool],
        ?top_p: Float?,
        ?request_options: OpenAI::request_opts
      ) -> void

-     def to_hash: -> OpenAI::Models::Beta::assistant_update_params
-
      type model =
        String
        | :"gpt-4.1"
@@ -110,42 +108,7 @@ module OpenAI
      module Model
        extend OpenAI::Internal::Type::Union

-       def self?.variants: -> [String, (:"gpt-4.1"
-         | :"gpt-4.1-mini"
-         | :"gpt-4.1-nano"
-         | :"gpt-4.1-2025-04-14"
-         | :"gpt-4.1-mini-2025-04-14"
-         | :"gpt-4.1-nano-2025-04-14"
-         | :"o3-mini"
-         | :"o3-mini-2025-01-31"
-         | :o1
-         | :"o1-2024-12-17"
-         | :"gpt-4o"
-         | :"gpt-4o-2024-11-20"
-         | :"gpt-4o-2024-08-06"
-         | :"gpt-4o-2024-05-13"
-         | :"gpt-4o-mini"
-         | :"gpt-4o-mini-2024-07-18"
-         | :"gpt-4.5-preview"
-         | :"gpt-4.5-preview-2025-02-27"
-         | :"gpt-4-turbo"
-         | :"gpt-4-turbo-2024-04-09"
-         | :"gpt-4-0125-preview"
-         | :"gpt-4-turbo-preview"
-         | :"gpt-4-1106-preview"
-         | :"gpt-4-vision-preview"
-         | :"gpt-4"
-         | :"gpt-4-0314"
-         | :"gpt-4-0613"
-         | :"gpt-4-32k"
-         | :"gpt-4-32k-0314"
-         | :"gpt-4-32k-0613"
-         | :"gpt-3.5-turbo"
-         | :"gpt-3.5-turbo-16k"
-         | :"gpt-3.5-turbo-0613"
-         | :"gpt-3.5-turbo-1106"
-         | :"gpt-3.5-turbo-0125"
-         | :"gpt-3.5-turbo-16k-0613")]
+       def self?.variants: -> ::Array[OpenAI::Models::Beta::AssistantUpdateParams::model]

        GPT_4_1: :"gpt-4.1"
        GPT_4_1_MINI: :"gpt-4.1-mini"
@@ -187,30 +150,28 @@ module OpenAI
      type tool_resources =
        {
-         code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
-         file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
+         code_interpreter: OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
+         file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch
        }

      class ToolResources < OpenAI::Internal::Type::BaseModel
-       attr_reader code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter?
+       attr_reader code_interpreter: OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter?

        def code_interpreter=: (
-         OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter
-       ) -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter
+         OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter
+       ) -> OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter

-       attr_reader file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch?
+       attr_reader file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch?
        def file_search=: (
-         OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
-       ) -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
+         OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch
+       ) -> OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch

        def initialize: (
-         ?code_interpreter: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
-         ?file_search: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch
+         ?code_interpreter: OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter,
+         ?file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch
        ) -> void

-       def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::tool_resources
-
        type code_interpreter = { file_ids: ::Array[String] }

        class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -219,8 +180,6 @@ module OpenAI
          def file_ids=: (::Array[String]) -> ::Array[String]

          def initialize: (?file_ids: ::Array[String]) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::code_interpreter
        end

        type file_search = { vector_store_ids: ::Array[String] }
@@ -231,8 +190,6 @@ module OpenAI
          def vector_store_ids=: (::Array[String]) -> ::Array[String]

          def initialize: (?vector_store_ids: ::Array[String]) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::file_search
        end
      end
    end
diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs
index f986bb74..84b353d0 100644
--- a/sig/openai/models/beta/code_interpreter_tool.rbs
+++ b/sig/openai/models/beta/code_interpreter_tool.rbs
@@ -7,8 +7,6 @@ module OpenAI
        attr_accessor type: :code_interpreter

        def initialize: (?type: :code_interpreter) -> void
-
-       def to_hash: -> OpenAI::Models::Beta::code_interpreter_tool
      end
    end
  end
diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs
index f3bec757..9e544ea9 100644
--- a/sig/openai/models/beta/file_search_tool.rbs
+++ b/sig/openai/models/beta/file_search_tool.rbs
@@ -4,29 +4,27 @@ module OpenAI
      type file_search_tool =
        {
          type: :file_search,
-         file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch
+         file_search: OpenAI::Beta::FileSearchTool::FileSearch
        }

      class FileSearchTool < OpenAI::Internal::Type::BaseModel
        attr_accessor type: :file_search

-       attr_reader file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch?
+       attr_reader file_search: OpenAI::Beta::FileSearchTool::FileSearch?

        def file_search=: (
-         OpenAI::Models::Beta::FileSearchTool::FileSearch
-       ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch
+         OpenAI::Beta::FileSearchTool::FileSearch
+       ) -> OpenAI::Beta::FileSearchTool::FileSearch

        def initialize: (
-         ?file_search: OpenAI::Models::Beta::FileSearchTool::FileSearch,
+         ?file_search: OpenAI::Beta::FileSearchTool::FileSearch,
          ?type: :file_search
        ) -> void

-       def to_hash: -> OpenAI::Models::Beta::file_search_tool
-
        type file_search =
          {
            max_num_results: Integer,
-           ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
+           ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions
          }

        class FileSearch < OpenAI::Internal::Type::BaseModel
          attr_reader max_num_results: Integer?

          def max_num_results=: (Integer) -> Integer

-         attr_reader ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions?
+         attr_reader ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions?
          def ranking_options=: (
-           OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
-         ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
+           OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions
+         ) -> OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions

          def initialize: (
            ?max_num_results: Integer,
-           ?ranking_options: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions
+           ?ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions
          ) -> void

-         def to_hash: -> OpenAI::Models::Beta::FileSearchTool::file_search
-
          type ranking_options =
            {
              score_threshold: Float,
-             ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
+             ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
            }

          class RankingOptions < OpenAI::Internal::Type::BaseModel
            attr_accessor score_threshold: Float

-           attr_reader ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker?
+           attr_reader ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker?

            def ranker=: (
-             OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
-           ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
+             OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
+           ) -> OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker

            def initialize: (
              score_threshold: Float,
-             ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
+             ?ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker
            ) -> void

-           def to_hash: -> OpenAI::Models::Beta::FileSearchTool::FileSearch::ranking_options
-
            type ranker = :auto | :default_2024_08_21

            module Ranker
@@ -77,7 +71,7 @@ module OpenAI
              AUTO: :auto
              DEFAULT_2024_08_21: :default_2024_08_21

-             def self?.values: -> ::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker]
+             def self?.values: -> ::Array[OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker]
            end
          end
        end
diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs
index cdb1900f..d93fda7c 100644
--- a/sig/openai/models/beta/function_tool.rbs
+++ b/sig/openai/models/beta/function_tool.rbs
@@ -2,19 +2,17 @@ module OpenAI
  module Models
    module Beta
      type function_tool =
-       { function: OpenAI::Models::FunctionDefinition, type: :function }
+       { function: OpenAI::FunctionDefinition, type: :function }

      class FunctionTool < OpenAI::Internal::Type::BaseModel
-       attr_accessor function: OpenAI::Models::FunctionDefinition
+       attr_accessor function: OpenAI::FunctionDefinition

        attr_accessor type: :function

        def initialize: (
-         function: OpenAI::Models::FunctionDefinition,
+         function: OpenAI::FunctionDefinition,
          ?type: :function
        ) -> void
-
-       def to_hash: -> OpenAI::Models::Beta::function_tool
      end
    end
  end
diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs
index 80b9ef55..e13d605b 100644
--- a/sig/openai/models/beta/message_stream_event.rbs
+++ b/sig/openai/models/beta/message_stream_event.rbs
@@ -2,111 +2,101 @@ module OpenAI
  module Models
    module Beta
      type message_stream_event =
-       OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated
-       | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress
-       | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta
-       | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted
-       | OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete
+       OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated
+       | OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress
+       | OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta
+       | OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted
+       | OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete

      module MessageStreamEvent
        extend OpenAI::Internal::Type::Union

        type thread_message_created =
          {
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            event: :"thread.message.created"
          }

        class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Message
+         attr_accessor data: OpenAI::Beta::Threads::Message

          attr_accessor event: :"thread.message.created"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            ?event: :"thread.message.created"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_created
        end

        type thread_message_in_progress =
          {
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            event: :"thread.message.in_progress"
          }

        class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Message
+         attr_accessor data: OpenAI::Beta::Threads::Message

          attr_accessor event: :"thread.message.in_progress"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            ?event: :"thread.message.in_progress"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_in_progress
        end

        type thread_message_delta =
          {
-           data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+           data: OpenAI::Beta::Threads::MessageDeltaEvent,
            event: :"thread.message.delta"
          }

        class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::MessageDeltaEvent
+         attr_accessor data: OpenAI::Beta::Threads::MessageDeltaEvent

          attr_accessor event: :"thread.message.delta"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::MessageDeltaEvent,
+           data: OpenAI::Beta::Threads::MessageDeltaEvent,
            ?event: :"thread.message.delta"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_delta
        end

        type thread_message_completed =
          {
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            event: :"thread.message.completed"
          }

        class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Message
+         attr_accessor data: OpenAI::Beta::Threads::Message

          attr_accessor event: :"thread.message.completed"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            ?event: :"thread.message.completed"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_completed
        end

        type thread_message_incomplete =
          {
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            event: :"thread.message.incomplete"
          }

        class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Message
+         attr_accessor data: OpenAI::Beta::Threads::Message

          attr_accessor event: :"thread.message.incomplete"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Message,
+           data: OpenAI::Beta::Threads::Message,
            ?event: :"thread.message.incomplete"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::MessageStreamEvent::thread_message_incomplete
        end

-       def self?.variants: -> [OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete]
+       def self?.variants: -> ::Array[OpenAI::Models::Beta::message_stream_event]
      end
    end
  end
diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs
index 6cd64a35..3b292e7c 100644
--- a/sig/openai/models/beta/run_step_stream_event.rbs
+++ b/sig/openai/models/beta/run_step_stream_event.rbs
@@ -2,151 +2,137 @@ module OpenAI
  module Models
    module Beta
      type run_step_stream_event =
-       OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled
-       | OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired
+       OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled
+       | OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired

      module RunStepStreamEvent
        extend OpenAI::Internal::Type::Union

        type thread_run_step_created =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.created"
          }

        class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.created"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.created"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_created
        end

        type thread_run_step_in_progress =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.in_progress"
          }

        class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.in_progress"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.in_progress"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_in_progress
        end

        type thread_run_step_delta =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+           data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent,
            event: :"thread.run.step.delta"
          }

        class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent

          attr_accessor event: :"thread.run.step.delta"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent,
+           data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent,
            ?event: :"thread.run.step.delta"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_delta
        end

        type thread_run_step_completed =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.completed"
          }

        class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.completed"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.completed"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_completed
        end

        type thread_run_step_failed =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.failed"
          }

        class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.failed"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.failed"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_failed
        end

        type thread_run_step_cancelled =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.cancelled"
          }

        class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.cancelled"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.cancelled"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_cancelled
        end

        type thread_run_step_expired =
          {
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            event: :"thread.run.step.expired"
          }

        class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Runs::RunStep
+         attr_accessor data: OpenAI::Beta::Threads::Runs::RunStep

          attr_accessor event: :"thread.run.step.expired"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Runs::RunStep,
+           data: OpenAI::Beta::Threads::Runs::RunStep,
            ?event: :"thread.run.step.expired"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStepStreamEvent::thread_run_step_expired
        end

-       def self?.variants: -> [OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired]
+       def self?.variants: -> ::Array[OpenAI::Models::Beta::run_step_stream_event]
      end
    end
  end
diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs
index 3e2604b4..e1c4d276 100644
--- a/sig/openai/models/beta/run_stream_event.rbs
+++ b/sig/openai/models/beta/run_stream_event.rbs
@@ -2,211 +2,164 @@ module OpenAI
  module Models
    module Beta
      type run_stream_event =
-       OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled
-       | OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired
+       OpenAI::Beta::RunStreamEvent::ThreadRunCreated
+       | OpenAI::Beta::RunStreamEvent::ThreadRunQueued
+       | OpenAI::Beta::RunStreamEvent::ThreadRunInProgress
+       | OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction
+       | OpenAI::Beta::RunStreamEvent::ThreadRunCompleted
+       | OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete
+       | OpenAI::Beta::RunStreamEvent::ThreadRunFailed
+       | OpenAI::Beta::RunStreamEvent::ThreadRunCancelling
+       | OpenAI::Beta::RunStreamEvent::ThreadRunCancelled
+       | OpenAI::Beta::RunStreamEvent::ThreadRunExpired

      module RunStreamEvent
        extend OpenAI::Internal::Type::Union

        type thread_run_created =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.created"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.created" }

        class ThreadRunCreated < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.created"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.created"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_created
        end

        type thread_run_queued =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.queued"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.queued" }

        class ThreadRunQueued < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.queued"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.queued"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_queued
        end

        type thread_run_in_progress =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.in_progress"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.in_progress" }

        class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.in_progress"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.in_progress"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_in_progress
        end

        type thread_run_requires_action =
          {
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            event: :"thread.run.requires_action"
          }

        class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.requires_action"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.requires_action"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_requires_action
        end

        type thread_run_completed =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.completed"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.completed" }

        class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.completed"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.completed"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_completed
        end

        type thread_run_incomplete =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.incomplete"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.incomplete" }

        class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.incomplete"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.incomplete"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_incomplete
        end

        type thread_run_failed =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.failed"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.failed" }

        class ThreadRunFailed < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.failed"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.failed"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_failed
        end

        type thread_run_cancelling =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.cancelling"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.cancelling" }

        class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.cancelling"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.cancelling"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelling
        end

        type thread_run_cancelled =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.cancelled"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.cancelled" }

        class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.cancelled"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.cancelled"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_cancelled
        end

        type thread_run_expired =
-         {
-           data: OpenAI::Models::Beta::Threads::Run,
-           event: :"thread.run.expired"
-         }
+         { data: OpenAI::Beta::Threads::Run, event: :"thread.run.expired" }

        class ThreadRunExpired < OpenAI::Internal::Type::BaseModel
-         attr_accessor data: OpenAI::Models::Beta::Threads::Run
+         attr_accessor data: OpenAI::Beta::Threads::Run

          attr_accessor event: :"thread.run.expired"

          def initialize: (
-           data: OpenAI::Models::Beta::Threads::Run,
+           data: OpenAI::Beta::Threads::Run,
            ?event: :"thread.run.expired"
          ) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::RunStreamEvent::thread_run_expired
        end

-       def self?.variants: -> [OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired]
+       def self?.variants: -> ::Array[OpenAI::Models::Beta::run_stream_event]
      end
    end
  end
diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs
index e0418d02..c934342f 100644
--- a/sig/openai/models/beta/thread.rbs
+++ b/sig/openai/models/beta/thread.rbs
@@ -7,7 +7,7 @@ module OpenAI
          created_at: Integer,
          metadata: OpenAI::Models::metadata?,
          object: :thread,
-         tool_resources: OpenAI::Models::Beta::Thread::ToolResources?
+         tool_resources: OpenAI::Beta::Thread::ToolResources?
        }

      class Thread < OpenAI::Internal::Type::BaseModel
@@ -19,44 +19,40 @@ module OpenAI
        attr_accessor object: :thread

-       attr_accessor tool_resources: OpenAI::Models::Beta::Thread::ToolResources?
+       attr_accessor tool_resources: OpenAI::Beta::Thread::ToolResources?

        def initialize: (
          id: String,
          created_at: Integer,
          metadata: OpenAI::Models::metadata?,
-         tool_resources: OpenAI::Models::Beta::Thread::ToolResources?,
+         tool_resources: OpenAI::Beta::Thread::ToolResources?,
          ?object: :thread
        ) -> void

-       def to_hash: -> OpenAI::Models::Beta::thread
-
        type tool_resources =
          {
-           code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter,
-           file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch
+           code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter,
+           file_search: OpenAI::Beta::Thread::ToolResources::FileSearch
          }

        class ToolResources < OpenAI::Internal::Type::BaseModel
-         attr_reader code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter?
+         attr_reader code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter?

          def code_interpreter=: (
-           OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter
-         ) -> OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter
+           OpenAI::Beta::Thread::ToolResources::CodeInterpreter
+         ) -> OpenAI::Beta::Thread::ToolResources::CodeInterpreter

-         attr_reader file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch?
+         attr_reader file_search: OpenAI::Beta::Thread::ToolResources::FileSearch?
          def file_search=: (
-           OpenAI::Models::Beta::Thread::ToolResources::FileSearch
-         ) -> OpenAI::Models::Beta::Thread::ToolResources::FileSearch
+           OpenAI::Beta::Thread::ToolResources::FileSearch
+         ) -> OpenAI::Beta::Thread::ToolResources::FileSearch

          def initialize: (
-           ?code_interpreter: OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter,
-           ?file_search: OpenAI::Models::Beta::Thread::ToolResources::FileSearch
+           ?code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter,
+           ?file_search: OpenAI::Beta::Thread::ToolResources::FileSearch
          ) -> void

-         def to_hash: -> OpenAI::Models::Beta::Thread::tool_resources
-
          type code_interpreter = { file_ids: ::Array[String] }

          class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -65,8 +61,6 @@ module OpenAI
            def file_ids=: (::Array[String]) -> ::Array[String]

            def initialize: (?file_ids: ::Array[String]) -> void
-
-           def to_hash: -> OpenAI::Models::Beta::Thread::ToolResources::code_interpreter
          end

          type file_search = { vector_store_ids: ::Array[String] }
@@ -77,8 +71,6 @@ module OpenAI
            def vector_store_ids=: (::Array[String]) -> ::Array[String]

            def initialize: (?vector_store_ids: ::Array[String]) -> void
-
-           def to_hash: -> OpenAI::Models::Beta::Thread::ToolResources::file_search
          end
        end
      end
diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs
index 5d78ca13..ca752e94 100644
--- a/sig/openai/models/beta/thread_create_and_run_params.rbs
+++ b/sig/openai/models/beta/thread_create_and_run_params.rbs
@@ -12,12 +12,12 @@ module OpenAI
        parallel_tool_calls: bool,
        response_format: OpenAI::Models::Beta::assistant_response_format_option?,
        temperature: Float?,
-       thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
+       thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread,
        tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-       tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
+       tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?,
        tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
        top_p: Float?,
-       truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?
+       truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?
      }
      & OpenAI::Internal::Type::request_parameters
@@ -45,21 +45,21 @@ module OpenAI
      attr_accessor temperature: Float?

-     attr_reader thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread?
+     attr_reader thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread?

      def thread=: (
-       OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread
-     ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread
+       OpenAI::Beta::ThreadCreateAndRunParams::Thread
+     ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread

      attr_accessor tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?

-     attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?
+     attr_accessor tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?

      attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool]?

      attr_accessor top_p: Float?

-     attr_accessor truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?
+     attr_accessor truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?
      def initialize: (
        assistant_id: String,
@@ -71,77 +71,71 @@ module OpenAI
        ?parallel_tool_calls: bool,
        ?response_format: OpenAI::Models::Beta::assistant_response_format_option?,
        ?temperature: Float?,
-       ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread,
+       ?thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread,
        ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?,
-       ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?,
+       ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?,
        ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?,
        ?top_p: Float?,
-       ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
+       ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?,
        ?request_options: OpenAI::request_opts
      ) -> void

-     def to_hash: -> OpenAI::Models::Beta::thread_create_and_run_params
-
      type model = String | OpenAI::Models::chat_model

      module Model
        extend OpenAI::Internal::Type::Union

-       def self?.variants: -> [String, OpenAI::Models::chat_model]
+       def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::model]
      end

      type thread =
        {
-         messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message],
+         messages: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message],
          metadata: OpenAI::Models::metadata?,
-         tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
+         tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
        }

      class Thread < OpenAI::Internal::Type::BaseModel
-       attr_reader messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]?
+       attr_reader messages: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message]?

        def messages=: (
-         ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]
-       ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message]
+         ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message]
+       ) -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message]

        attr_accessor metadata: OpenAI::Models::metadata?

-       attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
+       attr_accessor tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources?

        def initialize: (
-         ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message],
+         ?messages: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message],
          ?metadata: OpenAI::Models::metadata?,
-         ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
+         ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources?
        ) -> void

-       def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::thread
-
        type message =
          {
-           content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content,
-           role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role,
-           attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?,
+           content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content,
+           role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role,
+           attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?,
            metadata: OpenAI::Models::metadata?
          }

        class Message < OpenAI::Internal::Type::BaseModel
-         attr_accessor content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content
+         attr_accessor content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content

-         attr_accessor role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role
+         attr_accessor role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role

-         attr_accessor attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?
+         attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?

          attr_accessor metadata: OpenAI::Models::metadata?

          def initialize: (
-           content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content,
-           role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role,
-           ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?,
+           content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content,
+           role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role,
+           ?attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?,
            ?metadata: OpenAI::Models::metadata?
          ) -> void

-         def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::message
-
          type content =
            String
            | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]
@@ -149,7 +143,7 @@ module OpenAI
          module Content
            extend OpenAI::Internal::Type::Union

-           def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]]
+           def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content]

            MessageContentPartParamArray: OpenAI::Internal::Type::Converter
          end
@@ -162,13 +156,13 @@ module OpenAI
            USER: :user
            ASSISTANT: :assistant

-           def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role]
+           def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role]
          end

          type attachment =
            {
              file_id: String,
-             tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
+             tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
            }

          class Attachment < OpenAI::Internal::Type::BaseModel
            attr_reader file_id: String?

            def file_id=: (String) -> String

-           attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]?
+           attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]?
            def tools=: (
-             ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
-           ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
+             ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
+           ) -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]

            def initialize: (
              ?file_id: String,
-             ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
+             ?tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
            ) -> void

-           def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::attachment
-
            type tool =
-             OpenAI::Models::Beta::CodeInterpreterTool
-             | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch
+             OpenAI::Beta::CodeInterpreterTool
+             | OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch

            module Tool
              extend OpenAI::Internal::Type::Union
@@ -202,41 +194,37 @@ module OpenAI
                attr_accessor type: :file_search

                def initialize: (?type: :file_search) -> void
-
-               def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::file_search
              end

-             def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch]
+             def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]
            end
          end
        end

        type tool_resources =
          {
-           code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter,
-           file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
+           code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter,
+           file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
          }

        class ToolResources < OpenAI::Internal::Type::BaseModel
-         attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter?
+         attr_reader code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter?

          def code_interpreter=: (
-           OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter
-         ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter
+           OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter
+         ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter

-         attr_reader file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch?
+         attr_reader file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch?
          def file_search=: (
-           OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
-         ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
+           OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
+         ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch

          def initialize: (
-           ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter,
-           ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
+           ?code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter,
+           ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch
          ) -> void

-         def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::tool_resources
-
          type code_interpreter = { file_ids: ::Array[String] }

          class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -245,14 +233,12 @@ module OpenAI
            def file_ids=: (::Array[String]) -> ::Array[String]

            def initialize: (?file_ids: ::Array[String]) -> void
-
-           def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::code_interpreter
          end

          type file_search =
            {
              vector_store_ids: ::Array[String],
-             vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
+             vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
            }

          class FileSearch < OpenAI::Internal::Type::BaseModel
@@ -260,32 +246,30 @@ module OpenAI
            def vector_store_ids=: (::Array[String]) -> ::Array[String]

-           attr_reader vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]?
+           attr_reader vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]?

            def vector_stores=: (
-             ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
-           ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
+             ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
+           ) -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]

            def initialize: (
              ?vector_store_ids: ::Array[String],
-             ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
+             ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
            ) -> void

-           def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::file_search
-
            type vector_store =
              {
-               chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy,
+               chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy,
                file_ids: ::Array[String],
                metadata: OpenAI::Models::metadata?
              }

            class VectorStore < OpenAI::Internal::Type::BaseModel
-             attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy?
+             attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy?
              def chunking_strategy=: (
-               OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy
-             ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy
+               OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy
+             ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy

              attr_reader file_ids: ::Array[String]?
@@ -294,16 +278,14 @@ module OpenAI
              attr_accessor metadata: OpenAI::Models::metadata?

              def initialize: (
-               ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy,
+               ?chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy,
                ?file_ids: ::Array[String],
                ?metadata: OpenAI::Models::metadata?
              ) -> void

-             def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::vector_store
-
              type chunking_strategy =
-               OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto
-               | OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static
+               OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto
+               | OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static

              module ChunkingStrategy
                extend OpenAI::Internal::Type::Union
@@ -314,28 +296,24 @@ module OpenAI
                  attr_accessor type: :auto

                  def initialize: (?type: :auto) -> void
-
-                 def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto
                end

                type static =
                  {
-                   static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
+                   static: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
                    type: :static
                  }

                class Static < OpenAI::Internal::Type::BaseModel
-                 attr_accessor static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static
+                 attr_accessor static: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static

                  attr_accessor type: :static

                  def initialize: (
-                   static: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
+                   static: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static,
                    ?type: :static
                  ) -> void

-                 def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static
-
                  type static =
                    {
                      chunk_overlap_tokens: Integer,
@@ -351,12 +329,10 @@ module OpenAI
                      chunk_overlap_tokens: Integer,
                      max_chunk_size_tokens: Integer
                    ) -> void
-
-                   def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static
                  end
                end

-               def self?.variants: -> [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static]
+               def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy]
              end
            end
          end
@@ -365,30 +341,28 @@ module OpenAI
      type tool_resources =
        {
-         code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter,
-         file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
+         code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter,
+         file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
        }

      class ToolResources < OpenAI::Internal::Type::BaseModel
-       attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter?
+       attr_reader code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter?

        def code_interpreter=: (
-         OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter
-       ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter
+         OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter
+       ) -> OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter

-       attr_reader file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch?
+       attr_reader file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch?

        def file_search=: (
-         OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
-       ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
+         OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
+       ) -> OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch

        def initialize: (
-         ?code_interpreter: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter,
-         ?file_search: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
+         ?code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter,
+         ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch
        ) -> void

-       def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::tool_resources
-
        type code_interpreter = { file_ids: ::Array[String] }

        class CodeInterpreter < OpenAI::Internal::Type::BaseModel
@@ -397,8 +371,6 @@ module OpenAI
          def file_ids=: (::Array[String]) -> ::Array[String]

          def initialize: (?file_ids: ::Array[String]) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::code_interpreter
        end

        type file_search = { vector_store_ids: ::Array[String] }
@@ -409,29 +381,25 @@ module OpenAI
          def vector_store_ids=: (::Array[String]) -> ::Array[String]

          def initialize: (?vector_store_ids: ::Array[String]) -> void
-
-         def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::file_search
        end
      end

      type truncation_strategy =
        {
-         type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_,
+         type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_,
          last_messages: Integer?
        }

      class TruncationStrategy < OpenAI::Internal::Type::BaseModel
-       attr_accessor type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_
+       attr_accessor type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_

        attr_accessor last_messages: Integer?
def initialize: ( - type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, ?last_messages: Integer? ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateAndRunParams::truncation_strategy - type type_ = :auto | :last_messages module Type @@ -440,7 +408,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] end end end diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index 08c6bdd2..d3593936 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -3,9 +3,9 @@ module OpenAI module Beta type thread_create_params = { - messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message], metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources? + tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources? } & OpenAI::Internal::Type::request_parameters @@ -13,51 +13,47 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_reader messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message]? + attr_reader messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message]? def messages=: ( - ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message] - ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message] + ::Array[OpenAI::Beta::ThreadCreateParams::Message] + ) -> ::Array[OpenAI::Beta::ThreadCreateParams::Message] attr_accessor metadata: OpenAI::Models::metadata? - attr_accessor tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources? + attr_accessor tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources? def initialize: ( - ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + ?messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message], ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::thread_create_params - type message = { - content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, - attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, + content: OpenAI::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Beta::ThreadCreateParams::Message::role, + attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, metadata: OpenAI::Models::metadata? } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Beta::ThreadCreateParams::Message::content + attr_accessor content: OpenAI::Beta::ThreadCreateParams::Message::content - attr_accessor role: OpenAI::Models::Beta::ThreadCreateParams::Message::role + attr_accessor role: OpenAI::Beta::ThreadCreateParams::Message::role - attr_accessor attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]? 
+ attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, - ?attachments: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment]?, + content: OpenAI::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Beta::ThreadCreateParams::Message::role, + ?attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::message - type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -65,7 +61,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -78,13 +74,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::role] + def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -92,22 +88,20 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] - ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + ) -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::attachment - type tool = - OpenAI::Models::Beta::CodeInterpreterTool - | OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool + | OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch module Tool extend OpenAI::Internal::Type::Union @@ -118,41 +112,37 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::file_search end - def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch] + def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] end end end type tool_resources = { - code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + code_interpreter: OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch } class ToolResources < OpenAI::Internal::Type::BaseModel - attr_reader code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter? + attr_reader code_interpreter: OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter? def code_interpreter=: ( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter - ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter + ) -> OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter - attr_reader file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch? + attr_reader file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch? 
def file_search=: ( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch - ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch + ) -> OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch def initialize: ( - ?code_interpreter: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::tool_resources - type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -161,14 +151,12 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::code_interpreter end type file_search = { vector_store_ids: ::Array[String], - vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } class FileSearch < OpenAI::Internal::Type::BaseModel @@ -176,32 +164,30 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] - attr_reader vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]? + attr_reader vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]? def vector_stores=: ( - ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] - ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + ) -> ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] def initialize: ( ?vector_store_ids: ::Array[String], - ?vector_stores: ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::file_search - type vector_store = { - chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? 
def chunking_strategy=: ( - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -210,16 +196,14 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::vector_store - type chunking_strategy = - OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto - | OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto + | OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -230,28 +214,24 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::auto end type static = { - static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, type: :static } class Static < OpenAI::Internal::Type::BaseModel - attr_accessor static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static + attr_accessor static: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static attr_accessor type: :static def initialize: ( - static: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + static: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, ?type: :static ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::static - type static = { chunk_overlap_tokens: Integer, @@ -267,12 +247,10 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::static end end - def self?.variants: -> [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index 
485a121a..fa242461 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -9,8 +9,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::Beta::thread_delete_params end end end diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs index 1734e860..9ca391a5 100644 --- a/sig/openai/models/beta/thread_deleted.rbs +++ b/sig/openai/models/beta/thread_deleted.rbs @@ -16,8 +16,6 @@ module OpenAI deleted: bool, ?object: :"thread.deleted" ) -> void - - def to_hash: -> OpenAI::Models::Beta::thread_deleted end end end diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs index 90d1a84a..90b81360 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -9,8 +9,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::Beta::thread_retrieve_params end end end diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs index 74c61286..01af4a19 100644 --- a/sig/openai/models/beta/thread_stream_event.rbs +++ b/sig/openai/models/beta/thread_stream_event.rbs @@ -2,14 +2,10 @@ module OpenAI module Models module Beta type thread_stream_event = - { - data: OpenAI::Models::Beta::Thread, - event: :"thread.created", - enabled: bool - } + { data: OpenAI::Beta::Thread, event: :"thread.created", enabled: bool } class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel - attr_accessor data: OpenAI::Models::Beta::Thread + attr_accessor data: OpenAI::Beta::Thread attr_accessor event: :"thread.created" @@ -18,12 +14,10 @@ module OpenAI def enabled=: (bool) -> bool def initialize: ( - data: OpenAI::Models::Beta::Thread, + data: OpenAI::Beta::Thread, ?enabled: bool, ?event: :"thread.created" ) -> void - - def to_hash: -> OpenAI::Models::Beta::thread_stream_event end end end diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index 81883e74..98253f4e 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -4,7 +4,7 @@ module OpenAI type thread_update_params = { metadata: OpenAI::Models::metadata?, - tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources? + tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources? } & OpenAI::Internal::Type::request_parameters @@ -14,42 +14,38 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - attr_accessor tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources? + attr_accessor tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources? 
def initialize: ( ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::thread_update_params - type tool_resources = { - code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch + code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch } class ToolResources < OpenAI::Internal::Type::BaseModel - attr_reader code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter? + attr_reader code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter? def code_interpreter=: ( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter - ) -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter + ) -> OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter - attr_reader file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch? + attr_reader file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch? def file_search=: ( - OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch - ) -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch + ) -> OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch def initialize: ( - ?code_interpreter: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, - ?file_search: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch + ?code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + ?file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch ) -> void - def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::tool_resources - type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -58,8 +54,6 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::code_interpreter end type file_search = { vector_store_ids: ::Array[String] } @@ -70,8 +64,6 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void - - def to_hash: -> OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::file_search end end end diff --git a/sig/openai/models/beta/threads/annotation.rbs b/sig/openai/models/beta/threads/annotation.rbs index 72c5b0ec..872a6668 100644 --- a/sig/openai/models/beta/threads/annotation.rbs +++ b/sig/openai/models/beta/threads/annotation.rbs @@ -3,13 +3,13 @@ module OpenAI module Beta module Threads type annotation = - OpenAI::Models::Beta::Threads::FileCitationAnnotation - | OpenAI::Models::Beta::Threads::FilePathAnnotation + OpenAI::Beta::Threads::FileCitationAnnotation + | OpenAI::Beta::Threads::FilePathAnnotation module Annotation extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation] + 
def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::annotation] end end end diff --git a/sig/openai/models/beta/threads/annotation_delta.rbs b/sig/openai/models/beta/threads/annotation_delta.rbs index 0840b591..4ea5f3a3 100644 --- a/sig/openai/models/beta/threads/annotation_delta.rbs +++ b/sig/openai/models/beta/threads/annotation_delta.rbs @@ -3,13 +3,13 @@ module OpenAI module Beta module Threads type annotation_delta = - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation - | OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation + OpenAI::Beta::Threads::FileCitationDeltaAnnotation + | OpenAI::Beta::Threads::FilePathDeltaAnnotation module AnnotationDelta extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::annotation_delta] end end end diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs index b22f592c..424350aa 100644 --- a/sig/openai/models/beta/threads/file_citation_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs @@ -5,7 +5,7 @@ module OpenAI type file_citation_annotation = { end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + file_citation: OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, start_index: Integer, text: String, type: :file_citation @@ -14,7 +14,7 @@ module OpenAI class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer - attr_accessor file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation + attr_accessor file_citation: OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation attr_accessor start_index: Integer @@ -24,22 +24,18 @@ module OpenAI def initialize: ( end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation, + file_citation: OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, start_index: Integer, text: String, ?type: :file_citation ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::file_citation_annotation - type file_citation = { file_id: String } class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::FileCitationAnnotation::file_citation end end end diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs index eff94443..ba8e1399 100644 --- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs @@ -7,7 +7,7 @@ module OpenAI index: Integer, type: :file_citation, end_index: Integer, - file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + file_citation: OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, start_index: Integer, text: String } @@ -21,11 +21,11 @@ module OpenAI def end_index=: (Integer) -> Integer - attr_reader file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation? + attr_reader file_citation: OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation? 
def file_citation=: ( - OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation - ) -> OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation + ) -> OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation attr_reader start_index: Integer? @@ -38,14 +38,12 @@ module OpenAI def initialize: ( index: Integer, ?end_index: Integer, - ?file_citation: OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + ?file_citation: OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, ?start_index: Integer, ?text: String, ?type: :file_citation ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::file_citation_delta_annotation - type file_citation = { file_id: String, quote: String } class FileCitation < OpenAI::Internal::Type::BaseModel @@ -58,8 +56,6 @@ module OpenAI def quote=: (String) -> String def initialize: (?file_id: String, ?quote: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::file_citation end end end diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs index 4234a0fd..95a8cd5d 100644 --- a/sig/openai/models/beta/threads/file_path_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_annotation.rbs @@ -5,7 +5,7 @@ module OpenAI type file_path_annotation = { end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, + file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath, start_index: Integer, text: String, type: :file_path @@ -14,7 +14,7 @@ module OpenAI class FilePathAnnotation < OpenAI::Internal::Type::BaseModel attr_accessor end_index: Integer - attr_accessor file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath + attr_accessor file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath attr_accessor start_index: Integer @@ -24,22 +24,18 @@ module OpenAI def initialize: ( end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath, + file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath, start_index: Integer, text: String, ?type: :file_path ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::file_path_annotation - type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::FilePathAnnotation::file_path end end end diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs index 671f758d..c75d696a 100644 --- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs @@ -7,7 +7,7 @@ module OpenAI index: Integer, type: :file_path, end_index: Integer, - file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, + file_path: OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, start_index: Integer, text: String } @@ -21,11 +21,11 @@ module OpenAI def end_index=: (Integer) -> Integer - attr_reader file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath? + attr_reader file_path: OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath? 
def file_path=: ( - OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath - ) -> OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath + ) -> OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath attr_reader start_index: Integer? @@ -38,14 +38,12 @@ module OpenAI def initialize: ( index: Integer, ?end_index: Integer, - ?file_path: OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, + ?file_path: OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, ?start_index: Integer, ?text: String, ?type: :file_path ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::file_path_delta_annotation - type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel @@ -54,8 +52,6 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::file_path end end end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index 8c594ae4..38e77c68 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -3,27 +3,22 @@ module OpenAI module Beta module Threads type image_file = - { - file_id: String, - detail: OpenAI::Models::Beta::Threads::ImageFile::detail - } + { file_id: String, detail: OpenAI::Beta::Threads::ImageFile::detail } class ImageFile < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String - attr_reader detail: OpenAI::Models::Beta::Threads::ImageFile::detail? + attr_reader detail: OpenAI::Beta::Threads::ImageFile::detail? def detail=: ( - OpenAI::Models::Beta::Threads::ImageFile::detail - ) -> OpenAI::Models::Beta::Threads::ImageFile::detail + OpenAI::Beta::Threads::ImageFile::detail + ) -> OpenAI::Beta::Threads::ImageFile::detail def initialize: ( file_id: String, - ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail + ?detail: OpenAI::Beta::Threads::ImageFile::detail ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::image_file - type detail = :auto | :low | :high module Detail @@ -33,7 +28,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFile::detail] + def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFile::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs index 85cbd424..103b48ac 100644 --- a/sig/openai/models/beta/threads/image_file_content_block.rbs +++ b/sig/openai/models/beta/threads/image_file_content_block.rbs @@ -3,22 +3,17 @@ module OpenAI module Beta module Threads type image_file_content_block = - { - image_file: OpenAI::Models::Beta::Threads::ImageFile, - type: :image_file - } + { image_file: OpenAI::Beta::Threads::ImageFile, type: :image_file } class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel - attr_accessor image_file: OpenAI::Models::Beta::Threads::ImageFile + attr_accessor image_file: OpenAI::Beta::Threads::ImageFile attr_accessor type: :image_file def initialize: ( - image_file: OpenAI::Models::Beta::Threads::ImageFile, + image_file: OpenAI::Beta::Threads::ImageFile, ?type: :image_file ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::image_file_content_block end end end diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 1360958e..5ed4a435 100644 --- 
a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -4,28 +4,26 @@ module OpenAI module Threads type image_file_delta = { - detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, + detail: OpenAI::Beta::Threads::ImageFileDelta::detail, file_id: String } class ImageFileDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail? + attr_reader detail: OpenAI::Beta::Threads::ImageFileDelta::detail? def detail=: ( - OpenAI::Models::Beta::Threads::ImageFileDelta::detail - ) -> OpenAI::Models::Beta::Threads::ImageFileDelta::detail + OpenAI::Beta::Threads::ImageFileDelta::detail + ) -> OpenAI::Beta::Threads::ImageFileDelta::detail attr_reader file_id: String? def file_id=: (String) -> String def initialize: ( - ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, + ?detail: OpenAI::Beta::Threads::ImageFileDelta::detail, ?file_id: String ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::image_file_delta - type detail = :auto | :low | :high module Detail @@ -35,7 +33,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::detail] + def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFileDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs index cdb0e087..7aca7085 100644 --- a/sig/openai/models/beta/threads/image_file_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs @@ -6,7 +6,7 @@ module OpenAI { index: Integer, type: :image_file, - image_file: OpenAI::Models::Beta::Threads::ImageFileDelta + image_file: OpenAI::Beta::Threads::ImageFileDelta } class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel @@ -14,19 +14,17 @@ module OpenAI attr_accessor type: :image_file - attr_reader image_file: OpenAI::Models::Beta::Threads::ImageFileDelta? + attr_reader image_file: OpenAI::Beta::Threads::ImageFileDelta? def image_file=: ( - OpenAI::Models::Beta::Threads::ImageFileDelta - ) -> OpenAI::Models::Beta::Threads::ImageFileDelta + OpenAI::Beta::Threads::ImageFileDelta + ) -> OpenAI::Beta::Threads::ImageFileDelta def initialize: ( index: Integer, - ?image_file: OpenAI::Models::Beta::Threads::ImageFileDelta, + ?image_file: OpenAI::Beta::Threads::ImageFileDelta, ?type: :image_file ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::image_file_delta_block end end end diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 90adafe5..8808afc1 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -3,27 +3,22 @@ module OpenAI module Beta module Threads type image_url = - { - url: String, - detail: OpenAI::Models::Beta::Threads::ImageURL::detail - } + { url: String, detail: OpenAI::Beta::Threads::ImageURL::detail } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Models::Beta::Threads::ImageURL::detail? + attr_reader detail: OpenAI::Beta::Threads::ImageURL::detail? 
def detail=: ( - OpenAI::Models::Beta::Threads::ImageURL::detail - ) -> OpenAI::Models::Beta::Threads::ImageURL::detail + OpenAI::Beta::Threads::ImageURL::detail + ) -> OpenAI::Beta::Threads::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail + ?detail: OpenAI::Beta::Threads::ImageURL::detail ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::image_url - type detail = :auto | :low | :high module Detail @@ -33,7 +28,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURL::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs index a8e5425f..445e5239 100644 --- a/sig/openai/models/beta/threads/image_url_content_block.rbs +++ b/sig/openai/models/beta/threads/image_url_content_block.rbs @@ -3,22 +3,17 @@ module OpenAI module Beta module Threads type image_url_content_block = - { - image_url: OpenAI::Models::Beta::Threads::ImageURL, - type: :image_url - } + { image_url: OpenAI::Beta::Threads::ImageURL, type: :image_url } class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel - attr_accessor image_url: OpenAI::Models::Beta::Threads::ImageURL + attr_accessor image_url: OpenAI::Beta::Threads::ImageURL attr_accessor type: :image_url def initialize: ( - image_url: OpenAI::Models::Beta::Threads::ImageURL, + image_url: OpenAI::Beta::Threads::ImageURL, ?type: :image_url ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::image_url_content_block end end end diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index b85ba1e9..54d6425a 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -3,29 +3,24 @@ module OpenAI module Beta module Threads type image_url_delta = - { - detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, - url: String - } + { detail: OpenAI::Beta::Threads::ImageURLDelta::detail, url: String } class ImageURLDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail? + attr_reader detail: OpenAI::Beta::Threads::ImageURLDelta::detail? def detail=: ( - OpenAI::Models::Beta::Threads::ImageURLDelta::detail - ) -> OpenAI::Models::Beta::Threads::ImageURLDelta::detail + OpenAI::Beta::Threads::ImageURLDelta::detail + ) -> OpenAI::Beta::Threads::ImageURLDelta::detail attr_reader url: String? 
def url=: (String) -> String def initialize: ( - ?detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + ?detail: OpenAI::Beta::Threads::ImageURLDelta::detail, ?url: String ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::image_url_delta - type detail = :auto | :low | :high module Detail @@ -35,7 +30,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::detail] + def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURLDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs index 4269c159..d3375983 100644 --- a/sig/openai/models/beta/threads/image_url_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs @@ -6,7 +6,7 @@ module OpenAI { index: Integer, type: :image_url, - image_url: OpenAI::Models::Beta::Threads::ImageURLDelta + image_url: OpenAI::Beta::Threads::ImageURLDelta } class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel @@ -14,19 +14,17 @@ module OpenAI attr_accessor type: :image_url - attr_reader image_url: OpenAI::Models::Beta::Threads::ImageURLDelta? + attr_reader image_url: OpenAI::Beta::Threads::ImageURLDelta? def image_url=: ( - OpenAI::Models::Beta::Threads::ImageURLDelta - ) -> OpenAI::Models::Beta::Threads::ImageURLDelta + OpenAI::Beta::Threads::ImageURLDelta + ) -> OpenAI::Beta::Threads::ImageURLDelta def initialize: ( index: Integer, - ?image_url: OpenAI::Models::Beta::Threads::ImageURLDelta, + ?image_url: OpenAI::Beta::Threads::ImageURLDelta, ?type: :image_url ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::image_url_delta_block end end end diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index 02263056..10c9bca6 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -6,17 +6,17 @@ module OpenAI { id: String, assistant_id: String?, - attachments: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment]?, + attachments: ::Array[OpenAI::Beta::Threads::Message::Attachment]?, completed_at: Integer?, content: ::Array[OpenAI::Models::Beta::Threads::message_content], created_at: Integer, incomplete_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails?, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, object: :"thread.message", - role: OpenAI::Models::Beta::Threads::Message::role, + role: OpenAI::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Models::Beta::Threads::Message::status, + status: OpenAI::Beta::Threads::Message::status, thread_id: String } @@ -25,7 +25,7 @@ module OpenAI attr_accessor assistant_id: String? - attr_accessor attachments: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment]? + attr_accessor attachments: ::Array[OpenAI::Beta::Threads::Message::Attachment]? attr_accessor completed_at: Integer? @@ -35,43 +35,41 @@ module OpenAI attr_accessor incomplete_at: Integer? - attr_accessor incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails? + attr_accessor incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails? attr_accessor metadata: OpenAI::Models::metadata? attr_accessor object: :"thread.message" - attr_accessor role: OpenAI::Models::Beta::Threads::Message::role + attr_accessor role: OpenAI::Beta::Threads::Message::role attr_accessor run_id: String? 
- attr_accessor status: OpenAI::Models::Beta::Threads::Message::status + attr_accessor status: OpenAI::Beta::Threads::Message::status attr_accessor thread_id: String def initialize: ( id: String, assistant_id: String?, - attachments: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment]?, + attachments: ::Array[OpenAI::Beta::Threads::Message::Attachment]?, completed_at: Integer?, content: ::Array[OpenAI::Models::Beta::Threads::message_content], created_at: Integer, incomplete_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails?, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, - role: OpenAI::Models::Beta::Threads::Message::role, + role: OpenAI::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Models::Beta::Threads::Message::status, + status: OpenAI::Beta::Threads::Message::status, thread_id: String, ?object: :"thread.message" ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::message - type attachment = { file_id: String, - tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -79,22 +77,20 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] - ) -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + ) -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Message::attachment - type tool = - OpenAI::Models::Beta::CodeInterpreterTool - | OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + OpenAI::Beta::CodeInterpreterTool + | OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly module Tool extend OpenAI::Internal::Type::Union @@ -106,28 +102,24 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Message::Attachment::Tool::assistant_tools_file_search_type_only end - def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] end end type incomplete_details = { - reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_accessor reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + attr_accessor reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason def initialize: ( - reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Message::incomplete_details - type reason = :content_filter | :max_tokens 
@@ -144,7 +136,7 @@ module OpenAI RUN_EXPIRED: :run_expired RUN_FAILED: :run_failed - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::IncompleteDetails::reason] end end @@ -156,7 +148,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::role] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::role] end type status = :in_progress | :incomplete | :completed @@ -168,7 +160,7 @@ module OpenAI INCOMPLETE: :incomplete COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::status] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::status] end end end diff --git a/sig/openai/models/beta/threads/message_content.rbs b/sig/openai/models/beta/threads/message_content.rbs index e3bc2805..35e6ff1f 100644 --- a/sig/openai/models/beta/threads/message_content.rbs +++ b/sig/openai/models/beta/threads/message_content.rbs @@ -3,15 +3,15 @@ module OpenAI module Beta module Threads type message_content = - OpenAI::Models::Beta::Threads::ImageFileContentBlock - | OpenAI::Models::Beta::Threads::ImageURLContentBlock - | OpenAI::Models::Beta::Threads::TextContentBlock - | OpenAI::Models::Beta::Threads::RefusalContentBlock + OpenAI::Beta::Threads::ImageFileContentBlock + | OpenAI::Beta::Threads::ImageURLContentBlock + | OpenAI::Beta::Threads::TextContentBlock + | OpenAI::Beta::Threads::RefusalContentBlock module MessageContent extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::message_content] end end end diff --git a/sig/openai/models/beta/threads/message_content_delta.rbs b/sig/openai/models/beta/threads/message_content_delta.rbs index f06072ee..6c1edb1d 100644 --- a/sig/openai/models/beta/threads/message_content_delta.rbs +++ b/sig/openai/models/beta/threads/message_content_delta.rbs @@ -3,15 +3,15 @@ module OpenAI module Beta module Threads type message_content_delta = - OpenAI::Models::Beta::Threads::ImageFileDeltaBlock - | OpenAI::Models::Beta::Threads::TextDeltaBlock - | OpenAI::Models::Beta::Threads::RefusalDeltaBlock - | OpenAI::Models::Beta::Threads::ImageURLDeltaBlock + OpenAI::Beta::Threads::ImageFileDeltaBlock + | OpenAI::Beta::Threads::TextDeltaBlock + | OpenAI::Beta::Threads::RefusalDeltaBlock + | OpenAI::Beta::Threads::ImageURLDeltaBlock module MessageContentDelta extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::message_content_delta] end end end diff --git a/sig/openai/models/beta/threads/message_content_part_param.rbs b/sig/openai/models/beta/threads/message_content_part_param.rbs index 1e8a7d91..802d8b17 100644 --- a/sig/openai/models/beta/threads/message_content_part_param.rbs +++ b/sig/openai/models/beta/threads/message_content_part_param.rbs @@ -3,14 +3,14 @@ module OpenAI module Beta module Threads type message_content_part_param = - OpenAI::Models::Beta::Threads::ImageFileContentBlock - | 
OpenAI::Models::Beta::Threads::ImageURLContentBlock - | OpenAI::Models::Beta::Threads::TextContentBlockParam + OpenAI::Beta::Threads::ImageFileContentBlock + | OpenAI::Beta::Threads::ImageURLContentBlock + | OpenAI::Beta::Threads::TextContentBlockParam module MessageContentPartParam extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] end end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index fd5c2c49..48d32702 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -6,7 +6,7 @@ module OpenAI { content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, - attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, + attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]?, metadata: OpenAI::Models::metadata? } & OpenAI::Internal::Type::request_parameters @@ -19,20 +19,18 @@ module OpenAI attr_accessor role: OpenAI::Models::Beta::Threads::MessageCreateParams::role - attr_accessor attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]? + attr_accessor attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, - ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, + ?attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]?, ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::message_create_params - type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -40,7 +38,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -59,7 +57,7 @@ module OpenAI type attachment = { file_id: String, - tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -67,22 +65,20 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] - ) -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + ) -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + ?tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::attachment - type tool = - OpenAI::Models::Beta::CodeInterpreterTool - | OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool + | OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch module Tool extend OpenAI::Internal::Type::Union @@ -93,11 +89,9 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::file_search end - def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] end end end diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index 9abbe1c5..9edbd8b5 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -15,8 +15,6 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::message_delete_params end end end diff --git a/sig/openai/models/beta/threads/message_deleted.rbs b/sig/openai/models/beta/threads/message_deleted.rbs index d1356267..d3b0da8c 100644 --- a/sig/openai/models/beta/threads/message_deleted.rbs +++ b/sig/openai/models/beta/threads/message_deleted.rbs @@ -17,8 +17,6 @@ module OpenAI deleted: bool, ?object: :"thread.message.deleted" ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::message_deleted end end end diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index e74b69e1..d953aeb6 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -5,7 +5,7 @@ module OpenAI type message_delta = { content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - role: OpenAI::Models::Beta::Threads::MessageDelta::role + role: OpenAI::Beta::Threads::MessageDelta::role } class MessageDelta < OpenAI::Internal::Type::BaseModel @@ -15,19 +15,17 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::message_content_delta] ) -> ::Array[OpenAI::Models::Beta::Threads::message_content_delta] - attr_reader role: OpenAI::Models::Beta::Threads::MessageDelta::role? + attr_reader role: OpenAI::Beta::Threads::MessageDelta::role? 
def role=: ( - OpenAI::Models::Beta::Threads::MessageDelta::role - ) -> OpenAI::Models::Beta::Threads::MessageDelta::role + OpenAI::Beta::Threads::MessageDelta::role + ) -> OpenAI::Beta::Threads::MessageDelta::role def initialize: ( ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - ?role: OpenAI::Models::Beta::Threads::MessageDelta::role + ?role: OpenAI::Beta::Threads::MessageDelta::role ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::message_delta - type role = :user | :assistant module Role @@ -36,7 +34,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageDelta::role] + def self?.values: -> ::Array[OpenAI::Beta::Threads::MessageDelta::role] end end end diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs index 553d6fef..d84d3446 100644 --- a/sig/openai/models/beta/threads/message_delta_event.rbs +++ b/sig/openai/models/beta/threads/message_delta_event.rbs @@ -5,24 +5,22 @@ module OpenAI type message_delta_event = { id: String, - delta: OpenAI::Models::Beta::Threads::MessageDelta, + delta: OpenAI::Beta::Threads::MessageDelta, object: :"thread.message.delta" } class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor delta: OpenAI::Models::Beta::Threads::MessageDelta + attr_accessor delta: OpenAI::Beta::Threads::MessageDelta attr_accessor object: :"thread.message.delta" def initialize: ( id: String, - delta: OpenAI::Models::Beta::Threads::MessageDelta, + delta: OpenAI::Beta::Threads::MessageDelta, ?object: :"thread.message.delta" ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::message_delta_event end end end diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index 2cdda317..eede9b56 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -47,8 +47,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::message_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index c6ff2a71..847e3c4c 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -15,8 +15,6 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::message_retrieve_params end end end diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index 4f14f212..38806f27 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -19,8 +19,6 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::message_update_params end end end diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs index c91e1603..98c6625d 100644 --- a/sig/openai/models/beta/threads/refusal_content_block.rbs +++ b/sig/openai/models/beta/threads/refusal_content_block.rbs @@ -10,8 +10,6 @@ module OpenAI attr_accessor type: 
:refusal def initialize: (refusal: String, ?type: :refusal) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::refusal_content_block end end end diff --git a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs index 6fc97486..c6f86524 100644 --- a/sig/openai/models/beta/threads/refusal_delta_block.rbs +++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs @@ -19,8 +19,6 @@ module OpenAI ?refusal: String, ?type: :refusal ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::refusal_delta_block end end end diff --git a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs index b2cd22a2..5fcd15c2 100644 --- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs +++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs @@ -5,25 +5,23 @@ module OpenAI type required_action_function_tool_call = { id: String, - function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + function: OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, type: :function } class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function + attr_accessor function: OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function attr_accessor type: :function def initialize: ( id: String, - function: OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function, + function: OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, ?type: :function ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::required_action_function_tool_call - type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -32,8 +30,6 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::function end end end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 6fb8ca5b..0d4ed055 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -11,24 +11,24 @@ module OpenAI created_at: Integer, expires_at: Integer?, failed_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails?, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails?, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError?, + last_error: OpenAI::Beta::Threads::Run::LastError?, max_completion_tokens: Integer?, max_prompt_tokens: Integer?, metadata: OpenAI::Models::metadata?, model: String, object: :"thread.run", parallel_tool_calls: bool, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction?, + required_action: OpenAI::Beta::Threads::Run::RequiredAction?, response_format: OpenAI::Models::Beta::assistant_response_format_option?, started_at: Integer?, status: OpenAI::Models::Beta::Threads::run_status, thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy?, - usage: OpenAI::Models::Beta::Threads::Run::Usage?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + usage: 
OpenAI::Beta::Threads::Run::Usage?, temperature: Float?, top_p: Float? } @@ -48,11 +48,11 @@ module OpenAI attr_accessor failed_at: Integer? - attr_accessor incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails? + attr_accessor incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails? attr_accessor instructions: String - attr_accessor last_error: OpenAI::Models::Beta::Threads::Run::LastError? + attr_accessor last_error: OpenAI::Beta::Threads::Run::LastError? attr_accessor max_completion_tokens: Integer? @@ -66,7 +66,7 @@ module OpenAI attr_accessor parallel_tool_calls: bool - attr_accessor required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction? + attr_accessor required_action: OpenAI::Beta::Threads::Run::RequiredAction? attr_accessor response_format: OpenAI::Models::Beta::assistant_response_format_option? @@ -80,9 +80,9 @@ module OpenAI attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool] - attr_accessor truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy? + attr_accessor truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy? - attr_accessor usage: OpenAI::Models::Beta::Threads::Run::Usage? + attr_accessor usage: OpenAI::Beta::Threads::Run::Usage? attr_accessor temperature: Float? @@ -96,48 +96,42 @@ module OpenAI created_at: Integer, expires_at: Integer?, failed_at: Integer?, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails?, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails?, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError?, + last_error: OpenAI::Beta::Threads::Run::LastError?, max_completion_tokens: Integer?, max_prompt_tokens: Integer?, metadata: OpenAI::Models::metadata?, model: String, parallel_tool_calls: bool, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction?, + required_action: OpenAI::Beta::Threads::Run::RequiredAction?, response_format: OpenAI::Models::Beta::assistant_response_format_option?, started_at: Integer?, status: OpenAI::Models::Beta::Threads::run_status, thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy?, - usage: OpenAI::Models::Beta::Threads::Run::Usage?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + usage: OpenAI::Beta::Threads::Run::Usage?, ?temperature: Float?, ?top_p: Float?, ?object: :"thread.run" ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::run - type incomplete_details = - { - reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason - } + { reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_reader reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason? + attr_reader reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason? 
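A brief consumption sketch for the nilable `incomplete_details` reader above; `run` is assumed to be an `OpenAI::Beta::Threads::Run`, and the reason values come from the enum that follows.

    # Sketch: surface why a run stopped early. `reason` is
    # :max_completion_tokens or :max_prompt_tokens per the Reason enum below.
    if (details = run.incomplete_details)
      warn "run ended early: #{details.reason}"
    end
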
def reason=: ( - OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason - ) -> OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + OpenAI::Beta::Threads::Run::IncompleteDetails::reason + ) -> OpenAI::Beta::Threads::Run::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + ?reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Run::incomplete_details - type reason = :max_completion_tokens | :max_prompt_tokens module Reason @@ -146,28 +140,26 @@ module OpenAI MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::IncompleteDetails::reason] end end type last_error = { - code: OpenAI::Models::Beta::Threads::Run::LastError::code, + code: OpenAI::Beta::Threads::Run::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Models::Beta::Threads::Run::LastError::code + attr_accessor code: OpenAI::Beta::Threads::Run::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Models::Beta::Threads::Run::LastError::code, + code: OpenAI::Beta::Threads::Run::LastError::code, message: String ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Run::last_error - type code = :server_error | :rate_limit_exceeded | :invalid_prompt module Code @@ -177,62 +169,56 @@ module OpenAI RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::LastError::code] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::LastError::code] end end type required_action = { - submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + submit_tool_outputs: OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, type: :submit_tool_outputs } class RequiredAction < OpenAI::Internal::Type::BaseModel - attr_accessor submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs + attr_accessor submit_tool_outputs: OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs attr_accessor type: :submit_tool_outputs def initialize: ( - submit_tool_outputs: OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + submit_tool_outputs: OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, ?type: :submit_tool_outputs ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Run::required_action - type submit_tool_outputs = { - tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] + tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] } class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel - attr_accessor tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] + attr_accessor tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] def initialize: ( - tool_calls: ::Array[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] + tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Run::RequiredAction::submit_tool_outputs end end type truncation_strategy = { - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + type: 
OpenAI::Beta::Threads::Run::TruncationStrategy::type_, last_messages: Integer? } class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_ + attr_accessor type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_ attr_accessor last_messages: Integer? def initialize: ( - type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, ?last_messages: Integer? ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Run::truncation_strategy - type type_ = :auto | :last_messages module Type @@ -241,7 +227,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::TruncationStrategy::type_] end end @@ -264,8 +250,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Run::usage end end end diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 4c384a32..03525bb2 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -15,8 +15,6 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::run_cancel_params end end end diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index da318adb..ab0e1ca8 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -7,7 +7,7 @@ module OpenAI assistant_id: String, include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], additional_instructions: String?, - additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, instructions: String?, max_completion_tokens: Integer?, max_prompt_tokens: Integer?, @@ -20,7 +20,7 @@ module OpenAI tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy? + truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? } & OpenAI::Internal::Type::request_parameters @@ -38,7 +38,7 @@ module OpenAI attr_accessor additional_instructions: String? - attr_accessor additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]? + attr_accessor additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]? attr_accessor instructions: String? @@ -66,13 +66,13 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy? + attr_accessor truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? 
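A sketch of populating the run-create params above; the resource path and IDs are illustrative assumptions, while the `truncation_strategy` shape follows the TruncationStrategy signature later in this file.

    # Sketch: create a run with a truncation strategy (type :auto or
    # :last_messages per the RBS below). `client` and IDs are placeholders.
    client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      additional_instructions: "Answer concisely.",
      truncation_strategy: {type: :last_messages, last_messages: 10}
    )
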
def initialize: ( assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?additional_instructions: String?, - ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, ?instructions: String?, ?max_completion_tokens: Integer?, ?max_prompt_tokens: Integer?, @@ -85,38 +85,34 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::run_create_params - type additional_message = { - content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, - attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, + content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, metadata: OpenAI::Models::metadata? } class AdditionalMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content + attr_accessor content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content - attr_accessor role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role + attr_accessor role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role - attr_accessor attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]? + attr_accessor attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, - ?attachments: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, + content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + ?attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, ?metadata: OpenAI::Models::metadata? 
) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::additional_message - type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -124,7 +120,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Beta::Threads::message_content_part_param]] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -137,13 +133,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role] + def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -151,22 +147,20 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] - ) -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ) -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ?tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::attachment - type tool = - OpenAI::Models::Beta::CodeInterpreterTool - | OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + OpenAI::Beta::CodeInterpreterTool + | OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch module Tool extend OpenAI::Internal::Type::Union @@ -177,11 +171,9 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::file_search end - def self?.variants: -> [OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] end end end @@ -191,27 +183,25 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::chat_model] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::model] end type truncation_strategy = { - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, last_messages: Integer? 
} class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_ + attr_accessor type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_ attr_accessor last_messages: Integer? def initialize: ( - type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, ?last_messages: Integer? ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::RunCreateParams::truncation_strategy - type type_ = :auto | :last_messages module Type @@ -220,7 +210,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_] end end end diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index 42825950..de898e20 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -41,8 +41,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::run_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index ddbeaf2b..c9efe99b 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -15,8 +15,6 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::run_retrieve_params end end end diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 294478bc..4e87b641 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -5,7 +5,7 @@ module OpenAI type run_submit_tool_outputs_params = { thread_id: String, - tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] + tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } & OpenAI::Internal::Type::request_parameters @@ -15,16 +15,14 @@ module OpenAI attr_accessor thread_id: String - attr_accessor tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] + attr_accessor tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] def initialize: ( thread_id: String, - tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::run_submit_tool_outputs_params - type tool_output = { output: String, tool_call_id: String } class ToolOutput < OpenAI::Internal::Type::BaseModel @@ -37,8 +35,6 @@ module OpenAI def tool_call_id=: (String) -> String def initialize: (?output: String, ?tool_call_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::tool_output end end end diff --git a/sig/openai/models/beta/threads/run_update_params.rbs 
b/sig/openai/models/beta/threads/run_update_params.rbs index 8ea338f3..28ff20c9 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -19,8 +19,6 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::run_update_params end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs index e002868b..35175f5b 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs @@ -20,8 +20,6 @@ module OpenAI ?logs: String, ?type: :logs ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_logs end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs index e05b9326..a90448ca 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs @@ -7,7 +7,7 @@ module OpenAI { index: Integer, type: :image, - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + image: OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel @@ -15,20 +15,18 @@ module OpenAI attr_accessor type: :image - attr_reader image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image? + attr_reader image: OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image? def image=: ( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image - ) -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + ) -> OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image def initialize: ( index: Integer, - ?image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + ?image: OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, ?type: :image ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_output_image - type image = { file_id: String } class Image < OpenAI::Internal::Type::BaseModel @@ -37,8 +35,6 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::image end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index 6ded6a40..a5051e0b 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -6,46 +6,42 @@ module OpenAI type code_interpreter_tool_call = { id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, type: :code_interpreter } class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter + attr_accessor code_interpreter: 
OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter attr_accessor type: :code_interpreter def initialize: ( id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, ?type: :code_interpreter ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call - type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_accessor input: String - attr_accessor outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + attr_accessor outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] def initialize: ( input: String, - outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::code_interpreter - type output = - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs - | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs + | OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image module Output extend OpenAI::Internal::Type::Union @@ -58,40 +54,34 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::logs end type image = { - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + image: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, type: :image } class Image < OpenAI::Internal::Type::BaseModel - attr_accessor image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image + attr_accessor image: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image attr_accessor type: :image def initialize: ( - image: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + image: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, ?type: :image ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::image - type image = { file_id: String } class Image < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::image end end - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] end end 
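A dispatch sketch over the `Output::Logs | Output::Image` union above; `tool_call` is assumed to be a `CodeInterpreterToolCall` taken from a run step's tool calls.

    # Sketch: handle each code-interpreter output variant. Logs carries a
    # `logs` String; Image wraps a `file_id`, per the signatures above.
    tool_call.code_interpreter.outputs.each do |output|
      case output
      when OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs
        puts output.logs
      when OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image
        puts output.image.file_id
      end
    end
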
end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index 632bed4c..e15f351d 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -8,7 +8,7 @@ module OpenAI index: Integer, type: :code_interpreter, id: String, - code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel @@ -20,25 +20,23 @@ module OpenAI def id=: (String) -> String - attr_reader code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter? + attr_reader code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter? def code_interpreter=: ( - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter - ) -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + ) -> OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter def initialize: ( index: Integer, ?id: String, - ?code_interpreter: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + ?code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, ?type: :code_interpreter ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::code_interpreter_tool_call_delta - type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -46,27 +44,25 @@ module OpenAI def input=: (String) -> String - attr_reader outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? + attr_reader outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? 
def outputs=: ( - ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] - ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ) -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] def initialize: ( ?input: String, - ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ?outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::code_interpreter - type output = - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs - | OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs + | OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage module Output extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index c0437f96..5c49e956 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -6,69 +6,63 @@ module OpenAI type file_search_tool_call = { id: String, - file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + file_search: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, type: :file_search } class FileSearchToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch + attr_accessor file_search: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch attr_accessor type: :file_search def initialize: ( id: String, - file_search: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + file_search: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, ?type: :file_search ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::file_search_tool_call - type file_search = { - ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } class FileSearch < OpenAI::Internal::Type::BaseModel - attr_reader ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions? + attr_reader ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions? 
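A reading sketch for the file-search fields above; `tool_call` is an assumed `FileSearchToolCall` instance, and both readers are nilable per the RBS.

    # Sketch: inspect ranking options and results from a file-search call.
    # `tool_call` is a placeholder; both fields may be nil.
    fs = tool_call.file_search
    if (opts = fs.ranking_options)
      puts "ranker=#{opts.ranker} threshold=#{opts.score_threshold}"
    end
    fs.results&.each { |result| puts "#{result.file_name}: #{result.score}" }
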
def ranking_options=: ( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions - ) -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions + ) -> OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions - attr_reader results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]? + attr_reader results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result]? def results=: ( - ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] - ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + ) -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] def initialize: ( - ?ranking_options: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, - ?results: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + ?ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + ?results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::file_search - type ranking_options = { - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_accessor ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker + attr_accessor ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker attr_accessor score_threshold: Float def initialize: ( - ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::ranking_options - type ranker = :auto | :default_2024_08_21 module Ranker @@ -77,7 +71,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] end end @@ -86,7 +80,7 @@ module OpenAI file_id: String, file_name: String, score: Float, - content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } class Result < OpenAI::Internal::Type::BaseModel @@ -96,25 +90,23 @@ module OpenAI attr_accessor score: Float - attr_reader content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]? + attr_reader content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content]? 
def content=: ( - ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] - ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + ) -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] def initialize: ( file_id: String, file_name: String, score: Float, - ?content: ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + ?content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::result - type content = { text: String, - type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ } class Content < OpenAI::Internal::Type::BaseModel @@ -122,19 +114,17 @@ module OpenAI def text=: (String) -> String - attr_reader type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? + attr_reader type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? def type=: ( - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ - ) -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ) -> OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ def initialize: ( ?text: String, - ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ?type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::content - type type_ = :text module Type @@ -142,7 +132,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs index 563a191d..15b045e3 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs @@ -23,8 +23,6 @@ module OpenAI ?id: String, ?type: :file_search ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::file_search_tool_call_delta end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs index 324286aa..cb76a0f6 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs @@ -6,25 +6,23 @@ module OpenAI type function_tool_call = { id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, + function: OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, type: :function } class FunctionToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor function: 
OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function + attr_accessor function: OpenAI::Beta::Threads::Runs::FunctionToolCall::Function attr_accessor type: :function def initialize: ( id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function, + function: OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, ?type: :function ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::function_tool_call - type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -39,8 +37,6 @@ module OpenAI name: String, output: String? ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::function end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs index 0f96b130..44cf047c 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs @@ -8,7 +8,7 @@ module OpenAI index: Integer, type: :function, id: String, - function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function + function: OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function } class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel @@ -20,21 +20,19 @@ module OpenAI def id=: (String) -> String - attr_reader function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function? + attr_reader function: OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function? def function=: ( - OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function - ) -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function + ) -> OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function def initialize: ( index: Integer, ?id: String, - ?function: OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, + ?function: OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function, ?type: :function ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::function_tool_call_delta - type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -53,8 +51,6 @@ module OpenAI ?name: String, ?output: String? 
) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::function end end end diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs index f7300f72..0ec0c19f 100644 --- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs @@ -5,30 +5,26 @@ module OpenAI module Runs type message_creation_step_details = { - message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + message_creation: OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, type: :message_creation } class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel - attr_accessor message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation + attr_accessor message_creation: OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation attr_accessor type: :message_creation def initialize: ( - message_creation: OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + message_creation: OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, ?type: :message_creation ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::message_creation_step_details - type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel attr_accessor message_id: String def initialize: (message_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::message_creation end end end diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 952766af..164ba811 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -14,15 +14,15 @@ module OpenAI created_at: Integer, expired_at: Integer?, failed_at: Integer?, - last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError?, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, metadata: OpenAI::Models::metadata?, object: :"thread.run.step", run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, - usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage? + type: OpenAI::Beta::Threads::Runs::RunStep::type_, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? } class RunStep < OpenAI::Internal::Type::BaseModel @@ -40,7 +40,7 @@ module OpenAI attr_accessor failed_at: Integer? - attr_accessor last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError? + attr_accessor last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError? attr_accessor metadata: OpenAI::Models::metadata? 
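A consumption sketch for the RunStep fields being renamed in these hunks; the `steps.list` resource call, the IDs, and the `:failed` status symbol are assumptions for illustration, while `last_error` and `usage` follow the signatures in this file.

    # Sketch: list run steps and report failures or token usage. Error codes
    # are :server_error or :rate_limit_exceeded per the Code enum below.
    client.beta.threads.runs.steps.list("run_abc123", thread_id: "thread_abc123").each do |step|
      if step.status == :failed && (err = step.last_error)
        warn "#{err.code}: #{err.message}"
      elsif (usage = step.usage)
        puts "step #{step.id}: #{usage.total_tokens} tokens"
      end
    end
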
@@ -48,15 +48,15 @@ module OpenAI attr_accessor run_id: String - attr_accessor status: OpenAI::Models::Beta::Threads::Runs::RunStep::status + attr_accessor status: OpenAI::Beta::Threads::Runs::RunStep::status - attr_accessor step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details + attr_accessor step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details attr_accessor thread_id: String - attr_accessor type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_ + attr_accessor type: OpenAI::Beta::Threads::Runs::RunStep::type_ - attr_accessor usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage? + attr_accessor usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? def initialize: ( id: String, @@ -66,37 +66,33 @@ module OpenAI created_at: Integer, expired_at: Integer?, failed_at: Integer?, - last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError?, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, metadata: OpenAI::Models::metadata?, run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, - usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage?, + type: OpenAI::Beta::Threads::Runs::RunStep::type_, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage?, ?object: :"thread.run.step" ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step - type last_error = { - code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code + attr_accessor code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, message: String ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStep::last_error - type code = :server_error | :rate_limit_exceeded module Code @@ -105,7 +101,7 @@ module OpenAI SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::LastError::code] end end @@ -121,17 +117,17 @@ module OpenAI COMPLETED: :completed EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::status] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::status] end type step_details = - OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails - | OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails + | OpenAI::Beta::Threads::Runs::ToolCallsStepDetails module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::step_details] end type type_ = :message_creation | :tool_calls @@ -142,7 +138,7 @@ module OpenAI MESSAGE_CREATION: 
:message_creation TOOL_CALLS: :tool_calls - def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::type_] + def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::type_] end type usage = @@ -164,8 +160,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStep::usage end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index 1f3143ee..cd977802 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -7,30 +7,28 @@ module OpenAI module Runs type run_step_delta = { - step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details } class RunStepDelta < OpenAI::Internal::Type::BaseModel - attr_reader step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details? + attr_reader step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details? def step_details=: ( - OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details - ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + ) -> OpenAI::Beta::Threads::Runs::RunStepDelta::step_details def initialize: ( - ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + ?step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta - type step_details = - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta - | OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta + | OpenAI::Beta::Threads::Runs::ToolCallDeltaObject module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] + def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStepDelta::step_details] end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index 0300ea33..a0e9f5e8 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -8,24 +8,22 @@ module OpenAI type run_step_delta_event = { id: String, - delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, + delta: OpenAI::Beta::Threads::Runs::RunStepDelta, object: :"thread.run.step.delta" } class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta + attr_accessor delta: OpenAI::Beta::Threads::Runs::RunStepDelta attr_accessor object: :"thread.run.step.delta" def initialize: ( id: String, - delta: OpenAI::Models::Beta::Threads::Runs::RunStepDelta, + delta: OpenAI::Beta::Threads::Runs::RunStepDelta, ?object: :"thread.run.step.delta" ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta_event end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index b397b858..dae61d3d 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ 
b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -8,25 +8,23 @@ module OpenAI type run_step_delta_message_delta = { type: :message_creation, - message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + message_creation: OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation } class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel attr_accessor type: :message_creation - attr_reader message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation? + attr_reader message_creation: OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation? def message_creation=: ( - OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation - ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + ) -> OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation def initialize: ( - ?message_creation: OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + ?message_creation: OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, ?type: :message_creation ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::run_step_delta_message_delta - type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -35,8 +33,6 @@ module OpenAI def message_id=: (String) -> String def initialize: (?message_id: String) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::message_creation end end end diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index 5fe683ff..786d087f 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -54,8 +54,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::step_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index c759864c..414a4b61 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -31,8 +31,6 @@ module OpenAI ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::step_retrieve_params end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call.rbs b/sig/openai/models/beta/threads/runs/tool_call.rbs index f3604833..4337520d 100644 --- a/sig/openai/models/beta/threads/runs/tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call.rbs @@ -4,14 +4,14 @@ module OpenAI module Threads module Runs type tool_call = - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall - | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall - | OpenAI::Models::Beta::Threads::Runs::FunctionToolCall + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall + | OpenAI::Beta::Threads::Runs::FileSearchToolCall + | OpenAI::Beta::Threads::Runs::FunctionToolCall module ToolCall extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, 
OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call] end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs index f5159374..9ab47252 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta.rbs @@ -4,14 +4,14 @@ module OpenAI module Threads module Runs type tool_call_delta = - OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta - | OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta - | OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta + | OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta + | OpenAI::Beta::Threads::Runs::FunctionToolCallDelta module ToolCallDelta extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta] end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs index f693e116..2b6aefef 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs @@ -22,8 +22,6 @@ module OpenAI ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta], ?type: :tool_calls ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::tool_call_delta_object end end end diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs index c08bece5..b5aaf17e 100644 --- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs @@ -18,8 +18,6 @@ module OpenAI tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], ?type: :tool_calls ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::Runs::tool_calls_step_details end end end diff --git a/sig/openai/models/beta/threads/text.rbs b/sig/openai/models/beta/threads/text.rbs index a4aa3141..a6585bbd 100644 --- a/sig/openai/models/beta/threads/text.rbs +++ b/sig/openai/models/beta/threads/text.rbs @@ -17,8 +17,6 @@ module OpenAI annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], value: String ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::text end end end diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs index 2bcf7f2a..5192465f 100644 --- a/sig/openai/models/beta/threads/text_content_block.rbs +++ b/sig/openai/models/beta/threads/text_content_block.rbs @@ -3,19 +3,17 @@ module OpenAI module Beta module Threads type text_content_block = - { text: OpenAI::Models::Beta::Threads::Text, type: :text } + { text: OpenAI::Beta::Threads::Text, type: :text } class TextContentBlock < OpenAI::Internal::Type::BaseModel - attr_accessor text: OpenAI::Models::Beta::Threads::Text + attr_accessor text: OpenAI::Beta::Threads::Text attr_accessor type: :text def initialize: ( - text: OpenAI::Models::Beta::Threads::Text, + text: 
OpenAI::Beta::Threads::Text, ?type: :text ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::text_content_block end end end diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs index 291be17d..7b4be77b 100644 --- a/sig/openai/models/beta/threads/text_content_block_param.rbs +++ b/sig/openai/models/beta/threads/text_content_block_param.rbs @@ -10,8 +10,6 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::text_content_block_param end end end diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs index 0b5ac945..9bad2d71 100644 --- a/sig/openai/models/beta/threads/text_delta.rbs +++ b/sig/openai/models/beta/threads/text_delta.rbs @@ -23,8 +23,6 @@ module OpenAI ?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], ?value: String ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::text_delta end end end diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs index 8d70d40b..261d3fc6 100644 --- a/sig/openai/models/beta/threads/text_delta_block.rbs +++ b/sig/openai/models/beta/threads/text_delta_block.rbs @@ -6,7 +6,7 @@ module OpenAI { index: Integer, type: :text, - text: OpenAI::Models::Beta::Threads::TextDelta + text: OpenAI::Beta::Threads::TextDelta } class TextDeltaBlock < OpenAI::Internal::Type::BaseModel @@ -14,19 +14,17 @@ module OpenAI attr_accessor type: :text - attr_reader text: OpenAI::Models::Beta::Threads::TextDelta? + attr_reader text: OpenAI::Beta::Threads::TextDelta? def text=: ( - OpenAI::Models::Beta::Threads::TextDelta - ) -> OpenAI::Models::Beta::Threads::TextDelta + OpenAI::Beta::Threads::TextDelta + ) -> OpenAI::Beta::Threads::TextDelta def initialize: ( index: Integer, - ?text: OpenAI::Models::Beta::Threads::TextDelta, + ?text: OpenAI::Beta::Threads::TextDelta, ?type: :text ) -> void - - def to_hash: -> OpenAI::Models::Beta::Threads::text_delta_block end end end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 8f3b42e4..b717a8ba 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -6,19 +6,19 @@ module OpenAI type chat_completion = { id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice], + choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], created: Integer, model: String, object: :"chat.completion", - service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, + service_tier: OpenAI::Chat::ChatCompletion::service_tier?, system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage + usage: OpenAI::CompletionUsage } class ChatCompletion < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice] + attr_accessor choices: ::Array[OpenAI::Chat::ChatCompletion::Choice] attr_accessor created: Integer @@ -26,57 +26,51 @@ module OpenAI attr_accessor object: :"chat.completion" - attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier? + attr_accessor service_tier: OpenAI::Chat::ChatCompletion::service_tier? attr_reader system_fingerprint: String? def system_fingerprint=: (String) -> String - attr_reader usage: OpenAI::Models::CompletionUsage? + attr_reader usage: OpenAI::CompletionUsage? 
- def usage=: ( - OpenAI::Models::CompletionUsage - ) -> OpenAI::Models::CompletionUsage + def usage=: (OpenAI::CompletionUsage) -> OpenAI::CompletionUsage def initialize: ( id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletion::Choice], + choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, + ?service_tier: OpenAI::Chat::ChatCompletion::service_tier?, ?system_fingerprint: String, - ?usage: OpenAI::Models::CompletionUsage, + ?usage: OpenAI::CompletionUsage, ?object: :"chat.completion" ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion - type choice = { - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, index: Integer, - logprobs: OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs?, - message: OpenAI::Models::Chat::ChatCompletionMessage + logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, + message: OpenAI::Chat::ChatCompletionMessage } class Choice < OpenAI::Internal::Type::BaseModel - attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason + attr_accessor finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason attr_accessor index: Integer - attr_accessor logprobs: OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs? + attr_accessor logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs? - attr_accessor message: OpenAI::Models::Chat::ChatCompletionMessage + attr_accessor message: OpenAI::Chat::ChatCompletionMessage def initialize: ( - finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, index: Integer, - logprobs: OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs?, - message: OpenAI::Models::Chat::ChatCompletionMessage + logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, + message: OpenAI::Chat::ChatCompletionMessage ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletion::choice - type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call @@ -89,26 +83,24 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletion::Choice::finish_reason] end type logprobs = { - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? } class Logprobs < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + attr_accessor content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? - attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + attr_accessor refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? def initialize: ( - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? 
) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletion::Choice::logprobs end end @@ -121,7 +113,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletion::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index 6016de8d..bf03ff8c 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -6,22 +6,22 @@ module OpenAI type chat_completion_assistant_message_param = { role: :assistant, - audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?, - content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, - function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, + audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, + content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, name: String, refusal: String?, - tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] } class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel attr_accessor role: :assistant - attr_accessor audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio? + attr_accessor audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio? - attr_accessor content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content? + attr_accessor content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content? - attr_accessor function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall? + attr_accessor function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall? attr_reader name: String? @@ -29,52 +29,48 @@ module OpenAI attr_accessor refusal: String? - attr_reader tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]? + attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall]? 
def tool_calls=: ( - ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] - ) -> ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] + ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + ) -> ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] def initialize: ( - ?audio: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio?, - ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, - ?function_call: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, + ?audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, + ?content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + ?function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, ?name: String, ?refusal: String?, - ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall], ?role: :assistant ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_assistant_message_param - type audio = { id: String } class Audio < OpenAI::Internal::Type::BaseModel attr_accessor id: String def initialize: (id: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::audio end type content = String - | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + | ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] module Content extend OpenAI::Internal::Type::Union type array_of_content_part = - OpenAI::Models::Chat::ChatCompletionContentPartText - | OpenAI::Models::Chat::ChatCompletionContentPartRefusal + OpenAI::Chat::ChatCompletionContentPartText + | OpenAI::Chat::ChatCompletionContentPartRefusal module ArrayOfContentPart extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] end - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part]] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::content] ArrayOfContentPartArray: OpenAI::Internal::Type::Converter end @@ -87,8 +83,6 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::function_call end end end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index 18d4ba70..596be96d 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -21,8 +21,6 @@ module OpenAI expires_at: Integer, transcript: String ) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_audio end end end diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index fb20281b..b326faf7 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -5,22 +5,20 @@ module OpenAI module Chat type chat_completion_audio_param = { - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + format_: 
OpenAI::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Chat::ChatCompletionAudioParam::voice } class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel - attr_accessor format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_ + attr_accessor format_: OpenAI::Chat::ChatCompletionAudioParam::format_ - attr_accessor voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + attr_accessor voice: OpenAI::Chat::ChatCompletionAudioParam::voice def initialize: ( - format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + format_: OpenAI::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Chat::ChatCompletionAudioParam::voice ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_audio_param - type format_ = :wav | :aac | :mp3 | :flac | :opus | :pcm16 module Format @@ -33,7 +31,7 @@ module OpenAI OPUS: :opus PCM16: :pcm16 - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::format_] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::format_] end type voice = @@ -53,17 +51,7 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, (:alloy - | :ash - | :ballad - | :coral - | :echo - | :fable - | :onyx - | :nova - | :sage - | :shimmer - | :verse)] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::voice] ALLOY: :alloy ASH: :ash diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index aabd304a..2a451ed9 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -6,19 +6,19 @@ module OpenAI type chat_completion_chunk = { id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], + choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, object: :"chat.completion.chunk", - service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, + service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, - usage: OpenAI::Models::CompletionUsage? + usage: OpenAI::CompletionUsage? } class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice] + attr_accessor choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice] attr_accessor created: Integer @@ -26,95 +26,89 @@ module OpenAI attr_accessor object: :"chat.completion.chunk" - attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier? + attr_accessor service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier? attr_reader system_fingerprint: String? def system_fingerprint=: (String) -> String - attr_accessor usage: OpenAI::Models::CompletionUsage? + attr_accessor usage: OpenAI::CompletionUsage? 
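
[note] A second recurring change is the return type of each union's `self?.variants`, which moves from a fixed tuple type to the uniform `::Array[...]` form; the `Voice` union a little above is the starkest case, replacing the long tuple `[String, (:alloy | :ash | ...)]` with `::Array[OpenAI::Chat::ChatCompletionAudioParam::voice]`. A hedged sketch of the runtime shape this describes — the module and method names are lifted from the diff, but the body is an assumption, not the gem's implementation:

    class ChatCompletionContentPartText
    end

    class ChatCompletionContentPartRefusal
    end

    module ArrayOfContentPart
      # Returning a plain Array keeps the RBS return type stable as
      # `::Array[elem]` no matter how many variants the union gains,
      # whereas a tuple type such as `[A, B]` hard-codes the arity and
      # must be rewritten every time a variant is added or removed.
      def self.variants
        [ChatCompletionContentPartText, ChatCompletionContentPartRefusal]
      end
    end

    p ArrayOfContentPart.variants.size
    # => 2
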
def initialize: ( id: String, - choices: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice], + choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, + ?service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, ?system_fingerprint: String, - ?usage: OpenAI::Models::CompletionUsage?, + ?usage: OpenAI::CompletionUsage?, ?object: :"chat.completion.chunk" ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_chunk - type choice = { - delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, - logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? + logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? } class Choice < OpenAI::Internal::Type::BaseModel - attr_accessor delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta + attr_accessor delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta - attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason? + attr_accessor finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason? attr_accessor index: Integer - attr_accessor logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? + attr_accessor logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? def initialize: ( - delta: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, - ?logprobs: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs? + ?logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::choice - type delta = { content: String?, - function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: String?, - role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, - tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } class Delta < OpenAI::Internal::Type::BaseModel attr_accessor content: String? - attr_reader function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall? + attr_reader function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall? def function_call=: ( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall - ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall + ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall attr_accessor refusal: String? - attr_reader role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role? + attr_reader role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role? 
def role=: ( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role - ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role + ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role - attr_reader tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]? + attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]? def tool_calls=: ( - ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] - ) -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + ) -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] def initialize: ( ?content: String?, - ?function_call: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + ?function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, ?refusal: String?, - ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, - ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + ?role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::delta - type function_call = { arguments: String, name: String } class FunctionCall < OpenAI::Internal::Type::BaseModel @@ -127,8 +121,6 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, ?name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::function_call end type role = :developer | :system | :user | :assistant | :tool @@ -142,15 +134,15 @@ module OpenAI ASSISTANT: :assistant TOOL: :tool - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role] end type tool_call = { index: Integer, id: String, - function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ } class ToolCall < OpenAI::Internal::Type::BaseModel @@ -160,27 +152,25 @@ module OpenAI def id=: (String) -> String - attr_reader function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function? + attr_reader function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function? def function=: ( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function - ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function + ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function - attr_reader type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? + attr_reader type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? 
def type=: ( - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ - ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ def initialize: ( index: Integer, ?id: String, - ?function: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ?function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + ?type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::tool_call - type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -193,8 +183,6 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, ?name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::function end type type_ = :function @@ -204,7 +192,7 @@ module OpenAI FUNCTION: :function - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] end end end @@ -221,26 +209,24 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason] end type logprobs = { - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? } class Logprobs < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + attr_accessor content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? - attr_accessor refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + attr_accessor refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? def initialize: ( - content: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]?, - refusal: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob]? + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? 
) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::logprobs end end @@ -253,7 +239,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 470cda35..77010098 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -4,32 +4,30 @@ module OpenAI module Chat type chat_completion_content_part = - OpenAI::Models::Chat::ChatCompletionContentPartText - | OpenAI::Models::Chat::ChatCompletionContentPartImage - | OpenAI::Models::Chat::ChatCompletionContentPartInputAudio - | OpenAI::Models::Chat::ChatCompletionContentPart::File + OpenAI::Chat::ChatCompletionContentPartText + | OpenAI::Chat::ChatCompletionContentPartImage + | OpenAI::Chat::ChatCompletionContentPartInputAudio + | OpenAI::Chat::ChatCompletionContentPart::File module ChatCompletionContentPart extend OpenAI::Internal::Type::Union type file = { - file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, + file: OpenAI::Chat::ChatCompletionContentPart::File::File, type: :file } class File < OpenAI::Internal::Type::BaseModel - attr_accessor file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File + attr_accessor file: OpenAI::Chat::ChatCompletionContentPart::File::File attr_accessor type: :file def initialize: ( - file: OpenAI::Models::Chat::ChatCompletionContentPart::File::File, + file: OpenAI::Chat::ChatCompletionContentPart::File::File, ?type: :file ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPart::file - type file = { file_data: String, file_id: String, filename: String } class File < OpenAI::Internal::Type::BaseModel @@ -50,12 +48,10 @@ module OpenAI ?file_id: String, ?filename: String ) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPart::File::file end end - def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File] + def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_content_part] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index 6aa104fb..f7088321 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -5,44 +5,40 @@ module OpenAI module Chat type chat_completion_content_part_image = { - image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, + image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, type: :image_url } class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel - attr_accessor image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL + attr_accessor image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL attr_accessor type: :image_url def initialize: ( - image_url: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL, + image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, ?type: :image_url ) -> void - def to_hash: -> 
OpenAI::Models::Chat::chat_completion_content_part_image - type image_url = { url: String, - detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail? + attr_reader detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail? def detail=: ( - OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail - ) -> OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + ) -> OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + ?detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPartImage::image_url - type detail = :auto | :low | :high module Detail @@ -52,7 +48,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index e3267b32..85d1abea 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -5,40 +5,36 @@ module OpenAI module Chat type chat_completion_content_part_input_audio = { - input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, + input_audio: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, type: :input_audio } class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel - attr_accessor input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio + attr_accessor input_audio: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio attr_accessor type: :input_audio def initialize: ( - input_audio: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio, + input_audio: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, ?type: :input_audio ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_input_audio - type input_audio = { data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ } class InputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String - attr_accessor format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + attr_accessor format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ def initialize: ( data: String, - format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::input_audio - type format_ = :wav | :mp3 module Format @@ -47,7 +43,7 @@ module OpenAI WAV: :wav MP3: :mp3 - def self?.values: -> 
::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs index e7b62c6b..9845f993 100644 --- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs @@ -12,8 +12,6 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_refusal end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs index b8a43400..799d8e14 100644 --- a/sig/openai/models/chat/chat_completion_content_part_text.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs @@ -11,8 +11,6 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_content_part_text end end end diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs index 6d7e613c..eb9ae980 100644 --- a/sig/openai/models/chat/chat_completion_deleted.rbs +++ b/sig/openai/models/chat/chat_completion_deleted.rbs @@ -18,8 +18,6 @@ module OpenAI deleted: bool, ?object: :"chat.completion.deleted" ) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_deleted end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index ed1e4afb..ecf27a5d 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_developer_message_param = { - content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, role: :developer, name: String } class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content + attr_accessor content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content attr_accessor role: :developer @@ -20,20 +20,18 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, ?name: String, ?role: :developer ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_developer_message_param - type content = - String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] + String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionDeveloperMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs index fd033d17..b18d80c5 100644 --- 
a/sig/openai/models/chat/chat_completion_function_call_option.rbs +++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs @@ -9,8 +9,6 @@ module OpenAI attr_accessor name: String def initialize: (name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_function_call_option end end end diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs index e83ffd82..17944160 100644 --- a/sig/openai/models/chat/chat_completion_function_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs @@ -18,8 +18,6 @@ module OpenAI name: String, ?role: :function ) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_function_message_param end end end diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index 7fa0b355..bea71029 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ b/sig/openai/models/chat/chat_completion_message.rbs @@ -8,10 +8,10 @@ module OpenAI content: String?, refusal: String?, role: :assistant, - annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], - audio: OpenAI::Models::Chat::ChatCompletionAudio?, - function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, - tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] + annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], + audio: OpenAI::Chat::ChatCompletionAudio?, + function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] } class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel @@ -21,56 +21,52 @@ module OpenAI attr_accessor role: :assistant - attr_reader annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation]? + attr_reader annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation]? def annotations=: ( - ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] - ) -> ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] + ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation] + ) -> ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation] - attr_accessor audio: OpenAI::Models::Chat::ChatCompletionAudio? + attr_accessor audio: OpenAI::Chat::ChatCompletionAudio? - attr_reader function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall? + attr_reader function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall? def function_call=: ( - OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall - ) -> OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall + OpenAI::Chat::ChatCompletionMessage::FunctionCall + ) -> OpenAI::Chat::ChatCompletionMessage::FunctionCall - attr_reader tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall]? + attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall]? 
def tool_calls=: ( - ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] - ) -> ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall] + ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + ) -> ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] def initialize: ( content: String?, refusal: String?, - ?annotations: ::Array[OpenAI::Models::Chat::ChatCompletionMessage::Annotation], - ?audio: OpenAI::Models::Chat::ChatCompletionAudio?, - ?function_call: OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, - ?tool_calls: ::Array[OpenAI::Models::Chat::ChatCompletionMessageToolCall], + ?annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], + ?audio: OpenAI::Chat::ChatCompletionAudio?, + ?function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, + ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall], ?role: :assistant ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_message - type annotation = { type: :url_citation, - url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation + url_citation: OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation } class Annotation < OpenAI::Internal::Type::BaseModel attr_accessor type: :url_citation - attr_accessor url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation + attr_accessor url_citation: OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation def initialize: ( - url_citation: OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation, + url_citation: OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation, ?type: :url_citation ) -> void - def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::annotation - type url_citation = { end_index: Integer, @@ -94,8 +90,6 @@ module OpenAI title: String, url: String ) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::Annotation::url_citation end end @@ -107,8 +101,6 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessage::function_call end end end diff --git a/sig/openai/models/chat/chat_completion_message_param.rbs b/sig/openai/models/chat/chat_completion_message_param.rbs index e30fd658..8feb9872 100644 --- a/sig/openai/models/chat/chat_completion_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_message_param.rbs @@ -4,17 +4,17 @@ module OpenAI module Chat type chat_completion_message_param = - OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam - | OpenAI::Models::Chat::ChatCompletionSystemMessageParam - | OpenAI::Models::Chat::ChatCompletionUserMessageParam - | OpenAI::Models::Chat::ChatCompletionAssistantMessageParam - | OpenAI::Models::Chat::ChatCompletionToolMessageParam - | OpenAI::Models::Chat::ChatCompletionFunctionMessageParam + OpenAI::Chat::ChatCompletionDeveloperMessageParam + | OpenAI::Chat::ChatCompletionSystemMessageParam + | OpenAI::Chat::ChatCompletionUserMessageParam + | OpenAI::Chat::ChatCompletionAssistantMessageParam + | OpenAI::Chat::ChatCompletionToolMessageParam + | OpenAI::Chat::ChatCompletionFunctionMessageParam module ChatCompletionMessageParam extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, 
OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam] + def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_message_param] end end end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index ca90663e..d15d52bf 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -6,25 +6,23 @@ module OpenAI type chat_completion_message_tool_call = { id: String, - function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, + function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, type: :function } class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function + attr_accessor function: OpenAI::Chat::ChatCompletionMessageToolCall::Function attr_accessor type: :function def initialize: ( id: String, - function: OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function, + function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, ?type: :function ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_message_tool_call - type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -33,8 +31,6 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionMessageToolCall::function end end end diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs index 77b79236..38e0aeb5 100644 --- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs +++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs @@ -5,30 +5,26 @@ module OpenAI module Chat type chat_completion_named_tool_choice = { - function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, + function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function, type: :function } class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel - attr_accessor function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function + attr_accessor function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function attr_accessor type: :function def initialize: ( - function: OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function, + function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function, ?type: :function ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_named_tool_choice - type function = { name: String } class Function < OpenAI::Internal::Type::BaseModel attr_accessor name: String def initialize: (name: String) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionNamedToolChoice::function end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index ad9cdb21..65a4a7a6 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -5,29 +5,27 @@ module OpenAI module Chat type chat_completion_prediction_content = { - content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Chat::ChatCompletionPredictionContent::content, type: :content } class 
ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content + attr_accessor content: OpenAI::Chat::ChatCompletionPredictionContent::content attr_accessor type: :content def initialize: ( - content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Chat::ChatCompletionPredictionContent::content, ?type: :content ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_prediction_content - type content = - String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] + String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionPredictionContent::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 75e77b6c..13440ab9 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -11,8 +11,6 @@ module OpenAI def id=: (String _) -> String def initialize: (id: String) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_store_message end end end diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index bf86a211..4b0267d1 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -11,8 +11,6 @@ module OpenAI def include_usage=: (bool) -> bool def initialize: (?include_usage: bool) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_stream_options end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index 81cdf351..067f582c 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_system_message_param = { - content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, + content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, role: :system, name: String } class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content + attr_accessor content: OpenAI::Chat::ChatCompletionSystemMessageParam::content attr_accessor role: :system @@ -20,20 +20,18 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, + content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, ?name: String, ?role: :system ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_system_message_param - type content = - String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText] + String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]] + def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionSystemMessageParam::content] 
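
[note] Every hunk in this stretch also deletes a `def to_hash: -> ...` line from the signatures. That stays sound only if `#to_hash` is declared once on `OpenAI::Internal::Type::BaseModel` and inherited by every model subclass. A minimal sketch of that inheritance pattern, assuming a naive ivar-based `to_hash` — the real gem's serialization is certainly more involved:

    class BaseModel
      # Declared once on the base class; subclasses inherit it, which is
      # why the per-subclass `def to_hash: -> ...` signature lines can be
      # dropped from the .rbs files without losing the method.
      def to_hash
        instance_variables.to_h do |ivar|
          [ivar.to_s.delete_prefix("@").to_sym, instance_variable_get(ivar)]
        end
      end
    end

    class FunctionCall < BaseModel
      def initialize(arguments:, name:)
        @arguments = arguments
        @name = name
      end
    end

    p FunctionCall.new(arguments: "{}", name: "lookup").to_hash
    # prints the instance variables as a Hash keyed by symbol

One trade-off of relying on the inherited declaration: the base signature can only promise a generic hash type, so callers lose the per-model hash shapes the deleted lines used to spell out.
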
ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs index 968dd8a5..f2bd18a1 100644 --- a/sig/openai/models/chat/chat_completion_token_logprob.rbs +++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs @@ -8,7 +8,7 @@ module OpenAI token: String, bytes: ::Array[Integer]?, logprob: Float, - top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] + top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] } class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel @@ -18,17 +18,15 @@ module OpenAI attr_accessor logprob: Float - attr_accessor top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] + attr_accessor top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] def initialize: ( token: String, bytes: ::Array[Integer]?, logprob: Float, - top_logprobs: ::Array[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] + top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] ) -> void - def to_hash: -> OpenAI::Models::Chat::chat_completion_token_logprob - type top_logprob = { token: String, bytes: ::Array[Integer]?, logprob: Float } @@ -44,8 +42,6 @@ module OpenAI bytes: ::Array[Integer]?, logprob: Float ) -> void - - def to_hash: -> OpenAI::Models::Chat::ChatCompletionTokenLogprob::top_logprob end end end diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index 1fd217b1..e5d54e1b 100644 --- a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -4,19 +4,17 @@ module OpenAI module Chat type chat_completion_tool = - { function: OpenAI::Models::FunctionDefinition, type: :function } + { function: OpenAI::FunctionDefinition, type: :function } class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - attr_accessor function: OpenAI::Models::FunctionDefinition + attr_accessor function: OpenAI::FunctionDefinition attr_accessor type: :function def initialize: ( - function: OpenAI::Models::FunctionDefinition, + function: OpenAI::FunctionDefinition, ?type: :function ) -> void - - def to_hash: -> OpenAI::Models::Chat::chat_completion_tool end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index aadc1e00..a5c2df81 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -4,8 +4,8 @@ module OpenAI module Chat type chat_completion_tool_choice_option = - OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto - | OpenAI::Models::Chat::ChatCompletionNamedToolChoice + OpenAI::Chat::ChatCompletionToolChoiceOption::auto + | OpenAI::Chat::ChatCompletionNamedToolChoice module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union @@ -19,10 +19,10 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionToolChoiceOption::auto] end - def self?.variants: -> [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_tool_choice_option] end end end diff 
diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
index 5b737943..21997531 100644
--- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs
@@ -5,33 +5,31 @@ module OpenAI
     module Chat
       type chat_completion_tool_message_param =
         {
-          content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content,
+          content: OpenAI::Chat::ChatCompletionToolMessageParam::content,
           role: :tool,
           tool_call_id: String
         }
       class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel
-        attr_accessor content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content
+        attr_accessor content: OpenAI::Chat::ChatCompletionToolMessageParam::content
         attr_accessor role: :tool
         attr_accessor tool_call_id: String
         def initialize: (
-          content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content,
+          content: OpenAI::Chat::ChatCompletionToolMessageParam::content,
           tool_call_id: String,
           ?role: :tool
         ) -> void
-        def to_hash: -> OpenAI::Models::Chat::chat_completion_tool_message_param
-
         type content =
-          String | ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]
+          String | ::Array[OpenAI::Chat::ChatCompletionContentPartText]
         module Content
           extend OpenAI::Internal::Type::Union
-          def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::ChatCompletionContentPartText]]
+          def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionToolMessageParam::content]
           ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter
         end
diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs
index 5f5c1a1c..9bc5d59c 100644
--- a/sig/openai/models/chat/chat_completion_user_message_param.rbs
+++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs
@@ -5,13 +5,13 @@ module OpenAI
     module Chat
       type chat_completion_user_message_param =
         {
-          content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content,
+          content: OpenAI::Chat::ChatCompletionUserMessageParam::content,
           role: :user,
           name: String
         }
       class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel
-        attr_accessor content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content
+        attr_accessor content: OpenAI::Chat::ChatCompletionUserMessageParam::content
         attr_accessor role: :user
@@ -20,20 +20,18 @@ module OpenAI
         def name=: (String) -> String
         def initialize: (
-          content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content,
+          content: OpenAI::Chat::ChatCompletionUserMessageParam::content,
           ?name: String,
           ?role: :user
         ) -> void
-        def to_hash: -> OpenAI::Models::Chat::chat_completion_user_message_param
-
         type content =
           String | ::Array[OpenAI::Models::Chat::chat_completion_content_part]
         module Content
           extend OpenAI::Internal::Type::Union
-          def self?.variants: -> [String, ::Array[OpenAI::Models::Chat::chat_completion_content_part]]
+          def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionUserMessageParam::content]
           ChatCompletionContentPartArray: OpenAI::Internal::Type::Converter
         end
diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs
index 1eb9953b..05054037 100644
--- a/sig/openai/models/chat/completion_create_params.rbs
+++ b/sig/openai/models/chat/completion_create_params.rbs
@@ -5,19 +5,19 @@ module OpenAI
         {
           messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
           model: OpenAI::Models::Chat::CompletionCreateParams::model,
-          audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+          audio: OpenAI::Chat::ChatCompletionAudioParam?,
           frequency_penalty: Float?,
           function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-          functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+          functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function],
           logit_bias: ::Hash[Symbol, Integer]?,
           logprobs: bool?,
           max_completion_tokens: Integer?,
           max_tokens: Integer?,
           metadata: OpenAI::Models::metadata?,
-          modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+          modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?,
           n: Integer?,
           parallel_tool_calls: bool,
-          prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+          prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
           presence_penalty: Float?,
           reasoning_effort: OpenAI::Models::reasoning_effort?,
           response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
@@ -25,14 +25,14 @@ module OpenAI
           service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
           stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
           store: bool?,
-          stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+          stream_options: OpenAI::Chat::ChatCompletionStreamOptions?,
           temperature: Float?,
           tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-          tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+          tools: ::Array[OpenAI::Chat::ChatCompletionTool],
           top_logprobs: Integer?,
           top_p: Float?,
           user: String,
-          web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions
+          web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions
         }
         & OpenAI::Internal::Type::request_parameters
@@ -44,7 +44,7 @@ module OpenAI
         attr_accessor model: OpenAI::Models::Chat::CompletionCreateParams::model
-        attr_accessor audio: OpenAI::Models::Chat::ChatCompletionAudioParam?
+        attr_accessor audio: OpenAI::Chat::ChatCompletionAudioParam?
         attr_accessor frequency_penalty: Float?
@@ -54,11 +54,11 @@ module OpenAI
           OpenAI::Models::Chat::CompletionCreateParams::function_call
         ) -> OpenAI::Models::Chat::CompletionCreateParams::function_call
-        attr_reader functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]?
+        attr_reader functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function]?
         def functions=: (
-          ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]
-        ) -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function]
+          ::Array[OpenAI::Chat::CompletionCreateParams::Function]
+        ) -> ::Array[OpenAI::Chat::CompletionCreateParams::Function]
         attr_accessor logit_bias: ::Hash[Symbol, Integer]?
@@ -70,7 +70,7 @@ module OpenAI
         attr_accessor metadata: OpenAI::Models::metadata?
-        attr_accessor modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?
+        attr_accessor modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?
         attr_accessor n: Integer?
@@ -78,7 +78,7 @@ module OpenAI
         def parallel_tool_calls=: (bool) -> bool
-        attr_accessor prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?
+        attr_accessor prediction: OpenAI::Chat::ChatCompletionPredictionContent?
         attr_accessor presence_penalty: Float?
@@ -98,7 +98,7 @@ module OpenAI
         attr_accessor store: bool?
-        attr_accessor stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?
+        attr_accessor stream_options: OpenAI::Chat::ChatCompletionStreamOptions?
         attr_accessor temperature: Float?
@@ -108,11 +108,11 @@ module OpenAI
           OpenAI::Models::Chat::chat_completion_tool_choice_option
         ) -> OpenAI::Models::Chat::chat_completion_tool_choice_option
-        attr_reader tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool]?
+        attr_reader tools: ::Array[OpenAI::Chat::ChatCompletionTool]?
         def tools=: (
-          ::Array[OpenAI::Models::Chat::ChatCompletionTool]
-        ) -> ::Array[OpenAI::Models::Chat::ChatCompletionTool]
+          ::Array[OpenAI::Chat::ChatCompletionTool]
+        ) -> ::Array[OpenAI::Chat::ChatCompletionTool]
         attr_accessor top_logprobs: Integer?
@@ -122,28 +122,28 @@ module OpenAI
         def user=: (String) -> String
-        attr_reader web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions?
+        attr_reader web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions?
         def web_search_options=: (
-          OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions
-        ) -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions
+          OpenAI::Chat::CompletionCreateParams::WebSearchOptions
+        ) -> OpenAI::Chat::CompletionCreateParams::WebSearchOptions
         def initialize: (
           messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param],
           model: OpenAI::Models::Chat::CompletionCreateParams::model,
-          ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?,
+          ?audio: OpenAI::Chat::ChatCompletionAudioParam?,
           ?frequency_penalty: Float?,
           ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call,
-          ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function],
+          ?functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function],
           ?logit_bias: ::Hash[Symbol, Integer]?,
           ?logprobs: bool?,
           ?max_completion_tokens: Integer?,
           ?max_tokens: Integer?,
           ?metadata: OpenAI::Models::metadata?,
-          ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?,
+          ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?,
           ?n: Integer?,
           ?parallel_tool_calls: bool,
-          ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?,
+          ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?,
           ?presence_penalty: Float?,
           ?reasoning_effort: OpenAI::Models::reasoning_effort?,
           ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format,
@@ -151,30 +151,28 @@ module OpenAI
           ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?,
           ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?,
           ?store: bool?,
-          ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+          ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?,
           ?temperature: Float?,
           ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option,
-          ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool],
+          ?tools: ::Array[OpenAI::Chat::ChatCompletionTool],
           ?top_logprobs: Integer?,
           ?top_p: Float?,
           ?user: String,
-          ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions,
+          ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions,
           ?request_options: OpenAI::request_opts
         ) -> void
-        def to_hash: -> OpenAI::Models::Chat::completion_create_params
-
         type model = String | OpenAI::Models::chat_model
         module Model
           extend OpenAI::Internal::Type::Union
-          def self?.variants: -> [String, OpenAI::Models::chat_model]
+          def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::model]
         end
         type function_call =
-          OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode
-          | OpenAI::Models::Chat::ChatCompletionFunctionCallOption
+          OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode
+          | OpenAI::Chat::ChatCompletionFunctionCallOption
         module FunctionCall
           extend OpenAI::Internal::Type::Union
@@ -187,10 +185,10 @@ module OpenAI
             NONE: :none
             AUTO: :auto
-            def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode]
+            def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode]
           end
-          def self?.variants: -> [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption]
+          def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::function_call]
         end
         type function =
@@ -218,8 +216,6 @@ module OpenAI
           ?description: String,
           ?parameters: OpenAI::Models::function_parameters
         ) -> void
-
-          def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::function
         end
         type modality = :text | :audio
         module Modality
@@ -230,18 +226,18 @@ module OpenAI
           TEXT: :text
           AUDIO: :audio
-          def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]
+          def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::modality]
         end
         type response_format =
-          OpenAI::Models::ResponseFormatText
-          | OpenAI::Models::ResponseFormatJSONSchema
-          | OpenAI::Models::ResponseFormatJSONObject
+          OpenAI::ResponseFormatText
+          | OpenAI::ResponseFormatJSONSchema
+          | OpenAI::ResponseFormatJSONObject
         module ResponseFormat
           extend OpenAI::Internal::Type::Union
-          def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject]
+          def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::response_format]
         end
         type service_tier = :auto | :default | :flex
@@ -261,33 +257,31 @@ module OpenAI
         module Stop
           extend OpenAI::Internal::Type::Union
-          def self?.variants: -> [String, ::Array[String]]
+          def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::stop]
           StringArray: OpenAI::Internal::Type::Converter
         end
         type web_search_options =
           {
-            search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
-            user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
+            search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
+            user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
           }
         class WebSearchOptions < OpenAI::Internal::Type::BaseModel
-          attr_reader search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size?
+          attr_reader search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size?
           def search_context_size=: (
-            OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size
-          ) -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size
+            OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size
+          ) -> OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size
-          attr_accessor user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
+          attr_accessor user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
           def initialize: (
-            ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
-            ?user_location: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
+            ?search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size,
+            ?user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation?
           ) -> void
-          def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::web_search_options
-
           type search_context_size = :low | :medium | :high
           module SearchContextSize
@@ -297,27 +291,25 @@ module OpenAI
             MEDIUM: :medium
             HIGH: :high
-            def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size]
+            def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size]
           end
           type user_location =
             {
-              approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
+              approximate: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
               type: :approximate
             }
           class UserLocation < OpenAI::Internal::Type::BaseModel
-            attr_accessor approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate
+            attr_accessor approximate: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate
            attr_accessor type: :approximate
            def initialize: (
-              approximate: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
+              approximate: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate,
              ?type: :approximate
            ) -> void
-            def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::user_location
-
            type approximate =
              {
                city: String,
@@ -349,8 +341,6 @@ module OpenAI
                ?region: String,
                ?timezone: String
              ) -> void
-
-              def to_hash: -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::approximate
            end
          end
        end
diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs
index 324fe253..e20bc3a4 100644
--- a/sig/openai/models/chat/completion_delete_params.rbs
+++ b/sig/openai/models/chat/completion_delete_params.rbs
@@ -9,8 +9,6 @@ module OpenAI
         include OpenAI::Internal::Type::RequestParameters
         def initialize: (?request_options: OpenAI::request_opts) -> void
-
-        def to_hash: -> OpenAI::Models::Chat::completion_delete_params
       end
     end
   end
diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs
index e7bbb7d5..83b22904 100644
--- a/sig/openai/models/chat/completion_list_params.rbs
+++ b/sig/openai/models/chat/completion_list_params.rbs
@@ -44,8 +44,6 @@ module OpenAI
           ?request_options: OpenAI::request_opts
         ) -> void
-        def to_hash: -> OpenAI::Models::Chat::completion_list_params
-
         type order = :asc | :desc
         module Order
diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs
index 7d97ef11..8d4b832e 100644
--- a/sig/openai/models/chat/completion_retrieve_params.rbs
+++ b/sig/openai/models/chat/completion_retrieve_params.rbs
@@ -9,8 +9,6 @@ module OpenAI
         include OpenAI::Internal::Type::RequestParameters
         def initialize: (?request_options: OpenAI::request_opts) -> void
-
-        def to_hash: -> OpenAI::Models::Chat::completion_retrieve_params
       end
     end
   end
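Aside: every `def to_hash:` line deleted in these hunks follows the same shape, one per model class, so the per-class declarations were presumably made redundant by a single generic signature on the shared base class. A minimal sketch of what such a base-class signature could look like (the `::Hash[Symbol, top]` return type is a guess; the real declaration may be more precise per subclass):

# Hypothetical consolidated signature; this patch only shows the removals,
# not where the shared declaration lives.
module OpenAI
  module Internal
    module Type
      class BaseModel
        # One inherited signature instead of one hand-written line per model.
        def to_hash: -> ::Hash[Symbol, top]
      end
    end
  end
end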
diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs
index dc63a22a..942741c4 100644
--- a/sig/openai/models/chat/completion_update_params.rbs
+++ b/sig/openai/models/chat/completion_update_params.rbs
@@ -15,8 +15,6 @@ module OpenAI
           metadata: OpenAI::Models::metadata?,
           ?request_options: OpenAI::request_opts
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Chat::completion_update_params
       end
     end
   end
diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs
index 73db541f..0257770c 100644
--- a/sig/openai/models/chat/completions/message_list_params.rbs
+++ b/sig/openai/models/chat/completions/message_list_params.rbs
@@ -35,8 +35,6 @@ module OpenAI
             ?request_options: OpenAI::request_opts
           ) -> void
-          def to_hash: -> OpenAI::Models::Chat::Completions::message_list_params
-
           type order = :asc | :desc
           module Order
diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs
index e4a99bcf..a2288002 100644
--- a/sig/openai/models/comparison_filter.rbs
+++ b/sig/openai/models/comparison_filter.rbs
@@ -3,25 +3,23 @@ module OpenAI
     type comparison_filter =
       {
         key: String,
-        type: OpenAI::Models::ComparisonFilter::type_,
-        value: OpenAI::Models::ComparisonFilter::value
+        type: OpenAI::ComparisonFilter::type_,
+        value: OpenAI::ComparisonFilter::value
       }
     class ComparisonFilter < OpenAI::Internal::Type::BaseModel
       attr_accessor key: String
-      attr_accessor type: OpenAI::Models::ComparisonFilter::type_
+      attr_accessor type: OpenAI::ComparisonFilter::type_
-      attr_accessor value: OpenAI::Models::ComparisonFilter::value
+      attr_accessor value: OpenAI::ComparisonFilter::value
      def initialize: (
        key: String,
-        type: OpenAI::Models::ComparisonFilter::type_,
-        value: OpenAI::Models::ComparisonFilter::value
+        type: OpenAI::ComparisonFilter::type_,
+        value: OpenAI::ComparisonFilter::value
      ) -> void
-      def to_hash: -> OpenAI::Models::comparison_filter
-
      type type_ = :eq | :ne | :gt | :gte | :lt | :lte
      module Type
@@ -34,7 +32,7 @@ module OpenAI
        LT: :lt
        LTE: :lte
-        def self?.values: -> ::Array[OpenAI::Models::ComparisonFilter::type_]
+        def self?.values: -> ::Array[OpenAI::ComparisonFilter::type_]
      end
      type value = String | Float | bool
      module Value
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, Float, bool]
+        def self?.variants: -> ::Array[OpenAI::ComparisonFilter::value]
      end
    end
  end
diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs
index ed2ca5d6..a9d8a71b 100644
--- a/sig/openai/models/completion.rbs
+++ b/sig/openai/models/completion.rbs
@@ -3,18 +3,18 @@ module OpenAI
     type completion =
       {
         id: String,
-        choices: ::Array[OpenAI::Models::CompletionChoice],
+        choices: ::Array[OpenAI::CompletionChoice],
         created: Integer,
         model: String,
         object: :text_completion,
         system_fingerprint: String,
-        usage: OpenAI::Models::CompletionUsage
+        usage: OpenAI::CompletionUsage
       }
     class Completion < OpenAI::Internal::Type::BaseModel
       attr_accessor id: String
-      attr_accessor choices: ::Array[OpenAI::Models::CompletionChoice]
+      attr_accessor choices: ::Array[OpenAI::CompletionChoice]
      attr_accessor created: Integer
@@ -26,23 +26,19 @@ module OpenAI
      def system_fingerprint=: (String) -> String
-      attr_reader usage: OpenAI::Models::CompletionUsage?
+      attr_reader usage: OpenAI::CompletionUsage?
-      def usage=: (
-        OpenAI::Models::CompletionUsage
-      ) -> OpenAI::Models::CompletionUsage
+      def usage=: (OpenAI::CompletionUsage) -> OpenAI::CompletionUsage
      def initialize: (
        id: String,
-        choices: ::Array[OpenAI::Models::CompletionChoice],
+        choices: ::Array[OpenAI::CompletionChoice],
        created: Integer,
        model: String,
        ?system_fingerprint: String,
-        ?usage: OpenAI::Models::CompletionUsage,
+        ?usage: OpenAI::CompletionUsage,
        ?object: :text_completion
      ) -> void
-
-      def to_hash: -> OpenAI::Models::completion
    end
  end
diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs
index 5e50fc5f..a42cad21 100644
--- a/sig/openai/models/completion_choice.rbs
+++ b/sig/openai/models/completion_choice.rbs
@@ -2,30 +2,28 @@ module OpenAI
   module Models
     type completion_choice =
       {
-        finish_reason: OpenAI::Models::CompletionChoice::finish_reason,
+        finish_reason: OpenAI::CompletionChoice::finish_reason,
         index: Integer,
-        logprobs: OpenAI::Models::CompletionChoice::Logprobs?,
+        logprobs: OpenAI::CompletionChoice::Logprobs?,
         text: String
       }
     class CompletionChoice < OpenAI::Internal::Type::BaseModel
-      attr_accessor finish_reason: OpenAI::Models::CompletionChoice::finish_reason
+      attr_accessor finish_reason: OpenAI::CompletionChoice::finish_reason
      attr_accessor index: Integer
-      attr_accessor logprobs: OpenAI::Models::CompletionChoice::Logprobs?
+      attr_accessor logprobs: OpenAI::CompletionChoice::Logprobs?
      attr_accessor text: String
      def initialize: (
-        finish_reason: OpenAI::Models::CompletionChoice::finish_reason,
+        finish_reason: OpenAI::CompletionChoice::finish_reason,
        index: Integer,
-        logprobs: OpenAI::Models::CompletionChoice::Logprobs?,
+        logprobs: OpenAI::CompletionChoice::Logprobs?,
        text: String
      ) -> void
-      def to_hash: -> OpenAI::Models::completion_choice
-
      type finish_reason = :stop | :length | :content_filter
      module FinishReason
@@ -35,7 +33,7 @@ module OpenAI
        LENGTH: :length
        CONTENT_FILTER: :content_filter
-        def self?.values: -> ::Array[OpenAI::Models::CompletionChoice::finish_reason]
+        def self?.values: -> ::Array[OpenAI::CompletionChoice::finish_reason]
      end
      type logprobs =
@@ -71,8 +69,6 @@ module OpenAI
          ?tokens: ::Array[String],
          ?top_logprobs: ::Array[::Hash[Symbol, Float]]
        ) -> void
-
-        def to_hash: -> OpenAI::Models::CompletionChoice::logprobs
      end
    end
  end
diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs
index 184a3c57..f2a4f357 100644
--- a/sig/openai/models/completion_create_params.rbs
+++ b/sig/openai/models/completion_create_params.rbs
@@ -14,7 +14,7 @@ module OpenAI
         presence_penalty: Float?,
         seed: Integer?,
         stop: OpenAI::Models::CompletionCreateParams::stop?,
-        stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+        stream_options: OpenAI::Chat::ChatCompletionStreamOptions?,
         suffix: String?,
         temperature: Float?,
         top_p: Float?,
@@ -50,7 +50,7 @@ module OpenAI
      attr_accessor stop: OpenAI::Models::CompletionCreateParams::stop?
-      attr_accessor stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?
+      attr_accessor stream_options: OpenAI::Chat::ChatCompletionStreamOptions?
      attr_accessor suffix: String?
@@ -75,7 +75,7 @@ module OpenAI
        ?presence_penalty: Float?,
        ?seed: Integer?,
        ?stop: OpenAI::Models::CompletionCreateParams::stop?,
-        ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?,
+        ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?,
        ?suffix: String?,
        ?temperature: Float?,
        ?top_p: Float?,
@@ -83,17 +83,13 @@ module OpenAI
        ?request_options: OpenAI::request_opts
      ) -> void
-      def to_hash: -> OpenAI::Models::completion_create_params
-
      type model =
        String
        | :"gpt-3.5-turbo-instruct"
        | :"davinci-002"
        | :"babbage-002"
      module Model
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, (:"gpt-3.5-turbo-instruct"
-        | :"davinci-002"
-        | :"babbage-002")]
+        def self?.variants: -> ::Array[OpenAI::Models::CompletionCreateParams::model]
        GPT_3_5_TURBO_INSTRUCT: :"gpt-3.5-turbo-instruct"
        DAVINCI_002: :"davinci-002"
@@ -106,7 +102,7 @@ module OpenAI
      module Prompt
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]]
+        def self?.variants: -> ::Array[OpenAI::Models::CompletionCreateParams::prompt]
        StringArray: OpenAI::Internal::Type::Converter
@@ -120,7 +116,7 @@ module OpenAI
      module Stop
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, ::Array[String]]
+        def self?.variants: -> ::Array[OpenAI::Models::CompletionCreateParams::stop]
        StringArray: OpenAI::Internal::Type::Converter
      end
diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs
index 5a98db8b..f50f1dad 100644
--- a/sig/openai/models/completion_usage.rbs
+++ b/sig/openai/models/completion_usage.rbs
@@ -5,8 +5,8 @@ module OpenAI
         completion_tokens: Integer,
         prompt_tokens: Integer,
         total_tokens: Integer,
-        completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails,
-        prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
+        completion_tokens_details: OpenAI::CompletionUsage::CompletionTokensDetails,
+        prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails
       }
     class CompletionUsage < OpenAI::Internal::Type::BaseModel
@@ -16,28 +16,26 @@ module OpenAI
      attr_accessor total_tokens: Integer
-      attr_reader completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails?
+      attr_reader completion_tokens_details: OpenAI::CompletionUsage::CompletionTokensDetails?
      def completion_tokens_details=: (
-        OpenAI::Models::CompletionUsage::CompletionTokensDetails
-      ) -> OpenAI::Models::CompletionUsage::CompletionTokensDetails
+        OpenAI::CompletionUsage::CompletionTokensDetails
+      ) -> OpenAI::CompletionUsage::CompletionTokensDetails
-      attr_reader prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails?
+      attr_reader prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails?
      def prompt_tokens_details=: (
-        OpenAI::Models::CompletionUsage::PromptTokensDetails
-      ) -> OpenAI::Models::CompletionUsage::PromptTokensDetails
+        OpenAI::CompletionUsage::PromptTokensDetails
+      ) -> OpenAI::CompletionUsage::PromptTokensDetails
      def initialize: (
        completion_tokens: Integer,
        prompt_tokens: Integer,
        total_tokens: Integer,
-        ?completion_tokens_details: OpenAI::Models::CompletionUsage::CompletionTokensDetails,
-        ?prompt_tokens_details: OpenAI::Models::CompletionUsage::PromptTokensDetails
+        ?completion_tokens_details: OpenAI::CompletionUsage::CompletionTokensDetails,
+        ?prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails
      ) -> void
-      def to_hash: -> OpenAI::Models::completion_usage
-
      type completion_tokens_details =
        {
          accepted_prediction_tokens: Integer,
@@ -69,8 +67,6 @@ module OpenAI
          ?reasoning_tokens: Integer,
          ?rejected_prediction_tokens: Integer
        ) -> void
-
-        def to_hash: -> OpenAI::Models::CompletionUsage::completion_tokens_details
      end
      type prompt_tokens_details =
@@ -89,8 +85,6 @@ module OpenAI
          ?audio_tokens: Integer,
          ?cached_tokens: Integer
        ) -> void
-
-        def to_hash: -> OpenAI::Models::CompletionUsage::prompt_tokens_details
      end
    end
  end
diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs
index 2f98c0d3..f5c17954 100644
--- a/sig/openai/models/compound_filter.rbs
+++ b/sig/openai/models/compound_filter.rbs
@@ -2,28 +2,26 @@ module OpenAI
   module Models
     type compound_filter =
       {
-        filters: ::Array[OpenAI::Models::CompoundFilter::filter],
-        type: OpenAI::Models::CompoundFilter::type_
+        filters: ::Array[OpenAI::CompoundFilter::filter],
+        type: OpenAI::CompoundFilter::type_
       }
     class CompoundFilter < OpenAI::Internal::Type::BaseModel
-      attr_accessor filters: ::Array[OpenAI::Models::CompoundFilter::filter]
+      attr_accessor filters: ::Array[OpenAI::CompoundFilter::filter]
-      attr_accessor type: OpenAI::Models::CompoundFilter::type_
+      attr_accessor type: OpenAI::CompoundFilter::type_
      def initialize: (
-        filters: ::Array[OpenAI::Models::CompoundFilter::filter],
-        type: OpenAI::Models::CompoundFilter::type_
+        filters: ::Array[OpenAI::CompoundFilter::filter],
+        type: OpenAI::CompoundFilter::type_
      ) -> void
-      def to_hash: -> OpenAI::Models::compound_filter
-
-      type filter = OpenAI::Models::ComparisonFilter | top
+      type filter = OpenAI::ComparisonFilter | top
      module Filter
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [OpenAI::Models::ComparisonFilter, top]
+        def self?.variants: -> ::Array[OpenAI::CompoundFilter::filter]
      end
      type type_ = :and | :or
@@ -34,7 +32,7 @@ module OpenAI
        AND: :and
        OR: :or
-        def self?.values: -> ::Array[OpenAI::Models::CompoundFilter::type_]
+        def self?.values: -> ::Array[OpenAI::CompoundFilter::type_]
      end
    end
  end
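Aside: these signatures consistently distinguish nullable fields from omittable ones. A required-but-nullable field is a plain `attr_accessor` with a `?` type, while an omittable field gets a nilable `attr_reader` plus a non-nil typed writer, exactly as `CompletionUsage` does for `completion_tokens_details` above. A condensed illustration with a hypothetical model (the class name `ExampleModel` is invented for this sketch):

module OpenAI
  module Models
    class ExampleModel < OpenAI::Internal::Type::BaseModel
      # Required in the payload, but its value may be null:
      attr_accessor logprobs: bool?

      # May be omitted from the payload entirely: reads as nil until set,
      # yet only accepts a non-nil value when assigned.
      attr_reader usage: OpenAI::CompletionUsage?

      def usage=: (OpenAI::CompletionUsage) -> OpenAI::CompletionUsage
    end
  end
end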
diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs
index d9d813c3..d406e7da 100644
--- a/sig/openai/models/create_embedding_response.rbs
+++ b/sig/openai/models/create_embedding_response.rbs
@@ -2,30 +2,28 @@ module OpenAI
   module Models
     type create_embedding_response =
       {
-        data: ::Array[OpenAI::Models::Embedding],
+        data: ::Array[OpenAI::Embedding],
         model: String,
         object: :list,
-        usage: OpenAI::Models::CreateEmbeddingResponse::Usage
+        usage: OpenAI::CreateEmbeddingResponse::Usage
       }
     class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel
-      attr_accessor data: ::Array[OpenAI::Models::Embedding]
+      attr_accessor data: ::Array[OpenAI::Embedding]
      attr_accessor model: String
      attr_accessor object: :list
-      attr_accessor usage: OpenAI::Models::CreateEmbeddingResponse::Usage
+      attr_accessor usage: OpenAI::CreateEmbeddingResponse::Usage
      def initialize: (
-        data: ::Array[OpenAI::Models::Embedding],
+        data: ::Array[OpenAI::Embedding],
        model: String,
-        usage: OpenAI::Models::CreateEmbeddingResponse::Usage,
+        usage: OpenAI::CreateEmbeddingResponse::Usage,
        ?object: :list
      ) -> void
-      def to_hash: -> OpenAI::Models::create_embedding_response
-
      type usage = { prompt_tokens: Integer, total_tokens: Integer }
      class Usage < OpenAI::Internal::Type::BaseModel
@@ -34,8 +32,6 @@ module OpenAI
        attr_accessor total_tokens: Integer
        def initialize: (prompt_tokens: Integer, total_tokens: Integer) -> void
-
-        def to_hash: -> OpenAI::Models::CreateEmbeddingResponse::usage
      end
    end
  end
diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs
index c0787d7b..88478e43 100644
--- a/sig/openai/models/embedding.rbs
+++ b/sig/openai/models/embedding.rbs
@@ -15,8 +15,6 @@ module OpenAI
        index: Integer,
        ?object: :embedding
      ) -> void
-
-      def to_hash: -> OpenAI::Models::embedding
    end
  end
diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs
index 16c3bd6d..73e2bc84 100644
--- a/sig/openai/models/embedding_create_params.rbs
+++ b/sig/openai/models/embedding_create_params.rbs
@@ -41,15 +41,13 @@ module OpenAI
        ?request_options: OpenAI::request_opts
      ) -> void
-      def to_hash: -> OpenAI::Models::embedding_create_params
-
      type input =
        String
        | ::Array[String]
        | ::Array[Integer]
        | ::Array[::Array[Integer]]
      module Input
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, ::Array[String], ::Array[Integer], ::Array[::Array[Integer]]]
+        def self?.variants: -> ::Array[OpenAI::Models::EmbeddingCreateParams::input]
        StringArray: OpenAI::Internal::Type::Converter
@@ -63,7 +61,7 @@ module OpenAI
      module Model
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [String, OpenAI::Models::embedding_model]
+        def self?.variants: -> ::Array[OpenAI::Models::EmbeddingCreateParams::model]
      end
      type encoding_format = :float | :base64
diff --git a/sig/openai/models/error_object.rbs b/sig/openai/models/error_object.rbs
index 0a526b8a..f3cb58bb 100644
--- a/sig/openai/models/error_object.rbs
+++ b/sig/openai/models/error_object.rbs
@@ -18,8 +18,6 @@ module OpenAI
        param: String?,
        type: String
      ) -> void
-
-      def to_hash: -> OpenAI::Models::error_object
    end
  end
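Aside: the recurring `variants` rewrite in these hunks swaps a fixed tuple type such as `[String, ::Array[String]]` for `::Array[...]` over the union's own type alias. A tuple pins the exact length and order of the returned list, so adding a variant to the union would require editing two places and would break any caller matching on the tuple shape; pointing at the alias leaves one source of truth. A sketch with a hypothetical union (names invented for illustration):

module OpenAI
  module Models
    # Hypothetical union over two payload shapes.
    type example_value = String | Float

    module ExampleValue
      extend OpenAI::Internal::Type::Union

      # Before: def self?.variants: -> [String, Float]
      # After: the alias above is the single source of truth, and the
      # signature no longer fixes how many variants exist.
      def self?.variants: -> ::Array[OpenAI::Models::example_value]
    end
  end
end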
diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs
index 09ba33d8..ac24927c 100644
--- a/sig/openai/models/eval_create_params.rbs
+++ b/sig/openai/models/eval_create_params.rbs
@@ -3,7 +3,7 @@ module OpenAI
     type eval_create_params =
       {
         data_source_config: OpenAI::Models::EvalCreateParams::data_source_config,
-        testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion],
+        testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion],
         metadata: OpenAI::Models::metadata?,
         name: String
       }
@@ -15,7 +15,7 @@ module OpenAI
      attr_accessor data_source_config: OpenAI::Models::EvalCreateParams::data_source_config
-      attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion]
+      attr_accessor testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion]
      attr_accessor metadata: OpenAI::Models::metadata?
@@ -25,17 +25,15 @@ module OpenAI
      def initialize: (
        data_source_config: OpenAI::Models::EvalCreateParams::data_source_config,
-        testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion],
+        testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion],
        ?metadata: OpenAI::Models::metadata?,
        ?name: String,
        ?request_options: OpenAI::request_opts
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_create_params
-
      type data_source_config =
-        OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom
-        | OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs
+        OpenAI::EvalCreateParams::DataSourceConfig::Custom
+        | OpenAI::EvalCreateParams::DataSourceConfig::Logs
      module DataSourceConfig
        extend OpenAI::Internal::Type::Union
@@ -61,8 +59,6 @@ module OpenAI
          ?include_sample_schema: bool,
          ?type: :custom
        ) -> void
-
-        def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::custom
      end
      type logs = { type: :logs, metadata: ::Hash[Symbol, top] }
@@ -75,26 +71,24 @@ module OpenAI
        def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top]
        def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void
-
-        def to_hash: -> OpenAI::Models::EvalCreateParams::DataSourceConfig::logs
      end
-      def self?.variants: -> [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs]
+      def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::data_source_config]
    end
    type testing_criterion =
-      OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel
-      | OpenAI::Models::EvalStringCheckGrader
-      | OpenAI::Models::EvalTextSimilarityGrader
-      | OpenAI::Models::EvalCreateParams::TestingCriterion::Python
-      | OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel
+      OpenAI::EvalCreateParams::TestingCriterion::LabelModel
+      | OpenAI::EvalStringCheckGrader
+      | OpenAI::EvalTextSimilarityGrader
+      | OpenAI::EvalCreateParams::TestingCriterion::Python
+      | OpenAI::EvalCreateParams::TestingCriterion::ScoreModel
    module TestingCriterion
      extend OpenAI::Internal::Type::Union
      type label_model =
        {
-          input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input],
+          input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input],
          labels: ::Array[String],
          model: String,
          name: String,
@@ -103,7 +97,7 @@ module OpenAI
        }
      class LabelModel < OpenAI::Internal::Type::BaseModel
-        attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input]
+        attr_accessor input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input]
        attr_accessor labels: ::Array[String]
@@ -116,7 +110,7 @@ module OpenAI
        attr_accessor type: :label_model
        def initialize: (
-          input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input],
+          input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input],
          labels: ::Array[String],
          model: String,
          name: String,
@@ -124,11 +118,9 @@ module OpenAI
          ?type: :label_model
        ) -> void
-        def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::label_model
-
        type input =
-          OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage
-          | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem
+          OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage
+          | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem
        module Input
          extend OpenAI::Internal::Type::Union
@@ -141,40 +133,36 @@ module OpenAI
            attr_accessor role: String
            def initialize: (content: String, role: String) -> void
-
-            def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::simple_input_message
          end
          type eval_item =
            {
-              content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content,
-              role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role,
-              type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
+              content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content,
+              role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role,
+              type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
            }
          class EvalItem < OpenAI::Internal::Type::BaseModel
-            attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content
+            attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content
-            attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role
+            attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role
-            attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_?
+            attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_?
            def type=: (
-              OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
-            ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
+              OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
+            ) -> OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
            def initialize: (
-              content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content,
-              role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role,
-              ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
+              content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content,
+              role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role,
+              ?type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_
            ) -> void
-            def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::eval_item
-
            type content =
              String
-              | OpenAI::Models::Responses::ResponseInputText
-              | OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+              | OpenAI::Responses::ResponseInputText
+              | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
            module Content
              extend OpenAI::Internal::Type::Union
@@ -187,11 +175,9 @@ module OpenAI
                attr_accessor type: :output_text
                def initialize: (text: String, ?type: :output_text) -> void
-
-                def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::output_text
              end
-              def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText]
+              def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content]
            end
            type role = :user | :assistant | :system | :developer
@@ -204,7 +190,7 @@ module OpenAI
              SYSTEM: :system
              DEVELOPER: :developer
-              def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role]
+              def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role]
            end
            type type_ = :message
@@ -214,11 +200,11 @@ module OpenAI
              MESSAGE: :message
-              def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_]
+              def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_]
            end
          end
-          def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem]
+          def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input]
        end
      end
@@ -253,13 +239,11 @@ module OpenAI
          ?pass_threshold: Float,
          ?type: :python
        ) -> void
-
-        def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::python
      end
      type score_model =
        {
-          input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input],
+          input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input],
          model: String,
          name: String,
          type: :score_model,
@@ -269,7 +253,7 @@ module OpenAI
        }
      class ScoreModel < OpenAI::Internal::Type::BaseModel
-        attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input]
+        attr_accessor input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input]
        attr_accessor model: String
@@ -290,7 +274,7 @@ module OpenAI
        def sampling_params=: (top) -> top
        def initialize: (
-          input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input],
+          input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input],
          model: String,
          name: String,
          ?pass_threshold: Float,
@@ -299,38 +283,34 @@ module OpenAI
          ?type: :score_model
        ) -> void
-        def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::score_model
-
        type input =
          {
-            content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content,
-            role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role,
-            type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
+            content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content,
+            role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role,
+            type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
          }
        class Input < OpenAI::Internal::Type::BaseModel
-          attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content
+          attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content
-          attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role
+          attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role
-          attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_?
+          attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_?
          def type=: (
-            OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
-          ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
+            OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
+          ) -> OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
          def initialize: (
-            content: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::content,
-            role: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role,
-            ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
+            content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content,
+            role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role,
+            ?type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_
          ) -> void
-          def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::input
-
          type content =
            String
-            | OpenAI::Models::Responses::ResponseInputText
-            | OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText
+            | OpenAI::Responses::ResponseInputText
+            | OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText
          module Content
            extend OpenAI::Internal::Type::Union
@@ -343,11 +323,9 @@ module OpenAI
              attr_accessor type: :output_text
              def initialize: (text: String, ?type: :output_text) -> void
-
-              def to_hash: -> OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::output_text
            end
-            def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content]
          end
          type role = :user | :assistant | :system | :developer
@@ -360,7 +338,7 @@ module OpenAI
            SYSTEM: :system
            DEVELOPER: :developer
-            def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::role]
+            def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role]
          end
          type type_ = :message
@@ -370,12 +348,12 @@ module OpenAI
            MESSAGE: :message
-            def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_]
+            def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_]
          end
        end
      end
-      def self?.variants: -> [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel]
+      def self?.variants: -> ::Array[OpenAI::EvalCreateParams::testing_criterion]
    end
  end
diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs
index 8ff0e39f..8f0a934e 100644
--- a/sig/openai/models/eval_create_response.rbs
+++ b/sig/openai/models/eval_create_response.rbs
@@ -36,22 +36,20 @@ module OpenAI
        ?object: :eval
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_create_response
-
      type data_source_config =
-        OpenAI::Models::EvalCustomDataSourceConfig
-        | OpenAI::Models::EvalStoredCompletionsDataSourceConfig
+        OpenAI::EvalCustomDataSourceConfig
+        | OpenAI::EvalStoredCompletionsDataSourceConfig
      module DataSourceConfig
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
+        def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::data_source_config]
      end
      type testing_criterion =
-        OpenAI::Models::EvalLabelModelGrader
-        | OpenAI::Models::EvalStringCheckGrader
-        | OpenAI::Models::EvalTextSimilarityGrader
+        OpenAI::EvalLabelModelGrader
+        | OpenAI::EvalStringCheckGrader
+        | OpenAI::EvalTextSimilarityGrader
        | OpenAI::Models::EvalCreateResponse::TestingCriterion::Python
        | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel
@@ -89,8 +87,6 @@ module OpenAI
          ?pass_threshold: Float,
          ?type: :python
        ) -> void
-
-        def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::python
      end
      type score_model =
@@ -135,8 +131,6 @@ module OpenAI
          ?type: :score_model
        ) -> void
-        def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::score_model
-
        type input =
          {
            content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content,
@@ -161,11 +155,9 @@ module OpenAI
            ?type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_
          ) -> void
-          def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::input
-
          type content =
            String
-            | OpenAI::Models::Responses::ResponseInputText
+            | OpenAI::Responses::ResponseInputText
            | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText
          module Content
@@ -179,11 +171,9 @@ module OpenAI
              attr_accessor type: :output_text
              def initialize: (text: String, ?type: :output_text) -> void
-
-              def to_hash: -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::output_text
            end
-            def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content]
          end
          type role = :user | :assistant | :system | :developer
@@ -211,7 +201,7 @@ module OpenAI
          end
        end
-      def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel]
+      def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion]
    end
  end
diff --git a/sig/openai/models/eval_custom_data_source_config.rbs b/sig/openai/models/eval_custom_data_source_config.rbs
index d87e099d..8b53b580 100644
--- a/sig/openai/models/eval_custom_data_source_config.rbs
+++ b/sig/openai/models/eval_custom_data_source_config.rbs
@@ -9,8 +9,6 @@ module OpenAI
      attr_accessor type: :custom
      def initialize: (schema: ::Hash[Symbol, top], ?type: :custom) -> void
-
-      def to_hash: -> OpenAI::Models::eval_custom_data_source_config
    end
  end
diff --git a/sig/openai/models/eval_delete_params.rbs b/sig/openai/models/eval_delete_params.rbs
index cb820afd..4fb3f99f 100644
--- a/sig/openai/models/eval_delete_params.rbs
+++ b/sig/openai/models/eval_delete_params.rbs
@@ -7,8 +7,6 @@ module OpenAI
      include OpenAI::Internal::Type::RequestParameters
      def initialize: (?request_options: OpenAI::request_opts) -> void
-
-      def to_hash: -> OpenAI::Models::eval_delete_params
    end
  end
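Aside: the `def self?.values` and `def self?.variants` forms that recur through these files are RBS's module-function notation: a single declaration types the method both on the module itself and at the instance level, matching Ruby's `module_function` pattern. A minimal sketch with an invented enum:

module OpenAI
  module Models
    # Hypothetical enum, for illustration only.
    module ExampleEnum
      LOW: :low
      HIGH: :high

      # `self?.` types both ExampleEnum.values (singleton) and an
      # instance-level #values with the same signature.
      def self?.values: -> ::Array[:low | :high]
    end
  end
end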
diff --git a/sig/openai/models/eval_delete_response.rbs b/sig/openai/models/eval_delete_response.rbs
index e06e798b..4f0adf53 100644
--- a/sig/openai/models/eval_delete_response.rbs
+++ b/sig/openai/models/eval_delete_response.rbs
@@ -11,8 +11,6 @@ module OpenAI
      attr_accessor object: String
      def initialize: (deleted: bool, eval_id: String, object: String) -> void
-
-      def to_hash: -> OpenAI::Models::eval_delete_response
    end
  end
diff --git a/sig/openai/models/eval_label_model_grader.rbs b/sig/openai/models/eval_label_model_grader.rbs
index dab1b730..63867ffd 100644
--- a/sig/openai/models/eval_label_model_grader.rbs
+++ b/sig/openai/models/eval_label_model_grader.rbs
@@ -2,7 +2,7 @@ module OpenAI
   module Models
     type eval_label_model_grader =
       {
-        input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input],
+        input: ::Array[OpenAI::EvalLabelModelGrader::Input],
         labels: ::Array[String],
         model: String,
         name: String,
@@ -11,7 +11,7 @@ module OpenAI
       }
     class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel
-      attr_accessor input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input]
+      attr_accessor input: ::Array[OpenAI::EvalLabelModelGrader::Input]
      attr_accessor labels: ::Array[String]
@@ -24,7 +24,7 @@ module OpenAI
      attr_accessor type: :label_model
      def initialize: (
-        input: ::Array[OpenAI::Models::EvalLabelModelGrader::Input],
+        input: ::Array[OpenAI::EvalLabelModelGrader::Input],
        labels: ::Array[String],
        model: String,
        name: String,
@@ -32,38 +32,34 @@ module OpenAI
        ?type: :label_model
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_label_model_grader
-
      type input =
        {
-          content: OpenAI::Models::EvalLabelModelGrader::Input::content,
-          role: OpenAI::Models::EvalLabelModelGrader::Input::role,
-          type: OpenAI::Models::EvalLabelModelGrader::Input::type_
+          content: OpenAI::EvalLabelModelGrader::Input::content,
+          role: OpenAI::EvalLabelModelGrader::Input::role,
+          type: OpenAI::EvalLabelModelGrader::Input::type_
        }
      class Input < OpenAI::Internal::Type::BaseModel
-        attr_accessor content: OpenAI::Models::EvalLabelModelGrader::Input::content
+        attr_accessor content: OpenAI::EvalLabelModelGrader::Input::content
-        attr_accessor role: OpenAI::Models::EvalLabelModelGrader::Input::role
+        attr_accessor role: OpenAI::EvalLabelModelGrader::Input::role
-        attr_reader type: OpenAI::Models::EvalLabelModelGrader::Input::type_?
+        attr_reader type: OpenAI::EvalLabelModelGrader::Input::type_?
        def type=: (
-          OpenAI::Models::EvalLabelModelGrader::Input::type_
-        ) -> OpenAI::Models::EvalLabelModelGrader::Input::type_
+          OpenAI::EvalLabelModelGrader::Input::type_
+        ) -> OpenAI::EvalLabelModelGrader::Input::type_
        def initialize: (
-          content: OpenAI::Models::EvalLabelModelGrader::Input::content,
-          role: OpenAI::Models::EvalLabelModelGrader::Input::role,
-          ?type: OpenAI::Models::EvalLabelModelGrader::Input::type_
+          content: OpenAI::EvalLabelModelGrader::Input::content,
+          role: OpenAI::EvalLabelModelGrader::Input::role,
+          ?type: OpenAI::EvalLabelModelGrader::Input::type_
        ) -> void
-        def to_hash: -> OpenAI::Models::EvalLabelModelGrader::input
-
        type content =
          String
-          | OpenAI::Models::Responses::ResponseInputText
-          | OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText
+          | OpenAI::Responses::ResponseInputText
+          | OpenAI::EvalLabelModelGrader::Input::Content::OutputText
        module Content
          extend OpenAI::Internal::Type::Union
@@ -76,11 +72,9 @@ module OpenAI
            attr_accessor type: :output_text
            def initialize: (text: String, ?type: :output_text) -> void
-
-            def to_hash: -> OpenAI::Models::EvalLabelModelGrader::Input::Content::output_text
          end
-          def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalLabelModelGrader::Input::Content::OutputText]
+          def self?.variants: -> ::Array[OpenAI::EvalLabelModelGrader::Input::content]
        end
        type role = :user | :assistant | :system | :developer
@@ -93,7 +87,7 @@ module OpenAI
          SYSTEM: :system
          DEVELOPER: :developer
-          def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::role]
+          def self?.values: -> ::Array[OpenAI::EvalLabelModelGrader::Input::role]
        end
        type type_ = :message
@@ -103,7 +97,7 @@ module OpenAI
          MESSAGE: :message
-          def self?.values: -> ::Array[OpenAI::Models::EvalLabelModelGrader::Input::type_]
+          def self?.values: -> ::Array[OpenAI::EvalLabelModelGrader::Input::type_]
        end
      end
    end
diff --git a/sig/openai/models/eval_list_params.rbs b/sig/openai/models/eval_list_params.rbs
index f5a4316b..57ad5212 100644
--- a/sig/openai/models/eval_list_params.rbs
+++ b/sig/openai/models/eval_list_params.rbs
@@ -41,8 +41,6 @@ module OpenAI
        ?request_options: OpenAI::request_opts
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_list_params
-
      type order = :asc | :desc
      module Order
diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs
index f7a1047b..0384d54a 100644
--- a/sig/openai/models/eval_list_response.rbs
+++ b/sig/openai/models/eval_list_response.rbs
@@ -36,22 +36,20 @@ module OpenAI
        ?object: :eval
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_list_response
-
      type data_source_config =
-        OpenAI::Models::EvalCustomDataSourceConfig
-        | OpenAI::Models::EvalStoredCompletionsDataSourceConfig
+        OpenAI::EvalCustomDataSourceConfig
+        | OpenAI::EvalStoredCompletionsDataSourceConfig
      module DataSourceConfig
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
+        def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::data_source_config]
      end
      type testing_criterion =
-        OpenAI::Models::EvalLabelModelGrader
-        | OpenAI::Models::EvalStringCheckGrader
-        | OpenAI::Models::EvalTextSimilarityGrader
+        OpenAI::EvalLabelModelGrader
+        | OpenAI::EvalStringCheckGrader
+        | OpenAI::EvalTextSimilarityGrader
        | OpenAI::Models::EvalListResponse::TestingCriterion::Python
        | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel
@@ -89,8 +87,6 @@ module OpenAI
          ?pass_threshold: Float,
          ?type: :python
        ) -> void
-
-        def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::python
      end
      type score_model =
@@ -135,8 +131,6 @@ module OpenAI
          ?type: :score_model
        ) -> void
-        def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::score_model
-
        type input =
          {
            content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content,
@@ -161,11 +155,9 @@ module OpenAI
            ?type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_
          ) -> void
-          def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::input
-
          type content =
            String
-            | OpenAI::Models::Responses::ResponseInputText
+            | OpenAI::Responses::ResponseInputText
            | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText
          module Content
@@ -179,11 +171,9 @@ module OpenAI
              attr_accessor type: :output_text
              def initialize: (text: String, ?type: :output_text) -> void
-
-              def to_hash: -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::output_text
            end
-            def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText]
+            def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content]
          end
          type role = :user | :assistant | :system | :developer
@@ -211,7 +201,7 @@ module OpenAI
          end
        end
-      def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel]
+      def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::testing_criterion]
    end
  end
diff --git a/sig/openai/models/eval_retrieve_params.rbs b/sig/openai/models/eval_retrieve_params.rbs
index 9b2a4444..c6242dcb 100644
--- a/sig/openai/models/eval_retrieve_params.rbs
+++ b/sig/openai/models/eval_retrieve_params.rbs
@@ -8,8 +8,6 @@ module OpenAI
      include OpenAI::Internal::Type::RequestParameters
      def initialize: (?request_options: OpenAI::request_opts) -> void
-
-      def to_hash: -> OpenAI::Models::eval_retrieve_params
    end
  end
diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs
index b354045c..191c892e 100644
--- a/sig/openai/models/eval_retrieve_response.rbs
+++ b/sig/openai/models/eval_retrieve_response.rbs
@@ -36,22 +36,20 @@ module OpenAI
        ?object: :eval
      ) -> void
-      def to_hash: -> OpenAI::Models::eval_retrieve_response
-
      type data_source_config =
-        OpenAI::Models::EvalCustomDataSourceConfig
-        | OpenAI::Models::EvalStoredCompletionsDataSourceConfig
+        OpenAI::EvalCustomDataSourceConfig
+        | OpenAI::EvalStoredCompletionsDataSourceConfig
      module DataSourceConfig
        extend OpenAI::Internal::Type::Union
-        def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
+        def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::data_source_config]
      end
      type testing_criterion =
-        OpenAI::Models::EvalLabelModelGrader
-        | OpenAI::Models::EvalStringCheckGrader
-        | OpenAI::Models::EvalTextSimilarityGrader
+        OpenAI::EvalLabelModelGrader
+        | OpenAI::EvalStringCheckGrader
+        | OpenAI::EvalTextSimilarityGrader
        | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python
        | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel
OpenAI ?pass_threshold: Float, ?type: :python ) -> void - - def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::python end type score_model = @@ -135,8 +131,6 @@ module OpenAI ?type: :score_model ) -> void - def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::score_model - type input = { content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content, @@ -161,11 +155,9 @@ module OpenAI ?type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ ) -> void - def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::input - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText module Content @@ -179,11 +171,9 @@ module OpenAI attr_accessor type: :output_text def initialize: (text: String, ?type: :output_text) -> void - - def to_hash: -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content] end type role = :user | :assistant | :system | :developer @@ -211,7 +201,7 @@ module OpenAI end end - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel] + def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] end end end diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs index b99a1a02..345b12d6 100644 --- a/sig/openai/models/eval_stored_completions_data_source_config.rbs +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -19,8 +19,6 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :stored_completions ) -> void - - def to_hash: -> OpenAI::Models::eval_stored_completions_data_source_config end end end diff --git a/sig/openai/models/eval_string_check_grader.rbs b/sig/openai/models/eval_string_check_grader.rbs index c7d89a65..4fd28012 100644 --- a/sig/openai/models/eval_string_check_grader.rbs +++ b/sig/openai/models/eval_string_check_grader.rbs @@ -4,7 +4,7 @@ module OpenAI { input: String, name: String, - operation: OpenAI::Models::EvalStringCheckGrader::operation, + operation: OpenAI::EvalStringCheckGrader::operation, reference: String, type: :string_check } @@ -14,7 +14,7 @@ module OpenAI attr_accessor name: String - attr_accessor operation: OpenAI::Models::EvalStringCheckGrader::operation + attr_accessor operation: OpenAI::EvalStringCheckGrader::operation attr_accessor reference: String @@ -23,13 +23,11 @@ module OpenAI def initialize: ( input: String, name: String, - operation: OpenAI::Models::EvalStringCheckGrader::operation, + operation: OpenAI::EvalStringCheckGrader::operation, reference: String, ?type: :string_check ) -> void - def to_hash: -> OpenAI::Models::eval_string_check_grader - type operation = :eq | :ne | :like | :ilike module Operation @@ -40,7 +38,7 @@ module OpenAI LIKE: :like ILIKE: :ilike - def 
self?.values: -> ::Array[OpenAI::Models::EvalStringCheckGrader::operation] + def self?.values: -> ::Array[OpenAI::EvalStringCheckGrader::operation] end end end diff --git a/sig/openai/models/eval_text_similarity_grader.rbs b/sig/openai/models/eval_text_similarity_grader.rbs index 98059580..1e13b3f2 100644 --- a/sig/openai/models/eval_text_similarity_grader.rbs +++ b/sig/openai/models/eval_text_similarity_grader.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type eval_text_similarity_grader = { - evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric, + evaluation_metric: OpenAI::EvalTextSimilarityGrader::evaluation_metric, input: String, pass_threshold: Float, reference: String, @@ -11,7 +11,7 @@ module OpenAI } class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel - attr_accessor evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric + attr_accessor evaluation_metric: OpenAI::EvalTextSimilarityGrader::evaluation_metric attr_accessor input: String @@ -26,7 +26,7 @@ module OpenAI def name=: (String) -> String def initialize: ( - evaluation_metric: OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric, + evaluation_metric: OpenAI::EvalTextSimilarityGrader::evaluation_metric, input: String, pass_threshold: Float, reference: String, @@ -34,8 +34,6 @@ module OpenAI ?type: :text_similarity ) -> void - def to_hash: -> OpenAI::Models::eval_text_similarity_grader - type evaluation_metric = :fuzzy_match | :bleu @@ -62,7 +60,7 @@ module OpenAI ROUGE_5: :rouge_5 ROUGE_L: :rouge_l - def self?.values: -> ::Array[OpenAI::Models::EvalTextSimilarityGrader::evaluation_metric] + def self?.values: -> ::Array[OpenAI::EvalTextSimilarityGrader::evaluation_metric] end end end diff --git a/sig/openai/models/eval_update_params.rbs b/sig/openai/models/eval_update_params.rbs index aa2b67fa..cb60c3c2 100644 --- a/sig/openai/models/eval_update_params.rbs +++ b/sig/openai/models/eval_update_params.rbs @@ -19,8 +19,6 @@ module OpenAI ?name: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::eval_update_params end end end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index 2cc7f800..5138ddc0 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -36,22 +36,20 @@ module OpenAI ?object: :eval ) -> void - def to_hash: -> OpenAI::Models::eval_update_response - type data_source_config = - OpenAI::Models::EvalCustomDataSourceConfig - | OpenAI::Models::EvalStoredCompletionsDataSourceConfig + OpenAI::EvalCustomDataSourceConfig + | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::data_source_config] end type testing_criterion = - OpenAI::Models::EvalLabelModelGrader - | OpenAI::Models::EvalStringCheckGrader - | OpenAI::Models::EvalTextSimilarityGrader + OpenAI::EvalLabelModelGrader + | OpenAI::EvalStringCheckGrader + | OpenAI::EvalTextSimilarityGrader | OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel @@ -89,8 +87,6 @@ module OpenAI ?pass_threshold: Float, ?type: :python ) -> void - - def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::python end type score_model = @@ -135,8 
+131,6 @@ module OpenAI ?type: :score_model ) -> void - def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::score_model - type input = { content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content, @@ -161,11 +155,9 @@ module OpenAI ?type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ ) -> void - def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::input - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText module Content @@ -179,11 +171,9 @@ module OpenAI attr_accessor type: :output_text def initialize: (text: String, ?type: :output_text) -> void - - def to_hash: -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content] end type role = :user | :assistant | :system | :developer @@ -211,7 +201,7 @@ module OpenAI end end - def self?.variants: -> [OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel] + def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] end end end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index bbe22adb..bec2dc8b 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -3,70 +3,66 @@ module OpenAI module Evals type create_eval_completions_run_data_source = { - source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, - input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, model: String, - sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source + attr_accessor source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source - attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_ + attr_accessor type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_ - attr_reader input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages? + attr_reader input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages? 
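A minimal usage sketch for the grader signatures earlier in this patch, assuming `require "openai"` loads the gem these signatures describe; the grader name and template strings are illustrative placeholders, and the keyword arguments and :eq/:ne/:like/:ilike values come from the `EvalStringCheckGrader#initialize` and `Operation` signatures shown above:

    require "openai"

    # Sketch: a string-check grader whose keywords mirror the RBS
    # `initialize` signature (input:, name:, operation:, reference:,
    # with ?type: defaulting to :string_check). The template strings
    # below are placeholders, not values from this patch.
    grader = OpenAI::EvalStringCheckGrader.new(
      input: "{{sample.output_text}}",
      name: "exact_match",
      operation: :eq,
      reference: "{{item.expected}}"
    )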
def input_messages=: ( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages - ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages + ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages attr_reader model: String? def model=: (String) -> String - attr_reader sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams? + attr_reader sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams? def sampling_params=: ( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams - ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams def initialize: ( - source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, - ?input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, + ?input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, ?model: String, - ?sampling_params: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + ?sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams ) -> void - def to_hash: -> OpenAI::Models::Evals::create_eval_completions_run_data_source - type source = - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions module Source extend OpenAI::Internal::Type::Union type file_content = { - content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], type: :file_content } class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] attr_accessor type: :file_content def initialize: ( - content: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -81,8 +77,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::content end end @@ -94,8 +88,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void 
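A similar hedged sketch for the completions run data source defined above, assuming `require "openai"` and a placeholder file ID; the required `source:` and `type:` keywords, the optional `model:`, and the `Source::FileID` constructor all follow the `initialize` signatures in this file:

    require "openai"

    # A file-backed source per `Source::FileID#initialize`
    # (id: String, ?type: :file_id). The ID is a placeholder.
    source = OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID.new(
      id: "file-abc123"
    )

    # The data source itself: `type` takes the :completions enum value
    # and `model` is optional per the signature (the model name here is
    # illustrative, not from this patch).
    data_source = OpenAI::Evals::CreateEvalCompletionsRunDataSource.new(
      source: source,
      type: :completions,
      model: "gpt-4o-mini"
    )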
- - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::file_id end type stored_completions = @@ -129,11 +121,9 @@ module OpenAI ?model: String?, ?type: :stored_completions ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::stored_completions end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::source] end type type_ = :completions @@ -143,71 +133,67 @@ module OpenAI COMPLETIONS: :completions - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_] end type input_messages = - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference module InputMessages extend OpenAI::Internal::Type::Union type template = { - template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], type: :template } class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + attr_accessor template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] attr_accessor type: :template def initialize: ( - template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::template - type template = - OpenAI::Models::Responses::EasyInputMessage - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Responses::EasyInputMessage + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message module Template extend OpenAI::Internal::Type::Union type message = { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content + attr_accessor content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content - attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role + attr_accessor role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role - attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? + attr_reader type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? def type=: ( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ def initialize: ( - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + ?type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::message - type content = String - | OpenAI::Models::Responses::ResponseInputText - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + | OpenAI::Responses::ResponseInputText + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText module Content extend OpenAI::Internal::Type::Union @@ -220,11 +206,9 @@ module OpenAI attr_accessor type: :output_text def initialize: (text: String, ?type: :output_text) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] end type role = :user | :assistant | :system | :developer @@ -237,7 +221,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] end type type_ = :message @@ -247,11 +231,11 @@ module OpenAI MESSAGE: :message - def 
self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] end end - def self?.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message] + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] end end @@ -267,11 +251,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages] end type sampling_params = @@ -305,8 +287,6 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::sampling_params end end end diff --git a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs index 46524f62..c376b674 100644 --- a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -3,47 +3,43 @@ module OpenAI module Evals type create_eval_jsonl_run_data_source = { - source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, type: :jsonl } class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source + attr_accessor source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source attr_accessor type: :jsonl def initialize: ( - source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, ?type: :jsonl ) -> void - def to_hash: -> OpenAI::Models::Evals::create_eval_jsonl_run_data_source - type source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent - | OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID module Source extend OpenAI::Internal::Type::Union type file_content = { - content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], type: :file_content } class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] attr_accessor type: :file_content def initialize: ( - content: ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], ?type: :file_content ) -> void - def to_hash: -> 
OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -58,8 +54,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::content end end @@ -71,11 +65,9 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::file_id end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::source] end end end diff --git a/sig/openai/models/evals/eval_api_error.rbs b/sig/openai/models/evals/eval_api_error.rbs index eaf19bc5..70bd686a 100644 --- a/sig/openai/models/evals/eval_api_error.rbs +++ b/sig/openai/models/evals/eval_api_error.rbs @@ -11,8 +11,6 @@ module OpenAI attr_accessor message: String def initialize: (code: String, message: String) -> void - - def to_hash: -> OpenAI::Models::Evals::eval_api_error end end end diff --git a/sig/openai/models/evals/run_cancel_params.rbs b/sig/openai/models/evals/run_cancel_params.rbs index fd4d80f8..32a0b270 100644 --- a/sig/openai/models/evals/run_cancel_params.rbs +++ b/sig/openai/models/evals/run_cancel_params.rbs @@ -14,8 +14,6 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Evals::run_cancel_params end end end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index d6a2ba57..359ecd76 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -6,7 +6,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -26,7 +26,7 @@ module OpenAI attr_accessor data_source: OpenAI::Models::Evals::RunCancelResponse::data_source - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor eval_id: String @@ -52,7 +52,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -65,11 +65,9 @@ module OpenAI ?object: :"eval.run" ) -> void - def to_hash: -> OpenAI::Models::Evals::run_cancel_response - type data_source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Evals::CreateEvalCompletionsRunDataSource | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions module DataSource @@ -113,8 +111,6 @@ module OpenAI ?type: :completions ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::completions - type source = OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID @@ -139,8 +135,6 @@ module 
OpenAI ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -155,8 +149,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::content end end @@ -168,8 +160,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::file_id end type responses = @@ -227,11 +217,9 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::responses end - def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source] end type input_messages = @@ -257,8 +245,6 @@ module OpenAI ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::template - type template = OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem @@ -274,8 +260,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::chat_message end type eval_item = @@ -302,11 +286,9 @@ module OpenAI ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::eval_item - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText module Content @@ -323,11 +305,9 @@ module OpenAI text: String, ?type: :output_text ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -354,7 +334,7 @@ module OpenAI end end - def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + def self?.variants: -> 
::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template] end end @@ -370,11 +350,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages] end type sampling_params = @@ -408,12 +386,10 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::sampling_params end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::data_source] end type per_model_usage = @@ -447,8 +423,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::per_model_usage end type per_testing_criteria_result = @@ -466,8 +440,6 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::per_testing_criteria_result end type result_counts = @@ -488,8 +460,6 @@ module OpenAI passed: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCancelResponse::result_counts end end end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index f38000c8..81184938 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -28,82 +28,76 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Evals::run_create_params - type data_source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Evals::CreateEvalCompletionsRunDataSource + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union type create_eval_responses_run_data_source = { - source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, model: String, - sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } class 
CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source + attr_accessor source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source - attr_accessor type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ + attr_accessor type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ - attr_reader input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? + attr_reader input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? def input_messages=: ( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages - ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages attr_reader model: String? def model=: (String) -> String - attr_reader sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? + attr_reader sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? def sampling_params=: ( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams def initialize: ( - source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - ?input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, ?model: String, - ?sampling_params: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ?sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::create_eval_responses_run_data_source - type source = - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + | 
OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses module Source extend OpenAI::Internal::Type::Union type file_content = { - content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], type: :file_content } class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + attr_accessor content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] attr_accessor type: :file_content def initialize: ( - content: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -118,8 +112,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::content end end @@ -131,8 +123,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::file_id end type responses = @@ -190,11 +180,9 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::responses end - def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] end type type_ = :completions @@ -204,37 +192,35 @@ module OpenAI COMPLETIONS: :completions - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] end type input_messages = - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference module InputMessages extend OpenAI::Internal::Type::Union type template = { - template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + 
template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], type: :template } class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + attr_accessor template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] attr_accessor type: :template def initialize: ( - template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::template - type template = - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem module Template extend OpenAI::Internal::Type::Union @@ -247,40 +233,36 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::chat_message end type eval_item = { - content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ } class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content + attr_accessor content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content - attr_accessor role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role + attr_accessor role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role - attr_reader type: 
OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? + attr_reader type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? def type=: ( - OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ def initialize: ( - content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::eval_item - type content = String - | OpenAI::Models::Responses::ResponseInputText - | OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Responses::ResponseInputText + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText module Content extend OpenAI::Internal::Type::Union @@ -296,11 +278,9 @@ module OpenAI text: String, ?type: :output_text ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -313,7 +293,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] end type type_ = :message @@ -323,11 +303,11 @@ module OpenAI MESSAGE: :message - def 
self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] end end - def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem] + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] end end @@ -343,11 +323,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] end type sampling_params = @@ -381,12 +359,10 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::sampling_params end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::data_source] end end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 571e81b9..f6ab37df 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -6,7 +6,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -26,7 +26,7 @@ module OpenAI attr_accessor data_source: OpenAI::Models::Evals::RunCreateResponse::data_source - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor eval_id: String @@ -52,7 +52,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -65,11 +65,9 @@ module OpenAI ?object: :"eval.run" ) -> void - def to_hash: -> OpenAI::Models::Evals::run_create_response - type data_source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Evals::CreateEvalCompletionsRunDataSource | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions module DataSource @@ -113,8 +111,6 @@ module OpenAI ?type: :completions ) -> void - def to_hash: -> 
OpenAI::Models::Evals::RunCreateResponse::DataSource::completions - type source = OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID @@ -139,8 +135,6 @@ module OpenAI ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -155,8 +149,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::content end end @@ -168,8 +160,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::file_id end type responses = @@ -227,11 +217,9 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::responses end - def self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source] end type input_messages = @@ -257,8 +245,6 @@ module OpenAI ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::template - type template = OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem @@ -274,8 +260,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::chat_message end type eval_item = @@ -302,11 +286,9 @@ module OpenAI ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::eval_item - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText module Content @@ -323,11 +305,9 @@ module OpenAI text: String, ?type: :output_text ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -354,7 +334,7 @@ module OpenAI end end - def 
self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template] end end @@ -370,11 +350,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages] end type sampling_params = @@ -408,12 +386,10 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::sampling_params end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::data_source] end type per_model_usage = @@ -447,8 +423,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::per_model_usage end type per_testing_criteria_result = @@ -466,8 +440,6 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::per_testing_criteria_result end type result_counts = @@ -488,8 +460,6 @@ module OpenAI passed: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunCreateResponse::result_counts end end end diff --git a/sig/openai/models/evals/run_delete_params.rbs b/sig/openai/models/evals/run_delete_params.rbs index db7ba2df..f775e4d5 100644 --- a/sig/openai/models/evals/run_delete_params.rbs +++ b/sig/openai/models/evals/run_delete_params.rbs @@ -14,8 +14,6 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Evals::run_delete_params end end end diff --git a/sig/openai/models/evals/run_delete_response.rbs b/sig/openai/models/evals/run_delete_response.rbs index c3fe313f..21d23706 100644 --- a/sig/openai/models/evals/run_delete_response.rbs +++ b/sig/openai/models/evals/run_delete_response.rbs @@ -22,8 +22,6 @@ module OpenAI ?object: String, ?run_id: String ) -> void - - def to_hash: -> OpenAI::Models::Evals::run_delete_response end end end diff --git a/sig/openai/models/evals/run_list_params.rbs b/sig/openai/models/evals/run_list_params.rbs index 7a65ceef..33c002ca 100644 --- a/sig/openai/models/evals/run_list_params.rbs +++ b/sig/openai/models/evals/run_list_params.rbs @@ -42,8 +42,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Evals::run_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 60062163..baadd18a 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -6,7 +6,7 @@ module OpenAI id: 
String, created_at: Integer, data_source: OpenAI::Models::Evals::RunListResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -26,7 +26,7 @@ module OpenAI attr_accessor data_source: OpenAI::Models::Evals::RunListResponse::data_source - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor eval_id: String @@ -52,7 +52,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunListResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -65,11 +65,9 @@ module OpenAI ?object: :"eval.run" ) -> void - def to_hash: -> OpenAI::Models::Evals::run_list_response - type data_source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Evals::CreateEvalCompletionsRunDataSource | OpenAI::Models::Evals::RunListResponse::DataSource::Completions module DataSource @@ -113,8 +111,6 @@ module OpenAI ?type: :completions ) -> void - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::completions - type source = OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID @@ -139,8 +135,6 @@ module OpenAI ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -155,8 +149,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::content end end @@ -168,8 +160,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::file_id end type responses = @@ -227,11 +217,9 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::responses end - def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source] end type input_messages = @@ -257,8 +245,6 @@ module OpenAI ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::template - type template = OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem @@ -274,8 +260,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::chat_message end type eval_item = @@ 
-302,11 +286,9 @@ module OpenAI ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::eval_item - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText module Content @@ -323,11 +305,9 @@ module OpenAI text: String, ?type: :output_text ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -354,7 +334,7 @@ module OpenAI end end - def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template] end end @@ -370,11 +350,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages] end type sampling_params = @@ -408,12 +386,10 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::sampling_params end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::data_source] end type per_model_usage = @@ -447,8 +423,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::per_model_usage end type per_testing_criteria_result = @@ -466,8 +440,6 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::per_testing_criteria_result end type result_counts = @@ -488,8 +460,6 @@ module OpenAI passed: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunListResponse::result_counts end end end diff --git a/sig/openai/models/evals/run_retrieve_params.rbs b/sig/openai/models/evals/run_retrieve_params.rbs index 50130d18..955259eb 100644 --- a/sig/openai/models/evals/run_retrieve_params.rbs +++ b/sig/openai/models/evals/run_retrieve_params.rbs @@ -14,8 +14,6 @@ module 
OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Evals::run_retrieve_params end end end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index 6714cb0c..d8bce8d1 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -6,7 +6,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -26,7 +26,7 @@ module OpenAI attr_accessor data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor eval_id: String @@ -52,7 +52,7 @@ module OpenAI id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: OpenAI::Models::metadata?, model: String, @@ -65,11 +65,9 @@ module OpenAI ?object: :"eval.run" ) -> void - def to_hash: -> OpenAI::Models::Evals::run_retrieve_response - type data_source = - OpenAI::Models::Evals::CreateEvalJSONLRunDataSource - | OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource + OpenAI::Evals::CreateEvalJSONLRunDataSource + | OpenAI::Evals::CreateEvalCompletionsRunDataSource | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions module DataSource @@ -113,8 +111,6 @@ module OpenAI ?type: :completions ) -> void - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::completions - type source = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID @@ -139,8 +135,6 @@ module OpenAI ?type: :file_content ) -> void - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::file_content - type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -155,8 +149,6 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::content end end @@ -168,8 +160,6 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::file_id end type responses = @@ -227,11 +217,9 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::responses end - def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source] end type input_messages = @@ -257,8 +245,6 @@ module OpenAI ?type: :template ) -> void - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::template - type 
template = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem @@ -274,8 +260,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::chat_message end type eval_item = @@ -302,11 +286,9 @@ module OpenAI ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ ) -> void - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::eval_item - type content = String - | OpenAI::Models::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText module Content @@ -323,11 +305,9 @@ module OpenAI text: String, ?type: :output_text ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::output_text end - def self?.variants: -> [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -354,7 +334,7 @@ module OpenAI end end - def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template] end end @@ -370,11 +350,9 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::item_reference end - def self?.variants: -> [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages] end type sampling_params = @@ -408,12 +386,10 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::sampling_params end end - def self?.variants: -> [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::data_source] end type per_model_usage = @@ -447,8 +423,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::per_model_usage end type per_testing_criteria_result = @@ -466,8 +440,6 @@ module OpenAI passed: Integer, 
testing_criteria: String ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::per_testing_criteria_result end type result_counts = @@ -488,8 +460,6 @@ module OpenAI passed: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::RunRetrieveResponse::result_counts end end end diff --git a/sig/openai/models/evals/runs/output_item_list_params.rbs b/sig/openai/models/evals/runs/output_item_list_params.rbs index 53ec2a12..4ff88c6f 100644 --- a/sig/openai/models/evals/runs/output_item_list_params.rbs +++ b/sig/openai/models/evals/runs/output_item_list_params.rbs @@ -47,8 +47,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Evals::Runs::output_item_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/evals/runs/output_item_list_response.rbs b/sig/openai/models/evals/runs/output_item_list_response.rbs index f33ed688..8f79060c 100644 --- a/sig/openai/models/evals/runs/output_item_list_response.rbs +++ b/sig/openai/models/evals/runs/output_item_list_response.rbs @@ -50,11 +50,9 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void - def to_hash: -> OpenAI::Models::Evals::Runs::output_item_list_response - type sample = { - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, finish_reason: String, input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], max_completion_tokens: Integer, @@ -67,7 +65,7 @@ module OpenAI } class Sample < OpenAI::Internal::Type::BaseModel - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor finish_reason: String @@ -88,7 +86,7 @@ module OpenAI attr_accessor usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage def initialize: ( - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, finish_reason: String, input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], max_completion_tokens: Integer, @@ -100,8 +98,6 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage ) -> void - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::sample - type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -110,8 +106,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::input end type output = { content: String, role: String } @@ -126,8 +120,6 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::output end type usage = @@ -153,8 +145,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::usage end end end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs index 1631b85e..91add94f 100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs @@ -19,8 +19,6 @@ module OpenAI run_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::output_item_retrieve_params end end end diff --git 
a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs index fc662a02..87d32fa3 100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs @@ -50,11 +50,9 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void - def to_hash: -> OpenAI::Models::Evals::Runs::output_item_retrieve_response - type sample = { - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, finish_reason: String, input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], max_completion_tokens: Integer, @@ -67,7 +65,7 @@ module OpenAI } class Sample < OpenAI::Internal::Type::BaseModel - attr_accessor error: OpenAI::Models::Evals::EvalAPIError + attr_accessor error: OpenAI::Evals::EvalAPIError attr_accessor finish_reason: String @@ -88,7 +86,7 @@ module OpenAI attr_accessor usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage def initialize: ( - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, finish_reason: String, input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], max_completion_tokens: Integer, @@ -100,8 +98,6 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage ) -> void - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::sample - type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -110,8 +106,6 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::input end type output = { content: String, role: String } @@ -126,8 +120,6 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::output end type usage = @@ -153,8 +145,6 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::usage end end end diff --git a/sig/openai/models/file_chunking_strategy.rbs b/sig/openai/models/file_chunking_strategy.rbs index ebe4dcb5..5f6d7277 100644 --- a/sig/openai/models/file_chunking_strategy.rbs +++ b/sig/openai/models/file_chunking_strategy.rbs @@ -1,13 +1,13 @@ module OpenAI module Models type file_chunking_strategy = - OpenAI::Models::StaticFileChunkingStrategyObject - | OpenAI::Models::OtherFileChunkingStrategyObject + OpenAI::StaticFileChunkingStrategyObject + | OpenAI::OtherFileChunkingStrategyObject module FileChunkingStrategy extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + def self?.variants: -> ::Array[OpenAI::Models::file_chunking_strategy] end end end diff --git a/sig/openai/models/file_chunking_strategy_param.rbs b/sig/openai/models/file_chunking_strategy_param.rbs index 22233a43..434e01b4 100644 --- a/sig/openai/models/file_chunking_strategy_param.rbs +++ b/sig/openai/models/file_chunking_strategy_param.rbs @@ -1,13 +1,13 @@ module OpenAI module Models type file_chunking_strategy_param = - OpenAI::Models::AutoFileChunkingStrategyParam - | OpenAI::Models::StaticFileChunkingStrategyObjectParam + OpenAI::AutoFileChunkingStrategyParam + | 
OpenAI::StaticFileChunkingStrategyObjectParam module FileChunkingStrategyParam extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + def self?.variants: -> ::Array[OpenAI::Models::file_chunking_strategy_param] end end end diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index d0eb084a..df1a12a0 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -7,8 +7,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::file_content_params end end end diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 33f381f0..54eaab64 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -20,8 +20,6 @@ module OpenAI purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::file_create_params end end end diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index bd9575f3..8cd08d59 100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -7,8 +7,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::file_delete_params end end end diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs index e4134f28..fd681d7c 100644 --- a/sig/openai/models/file_deleted.rbs +++ b/sig/openai/models/file_deleted.rbs @@ -10,8 +10,6 @@ module OpenAI attr_accessor object: :file def initialize: (id: String, deleted: bool, ?object: :file) -> void - - def to_hash: -> OpenAI::Models::file_deleted end end end diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 8cc477d7..51c07c76 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -39,8 +39,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::file_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index 2ea4ec93..3ebcb910 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -7,8 +7,8 @@ module OpenAI created_at: Integer, filename: String, object: :file, - purpose: OpenAI::Models::FileObject::purpose, - status: OpenAI::Models::FileObject::status, + purpose: OpenAI::FileObject::purpose, + status: OpenAI::FileObject::status, expires_at: Integer, status_details: String } @@ -24,9 +24,9 @@ module OpenAI attr_accessor object: :file - attr_accessor purpose: OpenAI::Models::FileObject::purpose + attr_accessor purpose: OpenAI::FileObject::purpose - attr_accessor status: OpenAI::Models::FileObject::status + attr_accessor status: OpenAI::FileObject::status attr_reader expires_at: Integer? 
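The recurring change across these signature files swaps the fixed tuple return on each union's `variants` for `::Array[...]` of the union's own type alias. A minimal Ruby sketch of what that signature describes, assuming the generated `variants` class method returns the union's possible variant types at runtime (behavior implied by, but not shown in, this patch):

  # Assumed behavior: Union.variants lists the types the union can decode into.
  variants = OpenAI::Models::FileChunkingStrategy.variants
  # => [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject]
  variants.each { |klass| puts klass }

The tuple form pinned the exact order and arity in RBS; the `::Array[alias]` form stays accurate if variants are added later without editing every signature.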
@@ -41,15 +41,13 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::Models::FileObject::purpose, - status: OpenAI::Models::FileObject::status, + purpose: OpenAI::FileObject::purpose, + status: OpenAI::FileObject::status, ?expires_at: Integer, ?status_details: String, ?object: :file ) -> void - def to_hash: -> OpenAI::Models::file_object - type purpose = :assistants | :assistants_output @@ -70,7 +68,7 @@ module OpenAI FINE_TUNE_RESULTS: :"fine-tune-results" VISION: :vision - def self?.values: -> ::Array[OpenAI::Models::FileObject::purpose] + def self?.values: -> ::Array[OpenAI::FileObject::purpose] end type status = :uploaded | :processed | :error @@ -82,7 +80,7 @@ module OpenAI PROCESSED: :processed ERROR: :error - def self?.values: -> ::Array[OpenAI::Models::FileObject::status] + def self?.values: -> ::Array[OpenAI::FileObject::status] end end end diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index 818e9432..4788e5a6 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::file_retrieve_params end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs index 2a715c70..d8a52d89 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs @@ -16,8 +16,6 @@ module OpenAI project_ids: ::Array[String], ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_create_params end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs index 65e33f69..1bb85f1a 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs @@ -25,8 +25,6 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_create_response end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs index d57abc16..78bbd44d 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs @@ -16,8 +16,6 @@ module OpenAI fine_tuned_model_checkpoint: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_delete_params end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs index f3920ae7..7cf0427b 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs @@ -17,8 +17,6 @@ module OpenAI deleted: bool, ?object: :"checkpoint.permission" ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_delete_response end end end diff 
--git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs index fe2c9c7d..0b609dbc 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs @@ -41,8 +41,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_retrieve_params - type order = :ascending | :descending module Order diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs index 061528d5..db9a1b9d 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -30,8 +30,6 @@ module OpenAI ?object: :list ) -> void - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::permission_retrieve_response - type data = { id: String, @@ -55,8 +53,6 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::data end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index 8a66e5a9..bcf014f4 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -7,23 +7,23 @@ module OpenAI { id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error?, + error: OpenAI::FineTuning::FineTuningJob::Error?, fine_tuned_model: String?, finished_at: Integer?, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, object: :"fine_tuning.job", organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::status, + status: OpenAI::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, estimated_finish: Integer?, - integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?, + integrations: ::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]?, metadata: OpenAI::Models::metadata?, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method + method_: OpenAI::FineTuning::FineTuningJob::Method } class FineTuningJob < OpenAI::Internal::Type::BaseModel @@ -31,13 +31,13 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor error: OpenAI::Models::FineTuning::FineTuningJob::Error? + attr_accessor error: OpenAI::FineTuning::FineTuningJob::Error? attr_accessor fine_tuned_model: String? attr_accessor finished_at: Integer? - attr_accessor hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters + attr_accessor hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters attr_accessor model: String @@ -49,7 +49,7 @@ module OpenAI attr_accessor seed: Integer - attr_accessor status: OpenAI::Models::FineTuning::FineTuningJob::status + attr_accessor status: OpenAI::FineTuning::FineTuningJob::status attr_accessor trained_tokens: Integer? @@ -59,40 +59,38 @@ module OpenAI attr_accessor estimated_finish: Integer? 
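Alongside the `variants` change, these hunks rewrite references such as `OpenAI::Models::FineTuning::FineTuningJob` to the shorter `OpenAI::FineTuning::FineTuningJob`. A hedged sketch of the relationship this implies; whether the short constants are aliases of the `Models` ones is an assumption, since the aliasing code is not part of this excerpt:

  # Assumption: the gem defines OpenAI::FineTuning as an alias (or re-export)
  # of OpenAI::Models::FineTuning, so both paths name the same class.
  OpenAI::FineTuning::FineTuningJob.equal?(OpenAI::Models::FineTuning::FineTuningJob)
  # => true if the alias exists; false (or NameError) otherwise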
- attr_accessor integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]? + attr_accessor integrations: ::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]? attr_accessor metadata: OpenAI::Models::metadata? - attr_reader method_: OpenAI::Models::FineTuning::FineTuningJob::Method? + attr_reader method_: OpenAI::FineTuning::FineTuningJob::Method? def method_=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method + OpenAI::FineTuning::FineTuningJob::Method + ) -> OpenAI::FineTuning::FineTuningJob::Method def initialize: ( id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error?, + error: OpenAI::FineTuning::FineTuningJob::Error?, fine_tuned_model: String?, finished_at: Integer?, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::status, + status: OpenAI::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, ?estimated_finish: Integer?, - ?integrations: ::Array[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]?, + ?integrations: ::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]?, ?metadata: OpenAI::Models::metadata?, - ?method_: OpenAI::Models::FineTuning::FineTuningJob::Method, + ?method_: OpenAI::FineTuning::FineTuningJob::Method, ?object: :"fine_tuning.job" ) -> void - def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job - type error = { code: String, message: String, param: String? } class Error < OpenAI::Internal::Type::BaseModel @@ -107,50 +105,46 @@ module OpenAI message: String, param: String? ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::error end type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size + OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? 
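Every model in this patch also loses its hand-written `def to_hash:` signature. A sketch of the serialization that presumably remains available, assuming `OpenAI::Internal::Type::BaseModel` provides a generic hash conversion (the shared base class is referenced throughout, but its own signature is outside this excerpt):

  # Assumption: BaseModel supplies a generic #to_h, making the removed
  # per-model to_hash overrides redundant rather than a behavior change.
  err = OpenAI::FineTuning::FineTuningJob::Error.new(
    code: "invalid_training_file",        # placeholder values for illustration
    message: "training file is malformed",
    param: nil
  )
  err.to_h # => hash of the declared fields (code, message, param)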
def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs? def n_epochs=: ( - OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -158,7 +152,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -166,7 +160,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs] end end @@ -188,109 +182,103 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::status] + def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::status] end type method_ = { - dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, - supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ + dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, + supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised, + type: OpenAI::FineTuning::FineTuningJob::Method::type_ } class Method < OpenAI::Internal::Type::BaseModel - attr_reader dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo? + attr_reader dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo? def dpo=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo + OpenAI::FineTuning::FineTuningJob::Method::Dpo + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo - attr_reader supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised? + attr_reader supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised? 
def supervised=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised + OpenAI::FineTuning::FineTuningJob::Method::Supervised + ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised - attr_reader type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_? + attr_reader type: OpenAI::FineTuning::FineTuningJob::Method::type_? def type=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::type_ - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::type_ + OpenAI::FineTuning::FineTuningJob::Method::type_ + ) -> OpenAI::FineTuning::FineTuningJob::Method::type_ def initialize: ( - ?dpo: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, - ?supervised: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, - ?type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ + ?dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, + ?supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised, + ?type: OpenAI::FineTuning::FineTuningJob::Method::type_ ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::method_ - type dpo = { - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } class Dpo < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters? + attr_reader hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters? def hyperparameters=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters def initialize: ( - ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters + ?hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::dpo - type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, + beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, + learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size? 
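The `:auto | Integer` and `:auto | Float` aliases here mean each DPO hyperparameter accepts either the literal symbol :auto or a number. A minimal construction sketch, grounded in the optional `initialize` keywords declared alongside these signatures:

  # Each field takes :auto or a numeric value, per the union aliases.
  hp = OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters.new(
    batch_size: :auto,               # :auto | Integer
    beta: 0.1,                       # :auto | Float
    learning_rate_multiplier: :auto, # :auto | Float
    n_epochs: 3                      # :auto | Integer
  )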
def batch_size=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size - attr_reader beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta? + attr_reader beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta? def beta=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs? def n_epochs=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs + OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, - ?beta: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, + ?beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, + ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size] end type beta = :auto | Float @@ -298,7 +286,7 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta] end type 
learning_rate_multiplier = :auto | Float @@ -306,7 +294,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -314,69 +302,65 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs] end end end type supervised = { - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters } class Supervised < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters? + attr_reader hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters? def hyperparameters=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters def initialize: ( - ?hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters + ?hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::supervised - type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier? 
def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs? def n_epochs=: ( - OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs + OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -384,7 +368,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -392,7 +376,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs] end end end @@ -405,7 +389,7 @@ module OpenAI SUPERVISED: :supervised DPO: :dpo - def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::type_] + def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index dfc567a3..018e7195 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -7,11 +7,11 @@ module OpenAI { id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, + level: OpenAI::FineTuning::FineTuningJobEvent::level, message: String, object: :"fine_tuning.job.event", data: top, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + type: 
OpenAI::FineTuning::FineTuningJobEvent::type_ } class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel @@ -19,7 +19,7 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor level: OpenAI::Models::FineTuning::FineTuningJobEvent::level + attr_accessor level: OpenAI::FineTuning::FineTuningJobEvent::level attr_accessor message: String @@ -29,24 +29,22 @@ module OpenAI def data=: (top) -> top - attr_reader type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_? + attr_reader type: OpenAI::FineTuning::FineTuningJobEvent::type_? def type=: ( - OpenAI::Models::FineTuning::FineTuningJobEvent::type_ - ) -> OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + OpenAI::FineTuning::FineTuningJobEvent::type_ + ) -> OpenAI::FineTuning::FineTuningJobEvent::type_ def initialize: ( id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, + level: OpenAI::FineTuning::FineTuningJobEvent::level, message: String, ?data: top, - ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_, + ?type: OpenAI::FineTuning::FineTuningJobEvent::type_, ?object: :"fine_tuning.job.event" ) -> void - def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_event - type level = :info | :warn | :error module Level @@ -56,7 +54,7 @@ module OpenAI WARN: :warn ERROR: :error - def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::level] + def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::level] end type type_ = :message | :metrics @@ -67,7 +65,7 @@ module OpenAI MESSAGE: :message METRICS: :metrics - def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::type_] + def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs index b952d550..a1e46c1a 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs @@ -28,8 +28,6 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index 559090c6..cef52fc6 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -6,20 +6,18 @@ module OpenAI type fine_tuning_job_wandb_integration_object = { type: :wandb, - wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration } class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel attr_accessor type: :wandb - attr_accessor wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration + attr_accessor wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration def initialize: ( - wandb: OpenAI::Models::FineTuning::FineTuningJobWandbIntegration, + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration, ?type: :wandb ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::fine_tuning_job_wandb_integration_object end end end diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index 46fadbe0..bba98884 100644 --- 
a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::FineTuning::job_cancel_params end end end diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 86773182..0669fd09 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -5,10 +5,10 @@ module OpenAI { model: OpenAI::Models::FineTuning::JobCreateParams::model, training_file: String, - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, + integrations: ::Array[OpenAI::FineTuning::JobCreateParams::Integration]?, metadata: OpenAI::Models::metadata?, - method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + method_: OpenAI::FineTuning::JobCreateParams::Method, seed: Integer?, suffix: String?, validation_file: String? @@ -23,21 +23,21 @@ module OpenAI attr_accessor training_file: String - attr_reader hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters? + attr_reader hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters? def hyperparameters=: ( - OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters - ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters + OpenAI::FineTuning::JobCreateParams::Hyperparameters + ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters - attr_accessor integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]? + attr_accessor integrations: ::Array[OpenAI::FineTuning::JobCreateParams::Integration]? attr_accessor metadata: OpenAI::Models::metadata? - attr_reader method_: OpenAI::Models::FineTuning::JobCreateParams::Method? + attr_reader method_: OpenAI::FineTuning::JobCreateParams::Method? def method_=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method + OpenAI::FineTuning::JobCreateParams::Method + ) -> OpenAI::FineTuning::JobCreateParams::Method attr_accessor seed: Integer? 
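JobCreateParams declares the request shape: required `model` and `training_file`, plus optional `hyperparameters`, `integrations`, `metadata`, `method_`, `seed`, `suffix`, and `validation_file`. A usage sketch; the resource path `client.fine_tuning.jobs.create` is an assumption from typical SDK layout, not something this patch shows:

  # Hypothetical call path; parameter names come from the RBS above.
  client = OpenAI::Client.new
  client.fine_tuning.jobs.create(
    model: :"gpt-4o-mini",                           # String or a listed model symbol
    training_file: "file-abc123",                    # placeholder file ID
    hyperparameters: {batch_size: :auto, n_epochs: 3},
    seed: 42
  )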
@@ -48,18 +48,16 @@ module OpenAI def initialize: ( model: OpenAI::Models::FineTuning::JobCreateParams::model, training_file: String, - ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, + ?integrations: ::Array[OpenAI::FineTuning::JobCreateParams::Integration]?, ?metadata: OpenAI::Models::metadata?, - ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + ?method_: OpenAI::FineTuning::JobCreateParams::Method, ?seed: Integer?, ?suffix: String?, ?validation_file: String?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::FineTuning::job_create_params - type model = String | :"babbage-002" @@ -70,10 +68,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, (:"babbage-002" - | :"davinci-002" - | :"gpt-3.5-turbo" - | :"gpt-4o-mini")] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::model] BABBAGE_002: :"babbage-002" DAVINCI_002: :"davinci-002" @@ -83,44 +78,42 @@ module OpenAI type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size + OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -128,7 +121,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -136,28 +129,26 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs] end end type integration = { type: :wandb, - wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb + wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb } class Integration < OpenAI::Internal::Type::BaseModel attr_accessor type: :wandb - attr_accessor wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb + attr_accessor wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb def initialize: ( - wandb: OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb, + wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb, ?type: :wandb ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::integration - type wandb = { project: String, @@ -183,111 +174,103 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Integration::wandb end end type method_ = { - dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, - supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ + dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, + supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised, + type: OpenAI::FineTuning::JobCreateParams::Method::type_ } class Method < OpenAI::Internal::Type::BaseModel - attr_reader dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo? + attr_reader dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo? 
def dpo=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo + OpenAI::FineTuning::JobCreateParams::Method::Dpo + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo - attr_reader supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised? + attr_reader supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised? def supervised=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised + OpenAI::FineTuning::JobCreateParams::Method::Supervised + ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised - attr_reader type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_? + attr_reader type: OpenAI::FineTuning::JobCreateParams::Method::type_? def type=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::type_ - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::type_ + OpenAI::FineTuning::JobCreateParams::Method::type_ + ) -> OpenAI::FineTuning::JobCreateParams::Method::type_ def initialize: ( - ?dpo: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, - ?supervised: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, - ?type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ + ?dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, + ?supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised, + ?type: OpenAI::FineTuning::JobCreateParams::Method::type_ ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::method_ - type dpo = { - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters } class Dpo < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters? + attr_reader hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters? 
def hyperparameters=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters def initialize: ( - ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters + ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::dpo - type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, + beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, + learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size - attr_reader beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta? + attr_reader beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta? def beta=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs + OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, - ?beta: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, + ?beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, + ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size] end type beta = :auto | Float @@ -295,7 +278,7 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta] end type learning_rate_multiplier = :auto | Float @@ -303,7 +286,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -311,69 +294,65 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs] end end end type supervised = { - hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters } class Supervised < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters? + attr_reader hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters? 
def hyperparameters=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters def initialize: ( - ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters + ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::supervised - type hyperparameters = { - batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs + batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size + ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs - ) -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs + OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs + ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs + ?batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs ) -> void - def to_hash: -> OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::hyperparameters - type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -381,7 +360,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Float] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -389,7 +368,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> [:auto, Integer] + def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs] end end end @@ -402,7 +381,7 @@ module OpenAI SUPERVISED: :supervised DPO: :dpo - def self?.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::type_] + def self?.values: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index 79968e83..f3d26bd2 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -22,8 +22,6 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::job_list_events_params end end end diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index b32ac587..db301888 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -25,8 +25,6 @@ module OpenAI ?metadata: ::Hash[Symbol, String]?, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::job_list_params end end end diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index 179216b1..ed195b91 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -9,8 +9,6 @@ module OpenAI include 
OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::FineTuning::job_retrieve_params end end end diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 012833d8..2cf57298 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -23,8 +23,6 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Jobs::checkpoint_list_params end end end diff --git a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index 1f11cb97..51e7737c 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -8,7 +8,7 @@ module OpenAI created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, object: :"fine_tuning.job.checkpoint", step_number: Integer } @@ -22,7 +22,7 @@ module OpenAI attr_accessor fine_tuning_job_id: String - attr_accessor metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics + attr_accessor metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics attr_accessor object: :"fine_tuning.job.checkpoint" @@ -33,13 +33,11 @@ module OpenAI created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, step_number: Integer, ?object: :"fine_tuning.job.checkpoint" ) -> void - def to_hash: -> OpenAI::Models::FineTuning::Jobs::fine_tuning_job_checkpoint - type metrics = { full_valid_loss: Float, @@ -89,8 +87,6 @@ module OpenAI ?valid_loss: Float, ?valid_mean_token_accuracy: Float ) -> void - - def to_hash: -> OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::metrics end end end diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs index c8acb986..40bf94b9 100644 --- a/sig/openai/models/function_definition.rbs +++ b/sig/openai/models/function_definition.rbs @@ -29,8 +29,6 @@ module OpenAI ?parameters: OpenAI::Models::function_parameters, ?strict: bool? 
) -> void - - def to_hash: -> OpenAI::Models::function_definition end end end diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs index f95aa74f..96a4fccc 100644 --- a/sig/openai/models/image.rbs +++ b/sig/openai/models/image.rbs @@ -20,8 +20,6 @@ module OpenAI ?revised_prompt: String, ?url: String ) -> void - - def to_hash: -> OpenAI::Models::image end end end diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index 1d87d6c6..e47b6178 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -39,14 +39,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::image_create_variation_params - type model = String | OpenAI::Models::image_model module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> ::Array[OpenAI::Models::ImageCreateVariationParams::model] end type response_format = :url | :b64_json diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index b8499e7e..75a3c9e4 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -59,8 +59,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::image_edit_params - type image = Pathname | StringIO @@ -71,7 +69,7 @@ module OpenAI module Image extend OpenAI::Internal::Type::Union - def self?.variants: -> [StringIO, ::Array[StringIO]] + def self?.variants: -> ::Array[OpenAI::Models::ImageEditParams::image] StringArray: OpenAI::Internal::Type::Converter end @@ -93,7 +91,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> ::Array[OpenAI::Models::ImageEditParams::model] end type quality = :standard | :low | :medium | :high | :auto diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index c5efaac8..d6367505 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -63,8 +63,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::image_generate_params - type background = :transparent | :opaque | :auto module Background @@ -82,7 +80,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::image_model] + def self?.variants: -> ::Array[OpenAI::Models::ImageGenerateParams::model] end type moderation = :low | :auto diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index 5f23336d..da6579dc 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -3,37 +3,33 @@ module OpenAI type images_response = { created: Integer, - data: ::Array[OpenAI::Models::Image], - usage: OpenAI::Models::ImagesResponse::Usage + data: ::Array[OpenAI::Image], + usage: OpenAI::ImagesResponse::Usage } class ImagesResponse < OpenAI::Internal::Type::BaseModel attr_accessor created: Integer - attr_reader data: ::Array[OpenAI::Models::Image]? + attr_reader data: ::Array[OpenAI::Image]? 
- def data=: ( - ::Array[OpenAI::Models::Image] - ) -> ::Array[OpenAI::Models::Image] + def data=: (::Array[OpenAI::Image]) -> ::Array[OpenAI::Image] - attr_reader usage: OpenAI::Models::ImagesResponse::Usage? + attr_reader usage: OpenAI::ImagesResponse::Usage? def usage=: ( - OpenAI::Models::ImagesResponse::Usage - ) -> OpenAI::Models::ImagesResponse::Usage + OpenAI::ImagesResponse::Usage + ) -> OpenAI::ImagesResponse::Usage def initialize: ( created: Integer, - ?data: ::Array[OpenAI::Models::Image], - ?usage: OpenAI::Models::ImagesResponse::Usage + ?data: ::Array[OpenAI::Image], + ?usage: OpenAI::ImagesResponse::Usage ) -> void - def to_hash: -> OpenAI::Models::images_response - type usage = { input_tokens: Integer, - input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, + input_tokens_details: OpenAI::ImagesResponse::Usage::InputTokensDetails, output_tokens: Integer, total_tokens: Integer } @@ -41,7 +37,7 @@ module OpenAI class Usage < OpenAI::Internal::Type::BaseModel attr_accessor input_tokens: Integer - attr_accessor input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails + attr_accessor input_tokens_details: OpenAI::ImagesResponse::Usage::InputTokensDetails attr_accessor output_tokens: Integer @@ -49,13 +45,11 @@ module OpenAI def initialize: ( input_tokens: Integer, - input_tokens_details: OpenAI::Models::ImagesResponse::Usage::InputTokensDetails, + input_tokens_details: OpenAI::ImagesResponse::Usage::InputTokensDetails, output_tokens: Integer, total_tokens: Integer ) -> void - def to_hash: -> OpenAI::Models::ImagesResponse::usage - type input_tokens_details = { image_tokens: Integer, text_tokens: Integer } @@ -65,8 +59,6 @@ module OpenAI attr_accessor text_tokens: Integer def initialize: (image_tokens: Integer, text_tokens: Integer) -> void - - def to_hash: -> OpenAI::Models::ImagesResponse::Usage::input_tokens_details end end end diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs index 88000ee1..79402173 100644 --- a/sig/openai/models/model.rbs +++ b/sig/openai/models/model.rbs @@ -18,8 +18,6 @@ module OpenAI owned_by: String, ?object: :model ) -> void - - def to_hash: -> OpenAI::Models::model end end end diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index ab28b6e7..16285da7 100644 --- a/sig/openai/models/model_delete_params.rbs +++ b/sig/openai/models/model_delete_params.rbs @@ -7,8 +7,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::model_delete_params end end end diff --git a/sig/openai/models/model_deleted.rbs b/sig/openai/models/model_deleted.rbs index f1c1b5f9..b95b2a67 100644 --- a/sig/openai/models/model_deleted.rbs +++ b/sig/openai/models/model_deleted.rbs @@ -10,8 +10,6 @@ module OpenAI attr_accessor object: String def initialize: (id: String, deleted: bool, object: String) -> void - - def to_hash: -> OpenAI::Models::model_deleted end end end diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index ffbe02ed..37d678e8 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -7,8 +7,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::model_list_params end end end diff --git a/sig/openai/models/model_retrieve_params.rbs 
b/sig/openai/models/model_retrieve_params.rbs index 8cf3afab..fed08da1 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::model_retrieve_params end end end diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index 2a38be49..7d543126 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -2,30 +2,28 @@ module OpenAI module Models type moderation = { - categories: OpenAI::Models::Moderation::Categories, - category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes, - category_scores: OpenAI::Models::Moderation::CategoryScores, + categories: OpenAI::Moderation::Categories, + category_applied_input_types: OpenAI::Moderation::CategoryAppliedInputTypes, + category_scores: OpenAI::Moderation::CategoryScores, flagged: bool } class Moderation < OpenAI::Internal::Type::BaseModel - attr_accessor categories: OpenAI::Models::Moderation::Categories + attr_accessor categories: OpenAI::Moderation::Categories - attr_accessor category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes + attr_accessor category_applied_input_types: OpenAI::Moderation::CategoryAppliedInputTypes - attr_accessor category_scores: OpenAI::Models::Moderation::CategoryScores + attr_accessor category_scores: OpenAI::Moderation::CategoryScores attr_accessor flagged: bool def initialize: ( - categories: OpenAI::Models::Moderation::Categories, - category_applied_input_types: OpenAI::Models::Moderation::CategoryAppliedInputTypes, - category_scores: OpenAI::Models::Moderation::CategoryScores, + categories: OpenAI::Moderation::Categories, + category_applied_input_types: OpenAI::Moderation::CategoryAppliedInputTypes, + category_scores: OpenAI::Moderation::CategoryScores, flagged: bool ) -> void - def to_hash: -> OpenAI::Models::moderation - type categories = { harassment: bool, @@ -85,72 +83,68 @@ module OpenAI violence: bool, violence_graphic: bool ) -> void - - def to_hash: -> OpenAI::Models::Moderation::categories end type category_applied_input_types = { - harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], - harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], - hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], - hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], - illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], - illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], - self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], - self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], - self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], - sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], - sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], - violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], - violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + harassment: 
::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] } class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel - attr_accessor harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] + attr_accessor harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment] - attr_accessor harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] + attr_accessor harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening] - attr_accessor hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] + attr_accessor hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate] - attr_accessor hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] + attr_accessor hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening] - attr_accessor illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] + attr_accessor illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit] - attr_accessor illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] + attr_accessor illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent] - attr_accessor self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] + attr_accessor self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm] - attr_accessor self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] + attr_accessor self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction] - attr_accessor self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] + attr_accessor self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent] - attr_accessor sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] + attr_accessor sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual] - attr_accessor sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] + attr_accessor sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor] - attr_accessor violence: 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] + attr_accessor violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence] - attr_accessor violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + attr_accessor violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] def initialize: ( - harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], - harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], - hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], - hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], - illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], - illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], - self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], - self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], - self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], - sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], - sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], - violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], - violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] ) -> void - def to_hash: -> OpenAI::Models::Moderation::category_applied_input_types - type harassment = :text module Harassment @@ -158,7 +152,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment] end type harassment_threatening = :text @@ -168,7 +162,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening] end type hate = :text @@ -178,7 +172,7 @@ module OpenAI TEXT: :text - def self?.values: -> 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate] end type hate_threatening = :text @@ -188,7 +182,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening] end type illicit = :text @@ -198,7 +192,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit] end type illicit_violent = :text @@ -208,7 +202,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent] end type self_harm = :text | :image @@ -219,7 +213,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm] end type self_harm_instruction = :text | :image @@ -230,7 +224,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction] end type self_harm_intent = :text | :image @@ -241,7 +235,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent] end type sexual = :text | :image @@ -252,7 +246,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual] end type sexual_minor = :text @@ -262,7 +256,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor] end type violence = :text | :image @@ -273,7 +267,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence] end type violence_graphic = :text | :image @@ -284,7 +278,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] end end @@ -347,8 +341,6 @@ module OpenAI violence: Float, violence_graphic: Float ) -> void - - def to_hash: -> OpenAI::Models::Moderation::category_scores end end end diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index e6f8668e..f681a3bb 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -25,8 +25,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::moderation_create_params - type 
input = String | ::Array[String] @@ -35,7 +33,7 @@ module OpenAI module Input extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[String], ::Array[OpenAI::Models::moderation_multi_modal_input]] + def self?.variants: -> ::Array[OpenAI::Models::ModerationCreateParams::input] StringArray: OpenAI::Internal::Type::Converter @@ -47,7 +45,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::moderation_model] + def self?.variants: -> ::Array[OpenAI::Models::ModerationCreateParams::model] end end end diff --git a/sig/openai/models/moderation_create_response.rbs b/sig/openai/models/moderation_create_response.rbs index d6432898..616ad949 100644 --- a/sig/openai/models/moderation_create_response.rbs +++ b/sig/openai/models/moderation_create_response.rbs @@ -1,26 +1,20 @@ module OpenAI module Models type moderation_create_response = - { - id: String, - model: String, - results: ::Array[OpenAI::Models::Moderation] - } + { id: String, model: String, results: ::Array[OpenAI::Moderation] } class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel attr_accessor id: String attr_accessor model: String - attr_accessor results: ::Array[OpenAI::Models::Moderation] + attr_accessor results: ::Array[OpenAI::Moderation] def initialize: ( id: String, model: String, - results: ::Array[OpenAI::Models::Moderation] + results: ::Array[OpenAI::Moderation] ) -> void - - def to_hash: -> OpenAI::Models::moderation_create_response end end end diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs index 4aa41a71..e099ab00 100644 --- a/sig/openai/models/moderation_image_url_input.rbs +++ b/sig/openai/models/moderation_image_url_input.rbs @@ -1,31 +1,24 @@ module OpenAI module Models type moderation_image_url_input = - { - image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, - type: :image_url - } + { image_url: OpenAI::ModerationImageURLInput::ImageURL, type: :image_url } class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel - attr_accessor image_url: OpenAI::Models::ModerationImageURLInput::ImageURL + attr_accessor image_url: OpenAI::ModerationImageURLInput::ImageURL attr_accessor type: :image_url def initialize: ( - image_url: OpenAI::Models::ModerationImageURLInput::ImageURL, + image_url: OpenAI::ModerationImageURLInput::ImageURL, ?type: :image_url ) -> void - def to_hash: -> OpenAI::Models::moderation_image_url_input - type image_url = { url: String } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String def initialize: (url: String) -> void - - def to_hash: -> OpenAI::Models::ModerationImageURLInput::image_url end end end diff --git a/sig/openai/models/moderation_multi_modal_input.rbs b/sig/openai/models/moderation_multi_modal_input.rbs index 61d672c1..ba782390 100644 --- a/sig/openai/models/moderation_multi_modal_input.rbs +++ b/sig/openai/models/moderation_multi_modal_input.rbs @@ -1,13 +1,12 @@ module OpenAI module Models type moderation_multi_modal_input = - OpenAI::Models::ModerationImageURLInput - | OpenAI::Models::ModerationTextInput + OpenAI::ModerationImageURLInput | OpenAI::ModerationTextInput module ModerationMultiModalInput extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput] + def self?.variants: -> ::Array[OpenAI::Models::moderation_multi_modal_input] end end end diff --git a/sig/openai/models/moderation_text_input.rbs 
b/sig/openai/models/moderation_text_input.rbs index c6d70b96..6f258e86 100644 --- a/sig/openai/models/moderation_text_input.rbs +++ b/sig/openai/models/moderation_text_input.rbs @@ -8,8 +8,6 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void - - def to_hash: -> OpenAI::Models::moderation_text_input end end end diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs index 01290f70..90cc48b4 100644 --- a/sig/openai/models/other_file_chunking_strategy_object.rbs +++ b/sig/openai/models/other_file_chunking_strategy_object.rbs @@ -6,8 +6,6 @@ module OpenAI attr_accessor type: :other def initialize: (?type: :other) -> void - - def to_hash: -> OpenAI::Models::other_file_chunking_strategy_object end end end diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index 30c6ad68..9cfe03fc 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -3,25 +3,23 @@ module OpenAI type reasoning = { effort: OpenAI::Models::reasoning_effort?, - generate_summary: OpenAI::Models::Reasoning::generate_summary?, - summary: OpenAI::Models::Reasoning::summary? + generate_summary: OpenAI::Reasoning::generate_summary?, + summary: OpenAI::Reasoning::summary? } class Reasoning < OpenAI::Internal::Type::BaseModel attr_accessor effort: OpenAI::Models::reasoning_effort? - attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary? + attr_accessor generate_summary: OpenAI::Reasoning::generate_summary? - attr_accessor summary: OpenAI::Models::Reasoning::summary? + attr_accessor summary: OpenAI::Reasoning::summary? def initialize: ( ?effort: OpenAI::Models::reasoning_effort?, - ?generate_summary: OpenAI::Models::Reasoning::generate_summary?, - ?summary: OpenAI::Models::Reasoning::summary? + ?generate_summary: OpenAI::Reasoning::generate_summary?, + ?summary: OpenAI::Reasoning::summary? 
) -> void - def to_hash: -> OpenAI::Models::reasoning - type generate_summary = :auto | :concise | :detailed module GenerateSummary @@ -31,7 +29,7 @@ module OpenAI CONCISE: :concise DETAILED: :detailed - def self?.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary] + def self?.values: -> ::Array[OpenAI::Reasoning::generate_summary] end type summary = :auto | :concise | :detailed @@ -43,7 +41,7 @@ module OpenAI CONCISE: :concise DETAILED: :detailed - def self?.values: -> ::Array[OpenAI::Models::Reasoning::summary] + def self?.values: -> ::Array[OpenAI::Reasoning::summary] end end end diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs index 09c67374..b34ea82e 100644 --- a/sig/openai/models/response_format_json_object.rbs +++ b/sig/openai/models/response_format_json_object.rbs @@ -6,8 +6,6 @@ module OpenAI attr_accessor type: :json_object def initialize: (?type: :json_object) -> void - - def to_hash: -> OpenAI::Models::response_format_json_object end end end diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs index 4d9f4d87..6f200e90 100644 --- a/sig/openai/models/response_format_json_schema.rbs +++ b/sig/openai/models/response_format_json_schema.rbs @@ -2,22 +2,20 @@ module OpenAI module Models type response_format_json_schema = { - json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema, type: :json_schema } class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel - attr_accessor json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema + attr_accessor json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema attr_accessor type: :json_schema def initialize: ( - json_schema: OpenAI::Models::ResponseFormatJSONSchema::JSONSchema, + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema, ?type: :json_schema ) -> void - def to_hash: -> OpenAI::Models::response_format_json_schema - type json_schema = { name: String, @@ -45,8 +43,6 @@ module OpenAI ?schema: ::Hash[Symbol, top], ?strict: bool? 
) -> void - - def to_hash: -> OpenAI::Models::ResponseFormatJSONSchema::json_schema end end end diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs index 7b892f07..b245c218 100644 --- a/sig/openai/models/response_format_text.rbs +++ b/sig/openai/models/response_format_text.rbs @@ -6,8 +6,6 @@ module OpenAI attr_accessor type: :text def initialize: (?type: :text) -> void - - def to_hash: -> OpenAI::Models::response_format_text end end end diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index f362244f..23544e1a 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -5,7 +5,7 @@ module OpenAI { display_height: Integer, display_width: Integer, - environment: OpenAI::Models::Responses::ComputerTool::environment, + environment: OpenAI::Responses::ComputerTool::environment, type: :computer_use_preview } @@ -14,19 +14,17 @@ module OpenAI attr_accessor display_width: Integer - attr_accessor environment: OpenAI::Models::Responses::ComputerTool::environment + attr_accessor environment: OpenAI::Responses::ComputerTool::environment attr_accessor type: :computer_use_preview def initialize: ( display_height: Integer, display_width: Integer, - environment: OpenAI::Models::Responses::ComputerTool::environment, + environment: OpenAI::Responses::ComputerTool::environment, ?type: :computer_use_preview ) -> void - def to_hash: -> OpenAI::Models::Responses::computer_tool - type environment = :windows | :mac | :linux | :ubuntu | :browser module Environment @@ -38,7 +36,7 @@ module OpenAI UBUNTU: :ubuntu BROWSER: :browser - def self?.values: -> ::Array[OpenAI::Models::Responses::ComputerTool::environment] + def self?.values: -> ::Array[OpenAI::Responses::ComputerTool::environment] end end end diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs index 1120ea2c..0b52f94f 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -3,30 +3,28 @@ module OpenAI module Responses type easy_input_message = { - content: OpenAI::Models::Responses::EasyInputMessage::content, - role: OpenAI::Models::Responses::EasyInputMessage::role, - type: OpenAI::Models::Responses::EasyInputMessage::type_ + content: OpenAI::Responses::EasyInputMessage::content, + role: OpenAI::Responses::EasyInputMessage::role, + type: OpenAI::Responses::EasyInputMessage::type_ } class EasyInputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Responses::EasyInputMessage::content + attr_accessor content: OpenAI::Responses::EasyInputMessage::content - attr_accessor role: OpenAI::Models::Responses::EasyInputMessage::role + attr_accessor role: OpenAI::Responses::EasyInputMessage::role - attr_reader type: OpenAI::Models::Responses::EasyInputMessage::type_? + attr_reader type: OpenAI::Responses::EasyInputMessage::type_? 
def type=: ( - OpenAI::Models::Responses::EasyInputMessage::type_ - ) -> OpenAI::Models::Responses::EasyInputMessage::type_ + OpenAI::Responses::EasyInputMessage::type_ + ) -> OpenAI::Responses::EasyInputMessage::type_ def initialize: ( - content: OpenAI::Models::Responses::EasyInputMessage::content, - role: OpenAI::Models::Responses::EasyInputMessage::role, - ?type: OpenAI::Models::Responses::EasyInputMessage::type_ + content: OpenAI::Responses::EasyInputMessage::content, + role: OpenAI::Responses::EasyInputMessage::role, + ?type: OpenAI::Responses::EasyInputMessage::type_ ) -> void - def to_hash: -> OpenAI::Models::Responses::easy_input_message - type content = String | OpenAI::Models::Responses::response_input_message_content_list @@ -34,7 +32,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::Responses::response_input_message_content_list] + def self?.variants: -> ::Array[OpenAI::Responses::EasyInputMessage::content] end type role = :user | :assistant | :system | :developer @@ -47,7 +45,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::role] + def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::role] end type type_ = :message @@ -57,7 +55,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_] + def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::type_] end end end diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index 07747db5..da6a8189 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -5,9 +5,9 @@ module OpenAI { type: :file_search, vector_store_ids: ::Array[String], - filters: OpenAI::Models::Responses::FileSearchTool::filters?, + filters: OpenAI::Responses::FileSearchTool::filters?, max_num_results: Integer, - ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions + ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions } class FileSearchTool < OpenAI::Internal::Type::BaseModel @@ -15,61 +15,56 @@ module OpenAI attr_accessor vector_store_ids: ::Array[String] - attr_accessor filters: OpenAI::Models::Responses::FileSearchTool::filters? + attr_accessor filters: OpenAI::Responses::FileSearchTool::filters? attr_reader max_num_results: Integer? def max_num_results=: (Integer) -> Integer - attr_reader ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions? + attr_reader ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions? 
def ranking_options=: ( - OpenAI::Models::Responses::FileSearchTool::RankingOptions - ) -> OpenAI::Models::Responses::FileSearchTool::RankingOptions + OpenAI::Responses::FileSearchTool::RankingOptions + ) -> OpenAI::Responses::FileSearchTool::RankingOptions def initialize: ( vector_store_ids: ::Array[String], - ?filters: OpenAI::Models::Responses::FileSearchTool::filters?, + ?filters: OpenAI::Responses::FileSearchTool::filters?, ?max_num_results: Integer, - ?ranking_options: OpenAI::Models::Responses::FileSearchTool::RankingOptions, + ?ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions, ?type: :file_search ) -> void - def to_hash: -> OpenAI::Models::Responses::file_search_tool - - type filters = - OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter + type filters = OpenAI::ComparisonFilter | OpenAI::CompoundFilter module Filters extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + def self?.variants: -> ::Array[OpenAI::Responses::FileSearchTool::filters] end type ranking_options = { - ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, + ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_reader ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker? + attr_reader ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker? def ranker=: ( - OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker - ) -> OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker + OpenAI::Responses::FileSearchTool::RankingOptions::ranker + ) -> OpenAI::Responses::FileSearchTool::RankingOptions::ranker attr_reader score_threshold: Float? 
def score_threshold=: (Float) -> Float def initialize: ( - ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, + ?ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker, ?score_threshold: Float ) -> void - def to_hash: -> OpenAI::Models::Responses::FileSearchTool::ranking_options - type ranker = :auto | :"default-2024-11-15" module Ranker @@ -78,7 +73,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self?.values: -> ::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Responses::FileSearchTool::RankingOptions::ranker] end end end diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index 3121cb1d..eb7df928 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -28,8 +28,6 @@ module OpenAI ?description: String?, ?type: :function ) -> void - - def to_hash: -> OpenAI::Models::Responses::function_tool end end end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 3e047f28..0897b4e9 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -48,8 +48,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Responses::input_item_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 2957a8d3..ea887796 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -5,8 +5,8 @@ module OpenAI { id: String, created_at: Float, - error: OpenAI::Models::Responses::ResponseError?, - incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, + error: OpenAI::Responses::ResponseError?, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, instructions: String?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, @@ -14,17 +14,17 @@ module OpenAI output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, temperature: Float?, - tool_choice: OpenAI::Models::Responses::Response::tool_choice, + tool_choice: OpenAI::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, max_output_tokens: Integer?, previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, - service_tier: OpenAI::Models::Responses::Response::service_tier?, + reasoning: OpenAI::Reasoning?, + service_tier: OpenAI::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, - text: OpenAI::Models::Responses::ResponseTextConfig, - truncation: OpenAI::Models::Responses::Response::truncation?, - usage: OpenAI::Models::Responses::ResponseUsage, + text: OpenAI::Responses::ResponseTextConfig, + truncation: OpenAI::Responses::Response::truncation?, + usage: OpenAI::Responses::ResponseUsage, user: String } @@ -33,9 +33,9 @@ module OpenAI attr_accessor created_at: Float - attr_accessor error: OpenAI::Models::Responses::ResponseError? + attr_accessor error: OpenAI::Responses::ResponseError? - attr_accessor incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails? + attr_accessor incomplete_details: OpenAI::Responses::Response::IncompleteDetails? attr_accessor instructions: String? 
@@ -51,7 +51,7 @@ module OpenAI attr_accessor temperature: Float? - attr_accessor tool_choice: OpenAI::Models::Responses::Response::tool_choice + attr_accessor tool_choice: OpenAI::Responses::Response::tool_choice attr_accessor tools: ::Array[OpenAI::Models::Responses::tool] @@ -61,9 +61,9 @@ module OpenAI attr_accessor previous_response_id: String? - attr_accessor reasoning: OpenAI::Models::Reasoning? + attr_accessor reasoning: OpenAI::Reasoning? - attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier? + attr_accessor service_tier: OpenAI::Responses::Response::service_tier? attr_reader status: OpenAI::Models::Responses::response_status? @@ -71,19 +71,19 @@ module OpenAI OpenAI::Models::Responses::response_status ) -> OpenAI::Models::Responses::response_status - attr_reader text: OpenAI::Models::Responses::ResponseTextConfig? + attr_reader text: OpenAI::Responses::ResponseTextConfig? def text=: ( - OpenAI::Models::Responses::ResponseTextConfig - ) -> OpenAI::Models::Responses::ResponseTextConfig + OpenAI::Responses::ResponseTextConfig + ) -> OpenAI::Responses::ResponseTextConfig - attr_accessor truncation: OpenAI::Models::Responses::Response::truncation? + attr_accessor truncation: OpenAI::Responses::Response::truncation? - attr_reader usage: OpenAI::Models::Responses::ResponseUsage? + attr_reader usage: OpenAI::Responses::ResponseUsage? def usage=: ( - OpenAI::Models::Responses::ResponseUsage - ) -> OpenAI::Models::Responses::ResponseUsage + OpenAI::Responses::ResponseUsage + ) -> OpenAI::Responses::ResponseUsage attr_reader user: String? @@ -92,49 +92,43 @@ module OpenAI def initialize: ( id: String, created_at: Float, - error: OpenAI::Models::Responses::ResponseError?, - incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails?, + error: OpenAI::Responses::ResponseError?, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, instructions: String?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, temperature: Float?, - tool_choice: OpenAI::Models::Responses::Response::tool_choice, + tool_choice: OpenAI::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, ?max_output_tokens: Integer?, ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, - ?service_tier: OpenAI::Models::Responses::Response::service_tier?, + ?reasoning: OpenAI::Reasoning?, + ?service_tier: OpenAI::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, - ?text: OpenAI::Models::Responses::ResponseTextConfig, - ?truncation: OpenAI::Models::Responses::Response::truncation?, - ?usage: OpenAI::Models::Responses::ResponseUsage, + ?text: OpenAI::Responses::ResponseTextConfig, + ?truncation: OpenAI::Responses::Response::truncation?, + ?usage: OpenAI::Responses::ResponseUsage, ?user: String, ?object: :response ) -> void - def to_hash: -> OpenAI::Models::Responses::response - type incomplete_details = - { - reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason - } + { reason: OpenAI::Responses::Response::IncompleteDetails::reason } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_reader reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason? + attr_reader reason: OpenAI::Responses::Response::IncompleteDetails::reason? 
def reason=: ( - OpenAI::Models::Responses::Response::IncompleteDetails::reason - ) -> OpenAI::Models::Responses::Response::IncompleteDetails::reason + OpenAI::Responses::Response::IncompleteDetails::reason + ) -> OpenAI::Responses::Response::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + ?reason: OpenAI::Responses::Response::IncompleteDetails::reason ) -> void - def to_hash: -> OpenAI::Models::Responses::Response::incomplete_details - type reason = :max_output_tokens | :content_filter module Reason @@ -143,19 +137,19 @@ module OpenAI MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter - def self?.values: -> ::Array[OpenAI::Models::Responses::Response::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Responses::Response::IncompleteDetails::reason] end end type tool_choice = OpenAI::Models::Responses::tool_choice_options - | OpenAI::Models::Responses::ToolChoiceTypes - | OpenAI::Models::Responses::ToolChoiceFunction + | OpenAI::Responses::ToolChoiceTypes + | OpenAI::Responses::ToolChoiceFunction module ToolChoice extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + def self?.variants: -> ::Array[OpenAI::Responses::Response::tool_choice] end type service_tier = :auto | :default | :flex @@ -167,7 +161,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] + def self?.values: -> ::Array[OpenAI::Responses::Response::service_tier] end type truncation = :auto | :disabled @@ -178,7 +172,7 @@ module OpenAI AUTO: :auto DISABLED: :disabled - def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] + def self?.values: -> ::Array[OpenAI::Responses::Response::truncation] end end end diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index 7a218696..370be606 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -10,8 +10,6 @@ module OpenAI attr_accessor type: :"response.audio.delta" def initialize: (delta: String, ?type: :"response.audio.delta") -> void - - def to_hash: -> OpenAI::Models::Responses::response_audio_delta_event end end end diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index cbff6d81..9399bfae 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -7,8 +7,6 @@ module OpenAI attr_accessor type: :"response.audio.done" def initialize: (?type: :"response.audio.done") -> void - - def to_hash: -> OpenAI::Models::Responses::response_audio_done_event end end end diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index 6f8fc8cb..ef7c93c3 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -13,8 +13,6 @@ module OpenAI delta: String, ?type: :"response.audio.transcript.delta" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_audio_transcript_delta_event end end end diff --git 
a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index 9d7072d8..7397feff 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -8,8 +8,6 @@ module OpenAI attr_accessor type: :"response.audio.transcript.done" def initialize: (?type: :"response.audio.transcript.done") -> void - - def to_hash: -> OpenAI::Models::Responses::response_audio_transcript_done_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 23837b29..212d9bfa 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -20,8 +20,6 @@ module OpenAI output_index: Integer, ?type: :"response.code_interpreter_call.code.delta" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_code_delta_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index d1358071..72f8bc5d 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -20,8 +20,6 @@ module OpenAI output_index: Integer, ?type: :"response.code_interpreter_call.code.done" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_code_done_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index 840dabb1..d2d8e451 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -3,25 +3,23 @@ module OpenAI module Responses type response_code_interpreter_call_completed_event = { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, type: :"response.code_interpreter_call.completed" } class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall + attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer attr_accessor type: :"response.code_interpreter_call.completed" def initialize: ( - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, ?type: :"response.code_interpreter_call.completed" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_completed_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index f19814d0..d4d09537 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ 
b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -3,25 +3,23 @@ module OpenAI module Responses type response_code_interpreter_call_in_progress_event = { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, type: :"response.code_interpreter_call.in_progress" } class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall + attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer attr_accessor type: :"response.code_interpreter_call.in_progress" def initialize: ( - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, ?type: :"response.code_interpreter_call.in_progress" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_in_progress_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index fdaebc1e..8fd6cac0 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -3,25 +3,23 @@ module OpenAI module Responses type response_code_interpreter_call_interpreting_event = { - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, type: :"response.code_interpreter_call.interpreting" } class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall + attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall attr_accessor output_index: Integer attr_accessor type: :"response.code_interpreter_call.interpreting" def initialize: ( - code_interpreter_call: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, ?type: :"response.code_interpreter_call.interpreting" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_call_interpreting_event end end end diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index b6aa4db6..c892569f 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -5,8 +5,8 @@ module OpenAI { id: String, code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, type: :code_interpreter_call } @@ -15,25 +15,23 @@ module OpenAI attr_accessor code: String - attr_accessor results: 
::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] + attr_accessor results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] - attr_accessor status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status + attr_accessor status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status attr_accessor type: :code_interpreter_call def initialize: ( id: String, code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, ?type: :code_interpreter_call ) -> void - def to_hash: -> OpenAI::Models::Responses::response_code_interpreter_tool_call - type result = - OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs - | OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs + | OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files module Result extend OpenAI::Internal::Type::Union @@ -46,28 +44,24 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::logs end type files = { - files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], type: :files } class Files < OpenAI::Internal::Type::BaseModel - attr_accessor files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] + attr_accessor files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] attr_accessor type: :files def initialize: ( - files: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], ?type: :files ) -> void - def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::files - type file = { file_id: String, mime_type: String } class File < OpenAI::Internal::Type::BaseModel @@ -76,12 +70,10 @@ module OpenAI attr_accessor mime_type: String def initialize: (file_id: String, mime_type: String) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::file end end - def self?.variants: -> [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files] + def self?.variants: -> ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] end type status = :in_progress | :interpreting | :completed @@ -93,7 +85,7 @@ module OpenAI INTERPRETING: :interpreting COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] + def self?.values: -> ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::status] end end end diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 5ef101ac..4900852e 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -2,22 +2,17 @@ module OpenAI module Models 
module Responses type response_completed_event = - { - response: OpenAI::Models::Responses::Response, - type: :"response.completed" - } + { response: OpenAI::Responses::Response, type: :"response.completed" } class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel - attr_accessor response: OpenAI::Models::Responses::Response + attr_accessor response: OpenAI::Responses::Response attr_accessor type: :"response.completed" def initialize: ( - response: OpenAI::Models::Responses::Response, + response: OpenAI::Responses::Response, ?type: :"response.completed" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_completed_event end end end diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index ab81a03e..52c82604 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -4,61 +4,59 @@ module OpenAI type response_computer_tool_call = { id: String, - action: OpenAI::Models::Responses::ResponseComputerToolCall::action, + action: OpenAI::Responses::ResponseComputerToolCall::action, call_id: String, - pending_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::status, - type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], + status: OpenAI::Responses::ResponseComputerToolCall::status, + type: OpenAI::Responses::ResponseComputerToolCall::type_ } class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor action: OpenAI::Models::Responses::ResponseComputerToolCall::action + attr_accessor action: OpenAI::Responses::ResponseComputerToolCall::action attr_accessor call_id: String - attr_accessor pending_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck] + attr_accessor pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] - attr_accessor status: OpenAI::Models::Responses::ResponseComputerToolCall::status + attr_accessor status: OpenAI::Responses::ResponseComputerToolCall::status - attr_accessor type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + attr_accessor type: OpenAI::Responses::ResponseComputerToolCall::type_ def initialize: ( id: String, - action: OpenAI::Models::Responses::ResponseComputerToolCall::action, + action: OpenAI::Responses::ResponseComputerToolCall::action, call_id: String, - pending_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCall::status, - type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], + status: OpenAI::Responses::ResponseComputerToolCall::status, + type: OpenAI::Responses::ResponseComputerToolCall::type_ ) -> void - def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call - type action = - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress - | 
OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type - | OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait + OpenAI::Responses::ResponseComputerToolCall::Action::Click + | OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick + | OpenAI::Responses::ResponseComputerToolCall::Action::Drag + | OpenAI::Responses::ResponseComputerToolCall::Action::Keypress + | OpenAI::Responses::ResponseComputerToolCall::Action::Move + | OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot + | OpenAI::Responses::ResponseComputerToolCall::Action::Scroll + | OpenAI::Responses::ResponseComputerToolCall::Action::Type + | OpenAI::Responses::ResponseComputerToolCall::Action::Wait module Action extend OpenAI::Internal::Type::Union type click = { - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, + button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, type: :click, x: Integer, y_: Integer } class Click < OpenAI::Internal::Type::BaseModel - attr_accessor button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button + attr_accessor button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button attr_accessor type: :click @@ -67,14 +65,12 @@ module OpenAI attr_accessor y_: Integer def initialize: ( - button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, + button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, x: Integer, y_: Integer, ?type: :click ) -> void - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::click - type button = :left | :right | :wheel | :back | :forward module Button @@ -86,7 +82,7 @@ module OpenAI BACK: :back FORWARD: :forward - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button] + def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Click::button] end end @@ -104,28 +100,24 @@ module OpenAI y_: Integer, ?type: :double_click ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::double_click end type drag = { - path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], + path: ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path], type: :drag } class Drag < OpenAI::Internal::Type::BaseModel - attr_accessor path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path] + attr_accessor path: ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] attr_accessor type: :drag def initialize: ( - path: ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path], + path: ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path], ?type: :drag ) -> void - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::drag - type path = { x: Integer, y_: Integer } class Path < OpenAI::Internal::Type::BaseModel @@ -134,8 +126,6 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::path end end @@ -147,8 +137,6 @@ module OpenAI attr_accessor type: :keypress def initialize: (keys: ::Array[String], ?type: 
:keypress) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::keypress end type move = { type: :move, x: Integer, y_: Integer } @@ -161,8 +149,6 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer, ?type: :move) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::move end type screenshot = { type: :screenshot } @@ -171,8 +157,6 @@ module OpenAI attr_accessor type: :screenshot def initialize: (?type: :screenshot) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::screenshot end type scroll = @@ -202,8 +186,6 @@ module OpenAI y_: Integer, ?type: :scroll ) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::scroll end type type_ = { text: String, type: :type } @@ -214,8 +196,6 @@ module OpenAI attr_accessor type: :type def initialize: (text: String, ?type: :type) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::type_ end type wait = { type: :wait } @@ -224,11 +204,9 @@ module OpenAI attr_accessor type: :wait def initialize: (?type: :wait) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::Action::wait end - def self?.variants: -> [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] + def self?.variants: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::action] end type pending_safety_check = @@ -242,8 +220,6 @@ module OpenAI attr_accessor message: String def initialize: (id: String, code: String, message: String) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCall::pending_safety_check end type status = :in_progress | :completed | :incomplete @@ -255,7 +231,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::status] + def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::status] end type type_ = :computer_call @@ -265,7 +241,7 @@ module OpenAI COMPUTER_CALL: :computer_call - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] + def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::type_] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 32edd0b4..4ce6f3a0 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -5,10 +5,10 @@ module OpenAI { id: String, call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, type: :computer_call_output, - acknowledged_safety_checks: 
::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status } class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel @@ -16,33 +16,31 @@ module OpenAI attr_accessor call_id: String - attr_accessor output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot + attr_accessor output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot attr_accessor type: :computer_call_output - attr_reader acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]? + attr_reader acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]? def acknowledged_safety_checks=: ( - ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] - ) -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] + ) -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] - attr_reader status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status? + attr_reader status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status? def status=: ( - OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status - ) -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + OpenAI::Responses::ResponseComputerToolCallOutputItem::status + ) -> OpenAI::Responses::ResponseComputerToolCallOutputItem::status def initialize: ( id: String, call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + ?status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status, ?type: :computer_call_output ) -> void - def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_item - type acknowledged_safety_check = { id: String, code: String, message: String } @@ -54,8 +52,6 @@ module OpenAI attr_accessor message: String def initialize: (id: String, code: String, message: String) -> void - - def to_hash: -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::acknowledged_safety_check end type status = :in_progress | :completed | :incomplete @@ -67,7 +63,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index eae3e223..5cea0d21 100644 --- 
a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -20,8 +20,6 @@ module OpenAI ?image_url: String, ?type: :computer_screenshot ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_computer_tool_call_output_screenshot end end end diff --git a/sig/openai/models/responses/response_content.rbs b/sig/openai/models/responses/response_content.rbs index 4e48827c..deb9917d 100644 --- a/sig/openai/models/responses/response_content.rbs +++ b/sig/openai/models/responses/response_content.rbs @@ -2,16 +2,16 @@ module OpenAI module Models module Responses type response_content = - OpenAI::Models::Responses::ResponseInputText - | OpenAI::Models::Responses::ResponseInputImage - | OpenAI::Models::Responses::ResponseInputFile - | OpenAI::Models::Responses::ResponseOutputText - | OpenAI::Models::Responses::ResponseOutputRefusal + OpenAI::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputImage + | OpenAI::Responses::ResponseInputFile + | OpenAI::Responses::ResponseOutputText + | OpenAI::Responses::ResponseOutputRefusal module ResponseContent extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> ::Array[OpenAI::Models::Responses::response_content] end end end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index e4c3e4bf..7f1ecbc3 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -6,7 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Responses::ResponseContentPartAddedEvent::part, type: :"response.content_part.added" } @@ -17,7 +17,7 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part + attr_accessor part: OpenAI::Responses::ResponseContentPartAddedEvent::part attr_accessor type: :"response.content_part.added" @@ -25,20 +25,18 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Responses::ResponseContentPartAddedEvent::part, ?type: :"response.content_part.added" ) -> void - def to_hash: -> OpenAI::Models::Responses::response_content_part_added_event - type part = - OpenAI::Models::Responses::ResponseOutputText - | OpenAI::Models::Responses::ResponseOutputRefusal + OpenAI::Responses::ResponseOutputText + | OpenAI::Responses::ResponseOutputRefusal module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> ::Array[OpenAI::Responses::ResponseContentPartAddedEvent::part] end end end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index ff72b2b3..2cfd7195 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ 
b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -6,7 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Responses::ResponseContentPartDoneEvent::part, type: :"response.content_part.done" } @@ -17,7 +17,7 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part + attr_accessor part: OpenAI::Responses::ResponseContentPartDoneEvent::part attr_accessor type: :"response.content_part.done" @@ -25,20 +25,18 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Responses::ResponseContentPartDoneEvent::part, ?type: :"response.content_part.done" ) -> void - def to_hash: -> OpenAI::Models::Responses::response_content_part_done_event - type part = - OpenAI::Models::Responses::ResponseOutputText - | OpenAI::Models::Responses::ResponseOutputRefusal + OpenAI::Responses::ResponseOutputText + | OpenAI::Responses::ResponseOutputRefusal module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + def self?.variants: -> ::Array[OpenAI::Responses::ResponseContentPartDoneEvent::part] end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 1e84251f..cf306e43 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -11,11 +11,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, parallel_tool_calls: bool?, previous_response_id: String?, - reasoning: OpenAI::Models::Reasoning?, + reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, temperature: Float?, - text: OpenAI::Models::Responses::ResponseTextConfig, + text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, @@ -44,7 +44,7 @@ module OpenAI attr_accessor previous_response_id: String? - attr_accessor reasoning: OpenAI::Models::Reasoning? + attr_accessor reasoning: OpenAI::Reasoning? attr_accessor service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier? @@ -52,11 +52,11 @@ module OpenAI attr_accessor temperature: Float? - attr_reader text: OpenAI::Models::Responses::ResponseTextConfig? + attr_reader text: OpenAI::Responses::ResponseTextConfig? def text=: ( - OpenAI::Models::Responses::ResponseTextConfig - ) -> OpenAI::Models::Responses::ResponseTextConfig + OpenAI::Responses::ResponseTextConfig + ) -> OpenAI::Responses::ResponseTextConfig attr_reader tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice? 
@@ -87,11 +87,11 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, + ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float?, @@ -100,14 +100,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::Responses::response_create_params - type input = String | OpenAI::Models::Responses::response_input module Input extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, OpenAI::Models::Responses::response_input] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::input] end type service_tier = :auto | :default | :flex @@ -124,13 +122,13 @@ module OpenAI type tool_choice = OpenAI::Models::Responses::tool_choice_options - | OpenAI::Models::Responses::ToolChoiceTypes - | OpenAI::Models::Responses::ToolChoiceFunction + | OpenAI::Responses::ToolChoiceTypes + | OpenAI::Responses::ToolChoiceFunction module ToolChoice extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::Responses::tool_choice_options, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::tool_choice] end type truncation = :auto | :disabled diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index 6848ecf7..e93fe5a7 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -2,22 +2,17 @@ module OpenAI module Models module Responses type response_created_event = - { - response: OpenAI::Models::Responses::Response, - type: :"response.created" - } + { response: OpenAI::Responses::Response, type: :"response.created" } class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel - attr_accessor response: OpenAI::Models::Responses::Response + attr_accessor response: OpenAI::Responses::Response attr_accessor type: :"response.created" def initialize: ( - response: OpenAI::Models::Responses::Response, + response: OpenAI::Responses::Response, ?type: :"response.created" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_created_event end end end diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index 1cedd65a..fceca07d 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -9,8 +9,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::Responses::response_delete_params end end end diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index f9721f01..ee1b5f7a 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -2,23 +2,18 @@ module OpenAI module Models module Responses type response_error = - { - code: 
OpenAI::Models::Responses::ResponseError::code, - message: String - } + { code: OpenAI::Responses::ResponseError::code, message: String } class ResponseError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Models::Responses::ResponseError::code + attr_accessor code: OpenAI::Responses::ResponseError::code attr_accessor message: String def initialize: ( - code: OpenAI::Models::Responses::ResponseError::code, + code: OpenAI::Responses::ResponseError::code, message: String ) -> void - def to_hash: -> OpenAI::Models::Responses::response_error - type code = :server_error | :rate_limit_exceeded @@ -61,7 +56,7 @@ module OpenAI FAILED_TO_DOWNLOAD_IMAGE: :failed_to_download_image IMAGE_FILE_NOT_FOUND: :image_file_not_found - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseError::code] + def self?.values: -> ::Array[OpenAI::Responses::ResponseError::code] end end end diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index b64588ae..ac79ae54 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -19,8 +19,6 @@ module OpenAI param: String?, ?type: :error ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_error_event end end end diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index f6f26208..fe0562a3 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -2,22 +2,17 @@ module OpenAI module Models module Responses type response_failed_event = - { - response: OpenAI::Models::Responses::Response, - type: :"response.failed" - } + { response: OpenAI::Responses::Response, type: :"response.failed" } class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel - attr_accessor response: OpenAI::Models::Responses::Response + attr_accessor response: OpenAI::Responses::Response attr_accessor type: :"response.failed" def initialize: ( - response: OpenAI::Models::Responses::Response, + response: OpenAI::Responses::Response, ?type: :"response.failed" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_failed_event end end end diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index 8249564b..dc0013fc 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -20,8 +20,6 @@ module OpenAI output_index: Integer, ?type: :"response.file_search_call.completed" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_file_search_call_completed_event end end end diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index 5008567b..36f69c57 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -20,8 +20,6 @@ module OpenAI output_index: Integer, ?type: :"response.file_search_call.in_progress" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_file_search_call_in_progress_event end end end diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs 
b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index ba475249..f1994439 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -20,8 +20,6 @@ module OpenAI output_index: Integer, ?type: :"response.file_search_call.searching" ) -> void - - def to_hash: -> OpenAI::Models::Responses::response_file_search_call_searching_event end end end diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index 6e6bfb5c..d97daaa2 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -5,9 +5,9 @@ module OpenAI { id: String, queries: ::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, + status: OpenAI::Responses::ResponseFileSearchToolCall::status, type: :file_search_call, - results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]? + results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? } class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel @@ -15,22 +15,20 @@ module OpenAI attr_accessor queries: ::Array[String] - attr_accessor status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status + attr_accessor status: OpenAI::Responses::ResponseFileSearchToolCall::status attr_accessor type: :file_search_call - attr_accessor results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]? + attr_accessor results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? def initialize: ( id: String, queries: ::Array[String], - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, - ?results: ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]?, + status: OpenAI::Responses::ResponseFileSearchToolCall::status, + ?results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]?, ?type: :file_search_call ) -> void - def to_hash: -> OpenAI::Models::Responses::response_file_search_tool_call - type status = :in_progress | :searching | :completed | :incomplete | :failed @@ -43,12 +41,12 @@ module OpenAI INCOMPLETE: :incomplete FAILED: :failed - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::status] + def self?.values: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::status] end type result = { - attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?, file_id: String, filename: String, score: Float, @@ -56,7 +54,7 @@ module OpenAI } class Result < OpenAI::Internal::Type::BaseModel - attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]? attr_reader file_id: String? 
@@ -75,21 +73,19 @@ module OpenAI
 
           def text=: (String) -> String
 
           def initialize: (
-            ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?,
+            ?attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?,
             ?file_id: String,
             ?filename: String,
             ?score: Float,
             ?text: String
           ) -> void
 
-          def to_hash: -> OpenAI::Models::Responses::ResponseFileSearchToolCall::result
-
           type attribute = String | Float | bool
 
           module Attribute
             extend OpenAI::Internal::Type::Union
 
-            def self?.variants: -> [String, Float, bool]
+            def self?.variants: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]
           end
         end
       end
diff --git a/sig/openai/models/responses/response_format_text_config.rbs b/sig/openai/models/responses/response_format_text_config.rbs
index 11e70474..c085d693 100644
--- a/sig/openai/models/responses/response_format_text_config.rbs
+++ b/sig/openai/models/responses/response_format_text_config.rbs
@@ -2,14 +2,14 @@ module OpenAI
   module Models
     module Responses
       type response_format_text_config =
-        OpenAI::Models::ResponseFormatText
-        | OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig
-        | OpenAI::Models::ResponseFormatJSONObject
+        OpenAI::ResponseFormatText
+        | OpenAI::Responses::ResponseFormatTextJSONSchemaConfig
+        | OpenAI::ResponseFormatJSONObject
 
       module ResponseFormatTextConfig
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_format_text_config]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs
index f6a08df7..319fd52e 100644
--- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs
+++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs
@@ -30,8 +30,6 @@ module OpenAI
           ?strict: bool?,
           ?type: :json_schema
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_format_text_json_schema_config
       end
     end
   end
diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs
index 19067caf..e4486520 100644
--- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs
+++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs
@@ -24,8 +24,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.function_call_arguments.delta"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_function_call_arguments_delta_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs
index 9a6c9073..3e023f5c 100644
--- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs
+++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs
@@ -24,8 +24,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.function_call_arguments.done"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_function_call_arguments_done_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs
index ec454c7d..47884c8a 100644
--- a/sig/openai/models/responses/response_function_tool_call.rbs
+++ b/sig/openai/models/responses/response_function_tool_call.rbs
@@ -8,7 +8,7 @@ module OpenAI
           name: String,
           type: :function_call,
           id: String,
-          status: OpenAI::Models::Responses::ResponseFunctionToolCall::status
+          status: OpenAI::Responses::ResponseFunctionToolCall::status
         }
 
       class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel
@@ -24,23 +24,21 @@ module OpenAI
 
         def id=: (String) -> String
 
-        attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCall::status?
+        attr_reader status: OpenAI::Responses::ResponseFunctionToolCall::status?
 
         def status=: (
-          OpenAI::Models::Responses::ResponseFunctionToolCall::status
-        ) -> OpenAI::Models::Responses::ResponseFunctionToolCall::status
+          OpenAI::Responses::ResponseFunctionToolCall::status
+        ) -> OpenAI::Responses::ResponseFunctionToolCall::status
 
         def initialize: (
           arguments: String,
           call_id: String,
           name: String,
           ?id: String,
-          ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status,
+          ?status: OpenAI::Responses::ResponseFunctionToolCall::status,
           ?type: :function_call
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_function_tool_call
-
         type status = :in_progress | :completed | :incomplete
 
         module Status
@@ -50,7 +48,7 @@ module OpenAI
           COMPLETED: :completed
           INCOMPLETE: :incomplete
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCall::status]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs b/sig/openai/models/responses/response_function_tool_call_item.rbs
index 86727e9b..81206561 100644
--- a/sig/openai/models/responses/response_function_tool_call_item.rbs
+++ b/sig/openai/models/responses/response_function_tool_call_item.rbs
@@ -9,8 +9,6 @@ module OpenAI
         def id=: (String _) -> String
 
         def initialize: (id: String) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_item
       end
     end
   end
diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
index 7fd2c526..048189b6 100644
--- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs
+++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs
@@ -7,7 +7,7 @@ module OpenAI
           call_id: String,
           output: String,
           type: :function_call_output,
-          status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status
+          status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status
         }
 
       class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel
@@ -19,22 +19,20 @@ module OpenAI
 
         attr_accessor type: :function_call_output
 
-        attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status?
+        attr_reader status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status?
 
         def status=: (
-          OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status
-        ) -> OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status
+          OpenAI::Responses::ResponseFunctionToolCallOutputItem::status
+        ) -> OpenAI::Responses::ResponseFunctionToolCallOutputItem::status
 
         def initialize: (
           id: String,
           call_id: String,
           output: String,
-          ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status,
+          ?status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status,
           ?type: :function_call_output
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_function_tool_call_output_item
-
         type status = :in_progress | :completed | :incomplete
 
         module Status
@@ -44,7 +42,7 @@ module OpenAI
           COMPLETED: :completed
           INCOMPLETE: :incomplete
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCallOutputItem::status]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs
index 3a2241fe..5efa6740 100644
--- a/sig/openai/models/responses/response_function_web_search.rbs
+++ b/sig/openai/models/responses/response_function_web_search.rbs
@@ -4,25 +4,23 @@ module OpenAI
       type response_function_web_search =
         {
           id: String,
-          status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status,
+          status: OpenAI::Responses::ResponseFunctionWebSearch::status,
           type: :web_search_call
         }
 
       class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String
 
-        attr_accessor status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status
+        attr_accessor status: OpenAI::Responses::ResponseFunctionWebSearch::status
 
         attr_accessor type: :web_search_call
 
         def initialize: (
           id: String,
-          status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status,
+          status: OpenAI::Responses::ResponseFunctionWebSearch::status,
           ?type: :web_search_call
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_function_web_search
-
         type status = :in_progress | :searching | :completed | :failed
 
         module Status
@@ -33,7 +31,7 @@ module OpenAI
           COMPLETED: :completed
           FAILED: :failed
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionWebSearch::status]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs
index fb29e454..4d9f9e2a 100644
--- a/sig/openai/models/responses/response_in_progress_event.rbs
+++ b/sig/openai/models/responses/response_in_progress_event.rbs
@@ -2,22 +2,17 @@ module OpenAI
   module Models
     module Responses
       type response_in_progress_event =
-        {
-          response: OpenAI::Models::Responses::Response,
-          type: :"response.in_progress"
-        }
+        { response: OpenAI::Responses::Response, type: :"response.in_progress" }
 
       class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel
-        attr_accessor response: OpenAI::Models::Responses::Response
+        attr_accessor response: OpenAI::Responses::Response
 
         attr_accessor type: :"response.in_progress"
 
         def initialize: (
-          response: OpenAI::Models::Responses::Response,
+          response: OpenAI::Responses::Response,
           ?type: :"response.in_progress"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_in_progress_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs
index 8e4251ac..5e527b2f 100644
--- a/sig/openai/models/responses/response_incomplete_event.rbs
+++ b/sig/openai/models/responses/response_incomplete_event.rbs
@@ -2,22 +2,17 @@ module OpenAI
   module Models
     module Responses
      type response_incomplete_event =
-        {
-          response: OpenAI::Models::Responses::Response,
-          type: :"response.incomplete"
-        }
+        { response: OpenAI::Responses::Response, type: :"response.incomplete" }
 
       class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel
-        attr_accessor response: OpenAI::Models::Responses::Response
+        attr_accessor response: OpenAI::Responses::Response
 
         attr_accessor type: :"response.incomplete"
 
         def initialize: (
-          response: OpenAI::Models::Responses::Response,
+          response: OpenAI::Responses::Response,
           ?type: :"response.incomplete"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_incomplete_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs
index 605fd682..542c1371 100644
--- a/sig/openai/models/responses/response_input_audio.rbs
+++ b/sig/openai/models/responses/response_input_audio.rbs
@@ -4,25 +4,23 @@ module OpenAI
       type response_input_audio =
         {
           data: String,
-          format_: OpenAI::Models::Responses::ResponseInputAudio::format_,
+          format_: OpenAI::Responses::ResponseInputAudio::format_,
           type: :input_audio
         }
 
       class ResponseInputAudio < OpenAI::Internal::Type::BaseModel
         attr_accessor data: String
 
-        attr_accessor format_: OpenAI::Models::Responses::ResponseInputAudio::format_
+        attr_accessor format_: OpenAI::Responses::ResponseInputAudio::format_
 
         attr_accessor type: :input_audio
 
         def initialize: (
           data: String,
-          format_: OpenAI::Models::Responses::ResponseInputAudio::format_,
+          format_: OpenAI::Responses::ResponseInputAudio::format_,
           ?type: :input_audio
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_input_audio
-
         type format_ = :mp3 | :wav
 
         module Format
@@ -31,7 +29,7 @@ module OpenAI
           MP3: :mp3
           WAV: :wav
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputAudio::format_]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseInputAudio::format_]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_input_content.rbs b/sig/openai/models/responses/response_input_content.rbs
index 04d4f7b9..81fa4a14 100644
--- a/sig/openai/models/responses/response_input_content.rbs
+++ b/sig/openai/models/responses/response_input_content.rbs
@@ -2,14 +2,14 @@ module OpenAI
   module Models
     module Responses
       type response_input_content =
-        OpenAI::Models::Responses::ResponseInputText
-        | OpenAI::Models::Responses::ResponseInputImage
-        | OpenAI::Models::Responses::ResponseInputFile
+        OpenAI::Responses::ResponseInputText
+        | OpenAI::Responses::ResponseInputImage
+        | OpenAI::Responses::ResponseInputFile
 
       module ResponseInputContent
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_input_content]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs
index 20c40b3a..c5060c70 100644
--- a/sig/openai/models/responses/response_input_file.rbs
+++ b/sig/openai/models/responses/response_input_file.rbs
@@ -28,8 +28,6 @@ module OpenAI
           ?filename: String,
           ?type: :input_file
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_input_file
       end
     end
   end
diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs
index f9a423cc..ebc718c8 100644
--- a/sig/openai/models/responses/response_input_image.rbs
+++ b/sig/openai/models/responses/response_input_image.rbs
@@ -3,14 +3,14 @@ module OpenAI
     module Responses
       type response_input_image =
         {
-          detail: OpenAI::Models::Responses::ResponseInputImage::detail,
+          detail: OpenAI::Responses::ResponseInputImage::detail,
           type: :input_image,
           file_id: String?,
           image_url: String?
         }
 
       class ResponseInputImage < OpenAI::Internal::Type::BaseModel
-        attr_accessor detail: OpenAI::Models::Responses::ResponseInputImage::detail
+        attr_accessor detail: OpenAI::Responses::ResponseInputImage::detail
 
         attr_accessor type: :input_image
 
@@ -19,14 +19,12 @@ module OpenAI
         attr_accessor image_url: String?
 
         def initialize: (
-          detail: OpenAI::Models::Responses::ResponseInputImage::detail,
+          detail: OpenAI::Responses::ResponseInputImage::detail,
           ?file_id: String?,
           ?image_url: String?,
           ?type: :input_image
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_input_image
-
         type detail = :low | :high | :auto
 
         module Detail
@@ -36,7 +34,7 @@ module OpenAI
           HIGH: :high
           AUTO: :auto
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseInputImage::detail]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs
index f5de100d..f3cb1160 100644
--- a/sig/openai/models/responses/response_input_item.rbs
+++ b/sig/openai/models/responses/response_input_item.rbs
@@ -2,17 +2,17 @@ module OpenAI
   module Models
     module Responses
       type response_input_item =
-        OpenAI::Models::Responses::EasyInputMessage
-        | OpenAI::Models::Responses::ResponseInputItem::Message
-        | OpenAI::Models::Responses::ResponseOutputMessage
-        | OpenAI::Models::Responses::ResponseFileSearchToolCall
-        | OpenAI::Models::Responses::ResponseComputerToolCall
-        | OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput
-        | OpenAI::Models::Responses::ResponseFunctionWebSearch
-        | OpenAI::Models::Responses::ResponseFunctionToolCall
-        | OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput
-        | OpenAI::Models::Responses::ResponseReasoningItem
-        | OpenAI::Models::Responses::ResponseInputItem::ItemReference
+        OpenAI::Responses::EasyInputMessage
+        | OpenAI::Responses::ResponseInputItem::Message
+        | OpenAI::Responses::ResponseOutputMessage
+        | OpenAI::Responses::ResponseFileSearchToolCall
+        | OpenAI::Responses::ResponseComputerToolCall
+        | OpenAI::Responses::ResponseInputItem::ComputerCallOutput
+        | OpenAI::Responses::ResponseFunctionWebSearch
+        | OpenAI::Responses::ResponseFunctionToolCall
+        | OpenAI::Responses::ResponseInputItem::FunctionCallOutput
+        | OpenAI::Responses::ResponseReasoningItem
+        | OpenAI::Responses::ResponseInputItem::ItemReference
 
       module ResponseInputItem
         extend OpenAI::Internal::Type::Union
@@ -20,37 +20,35 @@ module OpenAI
         type message =
           {
             content: OpenAI::Models::Responses::response_input_message_content_list,
-            role: OpenAI::Models::Responses::ResponseInputItem::Message::role,
-            status: OpenAI::Models::Responses::ResponseInputItem::Message::status,
-            type: OpenAI::Models::Responses::ResponseInputItem::Message::type_
+            role: OpenAI::Responses::ResponseInputItem::Message::role,
+            status: OpenAI::Responses::ResponseInputItem::Message::status,
+            type: OpenAI::Responses::ResponseInputItem::Message::type_
           }
 
         class Message < OpenAI::Internal::Type::BaseModel
           attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list
 
-          attr_accessor role: OpenAI::Models::Responses::ResponseInputItem::Message::role
+          attr_accessor role: OpenAI::Responses::ResponseInputItem::Message::role
 
-          attr_reader status: OpenAI::Models::Responses::ResponseInputItem::Message::status?
+          attr_reader status: OpenAI::Responses::ResponseInputItem::Message::status?
 
           def status=: (
-            OpenAI::Models::Responses::ResponseInputItem::Message::status
-          ) -> OpenAI::Models::Responses::ResponseInputItem::Message::status
+            OpenAI::Responses::ResponseInputItem::Message::status
+          ) -> OpenAI::Responses::ResponseInputItem::Message::status
 
-          attr_reader type: OpenAI::Models::Responses::ResponseInputItem::Message::type_?
+          attr_reader type: OpenAI::Responses::ResponseInputItem::Message::type_?
 
           def type=: (
-            OpenAI::Models::Responses::ResponseInputItem::Message::type_
-          ) -> OpenAI::Models::Responses::ResponseInputItem::Message::type_
+            OpenAI::Responses::ResponseInputItem::Message::type_
+          ) -> OpenAI::Responses::ResponseInputItem::Message::type_
 
           def initialize: (
             content: OpenAI::Models::Responses::response_input_message_content_list,
-            role: OpenAI::Models::Responses::ResponseInputItem::Message::role,
-            ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status,
-            ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_
+            role: OpenAI::Responses::ResponseInputItem::Message::role,
+            ?status: OpenAI::Responses::ResponseInputItem::Message::status,
+            ?type: OpenAI::Responses::ResponseInputItem::Message::type_
           ) -> void
 
-          def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::message
-
           type role = :user | :system | :developer
 
           module Role
@@ -60,7 +58,7 @@ module OpenAI
             SYSTEM: :system
             DEVELOPER: :developer
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::role]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::role]
           end
 
           type status = :in_progress | :completed | :incomplete
@@ -72,7 +70,7 @@ module OpenAI
             COMPLETED: :completed
             INCOMPLETE: :incomplete
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::status]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::status]
           end
 
           type type_ = :message
@@ -82,44 +80,42 @@ module OpenAI
 
             MESSAGE: :message
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::type_]
           end
         end
 
         type computer_call_output =
           {
             call_id: String,
-            output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+            output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot,
             type: :computer_call_output,
             id: String?,
-            acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?,
-            status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?
+            acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?,
+            status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status?
           }
 
         class ComputerCallOutput < OpenAI::Internal::Type::BaseModel
           attr_accessor call_id: String
 
-          attr_accessor output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot
+          attr_accessor output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot
 
           attr_accessor type: :computer_call_output
 
           attr_accessor id: String?
 
-          attr_accessor acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?
+          attr_accessor acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?
 
-          attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?
+          attr_accessor status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status?
 
           def initialize: (
             call_id: String,
-            output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot,
+            output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot,
             ?id: String?,
-            ?acknowledged_safety_checks: ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?,
-            ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?,
+            ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?,
+            ?status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status?,
             ?type: :computer_call_output
           ) -> void
 
-          def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::computer_call_output
-
           type acknowledged_safety_check =
             { id: String, code: String?, message: String? }
@@ -135,8 +131,6 @@ module OpenAI
               ?code: String?,
               ?message: String?
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::acknowledged_safety_check
           end
 
           type status = :in_progress | :completed | :incomplete
@@ -148,7 +142,7 @@ module OpenAI
             COMPLETED: :completed
             INCOMPLETE: :incomplete
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status]
           end
         end
 
@@ -158,7 +152,7 @@ module OpenAI
             output: String,
             type: :function_call_output,
             id: String?,
-            status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?
+            status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status?
           }
 
         class FunctionCallOutput < OpenAI::Internal::Type::BaseModel
@@ -170,18 +164,16 @@ module OpenAI
 
           attr_accessor id: String?
 
-          attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?
+          attr_accessor status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status?
 
           def initialize: (
             call_id: String,
             output: String,
             ?id: String?,
-            ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?,
+            ?status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status?,
             ?type: :function_call_output
           ) -> void
 
-          def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::function_call_output
-
           type status = :in_progress | :completed | :incomplete
 
           module Status
@@ -191,28 +183,26 @@ module OpenAI
             COMPLETED: :completed
             INCOMPLETE: :incomplete
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status]
           end
         end
 
         type item_reference =
           {
             id: String,
-            type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_?
+            type: OpenAI::Responses::ResponseInputItem::ItemReference::type_?
           }
 
         class ItemReference < OpenAI::Internal::Type::BaseModel
           attr_accessor id: String
 
-          attr_accessor type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_?
+          attr_accessor type: OpenAI::Responses::ResponseInputItem::ItemReference::type_?
 
           def initialize: (
             id: String,
-            ?type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_?
+            ?type: OpenAI::Responses::ResponseInputItem::ItemReference::type_?
           ) -> void
 
-          def to_hash: -> OpenAI::Models::Responses::ResponseInputItem::item_reference
-
           type type_ = :item_reference
 
           module Type
@@ -220,11 +210,11 @@ module OpenAI
 
             ITEM_REFERENCE: :item_reference
 
-            def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_]
+            def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ItemReference::type_]
           end
         end
 
-        def self?.variants: -> [OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_input_item]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs
index 96c1aa3d..f9cfd8b8 100644
--- a/sig/openai/models/responses/response_input_message_item.rbs
+++ b/sig/openai/models/responses/response_input_message_item.rbs
@@ -5,9 +5,9 @@ module OpenAI
         {
           id: String,
           content: OpenAI::Models::Responses::response_input_message_content_list,
-          role: OpenAI::Models::Responses::ResponseInputMessageItem::role,
-          status: OpenAI::Models::Responses::ResponseInputMessageItem::status,
-          type: OpenAI::Models::Responses::ResponseInputMessageItem::type_
+          role: OpenAI::Responses::ResponseInputMessageItem::role,
+          status: OpenAI::Responses::ResponseInputMessageItem::status,
+          type: OpenAI::Responses::ResponseInputMessageItem::type_
         }
 
       class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel
@@ -15,30 +15,28 @@ module OpenAI
 
         attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list
 
-        attr_accessor role: OpenAI::Models::Responses::ResponseInputMessageItem::role
+        attr_accessor role: OpenAI::Responses::ResponseInputMessageItem::role
 
-        attr_reader status: OpenAI::Models::Responses::ResponseInputMessageItem::status?
+        attr_reader status: OpenAI::Responses::ResponseInputMessageItem::status?
 
         def status=: (
-          OpenAI::Models::Responses::ResponseInputMessageItem::status
-        ) -> OpenAI::Models::Responses::ResponseInputMessageItem::status
+          OpenAI::Responses::ResponseInputMessageItem::status
+        ) -> OpenAI::Responses::ResponseInputMessageItem::status
 
-        attr_reader type: OpenAI::Models::Responses::ResponseInputMessageItem::type_?
+        attr_reader type: OpenAI::Responses::ResponseInputMessageItem::type_?
 
         def type=: (
-          OpenAI::Models::Responses::ResponseInputMessageItem::type_
-        ) -> OpenAI::Models::Responses::ResponseInputMessageItem::type_
+          OpenAI::Responses::ResponseInputMessageItem::type_
+        ) -> OpenAI::Responses::ResponseInputMessageItem::type_
 
         def initialize: (
           id: String,
           content: OpenAI::Models::Responses::response_input_message_content_list,
-          role: OpenAI::Models::Responses::ResponseInputMessageItem::role,
-          ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status,
-          ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_
+          role: OpenAI::Responses::ResponseInputMessageItem::role,
+          ?status: OpenAI::Responses::ResponseInputMessageItem::status,
+          ?type: OpenAI::Responses::ResponseInputMessageItem::type_
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_input_message_item
-
         type role = :user | :system | :developer
 
         module Role
@@ -48,7 +46,7 @@ module OpenAI
           SYSTEM: :system
           DEVELOPER: :developer
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::role]
         end
 
         type status = :in_progress | :completed | :incomplete
@@ -60,7 +58,7 @@ module OpenAI
           COMPLETED: :completed
           INCOMPLETE: :incomplete
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::status]
         end
 
         type type_ = :message
@@ -70,7 +68,7 @@ module OpenAI
 
           MESSAGE: :message
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::type_]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs
index bb0b03f9..e60a488e 100644
--- a/sig/openai/models/responses/response_input_text.rbs
+++ b/sig/openai/models/responses/response_input_text.rbs
@@ -9,8 +9,6 @@ module OpenAI
         attr_accessor type: :input_text
 
         def initialize: (text: String, ?type: :input_text) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_input_text
       end
     end
   end
diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs
index a84d2504..ab5eaa4c 100644
--- a/sig/openai/models/responses/response_item.rbs
+++ b/sig/openai/models/responses/response_item.rbs
@@ -2,19 +2,19 @@ module OpenAI
   module Models
     module Responses
       type response_item =
-        OpenAI::Models::Responses::ResponseInputMessageItem
-        | OpenAI::Models::Responses::ResponseOutputMessage
-        | OpenAI::Models::Responses::ResponseFileSearchToolCall
-        | OpenAI::Models::Responses::ResponseComputerToolCall
-        | OpenAI::Models::Responses::ResponseComputerToolCallOutputItem
-        | OpenAI::Models::Responses::ResponseFunctionWebSearch
-        | OpenAI::Models::Responses::ResponseFunctionToolCallItem
-        | OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem
+        OpenAI::Responses::ResponseInputMessageItem
+        | OpenAI::Responses::ResponseOutputMessage
+        | OpenAI::Responses::ResponseFileSearchToolCall
+        | OpenAI::Responses::ResponseComputerToolCall
+        | OpenAI::Responses::ResponseComputerToolCallOutputItem
+        | OpenAI::Responses::ResponseFunctionWebSearch
+        | OpenAI::Responses::ResponseFunctionToolCallItem
+        | OpenAI::Responses::ResponseFunctionToolCallOutputItem
 
       module ResponseItem
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_item]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs
index 196112a5..e7390def 100644
--- a/sig/openai/models/responses/response_item_list.rbs
+++ b/sig/openai/models/responses/response_item_list.rbs
@@ -30,8 +30,6 @@ module OpenAI
           last_id: String,
           ?object: :list
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_item_list
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs
index 23c68656..a2c7f16e 100644
--- a/sig/openai/models/responses/response_output_audio.rbs
+++ b/sig/openai/models/responses/response_output_audio.rbs
@@ -16,8 +16,6 @@ module OpenAI
           transcript: String,
           ?type: :output_audio
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_output_audio
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs
index f3a5b7b5..cee5b2c7 100644
--- a/sig/openai/models/responses/response_output_item.rbs
+++ b/sig/openai/models/responses/response_output_item.rbs
@@ -2,17 +2,17 @@ module OpenAI
   module Models
     module Responses
       type response_output_item =
-        OpenAI::Models::Responses::ResponseOutputMessage
-        | OpenAI::Models::Responses::ResponseFileSearchToolCall
-        | OpenAI::Models::Responses::ResponseFunctionToolCall
-        | OpenAI::Models::Responses::ResponseFunctionWebSearch
-        | OpenAI::Models::Responses::ResponseComputerToolCall
-        | OpenAI::Models::Responses::ResponseReasoningItem
+        OpenAI::Responses::ResponseOutputMessage
+        | OpenAI::Responses::ResponseFileSearchToolCall
+        | OpenAI::Responses::ResponseFunctionToolCall
+        | OpenAI::Responses::ResponseFunctionWebSearch
+        | OpenAI::Responses::ResponseComputerToolCall
+        | OpenAI::Responses::ResponseReasoningItem
 
       module ResponseOutputItem
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_output_item]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs
index bdbcfb2e..03ac25c8 100644
--- a/sig/openai/models/responses/response_output_item_added_event.rbs
+++ b/sig/openai/models/responses/response_output_item_added_event.rbs
@@ -20,8 +20,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.output_item.added"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_output_item_added_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs
index 076a3f09..506bcabd 100644
--- a/sig/openai/models/responses/response_output_item_done_event.rbs
+++ b/sig/openai/models/responses/response_output_item_done_event.rbs
@@ -20,8 +20,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.output_item.done"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_output_item_done_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs
index 8662cc68..47827460 100644
--- a/sig/openai/models/responses/response_output_message.rbs
+++ b/sig/openai/models/responses/response_output_message.rbs
@@ -4,41 +4,39 @@ module OpenAI
       type response_output_message =
         {
           id: String,
-          content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content],
+          content: ::Array[OpenAI::Responses::ResponseOutputMessage::content],
           role: :assistant,
-          status: OpenAI::Models::Responses::ResponseOutputMessage::status,
+          status: OpenAI::Responses::ResponseOutputMessage::status,
           type: :message
         }
 
       class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String
 
-        attr_accessor content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content]
+        attr_accessor content: ::Array[OpenAI::Responses::ResponseOutputMessage::content]
 
         attr_accessor role: :assistant
 
-        attr_accessor status: OpenAI::Models::Responses::ResponseOutputMessage::status
+        attr_accessor status: OpenAI::Responses::ResponseOutputMessage::status
 
         attr_accessor type: :message
 
         def initialize: (
           id: String,
-          content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content],
-          status: OpenAI::Models::Responses::ResponseOutputMessage::status,
+          content: ::Array[OpenAI::Responses::ResponseOutputMessage::content],
+          status: OpenAI::Responses::ResponseOutputMessage::status,
           ?role: :assistant,
           ?type: :message
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_output_message
-
         type content =
-          OpenAI::Models::Responses::ResponseOutputText
-          | OpenAI::Models::Responses::ResponseOutputRefusal
+          OpenAI::Responses::ResponseOutputText
+          | OpenAI::Responses::ResponseOutputRefusal
 
         module Content
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal]
+          def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputMessage::content]
         end
 
         type status = :in_progress | :completed | :incomplete
@@ -50,7 +48,7 @@ module OpenAI
           COMPLETED: :completed
           INCOMPLETE: :incomplete
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputMessage::status]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs
index 69e08eed..14de45aa 100644
--- a/sig/openai/models/responses/response_output_refusal.rbs
+++ b/sig/openai/models/responses/response_output_refusal.rbs
@@ -9,8 +9,6 @@ module OpenAI
         attr_accessor type: :refusal
 
         def initialize: (refusal: String, ?type: :refusal) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_output_refusal
       end
     end
   end
diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs
index e3814bc4..368b2b07 100644
--- a/sig/openai/models/responses/response_output_text.rbs
+++ b/sig/openai/models/responses/response_output_text.rbs
@@ -3,30 +3,28 @@ module OpenAI
     module Responses
       type response_output_text =
         {
-          annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation],
+          annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation],
           text: String,
           type: :output_text
         }
 
       class ResponseOutputText < OpenAI::Internal::Type::BaseModel
-        attr_accessor annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation]
+        attr_accessor annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation]
 
         attr_accessor text: String
 
         attr_accessor type: :output_text
 
         def initialize: (
-          annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation],
+          annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation],
           text: String,
           ?type: :output_text
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_output_text
-
         type annotation =
-          OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation
-          | OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation
-          | OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath
+          OpenAI::Responses::ResponseOutputText::Annotation::FileCitation
+          | OpenAI::Responses::ResponseOutputText::Annotation::URLCitation
+          | OpenAI::Responses::ResponseOutputText::Annotation::FilePath
 
         module Annotation
           extend OpenAI::Internal::Type::Union
@@ -46,8 +44,6 @@ module OpenAI
               index: Integer,
               ?type: :file_citation
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_citation
           end
 
           type url_citation =
@@ -77,8 +73,6 @@ module OpenAI
               url: String,
               ?type: :url_citation
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::url_citation
           end
 
           type file_path = { file_id: String, index: Integer, type: :file_path }
@@ -95,11 +89,9 @@ module OpenAI
               index: Integer,
               ?type: :file_path
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseOutputText::Annotation::file_path
           end
 
-          def self?.variants: -> [OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath]
+          def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputText::annotation]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs
index 19c0882c..7efc2a2d 100644
--- a/sig/openai/models/responses/response_reasoning_item.rbs
+++ b/sig/openai/models/responses/response_reasoning_item.rbs
@@ -4,37 +4,35 @@ module OpenAI
       type response_reasoning_item =
         {
           id: String,
-          summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary],
+          summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary],
           type: :reasoning,
           encrypted_content: String?,
-          status: OpenAI::Models::Responses::ResponseReasoningItem::status
+          status: OpenAI::Responses::ResponseReasoningItem::status
         }
 
       class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel
         attr_accessor id: String
 
-        attr_accessor summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary]
+        attr_accessor summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary]
 
         attr_accessor type: :reasoning
 
         attr_accessor encrypted_content: String?
 
-        attr_reader status: OpenAI::Models::Responses::ResponseReasoningItem::status?
+        attr_reader status: OpenAI::Responses::ResponseReasoningItem::status?
 
         def status=: (
-          OpenAI::Models::Responses::ResponseReasoningItem::status
-        ) -> OpenAI::Models::Responses::ResponseReasoningItem::status
+          OpenAI::Responses::ResponseReasoningItem::status
+        ) -> OpenAI::Responses::ResponseReasoningItem::status
 
         def initialize: (
           id: String,
-          summary: ::Array[OpenAI::Models::Responses::ResponseReasoningItem::Summary],
+          summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary],
           ?encrypted_content: String?,
-          ?status: OpenAI::Models::Responses::ResponseReasoningItem::status,
+          ?status: OpenAI::Responses::ResponseReasoningItem::status,
           ?type: :reasoning
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_reasoning_item
-
         type summary = { text: String, type: :summary_text }
 
         class Summary < OpenAI::Internal::Type::BaseModel
@@ -43,8 +41,6 @@ module OpenAI
           attr_accessor type: :summary_text
 
           def initialize: (text: String, ?type: :summary_text) -> void
-
-          def to_hash: -> OpenAI::Models::Responses::ResponseReasoningItem::summary
         end
 
         type status = :in_progress | :completed | :incomplete
@@ -56,7 +52,7 @@ module OpenAI
           COMPLETED: :completed
           INCOMPLETE: :incomplete
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseReasoningItem::status]
+          def self?.values: -> ::Array[OpenAI::Responses::ResponseReasoningItem::status]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs
index d970d8a3..05c6f71d 100644
--- a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs
+++ b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs
@@ -5,7 +5,7 @@ module OpenAI
         {
           item_id: String,
           output_index: Integer,
-          part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part,
+          part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part,
           summary_index: Integer,
           type: :"response.reasoning_summary_part.added"
         }
@@ -15,7 +15,7 @@ module OpenAI
 
         attr_accessor output_index: Integer
 
-        attr_accessor part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part
+        attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part
 
         attr_accessor summary_index: Integer
 
@@ -24,13 +24,11 @@ module OpenAI
         def initialize: (
           item_id: String,
           output_index: Integer,
-          part: OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part,
+          part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part,
           summary_index: Integer,
           ?type: :"response.reasoning_summary_part.added"
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_part_added_event
-
         type part = { text: String, type: :summary_text }
 
         class Part < OpenAI::Internal::Type::BaseModel
@@ -39,8 +37,6 @@ module OpenAI
           attr_accessor type: :summary_text
 
           def initialize: (text: String, ?type: :summary_text) -> void
-
-          def to_hash: -> OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::part
         end
       end
     end
diff --git a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs
index 88fe9e2a..c03f6cf4 100644
--- a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs
+++ b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs
@@ -5,7 +5,7 @@ module OpenAI
         {
           item_id: String,
           output_index: Integer,
-          part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part,
+          part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part,
           summary_index: Integer,
           type: :"response.reasoning_summary_part.done"
         }
@@ -15,7 +15,7 @@ module OpenAI
 
         attr_accessor output_index: Integer
 
-        attr_accessor part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part
+        attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part
 
         attr_accessor summary_index: Integer
 
@@ -24,13 +24,11 @@ module OpenAI
         def initialize: (
           item_id: String,
           output_index: Integer,
-          part: OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part,
+          part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part,
           summary_index: Integer,
           ?type: :"response.reasoning_summary_part.done"
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_part_done_event
-
         type part = { text: String, type: :summary_text }
 
         class Part < OpenAI::Internal::Type::BaseModel
@@ -39,8 +37,6 @@ module OpenAI
           attr_accessor type: :summary_text
 
           def initialize: (text: String, ?type: :summary_text) -> void
-
-          def to_hash: -> OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::part
         end
       end
     end
diff --git a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs
index d73f86b6..494fa8c3 100644
--- a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs
+++ b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           summary_index: Integer,
           ?type: :"response.reasoning_summary_text.delta"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_text_delta_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs
index 63b910b7..36bb9006 100644
--- a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs
+++ b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           text: String,
           ?type: :"response.reasoning_summary_text.done"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_reasoning_summary_text_done_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs
index 0ab5e996..2dc7df34 100644
--- a/sig/openai/models/responses/response_refusal_delta_event.rbs
+++ b/sig/openai/models/responses/response_refusal_delta_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.refusal.delta"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_refusal_delta_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs
index 761e42a0..852fbb7b 100644
--- a/sig/openai/models/responses/response_refusal_done_event.rbs
+++ b/sig/openai/models/responses/response_refusal_done_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           refusal: String,
           ?type: :"response.refusal.done"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_refusal_done_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs
index 9a99b74c..2a8ef689 100644
--- a/sig/openai/models/responses/response_retrieve_params.rbs
+++ b/sig/openai/models/responses/response_retrieve_params.rbs
@@ -19,8 +19,6 @@ module OpenAI
         ?include: ::Array[OpenAI::Models::Responses::response_includable],
         ?request_options: OpenAI::request_opts
       ) -> void
-
-      def to_hash: -> OpenAI::Models::Responses::response_retrieve_params
     end
   end
 end
diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs
index e81d05de..b8f72810 100644
--- a/sig/openai/models/responses/response_stream_event.rbs
+++ b/sig/openai/models/responses/response_stream_event.rbs
@@ -2,47 +2,47 @@ module OpenAI
   module Models
     module Responses
       type response_stream_event =
-        OpenAI::Models::Responses::ResponseAudioDeltaEvent
-        | OpenAI::Models::Responses::ResponseAudioDoneEvent
-        | OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent
-        | OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent
-        | OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent
-        | OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent
-        | OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent
-        | OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent
-        | OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent
-        | OpenAI::Models::Responses::ResponseCompletedEvent
-        | OpenAI::Models::Responses::ResponseContentPartAddedEvent
-        | OpenAI::Models::Responses::ResponseContentPartDoneEvent
-        | OpenAI::Models::Responses::ResponseCreatedEvent
-        | OpenAI::Models::Responses::ResponseErrorEvent
-        | OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent
-        | OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent
-        | OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent
-        | OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
-        | OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent
-        | OpenAI::Models::Responses::ResponseInProgressEvent
-        | OpenAI::Models::Responses::ResponseFailedEvent
-        | OpenAI::Models::Responses::ResponseIncompleteEvent
-        | OpenAI::Models::Responses::ResponseOutputItemAddedEvent
-        | OpenAI::Models::Responses::ResponseOutputItemDoneEvent
-        | OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent
-        | OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent
-        | OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent
-        | OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent
-        | OpenAI::Models::Responses::ResponseRefusalDeltaEvent
-        | OpenAI::Models::Responses::ResponseRefusalDoneEvent
-        | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent
-        | OpenAI::Models::Responses::ResponseTextDeltaEvent
-        | OpenAI::Models::Responses::ResponseTextDoneEvent
-        | OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent
-        | OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent
-        | OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent
+        OpenAI::Responses::ResponseAudioDeltaEvent
+        | OpenAI::Responses::ResponseAudioDoneEvent
+        | OpenAI::Responses::ResponseAudioTranscriptDeltaEvent
+        | OpenAI::Responses::ResponseAudioTranscriptDoneEvent
+        | OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent
+        | OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent
+        | OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent
+        | OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent
+        | OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent
+        | OpenAI::Responses::ResponseCompletedEvent
+        | OpenAI::Responses::ResponseContentPartAddedEvent
+        | OpenAI::Responses::ResponseContentPartDoneEvent
+        | OpenAI::Responses::ResponseCreatedEvent
+        | OpenAI::Responses::ResponseErrorEvent
+        | OpenAI::Responses::ResponseFileSearchCallCompletedEvent
+        | OpenAI::Responses::ResponseFileSearchCallInProgressEvent
+        | OpenAI::Responses::ResponseFileSearchCallSearchingEvent
+        | OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent
+        | OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent
+        | OpenAI::Responses::ResponseInProgressEvent
+        | OpenAI::Responses::ResponseFailedEvent
+        | OpenAI::Responses::ResponseIncompleteEvent
+        | OpenAI::Responses::ResponseOutputItemAddedEvent
+        | OpenAI::Responses::ResponseOutputItemDoneEvent
+        | OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent
+        | OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent
+        | OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent
+        | OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent
+        | OpenAI::Responses::ResponseRefusalDeltaEvent
+        | OpenAI::Responses::ResponseRefusalDoneEvent
+        | OpenAI::Responses::ResponseTextAnnotationDeltaEvent
+        | OpenAI::Responses::ResponseTextDeltaEvent
+        | OpenAI::Responses::ResponseTextDoneEvent
+        | OpenAI::Responses::ResponseWebSearchCallCompletedEvent
+        | OpenAI::Responses::ResponseWebSearchCallInProgressEvent
+        | OpenAI::Responses::ResponseWebSearchCallSearchingEvent
 
       module ResponseStreamEvent
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::response_stream_event]
       end
     end
   end
diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
index 54d32aa3..36d79ace 100644
--- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs
+++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs
@@ -3,7 +3,7 @@ module OpenAI
     module Responses
       type response_text_annotation_delta_event =
         {
-          annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation,
+          annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation,
           annotation_index: Integer,
           content_index: Integer,
           item_id: String,
@@ -12,7 +12,7 @@ module OpenAI
         }
 
       class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel
-        attr_accessor annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation
+        attr_accessor annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation
 
         attr_accessor annotation_index: Integer
 
@@ -25,7 +25,7 @@ module OpenAI
         attr_accessor type: :"response.output_text.annotation.added"
 
         def initialize: (
-          annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation,
+          annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation,
           annotation_index: Integer,
           content_index: Integer,
           item_id: String,
@@ -33,12 +33,10 @@ module OpenAI
           ?type: :"response.output_text.annotation.added"
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_text_annotation_delta_event
-
         type annotation =
-          OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation
-          | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation
-          | OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath
+          OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation
+          | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation
+          | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath
 
         module Annotation
           extend OpenAI::Internal::Type::Union
@@ -58,8 +56,6 @@ module OpenAI
               index: Integer,
               ?type: :file_citation
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_citation
           end
 
           type url_citation =
@@ -89,8 +85,6 @@ module OpenAI
               url: String,
               ?type: :url_citation
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::url_citation
           end
 
           type file_path = { file_id: String, index: Integer, type: :file_path }
@@ -107,11 +101,9 @@ module OpenAI
               index: Integer,
               ?type: :file_path
             ) -> void
-
-            def to_hash: -> OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::file_path
           end
 
-          def self?.variants: -> [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath]
+          def self?.variants: -> ::Array[OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation]
         end
       end
     end
diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs
index 30ff82f7..816d5b67 100644
--- a/sig/openai/models/responses/response_text_config.rbs
+++ b/sig/openai/models/responses/response_text_config.rbs
@@ -14,8 +14,6 @@ module OpenAI
         def initialize: (
           ?format_: OpenAI::Models::Responses::response_format_text_config
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_text_config
       end
     end
   end
diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs
index 2c36a1bf..c5025499 100644
--- a/sig/openai/models/responses/response_text_delta_event.rbs
+++ b/sig/openai/models/responses/response_text_delta_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.output_text.delta"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_text_delta_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs
index 86dc66db..0585e9f1 100644
--- a/sig/openai/models/responses/response_text_done_event.rbs
+++ b/sig/openai/models/responses/response_text_done_event.rbs
@@ -28,8 +28,6 @@ module OpenAI
           text: String,
           ?type: :"response.output_text.done"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_text_done_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs
index 28b094d0..a5b8cbc5 100644
--- a/sig/openai/models/responses/response_usage.rbs
+++ b/sig/openai/models/responses/response_usage.rbs
@@ -4,41 +4,37 @@ module OpenAI
       type response_usage =
         {
           input_tokens: Integer,
-          input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails,
+          input_tokens_details: OpenAI::Responses::ResponseUsage::InputTokensDetails,
           output_tokens: Integer,
-          output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails,
+          output_tokens_details: OpenAI::Responses::ResponseUsage::OutputTokensDetails,
           total_tokens: Integer
         }
 
       class ResponseUsage < OpenAI::Internal::Type::BaseModel
         attr_accessor input_tokens: Integer
 
-        attr_accessor input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails
+        attr_accessor input_tokens_details: OpenAI::Responses::ResponseUsage::InputTokensDetails
 
        attr_accessor output_tokens: Integer
 
-        attr_accessor output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails
+        attr_accessor output_tokens_details: OpenAI::Responses::ResponseUsage::OutputTokensDetails
 
         attr_accessor total_tokens: Integer
 
         def initialize: (
           input_tokens: Integer,
-          input_tokens_details: OpenAI::Models::Responses::ResponseUsage::InputTokensDetails,
+          input_tokens_details: OpenAI::Responses::ResponseUsage::InputTokensDetails,
           output_tokens: Integer,
-          output_tokens_details: OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails,
+          output_tokens_details: OpenAI::Responses::ResponseUsage::OutputTokensDetails,
           total_tokens: Integer
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::response_usage
-
         type input_tokens_details = { cached_tokens: Integer }
 
         class InputTokensDetails < OpenAI::Internal::Type::BaseModel
           attr_accessor cached_tokens: Integer
 
           def initialize: (cached_tokens: Integer) -> void
-
-          def to_hash: -> OpenAI::Models::Responses::ResponseUsage::input_tokens_details
         end
 
         type output_tokens_details = { reasoning_tokens: Integer }
@@ -47,8 +43,6 @@ module OpenAI
           attr_accessor reasoning_tokens: Integer
 
           def initialize: (reasoning_tokens: Integer) -> void
-
-          def to_hash: -> OpenAI::Models::Responses::ResponseUsage::output_tokens_details
         end
       end
     end
diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs
index d5772178..852a046e 100644
--- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs
+++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs
@@ -20,8 +20,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.web_search_call.completed"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_web_search_call_completed_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs
index 56dfa277..996acf2e 100644
--- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs
+++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs
@@ -20,8 +20,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.web_search_call.in_progress"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_web_search_call_in_progress_event
       end
     end
   end
diff --git a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs
index 3a70544f..b77bc5aa 100644
--- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs
+++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs
@@ -20,8 +20,6 @@ module OpenAI
           output_index: Integer,
           ?type: :"response.web_search_call.searching"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::response_web_search_call_searching_event
       end
     end
   end
diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs
index 0d3a572d..81616de5 100644
--- a/sig/openai/models/responses/tool.rbs
+++ b/sig/openai/models/responses/tool.rbs
@@ -2,15 +2,15 @@ module OpenAI
   module Models
     module Responses
       type tool =
-        OpenAI::Models::Responses::FileSearchTool
-        | OpenAI::Models::Responses::FunctionTool
-        | OpenAI::Models::Responses::ComputerTool
-        | OpenAI::Models::Responses::WebSearchTool
+        OpenAI::Responses::FileSearchTool
+        | OpenAI::Responses::FunctionTool
+        | OpenAI::Responses::ComputerTool
+        | OpenAI::Responses::WebSearchTool
 
       module Tool
         extend OpenAI::Internal::Type::Union
 
-        def self?.variants: -> [OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool]
+        def self?.variants: -> ::Array[OpenAI::Models::Responses::tool]
       end
     end
   end
diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs
index a1881b61..1aa68ba1 100644
--- a/sig/openai/models/responses/tool_choice_function.rbs
+++ b/sig/openai/models/responses/tool_choice_function.rbs
@@ -9,8 +9,6 @@ module OpenAI
         attr_accessor type: :function
 
         def initialize: (name: String, ?type: :function) -> void
-
-        def to_hash: -> OpenAI::Models::Responses::tool_choice_function
       end
     end
   end
diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs
index 588a58ac..71458e63 100644
--- a/sig/openai/models/responses/tool_choice_types.rbs
+++ b/sig/openai/models/responses/tool_choice_types.rbs
@@ -2,17 +2,15 @@ module OpenAI
   module Models
     module Responses
       type tool_choice_types =
-        { type: OpenAI::Models::Responses::ToolChoiceTypes::type_ }
+        { type: OpenAI::Responses::ToolChoiceTypes::type_ }
 
       class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel
-        attr_accessor type: OpenAI::Models::Responses::ToolChoiceTypes::type_
+        attr_accessor type: OpenAI::Responses::ToolChoiceTypes::type_
 
         def initialize: (
-          type: OpenAI::Models::Responses::ToolChoiceTypes::type_
+          type: OpenAI::Responses::ToolChoiceTypes::type_
         ) -> void
 
-        def to_hash: -> OpenAI::Models::Responses::tool_choice_types
-
         type type_ =
           :file_search
           | :web_search_preview
@@ -27,7 +25,7 @@ module OpenAI
           COMPUTER_USE_PREVIEW: :computer_use_preview
           WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11
 
-          def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_]
+          def self?.values: -> ::Array[OpenAI::Responses::ToolChoiceTypes::type_]
         end
       end
     end
diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs
index 6b68aad5..dba0454c 100644
--- a/sig/openai/models/responses/web_search_tool.rbs
+++ b/sig/openai/models/responses/web_search_tool.rbs
@@ -3,30 +3,28 @@ module OpenAI
     module Responses
       type web_search_tool =
         {
-          type: OpenAI::Models::Responses::WebSearchTool::type_,
-          search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size,
-          user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation?
+          type: OpenAI::Responses::WebSearchTool::type_,
+          search_context_size: OpenAI::Responses::WebSearchTool::search_context_size,
+          user_location: OpenAI::Responses::WebSearchTool::UserLocation?
         }
 
       class WebSearchTool < OpenAI::Internal::Type::BaseModel
-        attr_accessor type: OpenAI::Models::Responses::WebSearchTool::type_
+        attr_accessor type: OpenAI::Responses::WebSearchTool::type_
 
-        attr_reader search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size?
+        attr_reader search_context_size: OpenAI::Responses::WebSearchTool::search_context_size?
 
         def search_context_size=: (
-          OpenAI::Models::Responses::WebSearchTool::search_context_size
-        ) -> OpenAI::Models::Responses::WebSearchTool::search_context_size
+          OpenAI::Responses::WebSearchTool::search_context_size
+        ) -> OpenAI::Responses::WebSearchTool::search_context_size
 
-        attr_accessor user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation?
+        attr_accessor user_location: OpenAI::Responses::WebSearchTool::UserLocation?
 
         def initialize: (
-          type: OpenAI::Models::Responses::WebSearchTool::type_,
-          ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size,
-          ?user_location: OpenAI::Models::Responses::WebSearchTool::UserLocation?
+          type: OpenAI::Responses::WebSearchTool::type_,
+          ?search_context_size: OpenAI::Responses::WebSearchTool::search_context_size,
+          ?user_location: OpenAI::Responses::WebSearchTool::UserLocation?
) -> void - def to_hash: -> OpenAI::Models::Responses::web_search_tool - type type_ = :web_search_preview | :web_search_preview_2025_03_11 module Type @@ -35,7 +33,7 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 - def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::type_] + def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::type_] end type search_context_size = :low | :medium | :high @@ -47,7 +45,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::search_context_size] + def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::search_context_size] end type user_location = @@ -77,8 +75,6 @@ module OpenAI ?timezone: String?, ?type: :approximate ) -> void - - def to_hash: -> OpenAI::Models::Responses::WebSearchTool::user_location end end end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index bc8de438..42a66641 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -3,7 +3,7 @@ module OpenAI type responses_model = String | OpenAI::Models::chat_model - | OpenAI::Models::ResponsesModel::responses_only_model + | OpenAI::ResponsesModel::responses_only_model module ResponsesModel extend OpenAI::Internal::Type::Union @@ -22,10 +22,10 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::Models::ResponsesModel::responses_only_model] + def self?.values: -> ::Array[OpenAI::ResponsesModel::responses_only_model] end - def self?.variants: -> [String, OpenAI::Models::chat_model, OpenAI::Models::ResponsesModel::responses_only_model] + def self?.variants: -> ::Array[OpenAI::Models::responses_model] end end end diff --git a/sig/openai/models/static_file_chunking_strategy.rbs b/sig/openai/models/static_file_chunking_strategy.rbs index 7c00abb0..0ba93ae4 100644 --- a/sig/openai/models/static_file_chunking_strategy.rbs +++ b/sig/openai/models/static_file_chunking_strategy.rbs @@ -12,8 +12,6 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void - - def to_hash: -> OpenAI::Models::static_file_chunking_strategy end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index 1122e914..6b2d231e 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -1,19 +1,17 @@ module OpenAI module Models type static_file_chunking_strategy_object = - { static: OpenAI::Models::StaticFileChunkingStrategy, type: :static } + { static: OpenAI::StaticFileChunkingStrategy, type: :static } class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel - attr_accessor static: OpenAI::Models::StaticFileChunkingStrategy + attr_accessor static: OpenAI::StaticFileChunkingStrategy attr_accessor type: :static def initialize: ( - static: OpenAI::Models::StaticFileChunkingStrategy, + static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void - - def to_hash: -> OpenAI::Models::static_file_chunking_strategy_object end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index 639a7ea1..c0f5182c 100644 --- 
a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -1,19 +1,17 @@ module OpenAI module Models type static_file_chunking_strategy_object_param = - { static: OpenAI::Models::StaticFileChunkingStrategy, type: :static } + { static: OpenAI::StaticFileChunkingStrategy, type: :static } class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel - attr_accessor static: OpenAI::Models::StaticFileChunkingStrategy + attr_accessor static: OpenAI::StaticFileChunkingStrategy attr_accessor type: :static def initialize: ( - static: OpenAI::Models::StaticFileChunkingStrategy, + static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void - - def to_hash: -> OpenAI::Models::static_file_chunking_strategy_object_param end end end diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index 0a50164d..955ba4fe 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -9,8 +9,8 @@ module OpenAI filename: String, object: :upload, purpose: String, - status: OpenAI::Models::Upload::status, - file: OpenAI::Models::FileObject? + status: OpenAI::Upload::status, + file: OpenAI::FileObject? } class Upload < OpenAI::Internal::Type::BaseModel @@ -28,9 +28,9 @@ module OpenAI attr_accessor purpose: String - attr_accessor status: OpenAI::Models::Upload::status + attr_accessor status: OpenAI::Upload::status - attr_accessor file: OpenAI::Models::FileObject? + attr_accessor file: OpenAI::FileObject? def initialize: ( id: String, @@ -39,13 +39,11 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Models::Upload::status, - ?file: OpenAI::Models::FileObject?, + status: OpenAI::Upload::status, + ?file: OpenAI::FileObject?, ?object: :upload ) -> void - def to_hash: -> OpenAI::Models::upload - type status = :pending | :completed | :cancelled | :expired module Status @@ -56,7 +54,7 @@ module OpenAI CANCELLED: :cancelled EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Models::Upload::status] + def self?.values: -> ::Array[OpenAI::Upload::status] end end end diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index 669ffc83..c5cb5b12 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::upload_cancel_params end end end diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index 5cfff932..4bdf3d87 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -19,8 +19,6 @@ module OpenAI ?md5: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::upload_complete_params end end end diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 9b68eb00..dce638fe 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -28,8 +28,6 @@ module OpenAI purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::upload_create_params end end end diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 
f3ea9aa1..7a94c0ae 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -15,8 +15,6 @@ module OpenAI data: Pathname | StringIO | IO | OpenAI::FilePart, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::Uploads::part_create_params end end end diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 93530d14..17613909 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -26,8 +26,6 @@ module OpenAI upload_id: String, ?object: :"upload.part" ) -> void - - def to_hash: -> OpenAI::Models::Uploads::upload_part end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index a29d28eb..5ba3aa0f 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -4,14 +4,14 @@ module OpenAI { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer?, metadata: OpenAI::Models::metadata?, name: String, object: :vector_store, - status: OpenAI::Models::VectorStore::status, + status: OpenAI::VectorStore::status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter, + expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: Integer? } @@ -20,7 +20,7 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor file_counts: OpenAI::Models::VectorStore::FileCounts + attr_accessor file_counts: OpenAI::VectorStore::FileCounts attr_accessor last_active_at: Integer? @@ -30,34 +30,32 @@ module OpenAI attr_accessor object: :vector_store - attr_accessor status: OpenAI::Models::VectorStore::status + attr_accessor status: OpenAI::VectorStore::status attr_accessor usage_bytes: Integer - attr_reader expires_after: OpenAI::Models::VectorStore::ExpiresAfter? + attr_reader expires_after: OpenAI::VectorStore::ExpiresAfter? def expires_after=: ( - OpenAI::Models::VectorStore::ExpiresAfter - ) -> OpenAI::Models::VectorStore::ExpiresAfter + OpenAI::VectorStore::ExpiresAfter + ) -> OpenAI::VectorStore::ExpiresAfter attr_accessor expires_at: Integer? 
def initialize: ( id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer?, metadata: OpenAI::Models::metadata?, name: String, - status: OpenAI::Models::VectorStore::status, + status: OpenAI::VectorStore::status, usage_bytes: Integer, - ?expires_after: OpenAI::Models::VectorStore::ExpiresAfter, + ?expires_after: OpenAI::VectorStore::ExpiresAfter, ?expires_at: Integer?, ?object: :vector_store ) -> void - def to_hash: -> OpenAI::Models::vector_store - type file_counts = { cancelled: Integer, @@ -85,8 +83,6 @@ module OpenAI in_progress: Integer, total: Integer ) -> void - - def to_hash: -> OpenAI::Models::VectorStore::file_counts end type status = :expired | :in_progress | :completed @@ -98,7 +94,7 @@ module OpenAI IN_PROGRESS: :in_progress COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Models::VectorStore::status] + def self?.values: -> ::Array[OpenAI::VectorStore::status] end type expires_after = { anchor: :last_active_at, days: Integer } @@ -109,8 +105,6 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void - - def to_hash: -> OpenAI::Models::VectorStore::expires_after end end end diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index accb9698..67a550f7 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_create_params = { chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, file_ids: ::Array[String], metadata: OpenAI::Models::metadata?, name: String @@ -20,11 +20,11 @@ module OpenAI OpenAI::Models::file_chunking_strategy_param ) -> OpenAI::Models::file_chunking_strategy_param - attr_reader expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter? + attr_reader expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter? def expires_after=: ( - OpenAI::Models::VectorStoreCreateParams::ExpiresAfter - ) -> OpenAI::Models::VectorStoreCreateParams::ExpiresAfter + OpenAI::VectorStoreCreateParams::ExpiresAfter + ) -> OpenAI::VectorStoreCreateParams::ExpiresAfter attr_reader file_ids: ::Array[String]? 
@@ -38,15 +38,13 @@ module OpenAI def initialize: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::vector_store_create_params - type expires_after = { anchor: :last_active_at, days: Integer } class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -55,8 +53,6 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void - - def to_hash: -> OpenAI::Models::VectorStoreCreateParams::expires_after end end end diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index 1dfb12b4..89da672f 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::vector_store_delete_params end end end diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs index 1c8ca3ae..8bc51626 100644 --- a/sig/openai/models/vector_store_deleted.rbs +++ b/sig/openai/models/vector_store_deleted.rbs @@ -15,8 +15,6 @@ module OpenAI deleted: bool, ?object: :"vector_store.deleted" ) -> void - - def to_hash: -> OpenAI::Models::vector_store_deleted end end end diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index 5c2aa3f9..ed5b3079 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -39,8 +39,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::vector_store_list_params - type order = :asc | :desc module Order diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index 51b51332..6b466528 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -8,8 +8,6 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void - - def to_hash: -> OpenAI::Models::vector_store_retrieve_params end end end diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 9d571eee..863ea4ab 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -5,7 +5,7 @@ module OpenAI query: OpenAI::Models::VectorStoreSearchParams::query, filters: OpenAI::Models::VectorStoreSearchParams::filters, max_num_results: Integer, - ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, rewrite_query: bool } & OpenAI::Internal::Type::request_parameters @@ -26,11 +26,11 @@ module OpenAI def max_num_results=: (Integer) -> Integer - attr_reader ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions? + attr_reader ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions? 
def ranking_options=: ( - OpenAI::Models::VectorStoreSearchParams::RankingOptions - ) -> OpenAI::Models::VectorStoreSearchParams::RankingOptions + OpenAI::VectorStoreSearchParams::RankingOptions + ) -> OpenAI::VectorStoreSearchParams::RankingOptions attr_reader rewrite_query: bool? @@ -40,56 +40,51 @@ module OpenAI query: OpenAI::Models::VectorStoreSearchParams::query, ?filters: OpenAI::Models::VectorStoreSearchParams::filters, ?max_num_results: Integer, - ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ?ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, ?rewrite_query: bool, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::vector_store_search_params - type query = String | ::Array[String] module Query extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, ::Array[String]] + def self?.variants: -> ::Array[OpenAI::Models::VectorStoreSearchParams::query] StringArray: OpenAI::Internal::Type::Converter end - type filters = - OpenAI::Models::ComparisonFilter | OpenAI::Models::CompoundFilter + type filters = OpenAI::ComparisonFilter | OpenAI::CompoundFilter module Filters extend OpenAI::Internal::Type::Union - def self?.variants: -> [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + def self?.variants: -> ::Array[OpenAI::Models::VectorStoreSearchParams::filters] end type ranking_options = { - ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, + ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_reader ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker? + attr_reader ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker? def ranker=: ( - OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker - ) -> OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker + OpenAI::VectorStoreSearchParams::RankingOptions::ranker + ) -> OpenAI::VectorStoreSearchParams::RankingOptions::ranker attr_reader score_threshold: Float? 
def score_threshold=: (Float) -> Float def initialize: ( - ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, + ?ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, ?score_threshold: Float ) -> void - def to_hash: -> OpenAI::Models::VectorStoreSearchParams::ranking_options - type ranker = :auto | :"default-2024-11-15" module Ranker @@ -98,7 +93,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self?.values: -> ::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::VectorStoreSearchParams::RankingOptions::ranker] end end end diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index d7c590bd..2c977b03 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -28,14 +28,12 @@ module OpenAI score: Float ) -> void - def to_hash: -> OpenAI::Models::vector_store_search_response - type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, Float, bool] + def self?.variants: -> ::Array[OpenAI::Models::VectorStoreSearchResponse::attribute] end type content = @@ -54,8 +52,6 @@ module OpenAI type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ ) -> void - def to_hash: -> OpenAI::Models::VectorStoreSearchResponse::content - type type_ = :text module Type diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index f8258d41..cb5a0433 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type vector_store_update_params = { - expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, metadata: OpenAI::Models::metadata?, name: String? } @@ -12,21 +12,19 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter? + attr_accessor expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter? attr_accessor metadata: OpenAI::Models::metadata? attr_accessor name: String? 
def initialize: ( - ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::vector_store_update_params - type expires_after = { anchor: :last_active_at, days: Integer } class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -35,8 +33,6 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void - - def to_hash: -> OpenAI::Models::VectorStoreUpdateParams::expires_after end end end diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index 26c9d893..0ee2d4d8 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -14,8 +14,6 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_batch_cancel_params end end end diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 03b56b5a..3715958c 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_batch_create_params = { file_ids: ::Array[String], - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_ids: ::Array[String] - attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? 
@@ -25,19 +25,17 @@ module OpenAI def initialize: ( file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::VectorStores::file_batch_create_params - type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, Float, bool] + def self?.variants: -> ::Array[OpenAI::VectorStores::FileBatchCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index 87868689..918e28f0 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -52,8 +52,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::VectorStores::file_batch_list_files_params - type filter = :in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index b43f76b8..1e21e9d1 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -14,8 +14,6 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_batch_retrieve_params end end end diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index a3f37f30..5d35fcf1 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -14,8 +14,6 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_content_params end end end diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs index df2aa45a..23306b97 100644 --- a/sig/openai/models/vector_stores/file_content_response.rbs +++ b/sig/openai/models/vector_stores/file_content_response.rbs @@ -13,8 +13,6 @@ module OpenAI def type=: (String) -> String def initialize: (?text: String, ?type: String) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_content_response end end end diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 9a396ee0..29469ef1 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_create_params = { file_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]? 
+ attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? @@ -25,19 +25,17 @@ module OpenAI def initialize: ( file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::VectorStores::file_create_params - type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, Float, bool] + def self?.variants: -> ::Array[OpenAI::VectorStores::FileCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index a80ed26d..5fc9986b 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -14,8 +14,6 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_delete_params end end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 7b916738..623629d4 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -48,8 +48,6 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::VectorStores::file_list_params - type filter = :in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index 266f9e3c..d46822ed 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -14,8 +14,6 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void - - def to_hash: -> OpenAI::Models::VectorStores::file_retrieve_params end end end diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index 7b75e4f2..58700418 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_update_params = { vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? + attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? } & OpenAI::Internal::Type::request_parameters @@ -14,22 +14,20 @@ module OpenAI attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? 
def initialize: ( vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts ) -> void - def to_hash: -> OpenAI::Models::VectorStores::file_update_params - type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> [String, Float, bool] + def self?.variants: -> ::Array[OpenAI::VectorStores::FileUpdateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 7039aa57..5bbb1310 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -7,12 +7,12 @@ module OpenAI { id: String, created_at: Integer, - last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError?, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, object: :"vector_store.file", - status: OpenAI::Models::VectorStores::VectorStoreFile::status, + status: OpenAI::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, + attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy } @@ -21,17 +21,17 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError? + attr_accessor last_error: OpenAI::VectorStores::VectorStoreFile::LastError? attr_accessor object: :"vector_store.file" - attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFile::status + attr_accessor status: OpenAI::VectorStores::VectorStoreFile::status attr_accessor usage_bytes: Integer attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy? 
@@ -42,35 +42,31 @@ module OpenAI
         def initialize: (
           id: String,
           created_at: Integer,
-          last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError?,
-          status: OpenAI::Models::VectorStores::VectorStoreFile::status,
+          last_error: OpenAI::VectorStores::VectorStoreFile::LastError?,
+          status: OpenAI::VectorStores::VectorStoreFile::status,
           usage_bytes: Integer,
           vector_store_id: String,
-          ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?,
+          ?attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?,
           ?chunking_strategy: OpenAI::Models::file_chunking_strategy,
           ?object: :"vector_store.file"
         ) -> void
 
-        def to_hash: -> OpenAI::Models::VectorStores::vector_store_file
-
         type last_error =
           {
-            code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code,
+            code: OpenAI::VectorStores::VectorStoreFile::LastError::code,
             message: String
           }
 
         class LastError < OpenAI::Internal::Type::BaseModel
-          attr_accessor code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code
+          attr_accessor code: OpenAI::VectorStores::VectorStoreFile::LastError::code
 
           attr_accessor message: String
 
           def initialize: (
-            code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code,
+            code: OpenAI::VectorStores::VectorStoreFile::LastError::code,
             message: String
           ) -> void
 
-          def to_hash: -> OpenAI::Models::VectorStores::VectorStoreFile::last_error
-
           type code = :server_error | :unsupported_file | :invalid_file
 
           module Code
@@ -80,7 +76,7 @@ module OpenAI
             UNSUPPORTED_FILE: :unsupported_file
             INVALID_FILE: :invalid_file
 
-            def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::code]
+            def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::LastError::code]
           end
         end
@@ -94,7 +90,7 @@ module OpenAI
           CANCELLED: :cancelled
           FAILED: :failed
 
-          def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::status]
+          def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::status]
         end
 
         type attribute = String | Float | bool
@@ -102,7 +98,7 @@ module OpenAI
         module Attribute
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> [String, Float, bool]
+          def self?.variants: -> ::Array[OpenAI::VectorStores::VectorStoreFile::attribute]
         end
       end
     end
   end
diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
index 6c286435..b21e96bd 100644
--- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs
@@ -7,9 +7,9 @@ module OpenAI
         {
           id: String,
           created_at: Integer,
-          file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
+          file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts,
           object: :"vector_store.files_batch",
-          status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status,
+          status: OpenAI::VectorStores::VectorStoreFileBatch::status,
           vector_store_id: String
         }
 
@@ -18,25 +18,23 @@ module OpenAI
         attr_accessor created_at: Integer
 
-        attr_accessor file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts
+        attr_accessor file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts
 
         attr_accessor object: :"vector_store.files_batch"
 
-        attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status
+        attr_accessor status: OpenAI::VectorStores::VectorStoreFileBatch::status
 
         attr_accessor vector_store_id: String
 
         def initialize: (
           id: String,
           created_at: Integer,
-          file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts,
-          status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status,
+          file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts,
+          status: OpenAI::VectorStores::VectorStoreFileBatch::status,
           vector_store_id: String,
           ?object: :"vector_store.files_batch"
         ) -> void
 
-        def to_hash: -> OpenAI::Models::VectorStores::vector_store_file_batch
-
         type file_counts =
           {
             cancelled: Integer,
@@ -64,8 +62,6 @@ module OpenAI
             in_progress: Integer,
             total: Integer
           ) -> void
-
-          def to_hash: -> OpenAI::Models::VectorStores::VectorStoreFileBatch::file_counts
         end
 
         type status = :in_progress | :completed | :cancelled | :failed
@@ -78,7 +74,7 @@ module OpenAI
           CANCELLED: :cancelled
           FAILED: :failed
 
-          def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::status]
+          def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFileBatch::status]
         end
       end
     end
   end
diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
index 15ae080f..d9d9038a 100644
--- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
+++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs
@@ -18,8 +18,6 @@ module OpenAI
           deleted: bool,
           ?object: :"vector_store.file.deleted"
         ) -> void
-
-        def to_hash: -> OpenAI::Models::VectorStores::vector_store_file_deleted
       end
     end
   end
diff --git a/sig/openai/request_options.rbs b/sig/openai/request_options.rbs
index a3ac246f..0b21f3e7 100644
--- a/sig/openai/request_options.rbs
+++ b/sig/openai/request_options.rbs
@@ -13,7 +13,7 @@ module OpenAI
     }
 
   class RequestOptions < OpenAI::Internal::Type::BaseModel
-    def self.validate!: (self | ::Hash[Symbol, top] opts) -> void
+    def self.validate!: (OpenAI::request_opts opts) -> void
 
     attr_accessor idempotency_key: String?
diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 50dccae8..6877c0f6 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -10,7 +10,7 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response @@ -22,7 +22,7 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Audio::transcription_stream_event] diff --git a/sig/openai/resources/batches.rbs b/sig/openai/resources/batches.rbs index 338c2671..b5382fa1 100644 --- a/sig/openai/resources/batches.rbs +++ b/sig/openai/resources/batches.rbs @@ -7,23 +7,23 @@ module OpenAI input_file_id: String, ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Batch + ) -> OpenAI::Batch def retrieve: ( String batch_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Batch + ) -> OpenAI::Batch def list: ( ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Batch] + ) -> OpenAI::Internal::CursorPage[OpenAI::Batch] def cancel: ( String batch_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Batch + ) -> OpenAI::Batch def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/beta/assistants.rbs b/sig/openai/resources/beta/assistants.rbs index fa36413e..223b3916 100644 --- a/sig/openai/resources/beta/assistants.rbs +++ b/sig/openai/resources/beta/assistants.rbs @@ -11,16 +11,16 @@ module OpenAI ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?tool_resources: OpenAI::Models::Beta::AssistantCreateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], ?top_p: Float?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Assistant + ) -> OpenAI::Beta::Assistant def retrieve: ( String assistant_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Assistant + ) -> OpenAI::Beta::Assistant def update: ( String assistant_id, @@ -32,11 +32,11 @@ module OpenAI ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?tool_resources: OpenAI::Models::Beta::AssistantUpdateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool], ?top_p: Float?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Assistant + ) -> OpenAI::Beta::Assistant def list: ( ?after: String, @@ -44,12 +44,12 @@ module OpenAI ?limit: Integer, ?order: 
OpenAI::Models::Beta::AssistantListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Assistant] + ) -> OpenAI::Internal::CursorPage[OpenAI::Beta::Assistant] def delete: ( String assistant_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::AssistantDeleted + ) -> OpenAI::Beta::AssistantDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 2ef9c298..27b8eeaa 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -7,28 +7,28 @@ module OpenAI attr_reader messages: OpenAI::Resources::Beta::Threads::Messages def create: ( - ?messages: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message], + ?messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message], ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Thread + ) -> OpenAI::Beta::Thread def retrieve: ( String thread_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Thread + ) -> OpenAI::Beta::Thread def update: ( String thread_id, ?metadata: OpenAI::Models::metadata?, - ?tool_resources: OpenAI::Models::Beta::ThreadUpdateParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Thread + ) -> OpenAI::Beta::Thread def delete: ( String thread_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::ThreadDeleted + ) -> OpenAI::Beta::ThreadDeleted def create_and_run: ( assistant_id: String, @@ -40,14 +40,14 @@ module OpenAI ?parallel_tool_calls: bool, ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread, ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def stream_raw: ( assistant_id: String, @@ -59,12 +59,12 @@ module OpenAI ?parallel_tool_calls: bool, ?response_format: OpenAI::Models::Beta::assistant_response_format_option?, ?temperature: Float?, - ?thread: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, + ?thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread, ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, - ?tool_resources: OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources?, + ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> 
OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/beta/threads/messages.rbs b/sig/openai/resources/beta/threads/messages.rbs index 560d8eb7..8a02c24b 100644 --- a/sig/openai/resources/beta/threads/messages.rbs +++ b/sig/openai/resources/beta/threads/messages.rbs @@ -7,23 +7,23 @@ module OpenAI String thread_id, content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, - ?attachments: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment]?, + ?attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]?, ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Message + ) -> OpenAI::Beta::Threads::Message def retrieve: ( String message_id, thread_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Message + ) -> OpenAI::Beta::Threads::Message def update: ( String message_id, thread_id: String, ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Message + ) -> OpenAI::Beta::Threads::Message def list: ( String thread_id, @@ -33,13 +33,13 @@ module OpenAI ?order: OpenAI::Models::Beta::Threads::MessageListParams::order, ?run_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Message] + ) -> OpenAI::Internal::CursorPage[OpenAI::Beta::Threads::Message] def delete: ( String message_id, thread_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::MessageDeleted + ) -> OpenAI::Beta::Threads::MessageDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index df8265f2..38743701 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -10,7 +10,7 @@ module OpenAI assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?additional_instructions: String?, - ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, ?instructions: String?, ?max_completion_tokens: Integer?, ?max_prompt_tokens: Integer?, @@ -23,16 +23,16 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def create_stream_raw: ( String thread_id, assistant_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?additional_instructions: String?, - ?additional_messages: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage]?, + ?additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, ?instructions: String?, ?max_completion_tokens: Integer?, ?max_prompt_tokens: Integer?, @@ -45,7 +45,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, 
?top_p: Float?, - ?truncation_strategy: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] @@ -53,14 +53,14 @@ module OpenAI String run_id, thread_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def update: ( String run_id, thread_id: String, ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def list: ( String thread_id, @@ -69,25 +69,25 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Beta::Threads::RunListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Run] + ) -> OpenAI::Internal::CursorPage[OpenAI::Beta::Threads::Run] def cancel: ( String run_id, thread_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def submit_tool_outputs: ( String run_id, thread_id: String, - tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Run + ) -> OpenAI::Beta::Threads::Run def submit_tool_outputs_stream_raw: ( String run_id, thread_id: String, - tool_outputs: ::Array[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/beta/threads/runs/steps.rbs b/sig/openai/resources/beta/threads/runs/steps.rbs index 7cdcbff4..85af5cd8 100644 --- a/sig/openai/resources/beta/threads/runs/steps.rbs +++ b/sig/openai/resources/beta/threads/runs/steps.rbs @@ -10,7 +10,7 @@ module OpenAI run_id: String, ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Beta::Threads::Runs::RunStep + ) -> OpenAI::Beta::Threads::Runs::RunStep def list: ( String run_id, @@ -21,7 +21,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Beta::Threads::Runs::RunStep] + ) -> OpenAI::Internal::CursorPage[OpenAI::Beta::Threads::Runs::RunStep] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 0e8a88ef..3eb309a0 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -7,19 +7,19 @@ module OpenAI def create: ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, - ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, + ?audio: OpenAI::Chat::ChatCompletionAudioParam?, ?frequency_penalty: Float?, ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, - ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + ?functions: 
::Array[OpenAI::Chat::CompletionCreateParams::Function], ?logit_bias: ::Hash[Symbol, Integer]?, ?logprobs: bool?, ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, - ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, + ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, ?presence_penalty: Float?, ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, @@ -27,33 +27,33 @@ module OpenAI ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, ?store: bool?, - ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], ?top_logprobs: Integer?, ?top_p: Float?, ?user: String, - ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletion + ) -> OpenAI::Chat::ChatCompletion def stream_raw: ( messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], model: OpenAI::Models::Chat::CompletionCreateParams::model, - ?audio: OpenAI::Models::Chat::ChatCompletionAudioParam?, + ?audio: OpenAI::Chat::ChatCompletionAudioParam?, ?frequency_penalty: Float?, ?function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, - ?functions: ::Array[OpenAI::Models::Chat::CompletionCreateParams::Function], + ?functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function], ?logit_bias: ::Hash[Symbol, Integer]?, ?logprobs: bool?, ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, - ?prediction: OpenAI::Models::Chat::ChatCompletionPredictionContent?, + ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, ?presence_penalty: Float?, ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, @@ -61,27 +61,27 @@ module OpenAI ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, ?store: bool?, - ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - ?tools: ::Array[OpenAI::Models::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], ?top_logprobs: Integer?, ?top_p: Float?, ?user: String, - ?web_search_options: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, + ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts - ) -> 
OpenAI::Internal::Stream[OpenAI::Models::Chat::ChatCompletionChunk] + ) -> OpenAI::Internal::Stream[OpenAI::Chat::ChatCompletionChunk] def retrieve: ( String completion_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletion + ) -> OpenAI::Chat::ChatCompletion def update: ( String completion_id, metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletion + ) -> OpenAI::Chat::ChatCompletion def list: ( ?after: String, @@ -90,12 +90,12 @@ module OpenAI ?model: String, ?order: OpenAI::Models::Chat::CompletionListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletion] + ) -> OpenAI::Internal::CursorPage[OpenAI::Chat::ChatCompletion] def delete: ( String completion_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Chat::ChatCompletionDeleted + ) -> OpenAI::Chat::ChatCompletionDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/chat/completions/messages.rbs b/sig/openai/resources/chat/completions/messages.rbs index 6af4409f..29b994c0 100644 --- a/sig/openai/resources/chat/completions/messages.rbs +++ b/sig/openai/resources/chat/completions/messages.rbs @@ -9,7 +9,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::Chat::Completions::MessageListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Chat::ChatCompletionStoreMessage] + ) -> OpenAI::Internal::CursorPage[OpenAI::Chat::ChatCompletionStoreMessage] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/completions.rbs b/sig/openai/resources/completions.rbs index c266af82..6828d284 100644 --- a/sig/openai/resources/completions.rbs +++ b/sig/openai/resources/completions.rbs @@ -14,13 +14,13 @@ module OpenAI ?presence_penalty: Float?, ?seed: Integer?, ?stop: OpenAI::Models::CompletionCreateParams::stop?, - ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?suffix: String?, ?temperature: Float?, ?top_p: Float?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Completion + ) -> OpenAI::Completion def create_streaming: ( model: OpenAI::Models::CompletionCreateParams::model, @@ -35,13 +35,13 @@ module OpenAI ?presence_penalty: Float?, ?seed: Integer?, ?stop: OpenAI::Models::CompletionCreateParams::stop?, - ?stream_options: OpenAI::Models::Chat::ChatCompletionStreamOptions?, + ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?suffix: String?, ?temperature: Float?, ?top_p: Float?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::Stream[OpenAI::Models::Completion] + ) -> OpenAI::Internal::Stream[OpenAI::Completion] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/embeddings.rbs b/sig/openai/resources/embeddings.rbs index 3babf508..0f73d65c 100644 --- a/sig/openai/resources/embeddings.rbs +++ b/sig/openai/resources/embeddings.rbs @@ -8,7 +8,7 @@ module OpenAI ?encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::CreateEmbeddingResponse + ) -> OpenAI::CreateEmbeddingResponse def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/evals.rbs b/sig/openai/resources/evals.rbs index dd3d6cc5..b4ed4454 100644 --- 
a/sig/openai/resources/evals.rbs +++ b/sig/openai/resources/evals.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index b6cefa86..2af65729 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -5,12 +5,12 @@ module OpenAI file: Pathname | StringIO | IO | OpenAI::FilePart, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileObject + ) -> OpenAI::FileObject def retrieve: ( String file_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileObject + ) -> OpenAI::FileObject def list: ( ?after: String, @@ -18,12 +18,12 @@ module OpenAI ?order: OpenAI::Models::FileListParams::order, ?purpose: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FileObject] + ) -> OpenAI::Internal::CursorPage[OpenAI::FileObject] def delete: ( String file_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FileDeleted + ) -> OpenAI::FileDeleted def content: ( String file_id, diff --git a/sig/openai/resources/fine_tuning/jobs.rbs b/sig/openai/resources/fine_tuning/jobs.rbs index 4264a1e9..fe96137a 100644 --- a/sig/openai/resources/fine_tuning/jobs.rbs +++ b/sig/openai/resources/fine_tuning/jobs.rbs @@ -7,39 +7,39 @@ module OpenAI def create: ( model: OpenAI::Models::FineTuning::JobCreateParams::model, training_file: String, - ?hyperparameters: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, - ?integrations: ::Array[OpenAI::Models::FineTuning::JobCreateParams::Integration]?, + ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, + ?integrations: ::Array[OpenAI::FineTuning::JobCreateParams::Integration]?, ?metadata: OpenAI::Models::metadata?, - ?method_: OpenAI::Models::FineTuning::JobCreateParams::Method, + ?method_: OpenAI::FineTuning::JobCreateParams::Method, ?seed: Integer?, ?suffix: String?, ?validation_file: String?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + ) -> OpenAI::FineTuning::FineTuningJob def retrieve: ( String fine_tuning_job_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + ) -> OpenAI::FineTuning::FineTuningJob def list: ( ?after: String, ?limit: Integer, ?metadata: ::Hash[Symbol, String]?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJob] + ) -> OpenAI::Internal::CursorPage[OpenAI::FineTuning::FineTuningJob] def cancel: ( String fine_tuning_job_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::FineTuningJob + ) -> OpenAI::FineTuning::FineTuningJob def list_events: ( String fine_tuning_job_id, ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::FineTuningJobEvent] + ) -> OpenAI::Internal::CursorPage[OpenAI::FineTuning::FineTuningJobEvent] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs index 45bec94e..3770d993 100644 --- 
a/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs +++ b/sig/openai/resources/fine_tuning/jobs/checkpoints.rbs @@ -8,7 +8,7 @@ module OpenAI ?after: String, ?limit: Integer, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint] + ) -> OpenAI::Internal::CursorPage[OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 20b05ba5..387d37d0 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -9,7 +9,7 @@ module OpenAI ?size: OpenAI::Models::ImageCreateVariationParams::size?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + ) -> OpenAI::ImagesResponse def edit: ( image: OpenAI::Models::ImageEditParams::image, @@ -23,7 +23,7 @@ module OpenAI ?size: OpenAI::Models::ImageEditParams::size?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + ) -> OpenAI::ImagesResponse def generate: ( prompt: String, @@ -39,7 +39,7 @@ module OpenAI ?style: OpenAI::Models::ImageGenerateParams::style?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ImagesResponse + ) -> OpenAI::ImagesResponse def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/models.rbs b/sig/openai/resources/models.rbs index 042eaed1..dfe23787 100644 --- a/sig/openai/resources/models.rbs +++ b/sig/openai/resources/models.rbs @@ -4,16 +4,16 @@ module OpenAI def retrieve: ( String model, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Model + ) -> OpenAI::Model def list: ( ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::Page[OpenAI::Models::Model] + ) -> OpenAI::Internal::Page[OpenAI::Model] def delete: ( String model, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::ModelDeleted + ) -> OpenAI::ModelDeleted def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 40041a27..6cbfd4aa 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -12,18 +12,18 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, + ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Responses::Response + ) -> OpenAI::Responses::Response def stream_raw: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, @@ -34,11 +34,11 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, - ?reasoning: OpenAI::Models::Reasoning?, + ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, - ?text: OpenAI::Models::Responses::ResponseTextConfig, + ?text: 
OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float?, @@ -51,7 +51,7 @@ module OpenAI String response_id, ?include: ::Array[OpenAI::Models::Responses::response_includable], ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Responses::Response + ) -> OpenAI::Responses::Response def delete: ( String response_id, diff --git a/sig/openai/resources/uploads.rbs b/sig/openai/resources/uploads.rbs index c60f0054..50996546 100644 --- a/sig/openai/resources/uploads.rbs +++ b/sig/openai/resources/uploads.rbs @@ -9,19 +9,19 @@ module OpenAI mime_type: String, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + ) -> OpenAI::Upload def cancel: ( String upload_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + ) -> OpenAI::Upload def complete: ( String upload_id, part_ids: ::Array[String], ?md5: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Upload + ) -> OpenAI::Upload def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index f296f1ef..20aac99a 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -6,7 +6,7 @@ module OpenAI String upload_id, data: Pathname | StringIO | IO | OpenAI::FilePart, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::Uploads::UploadPart + ) -> OpenAI::Uploads::UploadPart def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index c60ff766..d717bd54 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -7,25 +7,25 @@ module OpenAI def create: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore + ) -> OpenAI::VectorStore def retrieve: ( String vector_store_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore + ) -> OpenAI::VectorStore def update: ( String vector_store_id, - ?expires_after: OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStore + ) -> OpenAI::VectorStore def list: ( ?after: String, @@ -33,19 +33,19 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStoreListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStore] + ) -> OpenAI::Internal::CursorPage[OpenAI::VectorStore] def delete: ( String vector_store_id, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStoreDeleted + ) -> OpenAI::VectorStoreDeleted def search: ( String vector_store_id, query: OpenAI::Models::VectorStoreSearchParams::query, ?filters: OpenAI::Models::VectorStoreSearchParams::filters, ?max_num_results: Integer, - ?ranking_options: OpenAI::Models::VectorStoreSearchParams::RankingOptions, + ?ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, 
?rewrite_query: bool, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Page[OpenAI::Models::VectorStoreSearchResponse] diff --git a/sig/openai/resources/vector_stores/file_batches.rbs b/sig/openai/resources/vector_stores/file_batches.rbs index 39606ed6..448f8ebb 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -5,22 +5,22 @@ module OpenAI def create: ( String vector_store_id, file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + ) -> OpenAI::VectorStores::VectorStoreFileBatch def retrieve: ( String batch_id, vector_store_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + ) -> OpenAI::VectorStores::VectorStoreFileBatch def cancel: ( String batch_id, vector_store_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileBatch + ) -> OpenAI::VectorStores::VectorStoreFileBatch def list_files: ( String batch_id, @@ -31,7 +31,7 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + ) -> OpenAI::Internal::CursorPage[OpenAI::VectorStores::VectorStoreFile] def initialize: (client: OpenAI::Client) -> void end diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index 3669f6a7..2a650189 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -5,23 +5,23 @@ module OpenAI def create: ( String vector_store_id, file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + ) -> OpenAI::VectorStores::VectorStoreFile def retrieve: ( String file_id, vector_store_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + ) -> OpenAI::VectorStores::VectorStoreFile def update: ( String file_id, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFile + ) -> OpenAI::VectorStores::VectorStoreFile def list: ( String vector_store_id, @@ -31,13 +31,13 @@ module OpenAI ?limit: Integer, ?order: OpenAI::Models::VectorStores::FileListParams::order, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::VectorStores::VectorStoreFile] + ) -> OpenAI::Internal::CursorPage[OpenAI::VectorStores::VectorStoreFile] def delete: ( String file_id, vector_store_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::VectorStores::VectorStoreFileDeleted + ) -> OpenAI::VectorStores::VectorStoreFileDeleted def content: ( String file_id, diff 
--git a/test/openai/resources/audio/transcriptions_test.rb b/test/openai/resources/audio/transcriptions_test.rb index b074f1ba..f4f5b94b 100644 --- a/test/openai/resources/audio/transcriptions_test.rb +++ b/test/openai/resources/audio/transcriptions_test.rb @@ -12,8 +12,8 @@ def test_create_required_params assert_pattern do case response - in OpenAI::Models::Audio::Transcription - in OpenAI::Models::Audio::TranscriptionVerbose + in OpenAI::Audio::Transcription + in OpenAI::Audio::TranscriptionVerbose end end end diff --git a/test/openai/resources/audio/translations_test.rb b/test/openai/resources/audio/translations_test.rb index c6a64ad3..feb7ccc7 100644 --- a/test/openai/resources/audio/translations_test.rb +++ b/test/openai/resources/audio/translations_test.rb @@ -12,8 +12,8 @@ def test_create_required_params assert_pattern do case response - in OpenAI::Models::Audio::Translation - in OpenAI::Models::Audio::TranslationVerbose + in OpenAI::Audio::Translation + in OpenAI::Audio::TranslationVerbose end end end diff --git a/test/openai/resources/batches_test.rb b/test/openai/resources/batches_test.rb index 8ba0d03e..37bc1861 100644 --- a/test/openai/resources/batches_test.rb +++ b/test/openai/resources/batches_test.rb @@ -12,7 +12,7 @@ def test_create_required_params ) assert_pattern do - response => OpenAI::Models::Batch + response => OpenAI::Batch end assert_pattern do @@ -23,12 +23,12 @@ def test_create_required_params endpoint: String, input_file_id: String, object: Symbol, - status: OpenAI::Models::Batch::Status, + status: OpenAI::Batch::Status, cancelled_at: Integer | nil, cancelling_at: Integer | nil, completed_at: Integer | nil, error_file_id: String | nil, - errors: OpenAI::Models::Batch::Errors | nil, + errors: OpenAI::Batch::Errors | nil, expired_at: Integer | nil, expires_at: Integer | nil, failed_at: Integer | nil, @@ -36,7 +36,7 @@ def test_create_required_params in_progress_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, output_file_id: String | nil, - request_counts: OpenAI::Models::BatchRequestCounts | nil + request_counts: OpenAI::BatchRequestCounts | nil } end end @@ -45,7 +45,7 @@ def test_retrieve response = @openai.batches.retrieve("batch_id") assert_pattern do - response => OpenAI::Models::Batch + response => OpenAI::Batch end assert_pattern do @@ -56,12 +56,12 @@ def test_retrieve endpoint: String, input_file_id: String, object: Symbol, - status: OpenAI::Models::Batch::Status, + status: OpenAI::Batch::Status, cancelled_at: Integer | nil, cancelling_at: Integer | nil, completed_at: Integer | nil, error_file_id: String | nil, - errors: OpenAI::Models::Batch::Errors | nil, + errors: OpenAI::Batch::Errors | nil, expired_at: Integer | nil, expires_at: Integer | nil, failed_at: Integer | nil, @@ -69,7 +69,7 @@ def test_retrieve in_progress_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, output_file_id: String | nil, - request_counts: OpenAI::Models::BatchRequestCounts | nil + request_counts: OpenAI::BatchRequestCounts | nil } end end @@ -85,7 +85,7 @@ def test_list return if row.nil? 
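
The test assertions in these hunks rely on Ruby's structural pattern matching: "expr => PATTERN" raises NoMatchingPatternError on mismatch, and "case/in" matches by class. A minimal, self-contained sketch of both idioms, with a hypothetical Batch struct standing in for the SDK class of the same name:

    # Plain Ruby, no SDK required; Batch is a stand-in, not OpenAI::Batch.
    Batch = Struct.new(:id, :status, keyword_init: true)

    response = Batch.new(id: "batch_123", status: :completed)

    case response
    in Batch then puts "class pattern matched"
    end

    # Rightward assignment: destructure and bind, or raise on mismatch.
    response => { id: String => id, status: Symbol }
    puts "deconstructed id: #{id}"
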
assert_pattern do - row => OpenAI::Models::Batch + row => OpenAI::Batch end assert_pattern do @@ -96,12 +96,12 @@ def test_list endpoint: String, input_file_id: String, object: Symbol, - status: OpenAI::Models::Batch::Status, + status: OpenAI::Batch::Status, cancelled_at: Integer | nil, cancelling_at: Integer | nil, completed_at: Integer | nil, error_file_id: String | nil, - errors: OpenAI::Models::Batch::Errors | nil, + errors: OpenAI::Batch::Errors | nil, expired_at: Integer | nil, expires_at: Integer | nil, failed_at: Integer | nil, @@ -109,7 +109,7 @@ def test_list in_progress_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, output_file_id: String | nil, - request_counts: OpenAI::Models::BatchRequestCounts | nil + request_counts: OpenAI::BatchRequestCounts | nil } end end @@ -118,7 +118,7 @@ def test_cancel response = @openai.batches.cancel("batch_id") assert_pattern do - response => OpenAI::Models::Batch + response => OpenAI::Batch end assert_pattern do @@ -129,12 +129,12 @@ def test_cancel endpoint: String, input_file_id: String, object: Symbol, - status: OpenAI::Models::Batch::Status, + status: OpenAI::Batch::Status, cancelled_at: Integer | nil, cancelling_at: Integer | nil, completed_at: Integer | nil, error_file_id: String | nil, - errors: OpenAI::Models::Batch::Errors | nil, + errors: OpenAI::Batch::Errors | nil, expired_at: Integer | nil, expires_at: Integer | nil, failed_at: Integer | nil, @@ -142,7 +142,7 @@ def test_cancel in_progress_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, output_file_id: String | nil, - request_counts: OpenAI::Models::BatchRequestCounts | nil + request_counts: OpenAI::BatchRequestCounts | nil } end end diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index b76d7856..ea241550 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -7,7 +7,7 @@ def test_create_required_params response = @openai.beta.assistants.create(model: :"gpt-4.1") assert_pattern do - response => OpenAI::Models::Beta::Assistant + response => OpenAI::Beta::Assistant end assert_pattern do @@ -20,10 +20,10 @@ def test_create_required_params model: String, name: String | nil, object: Symbol, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, temperature: Float | nil, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil, + tool_resources: OpenAI::Beta::Assistant::ToolResources | nil, top_p: Float | nil } end @@ -33,7 +33,7 @@ def test_retrieve response = @openai.beta.assistants.retrieve("assistant_id") assert_pattern do - response => OpenAI::Models::Beta::Assistant + response => OpenAI::Beta::Assistant end assert_pattern do @@ -46,10 +46,10 @@ def test_retrieve model: String, name: String | nil, object: Symbol, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, temperature: Float | nil, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil, + 
tool_resources: OpenAI::Beta::Assistant::ToolResources | nil, top_p: Float | nil } end @@ -59,7 +59,7 @@ def test_update response = @openai.beta.assistants.update("assistant_id") assert_pattern do - response => OpenAI::Models::Beta::Assistant + response => OpenAI::Beta::Assistant end assert_pattern do @@ -72,10 +72,10 @@ def test_update model: String, name: String | nil, object: Symbol, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, temperature: Float | nil, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil, + tool_resources: OpenAI::Beta::Assistant::ToolResources | nil, top_p: Float | nil } end @@ -92,7 +92,7 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::Beta::Assistant + row => OpenAI::Beta::Assistant end assert_pattern do @@ -105,10 +105,10 @@ def test_list model: String, name: String | nil, object: Symbol, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, temperature: Float | nil, - tool_resources: OpenAI::Models::Beta::Assistant::ToolResources | nil, + tool_resources: OpenAI::Beta::Assistant::ToolResources | nil, top_p: Float | nil } end @@ -118,7 +118,7 @@ def test_delete response = @openai.beta.assistants.delete("assistant_id") assert_pattern do - response => OpenAI::Models::Beta::AssistantDeleted + response => OpenAI::Beta::AssistantDeleted end assert_pattern do diff --git a/test/openai/resources/beta/threads/messages_test.rb b/test/openai/resources/beta/threads/messages_test.rb index e9d4d46c..c2904ac0 100644 --- a/test/openai/resources/beta/threads/messages_test.rb +++ b/test/openai/resources/beta/threads/messages_test.rb @@ -7,24 +7,24 @@ def test_create_required_params response = @openai.beta.threads.messages.create("thread_id", content: "string", role: :user) assert_pattern do - response => OpenAI::Models::Beta::Threads::Message + response => OpenAI::Beta::Threads::Message end assert_pattern do response => { id: String, assistant_id: String | nil, - attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil, + attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment]) | nil, completed_at: Integer | nil, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]), + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]), created_at: Integer, incomplete_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role, + role: OpenAI::Beta::Threads::Message::Role, run_id: String | nil, - status: OpenAI::Models::Beta::Threads::Message::Status, + status: OpenAI::Beta::Threads::Message::Status, thread_id: String } end @@ -34,24 +34,24 @@ def test_retrieve_required_params response = 
@openai.beta.threads.messages.retrieve("message_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Message + response => OpenAI::Beta::Threads::Message end assert_pattern do response => { id: String, assistant_id: String | nil, - attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil, + attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment]) | nil, completed_at: Integer | nil, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]), + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]), created_at: Integer, incomplete_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role, + role: OpenAI::Beta::Threads::Message::Role, run_id: String | nil, - status: OpenAI::Models::Beta::Threads::Message::Status, + status: OpenAI::Beta::Threads::Message::Status, thread_id: String } end @@ -61,24 +61,24 @@ def test_update_required_params response = @openai.beta.threads.messages.update("message_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Message + response => OpenAI::Beta::Threads::Message end assert_pattern do response => { id: String, assistant_id: String | nil, - attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil, + attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment]) | nil, completed_at: Integer | nil, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]), + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]), created_at: Integer, incomplete_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role, + role: OpenAI::Beta::Threads::Message::Role, run_id: String | nil, - status: OpenAI::Models::Beta::Threads::Message::Status, + status: OpenAI::Beta::Threads::Message::Status, thread_id: String } end @@ -95,24 +95,24 @@ def test_list return if row.nil? 
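
The list signatures above all return OpenAI::Internal::CursorPage[...]. As a hedged sketch of how such a page is typically consumed (auto_paging_each is the usual Stainless page method, assumed here rather than shown anywhere in this patch):

    require "openai"

    # Assumes a configured client; the auto_paging_each call is an
    # assumption based on the usual Stainless page API, not this patch.
    openai = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    page = openai.beta.threads.messages.list("thread_id", limit: 20)
    page.auto_paging_each do |message|
      # Elements carry the page's type parameter, i.e.
      # OpenAI::Beta::Threads::Message after this rename.
      puts message.id
    end
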
assert_pattern do - row => OpenAI::Models::Beta::Threads::Message + row => OpenAI::Beta::Threads::Message end assert_pattern do row => { id: String, assistant_id: String | nil, - attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Message::Attachment]) | nil, + attachments: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment]) | nil, completed_at: Integer | nil, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContent]), + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]), created_at: Integer, incomplete_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Message::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - role: OpenAI::Models::Beta::Threads::Message::Role, + role: OpenAI::Beta::Threads::Message::Role, run_id: String | nil, - status: OpenAI::Models::Beta::Threads::Message::Status, + status: OpenAI::Beta::Threads::Message::Status, thread_id: String } end @@ -122,7 +122,7 @@ def test_delete_required_params response = @openai.beta.threads.messages.delete("message_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::MessageDeleted + response => OpenAI::Beta::Threads::MessageDeleted end assert_pattern do diff --git a/test/openai/resources/beta/threads/runs/steps_test.rb b/test/openai/resources/beta/threads/runs/steps_test.rb index 200fc186..bb9c7355 100644 --- a/test/openai/resources/beta/threads/runs/steps_test.rb +++ b/test/openai/resources/beta/threads/runs/steps_test.rb @@ -7,7 +7,7 @@ def test_retrieve_required_params response = @openai.beta.threads.runs.steps.retrieve("step_id", thread_id: "thread_id", run_id: "run_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Runs::RunStep + response => OpenAI::Beta::Threads::Runs::RunStep end assert_pattern do @@ -19,15 +19,15 @@ def test_retrieve_required_params created_at: Integer, expired_at: Integer | nil, failed_at: Integer | nil, - last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError | nil, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status, - step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::StepDetails, + status: OpenAI::Beta::Threads::Runs::RunStep::Status, + step_details: OpenAI::Beta::Threads::Runs::RunStep::StepDetails, thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type, - usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage | nil + type: OpenAI::Beta::Threads::Runs::RunStep::Type, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage | nil } end end @@ -43,7 +43,7 @@ def test_list_required_params return if row.nil? 
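
In the RBS signatures being edited here, a leading ? marks an optional keyword argument, while unprefixed keywords such as assistant_id: String are required. A plain-Ruby illustration of the calling shape those signatures describe (method and argument names are illustrative only):

    # Mirrors an RBS shape like
    #   (String thread_id, assistant_id: String, ?instructions: String?)
    def create_run(thread_id, assistant_id:, instructions: nil)
      { thread_id:, assistant_id:, instructions: }
    end

    p create_run("thread_id", assistant_id: "assistant_id")
    p create_run("thread_id", assistant_id: "assistant_id", instructions: "be terse")
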
assert_pattern do - row => OpenAI::Models::Beta::Threads::Runs::RunStep + row => OpenAI::Beta::Threads::Runs::RunStep end assert_pattern do @@ -55,15 +55,15 @@ def test_list_required_params created_at: Integer, expired_at: Integer | nil, failed_at: Integer | nil, - last_error: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError | nil, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, run_id: String, - status: OpenAI::Models::Beta::Threads::Runs::RunStep::Status, - step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::StepDetails, + status: OpenAI::Beta::Threads::Runs::RunStep::Status, + step_details: OpenAI::Beta::Threads::Runs::RunStep::StepDetails, thread_id: String, - type: OpenAI::Models::Beta::Threads::Runs::RunStep::Type, - usage: OpenAI::Models::Beta::Threads::Runs::RunStep::Usage | nil + type: OpenAI::Beta::Threads::Runs::RunStep::Type, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage | nil } end end diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index 2ada16a3..bed1e829 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -7,7 +7,7 @@ def test_create_required_params response = @openai.beta.threads.runs.create("thread_id", assistant_id: "assistant_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -19,24 +19,24 @@ def test_create_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } @@ -47,7 +47,7 @@ def test_retrieve_required_params response = @openai.beta.threads.runs.retrieve("run_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -59,24 +59,24 @@ def 
test_retrieve_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } @@ -87,7 +87,7 @@ def test_update_required_params response = @openai.beta.threads.runs.update("run_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -99,24 +99,24 @@ def test_update_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: 
OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } @@ -134,7 +134,7 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::Beta::Threads::Run + row => OpenAI::Beta::Threads::Run end assert_pattern do @@ -146,24 +146,24 @@ def test_list created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } @@ -174,7 +174,7 @@ def test_cancel_required_params response = @openai.beta.threads.runs.cancel("run_id", thread_id: "thread_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -186,24 +186,24 @@ def test_cancel_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: 
OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } @@ -215,7 +215,7 @@ def test_submit_tool_outputs_required_params @openai.beta.threads.runs.submit_tool_outputs("run_id", thread_id: "thread_id", tool_outputs: [{}]) assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -227,24 +227,24 @@ def test_submit_tool_outputs_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index da2960dc..903a5185 100644 --- a/test/openai/resources/beta/threads_test.rb +++ b/test/openai/resources/beta/threads_test.rb @@ -7,7 +7,7 @@ def test_create response = @openai.beta.threads.create assert_pattern do - response => OpenAI::Models::Beta::Thread + response => OpenAI::Beta::Thread end assert_pattern do @@ -16,7 +16,7 @@ def test_create created_at: Integer, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil + tool_resources: OpenAI::Beta::Thread::ToolResources | nil } end end @@ -25,7 +25,7 @@ def test_retrieve response = @openai.beta.threads.retrieve("thread_id") assert_pattern do - response => OpenAI::Models::Beta::Thread + response => OpenAI::Beta::Thread end assert_pattern do @@ -34,7 +34,7 @@ def test_retrieve created_at: Integer, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil + tool_resources: OpenAI::Beta::Thread::ToolResources | nil } end end @@ -43,7 
+43,7 @@ def test_update response = @openai.beta.threads.update("thread_id") assert_pattern do - response => OpenAI::Models::Beta::Thread + response => OpenAI::Beta::Thread end assert_pattern do @@ -52,7 +52,7 @@ def test_update created_at: Integer, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, object: Symbol, - tool_resources: OpenAI::Models::Beta::Thread::ToolResources | nil + tool_resources: OpenAI::Beta::Thread::ToolResources | nil } end end @@ -61,7 +61,7 @@ def test_delete response = @openai.beta.threads.delete("thread_id") assert_pattern do - response => OpenAI::Models::Beta::ThreadDeleted + response => OpenAI::Beta::ThreadDeleted end assert_pattern do @@ -77,7 +77,7 @@ def test_create_and_run_required_params response = @openai.beta.threads.create_and_run(assistant_id: "assistant_id") assert_pattern do - response => OpenAI::Models::Beta::Threads::Run + response => OpenAI::Beta::Threads::Run end assert_pattern do @@ -89,24 +89,24 @@ def test_create_and_run_required_params created_at: Integer, expires_at: Integer | nil, failed_at: Integer | nil, - incomplete_details: OpenAI::Models::Beta::Threads::Run::IncompleteDetails | nil, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails | nil, instructions: String, - last_error: OpenAI::Models::Beta::Threads::Run::LastError | nil, + last_error: OpenAI::Beta::Threads::Run::LastError | nil, max_completion_tokens: Integer | nil, max_prompt_tokens: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, object: Symbol, parallel_tool_calls: OpenAI::Internal::Type::Boolean, - required_action: OpenAI::Models::Beta::Threads::Run::RequiredAction | nil, - response_format: OpenAI::Models::Beta::AssistantResponseFormatOption | nil, + required_action: OpenAI::Beta::Threads::Run::RequiredAction | nil, + response_format: OpenAI::Beta::AssistantResponseFormatOption | nil, started_at: Integer | nil, - status: OpenAI::Models::Beta::Threads::RunStatus, + status: OpenAI::Beta::Threads::RunStatus, thread_id: String, - tool_choice: OpenAI::Models::Beta::AssistantToolChoiceOption | nil, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool]), - truncation_strategy: OpenAI::Models::Beta::Threads::Run::TruncationStrategy | nil, - usage: OpenAI::Models::Beta::Threads::Run::Usage | nil, + tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil } diff --git a/test/openai/resources/chat/completions/messages_test.rb b/test/openai/resources/chat/completions/messages_test.rb index 1b68a1b0..99f3a865 100644 --- a/test/openai/resources/chat/completions/messages_test.rb +++ b/test/openai/resources/chat/completions/messages_test.rb @@ -14,7 +14,7 @@ def test_list return if row.nil? 
assert_pattern do - row => OpenAI::Models::Chat::ChatCompletionStoreMessage + row => OpenAI::Chat::ChatCompletionStoreMessage end end end diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index f73285f4..2bb2db4b 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -8,19 +8,19 @@ def test_create_required_params @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") assert_pattern do - response => OpenAI::Models::Chat::ChatCompletion + response => OpenAI::Chat::ChatCompletion end assert_pattern do response => { id: String, - choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]), + choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice]), created: Integer, model: String, object: Symbol, - service_tier: OpenAI::Models::Chat::ChatCompletion::ServiceTier | nil, + service_tier: OpenAI::Chat::ChatCompletion::ServiceTier | nil, system_fingerprint: String | nil, - usage: OpenAI::Models::CompletionUsage | nil + usage: OpenAI::CompletionUsage | nil } end end @@ -29,19 +29,19 @@ def test_retrieve response = @openai.chat.completions.retrieve("completion_id") assert_pattern do - response => OpenAI::Models::Chat::ChatCompletion + response => OpenAI::Chat::ChatCompletion end assert_pattern do response => { id: String, - choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]), + choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice]), created: Integer, model: String, object: Symbol, - service_tier: OpenAI::Models::Chat::ChatCompletion::ServiceTier | nil, + service_tier: OpenAI::Chat::ChatCompletion::ServiceTier | nil, system_fingerprint: String | nil, - usage: OpenAI::Models::CompletionUsage | nil + usage: OpenAI::CompletionUsage | nil } end end @@ -50,19 +50,19 @@ def test_update_required_params response = @openai.chat.completions.update("completion_id", metadata: {foo: "string"}) assert_pattern do - response => OpenAI::Models::Chat::ChatCompletion + response => OpenAI::Chat::ChatCompletion end assert_pattern do response => { id: String, - choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]), + choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice]), created: Integer, model: String, object: Symbol, - service_tier: OpenAI::Models::Chat::ChatCompletion::ServiceTier | nil, + service_tier: OpenAI::Chat::ChatCompletion::ServiceTier | nil, system_fingerprint: String | nil, - usage: OpenAI::Models::CompletionUsage | nil + usage: OpenAI::CompletionUsage | nil } end end @@ -78,19 +78,19 @@ def test_list return if row.nil? 
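
The pinned patterns in these assertions, written ^(expr), evaluate expr and then match with ===, so any object implementing === can act as a matcher. A self-contained sketch with a hypothetical ArrayOf standing in for OpenAI::Internal::Type::ArrayOf:

    # ArrayOf here is illustrative, not the SDK's implementation.
    ArrayOf = Struct.new(:type) do
      def ===(other)
        other.is_a?(Array) && other.all?(type)
      end
    end

    payload = { id: "cmpl_1", choices: [1, 2, 3] }

    payload => { id: String, choices: ^(ArrayOf.new(Integer)) }
    puts "choices matched via ArrayOf#==="
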
assert_pattern do - row => OpenAI::Models::Chat::ChatCompletion + row => OpenAI::Chat::ChatCompletion end assert_pattern do row => { id: String, - choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletion::Choice]), + choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice]), created: Integer, model: String, object: Symbol, - service_tier: OpenAI::Models::Chat::ChatCompletion::ServiceTier | nil, + service_tier: OpenAI::Chat::ChatCompletion::ServiceTier | nil, system_fingerprint: String | nil, - usage: OpenAI::Models::CompletionUsage | nil + usage: OpenAI::CompletionUsage | nil } end end @@ -99,7 +99,7 @@ def test_delete response = @openai.chat.completions.delete("completion_id") assert_pattern do - response => OpenAI::Models::Chat::ChatCompletionDeleted + response => OpenAI::Chat::ChatCompletionDeleted end assert_pattern do diff --git a/test/openai/resources/completions_test.rb b/test/openai/resources/completions_test.rb index e37c502a..349c682e 100644 --- a/test/openai/resources/completions_test.rb +++ b/test/openai/resources/completions_test.rb @@ -7,18 +7,18 @@ def test_create_required_params response = @openai.completions.create(model: :"gpt-3.5-turbo-instruct", prompt: "This is a test.") assert_pattern do - response => OpenAI::Models::Completion + response => OpenAI::Completion end assert_pattern do response => { id: String, - choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::CompletionChoice]), + choices: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::CompletionChoice]), created: Integer, model: String, object: Symbol, system_fingerprint: String | nil, - usage: OpenAI::Models::CompletionUsage | nil + usage: OpenAI::CompletionUsage | nil } end end diff --git a/test/openai/resources/embeddings_test.rb b/test/openai/resources/embeddings_test.rb index a280c031..b5539280 100644 --- a/test/openai/resources/embeddings_test.rb +++ b/test/openai/resources/embeddings_test.rb @@ -11,15 +11,15 @@ def test_create_required_params ) assert_pattern do - response => OpenAI::Models::CreateEmbeddingResponse + response => OpenAI::CreateEmbeddingResponse end assert_pattern do response => { - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Embedding]), + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Embedding]), model: String, object: Symbol, - usage: OpenAI::Models::CreateEmbeddingResponse::Usage + usage: OpenAI::CreateEmbeddingResponse::Usage } end end diff --git a/test/openai/resources/evals/runs_test.rb b/test/openai/resources/evals/runs_test.rb index d55ad6a8..cb839c51 100644 --- a/test/openai/resources/evals/runs_test.rb +++ b/test/openai/resources/evals/runs_test.rb @@ -19,7 +19,7 @@ def test_create_required_params id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCreateResponse::DataSource, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, @@ -46,7 +46,7 @@ def test_retrieve_required_params id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, @@ -80,7 +80,7 @@ def test_list id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunListResponse::DataSource, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, 
metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, @@ -123,7 +123,7 @@ def test_cancel_required_params id: String, created_at: Integer, data_source: OpenAI::Models::Evals::RunCancelResponse::DataSource, - error: OpenAI::Models::Evals::EvalAPIError, + error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: String, diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index 9fceee6a..de03395e 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -7,7 +7,7 @@ def test_create_required_params response = @openai.files.create(file: Pathname(__FILE__), purpose: :assistants) assert_pattern do - response => OpenAI::Models::FileObject + response => OpenAI::FileObject end assert_pattern do @@ -17,8 +17,8 @@ def test_create_required_params created_at: Integer, filename: String, object: Symbol, - purpose: OpenAI::Models::FileObject::Purpose, - status: OpenAI::Models::FileObject::Status, + purpose: OpenAI::FileObject::Purpose, + status: OpenAI::FileObject::Status, expires_at: Integer | nil, status_details: String | nil } @@ -29,7 +29,7 @@ def test_retrieve response = @openai.files.retrieve("file_id") assert_pattern do - response => OpenAI::Models::FileObject + response => OpenAI::FileObject end assert_pattern do @@ -39,8 +39,8 @@ def test_retrieve created_at: Integer, filename: String, object: Symbol, - purpose: OpenAI::Models::FileObject::Purpose, - status: OpenAI::Models::FileObject::Status, + purpose: OpenAI::FileObject::Purpose, + status: OpenAI::FileObject::Status, expires_at: Integer | nil, status_details: String | nil } @@ -58,7 +58,7 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::FileObject + row => OpenAI::FileObject end assert_pattern do @@ -68,8 +68,8 @@ def test_list created_at: Integer, filename: String, object: Symbol, - purpose: OpenAI::Models::FileObject::Purpose, - status: OpenAI::Models::FileObject::Status, + purpose: OpenAI::FileObject::Purpose, + status: OpenAI::FileObject::Status, expires_at: Integer | nil, status_details: String | nil } @@ -80,7 +80,7 @@ def test_delete response = @openai.files.delete("file_id") assert_pattern do - response => OpenAI::Models::FileDeleted + response => OpenAI::FileDeleted end assert_pattern do diff --git a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb index 08154e71..ab4136ce 100644 --- a/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb +++ b/test/openai/resources/fine_tuning/jobs/checkpoints_test.rb @@ -14,7 +14,7 @@ def test_list return if row.nil? 
assert_pattern do - row => OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint + row => OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint end assert_pattern do @@ -23,7 +23,7 @@ def test_list created_at: Integer, fine_tuned_model_checkpoint: String, fine_tuning_job_id: String, - metrics: OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, object: Symbol, step_number: Integer } diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb index 9f164929..188a79e7 100644 --- a/test/openai/resources/fine_tuning/jobs_test.rb +++ b/test/openai/resources/fine_tuning/jobs_test.rb @@ -7,30 +7,30 @@ def test_create_required_params response = @openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123") assert_pattern do - response => OpenAI::Models::FineTuning::FineTuningJob + response => OpenAI::FineTuning::FineTuningJob end assert_pattern do response => { id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error | nil, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, fine_tuned_model: String | nil, finished_at: Integer | nil, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, object: Symbol, organization_id: String, result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status, + status: OpenAI::FineTuning::FineTuningJob::Status, trained_tokens: Integer | nil, training_file: String, validation_file: String | nil, estimated_finish: Integer | nil, - integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil + method_: OpenAI::FineTuning::FineTuningJob::Method | nil } end end @@ -39,30 +39,30 @@ def test_retrieve response = @openai.fine_tuning.jobs.retrieve("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::Models::FineTuning::FineTuningJob + response => OpenAI::FineTuning::FineTuningJob end assert_pattern do response => { id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error | nil, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, fine_tuned_model: String | nil, finished_at: Integer | nil, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, object: Symbol, organization_id: String, result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status, + status: OpenAI::FineTuning::FineTuningJob::Status, trained_tokens: Integer | nil, training_file: String, validation_file: String | nil, estimated_finish: Integer | nil, - integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil + 
method_: OpenAI::FineTuning::FineTuningJob::Method | nil } end end @@ -78,30 +78,30 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::FineTuning::FineTuningJob + row => OpenAI::FineTuning::FineTuningJob end assert_pattern do row => { id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error | nil, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, fine_tuned_model: String | nil, finished_at: Integer | nil, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, object: Symbol, organization_id: String, result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status, + status: OpenAI::FineTuning::FineTuningJob::Status, trained_tokens: Integer | nil, training_file: String, validation_file: String | nil, estimated_finish: Integer | nil, - integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil + method_: OpenAI::FineTuning::FineTuningJob::Method | nil } end end @@ -110,30 +110,30 @@ def test_cancel response = @openai.fine_tuning.jobs.cancel("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::Models::FineTuning::FineTuningJob + response => OpenAI::FineTuning::FineTuningJob end assert_pattern do response => { id: String, created_at: Integer, - error: OpenAI::Models::FineTuning::FineTuningJob::Error | nil, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, fine_tuned_model: String | nil, finished_at: Integer | nil, - hyperparameters: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, model: String, object: Symbol, organization_id: String, result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), seed: Integer, - status: OpenAI::Models::FineTuning::FineTuningJob::Status, + status: OpenAI::FineTuning::FineTuningJob::Status, trained_tokens: Integer | nil, training_file: String, validation_file: String | nil, estimated_finish: Integer | nil, - integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - method_: OpenAI::Models::FineTuning::FineTuningJob::Method | nil + method_: OpenAI::FineTuning::FineTuningJob::Method | nil } end end @@ -149,18 +149,18 @@ def test_list_events return if row.nil? 
assert_pattern do - row => OpenAI::Models::FineTuning::FineTuningJobEvent + row => OpenAI::FineTuning::FineTuningJobEvent end assert_pattern do row => { id: String, created_at: Integer, - level: OpenAI::Models::FineTuning::FineTuningJobEvent::Level, + level: OpenAI::FineTuning::FineTuningJobEvent::Level, message: String, object: Symbol, data: OpenAI::Internal::Type::Unknown | nil, - type: OpenAI::Models::FineTuning::FineTuningJobEvent::Type | nil + type: OpenAI::FineTuning::FineTuningJobEvent::Type | nil } end end diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb index ccfee747..6d6dd917 100644 --- a/test/openai/resources/images_test.rb +++ b/test/openai/resources/images_test.rb @@ -7,14 +7,14 @@ def test_create_variation_required_params response = @openai.images.create_variation(image: Pathname(__FILE__)) assert_pattern do - response => OpenAI::Models::ImagesResponse + response => OpenAI::ImagesResponse end assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, - usage: OpenAI::Models::ImagesResponse::Usage | nil + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + usage: OpenAI::ImagesResponse::Usage | nil } end end @@ -24,14 +24,14 @@ def test_edit_required_params @openai.images.edit(image: Pathname(__FILE__), prompt: "A cute baby sea otter wearing a beret") assert_pattern do - response => OpenAI::Models::ImagesResponse + response => OpenAI::ImagesResponse end assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, - usage: OpenAI::Models::ImagesResponse::Usage | nil + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + usage: OpenAI::ImagesResponse::Usage | nil } end end @@ -40,14 +40,14 @@ def test_generate_required_params response = @openai.images.generate(prompt: "A cute baby sea otter") assert_pattern do - response => OpenAI::Models::ImagesResponse + response => OpenAI::ImagesResponse end assert_pattern do response => { created: Integer, - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image]) | nil, - usage: OpenAI::Models::ImagesResponse::Usage | nil + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + usage: OpenAI::ImagesResponse::Usage | nil } end end diff --git a/test/openai/resources/models_test.rb b/test/openai/resources/models_test.rb index 291a9a89..1a9ae880 100644 --- a/test/openai/resources/models_test.rb +++ b/test/openai/resources/models_test.rb @@ -7,7 +7,7 @@ def test_retrieve response = @openai.models.retrieve("gpt-4o-mini") assert_pattern do - response => OpenAI::Models::Model + response => OpenAI::Model end assert_pattern do @@ -31,7 +31,7 @@ def test_list return if row.nil? 
assert_pattern do - row => OpenAI::Models::Model + row => OpenAI::Model end assert_pattern do @@ -48,7 +48,7 @@ def test_delete response = @openai.models.delete("ft:gpt-4o-mini:acemeco:suffix:abc123") assert_pattern do - response => OpenAI::Models::ModelDeleted + response => OpenAI::ModelDeleted end assert_pattern do diff --git a/test/openai/resources/moderations_test.rb b/test/openai/resources/moderations_test.rb index d77f7d0b..8e1ce626 100644 --- a/test/openai/resources/moderations_test.rb +++ b/test/openai/resources/moderations_test.rb @@ -14,7 +14,7 @@ def test_create_required_params response => { id: String, model: String, - results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation]) + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Moderation]) } end end diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 705de078..3c32f365 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -14,19 +14,19 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::Responses::ResponseItem + row => OpenAI::Responses::ResponseItem end assert_pattern do case row - in OpenAI::Models::Responses::ResponseInputMessageItem - in OpenAI::Models::Responses::ResponseOutputMessage - in OpenAI::Models::Responses::ResponseFileSearchToolCall - in OpenAI::Models::Responses::ResponseComputerToolCall - in OpenAI::Models::Responses::ResponseComputerToolCallOutputItem - in OpenAI::Models::Responses::ResponseFunctionWebSearch - in OpenAI::Models::Responses::ResponseFunctionToolCallItem - in OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem + in OpenAI::Responses::ResponseInputMessageItem + in OpenAI::Responses::ResponseOutputMessage + in OpenAI::Responses::ResponseFileSearchToolCall + in OpenAI::Responses::ResponseComputerToolCall + in OpenAI::Responses::ResponseComputerToolCallOutputItem + in OpenAI::Responses::ResponseFunctionWebSearch + in OpenAI::Responses::ResponseFunctionToolCallItem + in OpenAI::Responses::ResponseFunctionToolCallOutputItem end end @@ -35,47 +35,47 @@ def test_list in { type: :message, id: String, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseInputContent]), - role: OpenAI::Models::Responses::ResponseInputMessageItem::Role, - status: OpenAI::Models::Responses::ResponseInputMessageItem::Status | nil + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent]), + role: OpenAI::Responses::ResponseInputMessageItem::Role, + status: OpenAI::Responses::ResponseInputMessageItem::Status | nil } in { type: :message, id: String, - content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputMessage::Content]), + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content]), role: Symbol, - status: OpenAI::Models::Responses::ResponseOutputMessage::Status + status: OpenAI::Responses::ResponseOutputMessage::Status } in { type: :file_search_call, id: String, queries: ^(OpenAI::Internal::Type::ArrayOf[String]), - status: OpenAI::Models::Responses::ResponseFileSearchToolCall::Status, - results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result]) | nil + status: OpenAI::Responses::ResponseFileSearchToolCall::Status, + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result]) | nil } in { type: 
:computer_call, id: String, - action: OpenAI::Models::Responses::ResponseComputerToolCall::Action, + action: OpenAI::Responses::ResponseComputerToolCall::Action, call_id: String, - pending_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCall::PendingSafetyCheck]), - status: OpenAI::Models::Responses::ResponseComputerToolCall::Status + pending_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck]), + status: OpenAI::Responses::ResponseComputerToolCall::Status } in { type: :computer_call_output, id: String, call_id: String, - output: OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot, - acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, - status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status | nil + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, + status: OpenAI::Responses::ResponseComputerToolCallOutputItem::Status | nil } - in {type: :web_search_call, id: String, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::Status} + in {type: :web_search_call, id: String, status: OpenAI::Responses::ResponseFunctionWebSearch::Status} in { type: :function_call_output, id: String, call_id: String, output: String, - status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status | nil + status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status | nil } end end diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index c5066e33..a01200f4 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -7,33 +7,33 @@ def test_create_required_params response = @openai.responses.create(input: "string", model: :"gpt-4o") assert_pattern do - response => OpenAI::Models::Responses::Response + response => OpenAI::Responses::Response end assert_pattern do response => { id: String, created_at: Float, - error: OpenAI::Models::Responses::ResponseError | nil, - incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil, + error: OpenAI::Responses::ResponseError | nil, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, instructions: String | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - model: OpenAI::Models::ResponsesModel, + model: OpenAI::ResponsesModel, object: Symbol, - output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), + output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem]), parallel_tool_calls: OpenAI::Internal::Type::Boolean, temperature: Float | nil, - tool_choice: OpenAI::Models::Responses::Response::ToolChoice, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]), + tool_choice: OpenAI::Responses::Response::ToolChoice, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, - reasoning: OpenAI::Models::Reasoning | nil, - service_tier: OpenAI::Models::Responses::Response::ServiceTier | nil, - status: OpenAI::Models::Responses::ResponseStatus | nil, - text: 
OpenAI::Models::Responses::ResponseTextConfig | nil, - truncation: OpenAI::Models::Responses::Response::Truncation | nil, - usage: OpenAI::Models::Responses::ResponseUsage | nil, + reasoning: OpenAI::Reasoning | nil, + service_tier: OpenAI::Responses::Response::ServiceTier | nil, + status: OpenAI::Responses::ResponseStatus | nil, + text: OpenAI::Responses::ResponseTextConfig | nil, + truncation: OpenAI::Responses::Response::Truncation | nil, + usage: OpenAI::Responses::ResponseUsage | nil, user: String | nil } end @@ -43,33 +43,33 @@ def test_retrieve response = @openai.responses.retrieve("resp_677efb5139a88190b512bc3fef8e535d") assert_pattern do - response => OpenAI::Models::Responses::Response + response => OpenAI::Responses::Response end assert_pattern do response => { id: String, created_at: Float, - error: OpenAI::Models::Responses::ResponseError | nil, - incomplete_details: OpenAI::Models::Responses::Response::IncompleteDetails | nil, + error: OpenAI::Responses::ResponseError | nil, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, instructions: String | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, - model: OpenAI::Models::ResponsesModel, + model: OpenAI::ResponsesModel, object: Symbol, - output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::ResponseOutputItem]), + output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem]), parallel_tool_calls: OpenAI::Internal::Type::Boolean, temperature: Float | nil, - tool_choice: OpenAI::Models::Responses::Response::ToolChoice, - tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool]), + tool_choice: OpenAI::Responses::Response::ToolChoice, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, - reasoning: OpenAI::Models::Reasoning | nil, - service_tier: OpenAI::Models::Responses::Response::ServiceTier | nil, - status: OpenAI::Models::Responses::ResponseStatus | nil, - text: OpenAI::Models::Responses::ResponseTextConfig | nil, - truncation: OpenAI::Models::Responses::Response::Truncation | nil, - usage: OpenAI::Models::Responses::ResponseUsage | nil, + reasoning: OpenAI::Reasoning | nil, + service_tier: OpenAI::Responses::Response::ServiceTier | nil, + status: OpenAI::Responses::ResponseStatus | nil, + text: OpenAI::Responses::ResponseTextConfig | nil, + truncation: OpenAI::Responses::Response::Truncation | nil, + usage: OpenAI::Responses::ResponseUsage | nil, user: String | nil } end diff --git a/test/openai/resources/uploads/parts_test.rb b/test/openai/resources/uploads/parts_test.rb index b10dcbb3..92fbc11f 100644 --- a/test/openai/resources/uploads/parts_test.rb +++ b/test/openai/resources/uploads/parts_test.rb @@ -7,7 +7,7 @@ def test_create_required_params response = @openai.uploads.parts.create("upload_abc123", data: Pathname(__FILE__)) assert_pattern do - response => OpenAI::Models::Uploads::UploadPart + response => OpenAI::Uploads::UploadPart end assert_pattern do diff --git a/test/openai/resources/uploads_test.rb b/test/openai/resources/uploads_test.rb index 3b3e356c..38a3a1e4 100644 --- a/test/openai/resources/uploads_test.rb +++ b/test/openai/resources/uploads_test.rb @@ -8,7 +8,7 @@ def test_create_required_params @openai.uploads.create(bytes: 0, filename: "filename", mime_type: "mime_type", purpose: :assistants) assert_pattern do - response => OpenAI::Models::Upload + response => OpenAI::Upload end 
assert_pattern do @@ -20,8 +20,8 @@ def test_create_required_params filename: String, object: Symbol, purpose: String, - status: OpenAI::Models::Upload::Status, - file: OpenAI::Models::FileObject | nil + status: OpenAI::Upload::Status, + file: OpenAI::FileObject | nil } end end @@ -30,7 +30,7 @@ def test_cancel response = @openai.uploads.cancel("upload_abc123") assert_pattern do - response => OpenAI::Models::Upload + response => OpenAI::Upload end assert_pattern do @@ -42,8 +42,8 @@ def test_cancel filename: String, object: Symbol, purpose: String, - status: OpenAI::Models::Upload::Status, - file: OpenAI::Models::FileObject | nil + status: OpenAI::Upload::Status, + file: OpenAI::FileObject | nil } end end @@ -52,7 +52,7 @@ def test_complete_required_params response = @openai.uploads.complete("upload_abc123", part_ids: ["string"]) assert_pattern do - response => OpenAI::Models::Upload + response => OpenAI::Upload end assert_pattern do @@ -64,8 +64,8 @@ def test_complete_required_params filename: String, object: Symbol, purpose: String, - status: OpenAI::Models::Upload::Status, - file: OpenAI::Models::FileObject | nil + status: OpenAI::Upload::Status, + file: OpenAI::FileObject | nil } end end diff --git a/test/openai/resources/vector_stores/file_batches_test.rb b/test/openai/resources/vector_stores/file_batches_test.rb index 55dce57e..ccaeb85e 100644 --- a/test/openai/resources/vector_stores/file_batches_test.rb +++ b/test/openai/resources/vector_stores/file_batches_test.rb @@ -7,16 +7,16 @@ def test_create_required_params response = @openai.vector_stores.file_batches.create("vs_abc123", file_ids: ["string"]) assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFileBatch + response => OpenAI::VectorStores::VectorStoreFileBatch end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, + file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status, + status: OpenAI::VectorStores::VectorStoreFileBatch::Status, vector_store_id: String } end @@ -26,16 +26,16 @@ def test_retrieve_required_params response = @openai.vector_stores.file_batches.retrieve("vsfb_abc123", vector_store_id: "vs_abc123") assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFileBatch + response => OpenAI::VectorStores::VectorStoreFileBatch end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, + file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status, + status: OpenAI::VectorStores::VectorStoreFileBatch::Status, vector_store_id: String } end @@ -45,16 +45,16 @@ def test_cancel_required_params response = @openai.vector_stores.file_batches.cancel("batch_id", vector_store_id: "vector_store_id") assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFileBatch + response => OpenAI::VectorStores::VectorStoreFileBatch end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts, + file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFileBatch::Status, + status: OpenAI::VectorStores::VectorStoreFileBatch::Status, vector_store_id: String } end 
@@ -71,20 +71,20 @@ def test_list_files_required_params return if row.nil? assert_pattern do - row => OpenAI::Models::VectorStores::VectorStoreFile + row => OpenAI::VectorStores::VectorStoreFile end assert_pattern do row => { id: String, created_at: Integer, - last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError | nil, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError | nil, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status, + status: OpenAI::VectorStores::VectorStoreFile::Status, usage_bytes: Integer, vector_store_id: String, - attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil, - chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil + attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]) | nil, + chunking_strategy: OpenAI::FileChunkingStrategy | nil } end end diff --git a/test/openai/resources/vector_stores/files_test.rb b/test/openai/resources/vector_stores/files_test.rb index b7573f30..e322a6ba 100644 --- a/test/openai/resources/vector_stores/files_test.rb +++ b/test/openai/resources/vector_stores/files_test.rb @@ -7,20 +7,20 @@ def test_create_required_params response = @openai.vector_stores.files.create("vs_abc123", file_id: "file_id") assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFile + response => OpenAI::VectorStores::VectorStoreFile end assert_pattern do response => { id: String, created_at: Integer, - last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError | nil, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError | nil, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status, + status: OpenAI::VectorStores::VectorStoreFile::Status, usage_bytes: Integer, vector_store_id: String, - attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil, - chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil + attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]) | nil, + chunking_strategy: OpenAI::FileChunkingStrategy | nil } end end @@ -29,20 +29,20 @@ def test_retrieve_required_params response = @openai.vector_stores.files.retrieve("file-abc123", vector_store_id: "vs_abc123") assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFile + response => OpenAI::VectorStores::VectorStoreFile end assert_pattern do response => { id: String, created_at: Integer, - last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError | nil, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError | nil, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status, + status: OpenAI::VectorStores::VectorStoreFile::Status, usage_bytes: Integer, vector_store_id: String, - attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil, - chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil + attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]) | nil, + chunking_strategy: OpenAI::FileChunkingStrategy | nil } end end @@ -56,20 +56,20 @@ def test_update_required_params ) assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFile + response => OpenAI::VectorStores::VectorStoreFile end assert_pattern do response => { id: String, created_at: Integer, - last_error: 
OpenAI::Models::VectorStores::VectorStoreFile::LastError | nil, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError | nil, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status, + status: OpenAI::VectorStores::VectorStoreFile::Status, usage_bytes: Integer, vector_store_id: String, - attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil, - chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil + attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]) | nil, + chunking_strategy: OpenAI::FileChunkingStrategy | nil } end end @@ -85,20 +85,20 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::VectorStores::VectorStoreFile + row => OpenAI::VectorStores::VectorStoreFile end assert_pattern do row => { id: String, created_at: Integer, - last_error: OpenAI::Models::VectorStores::VectorStoreFile::LastError | nil, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError | nil, object: Symbol, - status: OpenAI::Models::VectorStores::VectorStoreFile::Status, + status: OpenAI::VectorStores::VectorStoreFile::Status, usage_bytes: Integer, vector_store_id: String, - attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute]) | nil, - chunking_strategy: OpenAI::Models::FileChunkingStrategy | nil + attributes: ^(OpenAI::Internal::Type::HashOf[union: OpenAI::VectorStores::VectorStoreFile::Attribute]) | nil, + chunking_strategy: OpenAI::FileChunkingStrategy | nil } end end @@ -107,7 +107,7 @@ def test_delete_required_params response = @openai.vector_stores.files.delete("file_id", vector_store_id: "vector_store_id") assert_pattern do - response => OpenAI::Models::VectorStores::VectorStoreFileDeleted + response => OpenAI::VectorStores::VectorStoreFileDeleted end assert_pattern do diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index a68725ca..bbce9895 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -7,21 +7,21 @@ def test_create response = @openai.vector_stores.create assert_pattern do - response => OpenAI::Models::VectorStore + response => OpenAI::VectorStore end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - status: OpenAI::Models::VectorStore::Status, + status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -31,21 +31,21 @@ def test_retrieve response = @openai.vector_stores.retrieve("vector_store_id") assert_pattern do - response => OpenAI::Models::VectorStore + response => OpenAI::VectorStore end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - status: OpenAI::Models::VectorStore::Status, + status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter | nil, + 
expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -55,21 +55,21 @@ def test_update response = @openai.vector_stores.update("vector_store_id") assert_pattern do - response => OpenAI::Models::VectorStore + response => OpenAI::VectorStore end assert_pattern do response => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - status: OpenAI::Models::VectorStore::Status, + status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -86,21 +86,21 @@ def test_list return if row.nil? assert_pattern do - row => OpenAI::Models::VectorStore + row => OpenAI::VectorStore end assert_pattern do row => { id: String, created_at: Integer, - file_counts: OpenAI::Models::VectorStore::FileCounts, + file_counts: OpenAI::VectorStore::FileCounts, last_active_at: Integer | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, name: String, object: Symbol, - status: OpenAI::Models::VectorStore::Status, + status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::Models::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -110,7 +110,7 @@ def test_delete response = @openai.vector_stores.delete("vector_store_id") assert_pattern do - response => OpenAI::Models::VectorStoreDeleted + response => OpenAI::VectorStoreDeleted end assert_pattern do From a27ca00df8d5b6c4888aa73b702275074ab4ec1c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 8 May 2025 17:24:33 +0000 Subject: [PATCH 184/295] feat(api): Add reinforcement fine-tuning api support --- .stats.yml | 8 +- lib/openai.rb | 26 +- lib/openai/client.rb | 4 + lib/openai/models.rb | 8 +- lib/openai/models/eval_create_params.rb | 245 ++-------- lib/openai/models/eval_create_response.rb | 223 +-------- lib/openai/models/eval_label_model_grader.rb | 166 ------- lib/openai/models/eval_list_response.rb | 223 +-------- lib/openai/models/eval_retrieve_response.rb | 225 +-------- lib/openai/models/eval_string_check_grader.rb | 66 --- .../models/eval_text_similarity_grader.rb | 84 ---- lib/openai/models/eval_update_response.rb | 223 +-------- .../fine_tuning/alpha/grader_run_params.rb | 89 ++++ .../fine_tuning/alpha/grader_run_response.rb | 175 +++++++ .../alpha/grader_validate_params.rb | 49 ++ .../alpha/grader_validate_response.rb | 46 ++ .../models/fine_tuning/dpo_hyperparameters.rb | 112 +++++ lib/openai/models/fine_tuning/dpo_method.rb | 20 + .../models/fine_tuning/fine_tuning_job.rb | 279 +---------- .../models/fine_tuning/job_create_params.rb | 282 +---------- .../models/fine_tuning/job_pause_params.rb | 16 + .../models/fine_tuning/job_resume_params.rb | 16 + .../reinforcement_hyperparameters.rb | 181 ++++++++ .../fine_tuning/reinforcement_method.rb | 53 +++ .../fine_tuning/supervised_hyperparameters.rb | 88 ++++ .../models/fine_tuning/supervised_method.rb | 20 + .../models/graders/label_model_grader.rb | 171 +++++++ lib/openai/models/graders/multi_grader.rb | 71 +++ lib/openai/models/graders/python_grader.rb | 46 ++ .../models/graders/score_model_grader.rb | 171 +++++++ 
.../models/graders/string_check_grader.rb | 70 +++ .../models/graders/text_similarity_grader.rb | 80 ++++ lib/openai/resources/evals.rb | 4 +- lib/openai/resources/fine_tuning.rb | 8 + lib/openai/resources/fine_tuning/alpha.rb | 20 + .../resources/fine_tuning/alpha/graders.rb | 66 +++ lib/openai/resources/fine_tuning/jobs.rb | 46 ++ lib/openai/resources/fine_tuning/methods.rb | 16 + lib/openai/resources/graders.rb | 18 + lib/openai/resources/graders/grader_models.rb | 16 + rbi/openai/client.rbi | 3 + rbi/openai/models.rbi | 8 +- rbi/openai/models/eval_create_params.rbi | 408 ++-------------- rbi/openai/models/eval_create_response.rbi | 412 ++-------------- rbi/openai/models/eval_label_model_grader.rbi | 268 ----------- rbi/openai/models/eval_list_response.rbi | 412 ++-------------- rbi/openai/models/eval_retrieve_response.rbi | 412 ++-------------- .../models/eval_string_check_grader.rbi | 94 ---- .../models/eval_text_similarity_grader.rbi | 157 ------- rbi/openai/models/eval_update_response.rbi | 412 ++-------------- .../fine_tuning/alpha/grader_run_params.rbi | 143 ++++++ .../fine_tuning/alpha/grader_run_response.rbi | 253 ++++++++++ .../alpha/grader_validate_params.rbi | 95 ++++ .../alpha/grader_validate_response.rbi | 105 +++++ .../fine_tuning/dpo_hyperparameters.rbi | 151 ++++++ rbi/openai/models/fine_tuning/dpo_method.rbi | 42 ++ .../models/fine_tuning/fine_tuning_job.rbi | 439 ++---------------- .../models/fine_tuning/job_create_params.rbi | 437 ++--------------- .../models/fine_tuning/job_pause_params.rbi | 26 ++ .../models/fine_tuning/job_resume_params.rbi | 26 ++ .../reinforcement_hyperparameters.rbi | 275 +++++++++++ .../fine_tuning/reinforcement_method.rbi | 106 +++++ .../supervised_hyperparameters.rbi | 122 +++++ .../models/fine_tuning/supervised_method.rbi | 46 ++ .../models/graders/label_model_grader.rbi | 287 ++++++++++++ rbi/openai/models/graders/multi_grader.rbi | 120 +++++ rbi/openai/models/graders/python_grader.rbi | 61 +++ .../models/graders/score_model_grader.rbi | 294 ++++++++++++ .../models/graders/string_check_grader.rbi | 115 +++++ .../models/graders/text_similarity_grader.rbi | 155 +++++++ rbi/openai/resources/evals.rbi | 6 +- rbi/openai/resources/fine_tuning.rbi | 6 + rbi/openai/resources/fine_tuning/alpha.rbi | 17 + .../resources/fine_tuning/alpha/graders.rbi | 65 +++ rbi/openai/resources/fine_tuning/jobs.rbi | 28 ++ rbi/openai/resources/fine_tuning/methods.rbi | 14 + rbi/openai/resources/graders.rbi | 15 + .../resources/graders/grader_models.rbi | 14 + sig/openai/client.rbs | 2 + sig/openai/models.rbs | 8 +- sig/openai/models/eval_create_params.rbs | 165 ++----- sig/openai/models/eval_create_response.rbs | 157 +------ sig/openai/models/eval_label_model_grader.rbs | 105 ----- sig/openai/models/eval_list_response.rbs | 157 +------ sig/openai/models/eval_retrieve_response.rbs | 157 +------ .../models/eval_string_check_grader.rbs | 45 -- .../models/eval_text_similarity_grader.rbs | 67 --- sig/openai/models/eval_update_response.rbs | 157 +------ .../fine_tuning/alpha/grader_run_params.rbs | 56 +++ .../fine_tuning/alpha/grader_run_response.rbs | 134 ++++++ .../alpha/grader_validate_params.rbs | 38 ++ .../alpha/grader_validate_response.rbs | 37 ++ .../fine_tuning/dpo_hyperparameters.rbs | 78 ++++ sig/openai/models/fine_tuning/dpo_method.rbs | 20 + .../models/fine_tuning/fine_tuning_job.rbs | 205 +------- .../models/fine_tuning/job_create_params.rbs | 205 +------- .../models/fine_tuning/job_pause_params.rbs | 14 + .../models/fine_tuning/job_resume_params.rbs | 14 + 
.../reinforcement_hyperparameters.rbs | 131 ++++++ .../fine_tuning/reinforcement_method.rbs | 39 ++ .../supervised_hyperparameters.rbs | 62 +++ .../models/fine_tuning/supervised_method.rbs | 20 + .../models/graders/label_model_grader.rbs | 109 +++++ sig/openai/models/graders/multi_grader.rbs | 45 ++ sig/openai/models/graders/python_grader.rbs | 29 ++ .../models/graders/score_model_grader.rbs | 113 +++++ .../models/graders/string_check_grader.rbs | 49 ++ .../models/graders/text_similarity_grader.rbs | 65 +++ sig/openai/resources/fine_tuning.rbs | 4 + sig/openai/resources/fine_tuning/alpha.rbs | 11 + .../resources/fine_tuning/alpha/graders.rbs | 23 + sig/openai/resources/fine_tuning/jobs.rbs | 10 + sig/openai/resources/fine_tuning/methods.rbs | 9 + sig/openai/resources/graders.rbs | 9 + .../resources/graders/grader_models.rbs | 9 + test/openai/resource_namespaces.rb | 9 + .../fine_tuning/alpha/graders_test.rb | 44 ++ .../resources/fine_tuning/alpha_test.rb | 6 + .../openai/resources/fine_tuning/jobs_test.rb | 64 +++ .../resources/fine_tuning/methods_test.rb | 6 + .../resources/graders/grader_models_test.rb | 6 + test/openai/resources/graders_test.rb | 6 + 122 files changed, 6177 insertions(+), 6301 deletions(-) delete mode 100644 lib/openai/models/eval_label_model_grader.rb delete mode 100644 lib/openai/models/eval_string_check_grader.rb delete mode 100644 lib/openai/models/eval_text_similarity_grader.rb create mode 100644 lib/openai/models/fine_tuning/alpha/grader_run_params.rb create mode 100644 lib/openai/models/fine_tuning/alpha/grader_run_response.rb create mode 100644 lib/openai/models/fine_tuning/alpha/grader_validate_params.rb create mode 100644 lib/openai/models/fine_tuning/alpha/grader_validate_response.rb create mode 100644 lib/openai/models/fine_tuning/dpo_hyperparameters.rb create mode 100644 lib/openai/models/fine_tuning/dpo_method.rb create mode 100644 lib/openai/models/fine_tuning/job_pause_params.rb create mode 100644 lib/openai/models/fine_tuning/job_resume_params.rb create mode 100644 lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb create mode 100644 lib/openai/models/fine_tuning/reinforcement_method.rb create mode 100644 lib/openai/models/fine_tuning/supervised_hyperparameters.rb create mode 100644 lib/openai/models/fine_tuning/supervised_method.rb create mode 100644 lib/openai/models/graders/label_model_grader.rb create mode 100644 lib/openai/models/graders/multi_grader.rb create mode 100644 lib/openai/models/graders/python_grader.rb create mode 100644 lib/openai/models/graders/score_model_grader.rb create mode 100644 lib/openai/models/graders/string_check_grader.rb create mode 100644 lib/openai/models/graders/text_similarity_grader.rb create mode 100644 lib/openai/resources/fine_tuning/alpha.rb create mode 100644 lib/openai/resources/fine_tuning/alpha/graders.rb create mode 100644 lib/openai/resources/fine_tuning/methods.rb create mode 100644 lib/openai/resources/graders.rb create mode 100644 lib/openai/resources/graders/grader_models.rb delete mode 100644 rbi/openai/models/eval_label_model_grader.rbi delete mode 100644 rbi/openai/models/eval_string_check_grader.rbi delete mode 100644 rbi/openai/models/eval_text_similarity_grader.rbi create mode 100644 rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi create mode 100644 rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi create mode 100644 rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi create mode 100644 rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi 
create mode 100644 rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi create mode 100644 rbi/openai/models/fine_tuning/dpo_method.rbi create mode 100644 rbi/openai/models/fine_tuning/job_pause_params.rbi create mode 100644 rbi/openai/models/fine_tuning/job_resume_params.rbi create mode 100644 rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi create mode 100644 rbi/openai/models/fine_tuning/reinforcement_method.rbi create mode 100644 rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi create mode 100644 rbi/openai/models/fine_tuning/supervised_method.rbi create mode 100644 rbi/openai/models/graders/label_model_grader.rbi create mode 100644 rbi/openai/models/graders/multi_grader.rbi create mode 100644 rbi/openai/models/graders/python_grader.rbi create mode 100644 rbi/openai/models/graders/score_model_grader.rbi create mode 100644 rbi/openai/models/graders/string_check_grader.rbi create mode 100644 rbi/openai/models/graders/text_similarity_grader.rbi create mode 100644 rbi/openai/resources/fine_tuning/alpha.rbi create mode 100644 rbi/openai/resources/fine_tuning/alpha/graders.rbi create mode 100644 rbi/openai/resources/fine_tuning/methods.rbi create mode 100644 rbi/openai/resources/graders.rbi create mode 100644 rbi/openai/resources/graders/grader_models.rbi delete mode 100644 sig/openai/models/eval_label_model_grader.rbs delete mode 100644 sig/openai/models/eval_string_check_grader.rbs delete mode 100644 sig/openai/models/eval_text_similarity_grader.rbs create mode 100644 sig/openai/models/fine_tuning/alpha/grader_run_params.rbs create mode 100644 sig/openai/models/fine_tuning/alpha/grader_run_response.rbs create mode 100644 sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs create mode 100644 sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs create mode 100644 sig/openai/models/fine_tuning/dpo_hyperparameters.rbs create mode 100644 sig/openai/models/fine_tuning/dpo_method.rbs create mode 100644 sig/openai/models/fine_tuning/job_pause_params.rbs create mode 100644 sig/openai/models/fine_tuning/job_resume_params.rbs create mode 100644 sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs create mode 100644 sig/openai/models/fine_tuning/reinforcement_method.rbs create mode 100644 sig/openai/models/fine_tuning/supervised_hyperparameters.rbs create mode 100644 sig/openai/models/fine_tuning/supervised_method.rbs create mode 100644 sig/openai/models/graders/label_model_grader.rbs create mode 100644 sig/openai/models/graders/multi_grader.rbs create mode 100644 sig/openai/models/graders/python_grader.rbs create mode 100644 sig/openai/models/graders/score_model_grader.rbs create mode 100644 sig/openai/models/graders/string_check_grader.rbs create mode 100644 sig/openai/models/graders/text_similarity_grader.rbs create mode 100644 sig/openai/resources/fine_tuning/alpha.rbs create mode 100644 sig/openai/resources/fine_tuning/alpha/graders.rbs create mode 100644 sig/openai/resources/fine_tuning/methods.rbs create mode 100644 sig/openai/resources/graders.rbs create mode 100644 sig/openai/resources/graders/grader_models.rbs create mode 100644 test/openai/resources/fine_tuning/alpha/graders_test.rb create mode 100644 test/openai/resources/fine_tuning/alpha_test.rb create mode 100644 test/openai/resources/fine_tuning/methods_test.rb create mode 100644 test/openai/resources/graders/grader_models_test.rb create mode 100644 test/openai/resources/graders_test.rb diff --git a/.stats.yml b/.stats.yml index 089abe5d..5a1f2ff0 100644 --- a/.stats.yml +++ 
b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 95 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-0ee6b36cf3cc278cef4199a6aec5f7d530a6c1f17a74830037e96d50ca1edc50.yml -openapi_spec_hash: e8ec5f46bc0655b34f292422d58a60f6 -config_hash: d9b6b6e6bc85744663e300eebc482067 +configured_endpoints: 99 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-794a6ed3c3d3d77887564755168056af8a426b17cf1ec721e3a300503dc22a41.yml +openapi_spec_hash: 25a81c220713cd5b0bafc221d1dfa79a +config_hash: 0b768ed1b56c6d82816f0fa40dc4aaf5 diff --git a/lib/openai.rb b/lib/openai.rb index c1d3f3a2..99a75f6c 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -52,6 +52,9 @@ require_relative "openai/internal/page" require_relative "openai/models/reasoning_effort" require_relative "openai/models/chat/chat_completion_message" +require_relative "openai/models/graders/score_model_grader" +require_relative "openai/models/graders/python_grader" +require_relative "openai/models/graders/text_similarity_grader" require_relative "openai/models/fine_tuning/fine_tuning_job_wandb_integration_object" require_relative "openai/models/responses/response_function_tool_call" require_relative "openai/models/all_models" @@ -222,7 +225,6 @@ require_relative "openai/models/eval_custom_data_source_config" require_relative "openai/models/eval_delete_params" require_relative "openai/models/eval_delete_response" -require_relative "openai/models/eval_label_model_grader" require_relative "openai/models/eval_list_params" require_relative "openai/models/eval_list_response" require_relative "openai/models/eval_retrieve_params" @@ -245,8 +247,6 @@ require_relative "openai/models/evals/runs/output_item_retrieve_params" require_relative "openai/models/evals/runs/output_item_retrieve_response" require_relative "openai/models/eval_stored_completions_data_source_config" -require_relative "openai/models/eval_string_check_grader" -require_relative "openai/models/eval_text_similarity_grader" require_relative "openai/models/eval_update_params" require_relative "openai/models/eval_update_response" require_relative "openai/models/file_chunking_strategy" @@ -260,12 +260,18 @@ require_relative "openai/models/file_object" require_relative "openai/models/file_purpose" require_relative "openai/models/file_retrieve_params" +require_relative "openai/models/fine_tuning/alpha/grader_run_params" +require_relative "openai/models/fine_tuning/alpha/grader_run_response" +require_relative "openai/models/fine_tuning/alpha/grader_validate_params" +require_relative "openai/models/fine_tuning/alpha/grader_validate_response" require_relative "openai/models/fine_tuning/checkpoints/permission_create_params" require_relative "openai/models/fine_tuning/checkpoints/permission_create_response" require_relative "openai/models/fine_tuning/checkpoints/permission_delete_params" require_relative "openai/models/fine_tuning/checkpoints/permission_delete_response" require_relative "openai/models/fine_tuning/checkpoints/permission_retrieve_params" require_relative "openai/models/fine_tuning/checkpoints/permission_retrieve_response" +require_relative "openai/models/fine_tuning/dpo_hyperparameters" +require_relative "openai/models/fine_tuning/dpo_method" require_relative "openai/models/fine_tuning/fine_tuning_job" require_relative "openai/models/fine_tuning/fine_tuning_job_event" require_relative "openai/models/fine_tuning/fine_tuning_job_integration" @@ -274,11 +280,20 @@ require_relative 
"openai/models/fine_tuning/job_create_params" require_relative "openai/models/fine_tuning/job_list_events_params" require_relative "openai/models/fine_tuning/job_list_params" +require_relative "openai/models/fine_tuning/job_pause_params" +require_relative "openai/models/fine_tuning/job_resume_params" require_relative "openai/models/fine_tuning/job_retrieve_params" require_relative "openai/models/fine_tuning/jobs/checkpoint_list_params" require_relative "openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint" +require_relative "openai/models/fine_tuning/reinforcement_hyperparameters" +require_relative "openai/models/fine_tuning/reinforcement_method" +require_relative "openai/models/fine_tuning/supervised_hyperparameters" +require_relative "openai/models/fine_tuning/supervised_method" require_relative "openai/models/function_definition" require_relative "openai/models/function_parameters" +require_relative "openai/models/graders/label_model_grader" +require_relative "openai/models/graders/multi_grader" +require_relative "openai/models/graders/string_check_grader" require_relative "openai/models/image" require_relative "openai/models/image_create_variation_params" require_relative "openai/models/image_edit_params" @@ -442,10 +457,15 @@ require_relative "openai/resources/evals/runs/output_items" require_relative "openai/resources/files" require_relative "openai/resources/fine_tuning" +require_relative "openai/resources/fine_tuning/alpha" +require_relative "openai/resources/fine_tuning/alpha/graders" require_relative "openai/resources/fine_tuning/checkpoints" require_relative "openai/resources/fine_tuning/checkpoints/permissions" require_relative "openai/resources/fine_tuning/jobs" require_relative "openai/resources/fine_tuning/jobs/checkpoints" +require_relative "openai/resources/fine_tuning/methods" +require_relative "openai/resources/graders" +require_relative "openai/resources/graders/grader_models" require_relative "openai/resources/images" require_relative "openai/resources/models" require_relative "openai/resources/moderations" diff --git a/lib/openai/client.rb b/lib/openai/client.rb index c79c1a83..f1ae51e6 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -51,6 +51,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::FineTuning] attr_reader :fine_tuning + # @return [OpenAI::Resources::Graders] + attr_reader :graders + # @return [OpenAI::Resources::VectorStores] attr_reader :vector_stores @@ -137,6 +140,7 @@ def initialize( @moderations = OpenAI::Resources::Moderations.new(client: self) @models = OpenAI::Resources::Models.new(client: self) @fine_tuning = OpenAI::Resources::FineTuning.new(client: self) + @graders = OpenAI::Resources::Graders.new(client: self) @vector_stores = OpenAI::Resources::VectorStores.new(client: self) @beta = OpenAI::Resources::Beta.new(client: self) @batches = OpenAI::Resources::Batches.new(client: self) diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 2a0f2cef..31c2a050 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -59,8 +59,6 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader - EvalListParams = OpenAI::Models::EvalListParams EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams @@ -69,10 +67,6 @@ module OpenAI EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig - EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader - - EvalTextSimilarityGrader = 
OpenAI::Models::EvalTextSimilarityGrader - EvalUpdateParams = OpenAI::Models::EvalUpdateParams FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy @@ -104,6 +98,8 @@ module OpenAI # @type [OpenAI::Internal::Type::Converter] FunctionParameters = OpenAI::Models::FunctionParameters + Graders = OpenAI::Models::Graders + Image = OpenAI::Models::Image ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 91a2a00f..668813af 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -10,13 +10,13 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # The configuration for the data source used for the evaluation runs. # - # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] + # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig } # @!attribute testing_criteria # A list of graders for all eval runs in this group. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion] } @@ -41,9 +41,9 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::EvalCreateParams} for more details. # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -65,7 +65,7 @@ module DataSourceConfig # A data source config which specifies the metadata property of your stored completions query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. - variant :logs, -> { OpenAI::EvalCreateParams::DataSourceConfig::Logs } + variant :stored_completions, -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions } class Custom < OpenAI::Internal::Type::BaseModel # @!attribute item_schema @@ -105,31 +105,31 @@ class Custom < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :custom] The type of data source. Always `custom`. end - class Logs < OpenAI::Internal::Type::BaseModel + class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type - # The type of data source. Always `logs`. + # The type of data source. Always `stored_completions`. # - # @return [Symbol, :logs] - required :type, const: :logs + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions # @!attribute metadata - # Metadata filters for the logs data source. + # Metadata filters for the stored completions data source. 
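The hunks above also surface graders as a first-class client resource. A minimal sketch of the client-side effect (constructor defaults and accessor behaviour are as in the existing resources; nothing here is new API surface beyond the reader itself):

    require "openai"

    # Assumes OPENAI_API_KEY is set in the environment, as elsewhere in this SDK.
    client = OpenAI::Client.new

    client.graders     # => OpenAI::Resources::Graders (added by this patch)
    client.fine_tuning # => OpenAI::Resources::FineTuning (unchanged)
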
# # @return [Hash{Symbol=>Object}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!method initialize(metadata: nil, type: :logs) + # @!method initialize(metadata: nil, type: :stored_completions) # A data source config which specifies the metadata property of your stored # completions query. This is usually metadata like `usecase=chatbot` or # `prompt-version=v2`, etc. # - # @param metadata [Hash{Symbol=>Object}] Metadata filters for the logs data source. + # @param metadata [Hash{Symbol=>Object}] Metadata filters for the stored completions data source. # - # @param type [Symbol, :logs] The type of data source. Always `logs`. + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs)] + # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -144,10 +144,10 @@ module TestingCriterion variant :label_model, -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::EvalStringCheckGrader } + variant :string_check, -> { OpenAI::Graders::StringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } + variant :text_similarity, -> { OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity } # A PythonGrader object that runs a python script on the input. variant :python, -> { OpenAI::EvalCreateParams::TestingCriterion::Python } @@ -374,232 +374,47 @@ module Type end end - class Python < OpenAI::Internal::Type::BaseModel - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute source - # The source code of the python script. - # - # @return [String] - required :source, String - - # @!attribute type - # The object type, which is always `python`. + class TextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + # @!attribute pass_threshold + # The threshold for the score. # - # @return [Symbol, :python] - required :type, const: :python + # @return [Float] + required :pass_threshold, Float - # @!attribute image_tag - # The image tag to use for the python script. + # @!method initialize(pass_threshold:) + # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @return [String, nil] - optional :image_tag, String + # @param pass_threshold [Float] The threshold for the score. + end + class Python < OpenAI::Models::Graders::PythonGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # @!method initialize(pass_threshold: nil) # A PythonGrader object that runs a python script on the input. # - # @param name [String] The name of the grader. - # - # @param source [String] The source code of the python script. - # - # @param image_tag [String] The image tag to use for the python script. - # # @param pass_threshold [Float] The threshold for the score. 
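To make the `logs` -> `stored_completions` rename concrete, a hedged request sketch; `client.evals.create`, the metadata values, and the `{{...}}` template strings are illustrative rather than taken from this patch:

    client.evals.create(
      name: "chatbot-regression",
      data_source_config: {
        type: :stored_completions, # previously :logs
        metadata: {usecase: "chatbot", "prompt-version": "v2"}
      },
      testing_criteria: [
        {
          type: :string_check,
          name: "exact match",
          input: "{{sample.output_text}}",
          reference: "{{item.expected}}",
          operation: :eq # one of :eq, :ne, :like, :ilike
        }
      ]
    )
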
- # - # @param type [Symbol, :python] The object type, which is always `python`. end - class ScoreModel < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. This may include template strings. - # - # @return [Array] - required :input, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input] - } - - # @!attribute model - # The model to use for the evaluation. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute type - # The object type, which is always `score_model`. - # - # @return [Symbol, :score_model] - required :type, const: :score_model - + class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!attribute range - # The range of the score. Defaults to `[0, 1]`. - # - # @return [Array, nil] - optional :range, OpenAI::Internal::Type::ArrayOf[Float] - - # @!attribute sampling_params - # The sampling parameters for the model. - # - # @return [Object, nil] - optional :sampling_params, OpenAI::Internal::Type::Unknown - - # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # @!method initialize(pass_threshold: nil) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. - # - # @param model [String] The model to use for the evaluation. - # - # @param name [String] The name of the grader. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param range [Array] The range of the score. Defaults to `[0, 1]`. - # - # @param sampling_params [Object] The sampling parameters for the model. - # - # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] - required :content, - union: -> { - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content - } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] - required :role, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input} for more - # details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
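The rewritten `ScoreModel` shows the pattern used throughout this patch: the params-side class inherits the shared grader model (`OpenAI::Models::Graders::ScoreModelGrader`) and layers only the eval-specific `pass_threshold` on top, so `input`, `model`, `name`, `range`, and `sampling_params` now come from the parent. A hypothetical criterion payload under that shape (the model id and template strings are illustrative):

    {
      type: :score_model,
      name: "helpfulness",
      model: "gpt-4o",
      input: [
        {role: :system, content: "Score the reply between 0 and 1."},
        {role: :user, content: "{{sample.output_text}}"}
      ],
      range: [0.0, 1.0],  # inherited field; defaults to [0, 1]
      pass_threshold: 0.7 # the one field defined on the subclass
    }
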
- # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText)] - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index db8e08f0..992dbed4 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -48,7 +48,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. 
# - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } @@ -73,7 +73,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -106,244 +106,63 @@ module DataSourceConfig module TestingCriterion extend OpenAI::Internal::Type::Union - discriminator :type - # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::EvalLabelModelGrader } + variant -> { OpenAI::Graders::LabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::EvalStringCheckGrader } + variant -> { OpenAI::Graders::StringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } + variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity } # A PythonGrader object that runs a python script on the input. - variant :python, -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::Python } + variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython } # A ScoreModelGrader object that uses a model to assign a score to the input. - variant :score_model, -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel } - - class Python < OpenAI::Internal::Type::BaseModel - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String + variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel } - # @!attribute source - # The source code of the python script. - # - # @return [String] - required :source, String - - # @!attribute type - # The object type, which is always `python`. + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + # @!attribute pass_threshold + # The threshold for the score. # - # @return [Symbol, :python] - required :type, const: :python + # @return [Float] + required :pass_threshold, Float - # @!attribute image_tag - # The image tag to use for the python script. + # @!method initialize(pass_threshold:) + # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @return [String, nil] - optional :image_tag, String + # @param pass_threshold [Float] The threshold for the score. + end + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # @!method initialize(pass_threshold: nil) # A PythonGrader object that runs a python script on the input. # - # @param name [String] The name of the grader. - # - # @param source [String] The source code of the python script. - # - # @param image_tag [String] The image tag to use for the python script. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param type [Symbol, :python] The object type, which is always `python`. 
end - class ScoreModel < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. This may include template strings. - # - # @return [Array] - required :input, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input] } - - # @!attribute model - # The model to use for the evaluation. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute type - # The object type, which is always `score_model`. - # - # @return [Symbol, :score_model] - required :type, const: :score_model - + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!attribute range - # The range of the score. Defaults to `[0, 1]`. - # - # @return [Array, nil] - optional :range, OpenAI::Internal::Type::ArrayOf[Float] - - # @!attribute sampling_params - # The sampling parameters for the model. - # - # @return [Object, nil] - optional :sampling_params, OpenAI::Internal::Type::Unknown - - # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # @!method initialize(pass_threshold: nil) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. - # - # @param model [String] The model to use for the evaluation. - # - # @param name [String] The name of the grader. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param range [Array] The range of the score. Defaults to `[0, 1]`. - # - # @param sampling_params [Object] The sampling parameters for the model. - # - # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - required :content, - union: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] - required :role, enum: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, enum: -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
- # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
- # - # @see OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end # @!method self.variants - # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb deleted file mode 100644 index 271e8884..00000000 --- a/lib/openai/models/eval_label_model_grader.rb +++ /dev/null @@ -1,166 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - # @!attribute input - # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalLabelModelGrader::Input] } - - # @!attribute labels - # The labels to assign to each item in the evaluation. - # - # @return [Array] - required :labels, OpenAI::Internal::Type::ArrayOf[String] - - # @!attribute model - # The model to use for the evaluation. Must support structured outputs. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute passing_labels - # The labels that indicate a passing result. Must be a subset of labels. - # - # @return [Array] - required :passing_labels, OpenAI::Internal::Type::ArrayOf[String] - - # @!attribute type - # The object type, which is always `label_model`. - # - # @return [Symbol, :label_model] - required :type, const: :label_model - - # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) - # A LabelModelGrader object which uses a model to assign labels to each item in - # the evaluation. - # - # @param input [Array] - # - # @param labels [Array] The labels to assign to each item in the evaluation. - # - # @param model [String] The model to use for the evaluation. Must support structured outputs. - # - # @param name [String] The name of the grader. - # - # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. - # - # @param type [Symbol, :label_model] The object type, which is always `label_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText] - required :content, union: -> { OpenAI::EvalLabelModelGrader::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::EvalLabelModelGrader::Input::Role] - required :role, enum: -> { OpenAI::EvalLabelModelGrader::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. 
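The deletion above is a pure relocation: the same shape now lives at `OpenAI::Graders::LabelModelGrader`, per the new requires at the top of this patch. A sketch of the criterion it describes, with field names taken from the deleted model and values that are only illustrative:

    {
      type: :label_model,
      name: "sentiment",
      model: "gpt-4o", # must support structured outputs
      input: [
        {role: :developer, content: "Label the sentiment of the reply."},
        {role: :user, content: "{{sample.output_text}}"}
      ],
      labels: %w[positive neutral negative],
      passing_labels: %w[positive neutral] # must be a subset of labels
    }
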
- # - # @return [Symbol, OpenAI::EvalLabelModelGrader::Input::Type, nil] - optional :type, enum: -> { OpenAI::EvalLabelModelGrader::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalLabelModelGrader::Input} for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::EvalLabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::EvalLabelModelGrader::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::EvalLabelModelGrader::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::EvalLabelModelGrader::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalLabelModelGrader::Input::Content::OutputText} for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalLabelModelGrader::Input::Content::OutputText)] - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::EvalLabelModelGrader::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::EvalLabelModelGrader::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - end - end -end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index 7ff9c586..d75a67aa 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -48,7 +48,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. 
# - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } @@ -73,7 +73,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -106,244 +106,63 @@ module DataSourceConfig module TestingCriterion extend OpenAI::Internal::Type::Union - discriminator :type - # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::EvalLabelModelGrader } + variant -> { OpenAI::Graders::LabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::EvalStringCheckGrader } + variant -> { OpenAI::Graders::StringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } + variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity } # A PythonGrader object that runs a python script on the input. - variant :python, -> { OpenAI::Models::EvalListResponse::TestingCriterion::Python } + variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython } # A ScoreModelGrader object that uses a model to assign a score to the input. - variant :score_model, -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel } - - class Python < OpenAI::Internal::Type::BaseModel - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String + variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel } - # @!attribute source - # The source code of the python script. - # - # @return [String] - required :source, String - - # @!attribute type - # The object type, which is always `python`. + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + # @!attribute pass_threshold + # The threshold for the score. # - # @return [Symbol, :python] - required :type, const: :python + # @return [Float] + required :pass_threshold, Float - # @!attribute image_tag - # The image tag to use for the python script. + # @!method initialize(pass_threshold:) + # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @return [String, nil] - optional :image_tag, String + # @param pass_threshold [Float] The threshold for the score. + end + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # @!method initialize(pass_threshold: nil) # A PythonGrader object that runs a python script on the input. # - # @param name [String] The name of the grader. - # - # @param source [String] The source code of the python script. - # - # @param image_tag [String] The image tag to use for the python script. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param type [Symbol, :python] The object type, which is always `python`. end - class ScoreModel < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. 
This may include template strings. - # - # @return [Array] - required :input, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input] } - - # @!attribute model - # The model to use for the evaluation. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute type - # The object type, which is always `score_model`. - # - # @return [Symbol, :score_model] - required :type, const: :score_model - + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!attribute range - # The range of the score. Defaults to `[0, 1]`. - # - # @return [Array, nil] - optional :range, OpenAI::Internal::Type::ArrayOf[Float] - - # @!attribute sampling_params - # The sampling parameters for the model. - # - # @return [Object, nil] - optional :sampling_params, OpenAI::Internal::Type::Unknown - - # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # @!method initialize(pass_threshold: nil) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. - # - # @param model [String] The model to use for the evaluation. - # - # @param name [String] The name of the grader. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param range [Array] The range of the score. Defaults to `[0, 1]`. - # - # @param sampling_params [Object] The sampling parameters for the model. - # - # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - required :content, - union: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] - required :role, enum: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, enum: -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input} for more - # details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
- # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
- # - # @see OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end # @!method self.variants - # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalListResponse::TestingCriterion::Python, OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 51f2d27c..8cfd179d 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -48,7 +48,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } @@ -73,7 +73,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -106,246 +106,63 @@ module DataSourceConfig module TestingCriterion extend OpenAI::Internal::Type::Union - discriminator :type - # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::EvalLabelModelGrader } + variant -> { OpenAI::Graders::LabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::EvalStringCheckGrader } + variant -> { OpenAI::Graders::StringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } + variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity } # A PythonGrader object that runs a python script on the input. - variant :python, -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python } + variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython } # A ScoreModelGrader object that uses a model to assign a score to the input. - variant :score_model, -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel } - - class Python < OpenAI::Internal::Type::BaseModel - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String + variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel } - # @!attribute source - # The source code of the python script. - # - # @return [String] - required :source, String - - # @!attribute type - # The object type, which is always `python`. + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + # @!attribute pass_threshold + # The threshold for the score. 
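With `discriminator :type` dropped from these unions, variant resolution no longer keys off the tag, but callers reading a parsed response can still branch on the concrete classes. A sketch, assuming `ev` is a parsed `OpenAI::Models::EvalRetrieveResponse`:

    ev.testing_criteria.each do |criterion|
      case criterion
      in OpenAI::Graders::LabelModelGrader
        puts "label model over #{criterion.labels.length} labels"
      in OpenAI::Graders::StringCheckGrader
        puts "string check (#{criterion.operation})"
      in OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity
        puts "text similarity, passing at #{criterion.pass_threshold}"
      else
        puts criterion.class.name # python / score_model variants, etc.
      end
    end
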
# - # @return [Symbol, :python] - required :type, const: :python + # @return [Float] + required :pass_threshold, Float - # @!attribute image_tag - # The image tag to use for the python script. + # @!method initialize(pass_threshold:) + # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @return [String, nil] - optional :image_tag, String + # @param pass_threshold [Float] The threshold for the score. + end + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # @!method initialize(pass_threshold: nil) # A PythonGrader object that runs a python script on the input. # - # @param name [String] The name of the grader. - # - # @param source [String] The source code of the python script. - # - # @param image_tag [String] The image tag to use for the python script. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param type [Symbol, :python] The object type, which is always `python`. end - class ScoreModel < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. This may include template strings. - # - # @return [Array] - required :input, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input] } - - # @!attribute model - # The model to use for the evaluation. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute type - # The object type, which is always `score_model`. - # - # @return [Symbol, :score_model] - required :type, const: :score_model - + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!attribute range - # The range of the score. Defaults to `[0, 1]`. - # - # @return [Array, nil] - optional :range, OpenAI::Internal::Type::ArrayOf[Float] - - # @!attribute sampling_params - # The sampling parameters for the model. - # - # @return [Object, nil] - optional :sampling_params, OpenAI::Internal::Type::Unknown - - # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # @!method initialize(pass_threshold: nil) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. - # - # @param model [String] The model to use for the evaluation. - # - # @param name [String] The name of the grader. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param range [Array] The range of the score. Defaults to `[0, 1]`. - # - # @param sampling_params [Object] The sampling parameters for the model. - # - # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. 
- # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - required :content, - union: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] - required :role, - enum: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, - enum: -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] - end - - # The role of the message input. 
One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end # @!method self.variants - # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb deleted file mode 100644 index 494f8199..00000000 --- a/lib/openai/models/eval_string_check_grader.rb +++ /dev/null @@ -1,66 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. This may include template strings. - # - # @return [String] - required :input, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute operation - # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. - # - # @return [Symbol, OpenAI::EvalStringCheckGrader::Operation] - required :operation, enum: -> { OpenAI::EvalStringCheckGrader::Operation } - - # @!attribute reference - # The reference text. This may include template strings. - # - # @return [String] - required :reference, String - - # @!attribute type - # The object type, which is always `string_check`. - # - # @return [Symbol, :string_check] - required :type, const: :string_check - - # @!method initialize(input:, name:, operation:, reference:, type: :string_check) - # A StringCheckGrader object that performs a string comparison between input and - # reference using a specified operation. - # - # @param input [String] The input text. This may include template strings. - # - # @param name [String] The name of the grader. - # - # @param operation [Symbol, OpenAI::EvalStringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. - # - # @param reference [String] The reference text. This may include template strings. - # - # @param type [Symbol, :string_check] The object type, which is always `string_check`. - - # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. 
- # - # @see OpenAI::EvalStringCheckGrader#operation - module Operation - extend OpenAI::Internal::Type::Enum - - EQ = :eq - NE = :ne - LIKE = :like - ILIKE = :ilike - - # @!method self.values - # @return [Array] - end - end - end -end diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb deleted file mode 100644 index 765bb9c6..00000000 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ /dev/null @@ -1,84 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel - # @!attribute evaluation_metric - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. - # - # @return [Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric] - required :evaluation_metric, enum: -> { OpenAI::EvalTextSimilarityGrader::EvaluationMetric } - - # @!attribute input - # The text being graded. - # - # @return [String] - required :input, String - - # @!attribute pass_threshold - # A float score where a value greater than or equal indicates a passing grade. - # - # @return [Float] - required :pass_threshold, Float - - # @!attribute reference - # The text being graded against. - # - # @return [String] - required :reference, String - - # @!attribute type - # The type of grader. - # - # @return [Symbol, :text_similarity] - required :type, const: :text_similarity - - # @!attribute name - # The name of the grader. - # - # @return [String, nil] - optional :name, String - - # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalTextSimilarityGrader} for more details. - # - # A TextSimilarityGrader object which grades text based on similarity metrics. - # - # @param evaluation_metric [Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r - # - # @param input [String] The text being graded. - # - # @param pass_threshold [Float] A float score where a value greater than or equal indicates a passing grade. - # - # @param reference [String] The text being graded against. - # - # @param name [String] The name of the grader. - # - # @param type [Symbol, :text_similarity] The type of grader. - - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. - # - # @see OpenAI::EvalTextSimilarityGrader#evaluation_metric - module EvaluationMetric - extend OpenAI::Internal::Type::Enum - - FUZZY_MATCH = :fuzzy_match - BLEU = :bleu - GLEU = :gleu - METEOR = :meteor - ROUGE_1 = :rouge_1 - ROUGE_2 = :rouge_2 - ROUGE_3 = :rouge_3 - ROUGE_4 = :rouge_4 - ROUGE_5 = :rouge_5 - ROUGE_L = :rouge_l - - # @!method self.values - # @return [Array] - end - end - end -end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 12986ff3..67e02168 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -48,7 +48,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. 
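`EvalTextSimilarityGrader` follows the same relocation, except that `pass_threshold` stays eval-specific: the shared `Graders::TextSimilarityGrader` carries the metric and texts, and each params/response type re-adds `pass_threshold`, as seen earlier in this patch. An illustrative criterion (the metric enum comes from the deleted file; the template strings are hypothetical):

    {
      type: :text_similarity,
      name: "reference overlap",
      evaluation_metric: :rouge_l, # fuzzy_match, bleu, gleu, meteor, rouge_1..rouge_l
      input: "{{sample.output_text}}", # the text being graded
      reference: "{{item.reference}}", # the text graded against
      pass_threshold: 0.5
    }
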
# - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } @@ -73,7 +73,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -106,244 +106,63 @@ module DataSourceConfig module TestingCriterion extend OpenAI::Internal::Type::Union - discriminator :type - # A LabelModelGrader object which uses a model to assign labels to each item # in the evaluation. - variant :label_model, -> { OpenAI::EvalLabelModelGrader } + variant -> { OpenAI::Graders::LabelModelGrader } # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. - variant :string_check, -> { OpenAI::EvalStringCheckGrader } + variant -> { OpenAI::Graders::StringCheckGrader } # A TextSimilarityGrader object which grades text based on similarity metrics. - variant :text_similarity, -> { OpenAI::EvalTextSimilarityGrader } + variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity } # A PythonGrader object that runs a python script on the input. - variant :python, -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python } + variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython } # A ScoreModelGrader object that uses a model to assign a score to the input. - variant :score_model, -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel } - - class Python < OpenAI::Internal::Type::BaseModel - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String + variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel } - # @!attribute source - # The source code of the python script. - # - # @return [String] - required :source, String - - # @!attribute type - # The object type, which is always `python`. + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + # @!attribute pass_threshold + # The threshold for the score. # - # @return [Symbol, :python] - required :type, const: :python + # @return [Float] + required :pass_threshold, Float - # @!attribute image_tag - # The image tag to use for the python script. + # @!method initialize(pass_threshold:) + # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @return [String, nil] - optional :image_tag, String + # @param pass_threshold [Float] The threshold for the score. + end + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!method initialize(name:, source:, image_tag: nil, pass_threshold: nil, type: :python) + # @!method initialize(pass_threshold: nil) # A PythonGrader object that runs a python script on the input. # - # @param name [String] The name of the grader. - # - # @param source [String] The source code of the python script. - # - # @param image_tag [String] The image tag to use for the python script. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param type [Symbol, :python] The object type, which is always `python`. 
end - class ScoreModel < OpenAI::Internal::Type::BaseModel - # @!attribute input - # The input text. This may include template strings. - # - # @return [Array] - required :input, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input] } - - # @!attribute model - # The model to use for the evaluation. - # - # @return [String] - required :model, String - - # @!attribute name - # The name of the grader. - # - # @return [String] - required :name, String - - # @!attribute type - # The object type, which is always `score_model`. - # - # @return [Symbol, :score_model] - required :type, const: :score_model - + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!attribute pass_threshold # The threshold for the score. # # @return [Float, nil] optional :pass_threshold, Float - # @!attribute range - # The range of the score. Defaults to `[0, 1]`. - # - # @return [Array, nil] - optional :range, OpenAI::Internal::Type::ArrayOf[Float] - - # @!attribute sampling_params - # The sampling parameters for the model. - # - # @return [Object, nil] - optional :sampling_params, OpenAI::Internal::Type::Unknown - - # @!method initialize(input:, model:, name:, pass_threshold: nil, range: nil, sampling_params: nil, type: :score_model) + # @!method initialize(pass_threshold: nil) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. - # - # @param model [String] The model to use for the evaluation. - # - # @param name [String] The name of the grader. - # # @param pass_threshold [Float] The threshold for the score. - # - # @param range [Array] The range of the score. Defaults to `[0, 1]`. - # - # @param sampling_params [Object] The sampling parameters for the model. - # - # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] - required :content, - union: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] - required :role, enum: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type, nil] - optional :type, enum: -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
- # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText)] - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
- # - # @see OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end # @!method self.variants - # @return [Array(OpenAI::EvalLabelModelGrader, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb new file mode 100644 index 00000000..921ece7c --- /dev/null +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -0,0 +1,89 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Alpha + # @see OpenAI::Resources::FineTuning::Alpha::Graders#run + class GraderRunParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute grader + # The grader used for the fine-tuning job. + # + # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderRunParams::Grader } + + # @!attribute model_sample + # The model sample to be evaluated. + # + # @return [String] + required :model_sample, String + + # @!attribute reference_answer + # The reference answer for the evaluation. + # + # @return [String, Object, Array, Float] + required :reference_answer, + union: -> { + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer + } + + # @!method initialize(grader:, model_sample:, reference_answer:, request_options: {}) + # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # + # @param model_sample [String] The model sample to be evaluated. + # + # @param reference_answer [String, Object, Array, Float] The reference answer for the evaluation. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # The grader used for the fine-tuning job. + module Grader + extend OpenAI::Internal::Type::Union + + discriminator :type + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant :string_check, -> { OpenAI::Graders::StringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant :text_similarity, -> { OpenAI::Graders::TextSimilarityGrader } + + # A PythonGrader object that runs a python script on the input. + variant :python, -> { OpenAI::Graders::PythonGrader } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant :score_model, -> { OpenAI::Graders::ScoreModelGrader } + + # A MultiGrader object combines the output of multiple graders to produce a single score. 
+ variant :multi, -> { OpenAI::Graders::MultiGrader } + + # @!method self.variants + # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + end + + # The reference answer for the evaluation. + module ReferenceAnswer + extend OpenAI::Internal::Type::Union + + variant String + + variant OpenAI::Internal::Type::Unknown + + variant -> { OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } + + variant Float + + # @!method self.variants + # @return [Array(String, Object, Array, Float)] + + # @type [OpenAI::Internal::Type::Converter] + UnionMember2Array = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_response.rb b/lib/openai/models/fine_tuning/alpha/grader_run_response.rb new file mode 100644 index 00000000..2b6bdaff --- /dev/null +++ b/lib/openai/models/fine_tuning/alpha/grader_run_response.rb @@ -0,0 +1,175 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Alpha + # @see OpenAI::Resources::FineTuning::Alpha::Graders#run + class GraderRunResponse < OpenAI::Internal::Type::BaseModel + # @!attribute metadata + # + # @return [OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata] + required :metadata, -> { OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata } + + # @!attribute model_grader_token_usage_per_model + # + # @return [Hash{Symbol=>Object}] + required :model_grader_token_usage_per_model, + OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute reward + # + # @return [Float] + required :reward, Float + + # @!attribute sub_rewards + # + # @return [Hash{Symbol=>Object}] + required :sub_rewards, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(metadata:, model_grader_token_usage_per_model:, reward:, sub_rewards:) + # @param metadata [OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata] + # @param model_grader_token_usage_per_model [Hash{Symbol=>Object}] + # @param reward [Float] + # @param sub_rewards [Hash{Symbol=>Object}] + + # @see OpenAI::Models::FineTuning::Alpha::GraderRunResponse#metadata + class Metadata < OpenAI::Internal::Type::BaseModel + # @!attribute errors + # + # @return [OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors] + required :errors, -> { OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors } + + # @!attribute execution_time + # + # @return [Float] + required :execution_time, Float + + # @!attribute name + # + # @return [String] + required :name, String + + # @!attribute sampled_model_name + # + # @return [String, nil] + required :sampled_model_name, String, nil?: true + + # @!attribute scores + # + # @return [Hash{Symbol=>Object}] + required :scores, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute token_usage + # + # @return [Integer, nil] + required :token_usage, Integer, nil?: true + + # @!attribute type + # + # @return [String] + required :type, String + + # @!method initialize(errors:, execution_time:, name:, sampled_model_name:, scores:, token_usage:, type:) + # @param errors [OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors] + # @param execution_time [Float] + # @param name [String] + # @param sampled_model_name [String, nil] + # @param scores 
[Hash{Symbol=>Object}] + # @param token_usage [Integer, nil] + # @param type [String] + + # @see OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata#errors + class Errors < OpenAI::Internal::Type::BaseModel + # @!attribute formula_parse_error + # + # @return [Boolean] + required :formula_parse_error, OpenAI::Internal::Type::Boolean + + # @!attribute invalid_variable_error + # + # @return [Boolean] + required :invalid_variable_error, OpenAI::Internal::Type::Boolean + + # @!attribute model_grader_parse_error + # + # @return [Boolean] + required :model_grader_parse_error, OpenAI::Internal::Type::Boolean + + # @!attribute model_grader_refusal_error + # + # @return [Boolean] + required :model_grader_refusal_error, OpenAI::Internal::Type::Boolean + + # @!attribute model_grader_server_error + # + # @return [Boolean] + required :model_grader_server_error, OpenAI::Internal::Type::Boolean + + # @!attribute model_grader_server_error_details + # + # @return [String, nil] + required :model_grader_server_error_details, String, nil?: true + + # @!attribute other_error + # + # @return [Boolean] + required :other_error, OpenAI::Internal::Type::Boolean + + # @!attribute python_grader_runtime_error + # + # @return [Boolean] + required :python_grader_runtime_error, OpenAI::Internal::Type::Boolean + + # @!attribute python_grader_runtime_error_details + # + # @return [String, nil] + required :python_grader_runtime_error_details, String, nil?: true + + # @!attribute python_grader_server_error + # + # @return [Boolean] + required :python_grader_server_error, OpenAI::Internal::Type::Boolean + + # @!attribute python_grader_server_error_type + # + # @return [String, nil] + required :python_grader_server_error_type, String, nil?: true + + # @!attribute sample_parse_error + # + # @return [Boolean] + required :sample_parse_error, OpenAI::Internal::Type::Boolean + + # @!attribute truncated_observation_error + # + # @return [Boolean] + required :truncated_observation_error, OpenAI::Internal::Type::Boolean + + # @!attribute unresponsive_reward_error + # + # @return [Boolean] + required :unresponsive_reward_error, OpenAI::Internal::Type::Boolean + + # @!method initialize(formula_parse_error:, invalid_variable_error:, model_grader_parse_error:, model_grader_refusal_error:, model_grader_server_error:, model_grader_server_error_details:, other_error:, python_grader_runtime_error:, python_grader_runtime_error_details:, python_grader_server_error:, python_grader_server_error_type:, sample_parse_error:, truncated_observation_error:, unresponsive_reward_error:) + # @param formula_parse_error [Boolean] + # @param invalid_variable_error [Boolean] + # @param model_grader_parse_error [Boolean] + # @param model_grader_refusal_error [Boolean] + # @param model_grader_server_error [Boolean] + # @param model_grader_server_error_details [String, nil] + # @param other_error [Boolean] + # @param python_grader_runtime_error [Boolean] + # @param python_grader_runtime_error_details [String, nil] + # @param python_grader_server_error [Boolean] + # @param python_grader_server_error_type [String, nil] + # @param sample_parse_error [Boolean] + # @param truncated_observation_error [Boolean] + # @param unresponsive_reward_error [Boolean] + end + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb new file mode 100644 index 00000000..a252fb6a --- /dev/null +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb 
@@ -0,0 +1,49 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Alpha + # @see OpenAI::Resources::FineTuning::Alpha::Graders#validate + class GraderValidateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute grader + # The grader used for the fine-tuning job. + # + # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderValidateParams::Grader } + + # @!method initialize(grader:, request_options: {}) + # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # The grader used for the fine-tuning job. + module Grader + extend OpenAI::Internal::Type::Union + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant -> { OpenAI::Graders::StringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant -> { OpenAI::Graders::TextSimilarityGrader } + + # A PythonGrader object that runs a python script on the input. + variant -> { OpenAI::Graders::PythonGrader } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant -> { OpenAI::Graders::ScoreModelGrader } + + # A MultiGrader object combines the output of multiple graders to produce a single score. + variant -> { OpenAI::Graders::MultiGrader } + + # @!method self.variants + # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb new file mode 100644 index 00000000..a89553ee --- /dev/null +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + module Alpha + # @see OpenAI::Resources::FineTuning::Alpha::Graders#validate + class GraderValidateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute grader + # The grader used for the fine-tuning job. + # + # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader, nil] + optional :grader, union: -> { OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader } + + # @!method initialize(grader: nil) + # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + + # The grader used for the fine-tuning job. 
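+          # A hedged usage sketch: the resource path below is inferred from the
+          # `@see OpenAI::Resources::FineTuning::Alpha::Graders` references, and the
+          # grader fields are illustrative, not prescriptive:
+          #
+          #   grader = {type: :string_check, name: "exact", input: "{{sample.output_text}}", reference: "{{item.answer}}", operation: :eq}
+          #   client.fine_tuning.alpha.graders.validate(grader: grader)
+          #   client.fine_tuning.alpha.graders.run(grader: grader, model_sample: "...", reference_answer: "...")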
+ # + # @see OpenAI::Models::FineTuning::Alpha::GraderValidateResponse#grader + module Grader + extend OpenAI::Internal::Type::Union + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant -> { OpenAI::Graders::StringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant -> { OpenAI::Graders::TextSimilarityGrader } + + # A PythonGrader object that runs a python script on the input. + variant -> { OpenAI::Graders::PythonGrader } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant -> { OpenAI::Graders::ScoreModelGrader } + + # A MultiGrader object combines the output of multiple graders to produce a single score. + variant -> { OpenAI::Graders::MultiGrader } + + # @!method self.variants + # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + end + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb new file mode 100644 index 00000000..b57d2245 --- /dev/null +++ b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb @@ -0,0 +1,112 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + class DpoHyperparameters < OpenAI::Internal::Type::BaseModel + # @!attribute batch_size + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + # + # @return [Symbol, :auto, Integer, nil] + optional :batch_size, union: -> { OpenAI::FineTuning::DpoHyperparameters::BatchSize } + + # @!attribute beta + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + # + # @return [Symbol, :auto, Float, nil] + optional :beta, union: -> { OpenAI::FineTuning::DpoHyperparameters::Beta } + + # @!attribute learning_rate_multiplier + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + # + # @return [Symbol, :auto, Float, nil] + optional :learning_rate_multiplier, + union: -> { OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier } + + # @!attribute n_epochs + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + # + # @return [Symbol, :auto, Integer, nil] + optional :n_epochs, union: -> { OpenAI::FineTuning::DpoHyperparameters::NEpochs } + + # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::FineTuning::DpoHyperparameters} for more details. + # + # The hyperparameters used for the DPO fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # + # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + + # Number of examples in each batch. 
A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + # + # @see OpenAI::FineTuning::DpoHyperparameters#batch_size + module BatchSize + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + # + # @see OpenAI::FineTuning::DpoHyperparameters#beta + module Beta + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Float + + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] + end + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + # + # @see OpenAI::FineTuning::DpoHyperparameters#learning_rate_multiplier + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Float + + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] + end + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + # + # @see OpenAI::FineTuning::DpoHyperparameters#n_epochs + module NEpochs + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/dpo_method.rb b/lib/openai/models/fine_tuning/dpo_method.rb new file mode 100644 index 00000000..023cf72f --- /dev/null +++ b/lib/openai/models/fine_tuning/dpo_method.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + class DpoMethod < OpenAI::Internal::Type::BaseModel + # @!attribute hyperparameters + # The hyperparameters used for the DPO fine-tuning job. + # + # @return [OpenAI::FineTuning::DpoHyperparameters, nil] + optional :hyperparameters, -> { OpenAI::FineTuning::DpoHyperparameters } + + # @!method initialize(hyperparameters: nil) + # Configuration for the DPO fine-tuning method. + # + # @param hyperparameters [OpenAI::FineTuning::DpoHyperparameters] The hyperparameters used for the DPO fine-tuning job. + end + end + end +end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index aafd3b38..aaf073d0 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -323,274 +323,42 @@ module Status # @see OpenAI::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # + # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] + required :type, enum: -> { OpenAI::FineTuning::FineTuningJob::Method::Type } + # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::FineTuningJob::Method::Dpo, nil] - optional :dpo, -> { OpenAI::FineTuning::FineTuningJob::Method::Dpo } + # @return [OpenAI::FineTuning::DpoMethod, nil] + optional :dpo, -> { OpenAI::FineTuning::DpoMethod } - # @!attribute supervised - # Configuration for the supervised fine-tuning method. + # @!attribute reinforcement + # Configuration for the reinforcement fine-tuning method. 
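+        # (Illustrative shape, all values hypothetical:
+        # `{grader: {type: :python, ...}, hyperparameters: {n_epochs: :auto}}`.)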
# - # @return [OpenAI::FineTuning::FineTuningJob::Method::Supervised, nil] - optional :supervised, -> { OpenAI::FineTuning::FineTuningJob::Method::Supervised } + # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } - # @!attribute type - # The type of method. Is either `supervised` or `dpo`. + # @!attribute supervised + # Configuration for the supervised fine-tuning method. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type, nil] - optional :type, enum: -> { OpenAI::FineTuning::FineTuningJob::Method::Type } + # @return [OpenAI::FineTuning::SupervisedMethod, nil] + optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } - # @!method initialize(dpo: nil, supervised: nil, type: nil) + # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::FineTuning::FineTuningJob::Method::Dpo] Configuration for the DPO fine-tuning method. + # @param type [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param supervised [OpenAI::FineTuning::FineTuningJob::Method::Supervised] Configuration for the supervised fine-tuning method. + # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param type [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised` or `dpo`. - - # @see OpenAI::FineTuning::FineTuningJob::Method#dpo - class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute hyperparameters - # The hyperparameters used for the fine-tuning job. - # - # @return [OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, nil] - optional :hyperparameters, -> { OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } - - # @!method initialize(hyperparameters: nil) - # Configuration for the DPO fine-tuning method. - # - # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. - - # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo#hyperparameters - class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute batch_size - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @return [Symbol, :auto, Integer, nil] - optional :batch_size, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize - } - - # @!attribute beta - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - # - # @return [Symbol, :auto, Float, nil] - optional :beta, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta - } - - # @!attribute learning_rate_multiplier - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @return [Symbol, :auto, Float, nil] - optional :learning_rate_multiplier, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier - } - - # @!attribute n_epochs - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. 
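+        # For example (values illustrative), a DPO job's method may deserialize as
+        # `{type: :dpo, dpo: {hyperparameters: {beta: :auto, n_epochs: 3}}}`.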
- # - # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs - } - - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters} for more - # details. - # - # The hyperparameters used for the fine-tuning job. - # - # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # - # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight - # - # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # - # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#batch_size - module BatchSize - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#beta - module Beta - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#learning_rate_multiplier - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters#n_epochs - module NEpochs - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - end - end - - # @see OpenAI::FineTuning::FineTuningJob::Method#supervised - class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute hyperparameters - # The hyperparameters used for the fine-tuning job. - # - # @return [OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, nil] - optional :hyperparameters, - -> { - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - } - - # @!method initialize(hyperparameters: nil) - # Configuration for the supervised fine-tuning method. - # - # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. - - # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised#hyperparameters - class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute batch_size - # Number of examples in each batch. 
A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @return [Symbol, :auto, Integer, nil] - optional :batch_size, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize - } - - # @!attribute learning_rate_multiplier - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @return [Symbol, :auto, Float, nil] - optional :learning_rate_multiplier, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier - } - - # @!attribute n_epochs - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, - union: -> { - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs - } - - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters} for - # more details. - # - # The hyperparameters used for the fine-tuning job. - # - # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # - # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # - # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#batch_size - module BatchSize - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#learning_rate_multiplier - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @see OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters#n_epochs - module NEpochs - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - end - end + # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # + # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. - # The type of method. Is either `supervised` or `dpo`. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. 
# # @see OpenAI::FineTuning::FineTuningJob::Method#type module Type @@ -598,6 +366,7 @@ module Type SUPERVISED = :supervised DPO = :dpo + REINFORCEMENT = :reinforcement # @!method self.values # @return [Array] diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index c86b0f69..de31c8f6 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -317,277 +317,42 @@ class Wandb < OpenAI::Internal::Type::BaseModel end class Method < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # + # @return [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] + required :type, enum: -> { OpenAI::FineTuning::JobCreateParams::Method::Type } + # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::JobCreateParams::Method::Dpo, nil] - optional :dpo, -> { OpenAI::FineTuning::JobCreateParams::Method::Dpo } + # @return [OpenAI::FineTuning::DpoMethod, nil] + optional :dpo, -> { OpenAI::FineTuning::DpoMethod } - # @!attribute supervised - # Configuration for the supervised fine-tuning method. + # @!attribute reinforcement + # Configuration for the reinforcement fine-tuning method. # - # @return [OpenAI::FineTuning::JobCreateParams::Method::Supervised, nil] - optional :supervised, -> { OpenAI::FineTuning::JobCreateParams::Method::Supervised } + # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } - # @!attribute type - # The type of method. Is either `supervised` or `dpo`. + # @!attribute supervised + # Configuration for the supervised fine-tuning method. # - # @return [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type, nil] - optional :type, enum: -> { OpenAI::FineTuning::JobCreateParams::Method::Type } + # @return [OpenAI::FineTuning::SupervisedMethod, nil] + optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } - # @!method initialize(dpo: nil, supervised: nil, type: nil) + # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param dpo [OpenAI::FineTuning::JobCreateParams::Method::Dpo] Configuration for the DPO fine-tuning method. + # @param type [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param supervised [OpenAI::FineTuning::JobCreateParams::Method::Supervised] Configuration for the supervised fine-tuning method. + # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param type [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised` or `dpo`. - - # @see OpenAI::FineTuning::JobCreateParams::Method#dpo - class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute hyperparameters - # The hyperparameters used for the fine-tuning job. - # - # @return [OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, nil] - optional :hyperparameters, - -> { - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - } - - # @!method initialize(hyperparameters: nil) - # Configuration for the DPO fine-tuning method. - # - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] The hyperparameters used for the fine-tuning job. 
- - # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo#hyperparameters - class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute batch_size - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @return [Symbol, :auto, Integer, nil] - optional :batch_size, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize - } - - # @!attribute beta - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - # - # @return [Symbol, :auto, Float, nil] - optional :beta, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta - } - - # @!attribute learning_rate_multiplier - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @return [Symbol, :auto, Float, nil] - optional :learning_rate_multiplier, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier - } - - # @!attribute n_epochs - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs - } - - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters} for more - # details. - # - # The hyperparameters used for the fine-tuning job. - # - # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # - # @param beta [Symbol, :auto, Float] The beta value for the DPO method. A higher beta value will increase the weight - # - # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # - # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#batch_size - module BatchSize - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#beta - module Beta - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#learning_rate_multiplier - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # The number of epochs to train the model for. 
An epoch refers to one full cycle - # through the training dataset. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters#n_epochs - module NEpochs - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - end - end - - # @see OpenAI::FineTuning::JobCreateParams::Method#supervised - class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute hyperparameters - # The hyperparameters used for the fine-tuning job. - # - # @return [OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, nil] - optional :hyperparameters, - -> { - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - } - - # @!method initialize(hyperparameters: nil) - # Configuration for the supervised fine-tuning method. - # - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] The hyperparameters used for the fine-tuning job. - - # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised#hyperparameters - class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute batch_size - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @return [Symbol, :auto, Integer, nil] - optional :batch_size, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize - } - - # @!attribute learning_rate_multiplier - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - # - # @return [Symbol, :auto, Float, nil] - optional :learning_rate_multiplier, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier - } - - # @!attribute n_epochs - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @return [Symbol, :auto, Integer, nil] - optional :n_epochs, - union: -> { - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs - } - - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters} for - # more details. - # - # The hyperparameters used for the fine-tuning job. - # - # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter - # - # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a - # - # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#batch_size - module BatchSize - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. 
- # - # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#learning_rate_multiplier - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Float - - # @!method self.variants - # @return [Array(Symbol, :auto, Float)] - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - # - # @see OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters#n_epochs - module NEpochs - extend OpenAI::Internal::Type::Union - - variant const: :auto - - variant Integer - - # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] - end - end - end + # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # + # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. - # The type of method. Is either `supervised` or `dpo`. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # # @see OpenAI::FineTuning::JobCreateParams::Method#type module Type @@ -595,6 +360,7 @@ module Type SUPERVISED = :supervised DPO = :dpo + REINFORCEMENT = :reinforcement # @!method self.values # @return [Array] diff --git a/lib/openai/models/fine_tuning/job_pause_params.rb b/lib/openai/models/fine_tuning/job_pause_params.rb new file mode 100644 index 00000000..739a90da --- /dev/null +++ b/lib/openai/models/fine_tuning/job_pause_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#pause + class JobPauseParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/fine_tuning/job_resume_params.rb b/lib/openai/models/fine_tuning/job_resume_params.rb new file mode 100644 index 00000000..97d4c317 --- /dev/null +++ b/lib/openai/models/fine_tuning/job_resume_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + # @see OpenAI::Resources::FineTuning::Jobs#resume + class JobResumeParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb new file mode 100644 index 00000000..7df826e1 --- /dev/null +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -0,0 +1,181 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel + # @!attribute batch_size + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. 
+ # + # @return [Symbol, :auto, Integer, nil] + optional :batch_size, union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize } + + # @!attribute compute_multiplier + # Multiplier on amount of compute used for exploring search space during training. + # + # @return [Symbol, :auto, Float, nil] + optional :compute_multiplier, + union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier } + + # @!attribute eval_interval + # The number of training steps between evaluation runs. + # + # @return [Symbol, :auto, Integer, nil] + optional :eval_interval, union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval } + + # @!attribute eval_samples + # Number of evaluation samples to generate per training step. + # + # @return [Symbol, :auto, Integer, nil] + optional :eval_samples, union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples } + + # @!attribute learning_rate_multiplier + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + # + # @return [Symbol, :auto, Float, nil] + optional :learning_rate_multiplier, + union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier } + + # @!attribute n_epochs + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + # + # @return [Symbol, :auto, Integer, nil] + optional :n_epochs, union: -> { OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs } + + # @!attribute reasoning_effort + # Level of reasoning effort. + # + # @return [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] + optional :reasoning_effort, + enum: -> { + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort + } + + # @!method initialize(batch_size: nil, compute_multiplier: nil, eval_interval: nil, eval_samples: nil, learning_rate_multiplier: nil, n_epochs: nil, reasoning_effort: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::FineTuning::ReinforcementHyperparameters} for more details. + # + # The hyperparameters used for the reinforcement fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # + # @param compute_multiplier [Symbol, :auto, Float] Multiplier on amount of compute used for exploring search space during training. + # + # @param eval_interval [Symbol, :auto, Integer] The number of training steps between evaluation runs. + # + # @param eval_samples [Symbol, :auto, Integer] Number of evaluation samples to generate per training step. + # + # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a + # + # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t + # + # @param reasoning_effort [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort] Level of reasoning effort. + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. 
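+          # (Illustrative values: pass the literal `:auto`, or a concrete size such
+          # as `batch_size: 64`.)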
+ # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#batch_size + module BatchSize + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + + # Multiplier on amount of compute used for exploring search space during training. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#compute_multiplier + module ComputeMultiplier + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Float + + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] + end + + # The number of training steps between evaluation runs. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_interval + module EvalInterval + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + + # Number of evaluation samples to generate per training step. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_samples + module EvalSamples + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#learning_rate_multiplier + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Float + + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] + end + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#n_epochs + module NEpochs + extend OpenAI::Internal::Type::Union + + variant const: :auto + + variant Integer + + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] + end + + # Level of reasoning effort. + # + # @see OpenAI::FineTuning::ReinforcementHyperparameters#reasoning_effort + module ReasoningEffort + extend OpenAI::Internal::Type::Enum + + DEFAULT = :default + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/fine_tuning/reinforcement_method.rb b/lib/openai/models/fine_tuning/reinforcement_method.rb new file mode 100644 index 00000000..d2955f1e --- /dev/null +++ b/lib/openai/models/fine_tuning/reinforcement_method.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module FineTuning + class ReinforcementMethod < OpenAI::Internal::Type::BaseModel + # @!attribute grader + # The grader used for the fine-tuning job. + # + # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + required :grader, union: -> { OpenAI::FineTuning::ReinforcementMethod::Grader } + + # @!attribute hyperparameters + # The hyperparameters used for the reinforcement fine-tuning job. + # + # @return [OpenAI::FineTuning::ReinforcementHyperparameters, nil] + optional :hyperparameters, -> { OpenAI::FineTuning::ReinforcementHyperparameters } + + # @!method initialize(grader:, hyperparameters: nil) + # Configuration for the reinforcement fine-tuning method. 
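+        # A hedged sketch (grader fields and hyperparameter values are
+        # illustrative, not prescriptive):
+        #
+        #   OpenAI::FineTuning::ReinforcementMethod.new(
+        #     grader: {type: :string_check, name: "exact", input: "{{sample.output_text}}", reference: "{{item.answer}}", operation: :eq},
+        #     hyperparameters: {n_epochs: :auto, reasoning_effort: :medium}
+        #   )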
+        #
+        #   @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job.
+        #
+        #   @param hyperparameters [OpenAI::FineTuning::ReinforcementHyperparameters] The hyperparameters used for the reinforcement fine-tuning job.
+
+        # The grader used for the fine-tuning job.
+        #
+        # @see OpenAI::FineTuning::ReinforcementMethod#grader
+        module Grader
+          extend OpenAI::Internal::Type::Union
+
+          # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation.
+          variant -> { OpenAI::Graders::StringCheckGrader }
+
+          # A TextSimilarityGrader object which grades text based on similarity metrics.
+          variant -> { OpenAI::Graders::TextSimilarityGrader }
+
+          # A PythonGrader object that runs a python script on the input.
+          variant -> { OpenAI::Graders::PythonGrader }
+
+          # A ScoreModelGrader object that uses a model to assign a score to the input.
+          variant -> { OpenAI::Graders::ScoreModelGrader }
+
+          # A MultiGrader object combines the output of multiple graders to produce a single score.
+          variant -> { OpenAI::Graders::MultiGrader }
+
+          # @!method self.variants
+          #   @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)]
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb
new file mode 100644
index 00000000..612870f0
--- /dev/null
+++ b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module FineTuning
+      class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel
+        # @!attribute batch_size
+        #   Number of examples in each batch. A larger batch size means that model
+        #   parameters are updated less frequently, but with lower variance.
+        #
+        #   @return [Symbol, :auto, Integer, nil]
+        optional :batch_size, union: -> { OpenAI::FineTuning::SupervisedHyperparameters::BatchSize }
+
+        # @!attribute learning_rate_multiplier
+        #   Scaling factor for the learning rate. A smaller learning rate may be useful to
+        #   avoid overfitting.
+        #
+        #   @return [Symbol, :auto, Float, nil]
+        optional :learning_rate_multiplier,
+                 union: -> { OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier }
+
+        # @!attribute n_epochs
+        #   The number of epochs to train the model for. An epoch refers to one full cycle
+        #   through the training dataset.
+        #
+        #   @return [Symbol, :auto, Integer, nil]
+        optional :n_epochs, union: -> { OpenAI::FineTuning::SupervisedHyperparameters::NEpochs }
+
+        # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::FineTuning::SupervisedHyperparameters} for more details.
+        #
+        #   The hyperparameters used for the fine-tuning job.
+        #
+        #   @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter
+        #
+        #   @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a
+        #
+        #   @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t
+
+        # Number of examples in each batch. A larger batch size means that model
+        # parameters are updated less frequently, but with lower variance.
+        #
+        # @see OpenAI::FineTuning::SupervisedHyperparameters#batch_size
+        module BatchSize
+          extend OpenAI::Internal::Type::Union
+
+          variant const: :auto
+
+          variant Integer
+
+          # @!method self.variants
+          #   @return [Array(Symbol, :auto, Integer)]
+        end
+
+        # Scaling factor for the learning rate. A smaller learning rate may be useful to
+        # avoid overfitting.
+        #
+        # @see OpenAI::FineTuning::SupervisedHyperparameters#learning_rate_multiplier
+        module LearningRateMultiplier
+          extend OpenAI::Internal::Type::Union
+
+          variant const: :auto
+
+          variant Float
+
+          # @!method self.variants
+          #   @return [Array(Symbol, :auto, Float)]
+        end
+
+        # The number of epochs to train the model for. An epoch refers to one full cycle
+        # through the training dataset.
+        #
+        # @see OpenAI::FineTuning::SupervisedHyperparameters#n_epochs
+        module NEpochs
+          extend OpenAI::Internal::Type::Union
+
+          variant const: :auto
+
+          variant Integer
+
+          # @!method self.variants
+          #   @return [Array(Symbol, :auto, Integer)]
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/fine_tuning/supervised_method.rb b/lib/openai/models/fine_tuning/supervised_method.rb
new file mode 100644
index 00000000..f45655a0
--- /dev/null
+++ b/lib/openai/models/fine_tuning/supervised_method.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module FineTuning
+      class SupervisedMethod < OpenAI::Internal::Type::BaseModel
+        # @!attribute hyperparameters
+        #   The hyperparameters used for the fine-tuning job.
+        #
+        #   @return [OpenAI::FineTuning::SupervisedHyperparameters, nil]
+        optional :hyperparameters, -> { OpenAI::FineTuning::SupervisedHyperparameters }
+
+        # @!method initialize(hyperparameters: nil)
+        #   Configuration for the supervised fine-tuning method.
+        #
+        #   @param hyperparameters [OpenAI::FineTuning::SupervisedHyperparameters] The hyperparameters used for the fine-tuning job.
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb
new file mode 100644
index 00000000..99e0e087
--- /dev/null
+++ b/lib/openai/models/graders/label_model_grader.rb
@@ -0,0 +1,171 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module Graders
+      class LabelModelGrader < OpenAI::Internal::Type::BaseModel
+        # @!attribute input
+        #
+        #   @return [Array<OpenAI::Graders::LabelModelGrader::Input>]
+        required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::LabelModelGrader::Input] }
+
+        # @!attribute labels
+        #   The labels to assign to each item in the evaluation.
+        #
+        #   @return [Array<String>]
+        required :labels, OpenAI::Internal::Type::ArrayOf[String]
+
+        # @!attribute model
+        #   The model to use for the evaluation. Must support structured outputs.
+        #
+        #   @return [String]
+        required :model, String
+
+        # @!attribute name
+        #   The name of the grader.
+        #
+        #   @return [String]
+        required :name, String
+
+        # @!attribute passing_labels
+        #   The labels that indicate a passing result. Must be a subset of labels.
+        #
+        #   @return [Array<String>]
+        required :passing_labels, OpenAI::Internal::Type::ArrayOf[String]
+
+        # @!attribute type
+        #   The object type, which is always `label_model`.
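+        #   This field is a constant: `.new` fills in `:label_model` by default,
+        #   so callers normally omit it.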
+        #
+        #   @return [Symbol, :label_model]
+        required :type, const: :label_model
+
+        # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model)
+        #   A LabelModelGrader object which uses a model to assign labels to each item in
+        #   the evaluation.
+        #
+        #   @param input [Array<OpenAI::Graders::LabelModelGrader::Input>]
+        #
+        #   @param labels [Array<String>] The labels to assign to each item in the evaluation.
+        #
+        #   @param model [String] The model to use for the evaluation. Must support structured outputs.
+        #
+        #   @param name [String] The name of the grader.
+        #
+        #   @param passing_labels [Array<String>] The labels that indicate a passing result. Must be a subset of labels.
+        #
+        #   @param type [Symbol, :label_model] The object type, which is always `label_model`.
+
+        class Input < OpenAI::Internal::Type::BaseModel
+          # @!attribute content
+          #   Text inputs to the model - can contain template strings.
+          #
+          #   @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText]
+          required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content }
+
+          # @!attribute role
+          #   The role of the message input. One of `user`, `assistant`, `system`, or
+          #   `developer`.
+          #
+          #   @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role]
+          required :role, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Role }
+
+          # @!attribute type
+          #   The type of the message input. Always `message`.
+          #
+          #   @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type, nil]
+          optional :type, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Type }
+
+          # @!method initialize(content:, role:, type: nil)
+          #   Some parameter documentation has been truncated, see
+          #   {OpenAI::Graders::LabelModelGrader::Input} for more details.
+          #
+          #   A message input to the model with a role indicating instruction following
+          #   hierarchy. Instructions given with the `developer` or `system` role take
+          #   precedence over instructions given with the `user` role. Messages with the
+          #   `assistant` role are presumed to have been generated by the model in previous
+          #   interactions.
+          #
+          #   @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings.
+          #
+          #   @param role [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or
+          #
+          #   @param type [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`.
+
+          # Text inputs to the model - can contain template strings.
+          #
+          # @see OpenAI::Graders::LabelModelGrader::Input#content
+          module Content
+            extend OpenAI::Internal::Type::Union
+
+            # A text input to the model.
+            variant String
+
+            # A text input to the model.
+            variant -> { OpenAI::Responses::ResponseInputText }
+
+            # A text output from the model.
+            variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::OutputText }
+
+            class OutputText < OpenAI::Internal::Type::BaseModel
+              # @!attribute text
+              #   The text output from the model.
+              #
+              #   @return [String]
+              required :text, String
+
+              # @!attribute type
+              #   The type of the output text. Always `output_text`.
+              #
+              #   @return [Symbol, :output_text]
+              required :type, const: :output_text
+
+              # @!method initialize(text:, type: :output_text)
+              #   Some parameter documentation has been truncated, see
+              #   {OpenAI::Graders::LabelModelGrader::Input::Content::OutputText} for more
+              #   details.
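+              #
+              #   An illustrative construction (the text shown is hypothetical):
+              #
+              #     OpenAI::Graders::LabelModelGrader::Input::Content::OutputText.new(
+              #       text: "The capital of France is Paris."
+              #     )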
+ # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Graders::LabelModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Graders::LabelModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + end + end + + LabelModelGrader = Graders::LabelModelGrader + end +end diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb new file mode 100644 index 00000000..3cb7bf6b --- /dev/null +++ b/lib/openai/models/graders/multi_grader.rb @@ -0,0 +1,71 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Graders + class MultiGrader < OpenAI::Internal::Type::BaseModel + # @!attribute calculate_output + # A formula to calculate the output based on grader results. + # + # @return [String] + required :calculate_output, String + + # @!attribute graders + # + # @return [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + required :graders, -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Graders::MultiGrader::Grader] } + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute type + # The type of grader. + # + # @return [Symbol, :multi] + required :type, const: :multi + + # @!method initialize(calculate_output:, graders:, name:, type: :multi) + # A MultiGrader object combines the output of multiple graders to produce a single + # score. + # + # @param calculate_output [String] A formula to calculate the output based on grader results. + # + # @param graders [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + # + # @param name [String] The name of the grader. + # + # @param type [Symbol, :multi] The type of grader. + + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. + module Grader + extend OpenAI::Internal::Type::Union + + # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. + variant -> { OpenAI::Graders::StringCheckGrader } + + # A TextSimilarityGrader object which grades text based on similarity metrics. + variant -> { OpenAI::Graders::TextSimilarityGrader } + + # A PythonGrader object that runs a python script on the input. + variant -> { OpenAI::Graders::PythonGrader } + + # A ScoreModelGrader object that uses a model to assign a score to the input. + variant -> { OpenAI::Graders::ScoreModelGrader } + + # A LabelModelGrader object which uses a model to assign labels to each item + # in the evaluation. 
+ variant -> { OpenAI::Graders::LabelModelGrader } + + # @!method self.variants + # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader)] + end + end + end + + MultiGrader = Graders::MultiGrader + end +end diff --git a/lib/openai/models/graders/python_grader.rb b/lib/openai/models/graders/python_grader.rb new file mode 100644 index 00000000..f5fcae1c --- /dev/null +++ b/lib/openai/models/graders/python_grader.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Graders + class PythonGrader < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute source + # The source code of the python script. + # + # @return [String] + required :source, String + + # @!attribute type + # The object type, which is always `python`. + # + # @return [Symbol, :python] + required :type, const: :python + + # @!attribute image_tag + # The image tag to use for the python script. + # + # @return [String, nil] + optional :image_tag, String + + # @!method initialize(name:, source:, image_tag: nil, type: :python) + # A PythonGrader object that runs a python script on the input. + # + # @param name [String] The name of the grader. + # + # @param source [String] The source code of the python script. + # + # @param image_tag [String] The image tag to use for the python script. + # + # @param type [Symbol, :python] The object type, which is always `python`. + end + end + + PythonGrader = Graders::PythonGrader + end +end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb new file mode 100644 index 00000000..62cf1a6c --- /dev/null +++ b/lib/openai/models/graders/score_model_grader.rb @@ -0,0 +1,171 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Graders + class ScoreModelGrader < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input text. This may include template strings. + # + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::ScoreModelGrader::Input] } + + # @!attribute model + # The model to use for the evaluation. + # + # @return [String] + required :model, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute type + # The object type, which is always `score_model`. + # + # @return [Symbol, :score_model] + required :type, const: :score_model + + # @!attribute range + # The range of the score. Defaults to `[0, 1]`. + # + # @return [Array, nil] + optional :range, OpenAI::Internal::Type::ArrayOf[Float] + + # @!attribute sampling_params + # The sampling parameters for the model. + # + # @return [Object, nil] + optional :sampling_params, OpenAI::Internal::Type::Unknown + + # @!method initialize(input:, model:, name:, range: nil, sampling_params: nil, type: :score_model) + # A ScoreModelGrader object that uses a model to assign a score to the input. + # + # @param input [Array] The input text. This may include template strings. + # + # @param model [String] The model to use for the evaluation. + # + # @param name [String] The name of the grader. + # + # @param range [Array] The range of the score. Defaults to `[0, 1]`. + # + # @param sampling_params [Object] The sampling parameters for the model. 
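+        #
+        #   A minimal construction sketch (the model name and template string are
+        #   hypothetical):
+        #
+        #     OpenAI::Graders::ScoreModelGrader.new(
+        #       input: [{content: "Rate this answer: {{sample.output_text}}", role: :user}],
+        #       model: "gpt-4o",
+        #       name: "quality_score"
+        #     )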
+        #
+        #   @param type [Symbol, :score_model] The object type, which is always `score_model`.
+
+        class Input < OpenAI::Internal::Type::BaseModel
+          # @!attribute content
+          #   Text inputs to the model - can contain template strings.
+          #
+          #   @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText]
+          required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content }
+
+          # @!attribute role
+          #   The role of the message input. One of `user`, `assistant`, `system`, or
+          #   `developer`.
+          #
+          #   @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role]
+          required :role, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Role }
+
+          # @!attribute type
+          #   The type of the message input. Always `message`.
+          #
+          #   @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type, nil]
+          optional :type, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Type }
+
+          # @!method initialize(content:, role:, type: nil)
+          #   Some parameter documentation has been truncated, see
+          #   {OpenAI::Graders::ScoreModelGrader::Input} for more details.
+          #
+          #   A message input to the model with a role indicating instruction following
+          #   hierarchy. Instructions given with the `developer` or `system` role take
+          #   precedence over instructions given with the `user` role. Messages with the
+          #   `assistant` role are presumed to have been generated by the model in previous
+          #   interactions.
+          #
+          #   @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings.
+          #
+          #   @param role [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or
+          #
+          #   @param type [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`.
+
+          # Text inputs to the model - can contain template strings.
+          #
+          # @see OpenAI::Graders::ScoreModelGrader::Input#content
+          module Content
+            extend OpenAI::Internal::Type::Union
+
+            # A text input to the model.
+            variant String
+
+            # A text input to the model.
+            variant -> { OpenAI::Responses::ResponseInputText }
+
+            # A text output from the model.
+            variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText }
+
+            class OutputText < OpenAI::Internal::Type::BaseModel
+              # @!attribute text
+              #   The text output from the model.
+              #
+              #   @return [String]
+              required :text, String
+
+              # @!attribute type
+              #   The type of the output text. Always `output_text`.
+              #
+              #   @return [Symbol, :output_text]
+              required :type, const: :output_text
+
+              # @!method initialize(text:, type: :output_text)
+              #   Some parameter documentation has been truncated, see
+              #   {OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText} for more
+              #   details.
+              #
+              #   A text output from the model.
+              #
+              #   @param text [String] The text output from the model.
+              #
+              #   @param type [Symbol, :output_text] The type of the output text. Always `output_text`.
+            end
+
+            # @!method self.variants
+            #   @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)]
+          end
+
+          # The role of the message input. One of `user`, `assistant`, `system`, or
+          # `developer`.
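+          #
+          # As noted on {OpenAI::Graders::ScoreModelGrader::Input}, instructions
+          # from `developer` or `system` messages take precedence over those from
+          # `user` messages.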
+ # + # @see OpenAI::Graders::ScoreModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Graders::ScoreModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + end + end + + ScoreModelGrader = Graders::ScoreModelGrader + end +end diff --git a/lib/openai/models/graders/string_check_grader.rb b/lib/openai/models/graders/string_check_grader.rb new file mode 100644 index 00000000..90aeb689 --- /dev/null +++ b/lib/openai/models/graders/string_check_grader.rb @@ -0,0 +1,70 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Graders + class StringCheckGrader < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input text. This may include template strings. + # + # @return [String] + required :input, String + + # @!attribute name + # The name of the grader. + # + # @return [String] + required :name, String + + # @!attribute operation + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + # @return [Symbol, OpenAI::Graders::StringCheckGrader::Operation] + required :operation, enum: -> { OpenAI::Graders::StringCheckGrader::Operation } + + # @!attribute reference + # The reference text. This may include template strings. + # + # @return [String] + required :reference, String + + # @!attribute type + # The object type, which is always `string_check`. + # + # @return [Symbol, :string_check] + required :type, const: :string_check + + # @!method initialize(input:, name:, operation:, reference:, type: :string_check) + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. + # + # @param input [String] The input text. This may include template strings. + # + # @param name [String] The name of the grader. + # + # @param operation [Symbol, OpenAI::Graders::StringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + # @param reference [String] The reference text. This may include template strings. + # + # @param type [Symbol, :string_check] The object type, which is always `string_check`. + + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # + # @see OpenAI::Graders::StringCheckGrader#operation + module Operation + extend OpenAI::Internal::Type::Enum + + EQ = :eq + NE = :ne + LIKE = :like + ILIKE = :ilike + + # @!method self.values + # @return [Array] + end + end + end + + StringCheckGrader = Graders::StringCheckGrader + end +end diff --git a/lib/openai/models/graders/text_similarity_grader.rb b/lib/openai/models/graders/text_similarity_grader.rb new file mode 100644 index 00000000..30b7eaad --- /dev/null +++ b/lib/openai/models/graders/text_similarity_grader.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Graders + class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel + # @!attribute evaluation_metric + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. 
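+        #   For example (illustrative only): `evaluation_metric: :fuzzy_match`.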
+        #
+        #   @return [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric]
+        required :evaluation_metric, enum: -> { OpenAI::Graders::TextSimilarityGrader::EvaluationMetric }
+
+        # @!attribute input
+        #   The text being graded.
+        #
+        #   @return [String]
+        required :input, String
+
+        # @!attribute name
+        #   The name of the grader.
+        #
+        #   @return [String]
+        required :name, String
+
+        # @!attribute reference
+        #   The text being graded against.
+        #
+        #   @return [String]
+        required :reference, String
+
+        # @!attribute type
+        #   The type of grader.
+        #
+        #   @return [Symbol, :text_similarity]
+        required :type, const: :text_similarity
+
+        # @!method initialize(evaluation_metric:, input:, name:, reference:, type: :text_similarity)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::Graders::TextSimilarityGrader} for more details.
+        #
+        #   A TextSimilarityGrader object which grades text based on similarity metrics.
+        #
+        #   @param evaluation_metric [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r
+        #
+        #   @param input [String] The text being graded.
+        #
+        #   @param name [String] The name of the grader.
+        #
+        #   @param reference [String] The text being graded against.
+        #
+        #   @param type [Symbol, :text_similarity] The type of grader.
+
+        # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`,
+        # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`.
+        #
+        # @see OpenAI::Graders::TextSimilarityGrader#evaluation_metric
+        module EvaluationMetric
+          extend OpenAI::Internal::Type::Enum
+
+          FUZZY_MATCH = :fuzzy_match
+          BLEU = :bleu
+          GLEU = :gleu
+          METEOR = :meteor
+          ROUGE_1 = :rouge_1
+          ROUGE_2 = :rouge_2
+          ROUGE_3 = :rouge_3
+          ROUGE_4 = :rouge_4
+          ROUGE_5 = :rouge_5
+          ROUGE_L = :rouge_l
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+      end
+    end
+
+    TextSimilarityGrader = Graders::TextSimilarityGrader
+  end
+end
diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb
index 23f0de0b..5d87bc09 100644
--- a/lib/openai/resources/evals.rb
+++ b/lib/openai/resources/evals.rb
@@ -17,9 +17,9 @@ class Evals
       #
       # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {})
       #
-      # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs] The configuration for the data source used for the evaluation runs.
+      # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs.
       #
-      # @param testing_criteria [Array<OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::EvalStringCheckGrader, OpenAI::EvalTextSimilarityGrader, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel>] A list of graders for all eval runs in this group.
+      # @param testing_criteria [Array<OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel>] A list of graders for all eval runs in this group.
       #
       # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
       #
diff --git a/lib/openai/resources/fine_tuning.rb b/lib/openai/resources/fine_tuning.rb
index 051803a7..9f5fc2c0 100644
--- a/lib/openai/resources/fine_tuning.rb
+++ b/lib/openai/resources/fine_tuning.rb
@@ -3,19 +3,27 @@
 module OpenAI
   module Resources
     class FineTuning
+      # @return [OpenAI::Resources::FineTuning::Methods]
+      attr_reader :methods_
+
       # @return [OpenAI::Resources::FineTuning::Jobs]
       attr_reader :jobs
 
       # @return [OpenAI::Resources::FineTuning::Checkpoints]
       attr_reader :checkpoints
 
+      # @return [OpenAI::Resources::FineTuning::Alpha]
+      attr_reader :alpha
+
       # @api private
       #
       # @param client [OpenAI::Client]
       def initialize(client:)
         @client = client
+        @methods_ = OpenAI::Resources::FineTuning::Methods.new(client: client)
         @jobs = OpenAI::Resources::FineTuning::Jobs.new(client: client)
         @checkpoints = OpenAI::Resources::FineTuning::Checkpoints.new(client: client)
+        @alpha = OpenAI::Resources::FineTuning::Alpha.new(client: client)
       end
     end
   end
diff --git a/lib/openai/resources/fine_tuning/alpha.rb b/lib/openai/resources/fine_tuning/alpha.rb
new file mode 100644
index 00000000..4b3d6bc8
--- /dev/null
+++ b/lib/openai/resources/fine_tuning/alpha.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Resources
+    class FineTuning
+      class Alpha
+        # @return [OpenAI::Resources::FineTuning::Alpha::Graders]
+        attr_reader :graders
+
+        # @api private
+        #
+        # @param client [OpenAI::Client]
+        def initialize(client:)
+          @client = client
+          @graders = OpenAI::Resources::FineTuning::Alpha::Graders.new(client: client)
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/resources/fine_tuning/alpha/graders.rb b/lib/openai/resources/fine_tuning/alpha/graders.rb
new file mode 100644
index 00000000..5c7ba54b
--- /dev/null
+++ b/lib/openai/resources/fine_tuning/alpha/graders.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Resources
+    class FineTuning
+      class Alpha
+        class Graders
+          # Run a grader.
+          #
+          # @overload run(grader:, model_sample:, reference_answer:, request_options: {})
+          #
+          # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job.
+          #
+          # @param model_sample [String] The model sample to be evaluated.
+          #
+          # @param reference_answer [String, Object, Array, Float] The reference answer for the evaluation.
+          #
+          # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+          #
+          # @return [OpenAI::Models::FineTuning::Alpha::GraderRunResponse]
+          #
+          # @see OpenAI::Models::FineTuning::Alpha::GraderRunParams
+          def run(params)
+            parsed, options = OpenAI::FineTuning::Alpha::GraderRunParams.dump_request(params)
+            @client.request(
+              method: :post,
+              path: "fine_tuning/alpha/graders/run",
+              body: parsed,
+              model: OpenAI::Models::FineTuning::Alpha::GraderRunResponse,
+              options: options
+            )
+          end
+
+          # Validate a grader.
+          #
+          # @overload validate(grader:, request_options: {})
+          #
+          # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job.
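+          #
+          #   A minimal sketch (hypothetical grader hash; assumes a configured
+          #   `client = OpenAI::Client.new`):
+          #
+          #     client.fine_tuning.alpha.graders.validate(
+          #       grader: {
+          #         type: :string_check,
+          #         name: "exact_match",
+          #         input: "{{sample.output_text}}",
+          #         operation: :eq,
+          #         reference: "{{item.answer}}"
+          #       }
+          #     )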
+          #
+          # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+          #
+          # @return [OpenAI::Models::FineTuning::Alpha::GraderValidateResponse]
+          #
+          # @see OpenAI::Models::FineTuning::Alpha::GraderValidateParams
+          def validate(params)
+            parsed, options = OpenAI::FineTuning::Alpha::GraderValidateParams.dump_request(params)
+            @client.request(
+              method: :post,
+              path: "fine_tuning/alpha/graders/validate",
+              body: parsed,
+              model: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse,
+              options: options
+            )
+          end
+
+          # @api private
+          #
+          # @param client [OpenAI::Client]
+          def initialize(client:)
+            @client = client
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb
index 4a7bdfb7..b6458288 100644
--- a/lib/openai/resources/fine_tuning/jobs.rb
+++ b/lib/openai/resources/fine_tuning/jobs.rb
@@ -162,6 +162,52 @@ def list_events(fine_tuning_job_id, params = {})
         )
       end
 
+      # Some parameter documentation has been truncated, see
+      # {OpenAI::Models::FineTuning::JobPauseParams} for more details.
+      #
+      # Pause a fine-tune job.
+      #
+      # @overload pause(fine_tuning_job_id, request_options: {})
+      #
+      # @param fine_tuning_job_id [String] The ID of the fine-tuning job to pause.
+      #
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil]
+      #
+      # @return [OpenAI::FineTuning::FineTuningJob]
+      #
+      # @see OpenAI::Models::FineTuning::JobPauseParams
+      def pause(fine_tuning_job_id, params = {})
+        @client.request(
+          method: :post,
+          path: ["fine_tuning/jobs/%1$s/pause", fine_tuning_job_id],
+          model: OpenAI::FineTuning::FineTuningJob,
+          options: params[:request_options]
+        )
+      end
+
+      # Some parameter documentation has been truncated, see
+      # {OpenAI::Models::FineTuning::JobResumeParams} for more details.
+      #
+      # Resume a fine-tune job.
+      #
+      # @overload resume(fine_tuning_job_id, request_options: {})
+      #
+      # @param fine_tuning_job_id [String] The ID of the fine-tuning job to resume.
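+      #
+      #   Illustrative usage (assumes a configured client and a real job ID):
+      #
+      #     client.fine_tuning.jobs.resume("ftjob-abc123")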
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::FineTuning::FineTuningJob] + # + # @see OpenAI::Models::FineTuning::JobResumeParams + def resume(fine_tuning_job_id, params = {}) + @client.request( + method: :post, + path: ["fine_tuning/jobs/%1$s/resume", fine_tuning_job_id], + model: OpenAI::FineTuning::FineTuningJob, + options: params[:request_options] + ) + end + # @api private # # @param client [OpenAI::Client] diff --git a/lib/openai/resources/fine_tuning/methods.rb b/lib/openai/resources/fine_tuning/methods.rb new file mode 100644 index 00000000..fcc6d076 --- /dev/null +++ b/lib/openai/resources/fine_tuning/methods.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class FineTuning + class Methods + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end +end diff --git a/lib/openai/resources/graders.rb b/lib/openai/resources/graders.rb new file mode 100644 index 00000000..2aca1a85 --- /dev/null +++ b/lib/openai/resources/graders.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Graders + # @return [OpenAI::Resources::Graders::GraderModels] + attr_reader :grader_models + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @grader_models = OpenAI::Resources::Graders::GraderModels.new(client: client) + end + end + end +end diff --git a/lib/openai/resources/graders/grader_models.rb b/lib/openai/resources/graders/grader_models.rb new file mode 100644 index 00000000..172b5ef6 --- /dev/null +++ b/lib/openai/resources/graders/grader_models.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Graders + class GraderModels + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end +end diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index 7ac76a8c..b89f1995 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -46,6 +46,9 @@ module OpenAI sig { returns(OpenAI::Resources::FineTuning) } attr_reader :fine_tuning + sig { returns(OpenAI::Resources::Graders) } + attr_reader :graders + sig { returns(OpenAI::Resources::VectorStores) } attr_reader :vector_stores diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index c3eaf325..f26af6b7 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -59,8 +59,6 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader - EvalListParams = OpenAI::Models::EvalListParams EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams @@ -70,10 +68,6 @@ module OpenAI EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig - EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader - - EvalTextSimilarityGrader = OpenAI::Models::EvalTextSimilarityGrader - EvalUpdateParams = OpenAI::Models::EvalUpdateParams FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy @@ -105,6 +99,8 @@ module OpenAI FunctionParameters = T.let(OpenAI::Models::FunctionParameters, OpenAI::Internal::Type::Converter) + Graders = OpenAI::Models::Graders + Image = OpenAI::Models::Image ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index 77dd005f..b3673227 100644 --- 
a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -13,7 +13,7 @@ module OpenAI returns( T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::EvalCreateParams::DataSourceConfig::Logs + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ) ) end @@ -25,8 +25,8 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel ) @@ -56,14 +56,14 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, - OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions::OrHash ), testing_criteria: T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity::OrHash, OpenAI::EvalCreateParams::TestingCriterion::Python::OrHash, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::OrHash ) @@ -97,14 +97,14 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::EvalCreateParams::DataSourceConfig::Logs + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ), testing_criteria: T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel ) @@ -126,7 +126,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::EvalCreateParams::DataSourceConfig::Logs + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ) end @@ -187,15 +187,15 @@ module OpenAI end end - class Logs < OpenAI::Internal::Type::BaseModel + class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The type of data source. Always `logs`. + # The type of data source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type - # Metadata filters for the logs data source. + # Metadata filters for the stored completions data source. sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } attr_reader :metadata @@ -211,10 +211,10 @@ module OpenAI ) end def self.new( - # Metadata filters for the logs data source. + # Metadata filters for the stored completions data source. metadata: nil, - # The type of data source. Always `logs`. - type: :logs + # The type of data source. Always `stored_completions`. 
+ type: :stored_completions ) end @@ -245,8 +245,8 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel ) @@ -623,28 +623,30 @@ module OpenAI end end - class Python < OpenAI::Internal::Type::BaseModel + class TextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The source code of the python script. - sig { returns(String) } - attr_accessor :source + # The threshold for the score. + sig { returns(Float) } + attr_accessor :pass_threshold - # The object type, which is always `python`. - sig { returns(Symbol) } - attr_accessor :type + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig { params(pass_threshold: Float).returns(T.attached_class) } + def self.new( + # The threshold for the score. + pass_threshold: + ) + end - # The image tag to use for the python script. - sig { returns(T.nilable(String)) } - attr_reader :image_tag + sig { override.returns({ pass_threshold: Float }) } + def to_hash + end + end - sig { params(image_tag: String).void } - attr_writer :image_tag + class Python < OpenAI::Models::Graders::PythonGrader + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -654,70 +656,22 @@ module OpenAI attr_writer :pass_threshold # A PythonGrader object that runs a python script on the input. - sig do - params( - name: String, - source: String, - image_tag: String, - pass_threshold: Float, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The name of the grader. - name:, - # The source code of the python script. - source:, - # The image tag to use for the python script. - image_tag: nil, # The threshold for the score. - pass_threshold: nil, - # The object type, which is always `python`. - type: :python + pass_threshold: nil ) end - sig do - override.returns( - { - name: String, - source: String, - type: Symbol, - image_tag: String, - pass_threshold: Float - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end end - class ScoreModel < OpenAI::Internal::Type::BaseModel + class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The input text. This may include template strings. - sig do - returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input - ] - ) - end - attr_accessor :input - - # The model to use for the evaluation. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The object type, which is always `score_model`. - sig { returns(Symbol) } - attr_accessor :type - # The threshold for the score. sig { returns(T.nilable(Float)) } attr_reader :pass_threshold @@ -725,293 +679,17 @@ module OpenAI sig { params(pass_threshold: Float).void } attr_writer :pass_threshold - # The range of the score. Defaults to `[0, 1]`. 
- sig { returns(T.nilable(T::Array[Float])) } - attr_reader :range - - sig { params(range: T::Array[Float]).void } - attr_writer :range - - # The sampling parameters for the model. - sig { returns(T.nilable(T.anything)) } - attr_reader :sampling_params - - sig { params(sampling_params: T.anything).void } - attr_writer :sampling_params - # A ScoreModelGrader object that uses a model to assign a score to the input. - sig do - params( - input: - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::OrHash - ], - model: String, - name: String, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The input text. This may include template strings. - input:, - # The model to use for the evaluation. - model:, - # The name of the grader. - name:, # The threshold for the score. - pass_threshold: nil, - # The range of the score. Defaults to `[0, 1]`. - range: nil, - # The sampling parameters for the model. - sampling_params: nil, - # The object type, which is always `score_model`. - type: :score_model + pass_threshold: nil ) end - sig do - override.returns( - { - input: - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input - ], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash - ), - role: - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end sig do diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index 97b8fa5b..2928d3b9 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -46,11 +46,11 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel ) ] ) @@ -77,11 +77,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, - OpenAI::Models::EvalCreateResponse::TestingCriterion::Python::OrHash, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::OrHash + OpenAI::Graders::LabelModelGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython::OrHash, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel::OrHash ) ], object: Symbol @@ -126,11 +126,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel ) ] } @@ -170,36 +170,38 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::Python, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel ) end - class Python < OpenAI::Internal::Type::BaseModel + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The name of the 
grader. - sig { returns(String) } - attr_accessor :name - - # The source code of the python script. - sig { returns(String) } - attr_accessor :source + # The threshold for the score. + sig { returns(Float) } + attr_accessor :pass_threshold - # The object type, which is always `python`. - sig { returns(Symbol) } - attr_accessor :type + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig { params(pass_threshold: Float).returns(T.attached_class) } + def self.new( + # The threshold for the score. + pass_threshold: + ) + end - # The image tag to use for the python script. - sig { returns(T.nilable(String)) } - attr_reader :image_tag + sig { override.returns({ pass_threshold: Float }) } + def to_hash + end + end - sig { params(image_tag: String).void } - attr_writer :image_tag + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -209,70 +211,22 @@ module OpenAI attr_writer :pass_threshold # A PythonGrader object that runs a python script on the input. - sig do - params( - name: String, - source: String, - image_tag: String, - pass_threshold: Float, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The name of the grader. - name:, - # The source code of the python script. - source:, - # The image tag to use for the python script. - image_tag: nil, # The threshold for the score. - pass_threshold: nil, - # The object type, which is always `python`. - type: :python + pass_threshold: nil ) end - sig do - override.returns( - { - name: String, - source: String, - type: Symbol, - image_tag: String, - pass_threshold: Float - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end end - class ScoreModel < OpenAI::Internal::Type::BaseModel + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The input text. This may include template strings. - sig do - returns( - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input - ] - ) - end - attr_accessor :input - - # The model to use for the evaluation. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The object type, which is always `score_model`. - sig { returns(Symbol) } - attr_accessor :type - # The threshold for the score. sig { returns(T.nilable(Float)) } attr_reader :pass_threshold @@ -280,293 +234,17 @@ module OpenAI sig { params(pass_threshold: Float).void } attr_writer :pass_threshold - # The range of the score. Defaults to `[0, 1]`. - sig { returns(T.nilable(T::Array[Float])) } - attr_reader :range - - sig { params(range: T::Array[Float]).void } - attr_writer :range - - # The sampling parameters for the model. - sig { returns(T.nilable(T.anything)) } - attr_reader :sampling_params - - sig { params(sampling_params: T.anything).void } - attr_writer :sampling_params - # A ScoreModelGrader object that uses a model to assign a score to the input. 
- sig do - params( - input: - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::OrHash - ], - model: String, - name: String, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The input text. This may include template strings. - input:, - # The model to use for the evaluation. - model:, - # The name of the grader. - name:, # The threshold for the score. - pass_threshold: nil, - # The range of the score. Defaults to `[0, 1]`. - range: nil, - # The sampling parameters for the model. - sampling_params: nil, - # The object type, which is always `score_model`. - type: :score_model + pass_threshold: nil ) end - sig do - override.returns( - { - input: - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input - ], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash - ), - role: - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end sig do diff --git a/rbi/openai/models/eval_label_model_grader.rbi b/rbi/openai/models/eval_label_model_grader.rbi deleted file mode 100644 index df283285..00000000 --- a/rbi/openai/models/eval_label_model_grader.rbi +++ /dev/null @@ -1,268 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - sig { returns(T::Array[OpenAI::EvalLabelModelGrader::Input]) } - attr_accessor :input - - # The labels to assign to each item in the evaluation. - sig { returns(T::Array[String]) } - attr_accessor :labels - - # The model to use for the evaluation. Must support structured outputs. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The labels that indicate a passing result. Must be a subset of labels. - sig { returns(T::Array[String]) } - attr_accessor :passing_labels - - # The object type, which is always `label_model`. - sig { returns(Symbol) } - attr_accessor :type - - # A LabelModelGrader object which uses a model to assign labels to each item in - # the evaluation. - sig do - params( - input: T::Array[OpenAI::EvalLabelModelGrader::Input::OrHash], - labels: T::Array[String], - model: String, - name: String, - passing_labels: T::Array[String], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - input:, - # The labels to assign to each item in the evaluation. - labels:, - # The model to use for the evaluation. Must support structured outputs. - model:, - # The name of the grader. - name:, - # The labels that indicate a passing result. Must be a subset of labels. - passing_labels:, - # The object type, which is always `label_model`. - type: :label_model - ) - end - - sig do - override.returns( - { - input: T::Array[OpenAI::EvalLabelModelGrader::Input], - labels: T::Array[String], - model: String, - name: String, - passing_labels: T::Array[String], - type: Symbol - } - ) - end - def to_hash - end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalLabelModelGrader::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig { returns(OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol) } - attr_accessor :role - - # The type of the message input. Always `message`. 
- sig do - returns( - T.nilable(OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol) - ) - end - attr_reader :type - - sig do - params(type: OpenAI::EvalLabelModelGrader::Input::Type::OrSymbol).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalLabelModelGrader::Input::Content::OutputText::OrHash - ), - role: OpenAI::EvalLabelModelGrader::Input::Role::OrSymbol, - type: OpenAI::EvalLabelModelGrader::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalLabelModelGrader::Input::Content::OutputText - ), - role: OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol, - type: OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalLabelModelGrader::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[OpenAI::EvalLabelModelGrader::Input::Content::Variants] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::EvalLabelModelGrader::Input::Role) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[OpenAI::EvalLabelModelGrader::Input::Role::TaggedSymbol] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
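The file being deleted here defines the label-model grader, which survives with the same shape as OpenAI::Graders::LabelModelGrader (per the renames in the following hunks). A construction sketch under that assumption; the labels, grader name, and model are illustrative:

    grader = OpenAI::Graders::LabelModelGrader.new(
      input: [
        { role: :user, content: "Is this answer correct? {{sample.output_text}}" }
      ],
      labels: %w[correct incorrect],
      model: "gpt-4o", # must support structured outputs
      name: "correctness",
      passing_labels: %w[correct]
    )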
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::EvalLabelModelGrader::Input::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[OpenAI::EvalLabelModelGrader::Input::Type::TaggedSymbol] - ) - end - def self.values - end - end - end - end - end -end diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index b59f14c9..5b20006b 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -46,11 +46,11 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::Python, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel ) ] ) @@ -77,11 +77,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, - OpenAI::Models::EvalListResponse::TestingCriterion::Python::OrHash, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::OrHash + OpenAI::Graders::LabelModelGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython::OrHash, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel::OrHash ) ], object: Symbol @@ -126,11 +126,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::Python, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel ) ] } @@ -170,36 +170,38 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::Python, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel ) end - class Python < OpenAI::Internal::Type::BaseModel + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The source code of the python script. 
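As in the create response, the Python criterion's own fields (name, source, image_tag) move up to the shared OpenAI::Models::Graders::PythonGrader base, leaving only pass_threshold on the subclass. A sketch of the shared grader, assuming its fields match the signatures removed here; the grade(sample, item) convention in the script body is illustrative:

    grader = OpenAI::Graders::PythonGrader.new(
      name: "non-empty",
      source: <<~PYTHON
        def grade(sample, item):
            return 1.0 if sample["output_text"] else 0.0
      PYTHON
    )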
- sig { returns(String) } - attr_accessor :source + # The threshold for the score. + sig { returns(Float) } + attr_accessor :pass_threshold - # The object type, which is always `python`. - sig { returns(Symbol) } - attr_accessor :type + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig { params(pass_threshold: Float).returns(T.attached_class) } + def self.new( + # The threshold for the score. + pass_threshold: + ) + end - # The image tag to use for the python script. - sig { returns(T.nilable(String)) } - attr_reader :image_tag + sig { override.returns({ pass_threshold: Float }) } + def to_hash + end + end - sig { params(image_tag: String).void } - attr_writer :image_tag + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -209,70 +211,22 @@ module OpenAI attr_writer :pass_threshold # A PythonGrader object that runs a python script on the input. - sig do - params( - name: String, - source: String, - image_tag: String, - pass_threshold: Float, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The name of the grader. - name:, - # The source code of the python script. - source:, - # The image tag to use for the python script. - image_tag: nil, # The threshold for the score. - pass_threshold: nil, - # The object type, which is always `python`. - type: :python + pass_threshold: nil ) end - sig do - override.returns( - { - name: String, - source: String, - type: Symbol, - image_tag: String, - pass_threshold: Float - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end end - class ScoreModel < OpenAI::Internal::Type::BaseModel + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The input text. This may include template strings. - sig do - returns( - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input - ] - ) - end - attr_accessor :input - - # The model to use for the evaluation. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The object type, which is always `score_model`. - sig { returns(Symbol) } - attr_accessor :type - # The threshold for the score. sig { returns(T.nilable(Float)) } attr_reader :pass_threshold @@ -280,293 +234,17 @@ module OpenAI sig { params(pass_threshold: Float).void } attr_writer :pass_threshold - # The range of the score. Defaults to `[0, 1]`. - sig { returns(T.nilable(T::Array[Float])) } - attr_reader :range - - sig { params(range: T::Array[Float]).void } - attr_writer :range - - # The sampling parameters for the model. - sig { returns(T.nilable(T.anything)) } - attr_reader :sampling_params - - sig { params(sampling_params: T.anything).void } - attr_writer :sampling_params - # A ScoreModelGrader object that uses a model to assign a score to the input. - sig do - params( - input: - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::OrHash - ], - model: String, - name: String, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The input text. 
This may include template strings. - input:, - # The model to use for the evaluation. - model:, - # The name of the grader. - name:, # The threshold for the score. - pass_threshold: nil, - # The range of the score. Defaults to `[0, 1]`. - range: nil, - # The sampling parameters for the model. - sampling_params: nil, - # The object type, which is always `score_model`. - type: :score_model + pass_threshold: nil ) end - sig do - override.returns( - { - input: - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input - ], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash - ), - role: - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. 
- module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end sig do diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index b6216729..cdeeaf69 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -46,11 +46,11 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel ) ] ) @@ -77,11 +77,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python::OrHash, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::OrHash + OpenAI::Graders::LabelModelGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython::OrHash, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel::OrHash ) ], object: Symbol @@ -126,11 +126,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel ) ] } @@ -170,36 +170,38 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel ) end - class Python < OpenAI::Internal::Type::BaseModel + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = 
T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The source code of the python script. - sig { returns(String) } - attr_accessor :source + # The threshold for the score. + sig { returns(Float) } + attr_accessor :pass_threshold - # The object type, which is always `python`. - sig { returns(Symbol) } - attr_accessor :type + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig { params(pass_threshold: Float).returns(T.attached_class) } + def self.new( + # The threshold for the score. + pass_threshold: + ) + end - # The image tag to use for the python script. - sig { returns(T.nilable(String)) } - attr_reader :image_tag + sig { override.returns({ pass_threshold: Float }) } + def to_hash + end + end - sig { params(image_tag: String).void } - attr_writer :image_tag + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -209,70 +211,22 @@ module OpenAI attr_writer :pass_threshold # A PythonGrader object that runs a python script on the input. - sig do - params( - name: String, - source: String, - image_tag: String, - pass_threshold: Float, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The name of the grader. - name:, - # The source code of the python script. - source:, - # The image tag to use for the python script. - image_tag: nil, # The threshold for the score. - pass_threshold: nil, - # The object type, which is always `python`. - type: :python + pass_threshold: nil ) end - sig do - override.returns( - { - name: String, - source: String, - type: Symbol, - image_tag: String, - pass_threshold: Float - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end end - class ScoreModel < OpenAI::Internal::Type::BaseModel + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The input text. This may include template strings. - sig do - returns( - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input - ] - ) - end - attr_accessor :input - - # The model to use for the evaluation. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The object type, which is always `score_model`. - sig { returns(Symbol) } - attr_accessor :type - # The threshold for the score. sig { returns(T.nilable(Float)) } attr_reader :pass_threshold @@ -280,293 +234,17 @@ module OpenAI sig { params(pass_threshold: Float).void } attr_writer :pass_threshold - # The range of the score. Defaults to `[0, 1]`. - sig { returns(T.nilable(T::Array[Float])) } - attr_reader :range - - sig { params(range: T::Array[Float]).void } - attr_writer :range - - # The sampling parameters for the model. - sig { returns(T.nilable(T.anything)) } - attr_reader :sampling_params - - sig { params(sampling_params: T.anything).void } - attr_writer :sampling_params - # A ScoreModelGrader object that uses a model to assign a score to the input. 
- sig do - params( - input: - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::OrHash - ], - model: String, - name: String, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The input text. This may include template strings. - input:, - # The model to use for the evaluation. - model:, - # The name of the grader. - name:, # The threshold for the score. - pass_threshold: nil, - # The range of the score. Defaults to `[0, 1]`. - range: nil, - # The sampling parameters for the model. - sampling_params: nil, - # The object type, which is always `score_model`. - type: :score_model + pass_threshold: nil ) end - sig do - override.returns( - { - input: - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input - ], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash - ), - role: - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end sig do diff --git a/rbi/openai/models/eval_string_check_grader.rbi b/rbi/openai/models/eval_string_check_grader.rbi deleted file mode 100644 index b22c1497..00000000 --- a/rbi/openai/models/eval_string_check_grader.rbi +++ /dev/null @@ -1,94 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The input text. This may include template strings. - sig { returns(String) } - attr_accessor :input - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. - sig { returns(OpenAI::EvalStringCheckGrader::Operation::OrSymbol) } - attr_accessor :operation - - # The reference text. This may include template strings. - sig { returns(String) } - attr_accessor :reference - - # The object type, which is always `string_check`. - sig { returns(Symbol) } - attr_accessor :type - - # A StringCheckGrader object that performs a string comparison between input and - # reference using a specified operation. - sig do - params( - input: String, - name: String, - operation: OpenAI::EvalStringCheckGrader::Operation::OrSymbol, - reference: String, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The input text. This may include template strings. - input:, - # The name of the grader. - name:, - # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. - operation:, - # The reference text. This may include template strings. - reference:, - # The object type, which is always `string_check`. - type: :string_check - ) - end - - sig do - override.returns( - { - input: String, - name: String, - operation: OpenAI::EvalStringCheckGrader::Operation::OrSymbol, - reference: String, - type: Symbol - } - ) - end - def to_hash - end - - # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. 
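This string-check grader likewise moves to OpenAI::Graders::StringCheckGrader with the same field set. A minimal sketch; the {{...}} template placeholders are illustrative:

    grader = OpenAI::Graders::StringCheckGrader.new(
      input: "{{sample.output_text}}",
      name: "exact-match",
      operation: :eq, # one of :eq, :ne, :like, :ilike
      reference: "{{item.expected}}"
    )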
- module Operation - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::EvalStringCheckGrader::Operation) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - EQ = T.let(:eq, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) - NE = T.let(:ne, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) - LIKE = - T.let(:like, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) - ILIKE = - T.let(:ilike, OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol) - - sig do - override.returns( - T::Array[OpenAI::EvalStringCheckGrader::Operation::TaggedSymbol] - ) - end - def self.values - end - end - end - end -end diff --git a/rbi/openai/models/eval_text_similarity_grader.rbi b/rbi/openai/models/eval_text_similarity_grader.rbi deleted file mode 100644 index fad82523..00000000 --- a/rbi/openai/models/eval_text_similarity_grader.rbi +++ /dev/null @@ -1,157 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. - sig do - returns(OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol) - end - attr_accessor :evaluation_metric - - # The text being graded. - sig { returns(String) } - attr_accessor :input - - # A float score where a value greater than or equal indicates a passing grade. - sig { returns(Float) } - attr_accessor :pass_threshold - - # The text being graded against. - sig { returns(String) } - attr_accessor :reference - - # The type of grader. - sig { returns(Symbol) } - attr_accessor :type - - # The name of the grader. - sig { returns(T.nilable(String)) } - attr_reader :name - - sig { params(name: String).void } - attr_writer :name - - # A TextSimilarityGrader object which grades text based on similarity metrics. - sig do - params( - evaluation_metric: - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, - input: String, - pass_threshold: Float, - reference: String, - name: String, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. - evaluation_metric:, - # The text being graded. - input:, - # A float score where a value greater than or equal indicates a passing grade. - pass_threshold:, - # The text being graded against. - reference:, - # The name of the grader. - name: nil, - # The type of grader. - type: :text_similarity - ) - end - - sig do - override.returns( - { - evaluation_metric: - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::OrSymbol, - input: String, - pass_threshold: Float, - reference: String, - type: Symbol, - name: String - } - ) - end - def to_hash - end - - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. 
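Similarly for text similarity, now OpenAI::Graders::TextSimilarityGrader; the metric accepts the symbols enumerated below. A sketch assuming the field set carries over, minus the threshold, which moves to the eval-level subclasses:

    grader = OpenAI::Graders::TextSimilarityGrader.new(
      evaluation_metric: :fuzzy_match,
      input: "{{sample.output_text}}",
      name: "similarity",
      reference: "{{item.expected}}"
    )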
- module EvaluationMetric - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::EvalTextSimilarityGrader::EvaluationMetric) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - FUZZY_MATCH = - T.let( - :fuzzy_match, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - BLEU = - T.let( - :bleu, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - GLEU = - T.let( - :gleu, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - METEOR = - T.let( - :meteor, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_1 = - T.let( - :rouge_1, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_2 = - T.let( - :rouge_2, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_3 = - T.let( - :rouge_3, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_4 = - T.let( - :rouge_4, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_5 = - T.let( - :rouge_5, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - ROUGE_L = - T.let( - :rouge_l, - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol - ] - ) - end - def self.values - end - end - end - end -end diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index b3c85236..a8c27dd8 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -46,11 +46,11 @@ module OpenAI returns( T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel ) ] ) @@ -77,11 +77,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python::OrHash, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::OrHash + OpenAI::Graders::LabelModelGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython::OrHash, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel::OrHash ) ], object: Symbol @@ -126,11 +126,11 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, + 
OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel ) ] } @@ -170,36 +170,38 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::EvalLabelModelGrader, - OpenAI::EvalStringCheckGrader, - OpenAI::EvalTextSimilarityGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel ) end - class Python < OpenAI::Internal::Type::BaseModel + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The source code of the python script. - sig { returns(String) } - attr_accessor :source + # The threshold for the score. + sig { returns(Float) } + attr_accessor :pass_threshold - # The object type, which is always `python`. - sig { returns(Symbol) } - attr_accessor :type + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig { params(pass_threshold: Float).returns(T.attached_class) } + def self.new( + # The threshold for the score. + pass_threshold: + ) + end - # The image tag to use for the python script. - sig { returns(T.nilable(String)) } - attr_reader :image_tag + sig { override.returns({ pass_threshold: Float }) } + def to_hash + end + end - sig { params(image_tag: String).void } - attr_writer :image_tag + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -209,70 +211,22 @@ module OpenAI attr_writer :pass_threshold # A PythonGrader object that runs a python script on the input. - sig do - params( - name: String, - source: String, - image_tag: String, - pass_threshold: Float, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The name of the grader. - name:, - # The source code of the python script. - source:, - # The image tag to use for the python script. - image_tag: nil, # The threshold for the score. - pass_threshold: nil, - # The object type, which is always `python`. - type: :python + pass_threshold: nil ) end - sig do - override.returns( - { - name: String, - source: String, - type: Symbol, - image_tag: String, - pass_threshold: Float - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end end - class ScoreModel < OpenAI::Internal::Type::BaseModel + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # The input text. This may include template strings. - sig do - returns( - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input - ] - ) - end - attr_accessor :input - - # The model to use for the evaluation. - sig { returns(String) } - attr_accessor :model - - # The name of the grader. - sig { returns(String) } - attr_accessor :name - - # The object type, which is always `score_model`. - sig { returns(Symbol) } - attr_accessor :type - # The threshold for the score. 
sig { returns(T.nilable(Float)) } attr_reader :pass_threshold @@ -280,293 +234,17 @@ module OpenAI sig { params(pass_threshold: Float).void } attr_writer :pass_threshold - # The range of the score. Defaults to `[0, 1]`. - sig { returns(T.nilable(T::Array[Float])) } - attr_reader :range - - sig { params(range: T::Array[Float]).void } - attr_writer :range - - # The sampling parameters for the model. - sig { returns(T.nilable(T.anything)) } - attr_reader :sampling_params - - sig { params(sampling_params: T.anything).void } - attr_writer :sampling_params - # A ScoreModelGrader object that uses a model to assign a score to the input. - sig do - params( - input: - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::OrHash - ], - model: String, - name: String, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything, - type: Symbol - ).returns(T.attached_class) - end + sig { params(pass_threshold: Float).returns(T.attached_class) } def self.new( - # The input text. This may include template strings. - input:, - # The model to use for the evaluation. - model:, - # The name of the grader. - name:, # The threshold for the score. - pass_threshold: nil, - # The range of the score. Defaults to `[0, 1]`. - range: nil, - # The sampling parameters for the model. - sampling_params: nil, - # The object type, which is always `score_model`. - type: :score_model + pass_threshold: nil ) end - sig do - override.returns( - { - input: - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input - ], - model: String, - name: String, - type: Symbol, - pass_threshold: Float, - range: T::Array[Float], - sampling_params: T.anything - } - ) - end + sig { override.returns({ pass_threshold: Float }) } def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
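To make the hierarchy comment concrete: a grader prompt typically pairs a high-precedence developer instruction with a templated user message. A plain-hash sketch (these per-response Input classes are being removed in favour of the shared grader input models; the messages are illustrative):

    input = [
      { role: :developer, content: "Grade the answer for factual accuracy." },
      { role: :user, content: "Candidate answer: {{sample.output_text}}" }
    ]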
- sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText::OrHash - ), - role: - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::OrSymbol, - type: - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ), - role: - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol, - type: - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end sig do diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi new file mode 100644 index 00000000..b2df808e --- /dev/null +++ b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi @@ -0,0 +1,143 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Alpha + class GraderRunParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The grader used for the fine-tuning job. + sig do + returns( + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + ) + end + attr_accessor :grader + + # The model sample to be evaluated. + sig { returns(String) } + attr_accessor :model_sample + + # The reference answer for the evaluation. + sig do + returns(T.any(String, T.anything, T::Array[T.anything], Float)) + end + attr_accessor :reference_answer + + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ), + model_sample: String, + reference_answer: + T.any(String, T.anything, T::Array[T.anything], Float), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # The grader used for the fine-tuning job. + grader:, + # The model sample to be evaluated. + model_sample:, + # The reference answer for the evaluation. + reference_answer:, + request_options: {} + ) + end + + sig do + override.returns( + { + grader: + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ), + model_sample: String, + reference_answer: + T.any(String, T.anything, T::Array[T.anything], Float), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # The grader used for the fine-tuning job. + module Grader + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::Alpha::GraderRunParams::Grader::Variants + ] + ) + end + def self.variants + end + end + + # The reference answer for the evaluation. 
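These params back the new alpha "run grader" endpoint. A hypothetical end-to-end call, assuming the generated resource is exposed as client.fine_tuning.alpha.graders.run (the method path is inferred from this file's namespace, and the grader hash is illustrative):

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
    response = client.fine_tuning.alpha.graders.run(
      grader: {
        type: :string_check,
        name: "exact",
        input: "{{sample.output_text}}",
        operation: :eq,
        reference: "{{item.reference_answer}}"
      },
      model_sample: "Paris",
      reference_answer: "Paris"
    )
    response.reward # => 1.0 when the check passes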
+ module ReferenceAnswer + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any(String, T.anything, T::Array[T.anything], Float) + end + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants + ] + ) + end + def self.variants + end + + UnionMember2Array = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi new file mode 100644 index 00000000..4db912c8 --- /dev/null +++ b/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi @@ -0,0 +1,253 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Alpha + class GraderRunResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata + ) + end + attr_reader :metadata + + sig do + params( + metadata: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::OrHash + ).void + end + attr_writer :metadata + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :model_grader_token_usage_per_model + + sig { returns(Float) } + attr_accessor :reward + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :sub_rewards + + sig do + params( + metadata: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::OrHash, + model_grader_token_usage_per_model: T::Hash[Symbol, T.anything], + reward: Float, + sub_rewards: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new( + metadata:, + model_grader_token_usage_per_model:, + reward:, + sub_rewards: + ) + end + + sig do + override.returns( + { + metadata: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: T::Hash[Symbol, T.anything], + reward: Float, + sub_rewards: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + + class Metadata < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + returns( + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors + ) + end + attr_reader :errors + + sig do + params( + errors: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors::OrHash + ).void + end + attr_writer :errors + + sig { returns(Float) } + attr_accessor :execution_time + + sig { returns(String) } + attr_accessor :name + + sig { returns(T.nilable(String)) } + attr_accessor :sampled_model_name + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :scores + + sig { returns(T.nilable(Integer)) } + attr_accessor :token_usage + + sig { returns(String) } + attr_accessor :type + + sig do + params( + errors: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors::OrHash, + execution_time: Float, + name: String, + sampled_model_name: T.nilable(String), + scores: T::Hash[Symbol, T.anything], + token_usage: T.nilable(Integer), + type: String + ).returns(T.attached_class) + end + def self.new( + errors:, + execution_time:, + name:, + sampled_model_name:, + scores:, + token_usage:, + type: + ) + end + + sig do + override.returns( + { + errors: + OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, + execution_time: Float, + name: String, + sampled_model_name: 
T.nilable(String), + scores: T::Hash[Symbol, T.anything], + token_usage: T.nilable(Integer), + type: String + } + ) + end + def to_hash + end + + class Errors < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(T::Boolean) } + attr_accessor :formula_parse_error + + sig { returns(T::Boolean) } + attr_accessor :invalid_variable_error + + sig { returns(T::Boolean) } + attr_accessor :model_grader_parse_error + + sig { returns(T::Boolean) } + attr_accessor :model_grader_refusal_error + + sig { returns(T::Boolean) } + attr_accessor :model_grader_server_error + + sig { returns(T.nilable(String)) } + attr_accessor :model_grader_server_error_details + + sig { returns(T::Boolean) } + attr_accessor :other_error + + sig { returns(T::Boolean) } + attr_accessor :python_grader_runtime_error + + sig { returns(T.nilable(String)) } + attr_accessor :python_grader_runtime_error_details + + sig { returns(T::Boolean) } + attr_accessor :python_grader_server_error + + sig { returns(T.nilable(String)) } + attr_accessor :python_grader_server_error_type + + sig { returns(T::Boolean) } + attr_accessor :sample_parse_error + + sig { returns(T::Boolean) } + attr_accessor :truncated_observation_error + + sig { returns(T::Boolean) } + attr_accessor :unresponsive_reward_error + + sig do + params( + formula_parse_error: T::Boolean, + invalid_variable_error: T::Boolean, + model_grader_parse_error: T::Boolean, + model_grader_refusal_error: T::Boolean, + model_grader_server_error: T::Boolean, + model_grader_server_error_details: T.nilable(String), + other_error: T::Boolean, + python_grader_runtime_error: T::Boolean, + python_grader_runtime_error_details: T.nilable(String), + python_grader_server_error: T::Boolean, + python_grader_server_error_type: T.nilable(String), + sample_parse_error: T::Boolean, + truncated_observation_error: T::Boolean, + unresponsive_reward_error: T::Boolean + ).returns(T.attached_class) + end + def self.new( + formula_parse_error:, + invalid_variable_error:, + model_grader_parse_error:, + model_grader_refusal_error:, + model_grader_server_error:, + model_grader_server_error_details:, + other_error:, + python_grader_runtime_error:, + python_grader_runtime_error_details:, + python_grader_server_error:, + python_grader_server_error_type:, + sample_parse_error:, + truncated_observation_error:, + unresponsive_reward_error: + ) + end + + sig do + override.returns( + { + formula_parse_error: T::Boolean, + invalid_variable_error: T::Boolean, + model_grader_parse_error: T::Boolean, + model_grader_refusal_error: T::Boolean, + model_grader_server_error: T::Boolean, + model_grader_server_error_details: T.nilable(String), + other_error: T::Boolean, + python_grader_runtime_error: T::Boolean, + python_grader_runtime_error_details: T.nilable(String), + python_grader_server_error: T::Boolean, + python_grader_server_error_type: T.nilable(String), + sample_parse_error: T::Boolean, + truncated_observation_error: T::Boolean, + unresponsive_reward_error: T::Boolean + } + ) + end + def to_hash + end + end + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi new file mode 100644 index 00000000..461e9373 --- /dev/null +++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi @@ -0,0 +1,95 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Alpha + class GraderValidateParams < 
OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The grader used for the fine-tuning job. + sig do + returns( + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + ) + end + attr_accessor :grader + + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # The grader used for the fine-tuning job. + grader:, + request_options: {} + ) + end + + sig do + override.returns( + { + grader: + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # The grader used for the fine-tuning job. + module Grader + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::Alpha::GraderValidateParams::Grader::Variants + ] + ) + end + def self.variants + end + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi new file mode 100644 index 00000000..ca2d42db --- /dev/null +++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi @@ -0,0 +1,105 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + module Alpha + class GraderValidateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The grader used for the fine-tuning job. + sig do + returns( + T.nilable( + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + ) + ) + end + attr_reader :grader + + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ) + ).void + end + attr_writer :grader + + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # The grader used for the fine-tuning job. 
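+ # A hedged sketch of how this response is typically obtained (the
+ # `client.fine_tuning.alpha.graders.validate` method name is assumed
+ # from the surrounding resource layout):
+ #
+ #   validated = client.fine_tuning.alpha.graders.validate(grader: grader)
+ #   validated.grader # => the echoed grader config, or nil
+ #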
+ grader: nil + ) + end + + sig do + override.returns( + { + grader: + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + } + ) + end + def to_hash + end + + # The grader used for the fine-tuning job. + module Grader + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader::Variants + ] + ) + end + def self.variants + end + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi new file mode 100644 index 00000000..d210f91b --- /dev/null +++ b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi @@ -0,0 +1,151 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class DpoHyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :batch_size + + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size + + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + sig { returns(T.nilable(T.any(Symbol, Float))) } + attr_reader :beta + + sig { params(beta: T.any(Symbol, Float)).void } + attr_writer :beta + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + sig { returns(T.nilable(T.any(Symbol, Float))) } + attr_reader :learning_rate_multiplier + + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :n_epochs + + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs + + # The hyperparameters used for the DPO fine-tuning job. + sig do + params( + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + ).returns(T.attached_class) + end + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + beta: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. 
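+ # For illustration, each integer or float field here also accepts the
+ # `:auto` symbol variant; a plausible hash (placeholder values, not
+ # recommended defaults) looks like:
+ #
+ #   {batch_size: :auto, beta: 0.1, learning_rate_multiplier: 1.8, n_epochs: 3}
+ #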
+ n_epochs: nil + ) + end + + sig do + override.returns( + { + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash + end + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + module BatchSize + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end + end + + # The beta value for the DPO method. A higher beta value will increase the weight + # of the penalty between the policy and reference model. + module Beta + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[OpenAI::FineTuning::DpoHyperparameters::Beta::Variants] + ) + end + def self.variants + end + end + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end + end + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + module NEpochs + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/dpo_method.rbi b/rbi/openai/models/fine_tuning/dpo_method.rbi new file mode 100644 index 00000000..05f15967 --- /dev/null +++ b/rbi/openai/models/fine_tuning/dpo_method.rbi @@ -0,0 +1,42 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class DpoMethod < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The hyperparameters used for the DPO fine-tuning job. + sig { returns(T.nilable(OpenAI::FineTuning::DpoHyperparameters)) } + attr_reader :hyperparameters + + sig do + params( + hyperparameters: OpenAI::FineTuning::DpoHyperparameters::OrHash + ).void + end + attr_writer :hyperparameters + + # Configuration for the DPO fine-tuning method. + sig do + params( + hyperparameters: OpenAI::FineTuning::DpoHyperparameters::OrHash + ).returns(T.attached_class) + end + def self.new( + # The hyperparameters used for the DPO fine-tuning job. + hyperparameters: nil + ) + end + + sig do + override.returns( + { hyperparameters: OpenAI::FineTuning::DpoHyperparameters } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index ccd100af..00b007eb 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -471,447 +471,79 @@ module OpenAI OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # Configuration for the DPO fine-tuning method. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. 
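+ # For example, when inspecting a retrieved job (the attribute is assumed
+ # to surface as `method_` in this SDK to avoid clashing with
+ # `Object#method`; the job ID is a placeholder):
+ #
+ #   job = client.fine_tuning.jobs.retrieve("ftjob-abc123")
+ #   job.method_.type # => :supervised, :dpo, or :reinforcement
+ #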
sig do - returns(T.nilable(OpenAI::FineTuning::FineTuningJob::Method::Dpo)) + returns( + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ) end + attr_accessor :type + + # Configuration for the DPO fine-tuning method. + sig { returns(T.nilable(OpenAI::FineTuning::DpoMethod)) } attr_reader :dpo - sig do - params( - dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo::OrHash - ).void - end + sig { params(dpo: OpenAI::FineTuning::DpoMethod::OrHash).void } attr_writer :dpo - # Configuration for the supervised fine-tuning method. - sig do - returns( - T.nilable(OpenAI::FineTuning::FineTuningJob::Method::Supervised) - ) - end - attr_reader :supervised + # Configuration for the reinforcement fine-tuning method. + sig { returns(T.nilable(OpenAI::FineTuning::ReinforcementMethod)) } + attr_reader :reinforcement sig do params( - supervised: - OpenAI::FineTuning::FineTuningJob::Method::Supervised::OrHash + reinforcement: OpenAI::FineTuning::ReinforcementMethod::OrHash ).void end - attr_writer :supervised + attr_writer :reinforcement - # The type of method. Is either `supervised` or `dpo`. - sig do - returns( - T.nilable( - OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol - ) - ) - end - attr_reader :type + # Configuration for the supervised fine-tuning method. + sig { returns(T.nilable(OpenAI::FineTuning::SupervisedMethod)) } + attr_reader :supervised sig do params( - type: OpenAI::FineTuning::FineTuningJob::Method::Type::OrSymbol + supervised: OpenAI::FineTuning::SupervisedMethod::OrHash ).void end - attr_writer :type + attr_writer :supervised # The method used for fine-tuning. sig do params( - dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo::OrHash, - supervised: - OpenAI::FineTuning::FineTuningJob::Method::Supervised::OrHash, - type: OpenAI::FineTuning::FineTuningJob::Method::Type::OrSymbol + type: OpenAI::FineTuning::FineTuningJob::Method::Type::OrSymbol, + dpo: OpenAI::FineTuning::DpoMethod::OrHash, + reinforcement: OpenAI::FineTuning::ReinforcementMethod::OrHash, + supervised: OpenAI::FineTuning::SupervisedMethod::OrHash ).returns(T.attached_class) end def self.new( + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + type:, # Configuration for the DPO fine-tuning method. dpo: nil, + # Configuration for the reinforcement fine-tuning method. + reinforcement: nil, # Configuration for the supervised fine-tuning method. - supervised: nil, - # The type of method. Is either `supervised` or `dpo`. - type: nil + supervised: nil ) end sig do override.returns( { - dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, - supervised: - OpenAI::FineTuning::FineTuningJob::Method::Supervised, type: - OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod } ) end def to_hash end - class Dpo < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The hyperparameters used for the fine-tuning job. - sig do - returns( - T.nilable( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - ) - ) - end - attr_reader :hyperparameters - - sig do - params( - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::OrHash - ).void - end - attr_writer :hyperparameters - - # Configuration for the DPO fine-tuning method. 
- sig do - params( - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::OrHash - ).returns(T.attached_class) - end - def self.new( - # The hyperparameters used for the fine-tuning job. - hyperparameters: nil - ) - end - - sig do - override.returns( - { - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - } - ) - end - def to_hash - end - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :batch_size - - sig { params(batch_size: T.any(Symbol, Integer)).void } - attr_writer :batch_size - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :beta - - sig { params(beta: T.any(Symbol, Float)).void } - attr_writer :beta - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :learning_rate_multiplier - - sig do - params(learning_rate_multiplier: T.any(Symbol, Float)).void - end - attr_writer :learning_rate_multiplier - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :n_epochs - - sig { params(n_epochs: T.any(Symbol, Integer)).void } - attr_writer :n_epochs - - # The hyperparameters used for the fine-tuning job. - sig do - params( - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - ).returns(T.attached_class) - end - def self.new( - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - batch_size: nil, - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - beta: nil, - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - learning_rate_multiplier: nil, - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - n_epochs: nil - ) - end - - sig do - override.returns( - { - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) - end - def to_hash - end - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize::Variants - ] - ) - end - def self.variants - end - end - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. 
- module Beta - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta::Variants - ] - ) - end - def self.variants - end - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier::Variants - ] - ) - end - def self.variants - end - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - module NEpochs - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs::Variants - ] - ) - end - def self.variants - end - end - end - end - - class Supervised < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The hyperparameters used for the fine-tuning job. - sig do - returns( - T.nilable( - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - ) - ) - end - attr_reader :hyperparameters - - sig do - params( - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::OrHash - ).void - end - attr_writer :hyperparameters - - # Configuration for the supervised fine-tuning method. - sig do - params( - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::OrHash - ).returns(T.attached_class) - end - def self.new( - # The hyperparameters used for the fine-tuning job. - hyperparameters: nil - ) - end - - sig do - override.returns( - { - hyperparameters: - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - } - ) - end - def to_hash - end - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :batch_size - - sig { params(batch_size: T.any(Symbol, Integer)).void } - attr_writer :batch_size - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :learning_rate_multiplier - - sig do - params(learning_rate_multiplier: T.any(Symbol, Float)).void - end - attr_writer :learning_rate_multiplier - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :n_epochs - - sig { params(n_epochs: T.any(Symbol, Integer)).void } - attr_writer :n_epochs - - # The hyperparameters used for the fine-tuning job. - sig do - params( - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - ).returns(T.attached_class) - end - def self.new( - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. 
- batch_size: nil, - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - learning_rate_multiplier: nil, - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - n_epochs: nil - ) - end - - sig do - override.returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) - end - def to_hash - end - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize::Variants - ] - ) - end - def self.variants - end - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier::Variants - ] - ) - end - def self.variants - end - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - module NEpochs - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs::Variants - ] - ) - end - def self.variants - end - end - end - end - - # The type of method. Is either `supervised` or `dpo`. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. module Type extend OpenAI::Internal::Type::Enum @@ -931,6 +563,11 @@ module OpenAI :dpo, OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol ) + REINFORCEMENT = + T.let( + :reinforcement, + OpenAI::FineTuning::FineTuningJob::Method::Type::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi index eaa8cb24..2f9fcad2 100644 --- a/rbi/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/openai/models/fine_tuning/job_create_params.rbi @@ -526,447 +526,77 @@ module OpenAI OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - # Configuration for the DPO fine-tuning method. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. sig do - returns(T.nilable(OpenAI::FineTuning::JobCreateParams::Method::Dpo)) + returns(OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol) end + attr_accessor :type + + # Configuration for the DPO fine-tuning method. + sig { returns(T.nilable(OpenAI::FineTuning::DpoMethod)) } attr_reader :dpo - sig do - params( - dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo::OrHash - ).void - end + sig { params(dpo: OpenAI::FineTuning::DpoMethod::OrHash).void } attr_writer :dpo - # Configuration for the supervised fine-tuning method. - sig do - returns( - T.nilable(OpenAI::FineTuning::JobCreateParams::Method::Supervised) - ) - end - attr_reader :supervised + # Configuration for the reinforcement fine-tuning method. 
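+ # A creation-time sketch (the `method_` keyword name, model ID, and file
+ # ID are assumptions for illustration, and `grader` is built as in the
+ # alpha grader endpoints):
+ #
+ #   client.fine_tuning.jobs.create(
+ #     model: "o4-mini-2025-04-16",
+ #     training_file: "file-abc123",
+ #     method_: {
+ #       type: :reinforcement,
+ #       reinforcement: {grader: grader, hyperparameters: {n_epochs: 1}}
+ #     }
+ #   )
+ #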
+ sig { returns(T.nilable(OpenAI::FineTuning::ReinforcementMethod)) } + attr_reader :reinforcement sig do params( - supervised: - OpenAI::FineTuning::JobCreateParams::Method::Supervised::OrHash + reinforcement: OpenAI::FineTuning::ReinforcementMethod::OrHash ).void end - attr_writer :supervised + attr_writer :reinforcement - # The type of method. Is either `supervised` or `dpo`. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol - ) - ) - end - attr_reader :type + # Configuration for the supervised fine-tuning method. + sig { returns(T.nilable(OpenAI::FineTuning::SupervisedMethod)) } + attr_reader :supervised sig do params( - type: OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + supervised: OpenAI::FineTuning::SupervisedMethod::OrHash ).void end - attr_writer :type + attr_writer :supervised # The method used for fine-tuning. sig do params( - dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo::OrHash, - supervised: - OpenAI::FineTuning::JobCreateParams::Method::Supervised::OrHash, - type: OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + type: OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol, + dpo: OpenAI::FineTuning::DpoMethod::OrHash, + reinforcement: OpenAI::FineTuning::ReinforcementMethod::OrHash, + supervised: OpenAI::FineTuning::SupervisedMethod::OrHash ).returns(T.attached_class) end def self.new( + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + type:, # Configuration for the DPO fine-tuning method. dpo: nil, + # Configuration for the reinforcement fine-tuning method. + reinforcement: nil, # Configuration for the supervised fine-tuning method. - supervised: nil, - # The type of method. Is either `supervised` or `dpo`. - type: nil + supervised: nil ) end sig do override.returns( { - dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, - supervised: - OpenAI::FineTuning::JobCreateParams::Method::Supervised, type: - OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol + OpenAI::FineTuning::JobCreateParams::Method::Type::OrSymbol, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod } ) end def to_hash end - class Dpo < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The hyperparameters used for the fine-tuning job. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - ) - ) - end - attr_reader :hyperparameters - - sig do - params( - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::OrHash - ).void - end - attr_writer :hyperparameters - - # Configuration for the DPO fine-tuning method. - sig do - params( - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::OrHash - ).returns(T.attached_class) - end - def self.new( - # The hyperparameters used for the fine-tuning job. - hyperparameters: nil - ) - end - - sig do - override.returns( - { - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - } - ) - end - def to_hash - end - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. 
- sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :batch_size - - sig { params(batch_size: T.any(Symbol, Integer)).void } - attr_writer :batch_size - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :beta - - sig { params(beta: T.any(Symbol, Float)).void } - attr_writer :beta - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :learning_rate_multiplier - - sig do - params(learning_rate_multiplier: T.any(Symbol, Float)).void - end - attr_writer :learning_rate_multiplier - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :n_epochs - - sig { params(n_epochs: T.any(Symbol, Integer)).void } - attr_writer :n_epochs - - # The hyperparameters used for the fine-tuning job. - sig do - params( - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - ).returns(T.attached_class) - end - def self.new( - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - batch_size: nil, - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - beta: nil, - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - learning_rate_multiplier: nil, - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - n_epochs: nil - ) - end - - sig do - override.returns( - { - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) - end - def to_hash - end - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - module BatchSize - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize::Variants - ] - ) - end - def self.variants - end - end - - # The beta value for the DPO method. A higher beta value will increase the weight - # of the penalty between the policy and reference model. - module Beta - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta::Variants - ] - ) - end - def self.variants - end - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier::Variants - ] - ) - end - def self.variants - end - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. 
- module NEpochs - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs::Variants - ] - ) - end - def self.variants - end - end - end - end - - class Supervised < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # The hyperparameters used for the fine-tuning job. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - ) - ) - end - attr_reader :hyperparameters - - sig do - params( - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::OrHash - ).void - end - attr_writer :hyperparameters - - # Configuration for the supervised fine-tuning method. - sig do - params( - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::OrHash - ).returns(T.attached_class) - end - def self.new( - # The hyperparameters used for the fine-tuning job. - hyperparameters: nil - ) - end - - sig do - override.returns( - { - hyperparameters: - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - } - ) - end - def to_hash - end - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :batch_size - - sig { params(batch_size: T.any(Symbol, Integer)).void } - attr_writer :batch_size - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } - attr_reader :learning_rate_multiplier - - sig do - params(learning_rate_multiplier: T.any(Symbol, Float)).void - end - attr_writer :learning_rate_multiplier - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :n_epochs - - sig { params(n_epochs: T.any(Symbol, Integer)).void } - attr_writer :n_epochs - - # The hyperparameters used for the fine-tuning job. - sig do - params( - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - ).returns(T.attached_class) - end - def self.new( - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. - batch_size: nil, - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - learning_rate_multiplier: nil, - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - n_epochs: nil - ) - end - - sig do - override.returns( - { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) - } - ) - end - def to_hash - end - - # Number of examples in each batch. A larger batch size means that model - # parameters are updated less frequently, but with lower variance. 
- module BatchSize - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize::Variants - ] - ) - end - def self.variants - end - end - - # Scaling factor for the learning rate. A smaller learning rate may be useful to - # avoid overfitting. - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Float) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier::Variants - ] - ) - end - def self.variants - end - end - - # The number of epochs to train the model for. An epoch refers to one full cycle - # through the training dataset. - module NEpochs - extend OpenAI::Internal::Type::Union - - Variants = T.type_alias { T.any(Symbol, Integer) } - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs::Variants - ] - ) - end - def self.variants - end - end - end - end - - # The type of method. Is either `supervised` or `dpo`. + # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. module Type extend OpenAI::Internal::Type::Enum @@ -986,6 +616,11 @@ module OpenAI :dpo, OpenAI::FineTuning::JobCreateParams::Method::Type::TaggedSymbol ) + REINFORCEMENT = + T.let( + :reinforcement, + OpenAI::FineTuning::JobCreateParams::Method::Type::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/fine_tuning/job_pause_params.rbi b/rbi/openai/models/fine_tuning/job_pause_params.rbi new file mode 100644 index 00000000..66935edf --- /dev/null +++ b/rbi/openai/models/fine_tuning/job_pause_params.rbi @@ -0,0 +1,26 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class JobPauseParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/job_resume_params.rbi b/rbi/openai/models/fine_tuning/job_resume_params.rbi new file mode 100644 index 00000000..da91536e --- /dev/null +++ b/rbi/openai/models/fine_tuning/job_resume_params.rbi @@ -0,0 +1,26 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class JobResumeParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi new file mode 100644 index 00000000..1c81d0d8 --- /dev/null +++ b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi @@ -0,0 +1,275 @@ +# 
typed: strong + +module OpenAI + module Models + module FineTuning + class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :batch_size + + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size + + # Multiplier on amount of compute used for exploring search space during training. + sig { returns(T.nilable(T.any(Symbol, Float))) } + attr_reader :compute_multiplier + + sig { params(compute_multiplier: T.any(Symbol, Float)).void } + attr_writer :compute_multiplier + + # The number of training steps between evaluation runs. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :eval_interval + + sig { params(eval_interval: T.any(Symbol, Integer)).void } + attr_writer :eval_interval + + # Number of evaluation samples to generate per training step. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :eval_samples + + sig { params(eval_samples: T.any(Symbol, Integer)).void } + attr_writer :eval_samples + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + sig { returns(T.nilable(T.any(Symbol, Float))) } + attr_reader :learning_rate_multiplier + + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :n_epochs + + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs + + # Level of reasoning effort. + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol + ) + ) + end + attr_reader :reasoning_effort + + sig do + params( + reasoning_effort: + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol + ).void + end + attr_writer :reasoning_effort + + # The hyperparameters used for the reinforcement fine-tuning job. + sig do + params( + batch_size: T.any(Symbol, Integer), + compute_multiplier: T.any(Symbol, Float), + eval_interval: T.any(Symbol, Integer), + eval_samples: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer), + reasoning_effort: + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Multiplier on amount of compute used for exploring search space during training. + compute_multiplier: nil, + # The number of training steps between evaluation runs. + eval_interval: nil, + # Number of evaluation samples to generate per training step. + eval_samples: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil, + # Level of reasoning effort. 
+ reasoning_effort: nil + ) + end + + sig do + override.returns( + { + batch_size: T.any(Symbol, Integer), + compute_multiplier: T.any(Symbol, Float), + eval_interval: T.any(Symbol, Integer), + eval_samples: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer), + reasoning_effort: + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol + } + ) + end + def to_hash + end + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + module BatchSize + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end + end + + # Multiplier on amount of compute used for exploring search space during training. + module ComputeMultiplier + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants + ] + ) + end + def self.variants + end + end + + # The number of training steps between evaluation runs. + module EvalInterval + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants + ] + ) + end + def self.variants + end + end + + # Number of evaluation samples to generate per training step. + module EvalSamples + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants + ] + ) + end + def self.variants + end + end + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end + end + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + module NEpochs + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end + end + + # Level of reasoning effort. 
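+ # A sketch of a mixed hyperparameter hash, combining explicit numbers,
+ # the `:auto` symbol variant, and the reasoning-effort enum (placeholder
+ # values):
+ #
+ #   {eval_interval: 10, eval_samples: 4, reasoning_effort: :medium, n_epochs: :auto}
+ #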
+ module ReasoningEffort + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + DEFAULT = + T.let( + :default, + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/reinforcement_method.rbi b/rbi/openai/models/fine_tuning/reinforcement_method.rbi new file mode 100644 index 00000000..2ed3ae7b --- /dev/null +++ b/rbi/openai/models/fine_tuning/reinforcement_method.rbi @@ -0,0 +1,106 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class ReinforcementMethod < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The grader used for the fine-tuning job. + sig do + returns( + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + ) + end + attr_accessor :grader + + # The hyperparameters used for the reinforcement fine-tuning job. + sig do + returns(T.nilable(OpenAI::FineTuning::ReinforcementHyperparameters)) + end + attr_reader :hyperparameters + + sig do + params( + hyperparameters: + OpenAI::FineTuning::ReinforcementHyperparameters::OrHash + ).void + end + attr_writer :hyperparameters + + # Configuration for the reinforcement fine-tuning method. + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ), + hyperparameters: + OpenAI::FineTuning::ReinforcementHyperparameters::OrHash + ).returns(T.attached_class) + end + def self.new( + # The grader used for the fine-tuning job. + grader:, + # The hyperparameters used for the reinforcement fine-tuning job. + hyperparameters: nil + ) + end + + sig do + override.returns( + { + grader: + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ), + hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters + } + ) + end + def to_hash + end + + # The grader used for the fine-tuning job. 
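+ # A construction sketch using the Python grader variant (the grader name
+ # and `source` body are placeholders):
+ #
+ #   OpenAI::FineTuning::ReinforcementMethod.new(
+ #     grader: {
+ #       type: :python,
+ #       name: "my-grader",
+ #       source: "def grade(sample, item):\n    return 1.0"
+ #     },
+ #     hyperparameters: {reasoning_effort: :low}
+ #   )
+ #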
+ module Grader + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::ReinforcementMethod::Grader::Variants + ] + ) + end + def self.variants + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi new file mode 100644 index 00000000..bc9b8321 --- /dev/null +++ b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi @@ -0,0 +1,122 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :batch_size + + sig { params(batch_size: T.any(Symbol, Integer)).void } + attr_writer :batch_size + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + sig { returns(T.nilable(T.any(Symbol, Float))) } + attr_reader :learning_rate_multiplier + + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + attr_writer :learning_rate_multiplier + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + sig { returns(T.nilable(T.any(Symbol, Integer))) } + attr_reader :n_epochs + + sig { params(n_epochs: T.any(Symbol, Integer)).void } + attr_writer :n_epochs + + # The hyperparameters used for the fine-tuning job. + sig do + params( + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + ).returns(T.attached_class) + end + def self.new( + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + batch_size: nil, + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. + learning_rate_multiplier: nil, + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + n_epochs: nil + ) + end + + sig do + override.returns( + { + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) + } + ) + end + def to_hash + end + + # Number of examples in each batch. A larger batch size means that model + # parameters are updated less frequently, but with lower variance. + module BatchSize + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants + ] + ) + end + def self.variants + end + end + + # Scaling factor for the learning rate. A smaller learning rate may be useful to + # avoid overfitting. 
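+ # As with the DPO variant, each field accepts `:auto` or an explicit
+ # number; a placeholder example:
+ #
+ #   {batch_size: :auto, learning_rate_multiplier: 2.0, n_epochs: 4}
+ #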
+ module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Float) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants + ] + ) + end + def self.variants + end + end + + # The number of epochs to train the model for. An epoch refers to one full cycle + # through the training dataset. + module NEpochs + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias { T.any(Symbol, Integer) } + + sig do + override.returns( + T::Array[ + OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants + ] + ) + end + def self.variants + end + end + end + end + end +end diff --git a/rbi/openai/models/fine_tuning/supervised_method.rbi b/rbi/openai/models/fine_tuning/supervised_method.rbi new file mode 100644 index 00000000..01cf3450 --- /dev/null +++ b/rbi/openai/models/fine_tuning/supervised_method.rbi @@ -0,0 +1,46 @@ +# typed: strong + +module OpenAI + module Models + module FineTuning + class SupervisedMethod < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The hyperparameters used for the fine-tuning job. + sig do + returns(T.nilable(OpenAI::FineTuning::SupervisedHyperparameters)) + end + attr_reader :hyperparameters + + sig do + params( + hyperparameters: + OpenAI::FineTuning::SupervisedHyperparameters::OrHash + ).void + end + attr_writer :hyperparameters + + # Configuration for the supervised fine-tuning method. + sig do + params( + hyperparameters: + OpenAI::FineTuning::SupervisedHyperparameters::OrHash + ).returns(T.attached_class) + end + def self.new( + # The hyperparameters used for the fine-tuning job. + hyperparameters: nil + ) + end + + sig do + override.returns( + { hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi new file mode 100644 index 00000000..7ed4f4e7 --- /dev/null +++ b/rbi/openai/models/graders/label_model_grader.rbi @@ -0,0 +1,287 @@ +# typed: strong + +module OpenAI + module Models + LabelModelGrader = Graders::LabelModelGrader + + module Graders + class LabelModelGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + sig { returns(T::Array[OpenAI::Graders::LabelModelGrader::Input]) } + attr_accessor :input + + # The labels to assign to each item in the evaluation. + sig { returns(T::Array[String]) } + attr_accessor :labels + + # The model to use for the evaluation. Must support structured outputs. + sig { returns(String) } + attr_accessor :model + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The labels that indicate a passing result. Must be a subset of labels. + sig { returns(T::Array[String]) } + attr_accessor :passing_labels + + # The object type, which is always `label_model`. + sig { returns(Symbol) } + attr_accessor :type + + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + sig do + params( + input: T::Array[OpenAI::Graders::LabelModelGrader::Input::OrHash], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + input:, + # The labels to assign to each item in the evaluation. 
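+          # For instance (hypothetical labels), a binary rubric might use
+          # `labels: ["correct", "incorrect"]` together with
+          # `passing_labels: ["correct"]`.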
+ labels:, + # The model to use for the evaluation. Must support structured outputs. + model:, + # The name of the grader. + name:, + # The labels that indicate a passing result. Must be a subset of labels. + passing_labels:, + # The object type, which is always `label_model`. + type: :label_model + ) + end + + sig do + override.returns( + { + input: T::Array[OpenAI::Graders::LabelModelGrader::Input], + labels: T::Array[String], + model: String, + name: String, + passing_labels: T::Array[String], + type: Symbol + } + ) + end + def to_hash + end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end + end +end diff --git a/rbi/openai/models/graders/multi_grader.rbi b/rbi/openai/models/graders/multi_grader.rbi new file mode 100644 index 00000000..73159cd7 --- /dev/null +++ b/rbi/openai/models/graders/multi_grader.rbi @@ -0,0 +1,120 @@ +# typed: strong + +module OpenAI + module Models + MultiGrader = Graders::MultiGrader + + module Graders + class MultiGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # A formula to calculate the output based on grader results. + sig { returns(String) } + attr_accessor :calculate_output + + sig do + returns( + T::Hash[ + Symbol, + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::LabelModelGrader + ) + ] + ) + end + attr_accessor :graders + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The type of grader. + sig { returns(Symbol) } + attr_accessor :type + + # A MultiGrader object combines the output of multiple graders to produce a single + # score. + sig do + params( + calculate_output: String, + graders: + T::Hash[ + Symbol, + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::LabelModelGrader::OrHash + ) + ], + name: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A formula to calculate the output based on grader results. + calculate_output:, + graders:, + # The name of the grader. + name:, + # The type of grader. 
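+          # For reference, a complete construction might look like this
+          # sketch (names, formula, and sub-grader bodies are hypothetical
+          # and elided):
+          #
+          #   OpenAI::Graders::MultiGrader.new(
+          #     name: "combined",
+          #     graders: {exact: {...}, fuzzy: {...}},
+          #     calculate_output: "0.5 * exact + 0.5 * fuzzy"
+          #   )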
+            type: :multi
+          )
+          end
+
+          sig do
+            override.returns(
+              {
+                calculate_output: String,
+                graders:
+                  T::Hash[
+                    Symbol,
+                    T.any(
+                      OpenAI::Graders::StringCheckGrader,
+                      OpenAI::Graders::TextSimilarityGrader,
+                      OpenAI::Graders::PythonGrader,
+                      OpenAI::Graders::ScoreModelGrader,
+                      OpenAI::Graders::LabelModelGrader
+                    )
+                  ],
+                name: String,
+                type: Symbol
+              }
+            )
+          end
+          def to_hash
+          end
+
+          # A grader to be combined by the MultiGrader: one of
+          # StringCheckGrader, TextSimilarityGrader, PythonGrader,
+          # ScoreModelGrader, or LabelModelGrader.
+          module Grader
+            extend OpenAI::Internal::Type::Union
+
+            Variants =
+              T.type_alias do
+                T.any(
+                  OpenAI::Graders::StringCheckGrader,
+                  OpenAI::Graders::TextSimilarityGrader,
+                  OpenAI::Graders::PythonGrader,
+                  OpenAI::Graders::ScoreModelGrader,
+                  OpenAI::Graders::LabelModelGrader
+                )
+              end
+
+            sig do
+              override.returns(
+                T::Array[OpenAI::Graders::MultiGrader::Grader::Variants]
+              )
+            end
+            def self.variants
+            end
+          end
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/openai/models/graders/python_grader.rbi b/rbi/openai/models/graders/python_grader.rbi
new file mode 100644
index 00000000..35441d1b
--- /dev/null
+++ b/rbi/openai/models/graders/python_grader.rbi
@@ -0,0 +1,61 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    PythonGrader = Graders::PythonGrader
+
+    module Graders
+      class PythonGrader < OpenAI::Internal::Type::BaseModel
+        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
+        # The name of the grader.
+        sig { returns(String) }
+        attr_accessor :name
+
+        # The source code of the python script.
+        sig { returns(String) }
+        attr_accessor :source
+
+        # The object type, which is always `python`.
+        sig { returns(Symbol) }
+        attr_accessor :type
+
+        # The image tag to use for the python script.
+        sig { returns(T.nilable(String)) }
+        attr_reader :image_tag
+
+        sig { params(image_tag: String).void }
+        attr_writer :image_tag
+
+        # A PythonGrader object that runs a python script on the input.
+        sig do
+          params(
+            name: String,
+            source: String,
+            image_tag: String,
+            type: Symbol
+          ).returns(T.attached_class)
+        end
+        def self.new(
+          # The name of the grader.
+          name:,
+          # The source code of the python script.
+          source:,
+          # The image tag to use for the python script.
+          image_tag: nil,
+          # The object type, which is always `python`.
+          type: :python
+        )
+        end
+
+        sig do
+          override.returns(
+            { name: String, source: String, type: Symbol, image_tag: String }
+          )
+        end
+        def to_hash
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi
new file mode 100644
index 00000000..1e082a6e
--- /dev/null
+++ b/rbi/openai/models/graders/score_model_grader.rbi
@@ -0,0 +1,294 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    ScoreModelGrader = Graders::ScoreModelGrader
+
+    module Graders
+      class ScoreModelGrader < OpenAI::Internal::Type::BaseModel
+        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+
+        # The input text. This may include template strings.
+        sig { returns(T::Array[OpenAI::Graders::ScoreModelGrader::Input]) }
+        attr_accessor :input
+
+        # The model to use for the evaluation.
+        sig { returns(String) }
+        attr_accessor :model
+
+        # The name of the grader.
+        sig { returns(String) }
+        attr_accessor :name
+
+        # The object type, which is always `score_model`.
+        sig { returns(Symbol) }
+        attr_accessor :type
+
+        # The range of the score. Defaults to `[0, 1]`.
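+        # For example, a 0-10 rubric could be declared with a hypothetical
+        # `range: [0.0, 10.0]`, so returned scores are read on that scale
+        # instead of the `[0, 1]` default.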
+ sig { returns(T.nilable(T::Array[Float])) } + attr_reader :range + + sig { params(range: T::Array[Float]).void } + attr_writer :range + + # The sampling parameters for the model. + sig { returns(T.nilable(T.anything)) } + attr_reader :sampling_params + + sig { params(sampling_params: T.anything).void } + attr_writer :sampling_params + + # A ScoreModelGrader object that uses a model to assign a score to the input. + sig do + params( + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input::OrHash], + model: String, + name: String, + range: T::Array[Float], + sampling_params: T.anything, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The input text. This may include template strings. + input:, + # The model to use for the evaluation. + model:, + # The name of the grader. + name:, + # The range of the score. Defaults to `[0, 1]`. + range: nil, + # The sampling parameters for the model. + sampling_params: nil, + # The object type, which is always `score_model`. + type: :score_model + ) + end + + sig do + override.returns( + { + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input], + model: String, + name: String, + type: Symbol, + range: T::Array[Float], + sampling_params: T.anything + } + ) + end + def to_hash + end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. 
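+          # As an illustration (the template namespaces are assumed rather
+          # than documented here), "{{sample.output_text}}" would be replaced
+          # by the graded model sample and "{{item.reference_answer}}" by the
+          # datasource row at grading time.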
+ module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end + end +end diff --git a/rbi/openai/models/graders/string_check_grader.rbi b/rbi/openai/models/graders/string_check_grader.rbi new file mode 100644 index 00000000..73de12d2 --- /dev/null +++ b/rbi/openai/models/graders/string_check_grader.rbi @@ -0,0 +1,115 @@ +# typed: strong + +module OpenAI + module Models + StringCheckGrader = Graders::StringCheckGrader + + module Graders + class StringCheckGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The input text. This may include template strings. + sig { returns(String) } + attr_accessor :input + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + sig { returns(OpenAI::Graders::StringCheckGrader::Operation::OrSymbol) } + attr_accessor :operation + + # The reference text. This may include template strings. + sig { returns(String) } + attr_accessor :reference + + # The object type, which is always `string_check`. 
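+        #
+        # A minimal usage sketch of this grader (field values hypothetical):
+        #
+        #   OpenAI::Graders::StringCheckGrader.new(
+        #     name: "exact_match",
+        #     input: "{{sample.output_text}}",
+        #     operation: :eq,
+        #     reference: "{{item.answer}}"
+        #   )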
+ sig { returns(Symbol) } + attr_accessor :type + + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. + sig do + params( + input: String, + name: String, + operation: OpenAI::Graders::StringCheckGrader::Operation::OrSymbol, + reference: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The input text. This may include template strings. + input:, + # The name of the grader. + name:, + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + operation:, + # The reference text. This may include template strings. + reference:, + # The object type, which is always `string_check`. + type: :string_check + ) + end + + sig do + override.returns( + { + input: String, + name: String, + operation: + OpenAI::Graders::StringCheckGrader::Operation::OrSymbol, + reference: String, + type: Symbol + } + ) + end + def to_hash + end + + # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + module Operation + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::StringCheckGrader::Operation) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EQ = + T.let( + :eq, + OpenAI::Graders::StringCheckGrader::Operation::TaggedSymbol + ) + NE = + T.let( + :ne, + OpenAI::Graders::StringCheckGrader::Operation::TaggedSymbol + ) + LIKE = + T.let( + :like, + OpenAI::Graders::StringCheckGrader::Operation::TaggedSymbol + ) + ILIKE = + T.let( + :ilike, + OpenAI::Graders::StringCheckGrader::Operation::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::StringCheckGrader::Operation::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/graders/text_similarity_grader.rbi b/rbi/openai/models/graders/text_similarity_grader.rbi new file mode 100644 index 00000000..6d25d1e0 --- /dev/null +++ b/rbi/openai/models/graders/text_similarity_grader.rbi @@ -0,0 +1,155 @@ +# typed: strong + +module OpenAI + module Models + TextSimilarityGrader = Graders::TextSimilarityGrader + + module Graders + class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel + OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + sig do + returns( + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::OrSymbol + ) + end + attr_accessor :evaluation_metric + + # The text being graded. + sig { returns(String) } + attr_accessor :input + + # The name of the grader. + sig { returns(String) } + attr_accessor :name + + # The text being graded against. + sig { returns(String) } + attr_accessor :reference + + # The type of grader. + sig { returns(Symbol) } + attr_accessor :type + + # A TextSimilarityGrader object which grades text based on similarity metrics. + sig do + params( + evaluation_metric: + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::OrSymbol, + input: String, + name: String, + reference: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + evaluation_metric:, + # The text being graded. + input:, + # The name of the grader. + name:, + # The text being graded against. 
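+          # For example, grading a sample against a gold answer with ROUGE-L
+          # (field values hypothetical):
+          #
+          #   OpenAI::Graders::TextSimilarityGrader.new(
+          #     name: "rouge",
+          #     evaluation_metric: :rouge_l,
+          #     input: "{{sample.output_text}}",
+          #     reference: "{{item.answer}}"
+          #   )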
+ reference:, + # The type of grader. + type: :text_similarity + ) + end + + sig do + override.returns( + { + evaluation_metric: + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::OrSymbol, + input: String, + name: String, + reference: String, + type: Symbol + } + ) + end + def to_hash + end + + # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, + # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + module EvaluationMetric + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + FUZZY_MATCH = + T.let( + :fuzzy_match, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + BLEU = + T.let( + :bleu, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + GLEU = + T.let( + :gleu, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + METEOR = + T.let( + :meteor, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_1 = + T.let( + :rouge_1, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_2 = + T.let( + :rouge_2, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_3 = + T.let( + :rouge_3, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_4 = + T.let( + :rouge_4, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_5 = + T.let( + :rouge_5, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + ROUGE_L = + T.let( + :rouge_l, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index a55cb86d..bcc6ae0a 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -16,14 +16,14 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, - OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions::OrHash ), testing_criteria: T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::OrHash, - OpenAI::EvalStringCheckGrader::OrHash, - OpenAI::EvalTextSimilarityGrader::OrHash, + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity::OrHash, OpenAI::EvalCreateParams::TestingCriterion::Python::OrHash, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::OrHash ) diff --git a/rbi/openai/resources/fine_tuning.rbi b/rbi/openai/resources/fine_tuning.rbi index e790ba5e..42e53e48 100644 --- a/rbi/openai/resources/fine_tuning.rbi +++ b/rbi/openai/resources/fine_tuning.rbi @@ -3,12 +3,18 @@ module OpenAI module Resources class FineTuning + sig { returns(OpenAI::Resources::FineTuning::Methods) } + attr_reader :methods_ + sig { returns(OpenAI::Resources::FineTuning::Jobs) } attr_reader :jobs sig { returns(OpenAI::Resources::FineTuning::Checkpoints) } attr_reader :checkpoints + sig { returns(OpenAI::Resources::FineTuning::Alpha) } + attr_reader :alpha + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git 
a/rbi/openai/resources/fine_tuning/alpha.rbi b/rbi/openai/resources/fine_tuning/alpha.rbi new file mode 100644 index 00000000..86bfd743 --- /dev/null +++ b/rbi/openai/resources/fine_tuning/alpha.rbi @@ -0,0 +1,17 @@ +# typed: strong + +module OpenAI + module Resources + class FineTuning + class Alpha + sig { returns(OpenAI::Resources::FineTuning::Alpha::Graders) } + attr_reader :graders + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/rbi/openai/resources/fine_tuning/alpha/graders.rbi b/rbi/openai/resources/fine_tuning/alpha/graders.rbi new file mode 100644 index 00000000..4e22b461 --- /dev/null +++ b/rbi/openai/resources/fine_tuning/alpha/graders.rbi @@ -0,0 +1,65 @@ +# typed: strong + +module OpenAI + module Resources + class FineTuning + class Alpha + class Graders + # Run a grader. + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ), + model_sample: String, + reference_answer: + T.any(String, T.anything, T::Array[T.anything], Float), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::FineTuning::Alpha::GraderRunResponse) + end + def run( + # The grader used for the fine-tuning job. + grader:, + # The model sample to be evaluated. + model_sample:, + # The reference answer for the evaluation. + reference_answer:, + request_options: {} + ) + end + + # Validate a grader. + sig do + params( + grader: + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::MultiGrader::OrHash + ), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::FineTuning::Alpha::GraderValidateResponse) + end + def validate( + # The grader used for the fine-tuning job. + grader:, + request_options: {} + ) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end + end +end diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index 06098504..e60f057d 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -176,6 +176,34 @@ module OpenAI ) end + # Pause a fine-tune job. + sig do + params( + fine_tuning_job_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FineTuning::FineTuningJob) + end + def pause( + # The ID of the fine-tuning job to pause. + fine_tuning_job_id, + request_options: {} + ) + end + + # Resume a fine-tune job. + sig do + params( + fine_tuning_job_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::FineTuning::FineTuningJob) + end + def resume( + # The ID of the fine-tuning job to resume. 
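+          # A usage sketch (the job ID is hypothetical):
+          #
+          #   client.fine_tuning.jobs.pause("ftjob-abc123")
+          #   client.fine_tuning.jobs.resume("ftjob-abc123")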
+ fine_tuning_job_id, + request_options: {} + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/openai/resources/fine_tuning/methods.rbi b/rbi/openai/resources/fine_tuning/methods.rbi new file mode 100644 index 00000000..a03708e5 --- /dev/null +++ b/rbi/openai/resources/fine_tuning/methods.rbi @@ -0,0 +1,14 @@ +# typed: strong + +module OpenAI + module Resources + class FineTuning + class Methods + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/rbi/openai/resources/graders.rbi b/rbi/openai/resources/graders.rbi new file mode 100644 index 00000000..b409a493 --- /dev/null +++ b/rbi/openai/resources/graders.rbi @@ -0,0 +1,15 @@ +# typed: strong + +module OpenAI + module Resources + class Graders + sig { returns(OpenAI::Resources::Graders::GraderModels) } + attr_reader :grader_models + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end +end diff --git a/rbi/openai/resources/graders/grader_models.rbi b/rbi/openai/resources/graders/grader_models.rbi new file mode 100644 index 00000000..bc4bcd24 --- /dev/null +++ b/rbi/openai/resources/graders/grader_models.rbi @@ -0,0 +1,14 @@ +# typed: strong + +module OpenAI + module Resources + class Graders + class GraderModels + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 154568f7..929bc894 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -32,6 +32,8 @@ module OpenAI attr_reader fine_tuning: OpenAI::Resources::FineTuning + attr_reader graders: OpenAI::Resources::Graders + attr_reader vector_stores: OpenAI::Resources::VectorStores attr_reader beta: OpenAI::Resources::Beta diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index 7802cabe..f7ef937f 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -57,8 +57,6 @@ module OpenAI class EvalDeleteParams = OpenAI::Models::EvalDeleteParams - class EvalLabelModelGrader = OpenAI::Models::EvalLabelModelGrader - class EvalListParams = OpenAI::Models::EvalListParams class EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams @@ -67,10 +65,6 @@ module OpenAI class EvalStoredCompletionsDataSourceConfig = OpenAI::Models::EvalStoredCompletionsDataSourceConfig - class EvalStringCheckGrader = OpenAI::Models::EvalStringCheckGrader - - class EvalTextSimilarityGrader = OpenAI::Models::EvalTextSimilarityGrader - class EvalUpdateParams = OpenAI::Models::EvalUpdateParams module FileChunkingStrategy = OpenAI::Models::FileChunkingStrategy @@ -101,6 +95,8 @@ module OpenAI FunctionParameters: OpenAI::Internal::Type::Converter + module Graders = OpenAI::Models::Graders + class Image = OpenAI::Models::Image class ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index ac24927c..32949757 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -33,7 +33,7 @@ module OpenAI type data_source_config = OpenAI::EvalCreateParams::DataSourceConfig::Custom - | OpenAI::EvalCreateParams::DataSourceConfig::Logs + | OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions module DataSourceConfig extend OpenAI::Internal::Type::Union @@ 
-61,16 +61,20 @@ module OpenAI ) -> void end - type logs = { type: :logs, metadata: ::Hash[Symbol, top] } + type stored_completions = + { type: :stored_completions, metadata: ::Hash[Symbol, top] } - class Logs < OpenAI::Internal::Type::BaseModel - attr_accessor type: :logs + class StoredCompletions < OpenAI::Internal::Type::BaseModel + attr_accessor type: :stored_completions attr_reader metadata: ::Hash[Symbol, top]? def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void + def initialize: ( + ?metadata: ::Hash[Symbol, top], + ?type: :stored_completions + ) -> void end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::data_source_config] @@ -78,8 +82,8 @@ module OpenAI type testing_criterion = OpenAI::EvalCreateParams::TestingCriterion::LabelModel - | OpenAI::EvalStringCheckGrader - | OpenAI::EvalTextSimilarityGrader + | OpenAI::Graders::StringCheckGrader + | OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity | OpenAI::EvalCreateParams::TestingCriterion::Python | OpenAI::EvalCreateParams::TestingCriterion::ScoreModel @@ -208,149 +212,34 @@ module OpenAI end end - type python = - { - name: String, - source: String, - type: :python, - image_tag: String, - pass_threshold: Float - } - - class Python < OpenAI::Internal::Type::BaseModel - attr_accessor name: String - - attr_accessor source: String - - attr_accessor type: :python - - attr_reader image_tag: String? - - def image_tag=: (String) -> String + type text_similarity = { pass_threshold: Float } - attr_reader pass_threshold: Float? + class TextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + def pass_threshold: -> Float - def pass_threshold=: (Float) -> Float + def pass_threshold=: (Float _) -> Float - def initialize: ( - name: String, - source: String, - ?image_tag: String, - ?pass_threshold: Float, - ?type: :python - ) -> void + def initialize: (pass_threshold: Float) -> void end - type score_model = - { - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: :score_model, - pass_threshold: Float, - range: ::Array[Float], - sampling_params: top - } - - class ScoreModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor type: :score_model - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - attr_reader range: ::Array[Float]? - - def range=: (::Array[Float]) -> ::Array[Float] + type python = { pass_threshold: Float } - attr_reader sampling_params: top? + class Python < OpenAI::Models::Graders::PythonGrader + def pass_threshold: -> Float? 
- def sampling_params=: (top) -> top + def pass_threshold=: (Float _) -> Float - def initialize: ( - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - ?pass_threshold: Float, - ?range: ::Array[Float], - ?sampling_params: top, - ?type: :score_model - ) -> void - - type input = - { - content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role, - type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ - } - - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content - - attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role - - attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_? - - def type=: ( - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ - ) -> OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ - - def initialize: ( - content: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role, - ?type: OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::role] - end + def initialize: (?pass_threshold: Float) -> void + end - type type_ = :message + type score_model = { pass_threshold: Float } - module Type - extend OpenAI::Internal::Type::Enum + class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader + def pass_threshold: -> Float? 
- MESSAGE: :message + def pass_threshold=: (Float _) -> Float - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::ScoreModel::Input::type_] - end - end + def initialize: (?pass_threshold: Float) -> void end def self?.variants: -> ::Array[OpenAI::EvalCreateParams::testing_criterion] diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 8f0a934e..3f610608 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -47,158 +47,43 @@ module OpenAI end type testing_criterion = - OpenAI::EvalLabelModelGrader - | OpenAI::EvalStringCheckGrader - | OpenAI::EvalTextSimilarityGrader - | OpenAI::Models::EvalCreateResponse::TestingCriterion::Python - | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader + | OpenAI::Graders::StringCheckGrader + | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity + | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython + | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - type python = - { - name: String, - source: String, - type: :python, - image_tag: String, - pass_threshold: Float - } + type eval_grader_text_similarity = { pass_threshold: Float } - class Python < OpenAI::Internal::Type::BaseModel - attr_accessor name: String + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + def pass_threshold: -> Float - attr_accessor source: String + def pass_threshold=: (Float _) -> Float - attr_accessor type: :python - - attr_reader image_tag: String? - - def image_tag=: (String) -> String - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - def initialize: ( - name: String, - source: String, - ?image_tag: String, - ?pass_threshold: Float, - ?type: :python - ) -> void + def initialize: (pass_threshold: Float) -> void end - type score_model = - { - input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: :score_model, - pass_threshold: Float, - range: ::Array[Float], - sampling_params: top - } - - class ScoreModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor type: :score_model - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - attr_reader range: ::Array[Float]? - - def range=: (::Array[Float]) -> ::Array[Float] - - attr_reader sampling_params: top? - - def sampling_params=: (top) -> top - - def initialize: ( - input: ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - ?pass_threshold: Float, - ?range: ::Array[Float], - ?sampling_params: top, - ?type: :score_model - ) -> void + type eval_grader_python = { pass_threshold: Float } - type input = - { - content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role, - type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ - } + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + def pass_threshold: -> Float? 
- class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content + def pass_threshold=: (Float _) -> Float - attr_accessor role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role - - attr_reader type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_? - - def type=: ( - OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ - - def initialize: ( - content: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role, - ?type: OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::role] - end + def initialize: (?pass_threshold: Float) -> void + end - type type_ = :message + type eval_grader_score_model = { pass_threshold: Float } - module Type - extend OpenAI::Internal::Type::Enum + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader + def pass_threshold: -> Float? 
- MESSAGE: :message + def pass_threshold=: (Float _) -> Float - def self?.values: -> ::Array[OpenAI::Models::EvalCreateResponse::TestingCriterion::ScoreModel::Input::type_] - end - end + def initialize: (?pass_threshold: Float) -> void end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] diff --git a/sig/openai/models/eval_label_model_grader.rbs b/sig/openai/models/eval_label_model_grader.rbs deleted file mode 100644 index 63867ffd..00000000 --- a/sig/openai/models/eval_label_model_grader.rbs +++ /dev/null @@ -1,105 +0,0 @@ -module OpenAI - module Models - type eval_label_model_grader = - { - input: ::Array[OpenAI::EvalLabelModelGrader::Input], - labels: ::Array[String], - model: String, - name: String, - passing_labels: ::Array[String], - type: :label_model - } - - class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalLabelModelGrader::Input] - - attr_accessor labels: ::Array[String] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor passing_labels: ::Array[String] - - attr_accessor type: :label_model - - def initialize: ( - input: ::Array[OpenAI::EvalLabelModelGrader::Input], - labels: ::Array[String], - model: String, - name: String, - passing_labels: ::Array[String], - ?type: :label_model - ) -> void - - type input = - { - content: OpenAI::EvalLabelModelGrader::Input::content, - role: OpenAI::EvalLabelModelGrader::Input::role, - type: OpenAI::EvalLabelModelGrader::Input::type_ - } - - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalLabelModelGrader::Input::content - - attr_accessor role: OpenAI::EvalLabelModelGrader::Input::role - - attr_reader type: OpenAI::EvalLabelModelGrader::Input::type_? 
- - def type=: ( - OpenAI::EvalLabelModelGrader::Input::type_ - ) -> OpenAI::EvalLabelModelGrader::Input::type_ - - def initialize: ( - content: OpenAI::EvalLabelModelGrader::Input::content, - role: OpenAI::EvalLabelModelGrader::Input::role, - ?type: OpenAI::EvalLabelModelGrader::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::EvalLabelModelGrader::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::EvalLabelModelGrader::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::EvalLabelModelGrader::Input::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::EvalLabelModelGrader::Input::type_] - end - end - end - end -end diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index 0384d54a..b53da12b 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -47,158 +47,43 @@ module OpenAI end type testing_criterion = - OpenAI::EvalLabelModelGrader - | OpenAI::EvalStringCheckGrader - | OpenAI::EvalTextSimilarityGrader - | OpenAI::Models::EvalListResponse::TestingCriterion::Python - | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader + | OpenAI::Graders::StringCheckGrader + | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity + | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython + | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - type python = - { - name: String, - source: String, - type: :python, - image_tag: String, - pass_threshold: Float - } + type eval_grader_text_similarity = { pass_threshold: Float } - class Python < OpenAI::Internal::Type::BaseModel - attr_accessor name: String + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + def pass_threshold: -> Float - attr_accessor source: String + def pass_threshold=: (Float _) -> Float - attr_accessor type: :python - - attr_reader image_tag: String? - - def image_tag=: (String) -> String - - attr_reader pass_threshold: Float? 
- - def pass_threshold=: (Float) -> Float - - def initialize: ( - name: String, - source: String, - ?image_tag: String, - ?pass_threshold: Float, - ?type: :python - ) -> void + def initialize: (pass_threshold: Float) -> void end - type score_model = - { - input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: :score_model, - pass_threshold: Float, - range: ::Array[Float], - sampling_params: top - } - - class ScoreModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor type: :score_model - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - attr_reader range: ::Array[Float]? - - def range=: (::Array[Float]) -> ::Array[Float] - - attr_reader sampling_params: top? - - def sampling_params=: (top) -> top - - def initialize: ( - input: ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - ?pass_threshold: Float, - ?range: ::Array[Float], - ?sampling_params: top, - ?type: :score_model - ) -> void + type eval_grader_python = { pass_threshold: Float } - type input = - { - content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role, - type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ - } + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + def pass_threshold: -> Float? - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content + def pass_threshold=: (Float _) -> Float - attr_accessor role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role - - attr_reader type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_? 
- - def type=: ( - OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ - - def initialize: ( - content: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role, - ?type: OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::role] - end + def initialize: (?pass_threshold: Float) -> void + end - type type_ = :message + type eval_grader_score_model = { pass_threshold: Float } - module Type - extend OpenAI::Internal::Type::Enum + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader + def pass_threshold: -> Float? - MESSAGE: :message + def pass_threshold=: (Float _) -> Float - def self?.values: -> ::Array[OpenAI::Models::EvalListResponse::TestingCriterion::ScoreModel::Input::type_] - end - end + def initialize: (?pass_threshold: Float) -> void end def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::testing_criterion] diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index 191c892e..5bbb313f 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -47,158 +47,43 @@ module OpenAI end type testing_criterion = - OpenAI::EvalLabelModelGrader - | OpenAI::EvalStringCheckGrader - | OpenAI::EvalTextSimilarityGrader - | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Python - | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader + | OpenAI::Graders::StringCheckGrader + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython + | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - type python = - { - name: String, - source: String, - type: :python, - image_tag: String, - pass_threshold: Float - } + type eval_grader_text_similarity = { pass_threshold: Float } - class Python < OpenAI::Internal::Type::BaseModel - attr_accessor name: String + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + def pass_threshold: -> Float - attr_accessor source: String + def pass_threshold=: (Float _) -> Float - attr_accessor type: :python - - attr_reader image_tag: String? - - def image_tag=: (String) -> String - - attr_reader pass_threshold: Float? 
- - def pass_threshold=: (Float) -> Float - - def initialize: ( - name: String, - source: String, - ?image_tag: String, - ?pass_threshold: Float, - ?type: :python - ) -> void + def initialize: (pass_threshold: Float) -> void end - type score_model = - { - input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: :score_model, - pass_threshold: Float, - range: ::Array[Float], - sampling_params: top - } - - class ScoreModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor type: :score_model - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - attr_reader range: ::Array[Float]? - - def range=: (::Array[Float]) -> ::Array[Float] - - attr_reader sampling_params: top? - - def sampling_params=: (top) -> top - - def initialize: ( - input: ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - ?pass_threshold: Float, - ?range: ::Array[Float], - ?sampling_params: top, - ?type: :score_model - ) -> void + type eval_grader_python = { pass_threshold: Float } - type input = - { - content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role, - type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ - } + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + def pass_threshold: -> Float? - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content + def pass_threshold=: (Float _) -> Float - attr_accessor role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role - - attr_reader type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_? 
- - def type=: ( - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ - - def initialize: ( - content: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role, - ?type: OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::role] - end + def initialize: (?pass_threshold: Float) -> void + end - type type_ = :message + type eval_grader_score_model = { pass_threshold: Float } - module Type - extend OpenAI::Internal::Type::Enum + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader + def pass_threshold: -> Float? - MESSAGE: :message + def pass_threshold=: (Float _) -> Float - def self?.values: -> ::Array[OpenAI::Models::EvalRetrieveResponse::TestingCriterion::ScoreModel::Input::type_] - end - end + def initialize: (?pass_threshold: Float) -> void end def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] diff --git a/sig/openai/models/eval_string_check_grader.rbs b/sig/openai/models/eval_string_check_grader.rbs deleted file mode 100644 index 4fd28012..00000000 --- a/sig/openai/models/eval_string_check_grader.rbs +++ /dev/null @@ -1,45 +0,0 @@ -module OpenAI - module Models - type eval_string_check_grader = - { - input: String, - name: String, - operation: OpenAI::EvalStringCheckGrader::operation, - reference: String, - type: :string_check - } - - class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: String - - attr_accessor name: String - - attr_accessor operation: OpenAI::EvalStringCheckGrader::operation - - attr_accessor reference: String - - attr_accessor type: :string_check - - def initialize: ( - input: String, - name: String, - operation: OpenAI::EvalStringCheckGrader::operation, - reference: String, - ?type: :string_check - ) -> void - - type operation = :eq | :ne | :like | :ilike - - module Operation - extend OpenAI::Internal::Type::Enum - - EQ: :eq - NE: :ne - LIKE: :like - ILIKE: :ilike - - def self?.values: -> ::Array[OpenAI::EvalStringCheckGrader::operation] - end - end - end -end diff --git a/sig/openai/models/eval_text_similarity_grader.rbs b/sig/openai/models/eval_text_similarity_grader.rbs deleted file mode 100644 index 1e13b3f2..00000000 --- a/sig/openai/models/eval_text_similarity_grader.rbs +++ /dev/null @@ -1,67 +0,0 @@ -module OpenAI - module Models - type eval_text_similarity_grader = - { - evaluation_metric: 
OpenAI::EvalTextSimilarityGrader::evaluation_metric, - input: String, - pass_threshold: Float, - reference: String, - type: :text_similarity, - name: String - } - - class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel - attr_accessor evaluation_metric: OpenAI::EvalTextSimilarityGrader::evaluation_metric - - attr_accessor input: String - - attr_accessor pass_threshold: Float - - attr_accessor reference: String - - attr_accessor type: :text_similarity - - attr_reader name: String? - - def name=: (String) -> String - - def initialize: ( - evaluation_metric: OpenAI::EvalTextSimilarityGrader::evaluation_metric, - input: String, - pass_threshold: Float, - reference: String, - ?name: String, - ?type: :text_similarity - ) -> void - - type evaluation_metric = - :fuzzy_match - | :bleu - | :gleu - | :meteor - | :rouge_1 - | :rouge_2 - | :rouge_3 - | :rouge_4 - | :rouge_5 - | :rouge_l - - module EvaluationMetric - extend OpenAI::Internal::Type::Enum - - FUZZY_MATCH: :fuzzy_match - BLEU: :bleu - GLEU: :gleu - METEOR: :meteor - ROUGE_1: :rouge_1 - ROUGE_2: :rouge_2 - ROUGE_3: :rouge_3 - ROUGE_4: :rouge_4 - ROUGE_5: :rouge_5 - ROUGE_L: :rouge_l - - def self?.values: -> ::Array[OpenAI::EvalTextSimilarityGrader::evaluation_metric] - end - end - end -end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index 5138ddc0..e1e0e429 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -47,158 +47,43 @@ module OpenAI end type testing_criterion = - OpenAI::EvalLabelModelGrader - | OpenAI::EvalStringCheckGrader - | OpenAI::EvalTextSimilarityGrader - | OpenAI::Models::EvalUpdateResponse::TestingCriterion::Python - | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel + OpenAI::Graders::LabelModelGrader + | OpenAI::Graders::StringCheckGrader + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython + | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel module TestingCriterion extend OpenAI::Internal::Type::Union - type python = - { - name: String, - source: String, - type: :python, - image_tag: String, - pass_threshold: Float - } + type eval_grader_text_similarity = { pass_threshold: Float } - class Python < OpenAI::Internal::Type::BaseModel - attr_accessor name: String + class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader + def pass_threshold: -> Float - attr_accessor source: String + def pass_threshold=: (Float _) -> Float - attr_accessor type: :python - - attr_reader image_tag: String? - - def image_tag=: (String) -> String - - attr_reader pass_threshold: Float? - - def pass_threshold=: (Float) -> Float - - def initialize: ( - name: String, - source: String, - ?image_tag: String, - ?pass_threshold: Float, - ?type: :python - ) -> void + def initialize: (pass_threshold: Float) -> void end - type score_model = - { - input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - type: :score_model, - pass_threshold: Float, - range: ::Array[Float], - sampling_params: top - } - - class ScoreModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input] - - attr_accessor model: String - - attr_accessor name: String - - attr_accessor type: :score_model - - attr_reader pass_threshold: Float? 
- - def pass_threshold=: (Float) -> Float - - attr_reader range: ::Array[Float]? - - def range=: (::Array[Float]) -> ::Array[Float] - - attr_reader sampling_params: top? - - def sampling_params=: (top) -> top - - def initialize: ( - input: ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input], - model: String, - name: String, - ?pass_threshold: Float, - ?range: ::Array[Float], - ?sampling_params: top, - ?type: :score_model - ) -> void + type eval_grader_python = { pass_threshold: Float } - type input = - { - content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role, - type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ - } + class EvalGraderPython < OpenAI::Models::Graders::PythonGrader + def pass_threshold: -> Float? - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content + def pass_threshold=: (Float _) -> Float - attr_accessor role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role - - attr_reader type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_? - - def type=: ( - OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ - - def initialize: ( - content: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content, - role: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role, - ?type: OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::role] - end + def initialize: (?pass_threshold: Float) -> void + end - type type_ = :message + type eval_grader_score_model = { pass_threshold: Float } - module Type - extend OpenAI::Internal::Type::Enum + class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader + def pass_threshold: -> Float? 
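# --- illustrative example, not part of the patch ---
# These testing criteria now subclass the shared Graders models and layer only
# an optional `pass_threshold` on top. A minimal sketch of reading it back,
# assuming the generated readers behave as typed here; `eval_response` and the
# `testing_criteria` field name are assumptions:
#
#   criterion = eval_response.testing_criteria.first
#   if criterion.is_a?(OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)
#     threshold = criterion.pass_threshold # Float or nil; the reader is optional
#   end
# ---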
- MESSAGE: :message + def pass_threshold=: (Float _) -> Float - def self?.values: -> ::Array[OpenAI::Models::EvalUpdateResponse::TestingCriterion::ScoreModel::Input::type_] - end - end + def initialize: (?pass_threshold: Float) -> void end def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs new file mode 100644 index 00000000..4c094ab8 --- /dev/null +++ b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs @@ -0,0 +1,56 @@ +module OpenAI + module Models + module FineTuning + module Alpha + type grader_run_params = + { + grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, + model_sample: String, + reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer + } + & OpenAI::Internal::Type::request_parameters + + class GraderRunParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader + + attr_accessor model_sample: String + + attr_accessor reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer + + def initialize: ( + grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, + model_sample: String, + reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + ?request_options: OpenAI::request_opts + ) -> void + + type grader = + OpenAI::Graders::StringCheckGrader + | OpenAI::Graders::TextSimilarityGrader + | OpenAI::Graders::PythonGrader + | OpenAI::Graders::ScoreModelGrader + | OpenAI::Graders::MultiGrader + + module Grader + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader] + end + + type reference_answer = String | top | ::Array[top] | Float + + module ReferenceAnswer + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer] + + UnionMember2Array: OpenAI::Internal::Type::Converter + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs new file mode 100644 index 00000000..fdc022c0 --- /dev/null +++ b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs @@ -0,0 +1,134 @@ +module OpenAI + module Models + module FineTuning + module Alpha + type grader_run_response = + { + metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: ::Hash[Symbol, top], + reward: Float, + sub_rewards: ::Hash[Symbol, top] + } + + class GraderRunResponse < OpenAI::Internal::Type::BaseModel + attr_accessor metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata + + attr_accessor model_grader_token_usage_per_model: ::Hash[Symbol, top] + + attr_accessor reward: Float + + attr_accessor sub_rewards: ::Hash[Symbol, top] + + def initialize: ( + metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: ::Hash[Symbol, top], + reward: Float, + sub_rewards: ::Hash[Symbol, top] + ) -> void + + type metadata = + { + errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, + execution_time: Float, + name: String, + sampled_model_name: 
String?, + scores: ::Hash[Symbol, top], + token_usage: Integer?, + type: String + } + + class Metadata < OpenAI::Internal::Type::BaseModel + attr_accessor errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors + + attr_accessor execution_time: Float + + attr_accessor name: String + + attr_accessor sampled_model_name: String? + + attr_accessor scores: ::Hash[Symbol, top] + + attr_accessor token_usage: Integer? + + attr_accessor type: String + + def initialize: ( + errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, + execution_time: Float, + name: String, + sampled_model_name: String?, + scores: ::Hash[Symbol, top], + token_usage: Integer?, + type: String + ) -> void + + type errors = + { + formula_parse_error: bool, + invalid_variable_error: bool, + model_grader_parse_error: bool, + model_grader_refusal_error: bool, + model_grader_server_error: bool, + model_grader_server_error_details: String?, + other_error: bool, + python_grader_runtime_error: bool, + python_grader_runtime_error_details: String?, + python_grader_server_error: bool, + python_grader_server_error_type: String?, + sample_parse_error: bool, + truncated_observation_error: bool, + unresponsive_reward_error: bool + } + + class Errors < OpenAI::Internal::Type::BaseModel + attr_accessor formula_parse_error: bool + + attr_accessor invalid_variable_error: bool + + attr_accessor model_grader_parse_error: bool + + attr_accessor model_grader_refusal_error: bool + + attr_accessor model_grader_server_error: bool + + attr_accessor model_grader_server_error_details: String? + + attr_accessor other_error: bool + + attr_accessor python_grader_runtime_error: bool + + attr_accessor python_grader_runtime_error_details: String? + + attr_accessor python_grader_server_error: bool + + attr_accessor python_grader_server_error_type: String? 
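# --- illustrative example, not part of the patch ---
# `Errors` is a flat record of boolean failure flags plus optional detail
# strings. A hedged sketch of inspecting a run result; `resp` is a placeholder
# for a GraderRunResponse:
#
#   errors = resp.metadata.errors
#   if errors.python_grader_runtime_error
#     warn(errors.python_grader_runtime_error_details || "no details")
#   end
# ---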
+ + attr_accessor sample_parse_error: bool + + attr_accessor truncated_observation_error: bool + + attr_accessor unresponsive_reward_error: bool + + def initialize: ( + formula_parse_error: bool, + invalid_variable_error: bool, + model_grader_parse_error: bool, + model_grader_refusal_error: bool, + model_grader_server_error: bool, + model_grader_server_error_details: String?, + other_error: bool, + python_grader_runtime_error: bool, + python_grader_runtime_error_details: String?, + python_grader_server_error: bool, + python_grader_server_error_type: String?, + sample_parse_error: bool, + truncated_observation_error: bool, + unresponsive_reward_error: bool + ) -> void + end + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs new file mode 100644 index 00000000..8eec588b --- /dev/null +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Models + module FineTuning + module Alpha + type grader_validate_params = + { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader + } + & OpenAI::Internal::Type::request_parameters + + class GraderValidateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader + + def initialize: ( + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader, + ?request_options: OpenAI::request_opts + ) -> void + + type grader = + OpenAI::Graders::StringCheckGrader + | OpenAI::Graders::TextSimilarityGrader + | OpenAI::Graders::PythonGrader + | OpenAI::Graders::ScoreModelGrader + | OpenAI::Graders::MultiGrader + + module Grader + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader] + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs new file mode 100644 index 00000000..a2139b77 --- /dev/null +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module FineTuning + module Alpha + type grader_validate_response = + { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + } + + class GraderValidateResponse < OpenAI::Internal::Type::BaseModel + attr_reader grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader? 
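# --- illustrative example, not part of the patch ---
# This response pairs with the `fine_tuning.alpha.graders` resource introduced
# later in this patch. A minimal usage sketch; `client` and `grader` are
# placeholders, and the call shape is assumed from the signatures below:
#
#   resp = client.fine_tuning.alpha.graders.validate(grader: grader)
#   resp.grader # optional reader: may be nil
# ---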
+ + def grader=: ( + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + ) -> OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + + def initialize: ( + ?grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + ) -> void + + type grader = + OpenAI::Graders::StringCheckGrader + | OpenAI::Graders::TextSimilarityGrader + | OpenAI::Graders::PythonGrader + | OpenAI::Graders::ScoreModelGrader + | OpenAI::Graders::MultiGrader + + module Grader + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader] + end + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs new file mode 100644 index 00000000..49aa63fd --- /dev/null +++ b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs @@ -0,0 +1,78 @@ +module OpenAI + module Models + module FineTuning + type dpo_hyperparameters = + { + batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, + beta: OpenAI::FineTuning::DpoHyperparameters::beta, + learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + } + + class DpoHyperparameters < OpenAI::Internal::Type::BaseModel + attr_reader batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size? + + def batch_size=: ( + OpenAI::FineTuning::DpoHyperparameters::batch_size + ) -> OpenAI::FineTuning::DpoHyperparameters::batch_size + + attr_reader beta: OpenAI::FineTuning::DpoHyperparameters::beta? + + def beta=: ( + OpenAI::FineTuning::DpoHyperparameters::beta + ) -> OpenAI::FineTuning::DpoHyperparameters::beta + + attr_reader learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier? + + def learning_rate_multiplier=: ( + OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier + + attr_reader n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs? 
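# --- illustrative example, not part of the patch ---
# Every DPO hyperparameter is a union of `:auto` with a numeric type, and all
# are optional. A construction sketch, assuming keyword initialization as typed:
#
#   hp = OpenAI::FineTuning::DpoHyperparameters.new(
#     batch_size: :auto,
#     beta: 0.1,
#     n_epochs: 3
#   )
# ---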
+ + def n_epochs=: ( + OpenAI::FineTuning::DpoHyperparameters::n_epochs + ) -> OpenAI::FineTuning::DpoHyperparameters::n_epochs + + def initialize: ( + ?batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, + ?beta: OpenAI::FineTuning::DpoHyperparameters::beta, + ?learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + ) -> void + + type batch_size = :auto | Integer + + module BatchSize + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::batch_size] + end + + type beta = :auto | Float + + module Beta + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::beta] + end + + type learning_rate_multiplier = :auto | Float + + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier] + end + + type n_epochs = :auto | Integer + + module NEpochs + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::n_epochs] + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/dpo_method.rbs b/sig/openai/models/fine_tuning/dpo_method.rbs new file mode 100644 index 00000000..4284a080 --- /dev/null +++ b/sig/openai/models/fine_tuning/dpo_method.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Models + module FineTuning + type dpo_method = + { hyperparameters: OpenAI::FineTuning::DpoHyperparameters } + + class DpoMethod < OpenAI::Internal::Type::BaseModel + attr_reader hyperparameters: OpenAI::FineTuning::DpoHyperparameters? + + def hyperparameters=: ( + OpenAI::FineTuning::DpoHyperparameters + ) -> OpenAI::FineTuning::DpoHyperparameters + + def initialize: ( + ?hyperparameters: OpenAI::FineTuning::DpoHyperparameters + ) -> void + end + end + end +end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index bcf014f4..c109a5a8 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -187,207 +187,48 @@ module OpenAI type method_ = { - dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, - supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised, - type: OpenAI::FineTuning::FineTuningJob::Method::type_ + type: OpenAI::FineTuning::FineTuningJob::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_reader dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo? + attr_accessor type: OpenAI::FineTuning::FineTuningJob::Method::type_ + + attr_reader dpo: OpenAI::FineTuning::DpoMethod? def dpo=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo + OpenAI::FineTuning::DpoMethod + ) -> OpenAI::FineTuning::DpoMethod - attr_reader supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised? + attr_reader reinforcement: OpenAI::FineTuning::ReinforcementMethod? 
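# --- illustrative example, not part of the patch ---
# `type` becomes a required discriminator and the per-method configs move to
# the shared DpoMethod / ReinforcementMethod / SupervisedMethod models. A
# hedged sketch; `grader` is a placeholder:
#
#   method = OpenAI::FineTuning::FineTuningJob::Method.new(
#     type: :reinforcement,
#     reinforcement: OpenAI::FineTuning::ReinforcementMethod.new(grader: grader)
#   )
# ---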
- def supervised=: ( - OpenAI::FineTuning::FineTuningJob::Method::Supervised - ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised + def reinforcement=: ( + OpenAI::FineTuning::ReinforcementMethod + ) -> OpenAI::FineTuning::ReinforcementMethod - attr_reader type: OpenAI::FineTuning::FineTuningJob::Method::type_? + attr_reader supervised: OpenAI::FineTuning::SupervisedMethod? - def type=: ( - OpenAI::FineTuning::FineTuningJob::Method::type_ - ) -> OpenAI::FineTuning::FineTuningJob::Method::type_ + def supervised=: ( + OpenAI::FineTuning::SupervisedMethod + ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - ?dpo: OpenAI::FineTuning::FineTuningJob::Method::Dpo, - ?supervised: OpenAI::FineTuning::FineTuningJob::Method::Supervised, - ?type: OpenAI::FineTuning::FineTuningJob::Method::type_ + type: OpenAI::FineTuning::FineTuningJob::Method::type_, + ?dpo: OpenAI::FineTuning::DpoMethod, + ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, + ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void - type dpo = - { - hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - } - - class Dpo < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters? - - def hyperparameters=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - - def initialize: ( - ?hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters - ) -> void - - type hyperparameters = - { - batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs - } - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size? - - def batch_size=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size - - attr_reader beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta? - - def beta=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta - - attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier? - - def learning_rate_multiplier=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier - - attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs? 
- - def n_epochs=: ( - OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs - - def initialize: ( - ?batch_size: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size, - ?beta: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta, - ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs - ) -> void - - type batch_size = :auto | Integer - - module BatchSize - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::batch_size] - end - - type beta = :auto | Float - - module Beta - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::beta] - end - - type learning_rate_multiplier = :auto | Float - - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::learning_rate_multiplier] - end - - type n_epochs = :auto | Integer - - module NEpochs - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::n_epochs] - end - end - end - - type supervised = - { - hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - } - - class Supervised < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters? - - def hyperparameters=: ( - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - - def initialize: ( - ?hyperparameters: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters - ) -> void - - type hyperparameters = - { - batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs - } - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size? - - def batch_size=: ( - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size - - attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier? - - def learning_rate_multiplier=: ( - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier - - attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs? 
- - def n_epochs=: ( - OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs - - def initialize: ( - ?batch_size: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs - ) -> void - - type batch_size = :auto | Integer - - module BatchSize - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::batch_size] - end - - type learning_rate_multiplier = :auto | Float - - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::learning_rate_multiplier] - end - - type n_epochs = :auto | Integer - - module NEpochs - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::n_epochs] - end - end - end - - type type_ = :supervised | :dpo + type type_ = :supervised | :dpo | :reinforcement module Type extend OpenAI::Internal::Type::Enum SUPERVISED: :supervised DPO: :dpo + REINFORCEMENT: :reinforcement def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::type_] end diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 0669fd09..37e6178d 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -179,207 +179,48 @@ module OpenAI type method_ = { - dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, - supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised, - type: OpenAI::FineTuning::JobCreateParams::Method::type_ + type: OpenAI::FineTuning::JobCreateParams::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_reader dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo? + attr_accessor type: OpenAI::FineTuning::JobCreateParams::Method::type_ + + attr_reader dpo: OpenAI::FineTuning::DpoMethod? def dpo=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo + OpenAI::FineTuning::DpoMethod + ) -> OpenAI::FineTuning::DpoMethod - attr_reader supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised? + attr_reader reinforcement: OpenAI::FineTuning::ReinforcementMethod? - def supervised=: ( - OpenAI::FineTuning::JobCreateParams::Method::Supervised - ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised + def reinforcement=: ( + OpenAI::FineTuning::ReinforcementMethod + ) -> OpenAI::FineTuning::ReinforcementMethod - attr_reader type: OpenAI::FineTuning::JobCreateParams::Method::type_? + attr_reader supervised: OpenAI::FineTuning::SupervisedMethod? 
- def type=: ( - OpenAI::FineTuning::JobCreateParams::Method::type_ - ) -> OpenAI::FineTuning::JobCreateParams::Method::type_ + def supervised=: ( + OpenAI::FineTuning::SupervisedMethod + ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - ?dpo: OpenAI::FineTuning::JobCreateParams::Method::Dpo, - ?supervised: OpenAI::FineTuning::JobCreateParams::Method::Supervised, - ?type: OpenAI::FineTuning::JobCreateParams::Method::type_ + type: OpenAI::FineTuning::JobCreateParams::Method::type_, + ?dpo: OpenAI::FineTuning::DpoMethod, + ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, + ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void - type dpo = - { - hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - } - - class Dpo < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters? - - def hyperparameters=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - - def initialize: ( - ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters - ) -> void - - type hyperparameters = - { - batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, - beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, - learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs - } - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size? - - def batch_size=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size - - attr_reader beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta? - - def beta=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta - - attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier? - - def learning_rate_multiplier=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier - - attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs? 
- - def n_epochs=: ( - OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs - - def initialize: ( - ?batch_size: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size, - ?beta: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta, - ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs - ) -> void - - type batch_size = :auto | Integer - - module BatchSize - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::batch_size] - end - - type beta = :auto | Float - - module Beta - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::beta] - end - - type learning_rate_multiplier = :auto | Float - - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::learning_rate_multiplier] - end - - type n_epochs = :auto | Integer - - module NEpochs - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::n_epochs] - end - end - end - - type supervised = - { - hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - } - - class Supervised < OpenAI::Internal::Type::BaseModel - attr_reader hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters? - - def hyperparameters=: ( - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - - def initialize: ( - ?hyperparameters: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters - ) -> void - - type hyperparameters = - { - batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs - } - - class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size? - - def batch_size=: ( - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size - - attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier? - - def learning_rate_multiplier=: ( - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier - - attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs? 
- - def n_epochs=: ( - OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs - - def initialize: ( - ?batch_size: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs - ) -> void - - type batch_size = :auto | Integer - - module BatchSize - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::batch_size] - end - - type learning_rate_multiplier = :auto | Float - - module LearningRateMultiplier - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::learning_rate_multiplier] - end - - type n_epochs = :auto | Integer - - module NEpochs - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::n_epochs] - end - end - end - - type type_ = :supervised | :dpo + type type_ = :supervised | :dpo | :reinforcement module Type extend OpenAI::Internal::Type::Enum SUPERVISED: :supervised DPO: :dpo + REINFORCEMENT: :reinforcement def self?.values: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::type_] end diff --git a/sig/openai/models/fine_tuning/job_pause_params.rbs b/sig/openai/models/fine_tuning/job_pause_params.rbs new file mode 100644 index 00000000..a5ac0d51 --- /dev/null +++ b/sig/openai/models/fine_tuning/job_pause_params.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module FineTuning + type job_pause_params = { } & OpenAI::Internal::Type::request_parameters + + class JobPauseParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + end + end + end +end diff --git a/sig/openai/models/fine_tuning/job_resume_params.rbs b/sig/openai/models/fine_tuning/job_resume_params.rbs new file mode 100644 index 00000000..fa50a15b --- /dev/null +++ b/sig/openai/models/fine_tuning/job_resume_params.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module FineTuning + type job_resume_params = { } & OpenAI::Internal::Type::request_parameters + + class JobResumeParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + end + end + end +end diff --git a/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs new file mode 100644 index 00000000..64891385 --- /dev/null +++ b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs @@ -0,0 +1,131 @@ +module OpenAI + module Models + module FineTuning + type reinforcement_hyperparameters = + { + batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size, + compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier, + eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval, + eval_samples: 
OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples, + learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs, + reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + } + + class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel + attr_reader batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size? + + def batch_size=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::batch_size + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::batch_size + + attr_reader compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier? + + def compute_multiplier=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier + + attr_reader eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval? + + def eval_interval=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval + + attr_reader eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples? + + def eval_samples=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples + + attr_reader learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier? + + def learning_rate_multiplier=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + + attr_reader n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs? + + def n_epochs=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs + + attr_reader reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort? 
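# --- illustrative example, not part of the patch ---
# As with the other hyperparameter models, each field is optional and unions
# `:auto` with a number; `reasoning_effort` is the one enum. A sketch,
# assuming keyword initialization as typed:
#
#   hp = OpenAI::FineTuning::ReinforcementHyperparameters.new(
#     batch_size: :auto,
#     eval_interval: 10,
#     reasoning_effort: :medium
#   )
# ---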
+ + def reasoning_effort=: ( + OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + ) -> OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + + def initialize: ( + ?batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size, + ?compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier, + ?eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval, + ?eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples, + ?learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs, + ?reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + ) -> void + + type batch_size = :auto | Integer + + module BatchSize + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::batch_size] + end + + type compute_multiplier = :auto | Float + + module ComputeMultiplier + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier] + end + + type eval_interval = :auto | Integer + + module EvalInterval + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval] + end + + type eval_samples = :auto | Integer + + module EvalSamples + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples] + end + + type learning_rate_multiplier = :auto | Float + + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier] + end + + type n_epochs = :auto | Integer + + module NEpochs + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs] + end + + type reasoning_effort = :default | :low | :medium | :high + + module ReasoningEffort + extend OpenAI::Internal::Type::Enum + + DEFAULT: :default + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort] + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/reinforcement_method.rbs b/sig/openai/models/fine_tuning/reinforcement_method.rbs new file mode 100644 index 00000000..4205559a --- /dev/null +++ b/sig/openai/models/fine_tuning/reinforcement_method.rbs @@ -0,0 +1,39 @@ +module OpenAI + module Models + module FineTuning + type reinforcement_method = + { + grader: OpenAI::FineTuning::ReinforcementMethod::grader, + hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters + } + + class ReinforcementMethod < OpenAI::Internal::Type::BaseModel + attr_accessor grader: OpenAI::FineTuning::ReinforcementMethod::grader + + attr_reader hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters? 
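# --- illustrative example, not part of the patch ---
# A reinforcement method requires a grader drawn from the union below; the
# hyperparameters are optional. A sketch; the template strings in `input` and
# `reference` are assumptions, not confirmed by this patch:
#
#   rm = OpenAI::FineTuning::ReinforcementMethod.new(
#     grader: OpenAI::Graders::StringCheckGrader.new(
#       input: "{{sample.output_text}}",
#       name: "exact_match",
#       operation: :eq,
#       reference: "{{item.answer}}"
#     )
#   )
# ---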
+ + def hyperparameters=: ( + OpenAI::FineTuning::ReinforcementHyperparameters + ) -> OpenAI::FineTuning::ReinforcementHyperparameters + + def initialize: ( + grader: OpenAI::FineTuning::ReinforcementMethod::grader, + ?hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters + ) -> void + + type grader = + OpenAI::Graders::StringCheckGrader + | OpenAI::Graders::TextSimilarityGrader + | OpenAI::Graders::PythonGrader + | OpenAI::Graders::ScoreModelGrader + | OpenAI::Graders::MultiGrader + + module Grader + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementMethod::grader] + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs new file mode 100644 index 00000000..7ca1ee8f --- /dev/null +++ b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs @@ -0,0 +1,62 @@ +module OpenAI + module Models + module FineTuning + type supervised_hyperparameters = + { + batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size, + learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + } + + class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel + attr_reader batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size? + + def batch_size=: ( + OpenAI::FineTuning::SupervisedHyperparameters::batch_size + ) -> OpenAI::FineTuning::SupervisedHyperparameters::batch_size + + attr_reader learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier? + + def learning_rate_multiplier=: ( + OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier + ) -> OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier + + attr_reader n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs? 
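# --- illustrative example, not part of the patch ---
# Same `:auto`-or-number pattern as the DPO and reinforcement hyperparameters,
# with every field optional. A one-line sketch:
#
#   hp = OpenAI::FineTuning::SupervisedHyperparameters.new(n_epochs: :auto)
# ---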
+ + def n_epochs=: ( + OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + ) -> OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + + def initialize: ( + ?batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + ) -> void + + type batch_size = :auto | Integer + + module BatchSize + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::batch_size] + end + + type learning_rate_multiplier = :auto | Float + + module LearningRateMultiplier + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier] + end + + type n_epochs = :auto | Integer + + module NEpochs + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::n_epochs] + end + end + end + end +end diff --git a/sig/openai/models/fine_tuning/supervised_method.rbs b/sig/openai/models/fine_tuning/supervised_method.rbs new file mode 100644 index 00000000..eccaf9d3 --- /dev/null +++ b/sig/openai/models/fine_tuning/supervised_method.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Models + module FineTuning + type supervised_method = + { hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters } + + class SupervisedMethod < OpenAI::Internal::Type::BaseModel + attr_reader hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters? + + def hyperparameters=: ( + OpenAI::FineTuning::SupervisedHyperparameters + ) -> OpenAI::FineTuning::SupervisedHyperparameters + + def initialize: ( + ?hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters + ) -> void + end + end + end +end diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs new file mode 100644 index 00000000..93d8ef92 --- /dev/null +++ b/sig/openai/models/graders/label_model_grader.rbs @@ -0,0 +1,109 @@ +module OpenAI + module Models + class LabelModelGrader = Graders::LabelModelGrader + + module Graders + type label_model_grader = + { + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + + class LabelModelGrader < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Graders::LabelModelGrader::Input] + + attr_accessor labels: ::Array[String] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor passing_labels: ::Array[String] + + attr_accessor type: :label_model + + def initialize: ( + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + ?type: :label_model + ) -> void + + type input = + { + content: OpenAI::Graders::LabelModelGrader::Input::content, + role: OpenAI::Graders::LabelModelGrader::Input::role, + type: OpenAI::Graders::LabelModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Graders::LabelModelGrader::Input::content + + attr_accessor role: OpenAI::Graders::LabelModelGrader::Input::role + + attr_reader type: OpenAI::Graders::LabelModelGrader::Input::type_? 
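# --- illustrative example, not part of the patch ---
# An input message takes a content union (plain String, ResponseInputText, or
# the nested OutputText) and a role enum; `type` has a default and can be
# omitted. A sketch, assuming keyword initialization as typed:
#
#   input = OpenAI::Graders::LabelModelGrader::Input.new(
#     content: "Label the following response.",
#     role: :user
#   )
# ---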
+ + def type=: ( + OpenAI::Graders::LabelModelGrader::Input::type_ + ) -> OpenAI::Graders::LabelModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Graders::LabelModelGrader::Input::content, + role: OpenAI::Graders::LabelModelGrader::Input::role, + ?type: OpenAI::Graders::LabelModelGrader::Input::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::type_] + end + end + end + end + end +end diff --git a/sig/openai/models/graders/multi_grader.rbs b/sig/openai/models/graders/multi_grader.rbs new file mode 100644 index 00000000..295e5177 --- /dev/null +++ b/sig/openai/models/graders/multi_grader.rbs @@ -0,0 +1,45 @@ +module OpenAI + module Models + class MultiGrader = Graders::MultiGrader + + module Graders + type multi_grader = + { + calculate_output: String, + graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader], + name: String, + type: :multi + } + + class MultiGrader < OpenAI::Internal::Type::BaseModel + attr_accessor calculate_output: String + + attr_accessor graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader] + + attr_accessor name: String + + attr_accessor type: :multi + + def initialize: ( + calculate_output: String, + graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader], + name: String, + ?type: :multi + ) -> void + + type grader = + OpenAI::Graders::StringCheckGrader + | OpenAI::Graders::TextSimilarityGrader + | OpenAI::Graders::PythonGrader + | OpenAI::Graders::ScoreModelGrader + | OpenAI::Graders::LabelModelGrader + + module Grader + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Graders::MultiGrader::grader] + end + end + end + end +end diff --git a/sig/openai/models/graders/python_grader.rbs b/sig/openai/models/graders/python_grader.rbs new file mode 100644 index 00000000..e23c7c85 --- /dev/null +++ b/sig/openai/models/graders/python_grader.rbs @@ -0,0 +1,29 @@ +module OpenAI + module Models + class PythonGrader = Graders::PythonGrader + + module Graders + type python_grader = + { name: String, source: String, type: :python, image_tag: String } + + class PythonGrader < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor source: String + + attr_accessor type: :python + + attr_reader image_tag: String? 
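# --- illustrative example, not part of the patch ---
# A Python grader is just a named source blob with an optional `image_tag`;
# `type` defaults to :python. A sketch; the source body is a placeholder:
#
#   grader = OpenAI::Graders::PythonGrader.new(
#     name: "unit_test",
#     source: "def grade(sample, item):\n    return 1.0"
#   )
# ---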
+ + def image_tag=: (String) -> String + + def initialize: ( + name: String, + source: String, + ?image_tag: String, + ?type: :python + ) -> void + end + end + end +end diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs new file mode 100644 index 00000000..74b9785e --- /dev/null +++ b/sig/openai/models/graders/score_model_grader.rbs @@ -0,0 +1,113 @@ +module OpenAI + module Models + class ScoreModelGrader = Graders::ScoreModelGrader + + module Graders + type score_model_grader = + { + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], + model: String, + name: String, + type: :score_model, + range: ::Array[Float], + sampling_params: top + } + + class ScoreModelGrader < OpenAI::Internal::Type::BaseModel + attr_accessor input: ::Array[OpenAI::Graders::ScoreModelGrader::Input] + + attr_accessor model: String + + attr_accessor name: String + + attr_accessor type: :score_model + + attr_reader range: ::Array[Float]? + + def range=: (::Array[Float]) -> ::Array[Float] + + attr_reader sampling_params: top? + + def sampling_params=: (top) -> top + + def initialize: ( + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], + model: String, + name: String, + ?range: ::Array[Float], + ?sampling_params: top, + ?type: :score_model + ) -> void + + type input = + { + content: OpenAI::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Graders::ScoreModelGrader::Input::role, + type: OpenAI::Graders::ScoreModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Graders::ScoreModelGrader::Input::content + + attr_accessor role: OpenAI::Graders::ScoreModelGrader::Input::role + + attr_reader type: OpenAI::Graders::ScoreModelGrader::Input::type_? 
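# --- illustrative example, not part of the patch ---
# Structurally identical to LabelModelGrader::Input: a content union plus a
# role enum, with `type` defaulting to :message. A sketch; the
# ResponseInputText constructor arguments are assumptions:
#
#   input = OpenAI::Graders::ScoreModelGrader::Input.new(
#     content: OpenAI::Responses::ResponseInputText.new(text: "Grade this."),
#     role: :system
#   )
# ---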
+ + def type=: ( + OpenAI::Graders::ScoreModelGrader::Input::type_ + ) -> OpenAI::Graders::ScoreModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Graders::ScoreModelGrader::Input::role, + ?type: OpenAI::Graders::ScoreModelGrader::Input::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::type_] + end + end + end + end + end +end diff --git a/sig/openai/models/graders/string_check_grader.rbs b/sig/openai/models/graders/string_check_grader.rbs new file mode 100644 index 00000000..5f60ffb0 --- /dev/null +++ b/sig/openai/models/graders/string_check_grader.rbs @@ -0,0 +1,49 @@ +module OpenAI + module Models + class StringCheckGrader = Graders::StringCheckGrader + + module Graders + type string_check_grader = + { + input: String, + name: String, + operation: OpenAI::Graders::StringCheckGrader::operation, + reference: String, + type: :string_check + } + + class StringCheckGrader < OpenAI::Internal::Type::BaseModel + attr_accessor input: String + + attr_accessor name: String + + attr_accessor operation: OpenAI::Graders::StringCheckGrader::operation + + attr_accessor reference: String + + attr_accessor type: :string_check + + def initialize: ( + input: String, + name: String, + operation: OpenAI::Graders::StringCheckGrader::operation, + reference: String, + ?type: :string_check + ) -> void + + type operation = :eq | :ne | :like | :ilike + + module Operation + extend OpenAI::Internal::Type::Enum + + EQ: :eq + NE: :ne + LIKE: :like + ILIKE: :ilike + + def self?.values: -> ::Array[OpenAI::Graders::StringCheckGrader::operation] + end + end + end + end +end diff --git a/sig/openai/models/graders/text_similarity_grader.rbs b/sig/openai/models/graders/text_similarity_grader.rbs new file mode 100644 index 00000000..24453b12 --- /dev/null +++ b/sig/openai/models/graders/text_similarity_grader.rbs @@ -0,0 +1,65 @@ +module OpenAI + module Models + class TextSimilarityGrader = Graders::TextSimilarityGrader + + module Graders + type text_similarity_grader = + { + evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric, + input: String, + name: String, + reference: String, + type: :text_similarity + } + + class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel + attr_accessor evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric + + attr_accessor input: String + + attr_accessor name: String + + attr_accessor reference: String + + attr_accessor type: :text_similarity + + def initialize: ( + evaluation_metric: 
OpenAI::Graders::TextSimilarityGrader::evaluation_metric, + input: String, + name: String, + reference: String, + ?type: :text_similarity + ) -> void + + type evaluation_metric = + :fuzzy_match + | :bleu + | :gleu + | :meteor + | :rouge_1 + | :rouge_2 + | :rouge_3 + | :rouge_4 + | :rouge_5 + | :rouge_l + + module EvaluationMetric + extend OpenAI::Internal::Type::Enum + + FUZZY_MATCH: :fuzzy_match + BLEU: :bleu + GLEU: :gleu + METEOR: :meteor + ROUGE_1: :rouge_1 + ROUGE_2: :rouge_2 + ROUGE_3: :rouge_3 + ROUGE_4: :rouge_4 + ROUGE_5: :rouge_5 + ROUGE_L: :rouge_l + + def self?.values: -> ::Array[OpenAI::Graders::TextSimilarityGrader::evaluation_metric] + end + end + end + end +end diff --git a/sig/openai/resources/fine_tuning.rbs b/sig/openai/resources/fine_tuning.rbs index db3f1574..f51dbeaa 100644 --- a/sig/openai/resources/fine_tuning.rbs +++ b/sig/openai/resources/fine_tuning.rbs @@ -1,10 +1,14 @@ module OpenAI module Resources class FineTuning + attr_reader methods_: OpenAI::Resources::FineTuning::Methods + attr_reader jobs: OpenAI::Resources::FineTuning::Jobs attr_reader checkpoints: OpenAI::Resources::FineTuning::Checkpoints + attr_reader alpha: OpenAI::Resources::FineTuning::Alpha + def initialize: (client: OpenAI::Client) -> void end end diff --git a/sig/openai/resources/fine_tuning/alpha.rbs b/sig/openai/resources/fine_tuning/alpha.rbs new file mode 100644 index 00000000..467661b1 --- /dev/null +++ b/sig/openai/resources/fine_tuning/alpha.rbs @@ -0,0 +1,11 @@ +module OpenAI + module Resources + class FineTuning + class Alpha + attr_reader graders: OpenAI::Resources::FineTuning::Alpha::Graders + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/fine_tuning/alpha/graders.rbs b/sig/openai/resources/fine_tuning/alpha/graders.rbs new file mode 100644 index 00000000..a460c4c7 --- /dev/null +++ b/sig/openai/resources/fine_tuning/alpha/graders.rbs @@ -0,0 +1,23 @@ +module OpenAI + module Resources + class FineTuning + class Alpha + class Graders + def run: ( + grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, + model_sample: String, + reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::Alpha::GraderRunResponse + + def validate: ( + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::FineTuning::Alpha::GraderValidateResponse + + def initialize: (client: OpenAI::Client) -> void + end + end + end + end +end diff --git a/sig/openai/resources/fine_tuning/jobs.rbs b/sig/openai/resources/fine_tuning/jobs.rbs index fe96137a..c28f22d8 100644 --- a/sig/openai/resources/fine_tuning/jobs.rbs +++ b/sig/openai/resources/fine_tuning/jobs.rbs @@ -41,6 +41,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::CursorPage[OpenAI::FineTuning::FineTuningJobEvent] + def pause: ( + String fine_tuning_job_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::FineTuning::FineTuningJob + + def resume: ( + String fine_tuning_job_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::FineTuning::FineTuningJob + def initialize: (client: OpenAI::Client) -> void end end diff --git a/sig/openai/resources/fine_tuning/methods.rbs b/sig/openai/resources/fine_tuning/methods.rbs new file mode 100644 index 00000000..16dc30bc --- /dev/null +++ b/sig/openai/resources/fine_tuning/methods.rbs @@ -0,0 +1,9 @@ +module OpenAI + 
module Resources + class FineTuning + class Methods + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/graders.rbs b/sig/openai/resources/graders.rbs new file mode 100644 index 00000000..aed8f8b6 --- /dev/null +++ b/sig/openai/resources/graders.rbs @@ -0,0 +1,9 @@ +module OpenAI + module Resources + class Graders + attr_reader grader_models: OpenAI::Resources::Graders::GraderModels + + def initialize: (client: OpenAI::Client) -> void + end + end +end diff --git a/sig/openai/resources/graders/grader_models.rbs b/sig/openai/resources/graders/grader_models.rbs new file mode 100644 index 00000000..637eb1a6 --- /dev/null +++ b/sig/openai/resources/graders/grader_models.rbs @@ -0,0 +1,9 @@ +module OpenAI + module Resources + class Graders + class GraderModels + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/test/openai/resource_namespaces.rb b/test/openai/resource_namespaces.rb index c76a68aa..dc01c156 100644 --- a/test/openai/resource_namespaces.rb +++ b/test/openai/resource_namespaces.rb @@ -3,6 +3,9 @@ module OpenAI module Test module Resources + module Alpha + end + module Audio end @@ -30,6 +33,9 @@ module Runs end module FineTuning + module Alpha + end + module Checkpoints end @@ -37,6 +43,9 @@ module Jobs end end + module Graders + end + module Jobs end diff --git a/test/openai/resources/fine_tuning/alpha/graders_test.rb b/test/openai/resources/fine_tuning/alpha/graders_test.rb new file mode 100644 index 00000000..7a1c620a --- /dev/null +++ b/test/openai/resources/fine_tuning/alpha/graders_test.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::FineTuning::Alpha::GradersTest < OpenAI::Test::ResourceTest + def test_run_required_params + response = + @openai.fine_tuning.alpha.graders.run( + grader: {input: "input", name: "name", operation: :eq, reference: "reference", type: :string_check}, + model_sample: "model_sample", + reference_answer: "string" + ) + + assert_pattern do + response => OpenAI::Models::FineTuning::Alpha::GraderRunResponse + end + + assert_pattern do + response => { + metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: ^(OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]), + reward: Float, + sub_rewards: ^(OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]) + } + end + end + + def test_validate_required_params + response = + @openai.fine_tuning.alpha.graders.validate( + grader: {input: "input", name: "name", operation: :eq, reference: "reference", type: :string_check} + ) + + assert_pattern do + response => OpenAI::Models::FineTuning::Alpha::GraderValidateResponse + end + + assert_pattern do + response => { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader | nil + } + end + end +end diff --git a/test/openai/resources/fine_tuning/alpha_test.rb b/test/openai/resources/fine_tuning/alpha_test.rb new file mode 100644 index 00000000..53389b81 --- /dev/null +++ b/test/openai/resources/fine_tuning/alpha_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::FineTuning::AlphaTest < OpenAI::Test::ResourceTest +end diff --git a/test/openai/resources/fine_tuning/jobs_test.rb b/test/openai/resources/fine_tuning/jobs_test.rb index 188a79e7..3047ffd4 100644 --- a/test/openai/resources/fine_tuning/jobs_test.rb +++ 
b/test/openai/resources/fine_tuning/jobs_test.rb @@ -164,4 +164,68 @@ def test_list_events } end end + + def test_pause + response = @openai.fine_tuning.jobs.pause("ft-AF1WoRqd3aJAHsqc9NY7iL8F") + + assert_pattern do + response => OpenAI::FineTuning::FineTuningJob + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, + fine_tuned_model: String | nil, + finished_at: Integer | nil, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, + model: String, + object: Symbol, + organization_id: String, + result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), + seed: Integer, + status: OpenAI::FineTuning::FineTuningJob::Status, + trained_tokens: Integer | nil, + training_file: String, + validation_file: String | nil, + estimated_finish: Integer | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + method_: OpenAI::FineTuning::FineTuningJob::Method | nil + } + end + end + + def test_resume + response = @openai.fine_tuning.jobs.resume("ft-AF1WoRqd3aJAHsqc9NY7iL8F") + + assert_pattern do + response => OpenAI::FineTuning::FineTuningJob + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + error: OpenAI::FineTuning::FineTuningJob::Error | nil, + fine_tuned_model: String | nil, + finished_at: Integer | nil, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, + model: String, + object: Symbol, + organization_id: String, + result_files: ^(OpenAI::Internal::Type::ArrayOf[String]), + seed: Integer, + status: OpenAI::FineTuning::FineTuningJob::Status, + trained_tokens: Integer | nil, + training_file: String, + validation_file: String | nil, + estimated_finish: Integer | nil, + integrations: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]) | nil, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + method_: OpenAI::FineTuning::FineTuningJob::Method | nil + } + end + end end diff --git a/test/openai/resources/fine_tuning/methods_test.rb b/test/openai/resources/fine_tuning/methods_test.rb new file mode 100644 index 00000000..69a6ca5a --- /dev/null +++ b/test/openai/resources/fine_tuning/methods_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::FineTuning::MethodsTest < OpenAI::Test::ResourceTest +end diff --git a/test/openai/resources/graders/grader_models_test.rb b/test/openai/resources/graders/grader_models_test.rb new file mode 100644 index 00000000..6a2f0b0a --- /dev/null +++ b/test/openai/resources/graders/grader_models_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::Graders::GraderModelsTest < OpenAI::Test::ResourceTest +end diff --git a/test/openai/resources/graders_test.rb b/test/openai/resources/graders_test.rb new file mode 100644 index 00000000..64e6bf4f --- /dev/null +++ b/test/openai/resources/graders_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::Resources::GradersTest < OpenAI::Test::ResourceTest +end From 18b4ceea18fc54b00e4c417e54f0005f4684a265 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 18:06:21 +0000 Subject: [PATCH 185/295] feat: support sorbet aliases at 
runtime
---
 lib/openai/internal.rb | 6 +
 lib/openai/internal/transport/base_client.rb | 50 ++++++
 .../transport/pooled_net_requester.rb | 14 ++
 lib/openai/internal/type/array_of.rb | 2 +-
 lib/openai/internal/type/base_model.rb | 21 ++-
 lib/openai/internal/type/converter.rb | 18 ++
 lib/openai/internal/type/enum.rb | 1 +
 lib/openai/internal/type/hash_of.rb | 2 +-
 lib/openai/internal/type/union.rb | 1 +
 lib/openai/internal/util.rb | 56 ++++++
 lib/openai/models.rb | 27 +++
 lib/openai/models/all_models.rb | 6 +
 .../models/audio/speech_create_params.rb | 8 +
 .../audio/transcription_create_params.rb | 4 +
 .../audio/transcription_create_response.rb | 4 +
 .../audio/transcription_stream_event.rb | 6 +
 .../models/audio/translation_create_params.rb | 4 +
 .../audio/translation_create_response.rb | 4 +
 .../models/beta/assistant_create_params.rb | 13 ++
 .../beta/assistant_response_format_option.rb | 11 ++
 .../models/beta/assistant_stream_event.rb | 31 ++++
 lib/openai/models/beta/assistant_tool.rb | 6 +
 .../beta/assistant_tool_choice_option.rb | 9 +
 .../models/beta/assistant_update_params.rb | 4 +
 .../models/beta/message_stream_event.rb | 12 ++
 .../models/beta/run_step_stream_event.rb | 14 ++
 lib/openai/models/beta/run_stream_event.rb | 17 ++
 .../beta/thread_create_and_run_params.rb | 37 ++++
 .../models/beta/thread_create_params.rb | 33 ++++
 lib/openai/models/beta/threads/annotation.rb | 6 +
 .../models/beta/threads/annotation_delta.rb | 9 +
 lib/openai/models/beta/threads/message.rb | 9 +
 .../models/beta/threads/message_content.rb | 11 ++
 .../beta/threads/message_content_delta.rb | 11 ++
 .../threads/message_content_part_param.rb | 10 ++
 .../beta/threads/message_create_params.rb | 24 +++
 .../models/beta/threads/run_create_params.rb | 28 +++
 .../runs/code_interpreter_tool_call.rb | 9 +
 .../runs/code_interpreter_tool_call_delta.rb | 9 +
 .../models/beta/threads/runs/run_step.rb | 9 +
 .../beta/threads/runs/run_step_delta.rb | 9 +
 .../models/beta/threads/runs/tool_call.rb | 10 ++
 .../beta/threads/runs/tool_call_delta.rb | 10 ++
 ...chat_completion_assistant_message_param.rb | 21 +++
 .../chat/chat_completion_audio_param.rb | 4 +
 .../chat/chat_completion_content_part.rb | 11 ++
 ...chat_completion_developer_message_param.rb | 4 +
 .../chat/chat_completion_message_param.rb | 13 ++
 .../chat_completion_prediction_content.rb | 4 +
 .../chat_completion_system_message_param.rb | 4 +
 .../chat_completion_tool_choice_option.rb | 9 +
 .../chat_completion_tool_message_param.rb | 4 +
 .../chat_completion_user_message_param.rb | 16 ++
 .../models/chat/completion_create_params.rb | 27 +++
 lib/openai/models/comparison_filter.rb | 4 +
 lib/openai/models/completion_create_params.rb | 12 ++
 lib/openai/models/compound_filter.rb | 4 +
 lib/openai/models/embedding_create_params.rb | 8 +
 lib/openai/models/eval_create_params.rb | 40 +++++
 lib/openai/models/eval_create_response.rb | 18 ++
 lib/openai/models/eval_list_response.rb | 18 ++
 lib/openai/models/eval_retrieve_response.rb | 18 ++
 lib/openai/models/eval_update_response.rb | 18 ++
 ...create_eval_completions_run_data_source.rb | 38 ++++
 .../create_eval_jsonl_run_data_source.rb | 9 +
 .../models/evals/run_cancel_response.rb | 48 +++++
 lib/openai/models/evals/run_create_params.rb | 48 +++++
 .../models/evals/run_create_response.rb | 48 +++++
 lib/openai/models/evals/run_list_response.rb | 48 +++++
 .../models/evals/run_retrieve_response.rb | 48 +++++
 lib/openai/models/file_chunking_strategy.rb | 6 +
 .../models/file_chunking_strategy_param.rb | 6 +
 .../fine_tuning/alpha/grader_run_params.rb | 16 
++ .../alpha/grader_validate_params.rb | 12 ++ .../alpha/grader_validate_response.rb | 12 ++ .../models/fine_tuning/dpo_hyperparameters.rb | 16 ++ .../models/fine_tuning/fine_tuning_job.rb | 12 ++ .../models/fine_tuning/job_create_params.rb | 16 ++ .../reinforcement_hyperparameters.rb | 24 +++ .../fine_tuning/reinforcement_method.rb | 12 ++ .../fine_tuning/supervised_hyperparameters.rb | 12 ++ .../models/graders/label_model_grader.rb | 10 ++ lib/openai/models/graders/multi_grader.rb | 12 ++ .../models/graders/score_model_grader.rb | 10 ++ .../models/image_create_variation_params.rb | 4 + lib/openai/models/image_edit_params.rb | 8 + lib/openai/models/image_generate_params.rb | 4 + lib/openai/models/moderation_create_params.rb | 14 ++ .../models/moderation_multi_modal_input.rb | 4 + .../models/responses/easy_input_message.rb | 15 ++ .../models/responses/file_search_tool.rb | 4 + lib/openai/models/responses/response.rb | 10 ++ .../response_code_interpreter_tool_call.rb | 9 + .../responses/response_computer_tool_call.rb | 16 ++ .../models/responses/response_content.rb | 12 ++ .../response_content_part_added_event.rb | 6 + .../response_content_part_done_event.rb | 6 + .../responses/response_create_params.rb | 33 ++++ .../response_file_search_tool_call.rb | 4 + .../responses/response_format_text_config.rb | 10 ++ .../responses/response_input_content.rb | 10 ++ .../models/responses/response_input_item.rb | 18 ++ lib/openai/models/responses/response_item.rb | 15 ++ .../models/responses/response_output_item.rb | 13 ++ .../responses/response_output_message.rb | 6 + .../models/responses/response_output_text.rb | 10 ++ .../models/responses/response_stream_event.rb | 43 +++++ .../response_text_annotation_delta_event.rb | 10 ++ lib/openai/models/responses/tool.rb | 11 ++ lib/openai/models/responses_model.rb | 10 ++ .../models/vector_store_search_params.rb | 8 + .../models/vector_store_search_response.rb | 4 + .../vector_stores/file_batch_create_params.rb | 4 + .../vector_stores/file_create_params.rb | 4 + .../vector_stores/file_update_params.rb | 4 + .../models/vector_stores/vector_store_file.rb | 4 + lib/openai/request_options.rb | 4 + rbi/openai/errors.rbi | 2 +- rbi/openai/internal.rbi | 2 + rbi/openai/internal/transport/base_client.rbi | 23 ++- .../transport/pooled_net_requester.rbi | 10 +- rbi/openai/internal/type/base_model.rbi | 17 +- rbi/openai/internal/type/base_page.rbi | 3 +- rbi/openai/internal/type/converter.rbi | 2 + rbi/openai/internal/type/enum.rbi | 1 + rbi/openai/internal/type/union.rbi | 1 + rbi/openai/internal/util.rbi | 33 +++- .../models/audio/speech_create_params.rbi | 5 +- rbi/openai/models/audio/transcription.rbi | 12 +- .../audio/transcription_create_params.rbi | 8 +- .../models/audio/transcription_segment.rbi | 8 +- .../audio/transcription_text_delta_event.rbi | 15 +- .../audio/transcription_text_done_event.rbi | 15 +- .../models/audio/transcription_verbose.rbi | 8 +- .../models/audio/transcription_word.rbi | 5 +- rbi/openai/models/audio/translation.rbi | 5 +- .../audio/translation_create_params.rbi | 8 +- .../models/audio/translation_verbose.rbi | 5 +- .../auto_file_chunking_strategy_param.rbi | 8 +- rbi/openai/models/batch.rbi | 7 +- rbi/openai/models/batch_cancel_params.rbi | 5 +- rbi/openai/models/batch_create_params.rbi | 5 +- rbi/openai/models/batch_error.rbi | 3 +- rbi/openai/models/batch_list_params.rbi | 5 +- rbi/openai/models/batch_request_counts.rbi | 5 +- rbi/openai/models/batch_retrieve_params.rbi | 5 +- rbi/openai/models/beta/assistant.rbi | 26 ++- 
.../models/beta/assistant_create_params.rbi | 51 +++++- .../models/beta/assistant_delete_params.rbi | 8 +- rbi/openai/models/beta/assistant_deleted.rbi | 5 +- .../models/beta/assistant_list_params.rbi | 5 +- .../models/beta/assistant_retrieve_params.rbi | 8 +- .../models/beta/assistant_stream_event.rbi | 168 +++++++++++++++--- .../models/beta/assistant_tool_choice.rbi | 5 +- .../beta/assistant_tool_choice_function.rbi | 8 +- .../models/beta/assistant_update_params.rbi | 29 ++- .../models/beta/code_interpreter_tool.rbi | 5 +- rbi/openai/models/beta/file_search_tool.rbi | 19 +- rbi/openai/models/beta/function_tool.rbi | 5 +- .../models/beta/message_stream_event.rbi | 35 +++- .../models/beta/run_step_stream_event.rbi | 49 ++++- rbi/openai/models/beta/run_stream_event.rbi | 70 ++++++-- rbi/openai/models/beta/thread.rbi | 26 ++- .../beta/thread_create_and_run_params.rbi | 105 +++++++++-- .../models/beta/thread_create_params.rbi | 69 +++++-- .../models/beta/thread_delete_params.rbi | 5 +- rbi/openai/models/beta/thread_deleted.rbi | 5 +- .../models/beta/thread_retrieve_params.rbi | 5 +- .../models/beta/thread_stream_event.rbi | 5 +- .../models/beta/thread_update_params.rbi | 26 ++- .../beta/threads/file_citation_annotation.rbi | 14 +- .../file_citation_delta_annotation.rbi | 14 +- .../beta/threads/file_path_annotation.rbi | 14 +- .../threads/file_path_delta_annotation.rbi | 14 +- rbi/openai/models/beta/threads/image_file.rbi | 4 +- .../beta/threads/image_file_content_block.rbi | 7 +- .../models/beta/threads/image_file_delta.rbi | 7 +- .../beta/threads/image_file_delta_block.rbi | 7 +- rbi/openai/models/beta/threads/image_url.rbi | 4 +- .../beta/threads/image_url_content_block.rbi | 7 +- .../models/beta/threads/image_url_delta.rbi | 7 +- .../beta/threads/image_url_delta_block.rbi | 7 +- rbi/openai/models/beta/threads/message.rbi | 25 ++- .../beta/threads/message_create_params.rbi | 21 ++- .../beta/threads/message_delete_params.rbi | 7 +- .../models/beta/threads/message_deleted.rbi | 7 +- .../models/beta/threads/message_delta.rbi | 7 +- .../beta/threads/message_delta_event.rbi | 7 +- .../beta/threads/message_list_params.rbi | 7 +- .../beta/threads/message_retrieve_params.rbi | 7 +- .../beta/threads/message_update_params.rbi | 7 +- .../beta/threads/refusal_content_block.rbi | 7 +- .../beta/threads/refusal_delta_block.rbi | 7 +- .../required_action_function_tool_call.rbi | 14 +- rbi/openai/models/beta/threads/run.rbi | 46 ++++- .../models/beta/threads/run_cancel_params.rbi | 7 +- .../models/beta/threads/run_create_params.rbi | 33 +++- .../models/beta/threads/run_list_params.rbi | 7 +- .../beta/threads/run_retrieve_params.rbi | 7 +- .../run_submit_tool_outputs_params.rbi | 14 +- .../models/beta/threads/run_update_params.rbi | 7 +- .../threads/runs/code_interpreter_logs.rbi | 7 +- .../runs/code_interpreter_output_image.rbi | 14 +- .../runs/code_interpreter_tool_call.rbi | 29 ++- .../runs/code_interpreter_tool_call_delta.rbi | 14 +- .../threads/runs/file_search_tool_call.rbi | 33 +++- .../runs/file_search_tool_call_delta.rbi | 7 +- .../beta/threads/runs/function_tool_call.rbi | 14 +- .../threads/runs/function_tool_call_delta.rbi | 14 +- .../runs/message_creation_step_details.rbi | 14 +- .../models/beta/threads/runs/run_step.rbi | 21 ++- .../beta/threads/runs/run_step_delta.rbi | 7 +- .../threads/runs/run_step_delta_event.rbi | 7 +- .../runs/run_step_delta_message_delta.rbi | 14 +- .../beta/threads/runs/step_list_params.rbi | 7 +- .../threads/runs/step_retrieve_params.rbi | 7 +- 
.../threads/runs/tool_call_delta_object.rbi | 7 +- .../threads/runs/tool_calls_step_details.rbi | 7 +- rbi/openai/models/beta/threads/text.rbi | 4 +- .../beta/threads/text_content_block.rbi | 7 +- .../beta/threads/text_content_block_param.rbi | 7 +- rbi/openai/models/beta/threads/text_delta.rbi | 4 +- .../models/beta/threads/text_delta_block.rbi | 7 +- rbi/openai/models/chat/chat_completion.rbi | 19 +- ...hat_completion_assistant_message_param.rbi | 22 ++- .../models/chat/chat_completion_audio.rbi | 5 +- .../chat/chat_completion_audio_param.rbi | 8 +- .../models/chat/chat_completion_chunk.rbi | 47 ++++- .../chat/chat_completion_content_part.rbi | 14 +- .../chat_completion_content_part_image.rbi | 15 +- ...at_completion_content_part_input_audio.rbi | 15 +- .../chat_completion_content_part_refusal.rbi | 8 +- .../chat_completion_content_part_text.rbi | 8 +- .../models/chat/chat_completion_deleted.rbi | 8 +- ...hat_completion_developer_message_param.rbi | 8 +- .../chat_completion_function_call_option.rbi | 8 +- ...chat_completion_function_message_param.rbi | 8 +- .../models/chat/chat_completion_message.rbi | 29 ++- .../chat_completion_message_tool_call.rbi | 15 +- .../chat_completion_named_tool_choice.rbi | 15 +- .../chat_completion_prediction_content.rbi | 8 +- .../chat/chat_completion_store_message.rbi | 8 +- .../chat/chat_completion_stream_options.rbi | 8 +- .../chat_completion_system_message_param.rbi | 8 +- .../chat/chat_completion_token_logprob.rbi | 15 +- .../models/chat/chat_completion_tool.rbi | 5 +- .../chat_completion_tool_message_param.rbi | 8 +- .../chat_completion_user_message_param.rbi | 8 +- .../models/chat/completion_create_params.rbi | 36 +++- .../models/chat/completion_delete_params.rbi | 8 +- .../models/chat/completion_list_params.rbi | 5 +- .../chat/completion_retrieve_params.rbi | 8 +- .../models/chat/completion_update_params.rbi | 8 +- .../chat/completions/message_list_params.rbi | 7 +- rbi/openai/models/comparison_filter.rbi | 5 +- rbi/openai/models/completion.rbi | 3 +- rbi/openai/models/completion_choice.rbi | 10 +- .../models/completion_create_params.rbi | 5 +- rbi/openai/models/completion_usage.rbi | 21 ++- rbi/openai/models/compound_filter.rbi | 5 +- .../models/create_embedding_response.rbi | 13 +- rbi/openai/models/embedding.rbi | 3 +- rbi/openai/models/embedding_create_params.rbi | 5 +- rbi/openai/models/error_object.rbi | 3 +- rbi/openai/models/eval_create_params.rbi | 66 +++++-- rbi/openai/models/eval_create_response.rbi | 26 ++- .../models/eval_custom_data_source_config.rbi | 5 +- rbi/openai/models/eval_delete_params.rbi | 5 +- rbi/openai/models/eval_delete_response.rbi | 5 +- rbi/openai/models/eval_list_params.rbi | 5 +- rbi/openai/models/eval_list_response.rbi | 26 ++- rbi/openai/models/eval_retrieve_params.rbi | 5 +- rbi/openai/models/eval_retrieve_response.rbi | 26 ++- ..._stored_completions_data_source_config.rbi | 8 +- rbi/openai/models/eval_update_params.rbi | 5 +- rbi/openai/models/eval_update_response.rbi | 26 ++- ...reate_eval_completions_run_data_source.rbi | 69 +++++-- .../create_eval_jsonl_run_data_source.rbi | 29 ++- rbi/openai/models/evals/eval_api_error.rbi | 5 +- rbi/openai/models/evals/run_cancel_params.rbi | 5 +- .../models/evals/run_cancel_response.rbi | 98 ++++++++-- rbi/openai/models/evals/run_create_params.rbi | 74 ++++++-- .../models/evals/run_create_response.rbi | 98 ++++++++-- rbi/openai/models/evals/run_delete_params.rbi | 5 +- .../models/evals/run_delete_response.rbi | 8 +- rbi/openai/models/evals/run_list_params.rbi | 5 +- 
rbi/openai/models/evals/run_list_response.rbi | 98 ++++++++-- .../models/evals/run_retrieve_params.rbi | 5 +- .../models/evals/run_retrieve_response.rbi | 98 ++++++++-- .../evals/runs/output_item_list_params.rbi | 7 +- .../evals/runs/output_item_list_response.rbi | 35 +++- .../runs/output_item_retrieve_params.rbi | 7 +- .../runs/output_item_retrieve_response.rbi | 35 +++- rbi/openai/models/file_content_params.rbi | 5 +- rbi/openai/models/file_create_params.rbi | 5 +- rbi/openai/models/file_delete_params.rbi | 5 +- rbi/openai/models/file_deleted.rbi | 3 +- rbi/openai/models/file_list_params.rbi | 5 +- rbi/openai/models/file_object.rbi | 3 +- rbi/openai/models/file_retrieve_params.rbi | 5 +- .../fine_tuning/alpha/grader_run_params.rbi | 7 +- .../fine_tuning/alpha/grader_run_response.rbi | 21 ++- .../alpha/grader_validate_params.rbi | 7 +- .../alpha/grader_validate_response.rbi | 7 +- .../checkpoints/permission_create_params.rbi | 7 +- .../permission_create_response.rbi | 7 +- .../checkpoints/permission_delete_params.rbi | 7 +- .../permission_delete_response.rbi | 7 +- .../permission_retrieve_params.rbi | 7 +- .../permission_retrieve_response.rbi | 14 +- .../fine_tuning/dpo_hyperparameters.rbi | 8 +- rbi/openai/models/fine_tuning/dpo_method.rbi | 5 +- .../models/fine_tuning/fine_tuning_job.rbi | 26 ++- .../fine_tuning/fine_tuning_job_event.rbi | 8 +- .../fine_tuning_job_wandb_integration.rbi | 8 +- ...ne_tuning_job_wandb_integration_object.rbi | 8 +- .../models/fine_tuning/job_cancel_params.rbi | 8 +- .../models/fine_tuning/job_create_params.rbi | 36 +++- .../fine_tuning/job_list_events_params.rbi | 8 +- .../models/fine_tuning/job_list_params.rbi | 5 +- .../models/fine_tuning/job_pause_params.rbi | 5 +- .../models/fine_tuning/job_resume_params.rbi | 8 +- .../fine_tuning/job_retrieve_params.rbi | 8 +- .../jobs/checkpoint_list_params.rbi | 7 +- .../jobs/fine_tuning_job_checkpoint.rbi | 14 +- .../reinforcement_hyperparameters.rbi | 8 +- .../fine_tuning/reinforcement_method.rbi | 8 +- .../supervised_hyperparameters.rbi | 8 +- .../models/fine_tuning/supervised_method.rbi | 8 +- rbi/openai/models/function_definition.rbi | 5 +- .../models/graders/label_model_grader.rbi | 19 +- rbi/openai/models/graders/multi_grader.rbi | 5 +- rbi/openai/models/graders/python_grader.rbi | 5 +- .../models/graders/score_model_grader.rbi | 19 +- .../models/graders/string_check_grader.rbi | 5 +- .../models/graders/text_similarity_grader.rbi | 8 +- rbi/openai/models/image.rbi | 2 +- .../models/image_create_variation_params.rbi | 5 +- rbi/openai/models/image_edit_params.rbi | 5 +- rbi/openai/models/image_generate_params.rbi | 5 +- rbi/openai/models/images_response.rbi | 17 +- rbi/openai/models/model.rbi | 2 +- rbi/openai/models/model_delete_params.rbi | 5 +- rbi/openai/models/model_deleted.rbi | 3 +- rbi/openai/models/model_list_params.rbi | 5 +- rbi/openai/models/model_retrieve_params.rbi | 5 +- rbi/openai/models/moderation.rbi | 21 ++- .../models/moderation_create_params.rbi | 5 +- .../models/moderation_create_response.rbi | 8 +- .../models/moderation_image_url_input.rbi | 13 +- rbi/openai/models/moderation_text_input.rbi | 5 +- .../other_file_chunking_strategy_object.rbi | 8 +- rbi/openai/models/reasoning.rbi | 3 +- .../models/response_format_json_object.rbi | 5 +- .../models/response_format_json_schema.rbi | 13 +- rbi/openai/models/response_format_text.rbi | 5 +- rbi/openai/models/responses/computer_tool.rbi | 5 +- .../models/responses/easy_input_message.rbi | 8 +- .../models/responses/file_search_tool.rbi | 12 +- 
rbi/openai/models/responses/function_tool.rbi | 5 +- .../responses/input_item_list_params.rbi | 8 +- rbi/openai/models/responses/response.rbi | 12 +- .../responses/response_audio_delta_event.rbi | 8 +- .../responses/response_audio_done_event.rbi | 8 +- .../response_audio_transcript_delta_event.rbi | 8 +- .../response_audio_transcript_done_event.rbi | 8 +- ...code_interpreter_call_code_delta_event.rbi | 8 +- ..._code_interpreter_call_code_done_event.rbi | 8 +- ..._code_interpreter_call_completed_event.rbi | 8 +- ...ode_interpreter_call_in_progress_event.rbi | 8 +- ...de_interpreter_call_interpreting_event.rbi | 8 +- .../response_code_interpreter_tool_call.rbi | 29 ++- .../responses/response_completed_event.rbi | 8 +- .../responses/response_computer_tool_call.rbi | 85 +++++++-- ...esponse_computer_tool_call_output_item.rbi | 15 +- ...e_computer_tool_call_output_screenshot.rbi | 8 +- .../response_content_part_added_event.rbi | 8 +- .../response_content_part_done_event.rbi | 8 +- .../responses/response_create_params.rbi | 8 +- .../responses/response_created_event.rbi | 8 +- .../responses/response_delete_params.rbi | 8 +- .../models/responses/response_error.rbi | 5 +- .../models/responses/response_error_event.rbi | 8 +- .../responses/response_failed_event.rbi | 8 +- ...ponse_file_search_call_completed_event.rbi | 8 +- ...nse_file_search_call_in_progress_event.rbi | 8 +- ...ponse_file_search_call_searching_event.rbi | 8 +- .../response_file_search_tool_call.rbi | 15 +- ...esponse_format_text_json_schema_config.rbi | 8 +- ...se_function_call_arguments_delta_event.rbi | 8 +- ...nse_function_call_arguments_done_event.rbi | 8 +- .../responses/response_function_tool_call.rbi | 8 +- .../response_function_tool_call_item.rbi | 8 +- ...esponse_function_tool_call_output_item.rbi | 8 +- .../response_function_web_search.rbi | 8 +- .../responses/response_in_progress_event.rbi | 8 +- .../responses/response_incomplete_event.rbi | 8 +- .../models/responses/response_input_audio.rbi | 8 +- .../models/responses/response_input_file.rbi | 8 +- .../models/responses/response_input_image.rbi | 8 +- .../models/responses/response_input_item.rbi | 35 +++- .../responses/response_input_message_item.rbi | 8 +- .../models/responses/response_input_text.rbi | 8 +- .../models/responses/response_item_list.rbi | 8 +- .../responses/response_output_audio.rbi | 8 +- .../response_output_item_added_event.rbi | 8 +- .../response_output_item_done_event.rbi | 8 +- .../responses/response_output_message.rbi | 8 +- .../responses/response_output_refusal.rbi | 8 +- .../models/responses/response_output_text.rbi | 29 ++- .../responses/response_reasoning_item.rbi | 15 +- ...nse_reasoning_summary_part_added_event.rbi | 15 +- ...onse_reasoning_summary_part_done_event.rbi | 15 +- ...nse_reasoning_summary_text_delta_event.rbi | 8 +- ...onse_reasoning_summary_text_done_event.rbi | 8 +- .../response_refusal_delta_event.rbi | 8 +- .../responses/response_refusal_done_event.rbi | 8 +- .../responses/response_retrieve_params.rbi | 8 +- .../response_text_annotation_delta_event.rbi | 29 ++- .../models/responses/response_text_config.rbi | 8 +- .../responses/response_text_delta_event.rbi | 8 +- .../responses/response_text_done_event.rbi | 8 +- .../models/responses/response_usage.rbi | 19 +- ...sponse_web_search_call_completed_event.rbi | 8 +- ...onse_web_search_call_in_progress_event.rbi | 8 +- ...sponse_web_search_call_searching_event.rbi | 8 +- .../models/responses/tool_choice_function.rbi | 8 +- .../models/responses/tool_choice_types.rbi | 5 +- 
.../models/responses/web_search_tool.rbi | 12 +- .../models/static_file_chunking_strategy.rbi | 5 +- .../static_file_chunking_strategy_object.rbi | 8 +- ...ic_file_chunking_strategy_object_param.rbi | 8 +- rbi/openai/models/upload.rbi | 2 +- rbi/openai/models/upload_cancel_params.rbi | 5 +- rbi/openai/models/upload_complete_params.rbi | 5 +- rbi/openai/models/upload_create_params.rbi | 5 +- .../models/uploads/part_create_params.rbi | 5 +- rbi/openai/models/uploads/upload_part.rbi | 5 +- rbi/openai/models/vector_store.rbi | 13 +- .../models/vector_store_create_params.rbi | 13 +- .../models/vector_store_delete_params.rbi | 5 +- rbi/openai/models/vector_store_deleted.rbi | 5 +- .../models/vector_store_list_params.rbi | 5 +- .../models/vector_store_retrieve_params.rbi | 5 +- .../models/vector_store_search_params.rbi | 13 +- .../models/vector_store_search_response.rbi | 16 +- .../models/vector_store_update_params.rbi | 13 +- .../file_batch_cancel_params.rbi | 8 +- .../file_batch_create_params.rbi | 8 +- .../file_batch_list_files_params.rbi | 8 +- .../file_batch_retrieve_params.rbi | 8 +- .../vector_stores/file_content_params.rbi | 8 +- .../vector_stores/file_content_response.rbi | 8 +- .../vector_stores/file_create_params.rbi | 8 +- .../vector_stores/file_delete_params.rbi | 8 +- .../models/vector_stores/file_list_params.rbi | 8 +- .../vector_stores/file_retrieve_params.rbi | 8 +- .../vector_stores/file_update_params.rbi | 8 +- .../vector_stores/vector_store_file.rbi | 15 +- .../vector_stores/vector_store_file_batch.rbi | 15 +- .../vector_store_file_deleted.rbi | 8 +- rbi/openai/request_options.rbi | 3 +- sig/openai/internal.rbs | 2 + sig/openai/internal/transport/base_client.rbs | 3 +- .../transport/pooled_net_requester.rbs | 2 + sig/openai/internal/type/array_of.rbs | 2 +- sig/openai/internal/type/base_model.rbs | 7 +- sig/openai/internal/type/converter.rbs | 2 + sig/openai/internal/type/enum.rbs | 1 + sig/openai/internal/type/hash_of.rbs | 2 +- sig/openai/internal/type/union.rbs | 1 + sig/openai/internal/util.rbs | 13 ++ test/openai/client_test.rb | 7 +- .../internal/sorbet_runtime_support_test.rb | 49 +++++ test/openai/test_helper.rb | 6 + 475 files changed, 5541 insertions(+), 711 deletions(-) create mode 100644 test/openai/internal/sorbet_runtime_support_test.rb diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb index 143e002b..01f2c4b7 100644 --- a/lib/openai/internal.rb +++ b/lib/openai/internal.rb @@ -2,10 +2,16 @@ module OpenAI module Internal + extend OpenAI::Internal::Util::SorbetRuntimeSupport + OMIT = Object.new.tap do _1.define_singleton_method(:inspect) { "#<#{OpenAI::Internal}::OMIT>" } end .freeze + + define_sorbet_constant!(:AnyHash) do + T.type_alias { T::Hash[Symbol, T.anything] } + end end end diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 0d15ff3d..db914a5c 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -7,6 +7,8 @@ module Transport # # @abstract class BaseClient + extend OpenAI::Internal::Util::SorbetRuntimeSupport + # from whatwg fetch spec MAX_REDIRECTS = 20 @@ -477,6 +479,54 @@ def inspect "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" # rubocop:enable Layout/LineLength end + + define_sorbet_constant!(:RequestComponents) do + T.type_alias do + { + method: Symbol, + path: T.any(String, T::Array[String]), + query: T.nilable(T::Hash[String, 
T.nilable(T.any(T::Array[String], String))]), + headers: T.nilable( + T::Hash[String, + T.nilable( + T.any( + String, + Integer, + T::Array[T.nilable(T.any(String, Integer))] + ) + )] + ), + body: T.nilable(T.anything), + unwrap: T.nilable( + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ) + ), + page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), + stream: T.nilable( + T::Class[OpenAI::Internal::Type::BaseStream[T.anything, + OpenAI::Internal::Type::BaseModel]] + ), + model: T.nilable(OpenAI::Internal::Type::Converter::Input), + options: T.nilable(OpenAI::RequestOptions::OrHash) + } + end + end + define_sorbet_constant!(:RequestInput) do + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + max_retries: Integer, + timeout: Float + } + end + end end end end diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index ce606177..2c93cc86 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -5,6 +5,8 @@ module Internal module Transport # @api private class PooledNetRequester + extend OpenAI::Internal::Util::SorbetRuntimeSupport + # from the golang stdlib # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 KEEP_ALIVE_TIMEOUT = 30 @@ -187,6 +189,18 @@ def initialize(size: Etc.nprocessors) @size = size @pools = {} end + + define_sorbet_constant!(:Request) do + T.type_alias do + { + method: Symbol, + url: URI::Generic, + headers: T::Hash[String, String], + body: T.anything, + deadline: Float + } + end + end end end end diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 5b1a4113..3c043142 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -29,7 +29,7 @@ class ArrayOf # # @option spec [Boolean] :"nil?" # - # @return [OpenAI::Internal::Type::ArrayOf] + # @return [self] def self.[](...) = new(...) # @api public diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 0035eb6c..dc28182f 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -6,6 +6,7 @@ module Type # @abstract class BaseModel extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport class << self # @api private @@ -13,11 +14,17 @@ class << self # Assumes superclass fields are totally defined before fields are accessed / # defined on subclasses. # - # @return [Hash{Symbol=>Hash{Symbol=>Object}}] - def known_fields - @known_fields ||= (self < OpenAI::Internal::Type::BaseModel ? 
superclass.known_fields.dup : {}) + # @param child [Class] + def inherited(child) + super + child.known_fields.replace(known_fields.dup) end + # @api private + # + # @return [Hash{Symbol=>Hash{Symbol=>Object}}] + def known_fields = @known_fields ||= {} + # @api private # # @return [Hash{Symbol=>Hash{Symbol=>Object}}] @@ -199,7 +206,7 @@ class << self # # @option state [Integer] :branched # - # @return [OpenAI::Internal::Type::BaseModel, Object] + # @return [self, Object] def coerce(value, state:) exactness = state.fetch(:exactness) @@ -258,7 +265,7 @@ def coerce(value, state:) # @api private # - # @param value [OpenAI::Internal::Type::BaseModel, Object] + # @param value [self, Object] # # @param state [Hash{Symbol=>Object}] . # @@ -424,6 +431,10 @@ def to_s = self.class.walk(@data).to_s # # @return [String] def inspect = "#<#{self.class}:0x#{object_id.to_s(16)} #{self}>" + + define_sorbet_constant!(:KnownField) do + T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } + end end end end diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 0d498235..29cee8df 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -5,6 +5,8 @@ module Internal module Type # @api private module Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport + # rubocop:disable Lint/UnusedMethodArgument # @api private @@ -268,6 +270,22 @@ def inspect(target, depth:) end end end + + define_sorbet_constant!(:Input) do + T.type_alias { T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) } + end + define_sorbet_constant!(:CoerceState) do + T.type_alias do + { + strictness: T.any(T::Boolean, Symbol), + exactness: {yes: Integer, no: Integer, maybe: Integer}, + branched: Integer + } + end + end + define_sorbet_constant!(:DumpState) do + T.type_alias { {can_retry: T::Boolean} } + end end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 30c8e0c0..c28bf11f 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -42,6 +42,7 @@ module Type # end module Enum include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport # All of the valid Symbol values for this enum. # diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 20280b5b..6e60bc15 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -29,7 +29,7 @@ class HashOf # # @option spec [Boolean] :"nil?" # - # @return [OpenAI::Internal::Type::HashOf] + # @return [self] def self.[](...) = new(...) 
# @api public diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index e653de47..47a040ff 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -31,6 +31,7 @@ module Type # end module Union include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport # @api private # diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 32f9ae41..b33fa88f 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -801,6 +801,62 @@ def decode_sse(lines) end end end + + # @api private + module SorbetRuntimeSupport + class MissingSorbetRuntimeError < ::RuntimeError + end + + # @api private + # + # @return [Hash{Symbol=>Object}] + private def sorbet_runtime_constants = @sorbet_runtime_constants ||= {} + + # @api private + # + # @param name [Symbol] + def const_missing(name) + super unless sorbet_runtime_constants.key?(name) + + unless Object.const_defined?(:T) + message = "Trying to access a Sorbet constant #{name.inspect} without `sorbet-runtime`." + raise MissingSorbetRuntimeError.new(message) + end + + sorbet_runtime_constants.fetch(name).call + end + + # @api private + # + # @param name [Symbol] + # @param blk [Proc] + def define_sorbet_constant!(name, &blk) = sorbet_runtime_constants.store(name, blk) + end + + extend OpenAI::Internal::Util::SorbetRuntimeSupport + + define_sorbet_constant!(:ParsedUri) do + T.type_alias do + { + scheme: T.nilable(String), + host: T.nilable(String), + port: T.nilable(Integer), + path: T.nilable(String), + query: T::Hash[String, T::Array[String]] + } + end + end + + define_sorbet_constant!(:ServerSentEvent) do + T.type_alias do + { + event: T.nilable(String), + data: T.nilable(String), + id: T.nilable(String), + retry: T.nilable(Integer) + } + end + end end end end diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 31c2a050..6406a297 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -1,6 +1,33 @@ # frozen_string_literal: true module OpenAI + [OpenAI::Internal::Type::BaseModel, *OpenAI::Internal::Type::BaseModel.subclasses].each do |cls| + cls.define_sorbet_constant!(:OrHash) { T.type_alias { T.any(cls, OpenAI::Internal::AnyHash) } } + end + + [ + *OpenAI::Internal::Type::Enum.included_modules, + *OpenAI::Internal::Type::Union.included_modules + ].each do |cls| + cls.constants.each do |name| + case cls.const_get(name) + in true | false + cls.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, cls) } } + cls.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } + in Integer + cls.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, cls) } } + cls.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } + in Float + cls.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, cls) } } + cls.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } + in Symbol + cls.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, cls) } } + cls.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } + else + end + end + end + AllModels = OpenAI::Models::AllModels Audio = OpenAI::Models::Audio diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index dd458b9f..a337cfe0 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -25,6 +25,12 @@ module ResponsesOnlyModel # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, 
OpenAI::AllModels::ResponsesOnlyModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(String, OpenAI::ChatModel::TaggedSymbol, OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol) + end + end end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 9a2372af..6a4e469f 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -81,6 +81,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::Audio::SpeechModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::Audio::SpeechModel::TaggedSymbol) } + end end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, @@ -117,6 +121,10 @@ module Voice # @!method self.variants # @return [Array(String, Symbol)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) } + end + # @!group ALLOY = :alloy diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 8fabd4b2..4015cac8 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -118,6 +118,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::AudioModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } + end end module TimestampGranularity diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index ec74e17e..8f0ea45a 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -20,6 +20,10 @@ module TranscriptionCreateResponse # @!method self.variants # @return [Array(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose) } + end end end end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 9d386b9b..93079e55 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -20,6 +20,12 @@ module TranscriptionStreamEvent # @!method self.variants # @return [Array(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent) + end + end end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index ed3107e2..a3594a7c 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -76,6 +76,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::AudioModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } + end end # The format of the output, in one of these options: `json`, `text`, `srt`, diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index 
f24d4b2b..c0fcb1fc 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -13,6 +13,10 @@ module TranslationCreateResponse # @!method self.variants # @return [Array(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose) } + end end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index f05d1764..c3a32b57 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -162,6 +162,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + end end class ToolResources < OpenAI::Internal::Type::BaseModel @@ -362,6 +366,15 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + end end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 411077cb..4e58c10e 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -44,6 +44,17 @@ module AssistantResponseFormatOption # @!method self.variants # @return [Array(Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + Symbol, + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONObject, + OpenAI::ResponseFormatJSONSchema + ) + end + end end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 74345189..6f78210f 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -730,6 +730,37 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, 
OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Beta::AssistantStreamEvent::ErrorEvent)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Beta::AssistantStreamEvent::ErrorEvent + ) + end + end end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 111defb9..c6f7c311 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -16,6 +16,12 @@ module AssistantTool # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool) + end + end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 5a87d00a..0561c756 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -36,6 +36,15 @@ module Auto # @!method self.variants # @return [Array(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Beta::AssistantToolChoice + ) + end + end end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 3d9b9fbc..40734bc9 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -232,6 +232,10 @@ module Model # @!method 
self.variants # @return [Array(String, Symbol)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + end + # @!group GPT_4_1 = :"gpt-4.1" diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index ce394898..564f9417 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -161,6 +161,18 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, + OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, + OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, + OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, + OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete + ) + end + end end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 40fef09f..165aeefc 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -213,6 +213,20 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired + ) + end + end end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index ca41f968..8c2d8801 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -292,6 +292,23 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Beta::RunStreamEvent::ThreadRunExpired)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCreated, + 
OpenAI::Beta::RunStreamEvent::ThreadRunQueued, + OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, + OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, + OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, + OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, + OpenAI::Beta::RunStreamEvent::ThreadRunFailed, + OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, + OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, + OpenAI::Beta::RunStreamEvent::ThreadRunExpired + ) + end + end end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 5d4533ef..f9addd97 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -214,6 +214,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + end end class Thread < OpenAI::Internal::Type::BaseModel @@ -332,6 +336,21 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) + ] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -401,6 +420,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch + ) + end + end end end end @@ -605,6 +633,15 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + end end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index c4d1c025..df5b80d4 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -113,6 +113,21 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) + ] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -182,6 +197,15 @@ class FileSearch <
OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch + ) + end + end end end end @@ -380,6 +404,15 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index c110cbbd..080262b3 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -20,6 +20,12 @@ module Annotation # @!method self.variants # @return [Array(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation) + end + end end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index e5b290d5..2dbd5956 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -20,6 +20,15 @@ module AnnotationDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Beta::Threads::FilePathDeltaAnnotation)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Beta::Threads::FilePathDeltaAnnotation + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 19af32cc..ad5cd0f2 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -184,6 +184,15 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly + ) + end + end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 295d6858..f40e35dd 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -25,6 +25,17 @@ module MessageContent # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + 
OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlock, + OpenAI::Beta::Threads::RefusalContentBlock + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index a6b04dc3..80ecb9cb 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -25,6 +25,17 @@ module MessageContentDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileDeltaBlock, OpenAI::Beta::Threads::TextDeltaBlock, OpenAI::Beta::Threads::RefusalDeltaBlock, OpenAI::Beta::Threads::ImageURLDeltaBlock)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Beta::Threads::TextDeltaBlock, + OpenAI::Beta::Threads::RefusalDeltaBlock, + OpenAI::Beta::Threads::ImageURLDeltaBlock + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 93fd228a..65e1a5b3 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -22,6 +22,16 @@ module MessageContentPartParam # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 223feced..11c3ea83 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -74,6 +74,21 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) + ] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -141,6 +156,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 773bbb7b..65cf8129 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -303,6 +303,21 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock,
OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Beta::Threads::TextContentBlockParam + ) + ] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -372,6 +387,15 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::CodeInterpreterTool, + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch + ) + end + end end end end @@ -390,6 +414,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 4c7b9f6a..5d03c473 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -148,6 +148,15 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 98ceaa6b..6ce54421 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -89,6 +89,15 @@ module Output # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 2dc26909..656fc313 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -223,6 +223,15 @@ module StepDetails # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails + ) + end + end end # The type of run step, which can be either `message_creation` or `tool_calls`. 
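Every hunk in this patch follows the same shape: each union module keeps its runtime `variants` list and now also exposes a lazily evaluated Sorbet alias through `define_sorbet_constant!(:Variants)`. A minimal sketch of how a Sorbet-typed caller might consume one of these aliases; the `StepDetailsInspector` class below is hypothetical (not part of this patch), and it assumes the alias from the `run_step.rb` hunk above resolves at `OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants`:

# typed: true
require "sorbet-runtime"
require "openai"

class StepDetailsInspector
  extend T::Sig

  # The alias names the whole union once instead of repeating every member
  # class in each signature.
  sig do
    params(details: OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants)
      .returns(String)
  end
  def self.describe(details)
    case details
    when OpenAI::Beta::Threads::Runs::MessageCreationStepDetails
      "message_creation"
    when OpenAI::Beta::Threads::Runs::ToolCallsStepDetails
      "tool_calls"
    else
      # Both union members are handled above, so Sorbet proves this branch
      # unreachable; at runtime it raises if an unexpected type slips in.
      T.absurd(details)
    end
  end
end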
diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 4666af0b..a4848a8c 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -33,6 +33,15 @@ module StepDetails # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Beta::Threads::Runs::ToolCallDeltaObject + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 4140ec79..9452c475 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -20,6 +20,16 @@ module ToolCall # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Beta::Threads::Runs::FunctionToolCall)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Beta::Threads::Runs::FunctionToolCall + ) + end + end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index ab51e0a3..10ad55e8 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -20,6 +20,16 @@ module ToolCallDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Beta::Threads::Runs::FunctionToolCallDelta)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta + ) + end + end end end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 2f9fbe2b..b49a8303 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -126,11 +126,32 @@ module ArrayOfContentPart # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal + ) + end + end end # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartRefusal + )] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] ArrayOfContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index e9901c92..3ab3a1d0 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@
-82,6 +82,10 @@ module Voice # @!method self.variants # @return [Array(String, Symbol)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } + end + # @!group ALLOY = :alloy diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 64a02cf0..a79c7dbf 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -77,6 +77,17 @@ class File < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File + ) + end + end end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 31fafa47..492403d8 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -54,6 +54,10 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } + end + # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 6710bee4..b3ba243b 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -34,6 +34,19 @@ module ChatCompletionMessageParam # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Chat::ChatCompletionSystemMessageParam, OpenAI::Chat::ChatCompletionUserMessageParam, OpenAI::Chat::ChatCompletionAssistantMessageParam, OpenAI::Chat::ChatCompletionToolMessageParam, OpenAI::Chat::ChatCompletionFunctionMessageParam)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Chat::ChatCompletionSystemMessageParam, + OpenAI::Chat::ChatCompletionUserMessageParam, + OpenAI::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Chat::ChatCompletionToolMessageParam, + OpenAI::Chat::ChatCompletionFunctionMessageParam + ) + end + end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index eeccd9df..0b0ba1f0 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -50,6 +50,10 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } + end + # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> {
OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 43b7a5c4..746905d7 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -54,6 +54,10 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } + end + # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 4b8a6fe3..506d3899 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -37,6 +37,15 @@ module Auto # @!method self.variants # @return [Array(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, + OpenAI::Chat::ChatCompletionNamedToolChoice + ) + end + end end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index c88bb51c..6ab285b1 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -46,6 +46,10 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } + end + # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index ffd7b68a..33a1f532 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -51,6 +51,22 @@ module Content # @!method self.variants # @return [Array(String, Array<OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File>)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Chat::ChatCompletionContentPart::File + ) + ] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 3a3a3c23..69ccac80 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -412,6 +412,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + end end # @deprecated @@ -454,6 +458,15 @@ module
FunctionCallMode # @!method self.variants # @return [Array(Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, + OpenAI::Chat::ChatCompletionFunctionCallOption + ) + end + end end # @deprecated @@ -533,6 +546,16 @@ module ResponseFormat # @!method self.variants # @return [Array(OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject + ) + end + end end # Specifies the latency tier to use for processing the request. This parameter is @@ -577,6 +600,10 @@ module Stop # @!method self.variants # @return [Array(String, Array<String>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.nilable(T.any(String, T::Array[String])) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 582e2f5a..2847efa6 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -81,6 +81,10 @@ module Value # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 075f9d95..bc8a3619 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -241,6 +241,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::CompletionCreateParams::Model::TaggedSymbol) } + end + # @!group GPT_3_5_TURBO_INSTRUCT = :"gpt-3.5-turbo-instruct" @@ -270,6 +274,10 @@ module Prompt # @!method self.variants # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -294,6 +302,10 @@ module Stop # @!method self.variants # @return [Array(String, Array<String>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.nilable(T.any(String, T::Array[String])) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 8bfaf4d2..2791671b 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -38,6 +38,10 @@ module Filter # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, Object)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::ComparisonFilter, T.anything) } + end end # Type of operation: `and` or `or`.
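Several of the aliases above are nilable rather than plain unions — `Stop`, for example, wraps `T.nilable(T.any(String, T::Array[String]))`. A hedged sketch of normalizing such a value; the `StopParam` module and its `normalize` method are our names, not the SDK's, and the alias path is assumed:

# typed: true
require "sorbet-runtime"
require "openai"

module StopParam
  extend T::Sig

  # Collapse nil, a single stop sequence, or a list of stop sequences into
  # one canonical array form.
  sig do
    params(stop: OpenAI::Chat::CompletionCreateParams::Stop::Variants)
      .returns(T::Array[String])
  end
  def self.normalize(stop)
    case stop
    when nil then []
    when String then [stop]
    when Array then stop
    else T.absurd(stop)
    end
  end
end

StopParam.normalize(nil)          # => []
StopParam.normalize("END")        # => ["END"]
StopParam.normalize(%w[END STOP]) # => ["END", "STOP"]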
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 48890276..7640b480 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -94,6 +94,10 @@ module Input # @!method self.variants # @return [Array(String, Array<String>, Array<Integer>, Array<Array<Integer>>)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -119,6 +123,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::EmbeddingModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::EmbeddingModel::TaggedSymbol) } + end end # The format to return the embeddings in. Can be either `float` or diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 668813af..1dbd1acd 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -130,6 +130,15 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions + ) + end + end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -338,6 +347,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + end + end end # The role of the message input.
One of `user`, `assistant`, `system`, or @@ -371,6 +390,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + ) + end + end end end @@ -415,6 +443,18 @@ class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Graders::StringCheckGrader, + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, + OpenAI::EvalCreateParams::TestingCriterion::Python, + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel + ) + end + end end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 992dbed4..6030deb8 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -99,6 +99,12 @@ module DataSourceConfig # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig) + end + end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -163,6 +169,18 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel + ) + end + end end end end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index d75a67aa..966d6bcb 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -99,6 +99,12 @@ module DataSourceConfig # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig) + end + end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -163,6 +169,18 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return 
[Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel + ) + end + end end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 8cfd179d..d8e2ca54 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -99,6 +99,12 @@ module DataSourceConfig # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig) + end + end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -163,6 +169,18 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel + ) + end + end end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 67e02168..61f3e677 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -99,6 +99,12 @@ module DataSourceConfig # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig) + end + end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -163,6 +169,18 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader, + OpenAI::Graders::StringCheckGrader, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, + 
OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel + ) + end + end end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index f31118d7..92535eb5 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -188,6 +188,16 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions + ) + end + end end # The type of run data source. Always `completions`. @@ -356,6 +366,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -389,6 +409,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) + end + end end end @@ -413,6 +442,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#sampling_params diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 514f2494..faedad0e 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -92,6 +92,15 @@ class FileID < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + 
OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + ) + end + end end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 1a5d2402..e9ac234b 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -382,6 +382,16 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses + ) + end + end end # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#input_messages @@ -542,6 +552,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -575,6 +595,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + end end end @@ -599,6 +628,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#sampling_params @@ -640,6 +678,16 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + 
OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + ) + end + end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 46e49a3d..849d33d9 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -310,6 +310,16 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + end + end end # The type of run data source. Always `completions`. @@ -498,6 +508,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end end # The role of the message input. 
One of `user`, `assistant`, `system`, or @@ -531,6 +551,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + end + end end end @@ -555,6 +584,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params @@ -596,6 +634,16 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + ) + end + end end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index d8870c35..988f2fe9 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -382,6 +382,16 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses + ) + end + end end # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#input_messages @@ -542,6 +552,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + 
OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -575,6 +595,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + end end end @@ -599,6 +628,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#sampling_params @@ -640,6 +678,16 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + ) + end + end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 6f7a7637..6a038c54 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -382,6 +382,16 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses + ) + end + end end # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#input_messages @@ -538,6 +548,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + 
OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -571,6 +591,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + end end end @@ -595,6 +624,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#sampling_params @@ -636,6 +674,16 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunListResponse::DataSource::Completions + ) + end + end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 84c210e8..6286d0fd 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -384,6 +384,16 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses + ) + end + end end # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#input_messages @@ -544,6 +554,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + 
define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -577,6 +597,15 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem + ) + end + end end end @@ -601,6 +630,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference + ) + end + end end # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#sampling_params @@ -642,6 +680,16 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + ) + end + end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 5ee317be..117b02fa 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -15,6 +15,12 @@ module FileChunkingStrategy # @!method self.variants # @return [Array(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject) + end + end end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 6a46bdfc..c70c3336 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -17,6 +17,12 @@ module FileChunkingStrategyParam # @!method self.variants # @return [Array(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam) + end + end end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb 
b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb index 921ece7c..e425c638 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -62,6 +62,18 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + end end # The reference answer for the evaluation. @@ -79,6 +91,10 @@ module ReferenceAnswer # @!method self.variants # @return [Array(String, Object, Array, Float)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T.anything, T::Array[T.anything], Float) } + end + # @type [OpenAI::Internal::Type::Converter] UnionMember2Array = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb index a252fb6a..6ec580aa 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb @@ -41,6 +41,18 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + end end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb index a89553ee..f0c28dcc 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb @@ -38,6 +38,18 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + end end end end diff --git a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb index b57d2245..54b87256 100644 --- a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb @@ -60,6 +60,10 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # The beta value for the DPO method. 
A higher beta value will increase the weight @@ -75,6 +79,10 @@ module Beta # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -90,6 +98,10 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -105,6 +117,10 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index aaf073d0..80821e35 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -270,6 +270,10 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -285,6 +289,10 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -300,6 +308,10 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index de31c8f6..9b1d6016 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -148,6 +148,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::FineTuning::JobCreateParams::Model::TaggedSymbol) } + end + # @!group BABBAGE_002 = :"babbage-002" @@ -208,6 +212,10 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -223,6 +231,10 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -238,6 +250,10 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end end diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb index 7df826e1..ee22496e 100644 --- a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -87,6 +87,10 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Multiplier on amount of compute used for exploring search space during training. @@ -101,6 +105,10 @@ module ComputeMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of training steps between evaluation runs. @@ -115,6 +123,10 @@ module EvalInterval # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Number of evaluation samples to generate per training step. @@ -129,6 +141,10 @@ module EvalSamples # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -144,6 +160,10 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -159,6 +179,10 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Level of reasoning effort. diff --git a/lib/openai/models/fine_tuning/reinforcement_method.rb b/lib/openai/models/fine_tuning/reinforcement_method.rb index d2955f1e..cfe966c9 100644 --- a/lib/openai/models/fine_tuning/reinforcement_method.rb +++ b/lib/openai/models/fine_tuning/reinforcement_method.rb @@ -46,6 +46,18 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::MultiGrader + ) + end + end end end end diff --git a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb index 612870f0..2a907416 100644 --- a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb @@ -51,6 +51,10 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end # Scaling factor for the learning rate. 
A smaller learning rate may be useful to @@ -66,6 +70,10 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Float) } + end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -81,6 +89,10 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(Symbol, Integer) } + end end end end diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index 99e0e087..801b3432 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -133,6 +133,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index 3cb7bf6b..520ef0e2 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -62,6 +62,18 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::LabelModelGrader + ) + end + end end end end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 62cf1a6c..7742ec75 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -133,6 +133,16 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + end + end end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 66ad7ea0..f83ea80a 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -80,6 +80,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + end end # The format in which the generated images are returned. 
Must be one of `url` or diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index c38c7821..aec90bd0 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -137,6 +137,10 @@ module Image # @!method self.variants # @return [Array(StringIO, Array)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(StringIO, T::Array[StringIO]) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput] end @@ -172,6 +176,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + end end # The quality of the image that will be generated. `high`, `medium` and `low` are diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index c46f1558..a3aca3cb 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -172,6 +172,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } + end end # Control the content-moderation level for images generated by `gpt-image-1`. Must diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 59e0cdd9..72f3a355 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -50,6 +50,16 @@ module Input # @!method self.variants # @return [Array(String, Array, Array)] + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[String], + T::Array[T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] + ) + end + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -74,6 +84,10 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ModerationModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, OpenAI::ModerationModel::TaggedSymbol) } + end end end end diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 5142ae94..ef09466b 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -16,6 +16,10 @@ module ModerationMultiModalInput # @!method self.variants # @return [Array(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput) } + end end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 3cdb0887..ecf0c374 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -56,6 +56,21 @@ module Content # @!method self.variants # @return [Array(String, Array)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) + ] + ) + end + end end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 515ef7a2..938f84c6 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -67,6 +67,10 @@ module Filters # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } + end end # @see OpenAI::Responses::FileSearchTool#ranking_options diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 4813a1af..d8358f27 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -328,6 +328,16 @@ module ToolChoice # @!method self.variants # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction + ) + end + end end # Specifies the latency tier to use for processing the request. This parameter is diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 16bac78f..81114445 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -142,6 +142,15 @@ class File < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) + end + end end # The status of the code interpreter tool call. 
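To make the effect of these `Variants` additions concrete, here is a hedged usage sketch, not part of the patch itself: `ResultPrinter` and `summarize_result` are hypothetical names. With the alias in place, a downstream `# typed: strong` file can reference the lazily defined union directly in a signature and get exhaustiveness checking over its members.

```ruby
# typed: strong
require "sorbet-runtime"
require "openai"

class ResultPrinter
  extend T::Sig

  # `Result::Variants` resolves through const_missing to the T.type_alias
  # registered by define_sorbet_constant! in the hunk above.
  sig do
    params(
      result: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants
    ).returns(String)
  end
  def summarize_result(result)
    case result
    when OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs
      "code interpreter produced logs"
    when OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files
      "code interpreter produced files"
    else
      T.absurd(result) # Sorbet proves the branches above are exhaustive
    end
  end
end
```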
diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index b18746b0..9a70b230 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -424,6 +424,22 @@ class Wait < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Responses::ResponseComputerToolCall::Action::Wait + ) + end + end end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 95e1afdd..7b4e0c77 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -24,6 +24,18 @@ module ResponseContent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile, OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile, + OpenAI::Responses::ResponseOutputText, + OpenAI::Responses::ResponseOutputRefusal + ) + end + end end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 1bb0cddf..6b16ea84 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -66,6 +66,12 @@ module Part # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) + end + end end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 56b1e274..e2af41dd 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -66,6 +66,12 @@ module Part # @!method self.variants # @return 
[Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) + end + end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 4a38c09e..affb7fa5 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -275,6 +275,29 @@ module Input # @!method self.variants # @return [Array(String, Array)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + ] + ) + end + end end # Specifies the latency tier to use for processing the request. This parameter is @@ -330,6 +353,16 @@ module ToolChoice # @!method self.variants # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceTypes, + OpenAI::Responses::ToolChoiceFunction + ) + end + end end # The truncation strategy to use for the model response. 
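Every `define_sorbet_constant!` block above follows one pattern, and the rbi hunks later in this patch (`OpenAI::Internal::Util::SorbetRuntimeSupport`, with `const_missing`, `sorbet_runtime_constants`, and `define_sorbet_constant!`) declare the machinery behind it. As a rough illustration of the idea only, an assumption rather than the gem's actual implementation, lazy constant definition can be sketched in plain Ruby:

```ruby
# Minimal sketch of the lazy-constant pattern: blocks are registered by
# name and evaluated only on first constant lookup, so expensive (or
# optional) values are never built unless something references them.
module LazyConstants
  def define_lazy_constant!(name, &blk)
    (@lazy_constants ||= {})[name] = blk
  end

  def const_missing(name)
    blk = (@lazy_constants ||= {})[name]
    return super unless blk

    const_set(name, blk.call) # memoize so const_missing fires once per name
  end
end

module Palette
  extend LazyConstants
  define_lazy_constant!(:Variants) { %i[red green blue].freeze }
end

Palette::Variants # => [:red, :green, :blue], built on first access
```

The design benefit for this SDK is that plain-Ruby users never need sorbet-runtime loaded: the `T.type_alias` blocks only execute if a constant such as `Variants` is actually referenced from typed code.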
diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index cc1b88ec..f8f7fc1b 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -138,6 +138,10 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 81d0ad54..d81ab862 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -36,6 +36,16 @@ module ResponseFormatTextConfig # @!method self.variants # @return [Array(OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject + ) + end + end end end end diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 7e349985..d518271c 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -20,6 +20,16 @@ module ResponseInputContent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) + end + end end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 8ce7e375..62cf20ee 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -357,6 +357,24 @@ module Type # @!method self.variants # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + end + end end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index 
e0bd4301..fe6001ab 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -36,6 +36,21 @@ module ResponseItem # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem + ) + end + end end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 8e653d3a..502acd49 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -36,6 +36,19 @@ module ResponseOutputItem # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseReasoningItem + ) + end + end end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 2e7eee71..d0d7f291 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -68,6 +68,12 @@ module Content # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) + end + end end # The status of the message input. 
One of `in_progress`, `completed`, or diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 6234918b..fc08cb56 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -157,6 +157,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::FilePath + ) + end + end end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 48456245..4f6c6c4a 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -140,6 +140,49 @@ module ResponseStreamEvent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioDeltaEvent, + OpenAI::Responses::ResponseAudioDoneEvent, + OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + 
OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Responses::ResponseCompletedEvent, + OpenAI::Responses::ResponseContentPartAddedEvent, + OpenAI::Responses::ResponseContentPartDoneEvent, + OpenAI::Responses::ResponseCreatedEvent, + OpenAI::Responses::ResponseErrorEvent, + OpenAI::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Responses::ResponseInProgressEvent, + OpenAI::Responses::ResponseFailedEvent, + OpenAI::Responses::ResponseIncompleteEvent, + OpenAI::Responses::ResponseOutputItemAddedEvent, + OpenAI::Responses::ResponseOutputItemDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, + OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, + OpenAI::Responses::ResponseRefusalDeltaEvent, + OpenAI::Responses::ResponseRefusalDoneEvent, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Responses::ResponseTextDeltaEvent, + OpenAI::Responses::ResponseTextDoneEvent, + OpenAI::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Responses::ResponseWebSearchCallSearchingEvent + ) + end + end end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 8ffc14a6..f8cc77c2 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -188,6 +188,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath + ) + end + end end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 97939459..2d2c6702 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -23,6 +23,17 @@ module Tool # @!method self.variants # @return [Array(OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::WebSearchTool)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::FunctionTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::WebSearchTool + ) + end + end end end end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 5634269f..a35d5278 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -25,6 +25,16 @@ module 
ResponsesOnlyModel # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::ChatModel::TaggedSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + end + end end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 32bf6133..2009716d 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -65,6 +65,10 @@ module Query # @!method self.variants # @return [Array(String, Array)] + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, T::Array[String]) } + end + # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -81,6 +85,10 @@ module Filters # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } + end end class RankingOptions < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 5b623829..31f23ac7 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -66,6 +66,10 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end class Content < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 5fd03105..f590ab7f 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -60,6 +60,10 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 8cc4cee4..0d1bc6ed 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -60,6 +60,10 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index be3d5d7b..30c15708 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -48,6 +48,10 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index eab6df4b..594fd1bc 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -163,6 +163,10 @@ module Attribute # @!method self.variants # 
@return [Array(String, Float, Boolean)] + + define_sorbet_constant!(:Variants) do + T.type_alias { T.any(String, Float, T::Boolean) } + end end end end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index d4a15822..ed62d70f 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -69,5 +69,9 @@ def self.validate!(opts) # Returns a new instance of RequestOptions. # # @param values [Hash{Symbol=>Object}] + + define_sorbet_constant!(:OrHash) do + T.type_alias { T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) } + end end end diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 4ade8ef1..4fa76a30 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -122,7 +122,7 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ).returns(T.attached_class) + ).returns(T.self_type) end def self.for(url:, status:, body:, request:, response:, message: nil) end diff --git a/rbi/openai/internal.rbi b/rbi/openai/internal.rbi index 8390947e..135f6cba 100644 --- a/rbi/openai/internal.rbi +++ b/rbi/openai/internal.rbi @@ -2,6 +2,8 @@ module OpenAI module Internal + extend OpenAI::Internal::Util::SorbetRuntimeSupport + # Due to the current WIP status of Shapes support in Sorbet, types referencing # this alias might be refined in the future. AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } diff --git a/rbi/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi index 29580549..b66e1262 100644 --- a/rbi/openai/internal/transport/base_client.rbi +++ b/rbi/openai/internal/transport/base_client.rbi @@ -5,9 +5,11 @@ module OpenAI module Transport # @api private class BaseClient + extend OpenAI::Internal::Util::SorbetRuntimeSupport + abstract! 
- RequestComponentsShape = + RequestComponents = T.type_alias do { method: Symbol, @@ -61,7 +63,7 @@ module OpenAI } end - RequestInputShape = + RequestInput = T.type_alias do { method: Symbol, @@ -82,8 +84,7 @@ module OpenAI # @api private sig do params( - req: - OpenAI::Internal::Transport::BaseClient::RequestComponentsShape + req: OpenAI::Internal::Transport::BaseClient::RequestComponents ).void end def validate!(req) @@ -102,13 +103,10 @@ module OpenAI # @api private sig do params( - request: - OpenAI::Internal::Transport::BaseClient::RequestInputShape, + request: OpenAI::Internal::Transport::BaseClient::RequestInput, status: Integer, response_headers: T.any(T::Hash[String, String], Net::HTTPHeader) - ).returns( - OpenAI::Internal::Transport::BaseClient::RequestInputShape - ) + ).returns(OpenAI::Internal::Transport::BaseClient::RequestInput) end def follow_redirect(request, status:, response_headers:) end @@ -175,11 +173,10 @@ module OpenAI sig do overridable .params( - req: - OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + req: OpenAI::Internal::Transport::BaseClient::RequestComponents, opts: OpenAI::Internal::AnyHash ) - .returns(OpenAI::Internal::Transport::BaseClient::RequestInputShape) + .returns(OpenAI::Internal::Transport::BaseClient::RequestInput) end private def build_request(req, opts) end @@ -197,7 +194,7 @@ module OpenAI # @api private sig do params( - request: OpenAI::Internal::Transport::BaseClient::RequestInputShape, + request: OpenAI::Internal::Transport::BaseClient::RequestInput, redirect_count: Integer, retry_count: Integer, send_retry_header: T::Boolean diff --git a/rbi/openai/internal/transport/pooled_net_requester.rbi b/rbi/openai/internal/transport/pooled_net_requester.rbi index 373c1c3b..f8eeecf5 100644 --- a/rbi/openai/internal/transport/pooled_net_requester.rbi +++ b/rbi/openai/internal/transport/pooled_net_requester.rbi @@ -5,7 +5,9 @@ module OpenAI module Transport # @api private class PooledNetRequester - RequestShape = + extend OpenAI::Internal::Util::SorbetRuntimeSupport + + Request = T.type_alias do { method: Symbol, @@ -34,8 +36,7 @@ module OpenAI # @api private sig do params( - request: - OpenAI::Internal::Transport::PooledNetRequester::RequestShape, + request: OpenAI::Internal::Transport::PooledNetRequester::Request, blk: T.proc.params(arg0: String).void ).returns([Net::HTTPGenericRequest, T.proc.void]) end @@ -57,8 +58,7 @@ module OpenAI # @api private sig do params( - request: - OpenAI::Internal::Transport::PooledNetRequester::RequestShape + request: OpenAI::Internal::Transport::PooledNetRequester::Request ).returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) end def execute(request) diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi index f0bcae46..3357deae 100644 --- a/rbi/openai/internal/type/base_model.rbi +++ b/rbi/openai/internal/type/base_model.rbi @@ -5,10 +5,11 @@ module OpenAI module Type class BaseModel extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport abstract! - KnownFieldShape = + KnownField = T.type_alias do { mode: T.nilable(Symbol), @@ -17,19 +18,27 @@ module OpenAI } end - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Internal::Type::BaseModel, OpenAI::Internal::AnyHash) + end class << self # @api private # # Assumes superclass fields are totally defined before fields are accessed / # defined on subclasses. 
+ sig { params(child: T.self_type).void } + def inherited(child) + end + + # @api private sig do returns( T::Hash[ Symbol, T.all( - OpenAI::Internal::Type::BaseModel::KnownFieldShape, + OpenAI::Internal::Type::BaseModel::KnownField, { type_fn: T.proc.returns(OpenAI::Internal::Type::Converter::Input) @@ -47,7 +56,7 @@ module OpenAI T::Hash[ Symbol, T.all( - OpenAI::Internal::Type::BaseModel::KnownFieldShape, + OpenAI::Internal::Type::BaseModel::KnownField, { type: OpenAI::Internal::Type::Converter::Input } ) ] diff --git a/rbi/openai/internal/type/base_page.rbi b/rbi/openai/internal/type/base_page.rbi index 1df53287..c097c095 100644 --- a/rbi/openai/internal/type/base_page.rbi +++ b/rbi/openai/internal/type/base_page.rbi @@ -29,8 +29,7 @@ module OpenAI sig do params( client: OpenAI::Internal::Transport::BaseClient, - req: - OpenAI::Internal::Transport::BaseClient::RequestComponentsShape, + req: OpenAI::Internal::Transport::BaseClient::RequestComponents, headers: T.any(T::Hash[String, String], Net::HTTPHeader), page_data: T.anything ).void diff --git a/rbi/openai/internal/type/converter.rbi b/rbi/openai/internal/type/converter.rbi index eb354592..cbace65d 100644 --- a/rbi/openai/internal/type/converter.rbi +++ b/rbi/openai/internal/type/converter.rbi @@ -5,6 +5,8 @@ module OpenAI module Type # @api private module Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport + Input = T.type_alias do T.any(OpenAI::Internal::Type::Converter, T::Class[T.anything]) diff --git a/rbi/openai/internal/type/enum.rbi b/rbi/openai/internal/type/enum.rbi index 09de405a..1a1c4c47 100644 --- a/rbi/openai/internal/type/enum.rbi +++ b/rbi/openai/internal/type/enum.rbi @@ -17,6 +17,7 @@ module OpenAI # values safely. module Enum include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport # All of the valid Symbol values for this enum. 
sig do diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi index 7d3148a9..f8598117 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -6,6 +6,7 @@ module OpenAI # @api private module Union include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport # @api private # diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index 54ec6f18..fe1e8cac 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -4,6 +4,8 @@ module OpenAI module Internal # @api private module Util + extend OpenAI::Internal::Util::SorbetRuntimeSupport + # @api private sig { returns(Float) } def self.monotonic_secs @@ -172,7 +174,7 @@ module OpenAI end end - ParsedUriShape = + ParsedUri = T.type_alias do { scheme: T.nilable(String), @@ -187,7 +189,7 @@ module OpenAI # @api private sig do params(url: T.any(URI::Generic, String)).returns( - OpenAI::Internal::Util::ParsedUriShape + OpenAI::Internal::Util::ParsedUri ) end def parse_uri(url) @@ -195,7 +197,7 @@ module OpenAI # @api private sig do - params(parsed: OpenAI::Internal::Util::ParsedUriShape).returns( + params(parsed: OpenAI::Internal::Util::ParsedUri).returns( URI::Generic ) end @@ -205,8 +207,8 @@ module OpenAI # @api private sig do params( - lhs: OpenAI::Internal::Util::ParsedUriShape, - rhs: OpenAI::Internal::Util::ParsedUriShape + lhs: OpenAI::Internal::Util::ParsedUri, + rhs: OpenAI::Internal::Util::ParsedUri ).returns(URI::Generic) end def join_parsed_uri(lhs, rhs) @@ -423,6 +425,27 @@ module OpenAI def decode_sse(lines) end end + + # @api private + module SorbetRuntimeSupport + class MissingSorbetRuntimeError < ::RuntimeError + end + + # @api private + sig { returns(T::Hash[Symbol, T.anything]) } + private def sorbet_runtime_constants + end + + # @api private + sig { params(name: Symbol).void } + def const_missing(name) + end + + # @api private + sig { params(name: Symbol, blk: T.proc.returns(T.anything)).void } + def define_sorbet_constant!(name, &blk) + end + end end end end diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 254db622..87b9e0b9 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Audio::SpeechCreateParams, OpenAI::Internal::AnyHash) + end # The text to generate audio for. The maximum length is 4096 characters. sig { returns(String) } diff --git a/rbi/openai/models/audio/transcription.rbi b/rbi/openai/models/audio/transcription.rbi index b7bca615..22ae3343 100644 --- a/rbi/openai/models/audio/transcription.rbi +++ b/rbi/openai/models/audio/transcription.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Audio class Transcription < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Audio::Transcription, OpenAI::Internal::AnyHash) + end # The transcribed text. 
sig { returns(String) } @@ -56,7 +59,12 @@ module OpenAI class Logprob < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Audio::Transcription::Logprob, + OpenAI::Internal::AnyHash + ) + end # The token in the transcription. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index ba0c7098..1bffb2a0 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionCreateParams, + OpenAI::Internal::AnyHash + ) + end # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. diff --git a/rbi/openai/models/audio/transcription_segment.rbi b/rbi/openai/models/audio/transcription_segment.rbi index bac3f1b7..a86ed570 100644 --- a/rbi/openai/models/audio/transcription_segment.rbi +++ b/rbi/openai/models/audio/transcription_segment.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Audio class TranscriptionSegment < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionSegment, + OpenAI::Internal::AnyHash + ) + end # Unique identifier of the segment. sig { returns(Integer) } diff --git a/rbi/openai/models/audio/transcription_text_delta_event.rbi b/rbi/openai/models/audio/transcription_text_delta_event.rbi index 15165550..a196922d 100644 --- a/rbi/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/openai/models/audio/transcription_text_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Audio class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The text delta that was additionally transcribed. sig { returns(String) } @@ -77,7 +83,12 @@ module OpenAI class Logprob < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob, + OpenAI::Internal::AnyHash + ) + end # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/audio/transcription_text_done_event.rbi b/rbi/openai/models/audio/transcription_text_done_event.rbi index 993b519e..80acac59 100644 --- a/rbi/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/openai/models/audio/transcription_text_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Audio class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The text that was transcribed. 
sig { returns(String) } @@ -79,7 +85,12 @@ module OpenAI class Logprob < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDoneEvent::Logprob, + OpenAI::Internal::AnyHash + ) + end # The token that was used to generate the log probability. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/audio/transcription_verbose.rbi b/rbi/openai/models/audio/transcription_verbose.rbi index 6fc105f7..afd2b789 100644 --- a/rbi/openai/models/audio/transcription_verbose.rbi +++ b/rbi/openai/models/audio/transcription_verbose.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Audio class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionVerbose, + OpenAI::Internal::AnyHash + ) + end # The duration of the input audio. sig { returns(Float) } diff --git a/rbi/openai/models/audio/transcription_word.rbi b/rbi/openai/models/audio/transcription_word.rbi index 141e388e..2bf37011 100644 --- a/rbi/openai/models/audio/transcription_word.rbi +++ b/rbi/openai/models/audio/transcription_word.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Audio class TranscriptionWord < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Audio::TranscriptionWord, OpenAI::Internal::AnyHash) + end # End time of the word in seconds. sig { returns(Float) } diff --git a/rbi/openai/models/audio/translation.rbi b/rbi/openai/models/audio/translation.rbi index 253e5428..dd78df7f 100644 --- a/rbi/openai/models/audio/translation.rbi +++ b/rbi/openai/models/audio/translation.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Audio class Translation < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Audio::Translation, OpenAI::Internal::AnyHash) + end sig { returns(String) } attr_accessor :text diff --git a/rbi/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi index 0762f210..bcfb2484 100644 --- a/rbi/openai/models/audio/translation_create_params.rbi +++ b/rbi/openai/models/audio/translation_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranslationCreateParams, + OpenAI::Internal::AnyHash + ) + end # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. diff --git a/rbi/openai/models/audio/translation_verbose.rbi b/rbi/openai/models/audio/translation_verbose.rbi index ddc7e192..4dfdee59 100644 --- a/rbi/openai/models/audio/translation_verbose.rbi +++ b/rbi/openai/models/audio/translation_verbose.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Audio class TranslationVerbose < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Audio::TranslationVerbose, OpenAI::Internal::AnyHash) + end # The duration of the input audio. 
sig { returns(Float) } diff --git a/rbi/openai/models/auto_file_chunking_strategy_param.rbi b/rbi/openai/models/auto_file_chunking_strategy_param.rbi index 658b0cc9..91995701 100644 --- a/rbi/openai/models/auto_file_chunking_strategy_param.rbi +++ b/rbi/openai/models/auto_file_chunking_strategy_param.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::AutoFileChunkingStrategyParam, + OpenAI::Internal::AnyHash + ) + end # Always `auto`. sig { returns(Symbol) } diff --git a/rbi/openai/models/batch.rbi b/rbi/openai/models/batch.rbi index dcc6308c..3d6ce897 100644 --- a/rbi/openai/models/batch.rbi +++ b/rbi/openai/models/batch.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class Batch < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = T.type_alias { T.any(OpenAI::Batch, OpenAI::Internal::AnyHash) } sig { returns(String) } attr_accessor :id @@ -246,7 +246,10 @@ module OpenAI end class Errors < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Batch::Errors, OpenAI::Internal::AnyHash) + end sig { returns(T.nilable(T::Array[OpenAI::BatchError])) } attr_reader :data diff --git a/rbi/openai/models/batch_cancel_params.rbi b/rbi/openai/models/batch_cancel_params.rbi index 37f0df79..b6a056fc 100644 --- a/rbi/openai/models/batch_cancel_params.rbi +++ b/rbi/openai/models/batch_cancel_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::BatchCancelParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/batch_create_params.rbi b/rbi/openai/models/batch_create_params.rbi index 31e62492..7568daed 100644 --- a/rbi/openai/models/batch_create_params.rbi +++ b/rbi/openai/models/batch_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::BatchCreateParams, OpenAI::Internal::AnyHash) + end # The time frame within which the batch should be processed. Currently only `24h` # is supported. diff --git a/rbi/openai/models/batch_error.rbi b/rbi/openai/models/batch_error.rbi index 77e0e754..4cade073 100644 --- a/rbi/openai/models/batch_error.rbi +++ b/rbi/openai/models/batch_error.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class BatchError < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::BatchError, OpenAI::Internal::AnyHash) } # An error code identifying the error type. 
sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/batch_list_params.rbi b/rbi/openai/models/batch_list_params.rbi index 1843894d..7000c142 100644 --- a/rbi/openai/models/batch_list_params.rbi +++ b/rbi/openai/models/batch_list_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::BatchListParams, OpenAI::Internal::AnyHash) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/batch_request_counts.rbi b/rbi/openai/models/batch_request_counts.rbi index 6835bf20..88cf5914 100644 --- a/rbi/openai/models/batch_request_counts.rbi +++ b/rbi/openai/models/batch_request_counts.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class BatchRequestCounts < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::BatchRequestCounts, OpenAI::Internal::AnyHash) + end # Number of requests that have been completed successfully. sig { returns(Integer) } diff --git a/rbi/openai/models/batch_retrieve_params.rbi b/rbi/openai/models/batch_retrieve_params.rbi index b7289ba9..f749ed91 100644 --- a/rbi/openai/models/batch_retrieve_params.rbi +++ b/rbi/openai/models/batch_retrieve_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::BatchRetrieveParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/beta/assistant.rbi b/rbi/openai/models/beta/assistant.rbi index 9baa786f..4b635b82 100644 --- a/rbi/openai/models/beta/assistant.rbi +++ b/rbi/openai/models/beta/assistant.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class Assistant < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::Assistant, OpenAI::Internal::AnyHash) + end # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -270,7 +273,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Assistant::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -331,7 +339,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter`` tool. 
There can be a maximum of 20 files @@ -358,7 +371,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Assistant::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) diff --git a/rbi/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi index eaf13a70..c04793b1 100644 --- a/rbi/openai/models/beta/assistant_create_params.rbi +++ b/rbi/openai/models/beta/assistant_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams, + OpenAI::Internal::AnyHash + ) + end # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -312,7 +318,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -378,7 +389,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -405,7 +421,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -481,7 +502,12 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) + end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -587,7 +613,10 @@ module OpenAI class Auto < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Internal::AnyHash + ) end # Always `auto`. 
@@ -611,7 +640,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, + OpenAI::Internal::AnyHash + ) end sig do @@ -662,7 +694,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Internal::AnyHash + ) end # The number of tokens that overlap between chunks. The default value is `400`. diff --git a/rbi/openai/models/beta/assistant_delete_params.rbi b/rbi/openai/models/beta/assistant_delete_params.rbi index 48fee74b..d9efdc8d 100644 --- a/rbi/openai/models/beta/assistant_delete_params.rbi +++ b/rbi/openai/models/beta/assistant_delete_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::AssistantDeleteParams, + OpenAI::Internal::AnyHash + ) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/beta/assistant_deleted.rbi b/rbi/openai/models/beta/assistant_deleted.rbi index 7ebe179e..6925eb3b 100644 --- a/rbi/openai/models/beta/assistant_deleted.rbi +++ b/rbi/openai/models/beta/assistant_deleted.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class AssistantDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::AssistantDeleted, OpenAI::Internal::AnyHash) + end sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/models/beta/assistant_list_params.rbi b/rbi/openai/models/beta/assistant_list_params.rbi index b9644242..0d1099af 100644 --- a/rbi/openai/models/beta/assistant_list_params.rbi +++ b/rbi/openai/models/beta/assistant_list_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::AssistantListParams, OpenAI::Internal::AnyHash) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/beta/assistant_retrieve_params.rbi b/rbi/openai/models/beta/assistant_retrieve_params.rbi index 2ca3eb4e..4f006925 100644 --- a/rbi/openai/models/beta/assistant_retrieve_params.rbi +++ b/rbi/openai/models/beta/assistant_retrieve_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::AssistantRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/beta/assistant_stream_event.rbi b/rbi/openai/models/beta/assistant_stream_event.rbi index b69c801c..fc236a1b 100644 --- a/rbi/openai/models/beta/assistant_stream_event.rbi +++ b/rbi/openai/models/beta/assistant_stream_event.rbi @@ -58,7 +58,12 @@ module OpenAI class ThreadCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadCreated, + OpenAI::Internal::AnyHash + ) + end # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). @@ -109,7 +114,12 @@ module OpenAI class ThreadRunCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -149,7 +159,12 @@ module OpenAI class ThreadRunQueued < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -189,7 +204,12 @@ module OpenAI class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -229,7 +249,12 @@ module OpenAI class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -269,7 +294,12 @@ module OpenAI class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -309,7 +339,12 @@ module OpenAI class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -349,7 +384,12 @@ module OpenAI class ThreadRunFailed < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -389,7 +429,12 @@ module OpenAI class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -429,7 +474,12 @@ module OpenAI class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -469,7 +519,12 @@ module OpenAI class ThreadRunExpired < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -509,7 +564,12 @@ module OpenAI class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -550,7 +610,12 @@ module OpenAI class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -591,7 +656,12 @@ module OpenAI class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, + OpenAI::Internal::AnyHash + ) + end # Represents a run step delta i.e. any changed fields on a run step during # streaming. @@ -639,7 +709,12 @@ module OpenAI class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. 
sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -680,7 +755,12 @@ module OpenAI class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -721,7 +801,12 @@ module OpenAI class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -762,7 +847,12 @@ module OpenAI class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -803,7 +893,12 @@ module OpenAI class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -844,7 +939,12 @@ module OpenAI class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -885,7 +985,12 @@ module OpenAI class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, + OpenAI::Internal::AnyHash + ) + end # Represents a message delta i.e. any changed fields on a message during # streaming. @@ -928,7 +1033,12 @@ module OpenAI class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -969,7 +1079,12 @@ module OpenAI class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -1010,7 +1125,12 @@ module OpenAI class ErrorEvent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantStreamEvent::ErrorEvent, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::ErrorObject) } attr_reader :data diff --git a/rbi/openai/models/beta/assistant_tool_choice.rbi b/rbi/openai/models/beta/assistant_tool_choice.rbi index a018f2b0..11dd4c1a 100644 --- a/rbi/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/openai/models/beta/assistant_tool_choice.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class AssistantToolChoice < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::AssistantToolChoice, OpenAI::Internal::AnyHash) + end # The type of the tool. If type is `function`, the function name must be set sig { returns(OpenAI::Beta::AssistantToolChoice::Type::OrSymbol) } diff --git a/rbi/openai/models/beta/assistant_tool_choice_function.rbi b/rbi/openai/models/beta/assistant_tool_choice_function.rbi index 3f18016b..88ed6dbc 100644 --- a/rbi/openai/models/beta/assistant_tool_choice_function.rbi +++ b/rbi/openai/models/beta/assistant_tool_choice_function.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Beta class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::AssistantToolChoiceFunction, + OpenAI::Internal::AnyHash + ) + end # The name of the function to call. sig { returns(String) } diff --git a/rbi/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi index 27dbf4a1..b044c34b 100644 --- a/rbi/openai/models/beta/assistant_update_params.rbi +++ b/rbi/openai/models/beta/assistant_update_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::AssistantUpdateParams, + OpenAI::Internal::AnyHash + ) + end # The description of the assistant. The maximum length is 512 characters. 
sig { returns(T.nilable(String)) } @@ -529,7 +535,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantUpdateParams::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -595,7 +606,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available @@ -624,7 +640,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) diff --git a/rbi/openai/models/beta/code_interpreter_tool.rbi b/rbi/openai/models/beta/code_interpreter_tool.rbi index f9f7b8bb..3d315b25 100644 --- a/rbi/openai/models/beta/code_interpreter_tool.rbi +++ b/rbi/openai/models/beta/code_interpreter_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::CodeInterpreterTool, OpenAI::Internal::AnyHash) + end # The type of tool being defined: `code_interpreter` sig { returns(Symbol) } diff --git a/rbi/openai/models/beta/file_search_tool.rbi b/rbi/openai/models/beta/file_search_tool.rbi index d2442c3f..656a81da 100644 --- a/rbi/openai/models/beta/file_search_tool.rbi +++ b/rbi/openai/models/beta/file_search_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class FileSearchTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::FileSearchTool, OpenAI::Internal::AnyHash) + end # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -48,7 +51,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::FileSearchTool::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between @@ -129,7 +137,12 @@ module OpenAI class RankingOptions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions, + OpenAI::Internal::AnyHash + ) + end # The score threshold for the file search. All values must be a floating point # number between 0 and 1. 
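The mechanical change repeated across these RBI files replaces `T.self_type` with the fully qualified model class in each `OrHash` alias. A plausible motivation, inferred here rather than stated by the patch: `T.self_type` is only resolved inside method `sig`s, so a `T.type_alias` built from it does not reliably denote the enclosing model when the alias is referenced from another file. With concrete names, each alias becomes an ordinary, globally resolvable union. A minimal before/after sketch using `OpenAI::Batch` as the example:

# Before: identical shape in every model; T.self_type is only meaningful
# inside method signatures, not in a type alias evaluated at definition time.
OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }

# After: the alias names its own model, so OpenAI::Batch::OrHash is an
# ordinary union that any sig in any file can reference.
OrHash = T.type_alias { T.any(OpenAI::Batch, OpenAI::Internal::AnyHash) }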
diff --git a/rbi/openai/models/beta/function_tool.rbi b/rbi/openai/models/beta/function_tool.rbi index ad129493..87fcce58 100644 --- a/rbi/openai/models/beta/function_tool.rbi +++ b/rbi/openai/models/beta/function_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class FunctionTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::FunctionTool, OpenAI::Internal::AnyHash) + end sig { returns(OpenAI::FunctionDefinition) } attr_reader :function diff --git a/rbi/openai/models/beta/message_stream_event.rbi b/rbi/openai/models/beta/message_stream_event.rbi index 562d10c2..01893282 100644 --- a/rbi/openai/models/beta/message_stream_event.rbi +++ b/rbi/openai/models/beta/message_stream_event.rbi @@ -22,7 +22,12 @@ module OpenAI class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -63,7 +68,12 @@ module OpenAI class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -104,7 +114,12 @@ module OpenAI class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, + OpenAI::Internal::AnyHash + ) + end # Represents a message delta i.e. any changed fields on a message during # streaming. @@ -147,7 +162,12 @@ module OpenAI class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -188,7 +208,12 @@ module OpenAI class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete, + OpenAI::Internal::AnyHash + ) + end # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). diff --git a/rbi/openai/models/beta/run_step_stream_event.rbi b/rbi/openai/models/beta/run_step_stream_event.rbi index 7c052f14..5d42a8d3 100644 --- a/rbi/openai/models/beta/run_step_stream_event.rbi +++ b/rbi/openai/models/beta/run_step_stream_event.rbi @@ -24,7 +24,12 @@ module OpenAI class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. 
sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -65,7 +70,12 @@ module OpenAI class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -106,7 +116,12 @@ module OpenAI class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, + OpenAI::Internal::AnyHash + ) + end # Represents a run step delta i.e. any changed fields on a run step during # streaming. @@ -154,7 +169,12 @@ module OpenAI class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -195,7 +215,12 @@ module OpenAI class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -236,7 +261,12 @@ module OpenAI class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } @@ -277,7 +307,12 @@ module OpenAI class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired, + OpenAI::Internal::AnyHash + ) + end # Represents a step in execution of a run. sig { returns(OpenAI::Beta::Threads::Runs::RunStep) } diff --git a/rbi/openai/models/beta/run_stream_event.rbi b/rbi/openai/models/beta/run_stream_event.rbi index 77e86149..ed6893e1 100644 --- a/rbi/openai/models/beta/run_stream_event.rbi +++ b/rbi/openai/models/beta/run_stream_event.rbi @@ -26,7 +26,12 @@ module OpenAI class ThreadRunCreated < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCreated, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -66,7 +71,12 @@ module OpenAI class ThreadRunQueued < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunQueued, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -106,7 +116,12 @@ module OpenAI class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -146,7 +161,12 @@ module OpenAI class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -186,7 +206,12 @@ module OpenAI class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -226,7 +251,12 @@ module OpenAI class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -266,7 +296,12 @@ module OpenAI class ThreadRunFailed < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunFailed, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -306,7 +341,12 @@ module OpenAI class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -346,7 +386,12 @@ module OpenAI class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -386,7 +431,12 @@ module OpenAI class ThreadRunExpired < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::RunStreamEvent::ThreadRunExpired, + OpenAI::Internal::AnyHash + ) + end # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
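With the concrete aliases in place, a call site can accept either a typed model or a bare symbol-keyed hash wherever an `OrHash` is expected. A minimal sketch, assuming `OpenAI::Internal::AnyHash` is an alias for `T::Hash[Symbol, T.anything]` as elsewhere in this gem; `ToolRegistry` and `register` are invented names for illustration:

require "sorbet-runtime"

class ToolRegistry
  extend T::Sig

  # `tool` may be a typed OpenAI::Beta::CodeInterpreterTool instance or a
  # plain symbol-keyed hash; both satisfy the OrHash union.
  sig { params(tool: OpenAI::Beta::CodeInterpreterTool::OrHash).void }
  def register(tool)
    # a real implementation would coerce a hash into the model here
  end
end

ToolRegistry.new.register({type: :code_interpreter}) # hash form type-checks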
diff --git a/rbi/openai/models/beta/thread.rbi b/rbi/openai/models/beta/thread.rbi index 8f84f6d1..e828a1aa 100644 --- a/rbi/openai/models/beta/thread.rbi +++ b/rbi/openai/models/beta/thread.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class Thread < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::Thread, OpenAI::Internal::AnyHash) + end # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -92,7 +95,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Thread::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -151,7 +159,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Thread::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -178,7 +191,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Thread::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index e65fa5a3..7e2348cc 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams, + OpenAI::Internal::AnyHash + ) + end # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to @@ -418,7 +424,12 @@ module OpenAI class Thread < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread, + OpenAI::Internal::AnyHash + ) + end # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. @@ -529,7 +540,12 @@ module OpenAI class Message < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message, + OpenAI::Internal::AnyHash + ) + end # The text contents of the message. sig do @@ -736,7 +752,12 @@ module OpenAI class Attachment < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment, + OpenAI::Internal::AnyHash + ) + end # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } @@ -824,7 +845,10 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch, + OpenAI::Internal::AnyHash + ) end # The type of tool being defined: `file_search` @@ -858,7 +882,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -924,7 +953,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -953,7 +987,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -1029,7 +1068,12 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) + end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -1135,7 +1179,10 @@ module OpenAI class Auto < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Internal::AnyHash + ) end # Always `auto`. @@ -1159,7 +1206,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, + OpenAI::Internal::AnyHash + ) end sig do @@ -1210,7 +1260,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Internal::AnyHash + ) end # The number of tokens that overlap between chunks. The default value is `400`. 
@@ -1271,7 +1324,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -1337,7 +1395,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -1364,7 +1427,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -1398,7 +1466,12 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in diff --git a/rbi/openai/models/beta/thread_create_params.rbi b/rbi/openai/models/beta/thread_create_params.rbi index fa4f781f..aa3692ba 100644 --- a/rbi/openai/models/beta/thread_create_params.rbi +++ b/rbi/openai/models/beta/thread_create_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadCreateParams, OpenAI::Internal::AnyHash) + end # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. @@ -100,7 +103,12 @@ module OpenAI class Message < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::Message, + OpenAI::Internal::AnyHash + ) + end # The text contents of the message. sig do @@ -298,7 +306,12 @@ module OpenAI class Attachment < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::Message::Attachment, + OpenAI::Internal::AnyHash + ) + end # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } @@ -385,7 +398,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -418,7 +436,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -484,7 +507,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -511,7 +539,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) @@ -587,7 +620,12 @@ module OpenAI class VectorStore < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore, + OpenAI::Internal::AnyHash + ) + end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -693,7 +731,10 @@ module OpenAI class Auto < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, + OpenAI::Internal::AnyHash + ) end # Always `auto`. @@ -717,7 +758,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, + OpenAI::Internal::AnyHash + ) end sig do @@ -768,7 +812,10 @@ module OpenAI class Static < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + OpenAI::Internal::AnyHash + ) end # The number of tokens that overlap between chunks. The default value is `400`. 
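The `SorbetRuntimeSupport` module added in `rbi/openai/internal/util.rbi` above only declares signatures. One way `define_sorbet_constant!` and `const_missing` could plausibly cooperate, letting the gem register type-alias constants without loading sorbet-runtime at boot; the body below is an assumption reconstructed from those signatures, not the gem's implementation:

module SorbetRuntimeSupportSketch
  class MissingSorbetRuntimeError < ::RuntimeError; end

  private def sorbet_runtime_constants
    @sorbet_runtime_constants ||= {}
  end

  # Resolve a lazily registered constant on first reference, failing loudly
  # if sorbet-runtime was never required.
  def const_missing(name)
    blk = sorbet_runtime_constants[name]
    return super unless blk
    unless defined?(::T)
      raise MissingSorbetRuntimeError, "require \"sorbet-runtime\" to use #{name}"
    end
    const_set(name, blk.call)
  end

  # Register a constant's definition without evaluating it eagerly.
  def define_sorbet_constant!(name, &blk)
    sorbet_runtime_constants[name] = blk
  end
end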
diff --git a/rbi/openai/models/beta/thread_delete_params.rbi b/rbi/openai/models/beta/thread_delete_params.rbi index 6b2c04ba..0909624a 100644 --- a/rbi/openai/models/beta/thread_delete_params.rbi +++ b/rbi/openai/models/beta/thread_delete_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadDeleteParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/beta/thread_deleted.rbi b/rbi/openai/models/beta/thread_deleted.rbi index c648d9ac..62402b00 100644 --- a/rbi/openai/models/beta/thread_deleted.rbi +++ b/rbi/openai/models/beta/thread_deleted.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class ThreadDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadDeleted, OpenAI::Internal::AnyHash) + end sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/models/beta/thread_retrieve_params.rbi b/rbi/openai/models/beta/thread_retrieve_params.rbi index 64fd5cd2..8e36a4e7 100644 --- a/rbi/openai/models/beta/thread_retrieve_params.rbi +++ b/rbi/openai/models/beta/thread_retrieve_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadRetrieveParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/beta/thread_stream_event.rbi b/rbi/openai/models/beta/thread_stream_event.rbi index cd6d25a0..4674c89f 100644 --- a/rbi/openai/models/beta/thread_stream_event.rbi +++ b/rbi/openai/models/beta/thread_stream_event.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Beta class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadStreamEvent, OpenAI::Internal::AnyHash) + end # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). diff --git a/rbi/openai/models/beta/thread_update_params.rbi b/rbi/openai/models/beta/thread_update_params.rbi index 4244ec93..8d9389b0 100644 --- a/rbi/openai/models/beta/thread_update_params.rbi +++ b/rbi/openai/models/beta/thread_update_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Beta::ThreadUpdateParams, OpenAI::Internal::AnyHash) + end # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -77,7 +80,12 @@ module OpenAI class ToolResources < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadUpdateParams::ToolResources, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -143,7 +151,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files @@ -170,7 +183,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) diff --git a/rbi/openai/models/beta/threads/file_citation_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_annotation.rbi index 2fcaeee0..09c69d08 100644 --- a/rbi/openai/models/beta/threads/file_citation_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_citation_annotation.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationAnnotation, + OpenAI::Internal::AnyHash + ) + end sig { returns(Integer) } attr_accessor :end_index @@ -76,7 +81,12 @@ module OpenAI class FileCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, + OpenAI::Internal::AnyHash + ) + end # The ID of the specific File the citation is from. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi index 33d0dab1..574352cd 100644 --- a/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_citation_delta_annotation.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation, + OpenAI::Internal::AnyHash + ) + end # The index of the annotation in the text content part. sig { returns(Integer) } @@ -97,7 +102,12 @@ module OpenAI class FileCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + OpenAI::Internal::AnyHash + ) + end # The ID of the specific File the citation is from. 
sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/file_path_annotation.rbi b/rbi/openai/models/beta/threads/file_path_annotation.rbi index 1a6776cb..6c989044 100644 --- a/rbi/openai/models/beta/threads/file_path_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_path_annotation.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class FilePathAnnotation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FilePathAnnotation, + OpenAI::Internal::AnyHash + ) + end sig { returns(Integer) } attr_accessor :end_index @@ -72,7 +77,12 @@ module OpenAI class FilePath < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FilePathAnnotation::FilePath, + OpenAI::Internal::AnyHash + ) + end # The ID of the file that was generated. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi b/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi index d8fa4d69..437a2151 100644 --- a/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi +++ b/rbi/openai/models/beta/threads/file_path_delta_annotation.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FilePathDeltaAnnotation, + OpenAI::Internal::AnyHash + ) + end # The index of the annotation in the text content part. sig { returns(Integer) } @@ -96,7 +101,12 @@ module OpenAI class FilePath < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, + OpenAI::Internal::AnyHash + ) + end # The ID of the file that was generated. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/image_file.rbi b/rbi/openai/models/beta/threads/image_file.rbi index 78802b93..2c805eae 100644 --- a/rbi/openai/models/beta/threads/image_file.rbi +++ b/rbi/openai/models/beta/threads/image_file.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class ImageFile < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::ImageFile, OpenAI::Internal::AnyHash) + end # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. 
Set `purpose="vision"` when uploading the File if you diff --git a/rbi/openai/models/beta/threads/image_file_content_block.rbi b/rbi/openai/models/beta/threads/image_file_content_block.rbi index 8ff6beaa..407bd31a 100644 --- a/rbi/openai/models/beta/threads/image_file_content_block.rbi +++ b/rbi/openai/models/beta/threads/image_file_content_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileContentBlock, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Beta::Threads::ImageFile) } attr_reader :image_file diff --git a/rbi/openai/models/beta/threads/image_file_delta.rbi b/rbi/openai/models/beta/threads/image_file_delta.rbi index 33be5605..86951221 100644 --- a/rbi/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/openai/models/beta/threads/image_file_delta.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileDelta, + OpenAI::Internal::AnyHash + ) + end # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. diff --git a/rbi/openai/models/beta/threads/image_file_delta_block.rbi b/rbi/openai/models/beta/threads/image_file_delta_block.rbi index ea1914cc..9efa65de 100644 --- a/rbi/openai/models/beta/threads/image_file_delta_block.rbi +++ b/rbi/openai/models/beta/threads/image_file_delta_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageFileDeltaBlock, + OpenAI::Internal::AnyHash + ) + end # The index of the content part in the message. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/image_url.rbi b/rbi/openai/models/beta/threads/image_url.rbi index 04b0f75d..52f2c4ad 100644 --- a/rbi/openai/models/beta/threads/image_url.rbi +++ b/rbi/openai/models/beta/threads/image_url.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class ImageURL < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::ImageURL, OpenAI::Internal::AnyHash) + end # The external URL of the image, must be a supported image types: jpeg, jpg, png, # gif, webp. 
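Every hunk in this patch applies the same mechanical change: the `OrHash` alias drops `T.self_type` and instead names the concrete model class in its union with `OpenAI::Internal::AnyHash`. A minimal sketch of the resulting shape, using a hypothetical `ExampleModel` that is not part of this patch:

    module OpenAI
      # Hypothetical model (not in this patch), shown only to illustrate
      # the OrHash pattern the hunks above repeat.
      class ExampleModel < OpenAI::Internal::Type::BaseModel
        OrHash =
          T.type_alias do
            T.any(OpenAI::ExampleModel, OpenAI::Internal::AnyHash)
          end

        # Signatures elsewhere can then accept either a built model or a
        # plain hash with the same keys:
        sig { params(input: OpenAI::ExampleModel::OrHash).void }
        def self.accept(input)
        end
      end
    end

Naming the class directly keeps the alias stable wherever it is referenced, whereas `T.self_type` resolves relative to the enclosing context.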
diff --git a/rbi/openai/models/beta/threads/image_url_content_block.rbi b/rbi/openai/models/beta/threads/image_url_content_block.rbi index b2e47c68..eeaa7a8d 100644 --- a/rbi/openai/models/beta/threads/image_url_content_block.rbi +++ b/rbi/openai/models/beta/threads/image_url_content_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageURLContentBlock, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Beta::Threads::ImageURL) } attr_reader :image_url diff --git a/rbi/openai/models/beta/threads/image_url_delta.rbi b/rbi/openai/models/beta/threads/image_url_delta.rbi index a742d53f..b760eefa 100644 --- a/rbi/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/openai/models/beta/threads/image_url_delta.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageURLDelta, + OpenAI::Internal::AnyHash + ) + end # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. diff --git a/rbi/openai/models/beta/threads/image_url_delta_block.rbi b/rbi/openai/models/beta/threads/image_url_delta_block.rbi index e2cc3d25..0a4c523c 100644 --- a/rbi/openai/models/beta/threads/image_url_delta_block.rbi +++ b/rbi/openai/models/beta/threads/image_url_delta_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::ImageURLDeltaBlock, + OpenAI::Internal::AnyHash + ) + end # The index of the content part in the message. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/message.rbi b/rbi/openai/models/beta/threads/message.rbi index 02bd3c97..82120763 100644 --- a/rbi/openai/models/beta/threads/message.rbi +++ b/rbi/openai/models/beta/threads/message.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class Message < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::Message, OpenAI::Internal::AnyHash) + end # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -218,7 +220,12 @@ module OpenAI class Attachment < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Message::Attachment, + OpenAI::Internal::AnyHash + ) + end # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } @@ -305,7 +312,12 @@ module OpenAI class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly, + OpenAI::Internal::AnyHash + ) + end # The type of tool being defined: `file_search` sig { returns(Symbol) } @@ -337,7 +349,12 @@ module OpenAI class IncompleteDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Message::IncompleteDetails, + OpenAI::Internal::AnyHash + ) + end # The reason the message is incomplete. sig do diff --git a/rbi/openai/models/beta/threads/message_create_params.rbi b/rbi/openai/models/beta/threads/message_create_params.rbi index 60fb47c0..4e6416b0 100644 --- a/rbi/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/openai/models/beta/threads/message_create_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageCreateParams, + OpenAI::Internal::AnyHash + ) + end # The text contents of the message. sig do @@ -211,7 +216,12 @@ module OpenAI class Attachment < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageCreateParams::Attachment, + OpenAI::Internal::AnyHash + ) + end # The ID of the file to attach to the message. sig { returns(T.nilable(String)) } @@ -298,7 +308,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The type of tool being defined: `file_search` sig { returns(Symbol) } diff --git a/rbi/openai/models/beta/threads/message_delete_params.rbi b/rbi/openai/models/beta/threads/message_delete_params.rbi index 0811af43..9c875c22 100644 --- a/rbi/openai/models/beta/threads/message_delete_params.rbi +++ b/rbi/openai/models/beta/threads/message_delete_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageDeleteParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/message_deleted.rbi b/rbi/openai/models/beta/threads/message_deleted.rbi index 73805106..d794d043 100644 --- a/rbi/openai/models/beta/threads/message_deleted.rbi +++ b/rbi/openai/models/beta/threads/message_deleted.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class MessageDeleted < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageDeleted, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/models/beta/threads/message_delta.rbi b/rbi/openai/models/beta/threads/message_delta.rbi index 7278f6d3..f234666f 100644 --- a/rbi/openai/models/beta/threads/message_delta.rbi +++ 
b/rbi/openai/models/beta/threads/message_delta.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class MessageDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageDelta, + OpenAI::Internal::AnyHash + ) + end # The content of the message in array of text and/or images. sig do diff --git a/rbi/openai/models/beta/threads/message_delta_event.rbi b/rbi/openai/models/beta/threads/message_delta_event.rbi index 4a93d6d2..9dc95d52 100644 --- a/rbi/openai/models/beta/threads/message_delta_event.rbi +++ b/rbi/openai/models/beta/threads/message_delta_event.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The identifier of the message, which can be referenced in API endpoints. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/message_list_params.rbi b/rbi/openai/models/beta/threads/message_list_params.rbi index 158f377d..7873262c 100644 --- a/rbi/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/openai/models/beta/threads/message_list_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageListParams, + OpenAI::Internal::AnyHash + ) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/beta/threads/message_retrieve_params.rbi b/rbi/openai/models/beta/threads/message_retrieve_params.rbi index 4a7da6f0..c0d12ba0 100644 --- a/rbi/openai/models/beta/threads/message_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/message_retrieve_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/message_update_params.rbi b/rbi/openai/models/beta/threads/message_update_params.rbi index ecbd63ee..767a7340 100644 --- a/rbi/openai/models/beta/threads/message_update_params.rbi +++ b/rbi/openai/models/beta/threads/message_update_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::MessageUpdateParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/refusal_content_block.rbi b/rbi/openai/models/beta/threads/refusal_content_block.rbi index 96ddb8b1..24ad27e7 100644 --- a/rbi/openai/models/beta/threads/refusal_content_block.rbi +++ b/rbi/openai/models/beta/threads/refusal_content_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class RefusalContentBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RefusalContentBlock, + OpenAI::Internal::AnyHash 
+ ) + end sig { returns(String) } attr_accessor :refusal diff --git a/rbi/openai/models/beta/threads/refusal_delta_block.rbi b/rbi/openai/models/beta/threads/refusal_delta_block.rbi index c6854430..7fe5eed0 100644 --- a/rbi/openai/models/beta/threads/refusal_delta_block.rbi +++ b/rbi/openai/models/beta/threads/refusal_delta_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RefusalDeltaBlock, + OpenAI::Internal::AnyHash + ) + end # The index of the refusal part in the message. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi b/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi index c2625cd6..c92a1e83 100644 --- a/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi +++ b/rbi/openai/models/beta/threads/required_action_function_tool_call.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RequiredActionFunctionToolCall, + OpenAI::Internal::AnyHash + ) + end # The ID of the tool call. This ID must be referenced when you submit the tool # outputs in using the @@ -74,7 +79,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, + OpenAI::Internal::AnyHash + ) + end # The arguments that the model expects you to pass to the function. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index c769a3f6..aba1382a 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class Run < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::Run, OpenAI::Internal::AnyHash) + end # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -471,7 +473,12 @@ module OpenAI class IncompleteDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::IncompleteDetails, + OpenAI::Internal::AnyHash + ) + end # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -557,7 +564,12 @@ module OpenAI class LastError < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::LastError, + OpenAI::Internal::AnyHash + ) + end # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. sig do @@ -636,7 +648,12 @@ module OpenAI class RequiredAction < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::RequiredAction, + OpenAI::Internal::AnyHash + ) + end # Details on the tool outputs needed for this run to continue. 
sig do @@ -689,7 +706,12 @@ module OpenAI class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + OpenAI::Internal::AnyHash + ) + end # A list of the relevant tool calls. sig do @@ -733,7 +755,12 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -825,7 +852,12 @@ module OpenAI class Usage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::Usage, + OpenAI::Internal::AnyHash + ) + end # Number of completion tokens used over the course of the run. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/run_cancel_params.rbi b/rbi/openai/models/beta/threads/run_cancel_params.rbi index 1101750b..3879ca2b 100644 --- a/rbi/openai/models/beta/threads/run_cancel_params.rbi +++ b/rbi/openai/models/beta/threads/run_cancel_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCancelParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 85823188..7e4e98e4 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams, + OpenAI::Internal::AnyHash + ) + end # The ID of the # [assistant](https://platform.openai.com/docs/api-reference/assistants) to use to @@ -444,7 +449,12 @@ module OpenAI class AdditionalMessage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage, + OpenAI::Internal::AnyHash + ) + end # The text contents of the message. sig do @@ -651,7 +661,12 @@ module OpenAI class Attachment < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment, + OpenAI::Internal::AnyHash + ) + end # The ID of the file to attach to the message. 
sig { returns(T.nilable(String)) } @@ -739,7 +754,10 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch, + OpenAI::Internal::AnyHash + ) end # The type of tool being defined: `file_search` @@ -794,7 +812,12 @@ module OpenAI class TruncationStrategy < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in diff --git a/rbi/openai/models/beta/threads/run_list_params.rbi b/rbi/openai/models/beta/threads/run_list_params.rbi index a8904daf..b5cefecb 100644 --- a/rbi/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/openai/models/beta/threads/run_list_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunListParams, + OpenAI::Internal::AnyHash + ) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/beta/threads/run_retrieve_params.rbi b/rbi/openai/models/beta/threads/run_retrieve_params.rbi index de4d56da..0d257698 100644 --- a/rbi/openai/models/beta/threads/run_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/run_retrieve_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi b/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi index 9be1a4b3..490d18a2 100644 --- a/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi +++ b/rbi/openai/models/beta/threads/run_submit_tool_outputs_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunSubmitToolOutputsParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id @@ -59,7 +64,12 @@ module OpenAI class ToolOutput < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput, + OpenAI::Internal::AnyHash + ) + end # The output of the tool call to be submitted to continue the run. 
sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/run_update_params.rbi b/rbi/openai/models/beta/threads/run_update_params.rbi index c99a7cf2..f92b8cea 100644 --- a/rbi/openai/models/beta/threads/run_update_params.rbi +++ b/rbi/openai/models/beta/threads/run_update_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunUpdateParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi index a48e0434..06eb6486 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_logs.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, + OpenAI::Internal::AnyHash + ) + end # The index of the output in the outputs array. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi index 67c7e444..52c083be 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_output_image.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage, + OpenAI::Internal::AnyHash + ) + end # The index of the output in the outputs array. sig { returns(Integer) } @@ -66,7 +71,12 @@ module OpenAI class Image < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, + OpenAI::Internal::AnyHash + ) + end # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index a1829caf..2989b129 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, + OpenAI::Internal::AnyHash + ) + end # The ID of the tool call. sig { returns(String) } @@ -69,7 +74,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # The input to the Code Interpreter tool call. 
sig { returns(String) } @@ -145,7 +155,10 @@ module OpenAI class Logs < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, + OpenAI::Internal::AnyHash + ) end # The text output from the Code Interpreter tool call. @@ -176,7 +189,10 @@ module OpenAI class Image < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image, + OpenAI::Internal::AnyHash + ) end sig do @@ -227,7 +243,10 @@ module OpenAI class Image < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + OpenAI::Internal::AnyHash + ) end # The [file](https://platform.openai.com/docs/api-reference/files) ID of the diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index b1bb2cf1..028059df 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, + OpenAI::Internal::AnyHash + ) + end # The index of the tool call in the tool calls array. sig { returns(Integer) } @@ -82,7 +87,12 @@ module OpenAI class CodeInterpreter < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end # The input to the Code Interpreter tool call. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi index 6ca0cd80..343bec61 100644 --- a/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class FileSearchToolCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCall, + OpenAI::Internal::AnyHash + ) + end # The ID of the tool call object. sig { returns(String) } @@ -68,7 +73,12 @@ module OpenAI class FileSearch < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + OpenAI::Internal::AnyHash + ) + end # The ranking options for the file search. sig do @@ -146,7 +156,12 @@ module OpenAI class RankingOptions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + OpenAI::Internal::AnyHash + ) + end # The ranker to use for the file search. 
If not specified will use the `auto` # ranker. @@ -231,7 +246,12 @@ module OpenAI class Result < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result, + OpenAI::Internal::AnyHash + ) + end # The ID of the file that result was found in. sig { returns(String) } @@ -314,7 +334,10 @@ module OpenAI class Content < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content, + OpenAI::Internal::AnyHash + ) end # The text content of the file. diff --git a/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi index 62cd8cb2..78d5bb30 100644 --- a/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/file_search_tool_call_delta.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, + OpenAI::Internal::AnyHash + ) + end # For now, this is always going to be an empty object. sig { returns(T.anything) } diff --git a/rbi/openai/models/beta/threads/runs/function_tool_call.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call.rbi index 90fa9db4..55ee8104 100644 --- a/rbi/openai/models/beta/threads/runs/function_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/function_tool_call.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class FunctionToolCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FunctionToolCall, + OpenAI::Internal::AnyHash + ) + end # The ID of the tool call object. sig { returns(String) } @@ -66,7 +71,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, + OpenAI::Internal::AnyHash + ) + end # The arguments passed to the function. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi index 8b3a5afa..73ec53a9 100644 --- a/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/function_tool_call_delta.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta, + OpenAI::Internal::AnyHash + ) + end # The index of the tool call in the tool calls array. sig { returns(Integer) } @@ -81,7 +86,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function, + OpenAI::Internal::AnyHash + ) + end # The arguments passed to the function. 
sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi b/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi index 946fb84a..1c11f5d0 100644 --- a/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/message_creation_step_details.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -57,7 +62,12 @@ module OpenAI class MessageCreation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + OpenAI::Internal::AnyHash + ) + end # The ID of the message that was created by this run step. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi index 9555206e..1204d5fe 100644 --- a/rbi/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step.rbi @@ -9,7 +9,12 @@ module OpenAI module Runs class RunStep < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStep, + OpenAI::Internal::AnyHash + ) + end # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } @@ -239,7 +244,12 @@ module OpenAI class LastError < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStep::LastError, + OpenAI::Internal::AnyHash + ) + end # One of `server_error` or `rate_limit_exceeded`. sig do @@ -423,7 +433,12 @@ module OpenAI class Usage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStep::Usage, + OpenAI::Internal::AnyHash + ) + end # Number of completion tokens used over the course of the run step. sig { returns(Integer) } diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi index 2f6d25e5..cbe1297d 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi @@ -9,7 +9,12 @@ module OpenAI module Runs class RunStepDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDelta, + OpenAI::Internal::AnyHash + ) + end # The details of the run step. 
sig do diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi index 75d170fe..d3a77c15 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_event.rbi @@ -9,7 +9,12 @@ module OpenAI module Runs class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The identifier of the run step, which can be referenced in API endpoints. sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi index caad103a..b0304be4 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta_message_delta.rbi @@ -9,7 +9,12 @@ module OpenAI module Runs class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, + OpenAI::Internal::AnyHash + ) + end # Always `message_creation`. sig { returns(Symbol) } @@ -61,7 +66,12 @@ module OpenAI class MessageCreation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, + OpenAI::Internal::AnyHash + ) + end # The ID of the message that was created by this run step. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/openai/models/beta/threads/runs/step_list_params.rbi index 013b8ef3..618a115d 100644 --- a/rbi/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/openai/models/beta/threads/runs/step_list_params.rbi @@ -10,7 +10,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::StepListParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi b/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi index bb2bc31f..c1a7ae02 100644 --- a/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi +++ b/rbi/openai/models/beta/threads/runs/step_retrieve_params.rbi @@ -10,7 +10,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::StepRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :thread_id diff --git a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi index 09699ee5..e034b7e3 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + 
OpenAI::Beta::Threads::Runs::ToolCallDeltaObject, + OpenAI::Internal::AnyHash + ) + end # Always `tool_calls`. sig { returns(Symbol) } diff --git a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi index b34c48e7..61ef18e1 100644 --- a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -7,7 +7,12 @@ module OpenAI module Runs class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::Runs::ToolCallsStepDetails, + OpenAI::Internal::AnyHash + ) + end # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or diff --git a/rbi/openai/models/beta/threads/text.rbi b/rbi/openai/models/beta/threads/text.rbi index 9827491d..25110168 100644 --- a/rbi/openai/models/beta/threads/text.rbi +++ b/rbi/openai/models/beta/threads/text.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class Text < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::Text, OpenAI::Internal::AnyHash) + end sig do returns( diff --git a/rbi/openai/models/beta/threads/text_content_block.rbi b/rbi/openai/models/beta/threads/text_content_block.rbi index 7474675b..81340c03 100644 --- a/rbi/openai/models/beta/threads/text_content_block.rbi +++ b/rbi/openai/models/beta/threads/text_content_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class TextContentBlock < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::TextContentBlock, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Beta::Threads::Text) } attr_reader :text diff --git a/rbi/openai/models/beta/threads/text_content_block_param.rbi b/rbi/openai/models/beta/threads/text_content_block_param.rbi index d96847a2..4cdfec42 100644 --- a/rbi/openai/models/beta/threads/text_content_block_param.rbi +++ b/rbi/openai/models/beta/threads/text_content_block_param.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class TextContentBlockParam < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::TextContentBlockParam, + OpenAI::Internal::AnyHash + ) + end # Text content to be sent to the model sig { returns(String) } diff --git a/rbi/openai/models/beta/threads/text_delta.rbi b/rbi/openai/models/beta/threads/text_delta.rbi index cd39960e..546b0523 100644 --- a/rbi/openai/models/beta/threads/text_delta.rbi +++ b/rbi/openai/models/beta/threads/text_delta.rbi @@ -6,7 +6,9 @@ module OpenAI module Threads class TextDelta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any(OpenAI::Beta::Threads::TextDelta, OpenAI::Internal::AnyHash) + end sig do returns( diff --git a/rbi/openai/models/beta/threads/text_delta_block.rbi b/rbi/openai/models/beta/threads/text_delta_block.rbi index 7de27ada..1d6a884a 100644 --- a/rbi/openai/models/beta/threads/text_delta_block.rbi +++ b/rbi/openai/models/beta/threads/text_delta_block.rbi @@ -6,7 +6,12 @@ module OpenAI module Threads class TextDeltaBlock < 
OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Beta::Threads::TextDeltaBlock, + OpenAI::Internal::AnyHash + ) + end # The index of the content part in the message. sig { returns(Integer) } diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index 1e90aecc..c2353966 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -6,7 +6,10 @@ module OpenAI module Chat class ChatCompletion < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Chat::ChatCompletion, OpenAI::Internal::AnyHash) + end # A unique identifier for the chat completion. sig { returns(String) } @@ -147,7 +150,12 @@ module OpenAI class Choice < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletion::Choice, + OpenAI::Internal::AnyHash + ) + end # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -292,7 +300,12 @@ module OpenAI class Logprobs < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletion::Choice::Logprobs, + OpenAI::Internal::AnyHash + ) + end # A list of message content tokens with log probability information. sig do diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index b56149a1..2e10edd1 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -7,7 +7,13 @@ module OpenAI module Chat class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAssistantMessageParam, + OpenAI::Internal::AnyHash + ) + end # The role of the messages author, in this case `assistant`. sig { returns(Symbol) } @@ -187,7 +193,12 @@ module OpenAI class Audio < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, + OpenAI::Internal::AnyHash + ) + end # Unique identifier for a previous audio response from the model. sig { returns(String) } @@ -271,7 +282,12 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, + OpenAI::Internal::AnyHash + ) + end # The arguments to call the function with, as generated by the model in JSON # format. 
Note that the model does not always generate valid JSON, and may diff --git a/rbi/openai/models/chat/chat_completion_audio.rbi b/rbi/openai/models/chat/chat_completion_audio.rbi index 34c22c71..59c400f9 100644 --- a/rbi/openai/models/chat/chat_completion_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_audio.rbi @@ -6,7 +6,10 @@ module OpenAI module Chat class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Chat::ChatCompletionAudio, OpenAI::Internal::AnyHash) + end # Unique identifier for this audio response. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index 03887e62..49a3dd9d 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAudioParam, + OpenAI::Internal::AnyHash + ) + end # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index c495ed49..9d06ce7f 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -6,7 +6,10 @@ module OpenAI module Chat class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Chat::ChatCompletionChunk, OpenAI::Internal::AnyHash) + end # A unique identifier for the chat completion. Each chunk has the same ID. sig { returns(String) } @@ -167,7 +170,12 @@ module OpenAI class Choice < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice, + OpenAI::Internal::AnyHash + ) + end # A chat completion delta generated by streamed model responses. sig { returns(OpenAI::Chat::ChatCompletionChunk::Choice::Delta) } @@ -267,7 +275,12 @@ module OpenAI class Delta < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + OpenAI::Internal::AnyHash + ) + end # The contents of the chunk message. sig { returns(T.nilable(String)) } @@ -385,7 +398,12 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + OpenAI::Internal::AnyHash + ) + end # The arguments to call the function with, as generated by the model in JSON # format. 
Note that the model does not always generate valid JSON, and may @@ -479,7 +497,12 @@ module OpenAI class ToolCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall, + OpenAI::Internal::AnyHash + ) + end sig { returns(Integer) } attr_accessor :index @@ -563,7 +586,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + OpenAI::Internal::AnyHash + ) + end # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may @@ -692,7 +720,12 @@ module OpenAI class Logprobs < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, + OpenAI::Internal::AnyHash + ) + end # A list of message content tokens with log probability information. sig do diff --git a/rbi/openai/models/chat/chat_completion_content_part.rbi b/rbi/openai/models/chat/chat_completion_content_part.rbi index dbe8d522..33230ab9 100644 --- a/rbi/openai/models/chat/chat_completion_content_part.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part.rbi @@ -22,7 +22,12 @@ module OpenAI class File < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPart::File, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Chat::ChatCompletionContentPart::File::File) } attr_reader :file @@ -66,7 +71,12 @@ module OpenAI class File < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPart::File::File, + OpenAI::Internal::AnyHash + ) + end # The base64 encoded file data, used when passing the file to the model as a # string. diff --git a/rbi/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/openai/models/chat/chat_completion_content_part_image.rbi index 10b1a355..5c2ff978 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_image.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartImage, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Chat::ChatCompletionContentPartImage::ImageURL) } attr_reader :image_url @@ -51,7 +57,12 @@ module OpenAI class ImageURL < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, + OpenAI::Internal::AnyHash + ) + end # Either a URL of the image or the base64 encoded image data. 
sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi index f1f1270c..addd6b5b 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -7,7 +7,13 @@ module OpenAI module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartInputAudio, + OpenAI::Internal::AnyHash + ) + end sig do returns(OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio) @@ -55,7 +61,12 @@ module OpenAI class InputAudio < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, + OpenAI::Internal::AnyHash + ) + end # Base64 encoded audio data. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi index 73724c93..8e6e0c55 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_refusal.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartRefusal, + OpenAI::Internal::AnyHash + ) + end # The refusal message generated by the model. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_content_part_text.rbi b/rbi/openai/models/chat/chat_completion_content_part_text.rbi index 70aea8ec..c11d4932 100644 --- a/rbi/openai/models/chat/chat_completion_content_part_text.rbi +++ b/rbi/openai/models/chat/chat_completion_content_part_text.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Internal::AnyHash + ) + end # The text content. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_deleted.rbi b/rbi/openai/models/chat/chat_completion_deleted.rbi index f63fdc1d..9edfdd85 100644 --- a/rbi/openai/models/chat/chat_completion_deleted.rbi +++ b/rbi/openai/models/chat/chat_completion_deleted.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionDeleted, + OpenAI::Internal::AnyHash + ) + end # The ID of the chat completion that was deleted. 
sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi index 956aaabe..8326b4e7 100644 --- a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi @@ -7,7 +7,13 @@ module OpenAI module Chat class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionDeveloperMessageParam, + OpenAI::Internal::AnyHash + ) + end # The contents of the developer message. sig do diff --git a/rbi/openai/models/chat/chat_completion_function_call_option.rbi b/rbi/openai/models/chat/chat_completion_function_call_option.rbi index 51a20194..bcccb328 100644 --- a/rbi/openai/models/chat/chat_completion_function_call_option.rbi +++ b/rbi/openai/models/chat/chat_completion_function_call_option.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionFunctionCallOption, + OpenAI::Internal::AnyHash + ) + end # The name of the function to call. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_function_message_param.rbi b/rbi/openai/models/chat/chat_completion_function_message_param.rbi index d5029286..3b7e878a 100644 --- a/rbi/openai/models/chat/chat_completion_function_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_function_message_param.rbi @@ -7,7 +7,13 @@ module OpenAI module Chat class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionFunctionMessageParam, + OpenAI::Internal::AnyHash + ) + end # The contents of the function message. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi index a7166480..85e74838 100644 --- a/rbi/openai/models/chat/chat_completion_message.rbi +++ b/rbi/openai/models/chat/chat_completion_message.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessage, + OpenAI::Internal::AnyHash + ) + end # The contents of the message. sig { returns(T.nilable(String)) } @@ -137,7 +143,12 @@ module OpenAI class Annotation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessage::Annotation, + OpenAI::Internal::AnyHash + ) + end # The type of the URL citation. Always `url_citation`. sig { returns(Symbol) } @@ -189,7 +200,12 @@ module OpenAI class URLCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation, + OpenAI::Internal::AnyHash + ) + end # The index of the last character of the URL citation in the message. 
sig { returns(Integer) } @@ -245,7 +261,12 @@ module OpenAI class FunctionCall < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessage::FunctionCall, + OpenAI::Internal::AnyHash + ) + end # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may diff --git a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi index 98f7b380..0e512346 100644 --- a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageToolCall, + OpenAI::Internal::AnyHash + ) + end # The ID of the tool call. sig { returns(String) } @@ -60,7 +66,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageToolCall::Function, + OpenAI::Internal::AnyHash + ) + end # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may diff --git a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi index 42d4f7b5..daed342c 100644 --- a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionNamedToolChoice, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::Chat::ChatCompletionNamedToolChoice::Function) } attr_reader :function @@ -52,7 +58,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionNamedToolChoice::Function, + OpenAI::Internal::AnyHash + ) + end # The name of the function to call. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi index fa1a0fba..3e8b7a9c 100644 --- a/rbi/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/openai/models/chat/chat_completion_prediction_content.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionPredictionContent, + OpenAI::Internal::AnyHash + ) + end # The content that should be matched when generating a model response. 
If # generated tokens would match this content, the entire model response can be diff --git a/rbi/openai/models/chat/chat_completion_store_message.rbi b/rbi/openai/models/chat/chat_completion_store_message.rbi index 67c86275..c19850a1 100644 --- a/rbi/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/openai/models/chat/chat_completion_store_message.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionStoreMessage, + OpenAI::Internal::AnyHash + ) + end # The identifier of the chat message. sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_stream_options.rbi b/rbi/openai/models/chat/chat_completion_stream_options.rbi index 4ac025c0..7061b1ff 100644 --- a/rbi/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/openai/models/chat/chat_completion_stream_options.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionStreamOptions, + OpenAI::Internal::AnyHash + ) + end # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire diff --git a/rbi/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi index 099a437a..bd7d25b4 100644 --- a/rbi/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_system_message_param.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionSystemMessageParam, + OpenAI::Internal::AnyHash + ) + end # The contents of the system message. sig do diff --git a/rbi/openai/models/chat/chat_completion_token_logprob.rbi b/rbi/openai/models/chat/chat_completion_token_logprob.rbi index e38eaadc..0cca0836 100644 --- a/rbi/openai/models/chat/chat_completion_token_logprob.rbi +++ b/rbi/openai/models/chat/chat_completion_token_logprob.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionTokenLogprob, + OpenAI::Internal::AnyHash + ) + end # The token. sig { returns(String) } @@ -81,7 +87,12 @@ module OpenAI class TopLogprob < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob, + OpenAI::Internal::AnyHash + ) + end # The token. 
sig { returns(String) } diff --git a/rbi/openai/models/chat/chat_completion_tool.rbi b/rbi/openai/models/chat/chat_completion_tool.rbi index 3ea290d0..0fcacb06 100644 --- a/rbi/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/openai/models/chat/chat_completion_tool.rbi @@ -6,7 +6,10 @@ module OpenAI module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash) + end sig { returns(OpenAI::FunctionDefinition) } attr_reader :function diff --git a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi index 67ab6bc6..6aab530f 100644 --- a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionToolMessageParam, + OpenAI::Internal::AnyHash + ) + end # The contents of the tool message. sig do diff --git a/rbi/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi index 2afec258..d53e5739 100644 --- a/rbi/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_user_message_param.rbi @@ -6,7 +6,13 @@ module OpenAI module Chat class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionUserMessageParam, + OpenAI::Internal::AnyHash + ) + end # The contents of the user message. sig do diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 9e67e729..a0f4b474 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams, + OpenAI::Internal::AnyHash + ) + end # A list of messages comprising the conversation so far. Depending on the # [model](https://platform.openai.com/docs/models) you use, different message @@ -837,7 +843,12 @@ module OpenAI class Function < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::Function, + OpenAI::Internal::AnyHash + ) + end # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain # underscores and dashes, with a maximum length of 64. @@ -1046,7 +1057,12 @@ module OpenAI class WebSearchOptions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions, + OpenAI::Internal::AnyHash + ) + end # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. 
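Every hunk in this stretch of the patch repeats one mechanical rewrite: each model's `OrHash` type alias drops `T.self_type` in favor of the enclosing class's fully qualified name. A minimal before/after sketch of the pattern, using a hypothetical `Foo` model (the rationale is inferred, not stated in the patch: `T.self_type` inside a `T.type_alias` is not anchored to the class that defines the alias, so naming the class is what keeps `Foo::OrHash` meaning "a `Foo` or a plain hash" wherever the alias is referenced):

    # Before: the alias leans on T.self_type, which is resolved relative to
    # whatever signature later references the alias, not relative to Foo.
    class Foo < OpenAI::Internal::Type::BaseModel
      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
    end

    # After: the union names Foo directly, so Foo::OrHash reads the same from
    # every call site.
    class Foo < OpenAI::Internal::Type::BaseModel
      OrHash = T.type_alias { T.any(Foo, OpenAI::Internal::AnyHash) }
    end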
@@ -1167,7 +1183,12 @@ module OpenAI class UserLocation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, + OpenAI::Internal::AnyHash + ) + end # Approximate location parameters for the search. sig do @@ -1219,7 +1240,12 @@ module OpenAI class Approximate < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + OpenAI::Internal::AnyHash + ) + end # Free text input for the city of the user, e.g. `San Francisco`. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/chat/completion_delete_params.rbi b/rbi/openai/models/chat/completion_delete_params.rbi index b960e8e9..1ab23698 100644 --- a/rbi/openai/models/chat/completion_delete_params.rbi +++ b/rbi/openai/models/chat/completion_delete_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::CompletionDeleteParams, + OpenAI::Internal::AnyHash + ) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/chat/completion_list_params.rbi b/rbi/openai/models/chat/completion_list_params.rbi index cf9b4c89..cb92d858 100644 --- a/rbi/openai/models/chat/completion_list_params.rbi +++ b/rbi/openai/models/chat/completion_list_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Chat::CompletionListParams, OpenAI::Internal::AnyHash) + end # Identifier for the last chat completion from the previous pagination request. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/chat/completion_retrieve_params.rbi b/rbi/openai/models/chat/completion_retrieve_params.rbi index 94561ed0..b1ece45b 100644 --- a/rbi/openai/models/chat/completion_retrieve_params.rbi +++ b/rbi/openai/models/chat/completion_retrieve_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::CompletionRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/chat/completion_update_params.rbi b/rbi/openai/models/chat/completion_update_params.rbi index 0055be95..7897528d 100644 --- a/rbi/openai/models/chat/completion_update_params.rbi +++ b/rbi/openai/models/chat/completion_update_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::CompletionUpdateParams, + OpenAI::Internal::AnyHash + ) + end # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and diff --git a/rbi/openai/models/chat/completions/message_list_params.rbi b/rbi/openai/models/chat/completions/message_list_params.rbi index f61f0565..2f78c626 100644 --- a/rbi/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/openai/models/chat/completions/message_list_params.rbi @@ -9,7 +9,12 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Chat::Completions::MessageListParams, + OpenAI::Internal::AnyHash + ) + end # Identifier for the last message from the previous pagination request. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/comparison_filter.rbi b/rbi/openai/models/comparison_filter.rbi index 3030e3c5..c18ff737 100644 --- a/rbi/openai/models/comparison_filter.rbi +++ b/rbi/openai/models/comparison_filter.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ComparisonFilter < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ComparisonFilter, OpenAI::Internal::AnyHash) + end # The key to compare against the value. sig { returns(String) } diff --git a/rbi/openai/models/completion.rbi b/rbi/openai/models/completion.rbi index 9eab71d9..b82f5786 100644 --- a/rbi/openai/models/completion.rbi +++ b/rbi/openai/models/completion.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class Completion < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::Completion, OpenAI::Internal::AnyHash) } # A unique identifier for the completion. sig { returns(String) } diff --git a/rbi/openai/models/completion_choice.rbi b/rbi/openai/models/completion_choice.rbi index 17b28983..25f173ec 100644 --- a/rbi/openai/models/completion_choice.rbi +++ b/rbi/openai/models/completion_choice.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class CompletionChoice < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CompletionChoice, OpenAI::Internal::AnyHash) + end # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -92,7 +95,10 @@ module OpenAI end class Logprobs < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CompletionChoice::Logprobs, OpenAI::Internal::AnyHash) + end sig { returns(T.nilable(T::Array[Integer])) } attr_reader :text_offset diff --git a/rbi/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi index efe5336e..4f9c3b9c 100644 --- a/rbi/openai/models/completion_create_params.rbi +++ b/rbi/openai/models/completion_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CompletionCreateParams, OpenAI::Internal::AnyHash) + end # ID of the model to use. 
You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to diff --git a/rbi/openai/models/completion_usage.rbi b/rbi/openai/models/completion_usage.rbi index 4e3dbbf3..f64ef423 100644 --- a/rbi/openai/models/completion_usage.rbi +++ b/rbi/openai/models/completion_usage.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class CompletionUsage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CompletionUsage, OpenAI::Internal::AnyHash) + end # Number of tokens in the generated completion. sig { returns(Integer) } @@ -85,7 +88,13 @@ module OpenAI end class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::CompletionUsage::CompletionTokensDetails, + OpenAI::Internal::AnyHash + ) + end # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. @@ -159,7 +168,13 @@ module OpenAI end class PromptTokensDetails < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::CompletionUsage::PromptTokensDetails, + OpenAI::Internal::AnyHash + ) + end # Audio input tokens present in the prompt. sig { returns(T.nilable(Integer)) } diff --git a/rbi/openai/models/compound_filter.rbi b/rbi/openai/models/compound_filter.rbi index b8972c6d..99097583 100644 --- a/rbi/openai/models/compound_filter.rbi +++ b/rbi/openai/models/compound_filter.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class CompoundFilter < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CompoundFilter, OpenAI::Internal::AnyHash) + end # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. diff --git a/rbi/openai/models/create_embedding_response.rbi b/rbi/openai/models/create_embedding_response.rbi index cf00c902..3228933e 100644 --- a/rbi/openai/models/create_embedding_response.rbi +++ b/rbi/openai/models/create_embedding_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::CreateEmbeddingResponse, OpenAI::Internal::AnyHash) + end # The list of embeddings generated by the model. sig { returns(T::Array[OpenAI::Embedding]) } @@ -58,7 +61,13 @@ module OpenAI end class Usage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::CreateEmbeddingResponse::Usage, + OpenAI::Internal::AnyHash + ) + end # The number of tokens used by the prompt. sig { returns(Integer) } diff --git a/rbi/openai/models/embedding.rbi b/rbi/openai/models/embedding.rbi index aa3abb7e..173bc91d 100644 --- a/rbi/openai/models/embedding.rbi +++ b/rbi/openai/models/embedding.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class Embedding < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::Embedding, OpenAI::Internal::AnyHash) } # The embedding vector, which is a list of floats. 
The length of vector depends on # the model as listed in the diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index a4452287..dd303b0d 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EmbeddingCreateParams, OpenAI::Internal::AnyHash) + end # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. diff --git a/rbi/openai/models/error_object.rbi b/rbi/openai/models/error_object.rbi index 5f923ecc..8cc3cf23 100644 --- a/rbi/openai/models/error_object.rbi +++ b/rbi/openai/models/error_object.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class ErrorObject < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::ErrorObject, OpenAI::Internal::AnyHash) } sig { returns(T.nilable(String)) } attr_accessor :code diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index b3673227..baafa832 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalCreateParams, OpenAI::Internal::AnyHash) + end # The configuration for the data source used for the evaluation runs. sig do @@ -132,7 +135,12 @@ module OpenAI class Custom < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::Internal::AnyHash + ) + end # The json schema for each row in the data source. sig { returns(T::Hash[Symbol, T.anything]) } @@ -189,7 +197,12 @@ module OpenAI class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions, + OpenAI::Internal::AnyHash + ) + end # The type of data source. Always `stored_completions`. sig { returns(Symbol) } @@ -254,7 +267,12 @@ module OpenAI class LabelModel < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel, + OpenAI::Internal::AnyHash + ) + end # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. @@ -361,7 +379,12 @@ module OpenAI class SimpleInputMessage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, + OpenAI::Internal::AnyHash + ) + end # The content of the message. 
sig { returns(String) } @@ -389,7 +412,12 @@ module OpenAI class EvalItem < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem, + OpenAI::Internal::AnyHash + ) + end # Text inputs to the model - can contain template strings. sig do @@ -495,7 +523,10 @@ module OpenAI class OutputText < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) end # The text output from the model. @@ -625,7 +656,12 @@ module OpenAI class TextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(Float) } @@ -646,7 +682,12 @@ module OpenAI class Python < OpenAI::Models::Graders::PythonGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::Python, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -670,7 +711,12 @@ module OpenAI class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::ScoreModel, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index 2928d3b9..fd3d328d 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalCreateResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Models::EvalCreateResponse, OpenAI::Internal::AnyHash) + end # Unique identifier for the evaluation. sig { returns(String) } @@ -180,7 +183,12 @@ module OpenAI class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(Float) } @@ -201,7 +209,12 @@ module OpenAI class EvalGraderPython < OpenAI::Models::Graders::PythonGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -225,7 +238,12 @@ module OpenAI class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. 
sig { returns(T.nilable(Float)) } diff --git a/rbi/openai/models/eval_custom_data_source_config.rbi b/rbi/openai/models/eval_custom_data_source_config.rbi index 6725b324..e00ae9a0 100644 --- a/rbi/openai/models/eval_custom_data_source_config.rbi +++ b/rbi/openai/models/eval_custom_data_source_config.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::Internal::AnyHash) + end # The json schema for the run data source items. Learn how to build JSON schemas # [here](https://json-schema.org/). diff --git a/rbi/openai/models/eval_delete_params.rbi b/rbi/openai/models/eval_delete_params.rbi index e5ead349..40b7ec64 100644 --- a/rbi/openai/models/eval_delete_params.rbi +++ b/rbi/openai/models/eval_delete_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalDeleteParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/eval_delete_response.rbi b/rbi/openai/models/eval_delete_response.rbi index 5ed757c3..e324447f 100644 --- a/rbi/openai/models/eval_delete_response.rbi +++ b/rbi/openai/models/eval_delete_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Models::EvalDeleteResponse, OpenAI::Internal::AnyHash) + end sig { returns(T::Boolean) } attr_accessor :deleted diff --git a/rbi/openai/models/eval_list_params.rbi b/rbi/openai/models/eval_list_params.rbi index 5b1a38f4..4b125b67 100644 --- a/rbi/openai/models/eval_list_params.rbi +++ b/rbi/openai/models/eval_list_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalListParams, OpenAI::Internal::AnyHash) + end # Identifier for the last eval from the previous pagination request. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index 5b20006b..32fb803c 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalListResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Models::EvalListResponse, OpenAI::Internal::AnyHash) + end # Unique identifier for the evaluation. sig { returns(String) } @@ -180,7 +183,12 @@ module OpenAI class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. 
sig { returns(Float) } @@ -201,7 +209,12 @@ module OpenAI class EvalGraderPython < OpenAI::Models::Graders::PythonGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -225,7 +238,12 @@ module OpenAI class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } diff --git a/rbi/openai/models/eval_retrieve_params.rbi b/rbi/openai/models/eval_retrieve_params.rbi index c75e1ad6..f8fd0c57 100644 --- a/rbi/openai/models/eval_retrieve_params.rbi +++ b/rbi/openai/models/eval_retrieve_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalRetrieveParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index cdeeaf69..65635306 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Models::EvalRetrieveResponse, OpenAI::Internal::AnyHash) + end # Unique identifier for the evaluation. sig { returns(String) } @@ -180,7 +183,12 @@ module OpenAI class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(Float) } @@ -201,7 +209,12 @@ module OpenAI class EvalGraderPython < OpenAI::Models::Graders::PythonGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -225,7 +238,12 @@ module OpenAI class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. 
sig { returns(T.nilable(Float)) } diff --git a/rbi/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi index 99f736f1..0658e8f4 100644 --- a/rbi/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/openai/models/eval_stored_completions_data_source_config.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::EvalStoredCompletionsDataSourceConfig, + OpenAI::Internal::AnyHash + ) + end # The json schema for the run data source items. Learn how to build JSON schemas # [here](https://json-schema.org/). diff --git a/rbi/openai/models/eval_update_params.rbi b/rbi/openai/models/eval_update_params.rbi index a5bbc2a7..e3e760e6 100644 --- a/rbi/openai/models/eval_update_params.rbi +++ b/rbi/openai/models/eval_update_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::EvalUpdateParams, OpenAI::Internal::AnyHash) + end # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index a8c27dd8..c3cefaf3 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Models::EvalUpdateResponse, OpenAI::Internal::AnyHash) + end # Unique identifier for the evaluation. sig { returns(String) } @@ -180,7 +183,12 @@ module OpenAI class EvalGraderTextSimilarity < OpenAI::Models::Graders::TextSimilarityGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(Float) } @@ -201,7 +209,12 @@ module OpenAI class EvalGraderPython < OpenAI::Models::Graders::PythonGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. sig { returns(T.nilable(Float)) } @@ -225,7 +238,12 @@ module OpenAI class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel, + OpenAI::Internal::AnyHash + ) + end # The threshold for the score. 
sig { returns(T.nilable(Float)) } diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index c3f2299e..7dc975b1 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource, + OpenAI::Internal::AnyHash + ) + end # A StoredCompletionsRunDataSource configuration describing a set of filters sig do @@ -146,7 +152,12 @@ module OpenAI class FileContent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end # The content of the jsonl file. sig do @@ -195,7 +206,12 @@ module OpenAI class Content < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -230,7 +246,12 @@ module OpenAI class FileID < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end # The identifier of the file. sig { returns(String) } @@ -256,7 +277,12 @@ module OpenAI class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions, + OpenAI::Internal::AnyHash + ) + end # The type of source. Always `stored_completions`. sig { returns(Symbol) } @@ -389,7 +415,12 @@ module OpenAI class Template < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. @@ -465,7 +496,12 @@ module OpenAI class Message < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message, + OpenAI::Internal::AnyHash + ) + end # Text inputs to the model - can contain template strings. sig do @@ -571,7 +607,10 @@ module OpenAI class OutputText < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Internal::AnyHash + ) end # The text output from the model. 
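The aliases being rewritten here exist for caller ergonomics: generated signatures that accept a model can also accept a plain hash of the same shape. A hedged usage sketch (`push_part` and its parameter are illustrative helpers, not part of this patch; only the `OrHash` alias is real):

    # A hypothetical consumer of an OrHash alias: both a constructed model
    # and a bare hash satisfy the parameter type.
    sig do
      params(
        part: OpenAI::Chat::ChatCompletionContentPartText::OrHash
      ).void
    end
    def push_part(part)
      # Either spelling type-checks against the alias:
      #   push_part(OpenAI::Chat::ChatCompletionContentPartText.new(text: "hi"))
      #   push_part({text: "hi", type: :text})
    end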
@@ -703,7 +742,12 @@ module OpenAI class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } @@ -744,7 +788,12 @@ module OpenAI class SamplingParams < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + end # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index e9715934..f27424f1 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -67,7 +73,12 @@ module OpenAI class FileContent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end # The content of the jsonl file. sig do @@ -116,7 +127,12 @@ module OpenAI class Content < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :item @@ -151,7 +167,12 @@ module OpenAI class FileID < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end # The identifier of the file. sig { returns(String) } diff --git a/rbi/openai/models/evals/eval_api_error.rbi b/rbi/openai/models/evals/eval_api_error.rbi index 153211d5..c37beff5 100644 --- a/rbi/openai/models/evals/eval_api_error.rbi +++ b/rbi/openai/models/evals/eval_api_error.rbi @@ -6,7 +6,10 @@ module OpenAI module Evals class EvalAPIError < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Evals::EvalAPIError, OpenAI::Internal::AnyHash) + end # The error code. 
sig { returns(String) } diff --git a/rbi/openai/models/evals/run_cancel_params.rbi b/rbi/openai/models/evals/run_cancel_params.rbi index 0dd60f39..fdef62a7 100644 --- a/rbi/openai/models/evals/run_cancel_params.rbi +++ b/rbi/openai/models/evals/run_cancel_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Evals::RunCancelParams, OpenAI::Internal::AnyHash) + end sig { returns(String) } attr_accessor :eval_id diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 923b2437..2c1f6358 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Evals class RunCancelResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse, + OpenAI::Internal::AnyHash + ) + end # Unique identifier for the evaluation run. sig { returns(String) } @@ -214,7 +220,12 @@ module OpenAI class Completions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions, + OpenAI::Internal::AnyHash + ) + end # A EvalResponsesSource object describing a run data source configuration. sig do @@ -350,7 +361,12 @@ module OpenAI class FileContent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end # The content of the jsonl file. sig do @@ -400,7 +416,10 @@ module OpenAI class Content < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) end sig { returns(T::Hash[Symbol, T.anything]) } @@ -436,7 +455,12 @@ module OpenAI class FileID < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, + OpenAI::Internal::AnyHash + ) + end # The identifier of the file. sig { returns(String) } @@ -464,7 +488,12 @@ module OpenAI class Responses < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses, + OpenAI::Internal::AnyHash + ) + end # The type of run data source. Always `responses`. sig { returns(Symbol) } @@ -625,7 +654,12 @@ module OpenAI class Template < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
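A smaller pattern runs alongside the main one: when the expanded union still fits on a single line, the alias keeps the brace form, and only the longer names move to a multi-line `do ... end` block. Both forms, as they appear in hunks of this patch:

    # Short name, single line (as in the Completion hunk):
    OrHash =
      T.type_alias { T.any(OpenAI::Completion, OpenAI::Internal::AnyHash) }

    # Longer name, multi-line block (as in most other hunks):
    OrHash =
      T.type_alias do
        T.any(
          OpenAI::Chat::ChatCompletionStoreMessage,
          OpenAI::Internal::AnyHash
        )
      end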
@@ -702,7 +736,10 @@ module OpenAI class ChatMessage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) end # The content of the message. @@ -734,7 +771,10 @@ module OpenAI class EvalItem < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) end # Text inputs to the model - can contain template strings. @@ -841,7 +881,10 @@ module OpenAI class OutputText < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) end # The text output from the model. @@ -973,7 +1016,12 @@ module OpenAI class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } @@ -1016,7 +1064,12 @@ module OpenAI class SamplingParams < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, + OpenAI::Internal::AnyHash + ) + end # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } @@ -1094,7 +1147,12 @@ module OpenAI class PerModelUsage < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::PerModelUsage, + OpenAI::Internal::AnyHash + ) + end # The number of tokens retrieved from cache. sig { returns(Integer) } @@ -1164,7 +1222,12 @@ module OpenAI class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult, + OpenAI::Internal::AnyHash + ) + end # Number of tests failed for this criteria. sig { returns(Integer) } @@ -1206,7 +1269,12 @@ module OpenAI class ResultCounts < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + OpenAI::Internal::AnyHash + ) + end # Number of output items that resulted in an error. 
sig { returns(Integer) } diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index b1f07b5d..7b1ebb3f 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Evals::RunCreateParams, OpenAI::Internal::AnyHash) + end # Details about the run's data source. sig do @@ -99,7 +102,12 @@ module OpenAI class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource, + OpenAI::Internal::AnyHash + ) + end # A EvalResponsesSource object describing a run data source configuration. sig do @@ -241,7 +249,12 @@ module OpenAI class FileContent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end # The content of the jsonl file. sig do @@ -291,7 +304,10 @@ module OpenAI class Content < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) end sig { returns(T::Hash[Symbol, T.anything]) } @@ -327,7 +343,12 @@ module OpenAI class FileID < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end # The identifier of the file. sig { returns(String) } @@ -355,7 +376,12 @@ module OpenAI class Responses < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses, + OpenAI::Internal::AnyHash + ) + end # The type of run data source. Always `responses`. sig { returns(Symbol) } @@ -544,7 +570,12 @@ module OpenAI class Template < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. @@ -621,7 +652,10 @@ module OpenAI class ChatMessage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) end # The content of the message. 
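One more detail visible in the replacements: request-parameter classes are spelled through the short `OpenAI::` namespace (`OpenAI::Evals::RunCreateParams`), while response models keep the explicit `OpenAI::Models::` prefix (`OpenAI::Models::Evals::RunCancelResponse`). A sketch of the presumed wiring, assuming the SDK aliases model namespaces at the top level (the constant definitions themselves are not part of this patch):

    # Hypothetical alias making the short spelling resolve to the same class:
    module OpenAI
      Evals = OpenAI::Models::Evals
    end

    OpenAI::Evals::RunCreateParams            # short form, used in param hunks
    OpenAI::Models::Evals::RunCancelResponse  # explicit form, used in response hunks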
@@ -653,7 +687,10 @@ module OpenAI class EvalItem < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) end # Text inputs to the model - can contain template strings. @@ -760,7 +797,10 @@ module OpenAI class OutputText < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do - T.any(T.self_type, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) end # The text output from the model. @@ -892,7 +932,12 @@ module OpenAI class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end # A reference to a variable in the "item" namespace. Ie, "item.name" sig { returns(String) } @@ -935,7 +980,12 @@ module OpenAI class SamplingParams < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + end # The maximum number of tokens in the generated output. sig { returns(T.nilable(Integer)) } diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index 6c3c64f6..d4f02bc5 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Evals class RunCreateResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse, + OpenAI::Internal::AnyHash + ) + end # Unique identifier for the evaluation run. sig { returns(String) } @@ -214,7 +220,12 @@ module OpenAI class Completions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions, + OpenAI::Internal::AnyHash + ) + end # A EvalResponsesSource object describing a run data source configuration. sig do @@ -350,7 +361,12 @@ module OpenAI class FileContent < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end # The content of the jsonl file. 
               sig do
@@ -400,7 +416,10 @@ module OpenAI
               class Content < OpenAI::Internal::Type::BaseModel
                 OrHash =
                   T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content,
+                      OpenAI::Internal::AnyHash
+                    )
                   end
 
                 sig { returns(T::Hash[Symbol, T.anything]) }
@@ -436,7 +455,12 @@ module OpenAI
             class FileID < OpenAI::Internal::Type::BaseModel
               OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
               # The identifier of the file.
               sig { returns(String) }
@@ -464,7 +488,12 @@ module OpenAI
           class Responses < OpenAI::Internal::Type::BaseModel
             OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
             # The type of run data source. Always `responses`.
             sig { returns(Symbol) }
@@ -625,7 +654,12 @@ module OpenAI
           class Template < OpenAI::Internal::Type::BaseModel
             OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
             # A list of chat messages forming the prompt or context. May include variable
             # references to the "item" namespace, ie {{item.name}}.
@@ -702,7 +736,10 @@ module OpenAI
               class ChatMessage < OpenAI::Internal::Type::BaseModel
                 OrHash =
                   T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+                      OpenAI::Internal::AnyHash
+                    )
                   end
 
                 # The content of the message.
@@ -734,7 +771,10 @@ module OpenAI
               class EvalItem < OpenAI::Internal::Type::BaseModel
                 OrHash =
                   T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem,
+                      OpenAI::Internal::AnyHash
+                    )
                   end
 
                 # Text inputs to the model - can contain template strings.
@@ -841,7 +881,10 @@ module OpenAI
                   class OutputText < OpenAI::Internal::Type::BaseModel
                     OrHash =
                       T.type_alias do
-                        T.any(T.self_type, OpenAI::Internal::AnyHash)
+                        T.any(
+                          OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText,
+                          OpenAI::Internal::AnyHash
+                        )
                       end
 
                     # The text output from the model.
@@ -973,7 +1016,12 @@ module OpenAI
           class ItemReference < OpenAI::Internal::Type::BaseModel
             OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
             # A reference to a variable in the "item" namespace. Ie, "item.name"
             sig { returns(String) }
@@ -1016,7 +1064,12 @@ module OpenAI
         class SamplingParams < OpenAI::Internal::Type::BaseModel
           OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
           # The maximum number of tokens in the generated output.
          sig { returns(T.nilable(Integer)) }
@@ -1094,7 +1147,12 @@ module OpenAI
        class PerModelUsage < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunCreateResponse::PerModelUsage,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The number of tokens retrieved from cache.
          sig { returns(Integer) }
@@ -1164,7 +1222,12 @@ module OpenAI
        class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of tests failed for this criteria.
          sig { returns(Integer) }
@@ -1206,7 +1269,12 @@ module OpenAI
        class ResultCounts < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunCreateResponse::ResultCounts,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of output items that resulted in an error.
          sig { returns(Integer) }
diff --git a/rbi/openai/models/evals/run_delete_params.rbi b/rbi/openai/models/evals/run_delete_params.rbi
index 3fa9908b..e82ad163 100644
--- a/rbi/openai/models/evals/run_delete_params.rbi
+++ b/rbi/openai/models/evals/run_delete_params.rbi
@@ -7,7 +7,10 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Evals::RunDeleteParams, OpenAI::Internal::AnyHash)
+          end
 
        sig { returns(String) }
        attr_accessor :eval_id
diff --git a/rbi/openai/models/evals/run_delete_response.rbi b/rbi/openai/models/evals/run_delete_response.rbi
index b7556eda..552da6af 100644
--- a/rbi/openai/models/evals/run_delete_response.rbi
+++ b/rbi/openai/models/evals/run_delete_response.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module Evals
      class RunDeleteResponse < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::Models::Evals::RunDeleteResponse,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        sig { returns(T.nilable(T::Boolean)) }
        attr_reader :deleted
diff --git a/rbi/openai/models/evals/run_list_params.rbi b/rbi/openai/models/evals/run_list_params.rbi
index a442bc5d..67d654cd 100644
--- a/rbi/openai/models/evals/run_list_params.rbi
+++ b/rbi/openai/models/evals/run_list_params.rbi
@@ -7,7 +7,10 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Evals::RunListParams, OpenAI::Internal::AnyHash)
+          end
 
        # Identifier for the last run from the previous pagination request.
        sig { returns(T.nilable(String)) }
diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi
index 8bce072b..1306554c 100644
--- a/rbi/openai/models/evals/run_list_response.rbi
+++ b/rbi/openai/models/evals/run_list_response.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module Evals
      class RunListResponse < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::Models::Evals::RunListResponse,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Unique identifier for the evaluation run.
        sig { returns(String) }
@@ -212,7 +218,12 @@ module OpenAI
        class Completions < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunListResponse::DataSource::Completions,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # A EvalResponsesSource object describing a run data source configuration.
          sig do
@@ -348,7 +359,12 @@ module OpenAI
            class FileContent < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the jsonl file.
              sig do
@@ -398,7 +414,10 @@ module OpenAI
              class Content < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                sig { returns(T::Hash[Symbol, T.anything]) }
@@ -434,7 +453,12 @@ module OpenAI
            class FileID < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The identifier of the file.
              sig { returns(String) }
@@ -462,7 +486,12 @@ module OpenAI
          class Responses < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # The type of run data source. Always `responses`.
            sig { returns(Symbol) }
@@ -623,7 +652,12 @@ module OpenAI
          class Template < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # A list of chat messages forming the prompt or context. May include variable
            # references to the "item" namespace, ie {{item.name}}.
@@ -700,7 +734,10 @@ module OpenAI
              class ChatMessage < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                # The content of the message.
@@ -732,7 +769,10 @@ module OpenAI
              class EvalItem < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                # Text inputs to the model - can contain template strings.
@@ -839,7 +879,10 @@ module OpenAI
                  class OutputText < OpenAI::Internal::Type::BaseModel
                    OrHash =
                      T.type_alias do
-                        T.any(T.self_type, OpenAI::Internal::AnyHash)
+                        T.any(
+                          OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText,
+                          OpenAI::Internal::AnyHash
+                        )
                      end
 
                    # The text output from the model.
@@ -971,7 +1014,12 @@ module OpenAI
          class ItemReference < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # A reference to a variable in the "item" namespace. Ie, "item.name"
            sig { returns(String) }
@@ -1014,7 +1062,12 @@ module OpenAI
        class SamplingParams < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The maximum number of tokens in the generated output.
          sig { returns(T.nilable(Integer)) }
@@ -1092,7 +1145,12 @@ module OpenAI
        class PerModelUsage < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunListResponse::PerModelUsage,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The number of tokens retrieved from cache.
          sig { returns(Integer) }
@@ -1162,7 +1220,12 @@ module OpenAI
        class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of tests failed for this criteria.
          sig { returns(Integer) }
@@ -1204,7 +1267,12 @@ module OpenAI
        class ResultCounts < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunListResponse::ResultCounts,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of output items that resulted in an error.
          sig { returns(Integer) }
diff --git a/rbi/openai/models/evals/run_retrieve_params.rbi b/rbi/openai/models/evals/run_retrieve_params.rbi
index ad837ffe..002b7cb6 100644
--- a/rbi/openai/models/evals/run_retrieve_params.rbi
+++ b/rbi/openai/models/evals/run_retrieve_params.rbi
@@ -7,7 +7,10 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Evals::RunRetrieveParams, OpenAI::Internal::AnyHash)
+          end
 
        sig { returns(String) }
        attr_accessor :eval_id
diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi
index d1770d9c..eab94463 100644
--- a/rbi/openai/models/evals/run_retrieve_response.rbi
+++ b/rbi/openai/models/evals/run_retrieve_response.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module Evals
      class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::Models::Evals::RunRetrieveResponse,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Unique identifier for the evaluation run.
        sig { returns(String) }
@@ -216,7 +222,12 @@ module OpenAI
        class Completions < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # A EvalResponsesSource object describing a run data source configuration.
          sig do
@@ -352,7 +363,12 @@ module OpenAI
            class FileContent < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the jsonl file.
              sig do
@@ -402,7 +418,10 @@ module OpenAI
              class Content < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                sig { returns(T::Hash[Symbol, T.anything]) }
@@ -438,7 +457,12 @@ module OpenAI
            class FileID < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The identifier of the file.
              sig { returns(String) }
@@ -466,7 +490,12 @@ module OpenAI
          class Responses < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # The type of run data source. Always `responses`.
            sig { returns(Symbol) }
@@ -627,7 +656,12 @@ module OpenAI
          class Template < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # A list of chat messages forming the prompt or context. May include variable
            # references to the "item" namespace, ie {{item.name}}.
@@ -704,7 +738,10 @@ module OpenAI
              class ChatMessage < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                # The content of the message.
@@ -736,7 +773,10 @@ module OpenAI
              class EvalItem < OpenAI::Internal::Type::BaseModel
                OrHash =
                  T.type_alias do
-                    T.any(T.self_type, OpenAI::Internal::AnyHash)
+                    T.any(
+                      OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem,
+                      OpenAI::Internal::AnyHash
+                    )
                  end
 
                # Text inputs to the model - can contain template strings.
@@ -843,7 +883,10 @@ module OpenAI
                  class OutputText < OpenAI::Internal::Type::BaseModel
                    OrHash =
                      T.type_alias do
-                        T.any(T.self_type, OpenAI::Internal::AnyHash)
+                        T.any(
+                          OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText,
+                          OpenAI::Internal::AnyHash
+                        )
                      end
 
                    # The text output from the model.
@@ -975,7 +1018,12 @@ module OpenAI
          class ItemReference < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # A reference to a variable in the "item" namespace. Ie, "item.name"
            sig { returns(String) }
@@ -1018,7 +1066,12 @@ module OpenAI
        class SamplingParams < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The maximum number of tokens in the generated output.
          sig { returns(T.nilable(Integer)) }
@@ -1096,7 +1149,12 @@ module OpenAI
        class PerModelUsage < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The number of tokens retrieved from cache.
          sig { returns(Integer) }
@@ -1166,7 +1224,12 @@ module OpenAI
        class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of tests failed for this criteria.
          sig { returns(Integer) }
@@ -1208,7 +1271,12 @@ module OpenAI
        class ResultCounts < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of output items that resulted in an error.
          sig { returns(Integer) }
diff --git a/rbi/openai/models/evals/runs/output_item_list_params.rbi b/rbi/openai/models/evals/runs/output_item_list_params.rbi
index 52a091de..c4bfe68e 100644
--- a/rbi/openai/models/evals/runs/output_item_list_params.rbi
+++ b/rbi/openai/models/evals/runs/output_item_list_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Evals::Runs::OutputItemListParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          sig { returns(String) }
          attr_accessor :eval_id
diff --git a/rbi/openai/models/evals/runs/output_item_list_response.rbi b/rbi/openai/models/evals/runs/output_item_list_response.rbi
index 6f5cd696..9a34bb28 100644
--- a/rbi/openai/models/evals/runs/output_item_list_response.rbi
+++ b/rbi/openai/models/evals/runs/output_item_list_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Runs
        class OutputItemListResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::Runs::OutputItemListResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Unique identifier for the evaluation run output item.
          sig { returns(String) }
@@ -120,7 +125,12 @@ module OpenAI
          class Sample < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # An object representing an error response from the Eval API.
            sig { returns(OpenAI::Evals::EvalAPIError) }
@@ -263,7 +273,12 @@ module OpenAI
            class Input < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the message.
              sig { returns(String) }
@@ -292,7 +307,12 @@ module OpenAI
            class Output < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the message.
              sig { returns(T.nilable(String)) }
@@ -326,7 +346,12 @@ module OpenAI
            class Usage < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The number of tokens retrieved from cache.
              sig { returns(Integer) }
diff --git a/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi
index 77adecd0..59e73a99 100644
--- a/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi
+++ b/rbi/openai/models/evals/runs/output_item_retrieve_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Evals::Runs::OutputItemRetrieveParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          sig { returns(String) }
          attr_accessor :eval_id
diff --git a/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi b/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi
index 2e7053a5..8a39ad5e 100644
--- a/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi
+++ b/rbi/openai/models/evals/runs/output_item_retrieve_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Runs
        class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Unique identifier for the evaluation run output item.
          sig { returns(String) }
@@ -122,7 +127,12 @@ module OpenAI
          class Sample < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # An object representing an error response from the Eval API.
            sig { returns(OpenAI::Evals::EvalAPIError) }
@@ -265,7 +275,12 @@ module OpenAI
            class Input < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the message.
              sig { returns(String) }
@@ -294,7 +309,12 @@ module OpenAI
            class Output < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The content of the message.
              sig { returns(T.nilable(String)) }
@@ -328,7 +348,12 @@ module OpenAI
            class Usage < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The number of tokens retrieved from cache.
              sig { returns(Integer) }
diff --git a/rbi/openai/models/file_content_params.rbi b/rbi/openai/models/file_content_params.rbi
index 4790429d..4d403623 100644
--- a/rbi/openai/models/file_content_params.rbi
+++ b/rbi/openai/models/file_content_params.rbi
@@ -6,7 +6,10 @@ module OpenAI
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FileContentParams, OpenAI::Internal::AnyHash)
+        end
 
      sig do
        params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi
index db0074ca..d3a1c945 100644
--- a/rbi/openai/models/file_create_params.rbi
+++ b/rbi/openai/models/file_create_params.rbi
@@ -6,7 +6,10 @@ module OpenAI
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FileCreateParams, OpenAI::Internal::AnyHash)
+        end
 
      # The File object (not file name) to be uploaded.
      sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) }
diff --git a/rbi/openai/models/file_delete_params.rbi b/rbi/openai/models/file_delete_params.rbi
index 32e8c812..8c870046 100644
--- a/rbi/openai/models/file_delete_params.rbi
+++ b/rbi/openai/models/file_delete_params.rbi
@@ -6,7 +6,10 @@ module OpenAI
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FileDeleteParams, OpenAI::Internal::AnyHash)
+        end
 
      sig do
        params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/file_deleted.rbi b/rbi/openai/models/file_deleted.rbi
index afff0670..c8a74d38 100644
--- a/rbi/openai/models/file_deleted.rbi
+++ b/rbi/openai/models/file_deleted.rbi
@@ -3,7 +3,8 @@ module OpenAI
  module Models
    class FileDeleted < OpenAI::Internal::Type::BaseModel
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias { T.any(OpenAI::FileDeleted, OpenAI::Internal::AnyHash) }
 
      sig { returns(String) }
      attr_accessor :id
diff --git a/rbi/openai/models/file_list_params.rbi b/rbi/openai/models/file_list_params.rbi
index 03836c50..0fb370b2 100644
--- a/rbi/openai/models/file_list_params.rbi
+++ b/rbi/openai/models/file_list_params.rbi
@@ -6,7 +6,10 @@ module OpenAI
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FileListParams, OpenAI::Internal::AnyHash)
+        end
 
      # A cursor for use in pagination. `after` is an object ID that defines your place
      # in the list. For instance, if you make a list request and receive 100 objects,
diff --git a/rbi/openai/models/file_object.rbi b/rbi/openai/models/file_object.rbi
index 829cee09..437d1b71 100644
--- a/rbi/openai/models/file_object.rbi
+++ b/rbi/openai/models/file_object.rbi
@@ -3,7 +3,8 @@ module OpenAI
  module Models
    class FileObject < OpenAI::Internal::Type::BaseModel
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias { T.any(OpenAI::FileObject, OpenAI::Internal::AnyHash) }
 
      # The file identifier, which can be referenced in the API endpoints.
      sig { returns(String) }
diff --git a/rbi/openai/models/file_retrieve_params.rbi b/rbi/openai/models/file_retrieve_params.rbi
index ed8a974f..f5c3c562 100644
--- a/rbi/openai/models/file_retrieve_params.rbi
+++ b/rbi/openai/models/file_retrieve_params.rbi
@@ -6,7 +6,10 @@ module OpenAI
      extend OpenAI::Internal::Type::RequestParameters::Converter
      include OpenAI::Internal::Type::RequestParameters
 
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FileRetrieveParams, OpenAI::Internal::AnyHash)
+        end
 
      sig do
        params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi
index b2df808e..1c5d0dbd 100644
--- a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi
+++ b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Alpha::GraderRunParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The grader used for the fine-tuning job.
          sig do
diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi
index 4db912c8..18e6589a 100644
--- a/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi
+++ b/rbi/openai/models/fine_tuning/alpha/grader_run_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Alpha
        class GraderRunResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::FineTuning::Alpha::GraderRunResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          sig do
            returns(
@@ -65,7 +70,12 @@ module OpenAI
          class Metadata < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            sig do
              returns(
@@ -142,7 +152,12 @@ module OpenAI
            class Errors < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              sig { returns(T::Boolean) }
              attr_accessor :formula_parse_error
diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi
index 461e9373..f8dc6077 100644
--- a/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi
+++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Alpha::GraderValidateParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The grader used for the fine-tuning job.
          sig do
diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi
index ca2d42db..138bf221 100644
--- a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi
+++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Alpha
        class GraderValidateResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::FineTuning::Alpha::GraderValidateResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The grader used for the fine-tuning job.
          sig do
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi
index bb260bea..26583ba9 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_create_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Checkpoints::PermissionCreateParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The project identifiers to grant access to.
          sig { returns(T::Array[String]) }
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi
index c17f2d89..356802c5 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_create_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Checkpoints
        class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::FineTuning::Checkpoints::PermissionCreateResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The permission identifier, which can be referenced in the API endpoints.
          sig { returns(String) }
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
index 0be64531..8051825a 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Checkpoints::PermissionDeleteParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          sig { returns(String) }
          attr_accessor :fine_tuned_model_checkpoint
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi
index f84b6fea..b0b5d45b 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_delete_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Checkpoints
        class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::FineTuning::Checkpoints::PermissionDeleteResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The ID of the fine-tuned model checkpoint permission that was deleted.
          sig { returns(String) }
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi
index 84601eba..56996168 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Identifier for the last permission ID from the previous pagination request.
          sig { returns(T.nilable(String)) }
diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi
index 3c65d481..2501be7b 100644
--- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi
+++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Checkpoints
        class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          sig do
            returns(
@@ -69,7 +74,12 @@ module OpenAI
          class Data < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # The permission identifier, which can be referenced in the API endpoints.
            sig { returns(String) }
diff --git a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi
index d210f91b..4172bfd0 100644
--- a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi
+++ b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module FineTuning
      class DpoHyperparameters < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::DpoHyperparameters,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Number of examples in each batch. A larger batch size means that model
        # parameters are updated less frequently, but with lower variance.
diff --git a/rbi/openai/models/fine_tuning/dpo_method.rbi b/rbi/openai/models/fine_tuning/dpo_method.rbi
index 05f15967..86d2b505 100644
--- a/rbi/openai/models/fine_tuning/dpo_method.rbi
+++ b/rbi/openai/models/fine_tuning/dpo_method.rbi
@@ -4,7 +4,10 @@ module OpenAI
  module Models
    module FineTuning
      class DpoMethod < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::FineTuning::DpoMethod, OpenAI::Internal::AnyHash)
+          end
 
        # The hyperparameters used for the DPO fine-tuning job.
        sig { returns(T.nilable(OpenAI::FineTuning::DpoHyperparameters)) }
diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi
index 00b007eb..21a26e15 100644
--- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi
+++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module FineTuning
      class FineTuningJob < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::FineTuning::FineTuningJob, OpenAI::Internal::AnyHash)
+          end
 
        # The object identifier, which can be referenced in the API endpoints.
        sig { returns(String) }
@@ -255,7 +258,12 @@ module OpenAI
        class Error < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::FineTuningJob::Error,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # A machine-readable error code.
          sig { returns(String) }
@@ -301,7 +309,12 @@ module OpenAI
        class Hyperparameters < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::FineTuningJob::Hyperparameters,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of examples in each batch. A larger batch size means that model
          # parameters are updated less frequently, but with lower variance.
@@ -469,7 +482,12 @@ module OpenAI
        class Method < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::FineTuningJob::Method,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The type of method. Is either `supervised`, `dpo`, or `reinforcement`.
          sig do
diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi
index 4cee296b..abbbe399 100644
--- a/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi
+++ b/rbi/openai/models/fine_tuning/fine_tuning_job_event.rbi
@@ -6,7 +6,13 @@ module OpenAI
    module FineTuning
      class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::FineTuningJobEvent,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The object identifier.
        sig { returns(String) }
diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi
index c5b719f8..a728e848 100644
--- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi
+++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbi
@@ -6,7 +6,13 @@ module OpenAI
    module FineTuning
      class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::FineTuningJobWandbIntegration,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The name of the project that the new run will be created under.
        sig { returns(String) }
diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
index 934a2242..b92f3743 100644
--- a/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
+++ b/rbi/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbi
@@ -7,7 +7,13 @@ module OpenAI
    module FineTuning
      class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::FineTuningJobWandbIntegrationObject,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The type of the integration being enabled for the fine-tuning job
        sig { returns(Symbol) }
diff --git a/rbi/openai/models/fine_tuning/job_cancel_params.rbi b/rbi/openai/models/fine_tuning/job_cancel_params.rbi
index 92f99a73..3ae73b64 100644
--- a/rbi/openai/models/fine_tuning/job_cancel_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_cancel_params.rbi
@@ -7,7 +7,13 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::JobCancelParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        sig do
          params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi
index 2f9fcad2..32e49ed1 100644
--- a/rbi/openai/models/fine_tuning/job_create_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_create_params.rbi
@@ -7,7 +7,13 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::JobCreateParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The name of the model to fine-tune. You can select one of the
        # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
@@ -279,7 +285,12 @@ module OpenAI
        class Hyperparameters < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::JobCreateParams::Hyperparameters,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Number of examples in each batch. A larger batch size means that model
          # parameters are updated less frequently, but with lower variance.
@@ -396,7 +407,12 @@ module OpenAI
        class Integration < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::JobCreateParams::Integration,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The type of integration to enable. Currently, only "wandb" (Weights and Biases)
          # is supported.
@@ -452,7 +468,12 @@ module OpenAI
          class Wandb < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::FineTuning::JobCreateParams::Integration::Wandb,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            # The name of the project that the new run will be created under.
            sig { returns(String) }
@@ -524,7 +545,12 @@ module OpenAI
        class Method < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::JobCreateParams::Method,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The type of method. Is either `supervised`, `dpo`, or `reinforcement`.
          sig do
diff --git a/rbi/openai/models/fine_tuning/job_list_events_params.rbi b/rbi/openai/models/fine_tuning/job_list_events_params.rbi
index 60c32908..9f01086f 100644
--- a/rbi/openai/models/fine_tuning/job_list_events_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_list_events_params.rbi
@@ -7,7 +7,13 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::JobListEventsParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Identifier for the last event from the previous pagination request.
        sig { returns(T.nilable(String)) }
diff --git a/rbi/openai/models/fine_tuning/job_list_params.rbi b/rbi/openai/models/fine_tuning/job_list_params.rbi
index 03b6a56d..96c0267e 100644
--- a/rbi/openai/models/fine_tuning/job_list_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_list_params.rbi
@@ -7,7 +7,10 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::FineTuning::JobListParams, OpenAI::Internal::AnyHash)
+          end
 
        # Identifier for the last job from the previous pagination request.
        sig { returns(T.nilable(String)) }
diff --git a/rbi/openai/models/fine_tuning/job_pause_params.rbi b/rbi/openai/models/fine_tuning/job_pause_params.rbi
index 66935edf..cf0e05ac 100644
--- a/rbi/openai/models/fine_tuning/job_pause_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_pause_params.rbi
@@ -7,7 +7,10 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::FineTuning::JobPauseParams, OpenAI::Internal::AnyHash)
+          end
 
        sig do
          params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/fine_tuning/job_resume_params.rbi b/rbi/openai/models/fine_tuning/job_resume_params.rbi
index da91536e..43fa43b3 100644
--- a/rbi/openai/models/fine_tuning/job_resume_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_resume_params.rbi
@@ -7,7 +7,13 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::JobResumeParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        sig do
          params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/fine_tuning/job_retrieve_params.rbi b/rbi/openai/models/fine_tuning/job_retrieve_params.rbi
index 2d5d4a41..b43b2e48 100644
--- a/rbi/openai/models/fine_tuning/job_retrieve_params.rbi
+++ b/rbi/openai/models/fine_tuning/job_retrieve_params.rbi
@@ -7,7 +7,13 @@ module OpenAI
        extend OpenAI::Internal::Type::RequestParameters::Converter
        include OpenAI::Internal::Type::RequestParameters
 
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::JobRetrieveParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        sig do
          params(request_options: OpenAI::RequestOptions::OrHash).returns(
diff --git a/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi b/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
index 29a595b3..06c2f618 100644
--- a/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
+++ b/rbi/openai/models/fine_tuning/jobs/checkpoint_list_params.rbi
@@ -9,7 +9,12 @@ module OpenAI
          include OpenAI::Internal::Type::RequestParameters
 
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Jobs::CheckpointListParams,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Identifier for the last checkpoint ID from the previous pagination request.
          sig { returns(T.nilable(String)) }
diff --git a/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi b/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
index 80b8eb71..ab83c2cc 100644
--- a/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
+++ b/rbi/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbi
@@ -6,7 +6,12 @@ module OpenAI
      module Jobs
        class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # The checkpoint identifier, which can be referenced in the API endpoints.
          sig { returns(String) }
@@ -97,7 +102,12 @@ module OpenAI
          class Metrics < OpenAI::Internal::Type::BaseModel
            OrHash =
-              T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+              T.type_alias do
+                T.any(
+                  OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics,
+                  OpenAI::Internal::AnyHash
+                )
+              end
 
            sig { returns(T.nilable(Float)) }
            attr_reader :full_valid_loss
diff --git a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi
index 1c81d0d8..3105e2ef 100644
--- a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi
+++ b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module FineTuning
      class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::ReinforcementHyperparameters,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Number of examples in each batch. A larger batch size means that model
        # parameters are updated less frequently, but with lower variance.
diff --git a/rbi/openai/models/fine_tuning/reinforcement_method.rbi b/rbi/openai/models/fine_tuning/reinforcement_method.rbi
index 2ed3ae7b..f49c9b5c 100644
--- a/rbi/openai/models/fine_tuning/reinforcement_method.rbi
+++ b/rbi/openai/models/fine_tuning/reinforcement_method.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module FineTuning
      class ReinforcementMethod < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::ReinforcementMethod,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The grader used for the fine-tuning job.
        sig do
diff --git a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi
index bc9b8321..dc8167ab 100644
--- a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi
+++ b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module FineTuning
      class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::SupervisedHyperparameters,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # Number of examples in each batch. A larger batch size means that model
        # parameters are updated less frequently, but with lower variance.
diff --git a/rbi/openai/models/fine_tuning/supervised_method.rbi b/rbi/openai/models/fine_tuning/supervised_method.rbi
index 01cf3450..8b4b000e 100644
--- a/rbi/openai/models/fine_tuning/supervised_method.rbi
+++ b/rbi/openai/models/fine_tuning/supervised_method.rbi
@@ -4,7 +4,13 @@ module OpenAI
  module Models
    module FineTuning
      class SupervisedMethod < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::FineTuning::SupervisedMethod,
+              OpenAI::Internal::AnyHash
+            )
+          end
 
        # The hyperparameters used for the fine-tuning job.
        sig do
diff --git a/rbi/openai/models/function_definition.rbi b/rbi/openai/models/function_definition.rbi
index 66649eae..1d615e84 100644
--- a/rbi/openai/models/function_definition.rbi
+++ b/rbi/openai/models/function_definition.rbi
@@ -3,7 +3,10 @@ module OpenAI
  module Models
    class FunctionDefinition < OpenAI::Internal::Type::BaseModel
-      OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::FunctionDefinition, OpenAI::Internal::AnyHash)
+        end
 
      # The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
      # underscores and dashes, with a maximum length of 64.
diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi
index 7ed4f4e7..9d062b87 100644
--- a/rbi/openai/models/graders/label_model_grader.rbi
+++ b/rbi/openai/models/graders/label_model_grader.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module Graders
      class LabelModelGrader < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Graders::LabelModelGrader, OpenAI::Internal::AnyHash)
+          end
 
        sig { returns(T::Array[OpenAI::Graders::LabelModelGrader::Input]) }
        attr_accessor :input
@@ -75,7 +78,12 @@ module OpenAI
        class Input < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Graders::LabelModelGrader::Input,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Text inputs to the model - can contain template strings.
          sig do
@@ -173,7 +181,12 @@ module OpenAI
            class OutputText < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Graders::LabelModelGrader::Input::Content::OutputText,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The text output from the model.
              sig { returns(String) }
diff --git a/rbi/openai/models/graders/multi_grader.rbi b/rbi/openai/models/graders/multi_grader.rbi
index 73159cd7..bed184f8 100644
--- a/rbi/openai/models/graders/multi_grader.rbi
+++ b/rbi/openai/models/graders/multi_grader.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module Graders
      class MultiGrader < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Graders::MultiGrader, OpenAI::Internal::AnyHash)
+          end
 
        # A formula to calculate the output based on grader results.
        sig { returns(String) }
diff --git a/rbi/openai/models/graders/python_grader.rbi b/rbi/openai/models/graders/python_grader.rbi
index 35441d1b..ed504598 100644
--- a/rbi/openai/models/graders/python_grader.rbi
+++ b/rbi/openai/models/graders/python_grader.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module Graders
      class PythonGrader < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Graders::PythonGrader, OpenAI::Internal::AnyHash)
+          end
 
        # The name of the grader.
        sig { returns(String) }
diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi
index 1e082a6e..ab602eda 100644
--- a/rbi/openai/models/graders/score_model_grader.rbi
+++ b/rbi/openai/models/graders/score_model_grader.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module Graders
      class ScoreModelGrader < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Graders::ScoreModelGrader, OpenAI::Internal::AnyHash)
+          end
 
        # The input text. This may include template strings.
        sig { returns(T::Array[OpenAI::Graders::ScoreModelGrader::Input]) }
        attr_accessor :input
@@ -82,7 +85,12 @@ module OpenAI
        class Input < OpenAI::Internal::Type::BaseModel
          OrHash =
-            T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+            T.type_alias do
+              T.any(
+                OpenAI::Graders::ScoreModelGrader::Input,
+                OpenAI::Internal::AnyHash
+              )
+            end
 
          # Text inputs to the model - can contain template strings.
          sig do
@@ -180,7 +188,12 @@ module OpenAI
            class OutputText < OpenAI::Internal::Type::BaseModel
              OrHash =
-                T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+                T.type_alias do
+                  T.any(
+                    OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
 
              # The text output from the model.
              sig { returns(String) }
diff --git a/rbi/openai/models/graders/string_check_grader.rbi b/rbi/openai/models/graders/string_check_grader.rbi
index 73de12d2..f46aca63 100644
--- a/rbi/openai/models/graders/string_check_grader.rbi
+++ b/rbi/openai/models/graders/string_check_grader.rbi
@@ -6,7 +6,10 @@ module OpenAI
    module Graders
      class StringCheckGrader < OpenAI::Internal::Type::BaseModel
-        OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) }
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Graders::StringCheckGrader, OpenAI::Internal::AnyHash)
+          end
 
        # The input text. This may include template strings.
sig { returns(String) } diff --git a/rbi/openai/models/graders/text_similarity_grader.rbi b/rbi/openai/models/graders/text_similarity_grader.rbi index 6d25d1e0..687e2f93 100644 --- a/rbi/openai/models/graders/text_similarity_grader.rbi +++ b/rbi/openai/models/graders/text_similarity_grader.rbi @@ -6,7 +6,13 @@ module OpenAI module Graders class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Internal::AnyHash + ) + end # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. diff --git a/rbi/openai/models/image.rbi b/rbi/openai/models/image.rbi index 867722c0..6c01cf82 100644 --- a/rbi/openai/models/image.rbi +++ b/rbi/openai/models/image.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = T.type_alias { T.any(OpenAI::Image, OpenAI::Internal::AnyHash) } # The base64-encoded JSON of the generated image. Default value for `gpt-image-1`, # and only present if `response_format` is set to `b64_json` for `dall-e-2` and diff --git a/rbi/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi index 9c31879a..a6be588e 100644 --- a/rbi/openai/models/image_create_variation_params.rbi +++ b/rbi/openai/models/image_create_variation_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ImageCreateVariationParams, OpenAI::Internal::AnyHash) + end # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 6fd55690..7225f73f 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ImageEditParams, OpenAI::Internal::AnyHash) + end # The image(s) to edit. Must be a supported image file or an array of images. # diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index 72894e72..c81dbbcf 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ImageGenerateParams, OpenAI::Internal::AnyHash) + end # A text description of the desired image(s). 
The maximum length is 32000 # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters diff --git a/rbi/openai/models/images_response.rbi b/rbi/openai/models/images_response.rbi index 86766493..b9d60ee3 100644 --- a/rbi/openai/models/images_response.rbi +++ b/rbi/openai/models/images_response.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ImagesResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ImagesResponse, OpenAI::Internal::AnyHash) + end # The Unix timestamp (in seconds) of when the image was created. sig { returns(Integer) } @@ -54,7 +57,10 @@ module OpenAI end class Usage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ImagesResponse::Usage, OpenAI::Internal::AnyHash) + end # The number of tokens (images and text) in the input prompt. sig { returns(Integer) } @@ -118,7 +124,12 @@ module OpenAI class InputTokensDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::ImagesResponse::Usage::InputTokensDetails, + OpenAI::Internal::AnyHash + ) + end # The number of image tokens in the input prompt. sig { returns(Integer) } diff --git a/rbi/openai/models/model.rbi b/rbi/openai/models/model.rbi index 3e2886a8..e84f4d72 100644 --- a/rbi/openai/models/model.rbi +++ b/rbi/openai/models/model.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class Model < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = T.type_alias { T.any(OpenAI::Model, OpenAI::Internal::AnyHash) } # The model identifier, which can be referenced in the API endpoints. 
sig { returns(String) } diff --git a/rbi/openai/models/model_delete_params.rbi b/rbi/openai/models/model_delete_params.rbi index 34aa523d..50bdedfa 100644 --- a/rbi/openai/models/model_delete_params.rbi +++ b/rbi/openai/models/model_delete_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModelDeleteParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/model_deleted.rbi b/rbi/openai/models/model_deleted.rbi index 61571ba3..611d54d9 100644 --- a/rbi/openai/models/model_deleted.rbi +++ b/rbi/openai/models/model_deleted.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class ModelDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::ModelDeleted, OpenAI::Internal::AnyHash) } sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/models/model_list_params.rbi b/rbi/openai/models/model_list_params.rbi index 6b5c0602..c07911ee 100644 --- a/rbi/openai/models/model_list_params.rbi +++ b/rbi/openai/models/model_list_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModelListParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/model_retrieve_params.rbi b/rbi/openai/models/model_retrieve_params.rbi index 69b847c8..9d8d4c05 100644 --- a/rbi/openai/models/model_retrieve_params.rbi +++ b/rbi/openai/models/model_retrieve_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModelRetrieveParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/moderation.rbi b/rbi/openai/models/moderation.rbi index d069ca8a..b6e45d8b 100644 --- a/rbi/openai/models/moderation.rbi +++ b/rbi/openai/models/moderation.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class Moderation < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::Moderation, OpenAI::Internal::AnyHash) } # A list of the categories, and whether they are flagged or not. sig { returns(OpenAI::Moderation::Categories) } @@ -73,7 +74,10 @@ module OpenAI end class Categories < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Moderation::Categories, OpenAI::Internal::AnyHash) + end # Content that expresses, incites, or promotes harassing language towards any # target. 
@@ -233,7 +237,13 @@ module OpenAI end class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Moderation::CategoryAppliedInputTypes, + OpenAI::Internal::AnyHash + ) + end # The applied input type(s) for the category 'harassment'. sig do @@ -919,7 +929,10 @@ module OpenAI end class CategoryScores < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Moderation::CategoryScores, OpenAI::Internal::AnyHash) + end # The score for the category 'harassment'. sig { returns(Float) } diff --git a/rbi/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi index 7644e955..1f2a76f8 100644 --- a/rbi/openai/models/moderation_create_params.rbi +++ b/rbi/openai/models/moderation_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModerationCreateParams, OpenAI::Internal::AnyHash) + end # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. diff --git a/rbi/openai/models/moderation_create_response.rbi b/rbi/openai/models/moderation_create_response.rbi index 0da4c5b3..bdf51d1c 100644 --- a/rbi/openai/models/moderation_create_response.rbi +++ b/rbi/openai/models/moderation_create_response.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ModerationCreateResponse, + OpenAI::Internal::AnyHash + ) + end # The unique identifier for the moderation request. sig { returns(String) } diff --git a/rbi/openai/models/moderation_image_url_input.rbi b/rbi/openai/models/moderation_image_url_input.rbi index df96e516..4867a348 100644 --- a/rbi/openai/models/moderation_image_url_input.rbi +++ b/rbi/openai/models/moderation_image_url_input.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModerationImageURLInput, OpenAI::Internal::AnyHash) + end # Contains either an image URL or a data URL for a base64 encoded image. sig { returns(OpenAI::ModerationImageURLInput::ImageURL) } @@ -44,7 +47,13 @@ module OpenAI end class ImageURL < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::ModerationImageURLInput::ImageURL, + OpenAI::Internal::AnyHash + ) + end # Either a URL of the image or the base64 encoded image data. 
sig { returns(String) } diff --git a/rbi/openai/models/moderation_text_input.rbi b/rbi/openai/models/moderation_text_input.rbi index 98b5925e..7c08a607 100644 --- a/rbi/openai/models/moderation_text_input.rbi +++ b/rbi/openai/models/moderation_text_input.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ModerationTextInput < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ModerationTextInput, OpenAI::Internal::AnyHash) + end # A string of text to classify. sig { returns(String) } diff --git a/rbi/openai/models/other_file_chunking_strategy_object.rbi b/rbi/openai/models/other_file_chunking_strategy_object.rbi index d7ae19bd..744dd2d8 100644 --- a/rbi/openai/models/other_file_chunking_strategy_object.rbi +++ b/rbi/openai/models/other_file_chunking_strategy_object.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::OtherFileChunkingStrategyObject, + OpenAI::Internal::AnyHash + ) + end # Always `other`. sig { returns(Symbol) } diff --git a/rbi/openai/models/reasoning.rbi b/rbi/openai/models/reasoning.rbi index c11df8b1..dc89cb60 100644 --- a/rbi/openai/models/reasoning.rbi +++ b/rbi/openai/models/reasoning.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class Reasoning < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::Reasoning, OpenAI::Internal::AnyHash) } # **o-series models only** # diff --git a/rbi/openai/models/response_format_json_object.rbi b/rbi/openai/models/response_format_json_object.rbi index a02e14bc..6aebd1ed 100644 --- a/rbi/openai/models/response_format_json_object.rbi +++ b/rbi/openai/models/response_format_json_object.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ResponseFormatJSONObject, OpenAI::Internal::AnyHash) + end # The type of response format being defined. Always `json_object`. sig { returns(Symbol) } diff --git a/rbi/openai/models/response_format_json_schema.rbi b/rbi/openai/models/response_format_json_schema.rbi index dbaac8e0..8155e9c9 100644 --- a/rbi/openai/models/response_format_json_schema.rbi +++ b/rbi/openai/models/response_format_json_schema.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ResponseFormatJSONSchema, OpenAI::Internal::AnyHash) + end # Structured Outputs configuration options, including a JSON Schema. sig { returns(OpenAI::ResponseFormatJSONSchema::JSONSchema) } @@ -49,7 +52,13 @@ module OpenAI end class JSONSchema < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::ResponseFormatJSONSchema::JSONSchema, + OpenAI::Internal::AnyHash + ) + end # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. 
diff --git a/rbi/openai/models/response_format_text.rbi b/rbi/openai/models/response_format_text.rbi index a20756e2..24fa0549 100644 --- a/rbi/openai/models/response_format_text.rbi +++ b/rbi/openai/models/response_format_text.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class ResponseFormatText < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::ResponseFormatText, OpenAI::Internal::AnyHash) + end # The type of response format being defined. Always `text`. sig { returns(Symbol) } diff --git a/rbi/openai/models/responses/computer_tool.rbi b/rbi/openai/models/responses/computer_tool.rbi index 9cfcfe6a..af8eff7a 100644 --- a/rbi/openai/models/responses/computer_tool.rbi +++ b/rbi/openai/models/responses/computer_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class ComputerTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ComputerTool, OpenAI::Internal::AnyHash) + end # The height of the computer display. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/easy_input_message.rbi b/rbi/openai/models/responses/easy_input_message.rbi index 304fcceb..f4f1e6e9 100644 --- a/rbi/openai/models/responses/easy_input_message.rbi +++ b/rbi/openai/models/responses/easy_input_message.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class EasyInputMessage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Internal::AnyHash + ) + end # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. diff --git a/rbi/openai/models/responses/file_search_tool.rbi b/rbi/openai/models/responses/file_search_tool.rbi index 286697f9..29ff2004 100644 --- a/rbi/openai/models/responses/file_search_tool.rbi +++ b/rbi/openai/models/responses/file_search_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class FileSearchTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::FileSearchTool, OpenAI::Internal::AnyHash) + end # The type of the file search tool. Always `file_search`. sig { returns(Symbol) } @@ -115,7 +118,12 @@ module OpenAI class RankingOptions < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::FileSearchTool::RankingOptions, + OpenAI::Internal::AnyHash + ) + end # The ranker to use for the file search. sig do diff --git a/rbi/openai/models/responses/function_tool.rbi b/rbi/openai/models/responses/function_tool.rbi index c2658fc5..16f8f2d7 100644 --- a/rbi/openai/models/responses/function_tool.rbi +++ b/rbi/openai/models/responses/function_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class FunctionTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::FunctionTool, OpenAI::Internal::AnyHash) + end # The name of the function to call. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi index 8fe8462a..403928db 100644 --- a/rbi/openai/models/responses/input_item_list_params.rbi +++ b/rbi/openai/models/responses/input_item_list_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::InputItemListParams, + OpenAI::Internal::AnyHash + ) + end # An item ID to list items after, used in pagination. sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 3f45fc15..fcc7367f 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class Response < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::Response, OpenAI::Internal::AnyHash) + end # Unique identifier for this Response. sig { returns(String) } @@ -515,7 +518,12 @@ module OpenAI class IncompleteDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::Response::IncompleteDetails, + OpenAI::Internal::AnyHash + ) + end # The reason why the response is incomplete. sig do diff --git a/rbi/openai/models/responses/response_audio_delta_event.rbi b/rbi/openai/models/responses/response_audio_delta_event.rbi index 0ed8d2e1..87c6eff6 100644 --- a/rbi/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # A chunk of Base64 encoded response audio bytes. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_audio_done_event.rbi b/rbi/openai/models/responses/response_audio_done_event.rbi index 3d74ccf4..d9a86980 100644 --- a/rbi/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The type of the event. Always `response.audio.done`. 
sig { returns(Symbol) } diff --git a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi index bb70f631..74e69b9b 100644 --- a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The partial transcript of the audio response. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi index df2381ba..d0d670a2 100644 --- a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseAudioTranscriptDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The type of the event. Always `response.audio.transcript.done`. sig { returns(Symbol) } diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index dd5c1396..220e24ca 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The partial code snippet added by the code interpreter. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 9cd3b09c..43a90d5f 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The final code snippet output by the code interpreter. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi index e2f8c573..be772f72 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end # A tool call to run code. sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } diff --git a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 983c9007..578fdb15 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end # A tool call to run code. sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } diff --git a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index 031a295b..bdf1313a 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, + OpenAI::Internal::AnyHash + ) + end # A tool call to run code. sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index d964daa1..6d787089 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the code interpreter tool call. 
sig { returns(String) } @@ -105,7 +111,12 @@ module OpenAI class Logs < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Internal::AnyHash + ) + end # The logs of the code interpreter tool call. sig { returns(String) } @@ -132,7 +143,12 @@ module OpenAI class Files < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files, + OpenAI::Internal::AnyHash + ) + end sig do returns( @@ -180,7 +196,12 @@ module OpenAI class File < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, + OpenAI::Internal::AnyHash + ) + end # The ID of the file. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_completed_event.rbi b/rbi/openai/models/responses/response_completed_event.rbi index 3c962886..18dd991a 100644 --- a/rbi/openai/models/responses/response_completed_event.rbi +++ b/rbi/openai/models/responses/response_completed_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCompletedEvent, + OpenAI::Internal::AnyHash + ) + end # Properties of the completed response. sig { returns(OpenAI::Responses::Response) } diff --git a/rbi/openai/models/responses/response_computer_tool_call.rbi b/rbi/openai/models/responses/response_computer_tool_call.rbi index bd660209..8bdf6af6 100644 --- a/rbi/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the computer call. sig { returns(String) } @@ -151,7 +157,12 @@ module OpenAI class Click < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Click, + OpenAI::Internal::AnyHash + ) + end # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -267,7 +278,12 @@ module OpenAI class DoubleClick < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, + OpenAI::Internal::AnyHash + ) + end # Specifies the event type. For a double click action, this property is always set # to `double_click`. 
@@ -306,7 +322,12 @@ module OpenAI class Drag < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Drag, + OpenAI::Internal::AnyHash + ) + end # An array of coordinates representing the path of the drag action. Coordinates # will appear as an array of objects, e.g. @@ -374,7 +395,12 @@ module OpenAI class Path < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path, + OpenAI::Internal::AnyHash + ) + end # The x-coordinate. sig { returns(Integer) } @@ -402,7 +428,12 @@ module OpenAI class Keypress < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, + OpenAI::Internal::AnyHash + ) + end # The combination of keys the model is requesting to be pressed. This is an array # of strings, each representing a key. @@ -437,7 +468,12 @@ module OpenAI class Move < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Move, + OpenAI::Internal::AnyHash + ) + end # Specifies the event type. For a move action, this property is always set to # `move`. @@ -476,7 +512,12 @@ module OpenAI class Screenshot < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, + OpenAI::Internal::AnyHash + ) + end # Specifies the event type. For a screenshot action, this property is always set # to `screenshot`. @@ -499,7 +540,12 @@ module OpenAI class Scroll < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, + OpenAI::Internal::AnyHash + ) + end # The horizontal scroll distance. sig { returns(Integer) } @@ -564,7 +610,12 @@ module OpenAI class Type < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Type, + OpenAI::Internal::AnyHash + ) + end # The text to type. sig { returns(String) } @@ -593,7 +644,12 @@ module OpenAI class Wait < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::Action::Wait, + OpenAI::Internal::AnyHash + ) + end # Specifies the event type. For a wait action, this property is always set to # `wait`. @@ -627,7 +683,12 @@ module OpenAI class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck, + OpenAI::Internal::AnyHash + ) + end # The ID of the pending safety check.
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi index f5b233b2..c56f05d6 100644 --- a/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the computer call tool output. sig { returns(String) } @@ -129,7 +135,12 @@ module OpenAI class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck, + OpenAI::Internal::AnyHash + ) + end # The ID of the pending safety check. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi b/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi index 7961326d..fc13ccfe 100644 --- a/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi +++ b/rbi/openai/models/responses/response_computer_tool_call_output_screenshot.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + OpenAI::Internal::AnyHash + ) + end # Specifies the event type. For a computer screenshot, this property is always set # to `computer_screenshot`. diff --git a/rbi/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi index 341887c8..3cc852cc 100644 --- a/rbi/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/openai/models/responses/response_content_part_added_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseContentPartAddedEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part that was added. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi index e004eed3..6bf2b951 100644 --- a/rbi/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/openai/models/responses/response_content_part_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseContentPartDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part that is done. 
sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 9f9c2fc9..0e582266 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCreateParams, + OpenAI::Internal::AnyHash + ) + end # Text, image, or file inputs to the model, used to generate a response. # diff --git a/rbi/openai/models/responses/response_created_event.rbi b/rbi/openai/models/responses/response_created_event.rbi index 49ef465e..ab66dcdd 100644 --- a/rbi/openai/models/responses/response_created_event.rbi +++ b/rbi/openai/models/responses/response_created_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCreatedEvent, + OpenAI::Internal::AnyHash + ) + end # The response that was created. sig { returns(OpenAI::Responses::Response) } diff --git a/rbi/openai/models/responses/response_delete_params.rbi b/rbi/openai/models/responses/response_delete_params.rbi index b5adf324..6d3d35bd 100644 --- a/rbi/openai/models/responses/response_delete_params.rbi +++ b/rbi/openai/models/responses/response_delete_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseDeleteParams, + OpenAI::Internal::AnyHash + ) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/responses/response_error.rbi b/rbi/openai/models/responses/response_error.rbi index 1f52c7f1..b27ac5dd 100644 --- a/rbi/openai/models/responses/response_error.rbi +++ b/rbi/openai/models/responses/response_error.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class ResponseError < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ResponseError, OpenAI::Internal::AnyHash) + end # The error code for the response. sig { returns(OpenAI::Responses::ResponseError::Code::TaggedSymbol) } diff --git a/rbi/openai/models/responses/response_error_event.rbi b/rbi/openai/models/responses/response_error_event.rbi index 34c0a475..5c93890e 100644 --- a/rbi/openai/models/responses/response_error_event.rbi +++ b/rbi/openai/models/responses/response_error_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseErrorEvent, + OpenAI::Internal::AnyHash + ) + end # The error code. 
sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/responses/response_failed_event.rbi b/rbi/openai/models/responses/response_failed_event.rbi index f4a8afd0..8f8f2c19 100644 --- a/rbi/openai/models/responses/response_failed_event.rbi +++ b/rbi/openai/models/responses/response_failed_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFailedEvent, + OpenAI::Internal::AnyHash + ) + end # The response that failed. sig { returns(OpenAI::Responses::Response) } diff --git a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi index e8ba8841..3b6a2331 100644 --- a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFileSearchCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the output item that the file search call is initiated on. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi index 9c90a725..d93e58f0 100644 --- a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFileSearchCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the output item that the file search call is initiated on. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi index 9e44847c..6fd13a21 100644 --- a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFileSearchCallSearchingEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the output item that the file search call is initiated on.
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_file_search_tool_call.rbi b/rbi/openai/models/responses/response_file_search_tool_call.rbi index 9a7ca93a..c2b95318 100644 --- a/rbi/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/openai/models/responses/response_file_search_tool_call.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the file search tool call. sig { returns(String) } @@ -143,7 +149,12 @@ module OpenAI class Result < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseFileSearchToolCall::Result, + OpenAI::Internal::AnyHash + ) + end # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/rbi/openai/models/responses/response_format_text_json_schema_config.rbi b/rbi/openai/models/responses/response_format_text_json_schema_config.rbi index 0ec62279..0b27d712 100644 --- a/rbi/openai/models/responses/response_format_text_json_schema_config.rbi +++ b/rbi/openai/models/responses/response_format_text_json_schema_config.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::Internal::AnyHash + ) + end # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores # and dashes, with a maximum length of 64. diff --git a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi index 2932676c..3e24f3b5 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The function-call arguments delta that is added. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi index 76dec426..5ec5cb31 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The function-call arguments. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_function_tool_call.rbi b/rbi/openai/models/responses/response_function_tool_call.rbi index ed3d200e..5e98ba1a 100644 --- a/rbi/openai/models/responses/response_function_tool_call.rbi +++ b/rbi/openai/models/responses/response_function_tool_call.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Internal::AnyHash + ) + end # A JSON string of the arguments to pass to the function. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_function_tool_call_item.rbi b/rbi/openai/models/responses/response_function_tool_call_item.rbi index 75156efc..514a0568 100644 --- a/rbi/openai/models/responses/response_function_tool_call_item.rbi +++ b/rbi/openai/models/responses/response_function_tool_call_item.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunctionToolCall - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the function tool call. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/openai/models/responses/response_function_tool_call_output_item.rbi index f050d6bc..b3820b0d 100644 --- a/rbi/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/openai/models/responses/response_function_tool_call_output_item.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionToolCallOutputItem, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the function call tool output. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi index 2e535daa..d95d1cb9 100644 --- a/rbi/openai/models/responses/response_function_web_search.rbi +++ b/rbi/openai/models/responses/response_function_web_search.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the web search tool call. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_in_progress_event.rbi b/rbi/openai/models/responses/response_in_progress_event.rbi index 933f623b..3bb0ccb9 100644 --- a/rbi/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_in_progress_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInProgressEvent, + OpenAI::Internal::AnyHash + ) + end # The response that is in progress. sig { returns(OpenAI::Responses::Response) } diff --git a/rbi/openai/models/responses/response_incomplete_event.rbi b/rbi/openai/models/responses/response_incomplete_event.rbi index 047ad561..a30d27a8 100644 --- a/rbi/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/openai/models/responses/response_incomplete_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseIncompleteEvent, + OpenAI::Internal::AnyHash + ) + end # The response that was incomplete. sig { returns(OpenAI::Responses::Response) } diff --git a/rbi/openai/models/responses/response_input_audio.rbi b/rbi/openai/models/responses/response_input_audio.rbi index 0cacbacd..6b771d48 100644 --- a/rbi/openai/models/responses/response_input_audio.rbi +++ b/rbi/openai/models/responses/response_input_audio.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInputAudio < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputAudio, + OpenAI::Internal::AnyHash + ) + end # Base64-encoded audio data. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_input_file.rbi b/rbi/openai/models/responses/response_input_file.rbi index 3275a394..19ecb616 100644 --- a/rbi/openai/models/responses/response_input_file.rbi +++ b/rbi/openai/models/responses/response_input_file.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInputFile < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputFile, + OpenAI::Internal::AnyHash + ) + end # The type of the input item. Always `input_file`. sig { returns(Symbol) } diff --git a/rbi/openai/models/responses/response_input_image.rbi b/rbi/openai/models/responses/response_input_image.rbi index 182aa6d4..a553503b 100644 --- a/rbi/openai/models/responses/response_input_image.rbi +++ b/rbi/openai/models/responses/response_input_image.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInputImage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputImage, + OpenAI::Internal::AnyHash + ) + end # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. 
diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index 3d0c8ba2..2d59b644 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -30,7 +30,12 @@ module OpenAI class Message < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Internal::AnyHash + ) + end # A list of one or many input items to the model, containing different content # types. @@ -264,7 +269,12 @@ module OpenAI class ComputerCallOutput < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Internal::AnyHash + ) + end # The ID of the computer tool call that produced the output. sig { returns(String) } @@ -380,7 +390,12 @@ module OpenAI class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck, + OpenAI::Internal::AnyHash + ) + end # The ID of the pending safety check. sig { returns(String) } @@ -469,7 +484,12 @@ module OpenAI class FunctionCallOutput < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the function tool call generated by the model. sig { returns(String) } @@ -589,7 +609,12 @@ module OpenAI class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::ItemReference, + OpenAI::Internal::AnyHash + ) + end # The ID of the item to reference. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_input_message_item.rbi b/rbi/openai/models/responses/response_input_message_item.rbi index 25f40889..c1190de8 100644 --- a/rbi/openai/models/responses/response_input_message_item.rbi +++ b/rbi/openai/models/responses/response_input_message_item.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputMessageItem, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the message input. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_input_text.rbi b/rbi/openai/models/responses/response_input_text.rbi index 566e9627..fddfbd40 100644 --- a/rbi/openai/models/responses/response_input_text.rbi +++ b/rbi/openai/models/responses/response_input_text.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseInputText < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputText, + OpenAI::Internal::AnyHash + ) + end # The text input to the model. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index ef2ba10b..b819f00a 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -6,7 +6,13 @@ module OpenAI module Responses class ResponseItemList < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItemList, + OpenAI::Internal::AnyHash + ) + end # A list of items used to generate this response. sig do diff --git a/rbi/openai/models/responses/response_output_audio.rbi b/rbi/openai/models/responses/response_output_audio.rbi index bfcafd48..49472582 100644 --- a/rbi/openai/models/responses/response_output_audio.rbi +++ b/rbi/openai/models/responses/response_output_audio.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputAudio, + OpenAI::Internal::AnyHash + ) + end # Base64-encoded audio data from the model. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index 7f0f565c..8f37ebb9 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItemAddedEvent, + OpenAI::Internal::AnyHash + ) + end # The output item that was added. sig do diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index 94e4db56..ff21f27a 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItemDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The output item that was marked done. sig do diff --git a/rbi/openai/models/responses/response_output_message.rbi b/rbi/openai/models/responses/response_output_message.rbi index e807c5bc..8e057823 100644 --- a/rbi/openai/models/responses/response_output_message.rbi +++ b/rbi/openai/models/responses/response_output_message.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Internal::AnyHash + ) + end # The unique ID of the output message. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_output_refusal.rbi b/rbi/openai/models/responses/response_output_refusal.rbi index 0d0d1a26..ef08df31 100644 --- a/rbi/openai/models/responses/response_output_refusal.rbi +++ b/rbi/openai/models/responses/response_output_refusal.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputRefusal, + OpenAI::Internal::AnyHash + ) + end # The refusal explanation from the model. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi index cea49a6e..f73a5755 100644 --- a/rbi/openai/models/responses/response_output_text.rbi +++ b/rbi/openai/models/responses/response_output_text.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseOutputText < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText, + OpenAI::Internal::AnyHash + ) + end # The annotations of the text output. sig do @@ -87,7 +93,12 @@ module OpenAI class FileCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, + OpenAI::Internal::AnyHash + ) + end # The ID of the file. sig { returns(String) } @@ -128,7 +139,12 @@ module OpenAI class URLCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Internal::AnyHash + ) + end # The index of the last character of the URL citation in the message. sig { returns(Integer) } @@ -191,7 +207,12 @@ module OpenAI class FilePath < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::FilePath, + OpenAI::Internal::AnyHash + ) + end # The ID of the file. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_reasoning_item.rbi b/rbi/openai/models/responses/response_reasoning_item.rbi index d54abb0d..0317e535 100644 --- a/rbi/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/openai/models/responses/response_reasoning_item.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Internal::AnyHash + ) + end # The unique identifier of the reasoning content. sig { returns(String) } @@ -92,7 +98,12 @@ module OpenAI class Summary < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningItem::Summary, + OpenAI::Internal::AnyHash + ) + end # A short summary of the reasoning used by the model when generating the response.
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi index 260bed98..7e0103dc 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the item this summary part is associated with. sig { returns(String) } @@ -80,7 +86,12 @@ module OpenAI class Part < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + OpenAI::Internal::AnyHash + ) + end # The text of the summary part. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi index 036aaa0d..ecf52172 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the item this summary part is associated with. sig { returns(String) } @@ -80,7 +86,12 @@ module OpenAI class Part < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + OpenAI::Internal::AnyHash + ) + end # The text of the summary part. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi index 6fb92b4d..0367f592 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The text delta that was added to the summary. 
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi index a9f0d59c..89590e6b 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The ID of the item this summary text is associated with. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_refusal_delta_event.rbi b/rbi/openai/models/responses/response_refusal_delta_event.rbi index a89025e3..cde0e1ec 100644 --- a/rbi/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/openai/models/responses/response_refusal_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseRefusalDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part that the refusal text is added to. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_refusal_done_event.rbi b/rbi/openai/models/responses/response_refusal_done_event.rbi index 0100485a..db039d4f 100644 --- a/rbi/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/openai/models/responses/response_refusal_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseRefusalDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part in which the refusal text is finalized. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi index 95522e58..c1e80237 100644 --- a/rbi/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/openai/models/responses/response_retrieve_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseRetrieveParams, + OpenAI::Internal::AnyHash + ) + end # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information.
diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi index c235f425..d42d93ce 100644 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # A citation to a file. sig do @@ -105,7 +111,12 @@ module OpenAI class FileCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, + OpenAI::Internal::AnyHash + ) + end # The ID of the file. sig { returns(String) } @@ -146,7 +157,12 @@ module OpenAI class URLCitation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, + OpenAI::Internal::AnyHash + ) + end # The index of the last character of the URL citation in the message. sig { returns(Integer) } @@ -209,7 +225,12 @@ module OpenAI class FilePath < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath, + OpenAI::Internal::AnyHash + ) + end # The ID of the file. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_text_config.rbi b/rbi/openai/models/responses/response_text_config.rbi index ddd0d65c..5103891f 100644 --- a/rbi/openai/models/responses/response_text_config.rbi +++ b/rbi/openai/models/responses/response_text_config.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseTextConfig < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextConfig, + OpenAI::Internal::AnyHash + ) + end # An object specifying the format that the model must output. # diff --git a/rbi/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi index 5b4d13ac..661b88d1 100644 --- a/rbi/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_delta_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDeltaEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part that the text delta was added to. 
sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi index 1b05e949..2b142d10 100644 --- a/rbi/openai/models/responses/response_text_done_event.rbi +++ b/rbi/openai/models/responses/response_text_done_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDoneEvent, + OpenAI::Internal::AnyHash + ) + end # The index of the content part in which the text content is finalized. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_usage.rbi b/rbi/openai/models/responses/response_usage.rbi index e9e0c3bc..34f8734a 100644 --- a/rbi/openai/models/responses/response_usage.rbi +++ b/rbi/openai/models/responses/response_usage.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class ResponseUsage < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ResponseUsage, OpenAI::Internal::AnyHash) + end # The number of input tokens. sig { returns(Integer) } @@ -87,7 +90,12 @@ module OpenAI class InputTokensDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseUsage::InputTokensDetails, + OpenAI::Internal::AnyHash + ) + end # The number of tokens that were retrieved from the cache. # [More on prompt caching](https://platform.openai.com/docs/guides/prompt-caching). @@ -110,7 +118,12 @@ module OpenAI class OutputTokensDetails < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::ResponseUsage::OutputTokensDetails, + OpenAI::Internal::AnyHash + ) + end # The number of reasoning tokens. sig { returns(Integer) } diff --git a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi index 10e66da7..f090f369 100644 --- a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseWebSearchCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end # Unique ID for the output item associated with the web search call.
sig { returns(String) } diff --git a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi index 22a00bbf..c8fdaf5a 100644 --- a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseWebSearchCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end # Unique ID for the output item associated with the web search call. sig { returns(String) } diff --git a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi index 1d1b8d70..5f0b5d33 100644 --- a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseWebSearchCallSearchingEvent, + OpenAI::Internal::AnyHash + ) + end # Unique ID for the output item associated with the web search call. sig { returns(String) } diff --git a/rbi/openai/models/responses/tool_choice_function.rbi b/rbi/openai/models/responses/tool_choice_function.rbi index 44ea5976..e74411c8 100644 --- a/rbi/openai/models/responses/tool_choice_function.rbi +++ b/rbi/openai/models/responses/tool_choice_function.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module Responses class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceFunction, + OpenAI::Internal::AnyHash + ) + end # The name of the function to call. sig { returns(String) } diff --git a/rbi/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi index 1013d8f9..3851436c 100644 --- a/rbi/openai/models/responses/tool_choice_types.rbi +++ b/rbi/openai/models/responses/tool_choice_types.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ToolChoiceTypes, OpenAI::Internal::AnyHash) + end # The type of hosted tool the model should use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). diff --git a/rbi/openai/models/responses/web_search_tool.rbi b/rbi/openai/models/responses/web_search_tool.rbi index 7c952749..5d05f938 100644 --- a/rbi/openai/models/responses/web_search_tool.rbi +++ b/rbi/openai/models/responses/web_search_tool.rbi @@ -4,7 +4,10 @@ module OpenAI module Models module Responses class WebSearchTool < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Responses::WebSearchTool, OpenAI::Internal::AnyHash) + end # The type of the web search tool.
One of `web_search_preview` or # `web_search_preview_2025_03_11`. @@ -153,7 +156,12 @@ module OpenAI class UserLocation < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::Responses::WebSearchTool::UserLocation, + OpenAI::Internal::AnyHash + ) + end # The type of location approximation. Always `approximate`. sig { returns(Symbol) } diff --git a/rbi/openai/models/static_file_chunking_strategy.rbi b/rbi/openai/models/static_file_chunking_strategy.rbi index 2bfb9abc..a75f0b65 100644 --- a/rbi/openai/models/static_file_chunking_strategy.rbi +++ b/rbi/openai/models/static_file_chunking_strategy.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::StaticFileChunkingStrategy, OpenAI::Internal::AnyHash) + end # The number of tokens that overlap between chunks. The default value is `400`. # diff --git a/rbi/openai/models/static_file_chunking_strategy_object.rbi b/rbi/openai/models/static_file_chunking_strategy_object.rbi index 3e1ca623..7dfe6e52 100644 --- a/rbi/openai/models/static_file_chunking_strategy_object.rbi +++ b/rbi/openai/models/static_file_chunking_strategy_object.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::StaticFileChunkingStrategyObject, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::StaticFileChunkingStrategy) } attr_reader :static diff --git a/rbi/openai/models/static_file_chunking_strategy_object_param.rbi b/rbi/openai/models/static_file_chunking_strategy_object_param.rbi index ede12a4b..0197459c 100644 --- a/rbi/openai/models/static_file_chunking_strategy_object_param.rbi +++ b/rbi/openai/models/static_file_chunking_strategy_object_param.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::StaticFileChunkingStrategyObjectParam, + OpenAI::Internal::AnyHash + ) + end sig { returns(OpenAI::StaticFileChunkingStrategy) } attr_reader :static diff --git a/rbi/openai/models/upload.rbi b/rbi/openai/models/upload.rbi index 95c1b624..1c57dd2b 100644 --- a/rbi/openai/models/upload.rbi +++ b/rbi/openai/models/upload.rbi @@ -3,7 +3,7 @@ module OpenAI module Models class Upload < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = T.type_alias { T.any(OpenAI::Upload, OpenAI::Internal::AnyHash) } # The Upload unique identifier, which can be referenced in API endpoints. 
sig { returns(String) } diff --git a/rbi/openai/models/upload_cancel_params.rbi b/rbi/openai/models/upload_cancel_params.rbi index 283948ca..7f48aa91 100644 --- a/rbi/openai/models/upload_cancel_params.rbi +++ b/rbi/openai/models/upload_cancel_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::UploadCancelParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/upload_complete_params.rbi b/rbi/openai/models/upload_complete_params.rbi index a8770632..fc57184f 100644 --- a/rbi/openai/models/upload_complete_params.rbi +++ b/rbi/openai/models/upload_complete_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::UploadCompleteParams, OpenAI::Internal::AnyHash) + end # The ordered list of Part IDs. sig { returns(T::Array[String]) } diff --git a/rbi/openai/models/upload_create_params.rbi b/rbi/openai/models/upload_create_params.rbi index 43ab3774..d7e3500c 100644 --- a/rbi/openai/models/upload_create_params.rbi +++ b/rbi/openai/models/upload_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::UploadCreateParams, OpenAI::Internal::AnyHash) + end # The number of bytes in the file you are uploading. sig { returns(Integer) } diff --git a/rbi/openai/models/uploads/part_create_params.rbi b/rbi/openai/models/uploads/part_create_params.rbi index 9a8822d5..1b069db5 100644 --- a/rbi/openai/models/uploads/part_create_params.rbi +++ b/rbi/openai/models/uploads/part_create_params.rbi @@ -7,7 +7,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Uploads::PartCreateParams, OpenAI::Internal::AnyHash) + end # The chunk of bytes for this Part. sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } diff --git a/rbi/openai/models/uploads/upload_part.rbi b/rbi/openai/models/uploads/upload_part.rbi index a9f0b817..71d2d997 100644 --- a/rbi/openai/models/uploads/upload_part.rbi +++ b/rbi/openai/models/uploads/upload_part.rbi @@ -6,7 +6,10 @@ module OpenAI module Uploads class UploadPart < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::Uploads::UploadPart, OpenAI::Internal::AnyHash) + end # The upload Part unique identifier, which can be referenced in API endpoints. 
sig { returns(String) } diff --git a/rbi/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi index a4140933..1e5ec9ca 100644 --- a/rbi/openai/models/vector_store.rbi +++ b/rbi/openai/models/vector_store.rbi @@ -3,7 +3,8 @@ module OpenAI module Models class VectorStore < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::VectorStore, OpenAI::Internal::AnyHash) } # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -133,7 +134,10 @@ module OpenAI end class FileCounts < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStore::FileCounts, OpenAI::Internal::AnyHash) + end # The number of files that were cancelled. sig { returns(Integer) } @@ -216,7 +220,10 @@ module OpenAI end class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash) + end # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. diff --git a/rbi/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi index 38d38a30..674fc93d 100644 --- a/rbi/openai/models/vector_store_create_params.rbi +++ b/rbi/openai/models/vector_store_create_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreCreateParams, OpenAI::Internal::AnyHash) + end # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. @@ -126,7 +129,13 @@ module OpenAI end class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. 
diff --git a/rbi/openai/models/vector_store_delete_params.rbi b/rbi/openai/models/vector_store_delete_params.rbi index bb86910b..e7cf7db1 100644 --- a/rbi/openai/models/vector_store_delete_params.rbi +++ b/rbi/openai/models/vector_store_delete_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreDeleteParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/vector_store_deleted.rbi b/rbi/openai/models/vector_store_deleted.rbi index 3236938a..4967947f 100644 --- a/rbi/openai/models/vector_store_deleted.rbi +++ b/rbi/openai/models/vector_store_deleted.rbi @@ -3,7 +3,10 @@ module OpenAI module Models class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreDeleted, OpenAI::Internal::AnyHash) + end sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/models/vector_store_list_params.rbi b/rbi/openai/models/vector_store_list_params.rbi index 13a9b81d..4ce80ba7 100644 --- a/rbi/openai/models/vector_store_list_params.rbi +++ b/rbi/openai/models/vector_store_list_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreListParams, OpenAI::Internal::AnyHash) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. 
For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/vector_store_retrieve_params.rbi b/rbi/openai/models/vector_store_retrieve_params.rbi index c96ace2f..337cd6e5 100644 --- a/rbi/openai/models/vector_store_retrieve_params.rbi +++ b/rbi/openai/models/vector_store_retrieve_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreRetrieveParams, OpenAI::Internal::AnyHash) + end sig do params(request_options: OpenAI::RequestOptions::OrHash).returns( diff --git a/rbi/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi index 97e4d691..20dd60cc 100644 --- a/rbi/openai/models/vector_store_search_params.rbi +++ b/rbi/openai/models/vector_store_search_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreSearchParams, OpenAI::Internal::AnyHash) + end # A query string for a search sig { returns(T.any(String, T::Array[String])) } @@ -146,7 +149,13 @@ module OpenAI end class RankingOptions < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreSearchParams::RankingOptions, + OpenAI::Internal::AnyHash + ) + end sig do returns( diff --git a/rbi/openai/models/vector_store_search_response.rbi b/rbi/openai/models/vector_store_search_response.rbi index bca7112b..05bfc3de 100644 --- a/rbi/openai/models/vector_store_search_response.rbi +++ b/rbi/openai/models/vector_store_search_response.rbi @@ -3,7 +3,13 @@ module OpenAI module Models class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::VectorStoreSearchResponse, + OpenAI::Internal::AnyHash + ) + end # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -97,7 +103,13 @@ module OpenAI end class Content < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::VectorStoreSearchResponse::Content, + OpenAI::Internal::AnyHash + ) + end # The text content returned from search. sig { returns(String) } diff --git a/rbi/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi index 1bc41e32..1d755b92 100644 --- a/rbi/openai/models/vector_store_update_params.rbi +++ b/rbi/openai/models/vector_store_update_params.rbi @@ -6,7 +6,10 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreUpdateParams, OpenAI::Internal::AnyHash) + end # The expiration policy for a vector store. 
sig { returns(T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter)) } @@ -73,7 +76,13 @@ module OpenAI end class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreUpdateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end # Anchor timestamp after which the expiration policy applies. Supported anchors: # `last_active_at`. diff --git a/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi b/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi index 018b178f..dbee23e0 100644 --- a/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_cancel_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileBatchCancelParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/openai/models/vector_stores/file_batch_create_params.rbi index 99ae5367..3ac858b8 100644 --- a/rbi/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileBatchCreateParams, + OpenAI::Internal::AnyHash + ) + end # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # the vector store should use. 
Useful for tools like `file_search` that can access diff --git a/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi index 92c9e94d..036aa011 100644 --- a/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileBatchListFilesParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi index 9269b441..19120d57 100644 --- a/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_retrieve_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileBatchRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_content_params.rbi b/rbi/openai/models/vector_stores/file_content_params.rbi index 808e189c..91dccd3c 100644 --- a/rbi/openai/models/vector_stores/file_content_params.rbi +++ b/rbi/openai/models/vector_stores/file_content_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileContentParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_content_response.rbi b/rbi/openai/models/vector_stores/file_content_response.rbi index c0c3ab04..1a363bd6 100644 --- a/rbi/openai/models/vector_stores/file_content_response.rbi +++ b/rbi/openai/models/vector_stores/file_content_response.rbi @@ -4,7 +4,13 @@ module OpenAI module Models module VectorStores class FileContentResponse < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::Models::VectorStores::FileContentResponse, + OpenAI::Internal::AnyHash + ) + end # The text content sig { returns(T.nilable(String)) } diff --git a/rbi/openai/models/vector_stores/file_create_params.rbi b/rbi/openai/models/vector_stores/file_create_params.rbi index 2a56c040..b98a191b 100644 --- a/rbi/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_create_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileCreateParams, + OpenAI::Internal::AnyHash + ) + end # A [File](https://platform.openai.com/docs/api-reference/files) ID that the # vector store should use. 
Useful for tools like `file_search` that can access diff --git a/rbi/openai/models/vector_stores/file_delete_params.rbi b/rbi/openai/models/vector_stores/file_delete_params.rbi index c93454bd..28f556ed 100644 --- a/rbi/openai/models/vector_stores/file_delete_params.rbi +++ b/rbi/openai/models/vector_stores/file_delete_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileDeleteParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_list_params.rbi b/rbi/openai/models/vector_stores/file_list_params.rbi index f3697d8c..87c16199 100644 --- a/rbi/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/openai/models/vector_stores/file_list_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileListParams, + OpenAI::Internal::AnyHash + ) + end # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, diff --git a/rbi/openai/models/vector_stores/file_retrieve_params.rbi b/rbi/openai/models/vector_stores/file_retrieve_params.rbi index 5904b6a0..02669095 100644 --- a/rbi/openai/models/vector_stores/file_retrieve_params.rbi +++ b/rbi/openai/models/vector_stores/file_retrieve_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileRetrieveParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/file_update_params.rbi b/rbi/openai/models/vector_stores/file_update_params.rbi index 2a92b688..c8d7d898 100644 --- a/rbi/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/openai/models/vector_stores/file_update_params.rbi @@ -7,7 +7,13 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::FileUpdateParams, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :vector_store_id diff --git a/rbi/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi index e75ae730..7cf4a149 100644 --- a/rbi/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file.rbi @@ -6,7 +6,13 @@ module OpenAI module VectorStores class VectorStoreFile < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::VectorStoreFile, + OpenAI::Internal::AnyHash + ) + end # The identifier, which can be referenced in API endpoints. 
sig { returns(String) } @@ -173,7 +179,12 @@ module OpenAI class LastError < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::VectorStores::VectorStoreFile::LastError, + OpenAI::Internal::AnyHash + ) + end # One of `server_error` or `rate_limit_exceeded`. sig do diff --git a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi index 1dc7f034..e464e984 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_batch.rbi @@ -6,7 +6,13 @@ module OpenAI module VectorStores class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::VectorStoreFileBatch, + OpenAI::Internal::AnyHash + ) + end # The identifier, which can be referenced in API endpoints. sig { returns(String) } @@ -100,7 +106,12 @@ module OpenAI class FileCounts < OpenAI::Internal::Type::BaseModel OrHash = - T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + T.type_alias do + T.any( + OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, + OpenAI::Internal::AnyHash + ) + end # The number of files that were cancelled. sig { returns(Integer) } diff --git a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi index 3317327e..a8813c5b 100644 --- a/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file_deleted.rbi @@ -6,7 +6,13 @@ module OpenAI module VectorStores class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStores::VectorStoreFileDeleted, + OpenAI::Internal::AnyHash + ) + end sig { returns(String) } attr_accessor :id diff --git a/rbi/openai/request_options.rbi b/rbi/openai/request_options.rbi index 459f3a88..144d5a33 100644 --- a/rbi/openai/request_options.rbi +++ b/rbi/openai/request_options.rbi @@ -7,7 +7,8 @@ module OpenAI # When making a request, you can pass an actual {RequestOptions} instance, or # simply pass a Hash with symbol keys matching the attributes on this class. class RequestOptions < OpenAI::Internal::Type::BaseModel - OrHash = T.type_alias { T.any(T.self_type, OpenAI::Internal::AnyHash) } + OrHash = + T.type_alias { T.any(OpenAI::RequestOptions, OpenAI::Internal::AnyHash) } # @api private sig { params(opts: OpenAI::RequestOptions::OrHash).void } diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs index cd18cd99..8dc7f62a 100644 --- a/sig/openai/internal.rbs +++ b/sig/openai/internal.rbs @@ -1,5 +1,7 @@ module OpenAI module Internal + extend OpenAI::Internal::Util::SorbetRuntimeSupport + OMIT: Object end end diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index 3b3e9b44..594f2d82 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -2,6 +2,8 @@ module OpenAI module Internal module Transport class BaseClient + extend OpenAI::Internal::Util::SorbetRuntimeSupport + type request_components = { method: Symbol, @@ -20,7 +22,6 @@ module OpenAI model: OpenAI::Internal::Type::Converter::input?, options: OpenAI::request_opts?
} - type request_input = { method: Symbol, diff --git a/sig/openai/internal/transport/pooled_net_requester.rbs b/sig/openai/internal/transport/pooled_net_requester.rbs index 87b79180..038e5f19 100644 --- a/sig/openai/internal/transport/pooled_net_requester.rbs +++ b/sig/openai/internal/transport/pooled_net_requester.rbs @@ -2,6 +2,8 @@ module OpenAI module Internal module Transport class PooledNetRequester + extend OpenAI::Internal::Util::SorbetRuntimeSupport + type request = { method: Symbol, diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 80fcc2a2..265aa8da 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -9,7 +9,7 @@ module OpenAI | ^-> OpenAI::Internal::Type::Converter::input | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec - ) -> instance + ) -> self def ===: (top other) -> bool diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 177b22b8..93def04e 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -3,10 +3,13 @@ module OpenAI module Type class BaseModel extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport type known_field = { mode: (:coerce | :dump)?, required: bool, nilable: bool } + def self.inherited: (self child) -> void + def self.known_fields: -> ::Hash[Symbol, (OpenAI::Internal::Type::BaseModel::known_field & { type_fn: (^-> OpenAI::Internal::Type::Converter::input) })] @@ -58,10 +61,10 @@ module OpenAI def self.coerce: ( OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state - ) -> (instance | top) + ) -> (self | top) def self.dump: ( - instance | top value, + self | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[top, top] | top) diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs index 0e4c56aa..5470abbf 100644 --- a/sig/openai/internal/type/converter.rbs +++ b/sig/openai/internal/type/converter.rbs @@ -2,6 +2,8 @@ module OpenAI module Internal module Type module Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport + type input = OpenAI::Internal::Type::Converter | Class type coerce_state = diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 897ae9eb..69f1c6bd 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -3,6 +3,7 @@ module OpenAI module Type module Enum include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index 26f65397..af0022a0 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -9,7 +9,7 @@ module OpenAI | ^-> OpenAI::Internal::Type::Converter::input | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec - ) -> instance + ) -> self def ===: (top other) -> bool diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 86c308a6..57d122c3 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -3,6 +3,7 @@ module OpenAI module Type module Union include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport private 
def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Internal::Type::Converter::input)]] diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index 725c664b..c8416c7c 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -1,6 +1,8 @@ module OpenAI module Internal module Util + extend OpenAI::Internal::Util::SorbetRuntimeSupport + def self?.monotonic_secs: -> Float def self?.arch: -> String @@ -155,6 +157,17 @@ module OpenAI def self?.decode_sse: ( Enumerable[String] lines ) -> Enumerable[OpenAI::Internal::Util::server_sent_event] + + module SorbetRuntimeSupport + class MissingSorbetRuntimeError < ::RuntimeError + end + + private def sorbet_runtime_constants: -> ::Hash[Symbol, top] + + def const_missing: (Symbol name) -> void + + def define_sorbet_constant!: (Symbol name) { -> top } -> void + end end end end diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index b084b1b5..85ec0868 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -3,14 +3,9 @@ require_relative "test_helper" class OpenAITest < Minitest::Test + extend Minitest::Serial include WebMock::API - class << self - def test_order = :random - - def run_one_method(...) = Minitest::Runnable.run_one_method(...) - end - def before_all super WebMock.enable! diff --git a/test/openai/internal/sorbet_runtime_support_test.rb b/test/openai/internal/sorbet_runtime_support_test.rb new file mode 100644 index 00000000..1c48a5ff --- /dev/null +++ b/test/openai/internal/sorbet_runtime_support_test.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::SorbetRuntimeSupportTest < Minitest::Test + i_suck_and_my_tests_are_order_dependent! + + module E + extend OpenAI::Internal::Type::Enum + + define_sorbet_constant!(:TaggedSymbol) { 1 } + end + + module U + extend OpenAI::Internal::Type::Union + + define_sorbet_constant!(:Variants) { 2 } + end + + class M < OpenAI::Internal::Type::BaseModel + define_sorbet_constant!(:OrHash) { 3 } + end + + def test_nil_aliases + err = OpenAI::Internal::Util::SorbetRuntimeSupport::MissingSorbetRuntimeError + + assert_raises(err) { OpenAI::Internal::AnyHash } + assert_raises(err) { OpenAI::Internal::Type::Converter::Input } + assert_raises(err) { OpenAI::Internal::Type::Converter::CoerceState } + assert_raises(err) { OpenAI::Internal::Type::Converter::DumpState } + assert_raises(err) { OpenAI::Internal::Type::BaseModel::KnownField } + assert_raises(err) { OpenAI::Internal::Util::ParsedUri } + assert_raises(err) { OpenAI::Internal::Util::ServerSentEvent } + assert_raises(err) { OpenAI::Internal::Transport::BaseClient::RequestComponents } + assert_raises(err) { OpenAI::Internal::Transport::BaseClient::RequestInput } + assert_raises(err) { OpenAI::Internal::Transport::PooledNetRequester::Request } + assert_raises(err) { E::TaggedSymbol } + assert_raises(err) { U::Variants } + assert_raises(err) { M::OrHash } + end + + def test_stubbed_aliases + Kernel.instance_eval { const_set(:T, nil) } + + assert_equal(1, E::TaggedSymbol) + assert_equal(2, U::Variants) + assert_equal(3, M::OrHash) + end +end diff --git a/test/openai/test_helper.rb b/test/openai/test_helper.rb index 98b833aa..0878fc76 100644 --- a/test/openai/test_helper.rb +++ b/test/openai/test_helper.rb @@ -52,6 +52,12 @@ def initialize end end +module Minitest::Serial + def test_order = :random + + def run_one_method(...) = Minitest::Runnable.run_one_method(...) 
+end + class Minitest::Test include Minitest::Hooks From f76eb0baec50cb3f213e69ba729e745cf515b9f2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 20:25:12 +0000 Subject: [PATCH 186/295] chore: fix misc linting / minor issues --- .rubocop.yml | 7 ++++++- Rakefile | 2 +- lib/openai.rb | 2 ++ lib/openai/errors.rb | 2 +- rbi/openai/errors.rbi | 2 +- sig/openai/internal/type/array_of.rbs | 2 +- sig/openai/internal/type/base_model.rbs | 6 +++--- sig/openai/internal/type/base_page.rbs | 2 +- sig/openai/internal/type/hash_of.rbs | 2 +- 9 files changed, 17 insertions(+), 10 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index 42345dee..d9007489 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -8,7 +8,7 @@ AllCops: - "bin/*" NewCops: enable SuggestExtensions: false - TargetRubyVersion: 3.1.0 + TargetRubyVersion: 3.2.0 # Whether MFA is required or not should be left to the token configuration. Gemspec/RequireMFA: @@ -112,6 +112,8 @@ Metrics/AbcSize: Metrics/BlockLength: AllowedPatterns: - assert_pattern + - type_alias + - define_sorbet_constant! Exclude: - "**/*.rbi" @@ -182,6 +184,9 @@ Style/ClassAndModuleChildren: Exclude: - "test/**/*" +Style/CommentAnnotation: + Enabled: false + # We should go back and add these docs, but ignore for now. Style/Documentation: Enabled: false diff --git a/Rakefile b/Rakefile index 8361a8b9..fe1523b8 100644 --- a/Rakefile +++ b/Rakefile @@ -41,7 +41,7 @@ desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] - rubocop = %w[rubocop --fail-level E] + rubocop = %w[rubocop] rubocop += %w[--format github] if ENV.key?("CI") # some lines cannot be shortened diff --git a/lib/openai.rb b/lib/openai.rb index 99a75f6c..ef64bfc0 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true # Standard libraries. +# rubocop:disable Lint/RedundantRequireStatement require "English" require "cgi" require "date" @@ -15,6 +16,7 @@ require "stringio" require "time" require "uri" +# rubocop:enable Lint/RedundantRequireStatement # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. 
diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb index 6f159c87..1e75256e 100644 --- a/lib/openai/errors.rb +++ b/lib/openai/errors.rb @@ -120,7 +120,7 @@ class APIStatusError < OpenAI::Errors::APIError # @param response [nil] # @param message [String, nil] # - # @return [OpenAI::Errors::APIStatusError] + # @return [self] def self.for(url:, status:, body:, request:, response:, message: nil) kwargs = { url: url, diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 4fa76a30..4ade8ef1 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -122,7 +122,7 @@ module OpenAI request: NilClass, response: NilClass, message: T.nilable(String) - ).returns(T.self_type) + ).returns(T.attached_class) end def self.for(url:, status:, body:, request:, response:, message: nil) end diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 265aa8da..80fcc2a2 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -9,7 +9,7 @@ module OpenAI | ^-> OpenAI::Internal::Type::Converter::input | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec - ) -> self + ) -> instance def ===: (top other) -> bool diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 93def04e..0e282bcd 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -61,10 +61,10 @@ module OpenAI def self.coerce: ( OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state - ) -> (self | top) + ) -> (instance | top) def self.dump: ( - self | top value, + instance | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[top, top] | top) @@ -84,7 +84,7 @@ module OpenAI def to_yaml: (*top a) -> String - def initialize: (?::Hash[Symbol, top] | self data) -> void + def initialize: (?::Hash[Symbol, top] | instance data) -> void def self.inspect: (?depth: Integer) -> String diff --git a/sig/openai/internal/type/base_page.rbs b/sig/openai/internal/type/base_page.rbs index 216a4e0b..b04062a6 100644 --- a/sig/openai/internal/type/base_page.rbs +++ b/sig/openai/internal/type/base_page.rbs @@ -4,7 +4,7 @@ module OpenAI module BasePage[Elem] def next_page?: -> bool - def next_page: -> self + def next_page: -> instance def auto_paging_each: { (Elem arg0) -> void } -> void diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index af0022a0..26f65397 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -9,7 +9,7 @@ module OpenAI | ^-> OpenAI::Internal::Type::Converter::input | OpenAI::Internal::Type::Converter::input type_info, ?::Hash[Symbol, top] spec - ) -> self + ) -> instance def ===: (top other) -> bool From eb285626594b08e0755a4661496cf9e0cb00a6bd Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 21:24:21 +0000 Subject: [PATCH 187/295] feat: expose recursive `#to_h` conversion --- lib/openai/internal/type/base_model.rb | 71 ++++++++++++++++--------- rbi/openai/internal/type/base_model.rbi | 29 ++++++---- sig/openai/internal/type/base_model.rbs | 11 ++-- 3 files changed, 71 insertions(+), 40 deletions(-) diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index dc28182f..8b5b608d 100644 --- a/lib/openai/internal/type/base_model.rb +++ 
b/lib/openai/internal/type/base_model.rb @@ -306,6 +306,39 @@ def dump(value, state:) end end + class << self + # @api private + # + # @param model [OpenAI::Internal::Type::BaseModel] + # @param convert [Boolean] + # + # @return [Hash{Symbol=>Object}] + def recursively_to_h(model, convert:) + rec = ->(x) do + case x + in OpenAI::Internal::Type::BaseModel + if convert + fields = x.class.known_fields + x.to_h.to_h do |key, val| + [key, rec.call(fields.key?(key) ? x.public_send(key) : val)] + rescue OpenAI::Errors::ConversionError + [key, rec.call(val)] + end + else + rec.call(x.to_h) + end + in Hash + x.transform_values(&rec) + in Array + x.map(&rec) + else + x + end + end + rec.call(model) + end + end + # @api public # # Returns the raw value associated with the given key, if found. Otherwise, nil is @@ -342,6 +375,14 @@ def to_h = @data alias_method :to_hash, :to_h + # @api public + # + # In addition to the behaviour of `#to_h`, this method will recursively call + # `#to_h` on nested models. + # + # @return [Hash{Symbol=>Object}] + def deep_to_h = self.class.recursively_to_h(@data, convert: false) + # @param keys [Array, nil] # # @return [Hash{Symbol=>Object}] @@ -357,29 +398,6 @@ def deconstruct_keys(keys) .to_h end - class << self - # @api private - # - # @param model [OpenAI::Internal::Type::BaseModel] - # - # @return [Hash{Symbol=>Object}] - def walk(model) - walk = ->(x) do - case x - in OpenAI::Internal::Type::BaseModel - walk.call(x.to_h) - in Hash - x.transform_values(&walk) - in Array - x.map(&walk) - else - x - end - end - walk.call(model) - end - end - # @api public # # @param a [Object] @@ -425,12 +443,15 @@ def inspect(depth: 0) # @api public # # @return [String] - def to_s = self.class.walk(@data).to_s + def to_s = deep_to_h.to_s # @api private # # @return [String] - def inspect = "#<#{self.class}:0x#{object_id.to_s(16)} #{self}>" + def inspect + converted = self.class.recursively_to_h(self, convert: true) + "#<#{self.class}:0x#{object_id.to_s(16)} #{converted}>" + end define_sorbet_constant!(:KnownField) do T.type_alias { {mode: T.nilable(Symbol), required: T::Boolean, nilable: T::Boolean} } diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi index 3357deae..15fabf91 100644 --- a/rbi/openai/internal/type/base_model.rbi +++ b/rbi/openai/internal/type/base_model.rbi @@ -192,6 +192,18 @@ module OpenAI end end + class << self + # @api private + sig do + params( + model: OpenAI::Internal::Type::BaseModel, + convert: T::Boolean + ).returns(OpenAI::Internal::AnyHash) + end + def recursively_to_h(model, convert:) + end + end + # Returns the raw value associated with the given key, if found. Otherwise, nil is # returned. # @@ -226,6 +238,12 @@ module OpenAI def to_hash end + # In addition to the behaviour of `#to_h`, this method will recursively call + # `#to_h` on nested models. 
+ sig { overridable.returns(OpenAI::Internal::AnyHash) } + def deep_to_h + end + sig do params(keys: T.nilable(T::Array[Symbol])).returns( OpenAI::Internal::AnyHash @@ -234,17 +252,6 @@ module OpenAI def deconstruct_keys(keys) end - class << self - # @api private - sig do - params(model: OpenAI::Internal::Type::BaseModel).returns( - OpenAI::Internal::AnyHash - ) - end - def walk(model) - end - end - sig { params(a: T.anything).returns(String) } def to_json(*a) end diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 0e282bcd..67646785 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -68,17 +68,20 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[top, top] | top) + def self.recursively_to_h: ( + OpenAI::Internal::Type::BaseModel model, + convert: bool + ) -> ::Hash[Symbol, top] + def []: (Symbol key) -> top? def to_h: -> ::Hash[Symbol, top] alias to_hash to_h - def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] + def deep_to_h: -> ::Hash[Symbol, top] - def self.walk: ( - OpenAI::Internal::Type::BaseModel model - ) -> ::Hash[Symbol, top] + def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] def to_json: (*top a) -> String From 8e899a9e6155364856a6e27067dba7013d12e027 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 22:19:57 +0000 Subject: [PATCH 188/295] feat: expose base client options as read only attributes --- lib/openai/internal/transport/base_client.rb | 35 ++++++++++++++++--- rbi/openai/internal/transport/base_client.rbi | 21 +++++++++++ sig/openai/internal/transport/base_client.rbs | 14 ++++++++ 3 files changed, 65 insertions(+), 5 deletions(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index db914a5c..8e6f703c 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -153,6 +153,27 @@ def reap_connection!(status, stream:) end end + # @return [URI::Generic] + attr_reader :base_url + + # @return [Float] + attr_reader :timeout + + # @return [Integer] + attr_reader :max_retries + + # @return [Float] + attr_reader :initial_retry_delay + + # @return [Float] + attr_reader :max_retry_delay + + # @return [Hash{String=>String}] + attr_reader :headers + + # @return [String, nil] + attr_reader :idempotency_header + # @api private # @return [OpenAI::Internal::Transport::PooledNetRequester] attr_reader :requester @@ -184,10 +205,11 @@ def initialize( }, headers ) - @base_url = OpenAI::Internal::Util.parse_uri(base_url) + @base_url_components = OpenAI::Internal::Util.parse_uri(base_url) + @base_url = OpenAI::Internal::Util.unparse_uri(@base_url_components) @idempotency_header = idempotency_header&.to_s&.downcase - @max_retries = max_retries @timeout = timeout + @max_retries = max_retries @initial_retry_delay = initial_retry_delay @max_retry_delay = max_retry_delay end @@ -278,10 +300,14 @@ def initialize( OpenAI::Internal::Util.deep_merge(*[req[:body], opts[:extra_body]].compact) end + url = OpenAI::Internal::Util.join_parsed_uri( + @base_url_components, + {**req, path: path, query: query} + ) headers, encoded = OpenAI::Internal::Util.encode_content(headers, body) { method: method, - url: OpenAI::Internal::Util.join_parsed_uri(@base_url, {**req, path: path, query: query}), + url: url, headers: headers, body: encoded, max_retries: 
opts.fetch(:max_retries, @max_retries), @@ -475,8 +501,7 @@ def request(req) # @return [String] def inspect # rubocop:disable Layout/LineLength - base_url = OpenAI::Internal::Util.unparse_uri(@base_url) - "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" + "#<#{self.class.name}:0x#{object_id.to_s(16)} base_url=#{@base_url} max_retries=#{@max_retries} timeout=#{@timeout}>" # rubocop:enable Layout/LineLength end diff --git a/rbi/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi index b66e1262..d41cf4f8 100644 --- a/rbi/openai/internal/transport/base_client.rbi +++ b/rbi/openai/internal/transport/base_client.rbi @@ -122,6 +122,27 @@ module OpenAI end end + sig { returns(URI::Generic) } + attr_reader :base_url + + sig { returns(Float) } + attr_reader :timeout + + sig { returns(Integer) } + attr_reader :max_retries + + sig { returns(Float) } + attr_reader :initial_retry_delay + + sig { returns(Float) } + attr_reader :max_retry_delay + + sig { returns(T::Hash[String, String]) } + attr_reader :headers + + sig { returns(T.nilable(String)) } + attr_reader :idempotency_header + # @api private sig { returns(OpenAI::Internal::Transport::PooledNetRequester) } attr_reader :requester diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index 594f2d82..109af718 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -56,6 +56,20 @@ module OpenAI stream: Enumerable[String]? ) -> void + attr_reader base_url: URI::Generic + + attr_reader timeout: Float + + attr_reader max_retries: Integer + + attr_reader initial_retry_delay: Float + + attr_reader max_retry_delay: Float + + attr_reader headers: ::Hash[String, String] + + attr_reader idempotency_header: String? 
+ # @api private attr_reader requester: OpenAI::Internal::Transport::PooledNetRequester From d59b4b269deec4b6ea5cd65cd0f201d5435e3903 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 14 May 2025 21:27:44 +0000 Subject: [PATCH 189/295] feat: bump default connection pool size limit to minimum of 99 --- lib/openai/client.rb | 8 ++++---- lib/openai/internal/transport/pooled_net_requester.rb | 4 +++- rbi/openai/internal/transport/pooled_net_requester.rbi | 6 +++++- sig/openai/internal/transport/pooled_net_requester.rbs | 2 ++ 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/lib/openai/client.rb b/lib/openai/client.rb index f1ae51e6..4673f743 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -104,10 +104,10 @@ def initialize( organization: ENV["OPENAI_ORG_ID"], project: ENV["OPENAI_PROJECT_ID"], base_url: ENV["OPENAI_BASE_URL"], - max_retries: OpenAI::Client::DEFAULT_MAX_RETRIES, - timeout: OpenAI::Client::DEFAULT_TIMEOUT_IN_SECONDS, - initial_retry_delay: OpenAI::Client::DEFAULT_INITIAL_RETRY_DELAY, - max_retry_delay: OpenAI::Client::DEFAULT_MAX_RETRY_DELAY + max_retries: self.class::DEFAULT_MAX_RETRIES, + timeout: self.class::DEFAULT_TIMEOUT_IN_SECONDS, + initial_retry_delay: self.class::DEFAULT_INITIAL_RETRY_DELAY, + max_retry_delay: self.class::DEFAULT_MAX_RETRY_DELAY ) base_url ||= "https://api.openai.com/v1" diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 2c93cc86..7891c279 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -11,6 +11,8 @@ class PooledNetRequester # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 KEEP_ALIVE_TIMEOUT = 30 + DEFAULT_MAX_CONNECTIONS = [Etc.nprocessors, 99].max + class << self # @api private # @@ -184,7 +186,7 @@ def execute(request) # @api private # # @param size [Integer] - def initialize(size: Etc.nprocessors) + def initialize(size: self.class::DEFAULT_MAX_CONNECTIONS) @mutex = Mutex.new @size = size @pools = {} diff --git a/rbi/openai/internal/transport/pooled_net_requester.rbi b/rbi/openai/internal/transport/pooled_net_requester.rbi index f8eeecf5..245308c5 100644 --- a/rbi/openai/internal/transport/pooled_net_requester.rbi +++ b/rbi/openai/internal/transport/pooled_net_requester.rbi @@ -22,6 +22,8 @@ module OpenAI # https://github.com/golang/go/blob/c8eced8580028328fde7c03cbfcb720ce15b2358/src/net/http/transport.go#L49 KEEP_ALIVE_TIMEOUT = 30 + DEFAULT_MAX_CONNECTIONS = T.let(T.unsafe(nil), Integer) + class << self # @api private sig { params(url: URI::Generic).returns(Net::HTTP) } @@ -66,7 +68,9 @@ module OpenAI # @api private sig { params(size: Integer).returns(T.attached_class) } - def self.new(size: Etc.nprocessors) + def self.new( + size: OpenAI::Internal::Transport::PooledNetRequester::DEFAULT_MAX_CONNECTIONS + ) end end end diff --git a/sig/openai/internal/transport/pooled_net_requester.rbs b/sig/openai/internal/transport/pooled_net_requester.rbs index 038e5f19..1719d8fd 100644 --- a/sig/openai/internal/transport/pooled_net_requester.rbs +++ b/sig/openai/internal/transport/pooled_net_requester.rbs @@ -15,6 +15,8 @@ module OpenAI KEEP_ALIVE_TIMEOUT: 30 + DEFAULT_MAX_CONNECTIONS: Integer + def self.connect: (URI::Generic url) -> top def self.calibrate_socket_timeout: (top conn, Float deadline) -> void From 
323d1774936ed8a0ce9f9adffbd7a8d2917f253b Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 14 May 2025 21:33:38 +0000
Subject: [PATCH 190/295] docs: rewrite much of README.md for readability

---
 README.md                              | 185 +++++++++++++++----------
 lib/openai/internal/type/base_model.rb |   8 ++
 2 files changed, 123 insertions(+), 70 deletions(-)

diff --git a/README.md b/README.md
index 4fc254e3..74bbb8c0 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # OpenAI Ruby API library
 
-The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.2.0+ application.
+The OpenAI Ruby library provides convenient access to the OpenAI REST API from any Ruby 3.2.0+ application. It ships with comprehensive types & docstrings in Yard, RBS, and RBI – [see below](https://github.com/openai/openai-ruby#Sorbet) for usage with Sorbet. The standard library's `net/http` is used as the HTTP transport, with connection pooling via the `connection_pool` gem.
 
 ## Documentation
 
@@ -38,17 +38,19 @@ chat_completion = openai.chat.completions.create(
 puts(chat_completion)
 ```
 
-## Sorbet
-
-This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`.
+### Streaming
 
-When using sorbet, it is recommended to use model classes as below. This provides stronger type checking and tooling integration.
+We provide support for streaming responses using Server-Sent Events (SSE).
 
 ```ruby
-openai.chat.completions.create(
-  messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")],
+stream = openai.chat.completions.stream_raw(
+  messages: [{role: "user", content: "Say this is a test"}],
   model: :"gpt-4.1"
 )
+
+stream.each do |completion|
+  puts(completion)
+end
 ```
 
 ### Pagination
 
@@ -70,47 +72,52 @@ page.auto_paging_each do |job|
 end
 ```
 
-### Streaming
-
-We provide support for streaming responses using Server-Sent Events (SSE).
+Alternatively, you can use the `#next_page?` and `#next_page` methods for more granular control when working with pages.
 
 ```ruby
-stream = openai.chat.completions.stream_raw(
-  messages: [{role: "user", content: "Say this is a test"}],
-  model: :"gpt-4.1"
-)
-
-stream.each do |completion|
-  puts(completion)
+if page.next_page?
+  new_page = page.next_page
+  puts(new_page.data[0].id)
 end
 ```
 
 ### File uploads
 
-Request parameters that correspond to file uploads can be passed as `StringIO`, or a [`Pathname`](https://rubyapi.org/3.2/o/pathname) instance.
+Request parameters that correspond to file uploads can be passed as raw contents, a [`Pathname`](https://rubyapi.org/3.2/o/pathname) instance, [`StringIO`](https://rubyapi.org/3.2/o/stringio), or more.
```ruby require "pathname" -# using `Pathname`, the file will be lazily read, without reading everything in to memory +# Use `Pathname` to send the filename and/or avoid paging a large file into memory: file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune") -file = File.read("input.jsonl") -# using `StringIO`, useful if you already have the data in memory -file_object = openai.files.create(file: StringIO.new(file), purpose: "fine-tune") +# Alternatively, pass file contents or a `StringIO` directly: +file_object = openai.files.create(file: File.read("input.jsonl"), purpose: "fine-tune") + +# Or, to control the filename and/or content type: +file = OpenAI::FilePart.new(File.read("input.jsonl"), filename: "input.jsonl", content_type: "…") +file_object = openai.files.create(file: file, purpose: "fine-tune") puts(file_object.id) ``` -### Errors +Note that you can also pass a raw `IO` descriptor, but this disables retries, as the library can't be sure if the descriptor is a file or pipe (which cannot be rewound). + +### Handling errors When the library is unable to connect to the API, or if the API returns a non-success status code (i.e., 4xx or 5xx response), a subclass of `OpenAI::Errors::APIError` will be thrown: ```ruby begin job = openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123") -rescue OpenAI::Errors::APIError => e - puts(e.status) # 400 +rescue OpenAI::Errors::APIConnectionError => e + puts("The server could not be reached") + puts(e.cause) # an underlying Exception, likely raised within `net/http` +rescue OpenAI::Errors::RateLimitError => e + puts("A 429 status code was received; we should back off a bit.") +rescue OpenAI::Errors::APIStatusError => e + puts("Another non-200-range status code was received") + puts(e.status) end ``` @@ -154,11 +161,7 @@ openai.chat.completions.create( ### Timeouts -By default, requests will time out after 600 seconds. - -Timeouts are applied separately to the initial connection and the overall request time, so in some cases a request could wait 2\*timeout seconds before it fails. - -You can use the `timeout` option to configure or disable this: +By default, requests will time out after 600 seconds. You can use the timeout option to configure or disable this: ```ruby # Configure the default for all requests: @@ -174,43 +177,54 @@ openai.chat.completions.create( ) ``` -## Model DSL +On timeout, `OpenAI::Errors::APITimeoutError` is raised. + +Note that requests that time out are retried by default. -This library uses a simple DSL to represent request parameters and response shapes in `lib/openai/models`. +## Advanced concepts -With the right [editor plugins](https://shopify.github.io/ruby-lsp), you can ctrl-click on elements of the DSL to navigate around and explore the library. +### BaseModel -In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can also be used. For example, the following are interchangeable as arguments: +All parameter and response objects inherit from `OpenAI::Internal::Type::BaseModel`, which provides several conveniences, including: -```ruby -# This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services -params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], - model: :"gpt-4.1" -) +1. 
All fields, including unknown ones, are accessible with `obj[:prop]` syntax, and can be destructured with `obj => {prop: prop}` or pattern-matching syntax.
 
-```ruby
-# This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services
-params = OpenAI::Models::Chat::CompletionCreateParams.new(
-  messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")],
-  model: :"gpt-4.1"
-)
+2. Structural equivalence for equality; if two API calls return the same values, comparing the responses with == will return true.
 
-# This also works
-params = {
-  messages: [{role: "user", content: "Say this is a test"}],
-  model: :"gpt-4.1"
-}
-```
+3. Both instances and the classes themselves can be pretty-printed.
 
-## Editor support
+4. Helpers such as `#to_h`, `#deep_to_h`, `#to_json`, and `#to_yaml`.
 
-A combination of [Shopify LSP](https://shopify.github.io/ruby-lsp) and [Solargraph](https://solargraph.org/) is recommended for non-[Sorbet](https://sorbet.org) users. The former is especially good at go to definition, while the latter has much better auto-completion support.
+### Making custom or undocumented requests
 
-## Advanced concepts
+#### Undocumented properties
 
-### Making custom/undocumented requests
+You can send undocumented parameters to any endpoint, and read undocumented response properties, like so:
+
+Note: the `extra_` parameters of the same name override the documented parameters.
+
+```ruby
+chat_completion =
+  openai.chat.completions.create(
+    messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}],
+    model: :"gpt-4.1",
+    request_options: {
+      extra_query: {my_query_parameter: value},
+      extra_body: {my_body_parameter: value},
+      extra_headers: {"my-header": value}
+    }
+  )
+
+puts(chat_completion[:my_undocumented_property])
+```
 
 #### Undocumented request params
 
-If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a requests as seen in examples above.
+If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request as seen in examples above.
 
 #### Undocumented endpoints
 
-To make requests to undocumented endpoints, you can make requests using `client.request`. Options on the client will be respected (such as retries) when making this request.
+To make requests to undocumented endpoints while retaining the benefit of auth, retries, and so on, you can make requests using `client.request`, like so:
 
 ```ruby
 response = client.request(
   method: :post,
   path: '/undocumented/endpoint',
   query: {"dog": "woof"},
   headers: {"useful-header": "interesting-value"},
-  body: {"he": "llo"},
+  body: {"hello": "world"}
 )
 ```
 
 ### Concurrency & connection pooling
 
-The `OpenAI::Client` instances are thread-safe, and should be re-used across multiple threads. By default, each `Client` have their own HTTP connection pool, with a maximum number of connections equal to thread count.
+The `OpenAI::Client` instances are thread-safe, but are only fork-safe when there are no in-flight HTTP requests.
 
-When the maximum number of connections has been checked out from the connection pool, the `Client` will wait for an in use connection to become available. The queue time for this mechanism is accounted for by the per-request timeout.
+Each instance of `OpenAI::Client` has its own HTTP connection pool with a default size of 99. As such, we recommend instantiating the client once per application in most settings.
 
-Unless otherwise specified, other classes in the SDK do not have locks protecting their underlying data structure.
-
-Currently, `OpenAI::Client` instances are only fork-safe if there are no in-flight HTTP requests.
+When all available connections from the pool are checked out, requests wait for a new connection to become available, with queue time counting towards the request timeout.
+
+Unless otherwise specified, other classes in the SDK do not have locks protecting their underlying data structure.
 
-### Sorbet
+## Sorbet
 
-#### Enums
-
-Sorbet's typed enums require sub-classing of the [`T::Enum` class](https://sorbet.org/docs/tenum) from the `sorbet-runtime` gem.
+This library provides comprehensive [RBI](https://sorbet.org/docs/rbi) definitions, and has no dependency on sorbet-runtime.
 
-Since this library does not depend on `sorbet-runtime`, it uses a [`T.all` intersection type](https://sorbet.org/docs/intersection-types) with a ruby primitive type to construct a "tagged alias" instead.
+You can provide typesafe request parameters like so:
 
 ```ruby
-module OpenAI::ChatModel
-  # This alias aids language service driven navigation.
-  TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ChatModel) }
-end
+openai.chat.completions.create(
+  messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")],
+  model: :"gpt-4.1"
+)
 ```
 
-#### Argument passing trick
-
-It is possible to pass a compatible model / parameter class to a method that expects keyword arguments by using the `**` splat operator.
+Or, equivalently:
 
 ```ruby
+# Hashes work, but are not typesafe:
+openai.chat.completions.create(
+  messages: [{role: "user", content: "Say this is a test"}],
+  model: :"gpt-4.1"
+)
+
+# You can also splat a full Params class:
-params = OpenAI::Models::Chat::CompletionCreateParams.new(
+params = OpenAI::Chat::CompletionCreateParams.new(
   messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")],
   model: :"gpt-4.1"
 )
 openai.chat.completions.create(**params)
 ```
+
+### Enums
+
+Since this library does not depend on `sorbet-runtime`, it cannot provide [`T::Enum`](https://sorbet.org/docs/tenum) instances. Instead, we provide "tagged symbols", which are always primitives at runtime:
+
+```ruby
+# :low
+puts(OpenAI::ReasoningEffort::LOW)
+
+# Revealed type: `T.all(OpenAI::ReasoningEffort, Symbol)`
+T.reveal_type(OpenAI::ReasoningEffort::LOW)
+```
+
+Enum parameters have a "relaxed" type, so you can either pass in enum constants or their literal value:
+
+```ruby
+# Using the enum constants preserves the tagged type information:
+openai.chat.completions.create(
+  reasoning_effort: OpenAI::ReasoningEffort::LOW,
+  # …
+)
+
+# Literal values are also permissible:
+openai.chat.completions.create(
+  reasoning_effort: :low,
+  # …
+)
+```
+
 ## Versioning
 
 This package follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions. As the library is in initial development and has a major version of `0`, APIs may change at any time.
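
The companion change to `base_model.rb` below adds a YARD `@example` to `BaseModel#deconstruct_keys`. As a hedged sketch of the pattern-matching usage that example documents (assuming `comparison_filter` is an `OpenAI::ComparisonFilter` instance, as in the example itself):

```ruby
# Pattern matching against a BaseModel works because #deconstruct_keys exposes
# the known fields; keys not named in the pattern are simply ignored.
case comparison_filter
in {key: String => key, type: Symbol => type, value: value}
  puts("#{key} #{type} #{value}")
end
```
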
diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 8b5b608d..7fd973f0 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -386,6 +386,14 @@ def deep_to_h = self.class.recursively_to_h(@data, convert: false) # @param keys [Array, nil] # # @return [Hash{Symbol=>Object}] + # + # @example + # # `comparison_filter` is a `OpenAI::ComparisonFilter` + # comparison_filter => { + # key: key, + # type: type, + # value: value + # } def deconstruct_keys(keys) (keys || self.class.known_fields.keys) .filter_map do |k| From 54ed1245d7a470364922e6761d169dd1abd84a72 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 15 May 2025 23:48:13 +0000 Subject: [PATCH 191/295] feat(api): manual updates --- .stats.yml | 6 +- lib/openai.rb | 7 + lib/openai/models.rb | 6 + .../audio/transcription_create_params.rb | 95 +- .../beta/thread_create_and_run_params.rb | 53 +- lib/openai/models/beta/threads/run.rb | 52 +- .../models/beta/threads/run_create_params.rb | 53 +- lib/openai/models/beta/truncation_object.rb | 52 + lib/openai/models/embedding_create_params.rb | 14 +- lib/openai/models/eval_create_params.rb | 191 +--- lib/openai/models/eval_create_response.rb | 19 +- lib/openai/models/eval_item.rb | 119 +++ lib/openai/models/eval_list_response.rb | 19 +- .../models/eval_logs_data_source_config.rb | 47 + lib/openai/models/eval_retrieve_response.rb | 19 +- ...l_stored_completions_data_source_config.rb | 17 +- lib/openai/models/eval_update_response.rb | 19 +- ...create_eval_completions_run_data_source.rb | 220 +--- .../create_eval_jsonl_run_data_source.rb | 72 +- .../create_eval_responses_run_data_source.rb | 363 +++++++ .../evals/eval_jsonl_file_content_source.rb | 45 + .../models/evals/eval_jsonl_file_id_source.rb | 28 + .../models/evals/run_cancel_response.rb | 539 +--------- lib/openai/models/evals/run_create_params.rb | 586 +---------- .../models/evals/run_create_response.rb | 539 +--------- lib/openai/models/evals/run_list_response.rb | 535 +--------- .../models/evals/run_retrieve_response.rb | 541 +--------- .../models/fine_tuning/fine_tuning_job.rb | 14 +- .../models/graders/label_model_grader.rb | 125 +-- .../models/graders/score_model_grader.rb | 125 +-- lib/openai/models/vector_store.rb | 32 +- .../models/vector_store_create_params.rb | 31 +- .../models/vector_store_expiration_after.rb | 30 + .../models/vector_store_update_params.rb | 31 +- lib/openai/resources/audio/transcriptions.rb | 8 +- lib/openai/resources/beta/threads.rb | 4 +- lib/openai/resources/beta/threads/runs.rb | 4 +- lib/openai/resources/evals.rb | 2 +- lib/openai/resources/evals/runs.rb | 2 +- lib/openai/resources/vector_stores.rb | 4 +- rbi/openai/models.rbi | 6 + .../audio/transcription_create_params.rbi | 173 ++++ .../beta/thread_create_and_run_params.rbi | 118 +-- rbi/openai/models/beta/threads/run.rbi | 112 +- .../models/beta/threads/run_create_params.rbi | 118 +-- rbi/openai/models/beta/truncation_object.rbi | 85 ++ rbi/openai/models/embedding_create_params.rbi | 21 +- rbi/openai/models/eval_create_params.rbi | 298 +----- rbi/openai/models/eval_create_response.rbi | 4 + rbi/openai/models/eval_item.rbi | 161 +++ rbi/openai/models/eval_list_response.rbi | 4 + .../models/eval_logs_data_source_config.rbi | 70 ++ rbi/openai/models/eval_retrieve_response.rbi | 4 + ..._stored_completions_data_source_config.rbi | 12 +- rbi/openai/models/eval_update_response.rbi | 4 + 
...reate_eval_completions_run_data_source.rbi | 389 +------ .../create_eval_jsonl_run_data_source.rbi | 141 +-- .../create_eval_responses_run_data_source.rbi | 591 +++++++++++ .../evals/eval_jsonl_file_content_source.rbi | 97 ++ .../evals/eval_jsonl_file_id_source.rbi | 40 + .../models/evals/run_cancel_response.rbi | 924 +---------------- rbi/openai/models/evals/run_create_params.rbi | 958 +----------------- .../models/evals/run_create_response.rbi | 924 +---------------- rbi/openai/models/evals/run_list_response.rbi | 924 +---------------- .../models/evals/run_retrieve_response.rbi | 924 +---------------- .../models/fine_tuning/fine_tuning_job.rbi | 14 +- .../models/graders/label_model_grader.rbi | 225 +--- .../models/graders/score_model_grader.rbi | 225 +--- rbi/openai/models/vector_store.rbi | 39 +- .../models/vector_store_create_params.rbi | 44 +- .../models/vector_store_expiration_after.rbi | 36 + .../models/vector_store_update_params.rbi | 45 +- rbi/openai/resources/audio/transcriptions.rbi | 24 + rbi/openai/resources/beta/threads.rbi | 8 +- rbi/openai/resources/beta/threads/runs.rbi | 8 +- rbi/openai/resources/embeddings.rbi | 7 +- rbi/openai/resources/evals.rbi | 1 + rbi/openai/resources/evals/runs.rbi | 2 +- rbi/openai/resources/vector_stores.rbi | 5 +- sig/openai/models.rbs | 6 + .../audio/transcription_create_params.rbs | 55 + .../beta/thread_create_and_run_params.rbs | 34 +- sig/openai/models/beta/threads/run.rbs | 34 +- .../models/beta/threads/run_create_params.rbs | 34 +- sig/openai/models/beta/truncation_object.rbs | 30 + sig/openai/models/eval_create_params.rbs | 90 +- sig/openai/models/eval_create_response.rbs | 1 + sig/openai/models/eval_item.rbs | 70 ++ sig/openai/models/eval_list_response.rbs | 1 + .../models/eval_logs_data_source_config.rbs | 24 + sig/openai/models/eval_retrieve_response.rbs | 1 + ..._stored_completions_data_source_config.rbs | 6 +- sig/openai/models/eval_update_response.rbs | 1 + ...reate_eval_completions_run_data_source.rbs | 119 +-- .../create_eval_jsonl_run_data_source.rbs | 47 +- .../create_eval_responses_run_data_source.rbs | 216 ++++ .../evals/eval_jsonl_file_content_source.rbs | 40 + .../evals/eval_jsonl_file_id_source.rbs | 17 + .../models/evals/run_cancel_response.rbs | 318 +----- sig/openai/models/evals/run_create_params.rbs | 328 +----- .../models/evals/run_create_response.rbs | 318 +----- sig/openai/models/evals/run_list_response.rbs | 318 +----- .../models/evals/run_retrieve_response.rbs | 318 +----- .../models/fine_tuning/fine_tuning_job.rbs | 12 +- .../models/graders/label_model_grader.rbs | 75 +- .../models/graders/score_model_grader.rbs | 75 +- sig/openai/models/vector_store.rbs | 20 +- .../models/vector_store_create_params.rbs | 20 +- .../models/vector_store_expiration_after.rbs | 14 + .../models/vector_store_update_params.rbs | 16 +- sig/openai/resources/audio/transcriptions.rbs | 2 + sig/openai/resources/beta/threads.rbs | 4 +- sig/openai/resources/beta/threads/runs.rbs | 4 +- sig/openai/resources/vector_stores.rbs | 4 +- .../resources/beta/threads/runs_test.rb | 12 +- test/openai/resources/beta/threads_test.rb | 2 +- test/openai/resources/vector_stores_test.rb | 8 +- 117 files changed, 3009 insertions(+), 12138 deletions(-) create mode 100644 lib/openai/models/beta/truncation_object.rb create mode 100644 lib/openai/models/eval_item.rb create mode 100644 lib/openai/models/eval_logs_data_source_config.rb create mode 100644 lib/openai/models/evals/create_eval_responses_run_data_source.rb create mode 100644 
lib/openai/models/evals/eval_jsonl_file_content_source.rb create mode 100644 lib/openai/models/evals/eval_jsonl_file_id_source.rb create mode 100644 lib/openai/models/vector_store_expiration_after.rb create mode 100644 rbi/openai/models/beta/truncation_object.rbi create mode 100644 rbi/openai/models/eval_item.rbi create mode 100644 rbi/openai/models/eval_logs_data_source_config.rbi create mode 100644 rbi/openai/models/evals/create_eval_responses_run_data_source.rbi create mode 100644 rbi/openai/models/evals/eval_jsonl_file_content_source.rbi create mode 100644 rbi/openai/models/evals/eval_jsonl_file_id_source.rbi create mode 100644 rbi/openai/models/vector_store_expiration_after.rbi create mode 100644 sig/openai/models/beta/truncation_object.rbs create mode 100644 sig/openai/models/eval_item.rbs create mode 100644 sig/openai/models/eval_logs_data_source_config.rbs create mode 100644 sig/openai/models/evals/create_eval_responses_run_data_source.rbs create mode 100644 sig/openai/models/evals/eval_jsonl_file_content_source.rbs create mode 100644 sig/openai/models/evals/eval_jsonl_file_id_source.rbs create mode 100644 sig/openai/models/vector_store_expiration_after.rbs diff --git a/.stats.yml b/.stats.yml index 5a1f2ff0..d00e2bb3 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-794a6ed3c3d3d77887564755168056af8a426b17cf1ec721e3a300503dc22a41.yml -openapi_spec_hash: 25a81c220713cd5b0bafc221d1dfa79a -config_hash: 0b768ed1b56c6d82816f0fa40dc4aaf5 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-161ca7f1cfd7b33c1fc07d0ce25dfe4be5a7271c394f4cb526b7fb21b0729900.yml +openapi_spec_hash: 602e14add4bee018c6774e320ce309b8 +config_hash: bdacc55eb995c15255ec82130eb8c3bb diff --git a/lib/openai.rb b/lib/openai.rb index ef64bfc0..a2dd90db 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -175,6 +175,7 @@ require_relative "openai/models/beta/threads/text_delta_block" require_relative "openai/models/beta/thread_stream_event" require_relative "openai/models/beta/thread_update_params" +require_relative "openai/models/beta/truncation_object" require_relative "openai/models/chat/chat_completion" require_relative "openai/models/chat/chat_completion_assistant_message_param" require_relative "openai/models/chat/chat_completion_audio" @@ -227,13 +228,18 @@ require_relative "openai/models/eval_custom_data_source_config" require_relative "openai/models/eval_delete_params" require_relative "openai/models/eval_delete_response" +require_relative "openai/models/eval_item" require_relative "openai/models/eval_list_params" require_relative "openai/models/eval_list_response" +require_relative "openai/models/eval_logs_data_source_config" require_relative "openai/models/eval_retrieve_params" require_relative "openai/models/eval_retrieve_response" require_relative "openai/models/evals/create_eval_completions_run_data_source" require_relative "openai/models/evals/create_eval_jsonl_run_data_source" +require_relative "openai/models/evals/create_eval_responses_run_data_source" require_relative "openai/models/evals/eval_api_error" +require_relative "openai/models/evals/eval_jsonl_file_content_source" +require_relative "openai/models/evals/eval_jsonl_file_id_source" require_relative "openai/models/evals/run_cancel_params" require_relative "openai/models/evals/run_cancel_response" require_relative "openai/models/evals/run_create_params" @@ -418,6 +424,7 @@ 
require_relative "openai/models/vector_store_create_params" require_relative "openai/models/vector_store_deleted" require_relative "openai/models/vector_store_delete_params" +require_relative "openai/models/vector_store_expiration_after" require_relative "openai/models/vector_store_list_params" require_relative "openai/models/vector_store_retrieve_params" require_relative "openai/models/vector_stores/file_batch_cancel_params" diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 6406a297..c993468a 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -86,8 +86,12 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams + EvalItem = OpenAI::Models::EvalItem + EvalListParams = OpenAI::Models::EvalListParams + EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig + EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -204,6 +208,8 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter + VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 4015cac8..0c99423b 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -25,6 +25,17 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::AudioModel] required :model, union: -> { OpenAI::Audio::TranscriptionCreateParams::Model } + # @!attribute chunking_strategy + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + # + # @return [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] + optional :chunking_strategy, + union: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy }, + nil?: true + # @!attribute include # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the @@ -83,7 +94,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] } - # @!method initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @!method initialize(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details. # @@ -91,6 +102,8 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # + # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. 
When set to `"auto"`, the server firs + # # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt @@ -124,6 +137,86 @@ module Model end end + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + module ChunkingStrategy + extend OpenAI::Internal::Type::Union + + # Automatically set chunking parameters based on the audio. Must be set to `"auto"`. + variant const: :auto + + variant -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig } + + class VadConfig < OpenAI::Internal::Type::BaseModel + # @!attribute type + # Must be set to `server_vad` to enable manual chunking using server side VAD. + # + # @return [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] + required :type, + enum: -> { + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type + } + + # @!attribute prefix_padding_ms + # Amount of audio to include before the VAD detected speech (in milliseconds). + # + # @return [Integer, nil] + optional :prefix_padding_ms, Integer + + # @!attribute silence_duration_ms + # Duration of silence to detect speech stop (in milliseconds). With shorter values + # the model will respond more quickly, but may jump in on short pauses from the + # user. + # + # @return [Integer, nil] + optional :silence_duration_ms, Integer + + # @!attribute threshold + # Sensitivity threshold (0.0 to 1.0) for voice activity detection. A higher + # threshold will require louder audio to activate the model, and thus might + # perform better in noisy environments. + # + # @return [Float, nil] + optional :threshold, Float + + # @!method initialize(type:, prefix_padding_ms: nil, silence_duration_ms: nil, threshold: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig} for more + # details. + # + # @param type [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] Must be set to `server_vad` to enable manual chunking using server side VAD. + # + # @param prefix_padding_ms [Integer] Amount of audio to include before the VAD detected speech (in + # + # @param silence_duration_ms [Integer] Duration of silence to detect speech stop (in milliseconds). + # + # @param threshold [Float] Sensitivity threshold (0.0 to 1.0) for voice activity detection. A + + # Must be set to `server_vad` to enable manual chunking using server side VAD. 
+ # + # @see OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig#type + module Type + extend OpenAI::Internal::Type::Enum + + SERVER_VAD = :server_vad + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any(Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig) + end + end + end + module TimestampGranularity extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index f9addd97..cbdac287 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -159,10 +159,8 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] - optional :truncation_strategy, - -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy }, - nil?: true + # @return [OpenAI::Beta::TruncationObject, nil] + optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -196,7 +194,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -708,51 +706,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect end end - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } - - # @!attribute last_messages - # The number of most recent messages from the thread when constructing the context - # for the run. 
- # - # @return [Integer, nil] - optional :last_messages, Integer, nil?: true - - # @!method initialize(type:, last_messages: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more details. - # - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - # - # @param type [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # - # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @see OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy#type - module Type - extend OpenAI::Internal::Type::Enum - - AUTO = :auto - LAST_MESSAGES = :last_messages - - # @!method self.values - # @return [Array] - end - end end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index a4c6345e..5ee65dfe 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -197,8 +197,8 @@ class Run < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] - required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true + # @return [OpenAI::Beta::TruncationObject, nil] + required :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true # @!attribute usage # Usage statistics related to the run. This value will be `null` if the run is not @@ -270,7 +270,7 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe # - # @param truncation_strategy [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param usage [OpenAI::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not # @@ -392,52 +392,6 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel end end - # @see OpenAI::Beta::Threads::Run#truncation_strategy - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
- # - # @return [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Beta::Threads::Run::TruncationStrategy::Type } - - # @!attribute last_messages - # The number of most recent messages from the thread when constructing the context - # for the run. - # - # @return [Integer, nil] - optional :last_messages, Integer, nil?: true - - # @!method initialize(type:, last_messages: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run::TruncationStrategy} for more details. - # - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - # - # @param type [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # - # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @see OpenAI::Beta::Threads::Run::TruncationStrategy#type - module Type - extend OpenAI::Internal::Type::Enum - - AUTO = :auto - LAST_MESSAGES = :last_messages - - # @!method self.values - # @return [Array] - end - end - # @see OpenAI::Beta::Threads::Run#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 65cf8129..17ab91c4 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -187,10 +187,8 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] - optional :truncation_strategy, - -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy }, - nil?: true + # @return [OpenAI::Beta::TruncationObject, nil] + optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -228,7 +226,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -419,51 +417,6 @@ module Model T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } end end - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] - required :type, enum: -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type } - - # @!attribute last_messages - # The number of most recent messages from the thread when constructing the context - # for the run. - # - # @return [Integer, nil] - optional :last_messages, Integer, nil?: true - - # @!method initialize(type:, last_messages: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy} for more details. - # - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - # - # @param type [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # - # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @see OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy#type - module Type - extend OpenAI::Internal::Type::Enum - - AUTO = :auto - LAST_MESSAGES = :last_messages - - # @!method self.values - # @return [Array] - end - end end end end diff --git a/lib/openai/models/beta/truncation_object.rb b/lib/openai/models/beta/truncation_object.rb new file mode 100644 index 00000000..c406a0b7 --- /dev/null +++ b/lib/openai/models/beta/truncation_object.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Beta + class TruncationObject < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Beta::TruncationObject::Type] + required :type, enum: -> { OpenAI::Beta::TruncationObject::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Beta::TruncationObject} for more details. + # + # Controls for how a thread will be truncated prior to the run. 
+        # control the initial context window of the run.
+        #
+        # @param type [Symbol, OpenAI::Beta::TruncationObject::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
+        #
+        # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context
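+        #
+        # A hypothetical usage sketch (illustrative only, not generated docs):
+        #
+        # @example Keep only the ten most recent messages for the run
+        #   strategy = OpenAI::Beta::TruncationObject.new(type: :last_messages, last_messages: 10)
+        #   strategy.type # => :last_messages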
+
+        # The truncation strategy to use for the thread. The default is `auto`. If set to
+        # `last_messages`, the thread will be truncated to the n most recent messages in
+        # the thread. When set to `auto`, messages in the middle of the thread will be
+        # dropped to fit the context length of the model, `max_prompt_tokens`.
+        #
+        # @see OpenAI::Beta::TruncationObject#type
+        module Type
+          extend OpenAI::Internal::Type::Enum
+
+          AUTO = :auto
+          LAST_MESSAGES = :last_messages
+
+          # @!method self.values
+          # @return [Array]
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb
index 7640b480..51f4ffa2 100644
--- a/lib/openai/models/embedding_create_params.rb
+++ b/lib/openai/models/embedding_create_params.rb
@@ -11,11 +11,12 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
       # Input text to embed, encoded as a string or array of tokens. To embed multiple
       # inputs in a single request, pass an array of strings or array of token arrays.
       # The input must not exceed the max input tokens for the model (8192 tokens for
-      # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
+      # all embedding models), cannot be an empty string, and any array must be 2048
       # dimensions or less.
       # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-      # for counting tokens. Some models may also impose a limit on total number of
-      # tokens summed across inputs.
+      # for counting tokens. In addition to the per-input token limit, all embedding
+      # models enforce a maximum of 300,000 tokens summed across all inputs in a single
+      # request.
       #
       # @return [String, Array, Array, Array>]
       required :input, union: -> { OpenAI::EmbeddingCreateParams::Input }
@@ -71,11 +72,12 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
     # Input text to embed, encoded as a string or array of tokens. To embed multiple
     # inputs in a single request, pass an array of strings or array of token arrays.
     # The input must not exceed the max input tokens for the model (8192 tokens for
-    # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
+    # all embedding models), cannot be an empty string, and any array must be 2048
    # dimensions or less.
     # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-    # for counting tokens. Some models may also impose a limit on total number of
-    # tokens summed across inputs.
+    # for counting tokens. In addition to the per-input token limit, all embedding
+    # models enforce a maximum of 300,000 tokens summed across all inputs in a single
+    # request.
     module Input
       extend OpenAI::Internal::Type::Union
diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb
index 1dbd1acd..a80e9544 100644
--- a/lib/openai/models/eval_create_params.rb
+++ b/lib/openai/models/eval_create_params.rb
@@ -10,7 +10,7 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
     # @!attribute data_source_config
     # The configuration for the data source used for the evaluation runs.
     #
-    # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions]
+    # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions]
     required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig }
 
     # @!attribute testing_criteria
@@ -41,7 +41,7 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
     # Some parameter documentation has been truncated, see
     # {OpenAI::Models::EvalCreateParams} for more details.
     #
-    # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs.
+    # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs.
     #
     # @param testing_criteria [Array] A list of graders for all eval runs in this group.
     #
@@ -63,9 +63,12 @@ module DataSourceConfig
       # - What data is required when creating a run
       variant :custom, -> { OpenAI::EvalCreateParams::DataSourceConfig::Custom }
 
-      # A data source config which specifies the metadata property of your stored completions query.
+      # A data source config which specifies the metadata property of your logs query.
       # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc.
-      variant :stored_completions, -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions }
+      variant :logs, -> { OpenAI::EvalCreateParams::DataSourceConfig::Logs }
+
+      # Deprecated in favor of LogsDataSourceConfig.
+      variant :"stored-completions", -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions }
 
       class Custom < OpenAI::Internal::Type::BaseModel
         # @!attribute item_schema
@@ -105,12 +108,34 @@ class Custom < OpenAI::Internal::Type::BaseModel
         # @param type [Symbol, :custom] The type of data source. Always `custom`.
       end
 
+      class Logs < OpenAI::Internal::Type::BaseModel
+        # @!attribute type
+        # The type of data source. Always `logs`.
+        #
+        # @return [Symbol, :logs]
+        required :type, const: :logs
+
+        # @!attribute metadata
+        # Metadata filters for the logs data source.
+        #
+        # @return [Hash{Symbol=>Object}, nil]
+        optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+        # @!method initialize(metadata: nil, type: :logs)
+        # A data source config which specifies the metadata property of your logs query.
+        # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc.
+        #
+        # @param metadata [Hash{Symbol=>Object}] Metadata filters for the logs data source.
+        #
+        # @param type [Symbol, :logs] The type of data source. Always `logs`.
+      end
+
       class StoredCompletions < OpenAI::Internal::Type::BaseModel
         # @!attribute type
-        # The type of data source. Always `stored_completions`.
+        # The type of data source. Always `stored-completions`.
         #
-        # @return [Symbol, :stored_completions]
-        required :type, const: :stored_completions
+        # @return [Symbol, :"stored-completions"]
+        required :type, const: :"stored-completions"
 
         # @!attribute metadata
         # Metadata filters for the stored completions data source.
@@ -118,23 +143,22 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!method initialize(metadata: nil, type: :stored_completions) - # A data source config which specifies the metadata property of your stored - # completions query. This is usually metadata like `usecase=chatbot` or - # `prompt-version=v2`, etc. + # @!method initialize(metadata: nil, type: :"stored-completions") + # Deprecated in favor of LogsDataSourceConfig. # # @param metadata [Hash{Symbol=>Object}] Metadata filters for the stored completions data source. # - # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. + # @param type [Symbol, :"stored-completions"] The type of data source. Always `stored-completions`. end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] + # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ) end @@ -169,7 +193,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :input, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] @@ -212,7 +236,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param labels [Array] The labels to classify to each item in the evaluation. # @@ -236,7 +260,7 @@ module Input # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem } + variant -> { OpenAI::EvalItem } class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -257,145 +281,14 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] - required :content, - union: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content - } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- # - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] - required :role, - enum: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role - } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] - optional :type, - enum: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type - } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] + # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalItem)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalItem ) end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 6030deb8..caf24f3a 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -19,7 +19,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -91,18 +91,25 @@ module DataSourceConfig # - What data is required when creating a run variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } - # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # A LogsDataSourceConfig which specifies the metadata property of your logs query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + + # Deprecated in favor of LogsDataSourceConfig. 
+      variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig }
 
       # @!method self.variants
-      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
+      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
 
       define_sorbet_constant!(:Variants) do
         T.type_alias do
-          T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)
+          T.any(
+            OpenAI::EvalCustomDataSourceConfig,
+            OpenAI::EvalLogsDataSourceConfig,
+            OpenAI::EvalStoredCompletionsDataSourceConfig
+          )
         end
       end
     end
diff --git a/lib/openai/models/eval_item.rb b/lib/openai/models/eval_item.rb
new file mode 100644
index 00000000..b134b33b
--- /dev/null
+++ b/lib/openai/models/eval_item.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    class EvalItem < OpenAI::Internal::Type::BaseModel
+      # @!attribute content
+      # Text inputs to the model - can contain template strings.
+      #
+      # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText]
+      required :content, union: -> { OpenAI::EvalItem::Content }
+
+      # @!attribute role
+      # The role of the message input. One of `user`, `assistant`, `system`, or
+      # `developer`.
+      #
+      # @return [Symbol, OpenAI::EvalItem::Role]
+      required :role, enum: -> { OpenAI::EvalItem::Role }
+
+      # @!attribute type
+      # The type of the message input. Always `message`.
+      #
+      # @return [Symbol, OpenAI::EvalItem::Type, nil]
+      optional :type, enum: -> { OpenAI::EvalItem::Type }
+
+      # @!method initialize(content:, role:, type: nil)
+      # Some parameter documentation has been truncated, see {OpenAI::EvalItem} for
+      # more details.
+      #
+      # A message input to the model with a role indicating instruction following
+      # hierarchy. Instructions given with the `developer` or `system` role take
+      # precedence over instructions given with the `user` role. Messages with the
+      # `assistant` role are presumed to have been generated by the model in previous
+      # interactions.
+      #
+      # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings.
+      #
+      # @param role [Symbol, OpenAI::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or
+      #
+      # @param type [Symbol, OpenAI::EvalItem::Type] The type of the message input. Always `message`.
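+      #
+      # A hypothetical usage sketch (illustrative only; the template variable shown
+      # is an assumption, not generated docs):
+      #
+      # @example A developer-role message item
+      #   item = OpenAI::EvalItem.new(
+      #     content: "Grade the answer in {{item.answer}}.",
+      #     role: :developer,
+      #     type: :message
+      #   )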
+
+      # Text inputs to the model - can contain template strings.
+      #
+      # @see OpenAI::EvalItem#content
+      module Content
+        extend OpenAI::Internal::Type::Union
+
+        # A text input to the model.
+        variant String
+
+        # A text input to the model.
+        variant -> { OpenAI::Responses::ResponseInputText }
+
+        # A text output from the model.
+        variant -> { OpenAI::EvalItem::Content::OutputText }
+
+        class OutputText < OpenAI::Internal::Type::BaseModel
+          # @!attribute text
+          # The text output from the model.
+          #
+          # @return [String]
+          required :text, String
+
+          # @!attribute type
+          # The type of the output text. Always `output_text`.
+          #
+          # @return [Symbol, :output_text]
+          required :type, const: :output_text
+
+          # @!method initialize(text:, type: :output_text)
+          # Some parameter documentation has been truncated, see
+          # {OpenAI::EvalItem::Content::OutputText} for more details.
+          #
+          # A text output from the model.
+          #
+          # @param text [String] The text output from the model.
+          #
+          # @param type [Symbol, :output_text] The type of the output text. Always `output_text`.
+        end
+
+        # @!method self.variants
+        # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText)]
+
+        define_sorbet_constant!(:Variants) do
+          T.type_alias do
+            T.any(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText)
+          end
+        end
+      end
+
+      # The role of the message input. One of `user`, `assistant`, `system`, or
+      # `developer`.
+      #
+      # @see OpenAI::EvalItem#role
+      module Role
+        extend OpenAI::Internal::Type::Enum
+
+        USER = :user
+        ASSISTANT = :assistant
+        SYSTEM = :system
+        DEVELOPER = :developer
+
+        # @!method self.values
+        # @return [Array]
+      end
+
+      # The type of the message input. Always `message`.
+      #
+      # @see OpenAI::EvalItem#type
+      module Type
+        extend OpenAI::Internal::Type::Enum
+
+        MESSAGE = :message
+
+        # @!method self.values
+        # @return [Array]
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb
index 966d6bcb..577ef0b7 100644
--- a/lib/openai/models/eval_list_response.rb
+++ b/lib/openai/models/eval_list_response.rb
@@ -19,7 +19,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel
     # @!attribute data_source_config
     # Configuration of data sources used in runs of the evaluation.
     #
-    # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig]
+    # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig]
     required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig }
 
     # @!attribute metadata
@@ -67,7 +67,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel
     #
     # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created.
     #
-    # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
+    # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
     #
     # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
     #
@@ -91,18 +91,25 @@ module DataSourceConfig
       # - What data is required when creating a run
       variant :custom, -> { OpenAI::EvalCustomDataSourceConfig }
 
-      # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query.
+      # A LogsDataSourceConfig which specifies the metadata property of your logs query.
       # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc.
       # The schema returned by this data source config is used to define what variables are available in your evals.
       # `item` and `sample` are both defined when using this data source config.
-      variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig }
+      variant :logs, -> { OpenAI::EvalLogsDataSourceConfig }
+
+      # Deprecated in favor of LogsDataSourceConfig.
+      variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig }
 
       # @!method self.variants
-      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
+      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
 
       define_sorbet_constant!(:Variants) do
         T.type_alias do
-          T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)
+          T.any(
+            OpenAI::EvalCustomDataSourceConfig,
+            OpenAI::EvalLogsDataSourceConfig,
+            OpenAI::EvalStoredCompletionsDataSourceConfig
+          )
         end
       end
     end
diff --git a/lib/openai/models/eval_logs_data_source_config.rb b/lib/openai/models/eval_logs_data_source_config.rb
new file mode 100644
index 00000000..0412bec6
--- /dev/null
+++ b/lib/openai/models/eval_logs_data_source_config.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel
+      # @!attribute schema
+      # The JSON schema for the run data source items. Learn how to build JSON schemas
+      # [here](https://json-schema.org/).
+      #
+      # @return [Hash{Symbol=>Object}]
+      required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
+
+      # @!attribute type
+      # The type of data source. Always `logs`.
+      #
+      # @return [Symbol, :logs]
+      required :type, const: :logs
+
+      # @!attribute metadata
+      # Set of 16 key-value pairs that can be attached to an object. This can be useful
+      # for storing additional information about the object in a structured format, and
+      # querying for objects via API or the dashboard.
+      #
+      # Keys are strings with a maximum length of 64 characters. Values are strings with
+      # a maximum length of 512 characters.
+      #
+      # @return [Hash{Symbol=>String}, nil]
+      optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true
+
+      # @!method initialize(schema:, metadata: nil, type: :logs)
+      # Some parameter documentation has been truncated, see
+      # {OpenAI::EvalLogsDataSourceConfig} for more details.
+      #
+      # A LogsDataSourceConfig which specifies the metadata property of your logs query.
+      # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The
+      # schema returned by this data source config is used to define what variables are
+      # available in your evals. `item` and `sample` are both defined when using this
+      # data source config.
+      #
+      # @param schema [Hash{Symbol=>Object}] The JSON schema for the run data source items.
+      #
+      # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
+      #
+      # @param type [Symbol, :logs] The type of data source. Always `logs`.
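+      #
+      # A hypothetical usage sketch (illustrative only; the schema shown is an
+      # assumption):
+      #
+      # @example Building a logs data source config
+      #   config = OpenAI::EvalLogsDataSourceConfig.new(
+      #     schema: {type: "object", properties: {item: {type: "object"}}}
+      #   )
+      #   config.type # => :logs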
+    end
+  end
+end
diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb
index d8e2ca54..0b7c4ad7 100644
--- a/lib/openai/models/eval_retrieve_response.rb
+++ b/lib/openai/models/eval_retrieve_response.rb
@@ -19,7 +19,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel
     # @!attribute data_source_config
     # Configuration of data sources used in runs of the evaluation.
     #
-    # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig]
+    # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig]
     required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig }
 
     # @!attribute metadata
@@ -67,7 +67,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel
     #
     # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created.
     #
-    # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
+    # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
     #
     # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
     #
@@ -91,18 +91,25 @@ module DataSourceConfig
       # - What data is required when creating a run
       variant :custom, -> { OpenAI::EvalCustomDataSourceConfig }
 
-      # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query.
+      # A LogsDataSourceConfig which specifies the metadata property of your logs query.
       # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc.
       # The schema returned by this data source config is used to define what variables are available in your evals.
       # `item` and `sample` are both defined when using this data source config.
-      variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig }
+      variant :logs, -> { OpenAI::EvalLogsDataSourceConfig }
+
+      # Deprecated in favor of LogsDataSourceConfig.
+      variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig }
 
       # @!method self.variants
-      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
+      # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)]
 
       define_sorbet_constant!(:Variants) do
         T.type_alias do
-          T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)
+          T.any(
+            OpenAI::EvalCustomDataSourceConfig,
+            OpenAI::EvalLogsDataSourceConfig,
+            OpenAI::EvalStoredCompletionsDataSourceConfig
+          )
         end
       end
     end
diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb
index 6bb63bf6..28bdf315 100644
--- a/lib/openai/models/eval_stored_completions_data_source_config.rb
+++ b/lib/openai/models/eval_stored_completions_data_source_config.rb
@@ -2,6 +2,7 @@
 
 module OpenAI
   module Models
+    # @deprecated
     class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel
       # @!attribute schema
       # The JSON schema for the run data source items. Learn how to build JSON schemas
@@ -11,10 +12,10 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel
       required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
 
       # @!attribute type
-      # The type of data source. Always `stored_completions`.
+      # The type of data source. Always `stored-completions`.
# - # @return [Symbol, :stored_completions] - required :type, const: :stored_completions + # @return [Symbol, :"stored-completions"] + required :type, const: :"stored-completions" # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -27,21 +28,17 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!method initialize(schema:, metadata: nil, type: :stored_completions) + # @!method initialize(schema:, metadata: nil, type: :"stored-completions") # Some parameter documentations has been truncated, see # {OpenAI::EvalStoredCompletionsDataSourceConfig} for more details. # - # A StoredCompletionsDataSourceConfig which specifies the metadata property of - # your stored completions query. This is usually metadata like `usecase=chatbot` - # or `prompt-version=v2`, etc. The schema returned by this data source config is - # used to defined what variables are available in your evals. `item` and `sample` - # are both defined when using this data source config. + # Deprecated in favor of LogsDataSourceConfig. # # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. + # @param type [Symbol, :"stored-completions"] The type of data source. Always `stored-completions`. end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 61f3e677..b8357d8e 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -19,7 +19,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -91,18 +91,25 @@ module DataSourceConfig # - What data is required when creating a run variant :custom, -> { OpenAI::EvalCustomDataSourceConfig } - # A StoredCompletionsDataSourceConfig which specifies the metadata property of your stored completions query. + # A LogsDataSourceConfig which specifies the metadata property of your logs query. # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. 
# The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + + # Deprecated in favor of LogsDataSourceConfig. + variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] define_sorbet_constant!(:Variants) do T.type_alias do - T.any(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig) + T.any( + OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, + OpenAI::EvalStoredCompletionsDataSourceConfig + ) end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 92535eb5..8f17fd52 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -7,7 +7,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # A StoredCompletionsRunDataSource configuration describing a set of filters # - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } # @!attribute type @@ -41,7 +41,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters + # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters # # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. 
# @@ -59,71 +59,14 @@ module Source discriminator :type - variant :file_content, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent } + variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } - variant :file_id, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID } + variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } # A StoredCompletionsRunDataSource configuration describing a set of filters variant :stored_completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions } - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] - } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. - - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of source. Always `stored_completions`. @@ -187,13 +130,13 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) end @@ -228,7 +171,7 @@ class Template < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
# - # @return [Array] + # @return [Array] required :template, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] @@ -245,7 +188,7 @@ class Template < OpenAI::Internal::Type::BaseModel # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} for # more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -271,152 +214,13 @@ module Template # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, - -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - } - - class Message < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] - required :content, - union: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content - } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] - required :role, - enum: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role - } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] - optional :type, - enum: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type - } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. 
- # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
-            #
-            # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type
-            module Type
-              extend OpenAI::Internal::Type::Enum
-
-              MESSAGE = :message
-
-              # @!method self.values
-              # @return [Array]
-            end
-          end
+          variant :message, -> { OpenAI::EvalItem }
 
           # @!method self.variants
-          # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)]
+          # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem)]
 
           define_sorbet_constant!(:Variants) do
-            T.type_alias do
-              T.any(
-                OpenAI::Responses::EasyInputMessage,
-                OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message
-              )
-            end
+            T.type_alias { T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) }
           end
         end
       end
diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
index faedad0e..3819554d 100644
--- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
+++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
@@ -6,7 +6,7 @@ module Evals
     class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel
       # @!attribute source
       #
-      # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID]
+      # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource]
       required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source }
 
       # @!attribute type
@@ -19,7 +19,7 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel
       # A JsonlRunDataSource object that specifies a JSONL file that matches the
       # eval
       #
-      # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID]
+      # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource]
       #
       # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`.
 
@@ -29,76 +29,16 @@ module Source
 
         discriminator :type
 
-        variant :file_content, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent }
+        variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource }
 
-        variant :file_id, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID }
-
-        class FileContent < OpenAI::Internal::Type::BaseModel
-          # @!attribute content
-          # The content of the jsonl file.
-          #
-          # @return [Array]
-          required :content,
-                   -> {
-                     OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content]
-                   }
-
-          # @!attribute type
-          # The type of jsonl source. Always `file_content`.
-          #
-          # @return [Symbol, :file_content]
-          required :type, const: :file_content
-
-          # @!method initialize(content:, type: :file_content)
-          # @param content [Array] The content of the jsonl file.
-          #
-          # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`.
-
-          class Content < OpenAI::Internal::Type::BaseModel
-            # @!attribute item
-            #
-            # @return [Hash{Symbol=>Object}]
-            required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
-
-            # @!attribute sample
-            #
-            # @return [Hash{Symbol=>Object}, nil]
-            optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]
-
-            # @!method initialize(item:, sample: nil)
-            # @param item [Hash{Symbol=>Object}]
-            # @param sample [Hash{Symbol=>Object}]
-          end
-        end
-
-        class FileID < OpenAI::Internal::Type::BaseModel
-          # @!attribute id
-          # The identifier of the file.
-          #
-          # @return [String]
-          required :id, String
-
-          # @!attribute type
-          # The type of jsonl source. Always `file_id`.
-          #
-          # @return [Symbol, :file_id]
-          required :type, const: :file_id
-
-          # @!method initialize(id:, type: :file_id)
-          # @param id [String] The identifier of the file.
-          #
-          # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`.
-        end
+        variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource }
 
         # @!method self.variants
-        # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)]
+        # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource)]
 
         define_sorbet_constant!(:Variants) do
           T.type_alias do
-            T.any(
-              OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
-              OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID
-            )
+            T.any(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource)
          end
         end
       end
diff --git a/lib/openai/models/evals/create_eval_responses_run_data_source.rb b/lib/openai/models/evals/create_eval_responses_run_data_source.rb
new file mode 100644
index 00000000..95b9d3c8
--- /dev/null
+++ b/lib/openai/models/evals/create_eval_responses_run_data_source.rb
@@ -0,0 +1,363 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module Evals
+      class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel
+        # @!attribute source
+        # An EvalResponsesSource object describing a run data source configuration.
+        #
+        # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses]
+        required :source, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source }
+
+        # @!attribute type
+        # The type of run data source. Always `responses`.
+        #
+        # @return [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type]
+        required :type, enum: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Type }
+
+        # @!attribute input_messages
+        #
+        # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil]
+        optional :input_messages, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages }
+
+        # @!attribute model
+        # The name of the model to use for generating completions (e.g. "o3-mini").
+        #
+        # @return [String, nil]
+        optional :model, String
+
+        # @!attribute sampling_params
+        #
+        # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, nil]
+        optional :sampling_params, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams }
+
+        # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil)
+        # Some parameter documentation has been truncated, see
+        # {OpenAI::Evals::CreateEvalResponsesRunDataSource} for more details.
+        #
+        # A ResponsesRunDataSource object describing a model sampling configuration.
+        #
+        # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses] An EvalResponsesSource object describing a run data source configuration.
+        #
+        # @param type [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`.
+        #
+        # @param input_messages [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference]
+        #
+        # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini").
+        #
+        # @param sampling_params [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams]
+
+        # An EvalResponsesSource object describing a run data source configuration.
+        #
+        # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#source
+        module Source
+          extend OpenAI::Internal::Type::Union
+
+          discriminator :type
+
+          variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource }
+
+          variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource }
+
+          # An EvalResponsesSource object describing a run data source configuration.
+          variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses }
+
+          class Responses < OpenAI::Internal::Type::BaseModel
+            # @!attribute type
+            # The type of run data source. Always `responses`.
+            #
+            # @return [Symbol, :responses]
+            required :type, const: :responses
+
+            # @!attribute created_after
+            # Only include items created after this timestamp (inclusive). This is a query
+            # parameter used to select responses.
+            #
+            # @return [Integer, nil]
+            optional :created_after, Integer, nil?: true
+
+            # @!attribute created_before
+            # Only include items created before this timestamp (inclusive). This is a query
+            # parameter used to select responses.
+            #
+            # @return [Integer, nil]
+            optional :created_before, Integer, nil?: true
+
+            # @!attribute has_tool_calls
+            # Whether the response has tool calls. This is a query parameter used to select
+            # responses.
+            #
+            # @return [Boolean, nil]
+            optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true
+
+            # @!attribute instructions_search
+            # Optional string to search the 'instructions' field. This is a query parameter
+            # used to select responses.
+            #
+            # @return [String, nil]
+            optional :instructions_search, String, nil?: true
+
+            # @!attribute metadata
+            # Metadata filter for the responses. This is a query parameter used to select
+            # responses.
+            #
+            # @return [Object, nil]
+            optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true
+
+            # @!attribute model
+            # The name of the model to find responses for. This is a query parameter used to
+            # select responses.
+            #
+            # @return [String, nil]
+            optional :model, String, nil?: true
+
+            # @!attribute reasoning_effort
+            # Optional reasoning effort parameter. This is a query parameter used to select
+            # responses.
+            #
+            # @return [Symbol, OpenAI::ReasoningEffort, nil]
+            optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true
+
+            # @!attribute temperature
+            # Sampling temperature. This is a query parameter used to select responses.
+            #
+            # @return [Float, nil]
+            optional :temperature, Float, nil?: true
+
+            # @!attribute tools
+            # List of tool names. This is a query parameter used to select responses.
+            #
+            # @return [Array, nil]
+            optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true
+
+            # @!attribute top_p
+            # Nucleus sampling parameter. This is a query parameter used to select responses.
+            #
+            # @return [Float, nil]
+            optional :top_p, Float, nil?: true
+
+            # @!attribute users
+            # List of user identifiers. This is a query parameter used to select responses.
+            #
+            # @return [Array, nil]
+            optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true
+
+            # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses)
+            # Some parameter documentation has been truncated, see
+            # {OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses} for more
+            # details.
+            #
+            # An EvalResponsesSource object describing a run data source configuration.
+            #
+            # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par
+            #
+            # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa
+            #
+            # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re
+            #
+            # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us
+            #
+            # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp
+            #
+            # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s
+            #
+            # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re
+            #
+            # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses.
+            #
+            # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses.
+            #
+            # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses.
+            #
+            # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses.
+            #
+            # @param type [Symbol, :responses] The type of run data source. Always `responses`.
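+            #
+            # A hypothetical usage sketch (illustrative only; the filter values are
+            # assumptions):
+            #
+            # @example Selecting stored responses by model and tool usage
+            #   source = OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses.new(
+            #     model: "o3-mini",
+            #     has_tool_calls: true,
+            #     temperature: 0.7
+            #   )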
+ #
+ # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#type
+ module Type
+ extend OpenAI::Internal::Type::Enum
+
+ RESPONSES = :responses
+
+ # @!method self.values
+ # @return [Array]
+ end
+
+ # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#input_messages
+ module InputMessages
+ extend OpenAI::Internal::Type::Union
+
+ discriminator :type
+
+ variant :template, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template }
+
+ variant :item_reference,
+ -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference }
+
+ class Template < OpenAI::Internal::Type::BaseModel
+ # @!attribute template
+ # A list of chat messages forming the prompt or context. May include variable
+ # references to the "item" namespace, i.e. {{item.name}}.
+ #
+ # @return [Array]
+ required :template,
+ -> {
+ OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template]
+ }
+
+ # @!attribute type
+ # The type of input messages. Always `template`.
+ #
+ # @return [Symbol, :template]
+ required :type, const: :template
+
+ # @!method initialize(template:, type: :template)
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template} for
+ # more details.
+ #
+ # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe
+ #
+ # @param type [Symbol, :template] The type of input messages. Always `template`.
+
+ # A message input to the model with a role indicating instruction following
+ # hierarchy. Instructions given with the `developer` or `system` role take
+ # precedence over instructions given with the `user` role. Messages with the
+ # `assistant` role are presumed to have been generated by the model in previous
+ # interactions.
+ module Template
+ extend OpenAI::Internal::Type::Union
+
+ variant -> {
+ OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage
+ }
+
+ # A message input to the model with a role indicating instruction following
+ # hierarchy. Instructions given with the `developer` or `system` role take
+ # precedence over instructions given with the `user` role. Messages with the
+ # `assistant` role are presumed to have been generated by the model in previous
+ # interactions.
+ variant -> { OpenAI::EvalItem }
+
+ class ChatMessage < OpenAI::Internal::Type::BaseModel
+ # @!attribute content
+ # The content of the message.
+ #
+ # @return [String]
+ required :content, String
+
+ # @!attribute role
+ # The role of the message (e.g. "system", "assistant", "user").
+ #
+ # @return [String]
+ required :role, String
+
+ # @!method initialize(content:, role:)
+ # @param content [String] The content of the message.
+ #
+ # @param role [String] The role of the message (e.g. "system", "assistant", "user").
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::EvalItem)]
+
+ define_sorbet_constant!(:Variants) do
+ T.type_alias do
+ T.any(
+ OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage,
+ OpenAI::EvalItem
+ )
+ end
+ end
+ end
+ end
+
+ class ItemReference < OpenAI::Internal::Type::BaseModel
+ # @!attribute item_reference
+ # A reference to a variable in the "item" namespace, i.e. "item.name"
+ #
+ # @return [String]
+ required :item_reference, String
+
+ # @!attribute type
+ # The type of input messages. Always `item_reference`.
+ #
+ # @return [Symbol, :item_reference]
+ required :type, const: :item_reference
+
+ # @!method initialize(item_reference:, type: :item_reference)
+ # @param item_reference [String] A reference to a variable in the "item" namespace, i.e. "item.name"
+ #
+ # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`.
+ end
+
+ # @!method self.variants
+ # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)]
+
+ define_sorbet_constant!(:Variants) do
+ T.type_alias do
+ T.any(
+ OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template,
+ OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference
+ )
+ end
+ end
+ end
+
+ # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#sampling_params
+ class SamplingParams < OpenAI::Internal::Type::BaseModel
+ # @!attribute max_completion_tokens
+ # The maximum number of tokens in the generated output.
+ #
+ # @return [Integer, nil]
+ optional :max_completion_tokens, Integer
+
+ # @!attribute seed
+ # A seed value to initialize the randomness during sampling.
+ #
+ # @return [Integer, nil]
+ optional :seed, Integer
+
+ # @!attribute temperature
+ # A higher temperature increases randomness in the outputs.
+ #
+ # @return [Float, nil]
+ optional :temperature, Float
+
+ # @!attribute top_p
+ # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ #
+ # @return [Float, nil]
+ optional :top_p, Float
+
+ # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil)
+ # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output.
+ #
+ # @param seed [Integer] A seed value to initialize the randomness during sampling.
+ #
+ # @param temperature [Float] A higher temperature increases randomness in the outputs.
+ #
+ # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/evals/eval_jsonl_file_content_source.rb b/lib/openai/models/evals/eval_jsonl_file_content_source.rb
new file mode 100644
index 00000000..add2daad
--- /dev/null
+++ b/lib/openai/models/evals/eval_jsonl_file_content_source.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Evals
+ class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel
+ # @!attribute content
+ # The content of the jsonl file.
+ #
+ # @return [Array]
+ required :content,
+ -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::EvalJSONLFileContentSource::Content] }
+
+ # @!attribute type
+ # The type of jsonl source. Always `file_content`.
+ #
+ # @return [Symbol, :file_content]
+ required :type, const: :file_content
+
+ # @!method initialize(content:, type: :file_content)
+ # @param content [Array] The content of the jsonl file.
+ #
+ # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`.
+ + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + end + + EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource + end +end diff --git a/lib/openai/models/evals/eval_jsonl_file_id_source.rb b/lib/openai/models/evals/eval_jsonl_file_id_source.rb new file mode 100644 index 00000000..3347ee13 --- /dev/null +++ b/lib/openai/models/evals/eval_jsonl_file_id_source.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Evals + class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + end + + EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource + end +end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index e9ac234b..3e7dacc0 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -20,7 +20,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,546 +145,17 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions } - - class Completions < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. 
- # - # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] - required :source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source } - - # @!attribute type - # The type of run data source. Always `completions`. - # - # @return [Symbol, :completions] - required :type, const: :completions - - # @!attribute input_messages - # - # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, nil] - optional :sampling_params, - -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams } - - # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions} for more - # details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams] - # - # @param type [Symbol, :completions] The type of run data source. Always `completions`. - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, - -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent } - - variant :file_id, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, - -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses } - - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. 
- # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. - - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute allow_parallel_tool_calls - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional search string for instructions. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. 
- # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. - # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses} - # for more details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. 
- end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - ) - end - end - end - - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - # - # @return [Array] - required :template, - -> do - OpenAI::Internal::Type::ArrayOf[ - union: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template - ] - end - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template} - # for more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. 
- # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - required :content, - union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - required :role, - enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] - optional :type, - enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. 
- # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
- end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. - # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end + variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 849d33d9..7437852c 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,7 +11,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Evals::RunCreateParams::DataSource } # @!attribute metadata @@ -35,7 +35,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. 
# - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -54,593 +54,17 @@ module DataSource variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } - - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] - required :source, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source - } - - # @!attribute type - # The type of run data source. Always `completions`. - # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] - required :type, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type - } - - # @!attribute input_messages - # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages - } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] - optional :sampling_params, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - } - - # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} - # for more details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `completions`. 
- # - # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent - } - - variant :file_id, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID - } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - } - - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] - } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. - - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute allow_parallel_tool_calls - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. 
- # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional search string for instructions. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. - # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} - # for more details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. 
This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - end - end - - # The type of run data source. Always `completions`. - # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type - module Type - extend OpenAI::Internal::Type::Enum - - COMPLETIONS = :completions - - # @!method self.values - # @return [Array] - end - - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template - } - - variant :item_reference, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - # - # @return [Array] - required :template, - -> do - OpenAI::Internal::Type::ArrayOf[ - union: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template - ] - end - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} - # for more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
- module Template - extend OpenAI::Internal::Type::Union - - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] - required :content, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content - } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] - required :role, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role - } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] - optional :type, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type - } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. 
- # - # @param role [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
- # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. - # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- end - end + variant -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 988f2fe9..cc42d18c 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,7 +20,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,546 +145,17 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions } - - class Completions < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] - required :source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source } - - # @!attribute type - # The type of run data source. Always `completions`. 
- # - # @return [Symbol, :completions] - required :type, const: :completions - - # @!attribute input_messages - # - # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, nil] - optional :sampling_params, - -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams } - - # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions} for more - # details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams] - # - # @param type [Symbol, :completions] The type of run data source. Always `completions`. - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, - -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent } - - variant :file_id, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, - -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses } - - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
- - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute allow_parallel_tool_calls - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional search string for instructions. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. 
- # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses} - # for more details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - ) - end - end - end - - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. 
- # - # @return [Array] - required :template, - -> do - OpenAI::Internal::Type::ArrayOf[ - union: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template - ] - end - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template} - # for more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - required :content, - union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - required :role, - enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. 
- # - # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] - optional :type, - enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- # - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. 
- # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end + variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 6a038c54..846ca767 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,7 +20,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,542 +145,17 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions } - - class Completions < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] - required :source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source } - - # @!attribute type - # The type of run data source. Always `completions`. 
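
Worth noting in the hunk above: the removed registration reused the `:completions` discriminator for the responses-shaped class, so a payload with `"type": "responses"` could not previously resolve to it; the replacement registers the shared model under `:responses`. Once parsed, the data source narrows the same way for every endpoint. A sketch, assuming the nested `runs.retrieve(run_id, eval_id:)` calling convention and that the shared model keeps the `model` attribute of the duplicated classes it replaces:

    run = client.evals.runs.retrieve("run_abc123", eval_id: "eval_abc123")

    case run.data_source
    in OpenAI::Evals::CreateEvalJSONLRunDataSource
      puts "jsonl data source"
    in OpenAI::Evals::CreateEvalCompletionsRunDataSource
      puts "completions data source"
    in OpenAI::Evals::CreateEvalResponsesRunDataSource => ds
      puts "responses data source, model=#{ds.model.inspect}"
    end
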
- # - # @return [Symbol, :completions] - required :type, const: :completions - - # @!attribute input_messages - # - # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, nil] - optional :sampling_params, - -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams } - - # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions} for more - # details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams] - # - # @param type [Symbol, :completions] The type of run data source. Always `completions`. - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, - -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent } - - variant :file_id, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, - -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses } - - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
- - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute allow_parallel_tool_calls - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional search string for instructions. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. 
- # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses} - # for more details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - ) - end - end - end - - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. 
- # - # @return [Array] - required :template, - -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template] } - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template} - # for more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - required :content, - union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - required :role, - enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. 
- # - # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] - optional :type, - enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- # - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Models::Evals::RunListResponse::DataSource::Completions#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. - # - # @param temperature [Float] A higher temperature increases randomness in the outputs. 
- # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end + variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 6286d0fd..60f06879 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,7 +20,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,548 +145,17 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :completions, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions } - - class Completions < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] - required :source, - union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source } - - # @!attribute type - # The type of run data source. Always `completions`. 
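
The same replacement lands in run_list_response.rb here, which is the point of the deduplication: consumers no longer need per-endpoint branches to inspect a run's data source. A small helper along these lines becomes possible (a sketch; the attribute names mirror the duplicated classes being deleted, and the shared model is assumed to expose the same ones):

    # Works for create, list, and retrieve responses alike, since all three
    # now surface the shared OpenAI::Evals::CreateEvalResponsesRunDataSource.
    def responses_run_model(run)
      ds = run.data_source
      return nil unless ds.is_a?(OpenAI::Evals::CreateEvalResponsesRunDataSource)

      ds.model # e.g. "o3-mini"; nil when the run did not pin a model
    end
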
- # - # @return [Symbol, :completions] - required :type, const: :completions - - # @!attribute input_messages - # - # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, nil] - optional :sampling_params, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams } - - # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :completions) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions} for more - # details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams] - # - # @param type [Symbol, :completions] The type of run data source. Always `completions`. - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent } - - variant :file_id, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses } - - class FileContent < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
- - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute allow_parallel_tool_calls - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :allow_parallel_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional search string for instructions. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. 
- # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(allow_parallel_tool_calls: nil, created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses} - # for more details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param allow_parallel_tool_calls [Boolean, nil] Whether to allow parallel tool calls. This is a query parameter used to select r - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional search string for instructions. This is a query parameter used to selec - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - ) - end - end - end - - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. 
- # - # @return [Array] - required :template, - -> do - OpenAI::Internal::Type::ArrayOf[ - union: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template - ] - end - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template} - # for more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] - required :content, - union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] - required :role, - enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. 
- # - # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type, nil] - optional :type, - enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem} - # for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText} - # for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- # - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. 
- # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end + variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 80821e35..189aabae 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -226,8 +226,10 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @return [Symbol, :auto, Integer, nil] - optional :batch_size, union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize } + # @return [Object, Symbol, :auto, Integer, nil] + optional :batch_size, + union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize }, + nil?: true # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -251,7 +253,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # - # @param batch_size [Symbol, :auto, Integer] Number of examples in each batch. A larger batch size means that model parameter + # @param batch_size [Object, Symbol, :auto, Integer, nil] Number of examples in each batch. A larger batch size means that model parameter # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. 
A smaller learning rate may be useful to a # @@ -264,15 +266,17 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel module BatchSize extend OpenAI::Internal::Type::Union + variant OpenAI::Internal::Type::Unknown + variant const: :auto variant Integer # @!method self.variants - # @return [Array(Symbol, :auto, Integer)] + # @return [Array(Object, Symbol, :auto, Integer)] define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } + T.type_alias { T.nilable(T.any(T.anything, Symbol, Integer)) } end end diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index 801b3432..b6fa6510 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -6,8 +6,8 @@ module Graders class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::LabelModelGrader::Input] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } # @!attribute labels # The labels to assign to each item in the evaluation. @@ -43,7 +43,7 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] + # @param input [Array] # # @param labels [Array] The labels to assign to each item in the evaluation. # @@ -54,125 +54,6 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. # # @param type [Symbol, :label_model] The object type, which is always `label_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] - required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] - required :role, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type, nil] - optional :type, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Graders::LabelModelGrader::Input} for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] The role of the message input. 
One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Graders::LabelModelGrader::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Graders::LabelModelGrader::Input::Content::OutputText} for more - # details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Graders::LabelModelGrader::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Graders::LabelModelGrader::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 7742ec75..353d7ae9 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -7,8 +7,8 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # The input text. This may include template strings. # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::ScoreModelGrader::Input] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } # @!attribute model # The model to use for the evaluation. @@ -43,7 +43,7 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. + # @param input [Array] The input text. This may include template strings. # # @param model [String] The model to use for the evaluation. 
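[Editor's note] With `input` now typed as an array of the shared `OpenAI::EvalItem`, a grader payload is built from the same message hashes used elsewhere. A hedged sketch of a `score_model` grader (values are hypothetical; field names follow the attributes documented above):

    grader = {
      type: :score_model,
      name: "quality_score",                 # hypothetical grader name
      model: "gpt-4o-mini",
      input: [
        {role: :system, content: "Score the answer from 0 to 1.", type: :message},
        {role: :user, content: "{{item.answer}}", type: :message}
      ],
      range: [0.0, 1.0]
    }
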
# @@ -54,125 +54,6 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @param sampling_params [Object] The sampling parameters for the model. # # @param type [Symbol, :score_model] The object type, which is always `score_model`. - - class Input < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] - required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] - required :role, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type, nil] - optional :type, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Graders::ScoreModelGrader::Input} for more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::Graders::ScoreModelGrader::Input#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText} for more - # details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
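[Editor's note] The content union documented above accepts three shapes. A minimal sketch, assuming `ResponseInputText` uses its usual `{type:, text:}` wire shape:

    content_as_string      = "Plain text, optionally with {{item.name}} templates"
    content_as_input_text  = {type: :input_text, text: "A text input to the model"}
    content_as_output_text = {type: :output_text, text: "A text output from the model"}
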
- end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - ) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::Graders::ScoreModelGrader::Input#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::Graders::ScoreModelGrader::Input#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 48c1a984..a48b84f7 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -67,8 +67,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStore::ExpiresAfter, nil] - optional :expires_after, -> { OpenAI::VectorStore::ExpiresAfter } + # @return [OpenAI::VectorStoreExpirationAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter } # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. @@ -99,7 +99,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. # - # @param expires_after [OpenAI::VectorStore::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. # # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. # @@ -164,32 +164,6 @@ module Status # @!method self.values # @return [Array] end - - # @see OpenAI::VectorStore#expires_after - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - # @!attribute anchor - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - # - # @return [Symbol, :last_active_at] - required :anchor, const: :last_active_at - - # @!attribute days - # The number of days after the anchor time that the vector store will expire. - # - # @return [Integer] - required :days, Integer - - # @!method initialize(days:, anchor: :last_active_at) - # Some parameter documentations has been truncated, see - # {OpenAI::VectorStore::ExpiresAfter} for more details. - # - # The expiration policy for a vector store. - # - # @param days [Integer] The number of days after the anchor time that the vector store will expire. - # - # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. 
Supported anchors: `
-      end
    end
  end
end
diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb
index a4babcf4..237d6f5f 100644
--- a/lib/openai/models/vector_store_create_params.rb
+++ b/lib/openai/models/vector_store_create_params.rb
@@ -17,8 +17,8 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel
       # @!attribute expires_after
       #   The expiration policy for a vector store.
       #
-      #   @return [OpenAI::VectorStoreCreateParams::ExpiresAfter, nil]
-      optional :expires_after, -> { OpenAI::VectorStoreCreateParams::ExpiresAfter }
+      #   @return [OpenAI::VectorStoreExpirationAfter, nil]
+      optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter }
 
       # @!attribute file_ids
       #   A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
@@ -51,7 +51,7 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel
       #
       #   @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
       #
-      #   @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store.
+      #   @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store.
       #
       #   @param file_ids [Array<String>] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that
       #
       #   @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be useful
       #
       #   @param name [String] The name of the vector store.
       #
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
-
-      class ExpiresAfter < OpenAI::Internal::Type::BaseModel
-        # @!attribute anchor
-        #   Anchor timestamp after which the expiration policy applies. Supported anchors:
-        #   `last_active_at`.
-        #
-        #   @return [Symbol, :last_active_at]
-        required :anchor, const: :last_active_at
-
-        # @!attribute days
-        #   The number of days after the anchor time that the vector store will expire.
-        #
-        #   @return [Integer]
-        required :days, Integer
-
-        # @!method initialize(days:, anchor: :last_active_at)
-        #   Some parameter documentations has been truncated, see
-        #   {OpenAI::VectorStoreCreateParams::ExpiresAfter} for more details.
-        #
-        #   The expiration policy for a vector store.
-        #
-        #   @param days [Integer] The number of days after the anchor time that the vector store will expire.
-        #
-        #   @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: `
-      end
    end
  end
end
diff --git a/lib/openai/models/vector_store_expiration_after.rb b/lib/openai/models/vector_store_expiration_after.rb
new file mode 100644
index 00000000..905f4eaa
--- /dev/null
+++ b/lib/openai/models/vector_store_expiration_after.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel
+      # @!attribute anchor
+      #   Anchor timestamp after which the expiration policy applies. Supported anchors:
+      #   `last_active_at`.
+      #
+      #   @return [Symbol, :last_active_at]
+      required :anchor, const: :last_active_at
+
+      # @!attribute days
+      #   The number of days after the anchor time that the vector store will expire.
+      #
+      #   @return [Integer]
+      required :days, Integer
+
+      # @!method initialize(days:, anchor: :last_active_at)
+      #   Some parameter documentation has been truncated, see
+      #   {OpenAI::VectorStoreExpirationAfter} for more details.
+      #
+      #   The expiration policy for a vector store.
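[Editor's note] With the expiration policy consolidated into this shared model, the same hash shape feeds both create and update calls. A minimal usage sketch (assumes a configured OpenAI::Client; the store name is hypothetical):

    client = OpenAI::Client.new
    store = client.vector_stores.create(
      name: "support-docs",                                # hypothetical store name
      expires_after: {anchor: :last_active_at, days: 30}   # expire 30 days after last activity
    )
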
+ # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end + end +end diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 9fab30cf..fef654c0 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -10,8 +10,8 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreUpdateParams::ExpiresAfter }, nil?: true + # @return [OpenAI::VectorStoreExpirationAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -34,38 +34,13 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreUpdateParams} for more details. # - # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String, nil] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - # @!attribute anchor - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - # - # @return [Symbol, :last_active_at] - required :anchor, const: :last_active_at - - # @!attribute days - # The number of days after the anchor time that the vector store will expire. - # - # @return [Integer] - required :days, Integer - - # @!method initialize(days:, anchor: :last_active_at) - # Some parameter documentations has been truncated, see - # {OpenAI::VectorStoreUpdateParams::ExpiresAfter} for more details. - # - # The expiration policy for a vector store. - # - # @param days [Integer] The number of days after the anchor time that the vector store will expire. - # - # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` - end end end end diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index df39fc22..65b01cb8 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -12,12 +12,14 @@ class Transcriptions # # Transcribes audio into the input language. 
# - # @overload create(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @overload create(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # + # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt @@ -59,12 +61,14 @@ def create(params) # # Transcribes audio into the input language. # - # @overload create_streaming(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @overload create_streaming(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # + # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 37b1d488..1f7c534f 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -145,7 +145,7 @@ def delete(thread_id, params = {}) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -205,7 +205,7 @@ def create_and_run(params) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index cbf1b293..a240f654 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -52,7 +52,7 @@ class Runs # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -120,7 +120,7 @@ def create(thread_id, params) # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 5d87bc09..448254eb 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -17,7 +17,7 @@ class Evals # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. # # @param testing_criteria [Array] A list of graders for all eval runs in this group. # diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 590951f6..9b7fe78b 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -16,7 +16,7 @@ class Runs # # @param eval_id [String] The ID of the evaluation to create a run for. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index c1d3c184..4d0470d5 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -18,7 +18,7 @@ class VectorStores # # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -71,7 +71,7 @@ def retrieve(vector_store_id, params = {}) # # @param vector_store_id [String] The ID of the vector store to modify. # - # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index f26af6b7..7de90e20 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -59,8 +59,12 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams + EvalItem = OpenAI::Models::EvalItem + EvalListParams = OpenAI::Models::EvalListParams + EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig + EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -180,6 +184,8 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter + VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index 1bffb2a0..e35cfeea 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -26,6 +26,22 @@ module OpenAI sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } attr_accessor :model + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + sig do + returns( + T.nilable( + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig + ) + ) + ) + end + attr_accessor :chunking_strategy + # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. 
`logprobs` only works with @@ -116,6 +132,13 @@ module OpenAI params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::AudioModel::OrSymbol), + chunking_strategy: + T.nilable( + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::OrHash + ) + ), include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, @@ -136,6 +159,11 @@ module OpenAI # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). model:, + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + chunking_strategy: nil, # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. `logprobs` only works with @@ -176,6 +204,13 @@ module OpenAI { file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::AudioModel::OrSymbol), + chunking_strategy: + T.nilable( + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig + ) + ), include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, @@ -212,6 +247,144 @@ module OpenAI end end + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + module ChunkingStrategy + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig + ) + end + + class VadConfig < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, + OpenAI::Internal::AnyHash + ) + end + + # Must be set to `server_vad` to enable manual chunking using server side VAD. + sig do + returns( + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type::OrSymbol + ) + end + attr_accessor :type + + # Amount of audio to include before the VAD detected speech (in milliseconds). + sig { returns(T.nilable(Integer)) } + attr_reader :prefix_padding_ms + + sig { params(prefix_padding_ms: Integer).void } + attr_writer :prefix_padding_ms + + # Duration of silence to detect speech stop (in milliseconds). With shorter values + # the model will respond more quickly, but may jump in on short pauses from the + # user. + sig { returns(T.nilable(Integer)) } + attr_reader :silence_duration_ms + + sig { params(silence_duration_ms: Integer).void } + attr_writer :silence_duration_ms + + # Sensitivity threshold (0.0 to 1.0) for voice activity detection. A higher + # threshold will require louder audio to activate the model, and thus might + # perform better in noisy environments. 
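[Editor's note] Taken together, the VAD knobs documented above map onto a small request-time hash. A hedged sketch of passing a manual server-side VAD chunking strategy (assumes a configured OpenAI::Client and a local audio file, both hypothetical):

    client = OpenAI::Client.new
    transcription = client.audio.transcriptions.create(
      file: Pathname("meeting.mp3"),      # hypothetical input file
      model: "gpt-4o-mini-transcribe",
      chunking_strategy: {
        type: :server_vad,                # enable manual server-side VAD chunking
        prefix_padding_ms: 300,           # audio kept before detected speech
        silence_duration_ms: 500,         # silence that ends a chunk
        threshold: 0.5                    # VAD sensitivity (0.0 to 1.0)
      }
    )
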
+ sig { returns(T.nilable(Float)) } + attr_reader :threshold + + sig { params(threshold: Float).void } + attr_writer :threshold + + sig do + params( + type: + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type::OrSymbol, + prefix_padding_ms: Integer, + silence_duration_ms: Integer, + threshold: Float + ).returns(T.attached_class) + end + def self.new( + # Must be set to `server_vad` to enable manual chunking using server side VAD. + type:, + # Amount of audio to include before the VAD detected speech (in milliseconds). + prefix_padding_ms: nil, + # Duration of silence to detect speech stop (in milliseconds). With shorter values + # the model will respond more quickly, but may jump in on short pauses from the + # user. + silence_duration_ms: nil, + # Sensitivity threshold (0.0 to 1.0) for voice activity detection. A higher + # threshold will require louder audio to activate the model, and thus might + # perform better in noisy environments. + threshold: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type::OrSymbol, + prefix_padding_ms: Integer, + silence_duration_ms: Integer, + threshold: Float + } + ) + end + def to_hash + end + + # Must be set to `server_vad` to enable manual chunking using server side VAD. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SERVER_VAD = + T.let( + :server_vad, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::Variants + ] + ) + end + def self.variants + end + end + module TimestampGranularity extend OpenAI::Internal::Type::Enum diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 7e2348cc..153977c8 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -188,21 +188,13 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig do - returns( - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy - ) - ) - end + sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash - ) + T.nilable(OpenAI::Beta::TruncationObject::OrHash) ).void end attr_writer :truncation_strategy @@ -250,9 +242,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -392,10 +382,7 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy - ), + truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), request_options: OpenAI::RequestOptions } ) @@ -1463,103 +1450,6 @@ module OpenAI end end end - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, - OpenAI::Internal::AnyHash - ) - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - sig do - returns( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol - ) - end - attr_accessor :type - - # The number of most recent messages from the thread when constructing the context - # for the run. - sig { returns(T.nilable(Integer)) } - attr_accessor :last_messages - - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - sig do - params( - type: - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - ).returns(T.attached_class) - end - def self.new( - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - type:, - # The number of most recent messages from the thread when constructing the context - # for the run. - last_messages: nil - ) - end - - sig do - override.returns( - { - type: - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) - end - def to_hash - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
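[Editor's note] The shared TruncationObject leaves the wire shape unchanged, so callers still pass a plain hash. A minimal sketch (assumes a configured OpenAI::Client; the thread and assistant IDs are hypothetical):

    client = OpenAI::Client.new
    run = client.beta.threads.runs.create(
      "thread_abc123",                    # hypothetical thread ID
      assistant_id: "asst_abc123",        # hypothetical assistant ID
      truncation_strategy: {type: :last_messages, last_messages: 10}
    )
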
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - AUTO = - T.let( - :auto, - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol - ) - LAST_MESSAGES = - T.let( - :last_messages, - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end end end diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index aba1382a..e6879317 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -207,17 +207,13 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig do - returns(T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy)) - end + sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash - ) + T.nilable(OpenAI::Beta::TruncationObject::OrHash) ).void end attr_writer :truncation_strategy @@ -295,9 +291,7 @@ module OpenAI ) ], truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage::OrHash), temperature: T.nilable(Float), top_p: T.nilable(Float), @@ -460,8 +454,7 @@ module OpenAI OpenAI::Beta::FunctionTool ) ], - truncation_strategy: - T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy), + truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage), temperature: T.nilable(Float), top_p: T.nilable(Float) @@ -753,103 +746,6 @@ module OpenAI end end - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Beta::Threads::Run::TruncationStrategy, - OpenAI::Internal::AnyHash - ) - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - sig do - returns( - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol - ) - end - attr_accessor :type - - # The number of most recent messages from the thread when constructing the context - # for the run. - sig { returns(T.nilable(Integer)) } - attr_accessor :last_messages - - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - sig do - params( - type: - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, - last_messages: T.nilable(Integer) - ).returns(T.attached_class) - end - def self.new( - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
- type:, - # The number of most recent messages from the thread when constructing the context - # for the run. - last_messages: nil - ) - end - - sig do - override.returns( - { - type: - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, - last_messages: T.nilable(Integer) - } - ) - end - def to_hash - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Beta::Threads::Run::TruncationStrategy::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - AUTO = - T.let( - :auto, - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol - ) - LAST_MESSAGES = - T.let( - :last_messages, - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - class Usage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 7e4e98e4..e274167c 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -206,21 +206,13 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig do - returns( - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy - ) - ) - end + sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash - ) + T.nilable(OpenAI::Beta::TruncationObject::OrHash) ).void end attr_writer :truncation_strategy @@ -273,9 +265,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -436,10 +426,7 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy - ), + truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), request_options: OpenAI::RequestOptions } ) @@ -809,103 +796,6 @@ module OpenAI def self.variants end end - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, - OpenAI::Internal::AnyHash - ) - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
-            sig do
-              returns(
-                OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol
-              )
-            end
-            attr_accessor :type
-
-            # The number of most recent messages from the thread when constructing the context
-            # for the run.
-            sig { returns(T.nilable(Integer)) }
-            attr_accessor :last_messages
-
-            # Controls for how a thread will be truncated prior to the run. Use this to
-            # control the intial context window of the run.
-            sig do
-              params(
-                type:
-                  OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol,
-                last_messages: T.nilable(Integer)
-              ).returns(T.attached_class)
-            end
-            def self.new(
-              # The truncation strategy to use for the thread. The default is `auto`. If set to
-              # `last_messages`, the thread will be truncated to the n most recent messages in
-              # the thread. When set to `auto`, messages in the middle of the thread will be
-              # dropped to fit the context length of the model, `max_prompt_tokens`.
-              type:,
-              # The number of most recent messages from the thread when constructing the context
-              # for the run.
-              last_messages: nil
-            )
-            end
-
-            sig do
-              override.returns(
-                {
-                  type:
-                    OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol,
-                  last_messages: T.nilable(Integer)
-                }
-              )
-            end
-            def to_hash
-            end
-
-            # The truncation strategy to use for the thread. The default is `auto`. If set to
-            # `last_messages`, the thread will be truncated to the n most recent messages in
-            # the thread. When set to `auto`, messages in the middle of the thread will be
-            # dropped to fit the context length of the model, `max_prompt_tokens`.
-            module Type
-              extend OpenAI::Internal::Type::Enum
-
-              TaggedSymbol =
-                T.type_alias do
-                  T.all(
-                    Symbol,
-                    OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type
-                  )
-                end
-              OrSymbol = T.type_alias { T.any(Symbol, String) }
-
-              AUTO =
-                T.let(
-                  :auto,
-                  OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol
-                )
-              LAST_MESSAGES =
-                T.let(
-                  :last_messages,
-                  OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol
-                )
-
-              sig do
-                override.returns(
-                  T::Array[
-                    OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol
-                  ]
-                )
-              end
-              def self.values
-              end
-            end
-          end
         end
       end
     end
diff --git a/rbi/openai/models/beta/truncation_object.rbi b/rbi/openai/models/beta/truncation_object.rbi
new file mode 100644
index 00000000..c763ead6
--- /dev/null
+++ b/rbi/openai/models/beta/truncation_object.rbi
@@ -0,0 +1,85 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Beta
+      class TruncationObject < OpenAI::Internal::Type::BaseModel
+        OrHash =
+          T.type_alias do
+            T.any(OpenAI::Beta::TruncationObject, OpenAI::Internal::AnyHash)
+          end
+
+        # The truncation strategy to use for the thread. The default is `auto`. If set to
+        # `last_messages`, the thread will be truncated to the n most recent messages in
+        # the thread. When set to `auto`, messages in the middle of the thread will be
+        # dropped to fit the context length of the model, `max_prompt_tokens`.
+        sig { returns(OpenAI::Beta::TruncationObject::Type::OrSymbol) }
+        attr_accessor :type
+
+        # The number of most recent messages from the thread when constructing the context
+        # for the run.
+        sig { returns(T.nilable(Integer)) }
+        attr_accessor :last_messages
+
+        # Controls for how a thread will be truncated prior to the run. Use this to
+        # control the initial context window of the run.
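+        #
+        # A minimal construction sketch (the values below are illustrative only):
+        #
+        #   OpenAI::Beta::TruncationObject.new(type: :last_messages, last_messages: 10)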
+ sig do + params( + type: OpenAI::Beta::TruncationObject::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: OpenAI::Beta::TruncationObject::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Beta::TruncationObject::Type) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let(:auto, OpenAI::Beta::TruncationObject::Type::TaggedSymbol) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::TruncationObject::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::Beta::TruncationObject::Type::TaggedSymbol] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index dd303b0d..147292fe 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -14,11 +14,12 @@ module OpenAI # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # all embedding models), cannot be an empty string, and any array must be 2048 # dimensions or less. # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # for counting tokens. In addition to the per-input token limit, all embedding + # models enforce a maximum of 300,000 tokens summed across all inputs in a single + # request. sig do returns( T.any( @@ -94,11 +95,12 @@ module OpenAI # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # all embedding models), cannot be an empty string, and any array must be 2048 # dimensions or less. # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # for counting tokens. 
In addition to the per-input token limit, all embedding + # models enforce a maximum of 300,000 tokens summed across all inputs in a single + # request. input:, # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -145,11 +147,12 @@ module OpenAI # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. # The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # all embedding models), cannot be an empty string, and any array must be 2048 # dimensions or less. # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # for counting tokens. In addition to the per-input token limit, all embedding + # models enforce a maximum of 300,000 tokens summed across all inputs in a single + # request. module Input extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index baafa832..a1ba68fb 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -16,6 +16,7 @@ module OpenAI returns( T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ) ) @@ -59,6 +60,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, + OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions::OrHash ), testing_criteria: @@ -100,6 +102,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ), testing_criteria: @@ -129,6 +132,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom, + OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions ) end @@ -195,6 +199,50 @@ module OpenAI end end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Metadata filters for the logs data source. + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :metadata + + sig { params(metadata: T::Hash[Symbol, T.anything]).void } + attr_writer :metadata + + # A data source config which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. + sig do + params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # Metadata filters for the logs data source. + metadata: nil, + # The type of data source. Always `logs`. 
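+            # (A hedged sketch of constructing this config — the metadata filter
+            # below is purely illustrative:
+            #   OpenAI::EvalCreateParams::DataSourceConfig::Logs.new(
+            #     metadata: { usecase: "chatbot" }
+            #   )
+            # `type` then defaults to `:logs`.)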
+ type: :logs + ) + end + + sig do + override.returns( + { type: Symbol, metadata: T::Hash[Symbol, T.anything] } + ) + end + def to_hash + end + end + class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do @@ -204,7 +252,7 @@ module OpenAI ) end - # The type of data source. Always `stored_completions`. + # The type of data source. Always `stored-completions`. sig { returns(Symbol) } attr_accessor :type @@ -215,9 +263,7 @@ module OpenAI sig { params(metadata: T::Hash[Symbol, T.anything]).void } attr_writer :metadata - # A data source config which specifies the metadata property of your stored - # completions query. This is usually metadata like `usecase=chatbot` or - # `prompt-version=v2`, etc. + # Deprecated in favor of LogsDataSourceConfig. sig do params(metadata: T::Hash[Symbol, T.anything], type: Symbol).returns( T.attached_class @@ -226,8 +272,8 @@ module OpenAI def self.new( # Metadata filters for the stored completions data source. metadata: nil, - # The type of data source. Always `stored_completions`. - type: :stored_completions + # The type of data source. Always `stored-completions`. + type: :"stored-completions" ) end @@ -281,7 +327,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalItem ) ] ) @@ -316,7 +362,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage::OrHash, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::OrHash + OpenAI::EvalItem::OrHash ) ], labels: T::Array[String], @@ -350,7 +396,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalItem ) ], labels: T::Array[String], @@ -373,7 +419,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + OpenAI::EvalItem ) end @@ -410,238 +456,6 @@ module OpenAI end end - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. 
Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, - type: - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ), - role: - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, - type: - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
- module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index fd3d328d..a3433afc 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -21,6 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -73,6 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalLogsDataSourceConfig::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -121,6 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -150,6 +153,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/rbi/openai/models/eval_item.rbi b/rbi/openai/models/eval_item.rbi new file mode 100644 index 00000000..841d9960 --- /dev/null +++ b/rbi/openai/models/eval_item.rbi @@ -0,0 +1,161 @@ +# typed: strong + +module OpenAI + module Models + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias { T.any(OpenAI::EvalItem, OpenAI::Internal::AnyHash) } + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig { returns(OpenAI::EvalItem::Role::OrSymbol) } + attr_accessor :role + + # The type of the message input. Always `message`. 
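+      #
+      # A hedged usage sketch (the template string is illustrative only):
+      #
+      #   OpenAI::EvalItem.new(
+      #     role: :user,
+      #     content: "Grade this answer: {{item.answer}}"
+      #   )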
+ sig { returns(T.nilable(OpenAI::EvalItem::Type::OrSymbol)) } + attr_reader :type + + sig { params(type: OpenAI::EvalItem::Type::OrSymbol).void } + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalItem::Content::OutputText::OrHash + ), + role: OpenAI::EvalItem::Role::OrSymbol, + type: OpenAI::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalItem::Content::OutputText + ), + role: OpenAI::EvalItem::Role::OrSymbol, + type: OpenAI::EvalItem::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig { override.returns(T::Array[OpenAI::EvalItem::Content::Variants]) } + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = T.let(:user, OpenAI::EvalItem::Role::TaggedSymbol) + ASSISTANT = T.let(:assistant, OpenAI::EvalItem::Role::TaggedSymbol) + SYSTEM = T.let(:system, OpenAI::EvalItem::Role::TaggedSymbol) + DEVELOPER = T.let(:developer, OpenAI::EvalItem::Role::TaggedSymbol) + + sig { override.returns(T::Array[OpenAI::EvalItem::Role::TaggedSymbol]) } + def self.values + end + end + + # The type of the message input. Always `message`. 
+      module Type
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Type) }
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        MESSAGE = T.let(:message, OpenAI::EvalItem::Type::TaggedSymbol)
+
+        sig { override.returns(T::Array[OpenAI::EvalItem::Type::TaggedSymbol]) }
+        def self.values
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi
index 32fb803c..c8e63e69 100644
--- a/rbi/openai/models/eval_list_response.rbi
+++ b/rbi/openai/models/eval_list_response.rbi
@@ -21,6 +21,7 @@ module OpenAI
         returns(
           T.any(
             OpenAI::EvalCustomDataSourceConfig,
+            OpenAI::EvalLogsDataSourceConfig,
             OpenAI::EvalStoredCompletionsDataSourceConfig
           )
         )
@@ -73,6 +74,7 @@ module OpenAI
           data_source_config:
             T.any(
               OpenAI::EvalCustomDataSourceConfig::OrHash,
+              OpenAI::EvalLogsDataSourceConfig::OrHash,
               OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash
             ),
           metadata: T.nilable(T::Hash[Symbol, String]),
@@ -121,6 +123,7 @@ module OpenAI
           data_source_config:
             T.any(
               OpenAI::EvalCustomDataSourceConfig,
+              OpenAI::EvalLogsDataSourceConfig,
               OpenAI::EvalStoredCompletionsDataSourceConfig
             ),
           metadata: T.nilable(T::Hash[Symbol, String]),
@@ -150,6 +153,7 @@ module OpenAI
           T.type_alias do
             T.any(
               OpenAI::EvalCustomDataSourceConfig,
+              OpenAI::EvalLogsDataSourceConfig,
               OpenAI::EvalStoredCompletionsDataSourceConfig
             )
           end
diff --git a/rbi/openai/models/eval_logs_data_source_config.rbi b/rbi/openai/models/eval_logs_data_source_config.rbi
new file mode 100644
index 00000000..bbaac918
--- /dev/null
+++ b/rbi/openai/models/eval_logs_data_source_config.rbi
@@ -0,0 +1,70 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::EvalLogsDataSourceConfig, OpenAI::Internal::AnyHash)
+        end
+
+      # The json schema for the run data source items. Learn how to build JSON schemas
+      # [here](https://json-schema.org/).
+      sig { returns(T::Hash[Symbol, T.anything]) }
+      attr_accessor :schema
+
+      # The type of data source. Always `logs`.
+      sig { returns(Symbol) }
+      attr_accessor :type
+
+      # Set of 16 key-value pairs that can be attached to an object. This can be useful
+      # for storing additional information about the object in a structured format, and
+      # querying for objects via API or the dashboard.
+      #
+      # Keys are strings with a maximum length of 64 characters. Values are strings with
+      # a maximum length of 512 characters.
+      sig { returns(T.nilable(T::Hash[Symbol, String])) }
+      attr_accessor :metadata
+
+      # A LogsDataSourceConfig which specifies the metadata property of your logs query.
+      # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The
+      # schema returned by this data source config is used to define what variables are
+      # available in your evals. `item` and `sample` are both defined when using this
+      # data source config.
+      sig do
+        params(
+          schema: T::Hash[Symbol, T.anything],
+          metadata: T.nilable(T::Hash[Symbol, String]),
+          type: Symbol
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # The json schema for the run data source items. Learn how to build JSON schemas
+        # [here](https://json-schema.org/).
+        schema:,
+        # Set of 16 key-value pairs that can be attached to an object. This can be useful
+        # for storing additional information about the object in a structured format, and
+        # querying for objects via API or the dashboard.
+ # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + end +end diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index 65635306..baacb528 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -21,6 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -73,6 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalLogsDataSourceConfig::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -121,6 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -150,6 +153,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/rbi/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi index 0658e8f4..ac338ea9 100644 --- a/rbi/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/openai/models/eval_stored_completions_data_source_config.rbi @@ -16,7 +16,7 @@ module OpenAI sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :schema - # The type of data source. Always `stored_completions`. + # The type of data source. Always `stored-completions`. sig { returns(Symbol) } attr_accessor :type @@ -29,11 +29,7 @@ module OpenAI sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata - # A StoredCompletionsDataSourceConfig which specifies the metadata property of - # your stored completions query. This is usually metadata like `usecase=chatbot` - # or `prompt-version=v2`, etc. The schema returned by this data source config is - # used to defined what variables are available in your evals. `item` and `sample` - # are both defined when using this data source config. + # Deprecated in favor of LogsDataSourceConfig. sig do params( schema: T::Hash[Symbol, T.anything], @@ -52,8 +48,8 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, - # The type of data source. Always `stored_completions`. - type: :stored_completions + # The type of data source. Always `stored-completions`. 
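+        # (Note: the hyphenated value must be written as a quoted symbol,
+        # `:"stored-completions"` — the former `:stored_completions` spelling no
+        # longer matches this config.)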
+ type: :"stored-completions" ) end diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index c3cefaf3..f82939b7 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -21,6 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -73,6 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, + OpenAI::EvalLogsDataSourceConfig::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -121,6 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -150,6 +153,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, + OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 7dc975b1..5ab2adf7 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -16,8 +16,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) ) @@ -84,8 +84,8 @@ module OpenAI params( source: T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID::OrHash, + OpenAI::Evals::EvalJSONLFileContentSource::OrHash, + OpenAI::Evals::EvalJSONLFileIDSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions::OrHash ), type: @@ -117,8 +117,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), type: @@ -144,137 +144,12 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) end - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns( - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. 
- sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do @@ -427,10 +302,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - ) + T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) ] ) end @@ -446,7 +318,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::OrHash + OpenAI::EvalItem::OrHash ) ], type: Symbol @@ -468,7 +340,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::EvalItem ) ], type: Symbol @@ -488,246 +360,9 @@ module OpenAI Variants = T.type_alias do - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - ) - end - - class Message < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. 
- sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash - ), - role: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, - type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) + T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) end - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ), - role: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, - type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. 
- text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index f27424f1..af53b5ed 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -15,8 +15,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource ) ) end @@ -32,8 +32,8 @@ module OpenAI params( source: T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::OrHash, - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID::OrHash + OpenAI::Evals::EvalJSONLFileContentSource::OrHash, + OpenAI::Evals::EvalJSONLFileIDSource::OrHash ), type: Symbol ).returns(T.attached_class) @@ -50,8 +50,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Evals::EvalJSONLFileIDSource ), type: Symbol } @@ -66,136 +66,11 @@ module OpenAI Variants = 
            T.type_alias do
              T.any(
-                OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
-                OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID
+                OpenAI::Evals::EvalJSONLFileContentSource,
+                OpenAI::Evals::EvalJSONLFileIDSource
              )
            end
 
-          class FileContent < OpenAI::Internal::Type::BaseModel
-            OrHash =
-              T.type_alias do
-                T.any(
-                  OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent,
-                  OpenAI::Internal::AnyHash
-                )
-              end
-
-            # The content of the jsonl file.
-            sig do
-              returns(
-                T::Array[
-                  OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content
-                ]
-              )
-            end
-            attr_accessor :content
-
-            # The type of jsonl source. Always `file_content`.
-            sig { returns(Symbol) }
-            attr_accessor :type
-
-            sig do
-              params(
-                content:
-                  T::Array[
-                    OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content::OrHash
-                  ],
-                type: Symbol
-              ).returns(T.attached_class)
-            end
-            def self.new(
-              # The content of the jsonl file.
-              content:,
-              # The type of jsonl source. Always `file_content`.
-              type: :file_content
-            )
-            end
-
-            sig do
-              override.returns(
-                {
-                  content:
-                    T::Array[
-                      OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content
-                    ],
-                  type: Symbol
-                }
-              )
-            end
-            def to_hash
-            end
-
-            class Content < OpenAI::Internal::Type::BaseModel
-              OrHash =
-                T.type_alias do
-                  T.any(
-                    OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content,
-                    OpenAI::Internal::AnyHash
-                  )
-                end
-
-              sig { returns(T::Hash[Symbol, T.anything]) }
-              attr_accessor :item
-
-              sig { returns(T.nilable(T::Hash[Symbol, T.anything])) }
-              attr_reader :sample
-
-              sig { params(sample: T::Hash[Symbol, T.anything]).void }
-              attr_writer :sample
-
-              sig do
-                params(
-                  item: T::Hash[Symbol, T.anything],
-                  sample: T::Hash[Symbol, T.anything]
-                ).returns(T.attached_class)
-              end
-              def self.new(item:, sample: nil)
-              end
-
-              sig do
-                override.returns(
-                  {
-                    item: T::Hash[Symbol, T.anything],
-                    sample: T::Hash[Symbol, T.anything]
-                  }
-                )
-              end
-              def to_hash
-              end
-            end
-          end
-
-          class FileID < OpenAI::Internal::Type::BaseModel
-            OrHash =
-              T.type_alias do
-                T.any(
-                  OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID,
-                  OpenAI::Internal::AnyHash
-                )
-              end
-
-            # The identifier of the file.
-            sig { returns(String) }
-            attr_accessor :id
-
-            # The type of jsonl source. Always `file_id`.
-            sig { returns(Symbol) }
-            attr_accessor :type
-
-            sig { params(id: String, type: Symbol).returns(T.attached_class) }
-            def self.new(
-              # The identifier of the file.
-              id:,
-              # The type of jsonl source. Always `file_id`.
-              type: :file_id
-            )
-            end
-
-            sig { override.returns({ id: String, type: Symbol }) }
-            def to_hash
-            end
-          end
-
           sig do
             override.returns(
               T::Array[
diff --git a/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi b/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi
new file mode 100644
index 00000000..1567b220
--- /dev/null
+++ b/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi
@@ -0,0 +1,591 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Evals
+      class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::Evals::CreateEvalResponsesRunDataSource,
+              OpenAI::Internal::AnyHash
+            )
+          end
+
+        # An EvalResponsesSource object describing a run data source configuration.
+        sig do
+          returns(
+            T.any(
+              OpenAI::Evals::EvalJSONLFileContentSource,
+              OpenAI::Evals::EvalJSONLFileIDSource,
+              OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses
+            )
+          )
+        end
+        attr_accessor :source
+
+        # The type of run data source. Always `responses`.
+        sig do
+          returns(
+            OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol
+          )
+        end
+        attr_accessor :type
+
+        sig do
+          returns(
+            T.nilable(
+              T.any(
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template,
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference
+              )
+            )
+          )
+        end
+        attr_reader :input_messages
+
+        sig do
+          params(
+            input_messages:
+              T.any(
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash,
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash
+              )
+          ).void
+        end
+        attr_writer :input_messages
+
+        # The name of the model to use for generating completions (e.g. "o3-mini").
+        sig { returns(T.nilable(String)) }
+        attr_reader :model
+
+        sig { params(model: String).void }
+        attr_writer :model
+
+        sig do
+          returns(
+            T.nilable(
+              OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams
+            )
+          )
+        end
+        attr_reader :sampling_params
+
+        sig do
+          params(
+            sampling_params:
+              OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash
+          ).void
+        end
+        attr_writer :sampling_params
+
+        # A ResponsesRunDataSource object describing a model sampling configuration.
+        sig do
+          params(
+            source:
+              T.any(
+                OpenAI::Evals::EvalJSONLFileContentSource::OrHash,
+                OpenAI::Evals::EvalJSONLFileIDSource::OrHash,
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses::OrHash
+              ),
+            type:
+              OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol,
+            input_messages:
+              T.any(
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash,
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash
+              ),
+            model: String,
+            sampling_params:
+              OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash
+          ).returns(T.attached_class)
+        end
+        def self.new(
+          # An EvalResponsesSource object describing a run data source configuration.
+          source:,
+          # The type of run data source. Always `responses`.
+          type:,
+          input_messages: nil,
+          # The name of the model to use for generating completions (e.g. "o3-mini").
+          model: nil,
+          sampling_params: nil
+        )
+        end
+
+        sig do
+          override.returns(
+            {
+              source:
+                T.any(
+                  OpenAI::Evals::EvalJSONLFileContentSource,
+                  OpenAI::Evals::EvalJSONLFileIDSource,
+                  OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses
+                ),
+              type:
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol,
+              input_messages:
+                T.any(
+                  OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template,
+                  OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference
+                ),
+              model: String,
+              sampling_params:
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams
+            }
+          )
+        end
+        def to_hash
+        end
+
+        # An EvalResponsesSource object describing a run data source configuration.
+        module Source
+          extend OpenAI::Internal::Type::Union
+
+          Variants =
+            T.type_alias do
+              T.any(
+                OpenAI::Evals::EvalJSONLFileContentSource,
+                OpenAI::Evals::EvalJSONLFileIDSource,
+                OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses
+              )
+            end
+
+          class Responses < OpenAI::Internal::Type::BaseModel
+            OrHash =
+              T.type_alias do
+                T.any(
+                  OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses,
+                  OpenAI::Internal::AnyHash
+                )
+              end
+
+            # The type of run data source. Always `responses`.
+            sig { returns(Symbol) }
+            attr_accessor :type
+
+            # Only include items created after this timestamp (inclusive). This is a query
+            # parameter used to select responses.
+            sig { returns(T.nilable(Integer)) }
+            attr_accessor :created_after
+
+            # Only include items created before this timestamp (inclusive). This is a query
+            # parameter used to select responses.
+            sig { returns(T.nilable(Integer)) }
+            attr_accessor :created_before
+
+            # Whether the response has tool calls. This is a query parameter used to select
+            # responses.
+            sig { returns(T.nilable(T::Boolean)) }
+            attr_accessor :has_tool_calls
+
+            # Optional string to search the 'instructions' field. This is a query parameter
+            # used to select responses.
+            sig { returns(T.nilable(String)) }
+            attr_accessor :instructions_search
+
+            # Metadata filter for the responses. This is a query parameter used to select
+            # responses.
+            sig { returns(T.nilable(T.anything)) }
+            attr_accessor :metadata
+
+            # The name of the model to find responses for. This is a query parameter used to
+            # select responses.
+            sig { returns(T.nilable(String)) }
+            attr_accessor :model
+
+            # Optional reasoning effort parameter. This is a query parameter used to select
+            # responses.
+            sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) }
+            attr_accessor :reasoning_effort
+
+            # Sampling temperature. This is a query parameter used to select responses.
+            sig { returns(T.nilable(Float)) }
+            attr_accessor :temperature
+
+            # List of tool names. This is a query parameter used to select responses.
+            sig { returns(T.nilable(T::Array[String])) }
+            attr_accessor :tools
+
+            # Nucleus sampling parameter. This is a query parameter used to select responses.
+            sig { returns(T.nilable(Float)) }
+            attr_accessor :top_p
+
+            # List of user identifiers. This is a query parameter used to select responses.
+            sig { returns(T.nilable(T::Array[String])) }
+            attr_accessor :users
+
+            # An EvalResponsesSource object describing a run data source configuration.
+            sig do
+              params(
+                created_after: T.nilable(Integer),
+                created_before: T.nilable(Integer),
+                has_tool_calls: T.nilable(T::Boolean),
+                instructions_search: T.nilable(String),
+                metadata: T.nilable(T.anything),
+                model: T.nilable(String),
+                reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol),
+                temperature: T.nilable(Float),
+                tools: T.nilable(T::Array[String]),
+                top_p: T.nilable(Float),
+                users: T.nilable(T::Array[String]),
+                type: Symbol
+              ).returns(T.attached_class)
+            end
+            def self.new(
+              # Only include items created after this timestamp (inclusive). This is a query
+              # parameter used to select responses.
+              created_after: nil,
+              # Only include items created before this timestamp (inclusive). This is a query
+              # parameter used to select responses.
+              created_before: nil,
+              # Whether the response has tool calls. This is a query parameter used to select
+              # responses.
+              has_tool_calls: nil,
+              # Optional string to search the 'instructions' field. This is a query parameter
+              # used to select responses.
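+              # (For illustration only, a hypothetical `instructions_search: "translate"`
+              # would match responses whose instructions mention "translate".)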
+ instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Variants + ] + ) + end + def self.variants + end + end + + # The type of run data source. Always `responses`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalResponsesRunDataSource::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + RESPONSES = + T.let( + :responses, + OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. 
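+              # (A hedged sketch of a two-entry template — field values are
+              # illustrative:
+              #   template: [
+              #     { role: "system", content: "You are a grader." },
+              #     { role: "user", content: "Grade: {{item.answer}}" }
+              #   ]
+              # where `{{item.answer}}` pulls a per-row variable from the "item"
+              # namespace.)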
+ type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ) + end + + sig { override.returns({ item_reference: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. 
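+          #
+          # A hedged configuration sketch (all values illustrative):
+          #
+          #   OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams.new(
+          #     temperature: 0.2,
+          #     top_p: 0.9,
+          #     max_completion_tokens: 256,
+          #     seed: 42
+          #   )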
+ sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi new file mode 100644 index 00000000..741fced0 --- /dev/null +++ b/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi @@ -0,0 +1,97 @@ +# typed: strong + +module OpenAI + module Models + EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource + + module Evals + class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::EvalJSONLFileContentSource, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns(T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content]) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::EvalJSONLFileContentSource::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
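+        # A hedged sketch of an inline file_content source; the keys mirror the
+        # Content model below and the values are invented for illustration:
+        #
+        #   {
+        #     type: :file_content,
+        #     content: [
+        #       {item: {question: "What is 2 + 2?"}, sample: {answer: "4"}}
+        #     ]
+        #   }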
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::EvalJSONLFileContentSource::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi new file mode 100644 index 00000000..c9da7ca8 --- /dev/null +++ b/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource + + module Evals + class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. 
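+        # By contrast, a file_id source only names an uploaded file; the
+        # identifier below is a hypothetical placeholder:
+        #
+        #   {type: :file_id, id: "file-abc123"}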
+ type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 2c1f6358..a305b087 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -214,926 +214,10 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end - class Completions < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `completions`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). - sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. 
- sig do - params( - source: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses::OrHash - ), - input_messages: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil, - # The type of run data source. Always `completions`. - type: :completions - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - ), - type: Symbol, - input_messages: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - ) - end - - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. 
- type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(id: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :allow_parallel_tool_calls - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional search string for instructions. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig do - returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) - end - attr_accessor :reasoning_effort - - # Sampling temperature. 
This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - allow_parallel_tool_calls: nil, - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional search string for instructions. This is a query parameter used to - # select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. 
- type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Variants - ] - ) - end - def self.variants - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. - type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
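+          # A hedged sketch of the hierarchy described above: the `developer`
+          # message takes precedence over the `user` message that follows it
+          # (roles and the {{item.name}} variable come from the docs; the text
+          # itself is invented):
+          #
+          #   [
+          #     {role: :developer, content: "Answer in one sentence."},
+          #     {role: :user, content: "Summarize {{item.name}}."}
+          #   ]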
- module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig do - override.returns({ item_reference: String, type: Symbol }) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 7b1ebb3f..878fc731 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -18,7 +18,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalResponsesRunDataSource ) ) end @@ -46,7 +46,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -76,7 +76,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalResponsesRunDataSource ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -96,960 +96,10 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `completions`. - sig do - returns( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol - ) - end - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). 
- sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. - sig do - params( - source: - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses::OrHash - ), - type: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - # The type of run data source. Always `completions`. - type:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ), - type: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. 
- sig do - returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(id: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :allow_parallel_tool_calls - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional search string for instructions. This is a query parameter used to - # select responses. 
- sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } - attr_accessor :reasoning_effort - - # Sampling temperature. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - allow_parallel_tool_calls: nil, - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional search string for instructions. This is a query parameter used to - # select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. 
- type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Variants - ] - ) - end - def self.variants - end - end - - # The type of run data source. Always `completions`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - COMPLETIONS = - T.let( - :completions, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. 
- type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. 
Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
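+            # Since OrSymbol below is T.any(Symbol, String), both spellings are,
+            # on a hedged reading of the alias, accepted for the role:
+            #
+            #   {role: :developer, content: "..."}  # symbol form
+            #   {role: "developer", content: "..."} # string form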
- module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. 
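+        # A hedged example of an item_reference payload, reusing the documented
+        # "item.name" reference:
+        #
+        #   {type: :item_reference, item_reference: "item.name"}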
- type: :item_reference - ) - end - - sig do - override.returns({ item_reference: String, type: Symbol }) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - sig do override.returns( T::Array[OpenAI::Evals::RunCreateParams::DataSource::Variants] diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index d4f02bc5..ffa092b4 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -214,926 +214,10 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end - class Completions < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `completions`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). 
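# With the hunks above, the run's `data_source` union resolves to the shared
# `OpenAI::Evals::CreateEvalResponsesRunDataSource` rather than a per-response
# `Completions` model. A sketch of branching over the union with a pattern
# match; `run` is a hypothetical run object exposing `data_source`.
def data_source_kind(run)
  case run.data_source
  in OpenAI::Evals::CreateEvalJSONLRunDataSource then :jsonl
  in OpenAI::Evals::CreateEvalCompletionsRunDataSource then :completions
  in OpenAI::Evals::CreateEvalResponsesRunDataSource then :responses
  end
end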
- sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. - sig do - params( - source: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses::OrHash - ), - input_messages: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil, - # The type of run data source. Always `completions`. - type: :completions - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - ), - type: Symbol, - input_messages: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - ) - end - - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. 
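# A sketch of the `file_content` jsonl source defined here: each row carries
# a required `item` hash and an optional `sample` hash (field values are
# hypothetical).
source = {
  type: :file_content,
  content: [
    { item: { question: "What is 2 + 2?" }, sample: { answer: "4" } },
    { item: { question: "Capital of France?" } }
  ]
}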
- sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(id: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :allow_parallel_tool_calls - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional search string for instructions. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. 
This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig do - returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) - end - attr_accessor :reasoning_effort - - # Sampling temperature. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - allow_parallel_tool_calls: nil, - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional search string for instructions. This is a query parameter used to - # select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. 
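# A sketch of the `responses` source assembled here, which selects previously
# stored responses by the nilable query parameters above; the values are
# hypothetical, and the timestamps are assumed to be Unix epoch seconds.
source = {
  type: :responses,
  model: "o3-mini",
  created_after: 1_700_000_000,
  has_tool_calls: false,
  users: ["user-123"]
}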
- type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Variants - ] - ) - end - def self.variants - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. - type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
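# A sketch of the `template` input-messages shape above, mixing the two
# members of the union that follows: a bare chat message and a typed eval
# item; the "{{item.name}}" interpolation follows the comment above, and
# the message contents are hypothetical.
input_messages = {
  type: :template,
  template: [
    { role: "system", content: "You are a strict grader." },
    { type: :message, role: :user, content: "Grade {{item.name}} carefully." }
  ]
}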
- module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
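# The `content` union above accepts three shapes; a sketch of each, assuming
# `OrHash` coercion for the hash forms (the `input_text` layout is inferred
# from the `ResponseInputText` class name, and the strings are placeholders).
as_string      = "plain template text"
as_input_text  = { type: :input_text, text: "text shown to the model" }
as_output_text = { type: :output_text, text: "model output to grade" }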
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig do - override.returns({ item_reference: String, type: Symbol }) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index 1306554c..f398957e 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -212,926 +212,10 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end - class Completions < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `completions`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). - sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. 
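# Putting the pieces together: a sketch of a complete `completions`-typed run
# data source as documented in this class (source, optional input messages,
# model, and sampling params); all literal values are hypothetical.
data_source = {
  type: :completions,
  source: { type: :file_id, id: "file-abc123" },
  input_messages: { type: :item_reference, item_reference: "item.messages" },
  model: "o3-mini",
  sampling_params: { temperature: 0.2, max_completion_tokens: 512 }
}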
- sig do - params( - source: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses::OrHash - ), - input_messages: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil, - # The type of run data source. Always `completions`. - type: :completions - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - ), - type: Symbol, - input_messages: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - ) - end - - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. 
- type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(id: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :allow_parallel_tool_calls - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional search string for instructions. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig do - returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) - end - attr_accessor :reasoning_effort - - # Sampling temperature. 
This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - allow_parallel_tool_calls: nil, - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional search string for instructions. This is a query parameter used to - # select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. 
- type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Variants - ] - ) - end - def self.variants - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. - type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
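# A sketch of the instruction hierarchy described above: the `developer`
# message takes precedence over the `user` message that follows it (the
# contents are hypothetical).
template = [
  { type: :message, role: :developer, content: "Always answer in JSON." },
  { type: :message, role: :user, content: "{{item.question}}" }
]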
- module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
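# Per the enum above, `values` returns the four tagged role symbols; at
# runtime they behave as plain Symbols, so a membership check is a sketch
# like the following (the role being validated is hypothetical).
roles = %i[user assistant system developer]
role = :developer
raise ArgumentError, "unsupported role: #{role}" unless roles.include?(role)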
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig do - override.returns({ item_reference: String, type: Symbol }) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index eab94463..81fb22ff 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) ) end @@ -113,7 +113,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -181,7 +181,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -216,926 +216,10 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + OpenAI::Evals::CreateEvalResponsesRunDataSource ) end - class Completions < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `completions`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). 
- sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. - sig do - params( - source: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses::OrHash - ), - input_messages: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil, - # The type of run data source. Always `completions`. - type: :completions - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - ), - type: Symbol, - input_messages: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - ) - end - - class FileContent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content - ] - ) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. 
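# The two jsonl sources defined alongside each other here are interchangeable
# at this position: inline rows via `file_content`, or an uploaded file via
# `file_id` (the file id is hypothetical).
by_file = { type: :file_id, id: "file-xyz789" }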
- sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content - ], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - - class FileID < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(id: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :allow_parallel_tool_calls - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional search string for instructions. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. 
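# Minimal sketches of the two jsonl-backed source variants above, with
# placeholder ids and rows; `sample` is optional on each file_content row:
file_content_source = {
  type: :file_content,
  content: [{ item: { question: "What is 2 + 2?" }, sample: { answer: "4" } }]
}
file_id_source = { type: :file_id, id: "file-abc123" }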
- sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig do - returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) - end - attr_accessor :reasoning_effort - - # Sampling temperature. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Whether to allow parallel tool calls. This is a query parameter used to select - # responses. - allow_parallel_tool_calls: nil, - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional search string for instructions. This is a query parameter used to - # select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. 
- type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - allow_parallel_tool_calls: T.nilable(T::Boolean), - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), - temperature: T.nilable(Float), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Variants - ] - ) - end - def self.variants - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. - type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
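# A minimal sketch of a `responses` source: every field is a nilable query
# filter, so only the keys you set constrain which stored responses are
# selected (all values below are placeholders):
responses_source = {
  type: :responses,
  model: "gpt-4o",
  created_after: 1_700_000_000, # unix seconds, inclusive
  has_tool_calls: false,
  temperature: 0.7,
  users: ["user-123"]
}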
- module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash - ), - role: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::OrSymbol, - type: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. 
Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ), - role: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, - type: - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
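# A minimal sketch of a `template` input_messages value mixing the two member
# types above: a bare ChatMessage (string role) and EvalItems, one with an
# output_text content part; "{{item.question}}" is a placeholder reference
# into the "item" namespace:
input_messages = {
  type: :template,
  template: [
    { role: "system", content: "Answer tersely." },
    { role: :user, content: "{{item.question}}", type: :message },
    { role: :assistant, content: { type: :output_text, text: "4" } }
  ]
}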
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig do - override.returns({ item_reference: String, type: Symbol }) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
- top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - sig do override.returns( T::Array[ diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index 21a26e15..e43f6cd0 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -318,11 +318,8 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } - attr_reader :batch_size - - sig { params(batch_size: T.any(Symbol, Integer)).void } - attr_writer :batch_size + sig { returns(T.nilable(T.any(T.anything, Symbol, Integer))) } + attr_accessor :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. @@ -344,7 +341,7 @@ module OpenAI # returned when running `supervised` jobs. sig do params( - batch_size: T.any(Symbol, Integer), + batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) @@ -365,7 +362,7 @@ module OpenAI sig do override.returns( { - batch_size: T.any(Symbol, Integer), + batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) } @@ -379,7 +376,8 @@ module OpenAI module BatchSize extend OpenAI::Internal::Type::Union - Variants = T.type_alias { T.any(Symbol, Integer) } + Variants = + T.type_alias { T.nilable(T.any(T.anything, Symbol, Integer)) } sig do override.returns( diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi index 9d062b87..42632db8 100644 --- a/rbi/openai/models/graders/label_model_grader.rbi +++ b/rbi/openai/models/graders/label_model_grader.rbi @@ -11,7 +11,7 @@ module OpenAI T.any(OpenAI::Graders::LabelModelGrader, OpenAI::Internal::AnyHash) end - sig { returns(T::Array[OpenAI::Graders::LabelModelGrader::Input]) } + sig { returns(T::Array[OpenAI::EvalItem]) } attr_accessor :input # The labels to assign to each item in the evaluation. @@ -38,7 +38,7 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[OpenAI::Graders::LabelModelGrader::Input::OrHash], + input: T::Array[OpenAI::EvalItem::OrHash], labels: T::Array[String], model: String, name: String, @@ -64,7 +64,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::Graders::LabelModelGrader::Input], + input: T::Array[OpenAI::EvalItem], labels: T::Array[String], model: String, name: String, @@ -75,225 +75,6 @@ module OpenAI end def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader::Input, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns(OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol) - end - attr_accessor :role - - # The type of the message input. Always `message`. 
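# With `batch_size` widened to a nilable union (and its reader/writer pair
# collapsed into attr_accessor), all three of these now typecheck; :auto as
# the Symbol variant is an assumption based on the API's "auto" sentinel:
hyperparameters = { batch_size: :auto, learning_rate_multiplier: 0.1, n_epochs: 3 }
hyperparameters = { batch_size: 8 }
hyperparameters = { batch_size: nil } # explicitly representable after this change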
- sig do - returns( - T.nilable( - OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash - ), - role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, - type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - ), - role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, - type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Graders::LabelModelGrader::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. 
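# A minimal sketch of a label-model grader after the switch to the shared
# OpenAI::EvalItem input type; the :label_model tag and the concrete values
# are assumptions, while the field names mirror the signature above:
label_grader = {
  type: :label_model,
  name: "sentiment",
  model: "gpt-4o",
  input: [
    { role: :system, content: "Classify the sentiment of the reply." },
    { role: :user, content: "{{item.reply}}" }
  ],
  labels: %w[positive neutral negative]
}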
- module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Role) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end end end diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi index ab602eda..d17b745c 100644 --- a/rbi/openai/models/graders/score_model_grader.rbi +++ b/rbi/openai/models/graders/score_model_grader.rbi @@ -12,7 +12,7 @@ module OpenAI end # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::Graders::ScoreModelGrader::Input]) } + sig { returns(T::Array[OpenAI::EvalItem]) } attr_accessor :input # The model to use for the evaluation. @@ -44,7 +44,7 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[OpenAI::Graders::ScoreModelGrader::Input::OrHash], + input: T::Array[OpenAI::EvalItem::OrHash], model: String, name: String, range: T::Array[Float], @@ -71,7 +71,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::Graders::ScoreModelGrader::Input], + input: T::Array[OpenAI::EvalItem], model: String, name: String, type: Symbol, @@ -82,225 +82,6 @@ module OpenAI end def to_hash end - - class Input < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Graders::ScoreModelGrader::Input, - OpenAI::Internal::AnyHash - ) - end - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig do - returns(OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol) - end - attr_accessor :role - - # The type of the message input. Always `message`. - sig do - returns( - T.nilable( - OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol - ) - ) - end - attr_reader :type - - sig do - params( - type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol - ).void - end - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. 
Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash - ), - role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, - type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - ), - role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, - type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig do - params(text: String, type: Symbol).returns(T.attached_class) - end - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Graders::ScoreModelGrader::Input::Content::Variants - ] - ) - end - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = - T.let( - :user, - OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol - ) - ASSISTANT = - T.let( - :assistant, - OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol - ) - SYSTEM = - T.let( - :system, - OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol - ) - DEVELOPER = - T.let( - :developer, - OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol - ] - ) - end - def self.values - end - end - - # The type of the message input. Always `message`. 
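# The same shift applies here: `input` is now an array of the shared
# OpenAI::EvalItem. A minimal sketch, with the :score_model tag assumed from
# the class name and `range` bounding the returned score:
score_grader = {
  type: :score_model,
  name: "helpfulness",
  model: "gpt-4o",
  range: [0.0, 1.0],
  input: [
    { role: :developer, content: "Score the answer between 0 and 1." },
    { role: :user, content: "{{item.answer}}" }
  ]
}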
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = - T.let( - :message, - OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - end end end end diff --git a/rbi/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi index 1e5ec9ca..c3c6dc2d 100644 --- a/rbi/openai/models/vector_store.rbi +++ b/rbi/openai/models/vector_store.rbi @@ -52,11 +52,11 @@ module OpenAI attr_accessor :usage_bytes # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStore::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } attr_reader :expires_after sig do - params(expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash).void + params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void end attr_writer :expires_after @@ -76,7 +76,7 @@ module OpenAI name: String, status: OpenAI::VectorStore::Status::OrSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash, + expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, expires_at: T.nilable(Integer), object: Symbol ).returns(T.attached_class) @@ -125,7 +125,7 @@ module OpenAI object: Symbol, status: OpenAI::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter, + expires_after: OpenAI::VectorStoreExpirationAfter, expires_at: T.nilable(Integer) } ) @@ -218,37 +218,6 @@ module OpenAI def self.values end end - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash) - end - - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - sig { returns(Symbol) } - attr_accessor :anchor - - # The number of days after the anchor time that the vector store will expire. - sig { returns(Integer) } - attr_accessor :days - - # The expiration policy for a vector store. - sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new( - # The number of days after the anchor time that the vector store will expire. - days:, - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - anchor: :last_active_at - ) - end - - sig { override.returns({ anchor: Symbol, days: Integer }) } - def to_hash - end - end end end end diff --git a/rbi/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi index 674fc93d..fb7b5912 100644 --- a/rbi/openai/models/vector_store_create_params.rbi +++ b/rbi/openai/models/vector_store_create_params.rbi @@ -37,13 +37,11 @@ module OpenAI attr_writer :chunking_strategy # The expiration policy for a vector store. 
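# With ExpiresAfter hoisted into the shared OpenAI::VectorStoreExpirationAfter
# model (the new rbi file appears below), create and update accept the same
# hash. A minimal sketch, assuming a configured client and placeholder values:
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
client.vector_stores.create(
  name: "support-docs",
  expires_after: { anchor: :last_active_at, days: 7 }
)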
- sig { returns(T.nilable(OpenAI::VectorStoreCreateParams::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } attr_reader :expires_after sig do - params( - expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash - ).void + params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void end attr_writer :expires_after @@ -79,7 +77,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, + expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -117,7 +115,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam ), - expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + expires_after: OpenAI::VectorStoreExpirationAfter, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -127,40 +125,6 @@ module OpenAI end def to_hash end - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::VectorStoreCreateParams::ExpiresAfter, - OpenAI::Internal::AnyHash - ) - end - - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - sig { returns(Symbol) } - attr_accessor :anchor - - # The number of days after the anchor time that the vector store will expire. - sig { returns(Integer) } - attr_accessor :days - - # The expiration policy for a vector store. - sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new( - # The number of days after the anchor time that the vector store will expire. - days:, - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - anchor: :last_active_at - ) - end - - sig { override.returns({ anchor: Symbol, days: Integer }) } - def to_hash - end - end end end end diff --git a/rbi/openai/models/vector_store_expiration_after.rbi b/rbi/openai/models/vector_store_expiration_after.rbi new file mode 100644 index 00000000..7b06060c --- /dev/null +++ b/rbi/openai/models/vector_store_expiration_after.rbi @@ -0,0 +1,36 @@ +# typed: strong + +module OpenAI + module Models + class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::VectorStoreExpirationAfter, OpenAI::Internal::AnyHash) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
+ anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi index 1d755b92..8e2409f5 100644 --- a/rbi/openai/models/vector_store_update_params.rbi +++ b/rbi/openai/models/vector_store_update_params.rbi @@ -12,13 +12,12 @@ module OpenAI end # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } attr_reader :expires_after sig do params( - expires_after: - T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash) + expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash) ).void end attr_writer :expires_after @@ -38,8 +37,7 @@ module OpenAI sig do params( - expires_after: - T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), + expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash @@ -64,8 +62,7 @@ module OpenAI sig do override.returns( { - expires_after: - T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter), + expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions @@ -74,40 +71,6 @@ module OpenAI end def to_hash end - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::VectorStoreUpdateParams::ExpiresAfter, - OpenAI::Internal::AnyHash - ) - end - - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - sig { returns(Symbol) } - attr_accessor :anchor - - # The number of days after the anchor time that the vector store will expire. - sig { returns(Integer) } - attr_accessor :days - - # The expiration policy for a vector store. - sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new( - # The number of days after the anchor time that the vector store will expire. - days:, - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - anchor: :last_active_at - ) - end - - sig { override.returns({ anchor: Symbol, days: Integer }) } - def to_hash - end - end end end end diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index 80543b1d..02148f4b 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -12,6 +12,13 @@ module OpenAI params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::AudioModel::OrSymbol), + chunking_strategy: + T.nilable( + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::OrHash + ) + ), include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, @@ -38,6 +45,11 @@ module OpenAI # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). model:, + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. 
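# A minimal sketch of the new chunking_strategy parameter described above:
# pass the :auto sentinel, or a server_vad hash matching the VadConfig shape
# in the RBS further down. The file path, model choice, and tuning values are
# placeholders, and `client` is assumed to be configured:
require "openai"
require "pathname"

client = OpenAI::Client.new
client.audio.transcriptions.create(
  file: Pathname("meeting.wav"),
  model: "gpt-4o-transcribe",
  chunking_strategy: {
    type: :server_vad,
    prefix_padding_ms: 300,
    silence_duration_ms: 200,
    threshold: 0.5
  }
)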
If unset, the audio is transcribed as a single block. + chunking_strategy: nil, # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. `logprobs` only works with @@ -84,6 +96,13 @@ module OpenAI params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), model: T.any(String, OpenAI::AudioModel::OrSymbol), + chunking_strategy: + T.nilable( + T.any( + Symbol, + OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::OrHash + ) + ), include: T::Array[OpenAI::Audio::TranscriptionInclude::OrSymbol], language: String, prompt: String, @@ -112,6 +131,11 @@ module OpenAI # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). model:, + # Controls how the audio is cut into chunks. When set to `"auto"`, the server + # first normalizes loudness and then uses voice activity detection (VAD) to choose + # boundaries. `server_vad` object can be provided to tweak VAD detection + # parameters manually. If unset, the audio is transcribed as a single block. + chunking_strategy: nil, # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. `logprobs` only works with diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index 9279a612..d3b01a5d 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -148,9 +148,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -298,9 +296,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index a1d803bf..755fcfbc 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -61,9 +61,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -237,9 +235,7 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash - ), + T.nilable(OpenAI::Beta::TruncationObject::OrHash), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index aec35a9c..061e114c 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -25,11 +25,12 @@ module OpenAI # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. 
# The input must not exceed the max input tokens for the model (8192 tokens for - # `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048 + # all embedding models), cannot be an empty string, and any array must be 2048 # dimensions or less. # [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken) - # for counting tokens. Some models may also impose a limit on total number of - # tokens summed across inputs. + # for counting tokens. In addition to the per-input token limit, all embedding + # models enforce a maximum of 300,000 tokens summed across all inputs in a single + # request. input:, # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index bcc6ae0a..c7b5ca65 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -16,6 +16,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCreateParams::DataSourceConfig::Custom::OrHash, + OpenAI::EvalCreateParams::DataSourceConfig::Logs::OrHash, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions::OrHash ), testing_criteria: diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 183f65ed..86e351a0 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -15,7 +15,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 7be8933d..802f6110 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -17,7 +17,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, + expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -65,8 +65,7 @@ module OpenAI sig do params( vector_store_id: String, - expires_after: - T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), + expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index f7ef937f..e151276a 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -57,8 +57,12 @@ module OpenAI class EvalDeleteParams = OpenAI::Models::EvalDeleteParams + class EvalItem = OpenAI::Models::EvalItem + class EvalListParams = OpenAI::Models::EvalListParams + class EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig + class EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams module Evals = OpenAI::Models::Evals @@ -173,6 +177,8 @@ module OpenAI class VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams + class VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter + class VectorStoreListParams = OpenAI::Models::VectorStoreListParams class VectorStoreRetrieveParams = 
OpenAI::Models::VectorStoreRetrieveParams diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index fda04c86..18d8e5aa 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -5,6 +5,7 @@ module OpenAI { file: (Pathname | StringIO | IO | OpenAI::FilePart), model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, include: ::Array[OpenAI::Models::Audio::transcription_include], language: String, prompt: String, @@ -22,6 +23,8 @@ module OpenAI attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model + attr_accessor chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy? + attr_reader include: ::Array[OpenAI::Models::Audio::transcription_include]? def include=: ( @@ -55,6 +58,7 @@ module OpenAI def initialize: ( file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, ?prompt: String, @@ -72,6 +76,57 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::model] end + type chunking_strategy = + :auto + | OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig + + module ChunkingStrategy + extend OpenAI::Internal::Type::Union + + type vad_config = + { + type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + prefix_padding_ms: Integer, + silence_duration_ms: Integer, + threshold: Float + } + + class VadConfig < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_ + + attr_reader prefix_padding_ms: Integer? + + def prefix_padding_ms=: (Integer) -> Integer + + attr_reader silence_duration_ms: Integer? + + def silence_duration_ms=: (Integer) -> Integer + + attr_reader threshold: Float? + + def threshold=: (Float) -> Float + + def initialize: ( + type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + ?prefix_padding_ms: Integer, + ?silence_duration_ms: Integer, + ?threshold: Float + ) -> void + + type type_ = :server_vad + + module Type + extend OpenAI::Internal::Type::Enum + + SERVER_VAD: :server_vad + + def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy] + end + type timestamp_granularity = :word | :segment module TimestampGranularity diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index ca752e94..406904b2 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -17,7 +17,7 @@ module OpenAI tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? + truncation_strategy: OpenAI::Beta::TruncationObject? 
} & OpenAI::Internal::Type::request_parameters @@ -59,7 +59,7 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? + attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? def initialize: ( assistant_id: String, @@ -76,7 +76,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> void @@ -383,34 +383,6 @@ module OpenAI def initialize: (?vector_store_ids: ::Array[String]) -> void end end - - type truncation_strategy = - { - type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, - last_messages: Integer? - } - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ - - attr_accessor last_messages: Integer? - - def initialize: ( - type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, - ?last_messages: Integer? - ) -> void - - type type_ = :auto | :last_messages - - module Type - extend OpenAI::Internal::Type::Enum - - AUTO: :auto - LAST_MESSAGES: :last_messages - - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] - end - end end end end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 0d4ed055..50a8ae87 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -27,7 +27,7 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + truncation_strategy: OpenAI::Beta::TruncationObject?, usage: OpenAI::Beta::Threads::Run::Usage?, temperature: Float?, top_p: Float? @@ -80,7 +80,7 @@ module OpenAI attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool] - attr_accessor truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy? + attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? attr_accessor usage: OpenAI::Beta::Threads::Run::Usage? @@ -111,7 +111,7 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + truncation_strategy: OpenAI::Beta::TruncationObject?, usage: OpenAI::Beta::Threads::Run::Usage?, ?temperature: Float?, ?top_p: Float?, @@ -203,34 +203,6 @@ module OpenAI end end - type truncation_strategy = - { - type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, - last_messages: Integer? - } - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_ - - attr_accessor last_messages: Integer? - - def initialize: ( - type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, - ?last_messages: Integer? 
- ) -> void - - type type_ = :auto | :last_messages - - module Type - extend OpenAI::Internal::Type::Enum - - AUTO: :auto - LAST_MESSAGES: :last_messages - - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::TruncationStrategy::type_] - end - end - type usage = { completion_tokens: Integer, diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index ab0e1ca8..c4f2cac4 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -20,7 +20,7 @@ module OpenAI tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? + truncation_strategy: OpenAI::Beta::TruncationObject? } & OpenAI::Internal::Type::request_parameters @@ -66,7 +66,7 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? + attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? def initialize: ( assistant_id: String, @@ -85,7 +85,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> void @@ -185,34 +185,6 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::model] end - - type truncation_strategy = - { - type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, - last_messages: Integer? - } - - class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_ - - attr_accessor last_messages: Integer? - - def initialize: ( - type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, - ?last_messages: Integer? - ) -> void - - type type_ = :auto | :last_messages - - module Type - extend OpenAI::Internal::Type::Enum - - AUTO: :auto - LAST_MESSAGES: :last_messages - - def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_] - end - end end end end diff --git a/sig/openai/models/beta/truncation_object.rbs b/sig/openai/models/beta/truncation_object.rbs new file mode 100644 index 00000000..4e4f1ae3 --- /dev/null +++ b/sig/openai/models/beta/truncation_object.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Beta + type truncation_object = + { type: OpenAI::Beta::TruncationObject::type_, last_messages: Integer? } + + class TruncationObject < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Beta::TruncationObject::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Beta::TruncationObject::type_, + ?last_messages: Integer? 
+ ) -> void + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Beta::TruncationObject::type_] + end + end + end + end +end diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 32949757..65ab4944 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -33,6 +33,7 @@ module OpenAI type data_source_config = OpenAI::EvalCreateParams::DataSourceConfig::Custom + | OpenAI::EvalCreateParams::DataSourceConfig::Logs | OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions module DataSourceConfig @@ -61,11 +62,23 @@ module OpenAI ) -> void end + type logs = { type: :logs, metadata: ::Hash[Symbol, top] } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor type: :logs + + attr_reader metadata: ::Hash[Symbol, top]? + + def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void + end + type stored_completions = - { type: :stored_completions, metadata: ::Hash[Symbol, top] } + { type: :"stored-completions", metadata: ::Hash[Symbol, top] } class StoredCompletions < OpenAI::Internal::Type::BaseModel - attr_accessor type: :stored_completions + attr_accessor type: :"stored-completions" attr_reader metadata: ::Hash[Symbol, top]? @@ -73,7 +86,7 @@ module OpenAI def initialize: ( ?metadata: ::Hash[Symbol, top], - ?type: :stored_completions + ?type: :"stored-completions" ) -> void end @@ -124,7 +137,7 @@ module OpenAI type input = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage - | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem + | OpenAI::EvalItem module Input extend OpenAI::Internal::Type::Union @@ -139,75 +152,6 @@ module OpenAI def initialize: (content: String, role: String) -> void end - type eval_item = - { - content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, - role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, - type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content - - attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role - - attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? 
- - def type=: ( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ - ) -> OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ - - def initialize: ( - content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, - role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, - ?type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] - end - end - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] end end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 3f610608..34408cec 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -38,6 +38,7 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig + | OpenAI::EvalLogsDataSourceConfig | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig diff --git a/sig/openai/models/eval_item.rbs b/sig/openai/models/eval_item.rbs new file mode 100644 index 00000000..fe989ca9 --- /dev/null +++ b/sig/openai/models/eval_item.rbs @@ -0,0 +1,70 @@ +module OpenAI + module Models + type eval_item = + { + content: OpenAI::EvalItem::content, + role: OpenAI::EvalItem::role, + type: OpenAI::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::EvalItem::content + + attr_accessor role: OpenAI::EvalItem::role + + attr_reader type: OpenAI::EvalItem::type_? 
+ + def type=: (OpenAI::EvalItem::type_) -> OpenAI::EvalItem::type_ + + def initialize: ( + content: OpenAI::EvalItem::content, + role: OpenAI::EvalItem::role, + ?type: OpenAI::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::EvalItem::type_] + end + end + end +end diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index b53da12b..d9f4939c 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -38,6 +38,7 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig + | OpenAI::EvalLogsDataSourceConfig | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig diff --git a/sig/openai/models/eval_logs_data_source_config.rbs b/sig/openai/models/eval_logs_data_source_config.rbs new file mode 100644 index 00000000..fb0e37aa --- /dev/null +++ b/sig/openai/models/eval_logs_data_source_config.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Models + type eval_logs_data_source_config = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + end + end +end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index 5bbb313f..e04883e7 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -38,6 +38,7 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig + | OpenAI::EvalLogsDataSourceConfig | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs index 345b12d6..f77af6cc 100644 --- a/sig/openai/models/eval_stored_completions_data_source_config.rbs +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -3,21 +3,21 @@ module OpenAI type eval_stored_completions_data_source_config = { schema: ::Hash[Symbol, top], - type: :stored_completions, + type: :"stored-completions", metadata: OpenAI::Models::metadata? } class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel attr_accessor schema: ::Hash[Symbol, top] - attr_accessor type: :stored_completions + attr_accessor type: :"stored-completions" attr_accessor metadata: OpenAI::Models::metadata? 
def initialize: ( schema: ::Hash[Symbol, top], ?metadata: OpenAI::Models::metadata?, - ?type: :stored_completions + ?type: :"stored-completions" ) -> void end end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index e1e0e429..cf2da5e0 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -38,6 +38,7 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig + | OpenAI::EvalLogsDataSourceConfig | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index bec2dc8b..99116842 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -40,56 +40,13 @@ module OpenAI ) -> void type source = - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID + OpenAI::Evals::EvalJSONLFileContentSource + | OpenAI::Evals::EvalJSONLFileIDSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions module Source extend OpenAI::Internal::Type::Union - type file_content = - { - content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? 
- - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - type stored_completions = { type: :stored_completions, @@ -160,81 +117,11 @@ module OpenAI ) -> void type template = - OpenAI::Responses::EasyInputMessage - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Responses::EasyInputMessage | OpenAI::EvalItem module Template extend OpenAI::Internal::Type::Union - type message = - { - content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - } - - class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content - - attr_accessor role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role - - attr_reader type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? - - def type=: ( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - - def initialize: ( - content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - ?type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] - end - end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] end end diff --git 
a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs index c376b674..c7352e33 100644 --- a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -18,55 +18,12 @@ module OpenAI ) -> void type source = - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent - | OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + OpenAI::Evals::EvalJSONLFileContentSource + | OpenAI::Evals::EvalJSONLFileIDSource module Source extend OpenAI::Internal::Type::Union - type file_content = - { - content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::source] end end diff --git a/sig/openai/models/evals/create_eval_responses_run_data_source.rbs b/sig/openai/models/evals/create_eval_responses_run_data_source.rbs new file mode 100644 index 00000000..2f24f2f9 --- /dev/null +++ b/sig/openai/models/evals/create_eval_responses_run_data_source.rbs @@ -0,0 +1,216 @@ +module OpenAI + module Models + module Evals + type create_eval_responses_run_data_source = + { + source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams + } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source + + attr_accessor type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_ + + attr_reader input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages? + + def input_messages=: ( + OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams + ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams + + def initialize: ( + source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, + ?model: String, + ?sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams + ) -> void + + type source = + OpenAI::Evals::EvalJSONLFileContentSource + | OpenAI::Evals::EvalJSONLFileIDSource + | OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::source] + end + + type type_ = :responses + + module Type + extend OpenAI::Internal::Type::Enum + + RESPONSES: :responses + + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::type_] + end + + type input_messages = + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template + | OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + def self?.variants: -> 
::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + end + end +end diff --git a/sig/openai/models/evals/eval_jsonl_file_content_source.rbs b/sig/openai/models/evals/eval_jsonl_file_content_source.rbs new file mode 100644 index 00000000..09d077ee --- /dev/null +++ b/sig/openai/models/evals/eval_jsonl_file_content_source.rbs @@ -0,0 +1,40 @@ +module OpenAI + module Models + class EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource + + module Evals + type eval_jsonl_file_content_source = + { + content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], + type: :file_content + } + + class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + end + end +end diff --git a/sig/openai/models/evals/eval_jsonl_file_id_source.rbs b/sig/openai/models/evals/eval_jsonl_file_id_source.rbs new file mode 100644 index 00000000..611068c4 --- /dev/null +++ b/sig/openai/models/evals/eval_jsonl_file_id_source.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + class EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource + + module Evals + type eval_jsonl_file_id_source = { id: String, type: :file_id } + + class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + end + end +end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 359ecd76..6aa68c38 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -68,327 +68,11 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions + | OpenAI::Evals::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - type completions = - { - source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source, - type: :completions, - input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages, - model: String, - sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - } - - class Completions < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source - - attr_accessor type: :completions - - attr_reader input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages? - - def input_messages=: ( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages - ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams? 
- - def sampling_params=: ( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams - - def initialize: ( - source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source, - ?input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages, - ?model: String, - ?sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::SamplingParams, - ?type: :completions - ) -> void - - type source = - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileID - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - - type responses = - { - type: :responses, - allow_parallel_tool_calls: bool?, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor allow_parallel_tool_calls: bool? - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? 
- - def initialize: ( - ?allow_parallel_tool_calls: bool?, - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::source] - end - - type input_messages = - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - type eval_item = - { - content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content - - attr_accessor role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role - - attr_reader type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
- - def type=: ( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - - def initialize: ( - content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: ( - text: String, - ?type: :output_text - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] - end - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Completions::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? 
- - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::data_source] end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index 81184938..e52fc6de 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -31,337 +31,11 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource + | OpenAI::Evals::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - type create_eval_responses_run_data_source = - { - source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, - model: String, - sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - } - - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source - - attr_accessor type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ - - attr_reader input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? - - def input_messages=: ( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages - ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? 
- - def sampling_params=: ( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - - def initialize: ( - source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - ?input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, - ?model: String, - ?sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - ) -> void - - type source = - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - - type responses = - { - type: :responses, - allow_parallel_tool_calls: bool?, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor allow_parallel_tool_calls: bool? - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? 
- - def initialize: ( - ?allow_parallel_tool_calls: bool?, - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] - end - - type type_ = :completions - - module Type - extend OpenAI::Internal::Type::Enum - - COMPLETIONS: :completions - - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] - end - - type input_messages = - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - type eval_item = - { - content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content - - attr_accessor role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role - - attr_reader type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? 
- - def type=: ( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - - def initialize: ( - content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: ( - text: String, - ?type: :output_text - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] - end - end - - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? 
- - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::data_source] end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index f6ab37df..8bea9b11 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -68,327 +68,11 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions + | OpenAI::Evals::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - type completions = - { - source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source, - type: :completions, - input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages, - model: String, - sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - } - - class Completions < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source - - attr_accessor type: :completions - - attr_reader input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages? - - def input_messages=: ( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages - ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams? 
- - def sampling_params=: ( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams - - def initialize: ( - source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source, - ?input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages, - ?model: String, - ?sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::SamplingParams, - ?type: :completions - ) -> void - - type source = - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileID - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - - type responses = - { - type: :responses, - allow_parallel_tool_calls: bool?, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor allow_parallel_tool_calls: bool? - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? 
- - def initialize: ( - ?allow_parallel_tool_calls: bool?, - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::source] - end - - type input_messages = - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - type eval_item = - { - content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content - - attr_accessor role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role - - attr_reader type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
- - def type=: ( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - - def initialize: ( - content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: ( - text: String, - ?type: :output_text - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] - end - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Completions::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? 
- - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::data_source] end diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index baadd18a..4b372182 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -68,327 +68,11 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions + | OpenAI::Evals::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - type completions = - { - source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source, - type: :completions, - input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages, - model: String, - sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - } - - class Completions < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source - - attr_accessor type: :completions - - attr_reader input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages? - - def input_messages=: ( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages - ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams? - - def sampling_params=: ( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams - - def initialize: ( - source: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source, - ?input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages, - ?model: String, - ?sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::SamplingParams, - ?type: :completions - ) -> void - - type source = - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileID - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? 
- - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - - type responses = - { - type: :responses, - allow_parallel_tool_calls: bool?, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor allow_parallel_tool_calls: bool? - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? - - def initialize: ( - ?allow_parallel_tool_calls: bool?, - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::source] - end - - type input_messages = - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - type eval_item = - { - content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - type: 
OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content - - attr_accessor role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role - - attr_reader type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? - - def type=: ( - OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - - def initialize: ( - content: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: ( - text: String, - ?type: :output_text - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] - end - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Completions::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? 
- - def temperature=: (Float) -> Float - - attr_reader top_p: Float? - - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::data_source] end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index d8bce8d1..f6247ab4 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -68,327 +68,11 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions + | OpenAI::Evals::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union - type completions = - { - source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source, - type: :completions, - input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages, - model: String, - sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - } - - class Completions < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source - - attr_accessor type: :completions - - attr_reader input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages? - - def input_messages=: ( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages - ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams? 
- - def sampling_params=: ( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams - - def initialize: ( - source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source, - ?input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages, - ?model: String, - ?sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::SamplingParams, - ?type: :completions - ) -> void - - type source = - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileID - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type file_content = - { - content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], - type: :file_content - } - - class FileContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::Source::FileContent::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - - type file_id = { id: String, type: :file_id } - - class FileID < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - - type responses = - { - type: :responses, - allow_parallel_tool_calls: bool?, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor allow_parallel_tool_calls: bool? - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? 
- - def initialize: ( - ?allow_parallel_tool_calls: bool?, - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::source] - end - - type input_messages = - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::ChatMessage - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - type eval_item = - { - content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content - - attr_accessor role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role - - attr_reader type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_? 
- - def type=: ( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - - def initialize: ( - content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: ( - text: String, - ?type: :output_text - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::Template::EvalItem::type_] - end - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Completions::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? 
- - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::data_source] end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index c109a5a8..59841d1e 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -109,17 +109,13 @@ module OpenAI type hyperparameters = { - batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size, + batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size? - - def batch_size=: ( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size + attr_accessor batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size? attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? @@ -134,12 +130,12 @@ module OpenAI ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size, + ?batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, ?n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs ) -> void - type batch_size = :auto | Integer + type batch_size = (top | :auto | Integer)? module BatchSize extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs index 93d8ef92..381664df 100644 --- a/sig/openai/models/graders/label_model_grader.rbs +++ b/sig/openai/models/graders/label_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type label_model_grader = { - input: ::Array[OpenAI::Graders::LabelModelGrader::Input], + input: ::Array[OpenAI::EvalItem], labels: ::Array[String], model: String, name: String, @@ -14,7 +14,7 @@ module OpenAI } class LabelModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Graders::LabelModelGrader::Input] + attr_accessor input: ::Array[OpenAI::EvalItem] attr_accessor labels: ::Array[String] @@ -27,82 +27,13 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::Graders::LabelModelGrader::Input], + input: ::Array[OpenAI::EvalItem], labels: ::Array[String], model: String, name: String, passing_labels: ::Array[String], ?type: :label_model ) -> void - - type input = - { - content: OpenAI::Graders::LabelModelGrader::Input::content, - role: OpenAI::Graders::LabelModelGrader::Input::role, - type: OpenAI::Graders::LabelModelGrader::Input::type_ - } - - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Graders::LabelModelGrader::Input::content - - attr_accessor role: OpenAI::Graders::LabelModelGrader::Input::role - - attr_reader type: OpenAI::Graders::LabelModelGrader::Input::type_? 
- - def type=: ( - OpenAI::Graders::LabelModelGrader::Input::type_ - ) -> OpenAI::Graders::LabelModelGrader::Input::type_ - - def initialize: ( - content: OpenAI::Graders::LabelModelGrader::Input::content, - role: OpenAI::Graders::LabelModelGrader::Input::role, - ?type: OpenAI::Graders::LabelModelGrader::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::type_] - end - end end end end diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs index 74b9785e..7783564c 100644 --- a/sig/openai/models/graders/score_model_grader.rbs +++ b/sig/openai/models/graders/score_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type score_model_grader = { - input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], + input: ::Array[OpenAI::EvalItem], model: String, name: String, type: :score_model, @@ -14,7 +14,7 @@ module OpenAI } class ScoreModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::Graders::ScoreModelGrader::Input] + attr_accessor input: ::Array[OpenAI::EvalItem] attr_accessor model: String @@ -31,82 +31,13 @@ module OpenAI def sampling_params=: (top) -> top def initialize: ( - input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], + input: ::Array[OpenAI::EvalItem], model: String, name: String, ?range: ::Array[Float], ?sampling_params: top, ?type: :score_model ) -> void - - type input = - { - content: OpenAI::Graders::ScoreModelGrader::Input::content, - role: OpenAI::Graders::ScoreModelGrader::Input::role, - type: OpenAI::Graders::ScoreModelGrader::Input::type_ - } - - class Input < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Graders::ScoreModelGrader::Input::content - - attr_accessor role: OpenAI::Graders::ScoreModelGrader::Input::role - - attr_reader type: OpenAI::Graders::ScoreModelGrader::Input::type_? 
- - def type=: ( - OpenAI::Graders::ScoreModelGrader::Input::type_ - ) -> OpenAI::Graders::ScoreModelGrader::Input::type_ - - def initialize: ( - content: OpenAI::Graders::ScoreModelGrader::Input::content, - role: OpenAI::Graders::ScoreModelGrader::Input::role, - ?type: OpenAI::Graders::ScoreModelGrader::Input::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::type_] - end - end end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 5ba3aa0f..2fc80760 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -11,7 +11,7 @@ module OpenAI object: :vector_store, status: OpenAI::VectorStore::status, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter, + expires_after: OpenAI::VectorStoreExpirationAfter, expires_at: Integer? } @@ -34,11 +34,11 @@ module OpenAI attr_accessor usage_bytes: Integer - attr_reader expires_after: OpenAI::VectorStore::ExpiresAfter? + attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? def expires_after=: ( - OpenAI::VectorStore::ExpiresAfter - ) -> OpenAI::VectorStore::ExpiresAfter + OpenAI::VectorStoreExpirationAfter + ) -> OpenAI::VectorStoreExpirationAfter attr_accessor expires_at: Integer? 
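
The per-endpoint `ExpiresAfter` classes formerly nested under `VectorStore`, `VectorStoreCreateParams`, and `VectorStoreUpdateParams` are consolidated into the single shared `OpenAI::VectorStoreExpirationAfter` model, as this hunk and the params-file hunks just below show. A minimal usage sketch against the new signatures; the client setup and store name are illustrative assumptions, not part of this patch, and it assumes plain hashes are coerced into the declared model class as elsewhere in this SDK:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# `expires_after:` now types as OpenAI::VectorStoreExpirationAfter on create,
# on update, and on the returned VectorStore; a hash of the same shape works.
store = client.vector_stores.create(
  name: "support-docs", # illustrative name
  expires_after: {anchor: :last_active_at, days: 7}
)

# The update path declares the same shared type; here the TTL is extended.
client.vector_stores.update(store.id, expires_after: {anchor: :last_active_at, days: 30})
```
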
@@ -51,7 +51,7 @@ module OpenAI name: String, status: OpenAI::VectorStore::status, usage_bytes: Integer, - ?expires_after: OpenAI::VectorStore::ExpiresAfter, + ?expires_after: OpenAI::VectorStoreExpirationAfter, ?expires_at: Integer?, ?object: :vector_store ) -> void @@ -96,16 +96,6 @@ module OpenAI def self?.values: -> ::Array[OpenAI::VectorStore::status] end - - type expires_after = { anchor: :last_active_at, days: Integer } - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: :last_active_at - - attr_accessor days: Integer - - def initialize: (days: Integer, ?anchor: :last_active_at) -> void - end end end end diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 67a550f7..151f9abf 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_create_params = { chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + expires_after: OpenAI::VectorStoreExpirationAfter, file_ids: ::Array[String], metadata: OpenAI::Models::metadata?, name: String @@ -20,11 +20,11 @@ module OpenAI OpenAI::Models::file_chunking_strategy_param ) -> OpenAI::Models::file_chunking_strategy_param - attr_reader expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter? + attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? def expires_after=: ( - OpenAI::VectorStoreCreateParams::ExpiresAfter - ) -> OpenAI::VectorStoreCreateParams::ExpiresAfter + OpenAI::VectorStoreExpirationAfter + ) -> OpenAI::VectorStoreExpirationAfter attr_reader file_ids: ::Array[String]? @@ -38,22 +38,12 @@ module OpenAI def initialize: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + ?expires_after: OpenAI::VectorStoreExpirationAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void - - type expires_after = { anchor: :last_active_at, days: Integer } - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: :last_active_at - - attr_accessor days: Integer - - def initialize: (days: Integer, ?anchor: :last_active_at) -> void - end end end end diff --git a/sig/openai/models/vector_store_expiration_after.rbs b/sig/openai/models/vector_store_expiration_after.rbs new file mode 100644 index 00000000..3521d748 --- /dev/null +++ b/sig/openai/models/vector_store_expiration_after.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + type vector_store_expiration_after = + { anchor: :last_active_at, days: Integer } + + class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + end + end +end diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index cb5a0433..f86edba5 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type vector_store_update_params = { - expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, + expires_after: OpenAI::VectorStoreExpirationAfter?, metadata: OpenAI::Models::metadata?, name: String? 
} @@ -12,28 +12,18 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter? + attr_accessor expires_after: OpenAI::VectorStoreExpirationAfter? attr_accessor metadata: OpenAI::Models::metadata? attr_accessor name: String? def initialize: ( - ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, + ?expires_after: OpenAI::VectorStoreExpirationAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts ) -> void - - type expires_after = { anchor: :last_active_at, days: Integer } - - class ExpiresAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: :last_active_at - - attr_accessor days: Integer - - def initialize: (days: Integer, ?anchor: :last_active_at) -> void - end end end end diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 6877c0f6..0d72b943 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -5,6 +5,7 @@ module OpenAI def create: ( file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, ?prompt: String, @@ -17,6 +18,7 @@ module OpenAI def create_streaming: ( file: Pathname | StringIO | IO | OpenAI::FilePart, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], ?language: String, ?prompt: String, diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index 27b8eeaa..fe80a355 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -45,7 +45,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -64,7 +64,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 38743701..8ca38f5d 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -23,7 +23,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -45,7 +45,7 @@ module OpenAI ?tool_choice: 
OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, + ?truncation_strategy: OpenAI::Beta::TruncationObject?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index d717bd54..7bf0650d 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -7,7 +7,7 @@ module OpenAI def create: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + ?expires_after: OpenAI::VectorStoreExpirationAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, @@ -21,7 +21,7 @@ module OpenAI def update: ( String vector_store_id, - ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, + ?expires_after: OpenAI::VectorStoreExpirationAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index bed1e829..c43e45d1 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -35,7 +35,7 @@ def test_create_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -75,7 +75,7 @@ def test_retrieve_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -115,7 +115,7 @@ def test_update_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -162,7 +162,7 @@ def test_list thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -202,7 +202,7 @@ def test_cancel_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: 
OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -243,7 +243,7 @@ def test_submit_tool_outputs_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index 903a5185..681dad3c 100644 --- a/test/openai/resources/beta/threads_test.rb +++ b/test/openai/resources/beta/threads_test.rb @@ -105,7 +105,7 @@ def test_create_and_run_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, + truncation_strategy: OpenAI::Beta::TruncationObject | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index bbce9895..4e020135 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -21,7 +21,7 @@ def test_create object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStoreExpirationAfter | nil, expires_at: Integer | nil } end @@ -45,7 +45,7 @@ def test_retrieve object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStoreExpirationAfter | nil, expires_at: Integer | nil } end @@ -69,7 +69,7 @@ def test_update object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStoreExpirationAfter | nil, expires_at: Integer | nil } end @@ -100,7 +100,7 @@ def test_list object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStore::ExpiresAfter | nil, + expires_after: OpenAI::VectorStoreExpirationAfter | nil, expires_at: Integer | nil } end From 1dc390147bdd324f4cf485faf016de1783d86871 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 16 May 2025 16:04:34 +0000 Subject: [PATCH 192/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index e8285b71..e613b816 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.5" + ".": "0.1.0-beta.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 065e1cd1..aa8e1840 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.1.0.pre.alpha.5) + openai (0.1.0.pre.beta.1) connection_pool GEM diff --git a/README.md b/README.md index 74bbb8c0..5cb3bf90 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.1.0.pre.alpha.5" +gem "openai", "~> 0.1.0.pre.beta.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index c92a3443..c559f45b 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.alpha.5" + VERSION = "0.1.0.pre.beta.1" end From 110d0b8b17acc4f552e7745e160442e46742a4c9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 16 May 2025 17:11:47 +0000 Subject: [PATCH 193/295] feat(api): Updating Assistants and Evals API schemas --- .stats.yml | 6 +- lib/openai.rb | 7 - lib/openai/models.rb | 6 - .../beta/thread_create_and_run_params.rb | 53 +- lib/openai/models/beta/threads/run.rb | 52 +- .../models/beta/threads/run_create_params.rb | 53 +- lib/openai/models/beta/truncation_object.rb | 52 - lib/openai/models/chat_model.rb | 1 + lib/openai/models/eval_create_params.rb | 141 ++- lib/openai/models/eval_create_response.rb | 52 +- lib/openai/models/eval_item.rb | 119 --- lib/openai/models/eval_list_response.rb | 52 +- .../models/eval_logs_data_source_config.rb | 47 - lib/openai/models/eval_retrieve_response.rb | 52 +- lib/openai/models/eval_update_response.rb | 52 +- ...create_eval_completions_run_data_source.rb | 220 +++- .../create_eval_jsonl_run_data_source.rb | 72 +- .../create_eval_responses_run_data_source.rb | 363 ------- .../evals/eval_jsonl_file_content_source.rb | 45 - .../models/evals/eval_jsonl_file_id_source.rb | 28 - .../models/evals/run_cancel_response.rb | 534 +++++++++- lib/openai/models/evals/run_create_params.rb | 585 ++++++++++- .../models/evals/run_create_response.rb | 534 +++++++++- lib/openai/models/evals/run_list_response.rb | 534 +++++++++- .../models/evals/run_retrieve_response.rb | 538 +++++++++- .../models/graders/label_model_grader.rb | 125 ++- lib/openai/models/graders/multi_grader.rb | 4 +- .../models/graders/score_model_grader.rb | 125 ++- lib/openai/models/vector_store.rb | 32 +- .../models/vector_store_create_params.rb | 31 +- .../models/vector_store_expiration_after.rb | 30 - .../models/vector_store_update_params.rb | 31 +- lib/openai/resources/beta/threads.rb | 4 +- lib/openai/resources/beta/threads/runs.rb | 4 +- lib/openai/resources/evals/runs.rb | 2 +- lib/openai/resources/vector_stores.rb | 4 +- rbi/openai/models.rbi | 6 - .../beta/thread_create_and_run_params.rbi | 118 ++- rbi/openai/models/beta/threads/run.rbi | 112 +- .../models/beta/threads/run_create_params.rbi | 118 ++- rbi/openai/models/beta/truncation_object.rbi | 85 -- rbi/openai/models/chat_model.rbi | 2 + rbi/openai/models/eval_create_params.rbi | 240 ++++- rbi/openai/models/eval_create_response.rbi | 76 +- rbi/openai/models/eval_item.rbi | 161 --- rbi/openai/models/eval_list_response.rbi | 76 +- .../models/eval_logs_data_source_config.rbi | 70 -- rbi/openai/models/eval_retrieve_response.rbi | 76 +- rbi/openai/models/eval_update_response.rbi | 76 +- ...reate_eval_completions_run_data_source.rbi | 389 ++++++- .../create_eval_jsonl_run_data_source.rbi | 141 ++- .../create_eval_responses_run_data_source.rbi | 591 ----------- .../evals/eval_jsonl_file_content_source.rbi | 97 -- .../evals/eval_jsonl_file_id_source.rbi | 40 - 
.../models/evals/run_cancel_response.rbi | 922 ++++++++++++++++- rbi/openai/models/evals/run_create_params.rbi | 956 +++++++++++++++++- .../models/evals/run_create_response.rbi | 922 ++++++++++++++++- rbi/openai/models/evals/run_list_response.rbi | 922 ++++++++++++++++- .../models/evals/run_retrieve_response.rbi | 922 ++++++++++++++++- .../models/graders/label_model_grader.rbi | 225 ++++- rbi/openai/models/graders/multi_grader.rbi | 4 +- .../models/graders/score_model_grader.rbi | 225 ++++- rbi/openai/models/vector_store.rbi | 39 +- .../models/vector_store_create_params.rbi | 44 +- .../models/vector_store_expiration_after.rbi | 36 - .../models/vector_store_update_params.rbi | 45 +- rbi/openai/resources/beta/threads.rbi | 8 +- rbi/openai/resources/beta/threads/runs.rbi | 8 +- rbi/openai/resources/evals/runs.rbi | 2 +- rbi/openai/resources/vector_stores.rbi | 5 +- sig/openai/models.rbs | 6 - .../beta/thread_create_and_run_params.rbs | 34 +- sig/openai/models/beta/threads/run.rbs | 34 +- .../models/beta/threads/run_create_params.rbs | 34 +- sig/openai/models/beta/truncation_object.rbs | 30 - sig/openai/models/chat_model.rbs | 2 + sig/openai/models/eval_create_params.rbs | 71 +- sig/openai/models/eval_create_response.rbs | 23 +- sig/openai/models/eval_item.rbs | 70 -- sig/openai/models/eval_list_response.rbs | 23 +- .../models/eval_logs_data_source_config.rbs | 24 - sig/openai/models/eval_retrieve_response.rbs | 23 +- sig/openai/models/eval_update_response.rbs | 23 +- ...reate_eval_completions_run_data_source.rbs | 119 ++- .../create_eval_jsonl_run_data_source.rbs | 47 +- .../create_eval_responses_run_data_source.rbs | 216 ---- .../evals/eval_jsonl_file_content_source.rbs | 40 - .../evals/eval_jsonl_file_id_source.rbs | 17 - .../models/evals/run_cancel_response.rbs | 318 +++++- sig/openai/models/evals/run_create_params.rbs | 328 +++++- .../models/evals/run_create_response.rbs | 318 +++++- sig/openai/models/evals/run_list_response.rbs | 318 +++++- .../models/evals/run_retrieve_response.rbs | 318 +++++- .../models/graders/label_model_grader.rbs | 75 +- .../models/graders/score_model_grader.rbs | 75 +- sig/openai/models/vector_store.rbs | 20 +- .../models/vector_store_create_params.rbs | 20 +- .../models/vector_store_expiration_after.rbs | 14 - .../models/vector_store_update_params.rbs | 16 +- sig/openai/resources/beta/threads.rbs | 4 +- sig/openai/resources/beta/threads/runs.rbs | 4 +- sig/openai/resources/vector_stores.rbs | 4 +- .../resources/beta/threads/runs_test.rb | 12 +- test/openai/resources/beta/threads_test.rb | 2 +- test/openai/resources/vector_stores_test.rb | 8 +- 105 files changed, 12576 insertions(+), 2463 deletions(-) delete mode 100644 lib/openai/models/beta/truncation_object.rb delete mode 100644 lib/openai/models/eval_item.rb delete mode 100644 lib/openai/models/eval_logs_data_source_config.rb delete mode 100644 lib/openai/models/evals/create_eval_responses_run_data_source.rb delete mode 100644 lib/openai/models/evals/eval_jsonl_file_content_source.rb delete mode 100644 lib/openai/models/evals/eval_jsonl_file_id_source.rb delete mode 100644 lib/openai/models/vector_store_expiration_after.rb delete mode 100644 rbi/openai/models/beta/truncation_object.rbi delete mode 100644 rbi/openai/models/eval_item.rbi delete mode 100644 rbi/openai/models/eval_logs_data_source_config.rbi delete mode 100644 rbi/openai/models/evals/create_eval_responses_run_data_source.rbi delete mode 100644 rbi/openai/models/evals/eval_jsonl_file_content_source.rbi delete mode 100644 
rbi/openai/models/evals/eval_jsonl_file_id_source.rbi delete mode 100644 rbi/openai/models/vector_store_expiration_after.rbi delete mode 100644 sig/openai/models/beta/truncation_object.rbs delete mode 100644 sig/openai/models/eval_item.rbs delete mode 100644 sig/openai/models/eval_logs_data_source_config.rbs delete mode 100644 sig/openai/models/evals/create_eval_responses_run_data_source.rbs delete mode 100644 sig/openai/models/evals/eval_jsonl_file_content_source.rbs delete mode 100644 sig/openai/models/evals/eval_jsonl_file_id_source.rbs delete mode 100644 sig/openai/models/vector_store_expiration_after.rbs diff --git a/.stats.yml b/.stats.yml index d00e2bb3..0f16b69f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-161ca7f1cfd7b33c1fc07d0ce25dfe4be5a7271c394f4cb526b7fb21b0729900.yml -openapi_spec_hash: 602e14add4bee018c6774e320ce309b8 -config_hash: bdacc55eb995c15255ec82130eb8c3bb +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5fa16b9a02985ae06e41be14946a9c325dc672fb014b3c19abca65880c6990e6.yml +openapi_spec_hash: da3e669f65130043b1170048c0727890 +config_hash: d8d5fda350f6db77c784f35429741a2e diff --git a/lib/openai.rb b/lib/openai.rb index a2dd90db..ef64bfc0 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -175,7 +175,6 @@ require_relative "openai/models/beta/threads/text_delta_block" require_relative "openai/models/beta/thread_stream_event" require_relative "openai/models/beta/thread_update_params" -require_relative "openai/models/beta/truncation_object" require_relative "openai/models/chat/chat_completion" require_relative "openai/models/chat/chat_completion_assistant_message_param" require_relative "openai/models/chat/chat_completion_audio" @@ -228,18 +227,13 @@ require_relative "openai/models/eval_custom_data_source_config" require_relative "openai/models/eval_delete_params" require_relative "openai/models/eval_delete_response" -require_relative "openai/models/eval_item" require_relative "openai/models/eval_list_params" require_relative "openai/models/eval_list_response" -require_relative "openai/models/eval_logs_data_source_config" require_relative "openai/models/eval_retrieve_params" require_relative "openai/models/eval_retrieve_response" require_relative "openai/models/evals/create_eval_completions_run_data_source" require_relative "openai/models/evals/create_eval_jsonl_run_data_source" -require_relative "openai/models/evals/create_eval_responses_run_data_source" require_relative "openai/models/evals/eval_api_error" -require_relative "openai/models/evals/eval_jsonl_file_content_source" -require_relative "openai/models/evals/eval_jsonl_file_id_source" require_relative "openai/models/evals/run_cancel_params" require_relative "openai/models/evals/run_cancel_response" require_relative "openai/models/evals/run_create_params" @@ -424,7 +418,6 @@ require_relative "openai/models/vector_store_create_params" require_relative "openai/models/vector_store_deleted" require_relative "openai/models/vector_store_delete_params" -require_relative "openai/models/vector_store_expiration_after" require_relative "openai/models/vector_store_list_params" require_relative "openai/models/vector_store_retrieve_params" require_relative "openai/models/vector_stores/file_batch_cancel_params" diff --git a/lib/openai/models.rb b/lib/openai/models.rb index c993468a..6406a297 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ 
-86,12 +86,8 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalItem = OpenAI::Models::EvalItem - EvalListParams = OpenAI::Models::EvalListParams - EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -208,8 +204,6 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index cbdac287..f9addd97 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -159,8 +159,10 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::TruncationObject, nil] - optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + optional :truncation_strategy, + -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy }, + nil?: true # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -194,7 +196,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -706,6 +708,51 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. 
+ # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more details. + # + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages + + # @!method self.values + # @return [Array] + end + end end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 5ee65dfe..a4c6345e 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -197,8 +197,8 @@ class Run < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::TruncationObject, nil] - required :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] + required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true # @!attribute usage # Usage statistics related to the run. This value will be `null` if the run is not @@ -270,7 +270,7 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param usage [OpenAI::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not # @@ -392,6 +392,52 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel end end + # @see OpenAI::Beta::Threads::Run#truncation_strategy + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. 
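To make the relocation concrete, here is a minimal usage sketch of the nested truncation strategy, assuming a configured `OpenAI::Client` named `client`; the assistant ID is a placeholder, and the field names follow the `TruncationStrategy` model above.

    # Sketch only: `client` and the IDs are assumptions, not part of this patch.
    run = client.beta.threads.create_and_run(
      assistant_id: "asst_abc123",
      truncation_strategy: {
        type: :last_messages, # :auto (the default) drops middle messages to fit `max_prompt_tokens`
        last_messages: 10     # keep only the 10 most recent thread messages
      }
    )

The same hash shape applies to `Threads::RunCreateParams`, and `Run#truncation_strategy` echoes it back, since each model now carries its own nested copy of the type.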
+ # + # @return [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::Run::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Beta::Threads::Run::TruncationStrategy} for more details. + # + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Beta::Threads::Run::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages + + # @!method self.values + # @return [Array] + end + end + # @see OpenAI::Beta::Threads::Run#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 17ab91c4..65cf8129 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -187,8 +187,10 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::TruncationObject, nil] - optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + optional :truncation_strategy, + -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy }, + nil?: true # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -226,7 +228,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -417,6 +419,51 @@ module Model T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy} for more details. + # + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages + + # @!method self.values + # @return [Array] + end + end end end end diff --git a/lib/openai/models/beta/truncation_object.rb b/lib/openai/models/beta/truncation_object.rb deleted file mode 100644 index c406a0b7..00000000 --- a/lib/openai/models/beta/truncation_object.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Beta - class TruncationObject < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @return [Symbol, OpenAI::Beta::TruncationObject::Type] - required :type, enum: -> { OpenAI::Beta::TruncationObject::Type } - - # @!attribute last_messages - # The number of most recent messages from the thread when constructing the context - # for the run. - # - # @return [Integer, nil] - optional :last_messages, Integer, nil?: true - - # @!method initialize(type:, last_messages: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Beta::TruncationObject} for more details. - # - # Controls for how a thread will be truncated prior to the run. 
Use this to - # control the intial context window of the run. - # - # @param type [Symbol, OpenAI::Beta::TruncationObject::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # - # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @see OpenAI::Beta::TruncationObject#type - module Type - extend OpenAI::Internal::Type::Enum - - AUTO = :auto - LAST_MESSAGES = :last_messages - - # @!method self.values - # @return [Array] - end - end - end - end -end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index e2cf7b8d..6f8732aa 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -37,6 +37,7 @@ module ChatModel GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST = :"chatgpt-4o-latest" + CODEX_MINI_LATEST = :"codex-mini-latest" GPT_4O_MINI = :"gpt-4o-mini" GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" GPT_4_TURBO = :"gpt-4-turbo" diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index a80e9544..c116ef8e 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -193,7 +193,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :input, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] @@ -236,7 +236,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param labels [Array] The labels to classify to each item in the evaluation. # @@ -260,7 +260,7 @@ module Input # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant -> { OpenAI::EvalItem } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem } class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -281,14 +281,145 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. 
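The `ChatModel` hunk above adds `codex-mini-latest`; enum members in this SDK surface as plain Ruby symbols, so a short sketch (the request itself is hypothetical and assumes a configured `client`):

    OpenAI::ChatModel::CODEX_MINI_LATEST # => :"codex-mini-latest"

    client.chat.completions.create(
      model: :"codex-mini-latest",
      messages: [{role: :user, content: "Say hello"}]
    )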
+ # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] + required :content, + union: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] + required :role, + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] + optional :type, + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} for + # more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
+ end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalItem)] + # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index caf24f3a..9ef32e72 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -19,7 +19,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -95,19 +95,61 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. 
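Pulling the inlined `EvalItem` together, a sketch of creating an eval whose `label_model` criterion exercises the message shape above; the `client.evals.create` call, schema, and labels are illustrative assumptions, while the field names come from the models.

    client.evals.create(
      name: "sentiment-eval",
      data_source_config: {
        type: :custom,
        item_schema: {
          type: "object",
          properties: {text: {type: "string"}},
          required: ["text"]
        }
      },
      testing_criteria: [
        {
          type: :label_model,
          name: "sentiment-grader",
          model: "gpt-4o-mini",
          labels: %w[positive negative],
          passing_labels: %w[positive],
          input: [
            # content union: a plain String...
            {type: :message, role: :developer, content: "Label the sentiment of {{item.text}}."},
            # ...or an input_text / output_text object
            {type: :message, role: :user, content: {type: :input_text, text: "{{item.text}}"}}
          ]
        }
      ]
    )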
- variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. + end + # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/lib/openai/models/eval_item.rb b/lib/openai/models/eval_item.rb deleted file mode 100644 index b134b33b..00000000 --- a/lib/openai/models/eval_item.rb +++ /dev/null @@ -1,119 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText] - required :content, union: -> { OpenAI::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::EvalItem::Role] - required :role, enum: -> { OpenAI::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. 
- # - # @return [Symbol, OpenAI::EvalItem::Type, nil] - optional :type, enum: -> { OpenAI::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see {OpenAI::EvalItem} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalItem::Content::OutputText} for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. - # - # @see OpenAI::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - end -end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index 577ef0b7..dedd586f 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -19,7 +19,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. 
# - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -95,19 +95,61 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalListResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. 
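Because each response class now nests its own `Logs` config, consumers can no longer match on a shared `EvalLogsDataSourceConfig`; a dispatch sketch for a list item (`eval_obj` is an assumed element from `client.evals.list`):

    case eval_obj.data_source_config
    in OpenAI::Models::EvalListResponse::DataSourceConfig::Logs => logs
      # `schema` declares the `item`/`sample` variables available to the eval
      puts logs.schema.keys
    in OpenAI::EvalCustomDataSourceConfig | OpenAI::EvalStoredCompletionsDataSourceConfig
      # the other variants are unchanged
    end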
+ end + # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/lib/openai/models/eval_logs_data_source_config.rb b/lib/openai/models/eval_logs_data_source_config.rb deleted file mode 100644 index 0412bec6..00000000 --- a/lib/openai/models/eval_logs_data_source_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - # @!attribute schema - # The json schema for the run data source items. Learn how to build JSON schemas - # [here](https://json-schema.org/). - # - # @return [Hash{Symbol=>Object}] - required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute type - # The type of data source. Always `logs`. - # - # @return [Symbol, :logs] - required :type, const: :logs - - # @!attribute metadata - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - - # @!method initialize(schema:, metadata: nil, type: :logs) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalLogsDataSourceConfig} for more details. - # - # A LogsDataSourceConfig which specifies the metadata property of your logs query. - # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The - # schema returned by this data source config is used to defined what variables are - # available in your evals. `item` and `sample` are both defined when using this - # data source config. - # - # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. - # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be - # - # @param type [Symbol, :logs] The type of data source. Always `logs`. - end - end -end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 0b7c4ad7..f608708d 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -19,7 +19,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. 
# - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -95,19 +95,61 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. 
+ end + # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index b8357d8e..775bae32 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -19,7 +19,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } # @!attribute metadata @@ -67,7 +67,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -95,19 +95,61 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
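The metadata contract repeated across these hunks (at most 16 pairs, keys up to 64 characters, values up to 512) reduces to an ordinary hash; a sketch with illustrative keys:

    metadata = {usecase: "chatbot", "prompt-version": "v2"}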
+ # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. + end + # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 8f17fd52..92535eb5 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -7,7 +7,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # A StoredCompletionsRunDataSource configuration describing a set of filters # - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } # @!attribute type @@ -41,7 +41,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters + # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters # # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. 
# @@ -59,14 +59,71 @@ module Source discriminator :type - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } + variant :file_content, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } + variant :file_id, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID } # A StoredCompletionsRunDataSource configuration describing a set of filters variant :stored_completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions } + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of source. Always `stored_completions`. @@ -130,13 +187,13 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) end @@ -171,7 +228,7 @@ class Template < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the "item" namespace, ie {{item.name}}. 
# - # @return [Array] + # @return [Array] required :template, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] @@ -188,7 +245,7 @@ class Template < OpenAI::Internal::Type::BaseModel # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} for # more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -214,13 +271,152 @@ module Template # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, -> { OpenAI::EvalItem } + variant :message, + -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + } + + class Message < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + required :content, + union: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + required :role, + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + optional :type, + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. 
+ # + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. 
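A run-creation sketch tying the template message shape to the completions data source; the `client.evals.runs.create` signature and all IDs are assumptions:

    client.evals.runs.create(
      "eval_abc123",
      name: "completions-run",
      data_source: {
        type: :completions,
        model: "gpt-4o-mini",
        source: {type: :file_id, id: "file-abc123"},
        input_messages: {
          type: :template,
          template: [
            {type: :message, role: :developer, content: "Grade strictly."},
            {type: :message, role: :user, content: {type: :input_text, text: "{{item.question}}"}}
          ]
        }
      }
    )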
+ # + # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem)] + # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) } + T.type_alias do + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) + end end end end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 3819554d..faedad0e 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -6,7 +6,7 @@ module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source } # @!attribute type @@ -19,7 +19,7 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # A JsonlRunDataSource object with that specifies a JSONL file that matches the # eval # - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource] + # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] # # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`. @@ -29,16 +29,76 @@ module Source discriminator :type - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } + variant :file_content, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } + variant :file_id, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
+ + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] define_sorbet_constant!(:Variants) do T.type_alias do - T.any(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource) + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID + ) end end end diff --git a/lib/openai/models/evals/create_eval_responses_run_data_source.rb b/lib/openai/models/evals/create_eval_responses_run_data_source.rb deleted file mode 100644 index 95b9d3c8..00000000 --- a/lib/openai/models/evals/create_eval_responses_run_data_source.rb +++ /dev/null @@ -1,363 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses] - required :source, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source } - - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type] - required :type, enum: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Type } - - # @!attribute input_messages - # - # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] - optional :input_messages, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, nil] - optional :sampling_params, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams } - - # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource} for more details. 
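The JSONL run data source mirrors the same nested sources; a minimal sketch with an inline row, again assuming the `client.evals.runs.create` call and placeholder IDs:

    client.evals.runs.create(
      "eval_abc123",
      data_source: {
        type: :jsonl,
        source: {
          type: :file_content,
          content: [{item: {input: "What is 2 + 2?", expected: "4"}}]
        }
      }
    )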
- # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param type [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. - # - # @param input_messages [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams] - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } - - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } - - # A EvalResponsesSource object describing a run data source configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses } - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute tools - # List of tool names. This is a query parameter used to select responses. 
- # - # @return [Array, nil] - optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. - # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses} for more - # details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - end - end - - # The type of run data source. Always `responses`. 
- # - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#type - module Type - extend OpenAI::Internal::Type::Enum - - RESPONSES = :responses - - # @!method self.values - # @return [Array] - end - - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - # - # @return [Array] - required :template, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template] - } - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template} for - # more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). - # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. 
Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. - # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. - # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end - end - end -end diff --git a/lib/openai/models/evals/eval_jsonl_file_content_source.rb b/lib/openai/models/evals/eval_jsonl_file_content_source.rb deleted file mode 100644 index add2daad..00000000 --- a/lib/openai/models/evals/eval_jsonl_file_content_source.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::EvalJSONLFileContentSource::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. - # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
- - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - end - - EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - end -end diff --git a/lib/openai/models/evals/eval_jsonl_file_id_source.rb b/lib/openai/models/evals/eval_jsonl_file_id_source.rb deleted file mode 100644 index 3347ee13..00000000 --- a/lib/openai/models/evals/eval_jsonl_file_id_source.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - end - - EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - end -end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 3e7dacc0..fcca1f52 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -20,7 +20,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,17 +145,541 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. 
- variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # + # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. 
+ variant :responses, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. 
+ # + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. 
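+ #
+ # @example A hedged sketch of a responses-query source; the field values
+ #   here are illustrative, not defaults, and this model is normally
+ #   returned by the API rather than built by hand:
+ #   source = OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses.new(
+ #     model: "gpt-4o",
+ #     has_tool_calls: true,
+ #     created_after: 1_700_000_000
+ #   )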
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + ) + end + end + end + + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). 
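+ #
+ # @example Illustrative only; the template string uses the documented
+ #   "item" namespace syntax with an invented variable name:
+ #   message = OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage.new(
+ #     role: "system",
+ #     content: "Grade the answer given in {{item.answer}}."
+ #   )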
+ end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
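+ #
+ # @example Minimal sketch using the variable name from the docs above:
+ #   ref = OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference.new(
+ #     item_reference: "item.name"
+ #   )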
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + end + end + + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + end + end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses ) end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 7437852c..dcc9e64b 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,7 +11,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Evals::RunCreateParams::DataSource } # @!attribute metadata @@ -35,7 +35,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. 
# - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -54,17 +54,592 @@ module DataSource variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + required :source, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source + } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + required :type, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + } + + # @!attribute input_messages + # + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages + } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] + optional :sampling_params, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} + # for more details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # + # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. 
Always `responses`. + # + # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + } + + variant :file_id, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. 
+ # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. 
This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + end + end + end + + # The type of run data source. Always `responses`. + # + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type + module Type + extend OpenAI::Internal::Type::Enum + + RESPONSES = :responses + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + } + + variant :item_reference, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + } + + # A message input to the model with a role indicating instruction following + # hierarchy. 
Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. 
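+ #
+ # @example Illustrative sketch; a plain String is the simplest `content`
+ #   variant, and the template variable name is invented for this doc:
+ #   item = OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem.new(
+ #     role: :user,
+ #     content: "Answer using {{item.question}}."
+ #   )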
+ + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + end + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. 
Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. + end + + # @!method self.variants + # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + end + end + + # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + end + end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index cc42d18c..67483e4b 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,7 +20,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. 
# - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,17 +145,541 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ # + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # + # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. 
+ # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. 
This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + ) + end + end + end + + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. 
+ variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
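Where templating is unnecessary, the `item_reference` variant above points input messages at a field carried by each datasource item instead; the params-side counterpart takes the same two fields. A sketch, with the field name being a hypothetical:

# Reuse messages carried by each item rather than templating them inline.
input_messages = {
  type: :item_reference,
  item_reference: "item.conversation"  # hypothetical item field
}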
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + end + end + + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + end + end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses ) end end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 846ca767..d3ed7e86 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,7 +20,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. 
# - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,17 +145,541 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # + # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. 
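Assembled on the create side, the shape documented here corresponds to a hash like the following, mirroring the initialize signature (source, optional input_messages, model, sampling_params); the file id, model name, and field values are placeholders.

# Hedged sketch of a complete `responses` run data source.
data_source = {
  type: :responses,
  source: {type: :file_id, id: "file-abc123"},  # placeholder file id
  input_messages: {type: :item_reference, item_reference: "item.conversation"},  # hypothetical field
  model: "o3-mini",
  sampling_params: {temperature: 0.2, top_p: 1.0, seed: 42, max_completion_tokens: 512}
}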
+ + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. 
+ # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. 
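When a run is read back, `source` arrives as one of the three variants above, and Ruby's class patterns distinguish them. A sketch, assuming `ds` is an already-fetched `OpenAI::Models::Evals::RunListResponse::DataSource::Responses`:

case ds.source
in OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent => src
  src.content.each { |row| p row.item }  # inline jsonl rows
in OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID => src
  puts "backed by file #{src.id}"
in OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses => src
  puts "responses query: model=#{src.model.inspect}, created_after=#{src.created_after.inspect}"
end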
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + ) + end + end + end + + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). 
+ end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
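`input_messages` reads the same way, and within a template each entry is either a ChatMessage or an EvalItem; a sketch over the same assumed `ds` value (`input_messages` may also be nil, since the attribute is optional):

case ds.input_messages
in OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template => tmpl
  tmpl.template.each do |msg|
    # ChatMessage#role is a free-form String; EvalItem#role is an enum Symbol.
    puts "#{msg.role}: #{msg.content}"
  end
in OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference => ref
  puts "messages come from #{ref.item_reference}"
in nil
  puts "no input messages recorded"
end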
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + end + end + + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + end + end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunListResponse::DataSource::Responses ) end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 60f06879..58baf568 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,7 +20,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error @@ -106,7 +106,7 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. 
# - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] Information about the run's data source. # # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. # @@ -145,17 +145,545 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # A EvalResponsesSource object describing a run data source configuration. + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # + # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference] + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). 
+ # + # @param sampling_params [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # A EvalResponsesSource object describing a run data source configuration. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute has_tool_calls + # Whether the response has tool calls. This is a query parameter used to select + # responses. + # + # @return [Boolean, nil] + optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. 
This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentation has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # An EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`.
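+ + # NOTE (editorial, illustrative only; not generated documentation): assuming + # the keyword-argument initializer that `OpenAI::Internal::Type::BaseModel` + # subclasses document above, all filter fields here are optional, so a source + # selecting only tool-calling responses from one model might be built as + # + # OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses.new( + # model: "gpt-4o", # hypothetical model name + # has_tool_calls: true + # )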
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + ) + end + end + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, i.e. {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentation has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message.
+ # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentation has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`.
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentation has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the "item" namespace. i.e., "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the "item" namespace. i.e., "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`.
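+ + # NOTE (editorial, illustrative only; not generated documentation): assuming + # the keyword-argument initializer documented above, referencing a per-item + # field instead of an inline template might look like + # + # OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference.new( + # item_reference: "item.input" # hypothetical path in the "item" namespace + # )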
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + end + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + end + end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses)] define_sorbet_constant!(:Variants) do T.type_alias do T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses ) end end diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index b6fa6510..801b3432 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -6,8 +6,8 @@ module Graders class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::LabelModelGrader::Input] } # @!attribute labels # The labels to assign to each item in the evaluation. @@ -43,7 +43,7 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] + # @param input [Array] # # @param labels [Array] The labels to assign to each item in the evaluation. 
# @@ -54,6 +54,125 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. # # @param type [Symbol, :label_model] The object type, which is always `label_model`. + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] + required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] + required :role, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type, nil] + optional :type, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentation has been truncated, see + # {OpenAI::Graders::LabelModelGrader::Input} for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Graders::LabelModelGrader::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentation has been truncated, see + # {OpenAI::Graders::LabelModelGrader::Input::Content::OutputText} for more + # details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`.
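+ + # NOTE (editorial, illustrative only; not generated documentation): assuming + # the keyword-argument initializers documented above, a grader input whose + # content is a prior model output might be built as + # + # OpenAI::Graders::LabelModelGrader::Input.new( + # role: :assistant, + # content: OpenAI::Graders::LabelModelGrader::Input::Content::OutputText.new(text: "positive") + # )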
+ end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Graders::LabelModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Graders::LabelModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end end end diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index 520ef0e2..90a78fdc 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -22,7 +22,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel required :name, String # @!attribute type - # The type of grader. + # The object type, which is always `multi`. # # @return [Symbol, :multi] required :type, const: :multi @@ -37,7 +37,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the grader. # - # @param type [Symbol, :multi] The type of grader. + # @param type [Symbol, :multi] The object type, which is always `multi`. # A StringCheckGrader object that performs a string comparison between input and # reference using a specified operation. diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 353d7ae9..7742ec75 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -7,8 +7,8 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # The input text. This may include template strings. # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::ScoreModelGrader::Input] } # @!attribute model # The model to use for the evaluation. @@ -43,7 +43,7 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. + # @param input [Array] The input text. This may include template strings. # # @param model [String] The model to use for the evaluation. # @@ -54,6 +54,125 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @param sampling_params [Object] The sampling parameters for the model. # # @param type [Symbol, :score_model] The object type, which is always `score_model`. + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. 
+ # + # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] + required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] + required :role, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type, nil] + optional :type, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentation has been truncated, see + # {OpenAI::Graders::ScoreModelGrader::Input} for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Graders::ScoreModelGrader::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentation has been truncated, see + # {OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText} for more + # details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)] + + define_sorbet_constant!(:Variants) do + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + end + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`.
+ # + # @see OpenAI::Graders::ScoreModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Graders::ScoreModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index a48b84f7..48c1a984 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -67,8 +67,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter } + # @return [OpenAI::VectorStore::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStore::ExpiresAfter } # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. @@ -99,7 +99,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStore::ExpiresAfter] The expiration policy for a vector store. # # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. # @@ -164,6 +164,32 @@ module Status # @!method self.values # @return [Array] end + + # @see OpenAI::VectorStore#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. + # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::VectorStore::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 237d6f5f..a4babcf4 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -17,8 +17,8 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. 
# - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter } + # @return [OpenAI::VectorStoreCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreCreateParams::ExpiresAfter } # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that @@ -51,7 +51,7 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -60,6 +60,31 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @param name [String] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. + # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::VectorStoreCreateParams::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/models/vector_store_expiration_after.rb b/lib/openai/models/vector_store_expiration_after.rb deleted file mode 100644 index 905f4eaa..00000000 --- a/lib/openai/models/vector_store_expiration_after.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - # @!attribute anchor - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - # - # @return [Symbol, :last_active_at] - required :anchor, const: :last_active_at - - # @!attribute days - # The number of days after the anchor time that the vector store will expire. - # - # @return [Integer] - required :days, Integer - - # @!method initialize(days:, anchor: :last_active_at) - # Some parameter documentations has been truncated, see - # {OpenAI::VectorStoreExpirationAfter} for more details. - # - # The expiration policy for a vector store. - # - # @param days [Integer] The number of days after the anchor time that the vector store will expire. - # - # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. 
Supported anchors: ` - end - end -end diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index fef654c0..9fab30cf 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -10,8 +10,8 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter }, nil?: true + # @return [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreUpdateParams::ExpiresAfter }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -34,13 +34,38 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreUpdateParams} for more details. # - # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String, nil] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. + # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::VectorStoreUpdateParams::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 1f7c534f..37b1d488 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -145,7 +145,7 @@ def delete(thread_id, params = {}) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -205,7 +205,7 @@ def create_and_run(params) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro + # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index a240f654..cbf1b293 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -52,7 +52,7 @@ class Runs # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -120,7 +120,7 @@ def create(thread_id, params) # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 9b7fe78b..590951f6 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -16,7 +16,7 @@ class Runs # # @param eval_id [String] The ID of the evaluation to create a run for. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index 4d0470d5..c1d3c184 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -18,7 +18,7 @@ class VectorStores # # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -71,7 +71,7 @@ def retrieve(vector_store_id, params = {}) # # @param vector_store_id [String] The ID of the vector store to modify. # - # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. 
+ # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index 7de90e20..f26af6b7 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -59,12 +59,8 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalItem = OpenAI::Models::EvalItem - EvalListParams = OpenAI::Models::EvalListParams - EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -184,8 +180,6 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 153977c8..7e2348cc 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -188,13 +188,21 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -242,7 +250,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -382,7 +392,10 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ), request_options: OpenAI::RequestOptions } ) @@ -1450,6 +1463,103 @@ module OpenAI end end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. 
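+ # NOTE (editorial, illustrative only; not part of the generated RBI): given + # the `Type` enum defined further down, a strategy keeping the ten most recent + # messages could be written as + # + # OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy.new( + # type: :last_messages, + # last_messages: 10 + # )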
+ sig do + params( + type: + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index e6879317..aba1382a 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -207,13 +207,17 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns(T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy)) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -291,7 +295,9 @@ module OpenAI ) ], truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage::OrHash), temperature: T.nilable(Float), top_p: T.nilable(Float), @@ -454,7 +460,8 @@ module OpenAI OpenAI::Beta::FunctionTool ) ], - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + truncation_strategy: + T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage), temperature: T.nilable(Float), top_p: T.nilable(Float) @@ -746,6 +753,103 @@ module OpenAI end end + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. 
If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + sig do + params( + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + class Usage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index e274167c..7e4e98e4 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -206,13 +206,21 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -265,7 +273,9 @@ ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -426,7 +436,10 @@ ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ), request_options: OpenAI::RequestOptions } ) @@ -796,6 +809,103 @@ def self.variants end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the initial context window of the run. + sig do + params( + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`.
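+ # NOTE (editorial, illustrative only; not part of the generated RBI): either + # member of the enum below is accepted as a plain Symbol wherever a + # `TruncationStrategy::Type::OrSymbol` is expected, e.g. + # + # OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy.new(type: :auto)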
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/beta/truncation_object.rbi b/rbi/openai/models/beta/truncation_object.rbi deleted file mode 100644 index c763ead6..00000000 --- a/rbi/openai/models/beta/truncation_object.rbi +++ /dev/null @@ -1,85 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Beta - class TruncationObject < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::Beta::TruncationObject, OpenAI::Internal::AnyHash) - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - sig { returns(OpenAI::Beta::TruncationObject::Type::OrSymbol) } - attr_accessor :type - - # The number of most recent messages from the thread when constructing the context - # for the run. - sig { returns(T.nilable(Integer)) } - attr_accessor :last_messages - - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - sig do - params( - type: OpenAI::Beta::TruncationObject::Type::OrSymbol, - last_messages: T.nilable(Integer) - ).returns(T.attached_class) - end - def self.new( - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - type:, - # The number of most recent messages from the thread when constructing the context - # for the run. - last_messages: nil - ) - end - - sig do - override.returns( - { - type: OpenAI::Beta::TruncationObject::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) - end - def to_hash - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Beta::TruncationObject::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - AUTO = - T.let(:auto, OpenAI::Beta::TruncationObject::Type::TaggedSymbol) - LAST_MESSAGES = - T.let( - :last_messages, - OpenAI::Beta::TruncationObject::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[OpenAI::Beta::TruncationObject::Type::TaggedSymbol] - ) - end - def self.values - end - end - end - end - end -end diff --git a/rbi/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi index 106901df..5046338e 100644 --- a/rbi/openai/models/chat_model.rbi +++ b/rbi/openai/models/chat_model.rbi @@ -75,6 +75,8 @@ module OpenAI ) CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::ChatModel::TaggedSymbol) + CODEX_MINI_LATEST = + T.let(:"codex-mini-latest", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::ChatModel::TaggedSymbol) diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index a1ba68fb..3a866373 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -327,7 +327,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ] ) @@ -362,7 +362,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage::OrHash, - OpenAI::EvalItem::OrHash + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::OrHash ) ], labels: T::Array[String], @@ -396,7 +396,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ], labels: T::Array[String], @@ -419,7 +419,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) end @@ -456,6 +456,238 @@ module OpenAI end end + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. 
Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
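+              #
+              # A minimal construction sketch of the signature above; the
+              # `{{item.text}}` reference is an illustrative template variable:
+              #
+              #   OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem.new(
+              #     role: :user,
+              #     content: "Label the sentiment of: {{item.text}}"
+              #   )
+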
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index a3433afc..90045b13 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -21,7 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -74,7 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -123,7 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -153,11 +153,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_item.rbi b/rbi/openai/models/eval_item.rbi deleted file mode 100644 index 841d9960..00000000 --- a/rbi/openai/models/eval_item.rbi +++ /dev/null @@ -1,161 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(OpenAI::EvalItem, OpenAI::Internal::AnyHash) } - - # Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig { returns(OpenAI::EvalItem::Role::OrSymbol) } - attr_accessor :role - - # The type of the message input. Always `message`. - sig { returns(T.nilable(OpenAI::EvalItem::Type::OrSymbol)) } - attr_reader :type - - sig { params(type: OpenAI::EvalItem::Type::OrSymbol).void } - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalItem::Content::OutputText::OrHash - ), - role: OpenAI::EvalItem::Role::OrSymbol, - type: OpenAI::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. 
- type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ), - role: OpenAI::EvalItem::Role::OrSymbol, - type: OpenAI::EvalItem::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. - module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig { override.returns(T::Array[OpenAI::EvalItem::Content::Variants]) } - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Role) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = T.let(:user, OpenAI::EvalItem::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::EvalItem::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::EvalItem::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::EvalItem::Role::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::EvalItem::Role::TaggedSymbol]) } - def self.values - end - end - - # The type of the message input. Always `message`. 
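# The class survives as a nested copy under
# `EvalCreateParams::TestingCriterion::LabelModel::Input`; a minimal sketch of
# an assistant turn there, using the structured `OutputText` content variant
# (the text value is illustrative):
#
#   OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem.new(
#     role: :assistant,
#     content:
#       OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText.new(
#         text: "positive"
#       )
#   )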
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = T.let(:message, OpenAI::EvalItem::Type::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::EvalItem::Type::TaggedSymbol]) } - def self.values - end - end - end - end -end diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index c8e63e69..7b8d9b2d 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -21,7 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -74,7 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -123,7 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -153,11 +153,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. 
Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_logs_data_source_config.rbi b/rbi/openai/models/eval_logs_data_source_config.rbi deleted file mode 100644 index bbaac918..00000000 --- a/rbi/openai/models/eval_logs_data_source_config.rbi +++ /dev/null @@ -1,70 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::EvalLogsDataSourceConfig, OpenAI::Internal::AnyHash) - end - - # The json schema for the run data source items. Learn how to build JSON schemas - # [here](https://json-schema.org/). - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :schema - - # The type of data source. Always `logs`. - sig { returns(Symbol) } - attr_accessor :type - - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - sig { returns(T.nilable(T::Hash[Symbol, String])) } - attr_accessor :metadata - - # A LogsDataSourceConfig which specifies the metadata property of your logs query. - # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The - # schema returned by this data source config is used to defined what variables are - # available in your evals. `item` and `sample` are both defined when using this - # data source config. - sig do - params( - schema: T::Hash[Symbol, T.anything], - metadata: T.nilable(T::Hash[Symbol, String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The json schema for the run data source items. Learn how to build JSON schemas - # [here](https://json-schema.org/). - schema:, - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - metadata: nil, - # The type of data source. Always `logs`. 
- type: :logs - ) - end - - sig do - override.returns( - { - schema: T::Hash[Symbol, T.anything], - type: Symbol, - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) - end - def to_hash - end - end - end -end diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index baacb528..90427a71 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -21,7 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -74,7 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -123,7 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -153,11 +153,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. 
+ type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index f82939b7..28512653 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -21,7 +21,7 @@ module OpenAI returns( T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) ) @@ -74,7 +74,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -123,7 +123,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -153,11 +153,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. 
+ type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 5ab2adf7..7dc975b1 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -16,8 +16,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) ) @@ -84,8 +84,8 @@ module OpenAI params( source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions::OrHash ), type: @@ -117,8 +117,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), type: @@ -144,12 +144,137 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do @@ -302,7 +427,10 @@ module OpenAI sig do returns( T::Array[ - T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) ] ) end @@ -318,7 +446,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::EvalItem::OrHash + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::OrHash ) ], type: Symbol @@ -340,7 +468,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::EvalItem + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ], type: Symbol @@ -360,9 +488,246 @@ module OpenAI Variants = T.type_alias do - T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) + end + + class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
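+                #
+                # For example (illustrative): `role: :user` marks an end-user
+                # turn, while `role: :assistant` marks a prior model turn.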
+ sig do + returns( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash + ), + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) end + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ), + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. 
One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index af53b5ed..f27424f1 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -15,8 +15,8 @@ module OpenAI sig do returns( T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ) ) end @@ -32,8 +32,8 @@ module OpenAI params( source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID::OrHash ), type: Symbol ).returns(T.attached_class) @@ -50,8 +50,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ), type: Symbol } @@ -66,11 +66,136 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ) end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + 
OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi b/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi deleted file mode 100644 index 1567b220..00000000 --- a/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi +++ /dev/null @@ -1,591 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Evals - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `responses`. 
- sig do - returns( - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol - ) - end - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). - sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. - sig do - params( - source: - T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses::OrHash - ), - type: - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - # The type of run data source. Always `responses`. - type:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). - model: nil, - sampling_params: nil - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ), - type: - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Only include items created after this timestamp (inclusive). 
This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. - sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } - attr_accessor :reasoning_effort - - # Sampling temperature. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # List of tool names. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :tools - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - tools: T.nilable(T::Array[String]), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # List of tool names. This is a query parameter used to select responses. - tools: nil, - # Nucleus sampling parameter. 
This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. - type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - tools: T.nilable(T::Array[String]), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Variants - ] - ) - end - def self.variants - end - end - - # The type of run data source. Always `responses`. - module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - RESPONSES = - T.let( - :responses, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. - type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. 
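# A minimal sketch of such a template in hash form (assuming hashes are
# coerced through the `OrHash` aliases; `item.name` is the documented example
# of an "item"-namespace reference):
#
#   input_messages = {
#     type: :template,
#     template: [
#       { role: "system", content: "You are a concise grader." },
#       { role: "user", content: "Grade the answer for {{item.name}}." }
#     ]
#   }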
- module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). - role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig { override.returns({ item_reference: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. - sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. 
- seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - end - end -end diff --git a/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi deleted file mode 100644 index 741fced0..00000000 --- a/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi +++ /dev/null @@ -1,97 +0,0 @@ -# typed: strong - -module OpenAI - module Models - EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - - module Evals - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns(T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content]) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Evals::EvalJSONLFileContentSource::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - end - end -end diff --git a/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi deleted file mode 100644 index c9da7ca8..00000000 --- a/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi +++ /dev/null @@ -1,40 +0,0 @@ -# typed: strong - -module OpenAI - module Models - EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - - module Evals - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. - sig { returns(Symbol) } - attr_accessor :type - - sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. 
- type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - end - end -end diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index a305b087..639b472c 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -214,10 +214,924 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses ) end + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
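+          #
+          # A minimal sketch (values are hypothetical, not taken from this patch)
+          # of the shape this `responses` data source takes:
+          #
+          #   {
+          #     type: :responses,
+          #     source: {type: :file_id, id: "file-abc123"},
+          #     model: "o3-mini",
+          #     sampling_params: {temperature: 0.7, top_p: 1.0}
+          #   }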
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + ), + type: Symbol, + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. 
+ sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. + has_tool_calls: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
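+            #
+            # A minimal sketch of the two accepted variants (values hypothetical):
+            #
+            #   {content: "You are a helpful assistant.", role: "system"}      # ChatMessage
+            #   {content: "Greet {{item.name}}.", role: :user, type: :message} # EvalItem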
+ module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
+ type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. 
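+              #
+              # The generated constant resolves to its underlying symbol, e.g.
+              #
+              #   Type::MESSAGE # => :message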
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 878fc731..616c505a 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -18,7 +18,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) ) end @@ -46,7 +46,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -76,7 +76,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -96,10 +96,958 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) end + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource, + OpenAI::Internal::AnyHash + ) + end + + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig do + returns( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol + ) + end + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. + sig do + params( + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses::OrHash + ), + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).returns(T.attached_class) + end + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + # The type of run data source. Always `responses`. + type:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ), + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + ) + end + def to_hash + end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. 
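+              #
+              # A sketch of one entry (keys from the `Content` model below; the
+              # values are hypothetical):
+              #
+              #   {item: {name: "Ada"}, sample: {output: "Hello, Ada!"}}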
+ sig do + returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. 
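+                #
+                # For example (a hypothetical filter), {environment: "production"}
+                # would presumably select only responses tagged with that metadata.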
+ sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. + has_tool_calls: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Variants + ] + ) + end + def self.variants + end + end + + # The type of run data source. Always `responses`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + RESPONSES = + T.let( + :responses, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. 
+ type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. 
+              # Messages with the
+              # `assistant` role are presumed to have been generated by the model in previous
+              # interactions.
+              sig do
+                params(
+                  content:
+                    T.any(
+                      String,
+                      OpenAI::Responses::ResponseInputText::OrHash,
+                      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+                    ),
+                  role:
+                    OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
+                  type:
+                    OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol
+                ).returns(T.attached_class)
+              end
+              def self.new(
+                # Text inputs to the model - can contain template strings.
+                content:,
+                # The role of the message input. One of `user`, `assistant`, `system`, or
+                # `developer`.
+                role:,
+                # The type of the message input. Always `message`.
+                type: nil
+              )
+              end
+
+              sig do
+                override.returns(
+                  {
+                    content:
+                      T.any(
+                        String,
+                        OpenAI::Responses::ResponseInputText,
+                        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+                      ),
+                    role:
+                      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
+                    type:
+                      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol
+                  }
+                )
+              end
+              def to_hash
+              end
+
+              # Text inputs to the model - can contain template strings.
+              module Content
+                extend OpenAI::Internal::Type::Union
+
+                Variants =
+                  T.type_alias do
+                    T.any(
+                      String,
+                      OpenAI::Responses::ResponseInputText,
+                      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+                    )
+                  end
+
+                class OutputText < OpenAI::Internal::Type::BaseModel
+                  OrHash =
+                    T.type_alias do
+                      T.any(
+                        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText,
+                        OpenAI::Internal::AnyHash
+                      )
+                    end
+
+                  # The text output from the model.
+                  sig { returns(String) }
+                  attr_accessor :text
+
+                  # The type of the output text. Always `output_text`.
+                  sig { returns(Symbol) }
+                  attr_accessor :type
+
+                  # A text output from the model.
+                  sig do
+                    params(text: String, type: Symbol).returns(
+                      T.attached_class
+                    )
+                  end
+                  def self.new(
+                    # The text output from the model.
+                    text:,
+                    # The type of the output text. Always `output_text`.
+                    type: :output_text
+                  )
+                  end
+
+                  sig { override.returns({ text: String, type: Symbol }) }
+                  def to_hash
+                  end
+                end
+
+                sig do
+                  override.returns(
+                    T::Array[
+                      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::Variants
+                    ]
+                  )
+                end
+                def self.variants
+                end
+              end
+
+              # The role of the message input. One of `user`, `assistant`, `system`, or
+              # `developer`.
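+              #
+              # Sketch of assumed usage: `OrSymbol` is aliased to
+              # T.any(Symbol, String), so either form below should typecheck:
+              #
+              #   role: :user
+              #   role: "developer"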
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[OpenAI::Evals::RunCreateParams::DataSource::Variants] diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index ffa092b4..c789a4a8 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -214,10 +214,924 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses ) end + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. + sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + ), + type: Symbol, + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. 
+ content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. 
+ sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. + has_tool_calls: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
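+                # As a hedged illustration (every value below is an invented
+                # placeholder, not a default), a filter over stored responses
+                # might combine several of the query parameters above:
+                #
+                #   Source::Responses.new(
+                #     model: "gpt-4o",
+                #     created_after: 1_700_000_000,
+                #     has_tool_calls: true
+                #   )
+                #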
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
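+              # As an illustrative sketch only (the strings are invented
+              # placeholders), a template typically mixes literal chat messages
+              # with "item" variable references:
+              #
+              #   template: [
+              #     {role: "system", content: "Grade the answer for accuracy."},
+              #     {role: "user", content: "Question: {{item.question}}"}
+              #   ]
+              #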
+ module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
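+                    # A minimal sketch (placeholder text, not real model
+                    # output): an assistant turn can carry structured output
+                    # text via the `output_text` content variant, e.g.
+                    #
+                    #   EvalItem.new(
+                    #     role: :assistant,
+                    #     content: {type: :output_text, text: "The answer is 4."}
+                    #   )
+                    #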
+ type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
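+              # A hedged illustration (the numbers are arbitrary examples, not
+              # recommended defaults):
+              #
+              #   SamplingParams.new(seed: 42, temperature: 0.7, top_p: 0.9)
+              #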
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index f398957e..0ed89256 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunListResponse::DataSource::Responses ) ) end @@ -111,7 +111,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -179,7 +179,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunListResponse::DataSource::Responses ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -212,10 +212,924 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunListResponse::DataSource::Responses ) end + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
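+          #
+          # A minimal construction sketch (the file ID and model name are
+          # invented placeholders): point the run data source at a stored
+          # jsonl file and pick a sampling model:
+          #
+          #   Responses.new(
+          #     source: {type: :file_id, id: "file-abc123"},
+          #     model: "gpt-4o"
+          #   )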
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + ), + type: Symbol, + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. 
+ sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. + has_tool_calls: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
+ type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the "item" namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the "item" namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. + type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 81fb22ff..1bdf4d77 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -26,7 +26,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses ) ) end @@ -113,7 +113,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -181,7 +181,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses ), error: OpenAI::Evals::EvalAPIError, eval_id: String, @@ -216,10 +216,924 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses ) end + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # A EvalResponsesSource object describing a run data source configuration. + sig do + returns( + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + returns( + T.nilable( + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. + sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A EvalResponsesSource object describing a run data source configuration. + source:, + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + ), + type: Symbol, + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # A EvalResponsesSource object describing a run data source configuration. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Whether the response has tool calls. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :has_tool_calls + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. 
This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Whether the response has tool calls. This is a query parameter used to select + # responses. + has_tool_calls: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + has_tool_calls: T.nilable(T::Boolean), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the "item" namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
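+              # For orientation, a rough sketch (illustrative only, not part of
+              # the generated API surface) of branching on the `Source` union
+              # defined above, where `source` holds one of its three variants:
+              #
+              #   case source
+              #   in OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent
+              #     source.content.each { |row| p row.item } # inline JSONL rows
+              #   in OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID
+              #     p source.id # uploaded file reference
+              #   in OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses
+              #     p [source.model, source.created_after] # stored-response query filters
+              #   end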
+ module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
+ type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. 
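+                  # As a hedged sketch, an `EvalItem`-shaped hash for such a
+                  # template might look like this (values illustrative;
+                  # `{{item.answer}}` references the per-row "item" namespace):
+                  #
+                  #   {
+                  #     role: :user,
+                  #     content: "Grade this answer: {{item.answer}}",
+                  #     type: :message
+                  #   }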
+                  module Type
+                    extend OpenAI::Internal::Type::Enum
+
+                    TaggedSymbol =
+                      T.type_alias do
+                        T.all(
+                          Symbol,
+                          OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type
+                        )
+                      end
+                    OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+                    MESSAGE =
+                      T.let(
+                        :message,
+                        OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+                      )
+
+                    sig do
+                      override.returns(
+                        T::Array[
+                          OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol
+                        ]
+                      )
+                    end
+                    def self.values
+                    end
+                  end
+                end
+
+                sig do
+                  override.returns(
+                    T::Array[
+                      OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants
+                    ]
+                  )
+                end
+                def self.variants
+                end
+              end
+            end
+
+            class ItemReference < OpenAI::Internal::Type::BaseModel
+              OrHash =
+                T.type_alias do
+                  T.any(
+                    OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference,
+                    OpenAI::Internal::AnyHash
+                  )
+                end
+
+              # A reference to a variable in the "item" namespace, e.g. "item.name".
+              sig { returns(String) }
+              attr_accessor :item_reference
+
+              # The type of input messages. Always `item_reference`.
+              sig { returns(Symbol) }
+              attr_accessor :type
+
+              sig do
+                params(item_reference: String, type: Symbol).returns(
+                  T.attached_class
+                )
+              end
+              def self.new(
+                # A reference to a variable in the "item" namespace, e.g. "item.name".
+                item_reference:,
+                # The type of input messages. Always `item_reference`.
+                type: :item_reference
+              )
+              end
+
+              sig do
+                override.returns({ item_reference: String, type: Symbol })
+              end
+              def to_hash
+              end
+            end
+
+            sig do
+              override.returns(
+                T::Array[
+                  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants
+                ]
+              )
+            end
+            def self.variants
+            end
+          end
+
+          class SamplingParams < OpenAI::Internal::Type::BaseModel
+            OrHash =
+              T.type_alias do
+                T.any(
+                  OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams,
+                  OpenAI::Internal::AnyHash
+                )
+              end
+
+            # The maximum number of tokens in the generated output.
+            sig { returns(T.nilable(Integer)) }
+            attr_reader :max_completion_tokens
+
+            sig { params(max_completion_tokens: Integer).void }
+            attr_writer :max_completion_tokens
+
+            # A seed value to initialize the randomness during sampling.
+            sig { returns(T.nilable(Integer)) }
+            attr_reader :seed
+
+            sig { params(seed: Integer).void }
+            attr_writer :seed
+
+            # A higher temperature increases randomness in the outputs.
+            sig { returns(T.nilable(Float)) }
+            attr_reader :temperature
+
+            sig { params(temperature: Float).void }
+            attr_writer :temperature
+
+            # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+            sig { returns(T.nilable(Float)) }
+            attr_reader :top_p
+
+            sig { params(top_p: Float).void }
+            attr_writer :top_p
+
+            sig do
+              params(
+                max_completion_tokens: Integer,
+                seed: Integer,
+                temperature: Float,
+                top_p: Float
+              ).returns(T.attached_class)
+            end
+            def self.new(
+              # The maximum number of tokens in the generated output.
+              max_completion_tokens: nil,
+              # A seed value to initialize the randomness during sampling.
+              seed: nil,
+              # A higher temperature increases randomness in the outputs.
+              temperature: nil,
+              # An alternative to temperature for nucleus sampling; 1.0 includes all tokens.
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi index 42632db8..9d062b87 100644 --- a/rbi/openai/models/graders/label_model_grader.rbi +++ b/rbi/openai/models/graders/label_model_grader.rbi @@ -11,7 +11,7 @@ module OpenAI T.any(OpenAI::Graders::LabelModelGrader, OpenAI::Internal::AnyHash) end - sig { returns(T::Array[OpenAI::EvalItem]) } + sig { returns(T::Array[OpenAI::Graders::LabelModelGrader::Input]) } attr_accessor :input # The labels to assign to each item in the evaluation. @@ -38,7 +38,7 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[OpenAI::EvalItem::OrHash], + input: T::Array[OpenAI::Graders::LabelModelGrader::Input::OrHash], labels: T::Array[String], model: String, name: String, @@ -64,7 +64,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::EvalItem], + input: T::Array[OpenAI::Graders::LabelModelGrader::Input], labels: T::Array[String], model: String, name: String, @@ -75,6 +75,225 @@ module OpenAI end def to_hash end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader::Input, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. 
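+        # A minimal construction sketch (illustrative values; the template
+        # string is an assumption about how item fields are referenced):
+        #
+        #   OpenAI::Graders::LabelModelGrader::Input.new(
+        #     role: :developer,
+        #     content: "Classify the sentiment of {{item.text}} as positive or negative."
+        #   )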
+ module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/graders/multi_grader.rbi b/rbi/openai/models/graders/multi_grader.rbi index bed184f8..bbf5c142 100644 --- a/rbi/openai/models/graders/multi_grader.rbi +++ b/rbi/openai/models/graders/multi_grader.rbi @@ -35,7 +35,7 @@ module OpenAI sig { returns(String) } attr_accessor :name - # The type of grader. + # The object type, which is always `multi`. sig { returns(Symbol) } attr_accessor :type @@ -65,7 +65,7 @@ module OpenAI graders:, # The name of the grader. name:, - # The type of grader. + # The object type, which is always `multi`. type: :multi ) end diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi index d17b745c..ab602eda 100644 --- a/rbi/openai/models/graders/score_model_grader.rbi +++ b/rbi/openai/models/graders/score_model_grader.rbi @@ -12,7 +12,7 @@ module OpenAI end # The input text. This may include template strings. 
- sig { returns(T::Array[OpenAI::EvalItem]) } + sig { returns(T::Array[OpenAI::Graders::ScoreModelGrader::Input]) } attr_accessor :input # The model to use for the evaluation. @@ -44,7 +44,7 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[OpenAI::EvalItem::OrHash], + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input::OrHash], model: String, name: String, range: T::Array[Float], @@ -71,7 +71,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::EvalItem], + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input], model: String, name: String, type: Symbol, @@ -82,6 +82,225 @@ module OpenAI end def to_hash end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::ScoreModelGrader::Input, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. 
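+              # As an aside, a hedged construction sketch for the enclosing
+              # `ScoreModelGrader::Input` using this output-text content form
+              # (illustrative values; `{{sample.output_text}}` is assumed to be
+              # a valid template reference):
+              #
+              #   OpenAI::Graders::ScoreModelGrader::Input.new(
+              #     role: :assistant,
+              #     content: { text: "{{sample.output_text}}", type: :output_text }
+              #   )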
+ sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi index c3c6dc2d..1e5ec9ca 100644 --- a/rbi/openai/models/vector_store.rbi +++ b/rbi/openai/models/vector_store.rbi @@ -52,11 +52,11 @@ module OpenAI attr_accessor :usage_bytes # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStore::ExpiresAfter)) } attr_reader :expires_after sig do - params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void + params(expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash).void end attr_writer :expires_after @@ -76,7 +76,7 @@ module OpenAI name: String, status: OpenAI::VectorStore::Status::OrSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash, expires_at: T.nilable(Integer), object: Symbol ).returns(T.attached_class) @@ -125,7 +125,7 @@ module OpenAI object: Symbol, status: OpenAI::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer) } ) @@ -218,6 +218,37 @@ module OpenAI def self.values end end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
+ sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi index fb7b5912..674fc93d 100644 --- a/rbi/openai/models/vector_store_create_params.rbi +++ b/rbi/openai/models/vector_store_create_params.rbi @@ -37,11 +37,13 @@ module OpenAI attr_writer :chunking_strategy # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreCreateParams::ExpiresAfter)) } attr_reader :expires_after sig do - params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void + params( + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash + ).void end attr_writer :expires_after @@ -77,7 +79,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -115,7 +117,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam ), - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -125,6 +127,40 @@ module OpenAI end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
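+          # As a usage sketch (hypothetical `client` and store ID; a plain hash
+          # is accepted here via `OrHash`):
+          #
+          #   client.vector_stores.create(
+          #     name: "support-docs",
+          #     expires_after: { anchor: :last_active_at, days: 30 }
+          #   )
+          #   client.vector_stores.update("vs_123", expires_after: { anchor: :last_active_at, days: 7 })
+          #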
+ anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/vector_store_expiration_after.rbi b/rbi/openai/models/vector_store_expiration_after.rbi deleted file mode 100644 index 7b06060c..00000000 --- a/rbi/openai/models/vector_store_expiration_after.rbi +++ /dev/null @@ -1,36 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::VectorStoreExpirationAfter, OpenAI::Internal::AnyHash) - end - - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - sig { returns(Symbol) } - attr_accessor :anchor - - # The number of days after the anchor time that the vector store will expire. - sig { returns(Integer) } - attr_accessor :days - - # The expiration policy for a vector store. - sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new( - # The number of days after the anchor time that the vector store will expire. - days:, - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - anchor: :last_active_at - ) - end - - sig { override.returns({ anchor: Symbol, days: Integer }) } - def to_hash - end - end - end -end diff --git a/rbi/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi index 8e2409f5..1d755b92 100644 --- a/rbi/openai/models/vector_store_update_params.rbi +++ b/rbi/openai/models/vector_store_update_params.rbi @@ -12,12 +12,13 @@ module OpenAI end # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter)) } attr_reader :expires_after sig do params( - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash) + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash) ).void end attr_writer :expires_after @@ -37,7 +38,8 @@ module OpenAI sig do params( - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash @@ -62,7 +64,8 @@ module OpenAI sig do override.returns( { - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions @@ -71,6 +74,40 @@ module OpenAI end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreUpdateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. 
Supported anchors: + # `last_active_at`. + anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index d3b01a5d..9279a612 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -148,7 +148,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -296,7 +298,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index 755fcfbc..a1d803bf 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -61,7 +61,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -235,7 +237,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 86e351a0..183f65ed 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -15,7 +15,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 802f6110..7be8933d 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -17,7 +17,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -65,7 +65,8 @@ module OpenAI sig do params( vector_store_id: String, - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index e151276a..f7ef937f 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -57,12 +57,8 @@ module OpenAI class 
EvalDeleteParams = OpenAI::Models::EvalDeleteParams - class EvalItem = OpenAI::Models::EvalItem - class EvalListParams = OpenAI::Models::EvalListParams - class EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - class EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams module Evals = OpenAI::Models::Evals @@ -177,8 +173,6 @@ module OpenAI class VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - class VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - class VectorStoreListParams = OpenAI::Models::VectorStoreListParams class VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 406904b2..ca752e94 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -17,7 +17,7 @@ module OpenAI tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::TruncationObject? + truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? } & OpenAI::Internal::Type::request_parameters @@ -59,7 +59,7 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? + attr_accessor truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? def initialize: ( assistant_id: String, @@ -76,7 +76,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> void @@ -383,6 +383,34 @@ module OpenAI def initialize: (?vector_store_ids: ::Array[String]) -> void end end + + type truncation_strategy = + { + type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + ?last_messages: Integer? + ) -> void + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] + end + end end end end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 50a8ae87..0d4ed055 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -27,7 +27,7 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::TruncationObject?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, usage: OpenAI::Beta::Threads::Run::Usage?, temperature: Float?, top_p: Float? @@ -80,7 +80,7 @@ module OpenAI attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool] - attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? 
+ attr_accessor truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy? attr_accessor usage: OpenAI::Beta::Threads::Run::Usage? @@ -111,7 +111,7 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::TruncationObject?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, usage: OpenAI::Beta::Threads::Run::Usage?, ?temperature: Float?, ?top_p: Float?, @@ -203,6 +203,34 @@ module OpenAI end end + type truncation_strategy = + { + type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, + ?last_messages: Integer? + ) -> void + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::TruncationStrategy::type_] + end + end + type usage = { completion_tokens: Integer, diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index c4f2cac4..ab0e1ca8 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -20,7 +20,7 @@ module OpenAI tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::TruncationObject? + truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? } & OpenAI::Internal::Type::request_parameters @@ -66,7 +66,7 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? + attr_accessor truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? def initialize: ( assistant_id: String, @@ -85,7 +85,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> void @@ -185,6 +185,34 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::model] end + + type truncation_strategy = + { + type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + ?last_messages: Integer? 
+ ) -> void + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_] + end + end end end end diff --git a/sig/openai/models/beta/truncation_object.rbs b/sig/openai/models/beta/truncation_object.rbs deleted file mode 100644 index 4e4f1ae3..00000000 --- a/sig/openai/models/beta/truncation_object.rbs +++ /dev/null @@ -1,30 +0,0 @@ -module OpenAI - module Models - module Beta - type truncation_object = - { type: OpenAI::Beta::TruncationObject::type_, last_messages: Integer? } - - class TruncationObject < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::TruncationObject::type_ - - attr_accessor last_messages: Integer? - - def initialize: ( - type: OpenAI::Beta::TruncationObject::type_, - ?last_messages: Integer? - ) -> void - - type type_ = :auto | :last_messages - - module Type - extend OpenAI::Internal::Type::Enum - - AUTO: :auto - LAST_MESSAGES: :last_messages - - def self?.values: -> ::Array[OpenAI::Beta::TruncationObject::type_] - end - end - end - end -end diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 8f80d09b..52519f63 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -33,6 +33,7 @@ module OpenAI | :"gpt-4o-search-preview-2025-03-11" | :"gpt-4o-mini-search-preview-2025-03-11" | :"chatgpt-4o-latest" + | :"codex-mini-latest" | :"gpt-4o-mini" | :"gpt-4o-mini-2024-07-18" | :"gpt-4-turbo" @@ -90,6 +91,7 @@ module OpenAI GPT_4O_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-search-preview-2025-03-11" GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST: :"chatgpt-4o-latest" + CODEX_MINI_LATEST: :"codex-mini-latest" GPT_4O_MINI: :"gpt-4o-mini" GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" GPT_4_TURBO: :"gpt-4-turbo" diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 65ab4944..958726de 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -137,7 +137,7 @@ module OpenAI type input = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage - | OpenAI::EvalItem + | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem module Input extend OpenAI::Internal::Type::Union @@ -152,6 +152,75 @@ module OpenAI def initialize: (content: String, role: String) -> void end + type eval_item = + { + content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content + + attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role + + attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? 
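+            # A hedged sketch of one entry in the label-model criterion's
+            # `input` array using this item shape (the template string is
+            # illustrative):
+            #
+            #   { role: :developer, content: "Label the sentiment of {{item.text}}." }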
+ + def type=: ( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + + def initialize: ( + content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + ?type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] + end + end + def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] end end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 34408cec..33ae38e4 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -38,12 +38,33 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::data_source_config] end diff --git a/sig/openai/models/eval_item.rbs b/sig/openai/models/eval_item.rbs deleted file mode 100644 index fe989ca9..00000000 --- a/sig/openai/models/eval_item.rbs +++ /dev/null @@ -1,70 +0,0 @@ -module OpenAI - module Models - type eval_item = - { - content: OpenAI::EvalItem::content, - role: OpenAI::EvalItem::role, - type: OpenAI::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalItem::content - - attr_accessor role: OpenAI::EvalItem::role - - attr_reader type: OpenAI::EvalItem::type_? 
- - def type=: (OpenAI::EvalItem::type_) -> OpenAI::EvalItem::type_ - - def initialize: ( - content: OpenAI::EvalItem::content, - role: OpenAI::EvalItem::role, - ?type: OpenAI::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::EvalItem::type_] - end - end - end -end diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index d9f4939c..67e93038 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -38,12 +38,33 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalListResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::data_source_config] end diff --git a/sig/openai/models/eval_logs_data_source_config.rbs b/sig/openai/models/eval_logs_data_source_config.rbs deleted file mode 100644 index fb0e37aa..00000000 --- a/sig/openai/models/eval_logs_data_source_config.rbs +++ /dev/null @@ -1,24 +0,0 @@ -module OpenAI - module Models - type eval_logs_data_source_config = - { - schema: ::Hash[Symbol, top], - type: :logs, - metadata: OpenAI::Models::metadata? - } - - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - attr_accessor schema: ::Hash[Symbol, top] - - attr_accessor type: :logs - - attr_accessor metadata: OpenAI::Models::metadata? 
- - def initialize: ( - schema: ::Hash[Symbol, top], - ?metadata: OpenAI::Models::metadata?, - ?type: :logs - ) -> void - end - end -end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index e04883e7..1a0c3d72 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -38,12 +38,33 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::data_source_config] end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index cf2da5e0..3b28136d 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -38,12 +38,33 @@ module OpenAI type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? 
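+          # A hedged reading sketch (hypothetical `evl` object) for branching
+          # on the data-source-config union that now nests this variant:
+          #
+          #   case evl.data_source_config
+          #   in OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs => logs
+          #     p logs.schema
+          #   else
+          #     # custom or stored-completions config
+          #   end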
+ + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::data_source_config] end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index 99116842..bec2dc8b 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -40,13 +40,56 @@ module OpenAI ) -> void type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions module Source extend OpenAI::Internal::Type::Union + type file_content = + { + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + type stored_completions = { type: :stored_completions, @@ -117,11 +160,81 @@ module OpenAI ) -> void type template = - OpenAI::Responses::EasyInputMessage | OpenAI::EvalItem + OpenAI::Responses::EasyInputMessage + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message module Template extend OpenAI::Internal::Type::Union + type message = + { + content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + } + + class Message < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content + + attr_accessor role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role + + attr_reader type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? 
+ + def type=: ( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + + def initialize: ( + content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + ?type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] + end + end + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] end end diff --git a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs index c7352e33..c376b674 100644 --- a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -18,12 +18,55 @@ module OpenAI ) -> void type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID module Source extend OpenAI::Internal::Type::Union + type file_content = + { + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
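+            # Illustrative only, based on the accessors above: each Content
+            # row carries a required `item` hash and an optional `sample`
+            # hash, e.g.
+            #
+            #   Content.new(item: {input: "2+2"}, sample: {output: "4"})
+            #
+            # Rows are wrapped in FileContent; alternatively a FileID can
+            # point at an uploaded JSONL file, per the `file_id` variant
+            # below.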
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::source] end end diff --git a/sig/openai/models/evals/create_eval_responses_run_data_source.rbs b/sig/openai/models/evals/create_eval_responses_run_data_source.rbs deleted file mode 100644 index 2f24f2f9..00000000 --- a/sig/openai/models/evals/create_eval_responses_run_data_source.rbs +++ /dev/null @@ -1,216 +0,0 @@ -module OpenAI - module Models - module Evals - type create_eval_responses_run_data_source = - { - source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, - input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, - model: String, - sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - } - - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source - - attr_accessor type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_ - - attr_reader input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages? - - def input_messages=: ( - OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages - ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams? - - def sampling_params=: ( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - - def initialize: ( - source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, - ?input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, - ?model: String, - ?sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) -> void - - type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type responses = - { - type: :responses, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - tools: ::Array[String]?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor tools: ::Array[String]? - - attr_accessor top_p: Float? - - attr_accessor users: ::Array[String]? 
- - def initialize: ( - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?tools: ::Array[String]?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::source] - end - - type type_ = :responses - - module Type - extend OpenAI::Internal::Type::Enum - - RESPONSES: :responses - - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::type_] - end - - type input_messages = - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template - | OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - | OpenAI::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? 
- - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - end - end -end diff --git a/sig/openai/models/evals/eval_jsonl_file_content_source.rbs b/sig/openai/models/evals/eval_jsonl_file_content_source.rbs deleted file mode 100644 index 09d077ee..00000000 --- a/sig/openai/models/evals/eval_jsonl_file_content_source.rbs +++ /dev/null @@ -1,40 +0,0 @@ -module OpenAI - module Models - class EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - - module Evals - type eval_jsonl_file_content_source = - { - content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - type: :file_content - } - - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - end - end -end diff --git a/sig/openai/models/evals/eval_jsonl_file_id_source.rbs b/sig/openai/models/evals/eval_jsonl_file_id_source.rbs deleted file mode 100644 index 611068c4..00000000 --- a/sig/openai/models/evals/eval_jsonl_file_id_source.rbs +++ /dev/null @@ -1,17 +0,0 @@ -module OpenAI - module Models - class EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - - module Evals - type eval_jsonl_file_id_source = { id: String, type: :file_id } - - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - end - end -end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 6aa68c38..b035a401 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -68,11 +68,327 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages + + attr_reader model: String? 
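+          # Since `data_source` is a union (see `self?.variants` at the end
+          # of this module), a consumer can branch on the concrete class; a
+          # rough sketch, assuming `run` is a RunCancelResponse:
+          #
+          #   case run.data_source
+          #   in OpenAI::Evals::CreateEvalJSONLRunDataSource then ...
+          #   in OpenAI::Evals::CreateEvalCompletionsRunDataSource then ...
+          #   in OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses then ...
+          #   end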
+ + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + type source = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
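+                # The `content` union declared below accepts a plain String,
+                # a ResponseInputText, or the nested OutputText model; an
+                # assumed example for the OutputText variant:
+                #
+                #   OutputText.new(text: "Paris", type: :output_text)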
+ + def type=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::data_source] end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index e52fc6de..b14b7bbf 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -31,11 +31,337 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union + type create_eval_responses_run_data_source = + { + source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source + + attr_accessor type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ + + attr_reader input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? + + def input_messages=: ( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? 
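+          # A rough end-to-end sketch of this data source with illustrative
+          # values (names relative to this namespace); `source` and
+          # `input_messages` take the union variants defined below:
+          #
+          #   CreateEvalResponsesRunDataSource.new(
+          #     source: Source::Responses.new(model: "gpt-4.1"),
+          #     type: :responses,
+          #     model: "gpt-4.1",
+          #     sampling_params: SamplingParams.new(temperature: 0.2)
+          #   )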
+ + def sampling_params=: ( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + + def initialize: ( + source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + ?model: String, + ?sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> void + + type source = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
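+            # All filter fields above are nilable and optional in the
+            # `initialize` below, so a narrow query might look like (values
+            # illustrative):
+            #
+            #   Source::Responses.new(
+            #     created_after: 1_700_000_000, # Integer; unix seconds assumed
+            #     model: "gpt-4.1",
+            #     has_tool_calls: false
+            #   )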
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] + end + + type type_ = :responses + + module Type + extend OpenAI::Internal::Type::Enum + + RESPONSES: :responses + + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] + end + + type input_messages = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + type eval_item = + { + content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
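+            # Every field is optional per the `initialize` below; a
+            # conservative, illustrative configuration:
+            #
+            #   SamplingParams.new(max_completion_tokens: 256, seed: 42,
+            #                      temperature: 0.0, top_p: 1.0)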
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::data_source] end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 8bea9b11..9de26593 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -68,11 +68,327 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + type source = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: 
OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? + + def type=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? 
+ + def temperature=: (Float) -> Float + + attr_reader top_p: Float? + + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::data_source] end diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 4b372182..8e8c3ae2 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -68,11 +68,327 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + type source = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? 
+ + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + 
+ class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? + + def type=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::data_source] end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index f6247ab4..d9dda7ea 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -68,11 +68,327 @@ module OpenAI type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + type source = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + has_tool_calls: bool?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor has_tool_calls: bool? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?has_tool_calls: bool?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + type template = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
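Per the unions just declared, a `template` array may mix plain chat messages with typed eval items, and `{{item.*}}` placeholders are resolved against each datum. A minimal sketch, assuming the usual hash-to-model coercion in this SDK:

    template = [
      { role: "system", content: "You are a strict grader." },    # ChatMessage shape
      { role: :user, type: :message,                               # EvalItem shape
        content: { type: :input_text, text: "{{item.question}}" } }
    ]
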
+ + def type=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::data_source] end diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs index 381664df..93d8ef92 100644 --- a/sig/openai/models/graders/label_model_grader.rbs +++ b/sig/openai/models/graders/label_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type label_model_grader = { - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], labels: ::Array[String], model: String, name: String, @@ -14,7 +14,7 @@ module OpenAI } class LabelModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalItem] + attr_accessor input: ::Array[OpenAI::Graders::LabelModelGrader::Input] attr_accessor labels: ::Array[String] @@ -27,13 +27,82 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], labels: ::Array[String], model: String, name: String, passing_labels: ::Array[String], ?type: :label_model ) -> void + + type input = + { + content: OpenAI::Graders::LabelModelGrader::Input::content, + role: OpenAI::Graders::LabelModelGrader::Input::role, + type: OpenAI::Graders::LabelModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Graders::LabelModelGrader::Input::content + + attr_accessor role: OpenAI::Graders::LabelModelGrader::Input::role + + attr_reader type: OpenAI::Graders::LabelModelGrader::Input::type_? + + def type=: ( + OpenAI::Graders::LabelModelGrader::Input::type_ + ) -> OpenAI::Graders::LabelModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Graders::LabelModelGrader::Input::content, + role: OpenAI::Graders::LabelModelGrader::Input::role, + ?type: OpenAI::Graders::LabelModelGrader::Input::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::type_] + end + end end end end diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs index 7783564c..74b9785e 100644 --- a/sig/openai/models/graders/score_model_grader.rbs +++ b/sig/openai/models/graders/score_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type score_model_grader = { - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], model: 
String, name: String, type: :score_model, @@ -14,7 +14,7 @@ module OpenAI } class ScoreModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalItem] + attr_accessor input: ::Array[OpenAI::Graders::ScoreModelGrader::Input] attr_accessor model: String @@ -31,13 +31,82 @@ module OpenAI def sampling_params=: (top) -> top def initialize: ( - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], model: String, name: String, ?range: ::Array[Float], ?sampling_params: top, ?type: :score_model ) -> void + + type input = + { + content: OpenAI::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Graders::ScoreModelGrader::Input::role, + type: OpenAI::Graders::ScoreModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Graders::ScoreModelGrader::Input::content + + attr_accessor role: OpenAI::Graders::ScoreModelGrader::Input::role + + attr_reader type: OpenAI::Graders::ScoreModelGrader::Input::type_? + + def type=: ( + OpenAI::Graders::ScoreModelGrader::Input::type_ + ) -> OpenAI::Graders::ScoreModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Graders::ScoreModelGrader::Input::role, + ?type: OpenAI::Graders::ScoreModelGrader::Input::type_ + ) -> void + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + end + + def self?.variants: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::type_] + end + end end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 2fc80760..5ba3aa0f 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -11,7 +11,7 @@ module OpenAI object: :vector_store, status: OpenAI::VectorStore::status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: Integer? } @@ -34,11 +34,11 @@ module OpenAI attr_accessor usage_bytes: Integer - attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? + attr_reader expires_after: OpenAI::VectorStore::ExpiresAfter? def expires_after=: ( - OpenAI::VectorStoreExpirationAfter - ) -> OpenAI::VectorStoreExpirationAfter + OpenAI::VectorStore::ExpiresAfter + ) -> OpenAI::VectorStore::ExpiresAfter attr_accessor expires_at: Integer? 
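With expiration policies now modeled as a class nested on the store itself, a vector store that lapses after inactivity can be created roughly as follows (a sketch; it assumes `OPENAI_API_KEY` is set in the environment and that param hashes are coerced into the nested `ExpiresAfter` model, as elsewhere in this SDK):

    require "openai"

    client = OpenAI::Client.new # picks up OPENAI_API_KEY from the environment
    store = client.vector_stores.create(
      name: "support-kb",
      expires_after: { anchor: :last_active_at, days: 7 } # expire 7 days after last activity
    )
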
@@ -51,7 +51,7 @@ module OpenAI name: String, status: OpenAI::VectorStore::status, usage_bytes: Integer, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStore::ExpiresAfter, ?expires_at: Integer?, ?object: :vector_store ) -> void @@ -96,6 +96,16 @@ module OpenAI def self?.values: -> ::Array[OpenAI::VectorStore::status] end + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + end end end end diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 151f9abf..67a550f7 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_create_params = { chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, file_ids: ::Array[String], metadata: OpenAI::Models::metadata?, name: String @@ -20,11 +20,11 @@ module OpenAI OpenAI::Models::file_chunking_strategy_param ) -> OpenAI::Models::file_chunking_strategy_param - attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? + attr_reader expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter? def expires_after=: ( - OpenAI::VectorStoreExpirationAfter - ) -> OpenAI::VectorStoreExpirationAfter + OpenAI::VectorStoreCreateParams::ExpiresAfter + ) -> OpenAI::VectorStoreCreateParams::ExpiresAfter attr_reader file_ids: ::Array[String]? @@ -38,12 +38,22 @@ module OpenAI def initialize: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + end end end end diff --git a/sig/openai/models/vector_store_expiration_after.rbs b/sig/openai/models/vector_store_expiration_after.rbs deleted file mode 100644 index 3521d748..00000000 --- a/sig/openai/models/vector_store_expiration_after.rbs +++ /dev/null @@ -1,14 +0,0 @@ -module OpenAI - module Models - type vector_store_expiration_after = - { anchor: :last_active_at, days: Integer } - - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: :last_active_at - - attr_accessor days: Integer - - def initialize: (days: Integer, ?anchor: :last_active_at) -> void - end - end -end diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index f86edba5..cb5a0433 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type vector_store_update_params = { - expires_after: OpenAI::VectorStoreExpirationAfter?, + expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, metadata: OpenAI::Models::metadata?, name: String? 
} @@ -12,18 +12,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor expires_after: OpenAI::VectorStoreExpirationAfter? + attr_accessor expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter? attr_accessor metadata: OpenAI::Models::metadata? attr_accessor name: String? def initialize: ( - ?expires_after: OpenAI::VectorStoreExpirationAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts ) -> void + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + end end end end diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index fe80a355..27b8eeaa 100644 --- a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -45,7 +45,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -64,7 +64,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 8ca38f5d..38743701 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -23,7 +23,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -45,7 +45,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index 7bf0650d..d717bd54 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -7,7 +7,7 @@ module OpenAI def create: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, @@ -21,7 +21,7 @@ module OpenAI def update: ( String 
vector_store_id, - ?expires_after: OpenAI::VectorStoreExpirationAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index c43e45d1..bed1e829 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -35,7 +35,7 @@ def test_create_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -75,7 +75,7 @@ def test_retrieve_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -115,7 +115,7 @@ def test_update_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -162,7 +162,7 @@ def test_list thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -202,7 +202,7 @@ def test_cancel_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -243,7 +243,7 @@ def test_submit_tool_outputs_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index 681dad3c..903a5185 100644 --- a/test/openai/resources/beta/threads_test.rb +++ b/test/openai/resources/beta/threads_test.rb @@ -105,7 +105,7 @@ def test_create_and_run_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: 
OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index 4e020135..bbce9895 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -21,7 +21,7 @@ def test_create object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -45,7 +45,7 @@ def test_retrieve object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -69,7 +69,7 @@ def test_update object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -100,7 +100,7 @@ def test_list object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end From a3c865413bc764b80bebbb420d7e7cc8cb48e9a5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 16 May 2025 19:41:49 +0000 Subject: [PATCH 194/295] feat(api): further updates for evals API --- .stats.yml | 4 +-- lib/openai/models/eval_create_params.rb | 32 ++++++++++------- lib/openai/models/eval_create_response.rb | 4 +-- lib/openai/models/eval_list_response.rb | 4 +-- lib/openai/models/eval_retrieve_response.rb | 4 +-- ...l_stored_completions_data_source_config.rb | 10 +++--- lib/openai/models/eval_update_response.rb | 4 +-- ...create_eval_completions_run_data_source.rb | 23 ++++++++---- .../create_eval_jsonl_run_data_source.rb | 5 ++- .../models/evals/run_cancel_response.rb | 34 +++++++++--------- lib/openai/models/evals/run_create_params.rb | 34 +++++++++--------- .../models/evals/run_create_response.rb | 34 +++++++++--------- lib/openai/models/evals/run_list_response.rb | 34 +++++++++--------- .../models/evals/run_retrieve_response.rb | 34 +++++++++--------- lib/openai/resources/evals.rb | 7 ++-- lib/openai/resources/evals/runs.rb | 4 ++- rbi/openai/models/eval_create_params.rbi | 31 ++++++++++------ rbi/openai/models/eval_create_response.rbi | 2 +- rbi/openai/models/eval_list_response.rbi | 2 +- rbi/openai/models/eval_retrieve_response.rbi | 2 +- ..._stored_completions_data_source_config.rbi | 6 ++-- rbi/openai/models/eval_update_response.rbi | 2 +- ...reate_eval_completions_run_data_source.rbi | 26 ++++++++++---- .../create_eval_jsonl_run_data_source.rbi | 3 ++ .../models/evals/run_cancel_response.rbi | 36 ++++++++++--------- rbi/openai/models/evals/run_create_params.rbi | 36 ++++++++++--------- .../models/evals/run_create_response.rbi | 36 ++++++++++--------- rbi/openai/models/evals/run_list_response.rbi | 36 ++++++++++--------- .../models/evals/run_retrieve_response.rbi | 36 ++++++++++--------- rbi/openai/resources/evals.rbi | 11 ++++-- rbi/openai/resources/evals/runs.rbi | 4 ++- 
sig/openai/models/eval_create_params.rbs | 6 ++-- ..._stored_completions_data_source_config.rbs | 6 ++-- .../models/evals/run_cancel_response.rbs | 4 --- sig/openai/models/evals/run_create_params.rbs | 4 --- .../models/evals/run_create_response.rbs | 4 --- sig/openai/models/evals/run_list_response.rbs | 4 --- .../models/evals/run_retrieve_response.rbs | 4 --- 38 files changed, 307 insertions(+), 265 deletions(-) diff --git a/.stats.yml b/.stats.yml index 0f16b69f..92b3b624 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-5fa16b9a02985ae06e41be14946a9c325dc672fb014b3c19abca65880c6990e6.yml -openapi_spec_hash: da3e669f65130043b1170048c0727890 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-262e171d0a8150ea1192474d16ba3afdf9a054b399f1a49a9c9b697a3073c136.yml +openapi_spec_hash: 33e00a48df8f94c94f46290c489f132b config_hash: d8d5fda350f6db77c784f35429741a2e diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index c116ef8e..395ac73a 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -8,13 +8,17 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel include OpenAI::Internal::Type::RequestParameters # @!attribute data_source_config - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. # # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig } # @!attribute testing_criteria - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). # # @return [Array] required :testing_criteria, @@ -41,9 +45,9 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::EvalCreateParams} for more details. # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # @@ -51,7 +55,8 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union @@ -68,7 +73,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::EvalCreateParams::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions } + variant :stored_completions, -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions } class Custom < OpenAI::Internal::Type::BaseModel # @!attribute item_schema @@ -130,12 +135,13 @@ class Logs < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :logs] The type of data source. Always `logs`. end + # @deprecated class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. # - # @return [Symbol, :"stored-completions"] - required :type, const: :"stored-completions" + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions # @!attribute metadata # Metadata filters for the stored completions data source. @@ -143,12 +149,12 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!method initialize(metadata: nil, type: :"stored-completions") + # @!method initialize(metadata: nil, type: :stored_completions) # Deprecated in favor of LogsDataSourceConfig. # # @param metadata [Hash{Symbol=>Object}] Metadata filters for the stored completions data source. # - # @param type [Symbol, :"stored-completions"] The type of data source. Always `stored-completions`. + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end # @!method self.variants @@ -191,7 +197,7 @@ module TestingCriterion class LabelModel < OpenAI::Internal::Type::BaseModel # @!attribute input # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :input, @@ -249,7 +255,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :label_model] The object type, which is always `label_model`. # A chat message that makes up the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. module Input extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 9ef32e72..965ad60b 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -61,7 +61,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. 
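Putting the pieces documented here together, creating an eval with a custom item schema and a label-model grader that uses the double-curly-brace notation might look roughly like the following. Parameter names mirror this patch; the schema, labels, and model are illustrative, and `include_sample_schema` is assumed to be the switch that exposes the `sample` namespace to graders:

    require "openai"

    client = OpenAI::Client.new
    evaluation = client.evals.create(
      name: "qa-accuracy",
      data_source_config: {
        type: :custom,
        item_schema: {
          "type" => "object",
          "properties" => { "question" => { "type" => "string" } },
          "required" => ["question"]
        },
        include_sample_schema: true # assumed flag enabling {{sample.*}} references
      },
      testing_criteria: [
        {
          type: :label_model,
          name: "accuracy",
          model: "gpt-4o",
          input: [
            { role: :developer, content: "Label the reply as correct or incorrect." },
            { role: :user, content: "Q: {{item.question}}\nA: {{sample.output_text}}" }
          ],
          labels: %w[correct incorrect],
          passing_labels: %w[correct]
        }
      ]
    )
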
# @@ -98,7 +98,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute schema diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index dedd586f..88b9d44f 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -61,7 +61,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # @@ -98,7 +98,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::Models::EvalListResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute schema diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index f608708d..74b959f6 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -61,7 +61,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # @@ -98,7 +98,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute schema diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index 28bdf315..f9b04990 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -12,10 +12,10 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # @!attribute type - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. # - # @return [Symbol, :"stored-completions"] - required :type, const: :"stored-completions" + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. 
This can be useful @@ -28,7 +28,7 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!method initialize(schema:, metadata: nil, type: :"stored-completions") + # @!method initialize(schema:, metadata: nil, type: :stored_completions) # Some parameter documentations has been truncated, see # {OpenAI::EvalStoredCompletionsDataSourceConfig} for more details. # @@ -38,7 +38,7 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param type [Symbol, :"stored-completions"] The type of data source. Always `stored-completions`. + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 775bae32..bf636971 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -61,7 +61,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # @@ -98,7 +98,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute schema diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 92535eb5..b91eadce 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -5,7 +5,7 @@ module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } @@ -17,6 +17,10 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type } # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
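Concretely, the two `input_messages` shapes described above might be written as follows; either can be supplied when configuring a run of this kind (values illustrative):

    # Inline template: builds the prompt from {{item.*}} references.
    input_messages_template = {
      type: :template,
      template: [
        { role: :system, content: "Answer concisely." },
        { role: :user, content: "{{item.question}}" }
      ]
    }

    # Item reference: reuses a trajectory already stored on each item.
    input_messages_reference = {
      type: :item_reference,
      item_reference: "item.input_trajectory"
    }
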
# # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, @@ -41,17 +45,17 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters + # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] Determines what populates the `item` namespace in this run's data source. # # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. # - # @param input_messages [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param input_messages [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # # @param sampling_params [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#source module Source @@ -212,6 +216,11 @@ module Type # @return [Array] end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -226,7 +235,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -423,7 +432,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" # # @return [String] required :item_reference, String @@ -435,7 +444,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. 
Ie, "item.input_trajectory" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index faedad0e..abeca553 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -5,6 +5,7 @@ module Models module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source + # Determines what populates the `item` namespace in the data source. # # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source } @@ -19,10 +20,12 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # A JsonlRunDataSource object with that specifies a JSONL file that matches the # eval # - # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] Determines what populates the `item` namespace in the data source. # # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`. + # Determines what populates the `item` namespace in the data source. + # # @see OpenAI::Evals::CreateEvalJSONLRunDataSource#source module Source extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index fcca1f52..892903bb 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -149,7 +149,7 @@ module DataSource class Responses < OpenAI::Internal::Type::BaseModel # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] required :source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source } @@ -161,6 +161,10 @@ class Responses < OpenAI::Internal::Type::BaseModel required :type, const: :responses # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. # # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference, nil] optional :input_messages, @@ -185,9 +189,9 @@ class Responses < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. 
# - # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. # - # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference] + # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # @@ -195,7 +199,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :responses] The type of run data source. Always `responses`. - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#source module Source @@ -288,13 +292,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :created_before, Integer, nil?: true - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - # @!attribute instructions_search # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. @@ -347,7 +344,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses} # for more details. @@ -358,8 +355,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us # # @param metadata [Object, nil] Metadata filter for the responses. 
This is a query parameter used to select resp @@ -393,6 +388,11 @@ class Responses < OpenAI::Internal::Type::BaseModel end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -408,7 +408,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -604,7 +604,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" # # @return [String] required :item_reference, String @@ -616,7 +616,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index dcc9e64b..810e3b1d 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -58,7 +58,7 @@ module DataSource class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] required :source, @@ -76,6 +76,10 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel } # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. # # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, @@ -104,17 +108,17 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. 
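As a rough end-to-end sketch, kicking off a run whose data source samples a model over stored responses could look like this; the eval ID is hypothetical, and the field names follow the params documented in this patch:

    require "openai"

    client = OpenAI::Client.new
    run = client.evals.runs.create(
      "eval_abc123", # hypothetical eval ID
      name: "gpt-4o-baseline",
      data_source: {
        type: :responses,
        source: { type: :responses, model: "gpt-4o" }, # filter over stored responses
        input_messages: { type: :item_reference, item_reference: "item.input_trajectory" },
        model: "gpt-4o" # model used to generate fresh completions
      }
    )
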
# - # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] Determines what populates the `item` namespace in this run's data source. # # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. # - # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] + # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # # @param sampling_params [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source module Source @@ -216,13 +220,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :created_before, Integer, nil?: true - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - # @!attribute instructions_search # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. @@ -275,7 +272,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} # for more details. @@ -286,8 +283,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. 
This is a query parameter used to select re - # # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp @@ -333,6 +328,11 @@ module Type # @return [Array] end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -352,7 +352,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -564,7 +564,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" # # @return [String] required :item_reference, String @@ -576,7 +576,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 67483e4b..e3953ad0 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -149,7 +149,7 @@ module DataSource class Responses < OpenAI::Internal::Type::BaseModel # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] required :source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source } @@ -161,6 +161,10 @@ class Responses < OpenAI::Internal::Type::BaseModel required :type, const: :responses # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
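The `item_reference` alternative skips templating entirely and replays a messages array already stored on each item, e.g. the prebuilt trajectory named in the docs above:

    # Each row's `item.input_trajectory` is used verbatim as the model input.
    input_messages = {
      type: :item_reference,
      item_reference: "item.input_trajectory"
    }
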
# # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference, nil] optional :input_messages, @@ -185,9 +189,9 @@ class Responses < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. # - # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference] + # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # @@ -195,7 +199,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :responses] The type of run data source. Always `responses`. - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#source module Source @@ -288,13 +292,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :created_before, Integer, nil?: true - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - # @!attribute instructions_search # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. @@ -347,7 +344,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses} # for more details. @@ -358,8 +355,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). 
This is a query pa # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp @@ -393,6 +388,11 @@ class Responses < OpenAI::Internal::Type::BaseModel end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -408,7 +408,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -604,7 +604,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" # # @return [String] required :item_reference, String @@ -616,7 +616,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index d3ed7e86..25b8de4c 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -149,7 +149,7 @@ module DataSource class Responses < OpenAI::Internal::Type::BaseModel # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] required :source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source } @@ -161,6 +161,10 @@ class Responses < OpenAI::Internal::Type::BaseModel required :type, const: :responses # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
# # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference, nil] optional :input_messages, @@ -185,9 +189,9 @@ class Responses < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. # - # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference] + # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # @@ -195,7 +199,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :responses] The type of run data source. Always `responses`. - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#source module Source @@ -288,13 +292,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :created_before, Integer, nil?: true - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - # @!attribute instructions_search # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. @@ -347,7 +344,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses} # for more details. @@ -358,8 +355,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). 
This is a query pa # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp @@ -393,6 +388,11 @@ class Responses < OpenAI::Internal::Type::BaseModel end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -408,7 +408,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -604,7 +604,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" # # @return [String] required :item_reference, String @@ -616,7 +616,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 58baf568..f3ee8394 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -149,7 +149,7 @@ module DataSource class Responses < OpenAI::Internal::Type::BaseModel # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] required :source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source } @@ -161,6 +161,10 @@ class Responses < OpenAI::Internal::Type::BaseModel required :type, const: :responses # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
# # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference, nil] optional :input_messages, @@ -185,9 +189,9 @@ class Responses < OpenAI::Internal::Type::BaseModel # # A ResponsesRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] A EvalResponsesSource object describing a run data source configuration. + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. # - # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference] + # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # @@ -195,7 +199,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :responses] The type of run data source. Always `responses`. - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. # # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#source module Source @@ -288,13 +292,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :created_before, Integer, nil?: true - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - # @!attribute instructions_search # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. @@ -347,7 +344,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses} # for more details. 
@@ -358,8 +355,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us # # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp @@ -393,6 +388,11 @@ class Responses < OpenAI::Internal::Type::BaseModel end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -408,7 +408,7 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # # @return [Array] required :template, @@ -608,7 +608,7 @@ module Type class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" # # @return [String] required :item_reference, String @@ -620,7 +620,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. end diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 448254eb..f673f38f 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -10,16 +10,17 @@ class Evals # {OpenAI::Models::EvalCreateParams} for more details. # # Create the structure of an evaluation that can be used to test a model's - # performance. An evaluation is a set of testing criteria and a datasource. After + # performance. An evaluation is a set of testing criteria and the config for a + # data source, which dictates the schema of the data used in the evaluation. After # creating an evaluation, you can run it on different models and model parameters. # We support several types of graders and datasources. For more information, see # the [Evals guide](https://platform.openai.com/docs/guides/evals). # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. 
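The reworded create docs emphasize that an eval binds testing criteria to a data source config whose schema constrains later runs. A usage sketch against this resource; the custom schema and string-check grader fields follow the public Evals API and are illustrative rather than taken from this patch:

    require "openai"

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

    eval_obj = client.evals.create(
      # Dictates the schema of the data used in the evaluation: every run's
      # items must validate against `item_schema`.
      data_source_config: {
        type: :custom,
        item_schema: {
          type: "object",
          properties: {name: {type: "string"}},
          required: ["name"]
        },
        include_sample_schema: true
      },
      # Graders reference `{{item.*}}` variables and the model's output via
      # the `sample` namespace, e.g. `{{sample.output_text}}`.
      testing_criteria: [
        {
          type: :string_check,
          name: "greets by name",
          input: "{{sample.output_text}}",
          reference: "{{item.name}}",
          operation: :ilike
        }
      ],
      name: "greeting-eval"
    )
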
+ # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 590951f6..cba5fdcd 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -10,7 +10,9 @@ class Runs # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. # - # Create a new evaluation run. This is the endpoint that will kick off grading. + # Kicks off a new run for a given evaluation, specifying the data source, and what + # model configuration to use to test. The datasource will be validated against the + # schema specified in the config of the evaluation. # # @overload create(eval_id, data_source:, metadata: nil, name: nil, request_options: {}) # diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index 3a866373..7131576a 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -11,7 +11,8 @@ module OpenAI T.any(OpenAI::EvalCreateParams, OpenAI::Internal::AnyHash) end - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. sig do returns( T.any( @@ -23,7 +24,10 @@ module OpenAI end attr_accessor :data_source_config - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). sig do returns( T::Array[ @@ -79,9 +83,13 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. data_source_config:, - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). testing_criteria:, # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -124,7 +132,8 @@ module OpenAI def to_hash end - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union @@ -252,7 +261,7 @@ module OpenAI ) end - # The type of data source. 
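A run then points that eval at a concrete data source, which, per the rewritten docs, is validated against the schema configured above. A sketch continuing from the previous snippet (the model choice and rows are placeholders):

    run = client.evals.runs.create(
      eval_obj.id,
      name: "gpt-4o-mini baseline",
      data_source: {
        type: :completions,
        source: {
          type: :file_content,
          content: [{item: {name: "Ada"}}, {item: {name: "Grace"}}]
        },
        model: "gpt-4o-mini",
        input_messages: {
          type: :template,
          template: [{role: :user, content: "Say hello to {{item.name}}."}]
        }
      }
    )
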
Always `stored-completions`. + # The type of data source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type @@ -272,8 +281,8 @@ module OpenAI def self.new( # Metadata filters for the stored completions data source. metadata: nil, - # The type of data source. Always `stored-completions`. - type: :"stored-completions" + # The type of data source. Always `stored_completions`. + type: :stored_completions ) end @@ -321,7 +330,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -374,7 +383,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. input:, # The labels to classify to each item in the evaluation. labels:, @@ -411,7 +420,7 @@ module OpenAI end # A chat message that makes up the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. module Input extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index 90045b13..a3dd70c2 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -66,7 +66,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index 7b8d9b2d..92683e20 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -66,7 +66,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index 90427a71..ead84473 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -66,7 +66,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, diff --git a/rbi/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi index ac338ea9..013b469d 100644 --- a/rbi/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/openai/models/eval_stored_completions_data_source_config.rbi @@ -16,7 +16,7 @@ module OpenAI sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :schema - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type @@ -48,8 +48,8 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. 
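Note the symbol rename that runs through the rbi and sig files here: the stored-completions data source config type moves from a kebab-case symbol to snake_case. Under the new surface:

    # Before this change: type: :"stored-completions"
    data_source_config = {
      type: :stored_completions,
      metadata: {environment: "production"} # optional filter; key/value are illustrative
    }
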
metadata: nil, - # The type of data source. Always `stored-completions`. - type: :"stored-completions" + # The type of data source. Always `stored_completions`. + type: :stored_completions ) end diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index 28512653..e2241470 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -66,7 +66,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 7dc975b1..3b8ebdb7 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -12,7 +12,7 @@ module OpenAI ) end - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -32,6 +32,10 @@ module OpenAI end attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -101,10 +105,14 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. source:, # The type of run data source. Always `completions`. type:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). model: nil, @@ -137,7 +145,7 @@ module OpenAI def to_hash end - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -402,6 +410,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -423,7 +435,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -454,7 +466,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -749,7 +761,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. 
Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" sig { returns(String) } attr_accessor :item_reference @@ -763,7 +775,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index f27424f1..d72d28b8 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -12,6 +12,7 @@ module OpenAI ) end + # Determines what populates the `item` namespace in the data source. sig do returns( T.any( @@ -39,6 +40,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( + # Determines what populates the `item` namespace in the data source. source:, # The type of data source. Always `jsonl`. type: :jsonl @@ -60,6 +62,7 @@ module OpenAI def to_hash end + # Determines what populates the `item` namespace in the data source. module Source extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 639b472c..163528b5 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -227,7 +227,7 @@ module OpenAI ) end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -243,6 +243,10 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -311,8 +315,12 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). model: nil, @@ -346,7 +354,7 @@ module OpenAI def to_hash end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -509,11 +517,6 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :created_before - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. 
sig { returns(T.nilable(String)) } @@ -557,7 +560,6 @@ module OpenAI params( created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -577,9 +579,6 @@ module OpenAI # Only include items created before this timestamp (inclusive). This is a query # parameter used to select responses. created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. instructions_search: nil, @@ -611,7 +610,6 @@ module OpenAI type: Symbol, created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -639,6 +637,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -660,7 +662,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -691,7 +693,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -1021,7 +1023,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -1035,7 +1037,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 616c505a..eba90132 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -109,7 +109,7 @@ module OpenAI ) end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -129,6 +129,10 @@ module OpenAI end attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -198,10 +202,14 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. source:, # The type of run data source. Always `responses`. 
type:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). model: nil, @@ -234,7 +242,7 @@ module OpenAI def to_hash end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -397,11 +405,6 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :created_before - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. sig { returns(T.nilable(String)) } @@ -443,7 +446,6 @@ module OpenAI params( created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -463,9 +465,6 @@ module OpenAI # Only include items created before this timestamp (inclusive). This is a query # parameter used to select responses. created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. instructions_search: nil, @@ -497,7 +496,6 @@ module OpenAI type: Symbol, created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -555,6 +553,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -576,7 +578,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -607,7 +609,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -937,7 +939,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -951,7 +953,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. 
type: :item_reference diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index c789a4a8..cab19c2c 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -227,7 +227,7 @@ module OpenAI ) end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -243,6 +243,10 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -311,8 +315,12 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). model: nil, @@ -346,7 +354,7 @@ module OpenAI def to_hash end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -509,11 +517,6 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :created_before - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. sig { returns(T.nilable(String)) } @@ -557,7 +560,6 @@ module OpenAI params( created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -577,9 +579,6 @@ module OpenAI # Only include items created before this timestamp (inclusive). This is a query # parameter used to select responses. created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. instructions_search: nil, @@ -611,7 +610,6 @@ module OpenAI type: Symbol, created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -639,6 +637,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -660,7 +662,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. 
May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -691,7 +693,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -1021,7 +1023,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -1035,7 +1037,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index 0ed89256..bf2e1d33 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -225,7 +225,7 @@ module OpenAI ) end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -241,6 +241,10 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -309,8 +313,12 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). model: nil, @@ -344,7 +352,7 @@ module OpenAI def to_hash end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -507,11 +515,6 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :created_before - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. sig { returns(T.nilable(String)) } @@ -555,7 +558,6 @@ module OpenAI params( created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -575,9 +577,6 @@ module OpenAI # Only include items created before this timestamp (inclusive). This is a query # parameter used to select responses. 
created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. instructions_search: nil, @@ -609,7 +608,6 @@ module OpenAI type: Symbol, created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -637,6 +635,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -658,7 +660,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -689,7 +691,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -1019,7 +1021,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -1033,7 +1035,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 1bdf4d77..82ffc62a 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -229,7 +229,7 @@ module OpenAI ) end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( @@ -245,6 +245,10 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -313,8 +317,12 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). 
model: nil, @@ -348,7 +356,7 @@ module OpenAI def to_hash end - # A EvalResponsesSource object describing a run data source configuration. + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union @@ -511,11 +519,6 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :created_before - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. sig { returns(T.nilable(String)) } @@ -559,7 +562,6 @@ module OpenAI params( created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -579,9 +581,6 @@ module OpenAI # Only include items created before this timestamp (inclusive). This is a query # parameter used to select responses. created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, # Optional string to search the 'instructions' field. This is a query parameter # used to select responses. instructions_search: nil, @@ -613,7 +612,6 @@ module OpenAI type: Symbol, created_after: T.nilable(Integer), created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), instructions_search: T.nilable(String), metadata: T.nilable(T.anything), model: T.nilable(String), @@ -641,6 +639,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -662,7 +664,7 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ @@ -693,7 +695,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. type: :template @@ -1023,7 +1025,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" sig { returns(String) } attr_accessor :item_reference @@ -1037,7 +1039,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.name" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index c7b5ca65..9ec9f490 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -7,7 +7,8 @@ module OpenAI attr_reader :runs # Create the structure of an evaluation that can be used to test a model's - # performance. An evaluation is a set of testing criteria and a datasource. After + # performance. 
An evaluation is a set of testing criteria and the config for a + # data source, which dictates the schema of the data used in the evaluation. After # creating an evaluation, you can run it on different models and model parameters. # We support several types of graders and datasources. For more information, see # the [Evals guide](https://platform.openai.com/docs/guides/evals). @@ -35,9 +36,13 @@ module OpenAI ).returns(OpenAI::Models::EvalCreateResponse) end def create( - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. data_source_config:, - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). testing_criteria:, # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 183f65ed..20df812d 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -7,7 +7,9 @@ module OpenAI sig { returns(OpenAI::Resources::Evals::Runs::OutputItems) } attr_reader :output_items - # Create a new evaluation run. This is the endpoint that will kick off grading. + # Kicks off a new run for a given evaluation, specifying the data source, and what + # model configuration to use to test. The datasource will be validated against the + # schema specified in the config of the evaluation. sig do params( eval_id: String, diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 958726de..8f48feac 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -75,10 +75,10 @@ module OpenAI end type stored_completions = - { type: :"stored-completions", metadata: ::Hash[Symbol, top] } + { type: :stored_completions, metadata: ::Hash[Symbol, top] } class StoredCompletions < OpenAI::Internal::Type::BaseModel - attr_accessor type: :"stored-completions" + attr_accessor type: :stored_completions attr_reader metadata: ::Hash[Symbol, top]? @@ -86,7 +86,7 @@ module OpenAI def initialize: ( ?metadata: ::Hash[Symbol, top], - ?type: :"stored-completions" + ?type: :stored_completions ) -> void end diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs index f77af6cc..345b12d6 100644 --- a/sig/openai/models/eval_stored_completions_data_source_config.rbs +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -3,21 +3,21 @@ module OpenAI type eval_stored_completions_data_source_config = { schema: ::Hash[Symbol, top], - type: :"stored-completions", + type: :stored_completions, metadata: OpenAI::Models::metadata? } class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel attr_accessor schema: ::Hash[Symbol, top] - attr_accessor type: :"stored-completions" + attr_accessor type: :stored_completions attr_accessor metadata: OpenAI::Models::metadata? 
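# A hedged construction sketch for the renamed tag symbol (the schema
# contents here are assumed; only `schema:` is required by the signature
# below, and `type:` defaults to the new underscored symbol):
#
#   OpenAI::Models::EvalStoredCompletionsDataSourceConfig.new(
#     schema: {"type" => "object"},
#     type: :stored_completions
#   )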
def initialize: ( schema: ::Hash[Symbol, top], ?metadata: OpenAI::Models::metadata?, - ?type: :"stored-completions" + ?type: :stored_completions ) -> void end end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index b035a401..6a69a19d 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -167,7 +167,6 @@ module OpenAI type: :responses, created_after: Integer?, created_before: Integer?, - has_tool_calls: bool?, instructions_search: String?, metadata: top?, model: String?, @@ -185,8 +184,6 @@ module OpenAI attr_accessor created_before: Integer? - attr_accessor has_tool_calls: bool? - attr_accessor instructions_search: String? attr_accessor metadata: top? @@ -206,7 +203,6 @@ module OpenAI def initialize: ( ?created_after: Integer?, ?created_before: Integer?, - ?has_tool_calls: bool?, ?instructions_search: String?, ?metadata: top?, ?model: String?, diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index b14b7bbf..d33730a8 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -130,7 +130,6 @@ module OpenAI type: :responses, created_after: Integer?, created_before: Integer?, - has_tool_calls: bool?, instructions_search: String?, metadata: top?, model: String?, @@ -148,8 +147,6 @@ module OpenAI attr_accessor created_before: Integer? - attr_accessor has_tool_calls: bool? - attr_accessor instructions_search: String? attr_accessor metadata: top? @@ -169,7 +166,6 @@ module OpenAI def initialize: ( ?created_after: Integer?, ?created_before: Integer?, - ?has_tool_calls: bool?, ?instructions_search: String?, ?metadata: top?, ?model: String?, diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 9de26593..767d086d 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -167,7 +167,6 @@ module OpenAI type: :responses, created_after: Integer?, created_before: Integer?, - has_tool_calls: bool?, instructions_search: String?, metadata: top?, model: String?, @@ -185,8 +184,6 @@ module OpenAI attr_accessor created_before: Integer? - attr_accessor has_tool_calls: bool? - attr_accessor instructions_search: String? attr_accessor metadata: top? @@ -206,7 +203,6 @@ module OpenAI def initialize: ( ?created_after: Integer?, ?created_before: Integer?, - ?has_tool_calls: bool?, ?instructions_search: String?, ?metadata: top?, ?model: String?, diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 8e8c3ae2..9e1b99da 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -167,7 +167,6 @@ module OpenAI type: :responses, created_after: Integer?, created_before: Integer?, - has_tool_calls: bool?, instructions_search: String?, metadata: top?, model: String?, @@ -185,8 +184,6 @@ module OpenAI attr_accessor created_before: Integer? - attr_accessor has_tool_calls: bool? - attr_accessor instructions_search: String? attr_accessor metadata: top? 
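# With `has_tool_calls` removed from every responses-source signature in
# this patch, a selection filter is built from the remaining query fields
# only, e.g. (all field values assumed):
#
#   {
#     type: :responses,
#     created_after: 1_700_000_000,
#     instructions_search: "math tutor",
#     model: "gpt-4o"
#   }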
@@ -206,7 +203,6 @@ module OpenAI def initialize: ( ?created_after: Integer?, ?created_before: Integer?, - ?has_tool_calls: bool?, ?instructions_search: String?, ?metadata: top?, ?model: String?, diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index d9dda7ea..f843309b 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -167,7 +167,6 @@ module OpenAI type: :responses, created_after: Integer?, created_before: Integer?, - has_tool_calls: bool?, instructions_search: String?, metadata: top?, model: String?, @@ -185,8 +184,6 @@ module OpenAI attr_accessor created_before: Integer? - attr_accessor has_tool_calls: bool? - attr_accessor instructions_search: String? attr_accessor metadata: top? @@ -206,7 +203,6 @@ module OpenAI def initialize: ( ?created_after: Integer?, ?created_before: Integer?, - ?has_tool_calls: bool?, ?instructions_search: String?, ?metadata: top?, ?model: String?, From f57f446af1f4d443a91bcffcac5440d870acbb38 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 13:13:27 +0000 Subject: [PATCH 195/295] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index 92b3b624..297d33cc 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-262e171d0a8150ea1192474d16ba3afdf9a054b399f1a49a9c9b697a3073c136.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d51538ac955164de98b0c94a0a4718d96623fe39bf31a1d168be06c93c94e645.yml openapi_spec_hash: 33e00a48df8f94c94f46290c489f132b -config_hash: d8d5fda350f6db77c784f35429741a2e +config_hash: c42d37618b8628ce7e1c76437db5dd8f From 96c68025d5f64f3bfd508cf4208a4aecb555703c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 15:31:48 +0000 Subject: [PATCH 196/295] chore: whitespaces --- Steepfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Steepfile b/Steepfile index d7aebca1..528b48c3 100644 --- a/Steepfile +++ b/Steepfile @@ -7,7 +7,7 @@ target(:lib) do signature("sig") - YAML.safe_load_file("./manifest.yaml", symbolize_names: true) => { dependencies: } + YAML.safe_load_file("./manifest.yaml", symbolize_names: true) => {dependencies:} # currently these libraries lack the `*.rbs` annotations required by `steep` stdlibs = dependencies - %w[English etc net/http rbconfig set stringio] From aaaf4af9bbd1b7d710d3e702f224bd6202effcc4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 19:56:57 +0000 Subject: [PATCH 197/295] fix: correctly instantiate sorbet type aliases for enums and unions --- lib/openai/internal/type/array_of.rb | 8 +++ lib/openai/internal/type/base_model.rb | 7 +++ lib/openai/internal/type/boolean.rb | 8 +++ lib/openai/internal/type/enum.rb | 12 +++++ lib/openai/internal/type/file_input.rb | 7 +++ lib/openai/internal/type/hash_of.rb | 8 +++ lib/openai/internal/type/union.rb | 12 +++++ lib/openai/internal/type/unknown.rb | 8 +++ lib/openai/internal/util.rb | 45 +++++++++++++++++ lib/openai/models.rb | 49 ++++++++++++------- lib/openai/models/all_models.rb | 6 --- .../models/audio/speech_create_params.rb | 4 -- 
.../audio/transcription_create_params.rb | 10 ---- .../audio/transcription_create_response.rb | 4 -- .../audio/transcription_stream_event.rb | 6 --- .../models/audio/translation_create_params.rb | 4 -- .../audio/translation_create_response.rb | 4 -- .../models/beta/assistant_create_params.rb | 13 ----- .../beta/assistant_response_format_option.rb | 11 ----- .../models/beta/assistant_stream_event.rb | 31 ------------ lib/openai/models/beta/assistant_tool.rb | 6 --- .../beta/assistant_tool_choice_option.rb | 9 ---- .../models/beta/message_stream_event.rb | 12 ----- .../models/beta/run_step_stream_event.rb | 14 ------ lib/openai/models/beta/run_stream_event.rb | 17 ------- .../beta/thread_create_and_run_params.rb | 37 -------------- .../models/beta/thread_create_params.rb | 33 ------------- lib/openai/models/beta/threads/annotation.rb | 6 --- .../models/beta/threads/annotation_delta.rb | 9 ---- lib/openai/models/beta/threads/message.rb | 9 ---- .../models/beta/threads/message_content.rb | 11 ----- .../beta/threads/message_content_delta.rb | 11 ----- .../threads/message_content_part_param.rb | 10 ---- .../beta/threads/message_create_params.rb | 24 --------- .../models/beta/threads/run_create_params.rb | 28 ----------- .../runs/code_interpreter_tool_call.rb | 9 ---- .../runs/code_interpreter_tool_call_delta.rb | 9 ---- .../models/beta/threads/runs/run_step.rb | 9 ---- .../beta/threads/runs/run_step_delta.rb | 9 ---- .../models/beta/threads/runs/tool_call.rb | 10 ---- .../beta/threads/runs/tool_call_delta.rb | 10 ---- ...chat_completion_assistant_message_param.rb | 21 -------- .../chat/chat_completion_content_part.rb | 11 ----- ...chat_completion_developer_message_param.rb | 4 -- .../chat/chat_completion_message_param.rb | 13 ----- .../chat_completion_prediction_content.rb | 4 -- .../chat_completion_system_message_param.rb | 4 -- .../chat_completion_tool_choice_option.rb | 9 ---- .../chat_completion_tool_message_param.rb | 4 -- .../chat_completion_user_message_param.rb | 16 ------ .../models/chat/completion_create_params.rb | 27 ---------- lib/openai/models/comparison_filter.rb | 4 -- lib/openai/models/completion_create_params.rb | 8 --- lib/openai/models/compound_filter.rb | 4 -- lib/openai/models/embedding_create_params.rb | 8 --- lib/openai/models/eval_create_params.rb | 41 ---------------- lib/openai/models/eval_create_response.rb | 22 --------- lib/openai/models/eval_list_response.rb | 22 --------- lib/openai/models/eval_retrieve_response.rb | 22 --------- lib/openai/models/eval_update_response.rb | 22 --------- ...create_eval_completions_run_data_source.rb | 38 -------------- .../create_eval_jsonl_run_data_source.rb | 9 ---- .../models/evals/run_cancel_response.rb | 48 ------------------ lib/openai/models/evals/run_create_params.rb | 48 ------------------ .../models/evals/run_create_response.rb | 48 ------------------ lib/openai/models/evals/run_list_response.rb | 48 ------------------ .../models/evals/run_retrieve_response.rb | 48 ------------------ lib/openai/models/file_chunking_strategy.rb | 6 --- .../models/file_chunking_strategy_param.rb | 6 --- .../fine_tuning/alpha/grader_run_params.rb | 16 ------ .../alpha/grader_validate_params.rb | 12 ----- .../alpha/grader_validate_response.rb | 12 ----- .../models/fine_tuning/dpo_hyperparameters.rb | 16 ------ .../models/fine_tuning/fine_tuning_job.rb | 12 ----- .../models/fine_tuning/job_create_params.rb | 12 ----- .../reinforcement_hyperparameters.rb | 24 --------- .../fine_tuning/reinforcement_method.rb | 12 ----- 
.../fine_tuning/supervised_hyperparameters.rb | 12 ----- .../models/graders/label_model_grader.rb | 10 ---- lib/openai/models/graders/multi_grader.rb | 12 ----- .../models/graders/score_model_grader.rb | 10 ---- .../models/image_create_variation_params.rb | 4 -- lib/openai/models/image_edit_params.rb | 8 --- lib/openai/models/image_generate_params.rb | 4 -- lib/openai/models/moderation_create_params.rb | 14 ------ .../models/moderation_multi_modal_input.rb | 4 -- .../models/responses/easy_input_message.rb | 15 ------ .../models/responses/file_search_tool.rb | 4 -- lib/openai/models/responses/response.rb | 10 ---- .../response_code_interpreter_tool_call.rb | 9 ---- .../responses/response_computer_tool_call.rb | 16 ------ .../models/responses/response_content.rb | 12 ----- .../response_content_part_added_event.rb | 6 --- .../response_content_part_done_event.rb | 6 --- .../responses/response_create_params.rb | 33 ------------- .../response_file_search_tool_call.rb | 4 -- .../responses/response_format_text_config.rb | 10 ---- .../responses/response_input_content.rb | 10 ---- .../models/responses/response_input_item.rb | 18 ------- lib/openai/models/responses/response_item.rb | 15 ------ .../models/responses/response_output_item.rb | 13 ----- .../responses/response_output_message.rb | 6 --- .../models/responses/response_output_text.rb | 10 ---- .../models/responses/response_stream_event.rb | 43 ---------------- .../response_text_annotation_delta_event.rb | 10 ---- lib/openai/models/responses/tool.rb | 11 ----- lib/openai/models/responses_model.rb | 10 ---- .../models/vector_store_search_params.rb | 8 --- .../models/vector_store_search_response.rb | 4 -- .../vector_stores/file_batch_create_params.rb | 4 -- .../vector_stores/file_create_params.rb | 4 -- .../vector_stores/file_update_params.rb | 4 -- .../models/vector_stores/vector_store_file.rb | 4 -- rbi/openai/internal/type/array_of.rbi | 6 +++ rbi/openai/internal/type/base_model.rbi | 5 ++ rbi/openai/internal/type/boolean.rbi | 6 +++ rbi/openai/internal/type/enum.rbi | 5 ++ rbi/openai/internal/type/file_input.rbi | 5 ++ rbi/openai/internal/type/hash_of.rbi | 6 +++ rbi/openai/internal/type/union.rbi | 5 ++ rbi/openai/internal/type/unknown.rbi | 6 +++ rbi/openai/internal/util.rbi | 31 ++++++++++++ sig/openai/internal/type/array_of.rbs | 3 ++ sig/openai/internal/type/base_model.rbs | 2 + sig/openai/internal/type/boolean.rbs | 3 ++ sig/openai/internal/type/enum.rbs | 2 + sig/openai/internal/type/file_input.rbs | 2 + sig/openai/internal/type/hash_of.rbs | 3 ++ sig/openai/internal/type/union.rbs | 2 + sig/openai/internal/type/unknown.rbs | 3 ++ sig/openai/internal/util.rbs | 12 +++++ 131 files changed, 252 insertions(+), 1462 deletions(-) diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 3c043142..80606cca 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -12,6 +12,7 @@ module Type # Array of items of a given type. 
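# Every converter hunk below repeats one pattern: mix in
# `SorbetRuntimeSupport` and implement `to_sorbet_type`, so that
# `lib/openai/models.rb` can derive each union's `Variants` alias
# mechanically instead of hand-writing it in each model file. Roughly:
#
#   OpenAI::Internal::Util.walk_namespaces(OpenAI::Models)
#     .grep(OpenAI::Internal::Type::Union)
#     .each { |u| u.define_sorbet_constant!(:Variants) { T.type_alias { u.to_sorbet_type } } }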
class ArrayOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -110,6 +111,13 @@ def dump(value, state:) end end + # @api private + # + # @return [Object] + def to_sorbet_type + T::Array[OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(item_type)] + end + # @api private # # @return [generic] diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 7fd973f0..2ed6657f 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -304,6 +304,13 @@ def dump(value, state:) acc end + + # @api private + # + # @return [Object] + def to_sorbet_type + self + end end class << self diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index 3e852539..23c4d1f9 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -10,6 +10,7 @@ module Type # Ruby has no Boolean class; this is something for models to refer to. class Boolean extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -56,6 +57,13 @@ def coerce(value, state:) # @option state [Boolean] :can_retry # # @return [Boolean, Object] + + # @api private + # + # @return [Object] + def to_sorbet_type + T::Boolean + end end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index c28bf11f..9dd70f63 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -112,6 +112,18 @@ def coerce(value, state:) # # @return [Symbol, Object] + # @api private + # + # @return [Object] + def to_sorbet_type + case values + in [] + T.noreturn + in [value, *_] + T.all(OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(value), self) + end + end + # @api private # # @param depth [Integer] diff --git a/lib/openai/internal/type/file_input.rb b/lib/openai/internal/type/file_input.rb index 3ed13ec6..e1e948f3 100644 --- a/lib/openai/internal/type/file_input.rb +++ b/lib/openai/internal/type/file_input.rb @@ -89,6 +89,13 @@ def dump(value, state:) value end + + # @api private + # + # @return [Object] + def to_sorbet_type + T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) + end end end end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 6e60bc15..9dcf259b 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -12,6 +12,7 @@ module Type # Hash of items of a given type. 
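# One subtlety visible in the `Enum` hunk above: the alias depends on the
# member list. An empty enum maps to `T.noreturn`; otherwise the first
# member's sorbet type is intersected with the tag module itself, i.e.
# (enum name hypothetical):
#
#   SomeEnum.to_sorbet_type # => T.all(<first member's type>, SomeEnum)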
class HashOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -130,6 +131,13 @@ def dump(value, state:) end end + # @api private + # + # @return [Object] + def to_sorbet_type + T::Hash[OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(item_type)] + end + # @api private # # @return [generic] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 47a040ff..3eed40a8 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -216,6 +216,18 @@ def dump(value, state:) super end + # @api private + # + # @return [Object] + def to_sorbet_type + case (v = variants) + in [] + T.noreturn + else + T.any(*v.map { OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(_1) }) + end + end + # rubocop:enable Style/CaseEquality # rubocop:enable Style/HashEachMethods diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index a629570f..bc8b7a95 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -10,6 +10,7 @@ module Type # When we don't know what to expect for the value. class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport # rubocop:disable Lint/UnusedMethodArgument @@ -58,6 +59,13 @@ def coerce(value, state:) # @option state [Boolean] :can_retry # # @return [Object] + + # @api private + # + # @return [Object] + def to_sorbet_type + T.anything + end end # rubocop:enable Lint/UnusedMethodArgument diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index b33fa88f..c84c9e78 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -9,6 +9,23 @@ module Util # @return [Float] def self.monotonic_secs = Process.clock_gettime(Process::CLOCK_MONOTONIC) + # @api private + # + # @param ns [Module, Class] + # + # @return [Enumerable] + def self.walk_namespaces(ns) + ns.constants(false).lazy.flat_map do + case (c = ns.const_get(_1, false)) + in Module | Class + walk_namespaces(c) + else + [] + end + end + .chain([ns]) + end + class << self # @api private # @@ -826,11 +843,39 @@ def const_missing(name) sorbet_runtime_constants.fetch(name).call end + # @api private + # + # @param name [Symbol] + # + # @return [Boolean] + def sorbet_constant_defined?(name) = sorbet_runtime_constants.key?(name) + # @api private # # @param name [Symbol] # @param blk [Proc] def define_sorbet_constant!(name, &blk) = sorbet_runtime_constants.store(name, blk) + + # @api private + # + # @return [Object] + def to_sorbet_type = raise NotImplementedError + + class << self + # @api private + # + # @param type [OpenAI::Internal::Util::SorbetRuntimeSupport, Object] + # + # @return [Object] + def to_sorbet_type(type) + case type + in OpenAI::Internal::Util::SorbetRuntimeSupport + type.to_sorbet_type + else + type + end + end + end end extend OpenAI::Internal::Util::SorbetRuntimeSupport diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 6406a297..887d85e6 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -5,29 +5,40 @@ module OpenAI cls.define_sorbet_constant!(:OrHash) { T.type_alias { T.any(cls, OpenAI::Internal::AnyHash) } } end - [ - *OpenAI::Internal::Type::Enum.included_modules, - *OpenAI::Internal::Type::Union.included_modules - ].each do |cls| - cls.constants.each do |name| - case cls.const_get(name) - in true | false - cls.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, cls) 
} } - cls.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } - in Integer - cls.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, cls) } } - cls.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } - in Float - cls.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, cls) } } - cls.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } - in Symbol - cls.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, cls) } } - cls.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } - else + OpenAI::Internal::Util.walk_namespaces(OpenAI::Models).each do |mod| + case mod + in OpenAI::Internal::Type::Enum | OpenAI::Internal::Type::Union + mod.constants.each do |name| + case mod.const_get(name) + in true | false + mod.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, mod) } } + mod.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } + in Integer + mod.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, mod) } } + mod.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } + in Float + mod.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, mod) } } + mod.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } + in Symbol + mod.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, mod) } } + mod.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } + else + end end + else end end + OpenAI::Internal::Util.walk_namespaces(OpenAI::Models) + .lazy + .grep(OpenAI::Internal::Type::Union) + .each do |mod| + const = :Variants + next if mod.sorbet_constant_defined?(const) + + mod.define_sorbet_constant!(const) { T.type_alias { mod.to_sorbet_type } } + end + AllModels = OpenAI::Models::AllModels Audio = OpenAI::Models::Audio diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index a337cfe0..dd458b9f 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -25,12 +25,6 @@ module ResponsesOnlyModel # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::AllModels::ResponsesOnlyModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(String, OpenAI::ChatModel::TaggedSymbol, OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol) - end - end end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 6a4e469f..9fab3b47 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -81,10 +81,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::Audio::SpeechModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::Audio::SpeechModel::TaggedSymbol) } - end end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 0c99423b..ff021271 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -131,10 +131,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::AudioModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } - end end # Controls how the audio is cut into chunks. 
When set to `"auto"`, the server @@ -209,12 +205,6 @@ module Type # @!method self.variants # @return [Array(Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig) - end - end end module TimestampGranularity diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 8f0ea45a..ec74e17e 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -20,10 +20,6 @@ module TranscriptionCreateResponse # @!method self.variants # @return [Array(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose) } - end end end end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 93079e55..9d386b9b 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -20,12 +20,6 @@ module TranscriptionStreamEvent # @!method self.variants # @return [Array(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent) - end - end end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index a3594a7c..ed3107e2 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -76,10 +76,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::AudioModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } - end end # The format of the output, in one of these options: `json`, `text`, `srt`, diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index c0fcb1fc..f24d4b2b 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -13,10 +13,6 @@ module TranslationCreateResponse # @!method self.variants # @return [Array(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose) } - end end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index c3a32b57..f05d1764 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -162,10 +162,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end end class ToolResources < OpenAI::Internal::Type::BaseModel @@ -366,15 +362,6 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, 
OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 4e58c10e..411077cb 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -44,17 +44,6 @@ module AssistantResponseFormatOption # @!method self.variants # @return [Array(Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - end - end end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 6f78210f..74345189 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -730,37 +730,6 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Beta::AssistantStreamEvent::ErrorEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - 
OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) - end - end end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index c6f7c311..111defb9 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -16,12 +16,6 @@ module AssistantTool # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool) - end - end end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 0561c756..5a87d00a 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -36,15 +36,6 @@ module Auto # @!method self.variants # @return [Array(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - end - end end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 564f9417..ce394898 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -161,18 +161,6 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, - OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, - OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete - ) - end - end end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 165aeefc..40fef09f 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -213,20 +213,6 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return 
[Array(OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired - ) - end - end end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 8c2d8801..ca41f968 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -292,23 +292,6 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Beta::RunStreamEvent::ThreadRunExpired)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::RunStreamEvent::ThreadRunCreated, - OpenAI::Beta::RunStreamEvent::ThreadRunQueued, - OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, - OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, - OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::RunStreamEvent::ThreadRunFailed, - OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, - OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, - OpenAI::Beta::RunStreamEvent::ThreadRunExpired - ) - end - end end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index f9addd97..5d4533ef 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -214,10 +214,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end end class Thread < OpenAI::Internal::Type::BaseModel @@ -336,21 +332,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -420,15 +401,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method 
self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - end - end end end end @@ -633,15 +605,6 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index df5b80d4..c4d1c025 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -113,21 +113,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -197,15 +182,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - end - end end end end @@ -404,15 +380,6 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 080262b3..c110cbbd 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -20,12 +20,6 @@ module Annotation # @!method self.variants # @return [Array(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation) - end - end end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb 
b/lib/openai/models/beta/threads/annotation_delta.rb index 2dbd5956..e5b290d5 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -20,15 +20,6 @@ module AnnotationDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Beta::Threads::FilePathDeltaAnnotation)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index ad5cd0f2..19af32cc 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -184,15 +184,6 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - end - end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index f40e35dd..295d6858 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -25,17 +25,6 @@ module MessageContent # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 80ecb9cb..a6b04dc3 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -25,17 +25,6 @@ module MessageContentDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileDeltaBlock, OpenAI::Beta::Threads::TextDeltaBlock, OpenAI::Beta::Threads::RefusalDeltaBlock, OpenAI::Beta::Threads::ImageURLDeltaBlock)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - OpenAI::Beta::Threads::ImageURLDeltaBlock - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 65e1a5b3..93fd228a 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -22,16 +22,6 @@ module MessageContentPartParam # @!method self.variants # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - 
OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 11c3ea83..223feced 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -74,21 +74,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -156,15 +141,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 65cf8129..773bbb7b 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -303,21 +303,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Beta::Threads::MessageContentPartParam }] @@ -387,15 +372,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - end - end end end end @@ -414,10 +390,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end end class TruncationStrategy < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 5d03c473..4c7b9f6a 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -148,15 +148,6 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] - - 
define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 6ce54421..98ceaa6b 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -89,15 +89,6 @@ module Output # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 656fc313..2dc26909 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -223,15 +223,6 @@ module StepDetails # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ) - end - end end # The type of run step, which can be either `message_creation` or `tool_calls`. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index a4848a8c..4666af0b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -33,15 +33,6 @@ module StepDetails # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 9452c475..4140ec79 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -20,16 +20,6 @@ module ToolCall # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Beta::Threads::Runs::FunctionToolCall)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - end - end end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index 10ad55e8..ab51e0a3 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -20,16 +20,6 @@ module ToolCallDelta # @!method self.variants # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, 
OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Beta::Threads::Runs::FunctionToolCallDelta)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) - end - end end end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index b49a8303..2f9fbe2b 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -126,32 +126,11 @@ module ArrayOfContentPart # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - end - end end # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - )] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] ArrayOfContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index a79c7dbf..64a02cf0 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -77,17 +77,6 @@ class File < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - end - end end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 492403d8..31fafa47 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -54,10 +54,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end - # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index b3ba243b..6710bee4 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -34,19 +34,6 @@ module ChatCompletionMessageParam # @!method self.variants # @return [Array(OpenAI::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Chat::ChatCompletionSystemMessageParam, 
OpenAI::Chat::ChatCompletionUserMessageParam, OpenAI::Chat::ChatCompletionAssistantMessageParam, OpenAI::Chat::ChatCompletionToolMessageParam, OpenAI::Chat::ChatCompletionFunctionMessageParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Chat::ChatCompletionSystemMessageParam, - OpenAI::Chat::ChatCompletionUserMessageParam, - OpenAI::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Chat::ChatCompletionToolMessageParam, - OpenAI::Chat::ChatCompletionFunctionMessageParam - ) - end - end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index 0b0ba1f0..eeccd9df 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -50,10 +50,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end - # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 746905d7..43b7a5c4 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -54,10 +54,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end - # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 506d3899..4b8a6fe3 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -37,15 +37,6 @@ module Auto # @!method self.variants # @return [Array(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice - ) - end - end end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 6ab285b1..c88bb51c 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -46,10 +46,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end - # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 33a1f532..ffd7b68a 100644 --- 
a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -51,22 +51,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 69ccac80..3a3a3c23 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -412,10 +412,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end end # @deprecated @@ -458,15 +454,6 @@ module FunctionCallMode # @!method self.variants # @return [Array(Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, - OpenAI::Chat::ChatCompletionFunctionCallOption - ) - end - end end # @deprecated @@ -546,16 +533,6 @@ module ResponseFormat # @!method self.variants # @return [Array(OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONSchema, - OpenAI::ResponseFormatJSONObject - ) - end - end end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -600,10 +577,6 @@ module Stop # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(String, T::Array[String])) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2847efa6..582e2f5a 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -81,10 +81,6 @@ module Value # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index bc8a3619..27e721da 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -274,10 +274,6 @@ module Prompt # @!method self.variants # @return [Array(String, Array, Array, Array>)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -302,10 +298,6 @@ module Stop # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(String, T::Array[String])) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 2791671b..8bfaf4d2 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -38,10 +38,6 @@ module Filter # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, Object)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, T.anything) } - end end # Type of operation: `and` or `or`. diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 51f4ffa2..6e76d74e 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -96,10 +96,6 @@ module Input # @!method self.variants # @return [Array(String, Array, Array, Array>)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -125,10 +121,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::EmbeddingModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::EmbeddingModel::TaggedSymbol) } - end end # The format to return the embeddings in. 
Can be either `float` or diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 395ac73a..5e76ac50 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -159,16 +159,6 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::EvalCreateParams::DataSourceConfig::Logs, - OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions - ) - end - end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -377,16 +367,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -420,15 +400,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem - ) - end - end end end @@ -473,18 +444,6 @@ class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Graders::StringCheckGrader, - OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, - OpenAI::EvalCreateParams::TestingCriterion::Python, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel - ) - end - end end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 965ad60b..6fa95a6d 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -144,16 +144,6 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end - end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -218,18 +208,6 @@ class EvalGraderScoreModel < 
OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end end end end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index 88b9d44f..3012d7c1 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -144,16 +144,6 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end - end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -218,18 +208,6 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 74b959f6..6dd4004d 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -144,16 +144,6 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end - end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -218,18 +208,6 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, 
OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index bf636971..592ed41e 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -144,16 +144,6 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end - end end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -218,18 +208,6 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader # @!method self.variants # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index b91eadce..8ac9aff1 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -192,16 +192,6 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions - ) - end - end end # The type of run data source. Always `completions`. 
@@ -375,16 +365,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -418,15 +398,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - ) - end - end end end @@ -451,15 +422,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#sampling_params diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index abeca553..37c22655 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -95,15 +95,6 @@ class FileID < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, - OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID - ) - end - end end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 892903bb..bbcbb6e7 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -376,16 +376,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses - ) - end - end end # Used when sampling from a model. 
Dictates the structure of the messages passed @@ -547,16 +537,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -590,15 +570,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) - end - end end end @@ -623,15 +594,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#sampling_params @@ -673,16 +635,6 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses - ) - end - end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 810e3b1d..d300ba18 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -304,16 +304,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, - 
OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - end end # The type of run data source. Always `responses`. @@ -507,16 +497,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -550,15 +530,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - ) - end - end end end @@ -583,15 +554,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params @@ -633,16 +595,6 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource - ) - end - end end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index e3953ad0..f3cba684 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -376,16 +376,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - 
OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses - ) - end - end end # Used when sampling from a model. Dictates the structure of the messages passed @@ -547,16 +537,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -590,15 +570,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) - end - end end end @@ -623,15 +594,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#sampling_params @@ -673,16 +635,6 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses - ) - end - end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 25b8de4c..bd896908 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -376,16 +376,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - 
OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses - ) - end - end end # Used when sampling from a model. Dictates the structure of the messages passed @@ -547,16 +537,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -590,15 +570,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) - end - end end end @@ -623,15 +594,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#sampling_params @@ -673,16 +635,6 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses - ) - end - end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index f3ee8394..cdba5c53 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -376,16 +376,6 @@ class Responses < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - 
OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses - ) - end - end end # Used when sampling from a model. Dictates the structure of the messages passed @@ -551,16 +541,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -594,15 +574,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) - end - end end end @@ -627,15 +598,6 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference - ) - end - end end # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#sampling_params @@ -677,16 +639,6 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses - ) - end - end end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 117b02fa..5ee317be 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -15,12 +15,6 @@ module FileChunkingStrategy # @!method self.variants # @return [Array(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject) - end - end end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb 
b/lib/openai/models/file_chunking_strategy_param.rb index c70c3336..6a46bdfc 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -17,12 +17,6 @@ module FileChunkingStrategyParam # @!method self.variants # @return [Array(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam) - end - end end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb index e425c638..921ece7c 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -62,18 +62,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end end # The reference answer for the evaluation. @@ -91,10 +79,6 @@ module ReferenceAnswer # @!method self.variants # @return [Array(String, Object, Array, Float)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T.anything, T::Array[T.anything], Float) } - end - # @type [OpenAI::Internal::Type::Converter] UnionMember2Array = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb index 6ec580aa..a252fb6a 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb @@ -41,18 +41,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb index f0c28dcc..a89553ee 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb @@ -38,18 +38,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end end end end diff --git a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb 
b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb index 54b87256..b57d2245 100644 --- a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb @@ -60,10 +60,6 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # The beta value for the DPO method. A higher beta value will increase the weight @@ -79,10 +75,6 @@ module Beta # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -98,10 +90,6 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -117,10 +105,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 189aabae..b6041041 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -274,10 +274,6 @@ module BatchSize # @!method self.variants # @return [Array(Object, Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(T.anything, Symbol, Integer)) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -293,10 +289,6 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -312,10 +304,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 9b1d6016..f735d85b 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -212,10 +212,6 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -231,10 +227,6 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -250,10 +242,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb index ee22496e..7df826e1 100644 --- a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -87,10 +87,6 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Multiplier on amount of compute used for exploring search space during training. @@ -105,10 +101,6 @@ module ComputeMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of training steps between evaluation runs. @@ -123,10 +115,6 @@ module EvalInterval # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Number of evaluation samples to generate per training step. @@ -141,10 +129,6 @@ module EvalSamples # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -160,10 +144,6 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -179,10 +159,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Level of reasoning effort. diff --git a/lib/openai/models/fine_tuning/reinforcement_method.rb b/lib/openai/models/fine_tuning/reinforcement_method.rb index cfe966c9..d2955f1e 100644 --- a/lib/openai/models/fine_tuning/reinforcement_method.rb +++ b/lib/openai/models/fine_tuning/reinforcement_method.rb @@ -46,18 +46,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end end end end diff --git a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb index 2a907416..612870f0 100644 --- a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb @@ -51,10 +51,6 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. 
A smaller learning rate may be useful to @@ -70,10 +66,6 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -89,10 +81,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end end diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index 801b3432..99e0e087 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -133,16 +133,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index 90a78fdc..7b93a32c 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -62,18 +62,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::LabelModelGrader - ) - end - end end end end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 7742ec75..62cf1a6c 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -133,16 +133,6 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText - ) - end - end end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index f83ea80a..66ad7ea0 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -80,10 +80,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end end # The format in which the generated images are returned. 
Must be one of `url` or diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index aec90bd0..c38c7821 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -137,10 +137,6 @@ module Image # @!method self.variants # @return [Array(StringIO, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(StringIO, T::Array[StringIO]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput] end @@ -176,10 +172,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end end # The quality of the image that will be generated. `high`, `medium` and `low` are diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index a3aca3cb..c46f1558 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -172,10 +172,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end end # Control the content-moderation level for images generated by `gpt-image-1`. Must diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 72f3a355..59e0cdd9 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -50,16 +50,6 @@ module Input # @!method self.variants # @return [Array(String, Array, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[String], - T::Array[T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] - ) - end - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -84,10 +74,6 @@ module Model # @!method self.variants # @return [Array(String, Symbol, OpenAI::ModerationModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ModerationModel::TaggedSymbol) } - end end end end diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index ef09466b..5142ae94 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -16,10 +16,6 @@ module ModerationMultiModalInput # @!method self.variants # @return [Array(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput) } - end end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index ecf0c374..3cdb0887 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -56,21 +56,6 @@ module Content # @!method self.variants # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) - end - end end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 938f84c6..515ef7a2 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -67,10 +67,6 @@ module Filters # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } - end end # @see OpenAI::Responses::FileSearchTool#ranking_options diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index d8358f27..4813a1af 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -328,16 +328,6 @@ module ToolChoice # @!method self.variants # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - end - end end # Specifies the latency tier to use for processing the request. This parameter is diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 81114445..16bac78f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -142,15 +142,6 @@ class File < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - end - end end # The status of the code interpreter tool call. 
diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 9a70b230..b18746b0 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -424,22 +424,6 @@ class Wait < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Responses::ResponseComputerToolCall::Action::Wait - ) - end - end end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 7b4e0c77..95e1afdd 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -24,18 +24,6 @@ module ResponseContent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile, OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile, - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) - end - end end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 6b16ea84..1bb0cddf 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -66,12 +66,6 @@ module Part # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index e2af41dd..56b1e274 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -66,12 +66,6 @@ module Part # @!method self.variants # @return 
[Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index affb7fa5..4a38c09e 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -275,29 +275,6 @@ module Input # @!method self.variants # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ) - end - end end # Specifies the latency tier to use for processing the request. This parameter is @@ -353,16 +330,6 @@ module ToolChoice # @!method self.variants # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - end - end end # The truncation strategy to use for the model response. 
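The unions trimmed here keep their documented `variants` accessor (per the retained `# @!method self.variants` tags), so only the Sorbet alias registration goes away; runtime behavior is unchanged. A minimal sketch of dispatching over the `Input` union documented above (`@return [Array(String, Array)]`); the sample value and the branch bodies are illustrative assumptions, not code from this patch:

    # Hypothetical dispatch over ResponseCreateParams::Input's documented
    # variants; `input` stands in for a caller-supplied value.
    input = "Tell me a joke."                     # a String variant
    case input
    in String then puts "single text prompt"      # assumed handling
    in Array  then input.each { |item| p item }   # assumed handling of input items
    end
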
diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index f8f7fc1b..cc1b88ec 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -138,10 +138,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index d81ab862..81d0ad54 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -36,16 +36,6 @@ module ResponseFormatTextConfig # @!method self.variants # @return [Array(OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::ResponseFormatText, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::ResponseFormatJSONObject - ) - end - end end end end diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index d518271c..7e349985 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -20,16 +20,6 @@ module ResponseInputContent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - end - end end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 62cf20ee..8ce7e375 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -357,24 +357,6 @@ module Type # @!method self.variants # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - end - end end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index 
fe6001ab..e0bd4301 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -36,21 +36,6 @@ module ResponseItem # @!method self.variants # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) - end - end end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 502acd49..8e653d3a 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -36,19 +36,6 @@ module ResponseOutputItem # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - end - end end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index d0d7f291..2e7eee71 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -68,12 +68,6 @@ module Content # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end end # The status of the message input. 
One of `in_progress`, `completed`, or diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index fc08cb56..6234918b 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -157,16 +157,6 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Responses::ResponseOutputText::Annotation::FilePath - ) - end - end end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 4f6c6c4a..48456245 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -140,49 +140,6 @@ module ResponseStreamEvent # @!method self.variants # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseAudioDeltaEvent, - OpenAI::Responses::ResponseAudioDoneEvent, - OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - 
OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Responses::ResponseCompletedEvent, - OpenAI::Responses::ResponseContentPartAddedEvent, - OpenAI::Responses::ResponseContentPartDoneEvent, - OpenAI::Responses::ResponseCreatedEvent, - OpenAI::Responses::ResponseErrorEvent, - OpenAI::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Responses::ResponseInProgressEvent, - OpenAI::Responses::ResponseFailedEvent, - OpenAI::Responses::ResponseIncompleteEvent, - OpenAI::Responses::ResponseOutputItemAddedEvent, - OpenAI::Responses::ResponseOutputItemDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, - OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, - OpenAI::Responses::ResponseRefusalDeltaEvent, - OpenAI::Responses::ResponseRefusalDoneEvent, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Responses::ResponseTextDeltaEvent, - OpenAI::Responses::ResponseTextDoneEvent, - OpenAI::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent - ) - end - end end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index f8cc77c2..8ffc14a6 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -188,16 +188,6 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - end - end end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 2d2c6702..97939459 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -23,17 +23,6 @@ module Tool # @!method self.variants # @return [Array(OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::WebSearchTool)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) - end - end end end end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index a35d5278..5634269f 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -25,16 +25,6 @@ module 
ResponsesOnlyModel # @!method self.variants # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ) - end - end end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 2009716d..32bf6133 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -65,10 +65,6 @@ module Query # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -85,10 +81,6 @@ module Filters # @!method self.variants # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } - end end class RankingOptions < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 31f23ac7..5b623829 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -66,10 +66,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end class Content < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index f590ab7f..5fd03105 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -60,10 +60,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 0d1bc6ed..8cc4cee4 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -60,10 +60,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 30c15708..be3d5d7b 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -48,10 +48,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 594fd1bc..eab6df4b 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -163,10 +163,6 @@ module Attribute # @!method self.variants # 
@return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/rbi/openai/internal/type/array_of.rbi b/rbi/openai/internal/type/array_of.rbi index 9cc138b7..28eaab6f 100644 --- a/rbi/openai/internal/type/array_of.rbi +++ b/rbi/openai/internal/type/array_of.rbi @@ -8,6 +8,7 @@ module OpenAI # Array of items of a given type. class ArrayOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport abstract! @@ -63,6 +64,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { returns(Elem) } protected def item_type diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi index 15fabf91..df16ad79 100644 --- a/rbi/openai/internal/type/base_model.rbi +++ b/rbi/openai/internal/type/base_model.rbi @@ -190,6 +190,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end class << self diff --git a/rbi/openai/internal/type/boolean.rbi b/rbi/openai/internal/type/boolean.rbi index b7cc1e3d..73bf95cb 100644 --- a/rbi/openai/internal/type/boolean.rbi +++ b/rbi/openai/internal/type/boolean.rbi @@ -8,6 +8,7 @@ module OpenAI # Ruby has no Boolean class; this is something for models to refer to. class Boolean extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport abstract! @@ -43,6 +44,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/type/enum.rbi b/rbi/openai/internal/type/enum.rbi index 1a1c4c47..fa242fb4 100644 --- a/rbi/openai/internal/type/enum.rbi +++ b/rbi/openai/internal/type/enum.rbi @@ -67,6 +67,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { params(depth: Integer).returns(String) } def inspect(depth: 0) diff --git a/rbi/openai/internal/type/file_input.rbi b/rbi/openai/internal/type/file_input.rbi index 19e6c2e1..09dde1b1 100644 --- a/rbi/openai/internal/type/file_input.rbi +++ b/rbi/openai/internal/type/file_input.rbi @@ -47,6 +47,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/type/hash_of.rbi b/rbi/openai/internal/type/hash_of.rbi index 25123f07..d9f9f9ec 100644 --- a/rbi/openai/internal/type/hash_of.rbi +++ b/rbi/openai/internal/type/hash_of.rbi @@ -8,6 +8,7 @@ module OpenAI # Hash of items of a given type. class HashOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport abstract! 
@@ -63,6 +64,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { returns(Elem) } protected def item_type diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi index f8598117..7c6958c1 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -101,6 +101,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { params(depth: Integer).returns(String) } def inspect(depth: 0) diff --git a/rbi/openai/internal/type/unknown.rbi b/rbi/openai/internal/type/unknown.rbi index 48a18c80..3b5d1139 100644 --- a/rbi/openai/internal/type/unknown.rbi +++ b/rbi/openai/internal/type/unknown.rbi @@ -8,6 +8,7 @@ module OpenAI # When we don't know what to expect for the value. class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport abstract! @@ -43,6 +44,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index fe1e8cac..ddce5834 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -11,6 +11,15 @@ module OpenAI def self.monotonic_secs end + # @api private + sig do + params(ns: T.any(Module, T::Class[T.anything])).returns( + T::Enumerable[T.any(Module, T::Class[T.anything])] + ) + end + def self.walk_namespaces(ns) + end + class << self # @api private sig { returns(String) } @@ -441,10 +450,32 @@ module OpenAI def const_missing(name) end + # @api private + sig { params(name: Symbol).returns(T::Boolean) } + def sorbet_constant_defined?(name) + end + # @api private sig { params(name: Symbol, blk: T.proc.returns(T.anything)).void } def define_sorbet_constant!(name, &blk) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + + class << self + # @api private + sig do + params( + type: + T.any(OpenAI::Internal::Util::SorbetRuntimeSupport, T.anything) + ).returns(T.anything) + end + def to_sorbet_type(type) + end + end end end end diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 80fcc2a2..000ed3f5 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class ArrayOf[Elem] include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport def self.[]: ( ::Hash[Symbol, top] @@ -27,6 +28,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Array[top] | top) + def to_sorbet_type: -> top + def item_type: -> Elem def nilable?: -> bool diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 67646785..f9e57a2e 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -68,6 +68,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[top, top] | top) + def self.to_sorbet_type: -> top + def self.recursively_to_h: ( OpenAI::Internal::Type::BaseModel model, convert: bool diff --git a/sig/openai/internal/type/boolean.rbs b/sig/openai/internal/type/boolean.rbs index 0a654ab4..04edea4b 100644 --- a/sig/openai/internal/type/boolean.rbs +++ b/sig/openai/internal/type/boolean.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class Boolean 
extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport def self.===: (top other) -> bool @@ -17,6 +18,8 @@ module OpenAI bool | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (bool | top) + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 69f1c6bd..2b6f3fb2 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -23,6 +23,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (Symbol | top) + def to_sorbet_type: -> top + def inspect: (?depth: Integer) -> String end end diff --git a/sig/openai/internal/type/file_input.rbs b/sig/openai/internal/type/file_input.rbs index 862c2111..db81644c 100644 --- a/sig/openai/internal/type/file_input.rbs +++ b/sig/openai/internal/type/file_input.rbs @@ -17,6 +17,8 @@ module OpenAI Pathname | StringIO | IO | String | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (Pathname | StringIO | IO | String | top) + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index 26f65397..1c9d1d58 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class HashOf[Elem] include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport def self.[]: ( ::Hash[Symbol, top] @@ -27,6 +28,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[Symbol, top] | top) + def to_sorbet_type: -> top + def item_type: -> Elem def nilable?: -> bool diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 57d122c3..7a01ff6e 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -43,6 +43,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> top + def to_sorbet_type: -> top + def inspect: (?depth: Integer) -> String end end diff --git a/sig/openai/internal/type/unknown.rbs b/sig/openai/internal/type/unknown.rbs index 0f9142d2..249f91dc 100644 --- a/sig/openai/internal/type/unknown.rbs +++ b/sig/openai/internal/type/unknown.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport def self.===: (top other) -> bool @@ -17,6 +18,8 @@ module OpenAI top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> top + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index c8416c7c..ec425e9f 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -5,6 +5,10 @@ module OpenAI def self?.monotonic_secs: -> Float + def self?.walk_namespaces: ( + Module | Class ns + ) -> Enumerable[(Module | Class)] + def self?.arch: -> String def self?.os: -> String @@ -166,7 +170,15 @@ module OpenAI def const_missing: (Symbol name) -> void + def sorbet_constant_defined?: (Symbol name) -> bool + def define_sorbet_constant!: (Symbol name) { -> top } -> void + + def to_sorbet_type: -> top + + def self.to_sorbet_type: ( + OpenAI::Internal::Util::SorbetRuntimeSupport | top `type` + ) -> top end end end From 1cd87ce1e8eeb4de437fa0c41bc6585f9a259314 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 20:12:00 +0000 
Subject: [PATCH 198/295] chore: use sorbet union aliases where available --- .../models/audio/speech_create_params.rbi | 21 +-- .../audio/transcription_create_params.rbi | 8 +- .../audio/translation_create_params.rbi | 6 +- rbi/openai/models/beta/assistant.rbi | 37 +---- .../models/beta/assistant_create_params.rbi | 6 +- .../models/beta/assistant_update_params.rbi | 25 +--- .../beta/thread_create_and_run_params.rbi | 55 +++----- .../models/beta/thread_create_params.rbi | 39 +----- rbi/openai/models/beta/threads/message.rbi | 30 +---- .../beta/threads/message_create_params.rbi | 39 +----- .../models/beta/threads/message_delta.rbi | 16 +-- rbi/openai/models/beta/threads/run.rbi | 51 +------ .../models/beta/threads/run_create_params.rbi | 55 +++----- .../runs/code_interpreter_tool_call.rbi | 10 +- .../runs/code_interpreter_tool_call_delta.rbi | 10 +- .../models/beta/threads/runs/run_step.rbi | 10 +- .../beta/threads/runs/run_step_delta.rbi | 10 +- .../threads/runs/tool_call_delta_object.rbi | 14 +- .../threads/runs/tool_calls_step_details.rbi | 18 +-- rbi/openai/models/beta/threads/text.rbi | 18 +-- rbi/openai/models/beta/threads/text_delta.rbi | 14 +- ...hat_completion_assistant_message_param.rbi | 35 +---- .../chat/chat_completion_audio_param.rbi | 21 +-- ...hat_completion_developer_message_param.rbi | 12 +- .../chat_completion_prediction_content.rbi | 12 +- .../chat_completion_system_message_param.rbi | 12 +- .../chat_completion_tool_message_param.rbi | 12 +- .../chat_completion_user_message_param.rbi | 45 +------ .../models/chat/completion_create_params.rbi | 18 ++- rbi/openai/models/comparison_filter.rbi | 6 +- .../models/completion_create_params.rbi | 46 ++----- rbi/openai/models/embedding_create_params.rbi | 33 +---- rbi/openai/models/eval_create_response.rbi | 30 +---- rbi/openai/models/eval_list_response.rbi | 32 +---- rbi/openai/models/eval_retrieve_response.rbi | 28 +--- rbi/openai/models/eval_update_response.rbi | 30 +---- .../models/evals/run_cancel_response.rbi | 56 ++------ .../models/evals/run_create_response.rbi | 56 ++------ rbi/openai/models/evals/run_list_response.rbi | 58 ++------ .../models/evals/run_retrieve_response.rbi | 56 ++------ .../fine_tuning/alpha/grader_run_params.rbi | 8 +- .../alpha/grader_validate_response.rbi | 16 +-- .../fine_tuning/dpo_hyperparameters.rbi | 75 ++++++++--- .../models/fine_tuning/fine_tuning_job.rbi | 60 +++++++-- .../models/fine_tuning/job_create_params.rbi | 81 +++++++---- .../reinforcement_hyperparameters.rbi | 126 ++++++++++++++---- .../supervised_hyperparameters.rbi | 63 +++++++-- .../models/image_create_variation_params.rbi | 9 +- rbi/openai/models/image_edit_params.rbi | 36 +---- rbi/openai/models/image_generate_params.rbi | 6 +- .../models/moderation_create_params.rbi | 54 ++------ .../models/responses/easy_input_message.rbi | 47 +------ rbi/openai/models/responses/response.rbi | 89 ++----------- .../response_code_interpreter_tool_call.rbi | 10 +- .../response_content_part_added_event.rbi | 10 +- .../response_content_part_done_event.rbi | 10 +- .../responses/response_create_params.rbi | 101 +------------- .../response_file_search_tool_call.rbi | 21 ++- .../responses/response_input_message_item.rbi | 18 +-- .../models/responses/response_item_list.rbi | 31 +---- .../response_output_item_added_event.rbi | 23 +--- .../response_output_item_done_event.rbi | 23 +--- .../response_text_annotation_delta_event.rbi | 12 +- .../models/vector_store_search_params.rbi | 6 +- .../models/vector_store_search_response.rbi | 23 +++- 
.../file_batch_create_params.rbi | 23 +++- .../vector_stores/file_create_params.rbi | 23 +++- .../vector_stores/file_update_params.rbi | 23 +++- .../vector_stores/vector_store_file.rbi | 38 +++--- rbi/openai/resources/audio/speech.rbi | 5 +- rbi/openai/resources/audio/transcriptions.rbi | 14 +- rbi/openai/resources/audio/translations.rbi | 6 +- rbi/openai/resources/beta/assistants.rbi | 8 +- rbi/openai/resources/beta/threads.rbi | 37 ++--- .../resources/beta/threads/messages.rbi | 11 +- rbi/openai/resources/beta/threads/runs.rbi | 64 ++------- rbi/openai/resources/chat/completions.rbi | 10 +- rbi/openai/resources/completions.rbi | 28 +--- rbi/openai/resources/embeddings.rbi | 10 +- .../resources/fine_tuning/alpha/graders.rbi | 2 +- rbi/openai/resources/fine_tuning/jobs.rbi | 6 +- rbi/openai/resources/images.rbi | 15 +-- rbi/openai/resources/moderations.rbi | 14 +- rbi/openai/resources/responses.rbi | 93 +------------ .../resources/responses/input_items.rbi | 11 +- rbi/openai/resources/vector_stores.rbi | 2 +- .../resources/vector_stores/file_batches.rbi | 7 +- rbi/openai/resources/vector_stores/files.rbi | 14 +- 88 files changed, 784 insertions(+), 1728 deletions(-) diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 87b9e0b9..087fb21d 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -18,18 +18,14 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(T.any(String, OpenAI::Audio::SpeechModel::OrSymbol)) } + sig { returns(OpenAI::Audio::SpeechCreateParams::Model::Variants) } attr_accessor :model # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig do - returns( - T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol) - ) - end + sig { returns(OpenAI::Audio::SpeechCreateParams::Voice::Variants) } attr_accessor :voice # Control the voice of your generated audio with additional instructions. Does not @@ -70,9 +66,8 @@ module OpenAI sig do params( input: String, - model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), - voice: - T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol), + model: OpenAI::Audio::SpeechCreateParams::Model::Variants, + voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, @@ -108,12 +103,8 @@ module OpenAI override.returns( { input: String, - model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), - voice: - T.any( - String, - OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol - ), + model: OpenAI::Audio::SpeechCreateParams::Model::Variants, + voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index e35cfeea..26cf8eff 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -23,7 +23,9 @@ module OpenAI # ID of the model to use. 
The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } + sig do + returns(OpenAI::Audio::TranscriptionCreateParams::Model::Variants) + end attr_accessor :model # Controls how the audio is cut into chunks. When set to `"auto"`, the server @@ -131,7 +133,7 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, chunking_strategy: T.nilable( T.any( @@ -203,7 +205,7 @@ module OpenAI override.returns( { file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, chunking_strategy: T.nilable( T.any( diff --git a/rbi/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi index bcfb2484..7714a71b 100644 --- a/rbi/openai/models/audio/translation_create_params.rbi +++ b/rbi/openai/models/audio/translation_create_params.rbi @@ -22,7 +22,7 @@ module OpenAI # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. - sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } + sig { returns(OpenAI::Audio::TranslationCreateParams::Model::Variants) } attr_accessor :model # An optional text to guide the model's style or continue a previous audio @@ -68,7 +68,7 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranslationCreateParams::Model::Variants, prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, @@ -105,7 +105,7 @@ module OpenAI override.returns( { file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranslationCreateParams::Model::Variants, prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/models/beta/assistant.rbi b/rbi/openai/models/beta/assistant.rbi index 4b635b82..7842b859 100644 --- a/rbi/openai/models/beta/assistant.rbi +++ b/rbi/openai/models/beta/assistant.rbi @@ -54,17 +54,7 @@ module OpenAI # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. - sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::AssistantTool::Variants]) } attr_accessor :tools # Specifies the format that the model must output. Compatible with @@ -89,14 +79,7 @@ module OpenAI # max context length. 
sig do returns( - T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - ) + T.nilable(OpenAI::Beta::AssistantResponseFormatOption::Variants) ) end attr_accessor :response_format @@ -245,22 +228,10 @@ module OpenAI model: String, name: T.nilable(String), object: Symbol, - tools: - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ], + tools: T::Array[OpenAI::Beta::AssistantTool::Variants], response_format: T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) + OpenAI::Beta::AssistantResponseFormatOption::Variants ), temperature: T.nilable(Float), tool_resources: T.nilable(OpenAI::Beta::Assistant::ToolResources), diff --git a/rbi/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi index c04793b1..87b41eb7 100644 --- a/rbi/openai/models/beta/assistant_create_params.rbi +++ b/rbi/openai/models/beta/assistant_create_params.rbi @@ -20,7 +20,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } + sig { returns(OpenAI::Beta::AssistantCreateParams::Model::Variants) } attr_accessor :model # The description of the assistant. The maximum length is 512 characters. @@ -155,7 +155,7 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Beta::AssistantCreateParams::Model::Variants, description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), @@ -262,7 +262,7 @@ module OpenAI sig do override.returns( { - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Beta::AssistantCreateParams::Model::Variants, description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), diff --git a/rbi/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi index b044c34b..e9a3040d 100644 --- a/rbi/openai/models/beta/assistant_update_params.rbi +++ b/rbi/openai/models/beta/assistant_update_params.rbi @@ -40,23 +40,14 @@ module OpenAI # them. 
sig do returns( - T.nilable( - T.any( - String, - OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol - ) - ) + T.nilable(OpenAI::Beta::AssistantUpdateParams::Model::Variants) ) end attr_reader :model sig do params( - model: - T.any( - String, - OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol - ) + model: OpenAI::Beta::AssistantUpdateParams::Model::Variants ).void end attr_writer :model @@ -178,11 +169,7 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol - ), + model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -289,11 +276,7 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol - ), + model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 7e2348cc..16fb54ce 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -55,7 +55,11 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } + sig do + returns( + T.nilable(OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants) + ) + end attr_accessor :model # Whether to enable @@ -214,7 +218,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants + ), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -357,7 +364,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants + ), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -550,16 +560,7 @@ module OpenAI # The text contents of the message. 
sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants ) end attr_accessor :content @@ -601,16 +602,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: @@ -648,16 +640,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: @@ -682,11 +665,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/thread_create_params.rbi b/rbi/openai/models/beta/thread_create_params.rbi index aa3692ba..da3aedab 100644 --- a/rbi/openai/models/beta/thread_create_params.rbi +++ b/rbi/openai/models/beta/thread_create_params.rbi @@ -113,16 +113,7 @@ module OpenAI # The text contents of the message. sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants ) end attr_accessor :content @@ -160,16 +151,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( @@ -206,16 +188,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( @@ -239,11 +212,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/threads/message.rbi b/rbi/openai/models/beta/threads/message.rbi index 82120763..c9a985aa 100644 --- a/rbi/openai/models/beta/threads/message.rbi +++ b/rbi/openai/models/beta/threads/message.rbi @@ -34,16 +34,7 @@ module OpenAI # The content of the message in array of text and/or images. 
sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - ] - ) + returns(T::Array[OpenAI::Beta::Threads::MessageContent::Variants]) end attr_accessor :content @@ -194,14 +185,7 @@ module OpenAI ), completed_at: T.nilable(Integer), content: - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - ], + T::Array[OpenAI::Beta::Threads::MessageContent::Variants], created_at: Integer, incomplete_at: T.nilable(Integer), incomplete_details: @@ -239,10 +223,7 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + OpenAI::Beta::Threads::Message::Attachment::Tool::Variants ] ) ) @@ -288,10 +269,7 @@ module OpenAI file_id: String, tools: T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + OpenAI::Beta::Threads::Message::Attachment::Tool::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/message_create_params.rbi b/rbi/openai/models/beta/threads/message_create_params.rbi index 4e6416b0..0fca1da4 100644 --- a/rbi/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/openai/models/beta/threads/message_create_params.rbi @@ -19,16 +19,7 @@ module OpenAI # The text contents of the message. sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants ) end attr_accessor :content @@ -66,16 +57,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( @@ -114,16 +96,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: @@ -149,11 +122,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/threads/message_delta.rbi b/rbi/openai/models/beta/threads/message_delta.rbi index f234666f..7348c453 100644 --- a/rbi/openai/models/beta/threads/message_delta.rbi +++ b/rbi/openai/models/beta/threads/message_delta.rbi @@ -17,14 +17,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - 
OpenAI::Beta::Threads::ImageURLDeltaBlock - ) - ] + T::Array[OpenAI::Beta::Threads::MessageContentDelta::Variants] ) ) end @@ -88,12 +81,7 @@ module OpenAI { content: T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - OpenAI::Beta::Threads::ImageURLDeltaBlock - ) + OpenAI::Beta::Threads::MessageContentDelta::Variants ], role: OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol } diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index aba1382a..7940c801 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -143,14 +143,7 @@ module OpenAI # max context length. sig do returns( - T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - ) + T.nilable(OpenAI::Beta::AssistantResponseFormatOption::Variants) ) end attr_accessor :response_format @@ -179,12 +172,7 @@ module OpenAI # call that tool. sig do returns( - T.nilable( - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - ) + T.nilable(OpenAI::Beta::AssistantToolChoiceOption::Variants) ) end attr_accessor :tool_choice @@ -192,17 +180,7 @@ module OpenAI # The list of tools that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. - sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::AssistantTool::Variants]) } attr_accessor :tools # Controls for how a thread will be truncated prior to the run. Use this to @@ -435,31 +413,14 @@ module OpenAI T.nilable(OpenAI::Beta::Threads::Run::RequiredAction), response_format: T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) + OpenAI::Beta::AssistantResponseFormatOption::Variants ), started_at: T.nilable(Integer), status: OpenAI::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, tool_choice: - T.nilable( - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - ), - tools: - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ], + T.nilable(OpenAI::Beta::AssistantToolChoiceOption::Variants), + tools: T::Array[OpenAI::Beta::AssistantTool::Variants], truncation_strategy: T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage), diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 7e4e98e4..032415a6 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -99,7 +99,11 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. 
- sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } + sig do + returns( + T.nilable(OpenAI::Beta::Threads::RunCreateParams::Model::Variants) + ) + end attr_accessor :model # Whether to enable @@ -241,7 +245,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::Model::Variants + ), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -405,7 +412,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::Model::Variants + ), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -459,16 +469,7 @@ module OpenAI # The text contents of the message. sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants ) end attr_accessor :content @@ -510,16 +511,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants, role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: @@ -557,16 +549,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants, role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: @@ -591,11 +574,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 2989b129..1b4be0d3 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -91,10 +91,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Variants ] ) end @@ -129,10 +126,7 @@ module OpenAI input: String, outputs: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + 
OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 028059df..03693d1b 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -108,10 +108,7 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output::Variants ] ) ) @@ -160,10 +157,7 @@ module OpenAI input: String, outputs: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi index 1204d5fe..1a87ede6 100644 --- a/rbi/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step.rbi @@ -96,10 +96,7 @@ module OpenAI # The details of the run step. sig do returns( - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ) + OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants ) end attr_accessor :step_details @@ -228,10 +225,7 @@ module OpenAI status: OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ), + OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants, thread_id: String, type: OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi index cbe1297d..ba659f76 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi @@ -20,10 +20,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) + OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails::Variants ) ) end @@ -60,10 +57,7 @@ module OpenAI override.returns( { step_details: - T.any( - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) + OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails::Variants } ) end diff --git a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi index e034b7e3..0dcb1dac 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -24,13 +24,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) - ] + T::Array[OpenAI::Beta::Threads::Runs::ToolCallDelta::Variants] ) ) end @@ -80,11 +74,7 @@ module OpenAI type: Symbol, tool_calls: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - 
OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) + OpenAI::Beta::Threads::Runs::ToolCallDelta::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 61ef18e1..cda8ec9b 100644 --- a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -18,15 +18,7 @@ module OpenAI # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - ] - ) + returns(T::Array[OpenAI::Beta::Threads::Runs::ToolCall::Variants]) end attr_accessor :tool_calls @@ -62,13 +54,7 @@ module OpenAI override.returns( { tool_calls: - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - ], + T::Array[OpenAI::Beta::Threads::Runs::ToolCall::Variants], type: Symbol } ) diff --git a/rbi/openai/models/beta/threads/text.rbi b/rbi/openai/models/beta/threads/text.rbi index 25110168..e36d074a 100644 --- a/rbi/openai/models/beta/threads/text.rbi +++ b/rbi/openai/models/beta/threads/text.rbi @@ -10,16 +10,7 @@ module OpenAI T.any(OpenAI::Beta::Threads::Text, OpenAI::Internal::AnyHash) end - sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationAnnotation, - OpenAI::Beta::Threads::FilePathAnnotation - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::Threads::Annotation::Variants]) } attr_accessor :annotations # The data that makes up the text. 
@@ -49,12 +40,7 @@ module OpenAI override.returns( { annotations: - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationAnnotation, - OpenAI::Beta::Threads::FilePathAnnotation - ) - ], + T::Array[OpenAI::Beta::Threads::Annotation::Variants], value: String } ) diff --git a/rbi/openai/models/beta/threads/text_delta.rbi b/rbi/openai/models/beta/threads/text_delta.rbi index 546b0523..f784ef87 100644 --- a/rbi/openai/models/beta/threads/text_delta.rbi +++ b/rbi/openai/models/beta/threads/text_delta.rbi @@ -13,12 +13,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - ] + T::Array[OpenAI::Beta::Threads::AnnotationDelta::Variants] ) ) end @@ -67,12 +62,7 @@ module OpenAI override.returns( { annotations: - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - ], + T::Array[OpenAI::Beta::Threads::AnnotationDelta::Variants], value: String } ) diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index 2e10edd1..b2233b53 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -43,15 +43,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ) ) end @@ -115,15 +107,7 @@ module OpenAI ), content: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText::OrHash, - OpenAI::Chat::ChatCompletionContentPartRefusal::OrHash - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ), function_call: T.nilable( @@ -168,15 +152,7 @@ module OpenAI ), content: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ), function_call: T.nilable( @@ -228,10 +204,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart::Variants ] ) end diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index 49a3dd9d..7639b70c 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -23,14 +23,7 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. - sig do - returns( - T.any( - String, - OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol - ) - ) - end + sig { returns(OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants) } attr_accessor :voice # Parameters for audio output. 
Required when audio output is requested with @@ -39,11 +32,7 @@ module OpenAI sig do params( format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: - T.any( - String, - OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol - ) + voice: OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants ).returns(T.attached_class) end def self.new( @@ -60,11 +49,7 @@ module OpenAI override.returns( { format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: - T.any( - String, - OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol - ) + voice: OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants } ) end diff --git a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi index 8326b4e7..79d40f0c 100644 --- a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi @@ -18,7 +18,7 @@ module OpenAI # The contents of the developer message. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants ) end attr_accessor :content @@ -41,10 +41,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -64,10 +61,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants, role: Symbol, name: String } diff --git a/rbi/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi index 3e8b7a9c..e6fde114 100644 --- a/rbi/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/openai/models/chat/chat_completion_prediction_content.rbi @@ -19,7 +19,7 @@ module OpenAI # returned much more quickly. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants ) end attr_accessor :content @@ -34,10 +34,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants, type: Symbol ).returns(T.attached_class) end @@ -56,10 +53,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants, type: Symbol } ) diff --git a/rbi/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi index bd7d25b4..9a4f0597 100644 --- a/rbi/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_system_message_param.rbi @@ -17,7 +17,7 @@ module OpenAI # The contents of the system message. 
sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants ) end attr_accessor :content @@ -40,10 +40,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -63,10 +60,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants, role: Symbol, name: String } diff --git a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi index 6aab530f..c0ba7e87 100644 --- a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi @@ -17,7 +17,7 @@ module OpenAI # The contents of the tool message. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants ) end attr_accessor :content @@ -33,10 +33,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants, tool_call_id: String, role: Symbol ).returns(T.attached_class) @@ -55,10 +52,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants, role: Symbol, tool_call_id: String } diff --git a/rbi/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi index d53e5739..f0fdf008 100644 --- a/rbi/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_user_message_param.rbi @@ -17,17 +17,7 @@ module OpenAI # The contents of the user message. 
sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ) + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants ) end attr_accessor :content @@ -49,17 +39,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText::OrHash, - OpenAI::Chat::ChatCompletionContentPartImage::OrHash, - OpenAI::Chat::ChatCompletionContentPartInputAudio::OrHash, - OpenAI::Chat::ChatCompletionContentPart::File::OrHash - ) - ] - ), + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -79,17 +59,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ), + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants, role: Symbol, name: String } @@ -106,14 +76,7 @@ module OpenAI T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] + T::Array[OpenAI::Chat::ChatCompletionContentPart::Variants] ) end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index a0f4b474..5d9fa08e 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -42,7 +42,7 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } + sig { returns(OpenAI::Chat::CompletionCreateParams::Model::Variants) } attr_accessor :model # Parameters for audio output. Required when audio output is requested with @@ -298,7 +298,11 @@ module OpenAI # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. 
- sig { returns(T.nilable(T.any(String, T::Array[String]))) } + sig do + returns( + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants) + ) + end attr_accessor :stop # Whether or not to store the output of this chat completion request for use in @@ -423,7 +427,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Chat::CompletionCreateParams::Model::Variants, audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: @@ -461,7 +465,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), @@ -687,7 +692,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam ) ], - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Chat::CompletionCreateParams::Model::Variants, audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), function_call: @@ -725,7 +730,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), diff --git a/rbi/openai/models/comparison_filter.rbi b/rbi/openai/models/comparison_filter.rbi index c18ff737..9ae08eed 100644 --- a/rbi/openai/models/comparison_filter.rbi +++ b/rbi/openai/models/comparison_filter.rbi @@ -25,7 +25,7 @@ module OpenAI # The value to compare against the attribute key; supports string, number, or # boolean types. - sig { returns(T.any(String, Float, T::Boolean)) } + sig { returns(OpenAI::ComparisonFilter::Value::Variants) } attr_accessor :value # A filter used to compare a specified attribute key to a given value using a @@ -34,7 +34,7 @@ module OpenAI params( key: String, type: OpenAI::ComparisonFilter::Type::OrSymbol, - value: T.any(String, Float, T::Boolean) + value: OpenAI::ComparisonFilter::Value::Variants ).returns(T.attached_class) end def self.new( @@ -60,7 +60,7 @@ module OpenAI { key: String, type: OpenAI::ComparisonFilter::Type::OrSymbol, - value: T.any(String, Float, T::Boolean) + value: OpenAI::ComparisonFilter::Value::Variants } ) end diff --git a/rbi/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi index 4f9c3b9c..8112f72a 100644 --- a/rbi/openai/models/completion_create_params.rbi +++ b/rbi/openai/models/completion_create_params.rbi @@ -16,9 +16,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig do - returns(T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol)) - end + sig { returns(OpenAI::CompletionCreateParams::Model::Variants) } attr_accessor :model # The prompt(s) to generate completions for, encoded as a string, array of @@ -28,16 +26,7 @@ module OpenAI # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. 
sig do - returns( - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ) - ) + returns(T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants)) end attr_accessor :prompt @@ -128,7 +117,7 @@ module OpenAI # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - sig { returns(T.nilable(T.any(String, T::Array[String]))) } + sig { returns(T.nilable(OpenAI::CompletionCreateParams::Stop::Variants)) } attr_accessor :stop # Options for streaming response. Only set this when you set `stream: true`. @@ -176,16 +165,8 @@ module OpenAI sig do params( - model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + model: OpenAI::CompletionCreateParams::Model::Variants, + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -195,7 +176,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), @@ -318,17 +299,8 @@ module OpenAI sig do override.returns( { - model: - T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + model: OpenAI::CompletionCreateParams::Model::Variants, + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -338,7 +310,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), suffix: T.nilable(String), diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index 147292fe..c67465a0 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -20,16 +20,7 @@ module OpenAI # for counting tokens. In addition to the per-input token limit, all embedding # models enforce a maximum of 300,000 tokens summed across all inputs in a single # request. - sig do - returns( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ) - end + sig { returns(OpenAI::EmbeddingCreateParams::Input::Variants) } attr_accessor :input # ID of the model to use. You can use the @@ -37,7 +28,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(T.any(String, OpenAI::EmbeddingModel::OrSymbol)) } + sig { returns(OpenAI::EmbeddingCreateParams::Model::Variants) } attr_accessor :model # The number of dimensions the resulting output embeddings should have. 
Only @@ -76,14 +67,8 @@ module OpenAI sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), - model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), + input: OpenAI::EmbeddingCreateParams::Input::Variants, + model: OpenAI::EmbeddingCreateParams::Model::Variants, dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, @@ -125,14 +110,8 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), - model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), + input: OpenAI::EmbeddingCreateParams::Input::Variants, + model: OpenAI::EmbeddingCreateParams::Model::Variants, dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index a3dd70c2..891ce3fe 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalCreateResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -49,13 +43,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalCreateResponse::TestingCriterion::Variants ] ) end @@ -121,23 +109,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalCreateResponse::TestingCriterion::Variants ] } ) diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index 92683e20..c1d1d4d7 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalListResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -48,15 +42,7 @@ module OpenAI # A list of testing criteria. 
sig do returns( - T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) - ] + T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::Variants] ) end attr_accessor :testing_criteria @@ -121,23 +107,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalListResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalListResponse::TestingCriterion::Variants ] } ) diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index ead84473..5cdaafb0 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -19,11 +19,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Variants ) end attr_accessor :data_source_config @@ -49,13 +45,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Variants ] ) end @@ -121,23 +111,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Variants ] } ) diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index e2241470..780e123c 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. 
sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -49,13 +43,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Variants ] ) end @@ -121,23 +109,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Variants ] } ) diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 163528b5..d6f3d390 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Variants ) end attr_accessor :data_source @@ -176,11 +172,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses - ), + OpenAI::Models::Evals::RunCancelResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -230,11 +222,7 @@ module OpenAI # Determines what populates the `item` namespace in this run's data source. 
sig do returns( - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants ) end attr_accessor :source @@ -250,10 +238,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants ) ) end @@ -334,17 +319,10 @@ module OpenAI override.returns( { source: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses - ), + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants, type: Symbol, input_messages: - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference - ), + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants, model: String, sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams @@ -666,10 +644,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants ] ) end @@ -705,10 +680,7 @@ module OpenAI { template: T::Array[ - T.any( - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants ], type: Symbol } @@ -780,11 +752,7 @@ module OpenAI # Text inputs to the model - can contain template strings. 
sig do returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants ) end attr_accessor :content @@ -850,11 +818,7 @@ module OpenAI override.returns( { content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ), + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, type: diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index cab19c2c..32cae98d 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Variants ) end attr_accessor :data_source @@ -176,11 +172,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses - ), + OpenAI::Models::Evals::RunCreateResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -230,11 +222,7 @@ module OpenAI # Determines what populates the `item` namespace in this run's data source. 
sig do returns( - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants ) end attr_accessor :source @@ -250,10 +238,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants ) ) end @@ -334,17 +319,10 @@ module OpenAI override.returns( { source: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses - ), + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants, type: Symbol, input_messages: - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference - ), + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants, model: String, sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams @@ -666,10 +644,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants ] ) end @@ -705,10 +680,7 @@ module OpenAI { template: T::Array[ - T.any( - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants ], type: Symbol } @@ -780,11 +752,7 @@ module OpenAI # Text inputs to the model - can contain template strings. 
sig do returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants ) end attr_accessor :content @@ -850,11 +818,7 @@ module OpenAI override.returns( { content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ), + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, type: diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index bf2e1d33..272e7bb7 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -22,13 +22,7 @@ module OpenAI # Information about the run's data source. sig do - returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses - ) - ) + returns(OpenAI::Models::Evals::RunListResponse::DataSource::Variants) end attr_accessor :data_source @@ -176,11 +170,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses - ), + OpenAI::Models::Evals::RunListResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -228,11 +218,7 @@ module OpenAI # Determines what populates the `item` namespace in this run's data source. 
sig do returns( - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses - ) + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants ) end attr_accessor :source @@ -248,10 +234,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference - ) + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants ) ) end @@ -332,17 +315,10 @@ module OpenAI override.returns( { source: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses - ), + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants, type: Symbol, input_messages: - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference - ), + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants, model: String, sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams @@ -664,10 +640,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants ] ) end @@ -703,10 +676,7 @@ module OpenAI { template: T::Array[ - T.any( - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants ], type: Symbol } @@ -778,11 +748,7 @@ module OpenAI # Text inputs to the model - can contain template strings. 
sig do returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants ) end attr_accessor :content @@ -848,11 +814,7 @@ module OpenAI override.returns( { content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ), + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, type: diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 82ffc62a..a51ecb78 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Variants ) end attr_accessor :data_source @@ -178,11 +174,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses - ), + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -232,11 +224,7 @@ module OpenAI # Determines what populates the `item` namespace in this run's data source. 
sig do returns( - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants ) end attr_accessor :source @@ -252,10 +240,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants ) ) end @@ -336,17 +321,10 @@ module OpenAI override.returns( { source: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses - ), + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants, type: Symbol, input_messages: - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference - ), + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants, model: String, sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams @@ -668,10 +646,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants ] ) end @@ -707,10 +682,7 @@ module OpenAI { template: T::Array[ - T.any( - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants ], type: Symbol } @@ -782,11 +754,7 @@ module OpenAI # Text inputs to the model - can contain template strings. 
sig do returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants ) end attr_accessor :content @@ -852,11 +820,7 @@ module OpenAI override.returns( { content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText - ), + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, type: diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi index 1c5d0dbd..bca68bdb 100644 --- a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi +++ b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi @@ -36,7 +36,9 @@ module OpenAI # The reference answer for the evaluation. sig do - returns(T.any(String, T.anything, T::Array[T.anything], Float)) + returns( + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants + ) end attr_accessor :reference_answer @@ -52,7 +54,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -80,7 +82,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi index 138bf221..20651675 100644 --- a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi +++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi @@ -17,13 +17,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader::Variants ) ) end @@ -65,13 +59,7 @@ module OpenAI override.returns( { grader: - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader::Variants } ) end diff --git a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi index 4172bfd0..715e1e48 100644 --- a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi @@ -14,43 +14,83 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
- sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants + ) + ) + end attr_reader :batch_size - sig { params(batch_size: T.any(Symbol, Integer)).void } + sig do + params( + batch_size: + OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants + ).void + end attr_writer :batch_size # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable(OpenAI::FineTuning::DpoHyperparameters::Beta::Variants) + ) + end attr_reader :beta - sig { params(beta: T.any(Symbol, Float)).void } + sig do + params( + beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants + ).void + end attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params( + learning_rate_multiplier: + OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants + ).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable(OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants) + ) + end attr_reader :n_epochs - sig { params(n_epochs: T.any(Symbol, Integer)).void } + sig do + params( + n_epochs: OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants + ).void + end attr_writer :n_epochs # The hyperparameters used for the DPO fine-tuning job. sig do params( - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants, + beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants, + n_epochs: OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants ).returns(T.attached_class) end def self.new( @@ -72,10 +112,13 @@ module OpenAI sig do override.returns( { - batch_size: T.any(Symbol, Integer), - beta: T.any(Symbol, Float), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants, + beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants } ) end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index e43f6cd0..cacb180f 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -318,32 +318,65 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
- sig { returns(T.nilable(T.any(T.anything, Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ) + ) + end attr_accessor :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params( + learning_rate_multiplier: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants + ).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants + ) + ) + end attr_reader :n_epochs - sig { params(n_epochs: T.any(Symbol, Integer)).void } + sig do + params( + n_epochs: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants + ).void + end attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. sig do params( - batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ), + learning_rate_multiplier: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants ).returns(T.attached_class) end def self.new( @@ -362,9 +395,14 @@ module OpenAI sig do override.returns( { - batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ), + learning_rate_multiplier: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants } ) end diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi index 32e49ed1..f714b263 100644 --- a/rbi/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/openai/models/fine_tuning/job_create_params.rbi @@ -17,11 +17,7 @@ module OpenAI # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig do - returns( - T.any(String, OpenAI::FineTuning::JobCreateParams::Model::OrSymbol) - ) - end + sig { returns(OpenAI::FineTuning::JobCreateParams::Model::Variants) } attr_accessor :model # The ID of an uploaded file that contains training data. 
@@ -122,11 +118,7 @@ module OpenAI sig do params( - model: - T.any( - String, - OpenAI::FineTuning::JobCreateParams::Model::OrSymbol - ), + model: OpenAI::FineTuning::JobCreateParams::Model::Variants, training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, @@ -210,11 +202,7 @@ module OpenAI sig do override.returns( { - model: - T.any( - String, - OpenAI::FineTuning::JobCreateParams::Model::OrSymbol - ), + model: OpenAI::FineTuning::JobCreateParams::Model::Variants, training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, @@ -294,35 +282,71 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants + ) + ) + end attr_reader :batch_size - sig { params(batch_size: T.any(Symbol, Integer)).void } + sig do + params( + batch_size: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants + ).void + end attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params( + learning_rate_multiplier: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants + ).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants + ) + ) + end attr_reader :n_epochs - sig { params(n_epochs: T.any(Symbol, Integer)).void } + sig do + params( + n_epochs: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants + ).void + end attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. 
sig do params( - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants ).returns(T.attached_class) end def self.new( @@ -341,9 +365,12 @@ module OpenAI sig do override.returns( { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants } ) end diff --git a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi index 3105e2ef..c2d44f4c 100644 --- a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi @@ -14,47 +14,113 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants + ) + ) + end attr_reader :batch_size - sig { params(batch_size: T.any(Symbol, Integer)).void } + sig do + params( + batch_size: + OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants + ).void + end attr_writer :batch_size # Multiplier on amount of compute used for exploring search space during training. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants + ) + ) + end attr_reader :compute_multiplier - sig { params(compute_multiplier: T.any(Symbol, Float)).void } + sig do + params( + compute_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants + ).void + end attr_writer :compute_multiplier # The number of training steps between evaluation runs. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants + ) + ) + end attr_reader :eval_interval - sig { params(eval_interval: T.any(Symbol, Integer)).void } + sig do + params( + eval_interval: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants + ).void + end attr_writer :eval_interval # Number of evaluation samples to generate per training step. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants + ) + ) + end attr_reader :eval_samples - sig { params(eval_samples: T.any(Symbol, Integer)).void } + sig do + params( + eval_samples: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants + ).void + end attr_writer :eval_samples # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params( + learning_rate_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants + ).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants + ) + ) + end attr_reader :n_epochs - sig { params(n_epochs: T.any(Symbol, Integer)).void } + sig do + params( + n_epochs: + OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants + ).void + end attr_writer :n_epochs # Level of reasoning effort. @@ -78,12 +144,18 @@ module OpenAI # The hyperparameters used for the reinforcement fine-tuning job. sig do params( - batch_size: T.any(Symbol, Integer), - compute_multiplier: T.any(Symbol, Float), - eval_interval: T.any(Symbol, Integer), - eval_samples: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer), + batch_size: + OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants, + compute_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants, + eval_interval: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants, + eval_samples: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants, reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol ).returns(T.attached_class) @@ -112,12 +184,18 @@ module OpenAI sig do override.returns( { - batch_size: T.any(Symbol, Integer), - compute_multiplier: T.any(Symbol, Float), - eval_interval: T.any(Symbol, Integer), - eval_samples: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer), + batch_size: + OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants, + compute_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants, + eval_interval: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants, + eval_samples: + OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants, reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol } diff --git a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi index dc8167ab..8e743013 100644 --- a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi @@ -14,34 +14,70 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
- sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants + ) + ) + end attr_reader :batch_size - sig { params(batch_size: T.any(Symbol, Integer)).void } + sig do + params( + batch_size: + OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants + ).void + end attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier - sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } + sig do + params( + learning_rate_multiplier: + OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants + ).void + end attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants + ) + ) + end attr_reader :n_epochs - sig { params(n_epochs: T.any(Symbol, Integer)).void } + sig do + params( + n_epochs: + OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants + ).void + end attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do params( - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants ).returns(T.attached_class) end def self.new( @@ -60,9 +96,12 @@ module OpenAI sig do override.returns( { - batch_size: T.any(Symbol, Integer), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants, + learning_rate_multiplier: + OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants } ) end diff --git a/rbi/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi index a6be588e..897c141a 100644 --- a/rbi/openai/models/image_create_variation_params.rbi +++ b/rbi/openai/models/image_create_variation_params.rbi @@ -18,7 +18,9 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } + sig do + returns(T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants)) + end attr_accessor :model # The number of images to generate. Must be between 1 and 10. 
@@ -56,7 +58,7 @@ module OpenAI sig do params( image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), n: T.nilable(Integer), response_format: T.nilable( @@ -95,7 +97,8 @@ module OpenAI override.returns( { image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: + T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), n: T.nilable(Integer), response_format: T.nilable( diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 7225f73f..6413df44 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -18,17 +18,7 @@ module OpenAI # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. - sig do - returns( - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ) - ) - end + sig { returns(OpenAI::ImageEditParams::Image::Variants) } attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 @@ -61,7 +51,7 @@ module OpenAI # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. - sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } + sig { returns(T.nilable(OpenAI::ImageEditParams::Model::Variants)) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. @@ -100,18 +90,11 @@ module OpenAI sig do params( - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageEditParams::Model::Variants), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: @@ -176,19 +159,12 @@ module OpenAI sig do override.returns( { - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageEditParams::Model::Variants), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index c81dbbcf..f11b2548 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -32,7 +32,7 @@ module OpenAI # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. 
- sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } + sig { returns(T.nilable(OpenAI::ImageGenerateParams::Model::Variants)) } attr_accessor :model # Control the content-moderation level for images generated by `gpt-image-1`. Must @@ -109,7 +109,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), @@ -192,7 +192,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), diff --git a/rbi/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi index 1f2a76f8..aba9c630 100644 --- a/rbi/openai/models/moderation_create_params.rbi +++ b/rbi/openai/models/moderation_create_params.rbi @@ -13,20 +13,7 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - sig do - returns( - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] - ) - ) - end + sig { returns(OpenAI::ModerationCreateParams::Input::Variants) } attr_accessor :input # The content moderation model you would like to use. Learn more in @@ -34,29 +21,19 @@ module OpenAI # learn about available models # [here](https://platform.openai.com/docs/models#moderation). 
sig do - returns(T.nilable(T.any(String, OpenAI::ModerationModel::OrSymbol))) + returns(T.nilable(OpenAI::ModerationCreateParams::Model::Variants)) end attr_reader :model sig do - params(model: T.any(String, OpenAI::ModerationModel::OrSymbol)).void + params(model: OpenAI::ModerationCreateParams::Model::Variants).void end attr_writer :model sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput::OrHash, - OpenAI::ModerationTextInput::OrHash - ) - ] - ), - model: T.any(String, OpenAI::ModerationModel::OrSymbol), + input: OpenAI::ModerationCreateParams::Input::Variants, + model: OpenAI::ModerationCreateParams::Model::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -76,18 +53,8 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] - ), - model: T.any(String, OpenAI::ModerationModel::OrSymbol), + input: OpenAI::ModerationCreateParams::Input::Variants, + model: OpenAI::ModerationCreateParams::Model::Variants, request_options: OpenAI::RequestOptions } ) @@ -105,12 +72,7 @@ module OpenAI T.any( String, T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] + T::Array[OpenAI::ModerationMultiModalInput::Variants] ) end diff --git a/rbi/openai/models/responses/easy_input_message.rbi b/rbi/openai/models/responses/easy_input_message.rbi index f4f1e6e9..10935954 100644 --- a/rbi/openai/models/responses/easy_input_message.rbi +++ b/rbi/openai/models/responses/easy_input_message.rbi @@ -14,20 +14,7 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - sig do - returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) - ) - end + sig { returns(OpenAI::Responses::EasyInputMessage::Content::Variants) } attr_accessor :content # The role of the message input. One of `user`, `assistant`, `system`, or @@ -55,17 +42,7 @@ module OpenAI # interactions. 
sig do params( - content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Responses::ResponseInputImage::OrHash, - OpenAI::Responses::ResponseInputFile::OrHash - ) - ] - ), + content: OpenAI::Responses::EasyInputMessage::Content::Variants, role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol ).returns(T.attached_class) @@ -85,17 +62,7 @@ module OpenAI sig do override.returns( { - content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ), + content: OpenAI::Responses::EasyInputMessage::Content::Variants, role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol } @@ -113,13 +80,7 @@ module OpenAI T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] + T::Array[OpenAI::Responses::ResponseInputContent::Variants] ) end diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index fcc7367f..0e57a678 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -65,15 +65,7 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig do - returns( - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ) - ) - end + sig { returns(OpenAI::ResponsesModel::Variants) } attr_accessor :model # The object type of this resource - always set to `response`. @@ -88,18 +80,7 @@ module OpenAI # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ] - ) + returns(T::Array[OpenAI::Responses::ResponseOutputItem::Variants]) end attr_accessor :output @@ -117,15 +98,7 @@ module OpenAI # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. - sig do - returns( - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - ) - end + sig { returns(OpenAI::Responses::Response::ToolChoice::Variants) } attr_accessor :tool_choice # An array of tools the model may call while generating a response. You can @@ -142,18 +115,7 @@ module OpenAI # - **Function calls (custom tools)**: Functions that are defined by you, enabling # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). 
- sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Responses::Tool::Variants]) } attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the @@ -271,12 +233,7 @@ module OpenAI T.nilable(OpenAI::Responses::Response::IncompleteDetails::OrHash), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), + model: OpenAI::ResponsesModel::Variants, output: T::Array[ T.any( @@ -459,41 +416,13 @@ module OpenAI T.nilable(OpenAI::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ), + model: OpenAI::ResponsesModel::Variants, object: Symbol, - output: - T::Array[ - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ], + output: T::Array[OpenAI::Responses::ResponseOutputItem::Variants], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ), - tools: - T::Array[ - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) - ], + tool_choice: OpenAI::Responses::Response::ToolChoice::Variants, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: T.nilable(Float), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index 6d787089..d0bbe3ac 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -24,10 +24,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants ] ) end @@ -83,10 +80,7 @@ module OpenAI code: String, results: T::Array[ - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants ], status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, diff --git a/rbi/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi index 3cc852cc..09a3043f 100644 --- a/rbi/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/openai/models/responses/response_content_part_added_event.rbi @@ -27,10 +27,7 @@ module OpenAI # The content part that was added. 
sig do returns( - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) + OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants ) end attr_accessor :part @@ -74,10 +71,7 @@ module OpenAI item_id: String, output_index: Integer, part: - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ), + OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi index 6bf2b951..f0fce322 100644 --- a/rbi/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/openai/models/responses/response_content_part_done_event.rbi @@ -27,10 +27,7 @@ module OpenAI # The content part that is done. sig do returns( - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) + OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants ) end attr_accessor :part @@ -74,10 +71,7 @@ module OpenAI item_id: String, output_index: Integer, part: - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ), + OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 0e582266..eb48a2f0 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -25,26 +25,7 @@ module OpenAI # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) sig do - returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ) - ) + returns(OpenAI::Responses::ResponseCreateParams::Input::Variants) end attr_accessor :input @@ -53,15 +34,7 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig do - returns( - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ) - ) - end + sig { returns(OpenAI::ResponsesModel::Variants) } attr_accessor :model # Specify additional output data to include in the model response. 
Currently @@ -284,31 +257,8 @@ module OpenAI sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, + model: OpenAI::ResponsesModel::Variants, include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -485,31 +435,8 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ), - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, + model: OpenAI::ResponsesModel::Variants, include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -571,21 +498,7 @@ module OpenAI T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] + T::Array[OpenAI::Responses::ResponseInputItem::Variants] ) end diff --git a/rbi/openai/models/responses/response_file_search_tool_call.rbi b/rbi/openai/models/responses/response_file_search_tool_call.rbi index c2b95318..c6864d65 100644 --- a/rbi/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/openai/models/responses/response_file_search_tool_call.rbi @@ -163,7 +163,12 @@ module OpenAI # characters, booleans, or numbers. 
sig do returns( - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]) + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ) ) end attr_accessor :attributes @@ -199,7 +204,12 @@ module OpenAI sig do params( attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ), file_id: String, filename: String, score: Float, @@ -228,7 +238,12 @@ module OpenAI override.returns( { attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ), file_id: String, filename: String, score: Float, diff --git a/rbi/openai/models/responses/response_input_message_item.rbi b/rbi/openai/models/responses/response_input_message_item.rbi index c1190de8..f36d17e6 100644 --- a/rbi/openai/models/responses/response_input_message_item.rbi +++ b/rbi/openai/models/responses/response_input_message_item.rbi @@ -19,15 +19,7 @@ module OpenAI # A list of one or many input items to the model, containing different content # types. sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) + returns(T::Array[OpenAI::Responses::ResponseInputContent::Variants]) end attr_accessor :content @@ -113,13 +105,7 @@ module OpenAI { id: String, content: - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ], + T::Array[OpenAI::Responses::ResponseInputContent::Variants], role: OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol, status: diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index b819f00a..4157b8bd 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -15,22 +15,7 @@ module OpenAI end # A list of items used to generate this response. - sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Responses::ResponseItem::Variants]) } attr_accessor :data # The ID of the first item in the list. 
@@ -88,19 +73,7 @@ module OpenAI sig do override.returns( { - data: - T::Array[ - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) - ], + data: T::Array[OpenAI::Responses::ResponseItem::Variants], first_id: String, has_more: T::Boolean, last_id: String, diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index 8f37ebb9..b831c02f 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -13,18 +13,7 @@ module OpenAI end # The output item that was added. - sig do - returns( - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ) - end + sig { returns(OpenAI::Responses::ResponseOutputItem::Variants) } attr_accessor :item # The index of the output item that was added. @@ -64,15 +53,7 @@ module OpenAI sig do override.returns( { - item: - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ), + item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index ff21f27a..37e58da7 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -13,18 +13,7 @@ module OpenAI end # The output item that was marked done. - sig do - returns( - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ) - end + sig { returns(OpenAI::Responses::ResponseOutputItem::Variants) } attr_accessor :item # The index of the output item that was marked done. 
@@ -64,15 +53,7 @@ module OpenAI sig do override.returns( { - item: - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ), + item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi index d42d93ce..fa115d06 100644 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi @@ -15,11 +15,7 @@ module OpenAI # A citation to a file. sig do returns( - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants ) end attr_accessor :annotation @@ -80,11 +76,7 @@ module OpenAI override.returns( { annotation: - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ), + OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants, annotation_index: Integer, content_index: Integer, item_id: String, diff --git a/rbi/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi index 20dd60cc..3d78d7c9 100644 --- a/rbi/openai/models/vector_store_search_params.rbi +++ b/rbi/openai/models/vector_store_search_params.rbi @@ -12,7 +12,7 @@ module OpenAI end # A query string for a search - sig { returns(T.any(String, T::Array[String])) } + sig { returns(OpenAI::VectorStoreSearchParams::Query::Variants) } attr_accessor :query # A filter to apply based on file attributes. @@ -65,7 +65,7 @@ module OpenAI sig do params( - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any( OpenAI::ComparisonFilter::OrHash, @@ -97,7 +97,7 @@ module OpenAI sig do override.returns( { - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter), max_num_results: Integer, ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, diff --git a/rbi/openai/models/vector_store_search_response.rbi b/rbi/openai/models/vector_store_search_response.rbi index 05bfc3de..eb825847 100644 --- a/rbi/openai/models/vector_store_search_response.rbi +++ b/rbi/openai/models/vector_store_search_response.rbi @@ -17,7 +17,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -42,7 +49,12 @@ module OpenAI sig do params( attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ), content: T::Array[ OpenAI::Models::VectorStoreSearchResponse::Content::OrHash @@ -74,7 +86,12 @@ module OpenAI override.returns( { attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ), content: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], file_id: String, diff --git a/rbi/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/openai/models/vector_stores/file_batch_create_params.rbi index 3ac858b8..c4e42f6b 100644 --- a/rbi/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_create_params.rbi @@ -27,7 +27,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -60,7 +67,12 @@ module OpenAI params( file_ids: T::Array[String], attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -92,7 +104,12 @@ module OpenAI { file_ids: T::Array[String], attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam, diff --git a/rbi/openai/models/vector_stores/file_create_params.rbi b/rbi/openai/models/vector_stores/file_create_params.rbi index b98a191b..a335e71c 100644 --- a/rbi/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_create_params.rbi @@ -27,7 +27,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -60,7 +67,12 @@ module OpenAI params( file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -92,7 +104,12 @@ module OpenAI { file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam, diff --git a/rbi/openai/models/vector_stores/file_update_params.rbi b/rbi/openai/models/vector_stores/file_update_params.rbi index c8d7d898..da5190dc 100644 --- a/rbi/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/openai/models/vector_stores/file_update_params.rbi @@ -24,7 +24,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -32,7 +39,12 @@ module OpenAI params( vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -53,7 +65,12 @@ module OpenAI { vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi index 7cf4a149..9207da17 100644 --- a/rbi/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file.rbi @@ -68,22 +68,20 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - attr_accessor :attributes - - # The strategy used to chunk the file. sig do returns( T.nilable( - T.any( - OpenAI::StaticFileChunkingStrategyObject, - OpenAI::OtherFileChunkingStrategyObject - ) + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] ) ) end + attr_accessor :attributes + + # The strategy used to chunk the file. 
+ sig { returns(T.nilable(OpenAI::FileChunkingStrategy::Variants)) } attr_reader :chunking_strategy sig do @@ -110,7 +108,12 @@ module OpenAI usage_bytes: Integer, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::StaticFileChunkingStrategyObject::OrHash, @@ -165,12 +168,13 @@ module OpenAI usage_bytes: Integer, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: - T.any( - OpenAI::StaticFileChunkingStrategyObject, - OpenAI::OtherFileChunkingStrategyObject - ) + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] + ), + chunking_strategy: OpenAI::FileChunkingStrategy::Variants } ) end diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi index 2c8d03a3..6bbb1186 100644 --- a/rbi/openai/resources/audio/speech.rbi +++ b/rbi/openai/resources/audio/speech.rbi @@ -8,9 +8,8 @@ module OpenAI sig do params( input: String, - model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), - voice: - T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol), + model: OpenAI::Audio::SpeechCreateParams::Model::Variants, + voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index 02148f4b..f48d5abc 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -11,7 +11,7 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, chunking_strategy: T.nilable( T.any( @@ -31,10 +31,7 @@ module OpenAI stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( - T.any( - OpenAI::Audio::Transcription, - OpenAI::Audio::TranscriptionVerbose - ) + OpenAI::Models::Audio::TranscriptionCreateResponse::Variants ) end def create( @@ -95,7 +92,7 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, chunking_strategy: T.nilable( T.any( @@ -116,10 +113,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Audio::TranscriptionTextDeltaEvent, - OpenAI::Audio::TranscriptionTextDoneEvent - ) + OpenAI::Audio::TranscriptionStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi index b2427856..7c462f9f 100644 --- a/rbi/openai/resources/audio/translations.rbi +++ b/rbi/openai/resources/audio/translations.rbi @@ -8,15 +8,13 @@ module OpenAI sig do params( file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.any(String, OpenAI::AudioModel::OrSymbol), + model: OpenAI::Audio::TranslationCreateParams::Model::Variants, prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: OpenAI::RequestOptions::OrHash - ).returns( - T.any(OpenAI::Audio::Translation, 
OpenAI::Audio::TranslationVerbose) - ) + ).returns(OpenAI::Models::Audio::TranslationCreateResponse::Variants) end def create( # The audio file object (not file name) translate, in one of these formats: flac, diff --git a/rbi/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi index d58ecbb1..3ec75274 100644 --- a/rbi/openai/resources/beta/assistants.rbi +++ b/rbi/openai/resources/beta/assistants.rbi @@ -7,7 +7,7 @@ module OpenAI # Create an assistant with a model and instructions. sig do params( - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Beta::AssistantCreateParams::Model::Variants, description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), @@ -132,11 +132,7 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol - ), + model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index 9279a612..360e8120 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -112,7 +112,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants + ), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -262,7 +265,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants + ), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -305,32 +311,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - 
OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/beta/threads/messages.rbi b/rbi/openai/resources/beta/threads/messages.rbi index 593ceae0..1562cf34 100644 --- a/rbi/openai/resources/beta/threads/messages.rbi +++ b/rbi/openai/resources/beta/threads/messages.rbi @@ -10,16 +10,7 @@ module OpenAI params( thread_id: String, content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index a1d803bf..c8190d18 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -29,7 +29,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::Model::Variants + ), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -205,7 +208,10 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), + model: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::Model::Variants + ), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -244,32 +250,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end @@ -539,32 
+520,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 46d3f6ba..9abe48e4 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -39,7 +39,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Chat::CompletionCreateParams::Model::Variants, audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: @@ -77,7 +77,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), @@ -325,7 +326,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: T.any(String, OpenAI::ChatModel::OrSymbol), + model: OpenAI::Chat::CompletionCreateParams::Model::Variants, audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: @@ -363,7 +364,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), diff --git a/rbi/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi index 685399d1..e99bb46e 100644 --- a/rbi/openai/resources/completions.rbi +++ b/rbi/openai/resources/completions.rbi @@ -8,16 +8,8 @@ module OpenAI # Creates a completion for the provided prompt and parameters. 
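The recurring change in the hunks above is mechanical: a union that was restated inline at every signature is now referenced through a single named `Variants` alias. A minimal sorbet-runtime sketch of that pattern, with the hypothetical names `Demo` and `StopVariants` standing in for the generated code:

require "sorbet-runtime"

module Demo
  extend T::Sig

  # Name the union once, then reuse it in every signature instead of
  # restating T.any(...) at each call site.
  StopVariants = T.type_alias { T.any(String, T::Array[String]) }

  sig { params(stop: T.nilable(StopVariants)).returns(T.nilable(StopVariants)) }
  def self.echo_stop(stop)
    stop
  end
end

Demo.echo_stop("END")        # a single stop sequence
Demo.echo_stop(%w[END STOP]) # several stop sequences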
sig do params( - model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + model: OpenAI::CompletionCreateParams::Model::Variants, + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -27,7 +19,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), @@ -156,16 +148,8 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + model: OpenAI::CompletionCreateParams::Model::Variants, + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -175,7 +159,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index 061e114c..fc6482f7 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -6,14 +6,8 @@ module OpenAI # Creates an embedding vector representing the input text. 
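At the call site, the `Prompt::Variants` union above admits plain text or pre-tokenized input. A speculative usage sketch; the client setup is boilerplate and the model name and token IDs are assumptions, not something this patch pins down:

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# A plain-text prompt.
client.completions.create(model: "gpt-3.5-turbo-instruct", prompt: "Say hello")

# A pre-tokenized prompt: one array of token IDs per completion.
client.completions.create(model: "gpt-3.5-turbo-instruct", prompt: [[9906, 1917]])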
sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), - model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), + input: OpenAI::EmbeddingCreateParams::Input::Variants, + model: OpenAI::EmbeddingCreateParams::Model::Variants, dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, diff --git a/rbi/openai/resources/fine_tuning/alpha/graders.rbi b/rbi/openai/resources/fine_tuning/alpha/graders.rbi index 4e22b461..8b1683c1 100644 --- a/rbi/openai/resources/fine_tuning/alpha/graders.rbi +++ b/rbi/openai/resources/fine_tuning/alpha/graders.rbi @@ -18,7 +18,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::FineTuning::Alpha::GraderRunResponse) end diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index e60f057d..3a699c44 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -16,11 +16,7 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: - T.any( - String, - OpenAI::FineTuning::JobCreateParams::Model::OrSymbol - ), + model: OpenAI::FineTuning::JobCreateParams::Model::Variants, training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index bc10faa9..e05ab0e0 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -7,7 +7,7 @@ module OpenAI sig do params( image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), n: T.nilable(Integer), response_format: T.nilable( @@ -46,18 +46,11 @@ module OpenAI # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. sig do params( - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageEditParams::Model::Variants), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: @@ -126,7 +119,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), diff --git a/rbi/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi index 06d15b68..4b79dfd5 100644 --- a/rbi/openai/resources/moderations.rbi +++ b/rbi/openai/resources/moderations.rbi @@ -7,18 +7,8 @@ module OpenAI # the [moderation guide](https://platform.openai.com/docs/guides/moderation). 
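Likewise for embeddings: the `Input::Variants` alias covers a single string, an array of strings, and token arrays. A hedged sketch reusing the `client` from the earlier example; each argument shape is one member of the union:

client.embeddings.create(model: "text-embedding-3-small", input: "hello world")
client.embeddings.create(model: "text-embedding-3-small", input: %w[first second])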
sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput::OrHash, - OpenAI::ModerationTextInput::OrHash - ) - ] - ), - model: T.any(String, OpenAI::ModerationModel::OrSymbol), + input: OpenAI::ModerationCreateParams::Input::Variants, + model: OpenAI::ModerationCreateParams::Model::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 2c880f53..e3952419 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -21,31 +21,8 @@ module OpenAI # your own data as input for the model's response. sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, + model: OpenAI::ResponsesModel::Variants, include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -238,31 +215,8 @@ module OpenAI # your own data as input for the model's response. 
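The Responses `input` union collapsed above spans a bare string and the full list of message items. A speculative sketch of both shapes; the hash keys follow the `EasyInputMessage` form, and the exact field names are assumptions:

# Plain-text input.
client.responses.create(model: "gpt-4o", input: "Summarize this sentence.")

# Structured input: an array of message-shaped items.
client.responses.create(
  model: "gpt-4o",
  input: [{role: :user, content: "Summarize this sentence."}]
)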
sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, + model: OpenAI::ResponsesModel::Variants, include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -305,44 +259,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Responses::ResponseAudioDeltaEvent, - OpenAI::Responses::ResponseAudioDoneEvent, - OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Responses::ResponseCompletedEvent, - OpenAI::Responses::ResponseContentPartAddedEvent, - OpenAI::Responses::ResponseContentPartDoneEvent, - OpenAI::Responses::ResponseCreatedEvent, - OpenAI::Responses::ResponseErrorEvent, - OpenAI::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Responses::ResponseInProgressEvent, - OpenAI::Responses::ResponseFailedEvent, - OpenAI::Responses::ResponseIncompleteEvent, - OpenAI::Responses::ResponseOutputItemAddedEvent, - OpenAI::Responses::ResponseOutputItemDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, - OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, - OpenAI::Responses::ResponseRefusalDeltaEvent, - OpenAI::Responses::ResponseRefusalDoneEvent, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Responses::ResponseTextDeltaEvent, - OpenAI::Responses::ResponseTextDoneEvent, - OpenAI::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent - ) + OpenAI::Responses::ResponseStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index feef44a4..de7fb1ae 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -16,16 +16,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::CursorPage[ - T.any( - 
OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) + OpenAI::Responses::ResponseItem::Variants ] ) end diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 7be8933d..727abd63 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -140,7 +140,7 @@ module OpenAI sig do params( vector_store_id: String, - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any( OpenAI::ComparisonFilter::OrHash, diff --git a/rbi/openai/resources/vector_stores/file_batches.rbi b/rbi/openai/resources/vector_stores/file_batches.rbi index 43c47915..c6aca892 100644 --- a/rbi/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/openai/resources/vector_stores/file_batches.rbi @@ -10,7 +10,12 @@ module OpenAI vector_store_id: String, file_ids: T::Array[String], attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, diff --git a/rbi/openai/resources/vector_stores/files.rbi b/rbi/openai/resources/vector_stores/files.rbi index 8d760b4a..711c88f0 100644 --- a/rbi/openai/resources/vector_stores/files.rbi +++ b/rbi/openai/resources/vector_stores/files.rbi @@ -12,7 +12,12 @@ module OpenAI vector_store_id: String, file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -64,7 +69,12 @@ module OpenAI file_id: String, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::VectorStores::VectorStoreFile) end From 309e78436a1afb1e2afb1b9c6d9ae894a69f52a5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 20 May 2025 21:58:13 +0000 Subject: [PATCH 199/295] chore: refine Yard and Sorbet types and ensure linting is turned on for examples --- Rakefile | 13 +- lib/openai/internal.rb | 3 + .../audio/transcription_create_params.rb | 4 +- .../models/audio/translation_create_params.rb | 4 +- lib/openai/models/file_create_params.rb | 4 +- .../models/image_create_variation_params.rb | 4 +- lib/openai/models/image_edit_params.rb | 8 +- .../models/uploads/part_create_params.rb | 4 +- lib/openai/resources/audio/transcriptions.rb | 4 +- lib/openai/resources/audio/translations.rb | 2 +- lib/openai/resources/files.rb | 2 +- lib/openai/resources/images.rb | 6 +- lib/openai/resources/uploads/parts.rb | 2 +- rbi/openai/internal.rbi | 3 + .../models/audio/speech_create_params.rbi | 21 ++- .../audio/transcription_create_params.rbi | 14 +- .../audio/translation_create_params.rbi | 12 +- .../models/beta/assistant_create_params.rbi | 6 +- 
.../models/beta/assistant_update_params.rbi | 25 +++- .../beta/thread_create_and_run_params.rbi | 16 +-- .../models/beta/threads/run_create_params.rbi | 16 +-- .../chat/chat_completion_audio_param.rbi | 21 ++- .../models/chat/completion_create_params.rbi | 6 +- .../models/completion_create_params.rbi | 9 +- rbi/openai/models/embedding_create_params.rbi | 6 +- rbi/openai/models/file_create_params.rbi | 6 +- .../fine_tuning/dpo_hyperparameters.rbi | 75 +++-------- .../models/fine_tuning/fine_tuning_job.rbi | 25 +--- .../models/fine_tuning/job_create_params.rbi | 81 ++++------- .../reinforcement_hyperparameters.rbi | 126 ++++-------------- .../supervised_hyperparameters.rbi | 63 ++------- .../models/image_create_variation_params.rbi | 15 +-- rbi/openai/models/image_edit_params.rbi | 16 +-- rbi/openai/models/image_generate_params.rbi | 6 +- .../models/moderation_create_params.rbi | 8 +- rbi/openai/models/responses/response.rbi | 7 +- .../responses/response_create_params.rbi | 24 +++- .../models/uploads/part_create_params.rbi | 6 +- rbi/openai/resources/audio/speech.rbi | 5 +- rbi/openai/resources/audio/transcriptions.rbi | 8 +- rbi/openai/resources/audio/translations.rbi | 4 +- rbi/openai/resources/beta/assistants.rbi | 8 +- rbi/openai/resources/beta/threads.rbi | 10 +- rbi/openai/resources/beta/threads/runs.rbi | 10 +- rbi/openai/resources/chat/completions.rbi | 4 +- rbi/openai/resources/completions.rbi | 4 +- rbi/openai/resources/embeddings.rbi | 2 +- rbi/openai/resources/files.rbi | 2 +- rbi/openai/resources/fine_tuning/jobs.rbi | 6 +- rbi/openai/resources/images.rbi | 10 +- rbi/openai/resources/moderations.rbi | 2 +- rbi/openai/resources/responses.rbi | 14 +- rbi/openai/resources/uploads/parts.rbi | 2 +- sig/openai/internal.rbs | 2 + .../audio/transcription_create_params.rbs | 6 +- .../audio/translation_create_params.rbs | 6 +- sig/openai/models/file_create_params.rbs | 6 +- .../models/image_create_variation_params.rbs | 6 +- sig/openai/models/image_edit_params.rbs | 16 +-- .../models/uploads/part_create_params.rbs | 6 +- sig/openai/resources/audio/transcriptions.rbs | 4 +- sig/openai/resources/audio/translations.rbs | 2 +- sig/openai/resources/files.rbs | 2 +- sig/openai/resources/images.rbs | 4 +- sig/openai/resources/uploads/parts.rbs | 2 +- sorbet/config | 2 +- .../internal/sorbet_runtime_support_test.rb | 1 + 67 files changed, 340 insertions(+), 489 deletions(-) diff --git a/Rakefile b/Rakefile index fe1523b8..ad33026e 100644 --- a/Rakefile +++ b/Rakefile @@ -9,6 +9,7 @@ require "rake/clean" require "rubocop/rake_task" tapioca = "sorbet/tapioca" +examples = "examples" ignore_file = ".ignore" CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) @@ -39,7 +40,7 @@ locale = {"LC_ALL" => "C.UTF-8"} desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do - find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] + find = %w[find ./lib ./test ./rbi ./examples -type f -and ( -name *.rb -or -name *.rbi ) -print0] rubocop = %w[rubocop] rubocop += %w[--format github] if ENV.key?("CI") @@ -54,7 +55,7 @@ end desc("Format `*.rb`") multitask(:"format:rb") do # while `syntax_tree` is much faster than `rubocop`, `rubocop` is the only formatter with full syntax support - find = %w[find ./lib ./test -type f -and -name *.rb -print0] + find = %w[find ./lib ./test ./examples -type f -and -name *.rb -print0] fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] sh("#{find.shelljoin} | #{fmt.shelljoin}") end @@ -117,12 +118,14 @@ 
multitask(:"typecheck:steep") do sh(*%w[steep check]) end +directory(examples) + desc("Typecheck `*.rbi`") -multitask(:"typecheck:sorbet") do - sh(*%w[srb typecheck]) +multitask("typecheck:sorbet": examples) do + sh(*%w[srb typecheck --dir], examples) end -file(tapioca) do +directory(tapioca) do sh(*%w[tapioca init]) end diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb index 01f2c4b7..0fc8d8ad 100644 --- a/lib/openai/internal.rb +++ b/lib/openai/internal.rb @@ -13,5 +13,8 @@ module Internal define_sorbet_constant!(:AnyHash) do T.type_alias { T::Hash[Symbol, T.anything] } end + define_sorbet_constant!(:FileInput) do + T.type_alias { T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) } + end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index ff021271..c4a25c41 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -14,7 +14,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute model @@ -98,7 +98,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index ed3107e2..bfcb151b 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -12,7 +12,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute model @@ -52,7 +52,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranslationCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. 
Only `whisper-1` (which is powered by our open source Wh # diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index c88d1052..63dec904 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -10,7 +10,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The File object (not file name) to be uploaded. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute purpose @@ -26,7 +26,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::FileCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index 66ad7ea0..fa6854a2 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -11,7 +11,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :image, OpenAI::Internal::Type::FileInput # @!attribute model @@ -54,7 +54,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageCreateVariationParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index c38c7821..25129f87 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -16,7 +16,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart, Array] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] required :image, union: -> { OpenAI::ImageEditParams::Image } # @!attribute prompt @@ -44,7 +44,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. 
# - # @return [Pathname, StringIO, IO, OpenAI::FilePart, nil] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, nil] optional :mask, OpenAI::Internal::Type::FileInput # @!attribute model @@ -98,13 +98,13 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 1ed39993..1df047de 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -11,14 +11,14 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data # The chunk of bytes for this Part. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :data, OpenAI::Internal::Type::FileInput # @!method initialize(data:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Uploads::PartCreateParams} for more details. # - # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. + # @param data [Pathname, StringIO, IO, String, OpenAI::FilePart] The chunk of bytes for this Part. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 65b01cb8..e6df425e 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -14,7 +14,7 @@ class Transcriptions # # @overload create(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. 
The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # @@ -63,7 +63,7 @@ def create(params) # # @overload create_streaming(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index f43551ab..acaf3c58 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -11,7 +11,7 @@ class Translations # # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index a7cff25c..2726345d 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -30,7 +30,7 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index b04d709d..69150b5f 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -10,7 +10,7 @@ class Images # # @overload create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # @@ -47,13 +47,13 @@ def create_variation(params) # # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). 
The maximum length is 1000 character # # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index 7ad2e042..f971413c 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -23,7 +23,7 @@ class Parts # # @param upload_id [String] The ID of the Upload. # - # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. + # @param data [Pathname, StringIO, IO, String, OpenAI::FilePart] The chunk of bytes for this Part. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/rbi/openai/internal.rbi b/rbi/openai/internal.rbi index 135f6cba..eeddce6e 100644 --- a/rbi/openai/internal.rbi +++ b/rbi/openai/internal.rbi @@ -8,6 +8,9 @@ module OpenAI # this alias might be refined in the future. AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + FileInput = + T.type_alias { T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) } + OMIT = T.let(Object.new.freeze, T.anything) end end diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 087fb21d..87b9e0b9 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -18,14 +18,18 @@ module OpenAI # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. - sig { returns(OpenAI::Audio::SpeechCreateParams::Model::Variants) } + sig { returns(T.any(String, OpenAI::Audio::SpeechModel::OrSymbol)) } attr_accessor :model # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `onyx`, `nova`, `sage`, `shimmer`, and # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). - sig { returns(OpenAI::Audio::SpeechCreateParams::Voice::Variants) } + sig do + returns( + T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol) + ) + end attr_accessor :voice # Control the voice of your generated audio with additional instructions. 
Does not @@ -66,8 +70,9 @@ module OpenAI sig do params( input: String, - model: OpenAI::Audio::SpeechCreateParams::Model::Variants, - voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, + model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), + voice: + T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, @@ -103,8 +108,12 @@ module OpenAI override.returns( { input: String, - model: OpenAI::Audio::SpeechCreateParams::Model::Variants, - voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, + model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), + voice: + T.any( + String, + OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol + ), instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index 26cf8eff..c3dc13df 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -17,15 +17,13 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). - sig do - returns(OpenAI::Audio::TranscriptionCreateParams::Model::Variants) - end + sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } attr_accessor :model # Controls how the audio is cut into chunks. When set to `"auto"`, the server @@ -132,8 +130,8 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( T.any( @@ -204,8 +202,8 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( T.any( diff --git a/rbi/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi index 7714a71b..1dc35166 100644 --- a/rbi/openai/models/audio/translation_create_params.rbi +++ b/rbi/openai/models/audio/translation_create_params.rbi @@ -17,12 +17,12 @@ module OpenAI # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. 
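The new `OpenAI::Internal::FileInput` alias widens every file-typed parameter to also accept a raw `String`. A hedged sketch of the accepted shapes for transcription, reusing `client` from above; judging by the multipart encoder, a `String` is treated as file content rather than a path, so wrap anything ambiguous in `OpenAI::FilePart` when you need to be explicit:

require "pathname"

# Each of these satisfies the FileInput union.
client.audio.transcriptions.create(model: "whisper-1", file: Pathname("speech.mp3"))
client.audio.transcriptions.create(model: "whisper-1", file: File.open("speech.mp3", "rb"))
client.audio.transcriptions.create(model: "whisper-1", file: File.binread("speech.mp3"))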
- sig { returns(OpenAI::Audio::TranslationCreateParams::Model::Variants) } + sig { returns(T.any(String, OpenAI::AudioModel::OrSymbol)) } attr_accessor :model # An optional text to guide the model's style or continue a previous audio @@ -67,8 +67,8 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranslationCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, @@ -104,8 +104,8 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranslationCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi index 87b41eb7..c04793b1 100644 --- a/rbi/openai/models/beta/assistant_create_params.rbi +++ b/rbi/openai/models/beta/assistant_create_params.rbi @@ -20,7 +20,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(OpenAI::Beta::AssistantCreateParams::Model::Variants) } + sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } attr_accessor :model # The description of the assistant. The maximum length is 512 characters. @@ -155,7 +155,7 @@ module OpenAI sig do params( - model: OpenAI::Beta::AssistantCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), @@ -262,7 +262,7 @@ module OpenAI sig do override.returns( { - model: OpenAI::Beta::AssistantCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), diff --git a/rbi/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi index e9a3040d..b044c34b 100644 --- a/rbi/openai/models/beta/assistant_update_params.rbi +++ b/rbi/openai/models/beta/assistant_update_params.rbi @@ -40,14 +40,23 @@ module OpenAI # them. 
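For `model`, the move from the `Variants` alias back to an explicit `T.any(String, OpenAI::ChatModel::OrSymbol)` keeps both spellings callable. A sketch; the enum constant name follows the generator's usual naming scheme and is an assumption:

# A raw model string, or the generated enum member for the same model.
client.beta.assistants.create(model: "gpt-4o")
client.beta.assistants.create(model: OpenAI::ChatModel::GPT_4O)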
sig do returns( - T.nilable(OpenAI::Beta::AssistantUpdateParams::Model::Variants) + T.nilable( + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ) + ) ) end attr_reader :model sig do params( - model: OpenAI::Beta::AssistantUpdateParams::Model::Variants + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ) ).void end attr_writer :model @@ -169,7 +178,11 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -276,7 +289,11 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 16fb54ce..2a603dd6 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -55,11 +55,7 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. - sig do - returns( - T.nilable(OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants) - ) - end + sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable @@ -218,10 +214,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -364,10 +357,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 032415a6..bb9d6882 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -99,11 +99,7 @@ module OpenAI # be used to execute this run. If a value is provided here, it will override the # model associated with the assistant. If not, the model associated with the # assistant will be used. 
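`model` stays `T.nilable` on run creation because omitting it defers to the assistant's configured model, while any value overrides it for that run only. A hedged sketch with placeholder IDs:

# Uses the model configured on the assistant.
client.beta.threads.create_and_run(assistant_id: "asst_123")

# Overrides the assistant's model for this run only.
client.beta.threads.create_and_run(assistant_id: "asst_123", model: "gpt-4o")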
- sig do - returns( - T.nilable(OpenAI::Beta::Threads::RunCreateParams::Model::Variants) - ) - end + sig { returns(T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol))) } attr_accessor :model # Whether to enable @@ -245,10 +241,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -412,10 +405,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index 7639b70c..49a3dd9d 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -23,7 +23,14 @@ module OpenAI # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. - sig { returns(OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants) } + sig do + returns( + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) + ) + end attr_accessor :voice # Parameters for audio output. Required when audio output is requested with @@ -32,7 +39,11 @@ module OpenAI sig do params( format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants + voice: + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) ).returns(T.attached_class) end def self.new( @@ -49,7 +60,11 @@ module OpenAI override.returns( { format_: OpenAI::Chat::ChatCompletionAudioParam::Format::OrSymbol, - voice: OpenAI::Chat::ChatCompletionAudioParam::Voice::Variants + voice: + T.any( + String, + OpenAI::Chat::ChatCompletionAudioParam::Voice::OrSymbol + ) } ) end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 5d9fa08e..49cdaeba 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -42,7 +42,7 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(OpenAI::Chat::CompletionCreateParams::Model::Variants) } + sig { returns(T.any(String, OpenAI::ChatModel::OrSymbol)) } attr_accessor :model # Parameters for audio output. 
Required when audio output is requested with @@ -427,7 +427,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: OpenAI::Chat::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: @@ -692,7 +692,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam ) ], - model: OpenAI::Chat::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam), frequency_penalty: T.nilable(Float), function_call: diff --git a/rbi/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi index 8112f72a..6a8a8a87 100644 --- a/rbi/openai/models/completion_create_params.rbi +++ b/rbi/openai/models/completion_create_params.rbi @@ -16,7 +16,9 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(OpenAI::CompletionCreateParams::Model::Variants) } + sig do + returns(T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol)) + end attr_accessor :model # The prompt(s) to generate completions for, encoded as a string, array of @@ -165,7 +167,7 @@ module OpenAI sig do params( - model: OpenAI::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), @@ -299,7 +301,8 @@ module OpenAI sig do override.returns( { - model: OpenAI::CompletionCreateParams::Model::Variants, + model: + T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index c67465a0..9d60fac5 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -28,7 +28,7 @@ module OpenAI # see all of your available models, or see our # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. - sig { returns(OpenAI::EmbeddingCreateParams::Model::Variants) } + sig { returns(T.any(String, OpenAI::EmbeddingModel::OrSymbol)) } attr_accessor :model # The number of dimensions the resulting output embeddings should have. 
Only @@ -68,7 +68,7 @@ module OpenAI sig do params( input: OpenAI::EmbeddingCreateParams::Input::Variants, - model: OpenAI::EmbeddingCreateParams::Model::Variants, + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, @@ -111,7 +111,7 @@ module OpenAI override.returns( { input: OpenAI::EmbeddingCreateParams::Input::Variants, - model: OpenAI::EmbeddingCreateParams::Model::Variants, + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, diff --git a/rbi/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi index d3a1c945..493d915c 100644 --- a/rbi/openai/models/file_create_params.rbi +++ b/rbi/openai/models/file_create_params.rbi @@ -12,7 +12,7 @@ module OpenAI end # The File object (not file name) to be uploaded. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -24,7 +24,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) @@ -44,7 +44,7 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } diff --git a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi index 715e1e48..4172bfd0 100644 --- a/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/dpo_hyperparameters.rbi @@ -14,83 +14,43 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig do - returns( - T.nilable( - OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size - sig do - params( - batch_size: - OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants - ).void - end + sig { params(batch_size: T.any(Symbol, Integer)).void } attr_writer :batch_size # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. - sig do - returns( - T.nilable(OpenAI::FineTuning::DpoHyperparameters::Beta::Variants) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :beta - sig do - params( - beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants - ).void - end + sig { params(beta: T.any(Symbol, Float)).void } attr_writer :beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig do - returns( - T.nilable( - OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig do - params( - learning_rate_multiplier: - OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants - ).void - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } attr_writer :learning_rate_multiplier # The number of epochs to train the model for. 
An epoch refers to one full cycle # through the training dataset. - sig do - returns( - T.nilable(OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs - sig do - params( - n_epochs: OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants - ).void - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } attr_writer :n_epochs # The hyperparameters used for the DPO fine-tuning job. sig do params( - batch_size: - OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants, - beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants, - n_epochs: OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) end def self.new( @@ -112,13 +72,10 @@ module OpenAI sig do override.returns( { - batch_size: - OpenAI::FineTuning::DpoHyperparameters::BatchSize::Variants, - beta: OpenAI::FineTuning::DpoHyperparameters::Beta::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::DpoHyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::DpoHyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + beta: T.any(Symbol, Float), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) } ) end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index cacb180f..8e4d7a3a 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -338,12 +338,7 @@ module OpenAI end attr_reader :learning_rate_multiplier - sig do - params( - learning_rate_multiplier: - OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants - ).void - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle @@ -357,26 +352,16 @@ module OpenAI end attr_reader :n_epochs - sig do - params( - n_epochs: - OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants - ).void - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. sig do params( - batch_size: - T.nilable( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants - ), - learning_rate_multiplier: - OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants + batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) end def self.new( diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi index f714b263..32e49ed1 100644 --- a/rbi/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/openai/models/fine_tuning/job_create_params.rbi @@ -17,7 +17,11 @@ module OpenAI # The name of the model to fine-tune. 
You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). - sig { returns(OpenAI::FineTuning::JobCreateParams::Model::Variants) } + sig do + returns( + T.any(String, OpenAI::FineTuning::JobCreateParams::Model::OrSymbol) + ) + end attr_accessor :model # The ID of an uploaded file that contains training data. @@ -118,7 +122,11 @@ module OpenAI sig do params( - model: OpenAI::FineTuning::JobCreateParams::Model::Variants, + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, @@ -202,7 +210,11 @@ module OpenAI sig do override.returns( { - model: OpenAI::FineTuning::JobCreateParams::Model::Variants, + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, @@ -282,71 +294,35 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size - sig do - params( - batch_size: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants - ).void - end + sig { params(batch_size: T.any(Symbol, Integer)).void } attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig do - params( - learning_rate_multiplier: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants - ).void - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig do - returns( - T.nilable( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs - sig do - params( - n_epochs: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants - ).void - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. 
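# A hedged sketch of what the simplified unions above accept: each
# hyperparameter field takes either the literal `:auto` or a concrete
# number. The values below are illustrative placeholders, not tuned
# recommendations.
hyperparameters = OpenAI::FineTuning::JobCreateParams::Hyperparameters.new(
  batch_size: :auto,              # Symbol arm of T.any(Symbol, Integer)
  learning_rate_multiplier: 1.8,  # Float arm of T.any(Symbol, Float)
  n_epochs: 3                     # Integer arm of T.any(Symbol, Integer)
)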
sig do params( - batch_size: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) end def self.new( @@ -365,12 +341,9 @@ module OpenAI sig do override.returns( { - batch_size: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::BatchSize::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::JobCreateParams::Hyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) } ) end diff --git a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi index c2d44f4c..3105e2ef 100644 --- a/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/reinforcement_hyperparameters.rbi @@ -14,113 +14,47 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size - sig do - params( - batch_size: - OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants - ).void - end + sig { params(batch_size: T.any(Symbol, Integer)).void } attr_writer :batch_size # Multiplier on amount of compute used for exploring search space during training. - sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :compute_multiplier - sig do - params( - compute_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants - ).void - end + sig { params(compute_multiplier: T.any(Symbol, Float)).void } attr_writer :compute_multiplier # The number of training steps between evaluation runs. - sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :eval_interval - sig do - params( - eval_interval: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants - ).void - end + sig { params(eval_interval: T.any(Symbol, Integer)).void } attr_writer :eval_interval # Number of evaluation samples to generate per training step. - sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :eval_samples - sig do - params( - eval_samples: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants - ).void - end + sig { params(eval_samples: T.any(Symbol, Integer)).void } attr_writer :eval_samples # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. 
- sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig do - params( - learning_rate_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants - ).void - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig do - returns( - T.nilable( - OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs - sig do - params( - n_epochs: - OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants - ).void - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } attr_writer :n_epochs # Level of reasoning effort. @@ -144,18 +78,12 @@ module OpenAI # The hyperparameters used for the reinforcement fine-tuning job. sig do params( - batch_size: - OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants, - compute_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants, - eval_interval: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants, - eval_samples: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants, + batch_size: T.any(Symbol, Integer), + compute_multiplier: T.any(Symbol, Float), + eval_interval: T.any(Symbol, Integer), + eval_samples: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer), reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol ).returns(T.attached_class) @@ -184,18 +112,12 @@ module OpenAI sig do override.returns( { - batch_size: - OpenAI::FineTuning::ReinforcementHyperparameters::BatchSize::Variants, - compute_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::ComputeMultiplier::Variants, - eval_interval: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalInterval::Variants, - eval_samples: - OpenAI::FineTuning::ReinforcementHyperparameters::EvalSamples::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::ReinforcementHyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::ReinforcementHyperparameters::NEpochs::Variants, + batch_size: T.any(Symbol, Integer), + compute_multiplier: T.any(Symbol, Float), + eval_interval: T.any(Symbol, Integer), + eval_samples: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer), reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort::OrSymbol } diff --git a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi index 8e743013..dc8167ab 100644 --- a/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi +++ b/rbi/openai/models/fine_tuning/supervised_hyperparameters.rbi @@ -14,70 +14,34 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
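# A hedged usage sketch for the reinforcement sigs above; `:medium` is
# assumed to be a valid ReasoningEffort member, and the numbers are
# placeholders rather than tuned values.
rft = OpenAI::FineTuning::ReinforcementHyperparameters.new(
  batch_size: :auto,        # T.any(Symbol, Integer)
  compute_multiplier: 1.0,  # T.any(Symbol, Float)
  eval_interval: 5,         # T.any(Symbol, Integer)
  reasoning_effort: :medium
)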
- sig do - returns( - T.nilable( - OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :batch_size - sig do - params( - batch_size: - OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants - ).void - end + sig { params(batch_size: T.any(Symbol, Integer)).void } attr_writer :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig do - returns( - T.nilable( - OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Float))) } attr_reader :learning_rate_multiplier - sig do - params( - learning_rate_multiplier: - OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants - ).void - end + sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } attr_writer :learning_rate_multiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig do - returns( - T.nilable( - OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants - ) - ) - end + sig { returns(T.nilable(T.any(Symbol, Integer))) } attr_reader :n_epochs - sig do - params( - n_epochs: - OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants - ).void - end + sig { params(n_epochs: T.any(Symbol, Integer)).void } attr_writer :n_epochs # The hyperparameters used for the fine-tuning job. sig do params( - batch_size: - OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) end def self.new( @@ -96,12 +60,9 @@ module OpenAI sig do override.returns( { - batch_size: - OpenAI::FineTuning::SupervisedHyperparameters::BatchSize::Variants, - learning_rate_multiplier: - OpenAI::FineTuning::SupervisedHyperparameters::LearningRateMultiplier::Variants, - n_epochs: - OpenAI::FineTuning::SupervisedHyperparameters::NEpochs::Variants + batch_size: T.any(Symbol, Integer), + learning_rate_multiplier: T.any(Symbol, Float), + n_epochs: T.any(Symbol, Integer) } ) end diff --git a/rbi/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi index 897c141a..1c53b22b 100644 --- a/rbi/openai/models/image_create_variation_params.rbi +++ b/rbi/openai/models/image_create_variation_params.rbi @@ -13,14 +13,12 @@ module OpenAI # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :image # The model to use for image generation. Only `dall-e-2` is supported at this # time. - sig do - returns(T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants)) - end + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. 
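# A hedged sketch of the widened `image:` parameter above: any
# OpenAI::Internal::FileInput arm (Pathname, StringIO, IO, String, or
# OpenAI::FilePart) should satisfy the sig. The client setup and file
# name are assumed for illustration.
require "openai"
require "pathname"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
client.images.create_variation(
  image: Pathname("otter.png"),
  model: :"dall-e-2",
  n: 1
)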
@@ -57,8 +55,8 @@ module OpenAI sig do params( - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), + image: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable( @@ -96,9 +94,8 @@ module OpenAI sig do override.returns( { - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: - T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), + image: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable( diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 6413df44..b03c85dd 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -40,18 +40,16 @@ module OpenAI # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. - sig do - returns(T.nilable(T.any(Pathname, StringIO, IO, OpenAI::FilePart))) - end + sig { returns(T.nilable(OpenAI::Internal::FileInput)) } attr_reader :mask - sig { params(mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart)).void } + sig { params(mask: OpenAI::Internal::FileInput).void } attr_writer :mask # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. - sig { returns(T.nilable(OpenAI::ImageEditParams::Model::Variants)) } + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # The number of images to generate. Must be between 1 and 10. @@ -93,8 +91,8 @@ module OpenAI image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(OpenAI::ImageEditParams::Model::Variants), + mask: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: @@ -163,8 +161,8 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(OpenAI::ImageEditParams::Model::Variants), + mask: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index f11b2548..c81dbbcf 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -32,7 +32,7 @@ module OpenAI # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. - sig { returns(T.nilable(OpenAI::ImageGenerateParams::Model::Variants)) } + sig { returns(T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol))) } attr_accessor :model # Control the content-moderation level for images generated by `gpt-image-1`. 
Must @@ -109,7 +109,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), @@ -192,7 +192,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), diff --git a/rbi/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi index aba9c630..0d5c60fa 100644 --- a/rbi/openai/models/moderation_create_params.rbi +++ b/rbi/openai/models/moderation_create_params.rbi @@ -21,19 +21,19 @@ module OpenAI # learn about available models # [here](https://platform.openai.com/docs/models#moderation). sig do - returns(T.nilable(OpenAI::ModerationCreateParams::Model::Variants)) + returns(T.nilable(T.any(String, OpenAI::ModerationModel::OrSymbol))) end attr_reader :model sig do - params(model: OpenAI::ModerationCreateParams::Model::Variants).void + params(model: T.any(String, OpenAI::ModerationModel::OrSymbol)).void end attr_writer :model sig do params( input: OpenAI::ModerationCreateParams::Input::Variants, - model: OpenAI::ModerationCreateParams::Model::Variants, + model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -54,7 +54,7 @@ module OpenAI override.returns( { input: OpenAI::ModerationCreateParams::Input::Variants, - model: OpenAI::ModerationCreateParams::Model::Variants, + model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 0e57a678..b6b6c611 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -233,7 +233,12 @@ module OpenAI T.nilable(OpenAI::Responses::Response::IncompleteDetails::OrHash), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: OpenAI::ResponsesModel::Variants, + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), output: T::Array[ T.any( diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index eb48a2f0..87695615 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -34,7 +34,15 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig { returns(OpenAI::ResponsesModel::Variants) } + sig do + returns( + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ) + ) + end attr_accessor :model # Specify additional output data to include in the model response. 
Currently @@ -258,7 +266,12 @@ module OpenAI sig do params( input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: OpenAI::ResponsesModel::Variants, + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -436,7 +449,12 @@ module OpenAI override.returns( { input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: OpenAI::ResponsesModel::Variants, + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] diff --git a/rbi/openai/models/uploads/part_create_params.rbi b/rbi/openai/models/uploads/part_create_params.rbi index 1b069db5..031b224d 100644 --- a/rbi/openai/models/uploads/part_create_params.rbi +++ b/rbi/openai/models/uploads/part_create_params.rbi @@ -13,12 +13,12 @@ module OpenAI end # The chunk of bytes for this Part. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :data sig do params( - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -32,7 +32,7 @@ module OpenAI sig do override.returns( { - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi index 6bbb1186..2c8d03a3 100644 --- a/rbi/openai/resources/audio/speech.rbi +++ b/rbi/openai/resources/audio/speech.rbi @@ -8,8 +8,9 @@ module OpenAI sig do params( input: String, - model: OpenAI::Audio::SpeechCreateParams::Model::Variants, - voice: OpenAI::Audio::SpeechCreateParams::Voice::Variants, + model: T.any(String, OpenAI::Audio::SpeechModel::OrSymbol), + voice: + T.any(String, OpenAI::Audio::SpeechCreateParams::Voice::OrSymbol), instructions: String, response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index f48d5abc..187218b0 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -10,8 +10,8 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( T.any( @@ -91,8 +91,8 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranscriptionCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( T.any( diff --git a/rbi/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi index 7c462f9f..bd8adba2 100644 --- a/rbi/openai/resources/audio/translations.rbi +++ b/rbi/openai/resources/audio/translations.rbi @@ -7,8 +7,8 @@ module OpenAI # Translates audio into English. 
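# A hedged sketch of the narrowed audio params above: `file:` is any
# FileInput arm and `model:` is a plain String or an AudioModel symbol;
# :"whisper-1" is assumed to be a member of that enum, and the path is a
# placeholder.
client.audio.transcriptions.create(
  file: Pathname("meeting.mp3"),
  model: :"whisper-1"
)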
sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: OpenAI::Audio::TranslationCreateParams::Model::Variants, + file: OpenAI::Internal::FileInput, + model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, diff --git a/rbi/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi index 3ec75274..d58ecbb1 100644 --- a/rbi/openai/resources/beta/assistants.rbi +++ b/rbi/openai/resources/beta/assistants.rbi @@ -7,7 +7,7 @@ module OpenAI # Create an assistant with a model and instructions. sig do params( - model: OpenAI::Beta::AssistantCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), @@ -132,7 +132,11 @@ module OpenAI description: T.nilable(String), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: OpenAI::Beta::AssistantUpdateParams::Model::Variants, + model: + T.any( + String, + OpenAI::Beta::AssistantUpdateParams::Model::OrSymbol + ), name: T.nilable(String), reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index 360e8120..738bcf29 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -112,10 +112,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( @@ -265,10 +262,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::ThreadCreateAndRunParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, response_format: T.nilable( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index c8190d18..ba97e4db 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -29,10 +29,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: @@ -208,10 +205,7 @@ module OpenAI max_completion_tokens: T.nilable(Integer), max_prompt_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.nilable( - OpenAI::Beta::Threads::RunCreateParams::Model::Variants - ), + model: T.nilable(T.any(String, OpenAI::ChatModel::OrSymbol)), parallel_tool_calls: T::Boolean, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 9abe48e4..6193c799 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -39,7 
+39,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: OpenAI::Chat::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: @@ -326,7 +326,7 @@ module OpenAI OpenAI::Chat::ChatCompletionFunctionMessageParam::OrHash ) ], - model: OpenAI::Chat::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::ChatModel::OrSymbol), audio: T.nilable(OpenAI::Chat::ChatCompletionAudioParam::OrHash), frequency_penalty: T.nilable(Float), function_call: diff --git a/rbi/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi index e99bb46e..507da5ba 100644 --- a/rbi/openai/resources/completions.rbi +++ b/rbi/openai/resources/completions.rbi @@ -8,7 +8,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: OpenAI::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), @@ -148,7 +148,7 @@ module OpenAI # Creates a completion for the provided prompt and parameters. sig do params( - model: OpenAI::CompletionCreateParams::Model::Variants, + model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index fc6482f7..bfdafb67 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -7,7 +7,7 @@ module OpenAI sig do params( input: OpenAI::EmbeddingCreateParams::Input::Variants, - model: OpenAI::EmbeddingCreateParams::Model::Variants, + model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: OpenAI::EmbeddingCreateParams::EncodingFormat::OrSymbol, diff --git a/rbi/openai/resources/files.rbi b/rbi/openai/resources/files.rbi index 022613a9..9ac9665f 100644 --- a/rbi/openai/resources/files.rbi +++ b/rbi/openai/resources/files.rbi @@ -26,7 +26,7 @@ module OpenAI # storage limits. sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::FileObject) diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index 3a699c44..e60f057d 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -16,7 +16,11 @@ module OpenAI # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) sig do params( - model: OpenAI::FineTuning::JobCreateParams::Model::Variants, + model: + T.any( + String, + OpenAI::FineTuning::JobCreateParams::Model::OrSymbol + ), training_file: String, hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters::OrHash, diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index e05ab0e0..e3a93ec3 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -6,8 +6,8 @@ module OpenAI # Creates a variation of a given image. This endpoint only supports `dall-e-2`. 
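# A hedged sketch of the chat/completions unions above: `model:` accepts
# either a plain String (e.g. a fine-tuned model name) or a ChatModel
# symbol; :"gpt-4.1" is assumed to be a member of that enum.
client.chat.completions.create(
  messages: [{role: :user, content: "Say hello"}],
  model: :"gpt-4.1"
)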
sig do params( - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(OpenAI::ImageCreateVariationParams::Model::Variants), + image: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: T.nilable( @@ -49,8 +49,8 @@ module OpenAI image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), - model: T.nilable(OpenAI::ImageEditParams::Model::Variants), + mask: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: @@ -119,7 +119,7 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), - model: T.nilable(OpenAI::ImageGenerateParams::Model::Variants), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), moderation: T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), n: T.nilable(Integer), diff --git a/rbi/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi index 4b79dfd5..38d862ca 100644 --- a/rbi/openai/resources/moderations.rbi +++ b/rbi/openai/resources/moderations.rbi @@ -8,7 +8,7 @@ module OpenAI sig do params( input: OpenAI::ModerationCreateParams::Input::Variants, - model: OpenAI::ModerationCreateParams::Model::Variants, + model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::ModerationCreateResponse) end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index e3952419..6b3aa247 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -22,7 +22,12 @@ module OpenAI sig do params( input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: OpenAI::ResponsesModel::Variants, + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -216,7 +221,12 @@ module OpenAI sig do params( input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: OpenAI::ResponsesModel::Variants, + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] diff --git a/rbi/openai/resources/uploads/parts.rbi b/rbi/openai/resources/uploads/parts.rbi index ae17f0fc..055b46b9 100644 --- a/rbi/openai/resources/uploads/parts.rbi +++ b/rbi/openai/resources/uploads/parts.rbi @@ -18,7 +18,7 @@ module OpenAI sig do params( upload_id: String, - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Uploads::UploadPart) end diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs index 8dc7f62a..105072ce 100644 --- a/sig/openai/internal.rbs +++ b/sig/openai/internal.rbs @@ -2,6 +2,8 @@ module OpenAI module Internal extend OpenAI::Internal::Util::SorbetRuntimeSupport + type file_input = Pathname | StringIO | IO | String | OpenAI::FilePart + OMIT: Object end end diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 18d8e5aa..92e65b19 100644 
--- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type transcription_create_params = { - file: (Pathname | StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, include: ::Array[OpenAI::Models::Audio::transcription_include], @@ -19,7 +19,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model @@ -56,7 +56,7 @@ module OpenAI ) -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 5a9c7d5f..94188d94 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type translation_create_params = { - file: (Pathname | StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::Audio::TranslationCreateParams::model @@ -34,7 +34,7 @@ module OpenAI def temperature=: (Float) -> Float def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 54eaab64..7973f161 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type file_create_params = { - file: (Pathname | StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose } & OpenAI::Internal::Type::request_parameters @@ -11,12 +11,12 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor purpose: OpenAI::Models::file_purpose def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void diff 
--git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index e47b6178..8d1342cf 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type image_create_variation_params = { - image: (Pathname | StringIO | IO | OpenAI::FilePart), + image: OpenAI::Internal::file_input, model: OpenAI::Models::ImageCreateVariationParams::model?, n: Integer?, response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor image: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor image: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::ImageCreateVariationParams::model? @@ -30,7 +30,7 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: Pathname | StringIO | IO | OpenAI::FilePart, + image: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 75a3c9e4..ef128fa7 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -5,7 +5,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, background: OpenAI::Models::ImageEditParams::background?, - mask: (Pathname | StringIO | IO | OpenAI::FilePart), + mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, quality: OpenAI::Models::ImageEditParams::quality?, @@ -25,11 +25,9 @@ module OpenAI attr_accessor background: OpenAI::Models::ImageEditParams::background? - attr_reader mask: (Pathname | StringIO | IO | OpenAI::FilePart)? + attr_reader mask: OpenAI::Internal::file_input? - def mask=: ( - Pathname | StringIO | IO | OpenAI::FilePart - ) -> (Pathname | StringIO | IO | OpenAI::FilePart) + def mask=: (OpenAI::Internal::file_input) -> OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::ImageEditParams::model? 
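# The `file_input` alias above unifies the accepted upload forms; a hedged
# sketch of equivalent calls (file names, purposes, and the FilePart
# keyword names are assumed for illustration):
client.files.create(file: Pathname("train.jsonl"), purpose: :"fine-tune")
client.files.create(file: StringIO.new("{}"), purpose: :assistants)
client.files.create(
  file: OpenAI::FilePart.new(StringIO.new("{}"), filename: "data.jsonl"),
  purpose: :assistants
)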
@@ -49,7 +47,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, - ?mask: Pathname | StringIO | IO | OpenAI::FilePart, + ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, @@ -60,11 +58,7 @@ module OpenAI ) -> void type image = - Pathname - | StringIO - | IO - | OpenAI::FilePart - | ::Array[Pathname | StringIO | IO | OpenAI::FilePart] + OpenAI::Internal::file_input | ::Array[OpenAI::Internal::file_input] module Image extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 7a94c0ae..380d4829 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -2,17 +2,17 @@ module OpenAI module Models module Uploads type part_create_params = - { data: (Pathname | StringIO | IO | OpenAI::FilePart) } + { data: OpenAI::Internal::file_input } & OpenAI::Internal::Type::request_parameters class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor data: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor data: OpenAI::Internal::file_input def initialize: ( - data: Pathname | StringIO | IO | OpenAI::FilePart, + data: OpenAI::Internal::file_input, ?request_options: OpenAI::request_opts ) -> void end diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 0d72b943..9f70fdd9 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Transcriptions def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], @@ -16,7 +16,7 @@ module OpenAI ) -> OpenAI::Models::Audio::transcription_create_response def create_streaming: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index a2cece60..bd3560b7 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Translations def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index 2af65729..49435b99 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Files def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, 
purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> OpenAI::FileObject diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 387d37d0..f441f385 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Images def create_variation: ( - image: Pathname | StringIO | IO | OpenAI::FilePart, + image: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, - ?mask: Pathname | StringIO | IO | OpenAI::FilePart, + ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index 20aac99a..41bfee05 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -4,7 +4,7 @@ module OpenAI class Parts def create: ( String upload_id, - data: Pathname | StringIO | IO | OpenAI::FilePart, + data: OpenAI::Internal::file_input, ?request_options: OpenAI::request_opts ) -> OpenAI::Uploads::UploadPart diff --git a/sorbet/config b/sorbet/config index 538c1528..6fe84ed8 100644 --- a/sorbet/config +++ b/sorbet/config @@ -1,2 +1,2 @@ ---dir=rbi +--dir=rbi/ --ignore=test/ diff --git a/test/openai/internal/sorbet_runtime_support_test.rb b/test/openai/internal/sorbet_runtime_support_test.rb index 1c48a5ff..d1179f7e 100644 --- a/test/openai/internal/sorbet_runtime_support_test.rb +++ b/test/openai/internal/sorbet_runtime_support_test.rb @@ -25,6 +25,7 @@ def test_nil_aliases err = OpenAI::Internal::Util::SorbetRuntimeSupport::MissingSorbetRuntimeError assert_raises(err) { OpenAI::Internal::AnyHash } + assert_raises(err) { OpenAI::Internal::FileInput } assert_raises(err) { OpenAI::Internal::Type::Converter::Input } assert_raises(err) { OpenAI::Internal::Type::Converter::CoerceState } assert_raises(err) { OpenAI::Internal::Type::Converter::DumpState } From be888272595181016d72c037c914d6d777bd2be9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 15:11:26 +0000 Subject: [PATCH 200/295] chore(docs): grammar improvements --- README.md | 6 +++--- SECURITY.md | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 5cb3bf90..73d47cda 100644 --- a/README.md +++ b/README.md @@ -220,7 +220,7 @@ puts(chat_completion[:my_undocumented_property]) #### Undocumented request params -If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request as seen in examples above. +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request, as seen in the examples above. #### Undocumented endpoints @@ -238,7 +238,7 @@ response = client.request( ### Concurrency & connection pooling -The `OpenAI::Client` instances are threadsafe, but only are fork-safe when there are no in-flight HTTP requests. 
+The `OpenAI::Client` instances are threadsafe, but are only fork-safe when there are no in-flight HTTP requests. Each instance of `OpenAI::Client` has its own HTTP connection pool with a default size of 99. As such, we recommend instantiating the client once per application in most settings. @@ -297,7 +297,7 @@ openai.chat.completions.create( # … ) -# Literal values is also permissible: +# Literal values are also permissible: openai.chat.completions.create( reasoning_effort: :low, # … ) diff --git a/SECURITY.md b/SECURITY.md index 3b3bd8a6..4adb0c54 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -16,13 +16,13 @@ before making any information public. ## Reporting Non-SDK Related Security Issues If you encounter security issues that are not directly related to SDKs but pertain to the services -or products provided by OpenAI please follow the respective company's security reporting guidelines. +or products provided by OpenAI, please follow the respective company's security reporting guidelines. ### OpenAI Terms and Policies Our Security Policy can be found at [Security Policy URL](https://openai.com/policies/coordinated-vulnerability-disclosure-policy). -Please contact disclosure@openai.com for any questions or concerns regarding security of our services. +Please contact disclosure@openai.com for any questions or concerns regarding the security of our services. --- From a7b2aa759db645d688568ada5ee2187afd954cb4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 15:59:30 +0000 Subject: [PATCH 201/295] chore: force utf-8 locale via `RUBYOPT` when formatting --- Rakefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Rakefile b/Rakefile index ad33026e..bc850886 100644 --- a/Rakefile +++ b/Rakefile @@ -36,7 +36,7 @@ multitask(:test) do end xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] -locale = {"LC_ALL" => "C.UTF-8"} +ruby_opt = {"RUBYOPT" => [ENV["RUBYOPT"], "--encoding=UTF-8"].compact.join(" ")} desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do @@ -64,7 +64,7 @@ desc("Format `*.rbi`") multitask(:"format:rbi") do find = %w[find ./rbi -type f -and -name *.rbi -print0] fmt = xargs + %w[stree write --] - sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") + sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") end desc("Format `*.rbs`") @@ -100,7 +100,7 @@ multitask(:"format:rbs") do # transform class aliases to type aliases, which syntax tree has no trouble with sh("#{find.shelljoin} | #{pre.shelljoin}") # run syntax tree to format `*.rbs` files - sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") do + sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") do success = _1 end # transform type aliases back to class aliases From b4257d27df47b7b90cff9a12f9346946b8fee70c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 16:20:42 +0000 Subject: [PATCH 202/295] feat(api): new API tools --- .stats.yml | 6 +- lib/openai.rb | 18 + lib/openai/models/responses/response.rb | 21 +- .../response_code_interpreter_tool_call.rb | 12 +- .../responses/response_create_params.rb | 21 +- ...response_image_gen_call_completed_event.rb | 37 + ...esponse_image_gen_call_generating_event.rb | 45 + ...sponse_image_gen_call_in_progress_event.rb | 44 + ...onse_image_gen_call_partial_image_event.rb | 64 + .../models/responses/response_input_item.rb | 507 +++++++- lib/openai/models/responses/response_item.rb | 507 
+++++++- .../models/responses/response_item_list.rb | 4 +- ...response_mcp_call_arguments_delta_event.rb | 45 + .../response_mcp_call_arguments_done_event.rb | 44 + .../response_mcp_call_completed_event.rb | 20 + .../response_mcp_call_failed_event.rb | 20 + .../response_mcp_call_in_progress_event.rb | 36 + ...response_mcp_list_tools_completed_event.rb | 20 + .../response_mcp_list_tools_failed_event.rb | 20 + ...sponse_mcp_list_tools_in_progress_event.rb | 21 + .../models/responses/response_output_item.rb | 398 ++++++- .../response_output_item_added_event.rb | 4 +- .../response_output_item_done_event.rb | 4 +- ...onse_output_text_annotation_added_event.rb | 60 + .../models/responses/response_queued_event.rb | 28 + .../response_reasoning_delta_event.rb | 52 + .../response_reasoning_done_event.rb | 52 + .../response_reasoning_summary_delta_event.rb | 56 + .../response_reasoning_summary_done_event.rb | 52 + .../models/responses/response_status.rb | 4 +- .../models/responses/response_stream_event.rb | 77 +- lib/openai/models/responses/tool.rb | 496 +++++++- .../models/responses/tool_choice_types.rb | 9 + lib/openai/resources/responses.rb | 18 +- lib/openai/resources/responses/input_items.rb | 2 +- rbi/openai/models/responses/response.rbi | 28 +- .../response_code_interpreter_tool_call.rbi | 29 +- .../responses/response_create_params.rbi | 34 +- ...esponse_image_gen_call_completed_event.rbi | 54 + ...sponse_image_gen_call_generating_event.rbi | 71 ++ ...ponse_image_gen_call_in_progress_event.rbi | 67 ++ ...nse_image_gen_call_partial_image_event.rbi | 85 ++ .../models/responses/response_input_item.rbi | 793 +++++++++++++ rbi/openai/models/responses/response_item.rbi | 790 ++++++++++++- .../models/responses/response_item_list.rbi | 10 +- ...esponse_mcp_call_arguments_delta_event.rbi | 68 ++ ...response_mcp_call_arguments_done_event.rbi | 67 ++ .../response_mcp_call_completed_event.rbi | 33 + .../response_mcp_call_failed_event.rbi | 33 + .../response_mcp_call_in_progress_event.rbi | 53 + ...esponse_mcp_list_tools_completed_event.rbi | 33 + .../response_mcp_list_tools_failed_event.rbi | 33 + ...ponse_mcp_list_tools_in_progress_event.rbi | 34 + .../models/responses/response_output_item.rbi | 615 +++++++++- .../response_output_item_added_event.rbi | 8 +- .../response_output_item_done_event.rbi | 8 +- ...nse_output_text_annotation_added_event.rbi | 83 ++ .../responses/response_queued_event.rbi | 51 + .../response_reasoning_delta_event.rbi | 75 ++ .../response_reasoning_done_event.rbi | 75 ++ ...response_reasoning_summary_delta_event.rbi | 77 ++ .../response_reasoning_summary_done_event.rbi | 75 ++ .../models/responses/response_status.rbi | 5 +- .../responses/response_stream_event.rbi | 20 +- rbi/openai/models/responses/tool.rbi | 1050 ++++++++++++++++- .../models/responses/tool_choice_types.rbi | 21 + rbi/openai/resources/responses.rbi | 20 +- sig/openai/models/responses/response.rbs | 4 + .../response_code_interpreter_tool_call.rbs | 8 +- .../responses/response_create_params.rbs | 4 + ...esponse_image_gen_call_completed_event.rbs | 26 + ...sponse_image_gen_call_generating_event.rbs | 32 + ...ponse_image_gen_call_in_progress_event.rbs | 30 + ...nse_image_gen_call_partial_image_event.rbs | 38 + .../models/responses/response_input_item.rbs | 306 +++++ sig/openai/models/responses/response_item.rbs | 306 +++++ ...esponse_mcp_call_arguments_delta_event.rbs | 30 + ...response_mcp_call_arguments_done_event.rbs | 30 + .../response_mcp_call_completed_event.rbs | 14 + .../response_mcp_call_failed_event.rbs | 14 
+ .../response_mcp_call_in_progress_event.rbs | 26 + ...esponse_mcp_list_tools_completed_event.rbs | 14 + .../response_mcp_list_tools_failed_event.rbs | 14 + ...ponse_mcp_list_tools_in_progress_event.rbs | 14 + .../models/responses/response_output_item.rbs | 238 ++++ ...nse_output_text_annotation_added_event.rbs | 38 + .../responses/response_queued_event.rbs | 19 + .../response_reasoning_delta_event.rbs | 34 + .../response_reasoning_done_event.rbs | 34 + ...response_reasoning_summary_delta_event.rbs | 34 + .../response_reasoning_summary_done_event.rbs | 34 + .../models/responses/response_status.rbs | 5 +- .../responses/response_stream_event.rbs | 18 + sig/openai/models/responses/tool.rbs | 349 +++++- .../models/responses/tool_choice_types.rbs | 6 + sig/openai/resources/responses.rbs | 2 + .../resources/responses/input_items_test.rb | 59 + test/openai/resources/responses_test.rb | 2 + 98 files changed, 9036 insertions(+), 68 deletions(-) create mode 100644 lib/openai/models/responses/response_image_gen_call_completed_event.rb create mode 100644 lib/openai/models/responses/response_image_gen_call_generating_event.rb create mode 100644 lib/openai/models/responses/response_image_gen_call_in_progress_event.rb create mode 100644 lib/openai/models/responses/response_image_gen_call_partial_image_event.rb create mode 100644 lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb create mode 100644 lib/openai/models/responses/response_mcp_call_arguments_done_event.rb create mode 100644 lib/openai/models/responses/response_mcp_call_completed_event.rb create mode 100644 lib/openai/models/responses/response_mcp_call_failed_event.rb create mode 100644 lib/openai/models/responses/response_mcp_call_in_progress_event.rb create mode 100644 lib/openai/models/responses/response_mcp_list_tools_completed_event.rb create mode 100644 lib/openai/models/responses/response_mcp_list_tools_failed_event.rb create mode 100644 lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb create mode 100644 lib/openai/models/responses/response_output_text_annotation_added_event.rb create mode 100644 lib/openai/models/responses/response_queued_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_delta_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_done_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_summary_delta_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_summary_done_event.rb create mode 100644 rbi/openai/models/responses/response_image_gen_call_completed_event.rbi create mode 100644 rbi/openai/models/responses/response_image_gen_call_generating_event.rbi create mode 100644 rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi create mode 100644 rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_call_completed_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_call_failed_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi create mode 100644 rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi create mode 100644 
rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi create mode 100644 rbi/openai/models/responses/response_output_text_annotation_added_event.rbi create mode 100644 rbi/openai/models/responses/response_queued_event.rbi create mode 100644 rbi/openai/models/responses/response_reasoning_delta_event.rbi create mode 100644 rbi/openai/models/responses/response_reasoning_done_event.rbi create mode 100644 rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi create mode 100644 rbi/openai/models/responses/response_reasoning_summary_done_event.rbi create mode 100644 sig/openai/models/responses/response_image_gen_call_completed_event.rbs create mode 100644 sig/openai/models/responses/response_image_gen_call_generating_event.rbs create mode 100644 sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs create mode 100644 sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_call_completed_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_call_failed_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_call_in_progress_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs create mode 100644 sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs create mode 100644 sig/openai/models/responses/response_output_text_annotation_added_event.rbs create mode 100644 sig/openai/models/responses/response_queued_event.rbs create mode 100644 sig/openai/models/responses/response_reasoning_delta_event.rbs create mode 100644 sig/openai/models/responses/response_reasoning_done_event.rbs create mode 100644 sig/openai/models/responses/response_reasoning_summary_delta_event.rbs create mode 100644 sig/openai/models/responses/response_reasoning_summary_done_event.rbs diff --git a/.stats.yml b/.stats.yml index 297d33cc..b5ab8d66 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d51538ac955164de98b0c94a0a4718d96623fe39bf31a1d168be06c93c94e645.yml -openapi_spec_hash: 33e00a48df8f94c94f46290c489f132b -config_hash: c42d37618b8628ce7e1c76437db5dd8f +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a5651cb97f86d1e2531af6aef8c5230f1ea350560fbae790ca2e481b30a6c217.yml +openapi_spec_hash: 66a5104fd3bb43383cf919225df7a6fd +config_hash: bb657c3fed232a56930035de3aaed936 diff --git a/lib/openai.rb b/lib/openai.rb index ef64bfc0..f853f3fe 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -360,6 +360,10 @@ require_relative "openai/models/responses/response_function_tool_call_item" require_relative "openai/models/responses/response_function_tool_call_output_item" require_relative "openai/models/responses/response_function_web_search" +require_relative "openai/models/responses/response_image_gen_call_completed_event" +require_relative "openai/models/responses/response_image_gen_call_generating_event" +require_relative "openai/models/responses/response_image_gen_call_in_progress_event" +require_relative "openai/models/responses/response_image_gen_call_partial_image_event" 
require_relative "openai/models/responses/response_includable" require_relative "openai/models/responses/response_incomplete_event" require_relative "openai/models/responses/response_in_progress_event" @@ -374,6 +378,14 @@ require_relative "openai/models/responses/response_input_text" require_relative "openai/models/responses/response_item" require_relative "openai/models/responses/response_item_list" +require_relative "openai/models/responses/response_mcp_call_arguments_delta_event" +require_relative "openai/models/responses/response_mcp_call_arguments_done_event" +require_relative "openai/models/responses/response_mcp_call_completed_event" +require_relative "openai/models/responses/response_mcp_call_failed_event" +require_relative "openai/models/responses/response_mcp_call_in_progress_event" +require_relative "openai/models/responses/response_mcp_list_tools_completed_event" +require_relative "openai/models/responses/response_mcp_list_tools_failed_event" +require_relative "openai/models/responses/response_mcp_list_tools_in_progress_event" require_relative "openai/models/responses/response_output_audio" require_relative "openai/models/responses/response_output_item" require_relative "openai/models/responses/response_output_item_added_event" @@ -381,7 +393,13 @@ require_relative "openai/models/responses/response_output_message" require_relative "openai/models/responses/response_output_refusal" require_relative "openai/models/responses/response_output_text" +require_relative "openai/models/responses/response_output_text_annotation_added_event" +require_relative "openai/models/responses/response_queued_event" +require_relative "openai/models/responses/response_reasoning_delta_event" +require_relative "openai/models/responses/response_reasoning_done_event" require_relative "openai/models/responses/response_reasoning_item" +require_relative "openai/models/responses/response_reasoning_summary_delta_event" +require_relative "openai/models/responses/response_reasoning_summary_done_event" require_relative "openai/models/responses/response_reasoning_summary_part_added_event" require_relative "openai/models/responses/response_reasoning_summary_part_done_event" require_relative "openai/models/responses/response_reasoning_summary_text_delta_event" diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 4813a1af..5a548714 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -78,7 +78,7 @@ class Response < OpenAI::Internal::Type::BaseModel # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. # - # @return [Array] + # @return [Array] required :output, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls @@ -120,7 +120,7 @@ class Response < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -133,6 +133,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Float, nil] required :top_p, Float, nil?: true + # @!attribute background + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). 
+ # + # @return [Boolean, nil] + optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute max_output_tokens # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and @@ -182,7 +189,7 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. # # @return [Symbol, OpenAI::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Responses::ResponseStatus } @@ -224,7 +231,7 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Responses::Response} for more details. # @@ -242,7 +249,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param output [Array] An array of content items generated by the model. + # @param output [Array] An array of content items generated by the model. # # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. # @@ -250,10 +257,12 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # + # @param background [Boolean, nil] Whether to run the model response in the background. + # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. 
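[Editor's note: a hedged orientation sketch, not official usage. The new `background` flag and the widened status set can be inspected on any parsed response; `response` here is assumed to come from an earlier `client.responses.create` call.

    case response.status
    in :queued | :in_progress
      puts "still running (background=#{response.background.inspect})"
    in :cancelled
      puts "cancelled before completion"
    else
      puts "finished: #{response.status}"
    end
]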
Use this to diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 16bac78f..70e856b7 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -37,7 +37,13 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter_call] required :type, const: :code_interpreter_call - # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) + # @!attribute container_id + # The ID of the container used to run the code. + # + # @return [String, nil] + optional :container_id, String + + # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call) # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterToolCall} for more details. # @@ -51,9 +57,11 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # # @param status [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. # + # @param container_id [String] The ID of the container used to run the code. + # # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. - # The output of a code interpreter tool call that is text. + # The output of a code interpreter tool. module Result extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 4a38c09e..aacec08d 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -21,7 +21,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @return [String, Array] + # @return [String, Array] required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } # @!attribute model @@ -34,6 +34,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::ResponsesModel } + # @!attribute background + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + # + # @return [Boolean, nil] + optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute include # Specify additional output data to include in the model response. Currently # supported values are: @@ -178,7 +185,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). 
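[Editor's note: to make the new request parameter concrete, a minimal hedged sketch of a background request; it assumes a `client` (`OpenAI::Client`) constructed elsewhere and mirrors the `ResponseCreateParams` keywords added above.

    response = client.responses.create(
      model: "gpt-4o",
      input: "Summarize the attached report",
      background: true # run asynchronously; poll until `status` is terminal
    )
]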
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -211,14 +218,16 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # + # @param background [Boolean, nil] Whether to run the model response in the background. + # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context @@ -243,7 +252,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # @@ -274,7 +283,7 @@ module Input variant -> { OpenAI::Responses::ResponseInput } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. This parameter is diff --git a/lib/openai/models/responses/response_image_gen_call_completed_event.rb b/lib/openai/models/responses/response_image_gen_call_completed_event.rb new file mode 100644 index 00000000..d1298dab --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_completed_event.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.completed'. 
+ # + # @return [Symbol, :"response.image_generation_call.completed"] + required :type, const: :"response.image_generation_call.completed" + + # @!method initialize(item_id:, output_index:, type: :"response.image_generation_call.completed") + # Emitted when an image generation tool call has completed and the final image is + # available. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.image_generation_call.completed"] The type of the event. Always 'response.image_generation_call.completed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_generating_event.rb b/lib/openai/models/responses/response_image_gen_call_generating_event.rb new file mode 100644 index 00000000..51757dbb --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_generating_event.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.generating'. + # + # @return [Symbol, :"response.image_generation_call.generating"] + required :type, const: :"response.image_generation_call.generating" + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer, nil] + optional :sequence_number, Integer + + # @!method initialize(item_id:, output_index:, sequence_number: nil, type: :"response.image_generation_call.generating") + # Emitted when an image generation tool call is actively generating an image + # (intermediate state). + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. + # + # @param type [Symbol, :"response.image_generation_call.generating"] The type of the event. Always 'response.image_generation_call.generating'. + end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb b/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb new file mode 100644 index 00000000..83d9ac0a --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. 
+ # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.in_progress'. + # + # @return [Symbol, :"response.image_generation_call.in_progress"] + required :type, const: :"response.image_generation_call.in_progress" + + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.in_progress") + # Emitted when an image generation tool call is in progress. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. + # + # @param type [Symbol, :"response.image_generation_call.in_progress"] The type of the event. Always 'response.image_generation_call.in_progress'. + end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb new file mode 100644 index 00000000..e7e8e1af --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute partial_image_b64 + # Base64-encoded partial image data, suitable for rendering as an image. + # + # @return [String] + required :partial_image_b64, String + + # @!attribute partial_image_index + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + # + # @return [Integer] + required :partial_image_index, Integer + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.partial_image'. + # + # @return [Symbol, :"response.image_generation_call.partial_image"] + required :type, const: :"response.image_generation_call.partial_image" + + # @!method initialize(item_id:, output_index:, partial_image_b64:, partial_image_index:, sequence_number:, type: :"response.image_generation_call.partial_image") + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseImageGenCallPartialImageEvent} for more details. + # + # Emitted when a partial image is available during image generation streaming. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param partial_image_b64 [String] Base64-encoded partial image data, suitable for rendering as an image. + # + # @param partial_image_index [Integer] 0-based index for the partial image (backend is 1-based, but this is 0-based for + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. 
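[Editor's note: since `partial_image_b64` is documented above as base64 image data, a hedged sketch of persisting one frame; `event` is assumed to be an already-parsed `ResponseImageGenCallPartialImageEvent`, and the file extension is an assumption.

    require "base64"

    bytes = Base64.decode64(event.partial_image_b64)
    # partial_image_index is 0-based, per the field documentation above
    File.binwrite("partial_#{event.partial_image_index}.png", bytes)
]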
+ # + # @param type [Symbol, :"response.image_generation_call.partial_image"] The type of the event. Always 'response.image_generation_call.partial_image'. + end + end + end +end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 8ce7e375..d33546fd 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -56,6 +56,30 @@ module ResponseInputItem # [managing context](https://platform.openai.com/docs/guides/conversation-state). variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseInputItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseInputItem::LocalShellCall } + + # The output of a local shell tool call. + variant :local_shell_call_output, -> { OpenAI::Responses::ResponseInputItem::LocalShellCallOutput } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseInputItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseInputItem::McpApprovalRequest } + + # A response to an MCP approval request. + variant :mcp_approval_response, -> { OpenAI::Responses::ResponseInputItem::McpApprovalResponse } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseInputItem::McpCall } + # An internal identifier for an item to reference. variant :item_reference, -> { OpenAI::Responses::ResponseInputItem::ItemReference } @@ -322,6 +346,487 @@ module Status end end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. + # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. + # + # @return [Symbol, OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::ImageGenerationCall} for more details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. 
+ # + # @see OpenAI::Responses::ResponseInputItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Responses::ResponseInputItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::LocalShellCall} for more details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Responses::ResponseInputItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. + + # @see OpenAI::Responses::ResponseInputItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. + # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::LocalShellCall::Action} for more details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. 
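[Editor's note: the `Action` shape above maps onto Ruby's process APIs. A hedged sketch of honoring a parsed action with the stdlib; `action` is assumed to be a `LocalShellCall::Action`, and `timeout_ms`/`user` handling is omitted for brevity.

    require "open3"

    env  = action.env.transform_keys(&:to_s)           # Hash{Symbol=>String} -> Hash{String=>String}
    opts = { chdir: action.working_directory }.compact # drop the option when no directory is given
    stdout, stderr, status = Open3.capture3(env, *action.command, **opts)
]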
+ # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Responses::ResponseInputItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :id, String + + # @!attribute output + # A JSON string of the output of the local shell tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the local shell tool call output. Always `local_shell_call_output`. + # + # @return [Symbol, :local_shell_call_output] + required :type, const: :local_shell_call_output + + # @!attribute status + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @return [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] + optional :status, + enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status }, + nil?: true + + # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::LocalShellCallOutput} for more details. + # + # The output of a local shell tool call. + # + # @param id [String] The unique ID of the local shell tool call generated by the model. + # + # @param output [String] A JSON string of the output of the local shell tool call. + # + # @param status [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @see OpenAI::Responses::ResponseInputItem::LocalShellCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. 
+ # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::McpListTools::Tool} for more details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. + # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::McpApprovalRequest} for more details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. + # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + # @!attribute approval_request_id + # The ID of the approval request being answered. 
+ # + # @return [String] + required :approval_request_id, String + + # @!attribute approve + # Whether the request was approved. + # + # @return [Boolean] + required :approve, OpenAI::Internal::Type::Boolean + + # @!attribute type + # The type of the item. Always `mcp_approval_response`. + # + # @return [Symbol, :mcp_approval_response] + required :type, const: :mcp_approval_response + + # @!attribute id + # The unique ID of the approval response + # + # @return [String, nil] + optional :id, String, nil?: true + + # @!attribute reason + # Optional reason for the decision. + # + # @return [String, nil] + optional :reason, String, nil?: true + + # @!method initialize(approval_request_id:, approve:, id: nil, reason: nil, type: :mcp_approval_response) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::McpApprovalResponse} for more details. + # + # A response to an MCP approval request. + # + # @param approval_request_id [String] The ID of the approval request being answered. + # + # @param approve [Boolean] Whether the request was approved. + # + # @param id [String, nil] The unique ID of the approval response + # + # @param reason [String, nil] Optional reason for the decision. + # + # @param type [Symbol, :mcp_approval_response] The type of the item. Always `mcp_approval_response`. + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseInputItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the item to reference. 
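[Editor's note: given the `McpApprovalRequest`/`McpApprovalResponse` pair above, a hedged sketch of answering a request. It assumes `request` is a parsed approval request and that, as elsewhere in this SDK, models accept their documented keywords on `.new`.

    approval = OpenAI::Responses::ResponseInputItem::McpApprovalResponse.new(
      approval_request_id: request.id,
      approve: true,
      reason: "reviewed: read-only tool" # optional, per the field docs above
    )
]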
@@ -356,7 +861,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ItemReference)] + # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseInputItem::LocalShellCall, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Responses::ResponseInputItem::McpListTools, OpenAI::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Responses::ResponseInputItem::McpCall, OpenAI::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index e0bd4301..1bc8020a 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -34,8 +34,513 @@ module ResponseItem variant :function_call_output, -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem } + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseItem::LocalShellCall } + + # The output of a local shell tool call. + variant :local_shell_call_output, -> { OpenAI::Responses::ResponseItem::LocalShellCallOutput } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseItem::McpApprovalRequest } + + # A response to an MCP approval request. + variant :mcp_approval_response, -> { OpenAI::Responses::ResponseItem::McpApprovalResponse } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseItem::McpCall } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. + # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. 
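[Editor's note: with `ResponseItem` now spanning the tool-call variants listed above, consumers typically branch on the concrete class. A hedged sketch, assuming `items` is an enumerable of parsed `ResponseItem` values (e.g. one page of input items):

    items.each do |item|
      case item
      in OpenAI::Responses::ResponseItem::ImageGenerationCall
        puts "image call #{item.id}: #{item.status}"
      in OpenAI::Responses::ResponseCodeInterpreterToolCall
        puts "code run in container #{item.container_id}"
      in OpenAI::Responses::ResponseItem::McpCall
        puts "MCP tool #{item.name} via #{item.server_label}"
      else
        # messages, local shell items, approvals, etc.
      end
    end
]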
+ # + # @return [Symbol, OpenAI::Responses::ResponseItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::ImageGenerationCall} for more details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Responses::ResponseItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Responses::ResponseItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Responses::ResponseItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Responses::ResponseItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::LocalShellCall} for more details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Responses::ResponseItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Responses::ResponseItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. + + # @see OpenAI::Responses::ResponseItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. 
+ # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::LocalShellCall::Action} for more details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Responses::ResponseItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :id, String + + # @!attribute output + # A JSON string of the output of the local shell tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the local shell tool call output. Always `local_shell_call_output`. + # + # @return [Symbol, :local_shell_call_output] + required :type, const: :local_shell_call_output + + # @!attribute status + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @return [Symbol, OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status, nil] + optional :status, + enum: -> { + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status + }, + nil?: true + + # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::LocalShellCallOutput} for more details. + # + # The output of a local shell tool call. + # + # @param id [String] The unique ID of the local shell tool call generated by the model. + # + # @param output [String] A JSON string of the output of the local shell tool call. + # + # @param status [Symbol, OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
+ # + # @see OpenAI::Responses::ResponseItem::LocalShellCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseItem::McpListTools::Tool] } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::McpListTools::Tool} for more details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. 
+ # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::McpApprovalRequest} for more details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. + # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval response + # + # @return [String] + required :id, String + + # @!attribute approval_request_id + # The ID of the approval request being answered. + # + # @return [String] + required :approval_request_id, String + + # @!attribute approve + # Whether the request was approved. + # + # @return [Boolean] + required :approve, OpenAI::Internal::Type::Boolean + + # @!attribute type + # The type of the item. Always `mcp_approval_response`. + # + # @return [Symbol, :mcp_approval_response] + required :type, const: :mcp_approval_response + + # @!attribute reason + # Optional reason for the decision. + # + # @return [String, nil] + optional :reason, String, nil?: true + + # @!method initialize(id:, approval_request_id:, approve:, reason: nil, type: :mcp_approval_response) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::McpApprovalResponse} for more details. + # + # A response to an MCP approval request. + # + # @param id [String] The unique ID of the approval response + # + # @param approval_request_id [String] The ID of the approval request being answered. + # + # @param approve [Boolean] Whether the request was approved. + # + # @param reason [String, nil] Optional reason for the decision. + # + # @param type [Symbol, :mcp_approval_response] The type of the item. Always `mcp_approval_response`. + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. 
+ # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem)] + # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Responses::ResponseItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseItem::LocalShellCall, OpenAI::Responses::ResponseItem::LocalShellCallOutput, OpenAI::Responses::ResponseItem::McpListTools, OpenAI::Responses::ResponseItem::McpApprovalRequest, OpenAI::Responses::ResponseItem::McpApprovalResponse, OpenAI::Responses::ResponseItem::McpCall)] end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 6d7d8e30..436ecce9 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -7,7 +7,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!attribute data # A list of items used to generate this response. # - # @return [Array] + # @return [Array] required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseItem] } # @!attribute first_id @@ -37,7 +37,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) # A list of Response items. # - # @param data [Array] A list of items used to generate this response. + # @param data [Array] A list of items used to generate this response. # # @param first_id [String] The ID of the first item in the list. # diff --git a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb new file mode 100644 index 00000000..e238043d --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The partial update to the arguments for the MCP tool call. + # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. 
+ # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.arguments_delta'. + # + # @return [Symbol, :"response.mcp_call.arguments_delta"] + required :type, const: :"response.mcp_call.arguments_delta" + + # @!method initialize(delta:, item_id:, output_index:, type: :"response.mcp_call.arguments_delta") + # Emitted when there is a delta (partial update) to the arguments of an MCP tool + # call. + # + # @param delta [Object] The partial update to the arguments for the MCP tool call. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.mcp_call.arguments_delta"] The type of the event. Always 'response.mcp_call.arguments_delta'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb new file mode 100644 index 00000000..875cfee3 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute arguments + # The finalized arguments for the MCP tool call. + # + # @return [Object] + required :arguments, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.arguments_done'. + # + # @return [Symbol, :"response.mcp_call.arguments_done"] + required :type, const: :"response.mcp_call.arguments_done" + + # @!method initialize(arguments:, item_id:, output_index:, type: :"response.mcp_call.arguments_done") + # Emitted when the arguments for an MCP tool call are finalized. + # + # @param arguments [Object] The finalized arguments for the MCP tool call. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.mcp_call.arguments_done"] The type of the event. Always 'response.mcp_call.arguments_done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_completed_event.rb b/lib/openai/models/responses/response_mcp_call_completed_event.rb new file mode 100644 index 00000000..2b20fa27 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_completed_event.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the event. Always 'response.mcp_call.completed'. + # + # @return [Symbol, :"response.mcp_call.completed"] + required :type, const: :"response.mcp_call.completed" + + # @!method initialize(type: :"response.mcp_call.completed") + # Emitted when an MCP tool call has completed successfully. + # + # @param type [Symbol, :"response.mcp_call.completed"] The type of the event. 
Always 'response.mcp_call.completed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_failed_event.rb b/lib/openai/models/responses/response_mcp_call_failed_event.rb new file mode 100644 index 00000000..0429006e --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_failed_event.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the event. Always 'response.mcp_call.failed'. + # + # @return [Symbol, :"response.mcp_call.failed"] + required :type, const: :"response.mcp_call.failed" + + # @!method initialize(type: :"response.mcp_call.failed") + # Emitted when an MCP tool call has failed. + # + # @param type [Symbol, :"response.mcp_call.failed"] The type of the event. Always 'response.mcp_call.failed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_in_progress_event.rb b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb new file mode 100644 index 00000000..f19fdaa7 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.in_progress'. + # + # @return [Symbol, :"response.mcp_call.in_progress"] + required :type, const: :"response.mcp_call.in_progress" + + # @!method initialize(item_id:, output_index:, type: :"response.mcp_call.in_progress") + # Emitted when an MCP tool call is in progress. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.mcp_call.in_progress"] The type of the event. Always 'response.mcp_call.in_progress'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb new file mode 100644 index 00000000..bad2a11b --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.completed'. + # + # @return [Symbol, :"response.mcp_list_tools.completed"] + required :type, const: :"response.mcp_list_tools.completed" + + # @!method initialize(type: :"response.mcp_list_tools.completed") + # Emitted when the list of available MCP tools has been successfully retrieved. + # + # @param type [Symbol, :"response.mcp_list_tools.completed"] The type of the event. Always 'response.mcp_list_tools.completed'. 
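For orientation, here is a minimal sketch of consuming the MCP streaming events added above. It assumes `stream` is an Enumerable of parsed event models (for example, the enumerator returned by the client's streaming Responses API); the per-item accumulation strategy is illustrative, not part of the SDK.

```ruby
require "openai"

# Accumulate argument deltas per MCP tool call item; a minimal sketch,
# assuming `stream` yields the event models defined in this patch.
mcp_arguments = Hash.new { |hash, key| hash[key] = [] }

stream.each do |event|
  case event
  in OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent
    # Partial updates are keyed by the item being processed.
    mcp_arguments[event.item_id] << event.delta
  in OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent
    puts "arguments finalized for #{event.item_id}"
  in OpenAI::Responses::ResponseMcpCallCompletedEvent
    puts "MCP call completed"
  in OpenAI::Responses::ResponseMcpCallFailedEvent
    warn "MCP call failed"
  in OpenAI::Responses::ResponseMcpListToolsCompletedEvent
    puts "tool list retrieved"
  else
    nil # other event types are ignored in this sketch
  end
end
```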
+ end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb new file mode 100644 index 00000000..42df67d4 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.failed'. + # + # @return [Symbol, :"response.mcp_list_tools.failed"] + required :type, const: :"response.mcp_list_tools.failed" + + # @!method initialize(type: :"response.mcp_list_tools.failed") + # Emitted when the attempt to list available MCP tools has failed. + # + # @param type [Symbol, :"response.mcp_list_tools.failed"] The type of the event. Always 'response.mcp_list_tools.failed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb new file mode 100644 index 00000000..e1f44fe5 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.in_progress'. + # + # @return [Symbol, :"response.mcp_list_tools.in_progress"] + required :type, const: :"response.mcp_list_tools.in_progress" + + # @!method initialize(type: :"response.mcp_list_tools.in_progress") + # Emitted when the system is in the process of retrieving the list of available + # MCP tools. + # + # @param type [Symbol, :"response.mcp_list_tools.in_progress"] The type of the event. Always 'response.mcp_list_tools.in_progress'. + end + end + end +end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 8e653d3a..1fdd4e91 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -34,8 +34,404 @@ module ResponseOutputItem # [managing context](https://platform.openai.com/docs/guides/conversation-state). variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseOutputItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseOutputItem::McpCall } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseOutputItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseOutputItem::McpApprovalRequest } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. 
+ # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. + # + # @return [Symbol, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::ImageGenerationCall} for more details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Responses::ResponseOutputItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::LocalShellCall} for more details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. 
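Because the Action payload (modeled just below, mirroring the ResponseItem version earlier in this patch) is an argv-style command plus an environment hash, a `local_shell_call` can be serviced with the standard library. A hedged sketch, assuming `item` is a parsed LocalShellCall; `timeout_ms` and `user` are left unhandled, and how the output is returned to the API is application-specific.

```ruby
require "json"
require "open3"

# Run the command requested by a `local_shell_call` item and package the
# result in the shape expected by a local shell call output item.
def run_local_shell_call(item)
  action = item.action
  output, _status = Open3.capture2e(
    action.env.transform_keys(&:to_s),          # spawn requires String env keys
    *action.command,                            # argv-style command array
    chdir: action.working_directory || Dir.pwd  # optional working directory
  )
  # Note: `timeout_ms` and `user` are ignored in this sketch.
  {call_id: item.call_id, output: JSON.generate({output: output})}
end
```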
+ + # @see OpenAI::Responses::ResponseOutputItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. + # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action} for more + # details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Responses::ResponseOutputItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. 
+ # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] + } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::McpListTools::Tool} for more details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. 
+ # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. + # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseOutputItem::McpApprovalRequest} for more details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. + # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. + end + # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem)] + # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 94383e4f..8354f678 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. # - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -28,7 +28,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # # Emitted when a new output item is added. 
# - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was added. + # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was added. # # @param output_index [Integer] The index of the output item that was added. # diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 54d941ad..40d99812 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. # - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -28,7 +28,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # # Emitted when an output item is marked done. # - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was marked done. 
+ # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was marked done. # # @param output_index [Integer] The index of the output item that was marked done. # diff --git a/lib/openai/models/responses/response_output_text_annotation_added_event.rb b/lib/openai/models/responses/response_output_text_annotation_added_event.rb new file mode 100644 index 00000000..8efbedff --- /dev/null +++ b/lib/openai/models/responses/response_output_text_annotation_added_event.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute annotation + # The annotation object being added. (See annotation schema for details.) + # + # @return [Object] + required :annotation, OpenAI::Internal::Type::Unknown + + # @!attribute annotation_index + # The index of the annotation within the content part. + # + # @return [Integer] + required :annotation_index, Integer + + # @!attribute content_index + # The index of the content part within the output item. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute item_id + # The unique identifier of the item to which the annotation is being added. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.output_text_annotation.added'. + # + # @return [Symbol, :"response.output_text_annotation.added"] + required :type, const: :"response.output_text_annotation.added" + + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text_annotation.added") + # Emitted when an annotation is added to output text content. + # + # @param annotation [Object] The annotation object being added. (See annotation schema for details.) + # + # @param annotation_index [Integer] The index of the annotation within the content part. + # + # @param content_index [Integer] The index of the content part within the output item. + # + # @param item_id [String] The unique identifier of the item to which the annotation is being added. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.output_text_annotation.added"] The type of the event. Always 'response.output_text_annotation.added'. 
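As a usage note, annotation events can be collected incrementally as they arrive. A small sketch, assuming the surrounding stream loop exists elsewhere; the keying scheme is one reasonable choice, not an SDK requirement.

```ruby
# Collect annotations as they stream in, keyed by item and content part.
annotations = Hash.new { |hash, key| hash[key] = [] }

handle_annotation = lambda do |event|
  return unless event.type == :"response.output_text_annotation.added"

  key = [event.item_id, event.content_index]
  # `annotation_index` gives the annotation's position within the part,
  # so insertion keeps the list ordered even under reordered delivery.
  annotations[key].insert(event.annotation_index, event.annotation)
end
```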
+ end + end + end +end diff --git a/lib/openai/models/responses/response_queued_event.rb b/lib/openai/models/responses/response_queued_event.rb new file mode 100644 index 00000000..065f4cc1 --- /dev/null +++ b/lib/openai/models/responses/response_queued_event.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute response + # The full response object that is queued. + # + # @return [OpenAI::Responses::Response] + required :response, -> { OpenAI::Responses::Response } + + # @!attribute type + # The type of the event. Always 'response.queued'. + # + # @return [Symbol, :"response.queued"] + required :type, const: :"response.queued" + + # @!method initialize(response:, type: :"response.queued") + # Emitted when a response is queued and waiting to be processed. + # + # @param response [OpenAI::Responses::Response] The full response object that is queued. + # + # @param type [Symbol, :"response.queued"] The type of the event. Always 'response.queued'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_delta_event.rb b/lib/openai/models/responses/response_reasoning_delta_event.rb new file mode 100644 index 00000000..18afe6ee --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_delta_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part within the output item. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute delta + # The partial update to the reasoning content. + # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the item for which reasoning is being updated. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute type + # The type of the event. Always 'response.reasoning.delta'. + # + # @return [Symbol, :"response.reasoning.delta"] + required :type, const: :"response.reasoning.delta" + + # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.reasoning.delta") + # Emitted when there is a delta (partial update) to the reasoning content. + # + # @param content_index [Integer] The index of the reasoning content part within the output item. + # + # @param delta [Object] The partial update to the reasoning content. + # + # @param item_id [String] The unique identifier of the item for which reasoning is being updated. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param type [Symbol, :"response.reasoning.delta"] The type of the event. Always 'response.reasoning.delta'. 
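The queued and reasoning events compose in the obvious way. A sketch under two assumptions: the surrounding stream loop is defined elsewhere, and each reasoning `delta` can be coerced to text (the model types it as an arbitrary object).

```ruby
reasoning_text = Hash.new { |hash, key| hash[key] = +"" }

handle_event = lambda do |event|
  case event.type
  in :"response.queued"
    # The full Response rides along; its id can be used to poll later.
    puts "response #{event.response.id} is queued"
  in :"response.reasoning.delta"
    reasoning_text[event.item_id] << event.delta.to_s
  in :"response.reasoning.done"
    # The finalized text supersedes anything accumulated from deltas.
    reasoning_text[event.item_id] = event.text
  else
    nil
  end
end
```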
+ end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_done_event.rb b/lib/openai/models/responses/response_reasoning_done_event.rb new file mode 100644 index 00000000..887f1f58 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_done_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part within the output item. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute item_id + # The unique identifier of the item for which reasoning is finalized. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute text + # The finalized reasoning text. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always 'response.reasoning.done'. + # + # @return [Symbol, :"response.reasoning.done"] + required :type, const: :"response.reasoning.done" + + # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.reasoning.done") + # Emitted when the reasoning content is finalized for an item. + # + # @param content_index [Integer] The index of the reasoning content part within the output item. + # + # @param item_id [String] The unique identifier of the item for which reasoning is finalized. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param text [String] The finalized reasoning text. + # + # @param type [Symbol, :"response.reasoning.done"] The type of the event. Always 'response.reasoning.done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb new file mode 100644 index 00000000..188b3ef8 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The partial update to the reasoning summary content. + # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the item for which the reasoning summary is being + # updated. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute summary_index + # The index of the summary part within the output item. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute type + # The type of the event. Always 'response.reasoning_summary.delta'. + # + # @return [Symbol, :"response.reasoning_summary.delta"] + required :type, const: :"response.reasoning_summary.delta" + + # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Responses::ResponseReasoningSummaryDeltaEvent} for more details. + # + # Emitted when there is a delta (partial update) to the reasoning summary content. 
+ # + # @param delta [Object] The partial update to the reasoning summary content. + # + # @param item_id [String] The unique identifier of the item for which the reasoning summary is being updat + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param summary_index [Integer] The index of the summary part within the output item. + # + # @param type [Symbol, :"response.reasoning_summary.delta"] The type of the event. Always 'response.reasoning_summary.delta'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_done_event.rb new file mode 100644 index 00000000..54715dd8 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_done_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the item for which the reasoning summary is finalized. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute summary_index + # The index of the summary part within the output item. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute text + # The finalized reasoning summary text. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always 'response.reasoning_summary.done'. + # + # @return [Symbol, :"response.reasoning_summary.done"] + required :type, const: :"response.reasoning_summary.done" + + # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary.done") + # Emitted when the reasoning summary content is finalized for an item. + # + # @param item_id [String] The unique identifier of the item for which the reasoning summary is finalized. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param summary_index [Integer] The index of the summary part within the output item. + # + # @param text [String] The finalized reasoning summary text. + # + # @param type [Symbol, :"response.reasoning_summary.done"] The type of the event. Always 'response.reasoning_summary.done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index eb628952..20e16a85 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -4,13 +4,15 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. 
module ResponseStatus extend OpenAI::Internal::Type::Enum COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress + CANCELLED = :cancelled + QUEUED = :queued INCOMPLETE = :incomplete # @!method self.values diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 48456245..08fbf6b8 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -138,8 +138,83 @@ module ResponseStreamEvent variant :"response.web_search_call.searching", -> { OpenAI::Responses::ResponseWebSearchCallSearchingEvent } + # Emitted when an image generation tool call has completed and the final image is available. + variant :"response.image_generation_call.completed", + -> { OpenAI::Responses::ResponseImageGenCallCompletedEvent } + + # Emitted when an image generation tool call is actively generating an image (intermediate state). + variant :"response.image_generation_call.generating", + -> { OpenAI::Responses::ResponseImageGenCallGeneratingEvent } + + # Emitted when an image generation tool call is in progress. + variant :"response.image_generation_call.in_progress", + -> { OpenAI::Responses::ResponseImageGenCallInProgressEvent } + + # Emitted when a partial image is available during image generation streaming. + variant :"response.image_generation_call.partial_image", + -> { OpenAI::Responses::ResponseImageGenCallPartialImageEvent } + + # Emitted when there is a delta (partial update) to the arguments of an MCP tool call. + variant :"response.mcp_call.arguments_delta", + -> { + OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent + } + + # Emitted when the arguments for an MCP tool call are finalized. + variant :"response.mcp_call.arguments_done", + -> { + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent + } + + # Emitted when an MCP tool call has completed successfully. + variant :"response.mcp_call.completed", -> { OpenAI::Responses::ResponseMcpCallCompletedEvent } + + # Emitted when an MCP tool call has failed. + variant :"response.mcp_call.failed", -> { OpenAI::Responses::ResponseMcpCallFailedEvent } + + # Emitted when an MCP tool call is in progress. + variant :"response.mcp_call.in_progress", -> { OpenAI::Responses::ResponseMcpCallInProgressEvent } + + # Emitted when the list of available MCP tools has been successfully retrieved. + variant :"response.mcp_list_tools.completed", + -> { + OpenAI::Responses::ResponseMcpListToolsCompletedEvent + } + + # Emitted when the attempt to list available MCP tools has failed. + variant :"response.mcp_list_tools.failed", -> { OpenAI::Responses::ResponseMcpListToolsFailedEvent } + + # Emitted when the system is in the process of retrieving the list of available MCP tools. + variant :"response.mcp_list_tools.in_progress", + -> { OpenAI::Responses::ResponseMcpListToolsInProgressEvent } + + # Emitted when an annotation is added to output text content. + variant :"response.output_text_annotation.added", + -> { OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent } + + # Emitted when a response is queued and waiting to be processed. + variant :"response.queued", -> { OpenAI::Responses::ResponseQueuedEvent } + + # Emitted when there is a delta (partial update) to the reasoning content. + variant :"response.reasoning.delta", -> { OpenAI::Responses::ResponseReasoningDeltaEvent } + + # Emitted when the reasoning content is finalized for an item. 
+ variant :"response.reasoning.done", -> { OpenAI::Responses::ResponseReasoningDoneEvent } + + # Emitted when there is a delta (partial update) to the reasoning summary content. + variant :"response.reasoning_summary.delta", + -> { + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent + } + + # Emitted when the reasoning summary content is finalized for an item. + variant :"response.reasoning_summary.done", + -> { + OpenAI::Responses::ResponseReasoningSummaryDoneEvent + } + # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent)] + # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, 
OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Responses::ResponseMcpCallCompletedEvent, OpenAI::Responses::ResponseMcpCallFailedEvent, OpenAI::Responses::ResponseMcpCallInProgressEvent, OpenAI::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Responses::ResponseQueuedEvent, OpenAI::Responses::ResponseReasoningDeltaEvent, OpenAI::Responses::ResponseReasoningDoneEvent, OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryDoneEvent)] end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 97939459..7e7cb2a2 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -9,20 +9,508 @@ module Tool discriminator :type - # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - variant :file_search, -> { OpenAI::Responses::FileSearchTool } - # Defines a function in your own code the model can choose to call. Learn more about [function calling](https://platform.openai.com/docs/guides/function-calling). variant :function, -> { OpenAI::Responses::FunctionTool } + # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + variant :file_search, -> { OpenAI::Responses::FileSearchTool } + # A tool that controls a virtual computer. Learn more about the [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). variant :computer_use_preview, -> { OpenAI::Responses::ComputerTool } + # Give the model access to additional tools via remote Model Context Protocol + # (MCP) servers. [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). + variant :mcp, -> { OpenAI::Responses::Tool::Mcp } + + # A tool that runs Python code to help generate a response to a prompt. + variant :code_interpreter, -> { OpenAI::Responses::Tool::CodeInterpreter } + + # A tool that generates images using a model like `gpt-image-1`. + variant :image_generation, -> { OpenAI::Responses::Tool::ImageGeneration } + + # A tool that allows the model to execute shell commands in a local environment. 
+      variant :local_shell, -> { OpenAI::Responses::Tool::LocalShell }
+
       # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search).
       variant -> { OpenAI::Responses::WebSearchTool }
 
+      class Mcp < OpenAI::Internal::Type::BaseModel
+        # @!attribute server_label
+        #   A label for this MCP server, used to identify it in tool calls.
+        #
+        #   @return [String]
+        required :server_label, String
+
+        # @!attribute server_url
+        #   The URL for the MCP server.
+        #
+        #   @return [String]
+        required :server_url, String
+
+        # @!attribute type
+        #   The type of the MCP tool. Always `mcp`.
+        #
+        #   @return [Symbol, :mcp]
+        required :type, const: :mcp
+
+        # @!attribute allowed_tools
+        #   List of allowed tool names or a filter object.
+        #
+        #   @return [Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil]
+        optional :allowed_tools, union: -> { OpenAI::Responses::Tool::Mcp::AllowedTools }, nil?: true
+
+        # @!attribute headers
+        #   Optional HTTP headers to send to the MCP server. Use for authentication or other
+        #   purposes.
+        #
+        #   @return [Hash{Symbol=>String}, nil]
+        optional :headers, OpenAI::Internal::Type::HashOf[String], nil?: true
+
+        # @!attribute require_approval
+        #   Specify which of the MCP server's tools require approval.
+        #
+        #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil]
+        optional :require_approval, union: -> { OpenAI::Responses::Tool::Mcp::RequireApproval }, nil?: true
+
+        # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, type: :mcp)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::Responses::Tool::Mcp} for more details.
+        #
+        #   Give the model access to additional tools via remote Model Context Protocol
+        #   (MCP) servers.
+        #   [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp).
+        #
+        #   @param server_label [String] A label for this MCP server, used to identify it in tool calls.
+        #
+        #   @param server_url [String] The URL for the MCP server.
+        #
+        #   @param allowed_tools [Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] List of allowed tool names or a filter object.
+        #
+        #   @param headers [Hash{Symbol=>String}, nil] Optional HTTP headers to send to the MCP server. Use for authentication
+        #
+        #   @param require_approval [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] Specify which of the MCP server's tools require approval.
+        #
+        #   @param type [Symbol, :mcp] The type of the MCP tool. Always `mcp`.
+
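
As a usage sketch only (the model name, server label, and URL below are placeholders, not part of this patch), an MCP tool can be built from the class above or passed as an equivalent hash; the union's `:type` discriminator routes hashes to the right variant:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    mcp = OpenAI::Responses::Tool::Mcp.new(
      server_label: "docs",
      server_url: "https://example.com/mcp" # placeholder URL
    )
    client.responses.create(
      model: "gpt-4o",
      input: "What does the docs server expose?",
      tools: [mcp, {type: :local_shell}]
    )
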
+        # List of allowed tool names or a filter object.
+        #
+        # @see OpenAI::Responses::Tool::Mcp#allowed_tools
+        module AllowedTools
+          extend OpenAI::Internal::Type::Union
+
+          # A string array of allowed tool names
+          variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::StringArray }
+
+          # A filter object to specify which tools are allowed.
+          variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter }
+
+          class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel
+            # @!attribute tool_names
+            #   List of allowed tool names.
+            #
+            #   @return [Array<String>, nil]
+            optional :tool_names, OpenAI::Internal::Type::ArrayOf[String]
+
+            # @!method initialize(tool_names: nil)
+            #   A filter object to specify which tools are allowed.
+            #
+            #   @param tool_names [Array<String>] List of allowed tool names.
+          end
+
+          # @!method self.variants
+          #   @return [Array(Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter)]
+
+          # @type [OpenAI::Internal::Type::Converter]
+          StringArray = OpenAI::Internal::Type::ArrayOf[String]
+        end
+
+        # Specify which of the MCP server's tools require approval.
+        #
+        # @see OpenAI::Responses::Tool::Mcp#require_approval
+        module RequireApproval
+          extend OpenAI::Internal::Type::Union
+
+          variant -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter }
+
+          # Specify a single approval policy for all tools. One of `always` or
+          # `never`. When set to `always`, all tools will require approval. When
+          # set to `never`, all tools will not require approval.
+          variant enum: -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting }
+
+          class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel
+            # @!attribute always
+            #   A list of tools that always require approval.
+            #
+            #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil]
+            optional :always,
+                     -> {
+                       OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always
+                     }
+
+            # @!attribute never
+            #   A list of tools that never require approval.
+            #
+            #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil]
+            optional :never,
+                     -> {
+                       OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never
+                     }
+
+            # @!attribute tool_names
+            #   List of allowed tool names.
+            #
+            #   @return [Array<String>, nil]
+            optional :tool_names, OpenAI::Internal::Type::ArrayOf[String]
+
+            # @!method initialize(always: nil, never: nil, tool_names: nil)
+            #   Some parameter documentation has been truncated, see
+            #   {OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter} for more
+            #   details.
+            #
+            #   @param always [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval.
+            #
+            #   @param never [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval.
+            #
+            #   @param tool_names [Array<String>] List of allowed tool names.
+
+            # @see OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always
+            class Always < OpenAI::Internal::Type::BaseModel
+              # @!attribute tool_names
+              #   List of tools that require approval.
+              #
+              #   @return [Array<String>, nil]
+              optional :tool_names, OpenAI::Internal::Type::ArrayOf[String]
+
+              # @!method initialize(tool_names: nil)
+              #   A list of tools that always require approval.
+              #
+              #   @param tool_names [Array<String>] List of tools that require approval.
+            end
+
+            # @see OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#never
+            class Never < OpenAI::Internal::Type::BaseModel
+              # @!attribute tool_names
+              #   List of tools that do not require approval.
+              #
+              #   @return [Array<String>, nil]
+              optional :tool_names, OpenAI::Internal::Type::ArrayOf[String]
+
+              # @!method initialize(tool_names: nil)
+              #   A list of tools that never require approval.
+              #
+              #   @param tool_names [Array<String>] List of tools that do not require approval.
+            end
+          end
+
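
To make the two unions above concrete, a hedged sketch of the shapes they accept (the tool names here are invented): `allowed_tools` takes a string array or a filter, and `require_approval` takes a filter or one of the `:always`/`:never` settings defined next:

    OpenAI::Responses::Tool::Mcp.new(
      server_label: "docs",
      server_url: "https://example.com/mcp",                    # placeholder URL
      allowed_tools: ["search", "fetch_page"],                  # or {tool_names: [...]}
      require_approval: {always: {tool_names: ["fetch_page"]}}  # or :always / :never
    )
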
+          # Specify a single approval policy for all tools. One of `always` or `never`. When
+          # set to `always`, all tools will require approval. When set to `never`, all tools
+          # will not require approval.
+          module McpToolApprovalSetting
+            extend OpenAI::Internal::Type::Enum
+
+            ALWAYS = :always
+            NEVER = :never
+
+            # @!method self.values
+            #   @return [Array<Symbol>]
+          end
+
+          # @!method self.variants
+          #   @return [Array(OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting)]
+        end
+      end
+
+      class CodeInterpreter < OpenAI::Internal::Type::BaseModel
+        # @!attribute container
+        #   The code interpreter container. Can be a container ID or an object that
+        #   specifies uploaded file IDs to make available to your code.
+        #
+        #   @return [String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto]
+        required :container, union: -> { OpenAI::Responses::Tool::CodeInterpreter::Container }
+
+        # @!attribute type
+        #   The type of the code interpreter tool. Always `code_interpreter`.
+        #
+        #   @return [Symbol, :code_interpreter]
+        required :type, const: :code_interpreter
+
+        # @!method initialize(container:, type: :code_interpreter)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::Responses::Tool::CodeInterpreter} for more details.
+        #
+        #   A tool that runs Python code to help generate a response to a prompt.
+        #
+        #   @param container [String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] The code interpreter container. Can be a container ID or an object that
+        #
+        #   @param type [Symbol, :code_interpreter] The type of the code interpreter tool. Always `code_interpreter`.
+
+        # The code interpreter container. Can be a container ID or an object that
+        # specifies uploaded file IDs to make available to your code.
+        #
+        # @see OpenAI::Responses::Tool::CodeInterpreter#container
+        module Container
+          extend OpenAI::Internal::Type::Union
+
+          # The container ID.
+          variant String
+
+          # Configuration for a code interpreter container. Optionally specify the IDs
+          # of the files to run the code on.
+          variant -> { OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto }
+
+          class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel
+            # @!attribute type
+            #   Always `auto`.
+            #
+            #   @return [Symbol, :auto]
+            required :type, const: :auto
+
+            # @!attribute file_ids
+            #   An optional list of uploaded files to make available to your code.
+            #
+            #   @return [Array<String>, nil]
+            optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]
+
+            # @!method initialize(file_ids: nil, type: :auto)
+            #   Some parameter documentation has been truncated, see
+            #   {OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto}
+            #   for more details.
+            #
+            #   Configuration for a code interpreter container. Optionally specify the IDs of
+            #   the files to run the code on.
+            #
+            #   @param file_ids [Array<String>] An optional list of uploaded files to make available to your code.
+            #
+            #   @param type [Symbol, :auto] Always `auto`.
+          end
+
+          # @!method self.variants
+          #   @return [Array(String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto)]
+        end
+      end
+
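
A brief sketch of the `container` union above (both IDs are placeholders): pass a container ID string to reuse an existing container, or an `auto` object so one is created with the given files attached:

    # Reuse an existing container by ID.
    OpenAI::Responses::Tool::CodeInterpreter.new(container: "cntr_abc123")

    # Or request an auto-created container with files available to the code.
    OpenAI::Responses::Tool::CodeInterpreter.new(
      container: {type: :auto, file_ids: ["file-abc123"]}
    )
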
+      class ImageGeneration < OpenAI::Internal::Type::BaseModel
+        # @!attribute type
+        #   The type of the image generation tool. Always `image_generation`.
+        #
+        #   @return [Symbol, :image_generation]
+        required :type, const: :image_generation
+
+        # @!attribute background
+        #   Background type for the generated image. One of `transparent`, `opaque`, or
+        #   `auto`. Default: `auto`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Background, nil]
+        optional :background, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Background }
+
+        # @!attribute input_image_mask
+        #   Optional mask for inpainting. Contains `image_url` (string, optional) and
+        #   `file_id` (string, optional).
+        #
+        #   @return [OpenAI::Responses::Tool::ImageGeneration::InputImageMask, nil]
+        optional :input_image_mask, -> { OpenAI::Responses::Tool::ImageGeneration::InputImageMask }
+
+        # @!attribute model
+        #   The image generation model to use. Default: `gpt-image-1`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Model, nil]
+        optional :model, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Model }
+
+        # @!attribute moderation
+        #   Moderation level for the generated image. Default: `auto`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Moderation, nil]
+        optional :moderation, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Moderation }
+
+        # @!attribute output_compression
+        #   Compression level for the output image. Default: 100.
+        #
+        #   @return [Integer, nil]
+        optional :output_compression, Integer
+
+        # @!attribute output_format
+        #   The output format of the generated image. One of `png`, `webp`, or `jpeg`.
+        #   Default: `png`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::OutputFormat, nil]
+        optional :output_format, enum: -> { OpenAI::Responses::Tool::ImageGeneration::OutputFormat }
+
+        # @!attribute partial_images
+        #   Number of partial images to generate in streaming mode, from 0 (default value)
+        #   to 3.
+        #
+        #   @return [Integer, nil]
+        optional :partial_images, Integer
+
+        # @!attribute quality
+        #   The quality of the generated image. One of `low`, `medium`, `high`, or `auto`.
+        #   Default: `auto`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality, nil]
+        optional :quality, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Quality }
+
+        # @!attribute size
+        #   The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`,
+        #   or `auto`. Default: `auto`.
+        #
+        #   @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Size, nil]
+        optional :size, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Size }
+
+        # @!method initialize(background: nil, input_image_mask: nil, model: nil, moderation: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, size: nil, type: :image_generation)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::Responses::Tool::ImageGeneration} for more details.
+        #
+        #   A tool that generates images using a model like `gpt-image-1`.
+        #
+        #   @param background [Symbol, OpenAI::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`,
+        #
+        #   @param input_image_mask [OpenAI::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url`
+        #
+        #   @param model [Symbol, OpenAI::Responses::Tool::ImageGeneration::Model] The image generation model to use. Default: `gpt-image-1`.
+        #
+        #   @param moderation [Symbol, OpenAI::Responses::Tool::ImageGeneration::Moderation] Moderation level for the generated image. Default: `auto`.
+        #
+        #   @param output_compression [Integer] Compression level for the output image. Default: 100.
+        #
+        #   @param output_format [Symbol, OpenAI::Responses::Tool::ImageGeneration::OutputFormat] The output format of the generated image. One of `png`, `webp`, or
+        #
+        #   @param partial_images [Integer] Number of partial images to generate in streaming mode, from 0 (default value) t
+        #
+        #   @param quality [Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality] The quality of the generated image. One of `low`, `medium`, `high`,
+        #
+        #   @param size [Symbol, OpenAI::Responses::Tool::ImageGeneration::Size] The size of the generated image. One of `1024x1024`, `1024x1536`,
+        #
+        #   @param type [Symbol, :image_generation] The type of the image generation tool. Always `image_generation`.
+
+        # Background type for the generated image. One of `transparent`, `opaque`, or
+        # `auto`. Default: `auto`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#background
+        module Background
+          extend OpenAI::Internal::Type::Enum
+
+          TRANSPARENT = :transparent
+          OPAQUE = :opaque
+          AUTO = :auto
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+
+        # @see OpenAI::Responses::Tool::ImageGeneration#input_image_mask
+        class InputImageMask < OpenAI::Internal::Type::BaseModel
+          # @!attribute file_id
+          #   File ID for the mask image.
+          #
+          #   @return [String, nil]
+          optional :file_id, String
+
+          # @!attribute image_url
+          #   Base64-encoded mask image.
+          #
+          #   @return [String, nil]
+          optional :image_url, String
+
+          # @!method initialize(file_id: nil, image_url: nil)
+          #   Some parameter documentation has been truncated, see
+          #   {OpenAI::Responses::Tool::ImageGeneration::InputImageMask} for more details.
+          #
+          #   Optional mask for inpainting. Contains `image_url` (string, optional) and
+          #   `file_id` (string, optional).
+          #
+          #   @param file_id [String] File ID for the mask image.
+          #
+          #   @param image_url [String] Base64-encoded mask image.
+        end
+
+        # The image generation model to use. Default: `gpt-image-1`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#model
+        module Model
+          extend OpenAI::Internal::Type::Enum
+
+          GPT_IMAGE_1 = :"gpt-image-1"
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+
+        # Moderation level for the generated image. Default: `auto`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#moderation
+        module Moderation
+          extend OpenAI::Internal::Type::Enum
+
+          AUTO = :auto
+          LOW = :low
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+
+        # The output format of the generated image. One of `png`, `webp`, or `jpeg`.
+        # Default: `png`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#output_format
+        module OutputFormat
+          extend OpenAI::Internal::Type::Enum
+
+          PNG = :png
+          WEBP = :webp
+          JPEG = :jpeg
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+
+        # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`.
+        # Default: `auto`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#quality
+        module Quality
+          extend OpenAI::Internal::Type::Enum
+
+          LOW = :low
+          MEDIUM = :medium
+          HIGH = :high
+          AUTO = :auto
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+
+        # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`,
+        # or `auto`. Default: `auto`.
+        #
+        # @see OpenAI::Responses::Tool::ImageGeneration#size
+        module Size
+          extend OpenAI::Internal::Type::Enum
+
+          SIZE_1024X1024 = :"1024x1024"
+          SIZE_1024X1536 = :"1024x1536"
+          SIZE_1536X1024 = :"1536x1024"
+          AUTO = :auto
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+      end
+
+      class LocalShell < OpenAI::Internal::Type::BaseModel
+        # @!attribute type
+        #   The type of the local shell tool. Always `local_shell`.
+ # + # @return [Symbol, :local_shell] + required :type, const: :local_shell + + # @!method initialize(type: :local_shell) + # A tool that allows the model to execute shell commands in a local environment. + # + # @param type [Symbol, :local_shell] The type of the local shell tool. Always `local_shell`. + end + # @!method self.variants - # @return [Array(OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::WebSearchTool)] + # @return [Array(OpenAI::Responses::FunctionTool, OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::Tool::Mcp, OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 504daa0a..bcac7f83 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -13,6 +13,9 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` # # @return [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Responses::ToolChoiceTypes::Type } @@ -34,6 +37,9 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` # # @see OpenAI::Responses::ToolChoiceTypes#type module Type @@ -43,6 +49,9 @@ module Type WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + IMAGE_GENERATION = :image_generation + CODE_INTERPRETER = :code_interpreter + MCP = :mcp # @!method self.values # @return [Array] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index f3c18eea..315deae0 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,12 +23,14 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # + # @param background [Boolean, nil] Whether to run the model response in the background. 
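
Following on the `background` parameter documented just above, a hedged sketch of the intended flow (the model name is a placeholder, and the terminal statuses follow the `ResponseStatus` values extended in this patch): create the response in the background, then poll it by ID until it settles:

    resp = client.responses.create(model: "o3", input: "long-running job", background: true)
    until %i[completed failed cancelled incomplete].include?(resp.status)
      sleep 2
      resp = client.responses.retrieve(resp.id)
    end
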
+ # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context @@ -53,7 +55,7 @@ class Responses # # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # @@ -98,12 +100,14 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # + # @param background [Boolean, nil] Whether to run the model response in the background. + # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context @@ -128,7 +132,7 @@ def create(params) # # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # @@ -138,7 +142,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 3cec0416..36681e73 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -25,7 +25,7 @@ class InputItems # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index b6b6c611..89969f4a 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -126,6 +126,11 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :top_p + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :background + # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -173,7 +178,7 @@ module OpenAI attr_accessor :service_tier # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. sig do returns(T.nilable(OpenAI::Responses::ResponseStatus::TaggedSymbol)) end @@ -247,7 +252,13 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ) ], parallel_tool_calls: T::Boolean, @@ -261,13 +272,18 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], top_p: T.nilable(Float), + background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Reasoning::OrHash), @@ -351,6 +367,9 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). 
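
Relating to the `OpenAI::Internal::Stream` return type documented above for `#stream_raw`, a minimal consumption sketch (the model name is a placeholder and the event handling is illustrative, not exhaustive):

    stream = client.responses.stream_raw(model: "gpt-4o", input: "Say hi")
    stream.each do |event|
      print event.delta if event.type == :"response.output_text.delta"
    end
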
+ background: nil, # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -383,7 +402,7 @@ module OpenAI # utilized. service_tier: nil, # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. status: nil, # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: @@ -429,6 +448,7 @@ module OpenAI tool_choice: OpenAI::Responses::Response::ToolChoice::Variants, tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: T.nilable(Float), + background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Reasoning), diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index d0bbe3ac..95089eaa 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -24,7 +24,10 @@ module OpenAI sig do returns( T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) ] ) end @@ -33,7 +36,7 @@ module OpenAI # The status of the code interpreter tool call. sig do returns( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol ) end attr_accessor :status @@ -42,6 +45,13 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # The ID of the container used to run the code. + sig { returns(T.nilable(String)) } + attr_reader :container_id + + sig { params(container_id: String).void } + attr_writer :container_id + # A tool call to run code. sig do params( @@ -56,6 +66,7 @@ module OpenAI ], status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, + container_id: String, type: Symbol ).returns(T.attached_class) end @@ -68,6 +79,8 @@ module OpenAI results:, # The status of the code interpreter tool call. status:, + # The ID of the container used to run the code. + container_id: nil, # The type of the code interpreter tool call. Always `code_interpreter_call`. type: :code_interpreter_call ) @@ -80,18 +93,22 @@ module OpenAI code: String, results: T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + ) ], status: - OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, - type: Symbol + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, + type: Symbol, + container_id: String } ) end def to_hash end - # The output of a code interpreter tool call that is text. + # The output of a code interpreter tool. 
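
A short sketch of consuming the `Result` union defined next (the `call` object is assumed to be a `ResponseCodeInterpreterToolCall` taken from a response's output; attribute names follow the Logs/Files variants in this patch):

    call.results.each do |result|
      case result
      when OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs
        puts result.logs                               # captured stdout/log text
      when OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files
        result.files.each { |file| puts file.file_id } # generated file references
      end
    end
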
module Result extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 87695615..8ca7d2bb 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -45,6 +45,11 @@ module OpenAI end attr_accessor :model + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :background + # Specify additional output data to include in the model response. Currently # supported values are: # @@ -204,9 +209,13 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) ] @@ -220,9 +229,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -272,6 +285,7 @@ module OpenAI OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -298,9 +312,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -330,6 +348,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. 
Currently # supported values are: # @@ -455,6 +476,7 @@ module OpenAI OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -481,9 +503,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) ], diff --git a/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi new file mode 100644 index 00000000..b6891594 --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi @@ -0,0 +1,54 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.image_generation_call.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an image generation tool call has completed and the final image is + # available. + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.image_generation_call.completed'. + type: :"response.image_generation_call.completed" + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi new file mode 100644 index 00000000..66bad12d --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi @@ -0,0 +1,71 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallGeneratingEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.image_generation_call.generating'. + sig { returns(Symbol) } + attr_accessor :type + + # The sequence number of the image generation item being processed. 
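
Pulling the image-generation pieces together, a hedged end-to-end sketch (the model name and file naming are placeholders): request partial frames via the tool's `partial_images` option, then decode the base64 payloads carried by the partial-image events defined below:

    require "base64"

    stream = client.responses.stream_raw(
      model: "gpt-4o",
      input: "Draw a lighthouse at dusk.",
      tools: [{type: :image_generation, partial_images: 2}]
    )
    stream.each do |event|
      next unless event.type == :"response.image_generation_call.partial_image"
      File.binwrite("partial_#{event.partial_image_index}.png",
                    Base64.decode64(event.partial_image_b64))
    end
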
+ sig { returns(T.nilable(Integer)) } + attr_reader :sequence_number + + sig { params(sequence_number: Integer).void } + attr_writer :sequence_number + + # Emitted when an image generation tool call is actively generating an image + # (intermediate state). + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of the image generation item being processed. + sequence_number: nil, + # The type of the event. Always 'response.image_generation_call.generating'. + type: :"response.image_generation_call.generating" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + type: Symbol, + sequence_number: Integer + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi b/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi new file mode 100644 index 00000000..7bf0d0e4 --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi @@ -0,0 +1,67 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an image generation tool call is in progress. + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of the image generation item being processed. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.in_progress'. 
+ type: :"response.image_generation_call.in_progress" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi b/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi new file mode 100644 index 00000000..b5f9909a --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi @@ -0,0 +1,85 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallPartialImageEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # Base64-encoded partial image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :partial_image_b64 + + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + sig { returns(Integer) } + attr_accessor :partial_image_index + + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.partial_image'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a partial image is available during image generation streaming. + sig do + params( + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # Base64-encoded partial image data, suitable for rendering as an image. + partial_image_b64:, + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + partial_image_index:, + # The sequence number of the image generation item being processed. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.partial_image'. 
+ type: :"response.image_generation_call.partial_image" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index 2d59b644..bbafe9cb 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -24,6 +24,14 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, OpenAI::Responses::ResponseInputItem::ItemReference ) end @@ -607,6 +615,791 @@ module OpenAI end end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. 
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action + ) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. + type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. + sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. 
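
The `Action` fields documented here map naturally onto `spawn`-style options. A hedged sketch of executing a requested call locally (`item` is assumed to be a `local_shell_call` from a response's output, and the JSON output shape sent back is illustrative, not specified by this patch):

    require "json"

    if item.type == :local_shell_call
      action = item.action
      stdout = IO.popen(action.env.transform_keys(&:to_s), action.command,
                        chdir: action.working_directory || Dir.pwd, &:read)
      shell_output = {
        type: :local_shell_call_output,
        id: item.call_id,
        output: {stdout: stdout}.to_json # assumed payload shape
      }
    end
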
+ sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the output of the local shell tool call. + sig { returns(String) } + attr_accessor :output + + # The type of the local shell tool call output. Always `local_shell_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ) + ) + end + attr_accessor :status + + # The output of a local shell tool call. + sig do + params( + id: String, + output: String, + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell tool call generated by the model. + id:, + # A JSON string of the output of the local shell tool call. + output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + status: nil, + # The type of the local shell tool call output. Always `local_shell_call_output`. 
+ type: :local_shell_call_output + ) + end + + sig do + override.returns( + { + id: String, + output: String, + type: Symbol, + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ) + } + ) + end + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseInputItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. + type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseInputItem::McpListTools::Tool + ], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. 
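
A small sketch of reading the list-tools item defined here (`item` is assumed to come from a response's output or input items):

    if item.type == :mcp_list_tools
      warn "listing failed: #{item.error}" if item.error
      item.tools.each { |tool| puts "#{tool.name}: #{tool.description}" }
    end
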
+ sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the approval request being answered. + sig { returns(String) } + attr_accessor :approval_request_id + + # Whether the request was approved. + sig { returns(T::Boolean) } + attr_accessor :approve + + # The type of the item. Always `mcp_approval_response`. + sig { returns(Symbol) } + attr_accessor :type + + # The unique ID of the approval response + sig { returns(T.nilable(String)) } + attr_accessor :id + + # Optional reason for the decision. + sig { returns(T.nilable(String)) } + attr_accessor :reason + + # A response to an MCP approval request. + sig do + params( + approval_request_id: String, + approve: T::Boolean, + id: T.nilable(String), + reason: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the approval request being answered. + approval_request_id:, + # Whether the request was approved. + approve:, + # The unique ID of the approval response + id: nil, + # Optional reason for the decision. + reason: nil, + # The type of the item. Always `mcp_approval_response`. 
+ type: :mcp_approval_response + ) + end + + sig do + override.returns( + { + approval_request_id: String, + approve: T::Boolean, + type: Symbol, + id: T.nilable(String), + reason: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. + type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/responses/response_item.rbi b/rbi/openai/models/responses/response_item.rbi index 021afb6a..9566555a 100644 --- a/rbi/openai/models/responses/response_item.rbi +++ b/rbi/openai/models/responses/response_item.rbi @@ -17,10 +17,798 @@ module OpenAI OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem + OpenAI::Responses::ResponseFunctionToolCallOutputItem, + OpenAI::Responses::ResponseItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseItem::LocalShellCall, + OpenAI::Responses::ResponseItem::LocalShellCallOutput, + OpenAI::Responses::ResponseItem::McpListTools, + OpenAI::Responses::ResponseItem::McpApprovalRequest, + OpenAI::Responses::ResponseItem::McpApprovalResponse, + OpenAI::Responses::ResponseItem::McpCall ) end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. 
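+        # One of `in_progress`, `completed`, `generating`, or `failed`
+        # (see the `Status` enum below).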
+ sig do + returns( + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns(OpenAI::Responses::ResponseItem::LocalShellCall::Action) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Responses::ResponseItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. 
+ type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. + sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. + sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the output of the local shell tool call. + sig { returns(String) } + attr_accessor :output + + # The type of the local shell tool call output. Always `local_shell_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
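+        # May be `nil` when no status is reported for the item.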
+ sig do + returns( + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + ) + end + attr_accessor :status + + # The output of a local shell tool call. + sig do + params( + id: String, + output: String, + status: + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::OrSymbol + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell tool call generated by the model. + id:, + # A JSON string of the output of the local shell tool call. + output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + status: nil, + # The type of the local shell tool call output. Always `local_shell_call_output`. + type: :local_shell_call_output + ) + end + + sig do + override.returns( + { + id: String, + output: String, + type: Symbol, + status: + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + } + ) + end + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. 
+ type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpApprovalResponse, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval response + sig { returns(String) } + attr_accessor :id + + # The ID of the approval request being answered. + sig { returns(String) } + attr_accessor :approval_request_id + + # Whether the request was approved. + sig { returns(T::Boolean) } + attr_accessor :approve + + # The type of the item. Always `mcp_approval_response`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional reason for the decision. 
+ sig { returns(T.nilable(String)) } + attr_accessor :reason + + # A response to an MCP approval request. + sig do + params( + id: String, + approval_request_id: String, + approve: T::Boolean, + reason: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval response + id:, + # The ID of the approval request being answered. + approval_request_id:, + # Whether the request was approved. + approve:, + # Optional reason for the decision. + reason: nil, + # The type of the item. Always `mcp_approval_response`. + type: :mcp_approval_response + ) + end + + sig do + override.returns( + { + id: String, + approval_request_id: String, + approve: T::Boolean, + type: Symbol, + reason: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. 
+ type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + sig do override.returns(T::Array[OpenAI::Responses::ResponseItem::Variants]) end diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index 4157b8bd..4e645d9f 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -47,7 +47,15 @@ module OpenAI OpenAI::Responses::ResponseComputerToolCallOutputItem::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseFunctionToolCallItem::OrHash, - OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash + OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash, + OpenAI::Responses::ResponseItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseItem::McpListTools::OrHash, + OpenAI::Responses::ResponseItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseItem::McpCall::OrHash ) ], first_id: String, diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi new file mode 100644 index 00000000..543fa935 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi @@ -0,0 +1,68 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The partial update to the arguments for the MCP tool call. + sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.mcp_call.arguments_delta'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the arguments of an MCP tool + # call. + sig do + params( + delta: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The partial update to the arguments for the MCP tool call. + delta:, + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.mcp_call.arguments_delta'. 
+ type: :"response.mcp_call.arguments_delta" + ) + end + + sig do + override.returns( + { + delta: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi new file mode 100644 index 00000000..30a6a9dc --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi @@ -0,0 +1,67 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The finalized arguments for the MCP tool call. + sig { returns(T.anything) } + attr_accessor :arguments + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.mcp_call.arguments_done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the arguments for an MCP tool call are finalized. + sig do + params( + arguments: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The finalized arguments for the MCP tool call. + arguments:, + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.mcp_call.arguments_done'. + type: :"response.mcp_call.arguments_done" + ) + end + + sig do + override.returns( + { + arguments: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi new file mode 100644 index 00000000..ca8572f4 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The type of the event. Always 'response.mcp_call.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call has completed successfully. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the event. Always 'response.mcp_call.completed'. 
+ type: :"response.mcp_call.completed" + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi new file mode 100644 index 00000000..58153d24 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallFailedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The type of the event. Always 'response.mcp_call.failed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call has failed. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the event. Always 'response.mcp_call.failed'. + type: :"response.mcp_call.failed" + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi new file mode 100644 index 00000000..dc3261b2 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi @@ -0,0 +1,53 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.mcp_call.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call is in progress. + sig do + params(item_id: String, output_index: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.mcp_call.in_progress'. + type: :"response.mcp_call.in_progress" + ) + end + + sig do + override.returns( + { item_id: String, output_index: Integer, type: Symbol } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi new file mode 100644 index 00000000..5fdc05fd --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The type of the event. Always 'response.mcp_list_tools.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the list of available MCP tools has been successfully retrieved. 
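+        #
+        # A hypothetical stream-handling sketch (the `stream` variable and the
+        # surrounding loop are assumptions for illustration, not SDK API):
+        #
+        #   stream.each do |event|
+        #     case event
+        #     when OpenAI::Responses::ResponseMcpListToolsCompletedEvent
+        #       puts "MCP tool listing completed"
+        #     end
+        #   end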
+ sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the event. Always 'response.mcp_list_tools.completed'. + type: :"response.mcp_list_tools.completed" + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi new file mode 100644 index 00000000..ab9f7270 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsFailedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The type of the event. Always 'response.mcp_list_tools.failed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the attempt to list available MCP tools has failed. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the event. Always 'response.mcp_list_tools.failed'. + type: :"response.mcp_list_tools.failed" + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi new file mode 100644 index 00000000..c96ab479 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi @@ -0,0 +1,34 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The type of the event. Always 'response.mcp_list_tools.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the system is in the process of retrieving the list of available + # MCP tools. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the event. Always 'response.mcp_list_tools.in_progress'. 
+ type: :"response.mcp_list_tools.in_progress" + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_output_item.rbi b/rbi/openai/models/responses/response_output_item.rbi index 01dd560a..51542fc3 100644 --- a/rbi/openai/models/responses/response_output_item.rbi +++ b/rbi/openai/models/responses/response_output_item.rbi @@ -15,10 +15,623 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseOutputItem::LocalShellCall, + OpenAI::Responses::ResponseOutputItem::McpCall, + OpenAI::Responses::ResponseOutputItem::McpListTools, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest ) end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. 
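+        # Possible values are defined as constants below: `in_progress`,
+        # `completed`, `generating`, and `failed`.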
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action + ) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. + type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. + sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. 
+ sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. 
+ type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool + ] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. + type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool + ], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. 
+ sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[OpenAI::Responses::ResponseOutputItem::Variants] diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index b831c02f..d507d0a1 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -34,7 +34,13 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index 37e58da7..8c21edf4 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -34,7 +34,13 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi new file mode 100644 index 00000000..3cd7a5db --- /dev/null +++ b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class 
ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The annotation object being added. (See annotation schema for details.) + sig { returns(T.anything) } + attr_accessor :annotation + + # The index of the annotation within the content part. + sig { returns(Integer) } + attr_accessor :annotation_index + + # The index of the content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The unique identifier of the item to which the annotation is being added. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.output_text_annotation.added'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an annotation is added to output text content. + sig do + params( + annotation: T.anything, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The annotation object being added. (See annotation schema for details.) + annotation:, + # The index of the annotation within the content part. + annotation_index:, + # The index of the content part within the output item. + content_index:, + # The unique identifier of the item to which the annotation is being added. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.output_text_annotation.added'. + type: :"response.output_text_annotation.added" + ) + end + + sig do + override.returns( + { + annotation: T.anything, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_queued_event.rbi b/rbi/openai/models/responses/response_queued_event.rbi new file mode 100644 index 00000000..066f73a9 --- /dev/null +++ b/rbi/openai/models/responses/response_queued_event.rbi @@ -0,0 +1,51 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseQueuedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The full response object that is queued. + sig { returns(OpenAI::Responses::Response) } + attr_reader :response + + sig { params(response: OpenAI::Responses::Response::OrHash).void } + attr_writer :response + + # The type of the event. Always 'response.queued'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a response is queued and waiting to be processed. + sig do + params( + response: OpenAI::Responses::Response::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The full response object that is queued. + response:, + # The type of the event. Always 'response.queued'. 
+ type: :"response.queued" + ) + end + + sig do + override.returns( + { response: OpenAI::Responses::Response, type: Symbol } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_delta_event.rbi new file mode 100644 index 00000000..5de816d0 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_delta_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The index of the reasoning content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The partial update to the reasoning content. + sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the item for which reasoning is being updated. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The type of the event. Always 'response.reasoning.delta'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the reasoning content. + sig do + params( + content_index: Integer, + delta: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the reasoning content part within the output item. + content_index:, + # The partial update to the reasoning content. + delta:, + # The unique identifier of the item for which reasoning is being updated. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The type of the event. Always 'response.reasoning.delta'. + type: :"response.reasoning.delta" + ) + end + + sig do + override.returns( + { + content_index: Integer, + delta: T.anything, + item_id: String, + output_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_done_event.rbi b/rbi/openai/models/responses/response_reasoning_done_event.rbi new file mode 100644 index 00000000..17d3272d --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_done_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The index of the reasoning content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The unique identifier of the item for which reasoning is finalized. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The finalized reasoning text. + sig { returns(String) } + attr_accessor :text + + # The type of the event. Always 'response.reasoning.done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the reasoning content is finalized for an item. 
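+        #
+        # A hypothetical handler sketch (`event` is an assumption for
+        # illustration; field values depend on the response being streamed):
+        #
+        #   if event.is_a?(OpenAI::Responses::ResponseReasoningDoneEvent)
+        #     puts "item #{event.item_id} reasoning: #{event.text}"
+        #   end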
+ sig do + params( + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the reasoning content part within the output item. + content_index:, + # The unique identifier of the item for which reasoning is finalized. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The finalized reasoning text. + text:, + # The type of the event. Always 'response.reasoning.done'. + type: :"response.reasoning.done" + ) + end + + sig do + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi new file mode 100644 index 00000000..9f7deac4 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi @@ -0,0 +1,77 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The partial update to the reasoning summary content. + sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the item for which the reasoning summary is being + # updated. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The index of the summary part within the output item. + sig { returns(Integer) } + attr_accessor :summary_index + + # The type of the event. Always 'response.reasoning_summary.delta'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the reasoning summary content. + sig do + params( + delta: T.anything, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The partial update to the reasoning summary content. + delta:, + # The unique identifier of the item for which the reasoning summary is being + # updated. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The index of the summary part within the output item. + summary_index:, + # The type of the event. Always 'response.reasoning_summary.delta'. 
+ type: :"response.reasoning_summary.delta" + ) + end + + sig do + override.returns( + { + delta: T.anything, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi new file mode 100644 index 00000000..24bebaed --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the item for which the reasoning summary is finalized. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The index of the summary part within the output item. + sig { returns(Integer) } + attr_accessor :summary_index + + # The finalized reasoning summary text. + sig { returns(String) } + attr_accessor :text + + # The type of the event. Always 'response.reasoning_summary.done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the reasoning summary content is finalized for an item. + sig do + params( + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the item for which the reasoning summary is finalized. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The index of the summary part within the output item. + summary_index:, + # The finalized reasoning summary text. + text:, + # The type of the event. Always 'response.reasoning_summary.done'. + type: :"response.reasoning_summary.done" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_status.rbi b/rbi/openai/models/responses/response_status.rbi index ed8a5a5d..5eb9802f 100644 --- a/rbi/openai/models/responses/response_status.rbi +++ b/rbi/openai/models/responses/response_status.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. 
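+ # (`cancelled` and `queued` are primarily seen on responses created with
+ # `background: true`; see the `background` request parameter.)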
module ResponseStatus extend OpenAI::Internal::Type::Enum @@ -17,6 +17,9 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Responses::ResponseStatus::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Responses::ResponseStatus::TaggedSymbol) + CANCELLED = + T.let(:cancelled, OpenAI::Responses::ResponseStatus::TaggedSymbol) + QUEUED = T.let(:queued, OpenAI::Responses::ResponseStatus::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Responses::ResponseStatus::TaggedSymbol) diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index de4fe138..a29df0c1 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -45,7 +45,25 @@ module OpenAI OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent + OpenAI::Responses::ResponseWebSearchCallSearchingEvent, + OpenAI::Responses::ResponseImageGenCallCompletedEvent, + OpenAI::Responses::ResponseImageGenCallGeneratingEvent, + OpenAI::Responses::ResponseImageGenCallInProgressEvent, + OpenAI::Responses::ResponseImageGenCallPartialImageEvent, + OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, + OpenAI::Responses::ResponseMcpCallCompletedEvent, + OpenAI::Responses::ResponseMcpCallFailedEvent, + OpenAI::Responses::ResponseMcpCallInProgressEvent, + OpenAI::Responses::ResponseMcpListToolsCompletedEvent, + OpenAI::Responses::ResponseMcpListToolsFailedEvent, + OpenAI::Responses::ResponseMcpListToolsInProgressEvent, + OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, + OpenAI::Responses::ResponseQueuedEvent, + OpenAI::Responses::ResponseReasoningDeltaEvent, + OpenAI::Responses::ResponseReasoningDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, + OpenAI::Responses::ResponseReasoningSummaryDoneEvent ) end diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index f9eb832e..c46f1d66 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -10,13 +10,1061 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) end + class Mcp < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Responses::Tool::Mcp, OpenAI::Internal::AnyHash) + end + + # A label for this MCP server, used to identify it in tool calls. + sig { returns(String) } + attr_accessor :server_label + + # The URL for the MCP server. + sig { returns(String) } + attr_accessor :server_url + + # The type of the MCP tool. Always `mcp`. + sig { returns(Symbol) } + attr_accessor :type + + # List of allowed tool names or a filter object. + sig do + returns( + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + ) + ) + end + attr_accessor :allowed_tools + + # Optional HTTP headers to send to the MCP server. Use for authentication or other + # purposes. 
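+ # For example (illustrative placeholder token):
+ #
+ #   headers: { Authorization: "Bearer MCP_TOKEN" }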
+ sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :headers + + # Specify which of the MCP server's tools require approval. + sig do + returns( + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ) + ) + end + attr_accessor :require_approval + + # Give the model access to additional tools via remote Model Context Protocol + # (MCP) servers. + # [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). + sig do + params( + server_label: String, + server_url: String, + allowed_tools: + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter::OrHash + ) + ), + headers: T.nilable(T::Hash[Symbol, String]), + require_approval: + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::OrHash, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A label for this MCP server, used to identify it in tool calls. + server_label:, + # The URL for the MCP server. + server_url:, + # List of allowed tool names or a filter object. + allowed_tools: nil, + # Optional HTTP headers to send to the MCP server. Use for authentication or other + # purposes. + headers: nil, + # Specify which of the MCP server's tools require approval. + require_approval: nil, + # The type of the MCP tool. Always `mcp`. + type: :mcp + ) + end + + sig do + override.returns( + { + server_label: String, + server_url: String, + type: Symbol, + allowed_tools: + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + ), + headers: T.nilable(T::Hash[Symbol, String]), + require_approval: + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ) + } + ) + end + def to_hash + end + + # List of allowed tool names or a filter object. + module AllowedTools + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + end + + class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, + OpenAI::Internal::AnyHash + ) + end + + # List of allowed tool names. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A filter object to specify which tools are allowed. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of allowed tool names. + tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[OpenAI::Responses::Tool::Mcp::AllowedTools::Variants] + ) + end + def self.variants + end + + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) + end + + # Specify which of the MCP server's tools require approval. 
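+ # Accepts the `always`/`never` shorthand or a filter object, e.g.
+ # (with an illustrative tool name):
+ #
+ #   require_approval: :never
+ #   require_approval: { always: { tool_names: ["delete_file"] } }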
+ module RequireApproval + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + end + + class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Internal::AnyHash + ) + end + + # A list of tools that always require approval. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + ) + ) + end + attr_reader :always + + sig do + params( + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always::OrHash + ).void + end + attr_writer :always + + # A list of tools that never require approval. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + ) + ) + end + attr_reader :never + + sig do + params( + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash + ).void + end + attr_writer :never + + # List of allowed tool names. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + sig do + params( + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always::OrHash, + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash, + tool_names: T::Array[String] + ).returns(T.attached_class) + end + def self.new( + # A list of tools that always require approval. + always: nil, + # A list of tools that never require approval. + never: nil, + # List of allowed tool names. + tool_names: nil + ) + end + + sig do + override.returns( + { + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + tool_names: T::Array[String] + } + ) + end + def to_hash + end + + class Always < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + OpenAI::Internal::AnyHash + ) + end + + # List of tools that require approval. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A list of tools that always require approval. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of tools that require approval. + tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + + class Never < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + OpenAI::Internal::AnyHash + ) + end + + # List of tools that do not require approval. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A list of tools that never require approval. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of tools that do not require approval. 
+ tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + end + + # Specify a single approval policy for all tools. One of `always` or `never`. When + # set to `always`, all tools will require approval. When set to `never`, all tools + # will not require approval. + module McpToolApprovalSetting + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ALWAYS = + T.let( + :always, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + NEVER = + T.let( + :never, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ] + ) + end + def self.values + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::Mcp::RequireApproval::Variants + ] + ) + end + def self.variants + end + end + end + + class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end + + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + sig do + returns( + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ) + ) + end + attr_accessor :container + + # The type of the code interpreter tool. Always `code_interpreter`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool that runs Python code to help generate a response to a prompt. + sig do + params( + container: + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto::OrHash + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + container:, + # The type of the code interpreter tool. Always `code_interpreter`. + type: :code_interpreter + ) + end + + sig do + override.returns( + { + container: + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ), + type: Symbol + } + ) + end + def to_hash + end + + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + module Container + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ) + end + + class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto, + OpenAI::Internal::AnyHash + ) + end + + # Always `auto`. + sig { returns(Symbol) } + attr_accessor :type + + # An optional list of uploaded files to make available to your code. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :file_ids + + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids + + # Configuration for a code interpreter container. Optionally specify the IDs of + # the files to run the code on. 
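+ # For example (both forms are accepted; IDs are placeholders):
+ #
+ #   container: "cntr_abc123"
+ #   container: { type: :auto, file_ids: ["file-abc123"] }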
+ sig do + params(file_ids: T::Array[String], type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # An optional list of uploaded files to make available to your code. + file_ids: nil, + # Always `auto`. + type: :auto + ) + end + + sig do + override.returns({ type: Symbol, file_ids: T::Array[String] }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::CodeInterpreter::Container::Variants + ] + ) + end + def self.variants + end + end + end + + class ImageGeneration < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Internal::AnyHash + ) + end + + # The type of the image generation tool. Always `image_generation`. + sig { returns(Symbol) } + attr_accessor :type + + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol + ) + ) + end + attr_reader :background + + sig do + params( + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol + ).void + end + attr_writer :background + + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask + ) + ) + end + attr_reader :input_image_mask + + sig do + params( + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash + ).void + end + attr_writer :input_image_mask + + # The image generation model to use. Default: `gpt-image-1`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol + ) + ) + end + attr_reader :model + + sig do + params( + model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol + ).void + end + attr_writer :model + + # Moderation level for the generated image. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol + ) + ) + end + attr_reader :moderation + + sig do + params( + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol + ).void + end + attr_writer :moderation + + # Compression level for the output image. Default: 100. + sig { returns(T.nilable(Integer)) } + attr_reader :output_compression + + sig { params(output_compression: Integer).void } + attr_writer :output_compression + + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol + ) + ) + end + attr_reader :output_format + + sig do + params( + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol + ).void + end + attr_writer :output_format + + # Number of partial images to generate in streaming mode, from 0 (default value) + # to 3. + sig { returns(T.nilable(Integer)) } + attr_reader :partial_images + + sig { params(partial_images: Integer).void } + attr_writer :partial_images + + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol + ) + ) + end + attr_reader :quality + + sig do + params( + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol + ).void + end + attr_writer :quality + + # The size of the generated image. 
One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + ) + ) + end + attr_reader :size + + sig do + params( + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + ).void + end + attr_writer :size + + # A tool that generates images using a model like `gpt-image-1`. + sig do + params( + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash, + model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol, + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol, + output_compression: Integer, + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol, + partial_images: Integer, + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol, + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + background: nil, + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + input_image_mask: nil, + # The image generation model to use. Default: `gpt-image-1`. + model: nil, + # Moderation level for the generated image. Default: `auto`. + moderation: nil, + # Compression level for the output image. Default: 100. + output_compression: nil, + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. + output_format: nil, + # Number of partial images to generate in streaming mode, from 0 (default value) + # to 3. + partial_images: nil, + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + quality: nil, + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + size: nil, + # The type of the image generation tool. Always `image_generation`. + type: :image_generation + ) + end + + sig do + override.returns( + { + type: Symbol, + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: + OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol, + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol, + output_compression: Integer, + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol, + partial_images: Integer, + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol, + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + } + ) + end + def to_hash + end + + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. 
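+ # For example, `background: :transparent` requests an image with an alpha
+ # channel (illustrative usage).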
+ module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::Background + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ] + ) + end + def self.values + end + end + + class InputImageMask < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + OpenAI::Internal::AnyHash + ) + end + + # File ID for the mask image. + sig { returns(T.nilable(String)) } + attr_reader :file_id + + sig { params(file_id: String).void } + attr_writer :file_id + + # Base64-encoded mask image. + sig { returns(T.nilable(String)) } + attr_reader :image_url + + sig { params(image_url: String).void } + attr_writer :image_url + + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + sig do + params(file_id: String, image_url: String).returns( + T.attached_class + ) + end + def self.new( + # File ID for the mask image. + file_id: nil, + # Base64-encoded mask image. + image_url: nil + ) + end + + sig { override.returns({ file_id: String, image_url: String }) } + def to_hash + end + end + + # The image generation model to use. Default: `gpt-image-1`. + module Model + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Model) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + GPT_IMAGE_1 = + T.let( + :"gpt-image-1", + OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol + ] + ) + end + def self.values + end + end + + # Moderation level for the generated image. Default: `auto`. + module Moderation + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::Moderation + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. 
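+ # `png` and `webp` support an alpha channel; `jpeg` does not, so it is not
+ # suitable for `transparent` backgrounds.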
+ module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Size) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShell < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::LocalShell, + OpenAI::Internal::AnyHash + ) + end + + # The type of the local shell tool. Always `local_shell`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool that allows the model to execute shell commands in a local environment. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the local shell tool. Always `local_shell`. 
+ type: :local_shell + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + sig { override.returns(T::Array[OpenAI::Responses::Tool::Variants]) } def self.variants end diff --git a/rbi/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi index 3851436c..f66d4aac 100644 --- a/rbi/openai/models/responses/tool_choice_types.rbi +++ b/rbi/openai/models/responses/tool_choice_types.rbi @@ -17,6 +17,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` sig { returns(OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol) } attr_accessor :type @@ -36,6 +39,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` type: ) end @@ -56,6 +62,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` module Type extend OpenAI::Internal::Type::Enum @@ -85,6 +94,18 @@ module OpenAI :web_search_preview_2025_03_11, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol ) + IMAGE_GENERATION = + T.let( + :image_generation, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) + CODE_INTERPRETER = + T.let( + :code_interpreter, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) + MCP = + T.let(:mcp, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol) sig do override.returns( diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 6b3aa247..e5f71338 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -28,6 +28,7 @@ module OpenAI OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -54,9 +55,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -87,6 +92,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. 
Currently # supported values are: # @@ -227,6 +235,7 @@ module OpenAI OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -253,9 +262,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -290,6 +303,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. Currently # supported values are: # diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index ea887796..4b767068 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -17,6 +17,7 @@ module OpenAI tool_choice: OpenAI::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, + background: bool?, max_output_tokens: Integer?, previous_response_id: String?, reasoning: OpenAI::Reasoning?, @@ -57,6 +58,8 @@ module OpenAI attr_accessor top_p: Float? + attr_accessor background: bool? + attr_accessor max_output_tokens: Integer? attr_accessor previous_response_id: String? @@ -103,6 +106,7 @@ module OpenAI tool_choice: OpenAI::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, + ?background: bool?, ?max_output_tokens: Integer?, ?previous_response_id: String?, ?reasoning: OpenAI::Reasoning?, diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index c892569f..e1a774bd 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -7,7 +7,8 @@ module OpenAI code: String, results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, - type: :code_interpreter_call + type: :code_interpreter_call, + container_id: String } class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel @@ -21,11 +22,16 @@ module OpenAI attr_accessor type: :code_interpreter_call + attr_reader container_id: String? 
+ + def container_id=: (String) -> String + def initialize: ( id: String, code: String, results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, + ?container_id: String, ?type: :code_interpreter_call ) -> void diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index cf306e43..0587dc2e 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -5,6 +5,7 @@ module OpenAI { input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + background: bool?, include: ::Array[OpenAI::Models::Responses::response_includable]?, instructions: String?, max_output_tokens: Integer?, @@ -32,6 +33,8 @@ module OpenAI attr_accessor model: OpenAI::Models::responses_model + attr_accessor background: bool? + attr_accessor include: ::Array[OpenAI::Models::Responses::response_includable]? attr_accessor instructions: String? @@ -81,6 +84,7 @@ module OpenAI def initialize: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, diff --git a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs new file mode 100644 index 00000000..56a60bce --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_completed_event = + { + item_id: String, + output_index: Integer, + type: :"response.image_generation_call.completed" + } + + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.image_generation_call.completed" + + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.image_generation_call.completed" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs new file mode 100644 index 00000000..20d111fd --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs @@ -0,0 +1,32 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_generating_event = + { + item_id: String, + output_index: Integer, + type: :"response.image_generation_call.generating", + sequence_number: Integer + } + + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.image_generation_call.generating" + + attr_reader sequence_number: Integer? 
+ + def sequence_number=: (Integer) -> Integer + + def initialize: ( + item_id: String, + output_index: Integer, + ?sequence_number: Integer, + ?type: :"response.image_generation_call.generating" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs new file mode 100644 index 00000000..a90abf26 --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_in_progress_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.in_progress" + } + + class ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.in_progress" + + def initialize: ( + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.image_generation_call.in_progress" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs new file mode 100644 index 00000000..5eaed059 --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_partial_image_event = + { + item_id: String, + output_index: Integer, + :partial_image_b64 => String, + partial_image_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.partial_image" + } + + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor partial_image_b64: String + + attr_accessor partial_image_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.partial_image" + + def initialize: ( + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + ?type: :"response.image_generation_call.partial_image" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index f3cb1160..ab7d1946 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -12,6 +12,14 @@ module OpenAI | OpenAI::Responses::ResponseFunctionToolCall | OpenAI::Responses::ResponseInputItem::FunctionCallOutput | OpenAI::Responses::ResponseReasoningItem + | OpenAI::Responses::ResponseInputItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseInputItem::LocalShellCall + | OpenAI::Responses::ResponseInputItem::LocalShellCallOutput + | OpenAI::Responses::ResponseInputItem::McpListTools + | OpenAI::Responses::ResponseInputItem::McpApprovalRequest + | OpenAI::Responses::ResponseInputItem::McpApprovalResponse + | OpenAI::Responses::ResponseInputItem::McpCall | OpenAI::Responses::ResponseInputItem::ItemReference module ResponseInputItem @@ -187,6 +195,304 @@ module OpenAI end end + type image_generation_call = + { + id: 
String, + result: String?, + status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? + + attr_accessor status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? + + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::LocalShellCall::status] + end + end + + type local_shell_call_output = + { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status? + } + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor output: String + + attr_accessor type: :local_shell_call_output + + attr_accessor status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status? 
+ + def initialize: ( + id: String, + output: String, + ?status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status?, + ?type: :local_shell_call_output + ) -> void + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status] + end + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + end + + type mcp_approval_response = + { + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + id: String?, + reason: String? + } + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + attr_accessor approval_request_id: String + + attr_accessor approve: bool + + attr_accessor type: :mcp_approval_response + + attr_accessor id: String? + + attr_accessor reason: String? + + def initialize: ( + approval_request_id: String, + approve: bool, + ?id: String?, + ?reason: String?, + ?type: :mcp_approval_response + ) -> void + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? 
+ + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + end + type item_reference = { id: String, diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index ab5eaa4c..441c732c 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -10,10 +10,316 @@ module OpenAI | OpenAI::Responses::ResponseFunctionWebSearch | OpenAI::Responses::ResponseFunctionToolCallItem | OpenAI::Responses::ResponseFunctionToolCallOutputItem + | OpenAI::Responses::ResponseItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseItem::LocalShellCall + | OpenAI::Responses::ResponseItem::LocalShellCallOutput + | OpenAI::Responses::ResponseItem::McpListTools + | OpenAI::Responses::ResponseItem::McpApprovalRequest + | OpenAI::Responses::ResponseItem::McpApprovalResponse + | OpenAI::Responses::ResponseItem::McpCall module ResponseItem extend OpenAI::Internal::Type::Union + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? + + attr_accessor status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Responses::ResponseItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? 
+ + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::LocalShellCall::status] + end + end + + type local_shell_call_output = + { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status? + } + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor output: String + + attr_accessor type: :local_shell_call_output + + attr_accessor status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status? + + def initialize: ( + id: String, + output: String, + ?status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status?, + ?type: :local_shell_call_output + ) -> void + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::LocalShellCallOutput::status] + end + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + end + + type mcp_approval_response = + { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor approval_request_id: String + + attr_accessor approve: bool + + attr_accessor type: :mcp_approval_response + + attr_accessor reason: String? 
+ + def initialize: ( + id: String, + approval_request_id: String, + approve: bool, + ?reason: String?, + ?type: :mcp_approval_response + ) -> void + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::response_item] end end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs new file mode 100644 index 00000000..0fd30fe8 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_arguments_delta_event = + { + delta: top, + item_id: String, + output_index: Integer, + type: :"response.mcp_call.arguments_delta" + } + + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.mcp_call.arguments_delta" + + def initialize: ( + delta: top, + item_id: String, + output_index: Integer, + ?type: :"response.mcp_call.arguments_delta" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs new file mode 100644 index 00000000..33550d67 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_arguments_done_event = + { + arguments: top, + item_id: String, + output_index: Integer, + type: :"response.mcp_call.arguments_done" + } + + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor arguments: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.mcp_call.arguments_done" + + def initialize: ( + arguments: top, + item_id: String, + output_index: Integer, + ?type: :"response.mcp_call.arguments_done" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_completed_event.rbs b/sig/openai/models/responses/response_mcp_call_completed_event.rbs new file mode 100644 index 00000000..37b0b9e4 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_completed_event.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_completed_event = + { type: :"response.mcp_call.completed" } + + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor type: :"response.mcp_call.completed" + + def initialize: (?type: :"response.mcp_call.completed") -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_failed_event.rbs b/sig/openai/models/responses/response_mcp_call_failed_event.rbs new file mode 100644 index 00000000..39d6df8d 
--- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_failed_event.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_failed_event = + { type: :"response.mcp_call.failed" } + + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor type: :"response.mcp_call.failed" + + def initialize: (?type: :"response.mcp_call.failed") -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs new file mode 100644 index 00000000..3fe682e3 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_in_progress_event = + { + item_id: String, + output_index: Integer, + type: :"response.mcp_call.in_progress" + } + + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.mcp_call.in_progress" + + def initialize: ( + item_id: String, + output_index: Integer, + ?type: :"response.mcp_call.in_progress" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs new file mode 100644 index 00000000..304d51b9 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_completed_event = + { type: :"response.mcp_list_tools.completed" } + + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor type: :"response.mcp_list_tools.completed" + + def initialize: (?type: :"response.mcp_list_tools.completed") -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs new file mode 100644 index 00000000..8e4ea36a --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_failed_event = + { type: :"response.mcp_list_tools.failed" } + + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor type: :"response.mcp_list_tools.failed" + + def initialize: (?type: :"response.mcp_list_tools.failed") -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs new file mode 100644 index 00000000..1b4fa40e --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs @@ -0,0 +1,14 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_in_progress_event = + { type: :"response.mcp_list_tools.in_progress" } + + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor type: :"response.mcp_list_tools.in_progress" + + def initialize: (?type: :"response.mcp_list_tools.in_progress") -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index cee5b2c7..5007eaf4 100644 --- 
a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -8,10 +8,248 @@ module OpenAI | OpenAI::Responses::ResponseFunctionWebSearch | OpenAI::Responses::ResponseComputerToolCall | OpenAI::Responses::ResponseReasoningItem + | OpenAI::Responses::ResponseOutputItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseOutputItem::LocalShellCall + | OpenAI::Responses::ResponseOutputItem::McpCall + | OpenAI::Responses::ResponseOutputItem::McpListTools + | OpenAI::Responses::ResponseOutputItem::McpApprovalRequest module ResponseOutputItem extend OpenAI::Internal::Type::Union + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? + + attr_accessor status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? 
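+
+            # An illustrative payload (hand-written example, not generated from
+            # the OpenAPI spec): a decoded `exec` action for a local shell call
+            # might look like
+            #
+            #   {
+            #     command: ["bash", "-lc", "echo hello"],
+            #     env: {PATH: "/usr/bin"},
+            #     type: :exec,
+            #     timeout_ms: 5_000,
+            #     user: nil,
+            #     working_directory: nil
+            #   }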
+ + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputItem::LocalShellCall::status] + end + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? 
+ ) -> void + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::response_output_item] end end diff --git a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs new file mode 100644 index 00000000..82004cef --- /dev/null +++ b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Models + module Responses + type response_output_text_annotation_added_event = + { + annotation: top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + type: :"response.output_text_annotation.added" + } + + class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor annotation: top + + attr_accessor annotation_index: Integer + + attr_accessor content_index: Integer + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.output_text_annotation.added" + + def initialize: ( + annotation: top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + ?type: :"response.output_text_annotation.added" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_queued_event.rbs b/sig/openai/models/responses/response_queued_event.rbs new file mode 100644 index 00000000..f1334abf --- /dev/null +++ b/sig/openai/models/responses/response_queued_event.rbs @@ -0,0 +1,19 @@ +module OpenAI + module Models + module Responses + type response_queued_event = + { response: OpenAI::Responses::Response, type: :"response.queued" } + + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor response: OpenAI::Responses::Response + + attr_accessor type: :"response.queued" + + def initialize: ( + response: OpenAI::Responses::Response, + ?type: :"response.queued" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_delta_event.rbs b/sig/openai/models/responses/response_reasoning_delta_event.rbs new file mode 100644 index 00000000..00b8c199 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_delta_event.rbs @@ -0,0 +1,34 @@ +module OpenAI + module Models + module Responses + type response_reasoning_delta_event = + { + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + type: :"response.reasoning.delta" + } + + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor type: :"response.reasoning.delta" + + def initialize: ( + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + ?type: :"response.reasoning.delta" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_done_event.rbs 
b/sig/openai/models/responses/response_reasoning_done_event.rbs new file mode 100644 index 00000000..75577efa --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_done_event.rbs @@ -0,0 +1,34 @@ +module OpenAI + module Models + module Responses + type response_reasoning_done_event = + { + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + type: :"response.reasoning.done" + } + + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor text: String + + attr_accessor type: :"response.reasoning.done" + + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + text: String, + ?type: :"response.reasoning.done" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs new file mode 100644 index 00000000..c745f654 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs @@ -0,0 +1,34 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_delta_event = + { + delta: top, + item_id: String, + output_index: Integer, + summary_index: Integer, + type: :"response.reasoning_summary.delta" + } + + class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor summary_index: Integer + + attr_accessor type: :"response.reasoning_summary.delta" + + def initialize: ( + delta: top, + item_id: String, + output_index: Integer, + summary_index: Integer, + ?type: :"response.reasoning_summary.delta" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs new file mode 100644 index 00000000..bdde17bb --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs @@ -0,0 +1,34 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_done_event = + { + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary.done" + } + + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor summary_index: Integer + + attr_accessor text: String + + attr_accessor type: :"response.reasoning_summary.done" + + def initialize: ( + item_id: String, + output_index: Integer, + summary_index: Integer, + text: String, + ?type: :"response.reasoning_summary.done" + ) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index d78fb3a2..e54cd85f 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -1,7 +1,8 @@ module OpenAI module Models module Responses - type response_status = :completed | :failed | :in_progress | :incomplete + type response_status = + :completed | :failed | :in_progress | :cancelled | :queued | :incomplete module ResponseStatus extend OpenAI::Internal::Type::Enum @@ -9,6 +10,8 @@ module OpenAI COMPLETED: :completed FAILED: :failed IN_PROGRESS: :in_progress + CANCELLED: 
:cancelled + QUEUED: :queued INCOMPLETE: :incomplete def self?.values: -> ::Array[OpenAI::Models::Responses::response_status] diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index b8f72810..a14a4f29 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -38,6 +38,24 @@ module OpenAI | OpenAI::Responses::ResponseWebSearchCallCompletedEvent | OpenAI::Responses::ResponseWebSearchCallInProgressEvent | OpenAI::Responses::ResponseWebSearchCallSearchingEvent + | OpenAI::Responses::ResponseImageGenCallCompletedEvent + | OpenAI::Responses::ResponseImageGenCallGeneratingEvent + | OpenAI::Responses::ResponseImageGenCallInProgressEvent + | OpenAI::Responses::ResponseImageGenCallPartialImageEvent + | OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent + | OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent + | OpenAI::Responses::ResponseMcpCallCompletedEvent + | OpenAI::Responses::ResponseMcpCallFailedEvent + | OpenAI::Responses::ResponseMcpCallInProgressEvent + | OpenAI::Responses::ResponseMcpListToolsCompletedEvent + | OpenAI::Responses::ResponseMcpListToolsFailedEvent + | OpenAI::Responses::ResponseMcpListToolsInProgressEvent + | OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent + | OpenAI::Responses::ResponseQueuedEvent + | OpenAI::Responses::ResponseReasoningDeltaEvent + | OpenAI::Responses::ResponseReasoningDoneEvent + | OpenAI::Responses::ResponseReasoningSummaryDeltaEvent + | OpenAI::Responses::ResponseReasoningSummaryDoneEvent module ResponseStreamEvent extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 81616de5..40a2e90b 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -2,14 +2,359 @@ module OpenAI module Models module Responses type tool = - OpenAI::Responses::FileSearchTool - | OpenAI::Responses::FunctionTool + OpenAI::Responses::FunctionTool + | OpenAI::Responses::FileSearchTool | OpenAI::Responses::ComputerTool + | OpenAI::Responses::Tool::Mcp + | OpenAI::Responses::Tool::CodeInterpreter + | OpenAI::Responses::Tool::ImageGeneration + | OpenAI::Responses::Tool::LocalShell | OpenAI::Responses::WebSearchTool module Tool extend OpenAI::Internal::Type::Union + type mcp = + { + server_label: String, + server_url: String, + type: :mcp, + allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools?, + headers: ::Hash[Symbol, String]?, + require_approval: OpenAI::Responses::Tool::Mcp::require_approval? + } + + class Mcp < OpenAI::Internal::Type::BaseModel + attr_accessor server_label: String + + attr_accessor server_url: String + + attr_accessor type: :mcp + + attr_accessor allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools? + + attr_accessor headers: ::Hash[Symbol, String]? + + attr_accessor require_approval: OpenAI::Responses::Tool::Mcp::require_approval? 
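+
+        # An illustrative configuration (hand-written example, not generated
+        # from the OpenAPI spec): a minimal MCP tool definition might decode
+        # from
+        #
+        #   {
+        #     server_label: "docs",
+        #     server_url: "https://example.com/mcp",
+        #     type: :mcp,
+        #     require_approval: :never
+        #   }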
+ + def initialize: ( + server_label: String, + server_url: String, + ?allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools?, + ?headers: ::Hash[Symbol, String]?, + ?require_approval: OpenAI::Responses::Tool::Mcp::require_approval?, + ?type: :mcp + ) -> void + + type allowed_tools = + ::Array[String] + | OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + + module AllowedTools + extend OpenAI::Internal::Type::Union + + type mcp_allowed_tools_filter = { tool_names: ::Array[String] } + + class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? + + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + end + + def self?.variants: -> ::Array[OpenAI::Responses::Tool::Mcp::allowed_tools] + + StringArray: OpenAI::Internal::Type::Converter + end + + type require_approval = + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter + | OpenAI::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting + + module RequireApproval + extend OpenAI::Internal::Type::Union + + type mcp_tool_approval_filter = + { + always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + tool_names: ::Array[String] + } + + class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel + attr_reader always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always? + + def always=: ( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + ) -> OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + + attr_reader never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never? + + def never=: ( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + ) -> OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + + attr_reader tool_names: ::Array[String]? + + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: ( + ?always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + ?never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + ?tool_names: ::Array[String] + ) -> void + + type always = { tool_names: ::Array[String] } + + class Always < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? + + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + end + + type never = { tool_names: ::Array[String] } + + class Never < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? 
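+
+                # A sketch with invented tool names (illustration only): in the
+                # enclosing McpToolApprovalFilter, the `always`/`never` entries
+                # might decode from
+                #
+                #   {
+                #     always: {tool_names: ["write_file"]},
+                #     never: {tool_names: ["read_file"]}
+                #   }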
+ + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + end + end + + type mcp_tool_approval_setting = :always | :never + + module McpToolApprovalSetting + extend OpenAI::Internal::Type::Enum + + ALWAYS: :always + NEVER: :never + + def self?.values: -> ::Array[OpenAI::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting] + end + + def self?.variants: -> ::Array[OpenAI::Responses::Tool::Mcp::require_approval] + end + end + + type code_interpreter = + { + container: OpenAI::Responses::Tool::CodeInterpreter::container, + type: :code_interpreter + } + + class CodeInterpreter < OpenAI::Internal::Type::BaseModel + attr_accessor container: OpenAI::Responses::Tool::CodeInterpreter::container + + attr_accessor type: :code_interpreter + + def initialize: ( + container: OpenAI::Responses::Tool::CodeInterpreter::container, + ?type: :code_interpreter + ) -> void + + type container = + String + | OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + + module Container + extend OpenAI::Internal::Type::Union + + type code_interpreter_tool_auto = + { type: :auto, file_ids: ::Array[String] } + + class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel + attr_accessor type: :auto + + attr_reader file_ids: ::Array[String]? + + def file_ids=: (::Array[String]) -> ::Array[String] + + def initialize: (?file_ids: ::Array[String], ?type: :auto) -> void + end + + def self?.variants: -> ::Array[OpenAI::Responses::Tool::CodeInterpreter::container] + end + end + + type image_generation = + { + type: :image_generation, + background: OpenAI::Responses::Tool::ImageGeneration::background, + input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: OpenAI::Responses::Tool::ImageGeneration::model, + moderation: OpenAI::Responses::Tool::ImageGeneration::moderation, + output_compression: Integer, + output_format: OpenAI::Responses::Tool::ImageGeneration::output_format, + partial_images: Integer, + quality: OpenAI::Responses::Tool::ImageGeneration::quality, + size: OpenAI::Responses::Tool::ImageGeneration::size + } + + class ImageGeneration < OpenAI::Internal::Type::BaseModel + attr_accessor type: :image_generation + + attr_reader background: OpenAI::Responses::Tool::ImageGeneration::background? + + def background=: ( + OpenAI::Responses::Tool::ImageGeneration::background + ) -> OpenAI::Responses::Tool::ImageGeneration::background + + attr_reader input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask? + + def input_image_mask=: ( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask + ) -> OpenAI::Responses::Tool::ImageGeneration::InputImageMask + + attr_reader model: OpenAI::Responses::Tool::ImageGeneration::model? + + def model=: ( + OpenAI::Responses::Tool::ImageGeneration::model + ) -> OpenAI::Responses::Tool::ImageGeneration::model + + attr_reader moderation: OpenAI::Responses::Tool::ImageGeneration::moderation? + + def moderation=: ( + OpenAI::Responses::Tool::ImageGeneration::moderation + ) -> OpenAI::Responses::Tool::ImageGeneration::moderation + + attr_reader output_compression: Integer? + + def output_compression=: (Integer) -> Integer + + attr_reader output_format: OpenAI::Responses::Tool::ImageGeneration::output_format? + + def output_format=: ( + OpenAI::Responses::Tool::ImageGeneration::output_format + ) -> OpenAI::Responses::Tool::ImageGeneration::output_format + + attr_reader partial_images: Integer? 
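+
+        # An illustrative configuration (values chosen by hand from the enums
+        # below): an image generation tool might decode from
+        #
+        #   {
+        #     type: :image_generation,
+        #     model: :"gpt-image-1",
+        #     output_format: :png,
+        #     size: :"1024x1024",
+        #     partial_images: 2
+        #   }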
+ + def partial_images=: (Integer) -> Integer + + attr_reader quality: OpenAI::Responses::Tool::ImageGeneration::quality? + + def quality=: ( + OpenAI::Responses::Tool::ImageGeneration::quality + ) -> OpenAI::Responses::Tool::ImageGeneration::quality + + attr_reader size: OpenAI::Responses::Tool::ImageGeneration::size? + + def size=: ( + OpenAI::Responses::Tool::ImageGeneration::size + ) -> OpenAI::Responses::Tool::ImageGeneration::size + + def initialize: ( + ?background: OpenAI::Responses::Tool::ImageGeneration::background, + ?input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + ?model: OpenAI::Responses::Tool::ImageGeneration::model, + ?moderation: OpenAI::Responses::Tool::ImageGeneration::moderation, + ?output_compression: Integer, + ?output_format: OpenAI::Responses::Tool::ImageGeneration::output_format, + ?partial_images: Integer, + ?quality: OpenAI::Responses::Tool::ImageGeneration::quality, + ?size: OpenAI::Responses::Tool::ImageGeneration::size, + ?type: :image_generation + ) -> void + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::background] + end + + type input_image_mask = { file_id: String, image_url: String } + + class InputImageMask < OpenAI::Internal::Type::BaseModel + attr_reader file_id: String? + + def file_id=: (String) -> String + + attr_reader image_url: String? + + def image_url=: (String) -> String + + def initialize: (?file_id: String, ?image_url: String) -> void + end + + type model = :"gpt-image-1" + + module Model + extend OpenAI::Internal::Type::Enum + + GPT_IMAGE_1: :"gpt-image-1" + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::model] + end + + type moderation = :auto | :low + + module Moderation + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LOW: :low + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::moderation] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::size] + end + end + + type local_shell = { type: :local_shell } + + class LocalShell < OpenAI::Internal::Type::BaseModel + attr_accessor type: :local_shell + + def initialize: (?type: :local_shell) -> void + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::tool] end end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 71458e63..658e5091 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -16,6 +16,9 @@ module OpenAI | :web_search_preview | :computer_use_preview | :web_search_preview_2025_03_11 + | :image_generation + 
| :code_interpreter + | :mcp module Type extend OpenAI::Internal::Type::Enum @@ -24,6 +27,9 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview COMPUTER_USE_PREVIEW: :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 + IMAGE_GENERATION: :image_generation + CODE_INTERPRETER: :code_interpreter + MCP: :mcp def self?.values: -> ::Array[OpenAI::Responses::ToolChoiceTypes::type_] end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 6cbfd4aa..4a37f2c8 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -6,6 +6,7 @@ module OpenAI def create: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -28,6 +29,7 @@ module OpenAI def stream_raw: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 3c32f365..51476189 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -27,6 +27,14 @@ def test_list in OpenAI::Responses::ResponseFunctionWebSearch in OpenAI::Responses::ResponseFunctionToolCallItem in OpenAI::Responses::ResponseFunctionToolCallOutputItem + in OpenAI::Responses::ResponseItem::ImageGenerationCall + in OpenAI::Responses::ResponseCodeInterpreterToolCall + in OpenAI::Responses::ResponseItem::LocalShellCall + in OpenAI::Responses::ResponseItem::LocalShellCallOutput + in OpenAI::Responses::ResponseItem::McpListTools + in OpenAI::Responses::ResponseItem::McpApprovalRequest + in OpenAI::Responses::ResponseItem::McpApprovalResponse + in OpenAI::Responses::ResponseItem::McpCall end end @@ -77,6 +85,57 @@ def test_list output: String, status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status | nil } + in { + type: :image_generation_call, + id: String, + result: String | nil, + status: OpenAI::Responses::ResponseItem::ImageGenerationCall::Status + } + in { + type: :code_interpreter_call, + id: String, + code: String, + results: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result]), + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status, + container_id: String | nil + } + in { + type: :local_shell_call, + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseItem::LocalShellCall::Status + } + in { + type: :local_shell_call_output, + id: String, + output: String, + status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status | nil + } + in { + type: :mcp_list_tools, + id: String, + server_label: String, + tools: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseItem::McpListTools::Tool]), + error: String | nil + } + in {type: :mcp_approval_request, id: String, arguments: String, name: String, server_label: String} + in { + type: :mcp_approval_response, + id: String, + approval_request_id: String, + approve: OpenAI::Internal::Type::Boolean, + reason: String | nil + } + in { + type: :mcp_call, + id: String, + arguments: String, + name: 
String, + server_label: String, + error: String | nil, + output: String | nil + } end end end diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index a01200f4..04b15b54 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -26,6 +26,7 @@ def test_create_required_params tool_choice: OpenAI::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, + background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, reasoning: OpenAI::Reasoning | nil, @@ -62,6 +63,7 @@ def test_retrieve tool_choice: OpenAI::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, + background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, reasoning: OpenAI::Reasoning | nil, From d0d484e781dcfb85b7062c2ff77f614907275bad Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 18:37:41 +0000 Subject: [PATCH 203/295] feat(api): add container endpoint --- .stats.yml | 8 +- lib/openai.rb | 19 ++ lib/openai/client.rb | 4 + lib/openai/models.rb | 10 + lib/openai/models/container_create_params.rb | 75 +++++++ .../models/container_create_response.rb | 98 +++++++++ lib/openai/models/container_delete_params.rb | 14 ++ lib/openai/models/container_list_params.rb | 58 ++++++ lib/openai/models/container_list_response.rb | 98 +++++++++ .../models/container_retrieve_params.rb | 14 ++ .../models/container_retrieve_response.rb | 98 +++++++++ .../models/containers/file_create_params.rb | 35 ++++ .../models/containers/file_create_response.rb | 67 ++++++ .../models/containers/file_delete_params.rb | 22 ++ .../models/containers/file_list_params.rb | 60 ++++++ .../models/containers/file_list_response.rb | 67 ++++++ .../models/containers/file_retrieve_params.rb | 22 ++ .../containers/file_retrieve_response.rb | 67 ++++++ .../files/content_retrieve_params.rb | 24 +++ .../responses/response_audio_delta_event.rb | 10 +- .../responses/response_audio_done_event.rb | 10 +- .../response_audio_transcript_delta_event.rb | 10 +- .../response_audio_transcript_done_event.rb | 10 +- .../responses/response_cancel_params.rb | 16 ++ ..._code_interpreter_call_code_delta_event.rb | 10 +- ...e_code_interpreter_call_code_done_event.rb | 10 +- ...e_code_interpreter_call_completed_event.rb | 10 +- ...code_interpreter_call_in_progress_event.rb | 10 +- ...ode_interpreter_call_interpreting_event.rb | 10 +- .../responses/response_completed_event.rb | 10 +- .../response_content_part_added_event.rb | 10 +- .../response_content_part_done_event.rb | 10 +- .../responses/response_created_event.rb | 10 +- .../models/responses/response_error_event.rb | 10 +- .../models/responses/response_failed_event.rb | 10 +- ...sponse_file_search_call_completed_event.rb | 10 +- ...onse_file_search_call_in_progress_event.rb | 10 +- ...sponse_file_search_call_searching_event.rb | 10 +- ...nse_function_call_arguments_delta_event.rb | 10 +- ...onse_function_call_arguments_done_event.rb | 10 +- ...response_image_gen_call_completed_event.rb | 10 +- ...esponse_image_gen_call_generating_event.rb | 14 +- .../responses/response_in_progress_event.rb | 10 +- .../responses/response_incomplete_event.rb | 10 +- ...response_mcp_call_arguments_delta_event.rb | 10 +- 
.../response_mcp_call_arguments_done_event.rb | 10 +- .../response_mcp_call_completed_event.rb | 10 +- .../response_mcp_call_failed_event.rb | 10 +- .../response_mcp_call_in_progress_event.rb | 10 +- ...response_mcp_list_tools_completed_event.rb | 10 +- .../response_mcp_list_tools_failed_event.rb | 10 +- ...sponse_mcp_list_tools_in_progress_event.rb | 10 +- .../response_output_item_added_event.rb | 10 +- .../response_output_item_done_event.rb | 10 +- ...onse_output_text_annotation_added_event.rb | 10 +- .../models/responses/response_queued_event.rb | 10 +- .../response_reasoning_delta_event.rb | 10 +- .../response_reasoning_done_event.rb | 10 +- .../response_reasoning_summary_delta_event.rb | 10 +- .../response_reasoning_summary_done_event.rb | 10 +- ...onse_reasoning_summary_part_added_event.rb | 10 +- ...ponse_reasoning_summary_part_done_event.rb | 10 +- ...onse_reasoning_summary_text_delta_event.rb | 10 +- ...ponse_reasoning_summary_text_done_event.rb | 10 +- .../responses/response_refusal_delta_event.rb | 10 +- .../responses/response_refusal_done_event.rb | 10 +- .../response_text_annotation_delta_event.rb | 10 +- .../responses/response_text_delta_event.rb | 10 +- .../responses/response_text_done_event.rb | 10 +- lib/openai/resources/containers.rb | 112 ++++++++++ lib/openai/resources/containers/files.rb | 134 ++++++++++++ .../resources/containers/files/content.rb | 43 ++++ lib/openai/resources/responses.rb | 22 ++ rbi/openai/client.rbi | 3 + rbi/openai/models.rbi | 10 + rbi/openai/models/container_create_params.rbi | 145 +++++++++++++ .../models/container_create_response.rbi | 192 +++++++++++++++++ rbi/openai/models/container_delete_params.rbi | 27 +++ rbi/openai/models/container_list_params.rbi | 99 +++++++++ rbi/openai/models/container_list_response.rbi | 190 +++++++++++++++++ .../models/container_retrieve_params.rbi | 27 +++ .../models/container_retrieve_response.rbi | 193 ++++++++++++++++++ .../models/containers/file_create_params.rbi | 62 ++++++ .../containers/file_create_response.rbi | 90 ++++++++ .../models/containers/file_delete_params.rbi | 40 ++++ .../models/containers/file_list_params.rbi | 116 +++++++++++ .../models/containers/file_list_response.rbi | 90 ++++++++ .../containers/file_retrieve_params.rbi | 40 ++++ .../containers/file_retrieve_response.rbi | 90 ++++++++ .../files/content_retrieve_params.rbi | 42 ++++ .../responses/response_audio_delta_event.rbi | 18 +- .../responses/response_audio_done_event.rbi | 14 +- .../response_audio_transcript_delta_event.rbi | 18 +- .../response_audio_transcript_done_event.rbi | 14 +- .../responses/response_cancel_params.rbi | 32 +++ ...code_interpreter_call_code_delta_event.rbi | 22 +- ..._code_interpreter_call_code_done_event.rbi | 22 +- ..._code_interpreter_call_completed_event.rbi | 8 + ...ode_interpreter_call_in_progress_event.rbi | 8 + ...de_interpreter_call_interpreting_event.rbi | 8 + .../responses/response_completed_event.rbi | 13 +- .../response_content_part_added_event.rbi | 8 + .../response_content_part_done_event.rbi | 8 + .../responses/response_created_event.rbi | 13 +- .../models/responses/response_error_event.rbi | 8 + .../responses/response_failed_event.rbi | 13 +- ...ponse_file_search_call_completed_event.rbi | 22 +- ...nse_file_search_call_in_progress_event.rbi | 22 +- ...ponse_file_search_call_searching_event.rbi | 22 +- ...se_function_call_arguments_delta_event.rbi | 8 + ...nse_function_call_arguments_done_event.rbi | 8 + ...esponse_image_gen_call_completed_event.rbi | 22 +- 
...sponse_image_gen_call_generating_event.rbi | 17 +- .../responses/response_in_progress_event.rbi | 13 +- .../responses/response_incomplete_event.rbi | 13 +- ...esponse_mcp_call_arguments_delta_event.rbi | 8 + ...response_mcp_call_arguments_done_event.rbi | 8 + .../response_mcp_call_completed_event.rbi | 14 +- .../response_mcp_call_failed_event.rbi | 14 +- .../response_mcp_call_in_progress_event.rbi | 22 +- ...esponse_mcp_list_tools_completed_event.rbi | 14 +- .../response_mcp_list_tools_failed_event.rbi | 14 +- ...ponse_mcp_list_tools_in_progress_event.rbi | 14 +- .../response_output_item_added_event.rbi | 8 + .../response_output_item_done_event.rbi | 8 + ...nse_output_text_annotation_added_event.rbi | 8 + .../responses/response_queued_event.rbi | 13 +- .../response_reasoning_delta_event.rbi | 8 + .../response_reasoning_done_event.rbi | 8 + ...response_reasoning_summary_delta_event.rbi | 8 + .../response_reasoning_summary_done_event.rbi | 8 + ...nse_reasoning_summary_part_added_event.rbi | 8 + ...onse_reasoning_summary_part_done_event.rbi | 8 + ...nse_reasoning_summary_text_delta_event.rbi | 8 + ...onse_reasoning_summary_text_done_event.rbi | 8 + .../response_refusal_delta_event.rbi | 8 + .../responses/response_refusal_done_event.rbi | 8 + .../response_text_annotation_delta_event.rbi | 8 + .../responses/response_text_delta_event.rbi | 8 + .../responses/response_text_done_event.rbi | 8 + rbi/openai/resources/containers.rbi | 82 ++++++++ rbi/openai/resources/containers/files.rbi | 92 +++++++++ .../resources/containers/files/content.rbi | 27 +++ rbi/openai/resources/responses.rbi | 16 ++ sig/openai/client.rbs | 2 + sig/openai/models.rbs | 10 + sig/openai/models/container_create_params.rbs | 62 ++++++ .../models/container_create_response.rbs | 73 +++++++ sig/openai/models/container_delete_params.rbs | 13 ++ sig/openai/models/container_list_params.rbs | 48 +++++ sig/openai/models/container_list_response.rbs | 73 +++++++ .../models/container_retrieve_params.rbs | 13 ++ .../models/container_retrieve_response.rbs | 73 +++++++ .../models/containers/file_create_params.rbs | 30 +++ .../containers/file_create_response.rbs | 42 ++++ .../models/containers/file_delete_params.rbs | 20 ++ .../models/containers/file_list_params.rbs | 50 +++++ .../models/containers/file_list_response.rbs | 42 ++++ .../containers/file_retrieve_params.rbs | 20 ++ .../containers/file_retrieve_response.rbs | 42 ++++ .../files/content_retrieve_params.rbs | 22 ++ .../responses/response_audio_delta_event.rbs | 14 +- .../responses/response_audio_done_event.rbs | 10 +- .../response_audio_transcript_delta_event.rbs | 9 +- .../response_audio_transcript_done_event.rbs | 9 +- .../responses/response_cancel_params.rbs | 15 ++ ...code_interpreter_call_code_delta_event.rbs | 4 + ..._code_interpreter_call_code_done_event.rbs | 4 + ..._code_interpreter_call_completed_event.rbs | 4 + ...ode_interpreter_call_in_progress_event.rbs | 4 + ...de_interpreter_call_interpreting_event.rbs | 4 + .../responses/response_completed_event.rbs | 9 +- .../response_content_part_added_event.rbs | 4 + .../response_content_part_done_event.rbs | 4 + .../responses/response_created_event.rbs | 9 +- .../models/responses/response_error_event.rbs | 11 +- .../responses/response_failed_event.rbs | 9 +- ...ponse_file_search_call_completed_event.rbs | 4 + ...nse_file_search_call_in_progress_event.rbs | 4 + ...ponse_file_search_call_searching_event.rbs | 4 + ...se_function_call_arguments_delta_event.rbs | 4 + ...nse_function_call_arguments_done_event.rbs | 4 + 
...esponse_image_gen_call_completed_event.rbs | 4 + ...sponse_image_gen_call_generating_event.rbs | 12 +- .../responses/response_in_progress_event.rbs | 9 +- .../responses/response_incomplete_event.rbs | 9 +- ...esponse_mcp_call_arguments_delta_event.rbs | 4 + ...response_mcp_call_arguments_done_event.rbs | 4 + .../response_mcp_call_completed_event.rbs | 9 +- .../response_mcp_call_failed_event.rbs | 9 +- .../response_mcp_call_in_progress_event.rbs | 4 + ...esponse_mcp_list_tools_completed_event.rbs | 9 +- .../response_mcp_list_tools_failed_event.rbs | 9 +- ...ponse_mcp_list_tools_in_progress_event.rbs | 12 +- .../response_output_item_added_event.rbs | 4 + .../response_output_item_done_event.rbs | 4 + ...nse_output_text_annotation_added_event.rbs | 4 + .../responses/response_queued_event.rbs | 9 +- .../response_reasoning_delta_event.rbs | 4 + .../response_reasoning_done_event.rbs | 4 + ...response_reasoning_summary_delta_event.rbs | 4 + .../response_reasoning_summary_done_event.rbs | 4 + ...nse_reasoning_summary_part_added_event.rbs | 4 + ...onse_reasoning_summary_part_done_event.rbs | 4 + ...nse_reasoning_summary_text_delta_event.rbs | 4 + ...onse_reasoning_summary_text_done_event.rbs | 4 + .../response_refusal_delta_event.rbs | 4 + .../responses/response_refusal_done_event.rbs | 4 + .../response_text_annotation_delta_event.rbs | 4 + .../responses/response_text_delta_event.rbs | 4 + .../responses/response_text_done_event.rbs | 4 + sig/openai/resources/containers.rbs | 33 +++ sig/openai/resources/containers/files.rbs | 38 ++++ .../resources/containers/files/content.rbs | 17 ++ sig/openai/resources/responses.rbs | 5 + test/openai/resource_namespaces.rb | 8 + .../containers/files/content_test.rb | 13 ++ .../openai/resources/containers/files_test.rb | 80 ++++++++ test/openai/resources/containers_test.rb | 77 +++++++ test/openai/resources/responses_test.rb | 8 + 220 files changed, 5049 insertions(+), 152 deletions(-) create mode 100644 lib/openai/models/container_create_params.rb create mode 100644 lib/openai/models/container_create_response.rb create mode 100644 lib/openai/models/container_delete_params.rb create mode 100644 lib/openai/models/container_list_params.rb create mode 100644 lib/openai/models/container_list_response.rb create mode 100644 lib/openai/models/container_retrieve_params.rb create mode 100644 lib/openai/models/container_retrieve_response.rb create mode 100644 lib/openai/models/containers/file_create_params.rb create mode 100644 lib/openai/models/containers/file_create_response.rb create mode 100644 lib/openai/models/containers/file_delete_params.rb create mode 100644 lib/openai/models/containers/file_list_params.rb create mode 100644 lib/openai/models/containers/file_list_response.rb create mode 100644 lib/openai/models/containers/file_retrieve_params.rb create mode 100644 lib/openai/models/containers/file_retrieve_response.rb create mode 100644 lib/openai/models/containers/files/content_retrieve_params.rb create mode 100644 lib/openai/models/responses/response_cancel_params.rb create mode 100644 lib/openai/resources/containers.rb create mode 100644 lib/openai/resources/containers/files.rb create mode 100644 lib/openai/resources/containers/files/content.rb create mode 100644 rbi/openai/models/container_create_params.rbi create mode 100644 rbi/openai/models/container_create_response.rbi create mode 100644 rbi/openai/models/container_delete_params.rbi create mode 100644 rbi/openai/models/container_list_params.rbi create mode 100644 
rbi/openai/models/container_list_response.rbi create mode 100644 rbi/openai/models/container_retrieve_params.rbi create mode 100644 rbi/openai/models/container_retrieve_response.rbi create mode 100644 rbi/openai/models/containers/file_create_params.rbi create mode 100644 rbi/openai/models/containers/file_create_response.rbi create mode 100644 rbi/openai/models/containers/file_delete_params.rbi create mode 100644 rbi/openai/models/containers/file_list_params.rbi create mode 100644 rbi/openai/models/containers/file_list_response.rbi create mode 100644 rbi/openai/models/containers/file_retrieve_params.rbi create mode 100644 rbi/openai/models/containers/file_retrieve_response.rbi create mode 100644 rbi/openai/models/containers/files/content_retrieve_params.rbi create mode 100644 rbi/openai/models/responses/response_cancel_params.rbi create mode 100644 rbi/openai/resources/containers.rbi create mode 100644 rbi/openai/resources/containers/files.rbi create mode 100644 rbi/openai/resources/containers/files/content.rbi create mode 100644 sig/openai/models/container_create_params.rbs create mode 100644 sig/openai/models/container_create_response.rbs create mode 100644 sig/openai/models/container_delete_params.rbs create mode 100644 sig/openai/models/container_list_params.rbs create mode 100644 sig/openai/models/container_list_response.rbs create mode 100644 sig/openai/models/container_retrieve_params.rbs create mode 100644 sig/openai/models/container_retrieve_response.rbs create mode 100644 sig/openai/models/containers/file_create_params.rbs create mode 100644 sig/openai/models/containers/file_create_response.rbs create mode 100644 sig/openai/models/containers/file_delete_params.rbs create mode 100644 sig/openai/models/containers/file_list_params.rbs create mode 100644 sig/openai/models/containers/file_list_response.rbs create mode 100644 sig/openai/models/containers/file_retrieve_params.rbs create mode 100644 sig/openai/models/containers/file_retrieve_response.rbs create mode 100644 sig/openai/models/containers/files/content_retrieve_params.rbs create mode 100644 sig/openai/models/responses/response_cancel_params.rbs create mode 100644 sig/openai/resources/containers.rbs create mode 100644 sig/openai/resources/containers/files.rbs create mode 100644 sig/openai/resources/containers/files/content.rbs create mode 100644 test/openai/resources/containers/files/content_test.rb create mode 100644 test/openai/resources/containers/files_test.rb create mode 100644 test/openai/resources/containers_test.rb diff --git a/.stats.yml b/.stats.yml index b5ab8d66..250c0842 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a5651cb97f86d1e2531af6aef8c5230f1ea350560fbae790ca2e481b30a6c217.yml -openapi_spec_hash: 66a5104fd3bb43383cf919225df7a6fd -config_hash: bb657c3fed232a56930035de3aaed936 +configured_endpoints: 109 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6af14840a810139bf407013167ce1c8fb21b6ef8eb0cc3db58b51af7d52c4b5a.yml +openapi_spec_hash: 3241bde6b273cfec0035e522bd07985d +config_hash: 7367b68a4e7db36885c1a886f57b17f6 diff --git a/lib/openai.rb b/lib/openai.rb index f853f3fe..bf5c2abf 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -217,6 +217,21 @@ require_relative "openai/models/completion_create_params" require_relative "openai/models/completion_usage" require_relative "openai/models/compound_filter" +require_relative 
"openai/models/container_create_params" +require_relative "openai/models/container_create_response" +require_relative "openai/models/container_delete_params" +require_relative "openai/models/container_list_params" +require_relative "openai/models/container_list_response" +require_relative "openai/models/container_retrieve_params" +require_relative "openai/models/container_retrieve_response" +require_relative "openai/models/containers/file_create_params" +require_relative "openai/models/containers/file_create_response" +require_relative "openai/models/containers/file_delete_params" +require_relative "openai/models/containers/file_list_params" +require_relative "openai/models/containers/file_list_response" +require_relative "openai/models/containers/file_retrieve_params" +require_relative "openai/models/containers/file_retrieve_response" +require_relative "openai/models/containers/files/content_retrieve_params" require_relative "openai/models/create_embedding_response" require_relative "openai/models/embedding" require_relative "openai/models/embedding_create_params" @@ -330,6 +345,7 @@ require_relative "openai/models/responses/response_audio_done_event" require_relative "openai/models/responses/response_audio_transcript_delta_event" require_relative "openai/models/responses/response_audio_transcript_done_event" +require_relative "openai/models/responses/response_cancel_params" require_relative "openai/models/responses/response_code_interpreter_call_code_delta_event" require_relative "openai/models/responses/response_code_interpreter_call_code_done_event" require_relative "openai/models/responses/response_code_interpreter_call_completed_event" @@ -471,6 +487,9 @@ require_relative "openai/resources/chat/completions" require_relative "openai/resources/chat/completions/messages" require_relative "openai/resources/completions" +require_relative "openai/resources/containers" +require_relative "openai/resources/containers/files" +require_relative "openai/resources/containers/files/content" require_relative "openai/resources/embeddings" require_relative "openai/resources/evals" require_relative "openai/resources/evals/runs" diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 4673f743..b583ead2 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -72,6 +72,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::Evals] attr_reader :evals + # @return [OpenAI::Resources::Containers] + attr_reader :containers + # @api private # # @return [Hash{String=>String}] @@ -147,6 +150,7 @@ def initialize( @uploads = OpenAI::Resources::Uploads.new(client: self) @responses = OpenAI::Resources::Responses.new(client: self) @evals = OpenAI::Resources::Evals.new(client: self) + @containers = OpenAI::Resources::Containers.new(client: self) end end end diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 887d85e6..a0f5d753 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -81,6 +81,16 @@ module OpenAI CompoundFilter = OpenAI::Models::CompoundFilter + ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + ContainerListParams = OpenAI::Models::ContainerListParams + + ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + Containers = OpenAI::Models::Containers + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse Embedding = OpenAI::Models::Embedding diff --git a/lib/openai/models/container_create_params.rb 
b/lib/openai/models/container_create_params.rb
new file mode 100644
index 00000000..2db5a5a9
--- /dev/null
+++ b/lib/openai/models/container_create_params.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    # @see OpenAI::Resources::Containers#create
+    class ContainerCreateParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      # @!attribute name
+      #   Name of the container to create.
+      #
+      #   @return [String]
+      required :name, String
+
+      # @!attribute expires_after
+      #   Container expiration time in seconds relative to the 'anchor' time.
+      #
+      #   @return [OpenAI::ContainerCreateParams::ExpiresAfter, nil]
+      optional :expires_after, -> { OpenAI::ContainerCreateParams::ExpiresAfter }
+
+      # @!attribute file_ids
+      #   IDs of files to copy to the container.
+      #
+      #   @return [Array<String>, nil]
+      optional :file_ids, OpenAI::Internal::Type::ArrayOf[String]
+
+      # @!method initialize(name:, expires_after: nil, file_ids: nil, request_options: {})
+      #   @param name [String] Name of the container to create.
+      #
+      #   @param expires_after [OpenAI::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time.
+      #
+      #   @param file_ids [Array<String>] IDs of files to copy to the container.
+      #
+      #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+
+      class ExpiresAfter < OpenAI::Internal::Type::BaseModel
+        # @!attribute anchor
+        #   Time anchor for the expiration time. Currently only 'last_active_at' is
+        #   supported.
+        #
+        #   @return [Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor]
+        required :anchor, enum: -> { OpenAI::ContainerCreateParams::ExpiresAfter::Anchor }
+
+        # @!attribute minutes
+        #
+        #   @return [Integer]
+        required :minutes, Integer
+
+        # @!method initialize(anchor:, minutes:)
+        #   Some parameter documentation has been truncated, see
+        #   {OpenAI::ContainerCreateParams::ExpiresAfter} for more details.
+        #
+        #   Container expiration time in seconds relative to the 'anchor' time.
+        #
+        #   @param anchor [Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor] Time anchor for the expiration time. Currently only 'last_active_at' is supporte
+        #
+        #   @param minutes [Integer]
+
+        # Time anchor for the expiration time. Currently only 'last_active_at' is
+        # supported.
+        #
+        # @see OpenAI::ContainerCreateParams::ExpiresAfter#anchor
+        module Anchor
+          extend OpenAI::Internal::Type::Enum
+
+          LAST_ACTIVE_AT = :last_active_at
+
+          # @!method self.values
+          #   @return [Array<Symbol>]
+        end
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/container_create_response.rb b/lib/openai/models/container_create_response.rb
new file mode 100644
index 00000000..3db15ec9
--- /dev/null
+++ b/lib/openai/models/container_create_response.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    # @see OpenAI::Resources::Containers#create
+    class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel
+      # @!attribute id
+      #   Unique identifier for the container.
+      #
+      #   @return [String]
+      required :id, String
+
+      # @!attribute created_at
+      #   Unix timestamp (in seconds) when the container was created.
+      #
+      #   @return [Integer]
+      required :created_at, Integer
+
+      # @!attribute name
+      #   Name of the container.
+      #
+      #   @return [String]
+      required :name, String
+
+      # @!attribute object
+      #   The type of this object.
+      #
+      #   @return [String]
+      required :object, String
+
+      # @!attribute status
+      #   Status of the container (e.g., active, deleted).
+      #
+      #   @return [String]
+      required :status, String
+
+      # @!attribute expires_after
+      #   The container will expire after this time period. The anchor is the reference
+      #   point for the expiration. The minutes is the number of minutes after the anchor
+      #   before the container expires.
+      #
+      #   @return [OpenAI::Models::ContainerCreateResponse::ExpiresAfter, nil]
+      optional :expires_after, -> { OpenAI::Models::ContainerCreateResponse::ExpiresAfter }
+
+      # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil)
+      #   Some parameter documentation has been truncated, see
+      #   {OpenAI::Models::ContainerCreateResponse} for more details.
+      #
+      #   @param id [String] Unique identifier for the container.
+      #
+      #   @param created_at [Integer] Unix timestamp (in seconds) when the container was created.
+      #
+      #   @param name [String] Name of the container.
+      #
+      #   @param object [String] The type of this object.
+      #
+      #   @param status [String] Status of the container (e.g., active, deleted).
+      #
+      #   @param expires_after [OpenAI::Models::ContainerCreateResponse::ExpiresAfter] The container will expire after this time period.
+
+      # @see OpenAI::Models::ContainerCreateResponse#expires_after
+      class ExpiresAfter < OpenAI::Internal::Type::BaseModel
+        # @!attribute anchor
+        #   The reference point for the expiration.
+        #
+        #   @return [Symbol, OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor, nil]
+        optional :anchor, enum: -> { OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor }
+
+        # @!attribute minutes
+        #   The number of minutes after the anchor before the container expires.
+        #
+        #   @return [Integer, nil]
+        optional :minutes, Integer
+
+        # @!method initialize(anchor: nil, minutes: nil)
+        #   The container will expire after this time period. The anchor is the reference
+        #   point for the expiration. The minutes is the number of minutes after the anchor
+        #   before the container expires.
+        #
+        #   @param anchor [Symbol, OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor] The reference point for the expiration.
+        #
+        #   @param minutes [Integer] The number of minutes after the anchor before the container expires.
+
+        # The reference point for the expiration.
+ # + # @see OpenAI::Models::ContainerCreateResponse::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/container_delete_params.rb b/lib/openai/models/container_delete_params.rb new file mode 100644 index 00000000..becd24c9 --- /dev/null +++ b/lib/openai/models/container_delete_params.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#delete + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end +end diff --git a/lib/openai/models/container_list_params.rb b/lib/openai/models/container_list_params.rb new file mode 100644 index 00000000..2aa224d4 --- /dev/null +++ b/lib/openai/models/container_list_params.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#list + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute after + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + # + # @return [String, nil] + optional :after, String + + # @!attribute limit + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!attribute order + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + # + # @return [Symbol, OpenAI::ContainerListParams::Order, nil] + optional :order, enum: -> { OpenAI::ContainerListParams::Order } + + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
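+ #
+ # @example Listing containers, newest first (a sketch: the
+ #   `client.containers.list` call follows the `@see` resource reference
+ #   above, and pages are assumed to be enumerable as elsewhere in this SDK)
+ #   client.containers.list(limit: 20, order: :desc).each do |container|
+ #     puts container.id
+ #   end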
+ module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + # @!method self.values + # @return [Array] + end + end + end +end diff --git a/lib/openai/models/container_list_response.rb b/lib/openai/models/container_list_response.rb new file mode 100644 index 00000000..6d12abc7 --- /dev/null +++ b/lib/openai/models/container_list_response.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#list + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the container. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the container was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute name + # Name of the container. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of this object. + # + # @return [String] + required :object, String + + # @!attribute status + # Status of the container (e.g., active, deleted). + # + # @return [String] + required :status, String + + # @!attribute expires_after + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @return [OpenAI::Models::ContainerListResponse::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::Models::ContainerListResponse::ExpiresAfter } + + # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListResponse} for more details. + # + # @param id [String] Unique identifier for the container. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the container was created. + # + # @param name [String] Name of the container. + # + # @param object [String] The type of this object. + # + # @param status [String] Status of the container (e.g., active, deleted). + # + # @param expires_after [OpenAI::Models::ContainerListResponse::ExpiresAfter] The container will expire after this time period. + + # @see OpenAI::Models::ContainerListResponse#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # The reference point for the expiration. + # + # @return [Symbol, OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor, nil] + optional :anchor, enum: -> { OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor } + + # @!attribute minutes + # The number of minutes after the anchor before the container expires. + # + # @return [Integer, nil] + optional :minutes, Integer + + # @!method initialize(anchor: nil, minutes: nil) + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @param anchor [Symbol, OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor] The reference point for the expiration. + # + # @param minutes [Integer] The number of minutes after the anchor before the container expires. + + # The reference point for the expiration. 
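+ # Only `last_active_at` is defined below.
+ #
+ # @example Reading the anchor off a listed container (a sketch; `container`
+ #   is a hypothetical ContainerListResponse instance)
+ #   container.expires_after&.anchor # => :last_active_at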
+ # + # @see OpenAI::Models::ContainerListResponse::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/container_retrieve_params.rb b/lib/openai/models/container_retrieve_params.rb new file mode 100644 index 00000000..821d0549 --- /dev/null +++ b/lib/openai/models/container_retrieve_params.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#retrieve + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end +end diff --git a/lib/openai/models/container_retrieve_response.rb b/lib/openai/models/container_retrieve_response.rb new file mode 100644 index 00000000..19520ab0 --- /dev/null +++ b/lib/openai/models/container_retrieve_response.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#retrieve + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the container. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the container was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute name + # Name of the container. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of this object. + # + # @return [String] + required :object, String + + # @!attribute status + # Status of the container (e.g., active, deleted). + # + # @return [String] + required :status, String + + # @!attribute expires_after + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @return [OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter } + + # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerRetrieveResponse} for more details. + # + # @param id [String] Unique identifier for the container. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the container was created. + # + # @param name [String] Name of the container. + # + # @param object [String] The type of this object. + # + # @param status [String] Status of the container (e.g., active, deleted). + # + # @param expires_after [OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter] The container will expire after this time period. + + # @see OpenAI::Models::ContainerRetrieveResponse#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # The reference point for the expiration. + # + # @return [Symbol, OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor, nil] + optional :anchor, enum: -> { OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor } + + # @!attribute minutes + # The number of minutes after the anchor before the container expires. 
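+ #
+ # @example Deriving an absolute expiry time (a sketch; `last_active` is a
+ #   hypothetical Time tracked by the caller, since only the anchor's name
+ #   is returned)
+ #   expires_at = last_active + (60 * expires_after.minutes)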
+ #
+ # @return [Integer, nil]
+ optional :minutes, Integer
+
+ # @!method initialize(anchor: nil, minutes: nil)
+ # The container will expire after this time period. The anchor is the reference
+ # point for the expiration. The minutes is the number of minutes after the anchor
+ # before the container expires.
+ #
+ # @param anchor [Symbol, OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor] The reference point for the expiration.
+ #
+ # @param minutes [Integer] The number of minutes after the anchor before the container expires.
+
+ # The reference point for the expiration.
+ #
+ # @see OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter#anchor
+ module Anchor
+ extend OpenAI::Internal::Type::Enum
+
+ LAST_ACTIVE_AT = :last_active_at
+
+ # @!method self.values
+ # @return [Array<Symbol>]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/containers/file_create_params.rb b/lib/openai/models/containers/file_create_params.rb
new file mode 100644
index 00000000..07528c8e
--- /dev/null
+++ b/lib/openai/models/containers/file_create_params.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Containers
+ # @see OpenAI::Resources::Containers::Files#create
+ class FileCreateParams < OpenAI::Internal::Type::BaseModel
+ extend OpenAI::Internal::Type::RequestParameters::Converter
+ include OpenAI::Internal::Type::RequestParameters
+
+ # @!attribute file
+ # The File object (not file name) to be uploaded.
+ #
+ # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, nil]
+ optional :file, OpenAI::Internal::Type::FileInput
+
+ # @!attribute file_id
+ # ID of an already-uploaded file to copy into the container.
+ #
+ # @return [String, nil]
+ optional :file_id, String
+
+ # @!method initialize(file: nil, file_id: nil, request_options: {})
+ # Some parameter documentation has been truncated; see
+ # {OpenAI::Models::Containers::FileCreateParams} for more details.
+ #
+ # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded.
+ #
+ # @param file_id [String] ID of an already-uploaded file to copy into the container.
+ #
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/containers/file_create_response.rb b/lib/openai/models/containers/file_create_response.rb
new file mode 100644
index 00000000..408ac8c7
--- /dev/null
+++ b/lib/openai/models/containers/file_create_response.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Containers
+ # @see OpenAI::Resources::Containers::Files#create
+ class FileCreateResponse < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # Unique identifier for the file.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute bytes
+ # Size of the file in bytes.
+ #
+ # @return [Integer]
+ required :bytes, Integer
+
+ # @!attribute container_id
+ # The container this file belongs to.
+ #
+ # @return [String]
+ required :container_id, String
+
+ # @!attribute created_at
+ # Unix timestamp (in seconds) when the file was created.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute object
+ # The type of this object (`container.file`).
+ #
+ # @return [Symbol, :"container.file"]
+ required :object, const: :"container.file"
+
+ # @!attribute path
+ # Path of the file in the container.
+ #
+ # @return [String]
+ required :path, String
+
+ # @!attribute source
+ # Source of the file (e.g., `user`, `assistant`).
+ # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). + # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). + end + end + end +end diff --git a/lib/openai/models/containers/file_delete_params.rb b/lib/openai/models/containers/file_delete_params.rb new file mode 100644 index 00000000..b9865119 --- /dev/null +++ b/lib/openai/models/containers/file_delete_params.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#delete + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute container_id + # + # @return [String] + required :container_id, String + + # @!method initialize(container_id:, request_options: {}) + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/containers/file_list_params.rb b/lib/openai/models/containers/file_list_params.rb new file mode 100644 index 00000000..d1494255 --- /dev/null +++ b/lib/openai/models/containers/file_list_params.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#list + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute after + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + # + # @return [String, nil] + optional :after, String + + # @!attribute limit + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!attribute order + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + # + # @return [Symbol, OpenAI::Containers::FileListParams::Order, nil] + optional :order, enum: -> { OpenAI::Containers::FileListParams::Order } + + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/containers/file_list_response.rb b/lib/openai/models/containers/file_list_response.rb new file mode 100644 index 00000000..55433ce5 --- /dev/null +++ b/lib/openai/models/containers/file_list_response.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#list + class FileListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the file. + # + # @return [String] + required :id, String + + # @!attribute bytes + # Size of the file in bytes. + # + # @return [Integer] + required :bytes, Integer + + # @!attribute container_id + # The container this file belongs to. + # + # @return [String] + required :container_id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the file was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The type of this object (`container.file`). + # + # @return [Symbol, :"container.file"] + required :object, const: :"container.file" + + # @!attribute path + # Path of the file in the container. + # + # @return [String] + required :path, String + + # @!attribute source + # Source of the file (e.g., `user`, `assistant`). + # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). + # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). 
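+ #
+ # @example Constructing an instance by hand (a sketch; all field values
+ #   are illustrative)
+ #   OpenAI::Models::Containers::FileListResponse.new(
+ #     id: "cfile_123",
+ #     bytes: 1024,
+ #     container_id: "cntr_123",
+ #     created_at: 1_700_000_000,
+ #     path: "/mnt/data/report.txt",
+ #     source: "user"
+ #   )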
+ end + end + end +end diff --git a/lib/openai/models/containers/file_retrieve_params.rb b/lib/openai/models/containers/file_retrieve_params.rb new file mode 100644 index 00000000..781e8f38 --- /dev/null +++ b/lib/openai/models/containers/file_retrieve_params.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#retrieve + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute container_id + # + # @return [String] + required :container_id, String + + # @!method initialize(container_id:, request_options: {}) + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/containers/file_retrieve_response.rb b/lib/openai/models/containers/file_retrieve_response.rb new file mode 100644 index 00000000..4c905b59 --- /dev/null +++ b/lib/openai/models/containers/file_retrieve_response.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#retrieve + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the file. + # + # @return [String] + required :id, String + + # @!attribute bytes + # Size of the file in bytes. + # + # @return [Integer] + required :bytes, Integer + + # @!attribute container_id + # The container this file belongs to. + # + # @return [String] + required :container_id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the file was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The type of this object (`container.file`). + # + # @return [Symbol, :"container.file"] + required :object, const: :"container.file" + + # @!attribute path + # Path of the file in the container. + # + # @return [String] + required :path, String + + # @!attribute source + # Source of the file (e.g., `user`, `assistant`). + # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). + # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). 
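+ #
+ # @example Retrieving a container file (a sketch: the call shape follows
+ #   the `@see` resource reference above; the exact signature is an
+ #   assumption)
+ #   file = client.containers.files.retrieve("cfile_123", container_id: "cntr_123")
+ #   file.path # => "/mnt/data/report.txt"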
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/containers/files/content_retrieve_params.rb b/lib/openai/models/containers/files/content_retrieve_params.rb
new file mode 100644
index 00000000..b765ee47
--- /dev/null
+++ b/lib/openai/models/containers/files/content_retrieve_params.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Containers
+ module Files
+ # @see OpenAI::Resources::Containers::Files::Content#retrieve
+ class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel
+ extend OpenAI::Internal::Type::RequestParameters::Converter
+ include OpenAI::Internal::Type::RequestParameters
+
+ # @!attribute container_id
+ #
+ # @return [String]
+ required :container_id, String
+
+ # @!method initialize(container_id:, request_options: {})
+ # @param container_id [String]
+ # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb
index f630ddd2..d5dc88d7 100644
--- a/lib/openai/models/responses/response_audio_delta_event.rb
+++ b/lib/openai/models/responses/response_audio_delta_event.rb
@@ -10,13 +10,19 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel
 # @return [String]
 required :delta, String

+ # @!attribute sequence_number
+ # A sequence number for this chunk of the stream response.
+ #
+ # @return [Integer]
+ required :sequence_number, Integer
+
 # @!attribute type
 # The type of the event. Always `response.audio.delta`.
 #
 # @return [Symbol, :"response.audio.delta"]
 required :type, const: :"response.audio.delta"

- # @!method initialize(delta:, type: :"response.audio.delta")
+ # @!method initialize(delta:, sequence_number:, type: :"response.audio.delta")
 # Some parameter documentation has been truncated; see
 # {OpenAI::Responses::ResponseAudioDeltaEvent} for more details.
 #
 # Emitted when there is a partial audio response.
 #
 # @param delta [String] A chunk of Base64 encoded response audio bytes.
 #
+ # @param sequence_number [Integer] A sequence number for this chunk of the stream response.
+ #
 # @param type [Symbol, :"response.audio.delta"] The type of the event. Always `response.audio.delta`.
 end
 end
diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb
index d156cd64..e7f889a4 100644
--- a/lib/openai/models/responses/response_audio_done_event.rb
+++ b/lib/openai/models/responses/response_audio_done_event.rb
@@ -4,18 +4,26 @@ module OpenAI
 module Models
 module Responses
 class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute sequence_number
+ # The sequence number of this event.
+ #
+ # @return [Integer]
+ required :sequence_number, Integer
+
 # @!attribute type
 # The type of the event. Always `response.audio.done`.
 #
 # @return [Symbol, :"response.audio.done"]
 required :type, const: :"response.audio.done"

- # @!method initialize(type: :"response.audio.done")
+ # @!method initialize(sequence_number:, type: :"response.audio.done")
 # Some parameter documentation has been truncated; see
 # {OpenAI::Responses::ResponseAudioDoneEvent} for more details.
 #
 # Emitted when the audio response is complete.
 #
+ # @param sequence_number [Integer] The sequence number of this event.
+ #
 # @param type [Symbol, :"response.audio.done"] The type of the event. Always `response.audio.done`.
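+ #
+ # @example Replaying buffered events in stream order (a sketch; `events`
+ #   and `process` are hypothetical, and each streaming event in this patch
+ #   now carries a `sequence_number`)
+ #   events.sort_by(&:sequence_number).each { |event| process(event) }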
end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 1361afe0..10692c30 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -10,13 +10,19 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :delta, String + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.transcript.delta`. # # @return [Symbol, :"response.audio.transcript.delta"] required :type, const: :"response.audio.transcript.delta" - # @!method initialize(delta:, type: :"response.audio.transcript.delta") + # @!method initialize(delta:, sequence_number:, type: :"response.audio.transcript.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseAudioTranscriptDeltaEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param delta [String] The partial transcript of the audio response. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.audio.transcript.delta"] The type of the event. Always `response.audio.transcript.delta`. end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 13d588f5..5e0e71ff 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -4,18 +4,26 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.transcript.done`. # # @return [Symbol, :"response.audio.transcript.done"] required :type, const: :"response.audio.transcript.done" - # @!method initialize(type: :"response.audio.transcript.done") + # @!method initialize(sequence_number:, type: :"response.audio.transcript.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseAudioTranscriptDoneEvent} for more details. # # Emitted when the full audio transcript is completed. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.audio.transcript.done"] The type of the event. Always `response.audio.transcript.done`. 
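+ #
+ # @example Accumulating a transcript from delta events (a sketch; `event`
+ #   comes from a hypothetical streaming loop)
+ #   transcript = +""
+ #   transcript << event.delta if event.type == :"response.audio.transcript.delta"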
end end diff --git a/lib/openai/models/responses/response_cancel_params.rb b/lib/openai/models/responses/response_cancel_params.rb new file mode 100644 index 00000000..a06d628c --- /dev/null +++ b/lib/openai/models/responses/response_cancel_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + # @see OpenAI::Resources::Responses#cancel + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 07cebc16..71790da8 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -16,13 +16,19 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.code.delta`. # # @return [Symbol, :"response.code_interpreter_call.code.delta"] required :type, const: :"response.code_interpreter_call.code.delta" - # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") + # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. Always `response.code_interpreter_call.code.delta`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 40845a15..8fc9434f 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -16,13 +16,19 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.code.done`. 
# # @return [Symbol, :"response.code_interpreter_call.code.done"] required :type, const: :"response.code_interpreter_call.code.done" - # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") + # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 37bf5f84..ce19b55e 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -16,13 +16,19 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.completed`. # # @return [Symbol, :"response.code_interpreter_call.completed"] required :type, const: :"response.code_interpreter_call.completed" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 96683a3d..5ed061e8 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -16,13 +16,19 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.in_progress`. 
# # @return [Symbol, :"response.code_interpreter_call.in_progress"] required :type, const: :"response.code_interpreter_call.in_progress" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent} for more # details. @@ -33,6 +39,8 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 1cf23747..5a9ca91b 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -16,13 +16,19 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.interpreting`. # # @return [Symbol, :"response.code_interpreter_call.interpreting"] required :type, const: :"response.code_interpreter_call.interpreting" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent} for more # details. @@ -33,6 +39,8 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. end end diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 5bbb6426..cba90f82 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -10,13 +10,19 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.completed`. 
# # @return [Symbol, :"response.completed"] required :type, const: :"response.completed" - # @!method initialize(response:, type: :"response.completed") + # @!method initialize(response:, sequence_number:, type: :"response.completed") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCompletedEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param response [OpenAI::Responses::Response] Properties of the completed response. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`. end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 1bb0cddf..2a838cfa 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -28,13 +28,19 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartAddedEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.content_part.added`. # # @return [Symbol, :"response.content_part.added"] required :type, const: :"response.content_part.added" - # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") + # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.added") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseContentPartAddedEvent} for more details. # @@ -48,6 +54,8 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that was added. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.content_part.added"] The type of the event. Always `response.content_part.added`. # The content part that was added. diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index 56b1e274..a7295842 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -28,13 +28,19 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartDoneEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.content_part.done`. 
# # @return [Symbol, :"response.content_part.done"] required :type, const: :"response.content_part.done" - # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") + # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseContentPartDoneEvent} for more details. # @@ -48,6 +54,8 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that is done. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.content_part.done"] The type of the event. Always `response.content_part.done`. # The content part that is done. diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index ec044aac..2724e02c 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -10,13 +10,19 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.created`. # # @return [Symbol, :"response.created"] required :type, const: :"response.created" - # @!method initialize(response:, type: :"response.created") + # @!method initialize(response:, sequence_number:, type: :"response.created") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseCreatedEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # # @param response [OpenAI::Responses::Response] The response that was created. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param type [Symbol, :"response.created"] The type of the event. Always `response.created`. end end diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 651a400d..059f086c 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -22,13 +22,19 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :param, String, nil?: true + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `error`. # # @return [Symbol, :error] required :type, const: :error - # @!method initialize(code:, message:, param:, type: :error) + # @!method initialize(code:, message:, param:, sequence_number:, type: :error) # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseErrorEvent} for more details. # @@ -40,6 +46,8 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # # @param param [String, nil] The error parameter. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :error] The type of the event. Always `error`. 
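+ #
+ # @example Surfacing a stream error (a sketch; `event` is a parsed
+ #   ResponseErrorEvent from a hypothetical streaming loop)
+ #   if event.type == :error
+ #     raise "#{event.code}: #{event.message} (param: #{event.param.inspect})"
+ #   end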
end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index f3454dd5..e9dd926c 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -10,13 +10,19 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.failed`. # # @return [Symbol, :"response.failed"] required :type, const: :"response.failed" - # @!method initialize(response:, type: :"response.failed") + # @!method initialize(response:, sequence_number:, type: :"response.failed") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseFailedEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # # @param response [OpenAI::Responses::Response] The response that failed. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`. end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 5c616920..2fd752b3 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -16,13 +16,19 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.completed`. # # @return [Symbol, :"response.file_search_call.completed"] required :type, const: :"response.file_search_call.completed" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.completed") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseFileSearchCallCompletedEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is initiated. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.completed"] The type of the event. Always `response.file_search_call.completed`. end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 15ff92de..e319981c 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -16,13 +16,19 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. 
+ # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.in_progress`. # # @return [Symbol, :"response.file_search_call.in_progress"] required :type, const: :"response.file_search_call.in_progress" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.in_progress") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseFileSearchCallInProgressEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is initiated. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.in_progress"] The type of the event. Always `response.file_search_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 58540d2a..d07eba07 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -16,13 +16,19 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.searching`. # # @return [Symbol, :"response.file_search_call.searching"] required :type, const: :"response.file_search_call.searching" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.searching") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseFileSearchCallSearchingEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is searching. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.searching"] The type of the event. Always `response.file_search_call.searching`. end end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 05a59f54..d01efcf0 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -22,13 +22,19 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.function_call_arguments.delta`. 
# # @return [Symbol, :"response.function_call_arguments.delta"] required :type, const: :"response.function_call_arguments.delta" - # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.function_call_arguments.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more details. # @@ -40,6 +46,8 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # # @param output_index [Integer] The index of the output item that the function-call arguments delta is added to. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.function_call_arguments.delta"] The type of the event. Always `response.function_call_arguments.delta`. end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index ded7ece3..a5b29f4b 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -22,12 +22,18 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # # @return [Symbol, :"response.function_call_arguments.done"] required :type, const: :"response.function_call_arguments.done" - # @!method initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done") + # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.function_call_arguments.done") # Emitted when function-call arguments are finalized. # # @param arguments [String] The function-call arguments. @@ -36,6 +42,8 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.function_call_arguments.done"] end end diff --git a/lib/openai/models/responses/response_image_gen_call_completed_event.rb b/lib/openai/models/responses/response_image_gen_call_completed_event.rb index d1298dab..ceccdcf6 100644 --- a/lib/openai/models/responses/response_image_gen_call_completed_event.rb +++ b/lib/openai/models/responses/response_image_gen_call_completed_event.rb @@ -16,13 +16,19 @@ class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.image_generation_call.completed'. 
# # @return [Symbol, :"response.image_generation_call.completed"] required :type, const: :"response.image_generation_call.completed" - # @!method initialize(item_id:, output_index:, type: :"response.image_generation_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.completed") # Emitted when an image generation tool call has completed and the final image is # available. # @@ -30,6 +36,8 @@ class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.image_generation_call.completed"] The type of the event. Always 'response.image_generation_call.completed'. end end diff --git a/lib/openai/models/responses/response_image_gen_call_generating_event.rb b/lib/openai/models/responses/response_image_gen_call_generating_event.rb index 51757dbb..d8f9683d 100644 --- a/lib/openai/models/responses/response_image_gen_call_generating_event.rb +++ b/lib/openai/models/responses/response_image_gen_call_generating_event.rb @@ -16,19 +16,19 @@ class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.image_generation_call.generating'. # # @return [Symbol, :"response.image_generation_call.generating"] required :type, const: :"response.image_generation_call.generating" - # @!attribute sequence_number - # The sequence number of the image generation item being processed. - # - # @return [Integer, nil] - optional :sequence_number, Integer - - # @!method initialize(item_id:, output_index:, sequence_number: nil, type: :"response.image_generation_call.generating") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.generating") # Emitted when an image generation tool call is actively generating an image # (intermediate state). # diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index 788e9ea6..b4079e2b 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -10,13 +10,19 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.in_progress`. # # @return [Symbol, :"response.in_progress"] required :type, const: :"response.in_progress" - # @!method initialize(response:, type: :"response.in_progress") + # @!method initialize(response:, sequence_number:, type: :"response.in_progress") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseInProgressEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # # @param response [OpenAI::Responses::Response] The response that is in progress. 
# + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.in_progress"] The type of the event. Always `response.in_progress`. end end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index d33b32fc..709acfeb 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -10,13 +10,19 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.incomplete`. # # @return [Symbol, :"response.incomplete"] required :type, const: :"response.incomplete" - # @!method initialize(response:, type: :"response.incomplete") + # @!method initialize(response:, sequence_number:, type: :"response.incomplete") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseIncompleteEvent} for more details. # @@ -24,6 +30,8 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # # @param response [OpenAI::Responses::Response] The response that was incomplete. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`. end end diff --git a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb index e238043d..0c734e96 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb @@ -22,13 +22,19 @@ class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_call.arguments_delta'. # # @return [Symbol, :"response.mcp_call.arguments_delta"] required :type, const: :"response.mcp_call.arguments_delta" - # @!method initialize(delta:, item_id:, output_index:, type: :"response.mcp_call.arguments_delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_delta") # Emitted when there is a delta (partial update) to the arguments of an MCP tool # call. # @@ -38,6 +44,8 @@ class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_call.arguments_delta"] The type of the event. Always 'response.mcp_call.arguments_delta'. 
end end diff --git a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb index 875cfee3..f8f70329 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb @@ -22,13 +22,19 @@ class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_call.arguments_done'. # # @return [Symbol, :"response.mcp_call.arguments_done"] required :type, const: :"response.mcp_call.arguments_done" - # @!method initialize(arguments:, item_id:, output_index:, type: :"response.mcp_call.arguments_done") + # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_done") # Emitted when the arguments for an MCP tool call are finalized. # # @param arguments [Object] The finalized arguments for the MCP tool call. @@ -37,6 +43,8 @@ class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_call.arguments_done"] The type of the event. Always 'response.mcp_call.arguments_done'. end end diff --git a/lib/openai/models/responses/response_mcp_call_completed_event.rb b/lib/openai/models/responses/response_mcp_call_completed_event.rb index 2b20fa27..082474fd 100644 --- a/lib/openai/models/responses/response_mcp_call_completed_event.rb +++ b/lib/openai/models/responses/response_mcp_call_completed_event.rb @@ -4,15 +4,23 @@ module OpenAI module Models module Responses class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_call.completed'. # # @return [Symbol, :"response.mcp_call.completed"] required :type, const: :"response.mcp_call.completed" - # @!method initialize(type: :"response.mcp_call.completed") + # @!method initialize(sequence_number:, type: :"response.mcp_call.completed") # Emitted when an MCP tool call has completed successfully. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_call.completed"] The type of the event. Always 'response.mcp_call.completed'. end end diff --git a/lib/openai/models/responses/response_mcp_call_failed_event.rb b/lib/openai/models/responses/response_mcp_call_failed_event.rb index 0429006e..cca4a22d 100644 --- a/lib/openai/models/responses/response_mcp_call_failed_event.rb +++ b/lib/openai/models/responses/response_mcp_call_failed_event.rb @@ -4,15 +4,23 @@ module OpenAI module Models module Responses class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_call.failed'. 
# # @return [Symbol, :"response.mcp_call.failed"] required :type, const: :"response.mcp_call.failed" - # @!method initialize(type: :"response.mcp_call.failed") + # @!method initialize(sequence_number:, type: :"response.mcp_call.failed") # Emitted when an MCP tool call has failed. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_call.failed"] The type of the event. Always 'response.mcp_call.failed'. end end diff --git a/lib/openai/models/responses/response_mcp_call_in_progress_event.rb b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb index f19fdaa7..4d02f14f 100644 --- a/lib/openai/models/responses/response_mcp_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb @@ -16,19 +16,27 @@ class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_call.in_progress'. # # @return [Symbol, :"response.mcp_call.in_progress"] required :type, const: :"response.mcp_call.in_progress" - # @!method initialize(item_id:, output_index:, type: :"response.mcp_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_call.in_progress") # Emitted when an MCP tool call is in progress. # # @param item_id [String] The unique identifier of the MCP tool call item being processed. # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_call.in_progress"] The type of the event. Always 'response.mcp_call.in_progress'. end end diff --git a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb index bad2a11b..dd6bfdcb 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb @@ -4,15 +4,23 @@ module OpenAI module Models module Responses class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_list_tools.completed'. # # @return [Symbol, :"response.mcp_list_tools.completed"] required :type, const: :"response.mcp_list_tools.completed" - # @!method initialize(type: :"response.mcp_list_tools.completed") + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.completed") # Emitted when the list of available MCP tools has been successfully retrieved. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_list_tools.completed"] The type of the event. Always 'response.mcp_list_tools.completed'. 
end end diff --git a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb index 42df67d4..a4c2fc10 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb @@ -4,15 +4,23 @@ module OpenAI module Models module Responses class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_list_tools.failed'. # # @return [Symbol, :"response.mcp_list_tools.failed"] required :type, const: :"response.mcp_list_tools.failed" - # @!method initialize(type: :"response.mcp_list_tools.failed") + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.failed") # Emitted when the attempt to list available MCP tools has failed. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_list_tools.failed"] The type of the event. Always 'response.mcp_list_tools.failed'. end end diff --git a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb index e1f44fe5..e931fb3b 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb @@ -4,16 +4,24 @@ module OpenAI module Models module Responses class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.mcp_list_tools.in_progress'. # # @return [Symbol, :"response.mcp_list_tools.in_progress"] required :type, const: :"response.mcp_list_tools.in_progress" - # @!method initialize(type: :"response.mcp_list_tools.in_progress") + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.in_progress") # Emitted when the system is in the process of retrieving the list of available # MCP tools. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.mcp_list_tools.in_progress"] The type of the event. Always 'response.mcp_list_tools.in_progress'. end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 8354f678..a5bce7c2 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -16,13 +16,19 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_item.added`. 
# # @return [Symbol, :"response.output_item.added"] required :type, const: :"response.output_item.added" - # @!method initialize(item:, output_index:, type: :"response.output_item.added") + # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.added") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseOutputItemAddedEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that was added. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_item.added"] The type of the event. Always `response.output_item.added`. end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 40d99812..642bc92d 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -16,13 +16,19 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_item.done`. # # @return [Symbol, :"response.output_item.done"] required :type, const: :"response.output_item.done" - # @!method initialize(item:, output_index:, type: :"response.output_item.done") + # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseOutputItemDoneEvent} for more details. # @@ -32,6 +38,8 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that was marked done. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_item.done"] The type of the event. Always `response.output_item.done`. end end diff --git a/lib/openai/models/responses/response_output_text_annotation_added_event.rb b/lib/openai/models/responses/response_output_text_annotation_added_event.rb index 8efbedff..eba4d8a7 100644 --- a/lib/openai/models/responses/response_output_text_annotation_added_event.rb +++ b/lib/openai/models/responses/response_output_text_annotation_added_event.rb @@ -34,13 +34,19 @@ class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.output_text_annotation.added'. # # @return [Symbol, :"response.output_text_annotation.added"] required :type, const: :"response.output_text_annotation.added" - # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text_annotation.added") + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text_annotation.added") # Emitted when an annotation is added to output text content. # # @param annotation [Object] The annotation object being added. 
(See annotation schema for details.) @@ -53,6 +59,8 @@ class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_text_annotation.added"] The type of the event. Always 'response.output_text_annotation.added'. end end diff --git a/lib/openai/models/responses/response_queued_event.rb b/lib/openai/models/responses/response_queued_event.rb index 065f4cc1..f84bd2b0 100644 --- a/lib/openai/models/responses/response_queued_event.rb +++ b/lib/openai/models/responses/response_queued_event.rb @@ -10,17 +10,25 @@ class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.queued'. # # @return [Symbol, :"response.queued"] required :type, const: :"response.queued" - # @!method initialize(response:, type: :"response.queued") + # @!method initialize(response:, sequence_number:, type: :"response.queued") # Emitted when a response is queued and waiting to be processed. # # @param response [OpenAI::Responses::Response] The full response object that is queued. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param type [Symbol, :"response.queued"] The type of the event. Always 'response.queued'. end end diff --git a/lib/openai/models/responses/response_reasoning_delta_event.rb b/lib/openai/models/responses/response_reasoning_delta_event.rb index 18afe6ee..a8b51c21 100644 --- a/lib/openai/models/responses/response_reasoning_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_delta_event.rb @@ -28,13 +28,19 @@ class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always 'response.reasoning.delta'. # # @return [Symbol, :"response.reasoning.delta"] required :type, const: :"response.reasoning.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.reasoning.delta") + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.reasoning.delta") # Emitted when there is a delta (partial update) to the reasoning content. # # @param content_index [Integer] The index of the reasoning content part within the output item. @@ -45,6 +51,8 @@ class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.reasoning.delta"] The type of the event. Always 'response.reasoning.delta'. 
end end diff --git a/lib/openai/models/responses/response_reasoning_done_event.rb b/lib/openai/models/responses/response_reasoning_done_event.rb index 887f1f58..0c5e1861 100644 --- a/lib/openai/models/responses/response_reasoning_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_done_event.rb @@ -22,6 +22,12 @@ class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute text # The finalized reasoning text. # @@ -34,7 +40,7 @@ class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning.done"] required :type, const: :"response.reasoning.done" - # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.reasoning.done") + # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.reasoning.done") # Emitted when the reasoning content is finalized for an item. # # @param content_index [Integer] The index of the reasoning content part within the output item. @@ -43,6 +49,8 @@ class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param text [String] The finalized reasoning text. # # @param type [Symbol, :"response.reasoning.done"] The type of the event. Always 'response.reasoning.done'. diff --git a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb index 188b3ef8..96529b8e 100644 --- a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb @@ -23,6 +23,12 @@ class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the output item. # @@ -35,7 +41,7 @@ class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary.delta"] required :type, const: :"response.reasoning_summary.delta" - # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseReasoningSummaryDeltaEvent} for more details. # @@ -47,6 +53,8 @@ class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the output item. # # @param type [Symbol, :"response.reasoning_summary.delta"] The type of the event. Always 'response.reasoning_summary.delta'. 
diff --git a/lib/openai/models/responses/response_reasoning_summary_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_done_event.rb index 54715dd8..42716381 100644 --- a/lib/openai/models/responses/response_reasoning_summary_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_done_event.rb @@ -16,6 +16,12 @@ class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the output item. # @@ -34,13 +40,15 @@ class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary.done"] required :type, const: :"response.reasoning_summary.done" - # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary.done") + # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary.done") # Emitted when the reasoning summary content is finalized for an item. # # @param item_id [String] The unique identifier of the item for which the reasoning summary is finalized. # # @param output_index [Integer] The index of the output item in the response's output array. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the output item. # # @param text [String] The finalized reasoning summary text. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb index 4701beaa..555e71a5 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -22,6 +22,12 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,7 +40,7 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_part.added"] required :type, const: :"response.reasoning_summary_part.added" - # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.added") + # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.added") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent} for more details. # @@ -46,6 +52,8 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param part [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. 
# # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb index 8b67b1ea..6a4cc1d6 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -22,6 +22,12 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,7 +40,7 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_part.done"] required :type, const: :"response.reasoning_summary_part.done" - # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.done") + # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent} for more details. # @@ -46,6 +52,8 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param part [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb index 2aaefaf3..2a5fd60e 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -22,6 +22,12 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,7 +40,7 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_text.delta"] required :type, const: :"response.reasoning_summary_text.delta" - # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary_text.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary_text.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent} for more details. 
# @@ -46,6 +52,8 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary text delta is associated with. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param type [Symbol, :"response.reasoning_summary_text.delta"] The type of the event. Always `response.reasoning_summary_text.delta`. diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb index 5359b84a..e5860672 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -16,6 +16,12 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,7 +40,7 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_text.done"] required :type, const: :"response.reasoning_summary_text.done" - # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary_text.done") + # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary_text.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent} for more details. # @@ -44,6 +50,8 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary text is associated with. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param text [String] The full text of the completed reasoning summary. diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index aef8a0d5..9314f0a6 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -28,13 +28,19 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.refusal.delta`. # # @return [Symbol, :"response.refusal.delta"] required :type, const: :"response.refusal.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.refusal.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseRefusalDeltaEvent} for more details. 
# @@ -48,6 +54,8 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the refusal text is added to. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.refusal.delta"] The type of the event. Always `response.refusal.delta`. end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index 912b1796..36d56472 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -28,13 +28,19 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :refusal, String + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.refusal.done`. # # @return [Symbol, :"response.refusal.done"] required :type, const: :"response.refusal.done" - # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") + # @!method initialize(content_index:, item_id:, output_index:, refusal:, sequence_number:, type: :"response.refusal.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseRefusalDoneEvent} for more details. # @@ -48,6 +54,8 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # # @param refusal [String] The refusal text that is finalized. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.refusal.done"] The type of the event. Always `response.refusal.done`. end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 8ffc14a6..1ab7db61 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -34,13 +34,19 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_text.annotation.added`. # # @return [Symbol, :"response.output_text.annotation.added"] required :type, const: :"response.output_text.annotation.added" - # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text.annotation.added") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent} for more details. # @@ -56,6 +62,8 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the text annotation was added to. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. # A citation to a file. 
diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 48c639ef..46236823 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -28,13 +28,19 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_text.delta`. # # @return [Symbol, :"response.output_text.delta"] required :type, const: :"response.output_text.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.output_text.delta") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseTextDeltaEvent} for more details. # @@ -48,6 +54,8 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the text delta was added to. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 7b8921d4..8c391c93 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -22,6 +22,12 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute text # The text content that is finalized. # @@ -34,7 +40,7 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.done"] required :type, const: :"response.output_text.done" - # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") + # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.output_text.done") # Some parameter documentations has been truncated, see # {OpenAI::Responses::ResponseTextDoneEvent} for more details. # @@ -46,6 +52,8 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the text content is finalized. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param text [String] The text content that is finalized. # # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. 
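The hunks above make `sequence_number` a required Integer on every streaming event model. A minimal consumer sketch follows, assuming `events` is an Enumerable of already-parsed event models (how the stream itself is opened is outside this patch, and strictly consecutive numbering is an assumption, not something these hunks guarantee):

# frozen_string_literal: true

# Sketch: dispatch parsed streaming events and use the now-required
# `sequence_number` to detect dropped or reordered events.
def consume(events)
  previous = nil
  events.each do |event|
    # Warn on any gap; with sequence_number required (no longer optional),
    # a gap means an event was lost rather than a field omitted.
    if previous && event.sequence_number != previous + 1
      warn "expected event #{previous + 1}, got #{event.sequence_number}"
    end
    previous = event.sequence_number

    case event.type
    in :"response.output_text.delta"
      print event.delta
    in :"response.output_text.done"
      puts
    else
      # MCP call, reasoning-summary, image-generation and the other event
      # types touched in this patch carry the same field and can be
      # dispatched here.
    end
  end
end
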
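The diffs below add a `Containers` resource (with nested `Files` and `Files::Content`). A hedged usage sketch, assuming the create responses expose `id`, that hashes are accepted wherever the RBI declares an `OrHash` alias, that `OpenAI::Internal::CursorPage` is enumerable as elsewhere in the SDK, and a hypothetical local file `data.csv`:

# frozen_string_literal: true

require "openai"
require "pathname"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

# Create a container; only the 'last_active_at' anchor is currently supported.
container = client.containers.create(
  name: "scratch",
  expires_after: {anchor: :last_active_at, minutes: 20}
)

# Attach a file by uploading raw content (a `file_id:` can be sent instead).
file = client.containers.files.create(container.id, file: Pathname("data.csv"))

# Fetch the raw content back; the endpoint returns no parsed model (NilClass).
client.containers.files.content.retrieve(file.id, container_id: container.id)

# Cursor-based pagination; `limit` ranges between 1 and 100, default 20.
client.containers.list(limit: 20, order: :desc).each do |c|
  puts "#{c.id} #{c.name} #{c.status}"
end

client.containers.files.delete(file.id, container_id: container.id)
client.containers.delete(container.id)
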
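Further below, lib/openai/resources/responses.rb also gains a `cancel` endpoint. A one-line sketch, assuming `client` is an OpenAI::Client, a hypothetical ID `resp_123`, and a response originally created with `background: true` (the only kind that can be cancelled); the call returns nil on success:

client.responses.cancel("resp_123")
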
diff --git a/lib/openai/resources/containers.rb b/lib/openai/resources/containers.rb new file mode 100644 index 00000000..44ccdb6f --- /dev/null +++ b/lib/openai/resources/containers.rb @@ -0,0 +1,112 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + # @return [OpenAI::Resources::Containers::Files] + attr_reader :files + + # Create Container + # + # @overload create(name:, expires_after: nil, file_ids: nil, request_options: {}) + # + # @param name [String] Name of the container to create. + # + # @param expires_after [OpenAI::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time. + # + # @param file_ids [Array] IDs of files to copy to the container. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::ContainerCreateResponse] + # + # @see OpenAI::Models::ContainerCreateParams + def create(params) + parsed, options = OpenAI::ContainerCreateParams.dump_request(params) + @client.request( + method: :post, + path: "containers", + body: parsed, + model: OpenAI::Models::ContainerCreateResponse, + options: options + ) + end + + # Retrieve Container + # + # @overload retrieve(container_id, request_options: {}) + # + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::ContainerRetrieveResponse] + # + # @see OpenAI::Models::ContainerRetrieveParams + def retrieve(container_id, params = {}) + @client.request( + method: :get, + path: ["containers/%1$s", container_id], + model: OpenAI::Models::ContainerRetrieveResponse, + options: params[:request_options] + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListParams} for more details. + # + # List Containers + # + # @overload list(after: nil, limit: nil, order: nil, request_options: {}) + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::ContainerListParams + def list(params = {}) + parsed, options = OpenAI::ContainerListParams.dump_request(params) + @client.request( + method: :get, + path: "containers", + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::ContainerListResponse, + options: options + ) + end + + # Delete Container + # + # @overload delete(container_id, request_options: {}) + # + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::ContainerDeleteParams + def delete(container_id, params = {}) + @client.request( + method: :delete, + path: ["containers/%1$s", container_id], + model: NilClass, + options: params[:request_options] + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @files = OpenAI::Resources::Containers::Files.new(client: client) + end + end + end +end diff --git a/lib/openai/resources/containers/files.rb b/lib/openai/resources/containers/files.rb new file mode 100644 index 00000000..7e6fd334 --- /dev/null +++ b/lib/openai/resources/containers/files.rb @@ -0,0 +1,134 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + class Files + # @return [OpenAI::Resources::Containers::Files::Content] + attr_reader :content + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileCreateParams} for more details. + # + # Create a Container File + # + # You can send either a multipart/form-data request with the raw file content, or + # a JSON request with a file ID. + # + # @overload create(container_id, file: nil, file_id: nil, request_options: {}) + # + # @param container_id [String] + # + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. + # + # @param file_id [String] Name of the file to create. 
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Containers::FileCreateResponse] + # + # @see OpenAI::Models::Containers::FileCreateParams + def create(container_id, params = {}) + parsed, options = OpenAI::Containers::FileCreateParams.dump_request(params) + @client.request( + method: :post, + path: ["containers/%1$s/files", container_id], + body: parsed, + model: OpenAI::Models::Containers::FileCreateResponse, + options: options + ) + end + + # Retrieve Container File + # + # @overload retrieve(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Containers::FileRetrieveResponse] + # + # @see OpenAI::Models::Containers::FileRetrieveParams + def retrieve(file_id, params) + parsed, options = OpenAI::Containers::FileRetrieveParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["containers/%1$s/files/%2$s", container_id, file_id], + model: OpenAI::Models::Containers::FileRetrieveResponse, + options: options + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileListParams} for more details. + # + # List Container files + # + # @overload list(container_id, after: nil, limit: nil, order: nil, request_options: {}) + # + # @param container_id [String] + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::Containers::FileListParams + def list(container_id, params = {}) + parsed, options = OpenAI::Containers::FileListParams.dump_request(params) + @client.request( + method: :get, + path: ["containers/%1$s/files", container_id], + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::Containers::FileListResponse, + options: options + ) + end + + # Delete Container File + # + # @overload delete(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Containers::FileDeleteParams + def delete(file_id, params) + parsed, options = OpenAI::Containers::FileDeleteParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :delete, + path: ["containers/%1$s/files/%2$s", container_id, file_id], + model: NilClass, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @content = OpenAI::Resources::Containers::Files::Content.new(client: client) + end + end + end + end +end diff --git a/lib/openai/resources/containers/files/content.rb b/lib/openai/resources/containers/files/content.rb new file mode 100644 index 00000000..3d07b16e --- /dev/null +++ b/lib/openai/resources/containers/files/content.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + class Files + class Content + # Retrieve Container File Content + # + # @overload retrieve(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Containers::Files::ContentRetrieveParams + def retrieve(file_id, params) + parsed, options = OpenAI::Containers::Files::ContentRetrieveParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["containers/%1$s/files/%2$s/content", container_id, file_id], + model: NilClass, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end + end +end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 315deae0..68c6c276 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -210,6 +210,28 @@ def delete(response_id, params = {}) ) end + # Cancels a model response with the given ID. Only responses created with the + # `background` parameter set to `true` can be cancelled. + # [Learn more](https://platform.openai.com/docs/guides/background). + # + # @overload cancel(response_id, request_options: {}) + # + # @param response_id [String] The ID of the response to cancel. 
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Responses::ResponseCancelParams + def cancel(response_id, params = {}) + @client.request( + method: :post, + path: ["responses/%1$s/cancel", response_id], + model: NilClass, + options: params[:request_options] + ) + end + # @api private # # @param client [OpenAI::Client] diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index b89f1995..fc1f9348 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -67,6 +67,9 @@ module OpenAI sig { returns(OpenAI::Resources::Evals) } attr_reader :evals + sig { returns(OpenAI::Resources::Containers) } + attr_reader :containers + # @api private sig { override.returns(T::Hash[String, String]) } private def auth_headers diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index f26af6b7..c4a20f0f 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -43,6 +43,16 @@ module OpenAI CompoundFilter = OpenAI::Models::CompoundFilter + ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + ContainerListParams = OpenAI::Models::ContainerListParams + + ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + Containers = OpenAI::Models::Containers + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse Embedding = OpenAI::Models::Embedding diff --git a/rbi/openai/models/container_create_params.rbi b/rbi/openai/models/container_create_params.rbi new file mode 100644 index 00000000..ad4342cb --- /dev/null +++ b/rbi/openai/models/container_create_params.rbi @@ -0,0 +1,145 @@ +# typed: strong + +module OpenAI + module Models + class ContainerCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerCreateParams, OpenAI::Internal::AnyHash) + end + + # Name of the container to create. + sig { returns(String) } + attr_accessor :name + + # Container expiration time in seconds relative to the 'anchor' time. + sig { returns(T.nilable(OpenAI::ContainerCreateParams::ExpiresAfter)) } + attr_reader :expires_after + + sig do + params( + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + # IDs of files to copy to the container. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :file_ids + + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids + + sig do + params( + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # Name of the container to create. + name:, + # Container expiration time in seconds relative to the 'anchor' time. + expires_after: nil, + # IDs of files to copy to the container. + file_ids: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ContainerCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Time anchor for the expiration time. 
Currently only 'last_active_at' is + # supported. + sig do + returns(OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol) + end + attr_accessor :anchor + + sig { returns(Integer) } + attr_accessor :minutes + + # Container expiration time in seconds relative to the 'anchor' time. + sig do + params( + anchor: + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + anchor:, + minutes: + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_create_response.rbi b/rbi/openai/models/container_create_response.rbi new file mode 100644 index 00000000..4552eb5f --- /dev/null +++ b/rbi/openai/models/container_create_response.rbi @@ -0,0 +1,192 @@ +# typed: strong + +module OpenAI + module Models + class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerCreateResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. + sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). + sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + returns( + T.nilable(OpenAI::Models::ContainerCreateResponse::ExpiresAfter) + ) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. 
+ expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. + sig do + returns( + T.nilable( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. + sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_delete_params.rbi b/rbi/openai/models/container_delete_params.rbi new file mode 100644 index 00000000..85d0c862 --- /dev/null +++ b/rbi/openai/models/container_delete_params.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerDeleteParams, OpenAI::Internal::AnyHash) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/container_list_params.rbi b/rbi/openai/models/container_list_params.rbi new file mode 100644 index 00000000..3f3ebd63 --- /dev/null +++ b/rbi/openai/models/container_list_params.rbi @@ -0,0 +1,99 @@ +# typed: strong + +module OpenAI + module Models + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerListParams, OpenAI::Internal::AnyHash) + end + + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + sig { returns(T.nilable(OpenAI::ContainerListParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::ContainerListParams::Order::OrSymbol).void } + attr_writer :order + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ContainerListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ASC = T.let(:asc, OpenAI::ContainerListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::ContainerListParams::Order::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ContainerListParams::Order::TaggedSymbol] + ) + end + def self.values + end + end + end + end +end diff --git a/rbi/openai/models/container_list_response.rbi b/rbi/openai/models/container_list_response.rbi new file mode 100644 index 00000000..4431fa79 --- /dev/null +++ b/rbi/openai/models/container_list_response.rbi @@ -0,0 +1,190 @@ +# typed: strong + +module OpenAI + module Models + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerListResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. 
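+      # (Illustrative note, not part of the generated schema: for containers this
+      # is expected to be the literal string "container", though it is typed here
+      # as a plain String.)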
+ sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). + sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + returns(T.nilable(OpenAI::Models::ContainerListResponse::ExpiresAfter)) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerListResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerListResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerListResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. + sig do + returns( + T.nilable( + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. + sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. 
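+        # (Illustrative reading, not generated: with anchor `last_active_at` and
+        # `minutes: 20`, the container is treated as expired 20 minutes after it
+        # was last active.)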
+ module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_retrieve_params.rbi b/rbi/openai/models/container_retrieve_params.rbi new file mode 100644 index 00000000..6c987580 --- /dev/null +++ b/rbi/openai/models/container_retrieve_params.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerRetrieveParams, OpenAI::Internal::AnyHash) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/container_retrieve_response.rbi b/rbi/openai/models/container_retrieve_response.rbi new file mode 100644 index 00000000..a81e2d6a --- /dev/null +++ b/rbi/openai/models/container_retrieve_response.rbi @@ -0,0 +1,193 @@ +# typed: strong + +module OpenAI + module Models + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerRetrieveResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. + sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). + sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + returns( + T.nilable(OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter) + ) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. 
+ expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. + sig do + returns( + T.nilable( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. + sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_create_params.rbi b/rbi/openai/models/containers/file_create_params.rbi new file mode 100644 index 00000000..4d752f50 --- /dev/null +++ b/rbi/openai/models/containers/file_create_params.rbi @@ -0,0 +1,62 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::FileCreateParams, + OpenAI::Internal::AnyHash + ) + end + + # The File object (not file name) to be uploaded. + sig { returns(T.nilable(OpenAI::Internal::FileInput)) } + attr_reader :file + + sig { params(file: OpenAI::Internal::FileInput).void } + attr_writer :file + + # Name of the file to create. + sig { returns(T.nilable(String)) } + attr_reader :file_id + + sig { params(file_id: String).void } + attr_writer :file_id + + sig do + params( + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # The File object (not file name) to be uploaded. 
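+          # (Illustrative call, not generated; the resource path follows the
+          # SDK's usual conventions and the container ID is hypothetical:
+          #   client.containers.files.create("cntr_123", file: File.open("data.csv")))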
+ file: nil, + # Name of the file to create. + file_id: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_create_response.rbi b/rbi/openai/models/containers/file_create_response.rbi new file mode 100644 index 00000000..c39f81c6 --- /dev/null +++ b/rbi/openai/models/containers/file_create_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileCreateResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. + sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. + sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). 
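+            # (Note: `object` defaults to the literal :"container.file", so
+            # callers normally leave it unset.)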
+ object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_delete_params.rbi b/rbi/openai/models/containers/file_delete_params.rbi new file mode 100644 index 00000000..42830150 --- /dev/null +++ b/rbi/openai/models/containers/file_delete_params.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::FileDeleteParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_list_params.rbi b/rbi/openai/models/containers/file_list_params.rbi new file mode 100644 index 00000000..f62afa88 --- /dev/null +++ b/rbi/openai/models/containers/file_list_params.rbi @@ -0,0 +1,116 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::Containers::FileListParams, OpenAI::Internal::AnyHash) + end + + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + sig do + returns( + T.nilable(OpenAI::Containers::FileListParams::Order::OrSymbol) + ) + end + attr_reader :order + + sig do + params( + order: OpenAI::Containers::FileListParams::Order::OrSymbol + ).void + end + attr_writer :order + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. 
+ limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Containers::FileListParams::Order) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ASC = + T.let(:asc, OpenAI::Containers::FileListParams::Order::TaggedSymbol) + DESC = + T.let( + :desc, + OpenAI::Containers::FileListParams::Order::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::Containers::FileListParams::Order::TaggedSymbol] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_list_response.rbi b/rbi/openai/models/containers/file_list_response.rbi new file mode 100644 index 00000000..f33ad693 --- /dev/null +++ b/rbi/openai/models/containers/file_list_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileListResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileListResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. + sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. + sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). 
+ object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_retrieve_params.rbi b/rbi/openai/models/containers/file_retrieve_params.rbi new file mode 100644 index 00000000..0e9bfd6e --- /dev/null +++ b/rbi/openai/models/containers/file_retrieve_params.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::FileRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_retrieve_response.rbi b/rbi/openai/models/containers/file_retrieve_response.rbi new file mode 100644 index 00000000..eea83ee4 --- /dev/null +++ b/rbi/openai/models/containers/file_retrieve_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileRetrieveResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. + sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. + sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). 
+ object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/files/content_retrieve_params.rbi b/rbi/openai/models/containers/files/content_retrieve_params.rbi new file mode 100644 index 00000000..76a5f6a1 --- /dev/null +++ b/rbi/openai/models/containers/files/content_retrieve_params.rbi @@ -0,0 +1,42 @@ +# typed: strong + +module OpenAI + module Models + module Containers + module Files + class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::Files::ContentRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_audio_delta_event.rbi b/rbi/openai/models/responses/response_audio_delta_event.rbi index 87c6eff6..fbc13de2 100644 --- a/rbi/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_delta_event.rbi @@ -16,21 +16,35 @@ module OpenAI sig { returns(String) } attr_accessor :delta + # A sequence number for this chunk of the stream response. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when there is a partial audio response. - sig { params(delta: String, type: Symbol).returns(T.attached_class) } + sig do + params(delta: String, sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A chunk of Base64 encoded response audio bytes. delta:, + # A sequence number for this chunk of the stream response. + sequence_number:, # The type of the event. Always `response.audio.delta`. type: :"response.audio.delta" ) end - sig { override.returns({ delta: String, type: Symbol }) } + sig do + override.returns( + { delta: String, sequence_number: Integer, type: Symbol } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_done_event.rbi b/rbi/openai/models/responses/response_audio_done_event.rbi index d9a86980..20be4298 100644 --- a/rbi/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_done_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of the delta. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when the audio response is complete. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of the delta. + sequence_number:, # The type of the event. Always `response.audio.done`. 
type: :"response.audio.done" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi index 74e69b9b..ef75b6be 100644 --- a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -16,21 +16,35 @@ module OpenAI sig { returns(String) } attr_accessor :delta + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.transcript.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when there is a partial transcript of audio. - sig { params(delta: String, type: Symbol).returns(T.attached_class) } + sig do + params(delta: String, sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The partial transcript of the audio response. delta:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.audio.transcript.delta`. type: :"response.audio.transcript.delta" ) end - sig { override.returns({ delta: String, type: Symbol }) } + sig do + override.returns( + { delta: String, sequence_number: Integer, type: Symbol } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi index d0d670a2..f6a29e91 100644 --- a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.transcript.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when the full audio transcript is completed. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.audio.transcript.done`. 
type: :"response.audio.transcript.done" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_cancel_params.rbi b/rbi/openai/models/responses/response_cancel_params.rbi new file mode 100644 index 00000000..296eaaab --- /dev/null +++ b/rbi/openai/models/responses/response_cancel_params.rbi @@ -0,0 +1,32 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCancelParams, + OpenAI::Internal::AnyHash + ) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 220e24ca..a26838ef 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.code.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when a partial code snippet is added by the code interpreter. sig do - params(delta: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + delta: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The partial code snippet added by the code interpreter. delta:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.code.delta`. type: :"response.code_interpreter_call.code.delta" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { delta: String, output_index: Integer, type: Symbol } + { + delta: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 43a90d5f..fdd8c46e 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.code.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when code snippet output is finalized by the code interpreter. 
sig do - params(code: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + code: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The final code snippet output by the code interpreter. code:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.code.done`. type: :"response.code_interpreter_call.code.done" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { code: String, output_index: Integer, type: Symbol } + { + code: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi index be772f72..5e40e50d 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.completed`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.completed`. type: :"response.code_interpreter_call.completed" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 578fdb15..bf880f76 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.in_progress`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.in_progress`. 
type: :"response.code_interpreter_call.in_progress" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index bdf1313a..eb135de8 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.interpreting`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.interpreting`. type: :"response.code_interpreter_call.interpreting" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_completed_event.rbi b/rbi/openai/models/responses/response_completed_event.rbi index 18dd991a..a583bacf 100644 --- a/rbi/openai/models/responses/response_completed_event.rbi +++ b/rbi/openai/models/responses/response_completed_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.completed`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # Properties of the completed response. response:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.completed`. type: :"response.completed" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi index 09a3043f..82ef446c 100644 --- a/rbi/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/openai/models/responses/response_content_part_added_event.rbi @@ -32,6 +32,10 @@ module OpenAI end attr_accessor :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.content_part.added`. 
sig { returns(Symbol) } attr_accessor :type @@ -47,6 +51,7 @@ module OpenAI OpenAI::Responses::ResponseOutputText::OrHash, OpenAI::Responses::ResponseOutputRefusal::OrHash ), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -59,6 +64,8 @@ module OpenAI output_index:, # The content part that was added. part:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.content_part.added`. type: :"response.content_part.added" ) @@ -72,6 +79,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi index f0fce322..51be81c7 100644 --- a/rbi/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/openai/models/responses/response_content_part_done_event.rbi @@ -32,6 +32,10 @@ module OpenAI end attr_accessor :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.content_part.done`. sig { returns(Symbol) } attr_accessor :type @@ -47,6 +51,7 @@ module OpenAI OpenAI::Responses::ResponseOutputText::OrHash, OpenAI::Responses::ResponseOutputRefusal::OrHash ), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -59,6 +64,8 @@ module OpenAI output_index:, # The content part that is done. part:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.content_part.done`. type: :"response.content_part.done" ) @@ -72,6 +79,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_created_event.rbi b/rbi/openai/models/responses/response_created_event.rbi index ab66dcdd..ce2a5cff 100644 --- a/rbi/openai/models/responses/response_created_event.rbi +++ b/rbi/openai/models/responses/response_created_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.created`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that was created. response:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.created`. type: :"response.created" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_error_event.rbi b/rbi/openai/models/responses/response_error_event.rbi index 5c93890e..b651b4d8 100644 --- a/rbi/openai/models/responses/response_error_event.rbi +++ b/rbi/openai/models/responses/response_error_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :param + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. 
Always `error`. sig { returns(Symbol) } attr_accessor :type @@ -34,6 +38,7 @@ module OpenAI code: T.nilable(String), message: String, param: T.nilable(String), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -44,6 +49,8 @@ module OpenAI message:, # The error parameter. param:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `error`. type: :error ) @@ -55,6 +62,7 @@ module OpenAI code: T.nilable(String), message: String, param: T.nilable(String), + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_failed_event.rbi b/rbi/openai/models/responses/response_failed_event.rbi index 8f8f2c19..8ac778a5 100644 --- a/rbi/openai/models/responses/response_failed_event.rbi +++ b/rbi/openai/models/responses/response_failed_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.failed`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that failed. response:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.failed`. type: :"response.failed" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi index 3b6a2331..dec63c3d 100644 --- a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.completed`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search call is completed (results found). sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is initiated. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.completed`. 
type: :"response.file_search_call.completed" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi index d93e58f0..e77a9b13 100644 --- a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.in_progress`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search call is initiated. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is initiated. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.in_progress`. type: :"response.file_search_call.in_progress" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi index 6fd13a21..5a9be332 100644 --- a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.searching`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search is currently searching. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is searching. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.searching`. 
type: :"response.file_search_call.searching" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi index 3e24f3b5..eb59905d 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.function_call_arguments.delta`. sig { returns(Symbol) } attr_accessor :type @@ -34,6 +38,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -44,6 +49,8 @@ module OpenAI item_id:, # The index of the output item that the function-call arguments delta is added to. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.function_call_arguments.delta`. type: :"response.function_call_arguments.delta" ) @@ -55,6 +62,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi index 5ec5cb31..dac56326 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + sig { returns(Symbol) } attr_accessor :type @@ -33,6 +37,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -43,6 +48,8 @@ module OpenAI item_id:, # The index of the output item. output_index:, + # The sequence number of this event. + sequence_number:, type: :"response.function_call_arguments.done" ) end @@ -53,6 +60,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi index b6891594..176049a4 100644 --- a/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.image_generation_call.completed'. sig { returns(Symbol) } attr_accessor :type @@ -27,15 +31,20 @@ module OpenAI # Emitted when an image generation tool call has completed and the final image is # available. 
sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The unique identifier of the image generation item being processed. item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.image_generation_call.completed'. type: :"response.image_generation_call.completed" ) @@ -43,7 +52,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi index 66bad12d..ece2928d 100644 --- a/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi +++ b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi @@ -20,17 +20,14 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.image_generation_call.generating'. sig { returns(Symbol) } attr_accessor :type - # The sequence number of the image generation item being processed. - sig { returns(T.nilable(Integer)) } - attr_reader :sequence_number - - sig { params(sequence_number: Integer).void } - attr_writer :sequence_number - # Emitted when an image generation tool call is actively generating an image # (intermediate state). sig do @@ -47,7 +44,7 @@ module OpenAI # The index of the output item in the response's output array. output_index:, # The sequence number of the image generation item being processed. - sequence_number: nil, + sequence_number:, # The type of the event. Always 'response.image_generation_call.generating'. type: :"response.image_generation_call.generating" ) @@ -58,8 +55,8 @@ module OpenAI { item_id: String, output_index: Integer, - type: Symbol, - sequence_number: Integer + sequence_number: Integer, + type: Symbol } ) end diff --git a/rbi/openai/models/responses/response_in_progress_event.rbi b/rbi/openai/models/responses/response_in_progress_event.rbi index 3bb0ccb9..c3dd5e40 100644 --- a/rbi/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_in_progress_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.in_progress`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that is in progress. response:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.in_progress`. 
type: :"response.in_progress" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_incomplete_event.rbi b/rbi/openai/models/responses/response_incomplete_event.rbi index a30d27a8..591c46e8 100644 --- a/rbi/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/openai/models/responses/response_incomplete_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.incomplete`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that was incomplete. response:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.incomplete`. type: :"response.incomplete" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi index 543fa935..be6f8e3e 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_call.arguments_delta'. sig { returns(Symbol) } attr_accessor :type @@ -35,6 +39,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -45,6 +50,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_call.arguments_delta'. type: :"response.mcp_call.arguments_delta" ) @@ -56,6 +63,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi index 30a6a9dc..df22b5d2 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_call.arguments_done'. 
sig { returns(Symbol) } attr_accessor :type @@ -34,6 +38,7 @@ module OpenAI arguments: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -44,6 +49,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_call.arguments_done'. type: :"response.mcp_call.arguments_done" ) @@ -55,6 +62,7 @@ module OpenAI arguments: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi index ca8572f4..4ba445d9 100644 --- a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_call.completed'. sig { returns(Symbol) } attr_accessor :type # Emitted when an MCP tool call has completed successfully. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_call.completed'. type: :"response.mcp_call.completed" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi index 58153d24..81d64fff 100644 --- a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_call.failed'. sig { returns(Symbol) } attr_accessor :type # Emitted when an MCP tool call has failed. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_call.failed'. type: :"response.mcp_call.failed" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi index dc3261b2..ebe6399f 100644 --- a/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_call.in_progress'. sig { returns(Symbol) } attr_accessor :type # Emitted when an MCP tool call is in progress. 
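        #
        # A sketch of tracking in-flight MCP calls from the stream; `stream` is
        # an illustrative stand-in for an enumerable of parsed events:
        #
        #   in_flight = {}
        #   stream.each do |event|
        #     case event.type
        #     when :"response.mcp_call.in_progress"
        #       in_flight[event.item_id] = event.sequence_number
        #     when :"response.mcp_call.completed", :"response.mcp_call.failed"
        #       # terminal MCP events carry only `sequence_number` and `type`
        #       in_flight.clear
        #     end
        #   end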
sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The unique identifier of the MCP tool call item being processed. item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_call.in_progress'. type: :"response.mcp_call.in_progress" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi index 5fdc05fd..619af81d 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_list_tools.completed'. sig { returns(Symbol) } attr_accessor :type # Emitted when the list of available MCP tools has been successfully retrieved. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_list_tools.completed'. type: :"response.mcp_list_tools.completed" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi index ab9f7270..5ac00403 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_list_tools.failed'. sig { returns(Symbol) } attr_accessor :type # Emitted when the attempt to list available MCP tools has failed. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_list_tools.failed'. type: :"response.mcp_list_tools.failed" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi index c96ab479..7bbcbda2 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi @@ -12,20 +12,30 @@ module OpenAI ) end + # The sequence number of this event. 
+ sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.mcp_list_tools.in_progress'. sig { returns(Symbol) } attr_accessor :type # Emitted when the system is in the process of retrieving the list of available # MCP tools. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.mcp_list_tools.in_progress'. type: :"response.mcp_list_tools.in_progress" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index d507d0a1..6479c80d 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_item.added`. sig { returns(Symbol) } attr_accessor :type @@ -43,6 +47,7 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item:, # The index of the output item that was added. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.output_item.added`. type: :"response.output_item.added" ) @@ -61,6 +68,7 @@ module OpenAI { item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index 8c21edf4..7789f951 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_item.done`. sig { returns(Symbol) } attr_accessor :type @@ -43,6 +47,7 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item:, # The index of the output item that was marked done. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.output_item.done`. 
type: :"response.output_item.done" ) @@ -61,6 +68,7 @@ module OpenAI { item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi index 3cd7a5db..422e56dc 100644 --- a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi +++ b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi @@ -32,6 +32,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.output_text_annotation.added'. sig { returns(Symbol) } attr_accessor :type @@ -44,6 +48,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -58,6 +63,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.output_text_annotation.added'. type: :"response.output_text_annotation.added" ) @@ -71,6 +78,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_queued_event.rbi b/rbi/openai/models/responses/response_queued_event.rbi index 066f73a9..015a597c 100644 --- a/rbi/openai/models/responses/response_queued_event.rbi +++ b/rbi/openai/models/responses/response_queued_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.queued'. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The full response object that is queued. response:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always 'response.queued'. type: :"response.queued" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_reasoning_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_delta_event.rbi index 5de816d0..37fc9e63 100644 --- a/rbi/openai/models/responses/response_reasoning_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_delta_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always 'response.reasoning.delta'. sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. 
output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always 'response.reasoning.delta'. type: :"response.reasoning.delta" ) @@ -63,6 +70,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_reasoning_done_event.rbi b/rbi/openai/models/responses/response_reasoning_done_event.rbi index 17d3272d..05c8b893 100644 --- a/rbi/openai/models/responses/response_reasoning_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The finalized reasoning text. sig { returns(String) } attr_accessor :text @@ -38,6 +42,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol ).returns(T.attached_class) @@ -49,6 +54,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The finalized reasoning text. text:, # The type of the event. Always 'response.reasoning.done'. @@ -62,6 +69,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi index 9f7deac4..c92fd014 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi @@ -25,6 +25,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the output item. sig { returns(Integer) } attr_accessor :summary_index @@ -39,6 +43,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the output item. summary_index:, # The type of the event. Always 'response.reasoning_summary.delta'. @@ -64,6 +71,7 @@ module OpenAI delta: T.anything, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi index 24bebaed..eead2395 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the output item. 
sig { returns(Integer) } attr_accessor :summary_index @@ -37,6 +41,7 @@ module OpenAI params( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol @@ -47,6 +52,8 @@ module OpenAI item_id:, # The index of the output item in the response's output array. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the output item. summary_index:, # The finalized reasoning summary text. @@ -61,6 +68,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi index 7e0103dc..400eaae7 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -36,6 +36,10 @@ module OpenAI end attr_writer :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -51,6 +55,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part::OrHash, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -62,6 +67,8 @@ module OpenAI output_index:, # The summary part that was added. part:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_part.added`. @@ -76,6 +83,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi index ecf52172..40caaff1 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -36,6 +36,10 @@ module OpenAI end attr_writer :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -51,6 +55,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part::OrHash, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -62,6 +67,8 @@ module OpenAI output_index:, # The completed summary part. part:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_part.done`. 
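        # Every streaming event in this change gains a required Integer
        # `sequence_number` (the image-generation `generating` event, where it
        # was previously optional, is tightened to required as well). Assuming
        # the numbers reflect emission order, which the signatures do not
        # themselves guarantee, a consumer can restore ordering after buffering
        # events concurrently; `events` is an illustrative array of parsed
        # stream events:
        #
        #   events.sort_by!(&:sequence_number)
        #   events.each_cons(2) do |a, b|
        #     warn "gap in stream" if b.sequence_number != a.sequence_number + 1
        #   end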
@@ -76,6 +83,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi index 0367f592..f4af0148 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -38,6 +42,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -49,6 +54,8 @@ module OpenAI item_id:, # The index of the output item this summary text delta is associated with. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_text.delta`. @@ -62,6 +69,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi index 89590e6b..95ab837f 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -37,6 +41,7 @@ module OpenAI params( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol @@ -47,6 +52,8 @@ module OpenAI item_id:, # The index of the output item this summary text is associated with. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The full text of the completed reasoning summary. @@ -61,6 +68,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol diff --git a/rbi/openai/models/responses/response_refusal_delta_event.rbi b/rbi/openai/models/responses/response_refusal_delta_event.rbi index cde0e1ec..34271558 100644 --- a/rbi/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/openai/models/responses/response_refusal_delta_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.refusal.delta`. 
sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item that the refusal text is added to. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.refusal.delta`. type: :"response.refusal.delta" ) @@ -63,6 +70,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_refusal_done_event.rbi b/rbi/openai/models/responses/response_refusal_done_event.rbi index db039d4f..3f7a62e8 100644 --- a/rbi/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/openai/models/responses/response_refusal_done_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(String) } attr_accessor :refusal + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.refusal.done`. sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI output_index:, # The refusal text that is finalized. refusal:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.refusal.done`. type: :"response.refusal.done" ) @@ -63,6 +70,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi index fa115d06..53babd9a 100644 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi @@ -36,6 +36,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_text.annotation.added`. sig { returns(Symbol) } attr_accessor :type @@ -53,6 +57,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -67,6 +72,8 @@ module OpenAI item_id:, # The index of the output item that the text annotation was added to. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.output_text.annotation.added`. type: :"response.output_text.annotation.added" ) @@ -81,6 +88,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi index 661b88d1..ba989ab5 100644 --- a/rbi/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_delta_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_text.delta`. 
sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item that the text delta was added to. output_index:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.output_text.delta`. type: :"response.output_text.delta" ) @@ -63,6 +70,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi index 2b142d10..aa712038 100644 --- a/rbi/openai/models/responses/response_text_done_event.rbi +++ b/rbi/openai/models/responses/response_text_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The text content that is finalized. sig { returns(String) } attr_accessor :text @@ -38,6 +42,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol ).returns(T.attached_class) @@ -49,6 +54,8 @@ module OpenAI item_id:, # The index of the output item that the text content is finalized. output_index:, + # The sequence number for this event. + sequence_number:, # The text content that is finalized. text:, # The type of the event. Always `response.output_text.done`. @@ -62,6 +69,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol } diff --git a/rbi/openai/resources/containers.rbi b/rbi/openai/resources/containers.rbi new file mode 100644 index 00000000..2ca5a9df --- /dev/null +++ b/rbi/openai/resources/containers.rbi @@ -0,0 +1,82 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + sig { returns(OpenAI::Resources::Containers::Files) } + attr_reader :files + + # Create Container + sig do + params( + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::ContainerCreateResponse) + end + def create( + # Name of the container to create. + name:, + # Container expiration time in seconds relative to the 'anchor' time. + expires_after: nil, + # IDs of files to copy to the container. + file_ids: nil, + request_options: {} + ) + end + + # Retrieve Container + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::ContainerRetrieveResponse) + end + def retrieve(container_id, request_options: {}) + end + + # List Containers + sig do + params( + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::Models::ContainerListResponse] + ) + end + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. 
Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + # Delete Container + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def delete(container_id, request_options: {}) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end +end diff --git a/rbi/openai/resources/containers/files.rbi b/rbi/openai/resources/containers/files.rbi new file mode 100644 index 00000000..3f49be09 --- /dev/null +++ b/rbi/openai/resources/containers/files.rbi @@ -0,0 +1,92 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + class Files + sig { returns(OpenAI::Resources::Containers::Files::Content) } + attr_reader :content + + # Create a Container File + # + # You can send either a multipart/form-data request with the raw file content, or + # a JSON request with a file ID. + sig do + params( + container_id: String, + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Containers::FileCreateResponse) + end + def create( + container_id, + # The File object (not file name) to be uploaded. + file: nil, + # Name of the file to create. + file_id: nil, + request_options: {} + ) + end + + # Retrieve Container File + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Containers::FileRetrieveResponse) + end + def retrieve(file_id, container_id:, request_options: {}) + end + + # List Container files + sig do + params( + container_id: String, + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::Models::Containers::FileListResponse + ] + ) + end + def list( + container_id, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
+ order: nil, + request_options: {} + ) + end + + # Delete Container File + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def delete(file_id, container_id:, request_options: {}) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/rbi/openai/resources/containers/files/content.rbi b/rbi/openai/resources/containers/files/content.rbi new file mode 100644 index 00000000..a299fe68 --- /dev/null +++ b/rbi/openai/resources/containers/files/content.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + class Files + class Content + # Retrieve Container File Content + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def retrieve(file_id, container_id:, request_options: {}) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end + end +end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index e5f71338..e4f15ce4 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -456,6 +456,22 @@ module OpenAI ) end + # Cancels a model response with the given ID. Only responses created with the + # `background` parameter set to `true` can be cancelled. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig do + params( + response_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def cancel( + # The ID of the response to cancel. + response_id, + request_options: {} + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 929bc894..2b085363 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -46,6 +46,8 @@ module OpenAI attr_reader evals: OpenAI::Resources::Evals + attr_reader containers: OpenAI::Resources::Containers + private def auth_headers: -> ::Hash[String, String] def initialize: ( diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index f7ef937f..3d40bce0 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -41,6 +41,16 @@ module OpenAI class CompoundFilter = OpenAI::Models::CompoundFilter + class ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + class ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + class ContainerListParams = OpenAI::Models::ContainerListParams + + class ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + module Containers = OpenAI::Models::Containers + class CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse class Embedding = OpenAI::Models::Embedding diff --git a/sig/openai/models/container_create_params.rbs b/sig/openai/models/container_create_params.rbs new file mode 100644 index 00000000..b7a1c68f --- /dev/null +++ b/sig/openai/models/container_create_params.rbs @@ -0,0 +1,62 @@ +module OpenAI + module Models + type container_create_params = + { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: ::Array[String] + } + & OpenAI::Internal::Type::request_parameters + + class ContainerCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters + + attr_accessor name: String + + attr_reader expires_after: OpenAI::ContainerCreateParams::ExpiresAfter? + + def expires_after=: ( + OpenAI::ContainerCreateParams::ExpiresAfter + ) -> OpenAI::ContainerCreateParams::ExpiresAfter + + attr_reader file_ids: ::Array[String]? + + def file_ids=: (::Array[String]) -> ::Array[String] + + def initialize: ( + name: String, + ?expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> void + + type expires_after = + { + anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor + + attr_accessor minutes: Integer + + def initialize: ( + anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + ) -> void + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::ContainerCreateParams::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_create_response.rbs b/sig/openai/models/container_create_response.rbs new file mode 100644 index 00000000..262d50cf --- /dev/null +++ b/sig/openai/models/container_create_response.rbs @@ -0,0 +1,73 @@ +module OpenAI + module Models + type container_create_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + + class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter? + + def expires_after=: ( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerCreateResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + ) -> void + + type expires_after = + { + anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? 
+ + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_delete_params.rbs b/sig/openai/models/container_delete_params.rbs new file mode 100644 index 00000000..abb4eb46 --- /dev/null +++ b/sig/openai/models/container_delete_params.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Models + type container_delete_params = + { } & OpenAI::Internal::Type::request_parameters + + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + end + end +end diff --git a/sig/openai/models/container_list_params.rbs b/sig/openai/models/container_list_params.rbs new file mode 100644 index 00000000..76c90fcc --- /dev/null +++ b/sig/openai/models/container_list_params.rbs @@ -0,0 +1,48 @@ +module OpenAI + module Models + type container_list_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::ContainerListParams::order + } + & OpenAI::Internal::Type::request_parameters + + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::ContainerListParams::order? + + def order=: ( + OpenAI::Models::ContainerListParams::order + ) -> OpenAI::Models::ContainerListParams::order + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::ContainerListParams::order, + ?request_options: OpenAI::request_opts + ) -> void + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::ContainerListParams::order] + end + end + end +end diff --git a/sig/openai/models/container_list_response.rbs b/sig/openai/models/container_list_response.rbs new file mode 100644 index 00000000..45796baa --- /dev/null +++ b/sig/openai/models/container_list_response.rbs @@ -0,0 +1,73 @@ +module OpenAI + module Models + type container_list_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter? 
+ + def expires_after=: ( + OpenAI::Models::ContainerListResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerListResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + ) -> void + + type expires_after = + { + anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? + + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_retrieve_params.rbs b/sig/openai/models/container_retrieve_params.rbs new file mode 100644 index 00000000..c7a46fc4 --- /dev/null +++ b/sig/openai/models/container_retrieve_params.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Models + type container_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters + + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + end + end +end diff --git a/sig/openai/models/container_retrieve_response.rbs b/sig/openai/models/container_retrieve_response.rbs new file mode 100644 index 00000000..0325b595 --- /dev/null +++ b/sig/openai/models/container_retrieve_response.rbs @@ -0,0 +1,73 @@ +module OpenAI + module Models + type container_retrieve_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter? + + def expires_after=: ( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + ) -> void + + type expires_after = + { + anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? 
+ + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/containers/file_create_params.rbs b/sig/openai/models/containers/file_create_params.rbs new file mode 100644 index 00000000..dc5dc476 --- /dev/null +++ b/sig/openai/models/containers/file_create_params.rbs @@ -0,0 +1,30 @@ +module OpenAI + module Models + module Containers + type file_create_params = + { file: OpenAI::Internal::file_input, file_id: String } + & OpenAI::Internal::Type::request_parameters + + class FileCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader file: OpenAI::Internal::file_input? + + def file=: ( + OpenAI::Internal::file_input + ) -> OpenAI::Internal::file_input + + attr_reader file_id: String? + + def file_id=: (String) -> String + + def initialize: ( + ?file: OpenAI::Internal::file_input, + ?file_id: String, + ?request_options: OpenAI::request_opts + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/file_create_response.rbs b/sig/openai/models/containers/file_create_response.rbs new file mode 100644 index 00000000..cb3aa874 --- /dev/null +++ b/sig/openai/models/containers/file_create_response.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Containers + type file_create_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/file_delete_params.rbs b/sig/openai/models/containers/file_delete_params.rbs new file mode 100644 index 00000000..e53a0bee --- /dev/null +++ b/sig/openai/models/containers/file_delete_params.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Models + module Containers + type file_delete_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/file_list_params.rbs b/sig/openai/models/containers/file_list_params.rbs new file mode 100644 index 00000000..bc780ba7 --- /dev/null +++ b/sig/openai/models/containers/file_list_params.rbs @@ -0,0 +1,50 @@ +module OpenAI + module Models + module Containers + type file_list_params = + { + after: String, + limit: Integer, + order: 
OpenAI::Models::Containers::FileListParams::order + } + & OpenAI::Internal::Type::request_parameters + + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::Containers::FileListParams::order? + + def order=: ( + OpenAI::Models::Containers::FileListParams::order + ) -> OpenAI::Models::Containers::FileListParams::order + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Containers::FileListParams::order, + ?request_options: OpenAI::request_opts + ) -> void + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::Containers::FileListParams::order] + end + end + end + end +end diff --git a/sig/openai/models/containers/file_list_response.rbs b/sig/openai/models/containers/file_list_response.rbs new file mode 100644 index 00000000..5c878ced --- /dev/null +++ b/sig/openai/models/containers/file_list_response.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Containers + type file_list_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/file_retrieve_params.rbs b/sig/openai/models/containers/file_retrieve_params.rbs new file mode 100644 index 00000000..c67ff1c9 --- /dev/null +++ b/sig/openai/models/containers/file_retrieve_params.rbs @@ -0,0 +1,20 @@ +module OpenAI + module Models + module Containers + type file_retrieve_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/file_retrieve_response.rbs b/sig/openai/models/containers/file_retrieve_response.rbs new file mode 100644 index 00000000..d52e25e7 --- /dev/null +++ b/sig/openai/models/containers/file_retrieve_response.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Containers + type file_retrieve_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor 
source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + end + end + end +end diff --git a/sig/openai/models/containers/files/content_retrieve_params.rbs b/sig/openai/models/containers/files/content_retrieve_params.rbs new file mode 100644 index 00000000..6f56cf72 --- /dev/null +++ b/sig/openai/models/containers/files/content_retrieve_params.rbs @@ -0,0 +1,22 @@ +module OpenAI + module Models + module Containers + module Files + type content_retrieve_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + end + end + end + end +end diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index 370be606..efa89936 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -2,14 +2,24 @@ module OpenAI module Models module Responses type response_audio_delta_event = - { delta: String, type: :"response.audio.delta" } + { + delta: String, + sequence_number: Integer, + type: :"response.audio.delta" + } class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.delta" - def initialize: (delta: String, ?type: :"response.audio.delta") -> void + def initialize: ( + delta: String, + sequence_number: Integer, + ?type: :"response.audio.delta" + ) -> void end end end diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 9399bfae..405a0893 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -1,12 +1,18 @@ module OpenAI module Models module Responses - type response_audio_done_event = { type: :"response.audio.done" } + type response_audio_done_event = + { sequence_number: Integer, type: :"response.audio.done" } class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.done" - def initialize: (?type: :"response.audio.done") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.audio.done" + ) -> void end end end diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index ef7c93c3..dd04d32a 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_audio_transcript_delta_event = - { delta: String, type: :"response.audio.transcript.delta" } + { + delta: String, + sequence_number: Integer, + type: :"response.audio.transcript.delta" + } class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String + attr_accessor sequence_number: Integer + attr_accessor type: 
:"response.audio.transcript.delta" def initialize: ( delta: String, + sequence_number: Integer, ?type: :"response.audio.transcript.delta" ) -> void end diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index 7397feff..c4fe0f1e 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -2,12 +2,17 @@ module OpenAI module Models module Responses type response_audio_transcript_done_event = - { type: :"response.audio.transcript.done" } + { sequence_number: Integer, type: :"response.audio.transcript.done" } class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.transcript.done" - def initialize: (?type: :"response.audio.transcript.done") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.audio.transcript.done" + ) -> void end end end diff --git a/sig/openai/models/responses/response_cancel_params.rbs b/sig/openai/models/responses/response_cancel_params.rbs new file mode 100644 index 00000000..75dd01ea --- /dev/null +++ b/sig/openai/models/responses/response_cancel_params.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + module Responses + type response_cancel_params = + { } & OpenAI::Internal::Type::request_parameters + + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + end + end + end +end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 212d9bfa..6d41f4d7 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -5,6 +5,7 @@ module OpenAI { delta: String, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.code.delta" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.code.delta" def initialize: ( delta: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.code.delta" ) -> void end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 72f8bc5d..7fb5a59a 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code: String, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.code.done" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.code.done" def initialize: ( code: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.code.done" ) -> void end diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs 
b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index d2d8e451..ef439638 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.completed" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.completed" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.completed" ) -> void end diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index d4d09537..782ccb19 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.in_progress" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.in_progress" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.in_progress" ) -> void end diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index 8fd6cac0..572ce437 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.interpreting" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.interpreting" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.interpreting" ) -> void end diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 4900852e..0d4cbe1d 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_completed_event = - { response: OpenAI::Responses::Response, type: :"response.completed" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.completed" + } class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor 
sequence_number: Integer + attr_accessor type: :"response.completed" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.completed" ) -> void end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 7f1ecbc3..1e116912 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -7,6 +7,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, type: :"response.content_part.added" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseContentPartAddedEvent::part + attr_accessor sequence_number: Integer + attr_accessor type: :"response.content_part.added" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, ?type: :"response.content_part.added" ) -> void diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 2cfd7195..267b2a93 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -7,6 +7,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, type: :"response.content_part.done" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseContentPartDoneEvent::part + attr_accessor sequence_number: Integer + attr_accessor type: :"response.content_part.done" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, ?type: :"response.content_part.done" ) -> void diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index e93fe5a7..fb7adb7c 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_created_event = - { response: OpenAI::Responses::Response, type: :"response.created" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.created" + } class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.created" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.created" ) -> void end diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index ac79ae54..305a05ec 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -2,7 +2,13 @@ module OpenAI module Models module Responses type response_error_event = - { code: String?, message: String, param: String?, type: :error } + { + code: String?, + message: String, + param: String?, + sequence_number: Integer, + type: :error + } class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel 
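Every streaming event type in this patch gains a required `sequence_number`, so consumers can assert global ordering across event kinds. A minimal sketch of that check follows; the client setup, the `responses.stream_raw` entry point, and the model name are assumptions for illustration, not part of this diff:

require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Hypothetical consumer: iterate raw stream events and verify that
# sequence numbers only ever increase, now that every event carries one.
last_seen = 0
client.responses.stream_raw(model: "gpt-4.1", input: "Say hello.").each do |event|
  warn "out-of-order event: #{event.type}" if event.sequence_number <= last_seen
  last_seen = event.sequence_number
  puts format("%4d %s", event.sequence_number, event.type)
end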
attr_accessor code: String? @@ -11,12 +17,15 @@ module OpenAI attr_accessor param: String? + attr_accessor sequence_number: Integer + attr_accessor type: :error def initialize: ( code: String?, message: String, param: String?, + sequence_number: Integer, ?type: :error ) -> void end diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index fe0562a3..021ce3c1 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_failed_event = - { response: OpenAI::Responses::Response, type: :"response.failed" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.failed" + } class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.failed" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.failed" ) -> void end diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index dc0013fc..85d8a2fb 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.completed" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.completed" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.file_search_call.completed" ) -> void end diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index 36f69c57..babf065c 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.in_progress" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.in_progress" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.file_search_call.in_progress" ) -> void end diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index f1994439..7a5dadfd 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.searching" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.searching" def initialize: ( item_id: String, output_index: Integer, + sequence_number: 
Integer, ?type: :"response.file_search_call.searching" ) -> void end diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index e4486520..eb630a74 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.function_call_arguments.delta" } @@ -16,12 +17,15 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.function_call_arguments.delta" def initialize: ( delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.function_call_arguments.delta" ) -> void end diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index 3e023f5c..033aba4a 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.function_call_arguments.done" } @@ -16,12 +17,15 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.function_call_arguments.done" def initialize: ( arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.function_call_arguments.done" ) -> void end diff --git a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs index 56a60bce..54310938 100644 --- a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs +++ b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.image_generation_call.completed" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.image_generation_call.completed" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.image_generation_call.completed" ) -> void end diff --git a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs index 20d111fd..1cab46cc 100644 --- a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs +++ b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs @@ -5,8 +5,8 @@ module OpenAI { item_id: String, output_index: Integer, - type: :"response.image_generation_call.generating", - sequence_number: Integer + sequence_number: Integer, + type: :"response.image_generation_call.generating" } class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel @@ -14,16 +14,14 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor type: :"response.image_generation_call.generating" - - attr_reader sequence_number: Integer? 
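The function-call argument events above follow the usual delta/done contract: buffer `delta` fragments per `item_id`, then parse the complete `arguments` string carried by the done event. A sketch under the same client assumptions as before (a real request would also pass a `tools:` array, omitted here for brevity):

require "json"
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Accumulate partial JSON per output item as it streams in.
buffers = Hash.new { |h, k| h[k] = +"" }

client.responses.stream_raw(model: "gpt-4.1", input: "Weather in Oslo?").each do |event|
  case event
  in OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent
    buffers[event.item_id] << event.delta # partial JSON, in arrival order
  in OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent
    # `arguments` on the done event is the complete payload.
    puts "#{event.item_id}: #{JSON.parse(event.arguments).inspect}"
  else
    # ignore other event kinds
  end
end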
+ attr_accessor sequence_number: Integer - def sequence_number=: (Integer) -> Integer + attr_accessor type: :"response.image_generation_call.generating" def initialize: ( item_id: String, output_index: Integer, - ?sequence_number: Integer, + sequence_number: Integer, ?type: :"response.image_generation_call.generating" ) -> void end diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 4d9f9e2a..45a97dac 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_in_progress_event = - { response: OpenAI::Responses::Response, type: :"response.in_progress" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.in_progress" + } class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.in_progress" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.in_progress" ) -> void end diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index 5e527b2f..6d7c7d11 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ b/sig/openai/models/responses/response_incomplete_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_incomplete_event = - { response: OpenAI::Responses::Response, type: :"response.incomplete" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.incomplete" + } class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.incomplete" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.incomplete" ) -> void end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs index 0fd30fe8..edc8ed3f 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.mcp_call.arguments_delta" } @@ -16,12 +17,15 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_call.arguments_delta" def initialize: ( delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.mcp_call.arguments_delta" ) -> void end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs index 33550d67..38693df6 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI arguments: top, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.mcp_call.arguments_done" } @@ -16,12 +17,15 @@ module OpenAI attr_accessor output_index: Integer + 
attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_call.arguments_done" def initialize: ( arguments: top, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.mcp_call.arguments_done" ) -> void end diff --git a/sig/openai/models/responses/response_mcp_call_completed_event.rbs b/sig/openai/models/responses/response_mcp_call_completed_event.rbs index 37b0b9e4..71dd9297 100644 --- a/sig/openai/models/responses/response_mcp_call_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_completed_event.rbs @@ -2,12 +2,17 @@ module OpenAI module Models module Responses type response_mcp_call_completed_event = - { type: :"response.mcp_call.completed" } + { sequence_number: Integer, type: :"response.mcp_call.completed" } class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_call.completed" - def initialize: (?type: :"response.mcp_call.completed") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_call.completed" + ) -> void end end end diff --git a/sig/openai/models/responses/response_mcp_call_failed_event.rbs b/sig/openai/models/responses/response_mcp_call_failed_event.rbs index 39d6df8d..6ab340fe 100644 --- a/sig/openai/models/responses/response_mcp_call_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_failed_event.rbs @@ -2,12 +2,17 @@ module OpenAI module Models module Responses type response_mcp_call_failed_event = - { type: :"response.mcp_call.failed" } + { sequence_number: Integer, type: :"response.mcp_call.failed" } class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_call.failed" - def initialize: (?type: :"response.mcp_call.failed") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_call.failed" + ) -> void end end end diff --git a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs index 3fe682e3..fd7ba1a2 100644 --- a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.mcp_call.in_progress" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_call.in_progress" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.mcp_call.in_progress" ) -> void end diff --git a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs index 304d51b9..f34961c0 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs @@ -2,12 +2,17 @@ module OpenAI module Models module Responses type response_mcp_list_tools_completed_event = - { type: :"response.mcp_list_tools.completed" } + { sequence_number: Integer, type: :"response.mcp_list_tools.completed" } class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_list_tools.completed" - def initialize: (?type: 
:"response.mcp_list_tools.completed") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.completed" + ) -> void end end end diff --git a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs index 8e4ea36a..dad83031 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs @@ -2,12 +2,17 @@ module OpenAI module Models module Responses type response_mcp_list_tools_failed_event = - { type: :"response.mcp_list_tools.failed" } + { sequence_number: Integer, type: :"response.mcp_list_tools.failed" } class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_list_tools.failed" - def initialize: (?type: :"response.mcp_list_tools.failed") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.failed" + ) -> void end end end diff --git a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs index 1b4fa40e..bd9bbbb7 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs @@ -2,12 +2,20 @@ module OpenAI module Models module Responses type response_mcp_list_tools_in_progress_event = - { type: :"response.mcp_list_tools.in_progress" } + { + sequence_number: Integer, + type: :"response.mcp_list_tools.in_progress" + } class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.mcp_list_tools.in_progress" - def initialize: (?type: :"response.mcp_list_tools.in_progress") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.in_progress" + ) -> void end end end diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index 03ac25c8..b85325a4 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, type: :"response.output_item.added" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_item.added" def initialize: ( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_item.added" ) -> void end diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index 506bcabd..b61455c4 100644 --- a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, type: :"response.output_item.done" } @@ -13,11 +14,14 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_item.done" def initialize: 
( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_item.done" ) -> void end diff --git a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs index 82004cef..6e79267f 100644 --- a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +++ b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs @@ -8,6 +8,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.output_text_annotation.added" } @@ -22,6 +23,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_text_annotation.added" def initialize: ( @@ -30,6 +33,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_text_annotation.added" ) -> void end diff --git a/sig/openai/models/responses/response_queued_event.rbs b/sig/openai/models/responses/response_queued_event.rbs index f1334abf..c6311eda 100644 --- a/sig/openai/models/responses/response_queued_event.rbs +++ b/sig/openai/models/responses/response_queued_event.rbs @@ -2,15 +2,22 @@ module OpenAI module Models module Responses type response_queued_event = - { response: OpenAI::Responses::Response, type: :"response.queued" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.queued" + } class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.queued" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.queued" ) -> void end diff --git a/sig/openai/models/responses/response_reasoning_delta_event.rbs b/sig/openai/models/responses/response_reasoning_delta_event.rbs index 00b8c199..2d918544 100644 --- a/sig/openai/models/responses/response_reasoning_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_delta_event.rbs @@ -7,6 +7,7 @@ module OpenAI delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.reasoning.delta" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.reasoning.delta" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.reasoning.delta" ) -> void end diff --git a/sig/openai/models/responses/response_reasoning_done_event.rbs b/sig/openai/models/responses/response_reasoning_done_event.rbs index 75577efa..5cb2b130 100644 --- a/sig/openai/models/responses/response_reasoning_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: :"response.reasoning.done" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor text: String attr_accessor type: :"response.reasoning.done" @@ -25,6 +28,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, ?type: 
:"response.reasoning.done" ) -> void diff --git a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs index c745f654..0bed2549 100644 --- a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary.delta" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary.delta" @@ -25,6 +28,7 @@ module OpenAI delta: top, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary.delta" ) -> void diff --git a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs index bdde17bb..e71572ba 100644 --- a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: :"response.reasoning_summary.done" @@ -15,6 +16,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor text: String @@ -24,6 +27,7 @@ module OpenAI def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, ?type: :"response.reasoning_summary.done" diff --git a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs index 05c6f71d..f822b599 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs @@ -6,6 +6,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_part.added" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_part.added" @@ -25,6 +28,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_part.added" ) -> void diff --git a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs index c03f6cf4..ee6f29c0 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_part.done" 
} @@ -17,6 +18,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_part.done" @@ -25,6 +28,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_part.done" ) -> void diff --git a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs index 494fa8c3..fcede6d5 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_text.delta" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_text.delta" @@ -25,6 +28,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_text.delta" ) -> void diff --git a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs index 36bb9006..1fd8a58d 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: :"response.reasoning_summary_text.done" @@ -15,6 +16,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor text: String @@ -24,6 +27,7 @@ module OpenAI def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, ?type: :"response.reasoning_summary_text.done" diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index 2dc7df34..718ab10b 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -7,6 +7,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.refusal.delta" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.refusal.delta" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.refusal.delta" ) -> void end diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 852fbb7b..1ad63da9 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -7,6 +7,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: 
String, + sequence_number: Integer, type: :"response.refusal.done" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor refusal: String + attr_accessor sequence_number: Integer + attr_accessor type: :"response.refusal.done" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, ?type: :"response.refusal.done" ) -> void end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 36d79ace..410471dc 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -8,6 +8,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.output_text.annotation.added" } @@ -22,6 +23,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_text.annotation.added" def initialize: ( @@ -30,6 +33,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_text.annotation.added" ) -> void diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index c5025499..6c7fb1db 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -7,6 +7,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.output_text.delta" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_text.delta" def initialize: ( @@ -26,6 +29,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_text.delta" ) -> void end diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 0585e9f1..427e7b51 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: :"response.output_text.done" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor text: String attr_accessor type: :"response.output_text.done" @@ -25,6 +28,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, ?type: :"response.output_text.done" ) -> void diff --git a/sig/openai/resources/containers.rbs b/sig/openai/resources/containers.rbs new file mode 100644 index 00000000..dfbe77b8 --- /dev/null +++ b/sig/openai/resources/containers.rbs @@ -0,0 +1,33 @@ +module OpenAI + module Resources + class Containers + attr_reader files: OpenAI::Resources::Containers::Files + + def create: ( + name: String, + ?expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ContainerCreateResponse + + def retrieve: ( + String container_id, + ?request_options: OpenAI::request_opts + ) -> 
OpenAI::Models::ContainerRetrieveResponse + + def list: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::ContainerListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::ContainerListResponse] + + def delete: ( + String container_id, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: OpenAI::Client) -> void + end + end +end diff --git a/sig/openai/resources/containers/files.rbs b/sig/openai/resources/containers/files.rbs new file mode 100644 index 00000000..561898fb --- /dev/null +++ b/sig/openai/resources/containers/files.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Resources + class Containers + class Files + attr_reader content: OpenAI::Resources::Containers::Files::Content + + def create: ( + String container_id, + ?file: OpenAI::Internal::file_input, + ?file_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Containers::FileCreateResponse + + def retrieve: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Containers::FileRetrieveResponse + + def list: ( + String container_id, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Containers::FileListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Containers::FileListResponse] + + def delete: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/containers/files/content.rbs b/sig/openai/resources/containers/files/content.rbs new file mode 100644 index 00000000..898c9988 --- /dev/null +++ b/sig/openai/resources/containers/files/content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Resources + class Containers + class Files + class Content + def retrieve: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: OpenAI::Client) -> void + end + end + end + end +end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 4a37f2c8..97ccd557 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -60,6 +60,11 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> nil + def cancel: ( + String response_id, + ?request_options: OpenAI::request_opts + ) -> nil + def initialize: (client: OpenAI::Client) -> void end end diff --git a/test/openai/resource_namespaces.rb b/test/openai/resource_namespaces.rb index dc01c156..5134069c 100644 --- a/test/openai/resource_namespaces.rb +++ b/test/openai/resource_namespaces.rb @@ -27,11 +27,19 @@ module Checkpoints module Completions end + module Containers + module Files + end + end + module Evals module Runs end end + module Files + end + module FineTuning module Alpha end diff --git a/test/openai/resources/containers/files/content_test.rb b/test/openai/resources/containers/files/content_test.rb new file mode 100644 index 00000000..994b2806 --- /dev/null +++ b/test/openai/resources/containers/files/content_test.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::Containers::Files::ContentTest < OpenAI::Test::ResourceTest + def test_retrieve_required_params + response = @openai.containers.files.content.retrieve("file_id", container_id: "container_id") + + assert_pattern do + response => nil + end 
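Composed end to end, the new containers surface works as sketched below: create a container, attach a file, read it back through the nested `content` resource, then clean up. Method shapes are taken from the `.rbs` signatures above; the client, container name, and file path are placeholders:

require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

container = client.containers.create(name: "scratch-space")
file = client.containers.files.create(container.id, file: Pathname("./notes.txt"))

# Per the signature above, content.retrieve is typed `-> nil` here,
# so the raw bytes are not yet modeled on the return value.
client.containers.files.content.retrieve(file.id, container_id: container.id)

# List files newest-first; pages expose an Enumerator via #to_enum.
client.containers.files.list(container.id, order: :desc).to_enum.each do |f|
  puts "#{f.path} (#{f.bytes} bytes)"
end

client.containers.files.delete(file.id, container_id: container.id)
client.containers.delete(container.id)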
+  end
+end
diff --git a/test/openai/resources/containers/files_test.rb b/test/openai/resources/containers/files_test.rb
new file mode 100644
index 00000000..d2522cc7
--- /dev/null
+++ b/test/openai/resources/containers/files_test.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require_relative "../../test_helper"
+
+class OpenAI::Test::Resources::Containers::FilesTest < OpenAI::Test::ResourceTest
+  def test_create
+    response = @openai.containers.files.create("container_id")
+
+    assert_pattern do
+      response => OpenAI::Models::Containers::FileCreateResponse
+    end
+
+    assert_pattern do
+      response => {
+        id: String,
+        bytes: Integer,
+        container_id: String,
+        created_at: Integer,
+        object: Symbol,
+        path: String,
+        source: String
+      }
+    end
+  end
+
+  def test_retrieve_required_params
+    response = @openai.containers.files.retrieve("file_id", container_id: "container_id")
+
+    assert_pattern do
+      response => OpenAI::Models::Containers::FileRetrieveResponse
+    end
+
+    assert_pattern do
+      response => {
+        id: String,
+        bytes: Integer,
+        container_id: String,
+        created_at: Integer,
+        object: Symbol,
+        path: String,
+        source: String
+      }
+    end
+  end
+
+  def test_list
+    response = @openai.containers.files.list("container_id")
+
+    assert_pattern do
+      response => OpenAI::Internal::CursorPage
+    end
+
+    row = response.to_enum.first
+    return if row.nil?
+
+    assert_pattern do
+      row => OpenAI::Models::Containers::FileListResponse
+    end
+
+    assert_pattern do
+      row => {
+        id: String,
+        bytes: Integer,
+        container_id: String,
+        created_at: Integer,
+        object: Symbol,
+        path: String,
+        source: String
+      }
+    end
+  end
+
+  def test_delete_required_params
+    response = @openai.containers.files.delete("file_id", container_id: "container_id")
+
+    assert_pattern do
+      response => nil
+    end
+  end
+end
diff --git a/test/openai/resources/containers_test.rb b/test/openai/resources/containers_test.rb
new file mode 100644
index 00000000..a04742ee
--- /dev/null
+++ b/test/openai/resources/containers_test.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require_relative "../test_helper"
+
+class OpenAI::Test::Resources::ContainersTest < OpenAI::Test::ResourceTest
+  def test_create_required_params
+    response = @openai.containers.create(name: "name")
+
+    assert_pattern do
+      response => OpenAI::Models::ContainerCreateResponse
+    end
+
+    assert_pattern do
+      response => {
+        id: String,
+        created_at: Integer,
+        name: String,
+        object: String,
+        status: String,
+        expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter | nil
+      }
+    end
+  end
+
+  def test_retrieve
+    response = @openai.containers.retrieve("container_id")
+
+    assert_pattern do
+      response => OpenAI::Models::ContainerRetrieveResponse
+    end
+
+    assert_pattern do
+      response => {
+        id: String,
+        created_at: Integer,
+        name: String,
+        object: String,
+        status: String,
+        expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter | nil
+      }
+    end
+  end
+
+  def test_list
+    response = @openai.containers.list
+
+    assert_pattern do
+      response => OpenAI::Internal::CursorPage
+    end
+
+    row = response.to_enum.first
+    return if row.nil?
+ + assert_pattern do + row => OpenAI::Models::ContainerListResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter | nil + } + end + end + + def test_delete + response = @openai.containers.delete("container_id") + + assert_pattern do + response => nil + end + end +end diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 04b15b54..deb7c605 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -84,4 +84,12 @@ def test_delete response => nil end end + + def test_cancel + response = @openai.responses.cancel("resp_677efb5139a88190b512bc3fef8e535d") + + assert_pattern do + response => nil + end + end end From db027223dfb7ab49e649287be1fbb72d11fc0236 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 01:22:17 +0000 Subject: [PATCH 204/295] chore: use fully qualified names for yard annotations and rbs aliases --- lib/openai/models/all_models.rb | 2 +- .../models/audio/speech_create_params.rb | 36 ++-- lib/openai/models/audio/transcription.rb | 6 +- .../audio/transcription_create_params.rb | 34 ++-- .../audio/transcription_create_response.rb | 2 +- .../models/audio/transcription_segment.rb | 2 +- .../audio/transcription_stream_event.rb | 2 +- .../audio/transcription_text_delta_event.rb | 8 +- .../audio/transcription_text_done_event.rb | 8 +- .../models/audio/transcription_verbose.rb | 8 +- .../models/audio/translation_create_params.rb | 10 +- .../audio/translation_create_response.rb | 2 +- .../models/audio/translation_verbose.rb | 4 +- lib/openai/models/batch.rb | 24 +-- lib/openai/models/batch_create_params.rb | 8 +- lib/openai/models/beta/assistant.rb | 35 ++-- .../models/beta/assistant_create_params.rb | 64 +++--- .../models/beta/assistant_list_params.rb | 4 +- .../beta/assistant_response_format_option.rb | 2 +- .../models/beta/assistant_stream_event.rb | 162 +++++++++------- lib/openai/models/beta/assistant_tool.rb | 2 +- .../models/beta/assistant_tool_choice.rb | 10 +- .../beta/assistant_tool_choice_option.rb | 2 +- .../models/beta/assistant_update_params.rb | 112 +++++------ lib/openai/models/beta/file_search_tool.rb | 23 +-- lib/openai/models/beta/function_tool.rb | 4 +- .../models/beta/message_stream_event.rb | 36 ++-- .../models/beta/run_step_stream_event.rb | 50 ++--- lib/openai/models/beta/run_stream_event.rb | 63 +++--- lib/openai/models/beta/thread.rb | 26 +-- .../beta/thread_create_and_run_params.rb | 142 +++++++------- .../models/beta/thread_create_params.rb | 79 ++++---- lib/openai/models/beta/thread_stream_event.rb | 6 +- .../models/beta/thread_update_params.rb | 23 +-- lib/openai/models/beta/threads/annotation.rb | 2 +- .../models/beta/threads/annotation_delta.rb | 2 +- .../beta/threads/file_citation_annotation.rb | 6 +- .../threads/file_citation_delta_annotation.rb | 6 +- .../beta/threads/file_path_annotation.rb | 6 +- .../threads/file_path_delta_annotation.rb | 6 +- lib/openai/models/beta/threads/image_file.rb | 8 +- .../beta/threads/image_file_content_block.rb | 4 +- .../models/beta/threads/image_file_delta.rb | 8 +- .../beta/threads/image_file_delta_block.rb | 4 +- lib/openai/models/beta/threads/image_url.rb | 8 +- .../beta/threads/image_url_content_block.rb | 4 +- .../models/beta/threads/image_url_delta.rb | 8 +- .../beta/threads/image_url_delta_block.rb 
| 4 +- lib/openai/models/beta/threads/message.rb | 40 ++-- .../models/beta/threads/message_content.rb | 2 +- .../beta/threads/message_content_delta.rb | 2 +- .../threads/message_content_part_param.rb | 2 +- .../beta/threads/message_create_params.rb | 22 +-- .../models/beta/threads/message_delta.rb | 10 +- .../beta/threads/message_delta_event.rb | 4 +- .../beta/threads/message_list_params.rb | 4 +- .../required_action_function_tool_call.rb | 9 +- lib/openai/models/beta/threads/run.rb | 80 ++++---- .../models/beta/threads/run_create_params.rb | 74 +++---- .../models/beta/threads/run_list_params.rb | 4 +- .../threads/run_submit_tool_outputs_params.rb | 6 +- .../runs/code_interpreter_output_image.rb | 10 +- .../runs/code_interpreter_tool_call.rb | 26 +-- .../runs/code_interpreter_tool_call_delta.rb | 19 +- .../threads/runs/file_search_tool_call.rb | 40 ++-- .../runs/file_search_tool_call_delta.rb | 2 +- .../beta/threads/runs/function_tool_call.rb | 11 +- .../threads/runs/function_tool_call_delta.rb | 11 +- .../runs/message_creation_step_details.rb | 6 +- .../models/beta/threads/runs/run_step.rb | 40 ++-- .../beta/threads/runs/run_step_delta.rb | 8 +- .../beta/threads/runs/run_step_delta_event.rb | 4 +- .../runs/run_step_delta_message_delta.rb | 6 +- .../beta/threads/runs/step_list_params.rb | 8 +- .../beta/threads/runs/step_retrieve_params.rb | 4 +- .../models/beta/threads/runs/tool_call.rb | 2 +- .../beta/threads/runs/tool_call_delta.rb | 2 +- .../threads/runs/tool_call_delta_object.rb | 6 +- .../threads/runs/tool_calls_step_details.rb | 6 +- lib/openai/models/beta/threads/text.rb | 4 +- .../models/beta/threads/text_content_block.rb | 4 +- lib/openai/models/beta/threads/text_delta.rb | 4 +- .../models/beta/threads/text_delta_block.rb | 4 +- lib/openai/models/chat/chat_completion.rb | 42 ++-- ...chat_completion_assistant_message_param.rb | 37 ++-- .../models/chat/chat_completion_audio.rb | 2 +- .../chat/chat_completion_audio_param.rb | 36 ++-- .../models/chat/chat_completion_chunk.rb | 82 ++++---- .../chat/chat_completion_content_part.rb | 10 +- .../chat_completion_content_part_image.rb | 15 +- ...hat_completion_content_part_input_audio.rb | 14 +- ...chat_completion_developer_message_param.rb | 14 +- .../models/chat/chat_completion_message.rb | 28 +-- .../chat/chat_completion_message_param.rb | 2 +- .../chat/chat_completion_message_tool_call.rb | 9 +- .../chat/chat_completion_named_tool_choice.rb | 6 +- .../chat_completion_prediction_content.rb | 14 +- .../chat/chat_completion_stream_options.rb | 2 +- .../chat_completion_system_message_param.rb | 14 +- .../chat/chat_completion_token_logprob.rb | 8 +- .../models/chat/chat_completion_tool.rb | 4 +- .../chat_completion_tool_choice_option.rb | 2 +- .../chat_completion_tool_message_param.rb | 12 +- .../chat_completion_user_message_param.rb | 12 +- .../models/chat/completion_create_params.rb | 93 ++++----- .../models/chat/completion_list_params.rb | 4 +- .../chat/completions/message_list_params.rb | 4 +- lib/openai/models/comparison_filter.rb | 12 +- lib/openai/models/completion.rb | 12 +- lib/openai/models/completion_choice.rb | 16 +- lib/openai/models/completion_create_params.rb | 22 +-- lib/openai/models/completion_usage.rb | 14 +- lib/openai/models/compound_filter.rb | 16 +- lib/openai/models/container_create_params.rb | 12 +- lib/openai/models/container_list_params.rb | 4 +- .../models/containers/file_list_params.rb | 4 +- .../models/create_embedding_response.rb | 10 +- lib/openai/models/embedding.rb | 4 +- 
lib/openai/models/embedding_create_params.rb | 16 +- lib/openai/models/eval_create_params.rb | 49 ++--- lib/openai/models/eval_create_response.rb | 12 +- .../models/eval_custom_data_source_config.rb | 2 +- lib/openai/models/eval_list_params.rb | 8 +- lib/openai/models/eval_list_response.rb | 12 +- lib/openai/models/eval_retrieve_response.rb | 12 +- ...l_stored_completions_data_source_config.rb | 2 +- lib/openai/models/eval_update_response.rb | 12 +- ...create_eval_completions_run_data_source.rb | 70 +++---- .../create_eval_jsonl_run_data_source.rb | 12 +- .../models/evals/run_cancel_response.rb | 20 +- lib/openai/models/evals/run_create_params.rb | 78 ++++---- .../models/evals/run_create_response.rb | 20 +- lib/openai/models/evals/run_list_params.rb | 8 +- lib/openai/models/evals/run_list_response.rb | 20 +- .../models/evals/run_retrieve_response.rb | 20 +- .../evals/runs/output_item_list_params.rb | 8 +- .../evals/runs/output_item_list_response.rb | 4 +- .../runs/output_item_retrieve_response.rb | 4 +- lib/openai/models/file_chunking_strategy.rb | 2 +- .../models/file_chunking_strategy_param.rb | 2 +- lib/openai/models/file_create_params.rb | 4 +- lib/openai/models/file_list_params.rb | 4 +- lib/openai/models/file_object.rb | 16 +- .../fine_tuning/alpha/grader_run_params.rb | 8 +- .../alpha/grader_validate_params.rb | 6 +- .../alpha/grader_validate_response.rb | 6 +- .../checkpoints/permission_retrieve_params.rb | 4 +- .../models/fine_tuning/dpo_hyperparameters.rb | 10 +- lib/openai/models/fine_tuning/dpo_method.rb | 4 +- .../models/fine_tuning/fine_tuning_job.rb | 58 +++--- .../fine_tuning/fine_tuning_job_event.rb | 12 +- .../fine_tuning_job_wandb_integration.rb | 2 +- ...ine_tuning_job_wandb_integration_object.rb | 7 +- .../models/fine_tuning/job_create_params.rb | 61 +++--- .../jobs/fine_tuning_job_checkpoint.rb | 6 +- .../reinforcement_hyperparameters.rb | 20 +- .../fine_tuning/reinforcement_method.rb | 12 +- .../fine_tuning/supervised_hyperparameters.rb | 8 +- .../models/fine_tuning/supervised_method.rb | 4 +- lib/openai/models/function_definition.rb | 2 +- .../models/graders/label_model_grader.rb | 28 +-- lib/openai/models/graders/multi_grader.rb | 6 +- .../models/graders/score_model_grader.rb | 28 +-- .../models/graders/string_check_grader.rb | 6 +- .../models/graders/text_similarity_grader.rb | 8 +- lib/openai/models/image.rb | 4 +- .../models/image_create_variation_params.rb | 14 +- lib/openai/models/image_edit_params.rb | 24 +-- lib/openai/models/image_generate_params.rb | 34 ++-- lib/openai/models/images_response.rb | 20 +- lib/openai/models/moderation.rb | 76 ++++---- lib/openai/models/moderation_create_params.rb | 16 +- .../models/moderation_create_response.rb | 4 +- .../models/moderation_image_url_input.rb | 6 +- .../models/moderation_multi_modal_input.rb | 2 +- lib/openai/models/reasoning.rb | 20 +- .../models/response_format_json_schema.rb | 10 +- lib/openai/models/responses/computer_tool.rb | 6 +- .../models/responses/easy_input_message.rb | 22 +-- .../models/responses/file_search_tool.rb | 24 +-- lib/openai/models/responses/function_tool.rb | 2 +- .../responses/input_item_list_params.rb | 8 +- lib/openai/models/responses/response.rb | 66 +++---- .../responses/response_audio_delta_event.rb | 2 +- .../responses/response_audio_done_event.rb | 2 +- .../response_audio_transcript_delta_event.rb | 2 +- .../response_audio_transcript_done_event.rb | 2 +- ..._code_interpreter_call_code_delta_event.rb | 3 +- ...e_code_interpreter_call_code_done_event.rb | 3 +- 
...e_code_interpreter_call_completed_event.rb | 7 +- ...code_interpreter_call_in_progress_event.rb | 6 +- ...ode_interpreter_call_interpreting_event.rb | 8 +- .../response_code_interpreter_tool_call.rb | 30 +-- .../responses/response_completed_event.rb | 6 +- .../responses/response_computer_tool_call.rb | 68 ++++--- ...response_computer_tool_call_output_item.rb | 17 +- ...se_computer_tool_call_output_screenshot.rb | 3 +- .../models/responses/response_content.rb | 2 +- .../response_content_part_added_event.rb | 10 +- .../response_content_part_done_event.rb | 10 +- .../responses/response_create_params.rb | 40 ++-- .../responses/response_created_event.rb | 6 +- lib/openai/models/responses/response_error.rb | 8 +- .../models/responses/response_error_event.rb | 2 +- .../models/responses/response_failed_event.rb | 6 +- ...sponse_file_search_call_completed_event.rb | 3 +- ...onse_file_search_call_in_progress_event.rb | 3 +- ...sponse_file_search_call_searching_event.rb | 3 +- .../response_file_search_tool_call.rb | 15 +- .../responses/response_format_text_config.rb | 2 +- ...response_format_text_json_schema_config.rb | 3 +- ...nse_function_call_arguments_delta_event.rb | 3 +- .../responses/response_function_tool_call.rb | 8 +- .../response_function_tool_call_item.rb | 2 +- ...response_function_tool_call_output_item.rb | 9 +- .../responses/response_function_web_search.rb | 8 +- ...onse_image_gen_call_partial_image_event.rb | 3 +- .../responses/response_in_progress_event.rb | 6 +- .../responses/response_incomplete_event.rb | 6 +- .../models/responses/response_input_audio.rb | 8 +- .../responses/response_input_content.rb | 2 +- .../models/responses/response_input_file.rb | 2 +- .../models/responses/response_input_image.rb | 8 +- .../models/responses/response_input_item.rb | 110 ++++++----- .../responses/response_input_message_item.rb | 24 +-- lib/openai/models/responses/response_item.rb | 50 ++--- .../models/responses/response_item_list.rb | 4 +- .../models/responses/response_output_audio.rb | 2 +- .../models/responses/response_output_item.rb | 42 ++-- .../response_output_item_added_event.rb | 6 +- .../response_output_item_done_event.rb | 6 +- .../responses/response_output_message.rb | 14 +- .../models/responses/response_output_text.rb | 9 +- .../models/responses/response_queued_event.rb | 4 +- .../responses/response_reasoning_item.rb | 14 +- .../response_reasoning_summary_delta_event.rb | 3 +- ...onse_reasoning_summary_part_added_event.rb | 9 +- ...ponse_reasoning_summary_part_done_event.rb | 9 +- ...onse_reasoning_summary_text_delta_event.rb | 3 +- ...ponse_reasoning_summary_text_done_event.rb | 3 +- .../responses/response_refusal_delta_event.rb | 2 +- .../responses/response_refusal_done_event.rb | 2 +- .../responses/response_retrieve_params.rb | 4 +- .../models/responses/response_stream_event.rb | 2 +- .../response_text_annotation_delta_event.rb | 14 +- .../models/responses/response_text_config.rb | 6 +- .../responses/response_text_delta_event.rb | 2 +- .../responses/response_text_done_event.rb | 2 +- lib/openai/models/responses/response_usage.rb | 14 +- ...esponse_web_search_call_completed_event.rb | 3 +- ...ponse_web_search_call_in_progress_event.rb | 3 +- ...esponse_web_search_call_searching_event.rb | 3 +- lib/openai/models/responses/tool.rb | 97 +++++----- .../models/responses/tool_choice_types.rb | 8 +- .../models/responses/web_search_tool.rb | 22 +-- lib/openai/models/responses_model.rb | 2 +- .../models/static_file_chunking_strategy.rb | 2 +- .../static_file_chunking_strategy_object.rb | 
4 +- ...tic_file_chunking_strategy_object_param.rb | 4 +- lib/openai/models/upload.rb | 14 +- lib/openai/models/upload_create_params.rb | 4 +- lib/openai/models/vector_store.rb | 24 +-- .../models/vector_store_create_params.rb | 10 +- lib/openai/models/vector_store_list_params.rb | 4 +- .../models/vector_store_search_params.rb | 18 +- .../models/vector_store_update_params.rb | 6 +- .../vector_stores/file_batch_create_params.rb | 4 +- .../file_batch_list_files_params.rb | 8 +- .../vector_stores/file_create_params.rb | 4 +- .../models/vector_stores/file_list_params.rb | 8 +- .../models/vector_stores/vector_store_file.rb | 24 +-- .../vector_stores/vector_store_file_batch.rb | 14 +- lib/openai/resources/audio/speech.rb | 6 +- lib/openai/resources/audio/transcriptions.rb | 24 +-- lib/openai/resources/audio/translations.rb | 6 +- lib/openai/resources/batches.rb | 12 +- lib/openai/resources/beta/assistants.rb | 32 +-- lib/openai/resources/beta/threads.rb | 46 ++--- lib/openai/resources/beta/threads/messages.rb | 18 +- lib/openai/resources/beta/threads/runs.rb | 54 +++--- .../resources/beta/threads/runs/steps.rb | 10 +- lib/openai/resources/chat/completions.rb | 70 +++---- .../resources/chat/completions/messages.rb | 4 +- lib/openai/resources/completions.rb | 12 +- lib/openai/resources/containers.rb | 4 +- lib/openai/resources/containers/files.rb | 2 +- lib/openai/resources/embeddings.rb | 6 +- lib/openai/resources/evals.rb | 8 +- lib/openai/resources/evals/runs.rb | 6 +- .../resources/evals/runs/output_items.rb | 4 +- lib/openai/resources/files.rb | 12 +- .../resources/fine_tuning/alpha/graders.rb | 4 +- .../fine_tuning/checkpoints/permissions.rb | 2 +- lib/openai/resources/fine_tuning/jobs.rb | 22 +-- .../resources/fine_tuning/jobs/checkpoints.rb | 2 +- lib/openai/resources/images.rb | 38 ++-- lib/openai/resources/models.rb | 6 +- lib/openai/resources/moderations.rb | 4 +- lib/openai/resources/responses.rb | 44 ++--- lib/openai/resources/responses/input_items.rb | 6 +- lib/openai/resources/uploads.rb | 8 +- lib/openai/resources/uploads/parts.rb | 2 +- lib/openai/resources/vector_stores.rb | 22 +-- .../resources/vector_stores/file_batches.rb | 14 +- lib/openai/resources/vector_stores/files.rb | 16 +- rbi/openai/models/eval_create_response.rbi | 8 +- rbi/openai/models/eval_list_response.rbi | 8 +- rbi/openai/models/eval_retrieve_response.rbi | 8 +- rbi/openai/models/eval_update_response.rbi | 8 +- sig/openai/models/all_models.rbs | 4 +- .../models/audio/speech_create_params.rbs | 10 + sig/openai/models/audio/transcription.rbs | 11 ++ .../audio/transcription_create_params.rbs | 40 +++- .../models/audio/transcription_segment.rbs | 13 ++ .../audio/transcription_text_delta_event.rbs | 8 + .../audio/transcription_text_done_event.rbs | 8 + .../models/audio/transcription_verbose.rbs | 8 + .../models/audio/transcription_word.rbs | 2 + sig/openai/models/audio/translation.rbs | 2 + .../audio/translation_create_params.rbs | 9 + .../models/audio/translation_verbose.rbs | 7 + .../auto_file_chunking_strategy_param.rbs | 2 + sig/openai/models/batch.rbs | 33 +++- sig/openai/models/batch_cancel_params.rbs | 2 + sig/openai/models/batch_create_params.rbs | 8 + sig/openai/models/batch_error.rbs | 7 + sig/openai/models/batch_list_params.rbs | 6 + sig/openai/models/batch_request_counts.rbs | 2 + sig/openai/models/batch_retrieve_params.rbs | 2 + sig/openai/models/beta/assistant.rbs | 25 +++ .../models/beta/assistant_create_params.rbs | 57 +++++- .../models/beta/assistant_delete_params.rbs | 2 + 
sig/openai/models/beta/assistant_deleted.rbs | 6 + .../models/beta/assistant_list_params.rbs | 8 + .../models/beta/assistant_retrieve_params.rbs | 2 + .../models/beta/assistant_stream_event.rbs | 118 +++++++++++ .../models/beta/assistant_tool_choice.rbs | 13 +- .../beta/assistant_tool_choice_function.rbs | 2 + .../beta/assistant_tool_choice_option.rbs | 4 +- .../models/beta/assistant_update_params.rbs | 24 +++ .../models/beta/code_interpreter_tool.rbs | 2 + sig/openai/models/beta/file_search_tool.rbs | 27 ++- sig/openai/models/beta/function_tool.rbs | 5 + .../models/beta/message_stream_event.rbs | 25 +++ .../models/beta/run_step_stream_event.rbs | 35 ++++ sig/openai/models/beta/run_stream_event.rbs | 50 +++++ sig/openai/models/beta/thread.rbs | 17 ++ .../beta/thread_create_and_run_params.rbs | 131 ++++++++++--- .../models/beta/thread_create_params.rbs | 91 +++++++-- .../models/beta/thread_delete_params.rbs | 2 + sig/openai/models/beta/thread_deleted.rbs | 2 + .../models/beta/thread_retrieve_params.rbs | 2 + .../models/beta/thread_stream_event.rbs | 6 + .../models/beta/thread_update_params.rbs | 15 ++ .../beta/threads/file_citation_annotation.rbs | 10 + .../file_citation_delta_annotation.rbs | 11 ++ .../beta/threads/file_path_annotation.rbs | 10 + .../threads/file_path_delta_annotation.rbs | 11 ++ sig/openai/models/beta/threads/image_file.rbs | 20 +- .../beta/threads/image_file_content_block.rbs | 5 + .../models/beta/threads/image_file_delta.rbs | 17 +- .../beta/threads/image_file_delta_block.rbs | 6 + sig/openai/models/beta/threads/image_url.rbs | 20 +- .../beta/threads/image_url_content_block.rbs | 5 + .../models/beta/threads/image_url_delta.rbs | 20 +- .../beta/threads/image_url_delta_block.rbs | 6 + sig/openai/models/beta/threads/message.rbs | 64 ++++-- .../beta/threads/message_create_params.rbs | 27 ++- .../beta/threads/message_delete_params.rbs | 5 + .../models/beta/threads/message_deleted.rbs | 6 + .../models/beta/threads/message_delta.rbs | 17 +- .../beta/threads/message_delta_event.rbs | 6 + .../beta/threads/message_list_params.rbs | 9 + .../beta/threads/message_retrieve_params.rbs | 5 + .../beta/threads/message_update_params.rbs | 6 + .../beta/threads/refusal_content_block.rbs | 2 + .../beta/threads/refusal_delta_block.rbs | 2 + .../required_action_function_tool_call.rbs | 8 + sig/openai/models/beta/threads/run.rbs | 89 +++++++-- .../models/beta/threads/run_cancel_params.rbs | 5 + .../models/beta/threads/run_create_params.rbs | 76 ++++++-- .../models/beta/threads/run_list_params.rbs | 8 + .../beta/threads/run_retrieve_params.rbs | 5 + .../run_submit_tool_outputs_params.rbs | 8 + .../models/beta/threads/run_update_params.rbs | 6 + .../threads/runs/code_interpreter_logs.rbs | 2 + .../runs/code_interpreter_output_image.rbs | 8 + .../runs/code_interpreter_tool_call.rbs | 28 ++- .../runs/code_interpreter_tool_call_delta.rbs | 24 ++- .../threads/runs/file_search_tool_call.rbs | 48 ++++- .../runs/file_search_tool_call_delta.rbs | 7 + .../beta/threads/runs/function_tool_call.rbs | 12 ++ .../threads/runs/function_tool_call_delta.rbs | 13 ++ .../runs/message_creation_step_details.rbs | 7 + .../models/beta/threads/runs/run_step.rbs | 62 ++++-- .../beta/threads/runs/run_step_delta.rbs | 16 +- .../threads/runs/run_step_delta_event.rbs | 6 + .../runs/run_step_delta_message_delta.rbs | 7 + .../beta/threads/runs/step_list_params.rbs | 10 + .../threads/runs/step_retrieve_params.rbs | 7 + .../threads/runs/tool_call_delta_object.rbs | 5 + .../threads/runs/tool_calls_step_details.rbs | 5 + 
sig/openai/models/beta/threads/text.rbs | 5 + .../beta/threads/text_content_block.rbs | 2 + .../beta/threads/text_content_block_param.rbs | 2 + sig/openai/models/beta/threads/text_delta.rbs | 5 + .../models/beta/threads/text_delta_block.rbs | 6 + sig/openai/models/chat/chat_completion.rbs | 39 +++- ...hat_completion_assistant_message_param.rbs | 26 ++- .../models/chat/chat_completion_audio.rbs | 7 + .../chat/chat_completion_audio_param.rbs | 21 +- .../models/chat/chat_completion_chunk.rbs | 82 ++++++-- .../chat/chat_completion_content_part.rbs | 11 ++ .../chat_completion_content_part_image.rbs | 22 ++- ...at_completion_content_part_input_audio.rbs | 18 +- .../chat_completion_content_part_refusal.rbs | 2 + .../chat_completion_content_part_text.rbs | 2 + .../models/chat/chat_completion_deleted.rbs | 6 + ...hat_completion_developer_message_param.rbs | 14 +- .../chat_completion_function_call_option.rbs | 2 + ...chat_completion_function_message_param.rbs | 2 + .../models/chat/chat_completion_message.rbs | 24 +++ .../chat_completion_message_tool_call.rbs | 8 + .../chat_completion_named_tool_choice.rbs | 7 + .../chat_completion_prediction_content.rbs | 13 +- .../chat/chat_completion_store_message.rbs | 2 + .../chat/chat_completion_stream_options.rbs | 2 + .../chat_completion_system_message_param.rbs | 14 +- .../chat/chat_completion_token_logprob.rbs | 13 ++ .../models/chat/chat_completion_tool.rbs | 5 + .../chat_completion_tool_choice_option.rbs | 4 +- .../chat_completion_tool_message_param.rbs | 14 +- .../chat_completion_user_message_param.rbs | 14 +- .../models/chat/completion_create_params.rbs | 81 ++++++-- .../models/chat/completion_delete_params.rbs | 2 + .../models/chat/completion_list_params.rbs | 9 + .../chat/completion_retrieve_params.rbs | 2 + .../models/chat/completion_update_params.rbs | 5 + .../chat/completions/message_list_params.rbs | 7 + sig/openai/models/comparison_filter.rbs | 22 ++- sig/openai/models/completion.rbs | 10 + sig/openai/models/completion_choice.rbs | 22 ++- .../models/completion_create_params.rbs | 21 ++ sig/openai/models/completion_usage.rbs | 17 ++ sig/openai/models/compound_filter.rbs | 21 +- sig/openai/models/container_create_params.rbs | 20 +- .../models/container_create_response.rbs | 14 ++ sig/openai/models/container_delete_params.rbs | 2 + sig/openai/models/container_list_params.rbs | 7 + sig/openai/models/container_list_response.rbs | 14 ++ .../models/container_retrieve_params.rbs | 2 + .../models/container_retrieve_response.rbs | 14 ++ .../models/containers/file_create_params.rbs | 6 + .../containers/file_create_response.rbs | 10 + .../models/containers/file_delete_params.rbs | 5 + .../models/containers/file_list_params.rbs | 7 + .../models/containers/file_list_response.rbs | 10 + .../containers/file_retrieve_params.rbs | 5 + .../containers/file_retrieve_response.rbs | 10 + .../files/content_retrieve_params.rbs | 5 + .../models/create_embedding_response.rbs | 9 + sig/openai/models/embedding.rbs | 6 + sig/openai/models/embedding_create_params.rbs | 9 + sig/openai/models/error_object.rbs | 7 + sig/openai/models/eval_create_params.rbs | 90 ++++++--- sig/openai/models/eval_create_response.rbs | 26 ++- .../models/eval_custom_data_source_config.rbs | 2 + sig/openai/models/eval_delete_params.rbs | 2 + sig/openai/models/eval_delete_response.rbs | 2 + sig/openai/models/eval_list_params.rbs | 8 + sig/openai/models/eval_list_response.rbs | 26 ++- sig/openai/models/eval_retrieve_params.rbs | 2 + sig/openai/models/eval_retrieve_response.rbs | 26 ++- 
..._stored_completions_data_source_config.rbs | 6 + sig/openai/models/eval_update_params.rbs | 6 + sig/openai/models/eval_update_response.rbs | 26 ++- ...reate_eval_completions_run_data_source.rbs | 115 ++++++++--- .../create_eval_jsonl_run_data_source.rbs | 25 ++- sig/openai/models/evals/eval_api_error.rbs | 2 + sig/openai/models/evals/run_cancel_params.rbs | 5 + .../models/evals/run_cancel_response.rbs | 100 ++++++++++ sig/openai/models/evals/run_create_params.rbs | 132 ++++++++++--- .../models/evals/run_create_response.rbs | 100 ++++++++++ sig/openai/models/evals/run_delete_params.rbs | 5 + .../models/evals/run_delete_response.rbs | 2 + sig/openai/models/evals/run_list_params.rbs | 8 + sig/openai/models/evals/run_list_response.rbs | 100 ++++++++++ .../models/evals/run_retrieve_params.rbs | 5 + .../models/evals/run_retrieve_response.rbs | 100 ++++++++++ .../evals/runs/output_item_list_params.rbs | 9 + .../evals/runs/output_item_list_response.rbs | 37 ++++ .../runs/output_item_retrieve_params.rbs | 6 + .../runs/output_item_retrieve_response.rbs | 37 ++++ sig/openai/models/file_content_params.rbs | 2 + sig/openai/models/file_create_params.rbs | 6 + sig/openai/models/file_delete_params.rbs | 2 + sig/openai/models/file_deleted.rbs | 2 + sig/openai/models/file_list_params.rbs | 8 + sig/openai/models/file_object.rbs | 28 ++- sig/openai/models/file_retrieve_params.rbs | 2 + .../fine_tuning/alpha/grader_run_params.rbs | 7 + .../fine_tuning/alpha/grader_run_response.rbs | 34 ++++ .../alpha/grader_validate_params.rbs | 5 + .../alpha/grader_validate_response.rbs | 4 + .../checkpoints/permission_create_params.rbs | 5 + .../permission_create_response.rbs | 7 + .../checkpoints/permission_delete_params.rbs | 5 + .../permission_delete_response.rbs | 6 + .../permission_retrieve_params.rbs | 8 + .../permission_retrieve_response.rbs | 15 ++ .../fine_tuning/dpo_hyperparameters.rbs | 55 +++--- sig/openai/models/fine_tuning/dpo_method.rbs | 4 + .../models/fine_tuning/fine_tuning_job.rbs | 85 +++++--- .../fine_tuning/fine_tuning_job_event.rbs | 30 ++- .../fine_tuning_job_wandb_integration.rbs | 7 + ...ne_tuning_job_wandb_integration_object.rbs | 5 + .../models/fine_tuning/job_cancel_params.rbs | 2 + .../models/fine_tuning/job_create_params.rbs | 82 +++++--- .../fine_tuning/job_list_events_params.rbs | 6 + .../models/fine_tuning/job_list_params.rbs | 7 + .../models/fine_tuning/job_pause_params.rbs | 2 + .../models/fine_tuning/job_resume_params.rbs | 2 + .../fine_tuning/job_retrieve_params.rbs | 2 + .../jobs/checkpoint_list_params.rbs | 6 + .../jobs/fine_tuning_job_checkpoint.rbs | 20 ++ .../reinforcement_hyperparameters.rbs | 94 +++++---- .../fine_tuning/reinforcement_method.rbs | 13 +- .../supervised_hyperparameters.rbs | 42 ++-- .../models/fine_tuning/supervised_method.rbs | 4 + sig/openai/models/function_definition.rbs | 7 + .../models/graders/label_model_grader.rbs | 45 +++-- sig/openai/models/graders/multi_grader.rbs | 15 +- sig/openai/models/graders/python_grader.rbs | 7 + .../models/graders/score_model_grader.rbs | 45 +++-- .../models/graders/string_check_grader.rbs | 16 +- .../models/graders/text_similarity_grader.rbs | 16 +- sig/openai/models/image.rbs | 6 + .../models/image_create_variation_params.rbs | 10 + sig/openai/models/image_edit_params.rbs | 14 ++ sig/openai/models/image_generate_params.rbs | 16 ++ sig/openai/models/images_response.rbs | 15 ++ sig/openai/models/model.rbs | 7 + sig/openai/models/model_delete_params.rbs | 2 + sig/openai/models/model_deleted.rbs | 2 + 
sig/openai/models/model_list_params.rbs | 2 + sig/openai/models/model_retrieve_params.rbs | 2 + sig/openai/models/moderation.rbs | 159 ++++++++++----- .../models/moderation_create_params.rbs | 6 + .../models/moderation_create_response.rbs | 6 + .../models/moderation_image_url_input.rbs | 7 + sig/openai/models/moderation_text_input.rbs | 2 + .../other_file_chunking_strategy_object.rbs | 2 + sig/openai/models/reasoning.rbs | 22 ++- .../models/response_format_json_object.rbs | 2 + .../models/response_format_json_schema.rbs | 12 ++ sig/openai/models/response_format_text.rbs | 2 + sig/openai/models/responses/computer_tool.rbs | 15 +- .../models/responses/easy_input_message.rbs | 34 ++-- .../models/responses/file_search_tool.rbs | 33 +++- sig/openai/models/responses/function_tool.rbs | 8 + .../responses/input_item_list_params.rbs | 9 + sig/openai/models/responses/response.rbs | 69 +++++-- .../responses/response_audio_delta_event.rbs | 6 + .../responses/response_audio_done_event.rbs | 5 + .../response_audio_transcript_delta_event.rbs | 6 + .../response_audio_transcript_done_event.rbs | 5 + .../responses/response_cancel_params.rbs | 2 + ...code_interpreter_call_code_delta_event.rbs | 7 + ..._code_interpreter_call_code_done_event.rbs | 7 + ..._code_interpreter_call_completed_event.rbs | 7 + ...ode_interpreter_call_in_progress_event.rbs | 7 + ...de_interpreter_call_interpreting_event.rbs | 7 + .../response_code_interpreter_tool_call.rbs | 34 +++- .../responses/response_completed_event.rbs | 6 + .../responses/response_computer_tool_call.rbs | 77 ++++++-- ...esponse_computer_tool_call_output_item.rbs | 23 ++- ...e_computer_tool_call_output_screenshot.rbs | 6 + .../response_content_part_added_event.rbs | 17 +- .../response_content_part_done_event.rbs | 17 +- .../responses/response_create_params.rbs | 23 +++ .../responses/response_created_event.rbs | 6 + .../responses/response_delete_params.rbs | 2 + .../models/responses/response_error.rbs | 16 +- .../models/responses/response_error_event.rbs | 8 + .../responses/response_failed_event.rbs | 6 + ...ponse_file_search_call_completed_event.rbs | 7 + ...nse_file_search_call_in_progress_event.rbs | 7 + ...ponse_file_search_call_searching_event.rbs | 7 + .../response_file_search_tool_call.rbs | 32 ++- ...esponse_format_text_json_schema_config.rbs | 8 + ...se_function_call_arguments_delta_event.rbs | 8 + ...nse_function_call_arguments_done_event.rbs | 8 + .../responses/response_function_tool_call.rbs | 21 +- .../response_function_tool_call_item.rbs | 2 + ...esponse_function_tool_call_output_item.rbs | 20 +- .../response_function_web_search.rbs | 14 +- ...esponse_image_gen_call_completed_event.rbs | 7 + ...sponse_image_gen_call_generating_event.rbs | 7 + ...ponse_image_gen_call_in_progress_event.rbs | 7 + ...nse_image_gen_call_partial_image_event.rbs | 9 + .../responses/response_in_progress_event.rbs | 6 + .../responses/response_incomplete_event.rbs | 6 + .../models/responses/response_input_audio.rbs | 14 +- .../models/responses/response_input_file.rbs | 7 + .../models/responses/response_input_image.rbs | 15 +- .../models/responses/response_input_item.rbs | 183 ++++++++++++++---- .../responses/response_input_message_item.rbs | 40 ++-- .../models/responses/response_input_text.rbs | 2 + sig/openai/models/responses/response_item.rbs | 96 +++++++-- .../models/responses/response_item_list.rbs | 8 + ...esponse_mcp_call_arguments_delta_event.rbs | 8 + ...response_mcp_call_arguments_done_event.rbs | 8 + .../response_mcp_call_completed_event.rbs | 5 + 
.../response_mcp_call_failed_event.rbs | 5 + .../response_mcp_call_in_progress_event.rbs | 7 + ...esponse_mcp_list_tools_completed_event.rbs | 5 + .../response_mcp_list_tools_failed_event.rbs | 5 + ...ponse_mcp_list_tools_in_progress_event.rbs | 5 + .../responses/response_output_audio.rbs | 6 + .../models/responses/response_output_item.rbs | 73 ++++++- .../response_output_item_added_event.rbs | 7 + .../response_output_item_done_event.rbs | 7 + .../responses/response_output_message.rbs | 24 ++- .../responses/response_output_refusal.rbs | 2 + .../models/responses/response_output_text.rbs | 34 +++- ...nse_output_text_annotation_added_event.rbs | 10 + .../responses/response_queued_event.rbs | 6 + .../response_reasoning_delta_event.rbs | 9 + .../response_reasoning_done_event.rbs | 9 + .../responses/response_reasoning_item.rbs | 22 ++- ...response_reasoning_summary_delta_event.rbs | 9 + .../response_reasoning_summary_done_event.rbs | 9 + ...nse_reasoning_summary_part_added_event.rbs | 11 ++ ...onse_reasoning_summary_part_done_event.rbs | 11 ++ ...nse_reasoning_summary_text_delta_event.rbs | 9 + ...onse_reasoning_summary_text_done_event.rbs | 9 + .../response_refusal_delta_event.rbs | 9 + .../responses/response_refusal_done_event.rbs | 9 + .../responses/response_retrieve_params.rbs | 5 + .../response_text_annotation_delta_event.rbs | 38 +++- .../models/responses/response_text_config.rbs | 4 + .../responses/response_text_delta_event.rbs | 9 + .../responses/response_text_done_event.rbs | 9 + .../models/responses/response_usage.rbs | 12 ++ ...sponse_web_search_call_completed_event.rbs | 6 + ...onse_web_search_call_in_progress_event.rbs | 6 + ...sponse_web_search_call_searching_event.rbs | 6 + sig/openai/models/responses/tool.rbs | 145 +++++++++----- .../models/responses/tool_choice_function.rbs | 2 + .../models/responses/tool_choice_types.rbs | 12 +- .../models/responses/web_search_tool.rbs | 34 +++- sig/openai/models/responses_model.rbs | 4 +- .../models/static_file_chunking_strategy.rbs | 5 + .../static_file_chunking_strategy_object.rbs | 5 + ...ic_file_chunking_strategy_object_param.rbs | 5 + sig/openai/models/upload.rbs | 20 +- sig/openai/models/upload_cancel_params.rbs | 2 + sig/openai/models/upload_complete_params.rbs | 6 + sig/openai/models/upload_create_params.rbs | 8 + .../models/uploads/part_create_params.rbs | 5 + sig/openai/models/uploads/upload_part.rbs | 7 + sig/openai/models/vector_store.rbs | 32 ++- .../models/vector_store_create_params.rbs | 11 ++ .../models/vector_store_delete_params.rbs | 2 + sig/openai/models/vector_store_deleted.rbs | 6 + .../models/vector_store_list_params.rbs | 8 + .../models/vector_store_retrieve_params.rbs | 2 + .../models/vector_store_search_params.rbs | 26 ++- .../models/vector_store_search_response.rbs | 13 ++ .../models/vector_store_update_params.rbs | 9 + .../file_batch_cancel_params.rbs | 5 + .../file_batch_create_params.rbs | 15 +- .../file_batch_list_files_params.rbs | 10 + .../file_batch_retrieve_params.rbs | 5 + .../vector_stores/file_content_params.rbs | 5 + .../vector_stores/file_content_response.rbs | 2 + .../vector_stores/file_create_params.rbs | 15 +- .../vector_stores/file_delete_params.rbs | 5 + .../models/vector_stores/file_list_params.rbs | 9 + .../vector_stores/file_retrieve_params.rbs | 5 + .../vector_stores/file_update_params.rbs | 14 +- .../vector_stores/vector_store_file.rbs | 41 ++-- .../vector_stores/vector_store_file_batch.rbs | 25 ++- .../vector_store_file_deleted.rbs | 6 + sig/openai/resources/audio/transcriptions.rbs | 4 +- 
sig/openai/resources/chat/completions.rbs | 4 +- sig/openai/resources/evals.rbs | 2 +- .../resources/vector_stores/file_batches.rbs | 2 +- sig/openai/resources/vector_stores/files.rbs | 4 +- 689 files changed, 7877 insertions(+), 3351 deletions(-) diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index dd458b9f..06729722 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -24,7 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::AllModels::ResponsesOnlyModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 9fab3b47..28cb4113 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -18,7 +18,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # - # @return [String, Symbol, OpenAI::Audio::SpeechModel] + # @return [String, Symbol, OpenAI::Models::Audio::SpeechModel] required :model, union: -> { OpenAI::Audio::SpeechCreateParams::Model } # @!attribute voice @@ -27,7 +27,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # - # @return [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] + # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Audio::SpeechCreateParams::Voice } # @!attribute instructions @@ -41,7 +41,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # - # @return [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Audio::SpeechCreateParams::ResponseFormat } # @!attribute speed @@ -57,13 +57,13 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. # - # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): # - # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not # - # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. 
Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # @@ -80,7 +80,7 @@ module Model variant enum: -> { OpenAI::Audio::SpeechModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Audio::SpeechModel)] + # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] end # The voice to use when generating the audio. Supported voices are `alloy`, `ash`, @@ -92,27 +92,27 @@ module Voice variant String - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ALLOY } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ASH } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::BALLAD } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::CORAL } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ECHO } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::FABLE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ONYX } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::NOVA } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SAGE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SHIMMER } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::VERSE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE } # @!method self.variants # @return [Array(String, Symbol)] diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index cd9c0b2d..82dc9e67 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -15,19 +15,19 @@ class Transcription < OpenAI::Internal::Type::BaseModel # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::Transcription::Logprob] } # @!method initialize(text:, logprobs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::Transcription} for more details. + # {OpenAI::Models::Audio::Transcription} for more details. # # Represents a transcription response returned by model, based on the provided # input. # # @param text [String] The transcribed text. # - # @param logprobs [Array] The log probabilities of the tokens in the transcription. Only returned with the + # @param logprobs [Array] The log probabilities of the tokens in the transcription. 
Only returned with the class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index c4a25c41..2d51435c 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -22,7 +22,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). # - # @return [String, Symbol, OpenAI::AudioModel] + # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Audio::TranscriptionCreateParams::Model } # @!attribute chunking_strategy @@ -31,7 +31,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # boundaries. `server_vad` object can be provided to tweak VAD detection # parameters manually. If unset, the audio is transcribed as a single block. # - # @return [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] + # @return [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] optional :chunking_strategy, union: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy }, nil?: true @@ -43,7 +43,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionInclude] } # @!attribute language @@ -68,7 +68,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. # - # @return [Symbol, OpenAI::AudioResponseFormat, nil] + # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::AudioResponseFormat } # @!attribute temperature @@ -88,7 +88,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. # - # @return [Array, nil] + # @return [Array, nil] optional :timestamp_granularities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] @@ -100,21 +100,21 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. 
When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. + # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -130,7 +130,7 @@ module Model variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::AudioModel)] + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # Controls how the audio is cut into chunks. When set to `"auto"`, the server @@ -149,7 +149,7 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # @!attribute type # Must be set to `server_vad` to enable manual chunking using server side VAD. # - # @return [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] + # @return [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] required :type, enum: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type @@ -179,10 +179,10 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, prefix_padding_ms: nil, silence_duration_ms: nil, threshold: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig} for more - # details. + # {OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig} + # for more details. # - # @param type [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] Must be set to `server_vad` to enable manual chunking using server side VAD. + # @param type [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] Must be set to `server_vad` to enable manual chunking using server side VAD. # # @param prefix_padding_ms [Integer] Amount of audio to include before the VAD detected speech (in # @@ -192,7 +192,7 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # Must be set to `server_vad` to enable manual chunking using server side VAD. 
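The three VAD knobs above only take effect once a request opts into `server_vad` chunking. A minimal usage sketch, assuming a configured `OpenAI::Client` (API key read from the environment) and the `audio.transcriptions.create` resource method whose file appears in this patch's diffstat; the file name and numeric values are illustrative, not defaults:

require "openai"
require "pathname"

client = OpenAI::Client.new  # assumes OPENAI_API_KEY is set in the environment

transcription = client.audio.transcriptions.create(
  file: Pathname("meeting.flac"),   # hypothetical audio file
  model: :"gpt-4o-mini-transcribe",
  chunking_strategy: {
    type: :server_vad,              # manual chunking via server-side VAD
    prefix_padding_ms: 300,         # audio kept before detected speech
    silence_duration_ms: 200,       # trailing silence that closes a chunk
    threshold: 0.5                  # speech-detection sensitivity
  }
)
puts transcription.text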
# - # @see OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig#type + # @see OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig#type module Type extend OpenAI::Internal::Type::Enum @@ -204,7 +204,7 @@ module Type end # @!method self.variants - # @return [Array(Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] + # @return [Array(Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] end module TimestampGranularity diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index ec74e17e..0bbe16b7 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -19,7 +19,7 @@ module TranscriptionCreateResponse variant -> { OpenAI::Audio::TranscriptionVerbose } # @!method self.variants - # @return [Array(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose)] + # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] end end end diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 8b7b4416..3ca8d867 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -69,7 +69,7 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionSegment} for more details. + # {OpenAI::Models::Audio::TranscriptionSegment} for more details. # # @param id [Integer] Unique identifier of the segment. # diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 9d386b9b..2112080e 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -19,7 +19,7 @@ module TranscriptionStreamEvent variant :"transcript.text.done", -> { OpenAI::Audio::TranscriptionTextDoneEvent } # @!method self.variants - # @return [Array(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent)] + # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 58db67b8..0541f312 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -21,13 +21,13 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] } # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDeltaEvent} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent} for more details. 
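For orientation, the delta/done event pair defined in this file is consumed with a plain case split. A sketch under stated assumptions: `events` is a hypothetical enumerable of `TranscriptionStreamEvent` values yielded by the streaming transport, which itself is outside this hunk:

# `events` is a hypothetical Enumerable of transcription stream events.
events.each do |event|
  case event
  when OpenAI::Models::Audio::TranscriptionTextDeltaEvent
    print(event.delta)                  # incremental transcript text
  when OpenAI::Models::Audio::TranscriptionTextDoneEvent
    puts("\nfinal: #{event.text}")      # complete transcript, emitted once
  end
end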
# # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you @@ -36,7 +36,7 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param delta [String] The text delta that was additionally transcribed. # - # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription + # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription # # @param type [Symbol, :"transcript.text.delta"] The type of the event. Always `transcript.text.delta`. @@ -61,7 +61,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. # # @param token [String] The token that was used to generate the log probability. # diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 2b2eb5b2..2651d973 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -22,13 +22,13 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDoneEvent} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDoneEvent} for more details. # # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you @@ -37,7 +37,7 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param text [String] The text that was transcribed. # - # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ + # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ # # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. @@ -62,7 +62,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDoneEvent::Logprob} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob} for more details. # # @param token [String] The token that was used to generate the log probability. # diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 00cf9ea0..eaa0ebf3 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -25,13 +25,13 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the transcribed text and their corresponding details. 
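The segment timings documented below are what callers typically iterate after requesting `verbose_json`. A hedged sketch, assuming `verbose` is a `TranscriptionVerbose` value already returned by the API; `segments` is nil unless segment granularity was requested, hence the safe navigation:

# `verbose` is a hypothetical OpenAI::Models::Audio::TranscriptionVerbose.
verbose.segments&.each do |segment|
  printf("[%7.2f -> %7.2f] %s\n", segment.start, segment.end_, segment.text)
end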
# - # @return [Array, nil] + # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!attribute words # Extracted words and their corresponding timestamps. # - # @return [Array, nil] + # @return [Array, nil] optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionWord] } # @!method initialize(duration:, language:, text:, segments: nil, words: nil) @@ -44,9 +44,9 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The transcribed text. # - # @param segments [Array] Segments of the transcribed text and their corresponding details. + # @param segments [Array] Segments of the transcribed text and their corresponding details. # - # @param words [Array] Extracted words and their corresponding timestamps. + # @param words [Array] Extracted words and their corresponding timestamps. end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index bfcb151b..35e3dd1b 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -19,7 +19,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. # - # @return [String, Symbol, OpenAI::AudioModel] + # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Audio::TranslationCreateParams::Model } # @!attribute prompt @@ -35,7 +35,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. # - # @return [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Audio::TranslationCreateParams::ResponseFormat } # @!attribute temperature @@ -54,11 +54,11 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the # @@ -75,7 +75,7 @@ module Model variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::AudioModel)] + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # The format of the output, in one of these options: `json`, `text`, `srt`, diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index f24d4b2b..7e056468 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -12,7 +12,7 @@ module TranslationCreateResponse variant -> { OpenAI::Audio::TranslationVerbose } # @!method self.variants - # @return [Array(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)] + # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] end end end diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index a237803c..1bb16b1e 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -25,7 +25,7 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the translated text and their corresponding details. # - # @return [Array, nil] + # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!method initialize(duration:, language:, text:, segments: nil) @@ -35,7 +35,7 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The translated text. # - # @param segments [Array] Segments of the translated text and their corresponding details. + # @param segments [Array] Segments of the translated text and their corresponding details. end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 84f42355..b8dffe10 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -42,7 +42,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute status # The current status of the batch. # - # @return [Symbol, OpenAI::Batch::Status] + # @return [Symbol, OpenAI::Models::Batch::Status] required :status, enum: -> { OpenAI::Batch::Status } # @!attribute cancelled_at @@ -71,7 +71,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute errors # - # @return [OpenAI::Batch::Errors, nil] + # @return [OpenAI::Models::Batch::Errors, nil] optional :errors, -> { OpenAI::Batch::Errors } # @!attribute expired_at @@ -124,12 +124,12 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute request_counts # The request counts for different statuses within the batch. # - # @return [OpenAI::BatchRequestCounts, nil] + # @return [OpenAI::Models::BatchRequestCounts, nil] optional :request_counts, -> { OpenAI::BatchRequestCounts } # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) - # Some parameter documentations has been truncated, see {OpenAI::Batch} for more - # details. + # Some parameter documentations has been truncated, see {OpenAI::Models::Batch} + # for more details. 
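Because the `status` enum and the file-id attributes above drive the whole batch lifecycle, a usage sketch may help. Assumptions: `client` is the configured client from the earlier sketch, the input JSONL was already uploaded as the placeholder `file-abc123`, and the status symbols are the documented API values rather than anything introduced by this patch:

batch = client.batches.create(
  completion_window: :"24h",
  endpoint: :"/v1/chat/completions",
  input_file_id: "file-abc123"  # placeholder for an uploaded JSONL file
)

# Poll until the batch leaves its transient states.
until %i[completed failed expired cancelled].include?(batch.status)
  sleep(30)
  batch = client.batches.retrieve(batch.id)
end

# Completed batches expose results via output_file_id; failures via error_file_id.
puts(batch.status == :completed ? batch.output_file_id : batch.error_file_id)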
# # @param id [String] # @@ -141,7 +141,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param input_file_id [String] The ID of the input file for the batch. # - # @param status [Symbol, OpenAI::Batch::Status] The current status of the batch. + # @param status [Symbol, OpenAI::Models::Batch::Status] The current status of the batch. # # @param cancelled_at [Integer] The Unix timestamp (in seconds) for when the batch was cancelled. # @@ -151,7 +151,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param error_file_id [String] The ID of the file containing the outputs of requests with errors. # - # @param errors [OpenAI::Batch::Errors] + # @param errors [OpenAI::Models::Batch::Errors] # # @param expired_at [Integer] The Unix timestamp (in seconds) for when the batch expired. # @@ -167,13 +167,13 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param output_file_id [String] The ID of the file containing the outputs of successfully executed requests. # - # @param request_counts [OpenAI::BatchRequestCounts] The request counts for different statuses within the batch. + # @param request_counts [OpenAI::Models::BatchRequestCounts] The request counts for different statuses within the batch. # # @param object [Symbol, :batch] The object type, which is always `batch`. # The current status of the batch. # - # @see OpenAI::Batch#status + # @see OpenAI::Models::Batch#status module Status extend OpenAI::Internal::Type::Enum @@ -190,11 +190,11 @@ module Status # @return [Array] end - # @see OpenAI::Batch#errors + # @see OpenAI::Models::Batch#errors class Errors < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [Array, nil] + # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::BatchError] } # @!attribute object @@ -204,7 +204,7 @@ class Errors < OpenAI::Internal::Type::BaseModel optional :object, String # @!method initialize(data: nil, object: nil) - # @param data [Array] + # @param data [Array] # # @param object [String] The object type, which is always `list`. end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index ce21fc86..9b39fcd2 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -11,7 +11,7 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # The time frame within which the batch should be processed. Currently only `24h` # is supported. # - # @return [Symbol, OpenAI::BatchCreateParams::CompletionWindow] + # @return [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] required :completion_window, enum: -> { OpenAI::BatchCreateParams::CompletionWindow } # @!attribute endpoint @@ -20,7 +20,7 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. # - # @return [Symbol, OpenAI::BatchCreateParams::Endpoint] + # @return [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] required :endpoint, enum: -> { OpenAI::BatchCreateParams::Endpoint } # @!attribute input_file_id @@ -52,9 +52,9 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::BatchCreateParams} for more details. # - # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. 
Currently only `24h` + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` # - # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` # # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. # diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index d6aa3ad5..7bbd1868 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -68,7 +68,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute response_format @@ -93,7 +93,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -110,7 +110,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::Assistant::ToolResources, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::Assistant::ToolResources }, nil?: true # @!attribute top_p @@ -124,8 +124,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) - # Some parameter documentations has been truncated, see {OpenAI::Beta::Assistant} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Assistant} for more details. # # Represents an `assistant` that can call the model and use tools. # @@ -143,28 +143,28 @@ class Assistant < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param object [Symbol, :assistant] The object type, which is always `assistant`. - # @see OpenAI::Beta::Assistant#tool_resources + # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::Assistant::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::Assistant::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::Assistant::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -173,10 +173,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::Assistant::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # @see OpenAI::Beta::Assistant::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -188,12 +188,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Assistant::ToolResources::CodeInterpreter} for more details. + # {OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::Assistant::ToolResources#file_search + # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -206,7 +207,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Assistant::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::Assistant::ToolResources::FileSearch} for more details. 
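Because `tool_resources` and its nested `code_interpreter`/`file_search` members are all optional on the Assistant model above, reads should be nil-guarded. A small sketch, assuming a configured client and a placeholder assistant ID:

    assistant = client.beta.assistants.retrieve("asst_abc123")
    # Each level may be nil, hence the safe navigation.
    store_ids = assistant.tool_resources&.file_search&.vector_store_ids || []
    puts "attached vector stores: #{store_ids.join(", ")}"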
# # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index f05d1764..84e6d083 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -15,7 +15,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Beta::AssistantCreateParams::Model } # @!attribute description @@ -56,7 +56,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -81,7 +81,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -98,7 +98,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::AssistantCreateParams::ToolResources }, nil?: true # @!attribute tools @@ -106,7 +106,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p @@ -123,7 +123,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::AssistantCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. # @@ -133,15 +133,15 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. 
# - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # @@ -161,13 +161,13 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter @@ -175,7 +175,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -184,10 +184,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
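A sketch of the create call these params model, assuming a configured client; the assistant name and vector-store ID are placeholders:

    client.beta.assistants.create(
      model: :"gpt-4o",              # a String or a ChatModel enum member
      name: "Docs helper",
      tools: [{type: :file_search}], # coerces into a FileSearchTool
      tool_resources: {
        file_search: {vector_store_ids: ["vs_abc123"]}
      }
    )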
# - # @param code_interpreter [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::AssistantCreateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -199,13 +199,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} + # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::AssistantCreateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -222,7 +222,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this assistant. There can be a maximum of 1 # vector store attached to the assistant. # - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] @@ -230,19 +230,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch} for + # more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, union: -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy @@ -269,10 +269,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} # for more details. # - # @param chunking_strategy [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad # @@ -281,7 +281,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -315,7 +315,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -328,11 +328,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. 
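Putting the pieces together, a `vector_stores` helper entry that opts out of the `auto` strategy might look like the following sketch; the file ID is a placeholder and the token counts mirror the documented defaults:

    client.beta.assistants.create(
      model: :"gpt-4o",
      tools: [{type: :file_search}],
      tool_resources: {
        file_search: {
          vector_stores: [{
            file_ids: ["file-abc123"],
            chunking_strategy: {
              type: :static,
              # 800-token chunks with a 400-token overlap, per the defaults.
              static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
            }
          }]
        }
      }
    )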
- # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -351,7 +351,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. @@ -361,7 +361,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 3c936a2a..752af1a6 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -37,7 +37,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::AssistantListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::AssistantListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -50,7 +50,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. 
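Cursor pagination in this SDK composes `after`/`limit`/`order`; a brief sketch, assuming the gem's standard page object, whose `auto_paging_each` helper follows `after` cursors automatically:

    page = client.beta.assistants.list(limit: 20, order: :desc)
    page.auto_paging_each { |assistant| puts assistant.id }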
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 411077cb..7541c8b2 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -43,7 +43,7 @@ module AssistantResponseFormatOption variant -> { OpenAI::ResponseFormatJSONSchema } # @!method self.variants - # @return [Array(Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema)] + # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 74345189..0245a53a 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -123,7 +123,7 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Beta::Thread } # @!attribute event @@ -139,13 +139,13 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # @@ -157,7 +157,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -167,12 +167,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -182,7 +182,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
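The AssistantResponseFormatOption union above accepts either the bare symbol `:auto` or one of the three response-format models, and plain hashes coerce into the latter. A one-line sketch with placeholder settings:

    client.beta.assistants.create(model: :"gpt-4o", response_format: {type: :json_object})

For strict structured output, a `json_schema`-typed payload selects the ResponseFormatJSONSchema variant instead.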
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -192,12 +192,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.queued"] end @@ -207,7 +207,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -217,12 +217,13 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.in_progress"] end @@ -232,7 +233,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -242,12 +243,13 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.requires_action") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.requires_action"] end @@ -257,7 +259,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -267,12 +269,13 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.completed"] end @@ -282,7 +285,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -292,12 +295,13 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.incomplete"] end @@ -307,7 +311,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -317,12 +321,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.failed"] end @@ -332,7 +336,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -342,12 +346,13 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelling") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelling"] end @@ -357,7 +362,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -367,12 +372,13 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelled"] end @@ -382,7 +388,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -392,12 +398,12 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.expired"] end @@ -406,7 +412,7 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -416,13 +422,14 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.created"] end @@ -431,7 +438,7 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -441,13 +448,14 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -457,7 +465,7 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event @@ -467,13 +475,14 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more + # details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -482,7 +491,7 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -492,13 +501,14 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -507,7 +517,7 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -517,13 +527,14 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.failed"] end @@ -532,7 +543,7 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -542,13 +553,14 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -557,7 +569,7 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -567,13 +579,14 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.expired"] end @@ -583,7 +596,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -593,13 +606,14 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.created"] end @@ -609,7 +623,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -619,13 +633,14 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.in_progress"] end @@ -635,7 +650,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. 
# - # @return [OpenAI::Beta::Threads::MessageDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent } # @!attribute event @@ -645,13 +660,14 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta} for more + # details. # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming # # @param event [Symbol, :"thread.message.delta"] end @@ -661,7 +677,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -671,13 +687,14 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.completed"] end @@ -687,7 +704,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -697,13 +714,14 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. 
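Every variant above pairs an `event` tag with a typed `data` payload, which makes a `case`/`in` dispatch natural. A sketch, assuming `events` is an enumerable of these AssistantStreamEvent values obtained from a streaming run; only text deltas are handled here and other lifecycle events are skipped:

    events.each do |event|
      case event.event
      in :"thread.message.delta"
        # MessageDeltaEvent#delta carries the changed content blocks.
        block = event.data.delta.content&.first
        print block.text&.value if block&.type == :text
      in :"thread.run.completed"
        puts "\nrun #{event.data.id} finished"
      in :error
        warn event.data.message # an ErrorObject
      else
        nil # queued/in_progress/step events ignored in this sketch
      end
    end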
# - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.incomplete"] end @@ -711,7 +729,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [OpenAI::ErrorObject] + # @return [OpenAI::Models::ErrorObject] required :data, -> { OpenAI::ErrorObject } # @!attribute event @@ -724,12 +742,12 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. # This can happen due to an internal server error or a timeout. # - # @param data [OpenAI::ErrorObject] + # @param data [OpenAI::Models::ErrorObject] # @param event [Symbol, :error] end # @!method self.variants - # @return [Array(OpenAI::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Beta::AssistantStreamEvent::ErrorEvent)] + # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 111defb9..495ff39a 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -15,7 +15,7 @@ module AssistantTool variant :function, -> { OpenAI::Beta::FunctionTool } # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool)] + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 7f43fa4a..a4fc7a03 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -7,25 +7,25 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the tool. If type is `function`, the function name must be set # - # @return [Symbol, OpenAI::Beta::AssistantToolChoice::Type] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] required :type, enum: -> { OpenAI::Beta::AssistantToolChoice::Type } # @!attribute function # - # @return [OpenAI::Beta::AssistantToolChoiceFunction, nil] + # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction, nil] optional :function, -> { OpenAI::Beta::AssistantToolChoiceFunction } # @!method initialize(type:, function: nil) # Specifies a tool the model should use. Use to force the model to call a specific # tool. # - # @param type [Symbol, OpenAI::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set + # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set # - # @param function [OpenAI::Beta::AssistantToolChoiceFunction] + # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] # The type of the tool. 
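The AssistantTool union a few hunks above admits three shapes (`code_interpreter`, `file_search`, `function`), with plain hashes coercing into the matching variant. The function schema below is purely illustrative:

    tools = [
      {type: :code_interpreter},
      {
        type: :function,
        function: {
          name: "lookup_order", # hypothetical function
          parameters: {type: "object", properties: {id: {type: "string"}}}
        }
      }
    ]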
If type is `function`, the function name must be set # - # @see OpenAI::Beta::AssistantToolChoice#type + # @see OpenAI::Models::Beta::AssistantToolChoice#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 5a87d00a..8bfdb818 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -35,7 +35,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice)] + # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 40734bc9..6f8f9b27 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -39,7 +39,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model, nil] + # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Beta::AssistantUpdateParams::Model } # @!attribute name @@ -56,7 +56,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -81,7 +81,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -98,7 +98,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources }, nil?: true # @!attribute tools @@ -106,7 +106,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p @@ -129,19 +129,19 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
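To pin a run to one of those tools, the AssistantToolChoiceOption union above takes either a bare mode symbol (`:none`, `:auto`, `:required`) or an AssistantToolChoice. A sketch with placeholder IDs and a hypothetical function name:

    client.beta.threads.runs.create(
      "thread_abc123",
      assistant_id: "asst_abc123",
      tool_choice: {type: :function, function: {name: "lookup_order"}}
    )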
This can be # - # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # @@ -157,77 +157,77 @@ module Model variant String - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O1 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 } - variant const: -> { 
OpenAI::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0314 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } - variant const: -> { 
OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } # @!method self.variants # @return [Array(String, Symbol)] @@ -281,7 +281,7 @@ module Model class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter @@ -289,7 +289,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -298,10 +298,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # Overrides the list of @@ -314,13 +314,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} + # for more details. 
# # @param file_ids [Array] Overrides the list of [file](https://platform.openai.com/docs/api-reference/file end - # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # Overrides the @@ -333,8 +333,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch} for + # more details. # # @param vector_store_ids [Array] Overrides the [vector store](https://platform.openai.com/docs/api-reference/vect end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index c521e6a6..e12b3e5a 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -13,15 +13,15 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute file_search # Overrides for the file search tool. # - # @return [OpenAI::Beta::FileSearchTool::FileSearch, nil] + # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::FileSearchTool::FileSearch } # @!method initialize(file_search: nil, type: :file_search) - # @param file_search [OpenAI::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. + # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. # # @param type [Symbol, :file_search] The type of tool being defined: `file_search` - # @see OpenAI::Beta::FileSearchTool#file_search + # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute max_num_results # The maximum number of results the file search tool should output. The default is @@ -44,20 +44,20 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions, nil] + # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions } # @!method initialize(max_num_results: nil, ranking_options: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::FileSearchTool::FileSearch} for more details. + # {OpenAI::Models::Beta::FileSearchTool::FileSearch} for more details. # # Overrides for the file search tool. # # @param max_num_results [Integer] The maximum number of results the file search tool should output. The default is # - # @param ranking_options [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool + # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. 
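The hunks above only rewrite YARD doc references from `OpenAI::Beta::*` to `OpenAI::Models::Beta::*`; the runtime lookup paths inside the lambdas are untouched. As an illustrative sketch of the update params documented here (assuming the gem's documented client entry point; all IDs are hypothetical placeholders):

    require "openai"

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment
    assistant = client.beta.assistants.update(
      "asst_123", # hypothetical assistant ID
      model: "gpt-4o",
      tool_resources: {file_search: {vector_store_ids: ["vs_123"]}} # hypothetical store ID
    )
    puts assistant.model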
diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb
index c521e6a6..e12b3e5a 100644
--- a/lib/openai/models/beta/file_search_tool.rb
+++ b/lib/openai/models/beta/file_search_tool.rb
@@ -13,15 +13,15 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel
         # @!attribute file_search
         #   Overrides for the file search tool.
         #
-        #   @return [OpenAI::Beta::FileSearchTool::FileSearch, nil]
+        #   @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil]
         optional :file_search, -> { OpenAI::Beta::FileSearchTool::FileSearch }
 
         # @!method initialize(file_search: nil, type: :file_search)
-        #   @param file_search [OpenAI::Beta::FileSearchTool::FileSearch] Overrides for the file search tool.
+        #   @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] Overrides for the file search tool.
         #
         #   @param type [Symbol, :file_search] The type of tool being defined: `file_search`
 
-        # @see OpenAI::Beta::FileSearchTool#file_search
+        # @see OpenAI::Models::Beta::FileSearchTool#file_search
         class FileSearch < OpenAI::Internal::Type::BaseModel
           # @!attribute max_num_results
           #   The maximum number of results the file search tool should output. The default is
@@ -44,20 +44,20 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
           #   [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings)
           #   for more information.
           #
-          #   @return [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions, nil]
+          #   @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil]
           optional :ranking_options, -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions }
 
           # @!method initialize(max_num_results: nil, ranking_options: nil)
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::FileSearchTool::FileSearch} for more details.
+          #   {OpenAI::Models::Beta::FileSearchTool::FileSearch} for more details.
           #
           #   Overrides for the file search tool.
           #
           #   @param max_num_results [Integer] The maximum number of results the file search tool should output. The default is
           #
-          #   @param ranking_options [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool
+          #   @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool
 
-          # @see OpenAI::Beta::FileSearchTool::FileSearch#ranking_options
+          # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options
           class RankingOptions < OpenAI::Internal::Type::BaseModel
             # @!attribute score_threshold
             #   The score threshold for the file search. All values must be a floating point
@@ -70,12 +70,13 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel
             #   The ranker to use for the file search. If not specified will use the `auto`
             #   ranker.
             #
-            #   @return [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil]
+            #   @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil]
             optional :ranker, enum: -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker }
 
             # @!method initialize(score_threshold:, ranker: nil)
             #   Some parameter documentations has been truncated, see
-            #   {OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions} for more details.
+            #   {OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} for more
+            #   details.
             #
             #   The ranking options for the file search. If not specified, the file search tool
             #   will use the `auto` ranker and a score_threshold of 0.
@@ -86,12 +87,12 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel
             #
             #   @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num
             #
-            #   @param ranker [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank
+            #   @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank
 
             # The ranker to use for the file search. If not specified will use the `auto`
             # ranker.
             #
-            # @see OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions#ranker
+            # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker
             module Ranker
               extend OpenAI::Internal::Type::Enum
 
diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb
index 512eb078..361c2c44 100644
--- a/lib/openai/models/beta/function_tool.rb
+++ b/lib/openai/models/beta/function_tool.rb
@@ -6,7 +6,7 @@ module Beta
       class FunctionTool < OpenAI::Internal::Type::BaseModel
         # @!attribute function
         #
-        #   @return [OpenAI::FunctionDefinition]
+        #   @return [OpenAI::Models::FunctionDefinition]
         required :function, -> { OpenAI::FunctionDefinition }
 
         # @!attribute type
@@ -16,7 +16,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel
         required :type, const: :function
 
         # @!method initialize(function:, type: :function)
-        #   @param function [OpenAI::FunctionDefinition]
+        #   @param function [OpenAI::Models::FunctionDefinition]
         #
         #   @param type [Symbol, :function] The type of tool being defined: `function`
       end
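A minimal sketch of the file-search tool shape documented above (field names taken from the YARD docs in this patch; the client and assistant ID are the hypothetical placeholders from the previous sketch):

    file_search_tool = {
      type: :file_search,
      file_search: {
        max_num_results: 10,
        ranking_options: {score_threshold: 0.5, ranker: :auto}
      }
    }
    client.beta.assistants.update("asst_123", tools: [file_search_tool])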
diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb
index ce394898..9f75576e 100644
--- a/lib/openai/models/beta/message_stream_event.rb
+++ b/lib/openai/models/beta/message_stream_event.rb
@@ -34,7 +34,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
           #   Represents a message within a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Message]
+          #   @return [OpenAI::Models::Beta::Threads::Message]
           required :data, -> { OpenAI::Beta::Threads::Message }
 
           # @!attribute event
@@ -44,13 +44,14 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.message.created")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated} for more details.
+          #   {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated} for more
+          #   details.
           #
           #   Occurs when a
           #   [message](https://platform.openai.com/docs/api-reference/messages/object) is
           #   created.
           #
-          #   @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
+          #   @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
           #
           #   @param event [Symbol, :"thread.message.created"]
         end
@@ -60,7 +61,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
           #   Represents a message within a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Message]
+          #   @return [OpenAI::Models::Beta::Threads::Message]
           required :data, -> { OpenAI::Beta::Threads::Message }
 
           # @!attribute event
@@ -70,13 +71,14 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.message.in_progress")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress} for more details.
+          #   {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress} for more
+          #   details.
           #
           #   Occurs when a
           #   [message](https://platform.openai.com/docs/api-reference/messages/object) moves
           #   to an `in_progress` state.
           #
-          #   @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
+          #   @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
           #
           #   @param event [Symbol, :"thread.message.in_progress"]
         end
@@ -86,7 +88,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
           #   Represents a message delta i.e. any changed fields on a message during
           #   streaming.
           #
-          #   @return [OpenAI::Beta::Threads::MessageDeltaEvent]
+          #   @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent]
           required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent }
 
           # @!attribute event
@@ -96,13 +98,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.message.delta")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta} for more details.
+          #   {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta} for more details.
           #
           #   Occurs when parts of a
           #   [Message](https://platform.openai.com/docs/api-reference/messages/object) are
           #   being streamed.
           #
-          #   @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming
+          #   @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming
           #
           #   @param event [Symbol, :"thread.message.delta"]
         end
@@ -112,7 +114,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
           #   Represents a message within a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Message]
+          #   @return [OpenAI::Models::Beta::Threads::Message]
           required :data, -> { OpenAI::Beta::Threads::Message }
 
           # @!attribute event
@@ -122,13 +124,14 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.message.completed")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted} for more details.
+          #   {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted} for more
+          #   details.
           #
           #   Occurs when a
           #   [message](https://platform.openai.com/docs/api-reference/messages/object) is
           #   completed.
           #
-          #   @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
+          #   @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
           #
           #   @param event [Symbol, :"thread.message.completed"]
         end
@@ -138,7 +141,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
           #   Represents a message within a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Message]
+          #   @return [OpenAI::Models::Beta::Threads::Message]
           required :data, -> { OpenAI::Beta::Threads::Message }
 
           # @!attribute event
@@ -148,19 +151,20 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.message.incomplete")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more details.
+          #   {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more
+          #   details.
           #
           #   Occurs when a
           #   [message](https://platform.openai.com/docs/api-reference/messages/object) ends
           #   before it is completed.
           #
-          #   @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
+          #   @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe
           #
           #   @param event [Symbol, :"thread.message.incomplete"]
         end
 
         # @!method self.variants
-        #   @return [Array(OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete)]
+        #   @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)]
       end
     end
   end
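Each variant above wraps its payload in `data`, so a consumer can dispatch on the event class. A minimal sketch, assuming `events` is an Enumerable of these event models obtained from a streaming run (the variable is a placeholder, not an API from this patch):

    events.each do |event|
      case event
      in OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta
        p event.data.delta # incremental changes to the message
      in OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted
        puts "message #{event.data.id} completed"
      else
        # other lifecycle events (created, in_progress, incomplete) ignored here
      end
    end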
# # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -90,7 +92,7 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event @@ -100,13 +102,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -115,7 +117,7 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -125,13 +127,14 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -140,7 +143,7 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -150,13 +153,14 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. 
# # @param event [Symbol, :"thread.run.step.failed"] end @@ -165,7 +169,7 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -175,13 +179,14 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -190,7 +195,7 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -200,19 +205,20 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. 
# # @param event [Symbol, :"thread.run.step.expired"] end # @!method self.variants - # @return [Array(OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired)] + # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index ca41f968..2bfe1450 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -45,7 +45,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -55,12 +55,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -70,7 +70,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -80,12 +80,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. 
diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb
index ca41f968..2bfe1450 100644
--- a/lib/openai/models/beta/run_stream_event.rb
+++ b/lib/openai/models/beta/run_stream_event.rb
@@ -45,7 +45,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -55,12 +55,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.created")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunCreated} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated} for more details.
           #
           #   Occurs when a new
           #   [run](https://platform.openai.com/docs/api-reference/runs/object) is created.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.created"]
         end
@@ -70,7 +70,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -80,12 +80,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.queued")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunQueued} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   moves to a `queued` status.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.queued"]
         end
@@ -95,7 +95,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -105,12 +105,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.in_progress")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunInProgress} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   moves to an `in_progress` status.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.in_progress"]
         end
@@ -120,7 +120,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -130,12 +130,13 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.requires_action")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction} for more
+          #   details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   moves to a `requires_action` status.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.requires_action"]
         end
@@ -145,7 +146,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -155,12 +156,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.completed")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunCompleted} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   is completed.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.completed"]
         end
@@ -170,7 +171,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -180,12 +181,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.incomplete")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   ends with status `incomplete`.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.incomplete"]
         end
@@ -195,7 +196,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -205,12 +206,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.failed")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunFailed} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   fails.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.failed"]
         end
@@ -220,7 +221,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -230,12 +231,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.cancelling")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunCancelling} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   moves to a `cancelling` status.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.cancelling"]
         end
@@ -245,7 +246,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -255,12 +256,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.cancelled")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunCancelled} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   is cancelled.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.cancelled"]
         end
@@ -270,7 +271,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel
           #   Represents an execution run on a
           #   [thread](https://platform.openai.com/docs/api-reference/threads).
           #
-          #   @return [OpenAI::Beta::Threads::Run]
+          #   @return [OpenAI::Models::Beta::Threads::Run]
           required :data, -> { OpenAI::Beta::Threads::Run }
 
           # @!attribute event
@@ -280,18 +281,18 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel
 
           # @!method initialize(data:, event: :"thread.run.expired")
           #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::RunStreamEvent::ThreadRunExpired} for more details.
+          #   {OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired} for more details.
           #
           #   Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object)
           #   expires.
           #
-          #   @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
+          #   @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r
           #
           #   @param event [Symbol, :"thread.run.expired"]
         end
 
         # @!method self.variants
-        #   @return [Array(OpenAI::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Beta::RunStreamEvent::ThreadRunExpired)]
+        #   @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)]
       end
     end
   end
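Run-level events mirror the run's `status` lifecycle, so a consumer might simply wait for a terminal variant. A sketch under the same `events` assumption:

    TERMINAL = [
      OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted,
      OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed,
      OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled,
      OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired
    ].freeze

    final = events.find { |event| TERMINAL.any? { |klass| event.is_a?(klass) } }
    puts "run #{final.data.id} ended with status #{final.data.status}" if final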
diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb
index 11d37b69..21ee9dd0 100644
--- a/lib/openai/models/beta/thread.rb
+++ b/lib/openai/models/beta/thread.rb
@@ -40,12 +40,12 @@ class Thread < OpenAI::Internal::Type::BaseModel
         #   `code_interpreter` tool requires a list of file IDs, while the `file_search`
         #   tool requires a list of vector store IDs.
         #
-        #   @return [OpenAI::Beta::Thread::ToolResources, nil]
+        #   @return [OpenAI::Models::Beta::Thread::ToolResources, nil]
         required :tool_resources, -> { OpenAI::Beta::Thread::ToolResources }, nil?: true
 
         # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread)
-        #   Some parameter documentations has been truncated, see {OpenAI::Beta::Thread} for
-        #   more details.
+        #   Some parameter documentations has been truncated, see
+        #   {OpenAI::Models::Beta::Thread} for more details.
         #
         #   Represents a thread that contains
         #   [messages](https://platform.openai.com/docs/api-reference/messages).
@@ -56,20 +56,20 @@ class Thread < OpenAI::Internal::Type::BaseModel
         #
         #   @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
         #
-        #   @param tool_resources [OpenAI::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
+        #   @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
         #
         #   @param object [Symbol, :thread] The object type, which is always `thread`.
 
-        # @see OpenAI::Beta::Thread#tool_resources
+        # @see OpenAI::Models::Beta::Thread#tool_resources
        class ToolResources < OpenAI::Internal::Type::BaseModel
           # @!attribute code_interpreter
           #
-          #   @return [OpenAI::Beta::Thread::ToolResources::CodeInterpreter, nil]
+          #   @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil]
           optional :code_interpreter, -> { OpenAI::Beta::Thread::ToolResources::CodeInterpreter }
 
           # @!attribute file_search
           #
-          #   @return [OpenAI::Beta::Thread::ToolResources::FileSearch, nil]
+          #   @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch, nil]
           optional :file_search, -> { OpenAI::Beta::Thread::ToolResources::FileSearch }
 
           # @!method initialize(code_interpreter: nil, file_search: nil)
@@ -78,10 +78,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel
           #   `code_interpreter` tool requires a list of file IDs, while the `file_search`
           #   tool requires a list of vector store IDs.
           #
-          #   @param code_interpreter [OpenAI::Beta::Thread::ToolResources::CodeInterpreter]
-          #   @param file_search [OpenAI::Beta::Thread::ToolResources::FileSearch]
+          #   @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter]
+          #   @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch]
 
-          # @see OpenAI::Beta::Thread::ToolResources#code_interpreter
+          # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter
           class CodeInterpreter < OpenAI::Internal::Type::BaseModel
             # @!attribute file_ids
             #   A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -93,12 +93,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
 
             # @!method initialize(file_ids: nil)
             #   Some parameter documentations has been truncated, see
-            #   {OpenAI::Beta::Thread::ToolResources::CodeInterpreter} for more details.
+            #   {OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter} for more details.
             #
             #   @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
           end
 
-          # @see OpenAI::Beta::Thread::ToolResources#file_search
+          # @see OpenAI::Models::Beta::Thread::ToolResources#file_search
           class FileSearch < OpenAI::Internal::Type::BaseModel
             # @!attribute vector_store_ids
             #   The
@@ -111,7 +111,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
 
             # @!method initialize(vector_store_ids: nil)
             #   Some parameter documentations has been truncated, see
-            #   {OpenAI::Beta::Thread::ToolResources::FileSearch} for more details.
+            #   {OpenAI::Models::Beta::Thread::ToolResources::FileSearch} for more details.
             #
             #   @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/
           end
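Reading these fields back from a retrieved thread might look like the following sketch (placeholder thread ID, reusing the client from the first sketch):

    thread = client.beta.threads.retrieve("thread_123") # hypothetical ID
    store_ids = thread.tool_resources&.file_search&.vector_store_ids
    puts "file_search vector stores: #{Array(store_ids).join(", ")}"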
# - # @return [String, Symbol, OpenAI::ChatModel, nil] + # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Model }, nil?: true # @!attribute parallel_tool_calls @@ -95,7 +95,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -110,7 +110,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] optional :thread, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread } # @!attribute tool_choice @@ -122,7 +122,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tool_resources @@ -131,14 +131,14 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] @@ -159,7 +159,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] optional :truncation_strategy, -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy }, nil?: true @@ -178,25 +178,25 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -213,7 +213,7 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class Thread < OpenAI::Internal::Type::BaseModel @@ -221,7 +221,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. 
# - # @return [Array, nil] + # @return [Array, nil] optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] @@ -244,7 +244,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources @@ -253,22 +253,22 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread} for more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread} for more details. # # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content } # @!attribute role @@ -279,13 +279,13 @@ class Message < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] + # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] required :role, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] @@ -305,19 +305,20 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message} for more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message} for more + # details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. 
Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#content + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content module Content extend OpenAI::Internal::Type::Union @@ -325,12 +326,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray - } + variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -344,7 +343,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#role + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -365,7 +364,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] @@ -374,7 +373,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union @@ -400,22 +399,22 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] end end end - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread#tool_resources + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch @@ -427,10 +426,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -442,13 +441,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -465,7 +464,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. 
 #
-            #   @return [Array<OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore>, nil]
+            #   @return [Array<OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore>, nil]
             optional :vector_stores,
                      -> {
                        OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore]
@@ -473,19 +472,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel

             # @!method initialize(vector_store_ids: nil, vector_stores: nil)
             #   Some parameter documentations has been truncated, see
-            #   {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} for
-            #   more details.
+            #   {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch}
+            #   for more details.
             #
             #   @param vector_store_ids [Array<String>] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/
             #
-            #   @param vector_stores [Array<OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore>] A helper to create a [vector store](https://platform.openai.com/docs/api-referen
+            #   @param vector_stores [Array<OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore>] A helper to create a [vector store](https://platform.openai.com/docs/api-referen

             class VectorStore < OpenAI::Internal::Type::BaseModel
               # @!attribute chunking_strategy
               #   The chunking strategy used to chunk the file(s). If not set, will use the `auto`
               #   strategy.
               #
-              #   @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil]
+              #   @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil]
               optional :chunking_strategy,
                        union: -> {
                          OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy
@@ -512,10 +511,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel

               # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil)
               #   Some parameter documentations has been truncated, see
-              #   {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore}
+              #   {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore}
               #   for more details.
               #
-              #   @param chunking_strategy [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
+              #   @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto`
               #
               #   @param file_ids [Array<String>] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad
               #
               #   @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be

              #   The chunking strategy used to chunk the file(s). If not set, will use the `auto`
              #   strategy.
# - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -558,7 +557,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -571,11 +570,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -594,7 +593,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. 
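For the two chunking strategies documented above, a hedged sketch of the request shapes as they would appear in a `vector_stores` entry; the values are illustrative, and the `800` token ceiling is the platform-wide default rather than something this patch states:

    # `auto` (the default) lets the platform pick chunk sizes.
    auto_strategy = {type: :auto}

    # `static` pins both knobs; the overlap may not exceed half the chunk size.
    static_strategy = {
      type: :static,
      static: {chunk_overlap_tokens: 400, max_chunk_size_tokens: 800}
    }

    vector_store = {file_ids: ["file-abc123"], chunking_strategy: static_strategy}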
@@ -604,7 +603,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end @@ -614,7 +613,7 @@ class Static < OpenAI::Internal::Type::BaseModel class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter @@ -622,7 +621,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -631,10 +630,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -646,13 +645,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} for - # more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} + # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -665,8 +664,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for + # more details. 
 #
 #   @param vector_store_ids [Array<String>] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect
 end
@@ -679,7 +678,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
       #   the thread. When set to `auto`, messages in the middle of the thread will be
       #   dropped to fit the context length of the model, `max_prompt_tokens`.
       #
-      #   @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type]
+      #   @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type]
       required :type, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type }

       # @!attribute last_messages
@@ -691,12 +690,13 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel

       # @!method initialize(type:, last_messages: nil)
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more details.
+      #   {OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more
+      #   details.
       #
       #   Controls for how a thread will be truncated prior to the run. Use this to
       #   control the intial context window of the run.
       #
-      #   @param type [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
+      #   @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to
       #
       #   @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context
@@ -705,7 +705,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel
       #   the thread. When set to `auto`, messages in the middle of the thread will be
       #   dropped to fit the context length of the model, `max_prompt_tokens`.
       #
-      #   @see OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy#type
+      #   @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type
       module Type
         extend OpenAI::Internal::Type::Enum
diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb
index c4d1c025..7c5b41ef 100644
--- a/lib/openai/models/beta/thread_create_params.rb
+++ b/lib/openai/models/beta/thread_create_params.rb
@@ -12,7 +12,7 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel
       #   A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
       #   start the thread with.
       #
-      #   @return [Array<OpenAI::Beta::ThreadCreateParams::Message>, nil]
+      #   @return [Array<OpenAI::Models::Beta::ThreadCreateParams::Message>, nil]
       optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message] }

       # @!attribute metadata
@@ -32,18 +32,18 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel
       #   `code_interpreter` tool requires a list of file IDs, while the `file_search`
       #   tool requires a list of vector store IDs.
       #
-      #   @return [OpenAI::Beta::ThreadCreateParams::ToolResources, nil]
+      #   @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil]
       optional :tool_resources, -> { OpenAI::Beta::ThreadCreateParams::ToolResources }, nil?: true

       # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {})
       #   Some parameter documentations has been truncated, see
       #   {OpenAI::Models::Beta::ThreadCreateParams} for more details.
       #
-      #   @param messages [Array<OpenAI::Beta::ThreadCreateParams::Message>] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
+      #   @param messages [Array<OpenAI::Models::Beta::ThreadCreateParams::Message>] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to
       #
       #   @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
       #
-      #   @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
+      #   @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre
       #
       #   @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]

@@ -51,7 +51,7 @@ class Message < OpenAI::Internal::Type::BaseModel
        # @!attribute content
        #   The text contents of the message.
        #
-        #   @return [String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>]
+        #   @return [String, Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam>]
        required :content, union: -> { OpenAI::Beta::ThreadCreateParams::Message::Content }

        # @!attribute role
@@ -62,13 +62,13 @@ class Message < OpenAI::Internal::Type::BaseModel
        #   - `assistant`: Indicates the message is generated by the assistant. Use this
        #     value to insert messages from the assistant into the conversation.
        #
-        #   @return [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role]
+        #   @return [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role]
        required :role, enum: -> { OpenAI::Beta::ThreadCreateParams::Message::Role }

        # @!attribute attachments
        #   A list of files attached to the message, and the tools they should be added to.
        #
-        #   @return [Array<OpenAI::Beta::ThreadCreateParams::Message::Attachment>, nil]
+        #   @return [Array<OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment>, nil]
        optional :attachments,
                 -> {
                   OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message::Attachment]
@@ -88,19 +88,19 @@ class Message < OpenAI::Internal::Type::BaseModel

        # @!method initialize(content:, role:, attachments: nil, metadata: nil)
        #   Some parameter documentations has been truncated, see
-        #   {OpenAI::Beta::ThreadCreateParams::Message} for more details.
+        #   {OpenAI::Models::Beta::ThreadCreateParams::Message} for more details.
        #
-        #   @param content [String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>] The text contents of the message.
+        #   @param content [String, Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam>] The text contents of the message.
        #
-        #   @param role [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include:
+        #   @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include:
        #
-        #   @param attachments [Array<OpenAI::Beta::ThreadCreateParams::Message::Attachment>, nil] A list of files attached to the message, and the tools they should be added to.
+        #   @param attachments [Array<OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment>, nil] A list of files attached to the message, and the tools they should be added to.
        #
        #   @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be

        #   The text contents of the message.
        #
-        #   @see OpenAI::Beta::ThreadCreateParams::Message#content
+        #   @see OpenAI::Models::Beta::ThreadCreateParams::Message#content
        module Content
          extend OpenAI::Internal::Type::Union
@@ -108,10 +108,10 @@ module Content
          variant String

          # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models).
-          variant -> { OpenAI::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray }
+          variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray }

          # @!method self.variants
-          #   @return [Array(String, Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam>)]
+          #   @return [Array(String, Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam>)]

          # @type [OpenAI::Internal::Type::Converter]
          MessageContentPartParamArray =
@@ -125,7 +125,7 @@ module Content
        #   - `assistant`: Indicates the message is generated by the assistant. Use this
        #     value to insert messages from the assistant into the conversation.
        #
-        #   @see OpenAI::Beta::ThreadCreateParams::Message#role
+        #   @see OpenAI::Models::Beta::ThreadCreateParams::Message#role
        module Role
          extend OpenAI::Internal::Type::Enum
@@ -146,7 +146,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel
          # @!attribute tools
          #   The tools to add this file to.
          #
-          #   @return [Array<OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch>, nil]
+          #   @return [Array<OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch>, nil]
          optional :tools,
                   -> {
                     OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool]
@@ -155,7 +155,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel
          # @!method initialize(file_id: nil, tools: nil)
          #   @param file_id [String] The ID of the file to attach to the message.
          #
-          #   @param tools [Array<OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch>] The tools to add this file to.
+          #   @param tools [Array<OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch>] The tools to add this file to.

          module Tool
            extend OpenAI::Internal::Type::Union
@@ -181,7 +181,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
            end

            # @!method self.variants
-            #   @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)]
+            #   @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)]
          end
        end
      end
@@ -189,12 +189,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
      class ToolResources < OpenAI::Internal::Type::BaseModel
        # @!attribute code_interpreter
        #
-        #   @return [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil]
+        #   @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil]
        optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter }

        # @!attribute file_search
        #
-        #   @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch, nil]
+        #   @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, nil]
        optional :file_search, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch }

        # @!method initialize(code_interpreter: nil, file_search: nil)
@@ -203,10 +203,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel
        #   `code_interpreter` tool requires a list of file IDs, while the `file_search`
        #   tool requires a list of vector store IDs.
        #
-        #   @param code_interpreter [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter]
-        #   @param file_search [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch]
+        #   @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter]
+        #   @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch]

-        # @see OpenAI::Beta::ThreadCreateParams::ToolResources#code_interpreter
+        # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter
        class CodeInterpreter < OpenAI::Internal::Type::BaseModel
          # @!attribute file_ids
          #   A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
@@ -218,13 +218,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel

          # @!method initialize(file_ids: nil)
          #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for more
-          #   details.
+          #   {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for
+          #   more details.
          #
          #   @param file_ids [Array<String>] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made
        end

-        # @see OpenAI::Beta::ThreadCreateParams::ToolResources#file_search
+        # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search
        class FileSearch < OpenAI::Internal::Type::BaseModel
          # @!attribute vector_store_ids
          #   The
@@ -241,7 +241,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel
          #   with file_ids and attach it to this thread. There can be a maximum of 1 vector
          #   store attached to the thread.
          #
-          #   @return [Array<OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore>, nil]
+          #   @return [Array<OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore>, nil]
          optional :vector_stores,
                   -> {
                     OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore]
@@ -249,18 +249,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel

          # @!method initialize(vector_store_ids: nil, vector_stores: nil)
          #   Some parameter documentations has been truncated, see
-          #   {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch} for more details.
+          #   {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch} for more
+          #   details.
          #
          #   @param vector_store_ids [Array<String>] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/
          #
-          #   @param vector_stores [Array<OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore>] A helper to create a [vector store](https://platform.openai.com/docs/api-referen
+          #   @param vector_stores [Array<OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore>] A helper to create a [vector store](https://platform.openai.com/docs/api-referen

          class VectorStore < OpenAI::Internal::Type::BaseModel
            # @!attribute chunking_strategy
            #   The chunking strategy used to chunk the file(s). If not set, will use the `auto`
            #   strategy.
# - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, union: -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy @@ -287,10 +288,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} for - # more details. + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} + # for more details. # - # @param chunking_strategy [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad # @@ -299,7 +300,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -333,7 +334,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -346,11 +347,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. 
The default value is `400`. @@ -369,7 +370,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. @@ -379,7 +380,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] + # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index 1dbc9873..2af595d4 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -8,7 +8,7 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Beta::Thread } # @!attribute event @@ -24,13 +24,13 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadStreamEvent} for more details. + # {OpenAI::Models::Beta::ThreadStreamEvent} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 742aeb19..f2ddde6e 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -25,7 +25,7 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources }, nil?: true # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) @@ -34,19 +34,19 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -55,10 +55,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -70,13 +70,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for + # more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -89,7 +89,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more + # details. 
# # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index c110cbbd..bcb67b49 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -19,7 +19,7 @@ module Annotation variant :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation)] + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index e5b290d5..9eb54f5e 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -19,7 +19,7 @@ module AnnotationDelta variant :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Beta::Threads::FilePathDeltaAnnotation)] + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index bb8a4050..1d0a2a74 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -12,7 +12,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] + # @return [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] required :file_citation, -> { OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation } # @!attribute start_index @@ -39,7 +39,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] # # @param start_index [Integer] # @@ -47,7 +47,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. - # @see OpenAI::Beta::Threads::FileCitationAnnotation#file_citation + # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. 
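The annotation variants renamed above surface inside text content blocks on a retrieved message. A minimal consuming sketch, assuming `message` is a `Message` already fetched through the Threads API; the logging is illustrative only:

    message.content.each do |block|
      next unless block.type == :text

      block.text.annotations.each do |annotation|
        case annotation
        in OpenAI::Beta::Threads::FileCitationAnnotation
          # a quote lifted from a file the file_search tool searched
          puts "#{annotation.text} cites file #{annotation.file_citation.file_id}"
        in OpenAI::Beta::Threads::FilePathAnnotation
          # a reference to a file generated by the code_interpreter tool
          puts "#{annotation.text} points at file #{annotation.file_path.file_id}"
        end
      end
    end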
diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 4449922e..db18b6e4 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -24,7 +24,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] + # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] optional :file_citation, -> { OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } # @!attribute start_index @@ -47,7 +47,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] # # @param start_index [Integer] # @@ -55,7 +55,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. - # @see OpenAI::Beta::Threads::FileCitationDeltaAnnotation#file_citation + # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index 90055353..6c4e70c1 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -12,7 +12,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] + # @return [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] required :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation::FilePath } # @!attribute start_index @@ -38,7 +38,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] # # @param start_index [Integer] # @@ -46,7 +46,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Beta::Threads::FilePathAnnotation#file_path + # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. 
diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 659b9518..041f2a4a 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -24,7 +24,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] + # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] optional :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath } # @!attribute start_index @@ -46,7 +46,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] # # @param start_index [Integer] # @@ -54,7 +54,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Beta::Threads::FilePathDeltaAnnotation#file_path + # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index b71b6a5a..53cf02ed 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -17,21 +17,21 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @return [Symbol, OpenAI::Beta::Threads::ImageFile::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFile::Detail } # @!method initialize(file_id:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageFile} for more details. + # {OpenAI::Models::Beta::Threads::ImageFile} for more details. # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# - # @see OpenAI::Beta::Threads::ImageFile#detail + # @see OpenAI::Models::Beta::Threads::ImageFile#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 09da28f8..2ae8fe56 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -7,7 +7,7 @@ module Threads class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Beta::Threads::ImageFile] + # @return [OpenAI::Models::Beta::Threads::ImageFile] required :image_file, -> { OpenAI::Beta::Threads::ImageFile } # @!attribute type @@ -20,7 +20,7 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. # - # @param image_file [OpenAI::Beta::Threads::ImageFile] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 886ed307..b1d4c62e 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -9,7 +9,7 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @return [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFileDelta::Detail } # @!attribute file_id @@ -22,16 +22,16 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageFileDelta} for more details. + # {OpenAI::Models::Beta::Threads::ImageFileDelta} for more details. # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# - # @see OpenAI::Beta::Threads::ImageFileDelta#detail + # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 206e36dd..8657f912 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -19,7 +19,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Beta::Threads::ImageFileDelta, nil] + # @return [OpenAI::Models::Beta::Threads::ImageFileDelta, nil] optional :image_file, -> { OpenAI::Beta::Threads::ImageFileDelta } # @!method initialize(index:, image_file: nil, type: :image_file) @@ -28,7 +28,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param image_file [OpenAI::Beta::Threads::ImageFileDelta] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index c932079d..a78260eb 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -16,21 +16,21 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # - # @return [Symbol, OpenAI::Beta::Threads::ImageURL::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageURL} for more details. + # {OpenAI::Models::Beta::Threads::ImageURL} for more details. # # @param url [String] The external URL of the image, must be a supported image types: jpeg, jpg, png, # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # - # @see OpenAI::Beta::Threads::ImageURL#detail + # @see OpenAI::Models::Beta::Threads::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index 52d35a06..d2f1a28d 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -7,7 +7,7 @@ module Threads class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Beta::Threads::ImageURL] + # @return [OpenAI::Models::Beta::Threads::ImageURL] required :image_url, -> { OpenAI::Beta::Threads::ImageURL } # @!attribute type @@ -19,7 +19,7 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # References an image URL in the content of a message. 
# - # @param image_url [OpenAI::Beta::Threads::ImageURL] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index c8b10e43..43fba03f 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -9,7 +9,7 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # - # @return [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURLDelta::Detail } # @!attribute url @@ -21,16 +21,16 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, url: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageURLDelta} for more details. + # {OpenAI::Models::Beta::Threads::ImageURLDelta} for more details. # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # # @param url [String] The URL of the image, must be a supported image types: jpeg, jpg, png, gif, webp # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # - # @see OpenAI::Beta::Threads::ImageURLDelta#detail + # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index efe44526..72079ef1 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -19,7 +19,7 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Beta::Threads::ImageURLDelta, nil] + # @return [OpenAI::Models::Beta::Threads::ImageURLDelta, nil] optional :image_url, -> { OpenAI::Beta::Threads::ImageURLDelta } # @!method initialize(index:, image_url: nil, type: :image_url) @@ -27,7 +27,7 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param image_url [OpenAI::Beta::Threads::ImageURLDelta] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] # # @param type [Symbol, :image_url] Always `image_url`. end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 19af32cc..d0053ec3 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -23,7 +23,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute attachments # A list of files attached to the message, and the tools they were added to. 
 #
-        #   @return [Array<OpenAI::Beta::Threads::Message::Attachment>, nil]
+        #   @return [Array<OpenAI::Models::Beta::Threads::Message::Attachment>, nil]
        required :attachments,
                 -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment] },
                 nil?: true
@@ -37,7 +37,7 @@ class Message < OpenAI::Internal::Type::BaseModel
        # @!attribute content
        #   The content of the message in array of text and/or images.
        #
-        #   @return [Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock>]
+        #   @return [Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock>]
        required :content,
                 -> {
                   OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent]
@@ -58,7 +58,7 @@ class Message < OpenAI::Internal::Type::BaseModel
        # @!attribute incomplete_details
        #   On an incomplete message, details about why the message is incomplete.
        #
-        #   @return [OpenAI::Beta::Threads::Message::IncompleteDetails, nil]
+        #   @return [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil]
        required :incomplete_details, -> { OpenAI::Beta::Threads::Message::IncompleteDetails }, nil?: true

        # @!attribute metadata
@@ -81,7 +81,7 @@ class Message < OpenAI::Internal::Type::BaseModel
        # @!attribute role
        #   The entity that produced the message. One of `user` or `assistant`.
        #
-        #   @return [Symbol, OpenAI::Beta::Threads::Message::Role]
+        #   @return [Symbol, OpenAI::Models::Beta::Threads::Message::Role]
        required :role, enum: -> { OpenAI::Beta::Threads::Message::Role }

        # @!attribute run_id
@@ -96,7 +96,7 @@ class Message < OpenAI::Internal::Type::BaseModel
        #   The status of the message, which can be either `in_progress`, `incomplete`, or
        #   `completed`.
        #
-        #   @return [Symbol, OpenAI::Beta::Threads::Message::Status]
+        #   @return [Symbol, OpenAI::Models::Beta::Threads::Message::Status]
        required :status, enum: -> { OpenAI::Beta::Threads::Message::Status }

        # @!attribute thread_id
@@ -108,7 +108,7 @@ class Message < OpenAI::Internal::Type::BaseModel

        # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message")
        #   Some parameter documentations has been truncated, see
-        #   {OpenAI::Beta::Threads::Message} for more details.
+        #   {OpenAI::Models::Beta::Threads::Message} for more details.
        #
        #   Represents a message within a
        #   [thread](https://platform.openai.com/docs/api-reference/threads).
@@ -117,25 +117,25 @@ class Message < OpenAI::Internal::Type::BaseModel
        #
        #   @param assistant_id [String, nil] If applicable, the ID of the [assistant](https://platform.openai.com/docs/api-re
        #
-        #   @param attachments [Array<OpenAI::Beta::Threads::Message::Attachment>, nil] A list of files attached to the message, and the tools they were added to.
+        #   @param attachments [Array<OpenAI::Models::Beta::Threads::Message::Attachment>, nil] A list of files attached to the message, and the tools they were added to.
        #
        #   @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the message was completed.
        #
-        #   @param content [Array<OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock>] The content of the message in array of text and/or images.
+        #   @param content [Array<OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock>] The content of the message in array of text and/or images.
        #
        #   @param created_at [Integer] The Unix timestamp (in seconds) for when the message was created.
        #
        #   @param incomplete_at [Integer, nil] The Unix timestamp (in seconds) for when the message was marked as incomplete.
        #
-        #   @param incomplete_details [OpenAI::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete.
+        #   @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete.
# # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param role [Symbol, OpenAI::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. + # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. # # @param run_id [String, nil] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) associa # - # @param status [Symbol, OpenAI::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` + # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` # # @param thread_id [String] The [thread](https://platform.openai.com/docs/api-reference/threads) ID that thi # @@ -151,7 +151,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] @@ -160,7 +160,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union @@ -183,26 +183,26 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] end end - # @see OpenAI::Beta::Threads::Message#incomplete_details + # @see OpenAI::Models::Beta::Threads::Message#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason the message is incomplete. # - # @return [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] + # @return [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] required :reason, enum: -> { OpenAI::Beta::Threads::Message::IncompleteDetails::Reason } # @!method initialize(reason:) # On an incomplete message, details about why the message is incomplete. # - # @param reason [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. # The reason the message is incomplete. # - # @see OpenAI::Beta::Threads::Message::IncompleteDetails#reason + # @see OpenAI::Models::Beta::Threads::Message::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -219,7 +219,7 @@ module Reason # The entity that produced the message. One of `user` or `assistant`. # - # @see OpenAI::Beta::Threads::Message#role + # @see OpenAI::Models::Beta::Threads::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -233,7 +233,7 @@ module Role # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. 
# - # @see OpenAI::Beta::Threads::Message#status + # @see OpenAI::Models::Beta::Threads::Message#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index 295d6858..f0771098 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -24,7 +24,7 @@ module MessageContent variant :refusal, -> { OpenAI::Beta::Threads::RefusalContentBlock } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock)] + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index a6b04dc3..908eb4d7 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -24,7 +24,7 @@ module MessageContentDelta variant :image_url, -> { OpenAI::Beta::Threads::ImageURLDeltaBlock } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::ImageFileDeltaBlock, OpenAI::Beta::Threads::TextDeltaBlock, OpenAI::Beta::Threads::RefusalDeltaBlock, OpenAI::Beta::Threads::ImageURLDeltaBlock)] + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 93fd228a..254bd67f 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -21,7 +21,7 @@ module MessageContentPartParam variant :text, -> { OpenAI::Beta::Threads::TextContentBlockParam } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam)] + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 223feced..25a63182 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -12,7 +12,7 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::Threads::MessageCreateParams::Content } # @!attribute role @@ -23,13 +23,13 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
# - # @return [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] required :role, enum: -> { OpenAI::Beta::Threads::MessageCreateParams::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::MessageCreateParams::Attachment] @@ -51,11 +51,11 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -69,10 +69,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { OpenAI::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } + variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -105,7 +105,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool] @@ -114,7 +114,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
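# A minimal usage sketch for the message-creation parameters documented above,
# assuming a configured `client`; the thread and file IDs below are
# hypothetical placeholders, not values from this patch.
params = {
  content: "Summarize the attached report.",
  role: :user,
  attachments: [
    {file_id: "file-abc123", tools: [{type: :code_interpreter}]}
  ]
}
# With the SDK's generated resources this would be passed along the lines of
# `client.beta.threads.messages.create("thread_abc123", **params)`.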
module Tool extend OpenAI::Internal::Type::Union @@ -140,7 +140,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 831356c3..8d151261 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -8,26 +8,26 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the message in array of text and/or images. # - # @return [Array, nil] + # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContentDelta] } # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # - # @return [Symbol, OpenAI::Beta::Threads::MessageDelta::Role, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role, nil] optional :role, enum: -> { OpenAI::Beta::Threads::MessageDelta::Role } # @!method initialize(content: nil, role: nil) # The delta containing the fields that have changed on the Message. # - # @param content [Array] The content of the message in array of text and/or images. + # @param content [Array] The content of the message in array of text and/or images. # - # @param role [Symbol, OpenAI::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. # The entity that produced the message. One of `user` or `assistant`. # - # @see OpenAI::Beta::Threads::MessageDelta#role + # @see OpenAI::Models::Beta::Threads::MessageDelta#role module Role extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 510cd5cf..14190d67 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -14,7 +14,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the Message. # - # @return [OpenAI::Beta::Threads::MessageDelta] + # @return [OpenAI::Models::Beta::Threads::MessageDelta] required :delta, -> { OpenAI::Beta::Threads::MessageDelta } # @!attribute object @@ -29,7 +29,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the message, which can be referenced in API endpoints. # - # @param delta [OpenAI::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. + # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. # # @param object [Symbol, :"thread.message.delta"] The object type, which is always `thread.message.delta`. 
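# A hedged sketch of consuming the delta event documented above from a stream
# of events (`events` is a hypothetical Enumerable; the text-delta accessors
# are assumptions based on the delta block models elsewhere in this series).
def collect_message_text(events)
  buffer = +""
  events.each do |event|
    next unless event.is_a?(OpenAI::Models::Beta::Threads::MessageDeltaEvent)
    Array(event.delta.content).each do |block|
      buffer << block.text.value.to_s if block.respond_to?(:text) && block.text
    end
  end
  buffer
end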
end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 6ffe6655..1358425b 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -38,7 +38,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::MessageListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::MessageListParams::Order } # @!attribute run_id @@ -57,7 +57,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param run_id [String] Filter messages by the run ID that generated them. # diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index 66fbe931..7f1eee07 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -17,7 +17,7 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function definition. # - # @return [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] + # @return [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] required :function, -> { OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function } # @!attribute type @@ -29,17 +29,18 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RequiredActionFunctionToolCall} for more details. + # {OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall} for more + # details. # # Tool call objects # # @param id [String] The ID of the tool call. This ID must be referenced when you submit the tool out # - # @param function [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. + # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. # # @param type [Symbol, :function] The type of tool call the output is required for. For now, this is always `funct - # @see OpenAI::Beta::Threads::RequiredActionFunctionToolCall#function + # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments that the model expects you to pass to the function. diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index a4c6345e..2533a7a1 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -56,7 +56,7 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on why the run is incomplete. 
Will be `null` if the run is not # incomplete. # - # @return [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] + # @return [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] required :incomplete_details, -> { OpenAI::Beta::Threads::Run::IncompleteDetails }, nil?: true # @!attribute instructions @@ -70,7 +70,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!attribute last_error # The last error associated with this run. Will be `null` if there are no errors. # - # @return [OpenAI::Beta::Threads::Run::LastError, nil] + # @return [OpenAI::Models::Beta::Threads::Run::LastError, nil] required :last_error, -> { OpenAI::Beta::Threads::Run::LastError }, nil?: true # @!attribute max_completion_tokens @@ -124,7 +124,7 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @return [OpenAI::Beta::Threads::Run::RequiredAction, nil] + # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] required :required_action, -> { OpenAI::Beta::Threads::Run::RequiredAction }, nil?: true # @!attribute response_format @@ -149,7 +149,7 @@ class Run < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] required :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute started_at @@ -163,7 +163,7 @@ class Run < OpenAI::Internal::Type::BaseModel # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. # - # @return [Symbol, OpenAI::Beta::Threads::RunStatus] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunStatus] required :status, enum: -> { OpenAI::Beta::Threads::RunStatus } # @!attribute thread_id @@ -182,7 +182,7 @@ class Run < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] required :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools @@ -190,21 +190,21 @@ class Run < OpenAI::Internal::Type::BaseModel # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] + # @return [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true # @!attribute usage # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). 
# - # @return [OpenAI::Beta::Threads::Run::Usage, nil] + # @return [OpenAI::Models::Beta::Threads::Run::Usage, nil] required :usage, -> { OpenAI::Beta::Threads::Run::Usage }, nil?: true # @!attribute temperature @@ -221,7 +221,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run} for more details. + # {OpenAI::Models::Beta::Threads::Run} for more details. # # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -240,11 +240,11 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run failed. # - # @param incomplete_details [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet + # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet # # @param instructions [String] The instructions that the [assistant](https://platform.openai.com/docs/api-refer # - # @param last_error [OpenAI::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. + # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. # # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens specified to have been used over the cou # @@ -256,23 +256,23 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param required_action [OpenAI::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action + # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param started_at [Integer, nil] The Unix timestamp (in seconds) for when the run was started. 
# - # @param status [Symbol, OpenAI::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac + # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe + # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe # - # @param truncation_strategy [OpenAI::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # - # @param usage [OpenAI::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not + # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not # # @param temperature [Float, nil] The sampling temperature used for this run. If not set, defaults to 1. # @@ -280,28 +280,28 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param object [Symbol, :"thread.run"] The object type, which is always `thread.run`. - # @see OpenAI::Beta::Threads::Run#incomplete_details + # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # - # @return [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Beta::Threads::Run::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run::IncompleteDetails} for more details. + # {OpenAI::Models::Beta::Threads::Run::IncompleteDetails} for more details. # # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. # - # @param reason [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. 
# - # @see OpenAI::Beta::Threads::Run::IncompleteDetails#reason + # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -313,12 +313,12 @@ module Reason end end - # @see OpenAI::Beta::Threads::Run#last_error + # @see OpenAI::Models::Beta::Threads::Run#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # - # @return [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] required :code, enum: -> { OpenAI::Beta::Threads::Run::LastError::Code } # @!attribute message @@ -330,13 +330,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # The last error associated with this run. Will be `null` if there are no errors. # - # @param code [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # # @param message [String] A human-readable description of the error. # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # - # @see OpenAI::Beta::Threads::Run::LastError#code + # @see OpenAI::Models::Beta::Threads::Run::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -349,12 +349,12 @@ module Code end end - # @see OpenAI::Beta::Threads::Run#required_action + # @see OpenAI::Models::Beta::Threads::Run#required_action class RequiredAction < OpenAI::Internal::Type::BaseModel # @!attribute submit_tool_outputs # Details on the tool outputs needed for this run to continue. # - # @return [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] + # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] required :submit_tool_outputs, -> { OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs @@ -370,16 +370,16 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @param submit_tool_outputs [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. + # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. # # @param type [Symbol, :submit_tool_outputs] For now, this is always `submit_tool_outputs`. - # @see OpenAI::Beta::Threads::Run::RequiredAction#submit_tool_outputs + # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # A list of the relevant tool calls. # - # @return [Array] + # @return [Array] required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] @@ -388,11 +388,11 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!method initialize(tool_calls:) # Details on the tool outputs needed for this run to continue. # - # @param tool_calls [Array] A list of the relevant tool calls. + # @param tool_calls [Array] A list of the relevant tool calls. 
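# A short, hedged illustration of reading the structure documented above off a
# run object; the attribute names come from the model definitions in this file.
def pending_tool_calls(run)
  return [] unless run.status == :requires_action && run.required_action
  run.required_action.submit_tool_outputs.tool_calls
end
# Each element is a RequiredActionFunctionToolCall whose `function.arguments`
# is a JSON string describing the call the model expects you to execute.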
end end - # @see OpenAI::Beta::Threads::Run#truncation_strategy + # @see OpenAI::Models::Beta::Threads::Run#truncation_strategy class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!attribute type # The truncation strategy to use for the thread. The default is `auto`. If set to @@ -400,7 +400,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @return [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] required :type, enum: -> { OpenAI::Beta::Threads::Run::TruncationStrategy::Type } # @!attribute last_messages @@ -412,12 +412,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, last_messages: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run::TruncationStrategy} for more details. + # {OpenAI::Models::Beta::Threads::Run::TruncationStrategy} for more details. # # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @param type [Symbol, OpenAI::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context @@ -426,7 +426,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @see OpenAI::Beta::Threads::Run::TruncationStrategy#type + # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type module Type extend OpenAI::Internal::Type::Enum @@ -438,7 +438,7 @@ module Type end end - # @see OpenAI::Beta::Threads::Run#usage + # @see OpenAI::Models::Beta::Threads::Run#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run. diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 773bbb7b..9efd3f28 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -28,7 +28,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -43,7 +43,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute additional_messages # Adds additional messages to the thread before creating the run. # - # @return [Array, nil] + # @return [Array, nil] optional :additional_messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage] @@ -95,7 +95,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # model associated with the assistant. 
If not, the model associated with the # assistant will be used. # - # @return [String, Symbol, OpenAI::ChatModel, nil] + # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Beta::Threads::RunCreateParams::Model }, nil?: true # @!attribute parallel_tool_calls @@ -114,7 +114,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -139,7 +139,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -159,14 +159,14 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] @@ -187,7 +187,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @return [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] optional :truncation_strategy, -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy }, nil?: true @@ -198,11 +198,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param additional_instructions [String, nil] Appends additional instructions at the end of the instructions for the run. This # - # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. 
# # @param instructions [String, nil] Overrides the [instructions](https://platform.openai.com/docs/api-reference/assi # @@ -212,23 +212,23 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -236,7 +236,7 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content @@ -250,13 +250,13 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
# - # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] required :role, enum: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] @@ -276,19 +276,20 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage} for more details. + # {OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage} for more + # details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#content + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#content module Content extend OpenAI::Internal::Type::Union @@ -296,12 +297,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray - } + variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -315,7 +314,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#role + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -336,7 +335,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. 
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] @@ -345,7 +344,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union @@ -371,7 +370,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] end end end @@ -389,7 +388,7 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -399,7 +398,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. # - # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] required :type, enum: -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type } # @!attribute last_messages @@ -411,12 +410,13 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, last_messages: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy} for more details. + # {OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy} for more + # details. # # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @param type [Symbol, OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context @@ -425,7 +425,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # the thread. When set to `auto`, messages in the middle of the thread will be # dropped to fit the context length of the model, `max_prompt_tokens`. 
# - # @see OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy#type + # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 0399613f..5f50a4bb 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -38,7 +38,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::RunListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::RunListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -51,7 +51,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 85f8acff..d37572fd 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -19,7 +19,7 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!attribute tool_outputs # A list of tools for which the outputs are being submitted. # - # @return [Array] + # @return [Array] required :tool_outputs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] @@ -28,7 +28,7 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!method initialize(thread_id:, tool_outputs:, request_options: {}) # @param thread_id [String] # - # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -48,7 +48,7 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # @!method initialize(output: nil, tool_call_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more + # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more # details. # # @param output [String] The output of the tool call to be submitted to continue the run. 
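# A hedged sketch of building the `tool_outputs` array documented above.
# `execute_tool` is a hypothetical local dispatcher, and the resource call at
# the end is indicative of the generated method rather than a verbatim
# signature.
require "json"

def tool_outputs_for(tool_calls)
  tool_calls.map do |call|
    args = JSON.parse(call.function.arguments)
    {tool_call_id: call.id, output: execute_tool(call.function.name, args).to_s}
  end
end

def execute_tool(name, args)
  # hypothetical dispatcher; replace with real tool implementations
  "no handler for #{name} (#{args.keys.join(', ')})"
end

# outputs = tool_outputs_for(pending_tool_calls(run))
# client.beta.threads.runs.submit_tool_outputs(run.id, thread_id: run.thread_id, tool_outputs: outputs)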
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index 8ef2e615..b00adb80 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -20,17 +20,17 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] optional :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } # @!method initialize(index:, image: nil, type: :image) # @param index [Integer] The index of the output in the outputs array. # - # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage#image + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -41,8 +41,8 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for + # more details. # # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 4c7b9f6a..708b96f8 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -15,7 +15,7 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] required :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter @@ -30,17 +30,17 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code_interpreter:, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. # # Details of the Code Interpreter tool call the run step was involved in. # # @param id [String] The ID of the tool call. # - # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. 
This is always going to be `code_interpreter` for this ty - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -53,7 +53,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array] + # @return [Array] required :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] @@ -61,14 +61,14 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, outputs:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} + # for more details. # # The Code Interpreter tool call definition. # # @param input [String] The input to the Code Interpreter tool call. # - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -111,7 +111,7 @@ class Logs < OpenAI::Internal::Type::BaseModel class Image < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] required :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image @@ -124,11 +124,11 @@ class Image < OpenAI::Internal::Type::BaseModel required :type, const: :image # @!method initialize(image:, type: :image) - # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -139,7 +139,7 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} # for more details. 
# # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image @@ -147,7 +147,7 @@ class Image < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 98ceaa6b..b123ac62 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -28,13 +28,14 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more + # details. # # Details of the Code Interpreter tool call the run step was involved in. # @@ -42,11 +43,11 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the tool call. # - # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. This is always going to be `code_interpreter` for this ty - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -59,7 +60,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array, nil] + # @return [Array, nil] optional :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] @@ -67,14 +68,14 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(input: nil, outputs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} for - # more details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} + # for more details. # # The Code Interpreter tool call definition. 
# # @param input [String] The input to the Code Interpreter tool call. # - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -88,7 +89,7 @@ module Output variant :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index b8d0d149..68036d67 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -15,7 +15,7 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute file_search # For now, this is always going to be an empty object. # - # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] + # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] required :file_search, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch } # @!attribute type @@ -27,27 +27,27 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, file_search:, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall} for more details. # # @param id [String] The ID of the tool call object. # - # @param file_search [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. + # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. # # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall#file_search + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # The ranking options for the file search. # - # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } # @!attribute results # The results of the file search. # - # @return [Array, nil] + # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] @@ -56,17 +56,17 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. # - # @param ranking_options [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. 
+ # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. # - # @param results [Array] The results of the file search. + # @param results [Array] The results of the file search. - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] required :ranker, enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker @@ -81,19 +81,19 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker:, score_threshold:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} # for more details. # # The ranking options for the file search. # - # @param ranker [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank + # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank # # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum @@ -129,7 +129,7 @@ class Result < OpenAI::Internal::Type::BaseModel # The content of the result that was found. The content is only included if # requested via the include query parameter. # - # @return [Array, nil] + # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] @@ -137,8 +137,8 @@ class Result < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, file_name:, score:, content: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} + # for more details. # # A result instance of the file search. # @@ -148,7 +148,7 @@ class Result < OpenAI::Internal::Type::BaseModel # # @param score [Float] The score of the result. All values must be a floating point number between 0 an # - # @param content [Array] The content of the result that was found. The content is only included if reques + # @param content [Array] The content of the result that was found. 
The content is only included if reques class Content < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -160,7 +160,7 @@ class Content < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the content. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type @@ -169,11 +169,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @!method initialize(text: nil, type: nil) # @param text [String] The text content of the file. # - # @param type [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. # The type of the content. # - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index a0896ee1..13c9f547 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -33,7 +33,7 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(file_search:, index:, id: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. # # @param file_search [Object] For now, this is always going to be an empty object. # diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index ce1e3ad3..de633613 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -15,7 +15,7 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] + # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] required :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall::Function } # @!attribute type @@ -27,15 +27,15 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall} for more details. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. 
This is always going to be `function` for this type of to - # @see OpenAI::Beta::Threads::Runs::FunctionToolCall#function + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. @@ -59,7 +59,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:, output:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCall::Function} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function} for more + # details. # # The definition of the function that was called. # diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 9dc353ce..ad8391bb 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -28,22 +28,22 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] optional :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function } # @!method initialize(index:, id: nil, function: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta} for more details. # # @param index [Integer] The index of the tool call in the tool calls array. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to - # @see OpenAI::Beta::Threads::Runs::FunctionToolCallDelta#function + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. @@ -67,7 +67,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil, output: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more + # details. # # The definition of the function that was called. 
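Since `arguments` arrives as a single JSON-encoded string, a caller typically parses it before dispatching. A hedged sketch (`tool_call` is an assumed `FunctionToolCall`; `output` is nil until results are submitted):

    require "json"

    fn = tool_call.function # OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function
    args = JSON.parse(fn.arguments) # e.g. {"city" => "Berlin"}
    puts("#{fn.name}(#{args.inspect}) => #{fn.output.inspect}")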
# diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index f39e253b..0b5b6ac9 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -8,7 +8,7 @@ module Runs class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] required :message_creation, -> { OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation } @@ -21,11 +21,11 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(message_creation:, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. - # @see OpenAI::Beta::Threads::Runs::MessageCreationStepDetails#message_creation + # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 2dc26909..fde3abeb 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -56,7 +56,7 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @return [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] required :last_error, -> { OpenAI::Beta::Threads::Runs::RunStep::LastError }, nil?: true # @!attribute metadata @@ -87,13 +87,13 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] required :status, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Status } # @!attribute step_details # The details of the run step. # - # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] + # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] required :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStep::StepDetails } # @!attribute thread_id @@ -106,19 +106,19 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of run step, which can be either `message_creation` or `tool_calls`. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] required :type, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Type } # @!attribute usage # Usage statistics related to the run step. 
This value will be `null` while the # run step's status is `in_progress`. # - # @return [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] required :usage, -> { OpenAI::Beta::Threads::Runs::RunStep::Usage }, nil?: true # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::RunStep} for more details. + # {OpenAI::Models::Beta::Threads::Runs::RunStep} for more details. # # Represents a step in execution of a run. # @@ -136,30 +136,30 @@ class RunStep < OpenAI::Internal::Type::BaseModel # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run step failed. # - # @param last_error [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err + # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param run_id [String] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that th # - # @param status [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai + # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai # - # @param step_details [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. + # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param type [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. # - # @param usage [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru + # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru # # @param object [Symbol, :"thread.run.step"] The object type, which is always `thread.run.step`. - # @see OpenAI::Beta::Threads::Runs::RunStep#last_error + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. 
# - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] required :code, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::LastError::Code } # @!attribute message @@ -172,13 +172,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @param code [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. # # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. # - # @see OpenAI::Beta::Threads::Runs::RunStep::LastError#code + # @see OpenAI::Models::Beta::Threads::Runs::RunStep::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -193,7 +193,7 @@ module Code # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @see OpenAI::Beta::Threads::Runs::RunStep#status + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status module Status extend OpenAI::Internal::Type::Enum @@ -209,7 +209,7 @@ module Status # The details of the run step. # - # @see OpenAI::Beta::Threads::Runs::RunStep#step_details + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#step_details module StepDetails extend OpenAI::Internal::Type::Union @@ -222,12 +222,12 @@ module StepDetails variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallsStepDetails } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] end # The type of run step, which can be either `message_creation` or `tool_calls`. # - # @see OpenAI::Beta::Threads::Runs::RunStep#type + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#type module Type extend OpenAI::Internal::Type::Enum @@ -238,7 +238,7 @@ module Type # @return [Array] end - # @see OpenAI::Beta::Threads::Runs::RunStep#usage + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 4666af0b..3dd76094 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -9,17 +9,17 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute step_details # The details of the run step. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject, nil] optional :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails } # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. 
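Putting the `StepDetails` union and `Status` enum together, a consumer might branch like this (sketch only; `run_step` is assumed):

    case run_step.step_details
    when OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails
      puts("created message #{run_step.step_details.message_creation.message_id}")
    when OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails
      puts("ran #{run_step.step_details.tool_calls.length} tool call(s)")
    end
    puts("step #{run_step.id}: #{run_step.status}") # e.g. :in_progress, :completed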
# - # @param step_details [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. + # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. # The details of the run step. # - # @see OpenAI::Beta::Threads::Runs::RunStepDelta#step_details + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDelta#step_details module StepDetails extend OpenAI::Internal::Type::Union @@ -32,7 +32,7 @@ module StepDetails variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallDeltaObject } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index 19c633c5..abca9d4b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -15,7 +15,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the run step. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDelta] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] required :delta, -> { OpenAI::Beta::Threads::Runs::RunStepDelta } # @!attribute object @@ -30,7 +30,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the run step, which can be referenced in API endpoints. # - # @param delta [OpenAI::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. + # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. # # @param object [Symbol, :"thread.run.step.delta"] The object type, which is always `thread.run.step.delta`. end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 1c617d09..5cc90d66 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -14,7 +14,7 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] optional :message_creation, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation @@ -23,11 +23,11 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. 
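For streamed runs, the same kind of union appears on the delta. A sketch, assuming `event` is an `OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent`:

    case event.delta.step_details
    when OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta
      puts("step #{event.id}: message creation progressed")
    when OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject
      puts("step #{event.id}: tool call(s) progressed")
    when nil
      # `step_details` is optional on a delta, so an event may carry none
    end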
- # @see OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index e5251302..2a263bdc 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -42,7 +42,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -57,7 +57,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::Runs::StepListParams::Order } # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) @@ -70,11 +70,11 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index f6238fd8..349177b1 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -29,7 +29,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -41,7 +41,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param run_id [String] # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. 
Currently the only suppo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 4140ec79..eb99ad15 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -19,7 +19,7 @@ module ToolCall variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Beta::Threads::Runs::FunctionToolCall)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index ab51e0a3..14cdb2c1 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -19,7 +19,7 @@ module ToolCallDelta variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Beta::Threads::Runs::FunctionToolCallDelta)] + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 668b9ec8..c6900dcc 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -17,17 +17,17 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCallDelta] } # @!method initialize(tool_calls: nil, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::ToolCallDeltaObject} for more details. + # {OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 368ce9b5..9adca268 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -11,7 +11,7 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
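The list params above translate to keyword arguments on the steps resource. A hypothetical call sketch (the client accessor chain is an assumption; the keywords mirror `StepListParams`):

    page = client.beta.threads.runs.steps.list(
      "run_abc123",
      thread_id: "thread_abc123",
      limit: 20,
      order: :desc # `asc` or `desc`, per StepListParams::Order
    )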
# - # @return [Array] + # @return [Array] required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCall] @@ -25,11 +25,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(tool_calls:, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::ToolCallsStepDetails} for more details. + # {OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index 180b3abb..a0247200 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -7,7 +7,7 @@ module Threads class Text < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array] + # @return [Array] required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Annotation] @@ -20,7 +20,7 @@ class Text < OpenAI::Internal::Type::BaseModel required :value, String # @!method initialize(annotations:, value:) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 73b0bd54..c1c391fa 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -7,7 +7,7 @@ module Threads class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Beta::Threads::Text] + # @return [OpenAI::Models::Beta::Threads::Text] required :text, -> { OpenAI::Beta::Threads::Text } # @!attribute type @@ -19,7 +19,7 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :text) # The text content that is part of a message. # - # @param text [OpenAI::Beta::Threads::Text] + # @param text [OpenAI::Models::Beta::Threads::Text] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 63ad3975..ef33693b 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -7,7 +7,7 @@ module Threads class TextDelta < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::AnnotationDelta] } @@ -18,7 +18,7 @@ class TextDelta < OpenAI::Internal::Type::BaseModel optional :value, String # @!method initialize(annotations: nil, value: nil) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. 
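Reading a `Text` value and its annotations is then straightforward (sketch; `content_block` is an assumed `TextContentBlock`):

    text = content_block.text # OpenAI::Models::Beta::Threads::Text
    puts(text.value)
    text.annotations.each { |annotation| p(annotation) }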
end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 126aefbd..f55a50f7 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -19,7 +19,7 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Beta::Threads::TextDelta, nil] + # @return [OpenAI::Models::Beta::Threads::TextDelta, nil] optional :text, -> { OpenAI::Beta::Threads::TextDelta } # @!method initialize(index:, text: nil, type: :text) @@ -27,7 +27,7 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param text [OpenAI::Beta::Threads::TextDelta] + # @param text [OpenAI::Models::Beta::Threads::TextDelta] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 8fb216ee..6f5b922c 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -17,7 +17,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # A list of chat completion choices. Can be more than one if `n` is greater # than 1. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice] } # @!attribute created @@ -57,7 +57,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletion::ServiceTier }, nil?: true # @!attribute system_fingerprint @@ -72,29 +72,29 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletion} for more details. + # {OpenAI::Models::Chat::ChatCompletion} for more details. # # Represents a chat completion response returned by model, based on the provided # input. # # @param id [String] A unique identifier for the chat completion. # - # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 + # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. # # @param model [String] The model used for the chat completion. # - # @param service_tier [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. 
# - # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. + # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. # # @param object [Symbol, :"chat.completion"] The object type, which is always `chat.completion`. @@ -107,7 +107,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @return [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason } # @!attribute index @@ -119,26 +119,26 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] + # @return [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] required :logprobs, -> { OpenAI::Chat::ChatCompletion::Choice::Logprobs }, nil?: true # @!attribute message # A chat completion message generated by the model. # - # @return [OpenAI::Chat::ChatCompletionMessage] + # @return [OpenAI::Models::Chat::ChatCompletionMessage] required :message, -> { OpenAI::Chat::ChatCompletionMessage } # @!method initialize(finish_reason:, index:, logprobs:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletion::Choice} for more details. + # {OpenAI::Models::Chat::ChatCompletion::Choice} for more details. # - # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. # - # @param logprobs [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. # - # @param message [OpenAI::Chat::ChatCompletionMessage] A chat completion message generated by the model. + # @param message [OpenAI::Models::Chat::ChatCompletionMessage] A chat completion message generated by the model. # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -147,7 +147,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @see OpenAI::Chat::ChatCompletion::Choice#finish_reason + # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -161,12 +161,12 @@ module FinishReason # @return [Array] end - # @see OpenAI::Chat::ChatCompletion::Choice#logprobs + # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. 
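A short consumption sketch for the completion shape above (`completion` is assumed; `total_tokens` follows the API's standard usage object):

    choice = completion.choices.first # OpenAI::Models::Chat::ChatCompletion::Choice
    puts(choice.message.content) if choice.finish_reason == :stop
    puts("total tokens: #{completion.usage&.total_tokens}")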
# - # @return [Array, nil] + # @return [Array, nil] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -174,7 +174,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -182,9 +182,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -206,7 +206,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Chat::ChatCompletion#service_tier + # @see OpenAI::Models::Chat::ChatCompletion#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 2f9fbe2b..338351a8 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -14,14 +14,14 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio }, nil?: true # @!attribute content # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. # - # @return [String, Array, nil] + # @return [String, Array, nil] optional :content, union: -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Content @@ -34,7 +34,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, nil?: true @@ -55,7 +55,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # The tool calls generated by the model, such as function calls. 
# - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] @@ -63,25 +63,25 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam} for more details. # # Messages sent by the model in response to user messages. # - # @param audio [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. + # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. # - # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function + # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function # - # @param function_call [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param name [String] An optional name for the participant. Provides the model information to differen # # @param refusal [String, nil] The refusal message by the assistant. # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the messages author, in this case `assistant`. - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#audio + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for a previous audio response from the model. @@ -91,7 +91,8 @@ class Audio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio} for more details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio} for more + # details. # # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). @@ -102,7 +103,7 @@ class Audio < OpenAI::Internal::Type::BaseModel # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. # - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -110,7 +111,7 @@ module Content variant String # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. - variant -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } + variant -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). 
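In request params these models are commonly written as plain hashes. An illustrative assistant turn carrying a prior tool call (all IDs and values are made up):

    assistant_message = {
      role: :assistant,
      content: nil, # permitted when `tool_calls` is present
      tool_calls: [
        {
          id: "call_abc123",
          type: :function,
          function: {name: "get_weather", arguments: '{"city":"Berlin"}'}
        }
      ]
    }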
@@ -125,11 +126,11 @@ module ArrayOfContentPart variant :refusal, -> { OpenAI::Chat::ChatCompletionContentPartRefusal } # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal)] + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] end # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ArrayOfContentPartArray = @@ -140,7 +141,7 @@ module ArrayOfContentPart # @deprecated # - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#function_call + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -159,8 +160,8 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for + # more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 861309b5..fae460ec 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -32,7 +32,7 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, data:, expires_at:, transcript:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAudio} for more details. + # {OpenAI::Models::Chat::ChatCompletionAudio} for more details. # # If the audio output modality is requested, this object contains data about the # audio response from the model. diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 3ab3a1d0..46f1463f 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -8,32 +8,32 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @return [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] required :format_, enum: -> { OpenAI::Chat::ChatCompletionAudioParam::Format }, api_name: :format # @!attribute voice # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # - # @return [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] + # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice } # @!method initialize(format_:, voice:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAudioParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionAudioParam} for more details. # # Parameters for audio output. 
Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param format_ [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, # - # @param voice [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are + # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @see OpenAI::Chat::ChatCompletionAudioParam#format_ + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ module Format extend OpenAI::Internal::Type::Enum @@ -51,33 +51,33 @@ module Format # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. # - # @see OpenAI::Chat::ChatCompletionAudioParam#voice + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice extend OpenAI::Internal::Type::Union variant String - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ALLOY } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ASH } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::BALLAD } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::CORAL } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ECHO } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::FABLE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ONYX } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::NOVA } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SAGE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SHIMMER } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::VERSE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } # @!method self.variants # @return [Array(String, Symbol)] diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 8f94cd1e..9dfe771e 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -15,7 +15,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # greater than 1. 
Can also be empty for the last chunk if you set # `stream_options: {"include_usage": true}`. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice] } # @!attribute created @@ -56,7 +56,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletionChunk::ServiceTier }, nil?: true # @!attribute system_fingerprint @@ -76,12 +76,12 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # **NOTE:** If the stream is interrupted or cancelled, you may not receive the # final usage chunk which contains the total token usage for the request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage }, nil?: true # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk} for more details. + # {OpenAI::Models::Chat::ChatCompletionChunk} for more details. # # Represents a streamed chunk of a chat completion response returned by the model, # based on the provided input. @@ -89,17 +89,17 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # # @param id [String] A unique identifier for the chat completion. Each chunk has the same ID. # - # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is + # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. Each ch # # @param model [String] The model to generate the completion. # - # @param service_tier [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. # - # @param usage [OpenAI::CompletionUsage, nil] An optional field that will only be present when you set + # @param usage [OpenAI::Models::CompletionUsage, nil] An optional field that will only be present when you set # # @param object [Symbol, :"chat.completion.chunk"] The object type, which is always `chat.completion.chunk`. @@ -107,7 +107,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta # A chat completion delta generated by streamed model responses. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] required :delta, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta } # @!attribute finish_reason @@ -118,7 +118,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. 
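A typical way to fold streamed chunks back into a message, as a sketch (`stream` is assumed to yield `OpenAI::Models::Chat::ChatCompletionChunk` values):

    content = +""
    stream.each do |chunk|
      delta = chunk.choices.first&.delta # `choices` can be empty on the final usage chunk
      content << delta.content.to_s if delta
    end
    puts(content)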
# - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason @@ -134,22 +134,22 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] optional :logprobs, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice} for more details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice} for more details. # - # @param delta [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. + # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. # - # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. # - # @param logprobs [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. - # @see OpenAI::Chat::ChatCompletionChunk::Choice#delta + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the chunk message. @@ -163,7 +163,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } # @!attribute refusal @@ -175,12 +175,12 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the author of this message. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] optional :role, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role } # @!attribute tool_calls # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] @@ -188,23 +188,23 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta} for more details. 
+ # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta} for more details. # # A chat completion delta generated by streamed model responses. # # @param content [String, nil] The contents of the chunk message. # - # @param function_call [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param refusal [String, nil] The refusal message generated by the model. # - # @param role [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. + # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. # - # @param tool_calls [Array] + # @param tool_calls [Array] # @deprecated # - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#function_call + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -223,8 +223,8 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for + # more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. @@ -236,7 +236,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # The role of the author of this message. # - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#role + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#role module Role extend OpenAI::Internal::Type::Enum @@ -264,13 +264,13 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] optional :function, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } # @!attribute type # The type of the tool. Currently, only `function` is supported. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] optional :type, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } # @!method initialize(index:, id: nil, function: nil, type: nil) @@ -278,11 +278,11 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] + # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] # - # @param type [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. + # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. 
- # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -301,8 +301,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} + # for more details. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma # @@ -311,7 +311,7 @@ class Function < OpenAI::Internal::Type::BaseModel # The type of the tool. Currently, only `function` is supported. # - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type module Type extend OpenAI::Internal::Type::Enum @@ -330,7 +330,7 @@ module Type # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @see OpenAI::Chat::ChatCompletionChunk::Choice#finish_reason + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -344,12 +344,12 @@ module FinishReason # @return [Array] end - # @see OpenAI::Chat::ChatCompletionChunk::Choice#logprobs + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -357,7 +357,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -365,9 +365,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -389,7 +389,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. 
# - # @see OpenAI::Chat::ChatCompletionChunk#service_tier + # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 64a02cf0..ba2d6918 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -25,7 +25,7 @@ module ChatCompletionContentPart class File < OpenAI::Internal::Type::BaseModel # @!attribute file # - # @return [OpenAI::Chat::ChatCompletionContentPart::File::File] + # @return [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] required :file, -> { OpenAI::Chat::ChatCompletionContentPart::File::File } # @!attribute type @@ -38,11 +38,11 @@ class File < OpenAI::Internal::Type::BaseModel # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text # generation. # - # @param file [OpenAI::Chat::ChatCompletionContentPart::File::File] + # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] # # @param type [Symbol, :file] The type of the content part. Always `file`. - # @see OpenAI::Chat::ChatCompletionContentPart::File#file + # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel # @!attribute file_data # The base64 encoded file data, used when passing the file to the model as a @@ -65,7 +65,7 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPart::File::File} for more details. + # {OpenAI::Models::Chat::ChatCompletionContentPart::File::File} for more details. # # @param file_data [String] The base64 encoded file data, used when passing the file to the model # @@ -76,7 +76,7 @@ class File < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File)] + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 9288c5d4..f5971945 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] + # @return [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] required :image_url, -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL } # @!attribute type @@ -18,11 +18,11 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). 
# - # @param image_url [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] + # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. - # @see OpenAI::Chat::ChatCompletionContentPartImage#image_url + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. @@ -34,21 +34,22 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # - # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPartImage::ImageURL} for more details. + # {OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL} for more + # details. # # @param url [String] Either a URL of the image or the base64 encoded image data. # - # @param detail [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: + # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # - # @see OpenAI::Chat::ChatCompletionContentPartImage::ImageURL#detail + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 86a9a7d5..d8f86fb2 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute input_audio # - # @return [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @return [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] required :input_audio, -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio } # @!attribute type @@ -18,11 +18,11 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(input_audio:, type: :input_audio) # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). # - # @param input_audio [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] # # @param type [Symbol, :input_audio] The type of the content part. Always `input_audio`. 
- # @see OpenAI::Chat::ChatCompletionContentPartInputAudio#input_audio + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64 encoded audio data. @@ -33,23 +33,23 @@ class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the encoded audio data. Currently supports "wav" and "mp3". # - # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] required :format_, enum: -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, api_name: :format # @!method initialize(data:, format_:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more + # {OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more # details. # # @param data [String] Base64 encoded audio data. # - # @param format_ [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". # The format of the encoded audio data. Currently supports "wav" and "mp3". # - # @see OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 31fafa47..828698cb 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the developer message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content } # @!attribute role @@ -25,13 +25,13 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :developer) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionDeveloperMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam} for more details. # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. # - # @param content [String, Array] The contents of the developer message. + # @param content [String, Array] The contents of the developer message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -39,7 +39,7 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the developer message. 
# - # @see OpenAI::Chat::ChatCompletionDeveloperMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -47,12 +47,10 @@ module Content variant String # An array of content parts with a defined type. For developer messages, only type `text` is supported. - variant -> { - OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 275a8339..4459c8a6 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -26,7 +26,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessage::Annotation] } @@ -35,7 +35,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAudio, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAudio }, nil?: true # @!attribute function_call @@ -44,13 +44,13 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Chat::ChatCompletionMessage::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionMessage::FunctionCall } # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] @@ -58,7 +58,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessage} for more details. + # {OpenAI::Models::Chat::ChatCompletionMessage} for more details. # # A chat completion message generated by the model. # @@ -66,13 +66,13 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @param refusal [String, nil] The refusal message generated by the model. 
# - # @param annotations [Array] Annotations for the message, when applicable, as when using the + # @param annotations [Array] Annotations for the message, when applicable, as when using the # - # @param audio [OpenAI::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data + # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data # - # @param function_call [OpenAI::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the author of this message. @@ -86,17 +86,17 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute url_citation # A URL citation when using web search. # - # @return [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] + # @return [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] required :url_citation, -> { OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation } # @!method initialize(url_citation:, type: :url_citation) # A URL citation when using web search. # - # @param url_citation [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. + # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. # # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. - # @see OpenAI::Chat::ChatCompletionMessage::Annotation#url_citation + # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. @@ -137,7 +137,7 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @deprecated # - # @see OpenAI::Chat::ChatCompletionMessage#function_call + # @see OpenAI::Models::Chat::ChatCompletionMessage#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -156,7 +156,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessage::FunctionCall} for more details. + # {OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall} for more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. 
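The renames above cover the full read path for a generated message: `content` and `refusal` are plain strings, web-search citations arrive under `annotations`, and model-initiated tool calls under `tool_calls`. A minimal consumption sketch under the new `OpenAI::Models::Chat` names — `completion` is assumed to be the return value of an earlier `client.chat.completions.create` call, and the citation fields beyond the `end_index` shown in this hunk follow the public API reference:

    message = completion.choices.first.message # OpenAI::Models::Chat::ChatCompletionMessage

    puts(message.content) if message.content
    warn("model refused: #{message.refusal}") if message.refusal

    # Each annotation wraps a URLCitation pointing back into the message text.
    (message.annotations || []).each do |annotation|
      citation = annotation.url_citation
      puts("cited #{citation.url} at #{citation.start_index}..#{citation.end_index}")
    end
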
diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index 6710bee4..b25933e3 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -33,7 +33,7 @@ module ChatCompletionMessageParam variant :function, -> { OpenAI::Chat::ChatCompletionFunctionMessageParam } # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Chat::ChatCompletionSystemMessageParam, OpenAI::Chat::ChatCompletionUserMessageParam, OpenAI::Chat::ChatCompletionAssistantMessageParam, OpenAI::Chat::ChatCompletionToolMessageParam, OpenAI::Chat::ChatCompletionFunctionMessageParam)] + # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index c90216d9..99793fa0 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -13,7 +13,7 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function that the model called. # - # @return [OpenAI::Chat::ChatCompletionMessageToolCall::Function] + # @return [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] required :function, -> { OpenAI::Chat::ChatCompletionMessageToolCall::Function } # @!attribute type @@ -25,11 +25,11 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. + # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Chat::ChatCompletionMessageToolCall#function + # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -48,7 +48,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessageToolCall::Function} for more details. + # {OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function} for more + # details. # # The function that the model called. 
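Since `Function#arguments` is delivered as a JSON-formatted string that, per the docs, the model does not always generate validly, dispatching a tool call means parsing it defensively. A sketch under the same assumptions as above, with `handle_weather` and the `get_weather` tool name standing in for application-defined pieces:

    require "json"

    message.tool_calls&.each do |tool_call|
      next unless tool_call.type == :function

      arguments =
        begin
          JSON.parse(tool_call.function.arguments)
        rescue JSON::ParserError
          {} # the model may emit invalid JSON; validate before acting on it
        end

      handle_weather(arguments) if tool_call.function.name == "get_weather"
    end
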
# diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index afab8ee0..493e6c0c 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] + # @return [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] required :function, -> { OpenAI::Chat::ChatCompletionNamedToolChoice::Function } # @!attribute type @@ -19,11 +19,11 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # Specifies a tool the model should use. Use to force the model to call a specific # function. # - # @param function [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] + # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Chat::ChatCompletionNamedToolChoice#function + # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index eeccd9df..52235ae7 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -9,7 +9,7 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionPredictionContent::Content } # @!attribute type @@ -21,12 +21,12 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, type: :content) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionPredictionContent} for more details. + # {OpenAI::Models::Chat::ChatCompletionPredictionContent} for more details. # # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @param content [String, Array] The content that should be matched when generating a model response. + # @param content [String, Array] The content that should be matched when generating a model response. # # @param type [Symbol, :content] The type of the predicted content you want to provide. This type is @@ -34,7 +34,7 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. # - # @see OpenAI::Chat::ChatCompletionPredictionContent#content + # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content module Content extend OpenAI::Internal::Type::Union @@ -43,12 +43,10 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. 
- variant -> { - OpenAI::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index ffbaa513..acb72bd9 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -18,7 +18,7 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(include_usage: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionStreamOptions} for more details. + # {OpenAI::Models::Chat::ChatCompletionStreamOptions} for more details. # # Options for streaming response. Only set this when you set `stream: true`. # diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 43b7a5c4..2d391de3 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the system message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionSystemMessageParam::Content } # @!attribute role @@ -25,13 +25,13 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :system) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionSystemMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionSystemMessageParam} for more details. # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages # for this purpose instead. # - # @param content [String, Array] The contents of the system message. + # @param content [String, Array] The contents of the system message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -39,7 +39,7 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the system message. # - # @see OpenAI::Chat::ChatCompletionSystemMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionSystemMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -47,12 +47,10 @@ module Content variant String # An array of content parts with a defined type. For system messages, only type `text` is supported. 
- variant -> { - OpenAI::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index c45fcee7..7a09b7c2 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -32,13 +32,13 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. # - # @return [Array] + # @return [Array] required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] } # @!method initialize(token:, bytes:, logprob:, top_logprobs:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionTokenLogprob} for more details. + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob} for more details. # # @param token [String] The token. # @@ -46,7 +46,7 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens # - # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position + # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -74,7 +74,7 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token:, bytes:, logprob:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. # # @param token [String] The token. # diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index 21a86040..d6ee8c94 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::FunctionDefinition] + # @return [OpenAI::Models::FunctionDefinition] required :function, -> { OpenAI::FunctionDefinition } # @!attribute type @@ -16,7 +16,7 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(function:, type: :function) - # @param function [OpenAI::FunctionDefinition] + # @param function [OpenAI::Models::FunctionDefinition] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. 
end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 4b8a6fe3..73874236 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -36,7 +36,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice)] + # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index c88bb51c..4685c606 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the tool message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionToolMessageParam::Content } # @!attribute role @@ -23,7 +23,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel required :tool_call_id, String # @!method initialize(content:, tool_call_id:, role: :tool) - # @param content [String, Array] The contents of the tool message. + # @param content [String, Array] The contents of the tool message. # # @param tool_call_id [String] Tool call that this message is responding to. # @@ -31,7 +31,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the tool message. # - # @see OpenAI::Chat::ChatCompletionToolMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionToolMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -39,12 +39,10 @@ module Content variant String # An array of content parts with a defined type. For tool messages, only type `text` is supported. - variant -> { - OpenAI::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index ffd7b68a..7335c7f0 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the user message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content } # @!attribute role @@ -25,12 +25,12 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :user) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionUserMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionUserMessageParam} for more details. 
# # Messages sent by an end user, containing prompts or additional context # information. # - # @param content [String, Array] The contents of the user message. + # @param content [String, Array] The contents of the user message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -38,7 +38,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the user message. # - # @see OpenAI::Chat::ChatCompletionUserMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionUserMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -46,10 +46,10 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. - variant -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } + variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartArray = diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 3a3a3c23..65446009 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -18,7 +18,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [images](https://platform.openai.com/docs/guides/vision), and # [audio](https://platform.openai.com/docs/guides/audio). # - # @return [Array] + # @return [Array] required :messages, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionMessageParam] } @@ -29,7 +29,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Chat::CompletionCreateParams::Model } # @!attribute audio @@ -37,7 +37,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAudioParam, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAudioParam }, nil?: true # @!attribute frequency_penalty @@ -66,7 +66,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no functions are present. `auto` is the default if # functions are present. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] optional :function_call, union: -> { OpenAI::Chat::CompletionCreateParams::FunctionCall } # @!attribute functions @@ -76,7 +76,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # A list of functions the model may generate JSON inputs for. 
# - # @return [Array, nil] + # @return [Array, nil] optional :functions, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::CompletionCreateParams::Function] } @@ -146,7 +146,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # `["text", "audio"]` # - # @return [Array, nil] + # @return [Array, nil] optional :modalities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Chat::CompletionCreateParams::Modality] }, nil?: true @@ -171,7 +171,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @return [OpenAI::Chat::ChatCompletionPredictionContent, nil] + # @return [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] optional :prediction, -> { OpenAI::Chat::ChatCompletionPredictionContent }, nil?: true # @!attribute presence_penalty @@ -190,7 +190,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -205,7 +205,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. # - # @return [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject, nil] + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Chat::CompletionCreateParams::ResponseFormat } # @!attribute seed @@ -237,7 +237,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::CompletionCreateParams::ServiceTier }, nil?: true # @!attribute stop @@ -260,7 +260,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. # - # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute temperature @@ -283,7 +283,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no tools are present. `auto` is the default if tools # are present. # - # @return [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Chat::ChatCompletionToolChoiceOption } # @!attribute tools @@ -291,7 +291,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # tool. Use this to provide a list of functions the model may generate JSON inputs # for. 
A max of 128 functions are supported. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } # @!attribute top_logprobs @@ -325,24 +325,24 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions, nil] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # @@ -354,35 +354,35 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. 
Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -390,7 +390,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -411,7 +411,7 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end # @deprecated @@ -453,7 +453,7 @@ module FunctionCallMode end # @!method self.variants - # @return [Array(Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption)] + # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] end # @deprecated @@ -486,7 +486,7 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::Function} for more details. + # {OpenAI::Models::Chat::CompletionCreateParams::Function} for more details. # # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc # @@ -532,7 +532,7 @@ module ResponseFormat variant -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject)] + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -572,7 +572,7 @@ module Stop variant String - variant -> { OpenAI::Chat::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] @@ -586,34 +586,35 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } # @!attribute user_location # Approximate location parameters for the search. # - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] optional :user_location, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, nil?: true # @!method initialize(search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions} for more details. + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions} for more + # details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
# - # @param search_context_size [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the + # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the # - # @param user_location [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. + # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#search_context_size + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -625,12 +626,12 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#user_location + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute approximate # Approximate location parameters for the search. # - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] required :approximate, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate } @@ -642,16 +643,16 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(approximate:, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} for more - # details. + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} + # for more details. # # Approximate location parameters for the search. # - # @param approximate [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. + # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. # # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. @@ -681,7 +682,7 @@ class Approximate < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} # for more details. # # Approximate location parameters for the search. 
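# A minimal sketch of how the web search options documented above might be
# exercised from client code, assuming an OpenAI::Client configured via the
# OPENAI_API_KEY environment variable; the model name and the location values
# are illustrative only, not prescriptive.
require "openai"

client = OpenAI::Client.new

completion = client.chat.completions.create(
  model: "gpt-4o-search-preview",
  messages: [{role: :user, content: "What should I check out in town today?"}],
  web_search_options: {
    search_context_size: :medium,
    user_location: {
      type: :approximate,
      approximate: {city: "San Francisco", country: "US"}
    }
  }
)
puts completion.choices.first.message.content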
diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index b1f9e734..0e18202c 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -38,7 +38,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Chat::CompletionListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] optional :order, enum: -> { OpenAI::Chat::CompletionListParams::Order } # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) @@ -53,7 +53,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # # @param model [String] The model used to generate the Chat Completions. # - # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 8292066a..8f2c139c 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -25,7 +25,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Chat::Completions::MessageListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Chat::Completions::MessageListParams::Order } # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) @@ -36,7 +36,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 582e2f5a..a8a32298 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -19,7 +19,7 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @return [Symbol, OpenAI::ComparisonFilter::Type] + # @return [Symbol, OpenAI::Models::ComparisonFilter::Type] required :type, enum: -> { OpenAI::ComparisonFilter::Type } # @!attribute value @@ -30,15 +30,15 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel required :value, union: -> { OpenAI::ComparisonFilter::Value } # @!method initialize(key:, type:, value:) - # Some parameter documentations has been truncated, see {OpenAI::ComparisonFilter} - # for more details. 
+ # Some parameter documentations has been truncated, see + # {OpenAI::Models::ComparisonFilter} for more details. # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. # # @param key [String] The key to compare against the value. # - # @param type [Symbol, OpenAI::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. + # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # @param value [String, Float, Boolean] The value to compare against the attribute key; supports string, number, or bool @@ -51,7 +51,7 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @see OpenAI::ComparisonFilter#type + # @see OpenAI::Models::ComparisonFilter#type module Type extend OpenAI::Internal::Type::Enum @@ -69,7 +69,7 @@ module Type # The value to compare against the attribute key; supports string, number, or # boolean types. # - # @see OpenAI::ComparisonFilter#value + # @see OpenAI::Models::ComparisonFilter#value module Value extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 8d33e0b0..e6a702d0 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -15,7 +15,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute choices # The list of completion choices the model generated for the input prompt. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::CompletionChoice] } # @!attribute created @@ -48,19 +48,19 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) - # Some parameter documentations has been truncated, see {OpenAI::Completion} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Completion} for more details. # # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). # # @param id [String] A unique identifier for the completion. # - # @param choices [Array] The list of completion choices the model generated for the input prompt. + # @param choices [Array] The list of completion choices the model generated for the input prompt. # # @param created [Integer] The Unix timestamp (in seconds) of when the completion was created. # @@ -68,7 +68,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. # - # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. + # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. 
# # @param object [Symbol, :text_completion] The object type, which is always "text_completion" end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index c2a91320..07f6b428 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -9,7 +9,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @return [Symbol, OpenAI::CompletionChoice::FinishReason] + # @return [Symbol, OpenAI::Models::CompletionChoice::FinishReason] required :finish_reason, enum: -> { OpenAI::CompletionChoice::FinishReason } # @!attribute index @@ -19,7 +19,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # - # @return [OpenAI::CompletionChoice::Logprobs, nil] + # @return [OpenAI::Models::CompletionChoice::Logprobs, nil] required :logprobs, -> { OpenAI::CompletionChoice::Logprobs }, nil?: true # @!attribute text @@ -28,14 +28,14 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel required :text, String # @!method initialize(finish_reason:, index:, logprobs:, text:) - # Some parameter documentations has been truncated, see {OpenAI::CompletionChoice} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompletionChoice} for more details. # - # @param finish_reason [Symbol, OpenAI::CompletionChoice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] # - # @param logprobs [OpenAI::CompletionChoice::Logprobs, nil] + # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] # # @param text [String] @@ -44,7 +44,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @see OpenAI::CompletionChoice#finish_reason + # @see OpenAI::Models::CompletionChoice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -56,7 +56,7 @@ module FinishReason # @return [Array] end - # @see OpenAI::CompletionChoice#logprobs + # @see OpenAI::Models::CompletionChoice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute text_offset # diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 27e721da..4da42de4 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -16,7 +16,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::CompletionCreateParams::Model] + # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] required :model, union: -> { OpenAI::CompletionCreateParams::Model } # @!attribute prompt @@ -143,7 +143,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. 
# - # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute suffix @@ -186,7 +186,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::CompletionCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -210,7 +210,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. # @@ -232,11 +232,11 @@ module Model variant String - variant const: -> { OpenAI::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } - variant const: -> { OpenAI::CompletionCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::CompletionCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } # @!method self.variants # @return [Array(String, Symbol)] @@ -265,11 +265,11 @@ module Prompt variant String - variant -> { OpenAI::CompletionCreateParams::Prompt::StringArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::StringArray } - variant -> { OpenAI::CompletionCreateParams::Prompt::IntegerArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray } - variant -> { OpenAI::CompletionCreateParams::Prompt::ArrayOfToken2DArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } # @!method self.variants # @return [Array(String, Array, Array, Array>)] @@ -293,7 +293,7 @@ module Stop variant String - variant -> { OpenAI::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 202f9218..8ca37f94 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -24,13 +24,13 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens_details # Breakdown of tokens used in a completion. 
# - # @return [OpenAI::CompletionUsage::CompletionTokensDetails, nil] + # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails, nil] optional :completion_tokens_details, -> { OpenAI::CompletionUsage::CompletionTokensDetails } # @!attribute prompt_tokens_details # Breakdown of tokens used in the prompt. # - # @return [OpenAI::CompletionUsage::PromptTokensDetails, nil] + # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails, nil] optional :prompt_tokens_details, -> { OpenAI::CompletionUsage::PromptTokensDetails } # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) @@ -42,11 +42,11 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # # @param total_tokens [Integer] Total number of tokens used in the request (prompt + completion). # - # @param completion_tokens_details [OpenAI::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. + # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. # - # @param prompt_tokens_details [OpenAI::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. + # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. - # @see OpenAI::CompletionUsage#completion_tokens_details + # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that @@ -78,7 +78,7 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) # Some parameter documentations has been truncated, see - # {OpenAI::CompletionUsage::CompletionTokensDetails} for more details. + # {OpenAI::Models::CompletionUsage::CompletionTokensDetails} for more details. # # Breakdown of tokens used in a completion. # @@ -91,7 +91,7 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @param rejected_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the end - # @see OpenAI::CompletionUsage#prompt_tokens_details + # @see OpenAI::Models::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute audio_tokens # Audio input tokens present in the prompt. diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 8bfaf4d2..e4e7fdcf 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -7,24 +7,24 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. # - # @return [Array] + # @return [Array] required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::CompoundFilter::Filter] } # @!attribute type # Type of operation: `and` or `or`. # - # @return [Symbol, OpenAI::CompoundFilter::Type] + # @return [Symbol, OpenAI::Models::CompoundFilter::Type] required :type, enum: -> { OpenAI::CompoundFilter::Type } # @!method initialize(filters:, type:) - # Some parameter documentations has been truncated, see {OpenAI::CompoundFilter} - # for more details. 
+ # Some parameter documentations has been truncated, see
+ # {OpenAI::Models::CompoundFilter} for more details.
 #
 # Combine multiple filters using `and` or `or`.
 #
- # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter`
+ # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter`
 #
- # @param type [Symbol, OpenAI::CompoundFilter::Type] Type of operation: `and` or `or`.
+ # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] Type of operation: `and` or `or`.
 # A filter used to compare a specified attribute key to a given value using a
 # defined comparison operation.
@@ -37,12 +37,12 @@ module Filter
 variant OpenAI::Internal::Type::Unknown
 # @!method self.variants
- # @return [Array(OpenAI::ComparisonFilter, Object)]
+ # @return [Array(OpenAI::Models::ComparisonFilter, Object)]
 end
 # Type of operation: `and` or `or`.
 #
- # @see OpenAI::CompoundFilter#type
+ # @see OpenAI::Models::CompoundFilter#type
 module Type
 extend OpenAI::Internal::Type::Enum
diff --git a/lib/openai/models/container_create_params.rb b/lib/openai/models/container_create_params.rb
index 2db5a5a9..00a41b82 100644
--- a/lib/openai/models/container_create_params.rb
+++ b/lib/openai/models/container_create_params.rb
@@ -16,7 +16,7 @@ class ContainerCreateParams < OpenAI::Internal::Type::BaseModel
 # @!attribute expires_after
 # Container expiration time in seconds relative to the 'anchor' time.
 #
- # @return [OpenAI::ContainerCreateParams::ExpiresAfter, nil]
+ # @return [OpenAI::Models::ContainerCreateParams::ExpiresAfter, nil]
 optional :expires_after, -> { OpenAI::ContainerCreateParams::ExpiresAfter }
 # @!attribute file_ids
@@ -28,7 +28,7 @@ class ContainerCreateParams < OpenAI::Internal::Type::BaseModel
 # @!method initialize(name:, expires_after: nil, file_ids: nil, request_options: {})
 # @param name [String] Name of the container to create.
 #
- # @param expires_after [OpenAI::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time.
+ # @param expires_after [OpenAI::Models::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time.
 #
 # @param file_ids [Array] IDs of files to copy to the container.
 #
@@ -39,7 +39,7 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel
 # Time anchor for the expiration time. Currently only 'last_active_at' is
 # supported.
 #
- # @return [Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor]
+ # @return [Symbol, OpenAI::Models::ContainerCreateParams::ExpiresAfter::Anchor]
 required :anchor, enum: -> { OpenAI::ContainerCreateParams::ExpiresAfter::Anchor }
 # @!attribute minutes
@@ -49,18 +49,18 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel
 # @!method initialize(anchor:, minutes:)
 # Some parameter documentations has been truncated, see
- # {OpenAI::ContainerCreateParams::ExpiresAfter} for more details.
+ # {OpenAI::Models::ContainerCreateParams::ExpiresAfter} for more details.
 #
 # Container expiration time in seconds relative to the 'anchor' time.
 #
- # @param anchor [Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor] Time anchor for the expiration time. Currently only 'last_active_at' is supporte
+ # @param anchor [Symbol, OpenAI::Models::ContainerCreateParams::ExpiresAfter::Anchor] Time anchor for the expiration time. Currently only 'last_active_at' is supporte
 #
 # @param minutes [Integer]
 # Time anchor for the expiration time. Currently only 'last_active_at' is
 # supported.
 #
- # @see OpenAI::ContainerCreateParams::ExpiresAfter#anchor
+ # @see OpenAI::Models::ContainerCreateParams::ExpiresAfter#anchor
 module Anchor
 extend OpenAI::Internal::Type::Enum
diff --git a/lib/openai/models/container_list_params.rb b/lib/openai/models/container_list_params.rb
index 2aa224d4..017dc9cf 100644
--- a/lib/openai/models/container_list_params.rb
+++ b/lib/openai/models/container_list_params.rb
@@ -27,7 +27,7 @@ class ContainerListParams < OpenAI::Internal::Type::BaseModel
 # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
 # order and `desc` for descending order.
 #
- # @return [Symbol, OpenAI::ContainerListParams::Order, nil]
+ # @return [Symbol, OpenAI::Models::ContainerListParams::Order, nil]
 optional :order, enum: -> { OpenAI::ContainerListParams::Order }
 # @!method initialize(after: nil, limit: nil, order: nil, request_options: {})
@@ -38,7 +38,7 @@ class ContainerListParams < OpenAI::Internal::Type::BaseModel
 #
 # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
 #
- # @param order [Symbol, OpenAI::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # @param order [Symbol, OpenAI::Models::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
 #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
diff --git a/lib/openai/models/containers/file_list_params.rb b/lib/openai/models/containers/file_list_params.rb
index d1494255..a7b2de9b 100644
--- a/lib/openai/models/containers/file_list_params.rb
+++ b/lib/openai/models/containers/file_list_params.rb
@@ -28,7 +28,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel
 # Sort order by the `created_at` timestamp of the objects. `asc` for ascending
 # order and `desc` for descending order.
 #
- # @return [Symbol, OpenAI::Containers::FileListParams::Order, nil]
+ # @return [Symbol, OpenAI::Models::Containers::FileListParams::Order, nil]
 optional :order, enum: -> { OpenAI::Containers::FileListParams::Order }
 # @!method initialize(after: nil, limit: nil, order: nil, request_options: {})
@@ -39,7 +39,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel
 #
 # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1
 #
- # @param order [Symbol, OpenAI::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
+ # @param order [Symbol, OpenAI::Models::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord
 #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb
index ec420899..a7bcbff1 100644
--- a/lib/openai/models/create_embedding_response.rb
+++ b/lib/openai/models/create_embedding_response.rb
@@ -7,7 +7,7 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel
 # @!attribute data
 # The list of embeddings generated by the model.
 #
- # @return [Array]
+ # @return [Array]
 required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Embedding] }
 # @!attribute model
@@ -25,19 +25,19 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel
 # @!attribute usage
 # The usage information for the request.
 #
- # @return [OpenAI::CreateEmbeddingResponse::Usage]
+ # @return [OpenAI::Models::CreateEmbeddingResponse::Usage]
 required :usage, -> { OpenAI::CreateEmbeddingResponse::Usage }
 # @!method initialize(data:, model:, usage:, object: :list)
- # @param data [Array] The list of embeddings generated by the model.
+ # @param data [Array] The list of embeddings generated by the model.
 #
 # @param model [String] The name of the model used to generate the embedding.
 #
- # @param usage [OpenAI::CreateEmbeddingResponse::Usage] The usage information for the request.
+ # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] The usage information for the request.
 #
 # @param object [Symbol, :list] The object type, which is always "list".
- # @see OpenAI::CreateEmbeddingResponse#usage
+ # @see OpenAI::Models::CreateEmbeddingResponse#usage
 class Usage < OpenAI::Internal::Type::BaseModel
 # @!attribute prompt_tokens
 # The number of tokens used by the prompt.
diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb
index 6f8e6de3..9a511def 100644
--- a/lib/openai/models/embedding.rb
+++ b/lib/openai/models/embedding.rb
@@ -24,8 +24,8 @@ class Embedding < OpenAI::Internal::Type::BaseModel
 required :object, const: :embedding
 # @!method initialize(embedding:, index:, object: :embedding)
- # Some parameter documentations has been truncated, see {OpenAI::Embedding} for
- # more details.
+ # Some parameter documentations has been truncated, see
+ # {OpenAI::Models::Embedding} for more details.
 #
 # Represents an embedding vector returned by embedding endpoint.
 #
diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb
index 6e76d74e..fea9547e 100644
--- a/lib/openai/models/embedding_create_params.rb
+++ b/lib/openai/models/embedding_create_params.rb
@@ -28,7 +28,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
 # [Model overview](https://platform.openai.com/docs/models) for descriptions of
 # them.
 #
- # @return [String, Symbol, OpenAI::EmbeddingModel]
+ # @return [String, Symbol, OpenAI::Models::EmbeddingModel]
 required :model, union: -> { OpenAI::EmbeddingCreateParams::Model }
 # @!attribute dimensions
@@ -42,7 +42,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
 # The format to return the embeddings in. Can be either `float` or
 # [`base64`](https://pypi.org/project/pybase64/).
 #
- # @return [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat, nil]
+ # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil]
 optional :encoding_format, enum: -> { OpenAI::EmbeddingCreateParams::EncodingFormat }
 # @!attribute user
@@ -59,11 +59,11 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel
 #
 # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i
 #
- # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co
+ # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co
 #
 # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo
 #
- # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http
+ # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http
 #
 # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor
 #
@@ -85,13 +85,13 @@ module Input
 variant String
 # The array of strings that will be turned into an embedding.
- variant -> { OpenAI::EmbeddingCreateParams::Input::StringArray }
+ variant -> { OpenAI::Models::EmbeddingCreateParams::Input::StringArray }
 # The array of integers that will be turned into an embedding.
- variant -> { OpenAI::EmbeddingCreateParams::Input::IntegerArray }
+ variant -> { OpenAI::Models::EmbeddingCreateParams::Input::IntegerArray }
 # The array of arrays containing integers that will be turned into an embedding.
- variant -> { OpenAI::EmbeddingCreateParams::Input::ArrayOfToken2DArray }
+ variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray }
 # @!method self.variants
 # @return [Array(String, Array, Array, Array>)]
@@ -120,7 +120,7 @@ module Model
 variant enum: -> { OpenAI::EmbeddingModel }
 # @!method self.variants
- # @return [Array(String, Symbol, OpenAI::EmbeddingModel)]
+ # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)]
 end
 # The format to return the embeddings in. Can be either `float` or
diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb
index 5e76ac50..cdf26b6c 100644
--- a/lib/openai/models/eval_create_params.rb
+++ b/lib/openai/models/eval_create_params.rb
@@ -11,7 +11,7 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
 # The configuration for the data source used for the evaluation runs. Dictates the
 # schema of the data used in the evaluation.
 #
- # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions]
+ # @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions]
 required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig }
 # @!attribute testing_criteria
@@ -20,7 +20,7 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
 # `{{item.variable_name}}`. To reference the model's output, use the `sample`
 # namespace (ie, `{{sample.output_text}}`).
 #
- # @return [Array]
+ # @return [Array]
 required :testing_criteria,
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion] }
@@ -45,9 +45,9 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel
 # Some parameter documentations has been truncated, see
 # {OpenAI::Models::EvalCreateParams} for more details.
 #
- # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the
+ # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the
 #
- # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl
+ # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl
 #
 # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
 #
@@ -97,7 +97,7 @@ class Custom < OpenAI::Internal::Type::BaseModel
 # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom)
 # Some parameter documentations has been truncated, see
- # {OpenAI::EvalCreateParams::DataSourceConfig::Custom} for more details.
+ # {OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom} for more details.
 #
 # A CustomDataSourceConfig object that defines the schema for the data source used
 # for the evaluation runs. This schema is used to define the shape of the data
@@ -158,7 +158,7 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel
 end
 # @!method self.variants
- # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)]
+ # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)]
 end
 # A LabelModelGrader object which uses a model to assign labels to each item in
@@ -189,7 +189,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel
 # A list of chat messages forming the prompt or context. May include variable
 # references to the `item` namespace, ie {{item.name}}.
 #
- # @return [Array]
+ # @return [Array]
 required :input,
 -> {
 OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input]
@@ -227,12 +227,13 @@ class LabelModel < OpenAI::Internal::Type::BaseModel
 # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model)
 # Some parameter documentations has been truncated, see
- # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel} for more details.
+ # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel} for more
+ # details.
 #
 # A LabelModelGrader object which uses a model to assign labels to each item in
 # the evaluation.
 #
- # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe
+ # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe
 #
 # @param labels [Array] The labels to classify to each item in the evaluation.
 #
@@ -281,7 +282,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel
 # @!attribute content
 # Text inputs to the model - can contain template strings.
 #
- # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText]
+ # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText]
 required :content,
 union: -> {
 OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content
@@ -291,7 +292,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel
 # The role of the message input. One of `user`, `assistant`, `system`, or
 # `developer`.
# - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] required :role, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role @@ -300,7 +301,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] optional :type, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type @@ -308,8 +309,8 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} for - # more details. + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} + # for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -317,15 +318,15 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content module Content extend OpenAI::Internal::Type::Union @@ -355,7 +356,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} # for more details. # # A text output from the model. 
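# A hedged sketch of how the label-model testing criterion and EvalItem
# messages documented above might be assembled; assumes `client` is an
# OpenAI::Client, and the item schema, labels, and model name are placeholders.
ev = client.evals.create(
  name: "ticket-sentiment",
  data_source_config: {
    type: :custom,
    item_schema: {
      type: "object",
      properties: {ticket_text: {type: "string"}},
      required: ["ticket_text"]
    },
    include_sample_schema: true
  },
  testing_criteria: [
    {
      type: :label_model,
      name: "sentiment-grader",
      model: "gpt-4o-mini",
      input: [
        {role: :developer, content: "Classify the sentiment of {{item.ticket_text}}."},
        {role: :user, content: "{{sample.output_text}}"}
      ],
      labels: %w[positive neutral negative],
      passing_labels: %w[positive neutral]
    }
  ]
)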
@@ -366,13 +367,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role module Role extend OpenAI::Internal::Type::Enum @@ -387,7 +388,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type module Type extend OpenAI::Internal::Type::Enum @@ -399,7 +400,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] end end @@ -443,7 +444,7 @@ class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 6fa95a6d..64d013fc 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -19,7 +19,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. 
 #
- # @return [Array]
+ # @return [Array]
 required :testing_criteria,
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] }
@@ -67,13 +67,13 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel
 #
 # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created.
 #
- # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
 #
 # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be
 #
 # @param name [String] The name of the evaluation.
 #
- # @param testing_criteria [Array] A list of testing criteria.
+ # @param testing_criteria [Array] A list of testing criteria.
 #
 # @param object [Symbol, :eval] The object type.
@@ -143,7 +143,7 @@ class Logs < OpenAI::Internal::Type::BaseModel
 end
 # @!method self.variants
- # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)]
+ # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)]
 end
 # A LabelModelGrader object which uses a model to assign labels to each item in
@@ -207,7 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader
 end
 # @!method self.variants
- # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)]
+ # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)]
 end
 end
 end
diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb
index 38c0e0ec..5ff8b5db 100644
--- a/lib/openai/models/eval_custom_data_source_config.rb
+++ b/lib/openai/models/eval_custom_data_source_config.rb
@@ -18,7 +18,7 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel
 # @!method initialize(schema:, type: :custom)
 # Some parameter documentations has been truncated, see
- # {OpenAI::EvalCustomDataSourceConfig} for more details.
+ # {OpenAI::Models::EvalCustomDataSourceConfig} for more details.
 #
 # A CustomDataSourceConfig which specifies the schema of your `item` and
 # optionally `sample` namespaces. The response schema defines the shape of the
diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb
index c772f07f..ff5f8d60 100644
--- a/lib/openai/models/eval_list_params.rb
+++ b/lib/openai/models/eval_list_params.rb
@@ -23,14 +23,14 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel
 # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for
 # descending order.
 #
- # @return [Symbol, OpenAI::EvalListParams::Order, nil]
+ # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil]
 optional :order, enum: -> { OpenAI::EvalListParams::Order }
 # @!attribute order_by
 # Evals can be ordered by creation time or last updated time. Use `created_at` for
 # creation time or `updated_at` for last updated time.
 #
- # @return [Symbol, OpenAI::EvalListParams::OrderBy, nil]
+ # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil]
 optional :order_by, enum: -> { OpenAI::EvalListParams::OrderBy }
 # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {})
@@ -41,9 +41,9 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel
 #
 # @param limit [Integer] Number of evals to retrieve.
 #
- # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d
+ # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d
 #
- # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use
+ # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use
 #
 # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb
index 3012d7c1..db2de0e9 100644
--- a/lib/openai/models/eval_list_response.rb
+++ b/lib/openai/models/eval_list_response.rb
@@ -19,7 +19,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel
 # @!attribute data_source_config
 # Configuration of data sources used in runs of the evaluation.
 #
- # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig]
+ # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig]
 required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig }
 # @!attribute metadata
@@ -48,7 +48,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel
 # @!attribute testing_criteria
 # A list of testing criteria.
 #
- # @return [Array]
+ # @return [Array]
 required :testing_criteria,
 -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] }
@@ -67,13 +67,13 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel
 #
 # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created.
 #
- # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation.
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -143,7 +143,7 @@ class Logs < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -207,7 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 6dd4004d..04a1e866 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -19,7 +19,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. 
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -143,7 +143,7 @@ class Logs < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -207,7 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index f9b04990..2a57fdfd 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -30,7 +30,7 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(schema:, metadata: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::EvalStoredCompletionsDataSourceConfig} for more details. + # {OpenAI::Models::EvalStoredCompletionsDataSourceConfig} for more details. # # Deprecated in favor of LogsDataSourceConfig. # diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index 592ed41e..475374c9 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -19,7 +19,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. 
# - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } @@ -67,13 +67,13 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. 
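(A minimal usage sketch of the attributes documented above, assuming the SDK's usual `OpenAI::Client` conventions; the `evals.update` call shape and the `eval_abc123` id are illustrative placeholders rather than part of this change:)

require "openai"

# Placeholder client and id; OPENAI_API_KEY is read from the environment.
client = OpenAI::Client.new
updated = client.evals.update("eval_abc123", name: "renamed eval")

# `data_source_config` is a union of the three variants documented above.
case updated.data_source_config
in OpenAI::Models::EvalCustomDataSourceConfig => cfg
  puts "custom schema: #{cfg.schema.inspect}"
in OpenAI::Models::EvalStoredCompletionsDataSourceConfig
  puts "stored completions config (deprecated in favor of logs)"
else
  puts "logs data source config"
end

# `testing_criteria` is an array of grader models.
updated.testing_criteria.each { |criterion| puts criterion.class.name }
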
@@ -143,7 +143,7 @@ class Logs < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig)] + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -207,7 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 8ac9aff1..28f9e688 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -7,13 +7,13 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # Determines what populates the `item` namespace in this run's data source. # - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } # @!attribute type # The type of run data source. Always `completions`. # - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] required :type, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type } # @!attribute input_messages @@ -22,7 +22,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # `item.input_trajectory`), or a template with variable references to the `item` # namespace. 
# - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages @@ -36,28 +36,28 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute sampling_params # - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource} for more details. + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource} for more details. # # A CompletionsRunDataSource object describing a model sampling configuration. # - # @param source [OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] Determines what populates the `item` namespace in this run's data source. + # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] Determines what populates the `item` namespace in this run's data source. # - # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. # - # @param input_messages [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # - # @param sampling_params [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] + # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # Determines what populates the `item` namespace in this run's data source. # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#source + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#source module Source extend OpenAI::Internal::Type::Union @@ -75,7 +75,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the jsonl file. 
# - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] @@ -88,7 +88,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. + # @param content [Array] The content of the jsonl file. # # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. @@ -172,7 +172,7 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method initialize(created_after: nil, created_before: nil, limit: nil, metadata: nil, model: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} # for more details. # # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -191,12 +191,12 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] end # The type of run data source. Always `completions`. # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#type + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#type module Type extend OpenAI::Internal::Type::Enum @@ -211,7 +211,7 @@ module Type # `item.input_trajectory`), or a template with variable references to the `item` # namespace. # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#input_messages + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -227,7 +227,7 @@ class Template < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the `item` namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :template, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] @@ -241,10 +241,10 @@ class Template < OpenAI::Internal::Type::BaseModel # @!method initialize(template:, type: :template) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} for - # more details. + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} + # for more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -279,7 +279,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] required :content, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content @@ -289,7 +289,7 @@ class Message < OpenAI::Internal::Type::BaseModel # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] required :role, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role @@ -298,7 +298,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] optional :type, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type @@ -306,7 +306,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} # for more details. # # A message input to the model with a role indicating instruction following @@ -315,15 +315,15 @@ class Message < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. 
# - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content module Content extend OpenAI::Internal::Type::Union @@ -353,7 +353,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} # for more details. # # A text output from the model. @@ -364,13 +364,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -385,7 +385,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type module Type extend OpenAI::Internal::Type::Enum @@ -397,7 +397,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] end end @@ -421,10 +421,10 @@ class ItemReference < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] end - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#sampling_params + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. 
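(Taken together, the CreateEvalCompletionsRunDataSource docs above describe a nested union: a source that is file content, a file id, or stored completions; optional templated input messages; and sampling params. A hedged sketch of building one when creating a run follows; hash literals are coerced into the typed models, and the `runs.create` signature plus all ids are assumptions based on the SDK's conventions:)

require "openai"

client = OpenAI::Client.new

run = client.evals.runs.create(
  "eval_abc123", # placeholder eval id
  name: "completions-run",
  data_source: {
    type: :completions,
    model: "gpt-4o-mini",
    # Source union: file_content, file_id, or stored_completions.
    source: {type: :file_id, id: "file-xyz789"}, # placeholder file id
    # Template messages may reference the `item` namespace, e.g. {{item.question}}.
    input_messages: {
      type: :template,
      template: [
        {role: :system, content: "Answer concisely."},
        {role: :user, content: "{{item.question}}"}
      ]
    },
    sampling_params: {temperature: 0.2, max_completion_tokens: 256}
  }
)
puts run.status

(Symbols such as `:completions`, `:file_id`, and `:template` mirror the enum and const values documented in the hunks above.)
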
diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
index 37c22655..fc29873a 100644
--- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
+++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb
@@ -7,7 +7,7 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel
 # @!attribute source
 # Determines what populates the `item` namespace in the data source.
 #
- # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID]
+ # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID]
 required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source }

 # @!attribute type
@@ -20,13 +20,13 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel
 # A JsonlRunDataSource object that specifies a JSONL file matching the
 # eval
 #
- # @param source [OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID] Determines what populates the `item` namespace in the data source.
+ # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] Determines what populates the `item` namespace in the data source.
 #
 # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`.

 # Determines what populates the `item` namespace in the data source.
 #
- # @see OpenAI::Evals::CreateEvalJSONLRunDataSource#source
+ # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source
 module Source
 extend OpenAI::Internal::Type::Union

@@ -40,7 +40,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel
 # @!attribute content
 # The content of the jsonl file.
 #
- # @return [Array]
+ # @return [Array]
 required :content,
 -> {
 OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content]
@@ -53,7 +53,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel
 required :type, const: :file_content

 # @!method initialize(content:, type: :file_content)
- # @param content [Array] The content of the jsonl file.
+ # @param content [Array] The content of the jsonl file.
 #
 # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`.

@@ -94,7 +94,7 @@ class FileID < OpenAI::Internal::Type::BaseModel
 end

 # @!method self.variants
- # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID)]
+ # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)]
 end
 end
 end
diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb
index bbcbb6e7..fd8642a6 100644
--- a/lib/openai/models/evals/run_cancel_response.rb
+++ b/lib/openai/models/evals/run_cancel_response.rb
@@ -20,13 +20,13 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel
 # @!attribute data_source
 # Information about the run's data source.
# - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -317,7 +317,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature @@ -361,7 +361,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -459,7 +459,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,7 +489,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. 
# - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # @@ -536,7 +536,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -634,7 +634,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index d300ba18..bc703e7f 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,7 +11,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Evals::RunCreateParams::DataSource } # @!attribute metadata @@ -35,7 +35,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # @@ -60,7 +60,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source # Determines what populates the `item` namespace in this run's data source. # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] required :source, union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source @@ -69,7 +69,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of run data source. Always `responses`. # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] required :type, enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type @@ -81,7 +81,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # `item.input_trajectory`), or a template with variable references to the `item` # namespace. # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages @@ -95,7 +95,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute sampling_params # - # @return [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams @@ -103,24 +103,24 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} # for more details. # # A ResponsesRunDataSource object describing a model sampling configuration. 
# - # @param source [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] Determines what populates the `item` namespace in this run's data source. # - # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. # - # @param input_messages [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # @param input_messages [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # - # @param sampling_params [OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] + # @param sampling_params [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] # Determines what populates the `item` namespace in this run's data source. # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source module Source extend OpenAI::Internal::Type::Union @@ -146,7 +146,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the jsonl file. # - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] @@ -159,7 +159,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel required :type, const: :file_content # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. + # @param content [Array] The content of the jsonl file. # # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. @@ -245,7 +245,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. 
 #
- # @return [Symbol, OpenAI::ReasoningEffort, nil]
+ # @return [Symbol, OpenAI::Models::ReasoningEffort, nil]
 optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true

 # @!attribute temperature
@@ -274,7 +274,7 @@ class Responses < OpenAI::Internal::Type::BaseModel

 # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses)
 # Some parameter documentations has been truncated, see
- # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses}
+ # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses}
 # for more details.
 #
 # An EvalResponsesSource object describing a run data source configuration.
@@ -289,7 +289,7 @@ class Responses < OpenAI::Internal::Type::BaseModel
 #
 # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s
 #
- # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re
+ # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re
 #
 # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses.
 #
@@ -303,12 +303,12 @@ class Responses < OpenAI::Internal::Type::BaseModel
 end

 # @!method self.variants
- # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)]
+ # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)]
 end

 # The type of run data source. Always `responses`.
 #
- # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type
+ # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type
 module Type
 extend OpenAI::Internal::Type::Enum

@@ -323,7 +323,7 @@ module Type
 # `item.input_trajectory`), or a template with variable references to the `item`
 # namespace.
 #
- # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages
+ # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages
 module InputMessages
 extend OpenAI::Internal::Type::Union

@@ -344,7 +344,7 @@ class Template < OpenAI::Internal::Type::BaseModel
 # A list of chat messages forming the prompt or context. May include variable
 # references to the `item` namespace, ie {{item.name}}.
# - # @return [Array] + # @return [Array] required :template, -> do OpenAI::Internal::Type::ArrayOf[ @@ -360,10 +360,10 @@ class Template < OpenAI::Internal::Type::BaseModel # @!method initialize(template:, type: :template) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} # for more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -411,7 +411,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content @@ -421,7 +421,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] required :role, enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role @@ -430,7 +430,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] optional :type, enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type @@ -438,7 +438,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} # for more details. # # A message input to the model with a role indicating instruction following @@ -447,15 +447,15 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. 
# - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content module Content extend OpenAI::Internal::Type::Union @@ -485,7 +485,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} # for more details. # # A text output from the model. @@ -496,13 +496,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role module Role extend OpenAI::Internal::Type::Enum @@ -517,7 +517,7 @@ module Role # The type of the message input. Always `message`. 
# - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type module Type extend OpenAI::Internal::Type::Enum @@ -529,7 +529,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] end end @@ -553,10 +553,10 @@ class ItemReference < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] end - # @see OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. @@ -594,7 +594,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index f3cba684..3a110d09 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,13 +20,13 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. 
# - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -317,7 +317,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature @@ -361,7 +361,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -459,7 +459,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,7 +489,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. 
# # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # @@ -536,7 +536,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -634,7 +634,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index bfb9d83f..3e0a45b3 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -24,14 +24,14 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Evals::RunListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Evals::RunListParams::Order } # @!attribute status # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # | `canceled`. # - # @return [Symbol, OpenAI::Evals::RunListParams::Status, nil] + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] optional :status, enum: -> { OpenAI::Evals::RunListParams::Status } # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) @@ -42,9 +42,9 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. 
One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index bd896908..83907899 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,13 +20,13 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -317,7 +317,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature @@ -361,7 +361,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -459,7 +459,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,7 +489,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # @@ -536,7 +536,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -634,7 +634,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index cdba5c53..9db0bb26 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,13 +20,13 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. 
# - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -317,7 +317,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # Optional reasoning effort parameter. This is a query parameter used to select # responses. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute temperature @@ -361,7 +361,7 @@ class Responses < OpenAI::Internal::Type::BaseModel # # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re # # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. # @@ -463,7 +463,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] required :content, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -493,7 +493,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. 
# # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # @@ -540,7 +540,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -638,7 +638,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses)] + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index 0d0e6406..7fd27487 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -30,14 +30,14 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Order } # @!attribute status # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. # - # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status, nil] + # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil] optional :status, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Status } # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) @@ -50,9 +50,9 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc # - # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Filter output items by status. Use `failed` to filter by failed output + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Filter output items by status. 
Use `failed` to filter by failed output # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index 85505173..d3271c97 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -95,7 +95,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason @@ -160,7 +160,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. # diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index 5a75e4a0..e43f1fcf 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -95,7 +95,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason @@ -161,7 +161,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. 
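(Aside for reviewers: the evals hunks above are documentation-only. The YARD `@return`/`@param` tags now name the canonical `OpenAI::Models::Evals::…` constants, while the runtime `enum: -> { OpenAI::Evals::… }` lambdas keep resolving through the shorter alias, so caller code is unchanged. A minimal usage sketch of the documented enums, assuming the usual resource layout (`client.evals.runs.list`) and a placeholder eval id:

    require "openai"

    client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

    # `order` and `status` take the enum symbols documented above:
    # Order is `asc`/`desc`; Status is queued | in_progress | failed |
    # completed | canceled.
    runs = client.evals.runs.list(
      "eval_abc123",    # hypothetical eval id
      order: :desc,     # Symbol, OpenAI::Models::Evals::RunListParams::Order
      status: :completed, # Symbol, OpenAI::Models::Evals::RunListParams::Status
      limit: 10
    )
    runs.data.each { |run| puts run.id }

The symbols pass through unvalidated names aside; unknown values are simply not members of the documented enum.)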
# diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 5ee317be..9d196745 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -14,7 +14,7 @@ module FileChunkingStrategy variant :other, -> { OpenAI::OtherFileChunkingStrategyObject } # @!method self.variants - # @return [Array(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject)] + # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 6a46bdfc..8f756013 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -16,7 +16,7 @@ module FileChunkingStrategyParam variant :static, -> { OpenAI::StaticFileChunkingStrategyObjectParam } # @!method self.variants - # @return [Array(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam)] + # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 63dec904..44e48264 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -19,7 +19,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets # - # @return [Symbol, OpenAI::FilePurpose] + # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(file:, purpose:, request_options: {}) @@ -28,7 +28,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 82ca75f1..193eebac 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -27,7 +27,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::FileListParams::Order, nil] + # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::FileListParams::Order } # @!attribute purpose @@ -44,7 +44,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. # diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index ec5c9839..ac42a195 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -39,7 +39,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @return [Symbol, OpenAI::FileObject::Purpose] + # @return [Symbol, OpenAI::Models::FileObject::Purpose] required :purpose, enum: -> { OpenAI::FileObject::Purpose } # @!attribute status @@ -48,7 +48,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. # - # @return [Symbol, OpenAI::FileObject::Status] + # @return [Symbol, OpenAI::Models::FileObject::Status] required :status, enum: -> { OpenAI::FileObject::Status } # @!attribute expires_at @@ -67,8 +67,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel optional :status_details, String # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) - # Some parameter documentations has been truncated, see {OpenAI::FileObject} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileObject} for more details. # # The `File` object represents a document that has been uploaded to OpenAI. # @@ -80,9 +80,9 @@ class FileObject < OpenAI::Internal::Type::BaseModel # # @param filename [String] The name of the file. # - # @param purpose [Symbol, OpenAI::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants + # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants # - # @param status [Symbol, OpenAI::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro + # @param status [Symbol, OpenAI::Models::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro # # @param expires_at [Integer] The Unix timestamp (in seconds) for when the file will expire. # @@ -94,7 +94,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @see OpenAI::FileObject#purpose + # @see OpenAI::Models::FileObject#purpose module Purpose extend OpenAI::Internal::Type::Enum @@ -115,7 +115,7 @@ module Purpose # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. # - # @see OpenAI::FileObject#status + # @see OpenAI::Models::FileObject#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb index 921ece7c..0dfe4ffc 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -12,7 +12,7 @@ class GraderRunParams < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. 
# - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderRunParams::Grader } # @!attribute model_sample @@ -31,7 +31,7 @@ class GraderRunParams < OpenAI::Internal::Type::BaseModel } # @!method initialize(grader:, model_sample:, reference_answer:, request_options: {}) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param model_sample [String] The model sample to be evaluated. # @@ -61,7 +61,7 @@ module Grader variant :multi, -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end # The reference answer for the evaluation. @@ -72,7 +72,7 @@ module ReferenceAnswer variant OpenAI::Internal::Type::Unknown - variant -> { OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } + variant -> { OpenAI::Models::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } variant Float diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb index a252fb6a..fb0650a0 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb @@ -12,11 +12,11 @@ class GraderValidateParams < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. # - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderValidateParams::Grader } # @!method initialize(grader:, request_options: {}) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. 
+ # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -40,7 +40,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb index a89553ee..9d7458fc 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb @@ -9,11 +9,11 @@ class GraderValidateResponse < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. # - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader, nil] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader, nil] optional :grader, union: -> { OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader } # @!method initialize(grader: nil) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # The grader used for the fine-tuning job. # @@ -37,7 +37,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index 9bdaffeb..d49a0e2d 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -24,7 +24,7 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # @!attribute order # The order in which to retrieve permissions. 
# - # @return [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] + # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] optional :order, enum: -> { OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order } # @!attribute project_id @@ -38,7 +38,7 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. # diff --git a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb index b57d2245..e5dd0bfc 100644 --- a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb @@ -35,7 +35,7 @@ class DpoHyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::DpoHyperparameters} for more details. + # {OpenAI::Models::FineTuning::DpoHyperparameters} for more details. # # The hyperparameters used for the DPO fine-tuning job. # @@ -50,7 +50,7 @@ class DpoHyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::DpoHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::DpoHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -65,7 +65,7 @@ module BatchSize # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # - # @see OpenAI::FineTuning::DpoHyperparameters#beta + # @see OpenAI::Models::FineTuning::DpoHyperparameters#beta module Beta extend OpenAI::Internal::Type::Union @@ -80,7 +80,7 @@ module Beta # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::DpoHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::DpoHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -95,7 +95,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::DpoHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::DpoHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/fine_tuning/dpo_method.rb b/lib/openai/models/fine_tuning/dpo_method.rb index 023cf72f..57bfe306 100644 --- a/lib/openai/models/fine_tuning/dpo_method.rb +++ b/lib/openai/models/fine_tuning/dpo_method.rb @@ -7,13 +7,13 @@ class DpoMethod < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the DPO fine-tuning job. 
# - # @return [OpenAI::FineTuning::DpoHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::DpoHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::DpoHyperparameters } # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::FineTuning::DpoHyperparameters] The hyperparameters used for the DPO fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::DpoHyperparameters] The hyperparameters used for the DPO fine-tuning job. end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index b6041041..64df4360 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -21,7 +21,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. # - # @return [OpenAI::FineTuning::FineTuningJob::Error, nil] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] required :error, -> { OpenAI::FineTuning::FineTuningJob::Error }, nil?: true # @!attribute fine_tuned_model @@ -42,7 +42,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # - # @return [OpenAI::FineTuning::FineTuningJob::Hyperparameters] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] required :hyperparameters, -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters } # @!attribute model @@ -81,7 +81,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Status] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] required :status, enum: -> { OpenAI::FineTuning::FineTuningJob::Status } # @!attribute trained_tokens @@ -116,7 +116,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute integrations # A list of integrations to enable for this fine-tuning job. # - # @return [Array, nil] + # @return [Array, nil] optional :integrations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject] @@ -137,12 +137,12 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::FineTuning::FineTuningJob::Method, nil] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Method, nil] optional :method_, -> { OpenAI::FineTuning::FineTuningJob::Method }, api_name: :method # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob} for more details. # # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. 
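(Aside for reviewers: the `FineTuningJob` renames in this file only retarget cross-references; at runtime the fields stay plain model attributes. A reading sketch under that assumption, using a placeholder job id and the standard `client.fine_tuning.jobs.retrieve` call:

    job = client.fine_tuning.jobs.retrieve("ftjob-abc123") # hypothetical id

    job.status                   # Symbol from OpenAI::Models::FineTuning::FineTuningJob::Status
    job.error&.message           # nil unless the job has `failed`
    job.hyperparameters.n_epochs # :auto or an Integer, per the unions below
    job.method_&.type            # :supervised, :dpo, or :reinforcement

Note the trailing underscore on `method_`: the attribute is declared with `api_name: :method`, so the wire name stays `method` while the Ruby accessor avoids shadowing `Object#method`.)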
@@ -151,13 +151,13 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. # - # @param error [OpenAI::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t + # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t # # @param fine_tuned_model [String, nil] The name of the fine-tuned model that is being created. The value will be null i # # @param finished_at [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job was finished. The v # - # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return # # @param model [String] The base model that is being fine-tuned. # @@ -167,7 +167,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param seed [Integer] The seed used for the fine-tuning job. # - # @param status [Symbol, OpenAI::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files + # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files # # @param trained_tokens [Integer, nil] The total number of billable tokens processed by this fine-tuning job. The value # @@ -177,15 +177,15 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param estimated_finish [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job is estimated to fin # - # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::FineTuningJob::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] The method used for fine-tuning. # # @param object [Symbol, :"fine_tuning.job"] The object type, which is always "fine_tuning.job". - # @see OpenAI::FineTuning::FineTuningJob#error + # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel # @!attribute code # A machine-readable error code. @@ -208,7 +208,7 @@ class Error < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:, param:) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Error} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob::Error} for more details. # # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. @@ -220,7 +220,7 @@ class Error < OpenAI::Internal::Type::BaseModel # @param param [String, nil] The parameter that was invalid, usually `training_file` or `validation_file`. 
Th end - # @see OpenAI::FineTuning::FineTuningJob#hyperparameters + # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model @@ -248,7 +248,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Hyperparameters} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. @@ -262,7 +262,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#batch_size + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -279,7 +279,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -294,7 +294,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -310,7 +310,7 @@ module NEpochs # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. # - # @see OpenAI::FineTuning::FineTuningJob#status + # @see OpenAI::Models::FineTuning::FineTuningJob#status module Status extend OpenAI::Internal::Type::Enum @@ -325,46 +325,46 @@ module Status # @return [Array] end - # @see OpenAI::FineTuning::FineTuningJob#method_ + # @see OpenAI::Models::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] required :type, enum: -> { OpenAI::FineTuning::FineTuningJob::Method::Type } # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::DpoMethod, nil] + # @return [OpenAI::Models::FineTuning::DpoMethod, nil] optional :dpo, -> { OpenAI::FineTuning::DpoMethod } # @!attribute reinforcement # Configuration for the reinforcement fine-tuning method. # - # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementMethod, nil] optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } # @!attribute supervised # Configuration for the supervised fine-tuning method. 
# - # @return [OpenAI::FineTuning::SupervisedMethod, nil] + # @return [OpenAI::Models::FineTuning::SupervisedMethod, nil] optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param type [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. + # @param dpo [OpenAI::Models::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # @param reinforcement [OpenAI::Models::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. # - # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. + # @param supervised [OpenAI::Models::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @see OpenAI::FineTuning::FineTuningJob::Method#type + # @see OpenAI::Models::FineTuning::FineTuningJob::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 743df8cc..d609e035 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -20,7 +20,7 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute level # The log level of the event. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] required :level, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Level } # @!attribute message @@ -44,7 +44,7 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of event. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type, nil] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type, nil] optional :type, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Type } # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") @@ -54,19 +54,19 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. # - # @param level [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] The log level of the event. + # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] The log level of the event. # # @param message [String] The message of the event. # # @param data [Object] The data associated with the event. # - # @param type [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type] The type of event. + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] The type of event. # # @param object [Symbol, :"fine_tuning.job.event"] The object type, which is always "fine_tuning.job.event". # The log level of the event. 
# - # @see OpenAI::FineTuning::FineTuningJobEvent#level + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#level module Level extend OpenAI::Internal::Type::Enum @@ -80,7 +80,7 @@ module Level # The type of event. # - # @see OpenAI::FineTuning::FineTuningJobEvent#type + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 07b43842..e9e7c30d 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -35,7 +35,7 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJobWandbIntegration} for more details. + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegration} for more details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 4b691bd6..54781dde 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -16,14 +16,15 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @return [OpenAI::FineTuning::FineTuningJobWandbIntegration] + # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] required :wandb, -> { OpenAI::FineTuning::FineTuningJobWandbIntegration } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJobWandbIntegrationObject} for more details. + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject} for more + # details. # - # @param wandb [OpenAI::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie + # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie # # @param type [Symbol, :wandb] The type of the integration being enabled for the fine-tuning job end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index f735d85b..193bc2d0 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -12,7 +12,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). 
# - # @return [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] + # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] required :model, union: -> { OpenAI::FineTuning::JobCreateParams::Model } # @!attribute training_file @@ -43,13 +43,13 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # - # @return [OpenAI::FineTuning::JobCreateParams::Hyperparameters, nil] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters } # @!attribute integrations # A list of integrations to enable for your fine-tuning job. # - # @return [Array, nil] + # @return [Array, nil] optional :integrations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::JobCreateParams::Integration] }, nil?: true @@ -68,7 +68,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::FineTuning::JobCreateParams::Method, nil] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Method, nil] optional :method_, -> { OpenAI::FineTuning::JobCreateParams::Method }, api_name: :method # @!attribute seed @@ -110,17 +110,17 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::FineTuning::JobCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # # @param training_file [String] The ID of an uploaded file that contains training data. # - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. 
Passing in the same seed and j # @@ -137,13 +137,13 @@ module Model variant String - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } # @!method self.variants # @return [Array(String, Symbol)] @@ -188,7 +188,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Hyperparameters} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. @@ -202,7 +202,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#batch_size + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -217,7 +217,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -232,7 +232,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -259,18 +259,18 @@ class Integration < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. # - # @return [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] required :wandb, -> { OpenAI::FineTuning::JobCreateParams::Integration::Wandb } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Integration} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Integration} for more details. # - # @param wandb [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie + # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. 
This payload specifie # # @param type [Symbol, :wandb] The type of integration to enable. Currently, only "wandb" (Weights and Biases) - # @see OpenAI::FineTuning::JobCreateParams::Integration#wandb + # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel # @!attribute project # The name of the project that the new run will be created under. @@ -303,7 +303,8 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Integration::Wandb} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb} for more + # details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an @@ -324,41 +325,41 @@ class Method < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @return [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] + # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] required :type, enum: -> { OpenAI::FineTuning::JobCreateParams::Method::Type } # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::DpoMethod, nil] + # @return [OpenAI::Models::FineTuning::DpoMethod, nil] optional :dpo, -> { OpenAI::FineTuning::DpoMethod } # @!attribute reinforcement # Configuration for the reinforcement fine-tuning method. # - # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementMethod, nil] optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } # @!attribute supervised # Configuration for the supervised fine-tuning method. # - # @return [OpenAI::FineTuning::SupervisedMethod, nil] + # @return [OpenAI::Models::FineTuning::SupervisedMethod, nil] optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param type [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. + # @param dpo [OpenAI::Models::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # @param reinforcement [OpenAI::Models::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. # - # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. + # @param supervised [OpenAI::Models::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. 
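(Aside for reviewers: a creation-side sketch of the `Method` shape documented here. Parameter names follow the diff above; the model string and file id are placeholders, and the hash form is assumed to coerce into the typed models as elsewhere in the SDK:

    job = client.fine_tuning.jobs.create(
      model: "gpt-4o-mini-2024-07-18",  # any fine-tunable model
      training_file: "file-abc123",     # hypothetical uploaded-file id
      method_: {                        # wire name `method`, see api_name above
        type: :supervised,
        supervised: {hyperparameters: {n_epochs: :auto}}
      }
    )

Exactly one of `dpo`, `reinforcement`, or `supervised` is expected to match `type`; the hyperparameter unions accept `:auto` or a concrete number, as the `BatchSize`/`NEpochs` modules indicate.)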
# - # @see OpenAI::FineTuning::JobCreateParams::Method#type + # @see OpenAI::Models::FineTuning::JobCreateParams::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index f0a8ff33..cb4e4a9b 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -33,7 +33,7 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @!attribute metrics # Metrics at the step number during the fine-tuning job. # - # @return [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] + # @return [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] required :metrics, -> { OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics } # @!attribute object @@ -60,13 +60,13 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # # @param fine_tuning_job_id [String] The name of the fine-tuning job that this checkpoint was created from. # - # @param metrics [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. + # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. # # @param step_number [Integer] The step number that the checkpoint was created at. # # @param object [Symbol, :"fine_tuning.job.checkpoint"] The object type, which is always "fine_tuning.job.checkpoint". - # @see OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint#metrics + # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel # @!attribute full_valid_loss # diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb index 7df826e1..8484df8c 100644 --- a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -48,7 +48,7 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # Level of reasoning effort. # - # @return [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort @@ -56,7 +56,7 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, compute_multiplier: nil, eval_interval: nil, eval_samples: nil, learning_rate_multiplier: nil, n_epochs: nil, reasoning_effort: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::ReinforcementHyperparameters} for more details. + # {OpenAI::Models::FineTuning::ReinforcementHyperparameters} for more details. # # The hyperparameters used for the reinforcement fine-tuning job. # @@ -72,12 +72,12 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t # - # @param reasoning_effort [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort] Level of reasoning effort. 
+ # @param reasoning_effort [Symbol, OpenAI::Models::FineTuning::ReinforcementHyperparameters::ReasoningEffort] Level of reasoning effort. # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -91,7 +91,7 @@ module BatchSize # Multiplier on amount of compute used for exploring search space during training. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#compute_multiplier + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#compute_multiplier module ComputeMultiplier extend OpenAI::Internal::Type::Union @@ -105,7 +105,7 @@ module ComputeMultiplier # The number of training steps between evaluation runs. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_interval + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#eval_interval module EvalInterval extend OpenAI::Internal::Type::Union @@ -119,7 +119,7 @@ module EvalInterval # Number of evaluation samples to generate per training step. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_samples + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#eval_samples module EvalSamples extend OpenAI::Internal::Type::Union @@ -134,7 +134,7 @@ module EvalSamples # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -149,7 +149,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -163,7 +163,7 @@ module NEpochs # Level of reasoning effort. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#reasoning_effort + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#reasoning_effort module ReasoningEffort extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/reinforcement_method.rb b/lib/openai/models/fine_tuning/reinforcement_method.rb index d2955f1e..40395ee8 100644 --- a/lib/openai/models/fine_tuning/reinforcement_method.rb +++ b/lib/openai/models/fine_tuning/reinforcement_method.rb @@ -7,25 +7,25 @@ class ReinforcementMethod < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. # - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::ReinforcementMethod::Grader } # @!attribute hyperparameters # The hyperparameters used for the reinforcement fine-tuning job. 
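Since every hyperparameter above is a Symbol-or-number union, a value sketch showing both variants side by side (the specific values are illustrative):

# :auto selects the Symbol variant; bare numbers select the
# Integer/Float variant of the unions documented above.
reinforcement_hyperparameters = {
  batch_size: :auto,
  compute_multiplier: :auto,
  eval_interval: 25,
  eval_samples: 4,
  learning_rate_multiplier: 0.05,
  n_epochs: 2,
  reasoning_effort: :medium
}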
# - # @return [OpenAI::FineTuning::ReinforcementHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::ReinforcementHyperparameters } # @!method initialize(grader:, hyperparameters: nil) # Configuration for the reinforcement fine-tuning method. # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # - # @param hyperparameters [OpenAI::FineTuning::ReinforcementHyperparameters] The hyperparameters used for the reinforcement fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::ReinforcementHyperparameters] The hyperparameters used for the reinforcement fine-tuning job. # The grader used for the fine-tuning job. # - # @see OpenAI::FineTuning::ReinforcementMethod#grader + # @see OpenAI::Models::FineTuning::ReinforcementMethod#grader module Grader extend OpenAI::Internal::Type::Union @@ -45,7 +45,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb index 612870f0..a7965756 100644 --- a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb @@ -28,7 +28,7 @@ class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::SupervisedHyperparameters} for more details. + # {OpenAI::Models::FineTuning::SupervisedHyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. # @@ -41,7 +41,7 @@ class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::SupervisedHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -56,7 +56,7 @@ module BatchSize # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::SupervisedHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -71,7 +71,7 @@ module LearningRateMultiplier # The number of epochs to train the model for. 
An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::SupervisedHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union diff --git a/lib/openai/models/fine_tuning/supervised_method.rb b/lib/openai/models/fine_tuning/supervised_method.rb index f45655a0..f81f3648 100644 --- a/lib/openai/models/fine_tuning/supervised_method.rb +++ b/lib/openai/models/fine_tuning/supervised_method.rb @@ -7,13 +7,13 @@ class SupervisedMethod < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::FineTuning::SupervisedHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::SupervisedHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::SupervisedHyperparameters } # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::FineTuning::SupervisedHyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::SupervisedHyperparameters] The hyperparameters used for the fine-tuning job. end end end diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 10fe7935..14e2bc4c 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -41,7 +41,7 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FunctionDefinition} for more details. + # {OpenAI::Models::FunctionDefinition} for more details. # # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc # diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index 99e0e087..0f0d52aa 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -6,7 +6,7 @@ module Graders class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # - # @return [Array] + # @return [Array] required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::LabelModelGrader::Input] } # @!attribute labels @@ -43,7 +43,7 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] + # @param input [Array] # # @param labels [Array] The labels to assign to each item in the evaluation. # @@ -59,25 +59,25 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. 
# - # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] + # @return [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Role] required :role, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type, nil] + # @return [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Type, nil] optional :type, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::LabelModelGrader::Input} for more details. + # {OpenAI::Models::Graders::LabelModelGrader::Input} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -85,15 +85,15 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Graders::LabelModelGrader::Input#content + # @see OpenAI::Models::Graders::LabelModelGrader::Input#content module Content extend OpenAI::Internal::Type::Union @@ -121,7 +121,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::LabelModelGrader::Input::Content::OutputText} for more + # {OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText} for more # details. # # A text output from the model. @@ -132,13 +132,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::LabelModelGrader::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Graders::LabelModelGrader::Input#role + # @see OpenAI::Models::Graders::LabelModelGrader::Input#role module Role extend OpenAI::Internal::Type::Enum @@ -153,7 +153,7 @@ module Role # The type of the message input. Always `message`. 
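A payload sketch for the Input model above, showing two variants of the content union (a bare String and an input_text-shaped hash); the grader fields, including passing_labels, are illustrative and the template syntax is assumed:

label_model_grader = {
  type: :label_model,
  name: "sentiment",
  model: "gpt-4o-mini",
  input: [
    # String variant of the content union.
    {role: :developer, content: "Classify the sentiment of the text."},
    # ResponseInputText-shaped variant.
    {role: :user, content: {type: :input_text, text: "{{item.text}}"}}
  ],
  labels: %w[positive neutral negative],
  passing_labels: %w[positive]
}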
# - # @see OpenAI::Graders::LabelModelGrader::Input#type + # @see OpenAI::Models::Graders::LabelModelGrader::Input#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index 7b93a32c..de0d1240 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -12,7 +12,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # @!attribute graders # - # @return [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + # @return [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] required :graders, -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Graders::MultiGrader::Grader] } # @!attribute name @@ -33,7 +33,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # # @param calculate_output [String] A formula to calculate the output based on grader results. # - # @param graders [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + # @param graders [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] # # @param name [String] The name of the grader. # @@ -61,7 +61,7 @@ module Grader variant -> { OpenAI::Graders::LabelModelGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader)] + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader)] end end end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 62cf1a6c..bdec7e50 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -7,7 +7,7 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # The input text. This may include template strings. # - # @return [Array] + # @return [Array] required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::ScoreModelGrader::Input] } # @!attribute model @@ -43,7 +43,7 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. + # @param input [Array] The input text. This may include template strings. # # @param model [String] The model to use for the evaluation. # @@ -59,25 +59,25 @@ class Input < OpenAI::Internal::Type::BaseModel # @!attribute content # Text inputs to the model - can contain template strings. 
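The graders hash above accepts any of the five grader unions keyed by name; a sketch with two sub-graders and a weighting formula (names, weights, and template strings are illustrative):

multi_grader = {
  type: :multi,
  name: "combined",
  graders: {
    sim: {type: :text_similarity, name: "sim", evaluation_metric: :fuzzy_match,
          input: "{{sample.output_text}}", reference: "{{item.reference}}"},
    exact: {type: :string_check, name: "exact", operation: :eq,
            input: "{{sample.output_text}}", reference: "{{item.reference}}"}
  },
  # Sub-grader scores are combined by key, per the calculate_output docs.
  calculate_output: "0.7 * sim + 0.3 * exact"
}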
# - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] + # @return [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Role] required :role, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type, nil] + # @return [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Type, nil] optional :type, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::ScoreModelGrader::Input} for more details. + # {OpenAI::Models::Graders::ScoreModelGrader::Input} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -85,15 +85,15 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. # Text inputs to the model - can contain template strings. # - # @see OpenAI::Graders::ScoreModelGrader::Input#content + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#content module Content extend OpenAI::Internal::Type::Union @@ -121,7 +121,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText} for more + # {OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText} for more # details. # # A text output from the model. @@ -132,13 +132,13 @@ class OutputText < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText)] end # The role of the message input. 
One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Graders::ScoreModelGrader::Input#role + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#role module Role extend OpenAI::Internal::Type::Enum @@ -153,7 +153,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Graders::ScoreModelGrader::Input#type + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/graders/string_check_grader.rb b/lib/openai/models/graders/string_check_grader.rb index 90aeb689..2d8f5a30 100644 --- a/lib/openai/models/graders/string_check_grader.rb +++ b/lib/openai/models/graders/string_check_grader.rb @@ -19,7 +19,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # @!attribute operation # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # - # @return [Symbol, OpenAI::Graders::StringCheckGrader::Operation] + # @return [Symbol, OpenAI::Models::Graders::StringCheckGrader::Operation] required :operation, enum: -> { OpenAI::Graders::StringCheckGrader::Operation } # @!attribute reference @@ -42,7 +42,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the grader. # - # @param operation [Symbol, OpenAI::Graders::StringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # @param operation [Symbol, OpenAI::Models::Graders::StringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # # @param reference [String] The reference text. This may include template strings. # @@ -50,7 +50,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # - # @see OpenAI::Graders::StringCheckGrader#operation + # @see OpenAI::Models::Graders::StringCheckGrader#operation module Operation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/graders/text_similarity_grader.rb b/lib/openai/models/graders/text_similarity_grader.rb index 30b7eaad..a64e3314 100644 --- a/lib/openai/models/graders/text_similarity_grader.rb +++ b/lib/openai/models/graders/text_similarity_grader.rb @@ -8,7 +8,7 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # - # @return [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric] + # @return [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] required :evaluation_metric, enum: -> { OpenAI::Graders::TextSimilarityGrader::EvaluationMetric } # @!attribute input @@ -37,11 +37,11 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(evaluation_metric:, input:, name:, reference:, type: :text_similarity) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::TextSimilarityGrader} for more details. + # {OpenAI::Models::Graders::TextSimilarityGrader} for more details. # # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @param evaluation_metric [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. 
One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r + # @param evaluation_metric [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r # # @param input [String] The text being graded. # @@ -54,7 +54,7 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # - # @see OpenAI::Graders::TextSimilarityGrader#evaluation_metric + # @see OpenAI::Models::Graders::TextSimilarityGrader#evaluation_metric module EvaluationMetric extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index e10a18f2..bedd8bcf 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -26,8 +26,8 @@ class Image < OpenAI::Internal::Type::BaseModel optional :url, String # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) - # Some parameter documentations has been truncated, see {OpenAI::Image} for more - # details. + # Some parameter documentations has been truncated, see {OpenAI::Models::Image} + # for more details. # # Represents the content or the URL of an image generated by the OpenAI API. # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index fa6854a2..29559ca4 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -18,7 +18,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The model to use for image generation. Only `dall-e-2` is supported at this # time. # - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageCreateVariationParams::Model }, nil?: true # @!attribute n @@ -32,14 +32,14 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. # - # @return [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageCreateVariationParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. # - # @return [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] optional :size, enum: -> { OpenAI::ImageCreateVariationParams::Size }, nil?: true # @!attribute user @@ -56,13 +56,13 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. 
Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -79,7 +79,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 25129f87..ea3225f3 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -35,7 +35,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::ImageEditParams::Background, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Background, nil] optional :background, enum: -> { OpenAI::ImageEditParams::Background }, nil?: true # @!attribute mask @@ -52,7 +52,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. # - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageEditParams::Model }, nil?: true # @!attribute n @@ -66,7 +66,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. # - # @return [Symbol, OpenAI::ImageEditParams::Quality, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] optional :quality, enum: -> { OpenAI::ImageEditParams::Quality }, nil?: true # @!attribute response_format @@ -75,7 +75,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` # will always return base64-encoded images. # - # @return [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageEditParams::ResponseFormat }, nil?: true # @!attribute size @@ -83,7 +83,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # (landscape), `1024x1536` (portrait), or `auto` (default value) for # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. # - # @return [Symbol, OpenAI::ImageEditParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::ImageEditParams::Size }, nil?: true # @!attribute user @@ -102,19 +102,19 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @param prompt [String] A text description of the desired image(s). 
The maximum length is 1000 character # - # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -132,7 +132,7 @@ module Image variant OpenAI::Internal::Type::FileInput - variant -> { OpenAI::ImageEditParams::Image::StringArray } + variant -> { OpenAI::Models::ImageEditParams::Image::StringArray } # @!method self.variants # @return [Array(StringIO, Array)] @@ -171,7 +171,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The quality of the image that will be generated. `high`, `medium` and `low` are diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index c46f1558..b0b47d41 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -24,7 +24,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Background, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] optional :background, enum: -> { OpenAI::ImageGenerateParams::Background }, nil?: true # @!attribute model @@ -32,14 +32,14 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. 
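An edit-call sketch exercising the Pathname variant of the file union and a few of the enums renamed above; the client call and file names are assumptions, not part of this diff:

require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

edit = client.images.edit(
  image: Pathname("lounge.png"),   # Pathname variant of the file union
  mask: Pathname("mask.png"),
  prompt: "A sunlit indoor lounge area with a pool containing a flamingo",
  model: "gpt-image-1",
  size: :"1024x1024"               # coerced into ImageEditParams::Size
)
puts edit.data&.first&.b64_json&.bytesize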
# - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageGenerateParams::Model }, nil?: true # @!attribute moderation # Control the content-moderation level for images generated by `gpt-image-1`. Must # be either `low` for less restrictive filtering or `auto` (default value). # - # @return [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] optional :moderation, enum: -> { OpenAI::ImageGenerateParams::Moderation }, nil?: true # @!attribute n @@ -61,7 +61,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. # - # @return [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] optional :output_format, enum: -> { OpenAI::ImageGenerateParams::OutputFormat }, nil?: true # @!attribute quality @@ -73,7 +73,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # - `hd` and `standard` are supported for `dall-e-3`. # - `standard` is the only option for `dall-e-2`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Quality, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] optional :quality, enum: -> { OpenAI::ImageGenerateParams::Quality }, nil?: true # @!attribute response_format @@ -82,7 +82,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # after the image has been generated. This parameter isn't supported for # `gpt-image-1` which will always return base64-encoded images. # - # @return [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageGenerateParams::ResponseFormat }, nil?: true # @!attribute size @@ -91,7 +91,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] optional :size, enum: -> { OpenAI::ImageGenerateParams::Size }, nil?: true # @!attribute style @@ -100,7 +100,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # towards generating hyper-real and dramatic images. Natural causes the model to # produce more natural, less hyper-real looking images. # - # @return [Symbol, OpenAI::ImageGenerateParams::Style, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::ImageGenerateParams::Style }, nil?: true # @!attribute user @@ -117,25 +117,25 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # - # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. 
One of `dall-e-2`, `dall-e-3`, or `gpt-im + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only # - # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # - # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -171,7 +171,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # Control the content-moderation level for images generated by `gpt-image-1`. Must diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index df579e91..ecd33bc9 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -13,28 +13,28 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of generated images. # - # @return [Array, nil] + # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Image] } # @!attribute usage # For `gpt-image-1` only, the token usage information for the image generation. 
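A generate-call sketch touching several of the enums renamed above; symbols are coerced into the corresponding OpenAI::Models::ImageGenerateParams enums (the client call itself is assumed, not shown in this hunk):

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

image = client.images.generate(
  prompt: "A watercolor heron at dawn",
  model: "gpt-image-1",
  background: :transparent,  # needs an alpha-capable output format
  output_format: :png,
  quality: :high,
  size: :"1024x1024"
).data&.first
puts image&.b64_json&.length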
# - # @return [OpenAI::ImagesResponse::Usage, nil] + # @return [OpenAI::Models::ImagesResponse::Usage, nil] optional :usage, -> { OpenAI::ImagesResponse::Usage } # @!method initialize(created:, data: nil, usage: nil) - # Some parameter documentations has been truncated, see {OpenAI::ImagesResponse} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImagesResponse} for more details. # # The response from the image generation endpoint. # # @param created [Integer] The Unix timestamp (in seconds) of when the image was created. # - # @param data [Array] The list of generated images. + # @param data [Array] The list of generated images. # - # @param usage [OpenAI::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # @param usage [OpenAI::Models::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. - # @see OpenAI::ImagesResponse#usage + # @see OpenAI::Models::ImagesResponse#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens # The number of tokens (images and text) in the input prompt. @@ -45,7 +45,7 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # The input tokens detailed information for the image generation. # - # @return [OpenAI::ImagesResponse::Usage::InputTokensDetails] + # @return [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] required :input_tokens_details, -> { OpenAI::ImagesResponse::Usage::InputTokensDetails } # @!attribute output_tokens @@ -65,13 +65,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. # - # @param input_tokens_details [OpenAI::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. # # @param output_tokens [Integer] The number of image tokens in the output image. # # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. - # @see OpenAI::ImagesResponse::Usage#input_tokens_details + # @see OpenAI::Models::ImagesResponse::Usage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute image_tokens # The number of image tokens in the input prompt. diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 39fc2d41..82a8c595 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -6,19 +6,19 @@ class Moderation < OpenAI::Internal::Type::BaseModel # @!attribute categories # A list of the categories, and whether they are flagged or not. # - # @return [OpenAI::Moderation::Categories] + # @return [OpenAI::Models::Moderation::Categories] required :categories, -> { OpenAI::Moderation::Categories } # @!attribute category_applied_input_types # A list of the categories along with the input type(s) that the score applies to. # - # @return [OpenAI::Moderation::CategoryAppliedInputTypes] + # @return [OpenAI::Models::Moderation::CategoryAppliedInputTypes] required :category_applied_input_types, -> { OpenAI::Moderation::CategoryAppliedInputTypes } # @!attribute category_scores # A list of the categories along with their scores as predicted by model. 
# - # @return [OpenAI::Moderation::CategoryScores] + # @return [OpenAI::Models::Moderation::CategoryScores] required :category_scores, -> { OpenAI::Moderation::CategoryScores } # @!attribute flagged @@ -28,18 +28,18 @@ class Moderation < OpenAI::Internal::Type::BaseModel required :flagged, OpenAI::Internal::Type::Boolean # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) - # Some parameter documentations has been truncated, see {OpenAI::Moderation} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Moderation} for more details. # - # @param categories [OpenAI::Moderation::Categories] A list of the categories, and whether they are flagged or not. + # @param categories [OpenAI::Models::Moderation::Categories] A list of the categories, and whether they are flagged or not. # - # @param category_applied_input_types [OpenAI::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. + # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. # - # @param category_scores [OpenAI::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. + # @param category_scores [OpenAI::Models::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. # # @param flagged [Boolean] Whether any of the below categories are flagged. - # @see OpenAI::Moderation#categories + # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any @@ -138,7 +138,7 @@ class Categories < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # Some parameter documentations has been truncated, see - # {OpenAI::Moderation::Categories} for more details. + # {OpenAI::Models::Moderation::Categories} for more details. # # A list of the categories, and whether they are flagged or not. # @@ -169,12 +169,12 @@ class Categories < OpenAI::Internal::Type::BaseModel # @param violence_graphic [Boolean] Content that depicts death, violence, or physical injury in graphic detail. end - # @see OpenAI::Moderation#category_applied_input_types + # @see OpenAI::Models::Moderation#category_applied_input_types class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The applied input type(s) for the category 'harassment'. # - # @return [Array] + # @return [Array] required :harassment, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Harassment] @@ -183,7 +183,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment_threatening # The applied input type(s) for the category 'harassment/threatening'. # - # @return [Array] + # @return [Array] required :harassment_threatening, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] @@ -193,7 +193,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute hate # The applied input type(s) for the category 'hate'. 
# - # @return [Array] + # @return [Array] required :hate, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Hate] @@ -202,7 +202,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute hate_threatening # The applied input type(s) for the category 'hate/threatening'. # - # @return [Array] + # @return [Array] required :hate_threatening, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening] @@ -212,7 +212,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute illicit # The applied input type(s) for the category 'illicit'. # - # @return [Array] + # @return [Array] required :illicit, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Illicit] @@ -221,7 +221,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute illicit_violent # The applied input type(s) for the category 'illicit/violent'. # - # @return [Array] + # @return [Array] required :illicit_violent, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent] @@ -231,7 +231,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm # The applied input type(s) for the category 'self-harm'. # - # @return [Array] + # @return [Array] required :self_harm, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm] @@ -241,7 +241,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm_instructions # The applied input type(s) for the category 'self-harm/instructions'. # - # @return [Array] + # @return [Array] required :self_harm_instructions, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] @@ -251,7 +251,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm_intent # The applied input type(s) for the category 'self-harm/intent'. # - # @return [Array] + # @return [Array] required :self_harm_intent, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] @@ -261,7 +261,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute sexual # The applied input type(s) for the category 'sexual'. # - # @return [Array] + # @return [Array] required :sexual, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Sexual] @@ -270,7 +270,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute sexual_minors # The applied input type(s) for the category 'sexual/minors'. # - # @return [Array] + # @return [Array] required :sexual_minors, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor] @@ -280,7 +280,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute violence # The applied input type(s) for the category 'violence'. # - # @return [Array] + # @return [Array] required :violence, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Violence] @@ -289,7 +289,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute violence_graphic # The applied input type(s) for the category 'violence/graphic'. 
# - # @return [Array] + # @return [Array] required :violence_graphic, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] @@ -299,31 +299,31 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # A list of the categories along with the input type(s) that the score applies to. # - # @param harassment [Array] The applied input type(s) for the category 'harassment'. + # @param harassment [Array] The applied input type(s) for the category 'harassment'. # - # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. + # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. # - # @param hate [Array] The applied input type(s) for the category 'hate'. + # @param hate [Array] The applied input type(s) for the category 'hate'. # - # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. + # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. # - # @param illicit [Array] The applied input type(s) for the category 'illicit'. + # @param illicit [Array] The applied input type(s) for the category 'illicit'. # - # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. + # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. # - # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. + # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. # - # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. + # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. # - # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. + # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. # - # @param sexual [Array] The applied input type(s) for the category 'sexual'. + # @param sexual [Array] The applied input type(s) for the category 'sexual'. # - # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. + # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. # - # @param violence [Array] The applied input type(s) for the category 'violence'. + # @param violence [Array] The applied input type(s) for the category 'violence'. # - # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. + # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. module Harassment extend OpenAI::Internal::Type::Enum @@ -449,7 +449,7 @@ module ViolenceGraphic end end - # @see OpenAI::Moderation#category_scores + # @see OpenAI::Models::Moderation#category_scores class CategoryScores < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The score for the category 'harassment'. 
diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 59e0cdd9..008c8b28 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -11,7 +11,7 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. # - # @return [String, Array, Array] + # @return [String, Array, Array] required :input, union: -> { OpenAI::ModerationCreateParams::Input } # @!attribute model @@ -20,16 +20,16 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # learn about available models # [here](https://platform.openai.com/docs/models#moderation). # - # @return [String, Symbol, OpenAI::ModerationModel, nil] + # @return [String, Symbol, OpenAI::Models::ModerationModel, nil] optional :model, union: -> { OpenAI::ModerationCreateParams::Model } # @!method initialize(input:, model: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ModerationCreateParams} for more details. # - # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or # - # @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. Learn more in + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -42,13 +42,13 @@ module Input variant String # An array of strings to classify for moderation. - variant -> { OpenAI::ModerationCreateParams::Input::StringArray } + variant -> { OpenAI::Models::ModerationCreateParams::Input::StringArray } # An array of multi-modal inputs to the moderation model. - variant -> { OpenAI::ModerationCreateParams::Input::ModerationMultiModalInputArray } + variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray } # @!method self.variants - # @return [Array(String, Array, Array)] + # @return [Array(String, Array, Array)] # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -73,7 +73,7 @@ module Model variant enum: -> { OpenAI::ModerationModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ModerationModel)] + # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] end end end diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 17c60d91..0085e8a8 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -19,7 +19,7 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute results # A list of moderation objects. # - # @return [Array] + # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Moderation] } # @!method initialize(id:, model:, results:) @@ -29,7 +29,7 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # # @param model [String] The model used to generate the moderation results. # - # @param results [Array] A list of moderation objects. + # @param results [Array] A list of moderation objects. 
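A round-trip sketch for the params and response models above, using the single-String variant of the input union (client call assumed):

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

result = client.moderations.create(input: "some user-generated text to screen")
               .results
               .first

# Categories and CategoryScores are the nested models documented above.
puts result.flagged
puts result.categories.harassment
puts result.category_scores.harassment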
end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index ed95c5b8..dca658e1 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -6,7 +6,7 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!attribute image_url # Contains either an image URL or a data URL for a base64 encoded image. # - # @return [OpenAI::ModerationImageURLInput::ImageURL] + # @return [OpenAI::Models::ModerationImageURLInput::ImageURL] required :image_url, -> { OpenAI::ModerationImageURLInput::ImageURL } # @!attribute type @@ -18,11 +18,11 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # An object describing an image to classify. # - # @param image_url [OpenAI::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. + # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. # # @param type [Symbol, :image_url] Always `image_url`. - # @see OpenAI::ModerationImageURLInput#image_url + # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index 5142ae94..8b89503d 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -15,7 +15,7 @@ module ModerationMultiModalInput variant :text, -> { OpenAI::ModerationTextInput } # @!method self.variants - # @return [Array(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] + # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 9c418c18..2cdebe06 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -11,7 +11,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute generate_summary @@ -23,7 +23,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @return [Symbol, OpenAI::Reasoning::GenerateSummary, nil] + # @return [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] optional :generate_summary, enum: -> { OpenAI::Reasoning::GenerateSummary }, nil?: true # @!attribute summary @@ -31,23 +31,23 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. 
# - # @return [Symbol, OpenAI::Reasoning::Summary, nil] + # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] optional :summary, enum: -> { OpenAI::Reasoning::Summary }, nil?: true # @!method initialize(effort: nil, generate_summary: nil, summary: nil) - # Some parameter documentations has been truncated, see {OpenAI::Reasoning} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Reasoning} for more details. # # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @param effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param generate_summary [Symbol, OpenAI::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. + # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. # - # @param summary [Symbol, OpenAI::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be + # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be # @deprecated # @@ -57,7 +57,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @see OpenAI::Reasoning#generate_summary + # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary extend OpenAI::Internal::Type::Enum @@ -73,7 +73,7 @@ module GenerateSummary # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @see OpenAI::Reasoning#summary + # @see OpenAI::Models::Reasoning#summary module Summary extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 294afc55..d8a941a3 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -6,7 +6,7 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute json_schema # Structured Outputs configuration options, including a JSON Schema. # - # @return [OpenAI::ResponseFormatJSONSchema::JSONSchema] + # @return [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] required :json_schema, -> { OpenAI::ResponseFormatJSONSchema::JSONSchema } # @!attribute type @@ -17,17 +17,17 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(json_schema:, type: :json_schema) # Some parameter documentations has been truncated, see - # {OpenAI::ResponseFormatJSONSchema} for more details. + # {OpenAI::Models::ResponseFormatJSONSchema} for more details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param json_schema [OpenAI::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. + # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. # # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. 
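As a usage sketch, the response format above can be constructed directly. Attribute names follow the initializers in this diff (the nested `JSONSchema` fields are detailed just below); the schema contents themselves are hypothetical:

```ruby
# A minimal sketch of building a JSON-schema response format; hash contents
# under `schema:` are an invented example.
format = OpenAI::ResponseFormatJSONSchema.new(
  json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema.new(
    name: "city_weather",
    description: "A structured weather report",
    schema: {
      type: "object",
      properties: {
        city: {type: "string"},
        temperature_c: {type: "number"}
      },
      required: %w[city temperature_c],
      additionalProperties: false
    },
    strict: true # enforce exact schema adherence via Structured Outputs
  )
)

format.type # => :json_schema (defaulted by the initializer above)
```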
- # @see OpenAI::ResponseFormatJSONSchema#json_schema + # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. Must be a-z, A-Z, 0-9, or contain underscores @@ -62,7 +62,7 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, schema: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::ResponseFormatJSONSchema::JSONSchema} for more details. + # {OpenAI::Models::ResponseFormatJSONSchema::JSONSchema} for more details. # # Structured Outputs configuration options, including a JSON Schema. # diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index e6b14978..b883865c 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -19,7 +19,7 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @!attribute environment # The type of computer environment to control. # - # @return [Symbol, OpenAI::Responses::ComputerTool::Environment] + # @return [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] required :environment, enum: -> { OpenAI::Responses::ComputerTool::Environment } # @!attribute type @@ -36,13 +36,13 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # # @param display_width [Integer] The width of the computer display. # - # @param environment [Symbol, OpenAI::Responses::ComputerTool::Environment] The type of computer environment to control. + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. # # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. # The type of computer environment to control. # - # @see OpenAI::Responses::ComputerTool#environment + # @see OpenAI::Models::Responses::ComputerTool#environment module Environment extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 3cdb0887..64f37584 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -8,25 +8,25 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Responses::EasyInputMessage::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Responses::EasyInputMessage::Role] + # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] required :role, enum: -> { OpenAI::Responses::EasyInputMessage::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Responses::EasyInputMessage::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type, nil] optional :type, enum: -> { OpenAI::Responses::EasyInputMessage::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::EasyInputMessage} for more details. + # {OpenAI::Models::Responses::EasyInputMessage} for more details. 
# # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -34,16 +34,16 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. + # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. # - # @param role [Symbol, OpenAI::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. # - # @see OpenAI::Responses::EasyInputMessage#content + # @see OpenAI::Models::Responses::EasyInputMessage#content module Content extend OpenAI::Internal::Type::Union @@ -55,13 +55,13 @@ module Content variant -> { OpenAI::Responses::ResponseInputMessageContentList } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Responses::EasyInputMessage#role + # @see OpenAI::Models::Responses::EasyInputMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -76,7 +76,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Responses::EasyInputMessage#type + # @see OpenAI::Models::Responses::EasyInputMessage#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 515ef7a2..aead0521 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -19,7 +19,7 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute filters # A filter to apply. # - # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Responses::FileSearchTool::Filters }, nil?: true # @!attribute max_num_results @@ -32,12 +32,12 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. # - # @return [OpenAI::Responses::FileSearchTool::RankingOptions, nil] + # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Responses::FileSearchTool::RankingOptions } # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FileSearchTool} for more details. + # {OpenAI::Models::Responses::FileSearchTool} for more details. # # A tool that searches for relevant content from uploaded files. 
Learn more about # the @@ -45,17 +45,17 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @param vector_store_ids [Array] The IDs of the vector stores to search. # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] A filter to apply. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] A filter to apply. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::Responses::FileSearchTool::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] Ranking options for search. # # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. # A filter to apply. # - # @see OpenAI::Responses::FileSearchTool#filters + # @see OpenAI::Models::Responses::FileSearchTool#filters module Filters extend OpenAI::Internal::Type::Union @@ -66,15 +66,15 @@ module Filters variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end - # @see OpenAI::Responses::FileSearchTool#ranking_options + # @see OpenAI::Models::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. # - # @return [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker, nil] + # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Responses::FileSearchTool::RankingOptions::Ranker } # @!attribute score_threshold @@ -87,17 +87,17 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FileSearchTool::RankingOptions} for more details. + # {OpenAI::Models::Responses::FileSearchTool::RankingOptions} for more details. # # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. + # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. # # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. Numbers close # The ranker to use for the file search. # - # @see OpenAI::Responses::FileSearchTool::RankingOptions#ranker + # @see OpenAI::Models::Responses::FileSearchTool::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 246cf91a..f4db7602 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -37,7 +37,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FunctionTool} for more details. + # {OpenAI::Models::Responses::FunctionTool} for more details. # # Defines a function in your own code the model can choose to call. 
Learn more # about diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index fb2306d3..bd23a462 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -24,7 +24,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } # @!attribute limit @@ -40,7 +40,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. # - # @return [Symbol, OpenAI::Responses::InputItemListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Responses::InputItemListParams::Order } # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) @@ -51,11 +51,11 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 5a548714..6d86a9c8 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -22,13 +22,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute error # An error object returned when the model fails to generate a Response. # - # @return [OpenAI::Responses::ResponseError, nil] + # @return [OpenAI::Models::Responses::ResponseError, nil] required :error, -> { OpenAI::Responses::ResponseError }, nil?: true # @!attribute incomplete_details # Details about why the response is incomplete. # - # @return [OpenAI::Responses::Response::IncompleteDetails, nil] + # @return [OpenAI::Models::Responses::Response::IncompleteDetails, nil] required :incomplete_details, -> { OpenAI::Responses::Response::IncompleteDetails }, nil?: true # @!attribute instructions @@ -60,7 +60,7 @@ class Response < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. 
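Returning to `InputItemListParams` above, here is a hedged sketch of how those parameters surface at the call site; the `responses.input_items` resource shape and the response ID are assumptions:

```ruby
# `client` as in the moderation sketch earlier; "resp_123" is a placeholder ID.
page = client.responses.input_items.list(
  "resp_123",
  order: :asc, # InputItemListParams::Order: :asc or :desc
  limit: 20    # page size
)

# Assuming the returned cursor page exposes a `data` array.
page.data.each { |item| puts item.class }
```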
# - # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::ResponsesModel } # @!attribute object @@ -78,7 +78,7 @@ class Response < OpenAI::Internal::Type::BaseModel # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. # - # @return [Array] + # @return [Array] required :output, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls @@ -101,7 +101,7 @@ class Response < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] required :tool_choice, union: -> { OpenAI::Responses::Response::ToolChoice } # @!attribute tools @@ -120,7 +120,7 @@ class Response < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -162,7 +162,7 @@ class Response < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Reasoning, nil] + # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier @@ -184,14 +184,14 @@ class Response < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Responses::Response::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Responses::Response::ServiceTier }, nil?: true # @!attribute status # The status of the response generation. One of `completed`, `failed`, # `in_progress`, `cancelled`, `queued`, or `incomplete`. # - # @return [Symbol, OpenAI::Responses::ResponseStatus, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Responses::ResponseStatus } # @!attribute text @@ -201,7 +201,7 @@ class Response < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Responses::ResponseTextConfig, nil] + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute truncation @@ -213,14 +213,14 @@ class Response < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
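Several of the `Response` attributes above have request-side counterparts. A minimal sketch follows; the model name is taken from the doc text above, and coercion of plain hashes into the `Reasoning` model is assumed:

```ruby
response = client.responses.create(
  model: "o3",
  input: "Summarize the plot of Hamlet in two sentences.",
  reasoning: {effort: :low}, # Reasoning#effort: :low, :medium, or :high
  truncation: :auto          # drop middle context items instead of failing with a 400
)

puts response.status # e.g. :completed, per ResponseStatus
response.output.each { |item| puts item.type }

# `output_text` is the SDK convenience mentioned in the docs above
# (its availability in this gem is assumed here).
puts response.output_text
```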
# - # @return [Symbol, OpenAI::Responses::Response::Truncation, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] optional :truncation, enum: -> { OpenAI::Responses::Response::Truncation }, nil?: true # @!attribute usage # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # - # @return [OpenAI::Responses::ResponseUsage, nil] + # @return [OpenAI::Models::Responses::ResponseUsage, nil] optional :usage, -> { OpenAI::Responses::ResponseUsage } # @!attribute user @@ -233,31 +233,31 @@ class Response < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Response} for more details. + # {OpenAI::Models::Responses::Response} for more details. # # @param id [String] Unique identifier for this Response. # # @param created_at [Float] Unix timestamp (in seconds) of when this Response was created. # - # @param error [OpenAI::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. + # @param error [OpenAI::Models::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. # - # @param incomplete_details [OpenAI::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. + # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param output [Array] An array of content items generated by the model. + # @param output [Array] An array of content items generated by the model. # # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # @@ -267,38 +267,38 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param status [Symbol, OpenAI::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, + # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param truncation [Symbol, OpenAI::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. # - # @param usage [OpenAI::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, + # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param object [Symbol, :response] The object type of this resource - always set to `response`. - # @see OpenAI::Responses::Response#incomplete_details + # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the response is incomplete. # - # @return [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Responses::Response::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Details about why the response is incomplete. # - # @param reason [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. + # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. # The reason why the response is incomplete. # - # @see OpenAI::Responses::Response::IncompleteDetails#reason + # @see OpenAI::Models::Responses::Response::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -314,7 +314,7 @@ module Reason # response. See the `tools` parameter to see how to specify which tools the model # can call. 
# - # @see OpenAI::Responses::Response#tool_choice + # @see OpenAI::Models::Responses::Response#tool_choice module ToolChoice extend OpenAI::Internal::Type::Union @@ -336,7 +336,7 @@ module ToolChoice variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -357,7 +357,7 @@ module ToolChoice # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Responses::Response#service_tier + # @see OpenAI::Models::Responses::Response#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum @@ -377,7 +377,7 @@ module ServiceTier # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @see OpenAI::Responses::Response#truncation + # @see OpenAI::Models::Responses::Response#truncation module Truncation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index d5dc88d7..07ecdb3d 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -24,7 +24,7 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, sequence_number:, type: :"response.audio.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioDeltaEvent} for more details. # # Emitted when there is a partial audio response. # diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index e7f889a4..c40cf77a 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -18,7 +18,7 @@ class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(sequence_number:, type: :"response.audio.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioDoneEvent} for more details. # # Emitted when the audio response is complete. # diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 10692c30..96372cc0 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -24,7 +24,7 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, sequence_number:, type: :"response.audio.transcript.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioTranscriptDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent} for more details. # # Emitted when there is a partial transcript of audio. 
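The delta events above are consumed by dispatching on event class. In the following sketch the `stream_raw` entry point is an assumption about this SDK's surface; the event classes themselves come from this diff:

```ruby
# `client` as in the earlier sketches.
audio_chunks = []
stream = client.responses.stream_raw(model: "gpt-4o", input: "Read this aloud.")

stream.each do |event|
  case event
  in OpenAI::Responses::ResponseAudioDeltaEvent
    audio_chunks << event.delta # base64-encoded audio bytes
  in OpenAI::Responses::ResponseAudioTranscriptDeltaEvent
    print event.delta           # partial transcript text
  else
    # remaining event types (done/completed/code interpreter) omitted here
  end
end
```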
# diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 5e0e71ff..9e0d38b9 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -18,7 +18,7 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(sequence_number:, type: :"response.audio.transcript.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioTranscriptDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent} for more details. # # Emitted when the full audio transcript is completed. # diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 71790da8..e8413a69 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -30,7 +30,8 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more + # details. # # Emitted when a partial code snippet is added by the code interpreter. # diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 8fc9434f..beaab321 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -30,7 +30,8 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more + # details. # # Emitted when code snippet output is finalized by the code interpreter. # diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index ce19b55e..0e4b05f0 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @!attribute code_interpreter_call # A tool call to run code. 
# - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -30,11 +30,12 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more + # details. # # Emitted when the code interpreter call is completed. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 5ed061e8..26ec12b4 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @!attribute code_interpreter_call # A tool call to run code. # - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -30,12 +30,12 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent} for more + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more # details. # # Emitted when a code interpreter call is in progress. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 5a9ca91b..f0cf91cc 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @!attribute code_interpreter_call # A tool call to run code. 
# - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -30,12 +30,12 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for + # more details. # # Emitted when the code interpreter is actively interpreting the code snippet. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 70e856b7..f04eba44 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -19,7 +19,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the code interpreter tool call. # - # @return [Array] + # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result] @@ -28,7 +28,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the code interpreter tool call. # - # @return [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Status } # @!attribute type @@ -45,7 +45,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details. # # A tool call to run code. # @@ -53,9 +53,9 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # # @param code [String] The code to run. # - # @param results [Array] The results of the code interpreter tool call. + # @param results [Array] The results of the code interpreter tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. # # @param container_id [String] The ID of the container used to run the code. 
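A sketch of consuming the `results` union above: `tool_call` is assumed to be a `ResponseCodeInterpreterToolCall` taken from a response's output, and the `Logs`/`Files` variants it matches on are detailed in the hunks that follow:

```ruby
Result = OpenAI::Responses::ResponseCodeInterpreterToolCall::Result

tool_call.results.each do |result|
  case result
  in Result::Logs
    puts result.logs # textual output of the code run
  in Result::Files
    result.files.each { |f| puts "#{f.file_id} (#{f.mime_type})" }
  end
end

puts tool_call.status # one of the Status enum members above
```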
# @@ -88,8 +88,8 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method initialize(logs:, type: :logs) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for + # more details. # # The output of a code interpreter tool call that is text. # @@ -101,7 +101,7 @@ class Logs < OpenAI::Internal::Type::BaseModel class Files < OpenAI::Internal::Type::BaseModel # @!attribute files # - # @return [Array] + # @return [Array] required :files, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] @@ -115,12 +115,12 @@ class Files < OpenAI::Internal::Type::BaseModel # @!method initialize(files:, type: :files) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for + # more details. # # The output of a code interpreter tool call that is a file. # - # @param files [Array] + # @param files [Array] # # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. @@ -139,8 +139,8 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, mime_type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} for - # more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} + # for more details. # # @param file_id [String] The ID of the file. # @@ -149,12 +149,12 @@ class File < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files)] + # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] end # The status of the code interpreter tool call. # - # @see OpenAI::Responses::ResponseCodeInterpreterToolCall#status + # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index cba90f82..37e03c75 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -7,7 +7,7 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # Properties of the completed response. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -24,11 +24,11 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCompletedEvent} for more details. # # Emitted when the model response is complete. # - # @param response [OpenAI::Responses::Response] Properties of the completed response. 
+ # @param response [OpenAI::Models::Responses::Response] Properties of the completed response. # # @param sequence_number [Integer] The sequence number for this event. # diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index b18746b0..4bb9bf28 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -13,7 +13,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute action # A click action. # - # @return [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] + # @return [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] required :action, union: -> { OpenAI::Responses::ResponseComputerToolCall::Action } # @!attribute call_id @@ -25,7 +25,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute pending_safety_checks # The pending safety checks for the computer call. # - # @return [Array] + # @return [Array] required :pending_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] @@ -35,18 +35,18 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Status } # @!attribute type # The type of the computer call. Always `computer_call`. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] required :type, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Type } # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall} for more details. # # A tool call to a computer use tool. 
See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) @@ -54,19 +54,19 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the computer call. # - # @param action [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] A click action. + # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] A click action. # # @param call_id [String] An identifier used when responding to the tool call with output. # - # @param pending_safety_checks [Array] The pending safety checks for the computer call. + # @param pending_safety_checks [Array] The pending safety checks for the computer call. # - # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. # A click action. # - # @see OpenAI::Responses::ResponseComputerToolCall#action + # @see OpenAI::Models::Responses::ResponseComputerToolCall#action module Action extend OpenAI::Internal::Type::Union @@ -104,7 +104,7 @@ class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] required :button, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button } # @!attribute type @@ -128,11 +128,12 @@ class Click < OpenAI::Internal::Type::BaseModel # @!method initialize(button:, x:, y_:, type: :click) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Click} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click} for more + # details. # # A click action. # - # @param button [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. 
One of `left`, `right + # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right # # @param x [Integer] The x-coordinate where the click occurred. # @@ -143,7 +144,7 @@ class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # - # @see OpenAI::Responses::ResponseComputerToolCall::Action::Click#button + # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button module Button extend OpenAI::Internal::Type::Enum @@ -180,8 +181,8 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :double_click) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick} for + # more details. # # A double click action. # @@ -204,7 +205,7 @@ class Drag < OpenAI::Internal::Type::BaseModel # ] # ``` # - # @return [Array] + # @return [Array] required :path, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] @@ -219,11 +220,12 @@ class Drag < OpenAI::Internal::Type::BaseModel # @!method initialize(path:, type: :drag) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag} for more + # details. # # A drag action. # - # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi + # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi # # @param type [Symbol, :drag] Specifies the event type. For a drag action, this property is @@ -242,8 +244,8 @@ class Path < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path} for + # more details. # # A series of x/y coordinate pairs in the drag path. # @@ -270,7 +272,7 @@ class Keypress < OpenAI::Internal::Type::BaseModel # @!method initialize(keys:, type: :keypress) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Keypress} for more + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress} for more # details. # # A collection of keypresses the model would like to perform. @@ -302,7 +304,8 @@ class Move < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :move) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Move} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move} for more + # details. # # A mouse move action. # @@ -323,8 +326,8 @@ class Screenshot < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot} for + # more details. # # A screenshot action. 
# @@ -365,7 +368,8 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Scroll} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll} for more + # details. # # A scroll action. # @@ -396,7 +400,8 @@ class Type < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :type) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Type} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type} for more + # details. # # An action to type in text. # @@ -415,7 +420,8 @@ class Wait < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :wait) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Wait} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait} for more + # details. # # A wait action. # @@ -423,7 +429,7 @@ class Wait < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait)] + # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel @@ -458,7 +464,7 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseComputerToolCall#status + # @see OpenAI::Models::Responses::ResponseComputerToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -472,7 +478,7 @@ module Status # The type of the computer call. Always `computer_call`. 
# - # @see OpenAI::Responses::ResponseComputerToolCall#type + # @see OpenAI::Models::Responses::ResponseComputerToolCall#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index f18b9a16..71412a11 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -19,7 +19,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type @@ -32,7 +32,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. # - # @return [Array, nil] + # @return [Array, nil] optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] @@ -42,22 +42,23 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseComputerToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCallOutputItem} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputItem} for more + # details. # # @param id [String] The unique ID of the computer call tool output. # # @param call_id [String] The ID of the computer tool call that produced the output. # - # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. # - # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the + # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the # - # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or # # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. @@ -93,7 +94,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. 
Populated when input items are returned via API. # - # @see OpenAI::Responses::ResponseComputerToolCallOutputItem#status + # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 962d8ed7..91dcc4a5 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -25,7 +25,8 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCallOutputScreenshot} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot} for more + # details. # # A computer screenshot image used with the computer use tool. # diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 95e1afdd..6c8a047a 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -23,7 +23,7 @@ module ResponseContent variant -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile, OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 2a838cfa..9ef256b0 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -25,7 +25,7 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that was added. # - # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartAddedEvent::Part } # @!attribute sequence_number @@ -42,7 +42,7 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseContentPartAddedEvent} for more details. + # {OpenAI::Models::Responses::ResponseContentPartAddedEvent} for more details. # # Emitted when a new content part is added. # @@ -52,7 +52,7 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that was added. 
+ # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that was added. # # @param sequence_number [Integer] The sequence number of this event. # @@ -60,7 +60,7 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # The content part that was added. # - # @see OpenAI::Responses::ResponseContentPartAddedEvent#part + # @see OpenAI::Models::Responses::ResponseContentPartAddedEvent#part module Part extend OpenAI::Internal::Type::Union @@ -73,7 +73,7 @@ module Part variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index a7295842..1b7603b6 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -25,7 +25,7 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that is done. # - # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartDoneEvent::Part } # @!attribute sequence_number @@ -42,7 +42,7 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseContentPartDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseContentPartDoneEvent} for more details. # # Emitted when a content part is done. # @@ -52,7 +52,7 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that is done. + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that is done. # # @param sequence_number [Integer] The sequence number of this event. # @@ -60,7 +60,7 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # The content part that is done. 
# - # @see OpenAI::Responses::ResponseContentPartDoneEvent#part + # @see OpenAI::Models::Responses::ResponseContentPartDoneEvent#part module Part extend OpenAI::Internal::Type::Union @@ -73,7 +73,7 @@ module Part variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index aacec08d..eb148cf0 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -21,7 +21,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @return [String, Array] + # @return [String, Array] required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } # @!attribute model @@ -31,7 +31,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::ResponsesModel } # @!attribute background @@ -56,7 +56,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }, nil?: true @@ -111,7 +111,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Reasoning, nil] + # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier @@ -133,7 +133,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Responses::ResponseCreateParams::ServiceTier }, nil?: true # @!attribute store @@ -158,7 +158,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Responses::ResponseTextConfig, nil] + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute tool_choice @@ -166,7 +166,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. 
# - # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, nil] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] optional :tool_choice, union: -> { OpenAI::Responses::ResponseCreateParams::ToolChoice } # @!attribute tools @@ -185,7 +185,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -207,7 +207,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @return [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] optional :truncation, enum: -> { OpenAI::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute user @@ -222,13 +222,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # # @param background [Boolean, nil] Whether to run the model response in the background. # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -240,23 +240,23 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. 
Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -283,7 +283,7 @@ module Input variant -> { OpenAI::Responses::ResponseInput } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -338,7 +338,7 @@ module ToolChoice variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # The truncation strategy to use for the model response. diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index 2724e02c..58d54869 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -7,7 +7,7 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was created. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -24,11 +24,11 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.created") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCreatedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCreatedEvent} for more details. # # An event that is emitted when a response is created. # - # @param response [OpenAI::Responses::Response] The response that was created. + # @param response [OpenAI::Models::Responses::Response] The response that was created. # # @param sequence_number [Integer] The sequence number for this event. 
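The parameter list above maps directly onto a request call. A minimal usage sketch (not part of the patch), assuming the SDK's `OpenAI::Client` entry point exposes `responses.create` with these documented parameters; the model name and prompt are illustrative placeholders:

```ruby
require "openai"

# Minimal sketch: create a response with a few of the parameters
# documented in ResponseCreateParams. "gpt-4o" and the prompt text
# are placeholders, not values taken from the patch.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.responses.create(
  model: "gpt-4o",                       # String, Symbol, or a ChatModel value
  input: "Summarize the Responses API.", # String, or an array of input items
  temperature: 0.7,
  truncation: :auto                      # see ResponseCreateParams::Truncation
)
puts response.id
```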
# diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 45c2dda9..90c420f1 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -7,7 +7,7 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!attribute code # The error code for the response. # - # @return [Symbol, OpenAI::Responses::ResponseError::Code] + # @return [Symbol, OpenAI::Models::Responses::ResponseError::Code] required :code, enum: -> { OpenAI::Responses::ResponseError::Code } # @!attribute message @@ -18,17 +18,17 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseError} for more details. + # {OpenAI::Models::Responses::ResponseError} for more details. # # An error object returned when the model fails to generate a Response. # - # @param code [Symbol, OpenAI::Responses::ResponseError::Code] The error code for the response. + # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] The error code for the response. # # @param message [String] A human-readable description of the error. # The error code for the response. # - # @see OpenAI::Responses::ResponseError#code + # @see OpenAI::Models::Responses::ResponseError#code module Code extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 059f086c..9dde9fb4 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -36,7 +36,7 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:, param:, sequence_number:, type: :error) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseErrorEvent} for more details. + # {OpenAI::Models::Responses::ResponseErrorEvent} for more details. # # Emitted when an error occurs. # diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index e9dd926c..064ba785 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -7,7 +7,7 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that failed. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -24,11 +24,11 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFailedEvent} for more details. + # {OpenAI::Models::Responses::ResponseFailedEvent} for more details. # # An event that is emitted when a response fails. # - # @param response [OpenAI::Responses::Response] The response that failed. + # @param response [OpenAI::Models::Responses::Response] The response that failed. # # @param sequence_number [Integer] The sequence number of this event. 
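ResponseCreatedEvent, ResponseFailedEvent, and ResponseErrorEvent each carry a sequence number plus either a full Response or error details, which makes them natural targets for Ruby pattern matching. A hedged sketch, assuming `events` is an enumerable of these event models obtained from a streaming call:

```ruby
# `events` is assumed to be an Enumerable of the event models above.
events.each do |event|
  case event
  in OpenAI::Models::Responses::ResponseCreatedEvent
    puts "created ##{event.sequence_number}: #{event.response.id}"
  in OpenAI::Models::Responses::ResponseFailedEvent
    warn "failed: #{event.response.id}"
  in OpenAI::Models::Responses::ResponseErrorEvent
    warn "error #{event.code}: #{event.message}"
  else
    nil # other event types are ignored in this sketch
  end
end
```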
# diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 2fd752b3..8ae559c0 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -30,7 +30,8 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent} for more + # details. # # Emitted when a file search call is completed (results found). # diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index e319981c..2dc256ec 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -30,7 +30,8 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallInProgressEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent} for more + # details. # # Emitted when a file search call is initiated. # diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index d07eba07..9cd93593 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -30,7 +30,8 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.searching") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallSearchingEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent} for more + # details. # # Emitted when a file search is currently searching. # diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index cc1b88ec..034f63b7 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -20,7 +20,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @return [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseFileSearchToolCall::Status } # @!attribute type @@ -32,7 +32,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the file search tool call. 
# - # @return [Array, nil] + # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result] @@ -41,7 +41,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchToolCall} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchToolCall} for more details. # # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) @@ -51,16 +51,16 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # # @param queries [Array] The queries used to search for files. # - # @param status [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, + # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, # - # @param results [Array, nil] The results of the file search tool call. + # @param results [Array, nil] The results of the file search tool call. # # @param type [Symbol, :file_search_call] The type of the file search tool call. Always `file_search_call`. # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @see OpenAI::Responses::ResponseFileSearchToolCall#status + # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -115,7 +115,8 @@ class Result < OpenAI::Internal::Type::BaseModel # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchToolCall::Result} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchToolCall::Result} for more + # details. # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index 81d0ad54..43f6ad0c 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -35,7 +35,7 @@ module ResponseFormatTextConfig variant :json_object, -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject)] + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] end end end diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index c4e33d24..06e57803 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -43,7 +43,8 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFormatTextJSONSchemaConfig} for more details. + # {OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig} for more + # details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index d01efcf0..70a57547 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -36,7 +36,8 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.function_call_arguments.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more + # details. # # Emitted when there is a partial function-call arguments delta. # diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index e972d9e8..55602423 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -38,12 +38,12 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCall::Status } # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCall} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCall} for more details. 
# # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) @@ -57,14 +57,14 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the function tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # # @param type [Symbol, :function_call] The type of the function tool call. Always `function_call`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseFunctionToolCall#status + # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index e113338f..b0cbb3be 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -12,7 +12,7 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCallItem} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCallItem} for more details. # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 53e8afd3..bae34c36 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -32,12 +32,13 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCallOutputItem} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem} for more + # details. # # @param id [String] The unique ID of the function call tool output. # @@ -45,14 +46,14 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @param output [String] A JSON string of the output of the function tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. 
One of `in_progress`, `completed`, or # # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseFunctionToolCallOutputItem#status + # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index f69ad27a..a95b9030 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -13,7 +13,7 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the web search tool call. # - # @return [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] required :status, enum: -> { OpenAI::Responses::ResponseFunctionWebSearch::Status } # @!attribute type @@ -24,7 +24,7 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, status:, type: :web_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionWebSearch} for more details. + # {OpenAI::Models::Responses::ResponseFunctionWebSearch} for more details. # # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for @@ -32,13 +32,13 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the web search tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. # # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. # The status of the web search tool call. # - # @see OpenAI::Responses::ResponseFunctionWebSearch#status + # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb index e7e8e1af..36a2c557 100644 --- a/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb +++ b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb @@ -43,7 +43,8 @@ class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, partial_image_b64:, partial_image_index:, sequence_number:, type: :"response.image_generation_call.partial_image") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseImageGenCallPartialImageEvent} for more details. + # {OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent} for more + # details. # # Emitted when a partial image is available during image generation streaming. 
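Since ResponseImageGenCallPartialImageEvent delivers each frame as `partial_image_b64` alongside a `partial_image_index`, progressive rendering reduces to decoding and writing frames in index order. A small sketch using only the documented fields; the directory and filename scheme are illustrative:

```ruby
require "base64"
require "fileutils"

# Decode one partial-image event to disk; `event` responds to
# `partial_image_b64` and `partial_image_index` per the model above.
def save_partial_image(event, dir: "frames")
  FileUtils.mkdir_p(dir)
  path = File.join(dir, format("partial_%02d.png", event.partial_image_index))
  File.binwrite(path, Base64.decode64(event.partial_image_b64))
  path
end
```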
# diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index b4079e2b..775c9b0c 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -7,7 +7,7 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that is in progress. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -24,11 +24,11 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInProgressEvent} for more details. + # {OpenAI::Models::Responses::ResponseInProgressEvent} for more details. # # Emitted when the response is in progress. # - # @param response [OpenAI::Responses::Response] The response that is in progress. + # @param response [OpenAI::Models::Responses::Response] The response that is in progress. # # @param sequence_number [Integer] The sequence number of this event. # diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index 709acfeb..bd1402da 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -7,7 +7,7 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was incomplete. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -24,11 +24,11 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseIncompleteEvent} for more details. + # {OpenAI::Models::Responses::ResponseIncompleteEvent} for more details. # # An event that is emitted when a response finishes as incomplete. # - # @param response [OpenAI::Responses::Response] The response that was incomplete. + # @param response [OpenAI::Models::Responses::Response] The response that was incomplete. # # @param sequence_number [Integer] The sequence number of this event. # diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index d403a4ca..ae4a6908 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -13,7 +13,7 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the audio data. Currently supported formats are `mp3` and `wav`. # - # @return [Symbol, OpenAI::Responses::ResponseInputAudio::Format] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] required :format_, enum: -> { OpenAI::Responses::ResponseInputAudio::Format }, api_name: :format # @!attribute type @@ -24,19 +24,19 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, format_:, type: :input_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputAudio} for more details. 
+ # {OpenAI::Models::Responses::ResponseInputAudio} for more details. # # An audio input to the model. # # @param data [String] Base64-encoded audio data. # - # @param format_ [Symbol, OpenAI::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and + # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and # # @param type [Symbol, :input_audio] The type of the input item. Always `input_audio`. # The format of the audio data. Currently supported formats are `mp3` and `wav`. # - # @see OpenAI::Responses::ResponseInputAudio#format_ + # @see OpenAI::Models::Responses::ResponseInputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 7e349985..59c6970e 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -19,7 +19,7 @@ module ResponseInputContent variant :input_file, -> { OpenAI::Responses::ResponseInputFile } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile)] + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] end end end diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 3c992986..229e942a 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -30,7 +30,7 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputFile} for more details. + # {OpenAI::Models::Responses::ResponseInputFile} for more details. # # A file input to the model. # diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 242f378e..261bd7d6 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -8,7 +8,7 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # - # @return [Symbol, OpenAI::Responses::ResponseInputImage::Detail] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] required :detail, enum: -> { OpenAI::Responses::ResponseInputImage::Detail } # @!attribute type @@ -32,12 +32,12 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputImage} for more details. + # {OpenAI::Models::Responses::ResponseInputImage} for more details. # # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param detail [Symbol, OpenAI::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. 
One of `high`, `low`, or + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or # # @param file_id [String, nil] The ID of the file to be sent to the model. # @@ -48,7 +48,7 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # - # @see OpenAI::Responses::ResponseInputImage#detail + # @see OpenAI::Models::Responses::ResponseInputImage#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index d33546fd..a8f469fa 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -88,7 +88,7 @@ class Message < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. # - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] @@ -97,41 +97,41 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] required :role, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Type } # @!method initialize(content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::Message} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::Message} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. # - # @param content [Array] A list of one or many input items to the model, containing different content + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status] The status of item. 
One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Responses::ResponseInputItem::Message#role + # @see OpenAI::Models::Responses::ResponseInputItem::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -146,7 +146,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::Message#status + # @see OpenAI::Models::Responses::ResponseInputItem::Message#status module Status extend OpenAI::Internal::Type::Enum @@ -160,7 +160,7 @@ module Status # The type of the message input. Always set to `message`. # - # @see OpenAI::Responses::ResponseInputItem::Message#type + # @see OpenAI::Models::Responses::ResponseInputItem::Message#type module Type extend OpenAI::Internal::Type::Enum @@ -181,7 +181,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type @@ -200,7 +200,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. # - # @return [Array, nil] + # @return [Array, nil] optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] @@ -211,26 +211,27 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::ComputerCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput} for more + # details. # # The output of a computer tool call. # # @param call_id [String] The ID of the computer tool call that produced the output. # - # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. # # @param id [String, nil] The ID of the computer tool call output. 
# - # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop + # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple # # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. @@ -266,7 +267,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::ComputerCallOutput#status + # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -309,14 +310,15 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::FunctionCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput} for more + # details. # # The output of a function tool call. # @@ -326,14 +328,14 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # # @param id [String, nil] The unique ID of the function tool call output. Populated when this item is retu # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu # # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::FunctionCallOutput#status + # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -362,7 +364,7 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the image generation call. 
# - # @return [Symbol, OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status } # @!attribute type @@ -373,7 +375,8 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, result:, status:, type: :image_generation_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::ImageGenerationCall} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall} for more + # details. # # An image generation request made by the model. # @@ -381,13 +384,13 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # # @param result [String, nil] The generated image encoded in base64. # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status] The status of the image generation call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::Status] The status of the image generation call. # # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. # The status of the image generation call. # - # @see OpenAI::Responses::ResponseInputItem::ImageGenerationCall#status + # @see OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall#status module Status extend OpenAI::Internal::Type::Enum @@ -411,7 +414,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute action # Execute a shell command on the server. # - # @return [OpenAI::Responses::ResponseInputItem::LocalShellCall::Action] + # @return [OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action] required :action, -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Action } # @!attribute call_id @@ -423,7 +426,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the local shell call. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Status } # @!attribute type @@ -434,21 +437,21 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::LocalShellCall} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCall} for more details. # # A tool call to run a command on the local shell. # # @param id [String] The unique ID of the local shell call. # - # @param action [OpenAI::Responses::ResponseInputItem::LocalShellCall::Action] Execute a shell command on the server. + # @param action [OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action] Execute a shell command on the server. # # @param call_id [String] The unique ID of the local shell tool call generated by the model. # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCall::Status] The status of the local shell call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Status] The status of the local shell call. # # @param type [Symbol, :local_shell_call] The type of the local shell call. 
Always `local_shell_call`. - # @see OpenAI::Responses::ResponseInputItem::LocalShellCall#action + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCall#action class Action < OpenAI::Internal::Type::BaseModel # @!attribute command # The command to run. @@ -488,7 +491,8 @@ class Action < OpenAI::Internal::Type::BaseModel # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::LocalShellCall::Action} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action} for more + # details. # # Execute a shell command on the server. # @@ -507,7 +511,7 @@ class Action < OpenAI::Internal::Type::BaseModel # The status of the local shell call. # - # @see OpenAI::Responses::ResponseInputItem::LocalShellCall#status + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCall#status module Status extend OpenAI::Internal::Type::Enum @@ -542,14 +546,15 @@ class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status }, nil?: true # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::LocalShellCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput} for more + # details. # # The output of a local shell tool call. # @@ -557,13 +562,13 @@ class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel # # @param output [String] A JSON string of the output of the local shell tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. # # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # - # @see OpenAI::Responses::ResponseInputItem::LocalShellCallOutput#status + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -592,7 +597,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools available on the server. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] @@ -612,7 +617,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::McpListTools} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::McpListTools} for more details. # # A list of tools available on an MCP server. 
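# [Editor's note] A minimal sketch, not from the patch itself: an
# `mcp_list_tools` item built as a plain hash from the attributes documented
# above. The ID, server label, and tool are invented for illustration, and
# `input_schema` is assumed to be a JSON-schema-shaped hash.
mcp_list_tools_item = {
  id: "mcpl_abc123",
  server_label: "deepwiki",
  type: :mcp_list_tools,
  tools: [
    {
      name: "ask_question",
      description: "Ask a question about a repository",
      input_schema: {"type" => "object", "properties" => {}, "required" => []}
    }
  ],
  error: nil
}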
# @@ -620,7 +625,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # # @param server_label [String] The label of the MCP server. # - # @param tools [Array] The tools available on the server. + # @param tools [Array] The tools available on the server. # # @param error [String, nil] Error message if the server could not list tools. # @@ -653,7 +658,8 @@ class Tool < OpenAI::Internal::Type::BaseModel # @!method initialize(input_schema:, name:, annotations: nil, description: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::McpListTools::Tool} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::McpListTools::Tool} for more + # details. # # A tool available on an MCP server. # @@ -700,7 +706,8 @@ class McpApprovalRequest < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::McpApprovalRequest} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest} for more + # details. # # A request for human approval of a tool invocation. # @@ -748,7 +755,8 @@ class McpApprovalResponse < OpenAI::Internal::Type::BaseModel # @!method initialize(approval_request_id:, approve:, id: nil, reason: nil, type: :mcp_approval_response) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::McpApprovalResponse} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse} for more + # details. # # A response to an MCP approval request. # @@ -808,7 +816,7 @@ class McpCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::McpCall} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::McpCall} for more details. # # An invocation of a tool on an MCP server. # @@ -837,7 +845,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of item to reference. Always `item_reference`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::ItemReference::Type }, nil?: true # @!method initialize(id:, type: nil) @@ -845,11 +853,11 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the item to reference. # - # @param type [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. # The type of item to reference. Always `item_reference`. 
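# [Editor's note] A hedged sketch of the `item_reference` shape documented
# above: it points at a previously returned item by ID instead of repeating
# its content. The ID is invented.
item_reference = {
  id: "msg_68a1b2c3d4", # hypothetical ID of an item from a prior response
  type: :item_reference # optional; the model allows nil here
}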
# - # @see OpenAI::Responses::ResponseInputItem::ItemReference#type + # @see OpenAI::Models::Responses::ResponseInputItem::ItemReference#type module Type extend OpenAI::Internal::Type::Enum @@ -861,7 +869,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseInputItem::LocalShellCall, OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Responses::ResponseInputItem::McpListTools, OpenAI::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Responses::ResponseInputItem::McpCall, OpenAI::Responses::ResponseInputItem::ItemReference)] + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 6bad6d0f..90fade2d 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -14,7 +14,7 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. # - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] @@ -23,39 +23,39 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] required :role, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Type } # @!method initialize(id:, content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputMessageItem} for more details. + # {OpenAI::Models::Responses::ResponseInputMessageItem} for more details. # # @param id [String] The unique ID of the message input. # - # @param content [Array] A list of one or many input items to the model, containing different content + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Responses::ResponseInputMessageItem#role + # @see OpenAI::Models::Responses::ResponseInputMessageItem#role module Role extend OpenAI::Internal::Type::Enum @@ -70,7 +70,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputMessageItem#status + # @see OpenAI::Models::Responses::ResponseInputMessageItem#status module Status extend OpenAI::Internal::Type::Enum @@ -84,7 +84,7 @@ module Status # The type of the message input. Always set to `message`. # - # @see OpenAI::Responses::ResponseInputMessageItem#type + # @see OpenAI::Models::Responses::ResponseInputMessageItem#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index 1bc8020a..4b501f36 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -74,7 +74,7 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the image generation call. 
# - # @return [Symbol, OpenAI::Responses::ResponseItem::ImageGenerationCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseItem::ImageGenerationCall::Status } # @!attribute type @@ -85,7 +85,7 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, result:, status:, type: :image_generation_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::ImageGenerationCall} for more details. + # {OpenAI::Models::Responses::ResponseItem::ImageGenerationCall} for more details. # # An image generation request made by the model. # @@ -93,13 +93,13 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # # @param result [String, nil] The generated image encoded in base64. # - # @param status [Symbol, OpenAI::Responses::ResponseItem::ImageGenerationCall::Status] The status of the image generation call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::Status] The status of the image generation call. # # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. # The status of the image generation call. # - # @see OpenAI::Responses::ResponseItem::ImageGenerationCall#status + # @see OpenAI::Models::Responses::ResponseItem::ImageGenerationCall#status module Status extend OpenAI::Internal::Type::Enum @@ -123,7 +123,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute action # Execute a shell command on the server. # - # @return [OpenAI::Responses::ResponseItem::LocalShellCall::Action] + # @return [OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action] required :action, -> { OpenAI::Responses::ResponseItem::LocalShellCall::Action } # @!attribute call_id @@ -135,7 +135,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the local shell call. # - # @return [Symbol, OpenAI::Responses::ResponseItem::LocalShellCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseItem::LocalShellCall::Status } # @!attribute type @@ -146,21 +146,21 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::LocalShellCall} for more details. + # {OpenAI::Models::Responses::ResponseItem::LocalShellCall} for more details. # # A tool call to run a command on the local shell. # # @param id [String] The unique ID of the local shell call. # - # @param action [OpenAI::Responses::ResponseItem::LocalShellCall::Action] Execute a shell command on the server. + # @param action [OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action] Execute a shell command on the server. # # @param call_id [String] The unique ID of the local shell tool call generated by the model. # - # @param status [Symbol, OpenAI::Responses::ResponseItem::LocalShellCall::Status] The status of the local shell call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCall::Status] The status of the local shell call. # # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. 
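# [Editor's note] A sketch of the `exec` action carried by a local shell
# call, matching the Action model in the next hunk. The command is assumed
# to be an argv-style array of strings; all values are illustrative.
exec_action = {
  type: :exec,
  command: ["bash", "-lc", "ls -la"],
  env: {"LANG" => "C.UTF-8"},
  timeout_ms: 5_000,        # optional
  working_directory: "/tmp" # optional
}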
- # @see OpenAI::Responses::ResponseItem::LocalShellCall#action + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCall#action class Action < OpenAI::Internal::Type::BaseModel # @!attribute command # The command to run. @@ -200,7 +200,8 @@ class Action < OpenAI::Internal::Type::BaseModel # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::LocalShellCall::Action} for more details. + # {OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action} for more + # details. # # Execute a shell command on the server. # @@ -219,7 +220,7 @@ class Action < OpenAI::Internal::Type::BaseModel # The status of the local shell call. # - # @see OpenAI::Responses::ResponseItem::LocalShellCall#status + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCall#status module Status extend OpenAI::Internal::Type::Enum @@ -254,7 +255,7 @@ class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # - # @return [Symbol, OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status @@ -263,7 +264,8 @@ class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::LocalShellCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput} for more + # details. # # The output of a local shell tool call. # @@ -271,13 +273,13 @@ class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel # # @param output [String] A JSON string of the output of the local shell tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. # # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # - # @see OpenAI::Responses::ResponseItem::LocalShellCallOutput#status + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -306,7 +308,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools available on the server. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseItem::McpListTools::Tool] } @@ -324,7 +326,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::McpListTools} for more details. + # {OpenAI::Models::Responses::ResponseItem::McpListTools} for more details. # # A list of tools available on an MCP server. 
# @@ -332,7 +334,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # # @param server_label [String] The label of the MCP server. # - # @param tools [Array] The tools available on the server. + # @param tools [Array] The tools available on the server. # # @param error [String, nil] Error message if the server could not list tools. # @@ -365,7 +367,7 @@ class Tool < OpenAI::Internal::Type::BaseModel # @!method initialize(input_schema:, name:, annotations: nil, description: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::McpListTools::Tool} for more details. + # {OpenAI::Models::Responses::ResponseItem::McpListTools::Tool} for more details. # # A tool available on an MCP server. # @@ -412,7 +414,7 @@ class McpApprovalRequest < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::McpApprovalRequest} for more details. + # {OpenAI::Models::Responses::ResponseItem::McpApprovalRequest} for more details. # # A request for human approval of a tool invocation. # @@ -460,7 +462,7 @@ class McpApprovalResponse < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, approval_request_id:, approve:, reason: nil, type: :mcp_approval_response) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::McpApprovalResponse} for more details. + # {OpenAI::Models::Responses::ResponseItem::McpApprovalResponse} for more details. # # A response to an MCP approval request. # @@ -520,7 +522,7 @@ class McpCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseItem::McpCall} for more details. + # {OpenAI::Models::Responses::ResponseItem::McpCall} for more details. # # An invocation of a tool on an MCP server. 
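# [Editor's note] A sketch of an `mcp_call` item as a hash with the
# attributes listed above. `arguments` is assumed to be a JSON-encoded
# string rather than a nested hash; the values are invented.
require "json"

mcp_call_item = {
  id: "mcp_abc123",
  type: :mcp_call,
  name: "ask_question",
  server_label: "deepwiki",
  arguments: JSON.generate({question: "What does this repo do?"}),
  output: "It is an SDK.", # nil until the tool has produced output
  error: nil
}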
# @@ -540,7 +542,7 @@ class McpCall < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Responses::ResponseItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseItem::LocalShellCall, OpenAI::Responses::ResponseItem::LocalShellCallOutput, OpenAI::Responses::ResponseItem::McpListTools, OpenAI::Responses::ResponseItem::McpApprovalRequest, OpenAI::Responses::ResponseItem::McpApprovalResponse, OpenAI::Responses::ResponseItem::McpCall)] + # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseItem::LocalShellCall, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseItem::McpListTools, OpenAI::Models::Responses::ResponseItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseItem::McpCall)] end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 436ecce9..ea2b6f1b 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -7,7 +7,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!attribute data # A list of items used to generate this response. # - # @return [Array] + # @return [Array] required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseItem] } # @!attribute first_id @@ -37,7 +37,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) # A list of Response items. # - # @param data [Array] A list of items used to generate this response. + # @param data [Array] A list of items used to generate this response. # # @param first_id [String] The ID of the first item in the list. # diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index 8183cdda..811fed00 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -24,7 +24,7 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, transcript:, type: :output_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputAudio} for more details. + # {OpenAI::Models::Responses::ResponseOutputAudio} for more details. # # An audio output from the model. 
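# [Editor's note] A sketch of consuming an `output_audio` item: `data` is
# documented as base64-encoded audio, so it can be decoded and written to
# disk. The item below is a stand-in; a real one comes from a model response.
require "base64"

output_audio = {data: Base64.strict_encode64("fake audio bytes"), transcript: "Hello!"}
File.binwrite("speech.bin", Base64.decode64(output_audio[:data]))
puts output_audio[:transcript]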
# diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 1fdd4e91..b6fb663b 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -68,7 +68,7 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the image generation call. # - # @return [Symbol, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status } # @!attribute type @@ -79,7 +79,8 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, result:, status:, type: :image_generation_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::ImageGenerationCall} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall} for more + # details. # # An image generation request made by the model. # @@ -87,13 +88,13 @@ class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # # @param result [String, nil] The generated image encoded in base64. # - # @param status [Symbol, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status] The status of the image generation call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::Status] The status of the image generation call. # # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. # The status of the image generation call. # - # @see OpenAI::Responses::ResponseOutputItem::ImageGenerationCall#status + # @see OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall#status module Status extend OpenAI::Internal::Type::Enum @@ -117,7 +118,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute action # Execute a shell command on the server. # - # @return [OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action] + # @return [OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action] required :action, -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action } # @!attribute call_id @@ -129,7 +130,7 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the local shell call. # - # @return [Symbol, OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status } # @!attribute type @@ -140,21 +141,22 @@ class LocalShellCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::LocalShellCall} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall} for more + # details. # # A tool call to run a command on the local shell. # # @param id [String] The unique ID of the local shell call. # - # @param action [OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action] Execute a shell command on the server. + # @param action [OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action] Execute a shell command on the server. 
# # @param call_id [String] The unique ID of the local shell tool call generated by the model. # - # @param status [Symbol, OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status] The status of the local shell call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Status] The status of the local shell call. # # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. - # @see OpenAI::Responses::ResponseOutputItem::LocalShellCall#action + # @see OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall#action class Action < OpenAI::Internal::Type::BaseModel # @!attribute command # The command to run. @@ -194,7 +196,7 @@ class Action < OpenAI::Internal::Type::BaseModel # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action} for more + # {OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action} for more # details. # # Execute a shell command on the server. @@ -214,7 +216,7 @@ class Action < OpenAI::Internal::Type::BaseModel # The status of the local shell call. # - # @see OpenAI::Responses::ResponseOutputItem::LocalShellCall#status + # @see OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall#status module Status extend OpenAI::Internal::Type::Enum @@ -272,7 +274,7 @@ class McpCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::McpCall} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::McpCall} for more details. # # An invocation of a tool on an MCP server. # @@ -307,7 +309,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools available on the server. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] @@ -327,7 +329,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::McpListTools} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::McpListTools} for more details. # # A list of tools available on an MCP server. # @@ -335,7 +337,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # # @param server_label [String] The label of the MCP server. # - # @param tools [Array] The tools available on the server. + # @param tools [Array] The tools available on the server. # # @param error [String, nil] Error message if the server could not list tools. # @@ -368,7 +370,8 @@ class Tool < OpenAI::Internal::Type::BaseModel # @!method initialize(input_schema:, name:, annotations: nil, description: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::McpListTools::Tool} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::McpListTools::Tool} for more + # details. # # A tool available on an MCP server. 
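# [Editor's note] A hedged sketch of the approval round trip implied by the
# approval-request model in the next hunk: after receiving an
# `mcp_approval_request`, the caller answers with an `mcp_approval_response`
# input item. The IDs are invented.
approval_response = {
  type: :mcp_approval_response,
  approval_request_id: "mcpr_abc123", # ID of the request being answered
  approve: true,
  reason: "Reviewed: read-only query against an allowed server" # optional
}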
# @@ -415,7 +418,8 @@ class McpApprovalRequest < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItem::McpApprovalRequest} for more details. + # {OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest} for more + # details. # # A request for human approval of a tool invocation. # @@ -431,7 +435,7 @@ class McpApprovalRequest < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index a5bce7c2..119bea71 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. 
# - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -30,11 +30,11 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItemAddedEvent} for more details. + # {OpenAI::Models::Responses::ResponseOutputItemAddedEvent} for more details. # # Emitted when a new output item is added. # - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was added. + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was added. # # @param output_index [Integer] The index of the output item that was added. 
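# [Editor's note] A sketch of reacting to `response.output_item.added`
# events, and the companion `response.output_item.done` events in the next
# hunk, while consuming a stream. `stream` is a stand-in; a real one would
# yield event objects shaped like the models shown here.
stream = []
stream.each do |event|
  case event.type
  when :"response.output_item.added"
    puts "output item #{event.output_index} started (#{event.item.type})"
  when :"response.output_item.done"
    puts "output item #{event.output_index} finished"
  end
end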
# diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 642bc92d..45b061b7 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. # - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -30,11 +30,11 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItemDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseOutputItemDoneEvent} for more details. # # Emitted when an output item is marked done. # - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Responses::ResponseCodeInterpreterToolCall, OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, OpenAI::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was marked done. 
+ # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was marked done. # # @param output_index [Integer] The index of the output item that was marked done. # diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 2e7eee71..72e65a6d 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -13,7 +13,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the output message. # - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content] @@ -29,7 +29,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] required :status, enum: -> { OpenAI::Responses::ResponseOutputMessage::Status } # @!attribute type @@ -40,15 +40,15 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, content:, status:, role: :assistant, type: :message) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputMessage} for more details. + # {OpenAI::Models::Responses::ResponseOutputMessage} for more details. # # An output message from the model. # # @param id [String] The unique ID of the output message. # - # @param content [Array] The content of the output message. + # @param content [Array] The content of the output message. # - # @param status [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or # # @param role [Symbol, :assistant] The role of the output message. Always `assistant`. # @@ -67,13 +67,13 @@ module Content variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
# - # @see OpenAI::Responses::ResponseOutputMessage#status + # @see OpenAI::Models::Responses::ResponseOutputMessage#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 6234918b..a9646f34 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -7,7 +7,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!attribute annotations # The annotations of the text output. # - # @return [Array] + # @return [Array] required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] @@ -28,7 +28,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations:, text:, type: :output_text) # A text output from the model. # - # @param annotations [Array] The annotations of the text output. + # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. # @@ -144,7 +144,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, index:, type: :file_path) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputText::Annotation::FilePath} for more details. + # {OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath} for more + # details. # # A path to a file. # @@ -156,7 +157,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_queued_event.rb b/lib/openai/models/responses/response_queued_event.rb index f84bd2b0..0625aa8b 100644 --- a/lib/openai/models/responses/response_queued_event.rb +++ b/lib/openai/models/responses/response_queued_event.rb @@ -7,7 +7,7 @@ class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The full response object that is queued. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } # @!attribute sequence_number @@ -25,7 +25,7 @@ class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(response:, sequence_number:, type: :"response.queued") # Emitted when a response is queued and waiting to be processed. # - # @param response [OpenAI::Responses::Response] The full response object that is queued. + # @param response [OpenAI::Models::Responses::Response] The full response object that is queued. # # @param sequence_number [Integer] The sequence number for this event. # diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 47d20960..fa878448 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -13,7 +13,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!attribute summary # Reasoning text contents. 
# - # @return [Array] + # @return [Array] required :summary, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Summary] } @@ -34,12 +34,12 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseReasoningItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseReasoningItem::Status } # @!method initialize(id:, summary:, encrypted_content: nil, status: nil, type: :reasoning) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningItem} for more details. + # {OpenAI::Models::Responses::ResponseReasoningItem} for more details. # # A description of the chain of thought used by a reasoning model while generating # a response. Be sure to include these items in your `input` to the Responses API @@ -48,11 +48,11 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique identifier of the reasoning content. # - # @param summary [Array] Reasoning text contents. + # @param summary [Array] Reasoning text contents. # # @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is # - # @param status [Symbol, OpenAI::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or # # @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. @@ -71,7 +71,7 @@ class Summary < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :summary_text) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningItem::Summary} for more details. + # {OpenAI::Models::Responses::ResponseReasoningItem::Summary} for more details. # # @param text [String] A short summary of the reasoning used by the model when generating # @@ -81,7 +81,7 @@ class Summary < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseReasoningItem#status + # @see OpenAI::Models::Responses::ResponseReasoningItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb index 96529b8e..f3299e9f 100644 --- a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb @@ -43,7 +43,8 @@ class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent} for more + # details. # # Emitted when there is a delta (partial update) to the reasoning summary content. 
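# [Editor's note] A sketch of accumulating the delta events documented above
# into running summary text, keyed by item and summary index. `stream` is a
# stand-in, and `delta` is treated as string-like (hence `to_s`).
summaries = Hash.new { |hash, key| hash[key] = +"" }
stream = []
stream.each do |event|
  next unless event.type == :"response.reasoning_summary.delta"

  summaries[[event.item_id, event.summary_index]] << event.delta.to_s
end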
# diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb index 555e71a5..88b7f071 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -19,7 +19,7 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The summary part that was added. # - # @return [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part } # @!attribute sequence_number @@ -42,7 +42,8 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent} for more + # details. # # Emitted when a new reasoning summary part is added. # @@ -50,7 +51,7 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. # # @param sequence_number [Integer] The sequence number of this event. # @@ -58,7 +59,7 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. - # @see OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent#part + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb index 6a4cc1d6..c45beaa4 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -19,7 +19,7 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The completed summary part. # - # @return [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part } # @!attribute sequence_number @@ -42,7 +42,8 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent} for more details. 
+ # {OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent} for more + # details. # # Emitted when a reasoning summary part is completed. # @@ -50,7 +51,7 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. # # @param sequence_number [Integer] The sequence number of this event. # @@ -58,7 +59,7 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. - # @see OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent#part + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb index 2a5fd60e..9f801ed9 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -42,7 +42,8 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent} for more + # details. # # Emitted when a delta is added to a reasoning summary text. # diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb index e5860672..d77661fc 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -42,7 +42,8 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent} for more + # details. # # Emitted when a reasoning summary text is completed. # diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index 9314f0a6..f912ec9e 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -42,7 +42,7 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.refusal.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseRefusalDeltaEvent} for more details. 
+      #   {OpenAI::Models::Responses::ResponseRefusalDeltaEvent} for more details.
       #
       #   Emitted when there is a partial refusal text.
       #
diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb
index 36d56472..4d428943 100644
--- a/lib/openai/models/responses/response_refusal_done_event.rb
+++ b/lib/openai/models/responses/response_refusal_done_event.rb
@@ -42,7 +42,7 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel
       # @!method initialize(content_index:, item_id:, output_index:, refusal:, sequence_number:, type: :"response.refusal.done")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Responses::ResponseRefusalDoneEvent} for more details.
+      #   {OpenAI::Models::Responses::ResponseRefusalDoneEvent} for more details.
       #
       #   Emitted when refusal text is finalized.
       #
diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb
index 07638190..8063503f 100644
--- a/lib/openai/models/responses/response_retrieve_params.rb
+++ b/lib/openai/models/responses/response_retrieve_params.rb
@@ -12,14 +12,14 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel
       #   Additional fields to include in the response. See the `include` parameter for
       #   Response creation above for more information.
       #
-      #   @return [Array<Symbol, OpenAI::Responses::ResponseIncludable>, nil]
+      #   @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil]
       optional :include,
               -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }

       # @!method initialize(include: nil, request_options: {})
       #   Some parameter documentations has been truncated, see
       #   {OpenAI::Models::Responses::ResponseRetrieveParams} for more details.
       #
-      #   @param include [Array<Symbol, OpenAI::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include`
+      #   @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response.
See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 08fbf6b8..6da18a36 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -214,7 +214,7 @@ module ResponseStreamEvent } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Responses::ResponseMcpCallCompletedEvent, OpenAI::Responses::ResponseMcpCallFailedEvent, OpenAI::Responses::ResponseMcpCallInProgressEvent, OpenAI::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Responses::ResponseQueuedEvent, OpenAI::Responses::ResponseReasoningDeltaEvent, OpenAI::Responses::ResponseReasoningDoneEvent, OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryDoneEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, 
OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index 1ab7db61..c22ecad8 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -7,7 +7,7 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute annotation # A citation to a file. 
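Since `ResponseStreamEvent` is a discriminated union over every class in the list above, consumers typically pattern match on the event object. A sketch of a handler, assuming `event` is one of these typed events; how the stream is obtained is outside the scope of this patch:

    # Sketch: dispatch on the typed stream events named in the union above.
    def handle(event)
      case event
      in OpenAI::Models::Responses::ResponseTextDeltaEvent
        print event.delta
      in OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent
        print event.delta # reasoning summary text also arrives incrementally
      in OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent
        puts "\n[summary part #{event.summary_index}: #{event.part.text}]"
      in OpenAI::Models::Responses::ResponseCompletedEvent
        puts "\n[done: #{event.response.usage&.total_tokens} tokens]"
      else
        nil # many more variants exist; see the full list above
      end
    end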
# - # @return [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + # @return [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] required :annotation, union: -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation } # @!attribute annotation_index @@ -48,11 +48,11 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text.annotation.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent} for more details. # # Emitted when a text annotation is added. # - # @param annotation [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. + # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. # # @param annotation_index [Integer] The index of the annotation that was added. # @@ -68,7 +68,7 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # A citation to a file. # - # @see OpenAI::Responses::ResponseTextAnnotationDeltaEvent#annotation + # @see OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent#annotation module Annotation extend OpenAI::Internal::Type::Union @@ -182,8 +182,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, index:, type: :file_path) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} for - # more details. + # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} + # for more details. # # A path to a file. 
# @@ -195,7 +195,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] + # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 2d22f544..9a203e84 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -19,12 +19,12 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. # - # @return [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject, nil] + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format # @!method initialize(format_: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextConfig} for more details. + # {OpenAI::Models::Responses::ResponseTextConfig} for more details. # # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: @@ -32,7 +32,7 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @param format_ [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 46236823..b08caebc 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -42,7 +42,7 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.output_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseTextDeltaEvent} for more details. # # Emitted when there is an additional text delta. 
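One detail in the `ResponseTextConfig` hunk above is worth calling out: the Ruby attribute is `format_` with `api_name: :format`, so callers pass the underscored key while `format` is what serializes onto the wire. A sketch, assuming the request hash is coerced through `ResponseTextConfig` as documented; the model name is illustrative:

    # Ask for a JSON-object response; `format_` serializes as `format`.
    response = client.responses.create(
      model: "gpt-4.1", # illustrative
      input: "List three primes as a JSON object.",
      text: {format_: {type: :json_object}}
    )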
# diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 8c391c93..319b4eeb 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -42,7 +42,7 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.output_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseTextDoneEvent} for more details. # # Emitted when text content is finalized. # diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 09831893..bd574dfa 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -13,7 +13,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # A detailed breakdown of the input tokens. # - # @return [OpenAI::Responses::ResponseUsage::InputTokensDetails] + # @return [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] required :input_tokens_details, -> { OpenAI::Responses::ResponseUsage::InputTokensDetails } # @!attribute output_tokens @@ -25,7 +25,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute output_tokens_details # A detailed breakdown of the output tokens. # - # @return [OpenAI::Responses::ResponseUsage::OutputTokensDetails] + # @return [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] required :output_tokens_details, -> { OpenAI::Responses::ResponseUsage::OutputTokensDetails } # @!attribute total_tokens @@ -40,15 +40,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of input tokens. # - # @param input_tokens_details [OpenAI::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. + # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. # # @param output_tokens [Integer] The number of output tokens. # - # @param output_tokens_details [OpenAI::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. + # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. # # @param total_tokens [Integer] The total number of tokens used. - # @see OpenAI::Responses::ResponseUsage#input_tokens_details + # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. @@ -59,14 +59,14 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(cached_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseUsage::InputTokensDetails} for more details. + # {OpenAI::Models::Responses::ResponseUsage::InputTokensDetails} for more details. # # A detailed breakdown of the input tokens. # # @param cached_tokens [Integer] The number of tokens that were retrieved from the cache. 
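The `ResponseUsage` hunks here and just below document the token accounting; reading it back is plain attribute access. A sketch assuming `response` came from an earlier `responses.create` call:

    usage = response.usage
    puts "input:  #{usage.input_tokens} (cached: #{usage.input_tokens_details.cached_tokens})"
    puts "output: #{usage.output_tokens} (reasoning: #{usage.output_tokens_details.reasoning_tokens})"
    puts "total:  #{usage.total_tokens}"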
        end

-      # @see OpenAI::Responses::ResponseUsage#output_tokens_details
+      # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details
       class OutputTokensDetails < OpenAI::Internal::Type::BaseModel
         # @!attribute reasoning_tokens
         #   The number of reasoning tokens.
diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb
index 4a8461ac..c7a5da28 100644
--- a/lib/openai/models/responses/response_web_search_call_completed_event.rb
+++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb
@@ -24,7 +24,8 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel
       # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Responses::ResponseWebSearchCallCompletedEvent} for more details.
+      #   {OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent} for more
+      #   details.
       #
       #   Emitted when a web search call is completed.
       #
diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb
index a930db0f..dc6f12fa 100644
--- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb
+++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb
@@ -24,7 +24,8 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel
       # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Responses::ResponseWebSearchCallInProgressEvent} for more details.
+      #   {OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent} for more
+      #   details.
       #
       #   Emitted when a web search call is initiated.
       #
diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb
index 31a1e8c1..74cebb9c 100644
--- a/lib/openai/models/responses/response_web_search_call_searching_event.rb
+++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb
@@ -24,7 +24,8 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel
       # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching")
       #   Some parameter documentations has been truncated, see
-      #   {OpenAI::Responses::ResponseWebSearchCallSearchingEvent} for more details.
+      #   {OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent} for more
+      #   details.
       #
       #   Emitted when a web search call is executing.
       #
diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb
index 7e7cb2a2..ede6dc3c 100644
--- a/lib/openai/models/responses/tool.rb
+++ b/lib/openai/models/responses/tool.rb
@@ -56,7 +56,7 @@ class Mcp < OpenAI::Internal::Type::BaseModel
         # @!attribute allowed_tools
         #   List of allowed tool names or a filter object.
         #
-        #   @return [Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil]
+        #   @return [Array<String>, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil]
         optional :allowed_tools, union: -> { OpenAI::Responses::Tool::Mcp::AllowedTools }, nil?: true

         # @!attribute headers
         #   Optional HTTP headers to send to the MCP server. Use for authentication
@@ -69,12 +69,12 @@ class Mcp < OpenAI::Internal::Type::BaseModel
         # @!attribute require_approval
         #   Specify which of the MCP server's tools require approval.
         #
-        #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil]
+        #   @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil]
         optional :require_approval, union: -> { OpenAI::Responses::Tool::Mcp::RequireApproval }, nil?: true

         # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, type: :mcp)
         #   Some parameter documentations has been truncated, see
-        #   {OpenAI::Responses::Tool::Mcp} for more details.
+        #   {OpenAI::Models::Responses::Tool::Mcp} for more details.
         #
         #   Give the model access to additional tools via remote Model Context Protocol
         #   (MCP) servers.
@@ -84,22 +84,22 @@ class Mcp < OpenAI::Internal::Type::BaseModel
         #
         #   @param server_url [String] The URL for the MCP server.
         #
-        #   @param allowed_tools [Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] List of allowed tool names or a filter object.
+        #   @param allowed_tools [Array<String>, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] List of allowed tool names or a filter object.
         #
         #   @param headers [Hash{Symbol=>String}, nil] Optional HTTP headers to send to the MCP server. Use for authentication
         #
-        #   @param require_approval [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] Specify which of the MCP server's tools require approval.
+        #   @param require_approval [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] Specify which of the MCP server's tools require approval.
         #
         #   @param type [Symbol, :mcp] The type of the MCP tool. Always `mcp`.

         # List of allowed tool names or a filter object.
         #
-        # @see OpenAI::Responses::Tool::Mcp#allowed_tools
+        # @see OpenAI::Models::Responses::Tool::Mcp#allowed_tools
         module AllowedTools
           extend OpenAI::Internal::Type::Union

           # A string array of allowed tool names
-          variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::StringArray }
+          variant -> { OpenAI::Models::Responses::Tool::Mcp::AllowedTools::StringArray }

           # A filter object to specify which tools are allowed.
           variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter }
@@ -118,7 +118,7 @@ class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel
           end

           # @!method self.variants
-          #   @return [Array(Array<String>, OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter)]
+          #   @return [Array(Array<String>, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter)]

           # @type [OpenAI::Internal::Type::Converter]
           StringArray = OpenAI::Internal::Type::ArrayOf[String]
@@ -126,7 +126,7 @@ class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel
         # Specify which of the MCP server's tools require approval.
         #
-        # @see OpenAI::Responses::Tool::Mcp#require_approval
+        # @see OpenAI::Models::Responses::Tool::Mcp#require_approval
         module RequireApproval
           extend OpenAI::Internal::Type::Union
@@ -141,7 +141,7 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel
            # @!attribute always
            #   A list of tools that always require approval.
            #
-           #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil]
+           #   @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil]
            optional :always,
                     -> {
                       OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always
@@ -150,7 +150,7 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel
            # @!attribute never
            #   A list of tools that never require approval.
            #
-           #   @return [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil]
+           #   @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil]
            optional :never,
                     -> {
                       OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never
@@ -164,16 +164,16 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel
            # @!method initialize(always: nil, never: nil, tool_names: nil)
            #   Some parameter documentations has been truncated, see
-           #   {OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter} for more
-           #   details.
+           #   {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter}
+           #   for more details.
            #
-           #   @param always [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval.
+           #   @param always [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval.
            #
-           #   @param never [OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval.
+           #   @param never [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval.
            #
            #   @param tool_names [Array<String>] List of allowed tool names.

-           # @see OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always
+           # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always
            class Always < OpenAI::Internal::Type::BaseModel
              # @!attribute tool_names
              #   List of tools that require approval.
@@ -187,7 +187,7 @@ class Always < OpenAI::Internal::Type::BaseModel
              #   @param tool_names [Array<String>] List of tools that require approval.
            end

-           # @see OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#never
+           # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#never
            class Never < OpenAI::Internal::Type::BaseModel
              # @!attribute tool_names
              #   List of tools that do not require approval.
@@ -216,7 +216,7 @@ module McpToolApprovalSetting
          end

          # @!method self.variants
-         #   @return [Array(OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting)]
+         #   @return [Array(OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting)]
        end
      end

@@ -225,7 +225,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel
        #   The code interpreter container. Can be a container ID or an object that
        #   specifies uploaded file IDs to make available to your code.
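Taken together, the `Mcp` hunks above say: `allowed_tools` accepts a string array or a filter object, and `require_approval` accepts either a blanket setting or an `always`/`never` filter. A sketch of a tool hash using those shapes; the server details are placeholders:

    mcp_tool = {
      type: :mcp,
      server_label: "deepwiki",               # placeholder
      server_url: "https://example.com/mcp",  # placeholder
      allowed_tools: ["ask_question"],        # or a {tool_names: [...]} filter
      require_approval: {
        always: {tool_names: ["delete_page"]}, # these always need approval
        never: {tool_names: ["ask_question"]}  # these never do
      }
    }

    response = client.responses.create(
      model: "gpt-4.1", # illustrative
      input: "Summarize the wiki page on vector stores.",
      tools: [mcp_tool]
    )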
# - # @return [String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] + # @return [String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] required :container, union: -> { OpenAI::Responses::Tool::CodeInterpreter::Container } # @!attribute type @@ -236,18 +236,18 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(container:, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Tool::CodeInterpreter} for more details. + # {OpenAI::Models::Responses::Tool::CodeInterpreter} for more details. # # A tool that runs Python code to help generate a response to a prompt. # - # @param container [String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] The code interpreter container. Can be a container ID or an object that + # @param container [String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] The code interpreter container. Can be a container ID or an object that # # @param type [Symbol, :code_interpreter] The type of the code interpreter tool. Always `code_interpreter`. # The code interpreter container. Can be a container ID or an object that # specifies uploaded file IDs to make available to your code. # - # @see OpenAI::Responses::Tool::CodeInterpreter#container + # @see OpenAI::Models::Responses::Tool::CodeInterpreter#container module Container extend OpenAI::Internal::Type::Union @@ -273,7 +273,7 @@ class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil, type: :auto) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto} + # {OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto} # for more details. # # Configuration for a code interpreter container. Optionally specify the IDs of @@ -285,7 +285,7 @@ class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto)] + # @return [Array(String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto)] end end @@ -300,26 +300,26 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # Background type for the generated image. One of `transparent`, `opaque`, or # `auto`. Default: `auto`. # - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Background, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background, nil] optional :background, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Background } # @!attribute input_image_mask # Optional mask for inpainting. Contains `image_url` (string, optional) and # `file_id` (string, optional). # - # @return [OpenAI::Responses::Tool::ImageGeneration::InputImageMask, nil] + # @return [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask, nil] optional :input_image_mask, -> { OpenAI::Responses::Tool::ImageGeneration::InputImageMask } # @!attribute model # The image generation model to use. Default: `gpt-image-1`. 
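`CodeInterpreter#container`, whose documentation continues below, is a union: either a container ID string, or an `auto` object seeded with file IDs. A sketch with illustrative IDs:

    # Reuse an existing container by ID (illustrative ID)...
    tool_a = {type: :code_interpreter, container: "cntr_abc123"}

    # ...or let the platform auto-create one with files attached.
    tool_b = {
      type: :code_interpreter,
      container: {type: :auto, file_ids: ["file-abc123"]} # illustrative ID
    }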
# - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Model, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Model, nil] optional :model, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Model } # @!attribute moderation # Moderation level for the generated image. Default: `auto`. # - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Moderation, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Moderation, nil] optional :moderation, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Moderation } # @!attribute output_compression @@ -332,7 +332,7 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # The output format of the generated image. One of `png`, `webp`, or `jpeg`. # Default: `png`. # - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::OutputFormat, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::OutputFormat, nil] optional :output_format, enum: -> { OpenAI::Responses::Tool::ImageGeneration::OutputFormat } # @!attribute partial_images @@ -346,46 +346,46 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. # Default: `auto`. # - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Quality, nil] optional :quality, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Quality } # @!attribute size # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, # or `auto`. Default: `auto`. # - # @return [Symbol, OpenAI::Responses::Tool::ImageGeneration::Size, nil] + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Size, nil] optional :size, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Size } # @!method initialize(background: nil, input_image_mask: nil, model: nil, moderation: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, size: nil, type: :image_generation) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Tool::ImageGeneration} for more details. + # {OpenAI::Models::Responses::Tool::ImageGeneration} for more details. # # A tool that generates images using a model like `gpt-image-1`. # - # @param background [Symbol, OpenAI::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`, + # @param background [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`, # - # @param input_image_mask [OpenAI::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url` + # @param input_image_mask [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url` # - # @param model [Symbol, OpenAI::Responses::Tool::ImageGeneration::Model] The image generation model to use. Default: `gpt-image-1`. + # @param model [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Model] The image generation model to use. Default: `gpt-image-1`. # - # @param moderation [Symbol, OpenAI::Responses::Tool::ImageGeneration::Moderation] Moderation level for the generated image. Default: `auto`. + # @param moderation [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Moderation] Moderation level for the generated image. Default: `auto`. 
# # @param output_compression [Integer] Compression level for the output image. Default: 100. # - # @param output_format [Symbol, OpenAI::Responses::Tool::ImageGeneration::OutputFormat] The output format of the generated image. One of `png`, `webp`, or + # @param output_format [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::OutputFormat] The output format of the generated image. One of `png`, `webp`, or # # @param partial_images [Integer] Number of partial images to generate in streaming mode, from 0 (default value) t # - # @param quality [Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality] The quality of the generated image. One of `low`, `medium`, `high`, + # @param quality [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Quality] The quality of the generated image. One of `low`, `medium`, `high`, # - # @param size [Symbol, OpenAI::Responses::Tool::ImageGeneration::Size] The size of the generated image. One of `1024x1024`, `1024x1536`, + # @param size [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Size] The size of the generated image. One of `1024x1024`, `1024x1536`, # # @param type [Symbol, :image_generation] The type of the image generation tool. Always `image_generation`. # Background type for the generated image. One of `transparent`, `opaque`, or # `auto`. Default: `auto`. # - # @see OpenAI::Responses::Tool::ImageGeneration#background + # @see OpenAI::Models::Responses::Tool::ImageGeneration#background module Background extend OpenAI::Internal::Type::Enum @@ -397,7 +397,7 @@ module Background # @return [Array] end - # @see OpenAI::Responses::Tool::ImageGeneration#input_image_mask + # @see OpenAI::Models::Responses::Tool::ImageGeneration#input_image_mask class InputImageMask < OpenAI::Internal::Type::BaseModel # @!attribute file_id # File ID for the mask image. @@ -413,7 +413,8 @@ class InputImageMask < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, image_url: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Tool::ImageGeneration::InputImageMask} for more details. + # {OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask} for more + # details. # # Optional mask for inpainting. Contains `image_url` (string, optional) and # `file_id` (string, optional). @@ -425,7 +426,7 @@ class InputImageMask < OpenAI::Internal::Type::BaseModel # The image generation model to use. Default: `gpt-image-1`. # - # @see OpenAI::Responses::Tool::ImageGeneration#model + # @see OpenAI::Models::Responses::Tool::ImageGeneration#model module Model extend OpenAI::Internal::Type::Enum @@ -437,7 +438,7 @@ module Model # Moderation level for the generated image. Default: `auto`. # - # @see OpenAI::Responses::Tool::ImageGeneration#moderation + # @see OpenAI::Models::Responses::Tool::ImageGeneration#moderation module Moderation extend OpenAI::Internal::Type::Enum @@ -451,7 +452,7 @@ module Moderation # The output format of the generated image. One of `png`, `webp`, or `jpeg`. # Default: `png`. # - # @see OpenAI::Responses::Tool::ImageGeneration#output_format + # @see OpenAI::Models::Responses::Tool::ImageGeneration#output_format module OutputFormat extend OpenAI::Internal::Type::Enum @@ -466,7 +467,7 @@ module OutputFormat # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. # Default: `auto`. 
# - # @see OpenAI::Responses::Tool::ImageGeneration#quality + # @see OpenAI::Models::Responses::Tool::ImageGeneration#quality module Quality extend OpenAI::Internal::Type::Enum @@ -482,7 +483,7 @@ module Quality # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, # or `auto`. Default: `auto`. # - # @see OpenAI::Responses::Tool::ImageGeneration#size + # @see OpenAI::Models::Responses::Tool::ImageGeneration#size module Size extend OpenAI::Internal::Type::Enum @@ -510,7 +511,7 @@ class LocalShell < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::FunctionTool, OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::Tool::Mcp, OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool)] + # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index bcac7f83..ef4278cd 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -17,17 +17,17 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `mcp` # - `image_generation` # - # @return [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Responses::ToolChoiceTypes::Type } # @!method initialize(type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ToolChoiceTypes} for more details. + # {OpenAI::Models::Responses::ToolChoiceTypes} for more details. # # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). # - # @param type [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should to use. Learn more about + # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should to use. Learn more about # The type of hosted tool the model should to use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). @@ -41,7 +41,7 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `mcp` # - `image_generation` # - # @see OpenAI::Responses::ToolChoiceTypes#type + # @see OpenAI::Models::Responses::ToolChoiceTypes#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 37dd39fe..4dd6b2ec 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -8,40 +8,40 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. 
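The `ImageGeneration` options renamed above are all optional enums with `auto`-style defaults, and `ToolChoiceTypes` (also in this hunk range) is how a caller forces the hosted tool to run. A combined sketch; model name and prompt are illustrative:

    image_tool = {
      type: :image_generation,
      background: :transparent,
      size: :"1024x1024",
      quality: :high,
      output_format: :png,
      partial_images: 2 # stream up to 2 partial frames
    }

    response = client.responses.create(
      model: "gpt-4.1",                      # illustrative
      input: "A line drawing of a lighthouse",
      tools: [image_tool],
      tool_choice: {type: :image_generation} # ToolChoiceTypes: force the hosted tool
    )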
# - # @return [Symbol, OpenAI::Responses::WebSearchTool::Type] + # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Responses::WebSearchTool::Type } # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize, nil] + # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Responses::WebSearchTool::SearchContextSize } # @!attribute user_location # The user's location. # - # @return [OpenAI::Responses::WebSearchTool::UserLocation, nil] + # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] optional :user_location, -> { OpenAI::Responses::WebSearchTool::UserLocation }, nil?: true # @!method initialize(type:, search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::WebSearchTool} for more details. + # {OpenAI::Models::Responses::WebSearchTool} for more details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # - # @param type [Symbol, OpenAI::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev + # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev # - # @param search_context_size [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search + # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search # - # @param user_location [OpenAI::Responses::WebSearchTool::UserLocation, nil] The user's location. + # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] The user's location. # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. # - # @see OpenAI::Responses::WebSearchTool#type + # @see OpenAI::Models::Responses::WebSearchTool#type module Type extend OpenAI::Internal::Type::Enum @@ -55,7 +55,7 @@ module Type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @see OpenAI::Responses::WebSearchTool#search_context_size + # @see OpenAI::Models::Responses::WebSearchTool#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -67,7 +67,7 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Responses::WebSearchTool#user_location + # @see OpenAI::Models::Responses::WebSearchTool#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of location approximation. Always `approximate`. @@ -103,7 +103,7 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::WebSearchTool::UserLocation} for more details. + # {OpenAI::Models::Responses::WebSearchTool::UserLocation} for more details. 
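And the corresponding `WebSearchTool` shape, per the attributes above; every `user_location` field is optional and the location values here are illustrative:

    web_search = {
      type: :web_search_preview,
      search_context_size: :low, # :low, :medium (default), or :high
      user_location: {
        type: :approximate,      # always :approximate, per the docs above
        city: "Berlin",          # illustrative
        country: "DE",
        timezone: "Europe/Berlin"
      }
    }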
# # The user's location. # diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 5634269f..68b8b32e 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -24,7 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 30388ebb..ac676b5d 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -20,7 +20,7 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::StaticFileChunkingStrategy} for more details. + # {OpenAI::Models::StaticFileChunkingStrategy} for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. # diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 2169aa9f..43c0f303 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -5,7 +5,7 @@ module Models class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::StaticFileChunkingStrategy] + # @return [OpenAI::Models::StaticFileChunkingStrategy] required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type @@ -15,7 +15,7 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::StaticFileChunkingStrategy] + # @param static [OpenAI::Models::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb index 304bacb5..3368e144 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -5,7 +5,7 @@ module Models class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::StaticFileChunkingStrategy] + # @return [OpenAI::Models::StaticFileChunkingStrategy] required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type @@ -17,7 +17,7 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # Customize your own chunking strategy by setting chunk size and chunk overlap. # - # @param static [OpenAI::StaticFileChunkingStrategy] + # @param static [OpenAI::Models::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 8fe51192..e51974b2 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -51,18 +51,18 @@ class Upload < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the Upload. 
# - # @return [Symbol, OpenAI::Upload::Status] + # @return [Symbol, OpenAI::Models::Upload::Status] required :status, enum: -> { OpenAI::Upload::Status } # @!attribute file # The `File` object represents a document that has been uploaded to OpenAI. # - # @return [OpenAI::FileObject, nil] + # @return [OpenAI::Models::FileObject, nil] optional :file, -> { OpenAI::FileObject }, nil?: true # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) - # Some parameter documentations has been truncated, see {OpenAI::Upload} for more - # details. + # Some parameter documentations has been truncated, see {OpenAI::Models::Upload} + # for more details. # # The Upload object can accept byte chunks in the form of Parts. # @@ -78,15 +78,15 @@ class Upload < OpenAI::Internal::Type::BaseModel # # @param purpose [String] The intended purpose of the file. [Please refer here](https://platform.openai.co # - # @param status [Symbol, OpenAI::Upload::Status] The status of the Upload. + # @param status [Symbol, OpenAI::Models::Upload::Status] The status of the Upload. # - # @param file [OpenAI::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. + # @param file [OpenAI::Models::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. # # @param object [Symbol, :upload] The object type, which is always "upload". # The status of the Upload. # - # @see OpenAI::Upload#status + # @see OpenAI::Models::Upload#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 13caaed5..54e7530f 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -34,7 +34,7 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). # - # @return [Symbol, OpenAI::FilePurpose] + # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) @@ -47,7 +47,7 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 48c1a984..6a930067 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -18,7 +18,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute file_counts # - # @return [OpenAI::VectorStore::FileCounts] + # @return [OpenAI::Models::VectorStore::FileCounts] required :file_counts, -> { OpenAI::VectorStore::FileCounts } # @!attribute last_active_at @@ -55,7 +55,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
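For the `Upload` hunks above: an upload is declared with its total byte count, then parts are added and the upload is completed, with `status` moving through the enum shown. A sketch of the creation step only; the file and the `OpenAI::FilePurpose` value are illustrative assumptions:

    upload = client.uploads.create(
      bytes: File.size("training.jsonl"), # illustrative file
      filename: "training.jsonl",
      mime_type: "text/jsonl",
      purpose: :"fine-tune"               # assumed OpenAI::FilePurpose value
    )
    puts upload.status # :pending until parts are added and the upload completes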
# - # @return [Symbol, OpenAI::VectorStore::Status] + # @return [Symbol, OpenAI::Models::VectorStore::Status] required :status, enum: -> { OpenAI::VectorStore::Status } # @!attribute usage_bytes @@ -67,7 +67,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStore::ExpiresAfter, nil] + # @return [OpenAI::Models::VectorStore::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::VectorStore::ExpiresAfter } # @!attribute expires_at @@ -77,8 +77,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :expires_at, Integer, nil?: true # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) - # Some parameter documentations has been truncated, see {OpenAI::VectorStore} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStore} for more details. # # A vector store is a collection of processed files can be used by the # `file_search` tool. @@ -87,7 +87,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store was created. # - # @param file_counts [OpenAI::VectorStore::FileCounts] + # @param file_counts [OpenAI::Models::VectorStore::FileCounts] # # @param last_active_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store was last active. # @@ -95,17 +95,17 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the vector store. # - # @param status [Symbol, OpenAI::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or + # @param status [Symbol, OpenAI::Models::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or # # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. # - # @param expires_after [OpenAI::VectorStore::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] The expiration policy for a vector store. # # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. # # @param object [Symbol, :vector_store] The object type, which is always `vector_store`. - # @see OpenAI::VectorStore#file_counts + # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that were cancelled. @@ -153,7 +153,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. # - # @see OpenAI::VectorStore#status + # @see OpenAI::Models::VectorStore#status module Status extend OpenAI::Internal::Type::Enum @@ -165,7 +165,7 @@ module Status # @return [Array] end - # @see OpenAI::VectorStore#expires_after + # @see OpenAI::Models::VectorStore#expires_after class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor # Anchor timestamp after which the expiration policy applies. Supported anchors: @@ -182,7 +182,7 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::VectorStore::ExpiresAfter} for more details. 
+ # {OpenAI::Models::VectorStore::ExpiresAfter} for more details. # # The expiration policy for a vector store. # diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index a4babcf4..dfe50418 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -11,13 +11,13 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreCreateParams::ExpiresAfter, nil] + # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::VectorStoreCreateParams::ExpiresAfter } # @!attribute file_ids @@ -49,9 +49,9 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreCreateParams} for more details. # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -77,7 +77,7 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::VectorStoreCreateParams::ExpiresAfter} for more details. + # {OpenAI::Models::VectorStoreCreateParams::ExpiresAfter} for more details. # # The expiration policy for a vector store. # diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 40712974..fcdc7d40 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -36,7 +36,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::VectorStoreListParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStoreListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -49,7 +49,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. 
Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 32bf6133..c1ae6419 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -16,7 +16,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute filters # A filter to apply based on file attributes. # - # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::VectorStoreSearchParams::Filters } # @!attribute max_num_results @@ -29,7 +29,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. # - # @return [OpenAI::VectorStoreSearchParams::RankingOptions, nil] + # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions, nil] optional :ranking_options, -> { OpenAI::VectorStoreSearchParams::RankingOptions } # @!attribute rewrite_query @@ -44,11 +44,11 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. # @@ -60,7 +60,7 @@ module Query variant String - variant -> { OpenAI::VectorStoreSearchParams::Query::StringArray } + variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } # @!method self.variants # @return [Array(String, Array)] @@ -80,13 +80,13 @@ module Filters variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # - # @return [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker, nil] + # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::VectorStoreSearchParams::RankingOptions::Ranker } # @!attribute score_threshold @@ -97,10 +97,10 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. 
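A minimal search sketch built from the VectorStoreSearchParams fields above. The `client.vector_stores.search` call and the vector store ID are assumptions (the search resource itself is not touched by this hunk), and the plain hashes standing in for the params models reflect this SDK's usual coercion behavior.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    results = client.vector_stores.search(
      "vs_123",                                  # placeholder vector store ID
      query: "refund policy",
      max_num_results: 5,                        # docs above: between 1 and 50
      ranking_options: {score_threshold: 0.5},   # coerced into RankingOptions
      rewrite_query: true
    )
    results.each { |hit| puts [hit.score, hit.filename].join("  ") }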
# - # @param ranker [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker] + # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] # @param score_threshold [Float] - # @see OpenAI::VectorStoreSearchParams::RankingOptions#ranker + # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 9fab30cf..81557821 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -10,7 +10,7 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] + # @return [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::VectorStoreUpdateParams::ExpiresAfter }, nil?: true # @!attribute metadata @@ -34,7 +34,7 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreUpdateParams} for more details. # - # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -58,7 +58,7 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!method initialize(days:, anchor: :last_active_at) # Some parameter documentations has been truncated, see - # {OpenAI::VectorStoreUpdateParams::ExpiresAfter} for more details. + # {OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter} for more details. # # The expiration policy for a vector store. # diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 5fd03105..0815e0f1 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -34,7 +34,7 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) @@ -45,7 +45,7 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index a6e6e635..d9f5bb5c 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -34,7 +34,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # - # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter, nil] optional :filter, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Filter } # @!attribute limit @@ -48,7 +48,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Order } # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) @@ -61,11 +61,11 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 8cc4cee4..e4bfafa9 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -34,7 +34,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
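For the file-attachment params in this hunk, a sketch assuming the `client.vector_stores.files.create` resource call (not shown here) and placeholder IDs; the static chunking strategy hash mirrors the shape of StaticFileChunkingStrategyObjectParam.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    vs_file = client.vector_stores.files.create(
      "vs_123",                  # placeholder vector store ID
      file_id: "file-abc123",    # placeholder ID of an already-uploaded File
      chunking_strategy: {
        type: :static,
        static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
      }
    )
    puts vs_file.status          # in_progress until indexing completes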
# - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) @@ -45,7 +45,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index dcebcd20..2540afea 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -29,7 +29,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # - # @return [Symbol, OpenAI::VectorStores::FileListParams::Filter, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter, nil] optional :filter, enum: -> { OpenAI::VectorStores::FileListParams::Filter } # @!attribute limit @@ -43,7 +43,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::VectorStores::FileListParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStores::FileListParams::Order } # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) @@ -54,11 +54,11 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index eab6df4b..c6e737e3 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -21,7 +21,7 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # The last error associated with this vector store file. Will be `null` if there # are no errors. # - # @return [OpenAI::VectorStores::VectorStoreFile::LastError, nil] + # @return [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] required :last_error, -> { OpenAI::VectorStores::VectorStoreFile::LastError }, nil?: true # @!attribute object @@ -35,7 +35,7 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. # - # @return [Symbol, OpenAI::VectorStores::VectorStoreFile::Status] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] required :status, enum: -> { OpenAI::VectorStores::VectorStoreFile::Status } # @!attribute usage_bytes @@ -71,12 +71,12 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The strategy used to chunk the file. # - # @return [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject, nil] + # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategy } # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") # Some parameter documentations has been truncated, see - # {OpenAI::VectorStores::VectorStoreFile} for more details. + # {OpenAI::Models::VectorStores::VectorStoreFile} for more details. # # A list of files attached to a vector store. # @@ -84,9 +84,9 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store file was created. # - # @param last_error [OpenAI::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a + # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a # - # @param status [Symbol, OpenAI::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet # # @param usage_bytes [Integer] The total vector store usage in bytes. Note that this may be different from the # @@ -94,16 +94,16 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject] The strategy used to chunk the file. 
+ # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] The strategy used to chunk the file. # # @param object [Symbol, :"vector_store.file"] The object type, which is always `vector_store.file`. - # @see OpenAI::VectorStores::VectorStoreFile#last_error + # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. # - # @return [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] required :code, enum: -> { OpenAI::VectorStores::VectorStoreFile::LastError::Code } # @!attribute message @@ -116,13 +116,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this vector store file. Will be `null` if there # are no errors. # - # @param code [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`. # # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. # - # @see OpenAI::VectorStores::VectorStoreFile::LastError#code + # @see OpenAI::Models::VectorStores::VectorStoreFile::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -139,7 +139,7 @@ module Code # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. # - # @see OpenAI::VectorStores::VectorStoreFile#status + # @see OpenAI::Models::VectorStores::VectorStoreFile#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index b66e2b14..f4b251ee 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -20,7 +20,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!attribute file_counts # - # @return [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] required :file_counts, -> { OpenAI::VectorStores::VectorStoreFileBatch::FileCounts } # @!attribute object @@ -33,7 +33,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. # - # @return [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] required :status, enum: -> { OpenAI::VectorStores::VectorStoreFileBatch::Status } # @!attribute vector_store_id @@ -47,7 +47,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") # Some parameter documentations has been truncated, see - # {OpenAI::VectorStores::VectorStoreFileBatch} for more details. + # {OpenAI::Models::VectorStores::VectorStoreFileBatch} for more details. # # A batch of files attached to a vector store. 
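A polling sketch for the file batch model documented here. The `client.vector_stores.file_batches` calls are assumed from the gem's resource layout (this hunk only touches the model docs), and the retrieve signature, with the batch ID positional and the vector store ID as a keyword, is an analogy to the other nested resources in this patch.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    batch = client.vector_stores.file_batches.create(
      "vs_123",                          # placeholder vector store ID
      file_ids: %w[file-abc file-def]    # placeholder File IDs
    )

    while batch.status == :in_progress
      sleep(1)
      batch = client.vector_stores.file_batches.retrieve(batch.id, vector_store_id: "vs_123")
    end
    puts batch.file_counts.completed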
# @@ -55,15 +55,15 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store files batch was create # - # @param file_counts [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts] + # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] # - # @param status [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, ` + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, ` # # @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect # # @param object [Symbol, :"vector_store.files_batch"] The object type, which is always `vector_store.file_batch`. - # @see OpenAI::VectorStores::VectorStoreFileBatch#file_counts + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that where cancelled. @@ -110,7 +110,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. # - # @see OpenAI::VectorStores::VectorStoreFileBatch#status + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index befc2a60..744c3b69 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -13,13 +13,13 @@ class Speech # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. # - # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): # - # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not # - # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. 
`1.0` is # diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index e6df425e..45570d65 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -16,25 +16,25 @@ class Transcriptions # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. + # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose] + # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] # # @see OpenAI::Models::Audio::TranscriptionCreateParams def create(params) @@ -65,25 +65,25 @@ def create(params) # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. 
+ # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Audio::TranscriptionCreateParams def create_streaming(params) diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index acaf3c58..35ce0d09 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -13,17 +13,17 @@ class Translations # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose] + # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] # # @see OpenAI::Models::Audio::TranslationCreateParams def create(params) diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 718d0a81..bdd8d876 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -10,9 +10,9 @@ class Batches # # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) # - # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. 
Currently only `24h` # - # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` # # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. # @@ -20,7 +20,7 @@ class Batches # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchCreateParams def create(params) @@ -36,7 +36,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchRetrieveParams def retrieve(batch_id, params = {}) @@ -61,7 +61,7 @@ def retrieve(batch_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::BatchListParams def list(params = {}) @@ -86,7 +86,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchCancelParams def cancel(batch_id, params = {}) diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index ca192073..8b8adf5a 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -11,7 +11,7 @@ class Assistants # # @overload create(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. # @@ -21,21 +21,21 @@ class Assistants # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. 
The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantCreateParams def create(params) @@ -57,7 +57,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantRetrieveParams def retrieve(assistant_id, params = {}) @@ -84,25 +84,25 @@ def retrieve(assistant_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantUpdateParams def update(assistant_id, params = {}) @@ -129,11 +129,11 @@ def update(assistant_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. 
Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::AssistantListParams def list(params = {}) @@ -156,7 +156,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::AssistantDeleted] + # @return [OpenAI::Models::Beta::AssistantDeleted] # # @see OpenAI::Models::Beta::AssistantDeleteParams def delete(assistant_id, params = {}) diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 37b1d488..22ff4228 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -17,15 +17,15 @@ class Threads # # @overload create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadCreateParams def create(params = {}) @@ -47,7 +47,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadRetrieveParams def retrieve(thread_id, params = {}) @@ -70,11 +70,11 @@ def retrieve(thread_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadUpdateParams def update(thread_id, params = {}) @@ -96,7 +96,7 @@ def update(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::ThreadDeleted] + # @return [OpenAI::Models::Beta::ThreadDeleted] # # @see OpenAI::Models::Beta::ThreadDeleteParams def delete(thread_id, params = {}) @@ -127,29 +127,29 @@ def delete(thread_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. 
This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def create_and_run(params) @@ -187,29 +187,29 @@ def create_and_run(params) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def stream_raw(params) diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 503f7228..fbc2fcb8 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -14,17 +14,17 @@ class Messages # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageCreateParams def create(thread_id, params) @@ -51,7 +51,7 @@ def create(thread_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageRetrieveParams def retrieve(message_id, params) @@ -83,7 +83,7 @@ def retrieve(message_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageUpdateParams def update(message_id, params) @@ -116,13 +116,13 @@ def update(message_id, params) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param run_id [String] Filter messages by the run ID that generated them. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::MessageListParams def list(thread_id, params = {}) @@ -147,7 +147,7 @@ def list(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::MessageDeleted] + # @return [OpenAI::Models::Beta::Threads::MessageDeleted] # # @see OpenAI::Models::Beta::Threads::MessageDeleteParams def delete(message_id, params) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index cbf1b293..50128b5a 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -22,11 +22,11 @@ class Runs # # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t # - # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. # # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re # @@ -36,27 +36,27 @@ class Runs # # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca # - # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. 
This is usefu + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunCreateParams def create(thread_id, params) @@ -90,11 +90,11 @@ def create(thread_id, params) # # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t # - # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. # # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re # @@ -104,27 +104,27 @@ def create(thread_id, params) # # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca # - # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. 
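A blocking-poll sketch around the run overloads above (`create(thread_id, assistant_id:, ...)` and `retrieve(run_id, thread_id:, ...)`); the IDs and polling cadence are placeholders, and `requires_action` is the state in which `submit_tool_outputs` (documented below) comes into play.

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    run = client.beta.threads.runs.create("thread_123", assistant_id: "asst_123")

    while %i[queued in_progress].include?(run.status)
      sleep(1)
      run = client.beta.threads.runs.retrieve(run.id, thread_id: "thread_123")
    end
    puts run.status   # e.g. :completed, or :requires_action when tool outputs are due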
# - # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams def create_stream_raw(thread_id, params) @@ -160,7 +160,7 @@ def create_stream_raw(thread_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunRetrieveParams def retrieve(run_id, params) @@ -192,7 +192,7 @@ def retrieve(run_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunUpdateParams def update(run_id, params) @@ -225,11 +225,11 @@ def update(run_id, params) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::RunListParams def list(thread_id, params = {}) @@ -254,7 +254,7 @@ def list(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunCancelParams def cancel(run_id, params) @@ -288,11 +288,11 @@ def cancel(run_id, params) # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs(run_id, params) @@ -331,11 +331,11 @@ def submit_tool_outputs(run_id, params) # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. 
+ # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs_stream_raw(run_id, params) diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index eaa27d6e..6accaeb6 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -19,11 +19,11 @@ class Steps # # @param run_id [String] Path param: The ID of the run to which the run step belongs. # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @see OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams def retrieve(step_id, params) @@ -60,15 +60,15 @@ def retrieve(step_id, params) # # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be # - # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::Runs::StepListParams def list(run_id, params) diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index b44f7afc..8d5c0fda 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -32,17 +32,17 @@ class Completions # # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # @@ -54,35 +54,35 @@ class Completions # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. 
+ # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -90,11 +90,11 @@ class Completions # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionCreateParams def create(params) @@ -137,17 +137,17 @@ def create(params) # # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. 
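A hedged sketch of the non-streaming `create` call documented in this file; only `messages` and `model` are required, and everything outside the documented params (client setup, response field names) is an assumption.

require "openai"

client = OpenAI::Client.new

completion = client.chat.completions.create(
  model: "gpt-4o",
  messages: [{role: :user, content: "Say hello in one word."}],
  temperature: 0.2 # optional sampling control, per the params above
)
# Field access assumed from the ChatCompletion model returned above.
puts completion.choices.first.message.content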
# - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # @@ -159,35 +159,35 @@ def create(params) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. 
Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -195,11 +195,11 @@ def create(params) # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams def stream_raw(params) @@ -229,7 +229,7 @@ def stream_raw(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionRetrieveParams def retrieve(completion_id, params = {}) @@ -256,7 +256,7 @@ def retrieve(completion_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionUpdateParams def update(completion_id, params) @@ -286,11 +286,11 @@ def update(completion_id, params) # # @param model [String] The model used to generate the Chat Completions. # - # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::CompletionListParams def list(params = {}) @@ -314,7 +314,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletionDeleted] + # @return [OpenAI::Models::Chat::ChatCompletionDeleted] # # @see OpenAI::Models::Chat::CompletionDeleteParams def delete(completion_id, params = {}) diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index 36a3e3c8..ea3e3382 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -19,11 +19,11 @@ class Messages # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. 
Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::Completions::MessageListParams def list(completion_id, params = {}) diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index f65e1891..de1e8786 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -12,7 +12,7 @@ class Completions # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -36,7 +36,7 @@ class Completions # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. # @@ -48,7 +48,7 @@ class Completions # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Completion] + # @return [OpenAI::Models::Completion] # # @see OpenAI::Models::CompletionCreateParams def create(params) @@ -75,7 +75,7 @@ def create(params) # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -99,7 +99,7 @@ def create(params) # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. 
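The legacy completions surface above reads most naturally next to a call site. A minimal sketch, assuming the same client setup as before; the model ID is a placeholder.

require "openai"

client = OpenAI::Client.new

# `suffix` (documented above) supplies text that should follow an inserted completion.
completion = client.completions.create(
  model: "gpt-3.5-turbo-instruct", # placeholder model ID
  prompt: "Once upon a time",
  max_tokens: 16,
  suffix: " And they lived happily ever after."
)
puts completion.choices.first.text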
# @@ -111,7 +111,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::CompletionCreateParams def create_streaming(params) diff --git a/lib/openai/resources/containers.rb b/lib/openai/resources/containers.rb index 44ccdb6f..01d685d2 100644 --- a/lib/openai/resources/containers.rb +++ b/lib/openai/resources/containers.rb @@ -12,7 +12,7 @@ class Containers # # @param name [String] Name of the container to create. # - # @param expires_after [OpenAI::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time. + # @param expires_after [OpenAI::Models::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time. # # @param file_ids [Array] IDs of files to copy to the container. # @@ -62,7 +62,7 @@ def retrieve(container_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/containers/files.rb b/lib/openai/resources/containers/files.rb index 7e6fd334..c2adf25f 100644 --- a/lib/openai/resources/containers/files.rb +++ b/lib/openai/resources/containers/files.rb @@ -77,7 +77,7 @@ def retrieve(file_id, params) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index 8d709c8b..934b58cc 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -12,17 +12,17 @@ class Embeddings # # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i # - # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo # - # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. 
Can be either `float` or [`base64`](http # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CreateEmbeddingResponse] + # @return [OpenAI::Models::CreateEmbeddingResponse] # # @see OpenAI::Models::EmbeddingCreateParams def create(params) diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index f673f38f..cabdba94 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -18,9 +18,9 @@ class Evals # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl + # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -102,9 +102,9 @@ def update(eval_id, params = {}) # # @param limit [Integer] Number of evals to retrieve. # - # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d # - # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index cba5fdcd..68e1590e 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -18,7 +18,7 @@ class Runs # # @param eval_id [String] The ID of the evaluation to create a run for. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -80,9 +80,9 @@ def retrieve(run_id, params) # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. 
Use `asc` for ascending order or `desc` for de + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals/runs/output_items.rb b/lib/openai/resources/evals/runs/output_items.rb index 0d80996a..41f665d8 100644 --- a/lib/openai/resources/evals/runs/output_items.rb +++ b/lib/openai/resources/evals/runs/output_items.rb @@ -53,9 +53,9 @@ def retrieve(output_item_id, params) # # @param limit [Integer] Query param: Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o # - # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 2726345d..34eccf82 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -32,11 +32,11 @@ class Files # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileObject] + # @return [OpenAI::Models::FileObject] # # @see OpenAI::Models::FileCreateParams def create(params) @@ -59,7 +59,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileObject] + # @return [OpenAI::Models::FileObject] # # @see OpenAI::Models::FileRetrieveParams def retrieve(file_id, params = {}) @@ -82,13 +82,13 @@ def retrieve(file_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. 
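Since `Files#list` above returns a CursorPage, a paging sketch may help; `auto_paging_each` is assumed from the SDK's pagination helpers rather than shown in this patch.

require "openai"

client = OpenAI::Client.new

# Filter by the `purpose` query param documented above.
page = client.files.list(purpose: "fine-tune", limit: 100, order: :desc)
page.auto_paging_each do |file| # assumed pagination helper on CursorPage
  puts "#{file.id}\t#{file.filename}"
end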
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FileListParams def list(params = {}) @@ -111,7 +111,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileDeleted] + # @return [OpenAI::Models::FileDeleted] # # @see OpenAI::Models::FileDeleteParams def delete(file_id, params = {}) diff --git a/lib/openai/resources/fine_tuning/alpha/graders.rb b/lib/openai/resources/fine_tuning/alpha/graders.rb index 5c7ba54b..52c4cc77 100644 --- a/lib/openai/resources/fine_tuning/alpha/graders.rb +++ b/lib/openai/resources/fine_tuning/alpha/graders.rb @@ -9,7 +9,7 @@ class Graders # # @overload run(grader:, model_sample:, reference_answer:, request_options: {}) # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param model_sample [String] The model sample to be evaluated. # @@ -35,7 +35,7 @@ def run(params) # # @overload validate(grader:, request_options: {}) # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index e11f3d60..f0afec14 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -54,7 +54,7 @@ def create(fine_tuned_model_checkpoint, params) # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. # diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index b6458288..c978c56c 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -20,17 +20,17 @@ class Jobs # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. 
You can select one of the + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # # @param training_file [String] The ID of an uploaded file that contains training data. # - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j # @@ -40,7 +40,7 @@ class Jobs # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCreateParams def create(params) @@ -67,7 +67,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobRetrieveParams def retrieve(fine_tuning_job_id, params = {}) @@ -94,7 +94,7 @@ def retrieve(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListParams def list(params = {}) @@ -120,7 +120,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCancelParams def cancel(fine_tuning_job_id, params = {}) @@ -147,7 +147,7 @@ def cancel(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListEventsParams def list_events(fine_tuning_job_id, params = {}) @@ -173,7 +173,7 @@ def list_events(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobPauseParams def pause(fine_tuning_job_id, params = {}) @@ -196,7 +196,7 @@ def pause(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobResumeParams def resume(fine_tuning_job_id, params = {}) diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb 
b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index b588de3d..0483645d 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -20,7 +20,7 @@ class Checkpoints # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::Jobs::CheckpointListParams def list(fine_tuning_job_id, params = {}) diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 69150b5f..6a521f3b 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -12,19 +12,19 @@ class Images # # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageCreateVariationParams def create_variation(params) @@ -51,25 +51,25 @@ def create_variation(params) # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # - # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. 
`high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageEditParams def edit(params) @@ -94,31 +94,31 @@ def edit(params) # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # - # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only # - # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # - # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. 
Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageGenerateParams def generate(params) diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index 1dd26c31..8a3c6bab 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -12,7 +12,7 @@ class Models # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Model] + # @return [OpenAI::Models::Model] # # @see OpenAI::Models::ModelRetrieveParams def retrieve(model, params = {}) @@ -31,7 +31,7 @@ def retrieve(model, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::ModelListParams def list(params = {}) @@ -53,7 +53,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ModelDeleted] + # @return [OpenAI::Models::ModelDeleted] # # @see OpenAI::Models::ModelDeleteParams def delete(model, params = {}) diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 568e03d3..a0b0e774 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -11,9 +11,9 @@ class Moderations # # @overload create(input:, model: nil, request_options: {}) # - # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or # - # @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. Learn more in + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 68c6c276..43ae26e8 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -25,13 +25,13 @@ class Responses # # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # # @param background [Boolean, nil] Whether to run the model response in the background. # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -43,29 +43,29 @@ class Responses # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. 
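A minimal sketch of the Responses `create` surface documented above, assuming the usual client setup; `input` and `model` are the required params, and the enum value passed to `truncation` is an assumption about the Truncation type.

require "openai"

client = OpenAI::Client.new

response = client.responses.create(
  model: "gpt-4o",
  input: "Summarize the Responses API in one sentence.",
  truncation: :auto # truncation strategy, per the param documented above
)
puts response.id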
# # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseCreateParams def create(params) @@ -102,13 +102,13 @@ def create(params) # # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # # @param background [Boolean, nil] Whether to run the model response in the background. # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -120,29 +120,29 @@ def create(params) # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) @@ -172,11 +172,11 @@ def stream_raw(params) # # @param response_id [String] The ID of the response to retrieve. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 36681e73..de693bf2 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -17,15 +17,15 @@ class InputItems # # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index 363b2e57..65d75883 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -37,11 +37,11 @@ class Uploads # # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. 
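
A minimal sketch of the call sites whose YARD types the Responses hunks above re-root under `OpenAI::Models` (the API key handling, prompts, and the `gpt-4o` model string are illustrative assumptions, not part of this patch):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    # The documented return type is now OpenAI::Models::Responses::Response.
    response = client.responses.create(input: "Say hello.", model: "gpt-4o")
    puts response.id

    # stream_raw is documented to return an OpenAI::Internal::Stream of typed events.
    client.responses.stream_raw(input: "Count to three.", model: "gpt-4o").each do |event|
      p event
    end
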
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCreateParams def create(params) @@ -60,7 +60,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCancelParams def cancel(upload_id, params = {}) @@ -99,7 +99,7 @@ def cancel(upload_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCompleteParams def complete(upload_id, params) diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index f971413c..05f10f4c 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -27,7 +27,7 @@ class Parts # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Uploads::UploadPart] + # @return [OpenAI::Models::Uploads::UploadPart] # # @see OpenAI::Models::Uploads::PartCreateParams def create(upload_id, params) diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index c1d3c184..d903b9ef 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -16,9 +16,9 @@ class VectorStores # # @overload create(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -28,7 +28,7 @@ class VectorStores # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreCreateParams def create(params = {}) @@ -50,7 +50,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreRetrieveParams def retrieve(vector_store_id, params = {}) @@ -71,7 +71,7 @@ def retrieve(vector_store_id, params = {}) # # @param vector_store_id [String] The ID of the vector store to modify. # - # @param expires_after [OpenAI::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
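
The Uploads hunks above are documentation-only; a sketch of the multipart flow they describe, reusing the client from the previous sketch. The payload, filename, MIME type, and purpose are assumptions for illustration, and `bytes:`/`filename:` come from the Upload API rather than the excerpted hunks:

    require "stringio"

    data = File.read("training.jsonl")

    upload = client.uploads.create(
      bytes: data.bytesize,
      filename: "training.jsonl",
      mime_type: "application/jsonl",
      purpose: :"fine-tune"
    )

    # Each part is documented to return an OpenAI::Models::Uploads::UploadPart.
    part = client.uploads.parts.create(upload.id, data: StringIO.new(data))

    # Completing stitches the parts into the final OpenAI::Models::Upload.
    client.uploads.complete(upload.id, part_ids: [part.id])
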
This can be # @@ -79,7 +79,7 @@ def retrieve(vector_store_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreUpdateParams def update(vector_store_id, params = {}) @@ -106,11 +106,11 @@ def update(vector_store_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStoreListParams def list(params = {}) @@ -133,7 +133,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStoreDeleted] + # @return [OpenAI::Models::VectorStoreDeleted] # # @see OpenAI::Models::VectorStoreDeleteParams def delete(vector_store_id, params = {}) @@ -157,11 +157,11 @@ def delete(vector_store_id, params = {}) # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. # diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index b8b4133c..99d3e7df 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -17,11 +17,11 @@ class FileBatches # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
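
The vector-store search parameters retyped above compose as follows; the store ID, query, and result limit are placeholders, and the paged return enumerating results with `filename` and `score` fields is an assumption from the search API rather than this excerpt:

    results = client.vector_stores.search(
      "vs_abc123",
      query: "refund policy",
      max_num_results: 5,
      rewrite_query: true
    )
    results.each { |hit| puts "#{hit.filename}: #{hit.score}" }
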
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCreateParams def create(vector_store_id, params) @@ -45,7 +45,7 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchRetrieveParams def retrieve(batch_id, params) @@ -73,7 +73,7 @@ def retrieve(batch_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCancelParams def cancel(batch_id, params) @@ -105,15 +105,15 @@ def cancel(batch_id, params) # # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin # - # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be # - # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileBatchListFilesParams def list_files(batch_id, params) diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 79c76c82..8852aed7 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -19,11 +19,11 @@ class Files # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileCreateParams def create(vector_store_id, params) @@ -47,7 +47,7 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileRetrieveParams def retrieve(file_id, params) @@ -79,7 +79,7 @@ def retrieve(file_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileUpdateParams def update(file_id, params) @@ -110,15 +110,15 @@ def update(file_id, params) # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
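
A sketch of the vector-store Files sub-resource whose parameter types change above; the IDs are placeholders, and `filter:`/`order:` take the enum values named in these hunks:

    file = client.vector_stores.files.create("vs_abc123", file_id: "file-abc123")

    page = client.vector_stores.files.list("vs_abc123", order: :desc, filter: :completed)
    page.auto_paging_each { |f| puts f.id }
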
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileListParams def list(vector_store_id, params = {}) @@ -146,7 +146,7 @@ def list(vector_store_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileDeleted] + # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] # # @see OpenAI::Models::VectorStores::FileDeleteParams def delete(file_id, params) diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index 891ce3fe..6513b6f7 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -70,8 +70,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -223,8 +223,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index c1d1d4d7..1158ef42 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -68,8 +68,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -221,8 +221,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index 5cdaafb0..66b5570d 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -72,8 +72,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, 
OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -225,8 +225,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index 780e123c..07d46351 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -70,8 +70,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -223,8 +223,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 90320393..4d649383 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,7 +3,7 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | OpenAI::AllModels::responses_only_model + | OpenAI::Models::AllModels::responses_only_model module AllModels extend OpenAI::Internal::Type::Union @@ -22,7 +22,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::AllModels::responses_only_model] + def self?.values: -> ::Array[OpenAI::Models::AllModels::responses_only_model] end def self?.variants: -> ::Array[OpenAI::Models::all_models] diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 2faac5ef..6c188596 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -46,6 +46,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: String, + model: OpenAI::Models::Audio::SpeechCreateParams::model, + voice: OpenAI::Models::Audio::SpeechCreateParams::voice, + instructions: String, + response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, + speed: Float, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::Audio::speech_model module Model diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index 
3e522926..2e1beee5 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -21,6 +21,11 @@ module OpenAI ?logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] ) -> void + def to_hash: -> { + text: String, + logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] + } + type logprob = { token: String, bytes: ::Array[Float], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -41,6 +46,12 @@ module OpenAI ?bytes: ::Array[Float], ?logprob: Float ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Float], + logprob: Float + } end end end diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 92e65b19..28e08060 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -11,7 +11,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::audio_response_format, temperature: Float, - timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] } & OpenAI::Internal::Type::request_parameters @@ -49,11 +49,11 @@ module OpenAI def temperature=: (Float) -> Float - attr_reader timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity]? + attr_reader timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]? def timestamp_granularities=: ( - ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] - ) -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] + ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( file: OpenAI::Internal::file_input, @@ -64,10 +64,23 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file: OpenAI::Internal::file_input, + model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, + include: ::Array[OpenAI::Models::Audio::transcription_include], + language: String, + prompt: String, + response_format: OpenAI::Models::audio_response_format, + temperature: Float, + timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::audio_model module Model @@ -85,14 +98,14 @@ module OpenAI type vad_config = { - type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, prefix_padding_ms: Integer, silence_duration_ms: Integer, threshold: Float } class VadConfig < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_ + attr_accessor type: 
OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_ attr_reader prefix_padding_ms: Integer? @@ -107,12 +120,19 @@ module OpenAI def threshold=: (Float) -> Float def initialize: ( - type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, ?prefix_padding_ms: Integer, ?silence_duration_ms: Integer, ?threshold: Float ) -> void + def to_hash: -> { + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + prefix_padding_ms: Integer, + silence_duration_ms: Integer, + threshold: Float + } + type type_ = :server_vad module Type @@ -120,7 +140,7 @@ module OpenAI SERVER_VAD: :server_vad - def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_] + def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_] end end @@ -135,7 +155,7 @@ module OpenAI WORD: :word SEGMENT: :segment - def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] end end end diff --git a/sig/openai/models/audio/transcription_segment.rbs b/sig/openai/models/audio/transcription_segment.rbs index 9c38a1be..82da7095 100644 --- a/sig/openai/models/audio/transcription_segment.rbs +++ b/sig/openai/models/audio/transcription_segment.rbs @@ -48,6 +48,19 @@ module OpenAI text: String, tokens: ::Array[Integer] ) -> void + + def to_hash: -> { + id: Integer, + avg_logprob: Float, + compression_ratio: Float, + end_: Float, + no_speech_prob: Float, + seek: Integer, + start: Float, + temperature: Float, + text: String, + tokens: ::Array[Integer] + } end end end diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs index 09c419d7..155b8e1d 100644 --- a/sig/openai/models/audio/transcription_text_delta_event.rbs +++ b/sig/openai/models/audio/transcription_text_delta_event.rbs @@ -25,6 +25,12 @@ module OpenAI ?type: :"transcript.text.delta" ) -> void + def to_hash: -> { + delta: String, + type: :"transcript.text.delta", + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] + } + type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -45,6 +51,8 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void + + def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } end end end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs index c2fb0bc1..d8f864e3 100644 --- a/sig/openai/models/audio/transcription_text_done_event.rbs +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -25,6 +25,12 @@ module OpenAI ?type: :"transcript.text.done" ) -> void + def to_hash: -> { + text: String, + type: :"transcript.text.done", + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + } + type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -45,6 +51,8 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void + + def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } end end end diff --git a/sig/openai/models/audio/transcription_verbose.rbs 
b/sig/openai/models/audio/transcription_verbose.rbs index 6e40e651..171bfa08 100644 --- a/sig/openai/models/audio/transcription_verbose.rbs +++ b/sig/openai/models/audio/transcription_verbose.rbs @@ -36,6 +36,14 @@ module OpenAI ?segments: ::Array[OpenAI::Audio::TranscriptionSegment], ?words: ::Array[OpenAI::Audio::TranscriptionWord] ) -> void + + def to_hash: -> { + duration: Float, + language: String, + text: String, + segments: ::Array[OpenAI::Audio::TranscriptionSegment], + words: ::Array[OpenAI::Audio::TranscriptionWord] + } end end end diff --git a/sig/openai/models/audio/transcription_word.rbs b/sig/openai/models/audio/transcription_word.rbs index eb48b12d..1bd7d752 100644 --- a/sig/openai/models/audio/transcription_word.rbs +++ b/sig/openai/models/audio/transcription_word.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor word: String def initialize: (end_: Float, start: Float, word: String) -> void + + def to_hash: -> { end_: Float, start: Float, word: String } end end end diff --git a/sig/openai/models/audio/translation.rbs b/sig/openai/models/audio/translation.rbs index cc45a2b2..832a27d3 100644 --- a/sig/openai/models/audio/translation.rbs +++ b/sig/openai/models/audio/translation.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor text: String def initialize: (text: String) -> void + + def to_hash: -> { text: String } end end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 94188d94..da4cbb3c 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -42,6 +42,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file: OpenAI::Internal::file_input, + model: OpenAI::Models::Audio::TranslationCreateParams::model, + prompt: String, + response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, + temperature: Float, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::audio_model module Model diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs index 20796402..cac25f84 100644 --- a/sig/openai/models/audio/translation_verbose.rbs +++ b/sig/openai/models/audio/translation_verbose.rbs @@ -28,6 +28,13 @@ module OpenAI text: String, ?segments: ::Array[OpenAI::Audio::TranscriptionSegment] ) -> void + + def to_hash: -> { + duration: Float, + language: String, + text: String, + segments: ::Array[OpenAI::Audio::TranscriptionSegment] + } end end end diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs index 81e99fa2..54aeed24 100644 --- a/sig/openai/models/auto_file_chunking_strategy_param.rbs +++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs @@ -6,6 +6,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end end end diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs index 702f7dff..cdba6cdc 100644 --- a/sig/openai/models/batch.rbs +++ b/sig/openai/models/batch.rbs @@ -8,7 +8,7 @@ module OpenAI endpoint: String, input_file_id: String, object: :batch, - status: OpenAI::Batch::status, + status: OpenAI::Models::Batch::status, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -37,7 +37,7 @@ module OpenAI attr_accessor object: :batch - attr_accessor status: OpenAI::Batch::status + attr_accessor status: 
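
The `to_hash` signatures added throughout these sig files describe the plain-Hash projection the models already support; a small sketch using the Translation and TranscriptionWord shapes from above (the field values are invented):

    translation = OpenAI::Audio::Translation.new(text: "Bonjour")
    translation.to_hash # => { text: "Bonjour" }

    word = OpenAI::Audio::TranscriptionWord.new(end_: 1.2, start: 0.4, word: "Bonjour")
    word.to_hash # => { end_: 1.2, start: 0.4, word: "Bonjour" }
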
OpenAI::Models::Batch::status attr_reader cancelled_at: Integer? @@ -97,7 +97,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Batch::status, + status: OpenAI::Models::Batch::status, ?cancelled_at: Integer, ?cancelling_at: Integer, ?completed_at: Integer, @@ -114,6 +114,29 @@ module OpenAI ?object: :batch ) -> void + def to_hash: -> { + id: String, + completion_window: String, + created_at: Integer, + endpoint: String, + input_file_id: String, + object: :batch, + status: OpenAI::Models::Batch::status, + cancelled_at: Integer, + cancelling_at: Integer, + completed_at: Integer, + error_file_id: String, + errors: OpenAI::Batch::Errors, + expired_at: Integer, + expires_at: Integer, + failed_at: Integer, + finalizing_at: Integer, + in_progress_at: Integer, + metadata: OpenAI::Models::metadata?, + output_file_id: String, + request_counts: OpenAI::BatchRequestCounts + } + type status = :validating | :failed @@ -136,7 +159,7 @@ module OpenAI CANCELLING: :cancelling CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::Batch::status] + def self?.values: -> ::Array[OpenAI::Models::Batch::status] end type errors = { data: ::Array[OpenAI::BatchError], object: String } @@ -154,6 +177,8 @@ module OpenAI ?data: ::Array[OpenAI::BatchError], ?object: String ) -> void + + def to_hash: -> { data: ::Array[OpenAI::BatchError], object: String } end end end diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs index 9b655647..944d9c29 100644 --- a/sig/openai/models/batch_cancel_params.rbs +++ b/sig/openai/models/batch_cancel_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index b5519cea..85193e35 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -29,6 +29,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + completion_window: OpenAI::Models::BatchCreateParams::completion_window, + endpoint: OpenAI::Models::BatchCreateParams::endpoint, + input_file_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } + type completion_window = :"24h" module CompletionWindow diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs index 07c43d31..028bc05a 100644 --- a/sig/openai/models/batch_error.rbs +++ b/sig/openai/models/batch_error.rbs @@ -22,6 +22,13 @@ module OpenAI ?message: String, ?param: String? ) -> void + + def to_hash: -> { + code: String, + line: Integer?, + message: String, + param: String? 
+ } end end end diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index 9f1b2961..59ce8fb7 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -21,6 +21,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/batch_request_counts.rbs b/sig/openai/models/batch_request_counts.rbs index 38d29256..d8013508 100644 --- a/sig/openai/models/batch_request_counts.rbs +++ b/sig/openai/models/batch_request_counts.rbs @@ -15,6 +15,8 @@ module OpenAI failed: Integer, total: Integer ) -> void + + def to_hash: -> { completed: Integer, failed: Integer, total: Integer } end end end diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index b1deb5c3..232d9ed7 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs index 867ab8e5..3128e9ba 100644 --- a/sig/openai/models/beta/assistant.rbs +++ b/sig/openai/models/beta/assistant.rbs @@ -61,6 +61,22 @@ module OpenAI ?object: :assistant ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + model: String, + name: String?, + object: :assistant, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::Assistant::ToolResources?, + top_p: Float? 
+ } + type tool_resources = { code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, @@ -85,6 +101,11 @@ module OpenAI ?file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -93,6 +114,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -103,6 +126,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index 574c22a2..f4bdd80f 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -62,6 +62,21 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::Beta::AssistantCreateParams::model, + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + name: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + top_p: Float?, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -94,6 +109,11 @@ module OpenAI ?file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -102,6 +122,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -126,19 +148,24 @@ module OpenAI ?vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? 
def chunking_strategy=: ( - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -147,11 +174,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? + } + type chunking_strategy = OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -165,6 +198,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -183,6 +218,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -198,10 +238,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index e429cf3e..e476dced 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs index 5ea06673..f5fe869a 100644 --- a/sig/openai/models/beta/assistant_deleted.rbs +++ b/sig/openai/models/beta/assistant_deleted.rbs @@ -16,6 +16,12 @@ module OpenAI deleted: bool, ?object: :"assistant.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"assistant.deleted" + } end end end diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 252e6b46..d9fea65e 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -40,6 +40,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: 
String, + limit: Integer, + order: OpenAI::Models::Beta::AssistantListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index a1fec037..be66a758 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index e0797c37..0852c3c3 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -51,6 +51,12 @@ module OpenAI ?enabled: bool, ?event: :"thread.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Thread, + event: :"thread.created", + enabled: bool + } end type thread_run_created = @@ -65,6 +71,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.created" + } end type thread_run_queued = @@ -79,6 +90,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.queued" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.queued" + } end type thread_run_in_progress = @@ -93,6 +109,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.in_progress" + } end type thread_run_requires_action = @@ -110,6 +131,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.requires_action" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.requires_action" + } end type thread_run_completed = @@ -124,6 +150,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.completed" + } end type thread_run_incomplete = @@ -138,6 +169,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.incomplete" + } end type thread_run_failed = @@ -152,6 +188,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.failed" + } end type thread_run_cancelling = @@ -166,6 +207,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelling" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelling" + } end type thread_run_cancelled = @@ -180,6 +226,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelled" + } end type thread_run_expired = @@ -194,6 +245,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.expired" + } end type thread_run_step_created = @@ -211,6 +267,11 @@ module OpenAI 
data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.created" + } end type thread_run_step_in_progress = @@ -228,6 +289,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.in_progress" + } end type thread_run_step_delta = @@ -245,6 +311,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, ?event: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: :"thread.run.step.delta" + } end type thread_run_step_completed = @@ -262,6 +333,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.completed" + } end type thread_run_step_failed = @@ -279,6 +355,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.failed" + } end type thread_run_step_cancelled = @@ -296,6 +377,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.cancelled" + } end type thread_run_step_expired = @@ -313,6 +399,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.expired" + } end type thread_message_created = @@ -330,6 +421,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.created" + } end type thread_message_in_progress = @@ -347,6 +443,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.in_progress" + } end type thread_message_delta = @@ -364,6 +465,11 @@ module OpenAI data: OpenAI::Beta::Threads::MessageDeltaEvent, ?event: :"thread.message.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::MessageDeltaEvent, + event: :"thread.message.delta" + } end type thread_message_completed = @@ -381,6 +487,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.completed" + } end type thread_message_incomplete = @@ -398,6 +509,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.incomplete" + } end type error_event = { data: OpenAI::ErrorObject, event: :error } @@ -408,6 +524,8 @@ module OpenAI attr_accessor event: :error def initialize: (data: OpenAI::ErrorObject, ?event: :error) -> void + + def to_hash: -> { data: OpenAI::ErrorObject, event: :error } end def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs 
b/sig/openai/models/beta/assistant_tool_choice.rbs index 526b290d..1d80009b 100644 --- a/sig/openai/models/beta/assistant_tool_choice.rbs +++ b/sig/openai/models/beta/assistant_tool_choice.rbs @@ -3,12 +3,12 @@ module OpenAI module Beta type assistant_tool_choice = { - type: OpenAI::Beta::AssistantToolChoice::type_, + type: OpenAI::Models::Beta::AssistantToolChoice::type_, function: OpenAI::Beta::AssistantToolChoiceFunction } class AssistantToolChoice < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::AssistantToolChoice::type_ + attr_accessor type: OpenAI::Models::Beta::AssistantToolChoice::type_ attr_reader function: OpenAI::Beta::AssistantToolChoiceFunction? @@ -17,10 +17,15 @@ module OpenAI ) -> OpenAI::Beta::AssistantToolChoiceFunction def initialize: ( - type: OpenAI::Beta::AssistantToolChoice::type_, + type: OpenAI::Models::Beta::AssistantToolChoice::type_, ?function: OpenAI::Beta::AssistantToolChoiceFunction ) -> void + def to_hash: -> { + type: OpenAI::Models::Beta::AssistantToolChoice::type_, + function: OpenAI::Beta::AssistantToolChoiceFunction + } + type type_ = :function | :code_interpreter | :file_search module Type @@ -30,7 +35,7 @@ module OpenAI CODE_INTERPRETER: :code_interpreter FILE_SEARCH: :file_search - def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoice::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoice::type_] end end end diff --git a/sig/openai/models/beta/assistant_tool_choice_function.rbs b/sig/openai/models/beta/assistant_tool_choice_function.rbs index 36f0983c..b97b9891 100644 --- a/sig/openai/models/beta/assistant_tool_choice_function.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_function.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs index ddd66365..e879d0d5 100644 --- a/sig/openai/models/beta/assistant_tool_choice_option.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs @@ -2,7 +2,7 @@ module OpenAI module Models module Beta type assistant_tool_choice_option = - OpenAI::Beta::AssistantToolChoiceOption::auto + OpenAI::Models::Beta::AssistantToolChoiceOption::auto | OpenAI::Beta::AssistantToolChoice module AssistantToolChoiceOption @@ -17,7 +17,7 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto] end def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_tool_choice_option] diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index de493080..6ee6405d 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -66,6 +66,21 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::AssistantUpdateParams::model, + name: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + top_p: 
Float?, + request_options: OpenAI::RequestOptions + } + type model = String | :"gpt-4.1" @@ -172,6 +187,11 @@ module OpenAI ?file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -180,6 +200,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -190,6 +212,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs index 84b353d0..bd9a6760 100644 --- a/sig/openai/models/beta/code_interpreter_tool.rbs +++ b/sig/openai/models/beta/code_interpreter_tool.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor type: :code_interpreter def initialize: (?type: :code_interpreter) -> void + + def to_hash: -> { type: :code_interpreter } end end end diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs index 9e544ea9..85f54d10 100644 --- a/sig/openai/models/beta/file_search_tool.rbs +++ b/sig/openai/models/beta/file_search_tool.rbs @@ -21,6 +21,11 @@ module OpenAI ?type: :file_search ) -> void + def to_hash: -> { + type: :file_search, + file_search: OpenAI::Beta::FileSearchTool::FileSearch + } + type file_search = { max_num_results: Integer, @@ -43,26 +48,36 @@ module OpenAI ?ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions ) -> void + def to_hash: -> { + max_num_results: Integer, + ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions + } + type ranking_options = { score_threshold: Float, - ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker } class RankingOptions < OpenAI::Internal::Type::BaseModel attr_accessor score_threshold: Float - attr_reader ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker? + attr_reader ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker? 
def ranker=: ( - OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker - ) -> OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker def initialize: ( score_threshold: Float, - ?ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker ) -> void + def to_hash: -> { + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + } + type ranker = :auto | :default_2024_08_21 module Ranker @@ -71,7 +86,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 - def self?.values: -> ::Array[OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker] end end end diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs index d93fda7c..8798cc58 100644 --- a/sig/openai/models/beta/function_tool.rbs +++ b/sig/openai/models/beta/function_tool.rbs @@ -13,6 +13,11 @@ module OpenAI function: OpenAI::FunctionDefinition, ?type: :function ) -> void + + def to_hash: -> { + function: OpenAI::FunctionDefinition, + type: :function + } end end end diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index e13d605b..eb4aed3e 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -26,6 +26,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.created" + } end type thread_message_in_progress = @@ -43,6 +48,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.in_progress" + } end type thread_message_delta = @@ -60,6 +70,11 @@ module OpenAI data: OpenAI::Beta::Threads::MessageDeltaEvent, ?event: :"thread.message.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::MessageDeltaEvent, + event: :"thread.message.delta" + } end type thread_message_completed = @@ -77,6 +92,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.completed" + } end type thread_message_incomplete = @@ -94,6 +114,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.incomplete" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::message_stream_event] diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index 3b292e7c..cf3454d9 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -28,6 +28,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.created" + } end type thread_run_step_in_progress = @@ -45,6 +50,11 
@@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.in_progress" + } end type thread_run_step_delta = @@ -62,6 +72,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, ?event: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: :"thread.run.step.delta" + } end type thread_run_step_completed = @@ -79,6 +94,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.completed" + } end type thread_run_step_failed = @@ -96,6 +116,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.failed" + } end type thread_run_step_cancelled = @@ -113,6 +138,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.cancelled" + } end type thread_run_step_expired = @@ -130,6 +160,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.expired" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::run_step_stream_event] diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index e1c4d276..4bffb3ac 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -28,6 +28,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.created" + } end type thread_run_queued = @@ -42,6 +47,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.queued" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.queued" + } end type thread_run_in_progress = @@ -56,6 +66,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.in_progress" + } end type thread_run_requires_action = @@ -73,6 +88,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.requires_action" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.requires_action" + } end type thread_run_completed = @@ -87,6 +107,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.completed" + } end type thread_run_incomplete = @@ -101,6 +126,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.incomplete" + } end type thread_run_failed = @@ -115,6 +145,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.failed" + } end type thread_run_cancelling = 
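Every hunk in this stretch of the patch follows the same pattern: each generated model class gains a `to_hash` signature whose shape mirrors the model's declared attributes. A minimal Ruby sketch of what these signatures describe at runtime, using `AssistantToolChoice` from the first hunk above; this is an illustration rather than a hunk of the patch, and it assumes the runtime classes behave as the generated sigs state:

    require "openai"

    # Per the new signature, the result of #to_hash is now typed as
    # { type: OpenAI::Models::Beta::AssistantToolChoice::type_, function: ... }
    # rather than an untyped Hash.
    choice = OpenAI::Models::Beta::AssistantToolChoice.new(type: :code_interpreter)
    choice.to_hash # => { type: :code_interpreter }

With the return shapes spelled out, an RBS-aware checker such as Steep can verify code that consumes these hashes key by key, which a bare `to_hash` definition could not support.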
@@ -129,6 +164,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelling" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelling" + } end type thread_run_cancelled = @@ -143,6 +183,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelled" + } end type thread_run_expired = @@ -157,6 +202,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.expired" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::run_stream_event] diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs index c934342f..98eb5490 100644 --- a/sig/openai/models/beta/thread.rbs +++ b/sig/openai/models/beta/thread.rbs @@ -29,6 +29,14 @@ module OpenAI ?object: :thread ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + metadata: OpenAI::Models::metadata?, + object: :thread, + tool_resources: OpenAI::Beta::Thread::ToolResources? + } + type tool_resources = { code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter, @@ -53,6 +61,11 @@ module OpenAI ?file_search: OpenAI::Beta::Thread::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Thread::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -61,6 +74,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -71,6 +86,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index ca752e94..4dd4c103 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -80,6 +80,25 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + assistant_id: String, + instructions: String?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + parallel_tool_calls: bool, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + top_p: Float?, + truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -112,30 +131,43 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources? 
) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message], + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources? + } + type message = { - content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content, - role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role, + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, metadata: OpenAI::Models::metadata? } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content + attr_accessor content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content - attr_accessor role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role + attr_accessor role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content, - role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role, + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, ?attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, + attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, + metadata: OpenAI::Models::metadata? + } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -143,7 +175,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -156,13 +188,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -170,17 +202,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch @@ -194,9 +231,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] end end end @@ -225,6 +264,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -233,6 +277,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -257,19 +303,24 @@ module OpenAI ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy? def chunking_strategy=: ( - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? 
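The other recurring rewrite here, `OpenAI::Beta::...` becoming `OpenAI::Models::Beta::...`, touches only lowercase RBS type aliases (`type_`, `ranker`, `chunking_strategy`, and the like); references to classes keep the shorter `OpenAI::Beta` form. A hypothetical RBS sketch of that distinction, with made-up `example` and `Demo` names; the rationale in the comments is inferred from the direction of these hunks rather than stated anywhere in the patch:

    module OpenAI
      module Models
        module Beta
          type example = :a | :b

          class Demo < OpenAI::Internal::Type::BaseModel
            # Lowercase aliases are written against the module that
            # defines them, hence the fully qualified Models:: path.
            attr_accessor kind: OpenAI::Models::Beta::example

            # Class constants, by contrast, still resolve through the
            # OpenAI::Beta shorthand.
            attr_reader function: OpenAI::Beta::AssistantToolChoiceFunction?
          end
        end
      end
    end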
@@ -278,11 +329,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? + } + type chunking_strategy = OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -296,6 +353,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -314,6 +373,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -329,10 +393,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end @@ -363,6 +432,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -371,6 +445,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -381,25 +457,32 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end type truncation_strategy = { - type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, last_messages: Integer? } class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ + attr_accessor type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ attr_accessor last_messages: Integer? def initialize: ( - type: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, ?last_messages: Integer? 
) -> void + def to_hash: -> { + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + last_messages: Integer? + } + type type_ = :auto | :last_messages module Type @@ -408,7 +491,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] end end end diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index d3593936..e8d03f9c 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -30,30 +30,44 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message], + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources?, + request_options: OpenAI::RequestOptions + } + type message = { - content: OpenAI::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Beta::ThreadCreateParams::Message::role, + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, metadata: OpenAI::Models::metadata? } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::ThreadCreateParams::Message::content + attr_accessor content: OpenAI::Models::Beta::ThreadCreateParams::Message::content - attr_accessor role: OpenAI::Beta::ThreadCreateParams::Message::role + attr_accessor role: OpenAI::Models::Beta::ThreadCreateParams::Message::role attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Beta::ThreadCreateParams::Message::role, + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, ?attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, + attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, + metadata: OpenAI::Models::metadata? 
+ } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -61,7 +75,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -74,13 +88,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -88,17 +102,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch @@ -112,9 +131,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] end end end @@ -143,6 +164,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -151,6 +177,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -175,19 +203,24 @@ module OpenAI ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: 
::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? def chunking_strategy=: ( - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -196,11 +229,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? + } + type chunking_strategy = OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -214,6 +253,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -232,6 +273,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -247,10 +293,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index fa242461..c33f5354 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs index 9ca391a5..c43c132b 100644 --- a/sig/openai/models/beta/thread_deleted.rbs +++ b/sig/openai/models/beta/thread_deleted.rbs @@ -16,6 +16,8 @@ module OpenAI deleted: bool, ?object: :"thread.deleted" ) -> void + + def to_hash: -> { id: String, deleted: bool, object: :"thread.deleted" } end end end diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs 
b/sig/openai/models/beta/thread_retrieve_params.rbs index 90b81360..dd6b78cf 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs index 01af4a19..9c8af3b5 100644 --- a/sig/openai/models/beta/thread_stream_event.rbs +++ b/sig/openai/models/beta/thread_stream_event.rbs @@ -18,6 +18,12 @@ module OpenAI ?enabled: bool, ?event: :"thread.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Thread, + event: :"thread.created", + enabled: bool + } end end end diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index 98253f4e..d5d3375e 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -22,6 +22,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources?, + request_options: OpenAI::RequestOptions + } + type tool_resources = { code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, @@ -46,6 +52,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -54,6 +65,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -64,6 +77,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs index 424350aa..9b224788 100644 --- a/sig/openai/models/beta/threads/file_citation_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs @@ -30,12 +30,22 @@ module OpenAI ?type: :file_citation ) -> void + def to_hash: -> { + end_index: Integer, + file_citation: OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, + start_index: Integer, + text: String, + type: :file_citation + } + type file_citation = { file_id: String } class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs index ba8e1399..8bb06279 100644 --- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs @@ -44,6 +44,15 @@ module OpenAI ?type: :file_citation ) -> void + def to_hash: -> { + index: 
Integer, + type: :file_citation, + end_index: Integer, + file_citation: OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + start_index: Integer, + text: String + } + type file_citation = { file_id: String, quote: String } class FileCitation < OpenAI::Internal::Type::BaseModel @@ -56,6 +65,8 @@ module OpenAI def quote=: (String) -> String def initialize: (?file_id: String, ?quote: String) -> void + + def to_hash: -> { file_id: String, quote: String } end end end diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs index 95a8cd5d..95d59fc3 100644 --- a/sig/openai/models/beta/threads/file_path_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_annotation.rbs @@ -30,12 +30,22 @@ module OpenAI ?type: :file_path ) -> void + def to_hash: -> { + end_index: Integer, + file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath, + start_index: Integer, + text: String, + type: :file_path + } + type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs index c75d696a..8bac30d8 100644 --- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs @@ -44,6 +44,15 @@ module OpenAI ?type: :file_path ) -> void + def to_hash: -> { + index: Integer, + type: :file_path, + end_index: Integer, + file_path: OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, + start_index: Integer, + text: String + } + type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel @@ -52,6 +61,8 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index 38e77c68..6b59968c 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -3,22 +3,30 @@ module OpenAI module Beta module Threads type image_file = - { file_id: String, detail: OpenAI::Beta::Threads::ImageFile::detail } + { + file_id: String, + detail: OpenAI::Models::Beta::Threads::ImageFile::detail + } class ImageFile < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String - attr_reader detail: OpenAI::Beta::Threads::ImageFile::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageFile::detail? 
def detail=: ( - OpenAI::Beta::Threads::ImageFile::detail - ) -> OpenAI::Beta::Threads::ImageFile::detail + OpenAI::Models::Beta::Threads::ImageFile::detail + ) -> OpenAI::Models::Beta::Threads::ImageFile::detail def initialize: ( file_id: String, - ?detail: OpenAI::Beta::Threads::ImageFile::detail + ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail ) -> void + def to_hash: -> { + file_id: String, + detail: OpenAI::Models::Beta::Threads::ImageFile::detail + } + type detail = :auto | :low | :high module Detail @@ -28,7 +36,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFile::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFile::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs index 103b48ac..5b91259d 100644 --- a/sig/openai/models/beta/threads/image_file_content_block.rbs +++ b/sig/openai/models/beta/threads/image_file_content_block.rbs @@ -14,6 +14,11 @@ module OpenAI image_file: OpenAI::Beta::Threads::ImageFile, ?type: :image_file ) -> void + + def to_hash: -> { + image_file: OpenAI::Beta::Threads::ImageFile, + type: :image_file + } end end end diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 5ed4a435..2f0784dd 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -4,26 +4,31 @@ module OpenAI module Threads type image_file_delta = { - detail: OpenAI::Beta::Threads::ImageFileDelta::detail, + detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, file_id: String } class ImageFileDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Beta::Threads::ImageFileDelta::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail? def detail=: ( - OpenAI::Beta::Threads::ImageFileDelta::detail - ) -> OpenAI::Beta::Threads::ImageFileDelta::detail + OpenAI::Models::Beta::Threads::ImageFileDelta::detail + ) -> OpenAI::Models::Beta::Threads::ImageFileDelta::detail attr_reader file_id: String? 
def file_id=: (String) -> String def initialize: ( - ?detail: OpenAI::Beta::Threads::ImageFileDelta::detail, + ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, ?file_id: String ) -> void + def to_hash: -> { + detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, + file_id: String + } + type detail = :auto | :low | :high module Detail @@ -33,7 +38,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFileDelta::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs index 7aca7085..e753fcc9 100644 --- a/sig/openai/models/beta/threads/image_file_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?image_file: OpenAI::Beta::Threads::ImageFileDelta, ?type: :image_file ) -> void + + def to_hash: -> { + index: Integer, + type: :image_file, + image_file: OpenAI::Beta::Threads::ImageFileDelta + } end end end diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 8808afc1..d0a0d066 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -3,22 +3,30 @@ module OpenAI module Beta module Threads type image_url = - { url: String, detail: OpenAI::Beta::Threads::ImageURL::detail } + { + url: String, + detail: OpenAI::Models::Beta::Threads::ImageURL::detail + } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Beta::Threads::ImageURL::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageURL::detail? 
def detail=: ( - OpenAI::Beta::Threads::ImageURL::detail - ) -> OpenAI::Beta::Threads::ImageURL::detail + OpenAI::Models::Beta::Threads::ImageURL::detail + ) -> OpenAI::Models::Beta::Threads::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Beta::Threads::ImageURL::detail + ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail ) -> void + def to_hash: -> { + url: String, + detail: OpenAI::Models::Beta::Threads::ImageURL::detail + } + type detail = :auto | :low | :high module Detail @@ -28,7 +36,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURL::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs index 445e5239..8cc69592 100644 --- a/sig/openai/models/beta/threads/image_url_content_block.rbs +++ b/sig/openai/models/beta/threads/image_url_content_block.rbs @@ -14,6 +14,11 @@ module OpenAI image_url: OpenAI::Beta::Threads::ImageURL, ?type: :image_url ) -> void + + def to_hash: -> { + image_url: OpenAI::Beta::Threads::ImageURL, + type: :image_url + } end end end diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index 54d6425a..2c0721e9 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -3,24 +3,32 @@ module OpenAI module Beta module Threads type image_url_delta = - { detail: OpenAI::Beta::Threads::ImageURLDelta::detail, url: String } + { + detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + url: String + } class ImageURLDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Beta::Threads::ImageURLDelta::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail? def detail=: ( - OpenAI::Beta::Threads::ImageURLDelta::detail - ) -> OpenAI::Beta::Threads::ImageURLDelta::detail + OpenAI::Models::Beta::Threads::ImageURLDelta::detail + ) -> OpenAI::Models::Beta::Threads::ImageURLDelta::detail attr_reader url: String? 
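The `ImageURL` hunks above show the two changes together: the `detail` alias (`:auto | :low | :high`) moves under `OpenAI::Models::`, and the new `to_hash` signature reports it alongside `url`. A usage sketch, again illustrative rather than part of the patch; the URL is a placeholder, and runtime behavior is assumed to match the sigs:

    require "openai"

    image = OpenAI::Models::Beta::Threads::ImageURL.new(
      url: "https://example.com/diagram.png", # placeholder URL
      detail: :high
    )
    image.to_hash
    # => { url: "https://example.com/diagram.png", detail: :high }

    # The enum module also exposes the permitted values, per its sig.
    OpenAI::Models::Beta::Threads::ImageURL::Detail.values
    # => [:auto, :low, :high]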
def url=: (String) -> String def initialize: ( - ?detail: OpenAI::Beta::Threads::ImageURLDelta::detail, + ?detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, ?url: String ) -> void + def to_hash: -> { + detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + url: String + } + type detail = :auto | :low | :high module Detail @@ -30,7 +38,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURLDelta::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs index d3375983..a0fc6366 100644 --- a/sig/openai/models/beta/threads/image_url_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?image_url: OpenAI::Beta::Threads::ImageURLDelta, ?type: :image_url ) -> void + + def to_hash: -> { + index: Integer, + type: :image_url, + image_url: OpenAI::Beta::Threads::ImageURLDelta + } end end end diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index 10c9bca6..0aa6f377 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -14,9 +14,9 @@ module OpenAI incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, object: :"thread.message", - role: OpenAI::Beta::Threads::Message::role, + role: OpenAI::Models::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Beta::Threads::Message::status, + status: OpenAI::Models::Beta::Threads::Message::status, thread_id: String } @@ -41,11 +41,11 @@ module OpenAI attr_accessor object: :"thread.message" - attr_accessor role: OpenAI::Beta::Threads::Message::role + attr_accessor role: OpenAI::Models::Beta::Threads::Message::role attr_accessor run_id: String? - attr_accessor status: OpenAI::Beta::Threads::Message::status + attr_accessor status: OpenAI::Models::Beta::Threads::Message::status attr_accessor thread_id: String @@ -59,17 +59,34 @@ module OpenAI incomplete_at: Integer?, incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, - role: OpenAI::Beta::Threads::Message::role, + role: OpenAI::Models::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Beta::Threads::Message::status, + status: OpenAI::Models::Beta::Threads::Message::status, thread_id: String, ?object: :"thread.message" ) -> void + def to_hash: -> { + id: String, + assistant_id: String?, + attachments: ::Array[OpenAI::Beta::Threads::Message::Attachment]?, + completed_at: Integer?, + content: ::Array[OpenAI::Models::Beta::Threads::message_content], + created_at: Integer, + incomplete_at: Integer?, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, + metadata: OpenAI::Models::metadata?, + object: :"thread.message", + role: OpenAI::Models::Beta::Threads::Message::role, + run_id: String?, + status: OpenAI::Models::Beta::Threads::Message::status, + thread_id: String + } + type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -77,17 +94,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool]? 
+ attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly @@ -102,24 +124,30 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] end end type incomplete_details = { - reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_accessor reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + attr_accessor reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason def initialize: ( - reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + } + type reason = :content_filter | :max_tokens @@ -136,7 +164,7 @@ module OpenAI RUN_EXPIRED: :run_expired RUN_FAILED: :run_failed - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason] end end @@ -148,7 +176,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::role] end type status = :in_progress | :incomplete | :completed @@ -160,7 +188,7 @@ module OpenAI INCOMPLETE: :incomplete COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::status] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::status] end end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 48d32702..c1229739 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -31,6 +31,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, + attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]?, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -57,7 +65,7 @@ module OpenAI type attachment = { file_id: String, - tools: 
::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -65,17 +73,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch @@ -89,9 +102,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] end end end diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index 9edbd8b5..50109473 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/message_deleted.rbs b/sig/openai/models/beta/threads/message_deleted.rbs index d3b0da8c..5e95bfc4 100644 --- a/sig/openai/models/beta/threads/message_deleted.rbs +++ b/sig/openai/models/beta/threads/message_deleted.rbs @@ -17,6 +17,12 @@ module OpenAI deleted: bool, ?object: :"thread.message.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"thread.message.deleted" + } end end end diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index d953aeb6..a7f93140 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -5,7 +5,7 @@ module OpenAI type message_delta = { content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - role: OpenAI::Beta::Threads::MessageDelta::role + role: OpenAI::Models::Beta::Threads::MessageDelta::role } class MessageDelta < OpenAI::Internal::Type::BaseModel @@ -15,17 +15,22 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::message_content_delta] ) -> ::Array[OpenAI::Models::Beta::Threads::message_content_delta] - attr_reader role: OpenAI::Beta::Threads::MessageDelta::role? + attr_reader role: OpenAI::Models::Beta::Threads::MessageDelta::role? 
def role=: ( - OpenAI::Beta::Threads::MessageDelta::role - ) -> OpenAI::Beta::Threads::MessageDelta::role + OpenAI::Models::Beta::Threads::MessageDelta::role + ) -> OpenAI::Models::Beta::Threads::MessageDelta::role def initialize: ( ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - ?role: OpenAI::Beta::Threads::MessageDelta::role + ?role: OpenAI::Models::Beta::Threads::MessageDelta::role ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], + role: OpenAI::Models::Beta::Threads::MessageDelta::role + } + type role = :user | :assistant module Role @@ -34,7 +39,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::MessageDelta::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageDelta::role] end end end diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs index d84d3446..8068a35a 100644 --- a/sig/openai/models/beta/threads/message_delta_event.rbs +++ b/sig/openai/models/beta/threads/message_delta_event.rbs @@ -21,6 +21,12 @@ module OpenAI delta: OpenAI::Beta::Threads::MessageDelta, ?object: :"thread.message.delta" ) -> void + + def to_hash: -> { + id: String, + delta: OpenAI::Beta::Threads::MessageDelta, + object: :"thread.message.delta" + } end end end diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index eede9b56..32254799 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -47,6 +47,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::Beta::Threads::MessageListParams::order, + run_id: String, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index 847e3c4c..b0dc3313 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index 38806f27..a4a5cb18 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs index 98c6625d..815e53d2 100644 --- a/sig/openai/models/beta/threads/refusal_content_block.rbs +++ b/sig/openai/models/beta/threads/refusal_content_block.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git 
a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs index c6f86524..e38a21fa 100644 --- a/sig/openai/models/beta/threads/refusal_delta_block.rbs +++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs @@ -19,6 +19,8 @@ module OpenAI ?refusal: String, ?type: :refusal ) -> void + + def to_hash: -> { index: Integer, type: :refusal, refusal: String } end end end diff --git a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs index 5fcd15c2..8f6d8be6 100644 --- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs +++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs @@ -22,6 +22,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, + type: :function + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -30,6 +36,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 0d4ed055..92f30e03 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -118,20 +118,56 @@ module OpenAI ?object: :"thread.run" ) -> void + def to_hash: -> { + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expires_at: Integer?, + failed_at: Integer?, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails?, + instructions: String, + last_error: OpenAI::Beta::Threads::Run::LastError?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: String, + object: :"thread.run", + parallel_tool_calls: bool, + required_action: OpenAI::Beta::Threads::Run::RequiredAction?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + started_at: Integer?, + status: OpenAI::Models::Beta::Threads::run_status, + thread_id: String, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + usage: OpenAI::Beta::Threads::Run::Usage?, + temperature: Float?, + top_p: Float? + } + type incomplete_details = - { reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason } + { + reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_reader reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason? + attr_reader reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason? 
def reason=: ( - OpenAI::Beta::Threads::Run::IncompleteDetails::reason - ) -> OpenAI::Beta::Threads::Run::IncompleteDetails::reason + OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + ) -> OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason + ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + } + type reason = :max_completion_tokens | :max_prompt_tokens module Reason @@ -140,26 +176,31 @@ module OpenAI MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason] end end type last_error = { - code: OpenAI::Beta::Threads::Run::LastError::code, + code: OpenAI::Models::Beta::Threads::Run::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Beta::Threads::Run::LastError::code + attr_accessor code: OpenAI::Models::Beta::Threads::Run::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Beta::Threads::Run::LastError::code, + code: OpenAI::Models::Beta::Threads::Run::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Beta::Threads::Run::LastError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded | :invalid_prompt module Code @@ -169,7 +210,7 @@ module OpenAI RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::LastError::code] end end @@ -189,6 +230,11 @@ module OpenAI ?type: :submit_tool_outputs ) -> void + def to_hash: -> { + submit_tool_outputs: OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + type: :submit_tool_outputs + } + type submit_tool_outputs = { tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] @@ -200,25 +246,34 @@ module OpenAI def initialize: ( tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] ) -> void + + def to_hash: -> { + tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] + } end end type truncation_strategy = { - type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, last_messages: Integer? } class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_ + attr_accessor type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_ attr_accessor last_messages: Integer? def initialize: ( - type: OpenAI::Beta::Threads::Run::TruncationStrategy::type_, + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, ?last_messages: Integer? ) -> void + def to_hash: -> { + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + last_messages: Integer? 
+ } + type type_ = :auto | :last_messages module Type @@ -227,7 +282,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_] end end @@ -250,6 +305,12 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 03525bb2..d96641a6 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index ab0e1ca8..a9c8e394 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -89,30 +89,58 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + assistant_id: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + additional_instructions: String?, + additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, + instructions: String?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, + parallel_tool_calls: bool, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + top_p: Float?, + truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, + request_options: OpenAI::RequestOptions + } + type additional_message = { - content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, metadata: OpenAI::Models::metadata? } class AdditionalMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content + attr_accessor content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content - attr_accessor role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role + attr_accessor role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role attr_accessor attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? 
def initialize: ( - content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, ?attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, + attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, + metadata: OpenAI::Models::metadata? + } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -120,7 +148,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -133,13 +161,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -147,17 +175,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch @@ -171,9 +204,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] end end end @@ -188,20 +223,25 @@ module OpenAI type truncation_strategy = { - type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, last_messages: Integer? } class TruncationStrategy < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_ + attr_accessor type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_ attr_accessor last_messages: Integer? def initialize: ( - type: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, ?last_messages: Integer? ) -> void + def to_hash: -> { + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + last_messages: Integer? 
+ } + type type_ = :auto | :last_messages module Type @@ -210,7 +250,7 @@ module OpenAI AUTO: :auto LAST_MESSAGES: :last_messages - def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_] end end end diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index de898e20..fa76718f 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::Beta::Threads::RunListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index c9efe99b..d100bece 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 4e87b641..8d499043 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -23,6 +23,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + thread_id: String, + tool_outputs: ::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + request_options: OpenAI::RequestOptions + } + type tool_output = { output: String, tool_call_id: String } class ToolOutput < OpenAI::Internal::Type::BaseModel @@ -35,6 +41,8 @@ module OpenAI def tool_call_id=: (String) -> String def initialize: (?output: String, ?tool_call_id: String) -> void + + def to_hash: -> { output: String, tool_call_id: String } end end end diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index 28ff20c9..664db83c 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs index 35175f5b..31b1e593 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs @@ -20,6 +20,8 @@ module OpenAI ?logs: String, ?type: :logs ) -> void + + def to_hash: -> { index: Integer, type: :logs, logs: String } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs index a90448ca..012b6c3f 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs +++ 
b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs @@ -27,6 +27,12 @@ module OpenAI ?type: :image ) -> void + def to_hash: -> { + index: Integer, + type: :image, + image: OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + } + type image = { file_id: String } class Image < OpenAI::Internal::Type::BaseModel @@ -35,6 +41,8 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index a5051e0b..68ed586a 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -23,22 +23,33 @@ module OpenAI ?type: :code_interpreter ) -> void + def to_hash: -> { + id: String, + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + type: :code_interpreter + } + type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_accessor input: String - attr_accessor outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + attr_accessor outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] def initialize: ( input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] ) -> void + def to_hash: -> { + input: String, + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + } + type output = OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs | OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image @@ -54,6 +65,8 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) -> void + + def to_hash: -> { logs: String, type: :logs } end type image = @@ -72,16 +85,23 @@ module OpenAI ?type: :image ) -> void + def to_hash: -> { + image: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + type: :image + } + type image = { file_id: String } class Image < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index e15f351d..650b7203 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -33,10 +33,17 @@ module OpenAI ?type: :code_interpreter ) -> void + def to_hash: -> { + index: Integer, + type: :code_interpreter, + id: String, + code_interpreter: 
OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + } + type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -44,17 +51,22 @@ module OpenAI def input=: (String) -> String - attr_reader outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? + attr_reader outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? def outputs=: ( - ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] - ) -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] def initialize: ( ?input: String, - ?outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] ) -> void + def to_hash: -> { + input: String, + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + } + type output = OpenAI::Beta::Threads::Runs::CodeInterpreterLogs | OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage @@ -62,7 +74,7 @@ module OpenAI module Output extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index 5c49e956..ca0f5a8e 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -23,6 +23,12 @@ module OpenAI ?type: :file_search ) -> void + def to_hash: -> { + id: String, + file_search: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + type: :file_search + } + type file_search = { ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, @@ -47,22 +53,32 @@ module OpenAI ?results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] ) -> void + def to_hash: -> { + ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + } + type ranking_options = { - ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_accessor ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker + attr_accessor ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker attr_accessor score_threshold: Float def initialize: ( - ranker: 
OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float ) -> void + def to_hash: -> { + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + score_threshold: Float + } + type ranker = :auto | :default_2024_08_21 module Ranker @@ -71,7 +87,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] end end @@ -103,10 +119,17 @@ module OpenAI ?content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] ) -> void + def to_hash: -> { + file_id: String, + file_name: String, + score: Float, + content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + } + type content = { text: String, - type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ } class Content < OpenAI::Internal::Type::BaseModel @@ -114,17 +137,22 @@ module OpenAI def text=: (String) -> String - attr_reader type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? + attr_reader type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? def type=: ( - OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ - ) -> OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ) -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ def initialize: ( ?text: String, - ?type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ ) -> void + def to_hash: -> { + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + } + type type_ = :text module Type @@ -132,7 +160,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs index 15b045e3..ebac80d2 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs @@ -23,6 +23,13 @@ module OpenAI ?id: String, ?type: :file_search ) -> void + + def to_hash: -> { + file_search: top, + index: Integer, + type: :file_search, + id: String + } end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs index cb76a0f6..aa424fe9 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs @@ 
-23,6 +23,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, + type: :function + } + type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -37,6 +43,12 @@ module OpenAI name: String, output: String? ) -> void + + def to_hash: -> { + arguments: String, + name: String, + output: String? + } end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs index 44cf047c..2955d8c3 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs @@ -33,6 +33,13 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + index: Integer, + type: :function, + id: String, + function: OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function + } + type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -51,6 +58,12 @@ module OpenAI ?name: String, ?output: String? ) -> void + + def to_hash: -> { + arguments: String, + name: String, + output: String? + } end end end diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs index 0ec0c19f..6cfec2be 100644 --- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs @@ -19,12 +19,19 @@ module OpenAI ?type: :message_creation ) -> void + def to_hash: -> { + message_creation: OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + type: :message_creation + } + type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel attr_accessor message_id: String def initialize: (message_id: String) -> void + + def to_hash: -> { message_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 164ba811..f58bd943 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -18,10 +18,10 @@ module OpenAI metadata: OpenAI::Models::metadata?, object: :"thread.run.step", run_id: String, - status: OpenAI::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Beta::Threads::Runs::RunStep::type_, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? } @@ -48,13 +48,13 @@ module OpenAI attr_accessor run_id: String - attr_accessor status: OpenAI::Beta::Threads::Runs::RunStep::status + attr_accessor status: OpenAI::Models::Beta::Threads::Runs::RunStep::status - attr_accessor step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details + attr_accessor step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details attr_accessor thread_id: String - attr_accessor type: OpenAI::Beta::Threads::Runs::RunStep::type_ + attr_accessor type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_ attr_accessor usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? 
@@ -69,30 +69,54 @@ module OpenAI last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, metadata: OpenAI::Models::metadata?, run_id: String, - status: OpenAI::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Beta::Threads::Runs::RunStep::type_, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, usage: OpenAI::Beta::Threads::Runs::RunStep::Usage?, ?object: :"thread.run.step" ) -> void + def to_hash: -> { + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expired_at: Integer?, + failed_at: Integer?, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, + metadata: OpenAI::Models::metadata?, + object: :"thread.run.step", + run_id: String, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, + thread_id: String, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? + } + type last_error = { - code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code + attr_accessor code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded module Code @@ -101,7 +125,7 @@ module OpenAI SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code] end end @@ -117,7 +141,7 @@ module OpenAI COMPLETED: :completed EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::status] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::status] end type step_details = @@ -127,7 +151,7 @@ module OpenAI module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::step_details] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::step_details] end type type_ = :message_creation | :tool_calls @@ -138,7 +162,7 @@ module OpenAI MESSAGE_CREATION: :message_creation TOOL_CALLS: :tool_calls - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::type_] end type usage = @@ -160,6 +184,12 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index cd977802..010d9373 100644 --- 
a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -7,20 +7,24 @@ module OpenAI module Runs type run_step_delta = { - step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details } class RunStepDelta < OpenAI::Internal::Type::BaseModel - attr_reader step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details? + attr_reader step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details? def step_details=: ( - OpenAI::Beta::Threads::Runs::RunStepDelta::step_details - ) -> OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details def initialize: ( - ?step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details ) -> void + def to_hash: -> { + step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + } + type step_details = OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta | OpenAI::Beta::Threads::Runs::ToolCallDeltaObject @@ -28,7 +32,7 @@ module OpenAI module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStepDelta::step_details] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details] end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index a0e9f5e8..70ca4c44 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -24,6 +24,12 @@ module OpenAI delta: OpenAI::Beta::Threads::Runs::RunStepDelta, ?object: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + id: String, + delta: OpenAI::Beta::Threads::Runs::RunStepDelta, + object: :"thread.run.step.delta" + } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index dae61d3d..0bb8a0bb 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -25,6 +25,11 @@ module OpenAI ?type: :message_creation ) -> void + def to_hash: -> { + type: :message_creation, + message_creation: OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + } + type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -33,6 +38,8 @@ module OpenAI def message_id=: (String) -> String def initialize: (?message_id: String) -> void + + def to_hash: -> { message_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index 786d087f..2eb9aae2 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -54,6 +54,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + thread_id: String, + after: String, + before: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + limit: Integer, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, + 
request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index 414a4b61..94750c9f 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -31,6 +31,13 @@ module OpenAI ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + run_id: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs index 2b6aefef..758dd0c0 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs @@ -22,6 +22,11 @@ module OpenAI ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta], ?type: :tool_calls ) -> void + + def to_hash: -> { + type: :tool_calls, + tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta] + } end end end diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs index b5aaf17e..91baa731 100644 --- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs @@ -18,6 +18,11 @@ module OpenAI tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], ?type: :tool_calls ) -> void + + def to_hash: -> { + tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], + type: :tool_calls + } end end end diff --git a/sig/openai/models/beta/threads/text.rbs b/sig/openai/models/beta/threads/text.rbs index a6585bbd..faff33c1 100644 --- a/sig/openai/models/beta/threads/text.rbs +++ b/sig/openai/models/beta/threads/text.rbs @@ -17,6 +17,11 @@ module OpenAI annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], value: String ) -> void + + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], + value: String + } end end end diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs index 5192465f..16343075 100644 --- a/sig/openai/models/beta/threads/text_content_block.rbs +++ b/sig/openai/models/beta/threads/text_content_block.rbs @@ -14,6 +14,8 @@ module OpenAI text: OpenAI::Beta::Threads::Text, ?type: :text ) -> void + + def to_hash: -> { text: OpenAI::Beta::Threads::Text, type: :text } end end end diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs index 7b4be77b..2805c546 100644 --- a/sig/openai/models/beta/threads/text_content_block_param.rbs +++ b/sig/openai/models/beta/threads/text_content_block_param.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: String, type: :text } end end end diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs index 9bad2d71..fe100222 100644 --- a/sig/openai/models/beta/threads/text_delta.rbs +++ b/sig/openai/models/beta/threads/text_delta.rbs @@ -23,6 +23,11 @@ module OpenAI 
?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], ?value: String ) -> void + + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], + value: String + } end end end diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs index 261d3fc6..5db737c3 100644 --- a/sig/openai/models/beta/threads/text_delta_block.rbs +++ b/sig/openai/models/beta/threads/text_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?text: OpenAI::Beta::Threads::TextDelta, ?type: :text ) -> void + + def to_hash: -> { + index: Integer, + type: :text, + text: OpenAI::Beta::Threads::TextDelta + } end end end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index b717a8ba..e66a399d 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -10,7 +10,7 @@ module OpenAI created: Integer, model: String, object: :"chat.completion", - service_tier: OpenAI::Chat::ChatCompletion::service_tier?, + service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, system_fingerprint: String, usage: OpenAI::CompletionUsage } @@ -26,7 +26,7 @@ module OpenAI attr_accessor object: :"chat.completion" - attr_accessor service_tier: OpenAI::Chat::ChatCompletion::service_tier? + attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier? attr_reader system_fingerprint: String? @@ -41,22 +41,33 @@ module OpenAI choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Chat::ChatCompletion::service_tier?, + ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, ?system_fingerprint: String, ?usage: OpenAI::CompletionUsage, ?object: :"chat.completion" ) -> void + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], + created: Integer, + model: String, + object: :"chat.completion", + service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } + type choice = { - finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, index: Integer, logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, message: OpenAI::Chat::ChatCompletionMessage } class Choice < OpenAI::Internal::Type::BaseModel - attr_accessor finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason + attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason attr_accessor index: Integer @@ -65,12 +76,19 @@ module OpenAI attr_accessor message: OpenAI::Chat::ChatCompletionMessage def initialize: ( - finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, index: Integer, logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, message: OpenAI::Chat::ChatCompletionMessage ) -> void + def to_hash: -> { + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, + index: Integer, + logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, + message: OpenAI::Chat::ChatCompletionMessage + } + type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call @@ -83,7 +101,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self?.values: -> 
::Array[OpenAI::Chat::ChatCompletion::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason] end type logprobs = @@ -101,6 +119,11 @@ module OpenAI content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? + } end end @@ -113,7 +136,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletion::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index bf03ff8c..afc4b011 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -7,7 +7,7 @@ module OpenAI { role: :assistant, audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, - content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, name: String, refusal: String?, @@ -19,7 +19,7 @@ module OpenAI attr_accessor audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio? - attr_accessor content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content? + attr_accessor content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content? attr_accessor function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall? 
@@ -37,7 +37,7 @@ module OpenAI def initialize: ( ?audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, - ?content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, ?function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, ?name: String, ?refusal: String?, @@ -45,17 +45,29 @@ module OpenAI ?role: :assistant ) -> void + def to_hash: -> { + role: :assistant, + audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, + content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, + function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, + name: String, + refusal: String?, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + type audio = { id: String } class Audio < OpenAI::Internal::Type::BaseModel attr_accessor id: String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end type content = String - | ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] module Content extend OpenAI::Internal::Type::Union @@ -67,10 +79,10 @@ module OpenAI module ArrayOfContentPart extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] end - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content] ArrayOfContentPartArray: OpenAI::Internal::Type::Converter end @@ -83,6 +95,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index 596be96d..e1cbcccc 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -21,6 +21,13 @@ module OpenAI expires_at: Integer, transcript: String ) -> void + + def to_hash: -> { + id: String, + data: String, + expires_at: Integer, + transcript: String + } end end end diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index b326faf7..d4689dfe 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -5,20 +5,25 @@ module OpenAI module Chat type chat_completion_audio_param = { - format_: OpenAI::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Chat::ChatCompletionAudioParam::voice + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice } class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel - attr_accessor format_: OpenAI::Chat::ChatCompletionAudioParam::format_ + attr_accessor format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_ - attr_accessor voice: OpenAI::Chat::ChatCompletionAudioParam::voice + attr_accessor voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice def initialize: ( - format_: 
OpenAI::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Chat::ChatCompletionAudioParam::voice + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice ) -> void + def to_hash: -> { + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + } + type format_ = :wav | :aac | :mp3 | :flac | :opus | :pcm16 module Format @@ -31,7 +36,7 @@ module OpenAI OPUS: :opus PCM16: :pcm16 - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::format_] end type voice = @@ -51,7 +56,7 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::voice] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::voice] ALLOY: :alloy ASH: :ash diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 2a451ed9..0e7acf36 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -10,7 +10,7 @@ module OpenAI created: Integer, model: String, object: :"chat.completion.chunk", - service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, + service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, usage: OpenAI::CompletionUsage? } @@ -26,7 +26,7 @@ module OpenAI attr_accessor object: :"chat.completion.chunk" - attr_accessor service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier? + attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier? attr_reader system_fingerprint: String? @@ -39,16 +39,27 @@ module OpenAI choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, + ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, ?system_fingerprint: String, ?usage: OpenAI::CompletionUsage?, ?object: :"chat.completion.chunk" ) -> void + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], + created: Integer, + model: String, + object: :"chat.completion.chunk", + service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, + system_fingerprint: String, + usage: OpenAI::CompletionUsage? + } + type choice = { delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? } @@ -56,7 +67,7 @@ module OpenAI class Choice < OpenAI::Internal::Type::BaseModel attr_accessor delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta - attr_accessor finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason? + attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason? attr_accessor index: Integer @@ -64,17 +75,24 @@ module OpenAI def initialize: ( delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, ?logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? 
) -> void + def to_hash: -> { + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, + index: Integer, + logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? + } + type delta = { content: String?, function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: String?, - role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } @@ -89,11 +107,11 @@ module OpenAI attr_accessor refusal: String? - attr_reader role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role? + attr_reader role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role? def role=: ( - OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role - ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role + OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role + ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]? @@ -105,10 +123,18 @@ module OpenAI ?content: String?, ?function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, ?refusal: String?, - ?role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) -> void + def to_hash: -> { + content: String?, + function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + refusal: String?, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + } + type function_call = { arguments: String, name: String } class FunctionCall < OpenAI::Internal::Type::BaseModel @@ -121,6 +147,8 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, ?name: String) -> void + + def to_hash: -> { arguments: String, name: String } end type role = :developer | :system | :user | :assistant | :tool @@ -134,7 +162,7 @@ module OpenAI ASSISTANT: :assistant TOOL: :tool - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role] end type tool_call = @@ -142,7 +170,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ } class ToolCall < OpenAI::Internal::Type::BaseModel @@ -158,19 +186,26 @@ module OpenAI OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function - attr_reader type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? + attr_reader type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? 
def type=: ( - OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ - ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ def initialize: ( index: Integer, ?id: String, ?function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - ?type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ ) -> void + def to_hash: -> { + index: Integer, + id: String, + function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -183,6 +218,8 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, ?name: String) -> void + + def to_hash: -> { arguments: String, name: String } end type type_ = :function @@ -192,7 +229,7 @@ module OpenAI FUNCTION: :function - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] end end end @@ -209,7 +246,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason] end type logprobs = @@ -227,6 +264,11 @@ module OpenAI content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? 
+ } end end @@ -239,7 +281,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 77010098..c0750762 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -28,6 +28,11 @@ module OpenAI ?type: :file ) -> void + def to_hash: -> { + file: OpenAI::Chat::ChatCompletionContentPart::File::File, + type: :file + } + type file = { file_data: String, file_id: String, filename: String } class File < OpenAI::Internal::Type::BaseModel @@ -48,6 +53,12 @@ module OpenAI ?file_id: String, ?filename: String ) -> void + + def to_hash: -> { + file_data: String, + file_id: String, + filename: String + } end end diff --git a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index f7088321..c8ae374c 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -19,26 +19,36 @@ module OpenAI ?type: :image_url ) -> void + def to_hash: -> { + image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, + type: :image_url + } + type image_url = { url: String, - detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail? + attr_reader detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail? 
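Note on the ImageURL signatures above: a hedged construction sketch following the initialize signature as typed (the URL is a placeholder; `detail` takes one of the enum values defined just below):

    part = OpenAI::Chat::ChatCompletionContentPartImage.new(
      image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL.new(
        url: "https://example.com/cat.png", # placeholder
        detail: :low                        # :auto | :low | :high
      )
    )
    part.to_hash # keyed by :image_url and :type, per the to_hash signature above
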
def detail=: ( - OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail - ) -> OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + ) -> OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail ) -> void + def to_hash: -> { + url: String, + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + } + type detail = :auto | :low | :high module Detail @@ -48,7 +58,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index 85d1abea..e2818299 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -19,22 +19,32 @@ module OpenAI ?type: :input_audio ) -> void + def to_hash: -> { + input_audio: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, + type: :input_audio + } + type input_audio = { data: String, - format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ } class InputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String - attr_accessor format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + attr_accessor format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ def initialize: ( data: String, - format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ ) -> void + def to_hash: -> { + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + } + type format_ = :wav | :mp3 module Format @@ -43,7 +53,7 @@ module OpenAI WAV: :wav MP3: :mp3 - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs index 9845f993..3511c222 100644 --- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs @@ -12,6 +12,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs index 799d8e14..0581e14c 100644 --- a/sig/openai/models/chat/chat_completion_content_part_text.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: 
String, type: :text } end end end diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs index eb9ae980..c37e21de 100644 --- a/sig/openai/models/chat/chat_completion_deleted.rbs +++ b/sig/openai/models/chat/chat_completion_deleted.rbs @@ -18,6 +18,12 @@ module OpenAI deleted: bool, ?object: :"chat.completion.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"chat.completion.deleted" + } end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index ecf27a5d..3333b2ce 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_developer_message_param = { - content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, role: :developer, name: String } class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content attr_accessor role: :developer @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, ?name: String, ?role: :developer ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, + role: :developer, + name: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionDeveloperMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs index b18d80c5..5ce652cc 100644 --- a/sig/openai/models/chat/chat_completion_function_call_option.rbs +++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs index 17944160..4bb967bc 100644 --- a/sig/openai/models/chat/chat_completion_function_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs @@ -18,6 +18,8 @@ module OpenAI name: String, ?role: :function ) -> void + + def to_hash: -> { content: String?, name: String, role: :function } end end end diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index bea71029..7225d0a0 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ b/sig/openai/models/chat/chat_completion_message.rbs @@ -51,6 +51,16 @@ module OpenAI ?role: :assistant ) -> void + def to_hash: -> { + content: String?, + refusal: String?, + role: :assistant, + 
annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], + audio: OpenAI::Chat::ChatCompletionAudio?, + function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + type annotation = { type: :url_citation, @@ -67,6 +77,11 @@ module OpenAI ?type: :url_citation ) -> void + def to_hash: -> { + type: :url_citation, + url_citation: OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation + } + type url_citation = { end_index: Integer, @@ -90,6 +105,13 @@ module OpenAI title: String, url: String ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + url: String + } end end @@ -101,6 +123,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index d15d52bf..c787ea9c 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -23,6 +23,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, + type: :function + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -31,6 +37,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs index 38e0aeb5..50395d8f 100644 --- a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs +++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs @@ -19,12 +19,19 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function, + type: :function + } + type function = { name: String } class Function < OpenAI::Internal::Type::BaseModel attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index 65a4a7a6..c52fc481 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -5,27 +5,32 @@ module OpenAI module Chat type chat_completion_prediction_content = { - content: OpenAI::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, type: :content } class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionPredictionContent::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content attr_accessor type: :content def initialize: ( - content: OpenAI::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, ?type: :content ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, + type: :content + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] 
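Note on the `content` union above: it accepts either a plain String or an array of text parts, so both of the following construct the same predicted-output wrapper (the example text is throwaway):

    plain = OpenAI::Chat::ChatCompletionPredictionContent.new(
      content: "def add(a, b) = a + b"
    )
    parts = OpenAI::Chat::ChatCompletionPredictionContent.new(
      content: [OpenAI::Chat::ChatCompletionContentPartText.new(text: "def add(a, b) = a + b")]
    )
    plain.to_hash # => roughly { content: "def add(a, b) = a + b", type: :content }
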
module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionPredictionContent::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionPredictionContent::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 13440ab9..7b236cff 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -11,6 +11,8 @@ module OpenAI def id=: (String _) -> String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end end end diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index 4b0267d1..6905d394 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -11,6 +11,8 @@ module OpenAI def include_usage=: (bool) -> bool def initialize: (?include_usage: bool) -> void + + def to_hash: -> { include_usage: bool } end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index 067f582c..e1cd80e9 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_system_message_param = { - content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, role: :system, name: String } class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionSystemMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content attr_accessor role: :system @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, ?name: String, ?role: :system ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, + role: :system, + name: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionSystemMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs index f2bd18a1..3ad85411 100644 --- a/sig/openai/models/chat/chat_completion_token_logprob.rbs +++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs @@ -27,6 +27,13 @@ module OpenAI top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] ) -> void + def to_hash: -> { + token: String, + bytes: ::Array[Integer]?, + logprob: Float, + top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] + } + type top_logprob = { token: String, bytes: ::Array[Integer]?, logprob: Float } @@ -42,6 +49,12 @@ module OpenAI bytes: 
::Array[Integer]?, logprob: Float ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer]?, + logprob: Float + } end end end diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index e5d54e1b..23153c68 100644 --- a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -15,6 +15,11 @@ module OpenAI function: OpenAI::FunctionDefinition, ?type: :function ) -> void + + def to_hash: -> { + function: OpenAI::FunctionDefinition, + type: :function + } end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index a5c2df81..523db9a4 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -4,7 +4,7 @@ module OpenAI module Chat type chat_completion_tool_choice_option = - OpenAI::Chat::ChatCompletionToolChoiceOption::auto + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto | OpenAI::Chat::ChatCompletionNamedToolChoice module ChatCompletionToolChoiceOption @@ -19,7 +19,7 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] end def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_tool_choice_option] diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 21997531..10c157ab 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -5,31 +5,37 @@ module OpenAI module Chat type chat_completion_tool_message_param = { - content: OpenAI::Chat::ChatCompletionToolMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, role: :tool, tool_call_id: String } class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionToolMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content attr_accessor role: :tool attr_accessor tool_call_id: String def initialize: ( - content: OpenAI::Chat::ChatCompletionToolMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, tool_call_id: String, ?role: :tool ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, + role: :tool, + tool_call_id: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionToolMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index 9bc5d59c..a021b82a 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_user_message_param = { - content: 
OpenAI::Chat::ChatCompletionUserMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, role: :user, name: String } class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionUserMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content attr_accessor role: :user @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Chat::ChatCompletionUserMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, ?name: String, ?role: :user ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, + role: :user, + name: String + } + type content = String | ::Array[OpenAI::Models::Chat::chat_completion_content_part] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionUserMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionUserMessageParam::content] ChatCompletionContentPartArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 05054037..0f3ad11e 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -14,7 +14,7 @@ module OpenAI max_completion_tokens: Integer?, max_tokens: Integer?, metadata: OpenAI::Models::metadata?, - modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, n: Integer?, parallel_tool_calls: bool, prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -70,7 +70,7 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - attr_accessor modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]? + attr_accessor modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]? attr_accessor n: Integer? 
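Note on the params type above: a minimal usage sketch under assumed placeholder values (the model string and message content are illustrative, not prescribed by this patch); only `messages` and `model` are required, and the remaining fields stay optional:

    params = OpenAI::Chat::CompletionCreateParams.new(
      messages: [
        OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say hi")
      ],
      model: "gpt-4o", # any String, or a chat_model symbol per the model union
      n: 1
    )
    params.to_hash[:model] # => "gpt-4o", via the to_hash signature added just below
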
@@ -140,7 +140,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -162,6 +162,40 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], + model: OpenAI::Models::Chat::CompletionCreateParams::model, + audio: OpenAI::Chat::ChatCompletionAudioParam?, + frequency_penalty: Float?, + function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, + functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function], + logit_bias: ::Hash[Symbol, Integer]?, + logprobs: bool?, + max_completion_tokens: Integer?, + max_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + n: Integer?, + parallel_tool_calls: bool, + prediction: OpenAI::Chat::ChatCompletionPredictionContent?, + presence_penalty: Float?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + seed: Integer?, + service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, + stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, + store: bool?, + stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, + temperature: Float?, + tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, + tools: ::Array[OpenAI::Chat::ChatCompletionTool], + top_logprobs: Integer?, + top_p: Float?, + user: String, + web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -171,7 +205,7 @@ module OpenAI end type function_call = - OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode | OpenAI::Chat::ChatCompletionFunctionCallOption module FunctionCall @@ -185,7 +219,7 @@ module OpenAI NONE: :none AUTO: :auto - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode] end def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::function_call] @@ -216,6 +250,12 @@ module OpenAI ?description: String, ?parameters: OpenAI::Models::function_parameters ) -> void + + def to_hash: -> { + name: String, + description: String, + parameters: OpenAI::Models::function_parameters + } end type modality = :text | :audio @@ -226,7 +266,7 @@ module OpenAI TEXT: :text AUDIO: :audio - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::modality] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality] end type response_format = @@ -264,24 +304,29 @@ module OpenAI type web_search_options = { - search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? 
} class WebSearchOptions < OpenAI::Internal::Type::BaseModel - attr_reader search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size? + attr_reader search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size? def search_context_size=: ( - OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size - ) -> OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size + ) -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size attr_accessor user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? def initialize: ( - ?search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, ?user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? ) -> void + def to_hash: -> { + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? + } + type search_context_size = :low | :medium | :high module SearchContextSize @@ -291,7 +336,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] end type user_location = @@ -310,6 +355,11 @@ module OpenAI ?type: :approximate ) -> void + def to_hash: -> { + approximate: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + type: :approximate + } + type approximate = { city: String, @@ -341,6 +391,13 @@ module OpenAI ?region: String, ?timezone: String ) -> void + + def to_hash: -> { + city: String, + country: String, + region: String, + timezone: String + } end end end diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs index e20bc3a4..d3ddc656 100644 --- a/sig/openai/models/chat/completion_delete_params.rbs +++ b/sig/openai/models/chat/completion_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 83b22904..692b13c1 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -44,6 +44,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + metadata: OpenAI::Models::metadata?, + model: String, + order: OpenAI::Models::Chat::CompletionListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs b/sig/openai/models/chat/completion_retrieve_params.rbs index 8d4b832e..e91d58a6 100644 --- a/sig/openai/models/chat/completion_retrieve_params.rbs +++ b/sig/openai/models/chat/completion_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include 
OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs index 942741c4..6a169ae6 100644 --- a/sig/openai/models/chat/completion_update_params.rbs +++ b/sig/openai/models/chat/completion_update_params.rbs @@ -15,6 +15,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 0257770c..0e65a06b 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -35,6 +35,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Chat::Completions::MessageListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index a2288002..f5f686ba 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -3,23 +3,29 @@ module OpenAI type comparison_filter = { key: String, - type: OpenAI::ComparisonFilter::type_, - value: OpenAI::ComparisonFilter::value + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value } class ComparisonFilter < OpenAI::Internal::Type::BaseModel attr_accessor key: String - attr_accessor type: OpenAI::ComparisonFilter::type_ + attr_accessor type: OpenAI::Models::ComparisonFilter::type_ - attr_accessor value: OpenAI::ComparisonFilter::value + attr_accessor value: OpenAI::Models::ComparisonFilter::value def initialize: ( key: String, - type: OpenAI::ComparisonFilter::type_, - value: OpenAI::ComparisonFilter::value + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value ) -> void + def to_hash: -> { + key: String, + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value + } + type type_ = :eq | :ne | :gt | :gte | :lt | :lte module Type @@ -32,7 +38,7 @@ module OpenAI LT: :lt LTE: :lte - def self?.values: -> ::Array[OpenAI::ComparisonFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::ComparisonFilter::type_] end type value = String | Float | bool @@ -40,7 +46,7 @@ module OpenAI module Value extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::ComparisonFilter::value] + def self?.variants: -> ::Array[OpenAI::Models::ComparisonFilter::value] end end end diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs index a9d8a71b..40b716e4 100644 --- a/sig/openai/models/completion.rbs +++ b/sig/openai/models/completion.rbs @@ -39,6 +39,16 @@ module OpenAI ?usage: OpenAI::CompletionUsage, ?object: :text_completion ) -> void + + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::CompletionChoice], + created: Integer, + model: String, + object: :text_completion, + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } end end end diff --git a/sig/openai/models/completion_choice.rbs 
b/sig/openai/models/completion_choice.rbs index a42cad21..53a0ea09 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -2,14 +2,14 @@ module OpenAI module Models type completion_choice = { - finish_reason: OpenAI::CompletionChoice::finish_reason, + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, index: Integer, logprobs: OpenAI::CompletionChoice::Logprobs?, text: String } class CompletionChoice < OpenAI::Internal::Type::BaseModel - attr_accessor finish_reason: OpenAI::CompletionChoice::finish_reason + attr_accessor finish_reason: OpenAI::Models::CompletionChoice::finish_reason attr_accessor index: Integer @@ -18,12 +18,19 @@ module OpenAI attr_accessor text: String def initialize: ( - finish_reason: OpenAI::CompletionChoice::finish_reason, + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, index: Integer, logprobs: OpenAI::CompletionChoice::Logprobs?, text: String ) -> void + def to_hash: -> { + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, + index: Integer, + logprobs: OpenAI::CompletionChoice::Logprobs?, + text: String + } + type finish_reason = :stop | :length | :content_filter module FinishReason @@ -33,7 +40,7 @@ module OpenAI LENGTH: :length CONTENT_FILTER: :content_filter - def self?.values: -> ::Array[OpenAI::CompletionChoice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::CompletionChoice::finish_reason] end type logprobs = @@ -69,6 +76,13 @@ module OpenAI ?tokens: ::Array[String], ?top_logprobs: ::Array[::Hash[Symbol, Float]] ) -> void + + def to_hash: -> { + text_offset: ::Array[Integer], + token_logprobs: ::Array[Float], + tokens: ::Array[String], + top_logprobs: ::Array[::Hash[Symbol, Float]] + } end end end diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index f2a4f357..5dcdfb9d 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -83,6 +83,27 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::CompletionCreateParams::model, + prompt: OpenAI::Models::CompletionCreateParams::prompt?, + best_of: Integer?, + echo: bool?, + frequency_penalty: Float?, + logit_bias: ::Hash[Symbol, Integer]?, + logprobs: Integer?, + max_tokens: Integer?, + n: Integer?, + presence_penalty: Float?, + seed: Integer?, + stop: OpenAI::Models::CompletionCreateParams::stop?, + stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, + suffix: String?, + temperature: Float?, + top_p: Float?, + user: String, + request_options: OpenAI::RequestOptions + } + type model = String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs index f50f1dad..d70bb65e 100644 --- a/sig/openai/models/completion_usage.rbs +++ b/sig/openai/models/completion_usage.rbs @@ -36,6 +36,14 @@ module OpenAI ?prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails ) -> void + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer, + completion_tokens_details: OpenAI::CompletionUsage::CompletionTokensDetails, + prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails + } + type completion_tokens_details = { accepted_prediction_tokens: Integer, @@ -67,6 +75,13 @@ module OpenAI ?reasoning_tokens: Integer, ?rejected_prediction_tokens: Integer ) -> void + + def to_hash: -> { + 
accepted_prediction_tokens: Integer, + audio_tokens: Integer, + reasoning_tokens: Integer, + rejected_prediction_tokens: Integer + } end type prompt_tokens_details = @@ -85,6 +100,8 @@ module OpenAI ?audio_tokens: Integer, ?cached_tokens: Integer ) -> void + + def to_hash: -> { audio_tokens: Integer, cached_tokens: Integer } end end end diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs index f5c17954..ccf1036d 100644 --- a/sig/openai/models/compound_filter.rbs +++ b/sig/openai/models/compound_filter.rbs @@ -2,26 +2,31 @@ module OpenAI module Models type compound_filter = { - filters: ::Array[OpenAI::CompoundFilter::filter], - type: OpenAI::CompoundFilter::type_ + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ } class CompoundFilter < OpenAI::Internal::Type::BaseModel - attr_accessor filters: ::Array[OpenAI::CompoundFilter::filter] + attr_accessor filters: ::Array[OpenAI::Models::CompoundFilter::filter] - attr_accessor type: OpenAI::CompoundFilter::type_ + attr_accessor type: OpenAI::Models::CompoundFilter::type_ def initialize: ( - filters: ::Array[OpenAI::CompoundFilter::filter], - type: OpenAI::CompoundFilter::type_ + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ ) -> void + def to_hash: -> { + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ + } + type filter = OpenAI::ComparisonFilter | top module Filter extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::CompoundFilter::filter] + def self?.variants: -> ::Array[OpenAI::Models::CompoundFilter::filter] end type type_ = :and | :or @@ -32,7 +37,7 @@ module OpenAI AND: :and OR: :or - def self?.values: -> ::Array[OpenAI::CompoundFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::CompoundFilter::type_] end end end diff --git a/sig/openai/models/container_create_params.rbs b/sig/openai/models/container_create_params.rbs index b7a1c68f..3c51ec67 100644 --- a/sig/openai/models/container_create_params.rbs +++ b/sig/openai/models/container_create_params.rbs @@ -31,22 +31,34 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: ::Array[String], + request_options: OpenAI::RequestOptions + } + type expires_after = { - anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor, + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, minutes: Integer } class ExpiresAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor + attr_accessor anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor attr_accessor minutes: Integer def initialize: ( - anchor: OpenAI::ContainerCreateParams::ExpiresAfter::anchor, + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, minutes: Integer ) -> void + def to_hash: -> { + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + } + type anchor = :last_active_at module Anchor @@ -54,7 +66,7 @@ module OpenAI LAST_ACTIVE_AT: :last_active_at - def self?.values: -> ::Array[OpenAI::ContainerCreateParams::ExpiresAfter::anchor] + def self?.values: -> ::Array[OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor] end end end diff --git a/sig/openai/models/container_create_response.rbs 
b/sig/openai/models/container_create_response.rbs index 262d50cf..ddc8f23d 100644 --- a/sig/openai/models/container_create_response.rbs +++ b/sig/openai/models/container_create_response.rbs @@ -36,6 +36,15 @@ module OpenAI ?expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + type expires_after = { anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, @@ -58,6 +67,11 @@ module OpenAI ?minutes: Integer ) -> void + def to_hash: -> { + anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + minutes: Integer + } + type anchor = :last_active_at module Anchor diff --git a/sig/openai/models/container_delete_params.rbs b/sig/openai/models/container_delete_params.rbs index abb4eb46..940b54a7 100644 --- a/sig/openai/models/container_delete_params.rbs +++ b/sig/openai/models/container_delete_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/container_list_params.rbs b/sig/openai/models/container_list_params.rbs index 76c90fcc..ec6092bb 100644 --- a/sig/openai/models/container_list_params.rbs +++ b/sig/openai/models/container_list_params.rbs @@ -33,6 +33,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::ContainerListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/container_list_response.rbs b/sig/openai/models/container_list_response.rbs index 45796baa..97f971f0 100644 --- a/sig/openai/models/container_list_response.rbs +++ b/sig/openai/models/container_list_response.rbs @@ -36,6 +36,15 @@ module OpenAI ?expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + type expires_after = { anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, @@ -58,6 +67,11 @@ module OpenAI ?minutes: Integer ) -> void + def to_hash: -> { + anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + minutes: Integer + } + type anchor = :last_active_at module Anchor diff --git a/sig/openai/models/container_retrieve_params.rbs b/sig/openai/models/container_retrieve_params.rbs index c7a46fc4..74ca0b0c 100644 --- a/sig/openai/models/container_retrieve_params.rbs +++ b/sig/openai/models/container_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/container_retrieve_response.rbs b/sig/openai/models/container_retrieve_response.rbs index 0325b595..fac17ec3 100644 --- a/sig/openai/models/container_retrieve_response.rbs +++ b/sig/openai/models/container_retrieve_response.rbs @@ -36,6 +36,15 @@ module OpenAI ?expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + 
object: String, + status: String, + expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + type expires_after = { anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, @@ -58,6 +67,11 @@ module OpenAI ?minutes: Integer ) -> void + def to_hash: -> { + anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + minutes: Integer + } + type anchor = :last_active_at module Anchor diff --git a/sig/openai/models/containers/file_create_params.rbs b/sig/openai/models/containers/file_create_params.rbs index dc5dc476..8ebe9ad8 100644 --- a/sig/openai/models/containers/file_create_params.rbs +++ b/sig/openai/models/containers/file_create_params.rbs @@ -24,6 +24,12 @@ module OpenAI ?file_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + file: OpenAI::Internal::file_input, + file_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/containers/file_create_response.rbs b/sig/openai/models/containers/file_create_response.rbs index cb3aa874..ccb96c98 100644 --- a/sig/openai/models/containers/file_create_response.rbs +++ b/sig/openai/models/containers/file_create_response.rbs @@ -36,6 +36,16 @@ module OpenAI source: String, ?object: :"container.file" ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } end end end diff --git a/sig/openai/models/containers/file_delete_params.rbs b/sig/openai/models/containers/file_delete_params.rbs index e53a0bee..b8cd479f 100644 --- a/sig/openai/models/containers/file_delete_params.rbs +++ b/sig/openai/models/containers/file_delete_params.rbs @@ -14,6 +14,11 @@ module OpenAI container_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/containers/file_list_params.rbs b/sig/openai/models/containers/file_list_params.rbs index bc780ba7..8ea4c3aa 100644 --- a/sig/openai/models/containers/file_list_params.rbs +++ b/sig/openai/models/containers/file_list_params.rbs @@ -34,6 +34,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Containers::FileListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/containers/file_list_response.rbs b/sig/openai/models/containers/file_list_response.rbs index 5c878ced..1314ef0f 100644 --- a/sig/openai/models/containers/file_list_response.rbs +++ b/sig/openai/models/containers/file_list_response.rbs @@ -36,6 +36,16 @@ module OpenAI source: String, ?object: :"container.file" ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } end end end diff --git a/sig/openai/models/containers/file_retrieve_params.rbs b/sig/openai/models/containers/file_retrieve_params.rbs index c67ff1c9..aba88985 100644 --- a/sig/openai/models/containers/file_retrieve_params.rbs +++ b/sig/openai/models/containers/file_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI container_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git 
a/sig/openai/models/containers/file_retrieve_response.rbs b/sig/openai/models/containers/file_retrieve_response.rbs index d52e25e7..33e75b90 100644 --- a/sig/openai/models/containers/file_retrieve_response.rbs +++ b/sig/openai/models/containers/file_retrieve_response.rbs @@ -36,6 +36,16 @@ module OpenAI source: String, ?object: :"container.file" ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } end end end diff --git a/sig/openai/models/containers/files/content_retrieve_params.rbs b/sig/openai/models/containers/files/content_retrieve_params.rbs index 6f56cf72..8912648d 100644 --- a/sig/openai/models/containers/files/content_retrieve_params.rbs +++ b/sig/openai/models/containers/files/content_retrieve_params.rbs @@ -15,6 +15,11 @@ module OpenAI container_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs index d406e7da..92433cab 100644 --- a/sig/openai/models/create_embedding_response.rbs +++ b/sig/openai/models/create_embedding_response.rbs @@ -24,6 +24,13 @@ module OpenAI ?object: :list ) -> void + def to_hash: -> { + data: ::Array[OpenAI::Embedding], + model: String, + object: :list, + usage: OpenAI::CreateEmbeddingResponse::Usage + } + type usage = { prompt_tokens: Integer, total_tokens: Integer } class Usage < OpenAI::Internal::Type::BaseModel @@ -32,6 +39,8 @@ module OpenAI attr_accessor total_tokens: Integer def initialize: (prompt_tokens: Integer, total_tokens: Integer) -> void + + def to_hash: -> { prompt_tokens: Integer, total_tokens: Integer } end end end diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs index 88478e43..3c411347 100644 --- a/sig/openai/models/embedding.rbs +++ b/sig/openai/models/embedding.rbs @@ -15,6 +15,12 @@ module OpenAI index: Integer, ?object: :embedding ) -> void + + def to_hash: -> { + embedding: ::Array[Float], + index: Integer, + object: :embedding + } end end end diff --git a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 73e2bc84..4600282f 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -41,6 +41,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: OpenAI::Models::EmbeddingCreateParams::input, + model: OpenAI::Models::EmbeddingCreateParams::model, + dimensions: Integer, + encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, + user: String, + request_options: OpenAI::RequestOptions + } + type input = String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]] diff --git a/sig/openai/models/error_object.rbs b/sig/openai/models/error_object.rbs index f3cb58bb..9ee1bb01 100644 --- a/sig/openai/models/error_object.rbs +++ b/sig/openai/models/error_object.rbs @@ -18,6 +18,13 @@ module OpenAI param: String?, type: String ) -> void + + def to_hash: -> { + code: String?, + message: String, + param: String?, + type: String + } end end end diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 8f48feac..26468913 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type 
eval_create_params = { data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], metadata: OpenAI::Models::metadata?, name: String } @@ -15,7 +15,7 @@ module OpenAI attr_accessor data_source_config: OpenAI::Models::EvalCreateParams::data_source_config - attr_accessor testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion] + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion] attr_accessor metadata: OpenAI::Models::metadata? @@ -25,12 +25,20 @@ module OpenAI def initialize: ( data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + type data_source_config = OpenAI::EvalCreateParams::DataSourceConfig::Custom | OpenAI::EvalCreateParams::DataSourceConfig::Logs @@ -60,6 +68,12 @@ module OpenAI ?include_sample_schema: bool, ?type: :custom ) -> void + + def to_hash: -> { + item_schema: ::Hash[Symbol, top], + type: :custom, + include_sample_schema: bool + } end type logs = { type: :logs, metadata: ::Hash[Symbol, top] } @@ -72,6 +86,8 @@ module OpenAI def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void + + def to_hash: -> { type: :logs, metadata: ::Hash[Symbol, top] } end type stored_completions = @@ -88,6 +104,11 @@ module OpenAI ?metadata: ::Hash[Symbol, top], ?type: :stored_completions ) -> void + + def to_hash: -> { + type: :stored_completions, + metadata: ::Hash[Symbol, top] + } end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::data_source_config] @@ -105,7 +126,7 @@ module OpenAI type label_model = { - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input], + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], labels: ::Array[String], model: String, name: String, @@ -114,7 +135,7 @@ module OpenAI } class LabelModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] + attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input] attr_accessor labels: ::Array[String] @@ -127,7 +148,7 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input], + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], labels: ::Array[String], model: String, name: String, @@ -135,6 +156,15 @@ module OpenAI ?type: :label_model ) -> void + def to_hash: -> { + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + type input = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage | 
OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem @@ -150,32 +180,40 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = { - content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, - role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, - type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ } class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content - attr_accessor role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role - attr_reader type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? + attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? def type=: ( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ - ) -> OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ def initialize: ( - content: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, - role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, - ?type: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -192,9 +230,11 @@ module OpenAI attr_accessor type: :output_text def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } end - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -207,7 +247,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] + def self?.values: -> 
::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] end type type_ = :message @@ -217,11 +257,11 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] end end - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input] end end @@ -233,6 +273,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type python = { pass_threshold: Float } @@ -243,6 +285,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type score_model = { pass_threshold: Float } @@ -253,9 +297,11 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::testing_criterion] + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::testing_criterion] end end end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 33ae38e4..0d4799af 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -36,6 +36,16 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig | OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs @@ -63,14 +73,20 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :logs ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel @@ -86,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -96,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -106,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] diff --git a/sig/openai/models/eval_custom_data_source_config.rbs b/sig/openai/models/eval_custom_data_source_config.rbs index 8b53b580..6d412cef 100644 --- a/sig/openai/models/eval_custom_data_source_config.rbs +++ b/sig/openai/models/eval_custom_data_source_config.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :custom def initialize: (schema: ::Hash[Symbol, top], ?type: :custom) -> void + + def to_hash: -> { schema: ::Hash[Symbol, top], type: :custom } end end end diff --git a/sig/openai/models/eval_delete_params.rbs b/sig/openai/models/eval_delete_params.rbs index 4fb3f99f..f5a6eb71 100644 --- a/sig/openai/models/eval_delete_params.rbs +++ b/sig/openai/models/eval_delete_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/eval_delete_response.rbs b/sig/openai/models/eval_delete_response.rbs index 4f0adf53..95ec2c86 100644 --- a/sig/openai/models/eval_delete_response.rbs +++ b/sig/openai/models/eval_delete_response.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor object: String def initialize: (deleted: bool, eval_id: String, object: String) -> void + + def to_hash: -> { deleted: bool, eval_id: String, object: String } end end end diff --git a/sig/openai/models/eval_list_params.rbs b/sig/openai/models/eval_list_params.rbs index 57ad5212..ff25dbd4 100644 --- a/sig/openai/models/eval_list_params.rbs +++ b/sig/openai/models/eval_list_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::order, + order_by: OpenAI::Models::EvalListParams::order_by, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index 67e93038..ef01d7c9 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -36,6 +36,16 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: 
OpenAI::Models::EvalListResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig | OpenAI::Models::EvalListResponse::DataSourceConfig::Logs @@ -63,14 +73,20 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :logs ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } end def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel @@ -86,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -96,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -106,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::testing_criterion] diff --git a/sig/openai/models/eval_retrieve_params.rbs b/sig/openai/models/eval_retrieve_params.rbs index c6242dcb..167a0920 100644 --- a/sig/openai/models/eval_retrieve_params.rbs +++ b/sig/openai/models/eval_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index 1a0c3d72..10f46696 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -36,6 +36,16 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig | OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs @@ -63,14 +73,20 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :logs ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel @@ -86,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -96,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -106,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs index 345b12d6..1237e523 100644 --- a/sig/openai/models/eval_stored_completions_data_source_config.rbs +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -19,6 +19,12 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :stored_completions ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :stored_completions, + metadata: OpenAI::Models::metadata? + } end end end diff --git a/sig/openai/models/eval_update_params.rbs b/sig/openai/models/eval_update_params.rbs index cb60c3c2..fc6c2540 100644 --- a/sig/openai/models/eval_update_params.rbs +++ b/sig/openai/models/eval_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?name: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index 3b28136d..532a5435 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -36,6 +36,16 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalUpdateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig | OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs @@ -63,14 +73,20 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?type: :logs ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel @@ -86,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -96,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -106,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index bec2dc8b..181d516b 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -3,23 +3,23 @@ module OpenAI module Evals type create_eval_completions_run_data_source = { - source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, - input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, model: String, sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source + attr_accessor source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source - attr_accessor type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_ + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_ - attr_reader input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages? + attr_reader input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages? def input_messages=: ( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages - ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages attr_reader model: String? 
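As a rough illustration of what the `to_hash` signatures added throughout this patch promise at runtime, here is a hypothetical Ruby sketch (not part of the patch itself). It assumes the generated classes accept keyword arguments exactly as their RBS `initialize` signatures declare; the file id is a made-up placeholder.

# Hypothetical usage sketch; "file-abc123" is a placeholder id.
file_source = OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID.new(id: "file-abc123")
file_source.to_hash
# => { id: "file-abc123", type: :file_id }
# shape as declared by `def to_hash: -> { id: String, type: :file_id }`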
@@ -32,13 +32,21 @@ module OpenAI ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams def initialize: ( - source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, - ?input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, ?model: String, ?sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + } + type source = OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID @@ -63,6 +71,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -77,6 +90,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -88,6 +106,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type stored_completions = @@ -121,9 +141,18 @@ module OpenAI ?model: String?, ?type: :stored_completions ) -> void + + def to_hash: -> { + type: :stored_completions, + created_after: Integer?, + created_before: Integer?, + limit: Integer?, + metadata: OpenAI::Models::metadata?, + model: String? 
+ } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::source] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source] end type type_ = :completions @@ -133,7 +162,7 @@ module OpenAI COMPLETIONS: :completions - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] end type input_messages = @@ -145,20 +174,25 @@ module OpenAI type template = { - template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], type: :template } class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] attr_accessor type: :template def initialize: ( - template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Responses::EasyInputMessage | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message @@ -168,28 +202,34 @@ module OpenAI type message = { - content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content - attr_accessor role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role - attr_reader type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? + attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? 
def type=: ( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ def initialize: ( - content: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - ?type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -206,9 +246,11 @@ module OpenAI attr_accessor type: :output_text def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] end type role = :user | :assistant | :system | :developer @@ -221,7 +263,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] end type type_ = :message @@ -231,11 +273,11 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] end end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] end end @@ -251,9 +293,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { item_reference: String, type: :item_reference } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages] end type sampling_params = @@ -287,6 +331,13 @@ module OpenAI ?temperature: 
Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end end diff --git a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs index c376b674..ff6c8b6a 100644 --- a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -3,20 +3,25 @@ module OpenAI module Evals type create_eval_jsonl_run_data_source = { - source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, type: :jsonl } class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source + attr_accessor source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source attr_accessor type: :jsonl def initialize: ( - source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, ?type: :jsonl ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + type: :jsonl + } + type source = OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent | OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID @@ -40,6 +45,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -54,6 +64,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -65,9 +80,11 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::source] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source] end end end diff --git a/sig/openai/models/evals/eval_api_error.rbs b/sig/openai/models/evals/eval_api_error.rbs index 70bd686a..9504768f 100644 --- a/sig/openai/models/evals/eval_api_error.rbs +++ b/sig/openai/models/evals/eval_api_error.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor message: String def initialize: (code: String, message: String) -> void + + def to_hash: -> { code: String, message: String } end end end diff --git a/sig/openai/models/evals/run_cancel_params.rbs b/sig/openai/models/evals/run_cancel_params.rbs index 32a0b270..19118357 100644 --- a/sig/openai/models/evals/run_cancel_params.rbs +++ b/sig/openai/models/evals/run_cancel_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 6a69a19d..53408038 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -65,6 +65,23 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, + error: 
OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource @@ -111,6 +128,14 @@ module OpenAI ?type: :responses ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + type source = OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID @@ -135,6 +160,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -149,6 +179,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -160,6 +195,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type responses = @@ -213,6 +250,20 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source] @@ -241,6 +292,11 @@ module OpenAI ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem @@ -256,6 +312,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = @@ -282,6 +340,12 @@ module OpenAI ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -301,6 +365,8 @@ module OpenAI text: String, ?type: :output_text ) -> void + + def to_hash: -> { text: String, type: :output_text } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] @@ -346,6 +412,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages] @@ -382,6 +453,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end @@ -419,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -436,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -456,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index d33730a8..7c36c313 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -28,6 +28,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + data_source: OpenAI::Models::Evals::RunCreateParams::data_source, + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource @@ -38,23 +45,23 @@ module OpenAI type 
create_eval_responses_run_data_source = { - source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, model: String, sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source + attr_accessor source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source - attr_accessor type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ + attr_accessor type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ - attr_reader input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? + attr_reader input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? def input_messages=: ( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages - ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages attr_reader model: String? 
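The request-params classes follow the same pattern: `to_hash` mirrors the keywords `initialize` accepts, plus `request_options`. A minimal, hypothetical sketch under the same assumptions (the constants resolve via the `OpenAI::Evals` shorthand these signatures already use; the id and run name are placeholders):

# Hypothetical sketch: build run-create params around a JSONL data source
# and serialize them through the newly signed `to_hash`.
params = OpenAI::Evals::RunCreateParams.new(
  data_source: OpenAI::Evals::CreateEvalJSONLRunDataSource.new(
    source: OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID.new(id: "file-123")
  ),
  name: "nightly-eval-run"
)
params.to_hash
# => keys :data_source, :metadata, :name, :request_options,
#    matching `def to_hash` in run_create_params.rbs above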
@@ -67,13 +74,21 @@ module OpenAI ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams def initialize: ( - source: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, - ?input_messages: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, ?model: String, ?sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + type source = OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID @@ -98,6 +113,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -112,6 +132,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -123,6 +148,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type responses = @@ -176,9 +203,23 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? 
+ } end - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] end type type_ = :responses @@ -188,7 +229,7 @@ module OpenAI RESPONSES: :responses - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] end type input_messages = @@ -200,20 +241,25 @@ module OpenAI type template = { - template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], type: :template } class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] attr_accessor type: :template def initialize: ( - template: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem @@ -229,32 +275,40 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = { - content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ } class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content + attr_accessor content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content - attr_accessor 
role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role + attr_accessor role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role - attr_reader type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? + attr_reader type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? def type=: ( - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ - ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ def initialize: ( - content: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, - role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, - ?type: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -274,9 +328,11 @@ module OpenAI text: String, ?type: :output_text ) -> void + + def to_hash: -> { text: String, type: :output_text } end - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] end type role = :user | :assistant | :system | :developer @@ -289,7 +345,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] end type type_ = :message @@ -299,11 
+355,11 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] end end - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] end end @@ -319,9 +375,14 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } end - def self?.variants: -> ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] end type sampling_params = @@ -355,6 +416,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 767d086d..6076ce25 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -65,6 +65,23 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource @@ -111,6 +128,14 @@ module OpenAI ?type: :responses ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + type source = OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID @@ -135,6 +160,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -149,6 +179,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -160,6 +195,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, 
?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type responses = @@ -213,6 +250,20 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source] @@ -241,6 +292,11 @@ module OpenAI ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem @@ -256,6 +312,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = @@ -282,6 +340,12 @@ module OpenAI ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -301,6 +365,8 @@ module OpenAI text: String, ?type: :output_text ) -> void + + def to_hash: -> { text: String, type: :output_text } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] @@ -346,6 +412,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages] @@ -382,6 +453,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end @@ -419,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -436,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -456,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_delete_params.rbs b/sig/openai/models/evals/run_delete_params.rbs index f775e4d5..097144da 100644 --- 
a/sig/openai/models/evals/run_delete_params.rbs +++ b/sig/openai/models/evals/run_delete_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/run_delete_response.rbs b/sig/openai/models/evals/run_delete_response.rbs index 21d23706..37a02bec 100644 --- a/sig/openai/models/evals/run_delete_response.rbs +++ b/sig/openai/models/evals/run_delete_response.rbs @@ -22,6 +22,8 @@ module OpenAI ?object: String, ?run_id: String ) -> void + + def to_hash: -> { deleted: bool, object: String, run_id: String } end end end diff --git a/sig/openai/models/evals/run_list_params.rbs b/sig/openai/models/evals/run_list_params.rbs index 33c002ca..95f65ca4 100644 --- a/sig/openai/models/evals/run_list_params.rbs +++ b/sig/openai/models/evals/run_list_params.rbs @@ -42,6 +42,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::order, + status: OpenAI::Models::Evals::RunListParams::status, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 9e1b99da..b87620d8 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -65,6 +65,23 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunListResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource @@ -111,6 +128,14 @@ module OpenAI ?type: :responses ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + type source = OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID @@ -135,6 +160,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -149,6 +179,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -160,6 +195,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type responses = @@ -213,6 
+250,20 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source] @@ -241,6 +292,11 @@ module OpenAI ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem @@ -256,6 +312,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = @@ -282,6 +340,12 @@ module OpenAI ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -301,6 +365,8 @@ module OpenAI text: String, ?type: :output_text ) -> void + + def to_hash: -> { text: String, type: :output_text } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] @@ -346,6 +412,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages] @@ -382,6 +453,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end @@ -419,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -436,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -456,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_retrieve_params.rbs b/sig/openai/models/evals/run_retrieve_params.rbs index 955259eb..bc35ad38 100644 --- a/sig/openai/models/evals/run_retrieve_params.rbs +++ b/sig/openai/models/evals/run_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: 
String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index f843309b..97d98b20 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -65,6 +65,23 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource @@ -111,6 +128,14 @@ module OpenAI ?type: :responses ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + type source = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID @@ -135,6 +160,11 @@ module OpenAI ?type: :file_content ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + type content = { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } @@ -149,6 +179,11 @@ module OpenAI item: ::Hash[Symbol, top], ?sample: ::Hash[Symbol, top] ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } end end @@ -160,6 +195,8 @@ module OpenAI attr_accessor type: :file_id def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } end type responses = @@ -213,6 +250,20 @@ module OpenAI ?users: ::Array[String]?, ?type: :responses ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source] @@ -241,6 +292,11 @@ module OpenAI ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + type template = OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem @@ -256,6 +312,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type eval_item = @@ -282,6 +340,12 @@ module OpenAI ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + type content = String | OpenAI::Responses::ResponseInputText @@ -301,6 +365,8 @@ module OpenAI text: String, ?type: :output_text ) -> void + + def to_hash: -> { text: String, type: :output_text } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] @@ -346,6 +412,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } end def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages] @@ -382,6 +453,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end @@ -419,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -436,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -456,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/runs/output_item_list_params.rbs b/sig/openai/models/evals/runs/output_item_list_params.rbs index 4ff88c6f..650dddf9 100644 --- a/sig/openai/models/evals/runs/output_item_list_params.rbs +++ b/sig/openai/models/evals/runs/output_item_list_params.rbs @@ -47,6 +47,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::order, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::status, + request_options: OpenAI::RequestOptions + } + type order = :asc | 
:desc module Order diff --git a/sig/openai/models/evals/runs/output_item_list_response.rbs b/sig/openai/models/evals/runs/output_item_list_response.rbs index 8f79060c..53dfbc98 100644 --- a/sig/openai/models/evals/runs/output_item_list_response.rbs +++ b/sig/openai/models/evals/runs/output_item_list_response.rbs @@ -50,6 +50,19 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + type sample = { error: OpenAI::Evals::EvalAPIError, @@ -98,6 +111,19 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage ) -> void + def to_hash: -> { + error: OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + } + type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -106,6 +132,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type output = { content: String, role: String } @@ -120,6 +148,8 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> { content: String, role: String } end type usage = @@ -145,6 +175,13 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs index 91add94f..65456919 100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs @@ -19,6 +19,12 @@ module OpenAI run_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + run_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs index 87d32fa3..70c37909 100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs @@ -50,6 +50,19 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + type sample = { error: OpenAI::Evals::EvalAPIError, @@ -98,6 +111,19 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage ) -> void + def to_hash: -> { + error: 
OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + } + type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -106,6 +132,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type output = { content: String, role: String } @@ -120,6 +148,8 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> { content: String, role: String } end type usage = @@ -145,6 +175,13 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index df1a12a0..c36aedab 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 7973f161..2abf1615 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -20,6 +20,12 @@ module OpenAI purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + file: OpenAI::Internal::file_input, + purpose: OpenAI::Models::file_purpose, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index 8cd08d59..3c3ea094 100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs index fd681d7c..c091a995 100644 --- a/sig/openai/models/file_deleted.rbs +++ b/sig/openai/models/file_deleted.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor object: :file def initialize: (id: String, deleted: bool, ?object: :file) -> void + + def to_hash: -> { id: String, deleted: bool, object: :file } end end end diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 51c07c76..2d459f2d 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -39,6 +39,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::FileListParams::order, + purpose: String, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/file_object.rbs 
b/sig/openai/models/file_object.rbs index 3ebcb910..acbdedc2 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -7,8 +7,8 @@ module OpenAI created_at: Integer, filename: String, object: :file, - purpose: OpenAI::FileObject::purpose, - status: OpenAI::FileObject::status, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, expires_at: Integer, status_details: String } @@ -24,9 +24,9 @@ module OpenAI attr_accessor object: :file - attr_accessor purpose: OpenAI::FileObject::purpose + attr_accessor purpose: OpenAI::Models::FileObject::purpose - attr_accessor status: OpenAI::FileObject::status + attr_accessor status: OpenAI::Models::FileObject::status attr_reader expires_at: Integer? @@ -41,13 +41,25 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::FileObject::purpose, - status: OpenAI::FileObject::status, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, ?expires_at: Integer, ?status_details: String, ?object: :file ) -> void + def to_hash: -> { + id: String, + bytes: Integer, + created_at: Integer, + filename: String, + object: :file, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, + expires_at: Integer, + status_details: String + } + type purpose = :assistants | :assistants_output @@ -68,7 +80,7 @@ module OpenAI FINE_TUNE_RESULTS: :"fine-tune-results" VISION: :vision - def self?.values: -> ::Array[OpenAI::FileObject::purpose] + def self?.values: -> ::Array[OpenAI::Models::FileObject::purpose] end type status = :uploaded | :processed | :error @@ -80,7 +92,7 @@ module OpenAI PROCESSED: :processed ERROR: :error - def self?.values: -> ::Array[OpenAI::FileObject::status] + def self?.values: -> ::Array[OpenAI::Models::FileObject::status] end end end diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index 4788e5a6..01eca336 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs index 4c094ab8..0fcec2cc 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs @@ -27,6 +27,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, + model_sample: String, + reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + request_options: OpenAI::RequestOptions + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs index fdc022c0..e54eb087 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs @@ -26,6 +26,13 @@ module OpenAI sub_rewards: ::Hash[Symbol, top] ) -> void + def to_hash: -> { + metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: 
::Hash[Symbol, top], + reward: Float, + sub_rewards: ::Hash[Symbol, top] + } + type metadata = { errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, @@ -62,6 +69,16 @@ module OpenAI type: String ) -> void + def to_hash: -> { + errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, + execution_time: Float, + name: String, + sampled_model_name: String?, + scores: ::Hash[Symbol, top], + token_usage: Integer?, + type: String + } + type errors = { formula_parse_error: bool, @@ -125,6 +142,23 @@ module OpenAI truncated_observation_error: bool, unresponsive_reward_error: bool ) -> void + + def to_hash: -> { + formula_parse_error: bool, + invalid_variable_error: bool, + model_grader_parse_error: bool, + model_grader_refusal_error: bool, + model_grader_server_error: bool, + model_grader_server_error_details: String?, + other_error: bool, + python_grader_runtime_error: bool, + python_grader_runtime_error_details: String?, + python_grader_server_error: bool, + python_grader_server_error_type: String?, + sample_parse_error: bool, + truncated_observation_error: bool, + unresponsive_reward_error: bool + } end end end diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs index 8eec588b..c7e0f385 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs @@ -19,6 +19,11 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader, + request_options: OpenAI::RequestOptions + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs index a2139b77..939e54e2 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs @@ -18,6 +18,10 @@ module OpenAI ?grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs index d8a52d89..b74273b6 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs @@ -16,6 +16,11 @@ module OpenAI project_ids: ::Array[String], ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + project_ids: ::Array[String], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs index 1bb85f1a..9e54b6cd 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs @@ -25,6 +25,13 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: 
String + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs index 78bbd44d..2d265756 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs @@ -16,6 +16,11 @@ module OpenAI fine_tuned_model_checkpoint: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + fine_tuned_model_checkpoint: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs index 7cf0427b..c2d55981 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs @@ -17,6 +17,12 @@ module OpenAI deleted: bool, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"checkpoint.permission" + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs index 0b609dbc..a76caaa1 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, + project_id: String, + request_options: OpenAI::RequestOptions + } + type order = :ascending | :descending module Order diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs index db9a1b9d..54f9630a 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -30,6 +30,14 @@ module OpenAI ?object: :list ) -> void + def to_hash: -> { + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + object: :list, + first_id: String?, + last_id: String? 
+ } + type data = { id: String, @@ -53,6 +61,13 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } end end end diff --git a/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs index 49aa63fd..0abf70d0 100644 --- a/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs +++ b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs @@ -3,50 +3,57 @@ module OpenAI module FineTuning type dpo_hyperparameters = { - batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, - beta: OpenAI::FineTuning::DpoHyperparameters::beta, - learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs } class DpoHyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size? def batch_size=: ( - OpenAI::FineTuning::DpoHyperparameters::batch_size - ) -> OpenAI::FineTuning::DpoHyperparameters::batch_size + OpenAI::Models::FineTuning::DpoHyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::batch_size - attr_reader beta: OpenAI::FineTuning::DpoHyperparameters::beta? + attr_reader beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta? def beta=: ( - OpenAI::FineTuning::DpoHyperparameters::beta - ) -> OpenAI::FineTuning::DpoHyperparameters::beta + OpenAI::Models::FineTuning::DpoHyperparameters::beta + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::beta - attr_reader learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::DpoHyperparameters::n_epochs - ) -> OpenAI::FineTuning::DpoHyperparameters::n_epochs + OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, - ?beta: OpenAI::FineTuning::DpoHyperparameters::beta, - ?learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + ?beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::batch_size] end type beta = :auto | Float @@ -54,7 +61,7 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::beta] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::beta] end type learning_rate_multiplier = :auto | Float @@ -62,7 +69,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -70,7 +77,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs] end end end diff --git a/sig/openai/models/fine_tuning/dpo_method.rbs b/sig/openai/models/fine_tuning/dpo_method.rbs index 4284a080..094cebbf 100644 --- a/sig/openai/models/fine_tuning/dpo_method.rbs +++ b/sig/openai/models/fine_tuning/dpo_method.rbs @@ -14,6 +14,10 @@ module OpenAI def initialize: ( ?hyperparameters: OpenAI::FineTuning::DpoHyperparameters ) -> void + + def to_hash: -> { + hyperparameters: OpenAI::FineTuning::DpoHyperparameters + } end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index 59841d1e..a5368cef 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -16,7 +16,7 @@ module OpenAI organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::FineTuning::FineTuningJob::status, + status: OpenAI::Models::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, @@ -49,7 +49,7 @@ module OpenAI attr_accessor seed: Integer - attr_accessor status: OpenAI::FineTuning::FineTuningJob::status + attr_accessor 
status: OpenAI::Models::FineTuning::FineTuningJob::status attr_accessor trained_tokens: Integer? @@ -80,7 +80,7 @@ module OpenAI organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::FineTuning::FineTuningJob::status, + status: OpenAI::Models::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, @@ -91,6 +91,28 @@ module OpenAI ?object: :"fine_tuning.job" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + error: OpenAI::FineTuning::FineTuningJob::Error?, + fine_tuned_model: String?, + finished_at: Integer?, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, + model: String, + object: :"fine_tuning.job", + organization_id: String, + result_files: ::Array[String], + seed: Integer, + status: OpenAI::Models::FineTuning::FineTuningJob::status, + trained_tokens: Integer?, + training_file: String, + validation_file: String?, + estimated_finish: Integer?, + integrations: ::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]?, + metadata: OpenAI::Models::metadata?, + method_: OpenAI::FineTuning::FineTuningJob::Method + } + type error = { code: String, message: String, param: String? } class Error < OpenAI::Internal::Type::BaseModel @@ -105,42 +127,50 @@ module OpenAI message: String, param: String? ) -> void + + def to_hash: -> { code: String, message: String, param: String? } end type hyperparameters = { - batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, - learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_accessor batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size? + attr_accessor batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size? - attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, - ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + } + type batch_size = (top | :auto | Integer)? module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -148,7 +178,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -156,7 +186,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs] end end @@ -178,19 +208,19 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::status] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::status] end type method_ = { - type: OpenAI::FineTuning::FineTuningJob::Method::type_, + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, dpo: OpenAI::FineTuning::DpoMethod, reinforcement: OpenAI::FineTuning::ReinforcementMethod, supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::FineTuning::FineTuningJob::Method::type_ + attr_accessor type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ attr_reader dpo: OpenAI::FineTuning::DpoMethod? 
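Throughout this patch, the added `def to_hash:` signatures expose to RBS-aware type checkers the typed hash that `OpenAI::Internal::Type::BaseModel#to_hash` already returns at runtime; no Ruby behavior changes. A minimal sketch of what a checker can now verify, using the `FineTuningJob::Error` shape declared above (field values are illustrative, not real API output):

```ruby
require "openai"

# Shape per the signature added above:
#   def to_hash: -> { code: String, message: String, param: String? }
error = OpenAI::Models::FineTuning::FineTuningJob::Error.new(
  code: "invalid_training_file",                     # illustrative value
  message: "The training file could not be parsed.", # illustrative value
  param: "training_file"
)

hash = error.to_hash
hash.fetch(:code)  # now known to the checker as String
hash.fetch(:param) # now known as String? (nilable)
```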
@@ -211,12 +241,19 @@ module OpenAI ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - type: OpenAI::FineTuning::FineTuningJob::Method::type_, + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, ?dpo: OpenAI::FineTuning::DpoMethod, ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void + def to_hash: -> { + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod + } + type type_ = :supervised | :dpo | :reinforcement module Type @@ -226,7 +263,7 @@ module OpenAI DPO: :dpo REINFORCEMENT: :reinforcement - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index 018e7195..b1c4f9c4 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -7,11 +7,11 @@ module OpenAI { id: String, created_at: Integer, - level: OpenAI::FineTuning::FineTuningJobEvent::level, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, message: String, object: :"fine_tuning.job.event", data: top, - type: OpenAI::FineTuning::FineTuningJobEvent::type_ + type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_ } class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel @@ -19,7 +19,7 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor level: OpenAI::FineTuning::FineTuningJobEvent::level + attr_accessor level: OpenAI::Models::FineTuning::FineTuningJobEvent::level attr_accessor message: String @@ -29,22 +29,32 @@ module OpenAI def data=: (top) -> top - attr_reader type: OpenAI::FineTuning::FineTuningJobEvent::type_? + attr_reader type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_? 
def type=: ( - OpenAI::FineTuning::FineTuningJobEvent::type_ - ) -> OpenAI::FineTuning::FineTuningJobEvent::type_ + OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + ) -> OpenAI::Models::FineTuning::FineTuningJobEvent::type_ def initialize: ( id: String, created_at: Integer, - level: OpenAI::FineTuning::FineTuningJobEvent::level, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, message: String, ?data: top, - ?type: OpenAI::FineTuning::FineTuningJobEvent::type_, + ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_, ?object: :"fine_tuning.job.event" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, + message: String, + object: :"fine_tuning.job.event", + data: top, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + } + type level = :info | :warn | :error module Level @@ -54,7 +64,7 @@ module OpenAI WARN: :warn ERROR: :error - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::level] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::level] end type type_ = :message | :metrics @@ -65,7 +75,7 @@ module OpenAI MESSAGE: :message METRICS: :metrics - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs index a1e46c1a..1af84b2b 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs @@ -28,6 +28,13 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void + + def to_hash: -> { + project: String, + entity: String?, + name: String?, + tags: ::Array[String] + } end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index cef52fc6..aeb6caad 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -18,6 +18,11 @@ module OpenAI wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration, ?type: :wandb ) -> void + + def to_hash: -> { + type: :wandb, + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration + } end end end diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index bba98884..2548b37f 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 37e6178d..f1f03e19 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -58,6 +58,19 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::FineTuning::JobCreateParams::model, + training_file: String, + hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, + integrations: 
::Array[OpenAI::FineTuning::JobCreateParams::Integration]?, + metadata: OpenAI::Models::metadata?, + method_: OpenAI::FineTuning::JobCreateParams::Method, + seed: Integer?, + suffix: String?, + validation_file: String?, + request_options: OpenAI::RequestOptions + } + type model = String | :"babbage-002" @@ -78,42 +91,48 @@ module OpenAI type hyperparameters = { - batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -121,7 +140,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -129,7 +148,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs] end end @@ -149,6 +168,11 @@ module OpenAI ?type: :wandb ) -> void + def to_hash: -> { + type: :wandb, + wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb + } + type wandb = { project: String, @@ -174,19 +198,26 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void + + def to_hash: -> { + project: String, + entity: String?, + name: String?, + tags: ::Array[String] + } end end type method_ = { - type: OpenAI::FineTuning::JobCreateParams::Method::type_, + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, dpo: OpenAI::FineTuning::DpoMethod, reinforcement: OpenAI::FineTuning::ReinforcementMethod, supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::FineTuning::JobCreateParams::Method::type_ + attr_accessor type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ attr_reader dpo: OpenAI::FineTuning::DpoMethod? 
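The companion change running through this patch, rewriting type references from `OpenAI::FineTuning::*` to `OpenAI::Models::FineTuning::*`, is confined to the signatures: aliases such as `batch_size` (`:auto | Integer`) are defined under the `Models` namespace, and the sigs now name that canonical location. Runtime code is untouched; callers keep passing plain symbols and numbers wherever an alias admits them. A hedged sketch against the `JobCreateParams::Hyperparameters` signatures above (values are examples only):

```ruby
require "openai"

# :auto satisfies the alias
#   OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size
# (:auto | Integer), exactly as it did before the rename.
hyper = OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters.new(
  batch_size: :auto,
  learning_rate_multiplier: 1.8, # example value
  n_epochs: 3                    # example value
)

hyper.to_hash
# => { batch_size: :auto, learning_rate_multiplier: 1.8, n_epochs: 3 }
```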
@@ -207,12 +238,19 @@ module OpenAI ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - type: OpenAI::FineTuning::JobCreateParams::Method::type_, + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, ?dpo: OpenAI::FineTuning::DpoMethod, ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void + def to_hash: -> { + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod + } + type type_ = :supervised | :dpo | :reinforcement module Type @@ -222,7 +260,7 @@ module OpenAI DPO: :dpo REINFORCEMENT: :reinforcement - def self?.values: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index f3d26bd2..42a77bac 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -22,6 +22,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index db301888..d45893f4 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -25,6 +25,13 @@ module OpenAI ?metadata: ::Hash[Symbol, String]?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + metadata: ::Hash[Symbol, String]?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/job_pause_params.rbs b/sig/openai/models/fine_tuning/job_pause_params.rbs index a5ac0d51..3b55e229 100644 --- a/sig/openai/models/fine_tuning/job_pause_params.rbs +++ b/sig/openai/models/fine_tuning/job_pause_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_resume_params.rbs b/sig/openai/models/fine_tuning/job_resume_params.rbs index fa50a15b..3a1d7da9 100644 --- a/sig/openai/models/fine_tuning/job_resume_params.rbs +++ b/sig/openai/models/fine_tuning/job_resume_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index ed195b91..1535b93a 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs 
b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 2cf57298..28b14de0 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -23,6 +23,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index 51e7737c..15da3658 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -38,6 +38,16 @@ module OpenAI ?object: :"fine_tuning.job.checkpoint" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + fine_tuned_model_checkpoint: String, + fine_tuning_job_id: String, + metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + object: :"fine_tuning.job.checkpoint", + step_number: Integer + } + type metrics = { full_valid_loss: Float, @@ -87,6 +97,16 @@ module OpenAI ?valid_loss: Float, ?valid_mean_token_accuracy: Float ) -> void + + def to_hash: -> { + full_valid_loss: Float, + full_valid_mean_token_accuracy: Float, + step: Float, + train_loss: Float, + train_mean_token_accuracy: Float, + valid_loss: Float, + valid_mean_token_accuracy: Float + } end end end diff --git a/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs index 64891385..fdd3cc5b 100644 --- a/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs +++ b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs @@ -3,74 +3,84 @@ module OpenAI module FineTuning type reinforcement_hyperparameters = { - batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size, - compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier, - eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval, - eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples, - learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs, - reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size, + compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier, + eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval, + eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples, + learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs, + reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort } class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size? 
def batch_size=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::batch_size - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::batch_size + OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size - attr_reader compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier? + attr_reader compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier? def compute_multiplier=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier + OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier - attr_reader eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval? + attr_reader eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval? def eval_interval=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval + OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval - attr_reader eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples? + attr_reader eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples? def eval_samples=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples + OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples - attr_reader learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs? def n_epochs=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs + OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs - attr_reader reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort? + attr_reader reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort? 
         def reasoning_effort=: (
-          OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort
-        ) -> OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort
+          OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort
+        ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort
 
         def initialize: (
-          ?batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size,
-          ?compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier,
-          ?eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval,
-          ?eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples,
-          ?learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier,
-          ?n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs,
-          ?reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort
+          ?batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size,
+          ?compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier,
+          ?eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval,
+          ?eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples,
+          ?learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier,
+          ?n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs,
+          ?reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort
         ) -> void
 
+        def to_hash: -> {
+          batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size,
+          compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier,
+          eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval,
+          eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples,
+          learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier,
+          n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs,
+          reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort
+        }
+
         type batch_size = :auto | Integer
 
         module BatchSize
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::batch_size]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size]
         end
 
         type compute_multiplier = :auto | Float
@@ -78,7 +88,7 @@ module OpenAI
         module ComputeMultiplier
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier]
         end
 
         type eval_interval = :auto | Integer
@@ -86,7 +96,7 @@ module OpenAI
         module EvalInterval
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval]
         end
 
         type eval_samples = :auto | Integer
@@ -94,7 +104,7 @@ module OpenAI
         module EvalSamples
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples]
         end
 
         type learning_rate_multiplier = :auto | Float
@@ -102,7 +112,7 @@ module OpenAI
         module LearningRateMultiplier
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier]
         end
 
         type n_epochs = :auto | Integer
@@ -110,7 +120,7 @@ module OpenAI
         module NEpochs
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs]
         end
 
         type reasoning_effort = :default | :low | :medium | :high
@@ -123,7 +133,7 @@ module OpenAI
           MEDIUM: :medium
           HIGH: :high
 
-          def self?.values: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort]
+          def self?.values: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort]
         end
       end
     end
diff --git a/sig/openai/models/fine_tuning/reinforcement_method.rbs b/sig/openai/models/fine_tuning/reinforcement_method.rbs
index 4205559a..3f637b95 100644
--- a/sig/openai/models/fine_tuning/reinforcement_method.rbs
+++ b/sig/openai/models/fine_tuning/reinforcement_method.rbs
@@ -3,12 +3,12 @@ module OpenAI
     module FineTuning
       type reinforcement_method =
         {
-          grader: OpenAI::FineTuning::ReinforcementMethod::grader,
+          grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader,
           hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters
         }
 
       class ReinforcementMethod < OpenAI::Internal::Type::BaseModel
-        attr_accessor grader: OpenAI::FineTuning::ReinforcementMethod::grader
+        attr_accessor grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader
 
         attr_reader hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters?
@@ -17,10 +17,15 @@ module OpenAI
         ) -> OpenAI::FineTuning::ReinforcementHyperparameters
 
         def initialize: (
-          grader: OpenAI::FineTuning::ReinforcementMethod::grader,
+          grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader,
           ?hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters
         ) -> void
 
+        def to_hash: -> {
+          grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader,
+          hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters
+        }
+
         type grader =
           OpenAI::Graders::StringCheckGrader
           | OpenAI::Graders::TextSimilarityGrader
@@ -31,7 +36,7 @@ module OpenAI
         module Grader
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementMethod::grader]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementMethod::grader]
         end
       end
     end
diff --git a/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs
index 7ca1ee8f..ce01622a 100644
--- a/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs
+++ b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs
@@ -3,42 +3,48 @@ module OpenAI
     module FineTuning
       type supervised_hyperparameters =
         {
-          batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size,
-          learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier,
-          n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs
+          batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size,
+          learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier,
+          n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs
         }
 
       class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel
-        attr_reader batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size?
+        attr_reader batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size?
 
         def batch_size=: (
-          OpenAI::FineTuning::SupervisedHyperparameters::batch_size
-        ) -> OpenAI::FineTuning::SupervisedHyperparameters::batch_size
+          OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size
+        ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size
 
-        attr_reader learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier?
+        attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier?
 
         def learning_rate_multiplier=: (
-          OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier
-        ) -> OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier
+          OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier
+        ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier
 
-        attr_reader n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs?
+        attr_reader n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs?
         def n_epochs=: (
-          OpenAI::FineTuning::SupervisedHyperparameters::n_epochs
-        ) -> OpenAI::FineTuning::SupervisedHyperparameters::n_epochs
+          OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs
+        ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs
 
         def initialize: (
-          ?batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size,
-          ?learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier,
-          ?n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs
+          ?batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size,
+          ?learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier,
+          ?n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs
         ) -> void
 
+        def to_hash: -> {
+          batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size,
+          learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier,
+          n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs
+        }
+
         type batch_size = :auto | Integer
 
         module BatchSize
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::batch_size]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size]
         end
 
         type learning_rate_multiplier = :auto | Float
@@ -46,7 +52,7 @@ module OpenAI
         module LearningRateMultiplier
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier]
         end
 
         type n_epochs = :auto | Integer
@@ -54,7 +60,7 @@ module OpenAI
         module NEpochs
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::n_epochs]
+          def self?.variants: -> ::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs]
         end
       end
     end
diff --git a/sig/openai/models/fine_tuning/supervised_method.rbs b/sig/openai/models/fine_tuning/supervised_method.rbs
index eccaf9d3..dbebf97e 100644
--- a/sig/openai/models/fine_tuning/supervised_method.rbs
+++ b/sig/openai/models/fine_tuning/supervised_method.rbs
@@ -14,6 +14,10 @@ module OpenAI
         def initialize: (
           ?hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters
         ) -> void
+
+        def to_hash: -> {
+          hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters
+        }
       end
     end
   end
diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs
index 40bf94b9..7a9696f3 100644
--- a/sig/openai/models/function_definition.rbs
+++ b/sig/openai/models/function_definition.rbs
@@ -29,6 +29,13 @@ module OpenAI
         ?parameters: OpenAI::Models::function_parameters,
         ?strict: bool?
       ) -> void
+
+      def to_hash: -> {
+        name: String,
+        description: String,
+        parameters: OpenAI::Models::function_parameters,
+        strict: bool?
+      }
     end
   end
 end
diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs
index 93d8ef92..e6970199 100644
--- a/sig/openai/models/graders/label_model_grader.rbs
+++ b/sig/openai/models/graders/label_model_grader.rbs
@@ -35,30 +35,45 @@ module OpenAI
           ?type: :label_model
         ) -> void
 
+        def to_hash: -> {
+          input: ::Array[OpenAI::Graders::LabelModelGrader::Input],
+          labels: ::Array[String],
+          model: String,
+          name: String,
+          passing_labels: ::Array[String],
+          type: :label_model
+        }
+
         type input =
           {
-            content: OpenAI::Graders::LabelModelGrader::Input::content,
-            role: OpenAI::Graders::LabelModelGrader::Input::role,
-            type: OpenAI::Graders::LabelModelGrader::Input::type_
+            content: OpenAI::Models::Graders::LabelModelGrader::Input::content,
+            role: OpenAI::Models::Graders::LabelModelGrader::Input::role,
+            type: OpenAI::Models::Graders::LabelModelGrader::Input::type_
           }
 
         class Input < OpenAI::Internal::Type::BaseModel
-          attr_accessor content: OpenAI::Graders::LabelModelGrader::Input::content
+          attr_accessor content: OpenAI::Models::Graders::LabelModelGrader::Input::content
 
-          attr_accessor role: OpenAI::Graders::LabelModelGrader::Input::role
+          attr_accessor role: OpenAI::Models::Graders::LabelModelGrader::Input::role
 
-          attr_reader type: OpenAI::Graders::LabelModelGrader::Input::type_?
+          attr_reader type: OpenAI::Models::Graders::LabelModelGrader::Input::type_?
 
           def type=: (
-            OpenAI::Graders::LabelModelGrader::Input::type_
-          ) -> OpenAI::Graders::LabelModelGrader::Input::type_
+            OpenAI::Models::Graders::LabelModelGrader::Input::type_
+          ) -> OpenAI::Models::Graders::LabelModelGrader::Input::type_
 
           def initialize: (
-            content: OpenAI::Graders::LabelModelGrader::Input::content,
-            role: OpenAI::Graders::LabelModelGrader::Input::role,
-            ?type: OpenAI::Graders::LabelModelGrader::Input::type_
+            content: OpenAI::Models::Graders::LabelModelGrader::Input::content,
+            role: OpenAI::Models::Graders::LabelModelGrader::Input::role,
+            ?type: OpenAI::Models::Graders::LabelModelGrader::Input::type_
           ) -> void
 
+          def to_hash: -> {
+            content: OpenAI::Models::Graders::LabelModelGrader::Input::content,
+            role: OpenAI::Models::Graders::LabelModelGrader::Input::role,
+            type: OpenAI::Models::Graders::LabelModelGrader::Input::type_
+          }
+
           type content =
             String
             | OpenAI::Responses::ResponseInputText
@@ -75,9 +90,11 @@ module OpenAI
              attr_accessor type: :output_text
 
              def initialize: (text: String, ?type: :output_text) -> void
+
+              def to_hash: -> { text: String, type: :output_text }
            end
 
-            def self?.variants: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::content]
+            def self?.variants: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::content]
          end
 
          type role = :user | :assistant | :system | :developer
@@ -90,7 +107,7 @@ module OpenAI
            SYSTEM: :system
            DEVELOPER: :developer
 
-            def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::role]
+            def self?.values: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::role]
          end
 
          type type_ = :message
@@ -100,7 +117,7 @@ module OpenAI
            MESSAGE: :message
 
-            def self?.values: -> ::Array[OpenAI::Graders::LabelModelGrader::Input::type_]
+            def self?.values: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::type_]
          end
        end
      end
diff --git a/sig/openai/models/graders/multi_grader.rbs b/sig/openai/models/graders/multi_grader.rbs
index 295e5177..206d8144 100644
--- a/sig/openai/models/graders/multi_grader.rbs
+++ b/sig/openai/models/graders/multi_grader.rbs
@@ -6,7 +6,7 @@ module OpenAI
       type multi_grader =
         {
           calculate_output: String,
-          graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader],
+          graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader],
           name: String,
           type: :multi
         }
@@ -14,7 +14,7 @@ module OpenAI
       class MultiGrader < OpenAI::Internal::Type::BaseModel
         attr_accessor calculate_output: String
 
-        attr_accessor graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader]
+        attr_accessor graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader]
 
         attr_accessor name: String
 
@@ -22,11 +22,18 @@ module OpenAI
         def initialize: (
           calculate_output: String,
-          graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader],
+          graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader],
           name: String,
           ?type: :multi
         ) -> void
 
+        def to_hash: -> {
+          calculate_output: String,
+          graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader],
+          name: String,
+          type: :multi
+        }
+
         type grader =
           OpenAI::Graders::StringCheckGrader
           | OpenAI::Graders::TextSimilarityGrader
@@ -37,7 +44,7 @@ module OpenAI
         module Grader
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::Graders::MultiGrader::grader]
+          def self?.variants: -> ::Array[OpenAI::Models::Graders::MultiGrader::grader]
         end
       end
     end
diff --git a/sig/openai/models/graders/python_grader.rbs b/sig/openai/models/graders/python_grader.rbs
index e23c7c85..fdd1b7ac 100644
--- a/sig/openai/models/graders/python_grader.rbs
+++ b/sig/openai/models/graders/python_grader.rbs
@@ -23,6 +23,13 @@ module OpenAI
           ?image_tag: String,
           ?type: :python
         ) -> void
+
+        def to_hash: -> {
+          name: String,
+          source: String,
+          type: :python,
+          image_tag: String
+        }
       end
     end
   end
diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs
index 74b9785e..caf20048 100644
--- a/sig/openai/models/graders/score_model_grader.rbs
+++ b/sig/openai/models/graders/score_model_grader.rbs
@@ -39,30 +39,45 @@ module OpenAI
           ?type: :score_model
         ) -> void
 
+        def to_hash: -> {
+          input: ::Array[OpenAI::Graders::ScoreModelGrader::Input],
+          model: String,
+          name: String,
+          type: :score_model,
+          range: ::Array[Float],
+          sampling_params: top
+        }
+
         type input =
           {
-            content: OpenAI::Graders::ScoreModelGrader::Input::content,
-            role: OpenAI::Graders::ScoreModelGrader::Input::role,
-            type: OpenAI::Graders::ScoreModelGrader::Input::type_
+            content: OpenAI::Models::Graders::ScoreModelGrader::Input::content,
+            role: OpenAI::Models::Graders::ScoreModelGrader::Input::role,
+            type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_
           }
 
         class Input < OpenAI::Internal::Type::BaseModel
-          attr_accessor content: OpenAI::Graders::ScoreModelGrader::Input::content
+          attr_accessor content: OpenAI::Models::Graders::ScoreModelGrader::Input::content
 
-          attr_accessor role: OpenAI::Graders::ScoreModelGrader::Input::role
+          attr_accessor role: OpenAI::Models::Graders::ScoreModelGrader::Input::role
 
-          attr_reader type: OpenAI::Graders::ScoreModelGrader::Input::type_?
+          attr_reader type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_?
           def type=: (
-            OpenAI::Graders::ScoreModelGrader::Input::type_
-          ) -> OpenAI::Graders::ScoreModelGrader::Input::type_
+            OpenAI::Models::Graders::ScoreModelGrader::Input::type_
+          ) -> OpenAI::Models::Graders::ScoreModelGrader::Input::type_
 
           def initialize: (
-            content: OpenAI::Graders::ScoreModelGrader::Input::content,
-            role: OpenAI::Graders::ScoreModelGrader::Input::role,
-            ?type: OpenAI::Graders::ScoreModelGrader::Input::type_
+            content: OpenAI::Models::Graders::ScoreModelGrader::Input::content,
+            role: OpenAI::Models::Graders::ScoreModelGrader::Input::role,
+            ?type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_
           ) -> void
 
+          def to_hash: -> {
+            content: OpenAI::Models::Graders::ScoreModelGrader::Input::content,
+            role: OpenAI::Models::Graders::ScoreModelGrader::Input::role,
+            type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_
+          }
+
           type content =
             String
             | OpenAI::Responses::ResponseInputText
@@ -79,9 +94,11 @@ module OpenAI
              attr_accessor type: :output_text
 
              def initialize: (text: String, ?type: :output_text) -> void
+
+              def to_hash: -> { text: String, type: :output_text }
            end
 
-            def self?.variants: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::content]
+            def self?.variants: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::content]
          end
 
          type role = :user | :assistant | :system | :developer
@@ -94,7 +111,7 @@ module OpenAI
            SYSTEM: :system
            DEVELOPER: :developer
 
-            def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::role]
+            def self?.values: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::role]
          end
 
          type type_ = :message
@@ -104,7 +121,7 @@ module OpenAI
            MESSAGE: :message
 
-            def self?.values: -> ::Array[OpenAI::Graders::ScoreModelGrader::Input::type_]
+            def self?.values: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::type_]
          end
        end
      end
diff --git a/sig/openai/models/graders/string_check_grader.rbs b/sig/openai/models/graders/string_check_grader.rbs
index 5f60ffb0..039498de 100644
--- a/sig/openai/models/graders/string_check_grader.rbs
+++ b/sig/openai/models/graders/string_check_grader.rbs
@@ -7,7 +7,7 @@ module OpenAI
         {
           input: String,
           name: String,
-          operation: OpenAI::Graders::StringCheckGrader::operation,
+          operation: OpenAI::Models::Graders::StringCheckGrader::operation,
           reference: String,
           type: :string_check
         }
@@ -17,7 +17,7 @@ module OpenAI
 
         attr_accessor name: String
 
-        attr_accessor operation: OpenAI::Graders::StringCheckGrader::operation
+        attr_accessor operation: OpenAI::Models::Graders::StringCheckGrader::operation
 
         attr_accessor reference: String
 
@@ -26,11 +26,19 @@ module OpenAI
         def initialize: (
           input: String,
           name: String,
-          operation: OpenAI::Graders::StringCheckGrader::operation,
+          operation: OpenAI::Models::Graders::StringCheckGrader::operation,
           reference: String,
           ?type: :string_check
         ) -> void
 
+        def to_hash: -> {
+          input: String,
+          name: String,
+          operation: OpenAI::Models::Graders::StringCheckGrader::operation,
+          reference: String,
+          type: :string_check
+        }
+
         type operation = :eq | :ne | :like | :ilike
 
         module Operation
@@ -41,7 +49,7 @@ module OpenAI
           EQ: :eq
           NE: :ne
           LIKE: :like
           ILIKE: :ilike
 
-          def self?.values: -> ::Array[OpenAI::Graders::StringCheckGrader::operation]
+          def self?.values: -> ::Array[OpenAI::Models::Graders::StringCheckGrader::operation]
         end
       end
     end
diff --git a/sig/openai/models/graders/text_similarity_grader.rbs b/sig/openai/models/graders/text_similarity_grader.rbs
index 24453b12..9002b540 100644
--- a/sig/openai/models/graders/text_similarity_grader.rbs
+++ b/sig/openai/models/graders/text_similarity_grader.rbs
@@ -5,7 +5,7 @@ module OpenAI
     module Graders
       type text_similarity_grader =
         {
-          evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric,
+          evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric,
           input: String,
           name: String,
           reference: String,
@@ -13,7 +13,7 @@ module OpenAI
         }
 
       class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel
-        attr_accessor evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric
+        attr_accessor evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric
 
         attr_accessor input: String
 
@@ -24,13 +24,21 @@ module OpenAI
         attr_accessor type: :text_similarity
 
         def initialize: (
-          evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric,
+          evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric,
           input: String,
           name: String,
           reference: String,
           ?type: :text_similarity
         ) -> void
 
+        def to_hash: -> {
+          evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric,
+          input: String,
+          name: String,
+          reference: String,
+          type: :text_similarity
+        }
+
         type evaluation_metric =
           :fuzzy_match
           | :bleu
@@ -57,7 +65,7 @@ module OpenAI
           ROUGE_5: :rouge_5
           ROUGE_L: :rouge_l
 
-          def self?.values: -> ::Array[OpenAI::Graders::TextSimilarityGrader::evaluation_metric]
+          def self?.values: -> ::Array[OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric]
         end
       end
     end
diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs
index 96a4fccc..fd7554b0 100644
--- a/sig/openai/models/image.rbs
+++ b/sig/openai/models/image.rbs
@@ -20,6 +20,12 @@ module OpenAI
         ?revised_prompt: String,
         ?url: String
       ) -> void
+
+      def to_hash: -> {
+        :b64_json => String,
+        revised_prompt: String,
+        url: String
+      }
     end
   end
 end
diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs
index 8d1342cf..47601c14 100644
--- a/sig/openai/models/image_create_variation_params.rbs
+++ b/sig/openai/models/image_create_variation_params.rbs
@@ -39,6 +39,16 @@ module OpenAI
         ?request_options: OpenAI::request_opts
       ) -> void
 
+      def to_hash: -> {
+        image: OpenAI::Internal::file_input,
+        model: OpenAI::Models::ImageCreateVariationParams::model?,
+        n: Integer?,
+        response_format: OpenAI::Models::ImageCreateVariationParams::response_format?,
+        size: OpenAI::Models::ImageCreateVariationParams::size?,
+        user: String,
+        request_options: OpenAI::RequestOptions
+      }
+
       type model = String | OpenAI::Models::image_model
 
       module Model
diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs
index ef128fa7..0fe6ec3d 100644
--- a/sig/openai/models/image_edit_params.rbs
+++ b/sig/openai/models/image_edit_params.rbs
@@ -57,6 +57,20 @@ module OpenAI
         ?request_options: OpenAI::request_opts
       ) -> void
 
+      def to_hash: -> {
+        image: OpenAI::Models::ImageEditParams::image,
+        prompt: String,
+        background: OpenAI::Models::ImageEditParams::background?,
+        mask: OpenAI::Internal::file_input,
+        model: OpenAI::Models::ImageEditParams::model?,
+        n: Integer?,
+        quality: OpenAI::Models::ImageEditParams::quality?,
+        response_format: OpenAI::Models::ImageEditParams::response_format?,
+        size: OpenAI::Models::ImageEditParams::size?,
+        user: String,
+        request_options: OpenAI::RequestOptions
+      }
+
       type image =
         OpenAI::Internal::file_input
         | ::Array[OpenAI::Internal::file_input]
diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs
index d6367505..e9b0e2e0 100644
--- a/sig/openai/models/image_generate_params.rbs
+++ b/sig/openai/models/image_generate_params.rbs
@@ -63,6 +63,22 @@ module OpenAI
         ?request_options: OpenAI::request_opts
       ) -> void
 
+      def to_hash: -> {
+        prompt: String,
+        background: OpenAI::Models::ImageGenerateParams::background?,
+        model: OpenAI::Models::ImageGenerateParams::model?,
+        moderation: OpenAI::Models::ImageGenerateParams::moderation?,
+        n: Integer?,
+        output_compression: Integer?,
+        output_format: OpenAI::Models::ImageGenerateParams::output_format?,
+        quality: OpenAI::Models::ImageGenerateParams::quality?,
+        response_format: OpenAI::Models::ImageGenerateParams::response_format?,
+        size: OpenAI::Models::ImageGenerateParams::size?,
+        style: OpenAI::Models::ImageGenerateParams::style?,
+        user: String,
+        request_options: OpenAI::RequestOptions
+      }
+
       type background = :transparent | :opaque | :auto
 
       module Background
diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs
index da6579dc..fd899bb5 100644
--- a/sig/openai/models/images_response.rbs
+++ b/sig/openai/models/images_response.rbs
@@ -26,6 +26,12 @@ module OpenAI
         ?usage: OpenAI::ImagesResponse::Usage
       ) -> void
 
+      def to_hash: -> {
+        created: Integer,
+        data: ::Array[OpenAI::Image],
+        usage: OpenAI::ImagesResponse::Usage
+      }
+
       type usage =
         {
           input_tokens: Integer,
@@ -50,6 +56,13 @@ module OpenAI
           total_tokens: Integer
         ) -> void
 
+        def to_hash: -> {
+          input_tokens: Integer,
+          input_tokens_details: OpenAI::ImagesResponse::Usage::InputTokensDetails,
+          output_tokens: Integer,
+          total_tokens: Integer
+        }
+
         type input_tokens_details =
           { image_tokens: Integer, text_tokens: Integer }
 
@@ -59,6 +72,8 @@ module OpenAI
           attr_accessor text_tokens: Integer
 
           def initialize: (image_tokens: Integer, text_tokens: Integer) -> void
+
+          def to_hash: -> { image_tokens: Integer, text_tokens: Integer }
         end
       end
     end
diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs
index 79402173..f6d604c9 100644
--- a/sig/openai/models/model.rbs
+++ b/sig/openai/models/model.rbs
@@ -18,6 +18,13 @@ module OpenAI
         owned_by: String,
         ?object: :model
       ) -> void
+
+      def to_hash: -> {
+        id: String,
+        created: Integer,
+        object: :model,
+        owned_by: String
+      }
     end
   end
 end
diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs
index 16285da7..68406d8c 100644
--- a/sig/openai/models/model_delete_params.rbs
+++ b/sig/openai/models/model_delete_params.rbs
@@ -7,6 +7,8 @@ module OpenAI
       include OpenAI::Internal::Type::RequestParameters
 
       def initialize: (?request_options: OpenAI::request_opts) -> void
+
+      def to_hash: -> { request_options: OpenAI::RequestOptions }
     end
   end
 end
diff --git a/sig/openai/models/model_deleted.rbs b/sig/openai/models/model_deleted.rbs
index b95b2a67..2676f627 100644
--- a/sig/openai/models/model_deleted.rbs
+++ b/sig/openai/models/model_deleted.rbs
@@ -10,6 +10,8 @@ module OpenAI
       attr_accessor object: String
 
       def initialize: (id: String, deleted: bool, object: String) -> void
+
+      def to_hash: -> { id: String, deleted: bool, object: String }
     end
   end
 end
diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs
index 37d678e8..915bdcf6 100644
--- a/sig/openai/models/model_list_params.rbs
+++ b/sig/openai/models/model_list_params.rbs
@@ -7,6 +7,8 @@ module OpenAI
       include OpenAI::Internal::Type::RequestParameters
 
       def initialize: (?request_options: OpenAI::request_opts) -> void
+
+      def to_hash: -> { request_options: OpenAI::RequestOptions }
     end
   end
 end
diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs
index fed08da1..c764c41a 100644
--- a/sig/openai/models/model_retrieve_params.rbs
+++ b/sig/openai/models/model_retrieve_params.rbs
@@ -8,6 +8,8 @@ module OpenAI
       include OpenAI::Internal::Type::RequestParameters
 
       def initialize: (?request_options: OpenAI::request_opts) -> void
+
+      def to_hash: -> { request_options: OpenAI::RequestOptions }
     end
   end
 end
diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs
index 7d543126..379bbdc3 100644
--- a/sig/openai/models/moderation.rbs
+++ b/sig/openai/models/moderation.rbs
@@ -24,6 +24,13 @@ module OpenAI
         flagged: bool
       ) -> void
 
+      def to_hash: -> {
+        categories: OpenAI::Moderation::Categories,
+        category_applied_input_types: OpenAI::Moderation::CategoryAppliedInputTypes,
+        category_scores: OpenAI::Moderation::CategoryScores,
+        flagged: bool
+      }
+
      type categories =
        {
          harassment: bool,
@@ -83,68 +90,100 @@ module OpenAI
          violence: bool,
          violence_graphic: bool
        ) -> void
+
+        def to_hash: -> {
+          harassment: bool,
+          harassment_threatening: bool,
+          hate: bool,
+          hate_threatening: bool,
+          illicit: bool?,
+          illicit_violent: bool?,
+          self_harm: bool,
+          self_harm_instructions: bool,
+          self_harm_intent: bool,
+          sexual: bool,
+          sexual_minors: bool,
+          violence: bool,
+          violence_graphic: bool
+        }
      end
 
      type category_applied_input_types =
        {
-          harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment],
-          harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening],
-          hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate],
-          hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening],
-          illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit],
-          illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent],
-          self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm],
-          self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction],
-          self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent],
-          sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual],
-          sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor],
-          violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence],
-          violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic]
+          harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment],
+          harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening],
+          hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate],
+          hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening],
+          illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit],
+          illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent],
+          self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm],
+          self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction],
+          self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent],
+          sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual],
+          sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor],
+          violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence],
+          violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic]
        }
 
      class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel
-        attr_accessor harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment]
+        attr_accessor harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment]
 
-        attr_accessor harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening]
+        attr_accessor harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening]
 
-        attr_accessor hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate]
+        attr_accessor hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate]
 
-        attr_accessor hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening]
+        attr_accessor hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening]
 
-        attr_accessor illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit]
+        attr_accessor illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit]
 
-        attr_accessor illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent]
+        attr_accessor illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent]
 
-        attr_accessor self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm]
+        attr_accessor self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm]
 
-        attr_accessor self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction]
+        attr_accessor self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction]
 
-        attr_accessor self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent]
+        attr_accessor self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent]
 
-        attr_accessor sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual]
+        attr_accessor sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual]
 
-        attr_accessor sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor]
+        attr_accessor sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor]
 
-        attr_accessor violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence]
+        attr_accessor violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence]
 
-        attr_accessor violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic]
+        attr_accessor violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic]
 
        def initialize: (
-          harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment],
-          harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening],
-          hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate],
-          hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening],
-          illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit],
-          illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent],
-          self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm],
-          self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction],
-          self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent],
-          sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual],
-          sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor],
-          violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence],
-          violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic]
+          harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment],
+          harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening],
+          hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate],
+          hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening],
+          illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit],
+          illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent],
+          self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm],
+          self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction],
+          self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent],
+          sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual],
+          sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor],
+          violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence],
+          violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic]
        ) -> void
 
+        def to_hash: -> {
+          harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment],
+          harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening],
+          hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate],
+          hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening],
+          illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit],
+          illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent],
+          self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm],
+          self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction],
+          self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent],
+          sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual],
+          sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor],
+          violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence],
+          violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic]
+        }
+
        type harassment = :text
 
        module Harassment
@@ -152,7 +191,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment]
        end
 
        type harassment_threatening = :text
@@ -162,7 +201,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening]
        end
 
        type hate = :text
@@ -172,7 +211,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate]
        end
 
        type hate_threatening = :text
@@ -182,7 +221,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening]
        end
 
        type illicit = :text
@@ -192,7 +231,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit]
        end
 
        type illicit_violent = :text
@@ -202,7 +241,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent]
        end
 
        type self_harm = :text | :image
@@ -213,7 +252,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm]
        end
 
        type self_harm_instruction = :text | :image
@@ -224,7 +263,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction]
        end
 
        type self_harm_intent = :text | :image
@@ -235,7 +274,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent]
        end
 
        type sexual = :text | :image
@@ -246,7 +285,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual]
        end
 
        type sexual_minor = :text
@@ -256,7 +295,7 @@ module OpenAI
          TEXT: :text
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor]
        end
 
        type violence = :text | :image
@@ -267,7 +306,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence]
        end
 
        type violence_graphic = :text | :image
@@ -278,7 +317,7 @@ module OpenAI
          TEXT: :text
          IMAGE: :image
 
-          def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic]
+          def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic]
        end
      end
 
@@ -341,6 +380,22 @@ module OpenAI
          violence: Float,
          violence_graphic: Float
        ) -> void
+
+        def to_hash: -> {
+          harassment: Float,
+          harassment_threatening: Float,
+          hate: Float,
+          hate_threatening: Float,
+          illicit: Float,
+          illicit_violent: Float,
+          self_harm: Float,
+          self_harm_instructions: Float,
+          self_harm_intent: Float,
+          sexual: Float,
+          sexual_minors: Float,
+          violence: Float,
+          violence_graphic: Float
+        }
      end
    end
  end
diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs
index f681a3bb..832d4052 100644
--- a/sig/openai/models/moderation_create_params.rbs
+++ b/sig/openai/models/moderation_create_params.rbs
@@ -25,6 +25,12 @@ module OpenAI
         ?request_options: OpenAI::request_opts
       ) -> void
 
+      def to_hash: -> {
+        input: OpenAI::Models::ModerationCreateParams::input,
+        model: OpenAI::Models::ModerationCreateParams::model,
+        request_options: OpenAI::RequestOptions
+      }
+
      type input = String | ::Array[String]
diff --git a/sig/openai/models/moderation_create_response.rbs b/sig/openai/models/moderation_create_response.rbs
index 616ad949..6f9757a1 100644
--- a/sig/openai/models/moderation_create_response.rbs
+++ b/sig/openai/models/moderation_create_response.rbs
@@ -15,6 +15,12 @@ module OpenAI
         model: String,
         results: ::Array[OpenAI::Moderation]
       ) -> void
+
+      def to_hash: -> {
+        id: String,
+        model: String,
+        results: ::Array[OpenAI::Moderation]
+      }
     end
   end
 end
diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs
index e099ab00..d373c517 100644
--- a/sig/openai/models/moderation_image_url_input.rbs
+++ b/sig/openai/models/moderation_image_url_input.rbs
@@ -13,12 +13,19 @@ module OpenAI
         ?type: :image_url
       ) -> void
 
+      def to_hash: -> {
+        image_url: OpenAI::ModerationImageURLInput::ImageURL,
+        type: :image_url
+      }
+
       type image_url = { url: String }
 
       class ImageURL < OpenAI::Internal::Type::BaseModel
         attr_accessor url: String
 
         def initialize: (url: String) -> void
+
+        def to_hash: -> { url: String }
       end
     end
   end
diff --git a/sig/openai/models/moderation_text_input.rbs b/sig/openai/models/moderation_text_input.rbs
index 6f258e86..fea26990 100644
--- a/sig/openai/models/moderation_text_input.rbs
+++ b/sig/openai/models/moderation_text_input.rbs
@@ -8,6 +8,8 @@ module OpenAI
       attr_accessor type: :text
 
       def initialize: (text: String, ?type: :text) -> void
+
+      def to_hash: -> { text: String, type: :text }
     end
   end
 end
diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs
index 90cc48b4..10298f79 100644
--- a/sig/openai/models/other_file_chunking_strategy_object.rbs
+++ b/sig/openai/models/other_file_chunking_strategy_object.rbs
@@ -6,6 +6,8 @@ module OpenAI
       attr_accessor type: :other
 
       def initialize: (?type: :other) -> void
+
+      def to_hash: -> { type: :other }
     end
   end
 end
diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs
index 9cfe03fc..7a7e744d 100644
--- a/sig/openai/models/reasoning.rbs
+++ b/sig/openai/models/reasoning.rbs
@@ -3,23 +3,29 @@ module OpenAI
     type reasoning =
       {
         effort: OpenAI::Models::reasoning_effort?,
-        generate_summary: OpenAI::Reasoning::generate_summary?,
-        summary: OpenAI::Reasoning::summary?
+        generate_summary: OpenAI::Models::Reasoning::generate_summary?,
+        summary: OpenAI::Models::Reasoning::summary?
       }
 
     class Reasoning < OpenAI::Internal::Type::BaseModel
       attr_accessor effort: OpenAI::Models::reasoning_effort?
 
-      attr_accessor generate_summary: OpenAI::Reasoning::generate_summary?
+      attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary?
 
-      attr_accessor summary: OpenAI::Reasoning::summary?
+      attr_accessor summary: OpenAI::Models::Reasoning::summary?
       def initialize: (
         ?effort: OpenAI::Models::reasoning_effort?,
-        ?generate_summary: OpenAI::Reasoning::generate_summary?,
-        ?summary: OpenAI::Reasoning::summary?
+        ?generate_summary: OpenAI::Models::Reasoning::generate_summary?,
+        ?summary: OpenAI::Models::Reasoning::summary?
       ) -> void
 
+      def to_hash: -> {
+        effort: OpenAI::Models::reasoning_effort?,
+        generate_summary: OpenAI::Models::Reasoning::generate_summary?,
+        summary: OpenAI::Models::Reasoning::summary?
+      }
+
       type generate_summary = :auto | :concise | :detailed
 
       module GenerateSummary
@@ -29,7 +35,7 @@ module OpenAI
         AUTO: :auto
         CONCISE: :concise
         DETAILED: :detailed
 
-        def self?.values: -> ::Array[OpenAI::Reasoning::generate_summary]
+        def self?.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary]
       end
 
       type summary = :auto | :concise | :detailed
@@ -41,7 +47,7 @@ module OpenAI
         CONCISE: :concise
         DETAILED: :detailed
 
-        def self?.values: -> ::Array[OpenAI::Reasoning::summary]
+        def self?.values: -> ::Array[OpenAI::Models::Reasoning::summary]
       end
     end
   end
diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs
index b34ea82e..f8141178 100644
--- a/sig/openai/models/response_format_json_object.rbs
+++ b/sig/openai/models/response_format_json_object.rbs
@@ -6,6 +6,8 @@ module OpenAI
       attr_accessor type: :json_object
 
       def initialize: (?type: :json_object) -> void
+
+      def to_hash: -> { type: :json_object }
     end
   end
 end
diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs
index 6f200e90..e806b510 100644
--- a/sig/openai/models/response_format_json_schema.rbs
+++ b/sig/openai/models/response_format_json_schema.rbs
@@ -16,6 +16,11 @@ module OpenAI
         ?type: :json_schema
       ) -> void
 
+      def to_hash: -> {
+        json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema,
+        type: :json_schema
+      }
+
       type json_schema =
         {
           name: String,
@@ -43,6 +48,13 @@ module OpenAI
           ?schema: ::Hash[Symbol, top],
           ?strict: bool?
         ) -> void
+
+        def to_hash: -> {
+          name: String,
+          description: String,
+          schema: ::Hash[Symbol, top],
+          strict: bool?
+        }
       end
     end
   end
diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs
index b245c218..305c9a4d 100644
--- a/sig/openai/models/response_format_text.rbs
+++ b/sig/openai/models/response_format_text.rbs
@@ -6,6 +6,8 @@ module OpenAI
       attr_accessor type: :text
 
       def initialize: (?type: :text) -> void
+
+      def to_hash: -> { type: :text }
     end
   end
 end
diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs
index 23544e1a..a2e42564 100644
--- a/sig/openai/models/responses/computer_tool.rbs
+++ b/sig/openai/models/responses/computer_tool.rbs
@@ -5,7 +5,7 @@ module OpenAI
         {
           display_height: Integer,
           display_width: Integer,
-          environment: OpenAI::Responses::ComputerTool::environment,
+          environment: OpenAI::Models::Responses::ComputerTool::environment,
           type: :computer_use_preview
         }
 
@@ -14,17 +14,24 @@ module OpenAI
 
         attr_accessor display_width: Integer
 
-        attr_accessor environment: OpenAI::Responses::ComputerTool::environment
+        attr_accessor environment: OpenAI::Models::Responses::ComputerTool::environment
 
         attr_accessor type: :computer_use_preview
 
         def initialize: (
           display_height: Integer,
           display_width: Integer,
-          environment: OpenAI::Responses::ComputerTool::environment,
+          environment: OpenAI::Models::Responses::ComputerTool::environment,
           ?type: :computer_use_preview
         ) -> void
 
+        def to_hash: -> {
+          display_height: Integer,
+          display_width: Integer,
+          environment: OpenAI::Models::Responses::ComputerTool::environment,
+          type: :computer_use_preview
+        }
+
         type environment = :windows | :mac | :linux | :ubuntu | :browser
 
         module Environment
@@ -36,7 +43,7 @@ module OpenAI
           UBUNTU: :ubuntu
           BROWSER: :browser
 
-          def self?.values: -> ::Array[OpenAI::Responses::ComputerTool::environment]
+          def self?.values: -> ::Array[OpenAI::Models::Responses::ComputerTool::environment]
         end
       end
     end
diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs
index 0b52f94f..eb94d8c1 100644
--- a/sig/openai/models/responses/easy_input_message.rbs
+++ b/sig/openai/models/responses/easy_input_message.rbs
@@ -3,28 +3,34 @@ module OpenAI
     module Responses
       type easy_input_message =
         {
-          content: OpenAI::Responses::EasyInputMessage::content,
-          role: OpenAI::Responses::EasyInputMessage::role,
-          type: OpenAI::Responses::EasyInputMessage::type_
+          content: OpenAI::Models::Responses::EasyInputMessage::content,
+          role: OpenAI::Models::Responses::EasyInputMessage::role,
+          type: OpenAI::Models::Responses::EasyInputMessage::type_
         }
 
       class EasyInputMessage < OpenAI::Internal::Type::BaseModel
-        attr_accessor content: OpenAI::Responses::EasyInputMessage::content
+        attr_accessor content: OpenAI::Models::Responses::EasyInputMessage::content
 
-        attr_accessor role: OpenAI::Responses::EasyInputMessage::role
+        attr_accessor role: OpenAI::Models::Responses::EasyInputMessage::role
 
-        attr_reader type: OpenAI::Responses::EasyInputMessage::type_?
+        attr_reader type: OpenAI::Models::Responses::EasyInputMessage::type_?
         def type=: (
-          OpenAI::Responses::EasyInputMessage::type_
-        ) -> OpenAI::Responses::EasyInputMessage::type_
+          OpenAI::Models::Responses::EasyInputMessage::type_
+        ) -> OpenAI::Models::Responses::EasyInputMessage::type_
 
         def initialize: (
-          content: OpenAI::Responses::EasyInputMessage::content,
-          role: OpenAI::Responses::EasyInputMessage::role,
-          ?type: OpenAI::Responses::EasyInputMessage::type_
+          content: OpenAI::Models::Responses::EasyInputMessage::content,
+          role: OpenAI::Models::Responses::EasyInputMessage::role,
+          ?type: OpenAI::Models::Responses::EasyInputMessage::type_
         ) -> void
 
+        def to_hash: -> {
+          content: OpenAI::Models::Responses::EasyInputMessage::content,
+          role: OpenAI::Models::Responses::EasyInputMessage::role,
+          type: OpenAI::Models::Responses::EasyInputMessage::type_
+        }
+
         type content =
           String
           | OpenAI::Models::Responses::response_input_message_content_list
@@ -32,7 +38,7 @@ module OpenAI
         module Content
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::Responses::EasyInputMessage::content]
+          def self?.variants: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::content]
         end
 
         type role = :user | :assistant | :system | :developer
@@ -45,7 +51,7 @@ module OpenAI
           SYSTEM: :system
           DEVELOPER: :developer
 
-          def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::role]
+          def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::role]
         end
 
         type type_ = :message
@@ -55,7 +61,7 @@ module OpenAI
           MESSAGE: :message
 
-          def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::type_]
+          def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_]
         end
       end
     end
diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs
index da6a8189..cc1a7d01 100644
--- a/sig/openai/models/responses/file_search_tool.rbs
+++ b/sig/openai/models/responses/file_search_tool.rbs
@@ -5,7 +5,7 @@ module OpenAI
         {
           type: :file_search,
           vector_store_ids: ::Array[String],
-          filters: OpenAI::Responses::FileSearchTool::filters?,
+          filters: OpenAI::Models::Responses::FileSearchTool::filters?,
           max_num_results: Integer,
           ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions
         }
@@ -15,7 +15,7 @@ module OpenAI
 
         attr_accessor vector_store_ids: ::Array[String]
 
-        attr_accessor filters: OpenAI::Responses::FileSearchTool::filters?
+        attr_accessor filters: OpenAI::Models::Responses::FileSearchTool::filters?
 
         attr_reader max_num_results: Integer?
@@ -29,42 +29,55 @@ module OpenAI
         def initialize: (
           vector_store_ids: ::Array[String],
-          ?filters: OpenAI::Responses::FileSearchTool::filters?,
+          ?filters: OpenAI::Models::Responses::FileSearchTool::filters?,
           ?max_num_results: Integer,
           ?ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions,
           ?type: :file_search
         ) -> void
 
+        def to_hash: -> {
+          type: :file_search,
+          vector_store_ids: ::Array[String],
+          filters: OpenAI::Models::Responses::FileSearchTool::filters?,
+          max_num_results: Integer,
+          ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions
+        }
+
         type filters = OpenAI::ComparisonFilter | OpenAI::CompoundFilter
 
         module Filters
           extend OpenAI::Internal::Type::Union
 
-          def self?.variants: -> ::Array[OpenAI::Responses::FileSearchTool::filters]
+          def self?.variants: -> ::Array[OpenAI::Models::Responses::FileSearchTool::filters]
         end
 
         type ranking_options =
           {
-            ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker,
+            ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker,
             score_threshold: Float
           }
 
         class RankingOptions < OpenAI::Internal::Type::BaseModel
-          attr_reader ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker?
+          attr_reader ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker?
 
           def ranker=: (
-            OpenAI::Responses::FileSearchTool::RankingOptions::ranker
-          ) -> OpenAI::Responses::FileSearchTool::RankingOptions::ranker
+            OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker
+          ) -> OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker
 
           attr_reader score_threshold: Float?
 
           def score_threshold=: (Float) -> Float
 
           def initialize: (
-            ?ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker,
+            ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker,
             ?score_threshold: Float
           ) -> void
 
+          def to_hash: -> {
+            ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker,
+            score_threshold: Float
+          }
+
           type ranker = :auto | :"default-2024-11-15"
 
           module Ranker
             AUTO: :auto
             DEFAULT_2024_11_15: :"default-2024-11-15"
 
-            def self?.values: -> ::Array[OpenAI::Responses::FileSearchTool::RankingOptions::ranker]
+            def self?.values: -> ::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker]
           end
         end
       end
diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs
index eb7df928..df249b37 100644
--- a/sig/openai/models/responses/function_tool.rbs
+++ b/sig/openai/models/responses/function_tool.rbs
@@ -28,6 +28,14 @@ module OpenAI
           ?description: String?,
           ?type: :function
         ) -> void
+
+        def to_hash: -> {
+          name: String,
+          parameters: ::Hash[Symbol, top]?,
+          strict: bool?,
+          type: :function,
+          description: String?
+        }
       end
     end
   end
diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs
index 0897b4e9..4760e97a 100644
--- a/sig/openai/models/responses/input_item_list_params.rbs
+++ b/sig/openai/models/responses/input_item_list_params.rbs
@@ -48,6 +48,15 @@ module OpenAI
           ?request_options: OpenAI::request_opts
         ) -> void
 
+        def to_hash: -> {
+          after: String,
+          before: String,
+          include: ::Array[OpenAI::Models::Responses::response_includable],
+          limit: Integer,
+          order: OpenAI::Models::Responses::InputItemListParams::order,
+          request_options: OpenAI::RequestOptions
+        }
+
         type order = :asc | :desc
 
         module Order
diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs
index 4b767068..d5658b8f 100644
--- a/sig/openai/models/responses/response.rbs
+++ b/sig/openai/models/responses/response.rbs
@@ -14,17 +14,17 @@ module OpenAI
           output: ::Array[OpenAI::Models::Responses::response_output_item],
           parallel_tool_calls: bool,
           temperature: Float?,
-          tool_choice: OpenAI::Responses::Response::tool_choice,
+          tool_choice: OpenAI::Models::Responses::Response::tool_choice,
           tools: ::Array[OpenAI::Models::Responses::tool],
           top_p: Float?,
           background: bool?,
           max_output_tokens: Integer?,
           previous_response_id: String?,
           reasoning: OpenAI::Reasoning?,
-          service_tier: OpenAI::Responses::Response::service_tier?,
+          service_tier: OpenAI::Models::Responses::Response::service_tier?,
           status: OpenAI::Models::Responses::response_status,
           text: OpenAI::Responses::ResponseTextConfig,
-          truncation: OpenAI::Responses::Response::truncation?,
+          truncation: OpenAI::Models::Responses::Response::truncation?,
           usage: OpenAI::Responses::ResponseUsage,
           user: String
         }
@@ -52,7 +52,7 @@ module OpenAI
 
         attr_accessor temperature: Float?
 
-        attr_accessor tool_choice: OpenAI::Responses::Response::tool_choice
+        attr_accessor tool_choice: OpenAI::Models::Responses::Response::tool_choice
 
         attr_accessor tools: ::Array[OpenAI::Models::Responses::tool]
 
@@ -66,7 +66,7 @@ module OpenAI
 
         attr_accessor reasoning: OpenAI::Reasoning?
 
-        attr_accessor service_tier: OpenAI::Responses::Response::service_tier?
+        attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier?
 
         attr_reader status: OpenAI::Models::Responses::response_status?
 
@@ -80,7 +80,7 @@ module OpenAI
           OpenAI::Responses::ResponseTextConfig
         ) -> OpenAI::Responses::ResponseTextConfig
 
-        attr_accessor truncation: OpenAI::Responses::Response::truncation?
+        attr_accessor truncation: OpenAI::Models::Responses::Response::truncation?
 
         attr_reader usage: OpenAI::Responses::ResponseUsage?
@@ -103,36 +103,69 @@ module OpenAI
           output: ::Array[OpenAI::Models::Responses::response_output_item],
           parallel_tool_calls: bool,
           temperature: Float?,
-          tool_choice: OpenAI::Responses::Response::tool_choice,
+          tool_choice: OpenAI::Models::Responses::Response::tool_choice,
           tools: ::Array[OpenAI::Models::Responses::tool],
           top_p: Float?,
           ?background: bool?,
           ?max_output_tokens: Integer?,
           ?previous_response_id: String?,
           ?reasoning: OpenAI::Reasoning?,
-          ?service_tier: OpenAI::Responses::Response::service_tier?,
+          ?service_tier: OpenAI::Models::Responses::Response::service_tier?,
           ?status: OpenAI::Models::Responses::response_status,
           ?text: OpenAI::Responses::ResponseTextConfig,
-          ?truncation: OpenAI::Responses::Response::truncation?,
+          ?truncation: OpenAI::Models::Responses::Response::truncation?,
           ?usage: OpenAI::Responses::ResponseUsage,
           ?user: String,
           ?object: :response
         ) -> void
 
+        def to_hash: -> {
+          id: String,
+          created_at: Float,
+          error: OpenAI::Responses::ResponseError?,
+          incomplete_details: OpenAI::Responses::Response::IncompleteDetails?,
+          instructions: String?,
+          metadata: OpenAI::Models::metadata?,
+          model: OpenAI::Models::responses_model,
+          object: :response,
+          output: ::Array[OpenAI::Models::Responses::response_output_item],
+          parallel_tool_calls: bool,
+          temperature: Float?,
+          tool_choice: OpenAI::Models::Responses::Response::tool_choice,
+          tools: ::Array[OpenAI::Models::Responses::tool],
+          top_p: Float?,
+          background: bool?,
+          max_output_tokens: Integer?,
+          previous_response_id: String?,
+          reasoning: OpenAI::Reasoning?,
+          service_tier: OpenAI::Models::Responses::Response::service_tier?,
+          status: OpenAI::Models::Responses::response_status,
+          text: OpenAI::Responses::ResponseTextConfig,
+          truncation: OpenAI::Models::Responses::Response::truncation?,
+          usage: OpenAI::Responses::ResponseUsage,
+          user: String
+        }
+
         type incomplete_details =
-          { reason: OpenAI::Responses::Response::IncompleteDetails::reason }
+          {
+            reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason
+          }
 
         class IncompleteDetails < OpenAI::Internal::Type::BaseModel
-          attr_reader reason: OpenAI::Responses::Response::IncompleteDetails::reason?
+          attr_reader reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason?
def reason=: ( - OpenAI::Responses::Response::IncompleteDetails::reason - ) -> OpenAI::Responses::Response::IncompleteDetails::reason + OpenAI::Models::Responses::Response::IncompleteDetails::reason + ) -> OpenAI::Models::Responses::Response::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Responses::Response::IncompleteDetails::reason + ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + } + type reason = :max_output_tokens | :content_filter module Reason @@ -141,7 +174,7 @@ module OpenAI MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter - def self?.values: -> ::Array[OpenAI::Responses::Response::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::IncompleteDetails::reason] end end @@ -153,7 +186,7 @@ module OpenAI module ToolChoice extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::Response::tool_choice] + def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::tool_choice] end type service_tier = :auto | :default | :flex @@ -165,7 +198,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Responses::Response::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end type truncation = :auto | :disabled @@ -176,7 +209,7 @@ module OpenAI AUTO: :auto DISABLED: :disabled - def self?.values: -> ::Array[OpenAI::Responses::Response::truncation] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] end end end diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index efa89936..f0bcab36 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.audio.delta" ) -> void + + def to_hash: -> { + delta: String, + sequence_number: Integer, + type: :"response.audio.delta" + } end end end diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 405a0893..1c891acf 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.audio.done" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.audio.done" + } end end end diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index dd04d32a..57c45293 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.audio.transcript.delta" ) -> void + + def to_hash: -> { + delta: String, + sequence_number: Integer, + type: :"response.audio.transcript.delta" + } end end end diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index c4fe0f1e..7bd59ad5 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ 
b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.audio.transcript.done" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.audio.transcript.done" + } end end end diff --git a/sig/openai/models/responses/response_cancel_params.rbs b/sig/openai/models/responses/response_cancel_params.rbs index 75dd01ea..6f0df4a4 100644 --- a/sig/openai/models/responses/response_cancel_params.rbs +++ b/sig/openai/models/responses/response_cancel_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 6d41f4d7..2da40939 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.code_interpreter_call.code.delta" ) -> void + + def to_hash: -> { + delta: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.code.delta" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 7fb5a59a..5f796490 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.code_interpreter_call.code.done" ) -> void + + def to_hash: -> { + code: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.code.done" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index ef439638..d7833fb2 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.code_interpreter_call.completed" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index 782ccb19..5efe8048 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.code_interpreter_call.in_progress" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.in_progress" + } end end end diff 
--git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index 572ce437..ccdd6dae 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.code_interpreter_call.interpreting" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.interpreting" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index e1a774bd..cfca1f50 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -5,8 +5,8 @@ module OpenAI { id: String, code: String, - results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, type: :code_interpreter_call, container_id: String } @@ -16,9 +16,9 @@ module OpenAI attr_accessor code: String - attr_accessor results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] + attr_accessor results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] - attr_accessor status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status attr_accessor type: :code_interpreter_call @@ -29,12 +29,21 @@ module OpenAI def initialize: ( id: String, code: String, - results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, ?container_id: String, ?type: :code_interpreter_call ) -> void + def to_hash: -> { + id: String, + code: String, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + type: :code_interpreter_call, + container_id: String + } + type result = OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs | OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files @@ -50,6 +59,8 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) -> void + + def to_hash: -> { logs: String, type: :logs } end type files = @@ -68,6 +79,11 @@ module OpenAI ?type: :files ) -> void + def to_hash: -> { + files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + type: :files + } + type file = { file_id: String, mime_type: String } class File < OpenAI::Internal::Type::BaseModel @@ -76,10 +92,12 @@ module OpenAI attr_accessor mime_type: String def initialize: (file_id: String, mime_type: String) -> void + + def to_hash: -> { file_id: String, mime_type: String } end end - def self?.variants: -> 
::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] end type status = :in_progress | :interpreting | :completed @@ -91,7 +109,7 @@ module OpenAI INTERPRETING: :interpreting COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] end end end diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 0d4cbe1d..e3c62ea6 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.completed" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.completed" + } end end end diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 52c82604..094df5fd 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -4,35 +4,44 @@ module OpenAI type response_computer_tool_call = { id: String, - action: OpenAI::Responses::ResponseComputerToolCall::action, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, call_id: String, pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCall::status, - type: OpenAI::Responses::ResponseComputerToolCall::type_ + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ } class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor action: OpenAI::Responses::ResponseComputerToolCall::action + attr_accessor action: OpenAI::Models::Responses::ResponseComputerToolCall::action attr_accessor call_id: String attr_accessor pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] - attr_accessor status: OpenAI::Responses::ResponseComputerToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseComputerToolCall::status - attr_accessor type: OpenAI::Responses::ResponseComputerToolCall::type_ + attr_accessor type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ def initialize: ( id: String, - action: OpenAI::Responses::ResponseComputerToolCall::action, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, call_id: String, pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCall::status, - type: OpenAI::Responses::ResponseComputerToolCall::type_ + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ ) -> void + def to_hash: -> { + id: String, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, + call_id: String, + pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + } + type 
action = OpenAI::Responses::ResponseComputerToolCall::Action::Click | OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick @@ -49,14 +58,14 @@ module OpenAI type click = { - button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, type: :click, x: Integer, y_: Integer } class Click < OpenAI::Internal::Type::BaseModel - attr_accessor button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button + attr_accessor button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button attr_accessor type: :click @@ -65,12 +74,19 @@ module OpenAI attr_accessor y_: Integer def initialize: ( - button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, x: Integer, y_: Integer, ?type: :click ) -> void + def to_hash: -> { + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, + type: :click, + x: Integer, + y_: Integer + } + type button = :left | :right | :wheel | :back | :forward module Button @@ -82,7 +98,7 @@ module OpenAI BACK: :back FORWARD: :forward - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Click::button] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button] end end @@ -100,6 +116,8 @@ module OpenAI y_: Integer, ?type: :double_click ) -> void + + def to_hash: -> { type: :double_click, x: Integer, y_: Integer } end type drag = @@ -118,6 +136,11 @@ module OpenAI ?type: :drag ) -> void + def to_hash: -> { + path: ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path], + type: :drag + } + type path = { x: Integer, y_: Integer } class Path < OpenAI::Internal::Type::BaseModel @@ -126,6 +149,8 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer) -> void + + def to_hash: -> { x: Integer, y_: Integer } end end @@ -137,6 +162,8 @@ module OpenAI attr_accessor type: :keypress def initialize: (keys: ::Array[String], ?type: :keypress) -> void + + def to_hash: -> { keys: ::Array[String], type: :keypress } end type move = { type: :move, x: Integer, y_: Integer } @@ -149,6 +176,8 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer, ?type: :move) -> void + + def to_hash: -> { type: :move, x: Integer, y_: Integer } end type screenshot = { type: :screenshot } @@ -157,6 +186,8 @@ module OpenAI attr_accessor type: :screenshot def initialize: (?type: :screenshot) -> void + + def to_hash: -> { type: :screenshot } end type scroll = @@ -186,6 +217,14 @@ module OpenAI y_: Integer, ?type: :scroll ) -> void + + def to_hash: -> { + scroll_x: Integer, + scroll_y: Integer, + type: :scroll, + x: Integer, + y_: Integer + } end type type_ = { text: String, type: :type } @@ -196,6 +235,8 @@ module OpenAI attr_accessor type: :type def initialize: (text: String, ?type: :type) -> void + + def to_hash: -> { text: String, type: :type } end type wait = { type: :wait } @@ -204,9 +245,11 @@ module OpenAI attr_accessor type: :wait def initialize: (?type: :wait) -> void + + def to_hash: -> { type: :wait } end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::action] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::action] end type pending_safety_check = @@ -220,6 +263,8 @@ module OpenAI attr_accessor message: String def initialize: 
(id: String, code: String, message: String) -> void + + def to_hash: -> { id: String, code: String, message: String } end type status = :in_progress | :completed | :incomplete @@ -231,7 +276,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::status] end type type_ = :computer_call @@ -241,7 +286,7 @@ module OpenAI COMPUTER_CALL: :computer_call - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 4ce6f3a0..a21f2310 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -8,7 +8,7 @@ module OpenAI output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, type: :computer_call_output, acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status } class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel @@ -26,21 +26,30 @@ module OpenAI ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] ) -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] - attr_reader status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status? 
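The `self?.values` helpers referenced throughout resolve the same way at runtime; a sketch, assuming these enum modules behave like the gem's other generated enums:

OpenAI::Models::Responses::ResponseComputerToolCall::Status.values
# => [:in_progress, :completed, :incomplete]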
def status=: ( - OpenAI::Responses::ResponseComputerToolCallOutputItem::status - ) -> OpenAI::Responses::ResponseComputerToolCallOutputItem::status + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status def initialize: ( id: String, call_id: String, output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - ?status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status, + ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, ?type: :computer_call_output ) -> void + def to_hash: -> { + id: String, + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: :computer_call_output, + acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + } + type acknowledged_safety_check = { id: String, code: String, message: String } @@ -52,6 +61,8 @@ module OpenAI attr_accessor message: String def initialize: (id: String, code: String, message: String) -> void + + def to_hash: -> { id: String, code: String, message: String } end type status = :in_progress | :completed | :incomplete @@ -63,7 +74,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index 5cea0d21..3f522d97 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -20,6 +20,12 @@ module OpenAI ?image_url: String, ?type: :computer_screenshot ) -> void + + def to_hash: -> { + type: :computer_screenshot, + file_id: String, + image_url: String + } end end end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 1e116912..efdf1ffe 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -6,7 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, sequence_number: Integer, type: :"response.content_part.added" } @@ -18,7 +18,7 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Responses::ResponseContentPartAddedEvent::part + attr_accessor part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part attr_accessor sequence_number: Integer @@ -28,11 +28,20 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, sequence_number: Integer, ?type: :"response.content_part.added" ) -> void + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: 
Integer, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, + type: :"response.content_part.added" + } + type part = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -40,7 +49,7 @@ module OpenAI module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseContentPartAddedEvent::part] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseContentPartAddedEvent::part] end end end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 267b2a93..53ea73b3 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -6,7 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, sequence_number: Integer, type: :"response.content_part.done" } @@ -18,7 +18,7 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Responses::ResponseContentPartDoneEvent::part + attr_accessor part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part attr_accessor sequence_number: Integer @@ -28,11 +28,20 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, sequence_number: Integer, ?type: :"response.content_part.done" ) -> void + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, + type: :"response.content_part.done" + } + type part = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -40,7 +49,7 @@ module OpenAI module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseContentPartDoneEvent::part] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseContentPartDoneEvent::part] end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 0587dc2e..3050697d 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -104,6 +104,29 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: OpenAI::Models::Responses::ResponseCreateParams::input, + model: OpenAI::Models::responses_model, + background: bool?, + include: ::Array[OpenAI::Models::Responses::response_includable]?, + instructions: String?, + max_output_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + parallel_tool_calls: bool?, + previous_response_id: String?, + reasoning: OpenAI::Reasoning?, + service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, + store: bool?, + temperature: Float?, + text: OpenAI::Responses::ResponseTextConfig, + tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + tools: ::Array[OpenAI::Models::Responses::tool], + top_p: Float?, + truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, + user: String, + request_options: OpenAI::RequestOptions + } + type input = String 
| OpenAI::Models::Responses::response_input module Input diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index fb7adb7c..1681e66e 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.created" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.created" + } end end end diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index fceca07d..15aa4a54 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index ee1b5f7a..a894688d 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -2,18 +2,26 @@ module OpenAI module Models module Responses type response_error = - { code: OpenAI::Responses::ResponseError::code, message: String } + { + code: OpenAI::Models::Responses::ResponseError::code, + message: String + } class ResponseError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Responses::ResponseError::code + attr_accessor code: OpenAI::Models::Responses::ResponseError::code attr_accessor message: String def initialize: ( - code: OpenAI::Responses::ResponseError::code, + code: OpenAI::Models::Responses::ResponseError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Responses::ResponseError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded @@ -56,7 +64,7 @@ module OpenAI FAILED_TO_DOWNLOAD_IMAGE: :failed_to_download_image IMAGE_FILE_NOT_FOUND: :image_file_not_found - def self?.values: -> ::Array[OpenAI::Responses::ResponseError::code] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseError::code] end end end diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index 305a05ec..a7aa3f3f 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -28,6 +28,14 @@ module OpenAI sequence_number: Integer, ?type: :error ) -> void + + def to_hash: -> { + code: String?, + message: String, + param: String?, + sequence_number: Integer, + type: :error + } end end end diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index 021ce3c1..27befafe 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.failed" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.failed" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index 85d8a2fb..80a0bc8e 100644 --- 
a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.file_search_call.completed" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index babf065c..333b4e44 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.file_search_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index 7a5dadfd..66149ea3 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.file_search_call.searching" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.searching" + } end end end diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index d97daaa2..b83d2d56 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -5,7 +5,7 @@ module OpenAI { id: String, queries: ::Array[String], - status: OpenAI::Responses::ResponseFileSearchToolCall::status, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, type: :file_search_call, results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? } @@ -15,7 +15,7 @@ module OpenAI attr_accessor queries: ::Array[String] - attr_accessor status: OpenAI::Responses::ResponseFileSearchToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status attr_accessor type: :file_search_call @@ -24,11 +24,19 @@ module OpenAI def initialize: ( id: String, queries: ::Array[String], - status: OpenAI::Responses::ResponseFileSearchToolCall::status, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, ?results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]?, ?type: :file_search_call ) -> void + def to_hash: -> { + id: String, + queries: ::Array[String], + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, + type: :file_search_call, + results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? 
+ } + type status = :in_progress | :searching | :completed | :incomplete | :failed @@ -41,12 +49,12 @@ module OpenAI INCOMPLETE: :incomplete FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::status] end type result = { - attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, file_id: String, filename: String, score: Float, @@ -54,7 +62,7 @@ module OpenAI } class Result < OpenAI::Internal::Type::BaseModel - attr_accessor attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]? attr_reader file_id: String? @@ -73,19 +81,27 @@ module OpenAI def text=: (String) -> String def initialize: ( - ?attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, ?file_id: String, ?filename: String, ?score: Float, ?text: String ) -> void + def to_hash: -> { + attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, + file_id: String, + filename: String, + score: Float, + text: String + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute] end end end diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index 319fd52e..23800680 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -30,6 +30,14 @@ module OpenAI ?strict: bool?, ?type: :json_schema ) -> void + + def to_hash: -> { + name: String, + schema: ::Hash[Symbol, top], + type: :json_schema, + description: String, + strict: bool? 
+ } end end end diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index eb630a74..9cc2eb09 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -28,6 +28,14 @@ module OpenAI sequence_number: Integer, ?type: :"response.function_call_arguments.delta" ) -> void + + def to_hash: -> { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.function_call_arguments.delta" + } end end end diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index 033aba4a..acedda9d 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -28,6 +28,14 @@ module OpenAI sequence_number: Integer, ?type: :"response.function_call_arguments.done" ) -> void + + def to_hash: -> { + arguments: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.function_call_arguments.done" + } end end end diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index 47884c8a..4da59dc0 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -8,7 +8,7 @@ module OpenAI name: String, type: :function_call, id: String, - status: OpenAI::Responses::ResponseFunctionToolCall::status + status: OpenAI::Models::Responses::ResponseFunctionToolCall::status } class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel @@ -24,21 +24,30 @@ module OpenAI def id=: (String) -> String - attr_reader status: OpenAI::Responses::ResponseFunctionToolCall::status? + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCall::status? 
def status=: ( - OpenAI::Responses::ResponseFunctionToolCall::status - ) -> OpenAI::Responses::ResponseFunctionToolCall::status + OpenAI::Models::Responses::ResponseFunctionToolCall::status + ) -> OpenAI::Models::Responses::ResponseFunctionToolCall::status def initialize: ( arguments: String, call_id: String, name: String, ?id: String, - ?status: OpenAI::Responses::ResponseFunctionToolCall::status, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, ?type: :function_call ) -> void + def to_hash: -> { + arguments: String, + call_id: String, + name: String, + type: :function_call, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::status + } + type status = :in_progress | :completed | :incomplete module Status @@ -48,7 +57,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::status] end end end diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs b/sig/openai/models/responses/response_function_tool_call_item.rbs index 81206561..38d76eb8 100644 --- a/sig/openai/models/responses/response_function_tool_call_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_item.rbs @@ -9,6 +9,8 @@ module OpenAI def id=: (String _) -> String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end end end diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 048189b6..e9a67d83 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -7,7 +7,7 @@ module OpenAI call_id: String, output: String, type: :function_call_output, - status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status } class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel @@ -19,20 +19,28 @@ module OpenAI attr_accessor type: :function_call_output - attr_reader status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status? 
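A hypothetical round trip through the `ResponseFunctionToolCall` signatures above; the argument values are invented for illustration:

call = OpenAI::Models::Responses::ResponseFunctionToolCall.new(
  arguments: '{"city":"Berlin"}', # a raw JSON string, per `arguments: String`
  call_id: "call_abc",
  name: "get_weather"
)
call.to_hash[:type] # => :function_call, the literal default in the signature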
def status=: ( - OpenAI::Responses::ResponseFunctionToolCallOutputItem::status - ) -> OpenAI::Responses::ResponseFunctionToolCallOutputItem::status + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status def initialize: ( id: String, call_id: String, output: String, - ?status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, ?type: :function_call_output ) -> void + def to_hash: -> { + id: String, + call_id: String, + output: String, + type: :function_call_output, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + } + type status = :in_progress | :completed | :incomplete module Status @@ -42,7 +50,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 5efa6740..0aa3c5a6 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -4,23 +4,29 @@ module OpenAI type response_function_web_search = { id: String, - status: OpenAI::Responses::ResponseFunctionWebSearch::status, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, type: :web_search_call } class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor status: OpenAI::Responses::ResponseFunctionWebSearch::status + attr_accessor status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status attr_accessor type: :web_search_call def initialize: ( id: String, - status: OpenAI::Responses::ResponseFunctionWebSearch::status, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, ?type: :web_search_call ) -> void + def to_hash: -> { + id: String, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, + type: :web_search_call + } + type status = :in_progress | :searching | :completed | :failed module Status @@ -31,7 +37,7 @@ module OpenAI COMPLETED: :completed FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionWebSearch::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::status] end end end diff --git a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs index 54310938..aadbdaa5 100644 --- a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs +++ b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.image_generation_call.completed" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs index 1cab46cc..a9e514e9 100644 --- a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs +++ 
b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.image_generation_call.generating" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.generating" + } end end end diff --git a/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs index a90abf26..70a36323 100644 --- a/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.image_generation_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs index 5eaed059..72ebce9d 100644 --- a/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs +++ b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs @@ -32,6 +32,15 @@ module OpenAI sequence_number: Integer, ?type: :"response.image_generation_call.partial_image" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + :partial_image_b64 => String, + partial_image_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.partial_image" + } end end end diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 45a97dac..f3d877eb 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.in_progress" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index 6d7c7d11..179657cd 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ b/sig/openai/models/responses/response_incomplete_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.incomplete" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.incomplete" + } end end end diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index 542c1371..42b5a713 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -4,23 +4,29 @@ module OpenAI type response_input_audio = { data: String, - format_: OpenAI::Responses::ResponseInputAudio::format_, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, type: :input_audio } class ResponseInputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String - attr_accessor format_: OpenAI::Responses::ResponseInputAudio::format_ + attr_accessor format_: OpenAI::Models::Responses::ResponseInputAudio::format_ attr_accessor type: :input_audio def 
initialize: ( data: String, - format_: OpenAI::Responses::ResponseInputAudio::format_, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, ?type: :input_audio ) -> void + def to_hash: -> { + data: String, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, + type: :input_audio + } + type format_ = :mp3 | :wav module Format @@ -29,7 +35,7 @@ module OpenAI MP3: :mp3 WAV: :wav - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputAudio::format_] end end end diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index c5060c70..bdfcd495 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -28,6 +28,13 @@ module OpenAI ?filename: String, ?type: :input_file ) -> void + + def to_hash: -> { + type: :input_file, + file_data: String, + file_id: String?, + filename: String + } end end end diff --git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index ebc718c8..6b9cf49e 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -3,14 +3,14 @@ module OpenAI module Responses type response_input_image = { - detail: OpenAI::Responses::ResponseInputImage::detail, + detail: OpenAI::Models::Responses::ResponseInputImage::detail, type: :input_image, file_id: String?, image_url: String? } class ResponseInputImage < OpenAI::Internal::Type::BaseModel - attr_accessor detail: OpenAI::Responses::ResponseInputImage::detail + attr_accessor detail: OpenAI::Models::Responses::ResponseInputImage::detail attr_accessor type: :input_image @@ -19,12 +19,19 @@ module OpenAI attr_accessor image_url: String? def initialize: ( - detail: OpenAI::Responses::ResponseInputImage::detail, + detail: OpenAI::Models::Responses::ResponseInputImage::detail, ?file_id: String?, ?image_url: String?, ?type: :input_image ) -> void + def to_hash: -> { + detail: OpenAI::Models::Responses::ResponseInputImage::detail, + type: :input_image, + file_id: String?, + image_url: String? 
+ } + type detail = :low | :high | :auto module Detail @@ -34,7 +41,7 @@ module OpenAI HIGH: :high AUTO: :auto - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputImage::detail] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail] end end end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index ab7d1946..35bf8908 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -28,35 +28,42 @@ module OpenAI type message = { content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputItem::Message::role, - status: OpenAI::Responses::ResponseInputItem::Message::status, - type: OpenAI::Responses::ResponseInputItem::Message::type_ + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ } class Message < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list - attr_accessor role: OpenAI::Responses::ResponseInputItem::Message::role + attr_accessor role: OpenAI::Models::Responses::ResponseInputItem::Message::role - attr_reader status: OpenAI::Responses::ResponseInputItem::Message::status? + attr_reader status: OpenAI::Models::Responses::ResponseInputItem::Message::status? def status=: ( - OpenAI::Responses::ResponseInputItem::Message::status - ) -> OpenAI::Responses::ResponseInputItem::Message::status + OpenAI::Models::Responses::ResponseInputItem::Message::status + ) -> OpenAI::Models::Responses::ResponseInputItem::Message::status - attr_reader type: OpenAI::Responses::ResponseInputItem::Message::type_? + attr_reader type: OpenAI::Models::Responses::ResponseInputItem::Message::type_? 
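Sketch of the `ResponseInputImage` shape typed above; the URL is invented and the nullable `file_id` is left unset:

img = OpenAI::Models::Responses::ResponseInputImage.new(
  detail: :auto, # one of the Detail enum values above
  image_url: "https://example.com/cat.png"
)
img.to_hash[:detail] # => :auto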
def type=: ( - OpenAI::Responses::ResponseInputItem::Message::type_ - ) -> OpenAI::Responses::ResponseInputItem::Message::type_ + OpenAI::Models::Responses::ResponseInputItem::Message::type_ + ) -> OpenAI::Models::Responses::ResponseInputItem::Message::type_ def initialize: ( content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputItem::Message::role, - ?status: OpenAI::Responses::ResponseInputItem::Message::status, - ?type: OpenAI::Responses::ResponseInputItem::Message::type_ + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ + } + type role = :user | :system | :developer module Role @@ -66,7 +73,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::role] end type status = :in_progress | :completed | :incomplete @@ -78,7 +85,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::status] end type type_ = :message @@ -88,7 +95,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_] end end @@ -99,7 +106,7 @@ module OpenAI type: :computer_call_output, id: String?, acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, - status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status? + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? } class ComputerCallOutput < OpenAI::Internal::Type::BaseModel @@ -113,17 +120,26 @@ module OpenAI attr_accessor acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]? - attr_accessor status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? def initialize: ( call_id: String, output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, ?id: String?, ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, - ?status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?, ?type: :computer_call_output ) -> void + def to_hash: -> { + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: :computer_call_output, + id: String?, + acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? 
+ } + type acknowledged_safety_check = { id: String, code: String?, message: String? } @@ -139,6 +155,8 @@ module OpenAI ?code: String?, ?message: String? ) -> void + + def to_hash: -> { id: String, code: String?, message: String? } end type status = :in_progress | :completed | :incomplete @@ -150,7 +168,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status] end end @@ -160,7 +178,7 @@ module OpenAI output: String, type: :function_call_output, id: String?, - status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status? + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? } class FunctionCallOutput < OpenAI::Internal::Type::BaseModel @@ -172,16 +190,24 @@ module OpenAI attr_accessor id: String? - attr_accessor status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? def initialize: ( call_id: String, output: String, ?id: String?, - ?status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?, ?type: :function_call_output ) -> void + def to_hash: -> { + call_id: String, + output: String, + type: :function_call_output, + id: String?, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? + } + type status = :in_progress | :completed | :incomplete module Status @@ -191,7 +217,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status] end end @@ -199,7 +225,7 @@ module OpenAI { id: String, result: String?, - status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, type: :image_generation_call } @@ -208,17 +234,24 @@ module OpenAI attr_accessor result: String? 
- attr_accessor status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status attr_accessor type: :image_generation_call def initialize: ( id: String, result: String?, - status: OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, ?type: :image_generation_call ) -> void + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, + type: :image_generation_call + } + type status = :in_progress | :completed | :generating | :failed module Status @@ -229,7 +262,7 @@ module OpenAI GENERATING: :generating FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ImageGenerationCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status] end end @@ -238,7 +271,7 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, type: :local_shell_call } @@ -249,7 +282,7 @@ module OpenAI attr_accessor call_id: String - attr_accessor status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status attr_accessor type: :local_shell_call @@ -257,10 +290,18 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseInputItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, ?type: :local_shell_call ) -> void + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, + type: :local_shell_call + } + type action = { command: ::Array[String], @@ -292,6 +333,15 @@ module OpenAI ?working_directory: String?, ?type: :exec ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } end type status = :in_progress | :completed | :incomplete @@ -303,7 +353,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::LocalShellCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status] end end @@ -312,7 +362,7 @@ module OpenAI id: String, output: String, type: :local_shell_call_output, - status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status? + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? } class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel @@ -322,15 +372,22 @@ module OpenAI attr_accessor type: :local_shell_call_output - attr_accessor status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? 
def initialize: ( id: String, output: String, - ?status: OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status?, ?type: :local_shell_call_output ) -> void + def to_hash: -> { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? + } + type status = :in_progress | :completed | :incomplete module Status @@ -340,7 +397,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status] end end @@ -372,6 +429,14 @@ module OpenAI ?type: :mcp_list_tools ) -> void + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + type tool = { input_schema: top, @@ -395,6 +460,13 @@ module OpenAI ?annotations: top?, ?description: String? ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? + } end end @@ -425,6 +497,14 @@ module OpenAI server_label: String, ?type: :mcp_approval_request ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } end type mcp_approval_response = @@ -454,6 +534,14 @@ module OpenAI ?reason: String?, ?type: :mcp_approval_response ) -> void + + def to_hash: -> { + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + id: String?, + reason: String? + } end type mcp_call = @@ -491,24 +579,39 @@ module OpenAI ?output: String?, ?type: :mcp_call ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } end type item_reference = { id: String, - type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? } class ItemReference < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + attr_accessor type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? def initialize: ( id: String, - ?type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + ?type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? ) -> void + def to_hash: -> { + id: String, + type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? 
+ } + type type_ = :item_reference module Type @@ -516,7 +619,7 @@ module OpenAI ITEM_REFERENCE: :item_reference - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ItemReference::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_] end end diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index f9cfd8b8..ff6874a7 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -5,9 +5,9 @@ module OpenAI { id: String, content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputMessageItem::role, - status: OpenAI::Responses::ResponseInputMessageItem::status, - type: OpenAI::Responses::ResponseInputMessageItem::type_ + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ } class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel @@ -15,28 +15,36 @@ module OpenAI attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list - attr_accessor role: OpenAI::Responses::ResponseInputMessageItem::role + attr_accessor role: OpenAI::Models::Responses::ResponseInputMessageItem::role - attr_reader status: OpenAI::Responses::ResponseInputMessageItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseInputMessageItem::status? def status=: ( - OpenAI::Responses::ResponseInputMessageItem::status - ) -> OpenAI::Responses::ResponseInputMessageItem::status + OpenAI::Models::Responses::ResponseInputMessageItem::status + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::status - attr_reader type: OpenAI::Responses::ResponseInputMessageItem::type_? + attr_reader type: OpenAI::Models::Responses::ResponseInputMessageItem::type_? 
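Note the pattern in the renames themselves: only the lowercase RBS type aliases (`role`, `status`, `type_`) gain the fully qualified `OpenAI::Models::Responses` prefix, while class constants such as `OpenAI::Responses::ResponseComputerToolCallOutputScreenshot` are left alone. That reads as the aliases being declared only under `OpenAI::Models`, with the class constants reachable through either namespace. The enum modules behind those aliases stay ordinary Ruby modules; a sketch, assuming the usual generated interface:

    require "openai"

    OpenAI::Models::Responses::ResponseInputMessageItem::Status::COMPLETED
    # => :completed

    # `def self?.values` in RBS declares values as both a module method and
    # an instance method; callers normally use the module form:
    OpenAI::Models::Responses::ResponseInputMessageItem::Status.values
    # => [:in_progress, :completed, :incomplete]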
def type=: ( - OpenAI::Responses::ResponseInputMessageItem::type_ - ) -> OpenAI::Responses::ResponseInputMessageItem::type_ + OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::type_ def initialize: ( id: String, content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputMessageItem::role, - ?status: OpenAI::Responses::ResponseInputMessageItem::status, - ?type: OpenAI::Responses::ResponseInputMessageItem::type_ + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ ) -> void + def to_hash: -> { + id: String, + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + } + type role = :user | :system | :developer module Role @@ -46,7 +54,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role] end type status = :in_progress | :completed | :incomplete @@ -58,7 +66,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status] end type type_ = :message @@ -68,7 +76,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] end end end diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs index e60a488e..76771a1f 100644 --- a/sig/openai/models/responses/response_input_text.rbs +++ b/sig/openai/models/responses/response_input_text.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :input_text def initialize: (text: String, ?type: :input_text) -> void + + def to_hash: -> { text: String, type: :input_text } end end end diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index 441c732c..f8f79e06 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -26,7 +26,7 @@ module OpenAI { id: String, result: String?, - status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, type: :image_generation_call } @@ -35,17 +35,24 @@ module OpenAI attr_accessor result: String? 
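Even the smallest leaf models get the same treatment, and because `type` is a symbol literal the returned hash carries its own discriminator. A sketch under the same keyword-construction assumption:

    require "openai"

    text = OpenAI::Models::Responses::ResponseInputText.new(text: "hello world")
    text.to_hash
    # => {text: "hello world", type: :input_text}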
- attr_accessor status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status attr_accessor type: :image_generation_call def initialize: ( id: String, result: String?, - status: OpenAI::Responses::ResponseItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, ?type: :image_generation_call ) -> void + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, + type: :image_generation_call + } + type status = :in_progress | :completed | :generating | :failed module Status @@ -56,7 +63,7 @@ module OpenAI GENERATING: :generating FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::ImageGenerationCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status] end end @@ -65,7 +72,7 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, type: :local_shell_call } @@ -76,7 +83,7 @@ module OpenAI attr_accessor call_id: String - attr_accessor status: OpenAI::Responses::ResponseItem::LocalShellCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status attr_accessor type: :local_shell_call @@ -84,10 +91,18 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, ?type: :local_shell_call ) -> void + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, + type: :local_shell_call + } + type action = { command: ::Array[String], @@ -119,6 +134,15 @@ module OpenAI ?working_directory: String?, ?type: :exec ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } end type status = :in_progress | :completed | :incomplete @@ -130,7 +154,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::LocalShellCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::LocalShellCall::status] end end @@ -139,7 +163,7 @@ module OpenAI id: String, output: String, type: :local_shell_call_output, - status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status? + status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? } class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel @@ -149,15 +173,22 @@ module OpenAI attr_accessor type: :local_shell_call_output - attr_accessor status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? 
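Two different question marks are in play in declarations like the one above: a trailing `?` on a type (`status?`, `String?`) makes the value nilable, while a leading `?` on a keyword in `initialize` makes the argument optional. Both calls below should satisfy the signature; the IDs are invented for illustration:

    require "openai"

    # status omitted entirely; the attribute reads back as nil
    OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput.new(
      id: "lsh_123", output: "ok"
    )

    # status passed as an explicit nil, allowed because the type is nilable
    OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput.new(
      id: "lsh_123", output: "ok", status: nil
    )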
def initialize: ( id: String, output: String, - ?status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status?, ?type: :local_shell_call_output ) -> void + def to_hash: -> { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? + } + type status = :in_progress | :completed | :incomplete module Status @@ -167,7 +198,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseItem::LocalShellCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status] end end @@ -199,6 +230,14 @@ module OpenAI ?type: :mcp_list_tools ) -> void + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + type tool = { input_schema: top, @@ -222,6 +261,13 @@ module OpenAI ?annotations: top?, ?description: String? ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? + } end end @@ -252,6 +298,14 @@ module OpenAI server_label: String, ?type: :mcp_approval_request ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } end type mcp_approval_response = @@ -281,6 +335,14 @@ module OpenAI ?reason: String?, ?type: :mcp_approval_response ) -> void + + def to_hash: -> { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } end type mcp_call = @@ -318,6 +380,16 @@ module OpenAI ?output: String?, ?type: :mcp_call ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? 
+ } end def self?.variants: -> ::Array[OpenAI::Models::Responses::response_item] diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index e7390def..7614060c 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -30,6 +30,14 @@ module OpenAI last_id: String, ?object: :list ) -> void + + def to_hash: -> { + data: ::Array[OpenAI::Models::Responses::response_item], + first_id: String, + has_more: bool, + last_id: String, + object: :list + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs index edc8ed3f..bb94cc20 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -28,6 +28,14 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_call.arguments_delta" ) -> void + + def to_hash: -> { + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_delta" + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs index 38693df6..f0a16dcf 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -28,6 +28,14 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_call.arguments_done" ) -> void + + def to_hash: -> { + arguments: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_done" + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_completed_event.rbs b/sig/openai/models/responses/response_mcp_call_completed_event.rbs index 71dd9297..336d5eed 100644 --- a/sig/openai/models/responses/response_mcp_call_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_completed_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_call.completed" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_failed_event.rbs b/sig/openai/models/responses/response_mcp_call_failed_event.rbs index 6ab340fe..9d643dbc 100644 --- a/sig/openai/models/responses/response_mcp_call_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_failed_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_call.failed" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_call.failed" + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs index fd7ba1a2..7dc8afd2 100644 --- a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.in_progress" + } end end end diff --git 
a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs index f34961c0..af17675a 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_list_tools.completed" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.completed" + } end end end diff --git a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs index dad83031..fab64580 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs @@ -13,6 +13,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_list_tools.failed" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.failed" + } end end end diff --git a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs index bd9bbbb7..72e57c18 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs @@ -16,6 +16,11 @@ module OpenAI sequence_number: Integer, ?type: :"response.mcp_list_tools.in_progress" ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs index a2c7f16e..0a8d8e3a 100644 --- a/sig/openai/models/responses/response_output_audio.rbs +++ b/sig/openai/models/responses/response_output_audio.rbs @@ -16,6 +16,12 @@ module OpenAI transcript: String, ?type: :output_audio ) -> void + + def to_hash: -> { + data: String, + transcript: String, + type: :output_audio + } end end end diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index 5007eaf4..dc0254bd 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -22,7 +22,7 @@ module OpenAI { id: String, result: String?, - status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, type: :image_generation_call } @@ -31,17 +31,24 @@ module OpenAI attr_accessor result: String? 
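The streaming lifecycle events a little above are the simplest case of all: several carry nothing but a sequence number plus their dotted literal tag. A sketch, with the sequence number invented for illustration:

    require "openai"

    event = OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent.new(
      sequence_number: 7
    )
    event.to_hash
    # => {sequence_number: 7, type: :"response.mcp_list_tools.completed"}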
- attr_accessor status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status attr_accessor type: :image_generation_call def initialize: ( id: String, result: String?, - status: OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, ?type: :image_generation_call ) -> void + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, + type: :image_generation_call + } + type status = :in_progress | :completed | :generating | :failed module Status @@ -52,7 +59,7 @@ module OpenAI GENERATING: :generating FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status] end end @@ -61,7 +68,7 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, type: :local_shell_call } @@ -72,7 +79,7 @@ module OpenAI attr_accessor call_id: String - attr_accessor status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status attr_accessor type: :local_shell_call @@ -80,10 +87,18 @@ module OpenAI id: String, action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, call_id: String, - status: OpenAI::Responses::ResponseOutputItem::LocalShellCall::status, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, ?type: :local_shell_call ) -> void + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, + type: :local_shell_call + } + type action = { command: ::Array[String], @@ -115,6 +130,15 @@ module OpenAI ?working_directory: String?, ?type: :exec ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } end type status = :in_progress | :completed | :incomplete @@ -126,7 +150,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputItem::LocalShellCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status] end end @@ -165,6 +189,16 @@ module OpenAI ?output: String?, ?type: :mcp_call ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } end type mcp_list_tools = @@ -195,6 +229,14 @@ module OpenAI ?type: :mcp_list_tools ) -> void + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + type tool = { input_schema: top, @@ -218,6 +260,13 @@ module OpenAI ?annotations: top?, ?description: String? ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? 
+ } end end @@ -248,6 +297,14 @@ module OpenAI server_label: String, ?type: :mcp_approval_request ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } end def self?.variants: -> ::Array[OpenAI::Models::Responses::response_output_item] diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index b85325a4..25781321 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.output_item.added" ) -> void + + def to_hash: -> { + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_item.added" + } end end end diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index b61455c4..3012b2ab 100644 --- a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -24,6 +24,13 @@ module OpenAI sequence_number: Integer, ?type: :"response.output_item.done" ) -> void + + def to_hash: -> { + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_item.done" + } end end end diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index 47827460..223256b8 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -4,31 +4,39 @@ module OpenAI type response_output_message = { id: String, - content: ::Array[OpenAI::Responses::ResponseOutputMessage::content], + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], role: :assistant, - status: OpenAI::Responses::ResponseOutputMessage::status, + status: OpenAI::Models::Responses::ResponseOutputMessage::status, type: :message } class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor content: ::Array[OpenAI::Responses::ResponseOutputMessage::content] + attr_accessor content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content] attr_accessor role: :assistant - attr_accessor status: OpenAI::Responses::ResponseOutputMessage::status + attr_accessor status: OpenAI::Models::Responses::ResponseOutputMessage::status attr_accessor type: :message def initialize: ( id: String, - content: ::Array[OpenAI::Responses::ResponseOutputMessage::content], - status: OpenAI::Responses::ResponseOutputMessage::status, + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], + status: OpenAI::Models::Responses::ResponseOutputMessage::status, ?role: :assistant, ?type: :message ) -> void + def to_hash: -> { + id: String, + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], + role: :assistant, + status: OpenAI::Models::Responses::ResponseOutputMessage::status, + type: :message + } + type content = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -36,7 +44,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputMessage::content] + def self?.variants: 
-> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content] end type status = :in_progress | :completed | :incomplete @@ -48,7 +56,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputMessage::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::status] end end end diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs index 14de45aa..e2347cc1 100644 --- a/sig/openai/models/responses/response_output_refusal.rbs +++ b/sig/openai/models/responses/response_output_refusal.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index 368b2b07..d374deb4 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -3,24 +3,30 @@ module OpenAI module Responses type response_output_text = { - annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation], + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, type: :output_text } class ResponseOutputText < OpenAI::Internal::Type::BaseModel - attr_accessor annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation] + attr_accessor annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] attr_accessor text: String attr_accessor type: :output_text def initialize: ( - annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation], + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, ?type: :output_text ) -> void + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], + text: String, + type: :output_text + } + type annotation = OpenAI::Responses::ResponseOutputText::Annotation::FileCitation | OpenAI::Responses::ResponseOutputText::Annotation::URLCitation @@ -44,6 +50,12 @@ module OpenAI index: Integer, ?type: :file_citation ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_citation + } end type url_citation = @@ -73,6 +85,14 @@ module OpenAI url: String, ?type: :url_citation ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + type: :url_citation, + url: String + } end type file_path = { file_id: String, index: Integer, type: :file_path } @@ -89,9 +109,15 @@ module OpenAI index: Integer, ?type: :file_path ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_path + } end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputText::annotation] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] end end end diff --git a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs index 6e79267f..499b4556 100644 --- a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +++ b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs @@ -36,6 +36,16 @@ module OpenAI sequence_number: Integer, ?type: :"response.output_text_annotation.added" ) -> void + + def to_hash: -> { + annotation: 
top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text_annotation.added" + } end end end diff --git a/sig/openai/models/responses/response_queued_event.rbs b/sig/openai/models/responses/response_queued_event.rbs index c6311eda..4db7787a 100644 --- a/sig/openai/models/responses/response_queued_event.rbs +++ b/sig/openai/models/responses/response_queued_event.rbs @@ -20,6 +20,12 @@ module OpenAI sequence_number: Integer, ?type: :"response.queued" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.queued" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_delta_event.rbs b/sig/openai/models/responses/response_reasoning_delta_event.rbs index 2d918544..a56121bc 100644 --- a/sig/openai/models/responses/response_reasoning_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_delta_event.rbs @@ -32,6 +32,15 @@ module OpenAI sequence_number: Integer, ?type: :"response.reasoning.delta" ) -> void + + def to_hash: -> { + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.reasoning.delta" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_done_event.rbs b/sig/openai/models/responses/response_reasoning_done_event.rbs index 5cb2b130..0123cfcf 100644 --- a/sig/openai/models/responses/response_reasoning_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_done_event.rbs @@ -32,6 +32,15 @@ module OpenAI text: String, ?type: :"response.reasoning.done" ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: :"response.reasoning.done" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 7efc2a2d..620ee9dc 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -7,7 +7,7 @@ module OpenAI summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], type: :reasoning, encrypted_content: String?, - status: OpenAI::Responses::ResponseReasoningItem::status + status: OpenAI::Models::Responses::ResponseReasoningItem::status } class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel @@ -19,20 +19,28 @@ module OpenAI attr_accessor encrypted_content: String? - attr_reader status: OpenAI::Responses::ResponseReasoningItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseReasoningItem::status? 
def status=: ( - OpenAI::Responses::ResponseReasoningItem::status - ) -> OpenAI::Responses::ResponseReasoningItem::status + OpenAI::Models::Responses::ResponseReasoningItem::status + ) -> OpenAI::Models::Responses::ResponseReasoningItem::status def initialize: ( id: String, summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], ?encrypted_content: String?, - ?status: OpenAI::Responses::ResponseReasoningItem::status, + ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, ?type: :reasoning ) -> void + def to_hash: -> { + id: String, + summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], + type: :reasoning, + encrypted_content: String?, + status: OpenAI::Models::Responses::ResponseReasoningItem::status + } + type summary = { text: String, type: :summary_text } class Summary < OpenAI::Internal::Type::BaseModel @@ -41,6 +49,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end type status = :in_progress | :completed | :incomplete @@ -52,7 +62,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseReasoningItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseReasoningItem::status] end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs index 0bed2549..4e613606 100644 --- a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs @@ -32,6 +32,15 @@ module OpenAI summary_index: Integer, ?type: :"response.reasoning_summary.delta" ) -> void + + def to_hash: -> { + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary.delta" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs index e71572ba..cb56e84f 100644 --- a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs @@ -32,6 +32,15 @@ module OpenAI text: String, ?type: :"response.reasoning_summary.done" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary.done" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs index f822b599..597bae6f 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs @@ -33,6 +33,15 @@ module OpenAI ?type: :"response.reasoning_summary_part.added" ) -> void + def to_hash: -> { + item_id: String, + output_index: Integer, + part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_part.added" + } + type part = { text: String, type: :summary_text } class Part < OpenAI::Internal::Type::BaseModel @@ -41,6 +50,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: 
:summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs index ee6f29c0..79c25aa5 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs @@ -33,6 +33,15 @@ module OpenAI ?type: :"response.reasoning_summary_part.done" ) -> void + def to_hash: -> { + item_id: String, + output_index: Integer, + part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_part.done" + } + type part = { text: String, type: :summary_text } class Part < OpenAI::Internal::Type::BaseModel @@ -41,6 +50,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs index fcede6d5..8d39bef7 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs @@ -32,6 +32,15 @@ module OpenAI summary_index: Integer, ?type: :"response.reasoning_summary_text.delta" ) -> void + + def to_hash: -> { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_text.delta" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs index 1fd8a58d..50cffece 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs @@ -32,6 +32,15 @@ module OpenAI text: String, ?type: :"response.reasoning_summary_text.done" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary_text.done" + } end end end diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index 718ab10b..4985d6bd 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -32,6 +32,15 @@ module OpenAI sequence_number: Integer, ?type: :"response.refusal.delta" ) -> void + + def to_hash: -> { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.refusal.delta" + } end end end diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 1ad63da9..ce78512b 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -32,6 +32,15 @@ module OpenAI sequence_number: Integer, ?type: :"response.refusal.done" ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + refusal: String, + sequence_number: Integer, 
+ type: :"response.refusal.done" + } end end end diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 2a8ef689..c90b79c2 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -19,6 +19,11 @@ module OpenAI ?include: ::Array[OpenAI::Models::Responses::response_includable], ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + include: ::Array[OpenAI::Models::Responses::response_includable], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs index 410471dc..46c53b36 100644 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ b/sig/openai/models/responses/response_text_annotation_delta_event.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_text_annotation_delta_event = { - annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation, + annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, annotation_index: Integer, content_index: Integer, item_id: String, @@ -13,7 +13,7 @@ module OpenAI } class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation + attr_accessor annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation attr_accessor annotation_index: Integer @@ -28,7 +28,7 @@ module OpenAI attr_accessor type: :"response.output_text.annotation.added" def initialize: ( - annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation, + annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, annotation_index: Integer, content_index: Integer, item_id: String, @@ -37,6 +37,16 @@ module OpenAI ?type: :"response.output_text.annotation.added" ) -> void + def to_hash: -> { + annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text.annotation.added" + } + type annotation = OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation @@ -60,6 +70,12 @@ module OpenAI index: Integer, ?type: :file_citation ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_citation + } end type url_citation = @@ -89,6 +105,14 @@ module OpenAI url: String, ?type: :url_citation ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + type: :url_citation, + url: String + } end type file_path = { file_id: String, index: Integer, type: :file_path } @@ -105,9 +129,15 @@ module OpenAI index: Integer, ?type: :file_path ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_path + } end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation] end end end diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index 816d5b67..e60dae0f 100644 --- 
a/sig/openai/models/responses/response_text_config.rbs +++ b/sig/openai/models/responses/response_text_config.rbs @@ -14,6 +14,10 @@ module OpenAI def initialize: ( ?format_: OpenAI::Models::Responses::response_format_text_config ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } end end end diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index 6c7fb1db..bbea6304 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -32,6 +32,15 @@ module OpenAI sequence_number: Integer, ?type: :"response.output_text.delta" ) -> void + + def to_hash: -> { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text.delta" + } end end end diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 427e7b51..742fb5e3 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -32,6 +32,15 @@ module OpenAI text: String, ?type: :"response.output_text.done" ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: :"response.output_text.done" + } end end end diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs index a5b8cbc5..2245372b 100644 --- a/sig/openai/models/responses/response_usage.rbs +++ b/sig/openai/models/responses/response_usage.rbs @@ -29,12 +29,22 @@ module OpenAI total_tokens: Integer ) -> void + def to_hash: -> { + input_tokens: Integer, + input_tokens_details: OpenAI::Responses::ResponseUsage::InputTokensDetails, + output_tokens: Integer, + output_tokens_details: OpenAI::Responses::ResponseUsage::OutputTokensDetails, + total_tokens: Integer + } + type input_tokens_details = { cached_tokens: Integer } class InputTokensDetails < OpenAI::Internal::Type::BaseModel attr_accessor cached_tokens: Integer def initialize: (cached_tokens: Integer) -> void + + def to_hash: -> { cached_tokens: Integer } end type output_tokens_details = { reasoning_tokens: Integer } @@ -43,6 +53,8 @@ module OpenAI attr_accessor reasoning_tokens: Integer def initialize: (reasoning_tokens: Integer) -> void + + def to_hash: -> { reasoning_tokens: Integer } end end end diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index 852a046e..8f7eb8fb 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -20,6 +20,12 @@ module OpenAI output_index: Integer, ?type: :"response.web_search_call.completed" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + type: :"response.web_search_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index 996acf2e..9cb98649 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -20,6 +20,12 @@ module OpenAI 
output_index: Integer, ?type: :"response.web_search_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + type: :"response.web_search_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index b77bc5aa..55563d33 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -20,6 +20,12 @@ module OpenAI output_index: Integer, ?type: :"response.web_search_call.searching" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + type: :"response.web_search_call.searching" + } end end end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 40a2e90b..7d2d5cec 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -19,9 +19,9 @@ module OpenAI server_label: String, server_url: String, type: :mcp, - allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools?, + allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, headers: ::Hash[Symbol, String]?, - require_approval: OpenAI::Responses::Tool::Mcp::require_approval? + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? } class Mcp < OpenAI::Internal::Type::BaseModel @@ -31,21 +31,30 @@ module OpenAI attr_accessor type: :mcp - attr_accessor allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools? + attr_accessor allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools? attr_accessor headers: ::Hash[Symbol, String]? - attr_accessor require_approval: OpenAI::Responses::Tool::Mcp::require_approval? + attr_accessor require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? def initialize: ( server_label: String, server_url: String, - ?allowed_tools: OpenAI::Responses::Tool::Mcp::allowed_tools?, + ?allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, ?headers: ::Hash[Symbol, String]?, - ?require_approval: OpenAI::Responses::Tool::Mcp::require_approval?, + ?require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, ?type: :mcp ) -> void + def to_hash: -> { + server_label: String, + server_url: String, + type: :mcp, + allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + headers: ::Hash[Symbol, String]?, + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? 
+ } + type allowed_tools = ::Array[String] | OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter @@ -61,16 +70,18 @@ module OpenAI def tool_names=: (::Array[String]) -> ::Array[String] def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } end - def self?.variants: -> ::Array[OpenAI::Responses::Tool::Mcp::allowed_tools] + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::allowed_tools] StringArray: OpenAI::Internal::Type::Converter end type require_approval = OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter - | OpenAI::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting + | OpenAI::Models::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting module RequireApproval extend OpenAI::Internal::Type::Union @@ -105,6 +116,12 @@ module OpenAI ?tool_names: ::Array[String] ) -> void + def to_hash: -> { + always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + tool_names: ::Array[String] + } + type always = { tool_names: ::Array[String] } class Always < OpenAI::Internal::Type::BaseModel @@ -113,6 +130,8 @@ module OpenAI def tool_names=: (::Array[String]) -> ::Array[String] def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } end type never = { tool_names: ::Array[String] } @@ -123,6 +142,8 @@ module OpenAI def tool_names=: (::Array[String]) -> ::Array[String] def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } end end @@ -134,29 +155,34 @@ module OpenAI ALWAYS: :always NEVER: :never - def self?.values: -> ::Array[OpenAI::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting] end - def self?.variants: -> ::Array[OpenAI::Responses::Tool::Mcp::require_approval] + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::require_approval] end end type code_interpreter = { - container: OpenAI::Responses::Tool::CodeInterpreter::container, + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, type: :code_interpreter } class CodeInterpreter < OpenAI::Internal::Type::BaseModel - attr_accessor container: OpenAI::Responses::Tool::CodeInterpreter::container + attr_accessor container: OpenAI::Models::Responses::Tool::CodeInterpreter::container attr_accessor type: :code_interpreter def initialize: ( - container: OpenAI::Responses::Tool::CodeInterpreter::container, + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, ?type: :code_interpreter ) -> void + def to_hash: -> { + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, + type: :code_interpreter + } + type container = String | OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto @@ -175,34 +201,36 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String], ?type: :auto) -> void + + def to_hash: -> { type: :auto, file_ids: ::Array[String] } end - def self?.variants: -> ::Array[OpenAI::Responses::Tool::CodeInterpreter::container] + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::CodeInterpreter::container] end end type image_generation = { type: :image_generation, - background: 
OpenAI::Responses::Tool::ImageGeneration::background, + background: OpenAI::Models::Responses::Tool::ImageGeneration::background, input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, - model: OpenAI::Responses::Tool::ImageGeneration::model, - moderation: OpenAI::Responses::Tool::ImageGeneration::moderation, + model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, output_compression: Integer, - output_format: OpenAI::Responses::Tool::ImageGeneration::output_format, + output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, partial_images: Integer, - quality: OpenAI::Responses::Tool::ImageGeneration::quality, - size: OpenAI::Responses::Tool::ImageGeneration::size + quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + size: OpenAI::Models::Responses::Tool::ImageGeneration::size } class ImageGeneration < OpenAI::Internal::Type::BaseModel attr_accessor type: :image_generation - attr_reader background: OpenAI::Responses::Tool::ImageGeneration::background? + attr_reader background: OpenAI::Models::Responses::Tool::ImageGeneration::background? def background=: ( - OpenAI::Responses::Tool::ImageGeneration::background - ) -> OpenAI::Responses::Tool::ImageGeneration::background + OpenAI::Models::Responses::Tool::ImageGeneration::background + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::background attr_reader input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask? @@ -210,57 +238,70 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::InputImageMask ) -> OpenAI::Responses::Tool::ImageGeneration::InputImageMask - attr_reader model: OpenAI::Responses::Tool::ImageGeneration::model? + attr_reader model: OpenAI::Models::Responses::Tool::ImageGeneration::model? def model=: ( - OpenAI::Responses::Tool::ImageGeneration::model - ) -> OpenAI::Responses::Tool::ImageGeneration::model + OpenAI::Models::Responses::Tool::ImageGeneration::model + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::model - attr_reader moderation: OpenAI::Responses::Tool::ImageGeneration::moderation? + attr_reader moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation? def moderation=: ( - OpenAI::Responses::Tool::ImageGeneration::moderation - ) -> OpenAI::Responses::Tool::ImageGeneration::moderation + OpenAI::Models::Responses::Tool::ImageGeneration::moderation + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::moderation attr_reader output_compression: Integer? def output_compression=: (Integer) -> Integer - attr_reader output_format: OpenAI::Responses::Tool::ImageGeneration::output_format? + attr_reader output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format? def output_format=: ( - OpenAI::Responses::Tool::ImageGeneration::output_format - ) -> OpenAI::Responses::Tool::ImageGeneration::output_format + OpenAI::Models::Responses::Tool::ImageGeneration::output_format + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::output_format attr_reader partial_images: Integer? def partial_images=: (Integer) -> Integer - attr_reader quality: OpenAI::Responses::Tool::ImageGeneration::quality? + attr_reader quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality? 
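The `attr_reader`/writer pairs above encode a third variant: optional but not nilable. The reader is typed `quality?` only because the field may never have been set; the matching `def quality=` accepts just a concrete enum member, so nil can never be assigned back. A sketch (every `initialize` keyword is optional here):

    require "openai"

    tool = OpenAI::Models::Responses::Tool::ImageGeneration.new
    tool.quality          # => nil when unset, hence the nilable reader type
    tool.quality = :high  # writer takes a non-nil value per `def quality=`
    tool.quality          # => :high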
def quality=: ( - OpenAI::Responses::Tool::ImageGeneration::quality - ) -> OpenAI::Responses::Tool::ImageGeneration::quality + OpenAI::Models::Responses::Tool::ImageGeneration::quality + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::quality - attr_reader size: OpenAI::Responses::Tool::ImageGeneration::size? + attr_reader size: OpenAI::Models::Responses::Tool::ImageGeneration::size? def size=: ( - OpenAI::Responses::Tool::ImageGeneration::size - ) -> OpenAI::Responses::Tool::ImageGeneration::size + OpenAI::Models::Responses::Tool::ImageGeneration::size + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::size def initialize: ( - ?background: OpenAI::Responses::Tool::ImageGeneration::background, + ?background: OpenAI::Models::Responses::Tool::ImageGeneration::background, ?input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, - ?model: OpenAI::Responses::Tool::ImageGeneration::model, - ?moderation: OpenAI::Responses::Tool::ImageGeneration::moderation, + ?model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + ?moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, ?output_compression: Integer, - ?output_format: OpenAI::Responses::Tool::ImageGeneration::output_format, + ?output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, ?partial_images: Integer, - ?quality: OpenAI::Responses::Tool::ImageGeneration::quality, - ?size: OpenAI::Responses::Tool::ImageGeneration::size, + ?quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + ?size: OpenAI::Models::Responses::Tool::ImageGeneration::size, ?type: :image_generation ) -> void + def to_hash: -> { + type: :image_generation, + background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, + output_compression: Integer, + output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, + partial_images: Integer, + quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + size: OpenAI::Models::Responses::Tool::ImageGeneration::size + } + type background = :transparent | :opaque | :auto module Background @@ -270,7 +311,7 @@ module OpenAI OPAQUE: :opaque AUTO: :auto - def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::background] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::background] end type input_image_mask = { file_id: String, image_url: String } @@ -285,6 +326,8 @@ module OpenAI def image_url=: (String) -> String def initialize: (?file_id: String, ?image_url: String) -> void + + def to_hash: -> { file_id: String, image_url: String } end type model = :"gpt-image-1" @@ -294,7 +337,7 @@ module OpenAI GPT_IMAGE_1: :"gpt-image-1" - def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::model] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::model] end type moderation = :auto | :low @@ -305,7 +348,7 @@ module OpenAI AUTO: :auto LOW: :low - def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::moderation] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::moderation] end type output_format = :png | :webp | :jpeg @@ -317,7 +360,7 @@ module OpenAI WEBP: :webp JPEG: :jpeg - def self?.values: -> 
::Array[OpenAI::Responses::Tool::ImageGeneration::output_format] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::output_format] end type quality = :low | :medium | :high | :auto @@ -330,7 +373,7 @@ module OpenAI HIGH: :high AUTO: :auto - def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::quality] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::quality] end type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto @@ -343,7 +386,7 @@ module OpenAI SIZE_1536X1024: :"1536x1024" AUTO: :auto - def self?.values: -> ::Array[OpenAI::Responses::Tool::ImageGeneration::size] + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::size] end end @@ -353,6 +396,8 @@ module OpenAI attr_accessor type: :local_shell def initialize: (?type: :local_shell) -> void + + def to_hash: -> { type: :local_shell } end def self?.variants: -> ::Array[OpenAI::Models::Responses::tool] diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs index 1aa68ba1..10aa7372 100644 --- a/sig/openai/models/responses/tool_choice_function.rbs +++ b/sig/openai/models/responses/tool_choice_function.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :function def initialize: (name: String, ?type: :function) -> void + + def to_hash: -> { name: String, type: :function } end end end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 658e5091..5b7a418e 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -2,15 +2,19 @@ module OpenAI module Models module Responses type tool_choice_types = - { type: OpenAI::Responses::ToolChoiceTypes::type_ } + { type: OpenAI::Models::Responses::ToolChoiceTypes::type_ } class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Responses::ToolChoiceTypes::type_ + attr_accessor type: OpenAI::Models::Responses::ToolChoiceTypes::type_ def initialize: ( - type: OpenAI::Responses::ToolChoiceTypes::type_ + type: OpenAI::Models::Responses::ToolChoiceTypes::type_ ) -> void + def to_hash: -> { + type: OpenAI::Models::Responses::ToolChoiceTypes::type_ + } + type type_ = :file_search | :web_search_preview @@ -31,7 +35,7 @@ module OpenAI CODE_INTERPRETER: :code_interpreter MCP: :mcp - def self?.values: -> ::Array[OpenAI::Responses::ToolChoiceTypes::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_] end end end diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index dba0454c..3812d3bb 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -3,28 +3,34 @@ module OpenAI module Responses type web_search_tool = { - type: OpenAI::Responses::WebSearchTool::type_, - search_context_size: OpenAI::Responses::WebSearchTool::search_context_size, + type: OpenAI::Models::Responses::WebSearchTool::type_, + search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, user_location: OpenAI::Responses::WebSearchTool::UserLocation? } class WebSearchTool < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Responses::WebSearchTool::type_ + attr_accessor type: OpenAI::Models::Responses::WebSearchTool::type_ - attr_reader search_context_size: OpenAI::Responses::WebSearchTool::search_context_size? 
+ attr_reader search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size? def search_context_size=: ( - OpenAI::Responses::WebSearchTool::search_context_size - ) -> OpenAI::Responses::WebSearchTool::search_context_size + OpenAI::Models::Responses::WebSearchTool::search_context_size + ) -> OpenAI::Models::Responses::WebSearchTool::search_context_size attr_accessor user_location: OpenAI::Responses::WebSearchTool::UserLocation? def initialize: ( - type: OpenAI::Responses::WebSearchTool::type_, - ?search_context_size: OpenAI::Responses::WebSearchTool::search_context_size, + type: OpenAI::Models::Responses::WebSearchTool::type_, + ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, ?user_location: OpenAI::Responses::WebSearchTool::UserLocation? ) -> void + def to_hash: -> { + type: OpenAI::Models::Responses::WebSearchTool::type_, + search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, + user_location: OpenAI::Responses::WebSearchTool::UserLocation? + } + type type_ = :web_search_preview | :web_search_preview_2025_03_11 module Type @@ -33,7 +39,7 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 - def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::type_] end type search_context_size = :low | :medium | :high @@ -45,7 +51,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::search_context_size] end type user_location = @@ -75,6 +81,14 @@ module OpenAI ?timezone: String?, ?type: :approximate ) -> void + + def to_hash: -> { + type: :approximate, + city: String?, + country: String?, + region: String?, + timezone: String? 
+ } end end end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 42a66641..3014c03a 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -3,7 +3,7 @@ module OpenAI type responses_model = String | OpenAI::Models::chat_model - | OpenAI::ResponsesModel::responses_only_model + | OpenAI::Models::ResponsesModel::responses_only_model module ResponsesModel extend OpenAI::Internal::Type::Union @@ -22,7 +22,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::ResponsesModel::responses_only_model] + def self?.values: -> ::Array[OpenAI::Models::ResponsesModel::responses_only_model] end def self?.variants: -> ::Array[OpenAI::Models::responses_model] diff --git a/sig/openai/models/static_file_chunking_strategy.rbs b/sig/openai/models/static_file_chunking_strategy.rbs index 0ba93ae4..1d8dee49 100644 --- a/sig/openai/models/static_file_chunking_strategy.rbs +++ b/sig/openai/models/static_file_chunking_strategy.rbs @@ -12,6 +12,11 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index 6b2d231e..e65aa3db 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -12,6 +12,11 @@ module OpenAI static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void + + def to_hash: -> { + static: OpenAI::StaticFileChunkingStrategy, + type: :static + } end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index c0f5182c..a722310f 100644 --- a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -12,6 +12,11 @@ module OpenAI static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void + + def to_hash: -> { + static: OpenAI::StaticFileChunkingStrategy, + type: :static + } end end end diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index 955ba4fe..01762451 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -9,7 +9,7 @@ module OpenAI filename: String, object: :upload, purpose: String, - status: OpenAI::Upload::status, + status: OpenAI::Models::Upload::status, file: OpenAI::FileObject? } @@ -28,7 +28,7 @@ module OpenAI attr_accessor purpose: String - attr_accessor status: OpenAI::Upload::status + attr_accessor status: OpenAI::Models::Upload::status attr_accessor file: OpenAI::FileObject? @@ -39,11 +39,23 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Upload::status, + status: OpenAI::Models::Upload::status, ?file: OpenAI::FileObject?, ?object: :upload ) -> void + def to_hash: -> { + id: String, + bytes: Integer, + created_at: Integer, + expires_at: Integer, + filename: String, + object: :upload, + purpose: String, + status: OpenAI::Models::Upload::status, + file: OpenAI::FileObject? 
+ } + type status = :pending | :completed | :cancelled | :expired module Status @@ -54,7 +66,7 @@ module OpenAI CANCELLED: :cancelled EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Upload::status] + def self?.values: -> ::Array[OpenAI::Models::Upload::status] end end end diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index c5cb5b12..92040ad5 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index 4bdf3d87..c223bb70 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?md5: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + part_ids: ::Array[String], + :md5 => String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index dce638fe..85f45a52 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -28,6 +28,14 @@ module OpenAI purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + bytes: Integer, + filename: String, + mime_type: String, + purpose: OpenAI::Models::file_purpose, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 380d4829..deeec480 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -15,6 +15,11 @@ module OpenAI data: OpenAI::Internal::file_input, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + data: OpenAI::Internal::file_input, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 17613909..60554e85 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -26,6 +26,13 @@ module OpenAI upload_id: String, ?object: :"upload.part" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"upload.part", + upload_id: String + } end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 5ba3aa0f..24d34d54 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -9,7 +9,7 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :vector_store, - status: OpenAI::VectorStore::status, + status: OpenAI::Models::VectorStore::status, usage_bytes: Integer, expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: Integer? 
@@ -30,7 +30,7 @@ module OpenAI attr_accessor object: :vector_store - attr_accessor status: OpenAI::VectorStore::status + attr_accessor status: OpenAI::Models::VectorStore::status attr_accessor usage_bytes: Integer @@ -49,13 +49,27 @@ module OpenAI last_active_at: Integer?, metadata: OpenAI::Models::metadata?, name: String, - status: OpenAI::VectorStore::status, + status: OpenAI::Models::VectorStore::status, usage_bytes: Integer, ?expires_after: OpenAI::VectorStore::ExpiresAfter, ?expires_at: Integer?, ?object: :vector_store ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + file_counts: OpenAI::VectorStore::FileCounts, + last_active_at: Integer?, + metadata: OpenAI::Models::metadata?, + name: String, + object: :vector_store, + status: OpenAI::Models::VectorStore::status, + usage_bytes: Integer, + expires_after: OpenAI::VectorStore::ExpiresAfter, + expires_at: Integer? + } + type file_counts = { cancelled: Integer, @@ -83,6 +97,14 @@ module OpenAI in_progress: Integer, total: Integer ) -> void + + def to_hash: -> { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } end type status = :expired | :in_progress | :completed @@ -94,7 +116,7 @@ module OpenAI IN_PROGRESS: :in_progress COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::VectorStore::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStore::status] end type expires_after = { anchor: :last_active_at, days: Integer } @@ -105,6 +127,8 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } end end end diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 67a550f7..d5c48eb6 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -45,6 +45,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + type expires_after = { anchor: :last_active_at, days: Integer } class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -53,6 +62,8 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } end end end diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index 89da672f..d64b7288 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs index 8bc51626..1ca36ab4 100644 --- a/sig/openai/models/vector_store_deleted.rbs +++ b/sig/openai/models/vector_store_deleted.rbs @@ -15,6 +15,12 @@ module OpenAI deleted: bool, ?object: :"vector_store.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"vector_store.deleted" + } end end end diff --git 
a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index ed5b3079..dfbe777d 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -39,6 +39,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::VectorStoreListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index 6b466528..92c244d4 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 863ea4ab..0ad7493a 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -45,6 +45,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + query: OpenAI::Models::VectorStoreSearchParams::query, + filters: OpenAI::Models::VectorStoreSearchParams::filters, + max_num_results: Integer, + ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, + rewrite_query: bool, + request_options: OpenAI::RequestOptions + } + type query = String | ::Array[String] module Query @@ -65,26 +74,31 @@ module OpenAI type ranking_options = { - ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_reader ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker? + attr_reader ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker? def ranker=: ( - OpenAI::VectorStoreSearchParams::RankingOptions::ranker - ) -> OpenAI::VectorStoreSearchParams::RankingOptions::ranker + OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker + ) -> OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker attr_reader score_threshold: Float? 
def score_threshold=: (Float) -> Float def initialize: ( - ?ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, + ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, ?score_threshold: Float ) -> void + def to_hash: -> { + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, + score_threshold: Float + } + type ranker = :auto | :"default-2024-11-15" module Ranker @@ -93,7 +107,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self?.values: -> ::Array[OpenAI::VectorStoreSearchParams::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker] end end end diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index 2c977b03..3ce0a392 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -28,6 +28,14 @@ module OpenAI score: Float ) -> void + def to_hash: -> { + attributes: ::Hash[Symbol, OpenAI::Models::VectorStoreSearchResponse::attribute]?, + content: ::Array[OpenAI::Models::VectorStoreSearchResponse::Content], + file_id: String, + filename: String, + score: Float + } + type attribute = String | Float | bool module Attribute @@ -52,6 +60,11 @@ module OpenAI type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ ) -> void + def to_hash: -> { + text: String, + type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ + } + type type_ = :text module Type diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index cb5a0433..4faf2804 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -25,6 +25,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, + metadata: OpenAI::Models::metadata?, + name: String?, + request_options: OpenAI::RequestOptions + } + type expires_after = { anchor: :last_active_at, days: Integer } class ExpiresAfter < OpenAI::Internal::Type::BaseModel @@ -33,6 +40,8 @@ module OpenAI attr_accessor days: Integer def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } end end end diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index 0ee2d4d8..22b94cc4 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 3715958c..b9eca2b5 100644 --- a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_batch_create_params = { file_ids: ::Array[String], - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, chunking_strategy: 
OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_ids: ::Array[String] - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? @@ -25,17 +25,24 @@ module OpenAI def initialize: ( file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file_ids: ::Array[String], + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileBatchCreateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileBatchCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index 918e28f0..92bc31fb 100644 --- a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -52,6 +52,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + vector_store_id: String, + after: String, + before: String, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, + limit: Integer, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, + request_options: OpenAI::RequestOptions + } + type filter = :in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index 1e21e9d1..3f2ee92a 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index 5d35fcf1..1797ebf6 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs index 23306b97..200ec37f 100644 --- a/sig/openai/models/vector_stores/file_content_response.rbs +++ b/sig/openai/models/vector_stores/file_content_response.rbs @@ -13,6 +13,8 @@ module OpenAI def type=: (String) -> String def initialize: (?text: 
String, ?type: String) -> void + + def to_hash: -> { text: String, type: String } end end end diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 29469ef1..b5505515 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_create_params = { file_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? @@ -25,17 +25,24 @@ module OpenAI def initialize: ( file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileCreateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index 5fc9986b..06e77cba 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 623629d4..45e9fc25 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -48,6 +48,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + filter: OpenAI::Models::VectorStores::FileListParams::filter, + limit: Integer, + order: OpenAI::Models::VectorStores::FileListParams::order, + request_options: OpenAI::RequestOptions + } + type filter = :in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index d46822ed..c0cac542 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + 
vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index 58700418..83729e3e 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_update_params = { vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? } & OpenAI::Internal::Type::request_parameters @@ -14,20 +14,26 @@ module OpenAI attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? def initialize: ( vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + vector_store_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileUpdateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileUpdateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 5bbb1310..3c64122d 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -9,10 +9,10 @@ module OpenAI created_at: Integer, last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, object: :"vector_store.file", - status: OpenAI::VectorStores::VectorStoreFile::status, + status: OpenAI::Models::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy } @@ -25,13 +25,13 @@ module OpenAI attr_accessor object: :"vector_store.file" - attr_accessor status: OpenAI::VectorStores::VectorStoreFile::status + attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFile::status attr_accessor usage_bytes: Integer attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy? 
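# A reading aid for the `to_hash` signatures added throughout these files:
# each one types the plain-Hash view of a model. A minimal sketch at the Ruby
# level, assuming an already-configured client and a retrieved file (the IDs
# and values shown are illustrative assumptions, not generated output):
#
#   vs_file = client.vector_stores.files.retrieve("file-abc", vector_store_id: "vs_123")
#   vs_file.to_hash
#   # => { id: "file-abc", object: :"vector_store.file", status: :completed, ... }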
@@ -43,30 +43,47 @@ module OpenAI id: String, created_at: Integer, last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, - status: OpenAI::VectorStores::VectorStoreFile::status, + status: OpenAI::Models::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy, ?object: :"vector_store.file" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, + object: :"vector_store.file", + status: OpenAI::Models::VectorStores::VectorStoreFile::status, + usage_bytes: Integer, + vector_store_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy + } + type last_error = { - code: OpenAI::VectorStores::VectorStoreFile::LastError::code, + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::VectorStores::VectorStoreFile::LastError::code + attr_accessor code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::VectorStores::VectorStoreFile::LastError::code, + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, + message: String + } + type code = :server_error | :unsupported_file | :invalid_file module Code @@ -76,7 +93,7 @@ module OpenAI UNSUPPORTED_FILE: :unsupported_file INVALID_FILE: :invalid_file - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::code] end end @@ -90,7 +107,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::status] end type attribute = String | Float | bool @@ -98,7 +115,7 @@ module OpenAI module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::VectorStoreFile::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::attribute] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index b21e96bd..6ad78bb0 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -9,7 +9,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, object: :"vector_store.files_batch", - status: OpenAI::VectorStores::VectorStoreFileBatch::status, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, vector_store_id: String } @@ -22,7 +22,7 @@ module OpenAI attr_accessor object: :"vector_store.files_batch" - attr_accessor status: OpenAI::VectorStores::VectorStoreFileBatch::status + attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status attr_accessor vector_store_id: String @@ -30,11 +30,20 @@ module OpenAI id: 
String, created_at: Integer, file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, - status: OpenAI::VectorStores::VectorStoreFileBatch::status, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, vector_store_id: String, ?object: :"vector_store.files_batch" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, + object: :"vector_store.files_batch", + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, + vector_store_id: String + } + type file_counts = { cancelled: Integer, @@ -62,6 +71,14 @@ module OpenAI in_progress: Integer, total: Integer ) -> void + + def to_hash: -> { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } end type status = :in_progress | :completed | :cancelled | :failed @@ -74,7 +91,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFileBatch::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::status] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs index d9d9038a..235b13ae 100644 --- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs @@ -18,6 +18,12 @@ module OpenAI deleted: bool, ?object: :"vector_store.file.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"vector_store.file.deleted" + } end end end diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 9f70fdd9..0130f147 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -11,7 +11,7 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response @@ -24,7 +24,7 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Audio::transcription_stream_event] diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 3eb309a0..5e457166 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -16,7 +16,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -50,7 +50,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: 
::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, diff --git a/sig/openai/resources/evals.rbs b/sig/openai/resources/evals.rbs index b4ed4454..dd3d6cc5 100644 --- a/sig/openai/resources/evals.rbs +++ b/sig/openai/resources/evals.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/vector_stores/file_batches.rbs b/sig/openai/resources/vector_stores/file_batches.rbs index 448f8ebb..1228381c 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( String vector_store_id, file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFileBatch diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index 2a650189..dfbccc37 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( String vector_store_id, file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFile @@ -19,7 +19,7 @@ module OpenAI def update: ( String file_id, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFile From 9ef04251ab9ce210ac3834d9c43963656e2db2bf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 19:58:27 +0000 Subject: [PATCH 205/295] feat(api): new streaming helpers for background responses --- .stats.yml | 6 +- lib/openai.rb | 1 - .../models/chat/completion_create_params.rb | 6 +- .../responses/input_item_list_params.rb | 6 +- lib/openai/models/responses/response.rb | 6 +- .../responses/response_create_params.rb | 6 +- .../models/responses/response_stream_event.rb | 6 +- .../response_text_annotation_delta_event.rb | 203 ------------- ...esponse_web_search_call_completed_event.rb | 10 +- ...ponse_web_search_call_in_progress_event.rb | 10 +- ...esponse_web_search_call_searching_event.rb | 10 +- lib/openai/models/responses/tool.rb | 10 +- lib/openai/resources/chat/completions.rb | 4 +- lib/openai/resources/containers.rb | 3 +- lib/openai/resources/containers/files.rb | 1 + lib/openai/resources/responses.rb | 6 +- lib/openai/resources/responses/input_items.rb | 2 +- 
.../models/chat/completion_create_params.rbi | 8 +- .../responses/input_item_list_params.rbi | 6 +- rbi/openai/models/responses/response.rbi | 8 +- .../responses/response_create_params.rbi | 8 +- .../responses/response_stream_event.rbi | 1 - .../response_text_annotation_delta_event.rbi | 285 ------------------ ...sponse_web_search_call_completed_event.rbi | 22 +- ...onse_web_search_call_in_progress_event.rbi | 22 +- ...sponse_web_search_call_searching_event.rbi | 22 +- rbi/openai/models/responses/tool.rbi | 17 +- rbi/openai/resources/chat/completions.rbi | 8 +- rbi/openai/resources/containers.rbi | 6 +- rbi/openai/resources/responses.rbi | 8 +- .../resources/responses/input_items.rbi | 2 +- .../responses/response_stream_event.rbs | 1 - .../response_text_annotation_delta_event.rbs | 145 --------- ...sponse_web_search_call_completed_event.rbs | 5 + ...onse_web_search_call_in_progress_event.rbs | 5 + ...sponse_web_search_call_searching_event.rbs | 5 + sig/openai/models/responses/tool.rbs | 13 +- 37 files changed, 157 insertions(+), 736 deletions(-) delete mode 100644 lib/openai/models/responses/response_text_annotation_delta_event.rb delete mode 100644 rbi/openai/models/responses/response_text_annotation_delta_event.rbi delete mode 100644 sig/openai/models/responses/response_text_annotation_delta_event.rbs diff --git a/.stats.yml b/.stats.yml index 250c0842..2614f4ca 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6af14840a810139bf407013167ce1c8fb21b6ef8eb0cc3db58b51af7d52c4b5a.yml -openapi_spec_hash: 3241bde6b273cfec0035e522bd07985d -config_hash: 7367b68a4e7db36885c1a886f57b17f6 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml +openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c +config_hash: c497f6b750cc89c0bf2eefc0bc839c70 diff --git a/lib/openai.rb b/lib/openai.rb index bf5c2abf..177d635e 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -425,7 +425,6 @@ require_relative "openai/models/responses/response_retrieve_params" require_relative "openai/models/responses/response_status" require_relative "openai/models/responses/response_stream_event" -require_relative "openai/models/responses/response_text_annotation_delta_event" require_relative "openai/models/responses/response_text_config" require_relative "openai/models/responses/response_text_delta_event" require_relative "openai/models/responses/response_text_done_event" diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 65446009..efe22a7d 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -313,8 +313,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
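  #
  # @example A sketch of passing a stable end-user identifier (the model name
  #   and identifier scheme here are illustrative assumptions, not part of
  #   this patch):
  #   client.chat.completions.create(
  #     model: :"gpt-4.1",
  #     messages: [{role: :user, content: "Hello!"}],
  #     user: "end-user-1234"
  #   )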
# # @return [String, nil] @@ -388,7 +388,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index bd23a462..cb40574b 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -35,7 +35,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel optional :limit, Integer # @!attribute order - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. @@ -55,11 +55,11 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `desc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 6d86a9c8..20471340 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -224,8 +224,8 @@ class Response < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Responses::ResponseUsage } # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] @@ -279,7 +279,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param object [Symbol, :response] The object type of this resource - always set to `response`. 
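A minimal sketch of the two behaviors documented above, input-item ordering and the stable `user` identifier (client setup, model name, and IDs are illustrative assumptions, not part of this patch):

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    response = client.responses.create(
      model: :"gpt-4.1",
      input: "Write a haiku about caching.",
      user: "end-user-1234" # stable per-end-user ID; improves prompt cache hit rates
    )
    # Default order is `desc`; pass `order: :asc` to replay the items oldest-first.
    client.responses.input_items.list(response.id, order: :asc, limit: 20).each do |item|
      puts item.to_hash
    end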
diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index eb148cf0..2058a351 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -211,8 +211,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :truncation, enum: -> { OpenAI::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] @@ -258,7 +258,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 6da18a36..eb98e953 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -116,10 +116,6 @@ module ResponseStreamEvent # Emitted when refusal text is finalized. variant :"response.refusal.done", -> { OpenAI::Responses::ResponseRefusalDoneEvent } - # Emitted when a text annotation is added. - variant :"response.output_text.annotation.added", - -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent } - # Emitted when there is an additional text delta. 
variant :"response.output_text.delta", -> { OpenAI::Responses::ResponseTextDeltaEvent } @@ -214,7 +210,7 @@ module ResponseStreamEvent } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, 
OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb deleted file mode 100644 index c22ecad8..00000000 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ /dev/null @@ -1,203 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class 
ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - # @!attribute annotation - # A citation to a file. - # - # @return [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - required :annotation, union: -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation } - - # @!attribute annotation_index - # The index of the annotation that was added. - # - # @return [Integer] - required :annotation_index, Integer - - # @!attribute content_index - # The index of the content part that the text annotation was added to. - # - # @return [Integer] - required :content_index, Integer - - # @!attribute item_id - # The ID of the output item that the text annotation was added to. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item that the text annotation was added to. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute sequence_number - # The sequence number of this event. - # - # @return [Integer] - required :sequence_number, Integer - - # @!attribute type - # The type of the event. Always `response.output_text.annotation.added`. - # - # @return [Symbol, :"response.output_text.annotation.added"] - required :type, const: :"response.output_text.annotation.added" - - # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text.annotation.added") - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent} for more details. - # - # Emitted when a text annotation is added. - # - # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. - # - # @param annotation_index [Integer] The index of the annotation that was added. - # - # @param content_index [Integer] The index of the content part that the text annotation was added to. - # - # @param item_id [String] The ID of the output item that the text annotation was added to. - # - # @param output_index [Integer] The index of the output item that the text annotation was added to. - # - # @param sequence_number [Integer] The sequence number of this event. - # - # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. - - # A citation to a file. - # - # @see OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent#annotation - module Annotation - extend OpenAI::Internal::Type::Union - - discriminator :type - - # A citation to a file. - variant :file_citation, - -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation } - - # A citation for a web resource used to generate a model response. - variant :url_citation, - -> { - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation - } - - # A path to a file. - variant :file_path, -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath } - - class FileCitation < OpenAI::Internal::Type::BaseModel - # @!attribute file_id - # The ID of the file. 
- # - # @return [String] - required :file_id, String - - # @!attribute index - # The index of the file in the list of files. - # - # @return [Integer] - required :index, Integer - - # @!attribute type - # The type of the file citation. Always `file_citation`. - # - # @return [Symbol, :file_citation] - required :type, const: :file_citation - - # @!method initialize(file_id:, index:, type: :file_citation) - # A citation to a file. - # - # @param file_id [String] The ID of the file. - # - # @param index [Integer] The index of the file in the list of files. - # - # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. - end - - class URLCitation < OpenAI::Internal::Type::BaseModel - # @!attribute end_index - # The index of the last character of the URL citation in the message. - # - # @return [Integer] - required :end_index, Integer - - # @!attribute start_index - # The index of the first character of the URL citation in the message. - # - # @return [Integer] - required :start_index, Integer - - # @!attribute title - # The title of the web resource. - # - # @return [String] - required :title, String - - # @!attribute type - # The type of the URL citation. Always `url_citation`. - # - # @return [Symbol, :url_citation] - required :type, const: :url_citation - - # @!attribute url - # The URL of the web resource. - # - # @return [String] - required :url, String - - # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) - # A citation for a web resource used to generate a model response. - # - # @param end_index [Integer] The index of the last character of the URL citation in the message. - # - # @param start_index [Integer] The index of the first character of the URL citation in the message. - # - # @param title [String] The title of the web resource. - # - # @param url [String] The URL of the web resource. - # - # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. - end - - class FilePath < OpenAI::Internal::Type::BaseModel - # @!attribute file_id - # The ID of the file. - # - # @return [String] - required :file_id, String - - # @!attribute index - # The index of the file in the list of files. - # - # @return [Integer] - required :index, Integer - - # @!attribute type - # The type of the file path. Always `file_path`. - # - # @return [Symbol, :file_path] - required :type, const: :file_path - - # @!method initialize(file_id:, index:, type: :file_path) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} - # for more details. - # - # A path to a file. - # - # @param file_id [String] The ID of the file. - # - # @param index [Integer] The index of the file in the list of files. - # - # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. 
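The removal above does not drop annotation streaming: `response.output_text.annotation.added` events continue to flow, modeled by `OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent`, which stays in the variants list earlier in this patch. A minimal sketch of handling it, assuming `stream` is the enumerator returned by `client.responses.stream_raw(...)`:

```ruby
# Sketch only: field names follow the `response.output_text.annotation.added`
# payload; adjust to your own stream handling.
stream.each do |event|
  next unless event.type == :"response.output_text.annotation.added"

  puts "annotation ##{event.annotation_index} added to item #{event.item_id}"
end
```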
- end - - # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - end - end - end - end -end diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index c7a5da28..91ae2f37 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -16,13 +16,19 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.completed`. # # @return [Symbol, :"response.web_search_call.completed"] required :type, const: :"response.web_search_call.completed" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.completed") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent} for more # details. @@ -33,6 +39,8 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. + # # @param type [Symbol, :"response.web_search_call.completed"] The type of the event. Always `response.web_search_call.completed`. end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index dc6f12fa..f00f81c0 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -16,13 +16,19 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.in_progress`. # # @return [Symbol, :"response.web_search_call.in_progress"] required :type, const: :"response.web_search_call.in_progress" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.in_progress") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent} for more # details. @@ -33,6 +39,8 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. 
+ # # @param type [Symbol, :"response.web_search_call.in_progress"] The type of the event. Always `response.web_search_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index 74cebb9c..d1552a07 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -16,13 +16,19 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.searching`. # # @return [Symbol, :"response.web_search_call.searching"] required :type, const: :"response.web_search_call.searching" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.searching") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent} for more # details. @@ -33,6 +39,8 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. + # # @param type [Symbol, :"response.web_search_call.searching"] The type of the event. Always `response.web_search_call.searching`. end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index ede6dc3c..f97fae7e 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -156,13 +156,7 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } - # @!attribute tool_names - # List of allowed tool names. - # - # @return [Array, nil] - optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] - - # @!method initialize(always: nil, never: nil, tool_names: nil) + # @!method initialize(always: nil, never: nil) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter} # for more details. @@ -170,8 +164,6 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel # @param always [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval. # # @param never [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval. - # - # @param tool_names [Array] List of allowed tool names. 
# @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always class Always < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 8d5c0fda..4dd48bca 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -88,7 +88,7 @@ class Completions # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # @@ -193,7 +193,7 @@ def create(params) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # diff --git a/lib/openai/resources/containers.rb b/lib/openai/resources/containers.rb index 01d685d2..2d582be8 100644 --- a/lib/openai/resources/containers.rb +++ b/lib/openai/resources/containers.rb @@ -85,7 +85,8 @@ def list(params = {}) # # @overload delete(container_id, request_options: {}) # - # @param container_id [String] + # @param container_id [String] The ID of the container to delete. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [nil] diff --git a/lib/openai/resources/containers/files.rb b/lib/openai/resources/containers/files.rb index c2adf25f..356bead3 100644 --- a/lib/openai/resources/containers/files.rb +++ b/lib/openai/resources/containers/files.rb @@ -33,6 +33,7 @@ def create(container_id, params = {}) @client.request( method: :post, path: ["containers/%1$s/files", container_id], + headers: {"content-type" => "multipart/form-data"}, body: parsed, model: OpenAI::Models::Containers::FileCreateResponse, options: options diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 43ae26e8..b2964e96 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -61,7 +61,7 @@ class Responses # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -138,11 +138,11 @@ def create(params) # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. 
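Pinning `content-type` to `multipart/form-data` routes container file uploads through the SDK's streaming multipart encoder rather than JSON. A sketch, assuming the endpoint's `file` parameter and an illustrative container ID:

```ruby
require "pathname"

file = Pathname("data/report.csv") # Pathname, IO, and StringIO inputs all stream
container_file = client.containers.files.create("cntr_abc123", file: file)
puts container_file.id
```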
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index de693bf2..9b62eaa2 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -21,7 +21,7 @@ class InputItems # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `desc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 49cdaeba..98b52bc4 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -387,8 +387,8 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :top_p - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -666,8 +666,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more diff --git a/rbi/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi index 403928db..f495c4f3 100644 --- a/rbi/openai/models/responses/input_item_list_params.rbi +++ b/rbi/openai/models/responses/input_item_list_params.rbi @@ -53,7 +53,7 @@ module OpenAI sig { params(limit: Integer).void } attr_writer :limit - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. @@ -92,7 +92,7 @@ module OpenAI # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. limit: nil, - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. @@ -117,7 +117,7 @@ module OpenAI def to_hash end - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. 
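Only comments change here — the documented default simply catches up with the service's actual `desc` ordering. Code that relied on the old `asc` wording should pass the order explicitly; a sketch with an illustrative response ID:

```ruby
page = client.responses.input_items.list(
  "resp_abc123", # illustrative response ID
  order: :asc,   # state the order explicitly rather than trusting the default
  limit: 20
)
page.each { |item| puts item.id }
```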
# - `desc`: Return the input items in descending order. diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 89969f4a..97408461 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -220,8 +220,8 @@ module OpenAI sig { params(usage: OpenAI::Responses::ResponseUsage::OrHash).void } attr_writer :usage - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -421,8 +421,8 @@ module OpenAI # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. usage: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # The object type of this resource - always set to `response`. diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 8ca7d2bb..592d684d 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -267,8 +267,8 @@ module OpenAI end attr_accessor :truncation - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -458,8 +458,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
user: nil, request_options: {} diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index a29df0c1..1afd018a 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -40,7 +40,6 @@ module OpenAI OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi deleted file mode 100644 index 53babd9a..00000000 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ /dev/null @@ -1,285 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Responses - class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Internal::AnyHash - ) - end - - # A citation to a file. - sig do - returns( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants - ) - end - attr_accessor :annotation - - # The index of the annotation that was added. - sig { returns(Integer) } - attr_accessor :annotation_index - - # The index of the content part that the text annotation was added to. - sig { returns(Integer) } - attr_accessor :content_index - - # The ID of the output item that the text annotation was added to. - sig { returns(String) } - attr_accessor :item_id - - # The index of the output item that the text annotation was added to. - sig { returns(Integer) } - attr_accessor :output_index - - # The sequence number of this event. - sig { returns(Integer) } - attr_accessor :sequence_number - - # The type of the event. Always `response.output_text.annotation.added`. - sig { returns(Symbol) } - attr_accessor :type - - # Emitted when a text annotation is added. - sig do - params( - annotation: - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation::OrHash, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation::OrHash, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath::OrHash - ), - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A citation to a file. - annotation:, - # The index of the annotation that was added. - annotation_index:, - # The index of the content part that the text annotation was added to. - content_index:, - # The ID of the output item that the text annotation was added to. - item_id:, - # The index of the output item that the text annotation was added to. - output_index:, - # The sequence number of this event. - sequence_number:, - # The type of the event. Always `response.output_text.annotation.added`. 
- type: :"response.output_text.annotation.added" - ) - end - - sig do - override.returns( - { - annotation: - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: Symbol - } - ) - end - def to_hash - end - - # A citation to a file. - module Annotation - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - end - - class FileCitation < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Internal::AnyHash - ) - end - - # The ID of the file. - sig { returns(String) } - attr_accessor :file_id - - # The index of the file in the list of files. - sig { returns(Integer) } - attr_accessor :index - - # The type of the file citation. Always `file_citation`. - sig { returns(Symbol) } - attr_accessor :type - - # A citation to a file. - sig do - params(file_id: String, index: Integer, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The ID of the file. - file_id:, - # The index of the file in the list of files. - index:, - # The type of the file citation. Always `file_citation`. - type: :file_citation - ) - end - - sig do - override.returns( - { file_id: String, index: Integer, type: Symbol } - ) - end - def to_hash - end - end - - class URLCitation < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Internal::AnyHash - ) - end - - # The index of the last character of the URL citation in the message. - sig { returns(Integer) } - attr_accessor :end_index - - # The index of the first character of the URL citation in the message. - sig { returns(Integer) } - attr_accessor :start_index - - # The title of the web resource. - sig { returns(String) } - attr_accessor :title - - # The type of the URL citation. Always `url_citation`. - sig { returns(Symbol) } - attr_accessor :type - - # The URL of the web resource. - sig { returns(String) } - attr_accessor :url - - # A citation for a web resource used to generate a model response. - sig do - params( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The index of the last character of the URL citation in the message. - end_index:, - # The index of the first character of the URL citation in the message. - start_index:, - # The title of the web resource. - title:, - # The URL of the web resource. - url:, - # The type of the URL citation. Always `url_citation`. - type: :url_citation - ) - end - - sig do - override.returns( - { - end_index: Integer, - start_index: Integer, - title: String, - type: Symbol, - url: String - } - ) - end - def to_hash - end - end - - class FilePath < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath, - OpenAI::Internal::AnyHash - ) - end - - # The ID of the file. - sig { returns(String) } - attr_accessor :file_id - - # The index of the file in the list of files. 
- sig { returns(Integer) } - attr_accessor :index - - # The type of the file path. Always `file_path`. - sig { returns(Symbol) } - attr_accessor :type - - # A path to a file. - sig do - params(file_id: String, index: Integer, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The ID of the file. - file_id:, - # The index of the file in the list of files. - index:, - # The type of the file path. Always `file_path`. - type: :file_path - ) - end - - sig do - override.returns( - { file_id: String, index: Integer, type: Symbol } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants - ] - ) - end - def self.variants - end - end - end - end - end -end diff --git a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi index f090f369..78f9a4e9 100644 --- a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.completed`. sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is completed. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. item_id:, # The index of the output item that the web search call is associated with. output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.completed`. type: :"response.web_search_call.completed" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi index c8fdaf5a..8fc0415b 100644 --- a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.in_progress`. sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is initiated. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. item_id:, # The index of the output item that the web search call is associated with. 
output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.in_progress`. type: :"response.web_search_call.in_progress" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi index 5f0b5d33..15ce4ac3 100644 --- a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.searching`. sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is executing. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. item_id:, # The index of the output item that the web search call is associated with. output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.searching`. type: :"response.web_search_call.searching" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index c46f1d66..0b7fbd91 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -254,29 +254,19 @@ module OpenAI end attr_writer :never - # List of allowed tool names. - sig { returns(T.nilable(T::Array[String])) } - attr_reader :tool_names - - sig { params(tool_names: T::Array[String]).void } - attr_writer :tool_names - sig do params( always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always::OrHash, never: - OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash, - tool_names: T::Array[String] + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash ).returns(T.attached_class) end def self.new( # A list of tools that always require approval. always: nil, # A list of tools that never require approval. - never: nil, - # List of allowed tool names. 
- tool_names: nil + never: nil ) end @@ -286,8 +276,7 @@ module OpenAI always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, never: - OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, - tool_names: T::Array[String] + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } ) end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 6193c799..2261046e 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -279,8 +279,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more @@ -566,8 +566,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more diff --git a/rbi/openai/resources/containers.rbi b/rbi/openai/resources/containers.rbi index 2ca5a9df..fc23a5f6 100644 --- a/rbi/openai/resources/containers.rbi +++ b/rbi/openai/resources/containers.rbi @@ -70,7 +70,11 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).void end - def delete(container_id, request_options: {}) + def delete( + # The ID of the container to delete. + container_id, + request_options: {} + ) end # @api private diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index e4f15ce4..3753b593 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -202,8 +202,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` @@ -413,8 +413,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. 
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index de7fb1ae..db8126f6 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -33,7 +33,7 @@ module OpenAI # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. limit: nil, - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index a14a4f29..21511833 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -32,7 +32,6 @@ module OpenAI | OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent | OpenAI::Responses::ResponseRefusalDeltaEvent | OpenAI::Responses::ResponseRefusalDoneEvent - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent | OpenAI::Responses::ResponseTextDeltaEvent | OpenAI::Responses::ResponseTextDoneEvent | OpenAI::Responses::ResponseWebSearchCallCompletedEvent diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs deleted file mode 100644 index 46c53b36..00000000 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ /dev/null @@ -1,145 +0,0 @@ -module OpenAI - module Models - module Responses - type response_text_annotation_delta_event = - { - annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: :"response.output_text.annotation.added" - } - - class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation - - attr_accessor annotation_index: Integer - - attr_accessor content_index: Integer - - attr_accessor item_id: String - - attr_accessor output_index: Integer - - attr_accessor sequence_number: Integer - - attr_accessor type: :"response.output_text.annotation.added" - - def initialize: ( - annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - ?type: :"response.output_text.annotation.added" - ) -> void - - def to_hash: -> { - annotation: OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: :"response.output_text.annotation.added" - } - - type annotation = - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - - module Annotation - extend OpenAI::Internal::Type::Union - - type file_citation = - { file_id: 
String, index: Integer, type: :file_citation } - - class FileCitation < OpenAI::Internal::Type::BaseModel - attr_accessor file_id: String - - attr_accessor index: Integer - - attr_accessor type: :file_citation - - def initialize: ( - file_id: String, - index: Integer, - ?type: :file_citation - ) -> void - - def to_hash: -> { - file_id: String, - index: Integer, - type: :file_citation - } - end - - type url_citation = - { - end_index: Integer, - start_index: Integer, - title: String, - type: :url_citation, - url: String - } - - class URLCitation < OpenAI::Internal::Type::BaseModel - attr_accessor end_index: Integer - - attr_accessor start_index: Integer - - attr_accessor title: String - - attr_accessor type: :url_citation - - attr_accessor url: String - - def initialize: ( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - ?type: :url_citation - ) -> void - - def to_hash: -> { - end_index: Integer, - start_index: Integer, - title: String, - type: :url_citation, - url: String - } - end - - type file_path = { file_id: String, index: Integer, type: :file_path } - - class FilePath < OpenAI::Internal::Type::BaseModel - attr_accessor file_id: String - - attr_accessor index: Integer - - attr_accessor type: :file_path - - def initialize: ( - file_id: String, - index: Integer, - ?type: :file_path - ) -> void - - def to_hash: -> { - file_id: String, - index: Integer, - type: :file_path - } - end - - def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::annotation] - end - end - end - end -end diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index 8f7eb8fb..66882df9 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.completed" } @@ -13,17 +14,21 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.web_search_call.completed" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.completed" ) -> void def to_hash: -> { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.completed" } end diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index 9cb98649..b2928e82 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.in_progress" } @@ -13,17 +14,21 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.web_search_call.in_progress" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.in_progress" ) -> void def to_hash: -> { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.in_progress" } end diff --git 
a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index 55563d33..4c3a659f 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.searching" } @@ -13,17 +14,21 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.web_search_call.searching" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.searching" ) -> void def to_hash: -> { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.searching" } end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 7d2d5cec..0655d5c6 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -89,8 +89,7 @@ module OpenAI type mcp_tool_approval_filter = { always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, - never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, - tool_names: ::Array[String] + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel @@ -106,20 +105,14 @@ module OpenAI OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never ) -> OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never - attr_reader tool_names: ::Array[String]? - - def tool_names=: (::Array[String]) -> ::Array[String] - def initialize: ( ?always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, - ?never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, - ?tool_names: ::Array[String] + ?never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never ) -> void def to_hash: -> { always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, - never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, - tool_names: ::Array[String] + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } type always = { tool_names: ::Array[String] } From a030f0cd522415bc5d99980aafa2f0942a1e2b0f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 20:11:20 +0000 Subject: [PATCH 206/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index e613b816..aff3ead3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-beta.1" + ".": "0.1.0-beta.2" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index aa8e1840..30d820b9 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.1.0.pre.beta.1) + openai (0.1.0.pre.beta.2) connection_pool GEM diff --git a/README.md b/README.md index 73d47cda..b5d65e8d 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.1.0.pre.beta.1" +gem "openai", "~> 0.1.0.pre.beta.2" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index c559f45b..5dc9f2b6 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.beta.1" + VERSION = "0.1.0.pre.beta.2" end From c48762b7056f1bd6729bf3ad2bb35abfd18f90f5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 21:06:36 +0000 Subject: [PATCH 207/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 2614f4ca..57774fe0 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c -config_hash: c497f6b750cc89c0bf2eefc0bc839c70 +config_hash: 535b6e5f26a295d609b259c8cb8f656c From 7648226ac4b09732b5010ba272f4e34499058e04 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 22 May 2025 21:29:53 +0000 Subject: [PATCH 208/295] chore(internal): fix release workflows --- .github/workflows/create-releases.yml | 28 +++++++++++++++++++++++++++ .github/workflows/publish-gem.yml | 8 ++------ .github/workflows/release-doctor.yml | 1 + bin/check-release-environment | 4 ++++ 4 files changed, 35 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/create-releases.yml diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml new file mode 100644 index 00000000..6ae563b0 --- /dev/null +++ b/.github/workflows/create-releases.yml @@ -0,0 +1,28 @@ +name: Create releases +on: + schedule: + - cron: '0 5 * * *' # every day at 5am UTC + push: + branches: + - main + +jobs: + release: + name: release + if: github.ref == 'refs/heads/main' && github.repository == 'openai/openai-ruby' + runs-on: ubuntu-latest + environment: publish + + steps: + - uses: actions/checkout@v4 + + - uses: stainless-api/trigger-release-please@v1 + id: release + with: + repo: ${{ github.event.repository.full_name }} + stainless-api-key: ${{ secrets.STAINLESS_API_KEY }} + + - name: Update RubyDocs + if: ${{ steps.release.outputs.releases_created }} + run: | + curl --request POST --include --header 'Content-Type: application/json' --data '{"repository":{"url":"https://github.com/openai/openai-ruby"}}' -- https://www.rubydoc.info/checkout diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml index 7a7a1d07..725febaa 100644 --- a/.github/workflows/publish-gem.yml +++ b/.github/workflows/publish-gem.yml @@ -1,13 +1,9 @@ -# This workflow is triggered when a GitHub release is created. -# It can also be run manually to re-publish to rubygems.org in case it failed for some reason. 
-# You can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-gem.yml +# workflow for re-running publishing to rubygems.org in case it fails for some reason +# you can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-gem.yml name: Publish Gem on: workflow_dispatch: - release: - types: [published] - jobs: publish: name: publish diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index 1659237f..fc3ec131 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -19,5 +19,6 @@ jobs: run: | bash ./bin/check-release-environment env: + STAINLESS_API_KEY: ${{ secrets.STAINLESS_API_KEY }} RUBYGEMS_HOST: ${{ secrets.OPENAI_RUBYGEMS_HOST || secrets.RUBYGEMS_HOST }} GEM_HOST_API_KEY: ${{ secrets.OPENAI_GEM_HOST_API_KEY || secrets.GEM_HOST_API_KEY }} diff --git a/bin/check-release-environment b/bin/check-release-environment index 6303e291..6aa95c4f 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -2,6 +2,10 @@ errors=() +if [ -z "${STAINLESS_API_KEY}" ]; then + errors+=("The STAINLESS_API_KEY secret has not been set. Please contact Stainless for an API key & set it in your organization secrets on GitHub.") +fi + if [ -z "${GEM_HOST_API_KEY}" ]; then errors+=("The OPENAI_GEM_HOST_API_KEY secret has not been set. Please set it in either this repository's secrets or your organization secrets") fi From 93c7be545c41e6fd79c816a6337bcd930530c320 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 15:36:07 +0000 Subject: [PATCH 209/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index aff3ead3..1f0d8a9b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-beta.2" + ".": "0.4.0-beta.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 30d820b9..9496ce68 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.1.0.pre.beta.2) + openai (0.4.0.pre.beta.1) connection_pool GEM diff --git a/README.md b/README.md index b5d65e8d..d96410b4 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.1.0.pre.beta.2" +gem "openai", "~> 0.4.0.pre.beta.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 5dc9f2b6..03fe9c50 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.beta.2" + VERSION = "0.4.0.pre.beta.1" end From 1ff137a2e0433e894555ea7e1d8de5fc990deb2d Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 15:57:59 +0000 Subject: [PATCH 210/295] fix: prevent rubocop from mangling `===` to `is_a?` check --- lib/openai/internal/util.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index c84c9e78..9e11d6ed 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -600,11 +600,13 @@ class << self # # @return [Object] def encode_content(headers, body) + # rubocop:disable Style/CaseEquality + # rubocop:disable Layout/LineLength content_type = headers["content-type"] case [content_type, body] in [OpenAI::Internal::Util::JSON_CONTENT, Hash | Array | -> { primitive?(_1) }] [headers, JSON.generate(body)] - in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless body.is_a?(OpenAI::Internal::Type::FileInput) + in [OpenAI::Internal::Util::JSONL_CONTENT, Enumerable] unless OpenAI::Internal::Type::FileInput === body [headers, body.lazy.map { JSON.generate(_1) }] in [%r{^multipart/form-data}, Hash | OpenAI::Internal::Type::FileInput] boundary, strio = encode_multipart_streaming(body) @@ -619,6 +621,8 @@ def encode_content(headers, body) else [headers, body] end + # rubocop:enable Layout/LineLength + # rubocop:enable Style/CaseEquality end # @api private From afd2b1621d706a0914cdbd0a422cdc9485e36088 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 23 May 2025 17:13:54 +0000 Subject: [PATCH 211/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1f0d8a9b..3e2bf498 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.4.0-beta.1" + ".": "0.4.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 9496ce68..64aeded6 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
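The `===` pin in the rubocop patch above is easy to lose to autocorrect, so it is worth spelling out what would break. A minimal sketch, assuming only that `OpenAI::Internal::Type::FileInput` behaves like a matcher module with a custom `===`; the `FileLike` module below is a stand-in, not the SDK's real implementation:

```ruby
require "stringio"

# Stand-in matcher with a custom `===`, analogous in spirit to the SDK's
# FileInput type module; the real implementation may differ.
FileLike = Module.new do
  def self.===(other)
    other.is_a?(IO) || other.is_a?(StringIO) || other.respond_to?(:to_path)
  end
end

body = StringIO.new("hello")
puts FileLike === body    # => true  -- dispatches to the custom matcher
puts body.is_a?(FileLike) # => false -- plain ancestry check
```

That behavioral gap is why the patch disables `Style/CaseEquality` around the guard instead of letting the cop rewrite `===` to `is_a?`.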
specs: - openai (0.4.0.pre.beta.1) + openai (0.4.1) connection_pool GEM diff --git a/README.md b/README.md index d96410b4..478a35e5 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.4.0.pre.beta.1" +gem "openai", "~> 0.4.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 03fe9c50..a71b9348 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.4.0.pre.beta.1" + VERSION = "0.4.1" end From c4f4bc5531beb2f926bb36284f69764cae8427e7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 27 May 2025 15:34:35 +0000 Subject: [PATCH 212/295] fix: sorbet types for enums, and make tapioca detection ignore `tapioca dsl` --- lib/openai.rb | 4 +++- lib/openai/internal/util.rb | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/openai.rb b/lib/openai.rb index 177d635e..5d4ebee6 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -20,7 +20,9 @@ # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. -if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) +if Object.const_defined?(:Tapioca) && + caller.chain([$PROGRAM_NAME]).chain(ARGV).any?(/tapioca/) && + ARGV.none?(/dsl/) return end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 9e11d6ed..eb5d1ffc 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -875,8 +875,12 @@ def to_sorbet_type(type) case type in OpenAI::Internal::Util::SorbetRuntimeSupport type.to_sorbet_type - else + in Class | Module type + in true | false + T::Boolean + else + type.class end end end From 68a0e52bf02c01ae72faadec0fbf9d45f1046e13 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 19:36:40 +0000 Subject: [PATCH 213/295] chore: deprecate Assistants API --- .stats.yml | 2 +- lib/openai/resources/beta/threads.rb | 13 +++++++++++++ lib/openai/resources/beta/threads/messages.rb | 11 +++++++++++ lib/openai/resources/beta/threads/runs.rb | 17 +++++++++++++++++ lib/openai/resources/beta/threads/runs/steps.rb | 5 +++++ 5 files changed, 47 insertions(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 57774fe0..dcba0d15 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c -config_hash: 535b6e5f26a295d609b259c8cb8f656c +config_hash: 3b590818075ca4b54949578b97494525 diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index 22ff4228..7af8b256 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -3,6 +3,7 @@ module OpenAI module Resources class Beta + # @deprecated The Assistants API is deprecated in favor of the Responses API class Threads # @return [OpenAI::Resources::Beta::Threads::Runs] attr_reader :runs @@ -10,6 +11,8 @@ class Threads # @return [OpenAI::Resources::Beta::Threads::Messages] attr_reader :messages + # @deprecated The Assistants API is deprecated in favor of the Responses 
API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadCreateParams} for more details. # @@ -39,6 +42,8 @@ def create(params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Retrieves a thread. # # @overload retrieve(thread_id, request_options: {}) @@ -59,6 +64,8 @@ def retrieve(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadUpdateParams} for more details. # @@ -88,6 +95,8 @@ def update(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Delete a thread. # # @overload delete(thread_id, request_options: {}) @@ -108,6 +117,8 @@ def delete(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads#stream_raw} for streaming counterpart. # # Some parameter documentations has been truncated, see @@ -167,6 +178,8 @@ def create_and_run(params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads#create_and_run} for non-streaming # counterpart. # diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index fbc2fcb8..50bafc68 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -4,7 +4,10 @@ module OpenAI module Resources class Beta class Threads + # @deprecated The Assistants API is deprecated in favor of the Responses API class Messages + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. # @@ -38,6 +41,8 @@ def create(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageRetrieveParams} for more details. # @@ -68,6 +73,8 @@ def retrieve(message_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageUpdateParams} for more details. # @@ -101,6 +108,8 @@ def update(message_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageListParams} for more details. # @@ -137,6 +146,8 @@ def list(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Deletes a message. 
# # @overload delete(message_id, thread_id:, request_options: {}) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 50128b5a..6c1d4b54 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -4,10 +4,13 @@ module OpenAI module Resources class Beta class Threads + # @deprecated The Assistants API is deprecated in favor of the Responses API class Runs # @return [OpenAI::Resources::Beta::Threads::Runs::Steps] attr_reader :steps + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#create_stream_raw} for streaming # counterpart. # @@ -76,6 +79,8 @@ def create(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#create} for non-streaming # counterpart. # @@ -147,6 +152,8 @@ def create_stream_raw(thread_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunRetrieveParams} for more details. # @@ -177,6 +184,8 @@ def retrieve(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunUpdateParams} for more details. # @@ -210,6 +219,8 @@ def update(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::RunListParams} for more details. # @@ -244,6 +255,8 @@ def list(thread_id, params = {}) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Cancels a run that is `in_progress`. # # @overload cancel(run_id, thread_id:, request_options: {}) @@ -271,6 +284,8 @@ def cancel(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw} for # streaming counterpart. # @@ -314,6 +329,8 @@ def submit_tool_outputs(run_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # See {OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs} for # non-streaming counterpart. # diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index 6accaeb6..669ce368 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -5,7 +5,10 @@ module Resources class Beta class Threads class Runs + # @deprecated The Assistants API is deprecated in favor of the Responses API class Steps + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams} for more details. # @@ -45,6 +48,8 @@ def retrieve(step_id, params) ) end + # @deprecated The Assistants API is deprecated in favor of the Responses API + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::Runs::StepListParams} for more details. 
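Since every Assistants surface above now carries a `@deprecated` tag pointing at the Responses API, a hedged migration sketch may help orient readers. The model name and prompt are placeholders, and `output_text` is assumed to be the SDK's aggregate-text convenience accessor:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Deprecated Assistants-era flow (still functional, now flagged):
# thread = client.beta.threads.create
# client.beta.threads.messages.create(thread.id, role: "user", content: "Hello")

# Responses API equivalent suggested by the deprecation notices:
response = client.responses.create(
  model: "gpt-4.1", # placeholder model
  input: "Hello"
)
puts response.output_text # assumed helper for the concatenated text output
```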
# From 886613ecb8c1c0a8e877ef4d86642c3b031c991f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 20:47:27 +0000 Subject: [PATCH 214/295] feat(api): Config update for pakrym-stream-param --- .stats.yml | 6 +- .../models/responses/response_output_text.rb | 69 +++++++++- .../responses/response_retrieve_params.rb | 12 +- lib/openai/resources/responses.rb | 48 ++++++- .../models/responses/response_output_text.rbi | 118 +++++++++++++++++- .../responses/response_retrieve_params.rbi | 11 ++ rbi/openai/resources/responses.rbi | 40 ++++++ .../models/responses/response_output_text.rbs | 68 +++++++++- .../responses/response_retrieve_params.rbs | 11 +- sig/openai/resources/responses.rbs | 8 ++ 10 files changed, 381 insertions(+), 10 deletions(-) diff --git a/.stats.yml b/.stats.yml index dcba0d15..4aa085f5 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml -openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c -config_hash: 3b590818075ca4b54949578b97494525 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml +openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 +config_hash: 2102e4b25bbcab5d32d5ffa5d34daa0c diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index a9646f34..f188fae8 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -25,13 +25,23 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_text] required :type, const: :output_text - # @!method initialize(annotations:, text:, type: :output_text) + # @!attribute logprobs + # + # @return [Array, nil] + optional :logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob] + } + + # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text) # A text output from the model. # # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. # + # @param logprobs [Array] + # # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. # A citation to a file. @@ -159,6 +169,63 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method self.variants # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end + + class Logprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # + # @return [String] + required :token, String + + # @!attribute bytes + # + # @return [Array] + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer] + + # @!attribute logprob + # + # @return [Float] + required :logprob, Float + + # @!attribute top_logprobs + # + # @return [Array] + required :top_logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + # @!method initialize(token:, bytes:, logprob:, top_logprobs:) + # The log probability of a token. 
+ # + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] + # @param top_logprobs [Array] + + class TopLogprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # + # @return [String] + required :token, String + + # @!attribute bytes + # + # @return [Array] + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer] + + # @!attribute logprob + # + # @return [Float] + required :logprob, Float + + # @!method initialize(token:, bytes:, logprob:) + # The top log probability of a token. + # + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] + end + end end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 8063503f..979fe0b2 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -4,6 +4,8 @@ module OpenAI module Models module Responses # @see OpenAI::Resources::Responses#retrieve + # + # @see OpenAI::Resources::Responses#retrieve_streaming class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -15,12 +17,20 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } - # @!method initialize(include: nil, request_options: {}) + # @!attribute starting_after + # The sequence number of the event after which to start streaming. + # + # @return [Integer, nil] + optional :starting_after, Integer + + # @!method initialize(include: nil, starting_after: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index b2964e96..dd932421 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -163,17 +163,21 @@ def stream_raw(params) ) end + # See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart. + # # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # # Retrieves a model response with the given ID. # - # @overload retrieve(response_id, include: nil, request_options: {}) + # @overload retrieve(response_id, include: nil, starting_after: nil, request_options: {}) # # @param response_id [String] The ID of the response to retrieve. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. 
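To make the `logprobs` field added above concrete, here is a hedged reading sketch. It assumes text parts deserialize to `ResponseOutputText` as declared in this patch, and that log probabilities are only populated when requested via the relevant `include` option:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))
response = client.responses.create(model: "gpt-4.1", input: "Say hello")

# Walk message items and print token log probabilities, where present.
response.output.each do |item|
  next unless item.respond_to?(:content)

  item.content.each do |part|
    next unless part.is_a?(OpenAI::Responses::ResponseOutputText)

    (part.logprobs || []).each do |lp|
      printf("%-15s %10.4f\n", lp.token.inspect, lp.logprob)
    end
  end
end
```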
+ # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Responses::Response] @@ -181,6 +185,10 @@ def stream_raw(params) # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params) + if parsed[:stream] + message = "Please use `#retrieve_streaming` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :get, path: ["responses/%1$s", response_id], @@ -190,6 +198,44 @@ def retrieve(response_id, params = {}) ) end + # See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart. + # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. + # + # Retrieves a model response with the given ID. + # + # @overload retrieve_streaming(response_id, include: nil, starting_after: nil, request_options: {}) + # + # @param response_id [String] The ID of the response to retrieve. + # + # @param include [Array] Additional fields to include in the response. See the `include` + # + # @param starting_after [Integer] The sequence number of the event after which to start streaming. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::Stream] + # + # @see OpenAI::Models::Responses::ResponseRetrieveParams + def retrieve_streaming(response_id, params = {}) + parsed, options = OpenAI::Responses::ResponseRetrieveParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#retrieve` for the non-streaming use case." + raise ArgumentError.new(message) + end + parsed.store(:stream, true) + @client.request( + method: :get, + path: ["responses/%1$s", response_id], + query: parsed, + headers: {"accept" => "text/event-stream"}, + stream: OpenAI::Internal::Stream, + model: OpenAI::Responses::ResponseStreamEvent, + options: options + ) + end + # Deletes a model response with the given ID. # # @overload delete(response_id, request_options: {}) diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi index f73a5755..35832193 100644 --- a/rbi/openai/models/responses/response_output_text.rbi +++ b/rbi/openai/models/responses/response_output_text.rbi @@ -34,6 +34,21 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseOutputText::Logprob]) + ) + end + attr_reader :logprobs + + sig do + params( + logprobs: + T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash] + ).void + end + attr_writer :logprobs + # A text output from the model. sig do params( @@ -46,6 +61,8 @@ module OpenAI ) ], text: String, + logprobs: + T::Array[OpenAI::Responses::ResponseOutputText::Logprob::OrHash], type: Symbol ).returns(T.attached_class) end @@ -54,6 +71,7 @@ module OpenAI annotations:, # The text output from the model. text:, + logprobs: nil, # The type of the output text. Always `output_text`. 
type: :output_text ) @@ -71,7 +89,8 @@ module OpenAI ) ], text: String, - type: Symbol + type: Symbol, + logprobs: T::Array[OpenAI::Responses::ResponseOutputText::Logprob] } ) end @@ -261,6 +280,103 @@ module OpenAI def self.variants end end + + class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Logprob, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob + ] + ) + end + attr_accessor :top_logprobs + + # The log probability of a token. + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob::OrHash + ] + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:, top_logprobs:) + end + + sig do + override.returns( + { + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob + ] + } + ) + end + def to_hash + end + + class TopLogprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + # The top log probability of a token. + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:) + end + + sig do + override.returns( + { token: String, bytes: T::Array[Integer], logprob: Float } + ) + end + def to_hash + end + end + end end end end diff --git a/rbi/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi index c1e80237..c25abeb6 100644 --- a/rbi/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/openai/models/responses/response_retrieve_params.rbi @@ -31,9 +31,17 @@ module OpenAI end attr_writer :include + # The sequence number of the event after which to start streaming. + sig { returns(T.nilable(Integer)) } + attr_reader :starting_after + + sig { params(starting_after: Integer).void } + attr_writer :starting_after + sig do params( include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -41,6 +49,8 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, request_options: {} ) end @@ -50,6 +60,7 @@ module OpenAI { include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 3753b593..a3a788af 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -424,11 +424,15 @@ module OpenAI ) end + # See {OpenAI::Resources::Responses#retrieve_streaming} for streaming counterpart. 
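A hedged usage sketch for the resumable retrieval pair defined here; the response ID and sequence number are placeholders, and the `stream:` flag is managed by the method itself, as guarded above:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Resume streaming an in-flight response after a known event.
stream = client.responses.retrieve_streaming(
  "resp_123",        # placeholder response ID
  starting_after: 41 # emit events after this sequence number
)

stream.each do |event|
  puts event.class # each item is one ResponseStreamEvent variant
end
```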
+ # # Retrieves a model response with the given ID. sig do params( response_id: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, + stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Responses::Response) end @@ -438,6 +442,42 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, + # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or + # `#retrieve` for streaming and non-streaming use cases, respectively. + stream: false, + request_options: {} + ) + end + + # See {OpenAI::Resources::Responses#retrieve} for non-streaming counterpart. + # + # Retrieves a model response with the given ID. + sig do + params( + response_id: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + starting_after: Integer, + stream: T.noreturn, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[ + OpenAI::Responses::ResponseStreamEvent::Variants + ] + ) + end + def retrieve_streaming( + # The ID of the response to retrieve. + response_id, + # Additional fields to include in the response. See the `include` parameter for + # Response creation above for more information. + include: nil, + # The sequence number of the event after which to start streaming. + starting_after: nil, + # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or + # `#retrieve` for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index d374deb4..d29dc9ec 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -5,7 +5,8 @@ module OpenAI { annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, - type: :output_text + type: :output_text, + logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob] } class ResponseOutputText < OpenAI::Internal::Type::BaseModel @@ -15,16 +16,24 @@ module OpenAI attr_accessor type: :output_text + attr_reader logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob]? 
+ + def logprobs=: ( + ::Array[OpenAI::Responses::ResponseOutputText::Logprob] + ) -> ::Array[OpenAI::Responses::ResponseOutputText::Logprob] + def initialize: ( annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, + ?logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob], ?type: :output_text ) -> void def to_hash: -> { annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, - type: :output_text + type: :output_text, + logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob] } type annotation = @@ -119,6 +128,61 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] end + + type logprob = + { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + class Logprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + attr_accessor top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] + } + + type top_logprob = + { token: String, bytes: ::Array[Integer], logprob: Float } + + class TopLogprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float + } + end + end end end end diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index c90b79c2..56f3ed6a 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -2,7 +2,10 @@ module OpenAI module Models module Responses type response_retrieve_params = - { include: ::Array[OpenAI::Models::Responses::response_includable] } + { + include: ::Array[OpenAI::Models::Responses::response_includable], + starting_after: Integer + } & OpenAI::Internal::Type::request_parameters class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel @@ -15,13 +18,19 @@ module OpenAI ::Array[OpenAI::Models::Responses::response_includable] ) -> ::Array[OpenAI::Models::Responses::response_includable] + attr_reader starting_after: Integer? 
+ + def starting_after=: (Integer) -> Integer + def initialize: ( ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> void def to_hash: -> { include: ::Array[OpenAI::Models::Responses::response_includable], + starting_after: Integer, request_options: OpenAI::RequestOptions } end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 97ccd557..ee118696 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -52,9 +52,17 @@ module OpenAI def retrieve: ( String response_id, ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> OpenAI::Responses::Response + def retrieve_streaming: ( + String response_id, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?starting_after: Integer, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] + def delete: ( String response_id, ?request_options: OpenAI::request_opts From 567522a274924c3ec8764f811e1ed643447cb6c9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 01:54:17 +0000 Subject: [PATCH 215/295] fix(client): return binary content from `get /containers/{container_id}/files/{file_id}/content` --- .stats.yml | 2 +- lib/openai/resources/containers/files/content.rb | 5 +++-- rbi/openai/resources/containers/files/content.rbi | 2 +- sig/openai/resources/containers/files/content.rbs | 2 +- test/openai/resources/containers/files/content_test.rb | 4 +++- 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/.stats.yml b/.stats.yml index 4aa085f5..2e733899 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 -config_hash: 2102e4b25bbcab5d32d5ffa5d34daa0c +config_hash: d23f847b9ebb3f427d0f198035bd3e9f diff --git a/lib/openai/resources/containers/files/content.rb b/lib/openai/resources/containers/files/content.rb index 3d07b16e..ba07f678 100644 --- a/lib/openai/resources/containers/files/content.rb +++ b/lib/openai/resources/containers/files/content.rb @@ -13,7 +13,7 @@ class Content # @param container_id [String] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [nil] + # @return [StringIO] # # @see OpenAI::Models::Containers::Files::ContentRetrieveParams def retrieve(file_id, params) @@ -25,7 +25,8 @@ def retrieve(file_id, params) @client.request( method: :get, path: ["containers/%1$s/files/%2$s/content", container_id, file_id], - model: NilClass, + headers: {"accept" => "application/binary"}, + model: StringIO, options: options ) end diff --git a/rbi/openai/resources/containers/files/content.rbi b/rbi/openai/resources/containers/files/content.rbi index a299fe68..bb901b30 100644 --- a/rbi/openai/resources/containers/files/content.rbi +++ b/rbi/openai/resources/containers/files/content.rbi @@ -11,7 +11,7 @@ module OpenAI file_id: String, container_id: String, request_options: OpenAI::RequestOptions::OrHash - ).void + ).returns(StringIO) end def retrieve(file_id, container_id:, request_options: {}) end diff --git a/sig/openai/resources/containers/files/content.rbs 
b/sig/openai/resources/containers/files/content.rbs index 898c9988..03e09259 100644 --- a/sig/openai/resources/containers/files/content.rbs +++ b/sig/openai/resources/containers/files/content.rbs @@ -7,7 +7,7 @@ module OpenAI String file_id, container_id: String, ?request_options: OpenAI::request_opts - ) -> nil + ) -> StringIO def initialize: (client: OpenAI::Client) -> void end diff --git a/test/openai/resources/containers/files/content_test.rb b/test/openai/resources/containers/files/content_test.rb index 994b2806..4d4252d4 100644 --- a/test/openai/resources/containers/files/content_test.rb +++ b/test/openai/resources/containers/files/content_test.rb @@ -4,10 +4,12 @@ class OpenAI::Test::Resources::Containers::Files::ContentTest < OpenAI::Test::ResourceTest def test_retrieve_required_params + skip("skipped: test server currently has no support for method content-type") + response = @openai.containers.files.content.retrieve("file_id", container_id: "container_id") assert_pattern do - response => nil + response => StringIO end end end From dd2fd7ebd9e33bf63aabec4e86359831705984e0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 16:17:28 +0000 Subject: [PATCH 216/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3e2bf498..2aca35ae 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.4.1" + ".": "0.5.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 64aeded6..503224bb 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
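The container-file fix above changes the return type from `nil` to `StringIO`, which is easiest to see in a download call. A hedged sketch; both IDs are placeholders:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# The corrected endpoint now yields binary content as a StringIO.
content = client.containers.files.content.retrieve(
  "file_123",
  container_id: "cntr_456"
)

File.binwrite("container-file.bin", content.read)
```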
specs: - openai (0.4.1) + openai (0.5.0) connection_pool GEM diff --git a/README.md b/README.md index 478a35e5..afa2e1bb 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.4.1" +gem "openai", "~> 0.5.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index a71b9348..56f83a91 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.4.1" + VERSION = "0.5.0" end From 8617f498f47728d289164a9358171c99c33d2945 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 29 May 2025 16:45:27 +0000 Subject: [PATCH 217/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 2e733899..0e9f19a5 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 -config_hash: d23f847b9ebb3f427d0f198035bd3e9f +config_hash: e618aa8ff61aea826540916336de65a6 From 4b4c9589e6228cfd0f20f08e96208280a804d5e1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 2 Jun 2025 19:36:29 +0000 Subject: [PATCH 218/295] fix(api): Fix evals and code interpreter interfaces --- .stats.yml | 6 +- .../audio/transcription_text_delta_event.rb | 6 +- .../audio/transcription_text_done_event.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 8 +-- .../models/chat/chat_completion_chunk.rb | 8 +-- .../models/chat/completion_create_params.rb | 8 +-- .../fine_tuning/alpha/grader_run_params.rb | 47 +++++------- .../models/fine_tuning/fine_tuning_job.rb | 8 +-- lib/openai/models/graders/multi_grader.rb | 15 ++-- lib/openai/models/image_edit_params.rb | 4 +- lib/openai/models/responses/response.rb | 8 +-- ..._code_interpreter_call_code_delta_event.rb | 10 +-- ...e_code_interpreter_call_code_done_event.rb | 10 +-- .../responses/response_create_params.rb | 10 +-- .../models/responses/response_includable.rb | 3 + .../models/responses/response_output_text.rb | 55 +++++++++++++- .../models/responses/response_stream_event.rb | 4 +- .../resources/fine_tuning/alpha/graders.rb | 9 ++- lib/openai/resources/responses.rb | 4 +- .../audio/transcription_text_delta_event.rbi | 8 +-- .../audio/transcription_text_done_event.rbi | 8 +-- rbi/openai/models/chat/chat_completion.rbi | 12 ++-- .../models/chat/chat_completion_chunk.rbi | 12 ++-- .../models/chat/completion_create_params.rbi | 12 ++-- .../fine_tuning/alpha/grader_run_params.rbi | 67 +++++++---------- .../models/fine_tuning/fine_tuning_job.rbi | 5 +- rbi/openai/models/graders/multi_grader.rbi | 59 +++++++-------- rbi/openai/models/image_edit_params.rbi | 6 +- rbi/openai/models/responses/response.rbi | 12 ++-- ...code_interpreter_call_code_delta_event.rbi | 6 +- ..._code_interpreter_call_code_done_event.rbi | 6 +- .../responses/response_create_params.rbi | 16 +++-- .../models/responses/response_includable.rbi | 7 ++ .../models/responses/response_output_text.rbi | 72 +++++++++++++++++++ rbi/openai/resources/chat/completions.rbi | 8 +-- .../resources/fine_tuning/alpha/graders.rbi | 15 ++-- rbi/openai/resources/images.rbi | 2 +- rbi/openai/resources/responses.rbi | 14 
++-- .../audio/transcription_text_delta_event.rbs | 15 ++-- .../audio/transcription_text_done_event.rbs | 15 ++-- .../fine_tuning/alpha/grader_run_params.rbs | 20 ++---- .../models/fine_tuning/fine_tuning_job.rbs | 2 +- sig/openai/models/graders/multi_grader.rbs | 14 ++-- ...code_interpreter_call_code_delta_event.rbs | 8 +-- ..._code_interpreter_call_code_done_event.rbs | 8 +-- .../models/responses/response_includable.rbs | 2 + .../models/responses/response_output_text.rbs | 38 ++++++++++ .../resources/fine_tuning/alpha/graders.rbs | 2 +- sig/openai/resources/responses.rbs | 2 +- .../fine_tuning/alpha/graders_test.rb | 3 +- test/openai/resources/responses_test.rb | 31 +++++++- 51 files changed, 461 insertions(+), 275 deletions(-) diff --git a/.stats.yml b/.stats.yml index 0e9f19a5..fb17fac7 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d4bcffecf0cdadf746faa6708ed1ec81fac451f9b857deabbab26f0a343b9314.yml -openapi_spec_hash: 7c54a18b4381248bda7cc34c52142615 -config_hash: e618aa8ff61aea826540916336de65a6 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2bcc845d8635bf93ddcf9ee723af4d7928248412a417bee5fc10d863a1e13867.yml +openapi_spec_hash: 865230cb3abeb01bd85de05891af23c4 +config_hash: ed1e6b3c5f93d12b80d31167f55c557c diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 0541f312..4c54ea63 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -50,8 +50,8 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute bytes # The bytes that were used to generate the log probability. # - # @return [Array, nil] - optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] + # @return [Array, nil] + optional :bytes, OpenAI::Internal::Type::ArrayOf[Integer] # @!attribute logprob # The log probability of the token. @@ -65,7 +65,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @param token [String] The token that was used to generate the log probability. # - # @param bytes [Array] The bytes that were used to generate the log probability. + # @param bytes [Array] The bytes that were used to generate the log probability. # # @param logprob [Float] The log probability of the token. end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 2651d973..eac7a34d 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -51,8 +51,8 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute bytes # The bytes that were used to generate the log probability. # - # @return [Array, nil] - optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] + # @return [Array, nil] + optional :bytes, OpenAI::Internal::Type::ArrayOf[Integer] # @!attribute logprob # The log probability of the token. @@ -66,7 +66,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @param token [String] The token that was used to generate the log probability. # - # @param bytes [Array] The bytes that were used to generate the log probability. + # @param bytes [Array] The bytes that were used to generate the log probability. 
# # @param logprob [Float] The log probability of the token. end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 6f5b922c..b1a17a6f 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -46,9 +46,9 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -195,9 +195,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 9dfe771e..63c1109e 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -45,9 +45,9 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -378,9 +378,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). 
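With `bytes` narrowed above from an untyped array to `Array[Integer]`, each entry is a UTF-8 byte value, so a token's bytes round-trip to text. A small illustration with made-up values:

```ruby
# Decode a logprob entry's byte values back into the token text.
logprob_bytes = [72, 101, 108, 108, 111] # illustrative values
token_text = logprob_bytes.pack("C*").force_encoding(Encoding::UTF_8)
puts token_text # => "Hello"
```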
diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index efe22a7d..ee393bad 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -226,9 +226,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -542,9 +542,9 @@ module ResponseFormat # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb index 0dfe4ffc..152ac2a1 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -16,26 +16,32 @@ class GraderRunParams < OpenAI::Internal::Type::BaseModel required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderRunParams::Grader } # @!attribute model_sample - # The model sample to be evaluated. + # The model sample to be evaluated. This value will be used to populate the + # `sample` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + # The `output_json` variable will be populated if the model sample is a valid JSON + # string. # # @return [String] required :model_sample, String - # @!attribute reference_answer - # The reference answer for the evaluation. + # @!attribute item + # The dataset item provided to the grader. This will be used to populate the + # `item` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. # - # @return [String, Object, Array, Float] - required :reference_answer, - union: -> { - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer - } + # @return [Object, nil] + optional :item, OpenAI::Internal::Type::Unknown - # @!method initialize(grader:, model_sample:, reference_answer:, request_options: {}) + # @!method initialize(grader:, model_sample:, item: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FineTuning::Alpha::GraderRunParams} for more details. 
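The shift from `reference_answer:` to an optional `item:` hash is easiest to see in a call. A hedged sketch in which the grader body and values are illustrative and `graders.run` is assumed to be the resource method behind these params; `model_sample` populates the `sample` namespace and `item` populates the `item` namespace, per the doc comments above:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

result = client.fine_tuning.alpha.graders.run(
  grader: {
    type: "string_check",
    name: "exact_match",
    input: "{{sample.output_text}}", # filled from model_sample
    reference: "{{item.answer}}",    # filled from item
    operation: "eq"
  },
  model_sample: "Paris",
  item: {answer: "Paris"} # optional; replaces the old reference_answer param
)
```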
+ # # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # - # @param model_sample [String] The model sample to be evaluated. + # @param model_sample [String] The model sample to be evaluated. This value will be used to populate # - # @param reference_answer [String, Object, Array, Float] The reference answer for the evaluation. + # @param item [Object] The dataset item provided to the grader. This will be used to populate # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -63,25 +69,6 @@ module Grader # @!method self.variants # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end - - # The reference answer for the evaluation. - module ReferenceAnswer - extend OpenAI::Internal::Type::Union - - variant String - - variant OpenAI::Internal::Type::Unknown - - variant -> { OpenAI::Models::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } - - variant Float - - # @!method self.variants - # @return [Array(String, Object, Array, Float)] - - # @type [OpenAI::Internal::Type::Converter] - UnionMember2Array = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] - end end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 64df4360..dad79035 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -226,7 +226,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @return [Object, Symbol, :auto, Integer, nil] + # @return [Symbol, :auto, Integer, nil] optional :batch_size, union: -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize }, nil?: true @@ -253,7 +253,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # - # @param batch_size [Object, Symbol, :auto, Integer, nil] Number of examples in each batch. A larger batch size means that model parameter + # @param batch_size [Symbol, :auto, Integer, nil] Number of examples in each batch. A larger batch size means that model parameter # # @param learning_rate_multiplier [Symbol, :auto, Float] Scaling factor for the learning rate. A smaller learning rate may be useful to a # @@ -266,14 +266,12 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel module BatchSize extend OpenAI::Internal::Type::Union - variant OpenAI::Internal::Type::Unknown - variant const: :auto variant Integer # @!method self.variants - # @return [Array(Object, Symbol, :auto, Integer)] + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. 
A smaller learning rate may be useful to diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index de0d1240..0f5bd82e 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -11,9 +11,11 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel required :calculate_output, String # @!attribute graders + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. # - # @return [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] - required :graders, -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Graders::MultiGrader::Grader] } + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader] + required :graders, union: -> { OpenAI::Graders::MultiGrader::Graders } # @!attribute name # The name of the grader. @@ -28,12 +30,15 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel required :type, const: :multi # @!method initialize(calculate_output:, graders:, name:, type: :multi) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::MultiGrader} for more details. + # # A MultiGrader object combines the output of multiple graders to produce a single # score. # # @param calculate_output [String] A formula to calculate the output based on grader results. # - # @param graders [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] + # @param graders [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader] A StringCheckGrader object that performs a string comparison between input and r # # @param name [String] The name of the grader. # @@ -41,7 +46,9 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # A StringCheckGrader object that performs a string comparison between input and # reference using a specified operation. - module Grader + # + # @see OpenAI::Models::Graders::MultiGrader#graders + module Graders extend OpenAI::Internal::Type::Union # A StringCheckGrader object that performs a string comparison between input and reference using a specified operation. diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index ea3225f3..6162af1a 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -11,7 +11,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. + # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. @@ -123,7 +123,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # The image(s) to edit. 
Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. + # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 20471340..38b6465d 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -173,9 +173,9 @@ class Response < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -346,9 +346,9 @@ module ToolChoice # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index e8413a69..73bc4f43 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -23,12 +23,12 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo required :sequence_number, Integer # @!attribute type - # The type of the event. Always `response.code_interpreter_call.code.delta`. + # The type of the event. Always `response.code_interpreter_call_code.delta`. # - # @return [Symbol, :"response.code_interpreter_call.code.delta"] - required :type, const: :"response.code_interpreter_call.code.delta" + # @return [Symbol, :"response.code_interpreter_call_code.delta"] + required :type, const: :"response.code_interpreter_call_code.delta" - # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.delta") + # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.delta") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more # details. @@ -41,7 +41,7 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # # @param sequence_number [Integer] The sequence number of this event. # - # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. 
Always `response.code_interpreter_call.code.delta`. + # @param type [Symbol, :"response.code_interpreter_call_code.delta"] The type of the event. Always `response.code_interpreter_call_code.delta`. end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index beaab321..356bcee2 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -23,12 +23,12 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod required :sequence_number, Integer # @!attribute type - # The type of the event. Always `response.code_interpreter_call.code.done`. + # The type of the event. Always `response.code_interpreter_call_code.done`. # - # @return [Symbol, :"response.code_interpreter_call.code.done"] - required :type, const: :"response.code_interpreter_call.code.done" + # @return [Symbol, :"response.code_interpreter_call_code.done"] + required :type, const: :"response.code_interpreter_call_code.done" - # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.done") + # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.done") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more # details. @@ -41,7 +41,7 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # # @param sequence_number [Integer] The sequence number of this event. # - # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. + # @param type [Symbol, :"response.code_interpreter_call_code.done"] The type of the event. Always `response.code_interpreter_call_code.done`. end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 2058a351..643e98b0 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -55,6 +55,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. # # @return [Array, nil] optional :include, @@ -122,9 +124,9 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -293,9 +295,9 @@ module Input # utilize scale tier credits until they are exhausted. 
# - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index f56e4278..7300f818 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -16,6 +16,8 @@ module Responses # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -23,6 +25,7 @@ module ResponseIncludable MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" REASONING_ENCRYPTED_CONTENT = :"reasoning.encrypted_content" + CODE_INTERPRETER_CALL_OUTPUTS = :"code_interpreter_call.outputs" # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index f188fae8..6f0420ce 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -7,7 +7,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!attribute annotations # The annotations of the text output. # - # @return [Array] + # @return [Array] required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] @@ -36,7 +36,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text) # A text output from the model. # - # @param annotations [Array] The annotations of the text output. + # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. # @@ -56,6 +56,10 @@ module Annotation # A citation for a web resource used to generate a model response. variant :url_citation, -> { OpenAI::Responses::ResponseOutputText::Annotation::URLCitation } + # A citation for a container file used to generate a model response. + variant :container_file_citation, + -> { OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation } + # A path to a file. variant :file_path, -> { OpenAI::Responses::ResponseOutputText::Annotation::FilePath } @@ -133,6 +137,51 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. end + class ContainerFileCitation < OpenAI::Internal::Type::BaseModel + # @!attribute container_id + # The ID of the container file. + # + # @return [String] + required :container_id, String + + # @!attribute end_index + # The index of the last character of the container file citation in the message. 
+ # + # @return [Integer] + required :end_index, Integer + + # @!attribute file_id + # The ID of the file. + # + # @return [String] + required :file_id, String + + # @!attribute start_index + # The index of the first character of the container file citation in the message. + # + # @return [Integer] + required :start_index, Integer + + # @!attribute type + # The type of the container file citation. Always `container_file_citation`. + # + # @return [Symbol, :container_file_citation] + required :type, const: :container_file_citation + + # @!method initialize(container_id:, end_index:, file_id:, start_index:, type: :container_file_citation) + # A citation for a container file used to generate a model response. + # + # @param container_id [String] The ID of the container file. + # + # @param end_index [Integer] The index of the last character of the container file citation in the message. + # + # @param file_id [String] The ID of the file. + # + # @param start_index [Integer] The index of the first character of the container file citation in the message. + # + # @param type [Symbol, :container_file_citation] The type of the container file citation. Always `container_file_citation`. + end + class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file. @@ -167,7 +216,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::ContainerFileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end class Logprob < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index eb98e953..b5e90a6d 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -25,11 +25,11 @@ module ResponseStreamEvent variant :"response.audio.transcript.done", -> { OpenAI::Responses::ResponseAudioTranscriptDoneEvent } # Emitted when a partial code snippet is added by the code interpreter. - variant :"response.code_interpreter_call.code.delta", + variant :"response.code_interpreter_call_code.delta", -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent } # Emitted when code snippet output is finalized by the code interpreter. - variant :"response.code_interpreter_call.code.done", + variant :"response.code_interpreter_call_code.done", -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent } # Emitted when the code interpreter call is completed. diff --git a/lib/openai/resources/fine_tuning/alpha/graders.rb b/lib/openai/resources/fine_tuning/alpha/graders.rb index 52c4cc77..ce7775c2 100644 --- a/lib/openai/resources/fine_tuning/alpha/graders.rb +++ b/lib/openai/resources/fine_tuning/alpha/graders.rb @@ -5,15 +5,18 @@ module Resources class FineTuning class Alpha class Graders + # Some parameter documentation has been truncated, see + # {OpenAI::Models::FineTuning::Alpha::GraderRunParams} for more details. + # # Run a grader.
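A rough usage sketch of the updated `Graders#run` resource method, whose new optional `item:` parameter replaces `reference_answer:` (see the overload change just below); the client construction and the grader field values are assumptions borrowed from the test suite, not part of this patch:

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    result = client.fine_tuning.alpha.graders.run(
      grader: {
        type: :string_check,
        name: "name",
        input: "{{sample.output_text}}",
        operation: :eq,
        reference: "{{item.reference_answer}}"
      },
      model_sample: "model_sample",
      # optional: populates the `item` namespace visible to the grader templates
      item: {reference_answer: "expected output"}
    )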
# - # @overload run(grader:, model_sample:, reference_answer:, request_options: {}) + # @overload run(grader:, model_sample:, item: nil, request_options: {}) # # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # - # @param model_sample [String] The model sample to be evaluated. + # @param model_sample [String] The model sample to be evaluated. This value will be used to populate # - # @param reference_answer [String, Object, Array, Float] The reference answer for the evaluation. + # @param item [Object] The dataset item provided to the grader. This will be used to populate # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index dd932421..11fe39a6 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -266,14 +266,14 @@ def delete(response_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [nil] + # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseCancelParams def cancel(response_id, params = {}) @client.request( method: :post, path: ["responses/%1$s/cancel", response_id], - model: NilClass, + model: OpenAI::Responses::Response, options: params[:request_options] ) end diff --git a/rbi/openai/models/audio/transcription_text_delta_event.rbi b/rbi/openai/models/audio/transcription_text_delta_event.rbi index a196922d..d8707c86 100644 --- a/rbi/openai/models/audio/transcription_text_delta_event.rbi +++ b/rbi/openai/models/audio/transcription_text_delta_event.rbi @@ -98,10 +98,10 @@ module OpenAI attr_writer :token # The bytes that were used to generate the log probability. - sig { returns(T.nilable(T::Array[T.anything])) } + sig { returns(T.nilable(T::Array[Integer])) } attr_reader :bytes - sig { params(bytes: T::Array[T.anything]).void } + sig { params(bytes: T::Array[Integer]).void } attr_writer :bytes # The log probability of the token. @@ -114,7 +114,7 @@ module OpenAI sig do params( token: String, - bytes: T::Array[T.anything], + bytes: T::Array[Integer], logprob: Float ).returns(T.attached_class) end @@ -130,7 +130,7 @@ module OpenAI sig do override.returns( - { token: String, bytes: T::Array[T.anything], logprob: Float } + { token: String, bytes: T::Array[Integer], logprob: Float } ) end def to_hash diff --git a/rbi/openai/models/audio/transcription_text_done_event.rbi b/rbi/openai/models/audio/transcription_text_done_event.rbi index 80acac59..21788792 100644 --- a/rbi/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/openai/models/audio/transcription_text_done_event.rbi @@ -100,10 +100,10 @@ module OpenAI attr_writer :token # The bytes that were used to generate the log probability. - sig { returns(T.nilable(T::Array[T.anything])) } + sig { returns(T.nilable(T::Array[Integer])) } attr_reader :bytes - sig { params(bytes: T::Array[T.anything]).void } + sig { params(bytes: T::Array[Integer]).void } attr_writer :bytes # The log probability of the token. 
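With `bytes` now typed as `T::Array[Integer]` (raw UTF-8 bytes) rather than `T::Array[T.anything]`, a token's text can be rebuilt from a logprob entry. A minimal sketch, assuming `logprob` is one of the `Logprob` objects described above:

    raw  = logprob.bytes.pack("C*")             # Array[Integer] -> binary String
    text = raw.force_encoding(Encoding::UTF_8)  # reinterpret the bytes as UTF-8
    text == logprob.token                       # true when the bytes span the whole token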
@@ -116,7 +116,7 @@ module OpenAI sig do params( token: String, - bytes: T::Array[T.anything], + bytes: T::Array[Integer], logprob: Float ).returns(T.attached_class) end @@ -132,7 +132,7 @@ module OpenAI sig do override.returns( - { token: String, bytes: T::Array[T.anything], logprob: Float } + { token: String, bytes: T::Array[Integer], logprob: Float } ) end def to_hash diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index c2353966..7408d715 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -39,9 +39,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -105,9 +105,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -370,9 +370,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index 9d06ce7f..b37b09ef 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -41,9 +41,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). 
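The service-tier bullets above describe a per-request knob. A hedged sketch of opting a chat completion into Flex Processing; the `client`, model name, and message are assumptions for illustration, not part of this patch:

    completion = client.chat.completions.create(
      model: "o4-mini",                                # flex is only offered on select models
      messages: [{role: :user, content: "Say hello"}],
      service_tier: :flex                              # or :auto / :default, per the docs above
    )
    completion.service_tier                            # reports the tier that actually served the request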
@@ -120,9 +120,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -790,9 +790,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 98b52bc4..61b20248 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -275,9 +275,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -615,9 +615,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -991,9 +991,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. 
# [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi index bca68bdb..f9834f5b 100644 --- a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi +++ b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi @@ -30,17 +30,22 @@ module OpenAI end attr_accessor :grader - # The model sample to be evaluated. + # The model sample to be evaluated. This value will be used to populate the + # `sample` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + # The `output_json` variable will be populated if the model sample is a valid JSON + # string. sig { returns(String) } attr_accessor :model_sample - # The reference answer for the evaluation. - sig do - returns( - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants - ) - end - attr_accessor :reference_answer + # The dataset item provided to the grader. This will be used to populate the + # `item` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + sig { returns(T.nilable(T.anything)) } + attr_reader :item + + sig { params(item: T.anything).void } + attr_writer :item sig do params( @@ -53,18 +58,23 @@ module OpenAI OpenAI::Graders::MultiGrader::OrHash ), model_sample: String, - reference_answer: - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, + item: T.anything, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end def self.new( # The grader used for the fine-tuning job. grader:, - # The model sample to be evaluated. + # The model sample to be evaluated. This value will be used to populate the + # `sample` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + # The `output_json` variable will be populated if the model sample is a valid JSON + # string. model_sample:, - # The reference answer for the evaluation. - reference_answer:, + # The dataset item provided to the grader. This will be used to populate the + # `item` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + item: nil, request_options: {} ) end @@ -81,8 +91,7 @@ module OpenAI OpenAI::Graders::MultiGrader ), model_sample: String, - reference_answer: - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, + item: T.anything, request_options: OpenAI::RequestOptions } ) @@ -115,34 +124,6 @@ module OpenAI def self.variants end end - - # The reference answer for the evaluation. - module ReferenceAnswer - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any(String, T.anything, T::Array[T.anything], Float) - end - - sig do - override.returns( - T::Array[ - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants - ] - ) - end - def self.variants - end - - UnionMember2Array = - T.let( - OpenAI::Internal::Type::ArrayOf[ - OpenAI::Internal::Type::Unknown - ], - OpenAI::Internal::Type::Converter - ) - end end end end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index 8e4d7a3a..090fa734 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -359,7 +359,7 @@ module OpenAI # returned when running `supervised` jobs. 
sig do params( - batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), + batch_size: T.nilable(T.any(Symbol, Integer)), learning_rate_multiplier: T.any(Symbol, Float), n_epochs: T.any(Symbol, Integer) ).returns(T.attached_class) @@ -399,8 +399,7 @@ module OpenAI module BatchSize extend OpenAI::Internal::Type::Union - Variants = - T.type_alias { T.nilable(T.any(T.anything, Symbol, Integer)) } + Variants = T.type_alias { T.any(Symbol, Integer) } sig do override.returns( diff --git a/rbi/openai/models/graders/multi_grader.rbi b/rbi/openai/models/graders/multi_grader.rbi index bbf5c142..63f598ad 100644 --- a/rbi/openai/models/graders/multi_grader.rbi +++ b/rbi/openai/models/graders/multi_grader.rbi @@ -15,18 +15,17 @@ module OpenAI sig { returns(String) } attr_accessor :calculate_output + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. sig do returns( - T::Hash[ - Symbol, - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::LabelModelGrader - ) - ] + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::LabelModelGrader + ) ) end attr_accessor :graders @@ -45,16 +44,13 @@ module OpenAI params( calculate_output: String, graders: - T::Hash[ - Symbol, - T.any( - OpenAI::Graders::StringCheckGrader::OrHash, - OpenAI::Graders::TextSimilarityGrader::OrHash, - OpenAI::Graders::PythonGrader::OrHash, - OpenAI::Graders::ScoreModelGrader::OrHash, - OpenAI::Graders::LabelModelGrader::OrHash - ) - ], + T.any( + OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Graders::TextSimilarityGrader::OrHash, + OpenAI::Graders::PythonGrader::OrHash, + OpenAI::Graders::ScoreModelGrader::OrHash, + OpenAI::Graders::LabelModelGrader::OrHash + ), name: String, type: Symbol ).returns(T.attached_class) @@ -62,6 +58,8 @@ module OpenAI def self.new( # A formula to calculate the output based on grader results. calculate_output:, + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. graders:, # The name of the grader. name:, @@ -75,16 +73,13 @@ module OpenAI { calculate_output: String, graders: - T::Hash[ - Symbol, - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::LabelModelGrader - ) - ], + T.any( + OpenAI::Graders::StringCheckGrader, + OpenAI::Graders::TextSimilarityGrader, + OpenAI::Graders::PythonGrader, + OpenAI::Graders::ScoreModelGrader, + OpenAI::Graders::LabelModelGrader + ), name: String, type: Symbol } @@ -95,7 +90,7 @@ module OpenAI # A StringCheckGrader object that performs a string comparison between input and # reference using a specified operation. 
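Since `graders` is now a single union-typed grader rather than a `Symbol`-keyed `Hash` (per the sig changes above and the `Graders` rename just below), construction looks roughly like this; the field values are illustrative assumptions:

    grader = OpenAI::Graders::MultiGrader.new(
      name: "overall",
      calculate_output: "string_check",        # formula over the named sub-grader's result
      graders: OpenAI::Graders::StringCheckGrader.new(
        name: "string_check",
        input: "{{sample.output_text}}",
        operation: :eq,
        reference: "{{item.reference_answer}}"
      )
    )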
- module Grader + module Graders extend OpenAI::Internal::Type::Union Variants = @@ -111,7 +106,7 @@ module OpenAI sig do override.returns( - T::Array[OpenAI::Graders::MultiGrader::Grader::Variants] + T::Array[OpenAI::Graders::MultiGrader::Graders::Variants] ) end def self.variants diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index b03c85dd..f3e7df1e 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -14,7 +14,7 @@ module OpenAI # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. + # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. @@ -106,7 +106,7 @@ module OpenAI # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. + # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. @@ -179,7 +179,7 @@ module OpenAI # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. + # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 97408461..d7a1789e 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -160,9 +160,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -390,9 +390,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -588,9 +588,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. 
# - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index a26838ef..d0012c45 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -24,7 +24,7 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The type of the event. Always `response.code_interpreter_call.code.delta`. + # The type of the event. Always `response.code_interpreter_call_code.delta`. sig { returns(Symbol) } attr_accessor :type @@ -44,8 +44,8 @@ module OpenAI output_index:, # The sequence number of this event. sequence_number:, - # The type of the event. Always `response.code_interpreter_call.code.delta`. - type: :"response.code_interpreter_call.code.delta" + # The type of the event. Always `response.code_interpreter_call_code.delta`. + type: :"response.code_interpreter_call_code.delta" ) end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index fdd8c46e..0ab6b04b 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -24,7 +24,7 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The type of the event. Always `response.code_interpreter_call.code.done`. + # The type of the event. Always `response.code_interpreter_call_code.done`. sig { returns(Symbol) } attr_accessor :type @@ -44,8 +44,8 @@ module OpenAI output_index:, # The sequence number of this event. sequence_number:, - # The type of the event. Always `response.code_interpreter_call.code.done`. - type: :"response.code_interpreter_call.code.done" + # The type of the event. Always `response.code_interpreter_call_code.done`. + type: :"response.code_interpreter_call_code.done" ) end diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 592d684d..717f102e 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -63,6 +63,8 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. sig do returns( T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) @@ -121,9 +123,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. 
# - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -364,6 +366,8 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. include: nil, # Inserts a system (or developer) message as the first item in the model's # context. @@ -401,9 +405,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -562,9 +566,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi index b99bd61a..f5f63506 100644 --- a/rbi/openai/models/responses/response_includable.rbi +++ b/rbi/openai/models/responses/response_includable.rbi @@ -16,6 +16,8 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. 
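To actually receive those outputs, the new enum member is passed through `include` on a Responses request. A hedged sketch; the code interpreter tool's `container` shape here is an assumption drawn from the tool documentation, not part of this patch:

    response = client.responses.create(
      model: "gpt-4.1",
      input: "Plot y = x ** 2 and describe the result",
      tools: [{type: :code_interpreter, container: {type: :auto}}],
      include: [:"code_interpreter_call.outputs"]
    )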
module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -43,6 +45,11 @@ module OpenAI :"reasoning.encrypted_content", OpenAI::Responses::ResponseIncludable::TaggedSymbol ) + CODE_INTERPRETER_CALL_OUTPUTS = + T.let( + :"code_interpreter_call.outputs", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi index 35832193..f59f7ada 100644 --- a/rbi/openai/models/responses/response_output_text.rbi +++ b/rbi/openai/models/responses/response_output_text.rbi @@ -19,6 +19,7 @@ module OpenAI T.any( OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath ) ] @@ -57,6 +58,7 @@ module OpenAI T.any( OpenAI::Responses::ResponseOutputText::Annotation::FileCitation::OrHash, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation::OrHash, + OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation::OrHash, OpenAI::Responses::ResponseOutputText::Annotation::FilePath::OrHash ) ], @@ -85,6 +87,7 @@ module OpenAI T.any( OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath ) ], @@ -106,6 +109,7 @@ module OpenAI T.any( OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, + OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath ) end @@ -224,6 +228,74 @@ module OpenAI end end + class ContainerFileCitation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the container file. + sig { returns(String) } + attr_accessor :container_id + + # The index of the last character of the container file citation in the message. + sig { returns(Integer) } + attr_accessor :end_index + + # The ID of the file. + sig { returns(String) } + attr_accessor :file_id + + # The index of the first character of the container file citation in the message. + sig { returns(Integer) } + attr_accessor :start_index + + # The type of the container file citation. Always `container_file_citation`. + sig { returns(Symbol) } + attr_accessor :type + + # A citation for a container file used to generate a model response. + sig do + params( + container_id: String, + end_index: Integer, + file_id: String, + start_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the container file. + container_id:, + # The index of the last character of the container file citation in the message. + end_index:, + # The ID of the file. + file_id:, + # The index of the first character of the container file citation in the message. + start_index:, + # The type of the container file citation. Always `container_file_citation`. 
+ type: :container_file_citation + ) + end + + sig do + override.returns( + { + container_id: String, + end_index: Integer, + file_id: String, + start_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + class FilePath < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 2261046e..5b66b431 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -228,9 +228,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -515,9 +515,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). diff --git a/rbi/openai/resources/fine_tuning/alpha/graders.rbi b/rbi/openai/resources/fine_tuning/alpha/graders.rbi index 8b1683c1..f3ea09fe 100644 --- a/rbi/openai/resources/fine_tuning/alpha/graders.rbi +++ b/rbi/openai/resources/fine_tuning/alpha/graders.rbi @@ -17,18 +17,23 @@ module OpenAI OpenAI::Graders::MultiGrader::OrHash ), model_sample: String, - reference_answer: - OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, + item: T.anything, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::FineTuning::Alpha::GraderRunResponse) end def run( # The grader used for the fine-tuning job. grader:, - # The model sample to be evaluated. + # The model sample to be evaluated. This value will be used to populate the + # `sample` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + # The `output_json` variable will be populated if the model sample is a valid JSON + # string. model_sample:, - # The reference answer for the evaluation. - reference_answer:, + # The dataset item provided to the grader. This will be used to populate the + # `item` namespace. See + # [the guide](https://platform.openai.com/docs/guides/graders) for more details. + item: nil, request_options: {} ) end diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index e3a93ec3..c8440e47 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -64,7 +64,7 @@ module OpenAI # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than - # 25MB. You can provide up to 16 images. 
+ # 50MB. You can provide up to 16 images. # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index a3a788af..02921b0e 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -108,6 +108,8 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. include: nil, # Inserts a system (or developer) message as the first item in the model's # context. @@ -145,9 +147,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -319,6 +321,8 @@ module OpenAI # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. include: nil, # Inserts a system (or developer) message as the first item in the model's # context. @@ -356,9 +360,9 @@ module OpenAI # utilize scale tier credits until they are exhausted. # - If set to 'auto', and the Project is not Scale tier enabled, the request will # be processed using the default service tier with a lower uptime SLA and no - # latency guarentee. + # latency guarantee. # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarentee. + # tier with a lower uptime SLA and no latency guarantee. # - If set to 'flex', the request will be processed with the Flex Processing # service tier. # [Learn more](https://platform.openai.com/docs/guides/flex-processing). @@ -503,7 +507,7 @@ module OpenAI params( response_id: String, request_options: OpenAI::RequestOptions::OrHash - ).void + ).returns(OpenAI::Responses::Response) end def cancel( # The ID of the response to cancel. diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs index 155b8e1d..08280006 100644 --- a/sig/openai/models/audio/transcription_text_delta_event.rbs +++ b/sig/openai/models/audio/transcription_text_delta_event.rbs @@ -31,16 +31,17 @@ module OpenAI logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] } - type logprob = { token: String, bytes: ::Array[top], logprob: Float } + type logprob = + { token: String, bytes: ::Array[Integer], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel attr_reader token: String? def token=: (String) -> String - attr_reader bytes: ::Array[top]? + attr_reader bytes: ::Array[Integer]? 
- def bytes=: (::Array[top]) -> ::Array[top] + def bytes=: (::Array[Integer]) -> ::Array[Integer] attr_reader logprob: Float? @@ -48,11 +49,15 @@ module OpenAI def initialize: ( ?token: String, - ?bytes: ::Array[top], + ?bytes: ::Array[Integer], ?logprob: Float ) -> void - def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float + } end end end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs index d8f864e3..53e317e8 100644 --- a/sig/openai/models/audio/transcription_text_done_event.rbs +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -31,16 +31,17 @@ module OpenAI logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } - type logprob = { token: String, bytes: ::Array[top], logprob: Float } + type logprob = + { token: String, bytes: ::Array[Integer], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel attr_reader token: String? def token=: (String) -> String - attr_reader bytes: ::Array[top]? + attr_reader bytes: ::Array[Integer]? - def bytes=: (::Array[top]) -> ::Array[top] + def bytes=: (::Array[Integer]) -> ::Array[Integer] attr_reader logprob: Float? @@ -48,11 +49,15 @@ module OpenAI def initialize: ( ?token: String, - ?bytes: ::Array[top], + ?bytes: ::Array[Integer], ?logprob: Float ) -> void - def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float + } end end end diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs index 0fcec2cc..64b909d5 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs @@ -6,7 +6,7 @@ module OpenAI { grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, model_sample: String, - reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer + item: top } & OpenAI::Internal::Type::request_parameters @@ -18,19 +18,21 @@ module OpenAI attr_accessor model_sample: String - attr_accessor reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer + attr_reader item: top? 
+ + def item=: (top) -> top def initialize: ( grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, model_sample: String, - reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + ?item: top, ?request_options: OpenAI::request_opts ) -> void def to_hash: -> { grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, model_sample: String, - reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + item: top, request_options: OpenAI::RequestOptions } @@ -46,16 +48,6 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader] end - - type reference_answer = String | top | ::Array[top] | Float - - module ReferenceAnswer - extend OpenAI::Internal::Type::Union - - def self?.variants: -> ::Array[OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer] - - UnionMember2Array: OpenAI::Internal::Type::Converter - end end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index a5368cef..f2c2812d 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -165,7 +165,7 @@ module OpenAI n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs } - type batch_size = (top | :auto | Integer)? + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/graders/multi_grader.rbs b/sig/openai/models/graders/multi_grader.rbs index 206d8144..d9ca09fc 100644 --- a/sig/openai/models/graders/multi_grader.rbs +++ b/sig/openai/models/graders/multi_grader.rbs @@ -6,7 +6,7 @@ module OpenAI type multi_grader = { calculate_output: String, - graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], + graders: OpenAI::Models::Graders::MultiGrader::graders, name: String, type: :multi } @@ -14,7 +14,7 @@ module OpenAI class MultiGrader < OpenAI::Internal::Type::BaseModel attr_accessor calculate_output: String - attr_accessor graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader] + attr_accessor graders: OpenAI::Models::Graders::MultiGrader::graders attr_accessor name: String @@ -22,29 +22,29 @@ module OpenAI def initialize: ( calculate_output: String, - graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], + graders: OpenAI::Models::Graders::MultiGrader::graders, name: String, ?type: :multi ) -> void def to_hash: -> { calculate_output: String, - graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], + graders: OpenAI::Models::Graders::MultiGrader::graders, name: String, type: :multi } - type grader = + type graders = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader | OpenAI::Graders::PythonGrader | OpenAI::Graders::ScoreModelGrader | OpenAI::Graders::LabelModelGrader - module Grader + module Graders extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Models::Graders::MultiGrader::grader] + def self?.variants: -> ::Array[OpenAI::Models::Graders::MultiGrader::graders] end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 2da40939..51d5e73d 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ 
-6,7 +6,7 @@ module OpenAI delta: String, output_index: Integer, sequence_number: Integer, - type: :"response.code_interpreter_call.code.delta" + type: :"response.code_interpreter_call_code.delta" } class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel @@ -16,20 +16,20 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor type: :"response.code_interpreter_call.code.delta" + attr_accessor type: :"response.code_interpreter_call_code.delta" def initialize: ( delta: String, output_index: Integer, sequence_number: Integer, - ?type: :"response.code_interpreter_call.code.delta" + ?type: :"response.code_interpreter_call_code.delta" ) -> void def to_hash: -> { delta: String, output_index: Integer, sequence_number: Integer, - type: :"response.code_interpreter_call.code.delta" + type: :"response.code_interpreter_call_code.delta" } end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 5f796490..a025e240 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -6,7 +6,7 @@ module OpenAI code: String, output_index: Integer, sequence_number: Integer, - type: :"response.code_interpreter_call.code.done" + type: :"response.code_interpreter_call_code.done" } class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel @@ -16,20 +16,20 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor type: :"response.code_interpreter_call.code.done" + attr_accessor type: :"response.code_interpreter_call_code.done" def initialize: ( code: String, output_index: Integer, sequence_number: Integer, - ?type: :"response.code_interpreter_call.code.done" + ?type: :"response.code_interpreter_call_code.done" ) -> void def to_hash: -> { code: String, output_index: Integer, sequence_number: Integer, - type: :"response.code_interpreter_call.code.done" + type: :"response.code_interpreter_call_code.done" } end end diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index 4f37a1b1..7d37f3af 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -6,6 +6,7 @@ module OpenAI | :"message.input_image.image_url" | :"computer_call_output.output.image_url" | :"reasoning.encrypted_content" + | :"code_interpreter_call.outputs" module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -14,6 +15,7 @@ module OpenAI MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" REASONING_ENCRYPTED_CONTENT: :"reasoning.encrypted_content" + CODE_INTERPRETER_CALL_OUTPUTS: :"code_interpreter_call.outputs" def self?.values: -> ::Array[OpenAI::Models::Responses::response_includable] end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index d29dc9ec..a72f5df5 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -39,6 +39,7 @@ module OpenAI type annotation = OpenAI::Responses::ResponseOutputText::Annotation::FileCitation | OpenAI::Responses::ResponseOutputText::Annotation::URLCitation + | 
OpenAI::Responses::ResponseOutputText::Annotation::ContainerFileCitation | OpenAI::Responses::ResponseOutputText::Annotation::FilePath module Annotation @@ -104,6 +105,43 @@ module OpenAI } end + type container_file_citation = + { + container_id: String, + end_index: Integer, + file_id: String, + start_index: Integer, + type: :container_file_citation + } + + class ContainerFileCitation < OpenAI::Internal::Type::BaseModel + attr_accessor container_id: String + + attr_accessor end_index: Integer + + attr_accessor file_id: String + + attr_accessor start_index: Integer + + attr_accessor type: :container_file_citation + + def initialize: ( + container_id: String, + end_index: Integer, + file_id: String, + start_index: Integer, + ?type: :container_file_citation + ) -> void + + def to_hash: -> { + container_id: String, + end_index: Integer, + file_id: String, + start_index: Integer, + type: :container_file_citation + } + end + type file_path = { file_id: String, index: Integer, type: :file_path } class FilePath < OpenAI::Internal::Type::BaseModel diff --git a/sig/openai/resources/fine_tuning/alpha/graders.rbs b/sig/openai/resources/fine_tuning/alpha/graders.rbs index a460c4c7..5283fadb 100644 --- a/sig/openai/resources/fine_tuning/alpha/graders.rbs +++ b/sig/openai/resources/fine_tuning/alpha/graders.rbs @@ -6,7 +6,7 @@ module OpenAI def run: ( grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, model_sample: String, - reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + ?item: top, ?request_options: OpenAI::request_opts ) -> OpenAI::Models::FineTuning::Alpha::GraderRunResponse diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index ee118696..7db1e5e4 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -71,7 +71,7 @@ module OpenAI def cancel: ( String response_id, ?request_options: OpenAI::request_opts - ) -> nil + ) -> OpenAI::Responses::Response def initialize: (client: OpenAI::Client) -> void end diff --git a/test/openai/resources/fine_tuning/alpha/graders_test.rb b/test/openai/resources/fine_tuning/alpha/graders_test.rb index 7a1c620a..9e2f659c 100644 --- a/test/openai/resources/fine_tuning/alpha/graders_test.rb +++ b/test/openai/resources/fine_tuning/alpha/graders_test.rb @@ -7,8 +7,7 @@ def test_run_required_params response = @openai.fine_tuning.alpha.graders.run( grader: {input: "input", name: "name", operation: :eq, reference: "reference", type: :string_check}, - model_sample: "model_sample", - reference_answer: "string" + model_sample: "model_sample" ) assert_pattern do diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index deb7c605..fe1e2ad5 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -89,7 +89,36 @@ def test_cancel response = @openai.responses.cancel("resp_677efb5139a88190b512bc3fef8e535d") assert_pattern do - response => nil + response => OpenAI::Responses::Response + end + + assert_pattern do + response => { + id: String, + created_at: Float, + error: OpenAI::Responses::ResponseError | nil, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, + instructions: String | nil, + metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, + model: OpenAI::ResponsesModel, + object: Symbol, + output: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem]), + parallel_tool_calls: OpenAI::Internal::Type::Boolean, + 
temperature: Float | nil, + tool_choice: OpenAI::Responses::Response::ToolChoice, + tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), + top_p: Float | nil, + background: OpenAI::Internal::Type::Boolean | nil, + max_output_tokens: Integer | nil, + previous_response_id: String | nil, + reasoning: OpenAI::Reasoning | nil, + service_tier: OpenAI::Responses::Response::ServiceTier | nil, + status: OpenAI::Responses::ResponseStatus | nil, + text: OpenAI::Responses::ResponseTextConfig | nil, + truncation: OpenAI::Responses::Response::Truncation | nil, + usage: OpenAI::Responses::ResponseUsage | nil, + user: String | nil + } end end end From d4b446d603ba10787c16b45e377abd58f80a3526 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 2 Jun 2025 19:41:15 +0000 Subject: [PATCH 219/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2aca35ae..d04f223f 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.5.0" + ".": "0.5.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 503224bb..42ec5d77 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.5.0) + openai (0.5.1) connection_pool GEM diff --git a/README.md b/README.md index afa2e1bb..41d3b352 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.5.0" +gem "openai", "~> 0.5.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 56f83a91..bd01e775 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.5.0" + VERSION = "0.5.1" end From 99344da3517abed58992e902a2ed4cb0864aa9ea Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 2 Jun 2025 20:01:47 +0000 Subject: [PATCH 220/295] fix: `to_sorbet_type` should not return branded types --- lib/openai/internal/type/enum.rb | 9 ++++++--- lib/openai/internal/type/union.rb | 7 +++++-- lib/openai/models.rb | 8 ++++---- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 9dd70f63..71915618 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -116,11 +116,14 @@ def coerce(value, state:) # # @return [Object] def to_sorbet_type - case values + types = values.map { OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(_1) }.uniq + case types in [] T.noreturn - in [value, *_] - T.all(OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(value), self) + in [type] + type + else + T.any(*types) end end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 3eed40a8..8281b3e1 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -220,11 +220,14 @@ def dump(value, state:) # # @return [Object] def to_sorbet_type - case (v = variants) + types = variants.map { OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(_1) }.uniq + case types in [] T.noreturn + in [type] + type else - T.any(*v.map { 
OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(_1) }) + T.any(*types) end end diff --git a/lib/openai/models.rb b/lib/openai/models.rb index a0f5d753..5a1af010 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -11,16 +11,16 @@ module OpenAI mod.constants.each do |name| case mod.const_get(name) in true | false - mod.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, mod) } } + mod.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T::Boolean } } mod.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } in Integer - mod.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, mod) } } + mod.define_sorbet_constant!(:TaggedInteger) { T.type_alias { Integer } } mod.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } in Float - mod.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, mod) } } + mod.define_sorbet_constant!(:TaggedFloat) { T.type_alias { Float } } mod.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } in Symbol - mod.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, mod) } } + mod.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { Symbol } } mod.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } else end From 62f094dd5c9361e6f4c28af8018e6c2645561ae1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 3 Jun 2025 16:55:12 +0000 Subject: [PATCH 221/295] feat(api): add new realtime and audio models, realtime session options --- .stats.yml | 4 ++-- lib/openai/models/chat_model.rb | 1 + rbi/openai/models/chat_model.rbi | 5 +++++ sig/openai/models/chat_model.rbs | 2 ++ 4 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index fb17fac7..d8555608 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2bcc845d8635bf93ddcf9ee723af4d7928248412a417bee5fc10d863a1e13867.yml -openapi_spec_hash: 865230cb3abeb01bd85de05891af23c4 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-04213ea42074f52b8e7e60e101ed7d7ae47b8abcc233c7e8eae310bba544454d.yml +openapi_spec_hash: 5fb148608764103ba3700cd6bda4f22e config_hash: ed1e6b3c5f93d12b80d31167f55c557c diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 6f8732aa..e62d4d46 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -30,6 +30,7 @@ module ChatModel GPT_4O_AUDIO_PREVIEW = :"gpt-4o-audio-preview" GPT_4O_AUDIO_PREVIEW_2024_10_01 = :"gpt-4o-audio-preview-2024-10-01" GPT_4O_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-audio-preview-2024-12-17" + GPT_4O_AUDIO_PREVIEW_2025_06_03 = :"gpt-4o-audio-preview-2025-06-03" GPT_4O_MINI_AUDIO_PREVIEW = :"gpt-4o-mini-audio-preview" GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = :"gpt-4o-mini-audio-preview-2024-12-17" GPT_4O_SEARCH_PREVIEW = :"gpt-4o-search-preview" diff --git a/rbi/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi index 5046338e..26dc28c7 100644 --- a/rbi/openai/models/chat_model.rbi +++ b/rbi/openai/models/chat_model.rbi @@ -52,6 +52,11 @@ module OpenAI :"gpt-4o-audio-preview-2024-12-17", OpenAI::ChatModel::TaggedSymbol ) + GPT_4O_AUDIO_PREVIEW_2025_06_03 = + T.let( + :"gpt-4o-audio-preview-2025-06-03", + OpenAI::ChatModel::TaggedSymbol + ) GPT_4O_MINI_AUDIO_PREVIEW = T.let(:"gpt-4o-mini-audio-preview", 
OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17 = diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 52519f63..bbe91426 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -26,6 +26,7 @@ module OpenAI | :"gpt-4o-audio-preview" | :"gpt-4o-audio-preview-2024-10-01" | :"gpt-4o-audio-preview-2024-12-17" + | :"gpt-4o-audio-preview-2025-06-03" | :"gpt-4o-mini-audio-preview" | :"gpt-4o-mini-audio-preview-2024-12-17" | :"gpt-4o-search-preview" @@ -84,6 +85,7 @@ module OpenAI GPT_4O_AUDIO_PREVIEW: :"gpt-4o-audio-preview" GPT_4O_AUDIO_PREVIEW_2024_10_01: :"gpt-4o-audio-preview-2024-10-01" GPT_4O_AUDIO_PREVIEW_2024_12_17: :"gpt-4o-audio-preview-2024-12-17" + GPT_4O_AUDIO_PREVIEW_2025_06_03: :"gpt-4o-audio-preview-2025-06-03" GPT_4O_MINI_AUDIO_PREVIEW: :"gpt-4o-mini-audio-preview" GPT_4O_MINI_AUDIO_PREVIEW_2024_12_17: :"gpt-4o-mini-audio-preview-2024-12-17" GPT_4O_SEARCH_PREVIEW: :"gpt-4o-search-preview" From f1b4d82c0fabcd8d357ff1496075f6396d1bedb3 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 3 Jun 2025 17:05:49 +0000 Subject: [PATCH 222/295] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index d8555608..96c16369 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-04213ea42074f52b8e7e60e101ed7d7ae47b8abcc233c7e8eae310bba544454d.yml -openapi_spec_hash: 5fb148608764103ba3700cd6bda4f22e +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-0205acb1015d29b2312a48526734c0399f93026d4fe2dff5c7768f566e333fd2.yml +openapi_spec_hash: 1772cc9056c2f6dfb2a4e9cb77ee6343 config_hash: ed1e6b3c5f93d12b80d31167f55c557c From db47eea573182a5976e7ffb85de6a70dda4e0f3e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 3 Jun 2025 17:14:23 +0000 Subject: [PATCH 223/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d04f223f..4208b5cb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.5.1" + ".": "0.6.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 42ec5d77..734dc4e3 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.5.1) + openai (0.6.0) connection_pool GEM diff --git a/README.md b/README.md index 41d3b352..b30bfcf0 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.5.1" +gem "openai", "~> 0.6.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index bd01e775..1e8185cd 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.5.1" + VERSION = "0.6.0" end From fca891c40d2b865c172a4a042afc6aaf82a39ea6 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 4 Jun 2025 15:43:38 +0000 Subject: [PATCH 224/295] fix: default content-type for text in multi-part formdata uploads should be text/plain --- lib/openai/internal/util.rb | 17 ++++++++--------- rbi/openai/internal/util.rbi | 2 ++ 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index eb5d1ffc..00653aa3 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -497,7 +497,7 @@ class << self # @param closing [Array] # @param content_type [String, nil] private def write_multipart_content(y, val:, closing:, content_type: nil) - content_type ||= "application/octet-stream" + content_line = "Content-Type: %s\r\n\r\n" case val in OpenAI::FilePart @@ -508,24 +508,21 @@ class << self content_type: val.content_type ) in Pathname - y << "Content-Type: #{content_type}\r\n\r\n" + y << format(content_line, content_type || "application/octet-stream") io = val.open(binmode: true) closing << io.method(:close) IO.copy_stream(io, y) in IO - y << "Content-Type: #{content_type}\r\n\r\n" + y << format(content_line, content_type || "application/octet-stream") IO.copy_stream(val, y) in StringIO - y << "Content-Type: #{content_type}\r\n\r\n" + y << format(content_line, content_type || "application/octet-stream") y << val.string - in String - y << "Content-Type: #{content_type}\r\n\r\n" - y << val.to_s in -> { primitive?(_1) } - y << "Content-Type: text/plain\r\n\r\n" + y << format(content_line, content_type || "text/plain") y << val.to_s else - y << "Content-Type: application/json\r\n\r\n" + y << format(content_line, content_type || "application/json") y << JSON.generate(val) end y << "\r\n" @@ -563,6 +560,8 @@ class << self # @api private # + # https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.1.md#special-considerations-for-multipart-content + # # @param body [Object] # # @return [Array(String, Enumerable)] diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index ddce5834..69ba15c5 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -332,6 +332,8 @@ module OpenAI end # @api private + # + # https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.1.md#special-considerations-for-multipart-content sig do params(body: T.anything).returns([String, T::Enumerable[String]]) end From e45ae5cde968f422418398c5d8d7de1f5c758912 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 15:47:57 +0000 Subject: [PATCH 225/295] feat(api): Add tools and structured outputs to evals --- .stats.yml | 4 +- ...create_eval_completions_run_data_source.rb | 68 +++++- .../models/evals/run_cancel_response.rb | 78 +++++- lib/openai/models/evals/run_create_params.rb | 80 ++++++- 
.../models/evals/run_create_response.rb | 78 +++++- lib/openai/models/evals/run_list_response.rb | 77 +++++- .../models/evals/run_retrieve_response.rb | 78 +++++- ...reate_eval_completions_run_data_source.rbi | 110 +++++++++ .../models/evals/run_cancel_response.rbi | 187 +++++++++++++++ rbi/openai/models/evals/run_create_params.rbi | 222 ++++++++++++++++++ .../models/evals/run_create_response.rbi | 187 +++++++++++++++ rbi/openai/models/evals/run_list_response.rbi | 187 +++++++++++++++ .../models/evals/run_retrieve_response.rbi | 187 +++++++++++++++ ...reate_eval_completions_run_data_source.rbs | 29 +++ .../models/evals/run_cancel_response.rbs | 39 +++ sig/openai/models/evals/run_create_params.rbs | 39 +++ .../models/evals/run_create_response.rbs | 39 +++ sig/openai/models/evals/run_list_response.rbs | 39 +++ .../models/evals/run_retrieve_response.rbs | 39 +++ 19 files changed, 1759 insertions(+), 8 deletions(-) diff --git a/.stats.yml b/.stats.yml index 96c16369..8507b6ff 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-0205acb1015d29b2312a48526734c0399f93026d4fe2dff5c7768f566e333fd2.yml -openapi_spec_hash: 1772cc9056c2f6dfb2a4e9cb77ee6343 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4865dda2b62927bd141cbc85f81be3d88602f103e2c581e15eb1caded3e3aaa2.yml +openapi_spec_hash: 7d14a9b23ef4ac93ea46d629601b6f6b config_hash: ed1e6b3c5f93d12b80d31167f55c557c diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 28f9e688..6521c8f7 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -432,6 +432,24 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :max_completion_tokens, Integer + # @!attribute response_format + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :response_format, + union: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::ResponseFormat + } + # @!attribute seed # A seed value to initialize the randomness, during sampling. # @@ -444,20 +462,68 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute tools + # A list of tools the model may call. Currently, only functions are supported as a + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
# # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, response_format: nil, seed: nil, temperature: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams} for + # more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. # + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + # # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams#response_format + module ResponseFormat + extend OpenAI::Internal::Type::Union + + # Default response format. Used to generate text responses. + variant -> { OpenAI::ResponseFormatText } + + # JSON Schema response format. Used to generate structured JSON responses. + # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + variant -> { OpenAI::ResponseFormatJSONSchema } + + # JSON object response format. An older method of generating JSON responses. + # Using `json_schema` is recommended for models that support it. Note that the + # model will not generate JSON without a system or user message instructing it + # to do so. + variant -> { OpenAI::ResponseFormatJSONObject } + + # @!method self.variants + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] + end end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index fd8642a6..74608cb9 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -616,20 +616,96 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, nil] + optional :text, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text } + + # @!attribute tools + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. # # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, text: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams} + # for more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. # # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param text [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain + # + # @param tools [Array] An array of tools the model may call while generating a response. You + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. 
+ # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, + union: -> { + OpenAI::Responses::ResponseFormatTextConfig + }, + api_name: :format + + # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text} + # for more details. + # + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + end end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index bc703e7f..8b84487a 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -576,20 +576,98 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, nil] + optional :text, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text + } + + # @!attribute tools + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. # # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, text: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams} + # for more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. 
# # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param text [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text] Configuration options for a text response from the model. Can be plain + # + # @param tools [Array] An array of tools the model may call while generating a response. You + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, + union: -> { + OpenAI::Responses::ResponseFormatTextConfig + }, + api_name: :format + + # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text} + # for more details. + # + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 3a110d09..73327ea2 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -616,20 +616,96 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, nil] + optional :text, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text } + + # @!attribute tools + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. 
+ # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. # # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, text: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams} + # for more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. # # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param text [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain + # + # @param tools [Array] An array of tools the model may call while generating a response. You + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, + union: -> { + OpenAI::Responses::ResponseFormatTextConfig + }, + api_name: :format + + # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text} + # for more details. + # + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + end end end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 83907899..0f8abfad 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -616,20 +616,95 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, nil] + optional :text, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text } + + # @!attribute tools + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. # # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, text: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams} + # for more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. # # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param text [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain + # + # @param tools [Array] An array of tools the model may call while generating a response. You + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, + union: -> { + OpenAI::Responses::ResponseFormatTextConfig + }, + api_name: :format + + # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text} + # for more details. + # + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + end end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 9db0bb26..20526587 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -620,20 +620,96 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float + # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, nil] + optional :text, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text } + + # @!attribute tools + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. 
Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + # + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. # # @return [Float, nil] optional :top_p, Float - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, text: nil, tools: nil, top_p: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams} + # for more details. + # # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. # # @param seed [Integer] A seed value to initialize the randomness, during sampling. # # @param temperature [Float] A higher temperature increases randomness in the outputs. # + # @param text [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain + # + # @param tools [Array] An array of tools the model may call while generating a response. You + # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, + union: -> { + OpenAI::Responses::ResponseFormatTextConfig + }, + api_name: :format + + # @!method initialize(format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text} + # for more details. + # + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. 
+ end end end diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 3b8ebdb7..8298b14c 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -814,6 +814,41 @@ module OpenAI sig { params(max_completion_tokens: Integer).void } attr_writer :max_completion_tokens + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject + ) + ) + ) + end + attr_reader :response_format + + sig do + params( + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :response_format + # A seed value to initialize the randomness, during sampling. sig { returns(T.nilable(Integer)) } attr_reader :seed @@ -828,6 +863,19 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # A list of tools the model may call. Currently, only functions are supported as a + # tool. Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. + sig { returns(T.nilable(T::Array[OpenAI::Chat::ChatCompletionTool])) } + attr_reader :tools + + sig do + params( + tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -838,18 +886,40 @@ module OpenAI sig do params( max_completion_tokens: Integer, + response_format: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), seed: Integer, temperature: Float, + tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], top_p: Float ).returns(T.attached_class) end def self.new( # The maximum number of tokens in the generated output. max_completion_tokens: nil, + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + response_format: nil, # A seed value to initialize the randomness, during sampling. seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # A list of tools the model may call. Currently, only functions are supported as a + # tool. 
Use this to provide a list of functions the model may generate JSON inputs + # for. A max of 128 functions are supported. + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil ) @@ -859,14 +929,54 @@ module OpenAI override.returns( { max_completion_tokens: Integer, + response_format: + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject + ), seed: Integer, temperature: Float, + tools: T::Array[OpenAI::Chat::ChatCompletionTool], top_p: Float } ) end def to_hash end + + # An object specifying the format that the model must output. + # + # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured + # Outputs which ensures the model will match your supplied JSON schema. Learn more + # in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + module ResponseFormat + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::ResponseFormatText, + OpenAI::ResponseFormatJSONSchema, + OpenAI::ResponseFormatJSONObject + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::ResponseFormat::Variants + ] + ) + end + def self.variants + end + end end end end diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index d6f3d390..9ac8bf15 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -1056,6 +1056,66 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text + ) + ) + end + attr_reader :text + + sig do + params( + text: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text::OrHash + ).void + end + attr_writer :text + + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). 
+ sig do + returns(T.nilable(T::Array[OpenAI::Responses::Tool::Variants])) + end + attr_reader :tools + + sig do + params( + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -1068,6 +1128,21 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text::OrHash, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: Float ).returns(T.attached_class) end @@ -1078,6 +1153,27 @@ module OpenAI seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + text: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil ) @@ -1089,12 +1185,103 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: Float } ) end def to_hash end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFormatTextConfig::Variants + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil + ) + end + + sig do + override.returns( + { + format_: + OpenAI::Responses::ResponseFormatTextConfig::Variants + } + ) + end + def to_hash + end + end end end diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index eba90132..32ad2f72 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -1008,6 +1008,81 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + returns( + T.nilable( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text + ) + ) + end + attr_reader :text + + sig do + params( + text: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text::OrHash + ).void + end + attr_writer :text + + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). 
+ # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + sig do + returns( + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::WebSearchTool + ) + ] + ) + ) + end + attr_reader :tools + + sig do + params( + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -1020,6 +1095,21 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text::OrHash, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: Float ).returns(T.attached_class) end @@ -1030,6 +1120,27 @@ module OpenAI seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + text: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
top_p: nil ) @@ -1041,12 +1152,123 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, + OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::WebSearchTool + ) + ], top_p: Float } ) end def to_hash end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject + ) + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. 
+ format_: nil + ) + end + + sig do + override.returns( + { + format_: + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject + ) + } + ) + end + def to_hash + end + end end end diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index 32cae98d..ae40fb83 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -1056,6 +1056,66 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text + ) + ) + end + attr_reader :text + + sig do + params( + text: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text::OrHash + ).void + end + attr_writer :text + + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + sig do + returns(T.nilable(T::Array[OpenAI::Responses::Tool::Variants])) + end + attr_reader :tools + + sig do + params( + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -1068,6 +1128,21 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text::OrHash, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: Float ).returns(T.attached_class) end @@ -1078,6 +1153,27 @@ module OpenAI seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # Configuration options for a text response from the model. 
Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + text: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil ) @@ -1089,12 +1185,103 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: Float } ) end def to_hash end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFormatTextConfig::Variants + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil + ) + end + + sig do + override.returns( + { + format_: + OpenAI::Responses::ResponseFormatTextConfig::Variants + } + ) + end + def to_hash + end + end end end diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index 272e7bb7..9d45d00b 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -1052,6 +1052,66 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text + ) + ) + end + attr_reader :text + + sig do + params( + text: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text::OrHash + ).void + end + attr_writer :text + + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + sig do + returns(T.nilable(T::Array[OpenAI::Responses::Tool::Variants])) + end + attr_reader :tools + + sig do + params( + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -1064,6 +1124,21 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text::OrHash, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: Float ).returns(T.attached_class) end @@ -1074,6 +1149,27 @@ module OpenAI seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + text: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil ) @@ -1085,12 +1181,103 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: Float } ) end def to_hash end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. 
+ sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFormatTextConfig::Variants + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil + ) + end + + sig do + override.returns( + { + format_: + OpenAI::Responses::ResponseFormatTextConfig::Variants + } + ) + end + def to_hash + end + end end end diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index a51ecb78..3a1d9ea8 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -1058,6 +1058,66 @@ module OpenAI sig { params(temperature: Float).void } attr_writer :temperature + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text + ) + ) + end + attr_reader :text + + sig do + params( + text: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text::OrHash + ).void + end + attr_writer :text + + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). 
+ sig do + returns(T.nilable(T::Array[OpenAI::Responses::Tool::Variants])) + end + attr_reader :tools + + sig do + params( + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ] + ).void + end + attr_writer :tools + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. sig { returns(T.nilable(Float)) } attr_reader :top_p @@ -1070,6 +1130,21 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text::OrHash, + tools: + T::Array[ + T.any( + OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, + OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::WebSearchTool::OrHash + ) + ], top_p: Float ).returns(T.attached_class) end @@ -1080,6 +1155,27 @@ module OpenAI seed: nil, # A higher temperature increases randomness in the outputs. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + text: nil, + # An array of tools the model may call while generating a response. You can + # specify which tool to use by setting the `tool_choice` parameter. + # + # The two categories of tools you can provide the model are: + # + # - **Built-in tools**: Tools that are provided by OpenAI that extend the model's + # capabilities, like + # [web search](https://platform.openai.com/docs/guides/tools-web-search) or + # [file search](https://platform.openai.com/docs/guides/tools-file-search). + # Learn more about + # [built-in tools](https://platform.openai.com/docs/guides/tools). + # - **Function calls (custom tools)**: Functions that are defined by you, enabling + # the model to call your own code. Learn more about + # [function calling](https://platform.openai.com/docs/guides/function-calling). + tools: nil, # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. top_p: nil ) @@ -1091,12 +1187,103 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: Float } ) end def to_hash end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseFormatTextConfig::Variants + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil + ) + end + + sig do + override.returns( + { + format_: + OpenAI::Responses::ResponseFormatTextConfig::Variants + } + ) + end + def to_hash + end + end end end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index 181d516b..8c12eed5 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -303,8 +303,10 @@ module OpenAI type sampling_params = { max_completion_tokens: Integer, + response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, seed: Integer, temperature: Float, + tools: ::Array[OpenAI::Chat::ChatCompletionTool], top_p: Float } @@ -313,6 +315,12 @@ module OpenAI def max_completion_tokens=: (Integer) -> Integer + attr_reader response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format? + + def response_format=: ( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format + attr_reader seed: Integer? def seed=: (Integer) -> Integer @@ -321,23 +329,44 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader tools: ::Array[OpenAI::Chat::ChatCompletionTool]? + + def tools=: ( + ::Array[OpenAI::Chat::ChatCompletionTool] + ) -> ::Array[OpenAI::Chat::ChatCompletionTool] + attr_reader top_p: Float? 
def top_p=: (Float) -> Float def initialize: ( ?max_completion_tokens: Integer, + ?response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, ?seed: Integer, ?temperature: Float, + ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], ?top_p: Float ) -> void def to_hash: -> { max_completion_tokens: Integer, + response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, seed: Integer, temperature: Float, + tools: ::Array[OpenAI::Chat::ChatCompletionTool], top_p: Float } + + type response_format = + OpenAI::ResponseFormatText + | OpenAI::ResponseFormatJSONSchema + | OpenAI::ResponseFormatJSONObject + + module ResponseFormat + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format] + end end end end diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 53408038..b9897b66 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -427,6 +427,8 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } @@ -443,6 +445,18 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader text: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text? + + def text=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text + + attr_reader tools: ::Array[OpenAI::Models::Responses::tool]? + + def tools=: ( + ::Array[OpenAI::Models::Responses::tool] + ) -> ::Array[OpenAI::Models::Responses::tool] + attr_reader top_p: Float? def top_p=: (Float) -> Float @@ -451,6 +465,8 @@ module OpenAI ?max_completion_tokens: Integer, ?seed: Integer, ?temperature: Float, + ?text: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, + ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float ) -> void @@ -458,8 +474,31 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } + + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? 
+ + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } + end end end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index 7c36c313..1d4e8ac2 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -390,6 +390,8 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } @@ -406,6 +408,18 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader text: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text? + + def text=: ( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text + + attr_reader tools: ::Array[OpenAI::Models::Responses::tool]? + + def tools=: ( + ::Array[OpenAI::Models::Responses::tool] + ) -> ::Array[OpenAI::Models::Responses::tool] + attr_reader top_p: Float? def top_p=: (Float) -> Float @@ -414,6 +428,8 @@ module OpenAI ?max_completion_tokens: Integer, ?seed: Integer, ?temperature: Float, + ?text: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, + ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float ) -> void @@ -421,8 +437,31 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } + + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? + + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } + end end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 6076ce25..97e64211 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -427,6 +427,8 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } @@ -443,6 +445,18 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader text: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text? 
+ + def text=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text + + attr_reader tools: ::Array[OpenAI::Models::Responses::tool]? + + def tools=: ( + ::Array[OpenAI::Models::Responses::tool] + ) -> ::Array[OpenAI::Models::Responses::tool] + attr_reader top_p: Float? def top_p=: (Float) -> Float @@ -451,6 +465,8 @@ module OpenAI ?max_completion_tokens: Integer, ?seed: Integer, ?temperature: Float, + ?text: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, + ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float ) -> void @@ -458,8 +474,31 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } + + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? + + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } + end end end diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index b87620d8..be5a46e1 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -427,6 +427,8 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } @@ -443,6 +445,18 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader text: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text? + + def text=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text + + attr_reader tools: ::Array[OpenAI::Models::Responses::tool]? + + def tools=: ( + ::Array[OpenAI::Models::Responses::tool] + ) -> ::Array[OpenAI::Models::Responses::tool] + attr_reader top_p: Float? def top_p=: (Float) -> Float @@ -451,6 +465,8 @@ module OpenAI ?max_completion_tokens: Integer, ?seed: Integer, ?temperature: Float, + ?text: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, + ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float ) -> void @@ -458,8 +474,31 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } + + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? 
+ + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } + end end end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index 97d98b20..63418be1 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -427,6 +427,8 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } @@ -443,6 +445,18 @@ module OpenAI def temperature=: (Float) -> Float + attr_reader text: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text? + + def text=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text + + attr_reader tools: ::Array[OpenAI::Models::Responses::tool]? + + def tools=: ( + ::Array[OpenAI::Models::Responses::tool] + ) -> ::Array[OpenAI::Models::Responses::tool] + attr_reader top_p: Float? def top_p=: (Float) -> Float @@ -451,6 +465,8 @@ module OpenAI ?max_completion_tokens: Integer, ?seed: Integer, ?temperature: Float, + ?text: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, + ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_p: Float ) -> void @@ -458,8 +474,31 @@ module OpenAI max_completion_tokens: Integer, seed: Integer, temperature: Float, + text: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text, + tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float } + + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? + + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } + end end end From 325ab7b7e3d03407e4ec9be8a97cc20b532218e8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 16:50:55 +0000 Subject: [PATCH 226/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4208b5cb..1b77f506 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.6.0" + ".": "0.7.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 734dc4e3..6bff8bd1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.6.0) + openai (0.7.0) connection_pool GEM diff --git a/README.md b/README.md index b30bfcf0..aca0d5f6 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.6.0" +gem "openai", "~> 0.7.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 1e8185cd..c2177e7b 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.6.0" + VERSION = "0.7.0" end From 0b7dbd8a17e12d9c219815b818e6b15544dc3f73 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 16:38:18 +0000 Subject: [PATCH 227/295] feat(api): Add o3-pro model IDs --- .stats.yml | 6 +++--- lib/openai/models/all_models.rb | 2 ++ lib/openai/models/responses_model.rb | 2 ++ rbi/openai/models/all_models.rbi | 7 +++++++ rbi/openai/models/responses_model.rbi | 10 ++++++++++ sig/openai/models/all_models.rbs | 4 ++++ sig/openai/models/responses_model.rbs | 4 ++++ 7 files changed, 32 insertions(+), 3 deletions(-) diff --git a/.stats.yml b/.stats.yml index 8507b6ff..df602bb9 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4865dda2b62927bd141cbc85f81be3d88602f103e2c581e15eb1caded3e3aaa2.yml -openapi_spec_hash: 7d14a9b23ef4ac93ea46d629601b6f6b -config_hash: ed1e6b3c5f93d12b80d31167f55c557c +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-3ae9c18dd7ccfc3ac5206f24394665f563a19015cfa8847b2801a2694d012abc.yml +openapi_spec_hash: 48175b03b58805cd5c80793c66fd54e5 +config_hash: 4caff63b74a41f71006987db702f2918 diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 06729722..93144d36 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -16,6 +16,8 @@ module ResponsesOnlyModel O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + O3_PRO = :"o3-pro" + O3_PRO_2025_06_10 = :"o3-pro-2025-06-10" COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 68b8b32e..a344e7b6 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -16,6 +16,8 @@ module ResponsesOnlyModel O1_PRO = :"o1-pro" O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" + O3_PRO = :"o3-pro" + O3_PRO_2025_06_10 = :"o3-pro-2025-06-10" COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" diff --git a/rbi/openai/models/all_models.rbi b/rbi/openai/models/all_models.rbi index 66c0e6ae..acbc553b 100644 --- a/rbi/openai/models/all_models.rbi +++ b/rbi/openai/models/all_models.rbi @@ -28,6 +28,13 @@ module OpenAI :"o1-pro-2025-03-19", OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol ) + O3_PRO = + T.let(:"o3-pro", OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol) + O3_PRO_2025_06_10 = + T.let( + :"o3-pro-2025-06-10", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = T.let( :"computer-use-preview", diff --git a/rbi/openai/models/responses_model.rbi b/rbi/openai/models/responses_model.rbi index 5fc0cd97..459a52c4 100644 --- a/rbi/openai/models/responses_model.rbi +++ 
b/rbi/openai/models/responses_model.rbi @@ -33,6 +33,16 @@ module OpenAI :"o1-pro-2025-03-19", OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol ) + O3_PRO = + T.let( + :"o3-pro", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + O3_PRO_2025_06_10 = + T.let( + :"o3-pro-2025-06-10", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = T.let( :"computer-use-preview", diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 4d649383..8a7b555a 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -11,6 +11,8 @@ module OpenAI type responses_only_model = :"o1-pro" | :"o1-pro-2025-03-19" + | :"o3-pro" + | :"o3-pro-2025-06-10" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" @@ -19,6 +21,8 @@ module OpenAI O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + O3_PRO: :"o3-pro" + O3_PRO_2025_06_10: :"o3-pro-2025-06-10" COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 3014c03a..cd5a5405 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -11,6 +11,8 @@ module OpenAI type responses_only_model = :"o1-pro" | :"o1-pro-2025-03-19" + | :"o3-pro" + | :"o3-pro-2025-06-10" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" @@ -19,6 +21,8 @@ module OpenAI O1_PRO: :"o1-pro" O1_PRO_2025_03_19: :"o1-pro-2025-03-19" + O3_PRO: :"o3-pro" + O3_PRO_2025_06_10: :"o3-pro-2025-06-10" COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" From 7e247eab4ec8117b0e829bc3f6a5f769eb5012a0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 16:51:38 +0000 Subject: [PATCH 228/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1b77f506..6538ca91 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.7.0" + ".": "0.8.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 6bff8bd1..1316055a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.7.0) + openai (0.8.0) connection_pool GEM diff --git a/README.md b/README.md index aca0d5f6..bb1d2721 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.7.0" +gem "openai", "~> 0.8.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index c2177e7b..3d1c96c8 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.7.0" + VERSION = "0.8.0" end From 25417c56c6ba799f70cfa4910386f909aca228c7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 11 Jun 2025 09:31:12 +0000 Subject: [PATCH 229/295] chore(ci): link to correct github repo --- .github/workflows/publish-gem.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml index 725febaa..e4185573 100644 --- a/.github/workflows/publish-gem.yml +++ b/.github/workflows/publish-gem.yml @@ -1,5 +1,5 @@ # workflow for re-running publishing to rubygems.org in case it fails for some reason -# you can run this workflow by navigating to https://www.github.com/openai/openai-python/actions/workflows/publish-gem.yml +# you can run this workflow by navigating to https://www.github.com/openai/openai-ruby/actions/workflows/publish-gem.yml name: Publish Gem on: workflow_dispatch: From ca97f98df98d31b39f460876e963221d7bff0766 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 16 Jun 2025 18:31:29 +0000 Subject: [PATCH 230/295] feat(api): add reusable prompt IDs --- .stats.yml | 6 +- lib/openai.rb | 1 + lib/openai/models/chat/chat_completion.rb | 1 + .../models/chat/chat_completion_chunk.rb | 1 + .../models/chat/completion_create_params.rb | 1 + .../models/fine_tuning/job_create_params.rb | 6 +- lib/openai/models/image_edit_params.rb | 36 +++++- lib/openai/models/responses/response.rb | 47 ++++++- .../responses/response_create_params.rb | 17 ++- .../models/responses/response_prompt.rb | 63 +++++++++ lib/openai/resources/fine_tuning/jobs.rb | 4 +- lib/openai/resources/images.rb | 6 +- lib/openai/resources/responses.rb | 12 +- rbi/openai/models/chat/chat_completion.rbi | 5 + .../models/chat/chat_completion_chunk.rbi | 5 + .../models/chat/completion_create_params.rbi | 5 + .../models/fine_tuning/job_create_params.rbi | 12 +- rbi/openai/models/image_edit_params.rbi | 51 ++++++++ rbi/openai/models/responses/response.rbi | 73 ++++++++++- .../responses/response_create_params.rbi | 28 +++- .../models/responses/response_prompt.rbi | 120 ++++++++++++++++++ rbi/openai/resources/fine_tuning/jobs.rbi | 10 +- rbi/openai/resources/images.rbi | 11 ++ rbi/openai/resources/responses.rbi | 14 +- sig/openai/models/chat/chat_completion.rbs | 3 +- .../models/chat/chat_completion_chunk.rbs | 3 +- .../models/chat/completion_create_params.rbs | 3 +- sig/openai/models/image_edit_params.rbs | 22 ++++ sig/openai/models/responses/response.rbs | 27 +++- .../responses/response_create_params.rbs | 8 +- .../models/responses/response_prompt.rbs | 44 +++++++ sig/openai/resources/images.rbs | 2 + sig/openai/resources/responses.rbs | 2 + test/openai/resources/responses_test.rb | 9 +- 34 files changed, 600 insertions(+), 58 deletions(-) create mode 100644 lib/openai/models/responses/response_prompt.rb create mode 100644 
rbi/openai/models/responses/response_prompt.rbi create mode 100644 sig/openai/models/responses/response_prompt.rbs diff --git a/.stats.yml b/.stats.yml index df602bb9..57d0d6d4 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-3ae9c18dd7ccfc3ac5206f24394665f563a19015cfa8847b2801a2694d012abc.yml -openapi_spec_hash: 48175b03b58805cd5c80793c66fd54e5 -config_hash: 4caff63b74a41f71006987db702f2918 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9e41d2d5471d2c28bff0d616f4476f5b0e6c541ef4cb51bdaaef5fdf5e13c8b2.yml +openapi_spec_hash: 86f765e18d00e32cf2ce9db7ab84d946 +config_hash: fd2af1d5eff0995bb7dc02ac9a34851d diff --git a/lib/openai.rb b/lib/openai.rb index 5d4ebee6..8fbad7a4 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -412,6 +412,7 @@ require_relative "openai/models/responses/response_output_refusal" require_relative "openai/models/responses/response_output_text" require_relative "openai/models/responses/response_output_text_annotation_added_event" +require_relative "openai/models/responses/response_prompt" require_relative "openai/models/responses/response_queued_event" require_relative "openai/models/responses/response_reasoning_delta_event" require_relative "openai/models/responses/response_reasoning_done_event" diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index b1a17a6f..79d3e56e 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -213,6 +213,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default FLEX = :flex + SCALE = :scale # @!method self.values # @return [Array] diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 63c1109e..bdce048f 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -396,6 +396,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default FLEX = :flex + SCALE = :scale # @!method self.values # @return [Array] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index ee393bad..c47d3728 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -558,6 +558,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default FLEX = :flex + SCALE = :scale # @!method self.values # @return [Array] diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 193bc2d0..1d4258c1 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -31,7 +31,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. # # @return [String] @@ -100,7 +101,8 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # Your dataset must be formatted as a JSONL file. You must upload your file with # the purpose `fine-tune`. 
# - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. # # @return [String, nil] diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 6162af1a..833d7a5a 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -61,6 +61,22 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true + # @!attribute output_compression + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + # + # @return [Integer, nil] + optional :output_compression, Integer, nil?: true + + # @!attribute output_format + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + # + # @return [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] + optional :output_format, enum: -> { OpenAI::ImageEditParams::OutputFormat }, nil?: true + # @!attribute quality # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. @@ -94,7 +110,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # @@ -110,6 +126,10 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter + # + # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is + # # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` @@ -174,6 +194,20 @@ module Model # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + JPEG = :jpeg + WEBP = :webp + + # @!method self.values + # @return [Array] + end + # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. 
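For reference, a rough usage sketch of the image-edit options added above. The client setup, file name, and prompt text are illustrative and not part of the patch; `gpt-image-1`, `output_format`, and `output_compression` are as documented in the hunks above.

```ruby
require "pathname"
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Edit an image and request compressed WebP output. Per the parameter docs
# above, both new options are only honored by `gpt-image-1`.
images = client.images.edit(
  image: Pathname("sunlit_lounge.png"), # illustrative input file
  prompt: "Add a flamingo to the pool", # illustrative prompt
  model: "gpt-image-1",
  output_format: :webp,   # :png (default), :jpeg, or :webp
  output_compression: 75  # 0-100; applies to the webp/jpeg formats only
)
```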
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 38b6465d..0fbf7be3 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -32,15 +32,14 @@ class Response < OpenAI::Internal::Type::BaseModel required :incomplete_details, -> { OpenAI::Responses::Response::IncompleteDetails }, nil?: true # @!attribute instructions - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to # swap out system (or developer) messages in new responses. # - # @return [String, nil] - required :instructions, String, nil?: true + # @return [String, Array, nil] + required :instructions, union: -> { OpenAI::Responses::Response::Instructions }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -156,6 +155,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :previous_response_id, String, nil?: true + # @!attribute prompt + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + # + # @return [OpenAI::Models::Responses::ResponsePrompt, nil] + optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true + # @!attribute reasoning # **o-series models only** # @@ -231,7 +237,7 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. # @@ -243,7 +249,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. # - # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # @param instructions [String, Array, nil] A system (or developer) message inserted into the model's context. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -267,6 +273,8 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # + # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. 
+ # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is @@ -310,6 +318,32 @@ module Reason end end + # A system (or developer) message inserted into the model's context. + # + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. + # + # @see OpenAI::Models::Responses::Response#instructions + module Instructions + extend OpenAI::Internal::Type::Union + + # A text input to the model, equivalent to a text input with the + # `developer` role. + variant String + + # A list of one or many input items to the model, containing + # different content types. + variant -> { OpenAI::Models::Responses::Response::Instructions::ResponseInputItemArray } + + # @!method self.variants + # @return [Array(String, Array)] + + # @type [OpenAI::Internal::Type::Converter] + ResponseInputItemArray = + OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Responses::ResponseInputItem }] + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. @@ -364,6 +398,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default FLEX = :flex + SCALE = :scale # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 643e98b0..85db66cc 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -64,8 +64,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel nil?: true # @!attribute instructions - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -107,6 +106,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :previous_response_id, String, nil?: true + # @!attribute prompt + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). 
+ # + # @return [OpenAI::Models::Responses::ResponsePrompt, nil] + optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true + # @!attribute reasoning # **o-series models only** # @@ -220,7 +226,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # @@ -232,7 +238,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # @@ -242,6 +248,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # + # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is @@ -311,6 +319,7 @@ module ServiceTier AUTO = :auto DEFAULT = :default FLEX = :flex + SCALE = :scale # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_prompt.rb b/lib/openai/models/responses/response_prompt.rb new file mode 100644 index 00000000..aa5b7fb0 --- /dev/null +++ b/lib/openai/models/responses/response_prompt.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponsePrompt < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique identifier of the prompt template to use. + # + # @return [String] + required :id, String + + # @!attribute variables + # Optional map of values to substitute in for variables in your prompt. The + # substitution values can either be strings, or other Response input types like + # images or files. + # + # @return [Hash{Symbol=>String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile}, nil] + optional :variables, + -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Responses::ResponsePrompt::Variable] }, + nil?: true + + # @!attribute version + # Optional version of the prompt template. 
+ # + # @return [String, nil] + optional :version, String, nil?: true + + # @!method initialize(id:, variables: nil, version: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponsePrompt} for more details. + # + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + # + # @param id [String] The unique identifier of the prompt template to use. + # + # @param variables [Hash{Symbol=>String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile}, nil] Optional map of values to substitute in for variables in your + # + # @param version [String, nil] Optional version of the prompt template. + + # A text input to the model. + module Variable + extend OpenAI::Internal::Type::Union + + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # An image input to the model. Learn about [image inputs](https://platform.openai.com/docs/guides/vision). + variant -> { OpenAI::Responses::ResponseInputImage } + + # A file input to the model. + variant -> { OpenAI::Responses::ResponseInputFile } + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] + end + end + end + end +end diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index c978c56c..9db01f0c 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -16,7 +16,7 @@ class Jobs # Response includes details of the enqueued job including job status and the name # of the fine-tuned models once complete. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization) # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # @@ -59,7 +59,7 @@ def create(params) # # Get info about a fine-tuning job. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization) # # @overload retrieve(fine_tuning_job_id, request_options: {}) # diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 6a521f3b..e1d26736 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -45,7 +45,7 @@ def create_variation(params) # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. # - # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. 
# @@ -59,6 +59,10 @@ def create_variation(params) # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter + # + # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is + # # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 11fe39a6..32d603fd 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,7 +23,7 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # @@ -33,7 +33,7 @@ class Responses # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # @@ -43,6 +43,8 @@ class Responses # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # + # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is @@ -100,7 +102,7 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # @@ -110,7 +112,7 @@ def create(params) # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context + # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # @@ -120,6 +122,8 @@ def create(params) # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # + # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. 
This parameter is diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index 7408d715..6c8172ca 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -404,6 +404,11 @@ module OpenAI :flex, OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol ) + SCALE = + T.let( + :scale, + OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index b37b09ef..cd2d322e 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -824,6 +824,11 @@ module OpenAI :flex, OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol ) + SCALE = + T.let( + :scale, + OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 61b20248..9e6ffe46 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -1025,6 +1025,11 @@ module OpenAI :flex, OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol ) + SCALE = + T.let( + :scale, + OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/fine_tuning/job_create_params.rbi b/rbi/openai/models/fine_tuning/job_create_params.rbi index 32e49ed1..4ac7cefa 100644 --- a/rbi/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/openai/models/fine_tuning/job_create_params.rbi @@ -39,7 +39,8 @@ module OpenAI # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. sig { returns(String) } attr_accessor :training_file @@ -115,7 +116,8 @@ module OpenAI # Your dataset must be formatted as a JSONL file. You must upload your file with # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. sig { returns(T.nilable(String)) } attr_accessor :validation_file @@ -163,7 +165,8 @@ module OpenAI # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. training_file:, # The hyperparameters used for the fine-tuning job. This value is now deprecated @@ -200,7 +203,8 @@ module OpenAI # Your dataset must be formatted as a JSONL file. You must upload your file with # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. 
validation_file: nil, request_options: {} diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index f3e7df1e..0e0957b1 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -56,6 +56,20 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :n + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + sig { returns(T.nilable(Integer)) } + attr_accessor :output_compression + + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + sig do + returns(T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol)) + end + attr_accessor :output_format + # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. @@ -94,6 +108,9 @@ module OpenAI mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), @@ -133,6 +150,14 @@ module OpenAI model: nil, # The number of images to generate. Must be between 1 and 10. n: nil, + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + output_compression: nil, + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + output_format: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. @@ -164,6 +189,9 @@ module OpenAI mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), @@ -246,6 +274,29 @@ module OpenAI end end + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditParams::OutputFormat) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = T.let(:png, OpenAI::ImageEditParams::OutputFormat::TaggedSymbol) + JPEG = T.let(:jpeg, OpenAI::ImageEditParams::OutputFormat::TaggedSymbol) + WEBP = T.let(:webp, OpenAI::ImageEditParams::OutputFormat::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::OutputFormat::TaggedSymbol] + ) + end + def self.values + end + end + # The quality of the image that will be generated. 
`high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index d7a1789e..1ebda937 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -42,13 +42,16 @@ module OpenAI end attr_writer :incomplete_details - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to # swap out system (or developer) messages in new responses. - sig { returns(T.nilable(String)) } + sig do + returns( + T.nilable(OpenAI::Responses::Response::Instructions::Variants) + ) + end attr_accessor :instructions # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -143,6 +146,18 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :previous_response_id + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + sig { returns(T.nilable(OpenAI::Responses::ResponsePrompt)) } + attr_reader :prompt + + sig do + params( + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash) + ).void + end + attr_writer :prompt + # **o-series models only** # # Configuration options for @@ -236,7 +251,8 @@ module OpenAI error: T.nilable(OpenAI::Responses::ResponseError::OrHash), incomplete_details: T.nilable(OpenAI::Responses::Response::IncompleteDetails::OrHash), - instructions: T.nilable(String), + instructions: + T.nilable(OpenAI::Responses::Response::Instructions::Variants), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any( @@ -286,6 +302,7 @@ module OpenAI background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), reasoning: T.nilable(OpenAI::Reasoning::OrHash), service_tier: T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), @@ -307,8 +324,7 @@ module OpenAI error:, # Details about why the response is incomplete. incomplete_details:, - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -378,6 +394,9 @@ module OpenAI # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). 
+ prompt: nil, # **o-series models only** # # Configuration options for @@ -438,7 +457,8 @@ module OpenAI error: T.nilable(OpenAI::Responses::ResponseError), incomplete_details: T.nilable(OpenAI::Responses::Response::IncompleteDetails), - instructions: T.nilable(String), + instructions: + T.nilable(OpenAI::Responses::Response::Instructions::Variants), metadata: T.nilable(T::Hash[Symbol, String]), model: OpenAI::ResponsesModel::Variants, object: Symbol, @@ -451,6 +471,7 @@ module OpenAI background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt), reasoning: T.nilable(OpenAI::Reasoning), service_tier: T.nilable( @@ -557,6 +578,39 @@ module OpenAI end end + # A system (or developer) message inserted into the model's context. + # + # When using along with `previous_response_id`, the instructions from a previous + # response will not be carried over to the next response. This makes it simple to + # swap out system (or developer) messages in new responses. + module Instructions + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + T::Array[OpenAI::Responses::ResponseInputItem::Variants] + ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::Response::Instructions::Variants] + ) + end + def self.variants + end + + ResponseInputItemArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Responses::ResponseInputItem + ], + OpenAI::Internal::Type::Converter + ) + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. @@ -616,6 +670,11 @@ module OpenAI ) FLEX = T.let(:flex, OpenAI::Responses::Response::ServiceTier::TaggedSymbol) + SCALE = + T.let( + :scale, + OpenAI::Responses::Response::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 717f102e..8063c9c6 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -72,8 +72,7 @@ module OpenAI end attr_accessor :include - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -106,6 +105,18 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :previous_response_id + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). 
+ sig { returns(T.nilable(OpenAI::Responses::ResponsePrompt)) } + attr_reader :prompt + + sig do + params( + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash) + ).void + end + attr_writer :prompt + # **o-series models only** # # Configuration options for @@ -297,6 +308,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), reasoning: T.nilable(OpenAI::Reasoning::OrHash), service_tier: T.nilable( @@ -369,8 +381,7 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -393,6 +404,9 @@ module OpenAI # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + prompt: nil, # **o-series models only** # # Configuration options for @@ -490,6 +504,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt), reasoning: T.nilable(OpenAI::Reasoning), service_tier: T.nilable( @@ -603,6 +618,11 @@ module OpenAI :flex, OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol ) + SCALE = + T.let( + :scale, + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/responses/response_prompt.rbi b/rbi/openai/models/responses/response_prompt.rbi new file mode 100644 index 00000000..4a90fa5c --- /dev/null +++ b/rbi/openai/models/responses/response_prompt.rbi @@ -0,0 +1,120 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponsePrompt < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ResponsePrompt, OpenAI::Internal::AnyHash) + end + + # The unique identifier of the prompt template to use. + sig { returns(String) } + attr_accessor :id + + # Optional map of values to substitute in for variables in your prompt. The + # substitution values can either be strings, or other Response input types like + # images or files. + sig do + returns( + T.nilable( + T::Hash[ + Symbol, + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) + ] + ) + ) + end + attr_accessor :variables + + # Optional version of the prompt template. + sig { returns(T.nilable(String)) } + attr_accessor :version + + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). 
+ sig do + params( + id: String, + variables: + T.nilable( + T::Hash[ + Symbol, + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Responses::ResponseInputImage::OrHash, + OpenAI::Responses::ResponseInputFile::OrHash + ) + ] + ), + version: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the prompt template to use. + id:, + # Optional map of values to substitute in for variables in your prompt. The + # substitution values can either be strings, or other Response input types like + # images or files. + variables: nil, + # Optional version of the prompt template. + version: nil + ) + end + + sig do + override.returns( + { + id: String, + variables: + T.nilable( + T::Hash[ + Symbol, + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) + ] + ), + version: T.nilable(String) + } + ) + end + def to_hash + end + + # A text input to the model. + module Variable + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Responses::ResponseInputImage, + OpenAI::Responses::ResponseInputFile + ) + end + + sig do + override.returns( + T::Array[OpenAI::Responses::ResponsePrompt::Variable::Variants] + ) + end + def self.variants + end + end + end + end + end +end diff --git a/rbi/openai/resources/fine_tuning/jobs.rbi b/rbi/openai/resources/fine_tuning/jobs.rbi index e60f057d..4e823288 100644 --- a/rbi/openai/resources/fine_tuning/jobs.rbi +++ b/rbi/openai/resources/fine_tuning/jobs.rbi @@ -13,7 +13,7 @@ module OpenAI # Response includes details of the enqueued job including job status and the name # of the fine-tuned models once complete. # - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization) sig do params( model: @@ -57,7 +57,8 @@ module OpenAI # [preference](https://platform.openai.com/docs/api-reference/fine-tuning/preference-input) # format. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. training_file:, # The hyperparameters used for the fine-tuning job. This value is now deprecated @@ -94,7 +95,8 @@ module OpenAI # Your dataset must be formatted as a JSONL file. You must upload your file with # the purpose `fine-tune`. # - # See the [fine-tuning guide](https://platform.openai.com/docs/guides/fine-tuning) + # See the + # [fine-tuning guide](https://platform.openai.com/docs/guides/model-optimization) # for more details. validation_file: nil, request_options: {} @@ -103,7 +105,7 @@ module OpenAI # Get info about a fine-tuning job. 
# - # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning) + # [Learn more about fine-tuning](https://platform.openai.com/docs/guides/model-optimization) sig do params( fine_tuning_job_id: String, diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index c8440e47..f4f67d10 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -52,6 +52,9 @@ module OpenAI mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), @@ -91,6 +94,14 @@ module OpenAI model: nil, # The number of images to generate. Must be between 1 and 10. n: nil, + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + output_compression: nil, + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + output_format: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 02921b0e..c779d2c1 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -38,6 +38,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), reasoning: T.nilable(OpenAI::Reasoning::OrHash), service_tier: T.nilable( @@ -111,8 +112,7 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, - # Inserts a system (or developer) message as the first item in the model's - # context. + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -135,6 +135,9 @@ module OpenAI # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + prompt: nil, # **o-series models only** # # Configuration options for @@ -247,6 +250,7 @@ module OpenAI metadata: T.nilable(T::Hash[Symbol, String]), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), + prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), reasoning: T.nilable(OpenAI::Reasoning::OrHash), service_tier: T.nilable( @@ -324,8 +328,7 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, - # Inserts a system (or developer) message as the first item in the model's - # context. 
+ # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous # response will not be carried over to the next response. This makes it simple to @@ -348,6 +351,9 @@ module OpenAI # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). previous_response_id: nil, + # Reference to a prompt template and its variables. + # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). + prompt: nil, # **o-series models only** # # Configuration options for diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index e66a399d..9b3eb5f9 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -127,7 +127,7 @@ module OpenAI end end - type service_tier = :auto | :default | :flex + type service_tier = :auto | :default | :flex | :scale module ServiceTier extend OpenAI::Internal::Type::Enum @@ -135,6 +135,7 @@ module OpenAI AUTO: :auto DEFAULT: :default FLEX: :flex + SCALE: :scale def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 0e7acf36..e68150c9 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -272,7 +272,7 @@ module OpenAI end end - type service_tier = :auto | :default | :flex + type service_tier = :auto | :default | :flex | :scale module ServiceTier extend OpenAI::Internal::Type::Enum @@ -280,6 +280,7 @@ module OpenAI AUTO: :auto DEFAULT: :default FLEX: :flex + SCALE: :scale def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 0f3ad11e..e35205f3 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -280,7 +280,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::response_format] end - type service_tier = :auto | :default | :flex + type service_tier = :auto | :default | :flex | :scale module ServiceTier extend OpenAI::Internal::Type::Enum @@ -288,6 +288,7 @@ module OpenAI AUTO: :auto DEFAULT: :default FLEX: :flex + SCALE: :scale def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::service_tier] end diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 0fe6ec3d..ca2820e9 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -8,6 +8,8 @@ module OpenAI mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, + output_compression: Integer?, + output_format: OpenAI::Models::ImageEditParams::output_format?, quality: OpenAI::Models::ImageEditParams::quality?, response_format: OpenAI::Models::ImageEditParams::response_format?, size: OpenAI::Models::ImageEditParams::size?, @@ -33,6 +35,10 @@ module OpenAI attr_accessor n: Integer? + attr_accessor output_compression: Integer? + + attr_accessor output_format: OpenAI::Models::ImageEditParams::output_format? + attr_accessor quality: OpenAI::Models::ImageEditParams::quality? 
attr_accessor response_format: OpenAI::Models::ImageEditParams::response_format? @@ -50,6 +56,8 @@ module OpenAI ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageEditParams::output_format?, ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, @@ -64,6 +72,8 @@ module OpenAI mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, + output_compression: Integer?, + output_format: OpenAI::Models::ImageEditParams::output_format?, quality: OpenAI::Models::ImageEditParams::quality?, response_format: OpenAI::Models::ImageEditParams::response_format?, size: OpenAI::Models::ImageEditParams::size?, @@ -102,6 +112,18 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::ImageEditParams::model] end + type output_format = :png | :jpeg | :webp + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + JPEG: :jpeg + WEBP: :webp + + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::output_format] + end + type quality = :standard | :low | :medium | :high | :auto module Quality diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index d5658b8f..8825885e 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -7,7 +7,7 @@ module OpenAI created_at: Float, error: OpenAI::Responses::ResponseError?, incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, - instructions: String?, + instructions: OpenAI::Models::Responses::Response::instructions?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, object: :response, @@ -20,6 +20,7 @@ module OpenAI background: bool?, max_output_tokens: Integer?, previous_response_id: String?, + prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, @@ -38,7 +39,7 @@ module OpenAI attr_accessor incomplete_details: OpenAI::Responses::Response::IncompleteDetails? - attr_accessor instructions: String? + attr_accessor instructions: OpenAI::Models::Responses::Response::instructions? attr_accessor metadata: OpenAI::Models::metadata? @@ -64,6 +65,8 @@ module OpenAI attr_accessor previous_response_id: String? + attr_accessor prompt: OpenAI::Responses::ResponsePrompt? + attr_accessor reasoning: OpenAI::Reasoning? attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier? 
@@ -97,7 +100,7 @@ module OpenAI created_at: Float, error: OpenAI::Responses::ResponseError?, incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, - instructions: String?, + instructions: OpenAI::Models::Responses::Response::instructions?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, output: ::Array[OpenAI::Models::Responses::response_output_item], @@ -109,6 +112,7 @@ module OpenAI ?background: bool?, ?max_output_tokens: Integer?, ?previous_response_id: String?, + ?prompt: OpenAI::Responses::ResponsePrompt?, ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, @@ -124,7 +128,7 @@ module OpenAI created_at: Float, error: OpenAI::Responses::ResponseError?, incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, - instructions: String?, + instructions: OpenAI::Models::Responses::Response::instructions?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, object: :response, @@ -137,6 +141,7 @@ module OpenAI background: bool?, max_output_tokens: Integer?, previous_response_id: String?, + prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, @@ -178,6 +183,17 @@ module OpenAI end end + type instructions = + String | ::Array[OpenAI::Models::Responses::response_input_item] + + module Instructions + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::instructions] + + ResponseInputItemArray: OpenAI::Internal::Type::Converter + end + type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Responses::ToolChoiceTypes @@ -189,7 +205,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::tool_choice] end - type service_tier = :auto | :default | :flex + type service_tier = :auto | :default | :flex | :scale module ServiceTier extend OpenAI::Internal::Type::Enum @@ -197,6 +213,7 @@ module OpenAI AUTO: :auto DEFAULT: :default FLEX: :flex + SCALE: :scale def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 3050697d..28a6db94 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -12,6 +12,7 @@ module OpenAI metadata: OpenAI::Models::metadata?, parallel_tool_calls: bool?, previous_response_id: String?, + prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, @@ -47,6 +48,8 @@ module OpenAI attr_accessor previous_response_id: String? + attr_accessor prompt: OpenAI::Responses::ResponsePrompt? + attr_accessor reasoning: OpenAI::Reasoning? attr_accessor service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier? 
@@ -91,6 +94,7 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, + ?prompt: OpenAI::Responses::ResponsePrompt?, ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, @@ -114,6 +118,7 @@ module OpenAI metadata: OpenAI::Models::metadata?, parallel_tool_calls: bool?, previous_response_id: String?, + prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, @@ -135,7 +140,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::input] end - type service_tier = :auto | :default | :flex + type service_tier = :auto | :default | :flex | :scale module ServiceTier extend OpenAI::Internal::Type::Enum @@ -143,6 +148,7 @@ module OpenAI AUTO: :auto DEFAULT: :default FLEX: :flex + SCALE: :scale def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::service_tier] end diff --git a/sig/openai/models/responses/response_prompt.rbs b/sig/openai/models/responses/response_prompt.rbs new file mode 100644 index 00000000..0ccc20f8 --- /dev/null +++ b/sig/openai/models/responses/response_prompt.rbs @@ -0,0 +1,44 @@ +module OpenAI + module Models + module Responses + type response_prompt = + { + id: String, + variables: ::Hash[Symbol, OpenAI::Models::Responses::ResponsePrompt::variable]?, + version: String? + } + + class ResponsePrompt < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor variables: ::Hash[Symbol, OpenAI::Models::Responses::ResponsePrompt::variable]? + + attr_accessor version: String? + + def initialize: ( + id: String, + ?variables: ::Hash[Symbol, OpenAI::Models::Responses::ResponsePrompt::variable]?, + ?version: String? + ) -> void + + def to_hash: -> { + id: String, + variables: ::Hash[Symbol, OpenAI::Models::Responses::ResponsePrompt::variable]?, + version: String? 
+ } + + type variable = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Responses::ResponseInputImage + | OpenAI::Responses::ResponseInputFile + + module Variable + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponsePrompt::variable] + end + end + end + end +end diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index f441f385..43595baa 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -18,6 +18,8 @@ module OpenAI ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageEditParams::output_format?, ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 7db1e5e4..fdbdf256 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -13,6 +13,7 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, + ?prompt: OpenAI::Responses::ResponsePrompt?, ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, @@ -36,6 +37,7 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?parallel_tool_calls: bool?, ?previous_response_id: String?, + ?prompt: OpenAI::Responses::ResponsePrompt?, ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index fe1e2ad5..105b7ce6 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -16,7 +16,7 @@ def test_create_required_params created_at: Float, error: OpenAI::Responses::ResponseError | nil, incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, - instructions: String | nil, + instructions: OpenAI::Responses::Response::Instructions | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: OpenAI::ResponsesModel, object: Symbol, @@ -29,6 +29,7 @@ def test_create_required_params background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, + prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, @@ -53,7 +54,7 @@ def test_retrieve created_at: Float, error: OpenAI::Responses::ResponseError | nil, incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, - instructions: String | nil, + instructions: OpenAI::Responses::Response::Instructions | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: OpenAI::ResponsesModel, object: Symbol, @@ -66,6 +67,7 @@ def test_retrieve background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, + prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, @@ -98,7 +100,7 @@ def test_cancel created_at: Float, error: OpenAI::Responses::ResponseError | nil, 
incomplete_details: OpenAI::Responses::Response::IncompleteDetails | nil, - instructions: String | nil, + instructions: OpenAI::Responses::Response::Instructions | nil, metadata: ^(OpenAI::Internal::Type::HashOf[String]) | nil, model: OpenAI::ResponsesModel, object: Symbol, @@ -111,6 +113,7 @@ def test_cancel background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, + prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, From 824514bab0a07ece96015ddbab6cc57dd6f94707 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 16 Jun 2025 19:05:29 +0000 Subject: [PATCH 231/295] feat(api): manual updates --- .stats.yml | 2 +- .../permission_retrieve_response.rb | 85 ++++-------- .../fine_tuning/checkpoints/permissions.rb | 3 +- .../permission_retrieve_response.rbi | 121 ++++-------------- .../fine_tuning/checkpoints/permissions.rbi | 4 +- .../permission_retrieve_response.rbs | 69 +++------- .../fine_tuning/checkpoints/permissions.rbs | 2 +- .../checkpoints/permissions_test.rb | 18 ++- 8 files changed, 86 insertions(+), 218 deletions(-) diff --git a/.stats.yml b/.stats.yml index 57d0d6d4..c67f2762 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9e41d2d5471d2c28bff0d616f4476f5b0e6c541ef4cb51bdaaef5fdf5e13c8b2.yml openapi_spec_hash: 86f765e18d00e32cf2ce9db7ab84d946 -config_hash: fd2af1d5eff0995bb7dc02ac9a34851d +config_hash: dc5515e257676a27cb1ace1784aa92b3 diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb index 6ffbdf4d..e22c36d8 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -6,76 +6,41 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel - # @!attribute data + # @!attribute id + # The permission identifier, which can be referenced in the API endpoints. # - # @return [Array] - required :data, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] } + # @return [String] + required :id, String - # @!attribute has_more + # @!attribute created_at + # The Unix timestamp (in seconds) for when the permission was created. # - # @return [Boolean] - required :has_more, OpenAI::Internal::Type::Boolean + # @return [Integer] + required :created_at, Integer # @!attribute object + # The object type, which is always "checkpoint.permission". # - # @return [Symbol, :list] - required :object, const: :list + # @return [Symbol, :"checkpoint.permission"] + required :object, const: :"checkpoint.permission" - # @!attribute first_id + # @!attribute project_id + # The project identifier that the permission is for. 
# - # @return [String, nil] - optional :first_id, String, nil?: true + # @return [String] + required :project_id, String - # @!attribute last_id + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. # - # @return [String, nil] - optional :last_id, String, nil?: true - - # @!method initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list) - # @param data [Array] - # @param has_more [Boolean] - # @param first_id [String, nil] - # @param last_id [String, nil] - # @param object [Symbol, :list] - - class Data < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The permission identifier, which can be referenced in the API endpoints. - # - # @return [String] - required :id, String - - # @!attribute created_at - # The Unix timestamp (in seconds) for when the permission was created. - # - # @return [Integer] - required :created_at, Integer - - # @!attribute object - # The object type, which is always "checkpoint.permission". - # - # @return [Symbol, :"checkpoint.permission"] - required :object, const: :"checkpoint.permission" - - # @!attribute project_id - # The project identifier that the permission is for. - # - # @return [String] - required :project_id, String - - # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") - # The `checkpoint.permission` object represents a permission for a fine-tuned - # model checkpoint. - # - # @param id [String] The permission identifier, which can be referenced in the API endpoints. - # - # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. - # - # @param project_id [String] The project identifier that the permission is for. - # - # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". - end + # @param id [String] The permission identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. + # + # @param project_id [String] The project identifier that the permission is for. + # + # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". 
end end end diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index f0afec14..4fe26ec9 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -60,7 +60,7 @@ def create(fine_tuned_model_checkpoint, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams def retrieve(fine_tuned_model_checkpoint, params = {}) @@ -69,6 +69,7 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) method: :get, path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], query: parsed, + page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse, options: options ) diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi index 2501be7b..8690aad8 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi @@ -13,125 +13,56 @@ module OpenAI ) end - sig do - returns( - T::Array[ - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data - ] - ) - end - attr_accessor :data + # The permission identifier, which can be referenced in the API endpoints. + sig { returns(String) } + attr_accessor :id - sig { returns(T::Boolean) } - attr_accessor :has_more + # The Unix timestamp (in seconds) for when the permission was created. + sig { returns(Integer) } + attr_accessor :created_at + # The object type, which is always "checkpoint.permission". sig { returns(Symbol) } attr_accessor :object - sig { returns(T.nilable(String)) } - attr_accessor :first_id - - sig { returns(T.nilable(String)) } - attr_accessor :last_id + # The project identifier that the permission is for. + sig { returns(String) } + attr_accessor :project_id + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. sig do params( - data: - T::Array[ - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data::OrHash - ], - has_more: T::Boolean, - first_id: T.nilable(String), - last_id: T.nilable(String), + id: String, + created_at: Integer, + project_id: String, object: Symbol ).returns(T.attached_class) end def self.new( - data:, - has_more:, - first_id: nil, - last_id: nil, - object: :list + # The permission identifier, which can be referenced in the API endpoints. + id:, + # The Unix timestamp (in seconds) for when the permission was created. + created_at:, + # The project identifier that the permission is for. + project_id:, + # The object type, which is always "checkpoint.permission". 
+ object: :"checkpoint.permission" ) end sig do override.returns( { - data: - T::Array[ - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data - ], - has_more: T::Boolean, + id: String, + created_at: Integer, object: Symbol, - first_id: T.nilable(String), - last_id: T.nilable(String) + project_id: String } ) end def to_hash end - - class Data < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data, - OpenAI::Internal::AnyHash - ) - end - - # The permission identifier, which can be referenced in the API endpoints. - sig { returns(String) } - attr_accessor :id - - # The Unix timestamp (in seconds) for when the permission was created. - sig { returns(Integer) } - attr_accessor :created_at - - # The object type, which is always "checkpoint.permission". - sig { returns(Symbol) } - attr_accessor :object - - # The project identifier that the permission is for. - sig { returns(String) } - attr_accessor :project_id - - # The `checkpoint.permission` object represents a permission for a fine-tuned - # model checkpoint. - sig do - params( - id: String, - created_at: Integer, - project_id: String, - object: Symbol - ).returns(T.attached_class) - end - def self.new( - # The permission identifier, which can be referenced in the API endpoints. - id:, - # The Unix timestamp (in seconds) for when the permission was created. - created_at:, - # The project identifier that the permission is for. - project_id:, - # The object type, which is always "checkpoint.permission". - object: :"checkpoint.permission" - ) - end - - sig do - override.returns( - { - id: String, - created_at: Integer, - object: Symbol, - project_id: String - } - ) - end - def to_hash - end - end end end end diff --git a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi index be76789c..dae4366b 100644 --- a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi +++ b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -43,7 +43,9 @@ module OpenAI project_id: String, request_options: OpenAI::RequestOptions::OrHash ).returns( - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + OpenAI::Internal::CursorPage[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + ] ) end def retrieve( diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs index 54f9630a..ef545155 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -4,71 +4,34 @@ module OpenAI module Checkpoints type permission_retrieve_response = { - data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], - has_more: bool, - object: :list, - first_id: String?, - last_id: String? + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String } class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel - attr_accessor data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] + attr_accessor id: String - attr_accessor has_more: bool + attr_accessor created_at: Integer - attr_accessor object: :list + attr_accessor object: :"checkpoint.permission" - attr_accessor first_id: String? - - attr_accessor last_id: String? 
+ attr_accessor project_id: String def initialize: ( - data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], - has_more: bool, - ?first_id: String?, - ?last_id: String?, - ?object: :list + id: String, + created_at: Integer, + project_id: String, + ?object: :"checkpoint.permission" ) -> void def to_hash: -> { - data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], - has_more: bool, - object: :list, - first_id: String?, - last_id: String? + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String } - - type data = - { - id: String, - created_at: Integer, - object: :"checkpoint.permission", - project_id: String - } - - class Data < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor created_at: Integer - - attr_accessor object: :"checkpoint.permission" - - attr_accessor project_id: String - - def initialize: ( - id: String, - created_at: Integer, - project_id: String, - ?object: :"checkpoint.permission" - ) -> void - - def to_hash: -> { - id: String, - created_at: Integer, - object: :"checkpoint.permission", - project_id: String - } - end end end end diff --git a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs index f36dcbbb..011e1286 100644 --- a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs +++ b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs @@ -16,7 +16,7 @@ module OpenAI ?order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, ?project_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] def delete: ( String permission_id, diff --git a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb index 0ed0d4c1..60bc8e8f 100644 --- a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb +++ b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb @@ -35,16 +35,22 @@ def test_retrieve response = @openai.fine_tuning.checkpoints.permissions.retrieve("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + response => OpenAI::Internal::CursorPage end + row = response.to_enum.first + return if row.nil? 
+ assert_pattern do - response => { - data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data]), - has_more: OpenAI::Internal::Type::Boolean, + row => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, object: Symbol, - first_id: String | nil, - last_id: String | nil + project_id: String } end end From bab05324372a64646b2adb7e87b413cecd40fab0 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 00:09:19 +0000 Subject: [PATCH 232/295] chore(ci): enable for pull requests --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6026ce13..ffa789c2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,6 +7,10 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: From 3794c4ede8a7e6a6b25af2f11eefc777e77dbf6c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 14:28:04 +0000 Subject: [PATCH 233/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 6538ca91..6d78745c 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.8.0" + ".": "0.9.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 1316055a..48d1c60f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
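With PATCH 231 above, `permissions.retrieve` returns an `OpenAI::Internal::CursorPage` of flat `checkpoint.permission` records instead of a one-shot list envelope. A minimal sketch of consuming the new shape — assuming a client configured via `OPENAI_API_KEY`, and reusing the checkpoint ID from the test as a placeholder:

```ruby
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

# `retrieve` now returns a page object rather than a materialized list.
page = client.fine_tuning.checkpoints.permissions.retrieve("ft-AF1WoRqd3aJAHsqc9NY7iL8F")

# Pages enumerate lazily; auto-pagination requests follow-up pages on demand.
page.auto_paging_each do |permission|
  puts "#{permission.id} -> project #{permission.project_id}"
end
```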
 specs:
-    openai (0.8.0)
+    openai (0.9.0)
     connection_pool

 GEM
diff --git a/README.md b/README.md
index bb1d2721..672a19e6 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application

 ```ruby
-gem "openai", "~> 0.8.0"
+gem "openai", "~> 0.9.0"
 ```

diff --git a/lib/openai/version.rb b/lib/openai/version.rb
index 3d1c96c8..502435cb 100644
--- a/lib/openai/version.rb
+++ b/lib/openai/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module OpenAI
-  VERSION = "0.8.0"
+  VERSION = "0.9.0"
 end

From 16f13a9ce2a231a5de38d84b22b88322c6c9fa9a Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
 <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 18 Jun 2025 01:54:47 +0000
Subject: [PATCH 234/295] fix: issue where we cannot mutate arrays on base
 model derivatives

array properties are now always recursively coerced into the desired type upon
being set, instead of "almost always"

hash key names are no longer unnecessarily translated when creating base
models via hash coercion

errors are now stored and re-thrown instead of being re-computed on each
property access

fixed inconsistencies where sometimes `TypeError`s would be thrown instead of
`ArgumentError`s, and vice versa
---
 lib/openai/errors.rb                         |  22 ++++
 lib/openai/internal/type/array_of.rb         |   7 +-
 lib/openai/internal/type/base_model.rb       | 102 ++++++++++++-----
 lib/openai/internal/type/boolean.rb          |   8 +-
 lib/openai/internal/type/converter.rb        |  76 +++++++------
 lib/openai/internal/type/enum.rb             |  12 ++-
 lib/openai/internal/type/file_input.rb       |   7 +-
 lib/openai/internal/type/hash_of.rb          |   7 +-
 lib/openai/internal/type/union.rb            |  19 ++--
 lib/openai/internal/type/unknown.rb          |   8 +-
 rbi/openai/errors.rbi                        |  16 +++
 rbi/openai/internal/type/boolean.rbi         |   2 +
 rbi/openai/internal/type/converter.rbi       |  30 +++--
 rbi/openai/internal/type/union.rbi           |   5 +
 rbi/openai/internal/type/unknown.rbi         |   2 +
 sig/openai/errors.rbs                        |   9 ++
 sig/openai/internal/type/converter.rbs       |   8 +-
 test/openai/internal/type/base_model_test.rb |  69 +++++++----
 18 files changed, 299 insertions(+), 110 deletions(-)

diff --git a/lib/openai/errors.rb b/lib/openai/errors.rb
index 1e75256e..d7cd4e94 100644
--- a/lib/openai/errors.rb
+++ b/lib/openai/errors.rb
@@ -9,6 +9,28 @@ class Error < StandardError
   end

   class ConversionError < OpenAI::Errors::Error
+    # @return [StandardError, nil]
+    def cause = @cause.nil? ? super : @cause
+
+    # @api private
+    #
+    # @param on [Class]
+    # @param method [Symbol]
+    # @param target [Object]
+    # @param value [Object]
+    # @param cause [StandardError, nil]
+    def initialize(on:, method:, target:, value:, cause: nil)
+      cls = on.name.split("::").last
+
+      message = [
+        "Failed to parse #{cls}.#{method} from #{value.class} to #{target.inspect}.",
+        "To get the unparsed API response, use #{cls}[#{method.inspect}].",
+        cause && "Cause: #{cause.message}"
+      ].filter(&:itself).join(" ")
+
+      @cause = cause
+      super(message)
+    end
   end

   class APIError < OpenAI::Errors::Error
diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb
index 80606cca..f939a621 100644
--- a/lib/openai/internal/type/array_of.rb
+++ b/lib/openai/internal/type/array_of.rb
@@ -62,10 +62,14 @@ def hash = [self.class, item_type].hash
       #
       # @param state [Hash{Symbol=>Object}] .
# - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Array, Object] @@ -74,6 +78,7 @@ def coerce(value, state:) unless value.is_a?(Array) exactness[:no] += 1 + state[:error] = TypeError.new("#{value.class} can't be coerced into #{Array}") return value end diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 2ed6657f..f5bfdcd6 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -60,7 +60,7 @@ def fields [OpenAI::Internal::Type::Converter.type_info(type_info), type_info] end - setter = "#{name_sym}=" + setter = :"#{name_sym}=" api_name = info.fetch(:api_name, name_sym) nilable = info.fetch(:nil?, false) const = required && !nilable ? info.fetch(:const, OpenAI::Internal::OMIT) : OpenAI::Internal::OMIT @@ -77,30 +77,61 @@ def fields type_fn: type_fn } - define_method(setter) { @data.store(name_sym, _1) } + define_method(setter) do |value| + target = type_fn.call + state = OpenAI::Internal::Type::Converter.new_coerce_state(translate_names: false) + coerced = OpenAI::Internal::Type::Converter.coerce(target, value, state: state) + status = @coerced.store(name_sym, state.fetch(:error) || true) + stored = + case [target, status] + in [OpenAI::Internal::Type::Converter | Symbol, true] + coerced + else + value + end + @data.store(name_sym, stored) + end + # rubocop:disable Style/CaseEquality + # rubocop:disable Metrics/BlockLength define_method(name_sym) do target = type_fn.call - value = @data.fetch(name_sym) { const == OpenAI::Internal::OMIT ? nil : const } - state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - if (nilable || !required) && value.nil? - nil - else - OpenAI::Internal::Type::Converter.coerce( - target, - value, - state: state + + case @coerced[name_sym] + in true | false if OpenAI::Internal::Type::Converter === target + @data.fetch(name_sym) + in ::StandardError => e + raise OpenAI::Errors::ConversionError.new( + on: self.class, + method: __method__, + target: target, + value: @data.fetch(name_sym), + cause: e ) + else + Kernel.then do + value = @data.fetch(name_sym) { const == OpenAI::Internal::OMIT ? nil : const } + state = OpenAI::Internal::Type::Converter.new_coerce_state(translate_names: false) + if (nilable || !required) && value.nil? + nil + else + OpenAI::Internal::Type::Converter.coerce( + target, value, state: state + ) + end + rescue StandardError => e + raise OpenAI::Errors::ConversionError.new( + on: self.class, + method: __method__, + target: target, + value: value, + cause: e + ) + end end - rescue StandardError => e - cls = self.class.name.split("::").last - message = [ - "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}.", - "To get the unparsed API response, use #{cls}[#{__method__.inspect}].", - "Cause: #{e.message}" - ].join(" ") - raise OpenAI::Errors::ConversionError.new(message) end + # rubocop:enable Metrics/BlockLength + # rubocop:enable Style/CaseEquality end # @api private @@ -200,23 +231,28 @@ class << self # # @param state [Hash{Symbol=>Object}] . 
# - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [self, Object] def coerce(value, state:) exactness = state.fetch(:exactness) - if value.is_a?(self.class) + if value.is_a?(self) exactness[:yes] += 1 return value end unless (val = OpenAI::Internal::Util.coerce_hash(value)).is_a?(Hash) exactness[:no] += 1 + state[:error] = TypeError.new("#{value.class} can't be coerced into #{Hash}") return value end exactness[:yes] += 1 @@ -224,13 +260,15 @@ def coerce(value, state:) keys = val.keys.to_set instance = new data = instance.to_h + status = instance.instance_variable_get(:@coerced) # rubocop:disable Metrics/BlockLength fields.each do |name, field| mode, required, target = field.fetch_values(:mode, :required, :type) api_name, nilable, const = field.fetch_values(:api_name, :nilable, :const) + src_name = state.fetch(:translate_names) ? api_name : name - unless val.key?(api_name) + unless val.key?(src_name) if required && mode != :dump && const == OpenAI::Internal::OMIT exactness[nilable ? :maybe : :no] += 1 else @@ -239,9 +277,10 @@ def coerce(value, state:) next end - item = val.fetch(api_name) - keys.delete(api_name) + item = val.fetch(src_name) + keys.delete(src_name) + state[:error] = nil converted = if item.nil? && (nilable || !required) exactness[nilable ? :yes : :maybe] += 1 @@ -255,6 +294,8 @@ def coerce(value, state:) item end end + + status.store(name, state.fetch(:error) || true) data.store(name, converted) end # rubocop:enable Metrics/BlockLength @@ -430,7 +471,18 @@ def to_yaml(*a) = OpenAI::Internal::Type::Converter.dump(self.class, self).to_ya # Create a new instance of a model. # # @param data [Hash{Symbol=>Object}, self] - def initialize(data = {}) = (@data = OpenAI::Internal::Util.coerce_hash!(data).to_h) + def initialize(data = {}) + @data = {} + @coerced = {} + OpenAI::Internal::Util.coerce_hash!(data).each do + if self.class.known_fields.key?(_1) + public_send(:"#{_1}=", _2) + else + @data.store(_1, _2) + @coerced.store(_1, false) + end + end + end class << self # @api private diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index 23c4d1f9..9c903929 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -31,14 +31,20 @@ def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::Bool class << self # @api private # + # Coerce value to Boolean if possible, otherwise return the original value. + # # @param value [Boolean, Object] # # @param state [Hash{Symbol=>Object}] . # - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Boolean, Object] diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 29cee8df..2784b03b 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -15,10 +15,14 @@ module Converter # # @param state [Hash{Symbol=>Object}] . 
# - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Object] @@ -94,6 +98,21 @@ def type_info(spec) end end + # @api private + # + # @param translate_names [Boolean] + # + # @return [Hash{Symbol=>Object}] + def new_coerce_state(translate_names: true) + { + translate_names: translate_names, + strictness: true, + exactness: {yes: 0, no: 0, maybe: 0}, + error: nil, + branched: 0 + } + end + # @api private # # Based on `target`, transform `value` into `target`, to the extent possible: @@ -110,14 +129,11 @@ def type_info(spec) # # @param value [Object] # - # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: + # @param state [Hash{Symbol=>Object}] The `strictness` is one of `true`, `false`. This informs the coercion strategy + # when we have to decide between multiple possible conversion targets: # # - `true`: the conversion must be exact, with minimum coercion. # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. # # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For # any given conversion attempt, the exactness will be updated based on how closely @@ -130,21 +146,20 @@ def type_info(spec) # # See implementation below for more details. # - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Object] - def coerce( - target, - value, - state: {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - ) - # rubocop:disable Lint/SuppressedException + def coerce(target, value, state: OpenAI::Internal::Type::Converter.new_coerce_state) # rubocop:disable Metrics/BlockNesting - strictness, exactness = state.fetch_values(:strictness, :exactness) + exactness = state.fetch(:exactness) case target in OpenAI::Internal::Type::Converter @@ -160,29 +175,26 @@ def coerce( exactness[value.nil? ? :yes : :maybe] += 1 return nil in -> { _1 <= Integer } - if value.is_a?(Integer) + case value + in Integer exactness[:yes] += 1 return value - elsif strictness == :strong && Integer(value, exception: false) != value - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise value.is_a?(Numeric) ? 
ArgumentError.new(message) : TypeError.new(message) else Kernel.then do return Integer(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError + rescue ArgumentError, TypeError => e + state[:error] = e end end in -> { _1 <= Float } if value.is_a?(Numeric) exactness[:yes] += 1 return Float(value) - elsif strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) else Kernel.then do return Float(value).tap { exactness[:maybe] += 1 } - rescue ArgumentError, TypeError + rescue ArgumentError, TypeError => e + state[:error] = e end end in -> { _1 <= String } @@ -194,16 +206,13 @@ def coerce( exactness[:yes] += 1 return value.string else - if strictness == :strong - message = "no implicit conversion of #{value.class} into #{target.inspect}" - raise TypeError.new(message) - end + state[:error] = TypeError.new("#{value.class} can't be coerced into #{String}") end in -> { _1 <= Date || _1 <= Time } Kernel.then do return target.parse(value).tap { exactness[:yes] += 1 } rescue ArgumentError, TypeError => e - raise e if strictness == :strong + state[:error] = e end in -> { _1 <= StringIO } if value.is_a?(String) exactness[:yes] += 1 @@ -221,10 +230,8 @@ def coerce( return value end else - if strictness == :strong - message = "cannot convert non-matching #{value.class} into #{target.inspect}" - raise ArgumentError.new(message) - end + message = "cannot convert non-matching #{value.class} into #{target.inspect}" + state[:error] = ArgumentError.new(message) end else end @@ -232,7 +239,6 @@ def coerce( exactness[:no] += 1 value # rubocop:enable Metrics/BlockNesting - # rubocop:enable Lint/SuppressedException end # @api private @@ -277,8 +283,10 @@ def inspect(target, depth:) define_sorbet_constant!(:CoerceState) do T.type_alias do { - strictness: T.any(T::Boolean, Symbol), + translate_names: T::Boolean, + strictness: T::Boolean, exactness: {yes: Integer, no: Integer, maybe: Integer}, + error: T::Class[StandardError], branched: Integer } end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 71915618..3ea232d9 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -81,10 +81,14 @@ def hash = values.to_set.hash # # @param state [Hash{Symbol=>Object}] . # - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Symbol, Object] @@ -95,8 +99,12 @@ def coerce(value, state:) if values.include?(val) exactness[:yes] += 1 val + elsif values.first&.class == val.class + exactness[:maybe] += 1 + value else - exactness[values.first&.class == val.class ? :maybe : :no] += 1 + exactness[:no] += 1 + state[:error] = TypeError.new("#{value.class} can't be coerced into #{self}") value end end diff --git a/lib/openai/internal/type/file_input.rb b/lib/openai/internal/type/file_input.rb index e1e948f3..a5dcdef7 100644 --- a/lib/openai/internal/type/file_input.rb +++ b/lib/openai/internal/type/file_input.rb @@ -45,10 +45,14 @@ class << self # # @param state [Hash{Symbol=>Object}] . 
# - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [StringIO, Object] @@ -62,6 +66,7 @@ def coerce(value, state:) exactness[:yes] += 1 value else + state[:error] = TypeError.new("#{value.class} can't be coerced into #{StringIO}") exactness[:no] += 1 value end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 9dcf259b..1f178fcd 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -77,10 +77,14 @@ def hash = [self.class, item_type].hash # # @param state [Hash{Symbol=>Object}] . # - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Hash{Symbol=>Object}, Object] @@ -89,6 +93,7 @@ def coerce(value, state:) unless value.is_a?(Hash) exactness[:no] += 1 + state[:error] = TypeError.new("#{value.class} can't be coerced into #{Hash}") return value end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 8281b3e1..5b00bc39 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -140,14 +140,23 @@ def hash = variants.hash # @api private # + # Tries to efficiently coerce the given value to one of the known variants. + # + # If the value cannot match any of the known variants, the coercion is considered + # non-viable and returns the original value. + # # @param value [Object] # # @param state [Hash{Symbol=>Object}] . # - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Object] @@ -158,7 +167,6 @@ def coerce(value, state:) strictness = state.fetch(:strictness) exactness = state.fetch(:exactness) - state[:strictness] = strictness == :strong ? true : strictness alternatives = [] known_variants.each do |_, variant_fn| @@ -177,13 +185,10 @@ def coerce(value, state:) end end - case alternatives.sort_by(&:first) + case alternatives.sort_by!(&:first) in [] exactness[:no] += 1 - if strictness == :strong - message = "no possible conversion of #{value.class} into a variant of #{target.inspect}" - raise ArgumentError.new(message) - end + state[:error] = ArgumentError.new("no matching variant for #{value.inspect}") value in [[_, exact, coerced], *] exact.each { exactness[_1] += _2 } diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index bc8b7a95..76f3a043 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -33,14 +33,20 @@ def self.==(other) = other.is_a?(Class) && other <= OpenAI::Internal::Type::Unkn class << self # @api private # + # No coercion needed for Unknown type. + # # @param value [Object] # # @param state [Hash{Symbol=>Object}] . 
# - # @option state [Boolean, :strong] :strictness + # @option state [Boolean] :translate_names + # + # @option state [Boolean] :strictness # # @option state [Hash{Symbol=>Object}] :exactness # + # @option state [Class] :error + # # @option state [Integer] :branched # # @return [Object] diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 4ade8ef1..04c913cb 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -8,6 +8,22 @@ module OpenAI end class ConversionError < OpenAI::Errors::Error + sig { returns(T.nilable(StandardError)) } + def cause + end + + # @api private + sig do + params( + on: T::Class[StandardError], + method: Symbol, + target: T.anything, + value: T.anything, + cause: T.nilable(StandardError) + ).returns(T.attached_class) + end + def self.new(on:, method:, target:, value:, cause: nil) + end end class APIError < OpenAI::Errors::Error diff --git a/rbi/openai/internal/type/boolean.rbi b/rbi/openai/internal/type/boolean.rbi index 73bf95cb..8324ff24 100644 --- a/rbi/openai/internal/type/boolean.rbi +++ b/rbi/openai/internal/type/boolean.rbi @@ -22,6 +22,8 @@ module OpenAI class << self # @api private + # + # Coerce value to Boolean if possible, otherwise return the original value. sig do override .params( diff --git a/rbi/openai/internal/type/converter.rbi b/rbi/openai/internal/type/converter.rbi index cbace65d..5996a904 100644 --- a/rbi/openai/internal/type/converter.rbi +++ b/rbi/openai/internal/type/converter.rbi @@ -15,12 +15,14 @@ module OpenAI CoerceState = T.type_alias do { - strictness: T.any(T::Boolean, Symbol), + translate_names: T::Boolean, + strictness: T::Boolean, exactness: { yes: Integer, no: Integer, maybe: Integer }, + error: T::Class[StandardError], branched: Integer } end @@ -84,6 +86,15 @@ module OpenAI def self.type_info(spec) end + # @api private + sig do + params(translate_names: T::Boolean).returns( + OpenAI::Internal::Type::Converter::CoerceState + ) + end + def self.new_coerce_state(translate_names: true) + end + # @api private # # Based on `target`, transform `value` into `target`, to the extent possible: @@ -105,14 +116,11 @@ module OpenAI def self.coerce( target, value, - # The `strictness` is one of `true`, `false`, or `:strong`. This informs the - # coercion strategy when we have to decide between multiple possible conversion - # targets: + # The `strictness` is one of `true`, `false`. This informs the coercion strategy + # when we have to decide between multiple possible conversion targets: # # - `true`: the conversion must be exact, with minimum coercion. # - `false`: the conversion can be approximate, with some coercion. - # - `:strong`: the conversion must be exact, with no coercion, and raise an error - # if not possible. # # The `exactness` is `Hash` with keys being one of `yes`, `no`, or `maybe`. For # any given conversion attempt, the exactness will be updated based on how closely @@ -124,15 +132,7 @@ module OpenAI # - `no`: the value cannot be converted to the target type. # # See implementation below for more details. - state: { - strictness: true, - exactness: { - yes: 0, - no: 0, - maybe: 0 - }, - branched: 0 - } + state: OpenAI::Internal::Type::Converter.new_coerce_state ) end diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi index 7c6958c1..fee27fc2 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -78,6 +78,11 @@ module OpenAI end # @api private + # + # Tries to efficiently coerce the given value to one of the known variants. 
+ # + # If the value cannot match any of the known variants, the coercion is considered + # non-viable and returns the original value. sig do override .params( diff --git a/rbi/openai/internal/type/unknown.rbi b/rbi/openai/internal/type/unknown.rbi index 3b5d1139..9f4045b3 100644 --- a/rbi/openai/internal/type/unknown.rbi +++ b/rbi/openai/internal/type/unknown.rbi @@ -22,6 +22,8 @@ module OpenAI class << self # @api private + # + # No coercion needed for Unknown type. sig do override .params( diff --git a/sig/openai/errors.rbs b/sig/openai/errors.rbs index 19583ccb..6bbd9f41 100644 --- a/sig/openai/errors.rbs +++ b/sig/openai/errors.rbs @@ -5,6 +5,15 @@ module OpenAI end class ConversionError < OpenAI::Errors::Error + def cause: -> StandardError? + + def initialize: ( + on: Class, + method: Symbol, + target: top, + value: top, + ?cause: StandardError? + ) -> void end class APIError < OpenAI::Errors::Error diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs index 5470abbf..552a5330 100644 --- a/sig/openai/internal/type/converter.rbs +++ b/sig/openai/internal/type/converter.rbs @@ -8,8 +8,10 @@ module OpenAI type coerce_state = { - strictness: bool | :strong, + translate_names: bool, + strictness: bool, exactness: { yes: Integer, no: Integer, maybe: Integer }, + error: Class, branched: Integer } @@ -37,6 +39,10 @@ module OpenAI | OpenAI::Internal::Type::Converter::input spec ) -> (^-> top) + def self.new_coerce_state: ( + ?translate_names: bool + ) -> OpenAI::Internal::Type::Converter::coerce_state + def self.coerce: ( OpenAI::Internal::Type::Converter::input target, top value, diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index f29a04c2..f8d54669 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -66,7 +66,7 @@ def test_coerce cases.each do |lhs, rhs| target, input = lhs exactness, expect = rhs - state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + state = OpenAI::Internal::Type::Converter.new_coerce_state assert_pattern do OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness @@ -108,18 +108,19 @@ def test_dump def test_coerce_errors cases = { - [Integer, "one"] => TypeError, - [Float, "one"] => TypeError, + [Integer, "one"] => ArgumentError, + [Float, "one"] => ArgumentError, [String, Time] => TypeError, [Date, "one"] => ArgumentError, [Time, "one"] => ArgumentError } - cases.each do - target, input = _1 - state = {strictness: :strong, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} - assert_raises(_2) do - OpenAI::Internal::Type::Converter.coerce(target, input, state: state) + cases.each do |testcase, expect| + target, input = testcase + state = OpenAI::Internal::Type::Converter.new_coerce_state + OpenAI::Internal::Type::Converter.coerce(target, input, state: state) + assert_pattern do + state => {error: ^expect} end end end @@ -217,7 +218,7 @@ def test_coerce cases.each do |lhs, rhs| target, input = lhs exactness, expect = rhs - state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + state = OpenAI::Internal::Type::Converter.new_coerce_state assert_pattern do OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? 
}.to_h => ^exactness @@ -291,7 +292,7 @@ def test_coerce cases.each do |lhs, rhs| target, input = lhs exactness, expect = rhs - state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + state = OpenAI::Internal::Type::Converter.new_coerce_state assert_pattern do OpenAI::Internal::Type::Converter.coerce(target, input, state: state) => ^expect state.fetch(:exactness).filter { _2.nonzero? }.to_h => ^exactness @@ -340,6 +341,7 @@ class M5 < OpenAI::Internal::Type::BaseModel class M6 < M1 required :a, OpenAI::Internal::Type::ArrayOf[M6] + optional :b, M6 end def test_coerce @@ -365,13 +367,14 @@ def test_coerce [M5, {d: "d"}] => [{yes: 3}, {d: :d}], [M5, {d: nil}] => [{yes: 2, no: 1}, {d: nil}], - [M6, {a: [{a: []}]}] => [{yes: 4}, -> { _1 in {a: [M6]} }] + [M6, {a: [{a: []}]}] => [{yes: 6}, -> { _1 in {a: [M6]} }], + [M6, {b: {a: []}}] => [{yes: 4, no: 1}, -> { _1 in {b: M6} }] } cases.each do |lhs, rhs| target, input = lhs exactness, expect = rhs - state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + state = OpenAI::Internal::Type::Converter.new_coerce_state assert_pattern do coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) @@ -410,20 +413,26 @@ def test_dump def test_accessors cases = { - M2.new({a: "1990-09-19", b: "1"}) => {a: Time.new(1990, 9, 19), b: TypeError}, - M2.new(a: "one", b: "one") => {a: ArgumentError, b: TypeError}, - M2.new(a: nil, b: 2.0) => {a: TypeError}, - M2.new(a: nil, b: 2.2) => {a: TypeError, b: ArgumentError}, + M2.new({a: "1990-09-19", b: "1"}) => [{a: "1990-09-19", b: "1"}, {a: Time.new(1990, 9, 19), b: 1}], + M2.new(a: "one", b: "one") => [{a: "one", b: "one"}, {a: ArgumentError, b: ArgumentError}], + M2.new(a: nil, b: 2.0) => [{a: nil, b: 2.0}, {a: TypeError}], + M2.new(a: nil, b: 2.2) => [{a: nil, b: 2.2}, {a: TypeError, b: 2}], - M3.new => {d: :d}, - M3.new(d: 1) => {d: ArgumentError}, + M3.new => [{}, {d: :d}], + M3.new(d: 1) => [{d: 1}, {d: ArgumentError}], - M5.new => {c: :c, d: :d} + M5.new => [{}, {c: :c, d: :d}] } cases.each do target = _1 - _2.each do |accessor, expect| + data, attributes = _2 + + assert_pattern do + target.to_h => ^data + end + + attributes.each do |accessor, expect| case expect in Class if expect <= StandardError tap do @@ -438,6 +447,24 @@ def test_accessors end end end + + def test_inplace_modification + m1 = M6.new(a: []) + m1.a << M6.new(a: []) + + m2 = M6.new(b: M6.new(a: [])) + m2.b.a << M6.new(a: []) + + m3 = M6.new(a: []) + m4 = M6.new(b: m3) + m3.a << M6.new(a: []) + + assert_pattern do + m1 => {a: [{a: []}]} + m2 => {b: {a: [{a: []}]}} + m4 => {b: {a: [{a: []}]}} + end + end end class OpenAI::Test::UnionTest < Minitest::Test @@ -555,7 +582,7 @@ def test_coerce cases.each do |lhs, rhs| target, input = lhs exactness, branched, expect = rhs - state = {strictness: true, exactness: {yes: 0, no: 0, maybe: 0}, branched: 0} + state = OpenAI::Internal::Type::Converter.new_coerce_state assert_pattern do coerced = OpenAI::Internal::Type::Converter.coerce(target, input, state: state) assert_equal(coerced, coerced) From 4c6852ef2a6493fb55ef03375236fe68751f34d9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 20 Jun 2025 20:07:00 +0000 Subject: [PATCH 235/295] feat(api): make model and inputs not required to create response --- .stats.yml | 4 +- .../responses/response_create_params.rb | 58 +++---- lib/openai/resources/responses.rb | 24 +-- 
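The base-model fix in PATCH 234 above is easiest to see with a throwaway model. This is a sketch, not SDK code: `Widget` is a hypothetical class defined with the same DSL the generated models use, mirroring the patterns in `base_model_test.rb`:

```ruby
require "openai"

# Hypothetical model class; real SDK models are generated with the same DSL.
class Widget < OpenAI::Internal::Type::BaseModel
  required :name, String
  optional :children, OpenAI::Internal::Type::ArrayOf[Widget]
end

root = Widget.new(name: "root", children: [])

# Array attributes are now recursively coerced when assigned, so mutating
# them in place works instead of leaving stale, uncoerced data behind.
root.children << Widget.new(name: "leaf")

# A failed coercion is recorded at assignment and re-raised on access as
# OpenAI::Errors::ConversionError; the raw value stays reachable by subscript.
bad = Widget.new(name: 123)
begin
  bad.name
rescue OpenAI::Errors::ConversionError => e
  warn e.message # includes the stored cause
  p bad[:name]   # => 123, the unparsed value
end
```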
.../responses/response_create_params.rbi | 143 ++++++++++-------- rbi/openai/resources/responses.rbi | 92 +++++------ .../responses/response_create_params.rbs | 28 ++-- sig/openai/resources/responses.rbs | 8 +- test/openai/resources/responses_test.rb | 4 +- 8 files changed, 196 insertions(+), 165 deletions(-) diff --git a/.stats.yml b/.stats.yml index c67f2762..4c6ee8cc 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9e41d2d5471d2c28bff0d616f4476f5b0e6c541ef4cb51bdaaef5fdf5e13c8b2.yml -openapi_spec_hash: 86f765e18d00e32cf2ce9db7ab84d946 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f411a68f272b8be0ab0c266043da33228687b9b2d76896724e3cef797de9563d.yml +openapi_spec_hash: 89bf866ea95ecfb3d76c8833237047d6 config_hash: dc5515e257676a27cb1ace1784aa92b3 diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 85db66cc..117716ed 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -10,30 +10,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute input - # Text, image, or file inputs to the model, used to generate a response. - # - # Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - # - # @return [String, Array] - required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } - - # @!attribute model - # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - # - # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - required :model, union: -> { OpenAI::ResponsesModel } - # @!attribute background # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). @@ -63,6 +39,20 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }, nil?: true + # @!attribute input + # Text, image, or file inputs to the model, used to generate a response. 
+ # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + # + # @return [String, Array, nil] + optional :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } + # @!attribute instructions # A system (or developer) message inserted into the model's context. # @@ -92,6 +82,16 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + # @!attribute model + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. + # + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel, nil] + optional :model, union: -> { OpenAI::ResponsesModel } + # @!attribute parallel_tool_calls # Whether to allow the model to run tool calls in parallel. # @@ -226,24 +226,24 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. - # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI - # # @param background [Boolean, nil] Whether to run the model response in the background. # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI + # # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 32d603fd..230d44c9 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,22 +23,22 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) - # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. - # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to @@ -70,7 +70,7 @@ class Responses # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseCreateParams - def create(params) + def create(params = {}) parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params) if parsed[:stream] message = "Please use `#stream_raw` for the streaming use case." @@ -102,22 +102,22 @@ def create(params) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) - # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. - # - # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # # @param parallel_tool_calls [Boolean, nil] Whether to allow the model to run tool calls in parallel. # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to @@ -149,7 +149,7 @@ def create(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams - def stream_raw(params) + def stream_raw(params = {}) parsed, options = OpenAI::Responses::ResponseCreateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#create` for the non-streaming use case." diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 8063c9c6..da0022e2 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -15,36 +15,6 @@ module OpenAI ) end - # Text, image, or file inputs to the model, used to generate a response. - # - # Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - sig do - returns(OpenAI::Responses::ResponseCreateParams::Input::Variants) - end - attr_accessor :input - - # Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - sig do - returns( - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ) - ) - end - attr_accessor :model - # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). sig { returns(T.nilable(T::Boolean)) } @@ -72,6 +42,29 @@ module OpenAI end attr_accessor :include + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + sig do + returns( + T.nilable(OpenAI::Responses::ResponseCreateParams::Input::Variants) + ) + end + attr_reader :input + + sig do + params( + input: OpenAI::Responses::ResponseCreateParams::Input::Variants + ).void + end + attr_writer :input + # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous @@ -95,6 +88,36 @@ module OpenAI sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. + sig do + returns( + T.nilable( + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ) + ) + ) + end + attr_reader :model + + sig do + params( + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ) + ).void + end + attr_writer :model + # Whether to allow the model to run tool calls in parallel. sig { returns(T.nilable(T::Boolean)) } attr_accessor :parallel_tool_calls @@ -291,21 +314,21 @@ module OpenAI sig do params( - input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), @@ -346,22 +369,6 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text, image, or file inputs to the model, used to generate a response. 
- # - # Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - input:, - # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - model:, # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, @@ -381,6 +388,16 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + input: nil, # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous @@ -398,6 +415,12 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. + model: nil, # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, # The unique ID of the previous response to the model. Use this to create @@ -487,21 +510,21 @@ module OpenAI sig do override.returns( { - input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt), diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index c779d2c1..2e7db0e6 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -21,21 +21,21 @@ module OpenAI # your own data as input for the model's response. 
sig do params( - input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), @@ -77,22 +77,6 @@ module OpenAI ).returns(OpenAI::Responses::Response) end def create( - # Text, image, or file inputs to the model, used to generate a response. - # - # Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - input:, - # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - model:, # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, @@ -112,6 +96,16 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + input: nil, # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous @@ -129,6 +123,12 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. + model: nil, # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, # The unique ID of the previous response to the model. Use this to create @@ -233,21 +233,21 @@ module OpenAI # your own data as input for the model's response. 
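The `stream_raw` signature below mirrors the same reordering. A hedged streaming sketch; the delta event class name and the `Enumerable` behaviour of the returned `OpenAI::Internal::Stream` are assumptions based on this SDK's conventions:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    stream = client.responses.stream_raw(
      model: "gpt-4o",
      input: "Stream a haiku about Ruby."
    )

    stream.each do |event|
      case event
      when OpenAI::Models::Responses::ResponseTextDeltaEvent
        print event.delta # incremental output text
      end
    end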
sig do params( - input: OpenAI::Responses::ResponseCreateParams::Input::Variants, - model: - T.any( - String, - OpenAI::ChatModel::OrSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol - ), background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), + model: + T.any( + String, + OpenAI::ChatModel::OrSymbol, + OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol + ), parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), @@ -293,22 +293,6 @@ module OpenAI ) end def stream_raw( - # Text, image, or file inputs to the model, used to generate a response. - # - # Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Image inputs](https://platform.openai.com/docs/guides/images) - # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) - # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) - # - [Function calling](https://platform.openai.com/docs/guides/function-calling) - input:, - # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a - # wide range of models with different capabilities, performance characteristics, - # and price points. Refer to the - # [model guide](https://platform.openai.com/docs/models) to browse and compare - # available models. - model:, # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, @@ -328,6 +312,16 @@ module OpenAI # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. include: nil, + # Text, image, or file inputs to the model, used to generate a response. + # + # Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Image inputs](https://platform.openai.com/docs/guides/images) + # - [File inputs](https://platform.openai.com/docs/guides/pdf-files) + # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) + # - [Function calling](https://platform.openai.com/docs/guides/function-calling) + input: nil, # A system (or developer) message inserted into the model's context. # # When using along with `previous_response_id`, the instructions from a previous @@ -345,6 +339,12 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, + # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a + # wide range of models with different capabilities, performance characteristics, + # and price points. Refer to the + # [model guide](https://platform.openai.com/docs/models) to browse and compare + # available models. + model: nil, # Whether to allow the model to run tool calls in parallel. parallel_tool_calls: nil, # The unique ID of the previous response to the model. 
Use this to create diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 28a6db94..faedd8f8 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -3,13 +3,13 @@ module OpenAI module Responses type response_create_params = { - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::responses_model, background: bool?, include: ::Array[OpenAI::Models::Responses::response_includable]?, + input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, max_output_tokens: Integer?, metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::responses_model, parallel_tool_calls: bool?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, @@ -30,20 +30,28 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor input: OpenAI::Models::Responses::ResponseCreateParams::input - - attr_accessor model: OpenAI::Models::responses_model - attr_accessor background: bool? attr_accessor include: ::Array[OpenAI::Models::Responses::response_includable]? + attr_reader input: OpenAI::Models::Responses::ResponseCreateParams::input? + + def input=: ( + OpenAI::Models::Responses::ResponseCreateParams::input + ) -> OpenAI::Models::Responses::ResponseCreateParams::input + attr_accessor instructions: String? attr_accessor max_output_tokens: Integer? attr_accessor metadata: OpenAI::Models::metadata? + attr_reader model: OpenAI::Models::responses_model? + + def model=: ( + OpenAI::Models::responses_model + ) -> OpenAI::Models::responses_model + attr_accessor parallel_tool_calls: bool? attr_accessor previous_response_id: String? 
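The `dump_request` guards earlier in this patch redirect callers who pick the wrong method for their streaming mode. A sketch of that failure path; the raised error class is an assumption, since only the message text appears in the diff:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    begin
      client.responses.create(model: "gpt-4o", input: "hi", stream: true)
    rescue ArgumentError => e # assumed error class
      warn e.message # "Please use `#stream_raw` for the streaming use case."
    end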
@@ -85,13 +93,13 @@ module OpenAI def user=: (String) -> String def initialize: ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::responses_model, ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, @@ -109,13 +117,13 @@ module OpenAI ) -> void def to_hash: -> { - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::responses_model, background: bool?, include: ::Array[OpenAI::Models::Responses::response_includable]?, + input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, max_output_tokens: Integer?, metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::responses_model, parallel_tool_calls: bool?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index fdbdf256..512efc12 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -4,13 +4,13 @@ module OpenAI attr_reader input_items: OpenAI::Resources::Responses::InputItems def create: ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::responses_model, ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, @@ -28,13 +28,13 @@ module OpenAI ) -> OpenAI::Responses::Response def stream_raw: ( - input: OpenAI::Models::Responses::ResponseCreateParams::input, - model: OpenAI::Models::responses_model, ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, + ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, + ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 105b7ce6..f760ae47 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -3,8 +3,8 @@ require_relative "../test_helper" class OpenAI::Test::Resources::ResponsesTest < OpenAI::Test::ResourceTest - def test_create_required_params - response = @openai.responses.create(input: "string", model: :"gpt-4o") + def test_create + response = @openai.responses.create assert_pattern do response => OpenAI::Responses::Response From 7bedfeeb949dc04b4ac2cf252669da8a5516fae4 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 23 Jun 2025 16:50:05 +0000 Subject: [PATCH 236/295] fix(internal): fix: should publish to ruby gems when a release is created --- .github/workflows/create-releases.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/create-releases.yml 
b/.github/workflows/create-releases.yml index 6ae563b0..211eea8e 100644 --- a/.github/workflows/create-releases.yml +++ b/.github/workflows/create-releases.yml @@ -22,6 +22,15 @@ jobs: repo: ${{ github.event.repository.full_name }} stainless-api-key: ${{ secrets.STAINLESS_API_KEY }} + - name: Publish to RubyGems.org + if: ${{ steps.release.outputs.releases_created }} + run: | + bash ./bin/publish-gem + env: + # `RUBYGEMS_HOST` is only required for private gem repositories, not https://rubygems.org + RUBYGEMS_HOST: ${{ secrets.OPENAI_RUBYGEMS_HOST || secrets.RUBYGEMS_HOST }} + GEM_HOST_API_KEY: ${{ secrets.OPENAI_GEM_HOST_API_KEY || secrets.GEM_HOST_API_KEY }} + - name: Update RubyDocs if: ${{ steps.release.outputs.releases_created }} run: | From 2c1a1d62debdda41adc48a64e6aeb7a769e1f7d1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 23 Jun 2025 18:18:28 +0000 Subject: [PATCH 237/295] feat(api): update api shapes for usage and code interpreter --- .stats.yml | 6 +- .../models/audio/speech_create_params.rb | 25 +- lib/openai/models/audio/transcription.rb | 119 +++++++++- .../audio/transcription_text_done_event.rb | 81 ++++++- .../models/audio/transcription_verbose.rb | 32 ++- .../permission_retrieve_response.rb | 85 +++++-- ..._code_interpreter_call_code_delta_event.rb | 25 +- ...e_code_interpreter_call_code_done_event.rb | 24 +- ...e_code_interpreter_call_completed_event.rb | 21 +- ...code_interpreter_call_in_progress_event.rb | 21 +- ...ode_interpreter_call_interpreting_event.rb | 21 +- .../response_code_interpreter_tool_call.rb | 127 ++++------ .../models/responses/response_output_text.rb | 20 +- .../models/responses/response_stream_event.rb | 4 +- lib/openai/resources/audio/speech.rb | 4 +- .../fine_tuning/checkpoints/permissions.rb | 3 +- .../models/audio/speech_create_params.rbi | 61 ++++- rbi/openai/models/audio/transcription.rbi | 216 +++++++++++++++++- .../audio/transcription_text_done_event.rbi | 147 +++++++++++- .../models/audio/transcription_verbose.rbi | 47 ++++ .../permission_retrieve_response.rbi | 121 +++++++--- ...code_interpreter_call_code_delta_event.rbi | 24 +- ..._code_interpreter_call_code_done_event.rbi | 18 +- ..._code_interpreter_call_completed_event.rbi | 34 ++- ...ode_interpreter_call_in_progress_event.rbi | 34 ++- ...de_interpreter_call_interpreting_event.rbi | 34 ++- .../response_code_interpreter_tool_call.rbi | 208 +++++++---------- .../models/responses/response_output_text.rbi | 30 ++- rbi/openai/resources/audio/speech.rbi | 7 +- .../fine_tuning/checkpoints/permissions.rbi | 4 +- .../models/audio/speech_create_params.rbs | 22 +- sig/openai/models/audio/transcription.rbs | 98 +++++++- .../audio/transcription_text_done_event.rbs | 74 +++++- .../models/audio/transcription_verbose.rbs | 21 ++ .../permission_retrieve_response.rbs | 69 ++++-- ...code_interpreter_call_code_delta_event.rbs | 5 + ..._code_interpreter_call_code_done_event.rbs | 5 + ..._code_interpreter_call_completed_event.rbs | 8 +- ...ode_interpreter_call_in_progress_event.rbs | 8 +- ...de_interpreter_call_interpreting_event.rbs | 8 +- .../response_code_interpreter_tool_call.rbs | 83 +++---- .../models/responses/response_output_text.rbs | 16 +- sig/openai/resources/audio/speech.rbs | 1 + .../fine_tuning/checkpoints/permissions.rbs | 2 +- .../checkpoints/permissions_test.rb | 18 +- .../resources/responses/input_items_test.rb | 8 +- 46 files changed, 1539 insertions(+), 510 deletions(-) diff --git a/.stats.yml b/.stats.yml index 
4c6ee8cc..e6290b25 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f411a68f272b8be0ab0c266043da33228687b9b2d76896724e3cef797de9563d.yml -openapi_spec_hash: 89bf866ea95ecfb3d76c8833237047d6 -config_hash: dc5515e257676a27cb1ace1784aa92b3 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-ef4ecb19eb61e24c49d77fef769ee243e5279bc0bdbaee8d0f8dba4da8722559.yml +openapi_spec_hash: 1b8a9767c9f04e6865b06c41948cdc24 +config_hash: fd2af1d5eff0995bb7dc02ac9a34851d diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 28cb4113..cf875025 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -46,12 +46,19 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. Does not work with `gpt-4o-mini-tts`. + # the default. # # @return [Float, nil] optional :speed, Float - # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + # @!attribute stream_format + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. + # + # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::StreamFormat, nil] + optional :stream_format, enum: -> { OpenAI::Audio::SpeechCreateParams::StreamFormat } + + # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, stream_format: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::SpeechCreateParams} for more details. # @@ -67,6 +74,8 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # + # @param stream_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::StreamFormat] The format to stream the audio in. Supported formats are `sse` and `audio`. `sse + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # One of the available [TTS models](https://platform.openai.com/docs/models#tts): @@ -153,6 +162,18 @@ module ResponseFormat # @!method self.values # @return [Array] end + + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. + module StreamFormat + extend OpenAI::Internal::Type::Enum + + SSE = :sse + AUDIO = :audio + + # @!method self.values + # @return [Array] + end end end end diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 82dc9e67..05aa2875 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -18,7 +18,13 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::Transcription::Logprob] } - # @!method initialize(text:, logprobs: nil) + # @!attribute usage + # Token usage statistics for the request. 
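A sketch of the new `stream_format` option on speech synthesis; the write-to-file step assumes the endpoint's usual StringIO-like binary return, and the output path is illustrative:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    audio = client.audio.speech.create(
      model: "gpt-4o-mini-tts",
      voice: "alloy",
      input: "Hello from Ruby!",
      stream_format: :audio # `:sse` is not supported for `tts-1`/`tts-1-hd`
    )
    File.binwrite("speech.mp3", audio.read)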
+ # + # @return [OpenAI::Models::Audio::Transcription::Usage::Tokens, OpenAI::Models::Audio::Transcription::Usage::Duration, nil] + optional :usage, union: -> { OpenAI::Audio::Transcription::Usage } + + # @!method initialize(text:, logprobs: nil, usage: nil) # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::Transcription} for more details. # @@ -28,6 +34,8 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @param text [String] The transcribed text. # # @param logprobs [Array] The log probabilities of the tokens in the transcription. Only returned with the + # + # @param usage [OpenAI::Models::Audio::Transcription::Usage::Tokens, OpenAI::Models::Audio::Transcription::Usage::Duration] Token usage statistics for the request. class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -55,6 +63,115 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @param logprob [Float] The log probability of the token. end + + # Token usage statistics for the request. + # + # @see OpenAI::Models::Audio::Transcription#usage + module Usage + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Usage statistics for models billed by token usage. + variant :tokens, -> { OpenAI::Audio::Transcription::Usage::Tokens } + + # Usage statistics for models billed by audio input duration. + variant :duration, -> { OpenAI::Audio::Transcription::Usage::Duration } + + class Tokens < OpenAI::Internal::Type::BaseModel + # @!attribute input_tokens + # Number of input tokens billed for this request. + # + # @return [Integer] + required :input_tokens, Integer + + # @!attribute output_tokens + # Number of output tokens generated. + # + # @return [Integer] + required :output_tokens, Integer + + # @!attribute total_tokens + # Total number of tokens used (input + output). + # + # @return [Integer] + required :total_tokens, Integer + + # @!attribute type + # The type of the usage object. Always `tokens` for this variant. + # + # @return [Symbol, :tokens] + required :type, const: :tokens + + # @!attribute input_token_details + # Details about the input tokens billed for this request. + # + # @return [OpenAI::Models::Audio::Transcription::Usage::Tokens::InputTokenDetails, nil] + optional :input_token_details, + -> { + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + } + + # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens) + # Usage statistics for models billed by token usage. + # + # @param input_tokens [Integer] Number of input tokens billed for this request. + # + # @param output_tokens [Integer] Number of output tokens generated. + # + # @param total_tokens [Integer] Total number of tokens used (input + output). + # + # @param input_token_details [OpenAI::Models::Audio::Transcription::Usage::Tokens::InputTokenDetails] Details about the input tokens billed for this request. + # + # @param type [Symbol, :tokens] The type of the usage object. Always `tokens` for this variant. + + # @see OpenAI::Models::Audio::Transcription::Usage::Tokens#input_token_details + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + # @!attribute audio_tokens + # Number of audio tokens billed for this request. + # + # @return [Integer, nil] + optional :audio_tokens, Integer + + # @!attribute text_tokens + # Number of text tokens billed for this request. 
+ # + # @return [Integer, nil] + optional :text_tokens, Integer + + # @!method initialize(audio_tokens: nil, text_tokens: nil) + # Details about the input tokens billed for this request. + # + # @param audio_tokens [Integer] Number of audio tokens billed for this request. + # + # @param text_tokens [Integer] Number of text tokens billed for this request. + end + end + + class Duration < OpenAI::Internal::Type::BaseModel + # @!attribute duration + # Duration of the input audio in seconds. + # + # @return [Float] + required :duration, Float + + # @!attribute type + # The type of the usage object. Always `duration` for this variant. + # + # @return [Symbol, :duration] + required :type, const: :duration + + # @!method initialize(duration:, type: :duration) + # Usage statistics for models billed by audio input duration. + # + # @param duration [Float] Duration of the input audio in seconds. + # + # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::Transcription::Usage::Tokens, OpenAI::Models::Audio::Transcription::Usage::Duration)] + end end end end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index eac7a34d..951d195f 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -26,7 +26,13 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } - # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") + # @!attribute usage + # Usage statistics for models billed by token usage. + # + # @return [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage, nil] + optional :usage, -> { OpenAI::Audio::TranscriptionTextDoneEvent::Usage } + + # @!method initialize(text:, logprobs: nil, usage: nil, type: :"transcript.text.done") # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionTextDoneEvent} for more details. # @@ -39,6 +45,8 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ # + # @param usage [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage] Usage statistics for models billed by token usage. + # # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. class Logprob < OpenAI::Internal::Type::BaseModel @@ -70,6 +78,77 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @param logprob [Float] The log probability of the token. end + + # @see OpenAI::Models::Audio::TranscriptionTextDoneEvent#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute input_tokens + # Number of input tokens billed for this request. + # + # @return [Integer] + required :input_tokens, Integer + + # @!attribute output_tokens + # Number of output tokens generated. + # + # @return [Integer] + required :output_tokens, Integer + + # @!attribute total_tokens + # Total number of tokens used (input + output). + # + # @return [Integer] + required :total_tokens, Integer + + # @!attribute type + # The type of the usage object. Always `tokens` for this variant. 
+ # + # @return [Symbol, :tokens] + required :type, const: :tokens + + # @!attribute input_token_details + # Details about the input tokens billed for this request. + # + # @return [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails, nil] + optional :input_token_details, + -> { + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + } + + # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens) + # Usage statistics for models billed by token usage. + # + # @param input_tokens [Integer] Number of input tokens billed for this request. + # + # @param output_tokens [Integer] Number of output tokens generated. + # + # @param total_tokens [Integer] Total number of tokens used (input + output). + # + # @param input_token_details [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails] Details about the input tokens billed for this request. + # + # @param type [Symbol, :tokens] The type of the usage object. Always `tokens` for this variant. + + # @see OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage#input_token_details + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + # @!attribute audio_tokens + # Number of audio tokens billed for this request. + # + # @return [Integer, nil] + optional :audio_tokens, Integer + + # @!attribute text_tokens + # Number of text tokens billed for this request. + # + # @return [Integer, nil] + optional :text_tokens, Integer + + # @!method initialize(audio_tokens: nil, text_tokens: nil) + # Details about the input tokens billed for this request. + # + # @param audio_tokens [Integer] Number of audio tokens billed for this request. + # + # @param text_tokens [Integer] Number of text tokens billed for this request. + end + end end end end diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index eaa0ebf3..6d7c08d7 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -28,13 +28,19 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } + # @!attribute usage + # Usage statistics for models billed by audio input duration. + # + # @return [OpenAI::Models::Audio::TranscriptionVerbose::Usage, nil] + optional :usage, -> { OpenAI::Audio::TranscriptionVerbose::Usage } + # @!attribute words # Extracted words and their corresponding timestamps. # # @return [Array, nil] optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionWord] } - # @!method initialize(duration:, language:, text:, segments: nil, words: nil) + # @!method initialize(duration:, language:, text:, segments: nil, usage: nil, words: nil) # Represents a verbose json transcription response returned by model, based on the # provided input. # @@ -46,7 +52,31 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @param segments [Array] Segments of the transcribed text and their corresponding details. # + # @param usage [OpenAI::Models::Audio::TranscriptionVerbose::Usage] Usage statistics for models billed by audio input duration. + # # @param words [Array] Extracted words and their corresponding timestamps. + + # @see OpenAI::Models::Audio::TranscriptionVerbose#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute duration + # Duration of the input audio in seconds. 
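Reading the two usage variants back looks roughly like this; the audio file, model name, and the use of `Pathname` for the upload are placeholders, while the union classes are the ones introduced above:

    require "openai"
    require "pathname"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    transcription = client.audio.transcriptions.create(
      file: Pathname("meeting.wav"),
      model: "gpt-4o-transcribe"
    )

    case transcription.usage
    in OpenAI::Models::Audio::Transcription::Usage::Tokens => usage
      puts "billed tokens: #{usage.total_tokens}"
    in OpenAI::Models::Audio::Transcription::Usage::Duration => usage
      puts "billed seconds: #{usage.duration}"
    in nil
      puts "no usage reported"
    end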
+ # + # @return [Float] + required :duration, Float + + # @!attribute type + # The type of the usage object. Always `duration` for this variant. + # + # @return [Symbol, :duration] + required :type, const: :duration + + # @!method initialize(duration:, type: :duration) + # Usage statistics for models billed by audio input duration. + # + # @param duration [Float] Duration of the input audio in seconds. + # + # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant. + end end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb index e22c36d8..6ffbdf4d 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -6,41 +6,76 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The permission identifier, which can be referenced in the API endpoints. + # @!attribute data # - # @return [String] - required :id, String + # @return [Array] + required :data, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] } - # @!attribute created_at - # The Unix timestamp (in seconds) for when the permission was created. + # @!attribute has_more # - # @return [Integer] - required :created_at, Integer + # @return [Boolean] + required :has_more, OpenAI::Internal::Type::Boolean # @!attribute object - # The object type, which is always "checkpoint.permission". # - # @return [Symbol, :"checkpoint.permission"] - required :object, const: :"checkpoint.permission" + # @return [Symbol, :list] + required :object, const: :list - # @!attribute project_id - # The project identifier that the permission is for. + # @!attribute first_id # - # @return [String] - required :project_id, String + # @return [String, nil] + optional :first_id, String, nil?: true - # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") - # The `checkpoint.permission` object represents a permission for a fine-tuned - # model checkpoint. + # @!attribute last_id # - # @param id [String] The permission identifier, which can be referenced in the API endpoints. - # - # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. - # - # @param project_id [String] The project identifier that the permission is for. - # - # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". + # @return [String, nil] + optional :last_id, String, nil?: true + + # @!method initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list) + # @param data [Array] + # @param has_more [Boolean] + # @param first_id [String, nil] + # @param last_id [String, nil] + # @param object [Symbol, :list] + + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The permission identifier, which can be referenced in the API endpoints. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) for when the permission was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The object type, which is always "checkpoint.permission". 
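With the response now a list envelope, callers iterate `data` instead of receiving a single permission object. A minimal sketch; the checkpoint identifier is a placeholder:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    page = client.fine_tuning.checkpoints.permissions.retrieve(
      "ftckpt_abc123" # placeholder checkpoint ID
    )
    page.data.each { |perm| puts "#{perm.id} -> project #{perm.project_id}" }
    puts "more results available" if page.has_more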
+ # + # @return [Symbol, :"checkpoint.permission"] + required :object, const: :"checkpoint.permission" + + # @!attribute project_id + # The project identifier that the permission is for. + # + # @return [String] + required :project_id, String + + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. + # + # @param id [String] The permission identifier, which can be referenced in the API endpoints. + # + # @param created_at [Integer] The Unix timestamp (in seconds) for when the permission was created. + # + # @param project_id [String] The project identifier that the permission is for. + # + # @param object [Symbol, :"checkpoint.permission"] The object type, which is always "checkpoint.permission". + end end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 73bc4f43..60f1568a 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -5,19 +5,26 @@ module Models module Responses class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta - # The partial code snippet added by the code interpreter. + # The partial code snippet being streamed by the code interpreter. # # @return [String] required :delta, String + # @!attribute item_id + # The unique identifier of the code interpreter tool call item. + # + # @return [String] + required :item_id, String + # @!attribute output_index - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code is being + # streamed. # # @return [Integer] required :output_index, Integer # @!attribute sequence_number - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. # # @return [Integer] required :sequence_number, Integer @@ -28,18 +35,20 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call_code.delta"] required :type, const: :"response.code_interpreter_call_code.delta" - # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.delta") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more # details. # - # Emitted when a partial code snippet is added by the code interpreter. + # Emitted when a partial code snippet is streamed by the code interpreter. + # + # @param delta [String] The partial code snippet being streamed by the code interpreter. # - # @param delta [String] The partial code snippet added by the code interpreter. + # @param item_id [String] The unique identifier of the code interpreter tool call item. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. + # @param output_index [Integer] The index of the output item in the response for which the code is being streame # - # @param sequence_number [Integer] The sequence number of this event. 
+ # @param sequence_number [Integer] The sequence number of this event, used to order streaming events. # # @param type [Symbol, :"response.code_interpreter_call_code.delta"] The type of the event. Always `response.code_interpreter_call_code.delta`. end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 356bcee2..d6b47e89 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -10,14 +10,20 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [String] required :code, String + # @!attribute item_id + # The unique identifier of the code interpreter tool call item. + # + # @return [String] + required :item_id, String + # @!attribute output_index - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code is finalized. # # @return [Integer] required :output_index, Integer # @!attribute sequence_number - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. # # @return [Integer] required :sequence_number, Integer @@ -28,18 +34,16 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [Symbol, :"response.code_interpreter_call_code.done"] required :type, const: :"response.code_interpreter_call_code.done" - # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.done") - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more - # details. - # - # Emitted when code snippet output is finalized by the code interpreter. + # @!method initialize(code:, item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call_code.done") + # Emitted when the code snippet is finalized by the code interpreter. # # @param code [String] The final code snippet output by the code interpreter. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. + # @param item_id [String] The unique identifier of the code interpreter tool call item. + # + # @param output_index [Integer] The index of the output item in the response for which the code is finalized. # - # @param sequence_number [Integer] The sequence number of this event. + # @param sequence_number [Integer] The sequence number of this event, used to order streaming events. # # @param type [Symbol, :"response.code_interpreter_call_code.done"] The type of the event. Always `response.code_interpreter_call_code.done`. end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 0e4b05f0..20224eca 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -4,20 +4,21 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel - # @!attribute code_interpreter_call - # A tool call to run code. + # @!attribute item_id + # The unique identifier of the code interpreter tool call item. 
# - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + # @return [String] + required :item_id, String # @!attribute output_index - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code interpreter call + # is completed. # # @return [Integer] required :output_index, Integer # @!attribute sequence_number - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. # # @return [Integer] required :sequence_number, Integer @@ -28,18 +29,18 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call.completed"] required :type, const: :"response.code_interpreter_call.completed" - # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more # details. # # Emitted when the code interpreter call is completed. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param item_id [String] The unique identifier of the code interpreter tool call item. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. + # @param output_index [Integer] The index of the output item in the response for which the code interpreter call # - # @param sequence_number [Integer] The sequence number of this event. + # @param sequence_number [Integer] The sequence number of this event, used to order streaming events. # # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 26ec12b4..ff821153 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -4,20 +4,21 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel - # @!attribute code_interpreter_call - # A tool call to run code. + # @!attribute item_id + # The unique identifier of the code interpreter tool call item. # - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + # @return [String] + required :item_id, String # @!attribute output_index - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code interpreter call + # is in progress. # # @return [Integer] required :output_index, Integer # @!attribute sequence_number - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. 
# # @return [Integer] required :sequence_number, Integer @@ -28,18 +29,18 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @return [Symbol, :"response.code_interpreter_call.in_progress"] required :type, const: :"response.code_interpreter_call.in_progress" - # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more # details. # # Emitted when a code interpreter call is in progress. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param item_id [String] The unique identifier of the code interpreter tool call item. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. + # @param output_index [Integer] The index of the output item in the response for which the code interpreter call # - # @param sequence_number [Integer] The sequence number of this event. + # @param sequence_number [Integer] The sequence number of this event, used to order streaming events. # # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index f0cf91cc..0000a542 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -4,20 +4,21 @@ module OpenAI module Models module Responses class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel - # @!attribute code_interpreter_call - # A tool call to run code. + # @!attribute item_id + # The unique identifier of the code interpreter tool call item. # - # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + # @return [String] + required :item_id, String # @!attribute output_index - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code interpreter is + # interpreting code. # # @return [Integer] required :output_index, Integer # @!attribute sequence_number - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. # # @return [Integer] required :sequence_number, Integer @@ -28,18 +29,18 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @return [Symbol, :"response.code_interpreter_call.interpreting"] required :type, const: :"response.code_interpreter_call.interpreting" - # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for # more details. 
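Because these events now carry only an `item_id`, a streaming consumer reassembles the code itself. A hedged sketch: the prompt and tool configuration are illustrative (the exact tool hash shape is an assumption), while the event classes are the ones defined in this patch:

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    code_by_item = Hash.new { |h, k| h[k] = +"" }

    events = client.responses.stream_raw(
      model: "gpt-4o",
      input: "Use Python to add 2 + 2.",
      tools: [{type: :code_interpreter, container: {type: :auto}}] # assumed shape
    )

    events.each do |event|
      case event
      when OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent
        code_by_item[event.item_id] << event.delta
      when OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent
        puts code_by_item.delete(event.item_id) # full code for this tool call
      end
    end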
# # Emitted when the code interpreter is actively interpreting the code snippet. # - # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param item_id [String] The unique identifier of the code interpreter tool call item. # - # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. + # @param output_index [Integer] The index of the output item in the response for which the code interpreter is i # - # @param sequence_number [Integer] The sequence number of this event. + # @param sequence_number [Integer] The sequence number of this event, used to order streaming events. # # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index f04eba44..081ea74a 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -11,19 +11,27 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel required :id, String # @!attribute code - # The code to run. + # The code to run, or null if not available. + # + # @return [String, nil] + required :code, String, nil?: true + + # @!attribute container_id + # The ID of the container used to run the code. # # @return [String] - required :code, String + required :container_id, String - # @!attribute results - # The results of the code interpreter tool call. + # @!attribute outputs + # The outputs generated by the code interpreter, such as logs or images. Can be + # null if no outputs are available. # - # @return [Array] - required :results, + # @return [Array, nil] + required :outputs, -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result] - } + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output] + }, + nil?: true # @!attribute status # The status of the code interpreter tool call. @@ -37,13 +45,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter_call] required :type, const: :code_interpreter_call - # @!attribute container_id - # The ID of the container used to run the code. - # - # @return [String, nil] - optional :container_id, String - - # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call) + # @!method initialize(id:, code:, container_id:, outputs:, status:, type: :code_interpreter_call) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details. # @@ -51,105 +53,72 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the code interpreter tool call. # - # @param code [String] The code to run. + # @param code [String, nil] The code to run, or null if not available. # - # @param results [Array] The results of the code interpreter tool call. + # @param container_id [String] The ID of the container used to run the code. # - # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. 
+ # @param outputs [Array, nil] The outputs generated by the code interpreter, such as logs or images. # - # @param container_id [String] The ID of the container used to run the code. + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. # # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. - # The output of a code interpreter tool. - module Result + # The logs output from the code interpreter. + module Output extend OpenAI::Internal::Type::Union discriminator :type - # The output of a code interpreter tool call that is text. - variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs } + # The logs output from the code interpreter. + variant :logs, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs } - # The output of a code interpreter tool call that is a file. - variant :files, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files } + # The image output from the code interpreter. + variant :image, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs - # The logs of the code interpreter tool call. + # The logs output from the code interpreter. # # @return [String] required :logs, String # @!attribute type - # The type of the code interpreter text output. Always `logs`. + # The type of the output. Always 'logs'. # # @return [Symbol, :logs] required :type, const: :logs # @!method initialize(logs:, type: :logs) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for - # more details. + # The logs output from the code interpreter. # - # The output of a code interpreter tool call that is text. + # @param logs [String] The logs output from the code interpreter. # - # @param logs [String] The logs of the code interpreter tool call. - # - # @param type [Symbol, :logs] The type of the code interpreter text output. Always `logs`. + # @param type [Symbol, :logs] The type of the output. Always 'logs'. end - class Files < OpenAI::Internal::Type::BaseModel - # @!attribute files - # - # @return [Array] - required :files, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] - } - + class Image < OpenAI::Internal::Type::BaseModel # @!attribute type - # The type of the code interpreter file output. Always `files`. + # The type of the output. Always 'image'. # - # @return [Symbol, :files] - required :type, const: :files + # @return [Symbol, :image] + required :type, const: :image - # @!method initialize(files:, type: :files) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for - # more details. + # @!attribute url + # The URL of the image output from the code interpreter. # - # The output of a code interpreter tool call that is a file. + # @return [String] + required :url, String + + # @!method initialize(url:, type: :image) + # The image output from the code interpreter. # - # @param files [Array] + # @param url [String] The URL of the image output from the code interpreter. # - # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. - - class File < OpenAI::Internal::Type::BaseModel - # @!attribute file_id - # The ID of the file. 
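The `results` attribute is replaced by a nullable `outputs` array whose elements are a union of logs and images, and `code` itself becomes nullable. A minimal consumption sketch, assuming `call` is a parsed `OpenAI::Models::Responses::ResponseCodeInterpreterToolCall`:

    puts "container: #{call.container_id}"  # container_id is now required
    puts call.code unless call.code.nil?    # code is nullable after this change
    (call.outputs || []).each do |output|   # outputs can be nil
      case output
      when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs
        puts output.logs
      when OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image
        puts "image output at #{output.url}"
      end
    end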
- # - # @return [String] - required :file_id, String - - # @!attribute mime_type - # The MIME type of the file. - # - # @return [String] - required :mime_type, String - - # @!method initialize(file_id:, mime_type:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} - # for more details. - # - # @param file_id [String] The ID of the file. - # - # @param mime_type [String] The MIME type of the file. - end + # @param type [Symbol, :image] The type of the output. Always 'image'. end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] + # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image)] end # The status of the code interpreter tool call. @@ -159,8 +128,10 @@ module Status extend OpenAI::Internal::Type::Enum IN_PROGRESS = :in_progress - INTERPRETING = :interpreting COMPLETED = :completed + INCOMPLETE = :incomplete + INTERPRETING = :interpreting + FAILED = :failed # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 6f0420ce..994098c2 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -70,6 +70,12 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String + # @!attribute filename + # The filename of the file cited. + # + # @return [String] + required :filename, String + # @!attribute index # The index of the file in the list of files. # @@ -82,11 +88,13 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!method initialize(file_id:, index:, type: :file_citation) + # @!method initialize(file_id:, filename:, index:, type: :file_citation) # A citation to a file. # # @param file_id [String] The ID of the file. # + # @param filename [String] The filename of the file cited. + # # @param index [Integer] The index of the file in the list of files. # # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. @@ -156,6 +164,12 @@ class ContainerFileCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String + # @!attribute filename + # The filename of the container file cited. + # + # @return [String] + required :filename, String + # @!attribute start_index # The index of the first character of the container file citation in the message. # @@ -168,7 +182,7 @@ class ContainerFileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :container_file_citation] required :type, const: :container_file_citation - # @!method initialize(container_id:, end_index:, file_id:, start_index:, type: :container_file_citation) + # @!method initialize(container_id:, end_index:, file_id:, filename:, start_index:, type: :container_file_citation) # A citation for a container file used to generate a model response. # # @param container_id [String] The ID of the container file. @@ -177,6 +191,8 @@ class ContainerFileCitation < OpenAI::Internal::Type::BaseModel # # @param file_id [String] The ID of the file. # + # @param filename [String] The filename of the container file cited. 
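Both citation annotation types gain a required `filename`. A short sketch of surfacing it, assuming `text` is a parsed `OpenAI::Models::Responses::ResponseOutputText` whose `annotations` array is populated:

    text.annotations.each do |annotation|
      case annotation
      when OpenAI::Models::Responses::ResponseOutputText::FileCitation
        puts "cites #{annotation.filename} (file #{annotation.file_id}) at index #{annotation.index}"
      when OpenAI::Models::Responses::ResponseOutputText::ContainerFileCitation
        puts "cites #{annotation.filename} from container #{annotation.container_id}"
      end
    end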
+ # # @param start_index [Integer] The index of the first character of the container file citation in the message. # # @param type [Symbol, :container_file_citation] The type of the container file citation. Always `container_file_citation`. diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index b5e90a6d..df0809de 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -24,11 +24,11 @@ module ResponseStreamEvent # Emitted when the full audio transcript is completed. variant :"response.audio.transcript.done", -> { OpenAI::Responses::ResponseAudioTranscriptDoneEvent } - # Emitted when a partial code snippet is added by the code interpreter. + # Emitted when a partial code snippet is streamed by the code interpreter. variant :"response.code_interpreter_call_code.delta", -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent } - # Emitted when code snippet output is finalized by the code interpreter. + # Emitted when the code snippet is finalized by the code interpreter. variant :"response.code_interpreter_call_code.done", -> { OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent } diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index 744c3b69..7a7eb0fe 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -9,7 +9,7 @@ class Speech # # Generates audio from the input text. # - # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + # @overload create(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, stream_format: nil, request_options: {}) # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. # @@ -23,6 +23,8 @@ class Speech # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # + # @param stream_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::StreamFormat] The format to stream the audio in. Supported formats are `sse` and `audio`. 
`sse + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [StringIO] diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index 4fe26ec9..f0afec14 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -60,7 +60,7 @@ def create(fine_tuned_model_checkpoint, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] # # @see OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams def retrieve(fine_tuned_model_checkpoint, params = {}) @@ -69,7 +69,6 @@ def retrieve(fine_tuned_model_checkpoint, params = {}) method: :get, path: ["fine_tuning/checkpoints/%1$s/permissions", fine_tuned_model_checkpoint], query: parsed, - page: OpenAI::Internal::CursorPage, model: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse, options: options ) diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 87b9e0b9..0a15c3d8 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -60,13 +60,30 @@ module OpenAI attr_writer :response_format # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. Does not work with `gpt-4o-mini-tts`. + # the default. sig { returns(T.nilable(Float)) } attr_reader :speed sig { params(speed: Float).void } attr_writer :speed + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. + sig do + returns( + T.nilable(OpenAI::Audio::SpeechCreateParams::StreamFormat::OrSymbol) + ) + end + attr_reader :stream_format + + sig do + params( + stream_format: + OpenAI::Audio::SpeechCreateParams::StreamFormat::OrSymbol + ).void + end + attr_writer :stream_format + sig do params( input: String, @@ -77,6 +94,8 @@ module OpenAI response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, + stream_format: + OpenAI::Audio::SpeechCreateParams::StreamFormat::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -98,8 +117,11 @@ module OpenAI # `wav`, and `pcm`. response_format: nil, # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. Does not work with `gpt-4o-mini-tts`. + # the default. speed: nil, + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. + stream_format: nil, request_options: {} ) end @@ -118,6 +140,8 @@ module OpenAI response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, + stream_format: + OpenAI::Audio::SpeechCreateParams::StreamFormat::OrSymbol, request_options: OpenAI::RequestOptions } ) @@ -267,6 +291,39 @@ module OpenAI def self.values end end + + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. 
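A hedged usage sketch of the new `stream_format` parameter; the client construction, model name, and voice below are illustrative assumptions rather than part of this patch:

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
    audio = client.audio.speech.create(
      model: "gpt-4o-mini-tts",            # assumed model for illustration
      voice: "alloy",                      # assumed voice for illustration
      input: "Streaming text to speech.",
      stream_format: :audio                # :sse is not supported for tts-1 / tts-1-hd
    )
    File.binwrite("speech.mp3", audio.read)  # create returns a StringIO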
+ module StreamFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Audio::SpeechCreateParams::StreamFormat) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SSE = + T.let( + :sse, + OpenAI::Audio::SpeechCreateParams::StreamFormat::TaggedSymbol + ) + AUDIO = + T.let( + :audio, + OpenAI::Audio::SpeechCreateParams::StreamFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Audio::SpeechCreateParams::StreamFormat::TaggedSymbol + ] + ) + end + def self.values + end + end end end end diff --git a/rbi/openai/models/audio/transcription.rbi b/rbi/openai/models/audio/transcription.rbi index 22ae3343..8328d081 100644 --- a/rbi/openai/models/audio/transcription.rbi +++ b/rbi/openai/models/audio/transcription.rbi @@ -28,12 +28,34 @@ module OpenAI end attr_writer :logprobs + # Token usage statistics for the request. + sig do + returns(T.nilable(OpenAI::Audio::Transcription::Usage::Variants)) + end + attr_reader :usage + + sig do + params( + usage: + T.any( + OpenAI::Audio::Transcription::Usage::Tokens::OrHash, + OpenAI::Audio::Transcription::Usage::Duration::OrHash + ) + ).void + end + attr_writer :usage + # Represents a transcription response returned by model, based on the provided # input. sig do params( text: String, - logprobs: T::Array[OpenAI::Audio::Transcription::Logprob::OrHash] + logprobs: T::Array[OpenAI::Audio::Transcription::Logprob::OrHash], + usage: + T.any( + OpenAI::Audio::Transcription::Usage::Tokens::OrHash, + OpenAI::Audio::Transcription::Usage::Duration::OrHash + ) ).returns(T.attached_class) end def self.new( @@ -42,7 +64,9 @@ module OpenAI # The log probabilities of the tokens in the transcription. Only returned with the # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. - logprobs: nil + logprobs: nil, + # Token usage statistics for the request. + usage: nil ) end @@ -50,7 +74,8 @@ module OpenAI override.returns( { text: String, - logprobs: T::Array[OpenAI::Audio::Transcription::Logprob] + logprobs: T::Array[OpenAI::Audio::Transcription::Logprob], + usage: OpenAI::Audio::Transcription::Usage::Variants } ) end @@ -112,6 +137,191 @@ module OpenAI def to_hash end end + + # Token usage statistics for the request. + module Usage + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Audio::Transcription::Usage::Tokens, + OpenAI::Audio::Transcription::Usage::Duration + ) + end + + class Tokens < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::Transcription::Usage::Tokens, + OpenAI::Internal::AnyHash + ) + end + + # Number of input tokens billed for this request. + sig { returns(Integer) } + attr_accessor :input_tokens + + # Number of output tokens generated. + sig { returns(Integer) } + attr_accessor :output_tokens + + # Total number of tokens used (input + output). + sig { returns(Integer) } + attr_accessor :total_tokens + + # The type of the usage object. Always `tokens` for this variant. + sig { returns(Symbol) } + attr_accessor :type + + # Details about the input tokens billed for this request. + sig do + returns( + T.nilable( + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + ) + ) + end + attr_reader :input_token_details + + sig do + params( + input_token_details: + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails::OrHash + ).void + end + attr_writer :input_token_details + + # Usage statistics for models billed by token usage. 
+ sig do + params( + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + input_token_details: + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Number of input tokens billed for this request. + input_tokens:, + # Number of output tokens generated. + output_tokens:, + # Total number of tokens used (input + output). + total_tokens:, + # Details about the input tokens billed for this request. + input_token_details: nil, + # The type of the usage object. Always `tokens` for this variant. + type: :tokens + ) + end + + sig do + override.returns( + { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: Symbol, + input_token_details: + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + } + ) + end + def to_hash + end + + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails, + OpenAI::Internal::AnyHash + ) + end + + # Number of audio tokens billed for this request. + sig { returns(T.nilable(Integer)) } + attr_reader :audio_tokens + + sig { params(audio_tokens: Integer).void } + attr_writer :audio_tokens + + # Number of text tokens billed for this request. + sig { returns(T.nilable(Integer)) } + attr_reader :text_tokens + + sig { params(text_tokens: Integer).void } + attr_writer :text_tokens + + # Details about the input tokens billed for this request. + sig do + params(audio_tokens: Integer, text_tokens: Integer).returns( + T.attached_class + ) + end + def self.new( + # Number of audio tokens billed for this request. + audio_tokens: nil, + # Number of text tokens billed for this request. + text_tokens: nil + ) + end + + sig do + override.returns( + { audio_tokens: Integer, text_tokens: Integer } + ) + end + def to_hash + end + end + end + + class Duration < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::Transcription::Usage::Duration, + OpenAI::Internal::AnyHash + ) + end + + # Duration of the input audio in seconds. + sig { returns(Float) } + attr_accessor :duration + + # The type of the usage object. Always `duration` for this variant. + sig { returns(Symbol) } + attr_accessor :type + + # Usage statistics for models billed by audio input duration. + sig do + params(duration: Float, type: Symbol).returns(T.attached_class) + end + def self.new( + # Duration of the input audio in seconds. + duration:, + # The type of the usage object. Always `duration` for this variant. + type: :duration + ) + end + + sig { override.returns({ duration: Float, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[OpenAI::Audio::Transcription::Usage::Variants] + ) + end + def self.variants + end + end end end end diff --git a/rbi/openai/models/audio/transcription_text_done_event.rbi b/rbi/openai/models/audio/transcription_text_done_event.rbi index 21788792..c1135e31 100644 --- a/rbi/openai/models/audio/transcription_text_done_event.rbi +++ b/rbi/openai/models/audio/transcription_text_done_event.rbi @@ -43,6 +43,19 @@ module OpenAI end attr_writer :logprobs + # Usage statistics for models billed by token usage. 
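`Transcription#usage` is a union of token-billed and duration-billed shapes, and the attribute itself is optional. A minimal sketch of dispatching on it, assuming `transcription` is a parsed `OpenAI::Models::Audio::Transcription`:

    case (usage = transcription.usage)
    when OpenAI::Models::Audio::Transcription::Usage::Tokens
      puts "billed #{usage.input_tokens} input + #{usage.output_tokens} output " \
           "= #{usage.total_tokens} tokens"
    when OpenAI::Models::Audio::Transcription::Usage::Duration
      puts "billed for #{usage.duration} seconds of audio"
    when nil
      puts "no usage reported"  # usage is optional on the model
    end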
+ sig do + returns(T.nilable(OpenAI::Audio::TranscriptionTextDoneEvent::Usage)) + end + attr_reader :usage + + sig do + params( + usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::OrHash + ).void + end + attr_writer :usage + # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) @@ -54,6 +67,7 @@ module OpenAI T::Array[ OpenAI::Audio::TranscriptionTextDoneEvent::Logprob::OrHash ], + usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::OrHash, type: Symbol ).returns(T.attached_class) end @@ -65,6 +79,8 @@ module OpenAI # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. logprobs: nil, + # Usage statistics for models billed by token usage. + usage: nil, # The type of the event. Always `transcript.text.done`. type: :"transcript.text.done" ) @@ -76,7 +92,8 @@ module OpenAI text: String, type: Symbol, logprobs: - T::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + T::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob], + usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage } ) end @@ -138,6 +155,134 @@ module OpenAI def to_hash end end + + class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDoneEvent::Usage, + OpenAI::Internal::AnyHash + ) + end + + # Number of input tokens billed for this request. + sig { returns(Integer) } + attr_accessor :input_tokens + + # Number of output tokens generated. + sig { returns(Integer) } + attr_accessor :output_tokens + + # Total number of tokens used (input + output). + sig { returns(Integer) } + attr_accessor :total_tokens + + # The type of the usage object. Always `tokens` for this variant. + sig { returns(Symbol) } + attr_accessor :type + + # Details about the input tokens billed for this request. + sig do + returns( + T.nilable( + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + ) + ) + end + attr_reader :input_token_details + + sig do + params( + input_token_details: + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails::OrHash + ).void + end + attr_writer :input_token_details + + # Usage statistics for models billed by token usage. + sig do + params( + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + input_token_details: + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Number of input tokens billed for this request. + input_tokens:, + # Number of output tokens generated. + output_tokens:, + # Total number of tokens used (input + output). + total_tokens:, + # Details about the input tokens billed for this request. + input_token_details: nil, + # The type of the usage object. Always `tokens` for this variant. 
+ type: :tokens + ) + end + + sig do + override.returns( + { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: Symbol, + input_token_details: + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + } + ) + end + def to_hash + end + + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails, + OpenAI::Internal::AnyHash + ) + end + + # Number of audio tokens billed for this request. + sig { returns(T.nilable(Integer)) } + attr_reader :audio_tokens + + sig { params(audio_tokens: Integer).void } + attr_writer :audio_tokens + + # Number of text tokens billed for this request. + sig { returns(T.nilable(Integer)) } + attr_reader :text_tokens + + sig { params(text_tokens: Integer).void } + attr_writer :text_tokens + + # Details about the input tokens billed for this request. + sig do + params(audio_tokens: Integer, text_tokens: Integer).returns( + T.attached_class + ) + end + def self.new( + # Number of audio tokens billed for this request. + audio_tokens: nil, + # Number of text tokens billed for this request. + text_tokens: nil + ) + end + + sig do + override.returns({ audio_tokens: Integer, text_tokens: Integer }) + end + def to_hash + end + end + end end end end diff --git a/rbi/openai/models/audio/transcription_verbose.rbi b/rbi/openai/models/audio/transcription_verbose.rbi index afd2b789..45fb31e6 100644 --- a/rbi/openai/models/audio/transcription_verbose.rbi +++ b/rbi/openai/models/audio/transcription_verbose.rbi @@ -37,6 +37,15 @@ module OpenAI end attr_writer :segments + # Usage statistics for models billed by audio input duration. + sig { returns(T.nilable(OpenAI::Audio::TranscriptionVerbose::Usage)) } + attr_reader :usage + + sig do + params(usage: OpenAI::Audio::TranscriptionVerbose::Usage::OrHash).void + end + attr_writer :usage + # Extracted words and their corresponding timestamps. sig { returns(T.nilable(T::Array[OpenAI::Audio::TranscriptionWord])) } attr_reader :words @@ -54,6 +63,7 @@ module OpenAI language: String, text: String, segments: T::Array[OpenAI::Audio::TranscriptionSegment::OrHash], + usage: OpenAI::Audio::TranscriptionVerbose::Usage::OrHash, words: T::Array[OpenAI::Audio::TranscriptionWord::OrHash] ).returns(T.attached_class) end @@ -66,6 +76,8 @@ module OpenAI text:, # Segments of the transcribed text and their corresponding details. segments: nil, + # Usage statistics for models billed by audio input duration. + usage: nil, # Extracted words and their corresponding timestamps. words: nil ) @@ -78,12 +90,47 @@ module OpenAI language: String, text: String, segments: T::Array[OpenAI::Audio::TranscriptionSegment], + usage: OpenAI::Audio::TranscriptionVerbose::Usage, words: T::Array[OpenAI::Audio::TranscriptionWord] } ) end def to_hash end + + class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Audio::TranscriptionVerbose::Usage, + OpenAI::Internal::AnyHash + ) + end + + # Duration of the input audio in seconds. + sig { returns(Float) } + attr_accessor :duration + + # The type of the usage object. Always `duration` for this variant. + sig { returns(Symbol) } + attr_accessor :type + + # Usage statistics for models billed by audio input duration. + sig do + params(duration: Float, type: Symbol).returns(T.attached_class) + end + def self.new( + # Duration of the input audio in seconds. + duration:, + # The type of the usage object. Always `duration` for this variant. 
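For duration-billed models the only datum is seconds of input audio. A sketch of turning that into an estimated spend; the per-minute rate here is a hypothetical placeholder, not a documented price (`verbose` is assumed to be a parsed `OpenAI::Models::Audio::TranscriptionVerbose`):

    HYPOTHETICAL_RATE_PER_MINUTE = 0.006  # placeholder price, not from this patch
    if (usage = verbose.usage)
      estimate = (usage.duration / 60.0) * HYPOTHETICAL_RATE_PER_MINUTE
      puts format("%.1fs of audio ~= $%.4f", usage.duration, estimate)
    end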
+ type: :duration + ) + end + + sig { override.returns({ duration: Float, type: Symbol }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi index 8690aad8..2501be7b 100644 --- a/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi +++ b/rbi/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbi @@ -13,56 +13,125 @@ module OpenAI ) end - # The permission identifier, which can be referenced in the API endpoints. - sig { returns(String) } - attr_accessor :id + sig do + returns( + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data + ] + ) + end + attr_accessor :data - # The Unix timestamp (in seconds) for when the permission was created. - sig { returns(Integer) } - attr_accessor :created_at + sig { returns(T::Boolean) } + attr_accessor :has_more - # The object type, which is always "checkpoint.permission". sig { returns(Symbol) } attr_accessor :object - # The project identifier that the permission is for. - sig { returns(String) } - attr_accessor :project_id + sig { returns(T.nilable(String)) } + attr_accessor :first_id + + sig { returns(T.nilable(String)) } + attr_accessor :last_id - # The `checkpoint.permission` object represents a permission for a fine-tuned - # model checkpoint. sig do params( - id: String, - created_at: Integer, - project_id: String, + data: + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data::OrHash + ], + has_more: T::Boolean, + first_id: T.nilable(String), + last_id: T.nilable(String), object: Symbol ).returns(T.attached_class) end def self.new( - # The permission identifier, which can be referenced in the API endpoints. - id:, - # The Unix timestamp (in seconds) for when the permission was created. - created_at:, - # The project identifier that the permission is for. - project_id:, - # The object type, which is always "checkpoint.permission". - object: :"checkpoint.permission" + data:, + has_more:, + first_id: nil, + last_id: nil, + object: :list ) end sig do override.returns( { - id: String, - created_at: Integer, + data: + T::Array[ + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data + ], + has_more: T::Boolean, object: Symbol, - project_id: String + first_id: T.nilable(String), + last_id: T.nilable(String) } ) end def to_hash end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data, + OpenAI::Internal::AnyHash + ) + end + + # The permission identifier, which can be referenced in the API endpoints. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) for when the permission was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The object type, which is always "checkpoint.permission". + sig { returns(Symbol) } + attr_accessor :object + + # The project identifier that the permission is for. + sig { returns(String) } + attr_accessor :project_id + + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. + sig do + params( + id: String, + created_at: Integer, + project_id: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # The permission identifier, which can be referenced in the API endpoints. 
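Because `retrieve` no longer goes through `OpenAI::Internal::CursorPage`, callers now get the list object back directly and read `data`, `has_more`, `first_id`, and `last_id` themselves. A minimal sketch, where the client setup is as before and the checkpoint identifier is a hypothetical placeholder:

    response = client.fine_tuning.checkpoints.permissions.retrieve(
      "ft:gpt-4o-mini:org::abc123"  # hypothetical checkpoint identifier
    )
    response.data.each do |permission|
      puts "#{permission.id} grants access to project #{permission.project_id}"
    end
    warn "more results after #{response.last_id}" if response.has_more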
+ id:, + # The Unix timestamp (in seconds) for when the permission was created. + created_at:, + # The project identifier that the permission is for. + project_id:, + # The object type, which is always "checkpoint.permission". + object: :"checkpoint.permission" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + object: Symbol, + project_id: String + } + ) + end + def to_hash + end + end end end end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index d0012c45..91935135 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -12,15 +12,20 @@ module OpenAI ) end - # The partial code snippet added by the code interpreter. + # The partial code snippet being streamed by the code interpreter. sig { returns(String) } attr_accessor :delta - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response for which the code is being + # streamed. sig { returns(Integer) } attr_accessor :output_index - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sig { returns(Integer) } attr_accessor :sequence_number @@ -28,21 +33,25 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - # Emitted when a partial code snippet is added by the code interpreter. + # Emitted when a partial code snippet is streamed by the code interpreter. sig do params( delta: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( - # The partial code snippet added by the code interpreter. + # The partial code snippet being streamed by the code interpreter. delta:, - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + item_id:, + # The index of the output item in the response for which the code is being + # streamed. output_index:, - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sequence_number:, # The type of the event. Always `response.code_interpreter_call_code.delta`. type: :"response.code_interpreter_call_code.delta" @@ -53,6 +62,7 @@ module OpenAI override.returns( { delta: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 0ab6b04b..ee42ae87 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -16,11 +16,15 @@ module OpenAI sig { returns(String) } attr_accessor :code - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response for which the code is finalized. sig { returns(Integer) } attr_accessor :output_index - # The sequence number of this event. 
+ # The sequence number of this event, used to order streaming events. sig { returns(Integer) } attr_accessor :sequence_number @@ -28,10 +32,11 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - # Emitted when code snippet output is finalized by the code interpreter. + # Emitted when the code snippet is finalized by the code interpreter. sig do params( code: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol @@ -40,9 +45,11 @@ module OpenAI def self.new( # The final code snippet output by the code interpreter. code:, - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + item_id:, + # The index of the output item in the response for which the code is finalized. output_index:, - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sequence_number:, # The type of the event. Always `response.code_interpreter_call_code.done`. type: :"response.code_interpreter_call_code.done" @@ -53,6 +60,7 @@ module OpenAI override.returns( { code: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi index 5e40e50d..fdc86de0 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -12,23 +12,16 @@ module OpenAI ) end - # A tool call to run code. - sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } - attr_reader :code_interpreter_call + # The unique identifier of the code interpreter tool call item. + sig { returns(String) } + attr_accessor :item_id - sig do - params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash - ).void - end - attr_writer :code_interpreter_call - - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code interpreter call + # is completed. sig { returns(Integer) } attr_accessor :output_index - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sig { returns(Integer) } attr_accessor :sequence_number @@ -39,19 +32,19 @@ module OpenAI # Emitted when the code interpreter call is completed. sig do params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( - # A tool call to run code. - code_interpreter_call:, - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + item_id:, + # The index of the output item in the response for which the code interpreter call + # is completed. output_index:, - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sequence_number:, # The type of the event. Always `response.code_interpreter_call.completed`. 
type: :"response.code_interpreter_call.completed" @@ -61,8 +54,7 @@ module OpenAI sig do override.returns( { - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index bf880f76..c15d559e 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -12,23 +12,16 @@ module OpenAI ) end - # A tool call to run code. - sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } - attr_reader :code_interpreter_call + # The unique identifier of the code interpreter tool call item. + sig { returns(String) } + attr_accessor :item_id - sig do - params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash - ).void - end - attr_writer :code_interpreter_call - - # The index of the output item that the code interpreter call is in progress. + # The index of the output item in the response for which the code interpreter call + # is in progress. sig { returns(Integer) } attr_accessor :output_index - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sig { returns(Integer) } attr_accessor :sequence_number @@ -39,19 +32,19 @@ module OpenAI # Emitted when a code interpreter call is in progress. sig do params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( - # A tool call to run code. - code_interpreter_call:, - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + item_id:, + # The index of the output item in the response for which the code interpreter call + # is in progress. output_index:, - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sequence_number:, # The type of the event. Always `response.code_interpreter_call.in_progress`. type: :"response.code_interpreter_call.in_progress" @@ -61,8 +54,7 @@ module OpenAI sig do override.returns( { - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index eb135de8..458cf8e1 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -12,23 +12,16 @@ module OpenAI ) end - # A tool call to run code. - sig { returns(OpenAI::Responses::ResponseCodeInterpreterToolCall) } - attr_reader :code_interpreter_call + # The unique identifier of the code interpreter tool call item. + sig { returns(String) } + attr_accessor :item_id - sig do - params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash - ).void - end - attr_writer :code_interpreter_call - - # The index of the output item that the code interpreter call is in progress. 
+ # The index of the output item in the response for which the code interpreter is + # interpreting code. sig { returns(Integer) } attr_accessor :output_index - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sig { returns(Integer) } attr_accessor :sequence_number @@ -39,19 +32,19 @@ module OpenAI # Emitted when the code interpreter is actively interpreting the code snippet. sig do params( - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( - # A tool call to run code. - code_interpreter_call:, - # The index of the output item that the code interpreter call is in progress. + # The unique identifier of the code interpreter tool call item. + item_id:, + # The index of the output item in the response for which the code interpreter is + # interpreting code. output_index:, - # The sequence number of this event. + # The sequence number of this event, used to order streaming events. sequence_number:, # The type of the event. Always `response.code_interpreter_call.interpreting`. type: :"response.code_interpreter_call.interpreting" @@ -61,8 +54,7 @@ module OpenAI sig do override.returns( { - code_interpreter_call: - OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: Symbol diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index 95089eaa..a4b99c63 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -16,22 +16,29 @@ module OpenAI sig { returns(String) } attr_accessor :id - # The code to run. - sig { returns(String) } + # The code to run, or null if not available. + sig { returns(T.nilable(String)) } attr_accessor :code - # The results of the code interpreter tool call. + # The ID of the container used to run the code. + sig { returns(String) } + attr_accessor :container_id + + # The outputs generated by the code interpreter, such as logs or images. Can be + # null if no outputs are available. sig do returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - ] + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image + ) + ] + ) ) end - attr_accessor :results + attr_accessor :outputs # The status of the code interpreter tool call. sig do @@ -45,42 +52,38 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - # The ID of the container used to run the code. - sig { returns(T.nilable(String)) } - attr_reader :container_id - - sig { params(container_id: String).void } - attr_writer :container_id - # A tool call to run code. 
sig do params( id: String, - code: String, - results: - T::Array[ - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs::OrHash, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::OrHash - ) - ], + code: T.nilable(String), + container_id: String, + outputs: + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image::OrHash + ) + ] + ), status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, - container_id: String, type: Symbol ).returns(T.attached_class) end def self.new( # The unique ID of the code interpreter tool call. id:, - # The code to run. + # The code to run, or null if not available. code:, - # The results of the code interpreter tool call. - results:, + # The ID of the container used to run the code. + container_id:, + # The outputs generated by the code interpreter, such as logs or images. Can be + # null if no outputs are available. + outputs:, # The status of the code interpreter tool call. status:, - # The ID of the container used to run the code. - container_id: nil, # The type of the code interpreter tool call. Always `code_interpreter_call`. type: :code_interpreter_call ) @@ -90,33 +93,35 @@ module OpenAI override.returns( { id: String, - code: String, - results: - T::Array[ - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - ], + code: T.nilable(String), + container_id: String, + outputs: + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image + ) + ] + ), status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, - type: Symbol, - container_id: String + type: Symbol } ) end def to_hash end - # The output of a code interpreter tool. - module Result + # The logs output from the code interpreter. + module Output extend OpenAI::Internal::Type::Union Variants = T.type_alias do T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image ) end @@ -124,25 +129,25 @@ module OpenAI OrHash = T.type_alias do T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Internal::AnyHash ) end - # The logs of the code interpreter tool call. + # The logs output from the code interpreter. sig { returns(String) } attr_accessor :logs - # The type of the code interpreter text output. Always `logs`. + # The type of the output. Always 'logs'. sig { returns(Symbol) } attr_accessor :type - # The output of a code interpreter tool call that is text. + # The logs output from the code interpreter. sig { params(logs: String, type: Symbol).returns(T.attached_class) } def self.new( - # The logs of the code interpreter tool call. + # The logs output from the code interpreter. logs:, - # The type of the code interpreter text output. Always `logs`. + # The type of the output. Always 'logs'. 
type: :logs ) end @@ -152,99 +157,42 @@ module OpenAI end end - class Files < OpenAI::Internal::Type::BaseModel + class Image < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image, OpenAI::Internal::AnyHash ) end - sig do - returns( - T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File - ] - ) - end - attr_accessor :files - - # The type of the code interpreter file output. Always `files`. + # The type of the output. Always 'image'. sig { returns(Symbol) } attr_accessor :type - # The output of a code interpreter tool call that is a file. - sig do - params( - files: - T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end + # The URL of the image output from the code interpreter. + sig { returns(String) } + attr_accessor :url + + # The image output from the code interpreter. + sig { params(url: String, type: Symbol).returns(T.attached_class) } def self.new( - files:, - # The type of the code interpreter file output. Always `files`. - type: :files + # The URL of the image output from the code interpreter. + url:, + # The type of the output. Always 'image'. + type: :image ) end - sig do - override.returns( - { - files: - T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File - ], - type: Symbol - } - ) - end + sig { override.returns({ type: Symbol, url: String }) } def to_hash end - - class File < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File, - OpenAI::Internal::AnyHash - ) - end - - # The ID of the file. - sig { returns(String) } - attr_accessor :file_id - - # The MIME type of the file. - sig { returns(String) } - attr_accessor :mime_type - - sig do - params(file_id: String, mime_type: String).returns( - T.attached_class - ) - end - def self.new( - # The ID of the file. - file_id:, - # The MIME type of the file. - mime_type: - ) - end - - sig { override.returns({ file_id: String, mime_type: String }) } - def to_hash - end - end end sig do override.returns( T::Array[ - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Variants + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Variants ] ) end @@ -270,14 +218,24 @@ module OpenAI :in_progress, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + ) INTERPRETING = T.let( :interpreting, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol ) - COMPLETED = + FAILED = T.let( - :completed, + :failed, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol ) diff --git a/rbi/openai/models/responses/response_output_text.rbi b/rbi/openai/models/responses/response_output_text.rbi index f59f7ada..ab31e485 100644 --- a/rbi/openai/models/responses/response_output_text.rbi +++ b/rbi/openai/models/responses/response_output_text.rbi @@ -127,6 +127,10 @@ module OpenAI sig { returns(String) } attr_accessor :file_id + # The filename of the file cited. + sig { returns(String) } + attr_accessor :filename + # The index of the file in the list of files. 
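The status enum also grows `incomplete` and `failed` alongside the existing values, so exhaustive handling needs two more terminal branches. A sketch over the same `call` object as above, assuming enum values deserialize to plain symbols:

    case call.status
    when :completed
      puts "call finished"
    when :incomplete, :failed  # new terminal states in this patch
      warn "call #{call.id} ended without usable output (#{call.status})"
    when :in_progress, :interpreting
      puts "call still running"
    end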
sig { returns(Integer) } attr_accessor :index @@ -137,13 +141,18 @@ module OpenAI # A citation to a file. sig do - params(file_id: String, index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + file_id: String, + filename: String, + index: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the file. file_id:, + # The filename of the file cited. + filename:, # The index of the file in the list of files. index:, # The type of the file citation. Always `file_citation`. @@ -153,7 +162,12 @@ module OpenAI sig do override.returns( - { file_id: String, index: Integer, type: Symbol } + { + file_id: String, + filename: String, + index: Integer, + type: Symbol + } ) end def to_hash @@ -249,6 +263,10 @@ module OpenAI sig { returns(String) } attr_accessor :file_id + # The filename of the container file cited. + sig { returns(String) } + attr_accessor :filename + # The index of the first character of the container file citation in the message. sig { returns(Integer) } attr_accessor :start_index @@ -263,6 +281,7 @@ module OpenAI container_id: String, end_index: Integer, file_id: String, + filename: String, start_index: Integer, type: Symbol ).returns(T.attached_class) @@ -274,6 +293,8 @@ module OpenAI end_index:, # The ID of the file. file_id:, + # The filename of the container file cited. + filename:, # The index of the first character of the container file citation in the message. start_index:, # The type of the container file citation. Always `container_file_citation`. @@ -287,6 +308,7 @@ module OpenAI container_id: String, end_index: Integer, file_id: String, + filename: String, start_index: Integer, type: Symbol } diff --git a/rbi/openai/resources/audio/speech.rbi b/rbi/openai/resources/audio/speech.rbi index 2c8d03a3..d7c6b56a 100644 --- a/rbi/openai/resources/audio/speech.rbi +++ b/rbi/openai/resources/audio/speech.rbi @@ -15,6 +15,8 @@ module OpenAI response_format: OpenAI::Audio::SpeechCreateParams::ResponseFormat::OrSymbol, speed: Float, + stream_format: + OpenAI::Audio::SpeechCreateParams::StreamFormat::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(StringIO) end @@ -36,8 +38,11 @@ module OpenAI # `wav`, and `pcm`. response_format: nil, # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is - # the default. Does not work with `gpt-4o-mini-tts`. + # the default. speed: nil, + # The format to stream the audio in. Supported formats are `sse` and `audio`. + # `sse` is not supported for `tts-1` or `tts-1-hd`. 
+ stream_format: nil, request_options: {} ) end diff --git a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi index dae4366b..be76789c 100644 --- a/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi +++ b/rbi/openai/resources/fine_tuning/checkpoints/permissions.rbi @@ -43,9 +43,7 @@ module OpenAI project_id: String, request_options: OpenAI::RequestOptions::OrHash ).returns( - OpenAI::Internal::CursorPage[ - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse - ] + OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse ) end def retrieve( diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 6c188596..f58474c0 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -8,7 +8,8 @@ module OpenAI voice: OpenAI::Models::Audio::SpeechCreateParams::voice, instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, - speed: Float + speed: Float, + stream_format: OpenAI::Models::Audio::SpeechCreateParams::stream_format } & OpenAI::Internal::Type::request_parameters @@ -36,6 +37,12 @@ module OpenAI def speed=: (Float) -> Float + attr_reader stream_format: OpenAI::Models::Audio::SpeechCreateParams::stream_format? + + def stream_format=: ( + OpenAI::Models::Audio::SpeechCreateParams::stream_format + ) -> OpenAI::Models::Audio::SpeechCreateParams::stream_format + def initialize: ( input: String, model: OpenAI::Models::Audio::SpeechCreateParams::model, @@ -43,6 +50,7 @@ module OpenAI ?instructions: String, ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, ?speed: Float, + ?stream_format: OpenAI::Models::Audio::SpeechCreateParams::stream_format, ?request_options: OpenAI::request_opts ) -> void @@ -53,6 +61,7 @@ module OpenAI instructions: String, response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, speed: Float, + stream_format: OpenAI::Models::Audio::SpeechCreateParams::stream_format, request_options: OpenAI::RequestOptions } @@ -110,6 +119,17 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::response_format] end + + type stream_format = :sse | :audio + + module StreamFormat + extend OpenAI::Internal::Type::Enum + + SSE: :sse + AUDIO: :audio + + def self?.values: -> ::Array[OpenAI::Models::Audio::SpeechCreateParams::stream_format] + end end end end diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index 2e1beee5..d7c58be3 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -4,7 +4,8 @@ module OpenAI type transcription = { text: String, - logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] + logprobs: ::Array[OpenAI::Audio::Transcription::Logprob], + usage: OpenAI::Models::Audio::Transcription::usage } class Transcription < OpenAI::Internal::Type::BaseModel @@ -16,14 +17,22 @@ module OpenAI ::Array[OpenAI::Audio::Transcription::Logprob] ) -> ::Array[OpenAI::Audio::Transcription::Logprob] + attr_reader usage: OpenAI::Models::Audio::Transcription::usage? 
+ + def usage=: ( + OpenAI::Models::Audio::Transcription::usage + ) -> OpenAI::Models::Audio::Transcription::usage + def initialize: ( text: String, - ?logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] + ?logprobs: ::Array[OpenAI::Audio::Transcription::Logprob], + ?usage: OpenAI::Models::Audio::Transcription::usage ) -> void def to_hash: -> { text: String, - logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] + logprobs: ::Array[OpenAI::Audio::Transcription::Logprob], + usage: OpenAI::Models::Audio::Transcription::usage } type logprob = { token: String, bytes: ::Array[Float], logprob: Float } @@ -53,6 +62,89 @@ module OpenAI logprob: Float } end + + type usage = + OpenAI::Audio::Transcription::Usage::Tokens + | OpenAI::Audio::Transcription::Usage::Duration + + module Usage + extend OpenAI::Internal::Type::Union + + type tokens = + { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: :tokens, + input_token_details: OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + } + + class Tokens < OpenAI::Internal::Type::BaseModel + attr_accessor input_tokens: Integer + + attr_accessor output_tokens: Integer + + attr_accessor total_tokens: Integer + + attr_accessor type: :tokens + + attr_reader input_token_details: OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails? + + def input_token_details=: ( + OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + ) -> OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + + def initialize: ( + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + ?input_token_details: OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails, + ?type: :tokens + ) -> void + + def to_hash: -> { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: :tokens, + input_token_details: OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails + } + + type input_token_details = + { audio_tokens: Integer, text_tokens: Integer } + + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + attr_reader audio_tokens: Integer? + + def audio_tokens=: (Integer) -> Integer + + attr_reader text_tokens: Integer? 
+ + def text_tokens=: (Integer) -> Integer + + def initialize: ( + ?audio_tokens: Integer, + ?text_tokens: Integer + ) -> void + + def to_hash: -> { audio_tokens: Integer, text_tokens: Integer } + end + end + + type duration = { duration: Float, type: :duration } + + class Duration < OpenAI::Internal::Type::BaseModel + attr_accessor duration: Float + + attr_accessor type: :duration + + def initialize: (duration: Float, ?type: :duration) -> void + + def to_hash: -> { duration: Float, type: :duration } + end + + def self?.variants: -> ::Array[OpenAI::Models::Audio::Transcription::usage] + end end end end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs index 53e317e8..b2a14db4 100644 --- a/sig/openai/models/audio/transcription_text_done_event.rbs +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -5,7 +5,8 @@ module OpenAI { text: String, type: :"transcript.text.done", - logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob], + usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage } class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel @@ -19,16 +20,24 @@ module OpenAI ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] ) -> ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + attr_reader usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage? + + def usage=: ( + OpenAI::Audio::TranscriptionTextDoneEvent::Usage + ) -> OpenAI::Audio::TranscriptionTextDoneEvent::Usage + def initialize: ( text: String, ?logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob], + ?usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage, ?type: :"transcript.text.done" ) -> void def to_hash: -> { text: String, type: :"transcript.text.done", - logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob], + usage: OpenAI::Audio::TranscriptionTextDoneEvent::Usage } type logprob = @@ -59,6 +68,67 @@ module OpenAI logprob: Float } end + + type usage = + { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: :tokens, + input_token_details: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor input_tokens: Integer + + attr_accessor output_tokens: Integer + + attr_accessor total_tokens: Integer + + attr_accessor type: :tokens + + attr_reader input_token_details: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails? + + def input_token_details=: ( + OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + ) -> OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + + def initialize: ( + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + ?input_token_details: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails, + ?type: :tokens + ) -> void + + def to_hash: -> { + input_tokens: Integer, + output_tokens: Integer, + total_tokens: Integer, + type: :tokens, + input_token_details: OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails + } + + type input_token_details = + { audio_tokens: Integer, text_tokens: Integer } + + class InputTokenDetails < OpenAI::Internal::Type::BaseModel + attr_reader audio_tokens: Integer? + + def audio_tokens=: (Integer) -> Integer + + attr_reader text_tokens: Integer? 
+ + def text_tokens=: (Integer) -> Integer + + def initialize: ( + ?audio_tokens: Integer, + ?text_tokens: Integer + ) -> void + + def to_hash: -> { audio_tokens: Integer, text_tokens: Integer } + end + end end end end diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs index 171bfa08..88f0d4af 100644 --- a/sig/openai/models/audio/transcription_verbose.rbs +++ b/sig/openai/models/audio/transcription_verbose.rbs @@ -7,6 +7,7 @@ module OpenAI language: String, text: String, segments: ::Array[OpenAI::Audio::TranscriptionSegment], + usage: OpenAI::Audio::TranscriptionVerbose::Usage, words: ::Array[OpenAI::Audio::TranscriptionWord] } @@ -23,6 +24,12 @@ module OpenAI ::Array[OpenAI::Audio::TranscriptionSegment] ) -> ::Array[OpenAI::Audio::TranscriptionSegment] + attr_reader usage: OpenAI::Audio::TranscriptionVerbose::Usage? + + def usage=: ( + OpenAI::Audio::TranscriptionVerbose::Usage + ) -> OpenAI::Audio::TranscriptionVerbose::Usage + attr_reader words: ::Array[OpenAI::Audio::TranscriptionWord]? def words=: ( @@ -34,6 +41,7 @@ module OpenAI language: String, text: String, ?segments: ::Array[OpenAI::Audio::TranscriptionSegment], + ?usage: OpenAI::Audio::TranscriptionVerbose::Usage, ?words: ::Array[OpenAI::Audio::TranscriptionWord] ) -> void @@ -42,8 +50,21 @@ module OpenAI language: String, text: String, segments: ::Array[OpenAI::Audio::TranscriptionSegment], + usage: OpenAI::Audio::TranscriptionVerbose::Usage, words: ::Array[OpenAI::Audio::TranscriptionWord] } + + type usage = { duration: Float, type: :duration } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor duration: Float + + attr_accessor type: :duration + + def initialize: (duration: Float, ?type: :duration) -> void + + def to_hash: -> { duration: Float, type: :duration } + end end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs index ef545155..54f9630a 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -4,34 +4,71 @@ module OpenAI module Checkpoints type permission_retrieve_response = { - id: String, - created_at: Integer, - object: :"checkpoint.permission", - project_id: String + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + object: :list, + first_id: String?, + last_id: String? } class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel - attr_accessor id: String + attr_accessor data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data] - attr_accessor created_at: Integer + attr_accessor has_more: bool - attr_accessor object: :"checkpoint.permission" + attr_accessor object: :list - attr_accessor project_id: String + attr_accessor first_id: String? + + attr_accessor last_id: String? 
def initialize: ( - id: String, - created_at: Integer, - project_id: String, - ?object: :"checkpoint.permission" + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + ?first_id: String?, + ?last_id: String?, + ?object: :list ) -> void def to_hash: -> { - id: String, - created_at: Integer, - object: :"checkpoint.permission", - project_id: String + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + object: :list, + first_id: String?, + last_id: String? } + + type data = + { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"checkpoint.permission" + + attr_accessor project_id: String + + def initialize: ( + id: String, + created_at: Integer, + project_id: String, + ?object: :"checkpoint.permission" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } + end end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 51d5e73d..e5f7b5de 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -4,6 +4,7 @@ module OpenAI type response_code_interpreter_call_code_delta_event = { delta: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call_code.delta" @@ -12,6 +13,8 @@ module OpenAI class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String + attr_accessor item_id: String + attr_accessor output_index: Integer attr_accessor sequence_number: Integer @@ -20,6 +23,7 @@ module OpenAI def initialize: ( delta: String, + item_id: String, output_index: Integer, sequence_number: Integer, ?type: :"response.code_interpreter_call_code.delta" @@ -27,6 +31,7 @@ module OpenAI def to_hash: -> { delta: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call_code.delta" diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index a025e240..57fe27ff 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -4,6 +4,7 @@ module OpenAI type response_code_interpreter_call_code_done_event = { code: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call_code.done" @@ -12,6 +13,8 @@ module OpenAI class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseModel attr_accessor code: String + attr_accessor item_id: String + attr_accessor output_index: Integer attr_accessor sequence_number: Integer @@ -20,6 +23,7 @@ module OpenAI def initialize: ( code: String, + item_id: String, output_index: Integer, sequence_number: Integer, ?type: :"response.code_interpreter_call_code.done" @@ -27,6 +31,7 @@ module OpenAI def to_hash: -> { code: String, + item_id: String, output_index: Integer, sequence_number: Integer, type: 
:"response.code_interpreter_call_code.done" diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index d7833fb2..ce97fd9b 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -3,14 +3,14 @@ module OpenAI module Responses type response_code_interpreter_call_completed_event = { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.completed" } class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall + attr_accessor item_id: String attr_accessor output_index: Integer @@ -19,14 +19,14 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.completed" def initialize: ( - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, ?type: :"response.code_interpreter_call.completed" ) -> void def to_hash: -> { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.completed" diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index 5efe8048..3ce614af 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -3,14 +3,14 @@ module OpenAI module Responses type response_code_interpreter_call_in_progress_event = { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.in_progress" } class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall + attr_accessor item_id: String attr_accessor output_index: Integer @@ -19,14 +19,14 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.in_progress" def initialize: ( - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, ?type: :"response.code_interpreter_call.in_progress" ) -> void def to_hash: -> { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.in_progress" diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index ccdd6dae..9fd220a6 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -3,14 +3,14 @@ module OpenAI module Responses type response_code_interpreter_call_interpreting_event = { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: 
String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.interpreting" } class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::BaseModel - attr_accessor code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall + attr_accessor item_id: String attr_accessor output_index: Integer @@ -19,14 +19,14 @@ module OpenAI attr_accessor type: :"response.code_interpreter_call.interpreting" def initialize: ( - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, ?type: :"response.code_interpreter_call.interpreting" ) -> void def to_hash: -> { - code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + item_id: String, output_index: Integer, sequence_number: Integer, type: :"response.code_interpreter_call.interpreting" diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index cfca1f50..343ac022 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -4,51 +4,49 @@ module OpenAI type response_code_interpreter_tool_call = { id: String, - code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + code: String?, + container_id: String, + outputs: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::output]?, status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, - type: :code_interpreter_call, - container_id: String + type: :code_interpreter_call } class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor code: String + attr_accessor code: String? - attr_accessor results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] + attr_accessor container_id: String + + attr_accessor outputs: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::output]? attr_accessor status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status attr_accessor type: :code_interpreter_call - attr_reader container_id: String? 
- - def container_id=: (String) -> String - def initialize: ( id: String, - code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + code: String?, + container_id: String, + outputs: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::output]?, status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, - ?container_id: String, ?type: :code_interpreter_call ) -> void def to_hash: -> { id: String, - code: String, - results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + code: String?, + container_id: String, + outputs: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::output]?, status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, - type: :code_interpreter_call, - container_id: String + type: :code_interpreter_call } - type result = - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs - | OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files + type output = + OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Logs + | OpenAI::Responses::ResponseCodeInterpreterToolCall::Output::Image - module Result + module Output extend OpenAI::Internal::Type::Union type logs = { logs: String, type: :logs } @@ -63,51 +61,32 @@ module OpenAI def to_hash: -> { logs: String, type: :logs } end - type files = - { - files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - type: :files - } - - class Files < OpenAI::Internal::Type::BaseModel - attr_accessor files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] - - attr_accessor type: :files + type image = { type: :image, url: String } - def initialize: ( - files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - ?type: :files - ) -> void + class Image < OpenAI::Internal::Type::BaseModel + attr_accessor type: :image - def to_hash: -> { - files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], - type: :files - } + attr_accessor url: String - type file = { file_id: String, mime_type: String } + def initialize: (url: String, ?type: :image) -> void - class File < OpenAI::Internal::Type::BaseModel - attr_accessor file_id: String - - attr_accessor mime_type: String - - def initialize: (file_id: String, mime_type: String) -> void - - def to_hash: -> { file_id: String, mime_type: String } - end + def to_hash: -> { type: :image, url: String } end - def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::output] end - type status = :in_progress | :interpreting | :completed + type status = + :in_progress | :completed | :incomplete | :interpreting | :failed module Status extend OpenAI::Internal::Type::Enum IN_PROGRESS: :in_progress - INTERPRETING: :interpreting COMPLETED: :completed + INCOMPLETE: :incomplete + INTERPRETING: :interpreting + FAILED: :failed def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index a72f5df5..c1ad5888 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -46,23 +46,32 @@ module OpenAI extend OpenAI::Internal::Type::Union type file_citation = - { file_id: 
String, index: Integer, type: :file_citation } + { + file_id: String, + filename: String, + index: Integer, + type: :file_citation + } class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String + attr_accessor filename: String + attr_accessor index: Integer attr_accessor type: :file_citation def initialize: ( file_id: String, + filename: String, index: Integer, ?type: :file_citation ) -> void def to_hash: -> { file_id: String, + filename: String, index: Integer, type: :file_citation } @@ -110,6 +119,7 @@ module OpenAI container_id: String, end_index: Integer, file_id: String, + filename: String, start_index: Integer, type: :container_file_citation } @@ -121,6 +131,8 @@ module OpenAI attr_accessor file_id: String + attr_accessor filename: String + attr_accessor start_index: Integer attr_accessor type: :container_file_citation @@ -129,6 +141,7 @@ module OpenAI container_id: String, end_index: Integer, file_id: String, + filename: String, start_index: Integer, ?type: :container_file_citation ) -> void @@ -137,6 +150,7 @@ module OpenAI container_id: String, end_index: Integer, file_id: String, + filename: String, start_index: Integer, type: :container_file_citation } diff --git a/sig/openai/resources/audio/speech.rbs b/sig/openai/resources/audio/speech.rbs index ad93110a..7155cd0b 100644 --- a/sig/openai/resources/audio/speech.rbs +++ b/sig/openai/resources/audio/speech.rbs @@ -9,6 +9,7 @@ module OpenAI ?instructions: String, ?response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, ?speed: Float, + ?stream_format: OpenAI::Models::Audio::SpeechCreateParams::stream_format, ?request_options: OpenAI::request_opts ) -> StringIO diff --git a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs index 011e1286..f36dcbbb 100644 --- a/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs +++ b/sig/openai/resources/fine_tuning/checkpoints/permissions.rbs @@ -16,7 +16,7 @@ module OpenAI ?order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, ?project_id: String, ?request_options: OpenAI::request_opts - ) -> OpenAI::Internal::CursorPage[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse] + ) -> OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse def delete: ( String permission_id, diff --git a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb index 60bc8e8f..0ed0d4c1 100644 --- a/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb +++ b/test/openai/resources/fine_tuning/checkpoints/permissions_test.rb @@ -35,22 +35,16 @@ def test_retrieve response = @openai.fine_tuning.checkpoints.permissions.retrieve("ft-AF1WoRqd3aJAHsqc9NY7iL8F") assert_pattern do - response => OpenAI::Internal::CursorPage - end - - row = response.to_enum.first - return if row.nil? 
- - assert_pattern do - row => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse + response => OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse end assert_pattern do - row => { - id: String, - created_at: Integer, + response => { + data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data]), + has_more: OpenAI::Internal::Type::Boolean, object: Symbol, - project_id: String + first_id: String | nil, + last_id: String | nil } end end diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 51476189..cb00ba85 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -94,10 +94,10 @@ def test_list in { type: :code_interpreter_call, id: String, - code: String, - results: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result]), - status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status, - container_id: String | nil + code: String | nil, + container_id: String, + outputs: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output]) | nil, + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status } in { type: :local_shell_call, From 4e8ee6437034e524753080025372df62d830f505 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 23 Jun 2025 21:04:45 +0000 Subject: [PATCH 238/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 6d78745c..091cfb12 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.9.0" + ".": "0.10.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 48d1c60f..16924ce7 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.9.0) + openai (0.10.0) connection_pool GEM diff --git a/README.md b/README.md index 672a19e6..ef9c582e 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.9.0" +gem "openai", "~> 0.10.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 502435cb..067cb054 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.9.0" + VERSION = "0.10.0" end From 3c14f3f51558854c3931452968e6aadefa6aec65 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 24 Jun 2025 20:28:28 +0000 Subject: [PATCH 239/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index e6290b25..5a680807 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-ef4ecb19eb61e24c49d77fef769ee243e5279bc0bdbaee8d0f8dba4da8722559.yml openapi_spec_hash: 1b8a9767c9f04e6865b06c41948cdc24 -config_hash: fd2af1d5eff0995bb7dc02ac9a34851d +config_hash: cae2d1f187b5b9f8dfa00daa807da42a From 6eaffcaf0d7261bb442209e3cf3bf7dd8516d808 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 14:04:51 +0000 Subject: [PATCH 240/295] =?UTF-8?q?fix(ci):=20release-doctor=20=E2=80=94?= =?UTF-8?q?=20report=20correct=20token=20name?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- bin/check-release-environment | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/check-release-environment b/bin/check-release-environment index 6aa95c4f..468572ab 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -7,7 +7,7 @@ if [ -z "${STAINLESS_API_KEY}" ]; then fi if [ -z "${GEM_HOST_API_KEY}" ]; then - errors+=("The OPENAI_GEM_HOST_API_KEY secret has not been set. Please set it in either this repository's secrets or your organization secrets") + errors+=("The GEM_HOST_API_KEY secret has not been set. 
Please set it in either this repository's secrets or your organization secrets") fi lenErrors=${#errors[@]} From 11755bf6c1efc50b61f94dc09e072593704adaed Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 16:51:20 +0000 Subject: [PATCH 241/295] feat(api): webhook and deep research support --- .stats.yml | 6 +- lib/openai.rb | 18 ++ lib/openai/client.rb | 4 + lib/openai/models.rb | 2 + lib/openai/models/all_models.rb | 4 + lib/openai/models/chat/chat_completion.rb | 63 +++--- .../models/chat/chat_completion_chunk.rb | 59 +++--- .../models/chat/completion_create_params.rb | 65 ++++--- lib/openai/models/images_response.rb | 93 ++++++++- lib/openai/models/responses/response.rb | 94 +++++---- .../responses/response_create_params.rb | 103 ++++++---- .../responses/response_function_web_search.rb | 124 +++++++++++- .../models/responses/response_includable.rb | 14 +- .../models/responses/tool_choice_mcp.rb | 40 ++++ .../models/responses/tool_choice_types.rb | 3 - lib/openai/models/responses_model.rb | 4 + .../webhooks/batch_cancelled_webhook_event.rb | 84 ++++++++ .../webhooks/batch_completed_webhook_event.rb | 84 ++++++++ .../webhooks/batch_expired_webhook_event.rb | 84 ++++++++ .../webhooks/batch_failed_webhook_event.rb | 84 ++++++++ .../eval_run_canceled_webhook_event.rb | 84 ++++++++ .../webhooks/eval_run_failed_webhook_event.rb | 84 ++++++++ .../eval_run_succeeded_webhook_event.rb | 84 ++++++++ ...fine_tuning_job_cancelled_webhook_event.rb | 85 +++++++++ .../fine_tuning_job_failed_webhook_event.rb | 85 +++++++++ ...fine_tuning_job_succeeded_webhook_event.rb | 85 +++++++++ .../response_cancelled_webhook_event.rb | 85 +++++++++ .../response_completed_webhook_event.rb | 85 +++++++++ .../webhooks/response_failed_webhook_event.rb | 84 ++++++++ .../response_incomplete_webhook_event.rb | 85 +++++++++ .../models/webhooks/unwrap_webhook_event.rb | 59 ++++++ .../models/webhooks/webhook_unwrap_params.rb | 16 ++ lib/openai/resources/chat/completions.rb | 4 +- lib/openai/resources/responses.rb | 20 +- lib/openai/resources/webhooks.rb | 24 +++ rbi/openai/client.rbi | 3 + rbi/openai/models.rbi | 2 + rbi/openai/models/all_models.rbi | 20 ++ rbi/openai/models/chat/chat_completion.rbi | 89 +++++---- .../models/chat/chat_completion_chunk.rbi | 89 +++++---- .../models/chat/completion_create_params.rbi | 93 +++++---- rbi/openai/models/images_response.rbi | 146 ++++++++++++++ rbi/openai/models/responses/response.rbi | 119 +++++++----- .../responses/response_create_params.rbi | 146 ++++++++------ .../response_function_web_search.rbi | 180 ++++++++++++++++++ .../models/responses/response_includable.rbi | 28 +-- .../models/responses/tool_choice_mcp.rbi | 53 ++++++ .../models/responses/tool_choice_types.rbi | 5 - rbi/openai/models/responses_model.rbi | 20 ++ .../batch_cancelled_webhook_event.rbi | 154 +++++++++++++++ .../batch_completed_webhook_event.rbi | 154 +++++++++++++++ .../webhooks/batch_expired_webhook_event.rbi | 150 +++++++++++++++ .../webhooks/batch_failed_webhook_event.rbi | 149 +++++++++++++++ .../eval_run_canceled_webhook_event.rbi | 154 +++++++++++++++ .../eval_run_failed_webhook_event.rbi | 151 +++++++++++++++ .../eval_run_succeeded_webhook_event.rbi | 154 +++++++++++++++ ...ine_tuning_job_cancelled_webhook_event.rbi | 158 +++++++++++++++ .../fine_tuning_job_failed_webhook_event.rbi | 156 +++++++++++++++ ...ine_tuning_job_succeeded_webhook_event.rbi | 158 +++++++++++++++ .../response_cancelled_webhook_event.rbi | 154 
+++++++++++++++ .../response_completed_webhook_event.rbi | 154 +++++++++++++++ .../response_failed_webhook_event.rbi | 154 +++++++++++++++ .../response_incomplete_webhook_event.rbi | 155 +++++++++++++++ .../models/webhooks/unwrap_webhook_event.rbi | 40 ++++ .../models/webhooks/webhook_unwrap_params.rbi | 32 ++++ rbi/openai/resources/chat/completions.rbi | 64 ++++--- rbi/openai/resources/responses.rbi | 100 ++++++---- rbi/openai/resources/webhooks.rbi | 39 ++++ sig/openai/client.rbs | 2 + sig/openai/models.rbs | 2 + sig/openai/models/all_models.rbs | 8 + sig/openai/models/chat/chat_completion.rbs | 3 +- .../models/chat/chat_completion_chunk.rbs | 3 +- .../models/chat/completion_create_params.rbs | 3 +- sig/openai/models/images_response.rbs | 83 ++++++++ sig/openai/models/responses/response.rbs | 14 +- .../responses/response_create_params.rbs | 14 +- .../response_function_web_search.rbs | 67 +++++++ .../models/responses/response_includable.rbs | 12 +- .../models/responses/tool_choice_mcp.rbs | 23 +++ .../models/responses/tool_choice_types.rbs | 2 - sig/openai/models/responses_model.rbs | 8 + .../batch_cancelled_webhook_event.rbs | 66 +++++++ .../batch_completed_webhook_event.rbs | 66 +++++++ .../webhooks/batch_expired_webhook_event.rbs | 66 +++++++ .../webhooks/batch_failed_webhook_event.rbs | 66 +++++++ .../eval_run_canceled_webhook_event.rbs | 66 +++++++ .../eval_run_failed_webhook_event.rbs | 66 +++++++ .../eval_run_succeeded_webhook_event.rbs | 66 +++++++ ...ine_tuning_job_cancelled_webhook_event.rbs | 66 +++++++ .../fine_tuning_job_failed_webhook_event.rbs | 66 +++++++ ...ine_tuning_job_succeeded_webhook_event.rbs | 66 +++++++ .../response_cancelled_webhook_event.rbs | 66 +++++++ .../response_completed_webhook_event.rbs | 66 +++++++ .../response_failed_webhook_event.rbs | 66 +++++++ .../response_incomplete_webhook_event.rbs | 66 +++++++ .../models/webhooks/unwrap_webhook_event.rbs | 27 +++ .../models/webhooks/webhook_unwrap_params.rbs | 17 ++ sig/openai/resources/responses.rbs | 4 + sig/openai/resources/webhooks.rbs | 24 +++ test/openai/resources/images_test.rb | 12 ++ .../resources/responses/input_items_test.rb | 7 +- test/openai/resources/responses_test.rb | 6 + test/openai/resources/webhooks_test.rb | 6 + 104 files changed, 6186 insertions(+), 509 deletions(-) create mode 100644 lib/openai/models/responses/tool_choice_mcp.rb create mode 100644 lib/openai/models/webhooks/batch_cancelled_webhook_event.rb create mode 100644 lib/openai/models/webhooks/batch_completed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/batch_expired_webhook_event.rb create mode 100644 lib/openai/models/webhooks/batch_failed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/eval_run_canceled_webhook_event.rb create mode 100644 lib/openai/models/webhooks/eval_run_failed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/eval_run_succeeded_webhook_event.rb create mode 100644 lib/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rb create mode 100644 lib/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rb create mode 100644 lib/openai/models/webhooks/response_cancelled_webhook_event.rb create mode 100644 lib/openai/models/webhooks/response_completed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/response_failed_webhook_event.rb create mode 100644 lib/openai/models/webhooks/response_incomplete_webhook_event.rb create mode 100644 
lib/openai/models/webhooks/unwrap_webhook_event.rb create mode 100644 lib/openai/models/webhooks/webhook_unwrap_params.rb create mode 100644 lib/openai/resources/webhooks.rb create mode 100644 rbi/openai/models/responses/tool_choice_mcp.rbi create mode 100644 rbi/openai/models/webhooks/batch_cancelled_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/batch_completed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/batch_expired_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/batch_failed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/eval_run_canceled_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/eval_run_failed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/eval_run_succeeded_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/response_cancelled_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/response_completed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/response_failed_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/response_incomplete_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/unwrap_webhook_event.rbi create mode 100644 rbi/openai/models/webhooks/webhook_unwrap_params.rbi create mode 100644 rbi/openai/resources/webhooks.rbi create mode 100644 sig/openai/models/responses/tool_choice_mcp.rbs create mode 100644 sig/openai/models/webhooks/batch_cancelled_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/batch_completed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/batch_expired_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/batch_failed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/eval_run_canceled_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/eval_run_failed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/eval_run_succeeded_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/response_cancelled_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/response_completed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/response_failed_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/response_incomplete_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/unwrap_webhook_event.rbs create mode 100644 sig/openai/models/webhooks/webhook_unwrap_params.rbs create mode 100644 sig/openai/resources/webhooks.rbs create mode 100644 test/openai/resources/webhooks_test.rb diff --git a/.stats.yml b/.stats.yml index 5a680807..e0c854cb 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-ef4ecb19eb61e24c49d77fef769ee243e5279bc0bdbaee8d0f8dba4da8722559.yml -openapi_spec_hash: 1b8a9767c9f04e6865b06c41948cdc24 -config_hash: cae2d1f187b5b9f8dfa00daa807da42a +openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-cca460eaf5cc13e9d6e5293eb97aac53d66dc1385c691f74b768c97d165b6e8b.yml +openapi_spec_hash: 9ec43d443b3dd58ca5aa87eb0a7eb49f +config_hash: e74d6791681e3af1b548748ff47a22c2 diff --git a/lib/openai.rb b/lib/openai.rb index 8fbad7a4..c637e6b1 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -437,6 +437,7 @@ require_relative "openai/models/responses/response_web_search_call_searching_event" require_relative "openai/models/responses/tool" require_relative "openai/models/responses/tool_choice_function" +require_relative "openai/models/responses/tool_choice_mcp" require_relative "openai/models/responses/tool_choice_options" require_relative "openai/models/responses/tool_choice_types" require_relative "openai/models/responses/web_search_tool" @@ -473,6 +474,22 @@ require_relative "openai/models/vector_store_search_params" require_relative "openai/models/vector_store_search_response" require_relative "openai/models/vector_store_update_params" +require_relative "openai/models/webhooks/batch_cancelled_webhook_event" +require_relative "openai/models/webhooks/batch_completed_webhook_event" +require_relative "openai/models/webhooks/batch_expired_webhook_event" +require_relative "openai/models/webhooks/batch_failed_webhook_event" +require_relative "openai/models/webhooks/eval_run_canceled_webhook_event" +require_relative "openai/models/webhooks/eval_run_failed_webhook_event" +require_relative "openai/models/webhooks/eval_run_succeeded_webhook_event" +require_relative "openai/models/webhooks/fine_tuning_job_cancelled_webhook_event" +require_relative "openai/models/webhooks/fine_tuning_job_failed_webhook_event" +require_relative "openai/models/webhooks/fine_tuning_job_succeeded_webhook_event" +require_relative "openai/models/webhooks/response_cancelled_webhook_event" +require_relative "openai/models/webhooks/response_completed_webhook_event" +require_relative "openai/models/webhooks/response_failed_webhook_event" +require_relative "openai/models/webhooks/response_incomplete_webhook_event" +require_relative "openai/models/webhooks/unwrap_webhook_event" +require_relative "openai/models/webhooks/webhook_unwrap_params" require_relative "openai/models" require_relative "openai/resources/audio" require_relative "openai/resources/audio/speech" @@ -517,3 +534,4 @@ require_relative "openai/resources/vector_stores" require_relative "openai/resources/vector_stores/file_batches" require_relative "openai/resources/vector_stores/files" +require_relative "openai/resources/webhooks" diff --git a/lib/openai/client.rb b/lib/openai/client.rb index b583ead2..08a2a7da 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -57,6 +57,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::VectorStores] attr_reader :vector_stores + # @return [OpenAI::Resources::Webhooks] + attr_reader :webhooks + # @return [OpenAI::Resources::Beta] attr_reader :beta @@ -145,6 +148,7 @@ def initialize( @fine_tuning = OpenAI::Resources::FineTuning.new(client: self) @graders = OpenAI::Resources::Graders.new(client: self) @vector_stores = OpenAI::Resources::VectorStores.new(client: self) + @webhooks = OpenAI::Resources::Webhooks.new(client: self) @beta = OpenAI::Resources::Beta.new(client: self) @batches = OpenAI::Resources::Batches.new(client: self) @uploads = OpenAI::Resources::Uploads.new(client: self) diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 5a1af010..3261259a 100644 --- a/lib/openai/models.rb +++ 
b/lib/openai/models.rb
@@ -234,4 +234,6 @@ module OpenAI
   VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams
 
   VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams
+
+  Webhooks = OpenAI::Models::Webhooks
 end
diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb
index 93144d36..188ab131 100644
--- a/lib/openai/models/all_models.rb
+++ b/lib/openai/models/all_models.rb
@@ -18,6 +18,10 @@ module ResponsesOnlyModel
         O1_PRO_2025_03_19 = :"o1-pro-2025-03-19"
         O3_PRO = :"o3-pro"
         O3_PRO_2025_06_10 = :"o3-pro-2025-06-10"
+        O3_DEEP_RESEARCH = :"o3-deep-research"
+        O3_DEEP_RESEARCH_2025_06_26 = :"o3-deep-research-2025-06-26"
+        O4_MINI_DEEP_RESEARCH = :"o4-mini-deep-research"
+        O4_MINI_DEEP_RESEARCH_2025_06_26 = :"o4-mini-deep-research-2025-06-26"
 
         COMPUTER_USE_PREVIEW = :"computer-use-preview"
         COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11"
diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb
index 79d3e56e..7da96170 100644
--- a/lib/openai/models/chat/chat_completion.rb
+++ b/lib/openai/models/chat/chat_completion.rb
@@ -39,23 +39,23 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel
       required :object, const: :"chat.completion"
 
       # @!attribute service_tier
-      #   Specifies the latency tier to use for processing the request. This parameter is
-      #   relevant for customers subscribed to the scale tier service:
-      #
-      #   - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #     utilize scale tier credits until they are exhausted.
-      #   - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #     be processed using the default service tier with a lower uptime SLA and no
-      #     latency guarantee.
-      #   - If set to 'default', the request will be processed using the default service
-      #     tier with a lower uptime SLA and no latency guarantee.
-      #   - If set to 'flex', the request will be processed with the Flex Processing
-      #     service tier.
-      #     [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      #   Specifies the processing type used for serving the request.
+      #
+      #   - If set to 'auto', then the request will be processed with the service tier
+      #     configured in the Project settings. Unless otherwise configured, the Project
+      #     will use 'default'.
+      #   - If set to 'default', then the request will be processed with the standard
+      #     pricing and performance for the selected model.
+      #   - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #     'priority', then the request will be processed with the corresponding service
+      #     tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #     Priority processing.
       #   - When not set, the default behavior is 'auto'.
       #
-      #   When this parameter is set, the response body will include the `service_tier`
-      #   utilized.
+      #   When the `service_tier` parameter is set, the response body will include the
+      #   `service_tier` value based on the processing mode actually used to serve the
+      #   request. This response value may be different from the value set in the
+      #   parameter.
       #
       #   @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil]
       optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletion::ServiceTier }, nil?: true
@@ -90,7 +90,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel
       #
       #   @param model [String] The model used for the chat completion.
       #
-      #   @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
+      #   @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the processing type used for serving the request.
       #
       #   @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with.
       #
@@ -188,23 +188,23 @@ class Logprobs < OpenAI::Internal::Type::BaseModel
         end
       end
 
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
-      #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # Specifies the processing type used for serving the request.
+      #
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       #
       # @see OpenAI::Models::Chat::ChatCompletion#service_tier
       module ServiceTier
         extend OpenAI::Internal::Type::Enum
@@ -214,6 +214,7 @@ module ServiceTier
         DEFAULT = :default
         FLEX = :flex
         SCALE = :scale
+        PRIORITY = :priority
 
         # @!method self.values
         #   @return [Array<Symbol>]
       end
diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb
index bdce048f..f624e66d 100644
--- a/lib/openai/models/chat/chat_completion_chunk.rb
+++ b/lib/openai/models/chat/chat_completion_chunk.rb
@@ -38,23 +38,23 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel
       required :object, const: :"chat.completion.chunk"
 
       # @!attribute service_tier
-      #   Specifies the latency tier to use for processing the request. This parameter is
-      #   relevant for customers subscribed to the scale tier service:
+      #   Specifies the processing type used for serving the request.
       #
-      #   - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #     utilize scale tier credits until they are exhausted.
-      #   - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #     be processed using the default service tier with a lower uptime SLA and no
-      #     latency guarantee.
-      #   - If set to 'default', the request will be processed using the default service
-      #     tier with a lower uptime SLA and no latency guarantee.
-      #   - If set to 'flex', the request will be processed with the Flex Processing
-      #     service tier.
-      #     [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      #   - If set to 'auto', then the request will be processed with the service tier
+      #     configured in the Project settings. Unless otherwise configured, the Project
+      #     will use 'default'.
+      #   - If set to 'default', then the request will be processed with the standard
+      #     pricing and performance for the selected model.
+      #   - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #     'priority', then the request will be processed with the corresponding service
+      #     tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #     Priority processing.
       #   - When not set, the default behavior is 'auto'.
       #
-      #   When this parameter is set, the response body will include the `service_tier`
-      #   utilized.
+      #   When the `service_tier` parameter is set, the response body will include the
+      #   `service_tier` value based on the processing mode actually used to serve the
+      #   request. This response value may be different from the value set in the
+      #   parameter.
       #
       #   @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil]
       optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletionChunk::ServiceTier }, nil?: true
@@ -95,7 +95,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel
       #
       #   @param model [String] The model to generate the completion.
       #
-      #   @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
+      #   @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the processing type used for serving the request.
       #
       #   @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with.
       #
@@ -371,23 +371,23 @@ class Logprobs < OpenAI::Internal::Type::BaseModel
         end
       end
 
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
+      # Specifies the processing type used for serving the request.
       #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       #
       # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier
       module ServiceTier
         extend OpenAI::Internal::Type::Enum
@@ -397,6 +397,7 @@ module ServiceTier
         DEFAULT = :default
         FLEX = :flex
         SCALE = :scale
+        PRIORITY = :priority
 
         # @!method self.values
         #   @return [Array<Symbol>]
       end
diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb
index c47d3728..53a2c521 100644
--- a/lib/openai/models/chat/completion_create_params.rb
+++ b/lib/openai/models/chat/completion_create_params.rb
@@ -219,23 +219,23 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
       optional :seed, Integer, nil?: true
 
       # @!attribute service_tier
-      #   Specifies the latency tier to use for processing the request. This parameter is
-      #   relevant for customers subscribed to the scale tier service:
-      #
-      #   - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #     utilize scale tier credits until they are exhausted.
-      #   - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #     be processed using the default service tier with a lower uptime SLA and no
-      #     latency guarantee.
-      #   - If set to 'default', the request will be processed using the default service
-      #     tier with a lower uptime SLA and no latency guarantee.
-      #   - If set to 'flex', the request will be processed with the Flex Processing
-      #     service tier.
-      #     [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      #   Specifies the processing type used for serving the request.
+      #
+      #   - If set to 'auto', then the request will be processed with the service tier
+      #     configured in the Project settings. Unless otherwise configured, the Project
+      #     will use 'default'.
+      #   - If set to 'default', then the request will be processed with the standard
+      #     pricing and performance for the selected model.
+      #   - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #     'priority', then the request will be processed with the corresponding service
+      #     tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #     Priority processing.
       #   - When not set, the default behavior is 'auto'.
       #
-      #   When this parameter is set, the response body will include the `service_tier`
-      #   utilized.
+      #   When the `service_tier` parameter is set, the response body will include the
+      #   `service_tier` value based on the processing mode actually used to serve the
+      #   request. This response value may be different from the value set in the
+      #   parameter.
       #
       #   @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil]
       optional :service_tier, enum: -> { OpenAI::Chat::CompletionCreateParams::ServiceTier }, nil?: true
@@ -254,6 +254,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
       #   our [model distillation](https://platform.openai.com/docs/guides/distillation)
       #   or [evals](https://platform.openai.com/docs/guides/evals) products.
       #
+      #   Supports text and image inputs. Note: image inputs over 10MB will be dropped.
+      #
       #   @return [Boolean, nil]
       optional :store, OpenAI::Internal::Type::Boolean, nil?: true
@@ -370,7 +372,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel
       #
       #   @param seed [Integer, nil] This feature is in Beta.
       #
-      #   @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
+      #   @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request.
       #
       #   @param stop [String, Array<String>, nil] Not supported with latest reasoning models `o3` and `o4-mini`.
       #
@@ -535,23 +537,23 @@ module ResponseFormat
         #   @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)]
       end
 
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
-      #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # Specifies the processing type used for serving the request.
+      #
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       module ServiceTier
         extend OpenAI::Internal::Type::Enum
@@ -559,6 +561,7 @@ module ServiceTier
         DEFAULT = :default
         FLEX = :flex
         SCALE = :scale
+        PRIORITY = :priority
 
         # @!method self.values
         #   @return [Array<Symbol>]
       end
diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb
index ecd33bc9..d6f6e63d 100644
--- a/lib/openai/models/images_response.rb
+++ b/lib/openai/models/images_response.rb
@@ -10,19 +10,45 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel
       #   @return [Integer]
       required :created, Integer
 
+      # @!attribute background
+      #   The background parameter used for the image generation. Either `transparent` or
+      #   `opaque`.
+ # + # @return [Symbol, OpenAI::Models::ImagesResponse::Background, nil] + optional :background, enum: -> { OpenAI::ImagesResponse::Background } + # @!attribute data # The list of generated images. # # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Image] } + # @!attribute output_format + # The output format of the image generation. Either `png`, `webp`, or `jpeg`. + # + # @return [Symbol, OpenAI::Models::ImagesResponse::OutputFormat, nil] + optional :output_format, enum: -> { OpenAI::ImagesResponse::OutputFormat } + + # @!attribute quality + # The quality of the image generated. Either `low`, `medium`, or `high`. + # + # @return [Symbol, OpenAI::Models::ImagesResponse::Quality, nil] + optional :quality, enum: -> { OpenAI::ImagesResponse::Quality } + + # @!attribute size + # The size of the image generated. Either `1024x1024`, `1024x1536`, or + # `1536x1024`. + # + # @return [Symbol, OpenAI::Models::ImagesResponse::Size, nil] + optional :size, enum: -> { OpenAI::ImagesResponse::Size } + # @!attribute usage # For `gpt-image-1` only, the token usage information for the image generation. # # @return [OpenAI::Models::ImagesResponse::Usage, nil] optional :usage, -> { OpenAI::ImagesResponse::Usage } - # @!method initialize(created:, data: nil, usage: nil) + # @!method initialize(created:, background: nil, data: nil, output_format: nil, quality: nil, size: nil, usage: nil) # Some parameter documentations has been truncated, see # {OpenAI::Models::ImagesResponse} for more details. # @@ -30,10 +56,75 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel # # @param created [Integer] The Unix timestamp (in seconds) of when the image was created. # + # @param background [Symbol, OpenAI::Models::ImagesResponse::Background] The background parameter used for the image generation. Either `transparent` or + # # @param data [Array] The list of generated images. # + # @param output_format [Symbol, OpenAI::Models::ImagesResponse::OutputFormat] The output format of the image generation. Either `png`, `webp`, or `jpeg`. + # + # @param quality [Symbol, OpenAI::Models::ImagesResponse::Quality] The quality of the image generated. Either `low`, `medium`, or `high`. + # + # @param size [Symbol, OpenAI::Models::ImagesResponse::Size] The size of the image generated. Either `1024x1024`, `1024x1536`, or `1536x1024` + # # @param usage [OpenAI::Models::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # The background parameter used for the image generation. Either `transparent` or + # `opaque`. + # + # @see OpenAI::Models::ImagesResponse#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + + # @!method self.values + # @return [Array] + end + + # The output format of the image generation. Either `png`, `webp`, or `jpeg`. + # + # @see OpenAI::Models::ImagesResponse#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality of the image generated. Either `low`, `medium`, or `high`. + # + # @see OpenAI::Models::ImagesResponse#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + + # The size of the image generated. Either `1024x1024`, `1024x1536`, or + # `1536x1024`. 
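A minimal sketch of reading the new `ImagesResponse` metadata (assuming a configured `OpenAI::Client`; the prompt is a placeholder, and the request-side parameters accepted by `ImageGenerateParams` are outside this excerpt):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    response = client.images.generate(
      model: "gpt-image-1",
      prompt: "a watercolor lighthouse at dusk"
    )

    # The generation settings now ride along with the image data.
    puts response.background     # e.g. :opaque
    puts response.output_format  # e.g. :png
    puts response.quality        # e.g. :high
    puts response.size           # e.g. :"1024x1024"
    puts response.usage&.input_tokens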
+ #
# @see OpenAI::Models::ImagesResponse#size
module Size
extend OpenAI::Internal::Type::Enum

SIZE_1024X1024 = :"1024x1024"
SIZE_1024X1536 = :"1024x1536"
SIZE_1536X1024 = :"1536x1024"

# @!method self.values
# @return [Array]
end

# @see OpenAI::Models::ImagesResponse#usage
class Usage < OpenAI::Internal::Type::BaseModel
# @!attribute input_tokens
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb
index 0fbf7be3..d963980a 100644
--- a/lib/openai/models/responses/response.rb
+++ b/lib/openai/models/responses/response.rb
@@ -100,7 +100,7 @@ class Response < OpenAI::Internal::Type::BaseModel
# response. See the `tools` parameter to see how to specify which tools the model
# can call.
#
- # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction]
+ # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp]
required :tool_choice, union: -> { OpenAI::Responses::Response::ToolChoice }

# @!attribute tools
@@ -147,6 +147,15 @@ class Response < OpenAI::Internal::Type::BaseModel
# @return [Integer, nil]
optional :max_output_tokens, Integer, nil?: true

+ # @!attribute max_tool_calls
+ # The maximum number of total calls to built-in tools that can be processed in a
+ # response. This maximum number applies across all built-in tool calls, not per
+ # individual tool. Any further attempts to call a tool by the model will be
+ # ignored.
+ #
+ # @return [Integer, nil]
+ optional :max_tool_calls, Integer, nil?: true
+
# @!attribute previous_response_id
# The unique ID of the previous response to the model. Use this to create
# multi-turn conversations. Learn more about
@@ -172,23 +181,23 @@ class Response < OpenAI::Internal::Type::BaseModel
optional :reasoning, -> { OpenAI::Reasoning }, nil?: true

# @!attribute service_tier
- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # Specifies the processing type used for serving the request.
+ #
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
# - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. # # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Responses::Response::ServiceTier }, nil?: true @@ -210,6 +219,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Responses::ResponseTextConfig } + # @!attribute top_logprobs + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + # + # @return [Integer, nil] + optional :top_logprobs, Integer, nil?: true + # @!attribute truncation # The truncation strategy to use for the model response. # @@ -237,7 +253,7 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. # @@ -261,7 +277,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating # # @param tools [Array] An array of tools the model may call while generating a response. You # @@ -271,18 +287,22 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # + # @param max_tool_calls [Integer, nil] The maximum number of total calls to built-in tools that can be processed in a r + # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. 
#
# @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only**
#
- # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is
+ # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the processing type used for serving the request.
#
# @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`,
#
# @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain
#
+ # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to
+ #
# @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] The truncation strategy to use for the model response.
#
# @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens,
@@ -369,27 +389,30 @@ module ToolChoice
# Use this option to force the model to call a specific function.
variant -> { OpenAI::Responses::ToolChoiceFunction }

+ # Use this option to force the model to call a specific tool on a remote MCP server.
+ variant -> { OpenAI::Responses::ToolChoiceMcp }
+
# @!method self.variants
- # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)]
+ # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp)]
end

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # Specifies the processing type used for serving the request.
+ #
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request.
This response value may be different from the value set in the + # parameter. # # @see OpenAI::Models::Responses::Response#service_tier module ServiceTier @@ -399,6 +422,7 @@ module ServiceTier DEFAULT = :default FLEX = :flex SCALE = :scale + PRIORITY = :priority # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 117716ed..86a9f56f 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -21,18 +21,19 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Specify additional output data to include in the model response. Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. # # @return [Array, nil] optional :include, @@ -71,6 +72,15 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :max_output_tokens, Integer, nil?: true + # @!attribute max_tool_calls + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + # + # @return [Integer, nil] + optional :max_tool_calls, Integer, nil?: true + # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -123,23 +133,23 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: - # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). 
+ # Specifies the processing type used for serving the request.
+ #
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
#
# @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil]
optional :service_tier, enum: -> { OpenAI::Responses::ResponseCreateParams::ServiceTier }, nil?: true
@@ -174,7 +184,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
# response. See the `tools` parameter to see how to specify which tools the model
# can call.
#
- # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil]
+ # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, nil]
optional :tool_choice, union: -> { OpenAI::Responses::ResponseCreateParams::ToolChoice }

# @!attribute tools
@@ -196,6 +206,13 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel
# @return [Array, nil]
optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] }

+ # @!attribute top_logprobs
+ # An integer between 0 and 20 specifying the number of most likely tokens to
+ # return at each token position, each with an associated log probability.
+ #
+ # @return [Integer, nil]
+ optional :top_logprobs, Integer, nil?: true
+
# @!attribute top_p
# An alternative to sampling with temperature, called nucleus sampling, where the
# model considers the results of the tokens with top_p probability mass.
So 0.1 @@ -226,7 +243,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # @@ -240,6 +257,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # + # @param max_tool_calls [Integer, nil] The maximum number of total calls to built-in tools that can be processed in a r + # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI @@ -252,7 +271,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # @@ -260,10 +279,12 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating # # @param tools [Array] An array of tools the model may call while generating a response. You # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to + # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. 
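A minimal sketch combining the new Responses request knobs documented above, `max_tool_calls` and `top_logprobs` (assuming a configured `OpenAI::Client`; the model, input, and values are placeholders):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    response = client.responses.create(
      model: "o3",
      input: "Find two recent papers on speculative decoding and summarize them.",
      tools: [{type: :web_search_preview}],
      max_tool_calls: 4,  # cap total built-in tool invocations for this response
      top_logprobs: 3     # log probabilities for the 3 most likely tokens
    )

    # The response reports the processing mode actually used.
    puts response.service_tier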
@@ -296,23 +317,23 @@ module Input
# @return [Array(String, Array)]
end

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
- #
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # Specifies the processing type used for serving the request.
+ #
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
module ServiceTier
extend OpenAI::Internal::Type::Enum

@@ -320,6 +341,7 @@ module ServiceTier
DEFAULT = :default
FLEX = :flex
SCALE = :scale
+ PRIORITY = :priority

# @!method self.values
# @return [Array]
@@ -348,8 +370,11 @@ module ToolChoice
# Use this option to force the model to call a specific function.
variant -> { OpenAI::Responses::ToolChoiceFunction }

+ # Use this option to force the model to call a specific tool on a remote MCP server.
+ variant -> { OpenAI::Responses::ToolChoiceMcp }
+
# @!method self.variants
- # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)]
+ # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp)]
end

# The truncation strategy to use for the model response.
diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb
index a95b9030..91ab5d55 100644
--- a/lib/openai/models/responses/response_function_web_search.rb
+++ b/lib/openai/models/responses/response_function_web_search.rb
@@ -10,6 +10,13 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel
# @return [String]
required :id, String

+ # @!attribute action
+ # An object describing the specific action taken in this web search call. Includes
+ # details on how the model used the web (search, open_page, find).
+ # + # @return [OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::OpenPage, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Find] + required :action, union: -> { OpenAI::Responses::ResponseFunctionWebSearch::Action } + # @!attribute status # The status of the web search tool call. # @@ -22,7 +29,7 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :web_search_call] required :type, const: :web_search_call - # @!method initialize(id:, status:, type: :web_search_call) + # @!method initialize(id:, action:, status:, type: :web_search_call) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseFunctionWebSearch} for more details. # @@ -32,10 +39,125 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the web search tool call. # + # @param action [OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::OpenPage, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Find] An object describing the specific action taken in this web search call. + # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. # # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. + # An object describing the specific action taken in this web search call. Includes + # details on how the model used the web (search, open_page, find). + # + # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#action + module Action + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Action type "search" - Performs a web search query. + variant :search, -> { OpenAI::Responses::ResponseFunctionWebSearch::Action::Search } + + # Action type "open_page" - Opens a specific URL from search results. + variant :open_page, -> { OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage } + + # Action type "find": Searches for a pattern within a loaded page. + variant :find, -> { OpenAI::Responses::ResponseFunctionWebSearch::Action::Find } + + class Search < OpenAI::Internal::Type::BaseModel + # @!attribute query + # The search query. + # + # @return [String] + required :query, String + + # @!attribute type + # The action type. + # + # @return [Symbol, :search] + required :type, const: :search + + # @!attribute domains + # Domains to restrict the search or domains where results were found. + # + # @return [Array, nil] + optional :domains, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(query:, domains: nil, type: :search) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search} for more + # details. + # + # Action type "search" - Performs a web search query. + # + # @param query [String] The search query. + # + # @param domains [Array] Domains to restrict the search or domains where results were found. + # + # @param type [Symbol, :search] The action type. + end + + class OpenPage < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The action type. + # + # @return [Symbol, :open_page] + required :type, const: :open_page + + # @!attribute url + # The URL opened by the model. 
+ # + # @return [String] + required :url, String + + # @!method initialize(url:, type: :open_page) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::OpenPage} for + # more details. + # + # Action type "open_page" - Opens a specific URL from search results. + # + # @param url [String] The URL opened by the model. + # + # @param type [Symbol, :open_page] The action type. + end + + class Find < OpenAI::Internal::Type::BaseModel + # @!attribute pattern + # The pattern or text to search for within the page. + # + # @return [String] + required :pattern, String + + # @!attribute type + # The action type. + # + # @return [Symbol, :find] + required :type, const: :find + + # @!attribute url + # The URL of the page searched for the pattern. + # + # @return [String] + required :url, String + + # @!method initialize(pattern:, url:, type: :find) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Find} for more + # details. + # + # Action type "find": Searches for a pattern within a loaded page. + # + # @param pattern [String] The pattern or text to search for within the page. + # + # @param url [String] The URL of the page searched for the pattern. + # + # @param type [Symbol, :find] The action type. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::OpenPage, OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Find)] + end + # The status of the web search tool call. # # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 7300f818..bfd6f54d 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -6,26 +6,28 @@ module Responses # Specify additional output data to include in the model response. Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. 
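A minimal sketch of the reordered `include` values, opting into code interpreter outputs and the new assistant-message logprobs (assuming a configured `OpenAI::Client`; the tool container shape follows the public Responses API, and the model and input are placeholders):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    response = client.responses.create(
      model: "gpt-4o",
      input: "Plot y = x**2 for x in 0..10 and describe the curve.",
      tools: [{type: :code_interpreter, container: {type: :auto}}],
      include: [
        :"code_interpreter_call.outputs",  # python execution outputs
        :"message.output_text.logprobs"    # new: logprobs on assistant messages
      ]
    )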
module ResponseIncludable extend OpenAI::Internal::Type::Enum + CODE_INTERPRETER_CALL_OUTPUTS = :"code_interpreter_call.outputs" + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" FILE_SEARCH_CALL_RESULTS = :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" + MESSAGE_OUTPUT_TEXT_LOGPROBS = :"message.output_text.logprobs" REASONING_ENCRYPTED_CONTENT = :"reasoning.encrypted_content" - CODE_INTERPRETER_CALL_OUTPUTS = :"code_interpreter_call.outputs" # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/tool_choice_mcp.rb b/lib/openai/models/responses/tool_choice_mcp.rb new file mode 100644 index 00000000..1ced8a01 --- /dev/null +++ b/lib/openai/models/responses/tool_choice_mcp.rb @@ -0,0 +1,40 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ToolChoiceMcp < OpenAI::Internal::Type::BaseModel + # @!attribute server_label + # The label of the MCP server to use. + # + # @return [String] + required :server_label, String + + # @!attribute type + # For MCP tools, the type is always `mcp`. + # + # @return [Symbol, :mcp] + required :type, const: :mcp + + # @!attribute name + # The name of the tool to call on the server. + # + # @return [String, nil] + optional :name, String, nil?: true + + # @!method initialize(server_label:, name: nil, type: :mcp) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ToolChoiceMcp} for more details. + # + # Use this option to force the model to call a specific tool on a remote MCP + # server. + # + # @param server_label [String] The label of the MCP server to use. + # + # @param name [String, nil] The name of the tool to call on the server. + # + # @param type [Symbol, :mcp] For MCP tools, the type is always `mcp`. 
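A minimal sketch of forcing a specific remote MCP tool via the new `ToolChoiceMcp` union member (assuming a configured `OpenAI::Client`; the server label, URL, and tool name are hypothetical):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    response = client.responses.create(
      model: "o3",
      input: "What transports does the protocol support?",
      tools: [{
        type: :mcp,
        server_label: "deepwiki",                   # hypothetical label
        server_url: "https://mcp.example.com/mcp",  # hypothetical server
        require_approval: :never
      }],
      # Force a call to one named tool on that server rather than letting the
      # model choose.
      tool_choice: {type: :mcp, server_label: "deepwiki", name: "ask_question"}
    )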
+ end + end + end +end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index ef4278cd..8b526e65 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -14,7 +14,6 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `web_search_preview` # - `computer_use_preview` # - `code_interpreter` - # - `mcp` # - `image_generation` # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] @@ -38,7 +37,6 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `web_search_preview` # - `computer_use_preview` # - `code_interpreter` - # - `mcp` # - `image_generation` # # @see OpenAI::Models::Responses::ToolChoiceTypes#type @@ -51,7 +49,6 @@ module Type WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 IMAGE_GENERATION = :image_generation CODE_INTERPRETER = :code_interpreter - MCP = :mcp # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index a344e7b6..e86bc725 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -18,6 +18,10 @@ module ResponsesOnlyModel O1_PRO_2025_03_19 = :"o1-pro-2025-03-19" O3_PRO = :"o3-pro" O3_PRO_2025_06_10 = :"o3-pro-2025-06-10" + O3_DEEP_RESEARCH = :"o3-deep-research" + O3_DEEP_RESEARCH_2025_06_26 = :"o3-deep-research-2025-06-26" + O4_MINI_DEEP_RESEARCH = :"o4-mini-deep-research" + O4_MINI_DEEP_RESEARCH_2025_06_26 = :"o4-mini-deep-research-2025-06-26" COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" diff --git a/lib/openai/models/webhooks/batch_cancelled_webhook_event.rb b/lib/openai/models/webhooks/batch_cancelled_webhook_event.rb new file mode 100644 index 00000000..1cef64b5 --- /dev/null +++ b/lib/openai/models/webhooks/batch_cancelled_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class BatchCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the batch API request was cancelled. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::BatchCancelledWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `batch.cancelled`. + # + # @return [Symbol, :"batch.cancelled"] + required :type, const: :"batch.cancelled" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::BatchCancelledWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"batch.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchCancelledWebhookEvent} for more details. + # + # Sent when a batch API request has been cancelled. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the batch API request was cancelled. 
+ # + # @param data [OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"batch.cancelled"] The type of the event. Always `batch.cancelled`. + + # @see OpenAI::Models::Webhooks::BatchCancelledWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the batch API request. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the batch API request. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::BatchCancelledWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/batch_completed_webhook_event.rb b/lib/openai/models/webhooks/batch_completed_webhook_event.rb new file mode 100644 index 00000000..fb130f53 --- /dev/null +++ b/lib/openai/models/webhooks/batch_completed_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class BatchCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the batch API request was completed. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::BatchCompletedWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `batch.completed`. + # + # @return [Symbol, :"batch.completed"] + required :type, const: :"batch.completed" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::BatchCompletedWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"batch.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchCompletedWebhookEvent} for more details. + # + # Sent when a batch API request has been completed. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the batch API request was completed. + # + # @param data [OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"batch.completed"] The type of the event. Always `batch.completed`. + + # @see OpenAI::Models::Webhooks::BatchCompletedWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the batch API request. 
+ # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the batch API request. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::BatchCompletedWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/batch_expired_webhook_event.rb b/lib/openai/models/webhooks/batch_expired_webhook_event.rb new file mode 100644 index 00000000..cf0bb285 --- /dev/null +++ b/lib/openai/models/webhooks/batch_expired_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class BatchExpiredWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the batch API request expired. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::BatchExpiredWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `batch.expired`. + # + # @return [Symbol, :"batch.expired"] + required :type, const: :"batch.expired" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::BatchExpiredWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"batch.expired") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchExpiredWebhookEvent} for more details. + # + # Sent when a batch API request has expired. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the batch API request expired. + # + # @param data [OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"batch.expired"] The type of the event. Always `batch.expired`. + + # @see OpenAI::Models::Webhooks::BatchExpiredWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the batch API request. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the batch API request. + end + + # The object of the event. Always `event`. 
+ # + # @see OpenAI::Models::Webhooks::BatchExpiredWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/batch_failed_webhook_event.rb b/lib/openai/models/webhooks/batch_failed_webhook_event.rb new file mode 100644 index 00000000..c84be5ee --- /dev/null +++ b/lib/openai/models/webhooks/batch_failed_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class BatchFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the batch API request failed. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::BatchFailedWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::BatchFailedWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `batch.failed`. + # + # @return [Symbol, :"batch.failed"] + required :type, const: :"batch.failed" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::BatchFailedWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::BatchFailedWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"batch.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchFailedWebhookEvent} for more details. + # + # Sent when a batch API request has failed. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the batch API request failed. + # + # @param data [OpenAI::Models::Webhooks::BatchFailedWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::BatchFailedWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"batch.failed"] The type of the event. Always `batch.failed`. + + # @see OpenAI::Models::Webhooks::BatchFailedWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the batch API request. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::BatchFailedWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the batch API request. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::BatchFailedWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/eval_run_canceled_webhook_event.rb b/lib/openai/models/webhooks/eval_run_canceled_webhook_event.rb new file mode 100644 index 00000000..684cd7f0 --- /dev/null +++ b/lib/openai/models/webhooks/eval_run_canceled_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class EvalRunCanceledWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. 
+ # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the eval run was canceled. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `eval.run.canceled`. + # + # @return [Symbol, :"eval.run.canceled"] + required :type, const: :"eval.run.canceled" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"eval.run.canceled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent} for more details. + # + # Sent when an eval run has been canceled. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the eval run was canceled. + # + # @param data [OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"eval.run.canceled"] The type of the event. Always `eval.run.canceled`. + + # @see OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the eval run. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the eval run. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/eval_run_failed_webhook_event.rb b/lib/openai/models/webhooks/eval_run_failed_webhook_event.rb new file mode 100644 index 00000000..c0db2b68 --- /dev/null +++ b/lib/openai/models/webhooks/eval_run_failed_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class EvalRunFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the eval run failed. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `eval.run.failed`. + # + # @return [Symbol, :"eval.run.failed"] + required :type, const: :"eval.run.failed" + + # @!attribute object + # The object of the event. Always `event`. 
+ # + # @return [Symbol, OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"eval.run.failed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent} for more details. + # + # Sent when an eval run has failed. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the eval run failed. + # + # @param data [OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"eval.run.failed"] The type of the event. Always `eval.run.failed`. + + # @see OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the eval run. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the eval run. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/eval_run_succeeded_webhook_event.rb b/lib/openai/models/webhooks/eval_run_succeeded_webhook_event.rb new file mode 100644 index 00000000..55321f79 --- /dev/null +++ b/lib/openai/models/webhooks/eval_run_succeeded_webhook_event.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class EvalRunSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the eval run succeeded. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `eval.run.succeeded`. + # + # @return [Symbol, :"eval.run.succeeded"] + required :type, const: :"eval.run.succeeded" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"eval.run.succeeded") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent} for more details. + # + # Sent when an eval run has succeeded. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the eval run succeeded. + # + # @param data [OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::Data] Event data payload. 
+ # + # @param object [Symbol, OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"eval.run.succeeded"] The type of the event. Always `eval.run.succeeded`. + + # @see OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the eval run. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::Data} for more details. + # + # Event data payload. + # + # @param id [String] The unique ID of the eval run. + end + + # The object of the event. Always `event`. + # + # @see OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent#object + module Object + extend OpenAI::Internal::Type::Enum + + EVENT = :event + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rb b/lib/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rb new file mode 100644 index 00000000..66d6c0f3 --- /dev/null +++ b/lib/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rb @@ -0,0 +1,85 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + class FineTuningJobCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the event. + # + # @return [String] + required :id, String + + # @!attribute created_at + # The Unix timestamp (in seconds) of when the fine-tuning job was cancelled. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute data + # Event data payload. + # + # @return [OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::Data] + required :data, -> { OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data } + + # @!attribute type + # The type of the event. Always `fine_tuning.job.cancelled`. + # + # @return [Symbol, :"fine_tuning.job.cancelled"] + required :type, const: :"fine_tuning.job.cancelled" + + # @!attribute object + # The object of the event. Always `event`. + # + # @return [Symbol, OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::Object, nil] + optional :object, enum: -> { OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object } + + # @!method initialize(id:, created_at:, data:, object: nil, type: :"fine_tuning.job.cancelled") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent} for more details. + # + # Sent when a fine-tuning job has been cancelled. + # + # @param id [String] The unique ID of the event. + # + # @param created_at [Integer] The Unix timestamp (in seconds) of when the fine-tuning job was cancelled. + # + # @param data [OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::Data] Event data payload. + # + # @param object [Symbol, OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::Object] The object of the event. Always `event`. + # + # @param type [Symbol, :"fine_tuning.job.cancelled"] The type of the event. Always `fine_tuning.job.cancelled`. + + # @see OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent#data + class Data < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the fine-tuning job. 
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the fine-tuning job.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rb b/lib/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rb
new file mode 100644
index 00000000..160a2a3c
--- /dev/null
+++ b/lib/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class FineTuningJobFailedWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the fine-tuning job failed.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `fine_tuning.job.failed`.
+ #
+ # @return [Symbol, :"fine_tuning.job.failed"]
+ required :type, const: :"fine_tuning.job.failed"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"fine_tuning.job.failed")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent} for more details.
+ #
+ # Sent when a fine-tuning job has failed.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the fine-tuning job failed.
+ #
+ # @param data [OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"fine_tuning.job.failed"] The type of the event. Always `fine_tuning.job.failed`.
+
+ # @see OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the fine-tuning job.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the fine-tuning job.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rb b/lib/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rb
new file mode 100644
index 00000000..20c1ee68
--- /dev/null
+++ b/lib/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class FineTuningJobSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the fine-tuning job succeeded.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `fine_tuning.job.succeeded`.
+ #
+ # @return [Symbol, :"fine_tuning.job.succeeded"]
+ required :type, const: :"fine_tuning.job.succeeded"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"fine_tuning.job.succeeded")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent} for more details.
+ #
+ # Sent when a fine-tuning job has succeeded.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the fine-tuning job succeeded.
+ #
+ # @param data [OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"fine_tuning.job.succeeded"] The type of the event. Always `fine_tuning.job.succeeded`.
+
+ # @see OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the fine-tuning job.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the fine-tuning job.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/response_cancelled_webhook_event.rb b/lib/openai/models/webhooks/response_cancelled_webhook_event.rb
new file mode 100644
index 00000000..8e3d0632
--- /dev/null
+++ b/lib/openai/models/webhooks/response_cancelled_webhook_event.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class ResponseCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the model response was cancelled.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `response.cancelled`.
+ #
+ # @return [Symbol, :"response.cancelled"]
+ required :type, const: :"response.cancelled"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"response.cancelled")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent} for more details.
+ #
+ # Sent when a background response has been cancelled.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the model response was cancelled.
+ #
+ # @param data [OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"response.cancelled"] The type of the event. Always `response.cancelled`.
+
+ # @see OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the model response.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the model response.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/response_completed_webhook_event.rb b/lib/openai/models/webhooks/response_completed_webhook_event.rb
new file mode 100644
index 00000000..9228af45
--- /dev/null
+++ b/lib/openai/models/webhooks/response_completed_webhook_event.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class ResponseCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the model response was completed.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `response.completed`.
+ #
+ # @return [Symbol, :"response.completed"]
+ required :type, const: :"response.completed"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"response.completed")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent} for more details.
+ #
+ # Sent when a background response has been completed.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the model response was completed.
+ #
+ # @param data [OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`.
+
+ # @see OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the model response.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the model response.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/response_failed_webhook_event.rb b/lib/openai/models/webhooks/response_failed_webhook_event.rb
new file mode 100644
index 00000000..fe54ea51
--- /dev/null
+++ b/lib/openai/models/webhooks/response_failed_webhook_event.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class ResponseFailedWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the model response failed.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::ResponseFailedWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `response.failed`.
+ #
+ # @return [Symbol, :"response.failed"]
+ required :type, const: :"response.failed"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::ResponseFailedWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"response.failed")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseFailedWebhookEvent} for more details.
+ #
+ # Sent when a background response has failed.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the model response failed.
+ #
+ # @param data [OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`.
+
+ # @see OpenAI::Models::Webhooks::ResponseFailedWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the model response.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::Data} for more details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the model response.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::ResponseFailedWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/response_incomplete_webhook_event.rb b/lib/openai/models/webhooks/response_incomplete_webhook_event.rb
new file mode 100644
index 00000000..9dcecc9e
--- /dev/null
+++ b/lib/openai/models/webhooks/response_incomplete_webhook_event.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ class ResponseIncompleteWebhookEvent < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the event.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!attribute created_at
+ # The Unix timestamp (in seconds) of when the model response was interrupted.
+ #
+ # @return [Integer]
+ required :created_at, Integer
+
+ # @!attribute data
+ # Event data payload.
+ #
+ # @return [OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::Data]
+ required :data, -> { OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data }
+
+ # @!attribute type
+ # The type of the event. Always `response.incomplete`.
+ #
+ # @return [Symbol, :"response.incomplete"]
+ required :type, const: :"response.incomplete"
+
+ # @!attribute object
+ # The object of the event. Always `event`.
+ #
+ # @return [Symbol, OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::Object, nil]
+ optional :object, enum: -> { OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object }
+
+ # @!method initialize(id:, created_at:, data:, object: nil, type: :"response.incomplete")
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent} for more details.
+ #
+ # Sent when a background response has been interrupted.
+ #
+ # @param id [String] The unique ID of the event.
+ #
+ # @param created_at [Integer] The Unix timestamp (in seconds) of when the model response was interrupted.
+ #
+ # @param data [OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::Data] Event data payload.
+ #
+ # @param object [Symbol, OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::Object] The object of the event. Always `event`.
+ #
+ # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`.
+
+ # @see OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent#data
+ class Data < OpenAI::Internal::Type::BaseModel
+ # @!attribute id
+ # The unique ID of the model response.
+ #
+ # @return [String]
+ required :id, String
+
+ # @!method initialize(id:)
+ # Some parameter documentation has been truncated, see
+ # {OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::Data} for more
+ # details.
+ #
+ # Event data payload.
+ #
+ # @param id [String] The unique ID of the model response.
+ end
+
+ # The object of the event. Always `event`.
+ #
+ # @see OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent#object
+ module Object
+ extend OpenAI::Internal::Type::Enum
+
+ EVENT = :event
+
+ # @!method self.values
+ # @return [Array]
+ end
+ end
+ end
+ end
+end
diff --git a/lib/openai/models/webhooks/unwrap_webhook_event.rb b/lib/openai/models/webhooks/unwrap_webhook_event.rb
new file mode 100644
index 00000000..821aa816
--- /dev/null
+++ b/lib/openai/models/webhooks/unwrap_webhook_event.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module OpenAI
+ module Models
+ module Webhooks
+ # Sent when a batch API request has been cancelled.
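+ #
+ # `UnwrapWebhookEvent` is the union of all fourteen webhook payloads defined
+ # in this series, discriminated by the `type` field (see `discriminator :type`
+ # below). A minimal dispatch sketch, assuming `client` is an `OpenAI::Client`
+ # and `payload` is the raw request body received by your webhook endpoint:
+ #
+ #   event = client.webhooks.unwrap(payload)
+ #   case event
+ #   when OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent
+ #     puts "response #{event.data.id} completed"
+ #   when OpenAI::Models::Webhooks::BatchFailedWebhookEvent
+ #     puts "batch #{event.data.id} failed"
+ #   else
+ #     puts "unhandled event: #{event.type}"
+ #   end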
+ module UnwrapWebhookEvent + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Sent when a batch API request has been cancelled. + variant :"batch.cancelled", -> { OpenAI::Webhooks::BatchCancelledWebhookEvent } + + # Sent when a batch API request has been completed. + variant :"batch.completed", -> { OpenAI::Webhooks::BatchCompletedWebhookEvent } + + # Sent when a batch API request has expired. + variant :"batch.expired", -> { OpenAI::Webhooks::BatchExpiredWebhookEvent } + + # Sent when a batch API request has failed. + variant :"batch.failed", -> { OpenAI::Webhooks::BatchFailedWebhookEvent } + + # Sent when an eval run has been canceled. + variant :"eval.run.canceled", -> { OpenAI::Webhooks::EvalRunCanceledWebhookEvent } + + # Sent when an eval run has failed. + variant :"eval.run.failed", -> { OpenAI::Webhooks::EvalRunFailedWebhookEvent } + + # Sent when an eval run has succeeded. + variant :"eval.run.succeeded", -> { OpenAI::Webhooks::EvalRunSucceededWebhookEvent } + + # Sent when a fine-tuning job has been cancelled. + variant :"fine_tuning.job.cancelled", -> { OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent } + + # Sent when a fine-tuning job has failed. + variant :"fine_tuning.job.failed", -> { OpenAI::Webhooks::FineTuningJobFailedWebhookEvent } + + # Sent when a fine-tuning job has succeeded. + variant :"fine_tuning.job.succeeded", -> { OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent } + + # Sent when a background response has been cancelled. + variant :"response.cancelled", -> { OpenAI::Webhooks::ResponseCancelledWebhookEvent } + + # Sent when a background response has been completed. + variant :"response.completed", -> { OpenAI::Webhooks::ResponseCompletedWebhookEvent } + + # Sent when a background response has failed. + variant :"response.failed", -> { OpenAI::Webhooks::ResponseFailedWebhookEvent } + + # Sent when a background response has been interrupted. 
+ variant :"response.incomplete", -> { OpenAI::Webhooks::ResponseIncompleteWebhookEvent } + + # @!method self.variants + # @return [Array(OpenAI::Models::Webhooks::BatchCancelledWebhookEvent, OpenAI::Models::Webhooks::BatchCompletedWebhookEvent, OpenAI::Models::Webhooks::BatchExpiredWebhookEvent, OpenAI::Models::Webhooks::BatchFailedWebhookEvent, OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent, OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent, OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent, OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent, OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent, OpenAI::Models::Webhooks::ResponseFailedWebhookEvent, OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent)] + end + end + end +end diff --git a/lib/openai/models/webhooks/webhook_unwrap_params.rb b/lib/openai/models/webhooks/webhook_unwrap_params.rb new file mode 100644 index 00000000..2413665c --- /dev/null +++ b/lib/openai/models/webhooks/webhook_unwrap_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Webhooks + # @see OpenAI::Resources::Webhooks#unwrap + class WebhookUnwrapParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 4dd48bca..44b63a90 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -70,7 +70,7 @@ class Completions # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # @@ -175,7 +175,7 @@ def create(params) # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 230d44c9..2b2d33f4 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,7 +23,7 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -35,6 +35,8 @@ class Responses # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # + # @param max_tool_calls [Integer, nil] The maximum number of total calls to built-in tools that can be processed in a r + # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI @@ -47,7 +49,7 @@ class Responses # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # @@ -55,10 +57,12 @@ class Responses # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating # # @param tools [Array] An array of tools the model may call while generating a response. You # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to + # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. @@ -102,7 +106,7 @@ def create(params = {}) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -114,6 +118,8 @@ def create(params = {}) # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # + # @param max_tool_calls [Integer, nil] The maximum number of total calls to built-in tools that can be processed in a r + # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI @@ -126,7 +132,7 @@ def create(params = {}) # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # @@ -134,10 +140,12 @@ def create(params = {}) # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating # # @param tools [Array] An array of tools the model may call while generating a response. You # + # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to + # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. 
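The new `max_tool_calls`, `top_logprobs`, and `service_tier` parameters above are
plain keyword arguments on `Responses#create` and `Responses#stream_raw`. A
minimal sketch of a call exercising them (the model name and input here are
illustrative only):

    require "openai"

    client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

    response = client.responses.create(
      model: "o3",
      input: "Summarize yesterday's eval run failures.",
      max_tool_calls: 5,       # cap on total built-in tool calls, across all tools
      top_logprobs: 3,         # 0..20 most likely tokens per output position
      service_tier: :priority  # new tier alongside :auto, :default, and :flex
    )

    puts response.id

`stream_raw` accepts the same three parameters but yields the server-sent event
stream instead of returning a single response object.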
diff --git a/lib/openai/resources/webhooks.rb b/lib/openai/resources/webhooks.rb new file mode 100644 index 00000000..b5c3a91d --- /dev/null +++ b/lib/openai/resources/webhooks.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Webhooks + # Validates that the given payload was sent by OpenAI and parses the payload. + # + # @param payload [String] The raw webhook payload as a string + # + # @return [OpenAI::Models::Webhooks::BatchCancelledWebhookEvent, OpenAI::Models::Webhooks::BatchCompletedWebhookEvent, OpenAI::Models::Webhooks::BatchExpiredWebhookEvent, OpenAI::Models::Webhooks::BatchFailedWebhookEvent, OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent, OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent, OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent, OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent, OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent, OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent, OpenAI::Models::Webhooks::ResponseFailedWebhookEvent, OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent] + def unwrap(payload) + parsed = JSON.parse(payload, symbolize_names: true) + OpenAI::Internal::Type::Converter.coerce(OpenAI::Models::Webhooks::UnwrapWebhookEvent, parsed) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end +end diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index fc1f9348..64ac2f60 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -52,6 +52,9 @@ module OpenAI sig { returns(OpenAI::Resources::VectorStores) } attr_reader :vector_stores + sig { returns(OpenAI::Resources::Webhooks) } + attr_reader :webhooks + sig { returns(OpenAI::Resources::Beta) } attr_reader :beta diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index c4a20f0f..78cb3785 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -199,4 +199,6 @@ module OpenAI VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams + + Webhooks = OpenAI::Models::Webhooks end diff --git a/rbi/openai/models/all_models.rbi b/rbi/openai/models/all_models.rbi index acbc553b..34231363 100644 --- a/rbi/openai/models/all_models.rbi +++ b/rbi/openai/models/all_models.rbi @@ -35,6 +35,26 @@ module OpenAI :"o3-pro-2025-06-10", OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol ) + O3_DEEP_RESEARCH = + T.let( + :"o3-deep-research", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) + O3_DEEP_RESEARCH_2025_06_26 = + T.let( + :"o3-deep-research-2025-06-26", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) + O4_MINI_DEEP_RESEARCH = + T.let( + :"o4-mini-deep-research", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) + O4_MINI_DEEP_RESEARCH_2025_06_26 = + T.let( + :"o4-mini-deep-research-2025-06-26", + OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = T.let( :"computer-use-preview", diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index 6c8172ca..d26bd0ca 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -32,23 +32,23 @@ module OpenAI sig { returns(Symbol) } attr_accessor :object - # Specifies the latency tier to use for processing the request. 
This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
sig do
returns(
T.nilable(OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol)
)
end
@@ -98,23 +98,23 @@ module OpenAI
created:,
# The model used for the chat completion.
model:,
- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
service_tier: nil,
# This fingerprint represents the backend configuration that the model runs with.
#
@@ -363,23 +363,23 @@ module OpenAI
end
end

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
module ServiceTier
extend OpenAI::Internal::Type::Enum

@@ -409,6 +409,11 @@ module OpenAI
:scale,
OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol
)
+ PRIORITY =
+ T.let(
+ :priority,
+ OpenAI::Chat::ChatCompletion::ServiceTier::TaggedSymbol
+ )

sig do
override.returns(
diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi
index cd2d322e..dfd263ec 100644
--- a/rbi/openai/models/chat/chat_completion_chunk.rbi
+++ b/rbi/openai/models/chat/chat_completion_chunk.rbi
@@ -34,23 +34,23 @@ module OpenAI
sig { returns(Symbol) }
attr_accessor :object

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
sig do
returns(
T.nilable(
OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol
)
)
end
@@ -113,23 +113,23 @@ module OpenAI
created:,
# The model to generate the completion.
model:,
- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
service_tier: nil,
# This fingerprint represents the backend configuration that the model runs with.
# Can be used in conjunction with the `seed` request parameter to understand when
@@ -783,23 +783,23 @@ module OpenAI
end
end

- # Specifies the latency tier to use for processing the request.
This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
module ServiceTier
extend OpenAI::Internal::Type::Enum

@@ -829,6 +829,11 @@ module OpenAI
:scale,
OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol
)
+ PRIORITY =
+ T.let(
+ :priority,
+ OpenAI::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol
+ )

sig do
override.returns(
diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi
index 9e6ffe46..89b7ffe3 100644
--- a/rbi/openai/models/chat/completion_create_params.rbi
+++ b/rbi/openai/models/chat/completion_create_params.rbi
@@ -268,23 +268,23 @@ module OpenAI
sig { returns(T.nilable(Integer)) }
attr_accessor :seed

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
sig do
returns(
T.nilable(
OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol
)
)
end
attr_accessor :service_tier
@@ -308,6 +308,8 @@ module OpenAI
# Whether or not to store the output of this chat completion request for use in
# our [model distillation](https://platform.openai.com/docs/guides/distillation)
# or [evals](https://platform.openai.com/docs/guides/evals) products.
+ #
+ # Supports text and image inputs. Note: image inputs over 10MB will be dropped.
sig { returns(T.nilable(T::Boolean)) }
attr_accessor :store
@@ -608,23 +610,23 @@ module OpenAI
# should refer to the `system_fingerprint` response parameter to monitor changes
# in the backend.
seed: nil,
- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
service_tier: nil,
# Not supported with latest reasoning models `o3` and `o4-mini`.
#
# Up to 4 sequences where the API will stop generating further tokens. The
# returned text will not contain the stop sequence.
stop: nil,
# Whether or not to store the output of this chat completion request for use in
# our [model distillation](https://platform.openai.com/docs/guides/distillation)
# or [evals](https://platform.openai.com/docs/guides/evals) products.
+ #
+ # Supports text and image inputs. Note: image inputs over 10MB will be dropped.
store: nil,
# Options for streaming response. Only set this when you set `stream: true`.
stream_options: nil,
@@ -984,23 +988,23 @@ module OpenAI
end
end

- # Specifies the latency tier to use for processing the request. This parameter is
- # relevant for customers subscribed to the scale tier service:
+ # Specifies the processing type used for serving the request.
#
- # - If set to 'auto', and the Project is Scale tier enabled, the system will
- # utilize scale tier credits until they are exhausted.
- # - If set to 'auto', and the Project is not Scale tier enabled, the request will
- # be processed using the default service tier with a lower uptime SLA and no
- # latency guarantee.
- # - If set to 'default', the request will be processed using the default service
- # tier with a lower uptime SLA and no latency guarantee.
- # - If set to 'flex', the request will be processed with the Flex Processing
- # service tier.
- # [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+ # - If set to 'auto', then the request will be processed with the service tier
+ # configured in the Project settings. Unless otherwise configured, the Project
+ # will use 'default'.
+ # - If set to 'default', then the request will be processed with the standard
+ # pricing and performance for the selected model.
+ # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+ # 'priority', then the request will be processed with the corresponding service
+ # tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+ # Priority processing.
# - When not set, the default behavior is 'auto'.
#
- # When this parameter is set, the response body will include the `service_tier`
- # utilized.
+ # When the `service_tier` parameter is set, the response body will include the
+ # `service_tier` value based on the processing mode actually used to serve the
+ # request. This response value may be different from the value set in the
+ # parameter.
module ServiceTier
extend OpenAI::Internal::Type::Enum

@@ -1030,6 +1034,11 @@ module OpenAI
:scale,
OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol
)
+ PRIORITY =
+ T.let(
+ :priority,
+ OpenAI::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol
+ )

sig do
override.returns(
diff --git a/rbi/openai/models/images_response.rbi b/rbi/openai/models/images_response.rbi
index b9d60ee3..b4b96022 100644
--- a/rbi/openai/models/images_response.rbi
+++ b/rbi/openai/models/images_response.rbi
@@ -12,6 +12,18 @@ module OpenAI
sig { returns(Integer) }
attr_accessor :created

+ # The background parameter used for the image generation. Either `transparent` or
+ # `opaque`.
+ sig do
+ returns(T.nilable(OpenAI::ImagesResponse::Background::TaggedSymbol))
+ end
+ attr_reader :background
+
+ sig do
+ params(background: OpenAI::ImagesResponse::Background::OrSymbol).void
+ end
+ attr_writer :background
+
# The list of generated images.
sig { returns(T.nilable(T::Array[OpenAI::Image])) }
attr_reader :data

sig { params(data: T::Array[OpenAI::Image::OrHash]).void }
attr_writer :data

+ # The output format of the image generation.
Either `png`, `webp`, or `jpeg`. + sig do + returns(T.nilable(OpenAI::ImagesResponse::OutputFormat::TaggedSymbol)) + end + attr_reader :output_format + + sig do + params( + output_format: OpenAI::ImagesResponse::OutputFormat::OrSymbol + ).void + end + attr_writer :output_format + + # The quality of the image generated. Either `low`, `medium`, or `high`. + sig { returns(T.nilable(OpenAI::ImagesResponse::Quality::TaggedSymbol)) } + attr_reader :quality + + sig { params(quality: OpenAI::ImagesResponse::Quality::OrSymbol).void } + attr_writer :quality + + # The size of the image generated. Either `1024x1024`, `1024x1536`, or + # `1536x1024`. + sig { returns(T.nilable(OpenAI::ImagesResponse::Size::TaggedSymbol)) } + attr_reader :size + + sig { params(size: OpenAI::ImagesResponse::Size::OrSymbol).void } + attr_writer :size + # For `gpt-image-1` only, the token usage information for the image generation. sig { returns(T.nilable(OpenAI::ImagesResponse::Usage)) } attr_reader :usage @@ -30,15 +70,29 @@ module OpenAI sig do params( created: Integer, + background: OpenAI::ImagesResponse::Background::OrSymbol, data: T::Array[OpenAI::Image::OrHash], + output_format: OpenAI::ImagesResponse::OutputFormat::OrSymbol, + quality: OpenAI::ImagesResponse::Quality::OrSymbol, + size: OpenAI::ImagesResponse::Size::OrSymbol, usage: OpenAI::ImagesResponse::Usage::OrHash ).returns(T.attached_class) end def self.new( # The Unix timestamp (in seconds) of when the image was created. created:, + # The background parameter used for the image generation. Either `transparent` or + # `opaque`. + background: nil, # The list of generated images. data: nil, + # The output format of the image generation. Either `png`, `webp`, or `jpeg`. + output_format: nil, + # The quality of the image generated. Either `low`, `medium`, or `high`. + quality: nil, + # The size of the image generated. Either `1024x1024`, `1024x1536`, or + # `1536x1024`. + size: nil, # For `gpt-image-1` only, the token usage information for the image generation. usage: nil ) @@ -48,7 +102,11 @@ module OpenAI override.returns( { created: Integer, + background: OpenAI::ImagesResponse::Background::TaggedSymbol, data: T::Array[OpenAI::Image], + output_format: OpenAI::ImagesResponse::OutputFormat::TaggedSymbol, + quality: OpenAI::ImagesResponse::Quality::TaggedSymbol, + size: OpenAI::ImagesResponse::Size::TaggedSymbol, usage: OpenAI::ImagesResponse::Usage } ) @@ -56,6 +114,94 @@ module OpenAI def to_hash end + # The background parameter used for the image generation. Either `transparent` or + # `opaque`. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImagesResponse::Background) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let(:transparent, OpenAI::ImagesResponse::Background::TaggedSymbol) + OPAQUE = + T.let(:opaque, OpenAI::ImagesResponse::Background::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImagesResponse::Background::TaggedSymbol] + ) + end + def self.values + end + end + + # The output format of the image generation. Either `png`, `webp`, or `jpeg`. 
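+ #
+ # A hypothetical read of the corresponding field, assuming `res` is an
+ # `OpenAI::ImagesResponse` returned by `client.images.generate`:
+ #
+ #   res.output_format # => :png, :webp, or :jpeg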
+ module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImagesResponse::OutputFormat) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = T.let(:png, OpenAI::ImagesResponse::OutputFormat::TaggedSymbol) + WEBP = T.let(:webp, OpenAI::ImagesResponse::OutputFormat::TaggedSymbol) + JPEG = T.let(:jpeg, OpenAI::ImagesResponse::OutputFormat::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImagesResponse::OutputFormat::TaggedSymbol] + ) + end + def self.values + end + end + + # The quality of the image generated. Either `low`, `medium`, or `high`. + module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImagesResponse::Quality) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = T.let(:low, OpenAI::ImagesResponse::Quality::TaggedSymbol) + MEDIUM = T.let(:medium, OpenAI::ImagesResponse::Quality::TaggedSymbol) + HIGH = T.let(:high, OpenAI::ImagesResponse::Quality::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImagesResponse::Quality::TaggedSymbol] + ) + end + def self.values + end + end + + # The size of the image generated. Either `1024x1024`, `1024x1536`, or + # `1536x1024`. + module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImagesResponse::Size) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let(:"1024x1024", OpenAI::ImagesResponse::Size::TaggedSymbol) + SIZE_1024X1536 = + T.let(:"1024x1536", OpenAI::ImagesResponse::Size::TaggedSymbol) + SIZE_1536X1024 = + T.let(:"1536x1024", OpenAI::ImagesResponse::Size::TaggedSymbol) + + sig do + override.returns(T::Array[OpenAI::ImagesResponse::Size::TaggedSymbol]) + end + def self.values + end + end + class Usage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 1ebda937..4eba041a 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -140,6 +140,13 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :max_output_tokens + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + sig { returns(T.nilable(Integer)) } + attr_accessor :max_tool_calls + # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). @@ -168,23 +175,23 @@ module OpenAI sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. 
- # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the request will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. sig do returns( T.nilable(OpenAI::Responses::Response::ServiceTier::TaggedSymbol) ) @@ -213,6 +220,11 @@ module OpenAI sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } attr_writer :text + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + sig { returns(T.nilable(Integer)) } + attr_accessor :top_logprobs + # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's @@ -283,7 +295,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes::OrHash, - OpenAI::Responses::ToolChoiceFunction::OrHash + OpenAI::Responses::ToolChoiceFunction::OrHash, + OpenAI::Responses::ToolChoiceMcp::OrHash ), tools: T::Array[ @@ -301,6 +314,7 @@ module OpenAI top_p: T.nilable(Float), background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), + max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), reasoning: T.nilable(OpenAI::Reasoning::OrHash), service_tier: T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), status: OpenAI::Responses::ResponseStatus::OrSymbol, text: OpenAI::Responses::ResponseTextConfig::OrHash, + top_logprobs: T.nilable(Integer), truncation: T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), usage: OpenAI::Responses::ResponseUsage::OrHash, @@ -390,6 +405,11 @@ module OpenAI # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + max_tool_calls: nil, # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). 
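For context, a minimal sketch of how the new `service_tier`, `max_tool_calls`, and `top_logprobs` fields added above might be used from this SDK (hedged: the client setup, model name, and input are illustrative, not part of this diff):

    client = OpenAI::Client.new # assumes OPENAI_API_KEY is set in the environment

    response = client.responses.create(
      model: "gpt-4.1",        # illustrative model name
      input: "Say hello.",
      service_tier: :priority, # tier added in this change; access may be required
      max_tool_calls: 3,       # cap on total built-in tool calls in the response
      top_logprobs: 5          # 0..20 most likely tokens per output position
    )

    # The echoed service_tier reflects the processing mode actually used to
    # serve the request, which may differ from the requested value.
    puts response.service_tier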
@@ -402,23 +422,23 @@ module OpenAI # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the request will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. service_tier: nil, # The status of the response generation. One of `completed`, `failed`, # `in_progress`, `cancelled`, `queued`, or `incomplete`. @@ -429,6 +449,9 @@ module OpenAI # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + top_logprobs: nil, # The truncation strategy to use for the model response. 
# # - `auto`: If the context of this response and previous ones exceeds the model's @@ -470,6 +493,7 @@ module OpenAI top_p: T.nilable(Float), background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), + max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt), reasoning: T.nilable(OpenAI::Reasoning), @@ -479,6 +503,7 @@ module OpenAI ), status: OpenAI::Responses::ResponseStatus::TaggedSymbol, text: OpenAI::Responses::ResponseTextConfig, + top_logprobs: T.nilable(Integer), truncation: T.nilable( OpenAI::Responses::Response::Truncation::TaggedSymbol @@ -622,7 +647,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceFunction, + OpenAI::Responses::ToolChoiceMcp ) end @@ -635,23 +661,23 @@ module OpenAI end end - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the requset will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. module ServiceTier extend OpenAI::Internal::Type::Enum @@ -675,6 +701,11 @@ module OpenAI :scale, OpenAI::Responses::Response::ServiceTier::TaggedSymbol ) + PRIORITY = + T.let( + :priority, + OpenAI::Responses::Response::ServiceTier::TaggedSymbol + ) sig do override.returns( diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index da0022e2..a8aa310b 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -23,18 +23,19 @@ module OpenAI # Specify additional output data to include in the model response. 
Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. sig do returns( T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) ) @@ -79,6 +80,13 @@ module OpenAI sig { returns(T.nilable(Integer)) } attr_accessor :max_output_tokens + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + sig { returns(T.nilable(Integer)) } + attr_accessor :max_tool_calls + # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. @@ -150,23 +158,23 @@ module OpenAI sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the request will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. 
+ # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. sig do returns( T.nilable( @@ -207,7 +215,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceFunction, + OpenAI::Responses::ToolChoiceMcp ) ) ) @@ -220,7 +229,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes::OrHash, - OpenAI::Responses::ToolChoiceFunction::OrHash + OpenAI::Responses::ToolChoiceFunction::OrHash, + OpenAI::Responses::ToolChoiceMcp::OrHash ) ).void end @@ -279,6 +289,11 @@ module OpenAI end attr_writer :tools + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + sig { returns(T.nilable(Integer)) } + attr_accessor :top_logprobs + # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 # means only the tokens comprising the top 10% probability mass are considered. @@ -322,6 +337,7 @@ module OpenAI input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), + max_tool_calls: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any( @@ -344,7 +360,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes::OrHash, - OpenAI::Responses::ToolChoiceFunction::OrHash + OpenAI::Responses::ToolChoiceFunction::OrHash, + OpenAI::Responses::ToolChoiceMcp::OrHash ), tools: T::Array[ @@ -359,6 +376,7 @@ module OpenAI OpenAI::Responses::WebSearchTool::OrHash ) ], + top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), truncation: T.nilable( @@ -375,18 +393,19 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. include: nil, # Text, image, or file inputs to the model, used to generate a response. # @@ -408,6 +427,11 @@ module OpenAI # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). 
max_output_tokens: nil, + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + max_tool_calls: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. @@ -435,23 +459,23 @@ module OpenAI # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the request will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. service_tier: nil, # Whether to store the generated model response for later retrieval via API. store: nil, @@ -485,6 +509,9 @@ module OpenAI # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + top_logprobs: nil, # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 # means only the tokens comprising the top 10% probability mass are considered. 
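A hedged sketch of a request exercising the new create parameters documented above (the `client`, model name, and input are assumptions carried over from the earlier example, not part of this diff):

    response = client.responses.create(
      model: "gpt-4.1",
      input: "Summarize Hamlet in one sentence.",
      include: [:"message.output_text.logprobs"], # includable added in this change
      top_logprobs: 5,                            # pairs with the logprobs include
      max_tool_calls: 2                           # applies across all built-in tools
    )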
@@ -518,6 +545,7 @@ module OpenAI input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), + max_tool_calls: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any( @@ -540,7 +568,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceFunction, + OpenAI::Responses::ToolChoiceMcp ), tools: T::Array[ @@ -555,6 +584,7 @@ module OpenAI OpenAI::Responses::WebSearchTool ) ], + top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), truncation: T.nilable( @@ -597,23 +627,23 @@ module OpenAI end end - # Specifies the latency tier to use for processing the request. This parameter is - # relevant for customers subscribed to the scale tier service: + # Specifies the processing type used for serving the request. # - # - If set to 'auto', and the Project is Scale tier enabled, the system will - # utilize scale tier credits until they are exhausted. - # - If set to 'auto', and the Project is not Scale tier enabled, the request will - # be processed using the default service tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'default', the request will be processed using the default service - # tier with a lower uptime SLA and no - # latency guarantee. - # - If set to 'flex', the request will be processed with the Flex Processing - # service tier. - # [Learn more](https://platform.openai.com/docs/guides/flex-processing). + # - If set to 'auto', then the request will be processed with the service tier + # configured in the Project settings. Unless otherwise configured, the Project + # will use 'default'. + # - If set to 'default', then the request will be processed with the standard + # pricing and performance for the selected model. + # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or + # 'priority', then the request will be processed with the corresponding service + # tier. [Contact sales](https://openai.com/contact-sales) to learn more about + # Priority processing. # - When not set, the default behavior is 'auto'. # - # When this parameter is set, the response body will include the `service_tier` - # utilized. + # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. 
module ServiceTier extend OpenAI::Internal::Type::Enum @@ -646,6 +676,11 @@ module OpenAI :scale, OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol ) + PRIORITY = + T.let( + :priority, + OpenAI::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol + ) sig do override.returns( @@ -669,7 +704,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction + OpenAI::Responses::ToolChoiceFunction, + OpenAI::Responses::ToolChoiceMcp ) end diff --git a/rbi/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi index d95d1cb9..f2027488 100644 --- a/rbi/openai/models/responses/response_function_web_search.rbi +++ b/rbi/openai/models/responses/response_function_web_search.rbi @@ -16,6 +16,19 @@ module OpenAI sig { returns(String) } attr_accessor :id + # An object describing the specific action taken in this web search call. Includes + # details on how the model used the web (search, open_page, find). + sig do + returns( + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search, + OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage, + OpenAI::Responses::ResponseFunctionWebSearch::Action::Find + ) + ) + end + attr_accessor :action + # The status of the web search tool call. sig do returns( @@ -34,6 +47,12 @@ module OpenAI sig do params( id: String, + action: + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::Action::Find::OrHash + ), status: OpenAI::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol @@ -42,6 +61,9 @@ module OpenAI def self.new( # The unique ID of the web search tool call. id:, + # An object describing the specific action taken in this web search call. Includes + # details on how the model used the web (search, open_page, find). + action:, # The status of the web search tool call. status:, # The type of the web search tool call. Always `web_search_call`. @@ -53,6 +75,12 @@ module OpenAI override.returns( { id: String, + action: + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search, + OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage, + OpenAI::Responses::ResponseFunctionWebSearch::Action::Find + ), status: OpenAI::Responses::ResponseFunctionWebSearch::Status::OrSymbol, type: Symbol @@ -62,6 +90,158 @@ module OpenAI def to_hash end + # An object describing the specific action taken in this web search call. Includes + # details on how the model used the web (search, open_page, find). + module Action + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search, + OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage, + OpenAI::Responses::ResponseFunctionWebSearch::Action::Find + ) + end + + class Search < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search, + OpenAI::Internal::AnyHash + ) + end + + # The search query. + sig { returns(String) } + attr_accessor :query + + # The action type. + sig { returns(Symbol) } + attr_accessor :type + + # Domains to restrict the search or domains where results were found. 
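+ # (Hedged example of reading it: `action.domains #=> ["example.com"]` when the
+ # variant is `Action::Search`; the value shown is a placeholder.)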
+ sig { returns(T.nilable(T::Array[String])) } + attr_reader :domains + + sig { params(domains: T::Array[String]).void } + attr_writer :domains + + # Action type "search" - Performs a web search query. + sig do + params( + query: String, + domains: T::Array[String], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The search query. + query:, + # Domains to restrict the search or domains where results were found. + domains: nil, + # The action type. + type: :search + ) + end + + sig do + override.returns( + { query: String, type: Symbol, domains: T::Array[String] } + ) + end + def to_hash + end + end + + class OpenPage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage, + OpenAI::Internal::AnyHash + ) + end + + # The action type. + sig { returns(Symbol) } + attr_accessor :type + + # The URL opened by the model. + sig { returns(String) } + attr_accessor :url + + # Action type "open_page" - Opens a specific URL from search results. + sig { params(url: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The URL opened by the model. + url:, + # The action type. + type: :open_page + ) + end + + sig { override.returns({ type: Symbol, url: String }) } + def to_hash + end + end + + class Find < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Find, + OpenAI::Internal::AnyHash + ) + end + + # The pattern or text to search for within the page. + sig { returns(String) } + attr_accessor :pattern + + # The action type. + sig { returns(Symbol) } + attr_accessor :type + + # The URL of the page searched for the pattern. + sig { returns(String) } + attr_accessor :url + + # Action type "find": Searches for a pattern within a loaded page. + sig do + params(pattern: String, url: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The pattern or text to search for within the page. + pattern:, + # The URL of the page searched for the pattern. + url:, + # The action type. + type: :find + ) + end + + sig do + override.returns({ pattern: String, type: Symbol, url: String }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Action::Variants + ] + ) + end + def self.variants + end + end + # The status of the web search tool call. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi index f5f63506..1007c6fd 100644 --- a/rbi/openai/models/responses/response_includable.rbi +++ b/rbi/openai/models/responses/response_includable.rbi @@ -6,18 +6,19 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. 
# - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. module ResponseIncludable extend OpenAI::Internal::Type::Enum @@ -25,6 +26,16 @@ module OpenAI T.type_alias { T.all(Symbol, OpenAI::Responses::ResponseIncludable) } OrSymbol = T.type_alias { T.any(Symbol, String) } + CODE_INTERPRETER_CALL_OUTPUTS = + T.let( + :"code_interpreter_call.outputs", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = + T.let( + :"computer_call_output.output.image_url", + OpenAI::Responses::ResponseIncludable::TaggedSymbol + ) FILE_SEARCH_CALL_RESULTS = T.let( :"file_search_call.results", @@ -35,9 +46,9 @@ module OpenAI :"message.input_image.image_url", OpenAI::Responses::ResponseIncludable::TaggedSymbol ) - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = + MESSAGE_OUTPUT_TEXT_LOGPROBS = T.let( - :"computer_call_output.output.image_url", + :"message.output_text.logprobs", OpenAI::Responses::ResponseIncludable::TaggedSymbol ) REASONING_ENCRYPTED_CONTENT = @@ -45,11 +56,6 @@ module OpenAI :"reasoning.encrypted_content", OpenAI::Responses::ResponseIncludable::TaggedSymbol ) - CODE_INTERPRETER_CALL_OUTPUTS = - T.let( - :"code_interpreter_call.outputs", - OpenAI::Responses::ResponseIncludable::TaggedSymbol - ) sig do override.returns( diff --git a/rbi/openai/models/responses/tool_choice_mcp.rbi b/rbi/openai/models/responses/tool_choice_mcp.rbi new file mode 100644 index 00000000..7b2c782f --- /dev/null +++ b/rbi/openai/models/responses/tool_choice_mcp.rbi @@ -0,0 +1,53 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ToolChoiceMcp < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Responses::ToolChoiceMcp, OpenAI::Internal::AnyHash) + end + + # The label of the MCP server to use. + sig { returns(String) } + attr_accessor :server_label + + # For MCP tools, the type is always `mcp`. + sig { returns(Symbol) } + attr_accessor :type + + # The name of the tool to call on the server. + sig { returns(T.nilable(String)) } + attr_accessor :name + + # Use this option to force the model to call a specific tool on a remote MCP + # server. + sig do + params( + server_label: String, + name: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The label of the MCP server to use. + server_label:, + # The name of the tool to call on the server. + name: nil, + # For MCP tools, the type is always `mcp`. 
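+ # (Hedged usage sketch: callers could pass the hash form accepted via `OrHash`,
+ # e.g. `tool_choice: {type: :mcp, server_label: "deepwiki", name: "ask_question"}`
+ # in a `client.responses.create` call; the server label and tool name are
+ # placeholders.)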
+ type: :mcp + ) + end + + sig do + override.returns( + { server_label: String, type: Symbol, name: T.nilable(String) } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi index f66d4aac..d56cb2eb 100644 --- a/rbi/openai/models/responses/tool_choice_types.rbi +++ b/rbi/openai/models/responses/tool_choice_types.rbi @@ -18,7 +18,6 @@ module OpenAI # - `web_search_preview` # - `computer_use_preview` # - `code_interpreter` - # - `mcp` # - `image_generation` sig { returns(OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol) } attr_accessor :type @@ -40,7 +39,6 @@ module OpenAI # - `web_search_preview` # - `computer_use_preview` # - `code_interpreter` - # - `mcp` # - `image_generation` type: ) @@ -63,7 +61,6 @@ module OpenAI # - `web_search_preview` # - `computer_use_preview` # - `code_interpreter` - # - `mcp` # - `image_generation` module Type extend OpenAI::Internal::Type::Enum @@ -104,8 +101,6 @@ module OpenAI :code_interpreter, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol ) - MCP = - T.let(:mcp, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol) sig do override.returns( diff --git a/rbi/openai/models/responses_model.rbi b/rbi/openai/models/responses_model.rbi index 459a52c4..b7ec74bf 100644 --- a/rbi/openai/models/responses_model.rbi +++ b/rbi/openai/models/responses_model.rbi @@ -43,6 +43,26 @@ module OpenAI :"o3-pro-2025-06-10", OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol ) + O3_DEEP_RESEARCH = + T.let( + :"o3-deep-research", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + O3_DEEP_RESEARCH_2025_06_26 = + T.let( + :"o3-deep-research-2025-06-26", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + O4_MINI_DEEP_RESEARCH = + T.let( + :"o4-mini-deep-research", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) + O4_MINI_DEEP_RESEARCH_2025_06_26 = + T.let( + :"o4-mini-deep-research-2025-06-26", + OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol + ) COMPUTER_USE_PREVIEW = T.let( :"computer-use-preview", diff --git a/rbi/openai/models/webhooks/batch_cancelled_webhook_event.rbi b/rbi/openai/models/webhooks/batch_cancelled_webhook_event.rbi new file mode 100644 index 00000000..b400f160 --- /dev/null +++ b/rbi/openai/models/webhooks/batch_cancelled_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class BatchCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchCancelledWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the batch API request was cancelled. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::BatchCancelledWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `batch.cancelled`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
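+ # (Hedged note: in application code these webhook event models would typically
+ # be obtained by verifying and parsing a delivery, e.g. something like
+ # `event = client.webhooks.unwrap(request_body, headers)`, then branching on
+ # `event.type`; that helper name is an assumption, not part of this diff.)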
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a batch API request has been cancelled. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the batch API request was cancelled. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `batch.cancelled`. + type: :"batch.cancelled" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchCancelledWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the batch API request. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the batch API request. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::BatchCancelledWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/batch_completed_webhook_event.rbi b/rbi/openai/models/webhooks/batch_completed_webhook_event.rbi new file mode 100644 index 00000000..eb2777c0 --- /dev/null +++ b/rbi/openai/models/webhooks/batch_completed_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class BatchCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchCompletedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the batch API request was completed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::BatchCompletedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `batch.completed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a batch API request has been completed. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the batch API request was completed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `batch.completed`. + type: :"batch.completed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchCompletedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the batch API request. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the batch API request. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::BatchCompletedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/batch_expired_webhook_event.rbi b/rbi/openai/models/webhooks/batch_expired_webhook_event.rbi new file mode 100644 index 00000000..bb40a5fe --- /dev/null +++ b/rbi/openai/models/webhooks/batch_expired_webhook_event.rbi @@ -0,0 +1,150 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class BatchExpiredWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchExpiredWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the batch API request expired. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::BatchExpiredWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `batch.expired`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a batch API request has expired. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the batch API request expired. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `batch.expired`. + type: :"batch.expired" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchExpiredWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the batch API request. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the batch API request. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Webhooks::BatchExpiredWebhookEvent::Object) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::BatchExpiredWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/batch_failed_webhook_event.rbi b/rbi/openai/models/webhooks/batch_failed_webhook_event.rbi new file mode 100644 index 00000000..e6d403b3 --- /dev/null +++ b/rbi/openai/models/webhooks/batch_failed_webhook_event.rbi @@ -0,0 +1,149 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class BatchFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchFailedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the batch API request failed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::BatchFailedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `batch.failed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::BatchFailedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: OpenAI::Webhooks::BatchFailedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a batch API request has failed. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data::OrHash, + object: OpenAI::Webhooks::BatchFailedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the batch API request failed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `batch.failed`. + type: :"batch.failed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::BatchFailedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::BatchFailedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the batch API request. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the batch API request. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Webhooks::BatchFailedWebhookEvent::Object) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::BatchFailedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::BatchFailedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/eval_run_canceled_webhook_event.rbi b/rbi/openai/models/webhooks/eval_run_canceled_webhook_event.rbi new file mode 100644 index 00000000..41d4fcea --- /dev/null +++ b/rbi/openai/models/webhooks/eval_run_canceled_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class EvalRunCanceledWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunCanceledWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the eval run was canceled. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `eval.run.canceled`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when an eval run has been canceled. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the eval run was canceled. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `eval.run.canceled`. + type: :"eval.run.canceled" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the eval run. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the eval run. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/eval_run_failed_webhook_event.rbi b/rbi/openai/models/webhooks/eval_run_failed_webhook_event.rbi new file mode 100644 index 00000000..5df6eb00 --- /dev/null +++ b/rbi/openai/models/webhooks/eval_run_failed_webhook_event.rbi @@ -0,0 +1,151 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class EvalRunFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunFailedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the eval run failed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `eval.run.failed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when an eval run has failed. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the eval run failed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `eval.run.failed`. + type: :"eval.run.failed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the eval run. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the eval run. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::EvalRunFailedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/eval_run_succeeded_webhook_event.rbi b/rbi/openai/models/webhooks/eval_run_succeeded_webhook_event.rbi new file mode 100644 index 00000000..27c80361 --- /dev/null +++ b/rbi/openai/models/webhooks/eval_run_succeeded_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class EvalRunSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunSucceededWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the eval run succeeded. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `eval.run.succeeded`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when an eval run has succeeded. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the eval run succeeded. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `eval.run.succeeded`. + type: :"eval.run.succeeded" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the eval run. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the eval run. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbi b/rbi/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbi new file mode 100644 index 00000000..e310d37c --- /dev/null +++ b/rbi/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbi @@ -0,0 +1,158 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class FineTuningJobCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the fine-tuning job was cancelled. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig do + returns(OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data) + end + attr_reader :data + + sig do + params( + data: + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `fine_tuning.job.cancelled`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a fine-tuning job has been cancelled. + sig do + params( + id: String, + created_at: Integer, + data: + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the fine-tuning job was cancelled. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `fine_tuning.job.cancelled`. + type: :"fine_tuning.job.cancelled" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the fine-tuning job. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the fine-tuning job. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbi b/rbi/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbi new file mode 100644 index 00000000..f15252b2 --- /dev/null +++ b/rbi/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbi @@ -0,0 +1,156 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class FineTuningJobFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the fine-tuning job failed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `fine_tuning.job.failed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a fine-tuning job has failed. + sig do + params( + id: String, + created_at: Integer, + data: + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the fine-tuning job failed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `fine_tuning.job.failed`. + type: :"fine_tuning.job.failed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the fine-tuning job. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the fine-tuning job. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbi b/rbi/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbi new file mode 100644 index 00000000..2012aefe --- /dev/null +++ b/rbi/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbi @@ -0,0 +1,158 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class FineTuningJobSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the fine-tuning job succeeded. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig do + returns(OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data) + end + attr_reader :data + + sig do + params( + data: + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `fine_tuning.job.succeeded`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a fine-tuning job has succeeded. + sig do + params( + id: String, + created_at: Integer, + data: + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the fine-tuning job succeeded. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `fine_tuning.job.succeeded`. + type: :"fine_tuning.job.succeeded" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the fine-tuning job. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the fine-tuning job. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/response_cancelled_webhook_event.rbi b/rbi/openai/models/webhooks/response_cancelled_webhook_event.rbi new file mode 100644 index 00000000..7a0ff036 --- /dev/null +++ b/rbi/openai/models/webhooks/response_cancelled_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class ResponseCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseCancelledWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the model response was cancelled. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `response.cancelled`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a background response has been cancelled. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the model response was cancelled. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `response.cancelled`. + type: :"response.cancelled" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the model response. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the model response. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::ResponseCancelledWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/response_completed_webhook_event.rbi b/rbi/openai/models/webhooks/response_completed_webhook_event.rbi new file mode 100644 index 00000000..16506ae1 --- /dev/null +++ b/rbi/openai/models/webhooks/response_completed_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class ResponseCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseCompletedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the model response was completed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `response.completed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a background response has been completed. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the model response was completed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `response.completed`. + type: :"response.completed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the model response. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the model response. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::ResponseCompletedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/response_failed_webhook_event.rbi b/rbi/openai/models/webhooks/response_failed_webhook_event.rbi new file mode 100644 index 00000000..1c9edc19 --- /dev/null +++ b/rbi/openai/models/webhooks/response_failed_webhook_event.rbi @@ -0,0 +1,154 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class ResponseFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseFailedWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the model response failed. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::ResponseFailedWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `response.failed`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a background response has failed. + sig do + params( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the model response failed. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `response.failed`. + type: :"response.failed" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseFailedWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the model response. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the model response. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::ResponseFailedWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/response_incomplete_webhook_event.rbi b/rbi/openai/models/webhooks/response_incomplete_webhook_event.rbi new file mode 100644 index 00000000..94712e7b --- /dev/null +++ b/rbi/openai/models/webhooks/response_incomplete_webhook_event.rbi @@ -0,0 +1,155 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + class ResponseIncompleteWebhookEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseIncompleteWebhookEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the event. + sig { returns(String) } + attr_accessor :id + + # The Unix timestamp (in seconds) of when the model response was interrupted. + sig { returns(Integer) } + attr_accessor :created_at + + # Event data payload. + sig { returns(OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data) } + attr_reader :data + + sig do + params( + data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data::OrHash + ).void + end + attr_writer :data + + # The type of the event. Always `response.incomplete`. + sig { returns(Symbol) } + attr_accessor :type + + # The object of the event. Always `event`. 
+ sig do + returns( + T.nilable( + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::TaggedSymbol + ) + ) + end + attr_reader :object + + sig do + params( + object: + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::OrSymbol + ).void + end + attr_writer :object + + # Sent when a background response has been interrupted. + sig do + params( + id: String, + created_at: Integer, + data: + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data::OrHash, + object: + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the event. + id:, + # The Unix timestamp (in seconds) of when the model response was interrupted. + created_at:, + # Event data payload. + data:, + # The object of the event. Always `event`. + object: nil, + # The type of the event. Always `response.incomplete`. + type: :"response.incomplete" + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data, + type: Symbol, + object: + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::TaggedSymbol + } + ) + end + def to_hash + end + + class Data < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the model response. + sig { returns(String) } + attr_accessor :id + + # Event data payload. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the model response. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + + # The object of the event. Always `event`. + module Object + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + EVENT = + T.let( + :event, + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Object::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/webhooks/unwrap_webhook_event.rbi b/rbi/openai/models/webhooks/unwrap_webhook_event.rbi new file mode 100644 index 00000000..30214682 --- /dev/null +++ b/rbi/openai/models/webhooks/unwrap_webhook_event.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Webhooks + # Sent when a batch API request has been cancelled. 
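+      #
+      # `UnwrapWebhookEvent` is the union of every webhook event payload that
+      # `OpenAI::Resources::Webhooks#unwrap` can return. A minimal illustrative
+      # sketch of consuming it (assumes a configured `client`; this example is
+      # not part of the generated signatures):
+      #
+      #   case client.webhooks.unwrap(payload)
+      #   in OpenAI::Webhooks::EvalRunFailedWebhookEvent => event
+      #     puts("eval run #{event.data.id} failed")
+      #   in OpenAI::Webhooks::ResponseCompletedWebhookEvent => event
+      #     puts("response #{event.data.id} completed")
+      #   else
+      #     # handle the remaining variants of the union
+      #   end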
+      module UnwrapWebhookEvent
+        extend OpenAI::Internal::Type::Union
+
+        Variants =
+          T.type_alias do
+            T.any(
+              OpenAI::Webhooks::BatchCancelledWebhookEvent,
+              OpenAI::Webhooks::BatchCompletedWebhookEvent,
+              OpenAI::Webhooks::BatchExpiredWebhookEvent,
+              OpenAI::Webhooks::BatchFailedWebhookEvent,
+              OpenAI::Webhooks::EvalRunCanceledWebhookEvent,
+              OpenAI::Webhooks::EvalRunFailedWebhookEvent,
+              OpenAI::Webhooks::EvalRunSucceededWebhookEvent,
+              OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent,
+              OpenAI::Webhooks::FineTuningJobFailedWebhookEvent,
+              OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent,
+              OpenAI::Webhooks::ResponseCancelledWebhookEvent,
+              OpenAI::Webhooks::ResponseCompletedWebhookEvent,
+              OpenAI::Webhooks::ResponseFailedWebhookEvent,
+              OpenAI::Webhooks::ResponseIncompleteWebhookEvent
+            )
+          end
+
+        sig do
+          override.returns(
+            T::Array[OpenAI::Webhooks::UnwrapWebhookEvent::Variants]
+          )
+        end
+        def self.variants
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/openai/models/webhooks/webhook_unwrap_params.rbi b/rbi/openai/models/webhooks/webhook_unwrap_params.rbi
new file mode 100644
index 00000000..8d784207
--- /dev/null
+++ b/rbi/openai/models/webhooks/webhook_unwrap_params.rbi
@@ -0,0 +1,32 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module Webhooks
+      class WebhookUnwrapParams < OpenAI::Internal::Type::BaseModel
+        extend OpenAI::Internal::Type::RequestParameters::Converter
+        include OpenAI::Internal::Type::RequestParameters
+
+        OrHash =
+          T.type_alias do
+            T.any(
+              OpenAI::Webhooks::WebhookUnwrapParams,
+              OpenAI::Internal::AnyHash
+            )
+          end
+
+        sig do
+          params(request_options: OpenAI::RequestOptions::OrHash).returns(
+            T.attached_class
+          )
+        end
+        def self.new(request_options: {})
+        end
+
+        sig { override.returns({ request_options: OpenAI::RequestOptions }) }
+        def to_hash
+        end
+      end
+    end
+  end
+end
diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi
index 5b66b431..7addc65f 100644
--- a/rbi/openai/resources/chat/completions.rbi
+++ b/rbi/openai/resources/chat/completions.rbi
@@ -221,23 +221,23 @@ module OpenAI
       # should refer to the `system_fingerprint` response parameter to monitor changes
       # in the backend.
       seed: nil,
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
-      #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # Specifies the processing type used for serving the request.
+      #
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       service_tier: nil,
       # Not supported with latest reasoning models `o3` and `o4-mini`.
       #
@@ -247,6 +247,8 @@
       # Whether or not to store the output of this chat completion request for use in
       # our [model distillation](https://platform.openai.com/docs/guides/distillation)
       # or [evals](https://platform.openai.com/docs/guides/evals) products.
+      #
+      # Supports text and image inputs. Note: image inputs over 10MB will be dropped.
       store: nil,
       # Options for streaming response. Only set this when you set `stream: true`.
       stream_options: nil,
@@ -508,23 +510,23 @@
       # should refer to the `system_fingerprint` response parameter to monitor changes
       # in the backend.
       seed: nil,
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
-      #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # Specifies the processing type used for serving the request.
+      #
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       service_tier: nil,
       # Not supported with latest reasoning models `o3` and `o4-mini`.
# @@ -534,6 +536,8 @@ module OpenAI # Whether or not to store the output of this chat completion request for use in # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. + # + # Supports text and image inputs. Note: image inputs over 10MB will be dropped. store: nil, # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 2e7db0e6..d4a5d37d 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -29,6 +29,7 @@ module OpenAI input: OpenAI::Responses::ResponseCreateParams::Input::Variants, instructions: T.nilable(String), max_output_tokens: T.nilable(Integer), + max_tool_calls: T.nilable(Integer), metadata: T.nilable(T::Hash[Symbol, String]), model: T.any( @@ -51,7 +52,8 @@ module OpenAI T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Responses::ToolChoiceTypes::OrHash, - OpenAI::Responses::ToolChoiceFunction::OrHash + OpenAI::Responses::ToolChoiceFunction::OrHash, + OpenAI::Responses::ToolChoiceMcp::OrHash ), tools: T::Array[ @@ -66,6 +68,7 @@ module OpenAI OpenAI::Responses::WebSearchTool::OrHash ) ], + top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), truncation: T.nilable( @@ -83,18 +86,19 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. # - `file_search_call.results`: Include the search results of the file search tool # call. # - `message.input_image.image_url`: Include image urls from the input message. - # - `computer_call_output.output.image_url`: Include image urls from the computer - # call output. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning # tokens in reasoning item outputs. This enables reasoning items to be used in # multi-turn conversations when using the Responses API statelessly (like when # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). - # - `code_interpreter_call.outputs`: Includes the outputs of python code execution - # in code interpreter tool call items. include: nil, # Text, image, or file inputs to the model, used to generate a response. # @@ -116,6 +120,11 @@ module OpenAI # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). max_output_tokens: nil, + # The maximum number of total calls to built-in tools that can be processed in a + # response. This maximum number applies across all built-in tool calls, not per + # individual tool. Any further attempts to call a tool by the model will be + # ignored. + max_tool_calls: nil, # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and # querying for objects via API or the dashboard. @@ -143,23 +152,23 @@ module OpenAI # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, - # Specifies the latency tier to use for processing the request. 
This parameter is
-      # relevant for customers subscribed to the scale tier service:
+      # Specifies the processing type used for serving the request.
       #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+      # When the `service_tier` parameter is set, the response body will include the
+      # `service_tier` value based on the processing mode actually used to serve the
+      # request. This response value may be different from the value set in the
+      # parameter.
       service_tier: nil,
       # Whether to store the generated model response for later retrieval via API.
       store: nil,
@@ -193,6 +202,9 @@
       # the model to call your own code. Learn more about
       # [function calling](https://platform.openai.com/docs/guides/function-calling).
       tools: nil,
+      # An integer between 0 and 20 specifying the number of most likely tokens to
+      # return at each token position, each with an associated log probability.
+      top_logprobs: nil,
       # An alternative to sampling with temperature, called nucleus sampling, where the
       # model considers the results of the tokens with top_p probability mass. So 0.1
       # means only the tokens comprising the top 10% probability mass are considered.
@@ -241,6 +253,7 @@
         input: OpenAI::Responses::ResponseCreateParams::Input::Variants,
         instructions: T.nilable(String),
         max_output_tokens: T.nilable(Integer),
+        max_tool_calls: T.nilable(Integer),
         metadata: T.nilable(T::Hash[Symbol, String]),
         model:
           T.any(
@@ -263,7 +276,8 @@
           T.any(
             OpenAI::Responses::ToolChoiceOptions::OrSymbol,
             OpenAI::Responses::ToolChoiceTypes::OrHash,
-            OpenAI::Responses::ToolChoiceFunction::OrHash
+            OpenAI::Responses::ToolChoiceFunction::OrHash,
+            OpenAI::Responses::ToolChoiceMcp::OrHash
           ),
         tools:
           T::Array[
@@ -278,6 +292,7 @@
             OpenAI::Responses::WebSearchTool::OrHash
           )
         ],
+        top_logprobs: T.nilable(Integer),
         top_p: T.nilable(Float),
         truncation:
           T.nilable(
@@ -299,18 +314,19 @@
       # Specify additional output data to include in the model response. Currently
       # supported values are:
       #
+      # - `code_interpreter_call.outputs`: Includes the outputs of python code execution
+      #   in code interpreter tool call items.
+      # - `computer_call_output.output.image_url`: Include image urls from the computer
+      #   call output.
       # - `file_search_call.results`: Include the search results of the file search tool
       #   call.
       # - `message.input_image.image_url`: Include image urls from the input message.
+      # - `message.output_text.logprobs`: Include logprobs with assistant messages.
       # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning
       #   tokens in reasoning item outputs. This enables reasoning items to be used in
       #   multi-turn conversations when using the Responses API statelessly (like when
       #   the `store` parameter is set to `false`, or when an organization is enrolled
       #   in the zero data retention program).
-      # - `code_interpreter_call.outputs`: Includes the outputs of python code execution
-      #   in code interpreter tool call items.
       include: nil,
       # Text, image, or file inputs to the model, used to generate a response.
       #
@@ -332,6 +348,11 @@
       # including visible output tokens and
       # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning).
       max_output_tokens: nil,
+      # The maximum number of total calls to built-in tools that can be processed in a
+      # response. This maximum number applies across all built-in tool calls, not per
+      # individual tool. Any further attempts to call a tool by the model will be
+      # ignored.
+      max_tool_calls: nil,
       # Set of 16 key-value pairs that can be attached to an object. This can be useful
       # for storing additional information about the object in a structured format, and
       # querying for objects via API or the dashboard.
@@ -359,23 +380,23 @@
       # Configuration options for
       # [reasoning models](https://platform.openai.com/docs/guides/reasoning).
       reasoning: nil,
-      # Specifies the latency tier to use for processing the request. This parameter is
-      # relevant for customers subscribed to the scale tier service:
+      # Specifies the processing type used for serving the request.
       #
-      # - If set to 'auto', and the Project is Scale tier enabled, the system will
-      #   utilize scale tier credits until they are exhausted.
-      # - If set to 'auto', and the Project is not Scale tier enabled, the request will
-      #   be processed using the default service tier with a lower uptime SLA and no
-      #   latency guarantee.
-      # - If set to 'default', the request will be processed using the default service
-      #   tier with a lower uptime SLA and no latency guarantee.
-      # - If set to 'flex', the request will be processed with the Flex Processing
-      #   service tier.
-      #   [Learn more](https://platform.openai.com/docs/guides/flex-processing).
+      # - If set to 'auto', then the request will be processed with the service tier
+      #   configured in the Project settings. Unless otherwise configured, the Project
+      #   will use 'default'.
+      # - If set to 'default', then the request will be processed with the standard
+      #   pricing and performance for the selected model.
+      # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or
+      #   'priority', then the request will be processed with the corresponding service
+      #   tier. [Contact sales](https://openai.com/contact-sales) to learn more about
+      #   Priority processing.
       # - When not set, the default behavior is 'auto'.
       #
-      # When this parameter is set, the response body will include the `service_tier`
-      # utilized.
+ # When the `service_tier` parameter is set, the response body will include the + # `service_tier` value based on the processing mode actually used to serve the + # request. This response value may be different from the value set in the + # parameter. service_tier: nil, # Whether to store the generated model response for later retrieval via API. store: nil, @@ -409,6 +430,9 @@ module OpenAI # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). tools: nil, + # An integer between 0 and 20 specifying the number of most likely tokens to + # return at each token position, each with an associated log probability. + top_logprobs: nil, # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 # means only the tokens comprising the top 10% probability mass are considered. diff --git a/rbi/openai/resources/webhooks.rbi b/rbi/openai/resources/webhooks.rbi new file mode 100644 index 00000000..c5ac27ec --- /dev/null +++ b/rbi/openai/resources/webhooks.rbi @@ -0,0 +1,39 @@ +# typed: strong + +module OpenAI + module Resources + class Webhooks + # Validates that the given payload was sent by OpenAI and parses the payload. + sig do + params(payload: String).returns( + T.any( + OpenAI::Webhooks::BatchCancelledWebhookEvent, + OpenAI::Webhooks::BatchCompletedWebhookEvent, + OpenAI::Webhooks::BatchExpiredWebhookEvent, + OpenAI::Webhooks::BatchFailedWebhookEvent, + OpenAI::Webhooks::EvalRunCanceledWebhookEvent, + OpenAI::Webhooks::EvalRunFailedWebhookEvent, + OpenAI::Webhooks::EvalRunSucceededWebhookEvent, + OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent, + OpenAI::Webhooks::FineTuningJobFailedWebhookEvent, + OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent, + OpenAI::Webhooks::ResponseCancelledWebhookEvent, + OpenAI::Webhooks::ResponseCompletedWebhookEvent, + OpenAI::Webhooks::ResponseFailedWebhookEvent, + OpenAI::Webhooks::ResponseIncompleteWebhookEvent + ) + ) + end + def unwrap( + # The raw webhook payload as a string + payload + ) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end +end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 2b085363..8e9b34a1 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -36,6 +36,8 @@ module OpenAI attr_reader vector_stores: OpenAI::Resources::VectorStores + attr_reader webhooks: OpenAI::Resources::Webhooks + attr_reader beta: OpenAI::Resources::Beta attr_reader batches: OpenAI::Resources::Batches diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index 3d40bce0..997e6f93 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -192,4 +192,6 @@ module OpenAI class VectorStoreSearchParams = OpenAI::Models::VectorStoreSearchParams class VectorStoreUpdateParams = OpenAI::Models::VectorStoreUpdateParams + + module Webhooks = OpenAI::Models::Webhooks end diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 8a7b555a..7b1f8e0a 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -13,6 +13,10 @@ module OpenAI | :"o1-pro-2025-03-19" | :"o3-pro" | :"o3-pro-2025-06-10" + | :"o3-deep-research" + | :"o3-deep-research-2025-06-26" + | :"o4-mini-deep-research" + | :"o4-mini-deep-research-2025-06-26" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" @@ -23,6 +27,10 @@ module OpenAI 
O1_PRO_2025_03_19: :"o1-pro-2025-03-19" O3_PRO: :"o3-pro" O3_PRO_2025_06_10: :"o3-pro-2025-06-10" + O3_DEEP_RESEARCH: :"o3-deep-research" + O3_DEEP_RESEARCH_2025_06_26: :"o3-deep-research-2025-06-26" + O4_MINI_DEEP_RESEARCH: :"o4-mini-deep-research" + O4_MINI_DEEP_RESEARCH_2025_06_26: :"o4-mini-deep-research-2025-06-26" COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index 9b3eb5f9..ee4cae65 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -127,7 +127,7 @@ module OpenAI end end - type service_tier = :auto | :default | :flex | :scale + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier extend OpenAI::Internal::Type::Enum @@ -136,6 +136,7 @@ module OpenAI DEFAULT: :default FLEX: :flex SCALE: :scale + PRIORITY: :priority def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index e68150c9..8c263bcd 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -272,7 +272,7 @@ module OpenAI end end - type service_tier = :auto | :default | :flex | :scale + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier extend OpenAI::Internal::Type::Enum @@ -281,6 +281,7 @@ module OpenAI DEFAULT: :default FLEX: :flex SCALE: :scale + PRIORITY: :priority def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index e35205f3..00dd9ff5 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -280,7 +280,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::response_format] end - type service_tier = :auto | :default | :flex | :scale + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier extend OpenAI::Internal::Type::Enum @@ -289,6 +289,7 @@ module OpenAI DEFAULT: :default FLEX: :flex SCALE: :scale + PRIORITY: :priority def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::service_tier] end diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index fd899bb5..7cf7b0db 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -3,17 +3,45 @@ module OpenAI type images_response = { created: Integer, + background: OpenAI::Models::ImagesResponse::background, data: ::Array[OpenAI::Image], + output_format: OpenAI::Models::ImagesResponse::output_format, + quality: OpenAI::Models::ImagesResponse::quality, + size: OpenAI::Models::ImagesResponse::size, usage: OpenAI::ImagesResponse::Usage } class ImagesResponse < OpenAI::Internal::Type::BaseModel attr_accessor created: Integer + attr_reader background: OpenAI::Models::ImagesResponse::background? + + def background=: ( + OpenAI::Models::ImagesResponse::background + ) -> OpenAI::Models::ImagesResponse::background + attr_reader data: ::Array[OpenAI::Image]? def data=: (::Array[OpenAI::Image]) -> ::Array[OpenAI::Image] + attr_reader output_format: OpenAI::Models::ImagesResponse::output_format? 
+ + def output_format=: ( + OpenAI::Models::ImagesResponse::output_format + ) -> OpenAI::Models::ImagesResponse::output_format + + attr_reader quality: OpenAI::Models::ImagesResponse::quality? + + def quality=: ( + OpenAI::Models::ImagesResponse::quality + ) -> OpenAI::Models::ImagesResponse::quality + + attr_reader size: OpenAI::Models::ImagesResponse::size? + + def size=: ( + OpenAI::Models::ImagesResponse::size + ) -> OpenAI::Models::ImagesResponse::size + attr_reader usage: OpenAI::ImagesResponse::Usage? def usage=: ( @@ -22,16 +50,71 @@ module OpenAI def initialize: ( created: Integer, + ?background: OpenAI::Models::ImagesResponse::background, ?data: ::Array[OpenAI::Image], + ?output_format: OpenAI::Models::ImagesResponse::output_format, + ?quality: OpenAI::Models::ImagesResponse::quality, + ?size: OpenAI::Models::ImagesResponse::size, ?usage: OpenAI::ImagesResponse::Usage ) -> void def to_hash: -> { created: Integer, + background: OpenAI::Models::ImagesResponse::background, data: ::Array[OpenAI::Image], + output_format: OpenAI::Models::ImagesResponse::output_format, + quality: OpenAI::Models::ImagesResponse::quality, + size: OpenAI::Models::ImagesResponse::size, usage: OpenAI::ImagesResponse::Usage } + type background = :transparent | :opaque + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + + def self?.values: -> ::Array[OpenAI::Models::ImagesResponse::background] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::ImagesResponse::output_format] + end + + type quality = :low | :medium | :high + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::ImagesResponse::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + + def self?.values: -> ::Array[OpenAI::Models::ImagesResponse::size] + end + type usage = { input_tokens: Integer, diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 8825885e..bc63e515 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -19,12 +19,14 @@ module OpenAI top_p: Float?, background: bool?, max_output_tokens: Integer?, + max_tool_calls: Integer?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Responses::ResponseTextConfig, + top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, user: String @@ -63,6 +65,8 @@ module OpenAI attr_accessor max_output_tokens: Integer? + attr_accessor max_tool_calls: Integer? + attr_accessor previous_response_id: String? attr_accessor prompt: OpenAI::Responses::ResponsePrompt? @@ -83,6 +87,8 @@ module OpenAI OpenAI::Responses::ResponseTextConfig ) -> OpenAI::Responses::ResponseTextConfig + attr_accessor top_logprobs: Integer? + attr_accessor truncation: OpenAI::Models::Responses::Response::truncation? attr_reader usage: OpenAI::Responses::ResponseUsage? 
@@ -111,12 +117,14 @@ module OpenAI top_p: Float?, ?background: bool?, ?max_output_tokens: Integer?, + ?max_tool_calls: Integer?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, ?reasoning: OpenAI::Reasoning?, ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, ?text: OpenAI::Responses::ResponseTextConfig, + ?top_logprobs: Integer?, ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, ?user: String, @@ -140,12 +148,14 @@ module OpenAI top_p: Float?, background: bool?, max_output_tokens: Integer?, + max_tool_calls: Integer?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, reasoning: OpenAI::Reasoning?, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Responses::ResponseTextConfig, + top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, user: String @@ -198,6 +208,7 @@ module OpenAI OpenAI::Models::Responses::tool_choice_options | OpenAI::Responses::ToolChoiceTypes | OpenAI::Responses::ToolChoiceFunction + | OpenAI::Responses::ToolChoiceMcp module ToolChoice extend OpenAI::Internal::Type::Union @@ -205,7 +216,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::tool_choice] end - type service_tier = :auto | :default | :flex | :scale + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier extend OpenAI::Internal::Type::Enum @@ -214,6 +225,7 @@ module OpenAI DEFAULT: :default FLEX: :flex SCALE: :scale + PRIORITY: :priority def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index faedd8f8..da0ff6a2 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -8,6 +8,7 @@ module OpenAI input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, max_output_tokens: Integer?, + max_tool_calls: Integer?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, parallel_tool_calls: bool?, @@ -20,6 +21,7 @@ module OpenAI text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], + top_logprobs: Integer?, top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, user: String @@ -44,6 +46,8 @@ module OpenAI attr_accessor max_output_tokens: Integer? + attr_accessor max_tool_calls: Integer? + attr_accessor metadata: OpenAI::Models::metadata? attr_reader model: OpenAI::Models::responses_model? @@ -84,6 +88,8 @@ module OpenAI ::Array[OpenAI::Models::Responses::tool] ) -> ::Array[OpenAI::Models::Responses::tool] + attr_accessor top_logprobs: Integer? + attr_accessor top_p: Float? attr_accessor truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation? 
@@ -98,6 +104,7 @@ module OpenAI ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, + ?max_tool_calls: Integer?, ?metadata: OpenAI::Models::metadata?, ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, @@ -110,6 +117,7 @@ module OpenAI ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_logprobs: Integer?, ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, @@ -122,6 +130,7 @@ module OpenAI input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, max_output_tokens: Integer?, + max_tool_calls: Integer?, metadata: OpenAI::Models::metadata?, model: OpenAI::Models::responses_model, parallel_tool_calls: bool?, @@ -134,6 +143,7 @@ module OpenAI text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], + top_logprobs: Integer?, top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, user: String, @@ -148,7 +158,7 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::input] end - type service_tier = :auto | :default | :flex | :scale + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier extend OpenAI::Internal::Type::Enum @@ -157,6 +167,7 @@ module OpenAI DEFAULT: :default FLEX: :flex SCALE: :scale + PRIORITY: :priority def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::service_tier] end @@ -165,6 +176,7 @@ module OpenAI OpenAI::Models::Responses::tool_choice_options | OpenAI::Responses::ToolChoiceTypes | OpenAI::Responses::ToolChoiceFunction + | OpenAI::Responses::ToolChoiceMcp module ToolChoice extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 0aa3c5a6..268883f4 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -4,6 +4,7 @@ module OpenAI type response_function_web_search = { id: String, + action: OpenAI::Models::Responses::ResponseFunctionWebSearch::action, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, type: :web_search_call } @@ -11,22 +12,88 @@ module OpenAI class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel attr_accessor id: String + attr_accessor action: OpenAI::Models::Responses::ResponseFunctionWebSearch::action + attr_accessor status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status attr_accessor type: :web_search_call def initialize: ( id: String, + action: OpenAI::Models::Responses::ResponseFunctionWebSearch::action, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, ?type: :web_search_call ) -> void def to_hash: -> { id: String, + action: OpenAI::Models::Responses::ResponseFunctionWebSearch::action, status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, type: :web_search_call } + type action = + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search + | OpenAI::Responses::ResponseFunctionWebSearch::Action::OpenPage + | OpenAI::Responses::ResponseFunctionWebSearch::Action::Find + + module Action + extend OpenAI::Internal::Type::Union + + type 
search = + { query: String, type: :search, domains: ::Array[String] } + + class Search < OpenAI::Internal::Type::BaseModel + attr_accessor query: String + + attr_accessor type: :search + + attr_reader domains: ::Array[String]? + + def domains=: (::Array[String]) -> ::Array[String] + + def initialize: ( + query: String, + ?domains: ::Array[String], + ?type: :search + ) -> void + + def to_hash: -> { + query: String, + type: :search, + domains: ::Array[String] + } + end + + type open_page = { type: :open_page, url: String } + + class OpenPage < OpenAI::Internal::Type::BaseModel + attr_accessor type: :open_page + + attr_accessor url: String + + def initialize: (url: String, ?type: :open_page) -> void + + def to_hash: -> { type: :open_page, url: String } + end + + type find = { pattern: String, type: :find, url: String } + + class Find < OpenAI::Internal::Type::BaseModel + attr_accessor pattern: String + + attr_accessor type: :find + + attr_accessor url: String + + def initialize: (pattern: String, url: String, ?type: :find) -> void + + def to_hash: -> { pattern: String, type: :find, url: String } + end + + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::action] + end + type status = :in_progress | :searching | :completed | :failed module Status diff --git a/sig/openai/models/responses/response_includable.rbs b/sig/openai/models/responses/response_includable.rbs index 7d37f3af..5d4fdc9c 100644 --- a/sig/openai/models/responses/response_includable.rbs +++ b/sig/openai/models/responses/response_includable.rbs @@ -2,20 +2,22 @@ module OpenAI module Models module Responses type response_includable = - :"file_search_call.results" - | :"message.input_image.image_url" + :"code_interpreter_call.outputs" | :"computer_call_output.output.image_url" + | :"file_search_call.results" + | :"message.input_image.image_url" + | :"message.output_text.logprobs" | :"reasoning.encrypted_content" - | :"code_interpreter_call.outputs" module ResponseIncludable extend OpenAI::Internal::Type::Enum + CODE_INTERPRETER_CALL_OUTPUTS: :"code_interpreter_call.outputs" + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" FILE_SEARCH_CALL_RESULTS: :"file_search_call.results" MESSAGE_INPUT_IMAGE_IMAGE_URL: :"message.input_image.image_url" - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL: :"computer_call_output.output.image_url" + MESSAGE_OUTPUT_TEXT_LOGPROBS: :"message.output_text.logprobs" REASONING_ENCRYPTED_CONTENT: :"reasoning.encrypted_content" - CODE_INTERPRETER_CALL_OUTPUTS: :"code_interpreter_call.outputs" def self?.values: -> ::Array[OpenAI::Models::Responses::response_includable] end diff --git a/sig/openai/models/responses/tool_choice_mcp.rbs b/sig/openai/models/responses/tool_choice_mcp.rbs new file mode 100644 index 00000000..1a5f20a6 --- /dev/null +++ b/sig/openai/models/responses/tool_choice_mcp.rbs @@ -0,0 +1,23 @@ +module OpenAI + module Models + module Responses + type tool_choice_mcp = { server_label: String, type: :mcp, name: String? } + + class ToolChoiceMcp < OpenAI::Internal::Type::BaseModel + attr_accessor server_label: String + + attr_accessor type: :mcp + + attr_accessor name: String? + + def initialize: ( + server_label: String, + ?name: String?, + ?type: :mcp + ) -> void + + def to_hash: -> { server_label: String, type: :mcp, name: String? 
} + end + end + end +end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 5b7a418e..cae0cf70 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -22,7 +22,6 @@ module OpenAI | :web_search_preview_2025_03_11 | :image_generation | :code_interpreter - | :mcp module Type extend OpenAI::Internal::Type::Enum @@ -33,7 +32,6 @@ module OpenAI WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 IMAGE_GENERATION: :image_generation CODE_INTERPRETER: :code_interpreter - MCP: :mcp def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_] end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index cd5a5405..fb64b13b 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -13,6 +13,10 @@ module OpenAI | :"o1-pro-2025-03-19" | :"o3-pro" | :"o3-pro-2025-06-10" + | :"o3-deep-research" + | :"o3-deep-research-2025-06-26" + | :"o4-mini-deep-research" + | :"o4-mini-deep-research-2025-06-26" | :"computer-use-preview" | :"computer-use-preview-2025-03-11" @@ -23,6 +27,10 @@ module OpenAI O1_PRO_2025_03_19: :"o1-pro-2025-03-19" O3_PRO: :"o3-pro" O3_PRO_2025_06_10: :"o3-pro-2025-06-10" + O3_DEEP_RESEARCH: :"o3-deep-research" + O3_DEEP_RESEARCH_2025_06_26: :"o3-deep-research-2025-06-26" + O4_MINI_DEEP_RESEARCH: :"o4-mini-deep-research" + O4_MINI_DEEP_RESEARCH_2025_06_26: :"o4-mini-deep-research-2025-06-26" COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" diff --git a/sig/openai/models/webhooks/batch_cancelled_webhook_event.rbs b/sig/openai/models/webhooks/batch_cancelled_webhook_event.rbs new file mode 100644 index 00000000..9efbe689 --- /dev/null +++ b/sig/openai/models/webhooks/batch_cancelled_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type batch_cancelled_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data, + type: :"batch.cancelled", + object: OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object + } + + class BatchCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data + + attr_accessor type: :"batch.cancelled" + + attr_reader object: OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object + ) -> OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object, + ?type: :"batch.cancelled" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCancelledWebhookEvent::Data, + type: :"batch.cancelled", + object: OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::BatchCancelledWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/batch_completed_webhook_event.rbs b/sig/openai/models/webhooks/batch_completed_webhook_event.rbs new file mode 100644 index 00000000..2a113018 --- /dev/null +++ b/sig/openai/models/webhooks/batch_completed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type batch_completed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data, + type: :"batch.completed", + object: OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object + } + + class BatchCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data + + attr_accessor type: :"batch.completed" + + attr_reader object: OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object, + ?type: :"batch.completed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchCompletedWebhookEvent::Data, + type: :"batch.completed", + object: OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::BatchCompletedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/batch_expired_webhook_event.rbs b/sig/openai/models/webhooks/batch_expired_webhook_event.rbs new file mode 100644 index 00000000..8f6d4f00 --- /dev/null +++ b/sig/openai/models/webhooks/batch_expired_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type batch_expired_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data, + type: :"batch.expired", + object: OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object + } + + class BatchExpiredWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data + + attr_accessor type: :"batch.expired" + + attr_reader object: OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object + ) -> OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object, + ?type: :"batch.expired" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchExpiredWebhookEvent::Data, + type: :"batch.expired", + object: OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::BatchExpiredWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/batch_failed_webhook_event.rbs b/sig/openai/models/webhooks/batch_failed_webhook_event.rbs new file mode 100644 index 00000000..a7426344 --- /dev/null +++ b/sig/openai/models/webhooks/batch_failed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type batch_failed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data, + type: :"batch.failed", + object: OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object + } + + class BatchFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data + + attr_accessor type: :"batch.failed" + + attr_reader object: OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object, + ?type: :"batch.failed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::BatchFailedWebhookEvent::Data, + type: :"batch.failed", + object: OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::BatchFailedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/eval_run_canceled_webhook_event.rbs b/sig/openai/models/webhooks/eval_run_canceled_webhook_event.rbs new file mode 100644 index 00000000..f1a415a4 --- /dev/null +++ b/sig/openai/models/webhooks/eval_run_canceled_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type eval_run_canceled_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data, + type: :"eval.run.canceled", + object: OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object + } + + class EvalRunCanceledWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data + + attr_accessor type: :"eval.run.canceled" + + attr_reader object: OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object + ) -> OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object, + ?type: :"eval.run.canceled" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunCanceledWebhookEvent::Data, + type: :"eval.run.canceled", + object: OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::EvalRunCanceledWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/eval_run_failed_webhook_event.rbs b/sig/openai/models/webhooks/eval_run_failed_webhook_event.rbs new file mode 100644 index 00000000..7cad463b --- /dev/null +++ b/sig/openai/models/webhooks/eval_run_failed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type eval_run_failed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data, + type: :"eval.run.failed", + object: OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object + } + + class EvalRunFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data + + attr_accessor type: :"eval.run.failed" + + attr_reader object: OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object, + ?type: :"eval.run.failed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunFailedWebhookEvent::Data, + type: :"eval.run.failed", + object: OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::EvalRunFailedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/eval_run_succeeded_webhook_event.rbs b/sig/openai/models/webhooks/eval_run_succeeded_webhook_event.rbs new file mode 100644 index 00000000..22ecc65d --- /dev/null +++ b/sig/openai/models/webhooks/eval_run_succeeded_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type eval_run_succeeded_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data, + type: :"eval.run.succeeded", + object: OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object + } + + class EvalRunSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data + + attr_accessor type: :"eval.run.succeeded" + + attr_reader object: OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object + ) -> OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object, + ?type: :"eval.run.succeeded" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::EvalRunSucceededWebhookEvent::Data, + type: :"eval.run.succeeded", + object: OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::EvalRunSucceededWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbs b/sig/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbs new file mode 100644 index 00000000..b60d0577 --- /dev/null +++ b/sig/openai/models/webhooks/fine_tuning_job_cancelled_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type fine_tuning_job_cancelled_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data, + type: :"fine_tuning.job.cancelled", + object: OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object + } + + class FineTuningJobCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data + + attr_accessor type: :"fine_tuning.job.cancelled" + + attr_reader object: OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object + ) -> OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object, + ?type: :"fine_tuning.job.cancelled" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent::Data, + type: :"fine_tuning.job.cancelled", + object: OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::FineTuningJobCancelledWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbs b/sig/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbs new file mode 100644 index 00000000..007817cf --- /dev/null +++ b/sig/openai/models/webhooks/fine_tuning_job_failed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type fine_tuning_job_failed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data, + type: :"fine_tuning.job.failed", + object: OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object + } + + class FineTuningJobFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data + + attr_accessor type: :"fine_tuning.job.failed" + + attr_reader object: OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object, + ?type: :"fine_tuning.job.failed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobFailedWebhookEvent::Data, + type: :"fine_tuning.job.failed", + object: OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::FineTuningJobFailedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbs b/sig/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbs new file mode 100644 index 00000000..f3c862eb --- /dev/null +++ b/sig/openai/models/webhooks/fine_tuning_job_succeeded_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type fine_tuning_job_succeeded_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data, + type: :"fine_tuning.job.succeeded", + object: OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object + } + + class FineTuningJobSucceededWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data + + attr_accessor type: :"fine_tuning.job.succeeded" + + attr_reader object: OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object + ) -> OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object, + ?type: :"fine_tuning.job.succeeded" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent::Data, + type: :"fine_tuning.job.succeeded", + object: OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::FineTuningJobSucceededWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/response_cancelled_webhook_event.rbs b/sig/openai/models/webhooks/response_cancelled_webhook_event.rbs new file mode 100644 index 00000000..bac1c69e --- /dev/null +++ b/sig/openai/models/webhooks/response_cancelled_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type response_cancelled_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data, + type: :"response.cancelled", + object: OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object + } + + class ResponseCancelledWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data + + attr_accessor type: :"response.cancelled" + + attr_reader object: OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object + ) -> OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object, + ?type: :"response.cancelled" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCancelledWebhookEvent::Data, + type: :"response.cancelled", + object: OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::ResponseCancelledWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/response_completed_webhook_event.rbs b/sig/openai/models/webhooks/response_completed_webhook_event.rbs new file mode 100644 index 00000000..e80a2f95 --- /dev/null +++ b/sig/openai/models/webhooks/response_completed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type response_completed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data, + type: :"response.completed", + object: OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object + } + + class ResponseCompletedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data + + attr_accessor type: :"response.completed" + + attr_reader object: OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object, + ?type: :"response.completed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseCompletedWebhookEvent::Data, + type: :"response.completed", + object: OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::ResponseCompletedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/response_failed_webhook_event.rbs b/sig/openai/models/webhooks/response_failed_webhook_event.rbs new file mode 100644 index 00000000..3b0b25b2 --- /dev/null +++ b/sig/openai/models/webhooks/response_failed_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type response_failed_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data, + type: :"response.failed", + object: OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object + } + + class ResponseFailedWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data + + attr_accessor type: :"response.failed" + + attr_reader object: OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object + ) -> OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object, + ?type: :"response.failed" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseFailedWebhookEvent::Data, + type: :"response.failed", + object: OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::ResponseFailedWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/response_incomplete_webhook_event.rbs b/sig/openai/models/webhooks/response_incomplete_webhook_event.rbs new file mode 100644 index 00000000..4e2b3970 --- /dev/null +++ b/sig/openai/models/webhooks/response_incomplete_webhook_event.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Webhooks + type response_incomplete_webhook_event = + { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data, + type: :"response.incomplete", + object: OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object + } + + class ResponseIncompleteWebhookEvent < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data + + attr_accessor type: :"response.incomplete" + + attr_reader object: OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object? 
+ + def object=: ( + OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object + ) -> OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object + + def initialize: ( + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data, + ?object: OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object, + ?type: :"response.incomplete" + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + data: OpenAI::Webhooks::ResponseIncompleteWebhookEvent::Data, + type: :"response.incomplete", + object: OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object + } + + type data = { id: String } + + class Data < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + + type object = :event + + module Object + extend OpenAI::Internal::Type::Enum + + EVENT: :event + + def self?.values: -> ::Array[OpenAI::Models::Webhooks::ResponseIncompleteWebhookEvent::object] + end + end + end + end +end diff --git a/sig/openai/models/webhooks/unwrap_webhook_event.rbs b/sig/openai/models/webhooks/unwrap_webhook_event.rbs new file mode 100644 index 00000000..906b2b2f --- /dev/null +++ b/sig/openai/models/webhooks/unwrap_webhook_event.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Models + module Webhooks + type unwrap_webhook_event = + OpenAI::Webhooks::BatchCancelledWebhookEvent + | OpenAI::Webhooks::BatchCompletedWebhookEvent + | OpenAI::Webhooks::BatchExpiredWebhookEvent + | OpenAI::Webhooks::BatchFailedWebhookEvent + | OpenAI::Webhooks::EvalRunCanceledWebhookEvent + | OpenAI::Webhooks::EvalRunFailedWebhookEvent + | OpenAI::Webhooks::EvalRunSucceededWebhookEvent + | OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent + | OpenAI::Webhooks::FineTuningJobFailedWebhookEvent + | OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent + | OpenAI::Webhooks::ResponseCancelledWebhookEvent + | OpenAI::Webhooks::ResponseCompletedWebhookEvent + | OpenAI::Webhooks::ResponseFailedWebhookEvent + | OpenAI::Webhooks::ResponseIncompleteWebhookEvent + + module UnwrapWebhookEvent + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Webhooks::unwrap_webhook_event] + end + end + end +end diff --git a/sig/openai/models/webhooks/webhook_unwrap_params.rbs b/sig/openai/models/webhooks/webhook_unwrap_params.rbs new file mode 100644 index 00000000..58737bf3 --- /dev/null +++ b/sig/openai/models/webhooks/webhook_unwrap_params.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Webhooks + type webhook_unwrap_params = + { } & OpenAI::Internal::Type::request_parameters + + class WebhookUnwrapParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end + end +end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 512efc12..8c5129cc 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -9,6 +9,7 @@ module OpenAI ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, + ?max_tool_calls: Integer?, ?metadata: OpenAI::Models::metadata?, ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, @@ -21,6 +22,7 @@ module OpenAI ?text: 
OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_logprobs: Integer?, ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, @@ -33,6 +35,7 @@ module OpenAI ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, ?max_output_tokens: Integer?, + ?max_tool_calls: Integer?, ?metadata: OpenAI::Models::metadata?, ?model: OpenAI::Models::responses_model, ?parallel_tool_calls: bool?, @@ -45,6 +48,7 @@ module OpenAI ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], + ?top_logprobs: Integer?, ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, diff --git a/sig/openai/resources/webhooks.rbs b/sig/openai/resources/webhooks.rbs new file mode 100644 index 00000000..bf67f552 --- /dev/null +++ b/sig/openai/resources/webhooks.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Resources + class Webhooks + def unwrap: ( + String payload + ) -> (OpenAI::Webhooks::BatchCancelledWebhookEvent + | OpenAI::Webhooks::BatchCompletedWebhookEvent + | OpenAI::Webhooks::BatchExpiredWebhookEvent + | OpenAI::Webhooks::BatchFailedWebhookEvent + | OpenAI::Webhooks::EvalRunCanceledWebhookEvent + | OpenAI::Webhooks::EvalRunFailedWebhookEvent + | OpenAI::Webhooks::EvalRunSucceededWebhookEvent + | OpenAI::Webhooks::FineTuningJobCancelledWebhookEvent + | OpenAI::Webhooks::FineTuningJobFailedWebhookEvent + | OpenAI::Webhooks::FineTuningJobSucceededWebhookEvent + | OpenAI::Webhooks::ResponseCancelledWebhookEvent + | OpenAI::Webhooks::ResponseCompletedWebhookEvent + | OpenAI::Webhooks::ResponseFailedWebhookEvent + | OpenAI::Webhooks::ResponseIncompleteWebhookEvent) + + def initialize: (client: OpenAI::Client) -> void + end + end +end diff --git a/test/openai/resources/images_test.rb b/test/openai/resources/images_test.rb index 6d6dd917..0b61a9a0 100644 --- a/test/openai/resources/images_test.rb +++ b/test/openai/resources/images_test.rb @@ -13,7 +13,11 @@ def test_create_variation_required_params assert_pattern do response => { created: Integer, + background: OpenAI::ImagesResponse::Background | nil, data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + output_format: OpenAI::ImagesResponse::OutputFormat | nil, + quality: OpenAI::ImagesResponse::Quality | nil, + size: OpenAI::ImagesResponse::Size | nil, usage: OpenAI::ImagesResponse::Usage | nil } end @@ -30,7 +34,11 @@ def test_edit_required_params assert_pattern do response => { created: Integer, + background: OpenAI::ImagesResponse::Background | nil, data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + output_format: OpenAI::ImagesResponse::OutputFormat | nil, + quality: OpenAI::ImagesResponse::Quality | nil, + size: OpenAI::ImagesResponse::Size | nil, usage: OpenAI::ImagesResponse::Usage | nil } end @@ -46,7 +54,11 @@ def test_generate_required_params assert_pattern do response => { created: Integer, + background: OpenAI::ImagesResponse::Background | nil, data: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Image]) | nil, + output_format: OpenAI::ImagesResponse::OutputFormat | nil, + quality: OpenAI::ImagesResponse::Quality | nil, + size: OpenAI::ImagesResponse::Size | nil, usage: OpenAI::ImagesResponse::Usage | nil } end diff --git a/test/openai/resources/responses/input_items_test.rb 
b/test/openai/resources/responses/input_items_test.rb index cb00ba85..dc76d567 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -77,7 +77,12 @@ def test_list acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, status: OpenAI::Responses::ResponseComputerToolCallOutputItem::Status | nil } - in {type: :web_search_call, id: String, status: OpenAI::Responses::ResponseFunctionWebSearch::Status} + in { + type: :web_search_call, + id: String, + action: OpenAI::Responses::ResponseFunctionWebSearch::Action, + status: OpenAI::Responses::ResponseFunctionWebSearch::Status + } in { type: :function_call_output, id: String, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index f760ae47..c6f9f101 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -28,12 +28,14 @@ def test_create top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, + max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, + top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, user: String | nil @@ -66,12 +68,14 @@ def test_retrieve top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, + max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, + top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, user: String | nil @@ -112,12 +116,14 @@ def test_cancel top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, + max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, reasoning: OpenAI::Reasoning | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, + top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, user: String | nil diff --git a/test/openai/resources/webhooks_test.rb b/test/openai/resources/webhooks_test.rb new file mode 100644 index 00000000..514b3d31 --- /dev/null +++ b/test/openai/resources/webhooks_test.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::Resources::WebhooksTest < OpenAI::Test::ResourceTest +end From d6e9a5b3ec181ef681c592306333593d50ff3c28 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 19:33:15 +0000 Subject: [PATCH 242/295] chore(api): remove unsupported property --- .stats.yml | 4 ++-- .../responses/response_function_web_search.rb | 10 
+-------- .../response_function_web_search.rbi | 21 ++----------------- .../response_function_web_search.rbs | 19 +++-------------- 4 files changed, 8 insertions(+), 46 deletions(-) diff --git a/.stats.yml b/.stats.yml index e0c854cb..2f55495d 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-cca460eaf5cc13e9d6e5293eb97aac53d66dc1385c691f74b768c97d165b6e8b.yml -openapi_spec_hash: 9ec43d443b3dd58ca5aa87eb0a7eb49f +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a473967d1766dc155994d932fbc4a5bcbd1c140a37c20d0a4065e1bf0640536d.yml +openapi_spec_hash: 67cdc62b0d6c8b1de29b7dc54b265749 config_hash: e74d6791681e3af1b548748ff47a22c2 diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index 91ab5d55..f8360067 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -76,13 +76,7 @@ class Search < OpenAI::Internal::Type::BaseModel # @return [Symbol, :search] required :type, const: :search - # @!attribute domains - # Domains to restrict the search or domains where results were found. - # - # @return [Array, nil] - optional :domains, OpenAI::Internal::Type::ArrayOf[String] - - # @!method initialize(query:, domains: nil, type: :search) + # @!method initialize(query:, type: :search) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search} for more # details. @@ -91,8 +85,6 @@ class Search < OpenAI::Internal::Type::BaseModel # # @param query [String] The search query. # - # @param domains [Array] Domains to restrict the search or domains where results were found. - # # @param type [Symbol, :search] The action type. end diff --git a/rbi/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi index f2027488..5e7a4263 100644 --- a/rbi/openai/models/responses/response_function_web_search.rbi +++ b/rbi/openai/models/responses/response_function_web_search.rbi @@ -121,36 +121,19 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type - # Domains to restrict the search or domains where results were found. - sig { returns(T.nilable(T::Array[String])) } - attr_reader :domains - - sig { params(domains: T::Array[String]).void } - attr_writer :domains - # Action type "search" - Performs a web search query. sig do - params( - query: String, - domains: T::Array[String], - type: Symbol - ).returns(T.attached_class) + params(query: String, type: Symbol).returns(T.attached_class) end def self.new( # The search query. query:, - # Domains to restrict the search or domains where results were found. - domains: nil, # The action type. 
type: :search ) end - sig do - override.returns( - { query: String, type: Symbol, domains: T::Array[String] } - ) - end + sig { override.returns({ query: String, type: Symbol }) } def to_hash end end diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 268883f4..2aa9d146 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -40,29 +40,16 @@ module OpenAI module Action extend OpenAI::Internal::Type::Union - type search = - { query: String, type: :search, domains: ::Array[String] } + type search = { query: String, type: :search } class Search < OpenAI::Internal::Type::BaseModel attr_accessor query: String attr_accessor type: :search - attr_reader domains: ::Array[String]? + def initialize: (query: String, ?type: :search) -> void - def domains=: (::Array[String]) -> ::Array[String] - - def initialize: ( - query: String, - ?domains: ::Array[String], - ?type: :search - ) -> void - - def to_hash: -> { - query: String, - type: :search, - domains: ::Array[String] - } + def to_hash: -> { query: String, type: :search } end type open_page = { type: :open_page, url: String } From 70f14c2c2d573d441e78b4ff3fc0830cc346e56b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 19:37:25 +0000 Subject: [PATCH 243/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 091cfb12..f7014c35 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.10.0" + ".": "0.11.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 16924ce7..cbecf062 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.10.0) + openai (0.11.0) connection_pool GEM diff --git a/README.md b/README.md index ef9c582e..2ddb4fe6 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.10.0" +gem "openai", "~> 0.11.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 067cb054..aa67665d 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.10.0" + VERSION = "0.11.0" end From 13db33dbedd0dc20f301993192b98251cd88cc80 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 27 Jun 2025 16:47:35 +0000 Subject: [PATCH 244/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 2f55495d..7accc2c4 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a473967d1766dc155994d932fbc4a5bcbd1c140a37c20d0a4065e1bf0640536d.yml openapi_spec_hash: 67cdc62b0d6c8b1de29b7dc54b265749 -config_hash: e74d6791681e3af1b548748ff47a22c2 +config_hash: 05c7d4a6f4d5983fe9550457114b47dd From b381e8adf0082a341d886d340dfb270178b21932 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 27 Jun 2025 21:13:01 +0000 Subject: [PATCH 245/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 7accc2c4..60823b73 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a473967d1766dc155994d932fbc4a5bcbd1c140a37c20d0a4065e1bf0640536d.yml openapi_spec_hash: 67cdc62b0d6c8b1de29b7dc54b265749 -config_hash: 05c7d4a6f4d5983fe9550457114b47dd +config_hash: 7b53f96f897ca1b3407a5341a6f820db From 67819f1433eb1bfd410808a5e67059c78b3fcaeb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 27 Jun 2025 22:37:52 +0000 Subject: [PATCH 246/295] chore(ci): only run for pushes and fork pull requests --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ffa789c2..749a9bcf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,6 +17,7 @@ jobs: timeout-minutes: 10 name: lint runs-on: ${{ github.repository == 'stainless-sdks/openai-ruby' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 @@ -33,6 +34,7 @@ jobs: timeout-minutes: 10 name: test runs-on: ${{ github.repository == 'stainless-sdks/openai-ruby' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 - name: Set up Ruby From 6e511a5d6017cef0a9046a28ecd7bb4d2f87d124 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 30 Jun 2025 16:00:05 +0000 Subject: [PATCH 247/295] chore(internal): allow streams to also be unwrapped on a per-row basis --- lib/openai/internal/stream.rb | 3 ++- lib/openai/internal/transport/base_client.rb | 12 ++++++++++-- 
lib/openai/internal/type/base_stream.rb | 4 +++- rbi/openai/internal/type/base_stream.rbi | 9 ++++++++- sig/openai/internal/type/base_stream.rbs | 4 ++++ 5 files changed, 27 insertions(+), 5 deletions(-) diff --git a/lib/openai/internal/stream.rb b/lib/openai/internal/stream.rb index 2d3b9ac3..3908f10f 100644 --- a/lib/openai/internal/stream.rb +++ b/lib/openai/internal/stream.rb @@ -47,7 +47,8 @@ class Stream message: message ) in decoded - y << OpenAI::Internal::Type::Converter.coerce(@model, decoded) + unwrapped = OpenAI::Internal::Util.dig(decoded, @unwrap) + y << OpenAI::Internal::Type::Converter.coerce(@model, unwrapped) end else end diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 8e6f703c..6b8c2d3a 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -471,6 +471,7 @@ def request(req) self.class.validate!(req) model = req.fetch(:model) { OpenAI::Internal::Type::Unknown } opts = req[:options].to_h + unwrap = req[:unwrap] OpenAI::RequestOptions.validate!(opts) request = build_request(req.except(:options), opts) url = request.fetch(:url) @@ -487,11 +488,18 @@ def request(req) decoded = OpenAI::Internal::Util.decode_content(response, stream: stream) case req in {stream: Class => st} - st.new(model: model, url: url, status: status, response: response, stream: decoded) + st.new( + model: model, + url: url, + status: status, + response: response, + unwrap: unwrap, + stream: decoded + ) in {page: Class => page} page.new(client: self, req: req, headers: response, page_data: decoded) else - unwrapped = OpenAI::Internal::Util.dig(decoded, req[:unwrap]) + unwrapped = OpenAI::Internal::Util.dig(decoded, unwrap) OpenAI::Internal::Type::Converter.coerce(model, unwrapped) end end diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb index f1b1c8ff..3ebdf248 100644 --- a/lib/openai/internal/type/base_stream.rb +++ b/lib/openai/internal/type/base_stream.rb @@ -64,12 +64,14 @@ def to_enum = @iterator # @param url [URI::Generic] # @param status [Integer] # @param response [Net::HTTPResponse] + # @param unwrap [Symbol, Integer, Array, Proc] # @param stream [Enumerable] - def initialize(model:, url:, status:, response:, stream:) + def initialize(model:, url:, status:, response:, unwrap:, stream:) @model = model @url = url @status = status @response = response + @unwrap = unwrap @stream = stream @iterator = iterator diff --git a/rbi/openai/internal/type/base_stream.rbi b/rbi/openai/internal/type/base_stream.rbi index 82b62c1a..e1155943 100644 --- a/rbi/openai/internal/type/base_stream.rbi +++ b/rbi/openai/internal/type/base_stream.rbi @@ -52,10 +52,17 @@ module OpenAI url: URI::Generic, status: Integer, response: Net::HTTPResponse, + unwrap: + T.any( + Symbol, + Integer, + T::Array[T.any(Symbol, Integer)], + T.proc.params(arg0: T.anything).returns(T.anything) + ), stream: T::Enumerable[Message] ).void end - def initialize(model:, url:, status:, response:, stream:) + def initialize(model:, url:, status:, response:, unwrap:, stream:) end # @api private diff --git a/sig/openai/internal/type/base_stream.rbs b/sig/openai/internal/type/base_stream.rbs index d43b91c2..75f49297 100644 --- a/sig/openai/internal/type/base_stream.rbs +++ b/sig/openai/internal/type/base_stream.rbs @@ -23,6 +23,10 @@ module OpenAI url: URI::Generic, status: Integer, response: top, + unwrap: Symbol + | Integer + | ::Array[Symbol | Integer] + | ^(top arg0) -> top, stream: 
Enumerable[Message] ) -> void From 149bbf56be4f0857785dd22e4ade037587b07e5c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:37:44 +0000 Subject: [PATCH 248/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index f7014c35..a7130553 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.11.0" + ".": "0.12.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index cbecf062..179ef3c0 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.11.0) + openai (0.12.0) connection_pool GEM diff --git a/README.md b/README.md index 2ddb4fe6..05349e99 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.11.0" +gem "openai", "~> 0.12.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index aa67665d..0167afa4 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.11.0" + VERSION = "0.12.0" end From fd9635293bc431db38740d998aaeb3727c6befc8 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 18:31:01 +0000 Subject: [PATCH 249/295] feat(api): add file_url, fix event ID --- .stats.yml | 6 +++--- lib/openai/models/audio/transcription.rb | 8 ++++---- lib/openai/models/audio/transcription_verbose.rb | 8 ++++---- lib/openai/models/file_object.rb | 9 +++++---- lib/openai/models/responses/response_input_file.rb | 10 +++++++++- .../response_mcp_call_arguments_delta_event.rb | 10 +++++----- .../response_mcp_call_arguments_done_event.rb | 10 +++++----- .../response_output_text_annotation_added_event.rb | 10 +++++----- .../models/responses/response_stream_event.rb | 6 +++--- lib/openai/models/responses/tool.rb | 10 +++++++++- rbi/openai/models/audio/transcription.rbi | 8 ++++---- rbi/openai/models/audio/transcription_verbose.rbi | 10 ++++------ rbi/openai/models/file_object.rbi | 13 +++++++------ rbi/openai/models/responses/response_input_file.rbi | 11 +++++++++++ .../response_mcp_call_arguments_delta_event.rbi | 6 +++--- .../response_mcp_call_arguments_done_event.rbi | 6 +++--- .../response_output_text_annotation_added_event.rbi | 6 +++--- rbi/openai/models/responses/tool.rbi | 13 ++++++++++++- sig/openai/models/audio/transcription.rbs | 8 ++++---- sig/openai/models/audio/transcription_verbose.rbs | 8 ++++---- sig/openai/models/file_object.rbs | 2 ++ sig/openai/models/responses/response_input_file.rbs | 7 +++++++ .../response_mcp_call_arguments_delta_event.rbs | 8 ++++---- .../response_mcp_call_arguments_done_event.rbs | 8 ++++---- .../response_output_text_annotation_added_event.rbs | 8 ++++---- sig/openai/models/responses/tool.rbs | 11 +++++++++-- 26 files changed, 137 insertions(+), 83 deletions(-) diff --git a/.stats.yml b/.stats.yml index 60823b73..6539c603 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-a473967d1766dc155994d932fbc4a5bcbd1c140a37c20d0a4065e1bf0640536d.yml -openapi_spec_hash: 
67cdc62b0d6c8b1de29b7dc54b265749 -config_hash: 7b53f96f897ca1b3407a5341a6f820db +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2d116cda53321baa3479e628512def723207a81eb1cdaebb542bd0555e563bda.yml +openapi_spec_hash: 809d958fec261a32004a4b026b718793 +config_hash: e74d6791681e3af1b548748ff47a22c2 diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 05aa2875..3370cb81 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -149,11 +149,11 @@ class InputTokenDetails < OpenAI::Internal::Type::BaseModel end class Duration < OpenAI::Internal::Type::BaseModel - # @!attribute duration + # @!attribute seconds # Duration of the input audio in seconds. # # @return [Float] - required :duration, Float + required :seconds, Float # @!attribute type # The type of the usage object. Always `duration` for this variant. @@ -161,10 +161,10 @@ class Duration < OpenAI::Internal::Type::BaseModel # @return [Symbol, :duration] required :type, const: :duration - # @!method initialize(duration:, type: :duration) + # @!method initialize(seconds:, type: :duration) # Usage statistics for models billed by audio input duration. # - # @param duration [Float] Duration of the input audio in seconds. + # @param seconds [Float] Duration of the input audio in seconds. # # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant. end diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 6d7c08d7..678f54dd 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -58,11 +58,11 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Audio::TranscriptionVerbose#usage class Usage < OpenAI::Internal::Type::BaseModel - # @!attribute duration + # @!attribute seconds # Duration of the input audio in seconds. # # @return [Float] - required :duration, Float + required :seconds, Float # @!attribute type # The type of the usage object. Always `duration` for this variant. @@ -70,10 +70,10 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :duration] required :type, const: :duration - # @!method initialize(duration:, type: :duration) + # @!method initialize(seconds:, type: :duration) # Usage statistics for models billed by audio input duration. # - # @param duration [Float] Duration of the input audio in seconds. + # @param seconds [Float] Duration of the input audio in seconds. # # @param type [Symbol, :duration] The type of the usage object. Always `duration` for this variant. end diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index ac42a195..6efe2e16 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -36,8 +36,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @!attribute purpose # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`, + # `vision`, and `user_data`. 
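
With `user_data` added to the accepted purposes, an upload can be tagged accordingly. A minimal sketch, assuming a configured client and a local file; the path, file name, and client setup are illustrative and not part of this diff:

```ruby
require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# `:user_data` is newly accepted alongside :assistants, :batch, :"fine-tune", etc.
file = client.files.create(file: Pathname("profile.json"), purpose: :user_data)
puts file.purpose
```
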
# # @return [Symbol, OpenAI::Models::FileObject::Purpose] required :purpose, enum: -> { OpenAI::FileObject::Purpose } @@ -91,8 +91,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @param object [Symbol, :file] The object type, which is always `file`. # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`, + # `vision`, and `user_data`. # # @see OpenAI::Models::FileObject#purpose module Purpose @@ -105,6 +105,7 @@ module Purpose FINE_TUNE = :"fine-tune" FINE_TUNE_RESULTS = :"fine-tune-results" VISION = :vision + USER_DATA = :user_data # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 229e942a..9aee8404 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -22,13 +22,19 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :file_id, String, nil?: true + # @!attribute file_url + # The URL of the file to be sent to the model. + # + # @return [String, nil] + optional :file_url, String + # @!attribute filename # The name of the file to be sent to the model. # # @return [String, nil] optional :filename, String - # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) + # @!method initialize(file_data: nil, file_id: nil, file_url: nil, filename: nil, type: :input_file) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseInputFile} for more details. # @@ -38,6 +44,8 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # # @param file_id [String, nil] The ID of the file to be sent to the model. # + # @param file_url [String] The URL of the file to be sent to the model. + # # @param filename [String] The name of the file to be sent to the model. # # @param type [Symbol, :input_file] The type of the input item. Always `input_file`. diff --git a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb index 0c734e96..df8179fb 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb @@ -29,12 +29,12 @@ class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel required :sequence_number, Integer # @!attribute type - # The type of the event. Always 'response.mcp_call.arguments_delta'. + # The type of the event. Always 'response.mcp_call_arguments.delta'. # - # @return [Symbol, :"response.mcp_call.arguments_delta"] - required :type, const: :"response.mcp_call.arguments_delta" + # @return [Symbol, :"response.mcp_call_arguments.delta"] + required :type, const: :"response.mcp_call_arguments.delta" - # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.delta") # Emitted when there is a delta (partial update) to the arguments of an MCP tool # call. # @@ -46,7 +46,7 @@ class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param sequence_number [Integer] The sequence number of this event. 
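
The new `file_url` field lets a Responses input file reference a hosted document rather than an uploaded file ID. A hedged sketch of constructing such an item (the URL and filename are illustrative):

```ruby
input_file = OpenAI::Responses::ResponseInputFile.new(
  file_url: "https://example.com/docs/handbook.pdf", # new in this release
  filename: "handbook.pdf"
)
input_file.type # => :input_file
```
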
# - # @param type [Symbol, :"response.mcp_call.arguments_delta"] The type of the event. Always 'response.mcp_call.arguments_delta'. + # @param type [Symbol, :"response.mcp_call_arguments.delta"] The type of the event. Always 'response.mcp_call_arguments.delta'. end end end diff --git a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb index f8f70329..693bbcdc 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb @@ -29,12 +29,12 @@ class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel required :sequence_number, Integer # @!attribute type - # The type of the event. Always 'response.mcp_call.arguments_done'. + # The type of the event. Always 'response.mcp_call_arguments.done'. # - # @return [Symbol, :"response.mcp_call.arguments_done"] - required :type, const: :"response.mcp_call.arguments_done" + # @return [Symbol, :"response.mcp_call_arguments.done"] + required :type, const: :"response.mcp_call_arguments.done" - # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_done") + # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.done") # Emitted when the arguments for an MCP tool call are finalized. # # @param arguments [Object] The finalized arguments for the MCP tool call. @@ -45,7 +45,7 @@ class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # # @param sequence_number [Integer] The sequence number of this event. # - # @param type [Symbol, :"response.mcp_call.arguments_done"] The type of the event. Always 'response.mcp_call.arguments_done'. + # @param type [Symbol, :"response.mcp_call_arguments.done"] The type of the event. Always 'response.mcp_call_arguments.done'. end end end diff --git a/lib/openai/models/responses/response_output_text_annotation_added_event.rb b/lib/openai/models/responses/response_output_text_annotation_added_event.rb index eba4d8a7..0a113db0 100644 --- a/lib/openai/models/responses/response_output_text_annotation_added_event.rb +++ b/lib/openai/models/responses/response_output_text_annotation_added_event.rb @@ -41,12 +41,12 @@ class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel required :sequence_number, Integer # @!attribute type - # The type of the event. Always 'response.output_text_annotation.added'. + # The type of the event. Always 'response.output_text.annotation.added'. # - # @return [Symbol, :"response.output_text_annotation.added"] - required :type, const: :"response.output_text_annotation.added" + # @return [Symbol, :"response.output_text.annotation.added"] + required :type, const: :"response.output_text.annotation.added" - # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text_annotation.added") + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text.annotation.added") # Emitted when an annotation is added to output text content. # # @param annotation [Object] The annotation object being added. (See annotation schema for details.) @@ -61,7 +61,7 @@ class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel # # @param sequence_number [Integer] The sequence number of this event. 
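
Because the event `type` literals changed (`response.mcp_call.arguments_delta` becomes `response.mcp_call_arguments.delta`, and similarly for the `done` and annotation events), code that matches on event types needs the new spellings. A hedged sketch; `events` here stands for any Enumerable of response stream events obtained from the SDK's streaming call:

```ruby
# `events` is assumed to be an Enumerable of response stream events,
# e.g. the return value of the Responses streaming endpoint.
events.each do |event|
  case event.type
  when :"response.mcp_call_arguments.delta" # was :"response.mcp_call.arguments_delta"
    print event.delta
  when :"response.mcp_call_arguments.done"  # was :"response.mcp_call.arguments_done"
    puts "\narguments finalized"
  when :"response.output_text.annotation.added" # was :"response.output_text_annotation.added"
    puts "annotation: #{event.annotation.inspect}"
  end
end
```
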
# - # @param type [Symbol, :"response.output_text_annotation.added"] The type of the event. Always 'response.output_text_annotation.added'. + # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always 'response.output_text.annotation.added'. end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index df0809de..4b51a511 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -151,13 +151,13 @@ module ResponseStreamEvent -> { OpenAI::Responses::ResponseImageGenCallPartialImageEvent } # Emitted when there is a delta (partial update) to the arguments of an MCP tool call. - variant :"response.mcp_call.arguments_delta", + variant :"response.mcp_call_arguments.delta", -> { OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent } # Emitted when the arguments for an MCP tool call are finalized. - variant :"response.mcp_call.arguments_done", + variant :"response.mcp_call_arguments.done", -> { OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent } @@ -185,7 +185,7 @@ module ResponseStreamEvent -> { OpenAI::Responses::ResponseMcpListToolsInProgressEvent } # Emitted when an annotation is added to output text content. - variant :"response.output_text_annotation.added", + variant :"response.output_text.annotation.added", -> { OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent } # Emitted when a response is queued and waiting to be processed. diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index f97fae7e..b8ed7736 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -72,7 +72,13 @@ class Mcp < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] optional :require_approval, union: -> { OpenAI::Responses::Tool::Mcp::RequireApproval }, nil?: true - # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, type: :mcp) + # @!attribute server_description + # Optional description of the MCP server, used to provide more context. + # + # @return [String, nil] + optional :server_description, String + + # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, server_description: nil, type: :mcp) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Tool::Mcp} for more details. # @@ -90,6 +96,8 @@ class Mcp < OpenAI::Internal::Type::BaseModel # # @param require_approval [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] Specify which of the MCP server's tools require approval. # + # @param server_description [String] Optional description of the MCP server, used to provide more context. + # # @param type [Symbol, :mcp] The type of the MCP tool. Always `mcp`. # List of allowed tool names or a filter object. diff --git a/rbi/openai/models/audio/transcription.rbi b/rbi/openai/models/audio/transcription.rbi index 8328d081..09191541 100644 --- a/rbi/openai/models/audio/transcription.rbi +++ b/rbi/openai/models/audio/transcription.rbi @@ -291,7 +291,7 @@ module OpenAI # Duration of the input audio in seconds. 
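
On the consuming side, the usage rename means duration-billed transcriptions now expose `seconds` instead of `duration`. A minimal sketch, assuming the client and audio path from the earlier example:

```ruby
transcription = client.audio.transcriptions.create(
  file: Pathname("meeting.mp3"),
  model: :"gpt-4o-transcribe"
)

usage = transcription.usage
# Duration-billed models report a `:duration` usage object; the field
# holding the billed time is now `seconds` (previously `duration`).
puts usage.seconds if usage.respond_to?(:seconds)
```
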
sig { returns(Float) } - attr_accessor :duration + attr_accessor :seconds # The type of the usage object. Always `duration` for this variant. sig { returns(Symbol) } @@ -299,17 +299,17 @@ module OpenAI # Usage statistics for models billed by audio input duration. sig do - params(duration: Float, type: Symbol).returns(T.attached_class) + params(seconds: Float, type: Symbol).returns(T.attached_class) end def self.new( # Duration of the input audio in seconds. - duration:, + seconds:, # The type of the usage object. Always `duration` for this variant. type: :duration ) end - sig { override.returns({ duration: Float, type: Symbol }) } + sig { override.returns({ seconds: Float, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/audio/transcription_verbose.rbi b/rbi/openai/models/audio/transcription_verbose.rbi index 45fb31e6..e3351d37 100644 --- a/rbi/openai/models/audio/transcription_verbose.rbi +++ b/rbi/openai/models/audio/transcription_verbose.rbi @@ -109,25 +109,23 @@ module OpenAI # Duration of the input audio in seconds. sig { returns(Float) } - attr_accessor :duration + attr_accessor :seconds # The type of the usage object. Always `duration` for this variant. sig { returns(Symbol) } attr_accessor :type # Usage statistics for models billed by audio input duration. - sig do - params(duration: Float, type: Symbol).returns(T.attached_class) - end + sig { params(seconds: Float, type: Symbol).returns(T.attached_class) } def self.new( # Duration of the input audio in seconds. - duration:, + seconds:, # The type of the usage object. Always `duration` for this variant. type: :duration ) end - sig { override.returns({ duration: Float, type: Symbol }) } + sig { override.returns({ seconds: Float, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/file_object.rbi b/rbi/openai/models/file_object.rbi index 437d1b71..8f7ab4ab 100644 --- a/rbi/openai/models/file_object.rbi +++ b/rbi/openai/models/file_object.rbi @@ -27,8 +27,8 @@ module OpenAI attr_accessor :object # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`, + # `vision`, and `user_data`. sig { returns(OpenAI::FileObject::Purpose::TaggedSymbol) } attr_accessor :purpose @@ -76,8 +76,8 @@ module OpenAI # The name of the file. filename:, # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`, + # `vision`, and `user_data`. purpose:, # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. @@ -111,8 +111,8 @@ module OpenAI end # The intended purpose of the file. Supported values are `assistants`, - # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` - # and `vision`. + # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results`, + # `vision`, and `user_data`. 
module Purpose extend OpenAI::Internal::Type::Enum @@ -132,6 +132,7 @@ module OpenAI FINE_TUNE_RESULTS = T.let(:"fine-tune-results", OpenAI::FileObject::Purpose::TaggedSymbol) VISION = T.let(:vision, OpenAI::FileObject::Purpose::TaggedSymbol) + USER_DATA = T.let(:user_data, OpenAI::FileObject::Purpose::TaggedSymbol) sig do override.returns(T::Array[OpenAI::FileObject::Purpose::TaggedSymbol]) diff --git a/rbi/openai/models/responses/response_input_file.rbi b/rbi/openai/models/responses/response_input_file.rbi index 19ecb616..bfa515c2 100644 --- a/rbi/openai/models/responses/response_input_file.rbi +++ b/rbi/openai/models/responses/response_input_file.rbi @@ -27,6 +27,13 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :file_id + # The URL of the file to be sent to the model. + sig { returns(T.nilable(String)) } + attr_reader :file_url + + sig { params(file_url: String).void } + attr_writer :file_url + # The name of the file to be sent to the model. sig { returns(T.nilable(String)) } attr_reader :filename @@ -39,6 +46,7 @@ module OpenAI params( file_data: String, file_id: T.nilable(String), + file_url: String, filename: String, type: Symbol ).returns(T.attached_class) @@ -48,6 +56,8 @@ module OpenAI file_data: nil, # The ID of the file to be sent to the model. file_id: nil, + # The URL of the file to be sent to the model. + file_url: nil, # The name of the file to be sent to the model. filename: nil, # The type of the input item. Always `input_file`. @@ -61,6 +71,7 @@ module OpenAI type: Symbol, file_data: String, file_id: T.nilable(String), + file_url: String, filename: String } ) diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi index be6f8e3e..58f623a5 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi @@ -28,7 +28,7 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The type of the event. Always 'response.mcp_call.arguments_delta'. + # The type of the event. Always 'response.mcp_call_arguments.delta'. sig { returns(Symbol) } attr_accessor :type @@ -52,8 +52,8 @@ module OpenAI output_index:, # The sequence number of this event. sequence_number:, - # The type of the event. Always 'response.mcp_call.arguments_delta'. - type: :"response.mcp_call.arguments_delta" + # The type of the event. Always 'response.mcp_call_arguments.delta'. + type: :"response.mcp_call_arguments.delta" ) end diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi index df22b5d2..ef20e071 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi @@ -28,7 +28,7 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The type of the event. Always 'response.mcp_call.arguments_done'. + # The type of the event. Always 'response.mcp_call_arguments.done'. sig { returns(Symbol) } attr_accessor :type @@ -51,8 +51,8 @@ module OpenAI output_index:, # The sequence number of this event. sequence_number:, - # The type of the event. Always 'response.mcp_call.arguments_done'. - type: :"response.mcp_call.arguments_done" + # The type of the event. Always 'response.mcp_call_arguments.done'. 
+ type: :"response.mcp_call_arguments.done" ) end diff --git a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi index 422e56dc..c26c48d6 100644 --- a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi +++ b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi @@ -36,7 +36,7 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The type of the event. Always 'response.output_text_annotation.added'. + # The type of the event. Always 'response.output_text.annotation.added'. sig { returns(Symbol) } attr_accessor :type @@ -65,8 +65,8 @@ module OpenAI output_index:, # The sequence number of this event. sequence_number:, - # The type of the event. Always 'response.output_text_annotation.added'. - type: :"response.output_text_annotation.added" + # The type of the event. Always 'response.output_text.annotation.added'. + type: :"response.output_text.annotation.added" ) end diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index 0b7fbd91..e3467167 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -70,6 +70,13 @@ module OpenAI end attr_accessor :require_approval + # Optional description of the MCP server, used to provide more context. + sig { returns(T.nilable(String)) } + attr_reader :server_description + + sig { params(server_description: String).void } + attr_writer :server_description + # Give the model access to additional tools via remote Model Context Protocol # (MCP) servers. # [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). @@ -92,6 +99,7 @@ module OpenAI OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol ) ), + server_description: String, type: Symbol ).returns(T.attached_class) end @@ -107,6 +115,8 @@ module OpenAI headers: nil, # Specify which of the MCP server's tools require approval. require_approval: nil, + # Optional description of the MCP server, used to provide more context. + server_description: nil, # The type of the MCP tool. Always `mcp`. 
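
The new `server_description` field gives the model extra context about a remote MCP server. A hedged sketch of a tool definition using it (label, URL, and description are illustrative):

```ruby
mcp_tool = OpenAI::Responses::Tool::Mcp.new(
  server_label: "docs",
  server_url: "https://example.com/mcp",
  server_description: "Read-only access to internal documentation" # new optional field
)
```

Since the field is optional (declared with `attr_reader`/`attr_writer` rather than a required accessor), existing tool definitions keep working unchanged.
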
type: :mcp ) @@ -132,7 +142,8 @@ module OpenAI OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol ) - ) + ), + server_description: String } ) end diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index d7c58be3..bacd5aa7 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -131,16 +131,16 @@ module OpenAI end end - type duration = { duration: Float, type: :duration } + type duration = { seconds: Float, type: :duration } class Duration < OpenAI::Internal::Type::BaseModel - attr_accessor duration: Float + attr_accessor seconds: Float attr_accessor type: :duration - def initialize: (duration: Float, ?type: :duration) -> void + def initialize: (seconds: Float, ?type: :duration) -> void - def to_hash: -> { duration: Float, type: :duration } + def to_hash: -> { seconds: Float, type: :duration } end def self?.variants: -> ::Array[OpenAI::Models::Audio::Transcription::usage] diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs index 88f0d4af..c84b3539 100644 --- a/sig/openai/models/audio/transcription_verbose.rbs +++ b/sig/openai/models/audio/transcription_verbose.rbs @@ -54,16 +54,16 @@ module OpenAI words: ::Array[OpenAI::Audio::TranscriptionWord] } - type usage = { duration: Float, type: :duration } + type usage = { seconds: Float, type: :duration } class Usage < OpenAI::Internal::Type::BaseModel - attr_accessor duration: Float + attr_accessor seconds: Float attr_accessor type: :duration - def initialize: (duration: Float, ?type: :duration) -> void + def initialize: (seconds: Float, ?type: :duration) -> void - def to_hash: -> { duration: Float, type: :duration } + def to_hash: -> { seconds: Float, type: :duration } end end end diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index acbdedc2..cfb4f32c 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -68,6 +68,7 @@ module OpenAI | :"fine-tune" | :"fine-tune-results" | :vision + | :user_data module Purpose extend OpenAI::Internal::Type::Enum @@ -79,6 +80,7 @@ module OpenAI FINE_TUNE: :"fine-tune" FINE_TUNE_RESULTS: :"fine-tune-results" VISION: :vision + USER_DATA: :user_data def self?.values: -> ::Array[OpenAI::Models::FileObject::purpose] end diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index bdfcd495..5241e013 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -6,6 +6,7 @@ module OpenAI type: :input_file, file_data: String, file_id: String?, + file_url: String, filename: String } @@ -18,6 +19,10 @@ module OpenAI attr_accessor file_id: String? + attr_reader file_url: String? + + def file_url=: (String) -> String + attr_reader filename: String? 
def filename=: (String) -> String @@ -25,6 +30,7 @@ module OpenAI def initialize: ( ?file_data: String, ?file_id: String?, + ?file_url: String, ?filename: String, ?type: :input_file ) -> void @@ -33,6 +39,7 @@ module OpenAI type: :input_file, file_data: String, file_id: String?, + file_url: String, filename: String } end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs index bb94cc20..a69fd15d 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -7,7 +7,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.mcp_call.arguments_delta" + type: :"response.mcp_call_arguments.delta" } class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel @@ -19,14 +19,14 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor type: :"response.mcp_call.arguments_delta" + attr_accessor type: :"response.mcp_call_arguments.delta" def initialize: ( delta: top, item_id: String, output_index: Integer, sequence_number: Integer, - ?type: :"response.mcp_call.arguments_delta" + ?type: :"response.mcp_call_arguments.delta" ) -> void def to_hash: -> { @@ -34,7 +34,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.mcp_call.arguments_delta" + type: :"response.mcp_call_arguments.delta" } end end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs index f0a16dcf..c262d1a7 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -7,7 +7,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.mcp_call.arguments_done" + type: :"response.mcp_call_arguments.done" } class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel @@ -19,14 +19,14 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor type: :"response.mcp_call.arguments_done" + attr_accessor type: :"response.mcp_call_arguments.done" def initialize: ( arguments: top, item_id: String, output_index: Integer, sequence_number: Integer, - ?type: :"response.mcp_call.arguments_done" + ?type: :"response.mcp_call_arguments.done" ) -> void def to_hash: -> { @@ -34,7 +34,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.mcp_call.arguments_done" + type: :"response.mcp_call_arguments.done" } end end diff --git a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs index 499b4556..177650c3 100644 --- a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +++ b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs @@ -9,7 +9,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.output_text_annotation.added" + type: :"response.output_text.annotation.added" } class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel @@ -25,7 +25,7 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor type: :"response.output_text_annotation.added" + attr_accessor type: 
:"response.output_text.annotation.added" def initialize: ( annotation: top, @@ -34,7 +34,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - ?type: :"response.output_text_annotation.added" + ?type: :"response.output_text.annotation.added" ) -> void def to_hash: -> { @@ -44,7 +44,7 @@ module OpenAI item_id: String, output_index: Integer, sequence_number: Integer, - type: :"response.output_text_annotation.added" + type: :"response.output_text.annotation.added" } end end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 0655d5c6..87e9506b 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -21,7 +21,8 @@ module OpenAI type: :mcp, allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, headers: ::Hash[Symbol, String]?, - require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, + server_description: String } class Mcp < OpenAI::Internal::Type::BaseModel @@ -37,12 +38,17 @@ module OpenAI attr_accessor require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? + attr_reader server_description: String? + + def server_description=: (String) -> String + def initialize: ( server_label: String, server_url: String, ?allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, ?headers: ::Hash[Symbol, String]?, ?require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, + ?server_description: String, ?type: :mcp ) -> void @@ -52,7 +58,8 @@ module OpenAI type: :mcp, allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, headers: ::Hash[Symbol, String]?, - require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, + server_description: String } type allowed_tools = From ace23487f97793e8cca39f6fb9480be2fb364905 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 10 Jul 2025 18:35:41 +0000 Subject: [PATCH 250/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a7130553..d52d2b97 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.12.0" + ".": "0.13.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 179ef3c0..87940077 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.12.0) + openai (0.13.0) connection_pool GEM diff --git a/README.md b/README.md index 05349e99..f22fed8a 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.12.0" +gem "openai", "~> 0.13.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 0167afa4..69be28c0 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.12.0" + VERSION = "0.13.0" end From 1641af78e62e2b54ecee46d4f4668274dd7fb238 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 18:29:44 +0000 Subject: [PATCH 251/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 6539c603..876e59c6 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2d116cda53321baa3479e628512def723207a81eb1cdaebb542bd0555e563bda.yml openapi_spec_hash: 809d958fec261a32004a4b026b718793 -config_hash: e74d6791681e3af1b548748ff47a22c2 +config_hash: 00b55237774c015fc35f58d2820759a9 From 79e26acad7e94ebd53b736e51af2bf687ef87687 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 18:33:11 +0000 Subject: [PATCH 252/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index 876e59c6..642f4982 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2d116cda53321baa3479e628512def723207a81eb1cdaebb542bd0555e563bda.yml openapi_spec_hash: 809d958fec261a32004a4b026b718793 -config_hash: 00b55237774c015fc35f58d2820759a9 +config_hash: 5ef02e55671aae1ba9bd62fe4eb0f50f From ee546f991ea3cecee6f3371867861bf8baa45900 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 11 Jul 2025 20:39:04 +0000 Subject: [PATCH 253/295] codegen metadata --- .stats.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.stats.yml b/.stats.yml index 642f4982..d172ef5f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-2d116cda53321baa3479e628512def723207a81eb1cdaebb542bd0555e563bda.yml -openapi_spec_hash: 809d958fec261a32004a4b026b718793 -config_hash: 5ef02e55671aae1ba9bd62fe4eb0f50f +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-de3e91790d0b9f3ce26d679ac07079880ccc695bd8c878f961c4d577a5025a2e.yml +openapi_spec_hash: 4b44e3f287583d01fbe7b10cd943254a +config_hash: 06b9a88561844d60d8efa4eaabf5fa3c From bbd929c97d5e97202ecd78cdb7be259ae2e00ff1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 13:49:16 +0000 Subject: [PATCH 254/295] codegen metadata --- .stats.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.stats.yml b/.stats.yml index d172ef5f..d97840a3 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: 
https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-de3e91790d0b9f3ce26d679ac07079880ccc695bd8c878f961c4d577a5025a2e.yml openapi_spec_hash: 4b44e3f287583d01fbe7b10cd943254a -config_hash: 06b9a88561844d60d8efa4eaabf5fa3c +config_hash: cc92d0be2a0f3c77bfc988082dd0573e From 7b63fac2e1e0fa6243a6f3e3be626dbb4b4bf9c1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 15:47:13 +0000 Subject: [PATCH 255/295] chore(api): update realtime specs, build config --- .stats.yml | 6 +- lib/openai/models/eval_create_params.rb | 55 ++++++++++-- ...create_eval_completions_run_data_source.rb | 55 ++++++++++-- .../models/evals/run_cancel_response.rb | 53 ++++++++++-- lib/openai/models/evals/run_create_params.rb | 55 ++++++++++-- .../models/evals/run_create_response.rb | 53 ++++++++++-- lib/openai/models/evals/run_list_response.rb | 53 ++++++++++-- .../models/evals/run_retrieve_response.rb | 53 ++++++++++-- .../models/graders/label_model_grader.rb | 53 ++++++++++-- .../models/graders/score_model_grader.rb | 53 ++++++++++-- rbi/openai/models/eval_create_params.rbi | 83 +++++++++++++++++-- ...reate_eval_completions_run_data_source.rbi | 83 +++++++++++++++++-- .../models/evals/run_cancel_response.rbi | 75 +++++++++++++++-- rbi/openai/models/evals/run_create_params.rbi | 83 +++++++++++++++++-- .../models/evals/run_create_response.rbi | 75 +++++++++++++++-- rbi/openai/models/evals/run_list_response.rbi | 75 +++++++++++++++-- .../models/evals/run_retrieve_response.rbi | 75 +++++++++++++++-- .../models/graders/label_model_grader.rbi | 81 ++++++++++++++++-- .../models/graders/score_model_grader.rbi | 81 ++++++++++++++++-- sig/openai/models/eval_create_params.rbs | 29 +++++++ ...reate_eval_completions_run_data_source.rbs | 29 +++++++ .../models/evals/run_cancel_response.rbs | 33 ++++++++ sig/openai/models/evals/run_create_params.rbs | 33 ++++++++ .../models/evals/run_create_response.rbs | 33 ++++++++ sig/openai/models/evals/run_list_response.rbs | 33 ++++++++ .../models/evals/run_retrieve_response.rbs | 33 ++++++++ .../models/graders/label_model_grader.rbs | 29 +++++++ .../models/graders/score_model_grader.rbs | 29 +++++++ 28 files changed, 1378 insertions(+), 103 deletions(-) diff --git a/.stats.yml b/.stats.yml index d97840a3..be597f64 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-de3e91790d0b9f3ce26d679ac07079880ccc695bd8c878f961c4d577a5025a2e.yml -openapi_spec_hash: 4b44e3f287583d01fbe7b10cd943254a -config_hash: cc92d0be2a0f3c77bfc988082dd0573e +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-82fd6fcb3eea81cbbe09a6f831c82219f1251e1b76474b4c41f424bf277e6a71.yml +openapi_spec_hash: c8d54bd1ae3d704f6b6f72ffd2f876d8 +config_hash: 3315d58b60faf63b1bee251b81837cda diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index cdf26b6c..1055a3ae 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -280,9 +280,9 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content @@ -318,13 +318,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content module Content @@ -341,6 +341,14 @@ module Content OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage + } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -366,8 +374,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. 
+ # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 6521c8f7..28b05c47 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -277,9 +277,9 @@ module Template class Message < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array] required :content, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content @@ -315,13 +315,13 @@ class Message < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
# # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content module Content @@ -338,6 +338,14 @@ module Content OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText } + # An image input to the model. + variant -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage + } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -363,8 +371,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 74608cb9..0c9bb2d2 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -457,9 +457,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,13 +489,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content module Content @@ -510,6 +510,12 @@ module Content # A text output from the model. variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -535,8 +541,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. 
Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 8b84487a..0237a2ab 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -409,9 +409,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content @@ -447,13 +447,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. 
+ # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content module Content @@ -470,6 +470,14 @@ module Content OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage + } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -495,8 +503,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. 
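Usage sketch (not part of the generated diff): the new `InputImage` variant lets a template message in `RunCreateParams` carry an image instead of text. The hash below mirrors the shape defined above; the image URL is a placeholder, and the surrounding data-source fields are unchanged by this patch.

    # Hypothetical template message using the new input_image content shape:
    # `image_url` and `type` are required; `detail` is optional and defaults
    # to "auto" when omitted.
    template_message = {
      role: :user,
      type: :message,
      content: {
        type: :input_image,
        image_url: "https://example.com/chart.png", # placeholder URL
        detail: "low" # one of "high", "low", or "auto"
      }
    }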
+ end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 73327ea2..6c0cdf67 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -457,9 +457,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,13 +489,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
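The `AnArrayOfInputTextAndInputImageArray` converter is declared as `ArrayOf[Unknown]`, so array-valued content is passed through without element-level coercion. A sketch of the mixed-parts form, assuming the documented `input_text`/`input_image` hash shapes:

    # Array-of-parts content; elements are untyped (ArrayOf[Unknown]), so
    # plain hashes in the documented shapes are forwarded as-is. Content may
    # contain template strings such as {{item.image_url}}.
    content = [
      { type: :input_text, text: "Describe the image for {{item.subject}}." },
      { type: :input_image, image_url: "{{item.image_url}}" }
    ]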
# # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content module Content @@ -510,6 +510,12 @@ module Content # A text output from the model. variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -535,8 +541,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 0f8abfad..66bfa2f1 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -457,9 +457,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
# - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -489,13 +489,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content module Content @@ -510,6 +510,12 @@ module Content # A text output from the model. variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -535,8 +541,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. 
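Note that `detail` is typed as a bare `String` rather than an enum, so the documented values travel as literal strings; nothing in this patch appears to validate them client-side:

    # `detail` is a plain String field; the documented values are sent verbatim.
    image_part = { type: :input_image, image_url: "https://example.com/a.png", detail: "high" }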
+ # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 20526587..1c0a6f8b 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -461,9 +461,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } @@ -493,13 +493,13 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. 
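Because `content` now deserializes to one of five shapes, readers of a retrieved run can pattern-match over the union. A hedged sketch, where `item` stands for one template `EvalItem` taken from a response model:

    # `item` is assumed to be an EvalItem from a retrieved run; the class
    # names mirror the union variants added in this patch.
    case item.content
    in String => template
      puts "template string: #{template}"
    in OpenAI::Models::Responses::ResponseInputText => text
      puts "input text: #{text.text}"
    in Array => parts
      parts.each { |part| pp part } # untyped elements (ArrayOf[Unknown])
    else
      pp item.content # OutputText or InputImage instance
    end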
# # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content module Content @@ -514,6 +514,12 @@ module Content # A text output from the model. variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + # An image input to the model. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -539,8 +545,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage} + # for more details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index 0f0d52aa..bfed8c30 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -57,9 +57,9 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel class Input < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::InputImage, Array] required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content } # @!attribute role @@ -85,13 +85,13 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Graders::LabelModelGrader::Input#content module Content @@ -106,6 +106,12 @@ module Content # A text output from the model. variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::OutputText } + # An image input to the model. + variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Graders::LabelModelGrader::Input::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -131,8 +137,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. 
+ # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::LabelModelGrader::Input::Content::InputImage} for more + # details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index bdec7e50..47417994 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -57,9 +57,9 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel class Input < OpenAI::Internal::Type::BaseModel # @!attribute content - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::InputImage, Array] required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content } # @!attribute role @@ -85,13 +85,13 @@ class Input < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::InputImage, Array] Inputs to the model - can contain template strings. # # @param role [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or # # @param type [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. # # @see OpenAI::Models::Graders::ScoreModelGrader::Input#content module Content @@ -106,6 +106,12 @@ module Content # A text output from the model. variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText } + # An image input to the model. 
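The grader models gain the same content union, so a model-based grader prompt can now include images. A minimal sketch of a grader input hash, assuming the existing role/type fields and a placeholder template value:

    # Only the `content` union is new here; role/type already existed on
    # grader inputs. {{item.screenshot_url}} is a placeholder template string.
    grader_input = {
      role: :user,
      type: :message,
      content: { type: :input_image, image_url: "{{item.screenshot_url}}", detail: "auto" }
    }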
+ variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage } + + # A list of inputs, each of which may be either an input text or input image object. + variant -> { OpenAI::Models::Graders::ScoreModelGrader::Input::Content::AnArrayOfInputTextAndInputImageArray } + class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text # The text output from the model. @@ -131,8 +137,45 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. end + class InputImage < OpenAI::Internal::Type::BaseModel + # @!attribute image_url + # The URL of the image input. + # + # @return [String] + required :image_url, String + + # @!attribute type + # The type of the image input. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [String, nil] + optional :detail, String + + # @!method initialize(image_url:, detail: nil, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::ScoreModelGrader::Input::Content::InputImage} for more + # details. + # + # An image input to the model. + # + # @param image_url [String] The URL of the image input. + # + # @param detail [String] The detail level of the image to be sent to the model. One of `high`, `low`, or + # + # @param type [Symbol, :input_image] The type of the image input. Always `input_image`. + end + # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::InputImage, Array)] + + # @type [OpenAI::Internal::Type::Converter] + AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index 7131576a..379e276b 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -474,13 +474,15 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
sig do returns( T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, + T::Array[T.anything] ) ) end @@ -524,7 +526,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, @@ -533,7 +537,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -550,7 +554,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, + T::Array[T.anything] ), role: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, @@ -562,7 +568,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -571,7 +577,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -609,6 +617,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. 
+ type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -618,6 +679,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 8298b14c..d929b514 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -515,13 +515,15 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + T::Array[T.anything] ) ) end @@ -565,7 +567,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, @@ -574,7 +578,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -591,7 +595,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + T::Array[T.anything] ), role: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, @@ -603,7 +609,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
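On the Sorbet surface, the `content` parameter admits the generated model, its `OrHash` form, or `T::Array[T.anything]`, so typed callers can construct the model directly while array elements stay unchecked. A sketch using the constant path from this file:

    # Typed construction per the sig above; `type` defaults to :input_image.
    image = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage.new(
      image_url: "https://example.com/diagram.png", # placeholder URL
      detail: "low"
    )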
module Content extend OpenAI::Internal::Type::Union @@ -612,7 +618,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + T::Array[T.anything] ) end @@ -652,6 +660,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -661,6 +722,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 9ac8bf15..d9c6617c 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -749,7 +749,7 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants @@ -795,7 +795,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -804,7 +806,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -829,7 +831,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -838,7 +840,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -878,6 +882,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -887,6 +944,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 32ad2f72..c4577765 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -693,13 +693,15 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
sig do returns( T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) ) end @@ -743,7 +745,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -752,7 +756,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -769,7 +773,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ), role: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -781,7 +787,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -790,7 +796,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -830,6 +838,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -839,6 +900,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index ae40fb83..7126ad47 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -749,7 +749,7 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants @@ -795,7 +795,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -804,7 +806,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -829,7 +831,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
module Content extend OpenAI::Internal::Type::Union @@ -838,7 +840,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -878,6 +882,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -887,6 +944,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index 9d45d00b..28b81204 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -745,7 +745,7 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants @@ -791,7 +791,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -800,7 +802,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. 
+ # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -825,7 +827,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -834,7 +836,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -874,6 +878,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -883,6 +940,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 3a1d9ea8..306630c7 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -751,7 +751,7 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. 
sig do returns( OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants @@ -797,7 +797,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, @@ -806,7 +808,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -831,7 +833,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -840,7 +842,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + T::Array[T.anything] ) end @@ -880,6 +884,59 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params( + image_url: String, + detail: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -889,6 +946,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. 
One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi index 9d062b87..22a3f239 100644 --- a/rbi/openai/models/graders/label_model_grader.rbi +++ b/rbi/openai/models/graders/label_model_grader.rbi @@ -85,13 +85,15 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, + OpenAI::Graders::LabelModelGrader::Input::Content::InputImage, + T::Array[T.anything] ) ) end @@ -132,14 +134,16 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash, + OpenAI::Graders::LabelModelGrader::Input::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -156,7 +160,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, + OpenAI::Graders::LabelModelGrader::Input::Content::InputImage, + T::Array[T.anything] ), role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol @@ -166,7 +172,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -175,7 +181,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, + OpenAI::Graders::LabelModelGrader::Input::Content::InputImage, + T::Array[T.anything] ) end @@ -213,6 +221,57 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader::Input::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params(image_url: String, detail: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. 
+ type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -222,6 +281,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi index ab602eda..7baa4347 100644 --- a/rbi/openai/models/graders/score_model_grader.rbi +++ b/rbi/openai/models/graders/score_model_grader.rbi @@ -92,13 +92,15 @@ module OpenAI ) end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. sig do returns( T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage, + T::Array[T.anything] ) ) end @@ -139,14 +141,16 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash, + OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage::OrHash, + T::Array[T.anything] ), role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol ).returns(T.attached_class) end def self.new( - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. content:, # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. @@ -163,7 +167,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage, + T::Array[T.anything] ), role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol @@ -173,7 +179,7 @@ module OpenAI def to_hash end - # Text inputs to the model - can contain template strings. + # Inputs to the model - can contain template strings. module Content extend OpenAI::Internal::Type::Union @@ -182,7 +188,9 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage, + T::Array[T.anything] ) end @@ -220,6 +228,57 @@ module OpenAI end end + class InputImage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage, + OpenAI::Internal::AnyHash + ) + end + + # The URL of the image input. + sig { returns(String) } + attr_accessor :image_url + + # The type of the image input. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. 
+ sig { returns(T.nilable(String)) } + attr_reader :detail + + sig { params(detail: String).void } + attr_writer :detail + + # An image input to the model. + sig do + params(image_url: String, detail: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The URL of the image input. + image_url:, + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail: nil, + # The type of the image input. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { image_url: String, type: Symbol, detail: String } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -229,6 +288,14 @@ module OpenAI end def self.variants end + + AnArrayOfInputTextAndInputImageArray = + T.let( + OpenAI::Internal::Type::ArrayOf[ + OpenAI::Internal::Type::Unknown + ], + OpenAI::Internal::Type::Converter + ) end # The role of the message input. One of `user`, `assistant`, `system`, or diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 26468913..88abb4fb 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -218,6 +218,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -234,7 +236,34 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { image_url: String, type: :input_image, detail: String } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? + + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index 8c12eed5..a4e24364 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -234,6 +234,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -250,7 +252,34 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { image_url: String, type: :input_image, detail: String } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? 
+ + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index b9897b66..3590bb4d 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -350,6 +350,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -369,7 +371,38 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { + image_url: String, + type: :input_image, + detail: String + } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? + + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index 1d4e8ac2..c16120a9 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -313,6 +313,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -332,7 +334,38 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { + image_url: String, + type: :input_image, + detail: String + } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? 
+ + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 97e64211..d73a072b 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -350,6 +350,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -369,7 +371,38 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { + image_url: String, + type: :input_image, + detail: String + } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? + + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index be5a46e1..5e91e0f5 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -350,6 +350,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -369,7 +371,38 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { + image_url: String, + type: :input_image, + detail: String + } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? 
+ + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index 63418be1..874cf7d0 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -350,6 +350,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -369,7 +371,38 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { + image_url: String, + type: :input_image, + detail: String + } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? + + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs index e6970199..29bcb103 100644 --- a/sig/openai/models/graders/label_model_grader.rbs +++ b/sig/openai/models/graders/label_model_grader.rbs @@ -78,6 +78,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + | OpenAI::Graders::LabelModelGrader::Input::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -94,7 +96,34 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { image_url: String, type: :input_image, detail: String } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? 
+ + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs index caf20048..5ff024a4 100644 --- a/sig/openai/models/graders/score_model_grader.rbs +++ b/sig/openai/models/graders/score_model_grader.rbs @@ -82,6 +82,8 @@ module OpenAI String | OpenAI::Responses::ResponseInputText | OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + | OpenAI::Graders::ScoreModelGrader::Input::Content::InputImage + | ::Array[top] module Content extend OpenAI::Internal::Type::Union @@ -98,7 +100,34 @@ module OpenAI def to_hash: -> { text: String, type: :output_text } end + type input_image = + { image_url: String, type: :input_image, detail: String } + + class InputImage < OpenAI::Internal::Type::BaseModel + attr_accessor image_url: String + + attr_accessor type: :input_image + + attr_reader detail: String? + + def detail=: (String) -> String + + def initialize: ( + image_url: String, + ?detail: String, + ?type: :input_image + ) -> void + + def to_hash: -> { + image_url: String, + type: :input_image, + detail: String + } + end + def self?.variants: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::content] + + AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end type role = :user | :assistant | :system | :developer From 32deea021c84d58e7ab30c64d7a66957ab7ec20a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 16:20:58 +0000 Subject: [PATCH 256/295] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index be597f64..4f41de4f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-82fd6fcb3eea81cbbe09a6f831c82219f1251e1b76474b4c41f424bf277e6a71.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-79dcb0ae501ac17004f50aecb112a798290ab3727fbe7c7d1b34299e38ed4f8e.yml openapi_spec_hash: c8d54bd1ae3d704f6b6f72ffd2f876d8 -config_hash: 3315d58b60faf63b1bee251b81837cda +config_hash: 167ad0ca036d0f023c78e6496b4311e8 From aeb942c0f54eda349b6923b2429749faa95a648b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 16:24:25 +0000 Subject: [PATCH 257/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d52d2b97..3b07edf5 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.13.0" + ".": "0.13.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 87940077..4cf4368d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.13.0) + openai (0.13.1) connection_pool GEM diff --git a/README.md b/README.md index f22fed8a..76df8a4c 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.13.0" +gem "openai", "~> 0.13.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 69be28c0..ddfc5bda 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.13.0" + VERSION = "0.13.1" end From 6140352503ff3af094c82d2b2ab2976e39a132f6 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 21:28:24 +0000 Subject: [PATCH 258/295] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index 4f41de4f..da43840c 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-79dcb0ae501ac17004f50aecb112a798290ab3727fbe7c7d1b34299e38ed4f8e.yml -openapi_spec_hash: c8d54bd1ae3d704f6b6f72ffd2f876d8 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c7dacca97e28bceff218684bb429481a70aa47aadad983ed9178bfda75ff4cd2.yml +openapi_spec_hash: 28eb1bb901ca10d2e37db4606d2bcfa7 config_hash: 167ad0ca036d0f023c78e6496b4311e8 From cc1a650c98a1da621c11b96fedd69c6d770c328b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Jul 2025 16:25:20 +0000 Subject: [PATCH 259/295] feat(api): manual updates --- .stats.yml | 6 +- lib/openai.rb | 6 + lib/openai/models.rb | 12 + .../models/image_edit_completed_event.rb | 198 ++++++++++ lib/openai/models/image_edit_params.rb | 37 +- .../models/image_edit_partial_image_event.rb | 135 +++++++ lib/openai/models/image_edit_stream_event.rb | 21 ++ .../models/image_gen_completed_event.rb | 198 ++++++++++ .../models/image_gen_partial_image_event.rb | 135 +++++++ lib/openai/models/image_gen_stream_event.rb | 21 ++ lib/openai/models/image_generate_params.rb | 14 +- lib/openai/models/images_response.rb | 3 + .../responses/response_output_refusal.rb | 4 +- lib/openai/models/responses/tool.rb | 31 +- lib/openai/resources/images.rb | 142 ++++++- rbi/openai/models.rbi | 12 + .../models/image_edit_completed_event.rbi | 346 ++++++++++++++++++ rbi/openai/models/image_edit_params.rbi | 51 +++ .../models/image_edit_partial_image_event.rbi | 249 +++++++++++++ rbi/openai/models/image_edit_stream_event.rbi | 22 ++ .../models/image_gen_completed_event.rbi | 339 +++++++++++++++++ .../models/image_gen_partial_image_event.rbi | 243 ++++++++++++ rbi/openai/models/image_gen_stream_event.rbi | 22 ++ rbi/openai/models/image_generate_params.rbi | 12 + .../responses/response_output_refusal.rbi | 4 +- rbi/openai/models/responses/tool.rbi | 61 +++ rbi/openai/resources/images.rbi | 225 ++++++++++++ sig/openai/models.rbs | 12 + .../models/image_edit_completed_event.rbs | 150 ++++++++ sig/openai/models/image_edit_params.rbs | 21 ++ .../models/image_edit_partial_image_event.rbs | 105 ++++++ sig/openai/models/image_edit_stream_event.rbs | 12 + .../models/image_gen_completed_event.rbs | 150 ++++++++ .../models/image_gen_partial_image_event.rbs | 105 ++++++ sig/openai/models/image_gen_stream_event.rbs | 12 + sig/openai/models/image_generate_params.rbs | 5 + 
sig/openai/models/responses/tool.rbs | 16 + sig/openai/resources/images.rbs | 38 ++ 38 files changed, 3163 insertions(+), 12 deletions(-) create mode 100644 lib/openai/models/image_edit_completed_event.rb create mode 100644 lib/openai/models/image_edit_partial_image_event.rb create mode 100644 lib/openai/models/image_edit_stream_event.rb create mode 100644 lib/openai/models/image_gen_completed_event.rb create mode 100644 lib/openai/models/image_gen_partial_image_event.rb create mode 100644 lib/openai/models/image_gen_stream_event.rb create mode 100644 rbi/openai/models/image_edit_completed_event.rbi create mode 100644 rbi/openai/models/image_edit_partial_image_event.rbi create mode 100644 rbi/openai/models/image_edit_stream_event.rbi create mode 100644 rbi/openai/models/image_gen_completed_event.rbi create mode 100644 rbi/openai/models/image_gen_partial_image_event.rbi create mode 100644 rbi/openai/models/image_gen_stream_event.rbi create mode 100644 sig/openai/models/image_edit_completed_event.rbs create mode 100644 sig/openai/models/image_edit_partial_image_event.rbs create mode 100644 sig/openai/models/image_edit_stream_event.rbs create mode 100644 sig/openai/models/image_gen_completed_event.rbs create mode 100644 sig/openai/models/image_gen_partial_image_event.rbs create mode 100644 sig/openai/models/image_gen_stream_event.rbs diff --git a/.stats.yml b/.stats.yml index da43840c..ca458135 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-c7dacca97e28bceff218684bb429481a70aa47aadad983ed9178bfda75ff4cd2.yml -openapi_spec_hash: 28eb1bb901ca10d2e37db4606d2bcfa7 -config_hash: 167ad0ca036d0f023c78e6496b4311e8 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-670ea0d2cc44f52a87dd3cadea45632953283e0636ba30788fdbdb22a232ccac.yml +openapi_spec_hash: d8b7d38911fead545adf3e4297956410 +config_hash: 5525bda35e48ea6387c6175c4d1651fa diff --git a/lib/openai.rb b/lib/openai.rb index c637e6b1..d0794067 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -315,8 +315,14 @@ require_relative "openai/models/graders/string_check_grader" require_relative "openai/models/image" require_relative "openai/models/image_create_variation_params" +require_relative "openai/models/image_edit_completed_event" require_relative "openai/models/image_edit_params" +require_relative "openai/models/image_edit_partial_image_event" +require_relative "openai/models/image_edit_stream_event" +require_relative "openai/models/image_gen_completed_event" require_relative "openai/models/image_generate_params" +require_relative "openai/models/image_gen_partial_image_event" +require_relative "openai/models/image_gen_stream_event" require_relative "openai/models/image_model" require_relative "openai/models/images_response" require_relative "openai/models/metadata" diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 3261259a..26243742 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -152,10 +152,22 @@ module OpenAI ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + ImageEditCompletedEvent = OpenAI::Models::ImageEditCompletedEvent + ImageEditParams = OpenAI::Models::ImageEditParams + ImageEditPartialImageEvent = OpenAI::Models::ImageEditPartialImageEvent + + ImageEditStreamEvent = OpenAI::Models::ImageEditStreamEvent + + ImageGenCompletedEvent = OpenAI::Models::ImageGenCompletedEvent + ImageGenerateParams = 
OpenAI::Models::ImageGenerateParams + ImageGenPartialImageEvent = OpenAI::Models::ImageGenPartialImageEvent + + ImageGenStreamEvent = OpenAI::Models::ImageGenStreamEvent + ImageModel = OpenAI::Models::ImageModel ImagesResponse = OpenAI::Models::ImagesResponse diff --git a/lib/openai/models/image_edit_completed_event.rb b/lib/openai/models/image_edit_completed_event.rb new file mode 100644 index 00000000..2038c5f9 --- /dev/null +++ b/lib/openai/models/image_edit_completed_event.rb @@ -0,0 +1,198 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ImageEditCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute b64_json + # Base64-encoded final edited image data, suitable for rendering as an image. + # + # @return [String] + required :b64_json, String + + # @!attribute background + # The background setting for the edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditCompletedEvent::Background] + required :background, enum: -> { OpenAI::ImageEditCompletedEvent::Background } + + # @!attribute created_at + # The Unix timestamp when the event was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute output_format + # The output format for the edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditCompletedEvent::OutputFormat] + required :output_format, enum: -> { OpenAI::ImageEditCompletedEvent::OutputFormat } + + # @!attribute quality + # The quality setting for the edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditCompletedEvent::Quality] + required :quality, enum: -> { OpenAI::ImageEditCompletedEvent::Quality } + + # @!attribute size + # The size of the edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditCompletedEvent::Size] + required :size, enum: -> { OpenAI::ImageEditCompletedEvent::Size } + + # @!attribute type + # The type of the event. Always `image_edit.completed`. + # + # @return [Symbol, :"image_edit.completed"] + required :type, const: :"image_edit.completed" + + # @!attribute usage + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @return [OpenAI::Models::ImageEditCompletedEvent::Usage] + required :usage, -> { OpenAI::ImageEditCompletedEvent::Usage } + + # @!method initialize(b64_json:, background:, created_at:, output_format:, quality:, size:, usage:, type: :"image_edit.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageEditCompletedEvent} for more details. + # + # Emitted when image editing has completed and the final image is available. + # + # @param b64_json [String] Base64-encoded final edited image data, suitable for rendering as an image. + # + # @param background [Symbol, OpenAI::Models::ImageEditCompletedEvent::Background] The background setting for the edited image. + # + # @param created_at [Integer] The Unix timestamp when the event was created. + # + # @param output_format [Symbol, OpenAI::Models::ImageEditCompletedEvent::OutputFormat] The output format for the edited image. + # + # @param quality [Symbol, OpenAI::Models::ImageEditCompletedEvent::Quality] The quality setting for the edited image. + # + # @param size [Symbol, OpenAI::Models::ImageEditCompletedEvent::Size] The size of the edited image. + # + # @param usage [OpenAI::Models::ImageEditCompletedEvent::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # + # @param type [Symbol, :"image_edit.completed"] The type of the event. Always `image_edit.completed`. 
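The completed event above carries the final image plus usage accounting, while its partial counterpart (defined later in this patch) carries intermediate frames. A hedged consumption sketch: the event classes and their fields come from these hunks, but the exact call shape is assumed from the `@see OpenAI::Resources::Images#stream_raw` reference.

```ruby
require "base64"
require "pathname"
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

# Assumed streaming entry point; see the images resource changes in this patch.
stream = client.images.stream_raw(
  image: Pathname("original.png"),
  prompt: "Add a red wizard hat",
  model: "gpt-image-1",
  partial_images: 2 # request up to 2 intermediate frames
)

stream.each do |event|
  case event
  when OpenAI::Models::ImageEditPartialImageEvent
    # partial_image_index is the 0-based position of this intermediate frame
    File.binwrite("partial-#{event.partial_image_index}.png", Base64.decode64(event.b64_json))
  when OpenAI::Models::ImageEditCompletedEvent
    File.binwrite("final.png", Base64.decode64(event.b64_json))
    puts "total tokens: #{event.usage.total_tokens}"
  end
end
```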
+ + # The background setting for the edited image. + # + # @see OpenAI::Models::ImageEditCompletedEvent#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The output format for the edited image. + # + # @see OpenAI::Models::ImageEditCompletedEvent#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality setting for the edited image. + # + # @see OpenAI::Models::ImageEditCompletedEvent#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The size of the edited image. + # + # @see OpenAI::Models::ImageEditCompletedEvent#size + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024 = :"1024x1024" + SIZE_1024X1536 = :"1024x1536" + SIZE_1536X1024 = :"1536x1024" + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Models::ImageEditCompletedEvent#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute input_tokens + # The number of tokens (images and text) in the input prompt. + # + # @return [Integer] + required :input_tokens, Integer + + # @!attribute input_tokens_details + # The input tokens detailed information for the image generation. + # + # @return [OpenAI::Models::ImageEditCompletedEvent::Usage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails } + + # @!attribute output_tokens + # The number of image tokens in the output image. + # + # @return [Integer] + required :output_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens (images and text) used for the image generation. + # + # @return [Integer] + required :total_tokens, Integer + + # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageEditCompletedEvent::Usage} for more details. + # + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. + # + # @param input_tokens_details [OpenAI::Models::ImageEditCompletedEvent::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # + # @param output_tokens [Integer] The number of image tokens in the output image. + # + # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. + + # @see OpenAI::Models::ImageEditCompletedEvent::Usage#input_tokens_details + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + # @!attribute image_tokens + # The number of image tokens in the input prompt. + # + # @return [Integer] + required :image_tokens, Integer + + # @!attribute text_tokens + # The number of text tokens in the input prompt. + # + # @return [Integer] + required :text_tokens, Integer + + # @!method initialize(image_tokens:, text_tokens:) + # The input tokens detailed information for the image generation. + # + # @param image_tokens [Integer] The number of image tokens in the input prompt. + # + # @param text_tokens [Integer] The number of text tokens in the input prompt. 
+ end + end + end + end +end diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 833d7a5a..05014dd9 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -3,6 +3,8 @@ module OpenAI module Models # @see OpenAI::Resources::Images#edit + # + # @see OpenAI::Resources::Images#stream_raw class ImageEditParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -38,6 +40,14 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageEditParams::Background, nil] optional :background, enum: -> { OpenAI::ImageEditParams::Background }, nil?: true + # @!attribute input_fidelity + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + # + # @return [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] + optional :input_fidelity, enum: -> { OpenAI::ImageEditParams::InputFidelity }, nil?: true + # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, @@ -77,6 +87,14 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] optional :output_format, enum: -> { OpenAI::ImageEditParams::OutputFormat }, nil?: true + # @!attribute partial_images + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + # + # @return [Integer, nil] + optional :partial_images, Integer, nil?: true + # @!attribute quality # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. @@ -110,7 +128,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @!method initialize(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # @@ -120,6 +138,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # + # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features, + # # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. 
Only `dall-e-2` and `gpt-image-1` are sup @@ -130,6 +150,8 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` @@ -179,6 +201,19 @@ module Background # @return [Array] end + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + module InputFidelity + extend OpenAI::Internal::Type::Enum + + HIGH = :high + LOW = :low + + # @!method self.values + # @return [Array] + end + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. diff --git a/lib/openai/models/image_edit_partial_image_event.rb b/lib/openai/models/image_edit_partial_image_event.rb new file mode 100644 index 00000000..95d5bd96 --- /dev/null +++ b/lib/openai/models/image_edit_partial_image_event.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ImageEditPartialImageEvent < OpenAI::Internal::Type::BaseModel + # @!attribute b64_json + # Base64-encoded partial image data, suitable for rendering as an image. + # + # @return [String] + required :b64_json, String + + # @!attribute background + # The background setting for the requested edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Background] + required :background, enum: -> { OpenAI::ImageEditPartialImageEvent::Background } + + # @!attribute created_at + # The Unix timestamp when the event was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute output_format + # The output format for the requested edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditPartialImageEvent::OutputFormat] + required :output_format, enum: -> { OpenAI::ImageEditPartialImageEvent::OutputFormat } + + # @!attribute partial_image_index + # 0-based index for the partial image (streaming). + # + # @return [Integer] + required :partial_image_index, Integer + + # @!attribute quality + # The quality setting for the requested edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Quality] + required :quality, enum: -> { OpenAI::ImageEditPartialImageEvent::Quality } + + # @!attribute size + # The size of the requested edited image. + # + # @return [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Size] + required :size, enum: -> { OpenAI::ImageEditPartialImageEvent::Size } + + # @!attribute type + # The type of the event. Always `image_edit.partial_image`. 
+ # + # @return [Symbol, :"image_edit.partial_image"] + required :type, const: :"image_edit.partial_image" + + # @!method initialize(b64_json:, background:, created_at:, output_format:, partial_image_index:, quality:, size:, type: :"image_edit.partial_image") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageEditPartialImageEvent} for more details. + # + # Emitted when a partial image is available during image editing streaming. + # + # @param b64_json [String] Base64-encoded partial image data, suitable for rendering as an image. + # + # @param background [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Background] The background setting for the requested edited image. + # + # @param created_at [Integer] The Unix timestamp when the event was created. + # + # @param output_format [Symbol, OpenAI::Models::ImageEditPartialImageEvent::OutputFormat] The output format for the requested edited image. + # + # @param partial_image_index [Integer] 0-based index for the partial image (streaming). + # + # @param quality [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Quality] The quality setting for the requested edited image. + # + # @param size [Symbol, OpenAI::Models::ImageEditPartialImageEvent::Size] The size of the requested edited image. + # + # @param type [Symbol, :"image_edit.partial_image"] The type of the event. Always `image_edit.partial_image`. + + # The background setting for the requested edited image. + # + # @see OpenAI::Models::ImageEditPartialImageEvent#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The output format for the requested edited image. + # + # @see OpenAI::Models::ImageEditPartialImageEvent#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality setting for the requested edited image. + # + # @see OpenAI::Models::ImageEditPartialImageEvent#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The size of the requested edited image. + # + # @see OpenAI::Models::ImageEditPartialImageEvent#size + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024 = :"1024x1024" + SIZE_1024X1536 = :"1024x1536" + SIZE_1536X1024 = :"1536x1024" + AUTO = :auto + + # @!method self.values + # @return [Array] + end + end + end +end diff --git a/lib/openai/models/image_edit_stream_event.rb b/lib/openai/models/image_edit_stream_event.rb new file mode 100644 index 00000000..b72d2c27 --- /dev/null +++ b/lib/openai/models/image_edit_stream_event.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # Emitted when a partial image is available during image editing streaming. + module ImageEditStreamEvent + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Emitted when a partial image is available during image editing streaming. + variant :"image_edit.partial_image", -> { OpenAI::ImageEditPartialImageEvent } + + # Emitted when image editing has completed and the final image is available. 
+ variant :"image_edit.completed", -> { OpenAI::ImageEditCompletedEvent } + + # @!method self.variants + # @return [Array(OpenAI::Models::ImageEditPartialImageEvent, OpenAI::Models::ImageEditCompletedEvent)] + end + end +end diff --git a/lib/openai/models/image_gen_completed_event.rb b/lib/openai/models/image_gen_completed_event.rb new file mode 100644 index 00000000..8a730653 --- /dev/null +++ b/lib/openai/models/image_gen_completed_event.rb @@ -0,0 +1,198 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ImageGenCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute b64_json + # Base64-encoded image data, suitable for rendering as an image. + # + # @return [String] + required :b64_json, String + + # @!attribute background + # The background setting for the generated image. + # + # @return [Symbol, OpenAI::Models::ImageGenCompletedEvent::Background] + required :background, enum: -> { OpenAI::ImageGenCompletedEvent::Background } + + # @!attribute created_at + # The Unix timestamp when the event was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute output_format + # The output format for the generated image. + # + # @return [Symbol, OpenAI::Models::ImageGenCompletedEvent::OutputFormat] + required :output_format, enum: -> { OpenAI::ImageGenCompletedEvent::OutputFormat } + + # @!attribute quality + # The quality setting for the generated image. + # + # @return [Symbol, OpenAI::Models::ImageGenCompletedEvent::Quality] + required :quality, enum: -> { OpenAI::ImageGenCompletedEvent::Quality } + + # @!attribute size + # The size of the generated image. + # + # @return [Symbol, OpenAI::Models::ImageGenCompletedEvent::Size] + required :size, enum: -> { OpenAI::ImageGenCompletedEvent::Size } + + # @!attribute type + # The type of the event. Always `image_generation.completed`. + # + # @return [Symbol, :"image_generation.completed"] + required :type, const: :"image_generation.completed" + + # @!attribute usage + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @return [OpenAI::Models::ImageGenCompletedEvent::Usage] + required :usage, -> { OpenAI::ImageGenCompletedEvent::Usage } + + # @!method initialize(b64_json:, background:, created_at:, output_format:, quality:, size:, usage:, type: :"image_generation.completed") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageGenCompletedEvent} for more details. + # + # Emitted when image generation has completed and the final image is available. + # + # @param b64_json [String] Base64-encoded image data, suitable for rendering as an image. + # + # @param background [Symbol, OpenAI::Models::ImageGenCompletedEvent::Background] The background setting for the generated image. + # + # @param created_at [Integer] The Unix timestamp when the event was created. + # + # @param output_format [Symbol, OpenAI::Models::ImageGenCompletedEvent::OutputFormat] The output format for the generated image. + # + # @param quality [Symbol, OpenAI::Models::ImageGenCompletedEvent::Quality] The quality setting for the generated image. + # + # @param size [Symbol, OpenAI::Models::ImageGenCompletedEvent::Size] The size of the generated image. + # + # @param usage [OpenAI::Models::ImageGenCompletedEvent::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # + # @param type [Symbol, :"image_generation.completed"] The type of the event. Always `image_generation.completed`. 
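Generation-side events mirror the edit-side pair and are discriminated by the `type` constant (`image_generation.partial_image` vs. `image_generation.completed`). A small dispatch sketch using only fields defined in these hunks; how the events are obtained is assumed and elided:

```ruby
# Dispatch on the discriminator symbol rather than the class; both event
# models pin `type` as a required const, per the definitions above.
def handle_image_gen_event(event)
  case event.type
  when :"image_generation.partial_image"
    # partial_image_index is 0-based; at most 3 partials can be requested.
    warn "partial ##{event.partial_image_index} (#{event.size}, #{event.quality})"
  when :"image_generation.completed"
    usage = event.usage
    warn "done: #{event.output_format}, #{usage.input_tokens} in / #{usage.output_tokens} out tokens"
  end
end
```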
+ + # The background setting for the generated image. + # + # @see OpenAI::Models::ImageGenCompletedEvent#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The output format for the generated image. + # + # @see OpenAI::Models::ImageGenCompletedEvent#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality setting for the generated image. + # + # @see OpenAI::Models::ImageGenCompletedEvent#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The size of the generated image. + # + # @see OpenAI::Models::ImageGenCompletedEvent#size + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024 = :"1024x1024" + SIZE_1024X1536 = :"1024x1536" + SIZE_1536X1024 = :"1536x1024" + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Models::ImageGenCompletedEvent#usage + class Usage < OpenAI::Internal::Type::BaseModel + # @!attribute input_tokens + # The number of tokens (images and text) in the input prompt. + # + # @return [Integer] + required :input_tokens, Integer + + # @!attribute input_tokens_details + # The input tokens detailed information for the image generation. + # + # @return [OpenAI::Models::ImageGenCompletedEvent::Usage::InputTokensDetails] + required :input_tokens_details, -> { OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails } + + # @!attribute output_tokens + # The number of image tokens in the output image. + # + # @return [Integer] + required :output_tokens, Integer + + # @!attribute total_tokens + # The total number of tokens (images and text) used for the image generation. + # + # @return [Integer] + required :total_tokens, Integer + + # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageGenCompletedEvent::Usage} for more details. + # + # For `gpt-image-1` only, the token usage information for the image generation. + # + # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. + # + # @param input_tokens_details [OpenAI::Models::ImageGenCompletedEvent::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # + # @param output_tokens [Integer] The number of image tokens in the output image. + # + # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. + + # @see OpenAI::Models::ImageGenCompletedEvent::Usage#input_tokens_details + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + # @!attribute image_tokens + # The number of image tokens in the input prompt. + # + # @return [Integer] + required :image_tokens, Integer + + # @!attribute text_tokens + # The number of text tokens in the input prompt. + # + # @return [Integer] + required :text_tokens, Integer + + # @!method initialize(image_tokens:, text_tokens:) + # The input tokens detailed information for the image generation. + # + # @param image_tokens [Integer] The number of image tokens in the input prompt. + # + # @param text_tokens [Integer] The number of text tokens in the input prompt. 
+ end + end + end + end +end diff --git a/lib/openai/models/image_gen_partial_image_event.rb b/lib/openai/models/image_gen_partial_image_event.rb new file mode 100644 index 00000000..33601e7c --- /dev/null +++ b/lib/openai/models/image_gen_partial_image_event.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ImageGenPartialImageEvent < OpenAI::Internal::Type::BaseModel + # @!attribute b64_json + # Base64-encoded partial image data, suitable for rendering as an image. + # + # @return [String] + required :b64_json, String + + # @!attribute background + # The background setting for the requested image. + # + # @return [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Background] + required :background, enum: -> { OpenAI::ImageGenPartialImageEvent::Background } + + # @!attribute created_at + # The Unix timestamp when the event was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute output_format + # The output format for the requested image. + # + # @return [Symbol, OpenAI::Models::ImageGenPartialImageEvent::OutputFormat] + required :output_format, enum: -> { OpenAI::ImageGenPartialImageEvent::OutputFormat } + + # @!attribute partial_image_index + # 0-based index for the partial image (streaming). + # + # @return [Integer] + required :partial_image_index, Integer + + # @!attribute quality + # The quality setting for the requested image. + # + # @return [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Quality] + required :quality, enum: -> { OpenAI::ImageGenPartialImageEvent::Quality } + + # @!attribute size + # The size of the requested image. + # + # @return [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Size] + required :size, enum: -> { OpenAI::ImageGenPartialImageEvent::Size } + + # @!attribute type + # The type of the event. Always `image_generation.partial_image`. + # + # @return [Symbol, :"image_generation.partial_image"] + required :type, const: :"image_generation.partial_image" + + # @!method initialize(b64_json:, background:, created_at:, output_format:, partial_image_index:, quality:, size:, type: :"image_generation.partial_image") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageGenPartialImageEvent} for more details. + # + # Emitted when a partial image is available during image generation streaming. + # + # @param b64_json [String] Base64-encoded partial image data, suitable for rendering as an image. + # + # @param background [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Background] The background setting for the requested image. + # + # @param created_at [Integer] The Unix timestamp when the event was created. + # + # @param output_format [Symbol, OpenAI::Models::ImageGenPartialImageEvent::OutputFormat] The output format for the requested image. + # + # @param partial_image_index [Integer] 0-based index for the partial image (streaming). + # + # @param quality [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Quality] The quality setting for the requested image. + # + # @param size [Symbol, OpenAI::Models::ImageGenPartialImageEvent::Size] The size of the requested image. + # + # @param type [Symbol, :"image_generation.partial_image"] The type of the event. Always `image_generation.partial_image`. + + # The background setting for the requested image. 
+ # + # @see OpenAI::Models::ImageGenPartialImageEvent#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The output format for the requested image. + # + # @see OpenAI::Models::ImageGenPartialImageEvent#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality setting for the requested image. + # + # @see OpenAI::Models::ImageGenPartialImageEvent#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The size of the requested image. + # + # @see OpenAI::Models::ImageGenPartialImageEvent#size + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024 = :"1024x1024" + SIZE_1024X1536 = :"1024x1536" + SIZE_1536X1024 = :"1536x1024" + AUTO = :auto + + # @!method self.values + # @return [Array] + end + end + end +end diff --git a/lib/openai/models/image_gen_stream_event.rb b/lib/openai/models/image_gen_stream_event.rb new file mode 100644 index 00000000..91af7984 --- /dev/null +++ b/lib/openai/models/image_gen_stream_event.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # Emitted when a partial image is available during image generation streaming. + module ImageGenStreamEvent + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Emitted when a partial image is available during image generation streaming. + variant :"image_generation.partial_image", -> { OpenAI::ImageGenPartialImageEvent } + + # Emitted when image generation has completed and the final image is available. + variant :"image_generation.completed", -> { OpenAI::ImageGenCompletedEvent } + + # @!method self.variants + # @return [Array(OpenAI::Models::ImageGenPartialImageEvent, OpenAI::Models::ImageGenCompletedEvent)] + end + end +end diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index b0b47d41..d63bfdf7 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -3,6 +3,8 @@ module OpenAI module Models # @see OpenAI::Resources::Images#generate + # + # @see OpenAI::Resources::Images#stream_raw class ImageGenerateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters @@ -64,6 +66,14 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] optional :output_format, enum: -> { OpenAI::ImageGenerateParams::OutputFormat }, nil?: true + # @!attribute partial_images + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + # + # @return [Integer, nil] + optional :partial_images, Integer, nil?: true + # @!attribute quality # The quality of the image that will be generated. 
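#
# A minimal streaming sketch for `partial_images` (assumes a configured
# `OpenAI::Client` in `client`; the model, prompt, and file names are
# illustrative):
#
#   require "base64"
#
#   stream = client.images.stream_raw(
#     model: "gpt-image-1",
#     prompt: "A watercolor lighthouse at dusk",
#     partial_images: 2 # up to two partial frames before the final image
#   )
#   stream.each do |event|
#     case event
#     when OpenAI::Models::ImageGenPartialImageEvent
#       File.binwrite("partial-#{event.partial_image_index}.png", Base64.decode64(event.b64_json))
#     when OpenAI::Models::ImageGenCompletedEvent
#       File.binwrite("final.png", Base64.decode64(event.b64_json))
#     end
#   end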
# @@ -111,7 +121,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @!method initialize(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageGenerateParams} for more details. # @@ -129,6 +139,8 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index d6f6e63d..725d2727 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -152,6 +152,9 @@ class Usage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImagesResponse::Usage} for more details. + # # For `gpt-image-1` only, the token usage information for the image generation. # # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb index 62316c99..bf6272e6 100644 --- a/lib/openai/models/responses/response_output_refusal.rb +++ b/lib/openai/models/responses/response_output_refusal.rb @@ -5,7 +5,7 @@ module Models module Responses class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel # @!attribute refusal - # The refusal explanationfrom the model. + # The refusal explanation from the model. # # @return [String] required :refusal, String @@ -19,7 +19,7 @@ class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel # @!method initialize(refusal:, type: :refusal) # A refusal from the model. # - # @param refusal [String] The refusal explanationfrom the model. + # @param refusal [String] The refusal explanation from the model. # # @param type [Symbol, :refusal] The type of the refusal. Always `refusal`. 
end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index b8ed7736..16948259 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -303,6 +303,18 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background, nil] optional :background, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Background } + # @!attribute input_fidelity + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::InputFidelity, nil] + optional :input_fidelity, + enum: -> { + OpenAI::Responses::Tool::ImageGeneration::InputFidelity + }, + nil?: true + # @!attribute input_image_mask # Optional mask for inpainting. Contains `image_url` (string, optional) and # `file_id` (string, optional). @@ -356,7 +368,7 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Size, nil] optional :size, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Size } - # @!method initialize(background: nil, input_image_mask: nil, model: nil, moderation: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, size: nil, type: :image_generation) + # @!method initialize(background: nil, input_fidelity: nil, input_image_mask: nil, model: nil, moderation: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, size: nil, type: :image_generation) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Tool::ImageGeneration} for more details. # @@ -364,6 +376,8 @@ class ImageGeneration < OpenAI::Internal::Type::BaseModel # # @param background [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`, # + # @param input_fidelity [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::InputFidelity, nil] Control how much effort the model will exert to match the style and features, + # # @param input_image_mask [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url` # # @param model [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Model] The image generation model to use. Default: `gpt-image-1`. @@ -397,6 +411,21 @@ module Background # @return [Array] end + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. 
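#
# As a sketch (only `input_fidelity` comes from this patch; passing the
# tool through a Responses API request happens elsewhere):
#
#   tool = OpenAI::Responses::Tool::ImageGeneration.new(input_fidelity: :high)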
+ # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#input_fidelity + module InputFidelity + extend OpenAI::Internal::Type::Enum + + HIGH = :high + LOW = :low + + # @!method self.values + # @return [Array] + end + # @see OpenAI::Models::Responses::Tool::ImageGeneration#input_image_mask class InputImageMask < OpenAI::Internal::Type::BaseModel # @!attribute file_id diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index e1d26736..2779b538 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -39,13 +39,15 @@ def create_variation(params) ) end + # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. # - # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @overload edit(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # @@ -53,6 +55,8 @@ def create_variation(params) # # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # + # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features, + # # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup @@ -63,6 +67,8 @@ def create_variation(params) # # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` @@ -78,6 +84,10 @@ def create_variation(params) # @see OpenAI::Models::ImageEditParams def edit(params) parsed, options = OpenAI::ImageEditParams.dump_request(params) + if parsed[:stream] + message = "Please use `#stream_raw` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "images/edits", @@ -88,13 +98,76 @@ def edit(params) ) end + # See {OpenAI::Resources::Images#edit} for non-streaming counterpart. + # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageEditParams} for more details. + # + # Creates an edited or extended image given one or more source images and a + # prompt. 
This endpoint only supports `gpt-image-1` and `dall-e-2`. + # + # @overload stream_raw(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. + # + # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character + # + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # + # @param input_fidelity [Symbol, OpenAI::Models::ImageEditParams::InputFidelity, nil] Control how much effort the model will exert to match the style and features, + # + # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. + # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter + # + # @param output_format [Symbol, OpenAI::Models::ImageEditParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is + # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::Stream] + # + # @see OpenAI::Models::ImageEditParams + def stream_raw(params) + parsed, options = OpenAI::ImageEditParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#edit` for the non-streaming use case." + raise ArgumentError.new(message) + end + parsed.store(:stream, true) + @client.request( + method: :post, + path: "images/edits", + headers: {"content-type" => "multipart/form-data", "accept" => "text/event-stream"}, + body: parsed, + stream: OpenAI::Internal::Stream, + model: OpenAI::ImageEditStreamEvent, + options: options + ) + end + + # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageGenerateParams} for more details. # # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). 
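#
# A minimal non-streaming sketch (assumes a configured `OpenAI::Client` in
# `client`; reading `data.first.b64_json` off the response assumes the
# base64 response format used by `gpt-image-1`):
#
#   require "base64"
#
#   response = client.images.generate(model: "gpt-image-1", prompt: "A calico cat")
#   File.binwrite("cat.png", Base64.decode64(response.data.first.b64_json))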
# - # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @overload generate(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # @@ -110,6 +183,8 @@ def edit(params) # # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned @@ -127,6 +202,10 @@ def edit(params) # @see OpenAI::Models::ImageGenerateParams def generate(params) parsed, options = OpenAI::ImageGenerateParams.dump_request(params) + if parsed[:stream] + message = "Please use `#stream_raw` for the streaming use case." + raise ArgumentError.new(message) + end @client.request( method: :post, path: "images/generations", @@ -136,6 +215,65 @@ def generate(params) ) end + # See {OpenAI::Resources::Images#generate} for non-streaming counterpart. + # + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImageGenerateParams} for more details. + # + # Creates an image given a prompt. + # [Learn more](https://platform.openai.com/docs/guides/images). + # + # @overload stream_raw(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # + # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte + # + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # + # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # + # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only + # + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # + # @param partial_images [Integer, nil] The number of partial images to generate. This parameter is used for + # + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. 
+ # + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # + # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::Stream] + # + # @see OpenAI::Models::ImageGenerateParams + def stream_raw(params) + parsed, options = OpenAI::ImageGenerateParams.dump_request(params) + unless parsed.fetch(:stream, true) + message = "Please use `#generate` for the non-streaming use case." + raise ArgumentError.new(message) + end + parsed.store(:stream, true) + @client.request( + method: :post, + path: "images/generations", + headers: {"accept" => "text/event-stream"}, + body: parsed, + stream: OpenAI::Internal::Stream, + model: OpenAI::ImageGenStreamEvent, + options: options + ) + end + # @api private # # @param client [OpenAI::Client] diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index 78cb3785..c5a9d836 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -115,10 +115,22 @@ module OpenAI ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + ImageEditCompletedEvent = OpenAI::Models::ImageEditCompletedEvent + ImageEditParams = OpenAI::Models::ImageEditParams + ImageEditPartialImageEvent = OpenAI::Models::ImageEditPartialImageEvent + + ImageEditStreamEvent = OpenAI::Models::ImageEditStreamEvent + + ImageGenCompletedEvent = OpenAI::Models::ImageGenCompletedEvent + ImageGenerateParams = OpenAI::Models::ImageGenerateParams + ImageGenPartialImageEvent = OpenAI::Models::ImageGenPartialImageEvent + + ImageGenStreamEvent = OpenAI::Models::ImageGenStreamEvent + ImageModel = OpenAI::Models::ImageModel ImagesResponse = OpenAI::Models::ImagesResponse diff --git a/rbi/openai/models/image_edit_completed_event.rbi b/rbi/openai/models/image_edit_completed_event.rbi new file mode 100644 index 00000000..07cfa0ae --- /dev/null +++ b/rbi/openai/models/image_edit_completed_event.rbi @@ -0,0 +1,346 @@ +# typed: strong + +module OpenAI + module Models + class ImageEditCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ImageEditCompletedEvent, OpenAI::Internal::AnyHash) + end + + # Base64-encoded final edited image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :b64_json + + # The background setting for the edited image. + sig { returns(OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol) } + attr_accessor :background + + # The Unix timestamp when the event was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The output format for the edited image. + sig do + returns(OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol) + end + attr_accessor :output_format + + # The quality setting for the edited image. + sig { returns(OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol) } + attr_accessor :quality + + # The size of the edited image. + sig { returns(OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol) } + attr_accessor :size + + # The type of the event. 
Always `image_edit.completed`. + sig { returns(Symbol) } + attr_accessor :type + + # For `gpt-image-1` only, the token usage information for the image generation. + sig { returns(OpenAI::ImageEditCompletedEvent::Usage) } + attr_reader :usage + + sig { params(usage: OpenAI::ImageEditCompletedEvent::Usage::OrHash).void } + attr_writer :usage + + # Emitted when image editing has completed and the final image is available. + sig do + params( + b64_json: String, + background: OpenAI::ImageEditCompletedEvent::Background::OrSymbol, + created_at: Integer, + output_format: + OpenAI::ImageEditCompletedEvent::OutputFormat::OrSymbol, + quality: OpenAI::ImageEditCompletedEvent::Quality::OrSymbol, + size: OpenAI::ImageEditCompletedEvent::Size::OrSymbol, + usage: OpenAI::ImageEditCompletedEvent::Usage::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Base64-encoded final edited image data, suitable for rendering as an image. + b64_json:, + # The background setting for the edited image. + background:, + # The Unix timestamp when the event was created. + created_at:, + # The output format for the edited image. + output_format:, + # The quality setting for the edited image. + quality:, + # The size of the edited image. + size:, + # For `gpt-image-1` only, the token usage information for the image generation. + usage:, + # The type of the event. Always `image_edit.completed`. + type: :"image_edit.completed" + ) + end + + sig do + override.returns( + { + b64_json: String, + background: + OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol, + created_at: Integer, + output_format: + OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol, + quality: OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol, + size: OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol, + type: Symbol, + usage: OpenAI::ImageEditCompletedEvent::Usage + } + ) + end + def to_hash + end + + # The background setting for the edited image. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditCompletedEvent::Background) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::ImageEditCompletedEvent::Background::TaggedSymbol] + ) + end + def self.values + end + end + + # The output format for the edited image. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditCompletedEvent::OutputFormat) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ImageEditCompletedEvent::OutputFormat::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The quality setting for the edited image. 
+ module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditCompletedEvent::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let(:low, OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol) + MEDIUM = + T.let(:medium, OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol) + HIGH = + T.let(:high, OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditCompletedEvent::Quality::TaggedSymbol] + ) + end + def self.values + end + end + + # The size of the edited image. + module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditCompletedEvent::Size) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol + ) + AUTO = T.let(:auto, OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditCompletedEvent::Size::TaggedSymbol] + ) + end + def self.values + end + end + + class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ImageEditCompletedEvent::Usage, + OpenAI::Internal::AnyHash + ) + end + + # The number of tokens (images and text) in the input prompt. + sig { returns(Integer) } + attr_accessor :input_tokens + + # The input tokens detailed information for the image generation. + sig do + returns(OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails) + end + attr_reader :input_tokens_details + + sig do + params( + input_tokens_details: + OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails::OrHash + ).void + end + attr_writer :input_tokens_details + + # The number of image tokens in the output image. + sig { returns(Integer) } + attr_accessor :output_tokens + + # The total number of tokens (images and text) used for the image generation. + sig { returns(Integer) } + attr_accessor :total_tokens + + # For `gpt-image-1` only, the token usage information for the image generation. + sig do + params( + input_tokens: Integer, + input_tokens_details: + OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails::OrHash, + output_tokens: Integer, + total_tokens: Integer + ).returns(T.attached_class) + end + def self.new( + # The number of tokens (images and text) in the input prompt. + input_tokens:, + # The input tokens detailed information for the image generation. + input_tokens_details:, + # The number of image tokens in the output image. + output_tokens:, + # The total number of tokens (images and text) used for the image generation. + total_tokens: + ) + end + + sig do + override.returns( + { + input_tokens: Integer, + input_tokens_details: + OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash + end + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails, + OpenAI::Internal::AnyHash + ) + end + + # The number of image tokens in the input prompt. 
+ sig { returns(Integer) } + attr_accessor :image_tokens + + # The number of text tokens in the input prompt. + sig { returns(Integer) } + attr_accessor :text_tokens + + # The input tokens detailed information for the image generation. + sig do + params(image_tokens: Integer, text_tokens: Integer).returns( + T.attached_class + ) + end + def self.new( + # The number of image tokens in the input prompt. + image_tokens:, + # The number of text tokens in the input prompt. + text_tokens: + ) + end + + sig do + override.returns({ image_tokens: Integer, text_tokens: Integer }) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 0e0957b1..6354595b 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -36,6 +36,14 @@ module OpenAI sig { returns(T.nilable(OpenAI::ImageEditParams::Background::OrSymbol)) } attr_accessor :background + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + sig do + returns(T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol)) + end + attr_accessor :input_fidelity + # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than @@ -70,6 +78,12 @@ module OpenAI end attr_accessor :output_format + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + sig { returns(T.nilable(Integer)) } + attr_accessor :partial_images + # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. @@ -105,12 +119,15 @@ module OpenAI image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), + input_fidelity: + T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol), mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), @@ -139,6 +156,10 @@ module OpenAI # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. background: nil, + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + input_fidelity: nil, # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. 
Must be a valid PNG file, less than @@ -158,6 +179,10 @@ module OpenAI # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The # default value is `png`. output_format: nil, + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. @@ -186,12 +211,15 @@ module OpenAI prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), + input_fidelity: + T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol), mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), @@ -258,6 +286,29 @@ module OpenAI end end + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + module InputFidelity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageEditParams::InputFidelity) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + HIGH = + T.let(:high, OpenAI::ImageEditParams::InputFidelity::TaggedSymbol) + LOW = T.let(:low, OpenAI::ImageEditParams::InputFidelity::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditParams::InputFidelity::TaggedSymbol] + ) + end + def self.values + end + end + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. diff --git a/rbi/openai/models/image_edit_partial_image_event.rbi b/rbi/openai/models/image_edit_partial_image_event.rbi new file mode 100644 index 00000000..f4ae7d8b --- /dev/null +++ b/rbi/openai/models/image_edit_partial_image_event.rbi @@ -0,0 +1,249 @@ +# typed: strong + +module OpenAI + module Models + class ImageEditPartialImageEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ImageEditPartialImageEvent, OpenAI::Internal::AnyHash) + end + + # Base64-encoded partial image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :b64_json + + # The background setting for the requested edited image. + sig do + returns(OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol) + end + attr_accessor :background + + # The Unix timestamp when the event was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The output format for the requested edited image. + sig do + returns(OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol) + end + attr_accessor :output_format + + # 0-based index for the partial image (streaming). + sig { returns(Integer) } + attr_accessor :partial_image_index + + # The quality setting for the requested edited image. 
+ sig { returns(OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol) } + attr_accessor :quality + + # The size of the requested edited image. + sig { returns(OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol) } + attr_accessor :size + + # The type of the event. Always `image_edit.partial_image`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a partial image is available during image editing streaming. + sig do + params( + b64_json: String, + background: OpenAI::ImageEditPartialImageEvent::Background::OrSymbol, + created_at: Integer, + output_format: + OpenAI::ImageEditPartialImageEvent::OutputFormat::OrSymbol, + partial_image_index: Integer, + quality: OpenAI::ImageEditPartialImageEvent::Quality::OrSymbol, + size: OpenAI::ImageEditPartialImageEvent::Size::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Base64-encoded partial image data, suitable for rendering as an image. + b64_json:, + # The background setting for the requested edited image. + background:, + # The Unix timestamp when the event was created. + created_at:, + # The output format for the requested edited image. + output_format:, + # 0-based index for the partial image (streaming). + partial_image_index:, + # The quality setting for the requested edited image. + quality:, + # The size of the requested edited image. + size:, + # The type of the event. Always `image_edit.partial_image`. + type: :"image_edit.partial_image" + ) + end + + sig do + override.returns( + { + b64_json: String, + background: + OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol, + created_at: Integer, + output_format: + OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol, + partial_image_index: Integer, + quality: OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol, + size: OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The background setting for the requested edited image. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditPartialImageEvent::Background) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ImageEditPartialImageEvent::Background::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The output format for the requested edited image. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditPartialImageEvent::OutputFormat) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ImageEditPartialImageEvent::OutputFormat::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The quality setting for the requested edited image. 
+ module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditPartialImageEvent::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let(:low, OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol) + MEDIUM = + T.let( + :medium, + OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::ImageEditPartialImageEvent::Quality::TaggedSymbol] + ) + end + def self.values + end + end + + # The size of the requested edited image. + module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageEditPartialImageEvent::Size) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol + ) + AUTO = + T.let(:auto, OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageEditPartialImageEvent::Size::TaggedSymbol] + ) + end + def self.values + end + end + end + end +end diff --git a/rbi/openai/models/image_edit_stream_event.rbi b/rbi/openai/models/image_edit_stream_event.rbi new file mode 100644 index 00000000..5bfaed0a --- /dev/null +++ b/rbi/openai/models/image_edit_stream_event.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Models + # Emitted when a partial image is available during image editing streaming. + module ImageEditStreamEvent + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::ImageEditPartialImageEvent, + OpenAI::ImageEditCompletedEvent + ) + end + + sig { override.returns(T::Array[OpenAI::ImageEditStreamEvent::Variants]) } + def self.variants + end + end + end +end diff --git a/rbi/openai/models/image_gen_completed_event.rbi b/rbi/openai/models/image_gen_completed_event.rbi new file mode 100644 index 00000000..922b39b7 --- /dev/null +++ b/rbi/openai/models/image_gen_completed_event.rbi @@ -0,0 +1,339 @@ +# typed: strong + +module OpenAI + module Models + class ImageGenCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ImageGenCompletedEvent, OpenAI::Internal::AnyHash) + end + + # Base64-encoded image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :b64_json + + # The background setting for the generated image. + sig { returns(OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol) } + attr_accessor :background + + # The Unix timestamp when the event was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The output format for the generated image. + sig do + returns(OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol) + end + attr_accessor :output_format + + # The quality setting for the generated image. + sig { returns(OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol) } + attr_accessor :quality + + # The size of the generated image. + sig { returns(OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol) } + attr_accessor :size + + # The type of the event. Always `image_generation.completed`. 
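#
# Because `OpenAI::Models::ImageGenStreamEvent` discriminates on this
# field, a consumer can also branch on the raw symbol; a sketch (`event`
# and the handlers are hypothetical):
#
#   case event.type
#   when :"image_generation.partial_image" then handle_partial(event)
#   when :"image_generation.completed" then handle_final(event)
#   end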
+ sig { returns(Symbol) } + attr_accessor :type + + # For `gpt-image-1` only, the token usage information for the image generation. + sig { returns(OpenAI::ImageGenCompletedEvent::Usage) } + attr_reader :usage + + sig { params(usage: OpenAI::ImageGenCompletedEvent::Usage::OrHash).void } + attr_writer :usage + + # Emitted when image generation has completed and the final image is available. + sig do + params( + b64_json: String, + background: OpenAI::ImageGenCompletedEvent::Background::OrSymbol, + created_at: Integer, + output_format: OpenAI::ImageGenCompletedEvent::OutputFormat::OrSymbol, + quality: OpenAI::ImageGenCompletedEvent::Quality::OrSymbol, + size: OpenAI::ImageGenCompletedEvent::Size::OrSymbol, + usage: OpenAI::ImageGenCompletedEvent::Usage::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Base64-encoded image data, suitable for rendering as an image. + b64_json:, + # The background setting for the generated image. + background:, + # The Unix timestamp when the event was created. + created_at:, + # The output format for the generated image. + output_format:, + # The quality setting for the generated image. + quality:, + # The size of the generated image. + size:, + # For `gpt-image-1` only, the token usage information for the image generation. + usage:, + # The type of the event. Always `image_generation.completed`. + type: :"image_generation.completed" + ) + end + + sig do + override.returns( + { + b64_json: String, + background: + OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol, + created_at: Integer, + output_format: + OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol, + quality: OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol, + size: OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol, + type: Symbol, + usage: OpenAI::ImageGenCompletedEvent::Usage + } + ) + end + def to_hash + end + + # The background setting for the generated image. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenCompletedEvent::Background) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol + ) + AUTO = + T.let(:auto, OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenCompletedEvent::Background::TaggedSymbol] + ) + end + def self.values + end + end + + # The output format for the generated image. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenCompletedEvent::OutputFormat) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::ImageGenCompletedEvent::OutputFormat::TaggedSymbol] + ) + end + def self.values + end + end + + # The quality setting for the generated image. 
+ module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenCompletedEvent::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = T.let(:low, OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol) + MEDIUM = + T.let(:medium, OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol) + HIGH = + T.let(:high, OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenCompletedEvent::Quality::TaggedSymbol] + ) + end + def self.values + end + end + + # The size of the generated image. + module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ImageGenCompletedEvent::Size) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol + ) + AUTO = T.let(:auto, OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenCompletedEvent::Size::TaggedSymbol] + ) + end + def self.values + end + end + + class Usage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ImageGenCompletedEvent::Usage, + OpenAI::Internal::AnyHash + ) + end + + # The number of tokens (images and text) in the input prompt. + sig { returns(Integer) } + attr_accessor :input_tokens + + # The input tokens detailed information for the image generation. + sig do + returns(OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails) + end + attr_reader :input_tokens_details + + sig do + params( + input_tokens_details: + OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails::OrHash + ).void + end + attr_writer :input_tokens_details + + # The number of image tokens in the output image. + sig { returns(Integer) } + attr_accessor :output_tokens + + # The total number of tokens (images and text) used for the image generation. + sig { returns(Integer) } + attr_accessor :total_tokens + + # For `gpt-image-1` only, the token usage information for the image generation. + sig do + params( + input_tokens: Integer, + input_tokens_details: + OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails::OrHash, + output_tokens: Integer, + total_tokens: Integer + ).returns(T.attached_class) + end + def self.new( + # The number of tokens (images and text) in the input prompt. + input_tokens:, + # The input tokens detailed information for the image generation. + input_tokens_details:, + # The number of image tokens in the output image. + output_tokens:, + # The total number of tokens (images and text) used for the image generation. + total_tokens: + ) + end + + sig do + override.returns( + { + input_tokens: Integer, + input_tokens_details: + OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + ) + end + def to_hash + end + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails, + OpenAI::Internal::AnyHash + ) + end + + # The number of image tokens in the input prompt. 
+ sig { returns(Integer) } + attr_accessor :image_tokens + + # The number of text tokens in the input prompt. + sig { returns(Integer) } + attr_accessor :text_tokens + + # The input tokens detailed information for the image generation. + sig do + params(image_tokens: Integer, text_tokens: Integer).returns( + T.attached_class + ) + end + def self.new( + # The number of image tokens in the input prompt. + image_tokens:, + # The number of text tokens in the input prompt. + text_tokens: + ) + end + + sig do + override.returns({ image_tokens: Integer, text_tokens: Integer }) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/image_gen_partial_image_event.rbi b/rbi/openai/models/image_gen_partial_image_event.rbi new file mode 100644 index 00000000..c582e9c9 --- /dev/null +++ b/rbi/openai/models/image_gen_partial_image_event.rbi @@ -0,0 +1,243 @@ +# typed: strong + +module OpenAI + module Models + class ImageGenPartialImageEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ImageGenPartialImageEvent, OpenAI::Internal::AnyHash) + end + + # Base64-encoded partial image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :b64_json + + # The background setting for the requested image. + sig do + returns(OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol) + end + attr_accessor :background + + # The Unix timestamp when the event was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The output format for the requested image. + sig do + returns(OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol) + end + attr_accessor :output_format + + # 0-based index for the partial image (streaming). + sig { returns(Integer) } + attr_accessor :partial_image_index + + # The quality setting for the requested image. + sig { returns(OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol) } + attr_accessor :quality + + # The size of the requested image. + sig { returns(OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol) } + attr_accessor :size + + # The type of the event. Always `image_generation.partial_image`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a partial image is available during image generation streaming. + sig do + params( + b64_json: String, + background: OpenAI::ImageGenPartialImageEvent::Background::OrSymbol, + created_at: Integer, + output_format: + OpenAI::ImageGenPartialImageEvent::OutputFormat::OrSymbol, + partial_image_index: Integer, + quality: OpenAI::ImageGenPartialImageEvent::Quality::OrSymbol, + size: OpenAI::ImageGenPartialImageEvent::Size::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Base64-encoded partial image data, suitable for rendering as an image. + b64_json:, + # The background setting for the requested image. + background:, + # The Unix timestamp when the event was created. + created_at:, + # The output format for the requested image. + output_format:, + # 0-based index for the partial image (streaming). + partial_image_index:, + # The quality setting for the requested image. + quality:, + # The size of the requested image. + size:, + # The type of the event. Always `image_generation.partial_image`. 
+ type: :"image_generation.partial_image" + ) + end + + sig do + override.returns( + { + b64_json: String, + background: + OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol, + created_at: Integer, + output_format: + OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol, + partial_image_index: Integer, + quality: OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol, + size: OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The background setting for the requested image. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Background) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ImageGenPartialImageEvent::Background::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The output format for the requested image. + module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenPartialImageEvent::OutputFormat) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ImageGenPartialImageEvent::OutputFormat::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The quality setting for the requested image. + module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let(:low, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol) + MEDIUM = + T.let( + :medium, + OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol + ) + HIGH = + T.let(:high, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol) + AUTO = + T.let(:auto, OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenPartialImageEvent::Quality::TaggedSymbol] + ) + end + def self.values + end + end + + # The size of the requested image. 
+ module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ImageGenPartialImageEvent::Size) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol + ) + AUTO = + T.let(:auto, OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ImageGenPartialImageEvent::Size::TaggedSymbol] + ) + end + def self.values + end + end + end + end +end diff --git a/rbi/openai/models/image_gen_stream_event.rbi b/rbi/openai/models/image_gen_stream_event.rbi new file mode 100644 index 00000000..2309b993 --- /dev/null +++ b/rbi/openai/models/image_gen_stream_event.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Models + # Emitted when a partial image is available during image generation streaming. + module ImageGenStreamEvent + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::ImageGenPartialImageEvent, + OpenAI::ImageGenCompletedEvent + ) + end + + sig { override.returns(T::Array[OpenAI::ImageGenStreamEvent::Variants]) } + def self.variants + end + end + end +end diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index c81dbbcf..29b5a6e2 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -60,6 +60,12 @@ module OpenAI end attr_accessor :output_format + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + sig { returns(T.nilable(Integer)) } + attr_accessor :partial_images + # The quality of the image that will be generated. # # - `auto` (default value) will automatically select the best quality for the @@ -116,6 +122,7 @@ module OpenAI output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), @@ -155,6 +162,10 @@ module OpenAI # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. output_format: nil, + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, # The quality of the image that will be generated. 
# # - `auto` (default value) will automatically select the best quality for the @@ -199,6 +210,7 @@ module OpenAI output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), diff --git a/rbi/openai/models/responses/response_output_refusal.rbi b/rbi/openai/models/responses/response_output_refusal.rbi index ef08df31..b206e330 100644 --- a/rbi/openai/models/responses/response_output_refusal.rbi +++ b/rbi/openai/models/responses/response_output_refusal.rbi @@ -12,7 +12,7 @@ module OpenAI ) end - # The refusal explanationfrom the model. + # The refusal explanation from the model. sig { returns(String) } attr_accessor :refusal @@ -23,7 +23,7 @@ module OpenAI # A refusal from the model. sig { params(refusal: String, type: Symbol).returns(T.attached_class) } def self.new( - # The refusal explanationfrom the model. + # The refusal explanation from the model. refusal:, # The type of the refusal. Always `refusal`. type: :refusal diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index e3467167..fa1bac73 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -565,6 +565,18 @@ module OpenAI end attr_writer :background + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol + ) + ) + end + attr_accessor :input_fidelity + # Optional mask for inpainting. Contains `image_url` (string, optional) and # `file_id` (string, optional). sig do @@ -695,6 +707,10 @@ module OpenAI params( background: OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_fidelity: + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol + ), input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash, model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol, @@ -714,6 +730,10 @@ module OpenAI # Background type for the generated image. One of `transparent`, `opaque`, or # `auto`. Default: `auto`. background: nil, + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + input_fidelity: nil, # Optional mask for inpainting. Contains `image_url` (string, optional) and # `file_id` (string, optional). input_image_mask: nil, @@ -746,6 +766,10 @@ module OpenAI type: Symbol, background: OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_fidelity: + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::OrSymbol + ), input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, model: @@ -806,6 +830,43 @@ module OpenAI end end + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. 
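As a hedged illustration of the fidelity knob documented above, here is how it might be passed through the image-edit surface added elsewhere in this patch series; the client construction and the input path are assumptions, not part of the diff.

require "pathname"

client = OpenAI::Client.new # conventionally reads OPENAI_API_KEY from the environment

client.images.edit(
  image: Pathname("face.png"),                # hypothetical input image
  prompt: "same person, in watercolor style",
  input_fidelity: :high                       # match facial features closely; defaults to :low
)
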
+ module InputFidelity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::InputFidelity + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + HIGH = + T.let( + :high, + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::InputFidelity::TaggedSymbol + ] + ) + end + def self.values + end + end + class InputImageMask < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index f4f67d10..66ab51ca 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -42,6 +42,8 @@ module OpenAI ) end + # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. sig do @@ -49,17 +51,21 @@ module OpenAI image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), + input_fidelity: + T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol), mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::ImageEditParams::Size::OrSymbol), user: String, + stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::ImagesResponse) end @@ -83,6 +89,10 @@ module OpenAI # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. background: nil, + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + input_fidelity: nil, # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than @@ -102,6 +112,10 @@ module OpenAI # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The # default value is `png`. output_format: nil, + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. @@ -119,10 +133,115 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. 
Instead, use `#stream_raw` or `#edit` for + # streaming and non-streaming use cases, respectively. + stream: false, request_options: {} ) end + # See {OpenAI::Resources::Images#edit} for non-streaming counterpart. + # + # Creates an edited or extended image given one or more source images and a + # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. + sig do + params( + image: OpenAI::ImageEditParams::Image::Variants, + prompt: String, + background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), + input_fidelity: + T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol), + mask: OpenAI::Internal::FileInput, + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageEditParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), + quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageEditParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageEditParams::Size::OrSymbol), + user: String, + stream: T.noreturn, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[OpenAI::ImageEditStreamEvent::Variants] + ) + end + def stream_raw( + # The image(s) to edit. Must be a supported image file or an array of images. + # + # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than + # 50MB. You can provide up to 16 images. + # + # For `dall-e-2`, you can only provide one image, and it should be a square `png` + # file less than 4MB. + image:, + # A text description of the desired image(s). The maximum length is 1000 + # characters for `dall-e-2`, and 32000 characters for `gpt-image-1`. + prompt:, + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + background: nil, + # Control how much effort the model will exert to match the style and features, + # especially facial features, of input images. This parameter is only supported + # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`. + input_fidelity: nil, + # An additional image whose fully transparent areas (e.g. where alpha is zero) + # indicate where `image` should be edited. If there are multiple images provided, + # the mask will be applied on the first image. Must be a valid PNG file, less than + # 4MB, and have the same dimensions as `image`. + mask: nil, + # The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are + # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` + # is used. + model: nil, + # The number of images to generate. Must be between 1 and 10. + n: nil, + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + output_compression: nil, + # The format in which the generated images are returned. This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. The + # default value is `png`. + output_format: nil, + # The number of partial images to generate. 
This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, + # The quality of the image that will be generated. `high`, `medium` and `low` are + # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. + # Defaults to `auto`. + quality: nil, + # The format in which the generated images are returned. Must be one of `url` or + # `b64_json`. URLs are only valid for 60 minutes after the image has been + # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` + # will always return base64-encoded images. + response_format: nil, + # The size of the generated images. Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. + size: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + user: nil, + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#edit` for + # streaming and non-streaming use cases, respectively. + stream: true, + request_options: {} + ) + end + + # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). sig do @@ -137,12 +256,14 @@ module OpenAI output_compression: T.nilable(Integer), output_format: T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), response_format: T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), size: T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol), style: T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol), user: String, + stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::ImagesResponse) end @@ -176,6 +297,107 @@ module OpenAI # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. output_format: nil, + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, + # The quality of the image that will be generated. + # + # - `auto` (default value) will automatically select the best quality for the + # given model. + # - `high`, `medium` and `low` are supported for `gpt-image-1`. + # - `hd` and `standard` are supported for `dall-e-3`. + # - `standard` is the only option for `dall-e-2`. + quality: nil, + # The format in which generated images with `dall-e-2` and `dall-e-3` are + # returned. Must be one of `url` or `b64_json`. URLs are only valid for 60 minutes + # after the image has been generated. This parameter isn't supported for + # `gpt-image-1` which will always return base64-encoded images. + response_format: nil, + # The size of the generated images. 
Must be one of `1024x1024`, `1536x1024` + # (landscape), `1024x1536` (portrait), or `auto` (default value) for + # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and + # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. + size: nil, + # The style of the generated images. This parameter is only supported for + # `dall-e-3`. Must be one of `vivid` or `natural`. Vivid causes the model to lean + # towards generating hyper-real and dramatic images. Natural causes the model to + # produce more natural, less hyper-real looking images. + style: nil, + # A unique identifier representing your end-user, which can help OpenAI to monitor + # and detect abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + user: nil, + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#generate` + # for streaming and non-streaming use cases, respectively. + stream: false, + request_options: {} + ) + end + + # See {OpenAI::Resources::Images#generate} for non-streaming counterpart. + # + # Creates an image given a prompt. + # [Learn more](https://platform.openai.com/docs/guides/images). + sig do + params( + prompt: String, + background: + T.nilable(OpenAI::ImageGenerateParams::Background::OrSymbol), + model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), + moderation: + T.nilable(OpenAI::ImageGenerateParams::Moderation::OrSymbol), + n: T.nilable(Integer), + output_compression: T.nilable(Integer), + output_format: + T.nilable(OpenAI::ImageGenerateParams::OutputFormat::OrSymbol), + partial_images: T.nilable(Integer), + quality: T.nilable(OpenAI::ImageGenerateParams::Quality::OrSymbol), + response_format: + T.nilable(OpenAI::ImageGenerateParams::ResponseFormat::OrSymbol), + size: T.nilable(OpenAI::ImageGenerateParams::Size::OrSymbol), + style: T.nilable(OpenAI::ImageGenerateParams::Style::OrSymbol), + user: String, + stream: T.noreturn, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::Stream[OpenAI::ImageGenStreamEvent::Variants] + ) + end + def stream_raw( + # A text description of the desired image(s). The maximum length is 32000 + # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters + # for `dall-e-3`. + prompt:, + # Allows to set transparency for the background of the generated image(s). This + # parameter is only supported for `gpt-image-1`. Must be one of `transparent`, + # `opaque` or `auto` (default value). When `auto` is used, the model will + # automatically determine the best background for the image. + # + # If `transparent`, the output format needs to support transparency, so it should + # be set to either `png` (default value) or `webp`. + background: nil, + # The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or + # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to + # `gpt-image-1` is used. + model: nil, + # Control the content-moderation level for images generated by `gpt-image-1`. Must + # be either `low` for less restrictive filtering or `auto` (default value). + moderation: nil, + # The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only + # `n=1` is supported. + n: nil, + # The compression level (0-100%) for the generated images. This parameter is only + # supported for `gpt-image-1` with the `webp` or `jpeg` output formats, and + # defaults to 100. + output_compression: nil, + # The format in which the generated images are returned. 
This parameter is only + # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. + output_format: nil, + # The number of partial images to generate. This parameter is used for streaming + # responses that return partial images. Value must be between 0 and 3. When set to + # 0, the response will be a single image sent in one streaming event. + partial_images: nil, # The quality of the image that will be generated. # # - `auto` (default value) will automatically select the best quality for the @@ -203,6 +425,9 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, + # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#generate` + # for streaming and non-streaming use cases, respectively. + stream: true, request_options: {} ) end diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index 997e6f93..1c5e1e9d 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -111,10 +111,22 @@ module OpenAI class ImageCreateVariationParams = OpenAI::Models::ImageCreateVariationParams + class ImageEditCompletedEvent = OpenAI::Models::ImageEditCompletedEvent + class ImageEditParams = OpenAI::Models::ImageEditParams + class ImageEditPartialImageEvent = OpenAI::Models::ImageEditPartialImageEvent + + module ImageEditStreamEvent = OpenAI::Models::ImageEditStreamEvent + + class ImageGenCompletedEvent = OpenAI::Models::ImageGenCompletedEvent + class ImageGenerateParams = OpenAI::Models::ImageGenerateParams + class ImageGenPartialImageEvent = OpenAI::Models::ImageGenPartialImageEvent + + module ImageGenStreamEvent = OpenAI::Models::ImageGenStreamEvent + module ImageModel = OpenAI::Models::ImageModel class ImagesResponse = OpenAI::Models::ImagesResponse diff --git a/sig/openai/models/image_edit_completed_event.rbs b/sig/openai/models/image_edit_completed_event.rbs new file mode 100644 index 00000000..e21a10d8 --- /dev/null +++ b/sig/openai/models/image_edit_completed_event.rbs @@ -0,0 +1,150 @@ +module OpenAI + module Models + type image_edit_completed_event = + { + :b64_json => String, + background: OpenAI::Models::ImageEditCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditCompletedEvent::output_format, + quality: OpenAI::Models::ImageEditCompletedEvent::quality, + size: OpenAI::Models::ImageEditCompletedEvent::size, + type: :"image_edit.completed", + usage: OpenAI::ImageEditCompletedEvent::Usage + } + + class ImageEditCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor b64_json: String + + attr_accessor background: OpenAI::Models::ImageEditCompletedEvent::background + + attr_accessor created_at: Integer + + attr_accessor output_format: OpenAI::Models::ImageEditCompletedEvent::output_format + + attr_accessor quality: OpenAI::Models::ImageEditCompletedEvent::quality + + attr_accessor size: OpenAI::Models::ImageEditCompletedEvent::size + + attr_accessor type: :"image_edit.completed" + + attr_accessor usage: OpenAI::ImageEditCompletedEvent::Usage + + def initialize: ( + b64_json: String, + background: OpenAI::Models::ImageEditCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditCompletedEvent::output_format, + quality: OpenAI::Models::ImageEditCompletedEvent::quality, + size: OpenAI::Models::ImageEditCompletedEvent::size, + usage: OpenAI::ImageEditCompletedEvent::Usage, + ?type: :"image_edit.completed" + ) -> void + + def to_hash: -> { + :b64_json => String, + background: 
OpenAI::Models::ImageEditCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditCompletedEvent::output_format, + quality: OpenAI::Models::ImageEditCompletedEvent::quality, + size: OpenAI::Models::ImageEditCompletedEvent::size, + type: :"image_edit.completed", + usage: OpenAI::ImageEditCompletedEvent::Usage + } + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditCompletedEvent::background] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::ImageEditCompletedEvent::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditCompletedEvent::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditCompletedEvent::size] + end + + type usage = + { + input_tokens: Integer, + input_tokens_details: OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor input_tokens: Integer + + attr_accessor input_tokens_details: OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails + + attr_accessor output_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + input_tokens: Integer, + input_tokens_details: OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> { + input_tokens: Integer, + input_tokens_details: OpenAI::ImageEditCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + + type input_tokens_details = + { image_tokens: Integer, text_tokens: Integer } + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + attr_accessor image_tokens: Integer + + attr_accessor text_tokens: Integer + + def initialize: (image_tokens: Integer, text_tokens: Integer) -> void + + def to_hash: -> { image_tokens: Integer, text_tokens: Integer } + end + end + end + end +end diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index ca2820e9..66f4812f 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -5,11 +5,13 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, background: OpenAI::Models::ImageEditParams::background?, + input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity?, mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, output_compression: Integer?, output_format: OpenAI::Models::ImageEditParams::output_format?, + partial_images: Integer?, quality: OpenAI::Models::ImageEditParams::quality?, response_format: OpenAI::Models::ImageEditParams::response_format?, size: OpenAI::Models::ImageEditParams::size?, @@ -27,6 +29,8 @@ module OpenAI attr_accessor background: 
OpenAI::Models::ImageEditParams::background? + attr_accessor input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity? + attr_reader mask: OpenAI::Internal::file_input? def mask=: (OpenAI::Internal::file_input) -> OpenAI::Internal::file_input @@ -39,6 +43,8 @@ module OpenAI attr_accessor output_format: OpenAI::Models::ImageEditParams::output_format? + attr_accessor partial_images: Integer? + attr_accessor quality: OpenAI::Models::ImageEditParams::quality? attr_accessor response_format: OpenAI::Models::ImageEditParams::response_format? @@ -53,11 +59,13 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, + ?input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity?, ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?output_compression: Integer?, ?output_format: OpenAI::Models::ImageEditParams::output_format?, + ?partial_images: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, @@ -69,11 +77,13 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, background: OpenAI::Models::ImageEditParams::background?, + input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity?, mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, output_compression: Integer?, output_format: OpenAI::Models::ImageEditParams::output_format?, + partial_images: Integer?, quality: OpenAI::Models::ImageEditParams::quality?, response_format: OpenAI::Models::ImageEditParams::response_format?, size: OpenAI::Models::ImageEditParams::size?, @@ -104,6 +114,17 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::background] end + type input_fidelity = :high | :low + + module InputFidelity + extend OpenAI::Internal::Type::Enum + + HIGH: :high + LOW: :low + + def self?.values: -> ::Array[OpenAI::Models::ImageEditParams::input_fidelity] + end + type model = String | OpenAI::Models::image_model module Model diff --git a/sig/openai/models/image_edit_partial_image_event.rbs b/sig/openai/models/image_edit_partial_image_event.rbs new file mode 100644 index 00000000..1a96d108 --- /dev/null +++ b/sig/openai/models/image_edit_partial_image_event.rbs @@ -0,0 +1,105 @@ +module OpenAI + module Models + type image_edit_partial_image_event = + { + :b64_json => String, + background: OpenAI::Models::ImageEditPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageEditPartialImageEvent::quality, + size: OpenAI::Models::ImageEditPartialImageEvent::size, + type: :"image_edit.partial_image" + } + + class ImageEditPartialImageEvent < OpenAI::Internal::Type::BaseModel + attr_accessor b64_json: String + + attr_accessor background: OpenAI::Models::ImageEditPartialImageEvent::background + + attr_accessor created_at: Integer + + attr_accessor output_format: OpenAI::Models::ImageEditPartialImageEvent::output_format + + attr_accessor partial_image_index: Integer + + attr_accessor quality: OpenAI::Models::ImageEditPartialImageEvent::quality + + attr_accessor size: OpenAI::Models::ImageEditPartialImageEvent::size + + attr_accessor type: :"image_edit.partial_image" + + def initialize: ( + b64_json: String, + background: 
OpenAI::Models::ImageEditPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageEditPartialImageEvent::quality, + size: OpenAI::Models::ImageEditPartialImageEvent::size, + ?type: :"image_edit.partial_image" + ) -> void + + def to_hash: -> { + :b64_json => String, + background: OpenAI::Models::ImageEditPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageEditPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageEditPartialImageEvent::quality, + size: OpenAI::Models::ImageEditPartialImageEvent::size, + type: :"image_edit.partial_image" + } + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditPartialImageEvent::background] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::ImageEditPartialImageEvent::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditPartialImageEvent::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageEditPartialImageEvent::size] + end + end + end +end diff --git a/sig/openai/models/image_edit_stream_event.rbs b/sig/openai/models/image_edit_stream_event.rbs new file mode 100644 index 00000000..0b0b65ce --- /dev/null +++ b/sig/openai/models/image_edit_stream_event.rbs @@ -0,0 +1,12 @@ +module OpenAI + module Models + type image_edit_stream_event = + OpenAI::ImageEditPartialImageEvent | OpenAI::ImageEditCompletedEvent + + module ImageEditStreamEvent + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::image_edit_stream_event] + end + end +end diff --git a/sig/openai/models/image_gen_completed_event.rbs b/sig/openai/models/image_gen_completed_event.rbs new file mode 100644 index 00000000..c47de644 --- /dev/null +++ b/sig/openai/models/image_gen_completed_event.rbs @@ -0,0 +1,150 @@ +module OpenAI + module Models + type image_gen_completed_event = + { + :b64_json => String, + background: OpenAI::Models::ImageGenCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenCompletedEvent::output_format, + quality: OpenAI::Models::ImageGenCompletedEvent::quality, + size: OpenAI::Models::ImageGenCompletedEvent::size, + type: :"image_generation.completed", + usage: OpenAI::ImageGenCompletedEvent::Usage + } + + class ImageGenCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor b64_json: String + + attr_accessor background: OpenAI::Models::ImageGenCompletedEvent::background + + attr_accessor created_at: Integer + + attr_accessor output_format: OpenAI::Models::ImageGenCompletedEvent::output_format + + attr_accessor quality: OpenAI::Models::ImageGenCompletedEvent::quality + + attr_accessor size: 
OpenAI::Models::ImageGenCompletedEvent::size + + attr_accessor type: :"image_generation.completed" + + attr_accessor usage: OpenAI::ImageGenCompletedEvent::Usage + + def initialize: ( + b64_json: String, + background: OpenAI::Models::ImageGenCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenCompletedEvent::output_format, + quality: OpenAI::Models::ImageGenCompletedEvent::quality, + size: OpenAI::Models::ImageGenCompletedEvent::size, + usage: OpenAI::ImageGenCompletedEvent::Usage, + ?type: :"image_generation.completed" + ) -> void + + def to_hash: -> { + :b64_json => String, + background: OpenAI::Models::ImageGenCompletedEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenCompletedEvent::output_format, + quality: OpenAI::Models::ImageGenCompletedEvent::quality, + size: OpenAI::Models::ImageGenCompletedEvent::size, + type: :"image_generation.completed", + usage: OpenAI::ImageGenCompletedEvent::Usage + } + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenCompletedEvent::background] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::ImageGenCompletedEvent::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenCompletedEvent::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenCompletedEvent::size] + end + + type usage = + { + input_tokens: Integer, + input_tokens_details: OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + + class Usage < OpenAI::Internal::Type::BaseModel + attr_accessor input_tokens: Integer + + attr_accessor input_tokens_details: OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails + + attr_accessor output_tokens: Integer + + attr_accessor total_tokens: Integer + + def initialize: ( + input_tokens: Integer, + input_tokens_details: OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + ) -> void + + def to_hash: -> { + input_tokens: Integer, + input_tokens_details: OpenAI::ImageGenCompletedEvent::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + + type input_tokens_details = + { image_tokens: Integer, text_tokens: Integer } + + class InputTokensDetails < OpenAI::Internal::Type::BaseModel + attr_accessor image_tokens: Integer + + attr_accessor text_tokens: Integer + + def initialize: (image_tokens: Integer, text_tokens: Integer) -> void + + def to_hash: -> { image_tokens: Integer, text_tokens: Integer } + end + end + end + end +end diff --git a/sig/openai/models/image_gen_partial_image_event.rbs b/sig/openai/models/image_gen_partial_image_event.rbs new file mode 100644 index 00000000..bffb443d --- /dev/null +++ b/sig/openai/models/image_gen_partial_image_event.rbs @@ -0,0 +1,105 @@ +module 
OpenAI + module Models + type image_gen_partial_image_event = + { + :b64_json => String, + background: OpenAI::Models::ImageGenPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageGenPartialImageEvent::quality, + size: OpenAI::Models::ImageGenPartialImageEvent::size, + type: :"image_generation.partial_image" + } + + class ImageGenPartialImageEvent < OpenAI::Internal::Type::BaseModel + attr_accessor b64_json: String + + attr_accessor background: OpenAI::Models::ImageGenPartialImageEvent::background + + attr_accessor created_at: Integer + + attr_accessor output_format: OpenAI::Models::ImageGenPartialImageEvent::output_format + + attr_accessor partial_image_index: Integer + + attr_accessor quality: OpenAI::Models::ImageGenPartialImageEvent::quality + + attr_accessor size: OpenAI::Models::ImageGenPartialImageEvent::size + + attr_accessor type: :"image_generation.partial_image" + + def initialize: ( + b64_json: String, + background: OpenAI::Models::ImageGenPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageGenPartialImageEvent::quality, + size: OpenAI::Models::ImageGenPartialImageEvent::size, + ?type: :"image_generation.partial_image" + ) -> void + + def to_hash: -> { + :b64_json => String, + background: OpenAI::Models::ImageGenPartialImageEvent::background, + created_at: Integer, + output_format: OpenAI::Models::ImageGenPartialImageEvent::output_format, + partial_image_index: Integer, + quality: OpenAI::Models::ImageGenPartialImageEvent::quality, + size: OpenAI::Models::ImageGenPartialImageEvent::size, + type: :"image_generation.partial_image" + } + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenPartialImageEvent::background] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::ImageGenPartialImageEvent::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenPartialImageEvent::quality] + end + + type size = :"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::ImageGenPartialImageEvent::size] + end + end + end +end diff --git a/sig/openai/models/image_gen_stream_event.rbs b/sig/openai/models/image_gen_stream_event.rbs new file mode 100644 index 00000000..b1489c24 --- /dev/null +++ b/sig/openai/models/image_gen_stream_event.rbs @@ -0,0 +1,12 @@ +module OpenAI + module Models + type image_gen_stream_event = + OpenAI::ImageGenPartialImageEvent | OpenAI::ImageGenCompletedEvent + + module ImageGenStreamEvent + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::image_gen_stream_event] + end + end +end diff --git a/sig/openai/models/image_generate_params.rbs 
b/sig/openai/models/image_generate_params.rbs index e9b0e2e0..e870a248 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -9,6 +9,7 @@ module OpenAI n: Integer?, output_compression: Integer?, output_format: OpenAI::Models::ImageGenerateParams::output_format?, + partial_images: Integer?, quality: OpenAI::Models::ImageGenerateParams::quality?, response_format: OpenAI::Models::ImageGenerateParams::response_format?, size: OpenAI::Models::ImageGenerateParams::size?, @@ -35,6 +36,8 @@ module OpenAI attr_accessor output_format: OpenAI::Models::ImageGenerateParams::output_format? + attr_accessor partial_images: Integer? + attr_accessor quality: OpenAI::Models::ImageGenerateParams::quality? attr_accessor response_format: OpenAI::Models::ImageGenerateParams::response_format? @@ -55,6 +58,7 @@ module OpenAI ?n: Integer?, ?output_compression: Integer?, ?output_format: OpenAI::Models::ImageGenerateParams::output_format?, + ?partial_images: Integer?, ?quality: OpenAI::Models::ImageGenerateParams::quality?, ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, ?size: OpenAI::Models::ImageGenerateParams::size?, @@ -71,6 +75,7 @@ module OpenAI n: Integer?, output_compression: Integer?, output_format: OpenAI::Models::ImageGenerateParams::output_format?, + partial_images: Integer?, quality: OpenAI::Models::ImageGenerateParams::quality?, response_format: OpenAI::Models::ImageGenerateParams::response_format?, size: OpenAI::Models::ImageGenerateParams::size?, diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 87e9506b..fffb117b 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -213,6 +213,7 @@ module OpenAI { type: :image_generation, background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + input_fidelity: OpenAI::Models::Responses::Tool::ImageGeneration::input_fidelity?, input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, model: OpenAI::Models::Responses::Tool::ImageGeneration::model, moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, @@ -232,6 +233,8 @@ module OpenAI OpenAI::Models::Responses::Tool::ImageGeneration::background ) -> OpenAI::Models::Responses::Tool::ImageGeneration::background + attr_accessor input_fidelity: OpenAI::Models::Responses::Tool::ImageGeneration::input_fidelity? + attr_reader input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask? 
def input_image_mask=: ( @@ -278,6 +281,7 @@ module OpenAI def initialize: ( ?background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + ?input_fidelity: OpenAI::Models::Responses::Tool::ImageGeneration::input_fidelity?, ?input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, ?model: OpenAI::Models::Responses::Tool::ImageGeneration::model, ?moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, @@ -292,6 +296,7 @@ module OpenAI def to_hash: -> { type: :image_generation, background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + input_fidelity: OpenAI::Models::Responses::Tool::ImageGeneration::input_fidelity?, input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, model: OpenAI::Models::Responses::Tool::ImageGeneration::model, moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, @@ -314,6 +319,17 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::background] end + type input_fidelity = :high | :low + + module InputFidelity + extend OpenAI::Internal::Type::Enum + + HIGH: :high + LOW: :low + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::input_fidelity] + end + type input_image_mask = { file_id: String, image_url: String } class InputImageMask < OpenAI::Internal::Type::BaseModel diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 43595baa..7944859c 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -15,11 +15,13 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, + ?input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity?, ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?output_compression: Integer?, ?output_format: OpenAI::Models::ImageEditParams::output_format?, + ?partial_images: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, ?response_format: OpenAI::Models::ImageEditParams::response_format?, ?size: OpenAI::Models::ImageEditParams::size?, @@ -27,6 +29,24 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::ImagesResponse + def stream_raw: ( + image: OpenAI::Models::ImageEditParams::image, + prompt: String, + ?background: OpenAI::Models::ImageEditParams::background?, + ?input_fidelity: OpenAI::Models::ImageEditParams::input_fidelity?, + ?mask: OpenAI::Internal::file_input, + ?model: OpenAI::Models::ImageEditParams::model?, + ?n: Integer?, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageEditParams::output_format?, + ?partial_images: Integer?, + ?quality: OpenAI::Models::ImageEditParams::quality?, + ?response_format: OpenAI::Models::ImageEditParams::response_format?, + ?size: OpenAI::Models::ImageEditParams::size?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::Stream[OpenAI::Models::image_edit_stream_event] + def generate: ( prompt: String, ?background: OpenAI::Models::ImageGenerateParams::background?, @@ -35,6 +55,7 @@ module OpenAI ?n: Integer?, ?output_compression: Integer?, ?output_format: OpenAI::Models::ImageGenerateParams::output_format?, + ?partial_images: Integer?, ?quality: OpenAI::Models::ImageGenerateParams::quality?, ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, ?size: OpenAI::Models::ImageGenerateParams::size?, @@ -43,6 +64,23 @@ module OpenAI 
?request_options: OpenAI::request_opts ) -> OpenAI::ImagesResponse + def stream_raw: ( + prompt: String, + ?background: OpenAI::Models::ImageGenerateParams::background?, + ?model: OpenAI::Models::ImageGenerateParams::model?, + ?moderation: OpenAI::Models::ImageGenerateParams::moderation?, + ?n: Integer?, + ?output_compression: Integer?, + ?output_format: OpenAI::Models::ImageGenerateParams::output_format?, + ?partial_images: Integer?, + ?quality: OpenAI::Models::ImageGenerateParams::quality?, + ?response_format: OpenAI::Models::ImageGenerateParams::response_format?, + ?size: OpenAI::Models::ImageGenerateParams::size?, + ?style: OpenAI::Models::ImageGenerateParams::style?, + ?user: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::Stream[OpenAI::Models::image_gen_stream_event] + def initialize: (client: OpenAI::Client) -> void end end From 7590b92c838a66b2029a1ca79a0c4c45f2efa4c3 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Jul 2025 16:45:24 +0000 Subject: [PATCH 260/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3b07edf5..a26ebfc1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.13.1" + ".": "0.14.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 4cf4368d..8bfb5ff5 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.13.1) + openai (0.14.0) connection_pool GEM diff --git a/README.md b/README.md index 76df8a4c..0795fd9b 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.13.1" +gem "openai", "~> 0.14.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index ddfc5bda..ef837985 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.13.1" + VERSION = "0.14.0" end From 955c84c8bdc3533e10a5584ec21febc1628df58c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 16 Jul 2025 19:06:45 +0000 Subject: [PATCH 261/295] feat(api): manual updates --- .stats.yml | 2 +- lib/openai/models/image_edit_params.rb | 2 +- lib/openai/models/image_generate_params.rb | 2 +- lib/openai/resources/images.rb | 16 +++++++-------- rbi/openai/resources/images.rbi | 24 +++++++++++----------- sig/openai/resources/images.rbs | 4 ++-- 6 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.stats.yml b/.stats.yml index ca458135..0d1c9702 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-670ea0d2cc44f52a87dd3cadea45632953283e0636ba30788fdbdb22a232ccac.yml openapi_spec_hash: d8b7d38911fead545adf3e4297956410 -config_hash: 5525bda35e48ea6387c6175c4d1651fa +config_hash: b2a4028fdbb27a08de89831ed310e244 diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 05014dd9..57311392 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see 
OpenAI::Resources::Images#edit # - # @see OpenAI::Resources::Images#stream_raw + # @see OpenAI::Resources::Images#edit_stream_raw class ImageEditParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index d63bfdf7..1c420d23 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -4,7 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#generate # - # @see OpenAI::Resources::Images#stream_raw + # @see OpenAI::Resources::Images#generate_stream_raw class ImageGenerateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index 2779b538..f245cab7 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -39,7 +39,7 @@ def create_variation(params) ) end - # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Images#edit_stream_raw} for streaming counterpart. # # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. @@ -85,7 +85,7 @@ def create_variation(params) def edit(params) parsed, options = OpenAI::ImageEditParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#edit_stream_raw` for the streaming use case." raise ArgumentError.new(message) end @client.request( @@ -106,7 +106,7 @@ def edit(params) # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. # - # @overload stream_raw(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @overload edit_stream_raw(image:, prompt:, background: nil, input_fidelity: nil, mask: nil, model: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # @@ -141,7 +141,7 @@ def edit(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::ImageEditParams - def stream_raw(params) + def edit_stream_raw(params) parsed, options = OpenAI::ImageEditParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#edit` for the non-streaming use case." @@ -159,7 +159,7 @@ def stream_raw(params) ) end - # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Images#generate_stream_raw} for streaming counterpart. # # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageGenerateParams} for more details. @@ -203,7 +203,7 @@ def stream_raw(params) def generate(params) parsed, options = OpenAI::ImageGenerateParams.dump_request(params) if parsed[:stream] - message = "Please use `#stream_raw` for the streaming use case." + message = "Please use `#generate_stream_raw` for the streaming use case." 
raise ArgumentError.new(message) end @client.request( @@ -223,7 +223,7 @@ def generate(params) # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). # - # @overload stream_raw(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @overload generate_stream_raw(prompt:, background: nil, model: nil, moderation: nil, n: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # @@ -256,7 +256,7 @@ def generate(params) # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::ImageGenerateParams - def stream_raw(params) + def generate_stream_raw(params) parsed, options = OpenAI::ImageGenerateParams.dump_request(params) unless parsed.fetch(:stream, true) message = "Please use `#generate` for the non-streaming use case." diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index 66ab51ca..03c96283 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -42,7 +42,7 @@ module OpenAI ) end - # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Images#edit_stream_raw} for streaming counterpart. # # Creates an edited or extended image given one or more source images and a # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. @@ -133,8 +133,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#edit` for - # streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#edit_stream_raw` or + # `#edit` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -169,7 +169,7 @@ module OpenAI OpenAI::Internal::Stream[OpenAI::ImageEditStreamEvent::Variants] ) end - def stream_raw( + def edit_stream_raw( # The image(s) to edit. Must be a supported image file or an array of images. # # For `gpt-image-1`, each image should be a `png`, `webp`, or `jpg` file less than @@ -233,14 +233,14 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#edit` for - # streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#edit_stream_raw` or + # `#edit` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) end - # See {OpenAI::Resources::Images#stream_raw} for streaming counterpart. + # See {OpenAI::Resources::Images#generate_stream_raw} for streaming counterpart. # # Creates an image given a prompt. # [Learn more](https://platform.openai.com/docs/guides/images). @@ -328,8 +328,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#generate` - # for streaming and non-streaming use cases, respectively. 
+ # There is no need to provide `stream:`. Instead, use `#generate_stream_raw` or + # `#generate` for streaming and non-streaming use cases, respectively. stream: false, request_options: {} ) @@ -364,7 +364,7 @@ module OpenAI OpenAI::Internal::Stream[OpenAI::ImageGenStreamEvent::Variants] ) end - def stream_raw( + def generate_stream_raw( # A text description of the desired image(s). The maximum length is 32000 # characters for `gpt-image-1`, 1000 characters for `dall-e-2` and 4000 characters # for `dall-e-3`. @@ -425,8 +425,8 @@ module OpenAI # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, - # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#generate` - # for streaming and non-streaming use cases, respectively. + # There is no need to provide `stream:`. Instead, use `#generate_stream_raw` or + # `#generate` for streaming and non-streaming use cases, respectively. stream: true, request_options: {} ) diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 7944859c..bd5dfbcf 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -29,7 +29,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::ImagesResponse - def stream_raw: ( + def edit_stream_raw: ( image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, @@ -64,7 +64,7 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> OpenAI::ImagesResponse - def stream_raw: ( + def generate_stream_raw: ( prompt: String, ?background: OpenAI::Models::ImageGenerateParams::background?, ?model: OpenAI::Models::ImageGenerateParams::model?, From de5be7f18f805c1ba1a7ab32bb8fb97ea91971cb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 15:27:23 +0000 Subject: [PATCH 262/295] fix(internal): tests should use normalized property names --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 0795fd9b..96d372c5 100644 --- a/README.md +++ b/README.md @@ -254,7 +254,7 @@ You can provide typesafe request parameters like so: ```ruby openai.chat.completions.create( - messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], + messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")], model: :"gpt-4.1" ) ``` @@ -270,7 +270,7 @@ openai.chat.completions.create( # You can also splat a full Params class: params = OpenAI::Chat::CompletionCreateParams.new( - messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(role: "user", content: "Say this is a test")], + messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")], model: :"gpt-4.1" ) openai.chat.completions.create(**params) From 8fb785cd53c9457b3e655037d991e342db1c4705 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 21 Jul 2025 21:17:18 +0000 Subject: [PATCH 263/295] chore(api): event shapes more accurate --- .stats.yml | 6 +- lib/openai.rb | 2 - .../models/audio/speech_create_params.rb | 9 -- lib/openai/models/chat/chat_completion.rb | 4 +- .../chat/chat_completion_audio_param.rb | 9 -- .../models/chat/chat_completion_chunk.rb | 4 +- .../models/chat/completion_create_params.rb | 4 +- lib/openai/models/function_definition.rb | 2 +- 
lib/openai/models/image_edit_params.rb | 3 + lib/openai/models/image_generate_params.rb | 3 + lib/openai/models/images_response.rb | 7 +- lib/openai/models/responses/response.rb | 4 +- .../response_code_interpreter_tool_call.rb | 8 +- .../responses/response_create_params.rb | 4 +- ...response_mcp_call_arguments_delta_event.rb | 13 +- .../response_mcp_call_arguments_done_event.rb | 11 +- .../response_mcp_call_completed_event.rb | 18 ++- .../response_mcp_call_failed_event.rb | 18 ++- ...response_mcp_list_tools_completed_event.rb | 18 ++- .../response_mcp_list_tools_failed_event.rb | 18 ++- ...sponse_mcp_list_tools_in_progress_event.rb | 18 ++- .../response_reasoning_delta_event.rb | 60 -------- .../response_reasoning_done_event.rb | 60 -------- .../models/responses/response_stream_event.rb | 8 +- .../responses/response_text_delta_event.rb | 67 ++++++++- .../responses/response_text_done_event.rb | 67 ++++++++- lib/openai/resources/responses.rb | 4 +- .../models/audio/speech_create_params.rbi | 9 -- rbi/openai/models/chat/chat_completion.rbi | 6 +- .../chat/chat_completion_audio_param.rbi | 15 -- .../models/chat/chat_completion_chunk.rbi | 6 +- .../models/chat/completion_create_params.rbi | 6 +- rbi/openai/models/function_definition.rbi | 4 +- rbi/openai/models/image_edit_params.rbi | 6 + rbi/openai/models/image_generate_params.rbi | 6 + rbi/openai/models/images_response.rbi | 4 +- rbi/openai/models/responses/response.rbi | 6 +- .../response_code_interpreter_tool_call.rbi | 9 +- .../responses/response_create_params.rbi | 6 +- ...esponse_mcp_call_arguments_delta_event.rbi | 12 +- ...response_mcp_call_arguments_done_event.rbi | 10 +- .../response_mcp_call_completed_event.rbi | 32 ++++- .../response_mcp_call_failed_event.rbi | 32 ++++- ...esponse_mcp_list_tools_completed_event.rbi | 32 ++++- .../response_mcp_list_tools_failed_event.rbi | 32 ++++- ...ponse_mcp_list_tools_in_progress_event.rbi | 32 ++++- .../response_reasoning_delta_event.rbi | 83 ----------- .../response_reasoning_done_event.rbi | 83 ----------- .../responses/response_stream_event.rbi | 2 - .../responses/response_text_delta_event.rbi | 131 ++++++++++++++++++ .../responses/response_text_done_event.rbi | 131 ++++++++++++++++++ rbi/openai/resources/chat/completions.rbi | 4 +- rbi/openai/resources/images.rbi | 12 ++ rbi/openai/resources/responses.rbi | 4 +- .../models/audio/speech_create_params.rbs | 6 - .../chat/chat_completion_audio_param.rbs | 6 - ...esponse_mcp_call_arguments_delta_event.rbs | 8 +- ...response_mcp_call_arguments_done_event.rbs | 8 +- .../response_mcp_call_completed_event.rbs | 15 +- .../response_mcp_call_failed_event.rbs | 15 +- ...esponse_mcp_list_tools_completed_event.rbs | 15 +- .../response_mcp_list_tools_failed_event.rbs | 15 +- ...ponse_mcp_list_tools_in_progress_event.rbs | 10 ++ .../response_reasoning_delta_event.rbs | 47 ------- .../response_reasoning_done_event.rbs | 47 ------- .../responses/response_stream_event.rbs | 2 - .../responses/response_text_delta_event.rbs | 52 +++++++ .../responses/response_text_done_event.rbs | 52 +++++++ 68 files changed, 908 insertions(+), 554 deletions(-) delete mode 100644 lib/openai/models/responses/response_reasoning_delta_event.rb delete mode 100644 lib/openai/models/responses/response_reasoning_done_event.rb delete mode 100644 rbi/openai/models/responses/response_reasoning_delta_event.rbi delete mode 100644 rbi/openai/models/responses/response_reasoning_done_event.rbi delete mode 100644 sig/openai/models/responses/response_reasoning_delta_event.rbs delete mode 100644 
sig/openai/models/responses/response_reasoning_done_event.rbs diff --git a/.stats.yml b/.stats.yml index 0d1c9702..6fdd98ee 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-670ea0d2cc44f52a87dd3cadea45632953283e0636ba30788fdbdb22a232ccac.yml -openapi_spec_hash: d8b7d38911fead545adf3e4297956410 -config_hash: b2a4028fdbb27a08de89831ed310e244 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-b2a451656ca64d30d174391ebfd94806b4de3ab76dc55b92843cfb7f1a54ecb6.yml +openapi_spec_hash: 27d9691b400f28c17ef063a1374048b0 +config_hash: e822d0c9082c8b312264403949243179 diff --git a/lib/openai.rb b/lib/openai.rb index d0794067..11168cfb 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -420,8 +420,6 @@ require_relative "openai/models/responses/response_output_text_annotation_added_event" require_relative "openai/models/responses/response_prompt" require_relative "openai/models/responses/response_queued_event" -require_relative "openai/models/responses/response_reasoning_delta_event" -require_relative "openai/models/responses/response_reasoning_done_event" require_relative "openai/models/responses/response_reasoning_item" require_relative "openai/models/responses/response_reasoning_summary_delta_event" require_relative "openai/models/responses/response_reasoning_summary_done_event" diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index cf875025..a260ccdd 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -111,12 +111,6 @@ module Voice variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO } - variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE } - - variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX } - - variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA } - variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE } variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER } @@ -137,9 +131,6 @@ module Voice BALLAD = :ballad CORAL = :coral ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova SAGE = :sage SHIMMER = :shimmer VERSE = :verse diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 7da96170..f104825f 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -44,7 +44,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -193,7 +193,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. 
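Callers pinned to one of the removed voices will need to migrate; a hedged sketch with one of the voices that remains in the union (the model name is an assumption):

```ruby
require "openai"

openai = OpenAI::Client.new

# :fable, :onyx, and :nova were dropped from the Voice union above;
# :coral is one of the documented voices that remains.
audio = openai.audio.speech.create(
  model: :"gpt-4o-mini-tts", # assumed TTS model name
  voice: :coral,
  input: "The quick brown fox jumped over the lazy dog."
)
```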
- # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 46f1463f..04df08e2 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -67,12 +67,6 @@ module Voice variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE } - - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX } - - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA } - variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE } variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER } @@ -93,9 +87,6 @@ module Voice BALLAD = :ballad CORAL = :coral ECHO = :echo - FABLE = :fable - ONYX = :onyx - NOVA = :nova SAGE = :sage SHIMMER = :shimmer VERSE = :verse diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index f624e66d..52d7dcbc 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -43,7 +43,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -376,7 +376,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 53a2c521..a2a2bbd9 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -224,7 +224,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. 
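The corrected prose above spells out how `service_tier` resolves; a small sketch of pinning it explicitly on a chat completion (one possible pattern, not the only one):

```ruby
require "openai"

openai = OpenAI::Client.new

# With service_tier: :default, the request bypasses the project-level 'auto'
# resolution described above and uses standard pricing and performance.
completion = openai.chat.completions.create(
  messages: [{role: :user, content: "Say this is a test"}],
  model: :"gpt-4.1",
  service_tier: :default
)

puts completion.service_tier # the tier the request was actually served with
```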
- # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -542,7 +542,7 @@ module ResponseFormat # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 14e2bc4c..6471ea8e 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -34,7 +34,7 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # set to true, the model will follow the exact schema defined in the `parameters` # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn # more about Structured Outputs in the - # [function calling guide](docs/guides/function-calling). + # [function calling guide](https://platform.openai.com/docs/guides/function-calling). # # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 57311392..aee4b491 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -92,6 +92,9 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. + # # @return [Integer, nil] optional :partial_images, Integer, nil?: true diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 1c420d23..a438755e 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -71,6 +71,9 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. + # # @return [Integer, nil] optional :partial_images, Integer, nil?: true diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index 725d2727..d1aee9f5 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -140,7 +140,7 @@ class Usage < OpenAI::Internal::Type::BaseModel required :input_tokens_details, -> { OpenAI::ImagesResponse::Usage::InputTokensDetails } # @!attribute output_tokens - # The number of image tokens in the output image. 
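Because the final image can arrive before all requested partials (per the new note above), stream consumers should branch on event type rather than count frames. A hedged sketch using the `generate_stream_raw` method renamed earlier in this series; the event type symbols are assumptions based on the `ImageGenStreamEvent` variants:

```ruby
require "openai"

openai = OpenAI::Client.new

stream = openai.images.generate_stream_raw(
  prompt: "A low-poly fox",
  model: :"gpt-image-1",
  partial_images: 2 # 0 would mean a single final image in one event
)

stream.each do |event|
  case event.type
  in :"image_generation.partial_image" # assumed event type symbol
    puts "received a partial frame"
  in :"image_generation.completed" # assumed event type symbol
    # Per the note above, this may arrive before all partials do.
    puts "final image received"
  else
    nil
  end
end
```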
+ # The number of output tokens generated by the model. # # @return [Integer] required :output_tokens, Integer @@ -152,16 +152,13 @@ class Usage < OpenAI::Internal::Type::BaseModel required :total_tokens, Integer # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, total_tokens:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::ImagesResponse::Usage} for more details. - # # For `gpt-image-1` only, the token usage information for the image generation. # # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. # # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. # - # @param output_tokens [Integer] The number of image tokens in the output image. + # @param output_tokens [Integer] The number of output tokens generated by the model. # # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index d963980a..4b4281a3 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -186,7 +186,7 @@ class Response < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -401,7 +401,7 @@ module ToolChoice # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 081ea74a..dddf9e3b 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -34,7 +34,8 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel nil?: true # @!attribute status - # The status of the code interpreter tool call. + # The status of the code interpreter tool call. Valid values are `in_progress`, + # `completed`, `incomplete`, `interpreting`, and `failed`. # # @return [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Status } @@ -59,7 +60,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # # @param outputs [Array, nil] The outputs generated by the code interpreter, such as logs or images. 
# - # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. Valid values are `in_progress`, `c # # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. @@ -121,7 +122,8 @@ class Image < OpenAI::Internal::Type::BaseModel # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Output::Image)] end - # The status of the code interpreter tool call. + # The status of the code interpreter tool call. Valid values are `in_progress`, + # `completed`, `incomplete`, `interpreting`, and `failed`. # # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status module Status diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 86a9f56f..2bc6f814 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -138,7 +138,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -322,7 +322,7 @@ module Input # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb index df8179fb..c5f7d2f3 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb @@ -5,10 +5,11 @@ module Models module Responses class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta - # The partial update to the arguments for the MCP tool call. + # A JSON string containing the partial update to the arguments for the MCP tool + # call. # - # @return [Object] - required :delta, OpenAI::Internal::Type::Unknown + # @return [String] + required :delta, String # @!attribute item_id # The unique identifier of the MCP tool call item being processed. 
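Since `delta` is now a JSON string fragment rather than an opaque object, the natural consumer pattern is to concatenate fragments per item and parse once the `done` event lands; a sketch, assuming `stream` comes from a prior `openai.responses.stream_raw(...)` call:

```ruby
require "json"

# Per-item buffers for response.mcp_call_arguments.delta fragments.
buffers = Hash.new { |hash, key| hash[key] = +"" }

stream.each do |event|
  case event.type
  in :"response.mcp_call_arguments.delta"
    buffers[event.item_id] << event.delta
  in :"response.mcp_call_arguments.done"
    # `arguments` is the finalized JSON string; the buffered deltas
    # should concatenate to the same payload.
    args = JSON.parse(event.arguments)
    puts "MCP call #{event.item_id}: #{args.inspect}"
  else
    nil
  end
end
```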
@@ -35,10 +36,14 @@ class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.mcp_call_arguments.delta" # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent} for more + # details. + # # Emitted when there is a delta (partial update) to the arguments of an MCP tool # call. # - # @param delta [Object] The partial update to the arguments for the MCP tool call. + # @param delta [String] A JSON string containing the partial update to the arguments for the MCP tool ca # # @param item_id [String] The unique identifier of the MCP tool call item being processed. # diff --git a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb index 693bbcdc..8b52f9db 100644 --- a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb @@ -5,10 +5,10 @@ module Models module Responses class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute arguments - # The finalized arguments for the MCP tool call. + # A JSON string containing the finalized arguments for the MCP tool call. # - # @return [Object] - required :arguments, OpenAI::Internal::Type::Unknown + # @return [String] + required :arguments, String # @!attribute item_id # The unique identifier of the MCP tool call item being processed. @@ -35,9 +35,12 @@ class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel required :type, const: :"response.mcp_call_arguments.done" # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call_arguments.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent} for more details. + # # Emitted when the arguments for an MCP tool call are finalized. # - # @param arguments [Object] The finalized arguments for the MCP tool call. + # @param arguments [String] A JSON string containing the finalized arguments for the MCP tool call. # # @param item_id [String] The unique identifier of the MCP tool call item being processed. # diff --git a/lib/openai/models/responses/response_mcp_call_completed_event.rb b/lib/openai/models/responses/response_mcp_call_completed_event.rb index 082474fd..217de6ff 100644 --- a/lib/openai/models/responses/response_mcp_call_completed_event.rb +++ b/lib/openai/models/responses/response_mcp_call_completed_event.rb @@ -4,6 +4,18 @@ module OpenAI module Models module Responses class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the MCP tool call item that completed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item that completed. + # + # @return [Integer] + required :output_index, Integer + # @!attribute sequence_number # The sequence number of this event. 
# @@ -16,9 +28,13 @@ class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.mcp_call.completed"] required :type, const: :"response.mcp_call.completed" - # @!method initialize(sequence_number:, type: :"response.mcp_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_call.completed") # Emitted when an MCP tool call has completed successfully. # + # @param item_id [String] The ID of the MCP tool call item that completed. + # + # @param output_index [Integer] The index of the output item that completed. + # # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.mcp_call.completed"] The type of the event. Always 'response.mcp_call.completed'. diff --git a/lib/openai/models/responses/response_mcp_call_failed_event.rb b/lib/openai/models/responses/response_mcp_call_failed_event.rb index cca4a22d..2ddbc594 100644 --- a/lib/openai/models/responses/response_mcp_call_failed_event.rb +++ b/lib/openai/models/responses/response_mcp_call_failed_event.rb @@ -4,6 +4,18 @@ module OpenAI module Models module Responses class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the MCP tool call item that failed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item that failed. + # + # @return [Integer] + required :output_index, Integer + # @!attribute sequence_number # The sequence number of this event. # @@ -16,9 +28,13 @@ class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.mcp_call.failed"] required :type, const: :"response.mcp_call.failed" - # @!method initialize(sequence_number:, type: :"response.mcp_call.failed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_call.failed") # Emitted when an MCP tool call has failed. # + # @param item_id [String] The ID of the MCP tool call item that failed. + # + # @param output_index [Integer] The index of the output item that failed. + # # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.mcp_call.failed"] The type of the event. Always 'response.mcp_call.failed'. diff --git a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb index dd6bfdcb..fd58a599 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb @@ -4,6 +4,18 @@ module OpenAI module Models module Responses class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the MCP tool call item that produced this output. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item that was processed. + # + # @return [Integer] + required :output_index, Integer + # @!attribute sequence_number # The sequence number of this event. 
# @@ -16,9 +28,13 @@ class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.mcp_list_tools.completed"] required :type, const: :"response.mcp_list_tools.completed" - # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_list_tools.completed") # Emitted when the list of available MCP tools has been successfully retrieved. # + # @param item_id [String] The ID of the MCP tool call item that produced this output. + # + # @param output_index [Integer] The index of the output item that was processed. + # # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.mcp_list_tools.completed"] The type of the event. Always 'response.mcp_list_tools.completed'. diff --git a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb index a4c2fc10..d93f1e94 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb @@ -4,6 +4,18 @@ module OpenAI module Models module Responses class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the MCP tool call item that failed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item that failed. + # + # @return [Integer] + required :output_index, Integer + # @!attribute sequence_number # The sequence number of this event. # @@ -16,9 +28,13 @@ class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.mcp_list_tools.failed"] required :type, const: :"response.mcp_list_tools.failed" - # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.failed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_list_tools.failed") # Emitted when the attempt to list available MCP tools has failed. # + # @param item_id [String] The ID of the MCP tool call item that failed. + # + # @param output_index [Integer] The index of the output item that failed. + # # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.mcp_list_tools.failed"] The type of the event. Always 'response.mcp_list_tools.failed'. diff --git a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb index e931fb3b..5bf708ac 100644 --- a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb +++ b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb @@ -4,6 +4,18 @@ module OpenAI module Models module Responses class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The ID of the MCP tool call item that is being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item that is being processed. + # + # @return [Integer] + required :output_index, Integer + # @!attribute sequence_number # The sequence number of this event. 
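With `item_id` and `output_index` added to these lifecycle events, handlers can now correlate completions and failures to a specific output item; a minimal sketch (again assuming `stream` is a Responses event stream):

```ruby
stream.each do |event|
  case event.type
  in :"response.mcp_call.completed"
    puts "MCP call #{event.item_id} (output ##{event.output_index}) completed"
  in :"response.mcp_call.failed"
    warn "MCP call #{event.item_id} (output ##{event.output_index}) failed"
  in :"response.mcp_list_tools.completed" | :"response.mcp_list_tools.failed"
    puts "tool listing finished for item #{event.item_id}"
  else
    nil
  end
end
```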
# @@ -16,10 +28,14 @@ class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.mcp_list_tools.in_progress"] required :type, const: :"response.mcp_list_tools.in_progress" - # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_list_tools.in_progress") # Emitted when the system is in the process of retrieving the list of available # MCP tools. # + # @param item_id [String] The ID of the MCP tool call item that is being processed. + # + # @param output_index [Integer] The index of the output item that is being processed. + # # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.mcp_list_tools.in_progress"] The type of the event. Always 'response.mcp_list_tools.in_progress'. diff --git a/lib/openai/models/responses/response_reasoning_delta_event.rb b/lib/openai/models/responses/response_reasoning_delta_event.rb deleted file mode 100644 index a8b51c21..00000000 --- a/lib/openai/models/responses/response_reasoning_delta_event.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel - # @!attribute content_index - # The index of the reasoning content part within the output item. - # - # @return [Integer] - required :content_index, Integer - - # @!attribute delta - # The partial update to the reasoning content. - # - # @return [Object] - required :delta, OpenAI::Internal::Type::Unknown - - # @!attribute item_id - # The unique identifier of the item for which reasoning is being updated. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item in the response's output array. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute sequence_number - # The sequence number of this event. - # - # @return [Integer] - required :sequence_number, Integer - - # @!attribute type - # The type of the event. Always 'response.reasoning.delta'. - # - # @return [Symbol, :"response.reasoning.delta"] - required :type, const: :"response.reasoning.delta" - - # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.reasoning.delta") - # Emitted when there is a delta (partial update) to the reasoning content. - # - # @param content_index [Integer] The index of the reasoning content part within the output item. - # - # @param delta [Object] The partial update to the reasoning content. - # - # @param item_id [String] The unique identifier of the item for which reasoning is being updated. - # - # @param output_index [Integer] The index of the output item in the response's output array. - # - # @param sequence_number [Integer] The sequence number of this event. - # - # @param type [Symbol, :"response.reasoning.delta"] The type of the event. Always 'response.reasoning.delta'. 
- end - end - end -end diff --git a/lib/openai/models/responses/response_reasoning_done_event.rb b/lib/openai/models/responses/response_reasoning_done_event.rb deleted file mode 100644 index 0c5e1861..00000000 --- a/lib/openai/models/responses/response_reasoning_done_event.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel - # @!attribute content_index - # The index of the reasoning content part within the output item. - # - # @return [Integer] - required :content_index, Integer - - # @!attribute item_id - # The unique identifier of the item for which reasoning is finalized. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item in the response's output array. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute sequence_number - # The sequence number of this event. - # - # @return [Integer] - required :sequence_number, Integer - - # @!attribute text - # The finalized reasoning text. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the event. Always 'response.reasoning.done'. - # - # @return [Symbol, :"response.reasoning.done"] - required :type, const: :"response.reasoning.done" - - # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.reasoning.done") - # Emitted when the reasoning content is finalized for an item. - # - # @param content_index [Integer] The index of the reasoning content part within the output item. - # - # @param item_id [String] The unique identifier of the item for which reasoning is finalized. - # - # @param output_index [Integer] The index of the output item in the response's output array. - # - # @param sequence_number [Integer] The sequence number of this event. - # - # @param text [String] The finalized reasoning text. - # - # @param type [Symbol, :"response.reasoning.done"] The type of the event. Always 'response.reasoning.done'. - end - end - end -end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 4b51a511..21dec3eb 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -191,12 +191,6 @@ module ResponseStreamEvent # Emitted when a response is queued and waiting to be processed. variant :"response.queued", -> { OpenAI::Responses::ResponseQueuedEvent } - # Emitted when there is a delta (partial update) to the reasoning content. - variant :"response.reasoning.delta", -> { OpenAI::Responses::ResponseReasoningDeltaEvent } - - # Emitted when the reasoning content is finalized for an item. - variant :"response.reasoning.done", -> { OpenAI::Responses::ResponseReasoningDoneEvent } - # Emitted when there is a delta (partial update) to the reasoning summary content. 
variant :"response.reasoning_summary.delta", -> { @@ -210,7 +204,7 @@ module ResponseStreamEvent } # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, 
OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index b08caebc..4e51d86c 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -22,6 +22,13 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :item_id, String + # @!attribute logprobs + # The log probabilities of the tokens in the delta. 
+ # + # @return [Array] + required :logprobs, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDeltaEvent::Logprob] } + # @!attribute output_index # The index of the output item that the text delta was added to. # @@ -40,7 +47,7 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.delta"] required :type, const: :"response.output_text.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.output_text.delta") + # @!method initialize(content_index:, delta:, item_id:, logprobs:, output_index:, sequence_number:, type: :"response.output_text.delta") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseTextDeltaEvent} for more details. # @@ -52,11 +59,69 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param item_id [String] The ID of the output item that the text delta was added to. # + # @param logprobs [Array] The log probabilities of the tokens in the delta. + # # @param output_index [Integer] The index of the output item that the text delta was added to. # # @param sequence_number [Integer] The sequence number for this event. # # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. + + class Logprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # A possible text token. + # + # @return [String] + required :token, String + + # @!attribute logprob + # The log probability of this token. + # + # @return [Float] + required :logprob, Float + + # @!attribute top_logprobs + # The log probability of the top 20 most likely tokens. + # + # @return [Array, nil] + optional :top_logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + } + + # @!method initialize(token:, logprob:, top_logprobs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextDeltaEvent::Logprob} for more details. + # + # A logprob is the logarithmic probability that the model assigns to producing a + # particular token at a given position in the sequence. Less-negative (higher) + # logprob values indicate greater model confidence in that token choice. + # + # @param token [String] A possible text token. + # + # @param logprob [Float] The log probability of this token. + # + # @param top_logprobs [Array] The log probability of the top 20 most likely tokens. + + class TopLogprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # A possible text token. + # + # @return [String, nil] + optional :token, String + + # @!attribute logprob + # The log probability of this token. + # + # @return [Float, nil] + optional :logprob, Float + + # @!method initialize(token: nil, logprob: nil) + # @param token [String] A possible text token. + # + # @param logprob [Float] The log probability of this token. + end + end end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 319b4eeb..3b861ee3 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -16,6 +16,13 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :item_id, String + # @!attribute logprobs + # The log probabilities of the tokens in the delta. 
+ # + # @return [Array] + required :logprobs, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDoneEvent::Logprob] } + # @!attribute output_index # The index of the output item that the text content is finalized. # @@ -40,7 +47,7 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.done"] required :type, const: :"response.output_text.done" - # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.output_text.done") + # @!method initialize(content_index:, item_id:, logprobs:, output_index:, sequence_number:, text:, type: :"response.output_text.done") # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseTextDoneEvent} for more details. # @@ -50,6 +57,8 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param item_id [String] The ID of the output item that the text content is finalized. # + # @param logprobs [Array] The log probabilities of the tokens in the delta. + # # @param output_index [Integer] The index of the output item that the text content is finalized. # # @param sequence_number [Integer] The sequence number for this event. @@ -57,6 +66,62 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @param text [String] The text content that is finalized. # # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. + + class Logprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # A possible text token. + # + # @return [String] + required :token, String + + # @!attribute logprob + # The log probability of this token. + # + # @return [Float] + required :logprob, Float + + # @!attribute top_logprobs + # The log probability of the top 20 most likely tokens. + # + # @return [Array, nil] + optional :top_logprobs, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + } + + # @!method initialize(token:, logprob:, top_logprobs: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseTextDoneEvent::Logprob} for more details. + # + # A logprob is the logarithmic probability that the model assigns to producing a + # particular token at a given position in the sequence. Less-negative (higher) + # logprob values indicate greater model confidence in that token choice. + # + # @param token [String] A possible text token. + # + # @param logprob [Float] The log probability of this token. + # + # @param top_logprobs [Array] The log probability of the top 20 most likely tokens. + + class TopLogprob < OpenAI::Internal::Type::BaseModel + # @!attribute token + # A possible text token. + # + # @return [String, nil] + optional :token, String + + # @!attribute logprob + # The log probability of this token. + # + # @return [Float, nil] + optional :logprob, Float + + # @!method initialize(token: nil, logprob: nil) + # @param token [String] A possible text token. + # + # @param logprob [Float] The log probability of this token. 
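A sketch of consuming the new `logprobs` arrays while streaming text. Whether logprobs are populated depends on the request options, which this patch does not change; the accessor names follow the attributes added above:

```ruby
require "openai"

openai = OpenAI::Client.new

stream = openai.responses.stream_raw(
  model: :"gpt-4.1",
  input: "Write a haiku about patch notes."
)

stream.each do |event|
  case event.type
  in :"response.output_text.delta"
    # `logprobs` may be empty unless log probabilities were requested.
    event.logprobs.each do |entry|
      printf("%-14s %8.4f\n", entry.token.inspect, entry.logprob)
    end
  in :"response.output_text.done"
    puts event.text
  else
    nil
  end
end
```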
+ end + end end end end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 2b2d33f4..48d2ca88 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -154,7 +154,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params = {}) @@ -227,7 +227,7 @@ def retrieve(response_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve_streaming(response_id, params = {}) diff --git a/rbi/openai/models/audio/speech_create_params.rbi b/rbi/openai/models/audio/speech_create_params.rbi index 0a15c3d8..41949313 100644 --- a/rbi/openai/models/audio/speech_create_params.rbi +++ b/rbi/openai/models/audio/speech_create_params.rbi @@ -216,15 +216,6 @@ module OpenAI ) ECHO = T.let(:echo, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) - FABLE = - T.let( - :fable, - OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol - ) - ONYX = - T.let(:onyx, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) - NOVA = - T.let(:nova, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) SAGE = T.let(:sage, OpenAI::Audio::SpeechCreateParams::Voice::TaggedSymbol) SHIMMER = diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index d26bd0ca..61f4d426 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -37,7 +37,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -103,7 +103,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -368,7 +368,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/models/chat/chat_completion_audio_param.rbi b/rbi/openai/models/chat/chat_completion_audio_param.rbi index 49a3dd9d..6299321c 100644 --- a/rbi/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/openai/models/chat/chat_completion_audio_param.rbi @@ -176,21 +176,6 @@ module OpenAI :echo, OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol ) - FABLE = - T.let( - :fable, - OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol - ) - ONYX = - T.let( - :onyx, - OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol - ) - NOVA = - T.let( - :nova, - OpenAI::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol - ) SAGE = T.let( :sage, diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index dfd263ec..7865525a 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -39,7 +39,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -118,7 +118,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -788,7 +788,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 89b7ffe3..c1bfdd13 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -273,7 +273,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -615,7 +615,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -993,7 +993,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/models/function_definition.rbi b/rbi/openai/models/function_definition.rbi index 1d615e84..413025fd 100644 --- a/rbi/openai/models/function_definition.rbi +++ b/rbi/openai/models/function_definition.rbi @@ -38,7 +38,7 @@ module OpenAI # set to true, the model will follow the exact schema defined in the `parameters` # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn # more about Structured Outputs in the - # [function calling guide](docs/guides/function-calling). + # [function calling guide](https://platform.openai.com/docs/guides/function-calling). sig { returns(T.nilable(T::Boolean)) } attr_accessor :strict @@ -69,7 +69,7 @@ module OpenAI # set to true, the model will follow the exact schema defined in the `parameters` # field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn # more about Structured Outputs in the - # [function calling guide](docs/guides/function-calling). + # [function calling guide](https://platform.openai.com/docs/guides/function-calling). strict: nil ) end diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 6354595b..742bab15 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -81,6 +81,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. sig { returns(T.nilable(Integer)) } attr_accessor :partial_images @@ -182,6 +185,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. 
partial_images: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. diff --git a/rbi/openai/models/image_generate_params.rbi b/rbi/openai/models/image_generate_params.rbi index 29b5a6e2..c4111420 100644 --- a/rbi/openai/models/image_generate_params.rbi +++ b/rbi/openai/models/image_generate_params.rbi @@ -63,6 +63,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. sig { returns(T.nilable(Integer)) } attr_accessor :partial_images @@ -165,6 +168,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. partial_images: nil, # The quality of the image that will be generated. # diff --git a/rbi/openai/models/images_response.rbi b/rbi/openai/models/images_response.rbi index b4b96022..68ca057a 100644 --- a/rbi/openai/models/images_response.rbi +++ b/rbi/openai/models/images_response.rbi @@ -224,7 +224,7 @@ module OpenAI end attr_writer :input_tokens_details - # The number of image tokens in the output image. + # The number of output tokens generated by the model. sig { returns(Integer) } attr_accessor :output_tokens @@ -247,7 +247,7 @@ module OpenAI input_tokens:, # The input tokens detailed information for the image generation. input_tokens_details:, - # The number of image tokens in the output image. + # The number of output tokens generated by the model. output_tokens:, # The total number of tokens (images and text) used for the image generation. total_tokens: diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 4eba041a..fa42d7d6 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -180,7 +180,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -427,7 +427,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -666,7 +666,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index a4b99c63..1d3f422f 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -40,7 +40,8 @@ module OpenAI end attr_accessor :outputs - # The status of the code interpreter tool call. + # The status of the code interpreter tool call. Valid values are `in_progress`, + # `completed`, `incomplete`, `interpreting`, and `failed`. sig do returns( OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol @@ -82,7 +83,8 @@ module OpenAI # The outputs generated by the code interpreter, such as logs or images. Can be # null if no outputs are available. outputs:, - # The status of the code interpreter tool call. + # The status of the code interpreter tool call. Valid values are `in_progress`, + # `completed`, `incomplete`, `interpreting`, and `failed`. status:, # The type of the code interpreter tool call. Always `code_interpreter_call`. type: :code_interpreter_call @@ -200,7 +202,8 @@ module OpenAI end end - # The status of the code interpreter tool call. + # The status of the code interpreter tool call. Valid values are `in_progress`, + # `completed`, `incomplete`, `interpreting`, and `failed`. module Status extend OpenAI::Internal::Type::Enum diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index a8aa310b..fe649d45 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -163,7 +163,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -464,7 +464,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -632,7 +632,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi index 58f623a5..b45c2295 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi @@ -12,8 +12,9 @@ module OpenAI ) end - # The partial update to the arguments for the MCP tool call. - sig { returns(T.anything) } + # A JSON string containing the partial update to the arguments for the MCP tool + # call. + sig { returns(String) } attr_accessor :delta # The unique identifier of the MCP tool call item being processed. @@ -36,7 +37,7 @@ module OpenAI # call. sig do params( - delta: T.anything, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -44,7 +45,8 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # The partial update to the arguments for the MCP tool call. + # A JSON string containing the partial update to the arguments for the MCP tool + # call. delta:, # The unique identifier of the MCP tool call item being processed. item_id:, @@ -60,7 +62,7 @@ module OpenAI sig do override.returns( { - delta: T.anything, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi index ef20e071..c37ddf46 100644 --- a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi @@ -12,8 +12,8 @@ module OpenAI ) end - # The finalized arguments for the MCP tool call. - sig { returns(T.anything) } + # A JSON string containing the finalized arguments for the MCP tool call. + sig { returns(String) } attr_accessor :arguments # The unique identifier of the MCP tool call item being processed. @@ -35,7 +35,7 @@ module OpenAI # Emitted when the arguments for an MCP tool call are finalized. sig do params( - arguments: T.anything, + arguments: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -43,7 +43,7 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # The finalized arguments for the MCP tool call. + # A JSON string containing the finalized arguments for the MCP tool call. arguments:, # The unique identifier of the MCP tool call item being processed. 
item_id:, @@ -59,7 +59,7 @@ module OpenAI sig do override.returns( { - arguments: T.anything, + arguments: String, item_id: String, output_index: Integer, sequence_number: Integer, diff --git a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi index 4ba445d9..88890871 100644 --- a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi @@ -12,6 +12,14 @@ module OpenAI ) end + # The ID of the MCP tool call item that completed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item that completed. + sig { returns(Integer) } + attr_accessor :output_index + # The sequence number of this event. sig { returns(Integer) } attr_accessor :sequence_number @@ -22,11 +30,18 @@ module OpenAI # Emitted when an MCP tool call has completed successfully. sig do - params(sequence_number: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( + # The ID of the MCP tool call item that completed. + item_id:, + # The index of the output item that completed. + output_index:, # The sequence number of this event. sequence_number:, # The type of the event. Always 'response.mcp_call.completed'. @@ -34,7 +49,16 @@ module OpenAI ) end - sig { override.returns({ sequence_number: Integer, type: Symbol }) } + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi index 81d64fff..364dbbe3 100644 --- a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi @@ -12,6 +12,14 @@ module OpenAI ) end + # The ID of the MCP tool call item that failed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item that failed. + sig { returns(Integer) } + attr_accessor :output_index + # The sequence number of this event. sig { returns(Integer) } attr_accessor :sequence_number @@ -22,11 +30,18 @@ module OpenAI # Emitted when an MCP tool call has failed. sig do - params(sequence_number: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( + # The ID of the MCP tool call item that failed. + item_id:, + # The index of the output item that failed. + output_index:, # The sequence number of this event. sequence_number:, # The type of the event. Always 'response.mcp_call.failed'. 
@@ -34,7 +49,16 @@ module OpenAI ) end - sig { override.returns({ sequence_number: Integer, type: Symbol }) } + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi index 619af81d..76c90724 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi @@ -12,6 +12,14 @@ module OpenAI ) end + # The ID of the MCP tool call item that produced this output. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item that was processed. + sig { returns(Integer) } + attr_accessor :output_index + # The sequence number of this event. sig { returns(Integer) } attr_accessor :sequence_number @@ -22,11 +30,18 @@ module OpenAI # Emitted when the list of available MCP tools has been successfully retrieved. sig do - params(sequence_number: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( + # The ID of the MCP tool call item that produced this output. + item_id:, + # The index of the output item that was processed. + output_index:, # The sequence number of this event. sequence_number:, # The type of the event. Always 'response.mcp_list_tools.completed'. @@ -34,7 +49,16 @@ module OpenAI ) end - sig { override.returns({ sequence_number: Integer, type: Symbol }) } + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi index 5ac00403..1e4748f4 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi @@ -12,6 +12,14 @@ module OpenAI ) end + # The ID of the MCP tool call item that failed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item that failed. + sig { returns(Integer) } + attr_accessor :output_index + # The sequence number of this event. sig { returns(Integer) } attr_accessor :sequence_number @@ -22,11 +30,18 @@ module OpenAI # Emitted when the attempt to list available MCP tools has failed. sig do - params(sequence_number: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( + # The ID of the MCP tool call item that failed. + item_id:, + # The index of the output item that failed. + output_index:, # The sequence number of this event. sequence_number:, # The type of the event. Always 'response.mcp_list_tools.failed'. 
@@ -34,7 +49,16 @@ module OpenAI ) end - sig { override.returns({ sequence_number: Integer, type: Symbol }) } + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi index 7bbcbda2..17686c8a 100644 --- a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi @@ -12,6 +12,14 @@ module OpenAI ) end + # The ID of the MCP tool call item that is being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item that is being processed. + sig { returns(Integer) } + attr_accessor :output_index + # The sequence number of this event. sig { returns(Integer) } attr_accessor :sequence_number @@ -23,11 +31,18 @@ module OpenAI # Emitted when the system is in the process of retrieving the list of available # MCP tools. sig do - params(sequence_number: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( + # The ID of the MCP tool call item that is being processed. + item_id:, + # The index of the output item that is being processed. + output_index:, # The sequence number of this event. sequence_number:, # The type of the event. Always 'response.mcp_list_tools.in_progress'. @@ -35,7 +50,16 @@ module OpenAI ) end - sig { override.returns({ sequence_number: Integer, type: Symbol }) } + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_reasoning_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_delta_event.rbi deleted file mode 100644 index 37fc9e63..00000000 --- a/rbi/openai/models/responses/response_reasoning_delta_event.rbi +++ /dev/null @@ -1,83 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Responses - class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseReasoningDeltaEvent, - OpenAI::Internal::AnyHash - ) - end - - # The index of the reasoning content part within the output item. - sig { returns(Integer) } - attr_accessor :content_index - - # The partial update to the reasoning content. - sig { returns(T.anything) } - attr_accessor :delta - - # The unique identifier of the item for which reasoning is being updated. - sig { returns(String) } - attr_accessor :item_id - - # The index of the output item in the response's output array. - sig { returns(Integer) } - attr_accessor :output_index - - # The sequence number of this event. - sig { returns(Integer) } - attr_accessor :sequence_number - - # The type of the event. Always 'response.reasoning.delta'. - sig { returns(Symbol) } - attr_accessor :type - - # Emitted when there is a delta (partial update) to the reasoning content. - sig do - params( - content_index: Integer, - delta: T.anything, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The index of the reasoning content part within the output item. 
- content_index:, - # The partial update to the reasoning content. - delta:, - # The unique identifier of the item for which reasoning is being updated. - item_id:, - # The index of the output item in the response's output array. - output_index:, - # The sequence number of this event. - sequence_number:, - # The type of the event. Always 'response.reasoning.delta'. - type: :"response.reasoning.delta" - ) - end - - sig do - override.returns( - { - content_index: Integer, - delta: T.anything, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: Symbol - } - ) - end - def to_hash - end - end - end - end -end diff --git a/rbi/openai/models/responses/response_reasoning_done_event.rbi b/rbi/openai/models/responses/response_reasoning_done_event.rbi deleted file mode 100644 index 05c8b893..00000000 --- a/rbi/openai/models/responses/response_reasoning_done_event.rbi +++ /dev/null @@ -1,83 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Responses - class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseReasoningDoneEvent, - OpenAI::Internal::AnyHash - ) - end - - # The index of the reasoning content part within the output item. - sig { returns(Integer) } - attr_accessor :content_index - - # The unique identifier of the item for which reasoning is finalized. - sig { returns(String) } - attr_accessor :item_id - - # The index of the output item in the response's output array. - sig { returns(Integer) } - attr_accessor :output_index - - # The sequence number of this event. - sig { returns(Integer) } - attr_accessor :sequence_number - - # The finalized reasoning text. - sig { returns(String) } - attr_accessor :text - - # The type of the event. Always 'response.reasoning.done'. - sig { returns(Symbol) } - attr_accessor :type - - # Emitted when the reasoning content is finalized for an item. - sig do - params( - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - text: String, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The index of the reasoning content part within the output item. - content_index:, - # The unique identifier of the item for which reasoning is finalized. - item_id:, - # The index of the output item in the response's output array. - output_index:, - # The sequence number of this event. - sequence_number:, - # The finalized reasoning text. - text:, - # The type of the event. Always 'response.reasoning.done'. 
- type: :"response.reasoning.done" - ) - end - - sig do - override.returns( - { - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - text: String, - type: Symbol - } - ) - end - def to_hash - end - end - end - end -end diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index 1afd018a..8b5e96b2 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -59,8 +59,6 @@ module OpenAI OpenAI::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Responses::ResponseQueuedEvent, - OpenAI::Responses::ResponseReasoningDeltaEvent, - OpenAI::Responses::ResponseReasoningDoneEvent, OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryDoneEvent ) diff --git a/rbi/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi index ba989ab5..bd438476 100644 --- a/rbi/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_delta_event.rbi @@ -24,6 +24,12 @@ module OpenAI sig { returns(String) } attr_accessor :item_id + # The log probabilities of the tokens in the delta. + sig do + returns(T::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob]) + end + attr_accessor :logprobs + # The index of the output item that the text delta was added to. sig { returns(Integer) } attr_accessor :output_index @@ -42,6 +48,10 @@ module OpenAI content_index: Integer, delta: String, item_id: String, + logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::OrHash + ], output_index: Integer, sequence_number: Integer, type: Symbol @@ -54,6 +64,8 @@ module OpenAI delta:, # The ID of the output item that the text delta was added to. item_id:, + # The log probabilities of the tokens in the delta. + logprobs:, # The index of the output item that the text delta was added to. output_index:, # The sequence number for this event. @@ -69,6 +81,8 @@ module OpenAI content_index: Integer, delta: String, item_id: String, + logprobs: + T::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob], output_index: Integer, sequence_number: Integer, type: Symbol @@ -77,6 +91,123 @@ module OpenAI end def to_hash end + + class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDeltaEvent::Logprob, + OpenAI::Internal::AnyHash + ) + end + + # A possible text token. + sig { returns(String) } + attr_accessor :token + + # The log probability of this token. + sig { returns(Float) } + attr_accessor :logprob + + # The log probability of the top 20 most likely tokens. + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob + ] + ) + ) + end + attr_reader :top_logprobs + + sig do + params( + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob::OrHash + ] + ).void + end + attr_writer :top_logprobs + + # A logprob is the logarithmic probability that the model assigns to producing a + # particular token at a given position in the sequence. Less-negative (higher) + # logprob values indicate greater model confidence in that token choice. 
+ sig do + params( + token: String, + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob::OrHash + ] + ).returns(T.attached_class) + end + def self.new( + # A possible text token. + token:, + # The log probability of this token. + logprob:, + # The log probability of the top 20 most likely tokens. + top_logprobs: nil + ) + end + + sig do + override.returns( + { + token: String, + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob + ] + } + ) + end + def to_hash + end + + class TopLogprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob, + OpenAI::Internal::AnyHash + ) + end + + # A possible text token. + sig { returns(T.nilable(String)) } + attr_reader :token + + sig { params(token: String).void } + attr_writer :token + + # The log probability of this token. + sig { returns(T.nilable(Float)) } + attr_reader :logprob + + sig { params(logprob: Float).void } + attr_writer :logprob + + sig do + params(token: String, logprob: Float).returns(T.attached_class) + end + def self.new( + # A possible text token. + token: nil, + # The log probability of this token. + logprob: nil + ) + end + + sig { override.returns({ token: String, logprob: Float }) } + def to_hash + end + end + end end end end diff --git a/rbi/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi index aa712038..2fe91762 100644 --- a/rbi/openai/models/responses/response_text_done_event.rbi +++ b/rbi/openai/models/responses/response_text_done_event.rbi @@ -20,6 +20,12 @@ module OpenAI sig { returns(String) } attr_accessor :item_id + # The log probabilities of the tokens in the delta. + sig do + returns(T::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob]) + end + attr_accessor :logprobs + # The index of the output item that the text content is finalized. sig { returns(Integer) } attr_accessor :output_index @@ -41,6 +47,10 @@ module OpenAI params( content_index: Integer, item_id: String, + logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDoneEvent::Logprob::OrHash + ], output_index: Integer, sequence_number: Integer, text: String, @@ -52,6 +62,8 @@ module OpenAI content_index:, # The ID of the output item that the text content is finalized. item_id:, + # The log probabilities of the tokens in the delta. + logprobs:, # The index of the output item that the text content is finalized. output_index:, # The sequence number for this event. @@ -68,6 +80,8 @@ module OpenAI { content_index: Integer, item_id: String, + logprobs: + T::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob], output_index: Integer, sequence_number: Integer, text: String, @@ -77,6 +91,123 @@ module OpenAI end def to_hash end + + class Logprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDoneEvent::Logprob, + OpenAI::Internal::AnyHash + ) + end + + # A possible text token. + sig { returns(String) } + attr_accessor :token + + # The log probability of this token. + sig { returns(Float) } + attr_accessor :logprob + + # The log probability of the top 20 most likely tokens. 
+ sig do + returns( + T.nilable( + T::Array[ + OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob + ] + ) + ) + end + attr_reader :top_logprobs + + sig do + params( + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob::OrHash + ] + ).void + end + attr_writer :top_logprobs + + # A logprob is the logarithmic probability that the model assigns to producing a + # particular token at a given position in the sequence. Less-negative (higher) + # logprob values indicate greater model confidence in that token choice. + sig do + params( + token: String, + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob::OrHash + ] + ).returns(T.attached_class) + end + def self.new( + # A possible text token. + token:, + # The log probability of this token. + logprob:, + # The log probability of the top 20 most likely tokens. + top_logprobs: nil + ) + end + + sig do + override.returns( + { + token: String, + logprob: Float, + top_logprobs: + T::Array[ + OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob + ] + } + ) + end + def to_hash + end + + class TopLogprob < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob, + OpenAI::Internal::AnyHash + ) + end + + # A possible text token. + sig { returns(T.nilable(String)) } + attr_reader :token + + sig { params(token: String).void } + attr_writer :token + + # The log probability of this token. + sig { returns(T.nilable(Float)) } + attr_reader :logprob + + sig { params(logprob: Float).void } + attr_writer :logprob + + sig do + params(token: String, logprob: Float).returns(T.attached_class) + end + def self.new( + # A possible text token. + token: nil, + # The log probability of this token. + logprob: nil + ) + end + + sig { override.returns({ token: String, logprob: Float }) } + def to_hash + end + end + end end end end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 7addc65f..4e98592f 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -226,7 +226,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -515,7 +515,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index 03c96283..b27c7c15 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -115,6 +115,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. partial_images: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. @@ -215,6 +218,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. partial_images: nil, # The quality of the image that will be generated. `high`, `medium` and `low` are # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. @@ -300,6 +306,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. partial_images: nil, # The quality of the image that will be generated. # @@ -397,6 +406,9 @@ module OpenAI # The number of partial images to generate. This parameter is used for streaming # responses that return partial images. Value must be between 0 and 3. When set to # 0, the response will be a single image sent in one streaming event. + # + # Note that the final image may be sent before the full number of partial images + # are generated if the full image is generated more quickly. partial_images: nil, # The quality of the image that will be generated. # diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index d4a5d37d..bac4f319 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -157,7 +157,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. - # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service @@ -385,7 +385,7 @@ module OpenAI # - If set to 'auto', then the request will be processed with the service tier # configured in the Project settings. Unless otherwise configured, the Project # will use 'default'. 
- # - If set to 'default', then the requset will be processed with the standard + # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or # 'priority', then the request will be processed with the corresponding service diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index f58474c0..e01b1b9e 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -80,9 +80,6 @@ module OpenAI | :ballad | :coral | :echo - | :fable - | :onyx - | :nova | :sage | :shimmer | :verse @@ -97,9 +94,6 @@ module OpenAI BALLAD: :ballad CORAL: :coral ECHO: :echo - FABLE: :fable - ONYX: :onyx - NOVA: :nova SAGE: :sage SHIMMER: :shimmer VERSE: :verse diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index d4689dfe..3f77d8bd 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -46,9 +46,6 @@ module OpenAI | :ballad | :coral | :echo - | :fable - | :onyx - | :nova | :sage | :shimmer | :verse @@ -63,9 +60,6 @@ module OpenAI BALLAD: :ballad CORAL: :coral ECHO: :echo - FABLE: :fable - ONYX: :onyx - NOVA: :nova SAGE: :sage SHIMMER: :shimmer VERSE: :verse diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs index a69fd15d..09b2df87 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_mcp_call_arguments_delta_event = { - delta: top, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -11,7 +11,7 @@ module OpenAI } class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor delta: top + attr_accessor delta: String attr_accessor item_id: String @@ -22,7 +22,7 @@ module OpenAI attr_accessor type: :"response.mcp_call_arguments.delta" def initialize: ( - delta: top, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -30,7 +30,7 @@ module OpenAI ) -> void def to_hash: -> { - delta: top, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs index c262d1a7..b997774e 100644 --- a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -3,7 +3,7 @@ module OpenAI module Responses type response_mcp_call_arguments_done_event = { - arguments: top, + arguments: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -11,7 +11,7 @@ module OpenAI } class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel - attr_accessor arguments: top + attr_accessor arguments: String attr_accessor item_id: String @@ -22,7 +22,7 @@ module OpenAI attr_accessor type: :"response.mcp_call_arguments.done" def initialize: ( - arguments: top, + arguments: String, item_id: String, output_index: Integer, sequence_number: Integer, @@ -30,7 +30,7 @@ module 
OpenAI ) -> void def to_hash: -> { - arguments: top, + arguments: String, item_id: String, output_index: Integer, sequence_number: Integer, diff --git a/sig/openai/models/responses/response_mcp_call_completed_event.rbs b/sig/openai/models/responses/response_mcp_call_completed_event.rbs index 336d5eed..85f13ce9 100644 --- a/sig/openai/models/responses/response_mcp_call_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_completed_event.rbs @@ -2,19 +2,32 @@ module OpenAI module Models module Responses type response_mcp_call_completed_event = - { sequence_number: Integer, type: :"response.mcp_call.completed" } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.completed" + } class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + attr_accessor sequence_number: Integer attr_accessor type: :"response.mcp_call.completed" def initialize: ( + item_id: String, + output_index: Integer, sequence_number: Integer, ?type: :"response.mcp_call.completed" ) -> void def to_hash: -> { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_call.completed" } diff --git a/sig/openai/models/responses/response_mcp_call_failed_event.rbs b/sig/openai/models/responses/response_mcp_call_failed_event.rbs index 9d643dbc..a38a0c39 100644 --- a/sig/openai/models/responses/response_mcp_call_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_call_failed_event.rbs @@ -2,19 +2,32 @@ module OpenAI module Models module Responses type response_mcp_call_failed_event = - { sequence_number: Integer, type: :"response.mcp_call.failed" } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.failed" + } class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + attr_accessor sequence_number: Integer attr_accessor type: :"response.mcp_call.failed" def initialize: ( + item_id: String, + output_index: Integer, sequence_number: Integer, ?type: :"response.mcp_call.failed" ) -> void def to_hash: -> { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_call.failed" } diff --git a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs index af17675a..2e6aa85a 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs @@ -2,19 +2,32 @@ module OpenAI module Models module Responses type response_mcp_list_tools_completed_event = - { sequence_number: Integer, type: :"response.mcp_list_tools.completed" } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_list_tools.completed" + } class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + attr_accessor sequence_number: Integer attr_accessor type: :"response.mcp_list_tools.completed" def initialize: ( + item_id: String, + output_index: Integer, sequence_number: Integer, ?type: :"response.mcp_list_tools.completed" ) -> void def to_hash: -> { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_list_tools.completed" } diff --git 
a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs index fab64580..55e8ac00 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs @@ -2,19 +2,32 @@ module OpenAI module Models module Responses type response_mcp_list_tools_failed_event = - { sequence_number: Integer, type: :"response.mcp_list_tools.failed" } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_list_tools.failed" + } class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + attr_accessor sequence_number: Integer attr_accessor type: :"response.mcp_list_tools.failed" def initialize: ( + item_id: String, + output_index: Integer, sequence_number: Integer, ?type: :"response.mcp_list_tools.failed" ) -> void def to_hash: -> { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_list_tools.failed" } diff --git a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs index 72e57c18..2f8c7b30 100644 --- a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs +++ b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs @@ -3,21 +3,31 @@ module OpenAI module Responses type response_mcp_list_tools_in_progress_event = { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_list_tools.in_progress" } class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + attr_accessor sequence_number: Integer attr_accessor type: :"response.mcp_list_tools.in_progress" def initialize: ( + item_id: String, + output_index: Integer, sequence_number: Integer, ?type: :"response.mcp_list_tools.in_progress" ) -> void def to_hash: -> { + item_id: String, + output_index: Integer, sequence_number: Integer, type: :"response.mcp_list_tools.in_progress" } diff --git a/sig/openai/models/responses/response_reasoning_delta_event.rbs b/sig/openai/models/responses/response_reasoning_delta_event.rbs deleted file mode 100644 index a56121bc..00000000 --- a/sig/openai/models/responses/response_reasoning_delta_event.rbs +++ /dev/null @@ -1,47 +0,0 @@ -module OpenAI - module Models - module Responses - type response_reasoning_delta_event = - { - content_index: Integer, - delta: top, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: :"response.reasoning.delta" - } - - class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor content_index: Integer - - attr_accessor delta: top - - attr_accessor item_id: String - - attr_accessor output_index: Integer - - attr_accessor sequence_number: Integer - - attr_accessor type: :"response.reasoning.delta" - - def initialize: ( - content_index: Integer, - delta: top, - item_id: String, - output_index: Integer, - sequence_number: Integer, - ?type: :"response.reasoning.delta" - ) -> void - - def to_hash: -> { - content_index: Integer, - delta: top, - item_id: String, - output_index: Integer, - sequence_number: Integer, - type: :"response.reasoning.delta" - } - end - end - end -end diff --git a/sig/openai/models/responses/response_reasoning_done_event.rbs 
b/sig/openai/models/responses/response_reasoning_done_event.rbs deleted file mode 100644 index 0123cfcf..00000000 --- a/sig/openai/models/responses/response_reasoning_done_event.rbs +++ /dev/null @@ -1,47 +0,0 @@ -module OpenAI - module Models - module Responses - type response_reasoning_done_event = - { - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - text: String, - type: :"response.reasoning.done" - } - - class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel - attr_accessor content_index: Integer - - attr_accessor item_id: String - - attr_accessor output_index: Integer - - attr_accessor sequence_number: Integer - - attr_accessor text: String - - attr_accessor type: :"response.reasoning.done" - - def initialize: ( - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - text: String, - ?type: :"response.reasoning.done" - ) -> void - - def to_hash: -> { - content_index: Integer, - item_id: String, - output_index: Integer, - sequence_number: Integer, - text: String, - type: :"response.reasoning.done" - } - end - end - end -end diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 21511833..21677586 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -51,8 +51,6 @@ module OpenAI | OpenAI::Responses::ResponseMcpListToolsInProgressEvent | OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent | OpenAI::Responses::ResponseQueuedEvent - | OpenAI::Responses::ResponseReasoningDeltaEvent - | OpenAI::Responses::ResponseReasoningDoneEvent | OpenAI::Responses::ResponseReasoningSummaryDeltaEvent | OpenAI::Responses::ResponseReasoningSummaryDoneEvent diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index bbea6304..1069f71d 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI content_index: Integer, delta: String, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob], output_index: Integer, sequence_number: Integer, type: :"response.output_text.delta" @@ -18,6 +19,8 @@ module OpenAI attr_accessor item_id: String + attr_accessor logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob] + attr_accessor output_index: Integer attr_accessor sequence_number: Integer @@ -28,6 +31,7 @@ module OpenAI content_index: Integer, delta: String, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob], output_index: Integer, sequence_number: Integer, ?type: :"response.output_text.delta" @@ -37,10 +41,58 @@ module OpenAI content_index: Integer, delta: String, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob], output_index: Integer, sequence_number: Integer, type: :"response.output_text.delta" } + + type logprob = + { + token: String, + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + } + + class Logprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor logprob: Float + + attr_reader top_logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob]? 
+ + def top_logprobs=: ( + ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + ) -> ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + + def initialize: ( + token: String, + logprob: Float, + ?top_logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + ) -> void + + def to_hash: -> { + token: String, + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] + } + + type top_logprob = { token: String, logprob: Float } + + class TopLogprob < OpenAI::Internal::Type::BaseModel + attr_reader token: String? + + def token=: (String) -> String + + attr_reader logprob: Float? + + def logprob=: (Float) -> Float + + def initialize: (?token: String, ?logprob: Float) -> void + + def to_hash: -> { token: String, logprob: Float } + end + end end end end diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 742fb5e3..7e42d6a5 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { content_index: Integer, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob], output_index: Integer, sequence_number: Integer, text: String, @@ -16,6 +17,8 @@ module OpenAI attr_accessor item_id: String + attr_accessor logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob] + attr_accessor output_index: Integer attr_accessor sequence_number: Integer @@ -27,6 +30,7 @@ module OpenAI def initialize: ( content_index: Integer, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob], output_index: Integer, sequence_number: Integer, text: String, @@ -36,11 +40,59 @@ module OpenAI def to_hash: -> { content_index: Integer, item_id: String, + logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob], output_index: Integer, sequence_number: Integer, text: String, type: :"response.output_text.done" } + + type logprob = + { + token: String, + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + } + + class Logprob < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor logprob: Float + + attr_reader top_logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob]? + + def top_logprobs=: ( + ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + ) -> ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + + def initialize: ( + token: String, + logprob: Float, + ?top_logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + ) -> void + + def to_hash: -> { + token: String, + logprob: Float, + top_logprobs: ::Array[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] + } + + type top_logprob = { token: String, logprob: Float } + + class TopLogprob < OpenAI::Internal::Type::BaseModel + attr_reader token: String? + + def token=: (String) -> String + + attr_reader logprob: Float? 
+
+          def logprob=: (Float) -> Float
+
+          def initialize: (?token: String, ?logprob: Float) -> void
+
+          def to_hash: -> { token: String, logprob: Float }
+        end
+      end
    end
  end
end

From 928be808a50be5b89b1415026c72d97204e94101 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 22 Jul 2025 13:02:55 +0000
Subject: [PATCH 264/295] chore(internal): version bump

---
 .release-please-manifest.json | 2 +-
 Gemfile.lock                  | 2 +-
 README.md                     | 2 +-
 lib/openai/version.rb         | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index a26ebfc1..8f3e0a49 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.14.0"
+  ".": "0.15.0"
 }
\ No newline at end of file
diff --git a/Gemfile.lock b/Gemfile.lock
index 8bfb5ff5..ba6af0f0 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -11,7 +11,7 @@ GIT
 PATH
   remote: .
   specs:
-    openai (0.14.0)
+    openai (0.15.0)
       connection_pool

 GEM
diff --git a/README.md b/README.md
index 96d372c5..7df39ccb 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application

 ```ruby
-gem "openai", "~> 0.14.0"
+gem "openai", "~> 0.15.0"
 ```

diff --git a/lib/openai/version.rb b/lib/openai/version.rb
index ef837985..3bc41d60 100644
--- a/lib/openai/version.rb
+++ b/lib/openai/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true

 module OpenAI
-  VERSION = "0.14.0"
+  VERSION = "0.15.0"
 end

From 00eef2bb66a1fb75d99ef8fa073ac7ca08e7ef90 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 25 Jul 2025 18:19:40 +0000
Subject: [PATCH 265/295] chore: update contribute.md

---
 CONTRIBUTING.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 97d0df78..28f9cf3b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -78,7 +78,7 @@ $ bundle exec rake test

 ## Linting and formatting

-This repository uses [rubocop](https://github.com/rubocop/rubocop) for linting and formatting of `*.rb` and `*.rbi` files. [syntax_tree](https://github.com/ruby-syntax-tree/syntax_tree) is used for formatting `*.rbs` files.
+This repository uses [rubocop](https://github.com/rubocop/rubocop) for linting and formatting of `*.rb` files, and [syntax_tree](https://github.com/ruby-syntax-tree/syntax_tree) is used for formatting of both `*.rbi` and `*.rbs` files.

 There are two separate type checkers supported by this library: [sorbet](https://github.com/sorbet/sorbet) and [steep](https://github.com/soutaro/steep) are used for verifying `*.rbi` and `*.rbs` files respectively.
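The shape of the streaming changes in the API patch earlier in this series is easier to see with a short consumption sketch. The example below is illustrative and not part of the patch series itself: `stream_raw`, the event classes, and their new `logprobs`, `item_id`, and `output_index` fields all come from the diffs above, while the model name and input are placeholder values, and whether the API actually populates `logprobs` depends on request options not shown in these patches.

```ruby
require "openai"

client = OpenAI::Client.new # reads ENV["OPENAI_API_KEY"]

stream = client.responses.stream_raw(model: "gpt-4.1", input: "Say hello.")

stream.each do |event|
  case event
  when OpenAI::Models::Responses::ResponseTextDeltaEvent
    print(event.delta)
    # Per-token log probabilities are new on text delta events in this series.
    event.logprobs.each do |lp|
      warn(format("%p => %.4f", lp.token, lp.logprob))
    end
  when OpenAI::Models::Responses::ResponseMcpCallCompletedEvent
    # `item_id` and `output_index` are also new; previously this event only
    # carried `sequence_number`.
    warn("MCP call #{event.item_id} (output ##{event.output_index}) completed")
  end
end
```

Note that the same series deletes `ResponseReasoningDeltaEvent` and `ResponseReasoningDoneEvent`, so any `case` arm matching those classes has to be dropped when upgrading past this point.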
From 4c718bc4ff2ac9ebb2c798037d82c30e8fa32882 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 28 Jul 2025 15:12:27 +0000 Subject: [PATCH 266/295] fix(internal): ensure sorbet test always runs serially --- test/openai/internal/sorbet_runtime_support_test.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/openai/internal/sorbet_runtime_support_test.rb b/test/openai/internal/sorbet_runtime_support_test.rb index d1179f7e..a3ad6c5a 100644 --- a/test/openai/internal/sorbet_runtime_support_test.rb +++ b/test/openai/internal/sorbet_runtime_support_test.rb @@ -3,6 +3,8 @@ require_relative "../test_helper" class OpenAI::Test::SorbetRuntimeSupportTest < Minitest::Test + extend Minitest::Serial + i_suck_and_my_tests_are_order_dependent! module E From 31fa1f1877ba7fba6298cc01ca4e59ab2e2812fb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 12:40:58 +0000 Subject: [PATCH 267/295] feat(api): manual updates --- .stats.yml | 6 +- .../chat/chat_completion_store_message.rb | 33 ++++++++- .../models/chat/completion_create_params.rb | 36 ++++++++-- lib/openai/models/responses/response.rb | 36 ++++++++-- .../responses/response_create_params.rb | 36 ++++++++-- lib/openai/resources/chat/completions.rb | 16 +++-- lib/openai/resources/responses.rb | 16 +++-- .../chat/chat_completion_store_message.rbi | 71 ++++++++++++++++++- .../models/chat/completion_create_params.rbi | 50 +++++++++++-- rbi/openai/models/responses/response.rbi | 50 +++++++++++-- .../responses/response_create_params.rbi | 50 +++++++++++-- rbi/openai/resources/chat/completions.rbi | 40 +++++++++-- rbi/openai/resources/responses.rbi | 40 +++++++++-- .../chat/chat_completion_store_message.rbs | 32 ++++++++- .../models/chat/completion_create_params.rbs | 14 ++++ sig/openai/models/responses/response.rbs | 14 ++++ .../responses/response_create_params.rbs | 14 ++++ sig/openai/resources/chat/completions.rbs | 4 ++ sig/openai/resources/responses.rbs | 4 ++ test/openai/resources/responses_test.rb | 6 ++ 20 files changed, 505 insertions(+), 63 deletions(-) diff --git a/.stats.yml b/.stats.yml index 6fdd98ee..e567ce69 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-b2a451656ca64d30d174391ebfd94806b4de3ab76dc55b92843cfb7f1a54ecb6.yml -openapi_spec_hash: 27d9691b400f28c17ef063a1374048b0 -config_hash: e822d0c9082c8b312264403949243179 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-721e6ccaa72205ee14c71f8163129920464fb814b95d3df9567a9476bbd9b7fb.yml +openapi_spec_hash: 2115413a21df8b5bf9e4552a74df4312 +config_hash: 9606bb315a193bfd8da0459040143242 diff --git a/lib/openai/models/chat/chat_completion_store_message.rb b/lib/openai/models/chat/chat_completion_store_message.rb index cd926898..f63c57fe 100644 --- a/lib/openai/models/chat/chat_completion_store_message.rb +++ b/lib/openai/models/chat/chat_completion_store_message.rb @@ -10,10 +10,41 @@ class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # @return [String] required :id, String - # @!method initialize(id:) + # @!attribute content_parts + # If a content parts array was provided, this is an array of `text` and + # `image_url` parts. Otherwise, null. 
+ # + # @return [Array, nil] + optional :content_parts, + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionStoreMessage::ContentPart] + }, + nil?: true + + # @!method initialize(id:, content_parts: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionStoreMessage} for more details. + # # A chat completion message generated by the model. # # @param id [String] The identifier of the chat message. + # + # @param content_parts [Array, nil] If a content parts array was provided, this is an array of `text` and `image_url + + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). + module ContentPart + extend OpenAI::Internal::Type::Union + + # Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). + variant -> { OpenAI::Chat::ChatCompletionContentPartText } + + # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). + variant -> { OpenAI::Chat::ChatCompletionContentPartImage } + + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage)] + end end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index a2a2bbd9..6ed151f0 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -182,6 +182,14 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :presence_penalty, Float, nil?: true + # @!attribute prompt_cache_key + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + # + # @return [String, nil] + optional :prompt_cache_key, String + # @!attribute reasoning_effort # **o-series models only** # @@ -208,6 +216,16 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Chat::CompletionCreateParams::ResponseFormat } + # @!attribute safety_identifier + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + # + # @return [String, nil] + optional :safety_identifier, String + # @!attribute seed # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and @@ -315,9 +333,13 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!attribute user - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # @deprecated + # + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. 
Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). # # @return [String, nil] optional :user, String @@ -330,7 +352,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # @@ -366,10 +388,14 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param seed [Integer, nil] This feature is in Beta. # # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. @@ -390,7 +416,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. 
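The two fields added above split the roles that `user` used to play: `safety_identifier` is the stable per-user value for abuse detection, while `prompt_cache_key` groups similar requests for cache reuse. A minimal sketch of a call using both — the client setup, model name, and SHA-256 hashing scheme are illustrative assumptions, not requirements of the SDK:

```ruby
require "digest"
require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

completion = client.chat.completions.create(
  model: "gpt-4.1",
  messages: [{role: "user", content: "Say hello."}],
  # Hash the account identifier so no raw email or username reaches the API.
  safety_identifier: Digest::SHA256.hexdigest("user@example.com"),
  # One key per prompt "shape" improves prompt-cache hit rates.
  prompt_cache_key: "greeting-prompt-v1"
)
puts completion.choices.first.message.content
```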
# diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 4b4281a3..923aeb29 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -171,6 +171,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponsePrompt, nil] optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true + # @!attribute prompt_cache_key + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + # + # @return [String, nil] + optional :prompt_cache_key, String + # @!attribute reasoning # **o-series models only** # @@ -180,6 +188,16 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true + # @!attribute safety_identifier + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + # + # @return [String, nil] + optional :safety_identifier, String + # @!attribute service_tier # Specifies the processing type used for serving the request. # @@ -246,14 +264,18 @@ class Response < OpenAI::Internal::Type::BaseModel optional :usage, -> { OpenAI::Responses::ResponseUsage } # @!attribute user - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # @deprecated + # + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). # # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. 
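Because the `Response` model itself now declares these attributes, they can also be read back from a created or retrieved response. A small sketch — `resp_123` is a placeholder ID, and each attribute is simply `nil` when it was not set on the originating request:

```ruby
response = client.responses.retrieve("resp_123")
puts response.prompt_cache_key   # nil unless set on the request
puts response.safety_identifier  # likewise optional
puts response.service_tier       # e.g. "default", when populated
```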
# @@ -293,8 +315,12 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, @@ -307,7 +333,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param object [Symbol, :response] The object type of this resource - always set to `response`. diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 2bc6f814..56a26016 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -123,6 +123,14 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponsePrompt, nil] optional :prompt, -> { OpenAI::Responses::ResponsePrompt }, nil?: true + # @!attribute prompt_cache_key + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + # + # @return [String, nil] + optional :prompt_cache_key, String + # @!attribute reasoning # **o-series models only** # @@ -132,6 +140,16 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true + # @!attribute safety_identifier + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + # + # @return [String, nil] + optional :safety_identifier, String + # @!attribute service_tier # Specifies the processing type used for serving the request. # @@ -236,14 +254,18 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :truncation, enum: -> { OpenAI::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute user - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # @deprecated + # + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. 
A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). # # @return [String, nil] optional :user, String - # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # @@ -269,8 +291,12 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via @@ -289,7 +315,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 44b63a90..42081fac 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -30,7 +30,7 @@ class Completions # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
# - # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -64,10 +64,14 @@ class Completions # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param seed [Integer, nil] This feature is in Beta. # # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. @@ -88,7 +92,7 @@ class Completions # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # @@ -135,7 +139,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
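For orientation alongside the `stream_raw` signature change below, a minimal streaming sketch; the model and prompt are placeholders, and iteration assumes the enumerable stream of `ChatCompletionChunk` objects that `stream_raw` yields:

```ruby
stream = client.chat.completions.stream_raw(
  model: "gpt-4.1",
  messages: [{role: "user", content: "Count to three."}]
)

stream.each do |chunk|
  # Deltas can be nil on role-only or finish chunks, hence the safe navigation.
  print chunk.choices.first&.delta&.content
end
```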
# - # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -169,10 +173,14 @@ def create(params) # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param seed [Integer, nil] This feature is in Beta. # # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. @@ -193,7 +201,7 @@ def create(params) # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 48d2ca88..2435e51c 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,7 +23,7 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
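The same pair of fields carries over to the Responses API, as the overload below shows. A minimal sketch of a `responses.create` call using them — the model, input, and key values are illustrative:

```ruby
require "digest"

response = client.responses.create(
  model: "gpt-4.1",
  input: "Write a haiku about caching.",
  prompt_cache_key: "haiku-demo-v1",
  safety_identifier: Digest::SHA256.hexdigest("user@example.com")
)
pp response.output # inspect the returned output items
```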
# - # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -47,8 +47,12 @@ class Responses # # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. # + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via @@ -67,7 +71,7 @@ class Responses # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # @@ -106,7 +110,7 @@ def create(params = {}) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -130,8 +134,12 @@ def create(params = {}) # # @param prompt [OpenAI::Models::Responses::ResponsePrompt, nil] Reference to a prompt template and its variables. 
# + # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi + # # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # + # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi + # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the processing type used for serving the request. # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via @@ -150,7 +158,7 @@ def create(params = {}) # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A stable identifier for your end-users. + # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/rbi/openai/models/chat/chat_completion_store_message.rbi b/rbi/openai/models/chat/chat_completion_store_message.rbi index c19850a1..dee06a9d 100644 --- a/rbi/openai/models/chat/chat_completion_store_message.rbi +++ b/rbi/openai/models/chat/chat_completion_store_message.rbi @@ -18,17 +18,82 @@ module OpenAI sig { returns(String) } attr_accessor :id + # If a content parts array was provided, this is an array of `text` and + # `image_url` parts. Otherwise, null. + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Chat::ChatCompletionStoreMessage::ContentPart::Variants + ] + ) + ) + end + attr_accessor :content_parts + # A chat completion message generated by the model. - sig { params(id: String).returns(T.attached_class) } + sig do + params( + id: String, + content_parts: + T.nilable( + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionContentPartText::OrHash, + OpenAI::Chat::ChatCompletionContentPartImage::OrHash + ) + ] + ) + ).returns(T.attached_class) + end def self.new( # The identifier of the chat message. - id: + id:, + # If a content parts array was provided, this is an array of `text` and + # `image_url` parts. Otherwise, null. + content_parts: nil ) end - sig { override.returns({ id: String }) } + sig do + override.returns( + { + id: String, + content_parts: + T.nilable( + T::Array[ + OpenAI::Chat::ChatCompletionStoreMessage::ContentPart::Variants + ] + ) + } + ) + end def to_hash end + + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). + module ContentPart + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionContentPartText, + OpenAI::Chat::ChatCompletionContentPartImage + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionStoreMessage::ContentPart::Variants + ] + ) + end + def self.variants + end + end end end end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index c1bfdd13..3e6fc91b 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -216,6 +216,15 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :presence_penalty + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). 
+ sig { returns(T.nilable(String)) } + attr_reader :prompt_cache_key + + sig { params(prompt_cache_key: String).void } + attr_writer :prompt_cache_key + # **o-series models only** # # Constrains effort on reasoning for @@ -260,6 +269,17 @@ module OpenAI end attr_writer :response_format + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + sig { returns(T.nilable(String)) } + attr_reader :safety_identifier + + sig { params(safety_identifier: String).void } + attr_writer :safety_identifier + # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and # parameters should return the same result. Determinism is not guaranteed, and you @@ -389,9 +409,11 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :top_p - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). sig { returns(T.nilable(String)) } attr_reader :user @@ -455,6 +477,7 @@ module OpenAI prediction: T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), + prompt_cache_key: String, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: T.any( @@ -462,6 +485,7 @@ module OpenAI OpenAI::ResponseFormatJSONSchema::OrHash, OpenAI::ResponseFormatJSONObject::OrHash ), + safety_identifier: String, seed: T.nilable(Integer), service_tier: T.nilable( @@ -586,6 +610,10 @@ module OpenAI # whether they appear in the text so far, increasing the model's likelihood to # talk about new topics. presence_penalty: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Constrains effort on reasoning for @@ -604,6 +632,12 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. response_format: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # This feature is in Beta. 
If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and # parameters should return the same result. Determinism is not guaranteed, and you @@ -670,9 +704,11 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the @@ -722,6 +758,7 @@ module OpenAI prediction: T.nilable(OpenAI::Chat::ChatCompletionPredictionContent), presence_penalty: T.nilable(Float), + prompt_cache_key: String, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: T.any( @@ -729,6 +766,7 @@ module OpenAI OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject ), + safety_identifier: String, seed: T.nilable(Integer), service_tier: T.nilable( diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index fa42d7d6..3cdfaa54 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -165,6 +165,15 @@ module OpenAI end attr_writer :prompt + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + sig { returns(T.nilable(String)) } + attr_reader :prompt_cache_key + + sig { params(prompt_cache_key: String).void } + attr_writer :prompt_cache_key + # **o-series models only** # # Configuration options for @@ -175,6 +184,17 @@ module OpenAI sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + sig { returns(T.nilable(String)) } + attr_reader :safety_identifier + + sig { params(safety_identifier: String).void } + attr_writer :safety_identifier + # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -247,9 +267,11 @@ module OpenAI sig { params(usage: OpenAI::Responses::ResponseUsage::OrHash).void } attr_writer :usage - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. 
Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). sig { returns(T.nilable(String)) } attr_reader :user @@ -317,7 +339,9 @@ module OpenAI max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning::OrHash), + safety_identifier: String, service_tier: T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), status: OpenAI::Responses::ResponseStatus::OrSymbol, @@ -417,11 +441,21 @@ module OpenAI # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). prompt: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -463,9 +497,11 @@ module OpenAI # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. usage: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # The object type of this resource - always set to `response`. 
object: :response @@ -496,7 +532,9 @@ module OpenAI max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning), + safety_identifier: String, service_tier: T.nilable( OpenAI::Responses::Response::ServiceTier::TaggedSymbol diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index fe649d45..00498f46 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -148,6 +148,15 @@ module OpenAI end attr_writer :prompt + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + sig { returns(T.nilable(String)) } + attr_reader :prompt_cache_key + + sig { params(prompt_cache_key: String).void } + attr_writer :prompt_cache_key + # **o-series models only** # # Configuration options for @@ -158,6 +167,17 @@ module OpenAI sig { params(reasoning: T.nilable(OpenAI::Reasoning::OrHash)).void } attr_writer :reasoning + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + sig { returns(T.nilable(String)) } + attr_reader :safety_identifier + + sig { params(safety_identifier: String).void } + attr_writer :safety_identifier + # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -318,9 +338,11 @@ module OpenAI end attr_accessor :truncation - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). sig { returns(T.nilable(String)) } attr_reader :user @@ -348,7 +370,9 @@ module OpenAI parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning::OrHash), + safety_identifier: String, service_tier: T.nilable( OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol @@ -454,11 +478,21 @@ module OpenAI # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). prompt: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). 
+ prompt_cache_key: nil, # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -526,9 +560,11 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, request_options: {} ) @@ -556,7 +592,9 @@ module OpenAI parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning), + safety_identifier: String, service_tier: T.nilable( OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 4e98592f..dcc6e96a 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -65,6 +65,7 @@ module OpenAI prediction: T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), + prompt_cache_key: String, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: T.any( @@ -72,6 +73,7 @@ module OpenAI OpenAI::ResponseFormatJSONSchema::OrHash, OpenAI::ResponseFormatJSONObject::OrHash ), + safety_identifier: String, seed: T.nilable(Integer), service_tier: T.nilable( @@ -197,6 +199,10 @@ module OpenAI # whether they appear in the text so far, increasing the model's likelihood to # talk about new topics. presence_penalty: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Constrains effort on reasoning for @@ -215,6 +221,12 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. response_format: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. 
We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and # parameters should return the same result. Determinism is not guaranteed, and you @@ -281,9 +293,11 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the @@ -354,6 +368,7 @@ module OpenAI prediction: T.nilable(OpenAI::Chat::ChatCompletionPredictionContent::OrHash), presence_penalty: T.nilable(Float), + prompt_cache_key: String, reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), response_format: T.any( @@ -361,6 +376,7 @@ module OpenAI OpenAI::ResponseFormatJSONSchema::OrHash, OpenAI::ResponseFormatJSONObject::OrHash ), + safety_identifier: String, seed: T.nilable(Integer), service_tier: T.nilable( @@ -486,6 +502,10 @@ module OpenAI # whether they appear in the text so far, increasing the model's likelihood to # talk about new topics. presence_penalty: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Constrains effort on reasoning for @@ -504,6 +524,12 @@ module OpenAI # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. response_format: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and # parameters should return the same result. Determinism is not guaranteed, and you @@ -570,9 +596,11 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
+ # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index bac4f319..576bcc24 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -40,7 +40,9 @@ module OpenAI parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning::OrHash), + safety_identifier: String, service_tier: T.nilable( OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol @@ -147,11 +149,21 @@ module OpenAI # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). prompt: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -219,9 +231,11 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. 
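With `user` deprecated in favor of the two dedicated fields, existing call sites migrate mechanically. A before/after sketch with placeholder identifiers:

```ruby
# Before: one field served both abuse detection and cache bucketing.
client.responses.create(model: "gpt-4.1", input: prompt, user: "user_1234")

# After: keep the stable per-user value in `safety_identifier` and pick a
# coarser, prompt-shaped `prompt_cache_key` for cache bucketing.
client.responses.create(
  model: "gpt-4.1",
  input: prompt,
  safety_identifier: "user_1234",
  prompt_cache_key: "support-faq-v2"
)
```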
@@ -264,7 +278,9 @@ module OpenAI parallel_tool_calls: T.nilable(T::Boolean), previous_response_id: T.nilable(String), prompt: T.nilable(OpenAI::Responses::ResponsePrompt::OrHash), + prompt_cache_key: String, reasoning: T.nilable(OpenAI::Reasoning::OrHash), + safety_identifier: String, service_tier: T.nilable( OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol @@ -375,11 +391,21 @@ module OpenAI # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). prompt: nil, + # Used by OpenAI to cache responses for similar requests to optimize your cache + # hit rates. Replaces the `user` field. + # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). + prompt_cache_key: nil, # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). reasoning: nil, + # A stable identifier used to help detect users of your application that may be + # violating OpenAI's usage policies. The IDs should be a string that uniquely + # identifies each user. We recommend hashing their username or email address, in + # order to avoid sending us any identifying information. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). + safety_identifier: nil, # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier @@ -447,9 +473,11 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A stable identifier for your end-users. Used to boost cache hit rates by better - # bucketing similar requests and to help OpenAI detect and prevent abuse. - # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). + # This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use + # `prompt_cache_key` instead to maintain caching optimizations. A stable + # identifier for your end-users. Used to boost cache hit rates by better bucketing + # similar requests and to help OpenAI detect and prevent abuse. + # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 7b236cff..f40fd577 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -3,16 +3,42 @@ module OpenAI class ChatCompletionStoreMessage = Chat::ChatCompletionStoreMessage module Chat - type chat_completion_store_message = { id: String } + type chat_completion_store_message = + { + id: String, + content_parts: ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? + } class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage def id: -> String def id=: (String _) -> String - def initialize: (id: String) -> void + def content_parts: -> ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? - def to_hash: -> { id: String } + def content_parts=: ( + ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? 
_ + ) -> ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? + + def initialize: ( + id: String, + ?content_parts: ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? + ) -> void + + def to_hash: -> { + id: String, + content_parts: ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part]? + } + + type content_part = + OpenAI::Chat::ChatCompletionContentPartText + | OpenAI::Chat::ChatCompletionContentPartImage + + module ContentPart + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionStoreMessage::content_part] + end end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 00dd9ff5..298d3d31 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -19,8 +19,10 @@ module OpenAI parallel_tool_calls: bool, prediction: OpenAI::Chat::ChatCompletionPredictionContent?, presence_penalty: Float?, + prompt_cache_key: String, reasoning_effort: OpenAI::Models::reasoning_effort?, response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + safety_identifier: String, seed: Integer?, service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, @@ -82,6 +84,10 @@ module OpenAI attr_accessor presence_penalty: Float? + attr_reader prompt_cache_key: String? + + def prompt_cache_key=: (String) -> String + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? attr_reader response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format? @@ -90,6 +96,10 @@ module OpenAI OpenAI::Models::Chat::CompletionCreateParams::response_format ) -> OpenAI::Models::Chat::CompletionCreateParams::response_format + attr_reader safety_identifier: String? + + def safety_identifier=: (String) -> String + attr_accessor seed: Integer? attr_accessor service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier? 
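The `attr_reader` plus writer pairs above mean both fields are plain optional strings on Chat Completions as well. A comparable sketch, reusing the `client` from the earlier example; the model name is again a placeholder and `hashed_user_id` is a hypothetical value computed by the caller:

```ruby
hashed_user_id = Digest::SHA256.hexdigest("user-1234@example.com") # hypothetical

chat = client.chat.completions.create(
  model: "gpt-4.1", # placeholder model
  messages: [{role: "user", content: "Hello!"}],
  prompt_cache_key: "greeting-flow",
  safety_identifier: hashed_user_id
)
puts chat.choices.first.message.content
```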
@@ -145,8 +155,10 @@ module OpenAI ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, ?presence_penalty: Float?, + ?prompt_cache_key: String, ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + ?safety_identifier: String, ?seed: Integer?, ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, @@ -179,8 +191,10 @@ module OpenAI parallel_tool_calls: bool, prediction: OpenAI::Chat::ChatCompletionPredictionContent?, presence_penalty: Float?, + prompt_cache_key: String, reasoning_effort: OpenAI::Models::reasoning_effort?, response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + safety_identifier: String, seed: Integer?, service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index bc63e515..5e6ab27f 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -22,7 +22,9 @@ module OpenAI max_tool_calls: Integer?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, + prompt_cache_key: String, reasoning: OpenAI::Reasoning?, + safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Responses::ResponseTextConfig, @@ -71,8 +73,16 @@ module OpenAI attr_accessor prompt: OpenAI::Responses::ResponsePrompt? + attr_reader prompt_cache_key: String? + + def prompt_cache_key=: (String) -> String + attr_accessor reasoning: OpenAI::Reasoning? + attr_reader safety_identifier: String? + + def safety_identifier=: (String) -> String + attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier? attr_reader status: OpenAI::Models::Responses::response_status? @@ -120,7 +130,9 @@ module OpenAI ?max_tool_calls: Integer?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, + ?prompt_cache_key: String, ?reasoning: OpenAI::Reasoning?, + ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, ?text: OpenAI::Responses::ResponseTextConfig, @@ -151,7 +163,9 @@ module OpenAI max_tool_calls: Integer?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, + prompt_cache_key: String, reasoning: OpenAI::Reasoning?, + safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Responses::ResponseTextConfig, diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index da0ff6a2..bde15b88 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -14,7 +14,9 @@ module OpenAI parallel_tool_calls: bool?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, + prompt_cache_key: String, reasoning: OpenAI::Reasoning?, + safety_identifier: String, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, temperature: Float?, @@ -62,8 +64,16 @@ module OpenAI attr_accessor prompt: OpenAI::Responses::ResponsePrompt? 
+ attr_reader prompt_cache_key: String? + + def prompt_cache_key=: (String) -> String + attr_accessor reasoning: OpenAI::Reasoning? + attr_reader safety_identifier: String? + + def safety_identifier=: (String) -> String + attr_accessor service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier? attr_accessor store: bool? @@ -110,7 +120,9 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, + ?prompt_cache_key: String, ?reasoning: OpenAI::Reasoning?, + ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, @@ -136,7 +148,9 @@ module OpenAI parallel_tool_calls: bool?, previous_response_id: String?, prompt: OpenAI::Responses::ResponsePrompt?, + prompt_cache_key: String, reasoning: OpenAI::Reasoning?, + safety_identifier: String, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, temperature: Float?, diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 5e457166..79a01c27 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -21,8 +21,10 @@ module OpenAI ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, ?presence_penalty: Float?, + ?prompt_cache_key: String, ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + ?safety_identifier: String, ?seed: Integer?, ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, @@ -55,8 +57,10 @@ module OpenAI ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, ?presence_penalty: Float?, + ?prompt_cache_key: String, ?reasoning_effort: OpenAI::Models::reasoning_effort?, ?response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + ?safety_identifier: String, ?seed: Integer?, ?service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, ?stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 8c5129cc..e0d85fee 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -15,7 +15,9 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, + ?prompt_cache_key: String, ?reasoning: OpenAI::Reasoning?, + ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, @@ -41,7 +43,9 @@ module OpenAI ?parallel_tool_calls: bool?, ?previous_response_id: String?, ?prompt: OpenAI::Responses::ResponsePrompt?, + ?prompt_cache_key: String, ?reasoning: OpenAI::Reasoning?, + ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, ?temperature: Float?, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index c6f9f101..28d1d7e4 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -31,7 +31,9 @@ def test_create max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, + prompt_cache_key: String | nil, reasoning: 
OpenAI::Reasoning | nil, + safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, @@ -71,7 +73,9 @@ def test_retrieve max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, + prompt_cache_key: String | nil, reasoning: OpenAI::Reasoning | nil, + safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, @@ -119,7 +123,9 @@ def test_cancel max_tool_calls: Integer | nil, previous_response_id: String | nil, prompt: OpenAI::Responses::ResponsePrompt | nil, + prompt_cache_key: String | nil, reasoning: OpenAI::Reasoning | nil, + safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, text: OpenAI::Responses::ResponseTextConfig | nil, From 4a7a81d6b8b53605f6256d92a55fd884ed517823 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 12:49:33 +0000 Subject: [PATCH 268/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 8f3e0a49..b4e9013b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.15.0" + ".": "0.16.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index ba6af0f0..6eadce9d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.15.0) + openai (0.16.0) connection_pool GEM diff --git a/README.md b/README.md index 7df39ccb..7b1f57dd 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.15.0" +gem "openai", "~> 0.16.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 3bc41d60..a619a057 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.15.0" + VERSION = "0.16.0" end From 0560982b7966ee080b504205e32aa28f008bc864 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 31 Jul 2025 14:22:33 +0000 Subject: [PATCH 269/295] chore(internal): increase visibility of internal helper method --- lib/openai/internal/transport/base_client.rb | 2 +- rbi/openai/internal/transport/base_client.rbi | 2 +- sig/openai/internal/transport/base_client.rbs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 6b8c2d3a..9ff4effb 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -365,7 +365,7 @@ def initialize( # # @raise [OpenAI::Errors::APIError] # @return [Array(Integer, Net::HTTPResponse, Enumerable)] - private def send_request(request, redirect_count:, retry_count:, send_retry_header:) + def send_request(request, redirect_count:, retry_count:, send_retry_header:) url, headers, max_retries, timeout = request.fetch_values(:url, :headers, :max_retries, :timeout) input = {**request.except(:timeout), deadline: OpenAI::Internal::Util.monotonic_secs + timeout} diff --git a/rbi/openai/internal/transport/base_client.rbi b/rbi/openai/internal/transport/base_client.rbi index d41cf4f8..095d4476 100644 --- a/rbi/openai/internal/transport/base_client.rbi +++ b/rbi/openai/internal/transport/base_client.rbi @@ -221,7 +221,7 @@ module OpenAI send_retry_header: T::Boolean ).returns([Integer, Net::HTTPResponse, T::Enumerable[String]]) end - private def send_request( + def send_request( request, redirect_count:, retry_count:, diff --git a/sig/openai/internal/transport/base_client.rbs b/sig/openai/internal/transport/base_client.rbs index 109af718..db2e8ff4 100644 --- a/sig/openai/internal/transport/base_client.rbs +++ b/sig/openai/internal/transport/base_client.rbs @@ -99,7 +99,7 @@ module OpenAI retry_count: Integer ) -> Float - private def send_request: ( + def send_request: ( OpenAI::Internal::Transport::BaseClient::request_input request, redirect_count: Integer, retry_count: Integer, From 47ea4a65b3ebdf7b3c0caff668708da0c2fa49ac Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 5 Aug 2025 16:51:19 +0000 Subject: [PATCH 270/295] feat(api): manual updates --- .stats.yml | 6 +- lib/openai.rb | 4 +- .../responses/response_reasoning_item.rb | 41 +++++++-- .../response_reasoning_summary_delta_event.rb | 65 -------------- .../response_reasoning_summary_done_event.rb | 60 ------------- .../response_reasoning_text_delta_event.rb | 63 ++++++++++++++ .../response_reasoning_text_done_event.rb | 63 ++++++++++++++ .../models/responses/response_stream_event.rb | 20 ++--- .../models/vector_store_search_params.rb | 7 +- lib/openai/resources/responses.rb | 4 +- .../responses/response_reasoning_item.rbi | 67 ++++++++++++++- 
...response_reasoning_summary_delta_event.rbi | 85 ------------------- .../response_reasoning_text_delta_event.rbi | 83 ++++++++++++++++++ ...=> response_reasoning_text_done_event.rbi} | 40 ++++----- .../responses/response_stream_event.rbi | 6 +- .../models/vector_store_search_params.rbi | 13 ++- .../responses/response_reasoning_item.rbs | 21 +++++ ...> response_reasoning_text_delta_event.rbs} | 30 +++---- ...=> response_reasoning_text_done_event.rbs} | 22 ++--- .../responses/response_stream_event.rbs | 4 +- .../models/vector_store_search_params.rbs | 3 +- 21 files changed, 414 insertions(+), 293 deletions(-) delete mode 100644 lib/openai/models/responses/response_reasoning_summary_delta_event.rb delete mode 100644 lib/openai/models/responses/response_reasoning_summary_done_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_text_delta_event.rb create mode 100644 lib/openai/models/responses/response_reasoning_text_done_event.rb delete mode 100644 rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi create mode 100644 rbi/openai/models/responses/response_reasoning_text_delta_event.rbi rename rbi/openai/models/responses/{response_reasoning_summary_done_event.rbi => response_reasoning_text_done_event.rbi} (54%) rename sig/openai/models/responses/{response_reasoning_summary_delta_event.rbs => response_reasoning_text_delta_event.rbs} (50%) rename sig/openai/models/responses/{response_reasoning_summary_done_event.rbs => response_reasoning_text_done_event.rbs} (58%) diff --git a/.stats.yml b/.stats.yml index e567ce69..be4e281b 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-721e6ccaa72205ee14c71f8163129920464fb814b95d3df9567a9476bbd9b7fb.yml -openapi_spec_hash: 2115413a21df8b5bf9e4552a74df4312 -config_hash: 9606bb315a193bfd8da0459040143242 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d6a16b25b969c3e5382e7d413de15bf83d5f7534d5c3ecce64d3a7e847418f9e.yml +openapi_spec_hash: 0c0bcf4aee9ca2a948dd14b890dfe728 +config_hash: aeff9289bd7f8c8482e4d738c3c2fde1 diff --git a/lib/openai.rb b/lib/openai.rb index 11168cfb..f8016213 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -421,12 +421,12 @@ require_relative "openai/models/responses/response_prompt" require_relative "openai/models/responses/response_queued_event" require_relative "openai/models/responses/response_reasoning_item" -require_relative "openai/models/responses/response_reasoning_summary_delta_event" -require_relative "openai/models/responses/response_reasoning_summary_done_event" require_relative "openai/models/responses/response_reasoning_summary_part_added_event" require_relative "openai/models/responses/response_reasoning_summary_part_done_event" require_relative "openai/models/responses/response_reasoning_summary_text_delta_event" require_relative "openai/models/responses/response_reasoning_summary_text_done_event" +require_relative "openai/models/responses/response_reasoning_text_delta_event" +require_relative "openai/models/responses/response_reasoning_text_done_event" require_relative "openai/models/responses/response_refusal_delta_event" require_relative "openai/models/responses/response_refusal_done_event" require_relative "openai/models/responses/response_retrieve_params" diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index fa878448..3c4068fb 
100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -11,7 +11,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel required :id, String # @!attribute summary - # Reasoning text contents. + # Reasoning summary content. # # @return [Array] required :summary, @@ -23,6 +23,13 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :reasoning] required :type, const: :reasoning + # @!attribute content + # Reasoning text content. + # + # @return [Array, nil] + optional :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Content] } + # @!attribute encrypted_content # The encrypted content of the reasoning item - populated when a response is # generated with `reasoning.encrypted_content` in the `include` parameter. @@ -37,7 +44,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseReasoningItem::Status } - # @!method initialize(id:, summary:, encrypted_content: nil, status: nil, type: :reasoning) + # @!method initialize(id:, summary:, content: nil, encrypted_content: nil, status: nil, type: :reasoning) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseReasoningItem} for more details. # @@ -48,7 +55,9 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique identifier of the reasoning content. # - # @param summary [Array] Reasoning text contents. + # @param summary [Array] Reasoning summary content. + # + # @param content [Array] Reasoning text content. # # @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is # @@ -58,7 +67,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text - # A short summary of the reasoning used by the model when generating the response. + # A summary of the reasoning output from the model so far. # # @return [String] required :text, String @@ -73,11 +82,33 @@ class Summary < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseReasoningItem::Summary} for more details. # - # @param text [String] A short summary of the reasoning used by the model when generating + # @param text [String] A summary of the reasoning output from the model so far. # # @param type [Symbol, :summary_text] The type of the object. Always `summary_text`. end + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute text + # Reasoning text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the object. Always `reasoning_text`. + # + # @return [Symbol, :reasoning_text] + required :type, const: :reasoning_text + + # @!method initialize(text:, type: :reasoning_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningItem::Content} for more details. + # + # @param text [String] Reasoning text output from the model. + # + # @param type [Symbol, :reasoning_text] The type of the object. Always `reasoning_text`. + end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
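Because the new `content` field is optional (`nil` unless reasoning text is populated), reading it needs a guard. A hedged sketch of walking a response's output for the new parts, assuming `response` came from a reasoning-capable model:

```ruby
response.output.each do |item|
  next unless item.is_a?(OpenAI::Models::Responses::ResponseReasoningItem)

  item.summary.each { |part| puts "summary:   #{part.text}" }
  # New in this change: optional reasoning text parts alongside the summary.
  item.content&.each { |part| puts "reasoning: #{part.text}" }
end
```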
# diff --git a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb deleted file mode 100644 index f3299e9f..00000000 --- a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb +++ /dev/null @@ -1,65 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel - # @!attribute delta - # The partial update to the reasoning summary content. - # - # @return [Object] - required :delta, OpenAI::Internal::Type::Unknown - - # @!attribute item_id - # The unique identifier of the item for which the reasoning summary is being - # updated. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item in the response's output array. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute sequence_number - # The sequence number of this event. - # - # @return [Integer] - required :sequence_number, Integer - - # @!attribute summary_index - # The index of the summary part within the output item. - # - # @return [Integer] - required :summary_index, Integer - - # @!attribute type - # The type of the event. Always 'response.reasoning_summary.delta'. - # - # @return [Symbol, :"response.reasoning_summary.delta"] - required :type, const: :"response.reasoning_summary.delta" - - # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary.delta") - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent} for more - # details. - # - # Emitted when there is a delta (partial update) to the reasoning summary content. - # - # @param delta [Object] The partial update to the reasoning summary content. - # - # @param item_id [String] The unique identifier of the item for which the reasoning summary is being updat - # - # @param output_index [Integer] The index of the output item in the response's output array. - # - # @param sequence_number [Integer] The sequence number of this event. - # - # @param summary_index [Integer] The index of the summary part within the output item. - # - # @param type [Symbol, :"response.reasoning_summary.delta"] The type of the event. Always 'response.reasoning_summary.delta'. - end - end - end -end diff --git a/lib/openai/models/responses/response_reasoning_summary_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_done_event.rb deleted file mode 100644 index 42716381..00000000 --- a/lib/openai/models/responses/response_reasoning_summary_done_event.rb +++ /dev/null @@ -1,60 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel - # @!attribute item_id - # The unique identifier of the item for which the reasoning summary is finalized. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item in the response's output array. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute sequence_number - # The sequence number of this event. - # - # @return [Integer] - required :sequence_number, Integer - - # @!attribute summary_index - # The index of the summary part within the output item. 
- # - # @return [Integer] - required :summary_index, Integer - - # @!attribute text - # The finalized reasoning summary text. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the event. Always 'response.reasoning_summary.done'. - # - # @return [Symbol, :"response.reasoning_summary.done"] - required :type, const: :"response.reasoning_summary.done" - - # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary.done") - # Emitted when the reasoning summary content is finalized for an item. - # - # @param item_id [String] The unique identifier of the item for which the reasoning summary is finalized. - # - # @param output_index [Integer] The index of the output item in the response's output array. - # - # @param sequence_number [Integer] The sequence number of this event. - # - # @param summary_index [Integer] The index of the summary part within the output item. - # - # @param text [String] The finalized reasoning summary text. - # - # @param type [Symbol, :"response.reasoning_summary.done"] The type of the event. Always 'response.reasoning_summary.done'. - end - end - end -end diff --git a/lib/openai/models/responses/response_reasoning_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_text_delta_event.rb new file mode 100644 index 00000000..fc3380bb --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_text_delta_event.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningTextDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part this delta is associated with. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute delta + # The text delta that was added to the reasoning content. + # + # @return [String] + required :delta, String + + # @!attribute item_id + # The ID of the item this reasoning text delta is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this reasoning text delta is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always `response.reasoning_text.delta`. + # + # @return [Symbol, :"response.reasoning_text.delta"] + required :type, const: :"response.reasoning_text.delta" + + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.reasoning_text.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningTextDeltaEvent} for more details. + # + # Emitted when a delta is added to a reasoning text. + # + # @param content_index [Integer] The index of the reasoning content part this delta is associated with. + # + # @param delta [String] The text delta that was added to the reasoning content. + # + # @param item_id [String] The ID of the item this reasoning text delta is associated with. + # + # @param output_index [Integer] The index of the output item this reasoning text delta is associated with. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.reasoning_text.delta"] The type of the event. 
Always `response.reasoning_text.delta`. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_text_done_event.rb b/lib/openai/models/responses/response_reasoning_text_done_event.rb new file mode 100644 index 00000000..e07630bc --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_text_done_event.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningTextDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute item_id + # The ID of the item this reasoning text is associated with. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item this reasoning text is associated with. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute text + # The full text of the completed reasoning content. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always `response.reasoning_text.done`. + # + # @return [Symbol, :"response.reasoning_text.done"] + required :type, const: :"response.reasoning_text.done" + + # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.reasoning_text.done") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningTextDoneEvent} for more details. + # + # Emitted when a reasoning text is completed. + # + # @param content_index [Integer] The index of the reasoning content part. + # + # @param item_id [String] The ID of the item this reasoning text is associated with. + # + # @param output_index [Integer] The index of the output item this reasoning text is associated with. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param text [String] The full text of the completed reasoning content. + # + # @param type [Symbol, :"response.reasoning_text.done"] The type of the event. Always `response.reasoning_text.done`. + end + end + end +end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 21dec3eb..70b11b19 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -110,6 +110,12 @@ module ResponseStreamEvent variant :"response.reasoning_summary_text.done", -> { OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent } + # Emitted when a delta is added to a reasoning text. + variant :"response.reasoning_text.delta", -> { OpenAI::Responses::ResponseReasoningTextDeltaEvent } + + # Emitted when a reasoning text is completed. + variant :"response.reasoning_text.done", -> { OpenAI::Responses::ResponseReasoningTextDoneEvent } + # Emitted when there is a partial refusal text. variant :"response.refusal.delta", -> { OpenAI::Responses::ResponseRefusalDeltaEvent } @@ -191,20 +197,8 @@ module ResponseStreamEvent # Emitted when a response is queued and waiting to be processed. variant :"response.queued", -> { OpenAI::Responses::ResponseQueuedEvent } - # Emitted when there is a delta (partial update) to the reasoning summary content. 
- variant :"response.reasoning_summary.delta", - -> { - OpenAI::Responses::ResponseReasoningSummaryDeltaEvent - } - - # Emitted when the reasoning summary content is finalized for an item. - variant :"response.reasoning_summary.done", - -> { - OpenAI::Responses::ResponseReasoningSummaryDoneEvent - } - # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, 
OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseReasoningTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent)] end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index c1ae6419..fdf4e91f 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -85,6 +85,7 @@ module Filters class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker + # Enable re-ranking; set to `none` to disable, which can help reduce latency. 
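A short sketch of the new `none` ranker documented here; the vector store ID, query, and threshold are placeholders. Disabling re-ranking trades ranking quality for latency, per the comment above:

```ruby
results = client.vector_stores.search(
  "vs_123", # placeholder vector store ID
  query: "quarterly revenue",
  ranking_options: {ranker: :none, score_threshold: 0.5}
)
results.each { |hit| puts "#{hit.filename}: #{hit.score}" }
```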
# # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::VectorStoreSearchParams::RankingOptions::Ranker } @@ -97,13 +98,17 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] + # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] Enable re-ranking; set to `none` to disable, which can help reduce latency. + # # @param score_threshold [Float] + # Enable re-ranking; set to `none` to disable, which can help reduce latency. + # # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum + NONE = :none AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 2435e51c..12db704d 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -162,7 +162,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params = {}) @@ -235,7 +235,7 @@ def retrieve(response_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve_streaming(response_id, params = {}) diff --git a/rbi/openai/models/responses/response_reasoning_item.rbi b/rbi/openai/models/responses/response_reasoning_item.rbi index 0317e535..328f2828 100644 --- a/rbi/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/openai/models/responses/response_reasoning_item.rbi @@ -16,7 +16,7 @@ module OpenAI sig { returns(String) } attr_accessor :id - # Reasoning text contents. + # Reasoning summary content. sig do returns(T::Array[OpenAI::Responses::ResponseReasoningItem::Summary]) end @@ -26,6 +26,26 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # Reasoning text content. + sig do + returns( + T.nilable( + T::Array[OpenAI::Responses::ResponseReasoningItem::Content] + ) + ) + end + attr_reader :content + + sig do + params( + content: + T::Array[ + OpenAI::Responses::ResponseReasoningItem::Content::OrHash + ] + ).void + end + attr_writer :content + # The encrypted content of the reasoning item - populated when a response is # generated with `reasoning.encrypted_content` in the `include` parameter. sig { returns(T.nilable(String)) } @@ -60,6 +80,10 @@ module OpenAI T::Array[ OpenAI::Responses::ResponseReasoningItem::Summary::OrHash ], + content: + T::Array[ + OpenAI::Responses::ResponseReasoningItem::Content::OrHash + ], encrypted_content: T.nilable(String), status: OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol, type: Symbol @@ -68,8 +92,10 @@ module OpenAI def self.new( # The unique identifier of the reasoning content. id:, - # Reasoning text contents. + # Reasoning summary content. summary:, + # Reasoning text content. + content: nil, # The encrypted content of the reasoning item - populated when a response is # generated with `reasoning.encrypted_content` in the `include` parameter. 
encrypted_content: nil, @@ -88,6 +114,8 @@ module OpenAI summary: T::Array[OpenAI::Responses::ResponseReasoningItem::Summary], type: Symbol, + content: + T::Array[OpenAI::Responses::ResponseReasoningItem::Content], encrypted_content: T.nilable(String), status: OpenAI::Responses::ResponseReasoningItem::Status::OrSymbol } @@ -105,7 +133,7 @@ module OpenAI ) end - # A short summary of the reasoning used by the model when generating the response. + # A summary of the reasoning output from the model so far. sig { returns(String) } attr_accessor :text @@ -115,7 +143,7 @@ module OpenAI sig { params(text: String, type: Symbol).returns(T.attached_class) } def self.new( - # A short summary of the reasoning used by the model when generating the response. + # A summary of the reasoning output from the model so far. text:, # The type of the object. Always `summary_text`. type: :summary_text @@ -127,6 +155,37 @@ module OpenAI end end + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningItem::Content, + OpenAI::Internal::AnyHash + ) + end + + # Reasoning text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the object. Always `reasoning_text`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new( + # Reasoning text output from the model. + text:, + # The type of the object. Always `reasoning_text`. + type: :reasoning_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. module Status diff --git a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi deleted file mode 100644 index c92fd014..00000000 --- a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi +++ /dev/null @@ -1,85 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Responses - class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, - OpenAI::Internal::AnyHash - ) - end - - # The partial update to the reasoning summary content. - sig { returns(T.anything) } - attr_accessor :delta - - # The unique identifier of the item for which the reasoning summary is being - # updated. - sig { returns(String) } - attr_accessor :item_id - - # The index of the output item in the response's output array. - sig { returns(Integer) } - attr_accessor :output_index - - # The sequence number of this event. - sig { returns(Integer) } - attr_accessor :sequence_number - - # The index of the summary part within the output item. - sig { returns(Integer) } - attr_accessor :summary_index - - # The type of the event. Always 'response.reasoning_summary.delta'. - sig { returns(Symbol) } - attr_accessor :type - - # Emitted when there is a delta (partial update) to the reasoning summary content. - sig do - params( - delta: T.anything, - item_id: String, - output_index: Integer, - sequence_number: Integer, - summary_index: Integer, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The partial update to the reasoning summary content. - delta:, - # The unique identifier of the item for which the reasoning summary is being - # updated. 
- item_id:, - # The index of the output item in the response's output array. - output_index:, - # The sequence number of this event. - sequence_number:, - # The index of the summary part within the output item. - summary_index:, - # The type of the event. Always 'response.reasoning_summary.delta'. - type: :"response.reasoning_summary.delta" - ) - end - - sig do - override.returns( - { - delta: T.anything, - item_id: String, - output_index: Integer, - sequence_number: Integer, - summary_index: Integer, - type: Symbol - } - ) - end - def to_hash - end - end - end - end -end diff --git a/rbi/openai/models/responses/response_reasoning_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_text_delta_event.rbi new file mode 100644 index 00000000..54336041 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_text_delta_event.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningTextDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningTextDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The index of the reasoning content part this delta is associated with. + sig { returns(Integer) } + attr_accessor :content_index + + # The text delta that was added to the reasoning content. + sig { returns(String) } + attr_accessor :delta + + # The ID of the item this reasoning text delta is associated with. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item this reasoning text delta is associated with. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always `response.reasoning_text.delta`. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a delta is added to a reasoning text. + sig do + params( + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the reasoning content part this delta is associated with. + content_index:, + # The text delta that was added to the reasoning content. + delta:, + # The ID of the item this reasoning text delta is associated with. + item_id:, + # The index of the output item this reasoning text delta is associated with. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always `response.reasoning_text.delta`. 
+ type: :"response.reasoning_text.delta" + ) + end + + sig do + override.returns( + { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi b/rbi/openai/models/responses/response_reasoning_text_done_event.rbi similarity index 54% rename from rbi/openai/models/responses/response_reasoning_summary_done_event.rbi rename to rbi/openai/models/responses/response_reasoning_text_done_event.rbi index eead2395..2561422b 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_text_done_event.rbi @@ -3,20 +3,24 @@ module OpenAI module Models module Responses - class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + class ResponseReasoningTextDoneEvent < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do T.any( - OpenAI::Responses::ResponseReasoningSummaryDoneEvent, + OpenAI::Responses::ResponseReasoningTextDoneEvent, OpenAI::Internal::AnyHash ) end - # The unique identifier of the item for which the reasoning summary is finalized. + # The index of the reasoning content part. + sig { returns(Integer) } + attr_accessor :content_index + + # The ID of the item this reasoning text is associated with. sig { returns(String) } attr_accessor :item_id - # The index of the output item in the response's output array. + # The index of the output item this reasoning text is associated with. sig { returns(Integer) } attr_accessor :output_index @@ -24,52 +28,48 @@ module OpenAI sig { returns(Integer) } attr_accessor :sequence_number - # The index of the summary part within the output item. - sig { returns(Integer) } - attr_accessor :summary_index - - # The finalized reasoning summary text. + # The full text of the completed reasoning content. sig { returns(String) } attr_accessor :text - # The type of the event. Always 'response.reasoning_summary.done'. + # The type of the event. Always `response.reasoning_text.done`. sig { returns(Symbol) } attr_accessor :type - # Emitted when the reasoning summary content is finalized for an item. + # Emitted when a reasoning text is completed. sig do params( + content_index: Integer, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, text: String, type: Symbol ).returns(T.attached_class) end def self.new( - # The unique identifier of the item for which the reasoning summary is finalized. + # The index of the reasoning content part. + content_index:, + # The ID of the item this reasoning text is associated with. item_id:, - # The index of the output item in the response's output array. + # The index of the output item this reasoning text is associated with. output_index:, # The sequence number of this event. sequence_number:, - # The index of the summary part within the output item. - summary_index:, - # The finalized reasoning summary text. + # The full text of the completed reasoning content. text:, - # The type of the event. Always 'response.reasoning_summary.done'. - type: :"response.reasoning_summary.done" + # The type of the event. Always `response.reasoning_text.done`. 
+ type: :"response.reasoning_text.done" ) end sig do override.returns( { + content_index: Integer, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, text: String, type: Symbol } diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index 8b5e96b2..e6269fa0 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -38,6 +38,8 @@ module OpenAI OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, + OpenAI::Responses::ResponseReasoningTextDeltaEvent, + OpenAI::Responses::ResponseReasoningTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextDeltaEvent, @@ -58,9 +60,7 @@ module OpenAI OpenAI::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, - OpenAI::Responses::ResponseQueuedEvent, - OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, - OpenAI::Responses::ResponseReasoningSummaryDoneEvent + OpenAI::Responses::ResponseQueuedEvent ) end diff --git a/rbi/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi index 3d78d7c9..74b8d1bd 100644 --- a/rbi/openai/models/vector_store_search_params.rbi +++ b/rbi/openai/models/vector_store_search_params.rbi @@ -157,6 +157,7 @@ module OpenAI ) end + # Enable re-ranking; set to `none` to disable, which can help reduce latency. sig do returns( T.nilable( @@ -188,7 +189,11 @@ module OpenAI score_threshold: Float ).returns(T.attached_class) end - def self.new(ranker: nil, score_threshold: nil) + def self.new( + # Enable re-ranking; set to `none` to disable, which can help reduce latency. + ranker: nil, + score_threshold: nil + ) end sig do @@ -203,6 +208,7 @@ module OpenAI def to_hash end + # Enable re-ranking; set to `none` to disable, which can help reduce latency. module Ranker extend OpenAI::Internal::Type::Enum @@ -215,6 +221,11 @@ module OpenAI end OrSymbol = T.type_alias { T.any(Symbol, String) } + NONE = + T.let( + :none, + OpenAI::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol + ) AUTO = T.let( :auto, diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 620ee9dc..e4e79c73 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -6,6 +6,7 @@ module OpenAI id: String, summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], type: :reasoning, + content: ::Array[OpenAI::Responses::ResponseReasoningItem::Content], encrypted_content: String?, status: OpenAI::Models::Responses::ResponseReasoningItem::status } @@ -17,6 +18,12 @@ module OpenAI attr_accessor type: :reasoning + attr_reader content: ::Array[OpenAI::Responses::ResponseReasoningItem::Content]? + + def content=: ( + ::Array[OpenAI::Responses::ResponseReasoningItem::Content] + ) -> ::Array[OpenAI::Responses::ResponseReasoningItem::Content] + attr_accessor encrypted_content: String? attr_reader status: OpenAI::Models::Responses::ResponseReasoningItem::status? 
@@ -28,6 +35,7 @@ module OpenAI def initialize: ( id: String, summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], + ?content: ::Array[OpenAI::Responses::ResponseReasoningItem::Content], ?encrypted_content: String?, ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, ?type: :reasoning @@ -37,6 +45,7 @@ module OpenAI id: String, summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], type: :reasoning, + content: ::Array[OpenAI::Responses::ResponseReasoningItem::Content], encrypted_content: String?, status: OpenAI::Models::Responses::ResponseReasoningItem::status } @@ -53,6 +62,18 @@ module OpenAI def to_hash: -> { text: String, type: :summary_text } end + type content = { text: String, type: :reasoning_text } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :reasoning_text + + def initialize: (text: String, ?type: :reasoning_text) -> void + + def to_hash: -> { text: String, type: :reasoning_text } + end + type status = :in_progress | :completed | :incomplete module Status diff --git a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs b/sig/openai/models/responses/response_reasoning_text_delta_event.rbs similarity index 50% rename from sig/openai/models/responses/response_reasoning_summary_delta_event.rbs rename to sig/openai/models/responses/response_reasoning_text_delta_event.rbs index 4e613606..a8d33a4f 100644 --- a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_text_delta_event.rbs @@ -1,18 +1,20 @@ module OpenAI module Models module Responses - type response_reasoning_summary_delta_event = + type response_reasoning_text_delta_event = { - delta: top, + content_index: Integer, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, - type: :"response.reasoning_summary.delta" + type: :"response.reasoning_text.delta" } - class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor delta: top + class ResponseReasoningTextDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + + attr_accessor delta: String attr_accessor item_id: String @@ -20,26 +22,24 @@ module OpenAI attr_accessor sequence_number: Integer - attr_accessor summary_index: Integer - - attr_accessor type: :"response.reasoning_summary.delta" + attr_accessor type: :"response.reasoning_text.delta" def initialize: ( - delta: top, + content_index: Integer, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, - ?type: :"response.reasoning_summary.delta" + ?type: :"response.reasoning_text.delta" ) -> void def to_hash: -> { - delta: top, + content_index: Integer, + delta: String, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, - type: :"response.reasoning_summary.delta" + type: :"response.reasoning_text.delta" } end end diff --git a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs b/sig/openai/models/responses/response_reasoning_text_done_event.rbs similarity index 58% rename from sig/openai/models/responses/response_reasoning_summary_done_event.rbs rename to sig/openai/models/responses/response_reasoning_text_done_event.rbs index cb56e84f..9e3712b0 100644 --- a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_text_done_event.rbs @@ 
-1,45 +1,45 @@ module OpenAI module Models module Responses - type response_reasoning_summary_done_event = + type response_reasoning_text_done_event = { + content_index: Integer, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, text: String, - type: :"response.reasoning_summary.done" + type: :"response.reasoning_text.done" } - class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + class ResponseReasoningTextDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + attr_accessor item_id: String attr_accessor output_index: Integer attr_accessor sequence_number: Integer - attr_accessor summary_index: Integer - attr_accessor text: String - attr_accessor type: :"response.reasoning_summary.done" + attr_accessor type: :"response.reasoning_text.done" def initialize: ( + content_index: Integer, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, text: String, - ?type: :"response.reasoning_summary.done" + ?type: :"response.reasoning_text.done" ) -> void def to_hash: -> { + content_index: Integer, item_id: String, output_index: Integer, sequence_number: Integer, - summary_index: Integer, text: String, - type: :"response.reasoning_summary.done" + type: :"response.reasoning_text.done" } end end diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 21677586..731c96f4 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -30,6 +30,8 @@ module OpenAI | OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent | OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent | OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent + | OpenAI::Responses::ResponseReasoningTextDeltaEvent + | OpenAI::Responses::ResponseReasoningTextDoneEvent | OpenAI::Responses::ResponseRefusalDeltaEvent | OpenAI::Responses::ResponseRefusalDoneEvent | OpenAI::Responses::ResponseTextDeltaEvent @@ -51,8 +53,6 @@ module OpenAI | OpenAI::Responses::ResponseMcpListToolsInProgressEvent | OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent | OpenAI::Responses::ResponseQueuedEvent - | OpenAI::Responses::ResponseReasoningSummaryDeltaEvent - | OpenAI::Responses::ResponseReasoningSummaryDoneEvent module ResponseStreamEvent extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 0ad7493a..aecf7fd7 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -99,11 +99,12 @@ module OpenAI score_threshold: Float } - type ranker = :auto | :"default-2024-11-15" + type ranker = :none | :auto | :"default-2024-11-15" module Ranker extend OpenAI::Internal::Type::Enum + NONE: :none AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" From bfaf6eca72174be0990bbbdc325a0673baf2616e Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 7 Aug 2025 17:02:57 +0000 Subject: [PATCH 271/295] feat(api): adds GPT-5 and new API features: platform.openai.com/docs/guides/gpt-5 --- .stats.yml | 6 +- README.md | 32 +- lib/openai.rb | 17 + lib/openai/internal/type/enum.rb | 12 +- lib/openai/internal/type/union.rb | 30 +- lib/openai/models.rb | 6 + .../models/beta/assistant_create_params.rb | 9 +- .../models/beta/assistant_update_params.rb | 27 +- 
.../models/beta/threads/run_create_params.rb | 9 +- .../chat_completion_allowed_tool_choice.rb | 33 ++ .../chat/chat_completion_allowed_tools.rb | 64 ++++ ...chat_completion_assistant_message_param.rb | 8 +- .../chat/chat_completion_custom_tool.rb | 163 +++++++++ .../chat/chat_completion_function_tool.rb | 29 ++ .../models/chat/chat_completion_message.rb | 8 +- ...hat_completion_message_custom_tool_call.rb | 60 ++++ ...t_completion_message_function_tool_call.rb | 67 ++++ .../chat/chat_completion_message_tool_call.rb | 60 +--- .../chat/chat_completion_named_tool_choice.rb | 4 +- ...hat_completion_named_tool_choice_custom.rb | 42 +++ .../chat/chat_completion_stream_options.rb | 15 +- .../models/chat/chat_completion_tool.rb | 26 +- .../chat_completion_tool_choice_option.rb | 8 +- .../models/chat/completion_create_params.rb | 51 ++- lib/openai/models/chat_model.rb | 7 + lib/openai/models/custom_tool_input_format.rb | 76 ++++ ...create_eval_completions_run_data_source.rb | 6 +- .../models/evals/run_cancel_response.rb | 4 +- lib/openai/models/evals/run_create_params.rb | 4 +- .../models/evals/run_create_response.rb | 4 +- lib/openai/models/evals/run_list_response.rb | 4 +- .../models/evals/run_retrieve_response.rb | 4 +- lib/openai/models/reasoning.rb | 9 +- lib/openai/models/reasoning_effort.rb | 8 +- .../models/response_format_text_grammar.rb | 27 ++ .../models/response_format_text_python.rb | 20 ++ lib/openai/models/responses/custom_tool.rb | 48 +++ lib/openai/models/responses/response.rb | 58 ++- .../responses/response_create_params.rb | 82 ++++- .../responses/response_custom_tool_call.rb | 55 +++ ...onse_custom_tool_call_input_delta_event.rb | 52 +++ ...ponse_custom_tool_call_input_done_event.rb | 52 +++ .../response_custom_tool_call_output.rb | 47 +++ .../models/responses/response_input_item.rb | 8 +- .../models/responses/response_output_item.rb | 5 +- .../response_output_item_added_event.rb | 4 +- .../response_output_item_done_event.rb | 4 +- .../responses/response_retrieve_params.rb | 15 +- .../models/responses/response_stream_event.rb | 10 +- lib/openai/models/responses/tool.rb | 6 +- .../models/responses/tool_choice_allowed.rb | 73 ++++ .../models/responses/tool_choice_custom.rb | 28 ++ lib/openai/resources/beta/assistants.rb | 4 +- lib/openai/resources/beta/threads/runs.rb | 4 +- lib/openai/resources/chat/completions.rb | 20 +- lib/openai/resources/responses.rb | 36 +- rbi/openai/models.rbi | 6 + .../models/beta/assistant_create_params.rbi | 14 +- .../models/beta/assistant_update_params.rbi | 44 ++- .../models/beta/threads/run_create_params.rbi | 14 +- .../chat_completion_allowed_tool_choice.rbi | 60 ++++ .../chat/chat_completion_allowed_tools.rbi | 118 ++++++ ...hat_completion_assistant_message_param.rbi | 31 +- .../chat/chat_completion_custom_tool.rbi | 335 ++++++++++++++++++ .../chat/chat_completion_function_tool.rbi | 51 +++ .../models/chat/chat_completion_message.rbi | 21 +- ...at_completion_message_custom_tool_call.rbi | 105 ++++++ ..._completion_message_function_tool_call.rbi | 114 ++++++ .../chat_completion_message_tool_call.rbi | 97 +---- .../chat_completion_named_tool_choice.rbi | 4 +- ...at_completion_named_tool_choice_custom.rbi | 89 +++++ .../chat/chat_completion_stream_options.rbi | 32 +- .../models/chat/chat_completion_tool.rbi | 41 +-- .../chat_completion_tool_choice_option.rbi | 4 +- .../models/chat/completion_create_params.rbi | 142 ++++++-- rbi/openai/models/chat_model.rbi | 11 + .../models/custom_tool_input_format.rbi | 136 +++++++ 
...reate_eval_completions_run_data_source.rbi | 12 +- .../models/evals/run_cancel_response.rbi | 2 + rbi/openai/models/evals/run_create_params.rbi | 4 + .../models/evals/run_create_response.rbi | 2 + rbi/openai/models/evals/run_list_response.rbi | 2 + .../models/evals/run_retrieve_response.rbi | 2 + rbi/openai/models/reasoning.rbi | 14 +- rbi/openai/models/reasoning_effort.rbi | 8 +- .../models/response_format_text_grammar.rbi | 35 ++ .../models/response_format_text_python.rbi | 30 ++ rbi/openai/models/responses/custom_tool.rbi | 96 +++++ rbi/openai/models/responses/response.rbi | 68 +++- .../responses/response_create_params.rbi | 164 ++++++++- .../responses/response_custom_tool_call.rbi | 78 ++++ ...nse_custom_tool_call_input_delta_event.rbi | 75 ++++ ...onse_custom_tool_call_input_done_event.rbi | 75 ++++ .../response_custom_tool_call_output.rbi | 65 ++++ .../models/responses/response_input_item.rbi | 2 + .../models/responses/response_output_item.rbi | 3 +- .../response_output_item_added_event.rbi | 3 +- .../response_output_item_done_event.rbi | 3 +- .../responses/response_retrieve_params.rbi | 21 ++ .../responses/response_stream_event.rbi | 4 +- rbi/openai/models/responses/tool.rbi | 1 + .../models/responses/tool_choice_allowed.rbi | 124 +++++++ .../models/responses/tool_choice_custom.rbi | 39 ++ rbi/openai/resources/beta/assistants.rbi | 14 +- rbi/openai/resources/beta/threads/runs.rbi | 18 +- rbi/openai/resources/chat/completions.rbi | 66 +++- rbi/openai/resources/responses.rbi | 62 +++- sig/openai/models.rbs | 6 + .../models/beta/assistant_update_params.rbs | 12 + .../chat_completion_allowed_tool_choice.rbs | 29 ++ .../chat/chat_completion_allowed_tools.rbs | 38 ++ ...hat_completion_assistant_message_param.rbs | 12 +- .../chat/chat_completion_custom_tool.rbs | 137 +++++++ .../chat/chat_completion_function_tool.rbs | 26 ++ .../models/chat/chat_completion_message.rbs | 12 +- ...at_completion_message_custom_tool_call.rbs | 46 +++ ..._completion_message_function_tool_call.rbs | 46 +++ .../chat_completion_message_tool_call.rbs | 41 +-- ...at_completion_named_tool_choice_custom.rbs | 39 ++ .../chat/chat_completion_stream_options.rbs | 14 +- .../models/chat/chat_completion_tool.rbs | 21 +- .../chat_completion_tool_choice_option.rbs | 2 + .../models/chat/completion_create_params.rbs | 29 +- sig/openai/models/chat_model.rbs | 16 +- .../models/custom_tool_input_format.rbs | 61 ++++ ...reate_eval_completions_run_data_source.rbs | 12 +- sig/openai/models/reasoning_effort.rbs | 3 +- .../models/response_format_text_grammar.rbs | 15 + .../models/response_format_text_python.rbs | 13 + sig/openai/models/responses/custom_tool.rbs | 43 +++ sig/openai/models/responses/response.rbs | 23 +- .../responses/response_create_params.rbs | 38 +- .../responses/response_custom_tool_call.rbs | 44 +++ ...nse_custom_tool_call_input_delta_event.rbs | 42 +++ ...onse_custom_tool_call_input_done_event.rbs | 42 +++ .../response_custom_tool_call_output.rbs | 39 ++ .../models/responses/response_input_item.rbs | 2 + .../models/responses/response_output_item.rbs | 1 + .../responses/response_retrieve_params.rbs | 7 + .../responses/response_stream_event.rbs | 2 + sig/openai/models/responses/tool.rbs | 1 + .../models/responses/tool_choice_allowed.rbs | 43 +++ .../models/responses/tool_choice_custom.rbs | 17 + sig/openai/resources/chat/completions.rbs | 6 +- sig/openai/resources/responses.rbs | 6 + test/openai/client_test.rb | 30 +- test/openai/resources/beta/assistants_test.rb | 2 +- .../openai/resources/chat/completions_test.rb 
| 2 +- test/openai/resources/responses_test.rb | 9 +- 149 files changed, 4577 insertions(+), 583 deletions(-) create mode 100644 lib/openai/models/chat/chat_completion_allowed_tool_choice.rb create mode 100644 lib/openai/models/chat/chat_completion_allowed_tools.rb create mode 100644 lib/openai/models/chat/chat_completion_custom_tool.rb create mode 100644 lib/openai/models/chat/chat_completion_function_tool.rb create mode 100644 lib/openai/models/chat/chat_completion_message_custom_tool_call.rb create mode 100644 lib/openai/models/chat/chat_completion_message_function_tool_call.rb create mode 100644 lib/openai/models/chat/chat_completion_named_tool_choice_custom.rb create mode 100644 lib/openai/models/custom_tool_input_format.rb create mode 100644 lib/openai/models/response_format_text_grammar.rb create mode 100644 lib/openai/models/response_format_text_python.rb create mode 100644 lib/openai/models/responses/custom_tool.rb create mode 100644 lib/openai/models/responses/response_custom_tool_call.rb create mode 100644 lib/openai/models/responses/response_custom_tool_call_input_delta_event.rb create mode 100644 lib/openai/models/responses/response_custom_tool_call_input_done_event.rb create mode 100644 lib/openai/models/responses/response_custom_tool_call_output.rb create mode 100644 lib/openai/models/responses/tool_choice_allowed.rb create mode 100644 lib/openai/models/responses/tool_choice_custom.rb create mode 100644 rbi/openai/models/chat/chat_completion_allowed_tool_choice.rbi create mode 100644 rbi/openai/models/chat/chat_completion_allowed_tools.rbi create mode 100644 rbi/openai/models/chat/chat_completion_custom_tool.rbi create mode 100644 rbi/openai/models/chat/chat_completion_function_tool.rbi create mode 100644 rbi/openai/models/chat/chat_completion_message_custom_tool_call.rbi create mode 100644 rbi/openai/models/chat/chat_completion_message_function_tool_call.rbi create mode 100644 rbi/openai/models/chat/chat_completion_named_tool_choice_custom.rbi create mode 100644 rbi/openai/models/custom_tool_input_format.rbi create mode 100644 rbi/openai/models/response_format_text_grammar.rbi create mode 100644 rbi/openai/models/response_format_text_python.rbi create mode 100644 rbi/openai/models/responses/custom_tool.rbi create mode 100644 rbi/openai/models/responses/response_custom_tool_call.rbi create mode 100644 rbi/openai/models/responses/response_custom_tool_call_input_delta_event.rbi create mode 100644 rbi/openai/models/responses/response_custom_tool_call_input_done_event.rbi create mode 100644 rbi/openai/models/responses/response_custom_tool_call_output.rbi create mode 100644 rbi/openai/models/responses/tool_choice_allowed.rbi create mode 100644 rbi/openai/models/responses/tool_choice_custom.rbi create mode 100644 sig/openai/models/chat/chat_completion_allowed_tool_choice.rbs create mode 100644 sig/openai/models/chat/chat_completion_allowed_tools.rbs create mode 100644 sig/openai/models/chat/chat_completion_custom_tool.rbs create mode 100644 sig/openai/models/chat/chat_completion_function_tool.rbs create mode 100644 sig/openai/models/chat/chat_completion_message_custom_tool_call.rbs create mode 100644 sig/openai/models/chat/chat_completion_message_function_tool_call.rbs create mode 100644 sig/openai/models/chat/chat_completion_named_tool_choice_custom.rbs create mode 100644 sig/openai/models/custom_tool_input_format.rbs create mode 100644 sig/openai/models/response_format_text_grammar.rbs create mode 100644 sig/openai/models/response_format_text_python.rbs create mode 100644 
sig/openai/models/responses/custom_tool.rbs create mode 100644 sig/openai/models/responses/response_custom_tool_call.rbs create mode 100644 sig/openai/models/responses/response_custom_tool_call_input_delta_event.rbs create mode 100644 sig/openai/models/responses/response_custom_tool_call_input_done_event.rbs create mode 100644 sig/openai/models/responses/response_custom_tool_call_output.rbs create mode 100644 sig/openai/models/responses/tool_choice_allowed.rbs create mode 100644 sig/openai/models/responses/tool_choice_custom.rbs diff --git a/.stats.yml b/.stats.yml index be4e281b..c3c07bdb 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d6a16b25b969c3e5382e7d413de15bf83d5f7534d5c3ecce64d3a7e847418f9e.yml -openapi_spec_hash: 0c0bcf4aee9ca2a948dd14b890dfe728 -config_hash: aeff9289bd7f8c8482e4d738c3c2fde1 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f5c45f4ae5c2075cbc603d6910bba3da31c23714c209fbd3fd82a94f634a126b.yml +openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba +config_hash: 9a64321968e21ed72f5c0e02164ea00d diff --git a/README.md b/README.md index 7b1f57dd..95da0d39 100644 --- a/README.md +++ b/README.md @@ -30,10 +30,7 @@ openai = OpenAI::Client.new( api_key: ENV["OPENAI_API_KEY"] # This is the default and can be omitted ) -chat_completion = openai.chat.completions.create( - messages: [{role: "user", content: "Say this is a test"}], - model: :"gpt-4.1" -) +chat_completion = openai.chat.completions.create(messages: [{role: "user", content: "Say this is a test"}], model: :"gpt-5") puts(chat_completion) ``` @@ -45,7 +42,7 @@ We provide support for streaming responses using Server-Sent Events (SSE). 
```ruby stream = openai.chat.completions.stream_raw( messages: [{role: "user", content: "Say this is a test"}], - model: :"gpt-4.1" + model: :"gpt-5" ) stream.each do |completion| @@ -154,7 +151,7 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {max_retries: 5} ) ``` @@ -172,7 +169,7 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( messages: [{role: "user", content: "How can I list all files in a directory using Python?"}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {timeout: 5} ) ``` @@ -207,7 +204,7 @@ Note: the `extra_` parameters of the same name overrides the documented paramete chat_completion = openai.chat.completions.create( messages: [{role: "user", content: "How can I get the name of the current day in JavaScript?"}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: { extra_query: {my_query_parameter: value}, extra_body: {my_body_parameter: value}, @@ -255,7 +252,7 @@ You can provide typesafe request parameters like so: ```ruby openai.chat.completions.create( messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")], - model: :"gpt-4.1" + model: :"gpt-5" ) ``` @@ -263,15 +260,12 @@ Or, equivalently: ```ruby # Hashes work, but are not typesafe: -openai.chat.completions.create( - messages: [{role: "user", content: "Say this is a test"}], - model: :"gpt-4.1" -) +openai.chat.completions.create(messages: [{role: "user", content: "Say this is a test"}], model: :"gpt-5") # You can also splat a full Params class: params = OpenAI::Chat::CompletionCreateParams.new( messages: [OpenAI::Chat::ChatCompletionUserMessageParam.new(content: "Say this is a test")], - model: :"gpt-4.1" + model: :"gpt-5" ) openai.chat.completions.create(**params) ``` @@ -281,11 +275,11 @@ openai.chat.completions.create(**params) Since this library does not depend on `sorbet-runtime`, it cannot provide [`T::Enum`](https://sorbet.org/docs/tenum) instances. 
Instead, we provide "tagged symbols" instead, which is always a primitive at runtime: ```ruby -# :low -puts(OpenAI::ReasoningEffort::LOW) +# :minimal +puts(OpenAI::ReasoningEffort::MINIMAL) # Revealed type: `T.all(OpenAI::ReasoningEffort, Symbol)` -T.reveal_type(OpenAI::ReasoningEffort::LOW) +T.reveal_type(OpenAI::ReasoningEffort::MINIMAL) ``` Enum parameters have a "relaxed" type, so you can either pass in enum constants or their literal value: @@ -293,13 +287,13 @@ Enum parameters have a "relaxed" type, so you can either pass in enum constants ```ruby # Using the enum constants preserves the tagged type information: openai.chat.completions.create( - reasoning_effort: OpenAI::ReasoningEffort::LOW, + reasoning_effort: OpenAI::ReasoningEffort::MINIMAL, # … ) # Literal values are also permissible: openai.chat.completions.create( - reasoning_effort: :low, + reasoning_effort: :minimal, # … ) ``` diff --git a/lib/openai.rb b/lib/openai.rb index f8016213..a83087c5 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -178,6 +178,8 @@ require_relative "openai/models/beta/thread_stream_event" require_relative "openai/models/beta/thread_update_params" require_relative "openai/models/chat/chat_completion" +require_relative "openai/models/chat/chat_completion_allowed_tool_choice" +require_relative "openai/models/chat/chat_completion_allowed_tools" require_relative "openai/models/chat/chat_completion_assistant_message_param" require_relative "openai/models/chat/chat_completion_audio" require_relative "openai/models/chat/chat_completion_audio_param" @@ -187,14 +189,19 @@ require_relative "openai/models/chat/chat_completion_content_part_input_audio" require_relative "openai/models/chat/chat_completion_content_part_refusal" require_relative "openai/models/chat/chat_completion_content_part_text" +require_relative "openai/models/chat/chat_completion_custom_tool" require_relative "openai/models/chat/chat_completion_deleted" require_relative "openai/models/chat/chat_completion_developer_message_param" require_relative "openai/models/chat/chat_completion_function_call_option" require_relative "openai/models/chat/chat_completion_function_message_param" +require_relative "openai/models/chat/chat_completion_function_tool" +require_relative "openai/models/chat/chat_completion_message_custom_tool_call" +require_relative "openai/models/chat/chat_completion_message_function_tool_call" require_relative "openai/models/chat/chat_completion_message_param" require_relative "openai/models/chat/chat_completion_message_tool_call" require_relative "openai/models/chat/chat_completion_modality" require_relative "openai/models/chat/chat_completion_named_tool_choice" +require_relative "openai/models/chat/chat_completion_named_tool_choice_custom" require_relative "openai/models/chat/chat_completion_prediction_content" require_relative "openai/models/chat/chat_completion_reasoning_effort" require_relative "openai/models/chat/chat_completion_role" @@ -235,6 +242,7 @@ require_relative "openai/models/containers/file_retrieve_response" require_relative "openai/models/containers/files/content_retrieve_params" require_relative "openai/models/create_embedding_response" +require_relative "openai/models/custom_tool_input_format" require_relative "openai/models/embedding" require_relative "openai/models/embedding_create_params" require_relative "openai/models/embedding_model" @@ -343,7 +351,10 @@ require_relative "openai/models/response_format_json_object" require_relative "openai/models/response_format_json_schema" require_relative 
"openai/models/response_format_text" +require_relative "openai/models/response_format_text_grammar" +require_relative "openai/models/response_format_text_python" require_relative "openai/models/responses/computer_tool" +require_relative "openai/models/responses/custom_tool" require_relative "openai/models/responses/easy_input_message" require_relative "openai/models/responses/file_search_tool" require_relative "openai/models/responses/function_tool" @@ -369,6 +380,10 @@ require_relative "openai/models/responses/response_content_part_done_event" require_relative "openai/models/responses/response_created_event" require_relative "openai/models/responses/response_create_params" +require_relative "openai/models/responses/response_custom_tool_call" +require_relative "openai/models/responses/response_custom_tool_call_input_delta_event" +require_relative "openai/models/responses/response_custom_tool_call_input_done_event" +require_relative "openai/models/responses/response_custom_tool_call_output" require_relative "openai/models/responses/response_delete_params" require_relative "openai/models/responses/response_error" require_relative "openai/models/responses/response_error_event" @@ -440,6 +455,8 @@ require_relative "openai/models/responses/response_web_search_call_in_progress_event" require_relative "openai/models/responses/response_web_search_call_searching_event" require_relative "openai/models/responses/tool" +require_relative "openai/models/responses/tool_choice_allowed" +require_relative "openai/models/responses/tool_choice_custom" require_relative "openai/models/responses/tool_choice_function" require_relative "openai/models/responses/tool_choice_mcp" require_relative "openai/models/responses/tool_choice_options" diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 3ea232d9..70476264 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -19,11 +19,11 @@ module Type # @example # # `chat_model` is a `OpenAI::ChatModel` # case chat_model - # when OpenAI::ChatModel::GPT_4_1 + # when OpenAI::ChatModel::GPT_5 # # ... - # when OpenAI::ChatModel::GPT_4_1_MINI + # when OpenAI::ChatModel::GPT_5_MINI # # ... - # when OpenAI::ChatModel::GPT_4_1_NANO + # when OpenAI::ChatModel::GPT_5_NANO # # ... # else # puts(chat_model) @@ -31,11 +31,11 @@ module Type # # @example # case chat_model - # in :"gpt-4.1" + # in :"gpt-5" # # ... - # in :"gpt-4.1-mini" + # in :"gpt-5-mini" # # ... - # in :"gpt-4.1-nano" + # in :"gpt-5-nano" # # ... 
# else # puts(chat_model) diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 5b00bc39..dda813a2 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -6,28 +6,24 @@ module Type # @api private # # @example - # # `chat_completion_content_part` is a `OpenAI::Chat::ChatCompletionContentPart` - # case chat_completion_content_part - # when OpenAI::Chat::ChatCompletionContentPartText - # puts(chat_completion_content_part.text) - # when OpenAI::Chat::ChatCompletionContentPartImage - # puts(chat_completion_content_part.image_url) - # when OpenAI::Chat::ChatCompletionContentPartInputAudio - # puts(chat_completion_content_part.input_audio) + # # `custom_tool_input_format` is a `OpenAI::CustomToolInputFormat` + # case custom_tool_input_format + # when OpenAI::CustomToolInputFormat::Text + # puts(custom_tool_input_format.type) + # when OpenAI::CustomToolInputFormat::Grammar + # puts(custom_tool_input_format.definition) # else - # puts(chat_completion_content_part) + # puts(custom_tool_input_format) # end # # @example - # case chat_completion_content_part - # in {type: :text, text: text} - # puts(text) - # in {type: :image_url, image_url: image_url} - # puts(image_url) - # in {type: :input_audio, input_audio: input_audio} - # puts(input_audio) + # case custom_tool_input_format + # in {type: :text} + # # ... + # in {type: :grammar, definition: definition, syntax: syntax} + # puts(definition) # else - # puts(chat_completion_content_part) + # puts(custom_tool_input_format) # end module Union include OpenAI::Internal::Type::Converter diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 26243742..23993a69 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -93,6 +93,8 @@ module OpenAI CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat + Embedding = OpenAI::Models::Embedding EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams @@ -209,6 +211,10 @@ module OpenAI ResponseFormatText = OpenAI::Models::ResponseFormatText + ResponseFormatTextGrammar = OpenAI::Models::ResponseFormatTextGrammar + + ResponseFormatTextPython = OpenAI::Models::ResponseFormatTextPython + Responses = OpenAI::Models::Responses ResponsesModel = OpenAI::Models::ResponsesModel diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 84e6d083..6aa28dd3 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -49,12 +49,11 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel optional :name, String, nil?: true # @!attribute reasoning_effort - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true @@ -133,7 +132,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. 
The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 6f8f9b27..433bd650 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -49,12 +49,11 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel optional :name, String, nil?: true # @!attribute reasoning_effort - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true @@ -133,7 +132,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: # @@ -157,6 +156,18 @@ module Model variant String + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5_MINI } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5_NANO } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5_2025_08_07 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5_MINI_2025_08_07 } + + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_5_NANO_2025_08_07 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1 } variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } @@ -238,6 +249,12 @@ module Model # @!group + GPT_5 = :"gpt-5" + GPT_5_MINI = :"gpt-5-mini" + GPT_5_NANO = :"gpt-5-nano" + GPT_5_2025_08_07 = :"gpt-5-2025-08-07" + GPT_5_MINI_2025_08_07 = :"gpt-5-mini-2025-08-07" + GPT_5_NANO_2025_08_07 = :"gpt-5-nano-2025-08-07" GPT_4_1 = :"gpt-4.1" GPT_4_1_MINI = :"gpt-4.1-mini" GPT_4_1_NANO = :"gpt-4.1-nano" diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 9efd3f28..3c910eb7 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -107,12 +107,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean # @!attribute reasoning_effort - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true @@ -216,7 +215,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # diff --git a/lib/openai/models/chat/chat_completion_allowed_tool_choice.rb b/lib/openai/models/chat/chat_completion_allowed_tool_choice.rb new file mode 100644 index 00000000..bc81ddfb --- /dev/null +++ b/lib/openai/models/chat/chat_completion_allowed_tool_choice.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionAllowedToolChoice < OpenAI::Internal::Type::BaseModel + # @!attribute allowed_tools + # Constrains the tools available to the model to a pre-defined set. 
+ # + # @return [OpenAI::Models::Chat::ChatCompletionAllowedTools] + required :allowed_tools, -> { OpenAI::Chat::ChatCompletionAllowedTools } + + # @!attribute type + # Allowed tool configuration type. Always `allowed_tools`. + # + # @return [Symbol, :allowed_tools] + required :type, const: :allowed_tools + + # @!method initialize(allowed_tools:, type: :allowed_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAllowedToolChoice} for more details. + # + # Constrains the tools available to the model to a pre-defined set. + # + # @param allowed_tools [OpenAI::Models::Chat::ChatCompletionAllowedTools] Constrains the tools available to the model to a pre-defined set. + # + # @param type [Symbol, :allowed_tools] Allowed tool configuration type. Always `allowed_tools`. + end + end + + ChatCompletionAllowedToolChoice = Chat::ChatCompletionAllowedToolChoice + end +end diff --git a/lib/openai/models/chat/chat_completion_allowed_tools.rb b/lib/openai/models/chat/chat_completion_allowed_tools.rb new file mode 100644 index 00000000..59d2c94b --- /dev/null +++ b/lib/openai/models/chat/chat_completion_allowed_tools.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionAllowedTools < OpenAI::Internal::Type::BaseModel + # @!attribute mode + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + # + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAllowedTools::Mode] + required :mode, enum: -> { OpenAI::Chat::ChatCompletionAllowedTools::Mode } + + # @!attribute tools + # A list of tool definitions that the model should be allowed to call. + # + # For the Chat Completions API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "function": { "name": "get_weather" } }, + # { "type": "function", "function": { "name": "get_time" } } + # ] + # ``` + # + # @return [Array<Hash{Symbol=>Object}>] + required :tools, + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]] + + # @!method initialize(mode:, tools:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionAllowedTools} for more details. + # + # Constrains the tools available to the model to a pre-defined set. + # + # @param mode [Symbol, OpenAI::Models::Chat::ChatCompletionAllowedTools::Mode] Constrains the tools available to the model to a pre-defined set. + # + # @param tools [Array<Hash{Symbol=>Object}>] A list of tool definitions that the model should be allowed to call. + + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools.
+ # + # @see OpenAI::Models::Chat::ChatCompletionAllowedTools#mode + module Mode + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + REQUIRED = :required + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 338351a8..ddff94b5 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -55,11 +55,9 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionMessageToolCall] } # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see @@ -77,7 +75,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # # @param refusal [String, nil] The refusal message by the assistant. # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the messages author, in this case `assistant`. diff --git a/lib/openai/models/chat/chat_completion_custom_tool.rb b/lib/openai/models/chat/chat_completion_custom_tool.rb new file mode 100644 index 00000000..fed4564e --- /dev/null +++ b/lib/openai/models/chat/chat_completion_custom_tool.rb @@ -0,0 +1,163 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionCustomTool < OpenAI::Internal::Type::BaseModel + # @!attribute custom + # Properties of the custom tool. + # + # @return [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom] + required :custom, -> { OpenAI::Chat::ChatCompletionCustomTool::Custom } + + # @!attribute type + # The type of the custom tool. Always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!method initialize(custom:, type: :custom) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionCustomTool} for more details. + # + # A custom tool that processes input using a specified format. + # + # @param custom [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom] Properties of the custom tool. + # + # @param type [Symbol, :custom] The type of the custom tool. Always `custom`. + + # @see OpenAI::Models::Chat::ChatCompletionCustomTool#custom + class Custom < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the custom tool, used to identify it in tool calls. + # + # @return [String] + required :name, String + + # @!attribute description + # Optional description of the custom tool, used to provide more context. + # + # @return [String, nil] + optional :description, String + + # @!attribute format_ + # The input format for the custom tool. Default is unconstrained text. 
+ # + # @return [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Text, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar, nil] + optional :format_, + union: -> { + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format + }, + api_name: :format + + # @!method initialize(name:, description: nil, format_: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionCustomTool::Custom} for more details. + # + # Properties of the custom tool. + # + # @param name [String] The name of the custom tool, used to identify it in tool calls. + # + # @param description [String] Optional description of the custom tool, used to provide more context. + # + # @param format_ [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Text, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar] The input format for the custom tool. Default is unconstrained text. + + # The input format for the custom tool. Default is unconstrained text. + # + # @see OpenAI::Models::Chat::ChatCompletionCustomTool::Custom#format_ + module Format + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Unconstrained free-form text. + variant :text, -> { OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text } + + # A grammar defined by the user. + variant :grammar, -> { OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar } + + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute type + # Unconstrained text format. Always `text`. + # + # @return [Symbol, :text] + required :type, const: :text + + # @!method initialize(type: :text) + # Unconstrained free-form text. + # + # @param type [Symbol, :text] Unconstrained text format. Always `text`. + end + + class Grammar < OpenAI::Internal::Type::BaseModel + # @!attribute grammar + # Your chosen grammar. + # + # @return [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar] + required :grammar, + -> { + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar + } + + # @!attribute type + # Grammar format. Always `grammar`. + # + # @return [Symbol, :grammar] + required :type, const: :grammar + + # @!method initialize(grammar:, type: :grammar) + # A grammar defined by the user. + # + # @param grammar [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar] Your chosen grammar. + # + # @param type [Symbol, :grammar] Grammar format. Always `grammar`. + + # @see OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar#grammar + class Grammar < OpenAI::Internal::Type::BaseModel + # @!attribute definition + # The grammar definition. + # + # @return [String] + required :definition, String + + # @!attribute syntax + # The syntax of the grammar definition. One of `lark` or `regex`. + # + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax] + required :syntax, + enum: -> { + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax + } + + # @!method initialize(definition:, syntax:) + # Your chosen grammar. + # + # @param definition [String] The grammar definition. + # + # @param syntax [Symbol, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax] The syntax of the grammar definition. One of `lark` or `regex`. + + # The syntax of the grammar definition. One of `lark` or `regex`. 
+ # + # @see OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar#syntax + module Syntax + extend OpenAI::Internal::Type::Enum + + LARK = :lark + REGEX = :regex + + # @!method self.values + # @return [Array] + end + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Text, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar)] + end + end + end + end + + ChatCompletionCustomTool = Chat::ChatCompletionCustomTool + end +end diff --git a/lib/openai/models/chat/chat_completion_function_tool.rb b/lib/openai/models/chat/chat_completion_function_tool.rb new file mode 100644 index 00000000..dbedf8e5 --- /dev/null +++ b/lib/openai/models/chat/chat_completion_function_tool.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionFunctionTool < OpenAI::Internal::Type::BaseModel + # @!attribute function + # + # @return [OpenAI::Models::FunctionDefinition] + required :function, -> { OpenAI::FunctionDefinition } + + # @!attribute type + # The type of the tool. Currently, only `function` is supported. + # + # @return [Symbol, :function] + required :type, const: :function + + # @!method initialize(function:, type: :function) + # A function tool that can be used to generate a response. + # + # @param function [OpenAI::Models::FunctionDefinition] + # + # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. + end + end + + ChatCompletionFunctionTool = Chat::ChatCompletionFunctionTool + end +end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 4459c8a6..e6823a60 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -50,11 +50,9 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionMessageToolCall] } # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see @@ -72,7 +70,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the author of this message. diff --git a/lib/openai/models/chat/chat_completion_message_custom_tool_call.rb b/lib/openai/models/chat/chat_completion_message_custom_tool_call.rb new file mode 100644 index 00000000..2d83c132 --- /dev/null +++ b/lib/openai/models/chat/chat_completion_message_custom_tool_call.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionMessageCustomToolCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The ID of the tool call. 
+ # + # @return [String] + required :id, String + + # @!attribute custom + # The custom tool that the model called. + # + # @return [OpenAI::Models::Chat::ChatCompletionMessageCustomToolCall::Custom] + required :custom, -> { OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom } + + # @!attribute type + # The type of the tool. Always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!method initialize(id:, custom:, type: :custom) + # A call to a custom tool created by the model. + # + # @param id [String] The ID of the tool call. + # + # @param custom [OpenAI::Models::Chat::ChatCompletionMessageCustomToolCall::Custom] The custom tool that the model called. + # + # @param type [Symbol, :custom] The type of the tool. Always `custom`. + + # @see OpenAI::Models::Chat::ChatCompletionMessageCustomToolCall#custom + class Custom < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The input for the custom tool call generated by the model. + # + # @return [String] + required :input, String + + # @!attribute name + # The name of the custom tool to call. + # + # @return [String] + required :name, String + + # @!method initialize(input:, name:) + # The custom tool that the model called. + # + # @param input [String] The input for the custom tool call generated by the model. + # + # @param name [String] The name of the custom tool to call. + end + end + end + + ChatCompletionMessageCustomToolCall = Chat::ChatCompletionMessageCustomToolCall + end +end diff --git a/lib/openai/models/chat/chat_completion_message_function_tool_call.rb b/lib/openai/models/chat/chat_completion_message_function_tool_call.rb new file mode 100644 index 00000000..a9514a10 --- /dev/null +++ b/lib/openai/models/chat/chat_completion_message_function_tool_call.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionMessageFunctionToolCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute function + # The function that the model called. + # + # @return [OpenAI::Models::Chat::ChatCompletionMessageFunctionToolCall::Function] + required :function, -> { OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function } + + # @!attribute type + # The type of the tool. Currently, only `function` is supported. + # + # @return [Symbol, :function] + required :type, const: :function + + # @!method initialize(id:, function:, type: :function) + # A call to a function tool created by the model. + # + # @param id [String] The ID of the tool call. + # + # @param function [OpenAI::Models::Chat::ChatCompletionMessageFunctionToolCall::Function] The function that the model called. + # + # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. + + # @see OpenAI::Models::Chat::ChatCompletionMessageFunctionToolCall#function + class Function < OpenAI::Internal::Type::BaseModel + # @!attribute arguments + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the function to call. 
+ # + # @return [String] + required :name, String + + # @!method initialize(arguments:, name:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::ChatCompletionMessageFunctionToolCall::Function} for more + # details. + # + # The function that the model called. + # + # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma + # + # @param name [String] The name of the function to call. + end + end + end + + ChatCompletionMessageFunctionToolCall = Chat::ChatCompletionMessageFunctionToolCall + end +end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index 99793fa0..15d9bd07 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -3,60 +3,20 @@ module OpenAI module Models module Chat - class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The ID of the tool call. - # - # @return [String] - required :id, String + # A call to a function tool created by the model. + module ChatCompletionMessageToolCall + extend OpenAI::Internal::Type::Union - # @!attribute function - # The function that the model called. - # - # @return [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] - required :function, -> { OpenAI::Chat::ChatCompletionMessageToolCall::Function } + discriminator :type - # @!attribute type - # The type of the tool. Currently, only `function` is supported. - # - # @return [Symbol, :function] - required :type, const: :function + # A call to a function tool created by the model. + variant :function, -> { OpenAI::Chat::ChatCompletionMessageFunctionToolCall } - # @!method initialize(id:, function:, type: :function) - # @param id [String] The ID of the tool call. - # - # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. - # - # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. + # A call to a custom tool created by the model. + variant :custom, -> { OpenAI::Chat::ChatCompletionMessageCustomToolCall } - # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function - class Function < OpenAI::Internal::Type::BaseModel - # @!attribute arguments - # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. - # - # @return [String] - required :arguments, String - - # @!attribute name - # The name of the function to call. - # - # @return [String] - required :name, String - - # @!method initialize(arguments:, name:) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function} for more - # details. - # - # The function that the model called. - # - # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma - # - # @param name [String] The name of the function to call. 
- end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionMessageFunctionToolCall, OpenAI::Models::Chat::ChatCompletionMessageCustomToolCall)] end end diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index 493e6c0c..a7cea41d 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -10,7 +10,7 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel required :function, -> { OpenAI::Chat::ChatCompletionNamedToolChoice::Function } # @!attribute type - # The type of the tool. Currently, only `function` is supported. + # For function calling, the type is always `function`. # # @return [Symbol, :function] required :type, const: :function @@ -21,7 +21,7 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] # - # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. + # @param type [Symbol, :function] For function calling, the type is always `function`. # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice_custom.rb b/lib/openai/models/chat/chat_completion_named_tool_choice_custom.rb new file mode 100644 index 00000000..cf2d854e --- /dev/null +++ b/lib/openai/models/chat/chat_completion_named_tool_choice_custom.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Chat + class ChatCompletionNamedToolChoiceCustom < OpenAI::Internal::Type::BaseModel + # @!attribute custom + # + # @return [OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom::Custom] + required :custom, -> { OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom } + + # @!attribute type + # For custom tool calling, the type is always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!method initialize(custom:, type: :custom) + # Specifies a tool the model should use. Use to force the model to call a specific + # custom tool. + # + # @param custom [OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom::Custom] + # + # @param type [Symbol, :custom] For custom tool calling, the type is always `custom`. + + # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom#custom + class Custom < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the custom tool to call. + # + # @return [String] + required :name, String + + # @!method initialize(name:) + # @param name [String] The name of the custom tool to call. + end + end + end + + ChatCompletionNamedToolChoiceCustom = Chat::ChatCompletionNamedToolChoiceCustom + end +end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index acb72bd9..8ce0d03e 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -4,6 +4,17 @@ module OpenAI module Models module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel + # @!attribute include_obfuscation + # When true, stream obfuscation will be enabled. 
Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + # + # @return [Boolean, nil] + optional :include_obfuscation, OpenAI::Internal::Type::Boolean + # @!attribute include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire @@ -16,12 +27,14 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :include_usage, OpenAI::Internal::Type::Boolean - # @!method initialize(include_usage: nil) + # @!method initialize(include_obfuscation: nil, include_usage: nil) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::ChatCompletionStreamOptions} for more details. # # Options for streaming response. Only set this when you set `stream: true`. # + # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds + # # @param include_usage [Boolean] If set, an additional chunk will be streamed before the `data: [DONE]` end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index d6ee8c94..a9a2facf 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -3,22 +3,20 @@ module OpenAI module Models module Chat - class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - # @!attribute function - # - # @return [OpenAI::Models::FunctionDefinition] - required :function, -> { OpenAI::FunctionDefinition } + # A function tool that can be used to generate a response. + module ChatCompletionTool + extend OpenAI::Internal::Type::Union - # @!attribute type - # The type of the tool. Currently, only `function` is supported. - # - # @return [Symbol, :function] - required :type, const: :function + discriminator :type - # @!method initialize(function:, type: :function) - # @param function [OpenAI::Models::FunctionDefinition] - # - # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. + # A function tool that can be used to generate a response. + variant :function, -> { OpenAI::Chat::ChatCompletionFunctionTool } + + # A custom tool that processes input using a specified format. + variant :custom, -> { OpenAI::Chat::ChatCompletionCustomTool } + + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionFunctionTool, OpenAI::Models::Chat::ChatCompletionCustomTool)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 73874236..cbb70889 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -18,9 +18,15 @@ module ChatCompletionToolChoiceOption # `none` means the model will not call any tool and instead generates a message. `auto` means the model can pick between generating a message or calling one or more tools. `required` means the model must call one or more tools. 
variant enum: -> { OpenAI::Chat::ChatCompletionToolChoiceOption::Auto } + # Constrains the tools available to the model to a pre-defined set. + variant -> { OpenAI::Chat::ChatCompletionAllowedToolChoice } + # Specifies a tool the model should use. Use to force the model to call a specific function. variant -> { OpenAI::Chat::ChatCompletionNamedToolChoice } + # Specifies a tool the model should use. Use to force the model to call a specific custom tool. + variant -> { OpenAI::Chat::ChatCompletionNamedToolChoiceCustom } + # `none` means the model will not call any tool and instead generates a message. # `auto` means the model can pick between generating a message or calling one or # more tools. `required` means the model must call one or more tools. @@ -36,7 +42,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] + # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom)] end end diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 6ed151f0..b81acb79 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -191,12 +191,11 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :prompt_cache_key, String # @!attribute reasoning_effort - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true @@ -303,16 +302,16 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no tools are present. `auto` is the default if tools # are present. # - # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom, nil] optional :tool_choice, union: -> { OpenAI::Chat::ChatCompletionToolChoiceOption } # @!attribute tools - # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # A list of tools the model may call. You can provide either + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) + # or [function tools](https://platform.openai.com/docs/guides/function-calling). 
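A hedged sketch of the widened `tools` and `tool_choice` values for chat completions. The constructors come from the models above; the nested hash shapes for `function` and the custom-tool entry follow the public API reference and are assumptions here, since `ChatCompletionCustomTool` and `Function` are defined outside this excerpt:

    tools = [
      {type: :function, function: {name: "get_weather"}}, # classic function tool
      {type: :custom, custom: {name: "code_exec"}}        # new custom tool
    ]

    # :auto / :none / :required still work, and a specific tool can be forced:
    force_function = OpenAI::Chat::ChatCompletionNamedToolChoice.new(
      function: {name: "get_weather"}
    )
    force_custom = OpenAI::Chat::ChatCompletionNamedToolChoiceCustom.new(
      custom: {name: "code_exec"}
    )
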
# - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionTool] } # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to @@ -344,6 +343,14 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Chat::CompletionCreateParams::Verbosity }, nil?: true + # @!attribute web_search_options # This tool searches the web for relevant results to use in a response. Learn more # about the @@ -352,7 +359,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # @@ -390,7 +397,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # @@ -408,9 +415,9 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. You can provide either # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -418,6 +425,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -611,6 +620,20 @@ module Stop StringArray = OpenAI::Internal::Type::ArrayOf[String] end + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + class WebSearchOptions < OpenAI::Internal::Type::BaseModel # @!attribute search_context_size # High level guidance for the amount of context window space to use for the diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index e62d4d46..adeb1665 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -5,6 +5,13 @@ module Models module ChatModel extend OpenAI::Internal::Type::Enum + GPT_5 = :"gpt-5" + GPT_5_MINI = :"gpt-5-mini" + GPT_5_NANO = :"gpt-5-nano" + GPT_5_2025_08_07 = :"gpt-5-2025-08-07" + GPT_5_MINI_2025_08_07 = :"gpt-5-mini-2025-08-07" + GPT_5_NANO_2025_08_07 = :"gpt-5-nano-2025-08-07" + GPT_5_CHAT_LATEST = :"gpt-5-chat-latest" GPT_4_1 = :"gpt-4.1" GPT_4_1_MINI = :"gpt-4.1-mini" GPT_4_1_NANO = :"gpt-4.1-nano" diff --git a/lib/openai/models/custom_tool_input_format.rb b/lib/openai/models/custom_tool_input_format.rb new file mode 100644 index 00000000..e716a67d --- /dev/null +++ b/lib/openai/models/custom_tool_input_format.rb @@ -0,0 +1,76 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # The input format for the custom tool. Default is unconstrained text. + module CustomToolInputFormat + extend OpenAI::Internal::Type::Union + + discriminator :type + + # Unconstrained free-form text. + variant :text, -> { OpenAI::CustomToolInputFormat::Text } + + # A grammar defined by the user. + variant :grammar, -> { OpenAI::CustomToolInputFormat::Grammar } + + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute type + # Unconstrained text format. Always `text`. 
+ # + # @return [Symbol, :text] + required :type, const: :text + + # @!method initialize(type: :text) + # Unconstrained free-form text. + # + # @param type [Symbol, :text] Unconstrained text format. Always `text`. + end + + class Grammar < OpenAI::Internal::Type::BaseModel + # @!attribute definition + # The grammar definition. + # + # @return [String] + required :definition, String + + # @!attribute syntax + # The syntax of the grammar definition. One of `lark` or `regex`. + # + # @return [Symbol, OpenAI::Models::CustomToolInputFormat::Grammar::Syntax] + required :syntax, enum: -> { OpenAI::CustomToolInputFormat::Grammar::Syntax } + + # @!attribute type + # Grammar format. Always `grammar`. + # + # @return [Symbol, :grammar] + required :type, const: :grammar + + # @!method initialize(definition:, syntax:, type: :grammar) + # A grammar defined by the user. + # + # @param definition [String] The grammar definition. + # + # @param syntax [Symbol, OpenAI::Models::CustomToolInputFormat::Grammar::Syntax] The syntax of the grammar definition. One of `lark` or `regex`. + # + # @param type [Symbol, :grammar] Grammar format. Always `grammar`. + + # The syntax of the grammar definition. One of `lark` or `regex`. + # + # @see OpenAI::Models::CustomToolInputFormat::Grammar#syntax + module Syntax + extend OpenAI::Internal::Type::Enum + + LARK = :lark + REGEX = :regex + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::CustomToolInputFormat::Text, OpenAI::Models::CustomToolInputFormat::Grammar)] + end + end +end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 28b05c47..0fd34948 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -512,8 +512,8 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. # - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } + # @return [Array, nil] + optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionFunctionTool] } # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. @@ -534,7 +534,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float] A higher temperature increases randomness in the outputs. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 0c9bb2d2..49fda248 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -686,7 +686,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). 
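The `CustomToolInputFormat` union introduced above admits exactly two shapes; a minimal sketch of both, using a toy Lark grammar:

    formats = [
      OpenAI::CustomToolInputFormat::Text.new, # default: unconstrained text
      OpenAI::CustomToolInputFormat::Grammar.new(
        definition: 'start: "yes" | "no"', # toy Lark grammar
        syntax: :lark                      # or :regex
      )
    ]
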
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -708,7 +708,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 0237a2ab..494bce0c 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -650,7 +650,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -672,7 +672,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 6c0cdf67..175718a2 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -686,7 +686,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -708,7 +708,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 66bfa2f1..86690fce 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -685,7 +685,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). 
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -707,7 +707,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 1c0a6f8b..942f613c 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -690,7 +690,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -712,7 +712,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 2cdebe06..d39b2671 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -4,12 +4,11 @@ module OpenAI module Models class Reasoning < OpenAI::Internal::Type::BaseModel # @!attribute effort - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. # # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true @@ -43,7 +42,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. 
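With the `**o-series models only**` caveat dropped and `minimal` added (see the `ReasoningEffort` enum below), requesting the lowest effort looks like this; the surrounding request shapes are illustrative:

    chat_params = {
      model: :"gpt-5",
      messages: [{role: :user, content: "What is 2 + 2?"}],
      reasoning_effort: :minimal # new alongside :low / :medium / :high
    }

    # The Responses API takes the same knob on its Reasoning object:
    reasoning = OpenAI::Models::Reasoning.new(effort: :minimal)
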
# diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index f9990508..486b6d31 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -2,15 +2,15 @@ module OpenAI module Models - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. module ReasoningEffort extend OpenAI::Internal::Type::Enum + MINIMAL = :minimal LOW = :low MEDIUM = :medium HIGH = :high diff --git a/lib/openai/models/response_format_text_grammar.rb b/lib/openai/models/response_format_text_grammar.rb new file mode 100644 index 00000000..8d43e38f --- /dev/null +++ b/lib/openai/models/response_format_text_grammar.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ResponseFormatTextGrammar < OpenAI::Internal::Type::BaseModel + # @!attribute grammar + # The custom grammar for the model to follow. + # + # @return [String] + required :grammar, String + + # @!attribute type + # The type of response format being defined. Always `grammar`. + # + # @return [Symbol, :grammar] + required :type, const: :grammar + + # @!method initialize(grammar:, type: :grammar) + # A custom grammar for the model to follow when generating text. Learn more in the + # [custom grammars guide](https://platform.openai.com/docs/guides/custom-grammars). + # + # @param grammar [String] The custom grammar for the model to follow. + # + # @param type [Symbol, :grammar] The type of response format being defined. Always `grammar`. + end + end +end diff --git a/lib/openai/models/response_format_text_python.rb b/lib/openai/models/response_format_text_python.rb new file mode 100644 index 00000000..9e12a904 --- /dev/null +++ b/lib/openai/models/response_format_text_python.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + class ResponseFormatTextPython < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of response format being defined. Always `python`. + # + # @return [Symbol, :python] + required :type, const: :python + + # @!method initialize(type: :python) + # Configure the model to generate valid Python code. See the + # [custom grammars guide](https://platform.openai.com/docs/guides/custom-grammars) + # for more details. + # + # @param type [Symbol, :python] The type of response format being defined. Always `python`. + end + end +end diff --git a/lib/openai/models/responses/custom_tool.rb b/lib/openai/models/responses/custom_tool.rb new file mode 100644 index 00000000..05c3665f --- /dev/null +++ b/lib/openai/models/responses/custom_tool.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class CustomTool < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the custom tool, used to identify it in tool calls. + # + # @return [String] + required :name, String + + # @!attribute type + # The type of the custom tool. Always `custom`. 
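Both new text response formats added above construct straight from their `@!method initialize` signatures; where they plug into a request (the `text.format` configuration) falls outside this excerpt:

    grammar_format = OpenAI::Models::ResponseFormatTextGrammar.new(
      grammar: 'start: "yes" | "no"' # toy grammar; see the custom grammars guide
    )

    python_format = OpenAI::Models::ResponseFormatTextPython.new
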
+ # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!attribute description + # Optional description of the custom tool, used to provide more context. + # + # @return [String, nil] + optional :description, String + + # @!attribute format_ + # The input format for the custom tool. Default is unconstrained text. + # + # @return [OpenAI::Models::CustomToolInputFormat::Text, OpenAI::Models::CustomToolInputFormat::Grammar, nil] + optional :format_, union: -> { OpenAI::CustomToolInputFormat }, api_name: :format + + # @!method initialize(name:, description: nil, format_: nil, type: :custom) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::CustomTool} for more details. + # + # A custom tool that processes input using a specified format. Learn more about + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). + # + # @param name [String] The name of the custom tool, used to identify it in tool calls. + # + # @param description [String] Optional description of the custom tool, used to provide more context. + # + # @param format_ [OpenAI::Models::CustomToolInputFormat::Text, OpenAI::Models::CustomToolInputFormat::Grammar] The input format for the custom tool. Default is unconstrained text. + # + # @param type [Symbol, :custom] The type of the custom tool. Always `custom`. + end + end + end +end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 923aeb29..b5af411f 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -38,7 +38,7 @@ class Response < OpenAI::Internal::Type::BaseModel # response will not be carried over to the next response. This makes it simple to # swap out system (or developer) messages in new responses. # - # @return [String, Array, nil] + # @return [String, Array, nil] required :instructions, union: -> { OpenAI::Responses::Response::Instructions }, nil?: true # @!attribute metadata @@ -77,7 +77,7 @@ class Response < OpenAI::Internal::Type::BaseModel # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. # - # @return [Array] + # @return [Array] required :output, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls @@ -100,7 +100,7 @@ class Response < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] required :tool_choice, union: -> { OpenAI::Responses::Response::ToolChoice } # @!attribute tools @@ -116,10 +116,12 @@ class Response < OpenAI::Internal::Type::BaseModel # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. 
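A sketch of declaring the Responses-side custom tool defined above, constrained by a toy grammar:

    custom_tool = OpenAI::Responses::CustomTool.new(
      name: "code_exec",
      description: "Runs a short script in a sandbox.",
      format_: OpenAI::CustomToolInputFormat::Grammar.new(
        definition: 'start: /[a-z_]+/', # toy grammar
        syntax: :lark
      )
    )
    # `format_` is the Ruby-side name; it serializes to `format` on the wire
    # (`api_name: :format` above).
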
+ # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -275,7 +277,15 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Responses::Response::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Responses::Response::Verbosity }, nil?: true + + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, verbosity: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. # @@ -287,21 +297,21 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. # - # @param instructions [String, Array, nil] A system (or developer) message inserted into the model's context. + # @param instructions [String, Array, nil] A system (or developer) message inserted into the model's context. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param output [Array] An array of content items generated by the model. + # @param output [Array] An array of content items generated by the model. # # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # @@ -335,6 +345,8 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Responses::Response::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param object [Symbol, :response] The object type of this resource - always set to `response`. # @see OpenAI::Models::Responses::Response#incomplete_details @@ -383,7 +395,7 @@ module Instructions variant -> { OpenAI::Models::Responses::Response::Instructions::ResponseInputItemArray } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ResponseInputItemArray = @@ -408,6 +420,9 @@ module ToolChoice # `required` means the model must call one or more tools. variant enum: -> { OpenAI::Responses::ToolChoiceOptions } + # Constrains the tools available to the model to a pre-defined set. + variant -> { OpenAI::Responses::ToolChoiceAllowed } + # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). variant -> { OpenAI::Responses::ToolChoiceTypes } @@ -418,8 +433,11 @@ module ToolChoice # Use this option to force the model to call a specific tool on a remote MCP server. variant -> { OpenAI::Responses::ToolChoiceMcp } + # Use this option to force the model to call a specific custom tool. + variant -> { OpenAI::Responses::ToolChoiceCustom } + # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp)] + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom)] end # Specifies the processing type used for serving the request. @@ -472,6 +490,22 @@ module Truncation # @!method self.values # @return [Array] end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
+ # + # @see OpenAI::Models::Responses::Response#verbosity + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 56a26016..2d140375 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -51,7 +51,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @return [String, Array, nil] + # @return [String, Array, nil] optional :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } # @!attribute instructions @@ -178,6 +178,12 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :store, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute stream_options + # Options for streaming responses. Only set this when you set `stream: true`. + # + # @return [OpenAI::Models::Responses::ResponseCreateParams::StreamOptions, nil] + optional :stream_options, -> { OpenAI::Responses::ResponseCreateParams::StreamOptions }, nil?: true + # @!attribute temperature # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more @@ -202,7 +208,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, nil] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom, nil] optional :tool_choice, union: -> { OpenAI::Responses::ResponseCreateParams::ToolChoice } # @!attribute tools @@ -218,10 +224,12 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. 
# - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_logprobs @@ -265,7 +273,15 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Responses::ResponseCreateParams::Verbosity }, nil?: true + + # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # @@ -273,7 +289,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # @@ -301,13 +317,15 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # + # @param stream_options [OpenAI::Models::Responses::ResponseCreateParams::StreamOptions, nil] Options for streaming responses. Only set this when you set `stream: true`. + # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. 
Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -317,6 +335,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Text, image, or file inputs to the model, used to generate a response. @@ -340,7 +360,7 @@ module Input variant -> { OpenAI::Responses::ResponseInput } # @!method self.variants - # @return [Array(String, Array)] + # @return [Array(String, Array)] end # Specifies the processing type used for serving the request. @@ -373,6 +393,28 @@ module ServiceTier # @return [Array] end + class StreamOptions < OpenAI::Internal::Type::BaseModel + # @!attribute include_obfuscation + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + # + # @return [Boolean, nil] + optional :include_obfuscation, OpenAI::Internal::Type::Boolean + + # @!method initialize(include_obfuscation: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCreateParams::StreamOptions} for more + # details. + # + # Options for streaming responses. Only set this when you set `stream: true`. + # + # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. @@ -389,6 +431,9 @@ module ToolChoice # `required` means the model must call one or more tools. variant enum: -> { OpenAI::Responses::ToolChoiceOptions } + # Constrains the tools available to the model to a pre-defined set. + variant -> { OpenAI::Responses::ToolChoiceAllowed } + # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). 
variant -> { OpenAI::Responses::ToolChoiceTypes } @@ -399,8 +444,11 @@ module ToolChoice # Use this option to force the model to call a specific tool on a remote MCP server. variant -> { OpenAI::Responses::ToolChoiceMcp } + # Use this option to force the model to call a specific custom tool. + variant -> { OpenAI::Responses::ToolChoiceCustom } + # @!method self.variants - # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp)] + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom)] end # The truncation strategy to use for the model response. @@ -419,6 +467,20 @@ module Truncation # @!method self.values # @return [Array] end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end end end end diff --git a/lib/openai/models/responses/response_custom_tool_call.rb b/lib/openai/models/responses/response_custom_tool_call.rb new file mode 100644 index 00000000..48dae1e0 --- /dev/null +++ b/lib/openai/models/responses/response_custom_tool_call.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseCustomToolCall < OpenAI::Internal::Type::BaseModel + # @!attribute call_id + # An identifier used to map this custom tool call to a tool call output. + # + # @return [String] + required :call_id, String + + # @!attribute input + # The input for the custom tool call generated by the model. + # + # @return [String] + required :input, String + + # @!attribute name + # The name of the custom tool being called. + # + # @return [String] + required :name, String + + # @!attribute type + # The type of the custom tool call. Always `custom_tool_call`. + # + # @return [Symbol, :custom_tool_call] + required :type, const: :custom_tool_call + + # @!attribute id + # The unique ID of the custom tool call in the OpenAI platform. + # + # @return [String, nil] + optional :id, String + + # @!method initialize(call_id:, input:, name:, id: nil, type: :custom_tool_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCustomToolCall} for more details. + # + # A call to a custom tool created by the model. + # + # @param call_id [String] An identifier used to map this custom tool call to a tool call output. + # + # @param input [String] The input for the custom tool call generated by the model. + # + # @param name [String] The name of the custom tool being called. + # + # @param id [String] The unique ID of the custom tool call in the OpenAI platform. + # + # @param type [Symbol, :custom_tool_call] The type of the custom tool call. Always `custom_tool_call`. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_custom_tool_call_input_delta_event.rb b/lib/openai/models/responses/response_custom_tool_call_input_delta_event.rb new file mode 100644 index 00000000..5fa83189 --- /dev/null +++ b/lib/openai/models/responses/response_custom_tool_call_input_delta_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallInputDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The incremental input data (delta) for the custom tool call. + # + # @return [String] + required :delta, String + + # @!attribute item_id + # Unique identifier for the API item associated with this event. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output this delta applies to. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The event type identifier. + # + # @return [Symbol, :"response.custom_tool_call_input.delta"] + required :type, const: :"response.custom_tool_call_input.delta" + + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.custom_tool_call_input.delta") + # Event representing a delta (partial update) to the input of a custom tool call. + # + # @param delta [String] The incremental input data (delta) for the custom tool call. + # + # @param item_id [String] Unique identifier for the API item associated with this event. + # + # @param output_index [Integer] The index of the output this delta applies to. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.custom_tool_call_input.delta"] The event type identifier. + end + end + end +end diff --git a/lib/openai/models/responses/response_custom_tool_call_input_done_event.rb b/lib/openai/models/responses/response_custom_tool_call_input_done_event.rb new file mode 100644 index 00000000..e45a41e0 --- /dev/null +++ b/lib/openai/models/responses/response_custom_tool_call_input_done_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallInputDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute input + # The complete input data for the custom tool call. + # + # @return [String] + required :input, String + + # @!attribute item_id + # Unique identifier for the API item associated with this event. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output this event applies to. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The event type identifier. + # + # @return [Symbol, :"response.custom_tool_call_input.done"] + required :type, const: :"response.custom_tool_call_input.done" + + # @!method initialize(input:, item_id:, output_index:, sequence_number:, type: :"response.custom_tool_call_input.done") + # Event indicating that input for a custom tool call is complete. + # + # @param input [String] The complete input data for the custom tool call. + # + # @param item_id [String] Unique identifier for the API item associated with this event. 
+ # + # @param output_index [Integer] The index of the output this event applies to. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.custom_tool_call_input.done"] The event type identifier. + end + end + end +end diff --git a/lib/openai/models/responses/response_custom_tool_call_output.rb b/lib/openai/models/responses/response_custom_tool_call_output.rb new file mode 100644 index 00000000..644997e7 --- /dev/null +++ b/lib/openai/models/responses/response_custom_tool_call_output.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute call_id + # The call ID, used to map this custom tool call output to a custom tool call. + # + # @return [String] + required :call_id, String + + # @!attribute output + # The output from the custom tool call generated by your code. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the custom tool call output. Always `custom_tool_call_output`. + # + # @return [Symbol, :custom_tool_call_output] + required :type, const: :custom_tool_call_output + + # @!attribute id + # The unique ID of the custom tool call output in the OpenAI platform. + # + # @return [String, nil] + optional :id, String + + # @!method initialize(call_id:, output:, id: nil, type: :custom_tool_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCustomToolCallOutput} for more details. + # + # The output of a custom tool call from your code, being sent back to the model. + # + # @param call_id [String] The call ID, used to map this custom tool call output to a custom tool call. + # + # @param output [String] The output from the custom tool call generated by your code. + # + # @param id [String] The unique ID of the custom tool call output in the OpenAI platform. + # + # @param type [Symbol, :custom_tool_call_output] The type of the custom tool call output. Always `custom_tool_call_output`. + end + end + end +end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index a8f469fa..330e1e7c 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -80,6 +80,12 @@ module ResponseInputItem # An invocation of a tool on an MCP server. variant :mcp_call, -> { OpenAI::Responses::ResponseInputItem::McpCall } + # The output of a custom tool call from your code, being sent back to the model. + variant :custom_tool_call_output, -> { OpenAI::Responses::ResponseCustomToolCallOutput } + + # A call to a custom tool created by the model. + variant :custom_tool_call, -> { OpenAI::Responses::ResponseCustomToolCall } + # An internal identifier for an item to reference. 
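The two input-item variants just added close the custom-tool loop. A hedged round-trip sketch, assuming `response` is a Responses API result and `run_custom_tool` is a hypothetical stand-in for your own code:

    call = response.output.find { _1.type == :custom_tool_call }

    unless call.nil?
      result = run_custom_tool(call.name, call.input) # hypothetical helper

      tool_output = OpenAI::Responses::ResponseCustomToolCallOutput.new(
        call_id: call.call_id, # maps the output back to the originating call
        output: result
      )
      # Send it back on the follow-up request, e.g.
      # input: [tool_output], previous_response_id: response.id
    end
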
variant :item_reference, -> { OpenAI::Responses::ResponseInputItem::ItemReference } @@ -869,7 +875,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index b6fb663b..9d9b204f 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -52,6 +52,9 @@ module ResponseOutputItem # A request for human approval of a tool invocation. variant :mcp_approval_request, -> { OpenAI::Responses::ResponseOutputItem::McpApprovalRequest } + # A call to a custom tool created by the model. + variant :custom_tool_call, -> { OpenAI::Responses::ResponseCustomToolCall } + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel # @!attribute id # The unique ID of the image generation call. 
@@ -435,7 +438,7 @@ class McpApprovalRequest < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest)] + # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseCustomToolCall)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 119bea71..1ca221ee 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. 
# - # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseCustomToolCall] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -34,7 +34,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # # Emitted when a new output item is added. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was added. + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseCustomToolCall] The output item that was added. # # @param output_index [Integer] The index of the output item that was added. 
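As a hedged sketch of what the widened output-item union enables, the loop below watches a raw event stream for the new custom tool call items. It assumes a configured `client`, a model that actually invokes a custom tool, and a `name` field on `ResponseCustomToolCall`; none of these come from the hunk above.

```ruby
# Sketch only: surface custom tool call items as they are added to the output.
client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

client.responses.stream_raw(model: "gpt-5", input: "Use your custom tool.").each do |event|
  case event
  when OpenAI::Models::Responses::ResponseOutputItemAddedEvent
    item = event.item
    # `name` is assumed from the ResponseCustomToolCall model introduced elsewhere in this patch.
    puts "custom tool call: #{item.name}" if item.is_a?(OpenAI::Models::Responses::ResponseCustomToolCall)
  end
end
```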
# diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 45b061b7..f96435eb 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. # - # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseCustomToolCall] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -34,7 +34,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # # Emitted when an output item is marked done. # - # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was marked done. 
+ # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseCustomToolCall] The output item that was marked done. # # @param output_index [Integer] The index of the output item that was marked done. # diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 979fe0b2..67aa9110 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -17,18 +17,31 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } + # @!attribute include_obfuscation + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + # + # @return [Boolean, nil] + optional :include_obfuscation, OpenAI::Internal::Type::Boolean + # @!attribute starting_after # The sequence number of the event after which to start streaming. # # @return [Integer, nil] optional :starting_after, Integer - # @!method initialize(include: nil, starting_after: nil, request_options: {}) + # @!method initialize(include: nil, include_obfuscation: nil, starting_after: nil, request_options: {}) # Some parameter documentation has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # # @param include [Array<Symbol, OpenAI::Models::Responses::ResponseIncludable>] Additional fields to include in the response. See the `include` # + # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds + # # @param starting_after [Integer] The sequence number of the event after which to start streaming. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 70b11b19..d0bc4144 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -197,8 +197,16 @@ module ResponseStreamEvent # Emitted when a response is queued and waiting to be processed. variant :"response.queued", -> { OpenAI::Responses::ResponseQueuedEvent } + # Event representing a delta (partial update) to the input of a custom tool call.
+ variant :"response.custom_tool_call_input.delta", + -> { OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent } + + # Event indicating that input for a custom tool call is complete. + variant :"response.custom_tool_call_input.done", + -> { OpenAI::Responses::ResponseCustomToolCallInputDoneEvent } + # @!method self.variants - # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseReasoningTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent)] + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, 
OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseReasoningTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseCustomToolCallInputDeltaEvent, OpenAI::Models::Responses::ResponseCustomToolCallInputDoneEvent)] end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 16948259..0d8beed3 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -31,6 +31,10 @@ module Tool # A tool that allows the model to execute shell commands in a local environment. 
variant :local_shell, -> { OpenAI::Responses::Tool::LocalShell } + # A custom tool that processes input using a specified format. Learn more about + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). + variant :custom, -> { OpenAI::Responses::CustomTool } + # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). variant -> { OpenAI::Responses::WebSearchTool } @@ -540,7 +544,7 @@ class LocalShell < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::WebSearchTool)] + # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::CustomTool, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_allowed.rb b/lib/openai/models/responses/tool_choice_allowed.rb new file mode 100644 index 00000000..228e4611 --- /dev/null +++ b/lib/openai/models/responses/tool_choice_allowed.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ToolChoiceAllowed < OpenAI::Internal::Type::BaseModel + # @!attribute mode + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + # + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceAllowed::Mode] + required :mode, enum: -> { OpenAI::Responses::ToolChoiceAllowed::Mode } + + # @!attribute tools + # A list of tool definitions that the model should be allowed to call. + # + # For the Responses API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "name": "get_weather" }, + # { "type": "mcp", "server_label": "deepwiki" }, + # { "type": "image_generation" } + # ] + # ``` + # + # @return [Array<Hash{Symbol=>Object}>] + required :tools, + OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown]] + + # @!attribute type + # Allowed tool configuration type. Always `allowed_tools`. + # + # @return [Symbol, :allowed_tools] + required :type, const: :allowed_tools + + # @!method initialize(mode:, tools:, type: :allowed_tools) + # Some parameter documentation has been truncated, see + # {OpenAI::Models::Responses::ToolChoiceAllowed} for more details. + # + # Constrains the tools available to the model to a pre-defined set. + # + # @param mode [Symbol, OpenAI::Models::Responses::ToolChoiceAllowed::Mode] Constrains the tools available to the model to a pre-defined set. + # + # @param tools [Array<Hash{Symbol=>Object}>] A list of tool definitions that the model should be allowed to call. + # + # @param type [Symbol, :allowed_tools] Allowed tool configuration type. Always `allowed_tools`.
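A hedged sketch of the new `allowed_tools` choice follows, mirroring the JSON example in the doc comment above; the model name and tool entries are illustrative, and the full tool definitions are elided.

```ruby
# Sketch only: constrain the model to a subset of the declared tools.
response = client.responses.create(
  model: "gpt-5",
  input: "What's the weather in Paris?",
  # Full `tools:` definitions elided; `tool_choice` shows the allowed_tools shape.
  tool_choice: {
    type: :allowed_tools,
    mode: :auto,
    tools: [
      {type: "function", name: "get_weather"},
      {type: "image_generation"}
    ]
  }
)
```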
+ + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + # + # @see OpenAI::Models::Responses::ToolChoiceAllowed#mode + module Mode + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + REQUIRED = :required + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/responses/tool_choice_custom.rb b/lib/openai/models/responses/tool_choice_custom.rb new file mode 100644 index 00000000..310413ed --- /dev/null +++ b/lib/openai/models/responses/tool_choice_custom.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ToolChoiceCustom < OpenAI::Internal::Type::BaseModel + # @!attribute name + # The name of the custom tool to call. + # + # @return [String] + required :name, String + + # @!attribute type + # For custom tool calling, the type is always `custom`. + # + # @return [Symbol, :custom] + required :type, const: :custom + + # @!method initialize(name:, type: :custom) + # Use this option to force the model to call a specific custom tool. + # + # @param name [String] The name of the custom tool to call. + # + # @param type [Symbol, :custom] For custom tool calling, the type is always `custom`. + end + end + end +end diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index 8b8adf5a..77b58e66 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -21,7 +21,7 @@ class Assistants # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # @@ -88,7 +88,7 @@ def retrieve(assistant_id, params = {}) # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: # diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index 6c1d4b54..37648e04 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -43,7 +43,7 @@ class Runs # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # @@ -113,7 +113,7 @@ def create(thread_id, params) # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: Constrains effort on reasoning for # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 42081fac..d6be9e1e 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -30,7 +30,7 @@ class Completions # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. 
Depending on the # @@ -66,7 +66,7 @@ class Completions # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # @@ -84,9 +84,9 @@ class Completions # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. You can provide either # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -94,6 +94,8 @@ class Completions # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] @@ -139,7 +141,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
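The parameter changes above can be seen together in one call; a minimal sketch, assuming a `gpt-5`-family model, is:

```ruby
# Sketch only: `:minimal` joins the reasoning_effort values and `verbosity` is new.
chat = client.chat.completions.create(
  model: "gpt-5",
  messages: [{role: :user, content: "Summarize RFC 2616 in one line."}],
  reasoning_effort: :minimal,
  verbosity: :low
)
puts chat.choices.first.message.content
```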
# - # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -175,7 +177,7 @@ def create(params) # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Constrains effort on reasoning for # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # @@ -193,9 +195,9 @@ def create(params) # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. You can provide either # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -203,6 +205,8 @@ def create(params) # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 12db704d..8754df72 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,13 +23,13 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # @@ -57,13 +57,15 @@ class Responses # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # + # @param stream_options [OpenAI::Models::Responses::ResponseCreateParams::StreamOptions, nil] Options for streaming responses. Only set this when you set `stream: true`. + # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -73,6 +75,8 @@ class Responses # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Responses::Response] @@ -110,13 +114,13 @@ def create(params = {}) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # # @param instructions [String, nil] A system (or developer) message inserted into the model's context. # @@ -144,13 +148,15 @@ def create(params = {}) # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # + # @param stream_options [OpenAI::Models::Responses::ResponseCreateParams::StreamOptions, nil] Options for streaming responses. Only set this when you set `stream: true`. + # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. 
You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -160,9 +166,11 @@ def create(params = {}) # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # + # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params = {}) @@ -190,12 +198,14 @@ def stream_raw(params = {}) # # Retrieves a model response with the given ID. # - # @overload retrieve(response_id, include: nil, starting_after: nil, request_options: {}) + # @overload retrieve(response_id, include: nil, include_obfuscation: nil, starting_after: nil, request_options: {}) # # @param response_id [String] The ID of the response to retrieve. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds + # # @param starting_after [Integer] The sequence number of the event after which to start streaming. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] @@ -225,17 +235,19 @@ def retrieve(response_id, params = {}) # # Retrieves a model response with the given ID. # - # @overload retrieve_streaming(response_id, include: nil, starting_after: nil, request_options: {}) + # @overload retrieve_streaming(response_id, include: nil, include_obfuscation: nil, starting_after: nil, request_options: {}) # # @param response_id [String] The ID of the response to retrieve. # # @param include [Array] Additional fields to include in the response. See the `include` # + # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds + # # @param starting_after [Integer] The sequence number of the event after which to start streaming. 
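A minimal sketch of resuming a stored response stream with obfuscation disabled, using the `retrieve_streaming` signature above; `response_id` and `last_seen` are placeholders:

```ruby
# Sketch only: disable obfuscation when you trust the network path to the API.
stream = client.responses.retrieve_streaming(
  response_id,
  include_obfuscation: false,
  starting_after: last_seen
)
stream.each { |event| p event.class }
```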
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve_streaming(response_id, params = {}) diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index c5a9d836..c2fa35a9 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -55,6 +55,8 @@ module OpenAI CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat + Embedding = OpenAI::Models::Embedding EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams @@ -172,6 +174,10 @@ module OpenAI ResponseFormatText = OpenAI::Models::ResponseFormatText + ResponseFormatTextGrammar = OpenAI::Models::ResponseFormatTextGrammar + + ResponseFormatTextPython = OpenAI::Models::ResponseFormatTextPython + Responses = OpenAI::Models::Responses ResponsesModel = OpenAI::Models::ResponsesModel diff --git a/rbi/openai/models/beta/assistant_create_params.rbi b/rbi/openai/models/beta/assistant_create_params.rbi index c04793b1..f4783b7e 100644 --- a/rbi/openai/models/beta/assistant_create_params.rbi +++ b/rbi/openai/models/beta/assistant_create_params.rbi @@ -45,12 +45,11 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :name - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort @@ -208,12 +207,11 @@ module OpenAI metadata: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), diff --git a/rbi/openai/models/beta/assistant_update_params.rbi b/rbi/openai/models/beta/assistant_update_params.rbi index b044c34b..217e526e 100644 --- a/rbi/openai/models/beta/assistant_update_params.rbi +++ b/rbi/openai/models/beta/assistant_update_params.rbi @@ -65,12 +65,11 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :name - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. 
Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort @@ -232,12 +231,11 @@ module OpenAI model: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -354,6 +352,36 @@ module OpenAI end OrSymbol = T.type_alias { T.any(Symbol, String) } + GPT_5 = + T.let( + :"gpt-5", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_5_MINI = + T.let( + :"gpt-5-mini", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_5_NANO = + T.let( + :"gpt-5-nano", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_5_2025_08_07 = + T.let( + :"gpt-5-2025-08-07", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_5_MINI_2025_08_07 = + T.let( + :"gpt-5-mini-2025-08-07", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) + GPT_5_NANO_2025_08_07 = + T.let( + :"gpt-5-nano-2025-08-07", + OpenAI::Beta::AssistantUpdateParams::Model::TaggedSymbol + ) GPT_4_1 = T.let( :"gpt-4.1", diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index bb9d6882..04d643a2 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -111,12 +111,11 @@ module OpenAI sig { params(parallel_tool_calls: T::Boolean).void } attr_writer :parallel_tool_calls - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort @@ -330,12 +329,11 @@ module OpenAI # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. parallel_tool_calls: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Specifies the format that the model must output. 
Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), diff --git a/rbi/openai/models/chat/chat_completion_allowed_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_allowed_tool_choice.rbi new file mode 100644 index 00000000..17d1cecc --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_allowed_tool_choice.rbi @@ -0,0 +1,60 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionAllowedToolChoice = Chat::ChatCompletionAllowedToolChoice + + module Chat + class ChatCompletionAllowedToolChoice < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAllowedToolChoice, + OpenAI::Internal::AnyHash + ) + end + + # Constrains the tools available to the model to a pre-defined set. + sig { returns(OpenAI::Chat::ChatCompletionAllowedTools) } + attr_reader :allowed_tools + + sig do + params( + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools::OrHash + ).void + end + attr_writer :allowed_tools + + # Allowed tool configuration type. Always `allowed_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Constrains the tools available to the model to a pre-defined set. + sig do + params( + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Constrains the tools available to the model to a pre-defined set. + allowed_tools:, + # Allowed tool configuration type. Always `allowed_tools`. + type: :allowed_tools + ) + end + + sig do + override.returns( + { + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_allowed_tools.rbi b/rbi/openai/models/chat/chat_completion_allowed_tools.rbi new file mode 100644 index 00000000..6dbb2e20 --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_allowed_tools.rbi @@ -0,0 +1,118 @@ +# typed: strong + +module OpenAI + module Models + module Chat + class ChatCompletionAllowedTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionAllowedTools, + OpenAI::Internal::AnyHash + ) + end + + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + sig do + returns(OpenAI::Chat::ChatCompletionAllowedTools::Mode::OrSymbol) + end + attr_accessor :mode + + # A list of tool definitions that the model should be allowed to call. + # + # For the Chat Completions API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "function": { "name": "get_weather" } }, + # { "type": "function", "function": { "name": "get_time" } } + # ] + # ``` + sig { returns(T::Array[T::Hash[Symbol, T.anything]]) } + attr_accessor :tools + + # Constrains the tools available to the model to a pre-defined set. + sig do + params( + mode: OpenAI::Chat::ChatCompletionAllowedTools::Mode::OrSymbol, + tools: T::Array[T::Hash[Symbol, T.anything]] + ).returns(T.attached_class) + end + def self.new( + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + mode:, + # A list of tool definitions that the model should be allowed to call. 
+ # + # For the Chat Completions API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "function": { "name": "get_weather" } }, + # { "type": "function", "function": { "name": "get_time" } } + # ] + # ``` + tools: + ) + end + + sig do + override.returns( + { + mode: OpenAI::Chat::ChatCompletionAllowedTools::Mode::OrSymbol, + tools: T::Array[T::Hash[Symbol, T.anything]] + } + ) + end + def to_hash + end + + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + module Mode + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::ChatCompletionAllowedTools::Mode) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Chat::ChatCompletionAllowedTools::Mode::TaggedSymbol + ) + REQUIRED = + T.let( + :required, + OpenAI::Chat::ChatCompletionAllowedTools::Mode::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionAllowedTools::Mode::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index b2233b53..59c05235 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -85,7 +85,14 @@ module OpenAI # The tool calls generated by the model, such as function calls. sig do returns( - T.nilable(T::Array[OpenAI::Chat::ChatCompletionMessageToolCall]) + T.nilable( + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall, + OpenAI::Chat::ChatCompletionMessageCustomToolCall + ) + ] + ) ) end attr_reader :tool_calls @@ -93,7 +100,12 @@ module OpenAI sig do params( tool_calls: - T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash] + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::OrHash, + OpenAI::Chat::ChatCompletionMessageCustomToolCall::OrHash + ) + ] ).void end attr_writer :tool_calls @@ -116,7 +128,12 @@ module OpenAI name: String, refusal: T.nilable(String), tool_calls: - T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash], + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::OrHash, + OpenAI::Chat::ChatCompletionMessageCustomToolCall::OrHash + ) + ], role: Symbol ).returns(T.attached_class) end @@ -160,7 +177,13 @@ module OpenAI ), name: String, refusal: T.nilable(String), - tool_calls: T::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall, + OpenAI::Chat::ChatCompletionMessageCustomToolCall + ) + ] } ) end diff --git a/rbi/openai/models/chat/chat_completion_custom_tool.rbi b/rbi/openai/models/chat/chat_completion_custom_tool.rbi new file mode 100644 index 00000000..2ce1f84a --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_custom_tool.rbi @@ -0,0 +1,335 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionCustomTool = Chat::ChatCompletionCustomTool + + module Chat + class ChatCompletionCustomTool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool, + OpenAI::Internal::AnyHash + ) + end + + # Properties of the custom 
tool. + sig { returns(OpenAI::Chat::ChatCompletionCustomTool::Custom) } + attr_reader :custom + + sig do + params( + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom::OrHash + ).void + end + attr_writer :custom + + # The type of the custom tool. Always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # A custom tool that processes input using a specified format. + sig do + params( + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Properties of the custom tool. + custom:, + # The type of the custom tool. Always `custom`. + type: :custom + ) + end + + sig do + override.returns( + { + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom, + type: Symbol + } + ) + end + def to_hash + end + + class Custom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom, + OpenAI::Internal::AnyHash + ) + end + + # The name of the custom tool, used to identify it in tool calls. + sig { returns(String) } + attr_accessor :name + + # Optional description of the custom tool, used to provide more context. + sig { returns(T.nilable(String)) } + attr_reader :description + + sig { params(description: String).void } + attr_writer :description + + # The input format for the custom tool. Default is unconstrained text. + sig do + returns( + T.nilable( + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar + ) + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::OrHash + ) + ).void + end + attr_writer :format_ + + # Properties of the custom tool. + sig do + params( + name: String, + description: String, + format_: + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # The name of the custom tool, used to identify it in tool calls. + name:, + # Optional description of the custom tool, used to provide more context. + description: nil, + # The input format for the custom tool. Default is unconstrained text. + format_: nil + ) + end + + sig do + override.returns( + { + name: String, + description: String, + format_: + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar + ) + } + ) + end + def to_hash + end + + # The input format for the custom tool. Default is unconstrained text. + module Format + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar + ) + end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text, + OpenAI::Internal::AnyHash + ) + end + + # Unconstrained text format. Always `text`. + sig { returns(Symbol) } + attr_accessor :type + + # Unconstrained free-form text. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # Unconstrained text format. Always `text`. 
+ type: :text + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + + class Grammar < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar, + OpenAI::Internal::AnyHash + ) + end + + # Your chosen grammar. + sig do + returns( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar + ) + end + attr_reader :grammar + + sig do + params( + grammar: + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::OrHash + ).void + end + attr_writer :grammar + + # Grammar format. Always `grammar`. + sig { returns(Symbol) } + attr_accessor :type + + # A grammar defined by the user. + sig do + params( + grammar: + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Your chosen grammar. + grammar:, + # Grammar format. Always `grammar`. + type: :grammar + ) + end + + sig do + override.returns( + { + grammar: + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar, + type: Symbol + } + ) + end + def to_hash + end + + class Grammar < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar, + OpenAI::Internal::AnyHash + ) + end + + # The grammar definition. + sig { returns(String) } + attr_accessor :definition + + # The syntax of the grammar definition. One of `lark` or `regex`. + sig do + returns( + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::OrSymbol + ) + end + attr_accessor :syntax + + # Your chosen grammar. + sig do + params( + definition: String, + syntax: + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # The grammar definition. + definition:, + # The syntax of the grammar definition. One of `lark` or `regex`. + syntax: + ) + end + + sig do + override.returns( + { + definition: String, + syntax: + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::OrSymbol + } + ) + end + def to_hash + end + + # The syntax of the grammar definition. One of `lark` or `regex`. 
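+                  # As an illustrative sketch (not a normative grammar), a `lark`
+                  # definition might look like:
+                  #
+                  # ```text
+                  # start: "yes" | "no"
+                  # ```
+                  #
+                  # while an equivalent `regex` definition could be `^(yes|no)$`.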
+ module Syntax + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LARK = + T.let( + :lark, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::TaggedSymbol + ) + REGEX = + T.let( + :regex, + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Variants + ] + ) + end + def self.variants + end + end + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_function_tool.rbi b/rbi/openai/models/chat/chat_completion_function_tool.rbi new file mode 100644 index 00000000..b8fa3e81 --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_function_tool.rbi @@ -0,0 +1,51 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionFunctionTool = Chat::ChatCompletionFunctionTool + + module Chat + class ChatCompletionFunctionTool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionFunctionTool, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(OpenAI::FunctionDefinition) } + attr_reader :function + + sig { params(function: OpenAI::FunctionDefinition::OrHash).void } + attr_writer :function + + # The type of the tool. Currently, only `function` is supported. + sig { returns(Symbol) } + attr_accessor :type + + # A function tool that can be used to generate a response. + sig do + params( + function: OpenAI::FunctionDefinition::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + function:, + # The type of the tool. Currently, only `function` is supported. + type: :function + ) + end + + sig do + override.returns( + { function: OpenAI::FunctionDefinition, type: Symbol } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi index 85e74838..2f30fe03 100644 --- a/rbi/openai/models/chat/chat_completion_message.rbi +++ b/rbi/openai/models/chat/chat_completion_message.rbi @@ -74,7 +74,9 @@ module OpenAI # The tool calls generated by the model, such as function calls. 
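+          # Because this field is now a union, callers can branch on the
+          # concrete class of each element. A minimal sketch
+          # (`dispatch_function` and `dispatch_custom` are hypothetical
+          # helpers):
+          #
+          # ```ruby
+          # message.tool_calls&.each do |call|
+          #   case call
+          #   when OpenAI::Chat::ChatCompletionMessageFunctionToolCall
+          #     dispatch_function(call.function.name, call.function.arguments)
+          #   when OpenAI::Chat::ChatCompletionMessageCustomToolCall
+          #     dispatch_custom(call.custom.name, call.custom.input)
+          #   end
+          # end
+          # ```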
sig do returns( - T.nilable(T::Array[OpenAI::Chat::ChatCompletionMessageToolCall]) + T.nilable( + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::Variants] + ) ) end attr_reader :tool_calls @@ -82,7 +84,12 @@ module OpenAI sig do params( tool_calls: - T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash] + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::OrHash, + OpenAI::Chat::ChatCompletionMessageCustomToolCall::OrHash + ) + ] ).void end attr_writer :tool_calls @@ -98,7 +105,12 @@ module OpenAI function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall::OrHash, tool_calls: - T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::OrHash], + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::OrHash, + OpenAI::Chat::ChatCompletionMessageCustomToolCall::OrHash + ) + ], role: Symbol ).returns(T.attached_class) end @@ -134,7 +146,8 @@ module OpenAI T::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], audio: T.nilable(OpenAI::Chat::ChatCompletionAudio), function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, - tool_calls: T::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::Variants] } ) end diff --git a/rbi/openai/models/chat/chat_completion_message_custom_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_custom_tool_call.rbi new file mode 100644 index 00000000..77a50d60 --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_message_custom_tool_call.rbi @@ -0,0 +1,105 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionMessageCustomToolCall = + Chat::ChatCompletionMessageCustomToolCall + + module Chat + class ChatCompletionMessageCustomToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageCustomToolCall, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # The custom tool that the model called. + sig do + returns(OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom) + end + attr_reader :custom + + sig do + params( + custom: + OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom::OrHash + ).void + end + attr_writer :custom + + # The type of the tool. Always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # A call to a custom tool created by the model. + sig do + params( + id: String, + custom: + OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the tool call. + id:, + # The custom tool that the model called. + custom:, + # The type of the tool. Always `custom`. + type: :custom + ) + end + + sig do + override.returns( + { + id: String, + custom: OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom, + type: Symbol + } + ) + end + def to_hash + end + + class Custom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom, + OpenAI::Internal::AnyHash + ) + end + + # The input for the custom tool call generated by the model. + sig { returns(String) } + attr_accessor :input + + # The name of the custom tool to call. + sig { returns(String) } + attr_accessor :name + + # The custom tool that the model called. + sig { params(input: String, name: String).returns(T.attached_class) } + def self.new( + # The input for the custom tool call generated by the model. 
+ input:, + # The name of the custom tool to call. + name: + ) + end + + sig { override.returns({ input: String, name: String }) } + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_message_function_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_function_tool_call.rbi new file mode 100644 index 00000000..a6d11892 --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_message_function_tool_call.rbi @@ -0,0 +1,114 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionMessageFunctionToolCall = + Chat::ChatCompletionMessageFunctionToolCall + + module Chat + class ChatCompletionMessageFunctionToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # The function that the model called. + sig do + returns(OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function) + end + attr_reader :function + + sig do + params( + function: + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function::OrHash + ).void + end + attr_writer :function + + # The type of the tool. Currently, only `function` is supported. + sig { returns(Symbol) } + attr_accessor :type + + # A call to a function tool created by the model. + sig do + params( + id: String, + function: + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the tool call. + id:, + # The function that the model called. + function:, + # The type of the tool. Currently, only `function` is supported. + type: :function + ) + end + + sig do + override.returns( + { + id: String, + function: + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function, + type: Symbol + } + ) + end + def to_hash + end + + class Function < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function, + OpenAI::Internal::AnyHash + ) + end + + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + sig { returns(String) } + attr_accessor :arguments + + # The name of the function to call. + sig { returns(String) } + attr_accessor :name + + # The function that the model called. + sig do + params(arguments: String, name: String).returns(T.attached_class) + end + def self.new( + # The arguments to call the function with, as generated by the model in JSON + # format. Note that the model does not always generate valid JSON, and may + # hallucinate parameters not defined by your function schema. Validate the + # arguments in your code before calling your function. + arguments:, + # The name of the function to call. 
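+          # For example, `"get_weather"`, matching a function declared in the
+          # request's `tools` list.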
+ name: + ) + end + + sig { override.returns({ arguments: String, name: String }) } + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi index 0e512346..6c99f830 100644 --- a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi @@ -5,103 +5,24 @@ module OpenAI ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall module Chat - class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel - OrHash = + # A call to a function tool created by the model. + module ChatCompletionMessageToolCall + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias do T.any( - OpenAI::Chat::ChatCompletionMessageToolCall, - OpenAI::Internal::AnyHash + OpenAI::Chat::ChatCompletionMessageFunctionToolCall, + OpenAI::Chat::ChatCompletionMessageCustomToolCall ) end - # The ID of the tool call. - sig { returns(String) } - attr_accessor :id - - # The function that the model called. - sig { returns(OpenAI::Chat::ChatCompletionMessageToolCall::Function) } - attr_reader :function - - sig do - params( - function: - OpenAI::Chat::ChatCompletionMessageToolCall::Function::OrHash - ).void - end - attr_writer :function - - # The type of the tool. Currently, only `function` is supported. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - id: String, - function: - OpenAI::Chat::ChatCompletionMessageToolCall::Function::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The ID of the tool call. - id:, - # The function that the model called. - function:, - # The type of the tool. Currently, only `function` is supported. - type: :function - ) - end - sig do override.returns( - { - id: String, - function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, - type: Symbol - } + T::Array[OpenAI::Chat::ChatCompletionMessageToolCall::Variants] ) end - def to_hash - end - - class Function < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionMessageToolCall::Function, - OpenAI::Internal::AnyHash - ) - end - - # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. - sig { returns(String) } - attr_accessor :arguments - - # The name of the function to call. - sig { returns(String) } - attr_accessor :name - - # The function that the model called. - sig do - params(arguments: String, name: String).returns(T.attached_class) - end - def self.new( - # The arguments to call the function with, as generated by the model in JSON - # format. Note that the model does not always generate valid JSON, and may - # hallucinate parameters not defined by your function schema. Validate the - # arguments in your code before calling your function. - arguments:, - # The name of the function to call. 
- name: - ) - end - - sig { override.returns({ arguments: String, name: String }) } - def to_hash - end + def self.variants end end end diff --git a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi index daed342c..f85abf64 100644 --- a/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi +++ b/rbi/openai/models/chat/chat_completion_named_tool_choice.rbi @@ -25,7 +25,7 @@ module OpenAI end attr_writer :function - # The type of the tool. Currently, only `function` is supported. + # For function calling, the type is always `function`. sig { returns(Symbol) } attr_accessor :type @@ -40,7 +40,7 @@ module OpenAI end def self.new( function:, - # The type of the tool. Currently, only `function` is supported. + # For function calling, the type is always `function`. type: :function ) end diff --git a/rbi/openai/models/chat/chat_completion_named_tool_choice_custom.rbi b/rbi/openai/models/chat/chat_completion_named_tool_choice_custom.rbi new file mode 100644 index 00000000..0c4ba4f2 --- /dev/null +++ b/rbi/openai/models/chat/chat_completion_named_tool_choice_custom.rbi @@ -0,0 +1,89 @@ +# typed: strong + +module OpenAI + module Models + ChatCompletionNamedToolChoiceCustom = + Chat::ChatCompletionNamedToolChoiceCustom + + module Chat + class ChatCompletionNamedToolChoiceCustom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom, + OpenAI::Internal::AnyHash + ) + end + + sig do + returns(OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom) + end + attr_reader :custom + + sig do + params( + custom: + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom::OrHash + ).void + end + attr_writer :custom + + # For custom tool calling, the type is always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # Specifies a tool the model should use. Use to force the model to call a specific + # custom tool. + sig do + params( + custom: + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + custom:, + # For custom tool calling, the type is always `custom`. + type: :custom + ) + end + + sig do + override.returns( + { + custom: OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom, + type: Symbol + } + ) + end + def to_hash + end + + class Custom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom, + OpenAI::Internal::AnyHash + ) + end + + # The name of the custom tool to call. + sig { returns(String) } + attr_accessor :name + + sig { params(name: String).returns(T.attached_class) } + def self.new( + # The name of the custom tool to call. + name: + ) + end + + sig { override.returns({ name: String }) } + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/chat/chat_completion_stream_options.rbi b/rbi/openai/models/chat/chat_completion_stream_options.rbi index 7061b1ff..e970e19e 100644 --- a/rbi/openai/models/chat/chat_completion_stream_options.rbi +++ b/rbi/openai/models/chat/chat_completion_stream_options.rbi @@ -14,6 +14,18 @@ module OpenAI ) end + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. 
These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :include_obfuscation + + sig { params(include_obfuscation: T::Boolean).void } + attr_writer :include_obfuscation + # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. @@ -28,8 +40,20 @@ module OpenAI attr_writer :include_usage # Options for streaming response. Only set this when you set `stream: true`. - sig { params(include_usage: T::Boolean).returns(T.attached_class) } + sig do + params( + include_obfuscation: T::Boolean, + include_usage: T::Boolean + ).returns(T.attached_class) + end def self.new( + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + include_obfuscation: nil, # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. @@ -41,7 +65,11 @@ module OpenAI ) end - sig { override.returns({ include_usage: T::Boolean }) } + sig do + override.returns( + { include_obfuscation: T::Boolean, include_usage: T::Boolean } + ) + end def to_hash end end diff --git a/rbi/openai/models/chat/chat_completion_tool.rbi b/rbi/openai/models/chat/chat_completion_tool.rbi index 0fcacb06..4f687406 100644 --- a/rbi/openai/models/chat/chat_completion_tool.rbi +++ b/rbi/openai/models/chat/chat_completion_tool.rbi @@ -5,41 +5,22 @@ module OpenAI ChatCompletionTool = Chat::ChatCompletionTool module Chat - class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - OrHash = + # A function tool that can be used to generate a response. + module ChatCompletionTool + extend OpenAI::Internal::Type::Union + + Variants = T.type_alias do - T.any(OpenAI::Chat::ChatCompletionTool, OpenAI::Internal::AnyHash) + T.any( + OpenAI::Chat::ChatCompletionFunctionTool, + OpenAI::Chat::ChatCompletionCustomTool + ) end - sig { returns(OpenAI::FunctionDefinition) } - attr_reader :function - - sig { params(function: OpenAI::FunctionDefinition::OrHash).void } - attr_writer :function - - # The type of the tool. Currently, only `function` is supported. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - function: OpenAI::FunctionDefinition::OrHash, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - function:, - # The type of the tool. Currently, only `function` is supported. 
- type: :function - ) - end - sig do - override.returns( - { function: OpenAI::FunctionDefinition, type: Symbol } - ) + override.returns(T::Array[OpenAI::Chat::ChatCompletionTool::Variants]) end - def to_hash + def self.variants end end end diff --git a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi index 5363a308..df0f67a7 100644 --- a/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -21,7 +21,9 @@ module OpenAI T.type_alias do T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice + OpenAI::Chat::ChatCompletionAllowedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom ) end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 3e6fc91b..4af9f379 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -225,12 +225,11 @@ module OpenAI sig { params(prompt_cache_key: String).void } attr_writer :prompt_cache_key - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :reasoning_effort @@ -366,7 +365,9 @@ module OpenAI T.nilable( T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice + OpenAI::Chat::ChatCompletionAllowedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom ) ) ) @@ -378,20 +379,41 @@ module OpenAI tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + OpenAI::Chat::ChatCompletionAllowedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::OrHash ) ).void end attr_writer :tool_choice - # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. - sig { returns(T.nilable(T::Array[OpenAI::Chat::ChatCompletionTool])) } + # A list of tools the model may call. You can provide either + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) + # or [function tools](https://platform.openai.com/docs/guides/function-calling). 
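+      # An illustrative fragment (tool names and schemas are hypothetical):
+      #
+      # ```ruby
+      # tools: [
+      #   {
+      #     type: :function,
+      #     function: {
+      #       name: "get_weather",
+      #       parameters: {type: "object", properties: {}}
+      #     }
+      #   },
+      #   {type: :custom, custom: {name: "run_sql", description: "Executes SQL"}}
+      # ]
+      # ```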
+ sig do + returns( + T.nilable( + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool, + OpenAI::Chat::ChatCompletionCustomTool + ) + ] + ) + ) + end attr_reader :tools sig do - params(tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash]).void + params( + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::OrHash + ) + ] + ).void end attr_writer :tools @@ -420,6 +442,16 @@ module OpenAI sig { params(user: String).void } attr_writer :user + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable(OpenAI::Chat::CompletionCreateParams::Verbosity::OrSymbol) + ) + end + attr_accessor :verbosity + # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). @@ -500,12 +532,24 @@ module OpenAI tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + OpenAI::Chat::ChatCompletionAllowedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::OrHash ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::OrHash + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Verbosity::OrSymbol + ), web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, request_options: OpenAI::RequestOptions::OrHash @@ -614,12 +658,11 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # An object specifying the format that the model must output. # @@ -690,9 +733,9 @@ module OpenAI # `none` is the default when no tools are present. `auto` is the default if tools # are present. tool_choice: nil, - # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # A list of tools the model may call. You can provide either + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) + # or [function tools](https://platform.openai.com/docs/guides/function-calling). tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -710,6 +753,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. 
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). @@ -781,12 +828,24 @@ module OpenAI tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice + OpenAI::Chat::ChatCompletionAllowedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoice, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool, + OpenAI::Chat::ChatCompletionCustomTool + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Verbosity::OrSymbol + ), web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, request_options: OpenAI::RequestOptions @@ -1113,6 +1172,45 @@ module OpenAI ) end + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Chat::CompletionCreateParams::Verbosity) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Chat::CompletionCreateParams::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Chat::CompletionCreateParams::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Chat::CompletionCreateParams::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end + class WebSearchOptions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi index 26dc28c7..7ba1f29a 100644 --- a/rbi/openai/models/chat_model.rbi +++ b/rbi/openai/models/chat_model.rbi @@ -8,6 +8,17 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ChatModel) } OrSymbol = T.type_alias { T.any(Symbol, String) } + GPT_5 = T.let(:"gpt-5", OpenAI::ChatModel::TaggedSymbol) + GPT_5_MINI = T.let(:"gpt-5-mini", OpenAI::ChatModel::TaggedSymbol) + GPT_5_NANO = T.let(:"gpt-5-nano", OpenAI::ChatModel::TaggedSymbol) + GPT_5_2025_08_07 = + T.let(:"gpt-5-2025-08-07", OpenAI::ChatModel::TaggedSymbol) + GPT_5_MINI_2025_08_07 = + T.let(:"gpt-5-mini-2025-08-07", OpenAI::ChatModel::TaggedSymbol) + GPT_5_NANO_2025_08_07 = + T.let(:"gpt-5-nano-2025-08-07", OpenAI::ChatModel::TaggedSymbol) + GPT_5_CHAT_LATEST = + T.let(:"gpt-5-chat-latest", OpenAI::ChatModel::TaggedSymbol) GPT_4_1 = T.let(:"gpt-4.1", OpenAI::ChatModel::TaggedSymbol) GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::ChatModel::TaggedSymbol) GPT_4_1_NANO = T.let(:"gpt-4.1-nano", OpenAI::ChatModel::TaggedSymbol) diff --git a/rbi/openai/models/custom_tool_input_format.rbi b/rbi/openai/models/custom_tool_input_format.rbi new file mode 
100644 index 00000000..d2f57068 --- /dev/null +++ b/rbi/openai/models/custom_tool_input_format.rbi @@ -0,0 +1,136 @@ +# typed: strong + +module OpenAI + module Models + # The input format for the custom tool. Default is unconstrained text. + module CustomToolInputFormat + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::CustomToolInputFormat::Text, + OpenAI::CustomToolInputFormat::Grammar + ) + end + + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::CustomToolInputFormat::Text, + OpenAI::Internal::AnyHash + ) + end + + # Unconstrained text format. Always `text`. + sig { returns(Symbol) } + attr_accessor :type + + # Unconstrained free-form text. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # Unconstrained text format. Always `text`. + type: :text + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + + class Grammar < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::CustomToolInputFormat::Grammar, + OpenAI::Internal::AnyHash + ) + end + + # The grammar definition. + sig { returns(String) } + attr_accessor :definition + + # The syntax of the grammar definition. One of `lark` or `regex`. + sig do + returns(OpenAI::CustomToolInputFormat::Grammar::Syntax::OrSymbol) + end + attr_accessor :syntax + + # Grammar format. Always `grammar`. + sig { returns(Symbol) } + attr_accessor :type + + # A grammar defined by the user. + sig do + params( + definition: String, + syntax: OpenAI::CustomToolInputFormat::Grammar::Syntax::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The grammar definition. + definition:, + # The syntax of the grammar definition. One of `lark` or `regex`. + syntax:, + # Grammar format. Always `grammar`. + type: :grammar + ) + end + + sig do + override.returns( + { + definition: String, + syntax: OpenAI::CustomToolInputFormat::Grammar::Syntax::OrSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The syntax of the grammar definition. One of `lark` or `regex`. + module Syntax + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::CustomToolInputFormat::Grammar::Syntax) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LARK = + T.let( + :lark, + OpenAI::CustomToolInputFormat::Grammar::Syntax::TaggedSymbol + ) + REGEX = + T.let( + :regex, + OpenAI::CustomToolInputFormat::Grammar::Syntax::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::CustomToolInputFormat::Grammar::Syntax::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns(T::Array[OpenAI::CustomToolInputFormat::Variants]) + end + def self.variants + end + end + end +end diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index d929b514..79dee373 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -935,12 +935,16 @@ module OpenAI # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. 
- sig { returns(T.nilable(T::Array[OpenAI::Chat::ChatCompletionTool])) } + sig do + returns( + T.nilable(T::Array[OpenAI::Chat::ChatCompletionFunctionTool]) + ) + end attr_reader :tools sig do params( - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash] + tools: T::Array[OpenAI::Chat::ChatCompletionFunctionTool::OrHash] ).void end attr_writer :tools @@ -963,7 +967,7 @@ module OpenAI ), seed: Integer, temperature: Float, - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: T::Array[OpenAI::Chat::ChatCompletionFunctionTool::OrHash], top_p: Float ).returns(T.attached_class) end @@ -1006,7 +1010,7 @@ module OpenAI ), seed: Integer, temperature: Float, - tools: T::Array[OpenAI::Chat::ChatCompletionTool], + tools: T::Array[OpenAI::Chat::ChatCompletionFunctionTool], top_p: Float } ) diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index d9c6617c..13f94289 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -1174,6 +1174,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1205,6 +1206,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index c4577765..e7ea35ca 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -1125,6 +1125,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::CustomTool, OpenAI::Responses::WebSearchTool ) ] @@ -1145,6 +1146,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1176,6 +1178,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -1233,6 +1236,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::CustomTool, OpenAI::Responses::WebSearchTool ) ], diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index 7126ad47..bf8ed611 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -1174,6 +1174,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1205,6 +1206,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + 
OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index 28b81204..ab3d4305 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -1170,6 +1170,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1201,6 +1202,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 306630c7..8797d797 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -1176,6 +1176,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1207,6 +1208,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/reasoning.rbi b/rbi/openai/models/reasoning.rbi index dc89cb60..5243471d 100644 --- a/rbi/openai/models/reasoning.rbi +++ b/rbi/openai/models/reasoning.rbi @@ -6,12 +6,11 @@ module OpenAI OrHash = T.type_alias { T.any(OpenAI::Reasoning, OpenAI::Internal::AnyHash) } - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } attr_accessor :effort @@ -42,12 +41,11 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. effort: nil, # **Deprecated:** use `summary` instead. 
# diff --git a/rbi/openai/models/reasoning_effort.rbi b/rbi/openai/models/reasoning_effort.rbi index 30ff7a5f..fb0629b1 100644 --- a/rbi/openai/models/reasoning_effort.rbi +++ b/rbi/openai/models/reasoning_effort.rbi @@ -2,18 +2,18 @@ module OpenAI module Models - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. module ReasoningEffort extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::ReasoningEffort) } OrSymbol = T.type_alias { T.any(Symbol, String) } + MINIMAL = T.let(:minimal, OpenAI::ReasoningEffort::TaggedSymbol) LOW = T.let(:low, OpenAI::ReasoningEffort::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::ReasoningEffort::TaggedSymbol) HIGH = T.let(:high, OpenAI::ReasoningEffort::TaggedSymbol) diff --git a/rbi/openai/models/response_format_text_grammar.rbi b/rbi/openai/models/response_format_text_grammar.rbi new file mode 100644 index 00000000..771688ea --- /dev/null +++ b/rbi/openai/models/response_format_text_grammar.rbi @@ -0,0 +1,35 @@ +# typed: strong + +module OpenAI + module Models + class ResponseFormatTextGrammar < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ResponseFormatTextGrammar, OpenAI::Internal::AnyHash) + end + + # The custom grammar for the model to follow. + sig { returns(String) } + attr_accessor :grammar + + # The type of response format being defined. Always `grammar`. + sig { returns(Symbol) } + attr_accessor :type + + # A custom grammar for the model to follow when generating text. Learn more in the + # [custom grammars guide](https://platform.openai.com/docs/guides/custom-grammars). + sig { params(grammar: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The custom grammar for the model to follow. + grammar:, + # The type of response format being defined. Always `grammar`. + type: :grammar + ) + end + + sig { override.returns({ grammar: String, type: Symbol }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/response_format_text_python.rbi b/rbi/openai/models/response_format_text_python.rbi new file mode 100644 index 00000000..cc36114b --- /dev/null +++ b/rbi/openai/models/response_format_text_python.rbi @@ -0,0 +1,30 @@ +# typed: strong + +module OpenAI + module Models + class ResponseFormatTextPython < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::ResponseFormatTextPython, OpenAI::Internal::AnyHash) + end + + # The type of response format being defined. Always `python`. + sig { returns(Symbol) } + attr_accessor :type + + # Configure the model to generate valid Python code. See the + # [custom grammars guide](https://platform.openai.com/docs/guides/custom-grammars) + # for more details. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of response format being defined. Always `python`. 
+ type: :python + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/responses/custom_tool.rbi b/rbi/openai/models/responses/custom_tool.rbi new file mode 100644 index 00000000..066ca268 --- /dev/null +++ b/rbi/openai/models/responses/custom_tool.rbi @@ -0,0 +1,96 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class CustomTool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Responses::CustomTool, OpenAI::Internal::AnyHash) + end + + # The name of the custom tool, used to identify it in tool calls. + sig { returns(String) } + attr_accessor :name + + # The type of the custom tool. Always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional description of the custom tool, used to provide more context. + sig { returns(T.nilable(String)) } + attr_reader :description + + sig { params(description: String).void } + attr_writer :description + + # The input format for the custom tool. Default is unconstrained text. + sig do + returns( + T.nilable( + T.any( + OpenAI::CustomToolInputFormat::Text, + OpenAI::CustomToolInputFormat::Grammar + ) + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::CustomToolInputFormat::Text::OrHash, + OpenAI::CustomToolInputFormat::Grammar::OrHash + ) + ).void + end + attr_writer :format_ + + # A custom tool that processes input using a specified format. Learn more about + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). + sig do + params( + name: String, + description: String, + format_: + T.any( + OpenAI::CustomToolInputFormat::Text::OrHash, + OpenAI::CustomToolInputFormat::Grammar::OrHash + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The name of the custom tool, used to identify it in tool calls. + name:, + # Optional description of the custom tool, used to provide more context. + description: nil, + # The input format for the custom tool. Default is unconstrained text. + format_: nil, + # The type of the custom tool. Always `custom`. + type: :custom + ) + end + + sig do + override.returns( + { + name: String, + type: Symbol, + description: String, + format_: + T.any( + OpenAI::CustomToolInputFormat::Text, + OpenAI::CustomToolInputFormat::Grammar + ) + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 3cdfaa54..022f201d 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -116,8 +116,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. sig { returns(T::Array[OpenAI::Responses::Tool::Variants]) } attr_accessor :tools @@ -278,6 +280,16 @@ module OpenAI sig { params(user: String).void } attr_writer :user + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
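+      # For example, `verbosity: :low` biases the model toward terse replies,
+      # while `:high` permits more detailed ones.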
+ sig do + returns( + T.nilable(OpenAI::Responses::Response::Verbosity::TaggedSymbol) + ) + end + attr_accessor :verbosity + sig do params( id: String, @@ -308,7 +320,8 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, - OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash ) ], parallel_tool_calls: T::Boolean, @@ -316,9 +329,11 @@ module OpenAI tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed::OrHash, OpenAI::Responses::ToolChoiceTypes::OrHash, OpenAI::Responses::ToolChoiceFunction::OrHash, - OpenAI::Responses::ToolChoiceMcp::OrHash + OpenAI::Responses::ToolChoiceMcp::OrHash, + OpenAI::Responses::ToolChoiceCustom::OrHash ), tools: T::Array[ @@ -330,6 +345,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -351,6 +367,8 @@ module OpenAI T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), usage: OpenAI::Responses::ResponseUsage::OrHash, user: String, + verbosity: + T.nilable(OpenAI::Responses::Response::Verbosity::OrSymbol), object: Symbol ).returns(T.attached_class) end @@ -413,8 +431,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. tools:, # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -503,6 +523,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # The object type of this resource - always set to `response`. object: :response ) @@ -547,7 +571,9 @@ module OpenAI OpenAI::Responses::Response::Truncation::TaggedSymbol ), usage: OpenAI::Responses::ResponseUsage, - user: String + user: String, + verbosity: + T.nilable(OpenAI::Responses::Response::Verbosity::TaggedSymbol) } ) end @@ -684,9 +710,11 @@ module OpenAI T.type_alias do T.any( OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceAllowed, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, - OpenAI::Responses::ToolChoiceMcp + OpenAI::Responses::ToolChoiceMcp, + OpenAI::Responses::ToolChoiceCustom ) end @@ -786,6 +814,34 @@ module OpenAI def self.values end end + + # Constrains the verbosity of the model's response. 
Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Response::Verbosity) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let(:low, OpenAI::Responses::Response::Verbosity::TaggedSymbol) + MEDIUM = + T.let(:medium, OpenAI::Responses::Response::Verbosity::TaggedSymbol) + HIGH = + T.let(:high, OpenAI::Responses::Response::Verbosity::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::Responses::Response::Verbosity::TaggedSymbol] + ) + end + def self.values + end + end end end end diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 00498f46..1c912e64 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -208,6 +208,24 @@ module OpenAI sig { returns(T.nilable(T::Boolean)) } attr_accessor :store + # Options for streaming responses. Only set this when you set `stream: true`. + sig do + returns( + T.nilable(OpenAI::Responses::ResponseCreateParams::StreamOptions) + ) + end + attr_reader :stream_options + + sig do + params( + stream_options: + T.nilable( + OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash + ) + ).void + end + attr_writer :stream_options + # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but @@ -234,9 +252,11 @@ module OpenAI T.nilable( T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, - OpenAI::Responses::ToolChoiceMcp + OpenAI::Responses::ToolChoiceMcp, + OpenAI::Responses::ToolChoiceCustom ) ) ) @@ -248,9 +268,11 @@ module OpenAI tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed::OrHash, OpenAI::Responses::ToolChoiceTypes::OrHash, OpenAI::Responses::ToolChoiceFunction::OrHash, - OpenAI::Responses::ToolChoiceMcp::OrHash + OpenAI::Responses::ToolChoiceMcp::OrHash, + OpenAI::Responses::ToolChoiceCustom::OrHash ) ).void end @@ -268,8 +290,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. 
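+      # A minimal sketch mixing the two kinds (names are hypothetical):
+      #
+      # ```ruby
+      # tools: [
+      #   {
+      #     type: :function,
+      #     name: "get_weather",
+      #     parameters: {type: "object", properties: {}},
+      #     strict: true
+      #   },
+      #   {type: :custom, name: "code_exec", description: "Executes Ruby snippets"}
+      # ]
+      # ```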
sig do returns( T.nilable( @@ -282,6 +306,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::CustomTool, OpenAI::Responses::WebSearchTool ) ] @@ -302,6 +327,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -349,6 +375,18 @@ module OpenAI sig { params(user: String).void } attr_writer :user + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol + ) + ) + end + attr_accessor :verbosity + sig do params( background: T.nilable(T::Boolean), @@ -378,14 +416,20 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol ), store: T.nilable(T::Boolean), + stream_options: + T.nilable( + OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash + ), temperature: T.nilable(Float), text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed::OrHash, OpenAI::Responses::ToolChoiceTypes::OrHash, OpenAI::Responses::ToolChoiceFunction::OrHash, - OpenAI::Responses::ToolChoiceMcp::OrHash + OpenAI::Responses::ToolChoiceMcp::OrHash, + OpenAI::Responses::ToolChoiceCustom::OrHash ), tools: T::Array[ @@ -397,6 +441,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -407,6 +452,10 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -513,6 +562,8 @@ module OpenAI service_tier: nil, # Whether to store the generated model response for later retrieval via API. store: nil, + # Options for streaming responses. Only set this when you set `stream: true`. + stream_options: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but @@ -540,8 +591,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -566,6 +619,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. 
# [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, request_options: {} ) end @@ -600,14 +657,20 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol ), store: T.nilable(T::Boolean), + stream_options: + T.nilable( + OpenAI::Responses::ResponseCreateParams::StreamOptions + ), temperature: T.nilable(Float), text: OpenAI::Responses::ResponseTextConfig, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, - OpenAI::Responses::ToolChoiceMcp + OpenAI::Responses::ToolChoiceMcp, + OpenAI::Responses::ToolChoiceCustom ), tools: T::Array[ @@ -619,6 +682,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::CustomTool, OpenAI::Responses::WebSearchTool ) ], @@ -629,6 +693,10 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol + ), request_options: OpenAI::RequestOptions } ) @@ -731,6 +799,47 @@ module OpenAI end end + class StreamOptions < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCreateParams::StreamOptions, + OpenAI::Internal::AnyHash + ) + end + + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :include_obfuscation + + sig { params(include_obfuscation: T::Boolean).void } + attr_writer :include_obfuscation + + # Options for streaming responses. Only set this when you set `stream: true`. + sig do + params(include_obfuscation: T::Boolean).returns(T.attached_class) + end + def self.new( + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + include_obfuscation: nil + ) + end + + sig { override.returns({ include_obfuscation: T::Boolean }) } + def to_hash + end + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. 
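      # Editor's sketch (assumed request shape, not emitted by this codegen): with
      # the union members added in the hunk below, `tool_choice` can also constrain
      # the model to an allow-list of tools, e.g.
      #
      #   tool_choice: {
      #     type: :allowed_tools,
      #     mode: :auto,
      #     tools: [{type: "function", name: "get_weather"}]
      #   }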
@@ -741,9 +850,11 @@ module OpenAI T.type_alias do T.any( OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, + OpenAI::Responses::ToolChoiceAllowed, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, - OpenAI::Responses::ToolChoiceMcp + OpenAI::Responses::ToolChoiceMcp, + OpenAI::Responses::ToolChoiceCustom ) end @@ -795,6 +906,45 @@ module OpenAI def self.values end end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseCreateParams::Verbosity) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end end end end diff --git a/rbi/openai/models/responses/response_custom_tool_call.rbi b/rbi/openai/models/responses/response_custom_tool_call.rbi new file mode 100644 index 00000000..89eb7f74 --- /dev/null +++ b/rbi/openai/models/responses/response_custom_tool_call.rbi @@ -0,0 +1,78 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCustomToolCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Internal::AnyHash + ) + end + + # An identifier used to map this custom tool call to a tool call output. + sig { returns(String) } + attr_accessor :call_id + + # The input for the custom tool call generated by the model. + sig { returns(String) } + attr_accessor :input + + # The name of the custom tool being called. + sig { returns(String) } + attr_accessor :name + + # The type of the custom tool call. Always `custom_tool_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The unique ID of the custom tool call in the OpenAI platform. + sig { returns(T.nilable(String)) } + attr_reader :id + + sig { params(id: String).void } + attr_writer :id + + # A call to a custom tool created by the model. + sig do + params( + call_id: String, + input: String, + name: String, + id: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # An identifier used to map this custom tool call to a tool call output. + call_id:, + # The input for the custom tool call generated by the model. + input:, + # The name of the custom tool being called. + name:, + # The unique ID of the custom tool call in the OpenAI platform. + id: nil, + # The type of the custom tool call. Always `custom_tool_call`. 
+ type: :custom_tool_call + ) + end + + sig do + override.returns( + { + call_id: String, + input: String, + name: String, + type: Symbol, + id: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_custom_tool_call_input_delta_event.rbi b/rbi/openai/models/responses/response_custom_tool_call_input_delta_event.rbi new file mode 100644 index 00000000..dbc175f0 --- /dev/null +++ b/rbi/openai/models/responses/response_custom_tool_call_input_delta_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallInputDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The incremental input data (delta) for the custom tool call. + sig { returns(String) } + attr_accessor :delta + + # Unique identifier for the API item associated with this event. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output this delta applies to. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The event type identifier. + sig { returns(Symbol) } + attr_accessor :type + + # Event representing a delta (partial update) to the input of a custom tool call. + sig do + params( + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The incremental input data (delta) for the custom tool call. + delta:, + # Unique identifier for the API item associated with this event. + item_id:, + # The index of the output this delta applies to. + output_index:, + # The sequence number of this event. + sequence_number:, + # The event type identifier. + type: :"response.custom_tool_call_input.delta" + ) + end + + sig do + override.returns( + { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_custom_tool_call_input_done_event.rbi b/rbi/openai/models/responses/response_custom_tool_call_input_done_event.rbi new file mode 100644 index 00000000..cd3c485d --- /dev/null +++ b/rbi/openai/models/responses/response_custom_tool_call_input_done_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallInputDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCustomToolCallInputDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The complete input data for the custom tool call. + sig { returns(String) } + attr_accessor :input + + # Unique identifier for the API item associated with this event. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output this event applies to. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The event type identifier. + sig { returns(Symbol) } + attr_accessor :type + + # Event indicating that input for a custom tool call is complete. 
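      # Editor's sketch (hypothetical consumer code, not part of this patch): a
      # streaming caller could buffer the delta events and act once this done
      # event arrives; `handle_custom_tool` is an assumed application helper.
      #
      #   input = +""
      #   stream.each do |event|
      #     case event
      #     when OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent
      #       input << event.delta
      #     when OpenAI::Responses::ResponseCustomToolCallInputDoneEvent
      #       handle_custom_tool(event.item_id, input)
      #     end
      #   end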
+ sig do + params( + input: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The complete input data for the custom tool call. + input:, + # Unique identifier for the API item associated with this event. + item_id:, + # The index of the output this event applies to. + output_index:, + # The sequence number of this event. + sequence_number:, + # The event type identifier. + type: :"response.custom_tool_call_input.done" + ) + end + + sig do + override.returns( + { + input: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_custom_tool_call_output.rbi b/rbi/openai/models/responses/response_custom_tool_call_output.rbi new file mode 100644 index 00000000..b18c6a16 --- /dev/null +++ b/rbi/openai/models/responses/response_custom_tool_call_output.rbi @@ -0,0 +1,65 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCustomToolCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The call ID, used to map this custom tool call output to a custom tool call. + sig { returns(String) } + attr_accessor :call_id + + # The output from the custom tool call generated by your code. + sig { returns(String) } + attr_accessor :output + + # The type of the custom tool call output. Always `custom_tool_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The unique ID of the custom tool call output in the OpenAI platform. + sig { returns(T.nilable(String)) } + attr_reader :id + + sig { params(id: String).void } + attr_writer :id + + # The output of a custom tool call from your code, being sent back to the model. + sig do + params( + call_id: String, + output: String, + id: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The call ID, used to map this custom tool call output to a custom tool call. + call_id:, + # The output from the custom tool call generated by your code. + output:, + # The unique ID of the custom tool call output in the OpenAI platform. + id: nil, + # The type of the custom tool call output. Always `custom_tool_call_output`. 
+ type: :custom_tool_call_output + ) + end + + sig do + override.returns( + { call_id: String, output: String, type: Symbol, id: String } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index bbafe9cb..6b28bab2 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -32,6 +32,8 @@ module OpenAI OpenAI::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Responses::ResponseCustomToolCall, OpenAI::Responses::ResponseInputItem::ItemReference ) end diff --git a/rbi/openai/models/responses/response_output_item.rbi b/rbi/openai/models/responses/response_output_item.rbi index 51542fc3..1f08b218 100644 --- a/rbi/openai/models/responses/response_output_item.rbi +++ b/rbi/openai/models/responses/response_output_item.rbi @@ -21,7 +21,8 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Responses::ResponseOutputItem::McpCall, OpenAI::Responses::ResponseOutputItem::McpListTools, - OpenAI::Responses::ResponseOutputItem::McpApprovalRequest + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest, + OpenAI::Responses::ResponseCustomToolCall ) end diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index 6479c80d..6ac51356 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -44,7 +44,8 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, - OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash ), output_index: Integer, sequence_number: Integer, diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index 7789f951..ba0ecc6d 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -44,7 +44,8 @@ module OpenAI OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, - OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash ), output_index: Integer, sequence_number: Integer, diff --git a/rbi/openai/models/responses/response_retrieve_params.rbi b/rbi/openai/models/responses/response_retrieve_params.rbi index c25abeb6..f4d1f80c 100644 --- a/rbi/openai/models/responses/response_retrieve_params.rbi +++ b/rbi/openai/models/responses/response_retrieve_params.rbi @@ -31,6 +31,18 @@ module OpenAI end attr_writer :include + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. 
These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :include_obfuscation + + sig { params(include_obfuscation: T::Boolean).void } + attr_writer :include_obfuscation + # The sequence number of the event after which to start streaming. sig { returns(T.nilable(Integer)) } attr_reader :starting_after @@ -41,6 +53,7 @@ module OpenAI sig do params( include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + include_obfuscation: T::Boolean, starting_after: Integer, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) @@ -49,6 +62,13 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + include_obfuscation: nil, # The sequence number of the event after which to start streaming. starting_after: nil, request_options: {} @@ -60,6 +80,7 @@ module OpenAI { include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + include_obfuscation: T::Boolean, starting_after: Integer, request_options: OpenAI::RequestOptions } diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index e6269fa0..0aba05b2 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -60,7 +60,9 @@ module OpenAI OpenAI::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, - OpenAI::Responses::ResponseQueuedEvent + OpenAI::Responses::ResponseQueuedEvent, + OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent, + OpenAI::Responses::ResponseCustomToolCallInputDoneEvent ) end diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index fa1bac73..b5b5d1b5 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -17,6 +17,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter, OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, + OpenAI::Responses::CustomTool, OpenAI::Responses::WebSearchTool ) end diff --git a/rbi/openai/models/responses/tool_choice_allowed.rbi b/rbi/openai/models/responses/tool_choice_allowed.rbi new file mode 100644 index 00000000..47ee4cca --- /dev/null +++ b/rbi/openai/models/responses/tool_choice_allowed.rbi @@ -0,0 +1,124 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ToolChoiceAllowed < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceAllowed, + OpenAI::Internal::AnyHash + ) + end + + # Constrains the tools available to the model to a pre-defined set. 
+ # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + sig { returns(OpenAI::Responses::ToolChoiceAllowed::Mode::OrSymbol) } + attr_accessor :mode + + # A list of tool definitions that the model should be allowed to call. + # + # For the Responses API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "name": "get_weather" }, + # { "type": "mcp", "server_label": "deepwiki" }, + # { "type": "image_generation" } + # ] + # ``` + sig { returns(T::Array[T::Hash[Symbol, T.anything]]) } + attr_accessor :tools + + # Allowed tool configuration type. Always `allowed_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Constrains the tools available to the model to a pre-defined set. + sig do + params( + mode: OpenAI::Responses::ToolChoiceAllowed::Mode::OrSymbol, + tools: T::Array[T::Hash[Symbol, T.anything]], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + mode:, + # A list of tool definitions that the model should be allowed to call. + # + # For the Responses API, the list of tool definitions might look like: + # + # ```json + # [ + # { "type": "function", "name": "get_weather" }, + # { "type": "mcp", "server_label": "deepwiki" }, + # { "type": "image_generation" } + # ] + # ``` + tools:, + # Allowed tool configuration type. Always `allowed_tools`. + type: :allowed_tools + ) + end + + sig do + override.returns( + { + mode: OpenAI::Responses::ToolChoiceAllowed::Mode::OrSymbol, + tools: T::Array[T::Hash[Symbol, T.anything]], + type: Symbol + } + ) + end + def to_hash + end + + # Constrains the tools available to the model to a pre-defined set. + # + # `auto` allows the model to pick from among the allowed tools and generate a + # message. + # + # `required` requires the model to call one or more of the allowed tools. + module Mode + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ToolChoiceAllowed::Mode) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Responses::ToolChoiceAllowed::Mode::TaggedSymbol + ) + REQUIRED = + T.let( + :required, + OpenAI::Responses::ToolChoiceAllowed::Mode::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::Responses::ToolChoiceAllowed::Mode::TaggedSymbol] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/responses/tool_choice_custom.rbi b/rbi/openai/models/responses/tool_choice_custom.rbi new file mode 100644 index 00000000..d3944c11 --- /dev/null +++ b/rbi/openai/models/responses/tool_choice_custom.rbi @@ -0,0 +1,39 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ToolChoiceCustom < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ToolChoiceCustom, + OpenAI::Internal::AnyHash + ) + end + + # The name of the custom tool to call. + sig { returns(String) } + attr_accessor :name + + # For custom tool calling, the type is always `custom`. + sig { returns(Symbol) } + attr_accessor :type + + # Use this option to force the model to call a specific custom tool. 
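      # Editor's sketch: per the signature below, construction is simply
      #
      #   OpenAI::Responses::ToolChoiceCustom.new(name: "run_sql")
      #
      # whose `to_hash` is `{name: "run_sql", type: :custom}`. The tool name is an
      # illustrative assumption.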
+ sig { params(name: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The name of the custom tool to call. + name:, + # For custom tool calling, the type is always `custom`. + type: :custom + ) + end + + sig { override.returns({ name: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/resources/beta/assistants.rbi b/rbi/openai/resources/beta/assistants.rbi index d58ecbb1..6489d48e 100644 --- a/rbi/openai/resources/beta/assistants.rbi +++ b/rbi/openai/resources/beta/assistants.rbi @@ -60,12 +60,11 @@ module OpenAI metadata: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -188,12 +187,11 @@ module OpenAI model: nil, # The name of the assistant. The maximum length is 256 characters. name: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index ba97e4db..bab46d8b 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -123,12 +123,11 @@ module OpenAI # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. parallel_tool_calls: nil, - # Body param: **o-series models only** - # - # Constrains effort on reasoning for + # Body param: Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Body param: Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -303,12 +302,11 @@ module OpenAI # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. 
parallel_tool_calls: nil, - # Body param: **o-series models only** - # - # Constrains effort on reasoning for + # Body param: Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # Body param: Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index dcc6e96a..dc064892 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -88,12 +88,24 @@ module OpenAI tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + OpenAI::Chat::ChatCompletionAllowedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::OrHash ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::OrHash + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Verbosity::OrSymbol + ), web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, stream: T.noreturn, @@ -203,12 +215,11 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # An object specifying the format that the model must output. # @@ -279,9 +290,9 @@ module OpenAI # `none` is the default when no tools are present. `auto` is the default if tools # are present. tool_choice: nil, - # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # A list of tools the model may call. You can provide either + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) + # or [function tools](https://platform.openai.com/docs/guides/function-calling). tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -299,6 +310,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). 
user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). @@ -391,12 +406,24 @@ module OpenAI tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash + OpenAI::Chat::ChatCompletionAllowedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash, + OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::OrHash ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionFunctionTool::OrHash, + OpenAI::Chat::ChatCompletionCustomTool::OrHash + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Verbosity::OrSymbol + ), web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::OrHash, stream: T.noreturn, @@ -506,12 +533,11 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** - # # Constrains effort on reasoning for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). Currently - # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can - # result in faster responses and fewer tokens used on reasoning in a response. + # supported values are `minimal`, `low`, `medium`, and `high`. Reducing reasoning + # effort can result in faster responses and fewer tokens used on reasoning in a + # response. reasoning_effort: nil, # An object specifying the format that the model must output. # @@ -582,9 +608,9 @@ module OpenAI # `none` is the default when no tools are present. `auto` is the default if tools # are present. tool_choice: nil, - # A list of tools the model may call. Currently, only functions are supported as a - # tool. Use this to provide a list of functions the model may generate JSON inputs - # for. A max of 128 functions are supported. + # A list of tools the model may call. You can provide either + # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools) + # or [function tools](https://platform.openai.com/docs/guides/function-calling). tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -602,6 +628,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
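Editor's note between files: taken together, the Chat Completions changes above admit calls like the hedged sketch below. The model, tool name, and message text are illustrative assumptions, and `client` construction follows the gem's usual pattern of reading `OPENAI_API_KEY` from the environment.

```ruby
require "openai"

# Hedged sketch: pair a custom (freeform) tool with low verbosity.
client = OpenAI::Client.new

chat = client.chat.completions.create(
  model: :"gpt-5",
  messages: [{role: :user, content: "Use the calculator tool to add 2 and 2."}],
  verbosity: :low,
  tools: [
    # ChatCompletionCustomTool: the custom tool payload nests under `custom:`.
    {type: :custom, custom: {name: "calculator", description: "Evaluates arithmetic"}}
  ],
  # ChatCompletionNamedToolChoiceCustom: force the custom tool by name.
  tool_choice: {type: :custom, custom: {name: "calculator"}}
)

chat.choices.first.message.tool_calls&.each do |call|
  # `tool_calls` is now a union of function and custom tool calls.
  puts call.custom.input if call.type == :custom
end
```

Note the asymmetry with the Responses API in the next file, where the custom tool definition and the `tool_choice: {type: :custom, name: ...}` variant appear to be flat objects rather than nested under `custom:`.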
diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 576bcc24..fc4749fd 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -48,14 +48,20 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol ), store: T.nilable(T::Boolean), + stream_options: + T.nilable( + OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash + ), temperature: T.nilable(Float), text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed::OrHash, OpenAI::Responses::ToolChoiceTypes::OrHash, OpenAI::Responses::ToolChoiceFunction::OrHash, - OpenAI::Responses::ToolChoiceMcp::OrHash + OpenAI::Responses::ToolChoiceMcp::OrHash, + OpenAI::Responses::ToolChoiceCustom::OrHash ), tools: T::Array[ @@ -67,6 +73,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -77,6 +84,10 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Responses::Response) @@ -184,6 +195,8 @@ module OpenAI service_tier: nil, # Whether to store the generated model response for later retrieval via API. store: nil, + # Options for streaming responses. Only set this when you set `stream: true`. + stream_options: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but @@ -211,8 +224,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -237,6 +252,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. 
stream: false, @@ -286,14 +305,20 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::ServiceTier::OrSymbol ), store: T.nilable(T::Boolean), + stream_options: + T.nilable( + OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash + ), temperature: T.nilable(Float), text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, + OpenAI::Responses::ToolChoiceAllowed::OrHash, OpenAI::Responses::ToolChoiceTypes::OrHash, OpenAI::Responses::ToolChoiceFunction::OrHash, - OpenAI::Responses::ToolChoiceMcp::OrHash + OpenAI::Responses::ToolChoiceMcp::OrHash, + OpenAI::Responses::ToolChoiceCustom::OrHash ), tools: T::Array[ @@ -305,6 +330,7 @@ module OpenAI OpenAI::Responses::Tool::CodeInterpreter::OrHash, OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, + OpenAI::Responses::CustomTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -315,6 +341,10 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( @@ -426,6 +456,8 @@ module OpenAI service_tier: nil, # Whether to store the generated model response for later retrieval via API. store: nil, + # Options for streaming responses. Only set this when you set `stream: true`. + stream_options: nil, # What sampling temperature to use, between 0 and 2. Higher values like 0.8 will # make the output more random, while lower values like 0.2 will make it more # focused and deterministic. We generally recommend altering this or `top_p` but @@ -453,8 +485,10 @@ module OpenAI # Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). # - **Function calls (custom tools)**: Functions that are defined by you, enabling - # the model to call your own code. Learn more about + # the model to call your own code with strongly typed arguments and outputs. + # Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). + # You can also use custom tools to call your own code. tools: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -479,6 +513,10 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. stream: true, @@ -493,6 +531,7 @@ module OpenAI params( response_id: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + include_obfuscation: T::Boolean, starting_after: Integer, stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash @@ -504,6 +543,13 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # When true, stream obfuscation will be enabled. 
Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + include_obfuscation: nil, # The sequence number of the event after which to start streaming. starting_after: nil, # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or @@ -520,6 +566,7 @@ module OpenAI params( response_id: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + include_obfuscation: T::Boolean, starting_after: Integer, stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash @@ -535,6 +582,13 @@ module OpenAI # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, + # When true, stream obfuscation will be enabled. Stream obfuscation adds random + # characters to an `obfuscation` field on streaming delta events to normalize + # payload sizes as a mitigation to certain side-channel attacks. These obfuscation + # fields are included by default, but add a small amount of overhead to the data + # stream. You can set `include_obfuscation` to false to optimize for bandwidth if + # you trust the network links between your application and the OpenAI API. + include_obfuscation: nil, # The sequence number of the event after which to start streaming. starting_after: nil, # There is no need to provide `stream:`. Instead, use `#retrieve_streaming` or diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index 1c5e1e9d..67856129 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -53,6 +53,8 @@ module OpenAI class CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse + module CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat + class Embedding = OpenAI::Models::Embedding class EmbeddingCreateParams = OpenAI::Models::EmbeddingCreateParams @@ -167,6 +169,10 @@ module OpenAI class ResponseFormatText = OpenAI::Models::ResponseFormatText + class ResponseFormatTextGrammar = OpenAI::Models::ResponseFormatTextGrammar + + class ResponseFormatTextPython = OpenAI::Models::ResponseFormatTextPython + module Responses = OpenAI::Models::Responses module ResponsesModel = OpenAI::Models::ResponsesModel diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index 6ee6405d..9d1254f3 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -83,6 +83,12 @@ module OpenAI type model = String + | :"gpt-5" + | :"gpt-5-mini" + | :"gpt-5-nano" + | :"gpt-5-2025-08-07" + | :"gpt-5-mini-2025-08-07" + | :"gpt-5-nano-2025-08-07" | :"gpt-4.1" | :"gpt-4.1-mini" | :"gpt-4.1-nano" @@ -125,6 +131,12 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Beta::AssistantUpdateParams::model] + GPT_5: :"gpt-5" + GPT_5_MINI: :"gpt-5-mini" + GPT_5_NANO: :"gpt-5-nano" + GPT_5_2025_08_07: :"gpt-5-2025-08-07" + GPT_5_MINI_2025_08_07: :"gpt-5-mini-2025-08-07" + GPT_5_NANO_2025_08_07: :"gpt-5-nano-2025-08-07" GPT_4_1: :"gpt-4.1" GPT_4_1_MINI: :"gpt-4.1-mini" GPT_4_1_NANO: :"gpt-4.1-nano" diff --git a/sig/openai/models/chat/chat_completion_allowed_tool_choice.rbs 
b/sig/openai/models/chat/chat_completion_allowed_tool_choice.rbs new file mode 100644 index 00000000..6de6e1c5 --- /dev/null +++ b/sig/openai/models/chat/chat_completion_allowed_tool_choice.rbs @@ -0,0 +1,29 @@ +module OpenAI + module Models + class ChatCompletionAllowedToolChoice = Chat::ChatCompletionAllowedToolChoice + + module Chat + type chat_completion_allowed_tool_choice = + { + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools, + type: :allowed_tools + } + + class ChatCompletionAllowedToolChoice < OpenAI::Internal::Type::BaseModel + attr_accessor allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools + + attr_accessor type: :allowed_tools + + def initialize: ( + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools, + ?type: :allowed_tools + ) -> void + + def to_hash: -> { + allowed_tools: OpenAI::Chat::ChatCompletionAllowedTools, + type: :allowed_tools + } + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_allowed_tools.rbs b/sig/openai/models/chat/chat_completion_allowed_tools.rbs new file mode 100644 index 00000000..0744b34f --- /dev/null +++ b/sig/openai/models/chat/chat_completion_allowed_tools.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Models + module Chat + type chat_completion_allowed_tools = + { + mode: OpenAI::Models::Chat::ChatCompletionAllowedTools::mode, + tools: ::Array[::Hash[Symbol, top]] + } + + class ChatCompletionAllowedTools < OpenAI::Internal::Type::BaseModel + attr_accessor mode: OpenAI::Models::Chat::ChatCompletionAllowedTools::mode + + attr_accessor tools: ::Array[::Hash[Symbol, top]] + + def initialize: ( + mode: OpenAI::Models::Chat::ChatCompletionAllowedTools::mode, + tools: ::Array[::Hash[Symbol, top]] + ) -> void + + def to_hash: -> { + mode: OpenAI::Models::Chat::ChatCompletionAllowedTools::mode, + tools: ::Array[::Hash[Symbol, top]] + } + + type mode = :auto | :required + + module Mode + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + REQUIRED: :required + + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAllowedTools::mode] + end + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index afc4b011..0b86be4a 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -11,7 +11,7 @@ module OpenAI function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, name: String, refusal: String?, - tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] } class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel @@ -29,11 +29,11 @@ module OpenAI attr_accessor refusal: String? - attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall]? + attr_reader tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call]? 
def tool_calls=: ( - ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] - ) -> ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] + ) -> ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] def initialize: ( ?audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, @@ -41,7 +41,7 @@ module OpenAI ?function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, ?name: String, ?refusal: String?, - ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall], + ?tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call], ?role: :assistant ) -> void @@ -52,7 +52,7 @@ module OpenAI function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, name: String, refusal: String?, - tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] } type audio = { id: String } diff --git a/sig/openai/models/chat/chat_completion_custom_tool.rbs b/sig/openai/models/chat/chat_completion_custom_tool.rbs new file mode 100644 index 00000000..ac57cae7 --- /dev/null +++ b/sig/openai/models/chat/chat_completion_custom_tool.rbs @@ -0,0 +1,137 @@ +module OpenAI + module Models + class ChatCompletionCustomTool = Chat::ChatCompletionCustomTool + + module Chat + type chat_completion_custom_tool = + { + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom, + type: :custom + } + + class ChatCompletionCustomTool < OpenAI::Internal::Type::BaseModel + attr_accessor custom: OpenAI::Chat::ChatCompletionCustomTool::Custom + + attr_accessor type: :custom + + def initialize: ( + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom, + ?type: :custom + ) -> void + + def to_hash: -> { + custom: OpenAI::Chat::ChatCompletionCustomTool::Custom, + type: :custom + } + + type custom = + { + name: String, + description: String, + format_: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_ + } + + class Custom < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_reader description: String? + + def description=: (String) -> String + + attr_reader format_: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_? 
+ + def format_=: ( + OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_ + ) -> OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_ + + def initialize: ( + name: String, + ?description: String, + ?format_: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_ + ) -> void + + def to_hash: -> { + name: String, + description: String, + format_: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_ + } + + type format_ = + OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Text + | OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar + + module Format + extend OpenAI::Internal::Type::Union + + type text = { type: :text } + + class Text < OpenAI::Internal::Type::BaseModel + attr_accessor type: :text + + def initialize: (?type: :text) -> void + + def to_hash: -> { type: :text } + end + + type grammar = + { + grammar: OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar, + type: :grammar + } + + class Grammar < OpenAI::Internal::Type::BaseModel + attr_accessor grammar: OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar + + attr_accessor type: :grammar + + def initialize: ( + grammar: OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar, + ?type: :grammar + ) -> void + + def to_hash: -> { + grammar: OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar, + type: :grammar + } + + type grammar = + { + definition: String, + syntax: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::syntax + } + + class Grammar < OpenAI::Internal::Type::BaseModel + attr_accessor definition: String + + attr_accessor syntax: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::syntax + + def initialize: ( + definition: String, + syntax: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::syntax + ) -> void + + def to_hash: -> { + definition: String, + syntax: OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::syntax + } + + type syntax = :lark | :regex + + module Syntax + extend OpenAI::Internal::Type::Enum + + LARK: :lark + REGEX: :regex + + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::syntax] + end + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::format_] + end + end + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_function_tool.rbs b/sig/openai/models/chat/chat_completion_function_tool.rbs new file mode 100644 index 00000000..03d0abce --- /dev/null +++ b/sig/openai/models/chat/chat_completion_function_tool.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + class ChatCompletionFunctionTool = Chat::ChatCompletionFunctionTool + + module Chat + type chat_completion_function_tool = + { function: OpenAI::FunctionDefinition, type: :function } + + class ChatCompletionFunctionTool < OpenAI::Internal::Type::BaseModel + attr_accessor function: OpenAI::FunctionDefinition + + attr_accessor type: :function + + def initialize: ( + function: OpenAI::FunctionDefinition, + ?type: :function + ) -> void + + def to_hash: -> { + function: OpenAI::FunctionDefinition, + type: :function + } + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index 7225d0a0..dbf4b405 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ 
b/sig/openai/models/chat/chat_completion_message.rbs @@ -11,7 +11,7 @@ module OpenAI annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], audio: OpenAI::Chat::ChatCompletionAudio?, function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, - tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] } class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel @@ -35,11 +35,11 @@ module OpenAI OpenAI::Chat::ChatCompletionMessage::FunctionCall ) -> OpenAI::Chat::ChatCompletionMessage::FunctionCall - attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall]? + attr_reader tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call]? def tool_calls=: ( - ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] - ) -> ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] + ) -> ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] def initialize: ( content: String?, @@ -47,7 +47,7 @@ module OpenAI ?annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], ?audio: OpenAI::Chat::ChatCompletionAudio?, ?function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, - ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall], + ?tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call], ?role: :assistant ) -> void @@ -58,7 +58,7 @@ module OpenAI annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], audio: OpenAI::Chat::ChatCompletionAudio?, function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, - tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + tool_calls: ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] } type annotation = diff --git a/sig/openai/models/chat/chat_completion_message_custom_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_custom_tool_call.rbs new file mode 100644 index 00000000..b3852753 --- /dev/null +++ b/sig/openai/models/chat/chat_completion_message_custom_tool_call.rbs @@ -0,0 +1,46 @@ +module OpenAI + module Models + class ChatCompletionMessageCustomToolCall = Chat::ChatCompletionMessageCustomToolCall + + module Chat + type chat_completion_message_custom_tool_call = + { + id: String, + custom: OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom, + type: :custom + } + + class ChatCompletionMessageCustomToolCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor custom: OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom + + attr_accessor type: :custom + + def initialize: ( + id: String, + custom: OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom, + ?type: :custom + ) -> void + + def to_hash: -> { + id: String, + custom: OpenAI::Chat::ChatCompletionMessageCustomToolCall::Custom, + type: :custom + } + + type custom = { input: String, name: String } + + class Custom < OpenAI::Internal::Type::BaseModel + attr_accessor input: String + + attr_accessor name: String + + def initialize: (input: String, name: String) -> void + + def to_hash: -> { input: String, name: String } + end + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_message_function_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_function_tool_call.rbs new file mode 100644 index 00000000..ebd90f7f --- /dev/null +++ b/sig/openai/models/chat/chat_completion_message_function_tool_call.rbs @@ -0,0 +1,46 
@@ +module OpenAI + module Models + class ChatCompletionMessageFunctionToolCall = Chat::ChatCompletionMessageFunctionToolCall + + module Chat + type chat_completion_message_function_tool_call = + { + id: String, + function: OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function, + type: :function + } + + class ChatCompletionMessageFunctionToolCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor function: OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function + + attr_accessor type: :function + + def initialize: ( + id: String, + function: OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function, + ?type: :function + ) -> void + + def to_hash: -> { + id: String, + function: OpenAI::Chat::ChatCompletionMessageFunctionToolCall::Function, + type: :function + } + + type function = { arguments: String, name: String } + + class Function < OpenAI::Internal::Type::BaseModel + attr_accessor arguments: String + + attr_accessor name: String + + def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } + end + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index c787ea9c..446c9f1e 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -1,45 +1,16 @@ module OpenAI module Models - class ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall + module ChatCompletionMessageToolCall = Chat::ChatCompletionMessageToolCall module Chat type chat_completion_message_tool_call = - { - id: String, - function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, - type: :function - } + OpenAI::Chat::ChatCompletionMessageFunctionToolCall + | OpenAI::Chat::ChatCompletionMessageCustomToolCall - class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel - attr_accessor id: String + module ChatCompletionMessageToolCall + extend OpenAI::Internal::Type::Union - attr_accessor function: OpenAI::Chat::ChatCompletionMessageToolCall::Function - - attr_accessor type: :function - - def initialize: ( - id: String, - function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, - ?type: :function - ) -> void - - def to_hash: -> { - id: String, - function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, - type: :function - } - - type function = { arguments: String, name: String } - - class Function < OpenAI::Internal::Type::BaseModel - attr_accessor arguments: String - - attr_accessor name: String - - def initialize: (arguments: String, name: String) -> void - - def to_hash: -> { arguments: String, name: String } - end + def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_message_tool_call] end end end diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice_custom.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice_custom.rbs new file mode 100644 index 00000000..ec6ae0e9 --- /dev/null +++ b/sig/openai/models/chat/chat_completion_named_tool_choice_custom.rbs @@ -0,0 +1,39 @@ +module OpenAI + module Models + class ChatCompletionNamedToolChoiceCustom = Chat::ChatCompletionNamedToolChoiceCustom + + module Chat + type chat_completion_named_tool_choice_custom = + { + custom: OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom, + type: :custom + } + + class ChatCompletionNamedToolChoiceCustom < OpenAI::Internal::Type::BaseModel + attr_accessor custom: 
OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom + + attr_accessor type: :custom + + def initialize: ( + custom: OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom, + ?type: :custom + ) -> void + + def to_hash: -> { + custom: OpenAI::Chat::ChatCompletionNamedToolChoiceCustom::Custom, + type: :custom + } + + type custom = { name: String } + + class Custom < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + def initialize: (name: String) -> void + + def to_hash: -> { name: String } + end + end + end + end +end diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index 6905d394..7217a030 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -3,16 +3,24 @@ module OpenAI class ChatCompletionStreamOptions = Chat::ChatCompletionStreamOptions module Chat - type chat_completion_stream_options = { include_usage: bool } + type chat_completion_stream_options = + { include_obfuscation: bool, include_usage: bool } class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel + attr_reader include_obfuscation: bool? + + def include_obfuscation=: (bool) -> bool + attr_reader include_usage: bool? def include_usage=: (bool) -> bool - def initialize: (?include_usage: bool) -> void + def initialize: ( + ?include_obfuscation: bool, + ?include_usage: bool + ) -> void - def to_hash: -> { include_usage: bool } + def to_hash: -> { include_obfuscation: bool, include_usage: bool } end end end diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index 23153c68..34abaf37 100644 --- a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -1,25 +1,16 @@ module OpenAI module Models - class ChatCompletionTool = Chat::ChatCompletionTool + module ChatCompletionTool = Chat::ChatCompletionTool module Chat type chat_completion_tool = - { function: OpenAI::FunctionDefinition, type: :function } + OpenAI::Chat::ChatCompletionFunctionTool + | OpenAI::Chat::ChatCompletionCustomTool - class ChatCompletionTool < OpenAI::Internal::Type::BaseModel - attr_accessor function: OpenAI::FunctionDefinition + module ChatCompletionTool + extend OpenAI::Internal::Type::Union - attr_accessor type: :function - - def initialize: ( - function: OpenAI::FunctionDefinition, - ?type: :function - ) -> void - - def to_hash: -> { - function: OpenAI::FunctionDefinition, - type: :function - } + def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_tool] end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index 523db9a4..383ccb44 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -5,7 +5,9 @@ module OpenAI module Chat type chat_completion_tool_choice_option = OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto + | OpenAI::Chat::ChatCompletionAllowedToolChoice | OpenAI::Chat::ChatCompletionNamedToolChoice + | OpenAI::Chat::ChatCompletionNamedToolChoiceCustom module ChatCompletionToolChoiceOption extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 298d3d31..e02095c4 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ 
b/sig/openai/models/chat/completion_create_params.rbs @@ -30,10 +30,11 @@ module OpenAI stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - tools: ::Array[OpenAI::Chat::ChatCompletionTool], + tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, top_p: Float?, user: String, + verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity?, web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions } & OpenAI::Internal::Type::request_parameters @@ -118,11 +119,11 @@ module OpenAI OpenAI::Models::Chat::chat_completion_tool_choice_option ) -> OpenAI::Models::Chat::chat_completion_tool_choice_option - attr_reader tools: ::Array[OpenAI::Chat::ChatCompletionTool]? + attr_reader tools: ::Array[OpenAI::Models::Chat::chat_completion_tool]? def tools=: ( - ::Array[OpenAI::Chat::ChatCompletionTool] - ) -> ::Array[OpenAI::Chat::ChatCompletionTool] + ::Array[OpenAI::Models::Chat::chat_completion_tool] + ) -> ::Array[OpenAI::Models::Chat::chat_completion_tool] attr_accessor top_logprobs: Integer? @@ -132,6 +133,8 @@ module OpenAI def user=: (String) -> String + attr_accessor verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity? + attr_reader web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions? def web_search_options=: ( @@ -166,10 +169,11 @@ module OpenAI ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, ?top_p: Float?, ?user: String, + ?verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity?, ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts ) -> void @@ -202,10 +206,11 @@ module OpenAI stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - tools: ::Array[OpenAI::Chat::ChatCompletionTool], + tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, top_p: Float?, user: String, + verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity?, web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, request_options: OpenAI::RequestOptions } @@ -318,6 +323,18 @@ module OpenAI StringArray: OpenAI::Internal::Type::Converter end + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::verbosity] + end + type web_search_options = { search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index bbe91426..afba9c16 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -1,7 +1,14 @@ module OpenAI module Models type chat_model = - :"gpt-4.1" + :"gpt-5" + | :"gpt-5-mini" + | :"gpt-5-nano" + | :"gpt-5-2025-08-07" + | :"gpt-5-mini-2025-08-07" + | :"gpt-5-nano-2025-08-07" + | :"gpt-5-chat-latest" + | :"gpt-4.1" | :"gpt-4.1-mini" | :"gpt-4.1-nano" | :"gpt-4.1-2025-04-14" @@ -60,6 +67,13 @@ module OpenAI module ChatModel extend 
OpenAI::Internal::Type::Enum + GPT_5: :"gpt-5" + GPT_5_MINI: :"gpt-5-mini" + GPT_5_NANO: :"gpt-5-nano" + GPT_5_2025_08_07: :"gpt-5-2025-08-07" + GPT_5_MINI_2025_08_07: :"gpt-5-mini-2025-08-07" + GPT_5_NANO_2025_08_07: :"gpt-5-nano-2025-08-07" + GPT_5_CHAT_LATEST: :"gpt-5-chat-latest" GPT_4_1: :"gpt-4.1" GPT_4_1_MINI: :"gpt-4.1-mini" GPT_4_1_NANO: :"gpt-4.1-nano" diff --git a/sig/openai/models/custom_tool_input_format.rbs b/sig/openai/models/custom_tool_input_format.rbs new file mode 100644 index 00000000..b0898e18 --- /dev/null +++ b/sig/openai/models/custom_tool_input_format.rbs @@ -0,0 +1,61 @@ +module OpenAI + module Models + type custom_tool_input_format = + OpenAI::CustomToolInputFormat::Text + | OpenAI::CustomToolInputFormat::Grammar + + module CustomToolInputFormat + extend OpenAI::Internal::Type::Union + + type text = { type: :text } + + class Text < OpenAI::Internal::Type::BaseModel + attr_accessor type: :text + + def initialize: (?type: :text) -> void + + def to_hash: -> { type: :text } + end + + type grammar = + { + definition: String, + syntax: OpenAI::Models::CustomToolInputFormat::Grammar::syntax, + type: :grammar + } + + class Grammar < OpenAI::Internal::Type::BaseModel + attr_accessor definition: String + + attr_accessor syntax: OpenAI::Models::CustomToolInputFormat::Grammar::syntax + + attr_accessor type: :grammar + + def initialize: ( + definition: String, + syntax: OpenAI::Models::CustomToolInputFormat::Grammar::syntax, + ?type: :grammar + ) -> void + + def to_hash: -> { + definition: String, + syntax: OpenAI::Models::CustomToolInputFormat::Grammar::syntax, + type: :grammar + } + + type syntax = :lark | :regex + + module Syntax + extend OpenAI::Internal::Type::Enum + + LARK: :lark + REGEX: :regex + + def self?.values: -> ::Array[OpenAI::Models::CustomToolInputFormat::Grammar::syntax] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::custom_tool_input_format] + end + end +end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index a4e24364..d3806378 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -335,7 +335,7 @@ module OpenAI response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, seed: Integer, temperature: Float, - tools: ::Array[OpenAI::Chat::ChatCompletionTool], + tools: ::Array[OpenAI::Chat::ChatCompletionFunctionTool], top_p: Float } @@ -358,11 +358,11 @@ module OpenAI def temperature=: (Float) -> Float - attr_reader tools: ::Array[OpenAI::Chat::ChatCompletionTool]? + attr_reader tools: ::Array[OpenAI::Chat::ChatCompletionFunctionTool]? def tools=: ( - ::Array[OpenAI::Chat::ChatCompletionTool] - ) -> ::Array[OpenAI::Chat::ChatCompletionTool] + ::Array[OpenAI::Chat::ChatCompletionFunctionTool] + ) -> ::Array[OpenAI::Chat::ChatCompletionFunctionTool] attr_reader top_p: Float? 
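[Editor's note: in client code, the chat-side signature changes above come together roughly as follows. This is a sketch, not part of the patch: it assumes a configured client, and shows the new :"gpt-5" model constant, the new top-level `verbosity` parameter, and dispatch over `tool_calls` now that it is a union of function and custom variants.]

    require "openai"

    client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

    completion = client.chat.completions.create(
      model: :"gpt-5",
      messages: [{role: :user, content: "Ping the order service for #42."}],
      verbosity: :low # new enum value: :low | :medium | :high
    )

    # `tool_calls` is now a union, so branch on the concrete variant.
    (completion.choices.first.message.tool_calls || []).each do |tool_call|
      case tool_call
      when OpenAI::Chat::ChatCompletionMessageFunctionToolCall
        puts "function #{tool_call.function.name}(#{tool_call.function.arguments})"
      when OpenAI::Chat::ChatCompletionMessageCustomToolCall
        puts "custom #{tool_call.custom.name}: #{tool_call.custom.input}"
      end
    end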
@@ -373,7 +373,7 @@ module OpenAI ?response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, ?seed: Integer, ?temperature: Float, - ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Chat::ChatCompletionFunctionTool], ?top_p: Float ) -> void @@ -382,7 +382,7 @@ module OpenAI response_format: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::response_format, seed: Integer, temperature: Float, - tools: ::Array[OpenAI::Chat::ChatCompletionTool], + tools: ::Array[OpenAI::Chat::ChatCompletionFunctionTool], top_p: Float } diff --git a/sig/openai/models/reasoning_effort.rbs b/sig/openai/models/reasoning_effort.rbs index d449d634..2245e639 100644 --- a/sig/openai/models/reasoning_effort.rbs +++ b/sig/openai/models/reasoning_effort.rbs @@ -1,10 +1,11 @@ module OpenAI module Models - type reasoning_effort = :low | :medium | :high + type reasoning_effort = :minimal | :low | :medium | :high module ReasoningEffort extend OpenAI::Internal::Type::Enum + MINIMAL: :minimal LOW: :low MEDIUM: :medium HIGH: :high diff --git a/sig/openai/models/response_format_text_grammar.rbs b/sig/openai/models/response_format_text_grammar.rbs new file mode 100644 index 00000000..5a49c0bd --- /dev/null +++ b/sig/openai/models/response_format_text_grammar.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + type response_format_text_grammar = { grammar: String, type: :grammar } + + class ResponseFormatTextGrammar < OpenAI::Internal::Type::BaseModel + attr_accessor grammar: String + + attr_accessor type: :grammar + + def initialize: (grammar: String, ?type: :grammar) -> void + + def to_hash: -> { grammar: String, type: :grammar } + end + end +end diff --git a/sig/openai/models/response_format_text_python.rbs b/sig/openai/models/response_format_text_python.rbs new file mode 100644 index 00000000..ac13e843 --- /dev/null +++ b/sig/openai/models/response_format_text_python.rbs @@ -0,0 +1,13 @@ +module OpenAI + module Models + type response_format_text_python = { type: :python } + + class ResponseFormatTextPython < OpenAI::Internal::Type::BaseModel + attr_accessor type: :python + + def initialize: (?type: :python) -> void + + def to_hash: -> { type: :python } + end + end +end diff --git a/sig/openai/models/responses/custom_tool.rbs b/sig/openai/models/responses/custom_tool.rbs new file mode 100644 index 00000000..6d529cea --- /dev/null +++ b/sig/openai/models/responses/custom_tool.rbs @@ -0,0 +1,43 @@ +module OpenAI + module Models + module Responses + type custom_tool = + { + name: String, + type: :custom, + description: String, + format_: OpenAI::Models::custom_tool_input_format + } + + class CustomTool < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor type: :custom + + attr_reader description: String? + + def description=: (String) -> String + + attr_reader format_: OpenAI::Models::custom_tool_input_format? 
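[Editor's note: the new `:minimal` reasoning effort added above is passed like the existing levels; a sketch, assuming the configured `client` from the previous example.]

    response = client.responses.create(
      model: :"gpt-5",
      input: "Summarize this patch series in one sentence.",
      reasoning: {effort: :minimal} # :minimal joins :low, :medium, :high
    )
    pp response.output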
+ + def format_=: ( + OpenAI::Models::custom_tool_input_format + ) -> OpenAI::Models::custom_tool_input_format + + def initialize: ( + name: String, + ?description: String, + ?format_: OpenAI::Models::custom_tool_input_format, + ?type: :custom + ) -> void + + def to_hash: -> { + name: String, + type: :custom, + description: String, + format_: OpenAI::Models::custom_tool_input_format + } + end + end + end +end diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 5e6ab27f..de75fbda 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -31,7 +31,8 @@ module OpenAI top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, - user: String + user: String, + verbosity: OpenAI::Models::Responses::Response::verbosity? } class Response < OpenAI::Internal::Type::BaseModel @@ -111,6 +112,8 @@ module OpenAI def user=: (String) -> String + attr_accessor verbosity: OpenAI::Models::Responses::Response::verbosity? + def initialize: ( id: String, created_at: Float, @@ -140,6 +143,7 @@ module OpenAI ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, ?user: String, + ?verbosity: OpenAI::Models::Responses::Response::verbosity?, ?object: :response ) -> void @@ -172,7 +176,8 @@ module OpenAI top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, - user: String + user: String, + verbosity: OpenAI::Models::Responses::Response::verbosity? } type incomplete_details = @@ -220,9 +225,11 @@ module OpenAI type tool_choice = OpenAI::Models::Responses::tool_choice_options + | OpenAI::Responses::ToolChoiceAllowed | OpenAI::Responses::ToolChoiceTypes | OpenAI::Responses::ToolChoiceFunction | OpenAI::Responses::ToolChoiceMcp + | OpenAI::Responses::ToolChoiceCustom module ToolChoice extend OpenAI::Internal::Type::Union @@ -254,6 +261,18 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] end + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::verbosity] + end end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index bde15b88..c8150854 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -19,6 +19,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, + stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, @@ -26,7 +27,8 @@ module OpenAI top_logprobs: Integer?, top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - user: String + user: String, + verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity? } & OpenAI::Internal::Type::request_parameters @@ -78,6 +80,8 @@ module OpenAI attr_accessor store: bool? + attr_accessor stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions? + attr_accessor temperature: Float? attr_reader text: OpenAI::Responses::ResponseTextConfig? 
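[Editor's note: a sketch of registering the `CustomTool` defined above with a Lark grammar input format. Hash literals here are assumed to use the Ruby attribute names from these signatures, so the wire field `format` is spelled `format_`; the tool name and grammar definition are illustrative, not from the patch.]

    sql_tool = {
      type: :custom,
      name: "sql_query", # hypothetical tool name
      description: "Emit exactly one SQL query",
      format_: {type: :grammar, syntax: :lark, definition: 'start: "SELECT" /[^;]+/ ";"'}
    }

    response = client.responses.create(
      model: :"gpt-5",
      input: "How many users signed up this week?",
      tools: [sql_tool]
    )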
@@ -108,6 +112,8 @@ module OpenAI def user=: (String) -> String + attr_accessor verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity? + def initialize: ( ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, @@ -125,6 +131,7 @@ module OpenAI ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, + ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, @@ -133,6 +140,7 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, + ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> void @@ -153,6 +161,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, store: bool?, + stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, @@ -161,6 +170,7 @@ module OpenAI top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, user: String, + verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, request_options: OpenAI::RequestOptions } @@ -186,11 +196,25 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::service_tier] end + type stream_options = { include_obfuscation: bool } + + class StreamOptions < OpenAI::Internal::Type::BaseModel + attr_reader include_obfuscation: bool? + + def include_obfuscation=: (bool) -> bool + + def initialize: (?include_obfuscation: bool) -> void + + def to_hash: -> { include_obfuscation: bool } + end + type tool_choice = OpenAI::Models::Responses::tool_choice_options + | OpenAI::Responses::ToolChoiceAllowed | OpenAI::Responses::ToolChoiceTypes | OpenAI::Responses::ToolChoiceFunction | OpenAI::Responses::ToolChoiceMcp + | OpenAI::Responses::ToolChoiceCustom module ToolChoice extend OpenAI::Internal::Type::Union @@ -208,6 +232,18 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::truncation] end + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::verbosity] + end end end end diff --git a/sig/openai/models/responses/response_custom_tool_call.rbs b/sig/openai/models/responses/response_custom_tool_call.rbs new file mode 100644 index 00000000..16916b4f --- /dev/null +++ b/sig/openai/models/responses/response_custom_tool_call.rbs @@ -0,0 +1,44 @@ +module OpenAI + module Models + module Responses + type response_custom_tool_call = + { + call_id: String, + input: String, + name: String, + type: :custom_tool_call, + id: String + } + + class ResponseCustomToolCall < OpenAI::Internal::Type::BaseModel + attr_accessor call_id: String + + attr_accessor input: String + + attr_accessor name: String + + attr_accessor type: :custom_tool_call + + attr_reader id: String? 
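[Editor's note: streaming with the new `stream_options` and the allowed-tools `tool_choice` looks roughly like this — a sketch assuming `stream_raw` as the raw streaming entry point and a function tool defined inline for illustration.]

    lookup_order = {
      type: :function,
      name: "lookup_order", # hypothetical function tool
      parameters: {type: "object", properties: {id: {type: "string"}}},
      strict: true
    }

    stream = client.responses.stream_raw(
      model: :"gpt-5",
      input: "Check the status of order #42.",
      tools: [lookup_order],
      tool_choice: {type: :allowed_tools, mode: :auto, tools: [{type: "function", name: "lookup_order"}]},
      stream_options: {include_obfuscation: false}
    )

    stream.each { |event| puts event.type }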
+ + def id=: (String) -> String + + def initialize: ( + call_id: String, + input: String, + name: String, + ?id: String, + ?type: :custom_tool_call + ) -> void + + def to_hash: -> { + call_id: String, + input: String, + name: String, + type: :custom_tool_call, + id: String + } + end + end + end +end diff --git a/sig/openai/models/responses/response_custom_tool_call_input_delta_event.rbs b/sig/openai/models/responses/response_custom_tool_call_input_delta_event.rbs new file mode 100644 index 00000000..030f7237 --- /dev/null +++ b/sig/openai/models/responses/response_custom_tool_call_input_delta_event.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Responses + type response_custom_tool_call_input_delta_event = + { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.custom_tool_call_input.delta" + } + + class ResponseCustomToolCallInputDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor delta: String + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.custom_tool_call_input.delta" + + def initialize: ( + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.custom_tool_call_input.delta" + ) -> void + + def to_hash: -> { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.custom_tool_call_input.delta" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_custom_tool_call_input_done_event.rbs b/sig/openai/models/responses/response_custom_tool_call_input_done_event.rbs new file mode 100644 index 00000000..2378e7ae --- /dev/null +++ b/sig/openai/models/responses/response_custom_tool_call_input_done_event.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Responses + type response_custom_tool_call_input_done_event = + { + input: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.custom_tool_call_input.done" + } + + class ResponseCustomToolCallInputDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor input: String + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.custom_tool_call_input.done" + + def initialize: ( + input: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.custom_tool_call_input.done" + ) -> void + + def to_hash: -> { + input: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.custom_tool_call_input.done" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_custom_tool_call_output.rbs b/sig/openai/models/responses/response_custom_tool_call_output.rbs new file mode 100644 index 00000000..d9c9486a --- /dev/null +++ b/sig/openai/models/responses/response_custom_tool_call_output.rbs @@ -0,0 +1,39 @@ +module OpenAI + module Models + module Responses + type response_custom_tool_call_output = + { + call_id: String, + output: String, + type: :custom_tool_call_output, + id: String + } + + class ResponseCustomToolCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor call_id: String + + attr_accessor output: String + + attr_accessor type: :custom_tool_call_output + + attr_reader id: String? 
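[Editor's note: the two events above stream a custom tool call's input incrementally; a sketch of consuming them from the `stream` in the previous example, accumulating deltas per item.]

    buffers = Hash.new { |h, k| h[k] = +"" }

    stream.each do |event|
      case event
      when OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent
        buffers[event.item_id] << event.delta # partial input text
      when OpenAI::Responses::ResponseCustomToolCallInputDoneEvent
        puts "custom tool input for #{event.item_id}: #{event.input}"
      end
    end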
+ + def id=: (String) -> String + + def initialize: ( + call_id: String, + output: String, + ?id: String, + ?type: :custom_tool_call_output + ) -> void + + def to_hash: -> { + call_id: String, + output: String, + type: :custom_tool_call_output, + id: String + } + end + end + end +end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index 35bf8908..08461d42 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -20,6 +20,8 @@ module OpenAI | OpenAI::Responses::ResponseInputItem::McpApprovalRequest | OpenAI::Responses::ResponseInputItem::McpApprovalResponse | OpenAI::Responses::ResponseInputItem::McpCall + | OpenAI::Responses::ResponseCustomToolCallOutput + | OpenAI::Responses::ResponseCustomToolCall | OpenAI::Responses::ResponseInputItem::ItemReference module ResponseInputItem diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index dc0254bd..13b7bedc 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -14,6 +14,7 @@ module OpenAI | OpenAI::Responses::ResponseOutputItem::McpCall | OpenAI::Responses::ResponseOutputItem::McpListTools | OpenAI::Responses::ResponseOutputItem::McpApprovalRequest + | OpenAI::Responses::ResponseCustomToolCall module ResponseOutputItem extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 56f3ed6a..66b490f5 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -4,6 +4,7 @@ module OpenAI type response_retrieve_params = { include: ::Array[OpenAI::Models::Responses::response_includable], + include_obfuscation: bool, starting_after: Integer } & OpenAI::Internal::Type::request_parameters @@ -18,18 +19,24 @@ module OpenAI ::Array[OpenAI::Models::Responses::response_includable] ) -> ::Array[OpenAI::Models::Responses::response_includable] + attr_reader include_obfuscation: bool? + + def include_obfuscation=: (bool) -> bool + attr_reader starting_after: Integer? 
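[Editor's note: once a `ResponseCustomToolCall` shows up in `response.output`, its result goes back as a `custom_tool_call_output` input item on the next turn — a sketch, assuming `response` came from one of the calls above and an illustrative query result.]

    custom_call = response.output.find do |item|
      item.is_a?(OpenAI::Responses::ResponseCustomToolCall)
    end

    if custom_call
      client.responses.create(
        model: :"gpt-5",
        previous_response_id: response.id,
        input: [{
          type: :custom_tool_call_output,
          call_id: custom_call.call_id,
          output: "SELECT COUNT(*) FROM users;"
        }]
      )
    end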
def starting_after=: (Integer) -> Integer def initialize: ( ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?include_obfuscation: bool, ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> void def to_hash: -> { include: ::Array[OpenAI::Models::Responses::response_includable], + include_obfuscation: bool, starting_after: Integer, request_options: OpenAI::RequestOptions } diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index 731c96f4..b9a88468 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -53,6 +53,8 @@ module OpenAI | OpenAI::Responses::ResponseMcpListToolsInProgressEvent | OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent | OpenAI::Responses::ResponseQueuedEvent + | OpenAI::Responses::ResponseCustomToolCallInputDeltaEvent + | OpenAI::Responses::ResponseCustomToolCallInputDoneEvent module ResponseStreamEvent extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index fffb117b..71600464 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -9,6 +9,7 @@ module OpenAI | OpenAI::Responses::Tool::CodeInterpreter | OpenAI::Responses::Tool::ImageGeneration | OpenAI::Responses::Tool::LocalShell + | OpenAI::Responses::CustomTool | OpenAI::Responses::WebSearchTool module Tool diff --git a/sig/openai/models/responses/tool_choice_allowed.rbs b/sig/openai/models/responses/tool_choice_allowed.rbs new file mode 100644 index 00000000..add7a8ce --- /dev/null +++ b/sig/openai/models/responses/tool_choice_allowed.rbs @@ -0,0 +1,43 @@ +module OpenAI + module Models + module Responses + type tool_choice_allowed = + { + mode: OpenAI::Models::Responses::ToolChoiceAllowed::mode, + tools: ::Array[::Hash[Symbol, top]], + type: :allowed_tools + } + + class ToolChoiceAllowed < OpenAI::Internal::Type::BaseModel + attr_accessor mode: OpenAI::Models::Responses::ToolChoiceAllowed::mode + + attr_accessor tools: ::Array[::Hash[Symbol, top]] + + attr_accessor type: :allowed_tools + + def initialize: ( + mode: OpenAI::Models::Responses::ToolChoiceAllowed::mode, + tools: ::Array[::Hash[Symbol, top]], + ?type: :allowed_tools + ) -> void + + def to_hash: -> { + mode: OpenAI::Models::Responses::ToolChoiceAllowed::mode, + tools: ::Array[::Hash[Symbol, top]], + type: :allowed_tools + } + + type mode = :auto | :required + + module Mode + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + REQUIRED: :required + + def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceAllowed::mode] + end + end + end + end +end diff --git a/sig/openai/models/responses/tool_choice_custom.rbs b/sig/openai/models/responses/tool_choice_custom.rbs new file mode 100644 index 00000000..9848fb4f --- /dev/null +++ b/sig/openai/models/responses/tool_choice_custom.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Responses + type tool_choice_custom = { name: String, type: :custom } + + class ToolChoiceCustom < OpenAI::Internal::Type::BaseModel + attr_accessor name: String + + attr_accessor type: :custom + + def initialize: (name: String, ?type: :custom) -> void + + def to_hash: -> { name: String, type: :custom } + end + end + end +end diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 79a01c27..a4237ff1 100644 --- a/sig/openai/resources/chat/completions.rbs +++ 
b/sig/openai/resources/chat/completions.rbs @@ -32,10 +32,11 @@ module OpenAI ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, ?top_p: Float?, ?user: String, + ?verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity?, ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts ) -> OpenAI::Chat::ChatCompletion @@ -68,10 +69,11 @@ module OpenAI ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, - ?tools: ::Array[OpenAI::Chat::ChatCompletionTool], + ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, ?top_p: Float?, ?user: String, + ?verbosity: OpenAI::Models::Chat::CompletionCreateParams::verbosity?, ?web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Chat::ChatCompletionChunk] diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index e0d85fee..1c1c2263 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -20,6 +20,7 @@ module OpenAI ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, + ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, @@ -28,6 +29,7 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, + ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> OpenAI::Responses::Response @@ -48,6 +50,7 @@ module OpenAI ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, ?store: bool?, + ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, @@ -56,12 +59,14 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, + ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] def retrieve: ( String response_id, ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?include_obfuscation: bool, ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> OpenAI::Responses::Response @@ -69,6 +74,7 @@ module OpenAI def retrieve_streaming: ( String response_id, ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?include_obfuscation: bool, ?starting_after: Integer, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb index 85ec0868..8ba8bd2d 100644 --- a/test/openai/client_test.rb +++ b/test/openai/client_test.rb @@ -40,7 +40,7 @@ def 
test_client_default_request_default_retry_attempts openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") end assert_requested(:any, /./, times: 3) @@ -52,7 +52,7 @@ def test_client_given_request_default_retry_attempts openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 3) assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") end assert_requested(:any, /./, times: 4) @@ -66,7 +66,7 @@ def test_client_default_request_given_retry_attempts assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {max_retries: 3} ) end @@ -82,7 +82,7 @@ def test_client_given_request_given_retry_attempts assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {max_retries: 4} ) end @@ -100,7 +100,7 @@ def test_client_retry_after_seconds openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 1) assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") end assert_requested(:any, /./, times: 2) @@ -118,7 +118,7 @@ def test_client_retry_after_date assert_raises(OpenAI::Errors::InternalServerError) do Thread.current.thread_variable_set(:time_now, Time.now) - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") Thread.current.thread_variable_set(:time_now, nil) end @@ -136,7 +136,7 @@ def test_client_retry_after_ms openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key", max_retries: 1) assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") end assert_requested(:any, /./, times: 2) @@ -149,7 +149,7 @@ def test_retry_count_header openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") assert_raises(OpenAI::Errors::InternalServerError) do - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") end 3.times do @@ -165,7 +165,7 @@ def test_omit_retry_count_header assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {"x-stainless-retry-count" => nil}} ) end @@ -183,7 +183,7 @@ def 
test_overwrite_retry_count_header assert_raises(OpenAI::Errors::InternalServerError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {"x-stainless-retry-count" => "42"}} ) end @@ -207,7 +207,7 @@ def test_client_redirect_307 assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {}} ) end @@ -240,7 +240,7 @@ def test_client_redirect_303 assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {}} ) end @@ -268,7 +268,7 @@ def test_client_redirect_auth_keep_same_origin assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {"authorization" => "Bearer xyz"}} ) end @@ -299,7 +299,7 @@ def test_client_redirect_auth_strip_cross_origin assert_raises(OpenAI::Errors::APIConnectionError) do openai.chat.completions.create( messages: [{content: "string", role: :developer}], - model: :"gpt-4.1", + model: :"gpt-5", request_options: {extra_headers: {"authorization" => "Bearer xyz"}} ) end @@ -315,7 +315,7 @@ def test_default_headers openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key") - openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") assert_requested(:any, /./) do |req| headers = req.headers.transform_keys(&:downcase).fetch_values("accept", "content-type") diff --git a/test/openai/resources/beta/assistants_test.rb b/test/openai/resources/beta/assistants_test.rb index ea241550..d10b07fd 100644 --- a/test/openai/resources/beta/assistants_test.rb +++ b/test/openai/resources/beta/assistants_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Beta::AssistantsTest < OpenAI::Test::ResourceTest def test_create_required_params - response = @openai.beta.assistants.create(model: :"gpt-4.1") + response = @openai.beta.assistants.create(model: :"gpt-5") assert_pattern do response => OpenAI::Beta::Assistant diff --git a/test/openai/resources/chat/completions_test.rb b/test/openai/resources/chat/completions_test.rb index 2bb2db4b..e4e742e8 100644 --- a/test/openai/resources/chat/completions_test.rb +++ b/test/openai/resources/chat/completions_test.rb @@ -5,7 +5,7 @@ class OpenAI::Test::Resources::Chat::CompletionsTest < OpenAI::Test::ResourceTest def test_create_required_params response = - @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-4.1") + @openai.chat.completions.create(messages: [{content: "string", role: :developer}], model: :"gpt-5") assert_pattern do response => OpenAI::Chat::ChatCompletion diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 28d1d7e4..705f33f5 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -40,7 +40,8 @@ def test_create top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, - user: String | nil + user: String | nil, + 
           verbosity: OpenAI::Responses::Response::Verbosity | nil
         }
       end
     end
@@ -82,7 +83,8 @@ def test_retrieve
           top_logprobs: Integer | nil,
           truncation: OpenAI::Responses::Response::Truncation | nil,
           usage: OpenAI::Responses::ResponseUsage | nil,
-          user: String | nil
+          user: String | nil,
+          verbosity: OpenAI::Responses::Response::Verbosity | nil
         }
       end
     end
@@ -132,7 +134,8 @@ def test_cancel
           top_logprobs: Integer | nil,
           truncation: OpenAI::Responses::Response::Truncation | nil,
           usage: OpenAI::Responses::ResponseUsage | nil,
-          user: String | nil
+          user: String | nil,
+          verbosity: OpenAI::Responses::Response::Verbosity | nil
         }
       end
     end

From 6149892d6fd8369fa03eefd8f57e07a206ee6aca Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 7 Aug 2025 18:27:39 +0000
Subject: [PATCH 272/295] codegen metadata

---
 .stats.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.stats.yml b/.stats.yml
index c3c07bdb..4ec5fe4a 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 109
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f5c45f4ae5c2075cbc603d6910bba3da31c23714c209fbd3fd82a94f634a126b.yml
 openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba
-config_hash: 9a64321968e21ed72f5c0e02164ea00d
+config_hash: e53ea2d984c4e05a57eb0227fa379b2b

From 2358b621c02583f4cfcab475f3560ea4d749d055 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 7 Aug 2025 19:56:33 +0000
Subject: [PATCH 273/295] codegen metadata

---
 .stats.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.stats.yml b/.stats.yml
index 4ec5fe4a..72f89899 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 109
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f5c45f4ae5c2075cbc603d6910bba3da31c23714c209fbd3fd82a94f634a126b.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d7e255da603b878e7e823135520211ce6a9e02890c9d549bbf3953a877ee5ef3.yml
 openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba
-config_hash: e53ea2d984c4e05a57eb0227fa379b2b
+config_hash: f0e0ce47bee61bd779ccaad22930f186

From a263905aba48421989073c1e75d08714f80ab2f2 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 7 Aug 2025 20:10:16 +0000
Subject: [PATCH 274/295] codegen metadata

---
 .stats.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.stats.yml b/.stats.yml
index 72f89899..31e04781 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 109
 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d7e255da603b878e7e823135520211ce6a9e02890c9d549bbf3953a877ee5ef3.yml
 openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba
-config_hash: f0e0ce47bee61bd779ccaad22930f186
+config_hash: 2e7cf948f94e24f94c7d12ba2de2734a

From ccca59c049b902917f0af872594278509a8e8d24 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 8 Aug 2025 11:22:56 +0000
Subject: [PATCH 275/295] codegen metadata

---
 .stats.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.stats.yml b/.stats.yml
index 31e04781..7029d2ad 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 109
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-d7e255da603b878e7e823135520211ce6a9e02890c9d549bbf3953a877ee5ef3.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-56d3a72a5caa187aebcf9de169a6a28a9dc3f70a79d7467a03a9e22595936066.yml
 openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba
-config_hash: 2e7cf948f94e24f94c7d12ba2de2734a
+config_hash: 7e18239879286d68a48ac5487a649aa6

From ae9a26b82c94a0bf1024596665f2cbd3b8be00d2 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 8 Aug 2025 14:54:49 +0000
Subject: [PATCH 276/295] fix(client): fix verbosity parameter location in Responses

fixes error with unsupported `verbosity` parameter by correctly placing it
inside the `text` parameter

---
 .stats.yml                                    |  4 +-
 lib/openai/models/responses/response.rb       | 28 +-------
 .../responses/response_create_params.rb       | 26 +-------
 .../models/responses/response_text_config.rb  | 28 +++++++-
 lib/openai/resources/responses.rb             |  8 +--
 rbi/openai/models/responses/response.rbi      | 48 +-------------
 .../responses/response_create_params.rbi      | 63 ------------------
 .../models/responses/response_text_config.rbi | 65 ++++++++++++++++++-
 rbi/openai/resources/responses.rbi            | 16 -----
 sig/openai/models/responses/response.rbs      | 21 +-----
 .../responses/response_create_params.rbs      | 19 +-----
 .../models/responses/response_text_config.rbs | 25 ++++++-
 sig/openai/resources/responses.rbs            |  2 -
 test/openai/resources/responses_test.rb       |  9 +--
 14 files changed, 126 insertions(+), 236 deletions(-)

diff --git a/.stats.yml b/.stats.yml
index 7029d2ad..65629665 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 109
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-56d3a72a5caa187aebcf9de169a6a28a9dc3f70a79d7467a03a9e22595936066.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6a1bfd4738fff02ef5becc3fdb2bf0cd6c026f2c924d4147a2a515474477dd9a.yml
 openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba
-config_hash: 7e18239879286d68a48ac5487a649aa6
+config_hash: a67c5e195a59855fe8a5db0dc61a3e7f

diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb
index b5af411f..5fdf354e 100644
--- a/lib/openai/models/responses/response.rb
+++ b/lib/openai/models/responses/response.rb
@@ -277,15 +277,7 @@ class Response < OpenAI::Internal::Type::BaseModel
       # @return [String, nil]
       optional :user, String

-      # @!attribute verbosity
-      #   Constrains the verbosity of the model's response. Lower values will result in
-      #   more concise responses, while higher values will result in more verbose
-      #   responses. Currently supported values are `low`, `medium`, and `high`.
- # - # @return [Symbol, OpenAI::Models::Responses::Response::Verbosity, nil] - optional :verbosity, enum: -> { OpenAI::Responses::Response::Verbosity }, nil?: true - - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, verbosity: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. # @@ -345,8 +337,6 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # - # @param verbosity [Symbol, OpenAI::Models::Responses::Response::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - # # @param object [Symbol, :response] The object type of this resource - always set to `response`. # @see OpenAI::Models::Responses::Response#incomplete_details @@ -490,22 +480,6 @@ module Truncation # @!method self.values # @return [Array] end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @see OpenAI::Models::Responses::Response#verbosity - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW = :low - MEDIUM = :medium - HIGH = :high - - # @!method self.values - # @return [Array] - end end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 2d140375..b11ef1f3 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -273,15 +273,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!attribute verbosity - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
- # - # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] - optional :verbosity, enum: -> { OpenAI::Responses::ResponseCreateParams::Verbosity }, nil?: true - - # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) + # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # @@ -335,8 +327,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # - # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Text, image, or file inputs to the model, used to generate a response. @@ -467,20 +457,6 @@ module Truncation # @!method self.values # @return [Array] end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW = :low - MEDIUM = :medium - HIGH = :high - - # @!method self.values - # @return [Array] - end end end end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 9a203e84..7017ea8f 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -22,7 +22,15 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format - # @!method initialize(format_: nil) + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseTextConfig::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Responses::ResponseTextConfig::Verbosity }, nil?: true + + # @!method initialize(format_: nil, verbosity: nil) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseTextConfig} for more details. 
# @@ -33,6 +41,24 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + # + # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseTextConfig::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @see OpenAI::Models::Responses::ResponseTextConfig#verbosity + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end end end end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 8754df72..fd3c429e 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,7 +23,7 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) + # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -75,8 +75,6 @@ class Responses # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # - # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Responses::Response] @@ -114,7 +112,7 @@ def create(params = {}) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, verbosity: nil, request_options: {}) + # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # @@ -166,8 +164,6 @@ def create(params = {}) # # @param user [String] This field is being replaced by `safety_identifier` and `prompt_cache_key`. Use # - # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Internal::Stream] diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 022f201d..8980d26b 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -280,16 +280,6 @@ module OpenAI sig { params(user: String).void } attr_writer :user - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - sig do - returns( - T.nilable(OpenAI::Responses::Response::Verbosity::TaggedSymbol) - ) - end - attr_accessor :verbosity - sig do params( id: String, @@ -367,8 +357,6 @@ module OpenAI T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), usage: OpenAI::Responses::ResponseUsage::OrHash, user: String, - verbosity: - T.nilable(OpenAI::Responses::Response::Verbosity::OrSymbol), object: Symbol ).returns(T.attached_class) end @@ -523,10 +511,6 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil, # The object type of this resource - always set to `response`. object: :response ) @@ -571,9 +555,7 @@ module OpenAI OpenAI::Responses::Response::Truncation::TaggedSymbol ), usage: OpenAI::Responses::ResponseUsage, - user: String, - verbosity: - T.nilable(OpenAI::Responses::Response::Verbosity::TaggedSymbol) + user: String } ) end @@ -814,34 +796,6 @@ module OpenAI def self.values end end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. 
Currently supported values are `low`, `medium`, and `high`. - module Verbosity - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Responses::Response::Verbosity) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - LOW = - T.let(:low, OpenAI::Responses::Response::Verbosity::TaggedSymbol) - MEDIUM = - T.let(:medium, OpenAI::Responses::Response::Verbosity::TaggedSymbol) - HIGH = - T.let(:high, OpenAI::Responses::Response::Verbosity::TaggedSymbol) - - sig do - override.returns( - T::Array[OpenAI::Responses::Response::Verbosity::TaggedSymbol] - ) - end - def self.values - end - end end end end diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 1c912e64..fe108358 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -375,18 +375,6 @@ module OpenAI sig { params(user: String).void } attr_writer :user - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - sig do - returns( - T.nilable( - OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol - ) - ) - end - attr_accessor :verbosity - sig do params( background: T.nilable(T::Boolean), @@ -452,10 +440,6 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol - ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -619,10 +603,6 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil, request_options: {} ) end @@ -693,10 +673,6 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol - ), request_options: OpenAI::RequestOptions } ) @@ -906,45 +882,6 @@ module OpenAI def self.values end end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
- module Verbosity - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Responses::ResponseCreateParams::Verbosity) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - LOW = - T.let( - :low, - OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol - ) - MEDIUM = - T.let( - :medium, - OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol - ) - HIGH = - T.let( - :high, - OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Responses::ResponseCreateParams::Verbosity::TaggedSymbol - ] - ) - end - def self.values - end - end end end end diff --git a/rbi/openai/models/responses/response_text_config.rbi b/rbi/openai/models/responses/response_text_config.rbi index 5103891f..e36e1bac 100644 --- a/rbi/openai/models/responses/response_text_config.rbi +++ b/rbi/openai/models/responses/response_text_config.rbi @@ -50,6 +50,18 @@ module OpenAI end attr_writer :format_ + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseTextConfig::Verbosity::OrSymbol + ) + ) + end + attr_accessor :verbosity + # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: # @@ -62,6 +74,10 @@ module OpenAI OpenAI::ResponseFormatText::OrHash, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, OpenAI::ResponseFormatJSONObject::OrHash + ), + verbosity: + T.nilable( + OpenAI::Responses::ResponseTextConfig::Verbosity::OrSymbol ) ).returns(T.attached_class) end @@ -79,7 +95,11 @@ module OpenAI # Setting to `{ "type": "json_object" }` enables the older JSON mode, which # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. - format_: nil + format_: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil ) end @@ -91,12 +111,55 @@ module OpenAI OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject + ), + verbosity: + T.nilable( + OpenAI::Responses::ResponseTextConfig::Verbosity::OrSymbol ) } ) end def to_hash end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
+ module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::ResponseTextConfig::Verbosity) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::ResponseTextConfig::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::ResponseTextConfig::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::ResponseTextConfig::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseTextConfig::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end end end end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index fc4749fd..0256338b 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -84,10 +84,6 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol - ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Responses::Response) @@ -252,10 +248,6 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. stream: false, @@ -341,10 +333,6 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::Truncation::OrSymbol ), user: String, - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Verbosity::OrSymbol - ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( @@ -513,10 +501,6 @@ module OpenAI # similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#safety-identifiers). user: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` # for streaming and non-streaming use cases, respectively. stream: true, diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index de75fbda..6bf78a3c 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -31,8 +31,7 @@ module OpenAI top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, - user: String, - verbosity: OpenAI::Models::Responses::Response::verbosity? + user: String } class Response < OpenAI::Internal::Type::BaseModel @@ -112,8 +111,6 @@ module OpenAI def user=: (String) -> String - attr_accessor verbosity: OpenAI::Models::Responses::Response::verbosity? 
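The Sorbet enum added above follows the SDK-wide pattern: a `TaggedSymbol` alias for the narrowed type of values the SDK returns, paired with an `OrSymbol` alias (`T.any(Symbol, String)`) for values callers pass in. A hedged sketch of what that permits at call sites:

```ruby
# Either spelling satisfies the OrSymbol input type; at runtime the enum
# constants are plain Ruby symbols, so they compare with `==`.
config = OpenAI::Responses::ResponseTextConfig.new(verbosity: :low)
config = OpenAI::Responses::ResponseTextConfig.new(verbosity: "low") # also type-checks, per OrSymbol

OpenAI::Responses::ResponseTextConfig::Verbosity::LOW == :low # => true
```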
- def initialize: ( id: String, created_at: Float, @@ -143,7 +140,6 @@ module OpenAI ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, ?user: String, - ?verbosity: OpenAI::Models::Responses::Response::verbosity?, ?object: :response ) -> void @@ -176,8 +172,7 @@ module OpenAI top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, - user: String, - verbosity: OpenAI::Models::Responses::Response::verbosity? + user: String } type incomplete_details = @@ -261,18 +256,6 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] end - - type verbosity = :low | :medium | :high - - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW: :low - MEDIUM: :medium - HIGH: :high - - def self?.values: -> ::Array[OpenAI::Models::Responses::Response::verbosity] - end end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index c8150854..83f641b4 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -27,8 +27,7 @@ module OpenAI top_logprobs: Integer?, top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, - user: String, - verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity? + user: String } & OpenAI::Internal::Type::request_parameters @@ -112,8 +111,6 @@ module OpenAI def user=: (String) -> String - attr_accessor verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity? - def initialize: ( ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, @@ -140,7 +137,6 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, - ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> void @@ -170,7 +166,6 @@ module OpenAI top_p: Float?, truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, user: String, - verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, request_options: OpenAI::RequestOptions } @@ -232,18 +227,6 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::truncation] end - - type verbosity = :low | :medium | :high - - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW: :low - MEDIUM: :medium - HIGH: :high - - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::verbosity] - end end end end diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index e60dae0f..3f0a6608 100644 --- a/sig/openai/models/responses/response_text_config.rbs +++ b/sig/openai/models/responses/response_text_config.rbs @@ -2,7 +2,10 @@ module OpenAI module Models module Responses type response_text_config = - { format_: OpenAI::Models::Responses::response_format_text_config } + { + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::ResponseTextConfig::verbosity? + } class ResponseTextConfig < OpenAI::Internal::Type::BaseModel attr_reader format_: OpenAI::Models::Responses::response_format_text_config? 
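The `response_text_config` RBS type above now carries both keys, and the model's `to_hash` mirrors that shape. A sketch with an illustrative plain-text format payload:

```ruby
config = OpenAI::Responses::ResponseTextConfig.new(
  format_: {type: :text}, # a plain-text response format
  verbosity: :medium
)
config.to_hash # => hash containing :format_ and :verbosity, per the signature above
```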
@@ -11,13 +14,29 @@ module OpenAI OpenAI::Models::Responses::response_format_text_config ) -> OpenAI::Models::Responses::response_format_text_config + attr_accessor verbosity: OpenAI::Models::Responses::ResponseTextConfig::verbosity? + def initialize: ( - ?format_: OpenAI::Models::Responses::response_format_text_config + ?format_: OpenAI::Models::Responses::response_format_text_config, + ?verbosity: OpenAI::Models::Responses::ResponseTextConfig::verbosity? ) -> void def to_hash: -> { - format_: OpenAI::Models::Responses::response_format_text_config + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::ResponseTextConfig::verbosity? } + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseTextConfig::verbosity] + end end end end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 1c1c2263..6e926188 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -29,7 +29,6 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, - ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> OpenAI::Responses::Response @@ -59,7 +58,6 @@ module OpenAI ?top_p: Float?, ?truncation: OpenAI::Models::Responses::ResponseCreateParams::truncation?, ?user: String, - ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::verbosity?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Responses::response_stream_event] diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 705f33f5..28d1d7e4 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -40,8 +40,7 @@ def test_create top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, - user: String | nil, - verbosity: OpenAI::Responses::Response::Verbosity | nil + user: String | nil } end end @@ -83,8 +82,7 @@ def test_retrieve top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, - user: String | nil, - verbosity: OpenAI::Responses::Response::Verbosity | nil + user: String | nil } end end @@ -134,8 +132,7 @@ def test_cancel top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, - user: String | nil, - verbosity: OpenAI::Responses::Response::Verbosity | nil + user: String | nil } end end From bc1fd452bf3c8dc8e48237955fb7424b73cd0cb3 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 17:59:21 +0000 Subject: [PATCH 277/295] chore: update @stainless-api/prism-cli to v5.15.0 --- scripts/mock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/mock b/scripts/mock index d2814ae6..0b28f6ea 100755 --- a/scripts/mock +++ b/scripts/mock @@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}" # Run prism mock on the given spec if [ "$1" == "--daemon" ]; then - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log & + npm exec 
--package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log & # Wait for server to come online echo -n "Waiting for server" @@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then echo else - npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" + npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" fi From 4192b25afad6dc3337eac6e8c5f49da667eb954a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 19:41:52 +0000 Subject: [PATCH 278/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b4e9013b..6db19b95 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.16.0" + ".": "0.17.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 6eadce9d..ea2ca4c6 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.16.0) + openai (0.17.0) connection_pool GEM diff --git a/README.md b/README.md index 95da0d39..e492b523 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.16.0" +gem "openai", "~> 0.17.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index a619a057..182ed68d 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.16.0" + VERSION = "0.17.0" end From a6aea38a1194cd0c5e7db9858dea05482ae913ad Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 21:23:43 +0000 Subject: [PATCH 279/295] chore(internal): update comment in script --- scripts/test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/test b/scripts/test index 8e5d35cd..e0dc1374 100755 --- a/scripts/test +++ b/scripts/test @@ -43,7 +43,7 @@ elif ! 
prism_is_running ; then echo -e "To run the server, pass in the path or url of your OpenAPI" echo -e "spec to the prism command:" echo - echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}" + echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}" echo exit 1 From 6dab6326bb97db055753ad302595468882069a96 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 9 Aug 2025 08:50:07 +0000 Subject: [PATCH 280/295] chore: collect metadata from type DSL --- lib/openai/internal/type/array_of.rb | 1 + lib/openai/internal/type/base_model.rb | 4 +- lib/openai/internal/type/converter.rb | 27 ++++++++++++ lib/openai/internal/type/hash_of.rb | 1 + lib/openai/internal/type/union.rb | 16 ++++--- rbi/openai/internal/type/converter.rbi | 46 ++++++++++++++++++++ rbi/openai/internal/type/union.rbi | 9 +++- sig/openai/internal/type/converter.rbs | 17 ++++++++ sig/openai/internal/type/union.rbs | 4 +- test/openai/internal/type/base_model_test.rb | 32 ++++++++++++++ 10 files changed, 145 insertions(+), 12 deletions(-) diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index f939a621..b43a8c49 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -148,6 +148,7 @@ def to_sorbet_type # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) + @meta = OpenAI::Internal::Type::Converter.meta_info(type_info, spec) @nilable = spec.fetch(:nil?, false) end diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index f5bfdcd6..791db452 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -52,6 +52,7 @@ def fields # # @option spec [Boolean] :"nil?" private def add_field(name_sym, required:, type_info:, spec:) + meta = OpenAI::Internal::Type::Converter.meta_info(type_info, spec) type_fn, info = case type_info in Proc | OpenAI::Internal::Type::Converter | Class @@ -74,7 +75,8 @@ def fields required: required, nilable: nilable, const: const, - type_fn: type_fn + type_fn: type_fn, + meta: meta } define_method(setter) do |value| diff --git a/lib/openai/internal/type/converter.rb b/lib/openai/internal/type/converter.rb index 2784b03b..0b31dfcf 100644 --- a/lib/openai/internal/type/converter.rb +++ b/lib/openai/internal/type/converter.rb @@ -98,6 +98,33 @@ def type_info(spec) end end + # @api private + # + # @param type_info [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] . + # + # @option type_info [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option type_info [Proc] :enum + # + # @option type_info [Proc] :union + # + # @option type_info [Boolean] :"nil?" + # + # @param spec [Hash{Symbol=>Object}, Proc, OpenAI::Internal::Type::Converter, Class] . + # + # @option spec [NilClass, TrueClass, FalseClass, Integer, Float, Symbol] :const + # + # @option spec [Proc] :enum + # + # @option spec [Proc] :union + # + # @option spec [Boolean] :"nil?" + # + # @return [Hash{Symbol=>Object}] + def meta_info(type_info, spec) + [spec, type_info].grep(Hash).first.to_h.except(:const, :enum, :union, :nil?) 
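`meta_info` keeps every DSL option that is not one of the four structural keys (`:const`, `:enum`, `:union`, `:nil?`), taking whichever of `type_info`/`spec` arrived in hash form. A sketch of the effect on a model definition, mirroring the test added at the end of this patch:

```ruby
class Dog < OpenAI::Internal::Type::BaseModel
  required :name, String, doc: "the dog's display name"
end

# The extra `doc:` option is preserved under the field's :meta key.
Dog.fields.fetch(:name).fetch(:meta) # => {doc: "the dog's display name"}
```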
+ end + # @api private # # @param translate_names [Boolean] diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 1f178fcd..4c4a00d0 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -168,6 +168,7 @@ def to_sorbet_type # @option spec [Boolean] :"nil?" def initialize(type_info, spec = {}) @item_type_fn = OpenAI::Internal::Type::Converter.type_info(type_info || spec) + @meta = OpenAI::Internal::Type::Converter.meta_info(type_info, spec) @nilable = spec.fetch(:nil?, false) end diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index dda813a2..0199d301 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -33,20 +33,20 @@ module Union # # All of the specified variant info for this union. # - # @return [Array] + # @return [ArrayObject})>] private def known_variants = (@known_variants ||= []) # @api private # - # @return [Array] + # @return [ArrayObject})>] protected def derefed_variants - known_variants.map { |key, variant_fn| [key, variant_fn.call] } + known_variants.map { |key, variant_fn, meta| [key, variant_fn.call, meta] } end # All of the specified variants for this union. # # @return [Array] - def variants = derefed_variants.map(&:last) + def variants = derefed_variants.map { _2 } # @api private # @@ -72,12 +72,13 @@ def variants = derefed_variants.map(&:last) # # @option spec [Boolean] :"nil?" private def variant(key, spec = nil) + meta = OpenAI::Internal::Type::Converter.meta_info(nil, spec) variant_info = case key in Symbol - [key, OpenAI::Internal::Type::Converter.type_info(spec)] + [key, OpenAI::Internal::Type::Converter.type_info(spec), meta] in Proc | OpenAI::Internal::Type::Converter | Class | Hash - [nil, OpenAI::Internal::Type::Converter.type_info(key)] + [nil, OpenAI::Internal::Type::Converter.type_info(key), meta] end known_variants << variant_info @@ -100,7 +101,8 @@ def variants = derefed_variants.map(&:last) return nil if key == OpenAI::Internal::OMIT key = key.to_sym if key.is_a?(String) - known_variants.find { |k,| k == key }&.last&.call + _, found = known_variants.find { |k,| k == key } + found&.call else nil end diff --git a/rbi/openai/internal/type/converter.rbi b/rbi/openai/internal/type/converter.rbi index 5996a904..4c746081 100644 --- a/rbi/openai/internal/type/converter.rbi +++ b/rbi/openai/internal/type/converter.rbi @@ -86,6 +86,52 @@ module OpenAI def self.type_info(spec) end + # @api private + sig do + params( + type_info: + T.any( + { + const: + T.nilable( + T.any(NilClass, T::Boolean, Integer, Float, Symbol) + ), + enum: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ), + union: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ) + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ), + spec: + T.any( + { + const: + T.nilable( + T.any(NilClass, T::Boolean, Integer, Float, Symbol) + ), + enum: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ), + union: + T.nilable( + T.proc.returns(OpenAI::Internal::Type::Converter::Input) + ) + }, + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::Type::Converter::Input + ) + ).returns(OpenAI::Internal::AnyHash) + end + def self.meta_info(type_info, spec) + end + # @api private sig do params(translate_names: T::Boolean).returns( diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi 
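With this change each entry in `known_variants` becomes a `[key, variant_fn, meta]` triple, so variant-level options such as `doc:` survive alongside the deferred type. A sketch (`known_variants` is private, hence `send`, as in the test below):

```ruby
module Pet
  extend OpenAI::Internal::Type::Union

  variant -> { Integer }, doc: "age-only representation"
  variant -> { String },  doc: "name-only representation"
end

Pet.send(:known_variants).map(&:last)
# => [{doc: "age-only representation"}, {doc: "name-only representation"}]
```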
index fee27fc2..4aa6836b 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -16,7 +16,8 @@ module OpenAI T::Array[ [ T.nilable(Symbol), - T.proc.returns(OpenAI::Internal::Type::Converter::Input) + T.proc.returns(OpenAI::Internal::Type::Converter::Input), + OpenAI::Internal::AnyHash ] ] ) @@ -25,7 +26,11 @@ module OpenAI end # @api private - sig { returns(T::Array[[T.nilable(Symbol), T.anything]]) } + sig do + returns( + T::Array[[T.nilable(Symbol), T.anything, OpenAI::Internal::AnyHash]] + ) + end protected def derefed_variants end diff --git a/sig/openai/internal/type/converter.rbs b/sig/openai/internal/type/converter.rbs index 552a5330..3a4aa4a6 100644 --- a/sig/openai/internal/type/converter.rbs +++ b/sig/openai/internal/type/converter.rbs @@ -39,6 +39,23 @@ module OpenAI | OpenAI::Internal::Type::Converter::input spec ) -> (^-> top) + def self.meta_info: ( + { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Internal::Type::Converter::input?, + union: ^-> OpenAI::Internal::Type::Converter::input? + } + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input type_info, + { + const: (nil | bool | Integer | Float | Symbol)?, + enum: ^-> OpenAI::Internal::Type::Converter::input?, + union: ^-> OpenAI::Internal::Type::Converter::input? + } + | ^-> OpenAI::Internal::Type::Converter::input + | OpenAI::Internal::Type::Converter::input spec + ) -> ::Hash[Symbol, top] + def self.new_coerce_state: ( ?translate_names: bool ) -> OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 7a01ff6e..6f209ac7 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -5,9 +5,9 @@ module OpenAI include OpenAI::Internal::Type::Converter include OpenAI::Internal::Util::SorbetRuntimeSupport - private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Internal::Type::Converter::input)]] + private def self.known_variants: -> ::Array[[Symbol?, (^-> OpenAI::Internal::Type::Converter::input), ::Hash[Symbol, top]]] - def self.derefed_variants: -> ::Array[[Symbol?, top]] + def self.derefed_variants: -> ::Array[[Symbol?, top, ::Hash[Symbol, top]]] def self.variants: -> ::Array[top] diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index f8d54669..5f3ba474 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -687,3 +687,35 @@ def test_equality end end end + +class OpenAI::Test::MetaInfoTest < Minitest::Test + A1 = OpenAI::Internal::Type::ArrayOf[Integer, nil?: true, doc: "dog"] + H1 = OpenAI::Internal::Type::HashOf[-> { String }, nil?: true, doc: "dawg"] + + class M1 < OpenAI::Internal::Type::BaseModel + required :a, Integer, doc: "dog" + optional :b, -> { String }, nil?: true, doc: "dawg" + end + + module U1 + extend OpenAI::Internal::Type::Union + + variant -> { Integer }, const: 2, doc: "dog" + variant -> { String }, doc: "dawg" + end + + def test_meta_retrieval + m1 = A1.instance_variable_get(:@meta) + m2 = H1.instance_variable_get(:@meta) + assert_equal({doc: "dog"}, m1) + assert_equal({doc: "dawg"}, m2) + + ma, mb = M1.fields.fetch_values(:a, :b) + assert_equal({doc: "dog"}, ma.fetch(:meta)) + assert_equal({doc: "dawg"}, mb.fetch(:meta)) + + ua, ub = U1.send(:known_variants).map(&:last) + assert_equal({doc: "dog"}, ua) + assert_equal({doc: "dawg"}, ub) + end +end From 
28be51165b38547db5a6f74b58cf5f51003088da Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sat, 9 Aug 2025 15:20:28 +0000 Subject: [PATCH 281/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 6db19b95..463488b6 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.17.0" + ".": "0.17.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index ea2ca4c6..bceb047e 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.17.0) + openai (0.17.1) connection_pool GEM diff --git a/README.md b/README.md index e492b523..27c5dc78 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.17.0" +gem "openai", "~> 0.17.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 182ed68d..32d93453 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.17.0" + VERSION = "0.17.1" end From b05de06ba1ece4a28ae06eb560b145954e01e97b Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 11 Aug 2025 12:56:02 +0000 Subject: [PATCH 282/295] codegen metadata --- .stats.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.stats.yml b/.stats.yml index 65629665..bbf5750e 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-6a1bfd4738fff02ef5becc3fdb2bf0cd6c026f2c924d4147a2a515474477dd9a.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9cadfad609f94f20ebf74fdc06a80302f1a324dc69700a309a8056aabca82fd2.yml openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba -config_hash: a67c5e195a59855fe8a5db0dc61a3e7f +config_hash: 68337b532875626269c304372a669f67 From 0c5b0d8cbdb0b3d3be990ea7268e6febc841d444 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 11 Aug 2025 16:19:14 +0000 Subject: [PATCH 283/295] chore(internal): update test skipping reason --- test/openai/resources/audio/speech_test.rb | 2 +- test/openai/resources/containers/files/content_test.rb | 2 +- test/openai/resources/files_test.rb | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/openai/resources/audio/speech_test.rb b/test/openai/resources/audio/speech_test.rb index f50614b3..af4189d5 100644 --- a/test/openai/resources/audio/speech_test.rb +++ b/test/openai/resources/audio/speech_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Audio::SpeechTest < OpenAI::Test::ResourceTest def test_create_required_params - skip("skipped: test server currently has no support for method content-type") + skip("Prism doesn't support application/octet-stream responses") response = @openai.audio.speech.create(input: "input", model: :"tts-1", voice: :alloy) diff --git a/test/openai/resources/containers/files/content_test.rb b/test/openai/resources/containers/files/content_test.rb index 4d4252d4..0a57b6cb 100644 --- 
a/test/openai/resources/containers/files/content_test.rb +++ b/test/openai/resources/containers/files/content_test.rb @@ -4,7 +4,7 @@ class OpenAI::Test::Resources::Containers::Files::ContentTest < OpenAI::Test::ResourceTest def test_retrieve_required_params - skip("skipped: test server currently has no support for method content-type") + skip("Prism doesn't support application/binary responses") response = @openai.containers.files.content.retrieve("file_id", container_id: "container_id") diff --git a/test/openai/resources/files_test.rb b/test/openai/resources/files_test.rb index de03395e..5833751a 100644 --- a/test/openai/resources/files_test.rb +++ b/test/openai/resources/files_test.rb @@ -93,7 +93,7 @@ def test_delete end def test_content - skip("skipped: test server currently has no support for method content-type") + skip("Prism doesn't support application/binary responses") response = @openai.files.content("file_id") From f3f5371c7de0ed31f54661ef47a4a952e5e321b7 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 15 Aug 2025 19:12:09 +0000 Subject: [PATCH 284/295] feat(api): add new text parameters, expiration options --- .stats.yml | 6 +- lib/openai/models/batch_create_params.rb | 39 +++- .../beta/thread_create_and_run_params.rb | 4 +- lib/openai/models/beta/threads/run.rb | 4 +- .../models/beta/threads/run_create_params.rb | 4 +- lib/openai/models/chat/chat_completion.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 12 +- .../models/chat/completion_create_params.rb | 53 ++++- lib/openai/models/file_create_params.rb | 38 +++- lib/openai/models/reasoning.rb | 2 +- lib/openai/models/responses/response.rb | 78 +++++-- .../responses/response_create_params.rb | 81 +++++-- lib/openai/models/upload_create_params.rb | 38 +++- lib/openai/resources/batches.rb | 4 +- lib/openai/resources/chat/completions.rb | 8 +- lib/openai/resources/files.rb | 6 +- lib/openai/resources/responses.rb | 8 +- lib/openai/resources/uploads.rb | 4 +- rbi/openai/models/batch_create_params.rbi | 60 ++++++ .../beta/thread_create_and_run_params.rbi | 6 +- rbi/openai/models/beta/threads/run.rbi | 6 +- .../models/beta/threads/run_create_params.rbi | 6 +- rbi/openai/models/chat/chat_completion.rbi | 15 +- .../models/chat/chat_completion_chunk.rbi | 15 +- .../models/chat/completion_create_params.rbi | 119 ++++++++++- rbi/openai/models/file_create_params.rbi | 56 +++++ rbi/openai/models/reasoning.rbi | 2 +- rbi/openai/models/responses/response.rbi | 180 +++++++++++++--- .../responses/response_create_params.rbi | 200 +++++++++++++++--- rbi/openai/models/upload_create_params.rbi | 56 +++++ rbi/openai/resources/batches.rbi | 5 + rbi/openai/resources/beta/threads.rbi | 4 +- rbi/openai/resources/beta/threads/runs.rbi | 4 +- rbi/openai/resources/chat/completions.rbi | 14 +- rbi/openai/resources/files.rbi | 6 +- rbi/openai/resources/responses.rbi | 28 +-- rbi/openai/resources/uploads.rbi | 4 + sig/openai/models/batch_create_params.rbs | 23 +- .../models/chat/completion_create_params.rbs | 38 ++++ sig/openai/models/file_create_params.rbs | 23 +- sig/openai/models/responses/response.rbs | 50 ++++- .../responses/response_create_params.rbs | 50 ++++- sig/openai/models/upload_create_params.rbs | 23 +- sig/openai/resources/batches.rbs | 1 + sig/openai/resources/chat/completions.rbs | 2 + sig/openai/resources/files.rbs | 1 + sig/openai/resources/responses.rbs | 4 +- sig/openai/resources/uploads.rbs | 1 + test/openai/resources/responses_test.rb | 6 +- 49 files 
changed, 1197 insertions(+), 212 deletions(-) diff --git a/.stats.yml b/.stats.yml index bbf5750e..ce30bcee 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-9cadfad609f94f20ebf74fdc06a80302f1a324dc69700a309a8056aabca82fd2.yml -openapi_spec_hash: 3eb8d86c06f0bb5e1190983e5acfc9ba -config_hash: 68337b532875626269c304372a669f67 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-24be531010b354303d741fc9247c1f84f75978f9f7de68aca92cb4f240a04722.yml +openapi_spec_hash: 3e46f439f6a863beadc71577eb4efa15 +config_hash: ed87b9139ac595a04a2162d754df2fed diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 9b39fcd2..bdb51f2e 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -48,7 +48,14 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!method initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) + # @!attribute output_expires_after + # The expiration policy for the output and/or error file that are generated for a + # batch. + # + # @return [OpenAI::Models::BatchCreateParams::OutputExpiresAfter, nil] + optional :output_expires_after, -> { OpenAI::BatchCreateParams::OutputExpiresAfter } + + # @!method initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, output_expires_after: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::BatchCreateParams} for more details. # @@ -60,6 +67,8 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # + # @param output_expires_after [OpenAI::Models::BatchCreateParams::OutputExpiresAfter] The expiration policy for the output and/or error file that are generated for a + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The time frame within which the batch should be processed. Currently only `24h` @@ -88,6 +97,34 @@ module Endpoint # @!method self.values # @return [Array] end + + class OutputExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. Note that the anchor is the file creation time, not the time the + # batch is created. + # + # @return [Symbol, :created_at] + required :anchor, const: :created_at + + # @!attribute seconds + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + # + # @return [Integer] + required :seconds, Integer + + # @!method initialize(seconds:, anchor: :created_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::BatchCreateParams::OutputExpiresAfter} for more details. + # + # The expiration policy for the output and/or error file that are generated for a + # batch. + # + # @param seconds [Integer] The number of seconds after the anchor time that the file will expire. Must be b + # + # @param anchor [Symbol, :created_at] Anchor timestamp after which the expiration policy applies. 
Supported anchors: ` + end end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index d244bcee..c53df32e 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -157,7 +157,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] optional :truncation_strategy, @@ -694,7 +694,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # details. # # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 2533a7a1..1f1b9746 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -195,7 +195,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @return [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true @@ -415,7 +415,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Beta::Threads::Run::TruncationStrategy} for more details. # # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to # diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 3c910eb7..43e028d1 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -184,7 +184,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @return [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] optional :truncation_strategy, @@ -413,7 +413,7 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # details. # # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. # # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. 
If set to # diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index f104825f..c288fe61 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -47,9 +47,8 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -61,6 +60,8 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletion::ServiceTier }, nil?: true # @!attribute system_fingerprint + # @deprecated + # # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when @@ -196,9 +197,8 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 52d7dcbc..25e97bc0 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -46,9 +46,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -60,6 +59,8 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletionChunk::ServiceTier }, nil?: true # @!attribute system_fingerprint + # @deprecated + # # This fingerprint represents the backend configuration that the model runs with. 
# Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. @@ -379,9 +380,8 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index b81acb79..19321125 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -226,6 +226,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :safety_identifier, String # @!attribute seed + # @deprecated + # # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and # parameters should return the same result. Determinism is not guaranteed, and you @@ -244,9 +246,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -291,6 +292,11 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true + # @!attribute text + # + # @return [OpenAI::Models::Chat::CompletionCreateParams::Text, nil] + optional :text, -> { OpenAI::Chat::CompletionCreateParams::Text } + # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. 
`auto` means the model can @@ -359,7 +365,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # @@ -415,6 +421,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # + # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] + # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. You can provide either @@ -580,9 +588,8 @@ module ResponseFormat # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -620,6 +627,38 @@ module Stop StringArray = OpenAI::Internal::Type::ArrayOf[String] end + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
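Chat Completions gains the same nested `text` configuration the Responses API uses, so verbosity can now be set per request here as well. A hedged sketch, reusing the `client` from the earlier example and an illustrative prompt:

```ruby
chat = client.chat.completions.create(
  model: :"gpt-5",
  messages: [{role: :user, content: "Explain HTTP caching in two sentences."}],
  text: {verbosity: :low} # :low | :medium | :high, or nil to leave unset
)
puts chat.choices.first.message.content
```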
+ # + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Text::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Chat::CompletionCreateParams::Text::Verbosity }, nil?: true + + # @!method initialize(verbosity: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Chat::CompletionCreateParams::Text} for more details. + # + # @param verbosity [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @see OpenAI::Models::Chat::CompletionCreateParams::Text#verbosity + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + end + # Constrains the verbosity of the model's response. Lower values will result in # more concise responses, while higher values will result in more verbose # responses. Currently supported values are `low`, `medium`, and `high`. diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 44e48264..7de18db4 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -22,7 +22,14 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } - # @!method initialize(file:, purpose:, request_options: {}) + # @!attribute expires_after + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + # + # @return [OpenAI::Models::FileCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::FileCreateParams::ExpiresAfter } + + # @!method initialize(file:, purpose:, expires_after: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::FileCreateParams} for more details. # @@ -30,7 +37,36 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # + # @param expires_after [OpenAI::Models::FileCreateParams::ExpiresAfter] The expiration policy for a file. By default, files with `purpose=batch` expire + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. + # + # @return [Symbol, :created_at] + required :anchor, const: :created_at + + # @!attribute seconds + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + # + # @return [Integer] + required :seconds, Integer + + # @!method initialize(seconds:, anchor: :created_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileCreateParams::ExpiresAfter} for more details. + # + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. 
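Both expiration policies share one shape: a constant `:created_at` anchor plus a `seconds` offset between 3600 (1 hour) and 2592000 (30 days). A sketch of supplying them on upload and on batch creation (the file path and IDs are illustrative):

```ruby
file = client.files.create(
  file: Pathname("batch_input.jsonl"),
  purpose: :batch,
  expires_after: {anchor: :created_at, seconds: 86_400} # one day after upload
)

batch = client.batches.create(
  completion_window: :"24h",
  endpoint: :"/v1/chat/completions",
  input_file_id: file.id,
  output_expires_after: {anchor: :created_at, seconds: 604_800} # outputs kept one week
)
```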
+ # + # @param seconds [Integer] The number of seconds after the anchor time that the file will expire. Must be b + # + # @param anchor [Symbol, :created_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index d39b2671..f2718df3 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -37,7 +37,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Reasoning} for more details. # - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 5fdf354e..917bcc19 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -182,7 +182,7 @@ class Response < OpenAI::Internal::Type::BaseModel optional :prompt_cache_key, String # @!attribute reasoning - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -209,9 +209,8 @@ class Response < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -230,14 +229,9 @@ class Response < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Responses::ResponseStatus } # @!attribute text - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - # - # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] - optional :text, -> { OpenAI::Responses::ResponseTextConfig } + # @return [OpenAI::Models::Responses::Response::Text, nil] + optional :text, -> { OpenAI::Responses::Response::Text } # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to @@ -319,7 +313,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **gpt-5 and o-series models only** # # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi # @@ -327,7 +321,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. 
One of `completed`, `failed`, # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::Response::Text] # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -438,9 +432,8 @@ module ToolChoice # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -462,6 +455,59 @@ module ServiceTier # @return [Array] end + # @see OpenAI::Models::Responses::Response#text + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format + + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Responses::Response::Text::Verbosity, nil] + optional :verbosity, enum: -> { OpenAI::Responses::Response::Text::Verbosity }, nil?: true + + # @!method initialize(format_: nil, verbosity: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Response::Text} for more details. + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + # + # @param verbosity [Symbol, OpenAI::Models::Responses::Response::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
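
A minimal construction sketch for this nested model (an editor's illustration;
it relies only on the generated accessors shown above):

    # `format_` is the Ruby-side name; `api_name: :format` maps it to the
    # wire field `format` (likely to avoid clashing with `Kernel#format`).
    text = OpenAI::Models::Responses::Response::Text.new(
      format_: {type: :json_object},
      verbosity: :low
    )
    text.verbosity # => :low
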
+ # + # @see OpenAI::Models::Responses::Response::Text#verbosity + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + end + # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index b11ef1f3..49515075 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -132,7 +132,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :prompt_cache_key, String # @!attribute reasoning - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -159,9 +159,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -194,14 +193,9 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :temperature, Float, nil?: true # @!attribute text - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - # - # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] - optional :text, -> { OpenAI::Responses::ResponseTextConfig } + # @return [OpenAI::Models::Responses::ResponseCreateParams::Text, nil] + optional :text, -> { OpenAI::Responses::ResponseCreateParams::Text } # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a @@ -301,7 +295,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **gpt-5 and o-series models only** # # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi # @@ -313,7 +307,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. 
Can be plain + # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # @@ -361,9 +355,8 @@ module Input # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -405,6 +398,62 @@ class StreamOptions < OpenAI::Internal::Type::BaseModel # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds end + class Text < OpenAI::Internal::Type::BaseModel + # @!attribute format_ + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + # + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] + optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format + + # @!attribute verbosity + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Text::Verbosity, nil] + optional :verbosity, + enum: -> { + OpenAI::Responses::ResponseCreateParams::Text::Verbosity + }, + nil?: true + + # @!method initialize(format_: nil, verbosity: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseCreateParams::Text} for more details. + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. + # + # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in + + # Constrains the verbosity of the model's response. 
Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + # + # @see OpenAI::Models::Responses::ResponseCreateParams::Text#verbosity + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 54e7530f..2431059e 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -37,7 +37,14 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } - # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) + # @!attribute expires_after + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + # + # @return [OpenAI::Models::UploadCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::UploadCreateParams::ExpiresAfter } + + # @!method initialize(bytes:, filename:, mime_type:, purpose:, expires_after: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::UploadCreateParams} for more details. # @@ -49,7 +56,36 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. # + # @param expires_after [OpenAI::Models::UploadCreateParams::ExpiresAfter] The expiration policy for a file. By default, files with `purpose=batch` expire + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. + # + # @return [Symbol, :created_at] + required :anchor, const: :created_at + + # @!attribute seconds + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + # + # @return [Integer] + required :seconds, Integer + + # @!method initialize(seconds:, anchor: :created_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::UploadCreateParams::ExpiresAfter} for more details. + # + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + # + # @param seconds [Integer] The number of seconds after the anchor time that the file will expire. Must be b + # + # @param anchor [Symbol, :created_at] Anchor timestamp after which the expiration policy applies. 
Supported anchors: ` + end end end end diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index bdd8d876..bd8f7530 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -8,7 +8,7 @@ class Batches # # Creates and executes a batch from an uploaded file of requests # - # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) + # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, output_expires_after: nil, request_options: {}) # # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` # @@ -18,6 +18,8 @@ class Batches # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # + # @param output_expires_after [OpenAI::Models::BatchCreateParams::OutputExpiresAfter] The expiration policy for the output and/or error file that are generated for a + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Batch] diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index d6be9e1e..1b326dce 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -30,7 +30,7 @@ class Completions # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -84,6 +84,8 @@ class Completions # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # + # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] + # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. 
You can provide either @@ -141,7 +143,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -195,6 +197,8 @@ def create(params) # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # + # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] + # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. You can provide either diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index 34eccf82..52d9a808 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -8,7 +8,7 @@ class Files # # Upload a file that can be used across various endpoints. Individual files can be # up to 512 MB, and the size of all files uploaded by one organization can be up - # to 100 GB. + # to 1 TB. # # The Assistants API supports files up to 2 million tokens and of specific file # types. See the @@ -28,12 +28,14 @@ class Files # Please [contact us](https://help.openai.com/) if you need to increase these # storage limits. # - # @overload create(file:, purpose:, request_options: {}) + # @overload create(file:, purpose:, expires_after: nil, request_options: {}) # # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A # + # @param expires_after [OpenAI::Models::FileCreateParams::ExpiresAfter] The expiration policy for a file. 
By default, files with `purpose=batch` expire + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::FileObject] diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index fd3c429e..6ec8f719 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -49,7 +49,7 @@ class Responses # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **gpt-5 and o-series models only** # # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi # @@ -61,7 +61,7 @@ class Responses # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # @@ -138,7 +138,7 @@ def create(params = {}) # # @param prompt_cache_key [String] Used by OpenAI to cache responses for similar requests to optimize your cache hi # - # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **gpt-5 and o-series models only** # # @param safety_identifier [String] A stable identifier used to help detect users of your application that may be vi # @@ -150,7 +150,7 @@ def create(params = {}) # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index 65d75883..b9037fd9 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -29,7 +29,7 @@ class Uploads # the documentation on # [creating a File](https://platform.openai.com/docs/api-reference/files/create). # - # @overload create(bytes:, filename:, mime_type:, purpose:, request_options: {}) + # @overload create(bytes:, filename:, mime_type:, purpose:, expires_after: nil, request_options: {}) # # @param bytes [Integer] The number of bytes in the file you are uploading. # @@ -39,6 +39,8 @@ class Uploads # # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. 
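
For illustration (an editor's sketch, not from the patch), an upload created
with the new expiration policy; the byte count and MIME type are placeholders:

    upload = client.uploads.create(
      bytes: 1_048_576,
      filename: "batch_input.jsonl",
      mime_type: "text/jsonl",
      purpose: :batch,
      expires_after: {anchor: :created_at, seconds: 2_592_000}
    )
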
# + # @param expires_after [OpenAI::Models::UploadCreateParams::ExpiresAfter] The expiration policy for a file. By default, files with `purpose=batch` expire + # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # # @return [OpenAI::Models::Upload] diff --git a/rbi/openai/models/batch_create_params.rbi b/rbi/openai/models/batch_create_params.rbi index 7568daed..c15c791a 100644 --- a/rbi/openai/models/batch_create_params.rbi +++ b/rbi/openai/models/batch_create_params.rbi @@ -44,6 +44,19 @@ module OpenAI sig { returns(T.nilable(T::Hash[Symbol, String])) } attr_accessor :metadata + # The expiration policy for the output and/or error file that are generated for a + # batch. + sig { returns(T.nilable(OpenAI::BatchCreateParams::OutputExpiresAfter)) } + attr_reader :output_expires_after + + sig do + params( + output_expires_after: + OpenAI::BatchCreateParams::OutputExpiresAfter::OrHash + ).void + end + attr_writer :output_expires_after + sig do params( completion_window: @@ -51,6 +64,8 @@ module OpenAI endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), + output_expires_after: + OpenAI::BatchCreateParams::OutputExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -80,6 +95,9 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, + # The expiration policy for the output and/or error file that are generated for a + # batch. + output_expires_after: nil, request_options: {} ) end @@ -92,6 +110,7 @@ module OpenAI endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), + output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter, request_options: OpenAI::RequestOptions } ) @@ -165,6 +184,47 @@ module OpenAI def self.values end end + + class OutputExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::BatchCreateParams::OutputExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. Note that the anchor is the file creation time, not the time the + # batch is created. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + sig { returns(Integer) } + attr_accessor :seconds + + # The expiration policy for the output and/or error file that are generated for a + # batch. + sig do + params(seconds: Integer, anchor: Symbol).returns(T.attached_class) + end + def self.new( + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + seconds:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. Note that the anchor is the file creation time, not the time the + # batch is created. 
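
Sketched usage of the batch-side policy (an assumption layered on the signature
above; the window and endpoint symbols follow the enum style used elsewhere in
the gem):

    batch = client.batches.create(
      completion_window: :"24h",
      endpoint: :"/v1/chat/completions",
      input_file_id: file.id,  # the File object from the earlier upload sketch
      output_expires_after: {anchor: :created_at, seconds: 604_800}
    )
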
+ anchor: :created_at + ) + end + + sig { override.returns({ anchor: Symbol, seconds: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 2a603dd6..31e4023d 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -187,7 +187,7 @@ module OpenAI attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. sig do returns( T.nilable( @@ -343,7 +343,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. truncation_strategy: nil, request_options: {} ) @@ -1459,7 +1459,7 @@ module OpenAI attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. sig do params( type: diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index 7940c801..1f45da92 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -184,7 +184,7 @@ module OpenAI attr_accessor :tools # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. sig do returns(T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy)) end @@ -375,7 +375,7 @@ module OpenAI # this run. tools:, # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. truncation_strategy:, # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). @@ -740,7 +740,7 @@ module OpenAI attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. sig do params( type: diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index 04d643a2..ccb3b696 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -204,7 +204,7 @@ module OpenAI attr_accessor :top_p # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. sig do returns( T.nilable( @@ -378,7 +378,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. truncation_strategy: nil, request_options: {} ) @@ -803,7 +803,7 @@ module OpenAI attr_accessor :last_messages # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. 
+ # control the initial context window of the run. sig do params( type: diff --git a/rbi/openai/models/chat/chat_completion.rbi b/rbi/openai/models/chat/chat_completion.rbi index 61f4d426..c29271e7 100644 --- a/rbi/openai/models/chat/chat_completion.rbi +++ b/rbi/openai/models/chat/chat_completion.rbi @@ -40,9 +40,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -106,9 +105,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -371,9 +369,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the diff --git a/rbi/openai/models/chat/chat_completion_chunk.rbi b/rbi/openai/models/chat/chat_completion_chunk.rbi index 7865525a..f309f2e3 100644 --- a/rbi/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/openai/models/chat/chat_completion_chunk.rbi @@ -42,9 +42,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -121,9 +120,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -791,9 +789,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index 4af9f379..ae6dd9ce 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -295,9 +295,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -351,6 +350,14 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :temperature + sig { returns(T.nilable(OpenAI::Chat::CompletionCreateParams::Text)) } + attr_reader :text + + sig do + params(text: OpenAI::Chat::CompletionCreateParams::Text::OrHash).void + end + attr_writer :text + # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -529,6 +536,7 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), + text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -695,9 +703,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. 
+ # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -723,6 +730,7 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -825,6 +833,7 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), + text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -1093,9 +1102,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -1172,6 +1180,99 @@ module OpenAI ) end + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Chat::CompletionCreateParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable( + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol + ) + ) + end + attr_accessor :verbosity + + sig do + params( + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol + ) + ).returns(T.attached_class) + end + def self.new( + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil + ) + end + + sig do + override.returns( + { + verbosity: + T.nilable( + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol + ) + } + ) + end + def to_hash + end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. 
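
By way of example (an editor's sketch; the model name is an assumption), the
new chat-side `text` param mirrors the Responses API:

    completion = client.chat.completions.create(
      model: :"gpt-5",
      messages: [{role: :user, content: "Summarize RFC 9110 in two sentences."}],
      text: {verbosity: :low}
    )
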
+ module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Chat::CompletionCreateParams::Text::Verbosity + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end + end + # Constrains the verbosity of the model's response. Lower values will result in # more concise responses, while higher values will result in more verbose # responses. Currently supported values are `low`, `medium`, and `high`. diff --git a/rbi/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi index 493d915c..ef0c24d4 100644 --- a/rbi/openai/models/file_create_params.rbi +++ b/rbi/openai/models/file_create_params.rbi @@ -22,10 +22,23 @@ module OpenAI sig { returns(OpenAI::FilePurpose::OrSymbol) } attr_accessor :purpose + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + sig { returns(T.nilable(OpenAI::FileCreateParams::ExpiresAfter)) } + attr_reader :expires_after + + sig do + params( + expires_after: OpenAI::FileCreateParams::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + sig do params( file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::FileCreateParams::ExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -37,6 +50,9 @@ module OpenAI # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + expires_after: nil, request_options: {} ) end @@ -46,12 +62,52 @@ module OpenAI { file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::FileCreateParams::ExpiresAfter, request_options: OpenAI::RequestOptions } ) end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::FileCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + sig { returns(Integer) } + attr_accessor :seconds + + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + sig do + params(seconds: Integer, anchor: Symbol).returns(T.attached_class) + end + def self.new( + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + seconds:, + # Anchor timestamp after which the expiration policy applies. 
Supported anchors: + # `created_at`. + anchor: :created_at + ) + end + + sig { override.returns({ anchor: Symbol, seconds: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/reasoning.rbi b/rbi/openai/models/reasoning.rbi index 5243471d..f6364147 100644 --- a/rbi/openai/models/reasoning.rbi +++ b/rbi/openai/models/reasoning.rbi @@ -28,7 +28,7 @@ module OpenAI sig { returns(T.nilable(OpenAI::Reasoning::Summary::OrSymbol)) } attr_accessor :summary - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 8980d26b..8a36dab9 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -176,7 +176,7 @@ module OpenAI sig { params(prompt_cache_key: String).void } attr_writer :prompt_cache_key - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -205,9 +205,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -231,15 +230,10 @@ module OpenAI sig { params(status: OpenAI::Responses::ResponseStatus::OrSymbol).void } attr_writer :status - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } + sig { returns(T.nilable(OpenAI::Responses::Response::Text)) } attr_reader :text - sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } + sig { params(text: OpenAI::Responses::Response::Text::OrHash).void } attr_writer :text # An integer between 0 and 20 specifying the number of most likely tokens to @@ -351,7 +345,7 @@ module OpenAI service_tier: T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), status: OpenAI::Responses::ResponseStatus::OrSymbol, - text: OpenAI::Responses::ResponseTextConfig::OrHash, + text: OpenAI::Responses::Response::Text::OrHash, top_logprobs: T.nilable(Integer), truncation: T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), @@ -453,7 +447,7 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -472,9 +466,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -485,11 +478,6 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, `cancelled`, `queued`, or `incomplete`. status: nil, - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -548,7 +536,7 @@ module OpenAI OpenAI::Responses::Response::ServiceTier::TaggedSymbol ), status: OpenAI::Responses::ResponseStatus::TaggedSymbol, - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::Response::Text, top_logprobs: T.nilable(Integer), truncation: T.nilable( @@ -717,9 +705,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -764,6 +751,149 @@ module OpenAI end end + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Response::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable(OpenAI::Responses::ResponseFormatTextConfig::Variants) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Constrains the verbosity of the model's response. 
Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable( + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ) + ) + end + attr_accessor :verbosity + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), + verbosity: + T.nilable( + OpenAI::Responses::Response::Text::Verbosity::OrSymbol + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil + ) + end + + sig do + override.returns( + { + format_: OpenAI::Responses::ResponseFormatTextConfig::Variants, + verbosity: + T.nilable( + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ) + } + ) + end + def to_hash + end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Response::Text::Verbosity) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end + end + # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index fe108358..cdd0ea9b 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -157,7 +157,7 @@ module OpenAI sig { params(prompt_cache_key: String).void } attr_writer :prompt_cache_key - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -186,9 +186,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. 
# - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -233,15 +232,16 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :temperature - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } + sig do + returns(T.nilable(OpenAI::Responses::ResponseCreateParams::Text)) + end attr_reader :text - sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } + sig do + params( + text: OpenAI::Responses::ResponseCreateParams::Text::OrHash + ).void + end attr_writer :text # How the model should select which tool (or tools) to use when generating a @@ -409,7 +409,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseTextConfig::OrHash, + text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -515,7 +515,7 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -534,9 +534,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -553,11 +552,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. 
See the `tools` parameter to see how to specify which tools the model @@ -642,7 +636,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::ResponseCreateParams::Text, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -717,9 +711,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -816,6 +809,163 @@ module OpenAI end end + class Text < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCreateParams::Text, + OpenAI::Internal::AnyHash + ) + end + + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. + # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + sig do + returns( + T.nilable( + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject + ) + ) + ) + end + attr_reader :format_ + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ) + ).void + end + attr_writer :format_ + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol + ) + ) + end + attr_accessor :verbosity + + sig do + params( + format_: + T.any( + OpenAI::ResponseFormatText::OrHash, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, + OpenAI::ResponseFormatJSONObject::OrHash + ), + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol + ) + ).returns(T.attached_class) + end + def self.new( + # An object specifying the format that the model must output. + # + # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which + # ensures the model will match your supplied JSON schema. Learn more in the + # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + # + # The default format is `{ "type": "text" }` with no additional options. 
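The `format_` field documented above (written to the wire as `"format"`; the `api_name: :format` mapping appears later in this series) is how Structured Outputs are selected on the Responses API. A hedged sketch under the same client assumptions; the schema and its name are illustrative placeholders:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# `type: :json_schema` turns on Structured Outputs; `type: :json_object`
# would select the older JSON mode instead.
response = client.responses.create(
  model: "gpt-5",
  input: "Extract the city and country: 'Pelé was born in Três Corações, Brazil.'",
  text: {
    format_: {
      type: :json_schema,
      name: "place", # hypothetical schema name
      strict: true,
      schema: {
        type: "object",
        properties: {city: {type: "string"}, country: {type: "string"}},
        required: %w[city country],
        additionalProperties: false
      }
    }
  }
)
```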
+ # + # **Not recommended for gpt-4o and newer models:** + # + # Setting to `{ "type": "json_object" }` enables the older JSON mode, which + # ensures the message the model generates is valid JSON. Using `json_schema` is + # preferred for models that support it. + format_: nil, + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + verbosity: nil + ) + end + + sig do + override.returns( + { + format_: + T.any( + OpenAI::ResponseFormatText, + OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, + OpenAI::ResponseFormatJSONObject + ), + verbosity: + T.nilable( + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol + ) + } + ) + end + def to_hash + end + + # Constrains the verbosity of the model's response. Lower values will result in + # more concise responses, while higher values will result in more verbose + # responses. Currently supported values are `low`, `medium`, and `high`. + module Verbosity + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseCreateParams::Text::Verbosity + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol + ] + ) + end + def self.values + end + end + end + # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. diff --git a/rbi/openai/models/upload_create_params.rbi b/rbi/openai/models/upload_create_params.rbi index d7e3500c..63349340 100644 --- a/rbi/openai/models/upload_create_params.rbi +++ b/rbi/openai/models/upload_create_params.rbi @@ -33,12 +33,25 @@ module OpenAI sig { returns(OpenAI::FilePurpose::OrSymbol) } attr_accessor :purpose + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + sig { returns(T.nilable(OpenAI::UploadCreateParams::ExpiresAfter)) } + attr_reader :expires_after + + sig do + params( + expires_after: OpenAI::UploadCreateParams::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + sig do params( bytes: Integer, filename: String, mime_type: String, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::UploadCreateParams::ExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -57,6 +70,9 @@ module OpenAI # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. 
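`UploadCreateParams::ExpiresAfter` as typed here accepts only the `created_at` anchor plus a TTL bounded to 3600–2592000 seconds. A sketch of supplying it as a hash through `uploads.create`, assuming the usual client setup; the byte count and filename are placeholders:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Expire the upload one day after creation; 86_400 sits inside the
# documented 3_600..2_592_000 range.
upload = client.uploads.create(
  bytes: 2_147_483_648,
  filename: "training_data.jsonl",
  mime_type: "text/jsonl",
  purpose: :batch,
  expires_after: {anchor: :created_at, seconds: 86_400}
)

puts upload.id
```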
+ expires_after: nil, request_options: {} ) end @@ -68,12 +84,52 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::UploadCreateParams::ExpiresAfter, request_options: OpenAI::RequestOptions } ) end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::UploadCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + sig { returns(Integer) } + attr_accessor :seconds + + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + sig do + params(seconds: Integer, anchor: Symbol).returns(T.attached_class) + end + def self.new( + # The number of seconds after the anchor time that the file will expire. Must be + # between 3600 (1 hour) and 2592000 (30 days). + seconds:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `created_at`. + anchor: :created_at + ) + end + + sig { override.returns({ anchor: Symbol, seconds: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/resources/batches.rbi b/rbi/openai/resources/batches.rbi index 64125755..26543c2d 100644 --- a/rbi/openai/resources/batches.rbi +++ b/rbi/openai/resources/batches.rbi @@ -11,6 +11,8 @@ module OpenAI endpoint: OpenAI::BatchCreateParams::Endpoint::OrSymbol, input_file_id: String, metadata: T.nilable(T::Hash[Symbol, String]), + output_expires_after: + OpenAI::BatchCreateParams::OutputExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Batch) end @@ -40,6 +42,9 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, + # The expiration policy for the output and/or error file that are generated for a + # batch. + output_expires_after: nil, request_options: {} ) end diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index 738bcf29..9efef885 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -242,7 +242,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. truncation_strategy: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or # `#create_and_run` for streaming and non-streaming use cases, respectively. @@ -396,7 +396,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. + # control the initial context window of the run. truncation_strategy: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or # `#create_and_run` for streaming and non-streaming use cases, respectively. 
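`output_expires_after` on batch creation applies the same policy shape to the output and error files a batch generates. A minimal sketch; the input file ID is a hypothetical placeholder from an earlier `files.create`, and the hash coerces into `BatchCreateParams::OutputExpiresAfter` as typed above:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Keep the batch's output/error files for 30 days (2_592_000 seconds,
# the documented maximum) after creation.
batch = client.batches.create(
  completion_window: :"24h",
  endpoint: :"/v1/chat/completions",
  input_file_id: "file-abc123", # hypothetical ID
  output_expires_after: {anchor: :created_at, seconds: 2_592_000}
)

puts batch.status
```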
diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index bab46d8b..d6e83222 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -174,7 +174,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Body param: Controls for how a thread will be truncated prior to the run. Use - # this to control the intial context window of the run. + # this to control the initial context window of the run. truncation_strategy: nil, # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or # `#create` for streaming and non-streaming use cases, respectively. @@ -353,7 +353,7 @@ module OpenAI # We generally recommend altering this or temperature but not both. top_p: nil, # Body param: Controls for how a thread will be truncated prior to the run. Use - # this to control the intial context window of the run. + # this to control the initial context window of the run. truncation_strategy: nil, # There is no need to provide `stream:`. Instead, use `#create_stream_raw` or # `#create` for streaming and non-streaming use cases, respectively. diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index dc064892..f14e5987 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -85,6 +85,7 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), + text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -252,9 +253,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -280,6 +280,7 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -403,6 +404,7 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), + text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -570,9 +572,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. 
+ # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -598,6 +599,7 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means diff --git a/rbi/openai/resources/files.rbi b/rbi/openai/resources/files.rbi index 9ac9665f..0b28857e 100644 --- a/rbi/openai/resources/files.rbi +++ b/rbi/openai/resources/files.rbi @@ -5,7 +5,7 @@ module OpenAI class Files # Upload a file that can be used across various endpoints. Individual files can be # up to 512 MB, and the size of all files uploaded by one organization can be up - # to 100 GB. + # to 1 TB. # # The Assistants API supports files up to 2 million tokens and of specific file # types. See the @@ -28,6 +28,7 @@ module OpenAI params( file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::FileCreateParams::ExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::FileObject) end @@ -39,6 +40,9 @@ module OpenAI # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets purpose:, + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. + expires_after: nil, request_options: {} ) end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 0256338b..b95dead9 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -53,7 +53,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseTextConfig::OrHash, + text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -160,7 +160,7 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -179,9 +179,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -198,11 +197,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. 
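Alongside the raised 1 TB per-organization storage cap, `files.create` gains the same optional `expires_after` policy. A hedged sketch assuming a local `batch_input.jsonl` exists; `Pathname` is assumed to be among the accepted file inputs:

```ruby
require "openai"
require "pathname"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

# Batch-purpose files default to a 30-day expiry; this overrides it to
# seven days (604_800 seconds).
file = client.files.create(
  file: Pathname("batch_input.jsonl"),
  purpose: :batch,
  expires_after: {anchor: :created_at, seconds: 604_800}
)

puts file.id
```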
temperature: nil, - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model @@ -302,7 +296,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseTextConfig::OrHash, + text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -413,7 +407,7 @@ module OpenAI # hit rates. Replaces the `user` field. # [Learn more](https://platform.openai.com/docs/guides/prompt-caching). prompt_cache_key: nil, - # **o-series models only** + # **gpt-5 and o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). @@ -432,9 +426,8 @@ module OpenAI # - If set to 'default', then the request will be processed with the standard # pricing and performance for the selected model. # - If set to '[flex](https://platform.openai.com/docs/guides/flex-processing)' or - # 'priority', then the request will be processed with the corresponding service - # tier. [Contact sales](https://openai.com/contact-sales) to learn more about - # Priority processing. + # '[priority](https://openai.com/api-priority-processing/)', then the request + # will be processed with the corresponding service tier. # - When not set, the default behavior is 'auto'. # # When the `service_tier` parameter is set, the response body will include the @@ -451,11 +444,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, - # Configuration options for a text response from the model. Can be plain text or - # structured JSON data. Learn more: - # - # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model diff --git a/rbi/openai/resources/uploads.rbi b/rbi/openai/resources/uploads.rbi index eef3396f..1a0f93c2 100644 --- a/rbi/openai/resources/uploads.rbi +++ b/rbi/openai/resources/uploads.rbi @@ -31,6 +31,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::FilePurpose::OrSymbol, + expires_after: OpenAI::UploadCreateParams::ExpiresAfter::OrHash, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Upload) end @@ -49,6 +50,9 @@ module OpenAI # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). purpose:, + # The expiration policy for a file. By default, files with `purpose=batch` expire + # after 30 days and all other files are persisted until they are manually deleted. 
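Because `Response#text` echoes the text configuration a request used, the retyping above is observable when reading the field back. A sketch under the same client assumptions; whether the reader returns `Response::Text` (this commit) or `ResponseTextConfig` (after the shape revert later in this series) depends on the SDK version:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.responses.create(
  model: "gpt-5",
  input: "Reply with a single word.",
  text: {verbosity: :low}
)

# The text config round-trips on the response; `verbosity` may be nil when
# the server applied its default.
puts response.text&.verbosity
```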
+ expires_after: nil, request_options: {} ) end diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index 85193e35..3b5ff7a8 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -5,7 +5,8 @@ module OpenAI completion_window: OpenAI::Models::BatchCreateParams::completion_window, endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, - metadata: OpenAI::Models::metadata? + metadata: OpenAI::Models::metadata?, + output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter } & OpenAI::Internal::Type::request_parameters @@ -21,11 +22,18 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? + attr_reader output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter? + + def output_expires_after=: ( + OpenAI::BatchCreateParams::OutputExpiresAfter + ) -> OpenAI::BatchCreateParams::OutputExpiresAfter + def initialize: ( completion_window: OpenAI::Models::BatchCreateParams::completion_window, endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, ?metadata: OpenAI::Models::metadata?, + ?output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter, ?request_options: OpenAI::request_opts ) -> void @@ -34,6 +42,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, metadata: OpenAI::Models::metadata?, + output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter, request_options: OpenAI::RequestOptions } @@ -63,6 +72,18 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::BatchCreateParams::endpoint] end + + type output_expires_after = { anchor: :created_at, seconds: Integer } + + class OutputExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :created_at + + attr_accessor seconds: Integer + + def initialize: (seconds: Integer, ?anchor: :created_at) -> void + + def to_hash: -> { anchor: :created_at, seconds: Integer } + end end end end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index e02095c4..d6328830 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -29,6 +29,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, + text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, @@ -113,6 +114,12 @@ module OpenAI attr_accessor temperature: Float? + attr_reader text: OpenAI::Chat::CompletionCreateParams::Text? + + def text=: ( + OpenAI::Chat::CompletionCreateParams::Text + ) -> OpenAI::Chat::CompletionCreateParams::Text + attr_reader tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option? 
def tool_choice=: ( @@ -168,6 +175,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, + ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, @@ -205,6 +213,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, + text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, @@ -323,6 +332,35 @@ module OpenAI StringArray: OpenAI::Internal::Type::Converter end + type text = + { + verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_accessor verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? + + def initialize: ( + ?verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? + ) -> void + + def to_hash: -> { + verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? + } + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity] + end + end + type verbosity = :low | :medium | :high module Verbosity diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 2abf1615..90ff00c6 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -3,7 +3,8 @@ module OpenAI type file_create_params = { file: OpenAI::Internal::file_input, - purpose: OpenAI::Models::file_purpose + purpose: OpenAI::Models::file_purpose, + expires_after: OpenAI::FileCreateParams::ExpiresAfter } & OpenAI::Internal::Type::request_parameters @@ -15,17 +16,37 @@ module OpenAI attr_accessor purpose: OpenAI::Models::file_purpose + attr_reader expires_after: OpenAI::FileCreateParams::ExpiresAfter? 
+ + def expires_after=: ( + OpenAI::FileCreateParams::ExpiresAfter + ) -> OpenAI::FileCreateParams::ExpiresAfter + def initialize: ( file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, + ?expires_after: OpenAI::FileCreateParams::ExpiresAfter, ?request_options: OpenAI::request_opts ) -> void def to_hash: -> { file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, + expires_after: OpenAI::FileCreateParams::ExpiresAfter, request_options: OpenAI::RequestOptions } + + type expires_after = { anchor: :created_at, seconds: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :created_at + + attr_accessor seconds: Integer + + def initialize: (seconds: Integer, ?anchor: :created_at) -> void + + def to_hash: -> { anchor: :created_at, seconds: Integer } + end end end end diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 6bf78a3c..34a35148 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -27,7 +27,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::Response::Text, top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, @@ -91,11 +91,11 @@ module OpenAI OpenAI::Models::Responses::response_status ) -> OpenAI::Models::Responses::response_status - attr_reader text: OpenAI::Responses::ResponseTextConfig? + attr_reader text: OpenAI::Responses::Response::Text? def text=: ( - OpenAI::Responses::ResponseTextConfig - ) -> OpenAI::Responses::ResponseTextConfig + OpenAI::Responses::Response::Text + ) -> OpenAI::Responses::Response::Text attr_accessor top_logprobs: Integer? @@ -135,7 +135,7 @@ module OpenAI ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, - ?text: OpenAI::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::Response::Text, ?top_logprobs: Integer?, ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, @@ -168,7 +168,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::Response::Text, top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, @@ -246,6 +246,44 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::Response::Text::verbosity? + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? + + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + attr_accessor verbosity: OpenAI::Models::Responses::Response::Text::verbosity? + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config, + ?verbosity: OpenAI::Models::Responses::Response::Text::verbosity? 
+ ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::Response::Text::verbosity? + } + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::Text::verbosity] + end + end + type truncation = :auto | :disabled module Truncation diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 83f641b4..9abee74d 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -21,7 +21,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::ResponseCreateParams::Text, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_logprobs: Integer?, @@ -83,11 +83,11 @@ module OpenAI attr_accessor temperature: Float? - attr_reader text: OpenAI::Responses::ResponseTextConfig? + attr_reader text: OpenAI::Responses::ResponseCreateParams::Text? def text=: ( - OpenAI::Responses::ResponseTextConfig - ) -> OpenAI::Responses::ResponseTextConfig + OpenAI::Responses::ResponseCreateParams::Text + ) -> OpenAI::Responses::ResponseCreateParams::Text attr_reader tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice? @@ -130,7 +130,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::ResponseCreateParams::Text, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, @@ -159,7 +159,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, - text: OpenAI::Responses::ResponseTextConfig, + text: OpenAI::Responses::ResponseCreateParams::Text, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_logprobs: Integer?, @@ -203,6 +203,44 @@ module OpenAI def to_hash: -> { include_obfuscation: bool } end + type text = + { + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? + } + + class Text < OpenAI::Internal::Type::BaseModel + attr_reader format_: OpenAI::Models::Responses::response_format_text_config? + + def format_=: ( + OpenAI::Models::Responses::response_format_text_config + ) -> OpenAI::Models::Responses::response_format_text_config + + attr_accessor verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? + + def initialize: ( + ?format_: OpenAI::Models::Responses::response_format_text_config, + ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? + ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config, + verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? 
+ } + + type verbosity = :low | :medium | :high + + module Verbosity + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity] + end + end + type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Responses::ToolChoiceAllowed diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index 85f45a52..f9f767fb 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -5,7 +5,8 @@ module OpenAI bytes: Integer, filename: String, mime_type: String, - purpose: OpenAI::Models::file_purpose + purpose: OpenAI::Models::file_purpose, + expires_after: OpenAI::UploadCreateParams::ExpiresAfter } & OpenAI::Internal::Type::request_parameters @@ -21,11 +22,18 @@ module OpenAI attr_accessor purpose: OpenAI::Models::file_purpose + attr_reader expires_after: OpenAI::UploadCreateParams::ExpiresAfter? + + def expires_after=: ( + OpenAI::UploadCreateParams::ExpiresAfter + ) -> OpenAI::UploadCreateParams::ExpiresAfter + def initialize: ( bytes: Integer, filename: String, mime_type: String, purpose: OpenAI::Models::file_purpose, + ?expires_after: OpenAI::UploadCreateParams::ExpiresAfter, ?request_options: OpenAI::request_opts ) -> void @@ -34,8 +42,21 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::file_purpose, + expires_after: OpenAI::UploadCreateParams::ExpiresAfter, request_options: OpenAI::RequestOptions } + + type expires_after = { anchor: :created_at, seconds: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :created_at + + attr_accessor seconds: Integer + + def initialize: (seconds: Integer, ?anchor: :created_at) -> void + + def to_hash: -> { anchor: :created_at, seconds: Integer } + end end end end diff --git a/sig/openai/resources/batches.rbs b/sig/openai/resources/batches.rbs index b5382fa1..ca6f761c 100644 --- a/sig/openai/resources/batches.rbs +++ b/sig/openai/resources/batches.rbs @@ -6,6 +6,7 @@ module OpenAI endpoint: OpenAI::Models::BatchCreateParams::endpoint, input_file_id: String, ?metadata: OpenAI::Models::metadata?, + ?output_expires_after: OpenAI::BatchCreateParams::OutputExpiresAfter, ?request_options: OpenAI::request_opts ) -> OpenAI::Batch diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index a4237ff1..0634d0eb 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -31,6 +31,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, + ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, @@ -68,6 +69,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, + ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index 49435b99..759285d0 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -4,6 +4,7 @@ module OpenAI def create: ( file: 
OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, + ?expires_after: OpenAI::FileCreateParams::ExpiresAfter, ?request_options: OpenAI::request_opts ) -> OpenAI::FileObject diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 6e926188..819df21f 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -22,7 +22,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::ResponseCreateParams::Text, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, @@ -51,7 +51,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseTextConfig, + ?text: OpenAI::Responses::ResponseCreateParams::Text, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, diff --git a/sig/openai/resources/uploads.rbs b/sig/openai/resources/uploads.rbs index 50996546..28663206 100644 --- a/sig/openai/resources/uploads.rbs +++ b/sig/openai/resources/uploads.rbs @@ -8,6 +8,7 @@ module OpenAI filename: String, mime_type: String, purpose: OpenAI::Models::file_purpose, + ?expires_after: OpenAI::UploadCreateParams::ExpiresAfter, ?request_options: OpenAI::request_opts ) -> OpenAI::Upload diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 28d1d7e4..cf4ce20f 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -36,7 +36,7 @@ def test_create safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::ResponseTextConfig | nil, + text: OpenAI::Responses::Response::Text | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, @@ -78,7 +78,7 @@ def test_retrieve safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::ResponseTextConfig | nil, + text: OpenAI::Responses::Response::Text | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, @@ -128,7 +128,7 @@ def test_cancel safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::ResponseTextConfig | nil, + text: OpenAI::Responses::Response::Text | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, From f05e85fb0c282a07416d5d7433b9e966d1450c2c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 15:02:40 +0000 Subject: [PATCH 285/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 
463488b6..4ad3fef3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.17.1" + ".": "0.18.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index bceb047e..ef7ae9b4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.17.1) + openai (0.18.0) connection_pool GEM diff --git a/README.md b/README.md index 27c5dc78..abbb9c27 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.17.1" +gem "openai", "~> 0.18.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 32d93453..a326627c 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.17.1" + VERSION = "0.18.0" end From 3796618b60da727467f4777dbb8d675f23bbea8f Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 13:48:57 +0000 Subject: [PATCH 286/295] chore(api): accurately represent shape for verbosity on Chat Completions --- .stats.yml | 6 +- .../models/chat/completion_create_params.rb | 43 +---- .../models/graders/text_similarity_grader.rb | 11 +- lib/openai/models/responses/response.rb | 64 +------ .../responses/response_create_params.rb | 67 +------ lib/openai/resources/chat/completions.rb | 8 +- lib/openai/resources/responses.rb | 4 +- .../models/chat/completion_create_params.rbi | 108 +---------- .../models/graders/text_similarity_grader.rbi | 17 +- rbi/openai/models/responses/response.rbi | 161 ++-------------- .../responses/response_create_params.rbi | 181 ++---------------- rbi/openai/resources/chat/completions.rbi | 8 +- rbi/openai/resources/responses.rbi | 14 +- .../models/chat/completion_create_params.rbs | 38 ---- .../models/graders/text_similarity_grader.rbs | 4 +- sig/openai/models/responses/response.rbs | 50 +---- .../responses/response_create_params.rbs | 50 +---- sig/openai/resources/chat/completions.rbs | 2 - sig/openai/resources/responses.rbs | 4 +- test/openai/resources/responses_test.rb | 6 +- 20 files changed, 106 insertions(+), 740 deletions(-) diff --git a/.stats.yml b/.stats.yml index ce30bcee..71c2d79d 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-24be531010b354303d741fc9247c1f84f75978f9f7de68aca92cb4f240a04722.yml -openapi_spec_hash: 3e46f439f6a863beadc71577eb4efa15 -config_hash: ed87b9139ac595a04a2162d754df2fed +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-7ef7a457c3bf05364e66e48c9ca34f31bfef1f6c9b7c15b1812346105e0abb16.yml +openapi_spec_hash: a2b1f5d8fbb62175c93b0ebea9f10063 +config_hash: 76afa3236f36854a8705f1281b1990b8 diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 19321125..31eaede1 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -272,7 +272,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. # - # Supports text and image inputs. Note: image inputs over 10MB will be dropped. + # Supports text and image inputs. 
Note: image inputs over 8MB will be dropped. # # @return [Boolean, nil] optional :store, OpenAI::Internal::Type::Boolean, nil?: true @@ -292,11 +292,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute text - # - # @return [OpenAI::Models::Chat::CompletionCreateParams::Text, nil] - optional :text, -> { OpenAI::Chat::CompletionCreateParams::Text } - # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can @@ -365,7 +360,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # @@ -421,8 +416,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] - # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. You can provide either @@ -627,38 +620,6 @@ module Stop StringArray = OpenAI::Internal::Type::ArrayOf[String] end - class Text < OpenAI::Internal::Type::BaseModel - # @!attribute verbosity - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
- # - # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Text::Verbosity, nil] - optional :verbosity, enum: -> { OpenAI::Chat::CompletionCreateParams::Text::Verbosity }, nil?: true - - # @!method initialize(verbosity: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Chat::CompletionCreateParams::Text} for more details. - # - # @param verbosity [Symbol, OpenAI::Models::Chat::CompletionCreateParams::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @see OpenAI::Models::Chat::CompletionCreateParams::Text#verbosity - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW = :low - MEDIUM = :medium - HIGH = :high - - # @!method self.values - # @return [Array] - end - end - # Constrains the verbosity of the model's response. Lower values will result in # more concise responses, while higher values will result in more verbose # responses. Currently supported values are `low`, `medium`, and `high`. diff --git a/lib/openai/models/graders/text_similarity_grader.rb b/lib/openai/models/graders/text_similarity_grader.rb index a64e3314..8cc51499 100644 --- a/lib/openai/models/graders/text_similarity_grader.rb +++ b/lib/openai/models/graders/text_similarity_grader.rb @@ -5,8 +5,8 @@ module Models module Graders class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @!attribute evaluation_metric - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # # @return [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] required :evaluation_metric, enum: -> { OpenAI::Graders::TextSimilarityGrader::EvaluationMetric } @@ -41,7 +41,7 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @param evaluation_metric [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r + # @param evaluation_metric [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, # # @param input [String] The text being graded. # @@ -51,13 +51,14 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :text_similarity] The type of grader. - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. 
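`cosine` joins the fuzzy-match, BLEU/GLEU/METEOR, and ROUGE metrics the grader already supported. A minimal sketch of constructing the model directly; the `{{...}}` strings are illustrative templates in the style the grader docs use for sample output and reference text:

```ruby
require "openai"

# Grade model output against a reference answer by cosine similarity.
grader = OpenAI::Graders::TextSimilarityGrader.new(
  evaluation_metric: :cosine,
  input: "{{sample.output_text}}",  # text being graded (placeholder template)
  reference: "{{item.reference}}",  # ground-truth text (placeholder template)
  name: "cosine_similarity_grader"
)

pp grader.to_hash
```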
# # @see OpenAI::Models::Graders::TextSimilarityGrader#evaluation_metric module EvaluationMetric extend OpenAI::Internal::Type::Enum + COSINE = :cosine FUZZY_MATCH = :fuzzy_match BLEU = :bleu GLEU = :gleu diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 917bcc19..8dbd7a4b 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -229,9 +229,14 @@ class Response < OpenAI::Internal::Type::BaseModel optional :status, enum: -> { OpenAI::Responses::ResponseStatus } # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: # - # @return [OpenAI::Models::Responses::Response::Text, nil] - optional :text, -> { OpenAI::Responses::Response::Text } + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] + optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to @@ -321,7 +326,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, # - # @param text [OpenAI::Models::Responses::Response::Text] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -455,59 +460,6 @@ module ServiceTier # @return [Array] end - # @see OpenAI::Models::Responses::Response#text - class Text < OpenAI::Internal::Type::BaseModel - # @!attribute format_ - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. - # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - # - # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] - optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format - - # @!attribute verbosity - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @return [Symbol, OpenAI::Models::Responses::Response::Text::Verbosity, nil] - optional :verbosity, enum: -> { OpenAI::Responses::Response::Text::Verbosity }, nil?: true - - # @!method initialize(format_: nil, verbosity: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::Response::Text} for more details. 
- # - # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. - # - # @param verbosity [Symbol, OpenAI::Models::Responses::Response::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @see OpenAI::Models::Responses::Response::Text#verbosity - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW = :low - MEDIUM = :medium - HIGH = :high - - # @!method self.values - # @return [Array] - end - end - # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 49515075..7161a8d4 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -193,9 +193,14 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel optional :temperature, Float, nil?: true # @!attribute text + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: # - # @return [OpenAI::Models::Responses::ResponseCreateParams::Text, nil] - optional :text, -> { OpenAI::Responses::ResponseCreateParams::Text } + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] + optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a @@ -307,7 +312,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # @@ -398,62 +403,6 @@ class StreamOptions < OpenAI::Internal::Type::BaseModel # @param include_obfuscation [Boolean] When true, stream obfuscation will be enabled. Stream obfuscation adds end - class Text < OpenAI::Internal::Type::BaseModel - # @!attribute format_ - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. 
- # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - # - # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] - optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format - - # @!attribute verbosity - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Text::Verbosity, nil] - optional :verbosity, - enum: -> { - OpenAI::Responses::ResponseCreateParams::Text::Verbosity - }, - nil?: true - - # @!method initialize(format_: nil, verbosity: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Models::Responses::ResponseCreateParams::Text} for more details. - # - # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. - # - # @param verbosity [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Text::Verbosity, nil] Constrains the verbosity of the model's response. Lower values will result in - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - # - # @see OpenAI::Models::Responses::ResponseCreateParams::Text#verbosity - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW = :low - MEDIUM = :medium - HIGH = :high - - # @!method self.values - # @return [Array] - end - end - # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 1b326dce..d6be9e1e 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -30,7 +30,7 @@ class Completions # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). 
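With this final commit, Chat Completions takes `verbosity` as a top-level parameter instead of the nested `text.verbosity` shape being deleted above. A hedged sketch of a non-streaming call under the usual client assumptions; `service_tier: :priority` matches the processing-tier wording revised throughout this series:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

completion = client.chat.completions.create(
  model: "gpt-5",
  messages: [{role: :user, content: "Explain HTTP caching briefly."}],
  verbosity: :low,         # top-level; the nested text.verbosity shape was dropped
  service_tier: :priority  # ask for priority processing where available
)

puts completion.choices.first.message.content
```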
# - # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # @@ -84,8 +84,6 @@ class Completions # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] - # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. You can provide either @@ -143,7 +141,7 @@ def create(params) # unsupported parameters in reasoning models, # [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning). # - # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) + # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, prompt_cache_key: nil, reasoning_effort: nil, response_format: nil, safety_identifier: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, verbosity: nil, web_search_options: nil, request_options: {}) # # @param messages [Array] A list of messages comprising the conversation so far. 
Depending on the # @@ -197,8 +195,6 @@ def create(params) # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Chat::CompletionCreateParams::Text] - # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionAllowedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, OpenAI::Models::Chat::ChatCompletionNamedToolChoiceCustom] Controls which (if any) tool is called by the model. # # @param tools [Array] A list of tools the model may call. You can provide either diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 6ec8f719..150220b3 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -61,7 +61,7 @@ class Responses # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # @@ -150,7 +150,7 @@ def create(params = {}) # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Models::Responses::ResponseCreateParams::Text] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index ae6dd9ce..9d7cf007 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -327,7 +327,7 @@ module OpenAI # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. # - # Supports text and image inputs. Note: image inputs over 10MB will be dropped. + # Supports text and image inputs. Note: image inputs over 8MB will be dropped. sig { returns(T.nilable(T::Boolean)) } attr_accessor :store @@ -350,14 +350,6 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :temperature - sig { returns(T.nilable(OpenAI::Chat::CompletionCreateParams::Text)) } - attr_reader :text - - sig do - params(text: OpenAI::Chat::CompletionCreateParams::Text::OrHash).void - end - attr_writer :text - # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. 
`required` means @@ -536,7 +528,6 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -721,7 +712,7 @@ module OpenAI # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. # - # Supports text and image inputs. Note: image inputs over 10MB will be dropped. + # Supports text and image inputs. Note: image inputs over 8MB will be dropped. store: nil, # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, @@ -730,7 +721,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, - text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -833,7 +823,6 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), temperature: T.nilable(Float), - text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -1180,99 +1169,6 @@ module OpenAI ) end - class Text < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Chat::CompletionCreateParams::Text, - OpenAI::Internal::AnyHash - ) - end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - sig do - returns( - T.nilable( - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol - ) - ) - end - attr_accessor :verbosity - - sig do - params( - verbosity: - T.nilable( - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol - ) - ).returns(T.attached_class) - end - def self.new( - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil - ) - end - - sig do - override.returns( - { - verbosity: - T.nilable( - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::OrSymbol - ) - } - ) - end - def to_hash - end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
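The enum being deleted here is not lost: chat completions keep verbosity as the documented top-level parameter rather than a nested `text:` object. A hedged sketch of the surviving call shape, reusing the `client` from the earlier sketch (the model name is a placeholder, and verbosity support is model-dependent):

```ruby
chat = client.chat.completions.create(
  model: "gpt-5", # placeholder; verbosity requires a model that supports it
  messages: [{role: :user, content: "Explain HTTP keep-alive in two sentences."}],
  verbosity: :low # top-level param; the removed text: {verbosity: ...} shape is gone
)
puts chat.choices.first.message.content
```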
- module Verbosity - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Chat::CompletionCreateParams::Text::Verbosity - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - LOW = - T.let( - :low, - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol - ) - MEDIUM = - T.let( - :medium, - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol - ) - HIGH = - T.let( - :high, - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Chat::CompletionCreateParams::Text::Verbosity::TaggedSymbol - ] - ) - end - def self.values - end - end - end - # Constrains the verbosity of the model's response. Lower values will result in # more concise responses, while higher values will result in more verbose # responses. Currently supported values are `low`, `medium`, and `high`. diff --git a/rbi/openai/models/graders/text_similarity_grader.rbi b/rbi/openai/models/graders/text_similarity_grader.rbi index 687e2f93..389c8b3b 100644 --- a/rbi/openai/models/graders/text_similarity_grader.rbi +++ b/rbi/openai/models/graders/text_similarity_grader.rbi @@ -14,8 +14,8 @@ module OpenAI ) end - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. sig do returns( OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::OrSymbol @@ -51,8 +51,8 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. evaluation_metric:, # The text being graded. input:, @@ -80,8 +80,8 @@ module OpenAI def to_hash end - # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, - # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. + # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, + # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. module EvaluationMetric extend OpenAI::Internal::Type::Enum @@ -94,6 +94,11 @@ module OpenAI end OrSymbol = T.type_alias { T.any(Symbol, String) } + COSINE = + T.let( + :cosine, + OpenAI::Graders::TextSimilarityGrader::EvaluationMetric::TaggedSymbol + ) FUZZY_MATCH = T.let( :fuzzy_match, diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 8a36dab9..94087328 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -230,10 +230,15 @@ module OpenAI sig { params(status: OpenAI::Responses::ResponseStatus::OrSymbol).void } attr_writer :status - sig { returns(T.nilable(OpenAI::Responses::Response::Text)) } + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } attr_reader :text - sig { params(text: OpenAI::Responses::Response::Text::OrHash).void } + sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } attr_writer :text # An integer between 0 and 20 specifying the number of most likely tokens to @@ -345,7 +350,7 @@ module OpenAI service_tier: T.nilable(OpenAI::Responses::Response::ServiceTier::OrSymbol), status: OpenAI::Responses::ResponseStatus::OrSymbol, - text: OpenAI::Responses::Response::Text::OrHash, + text: OpenAI::Responses::ResponseTextConfig::OrHash, top_logprobs: T.nilable(Integer), truncation: T.nilable(OpenAI::Responses::Response::Truncation::OrSymbol), @@ -478,6 +483,11 @@ module OpenAI # The status of the response generation. One of `completed`, `failed`, # `in_progress`, `cancelled`, `queued`, or `incomplete`. status: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -536,7 +546,7 @@ module OpenAI OpenAI::Responses::Response::ServiceTier::TaggedSymbol ), status: OpenAI::Responses::ResponseStatus::TaggedSymbol, - text: OpenAI::Responses::Response::Text, + text: OpenAI::Responses::ResponseTextConfig, top_logprobs: T.nilable(Integer), truncation: T.nilable( @@ -751,149 +761,6 @@ module OpenAI end end - class Text < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::Response::Text, - OpenAI::Internal::AnyHash - ) - end - - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. - # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - sig do - returns( - T.nilable(OpenAI::Responses::ResponseFormatTextConfig::Variants) - ) - end - attr_reader :format_ - - sig do - params( - format_: - T.any( - OpenAI::ResponseFormatText::OrHash, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, - OpenAI::ResponseFormatJSONObject::OrHash - ) - ).void - end - attr_writer :format_ - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
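Stepping back from the `Text` removal for a moment: the `TextSimilarityGrader` hunks earlier in this patch add `cosine` to the accepted evaluation metrics. A hedged sketch of constructing the grader with the new value; `name` and `reference` follow the model's other documented fields, and the template strings are placeholders:

```ruby
grader = OpenAI::Graders::TextSimilarityGrader.new(
  name: "semantic_similarity",           # assumed field, per the model's siblings
  evaluation_metric: :cosine,            # newly accepted alongside :fuzzy_match, :bleu, etc.
  input: "{{sample.output_text}}",       # the text being graded
  reference: "{{item.reference_answer}}" # the text it is compared against
)
```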
- sig do - returns( - T.nilable( - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ) - ) - end - attr_accessor :verbosity - - sig do - params( - format_: - T.any( - OpenAI::ResponseFormatText::OrHash, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, - OpenAI::ResponseFormatJSONObject::OrHash - ), - verbosity: - T.nilable( - OpenAI::Responses::Response::Text::Verbosity::OrSymbol - ) - ).returns(T.attached_class) - end - def self.new( - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. - # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - format_: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil - ) - end - - sig do - override.returns( - { - format_: OpenAI::Responses::ResponseFormatTextConfig::Variants, - verbosity: - T.nilable( - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ) - } - ) - end - def to_hash - end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - module Verbosity - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all(Symbol, OpenAI::Responses::Response::Text::Verbosity) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - LOW = - T.let( - :low, - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ) - MEDIUM = - T.let( - :medium, - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ) - HIGH = - T.let( - :high, - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Responses::Response::Text::Verbosity::TaggedSymbol - ] - ) - end - def self.values - end - end - end - # The truncation strategy to use for the model response. # # - `auto`: If the context of this response and previous ones exceeds the model's diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index cdd0ea9b..850cfe82 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -232,16 +232,15 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :temperature - sig do - returns(T.nilable(OpenAI::Responses::ResponseCreateParams::Text)) - end + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + sig { returns(T.nilable(OpenAI::Responses::ResponseTextConfig)) } attr_reader :text - sig do - params( - text: OpenAI::Responses::ResponseCreateParams::Text::OrHash - ).void - end + sig { params(text: OpenAI::Responses::ResponseTextConfig::OrHash).void } attr_writer :text # How the model should select which tool (or tools) to use when generating a @@ -409,7 +408,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, + text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -552,6 +551,11 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model @@ -636,7 +640,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseCreateParams::Text, + text: OpenAI::Responses::ResponseTextConfig, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -809,163 +813,6 @@ module OpenAI end end - class Text < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseCreateParams::Text, - OpenAI::Internal::AnyHash - ) - end - - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. - # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - sig do - returns( - T.nilable( - T.any( - OpenAI::ResponseFormatText, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::ResponseFormatJSONObject - ) - ) - ) - end - attr_reader :format_ - - sig do - params( - format_: - T.any( - OpenAI::ResponseFormatText::OrHash, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, - OpenAI::ResponseFormatJSONObject::OrHash - ) - ).void - end - attr_writer :format_ - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. 
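The `format_` member being removed here lives on in the shared config, so Structured Outputs requests keep the same hash shape. A hedged sketch against the shared config (schema, names, and the `output_text` convenience helper are assumptions for illustration):

```ruby
require "json"

response = client.responses.create(
  model: "gpt-4.1", # placeholder model name
  input: "Extract the city and country from: 'I flew to Lisbon, Portugal.'",
  text: {
    format: {
      type: :json_schema,
      name: "place_extraction",
      strict: true,
      schema: {
        type: "object",
        properties: {city: {type: "string"}, country: {type: "string"}},
        required: %w[city country],
        additionalProperties: false
      }
    }
  }
)
place = JSON.parse(response.output_text) # output_text is assumed from the SDK's helpers
```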
- sig do - returns( - T.nilable( - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol - ) - ) - end - attr_accessor :verbosity - - sig do - params( - format_: - T.any( - OpenAI::ResponseFormatText::OrHash, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig::OrHash, - OpenAI::ResponseFormatJSONObject::OrHash - ), - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol - ) - ).returns(T.attached_class) - end - def self.new( - # An object specifying the format that the model must output. - # - # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which - # ensures the model will match your supplied JSON schema. Learn more in the - # [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). - # - # The default format is `{ "type": "text" }` with no additional options. - # - # **Not recommended for gpt-4o and newer models:** - # - # Setting to `{ "type": "json_object" }` enables the older JSON mode, which - # ensures the message the model generates is valid JSON. Using `json_schema` is - # preferred for models that support it. - format_: nil, - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - verbosity: nil - ) - end - - sig do - override.returns( - { - format_: - T.any( - OpenAI::ResponseFormatText, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::ResponseFormatJSONObject - ), - verbosity: - T.nilable( - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::OrSymbol - ) - } - ) - end - def to_hash - end - - # Constrains the verbosity of the model's response. Lower values will result in - # more concise responses, while higher values will result in more verbose - # responses. Currently supported values are `low`, `medium`, and `high`. - module Verbosity - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Responses::ResponseCreateParams::Text::Verbosity - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - LOW = - T.let( - :low, - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol - ) - MEDIUM = - T.let( - :medium, - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol - ) - HIGH = - T.let( - :high, - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Responses::ResponseCreateParams::Text::Verbosity::TaggedSymbol - ] - ) - end - def self.values - end - end - end - # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index f14e5987..72cdcd20 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -85,7 +85,6 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -271,7 +270,7 @@ module OpenAI # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. 
# - # Supports text and image inputs. Note: image inputs over 10MB will be dropped. + # Supports text and image inputs. Note: image inputs over 8MB will be dropped. store: nil, # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, @@ -280,7 +279,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, - text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means @@ -404,7 +402,6 @@ module OpenAI stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), temperature: T.nilable(Float), - text: OpenAI::Chat::CompletionCreateParams::Text::OrHash, tool_choice: T.any( OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, @@ -590,7 +587,7 @@ module OpenAI # our [model distillation](https://platform.openai.com/docs/guides/distillation) # or [evals](https://platform.openai.com/docs/guides/evals) products. # - # Supports text and image inputs. Note: image inputs over 10MB will be dropped. + # Supports text and image inputs. Note: image inputs over 8MB will be dropped. store: nil, # Options for streaming response. Only set this when you set `stream: true`. stream_options: nil, @@ -599,7 +596,6 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, - text: nil, # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. `required` means diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index b95dead9..adad3d32 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -53,7 +53,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, + text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -197,6 +197,11 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model @@ -296,7 +301,7 @@ module OpenAI OpenAI::Responses::ResponseCreateParams::StreamOptions::OrHash ), temperature: T.nilable(Float), - text: OpenAI::Responses::ResponseCreateParams::Text::OrHash, + text: OpenAI::Responses::ResponseTextConfig::OrHash, tool_choice: T.any( OpenAI::Responses::ToolChoiceOptions::OrSymbol, @@ -444,6 +449,11 @@ module OpenAI # focused and deterministic. We generally recommend altering this or `top_p` but # not both. temperature: nil, + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. 
Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) text: nil, # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index d6328830..e02095c4 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -29,7 +29,6 @@ module OpenAI store: bool?, stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, - text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, @@ -114,12 +113,6 @@ module OpenAI attr_accessor temperature: Float? - attr_reader text: OpenAI::Chat::CompletionCreateParams::Text? - - def text=: ( - OpenAI::Chat::CompletionCreateParams::Text - ) -> OpenAI::Chat::CompletionCreateParams::Text - attr_reader tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option? def tool_choice=: ( @@ -175,7 +168,6 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, - ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, @@ -213,7 +205,6 @@ module OpenAI store: bool?, stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, temperature: Float?, - text: OpenAI::Chat::CompletionCreateParams::Text, tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], top_logprobs: Integer?, @@ -332,35 +323,6 @@ module OpenAI StringArray: OpenAI::Internal::Type::Converter end - type text = - { - verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? - } - - class Text < OpenAI::Internal::Type::BaseModel - attr_accessor verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? - - def initialize: ( - ?verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? - ) -> void - - def to_hash: -> { - verbosity: OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity? 
- } - - type verbosity = :low | :medium | :high - - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW: :low - MEDIUM: :medium - HIGH: :high - - def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::Text::verbosity] - end - end - type verbosity = :low | :medium | :high module Verbosity diff --git a/sig/openai/models/graders/text_similarity_grader.rbs b/sig/openai/models/graders/text_similarity_grader.rbs index 9002b540..e1da4b35 100644 --- a/sig/openai/models/graders/text_similarity_grader.rbs +++ b/sig/openai/models/graders/text_similarity_grader.rbs @@ -40,7 +40,8 @@ module OpenAI } type evaluation_metric = - :fuzzy_match + :cosine + | :fuzzy_match | :bleu | :gleu | :meteor @@ -54,6 +55,7 @@ module OpenAI module EvaluationMetric extend OpenAI::Internal::Type::Enum + COSINE: :cosine FUZZY_MATCH: :fuzzy_match BLEU: :bleu GLEU: :gleu diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 34a35148..6bf78a3c 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -27,7 +27,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, - text: OpenAI::Responses::Response::Text, + text: OpenAI::Responses::ResponseTextConfig, top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, @@ -91,11 +91,11 @@ module OpenAI OpenAI::Models::Responses::response_status ) -> OpenAI::Models::Responses::response_status - attr_reader text: OpenAI::Responses::Response::Text? + attr_reader text: OpenAI::Responses::ResponseTextConfig? def text=: ( - OpenAI::Responses::Response::Text - ) -> OpenAI::Responses::Response::Text + OpenAI::Responses::ResponseTextConfig + ) -> OpenAI::Responses::ResponseTextConfig attr_accessor top_logprobs: Integer? @@ -135,7 +135,7 @@ module OpenAI ?safety_identifier: String, ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, - ?text: OpenAI::Responses::Response::Text, + ?text: OpenAI::Responses::ResponseTextConfig, ?top_logprobs: Integer?, ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, @@ -168,7 +168,7 @@ module OpenAI safety_identifier: String, service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, - text: OpenAI::Responses::Response::Text, + text: OpenAI::Responses::ResponseTextConfig, top_logprobs: Integer?, truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, @@ -246,44 +246,6 @@ module OpenAI def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end - type text = - { - format_: OpenAI::Models::Responses::response_format_text_config, - verbosity: OpenAI::Models::Responses::Response::Text::verbosity? - } - - class Text < OpenAI::Internal::Type::BaseModel - attr_reader format_: OpenAI::Models::Responses::response_format_text_config? - - def format_=: ( - OpenAI::Models::Responses::response_format_text_config - ) -> OpenAI::Models::Responses::response_format_text_config - - attr_accessor verbosity: OpenAI::Models::Responses::Response::Text::verbosity? - - def initialize: ( - ?format_: OpenAI::Models::Responses::response_format_text_config, - ?verbosity: OpenAI::Models::Responses::Response::Text::verbosity? 
- ) -> void - - def to_hash: -> { - format_: OpenAI::Models::Responses::response_format_text_config, - verbosity: OpenAI::Models::Responses::Response::Text::verbosity? - } - - type verbosity = :low | :medium | :high - - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW: :low - MEDIUM: :medium - HIGH: :high - - def self?.values: -> ::Array[OpenAI::Models::Responses::Response::Text::verbosity] - end - end - type truncation = :auto | :disabled module Truncation diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 9abee74d..83f641b4 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -21,7 +21,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, - text: OpenAI::Responses::ResponseCreateParams::Text, + text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_logprobs: Integer?, @@ -83,11 +83,11 @@ module OpenAI attr_accessor temperature: Float? - attr_reader text: OpenAI::Responses::ResponseCreateParams::Text? + attr_reader text: OpenAI::Responses::ResponseTextConfig? def text=: ( - OpenAI::Responses::ResponseCreateParams::Text - ) -> OpenAI::Responses::ResponseCreateParams::Text + OpenAI::Responses::ResponseTextConfig + ) -> OpenAI::Responses::ResponseTextConfig attr_reader tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice? @@ -130,7 +130,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseCreateParams::Text, + ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, @@ -159,7 +159,7 @@ module OpenAI store: bool?, stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, temperature: Float?, - text: OpenAI::Responses::ResponseCreateParams::Text, + text: OpenAI::Responses::ResponseTextConfig, tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_logprobs: Integer?, @@ -203,44 +203,6 @@ module OpenAI def to_hash: -> { include_obfuscation: bool } end - type text = - { - format_: OpenAI::Models::Responses::response_format_text_config, - verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? - } - - class Text < OpenAI::Internal::Type::BaseModel - attr_reader format_: OpenAI::Models::Responses::response_format_text_config? - - def format_=: ( - OpenAI::Models::Responses::response_format_text_config - ) -> OpenAI::Models::Responses::response_format_text_config - - attr_accessor verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? - - def initialize: ( - ?format_: OpenAI::Models::Responses::response_format_text_config, - ?verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? - ) -> void - - def to_hash: -> { - format_: OpenAI::Models::Responses::response_format_text_config, - verbosity: OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity? 
- } - - type verbosity = :low | :medium | :high - - module Verbosity - extend OpenAI::Internal::Type::Enum - - LOW: :low - MEDIUM: :medium - HIGH: :high - - def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::Text::verbosity] - end - end - type tool_choice = OpenAI::Models::Responses::tool_choice_options | OpenAI::Responses::ToolChoiceAllowed diff --git a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 0634d0eb..a4237ff1 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -31,7 +31,6 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, - ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, @@ -69,7 +68,6 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, ?temperature: Float?, - ?text: OpenAI::Chat::CompletionCreateParams::Text, ?tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, ?tools: ::Array[OpenAI::Models::Chat::chat_completion_tool], ?top_logprobs: Integer?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 819df21f..6e926188 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -22,7 +22,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseCreateParams::Text, + ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, @@ -51,7 +51,7 @@ module OpenAI ?store: bool?, ?stream_options: OpenAI::Responses::ResponseCreateParams::StreamOptions?, ?temperature: Float?, - ?text: OpenAI::Responses::ResponseCreateParams::Text, + ?text: OpenAI::Responses::ResponseTextConfig, ?tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, ?tools: ::Array[OpenAI::Models::Responses::tool], ?top_logprobs: Integer?, diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index cf4ce20f..28d1d7e4 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -36,7 +36,7 @@ def test_create safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::Response::Text | nil, + text: OpenAI::Responses::ResponseTextConfig | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, @@ -78,7 +78,7 @@ def test_retrieve safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::Response::Text | nil, + text: OpenAI::Responses::ResponseTextConfig | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, @@ -128,7 +128,7 @@ def test_cancel safety_identifier: String | nil, service_tier: OpenAI::Responses::Response::ServiceTier | nil, status: OpenAI::Responses::ResponseStatus | nil, - text: OpenAI::Responses::Response::Text | nil, 
+ text: OpenAI::Responses::ResponseTextConfig | nil, top_logprobs: Integer | nil, truncation: OpenAI::Responses::Response::Truncation | nil, usage: OpenAI::Responses::ResponseUsage | nil, From 90c5677c40c37530007e7d6824c872a11da347cc Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 18:03:12 +0000 Subject: [PATCH 287/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4ad3fef3..d661066e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.18.0" + ".": "0.18.1" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index ef7ae9b4..62406c0f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.18.0) + openai (0.18.1) connection_pool GEM diff --git a/README.md b/README.md index abbb9c27..1f7a95c8 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.18.0" +gem "openai", "~> 0.18.1" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index a326627c..8309d38b 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.18.0" + VERSION = "0.18.1" end From 4a7257f129a100674e4782ba5ad1f5cb2cb76dc1 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 19:36:29 +0000 Subject: [PATCH 288/295] chore(internal/ci): setup breaking change detection --- .stats.yml | 2 +- scripts/detect-breaking-changes | 66 +++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+), 1 deletion(-) create mode 100755 scripts/detect-breaking-changes diff --git a/.stats.yml b/.stats.yml index 71c2d79d..8c981bc2 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 109 openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-7ef7a457c3bf05364e66e48c9ca34f31bfef1f6c9b7c15b1812346105e0abb16.yml openapi_spec_hash: a2b1f5d8fbb62175c93b0ebea9f10063 -config_hash: 76afa3236f36854a8705f1281b1990b8 +config_hash: 4870312b04f48fd717ea4151053e7fb9 diff --git a/scripts/detect-breaking-changes b/scripts/detect-breaking-changes new file mode 100755 index 00000000..61f7a2ec --- /dev/null +++ b/scripts/detect-breaking-changes @@ -0,0 +1,66 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." 
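+# NOTE: "$1" is assumed to be the git ref of the previous release, i.e.
+# CI invokes this script as `detect-breaking-changes <base-ref>`.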
+ +echo "==> Detecting breaking changes" + +TEST_PATHS=( + test/openai/resources/shared_test.rb + test/openai/resources/completions_test.rb + test/openai/resources/chat_test.rb + test/openai/resources/chat/completions_test.rb + test/openai/resources/chat/completions/messages_test.rb + test/openai/resources/embeddings_test.rb + test/openai/resources/files_test.rb + test/openai/resources/images_test.rb + test/openai/resources/audio_test.rb + test/openai/resources/audio/transcriptions_test.rb + test/openai/resources/audio/translations_test.rb + test/openai/resources/audio/speech_test.rb + test/openai/resources/moderations_test.rb + test/openai/resources/models_test.rb + test/openai/resources/fine_tuning_test.rb + test/openai/resources/fine_tuning/methods_test.rb + test/openai/resources/fine_tuning/jobs_test.rb + test/openai/resources/fine_tuning/jobs/checkpoints_test.rb + test/openai/resources/fine_tuning/checkpoints_test.rb + test/openai/resources/fine_tuning/checkpoints/permissions_test.rb + test/openai/resources/fine_tuning/alpha_test.rb + test/openai/resources/fine_tuning/alpha/graders_test.rb + test/openai/resources/graders_test.rb + test/openai/resources/graders/grader_models_test.rb + test/openai/resources/vector_stores_test.rb + test/openai/resources/vector_stores/files_test.rb + test/openai/resources/vector_stores/file_batches_test.rb + test/openai/resources/webhooks_test.rb + test/openai/resources/beta_test.rb + test/openai/resources/beta/assistants_test.rb + test/openai/resources/beta/threads_test.rb + test/openai/resources/beta/threads/runs_test.rb + test/openai/resources/beta/threads/runs/steps_test.rb + test/openai/resources/beta/threads/messages_test.rb + test/openai/resources/batches_test.rb + test/openai/resources/uploads_test.rb + test/openai/resources/uploads/parts_test.rb + test/openai/resources/responses_test.rb + test/openai/resources/responses/input_items_test.rb + test/openai/resources/evals_test.rb + test/openai/resources/evals/runs_test.rb + test/openai/resources/evals/runs/output_items_test.rb + test/openai/resources/containers_test.rb + test/openai/resources/containers/files_test.rb + test/openai/resources/containers/files/content_test.rb + test/openai/client_test.rb +) + +for PATHSPEC in "${TEST_PATHS[@]}"; do + # Try to check out previous versions of the test files + # with the current SDK. + git checkout "$1" -- "${PATHSPEC}" 2>/dev/null || true +done + +# Instead of running the tests, use the linter to check if an +# older test is no longer compatible with the latest SDK. 
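+# A failure here therefore signals a breaking change: a call shape that
+# compiled against "$1" no longer type-checks against the current code.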
+./scripts/lint From abfd740853abf15dd82bfe3ad60231a9f20254ef Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 22:03:24 +0000 Subject: [PATCH 289/295] fix: bump sorbet version and fix new type errors from the breaking change --- .rubocop.yml | 2 +- Gemfile.lock | 88 +++++++++++-------- lib/openai/internal/transport/base_client.rb | 5 +- .../transport/pooled_net_requester.rb | 10 +-- lib/openai/internal/util.rb | 2 +- lib/openai/models/audio/transcription.rb | 5 +- .../audio/transcription_create_params.rb | 9 +- .../audio/transcription_text_done_event.rb | 5 +- .../models/beta/assistant_create_params.rb | 25 ++---- .../models/beta/assistant_stream_event.rb | 30 ++----- .../models/beta/assistant_update_params.rb | 5 +- .../models/beta/message_stream_event.rb | 5 +- .../models/beta/run_step_stream_event.rb | 5 +- .../beta/thread_create_and_run_params.rb | 42 +++------ .../models/beta/thread_create_params.rb | 29 ++---- lib/openai/models/beta/threads/message.rb | 13 +-- .../beta/threads/message_create_params.rb | 9 +- lib/openai/models/beta/threads/run.rb | 9 +- .../models/beta/threads/run_create_params.rb | 13 +-- .../threads/run_submit_tool_outputs_params.rb | 4 +- .../runs/code_interpreter_tool_call.rb | 22 ++--- .../runs/code_interpreter_tool_call_delta.rb | 4 +- .../threads/runs/file_search_tool_call.rb | 16 +--- .../runs/run_step_delta_message_delta.rb | 5 +- .../threads/runs/tool_calls_step_details.rb | 5 +- lib/openai/models/beta/threads/text.rb | 5 +- .../models/chat/chat_completion_chunk.rb | 4 +- .../chat/chat_completion_custom_tool.rb | 9 +- lib/openai/models/eval_create_params.rb | 26 ++---- ...create_eval_completions_run_data_source.rb | 41 +++------ .../create_eval_jsonl_run_data_source.rb | 4 +- lib/openai/models/evals/run_create_params.rb | 72 ++++----------- .../reinforcement_hyperparameters.rb | 5 +- lib/openai/models/moderation.rb | 20 ++--- .../responses/response_computer_tool_call.rb | 8 +- ...response_computer_tool_call_output_item.rb | 4 +- .../models/responses/response_input_item.rb | 9 +- .../responses/response_input_message_item.rb | 5 +- .../models/responses/response_output_item.rb | 4 +- .../responses/response_output_message.rb | 4 +- .../models/responses/response_output_text.rb | 13 +-- .../models/responses/response_stream_event.rb | 20 +---- .../responses/response_text_delta_event.rb | 4 +- .../responses/response_text_done_event.rb | 4 +- lib/openai/models/responses/tool.rb | 10 +-- rbi/openai/errors.rbi | 10 +-- test/openai/internal/type/base_model_test.rb | 6 ++ test/openai/internal/util_test.rb | 12 +-- 48 files changed, 193 insertions(+), 473 deletions(-) diff --git a/.rubocop.yml b/.rubocop.yml index d9007489..39accffd 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -149,7 +149,7 @@ Naming/ClassAndModuleCamelCase: Naming/MethodParameterName: Enabled: false -Naming/PredicateName: +Naming/PredicatePrefix: Exclude: - "**/*.rbi" diff --git a/Gemfile.lock b/Gemfile.lock index 62406c0f..c633e58b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -17,7 +17,7 @@ PATH GEM remote: https://rubygems.org/ specs: - activesupport (8.0.2) + activesupport (8.0.2.1) base64 benchmark (>= 0.3) bigdecimal @@ -33,45 +33,50 @@ GEM addressable (2.8.7) public_suffix (>= 2.0.2, < 7.0) ast (2.4.3) - async (2.24.0) + async (2.27.3) console (~> 1.29) fiber-annotation - io-event (~> 1.9) + io-event (~> 1.11) metrics (~> 0.12) traces (~> 0.15) - base64 (0.2.0) - benchmark (0.4.0) - bigdecimal (3.1.9) + base64 
(0.3.0) + benchmark (0.4.1) + bigdecimal (3.2.2) concurrent-ruby (1.3.5) connection_pool (2.5.3) - console (1.30.2) + console (1.33.0) fiber-annotation fiber-local (~> 1.1) json crack (1.0.0) bigdecimal rexml - csv (3.3.4) - drb (2.2.1) + csv (3.3.5) + drb (2.2.3) erubi (1.13.1) + ffi (1.17.2-aarch64-linux-gnu) + ffi (1.17.2-aarch64-linux-musl) + ffi (1.17.2-arm64-darwin) + ffi (1.17.2-x86_64-darwin) ffi (1.17.2-x86_64-linux-gnu) + ffi (1.17.2-x86_64-linux-musl) fiber-annotation (0.2.0) fiber-local (1.1.0) fiber-storage fiber-storage (1.0.1) fileutils (1.7.3) - hashdiff (1.1.2) + hashdiff (1.2.0) i18n (1.14.7) concurrent-ruby (~> 1.0) - io-event (1.10.0) - json (2.11.3) - language_server-protocol (3.17.0.4) + io-event (1.11.2) + json (2.13.2) + language_server-protocol (3.17.0.5) lint_roller (1.1.0) listen (3.9.0) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) logger (1.7.0) - metrics (0.12.2) + metrics (0.13.0) minitest (5.25.5) minitest-focus (1.4.0) minitest (>= 4, < 6) @@ -84,7 +89,7 @@ GEM mutex_m (0.3.0) netrc (0.11.0) parallel (1.27.0) - parser (3.3.8.0) + parser (3.3.9.0) ast (~> 2.4.1) racc prettier_print (1.2.1) @@ -92,20 +97,19 @@ GEM public_suffix (6.0.2) racc (1.8.1) rainbow (3.1.1) - rake (13.2.1) + rake (13.3.0) rb-fsevent (0.11.2) rb-inotify (0.11.1) ffi (~> 1.0) - rbi (0.3.2) + rbi (0.3.6) prism (~> 1.0) rbs (>= 3.4.4) - sorbet-runtime (>= 0.5.9204) - rbs (3.9.2) + rbs (3.9.4) logger redcarpet (3.6.1) - regexp_parser (2.10.0) + regexp_parser (2.11.2) rexml (3.4.1) - rubocop (1.75.5) + rubocop (1.79.2) json (~> 2.3) language_server-protocol (~> 3.17.0.2) lint_roller (~> 1.1.0) @@ -113,25 +117,28 @@ GEM parser (>= 3.3.0.2) rainbow (>= 2.2.2, < 4.0) regexp_parser (>= 2.9.3, < 3.0) - rubocop-ast (>= 1.44.0, < 2.0) + rubocop-ast (>= 1.46.0, < 2.0) ruby-progressbar (~> 1.7) unicode-display_width (>= 2.4.0, < 4.0) - rubocop-ast (1.44.1) + rubocop-ast (1.46.0) parser (>= 3.3.7.2) prism (~> 1.4) ruby-progressbar (1.13.0) securerandom (0.4.1) - sorbet (0.5.12067) - sorbet-static (= 0.5.12067) - sorbet-runtime (0.5.12067) - sorbet-static (0.5.12067-x86_64-linux) - sorbet-static-and-runtime (0.5.12067) - sorbet (= 0.5.12067) - sorbet-runtime (= 0.5.12067) - spoom (1.6.1) + sorbet (0.5.12424) + sorbet-static (= 0.5.12424) + sorbet-runtime (0.5.12424) + sorbet-static (0.5.12424-aarch64-linux) + sorbet-static (0.5.12424-universal-darwin) + sorbet-static (0.5.12424-x86_64-linux) + sorbet-static-and-runtime (0.5.12424) + sorbet (= 0.5.12424) + sorbet-runtime (= 0.5.12424) + spoom (1.6.3) erubi (>= 1.10.0) prism (>= 0.28.0) - rbi (>= 0.2.3) + rbi (>= 0.3.3) + rexml (>= 3.2.6) sorbet-static-and-runtime (>= 0.5.10187) thor (>= 0.19.2) steep (1.10.0) @@ -152,7 +159,7 @@ GEM terminal-table (>= 2, < 5) uri (>= 0.12.0) strscan (3.1.5) - syntax_tree (6.2.0) + syntax_tree (6.3.0) prettier_print (>= 1.2.0) tapioca (0.16.11) benchmark @@ -166,11 +173,11 @@ GEM yard-sorbet terminal-table (4.0.0) unicode-display_width (>= 1.1.1, < 4) - thor (1.3.2) - traces (0.15.2) + thor (1.4.0) + traces (0.17.0) tzinfo (2.0.6) concurrent-ruby (~> 1.0) - unicode-display_width (3.1.4) + unicode-display_width (3.1.5) unicode-emoji (~> 4.0, >= 4.0.4) unicode-emoji (4.0.4) uri (1.0.3) @@ -185,7 +192,14 @@ GEM yard PLATFORMS - x86_64-linux + aarch64-linux + aarch64-linux-gnu + aarch64-linux-musl + arm64-darwin + universal-darwin + x86_64-darwin + x86_64-linux-gnu + x86_64-linux-musl DEPENDENCIES async diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb 
index 9ff4effb..ca1b145f 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -539,10 +539,7 @@ def inspect ) ), page: T.nilable(T::Class[OpenAI::Internal::Type::BasePage[OpenAI::Internal::Type::BaseModel]]), - stream: T.nilable( - T::Class[OpenAI::Internal::Type::BaseStream[T.anything, - OpenAI::Internal::Type::BaseModel]] - ), + stream: T.nilable(T::Class[OpenAI::Internal::Type::BaseStream[T.anything, OpenAI::Internal::Type::BaseModel]]), model: T.nilable(OpenAI::Internal::Type::Converter::Input), options: T.nilable(OpenAI::RequestOptions::OrHash) } diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index 7891c279..0736b441 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -193,15 +193,7 @@ def initialize(size: self.class::DEFAULT_MAX_CONNECTIONS) end define_sorbet_constant!(:Request) do - T.type_alias do - { - method: Symbol, - url: URI::Generic, - headers: T::Hash[String, String], - body: T.anything, - deadline: Float - } - end + T.type_alias { {method: Symbol, url: URI::Generic, headers: T::Hash[String, String], body: T.anything, deadline: Float} } end end end diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index 00653aa3..bec08a64 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -244,7 +244,7 @@ class << self # # @return [String] def uri_origin(uri) - "#{uri.scheme}://#{uri.host}#{uri.port == uri.default_port ? '' : ":#{uri.port}"}" + "#{uri.scheme}://#{uri.host}#{":#{uri.port}" unless uri.port == uri.default_port}" end # @api private diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 3370cb81..989688b9 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -107,10 +107,7 @@ class Tokens < OpenAI::Internal::Type::BaseModel # Details about the input tokens billed for this request. # # @return [OpenAI::Models::Audio::Transcription::Usage::Tokens::InputTokenDetails, nil] - optional :input_token_details, - -> { - OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails - } + optional :input_token_details, -> { OpenAI::Audio::Transcription::Usage::Tokens::InputTokenDetails } # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens) # Usage statistics for models billed by token usage. 
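For reference, a hedged sketch of reading the token-based usage modeled by the `Usage::Tokens` shape above, once a transcription completes (the model name is a placeholder, and since `usage` is a union of tokens and duration variants, the sketch guards with `respond_to?`):

```ruby
require "pathname"

transcription = client.audio.transcriptions.create(
  file: Pathname("speech.mp3"),  # an IO object also works
  model: "gpt-4o-transcribe"     # placeholder model name
)

usage = transcription.usage
if usage.respond_to?(:input_tokens) # the tokens variant, not the duration one
  puts "input=#{usage.input_tokens} output=#{usage.output_tokens} total=#{usage.total_tokens}"
end
```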
diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 2d51435c..2ad4984e 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -90,9 +90,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :timestamp_granularities, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] } # @!method initialize(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -150,10 +148,7 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # Must be set to `server_vad` to enable manual chunking using server side VAD. # # @return [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] - required :type, - enum: -> { - OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type - } + required :type, enum: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type } # @!attribute prefix_padding_ms # Amount of audio to include before the VAD detected speech (in milliseconds). diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 951d195f..f49f062a 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -109,10 +109,7 @@ class Usage < OpenAI::Internal::Type::BaseModel # Details about the input tokens billed for this request. # # @return [OpenAI::Models::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails, nil] - optional :input_token_details, - -> { - OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails - } + optional :input_token_details, -> { OpenAI::Audio::TranscriptionTextDoneEvent::Usage::InputTokenDetails } # @!method initialize(input_tokens:, output_tokens:, total_tokens:, input_token_details: nil, type: :tokens) # Usage statistics for models billed by token usage. 
diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 6aa28dd3..3fe326c7 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -167,10 +167,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, - -> { - OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter - } + optional :code_interpreter, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter } # @!attribute file_search # @@ -223,9 +220,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see @@ -243,9 +238,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy - } + union: -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -288,14 +281,10 @@ module ChunkingStrategy # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. variant :auto, - -> { - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto - } + -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } variant :static, - -> { - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - } + -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -316,9 +305,7 @@ class Static < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static - } + -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } # @!attribute type # Always `static`. 
diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 0245a53a..a4113b4e 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -41,10 +41,7 @@ module AssistantStreamEvent variant :"thread.run.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) moves to a `requires_action` status. - variant :"thread.run.requires_action", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction - } + variant :"thread.run.requires_action", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction } # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) is completed. variant :"thread.run.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted } @@ -68,28 +65,19 @@ module AssistantStreamEvent variant :"thread.run.step.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state. - variant :"thread.run.step.in_progress", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress - } + variant :"thread.run.step.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress } # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed. variant :"thread.run.step.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is completed. - variant :"thread.run.step.completed", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted - } + variant :"thread.run.step.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) fails. variant :"thread.run.step.failed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) is cancelled. - variant :"thread.run.step.cancelled", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled - } + variant :"thread.run.step.cancelled", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) expires. variant :"thread.run.step.expired", -> { OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired } @@ -98,10 +86,7 @@ module AssistantStreamEvent variant :"thread.message.created", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state. - variant :"thread.message.in_progress", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress - } + variant :"thread.message.in_progress", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress } # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed. 
variant :"thread.message.delta", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta } @@ -110,10 +95,7 @@ module AssistantStreamEvent variant :"thread.message.completed", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) ends before it is completed. - variant :"thread.message.incomplete", - -> { - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete - } + variant :"thread.message.incomplete", -> { OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete } # Occurs when an [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. This can happen due to an internal server error or a timeout. variant :error, -> { OpenAI::Beta::AssistantStreamEvent::ErrorEvent } diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 433bd650..75ec6a82 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -299,10 +299,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, - -> { - OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter - } + optional :code_interpreter, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter } # @!attribute file_search # diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 9f75576e..1c147dd7 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -15,10 +15,7 @@ module MessageStreamEvent variant :"thread.message.created", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated } # Occurs when a [message](https://platform.openai.com/docs/api-reference/messages/object) moves to an `in_progress` state. - variant :"thread.message.in_progress", - -> { - OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress - } + variant :"thread.message.in_progress", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress } # Occurs when parts of a [Message](https://platform.openai.com/docs/api-reference/messages/object) are being streamed. variant :"thread.message.delta", -> { OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta } diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 9dbbbb9c..16c87a3d 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -15,10 +15,7 @@ module RunStepStreamEvent variant :"thread.run.step.created", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated } # Occurs when a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) moves to an `in_progress` state. - variant :"thread.run.step.in_progress", - -> { - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress - } + variant :"thread.run.step.in_progress", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress } # Occurs when parts of a [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) are being streamed. 
variant :"thread.run.step.delta", -> { OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta } diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index c53df32e..ed371b08 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -223,9 +223,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :messages, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] } # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -366,9 +364,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tools, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. @@ -383,9 +379,7 @@ module Tool variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - } + -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -415,10 +409,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] - optional :file_search, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch - } + optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this @@ -466,9 +457,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see @@ -486,9 +475,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy - } + union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -531,14 +518,10 @@ module ChunkingStrategy # The default strategy. 
This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. variant :auto, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto - } + -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } variant :static, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - } + -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -559,9 +542,7 @@ class Static < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static - } + -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } # @!attribute type # Always `static`. @@ -614,10 +595,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] - optional :code_interpreter, - -> { - OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter - } + optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter } # @!attribute file_search # diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 7c5b41ef..02d8accc 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -148,9 +148,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tools, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. 
@@ -164,10 +162,7 @@ module Tool variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } - variant :file_search, - -> { - OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - } + variant :file_search, -> { OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -243,9 +238,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :vector_stores, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see @@ -263,9 +256,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, - union: -> { - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy - } + union: -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to @@ -308,14 +299,10 @@ module ChunkingStrategy # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of `800` and `chunk_overlap_tokens` of `400`. variant :auto, - -> { - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto - } + -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto } variant :static, - -> { - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - } + -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static } class Auto < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -336,9 +323,7 @@ class Static < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, - -> { - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static - } + -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static } # @!attribute type # Always `static`. diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index d0053ec3..b5bf5069 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -38,10 +38,7 @@ class Message < OpenAI::Internal::Type::BaseModel # The content of the message in array of text and/or images. # # @return [Array] - required :content, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent] - } + required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent] } # @!attribute created_at # The Unix timestamp (in seconds) for when the message was created. 
@@ -153,9 +150,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tools, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. @@ -167,9 +162,7 @@ module Tool variant -> { OpenAI::Beta::CodeInterpreterTool } - variant -> { - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - } + variant -> { OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly } class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @!attribute type diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 25a63182..5bcd793e 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -107,9 +107,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tools, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. @@ -123,10 +121,7 @@ module Tool variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } - variant :file_search, - -> { - OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - } + variant :file_search, -> { OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 1f1b9746..510077e4 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -355,10 +355,7 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # Details on the tool outputs needed for this run to continue. # # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] - required :submit_tool_outputs, - -> { - OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs - } + required :submit_tool_outputs, -> { OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs } # @!attribute type # For now, this is always `submit_tool_outputs`. @@ -381,9 +378,7 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # # @return [Array] required :tool_calls, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] } # @!method initialize(tool_calls:) # Details on the tool outputs needed for this run to continue. diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 43e028d1..2d44abe8 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -236,10 +236,7 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # The text contents of the message. 
# # @return [String, Array] - required :content, - union: -> { - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content - } + required :content, union: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content } # @!attribute role # The role of the entity that is creating the message. Allowed values include: @@ -336,9 +333,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tools, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. @@ -353,9 +348,7 @@ module Tool variant :code_interpreter, -> { OpenAI::Beta::CodeInterpreterTool } variant :file_search, - -> { - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - } + -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch } class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute type diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index d37572fd..4e404c85 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -21,9 +21,7 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # # @return [Array] required :tool_outputs, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } # @!method initialize(thread_id:, tool_outputs:, request_options: {}) # @param thread_id [String] diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 708b96f8..33268434 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -16,10 +16,7 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # The Code Interpreter tool call definition. # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] - required :code_interpreter, - -> { - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter - } + required :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter } # @!attribute type # The type of tool call. This is always going to be `code_interpreter` for this @@ -55,9 +52,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] required :outputs, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } # @!method initialize(input:, outputs:) # Some parameter documentations has been truncated, see @@ -77,15 +72,10 @@ module Output discriminator :type # Text output from the Code Interpreter tool call as part of a run step. 
- variant :logs, - -> { - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs - } + variant :logs, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs } variant :image, - -> { - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - } + -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image } class Logs < OpenAI::Internal::Type::BaseModel # @!attribute logs @@ -113,9 +103,7 @@ class Image < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] required :image, - -> { - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image - } + -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image } # @!attribute type # Always `image`. diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index b123ac62..853807ee 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -62,9 +62,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :outputs, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } # @!method initialize(input: nil, outputs: nil) # Some parameter documentations has been truncated, see diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 68036d67..1877433e 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -49,9 +49,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :results, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. @@ -68,9 +66,7 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] required :ranker, - enum: -> { - OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker - } + enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker } # @!attribute score_threshold # The score threshold for the file search. 
All values must be a floating point @@ -131,9 +127,7 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } # @!method initialize(file_id:, file_name:, score:, content: nil) # Some parameter documentations has been truncated, see @@ -162,9 +156,7 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, - enum: -> { - OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type - } + enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type } # @!method initialize(text: nil, type: nil) # @param text [String] The text content of the file. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 5cc90d66..856e1c65 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -15,10 +15,7 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] - optional :message_creation, - -> { - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation - } + optional :message_creation, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation } # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 9adca268..d83defac 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -12,10 +12,7 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # `function`. # # @return [Array] - required :tool_calls, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCall] - } + required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCall] } # @!attribute type # Always `tool_calls`. diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index a0247200..3e914e18 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -8,10 +8,7 @@ class Text < OpenAI::Internal::Type::BaseModel # @!attribute annotations # # @return [Array] - required :annotations, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Annotation] - } + required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Annotation] } # @!attribute value # The data that makes up the text. 
diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 25e97bc0..4e2642bd 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -183,9 +183,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :tool_calls, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # Some parameter documentations has been truncated, see diff --git a/lib/openai/models/chat/chat_completion_custom_tool.rb b/lib/openai/models/chat/chat_completion_custom_tool.rb index fed4564e..9a4b458b 100644 --- a/lib/openai/models/chat/chat_completion_custom_tool.rb +++ b/lib/openai/models/chat/chat_completion_custom_tool.rb @@ -94,10 +94,7 @@ class Grammar < OpenAI::Internal::Type::BaseModel # Your chosen grammar. # # @return [OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar] - required :grammar, - -> { - OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar - } + required :grammar, -> { OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar } # @!attribute type # Grammar format. Always `grammar`. @@ -125,9 +122,7 @@ class Grammar < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax] required :syntax, - enum: -> { - OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax - } + enum: -> { OpenAI::Chat::ChatCompletionCustomTool::Custom::Format::Grammar::Grammar::Syntax } # @!method initialize(definition:, syntax:) # Your chosen grammar. diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index 1055a3ae..10f1ef02 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -191,9 +191,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # # @return [Array] required :input, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] } # @!attribute labels # The labels to classify to each item in the evaluation. @@ -284,28 +282,20 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage, Array] required :content, - union: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content - } + union: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. 
# # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] - required :role, - enum: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role - } + required :role, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role } # @!attribute type # The type of the message input. Always `message`. # # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] - optional :type, - enum: -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type - } + optional :type, enum: -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see @@ -337,14 +327,10 @@ module Content variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText - } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText } # An image input to the model. - variant -> { - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage - } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage } # A list of inputs, each of which may be either an input text or input image object. variant -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 0fd34948..b8723853 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -23,10 +23,7 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # namespace. # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] - optional :input_messages, - union: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages - } + optional :input_messages, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages } # @!attribute model # The name of the model to use for generating completions (e.g. "o3-mini"). @@ -77,9 +74,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel # # @return [Array] required :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] } # @!attribute type # The type of jsonl source. Always `file_content`. @@ -229,9 +224,7 @@ class Template < OpenAI::Internal::Type::BaseModel # # @return [Array] required :template, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } # @!attribute type # The type of input messages. Always `template`. 
@@ -271,9 +264,7 @@ module Template # `assistant` role are presumed to have been generated by the model in previous # interactions. variant :message, - -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message - } + -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message } class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -281,9 +272,7 @@ class Message < OpenAI::Internal::Type::BaseModel # # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array] required :content, - union: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content - } + union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or @@ -291,18 +280,14 @@ class Message < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] required :role, - enum: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role - } + enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role } # @!attribute type # The type of the message input. Always `message`. # # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] optional :type, - enum: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type - } + enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see @@ -334,14 +319,10 @@ module Content variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText } # An image input to the model. - variant -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage - } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage } # A list of inputs, each of which may be either an input text or input image object. 
variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::AnArrayOfInputTextAndInputImageArray } @@ -491,9 +472,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, - union: -> { - OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::ResponseFormat - } + union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams::ResponseFormat } # @!attribute seed # A seed value to initialize the randomness, during sampling. diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index fc29873a..3afc6154 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -42,9 +42,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel # # @return [Array] required :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] } # @!attribute type # The type of jsonl source. Always `file_content`. diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 494bce0c..edacb844 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -62,18 +62,14 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] required :source, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source - } + union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source } # @!attribute type # The type of run data source. Always `responses`. # # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] required :type, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type - } + enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type } # @!attribute input_messages # Used when sampling from a model. Dictates the structure of the messages passed @@ -83,9 +79,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages - } + union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages } # @!attribute model # The name of the model to use for generating completions (e.g. "o3-mini"). 
@@ -97,9 +91,7 @@ class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] optional :sampling_params, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see @@ -127,20 +119,14 @@ module Source discriminator :type variant :file_content, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent } variant :file_id, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID } # A EvalResponsesSource object describing a run data source configuration. variant :responses, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses } class FileContent < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -148,9 +134,7 @@ class FileContent < OpenAI::Internal::Type::BaseModel # # @return [Array] required :content, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] } # @!attribute type # The type of jsonl source. Always `file_content`. @@ -330,14 +314,10 @@ module InputMessages discriminator :type variant :template, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template } variant :item_reference, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference } class Template < OpenAI::Internal::Type::BaseModel # @!attribute template @@ -375,18 +355,14 @@ class Template < OpenAI::Internal::Type::BaseModel module Template extend OpenAI::Internal::Type::Union - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
- variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem - } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem } class ChatMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -413,9 +389,7 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, - union: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content - } + union: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or @@ -423,18 +397,14 @@ class EvalItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] required :role, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role - } + enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role } # @!attribute type # The type of the message input. Always `message`. # # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] optional :type, - enum: -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type - } + enum: -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see @@ -466,14 +436,10 @@ module Content variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText - } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText } # An image input to the model. - variant -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage - } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage } # A list of inputs, each of which may be either an input text or input image object. 
variant -> { OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } @@ -630,9 +596,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text, nil] optional :text, - -> { - OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text - } + -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text } # @!attribute tools # An array of tools the model may call while generating a response. You can diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb index 8484df8c..160bc34c 100644 --- a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -49,10 +49,7 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # Level of reasoning effort. # # @return [Symbol, OpenAI::Models::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] - optional :reasoning_effort, - enum: -> { - OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort - } + optional :reasoning_effort, enum: -> { OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort } # @!method initialize(batch_size: nil, compute_multiplier: nil, eval_interval: nil, eval_samples: nil, learning_rate_multiplier: nil, n_epochs: nil, reasoning_effort: nil) # Some parameter documentations has been truncated, see diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 82a8c595..ccc71730 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -176,9 +176,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # # @return [Array] required :harassment, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Harassment] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Harassment] } # @!attribute harassment_threatening # The applied input type(s) for the category 'harassment/threatening'. @@ -195,9 +193,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # # @return [Array] required :hate, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Hate] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Hate] } # @!attribute hate_threatening # The applied input type(s) for the category 'hate/threatening'. @@ -214,9 +210,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # # @return [Array] required :illicit, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Illicit] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Illicit] } # @!attribute illicit_violent # The applied input type(s) for the category 'illicit/violent'. 
@@ -263,9 +257,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # # @return [Array] required :sexual, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Sexual] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Sexual] } # @!attribute sexual_minors # The applied input type(s) for the category 'sexual/minors'. @@ -282,9 +274,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # # @return [Array] required :violence, - -> { - OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Violence] - } + -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Violence] } # @!attribute violence_graphic # The applied input type(s) for the category 'violence/graphic'. diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 4bb9bf28..96bf3742 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -27,9 +27,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # # @return [Array] required :pending_safety_checks, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] } # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -207,9 +205,7 @@ class Drag < OpenAI::Internal::Type::BaseModel # # @return [Array] required :path, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] } # @!attribute type # Specifies the event type. For a drag action, this property is always set to diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 71412a11..3e19423b 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -34,9 +34,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :acknowledged_safety_checks, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 330e1e7c..c1aa3165 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -95,10 +95,7 @@ class Message < OpenAI::Internal::Type::BaseModel # types. # # @return [Array] - required :content, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] - } + required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. 
@@ -605,9 +602,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # # @return [Array] required :tools, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] } # @!attribute type # The type of the item. Always `mcp_list_tools`. diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 90fade2d..48236782 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -15,10 +15,7 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # types. # # @return [Array] - required :content, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] - } + required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] } # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 9d9b204f..5e2bd164 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -314,9 +314,7 @@ class McpListTools < OpenAI::Internal::Type::BaseModel # # @return [Array] required :tools, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] } # @!attribute type # The type of the item. Always `mcp_list_tools`. diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index 72e65a6d..0f5f43e6 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -15,9 +15,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # # @return [Array] required :content, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content] } # @!attribute role # The role of the output message. Always `assistant`. diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 994098c2..b290fe14 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -9,9 +9,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # # @return [Array] required :annotations, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] - } + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] } # @!attribute text # The text output from the model. @@ -28,10 +26,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # # @return [Array, nil] - optional :logprobs, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob] - } + optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob] } # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text) # A text output from the model. 
@@ -255,9 +250,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Array] required :top_logprobs, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputText::Logprob::TopLogprob] } # @!method initialize(token:, bytes:, logprob:, top_logprobs:) # The log probability of a token. diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index d0bc4144..c5eeca54 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -16,10 +16,7 @@ module ResponseStreamEvent variant :"response.audio.done", -> { OpenAI::Responses::ResponseAudioDoneEvent } # Emitted when there is a partial transcript of audio. - variant :"response.audio.transcript.delta", - -> { - OpenAI::Responses::ResponseAudioTranscriptDeltaEvent - } + variant :"response.audio.transcript.delta", -> { OpenAI::Responses::ResponseAudioTranscriptDeltaEvent } # Emitted when the full audio transcript is completed. variant :"response.audio.transcript.done", -> { OpenAI::Responses::ResponseAudioTranscriptDoneEvent } @@ -157,16 +154,10 @@ module ResponseStreamEvent -> { OpenAI::Responses::ResponseImageGenCallPartialImageEvent } # Emitted when there is a delta (partial update) to the arguments of an MCP tool call. - variant :"response.mcp_call_arguments.delta", - -> { - OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent - } + variant :"response.mcp_call_arguments.delta", -> { OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent } # Emitted when the arguments for an MCP tool call are finalized. - variant :"response.mcp_call_arguments.done", - -> { - OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent - } + variant :"response.mcp_call_arguments.done", -> { OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent } # Emitted when an MCP tool call has completed successfully. variant :"response.mcp_call.completed", -> { OpenAI::Responses::ResponseMcpCallCompletedEvent } @@ -178,10 +169,7 @@ module ResponseStreamEvent variant :"response.mcp_call.in_progress", -> { OpenAI::Responses::ResponseMcpCallInProgressEvent } # Emitted when the list of available MCP tools has been successfully retrieved. - variant :"response.mcp_list_tools.completed", - -> { - OpenAI::Responses::ResponseMcpListToolsCompletedEvent - } + variant :"response.mcp_list_tools.completed", -> { OpenAI::Responses::ResponseMcpListToolsCompletedEvent } # Emitted when the attempt to list available MCP tools has failed. 
variant :"response.mcp_list_tools.failed", -> { OpenAI::Responses::ResponseMcpListToolsFailedEvent } diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 4e51d86c..9701d97c 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -85,9 +85,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :top_logprobs, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDeltaEvent::Logprob::TopLogprob] } # @!method initialize(token:, logprob:, top_logprobs: nil) # Some parameter documentations has been truncated, see diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 3b861ee3..77110a2a 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -85,9 +85,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Array, nil] optional :top_logprobs, - -> { - OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] - } + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseTextDoneEvent::Logprob::TopLogprob] } # @!method initialize(token:, logprob:, top_logprobs: nil) # Some parameter documentations has been truncated, see diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 0d8beed3..752c5f53 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -154,19 +154,13 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel # A list of tools that always require approval. # # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil] - optional :always, - -> { - OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always - } + optional :always, -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always } # @!attribute never # A list of tools that never require approval. 
# # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil] - optional :never, - -> { - OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never - } + optional :never, -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } # @!method initialize(always: nil, never: nil) # Some parameter documentations has been truncated, see diff --git a/rbi/openai/errors.rbi b/rbi/openai/errors.rbi index 04c913cb..b3cfeb33 100644 --- a/rbi/openai/errors.rbi +++ b/rbi/openai/errors.rbi @@ -68,19 +68,19 @@ module OpenAI end class APIConnectionError < OpenAI::Errors::APIError - sig { void } + sig { returns(NilClass) } attr_accessor :status - sig { void } + sig { returns(NilClass) } attr_accessor :body - sig { void } + sig { returns(NilClass) } attr_accessor :code - sig { void } + sig { returns(NilClass) } attr_accessor :param - sig { void } + sig { returns(NilClass) } attr_accessor :type # @api private diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index 5f3ba474..438037e4 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -157,6 +157,7 @@ def test_dump_retry class OpenAI::Test::EnumModelTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) @@ -476,6 +477,7 @@ def initialize(*variants) = variants.each { variant(_1) } module U1 extend OpenAI::Internal::Type::Union + variant const: :a variant const: 2 end @@ -492,6 +494,7 @@ class M2 < OpenAI::Internal::Type::BaseModel module U2 extend OpenAI::Internal::Type::Union + discriminator :type variant :a, M1 @@ -500,6 +503,7 @@ module U2 module U3 extend OpenAI::Internal::Type::Union + discriminator :type variant :a, M1 @@ -508,6 +512,7 @@ module U3 module U4 extend OpenAI::Internal::Type::Union + discriminator :type variant String @@ -601,6 +606,7 @@ def test_coerce class OpenAI::Test::BaseModelQoLTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) diff --git a/test/openai/internal/util_test.rb b/test/openai/internal/util_test.rb index 76f2f9f5..fc91db45 100644 --- a/test/openai/internal/util_test.rb +++ b/test/openai/internal/util_test.rb @@ -242,11 +242,7 @@ def test_hash_encode {strio: StringIO.new("a")} => {"strio" => "a"}, {strio: OpenAI::FilePart.new("a")} => {"strio" => "a"}, {pathname: Pathname(__FILE__)} => {"pathname" => -> { _1.read in /^class OpenAI/ }}, - {pathname: OpenAI::FilePart.new(Pathname(__FILE__))} => { - "pathname" => -> { - _1.read in /^class OpenAI/ - } - } + {pathname: OpenAI::FilePart.new(Pathname(__FILE__))} => {"pathname" => -> { _1.read in /^class OpenAI/ }} } cases.each do |body, testcase| encoded = OpenAI::Internal::Util.encode_content(headers, body) @@ -324,9 +320,9 @@ def test_rewind_chain end def test_external_iteration - it = [1, 2, 3].to_enum - first = it.next - fused = OpenAI::Internal::Util.fused_enum(it, external: true) + iter = [1, 2, 3].to_enum + first = iter.next + fused = OpenAI::Internal::Util.fused_enum(iter, external: true) assert_equal(1, first) assert_equal([2, 3], fused.to_a) From 7bcf2d814036075509a78474e0f703f322809132 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 21 Aug 2025 16:23:32 +0000 Subject: [PATCH 290/295] feat(api): adding support for /v1/conversations to the API --- 
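# Editor's note, not part of this patch: a hedged usage sketch of the new
# /v1/conversations surface introduced by this commit. The `create` method and
# its `items:`/`metadata:` parameters follow the
# `@see OpenAI::Resources::Conversations#create` tags on the generated models
# below; the message-item shape is assumed from the Responses input-item union.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

conversation = client.conversations.create(
  items: [{type: :message, role: :user, content: "Hello!"}],
  metadata: {topic: "demo"}
)
puts conversation.id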
.stats.yml | 8 +- lib/openai.rb | 31 + lib/openai/client.rb | 4 + .../internal/conversation_cursor_page.rb | 92 ++ lib/openai/models.rb | 2 + .../computer_screenshot_content.rb | 38 + .../container_file_citation_body.rb | 58 ++ .../models/conversations/conversation.rb | 51 ++ .../conversation_create_params.rb | 39 + .../conversation_delete_params.rb | 16 + .../conversations/conversation_deleted.rb | 29 + .../conversation_deleted_resource.rb | 30 + .../models/conversations/conversation_item.rb | 568 ++++++++++++ .../conversations/conversation_item_list.rb | 55 ++ .../conversation_retrieve_params.rb | 16 + .../conversation_update_params.rb | 31 + .../conversations/file_citation_body.rb | 42 + .../conversations/input_file_content.rb | 42 + .../conversations/input_image_content.rb | 62 ++ .../conversations/input_text_content.rb | 26 + .../conversations/item_create_params.rb | 37 + .../conversations/item_delete_params.rb | 22 + .../models/conversations/item_list_params.rb | 84 ++ .../conversations/item_retrieve_params.rb | 36 + lib/openai/models/conversations/lob_prob.rb | 35 + lib/openai/models/conversations/message.rb | 115 +++ .../conversations/output_text_content.rb | 57 ++ .../models/conversations/refusal_content.rb | 26 + .../conversations/summary_text_content.rb | 23 + .../models/conversations/text_content.rb | 23 + .../models/conversations/top_log_prob.rb | 29 + .../models/conversations/url_citation_body.rb | 50 ++ ...create_eval_completions_run_data_source.rb | 53 +- .../responses/input_item_list_params.rb | 10 +- lib/openai/models/responses/response.rb | 27 +- .../responses/response_conversation_param.rb | 20 + .../responses/response_create_params.rb | 35 +- lib/openai/resources/conversations.rb | 112 +++ lib/openai/resources/conversations/items.rb | 141 +++ lib/openai/resources/responses.rb | 8 +- lib/openai/resources/responses/input_items.rb | 4 +- rbi/openai/client.rbi | 3 + .../internal/conversation_cursor_page.rbi | 25 + rbi/openai/models.rbi | 2 + .../computer_screenshot_content.rbi | 60 ++ .../container_file_citation_body.rbi | 82 ++ .../models/conversations/conversation.rbi | 76 ++ .../conversation_create_params.rbi | 144 +++ .../conversation_delete_params.rbi | 32 + .../conversations/conversation_deleted.rbi | 40 + .../conversation_deleted_resource.rbi | 40 + .../conversations/conversation_item.rbi | 835 ++++++++++++++++++ .../conversations/conversation_item_list.rbi | 101 +++ .../conversation_retrieve_params.rbi | 32 + .../conversation_update_params.rbi | 56 ++ .../conversations/file_citation_body.rbi | 61 ++ .../conversations/input_file_content.rbi | 72 ++ .../conversations/input_image_content.rbi | 113 +++ .../conversations/input_text_content.rbi | 38 + .../conversations/item_create_params.rbi | 150 ++++ .../conversations/item_delete_params.rbi | 40 + .../models/conversations/item_list_params.rbi | 174 ++++ .../conversations/item_retrieve_params.rbi | 70 ++ rbi/openai/models/conversations/lob_prob.rbi | 50 ++ rbi/openai/models/conversations/message.rbi | 196 ++++ .../conversations/output_text_content.rbi | 110 +++ .../models/conversations/refusal_content.rbi | 38 + .../conversations/summary_text_content.rbi | 31 + .../models/conversations/text_content.rbi | 28 + .../models/conversations/top_log_prob.rbi | 41 + .../conversations/url_citation_body.rbi | 74 ++ ...reate_eval_completions_run_data_source.rbi | 66 +- .../responses/input_item_list_params.rbi | 11 - rbi/openai/models/responses/response.rbi | 49 + .../responses/response_conversation_param.rbi | 33 + 
.../responses/response_create_params.rbi | 54 ++ rbi/openai/resources/conversations.rbi | 110 +++ rbi/openai/resources/conversations/items.rbi | 152 ++++ rbi/openai/resources/responses.rbi | 26 + .../resources/responses/input_items.rbi | 3 - scripts/detect-breaking-changes | 2 + sig/openai/client.rbs | 2 + .../internal/conversation_cursor_page.rbs | 15 + sig/openai/models.rbs | 2 + .../computer_screenshot_content.rbs | 28 + .../container_file_citation_body.rbs | 47 + .../models/conversations/conversation.rbs | 37 + .../conversation_create_params.rbs | 33 + .../conversation_delete_params.rbs | 17 + .../conversations/conversation_deleted.rbs | 28 + .../conversation_deleted_resource.rbs | 28 + .../conversations/conversation_item.rbs | 403 +++++++++ .../conversations/conversation_item_list.rbs | 44 + .../conversation_retrieve_params.rbs | 17 + .../conversation_update_params.rbs | 26 + .../conversations/file_citation_body.rbs | 37 + .../conversations/input_file_content.rbs | 41 + .../conversations/input_image_content.rbs | 49 + .../conversations/input_text_content.rbs | 17 + .../conversations/item_create_params.rbs | 37 + .../conversations/item_delete_params.rbs | 25 + .../models/conversations/item_list_params.rbs | 66 ++ .../conversations/item_retrieve_params.rbs | 37 + sig/openai/models/conversations/lob_prob.rbs | 37 + sig/openai/models/conversations/message.rbs | 95 ++ .../conversations/output_text_content.rbs | 52 ++ .../models/conversations/refusal_content.rbs | 17 + .../conversations/summary_text_content.rbs | 17 + .../models/conversations/text_content.rbs | 17 + .../models/conversations/top_log_prob.rbs | 28 + .../conversations/url_citation_body.rbs | 42 + ...reate_eval_completions_run_data_source.rbs | 44 +- .../responses/input_item_list_params.rbs | 7 - sig/openai/models/responses/response.rbs | 15 + .../responses/response_conversation_param.rbs | 15 + .../responses/response_create_params.rbs | 14 + sig/openai/resources/conversations.rbs | 31 + sig/openai/resources/conversations/items.rbs | 38 + sig/openai/resources/responses.rbs | 2 + .../resources/responses/input_items.rbs | 1 - test/openai/resource_namespaces.rb | 3 + .../resources/conversations/items_test.rb | 327 +++++++ test/openai/resources/conversations_test.rb | 72 ++ test/openai/resources/responses_test.rb | 3 + 124 files changed, 7295 insertions(+), 125 deletions(-) create mode 100644 lib/openai/internal/conversation_cursor_page.rb create mode 100644 lib/openai/models/conversations/computer_screenshot_content.rb create mode 100644 lib/openai/models/conversations/container_file_citation_body.rb create mode 100644 lib/openai/models/conversations/conversation.rb create mode 100644 lib/openai/models/conversations/conversation_create_params.rb create mode 100644 lib/openai/models/conversations/conversation_delete_params.rb create mode 100644 lib/openai/models/conversations/conversation_deleted.rb create mode 100644 lib/openai/models/conversations/conversation_deleted_resource.rb create mode 100644 lib/openai/models/conversations/conversation_item.rb create mode 100644 lib/openai/models/conversations/conversation_item_list.rb create mode 100644 lib/openai/models/conversations/conversation_retrieve_params.rb create mode 100644 lib/openai/models/conversations/conversation_update_params.rb create mode 100644 lib/openai/models/conversations/file_citation_body.rb create mode 100644 lib/openai/models/conversations/input_file_content.rb create mode 100644 lib/openai/models/conversations/input_image_content.rb create mode 100644 
lib/openai/models/conversations/input_text_content.rb create mode 100644 lib/openai/models/conversations/item_create_params.rb create mode 100644 lib/openai/models/conversations/item_delete_params.rb create mode 100644 lib/openai/models/conversations/item_list_params.rb create mode 100644 lib/openai/models/conversations/item_retrieve_params.rb create mode 100644 lib/openai/models/conversations/lob_prob.rb create mode 100644 lib/openai/models/conversations/message.rb create mode 100644 lib/openai/models/conversations/output_text_content.rb create mode 100644 lib/openai/models/conversations/refusal_content.rb create mode 100644 lib/openai/models/conversations/summary_text_content.rb create mode 100644 lib/openai/models/conversations/text_content.rb create mode 100644 lib/openai/models/conversations/top_log_prob.rb create mode 100644 lib/openai/models/conversations/url_citation_body.rb create mode 100644 lib/openai/models/responses/response_conversation_param.rb create mode 100644 lib/openai/resources/conversations.rb create mode 100644 lib/openai/resources/conversations/items.rb create mode 100644 rbi/openai/internal/conversation_cursor_page.rbi create mode 100644 rbi/openai/models/conversations/computer_screenshot_content.rbi create mode 100644 rbi/openai/models/conversations/container_file_citation_body.rbi create mode 100644 rbi/openai/models/conversations/conversation.rbi create mode 100644 rbi/openai/models/conversations/conversation_create_params.rbi create mode 100644 rbi/openai/models/conversations/conversation_delete_params.rbi create mode 100644 rbi/openai/models/conversations/conversation_deleted.rbi create mode 100644 rbi/openai/models/conversations/conversation_deleted_resource.rbi create mode 100644 rbi/openai/models/conversations/conversation_item.rbi create mode 100644 rbi/openai/models/conversations/conversation_item_list.rbi create mode 100644 rbi/openai/models/conversations/conversation_retrieve_params.rbi create mode 100644 rbi/openai/models/conversations/conversation_update_params.rbi create mode 100644 rbi/openai/models/conversations/file_citation_body.rbi create mode 100644 rbi/openai/models/conversations/input_file_content.rbi create mode 100644 rbi/openai/models/conversations/input_image_content.rbi create mode 100644 rbi/openai/models/conversations/input_text_content.rbi create mode 100644 rbi/openai/models/conversations/item_create_params.rbi create mode 100644 rbi/openai/models/conversations/item_delete_params.rbi create mode 100644 rbi/openai/models/conversations/item_list_params.rbi create mode 100644 rbi/openai/models/conversations/item_retrieve_params.rbi create mode 100644 rbi/openai/models/conversations/lob_prob.rbi create mode 100644 rbi/openai/models/conversations/message.rbi create mode 100644 rbi/openai/models/conversations/output_text_content.rbi create mode 100644 rbi/openai/models/conversations/refusal_content.rbi create mode 100644 rbi/openai/models/conversations/summary_text_content.rbi create mode 100644 rbi/openai/models/conversations/text_content.rbi create mode 100644 rbi/openai/models/conversations/top_log_prob.rbi create mode 100644 rbi/openai/models/conversations/url_citation_body.rbi create mode 100644 rbi/openai/models/responses/response_conversation_param.rbi create mode 100644 rbi/openai/resources/conversations.rbi create mode 100644 rbi/openai/resources/conversations/items.rbi create mode 100644 sig/openai/internal/conversation_cursor_page.rbs create mode 100644 sig/openai/models/conversations/computer_screenshot_content.rbs create mode 
100644 sig/openai/models/conversations/container_file_citation_body.rbs create mode 100644 sig/openai/models/conversations/conversation.rbs create mode 100644 sig/openai/models/conversations/conversation_create_params.rbs create mode 100644 sig/openai/models/conversations/conversation_delete_params.rbs create mode 100644 sig/openai/models/conversations/conversation_deleted.rbs create mode 100644 sig/openai/models/conversations/conversation_deleted_resource.rbs create mode 100644 sig/openai/models/conversations/conversation_item.rbs create mode 100644 sig/openai/models/conversations/conversation_item_list.rbs create mode 100644 sig/openai/models/conversations/conversation_retrieve_params.rbs create mode 100644 sig/openai/models/conversations/conversation_update_params.rbs create mode 100644 sig/openai/models/conversations/file_citation_body.rbs create mode 100644 sig/openai/models/conversations/input_file_content.rbs create mode 100644 sig/openai/models/conversations/input_image_content.rbs create mode 100644 sig/openai/models/conversations/input_text_content.rbs create mode 100644 sig/openai/models/conversations/item_create_params.rbs create mode 100644 sig/openai/models/conversations/item_delete_params.rbs create mode 100644 sig/openai/models/conversations/item_list_params.rbs create mode 100644 sig/openai/models/conversations/item_retrieve_params.rbs create mode 100644 sig/openai/models/conversations/lob_prob.rbs create mode 100644 sig/openai/models/conversations/message.rbs create mode 100644 sig/openai/models/conversations/output_text_content.rbs create mode 100644 sig/openai/models/conversations/refusal_content.rbs create mode 100644 sig/openai/models/conversations/summary_text_content.rbs create mode 100644 sig/openai/models/conversations/text_content.rbs create mode 100644 sig/openai/models/conversations/top_log_prob.rbs create mode 100644 sig/openai/models/conversations/url_citation_body.rbs create mode 100644 sig/openai/models/responses/response_conversation_param.rbs create mode 100644 sig/openai/resources/conversations.rbs create mode 100644 sig/openai/resources/conversations/items.rbs create mode 100644 test/openai/resources/conversations/items_test.rb create mode 100644 test/openai/resources/conversations_test.rb diff --git a/.stats.yml b/.stats.yml index 8c981bc2..fb355661 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 109 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-7ef7a457c3bf05364e66e48c9ca34f31bfef1f6c9b7c15b1812346105e0abb16.yml -openapi_spec_hash: a2b1f5d8fbb62175c93b0ebea9f10063 -config_hash: 4870312b04f48fd717ea4151053e7fb9 +configured_endpoints: 117 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bcdfe525558e67a09b32dec7a573e87b94bab47db3951eb4a86a4dafb60296c.yml +openapi_spec_hash: 49e7e46bfe9f61b7b7a60e36840c0cd7 +config_hash: e4514526ae01126a61f9b6c14a351737 diff --git a/lib/openai.rb b/lib/openai.rb index a83087c5..fb87b5a4 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -52,6 +52,7 @@ require_relative "openai/internal/transport/pooled_net_requester" require_relative "openai/client" require_relative "openai/internal/stream" +require_relative "openai/internal/conversation_cursor_page" require_relative "openai/internal/cursor_page" require_relative "openai/internal/page" require_relative "openai/models/reasoning_effort" @@ -241,6 +242,33 @@ require_relative "openai/models/containers/file_retrieve_params" require_relative 
"openai/models/containers/file_retrieve_response" require_relative "openai/models/containers/files/content_retrieve_params" +require_relative "openai/models/conversations/computer_screenshot_content" +require_relative "openai/models/conversations/container_file_citation_body" +require_relative "openai/models/conversations/conversation" +require_relative "openai/models/conversations/conversation_create_params" +require_relative "openai/models/conversations/conversation_deleted" +require_relative "openai/models/conversations/conversation_deleted_resource" +require_relative "openai/models/conversations/conversation_delete_params" +require_relative "openai/models/conversations/conversation_item" +require_relative "openai/models/conversations/conversation_item_list" +require_relative "openai/models/conversations/conversation_retrieve_params" +require_relative "openai/models/conversations/conversation_update_params" +require_relative "openai/models/conversations/file_citation_body" +require_relative "openai/models/conversations/input_file_content" +require_relative "openai/models/conversations/input_image_content" +require_relative "openai/models/conversations/input_text_content" +require_relative "openai/models/conversations/item_create_params" +require_relative "openai/models/conversations/item_delete_params" +require_relative "openai/models/conversations/item_list_params" +require_relative "openai/models/conversations/item_retrieve_params" +require_relative "openai/models/conversations/lob_prob" +require_relative "openai/models/conversations/message" +require_relative "openai/models/conversations/output_text_content" +require_relative "openai/models/conversations/refusal_content" +require_relative "openai/models/conversations/summary_text_content" +require_relative "openai/models/conversations/text_content" +require_relative "openai/models/conversations/top_log_prob" +require_relative "openai/models/conversations/url_citation_body" require_relative "openai/models/create_embedding_response" require_relative "openai/models/custom_tool_input_format" require_relative "openai/models/embedding" @@ -378,6 +406,7 @@ require_relative "openai/models/responses/response_content" require_relative "openai/models/responses/response_content_part_added_event" require_relative "openai/models/responses/response_content_part_done_event" +require_relative "openai/models/responses/response_conversation_param" require_relative "openai/models/responses/response_created_event" require_relative "openai/models/responses/response_create_params" require_relative "openai/models/responses/response_custom_tool_call" @@ -530,6 +559,8 @@ require_relative "openai/resources/containers" require_relative "openai/resources/containers/files" require_relative "openai/resources/containers/files/content" +require_relative "openai/resources/conversations" +require_relative "openai/resources/conversations/items" require_relative "openai/resources/embeddings" require_relative "openai/resources/evals" require_relative "openai/resources/evals/runs" diff --git a/lib/openai/client.rb b/lib/openai/client.rb index 08a2a7da..b2bef0ac 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -72,6 +72,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::Responses] attr_reader :responses + # @return [OpenAI::Resources::Conversations] + attr_reader :conversations + # @return [OpenAI::Resources::Evals] attr_reader :evals @@ -153,6 +156,7 @@ def initialize( @batches = OpenAI::Resources::Batches.new(client: 
self) @uploads = OpenAI::Resources::Uploads.new(client: self) @responses = OpenAI::Resources::Responses.new(client: self) + @conversations = OpenAI::Resources::Conversations.new(client: self) @evals = OpenAI::Resources::Evals.new(client: self) @containers = OpenAI::Resources::Containers.new(client: self) end diff --git a/lib/openai/internal/conversation_cursor_page.rb b/lib/openai/internal/conversation_cursor_page.rb new file mode 100644 index 00000000..6dddc16e --- /dev/null +++ b/lib/openai/internal/conversation_cursor_page.rb @@ -0,0 +1,92 @@ +# frozen_string_literal: true + +module OpenAI + module Internal + # @generic Elem + # + # @example + # if conversation_cursor_page.has_next? + # conversation_cursor_page = conversation_cursor_page.next_page + # end + # + # @example + # conversation_cursor_page.auto_paging_each do |item| + # puts(item) + # end + class ConversationCursorPage + include OpenAI::Internal::Type::BasePage + + # @return [Array>, nil] + attr_accessor :data + + # @return [Boolean] + attr_accessor :has_more + + # @return [String] + attr_accessor :last_id + + # @return [Boolean] + def next_page? + has_more + end + + # @raise [OpenAI::HTTP::Error] + # @return [self] + def next_page + unless next_page? + message = "No more pages available. Please check #next_page? before calling ##{__method__}" + raise RuntimeError.new(message) + end + + req = OpenAI::Internal::Util.deep_merge(@req, {query: {after: last_id}}) + @client.request(req) + end + + # @param blk [Proc] + # + # @yieldparam [generic] + def auto_paging_each(&blk) + unless block_given? + raise ArgumentError.new("A block must be given to ##{__method__}") + end + + page = self + loop do + page.data&.each(&blk) + + break unless page.next_page? + page = page.next_page + end + end + + # @api private + # + # @param client [OpenAI::Internal::Transport::BaseClient] + # @param req [Hash{Symbol=>Object}] + # @param headers [Hash{String=>String}, Net::HTTPHeader] + # @param page_data [Hash{Symbol=>Object}] + def initialize(client:, req:, headers:, page_data:) + super + + case page_data + in {data: Array => data} + @data = data.map { OpenAI::Internal::Type::Converter.coerce(@model, _1) } + else + end + @has_more = page_data[:has_more] + @last_id = page_data[:last_id] + end + + # @api private + # + # @return [String] + def inspect + # rubocop:disable Layout/LineLength + model = OpenAI::Internal::Type::Converter.inspect(@model, depth: 1) + + "#<#{self.class}[#{model}]:0x#{object_id.to_s(16)} has_more=#{has_more.inspect} last_id=#{last_id.inspect}>" + # rubocop:enable Layout/LineLength + end + end + end +end diff --git a/lib/openai/models.rb b/lib/openai/models.rb index 23993a69..df4aaaa4 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -91,6 +91,8 @@ module OpenAI Containers = OpenAI::Models::Containers + Conversations = OpenAI::Models::Conversations + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat diff --git a/lib/openai/models/conversations/computer_screenshot_content.rb b/lib/openai/models/conversations/computer_screenshot_content.rb new file mode 100644 index 00000000..1b030a1c --- /dev/null +++ b/lib/openai/models/conversations/computer_screenshot_content.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel + # @!attribute file_id + # The identifier of an uploaded file that contains the screenshot. 
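# Editor's note, not part of this patch: a minimal pagination sketch for the
# `ConversationCursorPage` introduced above. The page follows `last_id` via the
# `after` query parameter until `has_more` is false; the exact
# `conversations.items.list` signature is an assumption based on the new
# Conversations::Items resource.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
page = client.conversations.items.list("conv_123", limit: 20) # hypothetical conversation ID

# Lazily walks every page, requesting the next one as needed.
page.auto_paging_each { |item| puts item.type }

# Or page manually:
page = page.next_page if page.next_page?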
+ # + # @return [String, nil] + required :file_id, String, nil?: true + + # @!attribute image_url + # The URL of the screenshot image. + # + # @return [String, nil] + required :image_url, String, nil?: true + + # @!attribute type + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. + # + # @return [Symbol, :computer_screenshot] + required :type, const: :computer_screenshot + + # @!method initialize(file_id:, image_url:, type: :computer_screenshot) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ComputerScreenshotContent} for more details. + # + # @param file_id [String, nil] The identifier of an uploaded file that contains the screenshot. + # + # @param image_url [String, nil] The URL of the screenshot image. + # + # @param type [Symbol, :computer_screenshot] Specifies the event type. For a computer screenshot, this property is always set + end + end + end +end diff --git a/lib/openai/models/conversations/container_file_citation_body.rb b/lib/openai/models/conversations/container_file_citation_body.rb new file mode 100644 index 00000000..4c373465 --- /dev/null +++ b/lib/openai/models/conversations/container_file_citation_body.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class ContainerFileCitationBody < OpenAI::Internal::Type::BaseModel + # @!attribute container_id + # The ID of the container file. + # + # @return [String] + required :container_id, String + + # @!attribute end_index + # The index of the last character of the container file citation in the message. + # + # @return [Integer] + required :end_index, Integer + + # @!attribute file_id + # The ID of the file. + # + # @return [String] + required :file_id, String + + # @!attribute filename + # The filename of the container file cited. + # + # @return [String] + required :filename, String + + # @!attribute start_index + # The index of the first character of the container file citation in the message. + # + # @return [Integer] + required :start_index, Integer + + # @!attribute type + # The type of the container file citation. Always `container_file_citation`. + # + # @return [Symbol, :container_file_citation] + required :type, const: :container_file_citation + + # @!method initialize(container_id:, end_index:, file_id:, filename:, start_index:, type: :container_file_citation) + # @param container_id [String] The ID of the container file. + # + # @param end_index [Integer] The index of the last character of the container file citation in the message. + # + # @param file_id [String] The ID of the file. + # + # @param filename [String] The filename of the container file cited. + # + # @param start_index [Integer] The index of the first character of the container file citation in the message. + # + # @param type [Symbol, :container_file_citation] The type of the container file citation. Always `container_file_citation`. + end + end + end +end diff --git a/lib/openai/models/conversations/conversation.rb b/lib/openai/models/conversations/conversation.rb new file mode 100644 index 00000000..8e39df4c --- /dev/null +++ b/lib/openai/models/conversations/conversation.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#create + class Conversation < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the conversation. 
+ # + # @return [String] + required :id, String + + # @!attribute created_at + # The time at which the conversation was created, measured in seconds since the + # Unix epoch. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + # + # @return [Object] + required :metadata, OpenAI::Internal::Type::Unknown + + # @!attribute object + # The object type, which is always `conversation`. + # + # @return [Symbol, :conversation] + required :object, const: :conversation + + # @!method initialize(id:, created_at:, metadata:, object: :conversation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::Conversation} for more details. + # + # @param id [String] The unique ID of the conversation. + # + # @param created_at [Integer] The time at which the conversation was created, measured in seconds since the Un + # + # @param metadata [Object] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param object [Symbol, :conversation] The object type, which is always `conversation`. + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_create_params.rb b/lib/openai/models/conversations/conversation_create_params.rb new file mode 100644 index 00000000..58cc4ba9 --- /dev/null +++ b/lib/openai/models/conversations/conversation_create_params.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#create + class ConversationCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute items + # Initial items to include in the conversation context. You may add up to 20 items + # at a time. + # + # @return [Array, nil] + optional :items, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputItem] }, + nil?: true + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. Useful for storing + # additional information about the object in a structured format. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(items: nil, metadata: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationCreateParams} for more details. + # + # @param items [Array, nil] Initial items to include in the conversation context. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
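# Editor's note, not part of this patch: a hedged sketch of the remaining CRUD
# calls implied by the params classes in this commit
# (`ConversationRetrieveParams`, `ConversationUpdateParams`,
# `ConversationDeleteParams`); the positional-ID signatures are assumptions.
conversation = client.conversations.retrieve("conv_123")
conversation = client.conversations.update("conv_123", metadata: {topic: "updated"})
deleted = client.conversations.delete("conv_123")
puts deleted.deleted # => true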
Useful for + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_delete_params.rb b/lib/openai/models/conversations/conversation_delete_params.rb new file mode 100644 index 00000000..88897fec --- /dev/null +++ b/lib/openai/models/conversations/conversation_delete_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#delete + class ConversationDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_deleted.rb b/lib/openai/models/conversations/conversation_deleted.rb new file mode 100644 index 00000000..3d599ebe --- /dev/null +++ b/lib/openai/models/conversations/conversation_deleted.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class ConversationDeleted < OpenAI::Internal::Type::BaseModel + # @!attribute id + # + # @return [String] + required :id, String + + # @!attribute deleted + # + # @return [Boolean] + required :deleted, OpenAI::Internal::Type::Boolean + + # @!attribute object + # + # @return [Symbol, :"conversation.deleted"] + required :object, const: :"conversation.deleted" + + # @!method initialize(id:, deleted:, object: :"conversation.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"conversation.deleted"] + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_deleted_resource.rb b/lib/openai/models/conversations/conversation_deleted_resource.rb new file mode 100644 index 00000000..08baf435 --- /dev/null +++ b/lib/openai/models/conversations/conversation_deleted_resource.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#delete + class ConversationDeletedResource < OpenAI::Internal::Type::BaseModel + # @!attribute id + # + # @return [String] + required :id, String + + # @!attribute deleted + # + # @return [Boolean] + required :deleted, OpenAI::Internal::Type::Boolean + + # @!attribute object + # + # @return [Symbol, :"conversation.deleted"] + required :object, const: :"conversation.deleted" + + # @!method initialize(id:, deleted:, object: :"conversation.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"conversation.deleted"] + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_item.rb b/lib/openai/models/conversations/conversation_item.rb new file mode 100644 index 00000000..3ca95f12 --- /dev/null +++ b/lib/openai/models/conversations/conversation_item.rb @@ -0,0 +1,568 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # A single item within a conversation. The set of possible types are the same as + # the `output` type of a + # [Response object](https://platform.openai.com/docs/api-reference/responses/object#responses/object-output). 
+ # + # @see OpenAI::Resources::Conversations::Items#retrieve + module ConversationItem + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :message, -> { OpenAI::Conversations::Message } + + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) for more information. + variant :function_call, -> { OpenAI::Responses::ResponseFunctionToolCallItem } + + variant :function_call_output, -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem } + + # The results of a file search tool call. See the + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) for more information. + variant :file_search_call, -> { OpenAI::Responses::ResponseFileSearchToolCall } + + # The results of a web search tool call. See the + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for more information. + variant :web_search_call, -> { OpenAI::Responses::ResponseFunctionWebSearch } + + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Conversations::ConversationItem::ImageGenerationCall } + + # A tool call to a computer use tool. See the + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) for more information. + variant :computer_call, -> { OpenAI::Responses::ResponseComputerToolCall } + + variant :computer_call_output, -> { OpenAI::Responses::ResponseComputerToolCallOutputItem } + + # A description of the chain of thought used by a reasoning model while generating + # a response. Be sure to include these items in your `input` to the Responses API + # for subsequent turns of a conversation if you are manually + # [managing context](https://platform.openai.com/docs/guides/conversation-state). + variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Conversations::ConversationItem::LocalShellCall } + + # The output of a local shell tool call. + variant :local_shell_call_output, -> { OpenAI::Conversations::ConversationItem::LocalShellCallOutput } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Conversations::ConversationItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Conversations::ConversationItem::McpApprovalRequest } + + # A response to an MCP approval request. + variant :mcp_approval_response, -> { OpenAI::Conversations::ConversationItem::McpApprovalResponse } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Conversations::ConversationItem::McpCall } + + # A call to a custom tool created by the model. + variant :custom_tool_call, -> { OpenAI::Responses::ResponseCustomToolCall } + + # The output of a custom tool call from your code, being sent back to the model. + variant :custom_tool_call_output, -> { OpenAI::Responses::ResponseCustomToolCallOutput } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. + # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. 
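require "base64"

# Editor's note, not part of this patch: `result` holds the generated image as
# a base64 string (nil until generation completes). A minimal decode sketch;
# `call` is a hypothetical ImageGenerationCall and the PNG extension is an
# assumption about the output format.
if (encoded = call.result)
  File.binwrite("generated.png", Base64.decode64(encoded))
end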
+ # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. + # + # @return [Symbol, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall} for more + # details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Models::Conversations::ConversationItem::LocalShellCall::Action] + required :action, -> { OpenAI::Conversations::ConversationItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Models::Conversations::ConversationItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Conversations::ConversationItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::LocalShellCall} for more + # details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Models::Conversations::ConversationItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Models::Conversations::ConversationItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. 
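# Editor's note, not part of this patch: a hedged sketch of running the `exec`
# action carried by a `local_shell_call` item; a real integration should
# sandbox this. `call` is a hypothetical LocalShellCall; `env`, `command`, and
# `working_directory` map onto the Action attributes defined below.
action = call.action
env = action.env.transform_keys(&:to_s) # IO.popen expects String env keys
output = IO.popen(env, action.command, chdir: action.working_directory || Dir.pwd, &:read)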
+ + # @see OpenAI::Models::Conversations::ConversationItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. + # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::LocalShellCall::Action} for + # more details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Models::Conversations::ConversationItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :id, String + + # @!attribute output + # A JSON string of the output of the local shell tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the local shell tool call output. Always `local_shell_call_output`. + # + # @return [Symbol, :local_shell_call_output] + required :type, const: :local_shell_call_output + + # @!attribute status + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @return [Symbol, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::Status, nil] + optional :status, + enum: -> { OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status }, + nil?: true + + # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput} for more + # details. + # + # The output of a local shell tool call. + # + # @param id [String] The unique ID of the local shell tool call generated by the model. + # + # @param output [String] A JSON string of the output of the local shell tool call. 
+ # + # @param status [Symbol, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @see OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::ConversationItem::McpListTools::Tool] } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::McpListTools} for more + # details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::McpListTools::Tool} for more + # details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. 
+          #
+          # @return [String]
+          required :id, String
+
+          # @!attribute arguments
+          # A JSON string of arguments for the tool.
+          #
+          # @return [String]
+          required :arguments, String
+
+          # @!attribute name
+          # The name of the tool to run.
+          #
+          # @return [String]
+          required :name, String
+
+          # @!attribute server_label
+          # The label of the MCP server making the request.
+          #
+          # @return [String]
+          required :server_label, String
+
+          # @!attribute type
+          # The type of the item. Always `mcp_approval_request`.
+          #
+          # @return [Symbol, :mcp_approval_request]
+          required :type, const: :mcp_approval_request
+
+          # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request)
+          # Some parameter documentation has been truncated; see
+          # {OpenAI::Models::Conversations::ConversationItem::McpApprovalRequest} for more
+          # details.
+          #
+          # A request for human approval of a tool invocation.
+          #
+          # @param id [String] The unique ID of the approval request.
+          #
+          # @param arguments [String] A JSON string of arguments for the tool.
+          #
+          # @param name [String] The name of the tool to run.
+          #
+          # @param server_label [String] The label of the MCP server making the request.
+          #
+          # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`.
+        end
+
+        class McpApprovalResponse < OpenAI::Internal::Type::BaseModel
+          # @!attribute id
+          # The unique ID of the approval response.
+          #
+          # @return [String]
+          required :id, String
+
+          # @!attribute approval_request_id
+          # The ID of the approval request being answered.
+          #
+          # @return [String]
+          required :approval_request_id, String
+
+          # @!attribute approve
+          # Whether the request was approved.
+          #
+          # @return [Boolean]
+          required :approve, OpenAI::Internal::Type::Boolean
+
+          # @!attribute type
+          # The type of the item. Always `mcp_approval_response`.
+          #
+          # @return [Symbol, :mcp_approval_response]
+          required :type, const: :mcp_approval_response
+
+          # @!attribute reason
+          # Optional reason for the decision.
+          #
+          # @return [String, nil]
+          optional :reason, String, nil?: true
+
+          # @!method initialize(id:, approval_request_id:, approve:, reason: nil, type: :mcp_approval_response)
+          # Some parameter documentation has been truncated; see
+          # {OpenAI::Models::Conversations::ConversationItem::McpApprovalResponse} for more
+          # details.
+          #
+          # A response to an MCP approval request.
+          #
+          # @param id [String] The unique ID of the approval response.
+          #
+          # @param approval_request_id [String] The ID of the approval request being answered.
+          #
+          # @param approve [Boolean] Whether the request was approved.
+          #
+          # @param reason [String, nil] Optional reason for the decision.
+          #
+          # @param type [Symbol, :mcp_approval_response] The type of the item. Always `mcp_approval_response`.
+        end
+
+        class McpCall < OpenAI::Internal::Type::BaseModel
+          # @!attribute id
+          # The unique ID of the tool call.
+          #
+          # @return [String]
+          required :id, String
+
+          # @!attribute arguments
+          # A JSON string of the arguments passed to the tool.
+          #
+          # @return [String]
+          required :arguments, String
+
+          # @!attribute name
+          # The name of the tool that was run.
+          #
+          # @return [String]
+          required :name, String
+
+          # @!attribute server_label
+          # The label of the MCP server running the tool.
+          #
+          # @return [String]
+          required :server_label, String
+
+          # @!attribute type
+          # The type of the item. Always `mcp_call`.
+          #
+          # @return [Symbol, :mcp_call]
+          required :type, const: :mcp_call
+
+          # @!attribute error
+          # The error from the tool call, if any.
+ # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Conversations::Message, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput, OpenAI::Models::Conversations::ConversationItem::McpListTools, OpenAI::Models::Conversations::ConversationItem::McpApprovalRequest, OpenAI::Models::Conversations::ConversationItem::McpApprovalResponse, OpenAI::Models::Conversations::ConversationItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput)] + end + end + + ConversationItem = Conversations::ConversationItem + end +end diff --git a/lib/openai/models/conversations/conversation_item_list.rb b/lib/openai/models/conversations/conversation_item_list.rb new file mode 100644 index 00000000..4c36f797 --- /dev/null +++ b/lib/openai/models/conversations/conversation_item_list.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations::Items#create + class ConversationItemList < OpenAI::Internal::Type::BaseModel + # @!attribute data + # A list of conversation items. + # + # @return [Array] + required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::ConversationItem] } + + # @!attribute first_id + # The ID of the first item in the list. + # + # @return [String] + required :first_id, String + + # @!attribute has_more + # Whether there are more items available. + # + # @return [Boolean] + required :has_more, OpenAI::Internal::Type::Boolean + + # @!attribute last_id + # The ID of the last item in the list. + # + # @return [String] + required :last_id, String + + # @!attribute object + # The type of object returned, must be `list`. + # + # @return [Symbol, :list] + required :object, const: :list + + # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) + # A list of Conversation items. 
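+        #
+        # @example Illustrative iteration (assumes a configured `client`; the conversation ID is a placeholder)
+        #   list = client.conversations.items.create("conv_123", items: [{type: :message, role: :user, content: "Hi"}])
+        #   list.data.each { |item| puts item.type }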
+ # + # @param data [Array] A list of conversation items. + # + # @param first_id [String] The ID of the first item in the list. + # + # @param has_more [Boolean] Whether there are more items available. + # + # @param last_id [String] The ID of the last item in the list. + # + # @param object [Symbol, :list] The type of object returned, must be `list`. + end + end + + ConversationItemList = Conversations::ConversationItemList + end +end diff --git a/lib/openai/models/conversations/conversation_retrieve_params.rb b/lib/openai/models/conversations/conversation_retrieve_params.rb new file mode 100644 index 00000000..4683d715 --- /dev/null +++ b/lib/openai/models/conversations/conversation_retrieve_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#retrieve + class ConversationRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/conversation_update_params.rb b/lib/openai/models/conversations/conversation_update_params.rb new file mode 100644 index 00000000..d268166e --- /dev/null +++ b/lib/openai/models/conversations/conversation_update_params.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations#update + class ConversationUpdateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + # + # @return [Hash{Symbol=>String}] + required :metadata, OpenAI::Internal::Type::HashOf[String] + + # @!method initialize(metadata:, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationUpdateParams} for more details. + # + # @param metadata [Hash{Symbol=>String}] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/file_citation_body.rb b/lib/openai/models/conversations/file_citation_body.rb new file mode 100644 index 00000000..93d84a93 --- /dev/null +++ b/lib/openai/models/conversations/file_citation_body.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class FileCitationBody < OpenAI::Internal::Type::BaseModel + # @!attribute file_id + # The ID of the file. + # + # @return [String] + required :file_id, String + + # @!attribute filename + # The filename of the file cited. + # + # @return [String] + required :filename, String + + # @!attribute index + # The index of the file in the list of files. + # + # @return [Integer] + required :index, Integer + + # @!attribute type + # The type of the file citation. Always `file_citation`. 
+ # + # @return [Symbol, :file_citation] + required :type, const: :file_citation + + # @!method initialize(file_id:, filename:, index:, type: :file_citation) + # @param file_id [String] The ID of the file. + # + # @param filename [String] The filename of the file cited. + # + # @param index [Integer] The index of the file in the list of files. + # + # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. + end + end + end +end diff --git a/lib/openai/models/conversations/input_file_content.rb b/lib/openai/models/conversations/input_file_content.rb new file mode 100644 index 00000000..1cb5b5fa --- /dev/null +++ b/lib/openai/models/conversations/input_file_content.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class InputFileContent < OpenAI::Internal::Type::BaseModel + # @!attribute file_id + # The ID of the file to be sent to the model. + # + # @return [String, nil] + required :file_id, String, nil?: true + + # @!attribute type + # The type of the input item. Always `input_file`. + # + # @return [Symbol, :input_file] + required :type, const: :input_file + + # @!attribute file_url + # The URL of the file to be sent to the model. + # + # @return [String, nil] + optional :file_url, String + + # @!attribute filename + # The name of the file to be sent to the model. + # + # @return [String, nil] + optional :filename, String + + # @!method initialize(file_id:, file_url: nil, filename: nil, type: :input_file) + # @param file_id [String, nil] The ID of the file to be sent to the model. + # + # @param file_url [String] The URL of the file to be sent to the model. + # + # @param filename [String] The name of the file to be sent to the model. + # + # @param type [Symbol, :input_file] The type of the input item. Always `input_file`. + end + end + end +end diff --git a/lib/openai/models/conversations/input_image_content.rb b/lib/openai/models/conversations/input_image_content.rb new file mode 100644 index 00000000..63d1ef58 --- /dev/null +++ b/lib/openai/models/conversations/input_image_content.rb @@ -0,0 +1,62 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class InputImageContent < OpenAI::Internal::Type::BaseModel + # @!attribute detail + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @return [Symbol, OpenAI::Models::Conversations::InputImageContent::Detail] + required :detail, enum: -> { OpenAI::Conversations::InputImageContent::Detail } + + # @!attribute file_id + # The ID of the file to be sent to the model. + # + # @return [String, nil] + required :file_id, String, nil?: true + + # @!attribute image_url + # The URL of the image to be sent to the model. A fully qualified URL or base64 + # encoded image in a data URL. + # + # @return [String, nil] + required :image_url, String, nil?: true + + # @!attribute type + # The type of the input item. Always `input_image`. + # + # @return [Symbol, :input_image] + required :type, const: :input_image + + # @!method initialize(detail:, file_id:, image_url:, type: :input_image) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::InputImageContent} for more details. + # + # @param detail [Symbol, OpenAI::Models::Conversations::InputImageContent::Detail] The detail level of the image to be sent to the model. 
One of `high`, `low`, or + # + # @param file_id [String, nil] The ID of the file to be sent to the model. + # + # @param image_url [String, nil] The URL of the image to be sent to the model. A fully qualified URL or base64 en + # + # @param type [Symbol, :input_image] The type of the input item. Always `input_image`. + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + # + # @see OpenAI::Models::Conversations::InputImageContent#detail + module Detail + extend OpenAI::Internal::Type::Enum + + LOW = :low + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/conversations/input_text_content.rb b/lib/openai/models/conversations/input_text_content.rb new file mode 100644 index 00000000..81c74843 --- /dev/null +++ b/lib/openai/models/conversations/input_text_content.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class InputTextContent < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text input to the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the input item. Always `input_text`. + # + # @return [Symbol, :input_text] + required :type, const: :input_text + + # @!method initialize(text:, type: :input_text) + # @param text [String] The text input to the model. + # + # @param type [Symbol, :input_text] The type of the input item. Always `input_text`. + end + end + end +end diff --git a/lib/openai/models/conversations/item_create_params.rb b/lib/openai/models/conversations/item_create_params.rb new file mode 100644 index 00000000..bebc18e0 --- /dev/null +++ b/lib/openai/models/conversations/item_create_params.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations::Items#create + class ItemCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute items + # The items to add to the conversation. You may add up to 20 items at a time. + # + # @return [Array] + required :items, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputItem] } + + # @!attribute include + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. + # + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } + + # @!method initialize(items:, include: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemCreateParams} for more details. + # + # @param items [Array] The items to add to the conversation. You may add up to 20 items at a time. + # + # @param include [Array] Additional fields to include in the response. 
See the `include` + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/item_delete_params.rb b/lib/openai/models/conversations/item_delete_params.rb new file mode 100644 index 00000000..6a2eb57f --- /dev/null +++ b/lib/openai/models/conversations/item_delete_params.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations::Items#delete + class ItemDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute conversation_id + # + # @return [String] + required :conversation_id, String + + # @!method initialize(conversation_id:, request_options: {}) + # @param conversation_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/conversations/item_list_params.rb b/lib/openai/models/conversations/item_list_params.rb new file mode 100644 index 00000000..ffddb386 --- /dev/null +++ b/lib/openai/models/conversations/item_list_params.rb @@ -0,0 +1,84 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations::Items#list + class ItemListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute after + # An item ID to list items after, used in pagination. + # + # @return [String, nil] + optional :after, String + + # @!attribute include + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). + # + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } + + # @!attribute limit + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!attribute order + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. 
+ # + # @return [Symbol, OpenAI::Models::Conversations::ItemListParams::Order, nil] + optional :order, enum: -> { OpenAI::Conversations::ItemListParams::Order } + + # @!method initialize(after: nil, include: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemListParams} for more details. + # + # @param after [String] An item ID to list items after, used in pagination. + # + # @param include [Array] Specify additional output data to include in the model response. Currently + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between + # + # @param order [Symbol, OpenAI::Models::Conversations::ItemListParams::Order] The order to return the input items in. Default is `desc`. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/conversations/item_retrieve_params.rb b/lib/openai/models/conversations/item_retrieve_params.rb new file mode 100644 index 00000000..b6ef4a2f --- /dev/null +++ b/lib/openai/models/conversations/item_retrieve_params.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + # @see OpenAI::Resources::Conversations::Items#retrieve + class ItemRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute conversation_id + # + # @return [String] + required :conversation_id, String + + # @!attribute include + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. + # + # @return [Array, nil] + optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } + + # @!method initialize(conversation_id:, include: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemRetrieveParams} for more details. + # + # @param conversation_id [String] + # + # @param include [Array] Additional fields to include in the response. 
See the `include`
+      #
+      # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}]
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/conversations/lob_prob.rb b/lib/openai/models/conversations/lob_prob.rb
new file mode 100644
index 00000000..60d72ed2
--- /dev/null
+++ b/lib/openai/models/conversations/lob_prob.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module Conversations
+      class LobProb < OpenAI::Internal::Type::BaseModel
+        # @!attribute token
+        #
+        # @return [String]
+        required :token, String
+
+        # @!attribute bytes
+        #
+        # @return [Array<Integer>]
+        required :bytes, OpenAI::Internal::Type::ArrayOf[Integer]
+
+        # @!attribute logprob
+        #
+        # @return [Float]
+        required :logprob, Float
+
+        # @!attribute top_logprobs
+        #
+        # @return [Array<OpenAI::Models::Conversations::TopLogProb>]
+        required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::TopLogProb] }
+
+        # @!method initialize(token:, bytes:, logprob:, top_logprobs:)
+        # @param token [String]
+        # @param bytes [Array<Integer>]
+        # @param logprob [Float]
+        # @param top_logprobs [Array<OpenAI::Models::Conversations::TopLogProb>]
+      end
+    end
+  end
+end
diff --git a/lib/openai/models/conversations/message.rb b/lib/openai/models/conversations/message.rb
new file mode 100644
index 00000000..5b620a90
--- /dev/null
+++ b/lib/openai/models/conversations/message.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module OpenAI
+  module Models
+    module Conversations
+      class Message < OpenAI::Internal::Type::BaseModel
+        # @!attribute id
+        # The unique ID of the message.
+        #
+        # @return [String]
+        required :id, String
+
+        # @!attribute content
+        # The content of the message.
+        #
+        # @return [Array<OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent>]
+        required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::Message::Content] }
+
+        # @!attribute role
+        # The role of the message. One of `unknown`, `user`, `assistant`, `system`,
+        # `critic`, `discriminator`, `developer`, or `tool`.
+        #
+        # @return [Symbol, OpenAI::Models::Conversations::Message::Role]
+        required :role, enum: -> { OpenAI::Conversations::Message::Role }
+
+        # @!attribute status
+        # The status of the item. One of `in_progress`, `completed`, or `incomplete`.
+        # Populated when items are returned via API.
+        #
+        # @return [Symbol, OpenAI::Models::Conversations::Message::Status]
+        required :status, enum: -> { OpenAI::Conversations::Message::Status }
+
+        # @!attribute type
+        # The type of the message. Always set to `message`.
+        #
+        # @return [Symbol, :message]
+        required :type, const: :message
+
+        # @!method initialize(id:, content:, role:, status:, type: :message)
+        # Some parameter documentation has been truncated; see
+        # {OpenAI::Models::Conversations::Message} for more details.
+        #
+        # @param id [String] The unique ID of the message.
+        #
+        # @param content [Array<OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent>] The content of the message.
+        #
+        # @param role [Symbol, OpenAI::Models::Conversations::Message::Role] The role of the message. One of `unknown`, `user`, `assistant`, `system`, `criti
+        #
+        # @param status [Symbol, OpenAI::Models::Conversations::Message::Status] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Populate
+        #
+        # @param type [Symbol, :message] The type of the message. Always set to `message`.
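+
+        # An illustrative sketch (assumes `message` is a fetched
+        # OpenAI::Models::Conversations::Message): `content` is a union-typed array,
+        # so dispatching on the concrete content class is one way to consume it.
+        #
+        #   message.content.each do |part|
+        #     case part
+        #     when OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent
+        #       puts part.text
+        #     when OpenAI::Models::Conversations::RefusalContent
+        #       puts part.refusal
+        #     end
+        #   end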
+ + module Content + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :input_text, -> { OpenAI::Conversations::InputTextContent } + + variant :output_text, -> { OpenAI::Conversations::OutputTextContent } + + variant :text, -> { OpenAI::Conversations::TextContent } + + variant :summary_text, -> { OpenAI::Conversations::SummaryTextContent } + + variant :refusal, -> { OpenAI::Conversations::RefusalContent } + + variant :input_image, -> { OpenAI::Conversations::InputImageContent } + + variant :computer_screenshot, -> { OpenAI::Conversations::ComputerScreenshotContent } + + variant :input_file, -> { OpenAI::Conversations::InputFileContent } + + # @!method self.variants + # @return [Array(OpenAI::Models::Conversations::InputTextContent, OpenAI::Models::Conversations::OutputTextContent, OpenAI::Models::Conversations::TextContent, OpenAI::Models::Conversations::SummaryTextContent, OpenAI::Models::Conversations::RefusalContent, OpenAI::Models::Conversations::InputImageContent, OpenAI::Models::Conversations::ComputerScreenshotContent, OpenAI::Models::Conversations::InputFileContent)] + end + + # The role of the message. One of `unknown`, `user`, `assistant`, `system`, + # `critic`, `discriminator`, `developer`, or `tool`. + # + # @see OpenAI::Models::Conversations::Message#role + module Role + extend OpenAI::Internal::Type::Enum + + UNKNOWN = :unknown + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + CRITIC = :critic + DISCRIMINATOR = :discriminator + DEVELOPER = :developer + TOOL = :tool + + # @!method self.values + # @return [Array] + end + + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + # + # @see OpenAI::Models::Conversations::Message#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/conversations/output_text_content.rb b/lib/openai/models/conversations/output_text_content.rb new file mode 100644 index 00000000..23791d5b --- /dev/null +++ b/lib/openai/models/conversations/output_text_content.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class OutputTextContent < OpenAI::Internal::Type::BaseModel + # @!attribute annotations + # The annotations of the text output. + # + # @return [Array] + required :annotations, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::OutputTextContent::Annotation] } + + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!attribute logprobs + # + # @return [Array, nil] + optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::LobProb] } + + # @!method initialize(annotations:, text:, logprobs: nil, type: :output_text) + # @param annotations [Array] The annotations of the text output. + # + # @param text [String] The text output from the model. + # + # @param logprobs [Array] + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. 
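+
+        # Illustrative only (assumes `content` is a fetched OutputTextContent):
+        # each annotation is one of the citation bodies below, discriminated by `type`.
+        #
+        #   content.annotations.each do |ann|
+        #     puts ann.url if ann.is_a?(OpenAI::Models::Conversations::URLCitationBody)
+        #   end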
+ + module Annotation + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_citation, -> { OpenAI::Conversations::FileCitationBody } + + variant :url_citation, -> { OpenAI::Conversations::URLCitationBody } + + variant :container_file_citation, -> { OpenAI::Conversations::ContainerFileCitationBody } + + # @!method self.variants + # @return [Array(OpenAI::Models::Conversations::FileCitationBody, OpenAI::Models::Conversations::URLCitationBody, OpenAI::Models::Conversations::ContainerFileCitationBody)] + end + end + end + end +end diff --git a/lib/openai/models/conversations/refusal_content.rb b/lib/openai/models/conversations/refusal_content.rb new file mode 100644 index 00000000..2b1cdce3 --- /dev/null +++ b/lib/openai/models/conversations/refusal_content.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class RefusalContent < OpenAI::Internal::Type::BaseModel + # @!attribute refusal + # The refusal explanation from the model. + # + # @return [String] + required :refusal, String + + # @!attribute type + # The type of the refusal. Always `refusal`. + # + # @return [Symbol, :refusal] + required :type, const: :refusal + + # @!method initialize(refusal:, type: :refusal) + # @param refusal [String] The refusal explanation from the model. + # + # @param type [Symbol, :refusal] The type of the refusal. Always `refusal`. + end + end + end +end diff --git a/lib/openai/models/conversations/summary_text_content.rb b/lib/openai/models/conversations/summary_text_content.rb new file mode 100644 index 00000000..e3768df9 --- /dev/null +++ b/lib/openai/models/conversations/summary_text_content.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class SummaryTextContent < OpenAI::Internal::Type::BaseModel + # @!attribute text + # + # @return [String] + required :text, String + + # @!attribute type + # + # @return [Symbol, :summary_text] + required :type, const: :summary_text + + # @!method initialize(text:, type: :summary_text) + # @param text [String] + # @param type [Symbol, :summary_text] + end + end + end +end diff --git a/lib/openai/models/conversations/text_content.rb b/lib/openai/models/conversations/text_content.rb new file mode 100644 index 00000000..3fa27b08 --- /dev/null +++ b/lib/openai/models/conversations/text_content.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class TextContent < OpenAI::Internal::Type::BaseModel + # @!attribute text + # + # @return [String] + required :text, String + + # @!attribute type + # + # @return [Symbol, :text] + required :type, const: :text + + # @!method initialize(text:, type: :text) + # @param text [String] + # @param type [Symbol, :text] + end + end + end +end diff --git a/lib/openai/models/conversations/top_log_prob.rb b/lib/openai/models/conversations/top_log_prob.rb new file mode 100644 index 00000000..4677b3bf --- /dev/null +++ b/lib/openai/models/conversations/top_log_prob.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class TopLogProb < OpenAI::Internal::Type::BaseModel + # @!attribute token + # + # @return [String] + required :token, String + + # @!attribute bytes + # + # @return [Array] + required :bytes, OpenAI::Internal::Type::ArrayOf[Integer] + + # @!attribute logprob + # + # @return [Float] + required :logprob, Float + + # @!method initialize(token:, bytes:, logprob:) + # @param token 
[String] + # @param bytes [Array] + # @param logprob [Float] + end + end + end +end diff --git a/lib/openai/models/conversations/url_citation_body.rb b/lib/openai/models/conversations/url_citation_body.rb new file mode 100644 index 00000000..bab5ffdf --- /dev/null +++ b/lib/openai/models/conversations/url_citation_body.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Conversations + class URLCitationBody < OpenAI::Internal::Type::BaseModel + # @!attribute end_index + # The index of the last character of the URL citation in the message. + # + # @return [Integer] + required :end_index, Integer + + # @!attribute start_index + # The index of the first character of the URL citation in the message. + # + # @return [Integer] + required :start_index, Integer + + # @!attribute title + # The title of the web resource. + # + # @return [String] + required :title, String + + # @!attribute type + # The type of the URL citation. Always `url_citation`. + # + # @return [Symbol, :url_citation] + required :type, const: :url_citation + + # @!attribute url + # The URL of the web resource. + # + # @return [String] + required :url, String + + # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # @param end_index [Integer] The index of the last character of the URL citation in the message. + # + # @param start_index [Integer] The index of the first character of the URL citation in the message. + # + # @param title [String] The title of the web resource. + # + # @param url [String] The URL of the web resource. + # + # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. + end + end + end +end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index b8723853..3dc158bd 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -222,7 +222,7 @@ class Template < OpenAI::Internal::Type::BaseModel # A list of chat messages forming the prompt or context. May include variable # references to the `item` namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :template, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] } @@ -237,7 +237,7 @@ class Template < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} # for more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. @@ -249,49 +249,46 @@ class Template < OpenAI::Internal::Type::BaseModel module Template extend OpenAI::Internal::Type::Union - discriminator :type - # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. 
- variant :message, -> { OpenAI::Responses::EasyInputMessage } + variant -> { OpenAI::Responses::EasyInputMessage } # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, - -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem } - class Message < OpenAI::Internal::Type::BaseModel + class EvalItem < OpenAI::Internal::Type::BaseModel # @!attribute content # Inputs to the model - can contain template strings. # - # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array] + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] required :content, - union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content } + union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role] required :role, - enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role } + enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role } # @!attribute type # The type of the message input. Always `message`. # - # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] optional :type, - enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type } + enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem} # for more details. 
# # A message input to the model with a role indicating instruction following @@ -300,15 +297,15 @@ class Message < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array] Inputs to the model - can contain template strings. + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array] Inputs to the model - can contain template strings. # - # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. # Inputs to the model - can contain template strings. # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem#content module Content extend OpenAI::Internal::Type::Union @@ -319,13 +316,13 @@ module Content variant -> { OpenAI::Responses::ResponseInputText } # A text output from the model. - variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText } # An image input to the model. - variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage } + variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage } # A list of inputs, each of which may be either an input text or input image object. 
- variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::AnArrayOfInputTextAndInputImageArray } + variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::AnArrayOfInputTextAndInputImageArray } class OutputText < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -342,7 +339,7 @@ class OutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :output_text) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} # for more details. # # A text output from the model. @@ -374,7 +371,7 @@ class InputImage < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, detail: nil, type: :input_image) # Some parameter documentations has been truncated, see - # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage} # for more details. # # An image input to the model. @@ -387,7 +384,7 @@ class InputImage < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, Array)] + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, Array)] # @type [OpenAI::Internal::Type::Converter] AnArrayOfInputTextAndInputImageArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] @@ -396,7 +393,7 @@ class InputImage < OpenAI::Internal::Type::BaseModel # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem#role module Role extend OpenAI::Internal::Type::Enum @@ -411,7 +408,7 @@ module Role # The type of the message input. Always `message`. 
# - # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem#type module Type extend OpenAI::Internal::Type::Enum @@ -423,7 +420,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem)] end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index cb40574b..b627afea 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -14,12 +14,6 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!attribute before - # An item ID to list items before, used in pagination. - # - # @return [String, nil] - optional :before, String - # @!attribute include # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. @@ -43,14 +37,12 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Responses::InputItemListParams::Order } - # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @!method initialize(after: nil, include: nil, limit: nil, order: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::InputItemListParams} for more details. # # @param after [String] An item ID to list items after, used in pagination. # - # @param before [String] An item ID to list items before, used in pagination. - # # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 8dbd7a4b..ec153ad5 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -141,6 +141,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute conversation + # The conversation that this response belongs to. Input items and output items + # from this response are automatically added to this conversation. + # + # @return [OpenAI::Models::Responses::Response::Conversation, nil] + optional :conversation, -> { OpenAI::Responses::Response::Conversation }, nil?: true + # @!attribute max_output_tokens # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and @@ -162,6 +169,7 @@ class Response < OpenAI::Internal::Type::BaseModel # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. 
# # @return [String, nil] optional :previous_response_id, String, nil?: true @@ -276,7 +284,7 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, conversation: nil, max_output_tokens: nil, max_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, status: nil, text: nil, top_logprobs: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Response} for more details. # @@ -308,6 +316,8 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param background [Boolean, nil] Whether to run the model response in the background. # + # @param conversation [OpenAI::Models::Responses::Response::Conversation, nil] The conversation that this response belongs to. Input items and output items fro + # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param max_tool_calls [Integer, nil] The maximum number of total calls to built-in tools that can be processed in a r @@ -429,6 +439,21 @@ module ToolChoice # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom)] end + # @see OpenAI::Models::Responses::Response#conversation + class Conversation < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the conversation. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # The conversation that this response belongs to. Input items and output items + # from this response are automatically added to this conversation. + # + # @param id [String] The unique ID of the conversation. + end + # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier diff --git a/lib/openai/models/responses/response_conversation_param.rb b/lib/openai/models/responses/response_conversation_param.rb new file mode 100644 index 00000000..b4ab2977 --- /dev/null +++ b/lib/openai/models/responses/response_conversation_param.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseConversationParam < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the conversation. + # + # @return [String] + required :id, String + + # @!method initialize(id:) + # The conversation that this response belongs to. + # + # @param id [String] The unique ID of the conversation. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 7161a8d4..6a193914 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -17,6 +17,19 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute conversation + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + # + # @return [String, OpenAI::Models::Responses::ResponseConversationParam, nil] + optional :conversation, + union: -> { + OpenAI::Responses::ResponseCreateParams::Conversation + }, + nil?: true + # @!attribute include # Specify additional output data to include in the model response. Currently # supported values are: @@ -112,6 +125,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. # # @return [String, nil] optional :previous_response_id, String, nil?: true @@ -272,12 +286,14 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!method initialize(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(background: nil, conversation: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # # @param background [Boolean, nil] Whether to run the model response in the background. # + # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are + # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. @@ -328,6 +344,23 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. 
Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + module Conversation + extend OpenAI::Internal::Type::Union + + # The unique ID of the conversation. + variant String + + # The conversation that this response belongs to. + variant -> { OpenAI::Responses::ResponseConversationParam } + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseConversationParam)] + end + # Text, image, or file inputs to the model, used to generate a response. # # Learn more: diff --git a/lib/openai/resources/conversations.rb b/lib/openai/resources/conversations.rb new file mode 100644 index 00000000..34d66da5 --- /dev/null +++ b/lib/openai/resources/conversations.rb @@ -0,0 +1,112 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Conversations + # @return [OpenAI::Resources::Conversations::Items] + attr_reader :items + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationCreateParams} for more details. + # + # Create a conversation with the given ID. + # + # @overload create(items: nil, metadata: nil, request_options: {}) + # + # @param items [Array, nil] Initial items to include in the conversation context. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. Useful for + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::Conversation] + # + # @see OpenAI::Models::Conversations::ConversationCreateParams + def create(params = {}) + parsed, options = OpenAI::Conversations::ConversationCreateParams.dump_request(params) + @client.request( + method: :post, + path: "conversations", + body: parsed, + model: OpenAI::Conversations::Conversation, + options: options + ) + end + + # Get a conversation with the given ID. + # + # @overload retrieve(conversation_id, request_options: {}) + # + # @param conversation_id [String] The ID of the conversation to retrieve. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::Conversation] + # + # @see OpenAI::Models::Conversations::ConversationRetrieveParams + def retrieve(conversation_id, params = {}) + @client.request( + method: :get, + path: ["conversations/%1$s", conversation_id], + model: OpenAI::Conversations::Conversation, + options: params[:request_options] + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ConversationUpdateParams} for more details. + # + # Update a conversation's metadata with the given ID. + # + # @overload update(conversation_id, metadata:, request_options: {}) + # + # @param conversation_id [String] The ID of the conversation to update. + # + # @param metadata [Hash{Symbol=>String}] Set of 16 key-value pairs that can be attached to an object. 
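# A hedged sketch of the new resource, assuming a configured `client`; the
# metadata keys are illustrative placeholders.
conversation = client.conversations.create(metadata: {topic: "demo"})
fetched = client.conversations.retrieve(conversation.id)
puts fetched.id == conversation.id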
This can be + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::Conversation] + # + # @see OpenAI::Models::Conversations::ConversationUpdateParams + def update(conversation_id, params) + parsed, options = OpenAI::Conversations::ConversationUpdateParams.dump_request(params) + @client.request( + method: :post, + path: ["conversations/%1$s", conversation_id], + body: parsed, + model: OpenAI::Conversations::Conversation, + options: options + ) + end + + # Delete a conversation with the given ID. + # + # @overload delete(conversation_id, request_options: {}) + # + # @param conversation_id [String] The ID of the conversation to delete. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::ConversationDeletedResource] + # + # @see OpenAI::Models::Conversations::ConversationDeleteParams + def delete(conversation_id, params = {}) + @client.request( + method: :delete, + path: ["conversations/%1$s", conversation_id], + model: OpenAI::Conversations::ConversationDeletedResource, + options: params[:request_options] + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @items = OpenAI::Resources::Conversations::Items.new(client: client) + end + end + end +end diff --git a/lib/openai/resources/conversations/items.rb b/lib/openai/resources/conversations/items.rb new file mode 100644 index 00000000..90bfac5a --- /dev/null +++ b/lib/openai/resources/conversations/items.rb @@ -0,0 +1,141 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Conversations + class Items + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemCreateParams} for more details. + # + # Create items in a conversation with the given ID. + # + # @overload create(conversation_id, items:, include: nil, request_options: {}) + # + # @param conversation_id [String] Path param: The ID of the conversation to add the item to. + # + # @param items [Array] Body param: The items to add to the conversation. You may add up to 20 items at + # + # @param include [Array] Query param: Additional fields to include in the response. See the `include` + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::ConversationItemList] + # + # @see OpenAI::Models::Conversations::ItemCreateParams + def create(conversation_id, params) + parsed, options = OpenAI::Conversations::ItemCreateParams.dump_request(params) + query_params = [:include] + @client.request( + method: :post, + path: ["conversations/%1$s/items", conversation_id], + query: parsed.slice(*query_params), + body: parsed.except(*query_params), + model: OpenAI::Conversations::ConversationItemList, + options: options + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemRetrieveParams} for more details. + # + # Get a single item from a conversation with the given IDs. + # + # @overload retrieve(item_id, conversation_id:, include: nil, request_options: {}) + # + # @param item_id [String] Path param: The ID of the item to retrieve. + # + # @param conversation_id [String] Path param: The ID of the conversation that contains the item. + # + # @param include [Array] Query param: Additional fields to include in the response. 
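# Continuing the sketch above: `update` requires `metadata:`, and `delete`
# takes only the conversation ID ("conv_done_456" is a hypothetical value).
client.conversations.update(conversation.id, metadata: {topic: "resolved"})
client.conversations.delete("conv_done_456")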
See the `include` + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::Message, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCall, OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput, OpenAI::Models::Conversations::ConversationItem::McpListTools, OpenAI::Models::Conversations::ConversationItem::McpApprovalRequest, OpenAI::Models::Conversations::ConversationItem::McpApprovalResponse, OpenAI::Models::Conversations::ConversationItem::McpCall, OpenAI::Models::Responses::ResponseCustomToolCall, OpenAI::Models::Responses::ResponseCustomToolCallOutput] + # + # @see OpenAI::Models::Conversations::ItemRetrieveParams + def retrieve(item_id, params) + parsed, options = OpenAI::Conversations::ItemRetrieveParams.dump_request(params) + conversation_id = + parsed.delete(:conversation_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["conversations/%1$s/items/%2$s", conversation_id, item_id], + query: parsed, + model: OpenAI::Conversations::ConversationItem, + options: options + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Conversations::ItemListParams} for more details. + # + # List all items for a conversation with the given ID. + # + # @overload list(conversation_id, after: nil, include: nil, limit: nil, order: nil, request_options: {}) + # + # @param conversation_id [String] The ID of the conversation to list items for. + # + # @param after [String] An item ID to list items after, used in pagination. + # + # @param include [Array] Specify additional output data to include in the model response. Currently + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between + # + # @param order [Symbol, OpenAI::Models::Conversations::ItemListParams::Order] The order to return the input items in. Default is `desc`. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::ConversationCursorPage] + # + # @see OpenAI::Models::Conversations::ItemListParams + def list(conversation_id, params = {}) + parsed, options = OpenAI::Conversations::ItemListParams.dump_request(params) + @client.request( + method: :get, + path: ["conversations/%1$s/items", conversation_id], + query: parsed, + page: OpenAI::Internal::ConversationCursorPage, + model: OpenAI::Conversations::ConversationItem, + options: options + ) + end + + # Delete an item from a conversation with the given IDs. + # + # @overload delete(item_id, conversation_id:, request_options: {}) + # + # @param item_id [String] The ID of the item to delete. + # + # @param conversation_id [String] The ID of the conversation that contains the item. 
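# An illustrative listing sketch: `list` returns an
# OpenAI::Internal::ConversationCursorPage, so items can be walked across
# pages (assuming the SDK's base page type provides `auto_paging_each`).
page = client.conversations.items.list(conversation.id, limit: 20, order: :asc)
page.auto_paging_each { |item| puts item.class }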
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Conversations::Conversation] + # + # @see OpenAI::Models::Conversations::ItemDeleteParams + def delete(item_id, params) + parsed, options = OpenAI::Conversations::ItemDeleteParams.dump_request(params) + conversation_id = + parsed.delete(:conversation_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :delete, + path: ["conversations/%1$s/items/%2$s", conversation_id, item_id], + model: OpenAI::Conversations::Conversation, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end +end diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 150220b3..38fd98a1 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,10 +23,12 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(background: nil, conversation: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # + # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are + # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. @@ -112,10 +114,12 @@ def create(params = {}) # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
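# A sketch of binding a response to a conversation; the model name and input
# are placeholders, and `conversation:` also accepts a
# ResponseConversationParam (or equivalent hash) instead of a bare ID string.
response = client.responses.create(
  model: "gpt-4.1",
  input: "Summarize the conversation so far.",
  conversation: conversation.id
)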
# - # @overload stream_raw(background: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(background: nil, conversation: nil, include: nil, input: nil, instructions: nil, max_output_tokens: nil, max_tool_calls: nil, metadata: nil, model: nil, parallel_tool_calls: nil, previous_response_id: nil, prompt: nil, prompt_cache_key: nil, reasoning: nil, safety_identifier: nil, service_tier: nil, store: nil, stream_options: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # # @param background [Boolean, nil] Whether to run the model response in the background. # + # @param conversation [String, OpenAI::Models::Responses::ResponseConversationParam, nil] The conversation that this response belongs to. Items from this conversation are + # # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 9b62eaa2..551f4f38 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -9,14 +9,12 @@ class InputItems # # Returns a list of input items for a given response. # - # @overload list(response_id, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @overload list(response_id, after: nil, include: nil, limit: nil, order: nil, request_options: {}) # # @param response_id [String] The ID of the response to retrieve input items for. # # @param after [String] An item ID to list items after, used in pagination. # - # @param before [String] An item ID to list items before, used in pagination. - # # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. 
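# With `before` removed, input-item pagination is forward-only via `after`;
# a sketch using hypothetical response and item IDs:
items = client.responses.input_items.list("resp_123", after: "msg_abc", limit: 10)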
Limit can range between diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index 64ac2f60..260d8bb8 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -67,6 +67,9 @@ module OpenAI sig { returns(OpenAI::Resources::Responses) } attr_reader :responses + sig { returns(OpenAI::Resources::Conversations) } + attr_reader :conversations + sig { returns(OpenAI::Resources::Evals) } attr_reader :evals diff --git a/rbi/openai/internal/conversation_cursor_page.rbi b/rbi/openai/internal/conversation_cursor_page.rbi new file mode 100644 index 00000000..122b3c11 --- /dev/null +++ b/rbi/openai/internal/conversation_cursor_page.rbi @@ -0,0 +1,25 @@ +# typed: strong + +module OpenAI + module Internal + class ConversationCursorPage + include OpenAI::Internal::Type::BasePage + + Elem = type_member + + sig { returns(T.nilable(T::Array[Elem])) } + attr_accessor :data + + sig { returns(T::Boolean) } + attr_accessor :has_more + + sig { returns(String) } + attr_accessor :last_id + + # @api private + sig { returns(String) } + def inspect + end + end + end +end diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index c2fa35a9..54cd5582 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -53,6 +53,8 @@ module OpenAI Containers = OpenAI::Models::Containers + Conversations = OpenAI::Models::Conversations + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat diff --git a/rbi/openai/models/conversations/computer_screenshot_content.rbi b/rbi/openai/models/conversations/computer_screenshot_content.rbi new file mode 100644 index 00000000..50a29357 --- /dev/null +++ b/rbi/openai/models/conversations/computer_screenshot_content.rbi @@ -0,0 +1,60 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ComputerScreenshotContent, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of an uploaded file that contains the screenshot. + sig { returns(T.nilable(String)) } + attr_accessor :file_id + + # The URL of the screenshot image. + sig { returns(T.nilable(String)) } + attr_accessor :image_url + + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The identifier of an uploaded file that contains the screenshot. + file_id:, + # The URL of the screenshot image. + image_url:, + # Specifies the event type. For a computer screenshot, this property is always set + # to `computer_screenshot`. 
+ type: :computer_screenshot + ) + end + + sig do + override.returns( + { + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/container_file_citation_body.rbi b/rbi/openai/models/conversations/container_file_citation_body.rbi new file mode 100644 index 00000000..d828926e --- /dev/null +++ b/rbi/openai/models/conversations/container_file_citation_body.rbi @@ -0,0 +1,82 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ContainerFileCitationBody < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ContainerFileCitationBody, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the container file. + sig { returns(String) } + attr_accessor :container_id + + # The index of the last character of the container file citation in the message. + sig { returns(Integer) } + attr_accessor :end_index + + # The ID of the file. + sig { returns(String) } + attr_accessor :file_id + + # The filename of the container file cited. + sig { returns(String) } + attr_accessor :filename + + # The index of the first character of the container file citation in the message. + sig { returns(Integer) } + attr_accessor :start_index + + # The type of the container file citation. Always `container_file_citation`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + container_id: String, + end_index: Integer, + file_id: String, + filename: String, + start_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the container file. + container_id:, + # The index of the last character of the container file citation in the message. + end_index:, + # The ID of the file. + file_id:, + # The filename of the container file cited. + filename:, + # The index of the first character of the container file citation in the message. + start_index:, + # The type of the container file citation. Always `container_file_citation`. + type: :container_file_citation + ) + end + + sig do + override.returns( + { + container_id: String, + end_index: Integer, + file_id: String, + filename: String, + start_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation.rbi b/rbi/openai/models/conversations/conversation.rbi new file mode 100644 index 00000000..f60e90df --- /dev/null +++ b/rbi/openai/models/conversations/conversation.rbi @@ -0,0 +1,76 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class Conversation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::Conversation, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the conversation. + sig { returns(String) } + attr_accessor :id + + # The time at which the conversation was created, measured in seconds since the + # Unix epoch. + sig { returns(Integer) } + attr_accessor :created_at + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + sig { returns(T.anything) } + attr_accessor :metadata + + # The object type, which is always `conversation`. 
+ sig { returns(Symbol) } + attr_accessor :object + + sig do + params( + id: String, + created_at: Integer, + metadata: T.anything, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the conversation. + id:, + # The time at which the conversation was created, measured in seconds since the + # Unix epoch. + created_at:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + metadata:, + # The object type, which is always `conversation`. + object: :conversation + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + metadata: T.anything, + object: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_create_params.rbi b/rbi/openai/models/conversations/conversation_create_params.rbi new file mode 100644 index 00000000..72463e9b --- /dev/null +++ b/rbi/openai/models/conversations/conversation_create_params.rbi @@ -0,0 +1,144 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationCreateParams, + OpenAI::Internal::AnyHash + ) + end + + # Initial items to include in the conversation context. You may add up to 20 items + # at a time. + sig do + returns( + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + ] + ) + ) + end + attr_accessor :items + + # Set of 16 key-value pairs that can be attached to an object. Useful for storing + # additional information about the object in a structured format. 
+ sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + sig do + params( + items: + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseInputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseInputItem::McpCall::OrHash, + OpenAI::Responses::ResponseCustomToolCallOutput::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # Initial items to include in the conversation context. You may add up to 20 items + # at a time. + items: nil, + # Set of 16 key-value pairs that can be attached to an object. Useful for storing + # additional information about the object in a structured format. 
+ metadata: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + items: + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_delete_params.rbi b/rbi/openai/models/conversations/conversation_delete_params.rbi new file mode 100644 index 00000000..672f47bf --- /dev/null +++ b/rbi/openai/models/conversations/conversation_delete_params.rbi @@ -0,0 +1,32 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationDeleteParams, + OpenAI::Internal::AnyHash + ) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_deleted.rbi b/rbi/openai/models/conversations/conversation_deleted.rbi new file mode 100644 index 00000000..eabe5dd0 --- /dev/null +++ b/rbi/openai/models/conversations/conversation_deleted.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationDeleted < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationDeleted, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :id + + sig { returns(T::Boolean) } + attr_accessor :deleted + + sig { returns(Symbol) } + attr_accessor :object + + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"conversation.deleted") + end + + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_deleted_resource.rbi b/rbi/openai/models/conversations/conversation_deleted_resource.rbi new file mode 100644 index 00000000..a415a346 --- /dev/null +++ 
b/rbi/openai/models/conversations/conversation_deleted_resource.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationDeletedResource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationDeletedResource, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :id + + sig { returns(T::Boolean) } + attr_accessor :deleted + + sig { returns(Symbol) } + attr_accessor :object + + sig do + params(id: String, deleted: T::Boolean, object: Symbol).returns( + T.attached_class + ) + end + def self.new(id:, deleted:, object: :"conversation.deleted") + end + + sig do + override.returns({ id: String, deleted: T::Boolean, object: Symbol }) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_item.rbi b/rbi/openai/models/conversations/conversation_item.rbi new file mode 100644 index 00000000..d4c14870 --- /dev/null +++ b/rbi/openai/models/conversations/conversation_item.rbi @@ -0,0 +1,835 @@ +# typed: strong + +module OpenAI + module Models + ConversationItem = Conversations::ConversationItem + + module Conversations + # A single item within a conversation. The set of possible types are the same as + # the `output` type of a + # [Response object](https://platform.openai.com/docs/api-reference/responses/object#responses/object-output). + module ConversationItem + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Conversations::Message, + OpenAI::Responses::ResponseFunctionToolCallItem, + OpenAI::Responses::ResponseFunctionToolCallOutputItem, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Conversations::ConversationItem::ImageGenerationCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseComputerToolCallOutputItem, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Conversations::ConversationItem::LocalShellCall, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput, + OpenAI::Conversations::ConversationItem::McpListTools, + OpenAI::Conversations::ConversationItem::McpApprovalRequest, + OpenAI::Conversations::ConversationItem::McpApprovalResponse, + OpenAI::Conversations::ConversationItem::McpCall, + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Responses::ResponseCustomToolCallOutput + ) + end + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. 
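# One illustrative way to branch on the ConversationItem union when reading
# items back (e.g. `item` from the `list` sketch earlier); only two of the
# variants are shown here.
case item
when OpenAI::Conversations::Message
  puts item.id
when OpenAI::Conversations::ConversationItem::ImageGenerationCall
  puts item.status
else
  puts item.class
end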
+ id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns( + OpenAI::Conversations::ConversationItem::LocalShellCall::Action + ) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Conversations::ConversationItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Conversations::ConversationItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. 
+ type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: + OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. + sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. + sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Conversations::ConversationItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::ConversationItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::LocalShellCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the output of the local shell tool call. + sig { returns(String) } + attr_accessor :output + + # The type of the local shell tool call output. Always `local_shell_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. + sig do + returns( + T.nilable( + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ) + ) + end + attr_accessor :status + + # The output of a local shell tool call. + sig do + params( + id: String, + output: String, + status: + T.nilable( + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::OrSymbol + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell tool call generated by the model. + id:, + # A JSON string of the output of the local shell tool call. + output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + status: nil, + # The type of the local shell tool call output. Always `local_shell_call_output`. + type: :local_shell_call_output + ) + end + + sig do + override.returns( + { + id: String, + output: String, + type: Symbol, + status: + T.nilable( + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ) + } + ) + end + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[ + OpenAI::Conversations::ConversationItem::McpListTools::Tool + ] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Conversations::ConversationItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. 
+ type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Conversations::ConversationItem::McpListTools::Tool + ], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::McpApprovalResponse, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval response + sig { returns(String) } + attr_accessor :id + + # The ID of the approval request being answered. + sig { returns(String) } + attr_accessor :approval_request_id + + # Whether the request was approved. + sig { returns(T::Boolean) } + attr_accessor :approve + + # The type of the item. Always `mcp_approval_response`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Optional reason for the decision. + sig { returns(T.nilable(String)) } + attr_accessor :reason + + # A response to an MCP approval request. + sig do + params( + id: String, + approval_request_id: String, + approve: T::Boolean, + reason: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval response + id:, + # The ID of the approval request being answered. + approval_request_id:, + # Whether the request was approved. + approve:, + # Optional reason for the decision. + reason: nil, + # The type of the item. Always `mcp_approval_response`. + type: :mcp_approval_response + ) + end + + sig do + override.returns( + { + id: String, + approval_request_id: String, + approve: T::Boolean, + type: Symbol, + reason: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. + type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[OpenAI::Conversations::ConversationItem::Variants] + ) + end + def self.variants + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_item_list.rbi b/rbi/openai/models/conversations/conversation_item_list.rbi new file mode 100644 index 00000000..f8931151 --- /dev/null +++ b/rbi/openai/models/conversations/conversation_item_list.rbi @@ -0,0 +1,101 @@ +# typed: strong + +module OpenAI + module Models + ConversationItemList = Conversations::ConversationItemList + + module Conversations + class ConversationItemList < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationItemList, + OpenAI::Internal::AnyHash + ) + end + + # A list of conversation items. 
+ sig do + returns(T::Array[OpenAI::Conversations::ConversationItem::Variants]) + end + attr_accessor :data + + # The ID of the first item in the list. + sig { returns(String) } + attr_accessor :first_id + + # Whether there are more items available. + sig { returns(T::Boolean) } + attr_accessor :has_more + + # The ID of the last item in the list. + sig { returns(String) } + attr_accessor :last_id + + # The type of object returned, must be `list`. + sig { returns(Symbol) } + attr_accessor :object + + # A list of Conversation items. + sig do + params( + data: + T::Array[ + T.any( + OpenAI::Conversations::Message::OrHash, + OpenAI::Responses::ResponseFunctionToolCallItem::OrHash, + OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Conversations::ConversationItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCallOutputItem::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Conversations::ConversationItem::LocalShellCall::OrHash, + OpenAI::Conversations::ConversationItem::LocalShellCallOutput::OrHash, + OpenAI::Conversations::ConversationItem::McpListTools::OrHash, + OpenAI::Conversations::ConversationItem::McpApprovalRequest::OrHash, + OpenAI::Conversations::ConversationItem::McpApprovalResponse::OrHash, + OpenAI::Conversations::ConversationItem::McpCall::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash, + OpenAI::Responses::ResponseCustomToolCallOutput::OrHash + ) + ], + first_id: String, + has_more: T::Boolean, + last_id: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of conversation items. + data:, + # The ID of the first item in the list. + first_id:, + # Whether there are more items available. + has_more:, + # The ID of the last item in the list. + last_id:, + # The type of object returned, must be `list`. 
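# Sketch: `items.create` returns a ConversationItemList; the message hash is
# illustrative and is coerced through the input-item union on the way in.
created = client.conversations.items.create(
  conversation.id,
  items: [{type: :message, role: :user, content: "Hello!"}]
)
puts [created.data.length, created.has_more]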
+ object: :list + ) + end + + sig do + override.returns( + { + data: T::Array[OpenAI::Conversations::ConversationItem::Variants], + first_id: String, + has_more: T::Boolean, + last_id: String, + object: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_retrieve_params.rbi b/rbi/openai/models/conversations/conversation_retrieve_params.rbi new file mode 100644 index 00000000..3d403724 --- /dev/null +++ b/rbi/openai/models/conversations/conversation_retrieve_params.rbi @@ -0,0 +1,32 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/conversation_update_params.rbi b/rbi/openai/models/conversations/conversation_update_params.rbi new file mode 100644 index 00000000..5edfee7f --- /dev/null +++ b/rbi/openai/models/conversations/conversation_update_params.rbi @@ -0,0 +1,56 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ConversationUpdateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ConversationUpdateParams, + OpenAI::Internal::AnyHash + ) + end + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :metadata + + sig do + params( + metadata: T::Hash[Symbol, String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. 
+ metadata:, + request_options: {} + ) + end + + sig do + override.returns( + { + metadata: T::Hash[Symbol, String], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/file_citation_body.rbi b/rbi/openai/models/conversations/file_citation_body.rbi new file mode 100644 index 00000000..ec14bf91 --- /dev/null +++ b/rbi/openai/models/conversations/file_citation_body.rbi @@ -0,0 +1,61 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class FileCitationBody < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::FileCitationBody, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the file. + sig { returns(String) } + attr_accessor :file_id + + # The filename of the file cited. + sig { returns(String) } + attr_accessor :filename + + # The index of the file in the list of files. + sig { returns(Integer) } + attr_accessor :index + + # The type of the file citation. Always `file_citation`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + file_id: String, + filename: String, + index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the file. + file_id:, + # The filename of the file cited. + filename:, + # The index of the file in the list of files. + index:, + # The type of the file citation. Always `file_citation`. + type: :file_citation + ) + end + + sig do + override.returns( + { file_id: String, filename: String, index: Integer, type: Symbol } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/input_file_content.rbi b/rbi/openai/models/conversations/input_file_content.rbi new file mode 100644 index 00000000..5516a933 --- /dev/null +++ b/rbi/openai/models/conversations/input_file_content.rbi @@ -0,0 +1,72 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class InputFileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::InputFileContent, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the file to be sent to the model. + sig { returns(T.nilable(String)) } + attr_accessor :file_id + + # The type of the input item. Always `input_file`. + sig { returns(Symbol) } + attr_accessor :type + + # The URL of the file to be sent to the model. + sig { returns(T.nilable(String)) } + attr_reader :file_url + + sig { params(file_url: String).void } + attr_writer :file_url + + # The name of the file to be sent to the model. + sig { returns(T.nilable(String)) } + attr_reader :filename + + sig { params(filename: String).void } + attr_writer :filename + + sig do + params( + file_id: T.nilable(String), + file_url: String, + filename: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the file to be sent to the model. + file_id:, + # The URL of the file to be sent to the model. + file_url: nil, + # The name of the file to be sent to the model. + filename: nil, + # The type of the input item. Always `input_file`. 
+ type: :input_file + ) + end + + sig do + override.returns( + { + file_id: T.nilable(String), + type: Symbol, + file_url: String, + filename: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/input_image_content.rbi b/rbi/openai/models/conversations/input_image_content.rbi new file mode 100644 index 00000000..b9b48a84 --- /dev/null +++ b/rbi/openai/models/conversations/input_image_content.rbi @@ -0,0 +1,113 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class InputImageContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::InputImageContent, + OpenAI::Internal::AnyHash + ) + end + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + sig do + returns( + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol + ) + end + attr_accessor :detail + + # The ID of the file to be sent to the model. + sig { returns(T.nilable(String)) } + attr_accessor :file_id + + # The URL of the image to be sent to the model. A fully qualified URL or base64 + # encoded image in a data URL. + sig { returns(T.nilable(String)) } + attr_accessor :image_url + + # The type of the input item. Always `input_image`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + detail: OpenAI::Conversations::InputImageContent::Detail::OrSymbol, + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + detail:, + # The ID of the file to be sent to the model. + file_id:, + # The URL of the image to be sent to the model. A fully qualified URL or base64 + # encoded image in a data URL. + image_url:, + # The type of the input item. Always `input_image`. + type: :input_image + ) + end + + sig do + override.returns( + { + detail: + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol, + file_id: T.nilable(String), + image_url: T.nilable(String), + type: Symbol + } + ) + end + def to_hash + end + + # The detail level of the image to be sent to the model. One of `high`, `low`, or + # `auto`. Defaults to `auto`. + module Detail + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Conversations::InputImageContent::Detail) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::InputImageContent::Detail::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/conversations/input_text_content.rbi b/rbi/openai/models/conversations/input_text_content.rbi new file mode 100644 index 00000000..1f8e0760 --- /dev/null +++ b/rbi/openai/models/conversations/input_text_content.rbi @@ -0,0 +1,38 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class InputTextContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::InputTextContent, + OpenAI::Internal::AnyHash + ) + end + + # The text input to the model. 
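
# A minimal construction sketch for the three input content parts defined in this
# patch, following their `self.new` signatures; the file ID and image URL are
# illustrative values, not real resources.
text  = OpenAI::Conversations::InputTextContent.new(text: "Describe this image")
image = OpenAI::Conversations::InputImageContent.new(
  detail: :auto,
  file_id: nil,
  image_url: "https://example.com/cat.png"
)
file  = OpenAI::Conversations::InputFileContent.new(file_id: "file_abc123")
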
+ sig { returns(String) } + attr_accessor :text + + # The type of the input item. Always `input_text`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The text input to the model. + text:, + # The type of the input item. Always `input_text`. + type: :input_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/item_create_params.rbi b/rbi/openai/models/conversations/item_create_params.rbi new file mode 100644 index 00000000..2c68cbe9 --- /dev/null +++ b/rbi/openai/models/conversations/item_create_params.rbi @@ -0,0 +1,150 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ItemCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ItemCreateParams, + OpenAI::Internal::AnyHash + ) + end + + # The items to add to the conversation. You may add up to 20 items at a time. + sig do + returns( + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + ] + ) + end + attr_accessor :items + + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. 
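
# A sketch of the request these params type, via the items resource added later
# in this patch; assumes a configured `client`, a hypothetical conversation ID,
# and relies on the EasyInputMessage::OrHash variant accepted by `items`.
client.conversations.items.create(
  "conv_abc123",
  items: [{type: :message, role: :user, content: "Hello!"}],
  include: [:"message.output_text.logprobs"]
)
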
+ sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end + attr_reader :include + + sig do + params( + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ).void + end + attr_writer :include + + sig do + params( + items: + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseInputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseInputItem::McpCall::OrHash, + OpenAI::Responses::ResponseCustomToolCallOutput::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ], + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # The items to add to the conversation. You may add up to 20 items at a time. + items:, + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. 
+ include: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + items: + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Responses::ResponseInputItem::Message, + OpenAI::Responses::ResponseOutputMessage, + OpenAI::Responses::ResponseFileSearchToolCall, + OpenAI::Responses::ResponseComputerToolCall, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput, + OpenAI::Responses::ResponseFunctionWebSearch, + OpenAI::Responses::ResponseFunctionToolCall, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput, + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Responses::ResponseCustomToolCallOutput, + OpenAI::Responses::ResponseCustomToolCall, + OpenAI::Responses::ResponseInputItem::ItemReference + ) + ], + include: + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/item_delete_params.rbi b/rbi/openai/models/conversations/item_delete_params.rbi new file mode 100644 index 00000000..d0464e8e --- /dev/null +++ b/rbi/openai/models/conversations/item_delete_params.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ItemDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ItemDeleteParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :conversation_id + + sig do + params( + conversation_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(conversation_id:, request_options: {}) + end + + sig do + override.returns( + { conversation_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/item_list_params.rbi b/rbi/openai/models/conversations/item_list_params.rbi new file mode 100644 index 00000000..2239e139 --- /dev/null +++ b/rbi/openai/models/conversations/item_list_params.rbi @@ -0,0 +1,174 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ItemListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ItemListParams, + OpenAI::Internal::AnyHash + ) + end + + # An item ID to list items after, used in pagination. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. 
+ # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end + attr_reader :include + + sig do + params( + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ).void + end + attr_writer :include + + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + sig do + returns( + T.nilable(OpenAI::Conversations::ItemListParams::Order::OrSymbol) + ) + end + attr_reader :order + + sig do + params( + order: OpenAI::Conversations::ItemListParams::Order::OrSymbol + ).void + end + attr_writer :order + + sig do + params( + after: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + limit: Integer, + order: OpenAI::Conversations::ItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # An item ID to list items after, used in pagination. + after: nil, + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). + include: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. 
+ order: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + after: String, + include: + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + limit: Integer, + order: OpenAI::Conversations::ItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Conversations::ItemListParams::Order) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ASC = + T.let( + :asc, + OpenAI::Conversations::ItemListParams::Order::TaggedSymbol + ) + DESC = + T.let( + :desc, + OpenAI::Conversations::ItemListParams::Order::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::ItemListParams::Order::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/conversations/item_retrieve_params.rbi b/rbi/openai/models/conversations/item_retrieve_params.rbi new file mode 100644 index 00000000..e175c837 --- /dev/null +++ b/rbi/openai/models/conversations/item_retrieve_params.rbi @@ -0,0 +1,70 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class ItemRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::ItemRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :conversation_id + + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. + sig do + returns( + T.nilable(T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol]) + ) + end + attr_reader :include + + sig do + params( + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] + ).void + end + attr_writer :include + + sig do + params( + conversation_id: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + conversation_id:, + # Additional fields to include in the response. See the `include` parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. 
+ include: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + conversation_id: String, + include: + T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/lob_prob.rbi b/rbi/openai/models/conversations/lob_prob.rbi new file mode 100644 index 00000000..737d318b --- /dev/null +++ b/rbi/openai/models/conversations/lob_prob.rbi @@ -0,0 +1,50 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class LobProb < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Conversations::LobProb, OpenAI::Internal::AnyHash) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + sig { returns(T::Array[OpenAI::Conversations::TopLogProb]) } + attr_accessor :top_logprobs + + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: T::Array[OpenAI::Conversations::TopLogProb::OrHash] + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:, top_logprobs:) + end + + sig do + override.returns( + { + token: String, + bytes: T::Array[Integer], + logprob: Float, + top_logprobs: T::Array[OpenAI::Conversations::TopLogProb] + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/message.rbi b/rbi/openai/models/conversations/message.rbi new file mode 100644 index 00000000..6d6a4e07 --- /dev/null +++ b/rbi/openai/models/conversations/message.rbi @@ -0,0 +1,196 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Conversations::Message, OpenAI::Internal::AnyHash) + end + + # The unique ID of the message. + sig { returns(String) } + attr_accessor :id + + # The content of the message + sig do + returns(T::Array[OpenAI::Conversations::Message::Content::Variants]) + end + attr_accessor :content + + # The role of the message. One of `unknown`, `user`, `assistant`, `system`, + # `critic`, `discriminator`, `developer`, or `tool`. + sig { returns(OpenAI::Conversations::Message::Role::TaggedSymbol) } + attr_accessor :role + + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + sig { returns(OpenAI::Conversations::Message::Status::TaggedSymbol) } + attr_accessor :status + + # The type of the message. Always set to `message`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + id: String, + content: + T::Array[ + T.any( + OpenAI::Conversations::InputTextContent::OrHash, + OpenAI::Conversations::OutputTextContent::OrHash, + OpenAI::Conversations::TextContent::OrHash, + OpenAI::Conversations::SummaryTextContent::OrHash, + OpenAI::Conversations::RefusalContent::OrHash, + OpenAI::Conversations::InputImageContent::OrHash, + OpenAI::Conversations::ComputerScreenshotContent::OrHash, + OpenAI::Conversations::InputFileContent::OrHash + ) + ], + role: OpenAI::Conversations::Message::Role::OrSymbol, + status: OpenAI::Conversations::Message::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the message. + id:, + # The content of the message + content:, + # The role of the message. 
One of `unknown`, `user`, `assistant`, `system`, + # `critic`, `discriminator`, `developer`, or `tool`. + role:, + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. + status:, + # The type of the message. Always set to `message`. + type: :message + ) + end + + sig do + override.returns( + { + id: String, + content: + T::Array[OpenAI::Conversations::Message::Content::Variants], + role: OpenAI::Conversations::Message::Role::TaggedSymbol, + status: OpenAI::Conversations::Message::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Conversations::InputTextContent, + OpenAI::Conversations::OutputTextContent, + OpenAI::Conversations::TextContent, + OpenAI::Conversations::SummaryTextContent, + OpenAI::Conversations::RefusalContent, + OpenAI::Conversations::InputImageContent, + OpenAI::Conversations::ComputerScreenshotContent, + OpenAI::Conversations::InputFileContent + ) + end + + sig do + override.returns( + T::Array[OpenAI::Conversations::Message::Content::Variants] + ) + end + def self.variants + end + end + + # The role of the message. One of `unknown`, `user`, `assistant`, `system`, + # `critic`, `discriminator`, `developer`, or `tool`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::Conversations::Message::Role) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + UNKNOWN = + T.let(:unknown, OpenAI::Conversations::Message::Role::TaggedSymbol) + USER = + T.let(:user, OpenAI::Conversations::Message::Role::TaggedSymbol) + ASSISTANT = + T.let( + :assistant, + OpenAI::Conversations::Message::Role::TaggedSymbol + ) + SYSTEM = + T.let(:system, OpenAI::Conversations::Message::Role::TaggedSymbol) + CRITIC = + T.let(:critic, OpenAI::Conversations::Message::Role::TaggedSymbol) + DISCRIMINATOR = + T.let( + :discriminator, + OpenAI::Conversations::Message::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Conversations::Message::Role::TaggedSymbol + ) + TOOL = + T.let(:tool, OpenAI::Conversations::Message::Role::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::Conversations::Message::Role::TaggedSymbol] + ) + end + def self.values + end + end + + # The status of item. One of `in_progress`, `completed`, or `incomplete`. + # Populated when items are returned via API. 
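
# A reading sketch for this model: an item retrieved from a conversation can be
# checked against the Message variant and its content parts type-matched. The
# IDs are illustrative and a configured `client` is assumed.
item = client.conversations.items.retrieve("msg_abc123", conversation_id: "conv_abc123")
if item.is_a?(OpenAI::Conversations::Message) && item.status == :completed
  item.content.each do |part|
    # Only output_text parts carry a `text` attribute among these variants.
    puts part.text if part.is_a?(OpenAI::Conversations::OutputTextContent)
  end
end
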
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Conversations::Message::Status) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Conversations::Message::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Conversations::Message::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Conversations::Message::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::Conversations::Message::Status::TaggedSymbol] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/conversations/output_text_content.rbi b/rbi/openai/models/conversations/output_text_content.rbi new file mode 100644 index 00000000..f9c643c6 --- /dev/null +++ b/rbi/openai/models/conversations/output_text_content.rbi @@ -0,0 +1,110 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class OutputTextContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::OutputTextContent, + OpenAI::Internal::AnyHash + ) + end + + # The annotations of the text output. + sig do + returns( + T::Array[ + OpenAI::Conversations::OutputTextContent::Annotation::Variants + ] + ) + end + attr_accessor :annotations + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + sig { returns(T.nilable(T::Array[OpenAI::Conversations::LobProb])) } + attr_reader :logprobs + + sig do + params( + logprobs: T::Array[OpenAI::Conversations::LobProb::OrHash] + ).void + end + attr_writer :logprobs + + sig do + params( + annotations: + T::Array[ + T.any( + OpenAI::Conversations::FileCitationBody::OrHash, + OpenAI::Conversations::URLCitationBody::OrHash, + OpenAI::Conversations::ContainerFileCitationBody::OrHash + ) + ], + text: String, + logprobs: T::Array[OpenAI::Conversations::LobProb::OrHash], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The annotations of the text output. + annotations:, + # The text output from the model. + text:, + logprobs: nil, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ) + end + + sig do + override.returns( + { + annotations: + T::Array[ + OpenAI::Conversations::OutputTextContent::Annotation::Variants + ], + text: String, + type: Symbol, + logprobs: T::Array[OpenAI::Conversations::LobProb] + } + ) + end + def to_hash + end + + module Annotation + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Conversations::FileCitationBody, + OpenAI::Conversations::URLCitationBody, + OpenAI::Conversations::ContainerFileCitationBody + ) + end + + sig do + override.returns( + T::Array[ + OpenAI::Conversations::OutputTextContent::Annotation::Variants + ] + ) + end + def self.variants + end + end + end + end + end +end diff --git a/rbi/openai/models/conversations/refusal_content.rbi b/rbi/openai/models/conversations/refusal_content.rbi new file mode 100644 index 00000000..77516323 --- /dev/null +++ b/rbi/openai/models/conversations/refusal_content.rbi @@ -0,0 +1,38 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class RefusalContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::RefusalContent, + OpenAI::Internal::AnyHash + ) + end + + # The refusal explanation from the model. + sig { returns(String) } + attr_accessor :refusal + + # The type of the refusal. Always `refusal`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(refusal: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The refusal explanation from the model. + refusal:, + # The type of the refusal. Always `refusal`. + type: :refusal + ) + end + + sig { override.returns({ refusal: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/summary_text_content.rbi b/rbi/openai/models/conversations/summary_text_content.rbi new file mode 100644 index 00000000..7292fa8d --- /dev/null +++ b/rbi/openai/models/conversations/summary_text_content.rbi @@ -0,0 +1,31 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class SummaryTextContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::SummaryTextContent, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :text + + sig { returns(Symbol) } + attr_accessor :type + + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :summary_text) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/text_content.rbi b/rbi/openai/models/conversations/text_content.rbi new file mode 100644 index 00000000..abb7a442 --- /dev/null +++ b/rbi/openai/models/conversations/text_content.rbi @@ -0,0 +1,28 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class TextContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Conversations::TextContent, OpenAI::Internal::AnyHash) + end + + sig { returns(String) } + attr_accessor :text + + sig { returns(Symbol) } + attr_accessor :type + + sig { params(text: String, type: Symbol).returns(T.attached_class) } + def self.new(text:, type: :text) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/top_log_prob.rbi b/rbi/openai/models/conversations/top_log_prob.rbi new file mode 100644 index 00000000..c11b651a --- 
/dev/null +++ b/rbi/openai/models/conversations/top_log_prob.rbi @@ -0,0 +1,41 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class TopLogProb < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Conversations::TopLogProb, OpenAI::Internal::AnyHash) + end + + sig { returns(String) } + attr_accessor :token + + sig { returns(T::Array[Integer]) } + attr_accessor :bytes + + sig { returns(Float) } + attr_accessor :logprob + + sig do + params( + token: String, + bytes: T::Array[Integer], + logprob: Float + ).returns(T.attached_class) + end + def self.new(token:, bytes:, logprob:) + end + + sig do + override.returns( + { token: String, bytes: T::Array[Integer], logprob: Float } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/conversations/url_citation_body.rbi b/rbi/openai/models/conversations/url_citation_body.rbi new file mode 100644 index 00000000..4c34ad3d --- /dev/null +++ b/rbi/openai/models/conversations/url_citation_body.rbi @@ -0,0 +1,74 @@ +# typed: strong + +module OpenAI + module Models + module Conversations + class URLCitationBody < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Conversations::URLCitationBody, + OpenAI::Internal::AnyHash + ) + end + + # The index of the last character of the URL citation in the message. + sig { returns(Integer) } + attr_accessor :end_index + + # The index of the first character of the URL citation in the message. + sig { returns(Integer) } + attr_accessor :start_index + + # The title of the web resource. + sig { returns(String) } + attr_accessor :title + + # The type of the URL citation. Always `url_citation`. + sig { returns(Symbol) } + attr_accessor :type + + # The URL of the web resource. + sig { returns(String) } + attr_accessor :url + + sig do + params( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the last character of the URL citation in the message. + end_index:, + # The index of the first character of the URL citation in the message. + start_index:, + # The title of the web resource. + title:, + # The URL of the web resource. + url:, + # The type of the URL citation. Always `url_citation`. 
+ type: :url_citation + ) + end + + sig do + override.returns( + { + end_index: Integer, + start_index: Integer, + title: String, + type: Symbol, + url: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 79dee373..63682eda 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -441,7 +441,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem ) ] ) @@ -458,7 +458,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::OrHash + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::OrHash ) ], type: Symbol @@ -480,7 +480,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem ) ], type: Symbol @@ -502,15 +502,15 @@ module OpenAI T.type_alias do T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem ) end - class Message < OpenAI::Internal::Type::BaseModel + class EvalItem < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem, OpenAI::Internal::AnyHash ) end @@ -521,8 +521,8 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, T::Array[T.anything] ) ) @@ -533,7 +533,7 @@ module OpenAI # `developer`. 
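
# With the rename from `Message` to `EvalItem`, eval template entries are built
# against the new constant; a sketch with an illustrative template string,
# following the `content`/`role` signature shown here.
OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem.new(
  content: "{{item.input}}",
  role: :user
)
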
sig do returns( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol ) end attr_accessor :role @@ -542,7 +542,7 @@ module OpenAI sig do returns( T.nilable( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol ) ) end @@ -551,7 +551,7 @@ module OpenAI sig do params( type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol ).void end attr_writer :type @@ -567,14 +567,14 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash, T::Array[T.anything] ), role: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol ).returns(T.attached_class) end def self.new( @@ -595,14 +595,14 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, T::Array[T.anything] ), role: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, type: - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol } ) end @@ -618,8 +618,8 @@ module OpenAI T.any( String, OpenAI::Responses::ResponseInputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + 
OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, T::Array[T.anything] ) end @@ -628,7 +628,7 @@ module OpenAI OrHash = T.type_alias do T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, OpenAI::Internal::AnyHash ) end @@ -664,7 +664,7 @@ module OpenAI OrHash = T.type_alias do T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage, OpenAI::Internal::AnyHash ) end @@ -716,7 +716,7 @@ module OpenAI sig do override.returns( T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::Variants + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::Variants ] ) end @@ -741,7 +741,7 @@ module OpenAI T.type_alias do T.all( Symbol, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -749,28 +749,28 @@ module OpenAI USER = T.let( :user, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) ASSISTANT = T.let( :assistant, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) SYSTEM = T.let( :system, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) DEVELOPER = T.let( :developer, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ) sig do override.returns( T::Array[ - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol ] ) end @@ -786,7 +786,7 @@ module OpenAI T.type_alias do T.all( Symbol, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type ) end OrSymbol = T.type_alias { T.any(Symbol, String) } @@ -794,13 +794,13 @@ module OpenAI MESSAGE = T.let( :message, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol ) sig do override.returns( T::Array[ - 
OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol ] ) end diff --git a/rbi/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi index f495c4f3..02e79111 100644 --- a/rbi/openai/models/responses/input_item_list_params.rbi +++ b/rbi/openai/models/responses/input_item_list_params.rbi @@ -22,13 +22,6 @@ module OpenAI sig { params(after: String).void } attr_writer :after - # An item ID to list items before, used in pagination. - sig { returns(T.nilable(String)) } - attr_reader :before - - sig { params(before: String).void } - attr_writer :before - # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. sig do @@ -74,7 +67,6 @@ module OpenAI sig do params( after: String, - before: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Responses::InputItemListParams::Order::OrSymbol, @@ -84,8 +76,6 @@ module OpenAI def self.new( # An item ID to list items after, used in pagination. after: nil, - # An item ID to list items before, used in pagination. - before: nil, # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. include: nil, @@ -105,7 +95,6 @@ module OpenAI override.returns( { after: String, - before: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], limit: Integer, diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index 94087328..efacebab 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -136,6 +136,19 @@ module OpenAI sig { returns(T.nilable(T::Boolean)) } attr_accessor :background + # The conversation that this response belongs to. Input items and output items + # from this response are automatically added to this conversation. + sig { returns(T.nilable(OpenAI::Responses::Response::Conversation)) } + attr_reader :conversation + + sig do + params( + conversation: + T.nilable(OpenAI::Responses::Response::Conversation::OrHash) + ).void + end + attr_writer :conversation + # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -152,6 +165,7 @@ module OpenAI # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. sig { returns(T.nilable(String)) } attr_accessor :previous_response_id @@ -340,6 +354,8 @@ module OpenAI ], top_p: T.nilable(Float), background: T.nilable(T::Boolean), + conversation: + T.nilable(OpenAI::Responses::Response::Conversation::OrHash), max_output_tokens: T.nilable(Integer), max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), @@ -432,6 +448,9 @@ module OpenAI # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, + # The conversation that this response belongs to. Input items and output items + # from this response are automatically added to this conversation. 
+ conversation: nil, # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -444,6 +463,7 @@ module OpenAI # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. previous_response_id: nil, # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). @@ -534,6 +554,8 @@ module OpenAI tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: T.nilable(Float), background: T.nilable(T::Boolean), + conversation: + T.nilable(OpenAI::Responses::Response::Conversation), max_output_tokens: T.nilable(Integer), max_tool_calls: T.nilable(Integer), previous_response_id: T.nilable(String), @@ -707,6 +729,33 @@ module OpenAI end end + class Conversation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Response::Conversation, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the conversation. + sig { returns(String) } + attr_accessor :id + + # The conversation that this response belongs to. Input items and output items + # from this response are automatically added to this conversation. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the conversation. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + # Specifies the processing type used for serving the request. # # - If set to 'auto', then the request will be processed with the service tier diff --git a/rbi/openai/models/responses/response_conversation_param.rbi b/rbi/openai/models/responses/response_conversation_param.rbi new file mode 100644 index 00000000..e4b1df7f --- /dev/null +++ b/rbi/openai/models/responses/response_conversation_param.rbi @@ -0,0 +1,33 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseConversationParam < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseConversationParam, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the conversation. + sig { returns(String) } + attr_accessor :id + + # The conversation that this response belongs to. + sig { params(id: String).returns(T.attached_class) } + def self.new( + # The unique ID of the conversation. + id: + ) + end + + sig { override.returns({ id: String }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 850cfe82..3fa43c5b 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -20,6 +20,19 @@ module OpenAI sig { returns(T.nilable(T::Boolean)) } attr_accessor :background + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. 
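
# A sketch of binding a response to a conversation via this new param (a string
# ID or a ResponseConversationParam); per the docs above it cannot be combined
# with `previous_response_id`. Model name, ID, and `client` are illustrative.
response = client.responses.create(
  model: "gpt-4.1",
  input: "Summarize our discussion so far.",
  conversation: "conv_abc123"
)
response.conversation&.id  # => "conv_abc123"
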
+ sig do + returns( + T.nilable( + T.any(String, OpenAI::Responses::ResponseConversationParam) + ) + ) + end + attr_accessor :conversation + # Specify additional output data to include in the model response. Currently # supported values are: # @@ -133,6 +146,7 @@ module OpenAI # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. sig { returns(T.nilable(String)) } attr_accessor :previous_response_id @@ -377,6 +391,13 @@ module OpenAI sig do params( background: T.nilable(T::Boolean), + conversation: + T.nilable( + T.any( + String, + OpenAI::Responses::ResponseConversationParam::OrHash + ) + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -446,6 +467,11 @@ module OpenAI # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + conversation: nil, # Specify additional output data to include in the model response. Currently # supported values are: # @@ -506,6 +532,7 @@ module OpenAI # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. previous_response_id: nil, # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). @@ -609,6 +636,10 @@ module OpenAI override.returns( { background: T.nilable(T::Boolean), + conversation: + T.nilable( + T.any(String, OpenAI::Responses::ResponseConversationParam) + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -678,6 +709,29 @@ module OpenAI def to_hash end + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + module Conversation + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any(String, OpenAI::Responses::ResponseConversationParam) + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseCreateParams::Conversation::Variants + ] + ) + end + def self.variants + end + end + # Text, image, or file inputs to the model, used to generate a response. # # Learn more: diff --git a/rbi/openai/resources/conversations.rbi b/rbi/openai/resources/conversations.rbi new file mode 100644 index 00000000..6d83ca4e --- /dev/null +++ b/rbi/openai/resources/conversations.rbi @@ -0,0 +1,110 @@ +# typed: strong + +module OpenAI + module Resources + class Conversations + sig { returns(OpenAI::Resources::Conversations::Items) } + attr_reader :items + + # Create a conversation with the given ID. 
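
# A minimal lifecycle sketch for the resource defined below; the client
# constructor is assumed to follow this SDK's usual pattern, and the item and
# metadata values are illustrative.
client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])
conversation = client.conversations.create(
  items: [{type: :message, role: :user, content: "Hello!"}],
  metadata: {topic: "demo"}
)
client.conversations.update(conversation.id, metadata: {topic: "demo", stage: "triage"})
client.conversations.delete(conversation.id)
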
+ sig do + params( + items: + T.nilable( + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseInputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseInputItem::McpCall::OrHash, + OpenAI::Responses::ResponseCustomToolCallOutput::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ] + ), + metadata: T.nilable(T::Hash[Symbol, String]), + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::Conversation) + end + def create( + # Initial items to include in the conversation context. You may add up to 20 items + # at a time. + items: nil, + # Set of 16 key-value pairs that can be attached to an object. Useful for storing + # additional information about the object in a structured format. + metadata: nil, + request_options: {} + ) + end + + # Get a conversation with the given ID. + sig do + params( + conversation_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::Conversation) + end + def retrieve( + # The ID of the conversation to retrieve. + conversation_id, + request_options: {} + ) + end + + # Update a conversation's metadata with the given ID. + sig do + params( + conversation_id: String, + metadata: T::Hash[Symbol, String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::Conversation) + end + def update( + # The ID of the conversation to update. + conversation_id, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. Keys are strings with a maximum + # length of 64 characters. Values are strings with a maximum length of 512 + # characters. + metadata:, + request_options: {} + ) + end + + # Delete a conversation with the given ID. + sig do + params( + conversation_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::ConversationDeletedResource) + end + def delete( + # The ID of the conversation to delete. 
+ conversation_id, + request_options: {} + ) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end +end diff --git a/rbi/openai/resources/conversations/items.rbi b/rbi/openai/resources/conversations/items.rbi new file mode 100644 index 00000000..fb396699 --- /dev/null +++ b/rbi/openai/resources/conversations/items.rbi @@ -0,0 +1,152 @@ +# typed: strong + +module OpenAI + module Resources + class Conversations + class Items + # Create items in a conversation with the given ID. + sig do + params( + conversation_id: String, + items: + T::Array[ + T.any( + OpenAI::Responses::EasyInputMessage::OrHash, + OpenAI::Responses::ResponseInputItem::Message::OrHash, + OpenAI::Responses::ResponseOutputMessage::OrHash, + OpenAI::Responses::ResponseFileSearchToolCall::OrHash, + OpenAI::Responses::ResponseComputerToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, + OpenAI::Responses::ResponseFunctionWebSearch::OrHash, + OpenAI::Responses::ResponseFunctionToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseInputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseInputItem::McpCall::OrHash, + OpenAI::Responses::ResponseCustomToolCallOutput::OrHash, + OpenAI::Responses::ResponseCustomToolCall::OrHash, + OpenAI::Responses::ResponseInputItem::ItemReference::OrHash + ) + ], + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::ConversationItemList) + end + def create( + # Path param: The ID of the conversation to add the item to. + conversation_id, + # Body param: The items to add to the conversation. You may add up to 20 items at + # a time. + items:, + # Query param: Additional fields to include in the response. See the `include` + # parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. + include: nil, + request_options: {} + ) + end + + # Get a single item from a conversation with the given IDs. + sig do + params( + item_id: String, + conversation_id: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::ConversationItem::Variants) + end + def retrieve( + # Path param: The ID of the item to retrieve. + item_id, + # Path param: The ID of the conversation that contains the item. + conversation_id:, + # Query param: Additional fields to include in the response. See the `include` + # parameter for + # [listing Conversation items above](https://platform.openai.com/docs/api-reference/conversations/list-items#conversations_list_items-include) + # for more information. + include: nil, + request_options: {} + ) + end + + # List all items for a conversation with the given ID. 
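
# A pagination sketch for `list` below: it returns a ConversationCursorPage,
# which, like this SDK's other cursor pages, is assumed to support
# `auto_paging_each`; the conversation ID is illustrative.
page = client.conversations.items.list("conv_abc123", limit: 20, order: :asc)
page.auto_paging_each do |item|
  puts item.class
end
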
+ sig do + params( + conversation_id: String, + after: String, + include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], + limit: Integer, + order: OpenAI::Conversations::ItemListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::ConversationCursorPage[ + OpenAI::Conversations::ConversationItem::Variants + ] + ) + end + def list( + # The ID of the conversation to list items for. + conversation_id, + # An item ID to list items after, used in pagination. + after: nil, + # Specify additional output data to include in the model response. Currently + # supported values are: + # + # - `code_interpreter_call.outputs`: Includes the outputs of python code execution + # in code interpreter tool call items. + # - `computer_call_output.output.image_url`: Include image urls from the computer + # call output. + # - `file_search_call.results`: Include the search results of the file search tool + # call. + # - `message.input_image.image_url`: Include image urls from the input message. + # - `message.output_text.logprobs`: Include logprobs with assistant messages. + # - `reasoning.encrypted_content`: Includes an encrypted version of reasoning + # tokens in reasoning item outputs. This enables reasoning items to be used in + # multi-turn conversations when using the Responses API statelessly (like when + # the `store` parameter is set to `false`, or when an organization is enrolled + # in the zero data retention program). + include: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # The order to return the input items in. Default is `desc`. + # + # - `asc`: Return the input items in ascending order. + # - `desc`: Return the input items in descending order. + order: nil, + request_options: {} + ) + end + + # Delete an item from a conversation with the given IDs. + sig do + params( + item_id: String, + conversation_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Conversations::Conversation) + end + def delete( + # The ID of the item to delete. + item_id, + # The ID of the conversation that contains the item. + conversation_id:, + request_options: {} + ) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index adad3d32..743d1278 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -22,6 +22,13 @@ module OpenAI sig do params( background: T.nilable(T::Boolean), + conversation: + T.nilable( + T.any( + String, + OpenAI::Responses::ResponseConversationParam::OrHash + ) + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -92,6 +99,11 @@ module OpenAI # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + conversation: nil, # Specify additional output data to include in the model response. Currently # supported values are: # @@ -152,6 +164,7 @@ module OpenAI # The unique ID of the previous response to the model. 
Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. previous_response_id: nil, # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). @@ -270,6 +283,13 @@ module OpenAI sig do params( background: T.nilable(T::Boolean), + conversation: + T.nilable( + T.any( + String, + OpenAI::Responses::ResponseConversationParam::OrHash + ) + ), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -344,6 +364,11 @@ module OpenAI # Whether to run the model response in the background. # [Learn more](https://platform.openai.com/docs/guides/background). background: nil, + # The conversation that this response belongs to. Items from this conversation are + # prepended to `input_items` for this response request. Input items and output + # items from this response are automatically added to this conversation after this + # response completes. + conversation: nil, # Specify additional output data to include in the model response. Currently # supported values are: # @@ -404,6 +429,7 @@ module OpenAI # The unique ID of the previous response to the model. Use this to create # multi-turn conversations. Learn more about # [conversation state](https://platform.openai.com/docs/guides/conversation-state). + # Cannot be used in conjunction with `conversation`. previous_response_id: nil, # Reference to a prompt template and its variables. # [Learn more](https://platform.openai.com/docs/guides/text?api-mode=responses#reusable-prompts). diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index db8126f6..5875866b 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -9,7 +9,6 @@ module OpenAI params( response_id: String, after: String, - before: String, include: T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol], limit: Integer, order: OpenAI::Responses::InputItemListParams::Order::OrSymbol, @@ -25,8 +24,6 @@ module OpenAI response_id, # An item ID to list items after, used in pagination. after: nil, - # An item ID to list items before, used in pagination. - before: nil, # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. 
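        # (For example, `include: [:"file_search_call.results"]` — an illustrative
        # value; the accepted symbols are enumerated by
        # `OpenAI::Responses::ResponseIncludable`.)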
include: nil, diff --git a/scripts/detect-breaking-changes b/scripts/detect-breaking-changes index 61f7a2ec..750b7b64 100755 --- a/scripts/detect-breaking-changes +++ b/scripts/detect-breaking-changes @@ -46,6 +46,8 @@ TEST_PATHS=( test/openai/resources/uploads/parts_test.rb test/openai/resources/responses_test.rb test/openai/resources/responses/input_items_test.rb + test/openai/resources/conversations_test.rb + test/openai/resources/conversations/items_test.rb test/openai/resources/evals_test.rb test/openai/resources/evals/runs_test.rb test/openai/resources/evals/runs/output_items_test.rb diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 8e9b34a1..26c023fa 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -46,6 +46,8 @@ module OpenAI attr_reader responses: OpenAI::Resources::Responses + attr_reader conversations: OpenAI::Resources::Conversations + attr_reader evals: OpenAI::Resources::Evals attr_reader containers: OpenAI::Resources::Containers diff --git a/sig/openai/internal/conversation_cursor_page.rbs b/sig/openai/internal/conversation_cursor_page.rbs new file mode 100644 index 00000000..f2b11b2c --- /dev/null +++ b/sig/openai/internal/conversation_cursor_page.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Internal + class ConversationCursorPage[Elem] + include OpenAI::Internal::Type::BasePage[Elem] + + attr_accessor data: ::Array[Elem]? + + attr_accessor has_more: bool + + attr_accessor last_id: String + + def inspect: -> String + end + end +end diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index 67856129..338313ae 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -51,6 +51,8 @@ module OpenAI module Containers = OpenAI::Models::Containers + module Conversations = OpenAI::Models::Conversations + class CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse module CustomToolInputFormat = OpenAI::Models::CustomToolInputFormat diff --git a/sig/openai/models/conversations/computer_screenshot_content.rbs b/sig/openai/models/conversations/computer_screenshot_content.rbs new file mode 100644 index 00000000..f0485edf --- /dev/null +++ b/sig/openai/models/conversations/computer_screenshot_content.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Models + module Conversations + type computer_screenshot_content = + { file_id: String?, image_url: String?, type: :computer_screenshot } + + class ComputerScreenshotContent < OpenAI::Internal::Type::BaseModel + attr_accessor file_id: String? + + attr_accessor image_url: String? 
+ + attr_accessor type: :computer_screenshot + + def initialize: ( + file_id: String?, + image_url: String?, + ?type: :computer_screenshot + ) -> void + + def to_hash: -> { + file_id: String?, + image_url: String?, + type: :computer_screenshot + } + end + end + end +end diff --git a/sig/openai/models/conversations/container_file_citation_body.rbs b/sig/openai/models/conversations/container_file_citation_body.rbs new file mode 100644 index 00000000..c8d856d0 --- /dev/null +++ b/sig/openai/models/conversations/container_file_citation_body.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Conversations + type container_file_citation_body = + { + container_id: String, + end_index: Integer, + file_id: String, + filename: String, + start_index: Integer, + type: :container_file_citation + } + + class ContainerFileCitationBody < OpenAI::Internal::Type::BaseModel + attr_accessor container_id: String + + attr_accessor end_index: Integer + + attr_accessor file_id: String + + attr_accessor filename: String + + attr_accessor start_index: Integer + + attr_accessor type: :container_file_citation + + def initialize: ( + container_id: String, + end_index: Integer, + file_id: String, + filename: String, + start_index: Integer, + ?type: :container_file_citation + ) -> void + + def to_hash: -> { + container_id: String, + end_index: Integer, + file_id: String, + filename: String, + start_index: Integer, + type: :container_file_citation + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation.rbs b/sig/openai/models/conversations/conversation.rbs new file mode 100644 index 00000000..9648f0fc --- /dev/null +++ b/sig/openai/models/conversations/conversation.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Conversations + type conversation = + { + id: String, + created_at: Integer, + metadata: top, + object: :conversation + } + + class Conversation < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor metadata: top + + attr_accessor object: :conversation + + def initialize: ( + id: String, + created_at: Integer, + metadata: top, + ?object: :conversation + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + metadata: top, + object: :conversation + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_create_params.rbs b/sig/openai/models/conversations/conversation_create_params.rbs new file mode 100644 index 00000000..e4152ec4 --- /dev/null +++ b/sig/openai/models/conversations/conversation_create_params.rbs @@ -0,0 +1,33 @@ +module OpenAI + module Models + module Conversations + type conversation_create_params = + { + items: ::Array[OpenAI::Models::Responses::response_input_item]?, + metadata: OpenAI::Models::metadata? + } + & OpenAI::Internal::Type::request_parameters + + class ConversationCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor items: ::Array[OpenAI::Models::Responses::response_input_item]? + + attr_accessor metadata: OpenAI::Models::metadata? 
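+
+        # Illustrative request shape only (field values are placeholders):
+        #   {items: [{role: :user, content: "Hello!"}], metadata: {topic: "demo"}}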
+ + def initialize: ( + ?items: ::Array[OpenAI::Models::Responses::response_input_item]?, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + items: ::Array[OpenAI::Models::Responses::response_input_item]?, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_delete_params.rbs b/sig/openai/models/conversations/conversation_delete_params.rbs new file mode 100644 index 00000000..075734ce --- /dev/null +++ b/sig/openai/models/conversations/conversation_delete_params.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type conversation_delete_params = + { } & OpenAI::Internal::Type::request_parameters + + class ConversationDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_deleted.rbs b/sig/openai/models/conversations/conversation_deleted.rbs new file mode 100644 index 00000000..67550c38 --- /dev/null +++ b/sig/openai/models/conversations/conversation_deleted.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Models + module Conversations + type conversation_deleted = + { id: String, deleted: bool, object: :"conversation.deleted" } + + class ConversationDeleted < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor deleted: bool + + attr_accessor object: :"conversation.deleted" + + def initialize: ( + id: String, + deleted: bool, + ?object: :"conversation.deleted" + ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"conversation.deleted" + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_deleted_resource.rbs b/sig/openai/models/conversations/conversation_deleted_resource.rbs new file mode 100644 index 00000000..b0ae90b2 --- /dev/null +++ b/sig/openai/models/conversations/conversation_deleted_resource.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Models + module Conversations + type conversation_deleted_resource = + { id: String, deleted: bool, object: :"conversation.deleted" } + + class ConversationDeletedResource < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor deleted: bool + + attr_accessor object: :"conversation.deleted" + + def initialize: ( + id: String, + deleted: bool, + ?object: :"conversation.deleted" + ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"conversation.deleted" + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_item.rbs b/sig/openai/models/conversations/conversation_item.rbs new file mode 100644 index 00000000..f4c07482 --- /dev/null +++ b/sig/openai/models/conversations/conversation_item.rbs @@ -0,0 +1,403 @@ +module OpenAI + module Models + module ConversationItem = Conversations::ConversationItem + + module Conversations + type conversation_item = + OpenAI::Conversations::Message + | OpenAI::Responses::ResponseFunctionToolCallItem + | OpenAI::Responses::ResponseFunctionToolCallOutputItem + | OpenAI::Responses::ResponseFileSearchToolCall + | OpenAI::Responses::ResponseFunctionWebSearch + | OpenAI::Conversations::ConversationItem::ImageGenerationCall + | 
OpenAI::Responses::ResponseComputerToolCall + | OpenAI::Responses::ResponseComputerToolCallOutputItem + | OpenAI::Responses::ResponseReasoningItem + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Conversations::ConversationItem::LocalShellCall + | OpenAI::Conversations::ConversationItem::LocalShellCallOutput + | OpenAI::Conversations::ConversationItem::McpListTools + | OpenAI::Conversations::ConversationItem::McpApprovalRequest + | OpenAI::Conversations::ConversationItem::McpApprovalResponse + | OpenAI::Conversations::ConversationItem::McpCall + | OpenAI::Responses::ResponseCustomToolCall + | OpenAI::Responses::ResponseCustomToolCallOutput + + module ConversationItem + extend OpenAI::Internal::Type::Union + + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? + + attr_accessor status: OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::status, + type: :image_generation_call + } + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Models::Conversations::ConversationItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Conversations::ConversationItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Models::Conversations::ConversationItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Conversations::ConversationItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + def to_hash: -> { + id: String, + action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Conversations::ConversationItem::LocalShellCall::status, + type: :local_shell_call + } + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? 
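+
+            # Describes the shell `exec` action: an argv-style command plus an env
+            # map, with optional timeout (ms), user, and working directory.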
+ + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Conversations::ConversationItem::LocalShellCall::status] + end + end + + type local_shell_call_output = + { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::status? + } + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor output: String + + attr_accessor type: :local_shell_call_output + + attr_accessor status: OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::status? + + def initialize: ( + id: String, + output: String, + ?status: OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::status?, + ?type: :local_shell_call_output + ) -> void + + def to_hash: -> { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::status? + } + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Conversations::ConversationItem::LocalShellCallOutput::status] + end + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Conversations::ConversationItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Conversations::ConversationItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Conversations::ConversationItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Conversations::ConversationItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? 
+ } + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + end + + type mcp_approval_response = + { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor approval_request_id: String + + attr_accessor approve: bool + + attr_accessor type: :mcp_approval_response + + attr_accessor reason: String? + + def initialize: ( + id: String, + approval_request_id: String, + approve: bool, + ?reason: String?, + ?type: :mcp_approval_response + ) -> void + + def to_hash: -> { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? 
+ } + end + + def self?.variants: -> ::Array[OpenAI::Models::Conversations::conversation_item] + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_item_list.rbs b/sig/openai/models/conversations/conversation_item_list.rbs new file mode 100644 index 00000000..9cab46d2 --- /dev/null +++ b/sig/openai/models/conversations/conversation_item_list.rbs @@ -0,0 +1,44 @@ +module OpenAI + module Models + class ConversationItemList = Conversations::ConversationItemList + + module Conversations + type conversation_item_list = + { + data: ::Array[OpenAI::Models::Conversations::conversation_item], + first_id: String, + has_more: bool, + last_id: String, + object: :list + } + + class ConversationItemList < OpenAI::Internal::Type::BaseModel + attr_accessor data: ::Array[OpenAI::Models::Conversations::conversation_item] + + attr_accessor first_id: String + + attr_accessor has_more: bool + + attr_accessor last_id: String + + attr_accessor object: :list + + def initialize: ( + data: ::Array[OpenAI::Models::Conversations::conversation_item], + first_id: String, + has_more: bool, + last_id: String, + ?object: :list + ) -> void + + def to_hash: -> { + data: ::Array[OpenAI::Models::Conversations::conversation_item], + first_id: String, + has_more: bool, + last_id: String, + object: :list + } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_retrieve_params.rbs b/sig/openai/models/conversations/conversation_retrieve_params.rbs new file mode 100644 index 00000000..5cf1fb84 --- /dev/null +++ b/sig/openai/models/conversations/conversation_retrieve_params.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type conversation_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters + + class ConversationRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end + end +end diff --git a/sig/openai/models/conversations/conversation_update_params.rbs b/sig/openai/models/conversations/conversation_update_params.rbs new file mode 100644 index 00000000..cff724e5 --- /dev/null +++ b/sig/openai/models/conversations/conversation_update_params.rbs @@ -0,0 +1,26 @@ +module OpenAI + module Models + module Conversations + type conversation_update_params = + { metadata: ::Hash[Symbol, String] } + & OpenAI::Internal::Type::request_parameters + + class ConversationUpdateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor metadata: ::Hash[Symbol, String] + + def initialize: ( + metadata: ::Hash[Symbol, String], + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + metadata: ::Hash[Symbol, String], + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/conversations/file_citation_body.rbs b/sig/openai/models/conversations/file_citation_body.rbs new file mode 100644 index 00000000..cfdd97ac --- /dev/null +++ b/sig/openai/models/conversations/file_citation_body.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Conversations + type file_citation_body = + { + file_id: String, + filename: String, + index: Integer, + type: :file_citation + } + + class FileCitationBody < OpenAI::Internal::Type::BaseModel + 
attr_accessor file_id: String + + attr_accessor filename: String + + attr_accessor index: Integer + + attr_accessor type: :file_citation + + def initialize: ( + file_id: String, + filename: String, + index: Integer, + ?type: :file_citation + ) -> void + + def to_hash: -> { + file_id: String, + filename: String, + index: Integer, + type: :file_citation + } + end + end + end +end diff --git a/sig/openai/models/conversations/input_file_content.rbs b/sig/openai/models/conversations/input_file_content.rbs new file mode 100644 index 00000000..69739208 --- /dev/null +++ b/sig/openai/models/conversations/input_file_content.rbs @@ -0,0 +1,41 @@ +module OpenAI + module Models + module Conversations + type input_file_content = + { + file_id: String?, + type: :input_file, + file_url: String, + filename: String + } + + class InputFileContent < OpenAI::Internal::Type::BaseModel + attr_accessor file_id: String? + + attr_accessor type: :input_file + + attr_reader file_url: String? + + def file_url=: (String) -> String + + attr_reader filename: String? + + def filename=: (String) -> String + + def initialize: ( + file_id: String?, + ?file_url: String, + ?filename: String, + ?type: :input_file + ) -> void + + def to_hash: -> { + file_id: String?, + type: :input_file, + file_url: String, + filename: String + } + end + end + end +end diff --git a/sig/openai/models/conversations/input_image_content.rbs b/sig/openai/models/conversations/input_image_content.rbs new file mode 100644 index 00000000..bfd75dcf --- /dev/null +++ b/sig/openai/models/conversations/input_image_content.rbs @@ -0,0 +1,49 @@ +module OpenAI + module Models + module Conversations + type input_image_content = + { + detail: OpenAI::Models::Conversations::InputImageContent::detail, + file_id: String?, + image_url: String?, + type: :input_image + } + + class InputImageContent < OpenAI::Internal::Type::BaseModel + attr_accessor detail: OpenAI::Models::Conversations::InputImageContent::detail + + attr_accessor file_id: String? + + attr_accessor image_url: String? 
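+
+        # In this schema an image is referenced via `file_id` or `image_url` (both
+        # nilable); `detail` selects the low/high/auto detail level.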
+ + attr_accessor type: :input_image + + def initialize: ( + detail: OpenAI::Models::Conversations::InputImageContent::detail, + file_id: String?, + image_url: String?, + ?type: :input_image + ) -> void + + def to_hash: -> { + detail: OpenAI::Models::Conversations::InputImageContent::detail, + file_id: String?, + image_url: String?, + type: :input_image + } + + type detail = :low | :high | :auto + + module Detail + extend OpenAI::Internal::Type::Enum + + LOW: :low + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::Conversations::InputImageContent::detail] + end + end + end + end +end diff --git a/sig/openai/models/conversations/input_text_content.rbs b/sig/openai/models/conversations/input_text_content.rbs new file mode 100644 index 00000000..59155bd2 --- /dev/null +++ b/sig/openai/models/conversations/input_text_content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type input_text_content = { text: String, type: :input_text } + + class InputTextContent < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :input_text + + def initialize: (text: String, ?type: :input_text) -> void + + def to_hash: -> { text: String, type: :input_text } + end + end + end +end diff --git a/sig/openai/models/conversations/item_create_params.rbs b/sig/openai/models/conversations/item_create_params.rbs new file mode 100644 index 00000000..108ef141 --- /dev/null +++ b/sig/openai/models/conversations/item_create_params.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Conversations + type item_create_params = + { + items: ::Array[OpenAI::Models::Responses::response_input_item], + include: ::Array[OpenAI::Models::Responses::response_includable] + } + & OpenAI::Internal::Type::request_parameters + + class ItemCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor items: ::Array[OpenAI::Models::Responses::response_input_item] + + attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? 
+ + def include=: ( + ::Array[OpenAI::Models::Responses::response_includable] + ) -> ::Array[OpenAI::Models::Responses::response_includable] + + def initialize: ( + items: ::Array[OpenAI::Models::Responses::response_input_item], + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + items: ::Array[OpenAI::Models::Responses::response_input_item], + include: ::Array[OpenAI::Models::Responses::response_includable], + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/conversations/item_delete_params.rbs b/sig/openai/models/conversations/item_delete_params.rbs new file mode 100644 index 00000000..fb864cb8 --- /dev/null +++ b/sig/openai/models/conversations/item_delete_params.rbs @@ -0,0 +1,25 @@ +module OpenAI + module Models + module Conversations + type item_delete_params = + { conversation_id: String } & OpenAI::Internal::Type::request_parameters + + class ItemDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor conversation_id: String + + def initialize: ( + conversation_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + conversation_id: String, + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/conversations/item_list_params.rbs b/sig/openai/models/conversations/item_list_params.rbs new file mode 100644 index 00000000..ca693e07 --- /dev/null +++ b/sig/openai/models/conversations/item_list_params.rbs @@ -0,0 +1,66 @@ +module OpenAI + module Models + module Conversations + type item_list_params = + { + after: String, + include: ::Array[OpenAI::Models::Responses::response_includable], + limit: Integer, + order: OpenAI::Models::Conversations::ItemListParams::order + } + & OpenAI::Internal::Type::request_parameters + + class ItemListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? + + def include=: ( + ::Array[OpenAI::Models::Responses::response_includable] + ) -> ::Array[OpenAI::Models::Responses::response_includable] + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::Conversations::ItemListParams::order? 
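+
+        # Per the resource docs, `order` defaults to `desc` when omitted.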
+ + def order=: ( + OpenAI::Models::Conversations::ItemListParams::order + ) -> OpenAI::Models::Conversations::ItemListParams::order + + def initialize: ( + ?after: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?limit: Integer, + ?order: OpenAI::Models::Conversations::ItemListParams::order, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + after: String, + include: ::Array[OpenAI::Models::Responses::response_includable], + limit: Integer, + order: OpenAI::Models::Conversations::ItemListParams::order, + request_options: OpenAI::RequestOptions + } + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::Conversations::ItemListParams::order] + end + end + end + end +end diff --git a/sig/openai/models/conversations/item_retrieve_params.rbs b/sig/openai/models/conversations/item_retrieve_params.rbs new file mode 100644 index 00000000..c2a3f209 --- /dev/null +++ b/sig/openai/models/conversations/item_retrieve_params.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Conversations + type item_retrieve_params = + { + conversation_id: String, + include: ::Array[OpenAI::Models::Responses::response_includable] + } + & OpenAI::Internal::Type::request_parameters + + class ItemRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor conversation_id: String + + attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? + + def include=: ( + ::Array[OpenAI::Models::Responses::response_includable] + ) -> ::Array[OpenAI::Models::Responses::response_includable] + + def initialize: ( + conversation_id: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + conversation_id: String, + include: ::Array[OpenAI::Models::Responses::response_includable], + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/conversations/lob_prob.rbs b/sig/openai/models/conversations/lob_prob.rbs new file mode 100644 index 00000000..7d64c4d1 --- /dev/null +++ b/sig/openai/models/conversations/lob_prob.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Conversations + type lob_prob = + { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Conversations::TopLogProb] + } + + class LobProb < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + attr_accessor top_logprobs: ::Array[OpenAI::Conversations::TopLogProb] + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Conversations::TopLogProb] + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float, + top_logprobs: ::Array[OpenAI::Conversations::TopLogProb] + } + end + end + end +end diff --git a/sig/openai/models/conversations/message.rbs b/sig/openai/models/conversations/message.rbs new file mode 100644 index 00000000..370dc4c3 --- /dev/null +++ b/sig/openai/models/conversations/message.rbs @@ -0,0 +1,95 @@ +module OpenAI + module Models + module Conversations + type message = + { + id: String, + content: ::Array[OpenAI::Models::Conversations::Message::content], + role: 
OpenAI::Models::Conversations::Message::role, + status: OpenAI::Models::Conversations::Message::status, + type: :message + } + + class Message < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor content: ::Array[OpenAI::Models::Conversations::Message::content] + + attr_accessor role: OpenAI::Models::Conversations::Message::role + + attr_accessor status: OpenAI::Models::Conversations::Message::status + + attr_accessor type: :message + + def initialize: ( + id: String, + content: ::Array[OpenAI::Models::Conversations::Message::content], + role: OpenAI::Models::Conversations::Message::role, + status: OpenAI::Models::Conversations::Message::status, + ?type: :message + ) -> void + + def to_hash: -> { + id: String, + content: ::Array[OpenAI::Models::Conversations::Message::content], + role: OpenAI::Models::Conversations::Message::role, + status: OpenAI::Models::Conversations::Message::status, + type: :message + } + + type content = + OpenAI::Conversations::InputTextContent + | OpenAI::Conversations::OutputTextContent + | OpenAI::Conversations::TextContent + | OpenAI::Conversations::SummaryTextContent + | OpenAI::Conversations::RefusalContent + | OpenAI::Conversations::InputImageContent + | OpenAI::Conversations::ComputerScreenshotContent + | OpenAI::Conversations::InputFileContent + + module Content + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Conversations::Message::content] + end + + type role = + :unknown + | :user + | :assistant + | :system + | :critic + | :discriminator + | :developer + | :tool + + module Role + extend OpenAI::Internal::Type::Enum + + UNKNOWN: :unknown + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + CRITIC: :critic + DISCRIMINATOR: :discriminator + DEVELOPER: :developer + TOOL: :tool + + def self?.values: -> ::Array[OpenAI::Models::Conversations::Message::role] + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Conversations::Message::status] + end + end + end + end +end diff --git a/sig/openai/models/conversations/output_text_content.rbs b/sig/openai/models/conversations/output_text_content.rbs new file mode 100644 index 00000000..fbb50d7b --- /dev/null +++ b/sig/openai/models/conversations/output_text_content.rbs @@ -0,0 +1,52 @@ +module OpenAI + module Models + module Conversations + type output_text_content = + { + annotations: ::Array[OpenAI::Models::Conversations::OutputTextContent::annotation], + text: String, + type: :output_text, + logprobs: ::Array[OpenAI::Conversations::LobProb] + } + + class OutputTextContent < OpenAI::Internal::Type::BaseModel + attr_accessor annotations: ::Array[OpenAI::Models::Conversations::OutputTextContent::annotation] + + attr_accessor text: String + + attr_accessor type: :output_text + + attr_reader logprobs: ::Array[OpenAI::Conversations::LobProb]? 
+ + def logprobs=: ( + ::Array[OpenAI::Conversations::LobProb] + ) -> ::Array[OpenAI::Conversations::LobProb] + + def initialize: ( + annotations: ::Array[OpenAI::Models::Conversations::OutputTextContent::annotation], + text: String, + ?logprobs: ::Array[OpenAI::Conversations::LobProb], + ?type: :output_text + ) -> void + + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Conversations::OutputTextContent::annotation], + text: String, + type: :output_text, + logprobs: ::Array[OpenAI::Conversations::LobProb] + } + + type annotation = + OpenAI::Conversations::FileCitationBody + | OpenAI::Conversations::URLCitationBody + | OpenAI::Conversations::ContainerFileCitationBody + + module Annotation + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Conversations::OutputTextContent::annotation] + end + end + end + end +end diff --git a/sig/openai/models/conversations/refusal_content.rbs b/sig/openai/models/conversations/refusal_content.rbs new file mode 100644 index 00000000..904922e1 --- /dev/null +++ b/sig/openai/models/conversations/refusal_content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type refusal_content = { refusal: String, type: :refusal } + + class RefusalContent < OpenAI::Internal::Type::BaseModel + attr_accessor refusal: String + + attr_accessor type: :refusal + + def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } + end + end + end +end diff --git a/sig/openai/models/conversations/summary_text_content.rbs b/sig/openai/models/conversations/summary_text_content.rbs new file mode 100644 index 00000000..33844203 --- /dev/null +++ b/sig/openai/models/conversations/summary_text_content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type summary_text_content = { text: String, type: :summary_text } + + class SummaryTextContent < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :summary_text + + def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } + end + end + end +end diff --git a/sig/openai/models/conversations/text_content.rbs b/sig/openai/models/conversations/text_content.rbs new file mode 100644 index 00000000..34216457 --- /dev/null +++ b/sig/openai/models/conversations/text_content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Conversations + type text_content = { text: String, type: :text } + + class TextContent < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :text + + def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: String, type: :text } + end + end + end +end diff --git a/sig/openai/models/conversations/top_log_prob.rbs b/sig/openai/models/conversations/top_log_prob.rbs new file mode 100644 index 00000000..43425978 --- /dev/null +++ b/sig/openai/models/conversations/top_log_prob.rbs @@ -0,0 +1,28 @@ +module OpenAI + module Models + module Conversations + type top_log_prob = + { token: String, bytes: ::Array[Integer], logprob: Float } + + class TopLogProb < OpenAI::Internal::Type::BaseModel + attr_accessor token: String + + attr_accessor bytes: ::Array[Integer] + + attr_accessor logprob: Float + + def initialize: ( + token: String, + bytes: ::Array[Integer], + logprob: Float + ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer], + logprob: Float + } + end + end + end +end diff --git 
a/sig/openai/models/conversations/url_citation_body.rbs b/sig/openai/models/conversations/url_citation_body.rbs new file mode 100644 index 00000000..70a4b20e --- /dev/null +++ b/sig/openai/models/conversations/url_citation_body.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Conversations + type url_citation_body = + { + end_index: Integer, + start_index: Integer, + title: String, + type: :url_citation, + url: String + } + + class URLCitationBody < OpenAI::Internal::Type::BaseModel + attr_accessor end_index: Integer + + attr_accessor start_index: Integer + + attr_accessor title: String + + attr_accessor type: :url_citation + + attr_accessor url: String + + def initialize: ( + end_index: Integer, + start_index: Integer, + title: String, + url: String, + ?type: :url_citation + ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + type: :url_citation, + url: String + } + end + end + end +end diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index d3806378..9e72fe05 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -195,46 +195,46 @@ module OpenAI type template = OpenAI::Responses::EasyInputMessage - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem module Template extend OpenAI::Internal::Type::Union - type message = + type eval_item = { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_ } - class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::content - attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::role - attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? + attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_? 
def type=: ( - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ - ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_ def initialize: ( - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_ ) -> void def to_hash: -> { - content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, - role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, - type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_ } type content = String | OpenAI::Responses::ResponseInputText - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText - | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage | ::Array[top] module Content @@ -277,7 +277,7 @@ module OpenAI } end - def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::content] AnArrayOfInputTextAndInputImageArray: OpenAI::Internal::Type::Converter end @@ -292,7 +292,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::role] end type type_ = :message @@ -302,7 +302,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] + def self?.values: -> 
::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::EvalItem::type_] end end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 4760e97a..321f338c 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -4,7 +4,6 @@ module OpenAI type input_item_list_params = { after: String, - before: String, include: ::Array[OpenAI::Models::Responses::response_includable], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::order @@ -19,10 +18,6 @@ module OpenAI def after=: (String) -> String - attr_reader before: String? - - def before=: (String) -> String - attr_reader include: ::Array[OpenAI::Models::Responses::response_includable]? def include=: ( @@ -41,7 +36,6 @@ module OpenAI def initialize: ( ?after: String, - ?before: String, ?include: ::Array[OpenAI::Models::Responses::response_includable], ?limit: Integer, ?order: OpenAI::Models::Responses::InputItemListParams::order, @@ -50,7 +44,6 @@ module OpenAI def to_hash: -> { after: String, - before: String, include: ::Array[OpenAI::Models::Responses::response_includable], limit: Integer, order: OpenAI::Models::Responses::InputItemListParams::order, diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index 6bf78a3c..41dbcf8d 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -18,6 +18,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, background: bool?, + conversation: OpenAI::Responses::Response::Conversation?, max_output_tokens: Integer?, max_tool_calls: Integer?, previous_response_id: String?, @@ -65,6 +66,8 @@ module OpenAI attr_accessor background: bool? + attr_accessor conversation: OpenAI::Responses::Response::Conversation? + attr_accessor max_output_tokens: Integer? attr_accessor max_tool_calls: Integer? 
@@ -126,6 +129,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, ?background: bool?, + ?conversation: OpenAI::Responses::Response::Conversation?, ?max_output_tokens: Integer?, ?max_tool_calls: Integer?, ?previous_response_id: String?, @@ -159,6 +163,7 @@ module OpenAI tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, background: bool?, + conversation: OpenAI::Responses::Response::Conversation?, max_output_tokens: Integer?, max_tool_calls: Integer?, previous_response_id: String?, @@ -232,6 +237,16 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::tool_choice] end + type conversation = { id: String } + + class Conversation < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + type service_tier = :auto | :default | :flex | :scale | :priority module ServiceTier diff --git a/sig/openai/models/responses/response_conversation_param.rbs b/sig/openai/models/responses/response_conversation_param.rbs new file mode 100644 index 00000000..9784d5c7 --- /dev/null +++ b/sig/openai/models/responses/response_conversation_param.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + module Responses + type response_conversation_param = { id: String } + + class ResponseConversationParam < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + def initialize: (id: String) -> void + + def to_hash: -> { id: String } + end + end + end +end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index 83f641b4..4a636dd1 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -4,6 +4,7 @@ module OpenAI type response_create_params = { background: bool?, + conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation?, include: ::Array[OpenAI::Models::Responses::response_includable]?, input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, @@ -37,6 +38,8 @@ module OpenAI attr_accessor background: bool? + attr_accessor conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation? + attr_accessor include: ::Array[OpenAI::Models::Responses::response_includable]? attr_reader input: OpenAI::Models::Responses::ResponseCreateParams::input? 
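Taken together, these signatures accept the new `conversation` argument either as a
conversation ID string or as a `{id: ...}` param object. A minimal sketch, assuming a
configured client in `openai`, a placeholder model name, and an existing conversation:

    response = openai.responses.create(
      model: "gpt-4.1",                          # placeholder
      input: "Summarize our discussion so far.",
      conversation: "conv_123"                   # equivalently: {id: "conv_123"}
    )

As documented above, `conversation` cannot be combined with `previous_response_id`.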
@@ -113,6 +116,7 @@ module OpenAI def initialize: ( ?background: bool?, + ?conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, @@ -142,6 +146,7 @@ module OpenAI def to_hash: -> { background: bool?, + conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation?, include: ::Array[OpenAI::Models::Responses::response_includable]?, input: OpenAI::Models::Responses::ResponseCreateParams::input, instructions: String?, @@ -169,6 +174,15 @@ module OpenAI request_options: OpenAI::RequestOptions } + type conversation = + String | OpenAI::Responses::ResponseConversationParam + + module Conversation + extend OpenAI::Internal::Type::Union + + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCreateParams::conversation] + end + type input = String | OpenAI::Models::Responses::response_input module Input diff --git a/sig/openai/resources/conversations.rbs b/sig/openai/resources/conversations.rbs new file mode 100644 index 00000000..ae48106e --- /dev/null +++ b/sig/openai/resources/conversations.rbs @@ -0,0 +1,31 @@ +module OpenAI + module Resources + class Conversations + attr_reader items: OpenAI::Resources::Conversations::Items + + def create: ( + ?items: ::Array[OpenAI::Models::Responses::response_input_item]?, + ?metadata: OpenAI::Models::metadata?, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::Conversation + + def retrieve: ( + String conversation_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::Conversation + + def update: ( + String conversation_id, + metadata: ::Hash[Symbol, String], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::Conversation + + def delete: ( + String conversation_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::ConversationDeletedResource + + def initialize: (client: OpenAI::Client) -> void + end + end +end diff --git a/sig/openai/resources/conversations/items.rbs b/sig/openai/resources/conversations/items.rbs new file mode 100644 index 00000000..f4349e59 --- /dev/null +++ b/sig/openai/resources/conversations/items.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Resources + class Conversations + class Items + def create: ( + String conversation_id, + items: ::Array[OpenAI::Models::Responses::response_input_item], + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::ConversationItemList + + def retrieve: ( + String item_id, + conversation_id: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Conversations::conversation_item + + def list: ( + String conversation_id, + ?after: String, + ?include: ::Array[OpenAI::Models::Responses::response_includable], + ?limit: Integer, + ?order: OpenAI::Models::Conversations::ItemListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::ConversationCursorPage[OpenAI::Models::Conversations::conversation_item] + + def delete: ( + String item_id, + conversation_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Conversations::Conversation + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 6e926188..aa09eca6 
100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -5,6 +5,7 @@ module OpenAI def create: ( ?background: bool?, + ?conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, @@ -34,6 +35,7 @@ module OpenAI def stream_raw: ( ?background: bool?, + ?conversation: OpenAI::Models::Responses::ResponseCreateParams::conversation?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?input: OpenAI::Models::Responses::ResponseCreateParams::input, ?instructions: String?, diff --git a/sig/openai/resources/responses/input_items.rbs b/sig/openai/resources/responses/input_items.rbs index 9fcece7a..eb2a0262 100644 --- a/sig/openai/resources/responses/input_items.rbs +++ b/sig/openai/resources/responses/input_items.rbs @@ -5,7 +5,6 @@ module OpenAI def list: ( String response_id, ?after: String, - ?before: String, ?include: ::Array[OpenAI::Models::Responses::response_includable], ?limit: Integer, ?order: OpenAI::Models::Responses::InputItemListParams::order, diff --git a/test/openai/resource_namespaces.rb b/test/openai/resource_namespaces.rb index 5134069c..db0dcbc7 100644 --- a/test/openai/resource_namespaces.rb +++ b/test/openai/resource_namespaces.rb @@ -32,6 +32,9 @@ module Files end end + module Conversations + end + module Evals module Runs end diff --git a/test/openai/resources/conversations/items_test.rb b/test/openai/resources/conversations/items_test.rb new file mode 100644 index 00000000..bb012cd0 --- /dev/null +++ b/test/openai/resources/conversations/items_test.rb @@ -0,0 +1,327 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::Conversations::ItemsTest < OpenAI::Test::ResourceTest + def test_create_required_params + response = @openai.conversations.items.create("conv_123", items: [{content: "string", role: :user}]) + + assert_pattern do + response => OpenAI::Conversations::ConversationItemList + end + + assert_pattern do + response => { + data: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::ConversationItem]), + first_id: String, + has_more: OpenAI::Internal::Type::Boolean, + last_id: String, + object: Symbol + } + end + end + + def test_retrieve_required_params + response = @openai.conversations.items.retrieve("msg_abc", conversation_id: "conv_123") + + assert_pattern do + response => OpenAI::Conversations::ConversationItem + end + + assert_pattern do + case response + in OpenAI::Conversations::Message + in OpenAI::Responses::ResponseFunctionToolCallItem + in OpenAI::Responses::ResponseFunctionToolCallOutputItem + in OpenAI::Responses::ResponseFileSearchToolCall + in OpenAI::Responses::ResponseFunctionWebSearch + in OpenAI::Conversations::ConversationItem::ImageGenerationCall + in OpenAI::Responses::ResponseComputerToolCall + in OpenAI::Responses::ResponseComputerToolCallOutputItem + in OpenAI::Responses::ResponseReasoningItem + in OpenAI::Responses::ResponseCodeInterpreterToolCall + in OpenAI::Conversations::ConversationItem::LocalShellCall + in OpenAI::Conversations::ConversationItem::LocalShellCallOutput + in OpenAI::Conversations::ConversationItem::McpListTools + in OpenAI::Conversations::ConversationItem::McpApprovalRequest + in OpenAI::Conversations::ConversationItem::McpApprovalResponse + in OpenAI::Conversations::ConversationItem::McpCall + in 
OpenAI::Responses::ResponseCustomToolCall + in OpenAI::Responses::ResponseCustomToolCallOutput + end + end + + assert_pattern do + case response + in { + type: :message, + id: String, + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::Message::Content]), + role: OpenAI::Conversations::Message::Role, + status: OpenAI::Conversations::Message::Status + } + in { + type: :function_call_output, + id: String, + call_id: String, + output: String, + status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status | nil + } + in { + type: :file_search_call, + id: String, + queries: ^(OpenAI::Internal::Type::ArrayOf[String]), + status: OpenAI::Responses::ResponseFileSearchToolCall::Status, + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result]) | nil + } + in { + type: :web_search_call, + id: String, + action: OpenAI::Responses::ResponseFunctionWebSearch::Action, + status: OpenAI::Responses::ResponseFunctionWebSearch::Status + } + in { + type: :image_generation_call, + id: String, + result: String | nil, + status: OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status + } + in { + type: :computer_call, + id: String, + action: OpenAI::Responses::ResponseComputerToolCall::Action, + call_id: String, + pending_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck]), + status: OpenAI::Responses::ResponseComputerToolCall::Status + } + in { + type: :computer_call_output, + id: String, + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, + status: OpenAI::Responses::ResponseComputerToolCallOutputItem::Status | nil + } + in { + type: :reasoning, + id: String, + summary: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Summary]), + content: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Content]) | nil, + encrypted_content: String | nil, + status: OpenAI::Responses::ResponseReasoningItem::Status | nil + } + in { + type: :code_interpreter_call, + id: String, + code: String | nil, + container_id: String, + outputs: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output]) | nil, + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status + } + in { + type: :local_shell_call, + id: String, + action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Conversations::ConversationItem::LocalShellCall::Status + } + in { + type: :local_shell_call_output, + id: String, + output: String, + status: OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status | nil + } + in { + type: :mcp_list_tools, + id: String, + server_label: String, + tools: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::ConversationItem::McpListTools::Tool]), + error: String | nil + } + in {type: :mcp_approval_request, id: String, arguments: String, name: String, server_label: String} + in { + type: :mcp_approval_response, + id: String, + approval_request_id: String, + approve: OpenAI::Internal::Type::Boolean, + reason: String | nil + } + in { + type: :mcp_call, + id: String, + arguments: String, + name: String, + server_label: String, + error: String | nil, + output: String | nil + } + in {type: :custom_tool_call, call_id: String, input: 
String, name: String, id: String | nil} + in {type: :custom_tool_call_output, call_id: String, output: String, id: String | nil} + end + end + end + + def test_list + response = @openai.conversations.items.list("conv_123") + + assert_pattern do + response => OpenAI::Internal::ConversationCursorPage + end + + row = response.to_enum.first + return if row.nil? + + assert_pattern do + row => OpenAI::Conversations::ConversationItem + end + + assert_pattern do + case row + in OpenAI::Conversations::Message + in OpenAI::Responses::ResponseFunctionToolCallItem + in OpenAI::Responses::ResponseFunctionToolCallOutputItem + in OpenAI::Responses::ResponseFileSearchToolCall + in OpenAI::Responses::ResponseFunctionWebSearch + in OpenAI::Conversations::ConversationItem::ImageGenerationCall + in OpenAI::Responses::ResponseComputerToolCall + in OpenAI::Responses::ResponseComputerToolCallOutputItem + in OpenAI::Responses::ResponseReasoningItem + in OpenAI::Responses::ResponseCodeInterpreterToolCall + in OpenAI::Conversations::ConversationItem::LocalShellCall + in OpenAI::Conversations::ConversationItem::LocalShellCallOutput + in OpenAI::Conversations::ConversationItem::McpListTools + in OpenAI::Conversations::ConversationItem::McpApprovalRequest + in OpenAI::Conversations::ConversationItem::McpApprovalResponse + in OpenAI::Conversations::ConversationItem::McpCall + in OpenAI::Responses::ResponseCustomToolCall + in OpenAI::Responses::ResponseCustomToolCallOutput + end + end + + assert_pattern do + case row + in { + type: :message, + id: String, + content: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Conversations::Message::Content]), + role: OpenAI::Conversations::Message::Role, + status: OpenAI::Conversations::Message::Status + } + in { + type: :function_call_output, + id: String, + call_id: String, + output: String, + status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status | nil + } + in { + type: :file_search_call, + id: String, + queries: ^(OpenAI::Internal::Type::ArrayOf[String]), + status: OpenAI::Responses::ResponseFileSearchToolCall::Status, + results: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result]) | nil + } + in { + type: :web_search_call, + id: String, + action: OpenAI::Responses::ResponseFunctionWebSearch::Action, + status: OpenAI::Responses::ResponseFunctionWebSearch::Status + } + in { + type: :image_generation_call, + id: String, + result: String | nil, + status: OpenAI::Conversations::ConversationItem::ImageGenerationCall::Status + } + in { + type: :computer_call, + id: String, + action: OpenAI::Responses::ResponseComputerToolCall::Action, + call_id: String, + pending_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck]), + status: OpenAI::Responses::ResponseComputerToolCall::Status + } + in { + type: :computer_call_output, + id: String, + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + acknowledged_safety_checks: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck]) | nil, + status: OpenAI::Responses::ResponseComputerToolCallOutputItem::Status | nil + } + in { + type: :reasoning, + id: String, + summary: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Summary]), + content: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Content]) | nil, + encrypted_content: String | nil, + status: 
OpenAI::Responses::ResponseReasoningItem::Status | nil + } + in { + type: :code_interpreter_call, + id: String, + code: String | nil, + container_id: String, + outputs: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Output]) | nil, + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status + } + in { + type: :local_shell_call, + id: String, + action: OpenAI::Conversations::ConversationItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Conversations::ConversationItem::LocalShellCall::Status + } + in { + type: :local_shell_call_output, + id: String, + output: String, + status: OpenAI::Conversations::ConversationItem::LocalShellCallOutput::Status | nil + } + in { + type: :mcp_list_tools, + id: String, + server_label: String, + tools: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Conversations::ConversationItem::McpListTools::Tool]), + error: String | nil + } + in {type: :mcp_approval_request, id: String, arguments: String, name: String, server_label: String} + in { + type: :mcp_approval_response, + id: String, + approval_request_id: String, + approve: OpenAI::Internal::Type::Boolean, + reason: String | nil + } + in { + type: :mcp_call, + id: String, + arguments: String, + name: String, + server_label: String, + error: String | nil, + output: String | nil + } + in {type: :custom_tool_call, call_id: String, input: String, name: String, id: String | nil} + in {type: :custom_tool_call_output, call_id: String, output: String, id: String | nil} + end + end + end + + def test_delete_required_params + response = @openai.conversations.items.delete("msg_abc", conversation_id: "conv_123") + + assert_pattern do + response => OpenAI::Conversations::Conversation + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + metadata: OpenAI::Internal::Type::Unknown, + object: Symbol + } + end + end +end diff --git a/test/openai/resources/conversations_test.rb b/test/openai/resources/conversations_test.rb new file mode 100644 index 00000000..3064e2af --- /dev/null +++ b/test/openai/resources/conversations_test.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::Resources::ConversationsTest < OpenAI::Test::ResourceTest + def test_create + response = @openai.conversations.create + + assert_pattern do + response => OpenAI::Conversations::Conversation + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + metadata: OpenAI::Internal::Type::Unknown, + object: Symbol + } + end + end + + def test_retrieve + response = @openai.conversations.retrieve("conv_123") + + assert_pattern do + response => OpenAI::Conversations::Conversation + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + metadata: OpenAI::Internal::Type::Unknown, + object: Symbol + } + end + end + + def test_update_required_params + response = @openai.conversations.update("conv_123", metadata: {foo: "string"}) + + assert_pattern do + response => OpenAI::Conversations::Conversation + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + metadata: OpenAI::Internal::Type::Unknown, + object: Symbol + } + end + end + + def test_delete + response = @openai.conversations.delete("conv_123") + + assert_pattern do + response => OpenAI::Conversations::ConversationDeletedResource + end + + assert_pattern do + response => { + id: String, + deleted: OpenAI::Internal::Type::Boolean, + object: Symbol + } + end + end +end diff --git 
a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index 28d1d7e4..d8e57585 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -27,6 +27,7 @@ def test_create tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, + conversation: OpenAI::Responses::Response::Conversation | nil, max_output_tokens: Integer | nil, max_tool_calls: Integer | nil, previous_response_id: String | nil, @@ -69,6 +70,7 @@ def test_retrieve tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, + conversation: OpenAI::Responses::Response::Conversation | nil, max_output_tokens: Integer | nil, max_tool_calls: Integer | nil, previous_response_id: String | nil, @@ -119,6 +121,7 @@ def test_cancel tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, background: OpenAI::Internal::Type::Boolean | nil, + conversation: OpenAI::Responses::Response::Conversation | nil, max_output_tokens: Integer | nil, max_tool_calls: Integer | nil, previous_response_id: String | nil, From 6ee1c74e32f519343c48e8b249f0866952dea45a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 21 Aug 2025 20:30:46 +0000 Subject: [PATCH 291/295] feat(api): Add connectors support for MCP tool --- .stats.yml | 6 +- lib/openai/models/responses/tool.rb | 169 ++++++++++++++--- rbi/openai/models/responses/tool.rbi | 274 ++++++++++++++++++++++++--- sig/openai/models/responses/tool.rbs | 101 ++++++++-- 4 files changed, 472 insertions(+), 78 deletions(-) diff --git a/.stats.yml b/.stats.yml index fb355661..6725d699 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 117 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-4bcdfe525558e67a09b32dec7a573e87b94bab47db3951eb4a86a4dafb60296c.yml -openapi_spec_hash: 49e7e46bfe9f61b7b7a60e36840c0cd7 -config_hash: e4514526ae01126a61f9b6c14a351737 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-ddbdf9343316047e8a773c54fb24e4a8d225955e202a1888fde6f9c8898ebf98.yml +openapi_spec_hash: 9802f6dd381558466c897f6e387e06ca +config_hash: fe0ea26680ac2075a6cd66416aefe7db diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 752c5f53..37feec46 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -45,12 +45,6 @@ class Mcp < OpenAI::Internal::Type::BaseModel # @return [String] required :server_label, String - # @!attribute server_url - # The URL for the MCP server. - # - # @return [String] - required :server_url, String - # @!attribute type # The type of the MCP tool. Always `mcp`. # @@ -60,9 +54,37 @@ class Mcp < OpenAI::Internal::Type::BaseModel # @!attribute allowed_tools # List of allowed tool names or a filter object. # - # @return [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] + # @return [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpToolFilter, nil] optional :allowed_tools, union: -> { OpenAI::Responses::Tool::Mcp::AllowedTools }, nil?: true + # @!attribute authorization + # An OAuth access token that can be used with a remote MCP server, either with a + # custom MCP server URL or a service connector. 
Your application must handle the + # OAuth authorization flow and provide the token here. + # + # @return [String, nil] + optional :authorization, String + + # @!attribute connector_id + # Identifier for service connectors, like those available in ChatGPT. One of + # `server_url` or `connector_id` must be provided. Learn more about service + # connectors + # [here](https://platform.openai.com/docs/guides/tools-remote-mcp#connectors). + # + # Currently supported `connector_id` values are: + # + # - Dropbox: `connector_dropbox` + # - Gmail: `connector_gmail` + # - Google Calendar: `connector_googlecalendar` + # - Google Drive: `connector_googledrive` + # - Microsoft Teams: `connector_microsoftteams` + # - Outlook Calendar: `connector_outlookcalendar` + # - Outlook Email: `connector_outlookemail` + # - SharePoint: `connector_sharepoint` + # + # @return [Symbol, OpenAI::Models::Responses::Tool::Mcp::ConnectorID, nil] + optional :connector_id, enum: -> { OpenAI::Responses::Tool::Mcp::ConnectorID } + # @!attribute headers # Optional HTTP headers to send to the MCP server. Use for authentication or other # purposes. @@ -82,7 +104,14 @@ class Mcp < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :server_description, String - # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, server_description: nil, type: :mcp) + # @!attribute server_url + # The URL for the MCP server. One of `server_url` or `connector_id` must be + # provided. + # + # @return [String, nil] + optional :server_url, String + + # @!method initialize(server_label:, allowed_tools: nil, authorization: nil, connector_id: nil, headers: nil, require_approval: nil, server_description: nil, server_url: nil, type: :mcp) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::Tool::Mcp} for more details. # @@ -92,9 +121,11 @@ class Mcp < OpenAI::Internal::Type::BaseModel # # @param server_label [String] A label for this MCP server, used to identify it in tool calls. # - # @param server_url [String] The URL for the MCP server. + # @param allowed_tools [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpToolFilter, nil] List of allowed tool names or a filter object. + # + # @param authorization [String] An OAuth access token that can be used with a remote MCP server, either # - # @param allowed_tools [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] List of allowed tool names or a filter object. + # @param connector_id [Symbol, OpenAI::Models::Responses::Tool::Mcp::ConnectorID] Identifier for service connectors, like those available in ChatGPT. One of # # @param headers [Hash{Symbol=>String}, nil] Optional HTTP headers to send to the MCP server. Use for authentication # @@ -102,6 +133,8 @@ class Mcp < OpenAI::Internal::Type::BaseModel # # @param server_description [String] Optional description of the MCP server, used to provide more context. # + # @param server_url [String] The URL for the MCP server. One of `server_url` or `connector_id` must be + # # @param type [Symbol, :mcp] The type of the MCP tool. Always `mcp`. # List of allowed tool names or a filter object. @@ -114,34 +147,85 @@ module AllowedTools variant -> { OpenAI::Models::Responses::Tool::Mcp::AllowedTools::StringArray } # A filter object to specify which tools are allowed. 
- variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter } + variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter } + + class McpToolFilter < OpenAI::Internal::Type::BaseModel + # @!attribute read_only + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + # + # @return [Boolean, nil] + optional :read_only, OpenAI::Internal::Type::Boolean - class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel # @!attribute tool_names # List of allowed tool names. # # @return [Array, nil] optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] - # @!method initialize(tool_names: nil) + # @!method initialize(read_only: nil, tool_names: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpToolFilter} for more + # details. + # # A filter object to specify which tools are allowed. # + # @param read_only [Boolean] Indicates whether or not a tool modifies data or is read-only. If an + # # @param tool_names [Array] List of allowed tool names. end # @!method self.variants - # @return [Array(Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter)] + # @return [Array(Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpToolFilter)] # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end + # Identifier for service connectors, like those available in ChatGPT. One of + # `server_url` or `connector_id` must be provided. Learn more about service + # connectors + # [here](https://platform.openai.com/docs/guides/tools-remote-mcp#connectors). + # + # Currently supported `connector_id` values are: + # + # - Dropbox: `connector_dropbox` + # - Gmail: `connector_gmail` + # - Google Calendar: `connector_googlecalendar` + # - Google Drive: `connector_googledrive` + # - Microsoft Teams: `connector_microsoftteams` + # - Outlook Calendar: `connector_outlookcalendar` + # - Outlook Email: `connector_outlookemail` + # - SharePoint: `connector_sharepoint` + # + # @see OpenAI::Models::Responses::Tool::Mcp#connector_id + module ConnectorID + extend OpenAI::Internal::Type::Enum + + CONNECTOR_DROPBOX = :connector_dropbox + CONNECTOR_GMAIL = :connector_gmail + CONNECTOR_GOOGLECALENDAR = :connector_googlecalendar + CONNECTOR_GOOGLEDRIVE = :connector_googledrive + CONNECTOR_MICROSOFTTEAMS = :connector_microsoftteams + CONNECTOR_OUTLOOKCALENDAR = :connector_outlookcalendar + CONNECTOR_OUTLOOKEMAIL = :connector_outlookemail + CONNECTOR_SHAREPOINT = :connector_sharepoint + + # @!method self.values + # @return [Array] + end + # Specify which of the MCP server's tools require approval. # # @see OpenAI::Models::Responses::Tool::Mcp#require_approval module RequireApproval extend OpenAI::Internal::Type::Union + # Specify which of the MCP server's tools require approval. Can be + # `always`, `never`, or a filter object associated with tools + # that require approval. variant -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter } # Specify a single approval policy for all tools. One of `always` or @@ -151,13 +235,13 @@ module RequireApproval class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel # @!attribute always - # A list of tools that always require approval. + # A filter object to specify which tools are allowed. 
# # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil] optional :always, -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always } # @!attribute never - # A list of tools that never require approval. + # A filter object to specify which tools are allowed. # # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil] optional :never, -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } @@ -167,36 +251,69 @@ class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel # {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter} # for more details. # - # @param always [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval. + # Specify which of the MCP server's tools require approval. Can be `always`, + # `never`, or a filter object associated with tools that require approval. + # + # @param always [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A filter object to specify which tools are allowed. # - # @param never [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval. + # @param never [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A filter object to specify which tools are allowed. # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always class Always < OpenAI::Internal::Type::BaseModel + # @!attribute read_only + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + # + # @return [Boolean, nil] + optional :read_only, OpenAI::Internal::Type::Boolean + # @!attribute tool_names - # List of tools that require approval. + # List of allowed tool names. # # @return [Array, nil] optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] - # @!method initialize(tool_names: nil) - # A list of tools that always require approval. + # @!method initialize(read_only: nil, tool_names: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always} + # for more details. + # + # A filter object to specify which tools are allowed. # - # @param tool_names [Array] List of tools that require approval. + # @param read_only [Boolean] Indicates whether or not a tool modifies data or is read-only. If an + # + # @param tool_names [Array] List of allowed tool names. end # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#never class Never < OpenAI::Internal::Type::BaseModel + # @!attribute read_only + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + # + # @return [Boolean, nil] + optional :read_only, OpenAI::Internal::Type::Boolean + # @!attribute tool_names - # List of tools that do not require approval. + # List of allowed tool names. 
# # @return [Array, nil] optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] - # @!method initialize(tool_names: nil) - # A list of tools that never require approval. + # @!method initialize(read_only: nil, tool_names: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never} + # for more details. + # + # A filter object to specify which tools are allowed. + # + # @param read_only [Boolean] Indicates whether or not a tool modifies data or is read-only. If an # - # @param tool_names [Array] List of tools that do not require approval. + # @param tool_names [Array] List of allowed tool names. end end diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index b5b5d1b5..7764d3a5 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -32,10 +32,6 @@ module OpenAI sig { returns(String) } attr_accessor :server_label - # The URL for the MCP server. - sig { returns(String) } - attr_accessor :server_url - # The type of the MCP tool. Always `mcp`. sig { returns(Symbol) } attr_accessor :type @@ -46,13 +42,51 @@ module OpenAI T.nilable( T.any( T::Array[String], - OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter ) ) ) end attr_accessor :allowed_tools + # An OAuth access token that can be used with a remote MCP server, either with a + # custom MCP server URL or a service connector. Your application must handle the + # OAuth authorization flow and provide the token here. + sig { returns(T.nilable(String)) } + attr_reader :authorization + + sig { params(authorization: String).void } + attr_writer :authorization + + # Identifier for service connectors, like those available in ChatGPT. One of + # `server_url` or `connector_id` must be provided. Learn more about service + # connectors + # [here](https://platform.openai.com/docs/guides/tools-remote-mcp#connectors). + # + # Currently supported `connector_id` values are: + # + # - Dropbox: `connector_dropbox` + # - Gmail: `connector_gmail` + # - Google Calendar: `connector_googlecalendar` + # - Google Drive: `connector_googledrive` + # - Microsoft Teams: `connector_microsoftteams` + # - Outlook Calendar: `connector_outlookcalendar` + # - Outlook Email: `connector_outlookemail` + # - SharePoint: `connector_sharepoint` + sig do + returns( + T.nilable(OpenAI::Responses::Tool::Mcp::ConnectorID::OrSymbol) + ) + end + attr_reader :connector_id + + sig do + params( + connector_id: OpenAI::Responses::Tool::Mcp::ConnectorID::OrSymbol + ).void + end + attr_writer :connector_id + # Optional HTTP headers to send to the MCP server. Use for authentication or other # purposes. sig { returns(T.nilable(T::Hash[Symbol, String])) } @@ -78,20 +112,29 @@ module OpenAI sig { params(server_description: String).void } attr_writer :server_description + # The URL for the MCP server. One of `server_url` or `connector_id` must be + # provided. + sig { returns(T.nilable(String)) } + attr_reader :server_url + + sig { params(server_url: String).void } + attr_writer :server_url + # Give the model access to additional tools via remote Model Context Protocol # (MCP) servers. # [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). 
sig do params( server_label: String, - server_url: String, allowed_tools: T.nilable( T.any( T::Array[String], - OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter::OrHash + OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter::OrHash ) ), + authorization: String, + connector_id: OpenAI::Responses::Tool::Mcp::ConnectorID::OrSymbol, headers: T.nilable(T::Hash[Symbol, String]), require_approval: T.nilable( @@ -101,16 +144,35 @@ module OpenAI ) ), server_description: String, + server_url: String, type: Symbol ).returns(T.attached_class) end def self.new( # A label for this MCP server, used to identify it in tool calls. server_label:, - # The URL for the MCP server. - server_url:, # List of allowed tool names or a filter object. allowed_tools: nil, + # An OAuth access token that can be used with a remote MCP server, either with a + # custom MCP server URL or a service connector. Your application must handle the + # OAuth authorization flow and provide the token here. + authorization: nil, + # Identifier for service connectors, like those available in ChatGPT. One of + # `server_url` or `connector_id` must be provided. Learn more about service + # connectors + # [here](https://platform.openai.com/docs/guides/tools-remote-mcp#connectors). + # + # Currently supported `connector_id` values are: + # + # - Dropbox: `connector_dropbox` + # - Gmail: `connector_gmail` + # - Google Calendar: `connector_googlecalendar` + # - Google Drive: `connector_googledrive` + # - Microsoft Teams: `connector_microsoftteams` + # - Outlook Calendar: `connector_outlookcalendar` + # - Outlook Email: `connector_outlookemail` + # - SharePoint: `connector_sharepoint` + connector_id: nil, # Optional HTTP headers to send to the MCP server. Use for authentication or other # purposes. headers: nil, @@ -118,6 +180,9 @@ module OpenAI require_approval: nil, # Optional description of the MCP server, used to provide more context. server_description: nil, + # The URL for the MCP server. One of `server_url` or `connector_id` must be + # provided. + server_url: nil, # The type of the MCP tool. Always `mcp`. type: :mcp ) @@ -127,15 +192,17 @@ module OpenAI override.returns( { server_label: String, - server_url: String, type: Symbol, allowed_tools: T.nilable( T.any( T::Array[String], - OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter ) ), + authorization: String, + connector_id: + OpenAI::Responses::Tool::Mcp::ConnectorID::OrSymbol, headers: T.nilable(T::Hash[Symbol, String]), require_approval: T.nilable( @@ -144,7 +211,8 @@ module OpenAI OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol ) ), - server_description: String + server_description: String, + server_url: String } ) end @@ -159,19 +227,29 @@ module OpenAI T.type_alias do T.any( T::Array[String], - OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter ) end - class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + class McpToolFilter < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do T.any( - OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, + OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter, OpenAI::Internal::AnyHash ) end + # Indicates whether or not a tool modifies data or is read-only. 
If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :read_only + + sig { params(read_only: T::Boolean).void } + attr_writer :read_only + # List of allowed tool names. sig { returns(T.nilable(T::Array[String])) } attr_reader :tool_names @@ -181,15 +259,27 @@ module OpenAI # A filter object to specify which tools are allowed. sig do - params(tool_names: T::Array[String]).returns(T.attached_class) + params( + read_only: T::Boolean, + tool_names: T::Array[String] + ).returns(T.attached_class) end def self.new( + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + read_only: nil, # List of allowed tool names. tool_names: nil ) end - sig { override.returns({ tool_names: T::Array[String] }) } + sig do + override.returns( + { read_only: T::Boolean, tool_names: T::Array[String] } + ) + end def to_hash end end @@ -209,6 +299,82 @@ module OpenAI ) end + # Identifier for service connectors, like those available in ChatGPT. One of + # `server_url` or `connector_id` must be provided. Learn more about service + # connectors + # [here](https://platform.openai.com/docs/guides/tools-remote-mcp#connectors). + # + # Currently supported `connector_id` values are: + # + # - Dropbox: `connector_dropbox` + # - Gmail: `connector_gmail` + # - Google Calendar: `connector_googlecalendar` + # - Google Drive: `connector_googledrive` + # - Microsoft Teams: `connector_microsoftteams` + # - Outlook Calendar: `connector_outlookcalendar` + # - Outlook Email: `connector_outlookemail` + # - SharePoint: `connector_sharepoint` + module ConnectorID + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::Mcp::ConnectorID) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + CONNECTOR_DROPBOX = + T.let( + :connector_dropbox, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_GMAIL = + T.let( + :connector_gmail, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_GOOGLECALENDAR = + T.let( + :connector_googlecalendar, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_GOOGLEDRIVE = + T.let( + :connector_googledrive, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_MICROSOFTTEAMS = + T.let( + :connector_microsoftteams, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_OUTLOOKCALENDAR = + T.let( + :connector_outlookcalendar, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_OUTLOOKEMAIL = + T.let( + :connector_outlookemail, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + CONNECTOR_SHAREPOINT = + T.let( + :connector_sharepoint, + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::Mcp::ConnectorID::TaggedSymbol + ] + ) + end + def self.values + end + end + # Specify which of the MCP server's tools require approval. module RequireApproval extend OpenAI::Internal::Type::Union @@ -230,7 +396,7 @@ module OpenAI ) end - # A list of tools that always require approval. + # A filter object to specify which tools are allowed. 
sig do returns( T.nilable( @@ -248,7 +414,7 @@ module OpenAI end attr_writer :always - # A list of tools that never require approval. + # A filter object to specify which tools are allowed. sig do returns( T.nilable( @@ -266,6 +432,8 @@ module OpenAI end attr_writer :never + # Specify which of the MCP server's tools require approval. Can be `always`, + # `never`, or a filter object associated with tools that require approval. sig do params( always: @@ -275,9 +443,9 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A list of tools that always require approval. + # A filter object to specify which tools are allowed. always: nil, - # A list of tools that never require approval. + # A filter object to specify which tools are allowed. never: nil ) end @@ -304,24 +472,46 @@ module OpenAI ) end - # List of tools that require approval. + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :read_only + + sig { params(read_only: T::Boolean).void } + attr_writer :read_only + + # List of allowed tool names. sig { returns(T.nilable(T::Array[String])) } attr_reader :tool_names sig { params(tool_names: T::Array[String]).void } attr_writer :tool_names - # A list of tools that always require approval. + # A filter object to specify which tools are allowed. sig do - params(tool_names: T::Array[String]).returns(T.attached_class) + params( + read_only: T::Boolean, + tool_names: T::Array[String] + ).returns(T.attached_class) end def self.new( - # List of tools that require approval. + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + read_only: nil, + # List of allowed tool names. tool_names: nil ) end - sig { override.returns({ tool_names: T::Array[String] }) } + sig do + override.returns( + { read_only: T::Boolean, tool_names: T::Array[String] } + ) + end def to_hash end end @@ -335,24 +525,46 @@ module OpenAI ) end - # List of tools that do not require approval. + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. + sig { returns(T.nilable(T::Boolean)) } + attr_reader :read_only + + sig { params(read_only: T::Boolean).void } + attr_writer :read_only + + # List of allowed tool names. sig { returns(T.nilable(T::Array[String])) } attr_reader :tool_names sig { params(tool_names: T::Array[String]).void } attr_writer :tool_names - # A list of tools that never require approval. + # A filter object to specify which tools are allowed. sig do - params(tool_names: T::Array[String]).returns(T.attached_class) + params( + read_only: T::Boolean, + tool_names: T::Array[String] + ).returns(T.attached_class) end def self.new( - # List of tools that do not require approval. + # Indicates whether or not a tool modifies data or is read-only. If an MCP server + # is + # [annotated with `readOnlyHint`](https://modelcontextprotocol.io/specification/2025-06-18/schema#toolannotations-readonlyhint), + # it will match this filter. 
+ read_only: nil, + # List of allowed tool names. tool_names: nil ) end - sig { override.returns({ tool_names: T::Array[String] }) } + sig do + override.returns( + { read_only: T::Boolean, tool_names: T::Array[String] } + ) + end def to_hash end end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 71600464..130dbb9f 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -18,23 +18,33 @@ module OpenAI type mcp = { server_label: String, - server_url: String, type: :mcp, allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + authorization: String, + connector_id: OpenAI::Models::Responses::Tool::Mcp::connector_id, headers: ::Hash[Symbol, String]?, require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, - server_description: String + server_description: String, + server_url: String } class Mcp < OpenAI::Internal::Type::BaseModel attr_accessor server_label: String - attr_accessor server_url: String - attr_accessor type: :mcp attr_accessor allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools? + attr_reader authorization: String? + + def authorization=: (String) -> String + + attr_reader connector_id: OpenAI::Models::Responses::Tool::Mcp::connector_id? + + def connector_id=: ( + OpenAI::Models::Responses::Tool::Mcp::connector_id + ) -> OpenAI::Models::Responses::Tool::Mcp::connector_id + attr_accessor headers: ::Hash[Symbol, String]? attr_accessor require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? @@ -43,43 +53,59 @@ module OpenAI def server_description=: (String) -> String + attr_reader server_url: String? + + def server_url=: (String) -> String + def initialize: ( server_label: String, - server_url: String, ?allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + ?authorization: String, + ?connector_id: OpenAI::Models::Responses::Tool::Mcp::connector_id, ?headers: ::Hash[Symbol, String]?, ?require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, ?server_description: String, + ?server_url: String, ?type: :mcp ) -> void def to_hash: -> { server_label: String, - server_url: String, type: :mcp, allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + authorization: String, + connector_id: OpenAI::Models::Responses::Tool::Mcp::connector_id, headers: ::Hash[Symbol, String]?, require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, - server_description: String + server_description: String, + server_url: String } type allowed_tools = ::Array[String] - | OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + | OpenAI::Responses::Tool::Mcp::AllowedTools::McpToolFilter module AllowedTools extend OpenAI::Internal::Type::Union - type mcp_allowed_tools_filter = { tool_names: ::Array[String] } + type mcp_tool_filter = + { read_only: bool, tool_names: ::Array[String] } + + class McpToolFilter < OpenAI::Internal::Type::BaseModel + attr_reader read_only: bool? + + def read_only=: (bool) -> bool - class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel attr_reader tool_names: ::Array[String]? 
def tool_names=: (::Array[String]) -> ::Array[String] - def initialize: (?tool_names: ::Array[String]) -> void + def initialize: ( + ?read_only: bool, + ?tool_names: ::Array[String] + ) -> void - def to_hash: -> { tool_names: ::Array[String] } + def to_hash: -> { read_only: bool, tool_names: ::Array[String] } end def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::allowed_tools] @@ -87,6 +113,31 @@ module OpenAI StringArray: OpenAI::Internal::Type::Converter end + type connector_id = + :connector_dropbox + | :connector_gmail + | :connector_googlecalendar + | :connector_googledrive + | :connector_microsoftteams + | :connector_outlookcalendar + | :connector_outlookemail + | :connector_sharepoint + + module ConnectorID + extend OpenAI::Internal::Type::Enum + + CONNECTOR_DROPBOX: :connector_dropbox + CONNECTOR_GMAIL: :connector_gmail + CONNECTOR_GOOGLECALENDAR: :connector_googlecalendar + CONNECTOR_GOOGLEDRIVE: :connector_googledrive + CONNECTOR_MICROSOFTTEAMS: :connector_microsoftteams + CONNECTOR_OUTLOOKCALENDAR: :connector_outlookcalendar + CONNECTOR_OUTLOOKEMAIL: :connector_outlookemail + CONNECTOR_SHAREPOINT: :connector_sharepoint + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::connector_id] + end + type require_approval = OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter | OpenAI::Models::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting @@ -123,28 +174,42 @@ module OpenAI never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never } - type always = { tool_names: ::Array[String] } + type always = { read_only: bool, tool_names: ::Array[String] } class Always < OpenAI::Internal::Type::BaseModel + attr_reader read_only: bool? + + def read_only=: (bool) -> bool + attr_reader tool_names: ::Array[String]? def tool_names=: (::Array[String]) -> ::Array[String] - def initialize: (?tool_names: ::Array[String]) -> void + def initialize: ( + ?read_only: bool, + ?tool_names: ::Array[String] + ) -> void - def to_hash: -> { tool_names: ::Array[String] } + def to_hash: -> { read_only: bool, tool_names: ::Array[String] } end - type never = { tool_names: ::Array[String] } + type never = { read_only: bool, tool_names: ::Array[String] } class Never < OpenAI::Internal::Type::BaseModel + attr_reader read_only: bool? + + def read_only=: (bool) -> bool + attr_reader tool_names: ::Array[String]? def tool_names=: (::Array[String]) -> ::Array[String] - def initialize: (?tool_names: ::Array[String]) -> void + def initialize: ( + ?read_only: bool, + ?tool_names: ::Array[String] + ) -> void - def to_hash: -> { tool_names: ::Array[String] } + def to_hash: -> { read_only: bool, tool_names: ::Array[String] } end end From f3a802f6dd500ed8b348381aba5310352f7642ed Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 21 Aug 2025 21:13:35 +0000 Subject: [PATCH 292/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index d661066e..e7562934 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.18.1" + ".": "0.19.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index c633e58b..46a5d18d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.18.1) + openai (0.19.0) connection_pool GEM diff --git a/README.md b/README.md index 1f7a95c8..d4bc51fd 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.18.1" +gem "openai", "~> 0.19.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 8309d38b..345bbb9b 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.18.1" + VERSION = "0.19.0" end From c035711c3ba7e6eb0c195cc817df8f845bd4e95a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Sun, 24 Aug 2025 20:48:24 +0000 Subject: [PATCH 293/295] chore: add json schema comment for rubocop.yml --- .rubocop.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.rubocop.yml b/.rubocop.yml index 39accffd..decbf212 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,3 +1,4 @@ +# yaml-language-server: $schema=https://www.rubyschema.org/rubocop.json --- # Explicitly disable pending cops for now. This is the default behaviour but # this avoids a large warning every time we run it. @@ -8,7 +9,7 @@ AllCops: - "bin/*" NewCops: enable SuggestExtensions: false - TargetRubyVersion: 3.2.0 + TargetRubyVersion: 3.2 # Whether MFA is required or not should be left to the token configuration. Gemspec/RequireMFA: From 405bab606c5aaf84cc3e27f7277bac6dbf7fa110 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 26 Aug 2025 20:43:27 +0000 Subject: [PATCH 294/295] feat(api): add web search filters --- .stats.yml | 4 +- .../models/conversations/item_list_params.rb | 2 + .../models/evals/run_cancel_response.rb | 4 +- lib/openai/models/evals/run_create_params.rb | 4 +- .../models/evals/run_create_response.rb | 4 +- lib/openai/models/evals/run_list_response.rb | 4 +- .../models/evals/run_retrieve_response.rb | 4 +- lib/openai/models/responses/response.rb | 4 +- .../responses/response_create_params.rb | 6 +- .../responses/response_function_web_search.rb | 36 +- .../models/responses/response_includable.rb | 2 + lib/openai/models/responses/tool.rb | 159 +++++++- lib/openai/resources/conversations.rb | 2 +- lib/openai/resources/responses.rb | 4 +- .../models/conversations/item_list_params.rbi | 4 + .../models/evals/run_cancel_response.rbi | 2 + rbi/openai/models/evals/run_create_params.rbi | 4 + .../models/evals/run_create_response.rbi | 2 + rbi/openai/models/evals/run_list_response.rbi | 2 + .../models/evals/run_retrieve_response.rbi | 2 + rbi/openai/models/responses/response.rbi | 1 + .../responses/response_create_params.rbi | 8 + .../response_function_web_search.rbi | 80 +++- .../models/responses/response_includable.rbi | 2 + rbi/openai/models/responses/tool.rbi | 347 ++++++++++++++++++ rbi/openai/resources/conversations.rbi | 2 +- rbi/openai/resources/conversations/items.rbi | 2 + rbi/openai/resources/responses.rbi | 6 + .../response_function_web_search.rbs | 37 +- sig/openai/models/responses/tool.rbs | 121 ++++++ 30 files changed, 834 insertions(+), 27 deletions(-) diff --git a/.stats.yml b/.stats.yml index 6725d699..3b8aefe8 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 117 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-ddbdf9343316047e8a773c54fb24e4a8d225955e202a1888fde6f9c8898ebf98.yml -openapi_spec_hash: 
9802f6dd381558466c897f6e387e06ca +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-8517ffa1004e31ca2523d617629e64be6fe4f13403ddfd9db5b3be002656cbde.yml +openapi_spec_hash: b64dd8c8b23082a7aa2a3e5c5fffd8bd config_hash: fe0ea26680ac2075a6cd66416aefe7db diff --git a/lib/openai/models/conversations/item_list_params.rb b/lib/openai/models/conversations/item_list_params.rb index ffddb386..3c23afba 100644 --- a/lib/openai/models/conversations/item_list_params.rb +++ b/lib/openai/models/conversations/item_list_params.rb @@ -18,6 +18,8 @@ class ItemListParams < OpenAI::Internal::Type::BaseModel # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 49fda248..e90cb82c 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -686,7 +686,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -708,7 +708,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index edacb844..fbe30490 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -614,7 +614,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -636,7 +636,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
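The `web_search_call.action.sources` includable added above is opt-in on every surface that accepts `include`, including the new conversation items listing. A minimal sketch of requesting it, assuming a configured client; the conversation ID is a placeholder:

```ruby
require "openai"

# Assumes OPENAI_API_KEY is set in the environment.
client = OpenAI::Client.new

# "conv_123" is a placeholder conversation ID.
items = client.conversations.items.list(
  "conv_123",
  include: [:"web_search_call.action.sources"]
)

items.each do |item|
  next unless item.type == :web_search_call

  action = item.action
  # `sources` only exists on `search` actions, and is only populated when
  # the includable above was requested.
  puts action.sources.map(&:url) if action.respond_to?(:sources) && action.sources
end
```

The guard on `action` reflects the union in `response_function_web_search.rb`: only the `Search` variant carries the new `sources` array.
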
diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index 175718a2..544a7530 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -686,7 +686,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -708,7 +708,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 86690fce..e09cb4e5 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -685,7 +685,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -707,7 +707,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 942f613c..c69c7d8f 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -690,7 +690,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -712,7 +712,7 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @param text [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::Text] Configuration options for a text response from the model. Can be plain # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index ec153ad5..66807216 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -121,7 +121,7 @@ class Response < OpenAI::Internal::Type::BaseModel # [function calling](https://platform.openai.com/docs/guides/function-calling). # You can also use custom tools to call your own code. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -310,7 +310,7 @@ class Response < OpenAI::Internal::Type::BaseModel # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 6a193914..4dcbfbfd 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -34,6 +34,8 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer @@ -242,7 +244,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # [function calling](https://platform.openai.com/docs/guides/function-calling). # You can also use custom tools to call your own code. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_logprobs @@ -332,7 +334,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index f8360067..a59495c9 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -76,7 +76,14 @@ class Search < OpenAI::Internal::Type::BaseModel # @return [Symbol, :search] required :type, const: :search - # @!method initialize(query:, type: :search) + # @!attribute sources + # The sources used in the search. + # + # @return [Array, nil] + optional :sources, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] } + + # @!method initialize(query:, sources: nil, type: :search) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search} for more # details. @@ -85,7 +92,34 @@ class Search < OpenAI::Internal::Type::BaseModel # # @param query [String] The search query. # + # @param sources [Array] The sources used in the search. + # # @param type [Symbol, :search] The action type. + + class Source < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of source. Always `url`. + # + # @return [Symbol, :url] + required :type, const: :url + + # @!attribute url + # The URL of the source. + # + # @return [String] + required :url, String + + # @!method initialize(url:, type: :url) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseFunctionWebSearch::Action::Search::Source} + # for more details. + # + # A source used in the search. + # + # @param url [String] The URL of the source. + # + # @param type [Symbol, :url] The type of source. Always `url`. + end end class OpenPage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index bfd6f54d..08bb23ff 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -6,6 +6,8 @@ module Responses # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 37feec46..8d9edd5d 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -35,9 +35,166 @@ module Tool # [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). variant :custom, -> { OpenAI::Responses::CustomTool } + # Search the Internet for sources related to the prompt. Learn more about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + variant -> { OpenAI::Responses::Tool::WebSearchTool } + # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). 
variant -> { OpenAI::Responses::WebSearchTool } + class WebSearchTool < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::Type] + required :type, enum: -> { OpenAI::Responses::Tool::WebSearchTool::Type } + + # @!attribute filters + # Filters for the search. + # + # @return [OpenAI::Models::Responses::Tool::WebSearchTool::Filters, nil] + optional :filters, -> { OpenAI::Responses::Tool::WebSearchTool::Filters }, nil?: true + + # @!attribute search_context_size + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::SearchContextSize, nil] + optional :search_context_size, enum: -> { OpenAI::Responses::Tool::WebSearchTool::SearchContextSize } + + # @!attribute user_location + # The approximate location of the user. + # + # @return [OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation, nil] + optional :user_location, -> { OpenAI::Responses::Tool::WebSearchTool::UserLocation }, nil?: true + + # @!method initialize(type:, filters: nil, search_context_size: nil, user_location: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::WebSearchTool} for more details. + # + # Search the Internet for sources related to the prompt. Learn more about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + # + # @param type [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::Type] The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + # + # @param filters [OpenAI::Models::Responses::Tool::WebSearchTool::Filters, nil] Filters for the search. + # + # @param search_context_size [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search + # + # @param user_location [OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation, nil] The approximate location of the user. + + # The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + # + # @see OpenAI::Models::Responses::Tool::WebSearchTool#type + module Type + extend OpenAI::Internal::Type::Enum + + WEB_SEARCH = :web_search + WEB_SEARCH_2025_08_26 = :web_search_2025_08_26 + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Models::Responses::Tool::WebSearchTool#filters + class Filters < OpenAI::Internal::Type::BaseModel + # @!attribute allowed_domains + # Allowed domains for the search. If not provided, all domains are allowed. + # Subdomains of the provided domains are allowed as well. + # + # Example: `["pubmed.ncbi.nlm.nih.gov"]` + # + # @return [Array, nil] + optional :allowed_domains, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(allowed_domains: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::WebSearchTool::Filters} for more details. + # + # Filters for the search. + # + # @param allowed_domains [Array, nil] Allowed domains for the search. If not provided, all domains are allowed. + end + + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. 
+ # + # @see OpenAI::Models::Responses::Tool::WebSearchTool#search_context_size + module SearchContextSize + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Models::Responses::Tool::WebSearchTool#user_location + class UserLocation < OpenAI::Internal::Type::BaseModel + # @!attribute city + # Free text input for the city of the user, e.g. `San Francisco`. + # + # @return [String, nil] + optional :city, String, nil?: true + + # @!attribute country + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. + # + # @return [String, nil] + optional :country, String, nil?: true + + # @!attribute region + # Free text input for the region of the user, e.g. `California`. + # + # @return [String, nil] + optional :region, String, nil?: true + + # @!attribute timezone + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. + # + # @return [String, nil] + optional :timezone, String, nil?: true + + # @!attribute type + # The type of location approximation. Always `approximate`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::Type, nil] + optional :type, enum: -> { OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type } + + # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation} for more details. + # + # The approximate location of the user. + # + # @param city [String, nil] Free text input for the city of the user, e.g. `San Francisco`. + # + # @param country [String, nil] The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of t + # + # @param region [String, nil] Free text input for the region of the user, e.g. `California`. + # + # @param timezone [String, nil] The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the user + # + # @param type [Symbol, OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::Type] The type of location approximation. Always `approximate`. + + # The type of location approximation. Always `approximate`. + # + # @see OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation#type + module Type + extend OpenAI::Internal::Type::Enum + + APPROXIMATE = :approximate + + # @!method self.values + # @return [Array] + end + end + end + class Mcp < OpenAI::Internal::Type::BaseModel # @!attribute server_label # A label for this MCP server, used to identify it in tool calls. 
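To make the shape above concrete, here is a hedged construction of the new model using only the attributes declared in this hunk; whether the generated initializer coerces nested plain hashes is an assumption about the SDK's `BaseModel`, not something this patch states:

```ruby
# Sketch only: build the web search tool model defined above.
tool = OpenAI::Responses::Tool::WebSearchTool.new(
  type: :web_search_2025_08_26,
  filters: OpenAI::Responses::Tool::WebSearchTool::Filters.new(
    allowed_domains: ["pubmed.ncbi.nlm.nih.gov"]
  ),
  search_context_size: :high,
  # Assumes BaseModel coerces a plain hash into UserLocation.
  user_location: {city: "San Francisco", country: "US", type: :approximate}
)
puts tool.to_hash
```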
@@ -655,7 +812,7 @@ class LocalShell < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::CustomTool, OpenAI::Models::Responses::WebSearchTool)] + # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::CustomTool, OpenAI::Models::Responses::Tool::WebSearchTool, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/resources/conversations.rb b/lib/openai/resources/conversations.rb index 34d66da5..c88b2deb 100644 --- a/lib/openai/resources/conversations.rb +++ b/lib/openai/resources/conversations.rb @@ -9,7 +9,7 @@ class Conversations # Some parameter documentations has been truncated, see # {OpenAI::Models::Conversations::ConversationCreateParams} for more details. # - # Create a conversation with the given ID. + # Create a conversation. # # @overload create(items: nil, metadata: nil, request_options: {}) # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index 38fd98a1..2fc95872 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -67,7 +67,7 @@ class Responses # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # @@ -158,7 +158,7 @@ def create(params = {}) # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceAllowed, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, OpenAI::Models::Responses::ToolChoiceMcp, OpenAI::Models::Responses::ToolChoiceCustom] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # diff --git a/rbi/openai/models/conversations/item_list_params.rbi b/rbi/openai/models/conversations/item_list_params.rbi index 2239e139..dd9f1a2b 100644 --- a/rbi/openai/models/conversations/item_list_params.rbi +++ b/rbi/openai/models/conversations/item_list_params.rbi @@ -25,6 +25,8 @@ module OpenAI # Specify additional output data to include in the model response. 
Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer @@ -93,6 +95,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index 13f94289..5d26cb7d 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -1175,6 +1175,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1207,6 +1208,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index e7ea35ca..26ae3ec8 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -1126,6 +1126,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::CustomTool, + OpenAI::Responses::Tool::WebSearchTool, OpenAI::Responses::WebSearchTool ) ] @@ -1147,6 +1148,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1179,6 +1181,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -1237,6 +1240,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::CustomTool, + OpenAI::Responses::Tool::WebSearchTool, OpenAI::Responses::WebSearchTool ) ], diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index bf8ed611..faa9c70b 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -1175,6 +1175,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1207,6 +1208,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], 
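The recurring `OrHash` additions in these RBI files are the Sorbet-side counterpart of the union widening: each signature should now accept either the typed model or its plain-hash form. A thin, hedged illustration:

```ruby
# Sketch only: both forms implied by the OrHash union above.
tools = [
  OpenAI::Responses::Tool::WebSearchTool.new(type: :web_search),
  {type: :web_search_2025_08_26, search_context_size: :low} # hash form
]
```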
diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index ab3d4305..80412496 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -1171,6 +1171,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1203,6 +1204,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 8797d797..76a1e7c7 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -1177,6 +1177,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -1209,6 +1210,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index efacebab..2eb7bd14 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -349,6 +349,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 3fa43c5b..cfc2a27e 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -36,6 +36,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. 
# - `computer_call_output.output.image_url`: Include image urls from the computer @@ -320,6 +322,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::CustomTool, + OpenAI::Responses::Tool::WebSearchTool, OpenAI::Responses::WebSearchTool ) ] @@ -341,6 +344,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -450,6 +454,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -475,6 +480,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer @@ -692,6 +699,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::CustomTool, + OpenAI::Responses::Tool::WebSearchTool, OpenAI::Responses::WebSearchTool ) ], diff --git a/rbi/openai/models/responses/response_function_web_search.rbi b/rbi/openai/models/responses/response_function_web_search.rbi index 5e7a4263..d9a6f9f5 100644 --- a/rbi/openai/models/responses/response_function_web_search.rbi +++ b/rbi/openai/models/responses/response_function_web_search.rbi @@ -121,21 +121,97 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # The sources used in the search. + sig do + returns( + T.nilable( + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source + ] + ) + ) + end + attr_reader :sources + + sig do + params( + sources: + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source::OrHash + ] + ).void + end + attr_writer :sources + # Action type "search" - Performs a web search query. sig do - params(query: String, type: Symbol).returns(T.attached_class) + params( + query: String, + sources: + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source::OrHash + ], + type: Symbol + ).returns(T.attached_class) end def self.new( # The search query. query:, + # The sources used in the search. + sources: nil, # The action type. type: :search ) end - sig { override.returns({ query: String, type: Symbol }) } + sig do + override.returns( + { + query: String, + type: Symbol, + sources: + T::Array[ + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source + ] + } + ) + end def to_hash end + + class Source < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source, + OpenAI::Internal::AnyHash + ) + end + + # The type of source. Always `url`. + sig { returns(Symbol) } + attr_accessor :type + + # The URL of the source. + sig { returns(String) } + attr_accessor :url + + # A source used in the search. + sig do + params(url: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The URL of the source. + url:, + # The type of source. Always `url`. 
+ type: :url + ) + end + + sig { override.returns({ type: Symbol, url: String }) } + def to_hash + end + end end class OpenPage < OpenAI::Internal::Type::BaseModel diff --git a/rbi/openai/models/responses/response_includable.rbi b/rbi/openai/models/responses/response_includable.rbi index 1007c6fd..3c6236a7 100644 --- a/rbi/openai/models/responses/response_includable.rbi +++ b/rbi/openai/models/responses/response_includable.rbi @@ -6,6 +6,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index 7764d3a5..2f85e887 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -18,10 +18,357 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration, OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::CustomTool, + OpenAI::Responses::Tool::WebSearchTool, OpenAI::Responses::WebSearchTool ) end + class WebSearchTool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::WebSearchTool, + OpenAI::Internal::AnyHash + ) + end + + # The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + sig do + returns(OpenAI::Responses::Tool::WebSearchTool::Type::OrSymbol) + end + attr_accessor :type + + # Filters for the search. + sig do + returns(T.nilable(OpenAI::Responses::Tool::WebSearchTool::Filters)) + end + attr_reader :filters + + sig do + params( + filters: + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::Filters::OrHash + ) + ).void + end + attr_writer :filters + + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::OrSymbol + ) + ) + end + attr_reader :search_context_size + + sig do + params( + search_context_size: + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::OrSymbol + ).void + end + attr_writer :search_context_size + + # The approximate location of the user. + sig do + returns( + T.nilable(OpenAI::Responses::Tool::WebSearchTool::UserLocation) + ) + end + attr_reader :user_location + + sig do + params( + user_location: + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::UserLocation::OrHash + ) + ).void + end + attr_writer :user_location + + # Search the Internet for sources related to the prompt. Learn more about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + sig do + params( + type: OpenAI::Responses::Tool::WebSearchTool::Type::OrSymbol, + filters: + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::Filters::OrHash + ), + search_context_size: + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::OrSymbol, + user_location: + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::UserLocation::OrHash + ) + ).returns(T.attached_class) + end + def self.new( + # The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + type:, + # Filters for the search. + filters: nil, + # High level guidance for the amount of context window space to use for the + # search. 
One of `low`, `medium`, or `high`. `medium` is the default. + search_context_size: nil, + # The approximate location of the user. + user_location: nil + ) + end + + sig do + override.returns( + { + type: OpenAI::Responses::Tool::WebSearchTool::Type::OrSymbol, + filters: + T.nilable(OpenAI::Responses::Tool::WebSearchTool::Filters), + search_context_size: + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::OrSymbol, + user_location: + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::UserLocation + ) + } + ) + end + def to_hash + end + + # The type of the web search tool. One of `web_search` or `web_search_2025_08_26`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::WebSearchTool::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + WEB_SEARCH = + T.let( + :web_search, + OpenAI::Responses::Tool::WebSearchTool::Type::TaggedSymbol + ) + WEB_SEARCH_2025_08_26 = + T.let( + :web_search_2025_08_26, + OpenAI::Responses::Tool::WebSearchTool::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::WebSearchTool::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + + class Filters < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::WebSearchTool::Filters, + OpenAI::Internal::AnyHash + ) + end + + # Allowed domains for the search. If not provided, all domains are allowed. + # Subdomains of the provided domains are allowed as well. + # + # Example: `["pubmed.ncbi.nlm.nih.gov"]` + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :allowed_domains + + # Filters for the search. + sig do + params(allowed_domains: T.nilable(T::Array[String])).returns( + T.attached_class + ) + end + def self.new( + # Allowed domains for the search. If not provided, all domains are allowed. + # Subdomains of the provided domains are allowed as well. + # + # Example: `["pubmed.ncbi.nlm.nih.gov"]` + allowed_domains: nil + ) + end + + sig do + override.returns({ allowed_domains: T.nilable(T::Array[String]) }) + end + def to_hash + end + end + + # High level guidance for the amount of context window space to use for the + # search. One of `low`, `medium`, or `high`. `medium` is the default. + module SearchContextSize + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::WebSearchTool::SearchContextSize::TaggedSymbol + ] + ) + end + def self.values + end + end + + class UserLocation < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::WebSearchTool::UserLocation, + OpenAI::Internal::AnyHash + ) + end + + # Free text input for the city of the user, e.g. `San Francisco`. + sig { returns(T.nilable(String)) } + attr_accessor :city + + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. 
+ sig { returns(T.nilable(String)) } + attr_accessor :country + + # Free text input for the region of the user, e.g. `California`. + sig { returns(T.nilable(String)) } + attr_accessor :region + + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. + sig { returns(T.nilable(String)) } + attr_accessor :timezone + + # The type of location approximation. Always `approximate`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::OrSymbol + ).void + end + attr_writer :type + + # The approximate location of the user. + sig do + params( + city: T.nilable(String), + country: T.nilable(String), + region: T.nilable(String), + timezone: T.nilable(String), + type: + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Free text input for the city of the user, e.g. `San Francisco`. + city: nil, + # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of + # the user, e.g. `US`. + country: nil, + # Free text input for the region of the user, e.g. `California`. + region: nil, + # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the + # user, e.g. `America/Los_Angeles`. + timezone: nil, + # The type of location approximation. Always `approximate`. + type: nil + ) + end + + sig do + override.returns( + { + city: T.nilable(String), + country: T.nilable(String), + region: T.nilable(String), + timezone: T.nilable(String), + type: + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::OrSymbol + } + ) + end + def to_hash + end + + # The type of location approximation. Always `approximate`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + APPROXIMATE = + T.let( + :approximate, + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::WebSearchTool::UserLocation::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + class Mcp < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/resources/conversations.rbi b/rbi/openai/resources/conversations.rbi index 6d83ca4e..ac1427e1 100644 --- a/rbi/openai/resources/conversations.rbi +++ b/rbi/openai/resources/conversations.rbi @@ -6,7 +6,7 @@ module OpenAI sig { returns(OpenAI::Resources::Conversations::Items) } attr_reader :items - # Create a conversation with the given ID. + # Create a conversation. sig do params( items: diff --git a/rbi/openai/resources/conversations/items.rbi b/rbi/openai/resources/conversations/items.rbi index fb396699..d01e15a9 100644 --- a/rbi/openai/resources/conversations/items.rbi +++ b/rbi/openai/resources/conversations/items.rbi @@ -99,6 +99,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. 
# - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 743d1278..0112b97f 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -81,6 +81,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -107,6 +108,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer @@ -342,6 +345,7 @@ module OpenAI OpenAI::Responses::Tool::ImageGeneration::OrHash, OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::CustomTool::OrHash, + OpenAI::Responses::Tool::WebSearchTool::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -372,6 +376,8 @@ module OpenAI # Specify additional output data to include in the model response. Currently # supported values are: # + # - `web_search_call.action.sources`: Include the sources of the web search tool + # call. # - `code_interpreter_call.outputs`: Includes the outputs of python code execution # in code interpreter tool call items. # - `computer_call_output.output.image_url`: Include image urls from the computer diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 2aa9d146..fa239831 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -40,16 +40,47 @@ module OpenAI module Action extend OpenAI::Internal::Type::Union - type search = { query: String, type: :search } + type search = + { + query: String, + type: :search, + sources: ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] + } class Search < OpenAI::Internal::Type::BaseModel attr_accessor query: String attr_accessor type: :search - def initialize: (query: String, ?type: :search) -> void + attr_reader sources: ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source]? 
- def to_hash: -> { query: String, type: :search } + def sources=: ( + ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] + ) -> ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] + + def initialize: ( + query: String, + ?sources: ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source], + ?type: :search + ) -> void + + def to_hash: -> { + query: String, + type: :search, + sources: ::Array[OpenAI::Responses::ResponseFunctionWebSearch::Action::Search::Source] + } + + type source = { type: :url, url: String } + + class Source < OpenAI::Internal::Type::BaseModel + attr_accessor type: :url + + attr_accessor url: String + + def initialize: (url: String, ?type: :url) -> void + + def to_hash: -> { type: :url, url: String } + end end type open_page = { type: :open_page, url: String } diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 130dbb9f..ccc17da9 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -10,11 +10,132 @@ module OpenAI | OpenAI::Responses::Tool::ImageGeneration | OpenAI::Responses::Tool::LocalShell | OpenAI::Responses::CustomTool + | OpenAI::Responses::Tool::WebSearchTool | OpenAI::Responses::WebSearchTool module Tool extend OpenAI::Internal::Type::Union + type web_search_tool = + { + type: OpenAI::Models::Responses::Tool::WebSearchTool::type_, + filters: OpenAI::Responses::Tool::WebSearchTool::Filters?, + search_context_size: OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size, + user_location: OpenAI::Responses::Tool::WebSearchTool::UserLocation? + } + + class WebSearchTool < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Models::Responses::Tool::WebSearchTool::type_ + + attr_accessor filters: OpenAI::Responses::Tool::WebSearchTool::Filters? + + attr_reader search_context_size: OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size? + + def search_context_size=: ( + OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size + ) -> OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size + + attr_accessor user_location: OpenAI::Responses::Tool::WebSearchTool::UserLocation? + + def initialize: ( + type: OpenAI::Models::Responses::Tool::WebSearchTool::type_, + ?filters: OpenAI::Responses::Tool::WebSearchTool::Filters?, + ?search_context_size: OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size, + ?user_location: OpenAI::Responses::Tool::WebSearchTool::UserLocation? + ) -> void + + def to_hash: -> { + type: OpenAI::Models::Responses::Tool::WebSearchTool::type_, + filters: OpenAI::Responses::Tool::WebSearchTool::Filters?, + search_context_size: OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size, + user_location: OpenAI::Responses::Tool::WebSearchTool::UserLocation? + } + + type type_ = :web_search | :web_search_2025_08_26 + + module Type + extend OpenAI::Internal::Type::Enum + + WEB_SEARCH: :web_search + WEB_SEARCH_2025_08_26: :web_search_2025_08_26 + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::WebSearchTool::type_] + end + + type filters = { allowed_domains: ::Array[String]? } + + class Filters < OpenAI::Internal::Type::BaseModel + attr_accessor allowed_domains: ::Array[String]? + + def initialize: (?allowed_domains: ::Array[String]?) -> void + + def to_hash: -> { allowed_domains: ::Array[String]? 
} + end + + type search_context_size = :low | :medium | :high + + module SearchContextSize + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::WebSearchTool::search_context_size] + end + + type user_location = + { + city: String?, + country: String?, + region: String?, + timezone: String?, + type: OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_ + } + + class UserLocation < OpenAI::Internal::Type::BaseModel + attr_accessor city: String? + + attr_accessor country: String? + + attr_accessor region: String? + + attr_accessor timezone: String? + + attr_reader type: OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_? + + def type=: ( + OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_ + ) -> OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_ + + def initialize: ( + ?city: String?, + ?country: String?, + ?region: String?, + ?timezone: String?, + ?type: OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_ + ) -> void + + def to_hash: -> { + city: String?, + country: String?, + region: String?, + timezone: String?, + type: OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_ + } + + type type_ = :approximate + + module Type + extend OpenAI::Internal::Type::Enum + + APPROXIMATE: :approximate + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::WebSearchTool::UserLocation::type_] + end + end + end + type mcp = { server_label: String, From 565bd781d9f0920f67ade8932a6c9154308d3ad2 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 26 Aug 2025 20:49:38 +0000 Subject: [PATCH 295/295] chore(internal): version bump --- .release-please-manifest.json | 2 +- Gemfile.lock | 2 +- README.md | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index e7562934..0c2ecec6 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.19.0" + ".": "0.20.0" } \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 46a5d18d..93bc99bf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.19.0) + openai (0.20.0) connection_pool GEM diff --git a/README.md b/README.md index d4bc51fd..26bc5854 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ To use this gem, install via Bundler by adding the following to your application ```ruby -gem "openai", "~> 0.19.0" +gem "openai", "~> 0.20.0" ``` diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 345bbb9b..d4be0202 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.19.0" + VERSION = "0.20.0" end
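As a closing sketch that ties the two patches together — pin the bumped gem, enable the new web search tool, and read back its sources. The model name, input, and output-walking code are assumptions about the surrounding API, not taken from these diffs:

```ruby
# Sketch only. Gemfile: gem "openai", "~> 0.20.0"
require "openai"

client = OpenAI::Client.new

response = client.responses.create(
  model: "gpt-4.1", # placeholder model name
  input: "Summarize this week's Ruby news.",
  tools: [{type: :web_search}],
  include: [:"web_search_call.action.sources"]
)

# Walk output items defensively, since not every item carries an action
# and sources are only present when explicitly included.
response.output.each do |item|
  next unless item.respond_to?(:action) && item.action.respond_to?(:sources)
  Array(item.action.sources).each { |source| puts source.url }
end
```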